From bd9b83aa7d7597d805f91b09f700b563f654cb17 Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Fri, 24 Jan 2025 19:29:17 +0100 Subject: [PATCH 01/63] .Net: Remove DeleteRecordOptions and UpsertRecordOptions from MEVD (#10192) Closes #10172 /cc @westey-m --- ...extEmbeddingVectorStoreRecordCollection.cs | 16 +- .../MappingVectorStoreRecordCollection.cs | 16 +- .../Step4_NonStringKey_VectorStore.cs | 16 +- ...ISearchVectorStoreRecordCollectionTests.cs | 5 +- ...zureAISearchVectorStoreRecordCollection.cs | 8 +- ...mosDBMongoDBVectorStoreRecordCollection.cs | 15 +- ...osmosDBNoSQLVectorStoreRecordCollection.cs | 18 +- .../InMemoryVectorStoreRecordCollection.cs | 10 +- .../MongoDBVectorStoreRecordCollection.cs | 15 +- .../PineconeVectorStoreRecordCollection.cs | 13 +- .../PostgresVectorStoreRecordCollection.cs | 8 +- .../QdrantVectorStoreRecordCollection.cs | 16 +- ...RedisHashSetVectorStoreRecordCollection.cs | 12 +- .../RedisJsonVectorStoreRecordCollection.cs | 10 +- .../SqliteVectorStoreRecordCollection.cs | 66 ++---- .../WeaviateVectorStoreRecordCollection.cs | 13 +- .../QdrantVectorStoreRecordCollectionTests.cs | 5 +- .../CompatibilitySuppressions.xml | 214 ++++++++++++++++++ .../RecordOptions/DeleteRecordOptions.cs | 27 --- .../RecordOptions/UpsertRecordOptions.cs | 28 --- .../IVectorStoreRecordCollection.cs | 12 +- .../CompatibilitySuppressions.xml | 60 +++++ .../VolatileVectorStoreRecordCollection.cs | 10 +- 23 files changed, 395 insertions(+), 218 deletions(-) create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml delete mode 100644 dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/DeleteRecordOptions.cs delete mode 100644 dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/UpsertRecordOptions.cs create mode 100644 dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml diff --git a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs index a3c5517653af..5c1c4b05c56f 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs @@ -77,15 +77,15 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public Task DeleteAsync(TKey key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { - return this._decoratedVectorStoreRecordCollection.DeleteAsync(key, options, cancellationToken); + return this._decoratedVectorStoreRecordCollection.DeleteAsync(key, cancellationToken); } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { - return this._decoratedVectorStoreRecordCollection.DeleteBatchAsync(keys, options, cancellationToken); + return this._decoratedVectorStoreRecordCollection.DeleteBatchAsync(keys, cancellationToken); } /// @@ -101,18 +101,18 @@ public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecord } /// - public async Task UpsertAsync(TRecord record, UpsertRecordOptions? 
options = null, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { var recordWithEmbeddings = await this.AddEmbeddingsAsync(record, cancellationToken).ConfigureAwait(false); - return await this._decoratedVectorStoreRecordCollection.UpsertAsync(recordWithEmbeddings, options, cancellationToken).ConfigureAwait(false); + return await this._decoratedVectorStoreRecordCollection.UpsertAsync(recordWithEmbeddings, cancellationToken).ConfigureAwait(false); } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, UpsertRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { var recordWithEmbeddingsTasks = records.Select(r => this.AddEmbeddingsAsync(r, cancellationToken)); var recordWithEmbeddings = await Task.WhenAll(recordWithEmbeddingsTasks).ConfigureAwait(false); - var upsertResults = this._decoratedVectorStoreRecordCollection.UpsertBatchAsync(recordWithEmbeddings, options, cancellationToken); + var upsertResults = this._decoratedVectorStoreRecordCollection.UpsertBatchAsync(recordWithEmbeddings, cancellationToken); await foreach (var upsertResult in upsertResults.ConfigureAwait(false)) { yield return upsertResult; diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/MappingVectorStoreRecordCollection.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/MappingVectorStoreRecordCollection.cs index 581ed6bf2565..076be09c9ca5 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/MappingVectorStoreRecordCollection.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/MappingVectorStoreRecordCollection.cs @@ -64,15 +64,15 @@ public Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken } /// - public Task DeleteAsync(TPublicKey key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteAsync(TPublicKey key, CancellationToken cancellationToken = default) { - return this._collection.DeleteAsync(this._publicToInternalKeyMapper(key), options, cancellationToken); + return this._collection.DeleteAsync(this._publicToInternalKeyMapper(key), cancellationToken); } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { - return this._collection.DeleteBatchAsync(keys.Select(this._publicToInternalKeyMapper), options, cancellationToken); + return this._collection.DeleteBatchAsync(keys.Select(this._publicToInternalKeyMapper), cancellationToken); } /// @@ -101,18 +101,18 @@ public IAsyncEnumerable GetBatchAsync(IEnumerable key } /// - public async Task UpsertAsync(TPublicRecord record, UpsertRecordOptions? 
options = null, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TPublicRecord record, CancellationToken cancellationToken = default) { var internalRecord = this._publicToInternalRecordMapper(record); - var internalKey = await this._collection.UpsertAsync(internalRecord, options, cancellationToken).ConfigureAwait(false); + var internalKey = await this._collection.UpsertAsync(internalRecord, cancellationToken).ConfigureAwait(false); return this._internalToPublicKeyMapper(internalKey); } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, UpsertRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { var internalRecords = records.Select(this._publicToInternalRecordMapper); - var internalKeys = this._collection.UpsertBatchAsync(internalRecords, options, cancellationToken); + var internalKeys = this._collection.UpsertBatchAsync(internalRecords, cancellationToken); await foreach (var internalKey in internalKeys.ConfigureAwait(false)) { yield return this._internalToPublicKeyMapper(internalKey); diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs b/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs index 906df16d84a1..7303ddc9801a 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs @@ -124,15 +124,15 @@ public Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken } /// - public Task DeleteAsync(TPublicKey key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteAsync(TPublicKey key, CancellationToken cancellationToken = default) { - return this._collection.DeleteAsync(this._publicToInternalKeyMapper(key), options, cancellationToken); + return this._collection.DeleteAsync(this._publicToInternalKeyMapper(key), cancellationToken); } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { - return this._collection.DeleteBatchAsync(keys.Select(this._publicToInternalKeyMapper), options, cancellationToken); + return this._collection.DeleteBatchAsync(keys.Select(this._publicToInternalKeyMapper), cancellationToken); } /// @@ -161,18 +161,18 @@ public IAsyncEnumerable GetBatchAsync(IEnumerable key } /// - public async Task UpsertAsync(TPublicRecord record, UpsertRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TPublicRecord record, CancellationToken cancellationToken = default) { var internalRecord = this._publicToInternalRecordMapper(record); - var internalKey = await this._collection.UpsertAsync(internalRecord, options, cancellationToken).ConfigureAwait(false); + var internalKey = await this._collection.UpsertAsync(internalRecord, cancellationToken).ConfigureAwait(false); return this._internalToPublicKeyMapper(internalKey); } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, UpsertRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { var internalRecords = records.Select(this._publicToInternalRecordMapper); - var internalKeys = this._collection.UpsertBatchAsync(internalRecords, options, cancellationToken); + var internalKeys = this._collection.UpsertBatchAsync(internalRecords, cancellationToken); await foreach (var internalKey in internalKeys.ConfigureAwait(false)) { yield return this._internalToPublicKeyMapper(internalKey); diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs index 1d0a4d90bb34..467207b29ace 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -514,10 +514,7 @@ public async Task CanUpsertRecordWithCustomMapperAsync() }); // Act. - await sut.UpsertAsync( - model, - null, - this._testCancellationToken); + await sut.UpsertAsync(model, this._testCancellationToken); // Assert. mapperMock diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index 7658b52fc702..bdf25bd2b8a4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -263,7 +263,7 @@ public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, G } /// - public Task DeleteAsync(string key, DeleteRecordOptions? options = default, CancellationToken cancellationToken = default) + public Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -274,7 +274,7 @@ public Task DeleteAsync(string key, DeleteRecordOptions? options = default, Canc } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = default, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -285,7 +285,7 @@ public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? opti } /// - public async Task UpsertAsync(TRecord record, UpsertRecordOptions? options = default, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -298,7 +298,7 @@ public async Task UpsertAsync(TRecord record, UpsertRecordOptions? optio } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, UpsertRecordOptions? 
options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index 94f423743d65..d54a184e5771 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -117,7 +117,7 @@ public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellatio } /// - public async Task DeleteAsync(string key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -126,7 +126,7 @@ await this.RunOperationAsync("DeleteOne", () => this._mongoCollection.DeleteOneA } /// - public async Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -199,7 +199,7 @@ public async IAsyncEnumerable GetBatchAsync( } /// - public Task UpsertAsync(TRecord record, UpsertRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -225,14 +225,11 @@ await this._mongoCollection } /// - public async IAsyncEnumerable UpsertBatchAsync( - IEnumerable records, - UpsertRecordOptions? options = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); - var tasks = records.Select(record => this.UpsertAsync(record, options, cancellationToken)); + var tasks = records.Select(record => this.UpsertAsync(record, cancellationToken)); var results = await Task.WhenAll(tasks).ConfigureAwait(false); foreach (var result in results) @@ -278,7 +275,7 @@ public async Task> VectorizedSearchAsync( this._storagePropertyNames); // Constructing a query to fetch "skip + top" total items - // to perform skip logic locally, since skip option is not part of API. + // to perform skip logic locally, since skip option is not part of API. 
var itemsAmount = searchOptions.Skip + searchOptions.Top; var vectorPropertyIndexKind = AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.GetVectorPropertyIndexKind(vectorProperty.IndexKind); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index ba993c758dc8..6ab9222d2a14 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -215,7 +215,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) #region Implementation of IVectorStoreRecordCollection /// - public Task DeleteAsync(string key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteAsync(string key, CancellationToken cancellationToken = default) { // Use record key as partition key var compositeKey = new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key); @@ -224,7 +224,7 @@ public Task DeleteAsync(string key, DeleteRecordOptions? options = null, Cancell } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { // Use record keys as partition keys var compositeKeys = keys.Select(key => new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key)); @@ -262,7 +262,7 @@ public async IAsyncEnumerable GetBatchAsync( } /// - public async Task UpsertAsync(TRecord record, UpsertRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { var key = await this.InternalUpsertAsync(record, cancellationToken).ConfigureAwait(false); @@ -270,10 +270,7 @@ public async Task UpsertAsync(TRecord record, UpsertRecordOptions? optio } /// - public async IAsyncEnumerable UpsertBatchAsync( - IEnumerable records, - UpsertRecordOptions? options = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -318,19 +315,19 @@ public async IAsyncEnumerable GetBatchAsync( } /// - public Task DeleteAsync(AzureCosmosDBNoSQLCompositeKey key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteAsync(AzureCosmosDBNoSQLCompositeKey key, CancellationToken cancellationToken = default) { return this.InternalDeleteAsync([key], cancellationToken); } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { return this.InternalDeleteAsync(keys, cancellationToken); } /// - Task IVectorStoreRecordCollection.UpsertAsync(TRecord record, UpsertRecordOptions? 
options, CancellationToken cancellationToken) + Task IVectorStoreRecordCollection.UpsertAsync(TRecord record, CancellationToken cancellationToken) { return this.InternalUpsertAsync(record, cancellationToken); } @@ -338,7 +335,6 @@ Task IVectorStoreRecordCollection async IAsyncEnumerable IVectorStoreRecordCollection.UpsertBatchAsync( IEnumerable records, - UpsertRecordOptions? options, [EnumeratorCancellation] CancellationToken cancellationToken) { Verify.NotNull(records); diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index 3526990f18b0..a2fe21e0cfc6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -165,7 +165,7 @@ public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, Get } /// - public Task DeleteAsync(TKey key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { var collectionDictionary = this.GetCollectionDictionary(); @@ -174,7 +174,7 @@ public Task DeleteAsync(TKey key, DeleteRecordOptions? options = null, Cancellat } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { var collectionDictionary = this.GetCollectionDictionary(); @@ -187,7 +187,7 @@ public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? option } /// - public Task UpsertAsync(TRecord record, UpsertRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -200,11 +200,11 @@ public Task UpsertAsync(TRecord record, UpsertRecordOptions? options = nul } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, UpsertRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { foreach (var record in records) { - yield return await this.UpsertAsync(record, options, cancellationToken).ConfigureAwait(false); + yield return await this.UpsertAsync(record, cancellationToken).ConfigureAwait(false); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index f27c8a975bc3..353b3534dab9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -120,7 +120,7 @@ public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellatio } /// - public async Task DeleteAsync(string key, DeleteRecordOptions? 
options = null, CancellationToken cancellationToken = default) + public async Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -129,7 +129,7 @@ await this.RunOperationAsync("DeleteOne", () => this._mongoCollection.DeleteOneA } /// - public async Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -202,7 +202,7 @@ public async IAsyncEnumerable GetBatchAsync( } /// - public Task UpsertAsync(TRecord record, UpsertRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -228,14 +228,11 @@ await this._mongoCollection } /// - public async IAsyncEnumerable UpsertBatchAsync( - IEnumerable records, - UpsertRecordOptions? options = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); - var tasks = records.Select(record => this.UpsertAsync(record, options, cancellationToken)); + var tasks = records.Select(record => this.UpsertAsync(record, cancellationToken)); var results = await Task.WhenAll(tasks).ConfigureAwait(false); foreach (var result in results) @@ -281,7 +278,7 @@ public async Task> VectorizedSearchAsync( this._storagePropertyNames); // Constructing a query to fetch "skip + top" total items - // to perform skip logic locally, since skip option is not part of API. + // to perform skip logic locally, since skip option is not part of API. var itemsAmount = searchOptions.Skip + searchOptions.Top; var numCandidates = this._options.NumCandidates ?? itemsAmount * MongoDBConstants.DefaultNumCandidatesRatio; diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 3ae05bd4f0f6..8a956f53f635 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -177,15 +177,15 @@ public async IAsyncEnumerable GetBatchAsync( } /// - public Task DeleteAsync(string key, DeleteRecordOptions? options = default, CancellationToken cancellationToken = default) + public Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); - return this.DeleteBatchAsync([key], options, cancellationToken); + return this.DeleteBatchAsync([key], cancellationToken); } /// - public async Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = default, CancellationToken cancellationToken = default) + public async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -199,7 +199,7 @@ await this.RunOperationAsync( } /// - public async Task UpsertAsync(TRecord record, UpsertRecordOptions? 
options = default, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -221,10 +221,7 @@ await this.RunOperationAsync( } /// - public async IAsyncEnumerable UpsertBatchAsync( - IEnumerable records, - UpsertRecordOptions? options = default, - [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index 95c8a4bcf282..de4a432ea48c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -141,7 +141,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public Task UpsertAsync(TRecord record, UpsertRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { const string OperationName = "Upsert"; @@ -166,7 +166,7 @@ public Task UpsertAsync(TRecord record, UpsertRecordOptions? options = nul } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, UpsertRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { const string OperationName = "UpsertBatch"; @@ -232,7 +232,7 @@ public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecord } /// - public Task DeleteAsync(TKey key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { const string OperationName = "Delete"; return this.RunOperationAsync(OperationName, () => @@ -241,7 +241,7 @@ public Task DeleteAsync(TKey key, DeleteRecordOptions? options = null, Cancellat } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { const string OperationName = "DeleteBatch"; return this.RunOperationAsync(OperationName, () => diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index 6ddf009391b9..7dd77b76baff 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -268,7 +268,7 @@ public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecord } /// - public Task DeleteAsync(ulong key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteAsync(ulong key, CancellationToken cancellationToken = default) { Verify.NotNull(key); @@ -282,7 +282,7 @@ public Task DeleteAsync(ulong key, DeleteRecordOptions? 
options = null, Cancella } /// - public Task DeleteAsync(Guid key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteAsync(Guid key, CancellationToken cancellationToken = default) { Verify.NotNull(key); @@ -296,7 +296,7 @@ public Task DeleteAsync(Guid key, DeleteRecordOptions? options = null, Cancellat } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = default, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -310,7 +310,7 @@ public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? optio } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = default, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -324,7 +324,7 @@ public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? option } /// - public async Task UpsertAsync(TRecord record, UpsertRecordOptions? options = default, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -343,7 +343,7 @@ await this.RunOperationAsync( } /// - async Task IVectorStoreRecordCollection.UpsertAsync(TRecord record, UpsertRecordOptions? options, CancellationToken cancellationToken) + async Task IVectorStoreRecordCollection.UpsertAsync(TRecord record, CancellationToken cancellationToken) { Verify.NotNull(record); @@ -362,7 +362,7 @@ await this.RunOperationAsync( } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, UpsertRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -385,7 +385,7 @@ await this.RunOperationAsync( } /// - async IAsyncEnumerable IVectorStoreRecordCollection.UpsertBatchAsync(IEnumerable records, UpsertRecordOptions? options, [EnumeratorCancellation] CancellationToken cancellationToken) + async IAsyncEnumerable IVectorStoreRecordCollection.UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken) { Verify.NotNull(records); diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index ebf21a7cf5a2..41971c5adb86 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -262,7 +262,7 @@ public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, G } /// - public Task DeleteAsync(string key, DeleteRecordOptions? options = default, CancellationToken cancellationToken = default) + public Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -277,17 +277,17 @@ public Task DeleteAsync(string key, DeleteRecordOptions? options = default, Canc } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? 
options = default, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); // Remove records in parallel. - var tasks = keys.Select(key => this.DeleteAsync(key, options, cancellationToken)); + var tasks = keys.Select(key => this.DeleteAsync(key, cancellationToken)); return Task.WhenAll(tasks); } /// - public async Task UpsertAsync(TRecord record, UpsertRecordOptions? options = default, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -311,12 +311,12 @@ await this.RunOperationAsync( } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, UpsertRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); // Upsert records in parallel. - var tasks = records.Select(x => this.UpsertAsync(x, options, cancellationToken)); + var tasks = records.Select(x => this.UpsertAsync(x, cancellationToken)); var results = await Task.WhenAll(tasks).ConfigureAwait(false); foreach (var result in results) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index 08fb1155ee60..f8afa3ed875e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -278,7 +278,7 @@ public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, G } /// - public Task DeleteAsync(string key, DeleteRecordOptions? options = default, CancellationToken cancellationToken = default) + public Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -294,17 +294,17 @@ public Task DeleteAsync(string key, DeleteRecordOptions? options = default, Canc } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = default, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); // Remove records in parallel. - var tasks = keys.Select(key => this.DeleteAsync(key, options, cancellationToken)); + var tasks = keys.Select(key => this.DeleteAsync(key, cancellationToken)); return Task.WhenAll(tasks); } /// - public async Task UpsertAsync(TRecord record, UpsertRecordOptions? options = default, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -335,7 +335,7 @@ await this.RunOperationAsync( } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, UpsertRecordOptions? 
options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 9b6d247e2a36..08c976abf43f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -219,27 +219,27 @@ public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecor } /// - public Task UpsertAsync(TRecord record, UpsertRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { - return this.InternalUpsertAsync(record, options, cancellationToken); + return this.InternalUpsertAsync(record, cancellationToken); } /// - public IAsyncEnumerable UpsertBatchAsync(IEnumerable records, UpsertRecordOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable UpsertBatchAsync(IEnumerable records, CancellationToken cancellationToken = default) { - return this.InternalUpsertBatchAsync(records, options, cancellationToken); + return this.InternalUpsertBatchAsync(records, cancellationToken); } /// - public Task DeleteAsync(ulong key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteAsync(ulong key, CancellationToken cancellationToken = default) { - return this.InternalDeleteAsync(key, options, cancellationToken); + return this.InternalDeleteAsync(key, cancellationToken); } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { - return this.InternalDeleteBatchAsync(keys, options, cancellationToken); + return this.InternalDeleteBatchAsync(keys, cancellationToken); } #endregion @@ -259,27 +259,27 @@ public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetReco } /// - Task IVectorStoreRecordCollection.UpsertAsync(TRecord record, UpsertRecordOptions? options, CancellationToken cancellationToken) + Task IVectorStoreRecordCollection.UpsertAsync(TRecord record, CancellationToken cancellationToken) { - return this.InternalUpsertAsync(record, options, cancellationToken); + return this.InternalUpsertAsync(record, cancellationToken); } /// - IAsyncEnumerable IVectorStoreRecordCollection.UpsertBatchAsync(IEnumerable records, UpsertRecordOptions? options, CancellationToken cancellationToken) + IAsyncEnumerable IVectorStoreRecordCollection.UpsertBatchAsync(IEnumerable records, CancellationToken cancellationToken) { - return this.InternalUpsertBatchAsync(records, options, cancellationToken); + return this.InternalUpsertBatchAsync(records, cancellationToken); } /// - public Task DeleteAsync(string key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteAsync(string key, CancellationToken cancellationToken = default) { - return this.InternalDeleteAsync(key, options, cancellationToken); + return this.InternalDeleteAsync(key, cancellationToken); } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? 
options = null, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { - return this.InternalDeleteBatchAsync(keys, options, cancellationToken); + return this.InternalDeleteBatchAsync(keys, cancellationToken); } #endregion @@ -475,10 +475,7 @@ private async IAsyncEnumerable InternalGetBatchAsync( } } - private async Task InternalUpsertAsync( - TRecord record, - UpsertRecordOptions? options, - CancellationToken cancellationToken) + private async Task InternalUpsertAsync(TRecord record, CancellationToken cancellationToken) { const string OperationName = "Upsert"; @@ -494,17 +491,14 @@ private async Task InternalUpsertAsync( var condition = new SqliteWhereEqualsCondition(this._propertyReader.KeyPropertyStoragePropertyName, key); - var upsertedRecordKey = await this.InternalUpsertBatchAsync([storageModel], condition, options, cancellationToken) + var upsertedRecordKey = await this.InternalUpsertBatchAsync([storageModel], condition, cancellationToken) .FirstOrDefaultAsync(cancellationToken) .ConfigureAwait(false); return upsertedRecordKey ?? throw new VectorStoreOperationException("Error occurred during upsert operation."); } - private IAsyncEnumerable InternalUpsertBatchAsync( - IEnumerable records, - UpsertRecordOptions? options, - CancellationToken cancellationToken) + private IAsyncEnumerable InternalUpsertBatchAsync(IEnumerable records, CancellationToken cancellationToken) { const string OperationName = "UpsertBatch"; @@ -518,13 +512,12 @@ private IAsyncEnumerable InternalUpsertBatchAsync( var condition = new SqliteWhereInCondition(this._propertyReader.KeyPropertyStoragePropertyName, keys); - return this.InternalUpsertBatchAsync(storageModels, condition, options, cancellationToken); + return this.InternalUpsertBatchAsync(storageModels, condition, cancellationToken); } private async IAsyncEnumerable InternalUpsertBatchAsync( List> storageModels, SqliteWhereCondition condition, - UpsertRecordOptions? options, [EnumeratorCancellation] CancellationToken cancellationToken) { Verify.NotNull(storageModels); @@ -571,22 +564,16 @@ private async IAsyncEnumerable InternalUpsertBatchAsync( } } - private Task InternalDeleteAsync( - TKey key, - DeleteRecordOptions? options, - CancellationToken cancellationToken) + private Task InternalDeleteAsync(TKey key, CancellationToken cancellationToken) { Verify.NotNull(key); var condition = new SqliteWhereEqualsCondition(this._propertyReader.KeyPropertyStoragePropertyName, key); - return this.InternalDeleteBatchAsync(condition, options, cancellationToken); + return this.InternalDeleteBatchAsync(condition, cancellationToken); } - private Task InternalDeleteBatchAsync( - IEnumerable keys, - DeleteRecordOptions? options, - CancellationToken cancellationToken) + private Task InternalDeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken) { Verify.NotNull(keys); @@ -598,13 +585,10 @@ private Task InternalDeleteBatchAsync( this._propertyReader.KeyPropertyStoragePropertyName, keysList); - return this.InternalDeleteBatchAsync(condition, options, cancellationToken); + return this.InternalDeleteBatchAsync(condition, cancellationToken); } - private Task InternalDeleteBatchAsync( - SqliteWhereCondition condition, - DeleteRecordOptions? 
options, - CancellationToken cancellationToken) + private Task InternalDeleteBatchAsync(SqliteWhereCondition condition, CancellationToken cancellationToken) { const string OperationName = "Delete"; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index 64eb3b91002f..a4ba633535a7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -213,7 +213,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public Task DeleteAsync(Guid key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteAsync(Guid key, CancellationToken cancellationToken = default) { const string OperationName = "DeleteObject"; @@ -226,7 +226,7 @@ public Task DeleteAsync(Guid key, DeleteRecordOptions? options = null, Cancellat } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { const string OperationName = "DeleteObjectBatch"; const string ContainsAnyOperator = "ContainsAny"; @@ -295,18 +295,15 @@ public async IAsyncEnumerable GetBatchAsync( } /// - public async Task UpsertAsync(TRecord record, UpsertRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { - return await this.UpsertBatchAsync([record], options, cancellationToken) + return await this.UpsertBatchAsync([record], cancellationToken) .FirstOrDefaultAsync(cancellationToken) .ConfigureAwait(false); } /// - public async IAsyncEnumerable UpsertBatchAsync( - IEnumerable records, - UpsertRecordOptions? 
options = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { const string OperationName = "UpsertCollectionObject"; diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs index 7d215ab2eeac..1bb89a91344e 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs @@ -510,10 +510,7 @@ public async Task CanUpsertRecordWithCustomMapperAsync() var model = CreateModel(UlongTestRecordKey1, true); // Act - await sut.UpsertAsync( - model, - null, - this._testCancellationToken); + await sut.UpsertAsync(model, this._testCancellationToken); // Assert mapperMock diff --git a/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml new file mode 100644 index 000000000000..0860b81e7585 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml @@ -0,0 +1,214 @@ + + + + + CP0001 + T:Microsoft.Extensions.VectorData.DeleteRecordOptions + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0001 + T:Microsoft.Extensions.VectorData.UpsertRecordOptions + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0001 + T:Microsoft.Extensions.VectorData.DeleteRecordOptions + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0001 + T:Microsoft.Extensions.VectorData.UpsertRecordOptions + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0001 + T:Microsoft.Extensions.VectorData.DeleteRecordOptions + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0001 + T:Microsoft.Extensions.VectorData.UpsertRecordOptions + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(`1,Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + 
lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(`1,Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(`1,Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + 
M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(`1,System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(`1,System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(`1,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + \ No newline at end of file diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/DeleteRecordOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/DeleteRecordOptions.cs deleted file mode 100644 index cc1ebd74745e..000000000000 --- 
a/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/DeleteRecordOptions.cs +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace Microsoft.Extensions.VectorData; - -/// -/// Options when calling . -/// -/// -/// This class does not currently include any options, but is added for future extensibility of the API. -/// -public class DeleteRecordOptions -{ - /// - /// Initializes a new instance of the class. - /// - public DeleteRecordOptions() - { - } - - /// - /// Initializes a new instance of the class by cloning the given options. - /// - /// The options to clone - public DeleteRecordOptions(DeleteRecordOptions source) - { - } -} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/UpsertRecordOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/UpsertRecordOptions.cs deleted file mode 100644 index bc6e0b2dd52b..000000000000 --- a/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/UpsertRecordOptions.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace Microsoft.Extensions.VectorData; - -/// -/// Options when calling . -/// Reserved for future use. -/// -/// -/// This class does not currently include any options, but is added for future extensibility of the API. -/// -public class UpsertRecordOptions -{ - /// - /// Initializes a new instance of the class. - /// - public UpsertRecordOptions() - { - } - - /// - /// Initializes a new instance of the class by cloning the given options. - /// - /// The options to clone - public UpsertRecordOptions(UpsertRecordOptions source) - { - } -} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs index 1aacdff332fa..6415ed35fe59 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs @@ -79,11 +79,10 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch /// Deletes a record from the vector store. Does not guarantee that the collection exists. /// /// The unique id associated with the record to remove. - /// Optional options for removing the record. /// The to monitor for cancellation requests. The default is . /// The unique identifier for the record. /// Throw when the command fails to execute for any reason other than that the record does not exit. - Task DeleteAsync(TKey key, DeleteRecordOptions? options = default, CancellationToken cancellationToken = default); + Task DeleteAsync(TKey key, CancellationToken cancellationToken = default); /// /// Deletes a batch of records from the vector store. Does not guarantee that the collection exists. @@ -92,11 +91,10 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch /// If any record cannot be deleted for any other reason, the operation will throw. Some records may have already been deleted, while others may not, so the entire operation should be retried. /// /// The unique ids associated with the records to remove. - /// Optional options for removing the records. /// The to monitor for cancellation requests. The default is . /// A that completes when the records have been deleted. /// Throw when the command fails to execute for any reason other than that a record does not exist. - Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? 
options = default, CancellationToken cancellationToken = default); + Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default); /// /// Upserts a record into the vector store. Does not guarantee that the collection exists. @@ -104,12 +102,11 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch /// If the record does not exist, it will be created. /// /// The record to upsert. - /// Optional options for upserting the record. /// The to monitor for cancellation requests. The default is . /// The unique identifier for the record. /// Throw when the command fails to execute for any reason. /// Throw when mapping between the storage model and record data model fails. - Task UpsertAsync(TRecord record, UpsertRecordOptions? options = default, CancellationToken cancellationToken = default); + Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default); /// /// Upserts a group of records into the vector store. Does not guarantee that the collection exists. @@ -118,10 +115,9 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch /// Upserts will be made in a single request or in a single parallel batch depending on the available store functionality. /// /// The records to upsert. - /// Optional options for upserting the records. /// The to monitor for cancellation requests. The default is . /// The unique identifiers for the records. /// Throw when the command fails to execute for any reason. /// Throw when mapping between the storage model and record data model fails. - IAsyncEnumerable UpsertBatchAsync(IEnumerable records, UpsertRecordOptions? options = default, CancellationToken cancellationToken = default); + IAsyncEnumerable UpsertBatchAsync(IEnumerable records, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml b/dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml new file mode 100644 index 000000000000..de2e33319a56 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml @@ -0,0 +1,60 @@ + + + + + CP0002 + M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.DeleteAsync(`0,Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + lib/net8.0/Microsoft.SemanticKernel.Core.dll + lib/net8.0/Microsoft.SemanticKernel.Core.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + lib/net8.0/Microsoft.SemanticKernel.Core.dll + lib/net8.0/Microsoft.SemanticKernel.Core.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.UpsertAsync(`1,Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) + lib/net8.0/Microsoft.SemanticKernel.Core.dll + lib/net8.0/Microsoft.SemanticKernel.Core.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) + lib/net8.0/Microsoft.SemanticKernel.Core.dll + lib/net8.0/Microsoft.SemanticKernel.Core.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.DeleteAsync(`0,Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + 
lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.UpsertAsync(`1,Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + true + + \ No newline at end of file diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs index e68a3f2d5af2..da062934cfbb 100644 --- a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs +++ b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs @@ -168,7 +168,7 @@ public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, Get } /// - public Task DeleteAsync(TKey key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { var collectionDictionary = this.GetCollectionDictionary(); @@ -177,7 +177,7 @@ public Task DeleteAsync(TKey key, DeleteRecordOptions? options = null, Cancellat } /// - public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { var collectionDictionary = this.GetCollectionDictionary(); @@ -190,7 +190,7 @@ public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? option } /// - public Task UpsertAsync(TRecord record, UpsertRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -203,11 +203,11 @@ public Task UpsertAsync(TRecord record, UpsertRecordOptions? options = nul } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, UpsertRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { foreach (var record in records) { - yield return await this.UpsertAsync(record, options, cancellationToken).ConfigureAwait(false); + yield return await this.UpsertAsync(record, cancellationToken).ConfigureAwait(false); } } From 2434faf353ebacddb1c9fa6b07b39fec56cf135b Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Wed, 12 Feb 2025 00:02:40 +0100 Subject: [PATCH 02/63] .Net MEVD: LINQ-based filtering (#10273) Implement LINQ-based vector search filtering Closes #10156 Does most of #10194 --- .github/_typos.toml | 1 + dotnet/Directory.Build.props | 5 + dotnet/Directory.Packages.props | 8 +- dotnet/SK-dotnet.sln | 104 +++++ dotnet/SK-dotnet.sln.DotSettings | 1 + ...extEmbeddingVectorStoreRecordCollection.cs | 6 +- .../MappingVectorStoreRecordCollection.cs | 8 + .../QdrantFactory.cs | 2 + ...torStore_VectorSearch_MultiStore_Common.cs | 3 +- .../Memory/VectorStore_VectorSearch_Simple.cs | 3 +- .../Concepts/Search/VectorStore_TextSearch.cs | 2 +- .../Step2_Vector_Search.cs | 2 +- .../Step4_NonStringKey_VectorStore.cs | 5 + ...VectorStoreCollectionSearchMappingTests.cs | 12 +- ...ISearchVectorStoreRecordCollectionTests.cs | 2 + ...VectorStoreCollectionSearchMappingTests.cs | 2 + ...MongoDBVectorStoreRecordCollectionTests.cs | 3 +- ...LVectorStoreCollectionQueryBuilderTests.cs | 23 +- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 2 +- .../Connectors.AzureOpenAI.csproj | 1 + ...nMemoryVectorStoreRecordCollectionTests.cs | 12 +- .../AzureAISearchFilterTranslator.cs | 366 +++++++++++++++++ ...earchVectorStoreCollectionSearchMapping.cs | 6 +- ...zureAISearchVectorStoreRecordCollection.cs | 46 ++- .../AzureCosmosDBMongoDBFilterTranslator.cs | 258 ++++++++++++ ...ngoDBVectorStoreCollectionSearchMapping.cs | 2 + ...mosDBMongoDBVectorStoreRecordCollection.cs | 22 +- .../AzureCosmosDBNoSQLConstants.cs | 2 +- .../AzureCosmosDBNoSQLFilter.cs | 15 - ...BNoSQLVectorStoreCollectionQueryBuilder.cs | 80 ++-- ...osmosDBNoSQLVectorStoreRecordCollection.cs | 6 +- .../AzureCosmosDBNoSqlFilterTranslator.cs | 282 +++++++++++++ ...emoryVectorStoreCollectionSearchMapping.cs | 14 +- .../InMemoryVectorStoreRecordCollection.cs | 20 +- .../MongoDBFilterTranslator.cs | 258 ++++++++++++ ...ngoDBVectorStoreCollectionSearchMapping.cs | 8 +- .../MongoDBVectorStoreRecordCollection.cs | 20 +- ...econeVectorStoreCollectionSearchMapping.cs | 2 + .../PineconeVectorStoreRecordCollection.cs | 7 +- ...PostgresVectorStoreCollectionSqlBuilder.cs | 11 +- .../IPostgresVectorStoreDbClient.cs | 15 +- .../PostgresFilterTranslator.cs | 332 +++++++++++++++ ...PostgresVectorStoreCollectionSqlBuilder.cs | 134 +++--- .../PostgresVectorStoreDbClient.cs | 14 +- .../PostgresVectorStoreRecordCollection.cs | 11 +- ...ostgresVectorStoreRecordPropertyMapping.cs | 2 +- .../QdrantFilterTranslator.cs | 382 ++++++++++++++++++ ...drantVectorStoreCollectionSearchMapping.cs | 10 +- .../QdrantVectorStoreRecordCollection.cs | 15 +- .../RedisFilterTranslator.cs | 230 +++++++++++ ...RedisHashSetVectorStoreRecordCollection.cs | 5 +- .../RedisJsonVectorStoreRecordCollection.cs | 4 +- ...RedisVectorStoreCollectionSearchMapping.cs | 26 +- .../SqliteFilterTranslator.cs | 359 ++++++++++++++++ ...liteVectorStoreCollectionCommandBuilder.cs | 26 +- .../SqliteVectorStoreRecordCollection.cs | 38 +- .../WeaviateFilterTranslator.cs | 260 
++++++++++++ .../WeaviateVectorStoreRecordCollection.cs | 6 +- ...VectorStoreRecordCollectionQueryBuilder.cs | 31 +- ...VectorStoreCollectionSearchMappingTests.cs | 36 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 3 +- .../Connectors.OpenAI.csproj | 6 + ...resVectorStoreCollectionSqlBuilderTests.cs | 53 --- ...VectorStoreCollectionSearchMappingTests.cs | 8 +- .../QdrantVectorStoreRecordCollectionTests.cs | 2 + ...HashSetVectorStoreRecordCollectionTests.cs | 2 + ...disJsonVectorStoreRecordCollectionTests.cs | 2 + ...VectorStoreCollectionSearchMappingTests.cs | 24 +- ...ectorStoreCollectionCommandBuilderTests.cs | 2 + ...rStoreRecordCollectionQueryBuilderTests.cs | 15 +- ...eaviateVectorStoreRecordCollectionTests.cs | 7 +- .../CompatibilitySuppressions.xml | 107 ++++- .../VectorSearch/IVectorizableTextSearch.cs | 2 +- .../VectorSearch/IVectorizedSearch.cs | 2 +- .../VectorSearch/VectorSearchFilter.cs | 1 + .../VectorSearch/VectorSearchOptions.cs | 11 +- .../Functions.OpenApi.csproj | 4 + ...ISearchVectorStoreRecordCollectionTests.cs | 4 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 2 + ...DBNoSQLVectorStoreRecordCollectionTests.cs | 2 + ...MongoDBVectorStoreRecordCollectionTests.cs | 2 + ...ineconeVectorStoreRecordCollectionTests.cs | 4 +- ...ostgresVectorStoreRecordCollectionTests.cs | 2 + .../QdrantVectorStoreRecordCollectionTests.cs | 4 +- ...HashSetVectorStoreRecordCollectionTests.cs | 10 +- ...disJsonVectorStoreRecordCollectionTests.cs | 10 +- .../SqliteVectorStoreRecordCollectionTests.cs | 2 + ...eaviateVectorStoreRecordCollectionTests.cs | 2 + .../Data/BaseVectorStoreTextSearchTests.cs | 2 +- .../src/Diagnostics/UnreachableException.cs | 50 +++ .../src/System/IndexRange.cs | 288 +++++++++++++ .../Plugins.Web/Bing/BingTextSearch.cs | 3 + .../Plugins.Web/Google/GoogleTextSearch.cs | 2 + .../SemanticKernel.Abstractions.csproj | 3 + .../Search/MockVectorizableTextSearch.cs | 2 +- .../CompatibilitySuppressions.xml | 16 +- .../Data/TextSearch/VectorStoreTextSearch.cs | 4 +- .../VolatileVectorStoreRecordCollection.cs | 9 +- .../SemanticKernel.Core.csproj | 2 +- .../Data/VectorStoreTextSearchTestBase.cs | 2 +- ...olatileVectorStoreRecordCollectionTests.cs | 10 +- .../AzureAISearchIntegrationTests.csproj | 31 ++ .../Filter/AzureAISearchBasicFilterTests.cs | 13 + .../Filter/AzureAISearchFilterFixture.cs | 15 + .../Properties/AssemblyAttributes.cs | 3 + .../Support/AzureAISearchTestEnvironment.cs | 28 ++ .../Support/AzureAISearchTestStore.cs | 45 +++ .../AzureAISearchUrlRequiredAttribute.cs | 19 + .../CosmosMongoDBIntegrationTests.csproj | 29 ++ .../Filter/CosmosMongoBasicFilterTests.cs | 59 +++ .../Filter/CosmosMongoFilterFixture.cs | 15 + .../Properties/AssemblyAttributes.cs | 3 + ...CosmosConnectionStringRequiredAttribute.cs | 20 + .../Support/CosmosMongoDBTestEnvironment.cs | 25 ++ .../Support/CosmosMongoDBTestStore.cs | 45 +++ .../CosmosNoSQLIntegrationTests.csproj | 29 ++ .../Filter/CosmosNoSQLBasicFilterTests.cs | 8 + .../Filter/CosmosNoSQLFilterFixture.cs | 12 + .../Properties/AssemblyAttributes.cs | 3 + ...CosmosConnectionStringRequiredAttribute.cs | 19 + .../Support/CosmosNoSQLTestEnvironment.cs | 25 ++ .../Support/CosmosNoSQLTestStore.cs | 63 +++ .../Directory.Build.props | 20 + .../Filter/InMemoryBasicFilterTests.cs | 8 + .../Filter/InMemoryFilterFixture.cs | 12 + .../InMemoryIntegrationTests.csproj | 26 ++ .../Support/InMemoryTestStore.cs | 27 ++ .../Filter/MongoDBBasicFilterTests.cs | 59 +++ .../Filter/MongoDBFilterFixture.cs | 12 + .../MongoDBIntegrationTests.csproj | 27 
++ .../Support/MongoDBTestStore.cs | 52 +++ .../Filter/PostgresBasicFilterTests.cs | 32 ++ .../Filter/PostgresFilterFixture.cs | 12 + .../PostgresIntegrationTests.csproj | 27 ++ .../Support/PostgresTestStore.cs | 69 ++++ .../Filter/QdrantBasicFilterTests.cs | 8 + .../Filter/QdrantFilterFixture.cs | 15 + .../QdrantIntegrationTests.csproj | 27 ++ .../Support/QdrantTestStore.cs | 41 ++ .../Support/TestContainer/QdrantBuilder.cs | 56 +++ .../TestContainer/QdrantConfiguration.cs | 53 +++ .../Support/TestContainer/QdrantContainer.cs | 7 + .../Filter/RedisBasicFilterTests.cs | 85 ++++ .../Filter/RedisFilterFixture.cs | 65 +++ .../RedisIntegrationTests.csproj | 27 ++ .../Support/RedisTestStore.cs | 43 ++ .../Filter/SqliteBasicFilterTests.cs | 45 +++ .../Filter/SqliteFilterFixture.cs | 22 + .../Properties/AssemblyAttributes.cs | 3 + .../SqliteIntegrationTests.csproj | 26 ++ .../Support/SqliteTestEnvironment.cs | 56 +++ .../Support/SqliteTestStore.cs | 55 +++ .../Support/SqliteVecRequiredAttribute.cs | 19 + .../Filter/BasicFilterTestsBase.cs | 283 +++++++++++++ .../Filter/FilterFixtureBase.cs | 191 +++++++++ .../Support/TestStore.cs | 52 +++ .../VectorDataIntegrationTests.csproj | 24 ++ .../Xunit/ConditionalFactAttribute.cs | 10 + .../Xunit/ConditionalFactDiscoverer.cs | 23 ++ .../Xunit/ConditionalFactTestCase.cs | 39 ++ .../Xunit/ConditionalTheoryAttribute.cs | 10 + .../Xunit/ITestCondition.cs | 10 + .../Xunit/XunitTestCaseExtensions.cs | 51 +++ .../Filter/WeaviateBasicFilterTests.cs | 62 +++ .../Filter/WeaviateFilterFixture.cs | 14 + .../Support/TestContainer/WeaviateBuilder.cs | 48 +++ .../TestContainer/WeaviateConfiguration.cs | 53 +++ .../TestContainer/WeaviateContainer.cs | 7 + .../Support/WeaviateTestStore.cs | 41 ++ .../WeaviateIntegrationTests.csproj | 27 ++ 170 files changed, 6366 insertions(+), 380 deletions(-) create mode 100644 dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs delete mode 100644 dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLFilter.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs create mode 100644 dotnet/src/InternalUtilities/src/Diagnostics/UnreachableException.cs create mode 100644 dotnet/src/InternalUtilities/src/System/IndexRange.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/AzureAISearchIntegrationTests.csproj create mode 100644 dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicFilterTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchFilterFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Properties/AssemblyAttributes.cs create mode 100644 
dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestEnvironment.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestStore.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchUrlRequiredAttribute.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CosmosMongoDBIntegrationTests.csproj create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicFilterTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoFilterFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Properties/AssemblyAttributes.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosConnectionStringRequiredAttribute.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestEnvironment.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestStore.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CosmosNoSQLIntegrationTests.csproj create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicFilterTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLFilterFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Properties/AssemblyAttributes.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosConnectionStringRequiredAttribute.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestEnvironment.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestStore.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/Directory.Build.props create mode 100644 dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicFilterTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryFilterFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/InMemoryIntegrationTests.csproj create mode 100644 dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemoryTestStore.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicFilterTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBFilterFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/MongoDBIntegrationTests.csproj create mode 100644 dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Support/MongoDBTestStore.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicFilterTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresFilterFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/PostgresIntegrationTests.csproj create mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresTestStore.cs create mode 100644 
dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicFilterTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantFilterFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/QdrantIntegrationTests.csproj create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantTestStore.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/TestContainer/QdrantBuilder.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/TestContainer/QdrantConfiguration.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/TestContainer/QdrantContainer.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisFilterFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/RedisIntegrationTests.csproj create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisTestStore.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteFilterFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Properties/AssemblyAttributes.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/SqliteIntegrationTests.csproj create mode 100644 dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestEnvironment.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteVecRequiredAttribute.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTestsBase.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/FilterFixtureBase.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorDataIntegrationTests.csproj create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalFactAttribute.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalFactDiscoverer.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalFactTestCase.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryAttribute.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ITestCondition.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/XunitTestCaseExtensions.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateFilterFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateBuilder.cs create mode 100644 
dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateConfiguration.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateContainer.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/WeaviateIntegrationTests.csproj diff --git a/.github/_typos.toml b/.github/_typos.toml index 457e6bca4c2c..d9a2dcb7a2e4 100644 --- a/.github/_typos.toml +++ b/.github/_typos.toml @@ -39,6 +39,7 @@ prompty = "prompty" # prompty is a format name. ist = "ist" # German language dall = "dall" # OpenAI model name pn = "pn" # Kiota parameter +nin = "nin" # MongoDB "not in" operator [default.extend-identifiers] ags = "ags" # Azure Graph Service diff --git a/dotnet/Directory.Build.props b/dotnet/Directory.Build.props index 94d748c78057..13e279d799d6 100644 --- a/dotnet/Directory.Build.props +++ b/dotnet/Directory.Build.props @@ -25,6 +25,11 @@ True + + + $(NoWarn);CS8604;CS8602 + + $([System.IO.Path]::GetDirectoryName($([MSBuild]::GetPathOfFileAbove('.gitignore', '$(MSBuildThisFileDirectory)')))) diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props index e93dc3df49a2..fcad75436cb8 100644 --- a/dotnet/Directory.Packages.props +++ b/dotnet/Directory.Packages.props @@ -115,11 +115,15 @@ - + - + + + + + diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index 0a711f84f5f3..e1953ea0bf7e 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -117,6 +117,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Diagnostics", "Diagnostics" src\InternalUtilities\src\Diagnostics\RequiresUnreferencedCodeAttribute.cs = src\InternalUtilities\src\Diagnostics\RequiresUnreferencedCodeAttribute.cs src\InternalUtilities\src\Diagnostics\UnconditionalSuppressMessageAttribute.cs = src\InternalUtilities\src\Diagnostics\UnconditionalSuppressMessageAttribute.cs src\InternalUtilities\src\Diagnostics\Verify.cs = src\InternalUtilities\src\Diagnostics\Verify.cs + src\InternalUtilities\src\Diagnostics\UnreachableException.cs = src\InternalUtilities\src\Diagnostics\UnreachableException.cs EndProjectSection EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Linq", "Linq", "{B00AD427-0047-4850-BEF9-BA8237EA9D8B}" @@ -140,6 +141,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "System", "System", "{3CDE10 src\InternalUtilities\src\System\InternalTypeConverter.cs = src\InternalUtilities\src\System\InternalTypeConverter.cs src\InternalUtilities\src\System\NonNullCollection.cs = src\InternalUtilities\src\System\NonNullCollection.cs src\InternalUtilities\src\System\TypeConverterFactory.cs = src\InternalUtilities\src\System\TypeConverterFactory.cs + src\InternalUtilities\src\System\IndexRange.cs = src\InternalUtilities\src\System\IndexRange.cs EndProjectSection EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Type", "Type", "{E85EA4D0-BB7E-4DFD-882F-A76EB8C0B8FF}" @@ -439,6 +441,30 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "sk-chatgpt-azure-function", EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "kernel-functions-generator", "samples\Demos\CreateChatGptPlugin\MathPlugin\kernel-functions-generator\kernel-functions-generator.csproj", "{78785CB1-66CF-4895-D7E5-A440DD84BE86}" EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "VectorDataIntegrationTests", "VectorDataIntegrationTests", 
"{4F381919-F1BE-47D8-8558-3187ED04A84F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "QdrantIntegrationTests", "src\VectorDataIntegrationTests\QdrantIntegrationTests\QdrantIntegrationTests.csproj", "{27D33AB3-4DFF-48BC-8D76-FB2CDF90B707}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "VectorDataIntegrationTests", "src\VectorDataIntegrationTests\VectorDataIntegrationTests\VectorDataIntegrationTests.csproj", "{B29A972F-A774-4140-AECF-6B577C476627}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RedisIntegrationTests", "src\VectorDataIntegrationTests\RedisIntegrationTests\RedisIntegrationTests.csproj", "{F7EA82A4-A626-4316-AA47-EAC3A0E85870}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "PostgresIntegrationTests", "src\VectorDataIntegrationTests\PostgresIntegrationTests\PostgresIntegrationTests.csproj", "{3148FF01-38C7-4BEB-8CEC-9323EC7C593B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InMemoryIntegrationTests", "src\VectorDataIntegrationTests\InMemoryIntegrationTests\InMemoryIntegrationTests.csproj", "{F5126690-0FD1-4777-9EDF-B3F5B7B3730B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CosmosNoSQLIntegrationTests", "src\VectorDataIntegrationTests\CosmosNoSQLIntegrationTests\CosmosNoSQLIntegrationTests.csproj", "{E200425C-E501-430C-8A8B-BC0088BD94DB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SqliteIntegrationTests", "src\VectorDataIntegrationTests\SqliteIntegrationTests\SqliteIntegrationTests.csproj", "{709B3933-5286-4139-8D83-8C7AA5746FAE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "WeaviateIntegrationTests", "src\VectorDataIntegrationTests\WeaviateIntegrationTests\WeaviateIntegrationTests.csproj", "{E3CECC65-1B00-4E3A-90B6-FC7A2C64E41F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDBIntegrationTests", "src\VectorDataIntegrationTests\MongoDBIntegrationTests\MongoDBIntegrationTests.csproj", "{A0E65043-6B00-4836-850F-000A52238914}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CosmosMongoDBIntegrationTests", "src\VectorDataIntegrationTests\CosmosMongoDBIntegrationTests\CosmosMongoDBIntegrationTests.csproj", "{11DFBF14-6FBA-41F0-B7F3-A288952D6FDB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AzureAISearchIntegrationTests", "src\VectorDataIntegrationTests\AzureAISearchIntegrationTests\AzureAISearchIntegrationTests.csproj", "{06181F0F-A375-43AE-B45F-73CBCFC30C14}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -1172,6 +1198,72 @@ Global {78785CB1-66CF-4895-D7E5-A440DD84BE86}.Publish|Any CPU.Build.0 = Debug|Any CPU {78785CB1-66CF-4895-D7E5-A440DD84BE86}.Release|Any CPU.ActiveCfg = Release|Any CPU {78785CB1-66CF-4895-D7E5-A440DD84BE86}.Release|Any CPU.Build.0 = Release|Any CPU + {27D33AB3-4DFF-48BC-8D76-FB2CDF90B707}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {27D33AB3-4DFF-48BC-8D76-FB2CDF90B707}.Debug|Any CPU.Build.0 = Debug|Any CPU + {27D33AB3-4DFF-48BC-8D76-FB2CDF90B707}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {27D33AB3-4DFF-48BC-8D76-FB2CDF90B707}.Publish|Any CPU.Build.0 = Debug|Any CPU + {27D33AB3-4DFF-48BC-8D76-FB2CDF90B707}.Release|Any CPU.ActiveCfg = Release|Any CPU + {27D33AB3-4DFF-48BC-8D76-FB2CDF90B707}.Release|Any CPU.Build.0 = Release|Any CPU + {B29A972F-A774-4140-AECF-6B577C476627}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B29A972F-A774-4140-AECF-6B577C476627}.Debug|Any CPU.Build.0 
= Debug|Any CPU + {B29A972F-A774-4140-AECF-6B577C476627}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {B29A972F-A774-4140-AECF-6B577C476627}.Publish|Any CPU.Build.0 = Publish|Any CPU + {B29A972F-A774-4140-AECF-6B577C476627}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B29A972F-A774-4140-AECF-6B577C476627}.Release|Any CPU.Build.0 = Release|Any CPU + {F7EA82A4-A626-4316-AA47-EAC3A0E85870}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F7EA82A4-A626-4316-AA47-EAC3A0E85870}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F7EA82A4-A626-4316-AA47-EAC3A0E85870}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {F7EA82A4-A626-4316-AA47-EAC3A0E85870}.Publish|Any CPU.Build.0 = Debug|Any CPU + {F7EA82A4-A626-4316-AA47-EAC3A0E85870}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F7EA82A4-A626-4316-AA47-EAC3A0E85870}.Release|Any CPU.Build.0 = Release|Any CPU + {3148FF01-38C7-4BEB-8CEC-9323EC7C593B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3148FF01-38C7-4BEB-8CEC-9323EC7C593B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3148FF01-38C7-4BEB-8CEC-9323EC7C593B}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {3148FF01-38C7-4BEB-8CEC-9323EC7C593B}.Publish|Any CPU.Build.0 = Debug|Any CPU + {3148FF01-38C7-4BEB-8CEC-9323EC7C593B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3148FF01-38C7-4BEB-8CEC-9323EC7C593B}.Release|Any CPU.Build.0 = Release|Any CPU + {F5126690-0FD1-4777-9EDF-B3F5B7B3730B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F5126690-0FD1-4777-9EDF-B3F5B7B3730B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F5126690-0FD1-4777-9EDF-B3F5B7B3730B}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {F5126690-0FD1-4777-9EDF-B3F5B7B3730B}.Publish|Any CPU.Build.0 = Debug|Any CPU + {F5126690-0FD1-4777-9EDF-B3F5B7B3730B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F5126690-0FD1-4777-9EDF-B3F5B7B3730B}.Release|Any CPU.Build.0 = Release|Any CPU + {E200425C-E501-430C-8A8B-BC0088BD94DB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E200425C-E501-430C-8A8B-BC0088BD94DB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E200425C-E501-430C-8A8B-BC0088BD94DB}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {E200425C-E501-430C-8A8B-BC0088BD94DB}.Publish|Any CPU.Build.0 = Debug|Any CPU + {E200425C-E501-430C-8A8B-BC0088BD94DB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E200425C-E501-430C-8A8B-BC0088BD94DB}.Release|Any CPU.Build.0 = Release|Any CPU + {709B3933-5286-4139-8D83-8C7AA5746FAE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {709B3933-5286-4139-8D83-8C7AA5746FAE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {709B3933-5286-4139-8D83-8C7AA5746FAE}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {709B3933-5286-4139-8D83-8C7AA5746FAE}.Publish|Any CPU.Build.0 = Debug|Any CPU + {709B3933-5286-4139-8D83-8C7AA5746FAE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {709B3933-5286-4139-8D83-8C7AA5746FAE}.Release|Any CPU.Build.0 = Release|Any CPU + {E3CECC65-1B00-4E3A-90B6-FC7A2C64E41F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E3CECC65-1B00-4E3A-90B6-FC7A2C64E41F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E3CECC65-1B00-4E3A-90B6-FC7A2C64E41F}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {E3CECC65-1B00-4E3A-90B6-FC7A2C64E41F}.Publish|Any CPU.Build.0 = Debug|Any CPU + {E3CECC65-1B00-4E3A-90B6-FC7A2C64E41F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E3CECC65-1B00-4E3A-90B6-FC7A2C64E41F}.Release|Any CPU.Build.0 = Release|Any CPU + {A0E65043-6B00-4836-850F-000A52238914}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A0E65043-6B00-4836-850F-000A52238914}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A0E65043-6B00-4836-850F-000A52238914}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + 
{A0E65043-6B00-4836-850F-000A52238914}.Publish|Any CPU.Build.0 = Debug|Any CPU + {A0E65043-6B00-4836-850F-000A52238914}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A0E65043-6B00-4836-850F-000A52238914}.Release|Any CPU.Build.0 = Release|Any CPU + {11DFBF14-6FBA-41F0-B7F3-A288952D6FDB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {11DFBF14-6FBA-41F0-B7F3-A288952D6FDB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {11DFBF14-6FBA-41F0-B7F3-A288952D6FDB}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {11DFBF14-6FBA-41F0-B7F3-A288952D6FDB}.Publish|Any CPU.Build.0 = Debug|Any CPU + {11DFBF14-6FBA-41F0-B7F3-A288952D6FDB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {11DFBF14-6FBA-41F0-B7F3-A288952D6FDB}.Release|Any CPU.Build.0 = Release|Any CPU + {06181F0F-A375-43AE-B45F-73CBCFC30C14}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {06181F0F-A375-43AE-B45F-73CBCFC30C14}.Debug|Any CPU.Build.0 = Debug|Any CPU + {06181F0F-A375-43AE-B45F-73CBCFC30C14}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {06181F0F-A375-43AE-B45F-73CBCFC30C14}.Publish|Any CPU.Build.0 = Debug|Any CPU + {06181F0F-A375-43AE-B45F-73CBCFC30C14}.Release|Any CPU.ActiveCfg = Release|Any CPU + {06181F0F-A375-43AE-B45F-73CBCFC30C14}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -1333,6 +1425,18 @@ Global {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} {2EB6E4C2-606D-B638-2E08-49EA2061C428} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} {78785CB1-66CF-4895-D7E5-A440DD84BE86} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} + {4F381919-F1BE-47D8-8558-3187ED04A84F} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0} + {27D33AB3-4DFF-48BC-8D76-FB2CDF90B707} = {4F381919-F1BE-47D8-8558-3187ED04A84F} + {B29A972F-A774-4140-AECF-6B577C476627} = {4F381919-F1BE-47D8-8558-3187ED04A84F} + {F7EA82A4-A626-4316-AA47-EAC3A0E85870} = {4F381919-F1BE-47D8-8558-3187ED04A84F} + {3148FF01-38C7-4BEB-8CEC-9323EC7C593B} = {4F381919-F1BE-47D8-8558-3187ED04A84F} + {F5126690-0FD1-4777-9EDF-B3F5B7B3730B} = {4F381919-F1BE-47D8-8558-3187ED04A84F} + {E200425C-E501-430C-8A8B-BC0088BD94DB} = {4F381919-F1BE-47D8-8558-3187ED04A84F} + {709B3933-5286-4139-8D83-8C7AA5746FAE} = {4F381919-F1BE-47D8-8558-3187ED04A84F} + {E3CECC65-1B00-4E3A-90B6-FC7A2C64E41F} = {4F381919-F1BE-47D8-8558-3187ED04A84F} + {A0E65043-6B00-4836-850F-000A52238914} = {4F381919-F1BE-47D8-8558-3187ED04A84F} + {11DFBF14-6FBA-41F0-B7F3-A288952D6FDB} = {4F381919-F1BE-47D8-8558-3187ED04A84F} + {06181F0F-A375-43AE-B45F-73CBCFC30C14} = {4F381919-F1BE-47D8-8558-3187ED04A84F} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83} diff --git a/dotnet/SK-dotnet.sln.DotSettings b/dotnet/SK-dotnet.sln.DotSettings index d8964e230315..f5eec1700bcd 100644 --- a/dotnet/SK-dotnet.sln.DotSettings +++ b/dotnet/SK-dotnet.sln.DotSettings @@ -217,6 +217,7 @@ public void It$SOMENAME$() True True True + True True True True diff --git a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs index 5c1c4b05c56f..000cb1ebba07 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs @@ -8,7 +8,7 @@ namespace Memory.VectorStoreEmbeddingGeneration; /// -/// Decorator 
for a that generates embeddings for records on upsert and when using . +/// Decorator for a that generates embeddings for records on upsert and when using . /// /// /// This class is part of the sample. @@ -120,13 +120,13 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable record } /// - public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { return this._decoratedVectorStoreRecordCollection.VectorizedSearchAsync(vector, options, cancellationToken); } /// - public async Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { var embeddingValue = await this._textEmbeddingGenerationService.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); return await this.VectorizedSearchAsync(embeddingValue, options, cancellationToken).ConfigureAwait(false); diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/MappingVectorStoreRecordCollection.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/MappingVectorStoreRecordCollection.cs index 076be09c9ca5..1951f3a6dbee 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/MappingVectorStoreRecordCollection.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/MappingVectorStoreRecordCollection.cs @@ -1,5 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. +// TODO: Commented out as part of implementing LINQ-based filtering, since MappingVectorStoreRecordCollection is no longer easy/feasible. +// TODO: The user provides an expression tree accepting a TPublicRecord, but we require an expression tree accepting a TInternalRecord. +// TODO: This is something that the user must provide, and is quite advanced. + +#if DISABLED + using System.Runtime.CompilerServices; using Microsoft.Extensions.VectorData; @@ -132,3 +138,5 @@ public async Task> VectorizedSearchAsync CreateVectorStoreRecordCollec return (collection as IVectorStoreRecordCollection)!; } +#if DISABLED_FOR_NOW // TODO: See note on MappingVectorStoreRecordCollection // If the user asked for a string key, we can add a decorator which converts back and forth between string and guid. // The string that the user provides will still need to contain a valid guid, since the Langchain created collection // uses guid keys. 
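A note on the TODOs above: a mapping collection would have to rewrite the user's filter lambda from the public record type to the internal one before forwarding it. The following is only an illustrative sketch of what that rewrite could look like (it is not part of this patch); the assumption that every public property has an identically named internal property is mine.

    // Illustrative sketch, not part of this patch: re-rooting a filter expression from a
    // public record type onto an internal record type, assuming properties match 1:1 by name.
    using System;
    using System.Linq.Expressions;

    internal sealed class FilterParameterRewriter<TPublic, TInternal> : ExpressionVisitor
    {
        private readonly ParameterExpression _newParameter = Expression.Parameter(typeof(TInternal), "r");

        public Expression<Func<TInternal, bool>> Rewrite(Expression<Func<TPublic, bool>> publicFilter)
            => Expression.Lambda<Func<TInternal, bool>>(this.Visit(publicFilter.Body), this._newParameter);

        // Replace any use of the public lambda parameter with the internal one.
        protected override Expression VisitParameter(ParameterExpression node)
            => node.Type == typeof(TPublic) ? this._newParameter : base.VisitParameter(node);

        // Re-root property accesses such as record.Category onto the internal parameter by name.
        protected override Expression VisitMember(MemberExpression node)
            => node.Expression is ParameterExpression p && p.Type == typeof(TPublic)
                ? Expression.Property(this._newParameter, node.Member.Name)
                : base.VisitMember(node);
    }

This is the piece the TODO calls "quite advanced": the mapping collection cannot derive it automatically unless the two models line up property for property, which is why the sample is disabled rather than updated.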
@@ -92,6 +93,7 @@ public IVectorStoreRecordCollection CreateVectorStoreRecordCollec return (stringKeyCollection as IVectorStoreRecordCollection)!; } +#endif throw new NotSupportedException("This VectorStore is only usable with Guid keys and LangchainDocument record types or string keys and LangchainDocument record types"); } diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs index c5160ac8739c..ff492ca58304 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs @@ -70,8 +70,7 @@ public async Task IngestDataAndSearchAsync(string collectionName, Func.Category), "External Definitions"); - searchResult = await collection.VectorizedSearchAsync(searchVector, new() { Top = 3, Filter = filter }); + searchResult = await collection.VectorizedSearchAsync(searchVector, new() { Top = 3, NewFilter = g => g.Category == "External Definitions" }); resultRecords = await searchResult.Results.ToListAsync(); output.WriteLine("Search string: " + searchString); diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs index a7eceb4046a9..5119881c3bda 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs @@ -70,8 +70,7 @@ public async Task ExampleAsync() // Search the collection using a vector search with pre-filtering. searchString = "What is Retrieval Augmented Generation"; searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - var filter = new VectorSearchFilter().EqualTo(nameof(Glossary.Category), "External Definitions"); - searchResult = await collection.VectorizedSearchAsync(searchVector, new() { Top = 3, Filter = filter }); + searchResult = await collection.VectorizedSearchAsync(searchVector, new() { Top = 3, NewFilter = g => g.Category == "External Definitions" }); resultRecords = await searchResult.Results.ToListAsync(); Console.WriteLine("Search string: " + searchString); diff --git a/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs b/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs index df52982104b8..f6a3d4ab6356 100644 --- a/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs +++ b/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs @@ -144,7 +144,7 @@ internal static async Task> CreateCo private sealed class VectorizedSearchWrapper(IVectorizedSearch vectorizedSearch, ITextEmbeddingGenerationService textEmbeddingGeneration) : IVectorizableTextSearch { /// - public async Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { var vectorizedQuery = await textEmbeddingGeneration!.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs b/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs index 19c7cee676e8..9b7e889b25dd 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs @@ -71,7 +71,7 @@ public async Task SearchAnInMemoryVectorStoreWithFilteringAsync() new() { Top = 1, - Filter = new VectorSearchFilter().EqualTo(nameof(Glossary.Category), "AI") + NewFilter = g => g.Category == "AI" }); var searchResultItems = await searchResult.Results.ToListAsync(); diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs b/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs index 7303ddc9801a..35ca4822a824 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs @@ -1,5 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +#if DISABLED_FOR_NOW // TODO: See note in MappingVectorStoreRecordCollection + using System.Runtime.CompilerServices; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Qdrant; @@ -7,6 +9,7 @@ namespace GettingStartedWithVectorStores; + /// /// Example that shows that you can switch between different vector stores with the same code, in this case /// with a vector store that doesn't use string keys. @@ -193,3 +196,5 @@ public async Task> VectorizedSearchAsync /// Contains tests for the class. /// @@ -21,7 +23,7 @@ public void BuildFilterStringBuildsCorrectEqualityStringForEachFilterType(string var filter = new VectorSearchFilter().EqualTo(fieldName, fieldValue!); // Act. - var actual = AzureAISearchVectorStoreCollectionSearchMapping.BuildFilterString(filter, new Dictionary { { fieldName, "storage_" + fieldName } }); + var actual = AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(filter, new Dictionary { { fieldName, "storage_" + fieldName } }); // Assert. Assert.Equal(expected, actual); @@ -34,7 +36,7 @@ public void BuildFilterStringBuildsCorrectTagContainsString() var filter = new VectorSearchFilter().AnyTagEqualTo("Tags", "mytag"); // Act. - var actual = AzureAISearchVectorStoreCollectionSearchMapping.BuildFilterString(filter, new Dictionary { { "Tags", "storage_tags" } }); + var actual = AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(filter, new Dictionary { { "Tags", "storage_tags" } }); // Assert. Assert.Equal("storage_tags/any(t: t eq 'mytag')", actual); @@ -47,7 +49,7 @@ public void BuildFilterStringCombinesFilterOptions() var filter = new VectorSearchFilter().EqualTo("intField", 5).AnyTagEqualTo("Tags", "mytag"); // Act. - var actual = AzureAISearchVectorStoreCollectionSearchMapping.BuildFilterString(filter, new Dictionary { { "Tags", "storage_tags" }, { "intField", "storage_intField" } }); + var actual = AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(filter, new Dictionary { { "Tags", "storage_tags" }, { "intField", "storage_intField" } }); // Assert. 
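For context on the sample updates above: the change is from the string-based VectorSearchFilter builder, which this patch marks obsolete, to the strongly typed LINQ filter it introduces, exposed as NewFilter while the two coexist. A minimal before/after using the Glossary model from the samples:

    // Before: property names passed as strings, validated only at run time.
    var legacyFilter = new VectorSearchFilter().EqualTo(nameof(Glossary.Category), "External Definitions");
    var legacyResults = await collection.VectorizedSearchAsync(searchVector, new() { Top = 3, Filter = legacyFilter });

    // After: a typed lambda over the record, translated by the connector-specific filter translators added in this patch.
    var newResults = await collection.VectorizedSearchAsync(searchVector, new() { Top = 3, NewFilter = g => g.Category == "External Definitions" });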
Assert.Equal("storage_intField eq 5 and storage_tags/any(t: t eq 'mytag')", actual); @@ -57,8 +59,8 @@ public void BuildFilterStringCombinesFilterOptions() public void BuildFilterStringThrowsForUnknownPropertyName() { // Act and assert. - Assert.Throws(() => AzureAISearchVectorStoreCollectionSearchMapping.BuildFilterString(new VectorSearchFilter().EqualTo("unknown", "value"), new Dictionary())); - Assert.Throws(() => AzureAISearchVectorStoreCollectionSearchMapping.BuildFilterString(new VectorSearchFilter().AnyTagEqualTo("unknown", "value"), new Dictionary())); + Assert.Throws(() => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(new VectorSearchFilter().EqualTo("unknown", "value"), new Dictionary())); + Assert.Throws(() => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(new VectorSearchFilter().AnyTagEqualTo("unknown", "value"), new Dictionary())); } public static IEnumerable DataTypeMappingOptions() diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs index 467207b29ace..eb240f91d9aa 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -20,6 +20,8 @@ namespace SemanticKernel.Connectors.AzureAISearch.UnitTests; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + /// /// Contains tests for the class. /// diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs index 6e061892d2b9..9dee844e61d2 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs @@ -9,6 +9,8 @@ namespace SemanticKernel.Connectors.AzureCosmosDBMongoDB.UnitTests; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + /// /// Unit tests for class. 
/// diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 99815a1cee63..ab2fa157b212 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -13,6 +13,7 @@ using MongoDB.Driver; using Moq; using Xunit; +using MEVD = Microsoft.Extensions.VectorData; namespace SemanticKernel.Connectors.AzureCosmosDBMongoDB.UnitTests; @@ -643,7 +644,7 @@ public async Task VectorizedSearchThrowsExceptionWithNonExistentVectorPropertyNa this._mockMongoDatabase.Object, "collection"); - var options = new VectorSearchOptions { VectorPropertyName = "non-existent-property" }; + var options = new MEVD.VectorSearchOptions { VectorPropertyName = "non-existent-property" }; // Act & Assert await Assert.ThrowsAsync(async () => await (await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), options)).Results.FirstOrDefaultAsync()); diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs index 094028e516ab..37aa005777d5 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs @@ -9,6 +9,8 @@ namespace SemanticKernel.Connectors.AzureCosmosDBNoSQL.UnitTests; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + /// /// Unit tests for class. 
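The test updates above (the MEVD namespace alias here, and the DummyType placeholder added a little further below) follow from the search options type now being parameterized by the record type, so that the filter can be expressed as a typed lambda; tests that exercise query building without a real model supply a throwaway type argument instead. A rough sketch of the resulting shape, where Hotel is an assumed example type rather than anything from this patch:

    // Sketch only: the options are tied to a record type, which is what lets the
    // filter be written as a lambda over that type.
    var options = new VectorSearchOptions<Hotel>
    {
        Top = 10,
        Skip = 5,
        NewFilter = hotel => hotel.Category == "Luxury",
    };
    var results = await collection.VectorizedSearchAsync(searchVector, options);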
/// @@ -35,7 +37,7 @@ public void BuildSearchQueryByDefaultReturnsValidQueryDefinition() .EqualTo("TestProperty2", "test-value-2") .AnyTagEqualTo("TestProperty3", "test-value-3"); - var searchOptions = new VectorSearchOptions { Filter = filter, Skip = 5, Top = 10 }; + var searchOptions = new VectorSearchOptions { Filter = filter, Skip = 5, Top = 10 }; // Act var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( @@ -84,7 +86,7 @@ public void BuildSearchQueryWithoutOffsetReturnsQueryDefinitionWithTopParameter( .EqualTo("TestProperty2", "test-value-2") .AnyTagEqualTo("TestProperty3", "test-value-3"); - var searchOptions = new VectorSearchOptions { Filter = filter, Top = 10 }; + var searchOptions = new VectorSearchOptions { Filter = filter, Top = 10 }; // Act var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( @@ -129,7 +131,7 @@ public void BuildSearchQueryWithInvalidFilterThrowsException() var filter = new VectorSearchFilter().EqualTo("non-existent-property", "test-value-2"); - var searchOptions = new VectorSearchOptions { Filter = filter, Skip = 5, Top = 10 }; + var searchOptions = new VectorSearchOptions { Filter = filter, Skip = 5, Top = 10 }; // Act & Assert Assert.Throws(() => @@ -150,7 +152,7 @@ public void BuildSearchQueryWithoutFilterDoesNotContainWhereClause() var vectorPropertyName = "test_property_1"; var fields = this._storagePropertyNames.Values.ToList(); - var searchOptions = new VectorSearchOptions { Skip = 5, Top = 10 }; + var searchOptions = new VectorSearchOptions { Skip = 5, Top = 10 }; // Act var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( @@ -181,10 +183,11 @@ public void BuildSearchQueryWithoutFilterDoesNotContainWhereClause() public void BuildSelectQueryByDefaultReturnsValidQueryDefinition() { // Arrange - const string ExpectedQueryText = "" + - "SELECT x.key,x.property_1,x.property_2 " + - "FROM x " + - "WHERE (x.key_property = @rk0 AND x.partition_key_property = @pk0) "; + const string ExpectedQueryText = """ + SELECT x.key,x.property_1,x.property_2 + FROM x + WHERE (x.key_property = @rk0 AND x.partition_key_property = @pk0) + """; const string KeyStoragePropertyName = "key_property"; const string PartitionKeyPropertyName = "partition_key_property"; @@ -211,4 +214,8 @@ public void BuildSelectQueryByDefaultReturnsValidQueryDefinition() Assert.Equal("@pk0", queryParameters[1].Name); Assert.Equal("partition_key", queryParameters[1].Value); } + +#pragma warning disable CA1812 // An internal class that is apparently never instantiated. If so, remove the code from the assembly. 
+ private sealed class DummyType; +#pragma warning restore CA1812 } diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index d8718eb2f2b5..24e4a2083f0b 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -612,7 +612,7 @@ public async Task VectorizedSearchWithNonExistentVectorPropertyNameThrowsExcepti this._mockDatabase.Object, "collection"); - var searchOptions = new VectorSearchOptions { VectorPropertyName = "non-existent-property" }; + var searchOptions = new VectorSearchOptions { VectorPropertyName = "non-existent-property" }; // Act & Assert await Assert.ThrowsAsync(async () => diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj b/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj index 15d88496159b..9fcbdecf530e 100644 --- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj @@ -35,4 +35,5 @@ + \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs index 1cf974a77c84..bbf5c9611e32 100644 --- a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs @@ -293,7 +293,7 @@ public async Task CanSearchWithVectorAsync(bool useDefinition, TKey testKe // Act var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new VectorSearchOptions { IncludeVectors = true }, + new() { IncludeVectors = true }, this._testCancellationToken); // Assert @@ -309,6 +309,7 @@ public async Task CanSearchWithVectorAsync(bool useDefinition, TKey testKe Assert.Equal(-1, actualResults[1].Score); } +#pragma warning disable CS0618 // VectorSearchFilter is obsolete [Theory] [InlineData(true, TestRecordKey1, TestRecordKey2, "Equality")] [InlineData(true, TestRecordIntKey1, TestRecordIntKey2, "Equality")] @@ -337,7 +338,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, TK var filter = filterType == "Equality" ? 
new VectorSearchFilter().EqualTo("Data", $"data {testKey2}") : new VectorSearchFilter().AnyTagEqualTo("Tags", $"tag {testKey2}"); var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new VectorSearchOptions { IncludeVectors = true, Filter = filter, IncludeTotalCount = true }, + new() { IncludeVectors = true, Filter = filter, IncludeTotalCount = true }, this._testCancellationToken); // Assert @@ -349,6 +350,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, TK Assert.Equal($"data {testKey2}", actualResults[0].Record.Data); Assert.Equal(-1, actualResults[0].Score); } +#pragma warning restore CS0618 // Type or member is obsolete [Theory] [InlineData(DistanceFunction.CosineSimilarity, 1, -1)] @@ -389,7 +391,7 @@ public async Task CanSearchWithDifferentDistanceFunctionsAsync(string distanceFu // Act var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new VectorSearchOptions { IncludeVectors = true }, + new() { IncludeVectors = true }, this._testCancellationToken); // Assert @@ -430,7 +432,7 @@ public async Task CanSearchManyRecordsAsync(bool useDefinition) // Act var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new VectorSearchOptions { IncludeVectors = true, Top = 10, Skip = 10, IncludeTotalCount = true }, + new() { IncludeVectors = true, Top = 10, Skip = 10, IncludeTotalCount = true }, this._testCancellationToken); // Assert @@ -506,7 +508,7 @@ public async Task ItCanSearchUsingTheGenericDataModelAsync(TKey testKey1, // Act var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory([1, 1, 1, 1]), - new VectorSearchOptions { IncludeVectors = true, VectorPropertyName = "Vector" }, + new() { IncludeVectors = true, VectorPropertyName = "Vector" }, this._testCancellationToken); // Assert diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs new file mode 100644 index 000000000000..16164c2a3eca --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs @@ -0,0 +1,366 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Text; + +namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; + +internal class AzureAISearchFilterTranslator +{ + private IReadOnlyDictionary _storagePropertyNames = null!; + private ParameterExpression _recordParameter = null!; + + private readonly StringBuilder _filter = new(); + + private static readonly char[] s_searchInDefaultDelimiter = [' ', ',']; + + internal string Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + { + Debug.Assert(this._filter.Length == 0); + + this._storagePropertyNames = storagePropertyNames; + + Debug.Assert(lambdaExpression.Parameters.Count == 1); + this._recordParameter = lambdaExpression.Parameters[0]; + + this.Translate(lambdaExpression.Body); + return this._filter.ToString(); + } + + private void Translate(Expression? 
node) + { + switch (node) + { + case BinaryExpression binary: + this.TranslateBinary(binary); + return; + + case ConstantExpression constant: + this.TranslateConstant(constant); + return; + + case MemberExpression member: + this.TranslateMember(member); + return; + + case MethodCallExpression methodCall: + this.TranslateMethodCall(methodCall); + return; + + case UnaryExpression unary: + this.TranslateUnary(unary); + return; + + default: + throw new NotSupportedException("Unsupported NodeType in filter: " + node?.NodeType); + } + } + + private void TranslateBinary(BinaryExpression binary) + { + this._filter.Append('('); + this.Translate(binary.Left); + + this._filter.Append(binary.NodeType switch + { + ExpressionType.Equal => " eq ", + ExpressionType.NotEqual => " ne ", + + ExpressionType.GreaterThan => " gt ", + ExpressionType.GreaterThanOrEqual => " ge ", + ExpressionType.LessThan => " lt ", + ExpressionType.LessThanOrEqual => " le ", + + ExpressionType.AndAlso => " and ", + ExpressionType.OrElse => " or ", + + _ => throw new NotSupportedException("Unsupported binary expression node type: " + binary.NodeType) + }); + + this.Translate(binary.Right); + this._filter.Append(')'); + } + + private void TranslateConstant(ConstantExpression constant) + => this.GenerateLiteral(constant.Value); + + private void GenerateLiteral(object? value) + { + // TODO: Nullable + switch (value) + { + case byte b: + this._filter.Append(b); + return; + case short s: + this._filter.Append(s); + return; + case int i: + this._filter.Append(i); + return; + case long l: + this._filter.Append(l); + return; + + case string s: + this._filter.Append('\'').Append(s.Replace("'", "''")).Append('\''); // TODO: escaping + return; + case bool b: + this._filter.Append(b ? "true" : "false"); + return; + case Guid g: + this._filter.Append('\'').Append(g.ToString()).Append('\''); + return; + + case DateTime: + case DateTimeOffset: + throw new NotImplementedException(); + + case Array: + throw new NotImplementedException(); + + case null: + this._filter.Append("null"); + return; + + default: + throw new NotSupportedException("Unsupported constant type: " + value.GetType().Name); + } + } + + private void TranslateMember(MemberExpression memberExpression) + { + switch (memberExpression) + { + case var _ when this.TryGetField(memberExpression, out var column): + this._filter.Append(column); // TODO: Escape + return; + + // Identify captured lambda variables, inline them as constants + case var _ when TryGetCapturedValue(memberExpression, out var capturedValue): + this.GenerateLiteral(capturedValue); + return; + + default: + throw new NotSupportedException($"Member access for '{memberExpression.Member.Name}' is unsupported - only member access over the filter parameter are supported"); + } + } + + private void TranslateMethodCall(MethodCallExpression methodCall) + { + switch (methodCall) + { + // Enumerable.Contains() + case { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains + when contains.Method.DeclaringType == typeof(Enumerable): + this.TranslateContains(source, item); + return; + + // List.Contains() + case + { + Method: + { + Name: nameof(Enumerable.Contains), + DeclaringType: { IsGenericType: true } declaringType + }, + Object: Expression source, + Arguments: [var item] + } when declaringType.GetGenericTypeDefinition() == typeof(List<>): + this.TranslateContains(source, item); + return; + + default: + throw new NotSupportedException($"Unsupported method call: 
{methodCall.Method.DeclaringType?.Name}.{methodCall.Method.Name}"); + } + } + + private void TranslateContains(Expression source, Expression item) + { + switch (source) + { + // Contains over array field (r => r.Strings.Contains("foo")) + case var _ when this.TryGetField(source, out _): + this.Translate(source); + this._filter.Append("/any(t: t eq "); + this.Translate(item); + this._filter.Append(')'); + return; + + // Contains over inline enumerable + case NewArrayExpression newArray: + var elements = new object?[newArray.Expressions.Count]; + + for (var i = 0; i < newArray.Expressions.Count; i++) + { + if (!TryGetConstant(newArray.Expressions[i], out var elementValue)) + { + throw new NotSupportedException("Invalid element in array"); + } + + elements[i] = elementValue; + } + + ProcessInlineEnumerable(elements, item); + return; + + // Contains over captured enumerable (we inline) + case var _ when TryGetConstant(source, out var constantEnumerable) + && constantEnumerable is IEnumerable enumerable and not string: + ProcessInlineEnumerable(enumerable, item); + return; + + default: + throw new NotSupportedException("Unsupported Contains expression"); + } + + void ProcessInlineEnumerable(IEnumerable elements, Expression item) + { + if (item.Type != typeof(string)) + { + throw new NotSupportedException("Contains over non-string arrays is not supported"); + } + + this._filter.Append("search.in("); + this.Translate(item); + this._filter.Append(", '"); + + string delimiter = ", "; + var startingPosition = this._filter.Length; + +RestartLoop: + var isFirst = true; + foreach (string element in elements) + { + if (isFirst) + { + isFirst = false; + } + else + { + this._filter.Append(delimiter); + } + + // The default delimiter for search.in() is comma or space. + // If any element contains a comma or space, we switch to using pipe as the delimiter. + // If any contains a pipe, we throw (for now). + switch (delimiter) + { + case ", ": + if (element.IndexOfAny(s_searchInDefaultDelimiter) > -1) + { + delimiter = "|"; + this._filter.Length = startingPosition; + goto RestartLoop; + } + + break; + + case "|": + if (element.Contains('|')) + { + throw new NotSupportedException("Some elements contain both commas/spaces and pipes, cannot translate Contains"); + } + + break; + } + + this._filter.Append(element.Replace("'", "''")); + } + + this._filter.Append('\''); + + if (delimiter != ", ") + { + this._filter + .Append(", '") + .Append(delimiter) + .Append('\''); + } + + this._filter.Append(')'); + } + } + + private void TranslateUnary(UnaryExpression unary) + { + switch (unary.NodeType) + { + case ExpressionType.Not: + // Special handling for !(a == b) and !(a != b) + if (unary.Operand is BinaryExpression { NodeType: ExpressionType.Equal or ExpressionType.NotEqual } binary) + { + this.TranslateBinary( + Expression.MakeBinary( + binary.NodeType is ExpressionType.Equal ? ExpressionType.NotEqual : ExpressionType.Equal, + binary.Left, + binary.Right)); + return; + } + + this._filter.Append("(not "); + this.Translate(unary.Operand); + this._filter.Append(')'); + return; + + default: + throw new NotSupportedException("Unsupported unary expression node type: " + unary.NodeType); + } + } + + private bool TryGetField(Expression expression, [NotNullWhen(true)] out string? 
field) + { + if (expression is MemberExpression member && member.Expression == this._recordParameter) + { + if (!this._storagePropertyNames.TryGetValue(member.Member.Name, out field)) + { + throw new InvalidOperationException($"Property name '{member.Member.Name}' provided as part of the filter clause is not a valid property name."); + } + + return true; + } + + field = null; + return false; + } + + private static bool TryGetCapturedValue(Expression expression, out object? capturedValue) + { + if (expression is MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } + && constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) + && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true)) + { + capturedValue = fieldInfo.GetValue(constant.Value); + return true; + } + + capturedValue = null; + return false; + } + + private static bool TryGetConstant(Expression expression, out object? constantValue) + { + switch (expression) + { + case ConstantExpression { Value: var v }: + constantValue = v; + return true; + + case var _ when TryGetCapturedValue(expression, out var capturedValue): + constantValue = capturedValue; + return true; + + default: + constantValue = null; + return false; + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionSearchMapping.cs index ced35f244c5e..732b6aeae42c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionSearchMapping.cs @@ -12,6 +12,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// internal static class AzureAISearchVectorStoreCollectionSearchMapping { +#pragma warning disable CS0618 // VectorSearchFilter is obsolete /// /// Build an OData filter string from the provided . /// @@ -19,10 +20,10 @@ internal static class AzureAISearchVectorStoreCollectionSearchMapping /// A mapping of data model property names to the names under which they are stored. /// The OData filter string. /// Thrown when a provided filter value is not supported. - public static string BuildFilterString(VectorSearchFilter? basicVectorSearchFilter, IReadOnlyDictionary storagePropertyNames) + public static string BuildLegacyFilterString(VectorSearchFilter basicVectorSearchFilter, IReadOnlyDictionary storagePropertyNames) { var filterString = string.Empty; - if (basicVectorSearchFilter?.FilterClauses is not null) + if (basicVectorSearchFilter.FilterClauses is not null) { // Map Equality clauses. var filterStrings = basicVectorSearchFilter?.FilterClauses.OfType().Select(x => @@ -60,6 +61,7 @@ public static string BuildFilterString(VectorSearchFilter? basicVectorSearchFilt return filterString; } +#pragma warning restore CS0618 // VectorSearchFilter is obsolete /// /// Gets the name of the name under which the property with the given name is stored. 
diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index bdf25bd2b8a4..9e92f5bbb722 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Runtime.CompilerServices; using System.Text.Json; using System.Text.Json.Nodes; @@ -14,7 +15,6 @@ using Azure.Search.Documents.Indexes.Models; using Azure.Search.Documents.Models; using Microsoft.Extensions.VectorData; -using VectorData = Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; @@ -66,7 +66,7 @@ public sealed class AzureAISearchVectorStoreRecordCollection : IVectorS ]; /// The default options for vector search. - private static readonly VectorData.VectorSearchOptions s_defaultVectorSearchOptions = new(); + private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); /// Azure AI Search client that can be used to manage the list of indices in an Azure AI Search Service. private readonly SearchIndexClient _searchIndexClient; @@ -314,7 +314,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable reco } /// - public Task> VectorizedSearchAsync(TVector vector, VectorData.VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); @@ -335,7 +335,17 @@ public Task> VectorizedSearchAsync(TVector // Configure search settings. var vectorQueries = new List(); vectorQueries.Add(new VectorizedQuery(floatVector) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorFieldName } }); - var filterString = AzureAISearchVectorStoreCollectionSearchMapping.BuildFilterString(internalOptions.Filter, this._propertyReader.JsonPropertyNamesMap); + +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + // Build filter object. + var filter = internalOptions switch + { + { Filter: not null, NewFilter: not null } => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), + { Filter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._propertyReader.JsonPropertyNamesMap), + { NewFilter: Expression> newFilter } => new AzureAISearchFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), + _ => null + }; +#pragma warning restore CS0618 // Build search options. var searchOptions = new SearchOptions @@ -343,9 +353,14 @@ public Task> VectorizedSearchAsync(TVector VectorSearch = new(), Size = internalOptions.Top, Skip = internalOptions.Skip, - Filter = filterString, IncludeTotalCount = internalOptions.IncludeTotalCount, }; + + if (filter is not null) + { + searchOptions.Filter = filter; + } + searchOptions.VectorSearch.Queries.AddRange(vectorQueries); // Filter out vector fields if requested. @@ -359,7 +374,7 @@ public Task> VectorizedSearchAsync(TVector } /// - public Task> VectorizableTextSearchAsync(string searchText, VectorData.VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) + public Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(searchText); @@ -375,7 +390,17 @@ public Task> VectorizableTextSearchAsync(string sea // Configure search settings. var vectorQueries = new List(); vectorQueries.Add(new VectorizableTextQuery(searchText) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorFieldName } }); - var filterString = AzureAISearchVectorStoreCollectionSearchMapping.BuildFilterString(internalOptions.Filter, this._propertyReader.JsonPropertyNamesMap); + +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + // Build filter object. + var filter = internalOptions switch + { + { Filter: not null, NewFilter: not null } => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), + { Filter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._propertyReader.JsonPropertyNamesMap), + { NewFilter: Expression> newFilter } => new AzureAISearchFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), + _ => null + }; +#pragma warning restore CS0618 // Build search options. var searchOptions = new SearchOptions @@ -383,9 +408,14 @@ public Task> VectorizableTextSearchAsync(string sea VectorSearch = new(), Size = internalOptions.Top, Skip = internalOptions.Skip, - Filter = filterString, IncludeTotalCount = internalOptions.IncludeTotalCount, }; + + if (filter is not null) + { + searchOptions.Filter = filter; + } + searchOptions.VectorSearch.Queries.AddRange(vectorQueries); // Filter out vector fields if requested. diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs new file mode 100644 index 000000000000..6c0b4e44e23b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs @@ -0,0 +1,258 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using System.Runtime.CompilerServices; +using MongoDB.Bson; + +namespace Microsoft.SemanticKernel.Connectors.MongoDB; + +// MongoDB query reference: https://www.mongodb.com/docs/manual/reference/operator/query +// Information specific to vector search pre-filter: https://www.mongodb.com/docs/atlas/atlas-vector-search/vector-search-stage/#atlas-vector-search-pre-filter +internal class AzureCosmosDBMongoDBFilterTranslator +{ + private IReadOnlyDictionary _storagePropertyNames = null!; + private ParameterExpression _recordParameter = null!; + + internal BsonDocument Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + { + this._storagePropertyNames = storagePropertyNames; + + Debug.Assert(lambdaExpression.Parameters.Count == 1); + this._recordParameter = lambdaExpression.Parameters[0]; + + return this.Translate(lambdaExpression.Body); + } + + private BsonDocument Translate(Expression? 
node)
+        => node switch
+        {
+            BinaryExpression
+            {
+                NodeType: ExpressionType.Equal or ExpressionType.NotEqual
+                    or ExpressionType.GreaterThan or ExpressionType.GreaterThanOrEqual
+                    or ExpressionType.LessThan or ExpressionType.LessThanOrEqual
+            } binary
+                => this.TranslateEqualityComparison(binary),
+
+            BinaryExpression { NodeType: ExpressionType.AndAlso or ExpressionType.OrElse } andOr
+                => this.TranslateAndOr(andOr),
+            UnaryExpression { NodeType: ExpressionType.Not } not
+                => this.TranslateNot(not),
+
+            // MemberExpression is generally handled within e.g. TranslateEqualityComparison; this is used to translate direct bool inside filter (e.g. Filter => r => r.Bool)
+            MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _)
+                => this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(true))),
+
+            MethodCallExpression methodCall => this.TranslateMethodCall(methodCall),
+
+            _ => throw new NotSupportedException("The following NodeType is unsupported: " + node?.NodeType)
+        };
+
+    private BsonDocument TranslateEqualityComparison(BinaryExpression binary)
+    {
+        if ((this.TryTranslateFieldAccess(binary.Left, out var storagePropertyName) && TryGetConstant(binary.Right, out var value))
+            || (this.TryTranslateFieldAccess(binary.Right, out storagePropertyName) && TryGetConstant(binary.Left, out value)))
+        {
+            if (value is null)
+            {
+                throw new NotSupportedException("MongoDB does not support null checks in vector search pre-filters");
+            }
+
+            // Short form of equality (instead of $eq)
+            if (binary.NodeType is ExpressionType.Equal)
+            {
+                return new BsonDocument { [storagePropertyName] = BsonValue.Create(value) };
+            }
+
+            var filterOperator = binary.NodeType switch
+            {
+                ExpressionType.NotEqual => "$ne",
+                ExpressionType.GreaterThan => "$gt",
+                ExpressionType.GreaterThanOrEqual => "$gte",
+                ExpressionType.LessThan => "$lt",
+                ExpressionType.LessThanOrEqual => "$lte",
+
+                _ => throw new UnreachableException()
+            };
+
+            return new BsonDocument { [storagePropertyName] = new BsonDocument { [filterOperator] = BsonValue.Create(value) } };
+        }
+
+        throw new NotSupportedException("Invalid equality/comparison");
+    }
+
+    private BsonDocument TranslateAndOr(BinaryExpression andOr)
+    {
+        var mongoOperator = andOr.NodeType switch
+        {
+            ExpressionType.AndAlso => "$and",
+            ExpressionType.OrElse => "$or",
+            _ => throw new UnreachableException()
+        };
+
+        var (left, right) = (this.Translate(andOr.Left), this.Translate(andOr.Right));
+
+        var nestedLeft = left.ElementCount == 1 && left.Elements.First() is var leftElement && leftElement.Name == mongoOperator ? (BsonArray)leftElement.Value : null;
+        var nestedRight = right.ElementCount == 1 && right.Elements.First() is var rightElement && rightElement.Name == mongoOperator ? (BsonArray)rightElement.Value : null;
+
+        switch ((nestedLeft, nestedRight))
+        {
+            case (not null, not null):
+                nestedLeft.AddRange(nestedRight);
+                return left;
+            case (not null, null):
+                nestedLeft.Add(right);
+                return left;
+            case (null, not null):
+                nestedRight.Insert(0, left);
+                return right;
+            case (null, null):
+                return new BsonDocument { [mongoOperator] = new BsonArray([left, right]) };
+        }
+    }
+
+    private BsonDocument TranslateNot(UnaryExpression not)
+    {
+        switch (not.Operand)
+        {
+            // Special handling for !(a == b) and !(a != b)
+            case BinaryExpression { NodeType: ExpressionType.Equal or ExpressionType.NotEqual } binary:
+                return this.TranslateEqualityComparison(
+                    Expression.MakeBinary(
+                        binary.NodeType is ExpressionType.Equal ? ExpressionType.NotEqual : ExpressionType.Equal,
+                        binary.Left,
+                        binary.Right));
+
+            // Not over bool field (Filter => r => !r.Bool)
+            case MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _):
+                return this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(false)));
+        }
+
+        var operand = this.Translate(not.Operand);
+
+        // Identify NOT over $in, transform to $nin (https://www.mongodb.com/docs/manual/reference/operator/query/nin/#mongodb-query-op.-nin)
+        if (operand.ElementCount == 1 && operand.Elements.First() is { Name: var fieldName, Value: BsonDocument nested } &&
+            nested.ElementCount == 1 && nested.Elements.First() is { Name: "$in", Value: BsonArray values })
+        {
+            return new BsonDocument { [fieldName] = new BsonDocument { ["$nin"] = values } };
+        }
+
+        throw new NotSupportedException("MongoDB does not support the NOT operator in vector search pre-filters");
+    }
+
+    private BsonDocument TranslateMethodCall(MethodCallExpression methodCall)
+        => methodCall switch
+        {
+            // Enumerable.Contains()
+            { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains
+                when contains.Method.DeclaringType == typeof(Enumerable)
+                => this.TranslateContains(source, item),
+
+            // List.Contains()
+            {
+                Method:
+                {
+                    Name: nameof(Enumerable.Contains),
+                    DeclaringType: { IsGenericType: true } declaringType
+                },
+                Object: Expression source,
+                Arguments: [var item]
+            } when declaringType.GetGenericTypeDefinition() == typeof(List<>) => this.TranslateContains(source, item),
+
+            _ => throw new NotSupportedException($"Unsupported method call: {methodCall.Method.DeclaringType?.Name}.{methodCall.Method.Name}")
+        };
+
+    private BsonDocument TranslateContains(Expression source, Expression item)
+    {
+        switch (source)
+        {
+            // Contains over array column (r => r.Strings.Contains("foo"))
+            case var _ when this.TryTranslateFieldAccess(source, out _):
+                throw new NotSupportedException("MongoDB does not support Contains within array fields ($elemMatch) in vector search pre-filters");
+
+            // Contains over inline enumerable
+            case NewArrayExpression newArray:
+                var elements = new object?[newArray.Expressions.Count];
+
+                for (var i = 0; i < newArray.Expressions.Count; i++)
+                {
+                    if (!TryGetConstant(newArray.Expressions[i], out var elementValue))
+                    {
+                        throw new NotSupportedException("Invalid element in array");
+                    }
+
+                    elements[i] = elementValue;
+                }
+
+                return ProcessInlineEnumerable(elements, item);
+
+            // Contains over captured enumerable (we inline)
+            case var _ when TryGetConstant(source, out var constantEnumerable)
+                && constantEnumerable is IEnumerable enumerable and not string:
+                return ProcessInlineEnumerable(enumerable, item);
+
+            default:
+                throw new 
NotSupportedException("Unsupported Contains expression"); + } + + BsonDocument ProcessInlineEnumerable(IEnumerable elements, Expression item) + { + if (!this.TryTranslateFieldAccess(item, out var storagePropertyName)) + { + throw new NotSupportedException("Unsupported item type in Contains"); + } + + return new BsonDocument + { + [storagePropertyName] = new BsonDocument + { + ["$in"] = new BsonArray(from object? element in elements select BsonValue.Create(element)) + } + }; + } + } + + private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] out string? storagePropertyName) + { + if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) + { + if (!this._storagePropertyNames.TryGetValue(memberExpression.Member.Name, out storagePropertyName)) + { + throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); + } + + return true; + } + + storagePropertyName = null; + return false; + } + + private static bool TryGetConstant(Expression expression, out object? constantValue) + { + switch (expression) + { + case ConstantExpression { Value: var v }: + constantValue = v; + return true; + + // This identifies compiler-generated closure types which contain captured variables. + case MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } + when constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) + && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true): + constantValue = fieldInfo.GetValue(constant.Value); + return true; + + default: + constantValue = null; + return false; + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.cs index 6e41eb7f3cb9..32377244112c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.cs @@ -20,6 +20,7 @@ internal static class AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping /// Returns distance function specified on vector property or default . public static string GetVectorPropertyDistanceFunction(string? distanceFunction) => !string.IsNullOrWhiteSpace(distanceFunction) ? distanceFunction! : MongoDBConstants.DefaultDistanceFunction; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete /// /// Build Azure CosmosDB MongoDB filter from the provided . /// @@ -86,6 +87,7 @@ internal static class AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping return filter; } +#pragma warning restore CS0618 // VectorSearchFilter is obsolete /// Returns search part of the search query for index kind. 
public static BsonDocument GetSearchQueryForHnswIndex( diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index d54a184e5771..a5d355150da3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Reflection; using System.Runtime.CompilerServices; using System.Threading; @@ -12,6 +13,7 @@ using MongoDB.Bson; using MongoDB.Bson.Serialization.Attributes; using MongoDB.Driver; +using MEVD = Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; @@ -33,7 +35,7 @@ public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollection : I private const string DocumentPropertyName = "document"; /// The default options for vector search. - private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + private static readonly MEVD.VectorSearchOptions s_defaultVectorSearchOptions = new(); /// that can be used to manage the collections in Azure CosmosDB MongoDB. private readonly IMongoDatabase _mongoDatabase; @@ -244,7 +246,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable reco /// public async Task> VectorizedSearchAsync( TVector vector, - VectorSearchOptions? options = null, + MEVD.VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); @@ -270,9 +272,17 @@ public async Task> VectorizedSearchAsync( var vectorPropertyName = this._storagePropertyNames[vectorProperty.DataModelPropertyName]; - var filter = AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter( - searchOptions.Filter, - this._storagePropertyNames); +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + var filter = searchOptions switch + { + { Filter: not null, NewFilter: not null } => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), + { Filter: VectorSearchFilter legacyFilter } => AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter( + legacyFilter, + this._storagePropertyNames), + { NewFilter: Expression> newFilter } => new AzureCosmosDBMongoDBFilterTranslator().Translate(newFilter, this._storagePropertyNames), + _ => null + }; +#pragma warning restore CS0618 // Constructing a query to fetch "skip + top" total items // to perform skip logic locally, since skip option is not part of API. 
@@ -371,7 +381,7 @@ private async Task> FindAsync(FilterDefinition> EnumerateAndMapSearchResultsAsync( IAsyncCursor cursor, - VectorSearchOptions searchOptions, + MEVD.VectorSearchOptions searchOptions, [EnumeratorCancellation] CancellationToken cancellationToken) { const string OperationName = "Aggregate"; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLConstants.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLConstants.cs index 87aeee36355e..6dbb0d440b45 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLConstants.cs @@ -13,5 +13,5 @@ internal static class AzureCosmosDBNoSQLConstants /// Variable name for table in Azure CosmosDB NoSQL queries. /// Can be any string. Example: "SELECT x.Name FROM x". /// - internal const string TableQueryVariableName = "x"; + internal const char ContainerAlias = 'x'; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLFilter.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLFilter.cs deleted file mode 100644 index 8cf6636c73e7..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLFilter.cs +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; - -namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; - -/// -/// Contains properties required to build query with filtering conditions. -/// -internal sealed class AzureCosmosDBNoSQLFilter -{ - public List? WhereClauseArguments { get; set; } - - public Dictionary? QueryParameters { get; set; } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs index a66eb5bfb719..1b0e7dcb8a7f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Text; using Microsoft.Azure.Cosmos; using Microsoft.Extensions.VectorData; @@ -21,13 +22,13 @@ internal static class AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder /// /// Builds to get items from Azure CosmosDB NoSQL using vector search. 
/// - public static QueryDefinition BuildSearchQuery( + public static QueryDefinition BuildSearchQuery( TVector vector, List fields, Dictionary storagePropertyNames, string vectorPropertyName, string scorePropertyName, - VectorSearchOptions searchOptions) + VectorSearchOptions searchOptions) { Verify.NotNull(vector); @@ -36,7 +37,7 @@ public static QueryDefinition BuildSearchQuery( const string LimitVariableName = "@limit"; const string TopVariableName = "@top"; - var tableVariableName = AzureCosmosDBNoSQLConstants.TableQueryVariableName; + var tableVariableName = AzureCosmosDBNoSQLConstants.ContainerAlias; var fieldsArgument = fields.Select(field => $"{tableVariableName}.{field}"); var vectorDistanceArgument = $"VectorDistance({tableVariableName}.{vectorPropertyName}, {VectorVariableName})"; @@ -44,19 +45,22 @@ public static QueryDefinition BuildSearchQuery( var selectClauseArguments = string.Join(SelectClauseDelimiter, [.. fieldsArgument, vectorDistanceArgumentWithAlias]); - var filter = BuildSearchFilter(searchOptions.Filter, storagePropertyNames); +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + // Build filter object. + var (whereClause, filterParameters) = searchOptions switch + { + { Filter: not null, NewFilter: not null } => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), + { Filter: VectorSearchFilter legacyFilter } => BuildSearchFilter(legacyFilter, storagePropertyNames), + { NewFilter: Expression> newFilter } => new AzureCosmosDBNoSqlFilterTranslator().Translate(newFilter, storagePropertyNames), + _ => (null, []) + }; +#pragma warning restore CS0618 // VectorSearchFilter is obsolete - var filterQueryParameters = filter?.QueryParameters; - var filterWhereClauseArguments = filter?.WhereClauseArguments; - var queryParameters = new Dictionary + var queryParameters = new Dictionary { [VectorVariableName] = vector }; - var whereClause = filterWhereClauseArguments is { Count: > 0 } ? - $"WHERE {string.Join(AndConditionDelimiter, filterWhereClauseArguments)}" : - string.Empty; - // If Offset is not configured, use Top parameter instead of Limit/Offset // since it's more optimized. var topArgument = searchOptions.Skip == 0 ? 
$"TOP {TopVariableName} " : string.Empty; @@ -66,9 +70,9 @@ public static QueryDefinition BuildSearchQuery( builder.AppendLine($"SELECT {topArgument}{selectClauseArguments}"); builder.AppendLine($"FROM {tableVariableName}"); - if (filterWhereClauseArguments is { Count: > 0 }) + if (whereClause is not null) { - builder.AppendLine($"WHERE {string.Join(AndConditionDelimiter, filterWhereClauseArguments)}"); + builder.Append("WHERE ").AppendLine(whereClause); } builder.AppendLine($"ORDER BY {vectorDistanceArgument}"); @@ -86,9 +90,9 @@ public static QueryDefinition BuildSearchQuery( var queryDefinition = new QueryDefinition(builder.ToString()); - if (filterQueryParameters is { Count: > 0 }) + if (filterParameters is { Count: > 0 }) { - queryParameters = queryParameters.Union(filterQueryParameters).ToDictionary(k => k.Key, v => v.Value); + queryParameters = queryParameters.Union(filterParameters).ToDictionary(k => k.Key, v => v.Value); } foreach (var queryParameter in queryParameters) @@ -113,7 +117,7 @@ public static QueryDefinition BuildSelectQuery( const string RecordKeyVariableName = "@rk"; const string PartitionKeyVariableName = "@pk"; - var tableVariableName = AzureCosmosDBNoSQLConstants.TableQueryVariableName; + var tableVariableName = AzureCosmosDBNoSQLConstants.ContainerAlias; var selectClauseArguments = string.Join(SelectClauseDelimiter, fields.Select(field => $"{tableVariableName}.{field}")); @@ -123,10 +127,11 @@ public static QueryDefinition BuildSelectQuery( $"({tableVariableName}.{keyStoragePropertyName} = {RecordKeyVariableName}{index} {AndConditionDelimiter} " + $"{tableVariableName}.{partitionKeyStoragePropertyName} = {PartitionKeyVariableName}{index})")); - var query = - $"SELECT {selectClauseArguments} " + - $"FROM {tableVariableName} " + - $"WHERE {whereClauseArguments} "; + var query = $""" + SELECT {selectClauseArguments} + FROM {tableVariableName} + WHERE {whereClauseArguments} + """; var queryDefinition = new QueryDefinition(query); @@ -147,44 +152,43 @@ public static QueryDefinition BuildSelectQuery( #region private - private static AzureCosmosDBNoSQLFilter? BuildSearchFilter( - VectorSearchFilter? 
filter, +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + private static (string WhereClause, Dictionary Parameters) BuildSearchFilter( + VectorSearchFilter filter, Dictionary storagePropertyNames) { const string EqualOperator = "="; const string ArrayContainsOperator = "ARRAY_CONTAINS"; const string ConditionValueVariableName = "@cv"; - var tableVariableName = AzureCosmosDBNoSQLConstants.TableQueryVariableName; + var tableVariableName = AzureCosmosDBNoSQLConstants.ContainerAlias; - var filterClauses = filter?.FilterClauses.ToList(); - - if (filterClauses is not { Count: > 0 }) - { - return null; - } + var filterClauses = filter.FilterClauses.ToList(); - var whereClauseArguments = new List(); - var queryParameters = new Dictionary(); + var whereClauseBuilder = new StringBuilder(); + var queryParameters = new Dictionary(); for (var i = 0; i < filterClauses.Count; i++) { + if (i > 0) + { + whereClauseBuilder.Append(" AND "); + } var filterClause = filterClauses[i]; string queryParameterName = $"{ConditionValueVariableName}{i}"; object queryParameterValue; - string whereClauseArgument; if (filterClause is EqualToFilterClause equalToFilterClause) { var propertyName = GetStoragePropertyName(equalToFilterClause.FieldName, storagePropertyNames); - whereClauseArgument = $"{tableVariableName}.{propertyName} {EqualOperator} {queryParameterName}"; + whereClauseBuilder.Append($"{tableVariableName}.{propertyName} {EqualOperator} {queryParameterName}"); queryParameterValue = equalToFilterClause.Value; } else if (filterClause is AnyTagEqualToFilterClause anyTagEqualToFilterClause) { var propertyName = GetStoragePropertyName(anyTagEqualToFilterClause.FieldName, storagePropertyNames); - whereClauseArgument = $"{ArrayContainsOperator}({tableVariableName}.{propertyName}, {queryParameterName})"; + whereClauseBuilder.Append($"{ArrayContainsOperator}({tableVariableName}.{propertyName}, {queryParameterName})"); queryParameterValue = anyTagEqualToFilterClause.Value; } else @@ -196,16 +200,12 @@ public static QueryDefinition BuildSelectQuery( nameof(AnyTagEqualToFilterClause)])}"); } - whereClauseArguments.Add(whereClauseArgument); queryParameters.Add(queryParameterName, queryParameterValue); } - return new AzureCosmosDBNoSQLFilter - { - WhereClauseArguments = whereClauseArguments, - QueryParameters = queryParameters, - }; + return (whereClauseBuilder.ToString(), queryParameters); } +#pragma warning restore CS0618 // VectorSearchFilter is obsolete private static string GetStoragePropertyName(string propertyName, Dictionary storagePropertyNames) { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index 6ab9222d2a14..53463cb943b4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -69,7 +69,7 @@ public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollection : ]; /// The default options for vector search. - private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); /// that can be used to manage the collections in Azure CosmosDB NoSQL. 
private readonly Database _database; @@ -355,7 +355,7 @@ async IAsyncEnumerable IVectorStoreRecordCollect /// public Task> VectorizedSearchAsync( TVector vector, - VectorSearchOptions? options = null, + VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { const string OperationName = "VectorizedSearch"; @@ -679,7 +679,7 @@ private async IAsyncEnumerable> MapSearchResultsAsyn IAsyncEnumerable jsonObjects, string scorePropertyName, string operationName, - VectorSearchOptions searchOptions, + VectorSearchOptions searchOptions, [EnumeratorCancellation] CancellationToken cancellationToken) { await foreach (var jsonObject in jsonObjects.ConfigureAwait(false)) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs new file mode 100644 index 000000000000..e18f176c2ea7 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs @@ -0,0 +1,282 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Text; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; + +internal class AzureCosmosDBNoSqlFilterTranslator +{ + private IReadOnlyDictionary _storagePropertyNames = null!; + private ParameterExpression _recordParameter = null!; + + private readonly Dictionary _parameters = new(); + private readonly StringBuilder _sql = new(); + + internal (string WhereClause, Dictionary Parameters) Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + { + Debug.Assert(this._sql.Length == 0); + + this._storagePropertyNames = storagePropertyNames; + + Debug.Assert(lambdaExpression.Parameters.Count == 1); + this._recordParameter = lambdaExpression.Parameters[0]; + + this.Translate(lambdaExpression.Body); + return (this._sql.ToString(), this._parameters); + } + + private void Translate(Expression? 
node) + { + switch (node) + { + case BinaryExpression binary: + this.TranslateBinary(binary); + return; + + case ConstantExpression constant: + this.TranslateConstant(constant); + return; + + case MemberExpression member: + this.TranslateMember(member); + return; + + case NewArrayExpression newArray: + this.TranslateNewArray(newArray); + return; + + case MethodCallExpression methodCall: + this.TranslateMethodCall(methodCall); + return; + + case UnaryExpression unary: + this.TranslateUnary(unary); + return; + + default: + throw new NotSupportedException("Unsupported NodeType in filter: " + node?.NodeType); + } + } + + private void TranslateBinary(BinaryExpression binary) + { + this._sql.Append('('); + this.Translate(binary.Left); + + this._sql.Append(binary.NodeType switch + { + ExpressionType.Equal => " = ", + ExpressionType.NotEqual => " <> ", + + ExpressionType.GreaterThan => " > ", + ExpressionType.GreaterThanOrEqual => " >= ", + ExpressionType.LessThan => " < ", + ExpressionType.LessThanOrEqual => " <= ", + + ExpressionType.AndAlso => " AND ", + ExpressionType.OrElse => " OR ", + + _ => throw new NotSupportedException("Unsupported binary expression node type: " + binary.NodeType) + }); + + this.Translate(binary.Right); + this._sql.Append(')'); + } + + private void TranslateConstant(ConstantExpression constant) + { + // TODO: Nullable + switch (constant.Value) + { + case byte b: + this._sql.Append(b); + return; + case short s: + this._sql.Append(s); + return; + case int i: + this._sql.Append(i); + return; + case long l: + this._sql.Append(l); + return; + + case string s: + this._sql.Append('"').Append(s.Replace(@"\", @"\\").Replace("\"", "\\\"")).Append('"'); + return; + case bool b: + this._sql.Append(b ? "true" : "false"); + return; + case Guid g: + this._sql.Append('"').Append(g.ToString()).Append('"'); + return; + + case DateTime: + case DateTimeOffset: + throw new NotImplementedException(); + + case Array: + throw new NotImplementedException(); + + case null: + this._sql.Append("null"); + return; + + default: + throw new NotSupportedException("Unsupported constant type: " + constant.Value.GetType().Name); + } + } + + private void TranslateMember(MemberExpression memberExpression) + { + switch (memberExpression) + { + case var _ when this.TryGetPropertyAccess(memberExpression, out var column): + this._sql.Append(AzureCosmosDBNoSQLConstants.ContainerAlias).Append("[\"").Append(column).Append("\"]"); + return; + + // Identify captured lambda variables, translate to Cosmos parameters (@foo, @bar...) 
+ case var _ when TryGetCapturedValue(memberExpression, out var name, out var value): + // Duplicate parameter name, create a new parameter with a different name + // TODO: Share the same parameter when it references the same captured value + if (this._parameters.ContainsKey(name)) + { + var baseName = name; + var i = 0; + do + { + name = baseName + (i++); + } while (this._parameters.ContainsKey(name)); + } + + name = '@' + name; + this._parameters.Add(name, value); + this._sql.Append(name); + return; + + default: + throw new NotSupportedException($"Member access for '{memberExpression.Member.Name}' is unsupported - only member access over the filter parameter are supported"); + } + } + + private void TranslateNewArray(NewArrayExpression newArray) + { + this._sql.Append('['); + + for (var i = 0; i < newArray.Expressions.Count; i++) + { + if (i > 0) + { + this._sql.Append(", "); + } + + this.Translate(newArray.Expressions[i]); + } + + this._sql.Append(']'); + } + + private void TranslateMethodCall(MethodCallExpression methodCall) + { + switch (methodCall) + { + // Enumerable.Contains() + case { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains + when contains.Method.DeclaringType == typeof(Enumerable): + this.TranslateContains(source, item); + return; + + // List.Contains() + case + { + Method: + { + Name: nameof(Enumerable.Contains), + DeclaringType: { IsGenericType: true } declaringType + }, + Object: Expression source, + Arguments: [var item] + } when declaringType.GetGenericTypeDefinition() == typeof(List<>): + this.TranslateContains(source, item); + return; + + default: + throw new NotSupportedException($"Unsupported method call: {methodCall.Method.DeclaringType?.Name}.{methodCall.Method.Name}"); + } + } + + private void TranslateContains(Expression source, Expression item) + { + this._sql.Append("ARRAY_CONTAINS("); + this.Translate(source); + this._sql.Append(", "); + this.Translate(item); + this._sql.Append(')'); + } + + private void TranslateUnary(UnaryExpression unary) + { + switch (unary.NodeType) + { + // Special handling for !(a == b) and !(a != b) + case ExpressionType.Not: + if (unary.Operand is BinaryExpression { NodeType: ExpressionType.Equal or ExpressionType.NotEqual } binary) + { + this.TranslateBinary( + Expression.MakeBinary( + binary.NodeType is ExpressionType.Equal ? ExpressionType.NotEqual : ExpressionType.Equal, + binary.Left, + binary.Right)); + return; + } + + this._sql.Append("(NOT "); + this.Translate(unary.Operand); + this._sql.Append(')'); + return; + + default: + throw new NotSupportedException("Unsupported unary expression node type: " + unary.NodeType); + } + } + + private bool TryGetPropertyAccess(Expression expression, [NotNullWhen(true)] out string? column) + { + if (expression is MemberExpression member && member.Expression == this._recordParameter) + { + if (!this._storagePropertyNames.TryGetValue(member.Member.Name, out column)) + { + throw new InvalidOperationException($"Property name '{member.Member.Name}' provided as part of the filter clause is not a valid property name."); + } + + return true; + } + + column = null; + return false; + } + + private static bool TryGetCapturedValue(Expression expression, [NotNullWhen(true)] out string? name, out object? 
value) + { + if (expression is MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } + && constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) + && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true)) + { + name = fieldInfo.Name; + value = fieldInfo.GetValue(constant.Value); + return true; + } + + name = null; + value = null; + return false; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreCollectionSearchMapping.cs index 7ecea345cb85..6b33671cef9f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreCollectionSearchMapping.cs @@ -88,6 +88,7 @@ public static float ConvertScore(float score, string? distanceFunction) } } +#pragma warning disable CS0618 // VectorSearchFilter is obsolete /// /// Filter the provided records using the provided filter definition. /// @@ -95,15 +96,15 @@ public static float ConvertScore(float score, string? distanceFunction) /// The records to filter. /// The filtered records. /// Thrown when an unsupported filter clause is encountered. - public static IEnumerable FilterRecords(VectorSearchFilter? filter, IEnumerable records) + public static IEnumerable FilterRecords(VectorSearchFilter filter, IEnumerable records) { - if (filter == null) - { - return records; - } - return records.Where(record => { + if (record is null) + { + return false; + } + var result = true; // Run each filter clause against the record, and AND the results together. @@ -197,6 +198,7 @@ private static bool CheckAnyTagEqualTo(object record, AnyTagEqualToFilterClause return false; } +#pragma warning restore CS0618 // VectorSearchFilter is obsolete /// /// Get the property info for the provided property name on the record. diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index a2fe21e0cfc6..03fe957cca07 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -4,6 +4,7 @@ using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; @@ -29,7 +30,7 @@ public sealed class InMemoryVectorStoreRecordCollection : IVector ]; /// The default options for vector search. - private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); /// Internal storage for all of the record collections. private readonly ConcurrentDictionary> _internalCollections; @@ -210,7 +211,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable record /// #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously - Need to satisfy the interface which returns IAsyncEnumerable - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) #pragma warning restore CS1998 { Verify.NotNull(vector); @@ -234,13 +235,22 @@ public async Task> VectorizedSearchAsync(T throw new InvalidOperationException($"The collection does not have a vector field named '{internalOptions.VectorPropertyName}', so vector search is not possible."); } +#pragma warning disable CS0618 // VectorSearchFilter is obsolete // Filter records using the provided filter before doing the vector comparison. - var filteredRecords = InMemoryVectorStoreCollectionSearchMapping.FilterRecords(internalOptions.Filter, this.GetCollectionDictionary().Values); + var allValues = this.GetCollectionDictionary().Values.Cast(); + var filteredRecords = internalOptions switch + { + { Filter: not null, NewFilter: not null } => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), + { Filter: VectorSearchFilter legacyFilter } => InMemoryVectorStoreCollectionSearchMapping.FilterRecords(legacyFilter, allValues), + { NewFilter: Expression> newFilter } => allValues.AsQueryable().Where(newFilter), + _ => allValues + }; +#pragma warning restore CS0618 // VectorSearchFilter is obsolete // Compare each vector in the filtered results with the provided vector. - var results = filteredRecords.Select((record) => + var results = filteredRecords.Select(record => { - var vectorObject = this._vectorResolver(vectorPropertyName!, (TRecord)record); + var vectorObject = this._vectorResolver(vectorPropertyName!, record); if (vectorObject is not ReadOnlyMemory dbVector) { return null; diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs new file mode 100644 index 000000000000..202908de1c0b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs @@ -0,0 +1,258 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using System.Runtime.CompilerServices; +using MongoDB.Bson; + +namespace Microsoft.SemanticKernel.Connectors.MongoDB; + +// MongoDB query reference: https://www.mongodb.com/docs/manual/reference/operator/query +// Information specific to vector search pre-filter: https://www.mongodb.com/docs/atlas/atlas-vector-search/vector-search-stage/#atlas-vector-search-pre-filter +internal class MongoDBFilterTranslator +{ + private IReadOnlyDictionary _storagePropertyNames = null!; + private ParameterExpression _recordParameter = null!; + + internal BsonDocument Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + { + this._storagePropertyNames = storagePropertyNames; + + Debug.Assert(lambdaExpression.Parameters.Count == 1); + this._recordParameter = lambdaExpression.Parameters[0]; + + return this.Translate(lambdaExpression.Body); + } + + private BsonDocument Translate(Expression? 
node)
+        => node switch
+        {
+            BinaryExpression
+            {
+                NodeType: ExpressionType.Equal or ExpressionType.NotEqual
+                    or ExpressionType.GreaterThan or ExpressionType.GreaterThanOrEqual
+                    or ExpressionType.LessThan or ExpressionType.LessThanOrEqual
+            } binary
+                => this.TranslateEqualityComparison(binary),
+
+            BinaryExpression { NodeType: ExpressionType.AndAlso or ExpressionType.OrElse } andOr
+                => this.TranslateAndOr(andOr),
+            UnaryExpression { NodeType: ExpressionType.Not } not
+                => this.TranslateNot(not),
+
+            // MemberExpression is generally handled within e.g. TranslateEqualityComparison; this is used to translate direct bool inside filter (e.g. Filter => r => r.Bool)
+            MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _)
+                => this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(true))),
+
+            MethodCallExpression methodCall => this.TranslateMethodCall(methodCall),
+
+            _ => throw new NotSupportedException("The following NodeType is unsupported: " + node?.NodeType)
+        };
+
+    private BsonDocument TranslateEqualityComparison(BinaryExpression binary)
+    {
+        if ((this.TryTranslateFieldAccess(binary.Left, out var storagePropertyName) && TryGetConstant(binary.Right, out var value))
+            || (this.TryTranslateFieldAccess(binary.Right, out storagePropertyName) && TryGetConstant(binary.Left, out value)))
+        {
+            if (value is null)
+            {
+                throw new NotSupportedException("MongoDB does not support null checks in vector search pre-filters");
+            }
+
+            // Short form of equality (instead of $eq)
+            if (binary.NodeType is ExpressionType.Equal)
+            {
+                return new BsonDocument { [storagePropertyName] = BsonValue.Create(value) };
+            }
+
+            var filterOperator = binary.NodeType switch
+            {
+                ExpressionType.NotEqual => "$ne",
+                ExpressionType.GreaterThan => "$gt",
+                ExpressionType.GreaterThanOrEqual => "$gte",
+                ExpressionType.LessThan => "$lt",
+                ExpressionType.LessThanOrEqual => "$lte",
+
+                _ => throw new UnreachableException()
+            };
+
+            return new BsonDocument { [storagePropertyName] = new BsonDocument { [filterOperator] = BsonValue.Create(value) } };
+        }
+
+        throw new NotSupportedException("Invalid equality/comparison");
+    }
+
+    private BsonDocument TranslateAndOr(BinaryExpression andOr)
+    {
+        var mongoOperator = andOr.NodeType switch
+        {
+            ExpressionType.AndAlso => "$and",
+            ExpressionType.OrElse => "$or",
+            _ => throw new UnreachableException()
+        };
+
+        var (left, right) = (this.Translate(andOr.Left), this.Translate(andOr.Right));
+
+        var nestedLeft = left.ElementCount == 1 && left.Elements.First() is var leftElement && leftElement.Name == mongoOperator ? (BsonArray)leftElement.Value : null;
+        var nestedRight = right.ElementCount == 1 && right.Elements.First() is var rightElement && rightElement.Name == mongoOperator ? (BsonArray)rightElement.Value : null;
+
+        switch ((nestedLeft, nestedRight))
+        {
+            case (not null, not null):
+                nestedLeft.AddRange(nestedRight);
+                return left;
+            case (not null, null):
+                nestedLeft.Add(right);
+                return left;
+            case (null, not null):
+                nestedRight.Insert(0, left);
+                return right;
+            case (null, null):
+                return new BsonDocument { [mongoOperator] = new BsonArray([left, right]) };
+        }
+    }
+
+    private BsonDocument TranslateNot(UnaryExpression not)
+    {
+        switch (not.Operand)
+        {
+            // Special handling for !(a == b) and !(a != b)
+            case BinaryExpression { NodeType: ExpressionType.Equal or ExpressionType.NotEqual } binary:
+                return this.TranslateEqualityComparison(
+                    Expression.MakeBinary(
+                        binary.NodeType is ExpressionType.Equal ? ExpressionType.NotEqual : ExpressionType.Equal,
+                        binary.Left,
+                        binary.Right));
+
+            // Not over bool field (Filter => r => !r.Bool)
+            case MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _):
+                return this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(false)));
+        }
+
+        var operand = this.Translate(not.Operand);
+
+        // Identify NOT over $in, transform to $nin (https://www.mongodb.com/docs/manual/reference/operator/query/nin/#mongodb-query-op.-nin)
+        if (operand.ElementCount == 1 && operand.Elements.First() is { Name: var fieldName, Value: BsonDocument nested } &&
+            nested.ElementCount == 1 && nested.Elements.First() is { Name: "$in", Value: BsonArray values })
+        {
+            return new BsonDocument { [fieldName] = new BsonDocument { ["$nin"] = values } };
+        }
+
+        throw new NotSupportedException("MongoDB does not support the NOT operator in vector search pre-filters");
+    }
+
+    private BsonDocument TranslateMethodCall(MethodCallExpression methodCall)
+        => methodCall switch
+        {
+            // Enumerable.Contains()
+            { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains
+                when contains.Method.DeclaringType == typeof(Enumerable)
+                => this.TranslateContains(source, item),
+
+            // List.Contains()
+            {
+                Method:
+                {
+                    Name: nameof(Enumerable.Contains),
+                    DeclaringType: { IsGenericType: true } declaringType
+                },
+                Object: Expression source,
+                Arguments: [var item]
+            } when declaringType.GetGenericTypeDefinition() == typeof(List<>) => this.TranslateContains(source, item),
+
+            _ => throw new NotSupportedException($"Unsupported method call: {methodCall.Method.DeclaringType?.Name}.{methodCall.Method.Name}")
+        };
+
+    private BsonDocument TranslateContains(Expression source, Expression item)
+    {
+        switch (source)
+        {
+            // Contains over array column (r => r.Strings.Contains("foo"))
+            case var _ when this.TryTranslateFieldAccess(source, out _):
+                throw new NotSupportedException("MongoDB does not support Contains within array fields ($elemMatch) in vector search pre-filters");
+
+            // Contains over inline enumerable
+            case NewArrayExpression newArray:
+                var elements = new object?[newArray.Expressions.Count];
+
+                for (var i = 0; i < newArray.Expressions.Count; i++)
+                {
+                    if (!TryGetConstant(newArray.Expressions[i], out var elementValue))
+                    {
+                        throw new NotSupportedException("Invalid element in array");
+                    }
+
+                    elements[i] = elementValue;
+                }
+
+                return ProcessInlineEnumerable(elements, item);
+
+            // Contains over captured enumerable (we inline)
+            case var _ when TryGetConstant(source, out var constantEnumerable)
+                && constantEnumerable is IEnumerable enumerable and not string:
+                return ProcessInlineEnumerable(enumerable, item);
+
+            default:
+                throw new
NotSupportedException("Unsupported Contains expression"); + } + + BsonDocument ProcessInlineEnumerable(IEnumerable elements, Expression item) + { + if (!this.TryTranslateFieldAccess(item, out var storagePropertyName)) + { + throw new NotSupportedException("Unsupported item type in Contains"); + } + + return new BsonDocument + { + [storagePropertyName] = new BsonDocument + { + ["$in"] = new BsonArray(from object? element in elements select BsonValue.Create(element)) + } + }; + } + } + + private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] out string? storagePropertyName) + { + if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) + { + if (!this._storagePropertyNames.TryGetValue(memberExpression.Member.Name, out storagePropertyName)) + { + throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); + } + + return true; + } + + storagePropertyName = null; + return false; + } + + private static bool TryGetConstant(Expression expression, out object? constantValue) + { + switch (expression) + { + case ConstantExpression { Value: var v }: + constantValue = v; + return true; + + // This identifies compiler-generated closure types which contain captured variables. + case MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } + when constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) + && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true): + constantValue = fieldInfo.GetValue(constant.Value); + return true; + + default: + constantValue = null; + return false; + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionSearchMapping.cs index 931b668f535d..de47f6723b23 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionSearchMapping.cs @@ -16,6 +16,7 @@ internal static class MongoDBVectorStoreCollectionSearchMapping /// Returns distance function specified on vector property or default . public static string GetVectorPropertyDistanceFunction(string? distanceFunction) => !string.IsNullOrWhiteSpace(distanceFunction) ? distanceFunction! : MongoDBConstants.DefaultDistanceFunction; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete /// /// Build MongoDB filter from the provided . /// @@ -23,13 +24,13 @@ internal static class MongoDBVectorStoreCollectionSearchMapping /// A dictionary that maps from a property name to the storage name. /// Thrown when the provided filter type is unsupported. /// Thrown when property name specified in filter doesn't exist. - public static BsonDocument? BuildFilter( - VectorSearchFilter? vectorSearchFilter, + public static BsonDocument? BuildLegacyFilter( + VectorSearchFilter vectorSearchFilter, Dictionary storagePropertyNames) { const string EqualOperator = "$eq"; - var filterClauses = vectorSearchFilter?.FilterClauses.ToList(); + var filterClauses = vectorSearchFilter.FilterClauses.ToList(); if (filterClauses is not { Count: > 0 }) { @@ -82,6 +83,7 @@ internal static class MongoDBVectorStoreCollectionSearchMapping return filter; } +#pragma warning restore CS0618 /// Returns search part of the search query. 
public static BsonDocument GetSearchQuery( diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index 353b3534dab9..25fc14e8196e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Reflection; using System.Runtime.CompilerServices; using System.Threading; @@ -11,6 +12,7 @@ using MongoDB.Bson; using MongoDB.Bson.Serialization.Attributes; using MongoDB.Driver; +using MEVD = Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.MongoDB; @@ -32,7 +34,7 @@ public sealed class MongoDBVectorStoreRecordCollection : IVectorStoreRe private const string DocumentPropertyName = "document"; /// The default options for vector search. - private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + private static readonly MEVD.VectorSearchOptions s_defaultVectorSearchOptions = new(); /// that can be used to manage the collections in MongoDB. private readonly IMongoDatabase _mongoDatabase; @@ -247,7 +249,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable reco /// public async Task> VectorizedSearchAsync( TVector vector, - VectorSearchOptions? options = null, + MEVD.VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); @@ -273,9 +275,15 @@ public async Task> VectorizedSearchAsync( var vectorPropertyName = this._storagePropertyNames[vectorProperty.DataModelPropertyName]; - var filter = MongoDBVectorStoreCollectionSearchMapping.BuildFilter( - searchOptions.Filter, - this._storagePropertyNames); +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + var filter = searchOptions switch + { + { Filter: not null, NewFilter: not null } => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), + { Filter: VectorSearchFilter legacyFilter } => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(legacyFilter, this._storagePropertyNames), + { NewFilter: Expression> newFilter } => new MongoDBFilterTranslator().Translate(newFilter, this._storagePropertyNames), + _ => null + }; +#pragma warning restore CS0618 // Constructing a query to fetch "skip + top" total items // to perform skip logic locally, since skip option is not part of API. 
@@ -383,7 +391,7 @@ private async Task> FindAsync(FilterDefinition> EnumerateAndMapSearchResultsAsync( IAsyncCursor cursor, - VectorSearchOptions searchOptions, + MEVD.VectorSearchOptions searchOptions, [EnumeratorCancellation] CancellationToken cancellationToken) { const string OperationName = "Aggregate"; diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionSearchMapping.cs index e02e18807d9c..5b3d511c6b08 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionSearchMapping.cs @@ -12,6 +12,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// internal static class PineconeVectorStoreCollectionSearchMapping { +#pragma warning disable CS0618 // FilterClause is obsolete /// /// Build a Pinecone from a set of filter clauses. /// @@ -59,4 +60,5 @@ public static MetadataMap BuildSearchFilter(IEnumerable? filterCla return metadataMap; } +#pragma warning restore CS0618 // FilterClause is obsolete } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 8a956f53f635..8e1e8cf7aaf1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -32,7 +32,7 @@ public sealed class PineconeVectorStoreRecordCollection : IVectorStoreR private const string GetOperationName = "Get"; private const string QueryOperationName = "Query"; - private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); private readonly Sdk.PineconeClient _pineconeClient; private readonly PineconeVectorStoreRecordCollectionOptions _options; @@ -246,7 +246,7 @@ await this.RunOperationAsync( } /// - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); @@ -259,9 +259,12 @@ public async Task> VectorizedSearchAsync(T // Resolve options and build filter clause. var internalOptions = options ?? s_defaultVectorSearchOptions; var mapperOptions = new StorageToDataModelMapperOptions { IncludeVectors = options?.IncludeVectors ?? false }; + +#pragma warning disable CS0618 // FilterClause is obsolete var filter = PineconeVectorStoreCollectionSearchMapping.BuildSearchFilter( internalOptions.Filter?.FilterClauses, this._propertyReader.StoragePropertyNamesMap); +#pragma warning restore CS0618 // Get the current index. var indexNamespace = this.GetIndexNamespace(); diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs index d130d2f13b44..3c864cc6537f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs @@ -1,6 +1,8 @@ // Copyright (c) Microsoft. 
All rights reserved. +using System; using System.Collections.Generic; +using System.Linq.Expressions; using Microsoft.Extensions.VectorData; using Pgvector; @@ -124,13 +126,16 @@ internal interface IPostgresVectorStoreCollectionSqlBuilder /// /// The schema of the table. /// The name of the table. - /// The properties of the table. + /// The property reader. /// The property which the vectors to compare are stored in. /// The vector to match. - /// The filter conditions for the query. + /// The filter conditions for the query. + /// The filter conditions for the query. /// The number of records to skip. /// Specifies whether to include vectors in the result. /// The maximum number of records to return. /// The built SQL command info. - PostgresSqlCommandInfo BuildGetNearestMatchCommand(string schema, string tableName, IReadOnlyList properties, VectorStoreRecordVectorProperty vectorProperty, Vector vectorValue, VectorSearchFilter? filter, int? skip, bool includeVectors, int limit); +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + PostgresSqlCommandInfo BuildGetNearestMatchCommand(string schema, string tableName, VectorStoreRecordPropertyReader propertyReader, VectorStoreRecordVectorProperty vectorProperty, Vector vectorValue, VectorSearchFilter? legacyFilter, Expression>? newFilter, int? skip, bool includeVectors, int limit); +#pragma warning restore CS0618 // VectorSearchFilter is obsolete } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs index 59aa9829c568..3fb62b667a92 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs @@ -1,6 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; +using System.Linq.Expressions; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; @@ -118,15 +120,18 @@ internal interface IPostgresVectorStoreDbClient /// Gets the nearest matches to the . /// /// The name assigned to a table of entries. - /// The properties to retrieve. - /// The property which the vectors to compare are stored in. + /// The property reader. + /// The vector property. /// The to compare the table's vector with. /// The maximum number of similarity results to return. - /// Optional conditions to filter the results. + /// Optional conditions to filter the results. + /// Optional conditions to filter the results. /// The number of entries to skip. /// If true, the vectors will be returned in the entries. /// The to monitor for cancellation requests. The default is . /// An asynchronous stream of objects that the nearest matches to the . - IAsyncEnumerable<(Dictionary Row, double Distance)> GetNearestMatchesAsync(string tableName, IReadOnlyList properties, VectorStoreRecordVectorProperty vectorProperty, Vector vectorValue, int limit, - VectorSearchFilter? filter = default, int? skip = default, bool includeVectors = false, CancellationToken cancellationToken = default); +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + IAsyncEnumerable<(Dictionary Row, double Distance)> GetNearestMatchesAsync(string tableName, VectorStoreRecordPropertyReader propertyReader, VectorStoreRecordVectorProperty vectorProperty, Vector vectorValue, int limit, + VectorSearchFilter? legacyFilter = default, Expression>? 
newFilter = default, int? skip = default, bool includeVectors = false, CancellationToken cancellationToken = default); +#pragma warning restore CS0618 // VectorSearchFilter is obsolete } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs new file mode 100644 index 000000000000..6c68527da5c1 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs @@ -0,0 +1,332 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Text; + +namespace Microsoft.SemanticKernel.Connectors.Postgres; + +internal class PostgresFilterTranslator +{ + private IReadOnlyDictionary _storagePropertyNames = null!; + private ParameterExpression _recordParameter = null!; + + private readonly List _parameterValues = new(); + private int _parameterIndex; + + private readonly StringBuilder _sql = new(); + + internal (string Clause, List Parameters) Translate( + IReadOnlyDictionary storagePropertyNames, + LambdaExpression lambdaExpression, + int startParamIndex) + { + Debug.Assert(this._sql.Length == 0); + + this._storagePropertyNames = storagePropertyNames; + + this._parameterIndex = startParamIndex; + + Debug.Assert(lambdaExpression.Parameters.Count == 1); + this._recordParameter = lambdaExpression.Parameters[0]; + + this._sql.Append("WHERE "); + this.Translate(lambdaExpression.Body); + return (this._sql.ToString(), this._parameterValues); + } + + private void Translate(Expression? node) + { + switch (node) + { + case BinaryExpression binary: + this.TranslateBinary(binary); + return; + + case ConstantExpression constant: + this.TranslateConstant(constant); + return; + + case MemberExpression member: + this.TranslateMember(member); + return; + + case MethodCallExpression methodCall: + this.TranslateMethodCall(methodCall); + return; + + case UnaryExpression unary: + this.TranslateUnary(unary); + return; + + default: + throw new NotSupportedException("Unsupported NodeType in filter: " + node?.NodeType); + } + } + + private void TranslateBinary(BinaryExpression binary) + { + // Special handling for null comparisons + switch (binary.NodeType) + { + case ExpressionType.Equal when IsNull(binary.Right): + this._sql.Append('('); + this.Translate(binary.Left); + this._sql.Append(" IS NULL)"); + return; + case ExpressionType.NotEqual when IsNull(binary.Right): + this._sql.Append('('); + this.Translate(binary.Left); + this._sql.Append(" IS NOT NULL)"); + return; + + case ExpressionType.Equal when IsNull(binary.Left): + this._sql.Append('('); + this.Translate(binary.Right); + this._sql.Append(" IS NULL)"); + return; + case ExpressionType.NotEqual when IsNull(binary.Left): + this._sql.Append('('); + this.Translate(binary.Right); + this._sql.Append(" IS NOT NULL)"); + return; + } + + this._sql.Append('('); + this.Translate(binary.Left); + + this._sql.Append(binary.NodeType switch + { + ExpressionType.Equal => " = ", + ExpressionType.NotEqual => " <> ", + + ExpressionType.GreaterThan => " > ", + ExpressionType.GreaterThanOrEqual => " >= ", + ExpressionType.LessThan => " < ", + ExpressionType.LessThanOrEqual => " <= ", + + ExpressionType.AndAlso => " AND ", + ExpressionType.OrElse => " OR ", + + _ => throw new 
NotSupportedException("Unsupported binary expression node type: " + binary.NodeType) + }); + + this.Translate(binary.Right); + this._sql.Append(')'); + + static bool IsNull(Expression expression) + => expression is ConstantExpression { Value: null } + || (TryGetCapturedValue(expression, out var capturedValue) && capturedValue is null); + } + + private void TranslateConstant(ConstantExpression constant) + { + // TODO: Nullable + switch (constant.Value) + { + case byte b: + this._sql.Append(b); + return; + case short s: + this._sql.Append(s); + return; + case int i: + this._sql.Append(i); + return; + case long l: + this._sql.Append(l); + return; + + case string s: + this._sql.Append('\'').Append(s.Replace("'", "''")).Append('\''); + return; + case bool b: + this._sql.Append(b ? "TRUE" : "FALSE"); + return; + case Guid g: + this._sql.Append('\'').Append(g.ToString()).Append('\''); + return; + + case DateTime: + case DateTimeOffset: + throw new NotImplementedException(); + + case Array: + throw new NotImplementedException(); + + case null: + this._sql.Append("NULL"); + return; + + default: + throw new NotSupportedException("Unsupported constant type: " + constant.Value.GetType().Name); + } + } + + private void TranslateMember(MemberExpression memberExpression) + { + switch (memberExpression) + { + case var _ when this.TryGetColumn(memberExpression, out var column): + this._sql.Append('"').Append(column).Append('"'); + return; + + // Identify captured lambda variables, translate to PostgreSQL parameters ($1, $2...) + case var _ when TryGetCapturedValue(memberExpression, out var capturedValue): + // For null values, simply inline rather than parameterize; parameterized NULLs require setting NpgsqlDbType which is a bit more complicated, + // plus in any case equality with NULL requires different SQL (x IS NULL rather than x = y) + if (capturedValue is null) + { + this._sql.Append("NULL"); + } + else + { + this._parameterValues.Add(capturedValue); + this._sql.Append('$').Append(this._parameterIndex++); + } + return; + + default: + throw new NotSupportedException($"Member access for '{memberExpression.Member.Name}' is unsupported - only member access over the filter parameter are supported"); + } + } + + private void TranslateMethodCall(MethodCallExpression methodCall) + { + switch (methodCall) + { + // Enumerable.Contains() + case { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains + when contains.Method.DeclaringType == typeof(Enumerable): + this.TranslateContains(source, item); + return; + + // List.Contains() + case + { + Method: + { + Name: nameof(Enumerable.Contains), + DeclaringType: { IsGenericType: true } declaringType + }, + Object: Expression source, + Arguments: [var item] + } when declaringType.GetGenericTypeDefinition() == typeof(List<>): + this.TranslateContains(source, item); + return; + + default: + throw new NotSupportedException($"Unsupported method call: {methodCall.Method.DeclaringType?.Name}.{methodCall.Method.Name}"); + } + } + + private void TranslateContains(Expression source, Expression item) + { + switch (source) + { + // Contains over array column (r => r.Strings.Contains("foo")) + case var _ when this.TryGetColumn(source, out _): + this.Translate(source); + this._sql.Append(" @> ARRAY["); + this.Translate(item); + this._sql.Append(']'); + return; + + // Contains over inline array (r => new[] { "foo", "bar" }.Contains(r.String)) + case NewArrayExpression newArray: + this.Translate(item); + this._sql.Append(" IN ("); + + var isFirst = 
true; + foreach (var element in newArray.Expressions) + { + if (isFirst) + { + isFirst = false; + } + else + { + this._sql.Append(", "); + } + + this.Translate(element); + } + + this._sql.Append(')'); + return; + + // Contains over captured array (r => arrayLocalVariable.Contains(r.String)) + case var _ when TryGetCapturedValue(source, out _): + this.Translate(item); + this._sql.Append(" = ANY ("); + this.Translate(source); + this._sql.Append(')'); + return; + + default: + throw new NotSupportedException("Unsupported Contains expression"); + } + } + + private void TranslateUnary(UnaryExpression unary) + { + switch (unary.NodeType) + { + case ExpressionType.Not: + // Special handling for !(a == b) and !(a != b) + if (unary.Operand is BinaryExpression { NodeType: ExpressionType.Equal or ExpressionType.NotEqual } binary) + { + this.TranslateBinary( + Expression.MakeBinary( + binary.NodeType is ExpressionType.Equal ? ExpressionType.NotEqual : ExpressionType.Equal, + binary.Left, + binary.Right)); + return; + } + + this._sql.Append("(NOT "); + this.Translate(unary.Operand); + this._sql.Append(')'); + return; + + default: + throw new NotSupportedException("Unsupported unary expression node type: " + unary.NodeType); + } + } + + private bool TryGetColumn(Expression expression, [NotNullWhen(true)] out string? column) + { + if (expression is MemberExpression member && member.Expression == this._recordParameter) + { + if (!this._storagePropertyNames.TryGetValue(member.Member.Name, out column)) + { + throw new InvalidOperationException($"Property name '{member.Member.Name}' provided as part of the filter clause is not a valid property name."); + } + + return true; + } + + column = null; + return false; + } + + private static bool TryGetCapturedValue(Expression expression, out object? 
capturedValue) + { + if (expression is MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } + && constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) + && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true)) + { + capturedValue = fieldInfo.GetValue(constant.Value); + return true; + } + + capturedValue = null; + return false; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs index d68412d31b7d..364c564703e4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Text; using Microsoft.Extensions.VectorData; using Npgsql; @@ -20,12 +21,13 @@ internal class PostgresVectorStoreCollectionSqlBuilder : IPostgresVectorStoreCol public PostgresSqlCommandInfo BuildDoesTableExistCommand(string schema, string tableName) { return new PostgresSqlCommandInfo( - commandText: @" - SELECT table_name - FROM information_schema.tables - WHERE table_schema = $1 - AND table_type = 'BASE TABLE' - AND table_name = $2", + commandText: """ +SELECT table_name +FROM information_schema.tables +WHERE table_schema = $1 + AND table_type = 'BASE TABLE' + AND table_name = $2 +""", parameters: [ new NpgsqlParameter() { Value = schema }, new NpgsqlParameter() { Value = tableName } @@ -37,11 +39,11 @@ FROM information_schema.tables public PostgresSqlCommandInfo BuildGetTablesCommand(string schema) { return new PostgresSqlCommandInfo( - commandText: @" - SELECT table_name - FROM information_schema.tables - WHERE table_schema = $1 - AND table_type = 'BASE TABLE'", + commandText: """ +SELECT table_name +FROM information_schema.tables +WHERE table_schema = $1 AND table_type = 'BASE TABLE' +""", parameters: [new NpgsqlParameter() { Value = schema }] ); } @@ -167,11 +169,12 @@ public PostgresSqlCommandInfo BuildUpsertCommand(string schema, string tableName var valuesParams = string.Join(", ", columns.Select((k, i) => $"${i + 1}")); var columnsWithIndex = columns.Select((k, i) => (col: k, idx: i)); var updateColumnsWithParams = string.Join(", ", columnsWithIndex.Where(c => c.col != keyColumn).Select(c => $"\"{c.col}\"=${c.idx + 1}")); - var commandText = $@" - INSERT INTO {schema}.""{tableName}"" ({columnNames}) - VALUES({valuesParams}) - ON CONFLICT (""{keyColumn}"") - DO UPDATE SET {updateColumnsWithParams};"; + var commandText = $""" +INSERT INTO {schema}."{tableName}" ({columnNames}) +VALUES ({valuesParams}) +ON CONFLICT ("{keyColumn}") +DO UPDATE SET {updateColumnsWithParams}; +"""; return new PostgresSqlCommandInfo(commandText) { @@ -204,11 +207,12 @@ public PostgresSqlCommandInfo BuildUpsertBatchCommand(string schema, string tabl var updateSetClause = string.Join(", ", columns.Where(c => c != keyColumn).Select(c => $"\"{c}\" = EXCLUDED.\"{c}\"")); // Generate the SQL command - var commandText = $@" - INSERT INTO {schema}.""{tableName}"" ({columnNames}) - VALUES {valuesRows} - ON CONFLICT (""{keyColumn}"") - DO UPDATE SET {updateSetClause}; "; + var commandText = $""" +INSERT INTO {schema}."{tableName}" ({columnNames}) +VALUES {valuesRows} +ON CONFLICT ("{keyColumn}") +DO UPDATE SET {updateSetClause}; +"""; // Generate the 
parameters var parameters = new List(); @@ -262,10 +266,11 @@ public PostgresSqlCommandInfo BuildGetCommand(string schema, string tableN var queryColumnList = string.Join(", ", queryColumns); return new PostgresSqlCommandInfo( - commandText: $@" - SELECT {queryColumnList} - FROM {schema}.""{tableName}"" - WHERE ""{keyColumn}"" = ${1};", + commandText: $""" +SELECT {queryColumnList} +FROM {schema}."{tableName}" +WHERE "{keyColumn}" = ${1}; +""", parameters: [new NpgsqlParameter() { Value = key }] ); } @@ -294,10 +299,11 @@ public PostgresSqlCommandInfo BuildGetBatchCommand(string schema, string t var keyParams = string.Join(", ", keys.Select((k, i) => $"${i + 1}")); // Generate the SQL command - var commandText = $@" - SELECT {columnNames} - FROM {schema}.""{tableName}"" - WHERE ""{keyColumn}"" = ANY($1);"; + var commandText = $""" +SELECT {columnNames} +FROM {schema}."{tableName}" +WHERE "{keyColumn}" = ANY($1); +"""; return new PostgresSqlCommandInfo(commandText) { @@ -309,9 +315,10 @@ public PostgresSqlCommandInfo BuildGetBatchCommand(string schema, string t public PostgresSqlCommandInfo BuildDeleteCommand(string schema, string tableName, string keyColumn, TKey key) { return new PostgresSqlCommandInfo( - commandText: $@" - DELETE FROM {schema}.""{tableName}"" - WHERE ""{keyColumn}"" = ${1};", + commandText: $""" +DELETE FROM {schema}."{tableName}" +WHERE "{keyColumn}" = ${1}; +""", parameters: [new NpgsqlParameter() { Value = key }] ); } @@ -333,9 +340,10 @@ public PostgresSqlCommandInfo BuildDeleteBatchCommand(string schema, strin } } - var commandText = $@" - DELETE FROM {schema}.""{tableName}"" - WHERE ""{keyColumn}"" = ANY($1);"; + var commandText = $""" +DELETE FROM {schema}."{tableName}" +WHERE "{keyColumn}" = ANY($1); +"""; return new PostgresSqlCommandInfo(commandText) { @@ -343,13 +351,14 @@ DELETE FROM {schema}.""{tableName}"" }; } +#pragma warning disable CS0618 // VectorSearchFilter is obsolete /// - public PostgresSqlCommandInfo BuildGetNearestMatchCommand( - string schema, string tableName, IReadOnlyList properties, VectorStoreRecordVectorProperty vectorProperty, Vector vectorValue, - VectorSearchFilter? filter, int? skip, bool includeVectors, int limit) + public PostgresSqlCommandInfo BuildGetNearestMatchCommand( + string schema, string tableName, VectorStoreRecordPropertyReader propertyReader, VectorStoreRecordVectorProperty vectorProperty, Vector vectorValue, + VectorSearchFilter? legacyFilter, Expression>? newFilter, int? skip, bool includeVectors, int limit) { var columns = string.Join(" ,", - properties + propertyReader.RecordDefinition.Properties .Select(property => property.StoragePropertyName ?? property.DataModelPropertyName) .Select(column => $"\"{column}\"") ); @@ -367,14 +376,24 @@ public PostgresSqlCommandInfo BuildGetNearestMatchCommand( }; var vectorColumn = vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName; + // Start where clause params at 2, vector takes param 1. 
- var where = GenerateWhereClause(schema, tableName, properties, filter, startParamIndex: 2); +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + var (where, parameters) = (oldFilter: legacyFilter, newFilter) switch + { + (not null, not null) => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), + (not null, null) => GenerateLegacyFilterWhereClause(schema, tableName, propertyReader.RecordDefinition.Properties, legacyFilter, startParamIndex: 2), + (null, not null) => new PostgresFilterTranslator().Translate(propertyReader.StoragePropertyNamesMap, newFilter, startParamIndex: 2), + _ => (Clause: string.Empty, Parameters: []) + }; +#pragma warning restore CS0618 // VectorSearchFilter is obsolete - var commandText = $@" - SELECT {columns}, ""{vectorColumn}"" {distanceOp} $1 AS ""{PostgresConstants.DistanceColumnName}"" - FROM {schema}.""{tableName}"" {where.Clause} - ORDER BY {PostgresConstants.DistanceColumnName} - LIMIT {limit}"; + var commandText = $""" +SELECT {columns}, "{vectorColumn}" {distanceOp} $1 AS "{PostgresConstants.DistanceColumnName}" +FROM {schema}."{tableName}" {where} +ORDER BY {PostgresConstants.DistanceColumnName} +LIMIT {limit} +"""; if (skip.HasValue) { commandText += $" OFFSET {skip.Value}"; } @@ -383,9 +402,10 @@ ORDER BY {PostgresConstants.DistanceColumnName} // Instead we'll wrap the query in a subquery and modify the distance in the outer query. if (vectorProperty.DistanceFunction == DistanceFunction.CosineSimilarity) { - commandText = $@" - SELECT {columns}, 1 - ""{PostgresConstants.DistanceColumnName}"" AS ""{PostgresConstants.DistanceColumnName}"" - FROM ({commandText}) AS subquery"; + commandText = $""" +SELECT {columns}, 1 - "{PostgresConstants.DistanceColumnName}" AS "{PostgresConstants.DistanceColumnName}" +FROM ({commandText}) AS subquery +"""; } // For inner product, we need to take -1 * inner product. @@ -393,28 +413,27 @@ ORDER BY {PostgresConstants.DistanceColumnName} // Instead we'll wrap the query in a subquery and modify the distance in the outer query. if (vectorProperty.DistanceFunction == DistanceFunction.DotProductSimilarity) { - commandText = $@" - SELECT {columns}, -1 * ""{PostgresConstants.DistanceColumnName}"" AS ""{PostgresConstants.DistanceColumnName}"" - FROM ({commandText}) AS subquery"; + commandText = $""" +SELECT {columns}, -1 * "{PostgresConstants.DistanceColumnName}" AS "{PostgresConstants.DistanceColumnName}" +FROM ({commandText}) AS subquery +"""; } return new PostgresSqlCommandInfo(commandText) { - Parameters = [new NpgsqlParameter() { Value = vectorValue }, .. where.Parameters.Select(p => new NpgsqlParameter() { Value = p })] + Parameters = [new NpgsqlParameter { Value = vectorValue }, .. parameters.Select(p => new NpgsqlParameter { Value = p })] }; } - - internal static (string Clause, List Parameters) GenerateWhereClause(string schema, string tableName, IReadOnlyList properties, VectorSearchFilter? 
filter, int startParamIndex) +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + internal static (string Clause, List Parameters) GenerateLegacyFilterWhereClause(string schema, string tableName, IReadOnlyList properties, VectorSearchFilter legacyFilter, int startParamIndex) { - if (filter == null) { return (string.Empty, new List()); } - var whereClause = new StringBuilder("WHERE "); var filterClauses = new List(); var parameters = new List(); var paramIndex = startParamIndex; - foreach (var filterClause in filter.FilterClauses) + foreach (var filterClause in legacyFilter.FilterClauses) { if (filterClause is EqualToFilterClause equalTo) { @@ -450,4 +469,5 @@ internal static (string Clause, List Parameters) GenerateWhereClause(str whereClause.Append(string.Join(" AND ", filterClauses)); return (whereClause.ToString(), parameters); } +#pragma warning restore CS0618 // VectorSearchFilter is obsolete } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs index 5ef18cc88fdf..b97b24708b25 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs @@ -1,7 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; @@ -172,21 +174,23 @@ public async Task DeleteAsync(string tableName, string keyColumn, TKey key } /// - public async IAsyncEnumerable<(Dictionary Row, double Distance)> GetNearestMatchesAsync( - string tableName, IReadOnlyList properties, VectorStoreRecordVectorProperty vectorProperty, Vector vectorValue, int limit, - VectorSearchFilter? filter = default, int? skip = default, bool includeVectors = false, [EnumeratorCancellation] CancellationToken cancellationToken = default) +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + public async IAsyncEnumerable<(Dictionary Row, double Distance)> GetNearestMatchesAsync( + string tableName, VectorStoreRecordPropertyReader propertyReader, VectorStoreRecordVectorProperty vectorProperty, Vector vectorValue, int limit, + VectorSearchFilter? legacyFilter = default, Expression>? newFilter = default, int? 
skip = default, bool includeVectors = false, [EnumeratorCancellation] CancellationToken cancellationToken = default) +#pragma warning restore CS0618 // VectorSearchFilter is obsolete { NpgsqlConnection connection = await this.DataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false); await using (connection) { - var commandInfo = this._sqlBuilder.BuildGetNearestMatchCommand(this._schema, tableName, properties, vectorProperty, vectorValue, filter, skip, includeVectors, limit); + var commandInfo = this._sqlBuilder.BuildGetNearestMatchCommand(this._schema, tableName, propertyReader, vectorProperty, vectorValue, legacyFilter, newFilter, skip, includeVectors, limit); using NpgsqlCommand cmd = commandInfo.ToNpgsqlCommand(connection); using NpgsqlDataReader dataReader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); while (await dataReader.ReadAsync(cancellationToken).ConfigureAwait(false)) { var distance = dataReader.GetDouble(dataReader.GetOrdinal(PostgresConstants.DistanceColumnName)); - yield return (Row: this.GetRecord(dataReader, properties, includeVectors), Distance: distance); + yield return (Row: this.GetRecord(dataReader, propertyReader.RecordDefinition.Properties, includeVectors), Distance: distance); } } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index de4a432ea48c..fd85896a46d4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -37,7 +37,7 @@ public sealed class PostgresVectorStoreRecordCollection : IVector private readonly IVectorStoreRecordMapper> _mapper; /// The default options for vector search. - private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); /// /// Initializes a new instance of the class. @@ -250,7 +250,7 @@ public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellat } /// - public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { const string OperationName = "VectorizedSearch"; @@ -261,7 +261,7 @@ public Task> VectorizedSearchAsync(TVector if (!PostgresConstants.SupportedVectorTypes.Contains(vectorType)) { throw new NotSupportedException( - $"The provided vector type {vectorType.FullName} is not supported by the SQLite connector. " + + $"The provided vector type {vectorType.FullName} is not supported by the PostgreSQL connector. 
" + $"Supported types are: {string.Join(", ", PostgresConstants.SupportedVectorTypes.Select(l => l.FullName))}"); } @@ -285,11 +285,14 @@ public Task> VectorizedSearchAsync(TVector { var results = this._client.GetNearestMatchesAsync( this.CollectionName, - this._propertyReader.RecordDefinition.Properties, + this._propertyReader, vectorProperty, pgVector, searchOptions.Top, +#pragma warning disable CS0618 // VectorSearchFilter is obsolete searchOptions.Filter, +#pragma warning restore CS0618 // VectorSearchFilter is obsolete + searchOptions.NewFilter, searchOptions.Skip, searchOptions.IncludeVectors, cancellationToken) diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs index 0b36f2003bf5..5e8509236e31 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs @@ -143,7 +143,7 @@ public static (string PgType, bool IsNullable) GetPostgresTypeName(Type property // Handle enumerables if (VectorStoreRecordPropertyVerification.IsSupportedEnumerableType(propertyType)) { - Type elementType = propertyType.GetGenericArguments()[0]; + Type elementType = propertyType.IsArray ? propertyType.GetElementType()! : propertyType.GetGenericArguments()[0]; var underlyingPgType = GetPostgresTypeName(elementType); return (underlyingPgType.PgType + "[]", true); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs new file mode 100644 index 000000000000..a918883aa054 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs @@ -0,0 +1,382 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using System.Runtime.CompilerServices; +using Google.Protobuf.Collections; +using Qdrant.Client.Grpc; +using Range = Qdrant.Client.Grpc.Range; + +namespace Microsoft.SemanticKernel.Connectors.Qdrant; + +internal class QdrantFilterTranslator +{ + private IReadOnlyDictionary _storagePropertyNames = null!; + private ParameterExpression _recordParameter = null!; + + internal Filter Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + { + this._storagePropertyNames = storagePropertyNames; + + Debug.Assert(lambdaExpression.Parameters.Count == 1); + this._recordParameter = lambdaExpression.Parameters[0]; + + return this.Translate(lambdaExpression.Body); + } + + private Filter Translate(Expression? 
node) + => node switch + { + BinaryExpression { NodeType: ExpressionType.Equal } equal => this.TranslateEqual(equal.Left, equal.Right), + BinaryExpression { NodeType: ExpressionType.NotEqual } notEqual => this.TranslateEqual(notEqual.Left, notEqual.Right, negated: true), + + BinaryExpression + { + NodeType: ExpressionType.GreaterThan or ExpressionType.GreaterThanOrEqual or ExpressionType.LessThan or ExpressionType.LessThanOrEqual + } comparison + => this.TranslateComparison(comparison), + + BinaryExpression { NodeType: ExpressionType.AndAlso } andAlso => this.TranslateAndAlso(andAlso.Left, andAlso.Right), + BinaryExpression { NodeType: ExpressionType.OrElse } orElse => this.TranslateOrElse(orElse.Left, orElse.Right), + UnaryExpression { NodeType: ExpressionType.Not } not => this.TranslateNot(not.Operand), + + // MemberExpression is generally handled within e.g. TranslateEqual; this is used to translate direct bool inside filter (e.g. Filter => r => r.Bool) + MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _) + => this.TranslateEqual(member, Expression.Constant(true)), + + MethodCallExpression methodCall => this.TranslateMethodCall(methodCall), + + _ => throw new NotSupportedException("Qdrant does not support the following NodeType in filters: " + node?.NodeType) + }; + + private Filter TranslateEqual(Expression left, Expression right, bool negated = false) + { + return TryProcessEqual(left, right, out var result) + ? result + : TryProcessEqual(right, left, out result) + ? result + : throw new NotSupportedException("Equality expression not supported by Qdrant"); + + bool TryProcessEqual(Expression first, Expression second, [NotNullWhen(true)] out Filter? result) + { + // TODO: Nullable + if (this.TryTranslateFieldAccess(first, out var storagePropertyName) + && TryGetConstant(second, out var constantValue)) + { + var condition = constantValue is null + ? new Condition { IsNull = new() { Key = storagePropertyName } } + : new Condition + { + Field = new FieldCondition + { + Key = storagePropertyName, + Match = constantValue switch + { + string stringValue => new Match { Keyword = stringValue }, + int intValue => new Match { Integer = intValue }, + long longValue => new Match { Integer = longValue }, + bool boolValue => new Match { Boolean = boolValue }, + + _ => throw new InvalidOperationException($"Unsupported filter value type '{constantValue.GetType().Name}'.") + } + } + }; + + result = new Filter(); + if (negated) + { + result.MustNot.Add(condition); + } + else + { + result.Must.Add(condition); + } + return true; + } + + result = null; + return false; + } + } + + private Filter TranslateComparison(BinaryExpression comparison) + { + return TryProcessComparison(comparison.Left, comparison.Right, out var result) + ? result + : TryProcessComparison(comparison.Right, comparison.Left, out result) + ? result + : throw new NotSupportedException("Comparison expression not supported by Qdrant"); + + bool TryProcessComparison(Expression first, Expression second, [NotNullWhen(true)] out Filter? 
result) + { + // TODO: Nullable + if (this.TryTranslateFieldAccess(first, out var storagePropertyName) + && TryGetConstant(second, out var constantValue)) + { + double doubleConstantValue = constantValue switch + { + double d => d, + int i => i, + long l => l, + _ => throw new NotSupportedException($"Can't perform comparison on type '{constantValue?.GetType().Name}', which isn't convertible to double") + }; + + result = new Filter(); + result.Must.Add(new Condition + { + Field = new FieldCondition + { + Key = storagePropertyName, + Range = comparison.NodeType switch + { + ExpressionType.GreaterThan => new Range { Gt = doubleConstantValue }, + ExpressionType.GreaterThanOrEqual => new Range { Gte = doubleConstantValue }, + ExpressionType.LessThan => new Range { Lt = doubleConstantValue }, + ExpressionType.LessThanOrEqual => new Range { Lte = doubleConstantValue }, + + _ => throw new InvalidOperationException("Unreachable") + } + } + }); + return true; + } + + result = null; + return false; + } + } + + #region Logical operators + + private Filter TranslateAndAlso(Expression left, Expression right) + { + var leftFilter = this.Translate(left); + var rightFilter = this.Translate(right); + + // As long as there are only AND conditions (Must or MustNot), we can simply combine both filters into a single flat one. + // The moment there's a Should, things become a bit more complicated: + // 1. If a side contains both a Should and a Must/MustNot, it must be pushed down. + // 2. Otherwise, if the left's Should is empty, and the right side is only Should, we can just copy the right Should into the left's. + // 3. Finally, if both sides have a Should, we push down the right side and put the result in the left's Must. + if (leftFilter.Should.Count > 0 && (leftFilter.Must.Count > 0 || leftFilter.MustNot.Count > 0)) + { + leftFilter = new Filter { Must = { new Condition { Filter = leftFilter } } }; + } + + if (rightFilter.Should.Count > 0 && (rightFilter.Must.Count > 0 || rightFilter.MustNot.Count > 0)) + { + rightFilter = new Filter { Must = { new Condition { Filter = rightFilter } } }; + } + + if (rightFilter.Should.Count > 0) + { + if (leftFilter.Should.Count == 0) + { + leftFilter.Should.AddRange(rightFilter.Should); + } + else + { + rightFilter = new Filter { Must = { new Condition { Filter = rightFilter } } }; + } + } + + leftFilter.Must.AddRange(rightFilter.Must); + leftFilter.MustNot.AddRange(rightFilter.MustNot); + + return leftFilter; + } + + private Filter TranslateOrElse(Expression left, Expression right) + { + var leftFilter = this.Translate(left); + var rightFilter = this.Translate(right); + + var result = new Filter(); + result.Should.AddRange(GetShouldConditions(leftFilter)); + result.Should.AddRange(GetShouldConditions(rightFilter)); + return result; + + static RepeatedField GetShouldConditions(Filter filter) + => filter switch + { + { Must.Count: 0, MustNot.Count: 0 } => filter.Should, + { Must.Count: 1, MustNot.Count: 0, Should.Count: 0 } => [filter.Must[0]], + { Must.Count: 0, MustNot.Count: 1, Should.Count: 0 } => [filter.MustNot[0]], + + _ => [new Condition { Filter = filter }] + }; + } + + private Filter TranslateNot(Expression expression) + { + // Special handling for !(a == b) and !(a != b) + if (expression is BinaryExpression { NodeType: ExpressionType.Equal or ExpressionType.NotEqual } binary) + { + return this.TranslateEqual(binary.Left, binary.Right, negated: binary.NodeType is ExpressionType.Equal); + } + + var filter = this.Translate(expression); + + switch (filter) + { + case 
{ Must.Count: 1, MustNot.Count: 0, Should.Count: 0 }: + filter.MustNot.Add(filter.Must[0]); + filter.Must.RemoveAt(0); + return filter; + + case { Must.Count: 0, MustNot.Count: 1, Should.Count: 0 }: + filter.Must.Add(filter.MustNot[0]); + filter.MustNot.RemoveAt(0); + return filter; + + case { Must.Count: 0, MustNot.Count: 0, Should.Count: > 0 }: + filter.MustNot.AddRange(filter.Should); + filter.Should.Clear(); + return filter; + + default: + return new Filter { MustNot = { new Condition { Filter = filter } } }; + } + } + + #endregion Logical operators + + private Filter TranslateMethodCall(MethodCallExpression methodCall) + => methodCall switch + { + // Enumerable.Contains() + { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains + when contains.Method.DeclaringType == typeof(Enumerable) + => this.TranslateContains(source, item), + + // List.Contains() + { + Method: + { + Name: nameof(Enumerable.Contains), + DeclaringType: { IsGenericType: true } declaringType + }, + Object: Expression source, + Arguments: [var item] + } when declaringType.GetGenericTypeDefinition() == typeof(List<>) + => this.TranslateContains(source, item), + + _ => throw new NotSupportedException($"Unsupported method call: {methodCall.Method.DeclaringType?.Name}.{methodCall.Method.Name}") + }; + + private Filter TranslateContains(Expression source, Expression item) + { + switch (source) + { + // Contains over field enumerable + case var _ when this.TryTranslateFieldAccess(source, out _): + // Oddly, in Qdrant, tag list contains is handled using a Match condition, just like equality. + return this.TranslateEqual(source, item); + + // Contains over inline enumerable + case NewArrayExpression newArray: + var elements = new object?[newArray.Expressions.Count]; + + for (var i = 0; i < newArray.Expressions.Count; i++) + { + if (!TryGetConstant(newArray.Expressions[i], out var elementValue)) + { + throw new NotSupportedException("Invalid element in array"); + } + + elements[i] = elementValue; + } + + return ProcessInlineEnumerable(elements, item); + + // Contains over captured enumerable (we inline) + case var _ when TryGetConstant(source, out var constantEnumerable) + && constantEnumerable is IEnumerable enumerable and not string: + return ProcessInlineEnumerable(enumerable, item); + + default: + throw new NotSupportedException("Unsupported Contains"); + } + + Filter ProcessInlineEnumerable(IEnumerable elements, Expression item) + { + if (!this.TryTranslateFieldAccess(item, out var storagePropertyName)) + { + throw new NotSupportedException("Unsupported item type in Contains"); + } + + if (item.Type == typeof(string)) + { + var strings = new RepeatedStrings(); + + foreach (var value in elements) + { + strings.Strings.Add(value is string or null + ? (string?)value + : throw new ArgumentException("Non-string element in string Contains array")); + } + + return new Filter { Must = { new Condition { Field = new FieldCondition { Key = storagePropertyName, Match = new Match { Keywords = strings } } } } }; + } + + if (item.Type == typeof(int)) + { + var ints = new RepeatedIntegers(); + + foreach (var value in elements) + { + ints.Integers.Add(value is int intValue + ? 
intValue + : throw new ArgumentException("Non-int element in string Contains array")); + } + + return new Filter { Must = { new Condition { Field = new FieldCondition { Key = storagePropertyName, Match = new Match { Integers = ints } } } } }; + } + + throw new NotSupportedException("Contains only supported over array of ints or strings"); + } + } + + private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] out string? storagePropertyName) + { + if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) + { + if (!this._storagePropertyNames.TryGetValue(memberExpression.Member.Name, out storagePropertyName)) + { + throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); + } + + return true; + } + + storagePropertyName = null; + return false; + } + + private static bool TryGetConstant(Expression expression, out object? constantValue) + { + switch (expression) + { + case ConstantExpression { Value: var v }: + constantValue = v; + return true; + + // This identifies compiler-generated closure types which contain captured variables. + case MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } + when constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) + && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true): + constantValue = fieldInfo.GetValue(constant.Value); + return true; + + default: + constantValue = null; + return false; + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs index f2b9c91179e9..ec14ef585dfb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs @@ -12,6 +12,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// internal static class QdrantVectorStoreCollectionSearchMapping { +#pragma warning disable CS0618 // Type or member is obsolete /// /// Build a Qdrant from the provided . /// @@ -19,16 +20,10 @@ internal static class QdrantVectorStoreCollectionSearchMapping /// A mapping of data model property names to the names under which they are stored. /// The Qdrant . /// Thrown when the provided filter contains unsupported types, values or unknown properties. - public static Filter BuildFilter(VectorSearchFilter? basicVectorSearchFilter, IReadOnlyDictionary storagePropertyNames) + public static Filter BuildFromLegacyFilter(VectorSearchFilter basicVectorSearchFilter, IReadOnlyDictionary storagePropertyNames) { var filter = new Filter(); - // Return an empty filter if no filter clauses are provided. - if (basicVectorSearchFilter?.FilterClauses is null) - { - return filter; - } - foreach (var filterClause in basicVectorSearchFilter.FilterClauses) { string fieldName; @@ -72,6 +67,7 @@ public static Filter BuildFilter(VectorSearchFilter? basicVectorSearchFilter, IR return filter; } +#pragma warning restore CS0618 // Type or member is obsolete /// /// Map the given to a . 
diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index 7dd77b76baff..e51ae549818a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; @@ -29,7 +30,7 @@ public sealed class QdrantVectorStoreRecordCollection : IVectorStoreRec ]; /// The default options for vector search. - private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); /// The name of this database for telemetry purposes. private const string DatabaseName = "Qdrant"; @@ -457,7 +458,7 @@ private async IAsyncEnumerable GetBatchByPointIdAsync( } /// - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); @@ -473,8 +474,16 @@ public async Task> VectorizedSearchAsync(T var internalOptions = options ?? s_defaultVectorSearchOptions; +#pragma warning disable CS0618 // Type or member is obsolete // Build filter object. - var filter = QdrantVectorStoreCollectionSearchMapping.BuildFilter(internalOptions.Filter, this._propertyReader.StoragePropertyNamesMap); + var filter = internalOptions switch + { + { Filter: not null, NewFilter: not null } => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), + { Filter: VectorSearchFilter legacyFilter } => QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(legacyFilter, this._propertyReader.StoragePropertyNamesMap), + { NewFilter: Expression> newFilter } => new QdrantFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), + _ => new Filter() + }; +#pragma warning restore CS0618 // Type or member is obsolete // Specify the vector name if named vectors are used. string? vectorName = null; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs new file mode 100644 index 000000000000..ec5bcd73514f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs @@ -0,0 +1,230 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Text; + +namespace Microsoft.SemanticKernel.Connectors.Redis; + +internal class RedisFilterTranslator +{ + private IReadOnlyDictionary _storagePropertyNames = null!; + private ParameterExpression _recordParameter = null!; + private readonly StringBuilder _filter = new(); + + internal string Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + { + Debug.Assert(this._filter.Length == 0); + + this._storagePropertyNames = storagePropertyNames; + + Debug.Assert(lambdaExpression.Parameters.Count == 1); + this._recordParameter = lambdaExpression.Parameters[0]; + + this.Translate(lambdaExpression.Body); + return this._filter.ToString(); + } + + private void Translate(Expression? node) + { + switch (node) + { + case BinaryExpression + { + NodeType: ExpressionType.Equal or ExpressionType.NotEqual + or ExpressionType.GreaterThan or ExpressionType.GreaterThanOrEqual + or ExpressionType.LessThan or ExpressionType.LessThanOrEqual + } binary: + this.TranslateEqualityComparison(binary); + return; + + case BinaryExpression { NodeType: ExpressionType.AndAlso } andAlso: + // https://redis.io/docs/latest/develop/interact/search-and-query/query/combined/#and + this._filter.Append('('); + this.Translate(andAlso.Left); + this._filter.Append(' '); + this.Translate(andAlso.Right); + this._filter.Append(')'); + return; + + case BinaryExpression { NodeType: ExpressionType.OrElse } orElse: + // https://redis.io/docs/latest/develop/interact/search-and-query/query/combined/#or + this._filter.Append('('); + this.Translate(orElse.Left); + this._filter.Append(" | "); + this.Translate(orElse.Right); + this._filter.Append(')'); + return; + + case UnaryExpression { NodeType: ExpressionType.Not } not: + this.TranslateNot(not.Operand); + return; + + // MemberExpression is generally handled within e.g. TranslateEqual; this is used to translate direct bool inside filter (e.g. 
Filter => r => r.Bool) + case MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _): + { + this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(true))); + return; + } + + case MethodCallExpression methodCall: + this.TranslateMethodCall(methodCall); + return; + + default: + throw new NotSupportedException("Redis does not support the following NodeType in filters: " + node?.NodeType); + } + } + + private void TranslateEqualityComparison(BinaryExpression binary) + { + if (!TryProcessEqualityComparison(binary.Left, binary.Right) && !TryProcessEqualityComparison(binary.Right, binary.Left)) + { + throw new NotSupportedException("Binary expression not supported by Redis"); + } + + bool TryProcessEqualityComparison(Expression first, Expression second) + { + // TODO: Nullable + if (this.TryTranslateFieldAccess(first, out var storagePropertyName) + && TryGetConstant(second, out var constantValue)) + { + // Numeric negation has a special syntax (!=), for the rest we nest in a NOT + if (binary.NodeType is ExpressionType.NotEqual && constantValue is not int or long or float or double) + { + this.TranslateNot(Expression.Equal(first, second)); + return true; + } + + // https://redis.io/docs/latest/develop/interact/search-and-query/query/exact-match + this._filter.Append('@').Append(storagePropertyName); + + this._filter.Append( + binary.NodeType switch + { + ExpressionType.Equal when constantValue is int or long or float or double => $" == {constantValue}", + ExpressionType.Equal when constantValue is string stringValue +#if NETSTANDARD2_0 + => $$""":{"{{stringValue.Replace("\"", "\"\"")}}"}""", +#else + => $$""":{"{{stringValue.Replace("\"", "\\\"", StringComparison.Ordinal)}}"}""", +#endif + ExpressionType.Equal when constantValue is null => throw new NotSupportedException("Null value type not supported"), // TODO + + ExpressionType.NotEqual when constantValue is int or long or float or double => $" != {constantValue}", + ExpressionType.NotEqual => throw new InvalidOperationException("Unreachable"), // Handled above + + ExpressionType.GreaterThan => $" > {constantValue}", + ExpressionType.GreaterThanOrEqual => $" >= {constantValue}", + ExpressionType.LessThan => $" < {constantValue}", + ExpressionType.LessThanOrEqual => $" <= {constantValue}", + + _ => throw new InvalidOperationException("Unsupported equality/comparison") + }); + + return true; + } + + return false; + } + } + + private void TranslateNot(Expression expression) + { + // https://redis.io/docs/latest/develop/interact/search-and-query/query/combined/#not + this._filter.Append("(-"); + this.Translate(expression); + this._filter.Append(')'); + } + + private void TranslateMethodCall(MethodCallExpression methodCall) + { + switch (methodCall) + { + // Enumerable.Contains() + case { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains + when contains.Method.DeclaringType == typeof(Enumerable): + this.TranslateContains(source, item); + return; + + // List.Contains() + case + { + Method: + { + Name: nameof(Enumerable.Contains), + DeclaringType: { IsGenericType: true } declaringType + }, + Object: Expression source, + Arguments: [var item] + } when declaringType.GetGenericTypeDefinition() == typeof(List<>): + this.TranslateContains(source, item); + return; + + default: + throw new NotSupportedException($"Unsupported method call: {methodCall.Method.DeclaringType?.Name}.{methodCall.Method.Name}"); + } + } + + private void 
TranslateContains(Expression source, Expression item) + { + // Contains over tag field + if (this.TryTranslateFieldAccess(source, out var storagePropertyName) + && TryGetConstant(item, out var itemConstant) + && itemConstant is string stringConstant) + { + this._filter + .Append('@') + .Append(storagePropertyName) + .Append(":{") + .Append(stringConstant) + .Append('}'); + return; + } + + throw new NotSupportedException("Contains supported only over tag field"); + } + + private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] out string? storagePropertyName) + { + if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) + { + if (!this._storagePropertyNames.TryGetValue(memberExpression.Member.Name, out storagePropertyName)) + { + throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); + } + + return true; + } + + storagePropertyName = null; + return false; + } + + private static bool TryGetConstant(Expression expression, out object? constantValue) + { + switch (expression) + { + case ConstantExpression { Value: var v }: + constantValue = v; + return true; + + // This identifies compiler-generated closure types which contain captured variables. + case MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } + when constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) + && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true): + constantValue = fieldInfo.GetValue(constant.Value); + return true; + + default: + constantValue = null; + return false; + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index 41971c5adb86..2a5d324e0171 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -61,7 +61,7 @@ public sealed class RedisHashSetVectorStoreRecordCollection : IVectorSt ]; /// The default options for vector search. - private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); /// The Redis database to read/write records from. private readonly IDatabase _database; @@ -300,6 +300,7 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancella // Upsert. var maybePrefixedKey = this.PrefixKeyIfNeeded(redisHashSetRecord.Key); + await this.RunOperationAsync( "HSET", () => this._database @@ -328,7 +329,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable reco } /// - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index f8afa3ed875e..0d5f74d0821a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -44,7 +44,7 @@ public sealed class RedisJsonVectorStoreRecordCollection : IVectorStore ]; /// The default options for vector search. - private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); /// The Redis database to read/write records from. private readonly IDatabase _database; @@ -374,7 +374,7 @@ await this.RunOperationAsync( } /// - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs index d6603ca1634c..ea78a9e798c0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Runtime.InteropServices; using Microsoft.Extensions.VectorData; using NRedisStack.Search; @@ -50,14 +51,24 @@ public static byte[] ValidateVectorAndConvertToBytes(TVector vector, st /// The name of the first vector property in the data model. /// The set of fields to limit the results to. Null for all. /// The . - public static Query BuildQuery(byte[] vectorBytes, VectorSearchOptions options, IReadOnlyDictionary storagePropertyNames, string firstVectorPropertyName, string[]? selectFields) + public static Query BuildQuery(byte[] vectorBytes, VectorSearchOptions options, IReadOnlyDictionary storagePropertyNames, string firstVectorPropertyName, string[]? selectFields) { // Resolve options. var vectorPropertyName = ResolveVectorFieldName(options.VectorPropertyName, storagePropertyNames, firstVectorPropertyName); // Build search query. 
var redisLimit = options.Top + options.Skip; - var filter = RedisVectorStoreCollectionSearchMapping.BuildFilter(options.Filter, storagePropertyNames); + +#pragma warning disable CS0618 // Type or member is obsolete + var filter = options switch + { + { Filter: not null, NewFilter: not null } => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), + { Filter: VectorSearchFilter legacyFilter } => BuildLegacyFilter(legacyFilter, storagePropertyNames), + { NewFilter: Expression> newFilter } => new RedisFilterTranslator().Translate(newFilter, storagePropertyNames), + _ => "*" + }; +#pragma warning restore CS0618 // Type or member is obsolete + var query = new Query($"{filter}=>[KNN {redisLimit} @{vectorPropertyName} $embedding AS vector_score]") .AddParam("embedding", vectorBytes) .SetSortBy("vector_score") @@ -80,13 +91,9 @@ public static Query BuildQuery(byte[] vectorBytes, VectorSearchOptions options, /// A mapping of data model property names to the names under which they are stored. /// The Redis filter string. /// Thrown when a provided filter value is not supported. - public static string BuildFilter(VectorSearchFilter? basicVectorSearchFilter, IReadOnlyDictionary storagePropertyNames) +#pragma warning disable CS0618 // Type or member is obsolete + public static string BuildLegacyFilter(VectorSearchFilter basicVectorSearchFilter, IReadOnlyDictionary storagePropertyNames) { - if (basicVectorSearchFilter == null) - { - return "*"; - } - var filterClauses = basicVectorSearchFilter.FilterClauses.Select(clause => { if (clause is EqualToFilterClause equalityFilterClause) @@ -116,6 +123,7 @@ public static string BuildFilter(VectorSearchFilter? basicVectorSearchFilter, IR return $"({string.Join(" ", filterClauses)})"; } +#pragma warning restore CS0618 // Type or member is obsolete /// /// Resolve the distance function to use for a search by checking the distance function of the vector property specified in options @@ -126,7 +134,7 @@ public static string BuildFilter(VectorSearchFilter? basicVectorSearchFilter, IR /// The first vector property in the record. /// The distance function for the vector we want to search. /// Thrown when a user asked for a vector property that doesn't exist on the record. - public static string ResolveDistanceFunction(VectorSearchOptions options, IReadOnlyList vectorProperties, VectorStoreRecordVectorProperty firstVectorProperty) + public static string ResolveDistanceFunction(VectorSearchOptions options, IReadOnlyList vectorProperties, VectorStoreRecordVectorProperty firstVectorProperty) { if (options.VectorPropertyName == null || vectorProperties.Count == 1) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs new file mode 100644 index 000000000000..2cb6b16fc8cd --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs @@ -0,0 +1,359 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Text; + +namespace Microsoft.SemanticKernel.Connectors.Sqlite; + +internal class SqliteFilterTranslator +{ + private IReadOnlyDictionary _storagePropertyNames = null!; + private ParameterExpression _recordParameter = null!; + + private readonly Dictionary _parameters = new(); + + private readonly StringBuilder _sql = new(); + + internal (string Clause, Dictionary) Translate(IReadOnlyDictionary storagePropertyNames, LambdaExpression lambdaExpression) + { + Debug.Assert(this._sql.Length == 0); + + this._storagePropertyNames = storagePropertyNames; + + Debug.Assert(lambdaExpression.Parameters.Count == 1); + this._recordParameter = lambdaExpression.Parameters[0]; + + this.Translate(lambdaExpression.Body); + return (this._sql.ToString(), this._parameters); + } + + private void Translate(Expression? node) + { + switch (node) + { + case BinaryExpression binary: + this.TranslateBinary(binary); + return; + + case ConstantExpression constant: + this.TranslateConstant(constant); + return; + + case MemberExpression member: + this.TranslateMember(member); + return; + + case MethodCallExpression methodCall: + this.TranslateMethodCall(methodCall); + return; + + case UnaryExpression unary: + this.TranslateUnary(unary); + return; + + default: + throw new NotSupportedException("Unsupported NodeType in filter: " + node?.NodeType); + } + } + + private void TranslateBinary(BinaryExpression binary) + { + // Special handling for null comparisons + switch (binary.NodeType) + { + case ExpressionType.Equal when IsNull(binary.Right): + this._sql.Append('('); + this.Translate(binary.Left); + this._sql.Append(" IS NULL)"); + return; + case ExpressionType.NotEqual when IsNull(binary.Right): + this._sql.Append('('); + this.Translate(binary.Left); + this._sql.Append(" IS NOT NULL)"); + return; + + case ExpressionType.Equal when IsNull(binary.Left): + this._sql.Append('('); + this.Translate(binary.Right); + this._sql.Append(" IS NULL)"); + return; + case ExpressionType.NotEqual when IsNull(binary.Left): + this._sql.Append('('); + this.Translate(binary.Right); + this._sql.Append(" IS NOT NULL)"); + return; + } + + this._sql.Append('('); + this.Translate(binary.Left); + + this._sql.Append(binary.NodeType switch + { + ExpressionType.Equal => " = ", + ExpressionType.NotEqual => " <> ", + + ExpressionType.GreaterThan => " > ", + ExpressionType.GreaterThanOrEqual => " >= ", + ExpressionType.LessThan => " < ", + ExpressionType.LessThanOrEqual => " <= ", + + ExpressionType.AndAlso => " AND ", + ExpressionType.OrElse => " OR ", + + _ => throw new NotSupportedException("Unsupported binary expression node type: " + binary.NodeType) + }); + + this.Translate(binary.Right); + this._sql.Append(')'); + + static bool IsNull(Expression expression) + => expression is ConstantExpression { Value: null } + || (TryGetCapturedValue(expression, out _, out var capturedValue) && capturedValue is null); + } + + private void TranslateConstant(ConstantExpression constant) + => this.GenerateLiteral(constant.Value); + + private void GenerateLiteral(object? 
value) + { + // TODO: Nullable + switch (value) + { + case byte b: + this._sql.Append(b); + return; + case short s: + this._sql.Append(s); + return; + case int i: + this._sql.Append(i); + return; + case long l: + this._sql.Append(l); + return; + + case string s: + this._sql.Append('\'').Append(s.Replace("'", "''")).Append('\''); + return; + case bool b: + this._sql.Append(b ? "TRUE" : "FALSE"); + return; + case Guid g: + this._sql.Append('\'').Append(g.ToString()).Append('\''); + return; + + case DateTime: + case DateTimeOffset: + throw new NotImplementedException(); + + case Array: + throw new NotImplementedException(); + + case null: + this._sql.Append("NULL"); + return; + + default: + throw new NotSupportedException("Unsupported constant type: " + value.GetType().Name); + } + } + + private void TranslateMember(MemberExpression memberExpression) + { + switch (memberExpression) + { + case var _ when this.TryGetColumn(memberExpression, out var column): + this._sql.Append('"').Append(column).Append('"'); + return; + + // Identify captured lambda variables, translate to PostgreSQL parameters ($1, $2...) + case var _ when TryGetCapturedValue(memberExpression, out var name, out var value): + // For null values, simply inline rather than parameterize; parameterized NULLs require setting NpgsqlDbType which is a bit more complicated, + // plus in any case equality with NULL requires different SQL (x IS NULL rather than x = y) + if (value is null) + { + this._sql.Append("NULL"); + } + else + { + // Duplicate parameter name, create a new parameter with a different name + // TODO: Share the same parameter when it references the same captured value + if (this._parameters.ContainsKey(name)) + { + var baseName = name; + var i = 0; + do + { + name = baseName + (i++); + } while (this._parameters.ContainsKey(name)); + } + + this._parameters.Add(name, value); + this._sql.Append('@').Append(name); + } + return; + + default: + throw new NotSupportedException($"Member access for '{memberExpression.Member.Name}' is unsupported - only member access over the filter parameter are supported"); + } + } + + private void TranslateMethodCall(MethodCallExpression methodCall) + { + switch (methodCall) + { + // Enumerable.Contains() + case { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains + when contains.Method.DeclaringType == typeof(Enumerable): + this.TranslateContains(source, item); + return; + + // List.Contains() + case + { + Method: + { + Name: nameof(Enumerable.Contains), + DeclaringType: { IsGenericType: true } declaringType + }, + Object: Expression source, + Arguments: [var item] + } when declaringType.GetGenericTypeDefinition() == typeof(List<>): + this.TranslateContains(source, item); + return; + + default: + throw new NotSupportedException($"Unsupported method call: {methodCall.Method.DeclaringType?.Name}.{methodCall.Method.Name}"); + } + } + + private void TranslateContains(Expression source, Expression item) + { + switch (source) + { + // TODO: support Contains over array fields (#10343) + // Contains over array column (r => r.Strings.Contains("foo")) + case var _ when this.TryGetColumn(source, out _): + goto default; + + // Contains over inline array (r => new[] { "foo", "bar" }.Contains(r.String)) + case NewArrayExpression newArray: + { + this.Translate(item); + this._sql.Append(" IN ("); + + var isFirst = true; + foreach (var element in newArray.Expressions) + { + if (isFirst) + { + isFirst = false; + } + else + { + this._sql.Append(", "); + } + + 
this.Translate(element); + } + + this._sql.Append(')'); + return; + } + + // Contains over captured array (r => arrayLocalVariable.Contains(r.String)) + case var _ when TryGetCapturedValue(source, out _, out var value) && value is IEnumerable elements: + { + this.Translate(item); + this._sql.Append(" IN ("); + + var isFirst = true; + foreach (var element in elements) + { + if (isFirst) + { + isFirst = false; + } + else + { + this._sql.Append(", "); + } + + this.GenerateLiteral(element); + } + + this._sql.Append(')'); + return; + } + + default: + throw new NotSupportedException("Unsupported Contains expression"); + } + } + + private void TranslateUnary(UnaryExpression unary) + { + switch (unary.NodeType) + { + case ExpressionType.Not: + // Special handling for !(a == b) and !(a != b) + if (unary.Operand is BinaryExpression { NodeType: ExpressionType.Equal or ExpressionType.NotEqual } binary) + { + this.TranslateBinary( + Expression.MakeBinary( + binary.NodeType is ExpressionType.Equal ? ExpressionType.NotEqual : ExpressionType.Equal, + binary.Left, + binary.Right)); + return; + } + + this._sql.Append("(NOT "); + this.Translate(unary.Operand); + this._sql.Append(')'); + return; + + default: + throw new NotSupportedException("Unsupported unary expression node type: " + unary.NodeType); + } + } + + private bool TryGetColumn(Expression expression, [NotNullWhen(true)] out string? column) + { + if (expression is MemberExpression member && member.Expression == this._recordParameter) + { + if (!this._storagePropertyNames.TryGetValue(member.Member.Name, out column)) + { + throw new InvalidOperationException($"Property name '{member.Member.Name}' provided as part of the filter clause is not a valid property name."); + } + + return true; + } + + column = null; + return false; + } + + private static bool TryGetCapturedValue(Expression expression, [NotNullWhen(true)] out string? name, out object? value) + { + if (expression is MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } + && constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) + && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true)) + { + name = fieldInfo.Name; + value = fieldInfo.GetValue(constant.Value); + return true; + } + + name = null; + value = null; + return false; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs index 028a838487d1..837e3044ddc7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Data.Common; +using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Text; @@ -159,6 +160,8 @@ public DbCommand BuildSelectLeftJoinCommand( IReadOnlyList leftTablePropertyNames, IReadOnlyList rightTablePropertyNames, List conditions, + string? extraWhereFilter = null, + Dictionary? extraParameters = null, string? orderByPropertyName = null) { var builder = new StringBuilder(); @@ -169,7 +172,7 @@ .. leftTablePropertyNames.Select(property => $"{leftTable}.{property}"), .. 
rightTablePropertyNames.Select(property => $"{rightTable}.{property}"), ]; - var (command, whereClause) = this.GetCommandWithWhereClause(conditions); + var (command, whereClause) = this.GetCommandWithWhereClause(conditions, extraWhereFilter, extraParameters); builder.AppendLine($"SELECT {string.Join(", ", propertyNames)}"); builder.AppendLine($"FROM {leftTable} "); @@ -238,7 +241,10 @@ private static string GetColumnDefinition(SqliteColumn column) return string.Join(" ", columnDefinitionParts); } - private (DbCommand Command, string WhereClause) GetCommandWithWhereClause(List conditions) + private (DbCommand Command, string WhereClause) GetCommandWithWhereClause( + List conditions, + string? extraWhereFilter = null, + Dictionary? extraParameters = null) { const string WhereClauseOperator = " AND "; @@ -263,6 +269,22 @@ private static string GetColumnDefinition(SqliteColumn column) var whereClause = string.Join(WhereClauseOperator, whereClauseParts); + if (extraWhereFilter is not null) + { + if (conditions.Count > 0) + { + whereClause += " AND "; + } + + whereClause += extraWhereFilter; + + Debug.Assert(extraParameters is not null, "extraParameters must be provided when extraWhereFilter is provided."); + foreach (var p in extraParameters) + { + command.Parameters.Add(new SqliteParameter(p.Key, p.Value)); + } + } + return (command, whereClause); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 08c976abf43f..8ae095dd3bf0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -34,7 +34,7 @@ public sealed class SqliteVectorStoreRecordCollection : private readonly IVectorStoreRecordMapper> _mapper; /// The default options for vector search. - private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); /// Command builder for queries in SQLite database. private readonly SqliteVectorStoreCollectionCommandBuilder _commandBuilder; @@ -154,7 +154,7 @@ public async Task DeleteCollectionAsync(CancellationToken cancellationToken = de } /// - public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { const string LimitPropertyName = "k"; @@ -189,15 +189,35 @@ public Task> VectorizedSearchAsync(TVector new SqliteWhereEqualsCondition(LimitPropertyName, limit) }; - var filterConditions = this.GetFilterConditions(searchOptions.Filter, this._dataTableName); +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + string? extraWhereFilter = null; + Dictionary? 
extraParameters = null; - if (filterConditions is { Count: > 0 }) + if (searchOptions.Filter is not null) { - conditions.AddRange(filterConditions); + if (searchOptions.NewFilter is not null) + { + throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"); + } + + // Old filter, we translate it to a list of SqliteWhereCondition, and merge these into the conditions we already have + var filterConditions = this.GetFilterConditions(searchOptions.Filter, this._dataTableName); + + if (filterConditions is { Count: > 0 }) + { + conditions.AddRange(filterConditions); + } + } + else if (searchOptions.NewFilter is not null) + { + (extraWhereFilter, extraParameters) = new SqliteFilterTranslator().Translate(this._propertyReader.StoragePropertyNamesMap, searchOptions.NewFilter); } +#pragma warning restore CS0618 // VectorSearchFilter is obsolete var vectorSearchResults = new VectorSearchResults(this.EnumerateAndMapSearchResultsAsync( conditions, + extraWhereFilter, + extraParameters, searchOptions, cancellationToken)); @@ -288,7 +308,9 @@ public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancell private async IAsyncEnumerable> EnumerateAndMapSearchResultsAsync( List conditions, - VectorSearchOptions searchOptions, + string? extraWhereFilter, + Dictionary? extraParameters, + VectorSearchOptions searchOptions, [EnumeratorCancellation] CancellationToken cancellationToken) { const string OperationName = "VectorizedSearch"; @@ -311,6 +333,8 @@ private async IAsyncEnumerable> EnumerateAndMapSearc leftTableProperties, this._dataTableStoragePropertyNames.Value, conditions, + extraWhereFilter, + extraParameters, DistancePropertyName); using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); @@ -670,6 +694,7 @@ private async Task RunOperationAsync(string operationName, Func> o return new SqliteVectorStoreRecordMapper(this._propertyReader); } +#pragma warning disable CS0618 // VectorSearchFilter is obsolete private List? GetFilterConditions(VectorSearchFilter? filter, string? tableName = null) { var filterClauses = filter?.FilterClauses.ToList(); @@ -706,6 +731,7 @@ private async Task RunOperationAsync(string operationName, Func> o return conditions; } +#pragma warning restore CS0618 // VectorSearchFilter is obsolete /// /// Gets vector table name. diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs new file mode 100644 index 000000000000..2e4be5391159 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs @@ -0,0 +1,260 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Text; +using System.Text.Json; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +// https://weaviate.io/developers/weaviate/api/graphql/filters#filter-structure +internal class WeaviateFilterTranslator +{ + private IReadOnlyDictionary _storagePropertyNames = null!; + private ParameterExpression _recordParameter = null!; + private readonly StringBuilder _filter = new(); + + internal string Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + { + Debug.Assert(this._filter.Length == 0); + + this._storagePropertyNames = storagePropertyNames; + + Debug.Assert(lambdaExpression.Parameters.Count == 1); + this._recordParameter = lambdaExpression.Parameters[0]; + + this.Translate(lambdaExpression.Body); + return this._filter.ToString(); + } + + private void Translate(Expression? node) + { + switch (node) + { + case BinaryExpression + { + NodeType: ExpressionType.Equal or ExpressionType.NotEqual + or ExpressionType.GreaterThan or ExpressionType.GreaterThanOrEqual + or ExpressionType.LessThan or ExpressionType.LessThanOrEqual + } binary: + this.TranslateEqualityComparison(binary); + return; + + case BinaryExpression { NodeType: ExpressionType.AndAlso } andAlso: + this._filter.Append("{ operator: And, operands: ["); + this.Translate(andAlso.Left); + this._filter.Append(", "); + this.Translate(andAlso.Right); + this._filter.Append("] }"); + return; + + case BinaryExpression { NodeType: ExpressionType.OrElse } orElse: + this._filter.Append("{ operator: Or, operands: ["); + this.Translate(orElse.Left); + this._filter.Append(", "); + this.Translate(orElse.Right); + this._filter.Append("] }"); + return; + + case UnaryExpression { NodeType: ExpressionType.Not } not: + { + switch (not.Operand) + { + // Special handling for !(a == b) and !(a != b) + case BinaryExpression { NodeType: ExpressionType.Equal or ExpressionType.NotEqual } binary: + this.TranslateEqualityComparison( + Expression.MakeBinary( + binary.NodeType is ExpressionType.Equal ? ExpressionType.NotEqual : ExpressionType.Equal, + binary.Left, + binary.Right)); + return; + + // Not over bool field (Filter => r => !r.Bool) + case MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _): + this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(false))); + return; + + default: + throw new NotSupportedException("Weaviate does not support the NOT operator (see https://github.com/weaviate/weaviate/issues/3683)"); + } + } + + // MemberExpression is generally handled within e.g. TranslateEqual; this is used to translate direct bool inside filter (e.g. 
Filter => r => r.Bool) + case MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _): + this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(true))); + return; + + case MethodCallExpression methodCall: + this.TranslateMethodCall(methodCall); + return; + + default: + throw new NotSupportedException("The following NodeType is unsupported: " + node?.NodeType); + } + } + + private void TranslateEqualityComparison(BinaryExpression binary) + { + if ((this.TryTranslateFieldAccess(binary.Left, out var storagePropertyName) && TryGetConstant(binary.Right, out var value)) + || (this.TryTranslateFieldAccess(binary.Right, out storagePropertyName) && TryGetConstant(binary.Left, out value))) + { + // { path: ["intPropName"], operator: Equal, ValueInt: 8 } + this._filter + .Append("{ path: [\"") + .Append(JsonEncodedText.Encode(storagePropertyName)) + .Append("\"], operator: "); + + // Special handling for null comparisons + if (value is null) + { + if (binary.NodeType is ExpressionType.Equal or ExpressionType.NotEqual) + { + this._filter + .Append("IsNull, valueBoolean: ") + .Append(binary.NodeType is ExpressionType.Equal ? "true" : "false") + .Append(" }"); + return; + } + + throw new NotSupportedException("null value supported only with equality/inequality checks"); + } + + // Operator + this._filter.Append(binary.NodeType switch + { + ExpressionType.Equal => "Equal", + ExpressionType.NotEqual => "NotEqual", + + ExpressionType.GreaterThan => "GreaterThan", + ExpressionType.GreaterThanOrEqual => "GreaterThanEqual", + ExpressionType.LessThan => "LessThan", + ExpressionType.LessThanOrEqual => "LessThanEqual", + + _ => throw new UnreachableException() + }); + + this._filter.Append(", "); + + // FieldType + var type = value.GetType(); + if (Nullable.GetUnderlyingType(type) is Type underlying) + { + type = underlying; + } + + this._filter.Append(value.GetType() switch + { + Type t when t == typeof(int) || t == typeof(long) || t == typeof(short) || t == typeof(byte) => "valueInt", + Type t when t == typeof(bool) => "valueBoolean", + Type t when t == typeof(string) || t == typeof(Guid) => "valueText", + Type t when t == typeof(float) || t == typeof(double) || t == typeof(decimal) => "valueNumber", + Type t when t == typeof(DateTimeOffset) => "valueDate", + + _ => throw new NotSupportedException($"Unsupported value type {type.FullName} in filter.") + }); + + this._filter.Append(": "); + + // Value + this._filter.Append(JsonSerializer.Serialize(value)); + + this._filter.Append('}'); + + return; + } + + throw new NotSupportedException("Invalid equality/comparison"); + } + + private void TranslateMethodCall(MethodCallExpression methodCall) + { + switch (methodCall) + { + // Enumerable.Contains() + case { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains + when contains.Method.DeclaringType == typeof(Enumerable): + this.TranslateContains(source, item); + return; + + // List.Contains() + case + { + Method: + { + Name: nameof(Enumerable.Contains), + DeclaringType: { IsGenericType: true } declaringType + }, + Object: Expression source, + Arguments: [var item] + } when declaringType.GetGenericTypeDefinition() == typeof(List<>): + this.TranslateContains(source, item); + return; + + default: + throw new NotSupportedException($"Unsupported method call: {methodCall.Method.DeclaringType?.Name}.{methodCall.Method.Name}"); + } + } + + private void TranslateContains(Expression source, Expression item) + { + // 
Contains over array + // { path: ["stringArrayPropName"], operator: ContainsAny, valueText: ["foo"] } + if (this.TryTranslateFieldAccess(source, out var storagePropertyName) + && TryGetConstant(item, out var itemConstant) + && itemConstant is string stringConstant) + { + this._filter + .Append("{ path: [\"") + .Append(JsonEncodedText.Encode(storagePropertyName)) + .Append("\"], operator: ContainsAny, valueText: [") + .Append(JsonEncodedText.Encode(stringConstant)) + .Append("]}"); + return; + } + + throw new NotSupportedException("Contains supported only over tag field"); + } + + private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] out string? storagePropertyName) + { + if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) + { + if (!this._storagePropertyNames.TryGetValue(memberExpression.Member.Name, out storagePropertyName)) + { + throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); + } + + return true; + } + + storagePropertyName = null; + return false; + } + + private static bool TryGetConstant(Expression expression, out object? constantValue) + { + switch (expression) + { + case ConstantExpression { Value: var v }: + constantValue = v; + return true; + + // This identifies compiler-generated closure types which contain captured variables. + case MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } + when constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) + && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true): + constantValue = fieldInfo.GetValue(constant.Value); + return true; + + default: + constantValue = null; + return false; + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index a4ba633535a7..fe8e965f67e3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -9,7 +9,6 @@ using System.Runtime.CompilerServices; using System.Text.Json; using System.Text.Json.Nodes; -using System.Text.Json.Serialization; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; @@ -75,7 +74,6 @@ public sealed class WeaviateVectorStoreRecordCollection : IVectorStoreR private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, Converters = { new WeaviateDateTimeOffsetConverter(), @@ -84,7 +82,7 @@ public sealed class WeaviateVectorStoreRecordCollection : IVectorStoreR }; /// The default options for vector search. - private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); /// that is used to interact with Weaviate API. private readonly HttpClient _httpClient; @@ -335,7 +333,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable record /// public async Task> VectorizedSearchAsync( TVector vector, - VectorSearchOptions? options = null, + VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { const string OperationName = "VectorSearch"; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs index 397af63763a6..e665e7e85e08 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Text.Json; using Microsoft.Extensions.VectorData; @@ -17,13 +18,13 @@ internal static class WeaviateVectorStoreRecordCollectionQueryBuilder /// Builds Weaviate search query. /// More information here: . /// - public static string BuildSearchQuery( + public static string BuildSearchQuery( TVector vector, string collectionName, string vectorPropertyName, string keyPropertyName, JsonSerializerOptions jsonSerializerOptions, - VectorSearchOptions searchOptions, + VectorSearchOptions searchOptions, IReadOnlyDictionary storagePropertyNames, IReadOnlyList vectorPropertyStorageNames, IReadOnlyList dataPropertyStorageNames) @@ -32,11 +33,19 @@ public static string BuildSearchQuery( $"vectors {{ {string.Join(" ", vectorPropertyStorageNames)} }}" : string.Empty; - var filter = BuildFilter( - searchOptions.Filter, - jsonSerializerOptions, - keyPropertyName, - storagePropertyNames); +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + var filter = searchOptions switch + { + { Filter: not null, NewFilter: not null } => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), + { Filter: VectorSearchFilter legacyFilter } => BuildLegacyFilter( + legacyFilter, + jsonSerializerOptions, + keyPropertyName, + storagePropertyNames), + { NewFilter: Expression> newFilter } => new WeaviateFilterTranslator().Translate(newFilter, storagePropertyNames), + _ => null + }; +#pragma warning restore CS0618 var vectorArray = JsonSerializer.Serialize(vector, jsonSerializerOptions); @@ -46,7 +55,7 @@ public static string BuildSearchQuery( {{collectionName}} ( limit: {{searchOptions.Top}} offset: {{searchOptions.Skip}} - {{filter}} + {{(filter is null ? "" : "where: " + filter)}} nearVector: { targetVectors: ["{{vectorPropertyName}}"] vector: {{vectorArray}} @@ -66,11 +75,12 @@ public static string BuildSearchQuery( #region private +#pragma warning disable CS0618 // Type or member is obsolete /// /// Builds filter for Weaviate search query. /// More information here: . /// - private static string BuildFilter( + private static string BuildLegacyFilter( VectorSearchFilter? vectorSearchFilter, JsonSerializerOptions jsonSerializerOptions, string keyPropertyName, @@ -134,8 +144,9 @@ private static string BuildFilter( operands.Add(operand); } - return $$"""where: { operator: And, operands: [{{string.Join(", ", operands)}}] }"""; + return $$"""{ operator: And, operands: [{{string.Join(", ", operands)}}] }"""; } +#pragma warning restore CS0618 // Type or member is obsolete /// /// Gets filter value type. 
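The WeaviateFilterTranslator added above produces the GraphQL where-clause string that BuildSearchQuery now splices into the Get query, prefixing it with "where: " only when a filter is present. A rough sketch of the output for a simple conjunction, assuming a hypothetical Hotel record whose HotelName and Stars properties map to hotelName and stars (illustrative names, not part of the patch):

    // Illustrative predicate and storage-name map.
    Expression<Func<Hotel, bool>> predicate = r => r.HotelName == "Alpine" && r.Stars >= 4;
    var storageNames = new Dictionary<string, string> { ["HotelName"] = "hotelName", ["Stars"] = "stars" };
    string where = new WeaviateFilterTranslator().Translate(predicate, storageNames);
    // where is approximately:
    // { operator: And, operands: [{ path: ["hotelName"], operator: Equal, valueText: "Alpine"},
    //   { path: ["stars"], operator: GreaterThanEqual, valueInt: 4}] }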
diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs index 8242333ecea5..cea02dee086c 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs @@ -9,6 +9,8 @@ namespace SemanticKernel.Connectors.MongoDB.UnitTests; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + /// /// Unit tests for class. /// @@ -20,32 +22,6 @@ public sealed class MongoDBVectorStoreCollectionSearchMappingTests ["Property2"] = "property_2", }; - [Fact] - public void BuildFilterWithNullVectorSearchFilterReturnsNull() - { - // Arrange - VectorSearchFilter? vectorSearchFilter = null; - - // Act - var filter = MongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._storagePropertyNames); - - // Assert - Assert.Null(filter); - } - - [Fact] - public void BuildFilterWithoutFilterClausesReturnsNull() - { - // Arrange - VectorSearchFilter vectorSearchFilter = new(); - - // Act - var filter = MongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._storagePropertyNames); - - // Assert - Assert.Null(filter); - } - [Fact] public void BuildFilterThrowsExceptionWithUnsupportedFilterClause() { @@ -53,7 +29,7 @@ public void BuildFilterThrowsExceptionWithUnsupportedFilterClause() var vectorSearchFilter = new VectorSearchFilter().AnyTagEqualTo("NonExistentProperty", "TestValue"); // Act & Assert - Assert.Throws(() => MongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._storagePropertyNames)); + Assert.Throws(() => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(vectorSearchFilter, this._storagePropertyNames)); } [Fact] @@ -63,7 +39,7 @@ public void BuildFilterThrowsExceptionWithNonExistentPropertyName() var vectorSearchFilter = new VectorSearchFilter().EqualTo("NonExistentProperty", "TestValue"); // Act & Assert - Assert.Throws(() => MongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._storagePropertyNames)); + Assert.Throws(() => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(vectorSearchFilter, this._storagePropertyNames)); } [Fact] @@ -75,7 +51,7 @@ public void BuildFilterThrowsExceptionWithMultipleFilterClausesOfSameType() .EqualTo("Property1", "TestValue2"); // Act & Assert - Assert.Throws(() => MongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._storagePropertyNames)); + Assert.Throws(() => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(vectorSearchFilter, this._storagePropertyNames)); } [Fact] @@ -86,7 +62,7 @@ public void BuilderFilterByDefaultReturnsValidFilter() var vectorSearchFilter = new VectorSearchFilter().EqualTo("Property1", "TestValue1"); // Act - var filter = MongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._storagePropertyNames); + var filter = MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(vectorSearchFilter, this._storagePropertyNames); Assert.Equal(filter.ToJson(), expectedFilter.ToJson()); } diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs index 26a9b9fb00b7..7fa33bbd9967 100644 --- 
a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs @@ -13,6 +13,7 @@ using MongoDB.Driver; using Moq; using Xunit; +using MEVD = Microsoft.Extensions.VectorData; namespace SemanticKernel.Connectors.MongoDB.UnitTests; @@ -639,7 +640,7 @@ public async Task VectorizedSearchThrowsExceptionWithNonExistentVectorPropertyNa this._mockMongoDatabase.Object, "collection"); - var options = new VectorSearchOptions { VectorPropertyName = "non-existent-property" }; + var options = new MEVD.VectorSearchOptions { VectorPropertyName = "non-existent-property" }; // Act & Assert await Assert.ThrowsAsync(async () => await (await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), options)).Results.FirstOrDefaultAsync()); diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj b/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj index 68fbec524a28..0f884f0df59c 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj +++ b/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj @@ -39,4 +39,10 @@ + + + + + + diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs index 675843a78c18..e1958f934c5d 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs @@ -366,57 +366,4 @@ public void TestBuildDeleteBatchCommand() // Output this._output.WriteLine(cmdInfo.CommandText); } - - [Fact] - public void TestBuildGetNearestMatchCommand() - { - // Arrange - var builder = new PostgresVectorStoreCollectionSqlBuilder(); - - var vectorProperty = new VectorStoreRecordVectorProperty("embedding1", typeof(ReadOnlyMemory)) - { - Dimensions = 10, - IndexKind = "hnsw", - }; - - var recordDefinition = new VectorStoreRecordDefinition() - { - Properties = [ - new VectorStoreRecordKeyProperty("id", typeof(long)), - new VectorStoreRecordDataProperty("name", typeof(string)), - new VectorStoreRecordDataProperty("code", typeof(int)), - new VectorStoreRecordDataProperty("rating", typeof(float?)), - new VectorStoreRecordDataProperty("description", typeof(string)), - new VectorStoreRecordDataProperty("parking_is_included", typeof(bool)), - new VectorStoreRecordDataProperty("tags", typeof(List)), - vectorProperty, - new VectorStoreRecordVectorProperty("embedding2", typeof(ReadOnlyMemory?)) - { - Dimensions = 10, - IndexKind = "hnsw", - } - ] - }; - - var vector = new Vector(s_vector); - - // Act - var cmdInfo = builder.BuildGetNearestMatchCommand("public", "testcollection", - properties: recordDefinition.Properties, - vectorProperty: vectorProperty, - vectorValue: vector, - filter: null, - skip: null, - includeVectors: true, - limit: 10); - - // Assert - Assert.Contains("SELECT", cmdInfo.CommandText); - Assert.Contains("FROM public.\"testcollection\"", cmdInfo.CommandText); - Assert.Contains("ORDER BY", cmdInfo.CommandText); - Assert.Contains("LIMIT 10", cmdInfo.CommandText); - - // Output - this._output.WriteLine(cmdInfo.CommandText); - } } diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs 
b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs index 623f997a4ed2..afd5e545030a 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs @@ -10,6 +10,8 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant.UnitTests; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + /// /// Contains tests for the class. /// @@ -35,7 +37,7 @@ public void BuildFilterMapsEqualityClause(string type) var filter = new VectorSearchFilter().EqualTo("FieldName", expected); // Act. - var actual = QdrantVectorStoreCollectionSearchMapping.BuildFilter(filter, new Dictionary() { { "FieldName", "storage_FieldName" } }); + var actual = QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(filter, new Dictionary() { { "FieldName", "storage_FieldName" } }); // Assert. Assert.Single(actual.Must); @@ -69,7 +71,7 @@ public void BuildFilterMapsTagContainsClause() var filter = new VectorSearchFilter().AnyTagEqualTo("FieldName", "Value"); // Act. - var actual = QdrantVectorStoreCollectionSearchMapping.BuildFilter(filter, new Dictionary() { { "FieldName", "storage_FieldName" } }); + var actual = QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(filter, new Dictionary() { { "FieldName", "storage_FieldName" } }); // Assert. Assert.Single(actual.Must); @@ -84,7 +86,7 @@ public void BuildFilterThrowsForUnknownFieldName() var filter = new VectorSearchFilter().EqualTo("FieldName", "Value"); // Act and Assert. - Assert.Throws(() => QdrantVectorStoreCollectionSearchMapping.BuildFilter(filter, new Dictionary())); + Assert.Throws(() => QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(filter, new Dictionary())); } [Fact] diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs index 1bb89a91344e..666efcc4647b 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs @@ -545,6 +545,7 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() new() { VectorStoreRecordDefinition = definition, PointStructCustomMapper = Mock.Of, PointStruct>>() }); } +#pragma warning disable CS0618 // VectorSearchFilter is obsolete [Theory] [MemberData(nameof(TestOptions))] public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, bool hasNamedVectors, TKey testRecordKey) @@ -593,6 +594,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, bo Assert.Equal(new float[] { 1, 2, 3, 4 }, results.First().Record.Vector!.Value.ToArray()); Assert.Equal(0.5f, results.First().Score); } +#pragma warning restore CS0618 // VectorSearchFilter is obsolete private void SetupRetrieveMock(List retrievedPoints) { diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs index 5457582661ee..fb15d0031c2b 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -415,6 +415,7 @@ public async 
Task CanUpsertRecordWithCustomMapperAsync() Times.Once); } +#pragma warning disable CS0618 // VectorSearchFilter is obsolete [Theory] [InlineData(true, true)] [InlineData(true, false)] @@ -508,6 +509,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, bool inc Assert.False(results.First().Record.Vector.HasValue); } } +#pragma warning restore CS0618 // VectorSearchFilter is obsolete /// /// Tests that the collection can be created even if the definition and the type do not match. diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs index 20d1b0da5831..6cfe1f17960e 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs @@ -16,6 +16,8 @@ namespace Microsoft.SemanticKernel.Connectors.Redis.UnitTests; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + /// /// Contains tests for the class. /// diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs index 8253801a8cb7..1301ee6a7eb9 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs @@ -8,6 +8,8 @@ namespace Microsoft.SemanticKernel.Connectors.Redis.UnitTests; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + /// /// Contains tests for the class. /// @@ -70,7 +72,7 @@ public void BuildQueryBuildsRedisQueryWithDefaults() var firstVectorPropertyName = "storage_Vector"; // Act. - var query = RedisVectorStoreCollectionSearchMapping.BuildQuery(byteArray, new VectorSearchOptions(), storagePropertyNames, firstVectorPropertyName, null); + var query = RedisVectorStoreCollectionSearchMapping.BuildQuery(byteArray, new VectorSearchOptions(), storagePropertyNames, firstVectorPropertyName, null); // Assert. Assert.NotNull(query); @@ -86,7 +88,7 @@ public void BuildQueryBuildsRedisQueryWithCustomVectorName() // Arrange. var floatVector = new ReadOnlyMemory(new float[] { 1.0f, 2.0f, 3.0f }); var byteArray = MemoryMarshal.AsBytes(floatVector.Span).ToArray(); - var vectorSearchOptions = new VectorSearchOptions { Top = 5, Skip = 3, VectorPropertyName = "Vector" }; + var vectorSearchOptions = new VectorSearchOptions { Top = 5, Skip = 3, VectorPropertyName = "Vector" }; var storagePropertyNames = new Dictionary() { { "Vector", "storage_Vector" }, @@ -108,7 +110,7 @@ public void BuildQueryFailsForInvalidVectorName() // Arrange. var floatVector = new ReadOnlyMemory(new float[] { 1.0f, 2.0f, 3.0f }); var byteArray = MemoryMarshal.AsBytes(floatVector.Span).ToArray(); - var vectorSearchOptions = new VectorSearchOptions { VectorPropertyName = "UnknownVector" }; + var vectorSearchOptions = new VectorSearchOptions { VectorPropertyName = "UnknownVector" }; var storagePropertyNames = new Dictionary() { { "Vector", "storage_Vector" }, @@ -149,7 +151,7 @@ public void BuildFilterBuildsEqualityFilter(string filterType) }; // Act. 
- var filter = RedisVectorStoreCollectionSearchMapping.BuildFilter(basicVectorSearchFilter, storagePropertyNames); + var filter = RedisVectorStoreCollectionSearchMapping.BuildLegacyFilter(basicVectorSearchFilter, storagePropertyNames); // Assert. switch (filterType) @@ -184,7 +186,7 @@ public void BuildFilterThrowsForInvalidValueType() // Act & Assert. Assert.Throws(() => { - var filter = RedisVectorStoreCollectionSearchMapping.BuildFilter(basicVectorSearchFilter, storagePropertyNames); + var filter = RedisVectorStoreCollectionSearchMapping.BuildLegacyFilter(basicVectorSearchFilter, storagePropertyNames); }); } @@ -201,7 +203,7 @@ public void BuildFilterThrowsForUnknownFieldName() // Act & Assert. Assert.Throws(() => { - var filter = RedisVectorStoreCollectionSearchMapping.BuildFilter(basicVectorSearchFilter, storagePropertyNames); + var filter = RedisVectorStoreCollectionSearchMapping.BuildLegacyFilter(basicVectorSearchFilter, storagePropertyNames); }); } @@ -211,7 +213,7 @@ public void ResolveDistanceFunctionReturnsCosineSimilarityIfNoDistanceFunctionSp var property = new VectorStoreRecordVectorProperty("Prop", typeof(ReadOnlyMemory)); // Act. - var resolvedDistanceFunction = RedisVectorStoreCollectionSearchMapping.ResolveDistanceFunction(new VectorSearchOptions(), [property], property); + var resolvedDistanceFunction = RedisVectorStoreCollectionSearchMapping.ResolveDistanceFunction(new VectorSearchOptions(), [property], property); // Assert. Assert.Equal(DistanceFunction.CosineSimilarity, resolvedDistanceFunction); @@ -223,7 +225,7 @@ public void ResolveDistanceFunctionReturnsDistanceFunctionFromFirstPropertyIfNoF var property = new VectorStoreRecordVectorProperty("Prop", typeof(ReadOnlyMemory)) { DistanceFunction = DistanceFunction.DotProductSimilarity }; // Act. - var resolvedDistanceFunction = RedisVectorStoreCollectionSearchMapping.ResolveDistanceFunction(new VectorSearchOptions(), [property], property); + var resolvedDistanceFunction = RedisVectorStoreCollectionSearchMapping.ResolveDistanceFunction(new VectorSearchOptions(), [property], property); // Assert. Assert.Equal(DistanceFunction.DotProductSimilarity, resolvedDistanceFunction); @@ -236,7 +238,7 @@ public void ResolveDistanceFunctionReturnsDistanceFunctionFromChosenPropertyIfFi var property2 = new VectorStoreRecordVectorProperty("Prop2", typeof(ReadOnlyMemory)) { DistanceFunction = DistanceFunction.DotProductSimilarity }; // Act. - var resolvedDistanceFunction = RedisVectorStoreCollectionSearchMapping.ResolveDistanceFunction(new VectorSearchOptions() { VectorPropertyName = "Prop2" }, [property1, property2], property1); + var resolvedDistanceFunction = RedisVectorStoreCollectionSearchMapping.ResolveDistanceFunction(new VectorSearchOptions { VectorPropertyName = "Prop2" }, [property1, property2], property1); // Assert. Assert.Equal(DistanceFunction.DotProductSimilarity, resolvedDistanceFunction); @@ -260,4 +262,8 @@ public void GetOutputScoreFromRedisScoreLeavesNonConsineSimilarityUntouched(stri // Act & Assert. Assert.Equal(score, RedisVectorStoreCollectionSearchMapping.GetOutputScoreFromRedisScore(score, distanceFunction)); } + +#pragma warning disable CA1812 // An internal class that is apparently never instantiated. If so, remove the code from the assembly. 
+ private sealed class DummyType; +#pragma warning restore CA1812 } diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs index 9d79fd640a33..370756cb4344 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs @@ -233,6 +233,8 @@ public void ItBuildsSelectLeftJoinCommand(string? orderByPropertyName) leftTablePropertyNames, rightTablePropertyNames, conditions, + extraWhereFilter: null, + extraParameters: null, orderByPropertyName); // Assert diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs index 6c4f8336654f..a0fa8b4f0ae0 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs @@ -10,6 +10,8 @@ namespace SemanticKernel.Connectors.Weaviate.UnitTests; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + /// /// Unit tests for class. /// @@ -72,7 +74,7 @@ hotelName hotelCode } """; - var searchOptions = new VectorSearchOptions + var searchOptions = new VectorSearchOptions { Skip = 2, Top = 3, @@ -102,7 +104,7 @@ hotelName hotelCode public void BuildSearchQueryWithIncludedVectorsReturnsValidQuery() { // Arrange - var searchOptions = new VectorSearchOptions + var searchOptions = new VectorSearchOptions { Skip = 2, Top = 3, @@ -133,7 +135,7 @@ public void BuildSearchQueryWithFilterReturnsValidQuery() const string ExpectedFirstSubquery = """{ path: ["hotelName"], operator: Equal, valueText: "Test Name" }"""; const string ExpectedSecondSubquery = """{ path: ["tags"], operator: ContainsAny, valueText: ["t1"] }"""; - var searchOptions = new VectorSearchOptions + var searchOptions = new VectorSearchOptions { Skip = 2, Top = 3, @@ -164,7 +166,7 @@ public void BuildSearchQueryWithFilterReturnsValidQuery() public void BuildSearchQueryWithInvalidFilterValueThrowsException() { // Arrange - var searchOptions = new VectorSearchOptions + var searchOptions = new VectorSearchOptions { Skip = 2, Top = 3, @@ -189,7 +191,7 @@ public void BuildSearchQueryWithInvalidFilterValueThrowsException() public void BuildSearchQueryWithNonExistentPropertyInFilterThrowsException() { // Arrange - var searchOptions = new VectorSearchOptions + var searchOptions = new VectorSearchOptions { Skip = 2, Top = 3, @@ -212,6 +214,9 @@ public void BuildSearchQueryWithNonExistentPropertyInFilterThrowsException() #region private +#pragma warning disable CA1812 // An internal class that is apparently never instantiated. If so, remove the code from the assembly. 
+ private sealed class DummyType; +#pragma warning restore CA1812 private sealed class TestFilterValue; #endregion diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs index 0871c4978977..8f7ea996101d 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs @@ -530,11 +530,12 @@ public async Task VectorizedSearchWithNonExistentVectorPropertyNameThrowsExcepti // Arrange var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); - var searchOptions = new VectorSearchOptions { VectorPropertyName = "non-existent-property" }; - // Act & Assert await Assert.ThrowsAsync(async () => - await (await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), searchOptions)).Results.ToListAsync()); + await (await sut.VectorizedSearchAsync( + new ReadOnlyMemory([1f, 2f, 3f]), + new() { VectorPropertyName = "non-existent-property" })) + .Results.ToListAsync()); } public void Dispose() diff --git a/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml index 0860b81e7585..cd9bfbaa3ca7 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml +++ b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml @@ -1,5 +1,5 @@  - + CP0001 @@ -15,6 +15,13 @@ lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0001 + T:Microsoft.Extensions.VectorData.VectorSearchOptions + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0001 T:Microsoft.Extensions.VectorData.DeleteRecordOptions @@ -29,6 +36,13 @@ lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0001 + T:Microsoft.Extensions.VectorData.VectorSearchOptions + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0001 T:Microsoft.Extensions.VectorData.DeleteRecordOptions @@ -43,6 +57,27 @@ lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0001 + T:Microsoft.Extensions.VectorData.VectorSearchOptions + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0002 M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) @@ -71,6 +106,20 @@ lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0002 + 
M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0002 M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) @@ -99,6 +148,20 @@ lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0002 M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) @@ -127,6 +190,20 @@ lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0006 M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,System.Threading.CancellationToken) @@ -155,6 +232,20 @@ lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0006 M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,System.Threading.CancellationToken) @@ -183,6 +274,20 @@ 
lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0006 M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,System.Threading.CancellationToken) diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs index a0d5181b7668..5e39a541ef86 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs @@ -20,6 +20,6 @@ public interface IVectorizableTextSearch /// The records found by the vector search, including their result scores. Task> VectorizableTextSearchAsync( string searchText, - VectorSearchOptions? options = default, + VectorSearchOptions? options = default, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs index 9ac93383b18d..3286fafc15fc 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs @@ -21,6 +21,6 @@ public interface IVectorizedSearch /// The records found by the vector search, including their result scores. Task> VectorizedSearchAsync( TVector vector, - VectorSearchOptions? options = default, + VectorSearchOptions? options = default, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs index a8b941776eff..9d167fcb160b 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs @@ -14,6 +14,7 @@ namespace Microsoft.Extensions.VectorData; /// to request that the underlying service filter the search results. /// All clauses are combined with and. /// +[Obsolete("Use VectorSearchOptions.NewFilter instead of VectorSearchOptions.Filter")] public sealed class VectorSearchFilter { /// The filter clauses to and together. diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs index a5773b0cc606..65d9c6e157c2 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs @@ -1,17 +1,26 @@ // Copyright (c) Microsoft. All rights reserved. 
+using System; +using System.Linq.Expressions; + namespace Microsoft.Extensions.VectorData; /// /// Options for vector search. /// -public class VectorSearchOptions +public class VectorSearchOptions { /// /// Gets or sets a search filter to use before doing the vector search. /// + [Obsolete("Use NewFilter instead")] public VectorSearchFilter? Filter { get; init; } + /// + /// Gets or sets a search filter to use before doing the vector search. + /// + public Expression>? NewFilter { get; init; } + /// /// Gets or sets the name of the vector property to search on. /// Use the name of the vector property from your data model or as provided in the record definition. diff --git a/dotnet/src/Functions/Functions.OpenApi/Functions.OpenApi.csproj b/dotnet/src/Functions/Functions.OpenApi/Functions.OpenApi.csproj index 2d37b88dca4a..1d72c971fcba 100644 --- a/dotnet/src/Functions/Functions.OpenApi/Functions.OpenApi.csproj +++ b/dotnet/src/Functions/Functions.OpenApi/Functions.OpenApi.csproj @@ -29,4 +29,8 @@ + + + + \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs index e3a420a789f4..f7fb10081c76 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -14,6 +14,8 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureAISearch; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + /// /// Integration tests for class. /// Tests work with an Azure AI Search Instance. 
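// Illustrative sketch (not part of the patch): how a caller might move from the
// clause-based VectorSearchFilter, which the change above marks [Obsolete], to the
// LINQ-based NewFilter on the now-generic VectorSearchOptions<TRecord>.
// The Hotel type, the 'collection' variable and 'queryVector' are hypothetical
// stand-ins; property and method names follow the diffs in this patch.

// Before: string/clause based filter (still compiles, but reports CS0618).
#pragma warning disable CS0618 // VectorSearchFilter is obsolete
var legacyOptions = new VectorSearchOptions<Hotel>
{
    Filter = new VectorSearchFilter().EqualTo("HotelCode", 1).AnyTagEqualTo("Tags", "t1"),
    Top = 3,
    IncludeVectors = true,
};
#pragma warning restore CS0618 // VectorSearchFilter is obsolete

// After: strongly typed LINQ expression over the record type.
var newOptions = new VectorSearchOptions<Hotel>
{
    NewFilter = r => r.HotelCode == 1 && r.Tags.Contains("t1"),
    Top = 3,
    IncludeVectors = true,
};

// Searching works the same way with either options instance.
var searchResults = await collection.VectorizedSearchAsync(queryVector, newOptions);
await foreach (var match in searchResults.Results)
{
    Console.WriteLine($"{match.Record.HotelName}: {match.Score}");
}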
@@ -63,7 +65,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe var embedding = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"); var actual = await sut.VectorizedSearchAsync( embedding, - new VectorSearchOptions + new() { IncludeVectors = true, Filter = new VectorSearchFilter().EqualTo("HotelName", "MyHotel Upsert-1") diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index c5929e0ecaa2..7f471405b8c9 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -12,6 +12,8 @@ namespace SemanticKernel.IntegrationTests.Connectors.AzureCosmosDBMongoDB; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + [Collection("AzureCosmosDBMongoDBVectorStoreCollection")] public class AzureCosmosDBMongoDBVectorStoreRecordCollectionTests(AzureCosmosDBMongoDBVectorStoreFixture fixture) { diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 6a0e249f4d7e..3864a48288ef 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -13,6 +13,8 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureCosmosDBNoSQL; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + /// /// Integration tests for class. 
/// diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs index 11da55ba3329..3f88b10eef4b 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs @@ -12,6 +12,8 @@ namespace SemanticKernel.IntegrationTests.Connectors.MongoDB; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + [Collection("MongoDBVectorStoreCollection")] public class MongoDBVectorStoreRecordCollectionTests(MongoDBVectorStoreFixture fixture) { diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs index e30b2f35fbae..7e19c73128d0 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs @@ -15,6 +15,8 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Pinecone; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + [Collection("PineconeVectorStoreTests")] [PineconeApiKeySetCondition] public class PineconeVectorStoreRecordCollectionTests(PineconeVectorStoreFixture fixture) : IClassFixture @@ -293,7 +295,7 @@ public async Task InsertGetModifyDeleteVectorAsync(bool collectionFromVectorStor // update await hotelRecordCollection.UpsertAsync(langriSha); - // this is not great but no vectors are added so we can't query status for number of vectors like we do for insert/delete + // this is not great but no vectors are added so we can't query status for number of vectors like we do for insert/delete await Task.Delay(2000); var updated = await hotelRecordCollection.GetAsync("langri-sha", new GetRecordOptions { IncludeVectors = true }); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs index 7e3ae3ad9392..6a479f0b10bf 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs @@ -10,6 +10,8 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Postgres; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + [Collection("PostgresVectorStoreCollection")] public sealed class PostgresVectorStoreRecordCollectionTests(PostgresVectorStoreFixture fixture) { diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs index 135d09d025aa..940687525238 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs @@ -15,6 +15,8 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Qdrant; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + /// /// Contains tests for the class. 
/// @@ -66,7 +68,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool hasNamedVec var vector = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"); var actual = await sut.VectorizedSearchAsync( vector, - new VectorSearchOptions { Filter = new VectorSearchFilter().EqualTo("HotelCode", 30).AnyTagEqualTo("Tags", "t2") }); + new() { Filter = new VectorSearchFilter().EqualTo("HotelCode", 30).AnyTagEqualTo("Tags", "t2") }); // Assert var collectionExistResult = await sut.CollectionExistsAsync(); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs index ef7ba087cf87..61018b2b7589 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -13,6 +13,8 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Redis; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + /// /// Contains tests for the class. /// @@ -65,7 +67,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe var actual = await sut .VectorizedSearchAsync( new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }), - new VectorSearchOptions { Filter = new VectorSearchFilter().EqualTo("HotelCode", 1), IncludeVectors = true }); + new() { Filter = new VectorSearchFilter().EqualTo("HotelCode", 1), IncludeVectors = true }); // Assert var collectionExistResult = await sut.CollectionExistsAsync(); @@ -316,7 +318,7 @@ public async Task ItCanSearchWithFloat32VectorAndFilterAsync(string filterType, // Act var actual = await sut.VectorizedSearchAsync( vector, - new VectorSearchOptions + new() { IncludeVectors = includeVectors, Filter = filter @@ -360,7 +362,7 @@ public async Task ItCanSearchWithFloat32VectorAndTopSkipAsync() // Act var actual = await sut.VectorizedSearchAsync( vector, - new VectorSearchOptions + new() { Top = 3, Skip = 2 @@ -390,7 +392,7 @@ public async Task ItCanSearchWithFloat64VectorAsync(bool includeVectors) // Act var actual = await sut.VectorizedSearchAsync( vector, - new VectorSearchOptions + new() { IncludeVectors = includeVectors, Top = 1 diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs index 1e6c3d9aed0e..a12d710d9446 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs @@ -13,6 +13,8 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Redis; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + /// /// Contains tests for the class. 
/// @@ -64,7 +66,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe var getResult = await sut.GetAsync("Upsert-10", new GetRecordOptions { IncludeVectors = true }); var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }), - new VectorSearchOptions { Filter = new VectorSearchFilter().EqualTo("HotelCode", 10) }); + new() { Filter = new VectorSearchFilter().EqualTo("HotelCode", 10) }); // Assert var collectionExistResult = await sut.CollectionExistsAsync(); @@ -346,7 +348,7 @@ public async Task ItCanSearchWithFloat32VectorAndFilterAsync(string filterType) // Act var actual = await sut.VectorizedSearchAsync( vector, - new VectorSearchOptions { IncludeVectors = true, Filter = filter }); + new() { IncludeVectors = true, Filter = filter }); // Assert var searchResults = await actual.Results.ToListAsync(); @@ -384,7 +386,7 @@ public async Task ItCanSearchWithFloat32VectorAndTopSkipAsync() // Act var actual = await sut.VectorizedSearchAsync( vector, - new VectorSearchOptions + new() { Top = 3, Skip = 2 @@ -414,7 +416,7 @@ public async Task ItCanSearchWithFloat64VectorAsync(bool includeVectors) // Act var actual = await sut.VectorizedSearchAsync( vector, - new VectorSearchOptions + new() { IncludeVectors = includeVectors, Top = 1 diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs index 214510438d59..c0dbb5fcf680 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs @@ -10,6 +10,8 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Sqlite; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + /// /// Integration tests for class. /// diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs index 9ffaf3172eec..bd6348932937 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs @@ -10,6 +10,8 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Weaviate; +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + [Collection("WeaviateVectorStoreCollection")] public sealed class WeaviateVectorStoreRecordCollectionTests(WeaviateVectorStoreFixture fixture) { diff --git a/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs b/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs index 90ce87f14482..143c61f69e5f 100644 --- a/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs +++ b/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs @@ -102,7 +102,7 @@ public Task>> GenerateEmbeddingsAsync(IList protected sealed class VectorizedSearchWrapper(IVectorizedSearch vectorizedSearch, ITextEmbeddingGenerationService textEmbeddingGeneration) : IVectorizableTextSearch { /// - public async Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { var vectorizedQuery = await textEmbeddingGeneration!.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/UnreachableException.cs b/dotnet/src/InternalUtilities/src/Diagnostics/UnreachableException.cs new file mode 100644 index 000000000000..616073f54705 --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Diagnostics/UnreachableException.cs @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft. All rights reserved. + +#if NETSTANDARD2_0 + +// Polyfill for using UnreachableException with .NET Standard 2.0 + +namespace System.Diagnostics; + +#pragma warning disable CA1064 // Exceptions should be public +#pragma warning disable CA1812 // Internal class that is (sometimes) never instantiated. + +/// +/// Exception thrown when the program executes an instruction that was thought to be unreachable. +/// +internal sealed class UnreachableException : Exception +{ + private const string MessageText = "The program executed an instruction that was thought to be unreachable."; + + /// + /// Initializes a new instance of the class with the default error message. + /// + public UnreachableException() + : base(MessageText) + { + } + + /// + /// Initializes a new instance of the + /// class with a specified error message. + /// + /// The error message that explains the reason for the exception. + public UnreachableException(string? message) + : base(message ?? MessageText) + { + } + + /// + /// Initializes a new instance of the + /// class with a specified error message and a reference to the inner exception that is the cause of + /// this exception. + /// + /// The error message that explains the reason for the exception. + /// The exception that is the cause of the current exception. + public UnreachableException(string? message, Exception? innerException) + : base(message ?? MessageText, innerException) + { + } +} + +#endif diff --git a/dotnet/src/InternalUtilities/src/System/IndexRange.cs b/dotnet/src/InternalUtilities/src/System/IndexRange.cs new file mode 100644 index 000000000000..439e6e844fb6 --- /dev/null +++ b/dotnet/src/InternalUtilities/src/System/IndexRange.cs @@ -0,0 +1,288 @@ +// Copyright (c) Microsoft. All rights reserved. + +#if NETSTANDARD2_0 + +// Polyfill for using Index and Range with .NET Standard 2.0 (see https://www.meziantou.net/how-to-use-csharp-8-indices-and-ranges-in-dotnet-standard-2-0-and-dotn.htm) + +// https://github.com/dotnet/runtime/blob/419e949d258ecee4c40a460fb09c66d974229623/src/libraries/System.Private.CoreLib/src/System/Index.cs +// https://github.com/dotnet/runtime/blob/419e949d258ecee4c40a460fb09c66d974229623/src/libraries/System.Private.CoreLib/src/System/Range.cs + +#pragma warning disable RCS1168 +#pragma warning disable RCS1211 +#pragma warning disable IDE0009 +#pragma warning disable IDE0011 +#pragma warning disable IDE0090 + +using System.Runtime.CompilerServices; + +namespace System +{ + /// Represent a type can be used to index a collection either from the start or the end. + /// + /// Index is used by the C# compiler to support the new index syntax + /// + /// int[] someArray = new int[5] { 1, 2, 3, 4, 5 } ; + /// int lastElement = someArray[^1]; // lastElement = 5 + /// + /// + internal readonly struct Index : IEquatable + { + private readonly int _value; + + /// Construct an Index using a value and indicating if the index is from the start or from the end. + /// The index value. 
it has to be zero or positive number. + /// Indicating if the index is from the start or from the end. + /// + /// If the Index constructed from the end, index value 1 means pointing at the last element and index value 0 means pointing at beyond last element. + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public Index(int value, bool fromEnd = false) + { + if (value < 0) + { + throw new ArgumentOutOfRangeException(nameof(value), "value must be non-negative"); + } + + if (fromEnd) + _value = ~value; + else + _value = value; + } + + // The following private constructors mainly created for perf reason to avoid the checks + private Index(int value) + { + _value = value; + } + + /// Create an Index pointing at first element. + public static Index Start => new Index(0); + + /// Create an Index pointing at beyond last element. + public static Index End => new Index(~0); + + /// Create an Index from the start at the position indicated by the value. + /// The index value from the start. + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Index FromStart(int value) + { + if (value < 0) + { + throw new ArgumentOutOfRangeException(nameof(value), "value must be non-negative"); + } + + return new Index(value); + } + + /// Create an Index from the end at the position indicated by the value. + /// The index value from the end. + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static Index FromEnd(int value) + { + if (value < 0) + { + throw new ArgumentOutOfRangeException(nameof(value), "value must be non-negative"); + } + + return new Index(~value); + } + + /// Returns the index value. + public int Value + { + get + { + if (_value < 0) + { + return ~_value; + } + else + { + return _value; + } + } + } + + /// Indicates whether the index is from the start or the end. + public bool IsFromEnd => _value < 0; + + /// Calculate the offset from the start using the giving collection length. + /// The length of the collection that the Index will be used with. length has to be a positive value + /// + /// For performance reason, we don't validate the input length parameter and the returned offset value against negative values. + /// we don't validate either the returned offset is greater than the input length. + /// It is expected Index will be used with collections which always have non negative length/count. If the returned offset is negative and + /// then used to index a collection will get out of range exception which will be same affect as the validation. + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public int GetOffset(int length) + { + var offset = _value; + if (IsFromEnd) + { + // offset = length - (~value) + // offset = length + (~(~value) + 1) + // offset = length + value + 1 + + offset += length + 1; + } + return offset; + } + + /// Indicates whether the current Index object is equal to another object of the same type. + /// An object to compare with this object + public override bool Equals(object? value) => value is Index && _value == ((Index)value)._value; + + /// Indicates whether the current Index object is equal to another Index object. + /// An object to compare with this object + public bool Equals(Index other) => _value == other._value; + + /// Returns the hash code for this instance. + public override int GetHashCode() => _value; + + /// Converts integer number to an Index. 
+ public static implicit operator Index(int value) => FromStart(value); + + /// Converts the value of the current Index object to its equivalent string representation. + public override string ToString() + { + if (IsFromEnd) + return "^" + ((uint)Value).ToString(); + + return ((uint)Value).ToString(); + } + } + + /// Represent a range has start and end indexes. + /// + /// Range is used by the C# compiler to support the range syntax. + /// + /// int[] someArray = new int[5] { 1, 2, 3, 4, 5 }; + /// int[] subArray1 = someArray[0..2]; // { 1, 2 } + /// int[] subArray2 = someArray[1..^0]; // { 2, 3, 4, 5 } + /// + /// + internal readonly struct Range : IEquatable + { + /// Represent the inclusive start index of the Range. + public Index Start { get; } + + /// Represent the exclusive end index of the Range. + public Index End { get; } + + /// Construct a Range object using the start and end indexes. + /// Represent the inclusive start index of the range. + /// Represent the exclusive end index of the range. + public Range(Index start, Index end) + { + Start = start; + End = end; + } + + /// Indicates whether the current Range object is equal to another object of the same type. + /// An object to compare with this object + public override bool Equals(object? value) => + value is Range r && + r.Start.Equals(Start) && + r.End.Equals(End); + + /// Indicates whether the current Range object is equal to another Range object. + /// An object to compare with this object + public bool Equals(Range other) => other.Start.Equals(Start) && other.End.Equals(End); + + /// Returns the hash code for this instance. + public override int GetHashCode() + { + return Start.GetHashCode() * 31 + End.GetHashCode(); + } + + /// Converts the value of the current Range object to its equivalent string representation. + public override string ToString() + { + return Start + ".." + End; + } + + /// Create a Range object starting from start index to the end of the collection. + public static Range StartAt(Index start) => new Range(start, Index.End); + + /// Create a Range object starting from first element in the collection to the end Index. + public static Range EndAt(Index end) => new Range(Index.Start, end); + + /// Create a Range object starting from first element to the end. + public static Range All => new Range(Index.Start, Index.End); + + /// Calculate the start offset and length of range object using a collection length. + /// The length of the collection that the range will be used with. length has to be a positive value. + /// + /// For performance reason, we don't validate the input length parameter against negative values. + /// It is expected Range will be used with collections which always have non negative length/count. + /// We validate the range is inside the length scope though. + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public (int Offset, int Length) GetOffsetAndLength(int length) + { + int start; + var startIndex = Start; + if (startIndex.IsFromEnd) + start = length - startIndex.Value; + else + start = startIndex.Value; + + int end; + var endIndex = End; + if (endIndex.IsFromEnd) + end = length - endIndex.Value; + else + end = endIndex.Value; + + if ((uint)end > (uint)length || (uint)start > (uint)end) + { + throw new ArgumentOutOfRangeException(nameof(length)); + } + + return (start, end - start); + } + } +} + +namespace System.Runtime.CompilerServices +{ + internal static class RuntimeHelpers + { + /// + /// Slices the specified array using the specified range. 
+ /// + public static T[] GetSubArray(T[] array, Range range) + { + if (array == null) + { + throw new ArgumentNullException(nameof(array)); + } + + (int offset, int length) = range.GetOffsetAndLength(array.Length); + + if (default(T) != null || typeof(T[]) == array.GetType()) + { + // We know the type of the array to be exactly T[]. + + if (length == 0) + { + return Array.Empty(); + } + + var dest = new T[length]; + Array.Copy(array, offset, dest, 0, length); + return dest; + } + else + { + // The array is actually a U[] where U:T. + var dest = (T[])Array.CreateInstance(array.GetType().GetElementType(), length); + Array.Copy(array, offset, dest, 0, length); + return dest; + } + } + } +} + +#endif diff --git a/dotnet/src/Plugins/Plugins.Web/Bing/BingTextSearch.cs b/dotnet/src/Plugins/Plugins.Web/Bing/BingTextSearch.cs index 97526f388b17..556e04f148d3 100644 --- a/dotnet/src/Plugins/Plugins.Web/Bing/BingTextSearch.cs +++ b/dotnet/src/Plugins/Plugins.Web/Bing/BingTextSearch.cs @@ -241,6 +241,7 @@ public TextSearchResult MapFromResultToTextSearchResult(object result) } } +#pragma warning disable CS0618 // FilterClause is obsolete /// /// Build a query string from the /// @@ -280,5 +281,7 @@ private static string BuildQuery(string query, TextSearchOptions searchOptions) return fullQuery.ToString(); } +#pragma warning restore CS0618 // FilterClause is obsolete + #endregion } diff --git a/dotnet/src/Plugins/Plugins.Web/Google/GoogleTextSearch.cs b/dotnet/src/Plugins/Plugins.Web/Google/GoogleTextSearch.cs index a42500fa7c4e..c4165a2edadc 100644 --- a/dotnet/src/Plugins/Plugins.Web/Google/GoogleTextSearch.cs +++ b/dotnet/src/Plugins/Plugins.Web/Google/GoogleTextSearch.cs @@ -160,6 +160,7 @@ public void Dispose() return await search.ExecuteAsync(cancellationToken).ConfigureAwait(false); } +#pragma warning disable CS0618 // FilterClause is obsolete /// /// Add basic filters to the Google search metadata. /// @@ -192,6 +193,7 @@ private void AddFilters(CseResource.ListRequest search, TextSearchOptions search } } } +#pragma warning restore CS0618 // FilterClause is obsolete /// /// Return the search results as instances of . diff --git a/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj b/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj index 235c08e4d52b..47043cbe1df8 100644 --- a/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj +++ b/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj @@ -57,6 +57,9 @@ + + + diff --git a/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs b/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs index 454d82ace013..b39976adbebf 100644 --- a/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs +++ b/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs @@ -13,7 +13,7 @@ public MockVectorizableTextSearch(IEnumerable> searc this._searchResults = ToAsyncEnumerable(searchResults); } - public Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { return Task.FromResult(new VectorSearchResults(this._searchResults)); } diff --git a/dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml b/dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml index de2e33319a56..6c9084abb2ce 100644 --- a/dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml +++ b/dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml @@ -1,5 +1,5 @@  - + CP0002 @@ -29,6 +29,13 @@ lib/net8.0/Microsoft.SemanticKernel.Core.dll true + + CP0002 + M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) + lib/net8.0/Microsoft.SemanticKernel.Core.dll + lib/net8.0/Microsoft.SemanticKernel.Core.dll + true + CP0002 M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.DeleteAsync(`0,Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) @@ -57,4 +64,11 @@ lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll true + + CP0002 + M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + true + \ No newline at end of file diff --git a/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs b/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs index 6970294723ef..42781b1c5483 100644 --- a/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs +++ b/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs @@ -197,9 +197,11 @@ private TextSearchStringMapper CreateTextSearchStringMapper() private async Task> ExecuteVectorSearchAsync(string query, TextSearchOptions? searchOptions, CancellationToken cancellationToken) { searchOptions ??= new TextSearchOptions(); - var vectorSearchOptions = new VectorSearchOptions + var vectorSearchOptions = new VectorSearchOptions { +#pragma warning disable CS0618 // VectorSearchFilter is obsolete Filter = searchOptions.Filter?.FilterClauses is not null ? new VectorSearchFilter(searchOptions.Filter.FilterClauses) : null, +#pragma warning restore CS0618 // VectorSearchFilter is obsolete Skip = searchOptions.Skip, Top = searchOptions.Top, }; diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs index da062934cfbb..e94f321eed4a 100644 --- a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs +++ b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs @@ -31,7 +31,7 @@ public sealed class VolatileVectorStoreRecordCollection : IVector ]; /// The default options for vector search. - private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); /// Internal storage for all of the record collections. 
private readonly ConcurrentDictionary> _internalCollections; @@ -213,7 +213,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable record /// #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously - Need to satisfy the interface which returns IAsyncEnumerable - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) #pragma warning restore CS1998 { Verify.NotNull(vector); @@ -238,6 +238,11 @@ public async Task> VectorizedSearchAsync(T } // Filter records using the provided filter before doing the vector comparison. + if (internalOptions.NewFilter is not null) + { + throw new NotSupportedException("LINQ-based filtering is not supported with VolatileVectorStore, use Microsoft.SemanticKernel.Connectors.InMemory instead"); + } + var filteredRecords = VolatileVectorStoreCollectionSearchMapping.FilterRecords(internalOptions.Filter, this.GetCollectionDictionary().Values); // Compare each vector in the filtered results with the provided vector. diff --git a/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj b/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj index c4c4956a3fa8..268c2e470314 100644 --- a/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj +++ b/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj @@ -54,5 +54,5 @@ - + \ No newline at end of file diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs index 262289c567d0..c01fe06eddf4 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs @@ -126,7 +126,7 @@ public Task>> GenerateEmbeddingsAsync(IList public sealed class VectorizedSearchWrapper(IVectorizedSearch vectorizedSearch, ITextEmbeddingGenerationService textEmbeddingGeneration) : IVectorizableTextSearch { /// - public async Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { var vectorizedQuery = await textEmbeddingGeneration!.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); return await vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, options, cancellationToken); diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs index 9530c48fe574..edd169a725ff 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs @@ -294,7 +294,7 @@ public async Task CanSearchWithVectorAsync(bool useDefinition, TKey testKe // Act var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new VectorSearchOptions { IncludeVectors = true }, + new() { IncludeVectors = true }, this._testCancellationToken); // Assert @@ -338,7 +338,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, TK var filter = filterType == "Equality" ? new VectorSearchFilter().EqualTo("Data", $"data {testKey2}") : new VectorSearchFilter().AnyTagEqualTo("Tags", $"tag {testKey2}"); var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new VectorSearchOptions { IncludeVectors = true, Filter = filter, IncludeTotalCount = true }, + new() { IncludeVectors = true, Filter = filter, IncludeTotalCount = true }, this._testCancellationToken); // Assert @@ -390,7 +390,7 @@ public async Task CanSearchWithDifferentDistanceFunctionsAsync(string distanceFu // Act var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new VectorSearchOptions { IncludeVectors = true }, + new() { IncludeVectors = true }, this._testCancellationToken); // Assert @@ -431,7 +431,7 @@ public async Task CanSearchManyRecordsAsync(bool useDefinition) // Act var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new VectorSearchOptions { IncludeVectors = true, Top = 10, Skip = 10, IncludeTotalCount = true }, + new() { IncludeVectors = true, Top = 10, Skip = 10, IncludeTotalCount = true }, this._testCancellationToken); // Assert @@ -507,7 +507,7 @@ public async Task ItCanSearchUsingTheGenericDataModelAsync(TKey testKey1, // Act var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory([1, 1, 1, 1]), - new VectorSearchOptions { IncludeVectors = true, VectorPropertyName = "Vector" }, + new() { IncludeVectors = true, VectorPropertyName = "Vector" }, this._testCancellationToken); // Assert diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/AzureAISearchIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/AzureAISearchIntegrationTests.csproj new file mode 100644 index 000000000000..0fcc13f45809 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/AzureAISearchIntegrationTests.csproj @@ -0,0 +1,31 @@ + + + + net8.0;net472 + enable + enable + true + false + AzureAISearchIntegrationTests + b7762d10-e29b-4bb1-8b74-b6d69a667dd4 + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicFilterTests.cs 
b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicFilterTests.cs new file mode 100644 index 000000000000..9683543d3e98 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicFilterTests.cs @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Filter; +using Xunit; + +namespace AzureAISearchIntegrationTests.Filter; + +public class AzureAISearchBasicFilterTests(AzureAISearchFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture +{ + // Azure AI Search only supports search.in() over strings + public override Task Contains_over_inline_int_array() + => Assert.ThrowsAsync(() => base.Contains_over_inline_int_array()); +} diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchFilterFixture.cs new file mode 100644 index 000000000000..a5ec5df341dd --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchFilterFixture.cs @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft. All rights reserved. + +using AzureAISearchIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; + +namespace AzureAISearchIntegrationTests.Filter; + +public class AzureAISearchFilterFixture : FilterFixtureBase +{ + protected override TestStore TestStore => AzureAISearchTestStore.Instance; + + // Azure AI search only supports lowercase letters, digits or dashes. + protected override string StoreName => "filter-tests"; +} diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Properties/AssemblyAttributes.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Properties/AssemblyAttributes.cs new file mode 100644 index 000000000000..786c2742c2b3 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Properties/AssemblyAttributes.cs @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft. All rights reserved. + +[assembly: AzureAISearchIntegrationTests.Support.AzureAISearchUrlRequiredAttribute] diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestEnvironment.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestEnvironment.cs new file mode 100644 index 000000000000..27e905656870 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestEnvironment.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.Configuration; + +namespace AzureAISearchIntegrationTests.Support; + +#pragma warning disable CA1810 // Initialize all static fields when those fields are declared + +internal static class AzureAISearchTestEnvironment +{ + public static readonly string? 
ServiceUrl, ApiKey; + + public static bool IsConnectionInfoDefined => ServiceUrl is not null && ApiKey is not null; + + static AzureAISearchTestEnvironment() + { + var configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true) + .AddJsonFile(path: "testsettings.development.json", optional: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + var azureAISearchSection = configuration.GetSection("AzureAISearch"); + ServiceUrl = azureAISearchSection?["ServiceUrl"]; + ApiKey = azureAISearchSection?["ApiKey"]; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestStore.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestStore.cs new file mode 100644 index 000000000000..791005d55c9a --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestStore.cs @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Azure; +using Azure.Search.Documents.Indexes; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.AzureAISearch; +using VectorDataSpecificationTests.Support; + +namespace AzureAISearchIntegrationTests.Support; + +internal sealed class AzureAISearchTestStore : TestStore +{ + public static AzureAISearchTestStore Instance { get; } = new(); + + private SearchIndexClient? _client; + private AzureAISearchVectorStore? _defaultVectorStore; + + public SearchIndexClient Client + => this._client ?? throw new InvalidOperationException("Call InitializeAsync() first"); + + public override IVectorStore DefaultVectorStore + => this._defaultVectorStore ?? throw new InvalidOperationException("Call InitializeAsync() first"); + + public AzureAISearchVectorStore GetVectorStore(AzureAISearchVectorStoreOptions options) + => new(this.Client, options); + + private AzureAISearchTestStore() + { + } + + protected override Task StartAsync() + { + (string? serviceUrl, string? apiKey) = (AzureAISearchTestEnvironment.ServiceUrl, AzureAISearchTestEnvironment.ApiKey); + + if (string.IsNullOrWhiteSpace(serviceUrl) || string.IsNullOrWhiteSpace(apiKey)) + { + throw new InvalidOperationException("Service URL and API key are not configured, set AzureAISearch:ServiceUrl and AzureAISearch:ApiKey"); + } + + this._client = new SearchIndexClient(new Uri(serviceUrl), new AzureKeyCredential(apiKey)); + this._defaultVectorStore = new(this._client); + + return Task.CompletedTask; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchUrlRequiredAttribute.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchUrlRequiredAttribute.cs new file mode 100644 index 000000000000..1b30639bc1be --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchUrlRequiredAttribute.cs @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Xunit; + +namespace AzureAISearchIntegrationTests.Support; + +/// +/// Checks whether the Azure AI Search service URL and API key are configured, and skips the test(s) otherwise.
+/// +[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class | AttributeTargets.Assembly)] +public sealed class AzureAISearchUrlRequiredAttribute : Attribute, ITestCondition +{ + public ValueTask IsMetAsync() => new(AzureAISearchTestEnvironment.IsConnectionInfoDefined); + + public string Skip { get; set; } = "Service URL and API key are not configured, set AzureAISearch:ServiceUrl and AzureAISearch:ApiKey."; + + public string SkipReason + => this.Skip; +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CosmosMongoDBIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CosmosMongoDBIntegrationTests.csproj new file mode 100644 index 000000000000..aaf0dcf8160b --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CosmosMongoDBIntegrationTests.csproj @@ -0,0 +1,29 @@ + + + + net8.0;net472 + enable + enable + true + false + MongoDBIntegrationTests + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicFilterTests.cs new file mode 100644 index 000000000000..33d14908f537 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicFilterTests.cs @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace MongoDBIntegrationTests.Filter; + +public class CosmosMongoBasicFilterTests(CosmosMongoFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture +{ + // Specialized MongoDB syntax for NOT over Contains ($nin) + [ConditionalFact] + public virtual Task Not_over_Contains() + => this.TestFilterAsync(r => !new[] { 8, 10 }.Contains(r.Int)); + + #region Null checking + + // MongoDB currently doesn't support null checking ({ "Foo" : null }) in vector search pre-filters + public override Task Equal_with_null_reference_type() + => Assert.ThrowsAsync(() => base.Equal_with_null_reference_type()); + + public override Task Equal_with_null_captured() + => Assert.ThrowsAsync(() => base.Equal_with_null_captured()); + + public override Task NotEqual_with_null_reference_type() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_reference_type()); + + public override Task NotEqual_with_null_captured() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_captured()); + + #endregion + + #region Not + + // MongoDB currently doesn't support NOT in vector search pre-filters + // (https://www.mongodb.com/docs/atlas/atlas-vector-search/vector-search-stage/#atlas-vector-search-pre-filter) + public override Task Not_over_And() + => Assert.ThrowsAsync(() => base.Not_over_And()); + + public override Task Not_over_Or() + => Assert.ThrowsAsync(() => base.Not_over_Or()); + + #endregion + + public override Task Contains_over_field_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_array()); + + public override Task Contains_over_field_string_List() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_List()); + + // AnyTagEqualTo not (currently) supported on SQLite + [Obsolete("Legacy filter support")] + public override Task Legacy_AnyTagEqualTo_array() + => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_array()); + + 
[Obsolete("Legacy filter support")] + public override Task Legacy_AnyTagEqualTo_List() + => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_List()); +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoFilterFixture.cs new file mode 100644 index 000000000000..129c7b0cc337 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoFilterFixture.cs @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft. All rights reserved. + +using MongoDBIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; + +namespace MongoDBIntegrationTests.Filter; + +public class CosmosMongoFilterFixture : FilterFixtureBase +{ + protected override TestStore TestStore => CosmosMongoDBTestStore.Instance; + + protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.IvfFlat; + protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Properties/AssemblyAttributes.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Properties/AssemblyAttributes.cs new file mode 100644 index 000000000000..4e8438d68759 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Properties/AssemblyAttributes.cs @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft. All rights reserved. + +[assembly: CosmosIntegrationTests.Support.CosmosConnectionStringRequired] diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosConnectionStringRequiredAttribute.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosConnectionStringRequiredAttribute.cs new file mode 100644 index 000000000000..c944d36eb78c --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosConnectionStringRequiredAttribute.cs @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft. All rights reserved. + +using CosmosNoSQLIntegrationTests.Support; +using VectorDataSpecificationTests.Xunit; + +namespace CosmosIntegrationTests.Support; + +/// +/// Checks whether the Azure Cosmos DB for MongoDB connection string is configured, and skips the test(s) otherwise. +/// +[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class | AttributeTargets.Assembly)] +public sealed class CosmosConnectionStringRequiredAttribute : Attribute, ITestCondition +{ + public ValueTask IsMetAsync() => new(CosmosMongoDBTestEnvironment.IsConnectionStringDefined); + + public string Skip { get; set; } = "The Cosmos connection string hasn't been configured (AzureCosmosDBMongoDB:ConnectionString)."; + + public string SkipReason + => this.Skip; +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestEnvironment.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestEnvironment.cs new file mode 100644 index 000000000000..1adcb225e66d --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestEnvironment.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved.
+ +using Microsoft.Extensions.Configuration; + +namespace CosmosNoSQLIntegrationTests.Support; + +#pragma warning disable CA1810 // Initialize all static fields when those fields are declared + +public static class CosmosMongoDBTestEnvironment +{ + public static readonly string? ConnectionString; + + public static bool IsConnectionStringDefined => ConnectionString is not null; + + static CosmosMongoDBTestEnvironment() + { + var configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true) + .AddJsonFile(path: "testsettings.development.json", optional: true) + .AddEnvironmentVariables() + .Build(); + + ConnectionString = configuration["AzureCosmosDBMongoDB:ConnectionString"]; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestStore.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestStore.cs new file mode 100644 index 000000000000..b0d4c379ecf4 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestStore.cs @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft. All rights reserved. + +using CosmosNoSQLIntegrationTests.Support; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; +using MongoDB.Driver; +using VectorDataSpecificationTests.Support; + +namespace MongoDBIntegrationTests.Support; + +public sealed class CosmosMongoDBTestStore : TestStore +{ + public static CosmosMongoDBTestStore Instance { get; } = new(); + + private MongoClient? _client; + private IMongoDatabase? _database; + private AzureCosmosDBMongoDBVectorStore? _defaultVectorStore; + + public MongoClient Client => this._client ?? throw new InvalidOperationException("Not initialized"); + public IMongoDatabase Database => this._database ?? throw new InvalidOperationException("Not initialized"); + + public override IVectorStore DefaultVectorStore + => this._defaultVectorStore ?? 
throw new InvalidOperationException("Call InitializeAsync() first"); + + public AzureCosmosDBMongoDBVectorStore GetVectorStore(AzureCosmosDBMongoDBVectorStoreOptions options) + => new(this.Database, options); + + private CosmosMongoDBTestStore() + { + } + + protected override Task StartAsync() + { + if (string.IsNullOrWhiteSpace(CosmosMongoDBTestEnvironment.ConnectionString)) + { + throw new InvalidOperationException("Connection string is not configured, set the AzureCosmosDBMongoDB:ConnectionString environment variable"); + } + + this._client = new MongoClient(CosmosMongoDBTestEnvironment.ConnectionString); + this._database = this._client.GetDatabase("VectorSearchTests"); + this._defaultVectorStore = new(this._database); + + return Task.CompletedTask; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CosmosNoSQLIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CosmosNoSQLIntegrationTests.csproj new file mode 100644 index 000000000000..dd8e3f7a9ba0 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CosmosNoSQLIntegrationTests.csproj @@ -0,0 +1,29 @@ + + + + net8.0;net472 + enable + enable + true + false + CosmosNoSQLIntegrationTests + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicFilterTests.cs new file mode 100644 index 000000000000..b67141d82e6c --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicFilterTests.cs @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Filter; +using Xunit; + +namespace CosmosNoSQLIntegrationTests.Filter; + +public class CosmosNoSQLBasicFilterTests(CosmosNoSQLFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture; diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLFilterFixture.cs new file mode 100644 index 000000000000..8aaf6b86d4f9 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLFilterFixture.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using CosmosNoSQLIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; + +namespace CosmosNoSQLIntegrationTests.Filter; + +public class CosmosNoSQLFilterFixture : FilterFixtureBase +{ + protected override TestStore TestStore => CosmosNoSqlTestStore.Instance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Properties/AssemblyAttributes.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Properties/AssemblyAttributes.cs new file mode 100644 index 000000000000..183a8a7c926c --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Properties/AssemblyAttributes.cs @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +[assembly: CosmosNoSQLIntegrationTests.Support.CosmosConnectionStringRequired] diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosConnectionStringRequiredAttribute.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosConnectionStringRequiredAttribute.cs new file mode 100644 index 000000000000..2183f166d3ec --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosConnectionStringRequiredAttribute.cs @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Xunit; + +namespace CosmosNoSQLIntegrationTests.Support; + +/// +/// Checks whether the Azure Cosmos DB NoSQL connection string is configured, and skips the test(s) otherwise. +/// +[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class | AttributeTargets.Assembly)] +public sealed class CosmosConnectionStringRequiredAttribute : Attribute, ITestCondition +{ + public ValueTask IsMetAsync() => new(CosmosNoSQLTestEnvironment.IsConnectionStringDefined); + + public string Skip { get; set; } = "The Cosmos connection string hasn't been configured (AzureCosmosDBNoSQL:ConnectionString)."; + + public string SkipReason + => this.Skip; +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestEnvironment.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestEnvironment.cs new file mode 100644 index 000000000000..bd2848a2cb8f --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestEnvironment.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.Configuration; + +namespace CosmosNoSQLIntegrationTests.Support; + +#pragma warning disable CA1810 // Initialize all static fields when those fields are declared + +internal static class CosmosNoSQLTestEnvironment +{ + public static readonly string? ConnectionString; + + public static bool IsConnectionStringDefined => ConnectionString is not null; + + static CosmosNoSQLTestEnvironment() + { + var configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true) + .AddJsonFile(path: "testsettings.development.json", optional: true) + .AddEnvironmentVariables() + .Build(); + + ConnectionString = configuration["AzureCosmosDBNoSQL:ConnectionString"]; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestStore.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestStore.cs new file mode 100644 index 000000000000..7e3269ba2a27 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestStore.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft. All rights reserved. + +#if NET472 +using System.Net.Http; +#endif +using System.Text.Json; +using Microsoft.Azure.Cosmos; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; +using VectorDataSpecificationTests.Support; + +namespace CosmosNoSQLIntegrationTests.Support; + +#pragma warning disable CA1001 // Type owns disposable fields (_connection) but is not disposable + +internal sealed class CosmosNoSqlTestStore : TestStore +{ + public static CosmosNoSqlTestStore Instance { get; } = new(); + + private CosmosClient? _client; + private Database? _database; + private AzureCosmosDBNoSQLVectorStore?
_defaultVectorStore; + + public CosmosClient Client + => this._client ?? throw new InvalidOperationException("Call InitializeAsync() first"); + + public Database Database + => this._database ?? throw new InvalidOperationException("Call InitializeAsync() first"); + + public override IVectorStore DefaultVectorStore + => this._defaultVectorStore ?? throw new InvalidOperationException("Call InitializeAsync() first"); + + public AzureCosmosDBNoSQLVectorStore GetVectorStore(AzureCosmosDBNoSQLVectorStoreOptions options) + => new(this.Database, options); + + private CosmosNoSqlTestStore() + { + } + +#pragma warning disable CA5400 // HttpClient may be created without enabling CheckCertificateRevocationList + protected override async Task StartAsync() + { + var connectionString = CosmosNoSQLTestEnvironment.ConnectionString; + + if (string.IsNullOrWhiteSpace(connectionString)) + { + throw new InvalidOperationException("Connection string is not configured, set the AzureCosmosDBNoSQL:ConnectionString environment variable"); + } + + var options = new CosmosClientOptions + { + UseSystemTextJsonSerializerWithOptions = JsonSerializerOptions.Default, + ConnectionMode = ConnectionMode.Gateway, + HttpClientFactory = () => new HttpClient(new HttpClientHandler { ServerCertificateCustomValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator }) + }; + + this._client = new CosmosClient(connectionString, options); + this._database = this._client.GetDatabase("VectorDataIntegrationTests"); + await this._client.CreateDatabaseIfNotExistsAsync("VectorDataIntegrationTests"); + this._defaultVectorStore = new(this._database); + } +#pragma warning restore CA5400 +} diff --git a/dotnet/src/VectorDataIntegrationTests/Directory.Build.props b/dotnet/src/VectorDataIntegrationTests/Directory.Build.props new file mode 100644 index 000000000000..f5d133b5fd9f --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/Directory.Build.props @@ -0,0 +1,20 @@ + + + + + $(NoWarn);CA1515 + $(NoWarn);CA1707 + $(NoWarn);CA1716 + $(NoWarn);CA1720 + $(NoWarn);CA1861 + $(NoWarn);CA2007;VSTHRD111 + $(NoWarn);CS1591 + $(NoWarn);IDE1006 + + + + + $(NoWarn);CS8604;CS8602 + + + diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicFilterTests.cs new file mode 100644 index 000000000000..32adf75e9017 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicFilterTests.cs @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Filter; +using Xunit; + +namespace PostgresIntegrationTests.Filter; + +public class InMemoryBasicFilterTests(InMemoryFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture; diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryFilterFixture.cs new file mode 100644 index 000000000000..7952d1dffad3 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryFilterFixture.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using InMemoryIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; + +namespace PostgresIntegrationTests.Filter; + +public class InMemoryFilterFixture : FilterFixtureBase +{ + protected override TestStore TestStore => InMemoryTestStore.Instance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/InMemoryIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/InMemoryIntegrationTests.csproj new file mode 100644 index 000000000000..f77fff8de939 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/InMemoryIntegrationTests.csproj @@ -0,0 +1,26 @@ + + + + net8.0;net472 + enable + enable + true + false + InMemoryIntegrationTests + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemoryTestStore.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemoryTestStore.cs new file mode 100644 index 000000000000..246d5166c831 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemoryTestStore.cs @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.InMemory; +using VectorDataSpecificationTests.Support; + +namespace InMemoryIntegrationTests.Support; + +internal sealed class InMemoryTestStore : TestStore +{ + public static InMemoryTestStore Instance { get; } = new(); + + private InMemoryVectorStore _vectorStore = new(); + + public override IVectorStore DefaultVectorStore => this._vectorStore; + + private InMemoryTestStore() + { + } + + protected override Task StartAsync() + { + this._vectorStore = new InMemoryVectorStore(); + + return Task.CompletedTask; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicFilterTests.cs new file mode 100644 index 000000000000..a6ad4378f7a1 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicFilterTests.cs @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace MongoDBIntegrationTests.Filter; + +public class MongoDBBasicFilterTests(MongoDBFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture +{ + // Specialized MongoDB syntax for NOT over Contains ($nin) + [ConditionalFact] + public virtual Task Not_over_Contains() + => this.TestFilterAsync(r => !new[] { 8, 10 }.Contains(r.Int)); + + #region Null checking + + // MongoDB currently doesn't support null checking ({ "Foo" : null }) in vector search pre-filters + public override Task Equal_with_null_reference_type() + => Assert.ThrowsAsync(() => base.Equal_with_null_reference_type()); + + public override Task Equal_with_null_captured() + => Assert.ThrowsAsync(() => base.Equal_with_null_captured()); + + public override Task NotEqual_with_null_reference_type() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_reference_type()); + + public override Task NotEqual_with_null_captured() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_captured()); + + #endregion + + #region Not + + // MongoDB currently doesn't support NOT in vector search pre-filters + // (https://www.mongodb.com/docs/atlas/atlas-vector-search/vector-search-stage/#atlas-vector-search-pre-filter) + public override Task Not_over_And() + => Assert.ThrowsAsync(() => base.Not_over_And()); + + public override Task Not_over_Or() + => Assert.ThrowsAsync(() => base.Not_over_Or()); + + #endregion + + public override Task Contains_over_field_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_array()); + + public override Task Contains_over_field_string_List() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_List()); + + // AnyTagEqualTo not (currently) supported on MongoDB + [Obsolete("Legacy filter support")] + public override Task Legacy_AnyTagEqualTo_array() + => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_array()); + + [Obsolete("Legacy filter support")] + public override Task Legacy_AnyTagEqualTo_List() + => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_List()); +} diff --git a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBFilterFixture.cs new file mode 100644 index 000000000000..8774018ffabf --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBFilterFixture.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved.
+ +using MongoDBIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; + +namespace MongoDBIntegrationTests.Filter; + +public class MongoDBFilterFixture : FilterFixtureBase +{ + protected override TestStore TestStore => MongoDBTestStore.Instance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/MongoDBIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/MongoDBIntegrationTests.csproj new file mode 100644 index 000000000000..6aa9923ffaa2 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/MongoDBIntegrationTests.csproj @@ -0,0 +1,27 @@ + + + + net8.0;net472 + enable + enable + true + false + MongoDBIntegrationTests + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + diff --git a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Support/MongoDBTestStore.cs b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Support/MongoDBTestStore.cs new file mode 100644 index 000000000000..10ee96b890b6 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Support/MongoDBTestStore.cs @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.MongoDB; +using MongoDB.Driver; +using Testcontainers.MongoDb; +using VectorDataSpecificationTests.Support; + +namespace MongoDBIntegrationTests.Support; + +internal sealed class MongoDBTestStore : TestStore +{ + public static MongoDBTestStore Instance { get; } = new(); + + private readonly MongoDbContainer _container = new MongoDbBuilder() + .WithImage("mongodb/mongodb-atlas-local:7.0.6") + .Build(); + + public MongoClient? _client { get; private set; } + public IMongoDatabase? _database { get; private set; } + private MongoDBVectorStore? _defaultVectorStore; + + public MongoClient Client => this._client ?? throw new InvalidOperationException("Not initialized"); + public IMongoDatabase Database => this._database ?? throw new InvalidOperationException("Not initialized"); + + public override IVectorStore DefaultVectorStore => this._defaultVectorStore ?? throw new InvalidOperationException("Not initialized"); + + public MongoDBVectorStore GetVectorStore(MongoDBVectorStoreOptions options) + => new(this.Database, options); + + private MongoDBTestStore() + { + } + + protected override async Task StartAsync() + { + await this._container.StartAsync(); + + this._client = new MongoClient(new MongoClientSettings + { + Server = new MongoServerAddress(this._container.Hostname, this._container.GetMappedPublicPort(MongoDbBuilder.MongoDbPort)), + DirectConnection = true, + // ReadConcern = ReadConcern.Linearizable, + // WriteConcern = WriteConcern.WMajority + }); + this._database = this._client.GetDatabase("VectorSearchTests"); + this._defaultVectorStore = new(this._database); + } + + protected override Task StopAsync() + => this._container.StopAsync(); +} diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicFilterTests.cs new file mode 100644 index 000000000000..4fad76458700 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicFilterTests.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using VectorDataSpecificationTests.Filter; +using Xunit; +using Xunit.Sdk; + +namespace PostgresIntegrationTests.Filter; + +public class PostgresBasicFilterTests(PostgresFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture +{ + public override async Task Not_over_Or() + { + // Test sends: WHERE (NOT (("Int" = 8) OR ("String" = 'foo'))) + // There's a NULL string in the database, and relational null semantics in conjunction with negation makes the default implementation fail. + await Assert.ThrowsAsync(() => base.Not_over_Or()); + + // Compensate by adding a null check: + await this.TestFilterAsync(r => r.String != null && !(r.Int == 8 || r.String == "foo")); + } + + public override async Task NotEqual_with_string() + { + // As above, null semantics + negation + await Assert.ThrowsAsync(() => base.NotEqual_with_string()); + + await this.TestFilterAsync(r => r.String != null && r.String != "foo"); + } + + [Obsolete("Legacy filter support")] + public override Task Legacy_AnyTagEqualTo_array() + => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_array()); +} diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresFilterFixture.cs new file mode 100644 index 000000000000..c65b37177003 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresFilterFixture.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using PostgresIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; + +namespace PostgresIntegrationTests.Filter; + +public class PostgresFilterFixture : FilterFixtureBase +{ + protected override TestStore TestStore => PostgresTestStore.Instance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/PostgresIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/PostgresIntegrationTests.csproj new file mode 100644 index 000000000000..0a039793dc49 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/PostgresIntegrationTests.csproj @@ -0,0 +1,27 @@ + + + + net8.0;net472 + enable + enable + true + false + PostgresIntegrationTests + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresTestStore.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresTestStore.cs new file mode 100644 index 000000000000..1d4c540c216a --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresTestStore.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.Postgres; +using Npgsql; +using Testcontainers.PostgreSql; +using VectorDataSpecificationTests.Support; + +namespace PostgresIntegrationTests.Support; + +#pragma warning disable SKEXP0020 + +internal sealed class PostgresTestStore : TestStore +{ + public static PostgresTestStore Instance { get; } = new(); + + private static readonly PostgreSqlContainer s_container = new PostgreSqlBuilder() + .WithImage("pgvector/pgvector:pg16") + .Build(); + + private NpgsqlDataSource? _dataSource; + private PostgresVectorStore? _defaultVectorStore; + + public NpgsqlDataSource DataSource => this._dataSource ?? 
throw new InvalidOperationException("Not initialized"); + + public override IVectorStore DefaultVectorStore => this._defaultVectorStore ?? throw new InvalidOperationException("Not initialized"); + + public PostgresVectorStore GetVectorStore(PostgresVectorStoreOptions options) + => new(this.DataSource, options); + + private PostgresTestStore() + { + } + + protected override async Task StartAsync() + { + await s_container.StartAsync(); + + var dataSourceBuilder = new NpgsqlDataSourceBuilder + { + ConnectionStringBuilder = + { + Host = s_container.Hostname, + Port = s_container.GetMappedPublicPort(5432), + Username = PostgreSqlBuilder.DefaultUsername, + Password = PostgreSqlBuilder.DefaultPassword, + Database = PostgreSqlBuilder.DefaultDatabase + } + }; + + dataSourceBuilder.UseVector(); + + this._dataSource = dataSourceBuilder.Build(); + + await using var connection = this._dataSource.CreateConnection(); + await connection.OpenAsync(); + using var command = new NpgsqlCommand("CREATE EXTENSION IF NOT EXISTS vector", connection); + await command.ExecuteNonQueryAsync(); + await connection.ReloadTypesAsync(); + + this._defaultVectorStore = new(this._dataSource); + } + + protected override async Task StopAsync() + { + await this._dataSource!.DisposeAsync(); + await s_container.StopAsync(); + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicFilterTests.cs new file mode 100644 index 000000000000..11593833dddf --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicFilterTests.cs @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Filter; +using Xunit; + +namespace QdrantIntegrationTests.Filter; + +public class QdrantBasicFilterTests(QdrantFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture; diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantFilterFixture.cs new file mode 100644 index 000000000000..8c8a6528b4f8 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantFilterFixture.cs @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using QdrantIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; + +namespace QdrantIntegrationTests.Filter; + +public class QdrantFilterFixture : FilterFixtureBase +{ + protected override TestStore TestStore => QdrantTestStore.Instance; + + // Qdrant doesn't support the default Flat index kind + protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.Hnsw; +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/QdrantIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/QdrantIntegrationTests.csproj new file mode 100644 index 000000000000..0ea8db51c21d --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/QdrantIntegrationTests.csproj @@ -0,0 +1,27 @@ + + + + net8.0;net472 + enable + enable + true + false + QdrantIntegrationTests + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantTestStore.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantTestStore.cs new file mode 100644 index 000000000000..3537cf8c64e9 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantTestStore.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.Qdrant; +using Qdrant.Client; +using QdrantIntegrationTests.Support.TestContainer; +using VectorDataSpecificationTests.Support; + +namespace QdrantIntegrationTests.Support; + +#pragma warning disable CA1001 // Type owns disposable fields but is not disposable + +internal sealed class QdrantTestStore : TestStore +{ + public static QdrantTestStore Instance { get; } = new(); + + private readonly QdrantContainer _container = new QdrantBuilder().Build(); + private QdrantClient? _client; + private QdrantVectorStore? _defaultVectorStore; + + public QdrantClient Client => this._client ?? throw new InvalidOperationException("Not initialized"); + + public override IVectorStore DefaultVectorStore => this._defaultVectorStore ?? throw new InvalidOperationException("Not initialized"); + + public QdrantVectorStore GetVectorStore(QdrantVectorStoreOptions options) + => new(this.Client, options); + + private QdrantTestStore() + { + } + + protected override async Task StartAsync() + { + await this._container.StartAsync(); + this._client = new QdrantClient(this._container.Hostname, this._container.GetMappedPublicPort(QdrantBuilder.QdrantGrpcPort)); + this._defaultVectorStore = new(this._client); + } + + protected override Task StopAsync() + => this._container.StopAsync(); +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/TestContainer/QdrantBuilder.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/TestContainer/QdrantBuilder.cs new file mode 100644 index 000000000000..a3444a9f0ee5 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/TestContainer/QdrantBuilder.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Docker.DotNet.Models; +using DotNet.Testcontainers.Builders; +using DotNet.Testcontainers.Configurations; +using Qdrant.Client.Grpc; + +namespace QdrantIntegrationTests.Support.TestContainer; + +public sealed class QdrantBuilder : ContainerBuilder +{ + public const string QdrantImage = "qdrant/qdrant:" + QdrantGrpcClient.QdrantVersion; + + public const ushort QdrantHttpPort = 6333; + + public const ushort QdrantGrpcPort = 6334; + + public QdrantBuilder() : this(new QdrantConfiguration()) => this.DockerResourceConfiguration = this.Init().DockerResourceConfiguration; + + private QdrantBuilder(QdrantConfiguration dockerResourceConfiguration) : base(dockerResourceConfiguration) + => this.DockerResourceConfiguration = dockerResourceConfiguration; + + public QdrantBuilder WithConfigFile(string configPath) + => this.Merge(this.DockerResourceConfiguration, new QdrantConfiguration()) + .WithBindMount(configPath, "/qdrant/config/custom_config.yaml"); + + public QdrantBuilder WithCertificate(string certPath, string keyPath) + => this.Merge(this.DockerResourceConfiguration, new QdrantConfiguration()) + .WithBindMount(certPath, "/qdrant/tls/cert.pem") + .WithBindMount(keyPath, "/qdrant/tls/key.pem"); + + public override QdrantContainer Build() + { + this.Validate(); + return new QdrantContainer(this.DockerResourceConfiguration); + } + + protected override QdrantBuilder Init() + => base.Init() + .WithImage(QdrantImage) + .WithPortBinding(QdrantHttpPort, true) + .WithPortBinding(QdrantGrpcPort, true) + .WithWaitStrategy(Wait.ForUnixContainer() + .UntilMessageIsLogged(".*Actix runtime found; starting in Actix runtime.*")); + + protected override QdrantBuilder Clone(IResourceConfiguration resourceConfiguration) + => this.Merge(this.DockerResourceConfiguration, new QdrantConfiguration(resourceConfiguration)); + + protected override QdrantBuilder Merge(QdrantConfiguration oldValue, QdrantConfiguration newValue) + => new(new QdrantConfiguration(oldValue, newValue)); + + protected override QdrantConfiguration DockerResourceConfiguration { get; } + + protected override QdrantBuilder Clone(IContainerConfiguration resourceConfiguration) + => this.Merge(this.DockerResourceConfiguration, new QdrantConfiguration(resourceConfiguration)); +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/TestContainer/QdrantConfiguration.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/TestContainer/QdrantConfiguration.cs new file mode 100644 index 000000000000..219e4030c581 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/TestContainer/QdrantConfiguration.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Docker.DotNet.Models; +using DotNet.Testcontainers.Configurations; + +namespace QdrantIntegrationTests.Support.TestContainer; + +public sealed class QdrantConfiguration : ContainerConfiguration +{ + /// + /// Initializes a new instance of the class. + /// + public QdrantConfiguration() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The Docker resource configuration. + public QdrantConfiguration(IResourceConfiguration resourceConfiguration) + : base(resourceConfiguration) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The Docker resource configuration. + public QdrantConfiguration(IContainerConfiguration resourceConfiguration) + : base(resourceConfiguration) + { + } + + /// + /// Initializes a new instance of the class. 
+ /// + /// The Docker resource configuration. + public QdrantConfiguration(QdrantConfiguration resourceConfiguration) + : this(new QdrantConfiguration(), resourceConfiguration) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The old Docker resource configuration. + /// The new Docker resource configuration. + public QdrantConfiguration(QdrantConfiguration oldValue, QdrantConfiguration newValue) + : base(oldValue, newValue) + { + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/TestContainer/QdrantContainer.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/TestContainer/QdrantContainer.cs new file mode 100644 index 000000000000..f9c1ab05f1cc --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/TestContainer/QdrantContainer.cs @@ -0,0 +1,7 @@ +// Copyright (c) Microsoft. All rights reserved. + +using DotNet.Testcontainers.Containers; + +namespace QdrantIntegrationTests.Support.TestContainer; + +public class QdrantContainer(QdrantConfiguration configuration) : DockerContainer(configuration); diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs new file mode 100644 index 000000000000..d0017e3a510c --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs @@ -0,0 +1,85 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Filter; +using Xunit; +using Xunit.Sdk; + +namespace RedisIntegrationTests.Filter; + +public abstract class RedisBasicFilterTests(FilterFixtureBase fixture) : BasicFilterTestsBase(fixture) +{ + #region Equality with null + + public override Task Equal_with_null_reference_type() + => Assert.ThrowsAsync(() => base.Equal_with_null_reference_type()); + + public override Task Equal_with_null_captured() + => Assert.ThrowsAsync(() => base.Equal_with_null_captured()); + + public override Task NotEqual_with_null_reference_type() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_reference_type()); + + public override Task NotEqual_with_null_captured() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_captured()); + + #endregion + + #region Bool + + public override Task Bool() + => Assert.ThrowsAsync(() => base.Bool()); + + public override Task Not_over_bool() + => Assert.ThrowsAsync(() => base.Not_over_bool()); + + #endregion + + #region Contains + + public override Task Contains_over_inline_int_array() + => Assert.ThrowsAsync(() => base.Contains_over_inline_int_array()); + + public override Task Contains_over_inline_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_inline_string_array()); + + public override Task Contains_over_inline_string_array_with_weird_chars() + => Assert.ThrowsAsync(() => base.Contains_over_inline_string_array_with_weird_chars()); + + public override Task Contains_over_captured_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_captured_string_array()); + + #endregion +} + +public class RedisJsonCollectionBasicFilterTests(RedisJsonCollectionFilterFixture fixture) : RedisBasicFilterTests(fixture), IClassFixture; + +public class RedisHashSetCollectionBasicFilterTests(RedisHashSetCollectionFilterFixture fixture) : RedisBasicFilterTests(fixture), IClassFixture +{ + // Null values are not supported in Redis HashSet + public override Task Equal_with_null_reference_type() + =>
Assert.ThrowsAsync(() => base.Equal_with_null_reference_type()); + + public override Task Equal_with_null_captured() + => Assert.ThrowsAsync(() => base.Equal_with_null_captured()); + + public override Task NotEqual_with_null_reference_type() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_reference_type()); + + public override Task NotEqual_with_null_captured() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_captured()); + + // Array fields not supported on Redis HashSet + public override Task Contains_over_field_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_array()); + + public override Task Contains_over_field_string_List() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_List()); + + [Obsolete("Legacy filter support")] + public override Task Legacy_AnyTagEqualTo_array() + => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_array()); + + [Obsolete("Legacy filter support")] + public override Task Legacy_AnyTagEqualTo_List() + => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_List()); +} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisFilterFixture.cs new file mode 100644 index 000000000000..de751f36ca4e --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisFilterFixture.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.Redis; +using RedisIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; + +namespace RedisIntegrationTests.Filter; + +public class RedisJsonCollectionFilterFixture : FilterFixtureBase +{ + protected override TestStore TestStore => RedisTestStore.Instance; + + protected override string StoreName => "JsonCollectionFilterTests"; + + // Override to remove the bool property, which isn't (currently) supported on Redis/JSON + protected override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(bool)).ToList() + }; + + protected override IVectorStoreRecordCollection> CreateCollection() + => new RedisJsonVectorStoreRecordCollection>( + RedisTestStore.Instance.Database, + this.StoreName, + new() { VectorStoreRecordDefinition = this.GetRecordDefinition() }); +} + +public class RedisHashSetCollectionFilterFixture : FilterFixtureBase +{ + protected override TestStore TestStore => RedisTestStore.Instance; + + protected override string StoreName => "HashSetCollectionFilterTests"; + + // Override to remove the bool property, which isn't (currently) supported on Redis + protected override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = base.GetRecordDefinition().Properties.Where(p => + p.PropertyType != typeof(bool) && + p.PropertyType != typeof(string[]) && + p.PropertyType != typeof(List)).ToList() + }; + + protected override IVectorStoreRecordCollection> CreateCollection() + => new RedisHashSetVectorStoreRecordCollection>( + RedisTestStore.Instance.Database, + this.StoreName, + new() { VectorStoreRecordDefinition = this.GetRecordDefinition() }); + + protected override List> BuildTestData() + { + var testData = base.BuildTestData(); + + foreach (var record in testData) + { + // Null values are not supported in Redis hashsets + record.String ??= string.Empty; + } + + 
return testData; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/RedisIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/RedisIntegrationTests.csproj new file mode 100644 index 000000000000..5727b3b2650a --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/RedisIntegrationTests.csproj @@ -0,0 +1,27 @@ + + + + net8.0;net472 + enable + enable + true + false + RedisIntegrationTests + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisTestStore.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisTestStore.cs new file mode 100644 index 000000000000..a1dd2f02c0bc --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisTestStore.cs @@ -0,0 +1,43 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.Redis; +using StackExchange.Redis; +using Testcontainers.Redis; +using VectorDataSpecificationTests.Support; + +namespace RedisIntegrationTests.Support; + +internal sealed class RedisTestStore : TestStore +{ + public static RedisTestStore Instance { get; } = new(); + + private readonly RedisContainer _container = new RedisBuilder() + .WithImage("redis/redis-stack") + .Build(); + + private IDatabase? _database; + private RedisVectorStore? _defaultVectorStore; + + public IDatabase Database => this._database ?? throw new InvalidOperationException("Not initialized"); + + public override IVectorStore DefaultVectorStore => this._defaultVectorStore ?? throw new InvalidOperationException("Not initialized"); + + public RedisVectorStore GetVectorStore(RedisVectorStoreOptions options) + => new(this.Database, options); + + private RedisTestStore() + { + } + + protected override async Task StartAsync() + { + await this._container.StartAsync(); + var redis = await ConnectionMultiplexer.ConnectAsync($"{this._container.Hostname}:{this._container.GetMappedPublicPort(6379)},connectTimeout=60000,connectRetry=5"); + this._database = redis.GetDatabase(); + this._defaultVectorStore = new(this._database); + } + + protected override Task StopAsync() + => this._container.StopAsync(); +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs new file mode 100644 index 000000000000..9ca7878a414e --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Filter; +using Xunit; +using Xunit.Sdk; + +namespace SqliteIntegrationTests.Filter; + +public class SqliteBasicFilterTests(SqliteFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture +{ + public override async Task Not_over_Or() + { + // Test sends: WHERE (NOT (("Int" = 8) OR ("String" = 'foo'))) + // There's a NULL string in the database, and relational null semantics in conjunction with negation makes the default implementation fail. 
+ await Assert.ThrowsAsync(() => base.Not_over_Or()); + + // Compensate by adding a null check: + await this.TestFilterAsync(r => r.String != null && !(r.Int == 8 || r.String == "foo")); + } + + public override async Task NotEqual_with_string() + { + // As above, null semantics + negation + await Assert.ThrowsAsync(() => base.NotEqual_with_string()); + + await this.TestFilterAsync(r => r.String != null && r.String != "foo"); + } + + // Array fields not (currently) supported on SQLite (see #10343) + public override Task Contains_over_field_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_array()); + + // List fields not (currently) supported on SQLite (see #10343) + public override Task Contains_over_field_string_List() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_List()); + + // AnyTagEqualTo not (currently) supported on SQLite + [Obsolete("Legacy filter support")] + public override Task Legacy_AnyTagEqualTo_array() + => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_array()); + + [Obsolete("Legacy filter support")] + public override Task Legacy_AnyTagEqualTo_List() + => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_List()); +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteFilterFixture.cs new file mode 100644 index 000000000000..3dc9a0d10dad --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteFilterFixture.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using SqliteIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; + +namespace SqliteIntegrationTests.Filter; + +public class SqliteFilterFixture : FilterFixtureBase +{ + protected override TestStore TestStore => SqliteTestStore.Instance; + + protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; + + // Override to remove the string array property, which isn't (currently) supported on SQLite + protected override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(string[]) && p.PropertyType != typeof(List)).ToList() + }; +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Properties/AssemblyAttributes.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Properties/AssemblyAttributes.cs new file mode 100644 index 000000000000..89ee1c5e6025 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Properties/AssemblyAttributes.cs @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft. All rights reserved. 
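+// Applying [SqliteVecRequired] at the assembly level causes every test in this assembly to be skipped
+// (rather than failed) when the sqlite_vec extension cannot be loaded; see SqliteVecRequiredAttribute
+// and SqliteTestEnvironment for the actual check.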
+ +[assembly: SqliteIntegrationTests.Support.SqliteVecRequired] diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/SqliteIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/SqliteIntegrationTests.csproj new file mode 100644 index 000000000000..a47480e526cd --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/SqliteIntegrationTests.csproj @@ -0,0 +1,26 @@ + + + + net8.0;net472 + enable + enable + true + false + SqliteIntegrationTests + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestEnvironment.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestEnvironment.cs new file mode 100644 index 000000000000..e7dd76fb76fc --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestEnvironment.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Data; +using Microsoft.Data.Sqlite; + +namespace SqliteIntegrationTests.Support; + +internal static class SqliteTestEnvironment +{ + /// + /// SQLite extension name for vector search. + /// More information here: . + /// + private const string VectorSearchExtensionName = "vec0"; + + private static bool? s_isSqliteVecInstalled; + + internal static bool TryLoadSqliteVec(SqliteConnection connection) + { + if (!s_isSqliteVecInstalled.HasValue) + { + if (connection.State != ConnectionState.Open) + { + throw new ArgumentException("Connection must be open"); + } + + try + { + connection.LoadExtension(VectorSearchExtensionName); + s_isSqliteVecInstalled = true; + } + catch (SqliteException) + { + s_isSqliteVecInstalled = false; + } + } + + return s_isSqliteVecInstalled.Value; + } + + internal static bool IsSqliteVecInstalled + { + get + { + if (!s_isSqliteVecInstalled.HasValue) + { + using var connection = new SqliteConnection("Data Source=:memory:;"); + connection.Open(); + + s_isSqliteVecInstalled = TryLoadSqliteVec(connection); + } + + return s_isSqliteVecInstalled.Value; + } + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs new file mode 100644 index 000000000000..526eeac3b2d8 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs @@ -0,0 +1,55 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Data.Sqlite; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.Sqlite; +using VectorDataSpecificationTests.Support; + +namespace SqliteIntegrationTests.Support; + +#pragma warning disable CA1001 // Type owns disposable fields (_connection) but is not disposable + +internal sealed class SqliteTestStore : TestStore +{ + public static SqliteTestStore Instance { get; } = new(); + + private SqliteConnection? _connection; + public SqliteConnection Connection + => this._connection ?? throw new InvalidOperationException("Call InitializeAsync() first"); + + private SqliteVectorStore? _defaultVectorStore; + public override IVectorStore DefaultVectorStore + => this._defaultVectorStore ?? 
throw new InvalidOperationException("Call InitializeAsync() first"); + + private SqliteTestStore() + { + } + + protected override async Task StartAsync() + { + this._connection = new SqliteConnection("Data Source=:memory:"); + + await this.Connection.OpenAsync(); + + if (!SqliteTestEnvironment.TryLoadSqliteVec(this.Connection)) + { + this.Connection.Dispose(); + + // Note that we ignore sqlite_vec loading failures; the tests are decorated with [SqliteVecRequired], which causes + // them to be skipped if sqlite_vec isn't installed (better than an exception triggering failure here) + } + + this._defaultVectorStore = new SqliteVectorStore(this.Connection); + } + +#if NET8_0_OR_GREATER + protected override async Task StopAsync() + => await this.Connection.DisposeAsync(); +#else + protected override Task StopAsync() + { + this.Connection.Dispose(); + return Task.CompletedTask; + } +#endif +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteVecRequiredAttribute.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteVecRequiredAttribute.cs new file mode 100644 index 000000000000..9351fd679171 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteVecRequiredAttribute.cs @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Xunit; + +namespace SqliteIntegrationTests.Support; + +/// +/// Checks whether the sqlite_vec extension is properly installed, and skips the test(s) otherwise. +/// +[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class | AttributeTargets.Assembly)] +public sealed class SqliteVecRequiredAttribute : Attribute, ITestCondition +{ + public ValueTask IsMetAsync() => new(SqliteTestEnvironment.IsSqliteVecInstalled); + + public string Skip { get; set; } = "The sqlite_vec extension is not installed."; + + public string SkipReason + => this.Skip; +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTestsBase.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTestsBase.cs new file mode 100644 index 000000000000..f2022a2e7c60 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTestsBase.cs @@ -0,0 +1,283 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Linq.Expressions; +using Microsoft.Extensions.VectorData; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace VectorDataSpecificationTests.Filter; + +public abstract class BasicFilterTestsBase(FilterFixtureBase fixture) + where TKey : notnull +{ + #region Equality + + [ConditionalFact] + public virtual Task Equal_with_int() + => this.TestFilterAsync(r => r.Int == 8); + + [ConditionalFact] + public virtual Task Equal_with_string() + => this.TestFilterAsync(r => r.String == "foo"); + + [ConditionalFact] + public virtual Task Equal_with_string_containing_special_characters() + => this.TestFilterAsync(r => r.String == """with some special"characters'and\stuff"""); + + [ConditionalFact] + public virtual Task Equal_with_string_is_not_Contains() + => this.TestFilterAsync(r => r.String == "some", expectZeroResults: true); + + [ConditionalFact] + public virtual Task Equal_reversed() + => this.TestFilterAsync(r => 8 == r.Int); + + [ConditionalFact] + public virtual Task Equal_with_null_reference_type() + => this.TestFilterAsync(r => r.String == null); + + [ConditionalFact] + public virtual Task Equal_with_null_captured() + { + string? s = null; + + return this.TestFilterAsync(r => r.String == s); + } + + [ConditionalFact] + public virtual Task NotEqual_with_int() + => this.TestFilterAsync(r => r.Int != 8); + + [ConditionalFact] + public virtual Task NotEqual_with_string() + => this.TestFilterAsync(r => r.String != "foo"); + + [ConditionalFact] + public virtual Task NotEqual_reversed() + => this.TestFilterAsync(r => r.Int != 8); + + [ConditionalFact] + public virtual Task NotEqual_with_null_reference_type() + => this.TestFilterAsync(r => r.String != null); + + [ConditionalFact] + public virtual Task NotEqual_with_null_captured() + { + string? 
s = null; + + return this.TestFilterAsync(r => r.String != s); + } + + [ConditionalFact] + public virtual Task Bool() + => this.TestFilterAsync(r => r.Bool); + + #endregion Equality + + #region Comparison + + [ConditionalFact] + public virtual Task GreaterThan_with_int() + => this.TestFilterAsync(r => r.Int > 9); + + [ConditionalFact] + public virtual Task GreaterThanOrEqual_with_int() + => this.TestFilterAsync(r => r.Int >= 9); + + [ConditionalFact] + public virtual Task LessThan_with_int() + => this.TestFilterAsync(r => r.Int < 10); + + [ConditionalFact] + public virtual Task LessThanOrEqual_with_int() + => this.TestFilterAsync(r => r.Int <= 10); + + #endregion Comparison + + #region Logical operators + + [ConditionalFact] + public virtual Task And() + => this.TestFilterAsync(r => r.Int == 8 && r.String == "foo"); + + [ConditionalFact] + public virtual Task Or() + => this.TestFilterAsync(r => r.Int == 8 || r.String == "foo"); + + [ConditionalFact] + public virtual Task And_within_And() + => this.TestFilterAsync(r => (r.Int == 8 && r.String == "foo") && r.Int2 == 80); + + [ConditionalFact] + public virtual Task And_within_Or() + => this.TestFilterAsync(r => (r.Int == 8 && r.String == "foo") || r.Int2 == 100); + + [ConditionalFact] + public virtual Task Or_within_And() + => this.TestFilterAsync(r => (r.Int == 8 || r.Int == 9) && r.String == "foo"); + + [ConditionalFact] + public virtual Task Not_over_Equal() + // ReSharper disable once NegativeEqualityExpression + => this.TestFilterAsync(r => !(r.Int == 8)); + + [ConditionalFact] + public virtual Task Not_over_NotEqual() + // ReSharper disable once NegativeEqualityExpression + => this.TestFilterAsync(r => !(r.Int != 8)); + + [ConditionalFact] + public virtual Task Not_over_And() + => this.TestFilterAsync(r => !(r.Int == 8 && r.String == "foo")); + + [ConditionalFact] + public virtual Task Not_over_Or() + => this.TestFilterAsync(r => !(r.Int == 8 || r.String == "foo")); + + [ConditionalFact] + public virtual Task Not_over_bool() + => this.TestFilterAsync(r => !r.Bool); + + #endregion Logical operators + + #region Contains + + [ConditionalFact] + public virtual Task Contains_over_field_string_array() + => this.TestFilterAsync(r => r.StringArray.Contains("x")); + + [ConditionalFact] + public virtual Task Contains_over_field_string_List() + => this.TestFilterAsync(r => r.StringList.Contains("x")); + + [ConditionalFact] + public virtual Task Contains_over_inline_int_array() + => this.TestFilterAsync(r => new[] { 8, 10 }.Contains(r.Int)); + + [ConditionalFact] + public virtual Task Contains_over_inline_string_array() + => this.TestFilterAsync(r => new[] { "foo", "baz", "unknown" }.Contains(r.String)); + + [ConditionalFact] + public virtual Task Contains_over_inline_string_array_with_weird_chars() + => this.TestFilterAsync(r => new[] { "foo", "baz", "un , ' \"" }.Contains(r.String)); + + [ConditionalFact] + public virtual Task Contains_over_captured_string_array() + { + var array = new[] { "foo", "baz", "unknown" }; + + return this.TestFilterAsync(r => array.Contains(r.String)); + } + + #endregion Contains + + [ConditionalFact] + public virtual Task Captured_variable() + { + // ReSharper disable once ConvertToConstant.Local + var i = 8; + + return this.TestFilterAsync(r => r.Int == i); + } + + #region Legacy filter support + + [ConditionalFact] + [Obsolete("Legacy filter support")] + public virtual Task Legacy_equality() + => this.TestLegacyFilterAsync( + new VectorSearchFilter().EqualTo("Int", 8), + r => r.Int == 8); + + [ConditionalFact] + 
[Obsolete("Legacy filter support")] + public virtual Task Legacy_And() + => this.TestLegacyFilterAsync( + new VectorSearchFilter().EqualTo("Int", 8).EqualTo("String", "foo"), + r => r.Int == 8); + + [ConditionalFact] + [Obsolete("Legacy filter support")] + public virtual Task Legacy_AnyTagEqualTo_array() + => this.TestLegacyFilterAsync( + new VectorSearchFilter().AnyTagEqualTo("StringArray", "x"), + r => r.StringArray.Contains("x")); + + [ConditionalFact] + [Obsolete("Legacy filter support")] + public virtual Task Legacy_AnyTagEqualTo_List() + => this.TestLegacyFilterAsync( + new VectorSearchFilter().AnyTagEqualTo("StringList", "x"), + r => r.StringArray.Contains("x")); + + #endregion Legacy filter support + + protected virtual async Task TestFilterAsync( + Expression, bool>> filter, + bool expectZeroResults = false, + bool expectAllResults = false) + { + var expected = fixture.TestData.AsQueryable().Where(filter).OrderBy(r => r.Key).ToList(); + + if (expected.Count == 0 && !expectZeroResults) + { + Assert.Fail("The test returns zero results, and so is unreliable"); + } + + if (expected.Count == fixture.TestData.Count && !expectAllResults) + { + Assert.Fail("The test returns all results, and so is unreliable"); + } + + var results = await fixture.Collection.VectorizedSearchAsync( + new ReadOnlyMemory([1, 2, 3]), + new() + { + NewFilter = filter, + Top = fixture.TestData.Count + }); + + var actual = await results.Results.Select(r => r.Record).OrderBy(r => r.Key).ToListAsync(); + + Assert.Equal(expected, actual, (e, a) => + e.Int == a.Int && + e.String == a.String && + e.Int2 == a.Int2); + } + + [Obsolete("Legacy filter support")] + protected virtual async Task TestLegacyFilterAsync( + VectorSearchFilter legacyFilter, + Expression, bool>> expectedFilter, + bool expectZeroResults = false, + bool expectAllResults = false) + { + var expected = fixture.TestData.AsQueryable().Where(expectedFilter).OrderBy(r => r.Key).ToList(); + + if (expected.Count == 0 && !expectZeroResults) + { + Assert.Fail("The test returns zero results, and so is unreliable"); + } + + if (expected.Count == fixture.TestData.Count && !expectAllResults) + { + Assert.Fail("The test returns all results, and so is unreliable"); + } + + var results = await fixture.Collection.VectorizedSearchAsync( + new ReadOnlyMemory([1, 2, 3]), + new() + { + Filter = legacyFilter, + Top = fixture.TestData.Count + }); + + var actual = await results.Results.Select(r => r.Record).OrderBy(r => r.Key).ToListAsync(); + + Assert.Equal(expected, actual, (e, a) => + e.Int == a.Int && + e.String == a.String && + e.Int2 == a.Int2); + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/FilterFixtureBase.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/FilterFixtureBase.cs new file mode 100644 index 000000000000..436d1453d552 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/FilterFixtureBase.cs @@ -0,0 +1,191 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Globalization; +using Microsoft.Extensions.VectorData; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace VectorDataSpecificationTests.Filter; + +public abstract class FilterFixtureBase : IAsyncLifetime + where TKey : notnull +{ + private int _nextKeyValue = 1; + private List>? 
_testData; + + protected virtual string StoreName => "FilterTests"; + + protected abstract TestStore TestStore { get; } + + protected virtual string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineSimilarity; + protected virtual string IndexKind => Microsoft.Extensions.VectorData.IndexKind.Flat; + + protected virtual IVectorStoreRecordCollection> CreateCollection() + => this.TestStore.DefaultVectorStore.GetCollection>(this.StoreName, this.GetRecordDefinition()); + + public virtual async Task InitializeAsync() + { + await this.TestStore.ReferenceCountingStartAsync(); + + this.Collection = this.CreateCollection(); + + if (await this.Collection.CollectionExistsAsync()) + { + await this.Collection.DeleteCollectionAsync(); + } + + await this.Collection.CreateCollectionAsync(); + await this.SeedAsync(); + + // Some databases upsert asynchronously, meaning that our seed data may not be visible immediately to tests. + // Check and loop until it is. + for (var i = 0; i < 20; i++) + { + var results = await this.Collection.VectorizedSearchAsync( + new ReadOnlyMemory([1, 2, 3]), + new() + { + Top = this.TestData.Count, + NewFilter = r => r.Int > 0 + }); + var count = await results.Results.CountAsync(); + if (count == this.TestData.Count) + { + break; + } + + await Task.Delay(TimeSpan.FromMilliseconds(100)); + } + } + + protected virtual VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = + [ + new VectorStoreRecordKeyProperty(nameof(FilterRecord.Key), typeof(TKey)), + new VectorStoreRecordVectorProperty(nameof(FilterRecord.Vector), typeof(ReadOnlyMemory?)) + { + Dimensions = 3, + DistanceFunction = this.DistanceFunction, + IndexKind = this.IndexKind + }, + + new VectorStoreRecordDataProperty(nameof(FilterRecord.Int), typeof(int)) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(FilterRecord.String), typeof(string)) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(FilterRecord.Bool), typeof(bool)) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(FilterRecord.Int2), typeof(int)) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(FilterRecord.StringArray), typeof(string[])) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(FilterRecord.StringList), typeof(List)) { IsFilterable = true } + ] + }; + + public virtual IVectorStoreRecordCollection> Collection { get; private set; } = null!; + + public List> TestData => this._testData ??= this.BuildTestData(); + + protected virtual List> BuildTestData() + { + // All records have the same vector - this fixture is about testing criteria filtering only + var vector = new ReadOnlyMemory([1, 2, 3]); + + return + [ + new() + { + Key = this.GenerateNextKey(), + Int = 8, + String = "foo", + Bool = true, + Int2 = 80, + StringArray = ["x", "y"], + StringList = ["x", "y"], + Vector = vector + }, + new() + { + Key = this.GenerateNextKey(), + Int = 9, + String = "bar", + Bool = false, + Int2 = 90, + StringArray = ["a", "b"], + StringList = ["a", "b"], + Vector = vector + }, + new() + { + Key = this.GenerateNextKey(), + Int = 9, + String = "foo", + Bool = true, + Int2 = 9, + StringArray = ["x"], + StringList = ["x"], + Vector = vector + }, + new() + { + Key = this.GenerateNextKey(), + Int = 10, + String = null, + Bool = false, + Int2 = 100, + StringArray = ["x", "y", "z"], + StringList = ["x", "y", "z"], + Vector = vector + }, + new() + { + Key = this.GenerateNextKey(), + Int = 11, + Bool = true, + String = """with some 
special"characters'and\stuff""", + Int2 = 101, + StringArray = ["y", "z"], + StringList = ["y", "z"], + Vector = vector + } + ]; + } + + protected virtual async Task SeedAsync() + { + // TODO: UpsertBatchAsync returns IAsyncEnumerable (to support server-generated keys?), but this makes it quite hard to use: + await foreach (var _ in this.Collection.UpsertBatchAsync(this.TestData)) + { + } + } + + protected virtual TKey GenerateNextKey() + => typeof(TKey) switch + { + _ when typeof(TKey) == typeof(int) => (TKey)(object)this._nextKeyValue++, + _ when typeof(TKey) == typeof(long) => (TKey)(object)(long)this._nextKeyValue++, + _ when typeof(TKey) == typeof(ulong) => (TKey)(object)(ulong)this._nextKeyValue++, + _ when typeof(TKey) == typeof(string) => (TKey)(object)(this._nextKeyValue++).ToString(CultureInfo.InvariantCulture), + _ when typeof(TKey) == typeof(Guid) => (TKey)(object)new Guid($"00000000-0000-0000-0000-00{this._nextKeyValue++:0000000000}"), + + _ => throw new NotSupportedException($"Unsupported key of type '{typeof(TKey).Name}', override {nameof(this.GenerateNextKey)}") + }; + + public virtual Task DisposeAsync() + => this.TestStore.ReferenceCountingStopAsync(); +} + +#pragma warning disable CS1819 // Properties should not return arrays +#pragma warning disable CA1819 // Properties should not return arrays +public class FilterRecord +{ + public TKey Key { get; set; } = default!; + public ReadOnlyMemory? Vector { get; set; } + + public int Int { get; set; } + public string? String { get; set; } + public bool Bool { get; set; } + public int Int2 { get; set; } + public string[] StringArray { get; set; } = null!; + public List StringList { get; set; } = null!; +} +#pragma warning restore CA1819 // Properties should not return arrays +#pragma warning restore CS1819 diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs new file mode 100644 index 000000000000..de7c0d252062 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.Extensions.VectorData; + +namespace VectorDataSpecificationTests.Support; + +#pragma warning disable CA1001 // Type owns disposable fields but is not disposable + +public abstract class TestStore +{ + private readonly SemaphoreSlim _lock = new(1, 1); + private int _referenceCount; + + protected abstract Task StartAsync(); + + protected virtual Task StopAsync() + => Task.CompletedTask; + + public virtual async Task ReferenceCountingStartAsync() + { + await this._lock.WaitAsync(); + try + { + if (this._referenceCount++ == 0) + { + await this.StartAsync(); + } + } + finally + { + this._lock.Release(); + } + } + + public virtual async Task ReferenceCountingStopAsync() + { + await this._lock.WaitAsync(); + try + { + if (--this._referenceCount == 0) + { + await this.StopAsync(); + } + } + finally + { + this._lock.Release(); + } + } + + public abstract IVectorStore DefaultVectorStore { get; } +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorDataIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorDataIntegrationTests.csproj new file mode 100644 index 000000000000..77fc8e90dbb2 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorDataIntegrationTests.csproj @@ -0,0 +1,24 @@ + + + + net8.0;net472 + enable + enable + false + VectorDataSpecificationTests + + + + + + + + + + + + + + + + diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalFactAttribute.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalFactAttribute.cs new file mode 100644 index 000000000000..d4d93c8b5035 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalFactAttribute.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Xunit; +using Xunit.Sdk; + +namespace VectorDataSpecificationTests.Xunit; + +[AttributeUsage(AttributeTargets.Method)] +[XunitTestCaseDiscoverer("VectorDataSpecificationTests.Xunit.ConditionalFactDiscoverer", "VectorDataIntegrationTests")] +public sealed class ConditionalFactAttribute : FactAttribute; diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalFactDiscoverer.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalFactDiscoverer.cs new file mode 100644 index 000000000000..1fbeafd3dd1c --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalFactDiscoverer.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Xunit.Abstractions; +using Xunit.Sdk; + +namespace VectorDataSpecificationTests.Xunit; + +/// +/// Used dynamically from . +/// Make sure to update that class if you move this type. 
+/// +public class ConditionalFactDiscoverer(IMessageSink messageSink) : FactDiscoverer(messageSink) +{ + protected override IXunitTestCase CreateTestCase( + ITestFrameworkDiscoveryOptions discoveryOptions, + ITestMethod testMethod, + IAttributeInfo factAttribute) + => new ConditionalFactTestCase( + this.DiagnosticMessageSink, + discoveryOptions.MethodDisplayOrDefault(), + discoveryOptions.MethodDisplayOptionsOrDefault(), + testMethod); +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalFactTestCase.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalFactTestCase.cs new file mode 100644 index 000000000000..3dea216a1084 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalFactTestCase.cs @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Xunit.Abstractions; +using Xunit.Sdk; + +namespace VectorDataSpecificationTests.Xunit; + +public sealed class ConditionalFactTestCase : XunitTestCase +{ + [Obsolete("Called by the de-serializer; should only be called by deriving classes for de-serialization purposes")] + public ConditionalFactTestCase() + { + } + + public ConditionalFactTestCase( + IMessageSink diagnosticMessageSink, + TestMethodDisplay defaultMethodDisplay, + TestMethodDisplayOptions defaultMethodDisplayOptions, + ITestMethod testMethod, + object[]? testMethodArguments = null) + : base(diagnosticMessageSink, defaultMethodDisplay, defaultMethodDisplayOptions, testMethod, testMethodArguments) + { + } + + public override async Task RunAsync( + IMessageSink diagnosticMessageSink, + IMessageBus messageBus, + object[] constructorArguments, + ExceptionAggregator aggregator, + CancellationTokenSource cancellationTokenSource) + => await XunitTestCaseExtensions.TrySkipAsync(this, messageBus) + ? new RunSummary { Total = 1, Skipped = 1 } + : await base.RunAsync( + diagnosticMessageSink, + messageBus, + constructorArguments, + aggregator, + cancellationTokenSource); +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryAttribute.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryAttribute.cs new file mode 100644 index 000000000000..529f42ef1310 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryAttribute.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Xunit; +using Xunit.Sdk; + +namespace VectorDataSpecificationTests.Xunit; + +[AttributeUsage(AttributeTargets.Method)] +[XunitTestCaseDiscoverer("VectorDataSpecificationTests.Xunit.VectorStoreFactDiscoverer", "VectorDataIntegrationTests")] +public sealed class ConditionalTheoryAttribute : TheoryAttribute; diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ITestCondition.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ITestCondition.cs new file mode 100644 index 000000000000..deca7716fb1a --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ITestCondition.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. 
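+// Test-condition abstraction consumed by the [ConditionalFact]/[ConditionalTheory] infrastructure:
+// XunitTestCaseExtensions.TrySkipAsync collects ITestCondition attributes from the assembly, class and method,
+// and skips the test (reporting SkipReason) when any IsMetAsync() returns false.
+// SqliteVecRequiredAttribute is one concrete implementation; a minimal hypothetical condition (illustration
+// only, not part of this change) could look like:
+//
+//     [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class | AttributeTargets.Assembly)]
+//     public sealed class EnvVarRequiredAttribute(string name) : Attribute, ITestCondition
+//     {
+//         public ValueTask<bool> IsMetAsync()
+//             => new(Environment.GetEnvironmentVariable(name) is not null);
+//
+//         public string SkipReason => $"Environment variable '{name}' is not set.";
+//     }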
+ +namespace VectorDataSpecificationTests.Xunit; + +public interface ITestCondition +{ + ValueTask IsMetAsync(); + + string SkipReason { get; } +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/XunitTestCaseExtensions.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/XunitTestCaseExtensions.cs new file mode 100644 index 000000000000..2cf37205ead4 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/XunitTestCaseExtensions.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Concurrent; +using Xunit.Abstractions; +using Xunit.Sdk; + +namespace VectorDataSpecificationTests.Xunit; + +public static class XunitTestCaseExtensions +{ + private static readonly ConcurrentDictionary> s_typeAttributes = new(); + private static readonly ConcurrentDictionary> s_assemblyAttributes = new(); + + public static async ValueTask TrySkipAsync(XunitTestCase testCase, IMessageBus messageBus) + { + var method = testCase.Method; + var type = testCase.TestMethod.TestClass.Class; + var assembly = type.Assembly; + + var skipReasons = new List(); + var attributes = + s_assemblyAttributes.GetOrAdd( + assembly.Name, + a => assembly.GetCustomAttributes(typeof(ITestCondition)).ToList()) + .Concat( + s_typeAttributes.GetOrAdd( + type.Name, + t => type.GetCustomAttributes(typeof(ITestCondition)).ToList())) + .Concat(method.GetCustomAttributes(typeof(ITestCondition))) + .OfType() + .Select(attributeInfo => (ITestCondition)attributeInfo.Attribute); + + foreach (var attribute in attributes) + { + if (!await attribute.IsMetAsync()) + { + skipReasons.Add(attribute.SkipReason); + } + } + + if (skipReasons.Count > 0) + { + messageBus.QueueMessage( + new TestSkipped(new XunitTest(testCase, testCase.DisplayName), string.Join(Environment.NewLine, skipReasons))); + + return true; + } + + return false; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs new file mode 100644 index 000000000000..2880d1b93859 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs @@ -0,0 +1,62 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using VectorDataSpecificationTests.Filter; +using Xunit; +using Xunit.Sdk; + +namespace WeaviateIntegrationTests.Filter; + +public class WeaviateBasicFilterTests(WeaviateFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture +{ + #region Filter by null + + // Null-state indexing needs to be set up, but that's not supported yet (#10358). + // We could interact with Weaviate directly (not via the abstraction) to do this. 
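+    // A possible workaround (not attempted here) would be to create the Weaviate class directly over HTTP with
+    // null-state indexing enabled in its inverted index configuration (e.g. something like
+    // "invertedIndexConfig": { "indexNullState": true } in the schema request). That property name is an
+    // assumption and hasn't been verified against the Weaviate version used by these tests.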
+ + public override Task Equal_with_null_reference_type() + => Assert.ThrowsAsync(() => base.Equal_with_null_reference_type()); + + public override Task Equal_with_null_captured() + => Assert.ThrowsAsync(() => base.Equal_with_null_captured()); + + public override Task NotEqual_with_null_captured() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_captured()); + + public override Task NotEqual_with_null_reference_type() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_reference_type()); + + #endregion + + #region Not + + // Weaviate currently doesn't support NOT (https://github.com/weaviate/weaviate/issues/3683) + public override Task Not_over_And() + => Assert.ThrowsAsync(() => base.Not_over_And()); + + public override Task Not_over_Or() + => Assert.ThrowsAsync(() => base.Not_over_Or()); + + #endregion + + #region Unsupported Contains scenarios + + public override Task Contains_over_captured_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_captured_string_array()); + + public override Task Contains_over_inline_int_array() + => Assert.ThrowsAsync(() => base.Contains_over_inline_int_array()); + + public override Task Contains_over_inline_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_inline_int_array()); + + public override Task Contains_over_inline_string_array_with_weird_chars() + => Assert.ThrowsAsync(() => base.Contains_over_inline_string_array_with_weird_chars()); + + #endregion + + // In Weaviate, string equality on multi-word textual properties depends on tokenization + // (https://weaviate.io/developers/weaviate/api/graphql/filters#multi-word-queries-in-equal-filters) + public override Task Equal_with_string_is_not_Contains() + => Assert.ThrowsAsync(() => base.Equal_with_string_is_not_Contains()); +} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateFilterFixture.cs new file mode 100644 index 000000000000..f00b884780c2 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateFilterFixture.cs @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using WeaviateIntegrationTests.Support; + +namespace WeaviateIntegrationTests.Filter; + +public class WeaviateFilterFixture : FilterFixtureBase +{ + protected override TestStore TestStore => WeaviateTestStore.Instance; + + protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateBuilder.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateBuilder.cs new file mode 100644 index 000000000000..1745a902a348 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateBuilder.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Docker.DotNet.Models; +using DotNet.Testcontainers.Builders; +using DotNet.Testcontainers.Configurations; + +namespace WeaviateIntegrationTests.Support.TestContainer; + +public sealed class WeaviateBuilder : ContainerBuilder +{ + public const string WeaviateImage = "semitechnologies/weaviate:1.26.4"; + public const ushort WeaviateHttpPort = 8080; + public const ushort WeaviateGrpcPort = 50051; + + public WeaviateBuilder() : this(new WeaviateConfiguration()) => this.DockerResourceConfiguration = this.Init().DockerResourceConfiguration; + + private WeaviateBuilder(WeaviateConfiguration dockerResourceConfiguration) : base(dockerResourceConfiguration) + => this.DockerResourceConfiguration = dockerResourceConfiguration; + + public override WeaviateContainer Build() + { + this.Validate(); + return new WeaviateContainer(this.DockerResourceConfiguration); + } + + protected override WeaviateBuilder Init() + => base.Init() + .WithImage(WeaviateImage) + .WithPortBinding(WeaviateHttpPort, true) + .WithPortBinding(WeaviateGrpcPort, true) + .WithEnvironment("AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED", "true") + .WithEnvironment("PERSISTENCE_DATA_PATH", "/var/lib/weaviate") + .WithWaitStrategy(Wait.ForUnixContainer() + .UntilPortIsAvailable(WeaviateHttpPort) + .UntilPortIsAvailable(WeaviateGrpcPort) + .UntilHttpRequestIsSucceeded(r => r.ForPath("/v1/.well-known/ready").ForPort(WeaviateHttpPort))); + + protected override WeaviateBuilder Clone(IResourceConfiguration resourceConfiguration) + => this.Merge(this.DockerResourceConfiguration, new WeaviateConfiguration(resourceConfiguration)); + + protected override WeaviateBuilder Merge(WeaviateConfiguration oldValue, WeaviateConfiguration newValue) + => new(new WeaviateConfiguration(oldValue, newValue)); + + protected override WeaviateConfiguration DockerResourceConfiguration { get; } + + protected override WeaviateBuilder Clone(IContainerConfiguration resourceConfiguration) + => this.Merge(this.DockerResourceConfiguration, new WeaviateConfiguration(resourceConfiguration)); +} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateConfiguration.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateConfiguration.cs new file mode 100644 index 000000000000..56ea40b242e7 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateConfiguration.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Docker.DotNet.Models; +using DotNet.Testcontainers.Configurations; + +namespace WeaviateIntegrationTests.Support.TestContainer; + +public sealed class WeaviateConfiguration : ContainerConfiguration +{ + /// + /// Initializes a new instance of the class. + /// + public WeaviateConfiguration() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The Docker resource configuration. + public WeaviateConfiguration(IResourceConfiguration resourceConfiguration) + : base(resourceConfiguration) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The Docker resource configuration. + public WeaviateConfiguration(IContainerConfiguration resourceConfiguration) + : base(resourceConfiguration) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The Docker resource configuration. 
+ public WeaviateConfiguration(WeaviateConfiguration resourceConfiguration) + : this(new WeaviateConfiguration(), resourceConfiguration) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The old Docker resource configuration. + /// The new Docker resource configuration. + public WeaviateConfiguration(WeaviateConfiguration oldValue, WeaviateConfiguration newValue) + : base(oldValue, newValue) + { + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateContainer.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateContainer.cs new file mode 100644 index 000000000000..c209d662a4d4 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateContainer.cs @@ -0,0 +1,7 @@ +// Copyright (c) Microsoft. All rights reserved. + +using DotNet.Testcontainers.Containers; + +namespace WeaviateIntegrationTests.Support.TestContainer; + +public class WeaviateContainer(WeaviateConfiguration configuration) : DockerContainer(configuration); diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs new file mode 100644 index 000000000000..d112a2abfe49 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +#if NET472 +using System.Net.Http; +#endif +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.Weaviate; +using VectorDataSpecificationTests.Support; +using WeaviateIntegrationTests.Support.TestContainer; + +namespace WeaviateIntegrationTests.Support; + +public sealed class WeaviateTestStore : TestStore +{ + public static WeaviateTestStore Instance { get; } = new(); + + private readonly WeaviateContainer _container = new WeaviateBuilder().Build(); + public HttpClient? _httpClient { get; private set; } + private WeaviateVectorStore? _defaultVectorStore; + + public HttpClient Client => this._httpClient ?? throw new InvalidOperationException("Not initialized"); + + public override IVectorStore DefaultVectorStore => this._defaultVectorStore ?? 
throw new InvalidOperationException("Not initialized"); + + public WeaviateVectorStore GetVectorStore(WeaviateVectorStoreOptions options) + => new(this.Client, options); + + private WeaviateTestStore() + { + } + + protected override async Task StartAsync() + { + await this._container.StartAsync(); + this._httpClient = new HttpClient { BaseAddress = new Uri($"http://localhost:{this._container.GetMappedPublicPort(WeaviateBuilder.WeaviateHttpPort)}/v1/") }; + this._defaultVectorStore = new(this._httpClient); + } + + protected override Task StopAsync() + => this._container.StopAsync(); +} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/WeaviateIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/WeaviateIntegrationTests.csproj new file mode 100644 index 000000000000..eb98407f35ee --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/WeaviateIntegrationTests.csproj @@ -0,0 +1,27 @@ + + + + net8.0;net472 + enable + enable + true + false + WeaviateIntegrationTests + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + From a9abe9a76c682cc0ab0596d92766982ff4692dfc Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Tue, 18 Feb 2025 16:45:15 +0100 Subject: [PATCH 03/63] .Net: Refactor vector store integration tests (#10556) Follup-up to #10273 --- .../Step04_KernelFunctionStrategies.cs | 8 +- .../Filter/AzureAISearchBasicFilterTests.cs | 13 +- .../Filter/AzureAISearchFilterFixture.cs | 15 -- .../Filter/CosmosMongoBasicFilterTests.cs | 13 +- .../Filter/CosmosMongoFilterFixture.cs | 15 -- .../Filter/CosmosNoSQLBasicFilterTests.cs | 11 +- .../Filter/CosmosNoSQLFilterFixture.cs | 12 -- .../Filter/InMemoryBasicFilterTests.cs | 11 +- .../Filter/InMemoryFilterFixture.cs | 12 -- .../Filter/MongoDBBasicFilterTests.cs | 10 +- .../Filter/MongoDBFilterFixture.cs | 12 -- .../Filter/PostgresBasicFilterTests.cs | 10 +- .../Filter/PostgresFilterFixture.cs | 12 -- .../Filter/QdrantBasicFilterTests.cs | 14 +- .../Filter/QdrantFilterFixture.cs | 15 -- .../Filter/RedisBasicFilterTests.cs | 70 ++++++- .../Filter/RedisFilterFixture.cs | 65 ------ .../Filter/SqliteBasicFilterTests.cs | 20 +- .../Filter/SqliteFilterFixture.cs | 22 -- ...FilterTestsBase.cs => BasicFilterTests.cs} | 121 ++++++++++- .../Filter/FilterFixtureBase.cs | 191 ------------------ .../Support/TestStore.cs | 50 ++++- .../Support/VectorStoreCollectionFixture.cs | 57 ++++++ .../Support/VectorStoreFixture.cs | 24 +++ .../Filter/WeaviateBasicFilterTests.cs | 12 +- .../Filter/WeaviateFilterFixture.cs | 14 -- 26 files changed, 424 insertions(+), 405 deletions(-) delete mode 100644 dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchFilterFixture.cs delete mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoFilterFixture.cs delete mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLFilterFixture.cs delete mode 100644 dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryFilterFixture.cs delete mode 100644 dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBFilterFixture.cs delete mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresFilterFixture.cs delete mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantFilterFixture.cs delete mode 100644 
dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisFilterFixture.cs delete mode 100644 dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteFilterFixture.cs rename dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/{BasicFilterTestsBase.cs => BasicFilterTests.cs} (63%) delete mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/FilterFixtureBase.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreFixture.cs delete mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateFilterFixture.cs diff --git a/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs b/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs index 4c7930bd2533..f924793951aa 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs @@ -70,11 +70,11 @@ public async Task UseKernelFunctionStrategiesWithAgentGroupChatAsync() Determine which participant takes the next turn in a conversation based on the the most recent participant. State only the name of the participant to take the next turn. No participant should take more than one turn in a row. - + Choose only from these participants: - {{{ReviewerName}}} - {{{CopyWriterName}}} - + Always follow these rules when selecting the next participant: - After {{{CopyWriterName}}}, it is {{{ReviewerName}}}'s turn. - After {{{ReviewerName}}}, it is {{{CopyWriterName}}}'s turn. @@ -133,9 +133,9 @@ No participant should take more than one turn in a row. chat.AddChatMessage(message); this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent responese in chat.InvokeAsync()) + await foreach (ChatMessageContent response in chat.InvokeAsync()) { - this.WriteAgentChatMessage(responese); + this.WriteAgentChatMessage(response); } Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]"); diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicFilterTests.cs index 9683543d3e98..6a7e8a1df408 100644 --- a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicFilterTests.cs @@ -1,13 +1,24 @@ // Copyright (c) Microsoft. All rights reserved. 
+using AzureAISearchIntegrationTests.Support; using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; using Xunit; namespace AzureAISearchIntegrationTests.Filter; -public class AzureAISearchBasicFilterTests(AzureAISearchFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture +public class AzureAISearchBasicFilterTests(AzureAISearchBasicFilterTests.Fixture fixture) + : BasicFilterTests(fixture), IClassFixture { // Azure AI Search only supports search.in() over strings public override Task Contains_over_inline_int_array() => Assert.ThrowsAsync(() => base.Contains_over_inline_int_array()); + + public new class Fixture : BasicFilterTests.Fixture + { + public override TestStore TestStore => AzureAISearchTestStore.Instance; + + // Azure AI search only supports lowercase letters, digits or dashes. + protected override string CollectionName => "filter-tests"; + } } diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchFilterFixture.cs deleted file mode 100644 index a5ec5df341dd..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchFilterFixture.cs +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using AzureAISearchIntegrationTests.Support; -using VectorDataSpecificationTests.Filter; -using VectorDataSpecificationTests.Support; - -namespace AzureAISearchIntegrationTests.Filter; - -public class AzureAISearchFilterFixture : FilterFixtureBase -{ - protected override TestStore TestStore => AzureAISearchTestStore.Instance; - - // Azure AI search only supports lowercase letters, digits or dashes. - protected override string StoreName => "filter-tests"; -} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicFilterTests.cs index 33d14908f537..f7e52a5ec19a 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicFilterTests.cs @@ -1,12 +1,15 @@ // Copyright (c) Microsoft. All rights reserved. 
+using MongoDBIntegrationTests.Support; using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; using VectorDataSpecificationTests.Xunit; using Xunit; namespace MongoDBIntegrationTests.Filter; -public class CosmosMongoBasicFilterTests(CosmosMongoFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture +public class CosmosMongoBasicFilterTests(CosmosMongoBasicFilterTests.Fixture fixture) + : BasicFilterTests(fixture), IClassFixture { // Specialized MongoDB syntax for NOT over Contains ($nin) [ConditionalFact] @@ -56,4 +59,12 @@ public override Task Legacy_AnyTagEqualTo_array() [Obsolete("Legacy filter support")] public override Task Legacy_AnyTagEqualTo_List() => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_List()); + + public new class Fixture : BasicFilterTests.Fixture + { + public override TestStore TestStore => CosmosMongoDBTestStore.Instance; + + protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.IvfFlat; + protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; + } } diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoFilterFixture.cs deleted file mode 100644 index 129c7b0cc337..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoFilterFixture.cs +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using MongoDBIntegrationTests.Support; -using VectorDataSpecificationTests.Filter; -using VectorDataSpecificationTests.Support; - -namespace MongoDBIntegrationTests.Filter; - -public class CosmosMongoFilterFixture : FilterFixtureBase -{ - protected override TestStore TestStore => CosmosMongoDBTestStore.Instance; - - protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.IvfFlat; - protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; -} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicFilterTests.cs index b67141d82e6c..4058ea8674a7 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicFilterTests.cs @@ -1,8 +1,17 @@ // Copyright (c) Microsoft. All rights reserved. 
+using CosmosNoSQLIntegrationTests.Support; using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; using Xunit; namespace CosmosNoSQLIntegrationTests.Filter; -public class CosmosNoSQLBasicFilterTests(CosmosNoSQLFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture; +public class CosmosNoSQLBasicFilterTests(CosmosNoSQLBasicFilterTests.Fixture fixture) + : BasicFilterTests(fixture), IClassFixture +{ + public new class Fixture : BasicFilterTests.Fixture + { + public override TestStore TestStore => CosmosNoSqlTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLFilterFixture.cs deleted file mode 100644 index 8aaf6b86d4f9..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLFilterFixture.cs +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using CosmosNoSQLIntegrationTests.Support; -using VectorDataSpecificationTests.Filter; -using VectorDataSpecificationTests.Support; - -namespace CosmosNoSQLIntegrationTests.Filter; - -public class CosmosNoSQLFilterFixture : FilterFixtureBase -{ - protected override TestStore TestStore => CosmosNoSqlTestStore.Instance; -} diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicFilterTests.cs index 32adf75e9017..198178aae1a1 100644 --- a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicFilterTests.cs @@ -1,8 +1,17 @@ // Copyright (c) Microsoft. All rights reserved. +using InMemoryIntegrationTests.Support; using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; using Xunit; namespace PostgresIntegrationTests.Filter; -public class InMemoryBasicFilterTests(InMemoryFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture; +public class InMemoryBasicFilterTests(InMemoryBasicFilterTests.Fixture fixture) + : BasicFilterTests(fixture), IClassFixture +{ + public new class Fixture : BasicFilterTests.Fixture + { + public override TestStore TestStore => InMemoryTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryFilterFixture.cs deleted file mode 100644 index 7952d1dffad3..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryFilterFixture.cs +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using InMemoryIntegrationTests.Support; -using VectorDataSpecificationTests.Filter; -using VectorDataSpecificationTests.Support; - -namespace PostgresIntegrationTests.Filter; - -public class InMemoryFilterFixture : FilterFixtureBase -{ - protected override TestStore TestStore => InMemoryTestStore.Instance; -} diff --git a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicFilterTests.cs index a6ad4378f7a1..885c1503f5f7 100644 --- a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicFilterTests.cs @@ -1,12 +1,15 @@ // Copyright (c) Microsoft. All rights reserved. +using MongoDBIntegrationTests.Support; using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; using VectorDataSpecificationTests.Xunit; using Xunit; namespace MongoDBIntegrationTests.Filter; -public class MongoDBBasicFilterTests(MongoDBFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture +public class MongoDBBasicFilterTests(MongoDBBasicFilterTests.Fixture fixture) + : BasicFilterTests(fixture), IClassFixture { // Specialized MongoDB syntax for NOT over Contains ($nin) [ConditionalFact] @@ -56,4 +59,9 @@ public override Task Legacy_AnyTagEqualTo_array() [Obsolete("Legacy filter support")] public override Task Legacy_AnyTagEqualTo_List() => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_List()); + + public new class Fixture : BasicFilterTests.Fixture + { + public override TestStore TestStore => MongoDBTestStore.Instance; + } } diff --git a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBFilterFixture.cs deleted file mode 100644 index 8774018ffabf..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBFilterFixture.cs +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using MongoDBIntegrationTests.Support; -using VectorDataSpecificationTests.Filter; -using VectorDataSpecificationTests.Support; - -namespace MongoDBIntegrationTests.Filter; - -public class MongoDBFilterFixture : FilterFixtureBase -{ - protected override TestStore TestStore => MongoDBTestStore.Instance; -} diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicFilterTests.cs index 4fad76458700..955d920cbde6 100644 --- a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicFilterTests.cs @@ -1,12 +1,15 @@ // Copyright (c) Microsoft. All rights reserved. 
+using PostgresIntegrationTests.Support; using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; using Xunit; using Xunit.Sdk; namespace PostgresIntegrationTests.Filter; -public class PostgresBasicFilterTests(PostgresFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture +public class PostgresBasicFilterTests(PostgresBasicFilterTests.Fixture fixture) + : BasicFilterTests(fixture), IClassFixture { public override async Task Not_over_Or() { @@ -29,4 +32,9 @@ public override async Task NotEqual_with_string() [Obsolete("Legacy filter support")] public override Task Legacy_AnyTagEqualTo_array() => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_array()); + + public new class Fixture : BasicFilterTests.Fixture + { + public override TestStore TestStore => PostgresTestStore.Instance; + } } diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresFilterFixture.cs deleted file mode 100644 index c65b37177003..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresFilterFixture.cs +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using PostgresIntegrationTests.Support; -using VectorDataSpecificationTests.Filter; -using VectorDataSpecificationTests.Support; - -namespace PostgresIntegrationTests.Filter; - -public class PostgresFilterFixture : FilterFixtureBase -{ - protected override TestStore TestStore => PostgresTestStore.Instance; -} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicFilterTests.cs index 11593833dddf..13ed231a3207 100644 --- a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicFilterTests.cs @@ -1,8 +1,20 @@ // Copyright (c) Microsoft. All rights reserved. +using QdrantIntegrationTests.Support; using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; using Xunit; namespace QdrantIntegrationTests.Filter; -public class QdrantBasicFilterTests(QdrantFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture; +public class QdrantBasicFilterTests(QdrantBasicFilterTests.Fixture fixture) + : BasicFilterTests(fixture), IClassFixture +{ + public new class Fixture : BasicFilterTests.Fixture + { + public override TestStore TestStore => QdrantTestStore.Instance; + + // Qdrant doesn't support the default Flat index kind + protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.Hnsw; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantFilterFixture.cs deleted file mode 100644 index 8c8a6528b4f8..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantFilterFixture.cs +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using QdrantIntegrationTests.Support; -using VectorDataSpecificationTests.Filter; -using VectorDataSpecificationTests.Support; - -namespace QdrantIntegrationTests.Filter; - -public class QdrantFilterFixture : FilterFixtureBase -{ - protected override TestStore TestStore => QdrantTestStore.Instance; - - // Qdrant doesn't support the default Flat index kind - protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.Hnsw; -} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs index d0017e3a510c..978aa8d95484 100644 --- a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs @@ -1,12 +1,17 @@ // Copyright (c) Microsoft. All rights reserved. +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.Redis; +using RedisIntegrationTests.Support; using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; using Xunit; using Xunit.Sdk; namespace RedisIntegrationTests.Filter; -public abstract class RedisBasicFilterTests(FilterFixtureBase fixture) : BasicFilterTestsBase(fixture) +public abstract class RedisBasicFilterTests(BasicFilterTests.Fixture fixture) + : BasicFilterTests(fixture) { #region Equality with null @@ -51,9 +56,32 @@ public override Task Contains_over_captured_string_array() #endregion } -public class RedisJsonCollectionBasicFilterTests(RedisJsonCollectionFilterFixture fixture) : RedisBasicFilterTests(fixture), IClassFixture; +public class RedisJsonCollectionBasicFilterTests(RedisJsonCollectionBasicFilterTests.Fixture fixture) + : RedisBasicFilterTests(fixture), IClassFixture +{ + public new class Fixture : BasicFilterTests.Fixture + { + public override TestStore TestStore => RedisTestStore.Instance; + + protected override string CollectionName => "JsonCollectionFilterTests"; + + // Override to remove the bool property, which isn't (currently) supported on Redis/JSON + protected override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(bool)).ToList() + }; + + protected override IVectorStoreRecordCollection CreateCollection() + => new RedisJsonVectorStoreRecordCollection( + RedisTestStore.Instance.Database, + this.CollectionName, + new() { VectorStoreRecordDefinition = this.GetRecordDefinition() }); + } +} -public class RedisHashSetCollectionBasicFilterTests(RedisHashSetCollectionFilterFixture fixture) : RedisBasicFilterTests(fixture), IClassFixture +public class RedisHashSetCollectionBasicFilterTests(RedisHashSetCollectionBasicFilterTests.Fixture fixture) + : RedisBasicFilterTests(fixture), IClassFixture { // Null values are not supported in Redis HashSet public override Task Equal_with_null_reference_type() @@ -82,4 +110,40 @@ public override Task Legacy_AnyTagEqualTo_array() [Obsolete("Legacy filter support")] public override Task Legacy_AnyTagEqualTo_List() => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_List()); + + public new class Fixture : BasicFilterTests.Fixture + { + public override TestStore TestStore => RedisTestStore.Instance; + + protected override string CollectionName => "HashSetCollectionFilterTests"; + + // Override to remove the bool property, which isn't (currently) supported on Redis + protected 
override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = base.GetRecordDefinition().Properties.Where(p => + p.PropertyType != typeof(bool) && + p.PropertyType != typeof(string[]) && + p.PropertyType != typeof(List)).ToList() + }; + + protected override IVectorStoreRecordCollection CreateCollection() + => new RedisHashSetVectorStoreRecordCollection( + RedisTestStore.Instance.Database, + this.CollectionName, + new() { VectorStoreRecordDefinition = this.GetRecordDefinition() }); + + protected override List BuildTestData() + { + var testData = base.BuildTestData(); + + foreach (var record in testData) + { + // Null values are not supported in Redis hashsets + record.String ??= string.Empty; + } + + return testData; + } + } } diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisFilterFixture.cs deleted file mode 100644 index de751f36ca4e..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisFilterFixture.cs +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Connectors.Redis; -using RedisIntegrationTests.Support; -using VectorDataSpecificationTests.Filter; -using VectorDataSpecificationTests.Support; - -namespace RedisIntegrationTests.Filter; - -public class RedisJsonCollectionFilterFixture : FilterFixtureBase -{ - protected override TestStore TestStore => RedisTestStore.Instance; - - protected override string StoreName => "JsonCollectionFilterTests"; - - // Override to remove the bool property, which isn't (currently) supported on Redis/JSON - protected override VectorStoreRecordDefinition GetRecordDefinition() - => new() - { - Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(bool)).ToList() - }; - - protected override IVectorStoreRecordCollection> CreateCollection() - => new RedisJsonVectorStoreRecordCollection>( - RedisTestStore.Instance.Database, - this.StoreName, - new() { VectorStoreRecordDefinition = this.GetRecordDefinition() }); -} - -public class RedisHashSetCollectionFilterFixture : FilterFixtureBase -{ - protected override TestStore TestStore => RedisTestStore.Instance; - - protected override string StoreName => "HashSetCollectionFilterTests"; - - // Override to remove the bool property, which isn't (currently) supported on Redis - protected override VectorStoreRecordDefinition GetRecordDefinition() - => new() - { - Properties = base.GetRecordDefinition().Properties.Where(p => - p.PropertyType != typeof(bool) && - p.PropertyType != typeof(string[]) && - p.PropertyType != typeof(List)).ToList() - }; - - protected override IVectorStoreRecordCollection> CreateCollection() - => new RedisHashSetVectorStoreRecordCollection>( - RedisTestStore.Instance.Database, - this.StoreName, - new() { VectorStoreRecordDefinition = this.GetRecordDefinition() }); - - protected override List> BuildTestData() - { - var testData = base.BuildTestData(); - - foreach (var record in testData) - { - // Null values are not supported in Redis hashsets - record.String ??= string.Empty; - } - - return testData; - } -} diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs index 9ca7878a414e..10570cc109c5 100644 --- 
a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs @@ -1,12 +1,16 @@ // Copyright (c) Microsoft. All rights reserved. +using Microsoft.Extensions.VectorData; +using SqliteIntegrationTests.Support; using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; using Xunit; using Xunit.Sdk; namespace SqliteIntegrationTests.Filter; -public class SqliteBasicFilterTests(SqliteFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture +public class SqliteBasicFilterTests(SqliteBasicFilterTests.Fixture fixture) + : BasicFilterTests(fixture), IClassFixture { public override async Task Not_over_Or() { @@ -42,4 +46,18 @@ public override Task Legacy_AnyTagEqualTo_array() [Obsolete("Legacy filter support")] public override Task Legacy_AnyTagEqualTo_List() => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_List()); + + public new class Fixture : BasicFilterTests.Fixture + { + public override TestStore TestStore => SqliteTestStore.Instance; + + protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; + + // Override to remove the string array property, which isn't (currently) supported on SQLite + protected override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(string[]) && p.PropertyType != typeof(List)).ToList() + }; + } } diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteFilterFixture.cs deleted file mode 100644 index 3dc9a0d10dad..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteFilterFixture.cs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.Extensions.VectorData; -using SqliteIntegrationTests.Support; -using VectorDataSpecificationTests.Filter; -using VectorDataSpecificationTests.Support; - -namespace SqliteIntegrationTests.Filter; - -public class SqliteFilterFixture : FilterFixtureBase -{ - protected override TestStore TestStore => SqliteTestStore.Instance; - - protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; - - // Override to remove the string array property, which isn't (currently) supported on SQLite - protected override VectorStoreRecordDefinition GetRecordDefinition() - => new() - { - Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(string[]) && p.PropertyType != typeof(List)).ToList() - }; -} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTestsBase.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs similarity index 63% rename from dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTestsBase.cs rename to dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs index f2022a2e7c60..138f3863a5d4 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTestsBase.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs @@ -2,12 +2,13 @@ using System.Linq.Expressions; using Microsoft.Extensions.VectorData; +using VectorDataSpecificationTests.Support; using VectorDataSpecificationTests.Xunit; using Xunit; namespace VectorDataSpecificationTests.Filter; -public abstract class BasicFilterTestsBase(FilterFixtureBase fixture) +public abstract class BasicFilterTests(BasicFilterTests.Fixture fixture) where TKey : notnull { #region Equality @@ -214,7 +215,7 @@ public virtual Task Legacy_AnyTagEqualTo_List() #endregion Legacy filter support protected virtual async Task TestFilterAsync( - Expression, bool>> filter, + Expression> filter, bool expectZeroResults = false, bool expectAllResults = false) { @@ -249,7 +250,7 @@ protected virtual async Task TestFilterAsync( [Obsolete("Legacy filter support")] protected virtual async Task TestLegacyFilterAsync( VectorSearchFilter legacyFilter, - Expression, bool>> expectedFilter, + Expression> expectedFilter, bool expectZeroResults = false, bool expectAllResults = false) { @@ -280,4 +281,118 @@ protected virtual async Task TestLegacyFilterAsync( e.String == a.String && e.Int2 == a.Int2); } + +#pragma warning disable CS1819 // Properties should not return arrays +#pragma warning disable CA1819 // Properties should not return arrays + public class FilterRecord + { + public TKey Key { get; set; } = default!; + public ReadOnlyMemory? Vector { get; set; } + + public int Int { get; set; } + public string? 
String { get; set; } + public bool Bool { get; set; } + public int Int2 { get; set; } + public string[] StringArray { get; set; } = null!; + public List StringList { get; set; } = null!; + } +#pragma warning restore CA1819 // Properties should not return arrays +#pragma warning restore CS1819 + + public abstract class Fixture : VectorStoreCollectionFixture + { + protected override string CollectionName => "FilterTests"; + + protected override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = + [ + new VectorStoreRecordKeyProperty(nameof(FilterRecord.Key), typeof(TKey)), + new VectorStoreRecordVectorProperty(nameof(FilterRecord.Vector), typeof(ReadOnlyMemory?)) + { + Dimensions = 3, + DistanceFunction = this.DistanceFunction, + IndexKind = this.IndexKind + }, + + new VectorStoreRecordDataProperty(nameof(FilterRecord.Int), typeof(int)) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(FilterRecord.String), typeof(string)) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(FilterRecord.Bool), typeof(bool)) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(FilterRecord.Int2), typeof(int)) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(FilterRecord.StringArray), typeof(string[])) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(FilterRecord.StringList), typeof(List)) { IsFilterable = true } + ] + }; + + protected override List BuildTestData() + { + // All records have the same vector - this fixture is about testing criteria filtering only + var vector = new ReadOnlyMemory([1, 2, 3]); + + return + [ + new() + { + Key = this.GenerateNextKey(), + Int = 8, + String = "foo", + Bool = true, + Int2 = 80, + StringArray = ["x", "y"], + StringList = ["x", "y"], + Vector = vector + }, + new() + { + Key = this.GenerateNextKey(), + Int = 9, + String = "bar", + Bool = false, + Int2 = 90, + StringArray = ["a", "b"], + StringList = ["a", "b"], + Vector = vector + }, + new() + { + Key = this.GenerateNextKey(), + Int = 9, + String = "foo", + Bool = true, + Int2 = 9, + StringArray = ["x"], + StringList = ["x"], + Vector = vector + }, + new() + { + Key = this.GenerateNextKey(), + Int = 10, + String = null, + Bool = false, + Int2 = 100, + StringArray = ["x", "y", "z"], + StringList = ["x", "y", "z"], + Vector = vector + }, + new() + { + Key = this.GenerateNextKey(), + Int = 11, + Bool = true, + String = """with some special"characters'and\stuff""", + Int2 = 101, + StringArray = ["y", "z"], + StringList = ["y", "z"], + Vector = vector + } + ]; + } + + // In some databases (Azure AI Search), the data shows up but the filtering index isn't yet updated, + // so filtered searches show empty results. Add a filter to the seed data check below. + protected override Task WaitForDataAsync() + => this.TestStore.WaitForDataAsync(this.Collection, recordCount: this.TestData.Count, r => r.Int > 0); + } } diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/FilterFixtureBase.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/FilterFixtureBase.cs deleted file mode 100644 index 436d1453d552..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/FilterFixtureBase.cs +++ /dev/null @@ -1,191 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System.Globalization; -using Microsoft.Extensions.VectorData; -using VectorDataSpecificationTests.Support; -using Xunit; - -namespace VectorDataSpecificationTests.Filter; - -public abstract class FilterFixtureBase : IAsyncLifetime - where TKey : notnull -{ - private int _nextKeyValue = 1; - private List>? _testData; - - protected virtual string StoreName => "FilterTests"; - - protected abstract TestStore TestStore { get; } - - protected virtual string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineSimilarity; - protected virtual string IndexKind => Microsoft.Extensions.VectorData.IndexKind.Flat; - - protected virtual IVectorStoreRecordCollection> CreateCollection() - => this.TestStore.DefaultVectorStore.GetCollection>(this.StoreName, this.GetRecordDefinition()); - - public virtual async Task InitializeAsync() - { - await this.TestStore.ReferenceCountingStartAsync(); - - this.Collection = this.CreateCollection(); - - if (await this.Collection.CollectionExistsAsync()) - { - await this.Collection.DeleteCollectionAsync(); - } - - await this.Collection.CreateCollectionAsync(); - await this.SeedAsync(); - - // Some databases upsert asynchronously, meaning that our seed data may not be visible immediately to tests. - // Check and loop until it is. - for (var i = 0; i < 20; i++) - { - var results = await this.Collection.VectorizedSearchAsync( - new ReadOnlyMemory([1, 2, 3]), - new() - { - Top = this.TestData.Count, - NewFilter = r => r.Int > 0 - }); - var count = await results.Results.CountAsync(); - if (count == this.TestData.Count) - { - break; - } - - await Task.Delay(TimeSpan.FromMilliseconds(100)); - } - } - - protected virtual VectorStoreRecordDefinition GetRecordDefinition() - => new() - { - Properties = - [ - new VectorStoreRecordKeyProperty(nameof(FilterRecord.Key), typeof(TKey)), - new VectorStoreRecordVectorProperty(nameof(FilterRecord.Vector), typeof(ReadOnlyMemory?)) - { - Dimensions = 3, - DistanceFunction = this.DistanceFunction, - IndexKind = this.IndexKind - }, - - new VectorStoreRecordDataProperty(nameof(FilterRecord.Int), typeof(int)) { IsFilterable = true }, - new VectorStoreRecordDataProperty(nameof(FilterRecord.String), typeof(string)) { IsFilterable = true }, - new VectorStoreRecordDataProperty(nameof(FilterRecord.Bool), typeof(bool)) { IsFilterable = true }, - new VectorStoreRecordDataProperty(nameof(FilterRecord.Int2), typeof(int)) { IsFilterable = true }, - new VectorStoreRecordDataProperty(nameof(FilterRecord.StringArray), typeof(string[])) { IsFilterable = true }, - new VectorStoreRecordDataProperty(nameof(FilterRecord.StringList), typeof(List)) { IsFilterable = true } - ] - }; - - public virtual IVectorStoreRecordCollection> Collection { get; private set; } = null!; - - public List> TestData => this._testData ??= this.BuildTestData(); - - protected virtual List> BuildTestData() - { - // All records have the same vector - this fixture is about testing criteria filtering only - var vector = new ReadOnlyMemory([1, 2, 3]); - - return - [ - new() - { - Key = this.GenerateNextKey(), - Int = 8, - String = "foo", - Bool = true, - Int2 = 80, - StringArray = ["x", "y"], - StringList = ["x", "y"], - Vector = vector - }, - new() - { - Key = this.GenerateNextKey(), - Int = 9, - String = "bar", - Bool = false, - Int2 = 90, - StringArray = ["a", "b"], - StringList = ["a", "b"], - Vector = vector - }, - new() - { - Key = this.GenerateNextKey(), - Int = 9, - String = "foo", - Bool = true, - Int2 = 9, - StringArray = ["x"], - StringList = ["x"], - 
Vector = vector - }, - new() - { - Key = this.GenerateNextKey(), - Int = 10, - String = null, - Bool = false, - Int2 = 100, - StringArray = ["x", "y", "z"], - StringList = ["x", "y", "z"], - Vector = vector - }, - new() - { - Key = this.GenerateNextKey(), - Int = 11, - Bool = true, - String = """with some special"characters'and\stuff""", - Int2 = 101, - StringArray = ["y", "z"], - StringList = ["y", "z"], - Vector = vector - } - ]; - } - - protected virtual async Task SeedAsync() - { - // TODO: UpsertBatchAsync returns IAsyncEnumerable (to support server-generated keys?), but this makes it quite hard to use: - await foreach (var _ in this.Collection.UpsertBatchAsync(this.TestData)) - { - } - } - - protected virtual TKey GenerateNextKey() - => typeof(TKey) switch - { - _ when typeof(TKey) == typeof(int) => (TKey)(object)this._nextKeyValue++, - _ when typeof(TKey) == typeof(long) => (TKey)(object)(long)this._nextKeyValue++, - _ when typeof(TKey) == typeof(ulong) => (TKey)(object)(ulong)this._nextKeyValue++, - _ when typeof(TKey) == typeof(string) => (TKey)(object)(this._nextKeyValue++).ToString(CultureInfo.InvariantCulture), - _ when typeof(TKey) == typeof(Guid) => (TKey)(object)new Guid($"00000000-0000-0000-0000-00{this._nextKeyValue++:0000000000}"), - - _ => throw new NotSupportedException($"Unsupported key of type '{typeof(TKey).Name}', override {nameof(this.GenerateNextKey)}") - }; - - public virtual Task DisposeAsync() - => this.TestStore.ReferenceCountingStopAsync(); -} - -#pragma warning disable CS1819 // Properties should not return arrays -#pragma warning disable CA1819 // Properties should not return arrays -public class FilterRecord -{ - public TKey Key { get; set; } = default!; - public ReadOnlyMemory? Vector { get; set; } - - public int Int { get; set; } - public string? String { get; set; } - public bool Bool { get; set; } - public int Int2 { get; set; } - public string[] StringArray { get; set; } = null!; - public List StringList { get; set; } = null!; -} -#pragma warning restore CA1819 // Properties should not return arrays -#pragma warning restore CS1819 diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs index de7c0d252062..3facbed3c916 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs @@ -1,5 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. 
+using System.Globalization; +using System.Linq.Expressions; using Microsoft.Extensions.VectorData; namespace VectorDataSpecificationTests.Support; @@ -11,11 +13,16 @@ public abstract class TestStore private readonly SemaphoreSlim _lock = new(1, 1); private int _referenceCount; + public virtual string DefaultDistanceFunction => DistanceFunction.CosineSimilarity; + public virtual string DefaultIndexKind => IndexKind.Flat; + protected abstract Task StartAsync(); protected virtual Task StopAsync() => Task.CompletedTask; + public abstract IVectorStore DefaultVectorStore { get; } + public virtual async Task ReferenceCountingStartAsync() { await this._lock.WaitAsync(); @@ -48,5 +55,46 @@ public virtual async Task ReferenceCountingStopAsync() } } - public abstract IVectorStore DefaultVectorStore { get; } + public virtual TKey GenerateKey(int value) + => typeof(TKey) switch + { + _ when typeof(TKey) == typeof(int) => (TKey)(object)value, + _ when typeof(TKey) == typeof(long) => (TKey)(object)(long)value, + _ when typeof(TKey) == typeof(ulong) => (TKey)(object)(ulong)value, + _ when typeof(TKey) == typeof(string) => (TKey)(object)value.ToString(CultureInfo.InvariantCulture), + _ when typeof(TKey) == typeof(Guid) => (TKey)(object)new Guid($"00000000-0000-0000-0000-00{value:0000000000}"), + + _ => throw new NotSupportedException($"Unsupported key of type '{typeof(TKey).Name}', override {nameof(TestStore)}.{nameof(this.GenerateKey)}") + }; + + /// Loops until the expected number of records is visible in the given collection. + /// Some databases upsert asynchronously, meaning that our seed data may not be visible immediately to tests. + public virtual async Task WaitForDataAsync( + IVectorStoreRecordCollection collection, + int recordCount, + Expression>? filter = null) + where TKey : notnull + { + for (var i = 0; i < 20; i++) + { + var results = await collection.VectorizedSearchAsync( + new ReadOnlyMemory([1, 2, 3]), + new() + { + Top = recordCount, + // In some databases (Azure AI Search), the data shows up but the filtering index isn't yet updated, + // so filtered searches show empty results. Add a filter to the seed data check below. + NewFilter = filter + }); + var count = await results.Results.CountAsync(); + if (count == recordCount) + { + return; + } + + await Task.Delay(TimeSpan.FromMilliseconds(100)); + } + + throw new InvalidOperationException("Data did not appear in the collection within the expected time."); + } } diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs new file mode 100644 index 000000000000..f6506b6fa5de --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; + +namespace VectorDataSpecificationTests.Support; + +/// +/// A test fixture that sets up a single collection in the test vector store, with a specific record definition +/// and test data. +/// +public abstract class VectorStoreCollectionFixture : VectorStoreFixture + where TKey : notnull +{ + private List? 
_testData; + + protected abstract string CollectionName { get; } + protected abstract VectorStoreRecordDefinition GetRecordDefinition(); + protected abstract List BuildTestData(); + + protected virtual string DistanceFunction => this.TestStore.DefaultDistanceFunction; + protected virtual string IndexKind => this.TestStore.DefaultIndexKind; + + protected virtual IVectorStoreRecordCollection CreateCollection() + => this.TestStore.DefaultVectorStore.GetCollection(this.CollectionName, this.GetRecordDefinition()); + + public override async Task InitializeAsync() + { + await base.InitializeAsync(); + + this.Collection = this.CreateCollection(); + + if (await this.Collection.CollectionExistsAsync()) + { + await this.Collection.DeleteCollectionAsync(); + } + + await this.Collection.CreateCollectionAsync(); + await this.SeedAsync(); + } + + public virtual IVectorStoreRecordCollection Collection { get; private set; } = null!; + + public List TestData => this._testData ??= this.BuildTestData(); + + protected virtual async Task SeedAsync() + { + // TODO: UpsertBatchAsync returns IAsyncEnumerable (to support server-generated keys?), but this makes it quite hard to use: + await foreach (var _ in this.Collection.UpsertBatchAsync(this.TestData)) + { + } + + await this.WaitForDataAsync(); + } + + protected virtual Task WaitForDataAsync() + => this.TestStore.WaitForDataAsync(this.Collection, recordCount: this.TestData.Count); +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreFixture.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreFixture.cs new file mode 100644 index 000000000000..af4d97e95a29 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreFixture.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Xunit; + +namespace VectorDataSpecificationTests.Support; + +public abstract class VectorStoreFixture : IAsyncLifetime +{ + private int _nextKeyValue = 1; + + public abstract TestStore TestStore { get; } + + public virtual string DefaultDistanceFunction => this.TestStore.DefaultDistanceFunction; + public virtual string DefaultIndexKind => this.TestStore.DefaultIndexKind; + + public virtual Task InitializeAsync() + => this.TestStore.ReferenceCountingStartAsync(); + + public virtual Task DisposeAsync() + => this.TestStore.ReferenceCountingStopAsync(); + + public virtual TKey GenerateNextKey() + => this.TestStore.GenerateKey(this._nextKeyValue++); +} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs index 2880d1b93859..6238ca6d9b6a 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs @@ -2,12 +2,15 @@ using Microsoft.Extensions.VectorData; using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using WeaviateIntegrationTests.Support; using Xunit; using Xunit.Sdk; namespace WeaviateIntegrationTests.Filter; -public class WeaviateBasicFilterTests(WeaviateFilterFixture fixture) : BasicFilterTestsBase(fixture), IClassFixture +public class WeaviateBasicFilterTests(WeaviateBasicFilterTests.Fixture fixture) + : BasicFilterTests(fixture), IClassFixture { #region Filter by null @@ -59,4 +62,11 @@ public override Task Contains_over_inline_string_array_with_weird_chars() // (https://weaviate.io/developers/weaviate/api/graphql/filters#multi-word-queries-in-equal-filters) public override Task Equal_with_string_is_not_Contains() => Assert.ThrowsAsync(() => base.Equal_with_string_is_not_Contains()); + + public new class Fixture : BasicFilterTests.Fixture + { + public override TestStore TestStore => WeaviateTestStore.Instance; + + protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; + } } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateFilterFixture.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateFilterFixture.cs deleted file mode 100644 index f00b884780c2..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateFilterFixture.cs +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using VectorDataSpecificationTests.Filter; -using VectorDataSpecificationTests.Support; -using WeaviateIntegrationTests.Support; - -namespace WeaviateIntegrationTests.Filter; - -public class WeaviateFilterFixture : FilterFixtureBase -{ - protected override TestStore TestStore => WeaviateTestStore.Instance; - - protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; -} From 3a36d40ceb0cdf85b886fa1d794ce74dd9a03752 Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Wed, 19 Feb 2025 08:19:34 +0100 Subject: [PATCH 04/63] .Net: Obsolete MEVD connectors collection factories (#10590) Closes #10534 --- dotnet/SK-dotnet.sln.DotSettings | 1 + .../AzureAISearchFactory.cs | 23 ++++--------- .../PineconeFactory.cs | 23 ++++--------- .../QdrantFactory.cs | 23 ++++--------- .../RedisFactory.cs | 23 ++++--------- .../VectorStore_DataIngestion_CustomMapper.cs | 17 +++++----- .../AzureAISearchVectorStoreTests.cs | 2 ++ .../AzureCosmosDBMongoDBVectorStoreTests.cs | 2 ++ .../AzureCosmosDBNoSQLVectorStoreTests.cs | 2 ++ .../AzureAISearchVectorStore.cs | 8 +++-- .../AzureAISearchVectorStoreOptions.cs | 2 ++ ...zureAISearchVectorStoreRecordCollection.cs | 26 +++++++-------- ...earchVectorStoreRecordCollectionFactory.cs | 2 ++ .../AzureCosmosDBMongoDBVectorStore.cs | 8 +++-- .../AzureCosmosDBMongoDBVectorStoreOptions.cs | 3 ++ ...mosDBMongoDBVectorStoreRecordCollection.cs | 24 +++++++------- ...ngoDBVectorStoreRecordCollectionFactory.cs | 2 ++ .../AzureCosmosDBNoSQLVectorStore.cs | 8 +++-- .../AzureCosmosDBNoSQLVectorStoreOptions.cs | 2 ++ ...osmosDBNoSQLVectorStoreRecordCollection.cs | 32 +++++++++---------- ...NoSQLVectorStoreRecordCollectionFactory.cs | 2 ++ ...ngoDBVectorStoreRecordCollectionFactory.cs | 2 ++ .../MongoDBVectorStore.cs | 8 +++-- .../MongoDBVectorStoreOptions.cs | 3 ++ .../MongoDBVectorStoreRecordCollection.cs | 24 +++++++------- ...econeVectorStoreRecordCollectionFactory.cs | 2 ++ .../PineconeVectorStore.cs | 8 +++-- .../PineconeVectorStoreOptions.cs | 3 ++ .../PineconeVectorStoreRecordCollection.cs | 24 +++++++------- ...tgresVectorStoreRecordCollectionFactory.cs | 2 ++ .../PostgresVectorStore.cs | 6 ++-- .../PostgresVectorStoreOptions.cs | 3 ++ .../PostgresVectorStoreRecordCollection.cs | 24 +++++++------- ...drantVectorStoreRecordCollectionFactory.cs | 2 ++ .../QdrantVectorStore.cs | 8 +++-- .../QdrantVectorStoreOptions.cs | 3 ++ .../QdrantVectorStoreRecordCollection.cs | 32 +++++++++---------- ...RedisVectorStoreRecordCollectionFactory.cs | 2 ++ ...RedisHashSetVectorStoreRecordCollection.cs | 24 +++++++------- .../RedisJsonVectorStoreRecordCollection.cs | 24 +++++++------- .../RedisVectorStore.cs | 8 +++-- .../RedisVectorStoreOptions.cs | 3 ++ ...qliteVectorStoreRecordCollectionFactory.cs | 2 ++ .../SqliteVectorStore.cs | 8 +++-- .../SqliteVectorStoreOptions.cs | 3 ++ .../SqliteVectorStoreRecordCollection.cs | 32 +++++++++---------- ...viateVectorStoreRecordCollectionFactory.cs | 2 ++ .../WeaviateVectorStore.cs | 8 +++-- .../WeaviateVectorStoreOptions.cs | 1 + .../WeaviateVectorStoreRecordCollection.cs | 24 +++++++------- .../MongoDBVectorStoreTests.cs | 2 ++ .../PostgresVectorStoreTests.cs | 4 ++- .../QdrantVectorStoreTests.cs | 2 ++ .../RedisVectorStoreTests.cs | 2 ++ .../SqliteVectorStoreTests.cs | 2 ++ .../WeaviateVectorStoreTests.cs | 2 ++ .../Pinecone/PineconeVectorStoreTests.cs | 2 ++ 57 files changed, 298 insertions(+), 248 deletions(-) diff --git a/dotnet/SK-dotnet.sln.DotSettings b/dotnet/SK-dotnet.sln.DotSettings index 
f5eec1700bcd..53435fe3a9c6 100644 --- a/dotnet/SK-dotnet.sln.DotSettings +++ b/dotnet/SK-dotnet.sln.DotSettings @@ -215,6 +215,7 @@ public void It$SOMENAME$() True True True + True True True True diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs index 0f437422fb32..2bf0cb763a7a 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs @@ -37,23 +37,14 @@ public static class AzureAISearchFactory /// Azure AI Search client that can be used to manage the list of indices in an Azure AI Search Service. /// The . public static IVectorStore CreateQdrantLangchainInteropVectorStore(SearchIndexClient searchIndexClient) - { - // Create a vector store that uses our custom factory for creating collections - // so that the collection can be configured to be compatible with Langchain. - return new AzureAISearchVectorStore( - searchIndexClient, - new() - { - VectorStoreCollectionFactory = new AzureAISearchVectorStoreRecordCollectionFactory() - }); - } + => new AzureAISearchLangchainInteropVectorStore(searchIndexClient); - /// - /// Factory that is used to inject the appropriate and mapper for Langchain interoperability. - /// - private sealed class AzureAISearchVectorStoreRecordCollectionFactory : IAzureAISearchVectorStoreRecordCollectionFactory + private sealed class AzureAISearchLangchainInteropVectorStore(SearchIndexClient searchIndexClient, AzureAISearchVectorStoreOptions? options = default) + : AzureAISearchVectorStore(searchIndexClient, options) { - public IVectorStoreRecordCollection CreateVectorStoreRecordCollection(SearchIndexClient searchIndexClient, string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition) where TKey : notnull + private readonly SearchIndexClient _searchIndexClient = searchIndexClient; + + public override IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) { if (typeof(TKey) != typeof(string) || typeof(TRecord) != typeof(LangchainDocument)) { @@ -67,7 +58,7 @@ public IVectorStoreRecordCollection CreateVectorStoreRecordCollec // a JSON string containing the source property. Parsing this // string and extracting the source is not supported by the default mapper. return (new AzureAISearchVectorStoreRecordCollection( - searchIndexClient, + _searchIndexClient, name, new() { diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs index e1a30d75a4ff..2f878199b62a 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs @@ -34,23 +34,14 @@ public static class PineconeFactory /// Pinecone client that can be used to manage the collections and points in a Pinecone store. /// The . public static IVectorStore CreatePineconeLangchainInteropVectorStore(Sdk.PineconeClient pineconeClient) - { - // Create a vector store that uses our custom factory for creating collections - // so that the collection can be configured to be compatible with Langchain. 
- return new PineconeVectorStore( - pineconeClient, - new() - { - VectorStoreCollectionFactory = new PineconeVectorStoreRecordCollectionFactory() - }); - } + => new PineconeLangchainInteropVectorStore(pineconeClient); - /// - /// Factory that is used to inject the appropriate for Langchain interoperability. - /// - private sealed class PineconeVectorStoreRecordCollectionFactory : IPineconeVectorStoreRecordCollectionFactory + private sealed class PineconeLangchainInteropVectorStore(Sdk.PineconeClient pineconeClient) + : PineconeVectorStore(pineconeClient) { - public IVectorStoreRecordCollection CreateVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition) where TKey : notnull + private readonly Sdk.PineconeClient _pineconeClient = pineconeClient; + + public override IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) { if (typeof(TKey) != typeof(string) || typeof(TRecord) != typeof(LangchainDocument)) { @@ -61,7 +52,7 @@ public IVectorStoreRecordCollection CreateVectorStoreRecordCollec // the schema used by Langchain so that the default mapper can use the storage names // in it, to map to the storage scheme. return (new PineconeVectorStoreRecordCollection( - pineconeClient, + _pineconeClient, name, new() { diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs index f34fdc72e812..53f0b399af82 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs @@ -35,23 +35,14 @@ public static class QdrantFactory /// Qdrant client that can be used to manage the collections and points in a Qdrant store. /// The . public static IVectorStore CreateQdrantLangchainInteropVectorStore(QdrantClient qdrantClient) - { - // Create a vector store that uses our custom factory for creating collections - // so that the collection can be configured to be compatible with Langchain. - return new QdrantVectorStore( - qdrantClient, - new() - { - VectorStoreCollectionFactory = new QdrantVectorStoreRecordCollectionFactory() - }); - } + => new QdrantLangchainInteropVectorStore(qdrantClient); - /// - /// Factory that is used to inject the appropriate and mapper for Langchain interoperability. - /// - private sealed class QdrantVectorStoreRecordCollectionFactory : IQdrantVectorStoreRecordCollectionFactory + private sealed class QdrantLangchainInteropVectorStore(QdrantClient qdrantClient) + : QdrantVectorStore(qdrantClient) { - public IVectorStoreRecordCollection CreateVectorStoreRecordCollection(QdrantClient qdrantClient, string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition) where TKey : notnull + private readonly QdrantClient _qdrantClient = qdrantClient; + + public override IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) { // Create a Qdrant collection. To be compatible with Langchain // we need to use a custom record definition that matches the @@ -61,7 +52,7 @@ public IVectorStoreRecordCollection CreateVectorStoreRecordCollec // Since langchain creates collections without named vector support // we should set HasNamedVectors to false. 
var collection = new QdrantVectorStoreRecordCollection>( - qdrantClient, + _qdrantClient, name, new() { diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs index 16c269491d91..23fd026401b4 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs @@ -34,23 +34,14 @@ public static class RedisFactory /// The redis database to read/write from. /// The . public static IVectorStore CreateRedisLangchainInteropVectorStore(IDatabase database) - { - // Create a vector store that uses our custom factory for creating collections - // so that the collection can be configured to be compatible with Langchain. - return new RedisVectorStore( - database, - new() - { - VectorStoreCollectionFactory = new RedisVectorStoreRecordCollectionFactory() - }); - } + => new RedisLangchainInteropVectorStore(database); - /// - /// Factory that is used to inject the appropriate for Langchain interoperability. - /// - private sealed class RedisVectorStoreRecordCollectionFactory : IRedisVectorStoreRecordCollectionFactory + private sealed class RedisLangchainInteropVectorStore(IDatabase database) + : RedisVectorStore(database) { - public IVectorStoreRecordCollection CreateVectorStoreRecordCollection(IDatabase database, string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition) where TKey : notnull + private readonly IDatabase _database = database; + + public override IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) { if (typeof(TKey) != typeof(string) || typeof(TRecord) != typeof(LangchainDocument)) { @@ -62,7 +53,7 @@ public IVectorStoreRecordCollection CreateVectorStoreRecordCollec // so that the default mapper can use the storage names in it, to map to the storage // scheme. return (new RedisHashSetVectorStoreRecordCollection( - database, + _database, name, new() { diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs index 4cbf683e54be..3f86c763acbb 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs @@ -64,7 +64,7 @@ public async Task ExampleAsync() // Initiate the docker container and construct the vector store using the custom factory for creating collections. await redisFixture.ManualInitializeAsync(); ConnectionMultiplexer redis = ConnectionMultiplexer.Connect("localhost:6379"); - var vectorStore = new RedisVectorStore(redis.GetDatabase(), new() { VectorStoreCollectionFactory = new Factory() }); + var vectorStore = new CustomRedisVectorStore(redis.GetDatabase()); // Get and create collection if it doesn't exist, using the record definition containing the storage model. var collection = vectorStore.GetCollection("skglossary", s_glossaryDefinition); @@ -129,23 +129,22 @@ public GenericDataModel MapFromStorageToDataModel((string Key, JsonNode Node) st } } - /// - /// A factory for creating collections in the vector store - /// - private sealed class Factory : IRedisVectorStoreRecordCollectionFactory + private sealed class CustomRedisVectorStore(IDatabase database, RedisVectorStoreOptions? 
options = default) + : RedisVectorStore(database, options) { - public IVectorStoreRecordCollection CreateVectorStoreRecordCollection(IDatabase database, string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition) - where TKey : notnull + private readonly IDatabase _database = database; + + public override IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) { // If the record definition is the glossary definition and the record type is the generic data model, inject the custom mapper into the collection options. if (vectorStoreRecordDefinition == s_glossaryDefinition && typeof(TRecord) == typeof(GenericDataModel)) { - var customCollection = new RedisJsonVectorStoreRecordCollection(database, name, new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition, JsonNodeCustomMapper = new Mapper() }) as IVectorStoreRecordCollection; + var customCollection = new RedisJsonVectorStoreRecordCollection(_database, name, new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition, JsonNodeCustomMapper = new Mapper() }) as IVectorStoreRecordCollection; return customCollection!; } // Otherwise, just create a standard collection with the default mapper. - var collection = new RedisJsonVectorStoreRecordCollection(database, name, new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; + var collection = new RedisJsonVectorStoreRecordCollection(_database, name, new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; return collection!; } } diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreTests.cs index 421e73a308c6..b79b048a5f38 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreTests.cs @@ -48,6 +48,7 @@ public void GetCollectionReturnsCollection() Assert.IsType>(actual); } +#pragma warning disable CS0618 // IAzureAISearchVectorStoreRecordCollectionFactory is obsolete [Fact] public void GetCollectionCallsFactoryIfProvided() { @@ -65,6 +66,7 @@ public void GetCollectionCallsFactoryIfProvided() // Assert. 
Assert.Equal(collectionMock.Object, actual); } +#pragma warning restore CS0618 // Type or member is obsolete [Fact] public void GetCollectionThrowsForInvalidKeyType() diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreTests.cs index 067a1adbfd7e..7a89594b10f4 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreTests.cs @@ -30,6 +30,7 @@ public void GetCollectionWithNotSupportedKeyThrowsException() Assert.Throws(() => sut.GetCollection("collection")); } +#pragma warning disable CS0618 // IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory is obsolete [Fact] public void GetCollectionWithFactoryReturnsCustomCollection() { @@ -58,6 +59,7 @@ public void GetCollectionWithFactoryReturnsCustomCollection() "collection", It.IsAny()), Times.Once()); } +#pragma warning restore CS0618 [Fact] public void GetCollectionWithoutFactoryReturnsDefaultCollection() diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreTests.cs index 94df10884b59..84ad3b36f4a6 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreTests.cs @@ -45,6 +45,7 @@ public void GetCollectionWithSupportedKeyReturnsCollection() Assert.NotNull(collectionWithCompositeKey); } +#pragma warning disable CS0618 // IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory is obsolete [Fact] public void GetCollectionWithFactoryReturnsCustomCollection() { @@ -73,6 +74,7 @@ public void GetCollectionWithFactoryReturnsCustomCollection() "collection", It.IsAny()), Times.Once()); } +#pragma warning restore CS0618 [Fact] public void GetCollectionWithoutFactoryReturnsDefaultCollection() diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs index 7be8d3ce7f28..5329cdf3cee4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs @@ -17,7 +17,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public sealed class AzureAISearchVectorStore : IVectorStore +public class AzureAISearchVectorStore : IVectorStore { /// The name of this database for telemetry purposes. private const string DatabaseName = "AzureAISearch"; @@ -42,13 +42,15 @@ public AzureAISearchVectorStore(SearchIndexClient searchIndexClient, AzureAISear } /// - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) where TKey : notnull { +#pragma warning disable CS0618 // IAzureAISearchVectorStoreRecordCollectionFactor is obsolete if (this._options.VectorStoreCollectionFactory is not null) { return this._options.VectorStoreCollectionFactory.CreateVectorStoreRecordCollection(this._searchIndexClient, name, vectorStoreRecordDefinition); } +#pragma warning restore CS0618 if (typeof(TKey) != typeof(string)) { @@ -68,7 +70,7 @@ public IVectorStoreRecordCollection GetCollection( } /// - public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { var indexNamesEnumerable = this._searchIndexClient.GetIndexNamesAsync(cancellationToken).ConfigureAwait(false); var indexNamesEnumerator = indexNamesEnumerable.GetAsyncEnumerator(); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs index 06e099efc4fa..4c17ed4195e6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Text.Json; using Azure.Search.Documents.Indexes; @@ -13,6 +14,7 @@ public sealed class AzureAISearchVectorStoreOptions /// /// An optional factory to use for constructing instances, if a custom record collection is required. /// + [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IAzureAISearchVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index 9e92f5bbb722..67ed7e58a96e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -23,7 +23,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class AzureAISearchVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorizableTextSearch +public class AzureAISearchVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorizableTextSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// The name of this database for telemetry purposes. 
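
A minimal sketch of the replacement pattern that the new Obsolete message points to, assuming the post-patch API shown in the hunks above (virtual GetCollection on the store, non-sealed record collection); the class name MyCustomAzureAISearchVectorStore and the "my-special-index" collection name below are hypothetical and not part of this patch:

using Azure.Search.Documents.Indexes;
using Microsoft.Extensions.VectorData;
using Microsoft.SemanticKernel.Connectors.AzureAISearch;

// Illustrative only: instead of supplying a VectorStoreCollectionFactory, derive from the
// provider's vector store and override GetCollection to customize how collections are built.
internal sealed class MyCustomAzureAISearchVectorStore(SearchIndexClient searchIndexClient)
    : AzureAISearchVectorStore(searchIndexClient)
{
    private readonly SearchIndexClient _searchIndexClient = searchIndexClient;

    public override IVectorStoreRecordCollection<TKey, TRecord> GetCollection<TKey, TRecord>(
        string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null)
    {
        // Apply custom options for one specific collection; fall back to the default behavior otherwise.
        if (name == "my-special-index")
        {
            return (new AzureAISearchVectorStoreRecordCollection<TRecord>(
                this._searchIndexClient,
                name,
                new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition })
                as IVectorStoreRecordCollection<TKey, TRecord>)!;
        }

        return base.GetCollection<TKey, TRecord>(name, vectorStoreRecordDefinition);
    }
}
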
@@ -141,7 +141,7 @@ public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexCli public string CollectionName => this._collectionName; /// - public async Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public virtual async Task CollectionExistsAsync(CancellationToken cancellationToken = default) { try { @@ -164,7 +164,7 @@ public async Task CollectionExistsAsync(CancellationToken cancellationToke } /// - public Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) { var vectorSearchConfig = new VectorSearch(); var searchFields = new List(); @@ -212,7 +212,7 @@ public Task CreateCollectionAsync(CancellationToken cancellationToken = default) } /// - public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -221,7 +221,7 @@ public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellatio } /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { return this.RunOperationAsync( "DeleteIndex", @@ -229,7 +229,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public Task GetAsync(string key, GetRecordOptions? options = default, CancellationToken cancellationToken = default) + public virtual Task GetAsync(string key, GetRecordOptions? options = default, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -242,7 +242,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? 
options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -263,7 +263,7 @@ public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, G } /// - public Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -274,7 +274,7 @@ public Task DeleteAsync(string key, CancellationToken cancellationToken = defaul } /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -285,7 +285,7 @@ public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancell } /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public virtual async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -298,7 +298,7 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancella } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -314,7 +314,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable reco } /// - public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); @@ -374,7 +374,7 @@ public Task> VectorizedSearchAsync(TVector } /// - public Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(searchText); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/IAzureAISearchVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/IAzureAISearchVectorStoreRecordCollectionFactory.cs index 260272b2ba89..2c9def54ae18 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/IAzureAISearchVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/IAzureAISearchVectorStoreRecordCollectionFactory.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Azure.Search.Documents.Indexes; using Microsoft.Extensions.VectorData; @@ -8,6 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// /// Interface for constructing Azure AI Search instances when using to retrieve these. 
/// +[Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public interface IAzureAISearchVectorStoreRecordCollectionFactory { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs index 2148da34bd43..76dc9e8500a4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs @@ -15,7 +15,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public sealed class AzureCosmosDBMongoDBVectorStore : IVectorStore +public class AzureCosmosDBMongoDBVectorStore : IVectorStore { /// that can be used to manage the collections in Azure CosmosDB MongoDB. private readonly IMongoDatabase _mongoDatabase; @@ -37,13 +37,15 @@ public AzureCosmosDBMongoDBVectorStore(IMongoDatabase mongoDatabase, AzureCosmos } /// - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull { +#pragma warning disable CS0618 // IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) { return this._options.VectorStoreCollectionFactory.CreateVectorStoreRecordCollection(this._mongoDatabase, name, vectorStoreRecordDefinition); } +#pragma warning restore CS0618 if (typeof(TKey) != typeof(string)) { @@ -59,7 +61,7 @@ public IVectorStoreRecordCollection GetCollection( } /// - public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { using var cursor = await this._mongoDatabase .ListCollectionNamesAsync(cancellationToken: cancellationToken) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreOptions.cs index 08df3aef81d8..8e9b2cccbc6e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreOptions.cs @@ -1,5 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; + namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; /// @@ -10,5 +12,6 @@ public sealed class AzureCosmosDBMongoDBVectorStoreOptions /// /// An optional factory to use for constructing instances, if a custom record collection is required. /// + [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory? 
VectorStoreCollectionFactory { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index a5d355150da3..95d4df05400f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -22,7 +22,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection +public class AzureCosmosDBMongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// The name of this database for telemetry purposes. @@ -96,11 +96,11 @@ public AzureCosmosDBMongoDBVectorStoreRecordCollection( } /// - public Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public virtual Task CollectionExistsAsync(CancellationToken cancellationToken = default) => this.RunOperationAsync("ListCollectionNames", () => this.InternalCollectionExistsAsync(cancellationToken)); /// - public async Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public virtual async Task CreateCollectionAsync(CancellationToken cancellationToken = default) { await this.RunOperationAsync("CreateCollection", () => this._mongoDatabase.CreateCollectionAsync(this.CollectionName, cancellationToken: cancellationToken)).ConfigureAwait(false); @@ -110,7 +110,7 @@ await this.RunOperationAsync("CreateIndexes", } /// - public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -119,7 +119,7 @@ public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellatio } /// - public async Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public virtual async Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -128,7 +128,7 @@ await this.RunOperationAsync("DeleteOne", () => this._mongoCollection.DeleteOneA } /// - public async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -137,11 +137,11 @@ await this.RunOperationAsync("DeleteMany", () => this._mongoCollection.DeleteMan } /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) => this.RunOperationAsync("DropCollection", () => this._mongoDatabase.DropCollectionAsync(this.CollectionName, cancellationToken)); /// - public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task GetAsync(string key, GetRecordOptions? 
options = null, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -171,7 +171,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public async IAsyncEnumerable GetBatchAsync( + public virtual async IAsyncEnumerable GetBatchAsync( IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -201,7 +201,7 @@ public async IAsyncEnumerable GetBatchAsync( } /// - public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public virtual Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -227,7 +227,7 @@ await this._mongoCollection } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -244,7 +244,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable reco } /// - public async Task> VectorizedSearchAsync( + public virtual async Task> VectorizedSearchAsync( TVector vector, MEVD.VectorSearchOptions? options = null, CancellationToken cancellationToken = default) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory.cs index b917e9c8be2a..5aeec3f3f4ff 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Extensions.VectorData; using MongoDB.Driver; @@ -8,6 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; /// /// Interface for constructing Azure CosmosDB MongoDB instances when using to retrieve these. /// +[Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public interface IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs index 10e876acb8a4..39320e0a8ae2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs @@ -15,7 +15,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public sealed class AzureCosmosDBNoSQLVectorStore : IVectorStore +public class AzureCosmosDBNoSQLVectorStore : IVectorStore { /// that can be used to manage the collections in Azure CosmosDB NoSQL. 
private readonly Database _database; @@ -37,9 +37,10 @@ public AzureCosmosDBNoSQLVectorStore(Database database, AzureCosmosDBNoSQLVector } /// - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull { +#pragma warning disable CS0618 // IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) { return this._options.VectorStoreCollectionFactory.CreateVectorStoreRecordCollection( @@ -47,6 +48,7 @@ public IVectorStoreRecordCollection GetCollection( name, vectorStoreRecordDefinition); } +#pragma warning restore CS0618 if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(AzureCosmosDBNoSQLCompositeKey)) { @@ -66,7 +68,7 @@ public IVectorStoreRecordCollection GetCollection( } /// - public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { const string Query = "SELECT VALUE(c.id) FROM c"; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreOptions.cs index d6f1bef56e0b..edbfe436f136 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreOptions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Text.Json; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; @@ -12,6 +13,7 @@ public sealed class AzureCosmosDBNoSQLVectorStoreOptions /// /// An optional factory to use for constructing instances, if a custom record collection is required. /// + [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index 53463cb943b4..aaacacde37fb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -22,7 +22,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// /// The data model to use for adding, updating and retrieving data from storage. 
#pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollection : +public class AzureCosmosDBNoSQLVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect @@ -163,7 +163,7 @@ public AzureCosmosDBNoSQLVectorStoreRecordCollection( } /// - public Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public virtual Task CollectionExistsAsync(CancellationToken cancellationToken = default) { return this.RunOperationAsync("GetContainerQueryIterator", async () => { @@ -188,14 +188,14 @@ public Task CollectionExistsAsync(CancellationToken cancellationToken = de } /// - public Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) { return this.RunOperationAsync("CreateContainer", () => this._database.CreateContainerAsync(this.GetContainerProperties(), cancellationToken: cancellationToken)); } /// - public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -204,7 +204,7 @@ public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellatio } /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { return this.RunOperationAsync("DeleteContainer", () => this._database @@ -215,7 +215,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) #region Implementation of IVectorStoreRecordCollection /// - public Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(string key, CancellationToken cancellationToken = default) { // Use record key as partition key var compositeKey = new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key); @@ -224,7 +224,7 @@ public Task DeleteAsync(string key, CancellationToken cancellationToken = defaul } /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { // Use record keys as partition keys var compositeKeys = keys.Select(key => new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key)); @@ -233,7 +233,7 @@ public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancell } /// - public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { // Use record key as partition key var compositeKey = new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key); @@ -244,7 +244,7 @@ public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancell } /// - public async IAsyncEnumerable GetBatchAsync( + public virtual async IAsyncEnumerable GetBatchAsync( IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -262,7 +262,7 @@ public async IAsyncEnumerable GetBatchAsync( } /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public virtual async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { var key = await this.InternalUpsertAsync(record, cancellationToken).ConfigureAwait(false); @@ -270,7 +270,7 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancella } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -292,7 +292,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable reco #region Implementation of IVectorStoreRecordCollection /// - public async Task GetAsync(AzureCosmosDBNoSQLCompositeKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task GetAsync(AzureCosmosDBNoSQLCompositeKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { return await this.InternalGetAsync([key], options, cancellationToken) .FirstOrDefaultAsync(cancellationToken) @@ -300,7 +300,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable reco } /// - public async IAsyncEnumerable GetBatchAsync( + public virtual async IAsyncEnumerable GetBatchAsync( IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -315,13 +315,13 @@ public async IAsyncEnumerable GetBatchAsync( } /// - public Task DeleteAsync(AzureCosmosDBNoSQLCompositeKey key, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(AzureCosmosDBNoSQLCompositeKey key, CancellationToken cancellationToken = default) { return this.InternalDeleteAsync([key], cancellationToken); } /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { return this.InternalDeleteAsync(keys, cancellationToken); } @@ -353,7 +353,7 @@ async IAsyncEnumerable IVectorStoreRecordCollect } /// - public Task> VectorizedSearchAsync( + public virtual Task> VectorizedSearchAsync( TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory.cs index 6e4288931f66..8d51dbb555b0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Azure.Cosmos; using Microsoft.Extensions.VectorData; @@ -8,6 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// /// Interface for constructing Azure CosmosDB NoSQL instances when using to retrieve these. 
/// +[Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public interface IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/IMongoDBVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/IMongoDBVectorStoreRecordCollectionFactory.cs index 3226fd9b4cc2..0726870eb56c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/IMongoDBVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/IMongoDBVectorStoreRecordCollectionFactory.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Extensions.VectorData; using MongoDB.Driver; @@ -8,6 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// /// Interface for constructing MongoDB instances when using to retrieve these. /// +[Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public interface IMongoDBVectorStoreRecordCollectionFactory { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs index 055c39ded77f..27169e3e9557 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs @@ -15,7 +15,7 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public sealed class MongoDBVectorStore : IVectorStore +public class MongoDBVectorStore : IVectorStore { /// that can be used to manage the collections in MongoDB. private readonly IMongoDatabase _mongoDatabase; @@ -37,13 +37,15 @@ public MongoDBVectorStore(IMongoDatabase mongoDatabase, MongoDBVectorStoreOption } /// - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) where TKey : notnull { +#pragma warning disable CS0618 // IMongoDBVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) { return this._options.VectorStoreCollectionFactory.CreateVectorStoreRecordCollection(this._mongoDatabase, name, vectorStoreRecordDefinition); } +#pragma warning restore CS0618 if (typeof(TKey) != typeof(string)) { @@ -59,7 +61,7 @@ public IVectorStoreRecordCollection GetCollection( } /// - public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { using var cursor = await this._mongoDatabase .ListCollectionNamesAsync(cancellationToken: cancellationToken) diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreOptions.cs index 56388b2652da..3382019ea1f6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreOptions.cs @@ -1,5 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; + namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// @@ -10,5 +12,6 @@ public sealed class MongoDBVectorStoreOptions /// /// An optional factory to use for constructing instances, if a custom record collection is required. /// + [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IMongoDBVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index 25fc14e8196e..0aa57579b7d7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -21,7 +21,7 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class MongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection +public class MongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// The name of this database for telemetry purposes.
@@ -95,11 +95,11 @@ public MongoDBVectorStoreRecordCollection( } /// - public Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public virtual Task CollectionExistsAsync(CancellationToken cancellationToken = default) => this.RunOperationAsync("ListCollectionNames", () => this.InternalCollectionExistsAsync(cancellationToken)); /// - public async Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public virtual async Task CreateCollectionAsync(CancellationToken cancellationToken = default) { await this.RunOperationAsync("CreateCollection", () => this._mongoDatabase.CreateCollectionAsync(this.CollectionName, cancellationToken: cancellationToken)).ConfigureAwait(false); @@ -113,7 +113,7 @@ await this.RunOperationWithRetryAsync( } /// - public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -122,7 +122,7 @@ public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellatio } /// - public async Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public virtual async Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -131,7 +131,7 @@ await this.RunOperationAsync("DeleteOne", () => this._mongoCollection.DeleteOneA } /// - public async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -140,11 +140,11 @@ await this.RunOperationAsync("DeleteMany", () => this._mongoCollection.DeleteMan } /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) => this.RunOperationAsync("DropCollection", () => this._mongoDatabase.DropCollectionAsync(this.CollectionName, cancellationToken)); /// - public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -174,7 +174,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public async IAsyncEnumerable GetBatchAsync( + public virtual async IAsyncEnumerable GetBatchAsync( IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -204,7 +204,7 @@ public async IAsyncEnumerable GetBatchAsync( } /// - public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public virtual Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -230,7 +230,7 @@ await this._mongoCollection } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -247,7 +247,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable reco } /// - public async Task> VectorizedSearchAsync( + public virtual async Task> VectorizedSearchAsync( TVector vector, MEVD.VectorSearchOptions? options = null, CancellationToken cancellationToken = default) diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeVectorStoreRecordCollectionFactory.cs index 85954c3de38a..25b6efae42de 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeVectorStoreRecordCollectionFactory.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Extensions.VectorData; using Sdk = Pinecone; @@ -8,6 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Interface for constructing Pinecone instances when using to retrieve these. /// +[Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public interface IPineconeVectorStoreRecordCollectionFactory { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs index da2795f923f8..4f79810e641b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs @@ -17,7 +17,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public sealed class PineconeVectorStore : IVectorStore +public class PineconeVectorStore : IVectorStore { private const string DatabaseName = "Pinecone"; private const string ListCollectionsName = "ListCollections"; @@ -39,13 +39,15 @@ public PineconeVectorStore(Sdk.PineconeClient pineconeClient, PineconeVectorStor } /// - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) where TKey : notnull { +#pragma warning disable CS0618 // IPineconeVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) { return this._options.VectorStoreCollectionFactory.CreateVectorStoreRecordCollection(this._pineconeClient, name, vectorStoreRecordDefinition); } +#pragma warning restore CS0618 if (typeof(TKey) != typeof(string)) { @@ -59,7 +61,7 @@ public IVectorStoreRecordCollection GetCollection( } /// - public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { IndexDetails[] collections; diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs index eb8caaa5a17d..310cce39d533 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs @@ -1,5 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; + namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// @@ -10,5 +12,6 @@ public sealed class PineconeVectorStoreOptions /// /// An optional factory to use for constructing instances, if a custom record collection is required. /// + [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IPineconeVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 8e1e8cf7aaf1..1db3d86fbf16 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -19,7 +19,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class PineconeVectorStoreRecordCollection : IVectorStoreRecordCollection +public class PineconeVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { private const string DatabaseName = "Pinecone"; @@ -90,7 +90,7 @@ public PineconeVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, st } /// - public async Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public virtual async Task CollectionExistsAsync(CancellationToken cancellationToken = default) { var result = await this.RunOperationAsync( CollectionExistsName, @@ -105,7 +105,7 @@ public async Task CollectionExistsAsync(CancellationToken cancellationToke } /// - public async Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public virtual async Task CreateCollectionAsync(CancellationToken cancellationToken = default) { // we already run through record property validation, so a single VectorStoreRecordVectorProperty is guaranteed. 
var vectorProperty = this._propertyReader.VectorProperty!; @@ -123,7 +123,7 @@ await this.RunOperationAsync( } /// - public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -132,13 +132,13 @@ public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellatio } /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) => this.RunOperationAsync( DeleteCollectionName, () => this._pineconeClient.DeleteIndex(this.CollectionName, cancellationToken)); /// - public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(key); @@ -148,7 +148,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public async IAsyncEnumerable GetBatchAsync( + public virtual async IAsyncEnumerable GetBatchAsync( IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -177,7 +177,7 @@ public async IAsyncEnumerable GetBatchAsync( } /// - public Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -185,7 +185,7 @@ public Task DeleteAsync(string key, CancellationToken cancellationToken = defaul } /// - public async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -199,7 +199,7 @@ await this.RunOperationAsync( } /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public virtual async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -221,7 +221,7 @@ await this.RunOperationAsync( } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -246,7 +246,7 @@ await this.RunOperationAsync( } /// - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreRecordCollectionFactory.cs index 5bf0d9cad789..58384ba767ac 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreRecordCollectionFactory.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Extensions.VectorData; using Npgsql; @@ -8,6 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; /// /// Interface for constructing Postgres instances when using to retrieve these. /// +[Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public interface IPostgresVectorStoreRecordCollectionFactory { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs index 99bbc8e320b5..f03fa1812583 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs @@ -41,7 +41,7 @@ internal PostgresVectorStore(IPostgresVectorStoreDbClient postgresDbClient, Post } /// - public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) + public virtual IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) { const string OperationName = "ListCollectionNames"; return PostgresVectorStoreUtils.WrapAsyncEnumerableAsync( @@ -51,7 +51,7 @@ public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cance } /// - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull { if (!PostgresConstants.SupportedKeyTypes.Contains(typeof(TKey))) @@ -59,10 +59,12 @@ public IVectorStoreRecordCollection GetCollection( throw new NotSupportedException($"Unsupported key type: {typeof(TKey)}"); } +#pragma warning disable CS0618 // IPostgresVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) { return this._options.VectorStoreCollectionFactory.CreateVectorStoreRecordCollection(this._postgresClient.DataSource, name, vectorStoreRecordDefinition); } +#pragma warning restore CS0618 var recordCollection = new PostgresVectorStoreRecordCollection( this._postgresClient, diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreOptions.cs index 013f1810e146..5add40eed8ee 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreOptions.cs @@ -1,5 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; + namespace Microsoft.SemanticKernel.Connectors.Postgres; /// @@ -15,5 +17,6 @@ public sealed class PostgresVectorStoreOptions /// /// An optional factory to use for constructing instances, if a custom record collection is required. 
/// + [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IPostgresVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index fd85896a46d4..bea84dce1b06 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -17,7 +17,7 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; /// The type of the key. /// The type of the record. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class PostgresVectorStoreRecordCollection : IVectorStoreRecordCollection +public class PostgresVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix where TKey : notnull { @@ -105,7 +105,7 @@ internal PostgresVectorStoreRecordCollection(IPostgresVectorStoreDbClient client } /// - public Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public virtual Task CollectionExistsAsync(CancellationToken cancellationToken = default) { const string OperationName = "DoesTableExists"; return this.RunOperationAsync(OperationName, () => @@ -114,7 +114,7 @@ public Task CollectionExistsAsync(CancellationToken cancellationToken = de } /// - public Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) { const string OperationName = "CreateCollection"; return this.RunOperationAsync(OperationName, () => @@ -123,7 +123,7 @@ public Task CreateCollectionAsync(CancellationToken cancellationToken = default) } /// - public Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public virtual Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { const string OperationName = "CreateCollectionIfNotExists"; return this.RunOperationAsync(OperationName, () => @@ -132,7 +132,7 @@ public Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken } /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { const string OperationName = "DeleteCollection"; return this.RunOperationAsync(OperationName, () => @@ -141,7 +141,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public virtual Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { const string OperationName = "Upsert"; @@ -166,7 +166,7 @@ public Task UpsertAsync(TRecord record, CancellationToken cancellationToke } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { const string OperationName = "UpsertBatch"; @@ -186,7 +186,7 @@ await this.RunOperationAsync(OperationName, () => } /// - public 
Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { const string OperationName = "Get"; @@ -208,7 +208,7 @@ await this.RunOperationAsync(OperationName, () => } /// - public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { const string OperationName = "GetBatch"; @@ -232,7 +232,7 @@ public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecord } /// - public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { const string OperationName = "Delete"; return this.RunOperationAsync(OperationName, () => @@ -241,7 +241,7 @@ public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) } /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { const string OperationName = "DeleteBatch"; return this.RunOperationAsync(OperationName, () => @@ -250,7 +250,7 @@ public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellat } /// - public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { const string OperationName = "VectorizedSearch"; diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorStoreRecordCollectionFactory.cs index fca033a2c329..32dd7ed47d91 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorStoreRecordCollectionFactory.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Extensions.VectorData; using Qdrant.Client; @@ -8,6 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// Interface for constructing Qdrant instances when using to retrieve these. /// +[Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public interface IQdrantVectorStoreRecordCollectionFactory { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs index 2bfb15ed9ce5..bfac788a7cfd 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs @@ -16,7 +16,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public sealed class QdrantVectorStore : IVectorStore +public class QdrantVectorStore : IVectorStore { /// The name of this database for telemetry purposes. 
private const string DatabaseName = "Qdrant"; @@ -51,13 +51,15 @@ internal QdrantVectorStore(MockableQdrantClient qdrantClient, QdrantVectorStoreO } /// - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull { +#pragma warning disable CS0618 // IQdrantVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) { return this._options.VectorStoreCollectionFactory.CreateVectorStoreRecordCollection(this._qdrantClient.QdrantClient, name, vectorStoreRecordDefinition); } +#pragma warning restore CS0618 if (typeof(TKey) != typeof(ulong) && typeof(TKey) != typeof(Guid)) { @@ -74,7 +76,7 @@ public IVectorStoreRecordCollection GetCollection( } /// - public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { IReadOnlyList collections; diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs index 27790c731aed..e7ce3f053970 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs @@ -1,5 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; + namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// @@ -16,5 +18,6 @@ public sealed class QdrantVectorStoreOptions /// /// An optional factory to use for constructing instances, if a custom record collection is required. /// + [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IQdrantVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index e51ae549818a..5cb529ad08e3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -19,7 +19,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class QdrantVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorStoreRecordCollection +public class QdrantVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// A set of types that a key on the provided model may have. 
@@ -128,7 +128,7 @@ internal QdrantVectorStoreRecordCollection(MockableQdrantClient qdrantClient, st public string CollectionName => this._collectionName; /// - public Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public virtual Task CollectionExistsAsync(CancellationToken cancellationToken = default) { return this.RunOperationAsync( "CollectionExists", @@ -136,7 +136,7 @@ public Task CollectionExistsAsync(CancellationToken cancellationToken = de } /// - public async Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public virtual async Task CreateCollectionAsync(CancellationToken cancellationToken = default) { if (!this._options.HasNamedVectors) { @@ -222,7 +222,7 @@ await this.RunOperationAsync( } /// - public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -231,7 +231,7 @@ public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellatio } /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { return this.RunOperationAsync( "DeleteCollection", @@ -239,7 +239,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public async Task GetAsync(ulong key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task GetAsync(ulong key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(key); @@ -248,7 +248,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public async Task GetAsync(Guid key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task GetAsync(Guid key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(key); @@ -257,19 +257,19 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default) + public virtual IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default) { return this.GetBatchByPointIdAsync(keys, key => new PointId { Num = key }, options, cancellationToken); } /// - public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default) + public virtual IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? 
options = default, CancellationToken cancellationToken = default) { return this.GetBatchByPointIdAsync(keys, key => new PointId { Uuid = key.ToString("D") }, options, cancellationToken); } /// - public Task DeleteAsync(ulong key, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(ulong key, CancellationToken cancellationToken = default) { Verify.NotNull(key); @@ -283,7 +283,7 @@ public Task DeleteAsync(ulong key, CancellationToken cancellationToken = default } /// - public Task DeleteAsync(Guid key, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(Guid key, CancellationToken cancellationToken = default) { Verify.NotNull(key); @@ -297,7 +297,7 @@ public Task DeleteAsync(Guid key, CancellationToken cancellationToken = default) } /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -311,7 +311,7 @@ public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancella } /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -325,7 +325,7 @@ public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellat } /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public virtual async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -363,7 +363,7 @@ await this.RunOperationAsync( } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -458,7 +458,7 @@ private async IAsyncEnumerable GetBatchByPointIdAsync( } /// - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/IRedisVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/IRedisVectorStoreRecordCollectionFactory.cs index 17b4bf329d24..ea98a9a6308d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/IRedisVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/IRedisVectorStoreRecordCollectionFactory.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Extensions.VectorData; using StackExchange.Redis; @@ -8,6 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// Interface for constructing Redis instances when using to retrieve these. 
/// +[Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public interface IRedisVectorStoreRecordCollectionFactory { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index 2a5d324e0171..68276cb97530 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -19,7 +19,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class RedisHashSetVectorStoreRecordCollection : IVectorStoreRecordCollection +public class RedisHashSetVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// The name of this database for telemetry purposes. @@ -147,7 +147,7 @@ public RedisHashSetVectorStoreRecordCollection(IDatabase database, string collec public string CollectionName => this._collectionName; /// - public async Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public virtual async Task CollectionExistsAsync(CancellationToken cancellationToken = default) { try { @@ -170,7 +170,7 @@ public async Task CollectionExistsAsync(CancellationToken cancellationToke } /// - public Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) { // Map the record definition to a schema. var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(this._propertyReader.Properties, this._propertyReader.StoragePropertyNamesMap, useDollarPrefix: false); @@ -186,7 +186,7 @@ public Task CreateCollectionAsync(CancellationToken cancellationToken = default) } /// - public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -195,13 +195,13 @@ public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellatio } /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { return this.RunOperationAsync("FT.DROPINDEX", () => this._database.FT().DropIndexAsync(this._collectionName)); } /// - public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -245,7 +245,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? 
options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -262,7 +262,7 @@ public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, G } /// - public Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -277,7 +277,7 @@ public Task DeleteAsync(string key, CancellationToken cancellationToken = defaul } /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -287,7 +287,7 @@ public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancell } /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public virtual async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -312,7 +312,7 @@ await this.RunOperationAsync( } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -329,7 +329,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable reco } /// - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index 0d5f74d0821a..45356d638f30 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -22,7 +22,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class RedisJsonVectorStoreRecordCollection : IVectorStoreRecordCollection +public class RedisJsonVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// The name of this database for telemetry purposes. @@ -129,7 +129,7 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectio public string CollectionName => this._collectionName; /// - public async Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public virtual async Task CollectionExistsAsync(CancellationToken cancellationToken = default) { try { @@ -152,7 +152,7 @@ public async Task CollectionExistsAsync(CancellationToken cancellationToke } /// - public Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) { // Map the record definition to a schema. 
var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(this._propertyReader.Properties, this._propertyReader.JsonPropertyNamesMap, useDollarPrefix: true); @@ -168,7 +168,7 @@ public Task CreateCollectionAsync(CancellationToken cancellationToken = default) } /// - public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -177,13 +177,13 @@ public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellatio } /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { return this.RunOperationAsync("FT.DROPINDEX", () => this._database.FT().DropIndexAsync(this._collectionName)); } /// - public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -228,7 +228,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(keys); var keysList = keys.ToList(); @@ -278,7 +278,7 @@ public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, G } /// - public Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -294,7 +294,7 @@ public Task DeleteAsync(string key, CancellationToken cancellationToken = defaul } /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -304,7 +304,7 @@ public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancell } /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public virtual async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -335,7 +335,7 @@ await this.RunOperationAsync( } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -374,7 +374,7 @@ await this.RunOperationAsync( } /// - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs index 8a6fff847d49..4966917d3990 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs @@ -16,7 +16,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public sealed class RedisVectorStore : IVectorStore +public class RedisVectorStore : IVectorStore { /// The name of this database for telemetry purposes. private const string DatabaseName = "Redis"; @@ -41,13 +41,15 @@ public RedisVectorStore(IDatabase database, RedisVectorStoreOptions? options = d } /// - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull { +#pragma warning disable CS0618 // IRedisVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) { return this._options.VectorStoreCollectionFactory.CreateVectorStoreRecordCollection(this._database, name, vectorStoreRecordDefinition); } +#pragma warning restore CS0618 if (typeof(TKey) != typeof(string)) { @@ -67,7 +69,7 @@ public IVectorStoreRecordCollection GetCollection( } /// - public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { const string OperationName = ""; RedisResult[] listResult; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs index 63eeda5a5e3e..c9af8554c231 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs @@ -1,5 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; + namespace Microsoft.SemanticKernel.Connectors.Redis; /// @@ -10,6 +12,7 @@ public sealed class RedisVectorStoreOptions /// /// An optional factory to use for constructing instances, if a custom record collection is required. /// + [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IRedisVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/ISqliteVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/ISqliteVectorStoreRecordCollectionFactory.cs index 48bf1da53d2d..6310489ac118 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/ISqliteVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/ISqliteVectorStoreRecordCollectionFactory.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
+using System; using System.Data.Common; using Microsoft.Extensions.VectorData; @@ -8,6 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// /// Interface for constructing SQLite instances when using to retrieve these. /// +[Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public interface ISqliteVectorStoreRecordCollectionFactory { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs index 86571536a5d5..43b1a29b52d2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs @@ -16,7 +16,7 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public sealed class SqliteVectorStore : IVectorStore +public class SqliteVectorStore : IVectorStore { /// that will be used to manage the data in SQLite. private readonly DbConnection _connection; @@ -40,9 +40,10 @@ public SqliteVectorStore( } /// - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull { +#pragma warning disable CS0618 // ISqliteVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) { return this._options.VectorStoreCollectionFactory.CreateVectorStoreRecordCollection( @@ -50,6 +51,7 @@ public IVectorStoreRecordCollection GetCollection( name, vectorStoreRecordDefinition); } +#pragma warning restore CS0618 if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(ulong)) { @@ -70,7 +72,7 @@ public IVectorStoreRecordCollection GetCollection( } /// - public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { const string TablePropertyName = "name"; const string Query = $"SELECT {TablePropertyName} FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%';"; diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreOptions.cs index 954358c58301..cac514677f07 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreOptions.cs @@ -1,5 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; + namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// @@ -10,6 +12,7 @@ public sealed class SqliteVectorStoreOptions /// /// An optional factory to use for constructing instances, if a custom record collection is required. /// + [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public ISqliteVectorStoreRecordCollectionFactory? 
VectorStoreCollectionFactory { get; init; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 8ae095dd3bf0..e3c2431491c3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -16,7 +16,7 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class SqliteVectorStoreRecordCollection : +public class SqliteVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect @@ -115,7 +115,7 @@ public SqliteVectorStoreRecordCollection( } /// - public async Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public virtual async Task CollectionExistsAsync(CancellationToken cancellationToken = default) { const string OperationName = "TableCount"; @@ -131,19 +131,19 @@ public async Task CollectionExistsAsync(CancellationToken cancellationToke } /// - public Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) { return this.InternalCreateCollectionAsync(ifNotExists: false, cancellationToken); } /// - public Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public virtual Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { return this.InternalCreateCollectionAsync(ifNotExists: true, cancellationToken); } /// - public async Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public virtual async Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { await this.DropTableAsync(this._dataTableName, cancellationToken).ConfigureAwait(false); @@ -154,7 +154,7 @@ public async Task DeleteCollectionAsync(CancellationToken cancellationToken = de } /// - public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { const string LimitPropertyName = "k"; @@ -227,37 +227,37 @@ public Task> VectorizedSearchAsync(TVector #region Implementation of IVectorStoreRecordCollection /// - public Task GetAsync(ulong key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual Task GetAsync(ulong key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { return this.InternalGetAsync(key, options, cancellationToken); } /// - public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? 
options = null, CancellationToken cancellationToken = default) { return this.InternalGetBatchAsync(keys, options, cancellationToken); } /// - public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public virtual Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { return this.InternalUpsertAsync(record, cancellationToken); } /// - public IAsyncEnumerable UpsertBatchAsync(IEnumerable records, CancellationToken cancellationToken = default) + public virtual IAsyncEnumerable UpsertBatchAsync(IEnumerable records, CancellationToken cancellationToken = default) { return this.InternalUpsertBatchAsync(records, cancellationToken); } /// - public Task DeleteAsync(ulong key, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(ulong key, CancellationToken cancellationToken = default) { return this.InternalDeleteAsync(key, cancellationToken); } /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { return this.InternalDeleteBatchAsync(keys, cancellationToken); } @@ -267,13 +267,13 @@ public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancella #region Implementation of IVectorStoreRecordCollection /// - public Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { return this.InternalGetAsync(key, options, cancellationToken); } /// - public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { return this.InternalGetBatchAsync(keys, options, cancellationToken); } @@ -291,13 +291,13 @@ IAsyncEnumerable IVectorStoreRecordCollection.UpsertBat } /// - public Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(string key, CancellationToken cancellationToken = default) { return this.InternalDeleteAsync(key, cancellationToken); } /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { return this.InternalDeleteBatchAsync(keys, cancellationToken); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateVectorStoreRecordCollectionFactory.cs index 13be46e6554a..10210eb8fb82 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateVectorStoreRecordCollectionFactory.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Net.Http; using Microsoft.Extensions.VectorData; @@ -8,6 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; /// /// Interface for constructing Weaviate instances when using to retrieve these. 
/// +[Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public interface IWeaviateVectorStoreRecordCollectionFactory { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs index 1e1df6c79ca1..dbf7f46b5f59 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs @@ -17,7 +17,7 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public sealed class WeaviateVectorStore : IVectorStore +public class WeaviateVectorStore : IVectorStore { /// that is used to interact with Weaviate API. private readonly HttpClient _httpClient; @@ -43,9 +43,10 @@ public WeaviateVectorStore(HttpClient httpClient, WeaviateVectorStoreOptions? op } /// - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull { +#pragma warning disable CS0618 // IWeaviateVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) { return this._options.VectorStoreCollectionFactory.CreateVectorStoreRecordCollection( @@ -53,6 +54,7 @@ public IVectorStoreRecordCollection GetCollection( name, vectorStoreRecordDefinition); } +#pragma warning restore CS0618 if (typeof(TKey) != typeof(Guid)) { @@ -73,7 +75,7 @@ public IVectorStoreRecordCollection GetCollection( } /// - public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { using var request = new WeaviateGetCollectionsRequest().Build(); diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs index 9feab8c9047d..ae73e7989d82 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs @@ -12,6 +12,7 @@ public sealed class WeaviateVectorStoreOptions /// /// An optional factory to use for constructing instances, if a custom record collection is required. /// + [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IWeaviateVectorStoreRecordCollectionFactory? 
VectorStoreCollectionFactory { get; init; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index fe8e965f67e3..3e4343eeb7aa 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -21,7 +21,7 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCollection +public class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// The name of this database for telemetry purposes. @@ -153,7 +153,7 @@ public WeaviateVectorStoreRecordCollection( } /// - public Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public virtual Task CollectionExistsAsync(CancellationToken cancellationToken = default) { const string OperationName = "GetCollectionSchema"; @@ -170,7 +170,7 @@ public Task CollectionExistsAsync(CancellationToken cancellationToken = de } /// - public Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) { const string OperationName = "CreateCollectionSchema"; @@ -189,7 +189,7 @@ public Task CreateCollectionAsync(CancellationToken cancellationToken = default) } /// - public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -198,7 +198,7 @@ public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellatio } /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { const string OperationName = "DeleteCollectionSchema"; @@ -211,7 +211,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public Task DeleteAsync(Guid key, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(Guid key, CancellationToken cancellationToken = default) { const string OperationName = "DeleteObject"; @@ -224,7 +224,7 @@ public Task DeleteAsync(Guid key, CancellationToken cancellationToken = default) } /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { const string OperationName = "DeleteObjectBatch"; const string ContainsAnyOperator = "ContainsAny"; @@ -249,7 +249,7 @@ public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellat } /// - public Task GetAsync(Guid key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual Task GetAsync(Guid key, GetRecordOptions? 
options = null, CancellationToken cancellationToken = default) { const string OperationName = "GetCollectionObject"; @@ -274,7 +274,7 @@ public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellat } /// - public async IAsyncEnumerable GetBatchAsync( + public virtual async IAsyncEnumerable GetBatchAsync( IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -293,7 +293,7 @@ public async IAsyncEnumerable GetBatchAsync( } /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public virtual async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { return await this.UpsertBatchAsync([record], cancellationToken) .FirstOrDefaultAsync(cancellationToken) @@ -301,7 +301,7 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { const string OperationName = "UpsertCollectionObject"; @@ -331,7 +331,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable record } /// - public async Task> VectorizedSearchAsync( + public virtual async Task> VectorizedSearchAsync( TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreTests.cs index a6be91ac04cc..160762c6a4fb 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreTests.cs @@ -30,6 +30,7 @@ public void GetCollectionWithNotSupportedKeyThrowsException() Assert.Throws(() => sut.GetCollection("collection")); } +#pragma warning disable CS0618 // IMongoDBVectorStoreRecordCollectionFactory is obsolete [Fact] public void GetCollectionWithFactoryReturnsCustomCollection() { @@ -58,6 +59,7 @@ public void GetCollectionWithFactoryReturnsCustomCollection() "collection", It.IsAny()), Times.Once()); } +#pragma warning restore CS0618 [Fact] public void GetCollectionWithoutFactoryReturnsDefaultCollection() diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreTests.cs index b11d6a81963f..33cfc005a7bc 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreTests.cs @@ -52,6 +52,7 @@ public void GetCollectionThrowsForInvalidKeyType() Assert.Throws(() => sut.GetCollection>(TestCollectionName)); } +#pragma warning disable CS0618 // IPostgresVectorStoreRecordCollectionFactory is obsolete [Fact] public void GetCollectionCallsFactoryIfProvided() { @@ -68,9 +69,10 @@ public void GetCollectionCallsFactoryIfProvided() // Act. var actual = sut.GetCollection>(TestCollectionName); - // Assert. + // Assert.
Assert.Equal(collectionMock.Object, actual); } +#pragma warning restore CS0618 [Fact] public async Task ListCollectionNamesCallsSDKAsync() diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreTests.cs index fc4e439f1919..9230b5f31fe0 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreTests.cs @@ -41,6 +41,7 @@ public void GetCollectionReturnsCollection() Assert.IsType>>(actual); } +#pragma warning disable CS0618 // IQdrantVectorStoreRecordCollectionFactory is obsolete [Fact] public void GetCollectionCallsFactoryIfProvided() { @@ -59,6 +60,7 @@ public void GetCollectionCallsFactoryIfProvided() Assert.Equal(collectionMock.Object, actual); factoryMock.Verify(x => x.CreateVectorStoreRecordCollection>(It.IsAny(), TestCollectionName, null), Times.Once); } +#pragma warning restore CS0618 [Fact] public void GetCollectionThrowsForInvalidKeyType() diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreTests.cs index 9edf28f88495..baf2564c81a2 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreTests.cs @@ -55,6 +55,7 @@ public void GetCollectionReturnsHashSetCollection() Assert.IsType>>(actual); } +#pragma warning disable CS0618 // IRedisVectorStoreRecordCollectionFactory is obsolete [Fact] public void GetCollectionCallsFactoryIfProvided() { @@ -73,6 +74,7 @@ public void GetCollectionCallsFactoryIfProvided() Assert.Equal(collectionMock.Object, actual); factoryMock.Verify(x => x.CreateVectorStoreRecordCollection>(It.IsAny(), TestCollectionName, null), Times.Once); } +#pragma warning restore CS0618 [Fact] public void GetCollectionThrowsForInvalidKeyType() diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreTests.cs index c6cf80e8b085..44180405aaa3 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreTests.cs @@ -43,6 +43,7 @@ public void GetCollectionWithSupportedKeyReturnsCollection() Assert.NotNull(collectionWithStringKey); } +#pragma warning disable CS0618 // ISqliteVectorStoreRecordCollectionFactory is obsolete [Fact] public void GetCollectionWithFactoryReturnsCustomCollection() { @@ -72,6 +73,7 @@ public void GetCollectionWithFactoryReturnsCustomCollection() "collection", It.IsAny()), Times.Once()); } +#pragma warning restore CS0618 [Fact] public async Task ListCollectionNamesReturnsCollectionNamesAsync() diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreTests.cs index 2622d15fdd73..e51af8124daf 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreTests.cs @@ -50,6 +50,7 @@ public void GetCollectionWithSupportedKeyReturnsCollection() Assert.NotNull(collection); } +#pragma warning disable CS0618 // IWeaviateVectorStoreRecordCollectionFactory is obsolete [Fact] public void GetCollectionWithFactoryReturnsCustomCollection() { @@ -78,6 +79,7 @@ 
public void GetCollectionWithFactoryReturnsCustomCollection() "collection", It.IsAny()), Times.Once()); } +#pragma warning restore CS0618 [Fact] public async Task ListCollectionNamesReturnsCollectionNamesAsync() diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreTests.cs index c05839ee0a44..4cd63ec6b8a9 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreTests.cs @@ -17,6 +17,7 @@ public class PineconeVectorStoreTests(PineconeVectorStoreFixture fixture) { private PineconeVectorStoreFixture Fixture { get; } = fixture; +#pragma warning disable CS0618 // IPineconeVectorStoreRecordCollectionFactory is obsolete [VectorStoreFact] public void CreateCollectionUsingFactory() { @@ -49,4 +50,5 @@ public IVectorStoreRecordCollection CreateVectorStoreRecordCollec return (new PineconeVectorStoreRecordCollection(pineconeClient, "factory" + name) as IVectorStoreRecordCollection)!; } } +#pragma warning restore CS0618 } From 10cde20bfac1a81a377beecb4aec86c061c13fac Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Tue, 4 Mar 2025 21:00:52 +0100 Subject: [PATCH 05/63] .Net: Rename VectorSearchOptions.NewFilter to Filter (#10790) And the obsoleted Filter to OldFilter. Continues #10273. --- .../VectorStore_VectorSearch_MultiStore_Common.cs | 2 +- .../Memory/VectorStore_VectorSearch_Simple.cs | 2 +- .../Step2_Vector_Search.cs | 2 +- .../AzureAISearchVectorStoreRecordCollectionTests.cs | 4 ++-- ...sDBNoSQLVectorStoreCollectionQueryBuilderTests.cs | 6 +++--- .../InMemoryVectorStoreRecordCollectionTests.cs | 2 +- .../AzureAISearchVectorStoreRecordCollection.cs | 12 ++++++------ ...zureCosmosDBMongoDBVectorStoreRecordCollection.cs | 6 +++--- ...CosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs | 6 +++--- .../InMemoryVectorStoreRecordCollection.cs | 6 +++--- .../MongoDBVectorStoreRecordCollection.cs | 6 +++--- .../PineconeVectorStoreRecordCollection.cs | 2 +- .../PostgresVectorStoreCollectionSqlBuilder.cs | 2 +- .../PostgresVectorStoreRecordCollection.cs | 4 ++-- .../QdrantVectorStoreRecordCollection.cs | 6 +++--- .../RedisVectorStoreCollectionSearchMapping.cs | 6 +++--- .../SqliteVectorStoreRecordCollection.cs | 10 +++++----- ...eaviateVectorStoreRecordCollectionQueryBuilder.cs | 6 +++--- .../QdrantVectorStoreRecordCollectionTests.cs | 2 +- .../RedisHashSetVectorStoreRecordCollectionTests.cs | 2 +- .../RedisJsonVectorStoreRecordCollectionTests.cs | 2 +- ...teVectorStoreRecordCollectionQueryBuilderTests.cs | 6 +++--- .../VectorSearch/VectorSearchFilter.cs | 2 +- .../VectorSearch/VectorSearchOptions.cs | 6 +++--- .../AzureAISearchVectorStoreRecordCollectionTests.cs | 6 +++--- ...osmosDBMongoDBVectorStoreRecordCollectionTests.cs | 2 +- ...eCosmosDBNoSQLVectorStoreRecordCollectionTests.cs | 2 +- .../MongoDBVectorStoreRecordCollectionTests.cs | 2 +- .../PineconeVectorStoreRecordCollectionTests.cs | 2 +- .../PostgresVectorStoreRecordCollectionTests.cs | 4 ++-- .../Qdrant/QdrantVectorStoreRecordCollectionTests.cs | 4 ++-- .../RedisHashSetVectorStoreRecordCollectionTests.cs | 4 ++-- .../RedisJsonVectorStoreRecordCollectionTests.cs | 4 ++-- .../Sqlite/SqliteVectorStoreRecordCollectionTests.cs | 2 +- .../WeaviateVectorStoreRecordCollectionTests.cs | 4 ++-- .../Data/TextSearch/VectorStoreTextSearch.cs | 2 +- .../Data/VolatileVectorStoreRecordCollection.cs | 4 ++-- 
.../Data/VolatileVectorStoreRecordCollectionTests.cs | 2 +- .../Filter/BasicFilterTests.cs | 4 ++-- .../VectorDataIntegrationTests/Support/TestStore.cs | 2 +- 40 files changed, 80 insertions(+), 80 deletions(-) diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs index ff492ca58304..435fdfcdfd85 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs @@ -70,7 +70,7 @@ public async Task IngestDataAndSearchAsync(string collectionName, Func g.Category == "External Definitions" }); + searchResult = await collection.VectorizedSearchAsync(searchVector, new() { Top = 3, Filter = g => g.Category == "External Definitions" }); resultRecords = await searchResult.Results.ToListAsync(); output.WriteLine("Search string: " + searchString); diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs index 5119881c3bda..9a43c01aeb43 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs @@ -70,7 +70,7 @@ public async Task ExampleAsync() // Search the collection using a vector search with pre-filtering. searchString = "What is Retrieval Augmented Generation"; searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - searchResult = await collection.VectorizedSearchAsync(searchVector, new() { Top = 3, NewFilter = g => g.Category == "External Definitions" }); + searchResult = await collection.VectorizedSearchAsync(searchVector, new() { Top = 3, Filter = g => g.Category == "External Definitions" }); resultRecords = await searchResult.Results.ToListAsync(); Console.WriteLine("Search string: " + searchString); diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs b/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs index 9b7e889b25dd..2eda86863a60 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs @@ -71,7 +71,7 @@ public async Task SearchAnInMemoryVectorStoreWithFilteringAsync() new() { Top = 1, - NewFilter = g => g.Category == "AI" + Filter = g => g.Category == "AI" }); var searchResultItems = await searchResult.Results.ToListAsync(); diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs index eb240f91d9aa..b919f00dc1fd 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -574,7 +574,7 @@ public async Task CanSearchWithVectorAndFilterAsync() { Top = 5, Skip = 3, - Filter = filter, + OldFilter = filter, VectorPropertyName = nameof(MultiPropsModel.Vector1) }, this._testCancellationToken); @@ -616,7 +616,7 @@ public async Task CanSearchWithTextAndFilterAsync() { Top = 5, Skip = 3, - Filter = filter, + OldFilter = filter, VectorPropertyName = nameof(MultiPropsModel.Vector1) }, this._testCancellationToken); diff --git 
a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs index 37aa005777d5..db55fca4baeb 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs @@ -37,7 +37,7 @@ public void BuildSearchQueryByDefaultReturnsValidQueryDefinition() .EqualTo("TestProperty2", "test-value-2") .AnyTagEqualTo("TestProperty3", "test-value-3"); - var searchOptions = new VectorSearchOptions { Filter = filter, Skip = 5, Top = 10 }; + var searchOptions = new VectorSearchOptions { OldFilter = filter, Skip = 5, Top = 10 }; // Act var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( @@ -86,7 +86,7 @@ public void BuildSearchQueryWithoutOffsetReturnsQueryDefinitionWithTopParameter( .EqualTo("TestProperty2", "test-value-2") .AnyTagEqualTo("TestProperty3", "test-value-3"); - var searchOptions = new VectorSearchOptions { Filter = filter, Top = 10 }; + var searchOptions = new VectorSearchOptions { OldFilter = filter, Top = 10 }; // Act var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( @@ -131,7 +131,7 @@ public void BuildSearchQueryWithInvalidFilterThrowsException() var filter = new VectorSearchFilter().EqualTo("non-existent-property", "test-value-2"); - var searchOptions = new VectorSearchOptions { Filter = filter, Skip = 5, Top = 10 }; + var searchOptions = new VectorSearchOptions { OldFilter = filter, Skip = 5, Top = 10 }; // Act & Assert Assert.Throws(() => diff --git a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs index bbf5c9611e32..d48730696fce 100644 --- a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs @@ -338,7 +338,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, TK var filter = filterType == "Equality" ? new VectorSearchFilter().EqualTo("Data", $"data {testKey2}") : new VectorSearchFilter().AnyTagEqualTo("Tags", $"tag {testKey2}"); var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new() { IncludeVectors = true, Filter = filter, IncludeTotalCount = true }, + new() { IncludeVectors = true, OldFilter = filter, IncludeTotalCount = true }, this._testCancellationToken); // Assert diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index 67ed7e58a96e..d011d5a6c127 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -340,9 +340,9 @@ public virtual Task> VectorizedSearchAsync // Build filter object. 
var filter = internalOptions switch { - { Filter: not null, NewFilter: not null } => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), - { Filter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._propertyReader.JsonPropertyNamesMap), - { NewFilter: Expression> newFilter } => new AzureAISearchFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), + { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), + { OldFilter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._propertyReader.JsonPropertyNamesMap), + { Filter: Expression> newFilter } => new AzureAISearchFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), _ => null }; #pragma warning restore CS0618 @@ -395,9 +395,9 @@ public virtual Task> VectorizableTextSearchAsync(st // Build filter object. var filter = internalOptions switch { - { Filter: not null, NewFilter: not null } => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), - { Filter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._propertyReader.JsonPropertyNamesMap), - { NewFilter: Expression> newFilter } => new AzureAISearchFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), + { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), + { OldFilter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._propertyReader.JsonPropertyNamesMap), + { Filter: Expression> newFilter } => new AzureAISearchFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), _ => null }; #pragma warning restore CS0618 diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index 95d4df05400f..794495cd1548 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -275,11 +275,11 @@ public virtual async Task> VectorizedSearchAsync throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), - { Filter: VectorSearchFilter legacyFilter } => AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter( + { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), + { OldFilter: VectorSearchFilter legacyFilter } => AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter( legacyFilter, this._storagePropertyNames), - { NewFilter: Expression> newFilter } => new AzureCosmosDBMongoDBFilterTranslator().Translate(newFilter, this._storagePropertyNames), + { Filter: Expression> newFilter } => new AzureCosmosDBMongoDBFilterTranslator().Translate(newFilter, this._storagePropertyNames), _ => null }; #pragma warning restore CS0618 diff --git 
a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs index 1b0e7dcb8a7f..4a5d2ec901fe 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs @@ -49,9 +49,9 @@ public static QueryDefinition BuildSearchQuery( // Build filter object. var (whereClause, filterParameters) = searchOptions switch { - { Filter: not null, NewFilter: not null } => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), - { Filter: VectorSearchFilter legacyFilter } => BuildSearchFilter(legacyFilter, storagePropertyNames), - { NewFilter: Expression> newFilter } => new AzureCosmosDBNoSqlFilterTranslator().Translate(newFilter, storagePropertyNames), + { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), + { OldFilter: VectorSearchFilter legacyFilter } => BuildSearchFilter(legacyFilter, storagePropertyNames), + { Filter: Expression> newFilter } => new AzureCosmosDBNoSqlFilterTranslator().Translate(newFilter, storagePropertyNames), _ => (null, []) }; #pragma warning restore CS0618 // VectorSearchFilter is obsolete diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index 03fe957cca07..b330c873fdbd 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -240,9 +240,9 @@ public async Task> VectorizedSearchAsync(T var allValues = this.GetCollectionDictionary().Values.Cast(); var filteredRecords = internalOptions switch { - { Filter: not null, NewFilter: not null } => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), - { Filter: VectorSearchFilter legacyFilter } => InMemoryVectorStoreCollectionSearchMapping.FilterRecords(legacyFilter, allValues), - { NewFilter: Expression> newFilter } => allValues.AsQueryable().Where(newFilter), + { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), + { OldFilter: VectorSearchFilter legacyFilter } => InMemoryVectorStoreCollectionSearchMapping.FilterRecords(legacyFilter, allValues), + { Filter: Expression> newFilter } => allValues.AsQueryable().Where(newFilter), _ => allValues }; #pragma warning restore CS0618 // VectorSearchFilter is obsolete diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index 0aa57579b7d7..6b86db514047 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -278,9 +278,9 @@ public virtual async Task> VectorizedSearchAsync throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), - { Filter: VectorSearchFilter legacyFilter } => 
MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(legacyFilter, this._storagePropertyNames), - { NewFilter: Expression> newFilter } => new MongoDBFilterTranslator().Translate(newFilter, this._storagePropertyNames), + { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), + { OldFilter: VectorSearchFilter legacyFilter } => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(legacyFilter, this._storagePropertyNames), + { Filter: Expression> newFilter } => new MongoDBFilterTranslator().Translate(newFilter, this._storagePropertyNames), _ => null }; #pragma warning restore CS0618 diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 1db3d86fbf16..6e44feda9334 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -262,7 +262,7 @@ public virtual async Task> VectorizedSearchAsync( #pragma warning disable CS0618 // VectorSearchFilter is obsolete var (where, parameters) = (oldFilter: legacyFilter, newFilter) switch { - (not null, not null) => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), + (not null, not null) => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), (not null, null) => GenerateLegacyFilterWhereClause(schema, tableName, propertyReader.RecordDefinition.Properties, legacyFilter, startParamIndex: 2), (null, not null) => new PostgresFilterTranslator().Translate(propertyReader.StoragePropertyNamesMap, newFilter, startParamIndex: 2), _ => (Clause: string.Empty, Parameters: []) diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index bea84dce1b06..81cdf1e1cc88 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -290,9 +290,9 @@ public virtual Task> VectorizedSearchAsync pgVector, searchOptions.Top, #pragma warning disable CS0618 // VectorSearchFilter is obsolete - searchOptions.Filter, + searchOptions.OldFilter, #pragma warning restore CS0618 // VectorSearchFilter is obsolete - searchOptions.NewFilter, + searchOptions.Filter, searchOptions.Skip, searchOptions.IncludeVectors, cancellationToken) diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index 5cb529ad08e3..401740a0e7a4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -478,9 +478,9 @@ public virtual async Task> VectorizedSearchAsync throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), - { Filter: VectorSearchFilter legacyFilter } => QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(legacyFilter, this._propertyReader.StoragePropertyNamesMap), - { NewFilter: Expression> newFilter } => new QdrantFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), + { 
OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), + { OldFilter: VectorSearchFilter legacyFilter } => QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(legacyFilter, this._propertyReader.StoragePropertyNamesMap), + { Filter: Expression> newFilter } => new QdrantFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), _ => new Filter() }; #pragma warning restore CS0618 // Type or member is obsolete diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs index ea78a9e798c0..f7663e0e7e44 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs @@ -62,9 +62,9 @@ public static Query BuildQuery(byte[] vectorBytes, VectorSearchOptions< #pragma warning disable CS0618 // Type or member is obsolete var filter = options switch { - { Filter: not null, NewFilter: not null } => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), - { Filter: VectorSearchFilter legacyFilter } => BuildLegacyFilter(legacyFilter, storagePropertyNames), - { NewFilter: Expression> newFilter } => new RedisFilterTranslator().Translate(newFilter, storagePropertyNames), + { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), + { OldFilter: VectorSearchFilter legacyFilter } => BuildLegacyFilter(legacyFilter, storagePropertyNames), + { Filter: Expression> newFilter } => new RedisFilterTranslator().Translate(newFilter, storagePropertyNames), _ => "*" }; #pragma warning restore CS0618 // Type or member is obsolete diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index e3c2431491c3..e91b1db1bf84 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -195,22 +195,22 @@ public virtual Task> VectorizedSearchAsync if (searchOptions.Filter is not null) { - if (searchOptions.NewFilter is not null) + if (searchOptions.Filter is not null) { - throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"); + throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"); } // Old filter, we translate it to a list of SqliteWhereCondition, and merge these into the conditions we already have - var filterConditions = this.GetFilterConditions(searchOptions.Filter, this._dataTableName); + var filterConditions = this.GetFilterConditions(searchOptions.OldFilter, this._dataTableName); if (filterConditions is { Count: > 0 }) { conditions.AddRange(filterConditions); } } - else if (searchOptions.NewFilter is not null) + else if (searchOptions.Filter is not null) { - (extraWhereFilter, extraParameters) = new SqliteFilterTranslator().Translate(this._propertyReader.StoragePropertyNamesMap, searchOptions.NewFilter); + (extraWhereFilter, extraParameters) = new SqliteFilterTranslator().Translate(this._propertyReader.StoragePropertyNamesMap, searchOptions.Filter); } #pragma warning restore CS0618 // VectorSearchFilter is obsolete diff 
--git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs index e665e7e85e08..1b38e708ab9f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs @@ -36,13 +36,13 @@ public static string BuildSearchQuery( #pragma warning disable CS0618 // VectorSearchFilter is obsolete var filter = searchOptions switch { - { Filter: not null, NewFilter: not null } => throw new ArgumentException("Either Filter or NewFilter can be specified, but not both"), - { Filter: VectorSearchFilter legacyFilter } => BuildLegacyFilter( + { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), + { OldFilter: VectorSearchFilter legacyFilter } => BuildLegacyFilter( legacyFilter, jsonSerializerOptions, keyPropertyName, storagePropertyNames), - { NewFilter: Expression> newFilter } => new WeaviateFilterTranslator().Translate(newFilter, storagePropertyNames), + { Filter: Expression> newFilter } => new WeaviateFilterTranslator().Translate(newFilter, storagePropertyNames), _ => null }; #pragma warning restore CS0618 diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs index 666efcc4647b..3d071066ae2b 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs @@ -561,7 +561,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, bo // Act. var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new[] { 1f, 2f, 3f, 4f }), - new() { IncludeVectors = true, Filter = filter, Top = 5, Skip = 2 }, + new() { IncludeVectors = true, OldFilter = filter, Top = 5, Skip = 2 }, this._testCancellationToken); // Assert. 
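For readers skimming the fused hunks above: every connector now applies the same guard when resolving the two filter properties. A minimal sketch of that shape, assuming the generic `VectorSearchOptions<TRecord>` shown in the hunks and placeholder `TranslateLegacy`/`TranslateLinq` methods standing in for the per-connector filter translators (the placeholders are not part of this patch):

```csharp
// Sketch of the shared guard shape; not taken verbatim from any single connector.
#pragma warning disable CS0618 // VectorSearchFilter is obsolete
using System;
using System.Linq.Expressions;
using Microsoft.Extensions.VectorData;

internal static class FilterGuardSketch
{
    internal static object? ResolveFilter<TRecord>(VectorSearchOptions<TRecord> options) =>
        options switch
        {
            // Specifying both the obsolete and the new filter is always an error.
            { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"),
            // Obsolete clause-based filter: routed to the connector's legacy path.
            { OldFilter: VectorSearchFilter legacyFilter } => TranslateLegacy(legacyFilter),
            // New LINQ expression filter: routed to the connector's filter translator.
            { Filter: Expression<Func<TRecord, bool>> newFilter } => TranslateLinq(newFilter),
            _ => null
        };

    // Placeholders only; each connector supplies its own translation logic.
    private static object TranslateLegacy(VectorSearchFilter filter) => filter;

    private static object TranslateLinq<TRecord>(Expression<Func<TRecord, bool>> filter) => filter;
}
#pragma warning restore CS0618
```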
diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs index fb15d0031c2b..117d3d1fcd4b 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -451,7 +451,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, bool inc new() { IncludeVectors = includeVectors, - Filter = filter, + OldFilter = filter, Top = 5, Skip = 2 }); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs index 6cfe1f17960e..49daf149c6e3 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs @@ -471,7 +471,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition) new() { IncludeVectors = true, - Filter = filter, + OldFilter = filter, Top = 5, Skip = 2 }); diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs index a0fa8b4f0ae0..1ee9d928599a 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs @@ -140,7 +140,7 @@ public void BuildSearchQueryWithFilterReturnsValidQuery() Skip = 2, Top = 3, VectorPropertyName = "DescriptionEmbedding", - Filter = new VectorSearchFilter() + OldFilter = new VectorSearchFilter() .EqualTo("HotelName", "Test Name") .AnyTagEqualTo("Tags", "t1") }; @@ -171,7 +171,7 @@ public void BuildSearchQueryWithInvalidFilterValueThrowsException() Skip = 2, Top = 3, VectorPropertyName = "DescriptionEmbedding", - Filter = new VectorSearchFilter().EqualTo("HotelName", new TestFilterValue()) + OldFilter = new VectorSearchFilter().EqualTo("HotelName", new TestFilterValue()) }; // Act & Assert @@ -196,7 +196,7 @@ public void BuildSearchQueryWithNonExistentPropertyInFilterThrowsException() Skip = 2, Top = 3, VectorPropertyName = "DescriptionEmbedding", - Filter = new VectorSearchFilter().EqualTo("NonExistentProperty", "value") + OldFilter = new VectorSearchFilter().EqualTo("NonExistentProperty", "value") }; // Act & Assert diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs index 9d167fcb160b..731031ae6706 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs @@ -14,7 +14,7 @@ namespace Microsoft.Extensions.VectorData; /// to request that the underlying service filter the search results. /// All clauses are combined with and. 
/// -[Obsolete("Use VectorSearchOptions.NewFilter instead of VectorSearchOptions.Filter")] +[Obsolete("Use VectorSearchOptions.Filter instead of VectorSearchOptions.OldFilter")] public sealed class VectorSearchFilter { /// The filter clauses to and together. diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs index 65d9c6e157c2..6ac552651379 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs @@ -13,13 +13,13 @@ public class VectorSearchOptions /// /// Gets or sets a search filter to use before doing the vector search. /// - [Obsolete("Use NewFilter instead")] - public VectorSearchFilter? Filter { get; init; } + [Obsolete("Use Filter instead")] + public VectorSearchFilter? OldFilter { get; init; } /// /// Gets or sets a search filter to use before doing the vector search. /// - public Expression>? NewFilter { get; init; } + public Expression>? Filter { get; init; } /// /// Gets or sets the name of the vector property to search on. diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs index f7fb10081c76..09e47ca8c61d 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -68,7 +68,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe new() { IncludeVectors = true, - Filter = new VectorSearchFilter().EqualTo("HotelName", "MyHotel Upsert-1") + OldFilter = new VectorSearchFilter().EqualTo("HotelName", "MyHotel Upsert-1") }); // Assert @@ -351,7 +351,7 @@ await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"), { IncludeVectors = includeVectors, VectorPropertyName = "DescriptionEmbedding", - Filter = filter, + OldFilter = filter, }); // Assert. @@ -390,7 +390,7 @@ public async Task ItCanSearchWithVectorizableTextAndFiltersAsync() new() { VectorPropertyName = "DescriptionEmbedding", - Filter = filter, + OldFilter = filter, }); // Assert. 
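On the caller side, the rename means the LINQ expression now goes on `Filter` (previously `NewFilter`). A minimal sketch, assuming a hypothetical `Hotel` model and an existing `IVectorStoreRecordCollection<string, Hotel>` instance, neither of which is part of this patch:

```csharp
// Caller-side sketch of the new-style filter; Hotel and "collection" are hypothetical.
using System;
using System.Threading.Tasks;
using Microsoft.Extensions.VectorData;

public sealed class Hotel
{
    [VectorStoreRecordKey]
    public string HotelId { get; set; } = string.Empty;

    [VectorStoreRecordData]
    public string HotelName { get; set; } = string.Empty;

    [VectorStoreRecordVector(4)]
    public ReadOnlyMemory<float> DescriptionEmbedding { get; set; }
}

public static class NewFilterUsageSketch
{
    public static async Task PrintMatchesAsync(
        IVectorStoreRecordCollection<string, Hotel> collection,
        ReadOnlyMemory<float> searchVector)
    {
        // The LINQ expression filter now lives on Filter.
        var searchResult = await collection.VectorizedSearchAsync(
            searchVector,
            new() { Filter = r => r.HotelName == "MyHotel Upsert-1", Top = 5 });

        await foreach (var match in searchResult.Results)
        {
            Console.WriteLine($"{match.Record.HotelName}: {match.Score}");
        }
    }
}
```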
diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 7f471405b8c9..f873991177d3 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -409,7 +409,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() { - Filter = new VectorSearchFilter().EqualTo(nameof(AzureCosmosDBMongoDBHotel.HotelName), "My Hotel key2") + OldFilter = new VectorSearchFilter().EqualTo(nameof(AzureCosmosDBMongoDBHotel.HotelName), "My Hotel key2") }); // Assert diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 3864a48288ef..546b957c68ae 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -343,7 +343,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync(VectorSearc // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() { - Filter = filter, + OldFilter = filter, Top = 4, }); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs index 3f88b10eef4b..c8cab7cb477e 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs @@ -410,7 +410,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() { - Filter = new VectorSearchFilter().EqualTo(nameof(MongoDBHotel.HotelName), "My Hotel key2") + OldFilter = new VectorSearchFilter().EqualTo(nameof(MongoDBHotel.HotelName), "My Hotel key2") }); // Assert diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs index 7e19c73128d0..9b68eaf8d863 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs @@ -375,7 +375,7 @@ public async Task VectorizedSearchWithFilterAsync(bool collectionFromVectorStore // Act. 
var filter = new VectorSearchFilter().EqualTo(nameof(PineconeHotel.HotelCode), 42); - var actual = await hotelRecordCollection.VectorizedSearchAsync(searchVector, new() { Top = 1, Filter = filter }); + var actual = await hotelRecordCollection.VectorizedSearchAsync(searchVector, new() { Top = 1, OldFilter = filter }); var searchResults = await actual.Results.ToListAsync(); Assert.Single(searchResults); var searchResultRecord = searchResults.First().Record; diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs index 6a479f0b10bf..58f3492074a6 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs @@ -387,7 +387,7 @@ public async Task VectorizedSearchWithEqualToFilterReturnsValidResultsAsync() { IncludeVectors = false, Top = 5, - Filter = new([ + OldFilter = new([ new EqualToFilterClause("HotelRating", 2.5f) ]) }); @@ -420,7 +420,7 @@ public async Task VectorizedSearchWithAnyTagFilterReturnsValidResultsAsync() { IncludeVectors = false, Top = 5, - Filter = new([ + OldFilter = new([ new AnyTagEqualToFilterClause("Tags", "tag2") ]) }); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs index 940687525238..667e713e844f 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs @@ -68,7 +68,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool hasNamedVec var vector = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"); var actual = await sut.VectorizedSearchAsync( vector, - new() { Filter = new VectorSearchFilter().EqualTo("HotelCode", 30).AnyTagEqualTo("Tags", "t2") }); + new() { OldFilter = new VectorSearchFilter().EqualTo("HotelCode", 30).AnyTagEqualTo("Tags", "t2") }); // Assert var collectionExistResult = await sut.CollectionExistsAsync(); @@ -396,7 +396,7 @@ public async Task ItCanSearchWithFilterAsync(bool useRecordDefinition, string co vector, new() { - Filter = filter + OldFilter = filter }); // Assert. 
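The clause-based filter remains available during the transition, but only through `OldFilter` and only with the obsoletion warning suppressed, as the integration tests above do. A minimal caller-side sketch, assuming a hypothetical `HotelRecord` model that is not part of this patch:

```csharp
// Caller-side sketch of the transitional, clause-based path; HotelRecord is hypothetical.
#pragma warning disable CS0618 // VectorSearchFilter is obsolete
using System.Collections.Generic;
using Microsoft.Extensions.VectorData;

public sealed class HotelRecord
{
    public string HotelName { get; set; } = string.Empty;
    public List<string> Tags { get; set; } = [];
}

public static class LegacyFilterUsageSketch
{
    public static VectorSearchOptions<HotelRecord> BuildOptions() => new()
    {
        // Same clause builder as before the rename, now assigned to OldFilter instead of Filter.
        OldFilter = new VectorSearchFilter()
            .EqualTo(nameof(HotelRecord.HotelName), "MyHotel Upsert-1")
            .AnyTagEqualTo(nameof(HotelRecord.Tags), "t1"),
        Top = 5,
        IncludeVectors = true,
    };
}
#pragma warning restore CS0618
```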
diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs index 61018b2b7589..91723c852047 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -67,7 +67,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe var actual = await sut .VectorizedSearchAsync( new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }), - new() { Filter = new VectorSearchFilter().EqualTo("HotelCode", 1), IncludeVectors = true }); + new() { OldFilter = new VectorSearchFilter().EqualTo("HotelCode", 1), IncludeVectors = true }); // Assert var collectionExistResult = await sut.CollectionExistsAsync(); @@ -321,7 +321,7 @@ public async Task ItCanSearchWithFloat32VectorAndFilterAsync(string filterType, new() { IncludeVectors = includeVectors, - Filter = filter + OldFilter = filter }); // Assert diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs index a12d710d9446..266948738ef6 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs @@ -66,7 +66,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe var getResult = await sut.GetAsync("Upsert-10", new GetRecordOptions { IncludeVectors = true }); var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }), - new() { Filter = new VectorSearchFilter().EqualTo("HotelCode", 10) }); + new() { OldFilter = new VectorSearchFilter().EqualTo("HotelCode", 10) }); // Assert var collectionExistResult = await sut.CollectionExistsAsync(); @@ -348,7 +348,7 @@ public async Task ItCanSearchWithFloat32VectorAndFilterAsync(string filterType) // Act var actual = await sut.VectorizedSearchAsync( vector, - new() { IncludeVectors = true, Filter = filter }); + new() { IncludeVectors = true, OldFilter = filter }); // Assert var searchResults = await actual.Results.ToListAsync(); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs index c0dbb5fcf680..f799fd26eaa8 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs @@ -423,7 +423,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() // Act var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() { - Filter = new VectorSearchFilter().EqualTo(nameof(SqliteHotel.HotelName), "My Hotel key2") + OldFilter = new VectorSearchFilter().EqualTo(nameof(SqliteHotel.HotelName), "My Hotel key2") }); var results = await searchResults.Results.ToListAsync(); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs index 
bd6348932937..494967b21fc7 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs @@ -300,7 +300,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync(VectorSearc // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() { - Filter = filter, + OldFilter = filter, Top = 4, }); @@ -345,7 +345,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAndDifferentDataT // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([40f, 40f, 40f, 40f]), new() { - Filter = filter, + OldFilter = filter, Top = 4, }); diff --git a/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs b/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs index 42781b1c5483..68ae09c883d5 100644 --- a/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs +++ b/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs @@ -200,7 +200,7 @@ private async Task> ExecuteVectorSearchAsync(string var vectorSearchOptions = new VectorSearchOptions { #pragma warning disable CS0618 // VectorSearchFilter is obsolete - Filter = searchOptions.Filter?.FilterClauses is not null ? new VectorSearchFilter(searchOptions.Filter.FilterClauses) : null, + OldFilter = searchOptions.Filter?.FilterClauses is not null ? new VectorSearchFilter(searchOptions.Filter.FilterClauses) : null, #pragma warning restore CS0618 // VectorSearchFilter is obsolete Skip = searchOptions.Skip, Top = searchOptions.Top, diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs index e94f321eed4a..b6e5454dc4d6 100644 --- a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs +++ b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs @@ -238,12 +238,12 @@ public async Task> VectorizedSearchAsync(T } // Filter records using the provided filter before doing the vector comparison. - if (internalOptions.NewFilter is not null) + if (internalOptions.Filter is not null) { throw new NotSupportedException("LINQ-based filtering is not supported with VolatileVectorStore, use Microsoft.SemanticKernel.Connectors.InMemory instead"); } - var filteredRecords = VolatileVectorStoreCollectionSearchMapping.FilterRecords(internalOptions.Filter, this.GetCollectionDictionary().Values); + var filteredRecords = VolatileVectorStoreCollectionSearchMapping.FilterRecords(internalOptions.OldFilter, this.GetCollectionDictionary().Values); // Compare each vector in the filtered results with the provided vector. var results = filteredRecords.Select((record) => diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs index edd169a725ff..b93c00952705 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs @@ -338,7 +338,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, TK var filter = filterType == "Equality" ? 
new VectorSearchFilter().EqualTo("Data", $"data {testKey2}") : new VectorSearchFilter().AnyTagEqualTo("Tags", $"tag {testKey2}"); var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new() { IncludeVectors = true, Filter = filter, IncludeTotalCount = true }, + new() { IncludeVectors = true, OldFilter = filter, IncludeTotalCount = true }, this._testCancellationToken); // Assert diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs index 138f3863a5d4..0f87d2ae7c5d 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs @@ -235,7 +235,7 @@ protected virtual async Task TestFilterAsync( new ReadOnlyMemory([1, 2, 3]), new() { - NewFilter = filter, + Filter = filter, Top = fixture.TestData.Count }); @@ -270,7 +270,7 @@ protected virtual async Task TestLegacyFilterAsync( new ReadOnlyMemory([1, 2, 3]), new() { - Filter = legacyFilter, + OldFilter = legacyFilter, Top = fixture.TestData.Count }); diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs index 3facbed3c916..bff2f583633e 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs @@ -84,7 +84,7 @@ public virtual async Task WaitForDataAsync( Top = recordCount, // In some databases (Azure AI Search), the data shows up but the filtering index isn't yet updated, // so filtered searches show empty results. Add a filter to the seed data check below. 
- NewFilter = filter + Filter = filter }); var count = await results.Results.CountAsync(); if (count == recordCount) From 865d67edaebb7af1f118af19bf9930610894b44f Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Wed, 5 Mar 2025 17:33:59 +0000 Subject: [PATCH 06/63] Python: Preb1 merge from main 1 (#10805) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Description Merging changes from main and resolving merge conflicts ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --------- Signed-off-by: dependabot[bot] Signed-off-by: Vincent Biret Co-authored-by: Evan Mattson <35585003+moonbox3@users.noreply.github.com> Co-authored-by: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com> Co-authored-by: Chris <66376200+crickman@users.noreply.github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Tao Chen Co-authored-by: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Co-authored-by: Mark Wallace <127216156+markwallace-microsoft@users.noreply.github.com> Co-authored-by: Estefanía Tenorio <8483207+esttenorio@users.noreply.github.com> Co-authored-by: davidpene Co-authored-by: ThDuquennoy Co-authored-by: Thomas DUQUENNOY Co-authored-by: Eduard van Valkenburg Co-authored-by: Evan Mattson Co-authored-by: Rob Emanuele Co-authored-by: K. Andrew Parker Co-authored-by: SergeyMenshykh <68852919+SergeyMenshykh@users.noreply.github.com> Co-authored-by: jenfoxbot Co-authored-by: Ben Thomas Co-authored-by: Ben Thomas Co-authored-by: Adit Sheth Co-authored-by: Adit Sheth Co-authored-by: ふぁー <47295014+ymuichiro@users.noreply.github.com> Co-authored-by: Vincent Biret Co-authored-by: David A. Torres <10944960+davidatorres@users.noreply.github.com> Co-authored-by: Genevieve Warren <24882762+gewarren@users.noreply.github.com> Co-authored-by: Atiqur Rahman Foyshal <113086917+atiq-bs23@users.noreply.github.com> Co-authored-by: Md. Atiqur Rahman Foyshal Co-authored-by: Fabian Williams <92543063+fabianwilliams@users.noreply.github.com> Co-authored-by: fabian.williams@microsoft.com Co-authored-by: Ram.Type-0 <39725073+RamType0@users.noreply.github.com> Co-authored-by: Tommy Falgout Co-authored-by: Gary Tang <113477176+gtang31-te@users.noreply.github.com> Co-authored-by: Eirik Tsarpalis Co-authored-by: Tommaso Stocchi Co-authored-by: Chris Rickman Co-authored-by: Devis Lucato Co-authored-by: K. 
Andrew Parker Co-authored-by: Jose Luis Latorre Millas Co-authored-by: Carsten Lemm Co-authored-by: Stephen Toub Co-authored-by: Musale Martin Co-authored-by: Ross Smith --- .github/_typos.toml | 5 +- .github/workflows/dotnet-build-and-test.yml | 10 +- .github/workflows/label-needs-port.yml | 41 + .github/workflows/python-build.yml | 17 + .../workflows/python-integration-tests.yml | 122 +- .github/workflows/python-lint.yml | 1 + .github/workflows/python-manual-release.yml | 30 + .github/workflows/python-test-coverage.yml | 1 + .github/workflows/python-unit-tests.yml | 17 +- .vscode/launch.json | 13 + .vscode/tasks.json | 9 + docs/decisions/0054-processes.md | 5 +- .../0064-hybrid-model-orchestration.md | 276 ++ docs/decisions/0065-realtime-api-clients.md | 1770 +++++++++++ docs/decisions/0066-concepts-guidelines.md | 93 + dotnet/Directory.Packages.props | 95 +- dotnet/SK-dotnet.sln | 123 +- dotnet/nuget/nuget-package.props | 4 +- .../Agents/AzureAIAgent_FileManipulation.cs | 68 + .../Concepts/Agents/AzureAIAgent_Streaming.cs | 183 ++ .../ChatCompletion_FunctionTermination.cs | 9 +- .../Agents/ChatCompletion_HistoryReducer.cs | 2 +- .../Agents/ChatCompletion_Serialization.cs | 3 +- .../Agents/ChatCompletion_ServiceSelection.cs | 17 +- .../Agents/ChatCompletion_Streaming.cs | 3 +- .../Agents/ChatCompletion_Templating.cs | 8 +- .../Agents/ComplexChat_NestedShopper.cs | 2 +- .../Concepts/Agents/DeclarativeAgents.cs | 31 +- .../Concepts/Agents/MixedChat_Agents.cs | 23 +- .../Concepts/Agents/MixedChat_Files.cs | 43 +- .../Concepts/Agents/MixedChat_Images.cs | 33 +- .../Concepts/Agents/MixedChat_Reset.cs | 24 +- .../Agents/MixedChat_Serialization.cs | 22 +- .../Concepts/Agents/MixedChat_Streaming.cs | 23 +- .../Agents/OpenAIAssistant_ChartMaker.cs | 36 +- .../OpenAIAssistant_FileManipulation.cs | 40 +- .../Agents/OpenAIAssistant_FunctionFilters.cs | 33 +- .../Agents/OpenAIAssistant_Streaming.cs | 87 +- .../Agents/OpenAIAssistant_Templating.cs | 92 +- .../AzureAIInference_ChatCompletion.cs | 6 +- ...zureAIInference_ChatCompletionStreaming.cs | 34 +- .../AzureOpenAI_ChatCompletion.cs | 79 +- .../AzureOpenAI_ChatCompletionStreaming.cs | 26 +- ...AzureOpenAI_ChatCompletionWithReasoning.cs | 102 + .../AzureOpenAI_CustomClient.cs | 42 +- .../ChatHistoryExtensions.cs | 14 +- ...ucer.cs => ChatHistoryMaxTokensReducer.cs} | 14 +- .../ChatHistoryReducerTests.cs | 50 +- .../IChatHistoryReducer.cs | 20 - .../SummarizingChatHistoryReducer.cs | 140 - .../TruncatingChatHistoryReducer.cs | 80 - .../HuggingFace_ChatCompletion.cs | 97 + .../HuggingFace_ChatCompletionStreaming.cs | 95 + .../HybridCompletion_Fallback.cs | 279 ++ .../ChatCompletion/LMStudio_ChatCompletion.cs | 92 + .../LMStudio_ChatCompletionStreaming.cs | 97 + .../MultipleProviders_ChatHistoryReducer.cs | 8 +- .../ChatCompletion/Ollama_ChatCompletion.cs | 51 +- .../Ollama_ChatCompletionStreaming.cs | 129 +- .../ChatCompletion/OpenAI_ChatCompletion.cs | 93 +- .../OpenAI_ChatCompletionStreaming.cs | 33 +- .../OpenAI_ChatCompletionWithReasoning.cs | 86 + .../ChatCompletion/OpenAI_CustomClient.cs | 41 +- dotnet/samples/Concepts/Concepts.csproj | 11 +- .../Filtering/AzureOpenAI_DeploymentSwitch.cs | 116 + .../HuggingFace_ChatCompletionWithTGI.cs | 89 - .../MultipleProviders_ChatCompletion.cs | 95 - .../Plugins/CopilotAgentBasedPlugins.cs | 73 +- .../samples/Concepts/Plugins/CrewAI_Plugin.cs | 108 + dotnet/samples/Concepts/README.md | 39 +- .../Resources/Agents/ParrotAgent.yaml | 9 - .../Concepts/Resources/Agents/ToolAgent.yaml | 7 - 
.../Concepts/Resources/Agents/travelinfo.txt | 217 -- .../MessagesPlugin/apimanifest.json | 2 +- .../AstronomyPlugin/messages-openapi.yml | 45 +- .../CalendarPlugin/calendar-apiplugin.json | 10 +- .../CalendarPlugin/calendar-openapi.yml | 51 +- .../MessagesPlugin/messages-apiplugin.json | 13 +- .../MessagesPlugin/messages-openapi.yml | 45 +- .../Plugins/CopilotAgentPlugins/README.md | 4 +- .../Resources/Plugins/LegacyMenuPlugin.cs | 50 - ...enticationProviderWithCancellationToken.cs | 65 + .../CopilotAgentPluginsDemoSample.csproj | 55 + .../CopilotAgentPluginsDemoSample.sln | 24 + .../DemoCommand.cs | 528 ++++ .../Logging/SemanticKernelLogger.cs | 125 + .../Logging/SemanticKernelLoggerProvider.cs | 27 + .../CopilotAgentPluginsDemoSample/Program.cs | 11 + .../appsettings.json | 19 + .../Demos/CopilotAgentPlugins/README.md | 168 ++ .../CopilotAgentPlugins/TROUBLESHOOTING.md | 11 + .../images/AppRegistration_APIPermissions.png | Bin 0 -> 127548 bytes .../images/AppRegistration_AppSecret.png | Bin 0 -> 81407 bytes ...on_Authentication_localhostredirecturi.png | Bin 0 -> 100857 bytes .../ApplicationOverViewScreenClientIDetc.png | Bin 0 -> 115660 bytes .../images/CAPs_PublicRoadmap.png | Bin 0 -> 138724 bytes .../images/aad-portal-app-registrations.png | Bin 0 -> 68909 bytes .../McpDotNetExtensions.cs | 159 + .../ModelContextProtocol.csproj | 33 + .../Demos/ModelContextProtocol/Program.cs | 55 + .../Demos/ModelContextProtocol/README.md | 44 + .../SimpleToolsConsole.json | 17 + .../ProcessFramework.Aspire.AppHost.csproj | 34 + .../Program.cs | 17 + .../appsettings.json | 12 + .../Models/ProcessEvents.cs | 11 + ...ramework.Aspire.ProcessOrchestrator.csproj | 28 + ...sFramework.Aspire.ProcessOrchestrator.http | 5 + .../Program.cs | 90 + .../Steps/SummarizeStep.cs | 23 + .../Steps/TranslateStep.cs | 23 + .../SummaryAgentHttpClient.cs | 20 + .../TranslatorAgentHttpClient.cs | 20 + .../appsettings.json | 9 + .../Extensions.cs | 163 + ...essFramework.Aspire.ServiceDefaults.csproj | 23 + .../ProcessFramework.Aspire.Shared.csproj | 11 + .../SummarizeRequest.cs | 14 + .../TranslationRequest.cs | 14 + ...rocessFramework.Aspire.SummaryAgent.csproj | 25 + .../ProcessFramework.Aspire.SummaryAgent.http | 9 + .../Program.cs | 87 + .../appsettings.json | 9 + ...essFramework.Aspire.TranslatorAgent.csproj | 26 + ...ocessFramework.Aspire.TranslatorAgent.http | 9 + .../Program.cs | 87 + .../appsettings.json | 9 + .../ProcessFrameworkWithAspire/README.md | 45 + .../docs/architecture.png | Bin 0 -> 45609 bytes .../docs/aspire-dashboard.png | Bin 0 -> 114295 bytes .../docs/aspire-metrics.png | Bin 0 -> 185939 bytes .../docs/aspire-traces.png | Bin 0 -> 177506 bytes .../AzureAIAgent/Step01_AzureAIAgent.cs | 68 + .../Step02_AzureAIAgent_Plugins.cs | 100 + .../Step03_AzureAIAgent_Chat.cs} | 77 +- .../Step04_AzureAIAgent_CodeInterpreter.cs | 54 + .../Step05_AzureAIAgent_FileSearch.cs | 71 + .../Step06_AzureAIAgent_OpenAPI.cs | 68 + .../Step07_AzureAIAgent_Functions.cs | 75 + .../BedrockAgent/README.md | 38 + .../BedrockAgent/Step01_BedrockAgent.cs | 73 + .../Step02_BedrockAgent_CodeInterpreter.cs | 90 + .../Step03_BedrockAgent_Functions.cs | 141 + .../BedrockAgent/Step04_BedrockAgent_Trace.cs | 176 ++ .../Step05_BedrockAgent_FileSearch.cs | 75 + .../Step06_BedrockAgent_AgentChat.cs | 93 + .../GettingStartedWithAgents.csproj | 6 +- .../OpenAIAssistant/Step01_Assistant.cs | 66 + .../Step02_Assistant_Plugins.cs | 92 + .../Step03_Assistant_Vision.cs} | 35 +- .../Step04_AssistantTool_CodeInterpreter.cs} | 30 +- 
.../Step05_AssistantTool_FileSearch.cs | 73 + .../Step06_AssistantTool_Function.cs | 77 + .../Plugins/MenuPlugin.cs | 79 + .../Plugins/WidgetFactory.cs | 63 + .../GettingStartedWithAgents/README.md | 64 +- .../Resources/AutoInvokeTools.yaml | 7 + .../Resources/countries.json | 46 + .../Resources/weather.json | 62 + .../GettingStartedWithAgents/Step01_Agent.cs | 13 +- .../Step02_Plugins.cs | 119 +- .../Step04_KernelFunctionStrategies.cs | 2 +- .../Step06_DependencyInjection.cs | 3 +- .../Step07_Telemetry.cs | 236 ++ .../Step08_Assistant.cs | 142 - .../Step11_AssistantTool_FileSearch.cs | 84 - .../Step04/KernelExtensions.cs | 3 +- .../Step04/Step04_AgentOrchestration.cs | 2 +- dotnet/src/.editorconfig | 3 + dotnet/src/Agents/Abstractions/Agent.cs | 31 +- .../src/Agents/Abstractions/AgentChannel.cs | 23 +- dotnet/src/Agents/Abstractions/AgentChat.cs | 103 +- .../Abstractions/AgentChatSerializer.cs | 14 +- .../Abstractions/Agents.Abstractions.csproj | 7 +- .../Agents/Abstractions/AggregatorAgent.cs | 12 +- .../Agents/Abstractions/AggregatorChannel.cs | 2 + .../Extensions/ChatHistoryExtensions.cs | 10 +- .../Abstractions/Internal/BroadcastQueue.cs | 6 +- .../Abstractions/Internal/ChannelReference.cs | 3 + dotnet/src/Agents/Abstractions/KernelAgent.cs | 59 +- .../Logging/AgentChatLogMessages.cs | 46 +- .../Logging/AggregatorAgentLogMessages.cs | 1 + .../Serialization/AgentParticipant.cs | 18 +- .../Serialization/ChatMessageReference.cs | 16 +- .../src/Agents/AzureAI/Agents.AzureAI.csproj | 48 + .../AzureAI/AzureAIAgent.ClientFactory.cs | 65 + dotnet/src/Agents/AzureAI/AzureAIAgent.cs | 285 ++ dotnet/src/Agents/AzureAI/AzureAIChannel.cs | 61 + .../Agents/AzureAI/AzureAIClientProvider.cs | 116 + .../AzureAI/AzureAIInvocationOptions.cs | 109 + .../AzureAI/AzureAIThreadMessageFactory.cs | 23 + .../AzureAI/Extensions/AgentRunExtensions.cs | 120 + .../Extensions/KernelFunctionExtensions.cs | 29 + .../AzureAI/Internal/AgentMessageFactory.cs | 98 + .../AzureAI/Internal/AgentThreadActions.cs | 860 ++++++ .../Logging/AgentThreadActionsLogMessages.cs | 139 + .../Logging/AzureAIAgentLogMessages.cs | 69 + .../Properties/AssemblyInfo.cs | 0 .../src/Agents/AzureAI/RunPollingOptions.cs | 73 + .../src/Agents/Bedrock/Agents.Bedrock.csproj | 50 + dotnet/src/Agents/Bedrock/BedrockAgent.cs | 263 ++ .../src/Agents/Bedrock/BedrockAgentChannel.cs | 248 ++ .../Extensions/BedrockAgentExtensions.cs | 214 ++ .../BedrockAgentInvokeExtensions.cs | 225 ++ .../BedrockFunctionSchemaExtensions.cs | 102 + .../Properties/AssemblyInfo.cs | 0 dotnet/src/Agents/Bedrock/README.md | 27 + dotnet/src/Agents/Core/AgentGroupChat.cs | 80 +- dotnet/src/Agents/Core/Agents.Core.csproj | 6 +- .../Core/Chat/AgentGroupChatSettings.cs | 24 +- .../Chat/AggregatorTerminationStrategy.cs | 14 +- .../Chat/KernelFunctionSelectionStrategy.cs | 25 +- .../Chat/KernelFunctionTerminationStrategy.cs | 23 +- .../Core/Chat/RegExTerminationStrategy.cs | 2 + .../src/Agents/Core/Chat/SelectionStrategy.cs | 20 +- .../Core/Chat/SequentialSelectionStrategy.cs | 9 +- .../Agents/Core/Chat/TerminationStrategy.cs | 38 +- dotnet/src/Agents/Core/ChatCompletionAgent.cs | 167 +- dotnet/src/Agents/Core/ChatHistoryChannel.cs | 20 +- .../src/Agents/Core/ChatHistoryKernelAgent.cs | 37 +- .../Core/History/IChatHistoryReducer.cs | 32 - .../Core/Internal/ChatMessageForPrompt.cs | 2 + .../Core/Logging/AgentGroupChatLogMessages.cs | 15 +- ...ggregatorTerminationStrategyLogMessages.cs | 1 + .../Logging/ChatCompletionAgentLogMessages.cs | 9 +- 
...nelFunctionSelectionStrategyLogMessages.cs | 1 + ...lFunctionTerminationStrategyLogMessages.cs | 1 + .../RegExTerminationStrategyLogMessages.cs | 1 + .../SequentialSelectionStrategyLogMessages.cs | 6 +- .../Logging/TerminationStrategyLogMessages.cs | 14 +- dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj | 9 +- .../Extensions/AssistantClientExtensions.cs | 172 ++ .../ChatContentMessageExtensions.cs | 36 + .../OpenAI/Extensions/KernelExtensions.cs | 18 - .../Extensions/KernelFunctionExtensions.cs | 84 +- .../Extensions/OpenAIClientExtensions.cs | 110 + .../OpenAI/Internal/AddHeaderRequestPolicy.cs | 13 - .../AssistantCreationOptionsFactory.cs | 2 + .../Internal/AssistantRunOptionsFactory.cs | 70 +- .../OpenAI/Internal/AssistantThreadActions.cs | 112 +- .../Internal/AssistantToolResourcesFactory.cs | 2 +- .../OpenAIAssistantAgent.ClientFactory.cs | 122 + .../src/Agents/OpenAI/OpenAIAssistantAgent.cs | 380 +-- .../OpenAI/OpenAIAssistantCapabilities.cs | 42 +- .../Agents/OpenAI/OpenAIAssistantChannel.cs | 18 +- .../OpenAI/OpenAIAssistantDefinition.cs | 16 +- .../OpenAI/OpenAIAssistantExecutionOptions.cs | 20 +- .../OpenAIAssistantInvocationOptions.cs | 55 +- .../src/Agents/OpenAI/OpenAIClientProvider.cs | 49 +- .../OpenAI/OpenAIThreadCreationOptions.cs | 23 +- dotnet/src/Agents/OpenAI/RunPollingOptions.cs | 28 +- .../Agents/UnitTests/Agents.UnitTests.csproj | 18 +- .../AzureAIAssistantInvocationOptionsTests.cs | 113 + .../AzureAI/AzureAIClientProviderTests.cs | 72 + .../KernelFunctionExtensionsTests.cs | 64 + .../Internal/AgentMessageFactoryTests.cs | 112 + .../AzureAI/RunPollingOptionsTests.cs | 71 + .../Bedrock/BedrockAgentChannelTests.cs | 289 ++ .../UnitTests/Bedrock/BedrockAgentTests.cs | 290 ++ .../BedrockAgentExtensionsTests.cs | 320 ++ .../BedrockFunctionSchemaExtensionsTests.cs | 111 + .../Core/ChatCompletionAgentTests.cs | 71 +- .../UnitTests/Core/ChatHistoryChannelTests.cs | 96 +- .../src/Agents/UnitTests/KernelAgentTests.cs | 13 +- dotnet/src/Agents/UnitTests/MockAgent.cs | 2 +- .../Azure/AddHeaderRequestPolicyTests.cs | 37 - .../AssistantClientExtensionsTests.cs | 357 +++ .../Extensions/KernelExtensionsTests.cs | 60 - .../Extensions/OpenAIClientExtensionsTests.cs | 139 + .../AssistantRunOptionsFactoryTests.cs | 91 +- .../OpenAI/OpenAIAssistantAgentTests.cs | 87 +- .../OpenAI/OpenAIAssistantDefinitionTests.cs | 1 + .../OpenAIAssistantInvocationOptionsTests.cs | 1 + .../OpenAI/OpenAIAssistantResponseContent.cs | 149 +- .../OpenAI/OpenAIClientProviderTests.cs | 4 +- .../OpenAIThreadCreationOptionsTests.cs | 1 + .../{OpenAI => Test}/AssertCollection.cs | 2 +- ...reAIInferenceChatCompletionServiceTests.cs | 2 +- ...AzureAIInferencePromptExecutionSettings.cs | 2 +- .../Core/AzureClientCoreTests.cs | 90 + .../AzureOpenAIChatCompletionServiceTests.cs | 257 +- .../AzureOpenAITextToAudioServiceTests.cs | 17 + ...AzureOpenAIPromptExecutionSettingsTests.cs | 6 +- .../Core/AzureClientCore.ChatCompletion.cs | 37 +- .../Core/AzureClientCore.cs | 3 + .../AzureOpenAIPromptExecutionSettings.cs | 22 + .../Core/Gemini/GeminiRequestTests.cs | 65 + ...oogleAIGeminiChatCompletionServiceTests.cs | 58 +- ...ertexAIGeminiChatCompletionServiceTests.cs | 57 +- .../Connectors.Google/Core/ClientBase.cs | 1 + .../Core/Gemini/Models/GeminiRequest.cs | 48 + .../GeminiPromptExecutionSettings.cs | 27 + .../Connectors.Google/VertexAIVersion.cs | 7 +- .../IPineconeMemoryStore.cs | 4 +- .../IPostgresVectorStoreDbClient.cs | 2 +- ...PostgresVectorStoreCollectionSqlBuilder.cs | 2 +- .../IQdrantVectorDbClient.cs | 20 +- 
.../OllamaKernelBuilderExtensionsTests.cs | 158 + .../OllamaServiceCollectionExtensionsTests.cs | 202 ++ .../OllamaKernelBuilderExtensions.cs | 53 +- .../OllamaServiceCollectionExtensions.cs | 77 +- .../Services/OllamaTextGenerationService.cs | 12 + .../OnnxRuntimeGenAIChatCompletionService.cs | 5 +- .../Core/AutoFunctionInvocationFilterTests.cs | 59 + .../OpenAIChatCompletionServiceTests.cs | 101 + .../OpenAIPromptExecutionSettingsTests.cs | 19 +- .../Core/ClientCore.ChatCompletion.cs | 53 +- .../Settings/OpenAIPromptExecutionSettings.cs | 45 + .../AnyTagEqualToFilterClause.cs | 6 +- .../FilterClauses/EqualToFilterClause.cs | 6 +- .../FilterClauses/FilterClause.cs | 2 +- .../VectorStoreRecordDataAttribute.cs | 26 +- .../VectorStoreRecordKeyAttribute.cs | 8 +- .../VectorStoreRecordVectorAttribute.cs | 24 +- .../RecordDefinition/DistanceFunction.cs | 8 +- .../RecordDefinition/IndexKind.cs | 29 +- .../VectorStoreRecordDataProperty.cs | 16 +- .../VectorStoreRecordDefinition.cs | 4 +- .../VectorStoreRecordKeyProperty.cs | 4 +- .../VectorStoreRecordProperty.cs | 16 +- .../VectorStoreRecordVectorProperty.cs | 26 +- .../RecordOptions/GetRecordOptions.cs | 4 +- .../VectorData.Abstractions.csproj | 2 +- .../VectorSearch/IVectorizableTextSearch.cs | 2 +- .../VectorSearch/IVectorizedSearch.cs | 2 +- .../VectorSearch/VectorSearchFilter.cs | 36 +- .../VectorSearch/VectorSearchOptions.cs | 14 +- .../VectorSearch/VectorSearchResult.cs | 6 +- .../VectorSearch/VectorSearchResults.cs | 6 +- .../VectorStorage/IVectorStore.cs | 10 +- .../IVectorStoreRecordCollection.cs | 72 +- .../VectorStorage/IVectorStoreRecordMapper.cs | 6 +- .../StorageToDataModelMapperOptions.cs | 4 +- .../VectorStorage/VectorStoreException.cs | 6 +- .../VectorStoreOperationException.cs | 2 +- .../VectorStoreRecordMappingException.cs | 2 +- .../VectorStoreGenericDataModel.cs | 4 +- .../IKernelExternalProcessMessageChannel.cs | 32 + .../IKernelProcessMessageChannel.cs | 2 +- .../KernelProcessContext.cs | 6 + .../KernelProcessStepContext.cs | 27 +- .../Controllers/ProcessTestController.cs | 18 + .../Program.cs | 5 + .../DaprTestProcessContext.cs | 11 + .../ProcessTestFixture.cs | 4 +- .../ProcessTestFixture.cs | 5 +- .../CloudEvents/MockCloudEventClient.cs | 63 + .../CloudEvents/MockCloudEventData.cs | 19 + .../ProcessCloudEventsResources.cs | 40 + .../Process.IntegrationTests.Shared.props | 1 + .../ProcessCloudEventsTests.cs | 113 + .../ProcessTestFixture.cs | 3 +- .../LocalKernelProcessContext.cs | 15 +- .../LocalKernelProcessFactory.cs | 5 +- .../Process.LocalRuntime/LocalProcess.cs | 1 + .../Process.LocalRuntime/LocalStep.cs | 11 +- .../Actors/ExternalMessageBufferActor.cs | 39 + .../ExternalMessageBufferActorWrapper.cs | 43 + .../Process.Runtime.Dapr/Actors/StepActor.cs | 9 +- .../DaprKernelProcessContext.cs | 6 + .../Interfaces/IExternalMessageBuffer.cs | 25 + .../KernelProcessDaprExtensions.cs | 1 + .../HandlebarsPromptTemplateTests.cs | 35 + .../HandlebarsPromptTemplate.cs | 3 +- .../HandlebarsPromptTemplateOptions.cs | 5 + .../LiquidPromptTemplate.cs | 6 +- .../CopilotAgentPluginKernelExtensions.cs | 34 +- .../OpenApiFunctionExecutionParameters.cs | 1 - .../Extensions/OpenApiKernelExtensions.cs | 3 - .../Functions.OpenApi.csproj | 1 - .../HttpResponseContentReader.cs | 2 - .../HttpResponseContentReaderContext.cs | 2 - .../Model/RestApiOperation.cs | 6 +- .../OpenApi/OpenApiDocumentParser.cs | 1 - .../OpenApi/OpenApiDocumentParserOptions.cs | 2 - .../OperationSelectionPredicateContext.cs | 2 - .../OpenApiKernelPluginFactory.cs | 2 
- .../RestApiOperationRunner.cs | 4 +- .../Serialization/OpenApiTypeConverter.cs | 35 +- .../PromptyTest.cs | 5 +- .../TestData/chat.prompty | 2 +- .../TestData/chatJsonObject.prompty | 1 + .../Functions.Prompty/Core/PromptyModel.cs | 20 - .../Core/PromptyModelConfig.cs | 31 - .../Core/PromptyModelParameters.cs | 50 - .../Core/PromptyResponseFormat.cs | 13 - .../Functions.Prompty/Core/PromptyTool.cs | 44 - .../Functions.Prompty/Core/PromptyYaml.cs | 42 - .../Functions.Prompty/Core/Types/ApiType.cs | 9 - .../Functions.Prompty/Core/Types/ModelType.cs | 9 - .../Core/Types/ParserType.cs | 11 - .../Functions.Prompty/Core/Types/RoleType.cs | 12 - .../Functions.Prompty.csproj | 2 +- .../KernelFunctionPrompty.cs | 144 +- ...CopilotAgentPluginKernelExtensionsTests.cs | 2 +- .../OpenApiTypeConverterTests.cs | 47 + .../TestPlugins/messages-apiplugin.json | 15 +- .../OpenApi/TestPlugins/messages-openapi.yml | 45 +- .../Agents/BedrockAgentTests.cs | 238 ++ .../Agents/MixedAgentTests.cs | 17 +- .../Agents/OpenAIAssistantAgentTests.cs | 181 +- .../IntegrationTests/BaseIntegrationTest.cs | 4 +- ...AzureOpenAIChatCompletionStreamingTests.cs | 15 +- .../Gemini/GeminiChatCompletionTests.cs | 110 + .../Connectors/Google/TestsBase.cs | 16 +- ...ostgresVectorStoreRecordCollectionTests.cs | 30 +- .../IntegrationTests/IntegrationTests.csproj | 7 + .../Resources/gemini_cached_content.json | 22 + .../TestSettings/BedrockAgentConfiguration.cs | 13 + dotnet/src/IntegrationTests/testsettings.json | 4 + .../agents/AgentUtilities.props | 5 + .../agents}/Extensions/AgentExtensions.cs | 15 +- .../KernelFunctionMetadataExtensions.cs | 36 + .../azure/AzureAIUtilities.props | 5 + .../Policies/GeneratedActionPipelinePolicy.cs | 32 + .../FunctionCalling/FunctionCallsProcessor.cs | 19 +- .../process/Abstractions/StepExtensions.cs | 9 +- .../samples/AgentUtilities/BaseAgentsTest.cs | 45 +- .../AgentUtilities/BaseAssistantTest.cs | 90 + .../AgentUtilities/BaseAzureAgentTest.cs | 173 ++ .../samples/AgentUtilities/BaseAzureTest.cs | 66 + .../AgentUtilities/BaseBedrockAgentTest.cs | 41 + .../samples/InternalUtilities/BaseTest.cs | 27 + .../InternalUtilities/TestConfiguration.cs | 27 + .../src/Diagnostics/ActivityExtensions.cs | 53 +- .../src/Diagnostics/ModelDiagnostics.cs | 35 + .../CrewAI/CrewAIEnterpriseClientTests.cs | 151 + .../CrewAI/CrewAIEnterpriseTests.cs | 150 + .../CrewAI/MockHttpClientFactory.cs | 24 + .../Plugins.AI.UnitTests.csproj | 37 + .../Plugins.AI}/AssemblyInfo.cs | 2 +- .../CrewAI/Client/CrewAIEnterpriseClient.cs | 164 ++ .../CrewAI/Client/CrewAIStateEnumConverter.cs | 44 + .../Plugins.AI/CrewAI/CrewAIEnterprise.cs | 282 ++ .../Plugins.AI/CrewAI/CrewAIInputMetadata.cs | 15 + .../CrewAI/Models/CrewAIKickoffResponse.cs | 16 + .../CrewAI/Models/CrewAIKickoffState.cs | 44 + .../CrewAI/Models/CrewAIRequiredInputs.cs | 18 + .../CrewAI/Models/CrewAIStatusResponse.cs | 31 + .../src/Plugins/Plugins.AI/Plugins.AI.csproj | 34 + .../FileSystem/IFileSystemConnector.cs | 8 +- .../Plugins.Document/IDocumentConnector.cs | 6 +- .../AIFunctionKernelFunction.cs | 58 +- .../AI/ChatCompletion/AuthorRole.cs | 5 + .../ChatClientChatCompletionService.cs | 32 +- .../ChatCompletionServiceChatClient.cs | 11 +- .../ChatCompletionServiceExtensions.cs | 38 +- .../AI/ChatCompletion/ChatHistory.cs | 28 +- .../ChatCompletion/ChatHistoryExtensions.cs | 83 + .../AI/ChatCompletion/ChatPromptParser.cs | 11 +- .../AI/ChatCompletion/IChatHistoryReducer.cs | 23 + .../EmbeddingGenerationServiceExtensions.cs | 7 +- .../FunctionChoiceBehaviorOptions.cs | 
2 - .../AI/TextToImage/ITextToImageService.cs | 2 +- .../AbstractionsJsonContext.cs | 2 + .../Contents/AnnotationContent.cs | 2 +- .../Contents/FileReferenceContent.cs | 9 + .../Contents/KernelContent.cs | 2 +- .../Contents/StreamingAnnotationContent.cs | 2 +- .../Contents/StreamingChatMessageContent.cs | 2 +- .../Data/TextSearch/ITextSearch.cs | 6 +- .../AutoFunctionInvocationContext.cs | 7 + .../Filters/Prompt/PromptRenderContext.cs | 7 + .../Functions/KernelFunction.cs | 54 +- .../Functions/KernelFunctionNoop.cs | 46 + .../Functions/KernelFunctionSchemaModel.cs | 22 + .../src/SemanticKernel.Abstractions/Kernel.cs | 4 +- .../Memory/ISemanticTextMemory.cs | 12 +- .../Services/AIServiceExtensions.cs | 30 + .../ChatHistoryReducerExtensions.cs | 107 +- .../ChatHistorySummarizationReducer.cs | 87 +- .../ChatHistoryTruncationReducer.cs | 42 +- .../Contents/BinaryContentExtensions.cs | 38 + .../Data/KernelBuilderExtensions.cs | 79 - .../Data/ServiceCollectionExtensions.cs | 71 - .../Data/VolatileVectorStore.cs | 63 - ...atileVectorStoreCollectionSearchMapping.cs | 221 -- .../Data/VolatileVectorStoreExtensions.cs | 105 - .../Data/VolatileVectorStoreKeyResolver.cs | 14 - .../VolatileVectorStoreRecordCollection.cs | 380 --- ...atileVectorStoreRecordCollectionOptions.cs | 46 - .../Data/VolatileVectorStoreVectorResolver.cs | 14 - .../Functions/KernelFunctionFromPrompt.cs | 2 +- .../TemplateEngine/Blocks/ICodeRendering.cs | 2 +- .../TemplateEngine/Blocks/ITextRendering.cs | 2 +- .../ChatHistoryReducerExtensionsTests.cs | 26 +- .../ChatHistorySummarizationReducerTests.cs | 35 +- .../AI/ChatCompletion/ChatHistoryTests.cs | 31 + .../ChatHistoryTruncationReducerTests.cs | 29 +- .../MockChatHistoryGenerator.cs} | 12 +- .../AI/ServiceConversionExtensionsTests.cs | 75 +- .../Contents/AnnotationContentTests.cs | 2 +- .../Contents/ChatMessageContentTests.cs | 27 +- .../Contents/FileReferenceContentTests.cs | 16 + .../StreamingAnnotationContentTests.cs | 2 +- .../Data/KernelBuilderExtensionsTests.cs | 125 - .../Data/ServiceCollectionExtensionsTests.cs | 37 - ...olatileVectorStoreRecordCollectionTests.cs | 577 ---- .../Data/VolatileVectorStoreTests.cs | 103 - .../ClientResultExceptionExtensionsTests.cs | 2 +- .../Filters/PromptRenderFilterTests.cs | 38 + .../Functions/KernelPluginTests.cs | 28 +- .../Prompt/ChatPromptParserTests.cs | 126 +- .../SemanticKernel.UnitTests.csproj | 1 - .../FunctionCallsProcessorTests.cs | 65 + .../Utilities/ActivityExtensionsTests.cs | 107 + python/.coveragerc | 2 +- python/.cspell.json | 16 +- python/.env.example | 4 +- python/.pre-commit-config.yaml | 4 +- python/.vscode/launch.json | 2 +- python/Makefile | 16 +- python/README.md | 4 +- python/pyproject.toml | 33 +- python/samples/README.md | 3 +- python/samples/SAMPLE_GUIDELINES.md | 81 + python/samples/concepts/README.md | 80 +- python/samples/concepts/agents/README.md | 19 +- .../agents/assistant_agent_chart_maker.py | 112 - .../assistant_agent_file_manipulation.py | 85 - ...stant_agent_file_manipulation_streaming.py | 88 - .../agents/assistant_agent_retrieval.py | 95 - .../agents/assistant_agent_streaming.py | 110 - .../autogen_conversable_agent/README.md | 20 + ...autogen_conversable_agent_code_executor.py | 61 + ...ogen_conversable_agent_convo_with_tools.py | 95 + .../autogen_conversable_agent_simple_convo.py | 61 + .../agents/azure_ai_agent/.env.example | 6 + .../concepts/agents/azure_ai_agent/README.md | 13 + .../azure_ai_agent_azure_ai_search.py | 147 + .../azure_ai_agent_file_manipulation.py | 92 + 
.../azure_ai_agent_streaming.py | 93 + .../agents/bedrock_agent/.env.example | 2 + .../concepts/agents/bedrock_agent/README.md | 74 + .../bedrock_agent_simple_chat.py | 52 + .../bedrock_agent_simple_chat_streaming.py | 54 + .../bedrock_agent_with_code_interpreter.py | 75 + ...k_agent_with_code_interpreter_streaming.py | 77 + .../bedrock_agent_with_kernel_function.py | 68 + ...drock_agent_with_kernel_function_simple.py | 59 + ...ck_agent_with_kernel_function_streaming.py | 69 + .../bedrock_mixed_chat_agents.py | 102 + .../bedrock_mixed_chat_agents_streaming.py | 107 + .../agents/chat_completion_agent/README.md | 45 + .../chat_completion_function_termination.py | 147 + .../chat_completion_prompt_templating.py | 99 + ...tion_summary_history_reducer_agent_chat.py | 80 + ...on_summary_history_reducer_single_agent.py | 70 + ...ion_truncate_history_reducer_agent_chat.py | 78 + ...n_truncate_history_reducer_single_agent.py | 63 + .../chat_completion_function_termination.py | 133 - .../agents/chat_completion_history_reducer.py | 298 -- .../{ => mixed_chat}/mixed_chat_agents.py | 58 +- .../mixed_chat_agents_plugins.py | 67 +- .../agents/mixed_chat/mixed_chat_files.py | 109 + .../agents/mixed_chat/mixed_chat_images.py | 106 + .../agents/mixed_chat/mixed_chat_reset.py | 103 + .../agents/mixed_chat/mixed_chat_streaming.py | 99 + .../concepts/agents/mixed_chat_files.py | 92 - .../concepts/agents/mixed_chat_reset.py | 83 - .../concepts/agents/mixed_chat_streaming.py | 95 - .../agents/openai_assistant/README.md | 101 + .../agents/openai_assistant/__init__.py | 0 .../openai_assistant_chart_maker.py | 81 + .../openai_assistant_chart_maker_streaming.py | 99 + .../openai_assistant_file_manipulation.py | 83 + ...i_assistant_file_manipulation_streaming.py | 105 + .../openai_assistant_retrieval.py | 55 + .../openai_assistant_sample_utils.py | 54 + .../openai_assistant_streaming.py | 80 + .../openai_assistant_structured_outputs.py | 90 + .../openai_assistant_templating_streaming.py | 116 + .../openai_assistant_vision_streaming.py | 93 + ...t_completion_with_auto_function_calling.py | 7 +- ...on_with_auto_function_calling_streaming.py | 1 + ...completion_with_manual_function_calling.py | 1 + .../function_calling_with_required_type.py | 3 +- .../concepts/caching/semantic_caching.py | 143 + .../chat_completion/simple_chatbot.py | 6 +- .../simple_chatbot_kernel_function.py | 6 +- .../simple_chatbot_streaming.py | 6 +- ...le_chatbot_with_summary_history_reducer.py | 15 +- ...mmary_history_reducer_keep_func_content.py | 19 +- .../samples/concepts/chat_history/README.md | 17 + .../chat_history/serialize_chat_history.py | 178 +- .../store_chat_history_in_cosmosdb.py | 199 ++ .../filtering/auto_function_invoke_filters.py | 4 - .../function_invocation_filters_stream.py | 27 +- .../filtering/retry_with_different_model.py | 98 + .../concepts/filtering/retry_with_filters.py | 7 +- .../azure_ai_search_hotel_samples/README.md | 72 + .../step_0_data_model.py | 1 + .../step_1_interact_with_the_collection.py | 9 +- .../step_2_use_as_a_plugin.py | 14 +- .../memory/azure_cognitive_search_memory.py | 66 - .../{new_memory.py => complex_memory.py} | 207 +- python/samples/concepts/memory/memory.py | 120 - ...pandas_memory.py => memory_with_pandas.py} | 45 +- .../samples/concepts/memory/simple_memory.py | 170 ++ python/samples/concepts/memory/utils.py | 23 + ...penai_function_calling_stepwise_planner.py | 53 - ...penai_function_calling_stepwise_planner.py | 51 - .../concepts/planners/sequential_planner.py | 42 - 
.../concepts/plugins/crew_ai/README.md | 47 + .../plugins/crew_ai/crew_ai_plugin.py | 140 + python/samples/concepts/realtime/README.md | 50 + ...ltime_chat_with_function_calling_webrtc.py | 143 + ...me_chat_with_function_calling_websocket.py | 141 + .../realtime/simple_realtime_chat_webrtc.py | 84 + .../simple_realtime_chat_websocket.py | 90 + python/samples/concepts/realtime/utils.py | 489 +++ .../concepts/reasoning/simple_reasoning.py | 41 +- .../simple_reasoning_function_calling.py | 116 +- python/samples/concepts/resources/cat.jpg | Bin 0 -> 37831 bytes python/samples/concepts/resources/utils.py | 5 + python/samples/concepts/setup/ALL_SETTINGS.md | 8 +- .../setup/chat_completion_services.py | 76 +- .../json_structured_outputs.py | 5 +- .../demos/call_automation/.env.example | 8 + .../demos/call_automation/call_automation.py | 290 ++ .../samples/demos/call_automation/readme.md | 53 + .../document_generator/GENERATED_DOCUMENT.md | 58 + .../demos/document_generator/README.md | 105 + .../agents/code_validation_agent.py | 69 + .../agents/content_creation_agent.py | 64 + .../agents/custom_agent_base.py | 52 + .../document_generator/agents/user_agent.py | 67 + .../custom_selection_strategy.py | 100 + .../custom_termination_strategy.py | 91 + .../samples/demos/document_generator/main.py | 130 + .../plugins/code_execution_plugin.py | 26 + .../plugins/repo_file_plugin.py | 51 + .../document_generator/plugins/user_plugin.py | 16 + .../guided_conversation/plugins/agenda.py | 2 +- .../guided_conversation/utils/resources.py | 2 +- .../demos/process_with_dapr/fastapi_app.py | 13 +- .../process_with_dapr/process/process.py | 12 +- .../demos/process_with_dapr/process/steps.py | 43 +- .../05-using-the-planner.ipynb | 4 +- .../third_party/postgres-memory.ipynb | 427 ++- .../getting_started_with_agents/README.md | 47 +- .../azure_ai_agent/.env.example | 6 + .../azure_ai_agent/README.md | 121 + .../azure_ai_agent/step1_azure_ai_agent.py | 80 + .../step2_azure_ai_agent_plugin.py | 101 + .../step3_azure_ai_agent_group_chat.py | 111 + .../step4_azure_ai_agent_code_interpreter.py | 88 + .../step5_azure_ai_agent_file_search.py | 83 + .../step6_azure_ai_agent_openapi.py | 111 + .../chat_completion/README.md | 3 + .../step1_chat_completion_agent_simple.py | 62 + ...step2_chat_completion_agent_with_kernel.py | 69 + ...ep3_chat_completion_agent_plugin_simple.py | 81 + ...hat_completion_agent_plugin_with_kernel.py | 104 + ...step5_chat_completion_agent_group_chat.py} | 73 +- .../step6_kernel_function_strategies.py} | 72 +- ...step7_chat_completion_agent_json_result.py | 102 + .../step8_chat_completion_agent_logging.py | 112 + ...hat_completion_agent_structured_outputs.py | 112 + .../openai_assistant/README.md | 101 + .../openai_assistant/step1_assistant.py | 75 + .../step2_assistant_plugins.py | 99 + .../step3_assistant_vision.py | 87 + .../step4_assistant_tool_code_interpreter.py | 58 + .../step5_assistant_tool_file_search.py | 80 + .../resources/countries.json | 46 + .../resources/weather.json | 62 + .../step10_assistant_tool_file_search.py | 81 - .../step1_agent.py | 67 - .../step2_plugins.py | 98 - .../step5_json_result.py | 106 - .../step6_logging.py | 93 - .../step7_assistant.py | 88 - .../step8_assistant_vision.py | 115 - .../step9_assistant_tool_code_interpreter.py | 76 - .../agent_docs/agent_collaboration.py | 200 +- .../agent_docs/assistant_code.py | 122 +- .../agent_docs/assistant_search.py | 82 +- .../learn_resources/agent_docs/chat_agent.py | 24 +- .../plugins/GithubPlugin/github.py | 14 +- 
.../resources/WomensSuffrage.txt | 9 + python/semantic_kernel/__init__.py | 7 +- python/semantic_kernel/agents/agent.py | 159 +- .../semantic_kernel/agents/autogen/README.md | 20 + .../agents/autogen/__init__.py | 5 + .../autogen/autogen_conversable_agent.py | 204 ++ .../agents/azure_ai/__init__.py | 6 + .../azure_ai/agent_content_generation.py | 435 +++ .../agents/azure_ai/agent_thread_actions.py | 876 ++++++ .../agents/azure_ai/azure_ai_agent.py | 390 +++ .../azure_ai/azure_ai_agent_settings.py | 32 + .../agents/azure_ai/azure_ai_agent_utils.py | 87 + .../agents/azure_ai/azure_ai_channel.py | 121 + .../semantic_kernel/agents/bedrock/README.md | 27 + .../agents/bedrock/__init__.py | 0 .../agents/bedrock/action_group_utils.py | 117 + .../agents/bedrock/bedrock_agent.py | 589 ++++ .../agents/bedrock/bedrock_agent_base.py | 376 +++ .../agents/bedrock/bedrock_agent_settings.py | 32 + .../agents/bedrock/models/__init__.py | 0 .../models/bedrock_action_group_model.py | 21 + .../models/bedrock_agent_event_type.py | 19 + .../bedrock/models/bedrock_agent_model.py | 24 + .../bedrock/models/bedrock_agent_status.py | 23 + .../agents/channels/agent_channel.py | 14 +- .../agents/channels/bedrock_agent_channel.py | 213 ++ .../agents/channels/chat_history_channel.py | 82 +- .../channels/open_ai_assistant_channel.py | 35 +- .../chat_completion/chat_completion_agent.py | 397 ++- .../agents/group_chat/agent_chat.py | 17 +- .../agents/group_chat/agent_chat_utils.py | 4 +- .../agents/group_chat/agent_group_chat.py | 35 +- .../agents/group_chat/broadcast_queue.py | 8 +- .../open_ai/assistant_content_generation.py | 129 +- .../open_ai/assistant_thread_actions.py | 770 +++++ .../agents/open_ai/azure_assistant_agent.py | 526 +--- .../agents/open_ai/function_action_result.py | 10 +- .../agents/open_ai/open_ai_assistant_agent.py | 879 +++--- .../agents/open_ai/open_ai_assistant_base.py | 1300 -------- .../agents/open_ai/run_polling_options.py | 4 +- .../agents/strategies/__init__.py | 2 + .../kernel_function_selection_strategy.py | 4 +- .../selection/selection_strategy.py | 4 +- .../sequential_selection_strategy.py | 4 +- .../aggregator_termination_strategy.py | 6 +- .../default_termination_strategy.py | 8 +- .../kernel_function_termination_strategy.py | 4 +- .../termination/termination_strategy.py | 4 +- .../semantic_kernel/connectors/ai/README.md | 2 +- .../services/anthropic_chat_completion.py | 10 +- ..._ai_inference_prompt_execution_settings.py | 8 +- .../azure_ai_inference_settings.py | 4 +- .../services/azure_ai_inference_base.py | 4 +- .../azure_ai_inference_chat_completion.py | 15 +- .../azure_ai_inference_text_embedding.py | 6 +- .../ai/azure_ai_inference/services/utils.py | 6 +- .../connectors/ai/bedrock/README.md | 21 + .../connectors/ai/bedrock/bedrock_settings.py | 4 +- .../ai/bedrock/services/bedrock_base.py | 24 +- .../services/bedrock_chat_completion.py | 14 +- .../services/bedrock_text_completion.py | 7 +- .../services/bedrock_text_embedding.py | 7 +- .../model_provider/bedrock_model_provider.py | 6 +- .../bedrock/services/model_provider/utils.py | 7 - .../ai/chat_completion_client_base.py | 12 +- .../ai/embeddings/embedding_generator_base.py | 4 +- .../ai/function_call_choice_configuration.py | 4 +- .../connectors/ai/function_calling_utils.py | 41 +- .../connectors/ai/function_choice_behavior.py | 4 +- .../connectors/ai/function_choice_type.py | 4 +- .../services/google_ai_chat_completion.py | 18 +- .../services/google_ai_text_completion.py | 8 +- .../services/google_ai_text_embedding.py | 4 +- 
.../services/vertex_ai_chat_completion.py | 6 +- .../hf_prompt_execution_settings.py | 14 +- .../services/hf_text_completion.py | 1 + .../services/hf_text_embedding.py | 5 +- .../mistral_ai_prompt_execution_settings.py | 27 +- .../services/mistral_ai_chat_completion.py | 4 +- .../services/mistral_ai_text_embedding.py | 4 +- .../ollama/services/ollama_chat_completion.py | 8 +- .../ollama/services/ollama_text_embedding.py | 4 +- .../services/onnx_gen_ai_chat_completion.py | 4 +- .../services/onnx_gen_ai_completion_base.py | 11 +- .../services/onnx_gen_ai_text_completion.py | 4 +- .../connectors/ai/open_ai/__init__.py | 22 + .../azure_chat_prompt_execution_settings.py | 2 +- ...pen_ai_audio_to_text_execution_settings.py | 5 +- .../open_ai_prompt_execution_settings.py | 2 +- .../open_ai_realtime_execution_settings.py | 78 + ...pen_ai_text_to_image_execution_settings.py | 2 +- .../ai/open_ai/services/azure_config_base.py | 45 +- .../ai/open_ai/services/azure_realtime.py | 116 + .../open_ai/services/azure_text_embedding.py | 4 +- .../open_ai/services/open_ai_config_base.py | 5 +- .../open_ai/services/open_ai_model_types.py | 1 + .../ai/open_ai/services/open_ai_realtime.py | 1024 +++++++ .../services/open_ai_text_embedding.py | 4 +- .../services/open_ai_text_embedding_base.py | 4 +- .../settings/azure_open_ai_settings.py | 16 +- .../ai/open_ai/settings/open_ai_settings.py | 4 + .../connectors/ai/realtime_client_base.py | 145 + .../connectors/memory/astradb/astra_client.py | 4 +- .../memory/astradb/astradb_memory_store.py | 4 +- .../memory/astradb/astradb_settings.py | 4 +- .../azure_ai_search_collection.py | 4 +- .../azure_ai_search_settings.py | 4 +- .../azure_ai_search/azure_ai_search_store.py | 4 +- .../memory/azure_ai_search/utils.py | 4 +- .../azure_ai_search_settings.py | 4 +- .../azure_cognitive_search_memory_store.py | 4 +- .../memory/azure_cosmos_db/__init__.py | 10 + .../azure_cosmos_db_mongodb_collection.py | 253 ++ .../azure_cosmos_db_mongodb_settings.py | 38 + .../azure_cosmos_db_mongodb_store.py | 116 + .../azure_cosmos_db_no_sql_base.py | 4 +- .../azure_cosmos_db_no_sql_collection.py | 37 +- .../azure_cosmos_db_no_sql_composite_key.py | 4 +- .../azure_cosmos_db_no_sql_settings.py | 4 +- .../azure_cosmos_db_no_sql_store.py | 4 +- .../memory/azure_cosmos_db/const.py | 12 + .../azure_cosmos_db_memory_store.py | 4 +- .../azure_cosmos_db_store_api.py | 4 +- .../azure_cosmosdb/azure_cosmosdb_settings.py | 6 +- .../azure_cosmosdb/mongo_vcore_store_api.py | 4 +- .../connectors/memory/azure_cosmosdb/utils.py | 6 +- .../azure_cosmosdb_no_sql_memory_store.py | 4 +- .../connectors/memory/chroma/__init__.py | 3 +- .../connectors/memory/chroma/chroma.py | 376 +++ .../memory/chroma/chroma_memory_store.py | 4 +- .../connectors/memory/in_memory/const.py | 1 - .../memory/in_memory/in_memory_collection.py | 30 +- .../memory/in_memory/in_memory_store.py | 4 +- .../memory/milvus/milvus_memory_store.py | 10 +- .../memory/mongodb_atlas/__init__.py | 9 +- .../connectors/memory/mongodb_atlas/const.py | 16 + .../mongodb_atlas/mongodb_atlas_collection.py | 325 ++ .../mongodb_atlas_memory_store.py | 4 +- .../mongodb_atlas/mongodb_atlas_settings.py | 10 +- .../mongodb_atlas/mongodb_atlas_store.py | 145 + .../connectors/memory/mongodb_atlas/utils.py | 51 +- .../memory/pinecone/pinecone_memory_store.py | 4 +- .../memory/pinecone/pinecone_settings.py | 4 +- .../connectors/memory/postgres/constants.py | 4 + .../memory/postgres/postgres_collection.py | 386 ++- .../memory/postgres/postgres_memory_store.py | 4 +- 
.../memory/postgres/postgres_settings.py | 60 +- .../memory/postgres/postgres_store.py | 19 +- .../connectors/memory/postgres/utils.py | 45 +- .../memory/qdrant/qdrant_collection.py | 4 +- .../memory/qdrant/qdrant_memory_store.py | 4 +- .../memory/qdrant/qdrant_settings.py | 4 +- .../connectors/memory/qdrant/qdrant_store.py | 4 +- .../memory/redis/redis_collection.py | 8 +- .../memory/redis/redis_memory_store.py | 4 +- .../connectors/memory/redis/redis_settings.py | 4 +- .../connectors/memory/redis/redis_store.py | 4 +- .../memory/usearch/usearch_memory_store.py | 4 +- .../connectors/memory/weaviate/README.md | 2 +- .../memory/weaviate/weaviate_collection.py | 4 +- .../memory/weaviate/weaviate_memory_store.py | 4 +- .../memory/weaviate/weaviate_settings.py | 4 +- .../memory/weaviate/weaviate_store.py | 4 +- .../connectors/openapi_plugin/const.py | 4 +- .../models/rest_api_expected_response.py | 5 +- .../models/rest_api_oauth_flow.py | 4 +- .../models/rest_api_oauth_flows.py | 4 +- .../models/rest_api_operation.py | 4 +- .../models/rest_api_parameter.py | 4 +- .../models/rest_api_parameter_location.py | 4 +- .../models/rest_api_parameter_style.py | 4 +- .../openapi_plugin/models/rest_api_payload.py | 4 +- .../models/rest_api_payload_property.py | 4 +- .../models/rest_api_run_options.py | 4 +- .../models/rest_api_security_requirement.py | 4 +- .../models/rest_api_security_scheme.py | 4 +- .../openapi_plugin/models/rest_api_uri.py | 4 +- .../openapi_function_execution_parameters.py | 4 +- .../openapi_plugin/openapi_manager.py | 6 +- .../openapi_plugin/openapi_runner.py | 4 +- .../connectors/search/bing/bing_search.py | 4 +- .../search/bing/bing_search_response.py | 16 +- .../connectors/search/bing/bing_web_page.py | 4 +- .../connectors/search/google/google_search.py | 4 +- .../search/google/google_search_response.py | 6 +- .../search/google/google_search_result.py | 18 +- .../search_engine/google_connector.py | 2 +- python/semantic_kernel/contents/__init__.py | 20 + .../contents/annotation_content.py | 4 +- .../semantic_kernel/contents/audio_content.py | 43 +- .../contents/binary_content.py | 110 +- .../semantic_kernel/contents/chat_history.py | 175 +- .../contents/chat_message_content.py | 22 +- .../contents/file_reference_content.py | 6 +- .../contents/function_call_content.py | 4 +- .../contents/function_result_content.py | 7 +- .../history_reducer/chat_history_reducer.py | 38 +- .../chat_history_reducer_utils.py | 24 +- .../chat_history_summarization_reducer.py | 111 +- .../chat_history_truncation_reducer.py | 31 +- .../semantic_kernel/contents/image_content.py | 39 +- .../contents/realtime_events.py | 67 + .../contents/streaming_annotation_content.py | 4 +- .../streaming_chat_message_content.py | 30 +- .../streaming_file_reference_content.py | 6 +- .../contents/utils/data_uri.py | 137 +- .../semantic_kernel/contents/utils/hashing.py | 52 + .../core_plugins/crew_ai/__init__.py | 11 + .../crew_ai/crew_ai_enterprise.py | 261 ++ .../crew_ai/crew_ai_enterprise_client.py | 106 + .../core_plugins/crew_ai/crew_ai_models.py | 38 + .../core_plugins/crew_ai/crew_ai_settings.py | 22 + .../sessions_python_settings.py | 2 +- python/semantic_kernel/data/__init__.py | 2 + python/semantic_kernel/data/const.py | 13 + .../any_tags_equal_to_filter_clause.py | 4 +- .../filter_clauses/equal_to_filter_clause.py | 4 +- .../data/filter_clauses/filter_clause_base.py | 4 +- .../data/kernel_search_results.py | 4 +- .../vector_store_model_decorator.py | 18 +- .../vector_store_model_definition.py | 8 +- 
.../vector_store_model_protocols.py | 14 +- .../vector_store_record_fields.py | 10 +- .../vector_store_record_utils.py | 4 +- python/semantic_kernel/data/search_filter.py | 4 +- python/semantic_kernel/data/search_options.py | 4 +- .../data/text_search/text_search.py | 4 +- .../data/text_search/text_search_filter.py | 4 +- .../data/text_search/text_search_options.py | 4 +- .../data/text_search/text_search_result.py | 4 +- .../data/vector_search/vector_search.py | 4 +- .../vector_search/vector_search_filter.py | 4 +- .../vector_search/vector_search_options.py | 4 +- .../vector_search/vector_search_result.py | 4 +- .../data/vector_search/vector_text_search.py | 4 +- .../vector_search/vectorizable_text_search.py | 4 +- .../data/vector_search/vectorized_search.py | 4 +- .../data/vector_storage/vector_store.py | 4 +- .../vector_store_record_collection.py | 13 +- .../auto_function_invocation_context.py | 7 +- .../filters/filter_context_base.py | 1 + .../functions/function_invocation_context.py | 5 +- .../filters/kernel_filters_extension.py | 7 +- .../filters/prompts/prompt_render_context.py | 7 +- .../functions/function_result.py | 8 +- .../functions/kernel_function.py | 8 +- .../functions/kernel_function_extension.py | 2 + .../functions/kernel_function_from_prompt.py | 24 +- .../functions/kernel_function_metadata.py | 2 +- .../functions/kernel_parameter_metadata.py | 4 +- .../functions/kernel_plugin.py | 11 +- python/semantic_kernel/kernel.py | 7 +- python/semantic_kernel/kernel_pydantic.py | 4 +- .../memory/memory_query_result.py | 4 +- .../semantic_kernel/memory/memory_record.py | 4 +- .../memory/memory_store_base.py | 4 +- python/semantic_kernel/memory/null_memory.py | 4 +- .../memory/semantic_text_memory.py | 4 +- .../memory/semantic_text_memory_base.py | 4 +- .../memory/volatile_memory_store.py | 4 +- .../dapr_runtime/actors/actor_state_key.py | 4 +- .../dapr_runtime/actors/event_buffer_actor.py | 4 +- .../actors/external_event_buffer_actor.py | 4 +- .../actors/message_buffer_actor.py | 4 +- .../dapr_runtime/actors/process_actor.py | 35 +- .../dapr_runtime/actors/step_actor.py | 56 +- .../dapr_runtime/dapr_actor_registration.py | 31 +- .../dapr_runtime/dapr_kernel_process.py | 4 +- .../dapr_kernel_process_context.py | 4 +- .../dapr_runtime/dapr_process_info.py | 11 +- .../processes/dapr_runtime/dapr_step_info.py | 6 +- .../interfaces/event_buffer_interface.py | 4 +- .../external_event_buffer_interface.py | 4 +- .../interfaces/message_buffer_interface.py | 4 +- .../interfaces/process_interface.py | 13 +- .../dapr_runtime/interfaces/step_interface.py | 4 +- .../kernel_process/kernel_process.py | 20 +- .../kernel_process/kernel_process_edge.py | 4 +- .../kernel_process/kernel_process_event.py | 8 +- .../kernel_process_function_target.py | 4 +- .../kernel_process_message_channel.py | 4 +- .../kernel_process/kernel_process_state.py | 4 +- .../kernel_process/kernel_process_step.py | 4 +- .../kernel_process_step_context.py | 15 +- .../kernel_process_step_info.py | 4 +- .../kernel_process_step_state.py | 4 +- .../processes/local_runtime/local_event.py | 4 +- .../local_runtime/local_kernel_process.py | 4 +- .../local_kernel_process_context.py | 7 +- .../processes/local_runtime/local_message.py | 8 +- .../local_runtime/local_message_factory.py | 4 +- .../processes/local_runtime/local_process.py | 32 +- .../processes/local_runtime/local_step.py | 26 +- .../processes/process_builder.py | 28 +- .../processes/process_edge_builder.py | 8 +- .../processes/process_end_step.py | 4 +- 
.../process_function_target_builder.py | 4 +- .../processes/process_step_builder.py | 19 +- .../processes/process_step_edge_builder.py | 4 +- .../semantic_kernel/prompt_template/const.py | 17 +- .../handlebars_prompt_template.py | 4 +- .../prompt_template/jinja2_prompt_template.py | 4 +- .../prompt_template/prompt_template_base.py | 5 +- .../prompt_template/prompt_template_config.py | 26 +- .../services/ai_service_client_base.py | 4 +- .../services/ai_service_selector.py | 12 +- .../services/kernel_services_extension.py | 16 +- .../protocols/code_renderer.py | 2 + .../protocols/text_renderer.py | 2 + python/semantic_kernel/utils/async_utils.py | 11 + .../utils/experimental_decorator.py | 32 - .../utils/feature_stage_decorator.py | 153 + python/semantic_kernel/utils/list_handler.py | 10 +- .../telemetry/agent_diagnostics/decorators.py | 50 +- .../agent_diagnostics/gen_ai_attributes.py | 12 + .../telemetry/model_diagnostics/decorators.py | 19 +- .../model_diagnostics_settings.py | 4 +- python/tests/conftest.py | 28 +- .../agents/bedrock_agent/conftest.py | 25 + .../test_bedrock_agent_integration.py | 143 + .../completions/chat_completion_test_base.py | 38 +- .../completions/test_chat_completions.py | 8 +- .../completions/test_text_completion.py | 40 +- .../cross_language/test_cross_language.py | 2 +- .../embeddings/test_embedding_service_base.py | 8 +- .../test_azure_cosmos_db_no_sql.py | 25 +- .../postgres/test_postgres_int.py | 39 +- .../memory/vector_stores/test_vector_store.py | 63 +- .../vector_stores/vector_store_test_base.py | 66 +- python/tests/samples/test_concepts.py | 113 +- python/tests/samples/test_learn_resources.py | 9 - .../test_autogen_conversable_agent.py | 123 + .../unit/agents/azure_ai_agent/conftest.py | 23 + .../test_agent_content_generation.py | 279 ++ .../test_agent_thread_actions.py | 326 ++ .../azure_ai_agent/test_azure_ai_agent.py | 145 + .../test_azure_ai_agent_settings.py | 34 + .../test_azure_ai_agent_utils.py | 51 + .../azure_ai_agent/test_azure_ai_channel.py | 112 + .../unit/agents/bedrock_agent/conftest.py | 180 ++ .../bedrock_agent/test_action_group_utils.py | 93 + .../test_bedrock_action_group_model.py | 33 + .../bedrock_agent/test_bedrock_agent.py | 633 ++++ .../test_bedrock_agent_channel.py | 63 + .../test_bedrock_agent_event_type.py | 27 + .../bedrock_agent/test_bedrock_agent_model.py | 67 + .../test_bedrock_agent_settings.py | 28 + .../test_bedrock_agent_status.py | 23 + .../unit/agents/chat_completion/conftest.py | 25 + .../test_chat_completion_agent.py | 326 ++ .../test_chat_history_channel.py | 72 +- .../unit/agents/openai_assistant/conftest.py | 105 + .../test_assistant_thread_actions.py | 770 +++++ .../test_azure_assistant_agent.py | 387 +++ .../test_open_ai_assistant_agent.py | 294 ++ .../test_open_ai_assistant_channel.py | 112 +- python/tests/unit/agents/test_agent.py | 88 +- .../unit/agents/test_azure_assistant_agent.py | 570 ---- .../unit/agents/test_chat_completion_agent.py | 217 -- .../test_agent_channel.py | 0 .../{ => test_group_chat}/test_agent_chat.py | 0 .../test_agent_chat_utils.py | 0 .../test_agent_group_chat.py | 0 .../test_broadcast_queue.py | 0 .../test_aggregator_termination_strategy.py | 22 +- .../test_default_termination_strategy.py | 0 ...test_kernel_function_selection_strategy.py | 22 +- ...st_kernel_function_termination_strategy.py | 22 +- .../test_sequential_strategy_selection.py | 25 +- .../test_termination_strategy.py | 23 +- .../agents/test_open_ai_assistant_agent.py | 601 ---- .../agents/test_open_ai_assistant_base.py | 
1776 ----------- ...test_azure_ai_inference_chat_completion.py | 16 + .../test_azure_ai_inference_text_embedding.py | 6 + .../test_bedrock_model_provider_utils.py | 22 + .../hugging_face/test_hf_text_completions.py | 6 +- .../services/test_onnx_chat_completion.py | 9 + .../services/test_onnx_text_completion.py | 9 + .../services/test_azure_chat_completion.py | 1 - .../test_openai_chat_completion_base.py | 2 - .../open_ai/services/test_openai_realtime.py | 656 +++++ .../open_ai/test_openai_request_settings.py | 14 + .../connectors/memory/chroma/test_chroma.py | 134 + .../memory/mongodb_atlas/conftest.py | 37 + .../test_mongodb_atlas_collection.py | 96 + .../mongodb_atlas/test_mongodb_atlas_store.py | 31 + .../memory/postgres/test_postgres_store.py | 135 +- .../unit/contents/test_binary_content.py | 5 + ...test_chat_history_summarization_reducer.py | 34 +- .../contents/test_chat_message_content.py | 35 + python/tests/unit/contents/test_data_uri.py | 96 +- .../contents/test_function_result_content.py | 93 + .../tests/unit/contents/test_hashing_utils.py | 196 ++ .../test_streaming_chat_message_content.py | 40 + .../core_plugins/test_crew_ai_enterprise.py | 95 + .../test_kernel_experimental_decorator.py | 29 - python/tests/unit/kernel/test_kernel.py | 80 +- .../dapr_runtime/test_process_actor.py | 2 +- .../processes/dapr_runtime/test_step_actor.py | 107 +- .../test_kernel_process_event.py | 2 +- .../test_local_kernel_process_context.py | 1 + .../local_runtime/test_local_process.py | 3 +- .../processes/test_process_edge_builder.py | 51 +- .../processes/test_process_step_builder.py | 26 + .../prompt_template/test_prompt_templates.py | 1 + python/tests/unit/test_serialization.py | 15 +- .../agent_diagnostics/test_agent_decorated.py | 6 +- .../test_trace_chat_completion_agent.py | 4 +- .../test_trace_open_ai_assistant_agent.py | 60 +- .../utils/test_feature_stage_decorator.py | 124 + python/uv.lock | 2624 ++++++++++------- 1091 files changed, 50849 insertions(+), 17275 deletions(-) create mode 100644 .github/workflows/label-needs-port.yml create mode 100644 .github/workflows/python-manual-release.yml create mode 100644 docs/decisions/0064-hybrid-model-orchestration.md create mode 100644 docs/decisions/0065-realtime-api-clients.md create mode 100644 docs/decisions/0066-concepts-guidelines.md create mode 100644 dotnet/samples/Concepts/Agents/AzureAIAgent_FileManipulation.cs create mode 100644 dotnet/samples/Concepts/Agents/AzureAIAgent_Streaming.cs create mode 100644 dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionWithReasoning.cs rename dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/{MaxTokensChatHistoryReducer.cs => ChatHistoryMaxTokensReducer.cs} (81%) delete mode 100644 dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/IChatHistoryReducer.cs delete mode 100644 dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/SummarizingChatHistoryReducer.cs delete mode 100644 dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/TruncatingChatHistoryReducer.cs create mode 100644 dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletion.cs create mode 100644 dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletionStreaming.cs create mode 100644 dotnet/samples/Concepts/ChatCompletion/HybridCompletion_Fallback.cs create mode 100644 dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletion.cs create mode 100644 dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletionStreaming.cs create mode 100644 
dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithReasoning.cs create mode 100644 dotnet/samples/Concepts/Filtering/AzureOpenAI_DeploymentSwitch.cs delete mode 100644 dotnet/samples/Concepts/LocalModels/HuggingFace_ChatCompletionWithTGI.cs delete mode 100644 dotnet/samples/Concepts/LocalModels/MultipleProviders_ChatCompletion.cs create mode 100644 dotnet/samples/Concepts/Plugins/CrewAI_Plugin.cs delete mode 100644 dotnet/samples/Concepts/Resources/Agents/ParrotAgent.yaml delete mode 100644 dotnet/samples/Concepts/Resources/Agents/ToolAgent.yaml delete mode 100644 dotnet/samples/Concepts/Resources/Agents/travelinfo.txt delete mode 100644 dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/BearerAuthenticationProviderWithCancellationToken.cs create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.csproj create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.sln create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/DemoCommand.cs create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLogger.cs create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLoggerProvider.cs create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Program.cs create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/appsettings.json create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/README.md create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/TROUBLESHOOTING.md create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/images/AppRegistration_APIPermissions.png create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/images/AppRegistration_AppSecret.png create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/images/AppRegistration_Authentication_localhostredirecturi.png create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/images/ApplicationOverViewScreenClientIDetc.png create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/images/CAPs_PublicRoadmap.png create mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/images/aad-portal-app-registrations.png create mode 100644 dotnet/samples/Demos/ModelContextProtocol/McpDotNetExtensions.cs create mode 100644 dotnet/samples/Demos/ModelContextProtocol/ModelContextProtocol.csproj create mode 100644 dotnet/samples/Demos/ModelContextProtocol/Program.cs create mode 100644 dotnet/samples/Demos/ModelContextProtocol/README.md create mode 100644 dotnet/samples/Demos/ModelContextProtocol/SimpleToolsConsole.json create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/ProcessFramework.Aspire.AppHost.csproj create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/Program.cs create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/appsettings.json create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Models/ProcessEvents.cs create mode 100644 
dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.csproj create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.http create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Program.cs create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/SummarizeStep.cs create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/TranslateStep.cs create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/SummaryAgentHttpClient.cs create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/TranslatorAgentHttpClient.cs create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/appsettings.json create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/Extensions.cs create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/ProcessFramework.Aspire.ServiceDefaults.csproj create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/ProcessFramework.Aspire.Shared.csproj create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/SummarizeRequest.cs create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/TranslationRequest.cs create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.csproj create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.http create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/Program.cs create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/appsettings.json create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.csproj create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.http create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/Program.cs create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/appsettings.json create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/README.md create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/architecture.png create mode 100644 
dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/aspire-dashboard.png create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/aspire-metrics.png create mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/aspire-traces.png create mode 100644 dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step01_AzureAIAgent.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Plugins.cs rename dotnet/samples/GettingStartedWithAgents/{Step07_Logging.cs => AzureAIAgent/Step03_AzureAIAgent_Chat.cs} (56%) create mode 100644 dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_CodeInterpreter.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_FileSearch.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step06_AzureAIAgent_OpenAPI.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step07_AzureAIAgent_Functions.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/BedrockAgent/README.md create mode 100644 dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step01_BedrockAgent.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step02_BedrockAgent_CodeInterpreter.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step03_BedrockAgent_Functions.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step04_BedrockAgent_Trace.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step05_BedrockAgent_FileSearch.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step06_BedrockAgent_AgentChat.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step01_Assistant.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step02_Assistant_Plugins.cs rename dotnet/samples/GettingStartedWithAgents/{Step09_Assistant_Vision.cs => OpenAIAssistant/Step03_Assistant_Vision.cs} (69%) rename dotnet/samples/GettingStartedWithAgents/{Step10_AssistantTool_CodeInterpreter.cs => OpenAIAssistant/Step04_AssistantTool_CodeInterpreter.cs} (61%) create mode 100644 dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step05_AssistantTool_FileSearch.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step06_AssistantTool_Function.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/Plugins/MenuPlugin.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/Plugins/WidgetFactory.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/Resources/AutoInvokeTools.yaml create mode 100644 dotnet/samples/GettingStartedWithAgents/Resources/countries.json create mode 100644 dotnet/samples/GettingStartedWithAgents/Resources/weather.json create mode 100644 dotnet/samples/GettingStartedWithAgents/Step07_Telemetry.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs create mode 100644 dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj create mode 100644 dotnet/src/Agents/AzureAI/AzureAIAgent.ClientFactory.cs create mode 100644 dotnet/src/Agents/AzureAI/AzureAIAgent.cs create mode 100644 dotnet/src/Agents/AzureAI/AzureAIChannel.cs create mode 100644 dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs create mode 100644 dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs create mode 100644 
dotnet/src/Agents/AzureAI/AzureAIThreadMessageFactory.cs create mode 100644 dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs create mode 100644 dotnet/src/Agents/AzureAI/Extensions/KernelFunctionExtensions.cs create mode 100644 dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs create mode 100644 dotnet/src/Agents/AzureAI/Internal/AgentThreadActions.cs create mode 100644 dotnet/src/Agents/AzureAI/Logging/AgentThreadActionsLogMessages.cs create mode 100644 dotnet/src/Agents/AzureAI/Logging/AzureAIAgentLogMessages.cs rename dotnet/src/Agents/{Abstractions => AzureAI}/Properties/AssemblyInfo.cs (100%) create mode 100644 dotnet/src/Agents/AzureAI/RunPollingOptions.cs create mode 100644 dotnet/src/Agents/Bedrock/Agents.Bedrock.csproj create mode 100644 dotnet/src/Agents/Bedrock/BedrockAgent.cs create mode 100644 dotnet/src/Agents/Bedrock/BedrockAgentChannel.cs create mode 100644 dotnet/src/Agents/Bedrock/Extensions/BedrockAgentExtensions.cs create mode 100644 dotnet/src/Agents/Bedrock/Extensions/BedrockAgentInvokeExtensions.cs create mode 100644 dotnet/src/Agents/Bedrock/Extensions/BedrockFunctionSchemaExtensions.cs rename dotnet/src/Agents/{Core => Bedrock}/Properties/AssemblyInfo.cs (100%) create mode 100644 dotnet/src/Agents/Bedrock/README.md delete mode 100644 dotnet/src/Agents/Core/History/IChatHistoryReducer.cs create mode 100644 dotnet/src/Agents/OpenAI/Extensions/AssistantClientExtensions.cs create mode 100644 dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs delete mode 100644 dotnet/src/Agents/OpenAI/Extensions/KernelExtensions.cs create mode 100644 dotnet/src/Agents/OpenAI/Extensions/OpenAIClientExtensions.cs delete mode 100644 dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs create mode 100644 dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.ClientFactory.cs create mode 100644 dotnet/src/Agents/UnitTests/AzureAI/AzureAIAssistantInvocationOptionsTests.cs create mode 100644 dotnet/src/Agents/UnitTests/AzureAI/AzureAIClientProviderTests.cs create mode 100644 dotnet/src/Agents/UnitTests/AzureAI/Extensions/KernelFunctionExtensionsTests.cs create mode 100644 dotnet/src/Agents/UnitTests/AzureAI/Internal/AgentMessageFactoryTests.cs create mode 100644 dotnet/src/Agents/UnitTests/AzureAI/RunPollingOptionsTests.cs create mode 100644 dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentChannelTests.cs create mode 100644 dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentTests.cs create mode 100644 dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockAgentExtensionsTests.cs create mode 100644 dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockFunctionSchemaExtensionsTests.cs delete mode 100644 dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/Extensions/AssistantClientExtensionsTests.cs delete mode 100644 dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/Extensions/OpenAIClientExtensionsTests.cs rename dotnet/src/Agents/UnitTests/{OpenAI => Test}/AssertCollection.cs (95%) create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureClientCoreTests.cs create mode 100644 dotnet/src/Experimental/Process.Abstractions/IKernelExternalProcessMessageChannel.cs create mode 100644 dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventClient.cs create mode 100644 dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventData.cs create 
mode 100644 dotnet/src/Experimental/Process.IntegrationTests.Resources/ProcessCloudEventsResources.cs create mode 100644 dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessCloudEventsTests.cs create mode 100644 dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActor.cs create mode 100644 dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActorWrapper.cs create mode 100644 dotnet/src/Experimental/Process.Runtime.Dapr/Interfaces/IExternalMessageBuffer.cs delete mode 100644 dotnet/src/Functions/Functions.Prompty/Core/PromptyModel.cs delete mode 100644 dotnet/src/Functions/Functions.Prompty/Core/PromptyModelConfig.cs delete mode 100644 dotnet/src/Functions/Functions.Prompty/Core/PromptyModelParameters.cs delete mode 100644 dotnet/src/Functions/Functions.Prompty/Core/PromptyResponseFormat.cs delete mode 100644 dotnet/src/Functions/Functions.Prompty/Core/PromptyTool.cs delete mode 100644 dotnet/src/Functions/Functions.Prompty/Core/PromptyYaml.cs delete mode 100644 dotnet/src/Functions/Functions.Prompty/Core/Types/ApiType.cs delete mode 100644 dotnet/src/Functions/Functions.Prompty/Core/Types/ModelType.cs delete mode 100644 dotnet/src/Functions/Functions.Prompty/Core/Types/ParserType.cs delete mode 100644 dotnet/src/Functions/Functions.Prompty/Core/Types/RoleType.cs create mode 100644 dotnet/src/IntegrationTests/Agents/BedrockAgentTests.cs create mode 100644 dotnet/src/IntegrationTests/Resources/gemini_cached_content.json create mode 100644 dotnet/src/IntegrationTests/TestSettings/BedrockAgentConfiguration.cs create mode 100644 dotnet/src/InternalUtilities/agents/AgentUtilities.props rename dotnet/src/{Agents/OpenAI => InternalUtilities/agents}/Extensions/AgentExtensions.cs (51%) create mode 100644 dotnet/src/InternalUtilities/agents/Extensions/KernelFunctionMetadataExtensions.cs create mode 100644 dotnet/src/InternalUtilities/azure/AzureAIUtilities.props create mode 100644 dotnet/src/InternalUtilities/azure/Policies/GeneratedActionPipelinePolicy.cs create mode 100644 dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAssistantTest.cs create mode 100644 dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureAgentTest.cs create mode 100644 dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureTest.cs create mode 100644 dotnet/src/InternalUtilities/samples/AgentUtilities/BaseBedrockAgentTest.cs create mode 100644 dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseClientTests.cs create mode 100644 dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseTests.cs create mode 100644 dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/MockHttpClientFactory.cs create mode 100644 dotnet/src/Plugins/Plugins.AI.UnitTests/Plugins.AI.UnitTests.csproj rename dotnet/src/{Agents/OpenAI/Properties => Plugins/Plugins.AI}/AssemblyInfo.cs (78%) create mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIEnterpriseClient.cs create mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIStateEnumConverter.cs create mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIEnterprise.cs create mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIInputMetadata.cs create mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffResponse.cs create mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffState.cs create mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIRequiredInputs.cs create mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIStatusResponse.cs create mode 
100644 dotnet/src/Plugins/Plugins.AI/Plugins.AI.csproj create mode 100644 dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistoryExtensions.cs create mode 100644 dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatHistoryReducer.cs create mode 100644 dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionNoop.cs create mode 100644 dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionSchemaModel.cs rename dotnet/src/{Agents/Core/History => SemanticKernel.Core/AI/ChatCompletion}/ChatHistoryReducerExtensions.cs (51%) rename dotnet/src/{Agents/Core/History => SemanticKernel.Core/AI/ChatCompletion}/ChatHistorySummarizationReducer.cs (79%) rename dotnet/src/{Agents/Core/History => SemanticKernel.Core/AI/ChatCompletion}/ChatHistoryTruncationReducer.cs (82%) create mode 100644 dotnet/src/SemanticKernel.Core/Contents/BinaryContentExtensions.cs delete mode 100644 dotnet/src/SemanticKernel.Core/Data/KernelBuilderExtensions.cs delete mode 100644 dotnet/src/SemanticKernel.Core/Data/ServiceCollectionExtensions.cs delete mode 100644 dotnet/src/SemanticKernel.Core/Data/VolatileVectorStore.cs delete mode 100644 dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreCollectionSearchMapping.cs delete mode 100644 dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreExtensions.cs delete mode 100644 dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreKeyResolver.cs delete mode 100644 dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs delete mode 100644 dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollectionOptions.cs delete mode 100644 dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreVectorResolver.cs rename dotnet/src/{Agents/UnitTests/Core/History => SemanticKernel.UnitTests/AI/ChatCompletion}/ChatHistoryReducerExtensionsTests.cs (87%) rename dotnet/src/{Agents/UnitTests/Core/History => SemanticKernel.UnitTests/AI/ChatCompletion}/ChatHistorySummarizationReducerTests.cs (85%) rename dotnet/src/{Agents/UnitTests/Core/History => SemanticKernel.UnitTests/AI/ChatCompletion}/ChatHistoryTruncationReducerTests.cs (79%) rename dotnet/src/{Agents/UnitTests/Core/History/MockHistoryGenerator.cs => SemanticKernel.UnitTests/AI/ChatCompletion/MockChatHistoryGenerator.cs} (90%) delete mode 100644 dotnet/src/SemanticKernel.UnitTests/Data/KernelBuilderExtensionsTests.cs delete mode 100644 dotnet/src/SemanticKernel.UnitTests/Data/ServiceCollectionExtensionsTests.cs delete mode 100644 dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs delete mode 100644 dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreTests.cs create mode 100644 dotnet/src/SemanticKernel.UnitTests/Utilities/ActivityExtensionsTests.cs create mode 100644 python/samples/SAMPLE_GUIDELINES.md delete mode 100644 python/samples/concepts/agents/assistant_agent_chart_maker.py delete mode 100644 python/samples/concepts/agents/assistant_agent_file_manipulation.py delete mode 100644 python/samples/concepts/agents/assistant_agent_file_manipulation_streaming.py delete mode 100644 python/samples/concepts/agents/assistant_agent_retrieval.py delete mode 100644 python/samples/concepts/agents/assistant_agent_streaming.py create mode 100644 python/samples/concepts/agents/autogen_conversable_agent/README.md create mode 100644 python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_code_executor.py create mode 100644 python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_convo_with_tools.py 
create mode 100644 python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_simple_convo.py create mode 100644 python/samples/concepts/agents/azure_ai_agent/.env.example create mode 100644 python/samples/concepts/agents/azure_ai_agent/README.md create mode 100644 python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_azure_ai_search.py create mode 100644 python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_file_manipulation.py create mode 100644 python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_streaming.py create mode 100644 python/samples/concepts/agents/bedrock_agent/.env.example create mode 100644 python/samples/concepts/agents/bedrock_agent/README.md create mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat.py create mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat_streaming.py create mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter.py create mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter_streaming.py create mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function.py create mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_simple.py create mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_streaming.py create mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents.py create mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents_streaming.py create mode 100644 python/samples/concepts/agents/chat_completion_agent/README.md create mode 100644 python/samples/concepts/agents/chat_completion_agent/chat_completion_function_termination.py create mode 100644 python/samples/concepts/agents/chat_completion_agent/chat_completion_prompt_templating.py create mode 100644 python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_agent_chat.py create mode 100644 python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_single_agent.py create mode 100644 python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_agent_chat.py create mode 100644 python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_single_agent.py delete mode 100644 python/samples/concepts/agents/chat_completion_function_termination.py delete mode 100644 python/samples/concepts/agents/chat_completion_history_reducer.py rename python/samples/concepts/agents/{ => mixed_chat}/mixed_chat_agents.py (66%) rename python/samples/concepts/agents/{ => mixed_chat}/mixed_chat_agents_plugins.py (69%) create mode 100644 python/samples/concepts/agents/mixed_chat/mixed_chat_files.py create mode 100644 python/samples/concepts/agents/mixed_chat/mixed_chat_images.py create mode 100644 python/samples/concepts/agents/mixed_chat/mixed_chat_reset.py create mode 100644 python/samples/concepts/agents/mixed_chat/mixed_chat_streaming.py delete mode 100644 python/samples/concepts/agents/mixed_chat_files.py delete mode 100644 python/samples/concepts/agents/mixed_chat_reset.py delete mode 100644 python/samples/concepts/agents/mixed_chat_streaming.py create mode 100644 python/samples/concepts/agents/openai_assistant/README.md create mode 100644 python/samples/concepts/agents/openai_assistant/__init__.py create mode 100644 
python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker.py create mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker_streaming.py create mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation.py create mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation_streaming.py create mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_retrieval.py create mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_sample_utils.py create mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_streaming.py create mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_structured_outputs.py create mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_templating_streaming.py create mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_vision_streaming.py create mode 100644 python/samples/concepts/caching/semantic_caching.py create mode 100644 python/samples/concepts/chat_history/README.md create mode 100644 python/samples/concepts/chat_history/store_chat_history_in_cosmosdb.py create mode 100644 python/samples/concepts/filtering/retry_with_different_model.py create mode 100644 python/samples/concepts/memory/azure_ai_search_hotel_samples/README.md delete mode 100644 python/samples/concepts/memory/azure_cognitive_search_memory.py rename python/samples/concepts/memory/{new_memory.py => complex_memory.py} (56%) delete mode 100644 python/samples/concepts/memory/memory.py rename python/samples/concepts/memory/{pandas_memory.py => memory_with_pandas.py} (57%) create mode 100644 python/samples/concepts/memory/simple_memory.py create mode 100644 python/samples/concepts/memory/utils.py delete mode 100644 python/samples/concepts/planners/azure_openai_function_calling_stepwise_planner.py delete mode 100644 python/samples/concepts/planners/openai_function_calling_stepwise_planner.py delete mode 100644 python/samples/concepts/planners/sequential_planner.py create mode 100644 python/samples/concepts/plugins/crew_ai/README.md create mode 100644 python/samples/concepts/plugins/crew_ai/crew_ai_plugin.py create mode 100644 python/samples/concepts/realtime/README.md create mode 100644 python/samples/concepts/realtime/realtime_chat_with_function_calling_webrtc.py create mode 100644 python/samples/concepts/realtime/realtime_chat_with_function_calling_websocket.py create mode 100644 python/samples/concepts/realtime/simple_realtime_chat_webrtc.py create mode 100644 python/samples/concepts/realtime/simple_realtime_chat_websocket.py create mode 100644 python/samples/concepts/realtime/utils.py create mode 100644 python/samples/concepts/resources/cat.jpg create mode 100644 python/samples/demos/call_automation/.env.example create mode 100755 python/samples/demos/call_automation/call_automation.py create mode 100644 python/samples/demos/call_automation/readme.md create mode 100644 python/samples/demos/document_generator/GENERATED_DOCUMENT.md create mode 100644 python/samples/demos/document_generator/README.md create mode 100644 python/samples/demos/document_generator/agents/code_validation_agent.py create mode 100644 python/samples/demos/document_generator/agents/content_creation_agent.py create mode 100644 python/samples/demos/document_generator/agents/custom_agent_base.py create mode 100644 python/samples/demos/document_generator/agents/user_agent.py create mode 
100644 python/samples/demos/document_generator/custom_selection_strategy.py create mode 100644 python/samples/demos/document_generator/custom_termination_strategy.py create mode 100644 python/samples/demos/document_generator/main.py create mode 100644 python/samples/demos/document_generator/plugins/code_execution_plugin.py create mode 100644 python/samples/demos/document_generator/plugins/repo_file_plugin.py create mode 100644 python/samples/demos/document_generator/plugins/user_plugin.py create mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/.env.example create mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/README.md create mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/step1_azure_ai_agent.py create mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/step2_azure_ai_agent_plugin.py create mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/step3_azure_ai_agent_group_chat.py create mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/step4_azure_ai_agent_code_interpreter.py create mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/step5_azure_ai_agent_file_search.py create mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/step6_azure_ai_agent_openapi.py create mode 100644 python/samples/getting_started_with_agents/chat_completion/README.md create mode 100644 python/samples/getting_started_with_agents/chat_completion/step1_chat_completion_agent_simple.py create mode 100644 python/samples/getting_started_with_agents/chat_completion/step2_chat_completion_agent_with_kernel.py create mode 100644 python/samples/getting_started_with_agents/chat_completion/step3_chat_completion_agent_plugin_simple.py create mode 100644 python/samples/getting_started_with_agents/chat_completion/step4_chat_completion_agent_plugin_with_kernel.py rename python/samples/getting_started_with_agents/{step3_chat.py => chat_completion/step5_chat_completion_agent_group_chat.py} (53%) rename python/samples/getting_started_with_agents/{step4_kernel_function_strategies.py => chat_completion/step6_kernel_function_strategies.py} (66%) create mode 100644 python/samples/getting_started_with_agents/chat_completion/step7_chat_completion_agent_json_result.py create mode 100644 python/samples/getting_started_with_agents/chat_completion/step8_chat_completion_agent_logging.py create mode 100644 python/samples/getting_started_with_agents/chat_completion/step9_chat_completion_agent_structured_outputs.py create mode 100644 python/samples/getting_started_with_agents/openai_assistant/README.md create mode 100644 python/samples/getting_started_with_agents/openai_assistant/step1_assistant.py create mode 100644 python/samples/getting_started_with_agents/openai_assistant/step2_assistant_plugins.py create mode 100644 python/samples/getting_started_with_agents/openai_assistant/step3_assistant_vision.py create mode 100644 python/samples/getting_started_with_agents/openai_assistant/step4_assistant_tool_code_interpreter.py create mode 100644 python/samples/getting_started_with_agents/openai_assistant/step5_assistant_tool_file_search.py create mode 100644 python/samples/getting_started_with_agents/resources/countries.json create mode 100644 python/samples/getting_started_with_agents/resources/weather.json delete mode 100644 python/samples/getting_started_with_agents/step10_assistant_tool_file_search.py delete mode 100644 python/samples/getting_started_with_agents/step1_agent.py delete mode 
100644 python/samples/getting_started_with_agents/step2_plugins.py delete mode 100644 python/samples/getting_started_with_agents/step5_json_result.py delete mode 100644 python/samples/getting_started_with_agents/step6_logging.py delete mode 100644 python/samples/getting_started_with_agents/step7_assistant.py delete mode 100644 python/samples/getting_started_with_agents/step8_assistant_vision.py delete mode 100644 python/samples/getting_started_with_agents/step9_assistant_tool_code_interpreter.py create mode 100644 python/samples/learn_resources/resources/WomensSuffrage.txt create mode 100644 python/semantic_kernel/agents/autogen/README.md create mode 100644 python/semantic_kernel/agents/autogen/__init__.py create mode 100644 python/semantic_kernel/agents/autogen/autogen_conversable_agent.py create mode 100644 python/semantic_kernel/agents/azure_ai/__init__.py create mode 100644 python/semantic_kernel/agents/azure_ai/agent_content_generation.py create mode 100644 python/semantic_kernel/agents/azure_ai/agent_thread_actions.py create mode 100644 python/semantic_kernel/agents/azure_ai/azure_ai_agent.py create mode 100644 python/semantic_kernel/agents/azure_ai/azure_ai_agent_settings.py create mode 100644 python/semantic_kernel/agents/azure_ai/azure_ai_agent_utils.py create mode 100644 python/semantic_kernel/agents/azure_ai/azure_ai_channel.py create mode 100644 python/semantic_kernel/agents/bedrock/README.md create mode 100644 python/semantic_kernel/agents/bedrock/__init__.py create mode 100644 python/semantic_kernel/agents/bedrock/action_group_utils.py create mode 100644 python/semantic_kernel/agents/bedrock/bedrock_agent.py create mode 100644 python/semantic_kernel/agents/bedrock/bedrock_agent_base.py create mode 100644 python/semantic_kernel/agents/bedrock/bedrock_agent_settings.py create mode 100644 python/semantic_kernel/agents/bedrock/models/__init__.py create mode 100644 python/semantic_kernel/agents/bedrock/models/bedrock_action_group_model.py create mode 100644 python/semantic_kernel/agents/bedrock/models/bedrock_agent_event_type.py create mode 100644 python/semantic_kernel/agents/bedrock/models/bedrock_agent_model.py create mode 100644 python/semantic_kernel/agents/bedrock/models/bedrock_agent_status.py create mode 100644 python/semantic_kernel/agents/channels/bedrock_agent_channel.py create mode 100644 python/semantic_kernel/agents/open_ai/assistant_thread_actions.py delete mode 100644 python/semantic_kernel/agents/open_ai/open_ai_assistant_base.py create mode 100644 python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_realtime_execution_settings.py create mode 100644 python/semantic_kernel/connectors/ai/open_ai/services/azure_realtime.py create mode 100644 python/semantic_kernel/connectors/ai/open_ai/services/open_ai_realtime.py create mode 100644 python/semantic_kernel/connectors/ai/realtime_client_base.py create mode 100644 python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_collection.py create mode 100644 python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_settings.py create mode 100644 python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_store.py create mode 100644 python/semantic_kernel/connectors/memory/chroma/chroma.py create mode 100644 python/semantic_kernel/connectors/memory/mongodb_atlas/const.py create mode 100644 python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_collection.py create mode 100644 
python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_store.py create mode 100644 python/semantic_kernel/contents/realtime_events.py create mode 100644 python/semantic_kernel/contents/utils/hashing.py create mode 100644 python/semantic_kernel/core_plugins/crew_ai/__init__.py create mode 100644 python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise.py create mode 100644 python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise_client.py create mode 100644 python/semantic_kernel/core_plugins/crew_ai/crew_ai_models.py create mode 100644 python/semantic_kernel/core_plugins/crew_ai/crew_ai_settings.py create mode 100644 python/semantic_kernel/utils/async_utils.py delete mode 100644 python/semantic_kernel/utils/experimental_decorator.py create mode 100644 python/semantic_kernel/utils/feature_stage_decorator.py create mode 100644 python/semantic_kernel/utils/telemetry/agent_diagnostics/gen_ai_attributes.py create mode 100644 python/tests/integration/agents/bedrock_agent/conftest.py create mode 100644 python/tests/integration/agents/bedrock_agent/test_bedrock_agent_integration.py create mode 100644 python/tests/unit/agents/autogen_conversable_agent/test_autogen_conversable_agent.py create mode 100644 python/tests/unit/agents/azure_ai_agent/conftest.py create mode 100644 python/tests/unit/agents/azure_ai_agent/test_agent_content_generation.py create mode 100644 python/tests/unit/agents/azure_ai_agent/test_agent_thread_actions.py create mode 100644 python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent.py create mode 100644 python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_settings.py create mode 100644 python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_utils.py create mode 100644 python/tests/unit/agents/azure_ai_agent/test_azure_ai_channel.py create mode 100644 python/tests/unit/agents/bedrock_agent/conftest.py create mode 100644 python/tests/unit/agents/bedrock_agent/test_action_group_utils.py create mode 100644 python/tests/unit/agents/bedrock_agent/test_bedrock_action_group_model.py create mode 100644 python/tests/unit/agents/bedrock_agent/test_bedrock_agent.py create mode 100644 python/tests/unit/agents/bedrock_agent/test_bedrock_agent_channel.py create mode 100644 python/tests/unit/agents/bedrock_agent/test_bedrock_agent_event_type.py create mode 100644 python/tests/unit/agents/bedrock_agent/test_bedrock_agent_model.py create mode 100644 python/tests/unit/agents/bedrock_agent/test_bedrock_agent_settings.py create mode 100644 python/tests/unit/agents/bedrock_agent/test_bedrock_agent_status.py create mode 100644 python/tests/unit/agents/chat_completion/conftest.py create mode 100644 python/tests/unit/agents/chat_completion/test_chat_completion_agent.py rename python/tests/unit/agents/{ => chat_completion}/test_chat_history_channel.py (79%) create mode 100644 python/tests/unit/agents/openai_assistant/conftest.py create mode 100644 python/tests/unit/agents/openai_assistant/test_assistant_thread_actions.py create mode 100644 python/tests/unit/agents/openai_assistant/test_azure_assistant_agent.py create mode 100644 python/tests/unit/agents/openai_assistant/test_open_ai_assistant_agent.py rename python/tests/unit/agents/{ => openai_assistant}/test_open_ai_assistant_channel.py (82%) delete mode 100644 python/tests/unit/agents/test_azure_assistant_agent.py delete mode 100644 python/tests/unit/agents/test_chat_completion_agent.py rename python/tests/unit/agents/{ => test_group_chat}/test_agent_channel.py (100%) rename python/tests/unit/agents/{ => 
test_group_chat}/test_agent_chat.py (100%) rename python/tests/unit/agents/{ => test_group_chat}/test_agent_chat_utils.py (100%) rename python/tests/unit/agents/{ => test_group_chat}/test_agent_group_chat.py (100%) rename python/tests/unit/agents/{ => test_group_chat}/test_broadcast_queue.py (100%) rename python/tests/unit/agents/{ => test_group_chat_strategies}/test_aggregator_termination_strategy.py (84%) rename python/tests/unit/agents/{ => test_group_chat_strategies}/test_default_termination_strategy.py (100%) rename python/tests/unit/agents/{ => test_group_chat_strategies}/test_kernel_function_selection_strategy.py (85%) rename python/tests/unit/agents/{ => test_group_chat_strategies}/test_kernel_function_termination_strategy.py (85%) rename python/tests/unit/agents/{ => test_group_chat_strategies}/test_sequential_strategy_selection.py (82%) rename python/tests/unit/agents/{ => test_group_chat_strategies}/test_termination_strategy.py (77%) delete mode 100644 python/tests/unit/agents/test_open_ai_assistant_agent.py delete mode 100644 python/tests/unit/agents/test_open_ai_assistant_base.py create mode 100644 python/tests/unit/connectors/ai/open_ai/services/test_openai_realtime.py create mode 100644 python/tests/unit/connectors/memory/chroma/test_chroma.py create mode 100644 python/tests/unit/connectors/memory/mongodb_atlas/conftest.py create mode 100644 python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_collection.py create mode 100644 python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_store.py create mode 100644 python/tests/unit/contents/test_hashing_utils.py create mode 100644 python/tests/unit/core_plugins/test_crew_ai_enterprise.py delete mode 100644 python/tests/unit/functions/test_kernel_experimental_decorator.py create mode 100644 python/tests/unit/utils/test_feature_stage_decorator.py diff --git a/.github/_typos.toml b/.github/_typos.toml index d9a2dcb7a2e4..74279ff55fe6 100644 --- a/.github/_typos.toml +++ b/.github/_typos.toml @@ -23,7 +23,10 @@ extend-exclude = [ "PopulationByCountry.csv", "PopulationByAdmin1.csv", "WomensSuffrage.txt", - "SK-dotnet.sln.DotSettings" + "SK-dotnet.sln.DotSettings", + "**/azure_ai_search_hotel_samples/README.md", + "**/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Program.cs", + "**/Demos/ProcessFrameworkWithAspire/**/*.http" ] [default.extend-words] diff --git a/.github/workflows/dotnet-build-and-test.yml b/.github/workflows/dotnet-build-and-test.yml index f291c5a4d888..dde2fad80b39 100644 --- a/.github/workflows/dotnet-build-and-test.yml +++ b/.github/workflows/dotnet-build-and-test.yml @@ -148,11 +148,17 @@ jobs: # Generate test reports and check coverage - name: Generate test reports - uses: danielpalme/ReportGenerator-GitHub-Action@5.4.3 + uses: danielpalme/ReportGenerator-GitHub-Action@5.4.4 with: reports: "./TestResults/Coverage/**/coverage.cobertura.xml" targetdir: "./TestResults/Reports" - reporttypes: "JsonSummary" + reporttypes: "HtmlInline;JsonSummary" + + - name: Upload coverage report artifact + uses: actions/upload-artifact@v4 + with: + name: CoverageReport-${{ matrix.os }}-${{ matrix.dotnet }}-${{ matrix.configuration }} # Artifact name + path: ./TestResults/Reports # Directory containing files to upload - name: Check coverage shell: pwsh diff --git a/.github/workflows/label-needs-port.yml b/.github/workflows/label-needs-port.yml new file mode 100644 index 000000000000..baec103be3d7 --- /dev/null +++ 
b/.github/workflows/label-needs-port.yml @@ -0,0 +1,41 @@ +name: Create Issue when Needs Port label is added +on: + issues: + types: [labeled] + pull_request_target: + types: [labeled] + +jobs: + create_issue: + if: contains(github.event.pull_request.labels.*.name, 'needs_port_to_dotnet') || contains(github.event.pull_request.labels.*.name, 'needs_port_to_python') || contains(github.event.issue.labels.*.name, 'needs_port_to_dotnet') || contains(github.event.issue.labels.*.name, 'needs_port_to_python') + name: "Create Issue" + continue-on-error: true + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: read + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_REPO: ${{ github.repository }} + + steps: + - name: Create dotnet issue + if: contains(github.event.pull_request.labels.*.name, 'needs_port_to_dotnet') || contains(github.event.issue.labels.*.name, 'needs_port_to_dotnet') + run: | + new_issue_url=$(gh issue create \ + --title "Port python feature: ${{ github.event.issue.title || github.event.pull_request.title }}" \ + --label ".NET" \ + --body "# Original issue + ${{ github.event.issue.html_url || github.event.pull_request.html_url }} + ## Description + ${{ github.event.issue.body || github.event.pull_request.body }}") + - name: Create python issue + if: contains(github.event.pull_request.labels.*.name, 'needs_port_to_python') || contains(github.event.issue.labels.*.name, 'needs_port_to_python') + run: | + new_issue_url=$(gh issue create \ + --title "Port dotnet feature: ${{ github.event.issue.title || github.event.pull_request.title }}" \ + --label "python" \ + --body "# Original issue + ${{ github.event.issue.html_url || github.event.pull_request.html_url }} + ## Description + ${{ github.event.issue.body || github.event.pull_request.body }}") diff --git a/.github/workflows/python-build.yml b/.github/workflows/python-build.yml index 2f7f5de183b1..19029d60b6bc 100644 --- a/.github/workflows/python-build.yml +++ b/.github/workflows/python-build.yml @@ -4,11 +4,16 @@ on: release: types: [published] +permissions: + contents: read + id-token: "write" + jobs: python-build-assets: if: github.event_name == 'release' && startsWith(github.event.release.tag_name, 'python-') name: Python Build Assets and add to Release runs-on: ubuntu-latest + environment: "integration" permissions: contents: write env: @@ -21,6 +26,7 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} + cache-dependency-glob: "**/uv.lock" - name: Check version run: | echo "Building and uploading Python package version: ${{ github.event.release.tag_name }}" @@ -31,3 +37,14 @@ jobs: with: files: | python/dist/* + - name: Azure Login + uses: azure/login@v2 + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + - name: Start DevOps pipeline + uses: azure/cli@v2 + with: + inlineScript: | + az pipelines run --id ${{ vars.ADO_PYTHON_RELEASE_ID }} --org ${{ vars.ADO_ORG }} --project ${{ vars.ADO_PROJECT_NAME }} --parameters tag=${{ github.event.release.tag_name }} delay=0 diff --git a/.github/workflows/python-integration-tests.yml b/.github/workflows/python-integration-tests.yml index b4d854df72a6..39d7a3f38452 100644 --- a/.github/workflows/python-integration-tests.yml +++ b/.github/workflows/python-integration-tests.yml @@ -55,10 +55,10 @@ env: MISTRALAI_EMBEDDING_MODEL_ID: ${{ vars.MISTRALAI_EMBEDDING_MODEL_ID }} ANTHROPIC_API_KEY: ${{secrets.ANTHROPIC_API_KEY}} 
ANTHROPIC_CHAT_MODEL_ID: ${{ vars.ANTHROPIC_CHAT_MODEL_ID }} - OLLAMA_CHAT_MODEL_ID: "${{ vars.OLLAMA_CHAT_MODEL_ID || '' }}" # llava-phi3 - OLLAMA_CHAT_MODEL_ID_IMAGE: "${{ vars.OLLAMA_CHAT_MODEL_ID_IMAGE || '' }}" # llava-phi3 - OLLAMA_CHAT_MODEL_ID_TOOL_CALL: "${{ vars.OLLAMA_CHAT_MODEL_ID_TOOL_CALL || '' }}" # llama3.2 - OLLAMA_TEXT_MODEL_ID: "${{ vars.OLLAMA_TEXT_MODEL_ID || '' }}" # llava-phi3 + OLLAMA_CHAT_MODEL_ID: "${{ vars.OLLAMA_CHAT_MODEL_ID || '' }}" # llama3.2:1b + OLLAMA_CHAT_MODEL_ID_IMAGE: "${{ vars.OLLAMA_CHAT_MODEL_ID_IMAGE || '' }}" # moondream + OLLAMA_CHAT_MODEL_ID_TOOL_CALL: "${{ vars.OLLAMA_CHAT_MODEL_ID_TOOL_CALL || '' }}" # llama3.2:1b + OLLAMA_TEXT_MODEL_ID: "${{ vars.OLLAMA_TEXT_MODEL_ID || '' }}" # llama3.2:1b OLLAMA_EMBEDDING_MODEL_ID: "${{ vars.OLLAMA_EMBEDDING_MODEL_ID || '' }}" # nomic-embed-text GOOGLE_AI_GEMINI_MODEL_ID: ${{ vars.GOOGLE_AI_GEMINI_MODEL_ID }} GOOGLE_AI_EMBEDDING_MODEL_ID: ${{ vars.GOOGLE_AI_EMBEDDING_MODEL_ID }} @@ -69,6 +69,8 @@ env: REDIS_CONNECTION_STRING: ${{ vars.REDIS_CONNECTION_STRING }} AZURE_COSMOS_DB_NO_SQL_URL: ${{ vars.AZURE_COSMOS_DB_NO_SQL_URL }} AZURE_COSMOS_DB_NO_SQL_KEY: ${{ secrets.AZURE_COSMOS_DB_NO_SQL_KEY }} + BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN: ${{ secrets.BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN }} + BEDROCK_AGENT_FOUNDATION_MODEL: ${{ vars.BEDROCK_AGENT_FOUNDATION_MODEL }} jobs: paths-filter: @@ -118,6 +120,7 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} + cache-dependency-glob: "**/uv.lock" - name: Install dependencies run: | uv sync --all-extras --dev @@ -147,10 +150,92 @@ jobs: run: | uv run pytest -v --log-cli-level=INFO --durations=20 -n logical --dist loadfile --dist worksteal -m "not ollama" ./tests/integration/completions ./tests/integration/embeddings ./tests/samples ./tests/integration/cross_language + python-merge-gate-multi-modality: + name: Python Pre-Merge Integration Tests - Multi-Modality + needs: paths-filter + if: github.event_name != 'pull_request' && github.event_name != 'schedule' && needs.paths-filter.outputs.pythonChanges == 'true' + strategy: + max-parallel: 1 + fail-fast: false + matrix: + python-version: ["3.11"] + os: [ubuntu-latest] + defaults: + run: + working-directory: python + runs-on: ${{ matrix.os }} + environment: "integration" + env: + UV_PYTHON: ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v4 + - name: Set up uv + uses: astral-sh/setup-uv@v5 + with: + version: "0.5.x" + enable-cache: true + cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} + - name: Install dependencies + run: | + uv sync --all-extras --dev + - name: Azure CLI Login + if: github.event_name != 'pull_request' + uses: azure/login@v2 + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + - name: Run Integration Tests + id: run_tests_multi_modality + shell: bash + run: | + uv run pytest -v --log-cli-level=INFO --durations=20 -n logical --dist loadfile --dist worksteal ./tests/integration/audio_to_text ./tests/integration/text_to_audio ./tests/integration/text_to_image + + python-merge-gate-agents: + name: Python Pre-Merge Integration Tests - Agents + needs: paths-filter + if: github.event_name != 'pull_request' && github.event_name != 'schedule' && needs.paths-filter.outputs.pythonChanges == 'true' + strategy: + max-parallel: 1 + fail-fast: false + matrix: + python-version: ["3.11"] + os: [ubuntu-latest] + defaults: + run: + 
working-directory: python + runs-on: ${{ matrix.os }} + environment: "integration" + env: + UV_PYTHON: ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v4 + - name: Set up uv + uses: astral-sh/setup-uv@v5 + with: + version: "0.5.x" + enable-cache: true + cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} + - name: Install dependencies + run: | + uv sync --all-extras --dev + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: ${{ vars.AWS_REGION }} + - name: Run Integration Tests + id: run_tests_agents + shell: bash + run: | + uv run pytest -v --log-cli-level=INFO --durations=20 -n logical --dist loadfile --dist worksteal ./tests/integration/agents + python-merge-gate-ollama: name: Python Pre-Merge Integration Tests - Ollama needs: paths-filter - if: github.event_name != 'pull_request' && github.event_name != 'schedule' && needs.paths-filter.outputs.pythonChanges == 'true' + # Ollama tests are very unstable at the moment. It often fails to pull models from the Ollama server. Thus, this job is disabled for now. + if: false && github.event_name != 'pull_request' && github.event_name != 'schedule' && needs.paths-filter.outputs.pythonChanges == 'true' strategy: max-parallel: 1 fail-fast: false @@ -173,6 +258,7 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} + cache-dependency-glob: "**/uv.lock" - name: Install dependencies run: | uv sync --all-extras --dev @@ -237,6 +323,7 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} + cache-dependency-glob: "**/uv.lock" - name: Install dependencies run: | uv sync --all-extras --dev @@ -268,7 +355,6 @@ jobs: run: working-directory: python runs-on: ${{ matrix.os }} - environment: "integration" env: UV_PYTHON: ${{ matrix.python-version }} MEMORY_CONCEPT_SAMPLE: "true" @@ -295,6 +381,7 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} + cache-dependency-glob: "**/uv.lock" - name: Install dependencies run: | uv sync --all-extras --dev @@ -305,7 +392,8 @@ jobs: ollama serve & sleep 5 - name: Pull model in Ollama - if: matrix.os == 'ubuntu-latest' + # Ollama tests are very unstable at the moment. It often fails to pull models from the Ollama server. Thus, Ollama is disabled for now. + if: false && matrix.os == 'ubuntu-latest' run: | ollama pull ${{ vars.OLLAMA_CHAT_MODEL_ID }} ollama pull ${{ vars.OLLAMA_CHAT_MODEL_ID_IMAGE }} @@ -343,14 +431,16 @@ jobs: id: run_tests_completions timeout-minutes: 10 shell: bash + # Ollama tests are very unstable at the moment. It often fails to pull models from the Ollama server. Thus, Ollama is disabled for now. run: | - uv run pytest -v -n logical --dist loadfile --dist worksteal ./tests/integration/completions + uv run pytest -v -n logical --dist loadfile --dist worksteal -m "not ollama" ./tests/integration/completions - name: Run Integration Tests - Embeddings id: run_tests_embeddings timeout-minutes: 5 shell: bash + # Ollama tests are very unstable at the moment. It often fails to pull models from the Ollama server. Thus, Ollama is disabled for now. 
run: | - uv run pytest -v -n logical --dist loadfile --dist worksteal ./tests/integration/embeddings + uv run pytest -v -n logical --dist loadfile --dist worksteal -m "not ollama" ./tests/integration/embeddings - name: Run Integration Tests - Memory id: run_tests_memory timeout-minutes: 5 @@ -375,6 +465,18 @@ jobs: shell: bash run: | uv run pytest -v -n logical --dist loadfile --dist worksteal ./tests/samples + - name: Run Integration Tests - Agents + id: run_tests_agents + timeout-minutes: 5 + shell: bash + run: | + uv run pytest -v -n logical --dist loadfile --dist worksteal ./tests/integration/agents + - name: Run Integration Tests - Multi-Modality + id: run_tests_multi_modality + timeout-minutes: 5 + shell: bash + run: | + uv run pytest -v -n logical --dist loadfile --dist worksteal ./tests/integration/audio_to_text ./tests/integration/text_to_audio ./tests/integration/text_to_image # This final job is required to satisfy the merge queue. It must only run (or succeed) if no tests failed python-integration-tests-check: @@ -388,6 +490,8 @@ jobs: python-merge-gate-ai-services, python-merge-gate-ollama, python-merge-gate-memory, + python-merge-gate-agents, + python-merge-gate-multi-modality, python-integration-tests, ] steps: diff --git a/.github/workflows/python-lint.yml b/.github/workflows/python-lint.yml index 65e04c0aba3c..fe2c6882d8ac 100644 --- a/.github/workflows/python-lint.yml +++ b/.github/workflows/python-lint.yml @@ -30,6 +30,7 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} + cache-dependency-glob: "**/uv.lock" - name: Install the project run: uv sync --all-extras --dev - uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/python-manual-release.yml b/.github/workflows/python-manual-release.yml new file mode 100644 index 000000000000..5742cf747311 --- /dev/null +++ b/.github/workflows/python-manual-release.yml @@ -0,0 +1,30 @@ +name: Python Start Release on ADO + +on: + workflow_dispatch: + inputs: + tag: + description: "Tag to release" + required: true + +permissions: + contents: read + id-token: "write" + +jobs: + python-build-assets: + name: Trigger ADO Pipeline for Python Release + runs-on: ubuntu-latest + environment: "integration" + steps: + - name: Azure Login + uses: azure/login@v2 + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + - name: Start DevOps pipeline + uses: azure/cli@v2 + with: + inlineScript: | + az pipelines run --id ${{ vars.ADO_PYTHON_RELEASE_ID }} --org ${{ vars.ADO_ORG }} --project ${{ vars.ADO_PROJECT_NAME }} --parameters tag=${{ inputs.tag }} delay=0 diff --git a/.github/workflows/python-test-coverage.yml b/.github/workflows/python-test-coverage.yml index fc0ae4087c7b..44e567580968 100644 --- a/.github/workflows/python-test-coverage.yml +++ b/.github/workflows/python-test-coverage.yml @@ -32,6 +32,7 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ env.UV_PYTHON }} + cache-dependency-glob: "**/uv.lock" - name: Install the project run: uv sync --all-extras --dev - name: Test with pytest diff --git a/.github/workflows/python-unit-tests.yml b/.github/workflows/python-unit-tests.yml index 65a61710ceff..e7a749ab589c 100644 --- a/.github/workflows/python-unit-tests.yml +++ b/.github/workflows/python-unit-tests.yml @@ -20,18 +20,10 @@ jobs: python-version: ["3.10", "3.11", "3.12"] os: [ubuntu-latest, windows-latest, macos-latest] experimental: [false] - # 
include: - # - python-version: "3.13" - # os: "ubuntu-latest" - # experimental: true - # - python-version: "3.13t" - # os: "ubuntu-latest" - # experimental: true - # gil: 0 - # - python-version: "3.13t" - # os: "ubuntu-latest" - # experimental: true - # gil: 1 + include: + - python-version: "3.13" + os: "ubuntu-latest" + experimental: true env: UV_PYTHON: ${{ matrix.python-version }} permissions: @@ -47,6 +39,7 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} + cache-dependency-glob: "**/uv.lock" - name: Install the project run: uv sync --all-extras --dev -U --prerelease=if-necessary-or-explicit - name: Test with pytest diff --git a/.vscode/launch.json b/.vscode/launch.json index ae7d191a00a1..d643e4be4b96 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -1,6 +1,19 @@ { "version": "0.2.0", "configurations": [ + { + "name": "CAPs - Demo Sample", + "type": "coreclr", + "request": "launch", + "preLaunchTask": "build (CopilotAgentPluginsDemoSample)", + "program": "${workspaceFolder}/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/bin/Debug/net8.0/CopilotAgentPluginsDemoSample.exe", + "args": [ + "demo" + ], + "cwd": "${workspaceFolder}/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample", + "stopAtEntry": false, + "console": "integratedTerminal" + }, { // Use IntelliSense to find out which attributes exist for C# debugging // Use hover for the description of the existing attributes diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 1cd9319c318b..afe3d20b3390 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -222,6 +222,15 @@ "cwd": "${workspaceFolder}/dotnet/src/IntegrationTests/" } }, + { + "label": "build (CopilotAgentPluginsDemoSample)", + "command": "dotnet", + "type": "process", + "args": [ + "build", + "${workspaceFolder}/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.csproj" + ] + }, // **************** // Samples (dotnet) // **************** diff --git a/docs/decisions/0054-processes.md b/docs/decisions/0054-processes.md index d9ae06be4028..ed2f0c7c17ef 100644 --- a/docs/decisions/0054-processes.md +++ b/docs/decisions/0054-processes.md @@ -26,6 +26,9 @@ In technical terms, a process is something that can be represented as a graph wh - Customers should be able to leverage their existing investments in all supported languages of Semantic Kernel. - ``` + + ``` + - Customers should be able to leverage their existing investments in infrastructure. - Customers should be able to collaborate with their business process peers to build up composable processes. - Customers should be able to use AI to enhance and streamline the steps within their business processes. @@ -314,5 +317,5 @@ The following packages will be created for Processes: In validation of the proposed solution, two runtimes were created, one for the local/server scenario and one for the distributed actor scenario using Orleans. Both of these implementation were based on the [Pregel Algorithm](https://kowshik.github.io/JPregel/pregel_paper.pdf) for large-scale graph processing. This algorithm is well tested and well suited for single machine scenarios as well as distributed systems. More information on how the Pregel algorithm works can be found in the following links. 
-- [Pregel - The Morning Paper](https://blog.acolyer.org/2015/05/26/pregel-a-system-for-large-scale-graph-processing/) + diff --git a/docs/decisions/0064-hybrid-model-orchestration.md b/docs/decisions/0064-hybrid-model-orchestration.md new file mode 100644 index 000000000000..4038d5ff00a5 --- /dev/null +++ b/docs/decisions/0064-hybrid-model-orchestration.md @@ -0,0 +1,276 @@ +--- +status: accepted +contact: sergeymenshykh +date: 2025-02-05 +deciders: dmytrostruk, markwallace, rbarreto, sergeymenshykh, westey-m, +--- + +# Hybrid Model Orchestration + +## Context and Problem Statement +Taking into account the constantly emerging and improving local and cloud-based models, in addition to the growing demand for utilizing local AI models running on local devices' NPUs, +AI powered applications need to be able to effectively and seamlessly leverage both local and cloud models for inference to achieve the best AI user experience. + +## Decision Drivers + +1. The model orchestration layer should be simple and extensible. +2. The model orchestration layer client code should not be aware of or deal with the underlying complexities. +3. The model orchestration layer should allow for different strategies for selecting the best model(s) for the task at hand. + +## Considered Implementation Options + +The following options consider a few ways to implement the model orchestration layer. + +### Option 1: IChatClient implementation per orchestration strategy + +This option presents a simple and straightforward approach to implementing the model orchestration layer. Each strategy is implemented as a separate implementation of the IChatClient interface. + +For example, a fallback strategy that uses the first configured chat client for inference and falls back to the next one if the AI model is not available may be implemented as follows: +```csharp +public sealed class FallbackChatClient : IChatClient +{ + private readonly IChatClient[] _clients; + + public FallbackChatClient(params IChatClient[] clients) + { + this._clients = clients; + } + + public Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + { + foreach (var client in this._clients) + { + try + { + return client.CompleteAsync(chatMessages, options, cancellationToken); + } + catch (HttpRequestException ex) + { + if (ex.StatusCode >= 500) + { + // Try the next client + continue; + } + + throw; + } + } + } + + public IAsyncEnumerable CompleteStreamingAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + { + ... + } + + public void Dispose() { /*We can't dispose clients here because they can be used up the stack*/ } + + public ChatClientMetadata Metadata => new ChatClientMetadata(); + + public object? GetService(Type serviceType, object? serviceKey = null) => null; +} +``` + +Other orchestration strategies, such as latency-based or token-based strategies, can be implemented in a similar way: a class that implements the IChatClient interface and the corresponding chat client selection strategy. + +Pros: +- Does not require any new abstraction. +- Simple and straightforward implementation. +- Can be sufficient for most use cases. 
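+
+For illustration, a minimal usage sketch of this option might look as follows. The `OnnxChatClient` and `OpenAIChatClient` names are the illustrative client names used later in this document, not concrete types, and the message construction is only one possible way to call the client:
+```csharp
+// Illustrative only: concrete IChatClient implementations for a local ONNX model
+// and an OpenAI-hosted model are assumed to exist.
+IChatClient onnxChatClient = new OnnxChatClient(...);
+IChatClient openAIChatClient = new OpenAIChatClient(...);
+
+// Tries the local client first and falls back to the cloud client on server errors.
+IChatClient fallbackChatClient = new FallbackChatClient(onnxChatClient, openAIChatClient);
+
+var completion = await fallbackChatClient.CompleteAsync(
+    [new ChatMessage(ChatRole.User, "Do I need an umbrella?")]);
+```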
+ +### Option 2: HybridChatClient class with chat completion handler(s) per orchestration strategy + +This option introduces a HybridChatClient class that implements the IChatClient interface and delegates the selection routine to a provided handler represented by the abstract ChatCompletionHandler class: +```csharp +public sealed class HybridChatClient : IChatClient +{ + private readonly IChatClient[] _chatClients; + private readonly ChatCompletionHandler _handler; + private readonly Kernel? _kernel; + + public HybridChatClient(IChatClient[] chatClients, ChatCompletionHandler handler, Kernel? kernel = null) + { + this._chatClients = chatClients; + this._handler = handler; + this._kernel = kernel; + } + + public Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + { + return this._handler.CompleteAsync( + new ChatCompletionHandlerContext + { + ChatMessages = chatMessages, + Options = options, + ChatClients = this._chatClients.ToDictionary(c => c, c => (CompletionContext?)null), + Kernel = this._kernel, + }, cancellationToken); + } + + public IAsyncEnumerable CompleteStreamingAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + { + ... + } + + ... +} + +public abstract class ChatCompletionHandler +{ + public abstract Task CompleteAsync(ChatCompletionHandlerContext context, CancellationToken cancellationToken = default); + + public abstract IAsyncEnumerable CompleteStreamingAsync(ChatCompletionHandlerContext context, CancellationToken cancellationToken = default); +} +``` + +The HybridChatClient class passes all the necessary information to the handler via the ChatCompletionHandlerContext class, which contains the list of chat clients, chat messages, options, and Kernel instance. +```csharp +public class ChatCompletionHandlerContext +{ + public IDictionary ChatClients { get; init; } + + public IList ChatMessages { get; init; } + + public ChatOptions? Options { get; init; } + + public Kernel? Kernel { get; init; } +} +``` + +The fallback strategy shown in the previous option can be implemented as the following handler: +```csharp +public class FallbackChatCompletionHandler : ChatCompletionHandler +{ + public override async Task CompleteAsync(ChatCompletionHandlerContext context, CancellationToken cancellationToken = default) + { + for (int i = 0; i < context.ChatClients.Count; i++) + { + var chatClient = context.ChatClients.ElementAt(i).Key; + + try + { + return client.CompleteAsync(chatMessages, options, cancellationToken); + } + catch (HttpRequestException ex) + { + if (ex.StatusCode >= 500) + { + // Try the next client + continue; + } + + throw; + } + } + + throw new InvalidOperationException("No client provided for chat completion."); + } + + public override async IAsyncEnumerable CompleteStreamingAsync(ChatCompletionHandlerContext context, CancellationToken cancellationToken = default) + { + ... + } +} +``` + +and the caller code would look like this: +```csharp +IChatClient onnxChatClient = new OnnxChatClient(...); + +IChatClient openAIChatClient = new OpenAIChatClient(...); + +// Tries the first client and falls back to the next one if the first one fails +FallbackChatCompletionHandler handler = new FallbackChatCompletionHandler(...); + +IChatClient hybridChatClient = new HybridChatClient([onnxChatClient, openAIChatClient], handler); + +... 
+ +var result = await hybridChatClient.CompleteAsync("Do I need an umbrella?", ...); +``` + +The handlers can be chained to create more complex scenarios, where a handler performs some preprocessing and then delegates the call to another handler with an augmented chat clients list. + +For example, the first handler identifies that a cloud model has requested access to sensitive data and delegates the call handling to local models to process it. + +```csharp +IChatClient onnxChatClient = new OnnxChatClient(...); + +IChatClient llamaChatClient = new LlamaChatClient(...); + +IChatClient openAIChatClient = new OpenAIChatClient(...); + +// Tries the first client and falls back to the next one if the first one fails +FallbackChatCompletionHandler fallbackHandler = new FallbackChatCompletionHandler(...); + +// Check if the request contains sensitive data, identifies the client(s) allowed to work with the sensitive data, and delegates the call handling to the next handler. +SensitiveDataHandler sensitiveDataHandler = new SensitiveDataHandler(fallbackHandler); + +IChatClient hybridChatClient = new HybridChatClient(new[] { onnxChatClient, llamaChatClient, openAIChatClient }, sensitiveDataHandler); + +var result = await hybridChatClient.CompleteAsync("Do I need an umbrella?", ...); +``` + +Examples of complex orchestration scenarios: + +| First Handler | Second Handler | Scenario Description | +|---------------------------------------|--------------------------------|---------------------------------------------------------------------------| +| InputTokenThresholdEvaluationHandler | FastestChatCompletionHandler | Identifies models based on the prompt's input token size and each model's min/max token capacity, then returns the fastest model's response. | +| InputTokenThresholdEvaluationHandler | RelevancyChatCompletionHandler | Identifies models based on the prompt's input token size and each model's min/max token capacity, then returns the most relevant response. | +| InputTokenThresholdEvaluationHandler | FallbackChatCompletionHandler | Identifies models based on the prompt's input token size and each model's min/max token capacity, then returns the first available model's response. | +| SensitiveDataRoutingHandler | FastestChatCompletionHandler | Identifies models based on data sensitivity, then returns the fastest model's response. | +| SensitiveDataRoutingHandler | RelevancyChatCompletionHandler | Identifies models based on data sensitivity, then returns the most relevant response. | +| SensitiveDataRoutingHandler | FallbackChatCompletionHandler | Identifies models based on data sensitivity, then returns the first available model's response. | + +Pros: +- Allows reusing same handlers to create various composite orchestration strategies. + +Cons: +- Requires new abstractions and components than the previous option: context classes and code for handling the next handler. + +
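+To make the chaining idea above more concrete, a preprocessing handler that narrows the candidate clients and then delegates to an inner handler could be sketched roughly as follows. This is a sketch only: the `ContainsSensitiveData` and `IsLocalClient` helpers are assumptions for illustration, and the return types mirror the Microsoft.Extensions.AI preview abstractions referenced above rather than a finalized design:
+```csharp
+public sealed class SensitiveDataRoutingHandler : ChatCompletionHandler
+{
+    private readonly ChatCompletionHandler _next;
+
+    public SensitiveDataRoutingHandler(ChatCompletionHandler next) => this._next = next;
+
+    public override Task<ChatCompletion> CompleteAsync(ChatCompletionHandlerContext context, CancellationToken cancellationToken = default)
+        => this._next.CompleteAsync(this.FilterClients(context), cancellationToken);
+
+    public override IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAsync(ChatCompletionHandlerContext context, CancellationToken cancellationToken = default)
+        => this._next.CompleteStreamingAsync(this.FilterClients(context), cancellationToken);
+
+    private ChatCompletionHandlerContext FilterClients(ChatCompletionHandlerContext context)
+    {
+        // Hypothetical predicate: how sensitivity is detected is up to the concrete implementation.
+        bool isSensitive = ContainsSensitiveData(context.ChatMessages);
+
+        return new ChatCompletionHandlerContext
+        {
+            ChatMessages = context.ChatMessages,
+            Options = context.Options,
+            Kernel = context.Kernel,
+            // Only keep local clients when the request contains sensitive data;
+            // IsLocalClient is likewise a hypothetical helper.
+            ChatClients = context.ChatClients
+                .Where(pair => !isSensitive || IsLocalClient(pair.Key))
+                .ToDictionary(pair => pair.Key, pair => pair.Value),
+        };
+    }
+}
+```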
+ +POC demonstrating this option can be found [here](https://github.com/microsoft/semantic-kernel/pull/10412). + +### Option 3: Implementing existing IAIServiceSelector interface. + +The Semantic Kernel has a mechanism that allows for the dynamic selection of AI services: + +```csharp +public interface IAIServiceSelector +{ + bool TrySelectAIService( + Kernel kernel, + KernelFunction function, + KernelArguments arguments, + [NotNullWhen(true)] out T? service, + out PromptExecutionSettings? serviceSettings) where T : class, IAIService; +} +``` + +However, this mechanism requires specific context - the kernel, function, and arguments which may not always be available. +Additionally, it only works with implementations of the IAIService interface, which may not be compatible with all AI services, +such as those in Microsoft.Extensions.AI that implement the IChatClient interface. + +Furthermore, this mechanism cannot be used in orchestration scenarios where an AI service needs to be prompted first to determine its availability, latency, etc. +For example, to check if an AI service is available, the selector would need to send chat messages with options to the service. It should then return +the completion if the service is available, or fallback to another service if it is not. Given that the TrySelectAIService method does not accept a list of +chat messages or options, it is impossible to send chat messages using this method. Even if it were possible, the consumer code would have to resend the same +chat messages to the selected service to obtain a completion, as the selector does not return the completion itself. Additionally, the TrySelectAIService method +is synchronous, making it difficult to send chat messages without using synchronous code, which is generally discouraged. + +Looking at the above, it is clear that the IAIServiceSelector interface is not suitable for the hybrid orchestration of AI services since it was designed for a different purpose: +to synchronously select an instance of an AI service based on SK context and service metadata without taking the results of completion and streamed completion methods into account. + +Pros: +- Reuses the existing mechanism for AI service selection. + +Cons: +- Not suitable for all AI services. +- Requires context that may not be available in all scenarios. +- Consumer code must be aware of the IAIServiceSelector interface instead of simply using the IChatClient interface. +- Synchronous method. + +## Decision Outcome + +Chosen option: Option 1 because it does not require any new abstraction; its simplicity and straightforwardness are sufficient for most use cases. +Option 2 can be considered in the future if more complex orchestration scenarios are required. \ No newline at end of file diff --git a/docs/decisions/0065-realtime-api-clients.md b/docs/decisions/0065-realtime-api-clients.md new file mode 100644 index 000000000000..a27987aeaf00 --- /dev/null +++ b/docs/decisions/0065-realtime-api-clients.md @@ -0,0 +1,1770 @@ +--- +# These are optional elements. Feel free to remove any of them. 
+status: proposed
+contact: eavanvalkenburg
+date: 2025-01-31
+deciders: eavanvalkenburg, markwallace, alliscode, sphenry
+consulted: westey-m, rbarreto, alliscode, markwallace, sergeymenshykh, moonbox3
+informed: taochenosu, dmytrostruk
+---
+
+# Multi-modal Realtime API Clients
+
+## Context and Problem Statement
+
+Multiple model providers are starting to enable realtime voice-to-voice, or even multi-modal, two-way communication with their models. This includes OpenAI with their [Realtime API][openai-realtime-api] and [Google Gemini][google-gemini]. These APIs promise some very interesting new ways of using LLMs for different scenarios, which we want to enable with Semantic Kernel.
+
+The key feature that Semantic Kernel brings into this system is the ability to (re)use Semantic Kernel functions as tools with these APIs. Google also offers options for using video and images as input; this will likely not be implemented first, but the abstraction should be able to deal with it.
+
+> [!IMPORTANT]
+> Both the OpenAI and Google realtime APIs are in preview/beta. This means breaking changes in the way they work may be coming in the future, so the clients built to support these APIs will be experimental until the APIs stabilize.
+
+At this time, the protocols that these APIs use are WebSockets and WebRTC.
+
+In both cases, events are sent to and from the service. Some events contain content (text, audio, or video, the latter so far only for sending, not receiving), while others are "control" events, like content created, function call requested, etc. Events sent to the service either carry content (voice, text, or function call output) or are control events, like committing the input audio and requesting a response.
+
+### Websocket
+WebSocket has been around for a while and is a well-known technology; it is a full-duplex communication protocol over a single, long-lived connection. It is used for sending and receiving messages between client and server in real time. Each event can contain a message, which might contain a content item or a control event. Audio is sent as a base64-encoded string in an event.
+
+### WebRTC
+WebRTC is an open-source project and web standard that provides web browsers and mobile applications with real-time communication via simple APIs. It allows audio and video communication to work inside web pages and other applications through direct peer-to-peer communication, eliminating the need to install plugins or download native apps. It is used for sending and receiving audio and video streams, and can be used for sending (data-)messages as well. The big difference compared to WebSockets is that it explicitly creates a channel for audio and video and a separate channel for "data"; the data channel carries all non-AV content (text, function calls, function results) and control events, like errors or acknowledgements.
+ + +### Event types (Websocket and partially WebRTC) + +#### Client side events: +| **Content/Control event** | **Event Description** | **OpenAI Event** | **Google Event** | +| ------------------------- | --------------------------------- | ---------------------------- | ---------------------------------- | +| Control | Configure session | `session.update` | `BidiGenerateContentSetup` | +| Content | Send voice input | `input_audio_buffer.append` | `BidiGenerateContentRealtimeInput` | +| Control | Commit input and request response | `input_audio_buffer.commit` | `-` | +| Control | Clean audio input buffer | `input_audio_buffer.clear` | `-` | +| Content | Send text input | `conversation.item.create` | `BidiGenerateContentClientContent` | +| Control | Interrupt audio | `conversation.item.truncate` | `-` | +| Control | Delete content | `conversation.item.delete` | `-` | +| Control | Respond to function call request | `conversation.item.create` | `BidiGenerateContentToolResponse` | +| Control | Ask for response | `response.create` | `-` | +| Control | Cancel response | `response.cancel` | `-` | + +#### Server side events: +| **Content/Control event** | **Event Description** | **OpenAI Event** | **Google Event** | +| ------------------------- | -------------------------------------- | ------------------------------------------------------- | ----------------------------------------- | +| Control | Error | `error` | `-` | +| Control | Session created | `session.created` | `BidiGenerateContentSetupComplete` | +| Control | Session updated | `session.updated` | `BidiGenerateContentSetupComplete` | +| Control | Conversation created | `conversation.created` | `-` | +| Control | Input audio buffer committed | `input_audio_buffer.committed` | `-` | +| Control | Input audio buffer cleared | `input_audio_buffer.cleared` | `-` | +| Control | Input audio buffer speech started | `input_audio_buffer.speech_started` | `-` | +| Control | Input audio buffer speech stopped | `input_audio_buffer.speech_stopped` | `-` | +| Content | Conversation item created | `conversation.item.created` | `-` | +| Content | Input audio transcription completed | `conversation.item.input_audio_transcription.completed` | | +| Content | Input audio transcription failed | `conversation.item.input_audio_transcription.failed` | | +| Control | Conversation item truncated | `conversation.item.truncated` | `-` | +| Control | Conversation item deleted | `conversation.item.deleted` | `-` | +| Control | Response created | `response.created` | `-` | +| Control | Response done | `response.done` | `-` | +| Content | Response output item added | `response.output_item.added` | `-` | +| Content | Response output item done | `response.output_item.done` | `-` | +| Content | Response content part added | `response.content_part.added` | `-` | +| Content | Response content part done | `response.content_part.done` | `-` | +| Content | Response text delta | `response.text.delta` | `BidiGenerateContentServerContent` | +| Content | Response text done | `response.text.done` | `-` | +| Content | Response audio transcript delta | `response.audio_transcript.delta` | `BidiGenerateContentServerContent` | +| Content | Response audio transcript done | `response.audio_transcript.done` | `-` | +| Content | Response audio delta | `response.audio.delta` | `BidiGenerateContentServerContent` | +| Content | Response audio done | `response.audio.done` | `-` | +| Content | Response function call arguments delta | `response.function_call_arguments.delta` | 
`BidiGenerateContentToolCall` | +| Content | Response function call arguments done | `response.function_call_arguments.done` | `-` | +| Control | Function call cancelled | `-` | `BidiGenerateContentToolCallCancellation` | +| Control | Rate limits updated | `rate_limits.updated` | `-` | + + +## Overall Decision Drivers +- Abstract away the underlying protocols, so that developers can build applications that implement whatever protocol they want to support, without having to change the client code when changing models or protocols. + - There are some limitations expected here as i.e. WebRTC requires different information at session create time than websockets. +- Simple programming model that is likely able to handle future realtime api's and the evolution of the existing ones. +- Whenever possible we transform incoming content into Semantic Kernel content, but surface everything, so it's extensible for developers and in the future. + +There are multiple areas where we need to make decisions, these are: +- Content and Events +- Programming model +- Audio speaker/microphone handling +- Interface design and naming + +# Content and Events + +## Considered Options - Content and Events +Both the sending and receiving side of these integrations need to decide how to deal with the events. + +1. Treat content separate from control +1. Treat everything as content items +1. Treat everything as events + +### 1. Treat content separate from control +This would mean there are two mechanisms in the clients, one deals with content, and one with control events. + +- Pro: + - strongly typed responses for known content + - easy to use as the main interactions are clear with familiar SK content types, the rest goes through a separate mechanism +- Con: + - new content support requires updates in the codebase and can be considered breaking (potentially sending additional types back) + - additional complexity in dealing with two streams of data + - some items, such as Function calls can be considered both content and control, control when doing auto-function calling, but content when the developer wants to deal with it themselves + +### 2. Treat everything as content items +This would mean that all events are turned into Semantic Kernel content items, and would also mean that we need to define additional content types for the control events. + +- Pro: + - everything is a content item, so it's easy to deal with +- Con: + - new content type needed for control events + +### 3. Treat everything as events +This would introduce events, each event has a type, those can be core content types, like audio, video, image, text, function call or function response, as well as a generic event for control events without content. Each event has a SK type, from above as well as a service_event_type field that contains the event type from the service. Finally the event has a content field, which corresponds to the type, and for the generic event contains the raw event from the service. + +- Pro: + - no transformation needed for service events + - easy to maintain and extend +- Con: + - new concept introduced + - might be confusing to have contents with and without SK types + +## Decision Outcome - Content and Events + +Chosen option: 3 Treat Everything as Events + +This option was chosen to allow abstraction away from the raw events, while still allowing the developer to access the raw events if needed. 
+A base event type called `RealtimeEvent` is added. It has three fields: `event_type`, `service_event_type`, and `service_event`. It then has subclasses for audio, text, image, function call, and function result events.
+
+When a known piece of content comes in, it is parsed into an SK content type and added to the event. That content should also carry the raw event in its `inner_content`, so the raw event is stored twice: once on the event and once on the content. This is by design, so that if developers need to access the raw event they can do so easily even after removing the event layer.
+
+A single event from the service may also contain multiple content items, for instance a response that contains both text and audio; in that case multiple events will be emitted. In principle an event has to be handled once, so if an event is parsable, only the subtype is returned. Since the subtype carries all the same information as the `RealtimeEvent`, developers can still trigger directly off the `service_event_type` and `service_event` if they don't want to use the abstracted types.
+
+```python
+RealtimeEvent(
+    event_type="service",  # single default value in order to discriminate easily
+    service_event_type="conversation.item.create",  # optional
+    service_event={ ... }  # optional, because some events do not have content.
+)
+```
+
+```python
+RealtimeAudioEvent(RealtimeEvent)(
+    event_type="audio",  # single default value in order to discriminate easily
+    service_event_type="response.audio.delta",  # optional
+    service_event={ ... },
+    audio=AudioContent(...)
+)
+```
+
+```python
+RealtimeTextEvent(RealtimeEvent)(
+    event_type="text",  # single default value in order to discriminate easily
+    service_event_type="response.text.delta",  # optional
+    service_event={ ... },
+    text=TextContent(...)
+)
+```
+
+```python
+RealtimeFunctionCallEvent(RealtimeEvent)(
+    event_type="function_call",  # single default value in order to discriminate easily
+    service_event_type="response.function_call_arguments.delta",  # optional
+    service_event={ ... },
+    function_call=FunctionCallContent(...)
+)
+```
+
+```python
+RealtimeFunctionResultEvent(RealtimeEvent)(
+    event_type="function_result",  # single default value in order to discriminate easily
+    service_event_type="response.output_item.added",  # optional
+    service_event={ ... },
+    function_result=FunctionResultContent(...)
+)
+```
+
+```python
+RealtimeImageEvent(RealtimeEvent)(
+    event_type="image",  # single default value in order to discriminate easily
+    service_event_type="response.image.delta",  # optional
+    service_event={ ... },
+    image=ImageContent(...)
+)
+```
+
+This allows you to easily do pattern matching on the `event_type`, use the `service_event_type` to filter on a specific service event type, or match on the (sub)type of the event and get the SK content from it.
+
+There might be other abstracted types needed at some point, for instance for errors or session updates, but since the two current services have no agreement on the existence or structure of these events, it is better to wait until there is a need for them.
+
+### Rejected ideas
+
+#### ID Handling
+One open item is whether to include an extra field in these types for tracking related pieces. However, this becomes problematic because the way those IDs are generated differs per service and is quite complex. For instance, the OpenAI API returns a piece of audio transcript with the following IDs:
+- `event_id`: the unique id of the event
+- `response_id`: the id of the response
+- `item_id`: the id of the item
+- `output_index`: the index of the output item in the response
+- `content_index`: the index of the content part in the item's content array
+
+For an example of the events emitted by OpenAI, see the [details](#background-info) below.
+
+Google, by contrast, has IDs only in some content items, like function calls, but not for audio or text content.
+
+Since the IDs are always available through the raw event (either as inner_content or as .event), it is not necessary to add them to the content types; doing so would make the content types more complex and harder to reuse across services.
+
+#### Wrapping content in a (Streaming)ChatMessageContent
+Wrapping content in a `(Streaming)ChatMessageContent` first would add another layer of complexity, and since a CMC can contain multiple items, accessing audio would look like this: `service_event.content.items[0].audio.data`, which is not as clear as `service_event.audio.data`.
+
+# Programming model
+
+## Considered Options - Programming model
+The programming model for the clients needs to be simple and easy to use, while also being able to handle the complexity of the realtime APIs.
+
+_In this section we will use "events" to refer to both content and control events, regardless of the decision made in the previous section._
+
+This is mostly about the receiving side of things; sending is much simpler.
+
+1. Event handlers, developers register handlers for specific events, and the client calls these handlers when an event is received
+   - 1a: A single event handler, to which every event is passed
+   - 1b: Multiple event handlers, where each event type has its own handler(s)
+2. Event buffers/queues that are exposed to the developer, start sending and start receiving methods, that just initiate the sending and receiving of events and thereby the filling of the buffers
+3. AsyncGenerator that yields Events
+
+### 1. Event handlers, developers register handlers for specific events, and the client calls these handlers when an event is received
+This would mean that the client has a mechanism to register event handlers, and the integration calls these handlers when an event is received. For sending events, a function would be created that sends the event to the service.
+
+- Pro:
+  - no need to deal with complex things like async generators, and easier to keep track of which events you want to respond to
+- Con:
+  - can become cumbersome, and in 1b would require updates to support new events
+  - things like ordering (which event handler is called first) are unclear to the developer
+
+### 2. Event buffers/queues that are exposed to the developer, start sending and start receiving methods, that just initiate the sending and receiving of events and thereby the filling of the buffers
+This would mean that there are two queues, one for sending and one for receiving, and the developer can listen to the receiving queue and send to the sending queue. Internal things like parsing events to content types and auto-function calling are processed first, and the result is put in the queue; the content type should use `inner_content` to capture the full event, and these internal steps might themselves add a message to the send queue as well.
+Internal things like parsing events to content types and auto function calling are processed first and the result is put in the receive queue; the content type should use inner_content to capture the full event, and these steps might add a message to the send queue as well.
+
+- Pro:
+  - simple to use, just start sending and start receiving
+  - easy to understand, as queues are a well known concept
+  - developers can just skip events they are not interested in
+- Con:
+  - potentially causes audio delays because of the queueing mechanism
+
+### 2b. Same as option 2, but with priority handling of audio content
+This would mean that the audio content is handled first and sent to a callback directly, so that the developer can play it or send it onward as soon as possible, and then all other events (like text, function calls, etc.) are processed and put in the queue.
+
+- Pro:
+  - mitigates audio delays
+  - easy to understand, as queues are a well known concept
+  - developers can just skip events they are not interested in
+- Con:
+  - two separate mechanisms used for audio content and events
+
+### 3. AsyncGenerator that yields Events
+This would mean that the clients implement a function that yields events, and the developer can loop through it and deal with events as they come.
+
+- Pro:
+  - easy to use, just loop through the events
+  - easy to understand, as async generators are a well known concept
+  - developers can just skip events they are not interested in
+- Con:
+  - potentially causes audio delays because of the async nature of the generator
+  - lots of event types mean a large single set of code to handle it all
+
+### 3b. Same as option 3, but with priority handling of audio content
+This would mean that the audio content is handled first and sent to a callback directly, so that the developer can play it or send it onward as soon as possible, and then all other events are parsed and yielded.
+
+- Pro:
+  - mitigates audio delays
+  - easy to understand, as async generators are a well known concept
+- Con:
+  - two separate mechanisms used for audio content and events
+
+## Decision Outcome - Programming model
+
+Chosen option: 3b, an AsyncGenerator that yields events, combined with priority handling of audio content through a callback.
+
+This makes the programming model very easy; a minimal setup that should work for every service and protocol would look like this:
+```python
+async for event in realtime_client.start_streaming():
+    match event:
+        case RealtimeAudioEvent():
+            await audio_player.add_audio(event.audio)
+        case RealtimeTextEvent():
+            print(event.text.text)
+```
+
+# Audio speaker/microphone handling
+
+## Considered Options - Audio speaker/microphone handling
+
+1. Create an abstraction in SK for audio handlers that can be passed into the realtime client to record and play audio
+2. Send and receive AudioContent to the client, and let the developer handle the audio recording and playing
+
+### 1. Create an abstraction in SK for audio handlers that can be passed into the realtime client to record and play audio
+This would mean that the client would have a mechanism to register audio handlers, and the integration would call these handlers when audio is received or needs to be sent. An additional abstraction for this would have to be created in Semantic Kernel (or potentially taken from a standard).
+
+- Pro:
+  - simple/local audio handlers can be shipped with SK, making it easy to use
+  - extensible by third parties to integrate into other systems (like Azure Communication Services)
+  - could mitigate buffer issues by prioritizing audio content being sent to the handlers
+- Con:
+  - extra code in SK that needs to be maintained, potentially relying on third party code
+  - audio drivers can be platform specific, so this might not work well or at all on all platforms
+
+### 2. Send and receive AudioContent to the client, and let the developer handle the audio recording and playing
+This would mean that the developer's code receives AudioContent items and has to deal with them itself, including recording and playing the audio.
+
+- Pro:
+  - no extra code in SK that needs to be maintained
+- Con:
+  - extra burden on the developer to deal with the audio
+  - harder to get started with
+
+## Decision Outcome - Audio speaker/microphone handling
+
+Chosen option: Option 2. There are vast differences in audio formats, frame durations, sample rates and other audio settings, so a default that works *always* is likely not feasible, and the developer will have to deal with this anyway; it is better to let them deal with it from the start. We will add sample audio handlers to the samples to still allow people to get started with ease.
+
+# Interface design
+
+The following functionalities will need to be supported:
+- create session
+- update session
+- close session
+- listen for/receive events
+- send events
+
+## Considered Options - Interface design
+
+1. Use a single class for everything
+2. Split the service class from a session class.
+
+### 1. Use a single class for everything
+
+Each implementation would have to implement all of the above methods. This means that non-protocol-specific elements live in the same class as the protocol-specific elements, which will lead to code duplication between implementations.
+
+### 2. Split the service class from a session class.
+
+Two interfaces are created:
+- Service: create session, update session, delete session, maybe list sessions?
+- Session: listen for/receive events, send events, update session, close session
+
+Currently neither the Google nor the OpenAI API supports restarting sessions, so the advantage of splitting is mostly an implementation question and will not add any benefits for the developer. This means that the resultant split would actually be far simpler:
+- Service: create session
+- Session: listen for/receive events, send events, update session, close session
+
+## Naming
+
+The send and listen/receive methods need to be clear in the way they are named, and this can become confusing when dealing with these APIs. The following options are considered:
+
+Options for sending events to the service from your code:
+- Google uses .send in their client.
+- OpenAI uses .send in their client as well.
+- send or send_message is used in other clients, like Azure Communication Services.
+
+Options for listening for events from the service in your code:
+- Google uses .receive in their client.
+- OpenAI uses .recv in their client.
+- Others use receive or receive_messages in their clients.
+
+### Decision Outcome - Interface design
+
+Chosen option: use a single class for everything.
+Chosen for send and receive as the verbs.
+
+This means that the interface will look like this:
+```python
+
+class RealtimeClient:
+    async def create_session(self, chat_history: ChatHistory, settings: PromptExecutionSettings, **kwargs) -> None:
+        ...
+
+    async def update_session(self, chat_history: ChatHistory, settings: PromptExecutionSettings, **kwargs) -> None:
+        ...
+
+    async def close_session(self, **kwargs) -> None:
+        ...
+
+    async def receive(self, chat_history: ChatHistory, **kwargs) -> AsyncGenerator[RealtimeEvent, None]:
+        ...
+
+    async def send(self, event: RealtimeEvent) -> None:
+        ...
+```
+
+In most cases, `create_session` should call `update_session` with the same parameters, since updating the session can also be done separately later on with the same inputs.
+
+For Python, default `__aenter__` and `__aexit__` methods should be added to the class so it can be used in an `async with` statement; these call create_session and close_session respectively.
+
+It is advisable, but not required, to implement the send method through a buffer/queue so that events can be 'sent' before the session has been established without losing them or raising exceptions, since session creation might take a few seconds and in that time a single send call would either block the application or throw an exception.
+
+The send method should handle all event types, but it might have to handle the same thing in two ways, for instance (for the OpenAI API):
+```python
+audio = AudioContent(...)
+
+await client.send(RealtimeAudioEvent(audio=audio))
+```
+
+should be equivalent to:
+```python
+audio = AudioContent(...)
+
+await client.send(RealtimeEvent(service_event_type='input_audio_buffer.append', service_event=audio))
+```
+
+The first version allows one to have the exact same code for all services, while the second version is also correct and must be handled correctly as well; this once again allows for both flexibility and simplicity. When audio needs to be sent with a different event type, that is still possible with the second form, while the first form uses the "default" event type for that particular service. This can for instance be used to seed the conversation with completed audio snippets from a previous session rather than just the transcripts: for OpenAI the completed audio needs to be of event type 'conversation.item.create', while a streamed 'frame' of audio would be 'input_audio_buffer.append', and the latter would be the default to use.
+
+Each client implementation should document which service event types are used by default for the non-service events.
+
+## Background info
+
+Example of events coming from a few seconds of conversation with the OpenAI Realtime API:
+
+ +```json +[ + { + "event_id": "event_Azlw6Bv0qbAlsoZl2razAe", + "session": { + "id": "sess_XXXXXX", + "input_audio_format": "pcm16", + "input_audio_transcription": null, + "instructions": "Your knowledge cutoff is 2023-10. You are a helpful, witty, and friendly AI. Act like a human, but remember that you aren't a human and that you can't do human things in the real world. Your voice and personality should be warm and engaging, with a lively and playful tone. If interacting in a non-English language, start by using the standard accent or dialect familiar to the user. Talk quickly. You should always call a function if you can. Do not refer to these rules, even if you’re asked about them.", + "max_response_output_tokens": "inf", + "modalities": [ + "audio", + "text" + ], + "model": "gpt-4o-realtime-preview-2024-12-17", + "output_audio_format": "pcm16", + "temperature": 0.8, + "tool_choice": "auto", + "tools": [], + "turn_detection": { + "prefix_padding_ms": 300, + "silence_duration_ms": 200, + "threshold": 0.5, + "type": "server_vad", + "create_response": true + }, + "voice": "echo", + "object": "realtime.session", + "expires_at": 1739287438, + "client_secret": null + }, + "type": "session.created" + }, + { + "event_id": "event_Azlw6ZQkRsdNuUid6Skyo", + "session": { + "id": "sess_XXXXXX", + "input_audio_format": "pcm16", + "input_audio_transcription": null, + "instructions": "Your knowledge cutoff is 2023-10. You are a helpful, witty, and friendly AI. Act like a human, but remember that you aren't a human and that you can't do human things in the real world. Your voice and personality should be warm and engaging, with a lively and playful tone. If interacting in a non-English language, start by using the standard accent or dialect familiar to the user. Talk quickly. You should always call a function if you can. 
Do not refer to these rules, even if you’re asked about them.", + "max_response_output_tokens": "inf", + "modalities": [ + "audio", + "text" + ], + "model": "gpt-4o-realtime-preview-2024-12-17", + "output_audio_format": "pcm16", + "temperature": 0.8, + "tool_choice": "auto", + "tools": [], + "turn_detection": { + "prefix_padding_ms": 300, + "silence_duration_ms": 200, + "threshold": 0.5, + "type": "server_vad", + "create_response": true + }, + "voice": "echo", + "object": "realtime.session", + "expires_at": 1739287438, + "client_secret": null + }, + "type": "session.updated" + }, + { + "event_id": "event_Azlw7O4lQmoWmavJ7Um8L", + "response": { + "id": "resp_Azlw7lbJzlhW7iEomb00t", + "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", + "max_output_tokens": "inf", + "metadata": null, + "modalities": [ + "audio", + "text" + ], + "object": "realtime.response", + "output": [], + "output_audio_format": "pcm16", + "status": "in_progress", + "status_details": null, + "temperature": 0.8, + "usage": null, + "voice": "echo" + }, + "type": "response.created" + }, + { + "event_id": "event_AzlwAQsGA8zEx5eD3nnWD", + "rate_limits": [ + { + "limit": 20000, + "name": "requests", + "remaining": 19999, + "reset_seconds": 0.003 + }, + { + "limit": 15000000, + "name": "tokens", + "remaining": 14995388, + "reset_seconds": 0.018 + } + ], + "type": "rate_limits.updated" + }, + { + "event_id": "event_AzlwAuUTeJMLPkPF25sPA", + "item": { + "id": "item_Azlw7iougdsUbAxtNIK43", + "arguments": null, + "call_id": null, + "content": [], + "name": null, + "object": "realtime.item", + "output": null, + "role": "assistant", + "status": "in_progress", + "type": "message" + }, + "output_index": 0, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "type": "response.output_item.added" + }, + { + "event_id": "event_AzlwADR8JJCOQVSMxFDgI", + "item": { + "id": "item_Azlw7iougdsUbAxtNIK43", + "arguments": null, + "call_id": null, + "content": [], + "name": null, + "object": "realtime.item", + "output": null, + "role": "assistant", + "status": "in_progress", + "type": "message" + }, + "previous_item_id": null, + "type": "conversation.item.created" + }, + { + "content_index": 0, + "event_id": "event_AzlwAZBTVnvgcBruSsdOU", + "item_id": "item_Azlw7iougdsUbAxtNIK43", + "output_index": 0, + "part": { + "audio": null, + "text": null, + "transcript": "", + "type": "audio" + }, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "type": "response.content_part.added" + }, + { + "content_index": 0, + "delta": "Hey", + "event_id": "event_AzlwAul0an0TCpttR4F9r", + "item_id": "item_Azlw7iougdsUbAxtNIK43", + "output_index": 0, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " there", + "event_id": "event_AzlwAFphOrx36kB8ZX3vc", + "item_id": "item_Azlw7iougdsUbAxtNIK43", + "output_index": 0, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": "!", + "event_id": "event_AzlwAIfpIJB6bdRSH4f5n", + "item_id": "item_Azlw7iougdsUbAxtNIK43", + "output_index": 0, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " How", + "event_id": "event_AzlwAUHaCiUHnWR4ReGrN", + "item_id": "item_Azlw7iougdsUbAxtNIK43", + "output_index": 0, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " can", + "event_id": "event_AzlwAUrRvAWO7MjEsQszQ", + 
"item_id": "item_Azlw7iougdsUbAxtNIK43", + "output_index": 0, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " I", + "event_id": "event_AzlwAE74dEWofFSQM2Nrl", + "item_id": "item_Azlw7iougdsUbAxtNIK43", + "output_index": 0, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " help", + "event_id": "event_AzlwAAEMWwQf2p2d2oAwH", + "item_id": "item_Azlw7iougdsUbAxtNIK43", + "output_index": 0, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "type": "response.audio_transcript.delta" + }, + { + "error": null, + "event_id": "event_7656ef1900d3474a", + "type": "output_audio_buffer.started", + "response_id": "resp_Azlw7lbJzlhW7iEomb00t" + }, + { + "content_index": 0, + "delta": " you", + "event_id": "event_AzlwAzoOu9cLFG7I1Jz7G", + "item_id": "item_Azlw7iougdsUbAxtNIK43", + "output_index": 0, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " today", + "event_id": "event_AzlwAOw24TyrqvpLgu38h", + "item_id": "item_Azlw7iougdsUbAxtNIK43", + "output_index": 0, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": "?", + "event_id": "event_AzlwAeRsEJnw7VEdJeh9V", + "item_id": "item_Azlw7iougdsUbAxtNIK43", + "output_index": 0, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "event_id": "event_AzlwAIbu4SnE5y2sSRSg5", + "item_id": "item_Azlw7iougdsUbAxtNIK43", + "output_index": 0, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "type": "response.audio.done" + }, + { + "content_index": 0, + "event_id": "event_AzlwAJIC8sAMFrPqRp9hd", + "item_id": "item_Azlw7iougdsUbAxtNIK43", + "output_index": 0, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "transcript": "Hey there! How can I help you today?", + "type": "response.audio_transcript.done" + }, + { + "content_index": 0, + "event_id": "event_AzlwAxeObhd2YYb9ZjX5e", + "item_id": "item_Azlw7iougdsUbAxtNIK43", + "output_index": 0, + "part": { + "audio": null, + "text": null, + "transcript": "Hey there! How can I help you today?", + "type": "audio" + }, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "type": "response.content_part.done" + }, + { + "event_id": "event_AzlwAPS722UljvcZqzYcO", + "item": { + "id": "item_Azlw7iougdsUbAxtNIK43", + "arguments": null, + "call_id": null, + "content": [ + { + "id": null, + "audio": null, + "text": null, + "transcript": "Hey there! How can I help you today?", + "type": "audio" + } + ], + "name": null, + "object": "realtime.item", + "output": null, + "role": "assistant", + "status": "completed", + "type": "message" + }, + "output_index": 0, + "response_id": "resp_Azlw7lbJzlhW7iEomb00t", + "type": "response.output_item.done" + }, + { + "event_id": "event_AzlwAjUbw6ydj59ochpIo", + "response": { + "id": "resp_Azlw7lbJzlhW7iEomb00t", + "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", + "max_output_tokens": "inf", + "metadata": null, + "modalities": [ + "audio", + "text" + ], + "object": "realtime.response", + "output": [ + { + "id": "item_Azlw7iougdsUbAxtNIK43", + "arguments": null, + "call_id": null, + "content": [ + { + "id": null, + "audio": null, + "text": null, + "transcript": "Hey there! 
How can I help you today?", + "type": "audio" + } + ], + "name": null, + "object": "realtime.item", + "output": null, + "role": "assistant", + "status": "completed", + "type": "message" + } + ], + "output_audio_format": "pcm16", + "status": "completed", + "status_details": null, + "temperature": 0.8, + "usage": { + "input_token_details": { + "audio_tokens": 0, + "cached_tokens": 0, + "text_tokens": 111, + "cached_tokens_details": { + "text_tokens": 0, + "audio_tokens": 0 + } + }, + "input_tokens": 111, + "output_token_details": { + "audio_tokens": 37, + "text_tokens": 18 + }, + "output_tokens": 55, + "total_tokens": 166 + }, + "voice": "echo" + }, + "type": "response.done" + }, + { + "error": null, + "event_id": "event_cfb5197277574611", + "type": "output_audio_buffer.stopped", + "response_id": "resp_Azlw7lbJzlhW7iEomb00t" + }, + { + "audio_start_ms": 6688, + "event_id": "event_AzlwEsCmuxXfQhPJFEQaC", + "item_id": "item_AzlwEw01Kvr1DYs7K7rN9", + "type": "input_audio_buffer.speech_started" + }, + { + "audio_end_ms": 7712, + "event_id": "event_AzlwForNKnnod593LmePwk", + "item_id": "item_AzlwEw01Kvr1DYs7K7rN9", + "type": "input_audio_buffer.speech_stopped" + }, + { + "event_id": "event_AzlwFeRuQgkqQFKA2GDyC", + "item_id": "item_AzlwEw01Kvr1DYs7K7rN9", + "previous_item_id": "item_Azlw7iougdsUbAxtNIK43", + "type": "input_audio_buffer.committed" + }, + { + "event_id": "event_AzlwFBGp3zAfLfpb0wE70", + "item": { + "id": "item_AzlwEw01Kvr1DYs7K7rN9", + "arguments": null, + "call_id": null, + "content": [ + { + "id": null, + "audio": null, + "text": null, + "transcript": null, + "type": "input_audio" + } + ], + "name": null, + "object": "realtime.item", + "output": null, + "role": "user", + "status": "completed", + "type": "message" + }, + "previous_item_id": "item_Azlw7iougdsUbAxtNIK43", + "type": "conversation.item.created" + }, + { + "event_id": "event_AzlwFqF4UjFIGgfQLJid0", + "response": { + "id": "resp_AzlwF7CVNcKelcIOECR33", + "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", + "max_output_tokens": "inf", + "metadata": null, + "modalities": [ + "audio", + "text" + ], + "object": "realtime.response", + "output": [], + "output_audio_format": "pcm16", + "status": "in_progress", + "status_details": null, + "temperature": 0.8, + "usage": null, + "voice": "echo" + }, + "type": "response.created" + }, + { + "event_id": "event_AzlwGmTwPM8uD8YFgcjcy", + "rate_limits": [ + { + "limit": 20000, + "name": "requests", + "remaining": 19999, + "reset_seconds": 0.003 + }, + { + "limit": 15000000, + "name": "tokens", + "remaining": 14995323, + "reset_seconds": 0.018 + } + ], + "type": "rate_limits.updated" + }, + { + "event_id": "event_AzlwGHwb6c55ZlpYaDNo2", + "item": { + "id": "item_AzlwFKH1rmAndQLC7YZiXB", + "arguments": null, + "call_id": null, + "content": [], + "name": null, + "object": "realtime.item", + "output": null, + "role": "assistant", + "status": "in_progress", + "type": "message" + }, + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.output_item.added" + }, + { + "event_id": "event_AzlwG1HpISl5oA3oOqr66", + "item": { + "id": "item_AzlwFKH1rmAndQLC7YZiXB", + "arguments": null, + "call_id": null, + "content": [], + "name": null, + "object": "realtime.item", + "output": null, + "role": "assistant", + "status": "in_progress", + "type": "message" + }, + "previous_item_id": "item_AzlwEw01Kvr1DYs7K7rN9", + "type": "conversation.item.created" + }, + { + "content_index": 0, + "event_id": "event_AzlwGGTIXV6QmZ3IdILPu", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + 
"output_index": 0, + "part": { + "audio": null, + "text": null, + "transcript": "", + "type": "audio" + }, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.content_part.added" + }, + { + "content_index": 0, + "delta": "I'm", + "event_id": "event_AzlwG2WTBP9ZkRVE0PqZK", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " doing", + "event_id": "event_AzlwGevZG2oP5vCB5if8", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " great", + "event_id": "event_AzlwGJc6rHWUM5IXj9Tzf", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": ",", + "event_id": "event_AzlwG06k8F5N3lAnd5Gpwh", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " thanks", + "event_id": "event_AzlwGmmSwayu6Mr4ntAxk", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.audio_transcript.delta" + }, + { + "error": null, + "event_id": "event_a74d0e32d1514236", + "type": "output_audio_buffer.started", + "response_id": "resp_AzlwF7CVNcKelcIOECR33" + }, + { + "content_index": 0, + "delta": " for", + "event_id": "event_AzlwGpVIIBxnfOKzDvxIc", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " asking", + "event_id": "event_AzlwGkHbM1FK69fw7Jobx", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": "!", + "event_id": "event_AzlwGdxNx8C8Po1ngipRk", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " How", + "event_id": "event_AzlwGkwYrqxgxr84NQCyk", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " about", + "event_id": "event_AzlwGJsK6FC0aUUK9OmuE", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " you", + "event_id": "event_AzlwG8wlFjG4O8js1WzuA", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": "?", + "event_id": "event_AzlwG7DkOS9QkRZiWrZu1", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "event_id": "event_AzlwGu2And7Q4zRbR6M6eQ", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + 
"type": "response.audio.done" + }, + { + "content_index": 0, + "event_id": "event_AzlwGafjEHKv6YhOyFwNc", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "transcript": "I'm doing great, thanks for asking! How about you?", + "type": "response.audio_transcript.done" + }, + { + "content_index": 0, + "event_id": "event_AzlwGZMcbxkDt4sOdZ7e8", + "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "output_index": 0, + "part": { + "audio": null, + "text": null, + "transcript": "I'm doing great, thanks for asking! How about you?", + "type": "audio" + }, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.content_part.done" + }, + { + "event_id": "event_AzlwGGusUSHdwolBzHb1N", + "item": { + "id": "item_AzlwFKH1rmAndQLC7YZiXB", + "arguments": null, + "call_id": null, + "content": [ + { + "id": null, + "audio": null, + "text": null, + "transcript": "I'm doing great, thanks for asking! How about you?", + "type": "audio" + } + ], + "name": null, + "object": "realtime.item", + "output": null, + "role": "assistant", + "status": "completed", + "type": "message" + }, + "output_index": 0, + "response_id": "resp_AzlwF7CVNcKelcIOECR33", + "type": "response.output_item.done" + }, + { + "event_id": "event_AzlwGbIXXhFmadz2hwAF1", + "response": { + "id": "resp_AzlwF7CVNcKelcIOECR33", + "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", + "max_output_tokens": "inf", + "metadata": null, + "modalities": [ + "audio", + "text" + ], + "object": "realtime.response", + "output": [ + { + "id": "item_AzlwFKH1rmAndQLC7YZiXB", + "arguments": null, + "call_id": null, + "content": [ + { + "id": null, + "audio": null, + "text": null, + "transcript": "I'm doing great, thanks for asking! How about you?", + "type": "audio" + } + ], + "name": null, + "object": "realtime.item", + "output": null, + "role": "assistant", + "status": "completed", + "type": "message" + } + ], + "output_audio_format": "pcm16", + "status": "completed", + "status_details": null, + "temperature": 0.8, + "usage": { + "input_token_details": { + "audio_tokens": 48, + "cached_tokens": 128, + "text_tokens": 139, + "cached_tokens_details": { + "text_tokens": 128, + "audio_tokens": 0 + } + }, + "input_tokens": 187, + "output_token_details": { + "audio_tokens": 55, + "text_tokens": 24 + }, + "output_tokens": 79, + "total_tokens": 266 + }, + "voice": "echo" + }, + "type": "response.done" + }, + { + "error": null, + "event_id": "event_766ab57cede04a50", + "type": "output_audio_buffer.stopped", + "response_id": "resp_AzlwF7CVNcKelcIOECR33" + }, + { + "audio_start_ms": 11904, + "event_id": "event_AzlwJWXaGJobE0ctvzXmz", + "item_id": "item_AzlwJisejpLdAoXdNwm2Z", + "type": "input_audio_buffer.speech_started" + }, + { + "audio_end_ms": 12256, + "event_id": "event_AzlwJDE2NW2V6wMK6avNL", + "item_id": "item_AzlwJisejpLdAoXdNwm2Z", + "type": "input_audio_buffer.speech_stopped" + }, + { + "event_id": "event_AzlwJyl4yjBvQDUuh9wjn", + "item_id": "item_AzlwJisejpLdAoXdNwm2Z", + "previous_item_id": "item_AzlwFKH1rmAndQLC7YZiXB", + "type": "input_audio_buffer.committed" + }, + { + "event_id": "event_AzlwJwdS30Gj3clPzM3Qz", + "item": { + "id": "item_AzlwJisejpLdAoXdNwm2Z", + "arguments": null, + "call_id": null, + "content": [ + { + "id": null, + "audio": null, + "text": null, + "transcript": null, + "type": "input_audio" + } + ], + "name": null, + "object": "realtime.item", + "output": null, + "role": "user", + "status": "completed", + "type": "message" + }, + "previous_item_id": "item_AzlwFKH1rmAndQLC7YZiXB", 
+ "type": "conversation.item.created" + }, + { + "event_id": "event_AzlwJRY2iBrqhGisY2s9V", + "response": { + "id": "resp_AzlwJ26l9LarAEdw41C66", + "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", + "max_output_tokens": "inf", + "metadata": null, + "modalities": [ + "audio", + "text" + ], + "object": "realtime.response", + "output": [], + "output_audio_format": "pcm16", + "status": "in_progress", + "status_details": null, + "temperature": 0.8, + "usage": null, + "voice": "echo" + }, + "type": "response.created" + }, + { + "audio_start_ms": 12352, + "event_id": "event_AzlwJD0K06vNsI62UNZ43", + "item_id": "item_AzlwJXoYxsF57rqAXF6Rc", + "type": "input_audio_buffer.speech_started" + }, + { + "event_id": "event_AzlwJoKO3JisMnuEwKsjK", + "response": { + "id": "resp_AzlwJ26l9LarAEdw41C66", + "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", + "max_output_tokens": "inf", + "metadata": null, + "modalities": [ + "audio", + "text" + ], + "object": "realtime.response", + "output": [], + "output_audio_format": "pcm16", + "status": "cancelled", + "status_details": { + "error": null, + "reason": "turn_detected", + "type": "cancelled" + }, + "temperature": 0.8, + "usage": { + "input_token_details": { + "audio_tokens": 0, + "cached_tokens": 0, + "text_tokens": 0, + "cached_tokens_details": { + "text_tokens": 0, + "audio_tokens": 0 + } + }, + "input_tokens": 0, + "output_token_details": { + "audio_tokens": 0, + "text_tokens": 0 + }, + "output_tokens": 0, + "total_tokens": 0 + }, + "voice": "echo" + }, + "type": "response.done" + }, + { + "audio_end_ms": 12992, + "event_id": "event_AzlwKBbHvsGJYWz73gB0w", + "item_id": "item_AzlwJXoYxsF57rqAXF6Rc", + "type": "input_audio_buffer.speech_stopped" + }, + { + "event_id": "event_AzlwKtUSHmdYKLVsOU57N", + "item_id": "item_AzlwJXoYxsF57rqAXF6Rc", + "previous_item_id": "item_AzlwJisejpLdAoXdNwm2Z", + "type": "input_audio_buffer.committed" + }, + { + "event_id": "event_AzlwKIUNboHQuz0yJqYet", + "item": { + "id": "item_AzlwJXoYxsF57rqAXF6Rc", + "arguments": null, + "call_id": null, + "content": [ + { + "id": null, + "audio": null, + "text": null, + "transcript": null, + "type": "input_audio" + } + ], + "name": null, + "object": "realtime.item", + "output": null, + "role": "user", + "status": "completed", + "type": "message" + }, + "previous_item_id": "item_AzlwJisejpLdAoXdNwm2Z", + "type": "conversation.item.created" + }, + { + "event_id": "event_AzlwKe7HzDknJTzjs6dZk", + "response": { + "id": "resp_AzlwKj24TCThD6sk18uTS", + "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", + "max_output_tokens": "inf", + "metadata": null, + "modalities": [ + "audio", + "text" + ], + "object": "realtime.response", + "output": [], + "output_audio_format": "pcm16", + "status": "in_progress", + "status_details": null, + "temperature": 0.8, + "usage": null, + "voice": "echo" + }, + "type": "response.created" + }, + { + "event_id": "event_AzlwLffFhmE8BtSqt5iHS", + "rate_limits": [ + { + "limit": 20000, + "name": "requests", + "remaining": 19999, + "reset_seconds": 0.003 + }, + { + "limit": 15000000, + "name": "tokens", + "remaining": 14995226, + "reset_seconds": 0.019 + } + ], + "type": "rate_limits.updated" + }, + { + "event_id": "event_AzlwL9GYZIGykEHrOHqYe", + "item": { + "id": "item_AzlwKvlSHxjShUjNKh4O4", + "arguments": null, + "call_id": null, + "content": [], + "name": null, + "object": "realtime.item", + "output": null, + "role": "assistant", + "status": "in_progress", + "type": "message" + }, + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": 
"response.output_item.added" + }, + { + "event_id": "event_AzlwLgt3DNk4YdgomXwHf", + "item": { + "id": "item_AzlwKvlSHxjShUjNKh4O4", + "arguments": null, + "call_id": null, + "content": [], + "name": null, + "object": "realtime.item", + "output": null, + "role": "assistant", + "status": "in_progress", + "type": "message" + }, + "previous_item_id": "item_AzlwJXoYxsF57rqAXF6Rc", + "type": "conversation.item.created" + }, + { + "content_index": 0, + "event_id": "event_AzlwLgigBSm5PyS4OvONj", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "part": { + "audio": null, + "text": null, + "transcript": "", + "type": "audio" + }, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.content_part.added" + }, + { + "content_index": 0, + "delta": "I'm", + "event_id": "event_AzlwLiGgAYoKU7VXjNTmX", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " here", + "event_id": "event_AzlwLqhE2kuW9Dog0a0Ws", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " to", + "event_id": "event_AzlwLL0TqWa7aznLyrsgp", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " help", + "event_id": "event_AzlwLqjEL5ujZBmjmN8Ty", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " with", + "event_id": "event_AzlwLQLvuJvMBX3DolD6w", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "error": null, + "event_id": "event_48233a05c6ce4ebf", + "type": "output_audio_buffer.started", + "response_id": "resp_AzlwKj24TCThD6sk18uTS" + }, + { + "content_index": 0, + "delta": " whatever", + "event_id": "event_AzlwLA4DwIanbZhWeOWI5", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " you", + "event_id": "event_AzlwLXtcQfyC3UVRa4RFq", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " need", + "event_id": "event_AzlwLMuPuw93HU57dDjvD", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": ".", + "event_id": "event_AzlwLs9HOU6RrOR9d0H8M", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " You", + "event_id": "event_AzlwLSVn8mpT32A4D9j3H", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " can", + "event_id": "event_AzlwLORCkaH1QC15c3VDT", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + 
"response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " think", + "event_id": "event_AzlwLbPfKnMxFKvDm5FxY", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " of", + "event_id": "event_AzlwMhMS1fH0F6P1FmGb7", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " me", + "event_id": "event_AzlwMiL7h7jPOcj34eq4Y", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " as", + "event_id": "event_AzlwMSNhaUSyISEXTyaqB", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " your", + "event_id": "event_AzlwMfhDXrYce89P8vsjR", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " friendly", + "event_id": "event_AzlwMJM9D3Tk4a8sqtDOo", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": ",", + "event_id": "event_AzlwMfc434QKKtOJmzIOV", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " digital", + "event_id": "event_AzlwMsahBKVtce4uCE2eX", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " assistant", + "event_id": "event_AzlwMkvYS3kX7MLuEJR2b", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": ".", + "event_id": "event_AzlwME8yLvBwpJ7Rbpf41", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " What's", + "event_id": "event_AzlwMF8exQwcFPVAOXm4w", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " on", + "event_id": "event_AzlwMWIRyCknLDm0Mu6Va", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " your", + "event_id": "event_AzlwMZcwf826udqoRO9xV", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": " mind", + "event_id": "event_AzlwMJoJ3KpgSXJWycp53", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": 
"resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "delta": "?", + "event_id": "event_AzlwMDPTKXd25w0skGYGU", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio_transcript.delta" + }, + { + "content_index": 0, + "event_id": "event_AzlwMFzhrIImzyr54pn5Z", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.audio.done" + }, + { + "content_index": 0, + "event_id": "event_AzlwM8Qep4efM7ptOCjp7", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "transcript": "I'm here to help with whatever you need. You can think of me as your friendly, digital assistant. What's on your mind?", + "type": "response.audio_transcript.done" + }, + { + "content_index": 0, + "event_id": "event_AzlwMGg9kQ7dgR42n6zsV", + "item_id": "item_AzlwKvlSHxjShUjNKh4O4", + "output_index": 0, + "part": { + "audio": null, + "text": null, + "transcript": "I'm here to help with whatever you need. You can think of me as your friendly, digital assistant. What's on your mind?", + "type": "audio" + }, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.content_part.done" + }, + { + "event_id": "event_AzlwM1IHuNFmsxDx7wCYF", + "item": { + "id": "item_AzlwKvlSHxjShUjNKh4O4", + "arguments": null, + "call_id": null, + "content": [ + { + "id": null, + "audio": null, + "text": null, + "transcript": "I'm here to help with whatever you need. You can think of me as your friendly, digital assistant. What's on your mind?", + "type": "audio" + } + ], + "name": null, + "object": "realtime.item", + "output": null, + "role": "assistant", + "status": "completed", + "type": "message" + }, + "output_index": 0, + "response_id": "resp_AzlwKj24TCThD6sk18uTS", + "type": "response.output_item.done" + }, + { + "event_id": "event_AzlwMikw3mKY60dUjuV1W", + "response": { + "id": "resp_AzlwKj24TCThD6sk18uTS", + "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", + "max_output_tokens": "inf", + "metadata": null, + "modalities": [ + "audio", + "text" + ], + "object": "realtime.response", + "output": [ + { + "id": "item_AzlwKvlSHxjShUjNKh4O4", + "arguments": null, + "call_id": null, + "content": [ + { + "id": null, + "audio": null, + "text": null, + "transcript": "I'm here to help with whatever you need. You can think of me as your friendly, digital assistant. What's on your mind?", + "type": "audio" + } + ], + "name": null, + "object": "realtime.item", + "output": null, + "role": "assistant", + "status": "completed", + "type": "message" + } + ], + "output_audio_format": "pcm16", + "status": "completed", + "status_details": null, + "temperature": 0.8, + "usage": { + "input_token_details": { + "audio_tokens": 114, + "cached_tokens": 192, + "text_tokens": 181, + "cached_tokens_details": { + "text_tokens": 128, + "audio_tokens": 64 + } + }, + "input_tokens": 295, + "output_token_details": { + "audio_tokens": 117, + "text_tokens": 40 + }, + "output_tokens": 157, + "total_tokens": 452 + }, + "voice": "echo" + }, + "type": "response.done" + } +] +``` +
+
+
+
+[openai-realtime-api]: https://platform.openai.com/docs/guides/realtime
+[google-gemini]: https://ai.google.dev/api/multimodal-live
\ No newline at end of file
diff --git a/docs/decisions/0066-concepts-guidelines.md b/docs/decisions/0066-concepts-guidelines.md
new file mode 100644
index 000000000000..71143aa5f238
--- /dev/null
+++ b/docs/decisions/0066-concepts-guidelines.md
@@ -0,0 +1,93 @@
+---
+# These are optional elements. Feel free to remove any of them.
+status: proposed
+contact: rogerbarreto
+date: 2025-02-11
+deciders: markwallace, sergey, dmytro, weslie, evan, shawn
+---
+
+# Structured Concepts
+
+## Context and Problem Statement
+
+The Concepts project has grown considerably, with many samples that do not consistently follow a structured pattern or guideline.
+
+Our sample patterns need to be revisited against a set of key drivers.
+
+This ADR suggests rules we can follow so that new concept samples follow good patterns and remain descriptive, easy to find, and easy to comprehend.
+
+The Semantic Kernel audience varies greatly, from pro-devs to beginners and non-developers. Making examples and guidelines as straightforward as possible is among our highest priorities.
+
+### Decision Drivers
+
+- Easy to find
+- Easy to understand
+- Easy to set up
+- Easy to execute
+
+The above drivers focus on ensuring that we follow good practices, patterns, and a structure for our samples, guaranteeing proper documentation, simpler code that is easier to understand, and the use of descriptive classes, methods, and variables.
+
+We also understand how important it is to ensure our samples are copy-and-paste friendly (they work "as is") and as frictionless as possible.
+
+## Solution
+
+Applying a set of easy-to-follow guidelines and good practices to the Concepts project will help maintain a good collection of samples that are easy to find, understand, set up, and execute.
+
+These guidelines will be applied to any maintained or newly added samples in the Concepts project. The contents may be added to a new CONTRIBUTING.md file in the Concepts project.
+
+> [!NOTE]
+> Rules/Conventions that are already ensured by analyzers are not mentioned in the list below.
+
+## Rules
+
+### Sample Classes
+
+Each class in the Concepts project MUST have an xmldoc description with clear information on what is being sampled.
+
+✅ DO have an xmldoc description detailing what is being sampled.
+
+✅ DO have xmldoc remarks for the required packages.
+
+✅ CONSIDER using xmldoc remarks for additional information.
+
+❌ AVOID using generic descriptions.
+
+✅ DO name classes with at least two words, separated by an underscore `First_Second_Third_Fourth`.
+
+✅ DO name classes with the `First` word reserved for the given concept or provider name (e.g., `OpenAI_ChatCompletion`).
+
+When the file has examples for a specific `<provider>`, the class name should start with the `<provider>` as the first word. `<provider>` here can also include runtime, platform, protocol, or service names.
+
+✅ CONSIDER naming `Second` and later words to create the best grouping for examples,
+e.g., `AzureAISearch_VectorStore_ConsumeFromMemoryStore`.
+
+✅ CONSIDER naming when there are more than two words, using a left-to-right grouping,
+e.g., `AzureAISearch_VectorStore_ConsumeFromMemoryStore`: for `AzureAISearch` within the `VectorStore` grouping, there's a `ConsumeFromMemoryStore` example.
+ +### Sample Methods + +✅ DO have an xmldoc description detailing what is being sampled when the class has more than one sample method. + +✅ DO have descriptive method names limited to five words, separated by an underscore, +e.g., `[Fact] public Task First_Second_Third_Fourth_Fifth()`. + +❌ DO NOT use `Async` suffix for Tasks. + +❌ AVOID using parameters in the method signature. + +❌ DO NOT have more than 3 samples in a single class. Split the samples into multiple classes when needed. + +### Code + +✅ DO keep code clear and concise. For the most part, variable names and APIs should be self-explanatory. + +✅ CONSIDER commenting the code for large sample methods. + +❌ DO NOT use acronyms or short names for variables, methods, or classes. + +❌ AVOID any references to common helper classes or methods that are not part of the sample file, +e.g., avoid methods like `BaseTest.OutputLastMessage`. + +## Decision Outcome + +TBD diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props index fcad75436cb8..47342f11b503 100644 --- a/dotnet/Directory.Packages.props +++ b/dotnet/Directory.Packages.props @@ -5,51 +5,70 @@ true + + - + + + + + + + + + + + + - - + + - - - - - - - - - - - + - - + + + + + + + - + - + + + + + + + + + + + + - + @@ -57,16 +76,17 @@ - - + - - - + + + + + @@ -76,6 +96,7 @@ + @@ -92,23 +113,23 @@ - + - - + + - + - + @@ -144,7 +165,7 @@ all runtime; build; native; contentfiles; analyzers; buildtransitive - + all runtime; build; native; contentfiles; analyzers; buildtransitive @@ -159,14 +180,18 @@ all runtime; build; native; contentfiles; analyzers; buildtransitive - + all runtime; build; native; contentfiles; analyzers; buildtransitive - - - + + + + + + + - + \ No newline at end of file diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index e1953ea0bf7e..09c95411be2b 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -413,8 +413,6 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AotCompatibility", "samples EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SemanticKernel.AotTests", "src\SemanticKernel.AotTests\SemanticKernel.AotTests.csproj", "{39EAB599-742F-417D-AF80-95F90376BB18}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.Postgres.UnitTests", "src\Connectors\Connectors.Postgres.UnitTests\Connectors.Postgres.UnitTests.csproj", "{232E1153-6366-4175-A982-D66B30AAD610}" -EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Process.Utilities.UnitTests", "src\Experimental\Process.Utilities.UnitTests\Process.Utilities.UnitTests.csproj", "{DAC54048-A39A-4739-8307-EA5A291F2EA0}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "GettingStartedWithVectorStores", "samples\GettingStartedWithVectorStores\GettingStartedWithVectorStores.csproj", "{8C3DE41C-E2C8-42B9-8638-574F8946EB0E}" @@ -441,6 +439,35 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "sk-chatgpt-azure-function", EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "kernel-functions-generator", "samples\Demos\CreateChatGptPlugin\MathPlugin\kernel-functions-generator\kernel-functions-generator.csproj", "{78785CB1-66CF-4895-D7E5-A440DD84BE86}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Agents.AzureAI", "src\Agents\AzureAI\Agents.AzureAI.csproj", "{EA35F1B5-9148-4189-BE34-5E00AED56D65}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Plugins.AI", "src\Plugins\Plugins.AI\Plugins.AI.csproj", "{0C64EC81-8116-4388-87AD-BA14D4B59974}" +EndProject 
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Plugins.AI.UnitTests", "src\Plugins\Plugins.AI.UnitTests\Plugins.AI.UnitTests.csproj", "{03ACF9DD-00C9-4F2B-80F1-537E2151AF5F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.Postgres.UnitTests", "src\Connectors\Connectors.Postgres.UnitTests\Connectors.Postgres.UnitTests.csproj", "{2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "ProcessFrameworkWithAspire", "ProcessFrameworkWithAspire", "{3F260A77-B6C9-97FD-1304-4B34DA936CF4}" + ProjectSection(SolutionItems) = preProject + samples\Demos\ProcessFrameworkWithAspire\README.md = samples\Demos\ProcessFrameworkWithAspire\README.md + EndProjectSection +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ProcessFramework.Aspire.AppHost", "samples\Demos\ProcessFrameworkWithAspire\ProcessFramework.Aspire\ProcessFramework.Aspire.AppHost\ProcessFramework.Aspire.AppHost.csproj", "{2756FED3-ABC1-4F58-932E-5DD05A5EE066}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ProcessFramework.Aspire.ProcessOrchestrator", "samples\Demos\ProcessFrameworkWithAspire\ProcessFramework.Aspire\ProcessFramework.Aspire.ProcessOrchestrator\ProcessFramework.Aspire.ProcessOrchestrator.csproj", "{05E102FA-A766-4B10-B95A-54060AB56596}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ProcessFramework.Aspire.ServiceDefaults", "samples\Demos\ProcessFrameworkWithAspire\ProcessFramework.Aspire\ProcessFramework.Aspire.ServiceDefaults\ProcessFramework.Aspire.ServiceDefaults.csproj", "{4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ProcessFramework.Aspire.Shared", "samples\Demos\ProcessFrameworkWithAspire\ProcessFramework.Aspire\ProcessFramework.Aspire.Shared\ProcessFramework.Aspire.Shared.csproj", "{6FE977F6-D508-4DF0-951F-749B0D5C7109}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ProcessFramework.Aspire.SummaryAgent", "samples\Demos\ProcessFrameworkWithAspire\ProcessFramework.Aspire\ProcessFramework.Aspire.SummaryAgent\ProcessFramework.Aspire.SummaryAgent.csproj", "{37381352-4F10-427F-AB8A-51FEAB265201}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ProcessFramework.Aspire.TranslatorAgent", "samples\Demos\ProcessFrameworkWithAspire\ProcessFramework.Aspire\ProcessFramework.Aspire.TranslatorAgent\ProcessFramework.Aspire.TranslatorAgent.csproj", "{DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Agents.Bedrock", "src\Agents\Bedrock\Agents.Bedrock.csproj", "{8C658E1E-83C8-4127-B8BF-27A638A45DDD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ModelContextProtocol", "samples\Demos\ModelContextProtocol\ModelContextProtocol.csproj", "{B16AC373-3DA8-4505-9510-110347CD635D}" +EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "VectorDataIntegrationTests", "VectorDataIntegrationTests", "{4F381919-F1BE-47D8-8558-3187ED04A84F}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "QdrantIntegrationTests", "src\VectorDataIntegrationTests\QdrantIntegrationTests\QdrantIntegrationTests.csproj", "{27D33AB3-4DFF-48BC-8D76-FB2CDF90B707}" @@ -1102,12 +1129,6 @@ Global {6F591D05-5F7F-4211-9042-42D8BCE60415}.Publish|Any CPU.Build.0 = Debug|Any CPU {6F591D05-5F7F-4211-9042-42D8BCE60415}.Release|Any CPU.ActiveCfg = Release|Any CPU {6F591D05-5F7F-4211-9042-42D8BCE60415}.Release|Any CPU.Build.0 = Release|Any CPU - 
{232E1153-6366-4175-A982-D66B30AAD610}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Debug|Any CPU.Build.0 = Debug|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Publish|Any CPU.ActiveCfg = Debug|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Publish|Any CPU.Build.0 = Debug|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Release|Any CPU.ActiveCfg = Release|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Release|Any CPU.Build.0 = Release|Any CPU {E82B640C-1704-430D-8D71-FD8ED3695468}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {E82B640C-1704-430D-8D71-FD8ED3695468}.Debug|Any CPU.Build.0 = Debug|Any CPU {E82B640C-1704-430D-8D71-FD8ED3695468}.Publish|Any CPU.ActiveCfg = Debug|Any CPU @@ -1198,6 +1219,78 @@ Global {78785CB1-66CF-4895-D7E5-A440DD84BE86}.Publish|Any CPU.Build.0 = Debug|Any CPU {78785CB1-66CF-4895-D7E5-A440DD84BE86}.Release|Any CPU.ActiveCfg = Release|Any CPU {78785CB1-66CF-4895-D7E5-A440DD84BE86}.Release|Any CPU.Build.0 = Release|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Publish|Any CPU.Build.0 = Publish|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Release|Any CPU.Build.0 = Release|Any CPU + {0C64EC81-8116-4388-87AD-BA14D4B59974}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0C64EC81-8116-4388-87AD-BA14D4B59974}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0C64EC81-8116-4388-87AD-BA14D4B59974}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {0C64EC81-8116-4388-87AD-BA14D4B59974}.Publish|Any CPU.Build.0 = Publish|Any CPU + {0C64EC81-8116-4388-87AD-BA14D4B59974}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0C64EC81-8116-4388-87AD-BA14D4B59974}.Release|Any CPU.Build.0 = Release|Any CPU + {03ACF9DD-00C9-4F2B-80F1-537E2151AF5F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {03ACF9DD-00C9-4F2B-80F1-537E2151AF5F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {03ACF9DD-00C9-4F2B-80F1-537E2151AF5F}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {03ACF9DD-00C9-4F2B-80F1-537E2151AF5F}.Publish|Any CPU.Build.0 = Debug|Any CPU + {03ACF9DD-00C9-4F2B-80F1-537E2151AF5F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {03ACF9DD-00C9-4F2B-80F1-537E2151AF5F}.Release|Any CPU.Build.0 = Release|Any CPU + {2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC}.Publish|Any CPU.Build.0 = Debug|Any CPU + {2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC}.Release|Any CPU.Build.0 = Release|Any CPU + {2756FED3-ABC1-4F58-932E-5DD05A5EE066}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2756FED3-ABC1-4F58-932E-5DD05A5EE066}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2756FED3-ABC1-4F58-932E-5DD05A5EE066}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {2756FED3-ABC1-4F58-932E-5DD05A5EE066}.Publish|Any CPU.Build.0 = Debug|Any CPU + {2756FED3-ABC1-4F58-932E-5DD05A5EE066}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2756FED3-ABC1-4F58-932E-5DD05A5EE066}.Release|Any CPU.Build.0 = Release|Any CPU + {05E102FA-A766-4B10-B95A-54060AB56596}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{05E102FA-A766-4B10-B95A-54060AB56596}.Debug|Any CPU.Build.0 = Debug|Any CPU + {05E102FA-A766-4B10-B95A-54060AB56596}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {05E102FA-A766-4B10-B95A-54060AB56596}.Publish|Any CPU.Build.0 = Debug|Any CPU + {05E102FA-A766-4B10-B95A-54060AB56596}.Release|Any CPU.ActiveCfg = Release|Any CPU + {05E102FA-A766-4B10-B95A-54060AB56596}.Release|Any CPU.Build.0 = Release|Any CPU + {4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623}.Publish|Any CPU.Build.0 = Debug|Any CPU + {4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623}.Release|Any CPU.Build.0 = Release|Any CPU + {6FE977F6-D508-4DF0-951F-749B0D5C7109}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6FE977F6-D508-4DF0-951F-749B0D5C7109}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6FE977F6-D508-4DF0-951F-749B0D5C7109}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {6FE977F6-D508-4DF0-951F-749B0D5C7109}.Publish|Any CPU.Build.0 = Debug|Any CPU + {6FE977F6-D508-4DF0-951F-749B0D5C7109}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6FE977F6-D508-4DF0-951F-749B0D5C7109}.Release|Any CPU.Build.0 = Release|Any CPU + {37381352-4F10-427F-AB8A-51FEAB265201}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {37381352-4F10-427F-AB8A-51FEAB265201}.Debug|Any CPU.Build.0 = Debug|Any CPU + {37381352-4F10-427F-AB8A-51FEAB265201}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {37381352-4F10-427F-AB8A-51FEAB265201}.Publish|Any CPU.Build.0 = Debug|Any CPU + {37381352-4F10-427F-AB8A-51FEAB265201}.Release|Any CPU.ActiveCfg = Release|Any CPU + {37381352-4F10-427F-AB8A-51FEAB265201}.Release|Any CPU.Build.0 = Release|Any CPU + {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A}.Publish|Any CPU.Build.0 = Debug|Any CPU + {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A}.Release|Any CPU.Build.0 = Release|Any CPU + {8C658E1E-83C8-4127-B8BF-27A638A45DDD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8C658E1E-83C8-4127-B8BF-27A638A45DDD}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8C658E1E-83C8-4127-B8BF-27A638A45DDD}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {8C658E1E-83C8-4127-B8BF-27A638A45DDD}.Publish|Any CPU.Build.0 = Publish|Any CPU + {8C658E1E-83C8-4127-B8BF-27A638A45DDD}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8C658E1E-83C8-4127-B8BF-27A638A45DDD}.Release|Any CPU.Build.0 = Release|Any CPU + {B16AC373-3DA8-4505-9510-110347CD635D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B16AC373-3DA8-4505-9510-110347CD635D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B16AC373-3DA8-4505-9510-110347CD635D}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {B16AC373-3DA8-4505-9510-110347CD635D}.Publish|Any CPU.Build.0 = Debug|Any CPU + {B16AC373-3DA8-4505-9510-110347CD635D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B16AC373-3DA8-4505-9510-110347CD635D}.Release|Any CPU.Build.0 = Release|Any CPU {27D33AB3-4DFF-48BC-8D76-FB2CDF90B707}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {27D33AB3-4DFF-48BC-8D76-FB2CDF90B707}.Debug|Any CPU.Build.0 = Debug|Any CPU 
{27D33AB3-4DFF-48BC-8D76-FB2CDF90B707}.Publish|Any CPU.ActiveCfg = Debug|Any CPU @@ -1411,7 +1504,6 @@ Global {E82B640C-1704-430D-8D71-FD8ED3695468} = {5A7028A7-4DDF-4E4F-84A9-37CE8F8D7E89} {6ECFDF04-2237-4A85-B114-DAA34923E9E6} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} {39EAB599-742F-417D-AF80-95F90376BB18} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0} - {232E1153-6366-4175-A982-D66B30AAD610} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} {DAC54048-A39A-4739-8307-EA5A291F2EA0} = {0D8C6358-5DAA-4EA6-A924-C268A9A21BC9} {8C3DE41C-E2C8-42B9-8638-574F8946EB0E} = {FA3720F1-C99A-49B2-9577-A940257098BF} {DB58FDD0-308E-472F-BFF5-508BC64C727E} = {0D8C6358-5DAA-4EA6-A924-C268A9A21BC9} @@ -1425,6 +1517,19 @@ Global {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} {2EB6E4C2-606D-B638-2E08-49EA2061C428} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} {78785CB1-66CF-4895-D7E5-A440DD84BE86} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} + {EA35F1B5-9148-4189-BE34-5E00AED56D65} = {6823CD5E-2ABE-41EB-B865-F86EC13F0CF9} + {0C64EC81-8116-4388-87AD-BA14D4B59974} = {D6D598DF-C17C-46F4-B2B9-CDE82E2DE132} + {03ACF9DD-00C9-4F2B-80F1-537E2151AF5F} = {D6D598DF-C17C-46F4-B2B9-CDE82E2DE132} + {2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC} = {5A7028A7-4DDF-4E4F-84A9-37CE8F8D7E89} + {3F260A77-B6C9-97FD-1304-4B34DA936CF4} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} + {2756FED3-ABC1-4F58-932E-5DD05A5EE066} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4} + {05E102FA-A766-4B10-B95A-54060AB56596} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4} + {4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4} + {6FE977F6-D508-4DF0-951F-749B0D5C7109} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4} + {37381352-4F10-427F-AB8A-51FEAB265201} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4} + {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4} + {8C658E1E-83C8-4127-B8BF-27A638A45DDD} = {6823CD5E-2ABE-41EB-B865-F86EC13F0CF9} + {B16AC373-3DA8-4505-9510-110347CD635D} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} {4F381919-F1BE-47D8-8558-3187ED04A84F} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0} {27D33AB3-4DFF-48BC-8D76-FB2CDF90B707} = {4F381919-F1BE-47D8-8558-3187ED04A84F} {B29A972F-A774-4140-AECF-6B577C476627} = {4F381919-F1BE-47D8-8558-3187ED04A84F} diff --git a/dotnet/nuget/nuget-package.props b/dotnet/nuget/nuget-package.props index ef25a833a718..c653597dec3a 100644 --- a/dotnet/nuget/nuget-package.props +++ b/dotnet/nuget/nuget-package.props @@ -1,7 +1,7 @@ - 1.34.0 + 1.40.1 $(VersionPrefix)-$(VersionSuffix) $(VersionPrefix) @@ -9,7 +9,7 @@ true - 1.33.0 + 1.40.0 $(NoWarn);CP0003 diff --git a/dotnet/samples/Concepts/Agents/AzureAIAgent_FileManipulation.cs b/dotnet/samples/Concepts/Agents/AzureAIAgent_FileManipulation.cs new file mode 100644 index 000000000000..cf55801420df --- /dev/null +++ b/dotnet/samples/Concepts/Agents/AzureAIAgent_FileManipulation.cs @@ -0,0 +1,68 @@ +// Copyright (c) Microsoft. All rights reserved. +using Azure.AI.Projects; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Resources; +using Agent = Azure.AI.Projects.Agent; + +namespace Agents; + +/// +/// Demonstrate using code-interpreter to manipulate and generate csv files with . 
+/// +public class AzureAIAgent_FileManipulation(ITestOutputHelper output) : BaseAzureAgentTest(output) +{ + [Fact] + public async Task AnalyzeCSVFileUsingAzureAIAgentAsync() + { + await using Stream stream = EmbeddedResource.ReadStream("sales.csv")!; + AgentFile fileInfo = await this.AgentsClient.UploadFileAsync(stream, AgentFilePurpose.Agents, "sales.csv"); + + // Define the agent + Agent definition = await this.AgentsClient.CreateAgentAsync( + TestConfiguration.AzureAI.ChatModelId, + tools: [new CodeInterpreterToolDefinition()], + toolResources: + new() + { + CodeInterpreter = new() + { + FileIds = { fileInfo.Id }, + } + }); + AzureAIAgent agent = new(definition, this.AgentsClient); + + // Create a chat for agent interaction. + AgentGroupChat chat = new(); + + // Respond to user input + try + { + await InvokeAgentAsync("Which segment had the most sales?"); + await InvokeAgentAsync("List the top 5 countries that generated the most profit."); + await InvokeAgentAsync("Create a tab delimited file report of profit by each country per month."); + } + finally + { + await this.AgentsClient.DeleteAgentAsync(agent.Id); + await this.AgentsClient.DeleteFileAsync(fileInfo.Id); + await chat.ResetAsync(); + } + + // Local function to invoke agent and display the conversation messages. + async Task InvokeAgentAsync(string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(new(AuthorRole.User, input)); + this.WriteAgentChatMessage(message); + + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) + { + this.WriteAgentChatMessage(response); + await this.DownloadContentAsync(response); + } + } + } +} diff --git a/dotnet/samples/Concepts/Agents/AzureAIAgent_Streaming.cs b/dotnet/samples/Concepts/Agents/AzureAIAgent_Streaming.cs new file mode 100644 index 000000000000..de2fc685a357 --- /dev/null +++ b/dotnet/samples/Concepts/Agents/AzureAIAgent_Streaming.cs @@ -0,0 +1,183 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.ComponentModel; +using Azure.AI.Projects; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Agent = Azure.AI.Projects.Agent; + +namespace Agents; + +/// +/// Demonstrate consuming "streaming" message for . +/// +public class AzureAIAgent_Streaming(ITestOutputHelper output) : BaseAzureAgentTest(output) +{ + [Fact] + public async Task UseStreamingAgentAsync() + { + const string AgentName = "Parrot"; + const string AgentInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound."; + + // Define the agent + Agent definition = await this.AgentsClient.CreateAgentAsync( + TestConfiguration.AzureAI.ChatModelId, + AgentName, + null, + AgentInstructions); + AzureAIAgent agent = new(definition, this.AgentsClient); + + // Create a thread for the agent conversation. 
+ AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata); + + // Respond to user input + await InvokeAgentAsync(agent, thread.Id, "Fortune favors the bold."); + await InvokeAgentAsync(agent, thread.Id, "I came, I saw, I conquered."); + await InvokeAgentAsync(agent, thread.Id, "Practice makes perfect."); + + // Output the entire chat history + await DisplayChatHistoryAsync(agent, thread.Id); + } + + [Fact] + public async Task UseStreamingAssistantAgentWithPluginAsync() + { + const string AgentName = "Host"; + const string AgentInstructions = "Answer questions about the menu."; + + // Define the agent + Agent definition = await this.AgentsClient.CreateAgentAsync( + TestConfiguration.AzureAI.ChatModelId, + AgentName, + null, + AgentInstructions); + AzureAIAgent agent = new(definition, this.AgentsClient) + { + Kernel = new Kernel(), + }; + + // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + agent.Kernel.Plugins.Add(plugin); + + // Create a thread for the agent conversation. + AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata); + + // Respond to user input + await InvokeAgentAsync(agent, thread.Id, "What is the special soup and its price?"); + await InvokeAgentAsync(agent, thread.Id, "What is the special drink and its price?"); + + // Output the entire chat history + await DisplayChatHistoryAsync(agent, thread.Id); + } + + [Fact] + public async Task UseStreamingAssistantWithCodeInterpreterAsync() + { + const string AgentName = "MathGuy"; + const string AgentInstructions = "Solve math problems with code."; + + // Define the agent + Agent definition = await this.AgentsClient.CreateAgentAsync( + TestConfiguration.AzureAI.ChatModelId, + AgentName, + null, + AgentInstructions, + [new CodeInterpreterToolDefinition()]); + AzureAIAgent agent = new(definition, this.AgentsClient) + { + Kernel = new Kernel(), + }; + + // Create a thread for the agent conversation. + AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata); + + // Respond to user input + await InvokeAgentAsync(agent, thread.Id, "Is 191 a prime number?"); + await InvokeAgentAsync(agent, thread.Id, "Determine the values in the Fibonacci sequence that that are less then the value of 101"); + + // Output the entire chat history + await DisplayChatHistoryAsync(agent, thread.Id); + } + + // Local function to invoke agent and display the conversation messages. + private async Task InvokeAgentAsync(AzureAIAgent agent, string threadId, string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + await agent.AddChatMessageAsync(threadId, message); + this.WriteAgentChatMessage(message); + + ChatHistory history = []; + + bool isFirst = false; + bool isCode = false; + await foreach (StreamingChatMessageContent response in agent.InvokeStreamingAsync(threadId, messages: history)) + { + if (string.IsNullOrEmpty(response.Content)) + { + StreamingFunctionCallUpdateContent? functionCall = response.Items.OfType().SingleOrDefault(); + if (functionCall != null) + { + Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? "*"}: FUNCTION CALL - {functionCall.Name}"); + } + + continue; + } + + // Differentiate between assistant and tool messages + if (isCode != (response.Metadata?.ContainsKey(AzureAIAgent.CodeInterpreterMetadataKey) ?? 
false)) + { + isFirst = false; + isCode = !isCode; + } + + if (!isFirst) + { + Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? "*"}:"); + isFirst = true; + } + + Console.WriteLine($"\t > streamed: '{response.Content}'"); + } + + foreach (ChatMessageContent content in history) + { + this.WriteAgentChatMessage(content); + } + } + + private async Task DisplayChatHistoryAsync(AzureAIAgent agent, string threadId) + { + Console.WriteLine("================================"); + Console.WriteLine("CHAT HISTORY"); + Console.WriteLine("================================"); + + ChatMessageContent[] messages = await agent.GetThreadMessagesAsync(threadId).ToArrayAsync(); + for (int index = messages.Length - 1; index >= 0; --index) + { + this.WriteAgentChatMessage(messages[index]); + } + } + + public sealed class MenuPlugin + { + [KernelFunction, Description("Provides a list of specials from the menu.")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] + public string GetSpecials() + { + return @" +Special Soup: Clam Chowder +Special Salad: Cobb Salad +Special Drink: Chai Tea +"; + } + + [KernelFunction, Description("Provides the price of the requested menu item.")] + public string GetItemPrice( + [Description("The name of the menu item.")] + string menuItem) + { + return "$9.99"; + } + } +} diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs index 48fb10ba9cdc..c72ecdb79be8 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs @@ -4,7 +4,6 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; namespace Agents; @@ -23,7 +22,7 @@ public async Task UseAutoFunctionInvocationFilterWithAgentInvocationAsync() { Instructions = "Answer questions about the menu.", Kernel = CreateKernelWithFilter(), - Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + Arguments = new KernelArguments(new PromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; KernelPlugin plugin = KernelPluginFactory.CreateFromType(); @@ -70,7 +69,7 @@ public async Task UseAutoFunctionInvocationFilterWithAgentChatAsync() { Instructions = "Answer questions about the menu.", Kernel = CreateKernelWithFilter(), - Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + Arguments = new KernelArguments(new PromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; KernelPlugin plugin = KernelPluginFactory.CreateFromType(); @@ -111,7 +110,7 @@ public async Task UseAutoFunctionInvocationFilterWithStreamingAgentInvocationAsy { Instructions = "Answer questions about the menu.", Kernel = CreateKernelWithFilter(), - Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + Arguments = new KernelArguments(new PromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; KernelPlugin plugin = KernelPluginFactory.CreateFromType(); @@ -174,7 +173,7 @@ public async Task UseAutoFunctionInvocationFilterWithStreamingAgentChatAsync() { 
Instructions = "Answer questions about the menu.", Kernel = CreateKernelWithFilter(), - Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + Arguments = new KernelArguments(new PromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; KernelPlugin plugin = KernelPluginFactory.CreateFromType(); diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_HistoryReducer.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_HistoryReducer.cs index 6e0816bc8470..540b54777cf9 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_HistoryReducer.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_HistoryReducer.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. + using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.Agents.History; using Microsoft.SemanticKernel.ChatCompletion; namespace Agents; diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_Serialization.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_Serialization.cs index a0494c67bd70..1bc16f452d6c 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_Serialization.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_Serialization.cs @@ -3,7 +3,6 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; namespace Agents; /// @@ -24,7 +23,7 @@ public async Task SerializeAndRestoreAgentGroupChatAsync() Instructions = HostInstructions, Name = HostName, Kernel = this.CreateKernelWithChatCompletion(), - Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + Arguments = new KernelArguments(new PromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). 
diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs index 783524adf7f1..46ea8dea2246 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs @@ -2,7 +2,6 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; namespace Agents; @@ -29,7 +28,7 @@ public async Task UseServiceSelectionWithChatCompletionAgentAsync() new() { Kernel = kernel, - Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { ServiceId = ServiceKeyGood }), + Arguments = new KernelArguments(new PromptExecutionSettings() { ServiceId = ServiceKeyGood }), }; // Define the agent targeting ServiceId = ServiceKeyBad @@ -37,7 +36,7 @@ public async Task UseServiceSelectionWithChatCompletionAgentAsync() new() { Kernel = kernel, - Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { ServiceId = ServiceKeyBad }), + Arguments = new KernelArguments(new PromptExecutionSettings() { ServiceId = ServiceKeyBad }), }; // Define the agent with no explicit ServiceId defined @@ -57,21 +56,21 @@ public async Task UseServiceSelectionWithChatCompletionAgentAsync() // Invoke agent with override arguments where ServiceId = ServiceKeyGood: Expect agent response Console.WriteLine("\n[Bad Agent: Good ServiceId Override]"); - await InvokeAgentAsync(agentBad, new(new OpenAIPromptExecutionSettings() { ServiceId = ServiceKeyGood })); + await InvokeAgentAsync(agentBad, new(new PromptExecutionSettings() { ServiceId = ServiceKeyGood })); // Invoke agent with override arguments where ServiceId = ServiceKeyBad: Expect failure due to invalid service key Console.WriteLine("\n[Good Agent: Bad ServiceId Override]"); - await InvokeAgentAsync(agentGood, new(new OpenAIPromptExecutionSettings() { ServiceId = ServiceKeyBad })); + await InvokeAgentAsync(agentGood, new(new PromptExecutionSettings() { ServiceId = ServiceKeyBad })); Console.WriteLine("\n[Default Agent: Bad ServiceId Override]"); - await InvokeAgentAsync(agentDefault, new(new OpenAIPromptExecutionSettings() { ServiceId = ServiceKeyBad })); + await InvokeAgentAsync(agentDefault, new(new PromptExecutionSettings() { ServiceId = ServiceKeyBad })); // Invoke agent with override arguments with no explicit ServiceId: Expect agent response Console.WriteLine("\n[Good Agent: No ServiceId Override]"); - await InvokeAgentAsync(agentGood, new(new OpenAIPromptExecutionSettings())); + await InvokeAgentAsync(agentGood, new(new PromptExecutionSettings())); Console.WriteLine("\n[Bad Agent: No ServiceId Override]"); - await InvokeAgentAsync(agentBad, new(new OpenAIPromptExecutionSettings())); + await InvokeAgentAsync(agentBad, new(new PromptExecutionSettings())); Console.WriteLine("\n[Default Agent: No ServiceId Override]"); - await InvokeAgentAsync(agentDefault, new(new OpenAIPromptExecutionSettings())); + await InvokeAgentAsync(agentDefault, new(new PromptExecutionSettings())); // Local function to invoke agent and display the conversation messages. async Task InvokeAgentAsync(ChatCompletionAgent agent, KernelArguments? 
arguments = null) diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs index 6d11dd80ff91..ae9d965ff9a9 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs @@ -3,7 +3,6 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; namespace Agents; @@ -50,7 +49,7 @@ public async Task UseStreamingChatCompletionAgentWithPluginAsync() Name = "Host", Instructions = MenuInstructions, Kernel = this.CreateKernelWithChatCompletion(), - Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + Arguments = new KernelArguments(new PromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_Templating.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_Templating.cs index 1bcf2adbe758..7372b7df19bc 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_Templating.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_Templating.cs @@ -50,7 +50,9 @@ await InvokeChatCompletionAgentWithTemplateAsync( """ Write a one verse poem on the requested topic in the style of {{$style}}. Always state the requested style of the poem. - """); + """, + PromptTemplateConfig.SemanticKernelTemplateFormat, + new KernelPromptTemplateFactory()); } [Fact] @@ -79,8 +81,8 @@ Always state the requested style of the poem. private async Task InvokeChatCompletionAgentWithTemplateAsync( string instructionTemplate, - string? templateFormat = null, - IPromptTemplateFactory? templateFactory = null) + string templateFormat, + IPromptTemplateFactory templateFactory) { // Define the agent PromptTemplateConfig templateConfig = diff --git a/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs b/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs index dc9178156509..6f07fb739190 100644 --- a/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs +++ b/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs @@ -98,7 +98,7 @@ public async Task NestedChatWithAggregatorAgentAsync() Console.WriteLine($"! {Model}"); OpenAIPromptExecutionSettings jsonSettings = new() { ResponseFormat = ChatResponseFormat.CreateJsonObjectFormat() }; - OpenAIPromptExecutionSettings autoInvokeSettings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; + PromptExecutionSettings autoInvokeSettings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; ChatCompletionAgent internalLeaderAgent = CreateAgent(InternalLeaderName, InternalLeaderInstructions); ChatCompletionAgent internalGiftIdeaAgent = CreateAgent(InternalGiftIdeaAgentName, InternalGiftIdeaAgentInstructions); diff --git a/dotnet/samples/Concepts/Agents/DeclarativeAgents.cs b/dotnet/samples/Concepts/Agents/DeclarativeAgents.cs index c2a3fd377071..a8e98f2e107e 100644 --- a/dotnet/samples/Concepts/Agents/DeclarativeAgents.cs +++ b/dotnet/samples/Concepts/Agents/DeclarativeAgents.cs @@ -1,5 +1,4 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Text; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; @@ -9,11 +8,13 @@ namespace Agents; public class DeclarativeAgents(ITestOutputHelper output) : BaseAgentsTest(output) { - [InlineData("SchedulingAssistant.json", "Read the body of my last five emails, if any contain a meeting request for today, check that it's already on my calendar, if not, call out which email it is.")] + [InlineData( + "SchedulingAssistant.json", + "Read the body of my last five emails, if any contain a meeting request for today, check that it's already on my calendar, if not, call out which email it is.")] [Theory] public async Task LoadsAgentFromDeclarativeAgentManifestAsync(string agentFileName, string input) { - var kernel = CreateKernel(); + var kernel = this.CreateKernelWithChatCompletion(); kernel.AutoFunctionInvocationFilters.Add(new ExpectedSchemaFunctionFilter()); var manifestLookupDirectory = Path.Combine(Directory.GetCurrentDirectory(), "..", "..", "..", "Resources", "DeclarativeAgents"); var manifestFilePath = Path.Combine(manifestLookupDirectory, agentFileName); @@ -30,9 +31,8 @@ public async Task LoadsAgentFromDeclarativeAgentManifestAsync(string agentFileNa Assert.NotNull(agent.Instructions); Assert.NotEmpty(agent.Instructions); - ChatMessageContent message = new(AuthorRole.User, input); - ChatHistory chatHistory = [message]; - StringBuilder sb = new(); + ChatHistory chatHistory = [new ChatMessageContent(AuthorRole.User, input)]; + var kernelArguments = new KernelArguments(new PromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto( @@ -42,23 +42,14 @@ public async Task LoadsAgentFromDeclarativeAgentManifestAsync(string agentFileNa } ) }); - await foreach (ChatMessageContent response in agent.InvokeAsync(chatHistory, kernelArguments)) - { - chatHistory.Add(response); - sb.Append(response.Content); - } - Assert.NotEmpty(chatHistory.Skip(1)); - } - private Kernel CreateKernel() - { - IKernelBuilder builder = Kernel.CreateBuilder(); - - base.AddChatCompletionToKernel(builder); - return builder.Build(); + var responses = await agent.InvokeAsync(chatHistory, kernelArguments).ToArrayAsync(); + Assert.NotEmpty(responses); } + private sealed class ExpectedSchemaFunctionFilter : IAutoFunctionInvocationFilter - {//TODO: this eventually needs to be added to all CAP or DA but we're still discussing where should those facilitators live + { + //TODO: this eventually needs to be added to all CAP or DA but we're still discussing where should those facilitators live public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) { await next(context); diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs index 159441147f77..0895308f0215 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs @@ -4,13 +4,14 @@ using Microsoft.SemanticKernel.Agents.Chat; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; namespace Agents; /// /// Demonstrate that two different agent types are able to participate in the same conversation. /// In this case a and participate. 
/// -public class MixedChat_Agents(ITestOutputHelper output) : BaseAgentsTest(output) +public class MixedChat_Agents(ITestOutputHelper output) : BaseAssistantTest(output) { private const string ReviewerName = "ArtDirector"; private const string ReviewerInstructions = @@ -44,16 +45,16 @@ public async Task ChatWithOpenAIAssistantAgentAndChatCompletionAgentAsync() Kernel = this.CreateKernelWithChatCompletion(), }; - OpenAIAssistantAgent agentWriter = - await OpenAIAssistantAgent.CreateAsync( - clientProvider: this.GetClientProvider(), - definition: new OpenAIAssistantDefinition(this.Model) - { - Instructions = CopyWriterInstructions, - Name = CopyWriterName, - Metadata = AssistantSampleMetadata, - }, - kernel: new Kernel()); + // Define the assistant + Assistant assistant = + await this.AssistantClient.CreateAssistantAsync( + this.Model, + name: CopyWriterName, + instructions: CopyWriterInstructions, + metadata: SampleMetadata); + + // Create the agent + OpenAIAssistantAgent agentWriter = new(assistant, this.AssistantClient); // Create a chat for agent interaction. AgentGroupChat chat = diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Files.cs b/dotnet/samples/Concepts/Agents/MixedChat_Files.cs index 4f12657e0d7a..56ff0f331f0b 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Files.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Files.cs @@ -3,7 +3,7 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Files; +using OpenAI.Assistants; using Resources; namespace Agents; @@ -12,36 +12,27 @@ namespace Agents; /// Demonstrate agent interacts with /// when it produces file output. /// -public class MixedChat_Files(ITestOutputHelper output) : BaseAgentsTest(output) +public class MixedChat_Files(ITestOutputHelper output) : BaseAssistantTest(output) { private const string SummaryInstructions = "Summarize the entire conversation for the user in natural language."; [Fact] public async Task AnalyzeFileAndGenerateReportAsync() { - OpenAIClientProvider provider = this.GetClientProvider(); - - OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient(); - - OpenAIFile uploadFile = - await fileClient.UploadFileAsync( - new BinaryData(await EmbeddedResource.ReadAllAsync("30-user-context.txt")), - "30-user-context.txt", - FileUploadPurpose.Assistants); - - Console.WriteLine(this.ApiKey); + await using Stream stream = EmbeddedResource.ReadStream("30-user-context.txt")!; + string fileId = await this.Client.UploadAssistantFileAsync(stream, "30-user-context.txt"); // Define the agents - OpenAIAssistantAgent analystAgent = - await OpenAIAssistantAgent.CreateAsync( - provider, - definition: new OpenAIAssistantDefinition(this.Model) - { - EnableCodeInterpreter = true, - CodeInterpreterFileIds = [uploadFile.Id], // Associate uploaded file with assistant code-interpreter - Metadata = AssistantSampleMetadata, - }, - kernel: new Kernel()); + // Define the assistant + Assistant assistant = + await this.AssistantClient.CreateAssistantAsync( + this.Model, + enableCodeInterpreter: true, + codeInterpreterFileIds: [fileId], + metadata: SampleMetadata); + + // Create the agent + OpenAIAssistantAgent analystAgent = new(assistant, this.AssistantClient); ChatCompletionAgent summaryAgent = new() @@ -66,8 +57,8 @@ Create a tab delimited file report of the ordered (descending) frequency distrib } finally { - await analystAgent.DeleteAsync(); - await fileClient.DeleteFileAsync(uploadFile.Id); + await 
this.AssistantClient.DeleteAssistantAsync(analystAgent.Id); + await this.Client.DeleteFileAsync(fileId); } // Local function to invoke agent and display the conversation messages. @@ -83,7 +74,7 @@ async Task InvokeAgentAsync(Agent agent, string? input = null) await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { this.WriteAgentChatMessage(response); - await this.DownloadResponseContentAsync(fileClient, response); + await this.DownloadResponseContentAsync(response); } } } diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Images.cs b/dotnet/samples/Concepts/Agents/MixedChat_Images.cs index 03f047c756bd..158da60e418a 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Images.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Images.cs @@ -3,7 +3,7 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Files; +using OpenAI.Assistants; namespace Agents; @@ -11,7 +11,7 @@ namespace Agents; /// Demonstrate agent interacts with /// when it produces image output. ///
-public class MixedChat_Images(ITestOutputHelper output) : BaseAgentsTest(output) +public class MixedChat_Images(ITestOutputHelper output) : BaseAssistantTest(output) { private const string AnalystName = "Analyst"; private const string AnalystInstructions = "Create charts as requested without explanation."; @@ -22,22 +22,17 @@ public class MixedChat_Images(ITestOutputHelper output) : BaseAgentsTest(output) [Fact] public async Task AnalyzeDataAndGenerateChartAsync() { - OpenAIClientProvider provider = this.GetClientProvider(); + // Define the assistant + Assistant assistant = + await this.AssistantClient.CreateAssistantAsync( + this.Model, + name: AnalystName, + instructions: AnalystInstructions, + enableCodeInterpreter: true, + metadata: SampleMetadata); - OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient(); - - // Define the agents - OpenAIAssistantAgent analystAgent = - await OpenAIAssistantAgent.CreateAsync( - provider, - definition: new OpenAIAssistantDefinition(this.Model) - { - Instructions = AnalystInstructions, - Name = AnalystName, - EnableCodeInterpreter = true, - Metadata = AssistantSampleMetadata, - }, - kernel: new Kernel()); + // Create the agent + OpenAIAssistantAgent analystAgent = new(assistant, this.AssistantClient); ChatCompletionAgent summaryAgent = new() @@ -75,7 +70,7 @@ await InvokeAgentAsync( } finally { - await analystAgent.DeleteAsync(); + await this.AssistantClient.DeleteAssistantAsync(analystAgent.Id); } // Local function to invoke agent and display the conversation messages. @@ -91,7 +86,7 @@ async Task InvokeAgentAsync(Agent agent, string? input = null) await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { this.WriteAgentChatMessage(response); - await this.DownloadResponseImageAsync(fileClient, response); + await this.DownloadResponseImageAsync(response); } } } diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs b/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs index 7c9a2490d3e0..431dcc982a5e 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs @@ -3,13 +3,14 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; namespace Agents; /// /// Demonstrate the use of . 
/// -public class MixedChat_Reset(ITestOutputHelper output) : BaseAgentsTest(output) +public class MixedChat_Reset(ITestOutputHelper output) : BaseAssistantTest(output) { private const string AgentInstructions = """ @@ -20,18 +21,15 @@ The user may either provide information or query on information previously provi [Fact] public async Task ResetChatAsync() { - OpenAIClientProvider provider = this.GetClientProvider(); + // Define the assistant + Assistant assistant = + await this.AssistantClient.CreateAssistantAsync( + this.Model, + instructions: AgentInstructions, + metadata: SampleMetadata); - // Define the agents - OpenAIAssistantAgent assistantAgent = - await OpenAIAssistantAgent.CreateAsync( - provider, - definition: new OpenAIAssistantDefinition(this.Model) - { - Name = nameof(OpenAIAssistantAgent), - Instructions = AgentInstructions, - }, - kernel: new Kernel()); + // Create the agent + OpenAIAssistantAgent assistantAgent = new(assistant, this.AssistantClient); ChatCompletionAgent chatAgent = new() @@ -64,7 +62,7 @@ await OpenAIAssistantAgent.CreateAsync( finally { await chat.ResetAsync(); - await assistantAgent.DeleteAsync(); + await this.AssistantClient.DeleteAssistantAsync(assistantAgent.Id); } // Local function to invoke agent and display the conversation messages. diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Serialization.cs b/dotnet/samples/Concepts/Agents/MixedChat_Serialization.cs index 27212e292366..4979ceedacb1 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Serialization.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Serialization.cs @@ -4,13 +4,14 @@ using Microsoft.SemanticKernel.Agents.Chat; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; namespace Agents; /// /// Demonstrate the serialization of with a /// and an . /// -public class MixedChat_Serialization(ITestOutputHelper output) : BaseAgentsTest(output) +public class MixedChat_Serialization(ITestOutputHelper output) : BaseAssistantTest(output) { private const string TranslatorName = "Translator"; private const string TranslatorInstructions = @@ -39,15 +40,16 @@ public async Task SerializeAndRestoreAgentGroupChatAsync() Kernel = this.CreateKernelWithChatCompletion(), }; - OpenAIAssistantAgent agentCounter = - await OpenAIAssistantAgent.CreateAsync( - kernel: new(), - clientProvider: this.GetClientProvider(), - definition: new(this.Model) - { - Instructions = CounterInstructions, - Name = CounterName, - }); + // Define the assistant + Assistant assistant = + await this.AssistantClient.CreateAssistantAsync( + this.Model, + name: CounterName, + instructions: CounterInstructions, + metadata: SampleMetadata); + + // Create the agent + OpenAIAssistantAgent agentCounter = new(assistant, this.AssistantClient); AgentGroupChat chat = CreateGroupChat(); diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs b/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs index c9364bc2b2a9..fc28c3c683dd 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs @@ -4,6 +4,7 @@ using Microsoft.SemanticKernel.Agents.Chat; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; namespace Agents; @@ -11,7 +12,7 @@ namespace Agents; /// Demonstrate consuming "streaming" message for and /// both participating in an . ///
-public class MixedChat_Streaming(ITestOutputHelper output) : BaseAgentsTest(output) +public class MixedChat_Streaming(ITestOutputHelper output) : BaseAssistantTest(output) { private const string ReviewerName = "ArtDirector"; private const string ReviewerInstructions = @@ -45,16 +46,16 @@ public async Task UseStreamingAgentChatAsync() Kernel = this.CreateKernelWithChatCompletion(), }; - OpenAIAssistantAgent agentWriter = - await OpenAIAssistantAgent.CreateAsync( - clientProvider: this.GetClientProvider(), - definition: new OpenAIAssistantDefinition(this.Model) - { - Instructions = CopyWriterInstructions, - Name = CopyWriterName, - Metadata = AssistantSampleMetadata, - }, - kernel: new Kernel()); + // Define the assistant + Assistant assistant = + await this.AssistantClient.CreateAssistantAsync( + this.Model, + name: CopyWriterName, + instructions: CopyWriterInstructions, + metadata: SampleMetadata); + + // Create the agent + OpenAIAssistantAgent agentWriter = new(assistant, this.AssistantClient); // Create a chat for agent interaction. AgentGroupChat chat = diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs index 83ea083ec674..f23e7ab952b7 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs @@ -3,7 +3,7 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Files; +using OpenAI.Assistants; namespace Agents; @@ -11,30 +11,22 @@ namespace Agents; /// Demonstrate using code-interpreter with to /// produce image content displays the requested charts. ///
-public class OpenAIAssistant_ChartMaker(ITestOutputHelper output) : BaseAgentsTest(output) +public class OpenAIAssistant_ChartMaker(ITestOutputHelper output) : BaseAssistantTest(output) { - private const string AgentName = "ChartMaker"; - private const string AgentInstructions = "Create charts as requested without explanation."; - [Fact] public async Task GenerateChartWithOpenAIAssistantAgentAsync() { - OpenAIClientProvider provider = this.GetClientProvider(); - - OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient(); + // Define the assistant + Assistant assistant = + await this.AssistantClient.CreateAssistantAsync( + this.Model, + "ChartMaker", + instructions: "Create charts as requested without explanation.", + enableCodeInterpreter: true, + metadata: SampleMetadata); - // Define the agent - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - provider, - definition: new OpenAIAssistantDefinition(this.Model) - { - Instructions = AgentInstructions, - Name = AgentName, - EnableCodeInterpreter = true, - Metadata = AssistantSampleMetadata, - }, - kernel: new()); + // Create the agent + OpenAIAssistantAgent agent = new(assistant, this.AssistantClient); // Create a chat for agent interaction. AgentGroupChat chat = new(); @@ -58,7 +50,7 @@ Sum 426 1622 856 2904 } finally { - await agent.DeleteAsync(); + await this.AssistantClient.DeleteAssistantAsync(agent.Id); } // Local function to invoke agent and display the conversation messages. @@ -71,7 +63,7 @@ async Task InvokeAgentAsync(string input) await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { this.WriteAgentChatMessage(response); - await this.DownloadResponseImageAsync(fileClient, response); + await this.DownloadResponseImageAsync(response); } } } diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs index a0d48bf94eaa..915861ab2a99 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs @@ -3,7 +3,7 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Files; +using OpenAI.Assistants; using Resources; namespace Agents; @@ -11,32 +11,24 @@ namespace Agents; /// /// Demonstrate using code-interpreter to manipulate and generate csv files with . 
/// -public class OpenAIAssistant_FileManipulation(ITestOutputHelper output) : BaseAgentsTest(output) +public class OpenAIAssistant_FileManipulation(ITestOutputHelper output) : BaseAssistantTest(output) { [Fact] public async Task AnalyzeCSVFileUsingOpenAIAssistantAgentAsync() { - OpenAIClientProvider provider = this.GetClientProvider(); + await using Stream stream = EmbeddedResource.ReadStream("sales.csv")!; + string fileId = await this.Client.UploadAssistantFileAsync(stream, "sales.csv"); - OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient(); + // Define the assistant + Assistant assistant = + await this.AssistantClient.CreateAssistantAsync( + this.Model, + enableCodeInterpreter: true, + codeInterpreterFileIds: [fileId], + metadata: SampleMetadata); - OpenAIFile uploadFile = - await fileClient.UploadFileAsync( - new BinaryData(await EmbeddedResource.ReadAllAsync("sales.csv")!), - "sales.csv", - FileUploadPurpose.Assistants); - - // Define the agent - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - provider, - definition: new OpenAIAssistantDefinition(this.Model) - { - EnableCodeInterpreter = true, - CodeInterpreterFileIds = [uploadFile.Id], - Metadata = AssistantSampleMetadata, - }, - kernel: new Kernel()); + // Create the agent + OpenAIAssistantAgent agent = new(assistant, this.AssistantClient); // Create a chat for agent interaction. AgentGroupChat chat = new(); @@ -50,8 +42,8 @@ await OpenAIAssistantAgent.CreateAsync( } finally { - await agent.DeleteAsync(); - await fileClient.DeleteFileAsync(uploadFile.Id); + await this.AssistantClient.DeleteAssistantAsync(agent.Id); + await this.Client.DeleteFileAsync(fileId); } // Local function to invoke agent and display the conversation messages. @@ -64,7 +56,7 @@ async Task InvokeAgentAsync(string input) await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { this.WriteAgentChatMessage(response); - await this.DownloadResponseContentAsync(fileClient, response); + await this.DownloadResponseContentAsync(response); } } } diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs index 1381378a06c8..a1493025b5a4 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs @@ -5,6 +5,7 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; namespace Agents; @@ -13,10 +14,8 @@ namespace Agents; /// filters with /// via . ///
-public class OpenAIAssistant_FunctionFilters(ITestOutputHelper output) : BaseAgentsTest(output) +public class OpenAIAssistant_FunctionFilters(ITestOutputHelper output) : BaseAssistantTest(output) { - protected override bool ForceOpenAI => true; // %%% REMOVE - [Fact] public async Task UseFunctionInvocationFilterAsync() { @@ -80,7 +79,7 @@ private async Task InvokeAssistantAsync(OpenAIAssistantAgent agent) finally { await chat.ResetAsync(); - await agent.DeleteAsync(); + await this.AssistantClient.DeleteAssistantAsync(agent.Id); } } @@ -103,7 +102,7 @@ private async Task InvokeAssistantStreamingAsync(OpenAIAssistantAgent agent) finally { await chat.ResetAsync(); - await agent.DeleteAsync(); + await this.AssistantClient.DeleteAssistantAsync(agent.Id); } } @@ -120,19 +119,19 @@ private void WriteChatHistory(IEnumerable history) private async Task CreateAssistantAsync(Kernel kernel) { - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - this.GetClientProvider(), - new OpenAIAssistantDefinition(base.Model) - { - Instructions = "Answer questions about the menu.", - Metadata = AssistantSampleMetadata, - }, - kernel: kernel - ); - + // Define the assistant + Assistant assistant = + await this.AssistantClient.CreateAssistantAsync( + this.Model, + instructions: "Answer questions about the menu.", + metadata: SampleMetadata); + + // Create the agent KernelPlugin plugin = KernelPluginFactory.CreateFromType(); - agent.Kernel.Plugins.Add(plugin); + OpenAIAssistantAgent agent = new(assistant, this.AssistantClient, [plugin]) + { + Kernel = kernel + }; return agent; } diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs index 39ff0f0fb97c..493b920f0d9d 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs @@ -3,35 +3,31 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; namespace Agents; /// /// Demonstrate consuming "streaming" message for . /// -public class OpenAIAssistant_Streaming(ITestOutputHelper output) : BaseAgentsTest(output) +public class OpenAIAssistant_Streaming(ITestOutputHelper output) : BaseAssistantTest(output) { [Fact] public async Task UseStreamingAssistantAgentAsync() { - const string AgentName = "Parrot"; - const string AgentInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound."; - - // Define the agent - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - kernel: new(), - clientProvider: this.GetClientProvider(), - definition: new OpenAIAssistantDefinition(this.Model) - { - Instructions = AgentInstructions, - Name = AgentName, - EnableCodeInterpreter = true, - Metadata = AssistantSampleMetadata, - }); + // Define the assistant + Assistant assistant = + await this.AssistantClient.CreateAssistantAsync( + this.Model, + name: "Parrot", + instructions: "Repeat the user message in the voice of a pirate and then end with a parrot sound.", + metadata: SampleMetadata); + + // Create the agent + OpenAIAssistantAgent agent = new(assistant, this.AssistantClient); // Create a thread for the agent conversation. 
- string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); + string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata); // Respond to user input await InvokeAgentAsync(agent, threadId, "Fortune favors the bold."); @@ -45,27 +41,20 @@ await OpenAIAssistantAgent.CreateAsync( [Fact] public async Task UseStreamingAssistantAgentWithPluginAsync() { - const string AgentName = "Host"; - const string AgentInstructions = "Answer questions about the menu."; - - // Define the agent - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - kernel: new(), - clientProvider: this.GetClientProvider(), - definition: new OpenAIAssistantDefinition(this.Model) - { - Instructions = AgentInstructions, - Name = AgentName, - Metadata = AssistantSampleMetadata, - }); - - // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). + // Define the assistant + Assistant assistant = + await this.AssistantClient.CreateAssistantAsync( + this.Model, + name: "Host", + instructions: "Answer questions about the menu.", + metadata: SampleMetadata); + + // Create the agent KernelPlugin plugin = KernelPluginFactory.CreateFromType(); - agent.Kernel.Plugins.Add(plugin); + OpenAIAssistantAgent agent = new(assistant, this.AssistantClient, [plugin]); // Create a thread for the agent conversation. - string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); + string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata); // Respond to user input await InvokeAgentAsync(agent, threadId, "What is the special soup and its price?"); @@ -78,24 +67,20 @@ await OpenAIAssistantAgent.CreateAsync( [Fact] public async Task UseStreamingAssistantWithCodeInterpreterAsync() { - const string AgentName = "MathGuy"; - const string AgentInstructions = "Solve math problems with code."; - - // Define the agent - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - kernel: new(), - clientProvider: this.GetClientProvider(), - definition: new OpenAIAssistantDefinition(this.Model) - { - Instructions = AgentInstructions, - Name = AgentName, - EnableCodeInterpreter = true, - Metadata = AssistantSampleMetadata, - }); + // Define the assistant + Assistant assistant = + await this.AssistantClient.CreateAssistantAsync( + this.Model, + name: "MathGuy", + instructions: "Solve math problems with code.", + enableCodeInterpreter: true, + metadata: SampleMetadata); + + // Create the agent + OpenAIAssistantAgent agent = new(assistant, this.AssistantClient); // Create a thread for the agent conversation. 
- string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); + string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata); // Respond to user input await InvokeAgentAsync(agent, threadId, "Is 191 a prime number?"); diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs index 3937635203a4..3bb5a1d04c46 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs @@ -4,13 +4,14 @@ using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.PromptTemplates.Handlebars; using Microsoft.SemanticKernel.PromptTemplates.Liquid; +using OpenAI.Assistants; namespace Agents; /// /// Demonstrate parameterized template instruction for . /// -public class OpenAIAssistant_Templating(ITestOutputHelper output) : BaseAgentsTest(output) +public class OpenAIAssistant_Templating(ITestOutputHelper output) : BaseAssistantTest(output) { private readonly static (string Input, string? Style)[] s_inputs = [ @@ -23,23 +24,25 @@ private readonly static (string Input, string? Style)[] s_inputs = [Fact] public async Task InvokeAgentWithInstructionsAsync() { - // Instruction based template always proceseed by KernelPromptTemplateFactory - OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateAsync( - clientProvider: this.GetClientProvider(), - definition: new OpenAIAssistantDefinition(this.Model) - { - Instructions = - """ - Write a one verse poem on the requested topic in the styles of {{$style}}. - Always state the requested style of the poem. - """, - Metadata = AssistantSampleMetadata - }, - kernel: new Kernel(), - defaultArguments: new KernelArguments() - { - {"style", "haiku"} - }); + // Define the assistant + Assistant assistant = + await this.AssistantClient.CreateAssistantAsync( + this.Model, + instructions: + """ + Write a one verse poem on the requested topic in the styles of {{$style}}. + Always state the requested style of the poem. + """, + metadata: SampleMetadata); + + // Create the agent + OpenAIAssistantAgent agent = new(assistant, this.AssistantClient) + { + Arguments = + { + {"style", "haiku"} + }, + }; await InvokeAssistantAgentWithTemplateAsync(agent); } @@ -52,7 +55,9 @@ await InvokeAssistantAgentWithTemplateAsync( """ Write a one verse poem on the requested topic in the styles of {{$style}}. Always state the requested style of the poem. - """); + """, + PromptTemplateConfig.SemanticKernelTemplateFormat, + new KernelPromptTemplateFactory()); } [Fact] @@ -81,27 +86,30 @@ Always state the requested style of the poem. private async Task InvokeAssistantAgentWithTemplateAsync( string instructionTemplate, - string? templateFormat = null, - IPromptTemplateFactory? 
templateFactory = null) + string templateFormat, + IPromptTemplateFactory templateFactory) { - // Define the agent - OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateFromTemplateAsync( - clientProvider: this.GetClientProvider(), - capabilities: new OpenAIAssistantCapabilities(this.Model) - { - Metadata = AssistantSampleMetadata - }, - kernel: new Kernel(), - defaultArguments: new KernelArguments() - { - {"style", "haiku"} - }, - templateConfig: new PromptTemplateConfig - { - Template = instructionTemplate, - TemplateFormat = templateFormat, - }, - templateFactory); + PromptTemplateConfig config = new() + { + Template = instructionTemplate, + TemplateFormat = templateFormat, + }; + + // Define the assistant + Assistant assistant = + await this.AssistantClient.CreateAssistantFromTemplateAsync( + this.Model, + config, + metadata: SampleMetadata); + + // Create the agent + OpenAIAssistantAgent agent = new(assistant, this.AssistantClient, plugins: null, templateFactory, templateFormat) + { + Arguments = + { + {"style", "haiku"} + }, + }; await InvokeAssistantAgentWithTemplateAsync(agent); } @@ -109,7 +117,7 @@ private async Task InvokeAssistantAgentWithTemplateAsync( private async Task InvokeAssistantAgentWithTemplateAsync(OpenAIAssistantAgent agent) { // Create a thread for the agent conversation. - string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); + string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata); try { @@ -135,8 +143,8 @@ private async Task InvokeAssistantAgentWithTemplateAsync(OpenAIAssistantAgent ag } finally { - await agent.DeleteThreadAsync(threadId); - await agent.DeleteAsync(); + await this.AssistantClient.DeleteThreadAsync(threadId); + await this.AssistantClient.DeleteAssistantAsync(agent.Id); } } } diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletion.cs index e42600419a88..2763bb6101b0 100644 --- a/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletion.cs +++ b/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletion.cs @@ -8,7 +8,11 @@ namespace ChatCompletion; -// The following example shows how to use Semantic Kernel with Azure AI Inference / Azure AI Studio +/// +/// These examples demonstrate different ways of using chat completion with Azure Foundry or GitHub models. +/// Azure AI Foundry: https://ai.azure.com/explore/models +/// GitHub Models: https://github.com/marketplace?type=models +/// public class AzureAIInference_ChatCompletion(ITestOutputHelper output) : BaseTest(output) { [Fact] diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletionStreaming.cs index f7dbe9191167..8b164439f9e2 100644 --- a/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletionStreaming.cs +++ b/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletionStreaming.cs @@ -9,7 +9,9 @@ namespace ChatCompletion; /// -/// These examples demonstrate the ways different content types are streamed by OpenAI LLM via the chat completion service. +/// These examples demonstrate different ways of using streaming chat completion with Azure Foundry or GitHub models. 
+/// Azure AI Foundry: https://ai.azure.com/explore/models +/// GitHub Models: https://github.com/marketplace?type=models /// public class AzureAIInference_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) { @@ -120,36 +122,6 @@ private async Task StartStreamingChatAsync(IChatCompletionService chatCompletion await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); } - /// - /// Streams the message output from the chat completion service. - /// - /// The chat completion service instance. - /// The chat history instance. - /// The author role. - private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) - { - bool roleWritten = false; - string fullMessage = string.Empty; - - await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) - { - if (!roleWritten && chatUpdate.Role.HasValue) - { - Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); - roleWritten = true; - } - - if (chatUpdate.Content is { Length: > 0 }) - { - fullMessage += chatUpdate.Content; - Console.Write(chatUpdate.Content); - } - } - - Console.WriteLine("\n------------------------"); - chatHistory.AddMessage(authorRole, fullMessage); - } - /// /// Outputs the chat history by streaming the message output from the kernel. /// diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion.cs index c27625437779..f3a52b5c5428 100644 --- a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion.cs +++ b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion.cs @@ -8,27 +8,46 @@ namespace ChatCompletion; -// The following example shows how to use Semantic Kernel with Azure OpenAI API +/// +/// These examples demonstrate different ways of using chat completion with Azure OpenAI API. +/// public class AzureOpenAI_ChatCompletion(ITestOutputHelper output) : BaseTest(output) { + /// + /// Sample showing how to use with chat completion and chat prompt syntax. 
+ /// [Fact] public async Task ChatPromptAsync() { - Assert.NotNull(TestConfiguration.Ollama.ModelId); + Console.WriteLine("======== Azure Open AI - Chat Completion ========"); + + Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName); + Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint); StringBuilder chatPrompt = new(""" You are a librarian, expert about books Hi, I'm looking for book suggestions """); - var kernel = Kernel.CreateBuilder() - .AddAzureOpenAIChatCompletion( + var kernelBuilder = Kernel.CreateBuilder(); + if (string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.ApiKey)) + { + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + credentials: new DefaultAzureCredential(), + modelId: TestConfiguration.AzureOpenAI.ChatModelId); + } + else + { + kernelBuilder.AddAzureOpenAIChatCompletion( deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, endpoint: TestConfiguration.AzureOpenAI.Endpoint, apiKey: TestConfiguration.AzureOpenAI.ApiKey, - modelId: TestConfiguration.AzureOpenAI.ChatModelId) - .Build(); + modelId: TestConfiguration.AzureOpenAI.ChatModelId); + } + var kernel = kernelBuilder.Build(); var reply = await kernel.InvokePromptAsync(chatPrompt.ToString()); chatPrompt.AppendLine($""); @@ -39,40 +58,30 @@ public async Task ChatPromptAsync() Console.WriteLine(reply); } - [Fact] - public async Task ServicePromptAsync() - { - Console.WriteLine("======== Azure Open AI - Chat Completion ========"); - - AzureOpenAIChatCompletionService chatCompletionService = new( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - apiKey: TestConfiguration.AzureOpenAI.ApiKey, - modelId: TestConfiguration.AzureOpenAI.ChatModelId); - - await StartChatAsync(chatCompletionService); - } - /// - /// Sample showing how to use Azure Open AI Chat Completion with Azure Default Credential. - /// If local auth is disabled in the Azure Open AI deployment, you can use Azure Default Credential to authenticate. + /// Sample showing how to use directly with a . /// [Fact] - public async Task DefaultAzureCredentialSampleAsync() + public async Task ServicePromptAsync() { - Console.WriteLine("======== Azure Open AI - Chat Completion with Azure Default Credential ========"); + Console.WriteLine("======== Azure Open AI - Chat Completion ========"); - AzureOpenAIChatCompletionService chatCompletionService = new( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - credentials: new DefaultAzureCredential(), - modelId: TestConfiguration.AzureOpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName); + Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint); - await StartChatAsync(chatCompletionService); - } + AzureOpenAIChatCompletionService chatCompletionService = + string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.ApiKey) + ? 
new( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + credentials: new DefaultAzureCredential(), + modelId: TestConfiguration.AzureOpenAI.ChatModelId) + : new( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId); - private async Task StartChatAsync(IChatCompletionService chatGPT) - { Console.WriteLine("Chat content:"); Console.WriteLine("------------------------"); @@ -83,7 +92,7 @@ private async Task StartChatAsync(IChatCompletionService chatGPT) OutputLastMessage(chatHistory); // First assistant message - var reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + var reply = await chatCompletionService.GetChatMessageContentAsync(chatHistory); chatHistory.Add(reply); OutputLastMessage(chatHistory); @@ -92,7 +101,7 @@ private async Task StartChatAsync(IChatCompletionService chatGPT) OutputLastMessage(chatHistory); // Second assistant message - reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + reply = await chatCompletionService.GetChatMessageContentAsync(chatHistory); chatHistory.Add(reply); OutputLastMessage(chatHistory); } diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionStreaming.cs index 1ef3647623aa..29dfe10d6bd1 100644 --- a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionStreaming.cs +++ b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionStreaming.cs @@ -8,7 +8,7 @@ namespace ChatCompletion; /// -/// These examples demonstrate the ways different content types are streamed by Azure OpenAI via the chat completion service. +/// These examples demonstrate different ways of using streaming chat completion with Azure OpenAI API. /// public class AzureOpenAI_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) { @@ -128,28 +128,4 @@ private async Task StartStreamingChatAsync(IChatCompletionService chatCompletion // Second assistant message await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); } - - private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) - { - bool roleWritten = false; - string fullMessage = string.Empty; - - await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) - { - if (!roleWritten && chatUpdate.Role.HasValue) - { - Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); - roleWritten = true; - } - - if (chatUpdate.Content is { Length: > 0 }) - { - fullMessage += chatUpdate.Content; - Console.Write(chatUpdate.Content); - } - } - - Console.WriteLine("\n------------------------"); - chatHistory.AddMessage(authorRole, fullMessage); - } } diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionWithReasoning.cs b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionWithReasoning.cs new file mode 100644 index 000000000000..cc9660c4cfa2 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionWithReasoning.cs @@ -0,0 +1,102 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Text; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using OpenAI.Chat; + +namespace ChatCompletion; + +/// +/// These examples demonstrate different ways of using chat completion reasoning models with Azure OpenAI API. +/// +public class AzureOpenAI_ChatCompletionWithReasoning(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// Sample showing how to use with chat completion and chat prompt syntax. + /// + [Fact] + public async Task ChatPromptWithReasoningAsync() + { + Console.WriteLine("======== Azure Open AI - Chat Completion with Reasoning ========"); + + Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName); + Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint); + Assert.NotNull(TestConfiguration.AzureOpenAI.ApiKey); + + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId) + .Build(); + + // Create execution settings with high reasoning effort. + var executionSettings = new AzureOpenAIPromptExecutionSettings //OpenAIPromptExecutionSettings + { + // Flags Azure SDK to use the new token property. + SetNewMaxCompletionTokensEnabled = true, + MaxTokens = 2000, + // Note: reasoning effort is only available for reasoning models (at this moment o3-mini & o1 models) + ReasoningEffort = ChatReasoningEffortLevel.Low + }; + + // Create KernelArguments using the execution settings. + var kernelArgs = new KernelArguments(executionSettings); + + StringBuilder chatPrompt = new(""" + You are an expert software engineer, specialized in the Semantic Kernel SDK and NET framework + Hi, Please craft me an example code in .NET using Semantic Kernel that implements a chat loop . + """); + + // Invoke the prompt with high reasoning effort. + var reply = await kernel.InvokePromptAsync(chatPrompt.ToString(), kernelArgs); + + Console.WriteLine(reply); + } + + /// + /// Sample showing how to use directly with a . + /// + [Fact] + public async Task ServicePromptWithReasoningAsync() + { + Console.WriteLine("======== Azure Open AI - Chat Completion with Azure Default Credential with Reasoning ========"); + + Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName); + Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint); + Assert.NotNull(TestConfiguration.AzureOpenAI.ApiKey); + + IChatCompletionService chatCompletionService = new AzureOpenAIChatCompletionService( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId); + + // Create execution settings with high reasoning effort. + var executionSettings = new AzureOpenAIPromptExecutionSettings + { + // Flags Azure SDK to use the new token property. + SetNewMaxCompletionTokensEnabled = true, + MaxTokens = 2000, + // Note: reasoning effort is only available for reasoning models (at this moment o3-mini & o1 models) + ReasoningEffort = ChatReasoningEffortLevel.Low + }; + + // Create a ChatHistory and add messages. 
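+        // Reasoning models (at this moment o3-mini & o1) take their instructions via the developer role rather than the system role, hence AddDeveloperMessage below.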
+ var chatHistory = new ChatHistory(); + chatHistory.AddDeveloperMessage( + "You are an expert software engineer, specialized in the Semantic Kernel SDK and .NET framework."); + chatHistory.AddUserMessage( + "Hi, Please craft me an example code in .NET using Semantic Kernel that implements a chat loop."); + + // Instead of a prompt string, call GetChatMessageContentAsync with the chat history. + var reply = await chatCompletionService.GetChatMessageContentAsync( + chatHistory: chatHistory, + executionSettings: executionSettings); + + Console.WriteLine(reply); + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_CustomClient.cs b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_CustomClient.cs index eafae661111b..a76a954c1bfa 100644 --- a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_CustomClient.cs +++ b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_CustomClient.cs @@ -5,27 +5,34 @@ using Azure.AI.OpenAI; using Microsoft.SemanticKernel; +#pragma warning disable CA5399 // HttpClient is created without enabling CheckCertificateRevocationList + namespace ChatCompletion; +/// +/// This example shows a way of using a Custom HttpClient and HttpHandler with Azure OpenAI Connector to capture +/// the request Uri and Headers for each request. +/// public sealed class AzureOpenAI_CustomClient(ITestOutputHelper output) : BaseTest(output) { [Fact] - public async Task RunAsync() + public async Task UsingCustomHttpClientWithAzureOpenAI() { - Console.WriteLine("======== Using a custom AzureOpenAI client ========"); - Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint); Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName); Assert.NotNull(TestConfiguration.AzureOpenAI.ApiKey); + Console.WriteLine($"======== Azure Open AI - {nameof(UsingCustomHttpClientWithAzureOpenAI)} ========"); + // Create an HttpClient and include your custom header(s) - var httpClient = new HttpClient(); - httpClient.DefaultRequestHeaders.Add("My-Custom-Header", "My Custom Value"); + using var myCustomHttpHandler = new MyCustomClientHttpHandler(Output); + using var myCustomClient = new HttpClient(handler: myCustomHttpHandler); + myCustomClient.DefaultRequestHeaders.Add("My-Custom-Header", "My Custom Value"); // Configure AzureOpenAIClient to use the customized HttpClient var clientOptions = new AzureOpenAIClientOptions { - Transport = new HttpClientPipelineTransport(httpClient), + Transport = new HttpClientPipelineTransport(myCustomClient), NetworkTimeout = TimeSpan.FromSeconds(30), RetryPolicy = new ClientRetryPolicy() }; @@ -48,6 +55,27 @@ public async Task RunAsync() ); Console.WriteLine(result.GetValue()); - httpClient.Dispose(); + myCustomClient.Dispose(); + } + + /// + /// Normally you would use a custom HttpClientHandler to add custom logic to your custom http client + /// This uses the ITestOutputHelper to write the requested URI to the test output + /// + /// The to write the requested URI to the test output + private sealed class MyCustomClientHttpHandler(ITestOutputHelper output) : HttpClientHandler + { + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + output.WriteLine($"Requested URI: {request.RequestUri}"); + + request.Headers.Where(h => h.Key != "Authorization") + .ToList() + .ForEach(h => output.WriteLine($"{h.Key}: {string.Join(", ", h.Value)}")); + output.WriteLine("--------------------------------"); + + // Add custom logic here + return await base.SendAsync(request, cancellationToken); + } } } diff --git 
a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryExtensions.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryExtensions.cs index a386c7631e4e..7f41756970cc 100644 --- a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryExtensions.cs +++ b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryExtensions.cs @@ -19,7 +19,7 @@ internal static class ChatHistoryExtensions /// /// For simplicity only a single system message is supported in these examples. /// - internal static ChatMessageContent? GetSystemMessage(this ChatHistory chatHistory) + internal static ChatMessageContent? GetSystemMessage(this IReadOnlyList chatHistory) { return chatHistory.FirstOrDefault(m => m.Role == AuthorRole.System); } @@ -34,7 +34,9 @@ internal static class ChatHistoryExtensions /// An optional summary messageContent to include /// An optional message filter public static IEnumerable Extract( - this ChatHistory chatHistory, int startIndex, int? endIndex = null, + this IReadOnlyList chatHistory, + int startIndex, + int? endIndex = null, ChatMessageContent? systemMessage = null, ChatMessageContent? summaryMessage = null, Func? messageFilter = null) @@ -71,11 +73,11 @@ public static IEnumerable Extract( /// /// Compute the index truncation where truncation should begin using the current truncation threshold. /// - /// ChatHistory instance to be truncated - /// - /// + /// The source history. + /// Truncated size. + /// Truncation threshold. /// Flag indicating whether or not the chat history contains a system messageContent - public static int ComputeTruncationIndex(this ChatHistory chatHistory, int truncatedSize, int truncationThreshold, bool hasSystemMessage) + public static int ComputeTruncationIndex(this IReadOnlyList chatHistory, int truncatedSize, int truncationThreshold, bool hasSystemMessage) { if (chatHistory.Count <= truncationThreshold) { diff --git a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/MaxTokensChatHistoryReducer.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryMaxTokensReducer.cs similarity index 81% rename from dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/MaxTokensChatHistoryReducer.cs rename to dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryMaxTokensReducer.cs index b8a9dd27da36..91dfa97e242c 100644 --- a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/MaxTokensChatHistoryReducer.cs +++ b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryMaxTokensReducer.cs @@ -11,15 +11,15 @@ namespace ChatCompletion; /// /// This reducer requires that the ChatMessageContent.MetaData contains a TokenCount property. /// -public sealed class MaxTokensChatHistoryReducer : IChatHistoryReducer +public sealed class ChatHistoryMaxTokensReducer : IChatHistoryReducer { private readonly int _maxTokenCount; /// - /// Creates a new instance of . + /// Creates a new instance of . /// /// Max token count to send to the model. 
- public MaxTokensChatHistoryReducer(int maxTokenCount) + public ChatHistoryMaxTokensReducer(int maxTokenCount) { if (maxTokenCount <= 0) { @@ -30,7 +30,7 @@ public MaxTokensChatHistoryReducer(int maxTokenCount) } /// - public Task?> ReduceAsync(ChatHistory chatHistory, CancellationToken cancellationToken = default) + public Task?> ReduceAsync(IReadOnlyList chatHistory, CancellationToken cancellationToken = default) { var systemMessage = chatHistory.GetSystemMessage(); @@ -47,12 +47,13 @@ public MaxTokensChatHistoryReducer(int maxTokenCount) } #region private + /// /// Compute the index truncation where truncation should begin using the current truncation threshold. /// - /// ChatHistory instance to be truncated + /// Chat history to be truncated. /// The system message - private int ComputeTruncationIndex(ChatHistory chatHistory, ChatMessageContent? systemMessage) + private int ComputeTruncationIndex(IReadOnlyList chatHistory, ChatMessageContent? systemMessage) { var truncationIndex = -1; @@ -83,5 +84,6 @@ private int ComputeTruncationIndex(ChatHistory chatHistory, ChatMessageContent? return truncationIndex; } + #endregion } diff --git a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryReducerTests.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryReducerTests.cs index 53b4a6079283..2c1c4258aadc 100644 --- a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryReducerTests.cs +++ b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryReducerTests.cs @@ -11,31 +11,6 @@ namespace ChatCompletion; ///
public class ChatHistoryReducerTests(ITestOutputHelper output) : BaseTest(output) { - [Theory] - [InlineData(3, null, null, 5, 0)] - [InlineData(2, null, null, 1, 1)] - [InlineData(2, "SystemMessage", null, 2, 2)] - [InlineData(10, null, null, 3, 3)] - [InlineData(10, "SystemMessage", null, 3, 3)] - [InlineData(9, null, null, 5, 5)] - [InlineData(11, null, null, 5, 5)] - [InlineData(8, "SystemMessage", null, 5, 5)] - [InlineData(10, "SystemMessage", null, 5, 5)] - [InlineData(3, null, new int[] { 0 }, 3, 2)] - [InlineData(3, "SystemMessage", new int[] { 0 }, 4, 3)] - public async Task VerifyTruncatingChatHistoryReducerAsync(int messageCount, string? systemMessage, int[]? functionCallIndexes, int truncatedSize, int expectedSize) - { - // Arrange - var chatHistory = CreateHistoryWithUserInput(messageCount, systemMessage, functionCallIndexes); - var reducer = new TruncatingChatHistoryReducer(truncatedSize); - - // Act - var reducedHistory = await reducer.ReduceAsync(chatHistory); - - // Assert - VerifyReducedHistory(reducedHistory, ComputeExpectedMessages(chatHistory, expectedSize)); - } - [Theory] [InlineData(3, null, null, 100, 0)] [InlineData(3, "SystemMessage", null, 100, 0)] @@ -47,30 +22,7 @@ public async Task VerifyMaxTokensChatHistoryReducerAsync(int messageCount, strin { // Arrange var chatHistory = CreateHistoryWithUserInput(messageCount, systemMessage, functionCallIndexes, true); - var reducer = new MaxTokensChatHistoryReducer(maxTokens); - - // Act - var reducedHistory = await reducer.ReduceAsync(chatHistory); - - // Assert - VerifyReducedHistory(reducedHistory, ComputeExpectedMessages(chatHistory, expectedSize)); - } - - [Theory] - [InlineData(3, null, null, 5, 10, 0)] - [InlineData(10, null, null, 5, 10, 6)] - [InlineData(10, "SystemMessage", null, 5, 10, 6)] - [InlineData(10, null, new int[] { 1 }, 5, 10, 6)] - [InlineData(10, "SystemMessage", new int[] { 2 }, 5, 10, 6)] - public async Task VerifySummarizingChatHistoryReducerAsync(int messageCount, string? systemMessage, int[]? functionCallIndexes, int truncatedSize, int truncationThreshold, int expectedSize) - { - // Arrange - Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); - Assert.NotNull(TestConfiguration.OpenAI.ApiKey); - IChatCompletionService chatClient = new FakeChatCompletionService("The dialog consists of repetitive interaction where both the user and assistant exchange identical phrases in Latin."); - - var chatHistory = CreateHistoryWithUserInput(messageCount, systemMessage, functionCallIndexes, true); - var reducer = new SummarizingChatHistoryReducer(chatClient, truncatedSize, truncationThreshold); + var reducer = new ChatHistoryMaxTokensReducer(maxTokens); // Act var reducedHistory = await reducer.ReduceAsync(chatHistory); diff --git a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/IChatHistoryReducer.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/IChatHistoryReducer.cs deleted file mode 100644 index ff8b3ef0a56a..000000000000 --- a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/IChatHistoryReducer.cs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; - -namespace ChatCompletion; - -/// -/// Interface for reducing the chat history before sending it to the chat completion provider. -/// -public interface IChatHistoryReducer -{ - /// - /// Reduce the before sending it to the . - /// - /// Instance of to be reduced. - /// Cancellation token. 
- /// An optional which contains the reduced chat messages or null if chat history can be used as is. - Task?> ReduceAsync(ChatHistory chatHistory, CancellationToken cancellationToken); -} diff --git a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/SummarizingChatHistoryReducer.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/SummarizingChatHistoryReducer.cs deleted file mode 100644 index 153e2b50d182..000000000000 --- a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/SummarizingChatHistoryReducer.cs +++ /dev/null @@ -1,140 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; - -namespace ChatCompletion; - -/// -/// Implementation of which trim to the last N messages and summarizes the remainder. -/// -public sealed class SummarizingChatHistoryReducer : IChatHistoryReducer -{ - private readonly IChatCompletionService _chatClient; - private readonly int _truncatedSize; - private readonly int _summarizationThreshold; - private readonly string _summarizationPrompt; - private readonly Kernel _kernel; - - /// - /// The default summarization system instructions. - /// - private const string DefaultSummarizationPrompt = - """ - Provide a concise and complete summarization of the entire dialog that does not exceed 5 sentences - - This summary must always: - - Consider both user and assistant interactions - - Maintain continuity for the purpose of further dialog - - Include details from any existing summary - - Focus on the most significant aspects of the dialog - - This summary must never: - - Critique, correct, interpret, presume, or assume - - Identify faults, mistakes, misunderstanding, or correctness - - Analyze what has not occurred - - Exclude details from any existing summary - """; - - /// - /// Metadata key to indicate a summary message. - /// - private const string SummaryMetadataKey = "__summary__"; - - /// - /// Creates a new instance of . - /// - /// Instance of to use for summarization - /// The truncated size of the chat history after summarization is triggered - /// The threshold at which to trigger summarization - /// An optional prompt to use when summarizing the content - public SummarizingChatHistoryReducer(IChatCompletionService chatClient, int truncatedSize, int summarizationThreshold, string? summarizationPrompt = null) - { - if (chatClient is null) - { - throw new ArgumentException("Chat completion service must be specified.", nameof(chatClient)); - } - if (truncatedSize <= 0) - { - throw new ArgumentException("Truncated size must be greater than zero.", nameof(truncatedSize)); - } - if (summarizationThreshold < truncatedSize) - { - throw new ArgumentException($"Summarization threshold must be greater than truncatedSize: {truncatedSize}.", nameof(summarizationPrompt)); - } - - this._chatClient = chatClient; - this._truncatedSize = truncatedSize; - this._summarizationThreshold = summarizationThreshold; - this._summarizationPrompt = summarizationPrompt ?? 
DefaultSummarizationPrompt; - - var builder = Kernel.CreateBuilder(); - builder.Services.AddTransient((sp) => chatClient); - this._kernel = builder.Build(); - } - - /// - public async Task?> ReduceAsync(ChatHistory chatHistory, CancellationToken cancellationToken = default) - { - // index of the last summary message - int lastIndex = chatHistory - .Select((value, index) => new { value, index }) - .LastOrDefault(message => message.value.Metadata?.ContainsKey(SummaryMetadataKey) ?? false) - ?.index ?? -1; - - var systemMessage = chatHistory.GetSystemMessage(); - var hasSystemMessage = systemMessage is not null; - - // check are there messages to be summarized - var startIndex = -1; - var endIndex = chatHistory.Count - this._truncatedSize; - if (lastIndex == -1) - { - // have never summarized so use chat history size - if (chatHistory.Count < this._summarizationThreshold) - { - return null; - } - startIndex = 0 + (hasSystemMessage ? 1 : 0); - } - else - { - // have summarized so use chat history size minus position of last summary - if (chatHistory.Count - lastIndex < this._summarizationThreshold) - { - return null; - } - startIndex = lastIndex; - } - - var summaryMessage = await this.SummarizeAsync(chatHistory, startIndex, endIndex, cancellationToken); - - // insert summary into the original chat history - chatHistory.Insert(endIndex + 1, summaryMessage); - - IEnumerable? truncatedHistory = chatHistory.Extract(endIndex + 2, systemMessage: systemMessage, summaryMessage: summaryMessage); - return truncatedHistory; - } - - #region private - /// - /// Summarize messages starting at the truncation index. - /// - private async Task SummarizeAsync(ChatHistory chatHistory, int startIndex, int endIndex, CancellationToken cancellationToken) - { - // extract history for summarization - IEnumerable messagesToSummarize = - chatHistory.Extract(startIndex, endIndex: endIndex, - messageFilter: (m) => m.Items.Any(i => i is FunctionCallContent || i is FunctionResultContent)); - - // summarize the chat history - var summarizationRequest = new ChatHistory(this._summarizationPrompt); - summarizationRequest.AddRange(messagesToSummarize); - ChatMessageContent summaryContent = await this._chatClient.GetChatMessageContentAsync(summarizationRequest, cancellationToken: cancellationToken).ConfigureAwait(false); - summaryContent.Metadata = new Dictionary { { SummaryMetadataKey, true } }; - - return summaryContent; - } - #endregion -} diff --git a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/TruncatingChatHistoryReducer.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/TruncatingChatHistoryReducer.cs deleted file mode 100644 index 48dce62da8c4..000000000000 --- a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/TruncatingChatHistoryReducer.cs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; - -namespace ChatCompletion; - -/// -/// Implementation of which truncates chat history to the provide truncated size. -/// -/// -/// The truncation process is triggered when the list length is great than the truncated size. -/// -public sealed class TruncatingChatHistoryReducer : IChatHistoryReducer -{ - private readonly int _truncatedSize; - - /// - /// Creates a new instance of . - /// - /// The size of the chat history after truncation. 
- public TruncatingChatHistoryReducer(int truncatedSize) - { - if (truncatedSize <= 0) - { - throw new ArgumentException("Truncated size must be greater than zero.", nameof(truncatedSize)); - } - - this._truncatedSize = truncatedSize; - } - - /// - public Task?> ReduceAsync(ChatHistory chatHistory, CancellationToken cancellationToken = default) - { - var systemMessage = chatHistory.GetSystemMessage(); - var truncationIndex = ComputeTruncationIndex(chatHistory, this._truncatedSize, systemMessage is not null); - - IEnumerable? truncatedHistory = null; - - if (truncationIndex > 0) - { - truncatedHistory = chatHistory.Extract(truncationIndex, systemMessage: systemMessage); - } - - return Task.FromResult?>(truncatedHistory); - } - - #region private - - /// - /// Compute the index truncation where truncation should begin using the current truncation threshold. - /// - private static int ComputeTruncationIndex(ChatHistory chatHistory, int truncatedSize, bool hasSystemMessage) - { - truncatedSize -= hasSystemMessage ? 1 : 0; - if (chatHistory.Count <= truncatedSize) - { - return -1; - } - - // Compute the index of truncation target - var truncationIndex = chatHistory.Count - truncatedSize; - - // Skip function related content - while (truncationIndex < chatHistory.Count) - { - if (chatHistory[truncationIndex].Items.Any(i => i is FunctionCallContent or FunctionResultContent)) - { - truncationIndex++; - } - else - { - break; - } - } - - return truncationIndex; - } - #endregion -} diff --git a/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletion.cs new file mode 100644 index 000000000000..4cb1c57f60e4 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletion.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.HuggingFace; + +namespace ChatCompletion; + +/// +/// This example shows a way of using Hugging Face connector with HuggingFace Text Generation Inference (TGI) API. +/// Follow steps in to setup HuggingFace local Text Generation Inference HTTP server. +/// +/// Install HuggingFace TGI via docker +/// docker run -d --gpus all --shm-size 1g -p 8080:80 -v "c:\temp\huggingface:/data" ghcr.io/huggingface/text-generation-inference:latest --model-id teknium/OpenHermes-2.5-Mistral-7B +/// Run the examples +/// +/// +public class HuggingFace_ChatCompletion(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// This example shows how to setup LMStudio to use with the InvokeAsync (Non-Streaming). + /// + [Fact] +#pragma warning restore CS0419 // Ambiguous reference in cref attribute + public async Task UsingKernelNonStreamingWithHuggingFace() + { + Console.WriteLine($"======== HuggingFace - Chat Completion - {nameof(UsingKernelNonStreamingWithHuggingFace)} ========"); + + var endpoint = new Uri("http://localhost:8080"); // Update the endpoint if you chose a different port. (defaults to 8080) + var modelId = "teknium/OpenHermes-2.5-Mistral-7B"; // Update the modelId if you chose a different model. + + var kernel = Kernel.CreateBuilder() + .AddHuggingFaceChatCompletion( + model: modelId, + apiKey: null, + endpoint: endpoint) + .Build(); + + var prompt = @"Rewrite the text between triple backticks into a business mail. Use a professional tone, be clear and concise. + Sign the mail as AI Assistant. 
+ + Text: ```{{$input}}```"; + + var mailFunction = kernel.CreateFunctionFromPrompt(prompt, new HuggingFacePromptExecutionSettings + { + TopP = 0.5f, + MaxTokens = 1000, + }); + + var response = await kernel.InvokeAsync(mailFunction, new() { ["input"] = "Tell David that I'm going to finish the business plan by the end of the week." }); + Console.WriteLine(response); + } + + /// + /// Sample showing how to use directly with a . + /// + [Fact] + public async Task UsingServiceNonStreamingWithHuggingFace() + { + Console.WriteLine($"======== HuggingFace - Chat Completion - {nameof(UsingServiceNonStreamingWithHuggingFace)} ========"); + + // HuggingFace local HTTP server endpoint + var endpoint = new Uri("http://localhost:8080"); // Update the endpoint if you chose a different port. (defaults to 8080) + var modelId = "teknium/OpenHermes-2.5-Mistral-7B"; // Update the modelId if you chose a different model. + + Kernel kernel = Kernel.CreateBuilder() + .AddHuggingFaceChatCompletion( + model: modelId, + endpoint: endpoint) + .Build(); + + var chatService = kernel.GetRequiredService(); + + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + OutputLastMessage(chatHistory); + + // First assistant message + var reply = await chatService.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + OutputLastMessage(chatHistory); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + OutputLastMessage(chatHistory); + + // Second assistant message + reply = await chatService.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + OutputLastMessage(chatHistory); + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletionStreaming.cs new file mode 100644 index 000000000000..d508cb64060d --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletionStreaming.cs @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.HuggingFace; + +namespace ChatCompletion; + +/// +/// This example shows a way of using Hugging Face connector with HuggingFace Text Generation Inference (TGI) API. +/// Follow steps in to setup HuggingFace local Text Generation Inference HTTP server. +/// +/// Install HuggingFace TGI via docker +/// docker run -d --gpus all --shm-size 1g -p 8080:80 -v "c:\temp\huggingface:/data" ghcr.io/huggingface/text-generation-inference:latest --model-id teknium/OpenHermes-2.5-Mistral-7B +/// Run the examples +/// +/// +public class HuggingFace_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// Sample showing how to use directly with a . + /// + [Fact] + public async Task UsingServiceStreamingWithHuggingFace() + { + Console.WriteLine($"======== HuggingFace - Chat Completion - {nameof(UsingServiceStreamingWithHuggingFace)} ========"); + + // HuggingFace local HTTP server endpoint + var endpoint = new Uri("http://localhost:8080"); // Update the endpoint if you chose a different port. 
(defaults to 8080) + var modelId = "teknium/OpenHermes-2.5-Mistral-7B"; // Update the modelId if you chose a different model. + + Kernel kernel = Kernel.CreateBuilder() + .AddHuggingFaceChatCompletion( + model: modelId, + endpoint: endpoint) + .Build(); + + var chatService = kernel.GetRequiredService(); + + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + OutputLastMessage(chatHistory); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + OutputLastMessage(chatHistory); + + // First assistant message + await StreamMessageOutputAsync(chatService, chatHistory, AuthorRole.Assistant); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); + OutputLastMessage(chatHistory); + + // Second assistant message + await StreamMessageOutputAsync(chatService, chatHistory, AuthorRole.Assistant); + } + + /// + /// This example shows how to setup LMStudio to use with the InvokeAsync (Non-Streaming). + /// + [Fact] + public async Task UsingKernelStreamingWithHuggingFace() + { + Console.WriteLine($"======== HuggingFace - Chat Completion - {nameof(UsingKernelStreamingWithHuggingFace)} ========"); + + var endpoint = new Uri("http://localhost:8080"); // Update the endpoint if you chose a different port. (defaults to 8080) + var modelId = "teknium/OpenHermes-2.5-Mistral-7B"; // Update the modelId if you chose a different model. + + var kernel = Kernel.CreateBuilder() + .AddHuggingFaceChatCompletion( + model: modelId, + apiKey: null, + endpoint: endpoint) + .Build(); + + var prompt = @"Rewrite the text between triple backticks into a business mail. Use a professional tone, be clear and concise. + Sign the mail as AI Assistant. + + Text: ```{{$input}}```"; + + var mailFunction = kernel.CreateFunctionFromPrompt(prompt, new HuggingFacePromptExecutionSettings + { + TopP = 0.5f, + MaxTokens = 1000, + }); + + await foreach (var word in kernel.InvokeStreamingAsync(mailFunction, new() { ["input"] = "Tell David that I'm going to finish the business plan by the end of the week." })) + { + Console.WriteLine(word); + } + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/HybridCompletion_Fallback.cs b/dotnet/samples/Concepts/ChatCompletion/HybridCompletion_Fallback.cs new file mode 100644 index 000000000000..d1fa8baa257f --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/HybridCompletion_Fallback.cs @@ -0,0 +1,279 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.ClientModel; +using System.ClientModel.Primitives; +using System.Net; +using System.Runtime.CompilerServices; +using Azure.AI.OpenAI; +using Azure.Identity; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; + +namespace ChatCompletion; + +/// +/// This example demonstrates how an AI application can use code to attempt inference with the first available chat client in the list, falling back to the next client if the previous one fails. +/// The class handles all the fallback complexities, abstracting them away from the application code. +/// Since the class implements the interface, the chat client used for inference the application can be easily replaced with the . 
+/// +/// +/// The class is useful when an application utilizes multiple models and needs to switch between them based on the situation. +/// For example, the application may use a cloud-based model by default and seamlessly fall back to a local model when the cloud model is unavailable (e.g., in offline mode), and vice versa. +/// Additionally, the application can enhance resilience by employing several cloud models, falling back to the next one if the previous model fails. +/// +public class HybridCompletion_Fallback(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// This example demonstrates how to perform completion using the , which falls back to an available model when the primary model is unavailable. + /// + [Fact] + public async Task FallbackToAvailableModelAsync() + { + IKernelBuilder kernelBuilder = Kernel.CreateBuilder(); + + // Create and register an unavailable chat client that fails with 503 Service Unavailable HTTP status code + kernelBuilder.Services.AddSingleton(CreateUnavailableOpenAIChatClient()); + + // Create and register a cloud available chat client + kernelBuilder.Services.AddSingleton(CreateAzureOpenAIChatClient()); + + // Create and register fallback chat client that will fallback to the available chat client when unavailable chat client fails + kernelBuilder.Services.AddSingleton((sp) => + { + IEnumerable chatClients = sp.GetServices(); + + return new FallbackChatClient(chatClients.ToList()).AsChatCompletionService(); + }); + + Kernel kernel = kernelBuilder.Build(); + kernel.ImportPluginFromFunctions("Weather", [KernelFunctionFactory.CreateFromMethod(() => "It's sunny", "GetWeather")]); + + AzureOpenAIPromptExecutionSettings settings = new() + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + }; + + FunctionResult result = await kernel.InvokePromptAsync("Do I need an umbrella?", new(settings)); + + Output.WriteLine(result); + } + + /// + /// This example demonstrates how to perform streaming completion using the , which falls back to an available model when the primary model is unavailable. 
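+    /// The unavailable client is configured to fail with 503 Service Unavailable, so the fallback transparently retries the request against the available Azure OpenAI client before the updates are streamed.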
+ /// + [Fact] + public async Task FallbackToAvailableModelStreamingAsync() + { + // Create an unavailable chat client that fails with 503 Service Unavailable HTTP status code + IChatClient unavailableChatClient = CreateUnavailableOpenAIChatClient(); + + // Create a cloud available chat client + IChatClient availableChatClient = CreateAzureOpenAIChatClient(); + + // Create a fallback chat client that will fallback to the available chat client when unavailable chat client fails + IChatCompletionService fallbackCompletionService = new FallbackChatClient([unavailableChatClient, availableChatClient]).AsChatCompletionService(); + + Kernel kernel = new(); + kernel.ImportPluginFromFunctions("Weather", [KernelFunctionFactory.CreateFromMethod(() => "It's sunny", "GetWeather")]); + + AzureOpenAIPromptExecutionSettings settings = new() + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + }; + + IAsyncEnumerable result = fallbackCompletionService.GetStreamingChatMessageContentsAsync("Do I need an umbrella?", settings, kernel); + + await foreach (var update in result) + { + Output.WriteLine(update); + } + } + + private static IChatClient CreateUnavailableOpenAIChatClient() + { + AzureOpenAIClientOptions options = new() + { + Transport = new HttpClientPipelineTransport( + new HttpClient + ( + new StubHandler(new HttpClientHandler(), async (response) => { response.StatusCode = System.Net.HttpStatusCode.ServiceUnavailable; }) + ) + ) + }; + + IChatClient openAiClient = new AzureOpenAIClient(new Uri(TestConfiguration.AzureOpenAI.Endpoint), new AzureCliCredential(), options).AsChatClient(TestConfiguration.AzureOpenAI.ChatDeploymentName); + + return new ChatClientBuilder(openAiClient) + .UseFunctionInvocation() + .Build(); + } + + private static IChatClient CreateAzureOpenAIChatClient() + { + IChatClient chatClient = new AzureOpenAIClient(new Uri(TestConfiguration.AzureOpenAI.Endpoint), new AzureCliCredential()).AsChatClient(TestConfiguration.AzureOpenAI.ChatDeploymentName); + + return new ChatClientBuilder(chatClient) + .UseFunctionInvocation() + .Build(); + } + + protected sealed class StubHandler(HttpMessageHandler innerHandler, Func handler) : DelegatingHandler(innerHandler) + { + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + var result = await base.SendAsync(request, cancellationToken); + + await handler(result); + + return result; + } + } +} + +/// +/// Represents a chat client that performs inference using the first available chat client in the list, falling back to the next one if the previous client fails. +/// +internal sealed class FallbackChatClient : IChatClient +{ + private readonly IList _chatClients; + private static readonly List s_defaultFallbackStatusCodes = new() + { + HttpStatusCode.InternalServerError, + HttpStatusCode.NotImplemented, + HttpStatusCode.BadGateway, + HttpStatusCode.ServiceUnavailable, + HttpStatusCode.GatewayTimeout + }; + + /// + /// Initializes a new instance of the class. + /// + /// The chat clients to fallback to. + public FallbackChatClient(IList chatClients) + { + this._chatClients = chatClients?.Any() == true ? chatClients : throw new ArgumentException("At least one chat client must be provided.", nameof(chatClients)); + } + + /// + /// Gets or sets the HTTP status codes that will trigger the fallback to the next chat client. + /// + public List? 
FallbackStatusCodes { get; set; } + + /// + public ChatClientMetadata Metadata => new(); + + /// + public async Task GetResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + { + for (int i = 0; i < this._chatClients.Count; i++) + { + var chatClient = this._chatClients.ElementAt(i); + + try + { + return await chatClient.GetResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + if (this.ShouldFallbackToNextClient(ex, i, this._chatClients.Count)) + { + continue; + } + + throw; + } + } + + // If all clients fail, throw an exception or return a default value + throw new InvalidOperationException("Neither of the chat clients could complete the inference."); + } + + /// + public async IAsyncEnumerable GetStreamingResponseAsync(IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + for (int i = 0; i < this._chatClients.Count; i++) + { + var chatClient = this._chatClients.ElementAt(i); + + IAsyncEnumerable completionStream = chatClient.GetStreamingResponseAsync(chatMessages, options, cancellationToken); + + ConfiguredCancelableAsyncEnumerable.Enumerator enumerator = completionStream.ConfigureAwait(false).GetAsyncEnumerator(); + + try + { + try + { + // Move to the first update to reveal any exceptions. + if (!await enumerator.MoveNextAsync()) + { + yield break; + } + } + catch (Exception ex) + { + if (this.ShouldFallbackToNextClient(ex, i, this._chatClients.Count)) + { + continue; + } + + throw; + } + + // Yield the first update. + yield return enumerator.Current; + + // Yield the rest of the updates. + while (await enumerator.MoveNextAsync()) + { + yield return enumerator.Current; + } + + // The stream has ended so break the while loop. + break; + } + finally + { + await enumerator.DisposeAsync(); + } + } + } + + private bool ShouldFallbackToNextClient(Exception ex, int clientIndex, int numberOfClients) + { + // If the exception is thrown by the last client then don't fallback. + if (clientIndex == numberOfClients - 1) + { + return false; + } + + HttpStatusCode? statusCode = ex switch + { + HttpOperationException operationException => operationException.StatusCode, + HttpRequestException httpRequestException => httpRequestException.StatusCode, + ClientResultException clientResultException => (HttpStatusCode?)clientResultException.Status, + _ => throw new InvalidOperationException($"Unsupported exception type: {ex.GetType()}."), + }; + + if (statusCode is null) + { + throw new InvalidOperationException("The exception does not contain an HTTP status code."); + } + + return (this.FallbackStatusCodes ?? s_defaultFallbackStatusCodes).Contains(statusCode!.Value); + } + + /// + public void Dispose() + { + // We don't own the chat clients so we don't dispose them. + } + + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + return null; + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletion.cs new file mode 100644 index 000000000000..97562f75c847 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletion.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; + +namespace ChatCompletion; + +/// +/// This example shows a way of using OpenAI connector with other APIs that supports the same ChatCompletion API standard from OpenAI. +/// +/// Install LMStudio Platform in your environment (As of now: 0.3.10) +/// Open LM Studio +/// Search and Download Llama2 model or any other +/// Update the modelId parameter with the model llm name loaded (i.e: llama-2-7b-chat) +/// Start the Local Server on http://localhost:1234 +/// Run the examples +/// +/// +public class LMStudio_ChatCompletion(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// This example shows how to setup LMStudio to use with the InvokeAsync (Non-Streaming). + /// + [Fact] +#pragma warning restore CS0419 // Ambiguous reference in cref attribute + public async Task UsingKernelStreamingWithLMStudio() + { + Console.WriteLine($"======== LM Studio - Chat Completion - {nameof(UsingKernelStreamingWithLMStudio)} ========"); + + var modelId = "llama-2-7b-chat"; // Update the modelId if you chose a different model. + var endpoint = new Uri("http://localhost:1234/v1"); // Update the endpoint if you chose a different port. + + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: modelId, + apiKey: null, + endpoint: endpoint) + .Build(); + + var prompt = @"Rewrite the text between triple backticks into a business mail. Use a professional tone, be clear and concise. + Sign the mail as AI Assistant. + + Text: ```{{$input}}```"; + + var mailFunction = kernel.CreateFunctionFromPrompt(prompt, new OpenAIPromptExecutionSettings + { + TopP = 0.5, + MaxTokens = 1000, + }); + + var response = await kernel.InvokeAsync(mailFunction, new() { ["input"] = "Tell David that I'm going to finish the business plan by the end of the week." }); + Console.WriteLine(response); + } + + /// + /// Sample showing how to use directly with a . + /// + [Fact] + public async Task UsingServiceNonStreamingWithLMStudio() + { + Console.WriteLine($"======== LM Studio - Chat Completion - {nameof(UsingServiceNonStreamingWithLMStudio)} ========"); + + var modelId = "llama-2-7b-chat"; // Update the modelId if you chose a different model. + var endpoint = new Uri("http://localhost:1234/v1"); // Update the endpoint if you chose a different port. 
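+        // LM Studio exposes an OpenAI-compatible endpoint, so the standard OpenAI chat completion service is pointed at it with a null API key.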
+ + OpenAIChatCompletionService chatService = new(modelId: modelId, apiKey: null, endpoint: endpoint); + + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + OutputLastMessage(chatHistory); + + // First assistant message + var reply = await chatService.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + OutputLastMessage(chatHistory); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + OutputLastMessage(chatHistory); + + // Second assistant message + reply = await chatService.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + OutputLastMessage(chatHistory); + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletionStreaming.cs new file mode 100644 index 000000000000..8ac827d41120 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletionStreaming.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; + +namespace ChatCompletion; + +/// +/// This example shows a way of using OpenAI connector with other APIs that supports the same ChatCompletion API standard from OpenAI. +/// +/// Install LMStudio Platform in your environment (As of now: 0.3.10) +/// Open LM Studio +/// Search and Download Llama2 model or any other +/// Update the modelId parameter with the model llm name loaded (i.e: llama-2-7b-chat) +/// Start the Local Server on http://localhost:1234 +/// Run the examples +/// +/// +public class LMStudio_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// Sample showing how to use streaming directly with a . + /// + [Fact] + public async Task UsingServiceStreamingWithLMStudio() + { + Console.WriteLine($"======== LM Studio - Chat Completion - {nameof(UsingServiceStreamingWithLMStudio)} ========"); + + var modelId = "llama-2-7b-chat"; // Update the modelId if you chose a different model. + var endpoint = new Uri("http://localhost:1234/v1"); // Update the endpoint if you chose a different port. 
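+        // The streaming calls below go directly through the OpenAIChatCompletionService rather than the kernel.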
+ + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: modelId, + apiKey: null, + endpoint: endpoint) + .Build(); + + OpenAIChatCompletionService chatCompletionService = new(modelId: modelId, apiKey: null, endpoint: endpoint); + + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + OutputLastMessage(chatHistory); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + OutputLastMessage(chatHistory); + + // First assistant message + await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); + OutputLastMessage(chatHistory); + + // Second assistant message + await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); + } + + /// + /// This example shows how to setup LMStudio to use with the Kernel InvokeAsync (Streaming). + /// + [Fact] + public async Task UsingKernelStreamingWithLMStudio() + { + Console.WriteLine($"======== LM Studio - Chat Completion - {nameof(UsingKernelStreamingWithLMStudio)} ========"); + + var modelId = "llama-2-7b-chat"; // Update the modelId if you chose a different model. + var endpoint = new Uri("http://localhost:1234/v1"); // Update the endpoint if you chose a different port. + + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: modelId, + apiKey: null, + endpoint: endpoint) + .Build(); + + var prompt = @"Rewrite the text between triple backticks into a business mail. Use a professional tone, be clear and concise. + Sign the mail as AI Assistant. + + Text: ```{{$input}}```"; + + var mailFunction = kernel.CreateFunctionFromPrompt(prompt, new OpenAIPromptExecutionSettings + { + TopP = 0.5, + MaxTokens = 1000, + }); + + await foreach (var word in kernel.InvokeStreamingAsync(mailFunction, new() { ["input"] = "Tell David that I'm going to finish the business plan by the end of the week." 
})) + { + Console.WriteLine(word); + } + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/MultipleProviders_ChatHistoryReducer.cs b/dotnet/samples/Concepts/ChatCompletion/MultipleProviders_ChatHistoryReducer.cs index 720ee8bff60e..e3ee157b35b1 100644 --- a/dotnet/samples/Concepts/ChatCompletion/MultipleProviders_ChatHistoryReducer.cs +++ b/dotnet/samples/Concepts/ChatCompletion/MultipleProviders_ChatHistoryReducer.cs @@ -62,7 +62,7 @@ public async Task ShowHowToReduceChatHistoryToLastMessageAsync() apiKey: TestConfiguration.OpenAI.ApiKey); var truncatedSize = 2; // keep system message and last user message only - IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new TruncatingChatHistoryReducer(truncatedSize)); + IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new ChatHistoryTruncationReducer(truncatedSize)); var chatHistory = new ChatHistory("You are a librarian and expert on books about cities"); @@ -105,7 +105,7 @@ public async Task ShowHowToReduceChatHistoryToLastMessageStreamingAsync() apiKey: TestConfiguration.OpenAI.ApiKey); var truncatedSize = 2; // keep system message and last user message only - IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new TruncatingChatHistoryReducer(truncatedSize)); + IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new ChatHistoryTruncationReducer(truncatedSize)); var chatHistory = new ChatHistory("You are a librarian and expert on books about cities"); @@ -151,7 +151,7 @@ public async Task ShowHowToReduceChatHistoryToMaxTokensAsync() OpenAIChatCompletionService openAiChatService = new( modelId: TestConfiguration.OpenAI.ChatModelId, apiKey: TestConfiguration.OpenAI.ApiKey); - IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new MaxTokensChatHistoryReducer(100)); + IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new ChatHistoryMaxTokensReducer(100)); var chatHistory = new ChatHistory(); chatHistory.AddSystemMessageWithTokenCount("You are an expert on the best restaurants in the world. Keep responses short."); @@ -194,7 +194,7 @@ public async Task ShowHowToReduceChatHistoryWithSummarizationAsync() OpenAIChatCompletionService openAiChatService = new( modelId: TestConfiguration.OpenAI.ChatModelId, apiKey: TestConfiguration.OpenAI.ApiKey); - IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new SummarizingChatHistoryReducer(openAiChatService, 2, 4)); + IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new ChatHistorySummarizationReducer(openAiChatService, 2, 4)); var chatHistory = new ChatHistory("You are an expert on the best restaurants in every city. Answer for the city the user has asked about."); diff --git a/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs index 79b72003ee89..307edbe4b229 100644 --- a/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs +++ b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs @@ -8,7 +8,9 @@ namespace ChatCompletion; -// The following example shows how to use Semantic Kernel with Ollama Chat Completion API +/// +/// These examples demonstrate different ways of using chat completion with Ollama API. 
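+/// Ollama: https://ollama.com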
+/// public class Ollama_ChatCompletion(ITestOutputHelper output) : BaseTest(output) { /// @@ -85,7 +87,7 @@ public async Task ServicePromptWithInnerContentAsync() // Assistant message details // Ollama Sharp does not support non-streaming and always perform streaming calls, for this reason, the inner content is always a list of chunks. - var replyInnerContent = reply.InnerContent as List; + var replyInnerContent = reply.InnerContent as ChatDoneResponseStream; OutputInnerContent(replyInnerContent!); } @@ -146,42 +148,35 @@ public async Task ChatPromptWithInnerContentAsync() // Ollama Sharp does not support non-streaming and always perform streaming calls, for this reason, the inner content of a non-streaming result is a list of the generated chunks. var messageContent = functionResult.GetValue(); // Retrieves underlying chat message content from FunctionResult. - var replyInnerContent = messageContent!.InnerContent as List; // Retrieves inner content from ChatMessageContent. + var replyInnerContent = messageContent!.InnerContent as ChatDoneResponseStream; // Retrieves inner content from ChatMessageContent. OutputInnerContent(replyInnerContent!); } /// - /// Retrieve extra information from each streaming chunk response in a list of chunks. + /// Retrieve extra information from the final response. /// - /// List of streaming chunks provided as inner content of a chat message + /// The complete OllamaSharp response provided as inner content of a chat message /// /// This is a breaking glass scenario, any attempt on running with different versions of OllamaSharp library that introduces breaking changes /// may cause breaking changes in the code below. /// - private void OutputInnerContent(List innerContent) + private void OutputInnerContent(ChatDoneResponseStream innerContent) { - Console.WriteLine($"Model: {innerContent![0].Model}"); // Model doesn't change per chunk, so we can get it from the first chunk only - Console.WriteLine(" -- Chunk changing data -- "); - - innerContent.ForEach(streamChunk => - { - Console.WriteLine($"Message role: {streamChunk.Message.Role}"); - Console.WriteLine($"Message content: {streamChunk.Message.Content}"); - Console.WriteLine($"Created at: {streamChunk.CreatedAt}"); - Console.WriteLine($"Done: {streamChunk.Done}"); - /// The last message in the chunk is a type with additional metadata. 
- if (streamChunk is ChatDoneResponseStream doneStreamChunk) - { - Console.WriteLine($"Done Reason: {doneStreamChunk.DoneReason}"); - Console.WriteLine($"Eval count: {doneStreamChunk.EvalCount}"); - Console.WriteLine($"Eval duration: {doneStreamChunk.EvalDuration}"); - Console.WriteLine($"Load duration: {doneStreamChunk.LoadDuration}"); - Console.WriteLine($"Total duration: {doneStreamChunk.TotalDuration}"); - Console.WriteLine($"Prompt eval count: {doneStreamChunk.PromptEvalCount}"); - Console.WriteLine($"Prompt eval duration: {doneStreamChunk.PromptEvalDuration}"); - } - Console.WriteLine("------------------------"); - }); + Console.WriteLine($$""" + Model: {{innerContent.Model}} + Message role: {{innerContent.Message.Role}} + Message content: {{innerContent.Message.Content}} + Created at: {{innerContent.CreatedAt}} + Done: {{innerContent.Done}} + Done Reason: {{innerContent.DoneReason}} + Eval count: {{innerContent.EvalCount}} + Eval duration: {{innerContent.EvalDuration}} + Load duration: {{innerContent.LoadDuration}} + Total duration: {{innerContent.TotalDuration}} + Prompt eval count: {{innerContent.PromptEvalCount}} + Prompt eval duration: {{innerContent.PromptEvalDuration}} + ------------------------ + """); } } diff --git a/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs index 9d6e8cf9e845..1713d9a03052 100644 --- a/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs +++ b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs @@ -9,7 +9,7 @@ namespace ChatCompletion; /// -/// These examples demonstrate the ways different content types are streamed by Ollama via the chat completion service. +/// These examples demonstrate different ways of using chat completion with Ollama API. /// public class Ollama_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) { @@ -17,11 +17,11 @@ public class Ollama_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest /// This example demonstrates chat completion streaming using Ollama. 
/// [Fact] - public Task StreamChatAsync() + public async Task UsingServiceStreamingWithOllama() { Assert.NotNull(TestConfiguration.Ollama.ModelId); - Console.WriteLine("======== Ollama - Chat Completion Streaming ========"); + Console.WriteLine($"======== Ollama - Chat Completion - {nameof(UsingServiceStreamingWithOllama)} ========"); using var ollamaClient = new OllamaApiClient( uriString: TestConfiguration.Ollama.Endpoint, @@ -29,22 +29,39 @@ public Task StreamChatAsync() var chatService = ollamaClient.AsChatCompletionService(); - return this.StartStreamingChatAsync(chatService); + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + this.OutputLastMessage(chatHistory); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + this.OutputLastMessage(chatHistory); + + // First assistant message + await StreamMessageOutputAsync(chatService, chatHistory, AuthorRole.Assistant); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); + this.OutputLastMessage(chatHistory); + + // Second assistant message + await StreamMessageOutputAsync(chatService, chatHistory, AuthorRole.Assistant); } /// - /// This example demonstrates retrieving extra information chat completion streaming using Ollama. + /// This example demonstrates retrieving underlying library information through chat completion streaming inner contents. /// /// - /// This is a breaking glass scenario, any attempt on running with different versions of OllamaSharp library that introduces breaking changes - /// may cause breaking changes in the code below. + /// This is a breaking glass scenario and is more susceptible to break on newer versions of OllamaSharp library. /// [Fact] - public async Task StreamChatWithInnerContentAsync() + public async Task UsingServiceStreamingInnerContentsWithOllama() { Assert.NotNull(TestConfiguration.Ollama.ModelId); - Console.WriteLine("======== Ollama - Chat Completion Streaming ========"); + Console.WriteLine($"======== Ollama - Chat Completion - {nameof(UsingServiceStreamingInnerContentsWithOllama)} ========"); using var ollamaClient = new OllamaApiClient( uriString: TestConfiguration.Ollama.Endpoint, @@ -70,13 +87,15 @@ public async Task StreamChatWithInnerContentAsync() } /// - /// Demonstrates how you can template a chat history call while using the kernel for invocation. + /// Demonstrates how you can template a chat history call while using the for invocation. /// [Fact] - public async Task StreamChatPromptAsync() + public async Task UsingKernelChatPromptStreamingWithOllama() { Assert.NotNull(TestConfiguration.Ollama.ModelId); + Console.WriteLine($"======== Ollama - Chat Completion - {nameof(UsingKernelChatPromptStreamingWithOllama)} ========"); + StringBuilder chatPrompt = new(""" You are a librarian, expert about books Hi, I'm looking for book suggestions @@ -99,17 +118,18 @@ public async Task StreamChatPromptAsync() } /// - /// Demonstrates how you can template a chat history call and get extra information from the response while using the kernel for invocation. + /// This example demonstrates retrieving underlying library information through chat completion streaming inner contents. 
/// /// - /// This is a breaking glass scenario, any attempt on running with different versions of OllamaSharp library that introduces breaking changes - /// may cause breaking changes in the code below. + /// This is a breaking glass scenario and is more susceptible to break on newer versions of OllamaSharp library. /// [Fact] - public async Task StreamChatPromptWithInnerContentAsync() + public async Task UsingKernelChatPromptStreamingInnerContentsWithOllama() { Assert.NotNull(TestConfiguration.Ollama.ModelId); + Console.WriteLine($"======== Ollama - Chat Completion - {nameof(UsingKernelChatPromptStreamingInnerContentsWithOllama)} ========"); + StringBuilder chatPrompt = new(""" You are a librarian, expert about books Hi, I'm looking for book suggestions @@ -139,11 +159,11 @@ public async Task StreamChatPromptWithInnerContentAsync() /// and alternatively via the StreamingChatMessageContent.Items property. ///
[Fact] - public async Task StreamTextFromChatAsync() + public async Task UsingStreamingTextFromChatCompletionWithOllama() { Assert.NotNull(TestConfiguration.Ollama.ModelId); - Console.WriteLine("======== Stream Text from Chat Content ========"); + Console.WriteLine($"======== Ollama - Chat Completion - {nameof(UsingStreamingTextFromChatCompletionWithOllama)} ========"); using var ollamaClient = new OllamaApiClient( uriString: TestConfiguration.Ollama.Endpoint, @@ -168,53 +188,6 @@ public async Task StreamTextFromChatAsync() } } - private async Task StartStreamingChatAsync(IChatCompletionService chatCompletionService) - { - Console.WriteLine("Chat content:"); - Console.WriteLine("------------------------"); - - var chatHistory = new ChatHistory("You are a librarian, expert about books"); - this.OutputLastMessage(chatHistory); - - // First user message - chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); - this.OutputLastMessage(chatHistory); - - // First assistant message - await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); - - // Second user message - chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); - this.OutputLastMessage(chatHistory); - - // Second assistant message - await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); - } - - private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) - { - bool roleWritten = false; - string fullMessage = string.Empty; - - await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) - { - if (!roleWritten && chatUpdate.Role.HasValue) - { - Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); - roleWritten = true; - } - - if (chatUpdate.Content is { Length: > 0 }) - { - fullMessage += chatUpdate.Content; - Console.Write(chatUpdate.Content); - } - } - - Console.WriteLine("\n------------------------"); - chatHistory.AddMessage(authorRole, fullMessage); - } - private async Task StreamMessageOutputFromKernelAsync(Kernel kernel, string prompt) { bool roleWritten = false; @@ -249,22 +222,26 @@ private async Task StreamMessageOutputFromKernelAsync(Kernel kernel, str /// private void OutputInnerContent(ChatResponseStream streamChunk) { - Console.WriteLine($"Model: {streamChunk.Model}"); - Console.WriteLine($"Message role: {streamChunk.Message.Role}"); - Console.WriteLine($"Message content: {streamChunk.Message.Content}"); - Console.WriteLine($"Created at: {streamChunk.CreatedAt}"); - Console.WriteLine($"Done: {streamChunk.Done}"); + Console.WriteLine($$""" + Model: {{streamChunk.Model}} + Message role: {{streamChunk.Message.Role}} + Message content: {{streamChunk.Message.Content}} + Created at: {{streamChunk.CreatedAt}} + Done: {{streamChunk.Done}} + """); /// The last message in the chunk is a type with additional metadata. 
if (streamChunk is ChatDoneResponseStream doneStream) { - Console.WriteLine($"Done Reason: {doneStream.DoneReason}"); - Console.WriteLine($"Eval count: {doneStream.EvalCount}"); - Console.WriteLine($"Eval duration: {doneStream.EvalDuration}"); - Console.WriteLine($"Load duration: {doneStream.LoadDuration}"); - Console.WriteLine($"Total duration: {doneStream.TotalDuration}"); - Console.WriteLine($"Prompt eval count: {doneStream.PromptEvalCount}"); - Console.WriteLine($"Prompt eval duration: {doneStream.PromptEvalDuration}"); + Console.WriteLine($$""" + Done Reason: {{doneStream.DoneReason}} + Eval count: {{doneStream.EvalCount}} + Eval duration: {{doneStream.EvalDuration}} + Load duration: {{doneStream.LoadDuration}} + Total duration: {{doneStream.TotalDuration}} + Prompt eval count: {{doneStream.PromptEvalCount}} + Prompt eval duration: {{doneStream.PromptEvalDuration}} + """); } Console.WriteLine("------------------------"); } diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs index 22fb6dbd82f5..47c047d5271c 100644 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs @@ -7,9 +7,14 @@ namespace ChatCompletion; -// The following example shows how to use Semantic Kernel with OpenAI API +/// +/// These examples demonstrate different ways of using chat completion with OpenAI API. +/// public class OpenAI_ChatCompletion(ITestOutputHelper output) : BaseTest(output) { + /// + /// Sample showing how to use directly with a . + /// [Fact] public async Task ServicePromptAsync() { @@ -18,11 +23,36 @@ public async Task ServicePromptAsync() Console.WriteLine("======== Open AI - Chat Completion ========"); - OpenAIChatCompletionService chatCompletionService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); + OpenAIChatCompletionService chatService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); - await StartChatAsync(chatCompletionService); + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + OutputLastMessage(chatHistory); + + // First assistant message + var reply = await chatService.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + OutputLastMessage(chatHistory); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + OutputLastMessage(chatHistory); + + // Second assistant message + reply = await chatService.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + OutputLastMessage(chatHistory); } + /// + /// Sample showing how to use directly with a also exploring the + /// breaking glass approach capturing the underlying instance via . + /// [Fact] public async Task ServicePromptWithInnerContentAsync() { @@ -51,6 +81,9 @@ public async Task ServicePromptWithInnerContentAsync() OutputInnerContent(replyInnerContent!); } + /// + /// Sample showing how to use with chat completion and chat prompt syntax. 
+ /// [Fact] public async Task ChatPromptAsync() { @@ -137,32 +170,6 @@ public async Task ChatPromptStoreWithMetadataAsync() OutputInnerContent(replyInnerContent!); } - private async Task StartChatAsync(IChatCompletionService chatGPT) - { - Console.WriteLine("Chat content:"); - Console.WriteLine("------------------------"); - - var chatHistory = new ChatHistory("You are a librarian, expert about books"); - - // First user message - chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); - OutputLastMessage(chatHistory); - - // First assistant message - var reply = await chatGPT.GetChatMessageContentAsync(chatHistory); - chatHistory.Add(reply); - OutputLastMessage(chatHistory); - - // Second user message - chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); - OutputLastMessage(chatHistory); - - // Second assistant message - reply = await chatGPT.GetChatMessageContentAsync(chatHistory); - chatHistory.Add(reply); - OutputLastMessage(chatHistory); - } - /// /// Retrieve extra information from a inner content of type . /// @@ -173,19 +180,21 @@ private async Task StartChatAsync(IChatCompletionService chatGPT) /// private void OutputInnerContent(OpenAI.Chat.ChatCompletion innerContent) { - Console.WriteLine($"Message role: {innerContent.Role}"); // Available as a property of ChatMessageContent - Console.WriteLine($"Message content: {innerContent.Content[0].Text}"); // Available as a property of ChatMessageContent - - Console.WriteLine($"Model: {innerContent.Model}"); // Model doesn't change per chunk, so we can get it from the first chunk only - Console.WriteLine($"Created At: {innerContent.CreatedAt}"); - - Console.WriteLine($"Finish reason: {innerContent.FinishReason}"); - Console.WriteLine($"Input tokens usage: {innerContent.Usage.InputTokenCount}"); - Console.WriteLine($"Output tokens usage: {innerContent.Usage.OutputTokenCount}"); - Console.WriteLine($"Total tokens usage: {innerContent.Usage.TotalTokenCount}"); - Console.WriteLine($"Refusal: {innerContent.Refusal} "); - Console.WriteLine($"Id: {innerContent.Id}"); - Console.WriteLine($"System fingerprint: {innerContent.SystemFingerprint}"); + Console.WriteLine($$""" + Message role: {{innerContent.Role}} // Available as a property of ChatMessageContent + Message content: {{innerContent.Content[0].Text}} // Available as a property of ChatMessageContent + + Model: {{innerContent.Model}} // Model doesn't change per chunk, so we can get it from the first chunk only + Created At: {{innerContent.CreatedAt}} + + Finish reason: {{innerContent.FinishReason}} + Input tokens usage: {{innerContent.Usage.InputTokenCount}} + Output tokens usage: {{innerContent.Usage.OutputTokenCount}} + Total tokens usage: {{innerContent.Usage.TotalTokenCount}} + Refusal: {{innerContent.Refusal}} + Id: {{innerContent.Id}} + System fingerprint: {{innerContent.SystemFingerprint}} + """); if (innerContent.ContentTokenLogProbabilities.Count > 0) { diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs index 0e9fe0326290..7773fadbb76f 100644 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs @@ -8,7 +8,7 @@ namespace ChatCompletion; /// -/// These examples demonstrate the ways different content types are streamed by OpenAI LLM via the chat completion service. 
+/// These examples demonstrate different ways of using streaming chat completion with OpenAI API. /// public class OpenAI_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) { @@ -214,37 +214,6 @@ public async Task StreamFunctionCallContentAsync() } } - private async Task StreamMessageOutputAsync(OpenAIChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) - { - bool roleWritten = false; - string fullMessage = string.Empty; - - await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) - { - if (!roleWritten && chatUpdate.Role.HasValue) - { - Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); - roleWritten = true; - } - - if (chatUpdate.Content is { Length: > 0 }) - { - fullMessage += chatUpdate.Content; - Console.Write(chatUpdate.Content); - } - - // The last message in the chunk has the usage metadata. - // https://platform.openai.com/docs/api-reference/chat/create#chat-create-stream_options - if (chatUpdate.Metadata?["Usage"] is not null) - { - Console.WriteLine(chatUpdate.Metadata["Usage"]?.AsJson()); - } - } - - Console.WriteLine("\n------------------------"); - chatHistory.AddMessage(authorRole, fullMessage); - } - private async Task StreamMessageOutputFromKernelAsync(Kernel kernel, string prompt) { bool roleWritten = false; diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithReasoning.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithReasoning.cs new file mode 100644 index 000000000000..b28b45363204 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithReasoning.cs @@ -0,0 +1,86 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; + +namespace ChatCompletion; + +// The following example shows how to use Semantic Kernel with OpenAI API +public class OpenAI_ChatCompletionWithReasoning(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// Sample showing how to use with chat completion and chat prompt syntax. + /// + [Fact] + public async Task ChatPromptWithReasoningAsync() + { + Console.WriteLine("======== Open AI - Chat Completion with Reasoning ========"); + + Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); + + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) + .Build(); + + // Create execution settings with low reasoning effort. + var executionSettings = new OpenAIPromptExecutionSettings //OpenAIPromptExecutionSettings + { + MaxTokens = 2000, + ReasoningEffort = ChatReasoningEffortLevel.Low // Only available for reasoning models (i.e: o3-mini, o1, ...) + }; + + // Create KernelArguments using the execution settings. + var kernelArgs = new KernelArguments(executionSettings); + + StringBuilder chatPrompt = new(""" + You are an expert software engineer, specialized in the Semantic Kernel SDK and NET framework + Hi, Please craft me an example code in .NET using Semantic Kernel that implements a chat loop . + """); + + // Invoke the prompt with high reasoning effort. + var reply = await kernel.InvokePromptAsync(chatPrompt.ToString(), kernelArgs); + + Console.WriteLine(reply); + } + + /// + /// Sample showing how to use directly with a . 
+ /// + [Fact] + public async Task ServicePromptWithReasoningAsync() + { + Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); + + Console.WriteLine("======== Open AI - Chat Completion with Reasoning ========"); + + OpenAIChatCompletionService chatCompletionService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); + + // Create execution settings with low reasoning effort. + var executionSettings = new OpenAIPromptExecutionSettings + { + MaxTokens = 2000, + ReasoningEffort = ChatReasoningEffortLevel.Low // Only available for reasoning models (i.e: o3-mini, o1, ...) + }; + + // Create a ChatHistory and add messages. + var chatHistory = new ChatHistory(); + chatHistory.AddDeveloperMessage( + "You are an expert software engineer, specialized in the Semantic Kernel SDK and .NET framework."); + chatHistory.AddUserMessage( + "Hi, Please craft me an example code in .NET using Semantic Kernel that implements a chat loop."); + + // Instead of a prompt string, call GetChatMessageContentAsync with the chat history. + var reply = await chatCompletionService.GetChatMessageContentAsync( + chatHistory: chatHistory, + executionSettings: executionSettings); + + Console.WriteLine(reply); + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs index c36b1d945c67..fa014ede905f 100644 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs @@ -5,26 +5,33 @@ using Microsoft.SemanticKernel; using OpenAI; +#pragma warning disable CA5399 // HttpClient is created without enabling CheckCertificateRevocationList + namespace ChatCompletion; +/// +/// This example shows a way of using a Custom HttpClient and HttpHandler with OpenAI Connector to capture +/// the request Uri and Headers for each request. 
+/// public sealed class OpenAI_CustomClient(ITestOutputHelper output) : BaseTest(output) { [Fact] - public async Task RunAsync() + public async Task UsingCustomHttpClientWithOpenAI() { Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); Assert.NotNull(TestConfiguration.OpenAI.ApiKey); - Console.WriteLine("======== Using a custom OpenAI client ========"); + Console.WriteLine($"======== Open AI - {nameof(UsingCustomHttpClientWithOpenAI)} ========"); // Create an HttpClient and include your custom header(s) - using var httpClient = new HttpClient(); - httpClient.DefaultRequestHeaders.Add("My-Custom-Header", "My Custom Value"); + using var myCustomHttpHandler = new MyCustomClientHttpHandler(Output); + using var myCustomClient = new HttpClient(handler: myCustomHttpHandler); + myCustomClient.DefaultRequestHeaders.Add("My-Custom-Header", "My Custom Value"); // Configure AzureOpenAIClient to use the customized HttpClient var clientOptions = new OpenAIClientOptions { - Transport = new HttpClientPipelineTransport(httpClient), + Transport = new HttpClientPipelineTransport(myCustomClient), NetworkTimeout = TimeSpan.FromSeconds(30), RetryPolicy = new ClientRetryPolicy() }; @@ -45,8 +52,30 @@ public async Task RunAsync() kernel.Plugins["FunPlugin"]["Excuses"], new() { ["input"] = "I have no homework" } ); + Console.WriteLine(result.GetValue()); - httpClient.Dispose(); + myCustomClient.Dispose(); + } + + /// + /// Normally you would use a custom HttpClientHandler to add custom logic to your custom http client + /// This uses the ITestOutputHelper to write the requested URI to the test output + /// + /// The to write the requested URI to the test output + private sealed class MyCustomClientHttpHandler(ITestOutputHelper output) : HttpClientHandler + { + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + output.WriteLine($"Requested URI: {request.RequestUri}"); + + request.Headers.Where(h => h.Key != "Authorization") + .ToList() + .ForEach(h => output.WriteLine($"{h.Key}: {string.Join(", ", h.Value)}")); + output.WriteLine("--------------------------------"); + + // Add custom logic here + return await base.SendAsync(request, cancellationToken); + } } } diff --git a/dotnet/samples/Concepts/Concepts.csproj b/dotnet/samples/Concepts/Concepts.csproj index 746d5fbb73cf..728dce6b41fb 100644 --- a/dotnet/samples/Concepts/Concepts.csproj +++ b/dotnet/samples/Concepts/Concepts.csproj @@ -8,15 +8,18 @@ false true - $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001,CA1724 + $(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001,CA1724 Library 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 + + + @@ -52,8 +55,10 @@ + + @@ -81,6 +86,7 @@ + @@ -142,9 +148,6 @@ Always - - Always - diff --git a/dotnet/samples/Concepts/Filtering/AzureOpenAI_DeploymentSwitch.cs b/dotnet/samples/Concepts/Filtering/AzureOpenAI_DeploymentSwitch.cs new file mode 100644 index 000000000000..012ce73d2845 --- /dev/null +++ b/dotnet/samples/Concepts/Filtering/AzureOpenAI_DeploymentSwitch.cs @@ -0,0 +1,116 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Azure.Identity; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; + +namespace Filtering; + +/// +/// This sample shows how to switch between Azure OpenAI deployments based on the functions that are being called. +/// This can be useful if semantic caching is enabled and you want to switch to a different deployment based on the functions that are being called. +/// +public class AzureOpenAI_DeploymentSwitch(ITestOutputHelper output) : BaseTest(output) +{ + [Fact] + public async Task DeploymentSwitchAsync() + { + Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName); + Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint); + + // Create a logging handler to output HTTP requests and responses + using var httpHandler = new HttpClientHandler(); + using var loggingHandler = new LoggingHandler(httpHandler, this.Output); + using var httpClient = new HttpClient(loggingHandler); + + // Create KernelBuilder with an auto function invocation filter + var kernelBuilder = Kernel.CreateBuilder(); + kernelBuilder.Services.AddSingleton(new AutoFunctionInvocationFilter(this.Output)); + + // Define the endpoints for the two Azure OpenAI services + var endpoint1 = "https://contoso-eastus.openai.azure.com/"; + var endpoint2 = "https://contoso-swedencentral.openai.azure.com/"; + + // Add Azure OpenAI chat completion services + kernelBuilder.AddAzureOpenAIChatCompletion( + serviceId: "eastus", + deploymentName: "gpt-4o-mini", + endpoint: endpoint1, + credentials: new AzureCliCredential(), + httpClient: httpClient, + modelId: TestConfiguration.AzureOpenAI.ChatModelId); + kernelBuilder.AddAzureOpenAIChatCompletion( + serviceId: "swedencentral", + deploymentName: "gpt-4o", + endpoint: endpoint2, + credentials: new AzureCliCredential(), + httpClient: httpClient, + modelId: TestConfiguration.AzureOpenAI.ChatModelId); + + var kernel = kernelBuilder.Build(); + + kernel.ImportPluginFromFunctions("HelperFunctions", + [ + kernel.CreateFunctionFromMethod(() => "Brown", "GetEyeColor", "Retrieves eye color for the current user."), + kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("R"), "GetCurrentDateTimeInUtc", "Retrieves the current date time in UTC."), + ]); + + OpenAIPromptExecutionSettings settings = new() + { + ServiceId = "swedencentral", + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + }; + + var reply = await kernel.InvokePromptAsync("What time is it and what is my eye color and what time is it?", new(settings)); + + Console.WriteLine(reply); + } + + private sealed class AutoFunctionInvocationFilter(ITestOutputHelper output) : IAutoFunctionInvocationFilter + { + public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) + { + var kernel = context.Kernel; + var chatHistory = context.ChatHistory; + var executionSettings = context.ExecutionSettings; + var functionCalls = FunctionCallContent.GetFunctionCalls(context.ChatHistory.Last()); + + if (executionSettings is not null && "swedencentral".Equals(executionSettings.ServiceId, StringComparison.Ordinal)) + { + bool includesGetEyeColor = functionCalls.Any(fc => fc.FunctionName.Equals("GetEyeColor", StringComparison.Ordinal)); + + // For the "GetEyeColor" function, switch to a different deployment. + // If the function is not present in the collection of function calls, proceed with the request as usual. 
+ if (!includesGetEyeColor) + { + await next(context); + } + else + { + output.WriteLine("Switching to use eastus deployment"); + + chatHistory.RemoveAt(chatHistory.Count - 1); + + IChatCompletionService chatCompletionService = kernel.Services.GetRequiredKeyedService("eastus"); + + OpenAIPromptExecutionSettings settings = new() + { + ServiceId = "eastus", + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + }; + + var chatContent = await chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, context.Kernel); + + context.Result = new FunctionResult(context.Result, chatContent); + context.Terminate = true; + } + } + else + { + await next(context); + } + } + } +} diff --git a/dotnet/samples/Concepts/LocalModels/HuggingFace_ChatCompletionWithTGI.cs b/dotnet/samples/Concepts/LocalModels/HuggingFace_ChatCompletionWithTGI.cs deleted file mode 100644 index c1b3372d071e..000000000000 --- a/dotnet/samples/Concepts/LocalModels/HuggingFace_ChatCompletionWithTGI.cs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; - -#pragma warning disable format // Format item can be simplified -#pragma warning disable CA1861 // Avoid constant arrays as arguments - -namespace LocalModels; - -// The following example shows how to use Semantic Kernel with HuggingFace API. -public class HuggingFace_ChatCompletionWithTGI(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Follow steps in to setup HuggingFace local Text Generation Inference HTTP server. - /// - [Fact(Skip = "Requires TGI (text generation inference) deployment")] - public async Task RunTGI_ChatCompletionAsync() - { - Console.WriteLine("\n======== HuggingFace - TGI Chat Completion ========\n"); - - // This example was run against one of the chat completion (Message API) supported models from HuggingFace, listed in here: - // Starting a Local Docker i.e: - // docker run --gpus all --shm-size 1g -p 8080:80 -v "F:\temp\huggingface:/data" ghcr.io/huggingface/text-generation-inference:1.4 --model-id teknium/OpenHermes-2.5-Mistral-7B - - // HuggingFace local HTTP server endpoint - var endpoint = new Uri("http://localhost:8080"); - - const string Model = "teknium/OpenHermes-2.5-Mistral-7B"; - - Kernel kernel = Kernel.CreateBuilder() - .AddHuggingFaceChatCompletion( - model: Model, - endpoint: endpoint) - .Build(); - - var chatCompletion = kernel.GetRequiredService(); - var chatHistory = new ChatHistory("You are a helpful assistant.") - { - new ChatMessageContent(AuthorRole.User, "What is deep learning?") - }; - - var result = await chatCompletion.GetChatMessageContentAsync(chatHistory); - - Console.WriteLine(result.Role); - Console.WriteLine(result.Content); - } - - /// - /// Follow steps in to setup HuggingFace local Text Generation Inference HTTP server. 
- /// - [Fact(Skip = "Requires TGI (text generation inference) deployment")] - public async Task RunTGI_StreamingChatCompletionAsync() - { - Console.WriteLine("\n======== HuggingFace - TGI Chat Completion Streaming ========\n"); - - // This example was run against one of the chat completion (Message API) supported models from HuggingFace, listed in here: - // Starting a Local Docker i.e: - // docker run --gpus all --shm-size 1g -p 8080:80 -v "F:\temp\huggingface:/data" ghcr.io/huggingface/text-generation-inference:1.4 --model-id teknium/OpenHermes-2.5-Mistral-7B - - // HuggingFace local HTTP server endpoint - var endpoint = new Uri("http://localhost:8080"); - - const string Model = "teknium/OpenHermes-2.5-Mistral-7B"; - - Kernel kernel = Kernel.CreateBuilder() - .AddHuggingFaceChatCompletion( - model: Model, - endpoint: endpoint) - .Build(); - - var chatCompletion = kernel.GetRequiredService(); - var chatHistory = new ChatHistory("You are a helpful assistant.") - { - new ChatMessageContent(AuthorRole.User, "What is deep learning?") - }; - - AuthorRole? role = null; - await foreach (var chatMessageChunk in chatCompletion.GetStreamingChatMessageContentsAsync(chatHistory)) - { - if (role is null) - { - role = chatMessageChunk.Role; - Console.Write(role); - } - Console.Write(chatMessageChunk.Content); - } - } -} diff --git a/dotnet/samples/Concepts/LocalModels/MultipleProviders_ChatCompletion.cs b/dotnet/samples/Concepts/LocalModels/MultipleProviders_ChatCompletion.cs deleted file mode 100644 index ec118d27e977..000000000000 --- a/dotnet/samples/Concepts/LocalModels/MultipleProviders_ChatCompletion.cs +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; - -namespace LocalModels; - -/// -/// This example shows a way of using OpenAI connector with other APIs that supports the same ChatCompletion Message API standard from OpenAI. -/// -/// To proceed with this example will be necessary to follow those steps: -/// 1. Install LMStudio Platform in your environment -/// 2. Open LM Studio -/// 3. Search and Download both Phi2 and Llama2 models (preferably the ones that uses 8GB RAM or more) -/// 4. Start the Message API Server on http://localhost:1234 -/// 5. Run the examples. -/// -/// OR -/// -/// 1. Start the Ollama Message API Server on http://localhost:11434 using docker -/// 2. docker run -d --gpus=all -v "d:\temp\ollama:/root/.ollama" -p 11434:11434 --name ollama ollama/ollama -/// 3. Set Llama2 as the current ollama model: docker exec -it ollama ollama run llama2 -/// 4. Run the Ollama examples. -/// -/// OR -/// -/// 1. Start the LocalAI Message API Server on http://localhost:8080 -/// 2. docker run -ti -p 8080:8080 localai/localai:v2.12.3-ffmpeg-core phi-2 -/// 3. Run the LocalAI examples. 
-/// -public class MultipleProviders_ChatCompletion(ITestOutputHelper output) : BaseTest(output) -{ - [Theory(Skip = "Manual configuration needed")] - [InlineData("LMStudio", "http://localhost:1234", "llama2")] // Setup Llama2 as the model in LM Studio UI and start the Message API Server on http://localhost:1234 - [InlineData("Ollama", "http://localhost:11434", "llama2")] // Start the Ollama Message API Server on http://localhost:11434 using docker - [InlineData("LocalAI", "http://localhost:8080", "phi-2")] - public async Task LocalModel_ExampleAsync(string messageAPIPlatform, string url, string modelId) - { - Console.WriteLine($"Example using local {messageAPIPlatform}"); - // Setup Llama2 as the model in LM Studio UI. - - var kernel = Kernel.CreateBuilder() - .AddOpenAIChatCompletion( - modelId: modelId, - apiKey: null, - endpoint: new Uri(url)) - .Build(); - - var prompt = @"Rewrite the text between triple backticks into a business mail. Use a professional tone, be clear and concise. - Sign the mail as AI Assistant. - - Text: ```{{$input}}```"; - - var mailFunction = kernel.CreateFunctionFromPrompt(prompt, new OpenAIPromptExecutionSettings - { - TopP = 0.5, - MaxTokens = 1000, - }); - - var response = await kernel.InvokeAsync(mailFunction, new() { ["input"] = "Tell David that I'm going to finish the business plan by the end of the week." }); - Console.WriteLine(response); - } - - [Theory(Skip = "Manual configuration needed")] - [InlineData("LMStudio", "http://localhost:1234", "llama2")] // Setup Llama2 as the model in LM Studio UI and start the Message API Server on http://localhost:1234 - [InlineData("Ollama", "http://localhost:11434", "llama2")] // Start the Ollama Message API Server on http://localhost:11434 using docker - [InlineData("LocalAI", "http://localhost:8080", "phi-2")] - public async Task LocalModel_StreamingExampleAsync(string messageAPIPlatform, string url, string modelId) - { - Console.WriteLine($"Example using local {messageAPIPlatform}"); - - var kernel = Kernel.CreateBuilder() - .AddOpenAIChatCompletion( - modelId: modelId, - apiKey: null, - endpoint: new Uri(url)) - .Build(); - - var prompt = @"Rewrite the text between triple backticks into a business mail. Use a professional tone, be clear and concise. - Sign the mail as AI Assistant. - - Text: ```{{$input}}```"; - - var mailFunction = kernel.CreateFunctionFromPrompt(prompt, new OpenAIPromptExecutionSettings - { - TopP = 0.5, - MaxTokens = 1000, - }); - - await foreach (var word in kernel.InvokeStreamingAsync(mailFunction, new() { ["input"] = "Tell David that I'm going to finish the business plan by the end of the week." 
})) - { - Console.WriteLine(word); - } - } -} diff --git a/dotnet/samples/Concepts/Plugins/CopilotAgentBasedPlugins.cs b/dotnet/samples/Concepts/Plugins/CopilotAgentBasedPlugins.cs index a711747e84e0..ee862bdabc8e 100644 --- a/dotnet/samples/Concepts/Plugins/CopilotAgentBasedPlugins.cs +++ b/dotnet/samples/Concepts/Plugins/CopilotAgentBasedPlugins.cs @@ -100,16 +100,57 @@ private void WriteSampleHeadingToConsole(string pluginToTest, string functionToT } private static readonly HashSet s_fieldsToIgnore = new( [ + "@odata.type", + "attachments", + "allowNewTimeProposals", + "bccRecipients", "bodyPreview", + "calendar", "categories", + "ccRecipients", + "changeKey", "conversationId", + "coordinates", "conversationIndex", + "createdDateTime", + "discriminator", + "lastModifiedDateTime", + "locations", + "extensions", + "flag", + "from", + "hasAttachments", + "iCalUId", + "id", "inferenceClassification", "internetMessageHeaders", + "instances", + "isCancelled", "isDeliveryReceiptRequested", + "isDraft", + "isOrganizer", + "isRead", + "isReadReceiptRequested", "multiValueExtendedProperties", + "onlineMeeting", + "onlineMeetingProvider", + "onlineMeetingUrl", + "organizer", + "originalStart", + "parentFolderId", + "range", + "receivedDateTime", + "recurrence", + "replyTo", + "sender", + "sentDateTime", + "seriesMasterId", "singleValueExtendedProperties", + "transactionId", + "time", "uniqueBody", + "uniqueId", + "uniqueIdType", "webLink", ], StringComparer.OrdinalIgnoreCase @@ -135,26 +176,42 @@ private void WriteSampleHeadingToConsole(string pluginToTest, string functionToT { return schema; } - if (jsonNode.TryGetPropertyValue(RequiredPropertyName, out var requiredRawValue) && requiredRawValue is JsonArray requiredArray) + + TrimPropertiesFromJsonNode(jsonNode); + + return KernelJsonSchema.Parse(node.ToString()); + } + private static void TrimPropertiesFromJsonNode(JsonNode jsonNode) + { + if (jsonNode is not JsonObject jsonObject) + { + return; + } + if (jsonObject.TryGetPropertyValue(RequiredPropertyName, out var requiredRawValue) && requiredRawValue is JsonArray requiredArray) { jsonNode[RequiredPropertyName] = new JsonArray(requiredArray.Where(x => x is not null).Select(x => x!.GetValue()).Where(x => !s_fieldsToIgnore.Contains(x)).Select(x => JsonValue.Create(x)).ToArray()); } - - if (jsonNode.TryGetPropertyValue(PropertiesPropertyName, out var propertiesRawValue) && propertiesRawValue is JsonObject propertiesObject) + if (jsonObject.TryGetPropertyValue(PropertiesPropertyName, out var propertiesRawValue) && propertiesRawValue is JsonObject propertiesObject) { - var properties = propertiesObject.Where(x => s_fieldsToIgnore.Contains(x.Key)).Select(x => x.Key).ToArray(); + var properties = propertiesObject.Where(x => s_fieldsToIgnore.Contains(x.Key)).Select(static x => x.Key).ToArray(); foreach (var property in properties) { propertiesObject.Remove(property); } } - - return KernelJsonSchema.Parse(node.ToString()); + foreach (var subProperty in jsonObject) + { + if (subProperty.Value is not null) + { + TrimPropertiesFromJsonNode(subProperty.Value); + } + } } private static readonly RestApiParameterFilter s_restApiParameterFilter = (RestApiParameterFilterContext context) => { - if ("me_CreateMessages".Equals(context.Operation.Id, StringComparison.OrdinalIgnoreCase) && - "payload".Equals(context.Parameter.Name, StringComparison.OrdinalIgnoreCase)) + if (("me_sendMail".Equals(context.Operation.Id, StringComparison.OrdinalIgnoreCase) || + ("me_calendar_CreateEvents".Equals(context.Operation.Id, 
StringComparison.OrdinalIgnoreCase)) && + "payload".Equals(context.Parameter.Name, StringComparison.OrdinalIgnoreCase))) { context.Parameter.Schema = TrimPropertiesFromRequestBody(context.Parameter.Schema); return context.Parameter; diff --git a/dotnet/samples/Concepts/Plugins/CrewAI_Plugin.cs b/dotnet/samples/Concepts/Plugins/CrewAI_Plugin.cs new file mode 100644 index 000000000000..cf0de1188055 --- /dev/null +++ b/dotnet/samples/Concepts/Plugins/CrewAI_Plugin.cs @@ -0,0 +1,108 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Plugins.AI.CrewAI; + +namespace Plugins; + +/// +/// This example shows how to interact with an existing CrewAI Enterprise Crew directly or as a plugin. +/// These examples require a valid CrewAI Enterprise deployment with an endpoint, auth token, and known inputs. +/// +public class CrewAI_Plugin(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// Shows how to kickoff an existing CrewAI Enterprise Crew and wait for it to complete. + /// + [Fact] + public async Task UsingCrewAIEnterpriseAsync() + { + string crewAIEndpoint = TestConfiguration.CrewAI.Endpoint; + string crewAIAuthToken = TestConfiguration.CrewAI.AuthToken; + + var crew = new CrewAIEnterprise( + endpoint: new Uri(crewAIEndpoint), + authTokenProvider: async () => crewAIAuthToken); + + // The required inputs for the Crew must be known in advance. This example is modeled after the + // Enterprise Content Marketing Crew Template and requires the following inputs: + var inputs = new + { + company = "CrewAI", + topic = "Agentic products for consumers", + }; + + // Invoke directly with our inputs + var kickoffId = await crew.KickoffAsync(inputs); + Console.WriteLine($"CrewAI Enterprise Crew kicked off with ID: {kickoffId}"); + + // Wait for completion + var result = await crew.WaitForCrewCompletionAsync(kickoffId); + Console.WriteLine("CrewAI Enterprise Crew completed with the following result:"); + Console.WriteLine(result); + } + + /// + /// Shows how to kickoff an existing CrewAI Enterprise Crew as a plugin. + /// + [Fact] + public async Task UsingCrewAIEnterpriseAsPluginAsync() + { + string crewAIEndpoint = TestConfiguration.CrewAI.Endpoint; + string crewAIAuthToken = TestConfiguration.CrewAI.AuthToken; + string openAIModelId = TestConfiguration.OpenAI.ChatModelId; + string openAIApiKey = TestConfiguration.OpenAI.ApiKey; + + if (openAIModelId is null || openAIApiKey is null) + { + Console.WriteLine("OpenAI credentials not found. Skipping example."); + return; + } + + // Setup the Kernel and AI Services + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: openAIModelId, + apiKey: openAIApiKey) + .Build(); + + var crew = new CrewAIEnterprise( + endpoint: new Uri(crewAIEndpoint), + authTokenProvider: async () => crewAIAuthToken); + + // The required inputs for the Crew must be known in advance. This example is modeled after the + // Enterprise Content Marketing Crew Template and requires string inputs for the company and topic. + // We need to describe the type and purpose of each input to allow the LLM to invoke the crew as expected. + var crewPluginDefinitions = new[] + { + new CrewAIInputMetadata(Name: "company", Description: "The name of the company that should be researched", Type: typeof(string)), + new CrewAIInputMetadata(Name: "topic", Description: "The topic that should be researched", Type: typeof(string)), + }; + + // Create the CrewAI Plugin. 
This builds a plugin that can be added to the Kernel and invoked like any other plugin. + // The plugin will contain the following functions: + // - Kickoff: Starts the Crew with the specified inputs and returns the Id of the scheduled kickoff. + // - KickoffAndWait: Starts the Crew with the specified inputs and waits for the Crew to complete before returning the result. + // - WaitForCrewCompletion: Waits for the specified Crew kickoff to complete and returns the result. + // - GetCrewKickoffStatus: Gets the status of the specified Crew kickoff. + var crewPlugin = crew.CreateKernelPlugin( + name: "EnterpriseContentMarketingCrew", + description: "Conducts thorough research on the specified company and topic to identify emerging trends, analyze competitor strategies, and gather data-driven insights.", + inputMetadata: crewPluginDefinitions); + + // Add the plugin to the Kernel + kernel.Plugins.Add(crewPlugin); + + // Invoke the CrewAI Plugin directly as shown below, or use automaic function calling with an LLM. + var kickoffAndWaitFunction = crewPlugin["KickoffAndWait"]; + var result = await kernel.InvokeAsync( + function: kickoffAndWaitFunction, + arguments: new() + { + ["company"] = "CrewAI", + ["topic"] = "Consumer AI Products" + }); + + Console.WriteLine(result); + } +} diff --git a/dotnet/samples/Concepts/README.md b/dotnet/samples/Concepts/README.md index 1fb0d0ffe9d6..2c213d423790 100644 --- a/dotnet/samples/Concepts/README.md +++ b/dotnet/samples/Concepts/README.md @@ -13,7 +13,7 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=NameSpa Example for `ChatCompletion/OpenAI_ChatCompletion.cs` file, targeting the `ChatPromptSync` test: ```powershell -dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCompletion.OpenAI_ChatCompletion.ChatPromptSync" +dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCompletion.OpenAI_ChatCompletion.ChatPromptAsync" ``` ## Table of Contents @@ -55,6 +55,7 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom - [AzureAIInference_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletion.cs) - [AzureAIInference_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletionStreaming.cs) - [AzureOpenAI_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion.cs) +- [AzureOpenAI_ChatCompletionWithReasoning](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionWithReasoning.cs) - [AzureOpenAI_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionStreaming.cs) - [AzureOpenAI_CustomClient](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_CustomClient.cs) - [AzureOpenAIWithData_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs) @@ -68,22 +69,31 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom - [Google_GeminiChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletionStreaming.cs) - 
[Google_GeminiGetModelResult](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Google_GeminiGetModelResult.cs) - [Google_GeminiVision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Google_GeminiVision.cs) -- [OpenAI_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs) -- [OpenAI_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs) -- [OpenAI_ChatCompletionWithVision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithVision.cs) -- [OpenAI_CustomClient](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs) -- [OpenAI_UsingLogitBias](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_UsingLogitBias.cs) -- [OpenAI_FunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCalling.cs) -- [OpenAI_ReasonedFunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ReasonedFunctionCalling.cs) -- [MultipleProviders_ChatHistoryReducer](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MuiltipleProviders_ChatHistoryReducer.cs) +- [HuggingFace_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletion.cs) +- [HuggingFace_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletionStreaming.cs) +- [HybridCompletion_Fallback](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/HybridCompletion_Fallback.cs) +- [LMStudio_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletion.cs) +- [LMStudio_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletionStreaming.cs) - [MistralAI_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_ChatCompletion.cs) - [MistralAI_ChatPrompt](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_ChatPrompt.cs) - [MistralAI_FunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_FunctionCalling.cs) - [MistralAI_StreamingFunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_StreamingFunctionCalling.cs) -- [Onnx_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletion.cs) -- [Onnx_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletionStreaming.cs) +- [MultipleProviders_ChatHistoryReducer](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MuiltipleProviders_ChatHistoryReducer.cs) - 
[Ollama_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs) - [Ollama_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs) +- [Onnx_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletion.cs) +- [Onnx_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletionStreaming.cs) +- [OpenAI_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs) +- [OpenAI_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs) +- [OpenAI_ChatCompletionWithReasoning](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithReasoning.cs) +- [OpenAI_ChatCompletionWithVision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithVision.cs) +- [OpenAI_CustomClient](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs) +- [OpenAI_FunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCalling.cs) +- [OpenAI_FunctionCallingWithMemoryPlugin](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCallingWithMemoryPlugin.cs) +- [OpenAI_ReasonedFunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ReasonedFunctionCalling.cs) +- [OpenAI_RepeatedFunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_RepeatedFunctionCalling.cs) +- [OpenAI_StructuredOutputs](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_StructuredOutputs.cs) +- [OpenAI_UsingLogitBias](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_UsingLogitBias.cs) ### DependencyInjection - Examples on using `DI Container` @@ -102,6 +112,7 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom - [PromptRenderFiltering](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/PromptRenderFiltering.cs) - [RetryWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/RetryWithFilters.cs) - [TelemetryWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/TelemetryWithFilters.cs) +- [AzureOpenAI_DeploymentSwitch](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/AzureOpenAI_DeploymentSwitch.cs) ### Functions - Invoking [`Method`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs) or [`Prompt`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs) functions with [`Kernel`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/Kernel.cs) @@ -119,11 +130,6 @@ dotnet 
test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom - [HuggingFace_ImageToText](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ImageToText/HuggingFace_ImageToText.cs) -### LocalModels - Running models locally - -- [HuggingFace_ChatCompletionWithTGI](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/LocalModels/HuggingFace_ChatCompletionWithTGI.cs) -- [MultipleProviders_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/LocalModels/MultipleProviders_ChatCompletion.cs) - ### Memory - Using AI [`Memory`](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/SemanticKernel.Abstractions/Memory) concepts - [OpenAI_EmbeddingGeneration](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/OpenAI_EmbeddingGeneration.cs) @@ -166,6 +172,7 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom - [CreatePluginFromOpenApiSpec_Jira](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Jira.cs) - [CreatePluginFromOpenApiSpec_Klarna](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Klarna.cs) - [CreatePluginFromOpenApiSpec_RepairService](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_RepairService.cs) +- [CrewAI_Plugin](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CrewAI_Plugin.cs) - [OpenApiPlugin_PayloadHandling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/OpenApiPlugin_PayloadHandling.cs) - [OpenApiPlugin_CustomHttpContentReader](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/OpenApiPlugin_CustomHttpContentReader.cs) - [OpenApiPlugin_Customization](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/OpenApiPlugin_Customization.cs) diff --git a/dotnet/samples/Concepts/Resources/Agents/ParrotAgent.yaml b/dotnet/samples/Concepts/Resources/Agents/ParrotAgent.yaml deleted file mode 100644 index 26a07cf04cf3..000000000000 --- a/dotnet/samples/Concepts/Resources/Agents/ParrotAgent.yaml +++ /dev/null @@ -1,9 +0,0 @@ -name: Parrot -template_format: semantic-kernel -template: | - Repeat the user message in the voice of a pirate and then end with {{$count}} parrot sounds. -description: A fun chat bot that repeats the user message in the voice of a pirate. -input_variables: - - name: count - description: The number of parrot sounds. - is_required: true diff --git a/dotnet/samples/Concepts/Resources/Agents/ToolAgent.yaml b/dotnet/samples/Concepts/Resources/Agents/ToolAgent.yaml deleted file mode 100644 index 474fd86a46ad..000000000000 --- a/dotnet/samples/Concepts/Resources/Agents/ToolAgent.yaml +++ /dev/null @@ -1,7 +0,0 @@ -name: ToolRunner -template_format: semantic-kernel -template: | - Respond to the user using the single best tool. - If no tool is appropriate, let the user know you only provide responses using tools. - When reporting a tool result, start with, "The tool I used informed me that" -description: Determines if a tool can be utilized to accomplish a result. 
diff --git a/dotnet/samples/Concepts/Resources/Agents/travelinfo.txt b/dotnet/samples/Concepts/Resources/Agents/travelinfo.txt deleted file mode 100644 index 21665c82198e..000000000000 --- a/dotnet/samples/Concepts/Resources/Agents/travelinfo.txt +++ /dev/null @@ -1,217 +0,0 @@ -Invoice Booking Reference LMNOPQ Trip ID - 11110011111 -Passenger Name(s) -MARKS/SAM ALBERT Agent W2 - - -MICROSOFT CORPORATION 14820 NE 36TH STREET REDMOND WA US 98052 - -American Express Global Business Travel Microsoft Travel -14711 NE 29th Place, Suite 215 -Bellevue, WA 98007 -Phone: +1 (669) 210-8041 - - - - -BILLING CODE : 1010-10010110 -Invoice Information - - - - - - -Invoice Details -Ticket Number - - - - - - - -0277993883295 - - - - - - -Charges -Ticket Base Fare - - - - - - - -306.29 - -Airline Name - -ALASKA AIRLINES - -Ticket Tax Fare 62.01 - -Passenger Name Flight Details - -MARKS/SAM ALBERT -11 Sep 2023 ALASKA AIRLINES -0572 H Class -SEATTLE-TACOMA,WA/RALEIGH DURHAM,NC -13 Sep 2023 ALASKA AIRLINES -0491 M Class -RALEIGH DURHAM,NC/SEATTLE- TACOMA,WA - -Total (USD) Ticket Amount - -368.30 - -Credit Card Information -Charged to Card - - - -AX XXXXXXXXXXX4321 - - - -368.30 - - - - -Payment Details - - - -Charged by Airline -Total Invoice Charge - - - -USD - - - -368.30 -368.30 - -Monday 11 September 2023 - -10:05 AM - -Seattle (SEA) to Durham (RDU) -Airline Booking Ref: ABCXYZ - -Carrier: ALASKA AIRLINES - -Flight: AS 572 - -Status: Confirmed - -Operated By: ALASKA AIRLINES -Origin: Seattle, WA, Seattle-Tacoma International Apt (SEA) - -Departing: Monday 11 September 2023 at 10:05 AM Destination: Durham, Raleigh, Raleigh (RDU) Arriving: Monday 11 September 2023 at 06:15 PM -Additional Information - -Departure Terminal: Not Applicable - -Arrival Terminal: TERMINAL 2 - - -Class: ECONOMY -Aircraft Type: Boeing 737-900 -Meal Service: Not Applicable -Frequent Flyer Number: Not Applicable -Number of Stops: 0 -Greenhouse Gas Emissions: 560 kg CO2e / person - - -Distance: 2354 Miles Estimated Time: 05 hours 10 minutes -Seat: 24A - - -THE WESTIN RALEIGH DURHAM AP -Address: 3931 Macaw Street, Raleigh, NC, 27617, US -Phone: (1) 919-224-1400 Fax: (1) 919-224-1401 -Check In Date: Monday 11 September 2023 Check Out Date: Wednesday 13 September 2023 Number Of Nights: 2 -Rate: USD 280.00 per night may be subject to local taxes and service charges -Guaranteed to: AX XXXXXXXXXXX4321 - -Reference Number: 987654 -Additional Information -Membership ID: 123456789 -CANCEL PERMITTED UP TO 1 DAYS BEFORE CHECKIN - -Status: Confirmed - - -Corporate Id: Not Applicable - -Number Of Rooms: 1 - -Wednesday 13 September 2023 - -07:15 PM - -Durham (RDU) to Seattle (SEA) -Airline Booking Ref: ABCXYZ - -Carrier: ALASKA AIRLINES - -Flight: AS 491 - -Status: Confirmed - -Operated By: ALASKA AIRLINES -Origin: Durham, Raleigh, Raleigh (RDU) -Departing: Wednesday 13 September 2023 at 07:15 PM - - - -Departure Terminal: TERMINAL 2 - -Destination: Seattle, WA, Seattle-Tacoma International Apt (SEA) -Arriving: Wednesday 13 September 2023 at 09:59 PM Arrival Terminal: Not Applicable -Additional Information - - -Class: ECONOMY -Aircraft Type: Boeing 737-900 -Meal Service: Not Applicable -Frequent Flyer Number: Not Applicable -Number of Stops: 0 -Greenhouse Gas Emissions: 560 kg CO2e / person - - -Distance: 2354 Miles Estimated Time: 05 hours 44 minutes -Seat: 16A - - - -Greenhouse Gas Emissions -Total Greenhouse Gas Emissions for this trip is: 1120 kg CO2e / person -Air Fare Information - -Routing : ONLINE RESERVATION -Total Fare : USD 368.30 
-Additional Messages -FOR 24X7 Travel Reservations Please Call 1-669-210-8041 Unable To Use Requested As Frequent Flyer Program Invalid Use Of Frequent Flyer Number 0123XYZ Please Contact Corresponding Frequent Travel Program Support Desk For Assistance -Trip Name-Trip From Seattle To Raleigh/Durham -This Ticket Is Nonrefundable. Changes Or Cancellations Must Be Made Prior To Scheduled Flight Departure -All Changes Must Be Made On Same Carrier And Will Be Subject To Service Fee And Difference In Airfare -******************************************************* -Please Be Advised That Certain Mandatory Hotel-Imposed Charges Including But Not Limited To Daily Resort Or Facility Fees May Be Applicable To Your Stay And Payable To The Hotel Operator At Check-Out From The Property. You May Wish To Inquire With The Hotel Before Your Trip Regarding The Existence And Amount Of Such Charges. -******************************************************* -Hotel Cancel Policies Vary Depending On The Property And Date. If You Have Questions Regarding Cancellation Fees Please Call The Travel Office. -Important Information -COVID-19 Updates: Click here to access Travel Vitals https://travelvitals.amexgbt.com for the latest information and advisories compiled by American Express Global Business Travel. - -Carbon Emissions: The total emissions value for this itinerary includes air travel only. Emissions for each individual flight are displayed in the flight details section. For more information on carbon emissions please refer to https://www.amexglobalbusinesstravel.com/sustainable-products-and-platforms. - -For important information regarding your booking in relation to the conditions applying to your booking, managing your booking and travel advisory, please refer to www.amexglobalbusinesstravel.com/booking-info. - -GBT Travel Services UK Limited (GBT UK) and its authorized sublicensees (including Ovation Travel Group and Egencia) use certain trademarks and service marks of American Express Company or its subsidiaries (American Express) in the American Express Global Business Travel and American Express Meetings & Events brands and in connection with its business for permitted uses only under a limited license from American Express (Licensed Marks). The Licensed Marks are trademarks or service marks of, and the property of, American Express. GBT UK is a subsidiary of Global Business Travel Group, Inc. (NYSE: GBTG). American Express holds a minority interest in GBTG, which operates as a separate company from American Express. 
diff --git a/dotnet/samples/Concepts/Resources/Plugins/ApiManifestPlugins/MessagesPlugin/apimanifest.json b/dotnet/samples/Concepts/Resources/Plugins/ApiManifestPlugins/MessagesPlugin/apimanifest.json index c8b442e152fa..ab560e5906da 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/ApiManifestPlugins/MessagesPlugin/apimanifest.json +++ b/dotnet/samples/Concepts/Resources/Plugins/ApiManifestPlugins/MessagesPlugin/apimanifest.json @@ -14,7 +14,7 @@ }, { "method": "Post", - "uriTemplate": "/me/messages" + "uriTemplate": "/me/sendMail" } ] } diff --git a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/AstronomyPlugin/messages-openapi.yml b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/AstronomyPlugin/messages-openapi.yml index 7903450f0c53..322b38a9e5a9 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/AstronomyPlugin/messages-openapi.yml +++ b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/AstronomyPlugin/messages-openapi.yml @@ -10,8 +10,8 @@ paths: get: tags: - me.message - summary: List messages - description: 'Get the messages in the signed-in user''s mailbox (including the Deleted Items and Clutter folders). Depending on the page size and mailbox data, getting messages from a mailbox can incur multiple requests. The default page size is 10 messages. Use $top to customize the page size, within the range of 1 and 1000. To improve the operation response time, use $select to specify the exact properties you need; see example 1 below. Fine-tune the values for $select and $top, especially when you must use a larger page size, as returning a page with hundreds of messages each with a full response payload may trigger the gateway timeout (HTTP 504). To get the next page of messages, simply apply the entire URL returned in @odata.nextLink to the next get-messages request. This URL includes any query parameters you may have specified in the initial request. Do not try to extract the $skip value from the @odata.nextLink URL to manipulate responses. This API uses the $skip value to keep count of all the items it has gone through in the user''s mailbox to return a page of message-type items. It''s therefore possible that even in the initial response, the $skip value is larger than the page size. For more information, see Paging Microsoft Graph data in your app. Currently, this operation returns message bodies in only HTML format. There are two scenarios where an app can get messages in another user''s mail folder:' + summary: Get the messages in the signed-in user\u0026apos;s mailbox + description: Get the messages in the signed-in user\u0026apos;s mailbox (including the Deleted Items and Clutter folders). Depending on the page size and mailbox data, getting messages from a mailbox can incur multiple requests. The default page size is 10 messages. Use $top to customize the page size, within the range of 1 and 1000. To improve the operation response time, use $select to specify the exact properties you need; see example 1 below. Fine-tune the values for $select and $top, especially when you must use a larger page size, as returning a page with hundreds of messages each with a full response payload may trigger the gateway timeout (HTTP 504). To get the next page of messages, simply apply the entire URL returned in @odata.nextLink to the next get-messages request. This URL includes any query parameters you may have specified in the initial request. 
Do not try to extract the $skip value from the @odata.nextLink URL to manipulate responses. This API uses the $skip value to keep count of all the items it has gone through in the user\u0026apos;s mailbox to return a page of message-type items. It\u0026apos;s therefore possible that even in the initial response, the $skip value is larger than the page size. For more information, see Paging Microsoft Graph data in your app. Currently, this operation returns message bodies in only HTML format. There are two scenarios where an app can get messages in another user\u0026apos;s mail folder operationId: me_ListMessages parameters: - name: includeHiddenMessages @@ -63,26 +63,18 @@ paths: nextLinkName: '@odata.nextLink' operationName: listMore itemName: value + /me/sendMail: post: tags: - - me.message - summary: Create message - description: "Create a draft of a new message in either JSON or MIME format. When using JSON format, you can:\n- Include an attachment to the message.\n- Update the draft later to add content to the body or change other message properties. When using MIME format:\n- Provide the applicable Internet message headers and the MIME content, all encoded in base64 format in the request body.\n- /* Add any attachments and S/MIME properties to the MIME content. By default, this operation saves the draft in the Drafts folder. Send the draft message in a subsequent operation. Alternatively, send a new message in a single operation, or create a draft to forward, reply and reply-all to an existing message." - operationId: me_CreateMessages + - me.user.Actions + summary: Invoke action sendMail + description: 'Send the message specified in the request body using either JSON or MIME format. When using JSON format, you can include a file attachment in the same sendMail action call. When using MIME format: This method saves the message in the Sent Items folder. Alternatively, create a draft message to send later. To learn more about the steps involved in the backend before a mail is delivered to recipients, see here.' + operationId: me_sendMail requestBody: - description: New navigation property - content: - application/json: - schema: - $ref: '#/components/schemas/microsoft.graph.message' - required: true + $ref: '#/components/requestBodies/sendMailRequestBody' responses: - 2XX: - description: Created navigation property. 
- content: - application/json: - schema: - $ref: '#/components/schemas/microsoft.graph.message' + '204': + description: Success components: schemas: microsoft.graph.message: @@ -503,4 +495,19 @@ components: style: form explode: false schema: - type: boolean \ No newline at end of file + type: boolean + requestBodies: + sendMailRequestBody: + description: Action parameters + content: + application/json: + schema: + type: object + properties: + Message: + $ref: '#/components/schemas/microsoft.graph.message' + SaveToSentItems: + type: boolean + default: false + nullable: true + required: true diff --git a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-apiplugin.json b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-apiplugin.json index 8264a87f44ca..6ea4e7b739f5 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-apiplugin.json +++ b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-apiplugin.json @@ -10,10 +10,17 @@ "conversation_starters": [ { "text": "List events" + }, + { + "text": "Create new navigation property to events for me" } ] }, "functions": [ + { + "name": "me_calendar_CreateEvents", + "description": "Create new navigation property to events for me" + }, { "name": "me_calendar_ListEvents", "description": "Retrieve a list of events in a calendar. The calendar can be one for a user, or the default calendar of a Microsoft 365 group. The list of events contains single instance meetings and series masters. To get expanded event instances, you can get the calendar view, or\nget the instances of an event." @@ -29,7 +36,8 @@ "url": "calendar-openapi.yml" }, "run_for_functions": [ - "me_calendar_ListEvents" + "me_calendar_ListEvents", + "me_calendar_CreateEvents" ] } ] diff --git a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-openapi.yml b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-openapi.yml index 7232189dd51a..458690e3ec4b 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-openapi.yml +++ b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-openapi.yml @@ -1,4 +1,4 @@ -openapi: 3.0.1 +openapi: 3.0.4 info: title: OData Service for namespace microsoft.graph - Subset description: This OData service is located at https://graph.microsoft.com/v1.0 @@ -56,23 +56,27 @@ paths: nextLinkName: '@odata.nextLink' operationName: listMore itemName: value + post: + tags: + - me.calendar + summary: Create new navigation property to events for me + operationId: me_calendar_CreateEvents + requestBody: + description: New navigation property + content: + application/json: + schema: + $ref: '#/components/schemas/microsoft.graph.event' + required: true + responses: + 2XX: + description: Created navigation property. + content: + application/json: + schema: + $ref: '#/components/schemas/microsoft.graph.event' components: schemas: - microsoft.graph.eventCollectionResponse: - title: Base collection pagination and count responses - type: object - properties: - '@odata.count': - type: integer - format: int64 - nullable: true - '@odata.nextLink': - type: string - nullable: true - value: - type: array - items: - $ref: '#/components/schemas/microsoft.graph.event' microsoft.graph.event: title: event required: @@ -547,6 +551,21 @@ components: type: string description: A property value. 
nullable: true + microsoft.graph.eventCollectionResponse: + title: Base collection pagination and count responses + type: object + properties: + '@odata.count': + type: integer + format: int64 + nullable: true + '@odata.nextLink': + type: string + nullable: true + value: + type: array + items: + $ref: '#/components/schemas/microsoft.graph.event' microsoft.graph.emailAddress: title: emailAddress required: diff --git a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-apiplugin.json b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-apiplugin.json index a3fac7f88fb2..ba3827350891 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-apiplugin.json +++ b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-apiplugin.json @@ -12,14 +12,14 @@ "text": "List messages" }, { - "text": "Create message" + "text": "Send an email from the current user's mailbox" } ] }, "functions": [ { - "name": "me_CreateMessages", - "description": "Create a draft of a new message in either JSON or MIME format. When using JSON format, you can:\n- Include an attachment to the message.\n- Update the draft later to add content to the body or change other message properties. When using MIME format:\n- Provide the applicable Internet message headers and the MIME content, all encoded in base64 format in the request body.\n- /* Add any attachments and S/MIME properties to the MIME content. By default, this operation saves the draft in the Drafts folder. Send the draft message in a subsequent operation. Alternatively, send a new message in a single operation, or create a draft to forward, reply and reply-all to an existing message." + "name": "me_sendMail", + "description": "Send the message specified in the request body using either JSON or MIME format. When using JSON format, you can include a file attachment in the same sendMail action call. When using MIME format: This method saves the message in the Sent Items folder. Alternatively, create a draft message to send later. To learn more about the steps involved in the backend before a mail is delivered to recipients, see here." }, { "name": "me_ListMessages", @@ -35,10 +35,7 @@ "spec": { "url": "messages-openapi.yml" }, - "run_for_functions": [ - "me_ListMessages", - "me_CreateMessages" - ] + "run_for_functions": ["me_ListMessages", "me_sendMail"] } ] -} \ No newline at end of file +} diff --git a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-openapi.yml b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-openapi.yml index 7903450f0c53..322b38a9e5a9 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-openapi.yml +++ b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-openapi.yml @@ -10,8 +10,8 @@ paths: get: tags: - me.message - summary: List messages - description: 'Get the messages in the signed-in user''s mailbox (including the Deleted Items and Clutter folders). Depending on the page size and mailbox data, getting messages from a mailbox can incur multiple requests. The default page size is 10 messages. Use $top to customize the page size, within the range of 1 and 1000. To improve the operation response time, use $select to specify the exact properties you need; see example 1 below. 
Fine-tune the values for $select and $top, especially when you must use a larger page size, as returning a page with hundreds of messages each with a full response payload may trigger the gateway timeout (HTTP 504). To get the next page of messages, simply apply the entire URL returned in @odata.nextLink to the next get-messages request. This URL includes any query parameters you may have specified in the initial request. Do not try to extract the $skip value from the @odata.nextLink URL to manipulate responses. This API uses the $skip value to keep count of all the items it has gone through in the user''s mailbox to return a page of message-type items. It''s therefore possible that even in the initial response, the $skip value is larger than the page size. For more information, see Paging Microsoft Graph data in your app. Currently, this operation returns message bodies in only HTML format. There are two scenarios where an app can get messages in another user''s mail folder:' + summary: Get the messages in the signed-in user\u0026apos;s mailbox + description: Get the messages in the signed-in user\u0026apos;s mailbox (including the Deleted Items and Clutter folders). Depending on the page size and mailbox data, getting messages from a mailbox can incur multiple requests. The default page size is 10 messages. Use $top to customize the page size, within the range of 1 and 1000. To improve the operation response time, use $select to specify the exact properties you need; see example 1 below. Fine-tune the values for $select and $top, especially when you must use a larger page size, as returning a page with hundreds of messages each with a full response payload may trigger the gateway timeout (HTTP 504). To get the next page of messages, simply apply the entire URL returned in @odata.nextLink to the next get-messages request. This URL includes any query parameters you may have specified in the initial request. Do not try to extract the $skip value from the @odata.nextLink URL to manipulate responses. This API uses the $skip value to keep count of all the items it has gone through in the user\u0026apos;s mailbox to return a page of message-type items. It\u0026apos;s therefore possible that even in the initial response, the $skip value is larger than the page size. For more information, see Paging Microsoft Graph data in your app. Currently, this operation returns message bodies in only HTML format. There are two scenarios where an app can get messages in another user\u0026apos;s mail folder operationId: me_ListMessages parameters: - name: includeHiddenMessages @@ -63,26 +63,18 @@ paths: nextLinkName: '@odata.nextLink' operationName: listMore itemName: value + /me/sendMail: post: tags: - - me.message - summary: Create message - description: "Create a draft of a new message in either JSON or MIME format. When using JSON format, you can:\n- Include an attachment to the message.\n- Update the draft later to add content to the body or change other message properties. When using MIME format:\n- Provide the applicable Internet message headers and the MIME content, all encoded in base64 format in the request body.\n- /* Add any attachments and S/MIME properties to the MIME content. By default, this operation saves the draft in the Drafts folder. Send the draft message in a subsequent operation. Alternatively, send a new message in a single operation, or create a draft to forward, reply and reply-all to an existing message." 
- operationId: me_CreateMessages + - me.user.Actions + summary: Invoke action sendMail + description: 'Send the message specified in the request body using either JSON or MIME format. When using JSON format, you can include a file attachment in the same sendMail action call. When using MIME format: This method saves the message in the Sent Items folder. Alternatively, create a draft message to send later. To learn more about the steps involved in the backend before a mail is delivered to recipients, see here.' + operationId: me_sendMail requestBody: - description: New navigation property - content: - application/json: - schema: - $ref: '#/components/schemas/microsoft.graph.message' - required: true + $ref: '#/components/requestBodies/sendMailRequestBody' responses: - 2XX: - description: Created navigation property. - content: - application/json: - schema: - $ref: '#/components/schemas/microsoft.graph.message' + '204': + description: Success components: schemas: microsoft.graph.message: @@ -503,4 +495,19 @@ components: style: form explode: false schema: - type: boolean \ No newline at end of file + type: boolean + requestBodies: + sendMailRequestBody: + description: Action parameters + content: + application/json: + schema: + type: object + properties: + Message: + $ref: '#/components/schemas/microsoft.graph.message' + SaveToSentItems: + type: boolean + default: false + nullable: true + required: true diff --git a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/README.md b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/README.md index b5642696db64..9174f1471718 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/README.md +++ b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/README.md @@ -5,7 +5,7 @@ These plugins have been generated thanks to [kiota](https://aka.ms/kiota) and can be regenerated if needed. ```shell -cd dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins +cd dotnet/samples/Concepts/Resources/Plugins ``` ### Calendar plugin @@ -37,7 +37,7 @@ kiota plugin add -t APIPlugin -d https://aka.ms/graph/v1.0/openapi.yaml -i /driv Microsoft Graph list message and create a draft message for the current user. ```shell -kiota plugin add -t APIPlugin -d https://aka.ms/graph/v1.0/openapi.yaml -i /me/messages#GET -i /me/messages#POST -o CopilotAgentPlugins/MessagesPlugin --pn Messages +kiota plugin add -t APIPlugin -d https://aka.ms/graph/v1.0/openapi.yaml -i /me/messages#GET -i /me/sendMail#POST -o CopilotAgentPlugins/MessagesPlugin --pn Messages ``` ### Astronomy plugin diff --git a/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs b/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs deleted file mode 100644 index c383ea9025f1..000000000000 --- a/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.ComponentModel; -using Microsoft.SemanticKernel; - -namespace Plugins; - -public sealed class LegacyMenuPlugin -{ - /// - /// Returns a mock item menu. - /// - [KernelFunction, Description("Provides a list of specials from the menu.")] - [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] - public string[] GetSpecials(KernelArguments? arguments) - { - return - [ - "Special Soup: Clam Chowder", - "Special Salad: Cobb Salad", - "Special Drink: Chai Tea", - ]; - } - - /// - /// Returns a mock item price. 
- /// - [KernelFunction, Description("Provides the price of the requested menu item.")] - public string GetItemPrice( - [Description("The name of the menu item.")] - string menuItem, - KernelArguments? arguments) - { - return "$9.99"; - } - - /// - /// An item is 86'd when the kitchen cannot serve due to running out of ingredients. - /// - [KernelFunction, Description("Returns true if the kitchen has ran out of the item.")] - public bool IsItem86d( - [Description("The name of the menu item.")] - string menuItem, - [Description("The number of items requested.")] - int count, - KernelArguments? arguments) - { - return count < 3; - } -} diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/BearerAuthenticationProviderWithCancellationToken.cs b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/BearerAuthenticationProviderWithCancellationToken.cs new file mode 100644 index 000000000000..5f0444aa22e2 --- /dev/null +++ b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/BearerAuthenticationProviderWithCancellationToken.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Net.Http.Headers; +using Microsoft.Extensions.Configuration; +using Microsoft.Identity.Client; + +/// +/// Retrieves a token via the provided delegate and applies it to HTTP requests using the +/// "bearer" authentication scheme. +/// +public class BearerAuthenticationProviderWithCancellationToken +{ + private readonly IPublicClientApplication _client; + + /// + /// Creates an instance of the class. + /// + /// The configuration instance to read settings from. + public BearerAuthenticationProviderWithCancellationToken(IConfiguration configuration) + { + ArgumentNullException.ThrowIfNull(configuration); + var clientId = configuration["MSGraph:ClientId"]; + var tenantId = configuration["MSGraph:TenantId"]; + + if (string.IsNullOrEmpty(clientId) || string.IsNullOrEmpty(tenantId)) + { + throw new InvalidOperationException("Please provide valid MSGraph configuration in appsettings.Development.json file."); + } + + this._client = PublicClientApplicationBuilder + .Create(clientId) + .WithAuthority($"https://login.microsoftonline.com/{tenantId}") + .WithDefaultRedirectUri() + .Build(); + } + + /// + /// Applies the token to the provided HTTP request message. + /// + /// The HTTP request message. 
+ /// + public async Task AuthenticateRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken = default) + { + var token = await this.GetAccessTokenAsync(cancellationToken).ConfigureAwait(false); + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token); + } + private async Task GetAccessTokenAsync(CancellationToken cancellationToken) + { + var scopes = new string[] { "https://graph.microsoft.com/.default" }; + try + { + var authResult = await this._client.AcquireTokenSilent(scopes, (await this._client.GetAccountsAsync().ConfigureAwait(false)).FirstOrDefault()).ExecuteAsync(cancellationToken).ConfigureAwait(false); + return authResult.AccessToken; + } + catch + { + var authResult = await this._client.AcquireTokenWithDeviceCode(scopes, deviceCodeResult => + { + Console.WriteLine(deviceCodeResult.Message); + return Task.CompletedTask; + }).ExecuteAsync(cancellationToken).ConfigureAwait(false); + return authResult.AccessToken; + } + } +} diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.csproj b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.csproj new file mode 100644 index 000000000000..a81a79967bcb --- /dev/null +++ b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.csproj @@ -0,0 +1,55 @@ + + + + Exe + net8.0 + enable + enable + SKEXP0040,SKEXP0042,SKEXP0043,SKEXP0050,SKEXP0053,SKEXP0060,SKEXP0061,1591,CA1050,CA1308,CA2234 + + + + + PreserveNewest + + + PreserveNewest + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.sln b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.sln new file mode 100644 index 000000000000..d16eae1498aa --- /dev/null +++ b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.sln @@ -0,0 +1,24 @@ +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.5.2.0 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CopilotAgentPluginsDemoSample", "CopilotAgentPluginsDemoSample.csproj", "{7F2FF65C-BC07-E142-D909-97CCFC4B0B50}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {7F2FF65C-BC07-E142-D909-97CCFC4B0B50}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7F2FF65C-BC07-E142-D909-97CCFC4B0B50}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7F2FF65C-BC07-E142-D909-97CCFC4B0B50}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7F2FF65C-BC07-E142-D909-97CCFC4B0B50}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {820AD9F3-FFBD-4690-9EAB-89D967E00ABE} + EndGlobalSection +EndGlobal diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/DemoCommand.cs b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/DemoCommand.cs new file mode 100644 index 000000000000..336b1832e455 --- /dev/null +++ 
b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/DemoCommand.cs @@ -0,0 +1,528 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Web; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.Ollama; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using Microsoft.SemanticKernel.Plugins.OpenApi.Extensions; +using Spectre.Console; +using Spectre.Console.Cli; +using Spectre.Console.Json; + +public class DemoCommand : AsyncCommand +{ + public class Settings : CommandSettings + { + [CommandOption("--debug")] + public bool? EnableLogging { get; set; } + } + + private static readonly Lazy s_configurationRoot = new(() => + new ConfigurationBuilder() + .AddJsonFile("appsettings.Development.json", optional: true, reloadOnChange: true) + .Build()); + + private static IConfigurationRoot configuration => s_configurationRoot.Value; + + private const string CopilotAgentPluginsDirectory = "CopilotAgentPlugins"; + public override async Task ExecuteAsync(CommandContext context, Settings settings) + { + var availableCopilotPlugins = Directory.GetDirectories($"../../../Concepts/Resources/Plugins/{CopilotAgentPluginsDirectory}"); + + var selectedKernelName = AnsiConsole.Prompt( + new SelectionPrompt() + .Title("[green]SELECT KERNEL TO USE:[/]") + .AddChoices([ + "azureopenai", + "openai", + "ollama" + ])); + + var enableLogging = settings.EnableLogging == true; + + var (kernel, promptSettings) = selectedKernelName switch + { + "azureopenai" => InitializeAzureOpenAiKernel(configuration, enableLogging: enableLogging), + "openai" => InitializeOpenAiKernel(configuration, enableLogging: enableLogging), + "ollama" => InitializeKernelForOllama(configuration, enableLogging: enableLogging), + _ => throw new InvalidOperationException($"Invalid kernel selection. 
{selectedKernelName} is not a valid kernel.") + }; + kernel.AutoFunctionInvocationFilters.Add(new ExpectedSchemaFunctionFilter()); + + while (true) + { + const string LOAD_COPILOT_AGENT_PLUGIN = "Load Copilot Agent plugin(s)"; + const string LOAD_ALL_COPILOT_AGENT_PLUGINS = "Load all available Copilot Agent plugins"; + const string UNLOAD_ALL_PLUGINS = "Unload all plugins"; + const string SHOW_COPILOT_AGENT_MANIFEST = "Show Copilot Agent manifest"; + const string EXECUTE_GOAL = "Execute a goal"; + const string LIST_LOADED_PLUGINS = "List loaded plugins"; + const string LIST_LOADED_PLUGINS_WITH_FUNCTIONS = "List loaded plugins with functions"; + const string LIST_LOADED_PLUGINS_WITH_FUNCTIONS_AND_PARAMETERS = "List loaded plugins with functions and parameters"; + const string EXIT = "Exit"; + AnsiConsole.WriteLine(); + var selection = AnsiConsole.Prompt( + new SelectionPrompt() + .Title("SELECT AN OPTION:") + .PageSize(10) + .AddChoices([LOAD_COPILOT_AGENT_PLUGIN, LOAD_ALL_COPILOT_AGENT_PLUGINS, UNLOAD_ALL_PLUGINS, SHOW_COPILOT_AGENT_MANIFEST, EXECUTE_GOAL, LIST_LOADED_PLUGINS, LIST_LOADED_PLUGINS_WITH_FUNCTIONS, LIST_LOADED_PLUGINS_WITH_FUNCTIONS_AND_PARAMETERS, EXIT])); + + switch (selection) + { + case LOAD_COPILOT_AGENT_PLUGIN: + await this.LoadCopilotAgentPluginAsync(kernel, configuration, availableCopilotPlugins).ConfigureAwait(false); + break; + case LOAD_ALL_COPILOT_AGENT_PLUGINS: + await this.LoadCopilotAgentPluginAsync(kernel, configuration, availableCopilotPlugins, loadAllPlugins: true).ConfigureAwait(false); + break; + case UNLOAD_ALL_PLUGINS: + kernel.Plugins.Clear(); + AnsiConsole.MarkupLine("[bold green]All plugins unloaded successfully.[/]"); + break; + case SHOW_COPILOT_AGENT_MANIFEST: + await this.ShowCopilotAgentManifestAsync(availableCopilotPlugins).ConfigureAwait(false); + break; + case EXECUTE_GOAL: + await this.ExecuteGoalAsync(kernel, promptSettings).ConfigureAwait(false); + break; + case LIST_LOADED_PLUGINS: + this.ListLoadedPlugins(kernel); + break; + case LIST_LOADED_PLUGINS_WITH_FUNCTIONS: + this.ListLoadedPlugins(kernel, withFunctions: true); + break; + case LIST_LOADED_PLUGINS_WITH_FUNCTIONS_AND_PARAMETERS: + this.ListLoadedPlugins(kernel, withFunctions: true, withParameters: true); + break; + case EXIT: + return 0; + default: + AnsiConsole.MarkupLine("[red]Invalid selection.[/]"); + break; + } + } + } + private async Task LoadCopilotAgentPluginAsync(Kernel kernel, IConfigurationRoot configuration, string[] availableCopilotPlugins, bool loadAllPlugins = false) + { + await this.LoadPluginAsync(kernel, configuration, availableCopilotPlugins, this.AddCopilotAgentPluginAsync, loadAllPlugins).ConfigureAwait(false); + } + + private async Task ShowCopilotAgentManifestAsync(string[] availableCopilotPlugins) + { + await this.ShowManifestAsync(availableCopilotPlugins, GetCopilotAgentManifestPath).ConfigureAwait(false); + } + private static string GetCopilotAgentManifestPath(string name) => Path.Combine(Directory.GetCurrentDirectory(), "../../../Concepts/Resources/Plugins", CopilotAgentPluginsDirectory, name, $"{name[..^6].ToLowerInvariant()}-apiplugin.json"); + + private async Task ShowManifestAsync(string[] availableApiManifestPlugins, Func nameLookup) + { + var selectedPluginName = AnsiConsole.Prompt( + new SelectionPrompt() + .Title("[green]SELECT PLUGIN TO SHOW API MANIFEST:[/]") + .PageSize(10) + .AddChoices(availableApiManifestPlugins.Select(p => p.Split(Path.DirectorySeparatorChar).Last()))); + + var apiManifest = await 
File.ReadAllTextAsync(nameLookup(selectedPluginName)).ConfigureAwait(false); + var jsonText = new JsonText(apiManifest); + AnsiConsole.Write( + new Panel(jsonText) + .Header(selectedPluginName) + .Collapse() + .RoundedBorder() + .BorderColor(Color.Yellow)); + } + private void ListLoadedPlugins(Kernel kernel, bool withFunctions = false, bool withParameters = false) + { + var root = new Tree("[bold]LOADED PLUGINS[/]"); + foreach (var plugin in kernel.Plugins) + { + var pluginNode = root.AddNode($"[bold green]{plugin.Name}[/]"); + if (!withFunctions) + { + continue; + } + + foreach (var function in plugin.GetFunctionsMetadata()) + { + var functionNode = pluginNode.AddNode($"[italic green]{function.Name}[/]{Environment.NewLine} {function.Description}"); + + if (!withParameters) + { + continue; + } + + if (function.Parameters.Count == 0) + { + functionNode.AddNode("[red]No parameters[/]"); + continue; + } + + foreach (var param in function.Parameters) + { + functionNode.AddNode($"[italic green]{param.Name}[/]{Environment.NewLine} {param.Description}"); + } + } + } + + if (kernel.Plugins.Count == 0) + { + root.AddNode("[red]No plugin loaded.[/]"); + } + + AnsiConsole.Write(root); + } + + private async Task LoadPluginAsync(Kernel kernel, IConfigurationRoot configuration, IEnumerable availableManifestPlugins, Func loader, bool loadAllPlugins = false) + { + // get unloaded plugins + var pluginNames = availableManifestPlugins.Select(p => p.Split(Path.DirectorySeparatorChar).Last()) + .Where(p => !kernel.Plugins.Any(loadedPlugin => p == loadedPlugin.Name)) + .ToList(); + + if (pluginNames.Count == 0) + { + AnsiConsole.MarkupLine("[red]No additional plugin available to load.[/]"); + return; + } + + var selectedPluginNames = loadAllPlugins ? + pluginNames : + AnsiConsole.Prompt( + new MultiSelectionPrompt() + .Title("[green]SELECT PLUGINS TO LOAD:[/]") + .PageSize(10) + .AddChoices(pluginNames)); + + foreach (var selectedPluginName in selectedPluginNames) + { + await AnsiConsole.Status() + .Spinner(Spinner.Known.Dots) + .SpinnerStyle(Style.Parse("yellow")) + .StartAsync($"loading {selectedPluginName}...", async ctx => + { + await loader(kernel, configuration, selectedPluginName).ConfigureAwait(false); + }).ConfigureAwait(false); + } + } + + private async Task ExecuteGoalAsync(Kernel kernel, PromptExecutionSettings promptExecutionSettings) + { + var goal = AnsiConsole.Ask("Enter your goal:"); + var result = await kernel.InvokePromptAsync(goal, new KernelArguments(promptExecutionSettings)).ConfigureAwait(false); + var panel = new Panel($"[bold]Result[/]{Environment.NewLine}{Environment.NewLine}[green italic]{Markup.Escape(result.ToString())}[/]"); + AnsiConsole.Write(panel); + } + + private static (Kernel, PromptExecutionSettings) InitializeKernelForOllama(IConfiguration configuration, bool enableLogging) + { + var engineConfig = configuration.GetSection("Ollama"); + var chatModelId = engineConfig["ChatModelId"]; + var endpoint = engineConfig["Endpoint"]; + if (string.IsNullOrEmpty(chatModelId) || string.IsNullOrEmpty(endpoint)) + { + throw new InvalidOperationException("Please provide valid Ollama configuration in appsettings.Development.json file."); + } + + var builder = Kernel.CreateBuilder(); + if (enableLogging) + { + builder.Services.AddLogging(loggingBuilder => + { + loggingBuilder.AddFilter(level => true); + loggingBuilder.AddProvider(new SemanticKernelLoggerProvider()); + }); + } +#pragma warning disable SKEXP0070 // Type is for evaluation purposes only and is subject to change or removal in 
future updates. Suppress this diagnostic to proceed. +#pragma warning disable SKEXP0001 + return (builder.AddOllamaChatCompletion( + chatModelId, + new Uri(endpoint)).Build(), + new OllamaPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto( + options: new FunctionChoiceBehaviorOptions + { + AllowStrictSchemaAdherence = true + } + ) + }); +#pragma warning restore SKEXP0001 +#pragma warning restore SKEXP0070 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + } + + private static (Kernel, PromptExecutionSettings) InitializeAzureOpenAiKernel(IConfiguration configuration, bool enableLogging) + { + var azureOpenAIConfig = configuration.GetSection("AzureOpenAI"); + var apiKey = azureOpenAIConfig["ApiKey"]; + var chatDeploymentName = azureOpenAIConfig["ChatDeploymentName"]; + var chatModelId = azureOpenAIConfig["ChatModelId"]; + var endpoint = azureOpenAIConfig["Endpoint"]; + + if (string.IsNullOrEmpty(apiKey) || string.IsNullOrEmpty(chatDeploymentName) || string.IsNullOrEmpty(chatModelId) || string.IsNullOrEmpty(endpoint)) + { + throw new InvalidOperationException("Please provide valid AzureOpenAI configuration in appsettings.Development.json file."); + } + + var builder = Kernel.CreateBuilder(); + if (enableLogging) + { + builder.Services.AddLogging(loggingBuilder => + { + loggingBuilder.AddFilter(level => true); + loggingBuilder.AddProvider(new SemanticKernelLoggerProvider()); + }); + } + return (builder.AddAzureOpenAIChatCompletion( + deploymentName: chatDeploymentName, + endpoint: endpoint, + serviceId: "AzureOpenAIChat", + apiKey: apiKey, + modelId: chatModelId).Build(), +#pragma warning disable SKEXP0001 + new AzureOpenAIPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto( + options: new FunctionChoiceBehaviorOptions + { + AllowStrictSchemaAdherence = true + } + ) + }); +#pragma warning restore SKEXP0001 + } + + public static (Kernel, PromptExecutionSettings) InitializeOpenAiKernel(IConfiguration configuration, bool enableLogging) + { + // Extract configuration settings specific to OpenAI + var openAIConfig = configuration.GetSection("OpenAI"); + var apiKey = openAIConfig["ApiKey"]; + var modelId = openAIConfig["ModelId"]; + + if (string.IsNullOrEmpty(apiKey) || string.IsNullOrEmpty(modelId)) + { + throw new InvalidOperationException("Please provide valid OpenAI configuration in appsettings.Development.json file."); + } + + var builder = Kernel.CreateBuilder(); + if (enableLogging) + { + builder.Services.AddLogging(loggingBuilder => + { + loggingBuilder.AddFilter(level => true); + loggingBuilder.AddProvider(new SemanticKernelLoggerProvider()); + }); + } + + return (builder.AddOpenAIChatCompletion( + apiKey: apiKey, + modelId: modelId).Build(), +#pragma warning disable SKEXP0001 + new OpenAIPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto( + options: new FunctionChoiceBehaviorOptions + { + AllowStrictSchemaAdherence = true + }) + }); +#pragma warning restore SKEXP0001 + + } + private static AuthenticateRequestAsyncCallback? GetApiKeyAuthProvider(string apiKey, string parameterName, bool inHeader) + { + return async (request, cancellationToken) => + { + if (inHeader) + { + request.Headers.Add(parameterName, apiKey); + } + else + { + var uriBuilder = new UriBuilder(request.RequestUri ?? 
throw new InvalidOperationException("The request URI is null.")); + var query = HttpUtility.ParseQueryString(uriBuilder.Query); + query[parameterName] = apiKey; + uriBuilder.Query = query.ToString(); + request.RequestUri = uriBuilder.Uri; + } + + await Task.CompletedTask.ConfigureAwait(false); + }; + } + + private readonly BearerAuthenticationProviderWithCancellationToken _bearerAuthenticationProviderWithCancellationToken = new(configuration); + + private async Task AddCopilotAgentPluginAsync(Kernel kernel, IConfigurationRoot configuration, string pluginName) + { + var copilotAgentPluginParameters = new CopilotAgentPluginParameters + { + FunctionExecutionParameters = new() + { + { "https://graph.microsoft.com/v1.0", new OpenApiFunctionExecutionParameters(authCallback: this._bearerAuthenticationProviderWithCancellationToken.AuthenticateRequestAsync, enableDynamicOperationPayload: false, enablePayloadNamespacing: true) { ParameterFilter = s_restApiParameterFilter} }, + { "https://api.nasa.gov/planetary", new OpenApiFunctionExecutionParameters(authCallback: GetApiKeyAuthProvider("DEMO_KEY", "api_key", false), enableDynamicOperationPayload: false, enablePayloadNamespacing: true)} + }, + }; + + try + { + KernelPlugin plugin = + await kernel.ImportPluginFromCopilotAgentPluginAsync( + pluginName, + GetCopilotAgentManifestPath(pluginName), + copilotAgentPluginParameters) + .ConfigureAwait(false); + AnsiConsole.MarkupLine($"[bold green] {pluginName} loaded successfully.[/]"); + } + catch (Exception ex) + { + AnsiConsole.MarkupLine($"[red]Failed to load {pluginName}.[/]"); + kernel.LoggerFactory.CreateLogger("Plugin Creation").LogError(ex, "Plugin creation failed. Message: {0}", ex.Message); + throw new AggregateException($"Plugin creation failed for {pluginName}", ex); + } + } + #region MagicDoNotLookUnderTheHood + private static readonly HashSet s_fieldsToIgnore = new( + [ + "@odata.type", + "attachments", + "allowNewTimeProposals", + "bccRecipients", + "bodyPreview", + "calendar", + "categories", + "ccRecipients", + "changeKey", + "conversationId", + "coordinates", + "conversationIndex", + "createdDateTime", + "discriminator", + "lastModifiedDateTime", + "locations", + "extensions", + "flag", + "from", + "hasAttachments", + "iCalUId", + "id", + "inferenceClassification", + "internetMessageHeaders", + "instances", + "isCancelled", + "isDeliveryReceiptRequested", + "isDraft", + "isOrganizer", + "isRead", + "isReadReceiptRequested", + "multiValueExtendedProperties", + "onlineMeeting", + "onlineMeetingProvider", + "onlineMeetingUrl", + "organizer", + "originalStart", + "parentFolderId", + "range", + "receivedDateTime", + "recurrence", + "replyTo", + "sender", + "sentDateTime", + "seriesMasterId", + "singleValueExtendedProperties", + "transactionId", + "time", + "uniqueBody", + "uniqueId", + "uniqueIdType", + "webLink", + ], + StringComparer.OrdinalIgnoreCase + ); + private const string RequiredPropertyName = "required"; + private const string PropertiesPropertyName = "properties"; + /// + /// Trims the properties from the request body schema. + /// Most models in strict mode enforce a limit on the properties. + /// + /// Source schema + /// the trimmed schema for the request body + private static KernelJsonSchema? TrimPropertiesFromRequestBody(KernelJsonSchema? 
schema) + { + if (schema is null) + { + return null; + } + + var originalSchema = JsonSerializer.Serialize(schema.RootElement); + var node = JsonNode.Parse(originalSchema); + if (node is not JsonObject jsonNode) + { + return schema; + } + + TrimPropertiesFromJsonNode(jsonNode); + + return KernelJsonSchema.Parse(node.ToString()); + } + private static void TrimPropertiesFromJsonNode(JsonNode jsonNode) + { + if (jsonNode is not JsonObject jsonObject) + { + return; + } + if (jsonObject.TryGetPropertyValue(RequiredPropertyName, out var requiredRawValue) && requiredRawValue is JsonArray requiredArray) + { + jsonNode[RequiredPropertyName] = new JsonArray(requiredArray.Where(x => x is not null).Select(x => x!.GetValue()).Where(x => !s_fieldsToIgnore.Contains(x)).Select(x => JsonValue.Create(x)).ToArray()); + } + if (jsonObject.TryGetPropertyValue(PropertiesPropertyName, out var propertiesRawValue) && propertiesRawValue is JsonObject propertiesObject) + { + var properties = propertiesObject.Where(x => s_fieldsToIgnore.Contains(x.Key)).Select(static x => x.Key).ToArray(); + foreach (var property in properties) + { + propertiesObject.Remove(property); + } + } + foreach (var subProperty in jsonObject) + { + if (subProperty.Value is not null) + { + TrimPropertiesFromJsonNode(subProperty.Value); + } + } + } +#pragma warning disable SKEXP0040 + private static readonly RestApiParameterFilter s_restApiParameterFilter = (RestApiParameterFilterContext context) => + { +#pragma warning restore SKEXP0040 + if (("me_sendMail".Equals(context.Operation.Id, StringComparison.OrdinalIgnoreCase) || + ("me_calendar_CreateEvents".Equals(context.Operation.Id, StringComparison.OrdinalIgnoreCase)) && + "payload".Equals(context.Parameter.Name, StringComparison.OrdinalIgnoreCase))) + { + context.Parameter.Schema = TrimPropertiesFromRequestBody(context.Parameter.Schema); + return context.Parameter; + } + return context.Parameter; + }; + private sealed class ExpectedSchemaFunctionFilter : IAutoFunctionInvocationFilter + {//TODO: this eventually needs to be added to all CAP or DA but we're still discussing where should those facilitators live + public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) + { + await next(context).ConfigureAwait(false); + + if (context.Result.ValueType == typeof(RestApiOperationResponse)) + { + var openApiResponse = context.Result.GetValue(); + if (openApiResponse?.ExpectedSchema is not null) + { + openApiResponse.ExpectedSchema = null; + } + } + } + } + #endregion +} diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLogger.cs b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLogger.cs new file mode 100644 index 000000000000..666fc5a4e1c7 --- /dev/null +++ b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLogger.cs @@ -0,0 +1,125 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json; +using Microsoft.Extensions.Logging; +using Spectre.Console; +using Spectre.Console.Json; + +public class SemanticKernelLogger : ILogger +{ + public IDisposable? BeginScope(TState state) where TState : notnull + { + return null; + } + + public bool IsEnabled(LogLevel logLevel) + { + return true; + } + + public void Log(LogLevel logLevel, EventId eventId, TState state, Exception? 
exception, Func formatter) + { + if (!this.IsEnabled(logLevel)) + { + return; + } + + // You can reformat the message here + var message = formatter(state, exception); + if (!this.PrintMessageBetweenTags(message, "Rendered prompt", "[FUNCTIONS]", "[END FUNCTIONS]") + && !this.PrintMessageWithALabelAndJson("Function result:", message) + && !this.PrintMessageWithALabelAndJson("Function arguments:", message) + && !this.PrintMessageWithALabelAndJson("Plan result:", message)) + { + AnsiConsole.MarkupLine($"[green]{logLevel}[/] {Markup.Escape(message)}"); + } + } + + private bool PrintMessageWithALabelAndJson(string label, string message) + { + if (message.StartsWith(label, System.StringComparison.Ordinal)) + { + var json = message.Substring(label.Length).Trim(); + + try + { + var jsonText = new JsonText(json); + AnsiConsole.Write( + new Panel(jsonText) + .Header(label) + .Collapse() + .RoundedBorder() + .BorderColor(Color.Yellow)); + } + catch + { + AnsiConsole.MarkupLine(Markup.Escape(message)); + } + + string[] nestedJsonObjectLabels = ["available_functions", "Content"]; + foreach (var nestedJsonObjectLabel in nestedJsonObjectLabels) + { + try + { + var jsonDoc = JsonDocument.Parse(json); + var content = jsonDoc.RootElement.GetProperty(nestedJsonObjectLabel).GetString(); + if (content != null) + { + var jsonText = new JsonText(content); + AnsiConsole.Write( + new Panel(jsonText) + .Header(nestedJsonObjectLabel) + .Collapse() + .RoundedBorder() + .BorderColor(Color.Yellow)); + } + } + catch + { + // ignored + } + } + + return true; + } + + return false; + } + + private bool PrintMessageBetweenTags(string message, string label, string startTag, string endTag) + { + if (message.StartsWith(label, System.StringComparison.Ordinal)) + { + var split = message.Split(startTag); + AnsiConsole.MarkupLine($"[green]{this.EscapeMarkup(split[0])}[/]"); + if (split.Length > 1) + { + var split2 = split[1].Split(endTag); + try + { + var jsonText = new JsonText(this.EscapeMarkup(split2[0])); + AnsiConsole.Write( + new Panel(jsonText) + .Header("Functions") + .Collapse() + .RoundedBorder() + .BorderColor(Color.Yellow)); + } + catch + { + AnsiConsole.MarkupLine(this.EscapeMarkup(split2[0])); + } + + AnsiConsole.MarkupLine(this.EscapeMarkup(split2[1])); + return true; + } + } + + return false; + } + + private string EscapeMarkup(string text) + { + return text.Replace("[", "[[").Replace("]", "]]"); + } +} diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLoggerProvider.cs b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLoggerProvider.cs new file mode 100644 index 000000000000..2e4aa284549a --- /dev/null +++ b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLoggerProvider.cs @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.Logging; + +public class SemanticKernelLoggerProvider : ILoggerProvider, IDisposable +{ + public ILogger CreateLogger(string categoryName) + { + return new SemanticKernelLogger(); + } + + protected virtual void Dispose(bool disposing) + { + if (disposing) + { + // Dispose managed resources here. + } + + // Dispose unmanaged resources here. 
+    }
+
+    public void Dispose()
+    {
+        this.Dispose(true);
+        GC.SuppressFinalize(this);
+    }
+}
diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Program.cs b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Program.cs
new file mode 100644
index 000000000000..b584b8b843f2
--- /dev/null
+++ b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Program.cs
@@ -0,0 +1,11 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Spectre.Console.Cli;
+
+var app = new CommandApp();
+app.Configure(config =>
+{
+    config.AddCommand("demo");
+});
+
+return app.Run(args);
diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/appsettings.json b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/appsettings.json
new file mode 100644
index 000000000000..4dd3abcb1ff2
--- /dev/null
+++ b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/appsettings.json
@@ -0,0 +1,19 @@
+{
+  "AzureOpenAI": {
+    "ChatModelId": "",
+    "ServiceId": "",
+    "ChatDeploymentName": "",
+    "Endpoint": "",
+    "ApiKey": ""
+  },
+  "OpenAI": {
+    "ApiKey": "",
+    "ModelId": "gpt-4o",
+    "Organization": ""
+  },
+  "MsGraph": {
+    "ClientId": "",
+    "TenantId": "9188040d-6c67-4c5b-b112-36a304b66dad", // MSA/Consumer/Personal tenant, https://learn.microsoft.com/azure/active-directory/develop/accounts-overview
+    "RedirectUri": "http://localhost"
+  }
+ }
\ No newline at end of file
diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/README.md b/dotnet/samples/Demos/CopilotAgentPlugins/README.md
new file mode 100644
index 000000000000..de3bf6ec8fc6
--- /dev/null
+++ b/dotnet/samples/Demos/CopilotAgentPlugins/README.md
@@ -0,0 +1,168 @@
+---
+page_type: sample
+languages:
+- dotnet
+products:
+- copilot
+- ms-graph
+- semantic-kernel
+- microsoft-365
+description: The CopilotAgentPluginDemoSample creates hand-rolled plugins for use in a Semantic Kernel project. The plugins allow for CRUD operations using Microsoft Graph APIs, so that developers can send prompts that auto-invoke functions against Microsoft 365 data, services, and resources.
+extensions:
+  contentType: samples
+  technologies:
+  - Kiota
+  - Semantic Kernel
+  - Microsoft Graph
+  services:
+  - Azure AD
+  - Microsoft 365
+  createdDate: 2/12/2025 4:50:18 AM
+---
+# Copilot Agent Plugins Sample for Semantic Kernel
+
+Sample created and managed by [Fabian G. Williams](https://github.com/fabianwilliams), Principal Product Manager, Microsoft. We believe that Copilot Agent Plugins (CAPs) empower developers to effortlessly build AI-driven solutions by transforming natural language into seamless CRUD actions using Microsoft Graph and Semantic Kernel, revolutionizing the way we **developers** interact with Microsoft 365 data and innovate.
+
+## Watch the Videos
+
+### Why use Copilot Agent Plugins?
+[![Watch the video](https://img.youtube.com/vi/la1UDNn3eP4/0.jpg)](https://aka.ms/m365caps-videointro)
+
+### Live Demo of CAPs in Action
+[![Watch the video](https://img.youtube.com/vi/-D3KdiPySxw/0.jpg)](https://aka.ms/m365caps-videodemo)
+
+## CAPS Public Roadmap
+
+Our timelines may be subject to change; at this time, our current GA release cycles are:
+
+![A screenshot of the CAPs Public Roadmap](images/CAPs_PublicRoadmap.png)
+
+Want to get going? Start your journey below!
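+
+## A note on logging
+
+The rich console output you will see while running the demo is produced by the `SemanticKernelLogger` and `SemanticKernelLoggerProvider` classes under `Logging/` in this sample. The following is only a minimal sketch of how such a provider can be attached to a kernel, assuming the standard `Microsoft.Extensions.Logging` and `Kernel.CreateBuilder()` APIs; the sample's own startup wiring may differ.
+
+```csharp
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel;
+
+// Sketch only: route Semantic Kernel log messages through the Spectre.Console-based
+// logger defined in Logging/SemanticKernelLogger.cs. The demo's actual wiring may differ.
+using ILoggerFactory loggerFactory = LoggerFactory.Create(logging =>
+{
+    logging.SetMinimumLevel(LogLevel.Information);
+    logging.AddProvider(new SemanticKernelLoggerProvider());
+});
+
+IKernelBuilder builder = Kernel.CreateBuilder();
+builder.Services.AddSingleton(loggerFactory);
+Kernel kernel = builder.Build();
+```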
+
+## Use the CopilotAgentPluginDemoSample application to use and create Plugins for Gen AI experiences in Microsoft 365
+
+### Prerequisites
+
+- An Entra ID / AAD administrator account capable of registering an application. You can get a development tenant for free by joining the [Microsoft 365 Developer Program](https://developer.microsoft.com/microsoft-365/dev-program).
+- [Visual Studio Code](https://code.visualstudio.com/)
+- [Semantic Kernel](https://github.com/microsoft/semantic-kernel)
+
+### How the sample application works
+
+The sample has the following features:
+
+- This is a console application. The user opens a terminal and issues the command `dotnet run demo`, or `dotnet run demo --debug` for debug mode.
+- The user is then presented with options for the platform hosting the LLM: "AzureOpenAI", "OpenAI", or locally with "Ollama".
+- The user then chooses which plugins they would like to load for this sample. As of this writing there are four available: Contacts, Messages, Calendar, and DriveItems.
+- Once loaded, the user has options to inspect the manifest or the plugins, or to run a prompt using the "Execute a Goal" option.
+- The user enters a prompt that satisfies one or more of the plugins they loaded.
+- If an auth token is not present, the user is prompted to sign in with their Microsoft 365 account. This demonstrates how to use delegated authentication to run on a user's behalf.
+- The user's prompt is reasoned over and a result is returned with a description of the actions taken or data retrieved. This demonstrates how the app can reason over Microsoft 365 data and synthesize a response or take an action on the user's behalf.
+- The user then has the option to issue another prompt, load additional plugins, or exit the application.
+
+## Setting up the sample
+
+1. Register a Microsoft Identity platform application, and give it the right permissions.
+1. Create an appsettings.Development.json file that follows the pattern of the sample appsettings.json file included in the sample.
+
+### Register a Microsoft Identity platform application
+
+#### Choose the tenant where you want to create your app
+
+1. Sign in to the [Azure Active Directory admin center](https://aad.portal.azure.com) using either a work or school account.
+1. If your account is present in more than one Azure AD tenant:
+   1. Select your profile from the menu on the top right corner of the page, and then **Switch directory**.
+   1. Change your session to the Azure AD tenant where you want to create your application.
+
+#### Register the app
+
+For demonstration purposes this sample uses a [Device Code Authentication flow](https://learn.microsoft.com/en-us/entra/identity-platform/msal-authentication-flows#device-code); however, you may choose an authentication flow that suits your specific scenario. If you do so, you will need to adjust the authentication class "BearerAuthenticationProviderWithCancellationToken.cs" for the sample to keep working.
+
+1. Select **Azure Active Directory** in the left-hand navigation, then select [App registrations](https://go.microsoft.com/fwlink/?linkid=2083908) under **Manage**.
+
+   ![A screenshot of the App registrations page](images/aad-portal-app-registrations.png)
+
+1. Create a **New Application** and ensure the values below are set appropriately for your authentication flow. The values shown are for device code.
+
+   - Provide an appropriate name for your sample, then copy down the **Application (client) ID** and the **Directory (tenant) ID** and save them for later.
+
+   ![A screenshot of the Register an application page](images/ApplicationOverViewScreenClientIDetc.png)
+
+   - Set **Supported account types** to **Accounts in this organizational directory only**. This ensures that your app will only authenticate users from this tenant.
+   - Under **Redirect URI**, ensure the value is set to `http://localhost`.
+
+   ![A screenshot of the Redirect URI configuration page](images/AppRegistration_Authentication_localhostredirecturi.png)
+
+1. In **Certificates & secrets** under **Manage**, select the **New client secret** button. Enter a value in **Description**, select one of the options for **Expires**, and select **Add**.
+
+1. Copy the **Value** of the new secret **before** you leave this page. It will never be displayed again. Save the value for later.
+
+   ![A screenshot of a new secret in the Client secrets list](images/AppRegistration_AppSecret.png)
+
+1. Go to **API permissions** under **Manage**.
+
+1. In the list of pages for the app, select **API permissions**, then select **Add a permission**.
+
+1. In this sample we selected the delegated permissions you see below. For the hand-rolled plugins to work, at a minimum ensure that the Mail, Calendar, Files, and Contacts permissions are selected as shown, with at least read access.
+
+1. Make sure that the **Microsoft APIs** tab is selected, then select **Microsoft Graph**.
+
+1. Select **Application permissions**, then find and enable your desired permissions.
+
+   > **Note:** To create subscriptions for other resources you need to select different permissions as documented [here](https://docs.microsoft.com/graph/api/subscription-post-subscriptions#permissions).
+
+1. Select **Grant admin consent for `name of your organization`** and **Yes**. This grants admin consent for the permissions of the application registration you just created in the current organization.
+
+   ![A screenshot of the configured API permissions](images/AppRegistration_APIPermissions.png)
+
+### Update appsettings Development File
+
+1. Rename the [appsettings.json](CopilotAgentPluginsDemoSample/appsettings.json) file to `appsettings.Development.json`. Open the file in Visual Studio Code or any text editor.
+
+1. Update the following values:
+
+   - `TenantId`: set to the tenant ID from your app registration
+   - `ClientId`: set to the client ID from your app registration
+   - `ClientSecret`: set to the client secret from your app registration
+   - `RedirectUri`: set to `http://localhost`
+   - `OpenAI`: if you are using OpenAI as your LLM provider, ensure that:
+     - `ApiKey` is filled out
+     - `ModelId` is filled out
+   - `AzureOpenAI`: if you are using Azure OpenAI as your LLM provider, ensure that:
+     - `ChatModelId` is filled out
+     - `ChatDeploymentName` is filled out
+     - `Endpoint` is filled out
+     - `ApiKey` is filled out
+
+### Start the application
+
+Open the repository with Visual Studio Code and open a **New Terminal**.
+
+To run without debug mode, type:
+
+```shell
+dotnet run demo
+```
+
+To run in debug mode, type:
+
+```shell
+dotnet run demo --debug
+```
+
+Then follow the instructions provided.
+
+## Troubleshooting
+
+See the dedicated [troubleshooting page](./TROUBLESHOOTING.md).
+
+## Questions and comments
+
+We'd love to get your feedback about the Copilot Agent Plugins sample for Semantic Kernel. You can send your questions and suggestions to us in the [Issues](https://github.com/microsoft/semantic-kernel/issues) section of this repository.
+
+Questions about Microsoft Graph in general should be posted to [Microsoft Q&A](https://docs.microsoft.com/answers/products/graph). Make sure that your questions or comments are tagged with the relevant Microsoft Graph tag.
+
+## Additional resources
+
+- [Microsoft Graph documentation](https://docs.microsoft.com/graph)
\ No newline at end of file
diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/TROUBLESHOOTING.md b/dotnet/samples/Demos/CopilotAgentPlugins/TROUBLESHOOTING.md
new file mode 100644
index 000000000000..7dc291928350
--- /dev/null
+++ b/dotnet/samples/Demos/CopilotAgentPlugins/TROUBLESHOOTING.md
@@ -0,0 +1,11 @@
+# Troubleshooting
+
+This document covers some of the common issues you may encounter when running this sample.
+
+## You get a 403 Forbidden response when you attempt to create a subscription
+
+Make sure that your app registration includes the required permissions for Microsoft Graph (as described in the [Register the app](README.md#register-the-app) section).
+
+## You get a build error when you issue the `dotnet run demo` command
+
+Ensure that you have copied the appsettings.json file into a new or renamed appsettings.Development.json file as directed in the [Update appsettings Development File](README.md#update-appsettings-development-file) section.
diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/images/AppRegistration_APIPermissions.png b/dotnet/samples/Demos/CopilotAgentPlugins/images/AppRegistration_APIPermissions.png
new file mode 100644
index 0000000000000000000000000000000000000000..34b793d26a39a45df28d457b846c460138abcca0
GIT binary patch
[binary data for images/AppRegistration_APIPermissions.png omitted]
zw-XCTFRLuiXx$lRR3BLFX^fITz5P92d{>hS3lH^p68%7zqZDf-j$qKz8V%h*8|$dE z2K0MUmPrkOMvO7l^rIex^?ott3AA6>Y1tra9NYqbwhZpLX;mj2t#WzMegTGQGIgY5>|OzWumU+f7h9c1k5c7UZd8jcF~-?0NmY91lJRT-0fNXq1T@9#gU`N z=Ncb(Nn~u4oG6uIoLG0EM-ZQ*Ra*i7jEU)IepCEu&RE2KO}aiOjU7KPZ*7Z&M`hC= ziR?9|%~^k!qkip&e-EK-4OuJnK7DWb$>9wl!g7dA`kgH0z7~ckt&O)(bJqz>C)7b1NE{UitH5r0m|o377*PsAL<`(2fNQKf=oE@15HQ>o9RvU6QHXdD%JZse2lG7$^jb7zw&7(9YXVD;9b3T& zD0-@D#$&a!hD>`Nj1V?{&HN6#-E|3L|AaruyQxYc1>bzH8&Bi&@9d&KHL#bt@TR;q zHC-754p|{XIUzV3bA4Rk1^?hExd`mMJyG5X!wSAQJx3*0!D6sz>qP0fx#-P2i+o&u z1#i7oaB`>%FQs4JfOz-*nfCf0cTQGO!MA(5fe5H&-}RtpQmbtQuF$~)T} zex-g)@@vAse%dv(?%V4u|4k|q7}VA23VTQOsrbRlvTOaXCi{%)8}p$7#-^L4YfL#L zG{t-qdj*MK6S!VcYK0GA#R$9yHujx+IOqj)2)k-S=gKr_P`ai&iZ!e&9flf zXn#00`=Oj?WAQ^r0o(3BVi+$XuOMCKrXPAlMxwe{BBWx!2qEyK5?V`Z)2VT@2)LM} z;*7_+F}o{gv7fdzdNaDUmyU9wllFhGqy}UW09D!waYwb<6mnA&vA)X z)9na9P=>TR_LKluniTac(!SOse9XsunB!-y>t|x@+$_o>SjkEJ%Wt-)>+NUT?F)f( zL-CB&gif1S9f47S3CYm9Kv+!Rsw(MB;F76PjZiO{@n?gT+N-sjP+)jt15^(O+5~I{ z06)?ELKff5KN5en2!NzFL3Ls-KESrR{tt0)85PI(g=-QafdC2a65QQ`B@iIEySux4 zAh^3rfMCJh-Q8UpZyXwjMyB}v<=#7Yt(m*#%Y5n8QhloGoT_v7e)hX}bvJ_(p2z!TDouRUmRAvnj*m5j*Q)af$M7e5>71|$z=KF;dYBL zd?&lL&^2G+S7+hK(8Ft+Ng;$%%A3_$8X`owPXn9jO6)Cd=$fJ`A3 zbkhs|J@8>y6AV#t+k|DMJ?8GY?}3#ZzVMtr^ctCF89b{G14iB~x|u&=;$2h*R@+Z# zdcf=<=L-~3fOoPsAx!O!jVN01WoPNA;FGzQ!9*RwdesZUF5oc8neKCeJeuriqa zkT^RL{a&7Z*t4A!iod7Dp%t+|tVzo;JR;?g5|KLI4jJPOTcrS#FGci+ib~|VM1|cJ zX*KOw@#}m2(7 zWGW17+E4&0gK=gxDzUTN&{!p?heKgXRH>a$x&e0#n02%(D;PTLMmn2v(yIWWmKi3h zJ@IJj00;ETO6$^vt_V3odRA%O2yC4mFkN4P5<(Er}cZ!wyE*Bdwpn+;D=BGo60iuS-_>6L4}?W{s8k zFA%F8uP!8gj%cNSS}gw*V>A#rbncAmf7{)xl-xMxg*XYTWcjhCom7+w%d!7JGjo%5 z>vs)?hB4XP;Lb_5xACFhI|r?|<_5#zCW8~a;PV}zx-duRFa*&?6=f_IEU`LkoIfC0 zx5y_}j(M}%7jEN88p1;VQxi<&T;?AS#n!bprUulB_x9!4F^zq9f2*Q-%VWb!$DHDB zJW%;Zw+F`ay7-Q!X!mQ$C%qWzQxL=)M*EdA1H7u*H%Rm7QWb;UELu0npEA)va1|rA z+NR5Z)Ct_lJFZD{ovsOP(q|dacN0_SCs@~|?`bk-qm8E?$OG}<9P!;U$7>tDkL*B;5G6V!G11PA9AW+BX+OY`IYfVW7~aMwp^19^Xivt!o+vtuqPCH4utY- z-=fBrI}m|@_Qu3H%Sz^OVJOBQm*{bm`@R-w>ZA|VT83Z3Npp{7)DW*f`Y>ouKWH9{g z+H?i}Fy3f}_tBG)PJy926aj%j23c6+IkqzA{c>vXfP%kvvUr0HjE27Nq<32S2l=pu zttzc>*Pb9s6!!X@<;6tU)W!@b`q1?%4tg6-oINKlI^BPKswq4Z#;1>+AK~zW@SuH3 z_B%s9ykR*N*}eCyiDiz->AwXEmE9{_gVh40bjMul#}*wp5BbJgE}aXtC0^(Ym3n!Z&`g2Te$N>MK( zh9_@xQe}K1vm`yplE{Q;aHbnypiZgV8_~{zu`t{nat1BUjnS=Sv{m|GsDD0RGTF9# zpqn;c=&8C}WSX1^MpIe5S0vr9^~7FIA6AxTP5nZ%yv{&97D{)Kcn=E4g7!`JkTpdP ziAxHR9xz(%I_O_3y1J@lvR9S=e4SqWu1lV}A}Mb8HHcgN`}QG9y)5T-L%3AaegS`! 
z&hs)FuNwNiZZK&F{hNY|(&%Ig_w)|GOYvQfLlKU(dDM@gB}Sv4@ZpGX18SdmoyK}J zVv0j6|5OZqS2YzRarJuQId`c1h*WT=)5~U$kB8s|%}Cr6Gw$7yt?N26v7UsJj8V2{ z%xkvL`TYz;$xm0lWct4GVx+OgVGY#i2lcUgc@?*Gb$5Hk8Yb~PI+2lNjIHmhplR98 zn6TUm;r(@W2AN$4YHvIk)TY+*9N6G|cQmEP8Ew7LdnWwO(`J}qfxXpSkdC2Jd*tNU zKSLC*7YfGRCri%an~ow+@0AAl_0-u(+^+&r-CiJTa;BZu0^xvi@Zr?P*3{*w*?h%{ z^K?$OpF=KIdZtC3#a;oPJU%$v$UCsb4PnOk^{&|(meEJLVXdS@V61YPxE&ZHa|BHi z^ffNMn7J9}SS2!)el#KlK6BB~c6u>uhR&*mC_U$-#2P&XeVJ?ieB)xRUp=&#_;HRT zNDg*))rJ+ja)zjBCLR1|2Wu&Uz~#3CMn(DJA&`I4*Ob>-?~N5xhazrn4VaV;b`fJA zkUsSvu+OB>AT;8QCMzA!?jt_**~;78MT}S7cjLXfhyA2CM?cP9JU+2_xM0j+nB!4( z>sqqAO@PEyuHFCih4m2R#G_{*L1yd_SQ zeSbmKYTT0yw~35ZPVyUr=K)>GQ+zT227V5Xe}|#d`EZn?em!=HNY3{1YqH37oO5Vd z%2;;HmfsIM2Ay=xjTIRc`M$g7$n4JVP~^{zuM~E*x|dQgN`^x_+mki6$6Y3Bth~h6t)gEYI+%IyM@Vz-&T0+q5_5n|5 zxFah*%+_nBq>wRUhkP?*87Dcwg4^VaTdr?74Wo5*#xYET^HCDk;P^D?y7n5YeL91q=j|>0dGb`&m9`T|x;2yIHA!S{x<3}IROLJ-G3UZoZ z+&kY*ubD?{wv&FtPnnrZ>EuP|95;X1%N^on0R7;&lI=UDXL`&6i?9ocsCj_hz^7N>-sv{W9#56)P;ZU#hL$B5kfMy^-RX zdnU(28}JRL?nIXJuQN*L4l!ShQbGzw9dax922X5a< zEdXjm{rR;7nItNFXm`0gmqa3NeaF%Qx=Cejk^8;`#IZ7@)3uG6>Bm`Gqc+0@s~-G%Ww6t@v98!}UB;k-r2~_xXW` zz~vM450|Nd|Njf97IiCowz^d$R{3OXEP%nmO&cGqiR{Veuayb^^Mv}nCI9*VXaT+> z60j)LwA2Kya9S)}N0l$Q+_IIoKw)p%-P1v*+_O1AKD`)_nZPHIl7~gTdA&6tLt-8X z8#F#8kLgBKPfd`f0^Rl$?f!51qA$rGRUlXOqy>xe&L5f_H$-zi?6uA?Y@c|E9u5&1 zpKAk`gWlPB+14B1n20mx>pJfVhd1zXQ&j~T0N%iPtYXzx%Y2HO**G!gZ?cR+9L%~> z8LO=SDnJOu2ZG@wJD&f+X(|3mh45tlA5EWImJcYYU-7U~4|!^-Stw=gwE6W1up-`Nb?O zsqSI?i{k+o4|tv!Rp+_+f($6YJzejjl1T0qxAMm(7TFC$R65fry@&|JtTi4JXR@ev zMNZ`9o~UEZ9a(!6W_)Ee9^yio9D70FKc+3+=c`Glq&uAchIS4}05nfSXnfIPEGB9X zu8hcslPU8mJ1`qL8?1`&a)AeF{>cf<7#^;o;96$1y-_e82T}!*;^ghPPo9R9{j0a{ z+Q@Q#Mg+VI$JgY`2KO%Xl*Me}1Ym%UAqWZIcyWQM(9uOXj^<7q?C)9$VKs1%u-JlQpn(b(%&uXQ5r7ceH>ZAsIUm=K&HY2SJu z?DaWrrG^B*8jVR=uM_`p6aYSY-(OJiCyO4?x9d$weX3FLCrPRY&@;wVGi|LC_+^xYpjX>N zYn?}b??cNE#vs>0I_Gmi1%;(N<0?*I&nJ+%-p2PDvF+?XWsC)MSW}*XUsCy$bg%7) zjxztU{EE4(Vzey-rG^v*c+^={0wR>mjo6Elb=H+h+IKGU(*FvxsCVg174rj}b4Vjd z+swN_DG?|wvMS2yvJwXic>6)`P~kTArtG;{NZ|D>nNkQ*VQ>j_rQQ zr1-Y{=4c4!2RU-`3N zHVst?F3ts=6|_?B^g)d;4^ketINUKbGW(oCDllBO=NgSh2QK=w0s*A^-eauQ`T3C3 zxdNydK~GFERU1W(hjRUQqL4!iWj0SMsEx#}pV_O8Yzco*d(V>(H2Msh)WG#+sFyYu z=lOY*5tN~@*2mpusl>meb!bLst2;g~h}rEFf`@O>No}>ASR2h!>z8|?Un8eQuZnnQ znJ4@y>&VFCLBtG!&4X$vn%~RsUjHCtLbZ(VbdOWUx+jTnH~GF-wdR|cc?+6;8PAMo zs#}_YkDfDU6g&=t)9;WhrCiIzz;-wLw%RA zQxt#C4g9k}`j0WwNEt!pv4U$^_0QRqdOH0aBs+rXqE@-U3IDng)u_lU!dMNRA(7)E$0o%7rfRhbmVG;)E|Vg) zjVYs2`>!Wj%SU~N6buK?S2aL&p0Owy?*YbVN8-mp=Jmv$ZmF#HI7R{2et)-A^JVy% zeCt{T0;ow_M%{0Mug#rQ3nCg5al{4s&hLaEcQs|+(CFnuDozN=#jq`_yT*kyBc~GA(8${ z877@;{FR6v^1vHxirYS3WgE#=JLd6rrm#bYzuU`ZJg?b55>2AQV_*perzOar3R=a2 z{L)s5#@jHTT`G6XUfZ;HHG{RmnZlpWGvozMGd(hh@T>c$zpwFhHoear$=vwLNrU0q zk0Xq6X`0(v10CGF^)kpR#sz(4cK$iWz=cW@UCxfres_FywCDSGRrSlI2zxu)d#Sg6 z6q|a{mXESFbYFoU*;SbNUS~!i`!XCglN!RN5%NZ5+OMqjuOANZl(K3l&5W5h=Gq(N6GKeiCGvbTQ0LT4W7b^AEx9- zZQWbkc2AMgK0YMJ|EQp-7`_;roV-6VuE>v#o3j~0soKKD+3 zHIpi%ze0}tVx;?G)JDNujp^Lj(nO_3BPPHN!8NRq#=4zi%sd-4x@G9dMSr?bSQ6eZ zA?2Y7O0G{fY53aA-*qVI=r6^N!0pDXJf^3;?fmy*UDhF4>;3!9{x37U9mHejGrPne zzYFxC5>Y2+ukBVn$fGyy&3O=@@x?3(ow5`#UyCSjS+M&%kYA-Av?c*vIJEj!$a)Bo zubO*@piw)4*;f+0;gTQj%n{g&Gn}Dks3lLJxP|wFfS7#|MWJtUaj;yc&s{3jfgEKh ziMbTAVgODv)6gv8sD@2{m48g6HC1Lo2b!J4G}8+a}Kna>(|IF*Z`-Ey3~<&S&g7PL(g4#KFRN@!@Va z1jfY7=~-hvVA8EO!0$?hhO)j_CU=aBGW}oOn9O&opFaz zzX+4Hqr-9CgKlh*d^wU21 zWH&>p-Y=tV)CKu?2WJ&&;U8i9tTm<$mYCn73I=Q9fANlyvPd%aJb^SNgr8^(?zI{5 zUM$I~zZuQmby-hy$Gs_K>%onQ)C#syN%9$~mw`d7KJ*}}Udk@eL3n@Zscc!Z*RK{b 
z6cobLrOMB^jbcgYx6sD8ZI)Zkf}8)y?rMCj&&B++s}YiEAO|iYQcwu;BUKwsYY+!E39_YcQ@mSF?7_M@7=w9A zM6W;61ag%B=~%>omL5{oU%*6lD##tn3liU*|BU%pU-JjWGzTf#btEC~2P<`47iO=r zu0QtpN=|)D$(?4D%aEi*v(Edw-F2G68?^g#TtVFw+E9SXxBqA7o@sQrbU`fUS?UT94po&fx+No>gi7 z2pSVUqqu7C+0x#FR$;L)#@S9YLQinlQTxO7OpS#T`22BNKxW#i**FcGLSKb#QDuLW0tku?0=(&-2yG zS@%m>QATrLgOwsLd;X}g9-GX%j&x!W>L*|0!jSL8`wkcyvetOQ4!R5z&8KZ?=jC1z z&-ZzZ7mcEXAvQjR715NLg`1m!VV>Ss2e;S#ZMVWFjW^o6AevD=W_dKhVDA3Zf*@G_ zRNMpC!~)GEvz7YT{>iS}DHTbrpG}OQ3G8=>#%UM=X?o7bx6!jF2}k48T}bU2)Qh=y z;V08VZ`FQqIn5;T_s^&N(xZDne(squ2U9y($zoJijvtlec*L`?%{QK1`!!QpW{%gC z%RA9zO|>JxNi5So-#u;kq1nt3L)4dv12^(d6mKX+0H(3{^ze1lg0MEtaUZ`b(bTRn zK47X1C?{LG{UTpx;FP?+U`kN_nVV)x?Fx->NTE7v^+m#%oHHM~CwS`@7$Yl&wH{a# z12^X(mTH)UPE5G^oi|-+XO@S%PJ5wi(~@BmJ}% zDl^g$kvqfM8Lax2%KGtIO^h~-MWPLm>&(xgmzlrh94RlA!akX5?sPqo_{l$l`bvbw zf>>9mDBbmsVi9WghI$HoCOW(_oqqX6ZBq9=H!IvqzTl9w}UEkZa2N^JONjfc+>BmDK77aBn#m z(_dIvbC^i`RL9DS68EH6>u+h5SbNZ#DRr^Ff4^{Llf-4#q@+WBMfW~3@1*hPY(7ve zy{XG!&8j4lVs~hYBIJF;z~G)|$^Cc8ad|{!+@OA@j~-5hCdB|#v-6*F{zm;=*aYIt zFWKw|1f~)hpXe?i%E`NEo>p9$*AtMiK(E9qvsE}GD^hhh@(TUqyGGP-WC7}V!-@IWdGfs$>2bx>bB4$i*j0M7rw_TSS~$37aWhPSpHZTF>LPV@y|E;p|Mli_epO?halyL#X@h=sfA7 zbX78nlAG_J3Eb~gOo!z#CgRC;6ogb zj~weQF7$Sm%~%7AlUz41411^g39%Yk{omjU4XqM78y|2DsDCt2&4%|7ts5)Z23KVT zQ8JT%qXT*JSV5Wq$3H`&gmj22!B^X-=!(P|mt|7|j`=;k(KCNd$krA8T&hv}{eyl< z`)le^~qozU{FUxWCbloS)@w|z+90RBM$q--iMeKVxT7(cD+Zgi-j5n z2cqum-D#?R#GPLyoVHQ(^cx+!Ovly;2f9c2FmW96bXyR@QBl6kAI)Z~et&pN-H<(n z(s~FdC|jSCu`Xh#jd*UPhGh&E@#ngjSZ6f?dtr@>e^X8x6kgHQ;W1RXn-;1^jhcI| z{i@8O*GMICK`?LOoD%t^;OiZH<9X0pIwu}*TXvU6RZ=i=Fz0STnQ;@rHs%H+j5lIg z`l}W4(Mm!66FqnM47K}QK)oiYK=#`p*QcTFYbmeM8Wf2o8fA#~nX8tGJ5n7AhyRut z-_;m>MjxU3I}6;>Z#tB`IAXWu=LIFR0TmFm^buoetLCPagGI5+8k~O~ zlxehK<>IiuUGMtr>0Z#4x`X#J`nMwlEGLS=`?kZW>=u2IL<0PF;>7;Qf|@G8%!SWp z1P6~M2uIvwX?r9iKzzOi&kzno&^PP}wXbK>yYK>#6}E;FXdym6H_aee!=5B$K{#fi z%c<>GJHc%3@IXV=vw#z3xcWv$+J)I4`Q6kRIncOd-1h9-+OzT+_^ANkAHZfRQn+ov z55Z>of2}BGc~>N@t zkaRlh862)xs94>TFKOGB9wZ7?0?{IE9QDav?j2weDj+DDKA~v3cH?JDd z?*I?UnO7XLKm$yTe5WrGpZ~`>){le+mLsT$+})mI@V9ve$S_cHdaj~AkI#lqm8Qyl zSYf*>Cnx&P=>eS*Hxeq_Z$hZiGjS)^H?h>f@6}(xWg*7)Q!QzANN0(EeP}_L-)s3z z9Pnf=yxQM>bx68AE5l>4o7D8p|2<3pWWs9wp84mSQ>?ptqIZg-_K(zRH_dN`1Xf+g9ak-uhDZAt-oD) zP|<`K?e-O4BEzNDh?Lk3OhBmFDDU}cs6Tx!GOS~OE@I!7@G zhnzId?{lvT{<>F-e9#+r#8L=On=3KBUAi5}f^h0I{c69PBJdov$CJS(6tKPFlf0VK`y)f5d}{9w|HUhzptAS zxMm%%*sV9mTltX3UGAr=ktm08#}3hm+6gA9F-63mK$oUk^0!)!yPo3A7Ca5|dyV_+ zE8I1>vkC~T=Ta;?a3?7Qt4*CHd?`>lf^_;k`!8cX^OXBI#wvwzoZU4ScC_u66FO6 zVod|H$?6{GgBi%he#?v1{c7aN4zD$oUdEvLloke`>#lT#<%*{=A*(CKvHT$A%=;qn z{8U8A9q#RNxjjuLKvk`9e}^l)gJ%vf7VcKD)Gx1G5a_RY7Jw!ZT}%-@CEkp;54Kba zbldkQ;oR=s+=5ny54+J;AbI}Wedt!Qs1fP`#(p0k1>0H>c7ALQ_o>@{2=b26$yc^z zOZsFH5|ldzr%{@65uVfufUD=k__@bIJ(kC-Lh-nfEAsEk-A7Aysevs<06QppR&(XI zX&O#gr3liNz7dpmcd%eW(qg~-)^EoGSdnI1&!a&?6cvEW0kBeJDW%px@5EgHXVSQ=?2-lXwy;P6 z-hIwQt%sgtK?qN8*P>MVkn6~Sd`Ny8chr**QnQ~C1qiu}nw=2L2c*<^rP#cfzZkSC z-ch&`^k}m1P}H}RKSKDzb|KB5u34Oo!Yi4vd9idY<8QZA)+{&nyF{zOYMaEeEMwQN zh$=$wKYrq2J=Up+sQYd5U=LVohdqzA5cs602I(T9hF6Qsu z=j7qmn&}&Z)x5QC!Ef$!r6LoKe9*a=JO1vKmRLhoJ*-zzMGk`t_$s(1bC(6LAWVsM za#mm`3G7mhzeEl{-PXeHuLIcHAk0W|x))~0*kqaYb97sZeO0gA!rb~UKX!g|<(0u} z#Ng2`DG5rI$;Bt72P!wYS*pKJE~|R#xjjFwA@IcwiZTt+`pX>>BHtT`8zLAo6OOEJ zt#F`KD0Qx3TMgY>oZozAsNZ%Hc}&-nTY|1MsRt`Nc1TkmiN~4@%XWj<`iu z{5_Gp*QZ>+@cjs_*pi*I)bZQfj~tY?@TlfaaCn-(>V%h$EB%NAjg3VT7A{5P3E3!K z@YqfKq2I+s_(iuUvar!Ng`ZRUUBcin$lc%-aCGchITTe9b?RQkT-_wrA-y2FpP`{` zy6L_g3*EAqOV#Q9m@~%RKylFjji`dMy$GS2SL?H50+=jnmzMUeBNXko!kr$q#q7v_1^!Kh5xQXbPkEPo@Zp)G?f1 
z%EQUTF`kbk9$;yrS#VB~f-18#+2BOjcn*|x<{g3R{iX5){I5_9XgSn3a|aapXSyig zzgOFVW#?!rVTc0jsEXv8y*g+EE{sb$8Jx`Vr9D_Cu92`6X(vKkeV71`t1Ws1r!?ic z{zK)ydcKRLXyqrgH&ixsWGy!2xzH1%E4*9<&0-QUZ|YFuZQe;VwPf}dW(QmY8$P1IGb^Y zTtvDSOL$|>w_ZeC1bBI7SC5`$bQk%CbU*|g54=1h;)D}fo~4pi99^1mpTiCzyrBbU zxl6VyWcfHsJb3mUqF@QG=wk9jzUGXy)H_aT>+em!7#v^%%qsWja-ZwdyD@ixm-jX$ z(<10Nc&#uDd;1Ovw`(#jK_z2Wfs?u+pv=+?V3G53$Ri~rR;>9pnCt@DSST!A$T3Ih zYCjPnul$Plzra=0kF{t2(E|JfTH9kjL@sHj23BW?BJe~Q!Do=f>V*2GRh6{=YAwuo zg*$8zqipS0d5(Qx!~uC8X5@0;h7&NAri(r6d@oef`k>(Ly=QWH)rJc4z6A%}rY!Y# zMnb?D%_$?HC^`~YqXzpqZemfdlo?CO&a)-;N&N>V{{_T2u-O=+`keHv95HlD)%@xd z`}F@3;gMX(2oD8U*4k3IzcN4BfrRhBdT~n4;t78F>kJqO2co}kDX8U@LfA6@$iWeG*F2TS`@JWT5gc!9T0}2s&NKDDjk{YC zPGFi$g72}<%ytk1Yt~fx3>nuMIKs{6hIwSe35eL8Ud*4G$46<)j0vU?`|ld9{uz94 z!?6|sLeY%@X$r4X-ZG`nD@BKEgrDm_7%NOV-qE5G6|BKwh=gheSmp(Mt>DU3Ng2k^ zO8Z`VsBgp6BkjIueYNRPhuzH6qg^vk<-CyzdAMLDVx0(A_I9aMW{65@?YvH59}h$3 ztOOjgze>ymbr+2NiIhqxS9=Y(?k@Z~;#b zx~3i6$6_E7==Nkyl*K}+>4d8HL|=QvFoc#5g~W0AZ{(OBmJRuSjTb(M~aeQwSIPD1k0G!Zz39mLC4GJ9jI*q=^PtS0Tj3 ztF(*dLB!rX{d`2_0GnU`T+ERbh?pIzj=crIjF#Sdnco3A`f$DBJV1Pgak{~VJnBtQ zYPb8FUpj|^xH9=WYZ7DE^OSks=N@v_K0^jmXSZuuH6Irx z(@~{awbwp)@Ko*a)s~DAE}2VRHhzFOi&oC3e)bI2{gF3fZmJZTA!JFW(9!}^&|#L` zUS)`f6p+GjUd@^^`u1?HdNsWPCQ^iCDY!^ls7w42mWJ)p6z{HCmX_d1@&F|gRr;?& ztLXBQaMs#c9$P;{MVK5-{qI0u$=0EiF@f~`9(91>J)56Y1Q@%*0?%;Ak@en}=nk-_&?Gftn$DeOFVU*nm_MY$U^ zShwLw3`pGk)H)455Hy-?SWFeC{q4!=RZGY>Ga077KEP`voN8m5++)gaq&T(?~KnAW~El^3>PS-iPS1l`1H z1h09d_#Bp4pQOlFlgt2Rrzi}1`*4`H;_IjOn%&W@uNp(4 zvA2!({3R=NOa0g^6r- zci2M%w1ei$dEPinH<{VN-LipIzusyd6b{wJSjDohL0T4<=IXtZx8_6YQ>KO=^qD1i z-}({9cNY;8^9}AK>wZf$CG)H8us_}zG)%w9x=mL99dN9m^vU=1sKC!SYDJ=azRmDMy@!P5kz*;GAs>4fk=n9G*j2tDkq&< zi3Bou48$>x=xr$@6?RC)nR(JiLzp_dC9w~Fv=Nl+q12~i`3&#y(tMK|o#$fzO5jdq z#4P^J(%u7z%wW<%n8uD2qn>X1q@yb!Af>^6rPZa7IM+?)3ZO z`X#2E@ZDkqZ=IDdM+jeq{#Gm7iEEf!T2SmTMSdnIq*PkNYVfVT2>rNs_7` z6ziI3HjzyFGQl<6T~*l653K$YMzJ;p^rptm?2`x#{>joW@R0o@;~q%#A4e$!6`>aQ zY~xtb_D_Iv*rN5_>L&x83op-h03qZBa=xb1Xo)SD_hbZnR?~-ygovg&dapP-FRcth zk49qK1~Q1ZL0qeX9NY=r{;i}M{Db2W9)u_HuwcN_gPq31j2q8O&Vaze$e*Ke5|{z; z$`QGjP#`r_ffKxAXRZ^t9kuH)ge>2nZ@@o(`0U+cO`hv?|0@DhgfsuNjimefkCmCx zBraIrje_sSlt0&9O&L2%>e_0_w6Yh%5>}Lu-2I`3n-q0}R``Sv_wSrRHE!0;UT$Zf zOztR>K{C;B(N}>UyyzhWv$rJ);`%)*-qarssjHi6x#=k~Zm#kIo~`ttfyh>)1r@*| zP#OK>*p2>D_FGYSMiN1Jbt+478;M+ASYVfAZb#L;R84Ivi_BLK*%zh{e+JYWX1w>L z*g`6sQYjXX=Tk1AcG3cV>hIxO62?QXo5RS#Y~osqd3<0ctp4*l&l;gl8gQra-wxOJ zbgL)qMgs_qSz+4(ZPn&jW@I|~VKsNumnv(qvFIV&M9p=16>uHt*95Zot4v4FBY0bP zJv?_K8&RAodn_IPi}O}@Uc7N@_kVDnnZz#yfDY)-abFi?A-kQv#S#frYqI4A!a?TJ z8xexRMRq$w3AgR-(ACq|FVL3QKbhFw?EI(wcidKhDg-Z@11PWR#Fr9?dkVt6c^NP` z;(RfAQ$lq>e0jn1Z}wnM@eil#VH^BnSNbm+pf5g9_5YpS{fis^o1pEIsm;p4b=vqq zm-iX`{tX*~&CUWCJ@uNxY_(o6p#GT8T zFckfJ_b=^ASJTW?UOb|Bv+{N=>=j;fa`f2*igMYVum~U^U8pc3osA6J*ely$r%n8f zN`4)-4vBZj~ zjcrJVlRoQnJr#cto`eION1L9*iwkd#Un&=BZnW}05NMAjJJ5=k-bMj-t7ppOu*zlo zaCoGq@nXWHYc5s?(OPS=khPv4F>D^$BTn3+WQBkvV8S1Jdu{!U!tMT1VQRs?{J8XN zq3!`u!qBhA)Gt0q>1~K=TdlBp*{*cSfxT2Nr*zgmRnW3*t@tnA|LDRPOXY}LcSne4 zrm8(&`@nei9~^Yf`Fk_l0}Qr%kWPA^_;^_@umfc)?Dzo(TZ(KLNj|oI>WbTB2dwq#@iF#Y0qGcu_-_X1fh;>xARpYAu{l}8-=%@!A)26xo3Up+lsHi zRHxv17V1g6i1eJ9bUnO*ibQYS}iW9LkYA06yK*Q>lG0@ zH;BlDwg9>M&&J{H1m(khhC{eEuHNMo+*5s!8#HeCp}qW5Z@m|V51{}re~%5Q1$bP2 zOcdq7tbeQaw^E6x-y#~t)ECKiGpQ3Iw%=@Juq>(Gh01+svSkyN6{j@4t^zKe{n1B> z&md`y3fR1A3FUZn{d^pJ!`aimniaQ`nMz<_kqWV0aoXe z$+-iym299m)o+hQLJu3H+rq|tgPSV8L!>F!ad=|;7eS;~`(VV)=-wW6+51C~9kTEQpor{){F~mzVTBE=q3fU>Aq8$uzk;`GeS!DCCO_fe zHI_VTwT+yDpvhTJX2F#)C?cXF*m#X2rCCjqXDgI(!wo$`>px)4-8KXBB}>-}Hzax$ 
z)xR~w-ICn`fu^@bza4Wvxd2KuW7dek7FH0B4QHl5vFew59W4hiRY{&a~s3pMXu=O2S$nl3|K_k zZ-A1~jC6Pl&?Nk(q#q`688{|9@-eWoCgOF)U}fxM)JK5uBJ*+x z3N2HWXmsuMNtD=oNk+S|HPQ2CCAAP8CNinlqXQbb(TaBK{mYE#WbXf>w82q+P6KA~ z9n2TUWx;0aAH`wbD^WP+e(EcR8HzjlhQ&w7kp;vDOCys4@J3raPMrSHlTlw?0QdJ@ zXJyZ$@pda(iO2fj5zg)z5AHu&MUMgf*R@O6{3`5izWPTn8Ctev-#_bEaG9SkFD_2L zQEIMq+>0>xru>)+_g1Q>`?EwHigQCHTaYR3;%0H`ig@uuaa%(I3H-@PuY(6?rnTeeXq+sc?m4K#u=s zq9$r*k`53xi{n*K*D7|QMwxgA*tWvzfy2dQk2KyCoQ}f z4DMJ15P|6n$0Ix0dGFCk-QEb7+jVc2!<309(&m0_ zRKniu82Q^t*T}(Bc7t&jDU0*4jAQQOE3t4~?B^BvLk62 z-Ek>oDT{3&9OdDR!p=p^>gV*n-QO1x2q zbT(EkYjkiMjSE_MLcc6yb??Lka{K)(k&7m+QMrJMS#^@7RK+wEL_PX zYI6^3y=(skk(TZz=X&{fdzxt}L0mgR?Qaq=%H*Unqn^p9S6jWcppau*;TFQ&(pfHb z3A5sWKZK@$-lL_Ag6UhP#;1B@IQN=0YYV=Yu-19)y$x`yspZg{=dQlnzhaV2-x6-H zi+5#QE^Xc~zLMQ`k>6#m=3L0!GwUx|EeyZGr_Oy>m7bP#*)*yO5?t{I6n=%u#~z&f z4o5Eftl>W=zc8e$8ZYrmZKl#|UBbMml>uTUZL>`tIx5qT*ZzN(`E}dmV#=Wx&U!OW z0T-xSB_Vtzs}6L{I@RzlVOKUs4_>z;=*u8)*{b2V&@(*{L{h8&ll2DXgNK5E&6{f0 z76?%4aHEXp^!55v3v_E4Jl-B5gAY&h&egVRMozu3iEl`vhwsO69GKJ>5F5n9M^%KvWNhSMLwP+iGsF9w%X+ts^VPkuG<}^J_%R{rz+x5dN=zI5W z#z{PHYWm0S;kGZ|B70|eTH}@OxKgdQ?KG2h2f|}NE#as6#wYhcQV=W)rT7mbHktr= zCNbmc8|P!r5m3O;U%P7d1m&-dYdF8=xN|VMD3~+GxxkY+hnmwDrEJ?8o5T}{>@iei zUS%(Etfh3BD)kKkn+wQ`b4Le?`+Oy#v+EGWiwb&@)gzYSmFw+A!n%7nn1viic|pQf z98OzD+MSfyZdQ>0t(xPEdSmtLVLJCdxGY_3+t86e?Hnf1%*P5nrAEkb8ze z+NNtdqO_5#x0PPssOj_38Ks`6lxM@Im_r2jNVkDF`NBbsK|&6_Wb|5c0xE9y_)Sqj z+}o;4e9G;36OR8#*icEY?UxlSE)QIg;rc`%W-hQXPcX@>r>tuZDTe&@n>9_fCWWVq zm|sYG;;9!!gr?o=pc?tjWcs-A0+*UC8wfRh2{6Qiy*64LC(a#0m6h&3Ya`trSDYGSk~I{f7;0$41PuG3~F1t*#a z)A(K2fE|^_t6B7hjtJ~3YwXUiSZ4M_Jxl2-k3ayGOI#%&LUX$7SLMhuUfZ1>S!Nzc zNx#+{5||DDZOK2{GJ0xj-{>hEiU{~G090>rmQ1cdfBS!N_7+f4_FvyGA)rX8NSA~l z-CYKqD&0MFcPovQbc0BD4_(qQbl1>5fOHITF1)?}=Xu}rKF>PmuojD$tFK?|{n_9B z+olT~I$086&aO9c^g3&)T5=UM-&xzgV-`W}arX}>C23S+E}TAo{yMhPm6^O&^lD2* z-fQWOIr&=kQ*v2Z?t)UJ#>XfM63GFH{i$J#AH-a-sXu#3SMzZu4)VF_`>H&A-KsF^ zr@=U;^66EKOoP-XvJWV!t(XVv{HL>gx!=2NkuNoGeH00Xnoa$F%9WD}h!^od6S<)d z1vLCThKmuka=|H)Pa3Di>iURnVpx*~^PK|S;13$JIL4(Y18z<4i;kITPZ2ZCT=6pF zP}SqpkpYO|_4~Kp>6e{^DCW-gyr>ld+Gs0eriAV&TPbOhV6H^;VY_{wcLP zMooH`bBCA;Hm34kg?jj~M>OiJkqm%_a0cf8N6pvSj{rvNt|+I2LO}LQP*yjYDLcd%i7qn;-NSE9>t(`v zq6r=_Uw`Amlh6O5vM1XkKRt* zbqa>}Wzbp~WkU|Rc;ZA(8K2rTrXbU{cEPvC1&|&(#kfUFkIBLJLurcK4knAKVnPA$ zo6AnR2Pu`EwwPv--3aH3pyGo^mzWfByF>esEfWx>AJ42c1dMaZimqAD49vCa?O)#= z9(rFiehK>ndO*QC$$~#fqQoCmR|a4WAOiegvF3s&C3Js3`184&D zM^68!uo7ti%_&mNxHkGfkYJNpy;Als&jGfBNd0ew=O424mxwq-b9(*@a{vg%C-vuC zfAJ%ehHT4xYDrK6l9E zTCOnz2|cGGt`3)vd~V7u8bWFQ9t+qW^j&@WH<#m`oVa-Rs!xK1I3Xe7+TaRK%n-wz zM);sc;-$lxeA75Uf#}Ici{8Lvj|ChU4qqOSTa~1{K6uLj5K`4;{|`?Iregyx@8D8N z$1{6+W57_W<>&2ve_YYP@Ggwv8OHO+=OZJn0sD6aOov(z3c6)xGLcUYzEIH5;wL{Z z3k#1hWzV-&8#DfWJQUU{Ri!Hy8;jqB=VXX?oOYtm?bq0rV(GbmMVXNgJP7sv zKH}WAVvEb`lP-%oFX$~}k?8CF93?H4b3#i+c3g1BF<##w7yO zTE5WD7(B}^GZi|={cBRbMBYYhkvqhU= zGE${u^RfWVM&R7DIxw1lh8V+4|GNbJ$AR&!lHG^olgC{jUgOq~Eg?JvIoBv+-P5JM z%@Pg>%sj+SqFj2z6KIegk}gK-Xs3Fs(z|`}yfSD$PC{svR!7JEA9JQSKy^#@-aiR^ zOk(%El%X>!6Qf`i{z&s9f3q2;=gyhlFX-rp51HYCRGF?bMk5Z=13lWQIo4wrdk(mj zaq*Y+WXcV1I3ObNz2m-RbU*j;F`jRi8}@5C{CzKWlR83G8TW8}mGRO`%lw~P{Z{)xppxBqGZ&TV(b-r%!D*aJU=DE33wY9s~j@3!tG*E>DzUHW^AQfFRJg`5J~woVCcx*(1q{s#!P=R!?0$&$LwtM8ks)t?pc2zAepYEny$ zw&wn2$`3O3B+o_J;$Bk9XT@*h0D1eLZq)g5{vV)6&J%vfGkjEL;+^V|3kaGp6x(Ks z6-T4UyHvSmJ>6&1kI3k`msg4KUWLj@t@V@WcLIZiK_V=g5t6D55REWU~jHxfJj5-%`_mBzL4#Hx7Hk}I1T z0q|jB6`?&-7yNdqeIGBQq99rOB4V6)q_~nmmUh!3{F+LG)Jf{lSwdLmSLOy|V@xbrc^t+D^ z{SF5Guf;Rv1rH5(y+i8BO%Pf+-~k(4l0_MG0^LTcC;LF&Z!WN;;L9W8C$#$|lSjuo zk<%94&1nA56h&KC6yPl}#D^)PMJDHw^V`anZbb85x`@kdeKs5M1%QWe^D^Rx-S+^K 
zRF5>X<)!e#Q=_CgTKrF*a@VKt10~5QN`ruyJ(s(2m`p-uf1;_`FPS9r*c|8O1`Jr~3y?+dQ(1;K#wC!#&mjV$EU-oUR)bOKUo9hao{4+8{M)*nJ- z!jTGQ&;e6V!{9u5S+U$Czzcs~Wab}eiD|eRSl+$G5H<)`PTWg4ecjwLvnrG5|r^Ph|LH*Ly{&m)tY9^N#X?6c21S}l>l$w({a44(M z%TR#aU$>k%kWhzRc>Fw^g&?KB?VYpvg5*3*UNu|d&xH`u0R;%t1U-$7p2 z%DW)i+c~>_2j0f#haP9>mvVNUuYh z=GhMnJuTlj#8*-$RPbJL^=k0!Y{gd`NO@^jHHoj%IL7BC>-#K@o$jXu@gnGHNL~W( z!)PJC4-ofw1mS0O{N6UOa*V&AM0m({n5#?VKF97_K_qosRNrp*dbNy+`xklGZP z5W_fAXu!?#ofXz|+dX5u%Qt7fA4--@%Y}|+pMu{i`@4IwRSByKxz?OS2FT(GrD(BK z^|5%5b_m$C26P?c7kj}%5j;;vB>=J2FQA}_+j`XS$wz$y(ZT6TJf+ATugG-e zHJD<3A2?gHCYWMC1+0CVMMARq@#GmVMpjoPnNi~T^m0;7Xw9?UHJ46_BrdF$j?#B( z6{9bfa*s3d4ET71$GFLrms&zXld44Q(qkf-Kw?mX^cAFiRpNBIT%8Buog@Dr;axS; zzrwo@yau0#3!ol+|AY|Nb0H%fDT>gjJ!L|1AjI`i`fOk%`${UTxmEo=RrnTccgQ`g z7_I;ax_YPNz4VEi69iB8;ylA#vFj>43#b|^o4B9J@7PU8et$_>I4Sq}YD_#LC(xUS zTp_nfgTOQwdI8&l7KJlPx<#6uO{W~)L>sxY5bMNLUs!kBn+W&FQa$P>=6cl`Tltq! znvrcavus8HjB*d+fhW{EEdsyrG-=Lw_?>6UjOtrhxDEooI1SG?k)yPDSM1j;Sn-z; z&^a?T+v2h3k&bk8-}&b94s9(bR4gmNk5_bEvQ~cza}O#=q2P3(i#0a3eWZDq)J1;! z_|W3X6YGLcOh-3IBjR2=ev<@ZQP%yH-aQvTol#e#K7s*utLBDr#-4I{zkyF8WbaST zNsA|Sx$H+Vr*lUqB67-c?XDYQBXV={;YN0URj=prWN1+q)D>Ko6Dr53&3OCfeXta3 zSm~INsTL8*X4kfuZ;Ar(d1LRI1^y4$j$&s`SK)EVV#5y*0q#eTtWOFKo69J^J;GD9 z7ThH$#~4{Xr&cCE@qcVWU1v6ceMY{oBwvjjJx`ylBlxo01AWIQ3^}HobK$f6PPJpFl5i zTN5JwpZo?LKbqFGwAX!EiGP@9R&`D+i^<($;50vY@PUQi2xXII#ey|@r{Gu`+aZOg z&$d~GO-Q()1I~Na*XBZrecs+u>@5kQAz+oNI6S28s7-)!o)YyRCoBDMhroa(4Hgl& zbF6Syuj%Y-e`wSkE@mI1w&J85H48qp^$_FS!Ex%+*)cKf-$1a#?ZHeF*ZDbfmo1oeK+cl&E&%_PyL`^7FLQeyC}&i0a@8Q{1ysYUwFB$ath^T<`+NIUe~41I|t z<{@^30KBsm@Av=4J9(PyMt_MREO>hud>Q_qw0VGUGc6bYv3u0(kG9>-$W_NiNx*?M zY-3qHeVTa2U?u#?f7E#yc>z4$0@L=GSYx)9vC9?XA;dm0{sKdcAjYdtzixjxi(N?>^yGspNwPtpu2J)+WSrMms$6KroN2?xG67wa`uB}Nrg}H~2*dx@M-OSOHz~op&0KIqbKVY`y&Y7?QPj~v(d^EP0 zq>pg!d_TwzykU2GkcmSA^he4J(I4*sv;4}LBrYJ|8(S>KYzB`5N_CMi?$ChpCBf$+ z62bG<;r^}7b>%osWA@9ewq~T1z!PZ%C@WCqzTO0oghuzrMnsY)?T3t=)jF;PCc!-IY32D`WwS1 zM=4sokj_Ptuh)TuQp6cH(eL_mE=|Zw)xVo4x&V>{$i@WE?yFob+N~B`(zlkTy3hHe zX`CrVj$U;m^>8A*UOVfmTuUP%)De~1vtp;@iz%Gcs1>i{`fQ55Q~2e*mXkb9N`v$} ze_A9CP-k=2N+{>tysV^~q$pJgWF*_9+fYnL*;pJw9Q_qqJG=MCV&2mj#?S_l21L%c zU$9@$32&bieh%3V(!Fmunz%f_fbXqY%pBIGt8J46flI-dKYY@0cJ^jfP(kYJk9hUc z*4z!6>t9V}8g7-awG18io?py>%SbLNE;4tpXe_^0f6&ewY{4VNmMtw@5ppa~&wBeS zeZvT#6ikd@AbUrHoz^V?^7dUZmvuyysj&jCYN1tr;-Zc=8^;NS%c_jm#d`Z%ke4g} zv;!v|#JE@=#T@jr^V*@PSJ~jp+f>sw#;%#2W8HUp#{Sv|3vpH)0(FbW6N7rkALCb=2Xb(&fNpaU}gW{DO$UPM5?dL06=AB(I zm;1CV^LcWaoU#{WdZ$g#sKz6%(%3i@VG=^33(U@Q^Me>H2#HJBn{pFj{iK%m*oj+W zXpJHD9E3YlM@7bc*M1&{O#EIQ$k;E_4jl2kQ*`I!I|+cLYEKL?;m8bp&-v3xv0+hb zpf7!%H$=n2t^=_yzMgd(c#mJZHWa6dj-!O_CpjsS7+@~-$$R+WSd>IAzz0H#y%NwZ zW|0@vMhdN9tZ*<{T@-T9(9q8F?*pu`j`8Shq4ln>NhlLX5T&)paT>WqS;Fsog=thx zX#8Vd;ax0)DOGpc5%<=undj0KCabTbVW*L5>XMmKhI835Qu&S=h*{4mRm7vSz*xQ> z#ddA1exY_WbTxMieOj-Xoho$(4`!b2gV~@Ura0=aG3X529PX|(!{Jw25~wBKIjA8o zNysG}$VZv|dRLy4$kVdHk5ZplKw^I#eT&ZfS;xR2ZS7*7qcgB+;b_u+$*8P}LCMtO z>#cB-8tbtW?qqn1y*|ajEoTJf{T+Bi&HrIG(Z6E{;=`2p%l{e^!`OpKz{2JZ1B&Sr z#Z#g-vKKMwa-b-l1N+BGD))t;sK>kVVE@d|mp{xD3neR9=ZWWTgeT`%JE-tZ!s zY=g9=j((}}h)QWf^Rcz+?}b=MXr*+zbHm`5gijfb@j5kKdM2Tvl2Kumg*`X^+?u!c zgak<`JV2>NB3QUG@+3(Oxx%-wHFiHQo`zKzbIl?l-K_?*r*2|dq@H(c$=b&oKD#k9 z27IoDRVCcm@5$q$rM~tKKj3M3_G_G*t!RTVt!A=>yuk~>?tWtH&9)Gm-%}njey7A! 
z)*GXkOIv_8n9SIPQfAPJnA@_>5jIm1Z~!bl*~`VUI4((?Ce}axXL&dyetZ@k$lY!8 zp%?bYf-GPIO1AeV&%!iNn*V&h`AwP>sO^L&d6#w!0klEyj{j78T0f@y4J?6K`IcnR zOEd+)0sl{7je&$e8~<})_0P-ysSXA3DU;Q~C<hjIi$;krMzx969Vey^I=j4m09$NPv8L6YKdn(?(#MU|; zm^@`k;6+CTDze(hQ3H-3ubi#Hw)`bAMFk)^5q*5!S^Ja6KUYXe$u1y6xwf}0z;gl( z2pwm>Sm}~!I$4pJ?YY-SwQAIsqU`vnY?zTuvyjxeZ$H#fMyULiS9xkFYxq(A_P23O zjtzRT>?b_c@AI*3u8dv&JYL`lt|wPhv$sIq?~z*FSniivHS89%TgPo$hf@x{6K{E_ z;^beX>5rY86}+R2b0RpY;Q04)?3Ecq+Eg=Hls&)Q|DLS`mUYUj+nS2gO046Achm2& z66mqUrEp%$e)+~EZ*T;OeePVZ?ON>P_BsqxU)+PJ#4X&v#9927*%>qC{ty-<8TvHR zr!$ogHUQo*>wUBcn>VFSmBz82k(3aW?g7m={Ci$@Dw&@X6^X5KcQo(|k=8q`jHaFI z0m}j;nFVe!3_27gAlE%c%$y=`-7X8WvwZV4u-q?*fTfq zS{-jcE+>Lpk$oZ*{W=ewP0zb2m*7p>J7HZ3Rg zAMzT_B0b}&?vEkwT@=@oYp9WRZJJIfYRj6~;Fs|!!rdXdtzIZhD&xB{FLnv_Sy!`5 z_(WiF2WCd&1>8R?ct?Rbqm*Ihk|CC)zw2vuSqz8juBa8- z^c(*Y@knhII7#5J;l$1D<6em9**82zhnqnKt-$+X8-oHo1taaUx%fd4<48nK#%=cc^`9<_Mz!nKO{+Wd!Xh7RL zk}4YZ6p*K_SWb^7=LTA~gx52|2~Cyt%JA?CZx7$R9-3kP38!RSZ4;R?55cZ)v^~7Y zscynt1>qdSv}zT%j^|5MJ!MUeCg}eeAok-ER^{3wL4&SgDuY z5F*m^m08bdZjVY)M&2ElU0)*ThfV@YqQ(V%w=sVKeH)tRqDEv)eQncwhv{1!wPi@l z%};R6W>7LEvi-cDe}?HlU!?piUE@V*m`$Hunyf<9qQ zzR~Y_dHzq_oLn zNG}^BVrgt-Y@Hup@226OcCg?U=5-6(n@zXcBu`ujQcBE?e_XNLtwoc3i)ualR2USf zUYBP#7SQp@r<6rbXlE53rR{v&)pO$=Sw7J0Cn?yL3uE6|xHGvn6k*0(F-s5X+Q<_j zU2P?u8LmjUXA{_WX@!$Uef9P}UAh;!d!3$9jjMw`FQ<+mm1_!Ni9 zLeDZxBBBIzKQ%dSlV!=J>(unl5`&DHcx#7NH%kc9mMY3?Loh5!AEegK?J`c9T0J)? zr`rw=(i>~@t#pm^*bzdKv(g{q(h+iUBbgu~qwM3H*zOJPJqA@@hdAywE^AzbR&(0R4YncI$!P(d0)_KSffVp2Q|Zlc^_eV~rO2#1mB&f&47#LZcgFiV z#@8J_aK$?leoT^;uV^CJZ8kn05m54bTm+l@bD)wLJBK6I{UC++3pV6f~RNyTMVf@jBao?BlR*!RFH ziAkF91Dap+Vlq^DoZLfI3!;XEB)jg&cbqX#&e`Tl?E8Z>i_PE%tjKlrE-e#}AQj85;fT2ZrXuZ&y2E}l<`NAVY~olQtOYSHF4 zJXOUtIy1VK3onZV*&5R$KY6rT%@4DjL25X5b=!*HKCkDeZ>m;CHDyqq(t$Z>#iwC2 zCvGLzmNtZnT*GQFH@o$%wVm*FJ$fN;D$$w|psgR(qF8GdE@Th5*3X6XWr};fF7{!O zFJ#nW_U0>J2PwUI}xr#h6w{xQf8($j=Fw686#(}!# z<#-lj)0}%qfi)DuH)pnL7ri~4ubFB+k9VD|pQybc)XmWKIC<{^3_CxZG)3CI6e1iz zd*)N5K9M-s&|my8T4EERHX0w6S|nHqAy#hb;OmH)18I-+izOyQQWLk`oF`aKj>+ec z_gK8h#l*yh&G*^{2*p1PF?gNRt>iXT_%_Um$UXgV9LifOSphqPQnQ#L>cIedQM#Yy zj5?qsSQQn~Z~YUksAPvYvvNqQfJTxNkMo}Fe8mm)$mUgy6>267cW}VeBcjWf_~;R$ zU^RI~_qAlepm+D`=A@cMScT~iqQmw&H(Z|aC7@6J;;f-I6M4ZiO@6+zLwk{){MAt* z*xx2N|Mro`Owg9C_PJWF2a(xcK~61RFj{a8C_+2)u>iIUPB_XS!}Nj5Tx4fRd`eHH z!_2@z@(skYYM)b?-tJ{RNC~AOq;7C{v59=VLs$@uL2&XbAo5!mw|tCN1y_;qV9H@r z|HHTBF<)U?B*;%!#I;Z6^bG$vLATy{7j+;_sS{1o0Pj)a636DnhLPDXOo{~wz!%_Y z4dOSM^~qvLf69oM{Yw-y+Z~s(@68Kw2+=1eyNKw6BYbIx{<_dKH;dz$6S_IXEG#Ti z*Gz~1s9y;rHoY76`4vQ--m|8jk#@JdB{E!6yV!k3uK8jmy~sey2&e|CmCj|U31?Oy zCEwc7+3sb~B>_glrOlQvNl|g8$yYs@eBINej=z}|jL~vhqk3B!s|by7RNlg#-%B_e zRrZr54|v4{Y-RrhW#A{7N0qAbCUf9+ z=-UGDmI^4WtodwoVNm@AZB>$4%sE90ioSqONX+sc&0xyq6SvK)DmIpJ`ZU@1 z_Yn)c$?b0{6sK_3E44?{saj#AwmNE;Hmcz78hEyC<{<4^TE=1Okv zlF{<4#zz-=J`v5X59LrddVM&JM|PW5Ei^j|m|1s5BnxvNc4Shtpq^cKPE!9o=mitn z=(pNX9xGisg*sV8T9DvZflUmGu&+J-bm-92QaY#6WZUB}A>mCHaMtrARnm z+sy`bTdBT}<~Ll0h?vU*rqr1`A_jL|WOn4)gwcxg>Y8xk!FRB)PMS7ZYkkvzExUkg zsfhxMz0}^jJhC4L??fHs+(cNte4s@F}upaZy)_)w?QyFm{H< z^2ZAl1P2kwx*f&ii~KRX8Hmk@W!IeeLuzK`(qsy#pV+q#jCMSKb1fLsd%W33d^|pZ zM|ncHY|9@~eaXt6qf)?{>Fo#pnKM}#The^R;mUJ^vXUW`o~FXM?bNf|=;qjQVX|P| zmy$a9Xz<-up4O0z%`2a@#n`sw0V>(30$4zE=0a9y?@)6GQ4pp z!_`>DgYd>wp_C8Z6F9k|N_SuULPwA$eWi7xH zozLU9ityOnw{TsZl%v+#&?WXhwEIcWd?odIcRA8Yzd^?SQ1gds9?Q-(@G$mjb&0My zA;*V)ZsGXhFN)tWU#Y8VE}o&zDi2t&&Ggc^v^8l6?18ruvg$EAn*FpjS5KYOAlhs+ zF>b~~7pEy+T)P}_W!suMs6u0rnu^MfVGQ7GDUQ%b+FtRwk03tcGq=IF5R9pp3<6A1 zl|zHPc}66*-L0}zB~iTk|C`15oF}U16D?V|&3O(R7B{Vc0M`T6rkRItzzk^Of(o*US 
z4`F;k<%rszuRQVxq!^XvPXINTjlQnOZR*D_<6=d#{Bb)I6uzByWruH{`rc}dHv!NAOp55b}9GC?qg3EGwV5E0|%;5k^A0MPLW-g3X8g#RDd^l zg$*@y4QShJir)^v`0)NYRuoDFKx?w@!bQU<9^75ERBlvt4E?CFcRA|$Pc-0MqKyL| z;Ljb%v%c=~?W{$!!Umq6Dx<8Patq*4Y? z4Vf6;jrROOl=p?n&2Je0ZcRz4n3T{qopIBL)$J{$!hXp$vNX(azlKXhL~JsW?5Bau z&y7!4#?bq+QbhiOb8LdcfFTM}VgoV{FK19xnRx~_{=Si*ygA1sbMs3m+;t&b@T@L0 z?e>`&omT6}s~UM4&8aG&!W z*2943RQ=Rh9=DE;5)b2DFC(eou1w`P9(8CnhMrnZj0Xd9^;tF>ve1=roX4y_u?_uu zfBByk!YwEGcIQ)mqqwy0w%+U`2JJB*a^y*sFOSbPZodI;>1Rd)HPIul8p87xKCOhs zt*q`Z^^o6W{Pc}g<1W)innos|D`r2*ysOl(Yi(GzKv|mH6dMa)8I5H1z?W-l;0#S0 zG48sKni%kzmyV!FiC{!ZRomjffXnDAsg|wf8%itcHJmUQ146$YXB-~jG(n&@j^U?k zt!u*lebv;uW1GAe^;=l!D*%`&M_y5|OC~>JyYaVNliAorB&Z&AgcVrEk6}?BA*7&J zR)c=+dr;2zII<^AoAftVPf7$s^uj7u;mI-bG$mmS93JM1tkN(gB>JhfgQZVKKivG9 zlh}IAcUf0yrJ`x<`ngHrY#HXz>SmTormErdwKd=Z67N3#^Xws^_`@)up3n|W)BP>Z zP0%gF<@nb0(S1`>jPC7u27ZGD<#e?h=>nAO=DJD*j)xqDc(>0v?zH$u2d03)Gt9k3Z>KSWc%hww^?uK^ntL2%UwDkHEKhdWW?a2M3se))^+yfTc8G$4pEcbNEvR zIT%L*7_u$D>m7>qu?(Wa30{214)vG;gOzN+lQUuhuI1v>nR63=NuRrht_!Z#eKlK! zJ{UvQ#6+(}UhiimCGq2aDTf}LQJ*aI_2i}2R^(1JZ?Rd;*Zshf+NA-sRA zcB`w*mXYf$eExr-S#*athRiLsWB$aTdHI5ACy?Ma!)57^l2jbOi)4UV@{wBr)aUbD z6Ix3fy7&H!U6O^sZ)-lYIP~NkKS%W>MV0y`c?+HFA2Wm6ZqZ=bE1m3!EbwOmzpt>M zUipR{E9b;{{uwzjTf-=~7-cIq5Nn+mSL2oeMtSlNK6&*`UQ4nzUW0hnWMjXi&s~W5 z!@4z>E%KwzG>rqo%U)02a=vvbp#++W7&9bQlb#f8eo{0!^T8ue=6ZLof?8P?%Dt3-Xn&?gQP7S zLHh-mrj7@=CGjEt_%aBcn1I;O3v_+-axa6t^;ljiy{UMH%1M-)WjS&afI4E%1_=Vf znBMo?`hoUV95AU+v#rk?m-D(by z;ka#om|zPY7?I#=8>2x6W0U^StwxrRE~^}C_m)?N#qfzi|9 zz@7EFr;5!dSd4mMxt`DP>Mk|RaRUn+>mZRO8*~8yp}M7;6WI_kaLwZ29h?nW{_bVa zdECiwlG9mjurJPzUELDw1PJ6(V6y3vw#@{k+Iof^zARM(OcXDN5yYeU65>>!*Kl z92MF1scNn%n_L}-6g;YB^=qjKaR=MFXcJyy0 zNFFSX^7!_^@J&OPM_B_+Zt4wrY>nO^B4CpgQ&Z_8Z6ZY{S8A_LE7v4{@@}>X0zl z*v7cBt-IUR2#_%w7hM&uAyu$_u}k-6k9iV5(9b^+H-J@TVz7}*>)PK>uX)iX_xrdG z*Or*}yoh+mw4n`=;VBGKak0`qS!|e^mmzy}`p_Q~exJ^$lkMtaQ3_g)U=Dpx;CFg<7jo2jb`@@CrKyeT0$ z-!OKJydHGR^2Tp;huoU)N^MDRveVw2V{HC}>;9NINnHd*ZB4d#f_<)fsH>LTjPs}L z=((9JP|M0bs~-g9_|^Ax!Buu=UXm>}ndIk1<)%kjdH{g6X)VgCv6D^Paigd5RN2*x z!MW!&j1Ia~>3OpX8<=e^Vj$a?Qg%VK{a=wHzMj+_DB1q&#H;7K`Dv7)^mu|H`o_|H z$?!^beUB#_IwrTs&nk0OQMXB)mxXu%>q*ZBf^LfmH_hTmso6g`Ws^>})%@~}nPJ@W zX-rTo-No3vu^Qo$%G-jje(1NZLGzz57g_(uX)){_g-G=+fSm<+RivU|HLo4}u~9sl zH1u)2f~xM(h^j^)o-_nYC1jcC(<03zY?m7ik5l8(&F%jlefADdo<(Mt8)X#{@VXe=Y? znA|`yp#+;%0qpg7^#||4%Y~!j(3-MIyPDHp+Hg&9K#nuMJ+qW&Sz!=NgG8RSh@LI9 zU?4Z|BEkI3MKIu~D31Dkaj>&xzV1*J2jl^?NP6-l)#>hTH|PDHtA;gqEE10SZ{h8# znh6cJQx-3;Ri@HOi^Wk+V2YRcG#wuT6LSS_`@963C!2VZT3;9cN0u-T` z`l1@Nw{>(ikbEC1(zwN`Q~BT-d8R5JsS5eibi#_t!J9gu<-tnpA>b`FMgh^c4F1J9 z^T0^P{n+>dagTZif-n>(9%%pWXFT=31Uu~g%;Pi_uBoQ8W(X5C=J!hP#T43nqN>_7_ z@=a(s@m+G7qsi78w?MIBEL{J&(013mWLv-eb>S&YCSh^H%yIDWX55LvxTSPc3+$HH z9HZQfdn2{O!?NsyCQbXxp4{!DnoeH|3*C}Tx^F^DuOkY2J@e5W6`!Zx)Z3tTSRGD3 zOgzJbGrpB|p0@3RwGB>_+Bc&ntzA4kSPl=(cJ71zm!=>1ZE$>$3v+ym? 
z5O)`3J7sqMuo7<_m)@7Cy;_Xg`4@aDdXk4f+ld^j;Ny_}_(d&O1iV(Mq~)^c;lTu^|{6LJXXv=TD}lI3W1$!T3@YC?k~S6=aeR8`B3q>J*?1@E{#$B!6cKR zm1cQrxHzR?yA-pQI@^%!)U&kK=~r8h1@G%tC!6(-K%UC7!bUI_Bciu`7KW``j@z(6Ao58%oR<4jESt3X% zi2`6&W-szyjUDzs%_ew~KPXIJBTFRjW{N%A-=&GkkF_;5kh6_kdX`JE()ZvP8KXw|nJJmgSX~oyBHm0nOBG zg?Ol3igdClbg!4+`2bNId6GjQp-?Td(FaW~?nC|Csh2&dxxy_6FpF%Y+5ta!#qNT$ ztsMIN<-Y)WfyAHH4;DMRh`L~6M@3V*m{>Y~b(?iM3FgUJRW z8Gyf>t6#Na%jAm`Ob&Y^LImO(E> z4r2OI&XdRvD=QGHD%#<(5}+Uvr%Zwrdbwy4s+zcU-ajWzDy}T(#;X#%X$uER$;8hP zEU1FO9^{APE;k?K1y7G=BVTX!8pW&MYfM4FD0CoKY*M3c+|%{SSeQDK+czda%QS5K ze1pTMu1A9-3CVjt@;V7Dq$(ib0NV81XKp`RbORj0)8uz$Lua$c{>@XE>SKL`QwH&B zKJ{4kW$FN)B6Nj)_# zT?=d(i0euWiNC`ih#uO?FF~Ha@M)kG%^u?m9X_Kw&+>WIs=fFA<@Z6bJoVGaShq~t z>aczwDc@lerI~$!OhIRap<~Cf66_h010JLTlq*K+9 zD?^Y1<3GHRsF72MSI-!8NNoP3k6-;FQY}e1Q))wG$LK{RZ3mU;_25nb?I6Y>oyM-3 z`E7xc72B~qY+NYw1?>@VEGf$2)K2X^)*gES+@DOVorzX~;vu)gs=IF78Lhv@Bu}iz z0&+f&r!FKs2+2|qn+tMrH-7wNZzYfMc2rA^BLiHtFIp}i;Ra;<<=G841w)}i< zR{by|(=gV4WmNE}dba5BHMAy>I9S;|*CI)yL(6-*ygsct?(2dx(XZ?RkV^uGDvfKR znG7l2#f$#)j;>rFdTHX6*A2WJ+ z4e3$wHHjBZJ`uVF>4wX8t2`j^C^_qds|TcMVUL%XHF2$2@TBshoIz7#>->t1SKV>6 zkV)Rzxiqha1Va9J#LnFQN8Y9LE~|#~@(#}-zq053{&cI1$Z$Xzp~K=UiT$45Kd2q* zx7fJ=p3WZ_9`%#wUlgcB6wYFh$EAWLbC$F@ma!ycWTDaCC(>Ot$uP5RjQthIT$qd&e5Fovj9d*Mh5G4fnnHQ@1?<2Z6ya zGXWGD+>0c2!C6U{NZdBL{f(vmZ`Ef;5Wu~GlYNg~bNNG$dQ(RLZQEZ^`adY({#(u5 z|0ii2`|mhe8i3!b0xsP{6>J304F=u5JEniJXXQGQZl6&!`R%rZXTedVo$$h=Hm0&x zX%`osizOh_jK^AQnd~oe4DK~3Uq`L=0gg6vnYRyXvZeJo-bK6VD*|2 zzdv4V)0iUX7w2sM>v5rQG4@#TnK-Ks`Ye+3l?s#OS+bP<`6;vBGpjG1M_ zeX51=;GV@^#mL5)S?s8I-?RPsZMJ#93q%H(R?gfYX^O2vmX(`Fy=nWJzAm_A;hfgS za6-gMcbAniF10Y^*O&AM}JVAXcPa zqkfb1#ulqE<6ZB6nh7H+z>tPbkAoguR!>{~)Q9+Yvx_z8L@2WAd~=x5@j|pzU50t> zsa}s2kL_NM-e6CupG-o&##!0r`JB%nuoi{qtcz3egIBQ-BQoN(N?qalMJ@@J9sf90 z`n?sYD5?~2(|{;$(hT*N4WgBTRmLa*b$|m|8$?NwWgVF^R>Kh**LmA zqSE-TU$34s(;Vz;?GAWpm&?!V!SZyp95OQ8fAv0El>QxtNTy{95C#?;2dBu>Wco;O zS6TcQMyzif_K%p95fe};9dBjH37%ZcU*+jyAKwKm*@eh7T@4jsq#fi69Ro!`1~{HB z%0*dBMGstFPq8emmHC`d$XQtbFdGa*1rNqiLI_;hacj_Twoxs34#SAJ$zDSn_PU_dG=MxxI$poTZR! zIm071<>wRv9;a2(C>8fC>RK*)0CKm^biyx8lW)1iHxR7Sq{8OL)Ri9ufE?;-xIhFG z+lT`qjxBKD$>e%bGJM|Djik=O&;(adUKzihc{W$0KIM zheaF&HFwTH>~zpL1(3pbmhb`Z_!oqq_>Ur<%7z?J{Y4XgcbUE88rlwgvjeLO3KdK&XD+#YYn74@! 
z^(p^CV~Mxdeeaz+FB`|f?`FF@z|39UZ2QBOKbJpm zju8*aJVcItABMf@t{_{W`#VUT<{uZ}DG&*0BZ#)NMvtC3Ugob6@paH+z3KdV2B78W zGQKx?vo{$a*S!{2#)nghF%Iti&Z?WYR!I(#>ZzuBaD&45e;Jyz4=E1j0BJ7gz?yQY z>~Eq_-krYavt#gdjNbE+Nnn6N^St97ap5;y?U2}XB;oV&HQ3@`_SJg4_|vGeO`oa` z=&`gO1gXvT4GD`K8`80x02&KnS4h{v48cHICpxMfnt%Nk?_K3W;bz<}?ZiGrWyn^e zO818N`^0`N{MeB4VJHmYO;4j^KE|!mhQ|sm5wo<5e>( zFYC`QR|zb3Z8uv5pe+2#{4)Q))V&FE97Yx${R^>oyQzm5?@O zu`pnxNX0&C{MaHpA3gYX=z}j7cGT3jw4CWO-U~~9ct~P4}^31)ZzRyy*~>6 zK=+}fvuTo}f`RkHzAekYVajZDgXKd67;~0Acr;XU^?Il{Qrv!tB9H1qmCq`u^%5GX zCe_i4BqQzp{TIiEbH9&VII8zaphf3lCYjATk{$-myS+XgWAN2G!js(YC}9M|C%-H-SVTdtODBm3gCAOAB}&TMQhpi2rblOfhNT0$DU=hMCD2TA2g%zpi$1x{;mvU%;#wqUn!!LNeI@+why%@HpRT#M`(%k5vy%nzSt zaw6MbYq9lVIP@Lj)tacy&C)s}CutxWd4*b((!I5Di#`og+LJrf3bp^fVVx^dvITNu;U}Q`!mPtR zn}hG=;=RqIBtxaM!qt!82E|ctm>%{S2Oj)nmsNNI1r+bZL2(BYx}v@sIb@#3uY&&6 z$v?Z@Z)D|ii$ft-PcJA&nc|Kl`jY3vVQyVtIrD0Xk5X7%3ns9XJpatmZv=F1Dx)A3 zID1c^3MN}Y%oAc+aXY$^lT9znjhRQU)U9Z7v%m(NC{70T_XIVyEW0)Xhu-JE-Gq}K z4IzE6U%EjdKOHD=J>T$SyLaL3N?jYD#8Kqamo~0*3~>n^pssMw5qvFW=(B|?&v!Uq zW7q@RI=V*u{$wSv**l@wJ8;aK&Fg0ESsAhYOBHoz4{?gm*J%ws;*HWWUYLAI`3K`R zg&sSPjQ;j0y1C6LT^B7V|6~tT2|OaLRJAbVi1pOxA}+iP<=Xrqx*Bsh_?_D==^-`u z?1MuKkUkZ_W7?4atuKNTJt-w%Eltm|yCf-LRL9nWK~}5Wm;AZ3IkQiyn}uApHi@{d zZ%A=3=b~~pUF3OWi-0l=wpLFhzwu>34kGBzhl&kV_-S=m+nP zfPVZ!ot$}D7gH+f9pbg3?kL`#eZb&17I%H!V%F?yHx`wghrjVJ%v>bmnHh?P0G{aj zF3SPzOUIv$kgig}^r|^-tVFSr38Su*kE;>eUZwNq3^PJ(&MJNA+W>H;lQtz8M z3zG@23M3xuC5&3j^7+7TX%C9}h!*yqTI|CgYbdxy9EJYgf^emGmEzi9OOIn0qm1t*}5?> z7)o7k&3|hfW{|+0IN7=?ud@?%Ub5*cTRh2sV9_Q#^S9sF?+V75%{VtQ`C<{%Ige}c z90>+X5@beReA<&z*8U*RqA~~Xjv8<7ZfztzE%Cs>`)Hlz`{iqQ;L9Hy9NkS_1T15O zObX%#>v<_ITEN{`^;5(X*e0bKVqPML%eu=`8CokTui^{>eV+3?_77zoJV$>6%@;Cp z?3h|Mc|wI`2UPN}!z8BHYa=2)&!J|kP^gk26#?oZjB7e>yM=p7UW`;YgI1yDbISpC z^a_beqr*+5{ZU#9F82b;k4W!FE70RM>SH)Szkp(6O7*?T;q~Y^%CiH88r5}w`M%9y zEC63Kj4qXO9<9Uk?VxqheVIf2!%#ejaZX1JOY`P}g*o6<7Mkj7U`3K{=Mj#|3obV zAc={KoKhYYpS&=$y9?Sfrl(?0FX%3 zBQf50r<9}eYwfQWO*8)yFvZHalc7@7Ehv?LWYCYK6@Q#_sHtL6!`%M$d$Gcmvim1x z+*_uf`!gv6OmwUr40T{=Yhw+gQl}KE*%7pAOnN7}nnl?-)3#QgBTjJ{_ytizp>bZJ zCiuoSW2@G!ljiPc`|yU*pDjk30E)eMq4%f^Y@3W#WOhLz^NKub$crJ`_`MRhaw86) zfD+^n@{hc1MllXX!AWFC+bk&YUnZI>2a%G+4QhC($yE@~oV_esxNZFOzUbL_A&I8r zRoV>9nK9C4wDz_=YPp{ixQDiw+RWgzc3p;gJVtuLuk&CQ=vzNI0eJ?8S)PrfNkb} zOoiW^++#=mIgZ}Z*qakF%q3R1c5dv3941L26@?@XA!6WkelC4nk}M%lRdLetiM}#{ z3?M{5`oZlVr~#9I;?ZeAvan>Qy0gI#>^?`&HsQ21zOtV1!)1!^Z~F=$?JX}HV}r;3 z3)h)-WWCx5LND({BNsP`e$7WKx#_%OSNFQ;Ty~1NCaB@QRL_A2FqIk(SOcV#2M!rb zN@b7VkHvE<9a6*|1c%8*b3+Dqhg>9EmaM89BA24=2s(5lPbm4Q6V@)y-T-54{!~1dE;CWB z%D!rMR8Hh>?~+L~QTEj;Nq``Qz3EuJukMalEeekPCsQg!nmHoa%?;Mn08Z7`CCyJ zAcjdz#bD{BT!r}N|D{V5hVDPQL^EzoT#RvirOf@#CuQqW6BY#@xtnC@bb(274Wk7; zaRgk_=>KYY`Z)QT+KX#HwtlSQ!5Y#nJFfrDw$_M(&;E(qS+?#CU^8l%0!`?%+aEuO z0NgCpZ6`s14CsGp_6EQJ?bPYBeeHiLj-EvOpPEm9J=3(mnor4Jx;@h~6;#h#+UK4b zI!6xCF7dxC$FG2+8Dm92n@!vL)~4bQ(yCPdyBSp)vh-Ico`dkS!uYpW`2V2+`9EuO z{kM?sc3Rj)@GnScSm=Jz?34>_&%7|JjDjfq!~l`!36(ZOx7M*e-Im+WcAA_B+>zlg^B(2s60m#FK41`ym(e zIC^R#iD2(vK;9_*<@_Zva%#O`>1XLm5(7&|DQ>H`C{`WdrRZn4sO>n2V#NXSTn6*D ziuTDnpo0zJhcCubBDfPsBH%4ze(|#6qQtU=nOfytC#x3J(sc*VX^x8qk>{SoA=>lD zn9*lT+h{BLAYViA_$YdfMZ@9Q3b8ZAn&Uq6SY@td@l0_Tf%FpsOB0yVSwHe9Nf@4; z3tVmuK*P&R_Yh`~&MF+J*BdxYd^-3x&zq9}ZQBr-$6l<^GqQOn5vply^46Dy4)G)n z0tky@3vB&FrtAJrw&qL#3h1gf)4AP<%(HQv4mLTHnq}T1dbU%ezBlZif5Q2bR=GE} zZa@C%C!~{S{lGWXg3*)x?8)-nT|G+&O`*Kieh{CVtH--78~3@{ek)m6ReLhlrXNXk zLcp~)@>%m{b@|)mag^llb7z@J^=^~kGO+IlAGLvP26Pi4`8;O<4vS#$S(@5S zyUgM~bI+2jixbOVp3&<+nh~F#vqq8mL>2W$zM#MFFOV18$=NPcnG4xgT?P(tItyBQa-4zr6V^i~v 
zz+(~d$WbF%iQ@#kG=(UDpDNpO;Yo#FWh=CKVY{Yn&MEQyE1BxLYQ1fT^M;r9J`2 z$tLI-7uI5o^LY_Z^ zQ?py&@>DHhpp!Yu_aYa)T)zcibK-Mb;Eeu7Zl9EdR-z+~tQO!zPM$Vde4ioQ`vtgZ z=2j--OUVmQ+tC*R%nv;T3H~=jefrLa^IVTB#E=pIyjG7@Gl}y zJ#K&$DFH9(kmLj6o%@2qoB|;TC5X`!z!;psTH>Ctu$wZhu zdH&b{jX<*SOVUc`z9G;X`(m}&&s6ySAJe|ZR_>z1w)QZOPk_BrBF6N$G6lTH| z^%s)@N6BN}_mkIviU;U@^$m}Jwk=RSltug(iytwTuwE@k9a?GJuK>`|=%liY4Luqv zn#BM?lO^qB+@)*iD9Twd48Yra)4GlzKW9)9+mrdi*1Qow>CoBd2y zIVDtbDh_^w-(p%STC;EwfN|J)RF0Ls%04b@NTuCQ?uu1jN8B*F@TDV$T}nn1_-F~R z8EDZyz)c1s5o3mxe3C1j1~TU+5IHygj-3;O5>@SG#LepzfaQ48SN4gg=n|-#SiThR z@#=3^HB)coEMXl4BA^yKY+MLhotycGIGpGMdn|7NbjvX4$wLQu>=sIJt!ueNoHbRbmn zd+%5D9tc(TG#>!dq;r*pmoOwn9k$Nx4Yys8W1ILD^*B z-dm^Xi30^Oli>d2X7haZRcF2R1rADmKHn*(>xc zkWbupLpc^tU8;Zm5%lUEZMl%#HE!djc2{0)1g8=dNOgx*!hg&54al-6mNnscztDoy zZ8!CD*cxUKW;#mc6)3;#vx!#j*DJhg{!^ks21)V{b~7slvZVYdlAZFM*wRd|w3qv6 z_jH_B%YLYytd>iF`!q~s}H zpPJ_BX?GZJfVe!ji~D-vAm0(3hfbkX+}}fiumj$HFi;|zLEwcZ$@33@I~cq70D*+8 zIKbZvV_k^YK6p+Sh~-bgDG1dt@q2vZdi4XUom$UDhE05oeC^H2qAKf0uyKue z*i1#T3|`h~(NEu2bPAjjD>;hSXm^gjtI{021M-p>P~*oQ478{~z3#-}0pyc$zuN@z z5xDDo`eJ)e5wON+l-uULpyptrZWc_=?gtCPCy5L&PvH4dnTuwQD9nl536R)*p`5nq6$peL%q*ZBGc|Sot9s752_?e&HviiD@gZXw zmDp8q!7F1dw9LB&POVnEo!DwN$J%>+M8a-M+hye!KF*=k+D8d8O?ZYC#HFom#i_@y zb=F7IAr{_9MsjVFaliPks*`1uX zd8f9XsP)S2LbYfN;}u(_U~v&WWUK z!(W2TB5QxNvABTIV3xy4zH8-?U-6qb@W1|%c#e!X- z>)&!`^TJPxpj&R*u*t8qpCn0)E8t5Ur`zq%BPKPi{m*apL-){6ZG9q(+Cq;fy<%d}) z3jLuCIhd51yn?xn)nN4-d!rQN8d8JC-1xNt&IR!IOid7}~SrUIf1{vp{82-&L$c$>!-*&0?X-K?UyM*ck8BF!8H@TpeLJUOoxl3jYx z{5r`*A(u2fdp1qPhXYF-mVK#<2%x=5%P!@VHnk7wway!@-%HW!FuzV)FpDYN%wwk7 zBBq6?Hk@vRyD?=lSld_-cVyDE)n>$_DHZ(m?FxDyxMD9eK8+Z$>> zS?2Q*|! zn?RdhYvuKM*^Ahb&kbY0EjqU+yHCITU?uT}Le9!m)JYLG#i~K)kGnW+#PZW6nibe< z#*QypfKi+dJhWm$zLaa9C@3i-RS1pMO~@zuf|fFNVNbqoCFcJPIl=rgv(JqEWq+d; z@)$}Qe>hPU6vF3^8H}-Jgs+P5aPr^01TM|Se5TVM7=;-`90bUQuF)(%&dyQ`J8r&< z#2fyKy?TI^YRmTI;wbmDMR3<|$FKgCxN8nCEsB^})Df(`nf+ronZ$YD&q7^C;MdrG zm$lF<*U*8T?zHe4wcB;4uu;^D+Dp>#EKHP+klI15SDW20gPaLVUMA;}YJR2J2t0IA zu-Ie8JK{UN_<7ONtGwnp^mzi?x^tvz=~tUqD&gmuX0I3Nv-f=Sb}}nhOexkaV*ZUF z_`TCy2ZLc7k0GeAbGFC+8V0>jYNOIZ8|Ceu{WQdD7F>HPQ@yByI{`gJiYCP}CDsoE z;rFI98WrHP&7`q>g@i@|AP@KHn1910I@ga10|RV9J3+qX!QkzpfWHENQ+Q3$;Q3QTFL+n)$NV=+K#!wc?~`0{ zZFJlSGrXK}E!396UL2Z;=1@U5&Udu~5*Ej-37~g#7*K91O9}d*9p&EOk8bK4o{X$OR2ka;_*nftk35 z?4hXxG>uafa2DuEb7ar#J1DBjVp9$5D4XpUJoL_{Vo0ksT2yVhLjQchUTU`C?{}jb z*VxVDg0!>n@xxPPh-DYpeP9{y8>h`jX!ixA?5209L^4JKXVJn1xltCGQI1n0o)Jng zCEMxh%V}>#Olc=X=Aq{qUp7$fj>+Dp%Ip!Df15w6oDD%;>>C*xB<{gI|L(*KH?
o`p)5a6TEp)L-&WW!!KJEk zeCohZNRGYgfcuzVmm!sP8`pQnIL=Jv+Fiq#PL)E!#*O`$IVDX^95euYevDeO*}^;f zR(htZMO}Ykf7-rbB_33)_MEbSDea)HYC?T&J+lO*hAk`fp(Z?UWd8&9zA2fdQU-*v zrrxdT(NsA~om!6prz?Z&FnZGQ*(Vs#vaW#q%J`hXV2lAU62UggYr-~CHk;xjWNPi0 zGCL0E`d9Rz&3cfQD1WC2c%y_+V=1N}+hHx#CvKZ1Sm7yA6>l+K^8?T;8i#+Vot^LOZ_+=kFLcj0#<}6!CF8e4} zxi{M;A^t=9n&3u&g$KIg441ttIRwPHh%Mn54C79Vw zDJG5Ts^u>8@h~3f$H|zQ&l@zjawrhP{VNkIj^#3zC3QvL!cMVwprj&4gX6cun~0<; zTR|5FGpFxw{D>i~IIp(-K~C27GOA=*fmlJT+7W}m*)J`>e=fAUVbX4YwV69L@ifJq zF1)@lb-cIK8pxyO4YyR70ZiWSu)J)g?>@>ld&6rTdd_oquH(FAB~3JVqryrO3C}}v zgcy$K~aPP7NnjqJ9B!qnhWmwInd*Lg;3qC}3X+e9;mjYV{#s2$(m| zlY7^qPWG668hgKZMzeskpCYWKA4%{{K4^H7G;;B~FF~yNaUl8$4@ffVed)f4@*1(k zRCOsGPGvzigQO>u=)JTCFX{LTsL1hvk?knw1F<{bcv$Ypo~gB*G7N;?3k8LwV(_nN zhN&3u`)<4Dt3xqPg0{$DV(=T#F|cTlO(JqzP^7>k4l3`lME%gt$FZmNBkyp0n`U&~ zr4gm?WQ^1VCt{*~9Vh%&ALF^9cV(H~Q+8jcOWF)2$>-~%q}O@|ZNmu?@x?FZv(xld z69gUD@wy8w)~>9i_)am>k~cGu-ww)co82Kx#3_E>Qt*c6t%S#qjW!=@dEMPhwdQ)* zK>>&xQVcG~FSGoyoydPOn31mn>qMKX((IAy!x~7JWlv(5@v(LDMznHNVrD`uoRf(R z6^^i}ko7^xPXwxc$<_^TAE) zxM1eC2Me(2+!>qxCIZ{TQm-Sw_6^cpq|}VD6OsCRp0;szB$M#h@4=Ba#pTT`W4M#W z6^V^8*Du!9e=jknT%f@6=k<&wf5f9FJO_gbfT95k9{mlIgK ze!o9f*S1kT5HZ>q8(Keda4CI!`M8hz_;P86+xPhp^sW#iQp`bhgZe?0F<|}Ht9Lya zv)SD-yS>`N>&g~(q`tCYCR<*pc!n~0lMhVBXWoaHP>#u00lWI-j{MqVTh_aFimm;A zp^+I90cvEPndL*v$LOtL?1(Wq&-?YA=s|N!QyZge`pxUO3$zsp|BcxF_c?kScmB%( zMZi{WciB$@X4FHb_^wF5^0Q%~b`jownP) z3`nu>f$^}f;}Q~4@9aumN~jZ@uloe*wnJ2ah1w}^_B8Lt)86fKwO+C*?`cC<<|$p5 zOjzkKY7R*m(S#y6a!0+EG0J$vAD(dO$szy^z9ns~dU?``Z+CF4J$*jN#?|(P%@r?$ zJbBRlfqvSe30*>=4=lzAQV>XG7{Y80X|g>iw+@&SiL2jiKop+i{xqkX3HzJ>mwYb6 z)GUASZ?mW~_8kY|+4%HLj#rAite)3wGAuiDU(&yO8M!l(NX@3~LeGx3^YggrAkbzH zc%Y^7hl_70syg&GsRL;zZK8Zj@->RA(8tN6a_b^Zf)7*gcYth#76Z&>ED5ybu^Z)S z_Pk|XSGY%VYWoT}^&ft49X-G#9%qq$PM9JN{e{+rusQYRwR6!1kjcL73v7AG_Ogea z{rEa_lUu9t;gBK{l8xkbE-$z(DBo;dFor=WXwPW1&(z9Po%8=MG)9k%8z|GDC zMu`vN=5NlA%|_WZ4w9=Luv8;-UFh+94+3Ryp|a&-+FB&lD#`H<$jWTuDj7Y!`M)t%@m!rYIR7Q|nVu;r_|fO&x*TI`47aZ-Fm~Y!0sK zeG0t5wmU~A$>DF$3#Ro6X3*POKC&fB0G<{xJrljyF1Ou-84I(%tg-ttOCf!%)~W%T zWa#dpm;2Xcy;)vPYvhN;ui3VN6-ZZS#M@m=LPJV9dbvXgsYAdN4#| zL;G!0^Nt3|6SaoRCP0WyZCPN%F38z!VtG^)nCn6+T0qdL4w$5*{Zd z+$+?NjVor(!pR8VjL~5ns*j24^U>F~ZlOQ|Hxn*UAn(iIC(M;=8H8@#UTN>A#uO#R~Dsr@1k7UCIg%5WY zYHumOosdw@!SsIYCdr|RWX7yQGKivl0C+*vVz+>C>!cpmujqj>;0;k*A^=G9`wreQJS2-0F zA*Q(X0aP9t*;~irmcf;m*s&vO01CYAaS)v2mOU6=%+%GQVu zb&ilYgnBji`kDzEEWNKOL-HB*sC?v5`);f|Y~DMY5hPhD7qoLFCxrTM3$ckq zb*%_b_3TF@QFneMtzy-4bjfL3H~LCUnVrTY>PW8Gud#u`X{=Vn71gGW^+;keq6$-D zns`a_x+L(E%U;=(p9?=~ZPlE&KDS?L=ggP4Q(iA^@Xs1G*15yZWWW+xuv?_`-^r1hVJR&TIbK+cU5P%C9md9g`E}Qt3*QRjy2b=^q_T}7f!NEIe3x|Z zB!QJq4@7Xpib&BIUAvjo5lKoF3SoO-sX*9WoA>0oyFX#^VDmlFs!>;ltCi;>?OTYp z@qLR%LWw}8SZDH1qPh4&HbqJhNB&d3g7;jzBgm3|7)=*9dHFgpQ4-EGplK4-17ZL7 zWJ}nQ7vwaSfwUwh8^HK}3SHjmZPsZ%9W)~d#IUkW zy( z;h)k@w`8`sB4xT`i)X|1(>H@Q!xs?VKqVq{_-4*)M0UPrXXk?tCvcY1o&hFd$9*w< zvjNzp5ZFdPV16W_()C`BE=46-Gi2bk-z{5%+gP?Bzzsj4#ZrxoPlt#_IWmB;V|bZG zci)$4$KV|&%T`rc7j-$p3dbqU02ALTYR3=ajobNF1C_;93*H0liaPb0QVDnnn@PLx zjzsWe%78mBU?YCzCL|AE1d&Y6*&pVY*HW+Zj?m~DFr8e$}c87H6$rs3&v?{6U*YUw_3kL1#zaJ@lK*T z8Y*m5%$j6Q#Tb`fg?Vf#(!Ubz9?8{0RYY@VntrigtGPrUTwI%EkQ}J@H^=(*7Pw61 zywc{2A%%4)4walLyQo`0z1lp&uT6q$nJk$J$!GU2TlUzXhRlBCs~O8@_`D4o9YtC3 z@s@mbtJ=s^fVw&bH~!g{+w=AaudT;X@PXCXTd=N_6d9bHARCO51ei^Kw{7udW!J+oU&acg$A#X-}jty^mhB z%Mz8VTQGk}HGr(jI`i5c)Mu{bA&yCtt06DYuC&GX9_K2|jq zMq7ic_Acr+Q_2yiV#W63&QX_;bA03ii`$Qf>Fv2LwSRz%f1*Fpq@tK3?F+P%s0&pG z>ZbK3)6XIx-Kd2+C*23s{i2CzW|!qYk{6V{DxAz3Lvm779jewwOcd5o*VX#ub_vGT z&=%Ba64z3n_23*7@)7@@~DNLi1$ASBD zF>m)qIHPv0oZI0+B|F4QsD!~Na$Rqn2A&TYu?$X=q9v{#>Zm`ZO!c(S> 
zepFou{3OIG!x-t%{BOBb{O5?|JuUMeVHLMO$R4GYUy?52>^@X!yyFPB)`QjGOqM{h zJedm$F$MEnTt-H_J|Xkp>@!c6YDV5odDeW}i4grrpjM`%@&s_JA(&;nXYwNxzM!KV2{yfExF;-QY`L4cAy1TT4(DKOfhY((W5J`JFjuTuC#^Pm4^~$Rjjp17SpQ;b z4vtovG_M}_?Na4m;nqIg*4gOYe-ijLbkW&zb7$Iol=M0-$`t%usQHV!aWNyg8NGbS zuvzg=e^OjYq69!qs%_y@b7HCqm+u>4`lFtxrE=NA?o77st|bvm{uYZR^mn%UTfHM<@HNBA-rCmpIyyKUPc1FOmB zJzIivKfWEh299XQ2D4@5ih`Gi8m(`ZTL>QZ@1s+N$~3!`Z5JMJK+D@RHrVky!07Oc z+BJ(wble+iyuYpfCyO20S5AneBf?>CgJG0#0XFBG2-4Ludgq-Ge?4OqYSGc=T zTd@QNs9Xj#`giR-rJg6eGq8nVBGgk!ad-@_on98miTapZ@p)mu%s^LRbSWZn)s z*5Tv{JI~gSw$w1-!x>zsR|t(_;5s^@!fBqRH$f zXL?OsfiBVdD-| zfDAdk%+C&C$PWR?<*RY1#N!TsoszTfQ(;m$Xs6dJk}l>}Fgg9u9~+sbsb~oOp~eC7 zxc5QaMhqx50atSr9jU0{6gKAKUuAJdDhQzRBoR;iQ0lC0@HDc=W)&KVq+Eh_cg_`kd_bNxk0Pi5gI9v&Ttbd`i)~ zQ+Ryrz9uds(CQ~+Fau7CkxKi=xU1t;^1byD&0yLJ!sqP2%3w6dZHXuQqNH?LY(CH; zm~dK4FM1`g&lQ~`y9Szb@rvj;VJG@C$2XZI>xLGC?BuJlGH;SCPfEm-G?ftI#Sn|& zqmDe6QhMQoLesy7u(>!&{8bNze7vrr0g=&R?N@?oN^xo2$c?R?6?^o^zGyH za=mf1T(4z%nrk-slu>qazfBb7e#A1Q(7>>=uX6&uyCY5P(g~jV@`1VgT--B-E-5-p z)M4GWQ+Xq}+g!SOgU_e8CqJVnE&2Zj5$#08_H4y1FJ$AR?5p4Js3NRTJrS)z{x>@_OxUWwJc)Ns9aQvxOp@gXJ-jJ9M>~>=*yY*;qZiv_n;Qq z+*~Eni%T+rLB{fBXs=tyadx|XlF$2`6$CfDxV^G}5Rs0@ar+BE5r4w&D3c>OU-fBx z$ap#`JD%-QU}9(gq83!WQ%m)@!l@G{e%xCzzc)xv^=Fq?KJ&u%u%L@-NJgvQ_Oh7K z`GlBb?kIV^weS#xE$@Z^g(kbE^@Sv0ML<0VZ8pc7*n`$cG|w#;PSoEP5TkX^`*X~F z%c39fpdWz$&@p#%VLrMw?naiqhL$^69E@MY8q->`UAU`OdjZ3Yj95ihz^ZN%|*vg|j;PlPKhay*q*>ZE)Fg@6Hhx*;Nj3eI+3%VYhM65sb zLKfiM5C`~m_>Nl1P?}yYA!Pc^s3+N}WPzO{*TC!iKVCypYjBgXwwI!IDG6m{Z{_a3 z_6GpApd1=7iPlD}Z-X#u_84g*k7?fC*S;xHuYj)e+EE4l;KRHsm;*+3r0Y{c7Z9k` zaj#_ea?a-IbQSBE`v(<08I8iuH|4x9FJ%cAL(=ji#?U9&nqJxT^L6w^c@erDqo=F* zn{o(o7xzgZhVZT25%Bf0o5_rrOH!BwXEO(0vywP*I<@3B>PUlpRFVN$!r;gkla6N! zz!BDF1;W5FQYiBwvrKRp=XzalQ~fBQF6bpl9m}M@D!jOy%wkzXDw$5u>CVB@cQC$7 z8kwM9xU={wy+^SGhltoUhMFf&>?aKd)B&_S3`tVRyXphujqx$JIfpbDs8kKM=smk^w4kHtd}{$4pmHXNHx z)HswhE{LY&5aX;p0f)AOfv#X!8+*@%OBXX3_b(IW=`jYIaOgg_@qs@dL-)xIiP!xR zqqQ|#Y~k^DcI4_z+bzT6obJ<=(i<14#>{az5;B^`FN~j+cVT$l;W)1Xl7hxqNBxe-FKLBbfFTgZO-QVAi4A@r8&wWoOz;QNF)I3!h;s=MDlk@);kyy+WOqh6m^Dd}Ii5 zlzUwjwup{2xh(8a{Z0W()}H1+NWr^MMwnvQb*b*XmpsqqpI!LhQw3T)*tSNwV~9cd z>-S#j6Io;UqW;UU%YM_T-Z>vz5(NQ>K8@Y!dr3-=>}-B#cl+M0KLW;++?I`d1O_yq zzgP$>0Yke6C3 zFMrP<0206?wt6asX(#-djgnvAed63f*u#K@*;)uN$riD&>uP&xGB$>uaSuBFon3z( zHXT-v=}zl|!b1=Rq|V?<&y7~3&Fr7Wmm5%Bo(Ut3PtKM~`LZ-fGio2dNetW>2;!d9 z=yhvvgSTKP4%|7L((Bgte*Fy*tfxQwXVZKIZlPfqC$2+GTc8_L#IcZ%C&9Yu_f1qn z$$-2!p-F#TTUO6;Pr@|`E;81M(~YY7>U=X%Se@>Vtda{AV-TeeUVOi0uSp0~WT1Xs zQ2}^9X_8y`L9D47#vJh354{CN4Oi9@xl9)$Z;Lpzjr_BvnPvT}&U-n*h_z8 zx3qoX8FB?b;r?yFIyfiAIvki!>ODmIerQnlo_nyhd2H27(HOQ^NRDtdKf8K4sMa>^ zE-IkX0wjCE;lGl-4r~l${yWmbmS_e?u-Iq35AOVNi>jF#0k@ODcs%_DzMb)Ea*+E- z|2wewCheyVh7B9=x-!bSXY(nHo>n#nL#~Im&J?e>8fM-mF0o&P^g|k%=va;7`KFG1 z(VB%5G7=vcP$(CgXrk= z@ya$&IH6ZV%P~U?1_$3S>H%R7DS1&+^rzj$uFA#JkiZi!2=Xj&Xp(hoqkvX>m{A4a zjd}7u-b(?UBc&Wby_Nv@X&(PcLPq$~XQbZf2JC+7*~9 z$!G__{e0HY9wH!rMcjBQfDZl@G|B#le#gJHP5!q(=>NXZ^1tyG(N44ofGkNRB`Wms z&&K@s#>)RszR~|%-=c3J0Gpvb>0t;vU9C2TpK2CfNsPwOE(cyS;ZxGPzz;IC!abw$ z`j=Z4I13#e3GSm*x{+sGHe1g0K_m38Oo%s)J)Q!;C$Kd69hi0;vfT9{HVhcIcUW@< z{2klYh`t#hFc9fdy4jkp8CFy7^2&uDusa=&;u~duoFwEgbKj&WVT){$wKgz9)^?x5 zM&y6?28WogEPU97MpFz0rWT>_Briy~eV5VZ>W25)0P3s8flHUs*xU_sb!vl|Ly~*!<5a!3$25NQSnzO89=jAMV`%0L zvnKSa_J%ldR4?$Jl9Fi}K3{DAe*CenDr3`)H(saOCdr{E9UEIt<+0)Btpi_PIbTt; zrBtp&5~Vs>b_+3Eivv?RtuYRe2q*%2d{sBt9%5-9+?#Q#3i5g5-i3b%W3X9x^Pz?= z$SiZ#i+QQ6MoM}vp3C!NMgpT@>Y>S~4rMO;I^PvDjq(h0pQ$4JP5sX71h#5#B~Ww+ zGLaK_yDv*X0okdJo2IuRN5&$t)I^j2hA-1sHTN(H?8;PV!=as2eKu08Jy2^TyF#nl 
zw6{044UC==BHRKZX&1_i%zIY-(nVU%DZ6DXp5R&d*FgtXFopZnk-aquy{1S-9`9EM zTH#~cP^=Vxu+kQtYiyZJGO@c4tZFhF*%32a4)Ma{j)7SYQ({`rEvl#`^bVrW2v}OY zx45Rp4bmU#JkkT!o!{?UuwMleY;Dt(RY+xEGzCKylDCv#y%--+YJen^T5DLDdn$0W zacoXcS?=1@6DOwO2Og52sUsT`rxY8Qdp-5ud!Q65+avKs$_*kJEs%>+Z5-@Shv0hQa-?-6)e&HonVpXjv0P^7 zaKv(@u@@aMpE8Y06$vbQ;+Fz%Rk~T2LS$BHLzbm~JDSk?_Q~XVq!ATmVL9q=uFp2w z{8_ir*0FFkfUr>lcp;+gfr2wDD6|Jb+%i}qj(%L*=q_m*c+^Ez`g%h~EHtEPLRhowf%`*I_3+q^64 zeC55{u@)A;)_dHG-(GFax(y6iRu`9HEG2{C&JMdeEM{f0E7mcM_l`&5QP$WsR_kpg zeFYXW4|jCEXosx|b&*rXjX-H318Ild{9%QsVb40sj^O;Rl1_J%iG4GAkF#*ANW(pa z2u8aJvuJ^3XD?~SI;ggezYfKz&&k?kT!^7g^#zxWL6ThE)^DmJLi$0ghZZ5)rkeiq z+lRd}jJ_$l-slWf8&BN+?LG9v_#>Cuia8#P!N#McExxA9-!2f1PgMr`1**m!_wuD=gTF=&7b>IN`)OXFZmzZ2*1sR&s z&{m+lb2jhpS7$6byU{gv_PWb3`S$4UusM;Gl++cvjoeC@hWIHJ_{)tg&Hn#v*q}tPEhRN{Rq^vM_W1?;1RN;v}_4CIUHM`Jrxtsc(||4$EWs?U4;Z)5qluNr zr=hTo={@RuRQ$P@N=>j-p9o*SPXq0yfXo0?@SbvJWv7}vT@osU{0rrHO7T@@gwFJz z1j^SAB+vtUf!2~%#12Pj>Mr!aTEgcp2O|xG_PL^Brd6M1N3#ptk0Qf0VY4wMO$=vc^}WDGLzO z&qW*C@yS+XIT(5%J;^b-27Pvd4DLKykS?8cbRoQb;KKt|@+;e?>CU!{Sn4_yJ?-ztxDyAm*vtWz z{R5xG)eU#w9(CAQu)L8Rd&(^zR4}zC2Bhk^pTlWf~1h*}Y>ias5QuxQC;N5q*{neEz zC*3X{U&?kQShVj6My|Y(+}(1qji)q*-FV$p30E2jS*7BAdw~8c!ekv z8M4f%QH+GL^)Ngsk)6^c+mJN&WlA$4vS&utvCm|Fcj)zddcObN^E&t5bMNck^ZK68 zd4JBai>eI@WyYYHgv-VFg3x?s36VKnjsW*GP1h?JdDQChimfg z+MYBEP12>_61pJXEabt1B4NP}#&4|i0C@AboQK=y{CslA0dHG>)5CI?vekAK#c+ zpjE4ocBBu#PCwCYRGC3kf;nkUzn;|TE%@Vvp=`W8GxDXkkh@7t9Qip-q$3|*_C0U= zU>l?8>tqqNEwPTVU@d!JCxBBxeRx{kYe`xcc7Et7E19ue_|JDO%)zy3o9hliK2&lMWtTK zRCZQ(dm=5%x2*3Vc7lG+G!h~saZqAj)sdfQx!^TVyx0-kBoEo;QS>FLY})>fmG%5w zM3}Gmv#`~#y-^V61z^?e0b%i1{kO7jT@t*D`ml!e4i!JseoE|O3I5kBxx;1m07^&$ zLGX=<>PlY)o@j!l6>Jc!kg6{O$XM!c9h(!K!^0F^W(6ztLy8%NaV5?ajwQfzSbe5h zks+kLkDUJ|SYi@OhM01?2~JuTqcr>X6%P}d8F;Nu_$o!`ROaN(CTy`y?+~&1E;zH4 zlPOuii6HlU58v*FtsdoE@}wfDTMmv;NWr_Hl_folTGg0WyFpuV6bXL>^tlH~_LChr z!KSgdx>^2pwNkrr#8sWO))*Eq{wep^tVnt4!+ABo402$X<*g=&9>O|3eMeXySv>4Fo?WM~bIe>F~Bbc^VB&l!UZZ zmy>~pR_WX^#3#a3`1~({51m|19F%Qzt=b7Fehq@wvn$wDv&-^c^J8x}M(%+61G@PD z!j4@b?3~WreIpjtv{#jH>tOHOY8!*XKz5oU?s>p5GX)!dldX)>>ToN`x%;N434m!1 zY^sokoA6icW)$|eT9)(WB-2n3goD7FidU!j&C-C_5{o zD$wN6V*k{1m;>t*gi81g6I`NdD_8PjG>l$^1!ZxwxqJVV6;;T+M)aaEg5hS?;hdSgV*TSKOuVWq(N^K1(V*v0!C)VSPt!=mC`_9I?F#9=>K!sSjFxt(IA4j8BeI%@*2X4yw!G%M$Yc4L|QO$nKM4dT3 z+}agbeywkY0(<;Wdx~pJ*7fgDt5b^iW>ZRhBC1}gzIQm}uw;9EpgMSbf%!T6xQG}{ zO~SZbRRAuf?1f7oyL-c+>@**Y_1Lf9wCIo`dZJQH`LC%(-}>#0=$v8mh^(teek^K~ zOkQJ#N$@jwWJsnIGQ^dc;+~iEn3$VX>|B#ZjwC49g%N_<_7TSpCulxwjG;UKs}~o*B4Xj|eB@ zzZ;g?mt5O>$sR#)`3y2nj1BZ}v3P=md1$})rH~^QRntl<#c zx*V8kUuj)g66_TFVCC^)gr@>@?bm5Ur~gw}sJ3<6_vl_2BQY_tHwaS_kuvl@-xA1V z8QObYk`l50_fj-YFFR#?6eJBZ#=G6<)V~SuzoUU$nlWY1S;8c-c#qYPbJE7QLzGfs z6ndO>Vqi`A(z=W53|FoHXC_QZ!Bh;r|8wYG-+@4(QH zjOAOXcUfKtf5sTuQDJAzNMyUg|GpV8(=Hat&yJ$_QT)L*D-?v4Kch--{`cnmeO&Z5 zQV&B${vWXwtq-{{*`0GKz2P@GL?S-YRS&O4 z`*Gwr=4}bt(RqM|YZQII4G?Nf?S(Z?GVkwbVYMfuJmg|fARyYIz_hLBp*TkxkCsR{ zF|aP8dBk=38UUL^w0`cSK*sL!1bVMb6v|=p@GSX2pr~>~BRUXjPvx~>!a|C1+9ZLU z5LL;r!{aPo-wy5JP`{+Ss9cU5W)^;Bd?x#7^RBRFX5@-r*uAu-e&LX3JjP-0a|5*lUgK6JE(&@-SWYvo z%$2!_Q!!>(k}MVafI#iM+(V85;lBgJmpxzJ@T13^5QUmg1*Jw8M?6ObHHb91jg5kN zv&EKUFIBn_!X#nBRl5$MD(YIVoyw8m!-xuE9e3JjA_boKGC1QTh9};!9Q8V~hd}wO zNayS&hx+4!IbJMeIb;8L(hfnDeb(sX!7HC{8aIPz#WjOh_(CRn{v^K>o4q{LHHQSB zn<(+x6-z2?-!y}kIeK32*VsFcV|0IB_!2UUhB7+jmv213{o}`%mtP2rP!&rp=4-I4 z)I5>H!P216>kOMSu6JC>*e7nv;Qb_J z#8T0fUZYRfw&4nPH(zgnA^w*gh^m;KqC7REwd5e_f#8o@_pL@Q5P)nf$j_6R|pCr0emW3 z|5`Tz;-371Kk({gZEslfnYPjtS(p`SP2KLCkGC!9>s!7!B8HkW!iBsTGITBDaPRNe z^XW_~CHe|a^C9B^$7|jndd5+Ov)Udy>7iockt+|>nktf(iEYu@0p734Fj_@1cjUp- 
literal 0
HcmV?d00001

diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/images/AppRegistration_Authentication_localhostredirecturi.png b/dotnet/samples/Demos/CopilotAgentPlugins/images/AppRegistration_Authentication_localhostredirecturi.png
new file mode 100644
index 0000000000000000000000000000000000000000..4abfc72a83a7d2649344d146c6e888b448406250
Binary files /dev/null and b/dotnet/samples/Demos/CopilotAgentPlugins/images/AppRegistration_Authentication_localhostredirecturi.png differ
zF@_ZlTMB4Y{0!`97Hh1J8|h@NjmULg$Xv`YadXQI?WGiQAEqqH$jmo9xzt+({c1t7x1&%^b(o_bkd4_7G8e|!sZu+LRwF9b zm)vSg3;|-tFM8t|2MX1#8G`4q-fc5+)yL;@`4X0@9s7|;ArQCyuC!MmIdT4K@Za4s%Laq~Wk{--ra8h+s z+>!hoBHpsst|(uNRQQMX?$Y$UeL@hcGxEe;z{D)?B&ex=ugni0MSFSY2=CraO9tOE zFzT@x;U^tXW)WZh?r|#i7E;tFd0hbG$t?%F6gpi~knm{`Mg5zvplD26M`_GDA9B zXyNi!I&0se9HE{{eO2qGKaTVk8-eix`T>SwCBdR#^{P=a)~L5FsEYb@L}- zqW1hFqWK}GY|xZ7^p}#KDx_`&a53&1Ja0Y62R--X#7$|P*%t*3tsmkB27iBmU8rQQ zDfZS@gaY-sc=r}J|DyJdqGPgQqH_8e-t!&;vLkEZ1(!~{ z;t|iS+3+Af-ANUq7{?23V_$z{s0pXu*_}1%`;V0DPXsvK(gz*JP6>>Pu6hslh`w*w z#<8s!3H~>)_@8-!r0$}dL%9Hidmm^h<$~_$mHubREw3$XHDM6(@FHdU+5D1#HdT5- z!f^By)yK6-2r%Cylpd>ESrk)Vk_npjjzcSvIm0?)I}N>H_w;;`+p6zB_5N!pGMOgl&6pQ@d;Q8ObrO}ZUVPH}I?;*b(?5qF#@c<%({{QoN|NrZNxi4vg zVis4Of34QCL^>-%?I_?;kapE&h2ZMIZJ-%M8liB}Qx&SL^T=}r{-)}C}EWe*qX!2WWw^x*4GjUC4&c1sCxJJ=z#JW3 zfrvilvTMMELox0g42P8;lKkzf$rmk6>c7j^*Ps(5y%xArLXrW7dz>1x(& zYmR6oeK?Y~=j^*FEd?63@j|IwE$Q51e@DiTIU#^Cm(O*HLwjH-sf-m9Idb$`TfD_Z zF+j7vx@;GMcsz9Xe{SCdo?@+}{E!4^Sq+@m8ZbT9Tr`@O6yJbH9O4oECo#|#YN32 z)za9|&{5o5MR~RN$%{Ovf&JIJJf|zbxLGsL8J6&t&bP-LUpt*ga-NOK%(F(uduD2z z@5+CFX+IAvG)(O$y!tlm zp`QB#m?(Q3;hEuj16udM=t-#Sc@cIYPo!`wIHq7c5F?|S8pGvC47cno<6h3%kq}%w z`_$z+XPUlJJG|9DsoSQAnn6*NwEnnPc%dLH2Ifc4WX zm7yw~tPFk`AJ-enHo{HM=;`(8ExHI+Dm23TFMW4Kf{W)fD7=q2&4_5n##f;*EVy@> zgLQ!u8g*)QV=e5R+>g2D`jyH^?+j0w7MNQDD;zmKyj}Yi-p0bY!K8I%F|(gnyCBqN z%L<(peTNx|F~FKI*Z_ors|4jDF!NI2IwVnhGb-e9#{sNN^Yd<9(G@qZ175IWrMbCe zeYLjvVd9k*(oj-|wD3OL-9*YF))SUqo3mD(4lmkwiLJy;rQ@BihaQvtvO9~ zvoZb8u#bYaajO#>sX&Y_JiF6S|x&)?=6wJGQEzCX08KU#R@4ix|h zWpi^xvrj`4juHLajTVD^*HS>7fAz<}iMzd?}5NOLX9eOl!M+r7iOq>WG)dyL42&u{?@ zZDgX2i~wu#ao~iRZ@t@zUg!muPo=$5&U)mLI~eexcOO$^-7J8C4otD3^8UHVq9MarnW^Y`W#S~p%{2X^ ze+6V4z}%fgB|GgL)Mg(mD6K-CxPg?Ot$?s}f4~fnW|!t#|23))d?-E7#+zwNmz1}- z*=N~I%yM@*EmjuI4-4!bmxq(V7k*_g^Zx~vXLbTxQe10qF|S4aVZPXpS!vB(m1`+k zy2q|K$+tB30)y+NcVX_xIUuli^Ot}?WOUW}J$q0d*ye^?>CS%#IBGQLnEZkJI5jh5 zzZ?<(Xgq@FBeYKgra(|hz?-mN?Uc0F3fv2gt^u7@1=;@=6o&YWE6;p~@g@(s{oTg_ z3&Knt1OU%^iLG#nj^cqGI64N>L(}LC9o=&LMs+ox_>c5C884R(V{wvHfgRY(o!XjP zC*zDb{=Xwu92-F_+CW5hYAN=$%QN1&RRR`(^1Z1Wh|3XG4|1Diyz&2@M8U~WF1Ivs z&)CjhGQ zVImjcLAjloILGl)_$}>!@y?^3grvGW-C%0gokK=%^tCZ@PH0ZkT0OZmINr9nc<7gV zPB?Hkgah!k%H14kN)@&NukQ{Npk!c|21w1ZnMN0rO<@d>CH<=!IAi4K7w(g|XKUt6 zK$}Qu9T(Y@@W#LXZcSRE>i8jE3@kWpmA31aow}=JLuO~KGW?Bqbp!K1H88>|BFI=J zWd;$YJKulIM386nAb;i3h4Wt4ym#i*cpC3M>0XnSrM>5%^R2Q^!?O=1-3)1x`&->* z)veznhn{CjI#Z!Pa-ctJ#fb56sC(`;w@^(y4U-x5ntCMAq^%IBa4 zUq9P5RVglTGN|`dW>h3$Ox^-lq$r^bGaW{`h2#%c=DTvhZ;@=zwcf8249|@QGVyrJ1R)Jxs*P@Xo*7#9^gbGh_V(#8o8$j@ z0lts*lrU=PTS7%z>eiL#E(nz&v=7?+`&;q2Q!qBw>m#^7Hc0YOoL)dTBU)D_eTMyz zOQ;G|RbVF0xur9`ofO^F8n?cWx~`9xiMFfaPveR}tG0+;Qz!F(I6rp#t;%@SVBpiY z`tm$>>OA)H^pe7?aESJXqO!8xGcW4>gyu%aV=hP-st4&Y17IePKU<;Cb|lf|53n3S zvq02Qjm%9aDPw^o<0k^G_1L_CraIgOz#vjQ!hv0q(gY!B7sQ~8u9W4NSUT%3%i2yU zJ^{i6wvr_UU?TaKWhsvrkZpIXqQXq@(c^xkj!qe2g3EKs?>t=8>hK`pDC;Pp;~J5| zy5t#wS#rByZkW-7tF0|vTO10;Kbsv#I<+){rk~Tvz*Xzpfs@_7qK~dxP_E`X4Iy9}bcae=d#95r;mPmY_S zDV!RPUx1UJn{Aukj(anGs$p-3dGL`R`jMAA7fh(ux9Qg)>Mv{rBo;bWst*U>T1x*<_+s4xprNI9NqZ`< zwOZ;$$wCZa{fPV5>CM%&jd@_5p_SbK)*%IG0z5}k+7OBXI z(O|Xs@g_VU$^s1K*gy8tAi#Qv!qd+3m$vW&%?vN6IY7A@Ez(y8T3_%RoP$DyzeOdg zVlu53&N{3tT1=&BuoLvu6s_;leL0A zJx`7q0_g6vGCkAuI#<181!_$G(&|){$PTn84o+@`MkKrpFy^FPNNzD=vLmuV$)w2} zjo+M}J`xNT{Qr^nl~HvxO_&!5?hxGFCAfQ#5CS9ww+jS!cM0z9?hxeS?h**@F2S9P zyX?)I@BMav?%6-P=WPE#W~Qg7tGlb7da8QHYR-&!IJnqk;Nux+r*sp1at@Skhn{Fh{oaBCDI8ISv_>c=&8pk*SWsb1SA>O)W#DIuFof zirZ(TKfv~I7&20A-rT1yt(<)jbuyLszbJ9kw_m@`Lf+-EFR&D5l8J*`Z(Dw^w)@kw z$--i%pDHv7v>$nd)RIsvXPTXrHawOS_-a;GPde&&El5l3g|BufyaW>b*CMo_E0YE2 
z=2zG(P{BtVIuDLo3T={~_c4QDMZZSL)DS;=Vb_aqp6q+WuVHgU^Eyj0d-p zDHX&c$u6WKvGp&wz=$HF{`mRdSAG0xh7TCC!1Zr?e=gey5dXvZ{kO*d+Xt$2O<-@M ziMvZ;shSSQY1*wu5I-U!S0aq{mcXlCHM|?Zr0Z$A>2tZ0!XKa#zm1wMMvu^+86>+r zzyvk7B18`0Ajjz8{inMK4xZsaZY{SXUKh^2w$Se2{8%b{ZhPi)xxeg$nRFEUh{ry+F{R0PM>^h%&iBSf zD=wwR8-m@Dl2yjUC5-vusR@ijIsXD{QOu5-SRT4Xj^gkr+LeJGLUN#vNV;F(U{a zc;yK_mQ(U$e&Pt(tszKf-l7n>pSP-A+%9Ga*fYB@C)xP^-JX6vV#gTdws4jXIdT7i z)Xff&2#v6xj?D?)x%TE&1gNN(=J=Z>%j+8q4LncP*+HZGE9fc_e6iHoOD4be8e-LM zqAgZfn&tRCVgrj?bex)P`+axlD&u#y)Lm95b!3HV8iexGM8F;*J$j-_& zK0e&Q2(_MhiRd(kMZbV)bng~PMu_9rYK?bBjmF$GeBAB!3!eT_wG!xYtmbir+X3R{@~ z*G|`hn(q!zg&Fbl%QpZ?@bCJ>AiHT@QQ`bXmLq%*T)Tm9jkUIF zN{+H54v4pqef?wy^W74)rZv6JT>z0vo=Gru19yOGE1pOG-s% z@UJLGn3sw++dWRpGiW2bo6OFgiozvgww($+(Jq=^;&!VYyPt4t4<3kA<1N``#$9pq zcqHQEf^0;(&FTE{WTLb^yz*;4Dd}t-p|G)eK2!WcZJvT!$)a6+7Q950O@6R0<5Bm5 zsjr#5cjn8|FRtHId5It-@E;Lh(L&x%^jr4Jz0Yg3;2NT^aYVEK{EH*{76%G!%@~Ex z2>(c1WiiX}-Vf38%^%iQ`QLFaUk5#w{ditU^nS^JZ2#-~%EVQKX9G7r>-WK0B{>us#}-D)%<-?h4$0SLxM724yBp^5s!x(U@E1?!8Wdg< z+0G9V+bshyEd(?)G_3TbWtjevNUx_RVod*t-g7t}Yt+7+DlTVmR`$z|P)Z*$l7scumsx@>(%=^aYP|=r8<)U59yqoVj1DqD?JF6|q zEAJfMaz2g*kaZ5rz=KRaby6w2!RkF*AQG9g@?#}B33`a=^u3yZU$)X(z#ZqE&dX61 ztzC--Y~c*as1-ET?e&L!tw$2g<}tHfjQiJBGplL*flKmL^LN!_n&&dD=Z0#qV9kle zePJJA)k!o7HturSC8sHQrnN6Ug*2Kd4e522RnB;|p7$?ypW@=TGh$-iRJCPA!$-r* zjYW^&XE=!P?`4kfd1)SN?n~ETteWUsTfVpXDT`}hlquqbbiM1IkYGuCkcWYZi5VFJ zJRoqspBB-6sY54<7OZ*{1#L*l1lSv2XG=NV%Wb@>!~Ae@gYA)>(1>1|2IK^?%7qtz z5qW#tJ1b;{OYnK{fm^$jzuf5NcDm{@Sx^GCZf$sSrBQB7k#6qbdVaQ8ps6W-7@Y~g zWEoNi3b&!Rp9_pc!smV7m5f$Bei>C`R-{^0IUKzP_{PPdQfXsRQc@B!InT`eKAbJ{ zF6Q?-d>ch9BE-BCs?I(_@bqEPoAkErPUO_>+$({&6?&^1U1*Jy^a+qGiu&c{?GeQ> z-X-*?`tz54gD1t)+)nrk__jE?akn>%{j}8uHWfq%)857U}Y_T!hx~i1U zbGU0(cP{8;%ONM_PaUG=2snR}D3g=2&b*WMD2+)GfL6S8sFc4!v{!%)bBcJeu2ZS- z+t@hh;o{sS_sEA`eZEj6P0)*;uOriQtO-e*M_FX_V?Lk(;zn zLaO0u?52;~i`I>~o*lPt+u&cxUbe(6*U#SG%2T9Qw_VN)9%-omgBZDS_CZVSqU zldR)i7zFyQt{z0ze3ovh<3!x*TC3mwgmqZC*7ZcV!Td<%a((z!yWJh9Om&C+w2}|5 zFC-+yR2_e6TKRL0_&ajvhZ%;MLOLwbA2Nx&zpa%8em zzrUcRBnd%pmEkUc1kuAt>qxczN3v%PWuGZY9%IrQq)Udq0sE3*4bC~X5%G#`xY-U9 zM8`41-C_K`PpT9TxEr}pENi?qlDobKGGoDCt|;7VkS3Ke#*+OlVt!odL;LJ~sNPmT zg@~!nZ4i&l8jq&^V>MF zBV{>Dfr#+_n`#HLh!$9S0b8uqFy1}Mm35y|8Ref?3A`VoHL&eDdGdGi5Er9C-7mFc zHx20RCbtwm=a*GO!e;M?Ps&#VjuSqXuRCDfuo++o^5ahK`#tMmU+jqnQ5N!uNO>tZ zRb7=;iGeN63}V~#StpwHj&lVau}#<{<_v_nCILATQ=9JxHQNw0O3gic4TnT~KR@x+ zxHwAI7a)v_T8T})sa9B8IKDSp&5B=$<43>uSjI>+k^&yMZ;wpp754``xuteAcL09p zA!=MA>9;EcuQ>Iqizo(mdekefH?iSpgyT^#kOB;H9`R3%>-n!1Kjvg6hwtg>MrUxbbWB?ROe%DUsV|ds7Fn+R zxQ|pGa=$k1+R=sJS_lod=tm*KVr{b$WUtcTdx58}T5e&AO$7y~IODK4SS%}u4x zHOf5XyC?xC3)Kz_L)dfr3p%F?ss9RjJ+dWwwbQ9CI zUlnL`cQ##~dUhwZ*TZ8?oDFa6_H5Bn_m+p@Dee!O-u```FX46YVR_QnLJFAIC#lhe3f?${ZOF3gU{w7j zT=gnaUd(ya!_)&zLTuxXZUwgXr@$3!BpA1`xXuIt$woLEe!G@cp_j7}^|gInU3ev; zP%756!?$X1RuQ~xr^elCYO1JbdJ;~{L2t)nQGeYgO!0sshYR|402~kh=0O9$v)PNl zRp#8g#5kwciz{?h=_V^zX$7y))No9$IaZ&4vZ65~!gSjD62GwiTj(?$$cRTujM=VF zq&e0BZ(&HKsX5-$B8bPC_gy`=pfU4N(VoiFgu~vP&~|r4qb}r#U2xd3QsqT10#!>H zC$Y@7((#kiedR+BUxUO22GWRc!@fMFIwgWk!yFJLa#+Z{*?f)$=_XxL8PP5t<0Rn?aC5apSRaa(D27q)BYwJp z7?!(|jR?Wi6nA)|{$-H?J}pca3O=t@_pX9g)%}6dxO2s7fc+y<3Xw}bGnm0>?`zl9 zvK6}&%qmEJ`AI}wZ>0Cd!rju^6TN;p3^?M7bdS>ttY8X)v!eG|ga!*6c{h{FJ+1}0 zX}yb3Bvh`e5a+vjRLqkmMk6d6?uRs@p8*A6#LvPRZZV5p&xow(R1dnC_L>k{_KhEK z&Oa`e#?R6YA5Cw_b#9z|W=}(uqhEO>)zn=N8g*0buFAttltCk>Bi$FN!khC3Oi)@( z<63(2=%m;LDKq!tZr?pZjW*ykvvK7=`e`3G^7N=w<1U&>v=cXyN{zwzR#3txitJ?( zB<@l}PUc(`OB;dcrFUX`2NmB;e2(<$y+YV4Qv(^uD4-r59cuekY1EpuinKmVew~@0 z-RMOZCC-9|Ai&8K8axc5P%(UrU48YTvOOG+?+VSrNHmw=pBLbdeffwNTX>o6eLXyu 
z92muV?=`~DKpj^Ao?;qub7FuL#Y4yY5>IZmCezbKeINf3aML85rQ2difjY$$296-! zeQQGq@2^!;PYZh9u8PwYlS@Wh{dm&D<$z@6cZh>O7W^_?H|lrvy_7mx8)r`Ote^7Z zW1oBa)Y+*vGMv0F<5~o?}q`ViZTEraG?%QnM?6$#Moar#LCGY-6B1zAcDID z2QN=oGw_hGAz#O8cr6 zLj~t)#dAyYHQSX_)O$(_s0kbmQP2!O4i2s{;Ag~doF~`Y6H*IsDp3}h(PDpF>Gt*uy^kE*%zG8)D6abMx2C9XN@JA=%l=EXa zBR9HRHIv>H=&q>qbR9FZwJ$GEcut|hP#LF2PWq|iCr+q;x>&x@Q**C!PQVWsxK~|roU^Tm}^J>&S^?Sp{h+EHj~c!5;c z5*nWfsf7DYB_|(M)Z=9FftNqsR-LTEmt5mu7{1vs9%iKDU)s-fKr@U}`+U58VSjbb zE7|C^;OBPV!tU=KT>h?GcBB=X++~a`ID}xJNqk(c;!4Dv^Fcb@B;~G_-LWS032WfR zmG`cok?oxJ$eFs#TdWyI0NU?Ocw0bELN6bM{^ZYYFwInA0mNbF|Ez$B_{@ewnquB=9kOx8WdxO6bjR>#g8WEj`!|dm44~zuW0VRcAx!`$oKq0FyEq9p&4p8z*5;l zPDzh1_l!;_#tO6W!2oVi~M4**wP3m`d^Q!DG9D@7sAb4?@*HDi70xJhzGQ4oy3z@*iyuADFgHwj zsGnmJ%=Y}LyId^_*gCH?DT}1P9vHlGP6+GOFIe()!||5nSPab1Ih4(v@2Sn~IQ~MG z07}7h1=ZPYVG*E=Nk~z+Ur2q-;D*^+x4P!sW*1AVFA?S=4YZI4 zJ}>ws38zoim~iHudLVrhoRb8v8op=LMLpr`nnUL+XR#G3OI$IRrV!v6`U?L}54}F8 zBk!jh_)`7j)4&;st}PEE{p#(|;S`YGTN@wCtMYAPlBG(~`#?J%nT?~$-)Sy8@*?4H zOeP2V$(=u=Bxn}o`2X~7zT})^{v;LIqmm^mA~ooR2=yHDy9!ySPX(P#BF^B4AAtye z2dy-H&c=Qne4FQE7R}(pd!`4B!2Ms0rY0lrr+NvQ!CXLj*?BdRs(Kh%V$h1WK=dA4 zyJ$#%A7<3GtdHA!)G9_gcmO3X0*o)j;Cg7!jHL-L;~+RB$vvb&9L%3t)5V{1*R-SE z9bi6u;K?a4qJm)Opn4P_auAfS*{^oK$w_}1Yy>`njts zu54e38>;plEvR&t&m#e_`@?+u&|Ww=5G4;p<~x2RypIVZJNa%4MhEMKv@(|sY1r=C zJ(M)MOX9s()eP(s*#WgV%j%%})FopFNRO3pzmkmqkPPhS_BBTF>hVX+vWwo8G*6KL zEA9g-D&H5#Agz;1T%%!Yir@xyvUC<=)(=a711DOZ^#Z$O>qyhlRmq2E4a*4Lr)7}) z11|;?uN=){Hnu;?qgQ&RV;E33++SA(EYld)A{vflUlT2g)o&sc6_P=~0e}EGH z%zd#=5|t!rYPJ%-6pcD~gSU-?c;fxWy)I1UwE%7+=$DyAwcGjN?Sirb?$Gq@d$Qkb z+NR54D|&UCN0$vbYuqEc`s>wX2n_f|5pF9EZH=fs9%R+*T&l$bIjsEqq^38s|rRT>eQq44!kcqTdIvG$;zqkkz^{ld?If zWjS=B%+7L7Y-|T1)%u?d!?;8xvN$_c9#xrNQ{Rga)CHLTl-vZh8>d7Mm4jF8xI&tx zcgN!7E|=ZmL?%hl+})`&itx1Hd-D=lPo~`!0JA3L?iB zfjGu?1sGEe8xa+&iTdl;F4m1qoyr!y@801pN`1ujKv<}t^SX?eI~-tvB77Iy+1{Xg zZWhmr2T!c42vhg0Oxvw=ETyJcu)F?9J8)_x02e;qifO9_9hb!%R9Y=AJBQL`+z>!v zyU~@3drmK_L5&yzxLerpxvwzsjE4PnzU5IVMR5Zq98ri`WzC7OXLd`W4kyPQZ{(>- zJhmM9Y%$+O+)!*PE#W(ms4{WA-)}B4`aUq`_*&GXo|obnY>QN#`Bf1om_b)4;K2dN zmRXMLAncwiaT8=!Nqw^BvomZ0Fb!<}wOA8owbzf9`V_Lt=OEC-Eg}bt*e2z&^)T)90Q?T&6Ur z5@Yk;7qKMU5>Q!h>2$G@jkTDU7s(&akt$Fo4I2WA-^Xd!c={-&Mif-R^*+gcQa|M- zzcH1FP*kjn6c=^L2w6<>W&7%zaRSfF2X}JDa zXQs06*Y_*DlPecpqCh)8k_7il4*IlcHf@A^AxXE7uuW&q(tB1DtZpW{>+Mmzt-#zP zD3-bTo~unb-gQBIzFScZQ0&n6)A3wV(^G^uM^?=(!Uh<`wyUtcdbpM0u*|QrJ*FM% zW7_1P5+Gehq$58=SI>7JXwJAm`{I@o7`X0T88atam+5>KB~_6f&tiI7)`-mbE?IdY z*w|fD4R{pvGA2$7AFXnh-klpIVYYhjbU?xP0LVdKRY+HElHC1y#UqSj*3;Pzj~wHX zGwLMd)M(j%u_+tWqp93_RABK6I3sT0G|L-5E26Nwq4R1a#oc@7+>VoI!Sg7BAwG1Z ze#Ylby%)Asf`|+5o2oofqPV#cAHuVda@#=Jzy~+vn66Q=zZhhfq3(CMV(sd{(hK4a z{(La6&m!g;6iLa6W-0|7DJ#FasThy=2tU{-arQ}q{aD6gQX&y({tJ7B);Wr(HP7!&O z^XnjgQYfLI{`ygKMKwXcx_9o|)@9*j?A9iv+2VjF$yJ%`;EnxDU5lx_34B+t!6feY z=rbC=5axpevbz*8@s=yk`8_^1);I^{t)CaYD7+(?7inqbNk||C6~Rr5F^C{9hpp20 zMFol(GCiz^`Xu9|X41abv#VI53ix^Ji%819N1IavC}XRnxN4zSI$a|OGyXJFzO&@J zz$%S-VRKOSKBbi1+G4T!TQYq!J|`UBGSheKw>>q< z8vx7?D;z)JwjQmCfkdgC>wPt{W2eGF5L4nkGZLq_roSgD#?%X1vC{IQT)0yG2{mpx zPk!t2_iN-L!}fXVwC@;1)S)876e*8>`w247w<|c=jEhqSKkb$^O}Ttoi}BNfKC2^j z!1qpC8VP&SRknK|z^|zq-XpP?dy>jxA)kE2nXt#w@~(r6A{&vK4+K$xu1YBijMcjD zh7D3UKLB+(;pyU0*ge6@B_g7sNsZCH_LcA&+-ux;#G~cZ`XJaxNa(`Gj3aOT1;xPy z^p!8#AolB5|0Gy`c08f)1*`4_-Zrd`*r0Cr795k7c;)tGgxM!c>O|4#B!%eivr5cG zbeIMg7WPK-m_~_kd$2(Gtbw%9#8eHz@H}4(cQ2IIjAjDM*OF?Dqw3b*C@46ygLA2| zT#D$Kp8`qCU{NFKHtslZHI{#PW_R)T6!@{znTT07IoSV+WkJ5~^5A7|q^(V?>wd7; zWMAl@PFjPVlm!I6bcaBY6!wH(tyiOk2$U+8gSH~M2S0z{;9Nxbvc+Sh7>x;o zsvMGK-E|4SE|t16%>cvAiW`*d6THQ?Cjr*v@h!nGA>+xrSO6=vFYw|e@22{fXmj+o 
zk4_?{2-%|4vuqy09!%U1=kqX;9BeAd$@JRgHumtbT%hy1GxM7Fpy2EBg#;x#;7KS50>WGMKOWhC}(Rs8LkyjY$3{N!P?`ini)4O_I)OWm7&4r4KH3M8+25?jaX z95w-5PL43;=UpiL-UNCEXFHsmPJLhLl_%Yj9JW7CGxco3`EWz20%u$KF(Wpt;L!jx zlGM?Ke`1v=aQ|`F;66Nf^6ayH7dljkjuO<72Wkl`yZ%q3U++Tbr4&DjO>G*ebiAN_ zvgUomUMCm+Bs^6EGim-6VKF_(R+}GIJvz*hE$3#(z(_a)kXXhPqm1qbCzH;2! z32Z-;d;*%9A=-%)KdCVjWg;)CVI8< z)tt(F9XD#ULzUB6?XCWB)D|D-r{JD62x2YR2c_4ps_KY_E`@!b(Iuir*8IpZL`bCE zY{^RNtfBmPXuNInfFYog7+mNHLyVk%KPT!Ka&(_>pXKM7msizhUZdU zMMN=RJq)>r8Z{=1?{e_`-1~ub$2#ben?Cs`C5Suy>B`d!K@0c&CEk}V3&caWUI8Cd z*u~G2)gV?OZ=->o(_zBQz|in~LXtB$m#b?Y4pJm^2Ge*?&+#wtnGZ0AK+oIgq}e)} zywh~>i-Qhbb+)8K%TPu$ITc)P1;||X4=j)jMbwcHF;F>((t{fXxuGWt=siR;sbF$d zUpu!cwLKR&K^5}{1BXZlR-wdyRArM{&i?d9sNGTXL{#JuDc~1@mmhoFp-7t|@dciL zurWIZPs$vOM}OafL+sUW1iYYsDuc|Mr^OxZ0+DFMh5=V!SKz|CCXArlax5I#S+XFm>+6jQ#y{ zR~tv#aXQE#|0#LtU)BAAW4?0<%lTDLf-VM<=K3&u5%8jCzwlH!UQd1Z{=JeCNowIc zFj@4ZSD38>e7**$FadEI``2_J$NbNK!;1*_6s?d3${!gA<*Wad733!?rb3Pnj)4rGO9@M)ngv_(Pn}rvJF-cl}RAG`G?<|Gl0|I?yR)J2Z+EAKlv*La3pNR7XHa zw(h!ZU7@tDy%E~j&EhC7>15-o`TA3y5PRG^>gAw_ZcJOsZR+1~wkZa@OB zO;XJGyW7MZZh86eqa@johgb&ysOkFH0n-28u$OVOoBgj$AX1s!^oekU`nP^a5VAk; z-wU~n+428d%MkAWs}n0&$0*?Xj6U$X8CFOBoKIPW$+sE8!J=Y;u|~W5ZD(3BvkMWmFP~^tnpn+*}NL4Y8j; z)t@m!JAA@X8fO$I_#u}5<_FdkY7DJN4#*^ z2kngI+RdMz$|U5ekr%Gvvb1jDM8dWOLagnhe{=>8&)fVCc)si!+YomGg5%MFb02T@ zZn7$?SR69BQD{~!3)G8Ts;qa%&mJf6Lf1jbY|G&J;9AnZ(lBMmL=GZAcLFYs*!0$! za-^w#>JAz>4S;k@&cvjd%TQr}koE#+4U%I>H%!svM#8Y0{4 z!&ywm(S7@#7ygc$6*n%u2Vx>07nZY}hCE2KMNkdJbF(5hgQBWO_vJrR~l2f`IXN*<^!e$2$SiuRg=q zYYB*re*_A&27k?}F2i1(j*r>)6uw6kdPj#vLv7DlP}~arhYOeGEudm$Wz9RXw6;!q z_NkLwbR$q;DsK9|G;Him^ixt2+?QZu2h! z6IotOXnsVs`R%DzzD=eR6$Lo9B%Kql{iZK`M+c!jF{1GwaXzEx08x!cJ+VAMvc~6kABm@2cjgD3QF{|m98}!N!1|fn&$M6z4If(0 z1m)av?|CYWAn6b5;GQ))J-^*Xw)+b4zmpJ2M+$3Suj9of@Yd#c{+hR_11*J(iO`z* zEwpL}qEM4P-Ni0Ig1ih#y&YSS4n_?5hpKAsw@;*TxG%(lct{RD5E-bD+#|>#gP{RW z=Wnab_CBq^Qt-`jcwjq+WkUg408hRi}3 zGUrXyyd#e(bDo|#Q&5^9Ihwjoaxc%LxBB3C3tNWui0NTkNrZ?BsEbUdJM?XhhK;L7 zw$N-GeXuB5*HkN=;ej8gI+7TIG2}?1Ox4w?OVug7&F1-fR`*NmYQG~~N6U>+sE}pv z!R_ku`QvrZ#3UZg^j|}$Y$QdE->^|FXa`w0N#hr7{A-sldit^vq(-N$XMN#<=WjHX z>g{QMRAIY02u$AZSwB@9(GY4+_fvKtcWvXUR6OCl?0b5+(*Y=cU;jqW!g#e1A6|q% z@9SKHC39gF%fcsbwfKZ((C8Botu3EZZ3GF3q8!8$QEGgyfr8}z05F0K*PP~c@_8_J z>pbV)9J{@;sqSzrF^kg{?`<*N_?197}xDuG#2(ITDR|ce%a_UImFK zPd)n1yOVVvK1sc8p?whfp$*j}Qzh_<^R{D_2F1ql<^y)24z%ro0 z)|yEhw7X8zRrl@r-L4q#Uq}`r@n>Orti2+qv3W&(k z8bmiHt*VQ5TzxcvPzV`##NQ;l0wOj~m~$GzSYCpjc={czQx~s*=+g)pPt)}GdcZNU zg@Z4}zKgX~SQuLMjPQcaGy1bXhzm@N^dZ8l)NLm`e=^FI?bze_BcQmSqJ~2OT6Xsj zsFtplc}JH;iC>_R1E2t>e!DNPUFmblXRESw&3aX%jqs`0CKq76dv0jib@1>5=ZO1P z|CU?59wUVIg|#;mP6t@0o%4#PAavGT=lQ)L z{?AyXn`(f+x@})Plnkc|GLxRWykOe6?qMB0AdqdauF$M~ZD_ld8BEIvpytZ?z=KB+ z=>*Z!5Pp6jk?j}~qQP`agLsP>Oytgxtd{bjUw+LU-ZLW2_FLe}=CK{WY{lNfyi!w5 zeprYt+LYw5My14n1H@O?9k1J(Rj7@zC41y_cjnth3)-!NU7iGEZpQs?kzjm1xSBdQ zaMqpZ+kGIfdbfKLY|zTIY4blxJc&bmfw6J-gcEuhkDr8e))p*mu(;NpZqPXU>-C9G zNrSvK@>a+T;ACb)x%Er0w>?qfwAx8^{UG!o8%6)rv&8w45CLYTIS`u1nn*q~^YrY+*mTB2XVlwXzQc5aN3l4}@Lc;e7RH>R&VB+Ldz% z?C$^F5!qdMvVk!J30F_|J_e)jm$d`h!I-g#SRTyNh8q zDrM3Q>~Gn~0Q--2+B|80m68?vm2JgN}(!C-WEQndJgAzt1)|NjZP2|4jm z{sYq}|4&a4l4jBxnPKH7zTof%*U4JgQPw!~QxA^^lyahcBY_^M{CIz2#^~#ui)g?A zWV!hPlg%NhubC2(R|KTz`llO{YBO3k?Sbtcx0@A>YZv#&NlJJfki_NY7rmFvP*MS_qft7kvYC3mZ`9iVicFb)EK@MJ)?^$SP^km?j25^J8(rwk_vkXlANbRsjJ|_4 z)rS|7-v$Nv*eo0d5mcP4w5}g+DV_izfCAq+{&U4(aBdzJNv1C=iXYEEBbZlx9M0C! 
z6^czKv?ge``DVtP+q(6H2axZbV#FCH6oie_MR@SlIW2~p_=tj%AZxG90-<2AnrV8K zl$k5H*F^U=MiRJ?hgcMtC5b7VwBb-G*mtmcWb2p~Q~1k|z=}RQd;i_Go$DP4`|pgg zw=gyBiX`F{T5Gl2f+Hj()CNhO68s5gY##ZfVeGgI@7$D-Co`ku_S`tv6^7V^^7{$& zaH+_twqe#cAh4u|tOzI!f&fU-G~qU`*_WgAQ`_AIcw;V*p(;q5wp(#l-NoO||}S>*QKw1zz5{*r`kw&@CXX(K>! zg+W$XaWa2}4$9B_)R*tI=oyeLTGfID3IC{p!R&ZCMYt=s!S1TnSGUn?7l<1jJx`|V z(wRg%g)TJ!%Yx#bn%)s$HlbbByKx=BlZzM1xBY9Y)M*T;)U#KllR9C}QGK1PtLS~i zMKR1z{h?*02zs32i)~k((!~{THeG+@%JgxFv?zo*J~6H3&ci4iOh&nPSU3^$VE9ZU z3H^m>sZUG^-tt7#6iY(G1@eclTfO7H6WT~se*PEjR~09tnq)T?wJ#GPJX(TF4Mh67 zCqC*8m31VugRynue-#+@-kJv(b^kI35?YXUg16ms{SC@Sf5V+!p@ZtbQkCn|0CMe%6C0)Vn#Us#1NiW|CDpvXz9Nb4nGTGgYBQ;ufX6VP(= zLVmu*iAd~cA9q3bS1FwplkSjUV}R!hwWjPN-|*mlC^D^RK3T04uDS~4r z-_u-R$!I8-!Bnckg#duBI^X*+!qt@%{0Uv9V+v(p#+|x=1kLm@1VO2yL?YLX^mF*E zDU*WqVzq(MsA9kzcOY({!IZm>nc$Cb?ARZet-8iDIRd>ALpgIU__KJ_!x+g_b$Gw% zYiDz0InkGpewhFQp1vi@W@d+6^ce(ZX>eT14}q8ozAd9pRE{OcO@JhzKhW%O=Pdoy z{f~vry<6()60_aHYsWi_7j1gsWogEPC~k76CD)bn=tgpr2kNW!JFH}pXXt}8LA#6m z&duRh8$jyq>btw|f(nCadSL-4C#Td83a}HE4IV4nn&F&4IN!S79!H278wCY-?-kWR zjEg(IB~DzH7Fr6fb)eM`k~EXPgUnv$X#8BPY*jVLhczAB%}ujPec!D?P4@_;sL>KKKwj(x?-8aesPWm8H(r5dRvojb4wcLHrNnRWS}hR;PBxn+7E{x&I<1Y6|$??T=5p6@!@~h*sI2e!Y02m}{*ks{1PM zV%16hu1^T%Dn^Ng;xM&lM^xuhMTh_I;Yc(HS@}OU+Us{(1(waHA#k&Vpt%;)Xsi*$I% z;xtz)k#50%i9GzBp@zY-7NV&I>nN#{p*C?@6`j>YRkrP~;b662A{`2#07cmXcx z{*F{0Vn{vBQHMUWe^_Gwqt5qBkZOoCX;pOTf7+$Hc0^2(IrsEGd|<$8Xw0|bBH6_s@~E56Z>qp|_n;@lS;j3QJEH^^fO_Gve?H%SeXfq7`~yY_zc`w~EwDO9IKG1r z$0?AFY07sa^v(AUVe`Gd?&{K8s1TeusfPcDd1PF0`4+a?P}uncQBpQzwYm3PKKiT7e7u^AH3ZI_*LDvbU6%pB_zhGkSX7^B*mj!B4WDouNiVUi z)sx>>7k$n1n5F`I>DAB{cro zuQY-I$hNXi?xk4#G;f!+hTwu(j0w zDb-Nh_|a=_J2!PDa;HtP z>AJA(Q0!VpbGz^A}!z0vi~Z~k6{Q` z)FSPx-d!YR1nHfx5H?s{o<8q*ved2@w0U5;wT-Q-4ZN><4y(R1&4y$!94>0hie zKw&lSdRy|#(7?@_j2)Enw$b;e99(#a!)aHAfxsV*KdD23l=+&pnn*?WGo){J`-r< ziZfJpJy|JYf&mFrtm+E zZrbjrbUQqv6I;?|=|*zJ=WDpp_wWqKmaQzEqQad-ZOgRlV64S0_0n?t<#Hu-zXtBL z9VSnUs+L%6eJu)?r@<44ey^S4iZh2O^fgu2u_I*{C@(zs>@o@dow17Uu~hvi?%Vq# zpJ%992dT!<)~Bg??KCPJBSVBIfh(epI5qaW8JFW(K%GA~P zi3VBZi6SIv$j*EdQ0bR=JPMR`X~Zc&nE@s5Yb=K#61IsRPn8!uqeM+qt$v|?XYoQs z_z_&b-@g6j(%xBwB+l@@XbUlDUY`0pvxNkG99(~_?>$`C98}J9R8+(j#g4q~>(s7p zaqnmJx8>KGx|}0UaN8c+M)o;d-|v{xXfZV-&xVUe`Qub-Y%px3yn_gK0ZX5nIZ#yx z_W97?s&wPCb$o*0Zz(%B=hlTwae5Q;_v!2_+(1GuC;o1l*d*b#fCq*P-44;Uo%b9U zNCm<|C?(FjTe5P&xz9q!N)$kIqM z2VZ>8ITyg?u5|w*ZZi?K7suQD-~f3%RgM`2Si>xFyLS5lVAc0ZlSTW-E#H2W#SM?{ z#C5+Y&1L1yx~`o2w{jks4}0ao;zK)2ji3D+<+HXs z<&C1ZqAn{*RY;d9)bJ$D#^K}RPS>I{aZ_T^yxPDuUG-k&POSDiDA z>yPW|NYi00n1puuBnE8&L98T6D>(tiC4QW6DJSJZV;eOweO?UPRt; zM2`PK+*@|V@qAIELkN=K1lIt;-Q5!0U4y$@a1HK|;7)=E8QfiiyACcv2Munw`RDgM z_kM%RTJvJoG*#WzbxxhL_dZoH>ESsx&Jf{zINBK}RhU}!X~*T;+}vpNp?v z%azHD{>|&=mJpGJx{ehQXo#XC)Peiuw^usZby|qQo&xH0|G@_5v%bct7`&7s36>H& zh8^hCazTX^Q|zR@)&;GGba)ptG~=4>6Z1*+TOMCOKjDypqo7G%?-}t$*pO8ZzZsZ? 
ziGts@wgUFlgL*-~`(v^+_&p>koHNYbf%(S7L(sST}j&luQpT0%vt4^3E+x~7Y>+QrZp0yF5Y(d*{eUOYt(JB z0NFd1_Y=ym=~&c82evZF(V+1J7L?-O7d%(L=-!i{1xZV!m1BfdN7RJ*MI&n{cD?oR zd^4T7f}bu7@R&v-Og}R<%MiHmz%nB29(N=+~|@Q45_B ziE*52mmxI$#L3p>^6GnqFbn~>egFv7*6+x=gsn1IwN~isH_og-&|J0Rw;p4jqi!|+ z0Hk;s*CG&~v^vi^P1Kf^Hn1L_q0Rh9igsw_Ob^b)RG2=~$zw@29aMe=*(K`+eB>VS z;B<6P43(~cUO}?aHXhVp=J?F(sE>pMgZ0LhP_$SdVEVcl{?ZXh`EYMV)9b5CIT)m& zPKMiySl6a}gNp3&_z6AP?p;ONXQ-@*9eeN9KwG)tM!}YKf4RR`%oF#ZvQft*AG3@!t%Of1=9zV&F8?XprV&%g&&ZOy4Dh|myat~w ze!{>@GnZ|_rAf_x@Jj4!i3VyqY3S|5Ho840nwyF7l%Tf4?`A7^s6Rqb5j)0+aaQAi znA%fnHu3Fn{7i#N8hrVd2L)Y$*%*z4(N8@;?@Y6>eBUsbSuQffU)-P);HI;cgX%3_ z76102IA_QFR*I2q_B;MBvaEENkt=GgYxfnk)Sok^1Y<`(8+)XD7N#vwFqLA-gpJ#l0dg$mq8FLZu;67hp93$rELsgxTh6Ux>sc=0(c zb$`6dU9Qn=AB_j!3)}-v>=j2uH#(`FfDfb+bf5rOS}eu8l5{UD68^Ga4)uWuH@ToTr*&yx z#{cjO7^0;eU3MFR4v$Wtq?MZ_C>u7n`kU0|*im-<5KZ5sUjEGEY}J9JEhK5&qRCAG zPI%r7v)aydL4p#_Pw!Cl?7r&NtmF%%!t|39u8(E?A-CXk5qGRvsd4PGoonNf`}U&l zEAXd;s{*WPfUoS`eazfX$ZgxoNWB@=)tk0FxuJ3{RP63`SUqr06Hk}3j2(3C`-0H? z>TReifK5LOoVO96@78*{Nu!1XNljegJ+f=1SNXu`3&4?k&JulgY?L*&YCCj+5!3qr z!W?u1>TgY~qiKz2k34VUGaO=08Z{9&nyJO!c^h$Ig$`3F*+@Gt zJOz;uqhJDXmpOXD$eb5{(0J8{qNy}O8~@?%jVeUR%BgdY__Ot|FkT8`GZ7J!U?}cV zeEgqX%&kBGfJDM2JCm4Mir#!@wzM54(D+?VLj>*&Hv}~F0`B~{z{3d?2`$9rEin|p zlnTt*U|SlxP~Rf@ZXr?^12Z7JZ|yD; z?|=zz2_;O)O9ly1_K@2f+>txdPA1 zvX^r7gIAV%!*1`ss}*`vmHgGA29o2C0JbkF(TpyZtwxWUH|o$%Oh5}H?`AG+7%|9X z3wzAE1h-R3^?BugOaJ!{$O-g5qK29&wDlD2rX)-yP#_HB!iyXg6i9sQPuo^XRNlIS z)7qgn%8&Z!EJz(^T1+@gv$memIL(e>06N5C{Yv^Df-|{?nf&_hAMUer5h~s1ccEc< z+=w!`sY(^rJ=-?BOG_+q?L;= zvAul5+Q%i66y8-jm#~c zqTu{j?l2oy3;xr(-LaNuI-%Zz_qb#${ti!T7vI)W{O{0pa-3xjp(X;9D}}pDXT!pS z^4vtPKIa^9-Ma5<@wZ?3?@Uu;#7F>u&E(!U<)9Z3-xgY&trPjc`feFm@S%?JioN&N zD<3>haUX~dMo$n|dxr^!go=JQzZow9LOwGR^li3-*!v&OoyHnHEOl7w6!_t<4Sfr+ z-asIef559@c|;V%iNx5oGy+tg*=vdEB?b<@A$?bkA9K4wOSe4k>>MvLTHl?XaNISh zdLL{!xAp8zu=FR6^!$P86C$eT9Xvs=9f6iJJ+N2bP&N4m`$P3df_$V^!HajSrybNG zU@0+-en99IBh$agPz9rMG-H3oslITJ4r)MtkJn?;;6T)Qgju-Q7lrgH#C<3Zbj z0-PDMo5I$6S3!=teKGSjJzsX!S#d5{UUZ*QR8lk)A7a>k$h+IjDyp2^bJ<% zLUpyy$%*s1cR@Tt3khVF+{iyfu`b_Qq!uTtz& zwW)UX2i-z3Q17h?Q!0!(=u{zMZTw=L>}#6mntKJv@22g9W3IMm*I}SO80xUl#x?3( zv#39K-(&6A3YuzHFXUa*Pim!_I>iWeSH5d}?+B zA*Pr0e9n%GSrOe@iEku6TtV#GYvounpxR9wblZA zU0GAom-xqV=ptGOix483wQ?36WbUs|HoP6K5T~rv8k_?3JGBL-;M10}sUTJ-lFT)S zJpMHackw6v;}DpRLK1o6rD!_6l#lWniYhDScJs}_16k=6J`riU={v3~sH02qsgDH$ z!XzSdE}AfFx53AO{rWt9*!;(8)iHnVq8)^xLWsGVEe~-?xB1ZLn*d?`lf{HetK=wm zV!2|A?GLsHLSIiFsA)JM4%1E#Z?myAHV>uWjm(XEc=_BH3e~Tw#}vfQq z{NhPp_7l4qVR{3tQ2~daX4wIp)+nOSiA${(( z`I3~3C@3YA!b%%jCD?k6nqUQVchH{oB&6t#YPI`qSZyp%ZCB>ua}?Nn>MXv3n0C&{ zd^gYhKA-Q8N^RHQ)){b2;aKohrVrnop^~D+4psJj^KJf?h&wbPPQK>xn_9gyL+P^x zi$mo3<-HW2z5F;u&!es1>W5=+^qjJ!Kk7m9d|A%^?T?i(s(X$zA67vy<^i`}O7A%v zqwv->auC$et2J%1mV1^{Q#B`-M0Wqmz1utCkIuwhRPUYnoe%?Q(a!^LcjNH^JQg<6#v$wQ-=^z0K0!hqzUl zRiTIr;6Qd>oJ6}~fiH&@Y0R^3(%+3qt>VgqeX#_Nrcc;=&W)u9j7}>RnbvHD_u@){ zTS`7VlSg_YwrvU*BcJwTVF%_Is^>^01*)!uo)N=rU4VtsSma(B!W~ck+x*Dg4)^2rXYah!ECQt0i93+jc8>pr3pvbEDfDQ2SVR(Txj2$AcpxK6Co< z>ne9PUG#E}t1+G7fbXqISF~Rj&P+2yF^(PL>rYmYas$6OK6nKhyu_NC{GEEltp}cNWVOldTO;JWw?~2z%-rFwXV@w!;i&HUAWX`qC5ezjU$FJ-0 zAvX6HQaDVVuKiKFT&Nw!hPdRSIEp?AuBlk<^6NAu#LGU>iP#mQn>+#u1P$rW+9v$c z34c<4J$)26!^WF3GNiaHiL;QQnmHSK&wJ_lZVioHOY(Awsf1;p&dSZtGLATLVNzxH z>i~_7j7xjI0C=;YNFP_;6F)_G`Z~IHAR~0L%Ag+q{AL&ugKtH4?v zzF(T@mo$}rH^#v~e2xN=n2~h@_uh98C zE7;M7cO1dic9%1 zll2SAXKns1k@9bP`ultVl#5!vM`AP+d8z04ZX}RcA-%p7YPz}+i^O?cKUwU1&5K4b^aW_ z1iA#@eBOG@>q#iEFjGuZ*~IohI+R{-1lf4Ze-Zw`+@5!*v7BwCCl>YIy^2soY&fW( z7OTAn29*5wg3C3xt{wmQY-k2HIt3We)qpN1{vlL^NiTm&7V1ypx`x8rVS2}Io27?= 
z4a6WZA`U!`#iJlsn$$w@WFV`(J73@yjaT~Ldz*YaY9+ofJIm`H2$QSBl9^r4w@+8d zvGKQDHP9qef$#v=ixIArZ*W!7L|}NnKy*DE zsoh95g-sx>bloGU?Gd`KW5*`C7&}d6XWb4cH(`&x$*dGPc-mntX|TQjNE*|BPw@^L zKTB3(2hHTfKG(M2P6IE8dr)(GzkKVr5!%G=*K+2K%PSdQJ*wRSIs9x*X}LMdAg9vk zAvD*P!r6W+cK$2bqrJ1mo#Ws7;aPr1r824-{r3<&4dG&`x zXzy)UaJvS$w%nvvmb0}h=kQzF{?s_~tqp8haBp^@1YjxAO}jG-cwDT!)=P@aI~N+-ZUK`a=3$(~B=1UD_9v-g2&Dy%R0nctM{*om_m-lvM+C88JJT($Ev@bdXAJ*m zonJOEyI(yjZTCP|MZX;M->k;`d9XCy@d=6-|I)an?%vUV)Od zu*)B*k$-O{T#`h4btxzB?wuqW@HAFLp;!-V%*jAGi~ghSeuafEa}~cqPI>Zp0E73i z$5xm!VT^V>0b!liL79DXXsB)Bd3&V)V6vVeV^473-N;I59z z*{j5R`QIlpsHGYtFwWK?61mB`zu{aa9fWYttVBPWYn!TjPw>^V2eFb|?*9IAJmh)b zFK`pSe#ZMSg`9sRf`HH-G+(Kj3cBP#aZCt1bfA`=>n#U)k@mgWaGiIsc@;Hs< zH0>2{stf%1p2}S9qQ1rHiiyZH?p(foPyc5<_!*=D@R%Xv{Cb!5&RCTA%~W%#;|Ftj zJFO@xpTLuYC0u!fzx|&P+oJ|ek_UI>Pf(Kw?YuVzcIErw5~^$3J&t^*&!+6bRpf4p zKd9&xzI&14-`{10c(}0$Ni4U(;-1;U+nA1-3Zv_Vr2>^FNO`@n*<;EJ)@2LK9UpjN ze-0eUE=<0k`#)L$=~gtny1;qj%f5Wp+Hy zQg55pm)GDmM>Q5QL1pA(Jxnw`3-m{79gb01erlj%o*B5D_spfq8RXQ3y{IX9Fp$J2 zZC5+Xx-s?oRJd94%JTlmM^`-_6?>tamHZW2e+;nmW_Ud`Ad_*ux2s3P_T3T;C@>F& zT%rJ9=E$<7#yI7@9dDJwzO~t0s5SDOgL=o&o7lWPS{Nl2B&TbIg*)VGCYfJk#`N`X zpeo)fM>bi(JL&S8KFyF}!>48@kDX|=mQs`$PK{el8;o*PLUXUU86|hvdfF}y8^Tzk zY7on!qJ)*uLDQz1fefGKwr!kRQKk{U%4=2&U!g+2KjxiNGq02vXe|gL&<|&5n_Y$= z*_C$FStmO1^ZZrQIb4K8w*L8O$8_AF`0SEh%&AOfnOmsdY4JcMN;xV9-mtAY<)2;o z!b3}`WAY~Es%R*=n=3MTzVzeJ(L;O%tyM$O{v~sbYntb6T8D+2EpC-f12%~#zlC#a z8p-`ojO>i7l3ICw+;{f9r7<8xrp-hk9798Kd|8 z#U%d2Wq+*qYP+*(z#7j-IvZD7Te4xktS>ffjNIua=_vDS+t-J#v%=)r!R54`p&q1MU z{-gy65*z4qWEv(Kxr@!K>7pj#$;o9}3^=++66llN{JwV_Ty{&f3!}hBv0$J-fTpx=cVfUa$UMVa9yn&t9Z))gp!sUr~C^KhTPKslxcv98SDQeB{uv2XpD8Z za0KwK?DsT1Zkjp__N*iJB;2k(*rLIme(Xoe>}o^1J-b=-v`zR*+U|kB=5&Gq9vncMM5c&!C}7V}%gq0MPOkbd0?oz80rG#Z&x zww`V&3%Kl>1E1x3TRsIJyyU0ssPzHo_ecBp=8Yf!*X9?!ruOzAJ>leS(A}i$b7)+c zLziat)kb|%5eT$anWfmV9uHezzu{@Waui*wJ-I-vb7U^nzha3+gr|MvhW@PtGPer@ z@_X1c^Vurxi&`f5r=Qxf(r53X@_EL{fkz|n0MY2Y$;`YVYQH?VW?jA;{fmUgYsmi@ z$-#SPd4wAmr@Eqv>sZ1BX`RnB%q&zK=xAf{c%c0kuN^+)_mC&`%@r8oV+jnvhrSb_ zFZ*;vNO;yAC+XCnn9gKd)u~$puY%iu;ei8+huzH{eqqZIh%2IfV=;5-9|Bup zm~8juNXXZG=PZj)2QFnVrUhE_-FtiNTZU|a5U>T17VhfVWC&w|D)8wuf9fo~E?U)| zo7EAATwD3qCHJd}mkJlSxn6-{Z>dhVaG6WB4c}729(e=8yBIKzNt9BsgP?12+CN?1 z@N4#-KLc`XJnss8e!MCyE z>pr&Z*`low<0Ha=DU|aw<}%S9Lfo@E93*2-n@uD>kY%%UJHMl{CeB#4nnltptwVHg z+q}X*oF4k~^5=3sQ)i&+Y}jmn+Zrr9es9crSQx~(BvH^hzZ=Fv4Sp3$%4s8e@CgY* zcGCrNyEg;cxUhLr^5o6xkBghWHOjC|MIXF5xNgw;obI`KGwnYZ(lYc%jDfmZ#~hF&|}gllCv|gMnFCW)(KR z7HcQ-xa2wG{1sRw2wK9nATmrYFP7^YVB!A2`Ews|GburmDeLzjkE+zNr;lnas>nfZp*4T%r-scj&C)Ag98e($A`8O z%SC%mcGoZ2Q$Sc7vWFF$_Pf%p8KT5XA`5m6z5YQSjZc4~A^+(CT}%|%dZ?bym{~7Z znPEUix>(`a5~5^EN{FA4pCsi%?Zri}c+MT3PuRh|Yp5w6So%3|fcvubcE-DMqi)Tb z_@am0rMHIM_;8vGvVK3b`+BL~*xwVkH7xPA{?$&M^%)78WAAQC3;IV zJ0iAxge#el(0n6d4Cf;{!o!~Op(=HqZ*So`3%CPH z@RU;7W0xavl}+zYemD4d|H0OVtbY#@PCn2dvUE&uc%pO)xT^Q zB{ISypB4(wP*8Bn+i*}l7_C1oC$8Oudv7y^QV1SQgg{|9AgW69eSy}Kp(62~h8**A zA~$kav0vE>U3?x1GccB4ed%zrBG|F-ZR=KFel)poe)lil#O*{HLb6co-k|SoOIvnz z?kPVNsz>Gb(cBy+yaA(dm+DdcXLe4be0a&~wxUlt&x+ggP4R7Pq;TS$2t6u;8`AsA zYFBIKxQH1z7zn<89(V2uV_MO z5XK}>y+L>*MWt@+l`Z(k)&;uTKt>b5V+a#ZRVs%=bn%gfIRV0g7)Lr0x?OzV1IAj~R zGOPL{wPMcB&*r%ghL;@aTVpwPR%u;3)C4Y4wHN=`#R?k)S&%?zP*(rb?#?Iiz6llA ze_(Rl5g2Q|cyq3?ZTxuR`{vzbgA)1^tZ9sIz(TXCi*n4%v}*nQ#hGZJVCYuK zTZS#{OV;#+*g1QL@UiBNR7L`EF0{Uz!a+6GODyrI{?hX$_&_PjdSD5ZG3tBO@UJ*5 zytdl7O3RiDPHe!S*K|DRmFHrko7r3O!?*Fi!=#1>J#iQV!AgTl~J)sF{i&!J_Dx(pF8s%X{gf(w-JwBlu|EK!vErtVXTgz(Kt&EKB$>_ z7Cw&eJzQ_MvZ)zN2)#@EV^jI>`=#foJ%_mW0_!3gRc3>5-G5i`BEtWS*&SY-j)K|N z#sAkztr}oMIQahfs`KQ(>wMsv3~u0^?XE!}XRc@7!jByv^<&uO-ynUOuS1_c>6iK! 
zU5Sa!cWAr6M2GKGzx=aB9MqS)N&Av8k{#yuU!fTW2BY-<-9G?hvbqakfBySpB>eyN zhbT;B?~|#bC}qa~dqXZR1jvWaG_TH}if1zj38B+#Eq`eVo;M86RtqAU%98(krQtLb zz=fr4w7Bl&BNFqA(yh*v=^T%1#1Fhr#ioM5XXUeotk`@NnxFYVNd;Z7FkjVz{J%Wt@h5sB1KNQg1KRPj2{HuRK=tbd{`@qdV~M== z&fi3}&vz`*Z7x;qTlO4tEg0!hRHI+N)dxSOZ*VlT%Ds-=IHwox6^4pP&S|R!u zTI;|&@BWO}WC}YQ+F?1?Ns4NBQ87IFJL_StwN>-{c7QZxD^6xk5xfqhkr4VFY9NTv zn%v`w%%(z8GM0>UVA4?(i`qFFKRS5Eg$HVsTb|%OxmW5vQ2sUllW;ZDG*nPWCD_0q zRb=;OAoXn+D>mwu!MNi3dWoX)b3)2#P6u0%h}L&t<#hcq;%SI)7KE|V$b>8u`CF12 zIBfoPlZ`A0RD@dH@&R}Wlg{VmMPr+jpKyWsE>=4-BF$o}DvJ7ai^RR<-2nc$GI|>> z{*#9tLt=L!?56C}&1YwB5xyHM3H?yu6j^j_MRC5M8s^x^>?iR6zN7fWb7nmb3FrpLe|=bT>nG<+fbH%aJ^IOJmly__}ndz1NJOC5Q^rn=fAo z5o6bz*HeL@Oy`dTUryR}^tv7-*GrqYOZm#%DXMVGkKSdtZg4F3J7>(gx*Cc>+~l>J zq2R^-TApswtj>@wNf3zJAKI~A-@zY|IZKFfyVqOWENdM%eba>bDwOD;swm;hGIKs~ zt1h#3x_o21KM9?RA!G)mQ;8*uZUDsvcib%y-Bz>W@Ma*`o*3t{@pr)DS+vnn*4!n- zirb8$W<^H7tHJo6;|Fb?p?lL!PO+BpKj#D{_W6uw>>+L+|a*oy~W~A6YH_ z;)^{uF>H9?Ij8z%57!D=5o2CO>^GIM0;4jy%Vj1wWB2R!DDg|S%s}H)-OR54ABf;a zO`Rxz9mMTyyoOhRxqoHB#*Hz1-}`qa%01iTW#);CCtr5E1FRp1Grq9T+4(Y#oLFLa zCQ5Ec$$JF1M#^Y%tOSqru-WRLh|Y6N4lHf24Jl zLj*T`AKogC%U{F9POmqnTGPDTu0JInV%~pt)_bPK&^;l%F`iC`hQ8^wRP>+U6*H4airzDfV1 z`D=y5kRcrXBVzR8gjWaVO=agQ!$C485piXT)buZL!dG4B09!OO50eW7m$#p!7tIlU z`7b*%;UqRL8cZ_cwfukHFnd5U^>6xL9bi?ddcG?Q*4hnW1_cL-jvfXSl)xFQqyZjc zvzn$VPkgXgRa=R%U)VN=2z3ZT2g+HCdgL4ei@rp;FhM{e@2FcfN1e#gp{DR@_~=mB zC=mFPqguaGlD@EB(g9+Db^IBJjzkELem%fI6x%3YTcvq{L)j2D(A*fmj-XMVa&mHp zl$6k3`a{=G&!HHC5? z%nL(j>~Ih4JGqK*vse1HlbfRhxWbtDyH{4;gWV&c5Mqa|RWdXFqu>D`p@Y{(l)y)n z2BYvALZDyAo3QJ>xb>w!BaAv3Er<3#Zgj4C*@asF8kti%cluU2UOS9WiUdNJa`C~R z$(t!40Y6I1;v;qGfo(5(kM??xY<8N2WW_UFiymK@r79by-e!1=ydUK)752MKBN)7w z%5wkxwcp-->FDUDDpRYd>xqql{fvr-io0*p%yj@#Oahk1Um4sDo#gDlxnU79|HD)& zd9f%nu&t$jIB#A`HxS^7^Zk~WA_bRYxNQDcuvXE7F!Hz-;pd8_?Qx0#pC{{@8q5l# zts%bP^@^8B49#u7zM#unmAne>rzGj`x2`Q9Ny69r_^(X<*t{b+lGJtqcUg2`4HrWM zM_HB3;`}9;XvH@r&nMPt#CB>LHg*+V$E*w#N5-8{5)OmUJXxc>*c$5(RuvCc5UmcT z+Zyo9?yc4=YUdN{je=HD`v;bcbA}^G1iz3$w;bpq&wy2FtguQ<>{p7u*PE4<6_3pb z>WP(=7~rckx9?b>5jFGYu8Lj#{;Gy?az0)-Cp)3zj&i(7oB7k#KKh%nQnH7G9z_ey z!A8Q0c9p~=OM}E!kk6>!r6$dO&_|Gv|7L5A8nEJyqv57sIxx7V8xXzob4^4x0j~$< znr65Xgw}ltG+AA&qlHbjz71LhnyJ!JD}~5JTPWW?oZfkHc6Et=cOxt!H-que6OsLe zmUAIRxSgJ!-hD`#)wkB}_S%5tj`o=E3i+uAoHL-P=yf(&R~jyn*KD{LgM&AXnvMpV zEKP||N&tJ?@huve`-~~`Zk^KT416BcgW*_A80-kuG|u6wIhWo^Ju~(X`(9A|T)?4T zXqt&Y^5uhBl}7FTtbsuivq-bS<23P!@1ygIUZ*|1&Nd%vGX3H=l@%GBg8BInHj5fh z?l3Zh`ax~c2*|+H4B>dM>_|0{d!Beg2<^eSc9QE!NZW_LE8@}^3#>1V7~cn?u*&Ut zzI!kd0M|?_P_k=i`-4wtf>9A9er2AzMWPc)ZU~wV=#|WMRiaEiA@an^fLpjI)>Hl{ zG~`GuairD5W^Aa$BDv=5Unfjff%1jhqYvM7txUzT75GAoGL15OU)__LW!SJ6GgNY? 
z^s{U9BW%z*d~FkZn_U7iYF>PQexLgNmT48eBAr2XgO&hnv zbMcs@We)IhT^>q?#iTF;B)(j~0X!l7@H=qCUgp`B`n+GBOV15~$rqpC=8O%D9TS~F zIaWS@HpmG4N4&ov<2t`6!a18E58v96tFPTtGhgSbvPwG2Y=jL^FDc$@^r)zyC})U* zX=ugM&VyE!gazXLdJ_XJm&R3a7SM>rOV#h>3Nj^2)-pbPWzE%@F)x%(OCOz0uhz3e z#CxK_=*}&|GR9T{=&#*V4&e(tJx%%GX1%HUsa71KX#`T)VLK|>@qx?Uqqdbjo^*M^ zG=1Hz6Kcs`KihZP+34xlIE*#d_#!ULwVDTB8AQQI$4)^ec7s{NA_a9c(5<=4rbav8 z)o5KAvHY9Galg=)Vw)6Z*~P*XW`q7==8ou`$0Lga0T)bUe$S1!ZbBC}$oJ#MbVNaB zt{6`Pg3pBQz5|RmS0~HSd%c{|1~;$mI(%?D?$#61)Y$-(Q=Wf_?;UeJkhY)MR!tZR z=V!>tY(@16O9ihV|6aEdSu_{9rgXw1R(K=?mIT~YgjYD02S0>$%`zre4G z^vOPl=Y&3-`uV`D#xbwwjJ-Z>7>L`f&r{61<^FWjfToCV9;bJbD0)$M&b{Dhbc(UP z<{Wl>iG~?yK22F{z`oE$5Y)-0HXRB;z@sXUm!OEB%}f@>_jP_M#C9`3}fb%tMmvx=FKhpYm{Q5iLNZG z`Pp#^ju1EuPTw6mOun9#YXGM!E1^^ol_aTs5^Je|T>duVg=H}EHXkcS0cc07B?A?d zNdg1Oi$@M*#T=?>jh2c7A=w25_<>jP$2V*x7Cw9{%%6l{E#nBSR@r0ofIJ?daeg6WC-`F2d6i{M!PgrtvsR-S z+?&EHRV3dOsU6RkDS1NI###+W5ezZ}Vl~g1)l|48c0TD3Yn5c;!>iT0|B&;gu!QeL zZifftCezx#4ynQwZc#)azDC^n%*oOhg!%i!R+}x```L2sv1+_dl>)1rm2gxJwGwu} zoQJk7DE)uSs=cu6?JDxQazlvW4%{^r!U0M`bY?nlG-#uGO&Ka8uky~p)%_QddvX3< zqMGLjrE>L7GD{pcSt2STu1_D}yZtUE5pH5n4A3Lw@yDK}KSUp&Df>Xzyv{;?DIwqK$D~F!n>$Z}?+-KV;zCaDB*TB0kE)cagoA?j}rWx>PuF z&d!U&#+Kz}o`-4dDGWcbwMboqePAC@`e6Cd&7DnjF!lF)6LE{xu>z}O@OSHQlSe%1 zq4zq7@B?8)au{fbEj$In{!_2XcjS&KQ1Q&2yYg4l()xvf&~m^X>nHV7kcE%l&Lxq1 zPr(}^dlxTP;Rjl}`jl>Vp6ITs(c+3POM1x(C3(*aUS7D<@!tN{h{>G^%dbq#N#^;hXe}L znvs^n!_Pv0GCFe}){0L5`Vv-()1-?7V0kCRp(vJ@c6#>%-jzXnJHF}$t&iX6TY<}Y z|5T2)9i$w|;S*n+$%s-O&tuGwqz3W^#coPIc~LJ)iSsaDyJknl7FA!p`PsqwDC^f` ziL$h&$%U)=6~YG>!OgR1%gytgFiF9;F@BU$ML_icc%{TIpxAQWKX+beo@sj8bU-K% z%QS(@sv*K2VpXr}bKC1&2Vv|>`E@RMR>7F=i8(L*XlTae&|*%^L0a}@2hV;>=8>z! zIh_G)kP||}p$(vY!FSG z!?awj4kPBEy{G5rwps#m>fHuqAU8p-g)zBP4EkaCphM;wr0l_|{pzkv8H!8QgDlCu zCHlH7Lzyn6g!sKS)I3Ppl&_b(docwb!E7DsI_Ad%sTXndl+N9-mKhv2e7MLj4;5(4^+61F6g_;?Xk~Mn>8e zDcl>5{qIF14m@texh)meAEtyXOU_VpE2vbG4QPM~+5=E`yV zb*@}fZR0exJ)OZUd#tEjOmj^PD|UW&C!IY{Z{WMbEyrl|D5m8VpRRaeliY1P*MkgH zpSjQdlO04Dh$N5$%v?7@6F2Ban$9?ExgTmCz4e>___LrZ5ck7=-nx;+g*W(%lmBFeb{^h zRdy(!im{|@6E-B+4fK;Q{6Z=NyJef%?L`}A-dgb%gNMN^p6>8744%JIh4bdRQGvYk zYHZ!dj;hXhXIjdDg`rA?0BKH@LS&JSzJO_92q&?~AS;#LzODOQDpw6XG zji8aN$a{d*Nx?o4+ZqA3v5Vy)sFYW_Ly6!?J^d70K_r>cTp@8!V+a3*YoZ$Pr2#=?3pDSuYYX;{+LNJ z{wiGD1n61>%s0{O{0>#GjXkMLXkr%O7LSIt`D$>Byy@?Kp>a#NURYQaSf(b{`F$U? zMWDJZb6*83B~1|}oJhkPjThNk#^l{4g`@xGl{&nfX145ZrrB>8MZNO&Y`_y+_OE*& ztvke>aEJ5kmC9F4uQ&e!M0MR;wbp^cT#r@MIs9B?s}*mh?tkRfa^E5h?tfO48$094 zyLPMT9I-t}i?b=cy_Yjy8AVO;$*L4Bt0@^ijI9&rOR>27G}Cj*J?Y_R)pWY4i(Sml?)e7#3$CP3z$z+0{f9uEVMKLjN=OCyIhp%+cYJ&MN)F+d8# zIgaXT_EOs8S3GAD4}U^kK%FOWW2Vp%xOgTpx_fFu22P_1;o!`Wz@3^W7bk`&kN~>V1T|cD@^F3LVbFI@UebYrd3P)r zsjY3wl#YiA>a?czHH%dr8!TR4-l!F1@}G(Jdm}7A`+mMoN_@YNM6Y>zHbA=7$4>3^ z9Ogh4dCLps!mj8w&@Q_v9Y>3MaPK&r1wv>3I<7m?oHZ@H$^SS z5<84Jl5TGGI#-{+4F99unkQM=ZT&_*u-huYPf(M)EP)a@E#RKWy`TEUd3-P5UbS7& zJsIv5EnG1RI(FPs38mj)Ohn?~ZyPI4S z3J(Aqh+7lZLZ|9A2N99NdTzq5{WH}5Bw4<`;`T>9>2tAmWUTrR_5WFL6iUJ#ZOLji z038|}6t%Z!I={G3R#E~%X4Ng*u=*nQ)E}zfM{`HkoDB+~lsi%qbj5uXdZ5|vIjQ&@ zMVv8IwRQ2TnzO5{tc-!vn}G;*D~O)dWIgaq@aveoLhf-LF3ZEo6LOAj9X!aKBlJH$ zXVTcv9B)!!B6d0{D=Q{5lW2Qqhm4sS4Gs?OU~BH^)r{U)m%D<*8L2{u(IZdfk*8es z9W*J*<8MPp|DPW@ollUI2^VYRoKWgn2C&qJKu4bFRd$%q8IkG9d9QmFe4V97xnNVt z{^P%8N!feC|3rMpPR`0IhwsuT)A}m);U|0y@TA{TQo;rYrI!IF0%@0`@rzL8yao&y z%VMkN;dZb#vbXSbYJhaTU>?4b35&q-BJ)N@2XEx_ z0PE$H(oj+gczE#n+~S5ZU!m8tz5U_5 z+vs6136YLT+>zYjEg08g%fpF_s%DqE^NWxVwy?)|6lv>jh+Ut~!`YDTX+}g8KI8r2 z@b2l$9C_NWXqj~#9U4-Z|Mrh=vMe!TiMGt9>B>Divfcjkha4yBM{%)K3ZsUWR(N;! 
z$Cr*Z!TNnb$1fWb0^RrS?eRRsny>{(qbN@Y3WrE`GWD zj_-_x%X%t=H!96?)}#pu2?u#Bm|dat7`7c)UpwAM&PUd4=qAPdCLhSm7TS??QXnCz ze>(X+I;uXO^xuHc{%=4i-SE`{jXXU)fjR~n1_m)6B8?V_(4LKa!&IOPl<1HiqX$18 zWrUqWx?EsCc1(-GcIj z2PBm}qKrN4Tya*l(}GIiqQ!k7&zKrJe?Bd%!GnGe4o;Q}|6?g9jb9%AD|jm^zZ((K!S7A^*z^QKI%zoD7G%=w|(hL-~I!acz9^ zartT}QRN#H$2daZr1iW>TY^bf8hUzJ6dYRom%INxJp4&tpXBY^w@PYi!PxW~hC4$^ zT6Nagkx@}q=A#5mIv)E%AqJ36sdc$Qi5z3aL!LvoH@BSw-att|nc`}@+5`3rs=xZdqefS;_B0!Pg z$$0s05BhaBBccGBMj1o7TVYn77pn%03kw_D=;e0Wyj)e3Y*(6HwL1I+=fDPWu7`6* zz}Lq8!i-8cs16n`uF1=Fs!HMh{+zgLe1aiA_a4f}n+D$;@d$;h=6p65`Omy?Rx;sw zNIbWV1ZHQGoU@i$K8%iz&aQY? zS5~rY*;ZxPH@|rq4qycG;Co*jU+0pK+{lF<_1f)U%%_LTo<y*y2!?wpRRS3+~IdqFJk|Bryl8i36+xa`O;`NjFG$Gjul$FfzozP!hw?NW)Nf z5BOcbbJjX{-SfwtD^52kyZR zK8dC0#2d#9eNp{lgxu~_F%2rxZxm+IQ6Q7I0gmUdsVaZibb3s|!#Y&Lk=i~e5b%*t zG!TgHnEkgPz9bHFwFh4}I`i^cKHP&b0tY@b=J|j3#tX$I74|(tL2_l1qzBHCKzM?~ z^P1_G@4&In@Md+$!?TP#eX4+-XovfM6Bq;m^gH}*BJ9?hgm?@SvY<{>v= zm5`9gW66qmRAtn8@05FsJY94AWfZzS@<`d9N>cEt6fe2yv#Y6-jp_(6j1vF&`tZ}< z?{A%U#Cp<>=(?A5&kjaZ^*$|z2RLB%EJgQ6E(C%evjJpw+rc7d-@_!BoRpTK;dAfv6>t!I>B61gm#^vi`sC?oCi}0e6_t0o+!Wy>M_& z|8_!2w-s3(3zj{&(XE`%s2=X_!CufP6yy`nZzhjpQ*ge1P-7&$(PwRV4F|7%`1S|z z>O9~x-$6Nopa2zj>ib>pyNk%&>WtqH`un~Bp+fbmf!OylFU-S{)Rk^H*dkC4kI~fJ zoCdpJOOw+{2ndh>An_B}>iRZedlHHI_H=Qf8emtsA@jBrt$W3 zVjEXssN6-#SG9lB1S2ydqyvL$=@m|g$monIlQzX1%LDY ze*a_BrCWo)$IApF7jhaIy>2j|Tm7m+A5e}|;S@{uA_TGWjdaxJ947I>g+RJVAlE~w zynY={n=v4wqS2YEo#0W%agKK&*|>87|8TTz35dy%tn353SialUPj257OC!AJ?pged z35j`8N5yz82Ut0t z>5VnV`#(>%b6T>LIk~t#TO*2}li&1Z#nGy(2brO)zUU|5#0?p2B(pm?I{r*e4Qpr+ zU0hsTu6x70q4n5rnP8vh&v*v(lw7DVSXHn=v*=R&l71tMs?IkO-In|C2j_g^y9;iL zVD8QjaZatB!Nv`i18^i-L)s4T`#_201 zJu4r44&Z}{oLS)W|GLTcP)ww3mY}0vfMuF`L7t9v*qw;CS%>E)#`n0=Qu{*EdL z>rmiw0`}^un_i~nPXLc<$;EOM8<%{5-fa*8+RFDVllls{l-ct_{ znrylWE$?$Dzxj@bPfa`aT${A}^9#md8Xi(Vq`B}ip8FuyHGy@eQ~O?9Z8uA#pW3-= zO-IIG46BL=ioz9;!I$0F+nKhHc47QhO$kh!`Clat3i?b{*I+7#SIXg`|#F?B4?%re4OEW>K;+ada_>i3Q@7t)(N^xE|e({-dPM)-io{ z=Q>%)eR@IRk(Nm3_7LyjN)f&V{Of6#X?RwB_YHrm{)mz6; z9~|I@hAzH3aEUkZCB^wkidRw9N=GndpI7|t&)pYaxo>p6c`8asVAF{(B)}%wt_l>y z8(ls7C@SEj1L${tQYFVJ`{AweB3^TtiOTj9FCnI7YlZbBY%W7e{Uz1GrW+6PG- z7+#JClr85w+Mg6-xd3Hnx()cXrd7{1oo=(I+d_t)M^Hn?HG9sz0{f(H8NdszV$)CP zu3%uNDTgcWcMET(SM+obqZ1Q@fCWiNNO+0J0HcmYU3}JK9{>=xYCMp@$H>ku<>0`D zwe|mjF#xw|6zk@RV}S{P<$eL4iWhc9|N8av@Lksfp1wPYu$OT#p6@?TeR3yn0vwR2 znI@oqm*M&7&3ze_qa=N=PwLh_%J91II&)PJ#_}h3?>l{FbGrxa!?Bu1%$K=<1#IHJ zKAx0Vp~_L+vxEJ$Rk|xtnsGmX;^~1=12!0-ULydM0q__eLYH0dm)R;Q61L-$-<)?Y z;33WiEe@W%p^!Q!7@$geYYMlHe!Fh`o{#NGZXAjg2 z40$hCkM^^BB8F?YIFF2`_cY4V<7Ws@v#as%AR}KpWeX_@D~wQQb@h`QK{547UOH z0Q=z%Cz5~qKJmMqjGRD^Ch%U}JfJHhVcB3qU`L_MY8(eaUWQz{q3M-?GT|D|-;ZYl01mjBBuXOn?cLW$vtKcX zD<}Ma0yy+FE)1-cQpH0Q69F2C$4(I7CeAJb8B%0KE^rA%TLAI|^@6KFo=gPKS6Cx% z=+q20(AQjy4I&l$^ukAiyn=?W5>xtG0QzhJKpj(6;siQhU+iWD8paiWy)8BIGh`U_`tpR@n|lLQb%z3- z#`lI-$20PuPBZI6@2z&=K|hsvdbj1gyV{iZg2Ru5^HEA$y)tiR633Nb7^92bmPc*0 z6WXpUQWZ#UKNQk3sV`*S83yb~1p+*A#y>-j(qdY%pb`6?9 zo;zVHw)MWC1AB!6ckA=p`q*3zhYua-H=Q=*@z>;4IT}{Xp;JF`>wn)YDN)f5BNlkA z?y5^NmkG=g9UR2-yhfspbNAWv{~lJ$V|@Q0;`x|zOid|Do=hw?CJlGoVCFjb zE)G_Z;#*&TjX&+$<~BfI0(jMLG5OsTg8^iEc`y$wm&kM7c@6B#mkVa&|7kx;``S@ZT%QKPF7-EBd(F+ zfmWKtooy76R?Sw7u1y~<)kIX-iCyp84dhc#_qY5_4KFAl)=rtG+|L9xbaE`yFVJ+#4qOR?jigkKWw*%r&rv)#fEsy;@i6Vk-Em$q-F>@78m{ zgWLsktpx24nr(obg)-3rn2&F<>DCz!#dONtEC4r^pEnUNlS5E=kyWsEy~&$Tlc+?% zK`a6tQcam#)BNBs-ixpQ(oMAVE`@kPzKs0l+$r%|WTp=bsn+49X;+5l(Ids#2udEj z@7KQ1Rk&;67=2oFx;gGX&%9YrN08I5x~iMFQ}Gu?H1TPFn8`hUrSz9}W$P2XYk}7? 
z0cEFSOYikOmQ?o?l4my^eakJYUa|--y`4C~>%!a%ZUgBT+-2AL;08%6Gh^rzJ_{23 z-t^t?mH-Xs`jXH9_s1TBlbVrHI0(_X{@*(V6T-8S$@uxc9hSTux_D2joZRR?45C@S zUYkwF`Q&{TPl^T^8|q5SJilP^@&vkE+D5y6qk3ov7Hp&sm0_Fz{&gyJds8 zpz4Tv!Y}nSE=R2II}bj^%H!N|sSn2eXqxzZ&$8b3I*?@Ee!InK+*6b-)%=}-t6ov7 z-cvTL;-u~!{a`_n@nWy*%R>K~DhC>#XXY;y$u3QvvzFC=Ykj!PnWsHF+gIks8Tti| zF9;QD&`3Ho;EGZGGa3{~6P&f=WcO`=>5K!L6#Yvl4bV-dmp-Wy$9edCEQ_NWn>X=_ zY0_UK;7+h&h9R&vMQu9y)Aa0gDBc9q!$3yT|MDiJJ;fTtTlAWV_^&+_dD6Xb{p-&A z$~ziI*oYKOKyXC?Gcqh)o&EMLKr0ZSIL;)NxAq0Jscj_zWK2Jtt%5tc`W=8IJFpqh z&s{Nmdau~%O{3B!$MbI8>Qrie^)T$;cFx!cq_MVtt|A_%`7FOIA@E!*+(VB3Lft~& zXP-cT-9{2U3`|F|DJJeGm+7mxVu}?kmX}Wd;!6rWpZ_s@R|Ur0^uC=){lA;QQs9v_ zHE`elzDRz%RpqSlvy9%uF!p)U5C1(>ffzapyn^YPdk_i`U_yz;XkSQ()0Z%vD2|rP_ex2)103(10?@JMCN_EgxCIK0Cq8I;bXt z$5Yfloz*(O61v(C>V~k#-QQovOF8~^XSRgt`b@)jH+T0(0kStuj%F` zsH-422GSx*8lH6@D!mQh)R#!hMns=iSSG3spDiUB&c;w#V}6NyE)n740H1yu+IE~I z3Nz(G@(PBr01qAQ-2b!y7aiPcJE*%W!$L#)MhFhoN%bX|su#cQoKSl)+y{L2^?>uu zV{943z|ptY1sTrQVi^{}UMj~r_*yagzr;#p_&8GK8m9?UJL}X0boar>`r&r6B757~ zUg-PwmP6;U`=iZG{Ovo}u!W%gI;F!Jgb`dgasFA*;`p_PW`E)iL^&YSwcN;ddAQHU zA&bui6tjwy&pHWP-iClyiDf-DzbEL&B25mS$!}{FvY0m0=jyQCExtfX6w~YFmzngI zt;OaQapZD{yA?G#ovkBCu~q#{UM9QsXI-94>nz}6QDtJwM?UPr=Av~ZwX+sg;xsx&Jd(~fC)>$BWPF&FK(a391v&jr>}9WJ(BM5mrtuJ4CZ_?b z#~B3l+KKXpZ~uS>tQ0GNg~oEEMweqR4%?qmt%#~@}=xkF>cRDYm105sbE^mma=Sw9p+C~ zU;WPO=_t3}r_#4Bzu|lQVlC3}v+jFD@w0c(+9G;BV%9MQg#<2?x03Jz1>y`rE*{1)%J2K+Y}RQ z|AQ})-z*fl!SiwNLk<!rmn=}2l!jwhM`=5>~Z(aOgZlVfB zXQpiVsLg^;wG<^NiAmN14f3|?!}@lECS(dPXAMrjfmSkJJ=50XGo#_#`$3A-BZ}^^ z%ll6_tcIHo*K}Y4%v*h(R$6CGGOyP^{CU&%t}p7mLiu(~)QbIud3?;FM0V?wc$>XC z@BIoAXHTQ9QP~B(vx8bmVX8pXi|ErW@3ZApXr(jKw6oL0vTXA_%-dE+V^00&bl^;8giI8$?t{Xq` zMP1-WfBrOTd+pj{HVr!)FY-1UC$exOq|RYzNZsc9MnZO*_4j*R628lAef3&gapCU=+JYui}Gy^uO+jL!sdoGu13yy+OeAF$z$UhJ_oyX zYX$^l4mrUDmwo(m`V-PyicoIIwHKR>`%5l_QPp!eGIhGha88F=P4V_ZL#g5SXJ^x9 zV(A(&apxe()Z&wz?)}9lXO0&WP)zK_zG6t+r06b%apKnO@gt|ir)SY8@2AfLBF3s# zdl@#z7>o~m)KZ!+eo%QrG%xnfyw7rn^CB0vk3l33OOMq>&+kxnIGAa18L>hckc zt@&Hi>h01^yKdJw5245}-WQyA?o*r{EZ2m}Yl{oHG;mO^5BFs8A&;|OvxR3;oYyyf z9IkfRYE6~@mZ0VNulaAr=-baNR&w?T=wP(0*ydv{Zb=kNXk(aco62+|UbNHfo*6QwHH;!(-xwaU zcC6H;!K|oqEH4ITkU#x2R=%xXcc8s;v?e`_mlqY|Qz-)t+Zt;21ixWm$cmIs9hIC_ zJ{;*Qa2|D(?G9c28`S|xs5-*Yk`;41AA zd^1#W86U$Pk6NPSPplf zODB4?Y-6`>c}Ow;$R<92lYA_os&Z{hJiOfc(g5PLxEl|xl{7w1SbL@-MEXfueU^)A zBhC$F1et2!<>nY~ZNYB+yybQ<63}vXgZvV$(jQ&zy7dGtCUjP;K70oS9WJ?p9b-Y` zQ+!uf1tqBfgzapT*ajw1zsGZh$JY#Js%gSp$cjEr~=vSAWV%@^qNm~q${+X!C za$&LkE9sZFJ;j<&#^*SnQgKKx1UG56W;2IZRWpDYQ=T}`BZ5@4sj&*R}UF!=jF z1F5Se@mc)pGN+D=|CK%X(E5z*Ukx|?Os;y1LyQ+;cy*&D=l|TO+2B0E=R|pB2XQ_s z{pR^U7~vqaFb<9rw$lk~mxdlkFb62&{s{00(84z^OC1yf@XzH>|L59+kE8LxXAE#v zKDd*coPDz(V(6+hA5!8adP`?1qt2a@_s*5&NK!PGV1+~vBuZer(6+a5|8b?U?3 zcv!hNXJ$FU0FmL4ferFLCyuH|(g#d?*4~0KHhcgnDv(LCXG|fBr>)8mJ(PkO^Dy0( zBIHNCg6VsM4uV(ZInkX&8g}E$(Wt{ab3zbl65;v{dTBw*EY6sl-@KdbJWw~M@*%Ok zh(ECi^Csr5?AX$KHR{%~%&6j78hdNm#fpPRvG_-)$i^lO9WSt2tU%*KI$2yr^Q26k z8z{T=Xs71)h~#fYtE6$?jLhG@+gxRpA-;SvzBg%rs->G*^hhIknJB$L9w-6$*34-7 zY;(a|l7LDJ?q`%|vR$1I#K!Z#P-L$Kv}D-?fA<1c=3fS^-uA?A^p&M z!ImRcD#uLXRE}i5FYF6UXx+&d*=I8xWt*xbt@e%~>Jis(=G#Ug>(`=Km;}v;e}ZzR zPfU+(-m!(lZ;zIbkrvlXBJbOtQMn^1ByQDPjf@kiZDaV|-DE<&qL=>4(-1>WPy;;B zaskU85R>$yn6S79$rJp|5T=nY=Dq*<1XA=dwqcqs z#?^_!W#>4W<*olm4;^{lMdrxv2@ZQVJLfQChYh~pCL^5)vpnFCb+<$L(FZE^Tv5E^ z>KQ}X-tGwW6(am&({pYTN6Y3UdrwyST&QlnjX(xU{WE4N%3a9)nECIam4b><9bN9w zquycT$nNqHA?9fLa)D`~w{1+gxQ#n%d{)+NANzr^5_of{#g{-^2c&oDyN@9&*~M_fDyR#%L=DJ$vRDf1IH zjit}*R(@%(Z{&rpDI(>;!y_X9xh9yZN{WE%ZP37;^8Ml9tCgMZ?%0p3qhZ$8k8x3Klsa6ca%>MH9;sstzKFnyZN&C-n~ZT!5&nnoh#qjMOoKNEc9Z6bk2l-)4F4_ 
z*-0*&_n~e{8OAA_plQ;e)F_>hWJpy~sW@|HH>&Ob6q$$#ac|H*8P(OJ4-ji8AEIDM zL#yi7CNU3_gA0;GPOW-On%!NHjSD}YhM$p}3TRAJ4%yMRCaEjQCq+BjA)iTyO-Sa0 zV?7VJg^>L)TXxFjK~R9{_bjIXi8QpP16=r6UG%p}hmlh!ei@$f47FDE8x>jI^`R8~ zZ3jBrFgQP@Mg9yBOaHc`3hVcbg!cwq z9+FX!fz*{gf;a-<8@Rkm$dJ2~ok)nYXpJ41OEuhcOmU?7m;fPeh^tU?r zd8k=~_yW!=Bh9G{dew+DmK|EiVcp4WA^8NP~1UB^o@+l#+#`os(JE zqs8myU+B3^y!iGyjR^|XYwlhzE0tH04~v^#gP9q`Rj@I-b;l(fQZ#y4X~WdLzBqz( z{QXd)menPSo#+oR!Q1_5-|b?^PzT!sK?ze}tW+8cPA=w|ryX`p6Oxh?>IV0>2v*LUYvcrcEQW+a@daVW zI;aB9$6Y$|rnE}@@qwUJ1AT+$1ZjTSy*GO{tI<5%3WEi7Of05mJx@~amnUY4p=n*rM|5#Z}u)s?aQUD za*LqF5+^^^0SI|uw9IWwz&1K2z=5EP)s1jY-a0#PL{e$z7tc0>f`|tIq=bm1UE!!Q zBMAL#J~l%dZlFJ47##ysIq;UP^U4=giMBp1Zy4Abt%3kP%=1{6)G>VqMMt+$(q`J* z0313fFKZI;;rF<~XUSNM{;@6Zy6#O`-1?O}K(`mXN4l-mm&wrglm~o+j)L39DX%r6 zlpKi;Z_JZ)ai62B&A0y8hh7Pir4@2}1HLo(DQ>Fe3`rl0WNUmHgwR>ysn8OK7N{CW z^9mCOH_Vbxu!B%a0iHBN3~;l@#;Y*D)!k1ho9|r_^-b@A?RVu6pJF+>n^>oNYCpsg z10E&$Nw^%@y``1`d7$mqoA9K6FD^+wMR9I7@0C^RmY6+uy(NVK&nf$zOpa0tydy6# zPow&Y|h<@ASI! z!4uvmIjv<}d7M67k;gibW!BtcTRhy%sj~@x%X$rGpM`bt3J>fGx>Mdasdh(EMDqe> zd{PdT!#>AK8MzWT>+|XQ8Lpz-vcn~}Ht`Htv}x~TB6H|@g~C0*XU-`ENQd^3%;KV4 zV+mKZxUvscxsS=y+v~m<8;84xwKdyFgHr?{JV{TPi=udXIkH-tW@se?;GN-3Q(rwg z_n4X9OGeUU5C?8OB21=ciZsCZkyDm+_Dl(3EX!+xew6`Wor)cY4g*KZHBX$}4uK(QcEmdVuw&M0nF*MgX<@n+X6_uE= z9E9-j&nvDEa7_YMJo%f!cRxKoZ63HqypN?udb_Ba?p#z!(R0`G<1UR}E%5~^U8ZP_ zzLhY=OgtA;IFzxeI*O~%;b1X;N=RNTC1QkYeDs%XzM6Z=ajg+rgut(!6X2P1ajA3Lgrk~y-aiBD zZhcF;UDT|wR8UrLprov2(qm-j-Q;O|2s^s#*h!@1&fh7)$FBEuI10p`qdjUEFHuM% zs>{k@aC6Q*&}2_-myShJYJO1P!d3O|Uhjf(^@1)Dp@$JYA%QCds;b^gi`><-Ol%O@ z32~oa=NDSw4=lSC)f_t6t%lvurehM2 z#ZG0V#ZFCTmhi4nyQ4ayQC=Jz|I~IVJzsC97#95kHzHW5Tn#`!-C4NPw!**FGWZTG z1enmF&dS{tj3JXVOYqN|b$Ih}W{pU_d_C*P@OvNgYkLi2)YAR4U5m7=hxug>m~+;d z9rcZ?Qz>y!OJLg5gay zt>18d_sC>emhBD(8R7Yi3?Zvq#cN32RoYWTb~DasnzoFn8k5mGM$07yl_4>vci}uh z+bR+{tvZPTP_XkrReqQXfM**8K}`fT>II%cMUvb(qV%$TKO?FPb}$OY61_+zvt&e0 z=uPZo9)!G2d$&9l%#-xF8@GF(ARvrs&s9d*iys8&-!X|7!dXG=_3sSyMbji2F-B(_ zyY?&1F?!jDjR!GQ#|zLU;l)D!#qwTNu8>ShUsfaWPRK_@rN1c_Q?(r1IC2ZaPXFWE zm)1AQi0^&l72-baGetHX}y#*OC%4$2df-6_u?B#=CdwCJ))aS8^ z6yUb4r&cq~0q*s_?(L+mJ-=RKI~%8`C8ls0GQq)-F{N*(t-x6Rqmo-}Ui{zk7XQ}- z$N%?@_lE#a*Hvd8%Et(J{za}VaV8Z&^C5b@;S{VE({8Y`9KUCxS6fbbmB93X=qY+px65!Wa1Z!c@}2giAxZzpi&qQ&!Fx$BKE4<{vy$Z#OYwJ1Le)f<6xbd{d$_GZxVSyKXniFSjx6(kFHp zZfch=Y!cZZym2sKGYqse#XM={F^6$X8m!JreGicLX)f=7Mx(|%rW3hJZC;|Wy8Yx=4ekNSV$mr(cd`GT4O=`>#uT@5?sv|AZp`F_}Kp8K;Q`lq}5V6(4{=N&1n|o?j zYZt2+`nUAy+QS4B$yc`%ShvYsg4Y@TXxJWAIHNN2odAp5^E-L@-SZ5++oKVgA%&#lKsa- zOvbhap641xWlznmlk$*Loa7gyCHPxWnetZNN2Ed%=Lnk5v_^#e$8Rqtx1Q?X_KRZGiR6SSv|NG%NlsdhNE z43ix^i$a$|rV~eE3*(J8+k5*ex_CQXV$(zB7pUa6r&ID3BPb)8wrFI-tk=(2N1K9; z1>^P_WSP}PI!{m9fYYel^m#XH)}A+P6zV;}DfRuf6qrgaPMm*?NDUCLU1La7-o(f* zQ{VD_;;|CqW~Mt6L;wm{U!%I)NLsPion|)a@*KRlKl=NZ=~+U6fdcEYuE$*XJ3vFH z&~Wt8rDtYmp@R7FQay=eSYZLa*v=Qzq5EN9t!xl)2i?|$c*h;}a*w5f4NuxY5v>2V zB%V4TAxG4{DD?th42p%-o;2~(oN$#4{1x}v8|~Sr2xMv!Hxql2+S!@3m*71YzT9N+ ziB_!wxYO9~iIzm9W5no$wyUnFAKTc;K$zqJe+9k-#;VP$C*^?Z=VDyKEuG?^+N~_B z`c`6jodA72vGUEj4yR{L!BP9bK`lAL0rP!7LI7d%fJN%Z>5!VDGHrystDVvoDZ`jy z-$Iva3aBTxpInLt})0HY~$RFJAF;^plQ{Be_^7(TnXtlL*GY-cc8xun&j*Bo&Cp3{pz7KxzX0WHRW z`oy`3!IXrBQH|!3{!hk@7%SATZV0$kH#;hofGr477P zXjZf%hQLzcRY+)@80Zqg7`*e!81mqaA_q=w=<3&h-c@IfY}g42?(&ZSFp>uV|G#)O zGFw=vebzcGd(^9ylBgxryDI9zxD#s-Fm1M{QdU{uBz=Tr53lIUmHcSaU#{DyH_K?A zZx(u*C0P7mUXY^ADI(LV{+EW9s0c%zY z4Bm3*%^u1W(`9Y+$Of~{XEsU1neErJHNgJv$6!vZv=1>+tmyZByC2lV--abRC{V5h zDY>y{Hf(ZRW|+)wNF|Q59p*3EMl8+ZgmY$#7q1*dpEk(CCs}Ppl4~mu^>JQyEp^_& z2rTNR)r+46VEO|%#>6iK_Bz#lGJ`k@0o*9*;kn3HbTQ7GbAY$ktE!ndO%!1~`>QG@ 
z`>6YqWmZz62OmAtMX>d?B8=zz%=N1rz<@qZe-MNu2 zFFz~Bs3a5}GpJtp)|Ybd4r*PABv^9bBvOGjaWR$|tlI3Vd>rHmjEIV{!MmF@$BxAu z;Rlccj-MQ}GFjw1+#C#PgG3P#BYF`k54@jJY@506l37G#7-!)hA){EF;JbMI@-wb} zRui;DCY4@Eg!}Str?4h~Xq_~j-9n6s#y`oUNc{?HELt6{YLj!PyBXcfm6EcD$_jPh z4+;6C{_YV;cQ7oKhl@E?!eEg1$!iANYNtnA@ku$on_*1Qr1`-5C6m8^+w(LrLJvs& zhSApJ!ww)qqhhQ8N6bePvV)N$jHeZKV5=>FE?WPE>g4ylSuh`9^{J+lJNM$NE0ji=$x~YQ1z)&b?-)_vZCIoTcc(4Kmk@Cq88EVi+jL zia^O!amhjPNCxbz5t(|_z+V5?9i{7Cc5b}>{k+MCcBk)6X$j~i5uH>y%iFCl3UvK+ z4+Th|ARpoWGCKiA-&;Q3t(=nrV%$%9yMktIZHun=4r>re+F4JW zC#fsNf{hJ1?*YJ{5`q_mhDS5Qcs$60U3q&{HRH1xOs=Tb512EGM%GPba3eYfBu8@y zr7@oEBs$*Vt+F1`DxR{8qG75Uyvbs^Wtlq0hxOKS+J(2&!EnIGG&WW-Y?@fGewUJ> zAg4U)DrKMNy5^NZQ-?Tx9f10za}ZtZtx9=O>EKbxWkv;Yb5-El?J-(z&kC2f)b19`*Cu62e}q+^bt|EhPg3hXwZ_yc z;&YxytO8m*a$Q)##g@1*3Q2CX1d@02rt1*z6tl7EU;(GayAog>IE}VkPw#;BC9BpZ z{cqI4oOqJx@?kLATVP>Oul(uA|hsO%AZ)8&!JSXSS z(^(vq)rEu=H(PC0h-$0c1lxXCbGP|w|5hW8Zz(Vp4mLY~2Kxlt|1|%l*eToeCfJ~c9HO33e!rolNN@1W+kLmc*}3i( z%9brqG-76w`P;?n%Kt=U;(gfggJsV(AR;HIeUw#?=`eXcm#$O6giebWI1p8${Hj)@ zYouY;{#ahK+;praq`BB5>1@Z9;^Gq*J#*Ok=yXM%fMF`iYlGZ&HP($7Eb~Y7VM?%e z1Xty!{N_D?2W@)3Lm-b1H3?JW2(S3)hrGYV>)pzC(Yje6b3w8=KjzH`P;Q0<91d3I z<#QcbSFWO~j>&s?GX@Y4Kxa}!LL4JTgp`?Q9ArWv{I)!?IU8(h z$n4}38*O8IK7?Cup5)yERIz>3aMP<$s*Ll&{E@fXSvK`!dOPFLMTaEic3yuzt3>?Lqi_YbS6@oPAJQZUx@!LG`T$BjVCLA zEWUG&xX%xkBxFr5eHgaj<2)!A*j&M!)aUfrJc&cK!)nZa4G(r090uxh5<1XUbZ4V& zl!(b> zPIxzkx;?qbSWU|WiI*`Wy*g4m|Su5r^q3ElMLjk}oXNkr<5ATghE(53sFK?kk zu1g;ot-t>F@2k!GgPC0AD5At{cf~C)R#xTnx|MoHR?6^kxw44)UBHRsr0ADD zZ1-uH$3R;thxg3^36bY>G=VyKQeC81MI#%pN93RU0 zgNYaG>(>7y6XKf4l~-WOo~X7Xf1Mjqi5F+=x`wy#}XdYEN|)Y zzu3@(%go?EdH|>Y*8eJINKTvD2a~?IYXQsxdZ?-v`m?VNq+)+W9qXsIGyp-Kmg|oI zM@wq+C5G=Z`ho@?CiWr@&ZQXQ{`;wit(QLkoSUE3T_57Bn2aX`ufuN3bz)J5lu9U< zxLy$-@nPpos_(u%NmRXX9l@F_V9~Yff^Dtpzuetgs1E5cx1-S4pWF|g8Z|{O9a>Ey zYsPOSEPf|r2fi*M)`P9^kLO!%Jk9SrJL-QVGPz{ozeH*9dxyHpEIWYQ;AOV?;v7_J{IzYsM0>YxQw7E|W(sOH0u*K)w3JWUQ zQ{pQCdChq|*y~dr&8=&gu=$8><;yC7IAN6mxLo%|vY))a3*MV&BymR=1FW5?)9TPyd0kGDbKHr9&FG?HwR2A$J@A<&nHbl!#F2RI6TA3Z9L-qQ-XFW?oT;!rn zZsh(WqM^f#oo`@|30uc;fu0PmkXv6?cVz~uyBQW#dev)rv`f;wfQ=X12H)Msy__FD zGZ%mCtFV6u1ZcJN(;d-qqPVa2-FHw5G5vkNmo<0LJb+tZ!oeEn-YP2u8AgZg;p_rI zeRg{_ai6Ap*+Ug9ZeAJpM7{1=y-Nu{qNT=`{KxY(Fb27DEU2d|>fJqZ(P-IPR5ba9 z#m}5lIO&0Asfv|#X@4?bZYV$2pGa@<3@WK9W+(k%5@(GI5H@QhQ-;Sc0kZ;3J<9P)@~0+fA)SiH}~`IljX!8EIW`QMXP>`S!@S=kFEX%WpOyszziG* z#hc9gV6VKa0s>yarwv{w2bF8~GY#=6hB(h|qBcLwUkd0n?*PIcP3)vC#YVD}&j}T< z-)eC;-PQ$IZKwQ7MSX)wUFpkJBq$ANh_RP(eE%1nQLsCo!cKRyJ7{KG)nntxQpk}Q za4Veyi}hcNU+F1v;^PJ4GecViM$Z=nT>c=omSY2yS4ChASsXrxxm0Jzlf%pmxD8^kC@ARwy?B960lv#DQ9M@ELn5&e6zcR8itX;KuU9Vv| zirpLbRb2H`ppugR9<-eSEj+URXgM;cR7Hmc3y#wfI)KXq1W`rY!Y`j2PJeBoB2Zvg z<~jWAhlkI8%uKRiQC2Kl!~*iZ%h^tl^P-#mGGw+f4QTLZjV0aO;+&b^%_lQsPc*4+Q2CqXTPu7K1aaDP!l;Y+tU8fsPjKirOTR+9n2{FRT(u`O{w)h@%0D zC=&f@JuB=6w)S#3j=%1fDQ#8XqQe1njYoVk@4-@oj^C;U6rto)TMg;ZZGNs2il~-H zjzq=fGTR14p{K)3YzgqSjqtmNO3WbnG+NvPQ`Hq}5ps)#J3W!kVoc+s?8@p{Tqai} z@`ukoNTK%g4T;$(+R^Ew+dCo6S+NFSkieork;Iy(EcHbTT|}n2z9)lOqP^%bc={la z1s;=ZGgw-8AU9eJ=>Eh z^}PI%v5yKwO14u1xkXuYOOM0&kz!^O032g~L|mK`kP2^QNd4W>(V-gv#7un2G#fsZ z*=c)K*s)?Rf3+(IC|YCK+{5sx6Gv1(KgrjEys>X`3irp|T2AgA7CxGMa*rwVt#;Jk z3o+Ogg2Y&aQbCAcT(H6kzVL!zz5Z%wQgs1Vv$Ua?XNHLgo-xLsxZm%!zpuo@CBUd0#v?4sBkEbfJndcf z4xp`m-T{C>Tz7H9j-1CaZVR4?Ev!_;-aFO*e9iW3@!OM#%%O~xRMH8xa^8$`FI!W` z%6NMl_A_I{f-0iT2mxjI20NsrC>(r21&FFhs8lkh&1kb(CGW4*x%HY?N2vnSV851| z{?F_)C(d+0W5XPy_O!Waz?4jiNtVva!U{p|#F*9C#{`ywaCJTi$73n}Xk{#&`eki5 zN0t0?HMO`~{@Zm`j|VwISANAm40;q%nDOFbGF3Nil-gsfa8m^E_oZBD`#EkU5mZ5D 
z7~gESWUvq{{C32%N9Xmk$}J}(uZ?32VU@iy80^cA2{PQUDashVGEeG5FpF)D1)Ds9 zjH7Xt^?{wR{Y@ISD)<&Whs6iaZoVjKWp4FuT>y!2Zs2^cQ4bPv#E`{-ZDt~{wNksw z_v=-d9p$Xa^5*63FLhgw5Xx}S#%1}*jQi$in{_2m(6P4sut#1tIEIqrDUOm2yYo4i zo3j(+?Kt_DY{pOb&l?~uiw}?u$`Y>VE4mrE)$aSWc#$(CKhKlYcrQwL%HsJ@uY*`f zp-u{AJ@gLOa8tR9!s!yFczn|KY5W=6#=}n!b*vz%q6`$4?)g5=ayLNe-c29NS`GiQtTZ}2ODf1hn}e$jqgp*&xmojoF+x2lrVx}KY&^$ zP!y6Db^jTmB$5=xES9FP{rq}vyVxLm`3kx9cdxuID|07w9>blM?+~DM7IaU7NMHqs zUJ`H7%1u0;_Lr1R8TjQYe;&rNXx-+Ij9o;OaTQb3DfYT_gl3I$tgjTjk+450D9(Tb zEou=u6OBb$lwx9jnSpSGx;mTL^|74rX@B(Ky!qnx`AT^gA!VkL+?8hUCF9ZgFBSHf z3E44E3T$!l?@7S&~QpEX8QH zRhq~AkZ#}+W9a%j`m2#&X|=(?G@lGf0QYwnL%Kak$19es-Ywq zzbK{1t26aUT`io=PGYi4Lm)UC$eiwDF>kz&zx{aH}`I zFarbo=m5_3_zCX=)OmAP{G-=&C^Gwzqee%~Fa*i-_zI*yroY z340(FS^Ed_Zts6h$5GG{)iU(lpPpi=v@efq9Z`;0JDNV8i?o*3e1Cs{U{O}g5xJTg1GjIb1m((m?~4TxnRdw-t4a+y7Z zdrlgTm$PSOg)f;+jYOnay|v!_{)KfCMKH?5=vWbyf6YHA~l_Rvfs5p>TwzW zu&HuU>^9BWO4{JwWwBkT5G-T+$}UGq%$K?lhjW!VeJ9`OWrA@hAg^9)6M2vnm%eEv zMuvQO=TqnIXR+TPyzUEUmR~mM=yB0_{Q}v@w~A-jaO)8mP_^yi+O4r_E#J@(4y%W> z4MMEpuU1O5$iXYxQ)kq0#i_J23z+7>w2g_iyzS~Km95_hlVa~zZ^lGzK-gdRZb&OD zrv7u1UC2S8%g*F5tOOBmV@T3RD*>5_E!5d=Y)4JIh+8e~uz-n2K{x$&>y^s7e1UlPM;nfa75iR6CAfTJ#J6l-JW{7Cb1sV zm_$fj26@@?5KunyF()?tyO!bjn~bc1j8nc3WE#XP&D-E_7CQ3R-HbPjlsYtCPwR-V z0`)^J+nlcF?ZYcwAtSfyC*z981z*puFTNJpnrA}>Wx+-(;D+SB#(4d~u;M&{owv?o zM5nSkMjb0>$Ault{<98Zp5<>+cBgIn8T)sk_LiMhFr6ki)y_Tnx?2yKqGx|x@)`Xw zFqX1l$k>e8TU8VRd9r=|t9#%jilgE>xO|8^G@c1wEThJja9i46`8XJK{Yy*v zgH*_=N<(T`({)?D+jH({Zjf)hdQ~>5s^}2|BEBQZ*P2?{6gv818s3<-o)%RQY@Kn) zQjtVR_C<-yS?k=jtZTtoL+$ow-i;=zlCLjj6HoY;M}}gT*r@k`_X|zs$0+ZMzsy8( z6WFZRH#l7((v&R}rt%bNZu6pscC76Xa)7s4E?j;#-tx&c6NWYnU2r&u+in8v!;q1w z1>M^{L@0ctrZe2;6y}f(Ia&`#@By3y?%W-__Og32m6jb>RA7k1<}!pU>rY3ofEAEsQ#9d7=-Y`K8{%i( zmO5ALg0D7*!V#P)X})bLghx=3baQLZ06+MFxbBx|bkio5Q((o^?V6q6gEg^C%zfIR zuNIZqadw9v8f^kY@p7y`#65A$EoXd~U!-!+9#5i`#NqL<^>XmcWUcjes?AEYKeeux zJ$mc{o3B{UE=>^m%!<*VHdXx24yq|{p=a~Lrv$ari{$iJxgmnBlUZ;k{cC3i;8TYN zWBDc|LLSRfTYJ%8S*t&_>>@tRJviVCWcKp)ta1*`GQwhJ0flc_CdsW@`GzJH)e7p< zu`^pWl<=D@he%={NaoBV0q|C8ez&~>Is2D!nK~;i<2(v`i_3Vxr&1`^xvs7cw6p5x zWamS$a(=Lx}c8m&_kvwiff%Ltmh}CtMY>D(2@ijrj|8N9~h=Y-7$g`;@LcaXxOr z9T^oCN_LX&U!Fr^l#V@*@LlCbkU zESAer#HR>vg~p6(G=1>C>QG;Qz2k0d7*8Ky_L5;d&eRIJo!ow3?8yKxy~dC$`|@U< zYQzV^`?Ka3G|F_WkwO^E@b)i+x&9>Y3%DsF;wk0juMaV?!@S$8j*_vNbT{ve1ynxk zh1@+tEVW(5P0%ozM>27ul<40*IV zZs7urR&3ymTh8$yn5|t-0eejB-r1@Q)U%Ig{;p3a%n1;dF?^zp?V#c|pJkMyukoOS zWEWns8F%g5B3PGdd(?!66no?Hl$?k>9!9VqUV9(jhUcI;dFZr3(eB~Ghoc6eH~25d zK+#T|OG)O0tVf1}!7D8DMt_!zPtW)%NG<`ymZj7eFkBbIo&ZuDUhIu88R%Qs-27&v z@XI~fQmKf&*Cmg#;M-#aQ1BfBr5h8%Yc|8f+fp}p=IxE^GQJ))^9OwZQozX%JUqNW zwvnkC+3O?9hog0iA@LI8rPC;;-anpJ5-av>Kx##ic54_y9)}PMnnv*1m*rFnPUG~>e~<`k zd5S`ogDOMg7jQh9Nf@FZQ)xF&O6pw^ylpQy5YGSA0q z*xA{MHT~m2`LmDwZb!<^4Ne!qVNOTFfYJN7fPj(q@096ndrxfrzn=X6z+Qly@&_CH z_o8a#=NtVOJY)UtBY-Xc*ZVJU{Quvc$twNT>zIf|uk_k`8O^&LJSg1qW4$TPW8Kq( zVwn=d|GM4lw!yYc6+f-J9ae-Zoejh56uNG&%cq9n0-L4QWv^pT&pJM5?@SDs)pEv_ zaz;JV^jH7W{H|cCl~6O=?u#GMh8vRRoV~y!Ng-LRb)=GVo*oj_u=N+CCXwt{7*Aty z0q0l8gXBh0s838r7SvK}vl#6ST$lhi8QxeS8SM^3@;d$Xe_CF1i7R~j=t9es8>NR= z-UNoDoEpx$u2po;Cz=Aq7jTaEXS5+hpgFzL0?vgiY=a$^pp<-Gvj!(B-CjU}rWYefR=~g`yXH zyyJ0paV!flPL?X_xNb*6IQgaWGKwIn!@Ee^Mg@ROOaRTVAj=&7dR? 
zhXz%i31eq}GzjSXP3k%>F6>*T}sDbAT!7EdZxEfCd`&b~kgzY-Yi`dGOHC z{KY}!@|2RObTvvi{%5=ap$Z?@&{qP|Nq_7U+jdCNEPDX$C%6b!PX-P|VK_~Ik~3(+ zi67BXWf~U;s>pGL7#-TmC4#!MXeR4k42_Sf4WDh;zt3G0&K4y|^MRBdAHKvmI0MzY^B!mHt>a z5^Dx^Ta(i@T@dNPm}tt2lS7;DWk?p;5GmApV1vc^c6(*3O`y?U=KHmV6g+?o@f)?2!Lh6MmpW_7QYKod* z@A@TQ7HnK`x4|1SI2l1G4>)5iTU{0JHyulFRs?W>ts&FPB z{zwDz?YJaNut5L50~eld<9{bWTMGgq)`>?Ky|gWVdw=ZFpMDa5DJ3E| zb+4KCzrU(U_0%at5H@{ONN6HP0T(gHBKe^l^55sPqLIFDP7OHId59b%dXfYwLxNJy zdfszmP)&9)=hFXgp#ImXu48Z!8uSNr4$x&i2Hk9jt%52rec0}wZ(DHAcs$UE4ngA1 zV?gWYjD7)+TaIH9M{73CUZ`f}$BCxs|017g*6M7&Gx1OS3BV%{s+7Ked%g)1yy;Cl z<-?@R((1N9Xv3v(y5{N@jngmnxSjE4dl~XWwD11*w7Cg1s{fzCzA!Oym)FZkwRTv& zPoW+OFY-(^NKzqt-F$CeWbcUrdBC^$TuWw6oFkhjyMN81)|niC952nse7fuV_=Ec2 zS39|ros`>D#!Ll}S!P>ShP}Ad*vFzT{>3Vrz-tcX9vR^QY(I!GdS5#VB0+!4hRwYZAS_?kgWwh9%O) zQfUQi-iTfy;uTX>*bUSrx6}ff9d~{x@Vd>yBQx>e8Qn5L{M$o0n$6=8@&S$v6pKKtX zwJh3HYn6D{2reuy_TlN{Gqif|ZwFHLU^H!#E(<}`3-kSIw=&VSYOqvJ<+bI>e)dqw z@+n@lq=Ej-pEm_2V_DR*=60sr_d`O+)Y3H0(_p08S=^O%Mi^K#R9feCalvRe;rju? z4+qYFxFkuO`Y@eI`Zl(v8~XqS?BdZWj}Q>#_EQUcvt=C~%kmVxNJ)8wf=Oia+{29A zFPG7vjF@TF@*R;vNcW6w+Gni_;4EwQ7-$n7Zl~<{o4&PQv_|Ujy7lT)@3`3nG8k4K z1vqr*g8q?s|fuI_dK{yObHSuLnQX2#osK;h91W%6cG`D^wr9nBg!u97?fji z)Sbs}c9^A;xO?Jjeqb42m|5jfGMD=A*%69DckdIUYtLfLhLy<0X<$f9)=I@)mxa=e zdm%|fYU|@Nkg3VEa}xOi&n^(84;hJ#Vbp?+>SZ?gtritTr<^maH19o}E1OV(*Tv(Q zi(`3JER(`V7-nd#n z2HUto)&spjr@rB(43!jpdTD+lpA&1Q#zpM-EPdR7j8C;|yKCLL$uidbWgcIlhW)I{ zzLRU7WmOl#_KzC>&0RqWf`Vy|6WjvYZ(ZwfROOQxU)QBKd46i^QemvU2~Yu{;DcnQ zc%0s2sR+tB^NVWe$~B!rlTru>Jx&bBiC?&571H{?mx^{~)yPMl*qS?1E4N#aGVUKA zH13mhnmn^!e3R`YRB~@qE(?FVF=Ci&vLMkprkA2kRNnb*)_X`Iu9vhe7qTU=VW_rh zERqpbzn%VTDcmcYFl zxT4ml({KeJohxi)t+FD2NoLS#su&r+{K%tfR!=kj{IzlE=)fpqmgQoy0j?;Ph1{i^ zMG7Z>2#SIyt*11hSgNTPyc)986x)_()8^N9_XDME)%?k?dLrn%iP-(|kCTS%it5|e z{WB~_O1cSiblRw{YB`smC-l8P%DAg<-<$cIT>&H6VKFvX-o$ z=f0<@q_&#e7gj-0zO_YrJ|SOA31>{`l}pfvom$~bN}OuKu5KAO9x9IDzC};GP-dRG zq%m!-{wXy3E(>dOtmTwvZJKCex?7h{J?6voX-gA^J)=PaZ+l#~mJwo!^ZD3boj<%w z!K^JV;%cy%Q3IPo^qxSmZ!$nx0l^EOt$re-+lZhlqSfBPYnLCDJgZ}WUzSz$UB#nm$rp4t z{;+{I0b2-y2|xnC4OzMA*uJyipk>b&(Pfee_*L-OX8UJ%gwb`|pTbRObJBIf2y*** z%X*BX++yd=q)ODPLx%VJ%0 zrjdCAv2?18F)GBI-ww5DI5~xx*Q1j9UnQ^$jRxJgI-}=?o3!{1=|g-Wgl>bsCB@m- z-K-1_wl{t`*Nn77?fLPuLaR4eW{X!Nh%Fe+({+)f6+g0+3&tMRu$Sa+@?dDxGPj?;dv`qs z!5p{bk99Kq-PF76C{$jOOL{6?&#=GM2ctE$Il-&k%PfxdGC6h&H6}-)sDYT>3f%05|*r5rY-K&YCi=V3Y!3ric zsuRcWo2f-pP}vZJTewMW3E};;x3O6hyDgi;21V^1EhsEJabK*LIrXzl#?yZ{GLcAP zL0M<%O%IKDy!w){QP19?aQX9We%B`5P8ucX02E;P_%IO!E;__1pD1Wih(ziiZ%;gb z*`o`t8n?5uBrrrCx?Y-_f>Vo7X{9b4K42uK*iq%|(gdsh*!54%W3w}LDGXEJKNhaj z!g5F&IQJdrlIzxJsX>^kwsPXnD4_giz+W=Fhh$I`6zs;adp*j>QE;x)b89xtIAhx? zYg);1S)!LD)jA?CdfZ+v<>at?E!|-h9%WjQKh=zFc1dm<1Q zLA0sL&HmJehl+RJylLI>1l6TzZj$CRM?M@xbpz*V0c!Cd(%Vl*&J9*-dV4`RRSD*m zlWXL|6|kxWYf@iE!@~(JJj6NF=R8(i339$LnzRwXMY-|Q8BEv_I2+4li&n!Z92rsk znB$pP{$S%Ax*uNyKjdZi8y*5AZ_^AJSRz0P`+ z+?R3Q%<&LBi9G+!pVo)Zj3ar#(tHH-ygo3esnttQ#7s=V>7VQo3JLS95D?+b3f1he(%hhAG6lf4;FB`&#JTQ+Iv@B`>Ip8t-jVRJKt-KqH3C395Fv^VmC7P zO;Cph7XM8n!P1Ejnce4B+jBxpR&yFsHp;a%JZW+7o9Ps|Zv-oT{IY5#M1W<~uDasL zL8i~rok_%-616MQ@r5P#c9R%%H}{2apOYv5U>Atr&lI@Vz>Ygq#}Q23pefabGM~Tw zHdPsEa+Zta-p(%OTQOC)IWt-U59yhr=;K{8Y^+8)Iq{9PxRNN(lE(dzjGdQ-Mp&6! 
zn)A(P-(nb%(cx<{o|7|vtzC+-qMWXJ7|~+88UrB1nAMxUw`Oj<J_!uZ|F;^KmDw$U=*4y!t`0ttXBuuayNC!Kfi zA`rs;I(8{mf)PU%M{^P!B)1_(y*@f`C#*oZZzZu1a*rT>eXz{YN3DN$(%EKzS z7Tkgh))|AdZNXR{T{SrNAaug1)e_4wZ-{62hAIrVr_xGuED*n|dt0j&=37JCTopHiLt5)6Hf2}Wm{Nhl zQx@(oO;au42O7mSp;GW_!)F{dvGb;4=NNZyi@F{52bbM#!BYCMU_~memR;sAwhVd^ zJG3TtLnN{hHveshFFf;END?2^I|TiH{SEfne?nKMztTY^wKcm8_qpePJr;JQLEk}HT7f>)mYQw!G7QM`KHy)g5>BE39W=?dUqsSYhC)vA#6=9M@qcnDI)&Lh zzATzk3>u;>-Wwv+lF>`z7kcA3WR^L>3pwCz10ardpMAHP5ah94Pts_^6?XLTuIw@` z5iID;4=oB=>X&}*HDz(+Vqy%Kr(Z_*w+Z5045(ve4zox*>@H8DPK`h8_P>`hy74}l zQ+r4=GiKECo#EehD&;=vsf}-FN~`UKqBYg zs|cw~;rvpo%B7>TFiAOmWX&ETP1*su{N9;AIxRc#;p?TJo$NuTeT#O5VzR8Eq1RjA z>V*5gfdP?6?Kp@L%!r|XmiT`9E@(#q5fE4-L%HCwj&Br~*9_cRfW+2~@6td>iHj|T zOWj+7i5d>>MJv;yhI|$}U#=WsILcv(2bex;b1O-bBlW(A(C-(i>IjR&PfwUe5Av6_ zbU)DohbIUvACniP2>dmRX>V3+1(p3KMUt<$CSd_}Lt8~Lr|I+V-EJaU|6E9@d^Y5; zW@2kicQe+M8OMa`Pm^h3cwYpH9a66gPczvtRSa)QbXOU7J_hHpeKj{5CRUb^n$XYP zQ|P<)_It1IfEk|NWR>NWVc{S+MQxuFS(fzc?_X47fFB{zMlBDLKf9;5o=i6GbNKuZN|{I+Y7%X9ZL62j7wd<{j)REE2Hx(h)V6uv-B!L{JYK8%sno z`~yudLMVYCb#zw{myB@b*mtZ-$j8wqySQ;s`qz;5iuY&g(Vi9IA4|2Y>v58P%QUeL zn8Z5eMAeYuaNnz=O`O2(-o$kNO255=?=6}k2FT0?qD%gfRTJWy`hCr6FtYjC*m7A( za>IEMt$thAg*@s{UvX+Xn~C}IPll0p(H#$Vq(X769(Jn`&OBx>4xh9=ufAdSc~a>rZx|j zwky6@g%9u7bwN#+sfPJS#oq^jy%+YbmYvL={{h(MU!vgUaZwPSFgnb>bLVBaiOaa- zL6!O}tmBik0r(SQ=xZFo1yNm#4;lb~hMJNj?6;pHstx>^3E~J4=*O5O&4C~;^crsC zLHzimGNX0m=`ZAqSi9#r2!d3tsmgn=qz);LQp;kMJ}@mXvF5%FI+oJu?~&RL$u$wR zhjQB)3Eu<;_lRNpbO0pI~F7N z!6v7NP04w)BWHtYEwA?{bTs21(+>cIuvKV#{3Es`J$rrNLG)SEn})=#k|%{Y9yr^@ zKMUvbm%G-n)>kf+u-wU`>5ic>uPLBnrQR;nX>&iPa8kJ9QbVNuTKgPT${qde6P=_d zc`RDRhmZv`(~&Q7^8uaquP?k5vd#fxnAM4HdGuc7pPY-264Zwf z26$T|=JRE@D*t$MyU*G6uRzN;?MK3=2|2b6BJ|L<2aIQ&Ed|0^`K>H?g*F21TUumh zi07x%HPxi*7otKb^BEZ}a-ivd6E-Uyy;>jrh_C6#Qg&&cU%b<(HxgGIxP(Ba<98$fh zl9Oqr#whvJJCk;oxrFTWJ|O#czOeG!|I=#c?E0))|UaZwaGLD=3 z_cARL9nV%%M=LYKGPlB*kFGBF)t>fl7=COjn;W#5FUR&$El&u!^|(2#V1df;WX1n3 z3}Hg5&bFdE17BRQhuskbj!btuE6Mgfg4Y-oLy&sPyH>qkh@X!iZfqM)*q$9u9Ryws zLy~P|jK%wC0JcDEPu_^%-nVDv zQe>1GPjn!3FUhK<>mAK=Gg_7NkwuRmrg08uSCKRiF_Tbo`OweRsnlYmNg@Y0P+mC%GE1oKS*^|0+v(;$ zh)6k8o$FvFkznicPY@6Cd7bs3iQd=#MKIx~T;DxiJfrNCnvi~NyvqlgPXiBp9Zh3A zF;UB~sR~X3mDm#E05A%VCuf_$zhia7BnOg;jRaS(YeI;q0q3u0Q_|`-`bka3Ymyz| zswGnTP7Z{7=N0X#n?e7zWu_&cq(61}gj{gCc^T$5)e0F2Dh9fjrWuXy@=9A@ZLJoEKkO`q!aoZxLNp9-@qGdU-imt#iSx(~zYg z3(_|`3#;={1;)+mDDoe#rdc*wTA2T%(ZsO9iTTy4p{WDNQYmkxp9N^f_9-c(h|&^Kf~h)n~T422Rue$BZ_;-Jvms6E80nG4_iu8 z^w4>T-+dmb!Vk3LDDbevzY#I}i?dEo65>aRCXX*)efsWQObIDEK6vYT@jFtFPuu5Eq9XzQh z%v%zkzwi2?SmIwTK!w}XqUg4+;NL=J5+ZSBLHi~tJloT)Dz<38Gl-+?W=+Y;?1!(5b6pe8r&{Evl71H+bD^p;G{oR`_T@sZg9yi;}67pv{mE zDY_x7E#PWYa|`+>>?P)` z#^ZKB*$x!ToT50ua!?j%dA!6sw=jS3+Oj!=Cmyia&ezb~wZBSRpx#n$7Q!#bG497| zUIVRKS=cOI7597n$z*N#>r3Sm==mnF$zsD~)A$ncmnbL2<3)k?l^qX@iZQvCo-&8I zn<3y+u3!eoYU#89LY*Tl<5-s!mfGW^sGV$Oy}8eNlB~+|(vsOuD?`v(Bxw||wDzbc z4Cm|KKb{~4x@SE2-AUFM?ya$NzBwq2H&5p(qwpIvtf#2rdz#I}k!$Q=xunC+rXM&WbKgWq-&c&F1kWjPJ$IX+VJF0PAg5giu8dIJp)Doa@(RgC=d^V=OOD;)QiN8k5!w|)1<~H-#wc}Sg%Fx~ z8S21+HI)5UnJ9d7UA}(Ev;8B_y1uhcY94E0Lg~mF?oPwCh^AVSk?}Uc>+;ICT@}=f zmW}qe=|t9hIXJyYN$ENqy}0DgyqsKyzdGQ6Q1n?8=W6k!9WMV4`3RxjFY;;Y749}t z7iFk4E6gojT;H(fWk)I5qELzrFDeW^?wF^bp1%10Gy9eyU&^x%&eBMdNex@0)U%F# znx(br#}6n;Ej0EK7-NlH_|ZqVF+f^UPN{6B#r?E2(V5Z`F6&qFB4l+kSF;(u($q@i z4G}g3)skHpkMpRl2Bjf$Z)+;W$f`*p&*odpm$6!HFXY)46a2_%;?1cPIk}8ts*aju zucvBy@2ShT)qe?3o|!^b*mOXLX1nX35Bz0HQ)eW+m7`MisKs!@K`6!uCZ;8WQuMG7 z_?LxV)Y>>@kd47UTP`>TJd)@WzcSB(ozN>vWTh5$l&m~T49Y!phG~E^EzIqtY8yge z>>$L4TDTlhv+-lM90%xk;eGc)hf>mokR(Q$V_+Wb3zyX1auG;q!EO83MCP^$B9B6> 
z$c$Vtby0;=abe7tD_6f~_If92%y!CsuT_C0T||(&eg=7)nzh-@&{EVMcKX8JXV?m* z@Y-gJWQKa~UJuE#$J?$(`l0nogOz5|*oQp0pMQkYZRvoxMX%{5Q0UMI?D&Pz*A;ZJ zsw6>v?--r?4`Vu8IW3iqV3|h6G|_!X%J?>rX6W@RA^(vZ!$t?Mh=KDe z>3!ly2B%AbR7n|aDvc?-i7AW>s6lp4NN$Q|8rA6#o3OWSP;J5i{AYWhvkG6Gv+ zz2JTNz<@8iaUye z;kxYmN)NR<4NkK`0%O1`J}s@WB|8ES>w_?dPCHGgU)f}oM~>|^E|vBI(!2ICG`O4Q zu{z>_CHWaeMb=pUROb06o_K?78gR$G*XBdxGv@Jghljhw$8E*-(i*XPrTtyhDn|m^ z=S<>QEYrm(SiZ5R9Mnu{ zPODj*`M+eUh)rdUEoGwBcP@?*LeQx~URtkdRmIyD_s>+F0j^i(_kYVoY@oDT*m7=0 z>q0_yaatiyaC%`X8}wUe*XEa^*Ow+{4oNpn=V~pMz}ao~7!Z9EY;l+q4H&ZI?da32 z_yqfl-5T{~P_3$=a$-9I#*l9W5+mHFndQYhrC#&F*0P7hj8~;@)PApELKJj0UN|M6fEy)J!fzt>_z7Nw>+{zD$tQeg@&3;zhy@{PH+Yrw-e4Ou$Ce3wU{mAc6t+509NWFEwRC z9QpQwua#y})JS=&BKCg(3W3()A~$z3W7cJe96Erzy^d>-AwBc*=swX@GMojnb+0g4 zjuua!C7WD$ANLwhBRMdE@1=#SgX6E7DYyu%U7y=^Q(5VfGv-E>JFN=3#+Pl4dx1Mb zsw^EcaSwWW7|RrEik*#ZIv88r+T}E7Uv8`^mb;Oq74<1kt?Y>yw(+S@8s@rqukMOG zv9hBI1a5E<|Jgw4mlWTaHh&Fnrs&iNJqgC2xnyHItBQziPa(98m4?uL_Kms%m}_WZ zCOYZ3yHQJEInbJ)E5E1E6UGCustXjx#yzFjvIINbt?Q+z+tb%1V}<4uR`C;kDk?{P zxi?@{NRz$-4{$qCsovQ&P=DKi73MP(bubmSGGxAfPqM9^bpvd=R?0k`e0u}Ai5A8r zV9-5hOtjsXK?qxns6DDvUj9rDJ5EN0K3gmW={t!{P`HCedKQwbY#nhWxxfYKpG~r? zP#Bxw!ZAMN_8|$UDw$WVfkuA8a^TyJU0?AtgOic^+9~w+&zqslB?CF4BbKashqjF& zcIAc*J~2~Xjew0X$3fZ@h-)doa`vOb;;un(nv;5G^nQ?Y3qkB3xKIjCmY;$R)h+$T z#GIQ|=qU$So>6#2>Gy_~amp{|1y)m1XjvgQB)MWu2O*(C8Uwi_f9(>&&yYfK?Dy-b zntA^iB$Zym0h??U;KbC|#%+_2)~4_$9I?lK&D}FQ2*=O;U6i%J*vzWIeZLN*N?GF5X%f>u5#7cN~av&LN5=-q!%$>o zJm2Uf0KhKQp)k6d?$44|^{WultoQUomw%cd`{(w1hJdrPek?g?lLqkn&0BWM*I*f+ z?1qUtVF#NBM&-j)7#W}4dX~J9{kKG5Ji{9^ZNeEEN`>nWQ()*YF;DaKcubehs?7I_ zht0gaSsG(!#H)}m_ZX6Oa<=wq6af5L6x{%q+}TQJnB~ar&#Jy_!$qH)eTGeB2}V>yN&xyc zm4dr`1UL*%>_9cG{!8Bv;B5zrhscnda_c+p!@!;ey<|K)Kwy{CF&pMFAC5h2cFMk_ zooJxBNzbn;XeN(XbN{lY^lG?YGKA5d6~X{4F-OL?*mE#%5S z2|MJeRpru#wtY9Lk)-DlKWb4bnYPEO<#&IjLH(B>kbD|XRJWI@&STNjQz~g86{%{F zPj|>1DV7}CA%n5l`*FWH7Dqrodtg-e7ga1y9>c_)Kdoe;f%DUew`}2LMJT?L8x_Yp~wk1?0X+zzA zSSMY(?A5Z|QTAFIkuDl7Nu=?~!L*HWqMcQ1WYSCGoQFy#lJq8`4T&qfoMm?4coJ%z zmb7ng(iqL80#TFfj6t=1(LHpy(~+e!9iM$XgXRC+QN&(aSI9Qlr6-j03po_~D46RU zM=neCs1~>_p<70w9_VU7T+)?w`rDkM-q{fLZ;==eh$$1nmDPybv|*Yi&_eQ00QKW7 zx|F01-Xt6PdP1PbbiOwli{&@QiUq;4vLo>(EyLg^Ax=VBS;M#$1mTZ;m}Rty9)_9& zWJfPi2R~D?{{v&4dwK2ynXYtb4UVQ=nwsc|AWn0|qV+-TV!BbE8xc~?OiP`KD+mX& zA>r{`T2^T%>K#;Mi`#$agJ&9>lR3>=o|+EW&{9`a{ii9o-0G{2NdNhqQ)yK z?Su9T`%TSY_oh)Vs-6{jgymc$E-Xb+Si@rYI_HQ3>myjL01Q{U?t|~4Mkf5;@8YZw zi$Rzarn+jbJI;Pvm;Oq7v7=Ag`=meAZoLVA?4e-z{!M+3E+6SqYCy9(ivXpjVtm(DKtP>hi5xNEo@)KU(%4hT!Xc*S;@n&MU@9ob)(|B~B5KG}vzq%6{@l zytn#Ie5AU$Q}cIA!UPNc|B{ggRIjU6p|&=^*+uz3ie=cQk$3;^8z8JD5nu|V$}2h+ zCM7sMK@2J8xj?PdX`7Mwe5z`NCrBkn=k@?hx^`_Ob!sf#!Lag!J=agVL-kHJ`2hBu zg42MFS)fGFgf5D*!~E=t>FU1zw$`Fo4x~LS{)Y!vyr4qUI?YK$oBF>rrS%)@idXlr zt{IFEXOi<+EmyX2@(5rOL_cU4xhX5bL`$!(rmxGj!t_n43)B6;*RbLQ^7rFBj?SKS z73h~e?2jDVHI+VXD~yRE8-cuHCFon6&m-*M*l}yJFbLyAX^FWt_vt52QbV?!m5c1N>XI6ycuT_cA2#L(j&gO*RH`dXyC>LdPF+$-m)GDZ3lux^5KX`osjKSpYTK+h z1r<=-(xfvfHU2buH(FTAq4q8e-}tk{S?Yy)3Dd1YCg?}Lm)7D`){3p#Rf_VHtqFSf zwbOAroP>?!JwZH%SY8w-ri^YXZm}g^yGE;VJ%*{hn0&ganCma?1V}AJ<+im5%R-Hv zb{Y$4A2D2^dJQ~zFK~hGZc@BIasdrz`<;(Nj)(lYd^(K(4p#a1!24YFH9!yJRNeXK z)eRoLeI?@D-2v;h;74>mF*c|!M%6{LVNmR^!z#l}ojv_{2LM1%Z{#L(akNo_$MyW` zRdk=p^0abopRle+qj~h@h(jMLk?L@*c&keVJK`&TK5Z@i%Oe8umK}VrPm7fqos32= z7A|Q<6BYVsn=TRCV7-?R44)@*VvLIILgB910Y;s#1+?@WP|@h>QXPuCZxIz}GyvKwQ9VQ*WaPQNiPb@)6; z6uH@h96d+`IN~L;mX=f6u`yw7PwgWrR!q1875Rsn$RKa<;O}oghqOkX0S-nfM*67K zs8#D!YL9Iyh?eut4#KW@hR(0=v`PTUK6K%VT+~{`kYDbu;n7|Xk$uV;Jlb@ zf2$zUnMdOx*CtuhB;y6wPiWq#*?jaF<6*^uIg!`({4Cu}Z%a1vvfR8gXgfCRGCA-1 
zI%@gwN`PeK_r2)}=Cg6{szoL{Yn69hQhWEe5DeHP%{Ja~VS;Wvt8aMBq_%D+n zd&ueB{rGwJ@*^P5b&DkOJduxh>>1_3@73&S@3K)OweyAF&AVtm{kj8R-fQC9@Eswn zzh;sYb_fdnCKw~SGE(c-p9U$KW?oE|ryu`gV`U6YTJ|*ZO#@`o?R2qiA1=OW1hHVB zt1{=bM-*`Sm30pE(HL2mfyZ#pWpHn(Dn|4e^8trtc|XBpN%$1l(O(ne&mWa;d@!d=ga zh}PV>Sg#Z$87203erzP?&%GPd8zZYMO#4g*YZHhxY%*UNMF!d8sg~6GpQkSjQ*?=t zF}A}|)nsgb>%cuM3=7V5b$G>C)sr%HZdtmNib?=$#@MIMrx(71DbT=SDP9g7t2)cy zo|718!)1RvRh73lU_dBIjEbnbgt;sml4}p?hh41!t3V7yUiw8$6t*4Ud0b__OCbc# z<_?dCwaq5x)7yZQ9Xr4-P!>^?Dc1%6s4J4oJG1>j&k8Gf@e^Oq>M|bs&-QjBE{{pG z2cFu9Z66~7L;SLJ#S-VEDIYS#2Px+ni;9_^6ho^!Uisxi8<&4vigM#@WG{8rpN{6?X{c8 z0Z_Sz{@Jo6gs=AoZ`MDDVSJjm%S~T&!$HAPil@vO1sq%D?W$Tf`3(D(93necpkhQv zq_$9bN{j{Q?*u#ysT)~YPJIuy+k|_CI0eIaPQZ1;B$_10v>jx2KLT-`zEp5Ty`Mqv z!Fs8tt(E$07jAW%t!wviJP+^HysW^OzeyaPjTJ`4dT1d|EFzL{%h<)5{PB*wo>sSM zgq8~x#Nr>s##BWp(0ttA6A$^e>U3!=-$y&`C=jUtnmr=#Lt1@9xeFLzE&o_(L%<_#kLSw`$lF0bDX5h(P8GleuRn1 zL&6v|T&2tJ`<%7<6cBN8Y_L=kv|BSS-yzxWa&rp2Fn^W#2x7xuT?Y3gtsE2s!~Wg| z*1nO9m--w0)4{(#mLsYXI7rmC1kC`?nU%M|9i*InNl0JX%V_j>!ygZG4!qB6gF-5y?oFR>N9&p>pH7| zhZsVRLP$hJMEsS86z2w=ukFuLg5XZS!;9HR6p9cJ6>^jYcp74K1)250h2u-NwFSJD z<5NS6Oz{Q7zhbLPWDrU3Q3t1;G7N6T1(zp(zN z8M8p~|C*L*h3$_#3Mj`>5h7vPy`G_t7>o1yZ(OlRz%OVDfg89S{MiL8E$6%1#YC zC|H5BnKsNi?slO6gf9D~3v*7?s0YWtep=yPp~usNs6Kbo zeeqDO%a=`45B);w$o%Tv4C{_8Ydx3yrtiF~SzgZan##{0uY!g*i65w=MyAIHuZT*? z9qPB&l6SzXKl2Fw;|rKE7ujlK_&BVrV9l{QTvWhKdex|z8EQ|SdiIcN3;$;<6w>z3 zK9LWe#CKk1f!_&3_Sn+-9dLfg3)}^mb_yiV)czd5$Fyb8fgMvMa#p}` z3yP~&!wZMKx=qdSHGzyt1Nd@qj^AX;swBIh?k8Uwhi15ulNFvxFcoE)4MQ$a*^c07 zcgwqZh-Rqq12N&AzHsq!5pA$rKI8d$IX$A)B;lOWRP2)weuZbQtSVyEE~D0Or}q^` z3k04R$uqR8qkemF)e>Lo6BTP|y>(gswOi&-OyoeuVQ#R)so|sqpZ6|J4%TPghp@nz z8P$&F7t|yjJK7sGtMwB>u8@<*n}I_Td-&P57Z$;nRyy~V!S>X~;V@GvYCmT(b8h#V zLM_t&;173bI_r23r`Hk`DO+A|*cXNl8OsvvyupaG0-5Jb<5BVGcfCp-(-mQZ3auGp(im-zc?eTkZLBa0RA#G4CR6`qHo)&cI-s7Z@?7gWQz> z$GC12o&LiY7xH;eV1<>=fm4EbUZV<>yK@w4&1FWKZlS2I`A=-wGE5Y{eOT?wR>O$< z_}^#8l-&OHiHi1LhndKLO$Prpi?`_i4zU0KE*R;{a|wODZ%S1D~7-~R;o7;jQzm;oOCPE;9{NM-){Zk|sQcgG7Y z9@W2ne+u&ZpX4xML-YUTf_N5EP&-UtE!OFofi!k#+`|TN+t_Y&h2A!=PO!n!iC0US z{?ddy&O+H-u3)~a>~?sGABe8yXX?_&l!f>xeU;DGHTM@Y6#ts6BTxPA6IhzGIWhnus?Ub_`L!e&mr!C)^-NxF(t=onAY<=?=0s|rr%d|gY--jJBYf8N)+-J2Q46NYZzfw{f*@qy1r#5mBUUci9-*+$0=K+iy#AyW~a zK5e6`wmze*SQ4K5jfWZEO~Jnb{2SARrN054&%@Yw7S!d%X2tX5HE{(o_`=8i?X1no zPMGvd+Ce2juMI((oAE=(t~@Wy?Tuvzkq}!s)m$v*4ANRkkZDUw2Kd{L{z$#D(Xg5` zF-|sdC31272GsC!z1?)=S}EzVo!_(e+xeUpV-O|YOe&;fZpCd1fNTW2Ajlro^`m87 zCTeJ?CmE;NiAtc@aXIDG{s#(Qzq+eeq6KAG+$ED!qYpf#TikKqdrBYB_Ui~HW5{Rb z-1YA5$`9;osKkWAUcQbVWlqco3X%$fFV%J}z;@)OAJf=zYVlea48{J|t4Xa=b9n}*l|K0k&Ox3}g?z+(@yYjcNE??mA3DlLq=IdhL? 
z`_y9@rPWREjrpRE#68}?@|ra<``A*$BC2Y$8BJtaZOq2HxnuQz)8ZvTG=DYw-nprF z!*vg5UJ~BiB-*!ptBJ(jlM_P0KG^6O;6JK0+-7avcjq==hlRE^oj6H#a7V;4X)YAg z3hQuWn#0qx5d-Dzm`yGDAyi^J)91_sJ4i1UeTMiVla@Kw1C(9*6-P+nFuta!@YtotSD@7^l4KDYX5zxZ%ht)gY?86hP2WA zH8d@U)zpbfr_;3jvqi*HmTcf*T`nq!&kjcUl06vX4h(P+r=B|bPKtVJebu!yU!CLU z!2h1>`vkkMciLgeXw3j?eMHtVGL1%m+mNF8GjtGlW=S3qJAH(2b1^bQ7de3vJE80x16Q}@rTIN@8#!0?4` zwkjDOyT4D0BM~wUMd`pPnvg6DQ@>|Mh(U@E0Hvs)p}5`TmBt*+fcb*_iSqW^Z&|vv zBpu@Hf{%Rl?K>-}{|zCr1>c0qvQIO;iQf6^^2x9&4&pMzdyer6z9I7+fM z1)my2m`#E;XjbYFIzg=0$VTIJ9Jf*n7=iGm$b+S)fh6jXqWP(MZJrp%N0UwMQO~(v zD2kfHo+-Ox#y{ry=Hgml!Elo>{Oi82{D~JY5tgz<_?JUE{?2;iRIGMMizw`2ds^zW zN&(F;*yvA7ue<8waO{qfaGk1x{$pXsUOx8oQ^3i&%=AJNPwxch?*6fjHf~)*gSd}g zwV_z>Hgqg&l`BXx?Ay!PN`LeCs$h}UPznaezkN`n7{yHF4p#Tc)lUw&Bp`Y0%&&v> zv}+92G@FI*Tp?kT@Qooro!gnud9%>$$Y726xHT!+Ixz;w?@}3!&UMWgH8suAU4LW9 zHI35Mef5!HWqi8@ke}ALr$cyf5g4=g;UFpbzASEFZOuK5Y=EVrQ)Xbct<77~@xl4R zJ0;O&;4IvQ5jq{{jCZr#&%)D`87NNjc9yZ*tIDI%QDFbD?O2d%rSGO-u9ArUu-B5N z>3U_(GFPHcZ_z#LI?71)rMR=L^Y!epj(WsB z`O~&e8-msqV;?1+CXKN>kuFIJ#&diCS#wa>YMT1i9!-M)a8(6!!{^ zqx}r8r;R8CD=uBxJ%t|;97U}YIp6d*vj0g9zw1L(~f=UyyT8mL7%jeF*D(hwt3@n?f4C* zs9+-}iP{wBD%-L(09WKjk2IN_jEaO_s5%*P4!mtQ^g-+JCLnAqGnLYs@>O zpH4vD;!B;Cg^oM1*${&+!JNsTicQHbh~JFaEFg%FKt;EhdSSk>q1&1VYS7YN{T^hK z3ENI4xulnB<>OLZOyWUQ=vZ0JN z9_>+r0-+#z=XRN&H5rnlB0TXPqW99-2YHAZUYjmb+lb^`bk8bRiC_-OnG#|*kRLXiE)4~qYwGTavH|O7)O^_ zf5jf1yvhvo>zKpR-;ranG$}kmCD7q{oe{of^e@1I3IsD>3M8U zz6io#qr1j~lxnxXnWzesINaK)I^*9R(6vU8?C3>1(}xZEhGnTU`_fQ|EBg8MJZ? zaXX;67n93^{VWTTBgYgeUt{cHm)+vRRbrUP^BHTj*S9kgc;O=n71qaItZ~rOW=;@XN}$@w*X|pc+AR3?$YUO(`=2$Nl!w?SYxZL zGSiRR-TT<&Zr`@R@Rs8;gY$J$)PBS8Jv;|SWdHg?;0YpDjyIAUlWY#qrK~a|RBsBs zVxSooBuI#?*!b}~U*-kpu#d=&*Td1qq`mD=>Cm<&d>gimD9BouAhu}X?XR%sRaFiYegKHI6goMf{t#Z6JQSShC z2jm7{E0bE75~(!G|6(@L+Naq@nInr1HmNlMvMFN)OFIwY0jWAnSDkTlPD~8a?ifS_ zM}T>ld1N*6^3+H@%Rp1{cBYoYKBAHMB>@W9sV&&LmG@dpEsQm~a4Jh;Mfcvyz;w*? 
zR+gp8%9n%pCAus@QV3q(BftIr6&D@_Tf9ze4#(}l)xq2|;&oCV!@+mwY4FmczMSotnhxO`K=woUf_0!02x9 zfGuZSCrdi~wEo-lHjec*g;OrAGn+fP%gV$ZuWGT5WVF_pvhf&aG|HP%%z~#|yF)lD zIf1D5*8~_H5d*j36<&w8w$8I@4~s`6kg|MwSGtKP5VI3|!>};`w9puJj~HV7;TBo| zsIx;v8K~%1&3HzXw~35t=XB<~uAbEBxOs(!_*%SHUyfq&2%V4%wiOQ`06y+w;T=JH zomeU3n=rY_D#E9AUDMXJd+-MLxp9S4U}>OX(wV-5~Q*H!P#|@9k zC!I*o>a)IYTm?7G)_>c9z3D zTKlF37!=QM=D%mbwcK}PXtEc9-y_>l6QsM@Nx-oI#e>e#a!#Ge)97|{!;tte@mpNe zmf5#h#@`cX^7MW;HP!-3(6+Ds{(b-1>dyS}v^(ZzNv!Yn$=yh@)DpEcCpOQ>?Pdc{ z5H*mq)|xt?cCTN-4CyA$XL&4QxGGEW-r2aF{tGXGy?a`QojB*pvl?voo*@SMc07v6 z-vU{5m`tjT^Zd`WyX!9!!Z?}=M&UhtMBvs(I8RSUq#cZg1C6UZjOH;mp$RK;Ice6(bwlkw4| zE-p3TkO{qGc@%lwjVsL8_C+<81C{qSe!=&fKAmU!kA@We9hqwX1KKa|)|Q^a)!zfR zLYfG$*!su_D;nm-peBBY25fUx9^$u#mhbXt(G zq~Rho0&(@JO(<*AOOI}ET5CDEr8uyJrHie09su7B5kTG5#RKX|eIQWjTvbK-V=_5w zp!Ig;7Ig@P0s3(3{7E#xR!O;=M_ZXO!WfFP*O56qyVY30%oO>mba2UA5F*ZLwq`=hRcoAu&hh5P*_M=Mqq$LRKw zxt2U{r7sHUAd7VQ!O5Z|e0%&1NuOS4{KW5?PhN+eg`ZY6tZUhhgPq!3A8=_XKV=05 zNM6Z01W|GX1mi|_cT7pI4?CE55zkCx7y!)&C;_bJ*DP&kOT=A>Y$t{kYluKk8^xYB z;0RxeYNSyz&xScC!qMr_d*I6C6{plm0j!?0$Fd`NF@k_bGFIOlJ#J6`?B6%|RB3w{ zc^NES&_MFCg1N4b7F_IAjJ3J6zy?db*Vg%*v1p2&Q~TtrfRdV;dm-;V~0@o;NQ;Lk<{AUxO@Ob=O;-%8XEZ%taa>$-2x*YjxL$ zt9vVZle;-~ zB(IL-MElHh%bNMIP~LHFxBIH8p;s{J$zSKY9KR5_+t#bXIjELGk7+$rhOVs+0fWoX zfJVcr?@GzF$js&4OJFPOv$i?4e@^Z8;=$}FQPw*LodY}2;6TizogU&1Ntbi&8P{pk z?1}EAw!{BL++PO8(S6aQFoY1?g1ZKHcajh!2^!qpoxyEzf`<^?f&|weh8+TDG_xx?f#oIeZX?l#L@I)-T1`!%Iw z+M(kERor~@vU@A@yiv7Zt!hT9UwKx=ywV4LHh)zXu|M+rqFSsMo8uX;8zl9spZ5@W zAm)c?-MF7vPkA4P`_h;Zf%)m-51>3XgXp=aI187$jLfz{@m1ut(UG2yq7Ta@#ZH;a ztE@XFj0|4~8y^6N7Ogz7<97YxLeFV#*Nt`ft_ z;zRG--|e=cnzbOX+;0BpyYT6~=itk^`J1c5uKHtpVEN=j#X&&IGI!{82(UKFBSFS3 zaLAc&-~C-E1AjFH-y}J2vpULqUNn6?$vD~conUUUixnYv7VE5SgD;i}*!A)#(mPp8 zX^51E0W#&bU#IfcqYa)`R`9vbn$$E55c|ibh#2PM-2c=nH-Bg}bsv%&(O|&L-fIvp zmY$h+$koMrO5w!lr$h}4me06WPDf6QNbNVzMSwBhM?uiKZyoEO82l0Hk10k@caVx; z3&0~H0oBzo!bUWCY}cRa_TVW>&~uMHoZI`529g$v>0Er>#D9_EcVFKUHx#wl-<*1o;&&`2Hz)UnN&J> zre`iK8TdxRB(B*@+&-ytQixRXv)UY^ZNYdBd?L!%@)uv?6%zr%mr3c9-DcYhN}Xt^`_g=hQm5>E3yKxq%-M8l!tO%AwltCliIb-?dba5hUAdP zfx>59gr5NidA{Um)>`I6^em@73;XW8S`O@;;`~17bD1)dOh@1CIrZBR*Q*pCxLPTL z4X(x>BGL!Ug&<~RDk|t_B(RKDKF;|@FyC!d3NE zj{9~@Se^KD#eUsu>d4zzCiIi2`@3_oQOFluowUB|_1HsE0FUrku;) z0&NL?p!UumbI~0}vJMYRw^Gb}8_={0g5%-9^*y@&4vXxRD8fagt8W@m&-g4}&LXlc`784H%7(%G?8D6dUv} z3E;71u1#Z$VfqMDwDUW|*SgYQ;(Wiq*5s%P3NdL53iDxhcRv)a02sV`+&(mayzI!j zaN+6x{gxEP6pz*HzHoo`*D*PqzSluk%4v`NKtq!L>l7Ha=F|KAx3m=}zD4sJed77; zzsk}7p<48Y<1#|*eU?|%8lryGHzrQMl4-h~0F=~F0rn9lxMjKp6QDV3Bv;D^L*?^)pEG6HIJF4+;gpYItn!rHG zI}IEi@H_KDL>X{z^?wj@@Mz%f|8r9ILh%IR%hR-ilNLbem4tBQ!)?HV(^s~j4eBKg9SJ@M#cU_=`s{t&J$5;koHO0ze?4}`-JEjr|MYbcMj^eim6N}vp zwUgDO&C9i)n)6Sk)a=F$-`!T06ls1Nl7$OZ^0=F7hPVOlN;dE+kNDTG<_`uv?!@lj zqs17RD|7`P5x232J20rX{m!5FEp;U7a;N<&CeA)HlzT*n6awr`SE&q>%19p83 z++B}8*StEu3{b-*Gk8Sr?$#H$tg#L1tzXYw%zmh=&hB=m&{>EN zob=*$iek1<)8iVTPssu(?N(5celN$na}nT6i}21*fjNrCM?xIb&@dzJGo5RDhk}!6 zT+4eaI!=q3U6B#CKlRf%RWDcbnI@2q)+03L^aSYJzFSQ_oGv!-g$0R|aUDH~-D-6p zSG;F;8oLQA<7hYkk^)vbm&UKOjX)TgcKf+RkH3O^kPYelCAaZR_`?)SoLr@t=ugZK z_LR@sZ^?Vf^xkm%*h-RZ;qWT>{V?LGm1W`}0T}oieY}feXzKNf<4mi!<$*c&Ku+&( z5skf6pA_BuN|Ob8;1|0Gy-8MKvw#(y&=0_R{cJGh)y;3NB?;#!4qZNfF0jFeDI1VS zA2858_E+XaZJ=66{_58Y)*AdZ`v`Q((*NpwZ;?|IL0ENBq!Gk=6b61qdS62)6H-ID zLg%rIILgWGLSLs|{-{kYWfEF71aDcZcuU>Jmk<+1$vGQfEH_Dr219D!tCAIBau}e# z63SO|Hq78k=Q>N^;!TWsuUu#K_Wtfl$yu@384ea4&z~<{ua55o-|^rR{i#ClDidcb zF-}fI%WgqZX+3-x>itgO5;yd^E`pFwKr2?Em=x`>U!PGg2~GDuw0Tkf_E-80Y_a29 zzJ;nXZC{{~eR4>C1(Q?Eo8EEk!Me97?6_r#x2HnOayOuXo*7!DmyezCf{e 
z{WC@#tT^cuJvSWs^##FPvD3Ry+R!?ZvNjLFU?9N6`~B;t>t_hhO|V(6_e2yxmH|K)OpZS#;$8Zn@+VDz zm0k2J>9T*ys$-agKkSWw;29soMpl5^CQH6hygGBg7!l|prvb?&6WpWYNqoJl-)Vms z`HL>9n{=a|@@EmJW!D8|9Rhv2ZV5Q;&pw9ZhLw>xc*6VjJErXYhRIu!suK<0v-6jP z2~B{Ms*92x!(ZIPQ)c?G4CLC(RPnvJbOf+;29$5YuV=fWzj1y!XG9G)0!S(-Y#kHnu_#l{3~S`A2^>A>O$?+?dY3UZ;od0usk#H;Bx$mdW_^V z?wlB3R>lOC5P(gh-b^iGp{d(~kG`Lxor_<&VFKDn&9k38ahW@J>U|lWi`?HHI;!rIJG`|@O_-J-Qu<=eR|C8pZ4oE zRzHpSa#TVu$Q_EJ?|VlFhe7G$1NuFlZZlClZ@@+D9Rg|B?#0dR*AlnJXkBebFsnaB zb8?uMWj7@Kg;X#^f+?30Nl?CX)U{X2=TN<{`tq49Ui zYr41{tjsiJn*AM`X&?OO(9fS;NMXvqzaM1VvSB5|zuP$}>i_E) zcUeya8Xv+7rz&0SKci2VfWCfbuq;qnKuSg9yU_k-mGX}h#)&^6g1_7UVe$X7=l{M# zx+3HCGSnVzh%y6l1)!n-7WtESt)9RaY7OS{KcY$4QHT35%g1O^Tm#mHU{gQET>mKa zXUDa0RV!eJWnpsPW=3d#rfvEE1_Hi?KVwgIKuaOrYI7r{I^9}~^hy^QIb@B}T^+Wr z!j_{B_Fq43V38S5?ZOo8-+Jj`mIcj|4^SwICznG2pP;7i~rQ80F2< z;{6X||GUGa^Esoh%VfY;G79Oh-U%ML9&RF74o4vN*n%-O1}?FzFf!KW2J8=XH~*|o zMyNA&)77@06zXdKtt;=m3Z4vkUm{~IWjS+W+0Xmya;s-3^mc;b-WG)7SG)dPghb(N znZd6g*eVi@alF{$5bIZ`;)-Nb(wF)bAs*Gyw!D|rGpA2}>`5i^_JE(oEiEBLdP~zo z)|?! z+s~U^yWi{I?Q`VBB7pq5TGu%Wv~#It7!Ye6bT9%!Fxi zrtl_zCayAby;bOnbV?00s~uOv!@`2vGdcBIB&28!BnvRz+rWf}puLEl%a{ASAI2)N zL; zxV;kzhyHNBY)F}&q~5YrMlo~H(JEx*>8ZeKI7NE^Tl2U`Y$H?g+BNh?-lio8O>t7asf~#*zfl zS^ZWNx^Y5{&!#4&x6ZAd*XIs6LsbKpRY)_cSu{5z1~@ZjFOtyhnc zK}|_TTQ+mZn)P!9&s^x~A#1WYaTD9m?l(W6)FkB{|0D0=ib~w*!awkOz$5x17!=Ny%mp zb%15d_%s@Z`mg9x=<4`urud{Ecx9FU-fEtJrJY^$W9$D<3uqqtu-`r43C;aYW|VaD zC}VN6kdSn8u(UkkMgI<2@H#s-AIsMidzj>IKU`95*7Scyu<2#TY5O}NiF2d>;~E7* z6e)LfZ!U1B$ozj1mvY)B?EOFO^yivw-O%@!b4{?noTI=LxVGv9(-$t1w}cKU_}*H@ zA}4>onU@a>(=HV%Z>*DsJGoW~p4@bvEh>vY^5QW@-M05G$eRcXR6!C9vNUUU8ecugfTN|?`uEh;)95xkU z?=-NFiTwaMr=%=6ncpY!0pr?wY+;rQpS~cqRt(AVzQS$o6Dh6UVqJ@VSLxPTvSrsNtM#zb6eLX_U0Y6}wgD%#J ziDxT5EM&h4^yoh_x}R|*n5w^bsLtlnjfIKPe7%?kww5S{%Wp?a_q&P&z^j98uNqtL z6hTpmWG3M8x4qD4{JWVxL8vcQ0sHIaS}vFzaK8ULOru-Ptf@Amu%(H8b*^cyrL4wD zro?k8px&!F@_Wu%W*?mci(WK_J2WyhE7Ht6gr{*&ex5(Ggrz6bAchhK*{Vu898#F> zh6EE~VRp;*1+Zsh=QcdWnCVLFtFO8SN!#&%c0ca$B&J-<%fx82?LCD7uB%i$DQ)+& z?wVN-Ry4>xL~)X}K48ybIcUJasCOjG+7$>CtwQYnzFN9%6%xwdeU=TaM6Ag%!S3G=uK#WmyRL zYbo@oBAMsGDNtp4T|-jxJ`OM>xJ8m7>cL6ZbR3 zD0x%WoNXi^ev|nUqZp>&TNRE0Ecd#byjXz#J)=*Et#)QJj!L^&M6-4m9-1aKP9kWG zAT7k6v}dXc2s*oNK3fO%pWUv+nkfOb1VkeVZ+edo4Dx*9%P1OpZ(G}H89rS{^WGXSt8!M$82NXN+R>B~tFH%!Ix#tVD7M|}N zl=jT$%I*WfPigq)P8Fvsns%so7#zJqL&)Fwrq;62HCzPEFng>p%3M}>`?Z|WDqo3X z9%SPnt2dm;0X~T?Xxe>($6ObJ)7dTF0l;K&l@)Ot4O&91k;4e)uuMWxn^_;}&i+8g zEN|&)s>JUxtW^I%NWxb`Fur7QT^LQvzoH8M_cPYP>l+rln%3TRsOkqcY@Z1kQ^7Uo ziQ?MCbAB#M5B6BwM{;@N=3t6j>IH{H|4esa8EEDqYryg0ln;}yP4NWTkWUOa zHKjW>>_*}%Zh67A?I7wjP_0dBc`j*Q9&K4_ze4qL-HlKIrp$M<#e$ouGV75@6Y5ux zG{K$;0;=RTOe!}I z38eqRSA*q$Ze!9e*lQ8Ay$r5y+EDJr^nU|JH^PSLHl-2=Uio9cp!LQW69EmCf z5plOB_a^|U? 
zsp7Um91e_-IlUBu+yHW7Gpx#HJ6it`JIR(X-K9%BHC{pj*X6aaxm;$n4S=o5tws8f zqhywxiMtgJq}Eh)FJbRSZk^xI0PoNv>;`|r^uhq+z%N>F2sHORIS{Jc-^q;Cv}Buq zGvuXPQes8%rAt$U^V+(+6Lh5aCML`ODlMp;>V+O=L8@O*sD_B8SLbcw!ZTYC7;+6Q z`%%@9vLz`#EwN55OR&y>9>BKAx-j?pEF0{|=Gkbmg$|2h;&pL|<;JYNU*3-BPKatf z>EolRb6{}tdB2l1Lreuo4?p;Co5I!zs`?Pxa`*Rm)&|5yZY+)e+7;qd%c*`*7&L80 zm(~4^bTlYWO7f$4t-;&IZCDdv%um0*I;KoMG>e9$0eZZ%>$={KO(D_NBjsMitbC8t zL(^pl2dlR?wg4PwaI?c;_i1L@8t@h=Q&yJgcJibIqnl<6iiJF!*unBl~#S(lze z-9AG+NGy>&_bQ$kiXo6vok?|-s!gEKn$d}GNV)u`33nO`lf(Ql)%b-2M-N1?E?&%) zATyTD2a~92MT;T}{nuH+SFx6ts1`LM2Nie0$Bjf8L3UF_*7u~j04G>4(kGQUB9{0z zBny|HH)M%7;4f-ivzkEuM>(`FU7{w&K21XrxD05eMLJ)`Vf{#@!%@??8MN%t7wHqm z?(u`{0(DLlkIGd-T|X%q5hfPthUS*t|18HIe<>I2|6z^N>FMd6V;1$&oyz;Byy!u}s=?$Hl+U*zqlw54yI>|)pHbNO3=&r{6*|8(gLIUlb0GB>a&!;Ud4Z-OQ~ zV$e5WuSUu84uL_H(*cMKg$@kPsYd-6VEeCW6?SQHOQm3}_=WopVn5(LdUmx%mqa#p zn}lDegwJGWV!Km*x!qb?oAA3cP#L+#3u&rS5>PFicpHSr0m~CcOTu2A`VW8Rg*~OO z?h18p8Sx&utnTX*97;ugGqA00f)ukhLx{VC3)w!;Ybi|c!S!^>NB##WKUXu$x`m&1 z{W}M{dW$&GWe>{>$J!r=IaQU|!7Gi^)nl)T+Lx{re<*y zL*2SDg(rUNNiU4|(Z~eEh|!IUH2dwCDhgzS7QG}1LmmFnwqyH2Mh1<0pIY69%nk&- zBDN+hP(r?24_aK`btFOB4uWntmbu_sFRs8Yx>_o<7}H#)^xo_?=(WDlx<{20qc9SP z04D!8L{ua_*ufP{l5G}SCOhI@%mYiuH_$P64`3^?v%Cx`zEPw3Rt!P_i z2&VOp<-8mZxM%kWq}&7s7NBv-jkK&Mvb_;g1mAFmlDGs4lTWJ)%xgG9esFwQ87bqHp^3 zk=0fm3y>c>*8Si-rbH$CX9{lvpE?o+pbLX4NaA#p;}OR*R^q_jSCax+8>65C2-!_C zu7rg7Bz}XsXCeHK6jb zvOM*Lj6Eoz#LC7P_v{Yg`s<4(Ry+sP6+i$?H?FUqWuNpfk(4DopEH~}j`8WV0p5JB zE*NpVJSW~;5*hZ+sySH*pdB;IBKJcXxj#Y%@@_GEH9*!et$!(rjs#fP!!qTWaac17 z33t zb^pf${YHH1yLwAb*mTm`{J@5pAxPd5X-C`jA&+GG+mHxln@n5S6 z7{nGf3mqeXD<|?Gr>$V!Sta;i3TQ9^<0dFGgyashXMJ3@99?G9vdZa4PUWy_Kl~e? z+f;Ebxzhx9n0huo%+d;lSi*7Vn$lz6(p6F5e<}Ly7jYbP5$82*i{aoL2{wyi=PkL< zk|5~lZX=AFnz5UnQPlDUvG6r8r5@3HVA>t4yZ+eyRYwZOQpa~|R)A&1I!sFdhm^mz z{u^-&c40|aL?lDVHdZKO<0Ox5zb+Q`suS!%+gK}S4i+7)v#h22OPBg`tglER3EMRW zmQMsMzG(p~cDGmyw!4Z&onQI>zdNM^!_FrF>@_kC3E}Bf$ijp|f9#vPSDB+VV2WTw zi)Eg@`nO~ezRp4pU38pUKHL3RzTG&`)}6vb%}(hkmRph0yYfb|NfYoz8%xlcZ8gmG zM?UqpBA&0!W>rRsQfhIpwZ+ip^}zSdR$?Z|`2xZ|6V3M6S1u|U(M)c%W=$#;$86O< zE7_;>LtMA0^QwelLo-%OUye&IPAm|`TSXZNt1_5-wu4UZPlQ*67gwFlE|_lf552;D z9Rx7+>{JRG6ur8zHU1jrF20v9=fNRu+jf!0nGk@!dl9H3wR;FOzsI;byX}w{y|5M+ zi6vi;#?2_1FyNX+{x*mJmW~L+#mpQG`2eI64R+Rn&%CcaGxQuN^~iso)5S`j=~h9mk2uolMIg{TyPYY-QvievlfAwlw;CqPnxm$h; zQKt8Ia5bOX_lEV^o*Z$QQ=8r6_dSxLGt zrfgkmFEPQIu-xX*hE5ii7hp2yp1Eo0ay`i%aO))B^ukhc=|d6c*a8q2fy86 z@8^<{O?2MbCn`w5-9rx-fi7fIz5l4)(0 z<&j5&;v6+Jp{Yz-lrOtgIiBAW8kiv-_e-ow&Z*QVj&OX^{YO36j>wRFkn! zk-bZcf{XIlAC0YXBHqCfNBTY{&n~9SrYz1<{-oP~5S9TgWn_rBA1yRg1Fita$is)l zLcQn%(p9m05AdvPxq-T5Xy9PRv>uQ)QN%bC`6In~`Jw_S9( z57C8U;JEbqkic9cLTd?#?)apQddx3HH8-&TT6FC z7w)ZRN_s8UQWUwG8uot6j{M+zf)I;BmS(MV7-$A;wno8D+_mG+g4iy(HElS*j{MIPr$TULCaYCvDV?+3eKBn9;nZ<* z!gmX7QWW^ACtHtY04q$lQk{65F?2y26RMT-Q zDO@EDL1YiR`_`b~?d@`x2&889bY$W2s&Z)q4h|j28-~xKv35I)1!>{X1vLEp3#8t? 
zL$kEEXWwk0eAtn>-c z1yc8qyAMN3RT>fx3sscs%63SG1Zd9;nZRl9u~G?gd`7YYLBCKgMJn3ef-O-@EQoM$ z^z>6Xa_8FSkBd9D!mf;|1ogfQ-DJ(fFK@8=YrP9g-+4$1r2F_$8!v2;;<*LfpuEX_ zlj5r59zxjG2$E40)R05?Xc=`R&ui?ou!6o{LY{aiR!c4e(s6dJJ}EBbCd8kN-}bSf zWpETn;&CIx3amxp%9deut!EH$os6_Dk=9$e$2Y(CKCzyhsPlZZiq$%B>oH;eyO~A z?cP4^rRirR*%N4P9@ltah8ME^NOGPYXD}@=az}thbN=i_;Lr}XOX5(p(aMj``|plD zB)+J2k}6gbYrutw+N2G_X^tUaCP9$TaGTjwYl@XX&WY$;^vb7*Oj>PeB4X)x?;_Pl zThI`8kJY$;IshbDnVu;<|M)PmQ%Oi?QXmNU2(D<*~B3^R_zaEW#r%V(Q zX;zsxz@No$O+CCA@~vtSnU?5NbF&SDf?O~*awzAhaSA93CyqD@ot7pC@s*&=O9NC3 zTJeMX$gov;^fJK^NEV#@l&9^AG>&&nNx9^NwAd8<41qT z5{wR>PYjypJ1EcR?UazhhB^f{5}*zNKjLUVg(A$&o+GM=QD31`Yv?z>a7?F zX>J|v;%Q0&a9&`RQ`h~j6W{MQzf)QYmCXq*uM2ahX}x0KIl^!B{o8#oLXUTxz~!8QxX|%56w`Y1S|2D^b){)g3yIl!bJi z_#+^c2T~dX(v!t8W7515kI(3r)aVE~w(azL(%{caHMXfud^6H|yRqB~8SYyz6xxez zRR3uKW(0&*AvOa#5-llUv7H$Y3u8Tq5f(jnOyR_2MOaI*T~XZxa;!6EC63f$$!z3z z<(o8CS}~pqrpux=1zR_A>F(|#=FQv}V*NYO+0%o#wl;ePo4sW_7o>P=O}K)2>7qK~ z`eFJ5PDCa>hlNp4;CLji1@C~wN!NZn)S>Kk&#&7)Q+rbRbcER&O08cOyR~gNL(pUr z908GC$>XlQ_x%oSHngd2zC>?&8ig%yGo>GfxcWWGHirX(p|Bw(~#D4KC6~jPCo)6-Mp*kv5$+5acu7 z^T8j#SlK_82IZ5;lg|am`0kw!E!cOTflQQOW&HU$I-K^$*ZbU6mY=veOA0QHc604j&%|mo$9hpA| zSJf%2xjqn{{OUMx`TU-e-*&b*na^w(X}SIu_f3|7XOO{9VtIF2Xom1!@WVh_<=qdJ z&&?&*%%VIY;o0^zWl)ov7Ch+=Dbbm^wT|v4bK!>^!Fw=P?zSf-{Zi@&pQ6P6{;*aV z>&gZRd5Z186H(2e7(xb4S_k{fMHG*$x!~w>uJyEJT`-T0*RvL^rSQ3Fdk=$H`dWB6 zo^1xlof)m&p4h9Ln<9jhN5QDVJ)#h<$8mPsa`(&5N9Z@SV0;{Hl2Kqj+M6}^r4=X+WH*z$1 zIXSuk+Uxl6Zm5RZGC~Ve`Kgo9WOy_d^QDy=39h6FmAIgyqSsUrkS9)M$L0am?RIK= zXmAH}S5#dglZ~=s5q8!2F56)ND=tiA?%f7bPCH?^#hcZS)Yls(Rz><*I-Y4_NHyql zu?O?jR@anJlc2*euE%(^;t_>|`C!+5O6ld#USCgV-Mff@en_dU%{WaU!oG}$bD~Cm z`|e>ve9sJ&uI;7Z%k7EDnFdbLr%{rb#C`mDcg<;SGhQPAdt4j;V#c^Rh1a#dOSFEB za|8U)k}&I+bWz1T?$#nfar%601vHvW~^SuVQTPP0BSm1}pPhOfw+?%(HmdA$6zqMch}<_yuftLhof3R zzouuVg4~>0x~}Gq@Abis!%f__Msc|Z;5mip+k4@FihvXr?-}kZywks*`pU zChFYnIK&m7K!%n`&@!H>e^?N^(Xk-eDJQ z-CNcQEiHQPKkh0Khwic@!3g_GUV6>*KaqP#AD`*ihXc)@wQ8U{3u>U>tp|nMVTc;5 zu%>sOa0QWe!2*WSrDol2e(|~ZIc7`51SF<)TeotVk)vb@#lfA2tWLXS9uoTk%rxq-EpvQQ?=f@q12aRI zPkFsU8V7Y)49q9H?ffZ>RP$*rmTbQIOu0wctgx&q+wzrv&QztUeMe?n;nyVj?^o{) z*5Wm-B=-RlUr&W0%r7V^KDZ2;u=^uqs#MmX(_o{o2CHakyd(P(>-oKj8Ipo<5fQ7Y zCx|l$I6VWmMvC=+F4UP{@szryUOit?n2E%gMx*FH>W=4NgT2s2gt;6+NfljY`_fcT z@G{U|W_}PaYTtIJR8{fJDtE9n?cx_F8e%=YX6rQ4h(O)<)5NJydaxr7Ue7}n^v@V# zvbymGYe>~*aM7u;4}yNad%peoKIIK>5?-+$l4Ax!Q%t{vY|ABE9)Gn;a-)+-GcNUW z`WSj(CA@zFlN`QK<^J{lt=-OZldhfqo~P76+eHrJE|~*Rz*SzarS+>Xda0NDM!J;f z667_Sx*3ne($*;CiOa=mU{FhFc2o5($|bMvpa)2c=AnRB_Cqc1($ANbrG*_IatJXGLU zcMmp)y;WVESdT!iR?`_>Z^im?s;eX=pl$?OW_D0z;4CMd%B>IAZy>>+Z^aBVwxUbV zo-EfZ*c(i}4i^o-t2bai)?v=8fu}k@PQHd~*C}uk7T+lRiMo%zRnebeo>gnFzKlE! zQlYVF4HtvNlzqJ$y=RCZy}zC~-?~2`|JbT$Wq@P`m4rn1b9w*5*~M}B^BMb&m#j5gBG$Ld_BQ;@Xu(5jUSZ8{fNY1YNb=IRhcGIXe? 
zCszU|L{qr6G!g4I&^dC-i}E*%{ddfGP)0U?M1$Q>xoqA5h@2>$R zDEMy)BvlHy9uJX5FMR|buwUicEfyV7!Sz!i!*Qca4!O1r<{+~;(3OC@;*Q_}ab1=+ zv+%%BYWAB*n&MKtgt}bgA6v9_ekk~|!^bbhfbg=rs739E{b@Tk8fHT14MYhBweaiC z3^V)ia;xrKj}f}DEPIs=zd1(U^TdQxQ7L4}FKnP1=p*H}CoNcAzMtaaln9PMRcm-p zu9Ir?A@$7T9@1pw-TbA2G0-FzfhbP6+&t!%vvnN{Ta2WwAw~vlIzN=SDfB&V+;3wp z^8q-t(|qB`{r2#lN%U$G_M?i!$~RxNtus`piLnw5eMcAf6)^!=SY}k>bVqLHFOmLx z1o8XI*>nm%2Fr)_E!TRziCqCWs%|)IR?%fiF%u1p*cgKDq@b*t-~w0AyBy zn=(Yum}I|2AN<|>QwfnYl`zv<)`ZKAGEJ(_I}2J-(6sY$yiIt!M8(fe51oki0w=BemA z)*RZ!nP#UXQC^J%;qvKl(e#I*z^J;BgSiqybA>^~gu7pyB3k;m!<5qAADd}QiSP~P z!x1>5+oAm>snkhljN5K4cxLoDQ;{{~cJI3X3yY>(yg}Ic7A|e*p$rK)SSs}=ZUMEx zLuN)GPge5iARjlOG-nuc9t|kblwny%9R2*}WxRexBUF55-9R<6LgV2`=4 z&AKf8{S1T~LL?oUhO8l;}gX0v4 z^s(u+HhVWcqOe|*Z<`y3d|o)M!?e~;9MovaJEAvyDg-?T^ZBw3zHJ2u7il#2@&DRN zJMjUa9n@(n={A#GNB7}U?Yo)a{MvsvfG5)R`LiYDnhJe6zR6K`HqIC~kE1FzZD6I^ zdo?W;ku!2#sTM`4a_MzM*3R*G;QbHxQW zbEoj;1m?ESj+Y^l4+OM0GDNes99erD1wV28YWvZ101K$J2bUI2CJnCh%-oY_!u!4_%dp~K3msjjr^utzCN z;*G?;!*6IsEp6SlY${vsvs0LWU{Q2}Q6H&^Mwt6LL;XOMpjXaE#=HPW3_#SR$FXri zC*i%G*nLvDl5(w7?P}?~Axy;=322W>pXV`4RL`AUiV~AqDDL}hvHkrh9g!1K=J4HJ z*S^A{fO82-gvpp0YEe`r$;II5=I8dTHEUA?Hw1OlR?cMS&B83kwn(uz(JGI_0_K&fi7I5>ZFky*!!VoZH)L&o9TUc%L;?m8E}`` z@_Vn0W!n09cwzI;^)&wzIT<_$QyNfwsOxIr?ubF73km=Lu(Y;K2I=D9C}$%s@>MoO zClciD?w{UMpAKz$_z3oVBP2Iams4$h`Iw#$3WqReCJFJkY#A>xwn$fyY(R^$%Pb&o$;oZz?4zw6iluP?7_$ z;$T3S?s4|nfqz8fncY?)?q#mR&6IViHsYXZ6lNOiFp>vfmR77G(rEh~fF4KnT7@5O z+Lc3Rwuk@w4Nl=btP%;;Wu($aR(1H(=nrr$(RJAx`pwo;*6o&aK0;q#`FZ0n)Zx?} z2c73N(hl|BSTDp3-F>}D6|O~Ct0HgA!l#(Ul<8n7RxGX&@ux28;$W0uc2?UIZiw#) zE#8~jxdU0PlyP+*50PK8+B*YSa~Jw>NFcRly^%3Zd9_jtSvGmmh*u9O2x1BiCNAtz z0W$o?ukW}@&~yOr#gAgi3Uw|5Z;^sO;HY`Z4V&MRaVBXi9BQ54s2|)XAm~Ax2!04u zNRPpgh6LHCcYQNd-{4;(&IyB8N7qlkHo)E1E@3P#;77tbYnrGXz{`cD;qd3vAGaH< z%LiET)WBU;<49QxkBT?$*}E}%DH=6E^0HS;-SJ2T_luV0M+!H|^%DE6Kl-=t2P*BG zy9Z&wCT^K;)!J-hF^W*n;4^wIj;L!E`6V!);Ru{F?bV`;c-+{eBPC=4zsP@?a@+XK;^ zPSYpXYQWTsopB}jZQp{0wZW}BYT6oceB1?KGZV6HZ_ep2-k9j!Af08F;*Ar&MU+L) z=4AnM_iN#hUG0N6eM;x9**qxHmrVi^_NgAjy|`UK53`D|%M~UzgbdZ$4`+X%{YR8o zA9WuF;@9dp1YIX%K^}!VW`oRcvZ;Hnjt6vyrSNxrmN2{%&wA11bWWl5z5LFp-Z_HYf2weZc#L+?9s&+qxwJH#N=D@G4qRIET8EpjaGy|0f4pG z(Ys%+jL}#fT56&FZ983KZLS-TydN~k`28Km5Qf$t%q?wLt?Rx!r4|00jiyX^dK#hq zckXMWEE1jN)U>C32ZAjmRbT^MOky9L9iQlb|U4S$C6@UADr{+ZBH) z99wd6JNkfAelp>oq-$lTexzzbVJ$HI`cHpc=!Nevt@B z$Ktq9(M*z>Kw|c(A0g(s{je`4i&jq-4$INij<*{Qn8PNy`)p8o#k zV$HeBl@cN)SMx6O909rR5GYKOgJgb7vz zMpJe)Zsa%csSfX}jZ;mG8-@a=9Wab7QvE#6U`zO5xy_@tI%Xdj+g6sgY@ngLKJOuo?#&Ad+T+mt)VgxigHQCFf@zcLLSn z;P_+T;0xfFJG|b)@*qK8bO|cg58*N2P~>pi43W!DXaXHYzYgxbtq^5jTq_K6WY3{+ zE|O<>2_LGB8)x+E2rh8Dv+`Ra%})6nu;o$H8`vz!j#;bP&S4yn9b{R<>SHcaE5E*c zCI~1Y5JoV60N)WhbvY2Owk;)XAvPVQmN?#O9B;-MfisANkJ?`=&!^sQR_L5 z8M<`}}6^<~L`~ne*(u*IIk+ z<9^$z?A)~9dwouI>nu_I&!Gtl>zYq_Pa7RAD`utNdwYfEZ6~{Zna(*-Cibso$Pk;< z;ZA=0yEH``L4jO}B0y#SpLO(yU;6uLL%~1l%3oP}5c95n|GTVz{9#T1eg_xTZ$M4_ zyRt0(QqBI!*fsVQ|1EOIQyZ%QH5+OTJK^l)BBRpSGtU=gyGN z+Qxnah;oy9C8~Lh@{efl4H_Taw~!T!ccm(~7^5C!dyXr&>w?NYSfycEu{rRWHv}Q{ zX6a3osCId2(l?B(wF1*rv!!!64sT?!38hOgEokA!y3qV?1iO~I1^uD>&d-|aiBC94Nmp3k)0#>xe&iKctPIe)Nr7T zJX3HF-;jb5B{q-RGEQhQPK6_Mr4n6M__nJwatqI!UFHh6ats8w{T-6Y$Zb`B$6P#& zCGz%jkiDcLk+Bj#mVm_sW1F^So0STFD<%?u-b4nTdBjb|j|PQsdkkCxA+RD}bRj~U z1c3pBG+uo7(fL?ILrh@YZj}%FRJ;^%nipZ=+sBEwv~4<aw@haWqXO^ypV)DkT0cyE|)_U;hU_)Q~9?!U`Ei$#1ucWOoJ z-T67WLpPDxR*s#mHWN9?j6nJeb9%|W?_^o1Vb%f}T^(P_x}7QZcI{VX;d&K8Y}a^& zUg_=1nYLj~oY7}oS}OV&kwdeljFZ@+z&M@B@&-g>Bn&#CRYz}skHgweXv1C*E0eLP z-`%lczW0W9b2r`PTDj9sGR|I6KR%u`Q>_6JLx!KV;8iYX(&KmrRc~V{f69KK7Tj4N 
z@PnR3_>5Wf<-eXR`IVbr+_Z?GS58b)9)D5KT6~ri`%J$jP^mfr;AKHuvqc_#Z_xR_Cx48@i&jJDX+LbNOfLr5+d&-d|}u z*PE8YNtMOW^dDhg466D*&NcYOGu1)ST`xW=Hy7P}J&P`jneKD&#RTXfg=~>+Fcrux zik%_(7q^FHFw>&eHG|lels3b*-bJ(&x9Bw6kuF7UQg{_lFt46$=iAQVK*|$;v`vHZ zv!BGLpM;_O_MmIcmQApTVQ)Ft2vFWmxiYeuBsa;C$Z+wGHF@5u@y)BclB?Phm)}(2 z%Emga@WG=r`gzGUTJ-SjCOJIY21Codx6XmUqs`E^dKqt+{#mC}1(KbEJ75qER^o4J zKO@#PR*cP7d;Kc!xVGwFFMtfMmZ1>#0tn(_o*up}HQQ&unMT%hnO-KrjOZ0!S!BB~ z(zLzUXx*#Sk~^F>o9mefKFYhXR$@vvYs`II%>R7~SJlvCPS7YF9v}7OHpz*PTsKoZ z9cPeb+&$~FXpK;oFuo4b;o#e9XhJmDGxJ!4uU9KhFK~aQF=cfC`bxsje<=O_HZ0lN zh%f%*VqL96`LNDC_d3_Sxz!p*6XM>dSmxIN?nRX%b%9+8a9{;kKe%a7zM~OKCI~YQ>+k@32bxPjCqEg?! z#7?#3*s?oI2(O27U!2^N&xLo&eZT#S6W1iK`*lMs1hSQrXwwEcx?QivWuFUmG)cx^ z+o`#HvafDyVB>_amr<$c86S(6xjId*^gAmW?47UDA9$~_{=?eH!!q3i2Gj-G_3oAW zDz@n*c8_+)l1Bvl9Q+W~avNW(hBWK*Q@M-hM4@8Y{qnH6P=|16sb;NqfAiMb+R#k9 z=XAmBWaZOtJKf}bZ9Mi%#`5f$$R|EYg8Y@~fol}nuO9O6 zkDH!${p=O7cvIba`GJdK*Y8zD>K^W%tEMGmMGky?KCV+~>P?mw`Uu;mr8R!2rh(d_ z<;7)UlH7`Os6|}a^Kq{cf!FJgr{jUc7e&@ftcj!*Ui0hcm1HMwS1;EII7IQPjzeb~ zyw~A0`4hb$n($dB2P3O@oV+y@d6Csp$)+hIC)32bUPF-KJP}e6rTzj$Q4 z(jY`7+1(GQZ+%Lrm=jKUAUTd3jHj=GiMnm(-*5SF@J~Z!H@963%?VxzLGeepQ?j{F z&rfQt^5VAg?Ha$mmcTX-rA{4zXwt%TX@ka>0>lCa>s#?_J*`qauX)qJkv2eCZmue~ zVfYwIFf5x>uy9^p>`Nk;Ndg8D2wp)=lI-9+VQA!?60YoBwti8Oc%4Q|nPTaxychh& z5IYKtVloHZq#d1Ew$QiShH%%tfh_&LSTtEx5 zmJ$9eA*u#VsWK-{<&vks(wo6xXP_rnmzeV~bpw;{x3zvJJo6Y^dLpX1ZL`zN8`{?N z)Zirx0Fms@6pO~$o!9(F0OR8K-*$PMUf0psTTzMLUhojK@vs&A^n1i}Dz0j_%pu48K7V%hVyK|hHYVZDB>I~FQS~N42^QLiyN=P%tPR0tGZ-T3 zr(Rfn5zfca5*PuLGxMXI$_|sisX#Rj)g5-wW&h=7Gu;_y&_-5lsjZNjFqLL9+kK$w z!&kz!P`(e!B&nmf8R_ykExNcYnn{nX$>m){cgG4y8Te>Qg0hOo^REN;mk_aK+FOdE z9lE!60S*eY`Mfb}?|*(Fsg)K&dGW4^ZlxqqAjDen(R;eE5HBt+YBnN7hY7a-ni|)F znO!uNzms-p;4(GwvftlCW^PUb#+0gF~W zAF8mL*Qm++-h-n77;ZZPr6VZug&dEAKV6Z0ZV~f;Q9ofe8Smtf5ZHn-CxfLULkWjR zt8N3B+k^fwzM1-iUr5o1iJn`ON$`(xo!Yx6BVn*JNqOcWaxB3?YDU!HOBw4RsZ@1j zq}IL4R+2?cp?4)zmQ>P7Ye2hnF2U(G#B0gY4eP@VPZ(udlapt*2X`$DLCoaXW0`G& z2#7QCG<%2lE#k!}7cT1Dn-dDU1W}rZFZNdQsTI3MVMJ1{=E8={K3xLBZxSr$8V53s zd1lg8rk$M~6prXl2V{He`N{Nb<(J-e^G~ z&aw9eOx(e*LMl41$gH@7rn?R;)WiZ|?bx+~eA0(;LCQ}{p>yRV`~_}mWQa+!_N?II zlrGUq2mxUo>|5kYP!0qhhRs!8{^aZzH_f(_Mr(X)&OONB^Mxw6n_RCd)}!>dM%xyW zKT$fSYPa@CLah5gXO(+Yd%zlq$0q8y%nACLD(|qPXCU&iDyTl5--2!?A)J)sUF`cN z>lu}?7I%*KFP;nGL^+aMQiY4&k8RV6GD{Z|@a1%R`l(qn@ua@uQ3b2; z=c5{~ExB<_sl|W-NBxV+p^|>A4mR0D*shvfwQ}1gYnHf5Ld56CBnK*_LM~fclZ{dX1457n z_{p^AE?XpI*J`~^K=JlV?owU5w`IY2K#W%+)vkJzMYLEwK3{w*<}9SCV#KV#VR1h2n=xW3K`4DdBTL0Cr~OXp zCMrSHi*c6{D*5`zOl50Th)T$%Oa*!cl^jL>Ua|~^?X=NY%+p8A#{4Ck5XUAHPvyr1 zntq0vVuUs^^YNmF^^IL_g)6LSe3`Bqe!=4NwJd1F!zl825#mEX`!2b*4&iuAZ<$7 zH&iKPftQZ@>5$VTxLxapX@X*4w;3ZuQa}O>)vhJ4XB&X*(BV(8Qd3xS$pIW zqt71mtDblh_b`%#M;ho!{-%hH61;sQulxEbl^>$Vo;9SFxE+tfp8kcvfYjGr{tf>h zS1be7e_H9Ay~xxaSE6LA@ypiPdAkiSKjyZPE3bOKZM<2N*Mn^Uk6clh_NL_I+l(EP zd?PnERnvmc{%AlW{~_xouMp=Tq_<;J)TVARuO{RT1Jk(4QY~XR&d85Pe7(}Q7jdc^ zCQWt@7Z=J9OZ~rGUx+jS8MDwYAyn*bgV_6loRlvNVJqt8p>3_4ofH6u&4*0u6w16K zHz)1X4~fswWNrF6ynShL;T%M&RI~ITy)*Tu_wyiw@nwIVfF!46)|xHQPrm~#E9iTs zJV=L$6aV9@j6HFJs5>4Z14EMI(YuJ%=EVH9As zO*lnf7Vnl+%o{Ho@-TTrU@p(T5}v!_RYy4`HlzHqipOY8D^pKQF_H@ED9iE!cehMNAvCtV&o!0Dr zukGEFal@u$M5LPlOPy@ZpBS2`QW2@M$!3q(PN^$dLGl!YH9<|vOK=9bExh5W(>BA3 zmIJd#TCwl*5C)TVmSd!LOX}iNg_rxs@zN#nqFYoHHhE&xI!L7&@lXW6s%K1tob3SJS`x`fzLZf$grH6<0-&9@6OJ;k z(@dMeF4K>91$n31Yc9>+!BoK0!ST%ixi_|Ua98r5j?L&NMKvSsWl9cz2mu(1Q*f%5 zg@sANhj+@xA($E_^EL4162Mv+`Y)A=kJq~30FZ&gJ&t zJKl}k2){}>-87Oxu(wW}ApJOlY4q+T~ zHGhZc+!}M|$ozIUsq0_U02ZcZp_|#C` zBW)Tn%ci2~BrpEwWwTWZH0p)VR|j*B!lsF!sd^J1IY+b|+?nC3FGp0TG$&nF_)nD* 
zmws*3T4s4WH0)Err?{2n%1mDB`-E=4@C+URdvDJ7AZH+ zrzh^mI8bmNm&3k>rlTtBZF7FeX47!)%f$G-J)w%Mt#W(Vrmxjr{sbyYs& zuen(*Z>+2xdGj(`@W^Wb(KHt(4qR4u+=*wexRdSF1_Y~0{AMBFVe&lg#*Bv62~Uj& zE~26VS`=ac^6LEHEcTmS`E#4fR1_@F^-rZMKeF&~oYiXzISaf;~s* zfryCci;4~w1wtV$!%&mg_3j^?2^Tg|S5E~>o}LUGGsnfx5xcYbnZ`*HNjwxjV?TNo z;KKUzRF*ITM0|;(ZzxLF<4hRR*XFwu8wWgPYAuC56s8XV8ps((8IJE{Se?q4Ziq=} z?Irih3s_mgwrFr>b#Nb2U^W4=RwnT_w8l)z`RkW+9|Lz3@tsMcBP!W&wqrA&zI$$7 zaKs6s5TI)4hpt{MSBGp8>!#KAR^1_hXQr=4RL8~3%rI67U;l$lY2gW0&wWURK~x>l zCp?hGl>8?6g!8JGX7wLgKvVv$^)`u8(pa=vaI!(--QO3pp`HRx9e3yjVB4dy%tW7| z0_sr?j+Qf(wfi4#+*9{bRG8T!+W;15W%S(&Oh95pTe5h@)2~h#09TtZwyz@jSG~;y zLS55^O4Lsh*)}NMx(QR=3$fq*MkUTFi5ZhRJeFLzcr!lbXhmWKP_wF1W|Q!6m{6Ann=hpD-jKXB)(IN*R-nOepg2D4&)MP2w{7NrGbkQz z+9&Mw9~)w*89&?;ZqlPTAAu+YD9v^PkFheEs;n<6Vn7e@yBvyO@<=0iUwN7p%gP5{ zM|Tx0N!ksY;i3gB3>ku`Tq>o;QI!&1vc24KKUM*R!*CXN~Rq`Wk%)cidHwLVRn zc(@kyhDCgn`VoDRXnwAlNLP#H50qt*MbcVaodIA9;LHN7jyos`fyFl+JGirX%7TsT+zo8ME*zYE#tv&!%UC3YLOGV3V4PEYvegl~>8e>r~$>_e| zWK{d_$~;=@+s6zs=A@2AabJK;op~e~qLgmuipbIfzbqh&Y5Ni{OqYcTAl|NP`rkbI zlKCbz+DAA5l+t(b5*s76!fP7o$a zeGB0F8k0#VL49q9;GNGDFL+OHCJ^O?zcm9d&ZOL>mgTEGO2nRo+Z~!mF_p>J#u!FD zH2Jz}GJ0o_n#j*pt7j-qOg{>VS?`hJvj-&YrA`i3}3ugoj;wn~BK1UZ`ZfLdHL zh37|Fevt=nFbJvwq@%^3fPSI5gU5DchMFOcpR+rQsyh)1pc;?qK}&NI`X_~z zHIpa9=|D;1sp$e*kkdEB*-nuX%M&kKxgkK1*P!o4PgKFVC5e!Vo##NHi%pjeGL3uN zTKY6aIE%JF2`<4QX;;TJ)y$omPC8VnCN~B6zZW!$ef5_@!6w8*9`x?)K%g6cW;7)S z6<@emhXYYgcf)}6)C{~fz8EV^mlhM)f!DR9JWfg%Wbq)U5Z%Do`6SB)TDJ6Hy}bSb zGfTN0K-rQ26+_)<;xooaAwO%U!7{>+d!WS@5nqRgjYT!o# zE)I)LCsa>t-f(xD&_AdF7+ci3!ntqJ#vkQLoEGSkA#}F92C6w&Utg~RTPfnigo7B) z^L;K@Vas*g?T7EkZZ_K`gF2UAn3&MY$;pw5dIDv6*b)$bW2+d$ty?4T80~GJ$~3KT z?pUN>YHZ#U05;=M7ZOy{%o);GkpT91U~;9zXz0U}?6!X8xiWcvvjokqt(}DU ze<9`tC-FAR`fOtT(IG7@Y~-*s>vUw^6VeWR{Xj9G1S7Ym8K)r_%ZgThJ@a`J2mOQC zu=l58+Csgl;8btLdV$WIwsLh@BQ&m`<@ja z9wffo*@A1eN+r&nGP8BCDM@V&_hSJxP169?Pc+3lci5xoRT^ymZA}|bBc~aTaR@(U zZe+bCtNlQ)1P54oi(|@0VV!G%7YZhE?;>x)V0{jF;T}ZHAQSXza8VZ>l1}61HWT2s z+fmZk)PhPv27+lfa@`wLB-G(a)7iry!~Wg;x1IY+>FWOxX|rIjYDX!at>&qheoH7b z_&B|o?l(bHhsM*T;j@rWa2@3{!lj9f-lx9CJB48aq9zojrTr6sf5p5RG;2Qa9TT>W z5y2@gy52W2?3v>119#NSm#Hwooo)sgAHDse6=VeaXRr^z&}CHFqBscpU`7G+7eyHe=FaRsat6R zUxh7y*O>YvozW9O1}(?g*4q9`h8hyKUq$`Ve?C+E&uJymDg!YD#n9($yy50a#(5FR z3r@^SE4`Ke+xG$zXN@D7d3h}P{JRTYY|i_tAU9N$4l6<&Xtp3tcg@hllrWS?(Qi94 z#G2>LxtrG9aiYBhavB>Cn1IL+SQteKEqc1H`}?VO@f&~b<{FUxDbm(Oilx629b7!J z%VZ1-QSL^(L9$x_DGl(8M#($Wc7$Pd#F$|$OFJ?Q0rLSID8?h?7`|!PF`$)9QNK8N zMb2jQ(5yf=lJib+H#7(1TXdg|a6_Jw25f*6d#2mYTvnp)8XSxsMVqNNAEJ7n56kkC z_)w~L%c6W}HSrEEzxpLUu~b9qAY>e4F#zOcMAno*Ao@W4&ZSPwj&c^k7~axrcHcFq zbs1*!B`i2TO;&$C?V{kqRtQZ>2)NDd=j=D1O9Zb!c5ZwINh<;1o_1v6gJHr{R%~4} zx|wi*ezcc14H5r%_n_7z(kL2rAl^Svs1?FYktdYG=fjkVwUbM zf`X%y*TcsQ1h6;cGaC0Yu+5RrL=m!dqT$))zX8%JSl%wKG~gM3N-*e{_P9u5M?5}7 z{rjKf{wUSHX>ec)ZO=s(+*>gcQG?{kzpIDA4qbX7=nPGj>O4L(tzE0Jcf>nWHXLBg z1Aj5+=3?109uRYK`C9lP9(VpoRA27sRzK+PrA0?sY{vS1+s%B{fBJHxfh3GS@um(7gu-Ui^X;a( zqO?9s{s#2CsDGtEX2y$@dI5fkz|FN5t0!$?E-aJ-k_**7TUFD$mEe0X_(Zo$t%-8f zo>hW%N*T?`1v7O$G?(&|994V_Y{A{v^xYMT0+*PC4s#_3hQ`A0b3_D^R1H|}DZ#P%h@%rj0O{DTfbCi$_1f^ikO zov2J@^1udK>$&xSlNK|vj>C~BZG3;}04esDAquVlyX4sT(9Bin=VK}E*-0e(Ysl=J zv3MK+p=J)!xj3^n@;@7!(WdNkRx;}oE?#E9`}QEw(O3x71Y(T*Ibu^ z_}Dg%PoL@a&Bej5ykMH({nrOFu-iDBEAwp&T|GSSRZNs`+J0?4>E7-FdxTEZwLl9K zB(aOMve*0Kt^MwdQKeDuVrZ#2Ma11({DfA!a+~&+_l>7JZyjQ-|I6;(`?i&k>h?6~ zVd_FNp!9`6TT`y90uTUhuI#Z|dbMc}+&N>Gcldlw>;pnf`REjo2>}5AMKC&nHsXOX z>l&G358h(28ed~XOr_dRQkT^dtd5>VpUV9Hl0w-u(utGwPSHDD^s5wt!fAI5iHIHz z;vB`4hTMiPBTWjI#BRgBj2?&SGc8z|-%>lDVOZ=P0|_2cJOebVggofIF#@x+sFm;? 
zrt`3kYrmDRTpZgN={gv?4X0Mkq0jfZA>*OH@ma96GEZhbIi?gL)%nFiT6`0K9$1PUY75}K8TeM z3W)xhf$^Qn&s6~=KWcwf$A3@8sB@NIb7}VeX`ki>wFJmW2{l-lPTtW#N%`Z2s0igE zPz)<*sxMb#1vVFuG{y8APBUQJwEXJy{r-zpeuJC-d|00Cy12UkOu~o68}Db>Kmi9z z@(6kZDoRnUID_q|f4ALBldsgps!0HWV%>GoNha@Q91kT|b zw_?$>^T(cUCxPcmLgr`Uw-FIU`^#h4>=6r26F){qd~h(9+C$qsSQ~MdWl#p13$M+$ zS3#f|vNDs2dQ#vTJP5l8B}qkMrN4^ptPn^w;ThTyvT{ln`g6`z&nTp3nn-i=URGov zwFld+QBcr26+u!u3Rpux`h1{=@lc#ov!8CaZ|!@OQ?U8`j#F z_rriW^LDH=eFA;Qd1P+RZ>+CCc)i_m%yvVTi?rVZ+H$?aJkYK;I2`3RIqDUBs0MQ- z-{VTJP+rvxKe?WKcy813!EwoZT-r*A8}W9Pl|9d>ZWM@aqUs@c+nXclN+T6-sMAGh z&>{5xN_e|ij&V7FF(raS?_)E^NsZ#Hd&f;5oXC3hKCz(w5MZA>4x)HkB zb2y!ol)U9R_$uoJnRag`m@sgan0F(2cl0v!Dy26v=7qAXZO}BC;}x+lYyQEA0n@D; zFeNwg&f4fi3jN!l#wAyXKSv0Jp8Q<&tWoSa0+e8aoz-+&nS{~ME(>Qwkm>yo0BJF2 z7(=vaNBc9z&02#zFUOtWa}wA)>rH)D9;F_esx<8kaZhNA)pGd8GR;G@9m${}AClYO zlQW#ZswyAPA6&8{e>B^a^4n{Tn+44c5Qxxf;T}$;Hk*+8_%Z=oQmdLFp4{>8JeVBr z@n;Hncc!|4XF@^=r=oXbG=P;A-l}nD7#pbBs}jn4nyJ));N1YSADhts`G7aiDUTYa z8tzyYu@X*MW@^(GeXva Qb5Ve$D60l4lQ#AJFEDuUivR!s literal 0 HcmV?d00001 diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/images/CAPs_PublicRoadmap.png b/dotnet/samples/Demos/CopilotAgentPlugins/images/CAPs_PublicRoadmap.png new file mode 100644 index 0000000000000000000000000000000000000000..1e1d64f904a7fc0c89d21b524d735ea753045fa8 GIT binary patch literal 138724 zcmYg&2RzjA|G#7 zzkR>I|Ks=bc;s?EclZ9h$LsZcJzvkKC|zweN^(YW0s;ccXHQl12?$6D2?&Tt$Vk95 zq6Q&(;NjZKr)J&+1XMpS|6NJqrD6h4lKMQ;dP2HENI^qQr@8I;mw<~HNt?o7Y7-nPLVG@EhU5^YA58y_*Tgng<1r_VZDL*-@A}L}Wtqnkp|(kDq3)>b z%zS^-yXZ0fnTe)*ON7y7zbYnhM1kS`?^kZ&sWRf`|NeaWzaeEdn*aMuF7Z7Y zvj6+Ua5xPa(f@tINr{XIOw9j&)w2;1Uisf22?)%G6((h_V4r-^&1DTejjyt7eypNm z6ME)5=y)2UActQ`theu_g`MtoKcpdpW@&!qPU#={`Sa(S3(x7=cc&qtcMZTN4x5s{ zBh=E?9z_J=-oJO6C^H(LpSM~wR1)d87xuyJSf!_>ec5fl$R>aHc+hdO%1Yxpy+Fax zkhVD-`UoG?CS5@H-$CVu+ zIe`M2Od%m5#j~?Bv0bjjXCGAaliPJvK5G~2J)2QE+MZP-rDm}g$jQ!Dea&i4E_hL^ zPD5l~^MV+E7D7%$SRZl*Q)4u?7iKU=z#7oCD)*HFaXSqNN?Rr-ru-C}kTmr|{jQT5 z(~l&iq?pejA_2LU-{6WGP2kip-waU2oYPZ~Y#bkAeAVL{ zUrJhzn2tQm+jY65!%!j7lI!}VbiO`5!(%!MTHprj z>4-S)^CYSJ9IWfXcAD2p^7G%2kdTy^@=0f!e`QbX72@p(3=9;rC{o5q!w$Ip@nAyb z)UPRG!HYy6JyKGOV=2s%@lW9XP#=W%0>k}ADqi}cri6irBLS-=7|#4V8!Bt0(` zrk2>oU!H4%;XWENAKeFuUt(jaHn+DMm{09`*RbP@i#j@Thjf(G)G{p|U$q%Flb%$2 zk(zf;O{GFOo6@sBXuQVrWekEL3hsnyZGycpce*!TYOugb!kpA=FS_f#3&+N?V+3zU z*LyC?(>~E_s)@P9&dy#BJ9HtB?DTbo`=0)LHILNP)`oqe#XXE+&Ivi*XxZ`R`WGaB z9Cz_AE**Dre2jKy)4W!@Y)$!}u|mOEd)k6eoDMf9Vo3?glhiq9n!UN+buI;*e7ZLL zUjAfDWFEPK4Pazq!m^1GR9?d=zW%j;ezJ>#)04krW|))(`@JuTw^`Xk#sN%Rct`FU zqS>pN*`dyC9(h+jXzI;4wA}h+?wO3=7MmU&?*cDa_&-0Rqzg+P{r=xk)R&s_jqb1Z z%as=BW~8R(7H7lCI3p&VA>hER2-ws+#j8CGgMnQd5qJ`eUriAxCdob2($vfiuZfa9 z7*?KJO*JW#D%5{C*xa4DPMNA+xV}-GR8 z>9s*-igEG?R%xFS&bSn_ZzfkSr$jV1w8Eb--K@s6;H9Kn31>AMS^e`)67xC;rLUjg z=%21=u$o3e4-$Ka=Ue?ZMhi7Qs~7r2GVmp-*Fy#wTcCUQi3zV*v=diM*E(Fks}N#| zWhNteLl>ip`Ba9k0ZZ5NYFTygq(7NI|0*ff_`N5SsSJci>m&IG3xT^6mKOwk&6crJ z^v?I=ou*E0=$>q@uL~W5-M(|D>K_<89aW@rP!Kc9;LUX+Lb~WK(MUf!qOhZ_X@f|^ zFe~#b3S6+OMfwlH&J#PBwiSBHB<_}5RV8p430?O)!C6o^|Hp3C?DY-=aJ!O$s?6dy zZ{DD9dM2bTY3pQ4>NVBsanlK(-K}JM6zv7isF{OC`PK!!m<=*ft-dB-=E`}d;q z{^f4bMt2ni$=mGOhY|PMk0!}RH7snJ@ZC-(nWM5+AHUHIeICL}HGQntnR zA+LO6$$hpgNM4XL@tHDGB<7_~-3MPmx3{M;O}pD$yBizM&Q4A}FBv3R;$;4n=id0+ zXY}7A1OnWXi+#b1yxB7FyRnIKlZT8k=9`tnV`Iw8%gdsoqAKBRLJB54N!-j-8P*Dt zoom>U-@n<;&UQn&o+YdgWy#e0?^++|rg1^BCo1=E4B~XMq*q^jAV|0I_m{XU`SNw! 
zfymlQ%zPD==k{WCZNA}tD_HDBMIO+U5ZDd;p#6#_Yu5%W@~Oh7!R6*&f}365@annl z&&iyo+OJUXG!!RHnelzCXBWdAR``(oT?}x{oaVdh#&JR+A zuKf-?Dw!FLt)}o8#Ax)S(bb4qh~PlsU<;Hmmdwi2tiSKo+gRpd6+&nK!^Y9+;&dwj9?@aG)bXU{V zJz7|gS+#{c#ds>xNU${2FY5)1YD-6sWJn24(2)gzt;s|=7lwq`!Do}K9siVeTz2R7 z^36_uKXvSFewX~!=V#x)e!1Y0>h@bCY0%;?)d^b6h+cLe0Cv-TeK|!yt8xmukIC0; z5~+L>X=8~AA$jHsyQD^nyF4Stv?mL7A)})~@O`ERKhfJaB@R$`+OuUym60dYT};X` zr?+?<#Kd&36mj$Q`M>}-5s^bAMX+xNQrC-yT(mLVRWfm!K0w5{0;Bli*IZe?SJQ}s zDeaT~XPWl!6=Or=D9v+M1i1k}`A$J<+5ocJy##PCtXhFKdT5_QF;}dIfqx)JQ=5! zto3r=QusrPCH29aLd^N~f#)B7^8+ksJNIN)oDs1!&9+OK3N%!ur8h*jU!I$Gtl@gDZA0GP~+d9*b%#ME%ASdnT z&Vxrz*8>S^hjV?0B4=cE$HNfM<00dEv)p{sy$PJDH(G@7O>r|u=GUjaJM8Vw6cM)u zN?GE;pH0M9bP~jyxPC8BfDol-Z|FUEz}Tq8np0rl-tBsq8htUKYE;KF zLtozLas#Zyugj^t5li_>K_$giF;oH0-Ix-3b*eY#t4oI;tRx=2aOlA^j1e3l-nD49)YyWu{j<5zC?Hi5yJ3 zr>4z?;%6^EIv}>Sw26tsQ=Vgu!B{G-Stx0)K2t+Iin7mM+$c{XNW76gQE8zPPCF&s8)Z-jI~909*D#gdV0_kw%PzmdPXb`9Ll+vD%9Zx<#o zfIZYn&hdE7y2e^M^SQ@*sp%2b=z=TNw?ulAtkt<~srd`OoT5Ael8?{ehew*bom#Jn zQ+o3F8TNkT)!Mykv>YAocOT;rj`36XiHC78aI})&D>?p=tf#ILT9y2Ldn{%Co^VXk z6!;vO$wJ2Fq@OLHdXt)j%DfLtWLrU$A}y)_7PUuflRb96KdsNlaj}ZoG@s(y4g0z5 z>{ZBc`ngv-BGCQ_L_*FdH*?0$GY#kP#NQ^0=pgDv={x^{AN)E8*;f?5Ae1;W`NeD6 zd;b(NmK65_P{wUgf42Doau}Mxt^hiIuj|Ac0=!jUgfGgB9@gIc%cUHhpn;#2irx|D z^^0A+ZtPjS)(1+N(Pc*B>}Bd$dp|qdZOE5p@(DQDJJ7nUCyl?`M&y|*v3%QWId%Xf zkO_4a!pm*8$RM;xt} z)Ok`A-P7$kxH*vA9EG|A=Q-aZjQu>EPDPAWDik>+vMt>9niHe`0rjDwx#7JUyL0}; zcBhBnIb+3Zi&zWSN($Nwk8vA%zaQ4}#{Mea`*jw}Q7IbX_NrV5zM^&FvqTS+DO{(F z{@6kG|4jmFo;qdD+GNz>eOtUjij8iByxDkDX*tRe&Ep2cQDRC*OBu z2j2$|nG2gP;DZCcy`|=9y#87ejp(HOwikBRou9c_!|=D^yf*y+?en zkWZ%c5b`?GpY1Ok`kfHACG%kUT`*<&dXkOWfiO#e-FddBaXW$_;9r%b0t__&VJe9d zE#k!*j2s;v+=95Jlv)B(nQ5*{bGNRWKNaCkw@v-O)PNri7!iZCdvI@*8;~ip$61via?f3*|Ev5IE&Llu z-y7o0=r_+_D)q0ioqnk|RY{-o)ZBP;0P4Cs-}9qYZ&mqIFS*f}FX;(-QUBANr5)ec z%0Di5Dar5#$85|t85Xl|o*|x{s6#jJM9&+SM&5{xh>nt^8{^q4Qql1A5Y`4FqH=j3R z{14md>okp{Y7&c0L+@E+Ftz{%M%5SI2&k+`Ev`GIA^MZmQRw)){ z^N)pvQ{x}l-goD#3X;&qr(ry}+#$eRYTDzoNf#p2ul=-bHHO8mEBkI$8iX+sMkL`3AYw6qir++pZ~n_Dmg(B~%rFE@Sr_U&_5a;Hy)3Ixxt1 z*@;o*z9r!>#nEElu4H^lv669vQwIp3mKH?%l}M2HJ!oC8UU_7AN_iPOTGVbvyHe8SZRXivin+_RaBKW zSq4;s%*;^m%381ovHfy7a8V*1`^uK!gW<)Ha(U~-4A$m2iTmUfIJz%XKG&j5( zb+#H61`6YAKLjco(e*iS6LF?Q;i`x0iDV==XMQ(arJ52ulO}TV%$IrXtGja;3_{G$ z+N@HNz)u5E9p@cTC;TsPNFW`*YuP68sKc6;(UBy)&j_?@&@&+cqb2;(K0d7@U+4M1 zc5+6)9l18xO|Osp2&ECoJAO>hyTU{KPPj$XmeW3lZFFIL{T~y+dQISg$0b3hT@&&P z%%?lMna~P+E^PLTHX-u>^O{$;R~|-wSdbmaVgWw@GX#5T=Kqfi5Y}|ayek=EJhUtI zZb1mQ13uy}A%N>fIf>J>Ova7Um&AQupal7n6lVRK+9O{?!edn$Fp415z8X(K@m6af z43Jp4cYC^Bf`U>DuB7{O_81-QruP;DQThkFf7k6fis#fcMjYG!QM@X7HZAHc?3uaw z`eev&P_>~dUrh=)4!SBWHr@O0lrEEwtNH{|ni&7MUBR^#WpI-&GRMoofASTVov~f|1 z9nQ9*PT}o$jw0d21qIN{pw+>~?^zQ}!2MMkSd}TWQJ4S7h43b$m3&DQPn54)3Hg#4%8nXX_qI&7`L)CAEla{HN|04y<{_m>$W%fO|0+$=e# z{2Hutd-CK-lp~uUIx5#sd2Y)g=7S$pr2xuuy}1tzim~z?;i66rCLmM_+fsDCDJi@h zqs8*8L$dY}v2f(h9}7S3r3}BCgJM1;d3^yUhGuJVuELgs+%{KId2!}0Hi;1n;-WF4 zA)AMNom<_%+eGX37qEPeu((G=J|5?%z8@wm4JOAWnr+YQxO7F+=ZM5X z#LCIh(J?+Tksrih@UJ z2*||Hogp$%+@HSzOY|~AY4b0WEf4&UuqH;tLvOMpD)Af06r7$}aAERMp8Eif00ne+ zW_q@sWjg=^69$d(Kink1Hib+Jwp5Ex@n!czzrZgIXclZGl1 zbmWUtoa#&DfS!mV-R6WBiREOzm;XjxkSjzzK+9l?bRZkp7(zq9{SR-UannsMOn#Gn z4gP05gT|jb5{5iBhkFpGQ^sW4uhkD*kL?F@+rjHCu7ivkyJSF^kN>0F;99%V@0E9X)xEA16iZe(_n zKsHU))1PeGyKefceP%n;gb<6K-{qobZN8NmQvt>PErB{}0mqj+Rrrl)Hkus(Y_IC( zXNiwP4GGvfC7Q1Z-M+t5#D`1v3@yi?IX{K(uW=H_(u(M*lRk2-@el!Vs?EyYBs_xQ zk`&(glB~nbGsOzh;$i?DgF4y++*(>@C^@F7)fA1w zD<|%JKGC;$@=S2HO4&f$8thuXF12_Di6N+}Yrs5C6&{VE@fb7J z6;9>p{zo8V?(CfKrd7NyKFR=4i0(o00TEPA*={BI-_f2}NuZ=A>?An7-|Gv#PI;+5 
zVh~;>w!bjH_Yh3gfsG58d(+6Bj}{Y+dP_^Y9i|W4&RBu)9HJ?LfeB-l>WAlf;tXcf zQ)w9~J+|c6@CGO8e4f>iOE^5#1HasnTJRIzM|ghtWN!3gI!4D(8Qjlq zA|8Xw64GS+15>|?NBJgn@y)5*c>`SDXWe81l|=2F!4oy{>1J5Cs#!lN0I8GL8uN_?5_)$x z!Cg6ed-E94!^t)9v&wz%6L{5qnwGxW@oAlHc+;^m>r`p*v^JQp7I8pQtl7|S2ZM#O z^7NHXO!R_3~6(w$rBe-f4J~Z_;pf8_N(b8%Vjd2gg+}EgCg*j zsQ4bpwEG#C=*UC%dbwRNqq%diz<5I58#WX`_d&UZFEY4nrfZMve6amPfPzC}iWQ7i zri~mPex8%#7)==pxYl7eaN%k1la4RHzAR0ac^n^ekNtkcT_sl_{qH?S#{lEd;3DYg zpNWD&@Ua+~Kxif4q3VTSiqi>UZx+TTX;OyteYC$30zij?3-07(S(Q}Z(j7@6R~Yl* z!@!dSiKj&Dp$$E5vYo5ZKwZ*-9DBSY*8Z505s7`)3QA|R?Elc}jqbF_T!_%d=hk{+>IJ%DA{hywd=rJcr6 zYS2W$b78WJicY}sq_f?Y5;?WZf>Jfr?|)Q!m`~ng*f=YRY`1=1zM+L`Y!gyi-irzR zE#1jpeNaRM)|C$d_RGgo2OY>=z%@852$|j<;sejRia1dZH1fai#+GF}hne_Q{o)Eb z>ht6Eu2~-u@HVcH`wh&>-N0SwYlqc0B&q6BNV~YGs|5V38_z$Vd5Of4qxb$%6OfMT z3X8axh$BKX_=QNKrzfb@ko~D1@crGUWV{i;WdpgifZlF{KA2Y~V-Uoe2RQp~CoUQ$ z*yB1uX}?4znsPo<6W#3* zVN&NK3pGNESNA5@7>hH!GwvWGU1sAA7>l-kq2cvc@VT9-dacrxnZ`TrwFdhk>M4g!(l54yf4pj%l5DaCF3-$WF*x4t+Tf%Ss+ zOas_AtKxa*O}~AAfXa2_2iCT1e$LV!7RG1bCWI_^%qNCI?N~m4{*ZInoanTaz&GO_35vo;u)CgqhTvp(Lzn zg79>u6wKivH`@#AkwOL43B^myMg1 z-<5j?hsw6l&~_mlJ&uPjkh<2*(cJdvjcZNZcEe1By%QH`kE&c?EIw}z1XJ-#ncs$h z+&h*w9-KiYXb;D2q~pEz)_gTf>lbt<#~s3kJ#HoKtiN)mayny{5PxBkdLKmUAU7EE z`CdK1MMH+9jeD>oEru|Ga&-3+@mdk>KT!vb=Dv1&&H#D*kUf@=JVXR~4Wi7}WT_w=_aGfLexeqyiE;sAG{a(UHmY@PJ!aHjGQTjM z2WFdhS?7M1SC{$JT}k>mb$-+JD+XJ%0`+de1G2rq8L8Q7~+m%iV2l3GK1ps}|)GZDj8 zd@~d&2(w7j)KX7m{IB61eKbqTx8ehD-mIySlasTG)ce6r7V;7xB-&+QR!^IVh^wRo zDxuoWEypl?h)z_z5 zUtj+Zkkc=y;tvA@+#&rVBPmG!19_@_6-%GMg{iGPHiOad*%3e_v}DFTvSsoY~RlwND<>+IRzj z2<-J*L|Ser?Id}Q3DJzX~C~+{n?vcp?o3mU+L3l zN52r1jK5Myu66c`oBYL8#lkS$932^C$Jh|(po?gpcyIWM9q)NZHp&+eojVIT;?K!#6`P@~`GA|0F&GZ$R9j_S*(q)Kf-3@j<%0 zybXnN2~NG}Wo~ag4|H{Q^wSBj-w^gdMGkqRdDNqV z>-2l~J`MbeZV7x^OWPSa-@~%<)63jH513t=>pNF}%;dB^H7+o`waE28x#Xy4!?gKE zotJuk*75$!?+5?v7EWWuM#=l(t9Pwb2CL5{msAgR9_V<221e5N>Ssiilq@0^iQO48K_6=BTZaaqB5z>s;3j8@G(6tjmbLKiJA>!_9s!KZY4|-Q zUa9H3*@ypvqy2&dcX%WIoZpCHwZbIrVaA+;qvK1Wc39yF%ROlFsy9AJ7? z%uFU=po`93OcJi@^|80L9<~}w;NUpCYZmf>K6IdDFsg+nmAeg#O|aQKvDC8N(q29B z@8F!%@N1cG&x^`1rqs5dqTjjWHLK3AI3HcA>wDdl@Y6uMgjbLpx*36$7*DWRMXVr= zKCHmmlaTzw_A9)~c7P#X&$mx>qvJbaZf`#FXRX*04sk&fsJOT)MgF+n;N-`hcMlix z|3`~h?zE`3)rA2a-Bxy~dZ-FlXT9%t9!9ivuL&*Jhl31#)&k`o#9}jA`$Etb+)TSH zh{iX*Fq~L!M<^`f>||LTR=rAd{Bux`zQWSpyLmLj+^4>|vGR?-t~68Jl5%uQrdI4g z%8k);`Ke`Kp}5Mluih(CR1JF|vHIk#z1f)bZd;Bp6N?VY)M|~p*O6-&w>7%G(tp+G zoC)zoTp@Q zsuwYJ{OejTrX-qpg1KCB&DFpXO9C>5^L{wDrD@F8MQQhygvRT&E8xMeKk0A4t}7R- z@GF*&@K$BRl_Cc19p4~RVz-MiNF1b_r#9y9B>|k{|D2N4T3YTS)s*-xIJXc>@{U$R zB_PW9>8Ye?8Ol8t&-`h0jZ#w@xwM%{yjY6^a`-PvL0d(+myMR(Ux>on%!uMH6-^d? 
zW9=5KHS{V*6=B8r(7_zPR|rt{+_+Dm+r zTqU0;1JQhv+2=ZG0v0$(T^G2_Gb1G%Ca`9=+YT5c{j&M;=oBoK{Z#a)835{Ux6vYarBmML1@fK5_3qMM= zY)72K#mWDX;-*&hQU%ZZI%9CCF5H645)X?_& ze>~vI12p+KFm30&qgn22LhLv!wfm8?8)WNY;aS^xi3e?09Lx79J64g$x@8r++1VhI zhdvUZX){}okBCg3Zg%Z+JM4*+ZBF5bx4fLjrFGtwOn=k!COGO$1Z)4T4;|;7GAtm> z8RuoAl*}Av3sVkQvV`Njlln6YhB#lBRX1mTLw;Vy`E;lbBxMmX^i z>-%9o5ua90YyUS8mR!R4%TK8;KL2(0vCheW-qor|oYRo>%NzD`m_yfPKX?^Tt^m`> zeP3ThMO?{aM^obJ%mf)%>p^+*es^~VVp0!{gtND!0|kwQqkFEIf4|(I@qGv9xzj#t zU4ow;>;S|#8AI4p=*+vAuj(m7#0s#SUvd89mIF?kubP^@zBio*!9?5U7Ghr%l;c&C z>n$#gK%n743a9r1Koq-x*K^X=`fh?aB_%|uZZ<}i#J5t2wBL!W&Om4ETRT3%?a@&D zp`GB(*EBc@V)X;K{Q+mIu=-&GZ4&`PmbFwXIeL=mjxC#i2FMHIp<%~iy+Q=Rk+=AykO* z%0|?i;P~L}t|SLCq~je)C?P3E0MczT4`APhA?v4li7M|Z9ofJ~TH7WG2;NH+UBiT` z+S;i%56OIQ@4ry`rmn?rlEsqA)R-yN3lyVf_^PaU6So~wAKt>~dqSA$D!1z?m8j=i zt7zyz+4i}9m;x!|!b!f>5^H$wFtdMWwr*NvzUjs&L*oGM3#7X>=haW>kLUW>?y2F3 zSp=;3_=3``)xun*f)`EXEzmytWAAg%;80v;%6sE3D7TImlCF!JSr_%vO=zj|CKGMU z)JA_DmwrBAN(wSO=&Clu$bw#@uO5#$;!(@*=ERt`FQF7eeMv{ejUeA=Do?jQ9%o13 z`Qm(V$isks$uq89X*oow8-mB$CSqR0pfb(pzVyJQz;b-HWPt-*Bds1vuDE^ zQ|CAu5{HZym6CNl2ai%3b)R1{xXbPS2rlEu_Vqzm)_V}X|G<>mG1hpa8#cvobeV{n ze>8xs9<$>eIt(pQ@=bRR4V8vwX2ws~#ZL7R|9^7p3qt+xjhc;fC=9CLVfZ)Kag``&C8{Tdu?`O!=AS6Wjb+^csL7Ix@2 z1^o);sm#w^YFfeGl?)}CtxEqXevRym@QVy0K_hwErC*OXM%%&bLN=@rN~kMF0rMB@ zKndcv?*-dB02K;z(Stz^Y!Lzi!?auYMEEecXD_7H8_P#1@dd&!_iTS?DP8is!C14* zb?9ftFtw=vy%z(8mm`(JgsaU9L*{!GLK}@9EUcsHzYF2QgjLb!y8d>hWsGsz>K+aq zpbF2hE4oNj*ng@<1J(a2Oc7CFOW@uvI_?Xd_c) zkl#;;LA7VPGk=eP1J7FmC7P|GyZ!BxX&Y@SJoc(8;}j3^%dn&$aaH%JWE-l#97I#A zz6nM~1JEzDkJb!qEesc>H9ZZqpWdwSRp?$RMj@L20jdj-upH-(BzV95RdZr1%6Q}q zstNA__ty^(D)*M8BZtV0?0>mE+c{lHieaFR{ZEDi z68;Bo*5g15HvHx3{2crcJzdZ<|H5yH{cqTgk^yrFcw^nV>z(@7f*YM51vs0j`j|de z<>#4&Ks+*hh@kBVrFVUJ>>sd*+>6*QxodbQCaxlOd4RI5P(2G$+gd%}tZOHx_t7X4mgkcZ zg7>;M?d1k`fFy+h8I(kP!0%T~K4svPqh0uRrFBn4=GuqZ#e=CKE_z3(;A!#j2n`h% zFA)k&+x^Yt{XF~In7)t}kiM+smx~J*@Aqm~+(ikLwl`NK$KAiLDD`2$aQf!?c!y`c z>p9oiPZs^JAJIu^LK3{vQ6e_G^%=Z}Q)%Eqrv@PPaiA4S3K(PEmD&y`uo%w+<2BM@ zB#BO>AP8B=JycAsH-usiNIRa?4u<~JYWTO}RI9x&AH5I|r-j({@!Qi#*H~zKpL7Fc z8GJw6$b*yx3}$~em>ipX<2toJY)9X^mDbLa;i$=ehYnYfa&*a()^X7_c8b&Y*uOU9 zgR1{y%}mIM$IkRhAo@a1)$UU$)(4QJfy~X!5XxLDqyp#svNbc3k0bjOU6(%mr_fP#SJNQ-pM(9IyC zbl1=!-ObQ*_kiE?JLh@M{r5h1o>7#U_kH(Xd&TFo_S$;~|Ie0Q4hX<#ubUQV+g0ih z%FE>st_U3NHV$RoajV&nW~&o6Bk2cwC)%w;aEo)cV=cdHu;%yk{Lsal5I+UnPodgz z{;&I%EDMeMrHQ|P1Uz;d@`v+xmjkw!a`H>9WXtn?^BX^ayrzDN^oPa|yYGqf!Ra1g zhWX0nF^hqRkjz4w-91&R%_YpXjfFQ*4*I{Ij>>XEvSrD&Y+f`{;!-~2Z8Lpblp z2;b2D|Cr9a5Wzc#UtoS*Plv)Lhb-vL>q`A{MCI*z)0CwstxO+`0n2(-XrOFFX7z~+ z3)t-&3y!|R&hcyOm43{>>dVVOKqr=%J#i8exH+BW@%L&qSYEp>j(rDiXu*=6ak~g? zH?#(FlhSUzkE=OR+K#B#>ZHjD!dKb!Wtcc`$X7X0(MjXq9IXD_#<{gXq2Z}D08m^; zc{X0=;)(}=`U*?D731@Yfr?XM&pNjxu#A?f1)XmH)dHM&0omwI!0dji8zx*ACtTei zDb9k|>a3UC*nw@O3Q;X{d)E_(;pGC=bnd_fo@1uv{n8??RCro9|BshC~D%Ec_?_nG{()?;}@x0=ScZxAS+3}{N0{KTh^g|Ct= zC6SNs>-`T(36LdZh}AoLEt>d6$VadkX##l&^LJ6_F|}5~+^csAso* z(Q)NiW#Gh^0TX1^{taYxLN&=Fzz;=pmzg8RrKo--d&qVNxM8!xLWkb8n3?Pv7kFTO{bzCted>8DlZ`y=tI>kdX}Sv=fR=gSsa!pD+ch;_=XNo(euO0?gN0{qaT zrW52-j2}BO^6~|fH+r@8j8;S*$@#{9El_J^+hTRNFJhK446+_%IwODDX*z<{tObC|DBG!%FlFk<$Tx|*qK7CXf4e3t>tVscM*Id=3 zI-nLy80K$HwQL>F@YesIP)mCon1MbUgp&=b_wa>Nak`x z#bYjnQP%_fR`~Q-{snoakGHP8F4qSWGX*@B{@n=b0mTG6u&%_I z%XYt}p!hvUM(8WmD5)&K#i3W&jS@D2^d0lihmsJNXe;wBfkq>MzCYRjmTKr!SWq;T z#H3ib+03SZKwjB#bPRC#vHQL-@Vv6YuYx`X1prdH0m6^lY+yw&_GYm0UTIlH$~+RU zJZri0(X+{B1R4y^OZ0CDNI_-pRt3ztN|t#pBi^x5?OFi-PF|4pkG5-R7r_$3KNhH{)MY_>DSssZC)PO z;c5G82r9P>Q;Q!hm5w|yG|Gw!4BSW7fJrNZZa8}$m4ULa` z=qyv5A-7&UNn6>SxJ}*iY-? 
z+q0xZB1a2o4;lKEZNKUnt(mRybXYzcMh%zcttWqn^^XQzKi$%hQx&y*?;_yNs`>`n zX5PW8v$lzbI%^_6UQ-uS8&Y4+Wdq_#%@F5qva3ewv9)|1pIG$ZxU)L0JY#*@=~Ji3 z5)C+E0G0PLt|xb85touOm2?GJKoPEmA(=78DhS|tiG+vZvSnZ;8=@9O3HPW9+|Zui z>_YUckDSY<$J&CRU{A8!zBdGVbPebdx*}~g%juaR?d4(`pW#Nb|79Qt$xjt}dG1e3 zfh|{4Q>nZYqv8{;^?qEj{^n@`pg(D@pkDAU7-&-(lOH58$U@DzrN?MuAkd=-?k|;H`1twzT&ZBr1zu*0XV1kHJZbZgeG!&2FW~E zy-j+&<~m!0fiyo!ZIFsdOWUf&BvymN#yfAom?m0VNk_|{#qy}U8>^bN%7niuW1S0^ zS3f$MXl3Js8x62A8?$T-nQe(-mHev34#0WhC^S^^#v>TJcnf<6fS19HCdi$7cj<_>sw%E}fs{>q{ef>(doI@0$0VHS{1y zHgwsz>0}q#%4;2qAIFzNwt`qD>lXXOQNi(NxVd#lv_~M6eqS=(UzZw_q zFJ4$^()qyIQ$3Z3gkL#tKHh={>ASD&H@}MBQ(x%UBJ)_~t>_R|DiGg)_udney~kgx z0!w)HNv802C7SMLEfcb|=m6`U0>N(Tp z+pfn(PJLFJzHiG^Zl+XzbLU6{vkYaO`a?c@*l@4~+X+*La6Av!QKKtLmnvV#W)|j|2F_;Ty8e8xIl?4~N9E}@GSX;CJuCGES?B31*cj@?GNJqI z?%*FGUy1db%NuVyx>$K^_sPEer`&kN%9r+@`cH69_bQRPtvY9+Ac&ViGxvnl)6uN5 zT-JO9L`1me`qwiP9CO{iJp+~#&L(ne?cJhNKS{z;=!A8GDzUBFgLM6(&ImBv2Q;1v z{+*1nM6$pa;a9md^V;YyyZ=38o;yM_; z_V!yGdg-Y-$mpZoBGUcIiv;}E;b83F`2yqI65aY_*dZie`thS?Q3Xf2guVYvti3-e zEwUk0Om!Mzz$G0LY2C+^V0`azHXpf|a#wM?%ys79>h4v@ zIb~>56aG_^q(EVy9&Zv2D*r2Ht#@&&0ut*wsL#-#g8V#crg%x9A-naE- zV?}qk5@Hy}G@vl(!!wMA@lMuZ{X06jJX3q5gdgDu9 z&-1E?RWULt3E9@xh7cgDd~e4y(*M!h0#J(9E>iekm)wlXX2Mw7J$L?~9K; z(OMH+2g+HLt=eo`&8+8AYJoj6vv8Yv<3~wJA1b|dU>t!{F|yGLwd~x;MY|^FDZjv@ zgU;Ee)(CoD)Jds@O^wE>b5hgOCHk1%U9Nb=A;Rmb$IoHWM!z?r1)o18$jP1o=?eGd zCF3{&6)6XY44ZCpt1d=p4RV$2zWOY?Ruoc_@II@0w{p*)8+RlkA_tuqDpezA6jkJ` zD!0NL7cDP#d>!$bq5O{5MgRX2)iCSW#?XcOPd}NA_r18pjuc9VbqUIB;oiTmRB)Cn zC-i*de9^Q*vfj{3G#umb( znC;eU!s#))Xjap45dv>9caUnU8}m&4KsZauBlC1V3Hj3*oi&vlU>qHpJyq06>N=lL zc!QQhFrn}|B|Sz%D{F0D9DeIsOCp>xC0)w&r5{x@bKuFz`fMV<^xmmCJKgWbRzb&f zpqR~|PQB%EGoRh}$nZycV2R}urXqwQE_9DnY1Li0_$vj^nva;~-anMMipW88hFDbUbu&Kb^ofH8qDVT7NbAy=uE{q#)b3YVaRW7@|{f^%&ue|ypbs}4*Fh>2zl3M8+mrEz5_!L3)cKi|Ij zUt_f6ew(h01>pIR1EdTvTSYREmJcY>nTJ*#I=?-fPQP#B_LtOgHi`?Vz329;11~Z29(szC zBfX8{|C)(+ND}xH(sJdpiPd=ST$tzu#dv6H&MD;+Nn zGy~kg<>+W~jl!^>?qXN#{`>2PGdY-&8p99S)w9`K8Vj$D{8%XEazie^$os$BD2=5v z&v_@QE`lC5Jb6WtmPRDH`H@l!#eUSJ|L5e;WvI7j#`D007j;?*iiFc&YeZS4NHfp< z4I&#YmaXfjZ7Jq$`Dz>uZ^e##TIiRsh_KjAN%phrXHI^9RtQ@)jtOs*kU+tq(eq2| zP21Zqw`xy9K-I1v<$V(YDgYOFN4?8drW=~WV%%RS7NH<@HJ-C0CBtV(xhAlNu4J_VHuPemZ^bunkBe@|LzsG#1=!@=g9#*YXTqmzi|KH=` z_46^K&ex&Fqb@o);SxJ|Q760}iTTVpBjjCq8+)krM}H%DgQV^|RYkpt_*pz@2#4R_W+}}@Oc#L!y(z~pBjHME^8HY`()^IY_xmSn?zs#71nyUzZQ$4y!KpZm51Wu zW$lxFy!c74XGlmBOdpe1tKnC21V(C-i4;xS<-Kg%Pka#CO1`7}A6~J30?2RG-xpxP zW{Z_co&c5T^YFaX7w<9>!(pqsLq&z8fFTs_E;CL5mm(t@W_jpz5M7)0GqwchFGlAR z7_kF+Li@qCO#6SxL7WC=e@zxQXfxNsZ;@wAHqy!??&0F&6Ij5E9n)OpGA_MM=;|7_12=-$%b$Saz>^y*uy9hMbJY@-ZJ)O2Av0)>? zB~qQ67f*fPcgB0Y>sswlnrL=@t5INgfW_XbPv|Sg!s+W*aY5yJ9A9hFQ;E`|P7);s zBD{o2VvmxYZ9?V?@0f~G3?mZ9PiqzK5S_K#?tStVk-Bd#Rr+OP>E%f$3jSjpSi&5# z!7^xaiySkbCLj)EN852G4UMdQ%A7~CjLm}P;qhweGT~J2?tv$DC0x%RQe3@Cv^sQ$ zgL%K1F)&6PirAiK29?t8H4g3Z%$OBN6IziM;l}cE;YWq? 
z{riW@T$Yw5UbzC?KaE^0_ zU?4a=utHJX08!MbnOUehu94HX;L>`JKg1dD0#yWi+Vak?4(Ys*Zn}=nIHV;bRzG}a z^PQ`6W47O2#wE(Kj&dcnLNnQL4lmR9C2#B`J>JZxw8+!s6qp0HLU67=Kdy+pAvpFK z+FRAipOnXeFE25Gw~4&=lc^Y!a`&s&)H8p`bxhThTD?`yL@4^dY1(izmY0q4JoV}y z^NO(2(Fq^p;Jz2BW02?nCy30mmE8fh%+-(e$oX=wtSyiz#y?rv{Bt$3I(v|Ho^`Es zBvnEJhYQ&`G~X^LQ+)ocDSj8PM%~#c3Nlnw+=vtwvMw~eQLFBrH66Jm3m>7mbC8>F zT5Ca>vvZ0<@5=dfnmxxQ?`Gy=95a2{AzgrpV)800qiYQau-ZX~pL{O~mCRK2d)rdAyU z<3E>*cJQN(OJqcTGoki5n-%kY%Gyl4JTl>wH101>+v8@x328{_$7h4()N-<67RxjD z#PmBTxJf+^W*w3|5y!SD=Jg0A05{}iBE6T?-LZT8707DU4#$qD?HWw~{@w_eag$xu zEMN&0a~)wrUEhK6RWmSfIXa};PaA6=3E=HG&{At7*iDu_Ry-E%V=C>34TBY^C+5Va z@EN7))EZg&O>)=c@5%w>+dx6Y;Vrj>RsuaV zE@Lfs%#eioWGQPzN%Hb?F$__W9B3hP3$7*~BaUGFBankzzzDQ7bp1Ybaok6?6jjubj$@*D$T zYEMJ7$EuH)j8K|Nhm$|`V!!#s_J|JsmwZ_0#l`n@`Xfv&V-zWCzWoRF2>R`x7XZv= zRX1P=q$Y{^a9NfFMTuA0FbANMK}nu8!rAR=d5Tv=;-H;U zTuMpLG?(*idD4yy-%h-B+Oh@&nyaoPW!wkxjFl6GcCnU;N^P#uzv_$`TU)5UN&zOf z%HY-_bGbKQ9$EptUm=2Pmref6XXcw?pF%S-K*l0H&ty3&@%gRuT;Xd5f6s(TuuWfu zO{HuHGKq}i*viGv!jdc-2{$kDgK5!59{b3KhsD#^Pph05xH7BjvY-X$Wfb9H=?l?a za(VxUjlLH^-iPYBf74UbFYkedR9?S76C4VUiIKf}<-5E!JGNLF!7XI%5OjN^KS61E zZdR*I*u+TIAPlH2aqoMn_#xxevb5AU-K@hSJC`rLrd$%&t0$mg=8+JkR^(_ZxCze#oA3Q9|Ju$5c*}!(*H~_nGh%Xe9FpAA z6=YAxsx)AoqJb4Nw=2v#BH4|6DNm2h{PtGDva^k4h*kz4)|;xWxSl>Qy;Z^0(=&6A zS1#LD`FpN9YRkdAk4Xv-e}PY!g)h5BZ+sOUlgM55@@f4Q@#^qe^@*5#WgAb0)CJ4- zh2mzpU2r$0&6ownfv>3|CZ*hIZ?|_ZY`5~e>dd`Wn`K9b1O}77J8$US9~YJwj(uwN zIpFWp(k%A3lV&}qu%t~B%-rJ8kT2INGL*FHPcW>-W=h7!N!v5c;csL(;PKoYUl%c~ z`QBBOmKG*6v$D?vbUl>teoP>;*+m@^h@~K8Lq5*)y#_Ah2h#}j?+a@E;(Ts?q{&$`V4&EunJ0+ zpR=}7O^`k-)3nx~ewMjEA{ktxSF^Sg??^Yox>x<(2eIF@i;JaNTW}hC1t29f8Ol{3 zW54|6=#g54Sm9}C*fKAyR#ww11cEy5^7!b-d$gOPRcrryS32X>z^oB0^WEn2M-zI` zuACpSC%UNlKbq@VRV#w~6{aMyXe0E+GcilZlfJ8Y!9=S1+$62KY&`oXA#KxLaz;<= z=@usxaaeo&60DdvzFWf_xcy%(z}*%*A(O)s2LQbgSsAk73ILVJ#Cc0Cs!3O0mjl?FN4v}!p41Zu%|7unxPI37h03ZZ!E=0$+i!OTH-)U?gpl47CiOKiDANR6Wj zfk&<2CM-x!8%(1AX_XTzl5=1#T&cL|Z`Y?T?#lt4Q>Bdy0gMwpZ{F?E-hpKUw8CkP zjf=MNQBLW7y!D=I5Y1IFV#E}Y_cqn`>c)YxnD=Q{nHm6N!#Ua5Ue-}5x?kCA5FK2w0 z>y~IoTe4=3_nOR)fj}>;$*wGG z(0wZjsD!3*Bs%p%2JLRed}x)JDt;;N-}SUrr6p{_kwWS7#}EUe|YDjI)S_&QZ>1GB9f$Gya7OumcwO$?>sT=CaWlJ(`DfD zl=GqD4B*^JIsF>lt6&1pq@B|JE_?WPB(udee|4xDo#RhD<^dyTbv$qVO2(0;D`V-Y zh>dR{af#f*Tv3aK)Y!MK6Y1vE48Z^N4v((?Syd`M7zz~SEIt@onSx7|CQa<2U5X<6 z$X>Q|8ufQOeQ)QbO?DNDO_z&GRVy?t-On}8tF2$pagQ0Q-j7HHzM9@IV|+!S4y}Rd z4ofe&Db9ZQee5LG%*J}D7ZH7nKYs*yK6<{TPkG;Rk`np#-Md5QYWO2ECQI2U<=Di; zV#gz`_%!$8mC27`q!B$SbnkB)sa77x12eS*t>ochRz_%1#2j`;-bBd z!;n#k#m-E3r2ncr11t;0{U&*I@7{IUmf-3Mue{p4v?LmP{WtACI6tr5)1(<{PH8?E zhNukP0~$dU^VezMt9vZ3F(e~Ie%si^j7?Vc2W!RubWi2dp6Y3X)M8#)>c>k20oTrP%!2?)<5X2hxDsfwXDOo!3A$Z$Q2Z_L)n*J7rK7Vch;vjtWC zioji|ic$cC^Fl6e8W*N{7(6v+oPij~)YnyNY88c9OEX*a5HTO-O9$t*ML+~k77KWa);`48cnlzi65vXL%q5}C(BX^Cq2pDa(Eg4<_JrXw0L$9VL)ene z^{1UQy%ye;1N24iScZ&u>y-{ zL5sX`ZB!zdZynHeBq$(#!4wJ5X2Nji>L=^%A`ovvB7H-1vC~)Wp|3!UmItxuEB&h` z0S)8~XE5mp&uo`s`?fe7qna&kd?8n#ZZ3(@oW)2ibyz|c+~e&T`-*5|-TsDXp3lQC z5L;=2Da;qhFy?7b;}=~)BS~MZl$sOM76pq!5A=%ypp-+Wtr@PRy7+RdaKlw|FfYp|3UeUM{x9ujxV0fhFo;pM6@ zlez>(?~QSvR<*EfS*M2#I-xpq%m_#6fPU`uX2IU>Zf|eTl^vc2_uiG33ZM=OhUR^q z(f8_@7#YK4sA28&>8M7E00qk4)Q}|#O|@Ef2cao%T|s*=%aKIk-oTuo;(e!q0)kci z_MpG%_@()#u&TyfT=W!fzeCfm4P5b4d?aHDzu)qfD5q4Zt;j0EMH-rz#9v!4e&67Y z!cy~?&%gA1aVRhT1-vl}ui-4pSOHZil@&N$08#vQ*3}uUqr?Ib5g?mDwYn$-9g8z8 z0=(W`wq}Fmj7Ao02}}cwO%~Hp=q|hScaM?I@{*6)FG-vR-VZ{{g%|#H;; zJ)V8sv?;wDw(Dx{w#Ag@!NtSnTzzW4>4^(j?15R}ZbE|FPJgN*bQKtf`ynH{@hH(b zA=#elQC6NuCSS%UFdj48ARb%%^34iQ4gO=v`*6BXv$F_LW~uZLEdab|VlkX~!&v`p z@2K!!4AZhFjG38${)!h(@5-I3dV_`m1r58+U+d)vDCR5X*rjG=a2b78e(A|pzHsCK 
zIWvg$AknZUynZ0F2#@2^#N<}IRNX)AxF>R!ashmSa1ND<8!Ru^wCIV6xru|dCp`A4 z4LexfGRQf!$D>Zc>1a-_&j()i<#-xqeEFroofDJ!r2z_zIcR-YPexX?1SgCcuAT0++>Ay;{4hT+X1XFh62blk}T;CGkIJjntVA# zgLtQje|+IEZXvZksHsC?Q%AX|>`!Fot6smoR6?=S5}I#E;pZ`>wqXJ(@GBE3i4TcD^(@I;q5{Ng4dJZ+*MM8&9jp6f4G9Vuj zom-Rl2lJbJC;Pic=dGjh>SGI05GJIL%XSEASO8qdbpo1u45_lDqd|h)DX!pS%*E%t z?@oztV$+An7+X3!wMxr4og91)(PDxULtha*QWGsi>Ndy_UhHE*$6}_6G&!A zOw|2vGZF)A^Y7O{*&Lok=)E|XK#t~`hNXLWmE8;#=$(ZH==oEMLm%D5>oS_VUMp#K z9o2;ULfwD$U(Q$0zhkL<2_Mhn3s9ni6a@NH>QmgTf#1Jst5)mK0-^KFJ6H+aYr~7ml-!a(_Oa|>F-LZ+n%`2Q|1o5aE-Z~$Z z?t*|^0ZHiGwHnE7e`uYI#90G;?~uNHfcNt@Fd>fSqeM_)v1B{4rtxM~3|z2~dQGVm z#lI&N{7U%G64QB$-xA_3P&MjR?B-PRRK{gIZxF*RUHkdK6>saieuu?IJBOs3B;~wt z4eYl+zo_d%pah=p?EhOm%d?!63*L!i`IhR~W+6fEO2y7Z_RylF{;+>n%i`Sh9sky~ z0i686pK#8>?@A~S=m;wx4k)ld{YdyUGk+xiUiv1{C-?c4)(_T~vTLMiztdSc-Q!Vw zlh-O%3pAzAt^McI??9IEUq|<}aZ|dW8l&_-DdX+P@#XyU3AktDs`Jj<0REOB{Q<7~ z@Acy*ki&nkn|@5dAOCy#GQIehvcvu2#Y52b|6UJ>RKR%u_fq);6y(3xC&~X0A4PKB zNlh+EX9x9~N1m$|6sLyZJ%rD|MR`M;`uYNyf>xSo{iKCcJR6W zA55#xUM5Hd%BVK6YIOtQsPdY!DL09ZF1^5g`DXuFYi>q;QBE-2gRpvz3FV~V34AG9 zu^*})>3{NL<$ML4Ce(*-@^nv?HPZ=MTNHPuTXah|dTASt+5_r9SgDKQ3G1#PJdGsr$HZrvFPaA@I2kJ>k#FaC)6tOwrYEAJ{de^Hq#>J z6IGi$1zYns>JwZVvI-1%7JQ*)f1bO-) zZL_t0c5rI+V!e2?nfypCXOx)dj2LKaq3zp~(SnMpq&2qk+h$ZPkg}}RlLqA1@J^vE zZ!?R1v%g0@?+8;3#LZ7ArzZr|M|sV*KcXK~*+T`SEgSh`cETc`>^5P>E~b6SNln%1 z(F!f|_KJLRoTe#_8))Cme?kp4e^w<6I`VhvdCESEmcR5}K3OqVef(jO9@$V?UQ6AO zYi$s_%u73DALZF#L&=?LoL>)Lt)PxSbEm4v->4bK1Pov-AwT=A!&@L?k=%UzB63|Z zlqk4=f^^8rGC%Y2wc}aURsYf?mA&h#i;j7rY*vYNP>v`O@3+>^&0x$YTXxYGCWH^? zyIMEqDR6D8BsmKT9-?$z(gVJ6=B3yJgkM6OPhrs(&2_kM^5KB=F}3!iG;cl16NT~x z%7WC{os<~#N7zwo+V$Shc(LM~!gQ$p%v&+D%062!0j*33i%0J6t)S;Vo|(RumcOBO zV?8rUH=^F<&DvNl^p&xj%(UI@G$Hm-;=i?DFoOSN|1U^9oE{HZDPYn92G z_3(SugS>*>gUcsD$M@XDgjc(m>X;gm&Wf-XgwCA?lHB2XR9?Z2NW!YSrC9faRs+|( zw&h>7&v#eBjJWJ6EOgQ#-9O`wHOqt{y3AD91WNZ<#XE*`m6H!AH%f=BWneC~4m0x~D&pBPgelBW*k z5xGmKh?q60!E(4UjRnRQksmDmwXsR1ofu<0) z{n}7_izZlLzm#sBPI+u=w_tSs)6oqwuh~|TvH609Hle_d15}w zu+il-{XM60_nw&f1W9PF)vN6+bv4s6Sl+if)xYM_a3uWdp75)Om@EH9cN`+clTg3X zpmND2tH-QV;d(CPyg(SEtp^VWRp7v@0w z_P~An)`ti&3#>llNzH0^*xJfKQ0KS0o{g%JPOX~9)zon#F8<%@rsmDJi<1iA#0{Or z|Ez*+;NNrUi?xv~RiOJerw8_O$JVl!)& zXK5S7$;^(wOx|oSzF^4u`-)kZ-wPWdZtZT>Yac$t`qoo@GBNH(p2@c-8nNLMTY`;q za*J9Ykx&%daa)RV7q`m0eVzZps^mGeyBXN5_TWH|^qoQ0&d|nsI@c3EEz0AZ0n9F0 z@u`nmMHGl5oszq%-9`b3-UJen2f^i>)uQ{Q&tHCbO)7rl;Z~gB`MTVy6_vc}_;D$1 zxKkHZw#+!?3{Ivm^Rdc=44-nUwpS?XA6GaNA6kBg#M?vRy|m{mu?b4%f?_yN~}8b`!pTd~mD^vDkLIS%AjkcRQ5V`YL*Y8lNA)EsE^AA7d5 zG+zTO>Jx|m^SXTsBKo#y@m;x?y%#xma6v#K>D;%V8t>AUQF&kK4AQxu zgLj=zFPV&hUOHX>(p%hxUGwKKbp5LPEE`O&<3p=4Qimbt0uXFXbhn$tp6Q>8q7~U@ zJEnW$&);Nj}tk}Ij2U4+%R7vOr{fq6bDqr3NK**q0cxg%sU zAjKnd)d790LGUIGui7ul!Tt8WVy$x{DTChVtnR9p z!HdMk!*_B+aB&Au1(!7fKfvd7fU`P8i{cnpIO$f4MlleSI?!@bhZ|)=ASVTg6z^Hf zUNF$9XMC$c1E`I&7LsW;Pkp|17v3*!&v5o5fcxsrl79fOGWJ$BylN62>HG6)8(=G= zgg)SM+yB!Pg*yH+C=e3u;CJ-Yw&+y<2Fa_ldz_PA4V*)#9~_f?h8xC}MkP2MjOF@QVjQcv%{YK9v+3YXa)aGAII1wcZ}p)JQKQmd~n z5I`M9L@7Lbz6b&SD7o8{t%zX6lmx4e?r-e*bEkgD5fV$!qS}Sa)9#6v=1yzLG@Hp@ z7fyRMFEj4l=#nVbpy7qAPTb_8&N|nA-^^d1BtRoxTsQl(?a;Tg8TOL7s6H)*TchQh zt#d3wrHc6&gw8^8r?qc`w*pFgQT>~z?VWm6$=gFDaIb+{WD;u{WbhRA$*!-O&TMx3 zzqnI>ep`aLbz;FEc`_St`F`5sTBX^RXe8@+Bv(;A9exnp1zMV@zWq7!32$|RSG3dj z54^L5)kMfUa2QU;{ddvq@S#RLQCrd`PnvwSZ zS90r%ofto9l=7Z%r-6CT!&fP$X}{FDti1m2pU&{`(zA=CJwb9ddSijPn%8+o;uQ^w z8Li3V`>ofChb@anGWGIwHCgCgdv>E`JWpkl?mD*Pi{nuKbVs=1 z^#@qnm-zrQ6sD1Jn&JcONchwPgiNc3vKPB3q=QI zm7x8~yp_I~)v3EI=2q71*P$CQ!@240WC+I_{JN!f?j$v@G0Vnzh!JFdJSNoqD0avc 
z(TNqKnF)n{h0X9hdjN(CPvBGcckpB5qr{4t4cu2%)k*t@fSmnQUdt#6(bc|+TaT4H(eD!chAwX^q@+K@NC$$vW*5eWk;(6tQM;-$ zpSJPEcjT;enfQ;kGGU(3gR$-EtE&!I7aOo)QV>>=+C%$05*o#FaWJ+Mi|OCn>!Rj~ z@Km>qKF{h6{rqRyuoqha^I9WKe!ohzR^k2~qdHaVDV8_lxkm1D#!RYm(MfoQyb{T9 ztEsE&Lrku-a$@8a2o4NeD5q@RX8ti;9Pad_APASl4KvOPE7GslhlzVn(XJ@DUX71jbrklMK z6;^QO)|ZBa?TOs9?V;O-mAE52>9^0%H81w&r&*tNvz?8HtAaM^|fC-e%pg&5}VSQ8#TdVUyRa?&eEBFzr>F zT~6DpR1r_$*Or2Z&DAT*Ocz46{CYntJc!Urpz81RNGlMKzvI`7F{fL@g#11wOk2hW zI1L?eiKk5plGbRa4L)K&IgvGMU#3JqI#8U^GP+Bbd zcbXSvq^^8Eyxn^f$Y`}kA3@x}b42kJ*849F>7G8#w$JEdDo>lY|0Jt!OYO*4iFQBH}Bg!nlphL z<OCI+MO~S7su4W<((MIunnYw$yuXjj)*xpVUR|^+sD8MYXL5!8sZ^nYzol>SZ_Bu9QD*ARcSjZB$cMtcL0n9%BnO+D z=1{~^c`DG|HE{Ot8Vu@Vx~_Ac8U67|VngCh>C+OMcsK;&`in#wYK?d}yCMRkA(HRH zqe(B&4!cM6H~a=jI*@MVylsg?3>>QdCSEmoLYFUA{HS`&MSqT?7OYm-jd=~z;)z)E z>PvEj%W?#}+?pm*Cj|2Osq%?KfKhzMbyR!&oNmcpH4y_zxSu`sUMhmuCB9U&&TD|+ zUvIbYxQV0|Ff&)kZt5EG0KYXt$&WIod>V=)3Nk$XL$~%iUEKDC?gzt0XrQLjpeQxFPf?nf`CnY922bw2~ZXoG8*c(JJ$0 z7aXRhN*!Yv7ia$LEOqRWK^du`YxCYW@s@$2H)E9IcxT`Byu!By#34w#`iv9rbG(oI zWQ1e}2k=a1GnHtvF|O%@VNCW+YAiC9ees>05!G;T_szBl+GSC@c2#w>{)~&^v^`j5 zmE{pH=Mj<`HM0jd)Y^Kz2LQGnrBnhS?$mHS#^lK0Fq7aEFzom6!(#1)#g zctv1+4Re1MpJa?Px*+6BmO9U`eO&xfR*v{sZASv|lG+!Lv|Q1`xSepjpt%(){A?Y; z?D9#}|4$H*S(mc7PlFUWO1OMYa}Fnq{j@hu8kxD=_aswIp5GXqI8j63;~I{ zmvk@R6$ZSe7a4kGd@@uRK@B>L*~N`^)hOui(!Qm zZRuo+YvIfRH5B63)3MWW^?l7N;y^s2L5g5vq6+nD2m@X{n;|9Rnrg=2J%0bi0sxS~ z%g1UMfw9kFu=#Sl@-l~!nC!@9*^!SFMI^?DMD%Wb5C<;&Rc z=~w}|?ahyoa8;ZcyV?v6YT{L*S&QojvV{c1pV*g0xi34yfKS_IhM`KGA%OY$c2c3C zYb`v~voh3|(kh1@Gh5JA$7p?xzOiezWv4l=YAS)uIXr*Wb(Y2X6S9YpUWmB@K+0eA z1__Yz^w9IFiE{a@b8Fke$lH+l^rq=M`AL%*T}O09X1&@Ag5fX6z4`yeKjq_Qg~^?@+#QibvgRO=ZsIW+$p{0}aIlSkbWz1ajFu zheb>k$po9*JW=9(J;(KtrUustm@TXxqn1wn=)QsYveoQCXSjL|JhW=@)xnKXeVdm$7aSe8 zMyqR2))=RHSG(&e)JR@Z4G$CL#TA!Xdo(3KHddbv`&I;$@ENlbKaB|B;|Ad%@>`Yk znI_!qB)%KQ)Ghn_=f{)A17FE==z`9mnrU~imo249E0f+nwluIK{{0t5ry@yz8M$bz zN3}b$ch*zz>HN7~dzDX*zD8pXh&8TOGLlprmv8R=^3{iVJsSEwtW_@uBXRaelF#!x z9m$pZp*!NL8UuS#4x2!yZPxu*hq@lKztfy!*PfyPhzh3-_&*oQy>`IrU7F{UWme~x z30tkOTUpo=Eg@`ZWlihglU)M?S*0{d- zUY>XHa1A_Xz!H#bX6n@HmWcnco74Bhwg9%hxu1XnHAAIFDyle}OhG3b&L#VwcqSaj zI~jF46rok`%!nmDtRi!wSba4`6KOk`-mCG< zEarIOkvU9ScNW}tmM@P_p~(Ga{<}|B)59a$q~U2XrF(bcXf1R^Wt}~zs-*QDu3w>& zW@6KJHFUf)803cdlC~on3pOxwG(gl#hehBb^&6`66pDG`sehrXPs7cWU^7#faAHg=LzJVaAvam1zr9D7za3pc*dgC zEPOWGEVy`@^zZLoq3~Mn_4=}2J|n4tZ|N+VejK(tZv4E9`b23u@3Y zF}<-)tBjW`(IW^%k;}2b!sok=Op=EUZ<~LuyvGUw-L6rSJrx*r-%qXI(=Rz`ldt8X z1~k!Htg#M?y7mqmYrryI1ZX^CrUww3vE&=a_5AbY-Zmj@URHL&Cf~|;if7&Y@vh^s zvsaGKWe@1wk~}v79~||3Y-%Kg?OF0`vC?tfdd#L|AY<)#Y?QEKl_Zpc&XuUQ1TI!nwT;Y{(`|K<6(M6 zhNKE<2cdkx{#2TC(dt65KTes&BuJNU^6cgu8qN-!X^eni88*$!7IpC+N2dBRnkW0p zRqit6(uaBbDCz`ZrK0!@#(j@<{W;jY;>PNKkiLgSF_Fj_w<%-d<=%|KoyPxC`*%Yq zH4|2rt5c{iUum$71PiVrJ+nd4Hdu~I+VavsV=67vrH6mF;WezCsZ%_eUZuA4p*&jV zJOf{Mx_6f+&-8saLW{SQjREr<4J@_Cr$$9qy&E%GFmyy~0UtBJ=M!Q6tK%0iSikPg z+5q`UPVMW~8k|DmE$c>^n22&$MX&-1>`MH!*+ckB8dNE(8tTOp0!NXZujROac?`XB zwj_O&QFSoDAwgM)*1&z_Btq#zcfzmxN^_0Q2oitkfyR}%UM7C0u3wF8tKu)U9xc|hq; zP+5f^Oaq9kzqGZ#@oei6#+e_lZ01(ReQX#YiuQC!0+jQ-Wy3}UN}UKM$n^Q4bQs3f zbt6~)7fKMQe*WJuzLN0r5e26q4?7SjxTxL}w>$hv{@$N`bSCmR4K7uK zoMq(2o&E;|QaIRS))#ZUxO5P3pax?A!@k-Fm5>0HP-=gA@%r4(mhl)T3dao*L&xp5 z31u}YlhhyW_(koyBmHNgGM^)2{D#_*UE)Gb00{+&D-P zumzI)yUh~5A3Aa*uFpWvS3pu4Z4X1V`Px0|@TGouMSgxFHnsNelQckUwRWsCj8U$) zl)oe|Pc2ukGq;P5WN4J$hxE@?G)@5eyPW`WNxvDBq)zutIRw?)p4RnAS?MBP8(E-KM!Nx(sQgJ~OwCT%VWv=A^#B(WzW@9 zD>Zu9)6?S;A9PPAhX3kmR*+h%1_uYLVRSInboWfBX(NnQvv+gT*3i&k4OMoD52m%& zjsZ_!seC%4)};8~z#KDoXmgsMcc|)Wyucc}_Ja%_GjT9@oW7auar?UzDZw 
z2)OX-uJXl;7abfNE-;yesc|!@WnU{PEYyJd{r&(%dExCLv7G-%3&v{ZoO#_Mpz25{ zIG=2st;WZsX4CVz(s9_q&h7z~O3f=M(7SgpR76DN?DXb;-MW-T&^ln*!25M>Ao*0H zVB@{lCz*Z@5eRu$tO4j4-Zst6&8>q#V3a>Zi+06KE*tWfd;}Tj-y;?SG!H1-D3-P? zNSIi|Wbp%m3ADMd4Ui-aG)p}fCx;-K`gdcwtd#NeOQ?o!3%5~HxM}a&s9z&?z-stIA;t7rT>;d)>HJ`+LNx!5q z*Rj)dI16fNB=qrKzI^@y_s!F&wV`KF4^3{ylqrhnuXHmK_@Ftz}h$q_N@2mS3 z=x2mH`p&u5KQCUX9-OXe8FVJF6Y=kvbFI$Fw_s?zJ;Lcfw{-O9^vnMHp`Iq2H~imm z9ArG)2>-by$%wPOAKQJ0_vW}w+TV|bIYqMj!EGxCHax;kox!^Q-tXLeSPwEfB5xs8 zECx0}FP3}Ahfe2`B3>&<%zd}QBjX?He^dopG(-iYq@ZJD-|M1uu#3-^bv zCj6OFdb;r2s?JGV`;YHNFTd;6khVkiCwK;#XMEwXZqwiu<#*(dfGoJU4Q(^=E>ACNC zx%2M*m)y%4E02b7S$TAP?hC>CAm@dGbQOeanNl zeue#vhqw=Ol`;C)8A<7>e7l~Wd1=&g9aJ4lXaogt`SO_Ee~ONGXYSzaSmdHT26jL)t*sNSSB z5x=QoO#A5C>#ur2*!RH+Z0s|eg2E}rd8 z5bF`k%$A3ylLQKSZq$JOf{l#dTnIJABf%zr4(EUQ7ODP}Nj4rM4-5$J{#2h5E~LAj zV)=+R>}EdjAis=cul+&8CMv}-WXb@AHMk+5U{?RJ^-(Q#O~TUb-L-FeKS6}3UbqE>6LAh6S~8}QUVX6A3BMKK-!6cuF_z~*po)zvI+H$@ zA(-1M0)%zzf>79jMGd?_C(MM|Y@f?~GrQZ#(WW5}!Fn5}aIiip4|1N&B!*?)Cqe9$ zL#nE-SbLeS`87qJ3EmVEioOS~&D1~w35P3nLm4v7SmfdC47wpk`k3vwf6wra?O@SZ z(9d~u%dxWM`TIV5Mi|_*ypJ(5Z;t;=EAB!OMN1I!R;v{7))2y5Oj&R_LtBMa6S{f%C}Gz`|2)9Xo6v50V_7s(m`;q-?ih5};NLsMe+Zg18} zW|Ye0$9`ZypOAB@mP!#OeZYCtkqOor-R+H30g&0SxJ>&u+xQy4cl|e^oQH>puoB{G z!qw#`>3-~KN(Ju#F!Z$l?(3aQLNn=LRAD-p%Yk6Krkjwp{htumd!tPy_5 zteCkH5A4~*+HSQ07x(Z&w&(>?JqW$I>hV(myKl7Utff#`l9g=dX)Y|XVe@>0Eh#H4 z>sX}0*vNtUp(Zm@a)me-7Y++=YWb?Ppcx-r;iKcVln^f=zW#!!Z65`zZRmj^jcew93iQ3nWHBf3I+!eUCMF@5$fV`Btoz4M;~k zKPiovDjWgZINgQ*ELrdAX%B?rfi22m__8sb%}R&8?7pucr}GPEud5}BgIlJYCsPbW zx~pZzh3m)cMsZbQZeHk@A)|t;Yb!}CR(csF#A`aB@zh+FP+N+g<-zaC+y=AAoN8A? z2r^Y5$3X;As6WE3TVxR1i8~&KZK`OF5#9T5TrwU(!7OwWt&gJ=?=*#v3Z#|Z>j?@I zQG+j6hMs(m7_{0d)S}r1y>5OX=+|^Y+0C`&;z5+JaAqt1Z|dNQ-qBz4rVTM`2#T6A zcbjFqZG{@*ZkvitkBs3%%9j$%NVMGX6@UU9qQ8C_PWH8i&>B;9@nX}%Q>rs<<2Z81 zCeX`eSp0v_NE!5Tcj*?ztM2z$H6w`_W(X6L-Pd4Vh8DGTtP0X!kCJU3g`$L_y955i zN0MT5y_(C8rl5>ATwpRi8^z))-rlg8JhyR%zhygsWcK8=&1*g`c`HTyr*sDo{% z3UvCN6F~v%kJ(JUD6MyjZLTkI5bTH~n+V!%{K>IA9andp2=&+y|A7q-E1kAGymyrZ z!_ND-H%xXUsXTZ2b4hgf${A-u{T!HhRjC@_c67egd1YP#f}Rb!x1<*I!5#5;py=*H z#@F>m{Jn#5lb`?pqF)dH;}wVWc)tHU41>pw{wL`$m_u&#|4aGgYL4PFI|U#&J6k!P zg>Tbh{<08<>t`s(xQ?ZL-FV$?nbYq&zZkZ6VZHO?zsZ02$F19)_A*c02)v3)0t3&e zcRM6Feo4b-yZ^9Ay|twNqhNjL|GH=#>ZnRMZ}yideaQ@<1lRK4kPuDZTag9%t7h%p z%VBx$X@2;tIfXT#V%IeHH#9|J`bUn&TL@#;onwIMrs+%vy-UL4sX8ZF5mVlUlS445# z5>IiLlagSFB*wP(tP8l|X~Y3~QH;KVj1u@>?_3AFqUr}k8$)Zr%37#p!kBnEQfuFp zWn9v>+47pFzwp@q8SaDdawIXWFE9xClfzu+d!t?D(|%nwyRD%{pF?sWbT*JsW5IW> zf;`iCTp4Lq8qoPdgM<-9`Q9T(?2D^X9|?G#7Y>Zg>E{NQY*RRvtuj|X4(IFOblmPW03ijSnvnz{wdia=Tpus1 z+8-k4yfRv9bGA~s6}rUpYkBDNBz*^iO6{ZCm+orlJVTY5Zjq-`dqya$stEXi+84Ww z{79hH-LmB>nnVl+w3T9f(oQV3&?d8I0UXFbT(!kXoH-oaq<51oB|U$*Y4q}JoHdZj zbaUf<7$G?@7#rK)sIw%rb14Kw66~j+XY0keZ0@@BKC#~~yv{IcdWJ0dI8W1Lq(;Ce zwF@o><)s!u`C86>OtO*e6~i4=)(ppQVmf_20r=_I>G`maQ=+4 zeZa881i5lfqbnG9M~+(f!TmRATJP#yOCdnHc*w_b5x@8bsvAymO3m?Mzh5@6?WCl6 z*sza-(RR`RsJZzJTJ2fx?{plyH?YJicM7S1wOsGk5?^e}wc5$M&nBK2y0bbY_wHpF z=|cx-OO*V0JfvNq!lK}ilf$(Uj|=AxUcB%5qo^%e+Vk>jgR*z;7`_^+2PuV?8-uiG z-h@q!61TZN)K&EBK6xcmDm%=G28UB7J!y-z5?n*>2al2ixcjFdA`_?zHB@3upbXfU zcPccbGeq=S_O5jhk9@HYTduIJw>sz}y#9Ahu_z~@r?fR^fD?>KZ4anFdYt8?!;cs> zY!iVW-&!t2gGhE~obX{HtE+qd?=z{V-n+YUqI`~fEyMzPRJp*sryX zgT7qM$4D=qM+=5G0=Bxdq*iCOIX{?Uy%;7+@1{OZ1W;&1j+vJWFoaklB>(4E^AxPR ziL2#w0YXfDI?r;TP@p;2uSOB})x_P=@8aFMI2r6`#*^RTz6~ zJkSrC(yy}qtZ=XGLE?^*107cj$T7*XoDTiAAX3(EBPA4npKB=Vi~fB8WMSK{Tx9ja zr&vzmqgt+p^8&Itv5N4cyjyQ&3EQ}6wjKWCR|S^`^*^nM*9v42){>83q~z}q9|R{J zJOWgkQE8_x4eeF;bha|H3H+mOD7LRXwxb=`1f&+-yo#mR0T$qxyGxHl5UjGr)Ez^j 
zX@zxzNvFkyyJcn$9WUbVB<*iC(ny7XgJSwdshlo!{R!l!xKyIMm5XEJ6sNL$$tQY_ zK?-XobqzV$8UkG#$1SKMTwjK{9>ThP_>@W9GfvR8y~+-9^uwPSD)^{o@D&6t8TEDIfYYf)VOTlU{QRT;nls1W|_G zZiLk<Pd3`U@mk#va~FM1Bg}IcV7Swwn`p69g=rG>y6NuGu1UXqjxInl5JQ~4+e~LrGR_pX%e#kp%9Yh1 z0B7P63>%`*V>5~``TB86kRD)&7MKMFH{X68T`ShdfW&9CeRQ}jy&izxu=BrTaq0TK+o|Afn!*$0n<3mR$Qu z$PNKSxWWq1LTaY?DAzKF5gwC{ms>Lqvqnw_dA1Dm6itRkKLKg$MG5#^pr&!PM9N&U z$-K}`QaUFqU3C;zYUp&HB_9u`U3%HS1v5VO$RQ=KAQ^5qVgcQdo^{T{5+ar%m)nj$ zZ_~E7X>Vxu^?b+DmoM9mOslEn(;b}Pg%;uS*t$#*TK*KVT-tR@?wcxba(@*GhHyO2 zKGPdLEbqksQtdTn?xZU_&!lRTol}T@UyyL21yOgdM+N6)a!SF~#bA2otnbTY85_LQ zO6-nth+PPJNlhj3Ly;kH+5E&Rk>+M-6|nbsEVwbs73eYcChTexV4xon+V>+ArSxE= zF$tqG80-4RA+t}`(5A3YWM#t|p6n{TGL)k3(tRTs{%lulLF8vR(uzWB3o2Y$z*(Iu zf6DX35N*M-k9K-t)gy-13vIvq#;g1Oolb4_=s5rm-t{E7Mk-kub_g;G72yr`Z3~W3 z^gn2NE!x>n7~S)t8mJukW-M4%m<}#1DAlbMA3=PAmB+r$B`JhSezpxpl5()tRags` z_rGK}Mwme8(nu~G3>YYkm&~_Q>rCb?@Nz_1c6?v}bVo#$MFx6k*{UckOEBDDoFJ;W z%kj*QlwJdp9z5PiZ}*-x!qT~pX&4`13+Ch!~ltDR%#wzJV!kRIa1zN z$9G{4-s5Z@>cpkBto@Q|C!P98arFt5M@xtHaqRls!RE7+XgNuRvS0pA%6Iw48c55~cR z!&25c+85c>cFq+cYVd2bA$$X@X1JoJ1D_0dg^O@j*iC?(Yw4nlmhb+BZEMC_r_8DU z6Cu3ee#QFpXO5R(eAjwS8>c_zi9EixVc9D9Dhn`&80Haz8+by}2%6)-RS&lIyvBUN z?3JYrb2iQFy1$D)I#bejFPupwtC^<0s)EeNO2AtJQ3R{!?)tlK@Rt29JDXJjwQlF? zmeX~h;JNyOL7os10>)=!3K9dVOA&FIiVVt_$lFi`p-O=LBEFDa#~ySd=5&NpG6?<+ z!*Rnge%0IP?Q&5F@{_@X4b1xdjShoNvu^O-UZJWKl^OJMQBKgc&aRPN?%NT&(L)fR zI2lnEnN+p1AF8$=LU`ZV`3a5(KQx>$$)GrADpZI4CLw=O*Lq)xavp?~D(2>&d)_}n zEYA6%9sp-1xUqajZ1k?D-$eJIzmh;1-s@MFkUdA;!_;mUxPBiuhO$EpYsQ%ysB+3D zYBrf{6pLsj@D0@k>XrgoO8+ETz?0jZr)4ou360^l#BhJ`_+jW1!e!)I_>OgJ3KfQS zv+=egdw53IPATNlTiQq1%dEda4km1?JAJNTUDvw|rbYHtzEW~IVW_EUFH+M5$W(IQ zNJ&649@ZHXH^zDM!ey;XpG8g_VVNUBj6X1-r?QaM=|PjcAX?%5zCVx5MCO>2kaGlf zqGZ|1AXTY)GZp|N4nMNBw;-{F)fa>^Yi}63-8gU#Fzy=@mCjfCarZL_wzjIo;Rk>F z{IL9i$~Iw4O*8%sS+7qaEP8j&_7+S*7?)AqpaT77Q*zZ_r#VFvN)(2lv5B)I5Fw&N z3!!%wMlkDv#HufgJ+G{b9Yfc^5PPuWL^M@ER?|GLYKPV%ot`Q5_*$5ZtYfLi2%O(1?JrEFd(t#gCzLQP_-P&v6IIIdmDcebXo?V!%Xc%9A z9P;c-#}IiJ9-pNUo;^vzrcH@;<@iqtoV2J+j90SPF@Cw`zlgvL&CFOX#qQ({i$m2u zRl7pB9%A`lM~^`b%Bo0_oqv+7P_G<@POTULA{6msVp&br-hRtd)pq4$xuZ97J-9Oo z9{UMCb+byF{g5mqirm_rRp(O^;#=5{XQR>@rbRENnJaBBaj8N_5)$kJL$pAQ?X zqs^q_^$ExLM^j1f9`)lsoV&JH+3Z8x&Usjl91)I|_p#2b=8xzHUvbcZ7M`S#>f314 z#EC4<>jk1bxk_oNa*6%mfNLDH^80n6ScCr1a2?sV|=X!(GXCa!uk+!bDMFIXvVmdAS*9lSBy zSWX#^q)+W%mN+^ec| z$16E&jNWgo65-yCtNdK&h;>w6{3_P`;o*DC{q1;rs84*x?t?;pf3Gj_-2L^RuBiJg z^@;v}f{&=1+H`gyY8%wba|RTux={2|5zi)ajH3cmDo8;%ZdhnwazDrV;yT6%bJ%>V;}=^n(+B&CXHA5k1Q$d_M(1_qez>nA zga03q12NPkbU6Q5(_W3ai?F=JboJ8+jJUeo`VybN$|!ueU@h-J&Xwpz@WZF29XxN$ z2KOrTavyGt@vXDe%qi#4x1b$}4E5jZ4zs1)1LW=5Aj{SQY3q)E)|-LPBikRcaJ<_h zRM=BHYaa-*q-gb|LFD_SG?e{udY@4_7jdrtU-w;9%<`rxx@8*x1ka4K)FFw<8lLx% zD;aQnC?yy{Z8C6(rH{2KE7E>zDX7^L-}mAmv?<2ZyvM-oVU7YP0PXP1HCJbV%5rJo zUUC!gwhi0Eb6W)Ko1J8BD;FNln9-49t%IQRrVQX*I32Sy_x?nv6*9|buwLht1(&BZ zo0D$^q+3ihNkECuKxNPiLUf6Dznb+!O3hKVG1+X+P|0P;{|*w`>_^gel}=_u4ElqMu6XM77&WRrRgu1G{S7>AdkLXsLkx#<@eU$L!-F;lD#RXJsGv%ttHs z@F(Mx&su|!1QEE)l1z=w6Ewhp z3#W;cvHGf-PxU3XeXk`Kuhy}yFyU3|B*(AxJKYrQX&h|aRVL(T2e2gsinqnZXS0@} zw#?)r)|0sR&DGs6ky34aRxW z4sEnfd5(46l7(lA;KKUrT<>Ka=UIy9llvXD+?&XEWH3E0u_Ogli_6utbh-Ao*v}N# zO1+I4l{i}Iv&fvc4GFd-r!?g*<(6DLM%@wqjh+_4Shr zP>Os=Nvs{$UqJ@aCM0AvQI<{H;WWQlr-m2**7_~;X;L=0qtV8Y%WSuG!v7ZjvHds7 zFOq^wRIj72;;$)*)-d`e4qf2Ykxn>NXl!bHI4{J9dvSHM*`>pA_lvdWn|5G;+y`x2IGF zEy`X*79K44{vM~iyj~^9UrQ8N;I1mCzq{8*n!`E2*6X`=WR=d+QX*Zx1lH6Q2996oPL6jl{>b>k$j|DoLt)-GZgQN;RHip3P2PB%5)E| ze{^ocy1m@H?+bq`+St8A%Ct{hovfv6p#69F#Nt~n5dJB=1ZH=70fi%`p2C}{*FP=r z$ev^1{)l6wX&3$Fgvw?8VNZj&Z6ky3bR6ZmP5 
zzO?D}_orw(L3ZrLe$CP&=_u*fXebKvL3^tno+yGtznwe?;cma)4i7PIjyA?xM@EN& zTn@3%ZdCH$pv-Jz=`-Qe;e|~UO4)O{<^2+cYTH`kdh?x9@N|lx9toG^4nrM!^=sh` zSBGh}GC~~+;(*f4v$`=&O^|^>-pEFwS@5!2{Y>z*go4#^O(DM|X@#5S^3eIm3A}1O zYkFzPx9n~|7n5r7@0^^QtajCc@)gtBkF(u|EBOmsi@avE3cKzh$^^u(o&6#JxX5du zin2qhKVGaaM4=ZZ+A`|ZogBV=I&4lZ){9QRxgZs$8Xv1zAS7Zpv%i3?3+!cd{}>&v z?v1wV%8{X-u;j%Z*b*lzVi^vbJXSm?0H{`Y@?ig%|$_8|A&r@4G2e7 z=&-x=ho6s>Ft;kcM6}e&#C?L__1OyU?|h2?Gd|BBX+$qNmV~;MaS*O26nwk$vBp+!h#~o%hI#XlmmfO1H$H$>3_|`qx#1$^GHZqr_2PJskqlr)pYU){_$u%>j(d89t#;U zRg-FeHH$py)x#YRB~9zyfQY<;%R21d=Aaja9pYLj_3$Ul5fYv+5jHuNb6Ny7uBGsU zl>7e`W^VlvWSdwcezU~}Qm1uMZua)+>nz?S9OhjJG|L2Hz0MNbs4 zD-B_P0LCK;FU_vX*MJzKGKO}1%s5qKrnv0iu;pR2_e-V$Uz&l3hg$h~*xj~h{May8 zWL{(uMX~MScC%0}_G}R0Y)Ip;?BJHIz=Z&Z9xa&&4v((;Z?Mb8o3 zFeKZynBT=8YpVtvcjUlME*A4kaifjYhzNaFwZWswJfqsI(4_{EskENyvPZ@Q*Ry8g zyOxP8TPC?DqT~(#&hGJE1Gx@%77(Dd-$Yc+T84*7ae*uC54M(elY7F*XQ8g(-Ctla z^$?6d2X^K#maGgQxY2i^av{IxFg?rO@00)Pdi_X+hl5BgH5h?T*X5uWVYc`1M+cIw1-}S0FiULbH zTO)C>vdEsGH7;f@vPu(3frlY*{`e+S@0S?O<9y~4ACh$wc$ogjO+$-Y)6b9a02)be ztIUirXCsHrOxc|^$^8MTP^CZ0y{MhpgX<4M_Xkn&s30maOJC>mXYBIQnFL_^Satud z5jAG)_6VxO-)B^es5ALMaL?^Xo1VrZS2R)ORY`%WmUZm2c*Fm!lADl;2LELSLfh1N z6{jX;CKI^0CQ(RK3jF zYsKsCqVR*}J|768sP*@@F|~3%22E z4bOh{n2>l|-QQYRY2v+>e&v0g_UMvI(j)~-JE%udR-w=th;C<(hE{FGz9!hel!;hBIs@^)i)(Gfv{Qe3lW%+f!<~ZM~N=6fikSL%QLcsO!!tWoC zTQMyiIli(xRqOd{q~Kew|JF>yGZ8)+SHkyi&2KokB%c;(Jmo$P853mF8VItc7WC^T z?7^ZF!X{Zo1Z0GgR~vFJR)1vX9siuJ4|Rn;imAU?E=A)6r<>Le&1*ZxB$eu(eW)+Jv|&y8koIxQb9*IINZ?!T1Gv{^ z)+JE0z42rdR7A}e|1fA~&vG1AO(qZM{A(>)x4~>f0QJ)KZ!W#5TeFS14#!;erC^*T^g-|5MN~?uG714)xQ1opXN4zAtWN{;Ak3 zqnKCYWxr}Z7Q4;)`!wVcN81Iy*<3A-y|<9xM!JRNmoS&Eo#N5bUH&BJ8BjU*#MF|b zC~oM$5)mnx8s!QJ^SF{!`kEm&g&W~)N5eEHMK z64prHxn%)xQD19)vt7gd&kNM(TAwf?LSZg^J)GekWN>rmDEwb|pJ7ivn|;iMSlVj| zla%L>j=F~_wJ_uwI-ix=t{8daxYhmRj;s$)Iu`kcsI>;aMlPJwDy@E{p;q}VV%B@E ziC5%J7%`4fN8JDHDs+$YTJoS4#Kkt`k)%dwey1q0YpWSTesZQ}mf8Qr9~0Dk{1mxh z6u{+e`qMEGxpB!xTWF%jJ*i9sYA4Rd#@ZK~L4J^rRZ54QnBu2~Jb&>1>X~LWrnr?h zF{{y-F0l^PeN%+~Yro$9aB44*wz~{i?tMv)GhBwvR?xt+cA*tE{i3$(P)`egQ`q+; z<$8hH9ME!4yG=@Po_*c4R$aOJdTkm!c)pz1Cjd5CJy!J0Osmd&Z|>VrD>js{6SIX5 z>1E6JpV@K9O1+}a4^*6YGx4% z+GbF93_n(%%D9#ke45$l*w=RdmL8(Fd_s+n~nq@S6YHNJQIi|l~K%Fdp+Y0)d& zhuUUo$8g7EI+DK!t`~g&%{<`Ydp(EiT=syUdgBjm_Ln7G%&(Z0xZlY>(LB0yd1OCD z*Zo%O<5Sch6)9gNvWAR{rS7oExO?&^ADg|0`mCDW>9t-P!k-}b&sTVEz^7?PJw3Z7 z)h-+f)NOuW>?9%Gtxt(bjm=-3$tC0#_Ug&s1@u{_6gmn(t+dU*E@dG;R7V=|Cy&$f z!gF~uJ@wr!oQ$W}l7Pi3l}-!rv3KSY?>B2d8HUU5*W%T|LAW0VwtQoxy?%p{?{~9i z3I_JnD-!XTQplzlTMxiyg*M$;U(x8?_o|UbfW~Zn%MYN<1i~RD+Y8Vlsu1T%7_i3e z0%gA^McflpZf3iqVIXoRLNJ_CX^^R0qQF$^F*elO17e~T6rs9vt zjRA%y1aTyGGXPa3VC7Zq{yL^}hFw6FXTau~;y7jG>{0Bn&z|qw@f?Gbl1grG-j5z* zi@NlV`+E=WxRt2#!|zU)J|wZG-x8qGe2yInp1$>?;{Li0NUx=h8EJV>q7BYK<7h(o z@h7jU4zUj_PF`9&<1)io*x!WJzt%kcqGeNXO6`Ts+uOhWtt4%adv_EJI7&@9_P6Y~ zm6X6a>RvbI0*Tqi18mMERcDdrP6qb|2^@D>{FDBHowbZ|eFuM^f|I;!c{fXn@5MXE z%T6UEkQMC2FBAqy?*vd*N$WHp`y|&i4sZ1;{joJeHEVf%WNp^KNXgR{1F2L7jy&2d z6h!+iCz$zTYrg@JS4YU3R=c#Vs1d%DTX%vU6Eker48aXkS{29|=e=wVrd)MRAUArP+M&EpLbwWa7ZF+&e8$<|)((t_dZ3PQdhSRM&^e6|Mr+Cp{W-3^#* zx)ZlkxhymvLeKQsX3H!>2w}^w`fJlzruf-J$FH0!Ne)wz2_6M^+HI4*BnViEpS;x} zT$Qr%fE384oUh3h={t+1y=2s`cJz8wyxacIs)z&&4lrnQJrJ{3TKmPKA9S@u&;96U zy0A;fT~Rgxf53k=|=wJui&NoB~@L;CA3>vFgmyF95biChN&NASdbhV&yy+hf~%^@yX=6_8N2tC{eh8?-&M2e0C7p$ zT2BOn{Eeo3vG`{4)*qa~o&W8(&_UfBF`PJH099|Al$3GaTQ4``T+IYVz+6C8Z$CU; z*r|=M{&mN(?X6R1qQkvcAEa;3`O65t!f2z9Enic-B3-dn!2q=9f~>B>yP*uCc{K-H z>BLen+8E>mitDnuKQ(b7404aSgd8q36wDAgIP6!zuAoa8;WuBLbFMUzUupMOjG#3^ zOMicDtn!TfBQ%4+a^1FkFJdULXA^S?r6!%+*U3vZZ?X*T=8~|!8TTOPLDVA0_sTN| 
zo(9i;x5c^+d~UJxy>*B0*>%qM%ZZ4W__+kc*O&7@B9cT5n|*&Vb8+x14t~)%n|#zf zgAJ#vu?u&!B#t9&mWlhdiT9|a{h`>xcy8;UFQ+miI@$~?u#sGDCkXm)4IaF`^1~_!PpQ~Fz+9>8ah2=vr?6BUP_sc?=MUT{! z5l8moYRt-kR=%{IdX%kyzg#$VM6k2AqULvh+IalRx97eiY>~jJy=UueK6Q2qc}r4)%nd{RpJASvOk+L|R{i{jlWH2tqkI)c zLsy3#y-)PZ(EBRR94OJey21H=Ev%oZErIObDrLqATR zpQ%96`1Ze$Tcg=p+qT7da_^@}vy^>Q_^ukXmo3Q5ra?Pe&T2pOQ;(E60-wUBT^Z@X zKT-46<0#b&g)-Sa1@%nu3+gKVbEpFq2NnWqu#U4bR~`ZMWQu`(MY(Z5x%NSUt6H z{F;gUXSgptOiAK=>y<9M<4O2#!k)NcIyF&Dff`w=+%ywZxE)5E4SIpTI3RgPaHT*t z{pTkD{5HI*xmi+S<#^LwbD^$f3T=;|@m+_U&hOvN32FO9^ z)f$>ysT#;NFTRnT#vm|0Lv3SzxB+lG6Y0No$nZ|(ugox5#~vAnNVMr_u5%)(fVuSm z-#jLBYI_ePV6_V)L9XXEChESa|in;yAL#MP!-bXIY;Jr;y$=EBW6O zsR=vW{xvZeT6tph+!gtAHou7K!qDz)iKM$CH_E-Hn!>hEU}bBbH9* z@90eV?`(Mq%zF#623!m04g~0>yRXC0OepIL6=}6gy3^FI!aTwmT!f(A^LczY6?NOH zib4y*7jTe$p1%8EY~yC><|(h8uBZ~+(-=}}-IQ`O zkyx#{Nm`ku>f*eEOVKl2awz^mhlOupHm-?twf3ZD!-7qqGJbwpumx8iro6OiKK3lI z2vB3tpcA!G@!1gBAIDds@FAjkCXUFfDqv6+J{`R$;W@@AS%(JidQA)(6SN0X+-nz{ z2QpPKl}=itWI5SYjH7D`!Vbw-D42dH3>!PHd%r8gQuFM)OQKalcssANfvJK@lf{#o z8Mg&gUY0Squ*Y7i1Wew0Do>lUS}(E|N4Zf2dc}FC;dTNWgs#PUuZ00{@0Qg;yUypt zF*t-$WX{8wQx51&P!#2)W5aP7clyowG^Uo1*zV!+ zXQzlir39_+PWv4Pn@R}2E_U#%X2rIE9j&dOSwU9m;YY=1)U zNfCq7ek)mD3fV^R8KQk`T>LPn;zH!5Q8IqiP6j%Reh6p5yy!f=C{ z|K1#kll}Im=Afa`{pt=qH)GJMCn995*z&vmM@Cc>*Mr{v6L9{3Fr+2eEwRgJm^_4b z-DORc0?k#NH&w!#50)Qqkx}fT@&Md zNqegYo71Ii{LWg|8)eiU+e2VDRqnIhmK2Ap@DG_ORnYGI>chAtt{c3MUV7A3vZ8-} zkJWe|);k&@M`z^5Uq_@lcavOAw)NsOyIQR)ZKs=q>X-u7G97i&dEPD2G8i$p0+y`Q zQpKiEg=XKdS98&SFP$$j^20Ix#`y;`$M5RC>8k;~0_q&9L8AjR)z!bX;r#B^c}>cM z?5*^}6l|=evN=niFnK4?74EQ-F2@6K3Z@Un{mOjreLcO{^|r<70zQ9FS^cK}b{dLu z6!d4SmS-68Wt=NEykVnF7;WHT84@sS{LnKwtlFb{D6lu)b9?5kay!{VY_LAGImkm* ztrnC({Dk>5Q`XUW6(+e(U3*=Q&@vMxS;Z)OvNTirr)}#K)*C%LwvkzSQ?>TP%Sq8; zQ=9oN$WLi~Nx6()A&k;KYBS>EqLt>ikbMsdYa~lAF#GrNaw`Ym5?Vms%W`!%pSU$E z4wQfGzsD(J@S|NHE2F_$c#KDIO$kEUV>BC`v^bkkkc)sf6OW!tTiA9hONyM6eb)D* z`N?*-NS3|M6@}+-`i@y%#nm}?Uxbv_%vE$rKFddI-|Na?lAdDJehV4pR@J9e+Jz5O zu3<;6t6YZNovuPWW$&!~1PG_)X(w%4331<9CJ4wTnx?^F`|3ZebSDJ7V2muqxBCUyT~e56L1Aufc- z&9SG;GYaATa+rf+V1NhA*u*+Hb80v0np_-X_uhQAhoPajjg#H7vS45-pxz&;Gz$wI zT+g3GPT%Gqbl>@rn8mSn*nTCG>!rJLlTKxKt**qTaro>jd02GNB#rhJk4vP`M!JFe zAn@k<)p8rS;<`2MuLga7k*v{ zC#@DTN&pYg?&ab?!Q#qcp5X0JbVql6+5)GD9b~>{(#!s3VOK4lLv@>}i3?E9bF7*~ zTCea^j^Lv%#l~?-?9NJ^yS)E`fZTiY`ONv;%TFS2Y|nx5t9T9R{Bf^d>y6VUKaaH} zGbN%`@J%;LHRE|A3L_2T8^FXZ>spVTkIouY?erSM4#$=SkL94b8$}j z-_H$-(!-B8{-_?mG2dVxq{#9^tnY-YVsJra9cZ3-I$&kc0ox2&X$g}?=j*2ZX5$Uj zThES?Wxm#?_QQmzLdVsgG|zypp2S7-_S`hH2w}2Pkdi%q_v_n%;4oC<*%rSaqHOA; zi?1yI1nGQ`#I;`p{CWkQiVx?=^f9@H9j(iEAP`hA=|c^g_#Q*^@I^Sc01}1uKdEUU zz7v+qdRAG%`c$OgWYAbT24ld}^vtVr=PcgRn z!sxVmI=NSn{zRT0iLVt`D0Z1Vn)Q_8pYSm5-9_qL?xCT=gTyW75haEk;=tg&v+W}0 z;tJJ9{UE6^a*=oL$xJ?}5JdY&b#H*11qztK+$zR(m>sy}LbVQ6@$NckxSsvPhH`R4q`4-uVbJ(MaV;6La0 zxvN7N3(sS9z;4&bJHtCuL zSxT|kp1P_Y%9F{!l67Gi;wr3$Xkt7V~jDHDZ9F=j^b<~te3^^x(@Jv{{1Dm zGw~_|qVD@mR=K3zwz}SES?PE(gpwN7y3f!!vgn7M8$npbf+c^>=~Vi5ZHVsN+MWD; zf{9&;Vz%C|t^J;G`LKD{idws=;?~nZ@-_djU&!xbb#FRbyD&Q8RUH>L*AS3O`}!yGi(@H;t_?>Mu_Zx*cr4n6U&?& z+utkvJ)w#NKsgVqc|J>Gq|r=<`1FU<2W_dk59cv555s!4YsXW%r{-r~j3l7Qvv?W- zz5d-yYS*rjRaV*D=I*zpKK8MK`U>?@?G!&Lb(LzL7(?MEnEiR#cjV%>kpLd$>oP z3f-KlWfZLM^z#HG?TZ*i;yFQe{=#F7j^Ue57SW{k_fLuSC@O8rvv(2VtFykFG;NkX zUAon@Ra+QTqJ-2>040crkAK@OM662+>~y5zp;nFCxhIpbnL~x^@my@`9@F+l^l?HX zhF-KD7snz8l&&IJIDPm2Bwu7R5(3&nAIx8^p9@={m!oJ|9)ffKQPtjFiFk|70stix zE!EimAGY2)E~>739|jRYX;6?FP+CCJ0qF*%1SOK1x(Fq(LC0CymQOW>-T*&yi1$n;K+(G!(_?aebTR=qT{JnZU zN%O>Ej%%ikYIe3x$LOf`u6CncGb%Y1y1TjcE69-+Z}-UdQDd^K=sUWvrly+_F?djY 
z!Geix4MXkNP3U=r(c-nGz?0l#SILCk23n{0E4{Q660Ny#iVMbBh#%Mi`k`2r&%D<{ zEMQ^40_fYf`yFnBG+ln^6^pW;LwmsKk*LRd)!pU}uu?S#g91Zbx>Y6ss{tt_z9uEH z(8<1J;6eAME7EshR#jQu#(3*>K&-^-thIJARdHjPV6FVe^jOX>_PUk<`BbT`+oP$K zZ8a$PMp@tPv8{c!R(^A!M8WQ>TNP9!Hf?TlsY438jwdGq@-B;9>5?naZ%B~t$G>dd z82-}RV9Nj?AD~C4kqiO44faoK6*pjDE_VTvcDz!Y-S3Y!LtD!(lF*C%v_tELo7smD zRo}nT_2}lK{>~^eCJ=vV7OQ1`v;6)$KJaWIe#%I{Y?=vJ`0>yBD+I?vXI1*KPk)uE z*cm!2L?Q%4Co`w&&wfb2b%;0HeJncMU*kEYiFlp%NMv`+eXo_~rXtzg`5Xn|Hs;Ds z`jr*A*>=1~d*(U|7bl7~Mj zPBWok&_WIfx1}$c?NE|Ot)xO9=Wet>>Wy~QitkYzRb~o4KmFN-4B&M^uaA%(@vxE@iM>63Bh*q?y5%>D zv;Wl{eSUTji9BCIo@<-qM*O2DPa$Zh6VS?s75;UAI8+m)HLg?j6z2)9} zZdX?OEOD3F)-B{9O})=iRCxZe%G%UB(tDqj3F`BeOuQ1(^OGonA4TH`1@b-{!^@vi^Lk*C%>9mho8J+7W)KyLuKS*x2d?B{0 z8#n9JCa)C<+RR`m|FDFY*Z%$MMJjx0Fi~7_FOp@3AwX&Adt?IMDe4wd>=W+@AT^&2 zr2Gm#-W#;r! zoQL`wVX|pdH>FQRv<4&uF8TcY6kH&C`oz%lvncH1DiloQBs~lMT4&26`Jmqkll|Gx z&#xQccw|Qw#l@2X&Y%#SL<}o)C`0QVysQz5k)J3P;cUDki^fAN0MjYZGBQq+bYD%9 zgpu6Yc@@d>aD$qa)q6P?k0I=ful*Sa;+pUzD(9yzBq!`%8I)s3NpQ+MIs4k=8el(9<}(Ej(>XZ4@m`-!; zL;k!za`)IITTkhV__7Yc$Q1!Xi31P^82Rr`$VPtfzYVYmf57e33In`w2~eZ9@?JR* zeQw<P_wt{=iBfMa9r=$+IWU-#m@&4>c$|kb$db+OV~@ zHf73g?2o5b=Q)(Gq}Am@<$CbS;=<_szJCY@fuXR=&nP|OL9l_H{hX&Q$!IUxM1BTY z%GzWF?TWUI>E?XffogdAK%Pp@<6^ZMC18LS)^ao4J>RgI=XbzJ0lYZ_3ATG{FSp^;cH>m@(Rt?FXmowev^#=?#dlL~C~!5fOh~Td-5c}-pVecM z=CRGRn|P09NEAAIs@N^@X=)~-)JbxUpoAZQ-=C~D?=$by^!P;)1?VB^IpL{-=N z4Bz?1Qwu1F0GwOQZTV4&fJn>@PS$f3dK%4$Ge7~2tV_4q;}55(xeBf%<`xLhRpmnD zI#FJ?(`uWpWv0D)Y5&V`b<1FQ43Ir&#vH6P#VhgKFP}>d$1hF4)#4P~xkG1bvo=Qt zyp)Yx`J5af#2T0@9*_am2EwMQ*I$_CC8rBKv;X?OS}OPi|4qAjAF(#}k81BtJM?NB z9Z_7Sn7+;Ljmn1XCj-Pp3hVT|o21(AzUNqwLOf}l)6rp*ElDEi0RFR?ds^-3Ehxph z>eY7b*#fLn7T-V~bF+%NeL9gb|0C?3oLI=XQXxe9$Wrh+66piG7b(T1S62d7uu8pt zy&J$fEp97$l!SYmNRkI1QPvES0j)-DKBEeJFDWaN>MZqZr4TnM9fB^|9B<%@1MUSe zt=%Xa(xg?ctAR4Peas&zqwn!{|6TDN_2Jbds0w{3&N{9oN^dS2J9x*WqHcEs6q#prMM=K5`3O%Y1@6KbpM9m z6bgS*ZP!Ak#%t>bKn}Boon^R>dFQ3MZ|2lu@W3u=Mar#`wUMVC0se%-5-&q$^+<#6Jsp3HGc7njou zpW{o=qJcQOZi zcH20gTH{V_ei@}IEs*|HdDg#q36ZAp@hQQe6N^1kQo;A44li4h?&M%u>OO(7ghF=A z&sscw$+QIkz^?W-a9(*Kul6mHK8T+Dkb+r_3S^3UB3iv&b3<@R!A!r~ofdP0KsOKZ zOvR^A6SVhf-iWzY_YX5n1_UZ_pPvXI)ZOOl&!@u=*VM(woOrGcM|0&~RMmzA)Ku7Fw>-aA9_I&a&_L+Ldhv3hcvH5aYH}n{{ccxI8YPnR87)vetTX`f2 zgZ98@l{DU55DtQcSEbyF(gK(GUb(%rM?@niMI^En1@$SXd3u?je=q9Ptiml1$T|3! 
zQG%CUrP8^t6ZK?2K;r6W?yYKh)@{1g18@8wRo3R#51P#q63D{-%pj~%t(lRwE$kVeRGYAe!J{nX6|4LVT zP#x~L)TBFhCu-UP(2+G!TI^EPo!_?CRUR2vo5{~`Os-;r8FGT8CfcYTyH@+N5rcT8 zlHHfIKR`s{AaU64y?ZzAie%<-)01S<-8 zUjP_6uFCwhrqid;bs&YC_t}a;yI+%=8TX6A*h|Zs=>k^63fTSMhIOx>CKI_=&xgOc zuf(SCS`ARp-929kh6S$k*4$^M(M@_N#xY0-(|Kn=O~iqRjrtY)i(Br}p@c?-+FH||-a1o#NNYD74efUT^P~^ zd_JXyL$>W(Daw2*N4@*)-tOcV(8!;mbRYlG5QhK(_gC6^p$ok-Iq1i?Fb_I*VScal z9P@ntelOoag2$}sdUt`7*I%)&a6c;huJ-jm!)$f>qu>P}Dqjzz`TOCY#D)CL_^e+R zm43nPNk&2n=2j<(5n+J401ZQ>K~o+kf+Hn=0lw0nXk7H?D_c#_K!A~}OB@!MeS87b zH2x`EPoIxI>IgUmIP2Sqbb7Hh z`e*PA;CPRIH6n+-!EJ{mj@MyM?THzJqoC2(OG&_I)vvSOHF*DfRGG)r2jEz9fXQ*o z67_Thmi+t(C|?$yD2jk7Ody0eDeF8W5%z+|h`LkSi&RcFT`loGgGkaHuISXShay7; zP0+uqfS_7G6U5RJ)D?qS;-Vg*0wSO&Ibbl2@A(WM_GL`9bn2XI;XHbE_>)E;?+0jM z$=yhx$0-ExS{gJieid^(@bgz4+IEx3hFg+B-&CB=vo6H-=lOmrfg(#xyYm&>;#du4 z-d=?u{GO?*RKm}JO6SI%C!bQdXHjsNR;na;AYRiCVQkrc%thu(0Tm7jM2x*GWQbX_$`ojOc&|Tu*Gveun&+6R&_s#G0$3~Iht@Emc`k$Z zq_iF@ql;6<;vK5sKz&lIMz8WYBbhE3><5Rr=6cP-hb#}1rrUfye~}Uji2BV(J;q=X(Fc^Kw*d7)Z-Gr zz3&2_f8tjg*j0pP?`6Dst!yWV|BIcg+A_afqa|ICX(1Y}u5PN)K-2MKClIzSQ@sAf zsbpYByAY4A2U2m#VeehlAr9VNJmlU($`-c`pc`z8GDwGJKcyHuMt!_@s~iZ4Gz=xA zY5+@mO%=dRjB*racfN=?&5Lo69{X*5q*6}diiU>Wh#@@!$|8t_pBKP_L3r>g1fW&$ zY)J+fmwp2wBwm`A2HN$BJ*eKCqC8W;idgKPdX|_jZq9BR+mY-SX3gi0LDDtP;H&#t zx0PIhfcv*w9E0TDb39pCb%}rhwWOJDQgLYie4DSiS=xQdsdt7kS}tK_!X{zQ6}s_t zIiV!UebU_{CB_{RGyvV}LMNNY2)y|aPOy-6egIUEAe1M47DMD5aGQ;sN#SK@MC&>5 zEK%R=&{*#X*?RB1ubHmhkdu94^pcJ?pc-w)2%Abh{Mg`5d#W1)y^^FhL#6#$`^WNR z+)kmV*j|>ej1x;OKY^QQ2DCC+T&Y}|bO1$$e_HyuxIj^20sA_`sV3Iz6tzSzF2}F$ zHSYd4ZU?0!Q2~ZFMw?Ta0ewBfY2-e}%8t>r+Gx^GVbkiq^2>G}8Kg{GyfF?EJ-#aGlBLWosU znf$tmgxogg4XG|u6U7w47Z^=SJdyr7m+9ki@Q8a5;RIBxu2}cKlmI=5=oeQDx-@?r@ecSd#Zs1~sEtR}-gD^^ys z)tic>96D>S5^0ov1Kj^fYOuEh#OgY?UfrT6*&#U5L^wLW;4T9Ym|ajRu1;=HUsuF=2Rv$*wLmK7cFlF(q!b5uXXK z#is<(H~Onv2SCs=uy~jtz}NM(3)DGH@uYjsdoQie)Y=;IyLi*Wj@z6dZ;8IAaF~+a zE1}~v3$rgI$q>uDLPW#0`wZ|qM~K$VL(QC#pu?91u}nEvR29xRu&C*ZrG$|{GImAb z_5JQ`XP?m5bp9mb;LQDDht>GH(2fY~rDSd!yr?iiu7G8rjCP`i& zEhO+GYd;>~=_oP6aklQz>Deu`G|q1Du^6fG?wCTs?LJTM=E=vrpyIhZ8zJ!e2_w>m z$uqC)E*^5TE>R#GRCsJoSL4J2!Q?kOoYQT$oxE`Gl)PrTOR-n{9OgGaUjgw0odUad z`7}5owo&jHCZ@p2uVp%wS1DKzvJNq6n*|q4aiK%iqibO#g2>9QL&tzxDyLp(ji#<+ z+I!^5^Klby#51cLXm6bw;b1*j*tK+%?P@T?Kzs|PG~_tM{DyiDex?2b-{ve#rG-fZcwy@gS*O05Qtgr7sE)pjLf2oOQ+&T)WnK~5n|7g?Wo zE}tVaty`q)7>hT1jUEJ^CHkf;5Kq7J!t39+2m(Bx?! zLuR2p+Q8b~bDIy$a_4!7p^R0OZ>zGF}R-8KTUdp3k7LO{=A5OS+uz|jk&!Wz_BS1G9oM@jmin9Y=RLFc` zc9%+WmIyM%&XTy&2O0U2E!pAjpnv!0Qh)NmNsB&RC&%NW9OEqFqS()XInZw)vOsrk zT6c>199b~=?yUpn@0Gb_9INLz)~Oa$n5er-=<9PFIb&N42%-|tX%D$nQMBM^{?fX! 
z{@NbbC8EzMkjKLpPd<_Wub)(Y#KRQTpRah?@J@AdC7j76BbesNwy498*Sf|qDxlIE zZ*($FW9cEgt6OJ1V%i22tgoA6^CxS>Fr&n?YQBul~;^ zzJXfdeO3^rR~mP;gN1HCZ`IJlw^Avm=ePI-(&Pyj_D3^-s6Hb8HQOlZ-j5y<9db>$ zgvpkeSuq!i-mUj$-vw@-sb)1HjLsD(u1lxsp-=0q6YC!PU%9l~*I*lr$*$5bVR9eL z*xC8N(u5G4_x=Du0;180XR2c)G@xlaHh}9x)(DO&*so3YA^dgR_ZoXI`{=SC;4%1p zBQ$1U%C`qqF^FSJ3V@xl4^w@^9YY1bA^$|Uc!I5u5p(Oo*1Q8ww)(Q@Mb%X|aX-0^ zs}tE3Dn{H}k^;jer2>s?zrw&4iEQ{_cB}l6enmyqG$`g+&@iAD*JZi#67oc^?Ac)2 zcl+Y1Ub$-bOe15MP^UnVojEi=d(D@N;eK+WXjKzplJbNXj!Ca_feY)#M`Z6*7BihGKoVcAe-WVb-sKqDJF%+Di0?Ajc_1g|TnkL~d(jZR@Q|`W$j6i8HQM{f@hdMdL$>&hM z#0*$H!sV^l@drO%*2#w?V9YWG1ei?(_Fij>4Wnj@%i_b+Honu=P3cB^h?Ga6<~|m< zG`@vy(~Ed%3r@qAeISO2j?0(wxTeB06^f=#{^F zIz6Zkabo`9fi_9E9B1o4sjuLS-}h$j>=lWV&or^_>V2Jpj&C%3dB2B&D`RHuGMg&Y zxYa*!+OIlg&OM&rGY~W^R_eCB!zgFz{!eQRv8 z1*!1^-ceh0Q%sRTnz}OMmLmPLp{xo~8}wBOzyg%gF#I61DY!XX+mRZ`%O&P*gElnw zKOHk2_@fiN4w}VQzkGvA`iz%=!i$Cb%bZ8;;?uu7gHMGYNDWozD^|15_J78K@Re`$ z%x9YasBhX?Zadxxi8ch_K>{y^EmivPiWN3=d}J`~ry2jbSuwpu5_B?mqvj`)#GJM- zpNiD?tVP6W+3cFIfAYzXAgaFgb9ObUfiOz3+Iz=Z+PKODo90rWeQ@jWyQMAr{6SN-3y#%rrAMa(b=R~f&HS_`O^!ZQju`+8pt zZ^Ul=(_`|VuUhMGA=sgw!H{tP*YB)7B=fwNf&==n{=!@dcurFpKx`+3u~a~_8@+7G z*hUY01cpN(Jt&r8yQX8@d!2fA4pFA^NcX}9@3p*^N+fJVwEif4IbsTR^(rte;L*5j zY7X<@BI0BIUWr#6+~b=S=NUjC5h*)h=`@Ga*1&8e*F%!&uleL4fdGXPisa=%BFDWNW)tPE6`pNMGUxkvvbOuUU4n)HedcX@4 z-V0T?IAuZAze;`DH$9MH$Ua8NBL|2iO6no;wwc&rL#+o#Kw1{QX;gQ}C^hpD;ruhz zv5j$GBKzeo)K<+Ny4Gsx7V7 zRZg_@4No2sCs$JFE@Fp~&^R2Wa7=pLK5D1oYXwN43~4nbr1UQ1(w7Q?N9De9EY3cR z;sJ$^N%%p{9$JldXF$)*toSBn=-QTQ*FDc>zeATf%4or<)vKR+dgJJ0)YhlQRom9; zK+C+Z51jUnKLSztVVXeddCYCR9=(_bwA_Ti$452zKOY=AItCLJLMmy932cAr=3)iL0kLC<0u@Og>$?_b-Ojw5BJUy4kD|4=tJ>uXAzK@;57OhzRO3nUmWg>C} z|1;giq3pj210vPqHDcgN7(hfEFL_p?9oE$XN&{=U?JC>$0DMw9FIeS-Z09rY{e+ih zT(w%!IIs8qIq=W`PjpSA=zIcnLj{x}ArBc_7&W@;p^^u$AL}6(W=h>-}{i0~2+xua=D5xyl>0Cuy>h(NwZa9B(bUCVu=%R8Vl)d2glu@ymR9 zig+JaM)hm)lRa+f_qqkixnX_w^3S(aCImba&h3KL#s_M(Iq817^JSMJ;g(cV40LuH z@mM=+8=js(s=}v8`rB~ z_payUO2=}*b?ut@8$L8N_t8-%=Xs*i+A|3JJZ*{X)JUwQ>sTWogf&(}kn8&V8I)lA z>5ePP(y%T|V`qf~d{-H?TJ>~|^*#xne{i%L-Ia~ypZY-*87buA^}8QHm!RIyLOK_NJr{kcb%(! zNg;#iHhp#h@f6}jk~W~SLnB})K2?74Rvd8rDPO)xWd+scnjLL;ynh1fB0iQp%9$;y z%QbGH@##w%s5eO3&CBHKHi#qp0C9!{pg?CYX=-k|>O5WH3sqgG66@mD2Dzt$=Ud$uxF>X-A>TPD%i^bh(hLSjEi@3kl zskJr|5iMaZ0f`V*v#&kBst;jzwkz3218nBo;SHK5U(Xy0Y<9JYp#D1Ku zJ59zPT8}RgaR>gs<1kt@vEpEvyqp2SOB*G$dCgDZCgMwckO{RG4Tn@|_u{26b6uMAUsb$g7RM9&k3yxi!WUSVQ;mv^c^k&h%%B29|Hb|uN#Pe_Ol zc{R9wtXE7<+5^wxL~4_C4(E}Lxa!U)RG7uXi#0dehXZS!J7OmNQ0I-sM#-T_8-H}v z$S#V|#QQ=K$d6a)l8x4&xo|wEA$N#c?DI(xol2@mmF|2X# zrZpAwjrWr%^~{djjRR|8mD_SbmOl(WoM@iRkNw93^d zMlBMz(n&DMOTmIay8TDM*~uYD%S*#~gSKA?=HE)~#eQ zww0^#cYrJmRCQL`!f?z;2@CP2c9%y2g*lg<%lW*ERj-5DCJ2?hKe0MzF8Fa=+TV?& zTDbOhkJg0~qu+#OujNqEB?qomGy;C(T}ro#p^K9?O%MJKmij|bN_+p-A3QP~m~{I=DhLM_x1#ny{V6ERyZehbc)dUK&XL{i+)vNVR<6HR==eIx53G2^@^K9| zrMozED)kqsAzwO13FP$?I3El=@oGH` zBvE>DAy<4LI?{NEU1{+PaUxOn+Eg=9*cb+MQN9XKyk9y0eHQQy2ei}7(RXfbw(f%4 zD=r{_cMin_y3D`iW>CtKnWs}uj#9%_+PI&WHS+How{ua5`0QANJY}TGZt5-<=#xR? zAO>Uz*5=%{1@?REGc`979zHDtLF7k9pOa)xos2T9$)gRLg-?0&Os|ci&p{U1a}nkz z)PX(_m706GM_LSWMu9AAKeifU;Q7JwD~iGO(Kj?~OPVG^52H;7@(!9EkAGc>Yn!XTejz(3fh_C{_?{!B~Lh(;y~*B-N=W_k7! 
zoAw(@E_U5gvo95u0AXrW=h^~@+^A=Wh>yNOB`^#tx&NNFw5cl~8me?WXx-vISJ&2T z-R?UA`t#KHw z-XK~5n!>Gj;hKLWjwQ%5eaB;{VJq|vkw+COpSZZ`$kO6NMC&hGCBS&wd`OvV@{@Ed zm+BzmbA=4pcE!<4)|Rw014$#m)BAY#Gz4aZdP{Y>!R==H+IXYeKmeFkQ1*0vvf_wZ z`jfRdB`S|7`EmRV@f-kDPQAPhWe=|%u=F&D9su^iitQdbU!r;WTcwc+(>Fh*RPzw% zis)P2@uF3}`3AFd;WKcI^ImD&{fedotDE_AEJr2|Iy&PqLbKL1Xz`D$B33~m675fx zvDZQ`nN1a!(fXNl_qV7u0~_meyayOdet7gGt&<| zoyN6bP!YOnN0YJ6Oh1C>1}7KUu2Mwlk!wnd%7`yZ7~IAl3%n$8fg?4#avOGpx%D>7 zjg1nro}QlU6c#DZPylE8&z3?bj^y+=TV&o?#AR_HcgR9N_;?KWg?dj(AdvG)w^h<) zVtXndzbzA-FJo&AB&gyEd!Xi)y^-1&U-y7gQn>zJ=~U5_@D7x z#@|lwD1O`&sg>X$09bDU;iEacFLN5UvzX}%+MOA>v))r@68YP46J0Mf79$L-}oKelggBd9F-@9p;`rt`I1F+(tvV?<|TzFbXGEp zH1PbSQ1ZNco+#62vtSwqCc5?Lgp9qRm4(H1VL}VVrp9>weG48InHsL8hl4b=Wu?gH zZ~BLTWJa6cUuJH10id`H+Vy$Pew$J(2ckNeeN*S=vOYR8 ziLU*9!aLF`bK2!3v7lE_^@C3!!OrcZ`N-x;mT;Nw#cY42pr*mQs)#O8l#XKZXVoIg za4np9YS|y3<;W|`ortAeiqZ)5z!M>#bozrw_F{2}=QyoXLzSCe-*2Pj3xhD2x!Bha zsNAn&djC78i6q~l;h0cw<5+Z1+bXC??g2Y?eTo&xt=697eD>PqSvtt&+49q#CSaAi z31kC++4zCn*r^Ls6ux{uA*EGgHdfa90Lk?Lhy(eHZ8Ij=DD)(LBD7230ppvR2<0{= zM+KlKp9=R|eyX=LU;)%TpE@mKz^OcF1s?wCgQQ8V`M(#z2&ulP@=HCCG++tvC)7dY z<8EaV_=ZpYRt8-`ld)BEgmiT5UzlXl10nShn=u%urVc`OZ6a+dQCKlCP|znySL$3RU>aJ8uSKbDtswcjN5(r@&Q$VlJymEh70gN5H{|c82^+Krtg=! z(in5Zt6c_8BG!LzYR#Mvm(r!XCoF3x=&CBLTejOWG#2SojBd|9k z+tRJ1hTM?Sk9h?2lE>s zJ11RA4TTmk#d8|>LIfX0{P8^VtKU!}EnE=-+8``1o=IQ z#W?YOD)xh@R6z%7FgV8#(I>=AUjlfSxA)#=RN0M7oF6W^eSH$fl^yX=DLbawi<-L` z;<6GX`REMMC3B<=2Rvr5SYL+kSsE3VV(6Os2k`j+3t z39GbO6BO_?%oTI2q`S@Hu%>NCsCuKVG?%pq*`# z^i9vS*yef+y*8>IovrF1&$b;X1ZV}m(_Zd6&LXmn{?~XPd{)MC_G{1C7?DOR(z3^o39g@HBQ_k{`s!HNTpa6dgat~xbNSg%y51s0=iU0rr literal 0 HcmV?d00001 diff --git a/dotnet/samples/Demos/ModelContextProtocol/McpDotNetExtensions.cs b/dotnet/samples/Demos/ModelContextProtocol/McpDotNetExtensions.cs new file mode 100644 index 000000000000..d8814bdcd695 --- /dev/null +++ b/dotnet/samples/Demos/ModelContextProtocol/McpDotNetExtensions.cs @@ -0,0 +1,159 @@ +// Copyright (c) Microsoft. All rights reserved. + +using McpDotNet.Client; +using McpDotNet.Configuration; +using McpDotNet.Protocol.Types; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel; + +namespace ModelContextProtocol; + +/// +/// Extension methods for McpDotNet +/// +internal static class McpDotNetExtensions +{ + /// + /// Retrieve an instance configured to connect to a GitHub server running on stdio. + /// + internal static async Task GetGitHubToolsAsync() + { + McpClientOptions options = new() + { + ClientInfo = new() { Name = "GitHub", Version = "1.0.0" } + }; + + var config = new McpServerConfig + { + Id = "github", + Name = "GitHub", + TransportType = "stdio", + TransportOptions = new Dictionary + { + ["command"] = "npx", + ["arguments"] = "-y @modelcontextprotocol/server-github", + } + }; + + var factory = new McpClientFactory( + [config], + options, + NullLoggerFactory.Instance + ); + + return await factory.GetClientAsync(config.Id).ConfigureAwait(false); + } + + /// + /// Map the tools exposed on this to a collection of instances for use with the Semantic Kernel. 
+ /// + internal static async Task> MapToFunctionsAsync(this IMcpClient mcpClient) + { + var tools = await mcpClient.ListToolsAsync().ConfigureAwait(false); + return tools.Tools.Select(t => t.ToKernelFunction(mcpClient)).ToList(); + } + + #region private + private static KernelFunction ToKernelFunction(this Tool tool, IMcpClient mcpClient) + { + async Task InvokeToolAsync(Kernel kernel, KernelFunction function, KernelArguments arguments, CancellationToken cancellationToken) + { + try + { + // Convert arguments to dictionary format expected by mcpdotnet + Dictionary mcpArguments = []; + foreach (var arg in arguments) + { + if (arg.Value is not null) + { + mcpArguments[arg.Key] = function.ToArgumentValue(arg.Key, arg.Value); + } + } + + // Call the tool through mcpdotnet + var result = await mcpClient.CallToolAsync( + tool.Name, + mcpArguments, + cancellationToken: cancellationToken + ).ConfigureAwait(false); + + // Extract the text content from the result + return string.Join("\n", result.Content + .Where(c => c.Type == "text") + .Select(c => c.Text)); + } + catch (Exception ex) + { + Console.Error.WriteLine($"Error invoking tool '{tool.Name}': {ex.Message}"); + + // Rethrowing to allow the kernel to handle the exception + throw; + } + } + + return KernelFunctionFactory.CreateFromMethod( + method: InvokeToolAsync, + functionName: tool.Name, + description: tool.Description, + parameters: tool.ToParameters(), + returnParameter: ToReturnParameter() + ); + } + + private static object ToArgumentValue(this KernelFunction function, string name, object value) + { + var parameter = function.Metadata.Parameters.FirstOrDefault(p => p.Name == name); + return parameter?.ParameterType switch + { + Type t when Nullable.GetUnderlyingType(t) == typeof(int) => Convert.ToInt32(value), + Type t when Nullable.GetUnderlyingType(t) == typeof(double) => Convert.ToDouble(value), + Type t when Nullable.GetUnderlyingType(t) == typeof(bool) => Convert.ToBoolean(value), + Type t when t == typeof(List) => (value as IEnumerable)?.ToList(), + Type t when t == typeof(Dictionary) => (value as Dictionary)?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value), + _ => value, + } ?? value; + } + + private static List? ToParameters(this Tool tool) + { + var inputSchema = tool.InputSchema; + var properties = inputSchema?.Properties; + if (properties == null) + { + return null; + } + + HashSet requiredProperties = new(inputSchema!.Required ?? []); + return properties.Select(kvp => + new KernelParameterMetadata(kvp.Key) + { + Description = kvp.Value.Description, + ParameterType = ConvertParameterDataType(kvp.Value, requiredProperties.Contains(kvp.Key)), + IsRequired = requiredProperties.Contains(kvp.Key) + }).ToList(); + } + + private static KernelReturnParameterMetadata? ToReturnParameter() + { + return new KernelReturnParameterMetadata() + { + ParameterType = typeof(string), + }; + } + private static Type ConvertParameterDataType(JsonSchemaProperty property, bool required) + { + var type = property.Type switch + { + "string" => typeof(string), + "integer" => typeof(int), + "number" => typeof(double), + "boolean" => typeof(bool), + "array" => typeof(List), + "object" => typeof(Dictionary), + _ => typeof(object) + }; + + return !required && type.IsValueType ? 
typeof(Nullable<>).MakeGenericType(type) : type; + } + #endregion +} diff --git a/dotnet/samples/Demos/ModelContextProtocol/ModelContextProtocol.csproj b/dotnet/samples/Demos/ModelContextProtocol/ModelContextProtocol.csproj new file mode 100644 index 000000000000..d509495b6882 --- /dev/null +++ b/dotnet/samples/Demos/ModelContextProtocol/ModelContextProtocol.csproj @@ -0,0 +1,33 @@ + + + + Exe + net8.0 + enable + enable + 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 + $(NoWarn);CA2249;CS0612 + + + + + + + + + + + + + + Always + + + + + + + + + + diff --git a/dotnet/samples/Demos/ModelContextProtocol/Program.cs b/dotnet/samples/Demos/ModelContextProtocol/Program.cs new file mode 100644 index 000000000000..f8c7c205c4e7 --- /dev/null +++ b/dotnet/samples/Demos/ModelContextProtocol/Program.cs @@ -0,0 +1,55 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using ModelContextProtocol; + +var config = new ConfigurationBuilder() + .AddUserSecrets() + .AddEnvironmentVariables() + .Build(); + +// Prepare and build kernel +var builder = Kernel.CreateBuilder(); +builder.Services.AddLogging(c => c.AddDebug().SetMinimumLevel(Microsoft.Extensions.Logging.LogLevel.Trace)); + +if (config["OpenAI:ApiKey"] is not null) +{ + builder.Services.AddOpenAIChatCompletion( + serviceId: "openai", + modelId: config["OpenAI:ChatModelId"] ?? "gpt-4o", + apiKey: config["OpenAI:ApiKey"]!); +} +else +{ + Console.Error.WriteLine("Please provide a valid OpenAI:ApiKey to run this sample. See the associated README.md for more details."); + return; +} + +Kernel kernel = builder.Build(); + +// Add the MCP simple tools as Kernel functions +var mcpClient = await McpDotNetExtensions.GetGitHubToolsAsync().ConfigureAwait(false); +var functions = await mcpClient.MapToFunctionsAsync().ConfigureAwait(false); + +foreach (var function in functions) +{ + Console.WriteLine($"{function.Name}: {function.Description}"); +} + +kernel.Plugins.AddFromFunctions("GitHub", functions); + +// Enable automatic function calling +var executionSettings = new OpenAIPromptExecutionSettings +{ + Temperature = 0, + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() +}; + +// Test using GitHub tools +var prompt = "Summarize the last four commits to the microsoft/semantic-kernel repository?"; +var result = await kernel.InvokePromptAsync(prompt, new(executionSettings)).ConfigureAwait(false); +Console.WriteLine($"\n\n{prompt}\n{result}"); diff --git a/dotnet/samples/Demos/ModelContextProtocol/README.md b/dotnet/samples/Demos/ModelContextProtocol/README.md new file mode 100644 index 000000000000..efbb914eaa9a --- /dev/null +++ b/dotnet/samples/Demos/ModelContextProtocol/README.md @@ -0,0 +1,44 @@ +# Model Context Protocol Sample + +This example demonstrates how to use Model Context Protocol tools with Semantic Kernel. + +MCP is an open protocol that standardizes how applications provide context to LLMs. + +For for information on Model Context Protocol (MCP) please refer to the [documentation](https://modelcontextprotocol.io/introduction). + +This sample uses [mcpdotnet](https://www.nuget.org/packages/mcpdotnet) was heavily influenced by the [samples](https://github.com/PederHP/mcpdotnet/tree/main/samples) from that repository. + +The sample shows: + +1. How to connect to an MCP Server using [mcpdotnet](https://www.nuget.org/packages/mcpdotnet) +2. 
Retrieve the list of tools the MCP Server makes available +3. Convert the MCP tools to Semantic Kernel functions so they can be added to a Kernel instance +4. Invoke the tools from Semantic Kernel using function calling + +## Configuring Secrets + +The example require credentials to access OpenAI. + +If you have set up those credentials as secrets within Secret Manager or through environment variables for other samples from the solution in which this project is found, they will be re-used. + +### To set your secrets with Secret Manager: + +```text +cd dotnet/samples/Demos/ModelContextProtocol + +dotnet user-secrets init + +dotnet user-secrets set "OpenAI:ChatModelId" "..." +dotnet user-secrets set "OpenAI:ApiKey" "..." + "..." +``` + +### To set your secrets with environment variables + +Use these names: + +```text +# OpenAI +OpenAI__ChatModelId +OpenAI__ApiKey +``` diff --git a/dotnet/samples/Demos/ModelContextProtocol/SimpleToolsConsole.json b/dotnet/samples/Demos/ModelContextProtocol/SimpleToolsConsole.json new file mode 100644 index 000000000000..d6491818e538 --- /dev/null +++ b/dotnet/samples/Demos/ModelContextProtocol/SimpleToolsConsole.json @@ -0,0 +1,17 @@ +{ + "Options": { + "ClientInfo": { + "Name": "SimpleToolsConsole", + "Version": "1.0.0" + } + }, + "Config": { + "Id": "everything", + "Name": "Everything", + "TransportType": "stdio", + "TransportOptions": { + "command": "npx", + "arguments": "-y @modelcontextprotocol/server-everything" + } + } +} \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/ProcessFramework.Aspire.AppHost.csproj b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/ProcessFramework.Aspire.AppHost.csproj new file mode 100644 index 000000000000..4c8cfe4b3363 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/ProcessFramework.Aspire.AppHost.csproj @@ -0,0 +1,34 @@ + + + + + + Exe + net8.0 + LatestMajor + enable + enable + true + 61efcc24-41eb-4a92-8ebe-64de14ed54dd + $(NoWarn);CS1591 + + + + + + + + + + false + + + + + + + \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/Program.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/Program.cs new file mode 100644 index 000000000000..d286b93ccf92 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/Program.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. 
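+
+// The Aspire AppHost wires the sample together: the Azure OpenAI connection string is
+// shared with the translator and summary agents, and the process orchestrator references
+// both agents so that it can reach them through service discovery.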
+ +var builder = DistributedApplication.CreateBuilder(args); + +var openai = builder.AddConnectionString("openAiConnectionName"); + +var translateAgent = builder.AddProject("translatoragent") + .WithReference(openai); + +var summaryAgent = builder.AddProject("summaryagent") + .WithReference(openai); + +var processOrchestrator = builder.AddProject("processorchestrator") + .WithReference(translateAgent) + .WithReference(summaryAgent); + +builder.Build().Run(); diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/appsettings.json b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/appsettings.json new file mode 100644 index 000000000000..ef4d177c24ec --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/appsettings.json @@ -0,0 +1,12 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning", + "Aspire.Hosting.Dcp": "Warning" + } + }, + "ConnectionStrings": { + "openAiConnectionName": "https://{account_name}.openai.azure.com/" + } +} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Models/ProcessEvents.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Models/ProcessEvents.cs new file mode 100644 index 000000000000..37fb0b772391 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Models/ProcessEvents.cs @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace ProcessFramework.Aspire.ProcessOrchestrator.Models; + +public static class ProcessEvents +{ + public static readonly string TranslateDocument = nameof(TranslateDocument); + public static readonly string DocumentTranslated = nameof(DocumentTranslated); + public static readonly string SummarizeDocument = nameof(SummarizeDocument); + public static readonly string DocumentSummarized = nameof(DocumentSummarized); +} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.csproj b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.csproj new file mode 100644 index 000000000000..846843bdca9e --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.csproj @@ -0,0 +1,28 @@ + + + + net8.0 + LatestMajor + enable + enable + + $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0101,SKEXP0110,OPENAI001 + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.http b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.http new file mode 100644 index 000000000000..a29192f4d381 --- /dev/null +++ 
b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.http @@ -0,0 +1,5 @@ +GET https://localhost:7207/api/processdoc +Accept: application/json + +### + \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Program.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Program.cs new file mode 100644 index 000000000000..0dac1b69d041 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Program.cs @@ -0,0 +1,90 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using OpenTelemetry; +using OpenTelemetry.Exporter; +using OpenTelemetry.Logs; +using OpenTelemetry.Metrics; +using OpenTelemetry.Trace; +using ProcessFramework.Aspire.ProcessOrchestrator; +using ProcessFramework.Aspire.ProcessOrchestrator.Models; +using ProcessFramework.Aspire.ProcessOrchestrator.Steps; + +var builder = WebApplication.CreateBuilder(args); + +string otelExporterEndpoint = builder.GetConfiguration("OTEL_EXPORTER_OTLP_ENDPOINT"); +string otelExporterHeaders = builder.GetConfiguration("OTEL_EXPORTER_OTLP_HEADERS"); + +AppContext.SetSwitch("Microsoft.SemanticKernel.Experimental.GenAI.EnableOTelDiagnosticsSensitive", true); + +var loggerFactory = LoggerFactory.Create(builder => +{ + // Add OpenTelemetry as a logging provider + builder.AddOpenTelemetry(options => + { + options.AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }); + // Format log messages. This defaults to false. 
+ options.IncludeFormattedMessage = true; + }); + + builder.AddTraceSource("Microsoft.SemanticKernel"); + builder.SetMinimumLevel(LogLevel.Information); +}); + +using var traceProvider = Sdk.CreateTracerProviderBuilder() + .AddSource("Microsoft.SemanticKernel*") + .AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }) + .Build(); + +using var meterProvider = Sdk.CreateMeterProviderBuilder() + .AddMeter("Microsoft.SemanticKernel*") + .AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }) + .Build(); + +builder.AddServiceDefaults(); +builder.Services.AddHttpClient(client => { client.BaseAddress = new("https+http://translatoragent"); }); +builder.Services.AddHttpClient(client => { client.BaseAddress = new("https+http://summaryagent"); }); +builder.Services.AddSingleton(builder => +{ + var kernelBuilder = Kernel.CreateBuilder(); + + kernelBuilder.Services.AddSingleton(builder.GetRequiredService()); + kernelBuilder.Services.AddSingleton(builder.GetRequiredService()); + + return kernelBuilder.Build(); +}); + +var app = builder.Build(); + +app.UseHttpsRedirection(); + +app.MapGet("/api/processdoc", async (Kernel kernel) => +{ + var processBuilder = new ProcessBuilder("ProcessDocument"); + var translateDocumentStep = processBuilder.AddStepFromType(); + var summarizeDocumentStep = processBuilder.AddStepFromType(); + + processBuilder + .OnInputEvent(ProcessEvents.TranslateDocument) + .SendEventTo(new(translateDocumentStep, TranslateStep.Functions.Translate, parameterName: "textToTranslate")); + + translateDocumentStep + .OnEvent(ProcessEvents.DocumentTranslated) + .SendEventTo(new(summarizeDocumentStep, SummarizeStep.Functions.Summarize, parameterName: "textToSummarize")); + + summarizeDocumentStep + .OnEvent(ProcessEvents.DocumentSummarized) + .StopProcess(); + + var process = processBuilder.Build(); + using var runningProcess = await process.StartAsync( + kernel, + new KernelProcessEvent { Id = ProcessEvents.TranslateDocument, Data = "COME I FORNITORI INFLUENZANO I TUOI COSTI Quando scegli un piano di assicurazione sanitaria, uno dei fattori più importanti da considerare è la rete di fornitori in convenzione disponibili con il piano. Northwind Standard offre un'ampia varietà di fornitori in convenzione, tra cui medici di base, specialisti, ospedali e farmacie. Questo ti permette di scegliere un fornitore comodo per te e la tua famiglia, contribuendo al contempo a mantenere bassi i tuoi costi. Se scegli un fornitore in convenzione con il tuo piano, pagherai generalmente copay e franchigie più basse rispetto a un fornitore fuori rete. Inoltre, molti servizi, come l'assistenza preventiva, possono essere coperti senza alcun costo aggiuntivo se ricevuti da un fornitore in convenzione. È importante notare, tuttavia, che Northwind Standard non copre i servizi di emergenza, l'assistenza per la salute mentale e l'abuso di sostanze, né i servizi fuori rete. Questo significa che potresti dover pagare di tasca tua per questi servizi se ricevuti da un fornitore fuori rete. Quando scegli un fornitore in convenzione, ci sono alcuni suggerimenti da tenere a mente. Verifica che il fornitore sia in convenzione con il tuo piano. Puoi confermarlo chiamando l'ufficio del fornitore e chiedendo se è in rete con Northwind Standard. 
Puoi anche utilizzare lo strumento di ricerca fornitori sul sito web di Northwind Health per verificare la copertura. Assicurati che il fornitore stia accettando nuovi pazienti. Alcuni fornitori potrebbero essere in convenzione ma non accettare nuovi pazienti. Considera la posizione del fornitore. Se il fornitore è troppo lontano, potrebbe essere difficile raggiungere gli appuntamenti. Valuta gli orari dell'ufficio del fornitore. Se lavori durante il giorno, potresti aver bisogno di trovare un fornitore con orari serali o nel fine settimana. Scegliere un fornitore in convenzione può aiutarti a risparmiare sui costi sanitari. Seguendo i suggerimenti sopra e facendo ricerche sulle opzioni disponibili, puoi trovare un fornitore conveniente, accessibile e in rete con il tuo piano Northwind Standard." } + ); + + return Results.Ok("Process completed successfully"); +}); + +app.MapDefaultEndpoints(); + +app.Run(); diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/SummarizeStep.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/SummarizeStep.cs new file mode 100644 index 000000000000..0f85f0ff0fd9 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/SummarizeStep.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using ProcessFramework.Aspire.ProcessOrchestrator.Models; + +namespace ProcessFramework.Aspire.ProcessOrchestrator.Steps; + +public class SummarizeStep : KernelProcessStep +{ + public static class Functions + { + public const string Summarize = nameof(Summarize); + } + + [KernelFunction(Functions.Summarize)] + public async ValueTask SummarizeAsync(KernelProcessStepContext context, Kernel kernel, string textToSummarize) + { + var summaryAgentHttpClient = kernel.GetRequiredService(); + var summarizedText = await summaryAgentHttpClient.SummarizeAsync(textToSummarize); + Console.WriteLine($"Summarized text: {summarizedText}"); + await context.EmitEventAsync(new() { Id = ProcessEvents.DocumentSummarized, Data = summarizedText }); + } +} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/TranslateStep.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/TranslateStep.cs new file mode 100644 index 000000000000..8c1b46c22746 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/TranslateStep.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft. All rights reserved. 
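+
+// Process step that forwards the incoming text to the translator agent over HTTP and
+// emits ProcessEvents.DocumentTranslated with the translated text once the call returns.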
+ +using Microsoft.SemanticKernel; +using ProcessFramework.Aspire.ProcessOrchestrator.Models; + +namespace ProcessFramework.Aspire.ProcessOrchestrator.Steps; + +public class TranslateStep : KernelProcessStep +{ + public static class Functions + { + public const string Translate = nameof(Translate); + } + + [KernelFunction(Functions.Translate)] + public async ValueTask TranslateAsync(KernelProcessStepContext context, Kernel kernel, string textToTranslate) + { + var translatorAgentHttpClient = kernel.GetRequiredService(); + var translatedText = await translatorAgentHttpClient.TranslateAsync(textToTranslate); + Console.WriteLine($"Translated text: {translatedText}"); + await context.EmitEventAsync(new() { Id = ProcessEvents.DocumentTranslated, Data = translatedText }); + } +} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/SummaryAgentHttpClient.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/SummaryAgentHttpClient.cs new file mode 100644 index 000000000000..54f6fe7dd757 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/SummaryAgentHttpClient.cs @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text; +using System.Text.Json; +using ProcessFramework.Aspire.Shared; + +namespace ProcessFramework.Aspire.ProcessOrchestrator; + +public class SummaryAgentHttpClient(HttpClient httpClient) +{ + public async Task SummarizeAsync(string textToSummarize) + { + var payload = new SummarizeRequest { TextToSummarize = textToSummarize }; +#pragma warning disable CA2234 // We cannot pass uri here since we are using a customer http client with a base address + var response = await httpClient.PostAsync("/api/summary", new StringContent(JsonSerializer.Serialize(payload), Encoding.UTF8, "application/json")).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + var responseContent = await response.Content.ReadAsStringAsync(); + return responseContent; + } +} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/TranslatorAgentHttpClient.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/TranslatorAgentHttpClient.cs new file mode 100644 index 000000000000..b01cb1c0bb81 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/TranslatorAgentHttpClient.cs @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft. All rights reserved. 
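+
+// Typed HTTP client used by SummarizeStep to call the summary agent. A minimal usage
+// sketch, assuming the typed-client registration shown in the orchestrator's Program.cs
+// (variable names below are illustrative only):
+//
+//   builder.Services.AddHttpClient<SummaryAgentHttpClient>(
+//       client => { client.BaseAddress = new("https+http://summaryagent"); });
+//   ...
+//   var summary = await summaryAgentHttpClient.SummarizeAsync(textToSummarize);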
+ +using System.Text; +using System.Text.Json; +using ProcessFramework.Aspire.Shared; + +namespace ProcessFramework.Aspire.ProcessOrchestrator; + +public class TranslatorAgentHttpClient(HttpClient httpClient) +{ + public async Task TranslateAsync(string textToTranslate) + { + var payload = new TranslationRequest { TextToTranslate = textToTranslate }; +#pragma warning disable CA2234 // We cannot pass uri here since we are using a customer http client with a base address + var response = await httpClient.PostAsync("/api/translator", new StringContent(JsonSerializer.Serialize(payload), Encoding.UTF8, "application/json")).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + var responseContent = await response.Content.ReadAsStringAsync(); + return responseContent; + } +} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/appsettings.json b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/appsettings.json new file mode 100644 index 000000000000..10f68b8c8b4f --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/appsettings.json @@ -0,0 +1,9 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "AllowedHosts": "*" +} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/Extensions.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/Extensions.cs new file mode 100644 index 000000000000..b95812023687 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/Extensions.cs @@ -0,0 +1,163 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Diagnostics.HealthChecks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Diagnostics.HealthChecks; +using Microsoft.Extensions.Logging; +using OpenTelemetry; +using OpenTelemetry.Metrics; +using OpenTelemetry.Trace; + +namespace Microsoft.Extensions.Hosting; + +/// +/// Adds common .NET Aspire services: service discovery, resilience, health checks, and OpenTelemetry. +/// This project should be referenced by each service project in your solution. +/// To learn more about using this project, see https://aka.ms/dotnet/aspire/service-defaults +/// +public static class ServiceExtensions +{ + /// + /// Gets a configuration setting from the WebApplicationBuilder. + /// + /// The WebApplicationBuilder instance. + /// The name of the configuration setting. + /// The value of the configuration setting. + /// Thrown when the configuration setting is missing. + public static string GetConfiguration(this WebApplicationBuilder builder, string settingName) + { + return builder.Configuration[settingName] ?? throw new InvalidOperationException($"Missing configuration setting: {settingName}"); + } + + /// + /// Adds default services to the application builder. + /// + /// The type of the application builder. + /// The application builder instance. + /// The application builder instance with default services added. 
+ public static TBuilder AddServiceDefaults(this TBuilder builder) where TBuilder : IHostApplicationBuilder + { + builder.ConfigureOpenTelemetry(); + + builder.AddDefaultHealthChecks(); + + builder.Services.AddServiceDiscovery(); + + builder.Services.ConfigureHttpClientDefaults(http => + { + // Turn on resilience by default + http.AddStandardResilienceHandler(); + + // Turn on service discovery by default + http.AddServiceDiscovery(); + }); + + // Uncomment the following to restrict the allowed schemes for service discovery. + // builder.Services.Configure(options => + // { + // options.AllowedSchemes = ["https"]; + // }); + + return builder; + } + + /// + /// Configures OpenTelemetry for the application builder. + /// + /// The type of the application builder. + /// The application builder instance. + /// The application builder instance with OpenTelemetry configured. + public static TBuilder ConfigureOpenTelemetry(this TBuilder builder) where TBuilder : IHostApplicationBuilder + { + builder.Logging.AddOpenTelemetry(logging => + { + logging.IncludeFormattedMessage = true; + logging.IncludeScopes = true; + }); + + builder.Services.AddOpenTelemetry() + .WithMetrics(metrics => + { + metrics.AddAspNetCoreInstrumentation() + .AddHttpClientInstrumentation() + .AddRuntimeInstrumentation(); + }) + .WithTracing(tracing => + { + tracing.AddSource(builder.Environment.ApplicationName) + .AddAspNetCoreInstrumentation() + // Uncomment the following line to enable gRPC instrumentation (requires the OpenTelemetry.Instrumentation.GrpcNetClient package) + //.AddGrpcClientInstrumentation() + .AddHttpClientInstrumentation(); + }); + + builder.AddOpenTelemetryExporters(); + + return builder; + } + + /// + /// Adds OpenTelemetry exporters to the application builder. + /// + /// The type of the application builder. + /// The application builder instance. + /// The application builder instance with OpenTelemetry exporters added. + private static TBuilder AddOpenTelemetryExporters(this TBuilder builder) where TBuilder : IHostApplicationBuilder + { + var useOtlpExporter = !string.IsNullOrWhiteSpace(builder.Configuration["OTEL_EXPORTER_OTLP_ENDPOINT"]); + + if (useOtlpExporter) + { + builder.Services.AddOpenTelemetry().UseOtlpExporter(); + } + + // Uncomment the following lines to enable the Azure Monitor exporter (requires the Azure.Monitor.OpenTelemetry.AspNetCore package) + //if (!string.IsNullOrEmpty(builder.Configuration["APPLICATIONINSIGHTS_CONNECTION_STRING"])) + //{ + // builder.Services.AddOpenTelemetry() + // .UseAzureMonitor(); + //} + + return builder; + } + + /// + /// Adds default health checks to the application builder. + /// + /// The type of the application builder. + /// The application builder instance. + /// The application builder instance with default health checks added. + public static TBuilder AddDefaultHealthChecks(this TBuilder builder) where TBuilder : IHostApplicationBuilder + { + builder.Services.AddHealthChecks() + // Add a default liveness check to ensure app is responsive + .AddCheck("self", () => HealthCheckResult.Healthy(), ["live"]); + + return builder; + } + + /// + /// Maps default endpoints for the application. + /// + /// The WebApplication instance. + /// The WebApplication instance with default endpoints mapped. + public static WebApplication MapDefaultEndpoints(this WebApplication app) + { + // Adding health checks endpoints to applications in non-development environments has security implications. 
+ // See https://aka.ms/dotnet/aspire/healthchecks for details before enabling these endpoints in non-development environments. + if (app.Environment.IsDevelopment()) + { + // All health checks must pass for app to be considered ready to accept traffic after starting + app.MapHealthChecks("/health"); + + // Only health checks tagged with the "live" tag must pass for app to be considered alive + app.MapHealthChecks("/alive", new HealthCheckOptions + { + Predicate = r => r.Tags.Contains("live") + }); + } + + return app; + } +} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/ProcessFramework.Aspire.ServiceDefaults.csproj b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/ProcessFramework.Aspire.ServiceDefaults.csproj new file mode 100644 index 000000000000..85ccce7e8426 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/ProcessFramework.Aspire.ServiceDefaults.csproj @@ -0,0 +1,23 @@ + + + + net8.0 + LatestMajor + enable + enable + true + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/ProcessFramework.Aspire.Shared.csproj b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/ProcessFramework.Aspire.Shared.csproj new file mode 100644 index 000000000000..ba15679e7f9d --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/ProcessFramework.Aspire.Shared.csproj @@ -0,0 +1,11 @@ + + + + net8.0 + LatestMajor + enable + enable + $(NoWarn);CA1716 + + + \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/SummarizeRequest.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/SummarizeRequest.cs new file mode 100644 index 000000000000..0ad107bfa245 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/SummarizeRequest.cs @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace ProcessFramework.Aspire.Shared; + +/// +/// Represents a request to summarize a given text. +/// +public class SummarizeRequest +{ + /// + /// Gets or sets the text to be summarized. + /// + public string TextToSummarize { get; set; } = string.Empty; +} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/TranslationRequest.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/TranslationRequest.cs new file mode 100644 index 000000000000..e94118c74d66 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/TranslationRequest.cs @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace ProcessFramework.Aspire.Shared; + +/// +/// Represents a request to translate a given text. +/// +public class TranslationRequest +{ + /// + /// Gets or sets the text to be translated. 
+ /// + public string TextToTranslate { get; set; } = string.Empty; +} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.csproj b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.csproj new file mode 100644 index 000000000000..187beb78372b --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.csproj @@ -0,0 +1,25 @@ + + + + net8.0 + LatestMajor + enable + enable + SKEXP0001,SKEXP0050,SKEXP0110 + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.http b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.http new file mode 100644 index 000000000000..d1b8e9f5ea86 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.http @@ -0,0 +1,9 @@ +POST https://localhost:7261/api/summary +Accept: application/json +Content-Type: application/json + +{ + "TextToSummarize": "HOW PROVIDERS AFFECT YOUR COSTS When selecting a health insurance plan, one of the most important factors to consider is the network of in-network providers that are available with the plan. Northwind Standard offers a wide variety of in-network providers, ranging from primary care physicians, specialists, hospitals, and pharmacies. This allows you to choose a provider that is convenient for you and your family, while also helping you to keep your costs low. When you choose a provider that is in-network with your plan, you will typically pay lower copays and deductibles than you would with an out-of-network provider. In addition, many services, such as preventive care, may be covered at no cost when you receive care from an in-network provider. It is important to note, however, that Northwind Standard does not offer coverage for emergency services, mental health and substance abuse coverage, or out-of-network services. This means that you may have to pay out of pocket for these services if you receive them from an out-of-network provider. When choosing an in-network provider, there are a few tips to keep in mind. First, make sure that the provider you choose is in-network with your plan. You can confirm this by calling the provider's office and asking them if they are in-network with Northwind Standard. You can also use the provider search tool on the Northwind Health website to make sure your provider is in-network. Second, make sure that the provider you choose is accepting new patients. Some providers may be in-network but not be taking new patients. Third, consider the location of the provider. If the provider is too far away, it may be difficult for you to get to your appointments. Finally, consider the provider's office hours. If you work during the day, you may need to find a provider that has evening or weekend hours. Choosing an in-network provider can help you save money on your health care costs. 
By following the tips above and researching your options, you can find a provider that is convenient, affordable, and in-network with your Northwind Standard plan." +} + +### diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/Program.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/Program.cs new file mode 100644 index 000000000000..5173182ee154 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/Program.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Azure.AI.OpenAI; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.ChatCompletion; +using OpenTelemetry; +using OpenTelemetry.Exporter; +using OpenTelemetry.Logs; +using OpenTelemetry.Metrics; +using OpenTelemetry.Trace; +using ProcessFramework.Aspire.Shared; + +var builder = WebApplication.CreateBuilder(args); + +string otelExporterEndpoint = builder.GetConfiguration("OTEL_EXPORTER_OTLP_ENDPOINT"); +string otelExporterHeaders = builder.GetConfiguration("OTEL_EXPORTER_OTLP_HEADERS"); + +AppContext.SetSwitch("Microsoft.SemanticKernel.Experimental.GenAI.EnableOTelDiagnosticsSensitive", true); + +var loggerFactory = LoggerFactory.Create(builder => +{ + // Add OpenTelemetry as a logging provider + builder.AddOpenTelemetry(options => + { + options.AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }); + // Format log messages. This defaults to false. + options.IncludeFormattedMessage = true; + }); + + builder.AddTraceSource("Microsoft.SemanticKernel"); + builder.SetMinimumLevel(LogLevel.Information); +}); + +using var traceProvider = Sdk.CreateTracerProviderBuilder() + .AddSource("Microsoft.SemanticKernel*") + .AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }) + .Build(); + +using var meterProvider = Sdk.CreateMeterProviderBuilder() + .AddMeter("Microsoft.SemanticKernel*") + .AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }) + .Build(); + +builder.AddServiceDefaults(); +builder.AddAzureOpenAIClient("openAiConnectionName"); +builder.Services.AddSingleton(builder => +{ + var kernelBuilder = Kernel.CreateBuilder(); + + kernelBuilder.AddAzureOpenAIChatCompletion("gpt-4o", builder.GetService()); + + return kernelBuilder.Build(); +}); + +var app = builder.Build(); + +app.UseHttpsRedirection(); + +app.MapPost("/api/summary", async (Kernel kernel, SummarizeRequest summarizeRequest) => +{ + ChatCompletionAgent summaryAgent = + new() + { + Name = "SummarizationAgent", + Instructions = "Summarize user input", + Kernel = kernel + }; + // Create a ChatHistory object to maintain the conversation state. 
+ ChatHistory chat = []; + + // Add a user message to the conversation + chat.Add(new ChatMessageContent(AuthorRole.User, summarizeRequest.TextToSummarize)); + + // Generate the agent response(s) + await foreach (var response in summaryAgent.InvokeAsync(chat).ConfigureAwait(false)) + { + chat.AddAssistantMessage(response.ToString()); + return response.Items.Last().ToString(); + } + + return null; +}); + +app.MapDefaultEndpoints(); + +app.Run(); diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/appsettings.json b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/appsettings.json new file mode 100644 index 000000000000..10f68b8c8b4f --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/appsettings.json @@ -0,0 +1,9 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "AllowedHosts": "*" +} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.csproj b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.csproj new file mode 100644 index 000000000000..59be1e8a4d6a --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.csproj @@ -0,0 +1,26 @@ + + + + net8.0 + LatestMajor + enable + enable + SKEXP0001,SKEXP0050,SKEXP0110 + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.http b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.http new file mode 100644 index 000000000000..f08fca693f69 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.http @@ -0,0 +1,9 @@ +POST https://localhost:7228/api/translator +Accept: application/json +Content-Type: application/json + +{ + "TextToTranslate": "COME I FORNITORI INFLUENZANO I TUOI COSTI Quando scegli un piano di assicurazione sanitaria, uno dei fattori più importanti da considerare è la rete di fornitori in convenzione disponibili con il piano. Northwind Standard offre un'ampia varietà di fornitori in convenzione, tra cui medici di base, specialisti, ospedali e farmacie. Questo ti permette di scegliere un fornitore comodo per te e la tua famiglia, contribuendo al contempo a mantenere bassi i tuoi costi. Se scegli un fornitore in convenzione con il tuo piano, pagherai generalmente copay e franchigie più basse rispetto a un fornitore fuori rete. Inoltre, molti servizi, come l'assistenza preventiva, possono essere coperti senza alcun costo aggiuntivo se ricevuti da un fornitore in convenzione. È importante notare, tuttavia, che Northwind Standard non copre i servizi di emergenza, l'assistenza per la salute mentale e l'abuso di sostanze, né i servizi fuori rete. Questo significa che potresti dover pagare di tasca tua per questi servizi se ricevuti da un fornitore fuori rete. 
Quando scegli un fornitore in convenzione, ci sono alcuni suggerimenti da tenere a mente. Verifica che il fornitore sia in convenzione con il tuo piano. Puoi confermarlo chiamando l'ufficio del fornitore e chiedendo se è in rete con Northwind Standard. Puoi anche utilizzare lo strumento di ricerca fornitori sul sito web di Northwind Health per verificare la copertura. Assicurati che il fornitore stia accettando nuovi pazienti. Alcuni fornitori potrebbero essere in convenzione ma non accettare nuovi pazienti. Considera la posizione del fornitore. Se il fornitore è troppo lontano, potrebbe essere difficile raggiungere gli appuntamenti. Valuta gli orari dell'ufficio del fornitore. Se lavori durante il giorno, potresti aver bisogno di trovare un fornitore con orari serali o nel fine settimana. Scegliere un fornitore in convenzione può aiutarti a risparmiare sui costi sanitari. Seguendo i suggerimenti sopra e facendo ricerche sulle opzioni disponibili, puoi trovare un fornitore conveniente, accessibile e in rete con il tuo piano Northwind Standard." +} + +### diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/Program.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/Program.cs new file mode 100644 index 000000000000..ce4e12610699 --- /dev/null +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/Program.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Azure.AI.OpenAI; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.ChatCompletion; +using OpenTelemetry; +using OpenTelemetry.Exporter; +using OpenTelemetry.Logs; +using OpenTelemetry.Metrics; +using OpenTelemetry.Trace; +using ProcessFramework.Aspire.Shared; + +var builder = WebApplication.CreateBuilder(args); + +string otelExporterEndpoint = builder.GetConfiguration("OTEL_EXPORTER_OTLP_ENDPOINT"); +string otelExporterHeaders = builder.GetConfiguration("OTEL_EXPORTER_OTLP_HEADERS"); + +AppContext.SetSwitch("Microsoft.SemanticKernel.Experimental.GenAI.EnableOTelDiagnosticsSensitive", true); + +var loggerFactory = LoggerFactory.Create(builder => +{ + // Add OpenTelemetry as a logging provider + builder.AddOpenTelemetry(options => + { + options.AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }); + // Format log messages. This defaults to false. 
+        options.IncludeFormattedMessage = true;
+    });
+
+    builder.AddTraceSource("Microsoft.SemanticKernel");
+    builder.SetMinimumLevel(LogLevel.Information);
+});
+
+using var traceProvider = Sdk.CreateTracerProviderBuilder()
+    .AddSource("Microsoft.SemanticKernel*")
+    .AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; })
+    .Build();
+
+using var meterProvider = Sdk.CreateMeterProviderBuilder()
+    .AddMeter("Microsoft.SemanticKernel*")
+    .AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; })
+    .Build();
+
+builder.AddServiceDefaults();
+builder.AddAzureOpenAIClient("openAiConnectionName");
+builder.Services.AddSingleton(builder =>
+{
+    var kernelBuilder = Kernel.CreateBuilder();
+
+    kernelBuilder.AddAzureOpenAIChatCompletion("gpt-4o", builder.GetService<AzureOpenAIClient>());
+
+    return kernelBuilder.Build();
+});
+
+var app = builder.Build();
+
+app.UseHttpsRedirection();
+
+app.MapPost("/api/translator", async (Kernel kernel, TranslationRequest translationRequest) =>
+{
+    ChatCompletionAgent translatorAgent =
+        new()
+        {
+            Name = "TranslatorAgent",
+            Instructions = "Translate user input into English",
+            Kernel = kernel
+        };
+    // Create a ChatHistory object to maintain the conversation state.
+    ChatHistory chat = [];
+
+    // Add a user message to the conversation
+    chat.Add(new ChatMessageContent(AuthorRole.User, translationRequest.TextToTranslate));
+
+    // Generate the agent response(s)
+    await foreach (var response in translatorAgent.InvokeAsync(chat).ConfigureAwait(false))
+    {
+        chat.AddAssistantMessage(response.ToString());
+        return response.Items.Last().ToString();
+    }
+
+    return null;
+});
+
+app.MapDefaultEndpoints();
+
+app.Run();
diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/appsettings.json b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/appsettings.json
new file mode 100644
index 000000000000..10f68b8c8b4f
--- /dev/null
+++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/appsettings.json
@@ -0,0 +1,9 @@
+{
+  "Logging": {
+    "LogLevel": {
+      "Default": "Information",
+      "Microsoft.AspNetCore": "Warning"
+    }
+  },
+  "AllowedHosts": "*"
+}
diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/README.md b/dotnet/samples/Demos/ProcessFrameworkWithAspire/README.md
new file mode 100644
index 000000000000..0ad079035b03
--- /dev/null
+++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/README.md
@@ -0,0 +1,45 @@
+# Process Framework with .NET Aspire
+
+This demo illustrates how the [Semantic Kernel Process Framework](https://learn.microsoft.com/semantic-kernel/overview) can be integrated with [.NET Aspire](https://learn.microsoft.com/dotnet/aspire/get-started/aspire-overview). The Process Framework enables the creation of business processes based on events, where each process step may invoke an agent or execute native code.
+
+In the demo, agents are defined as **external services**. Each process step issues an HTTP request to call these agents, allowing .NET Aspire to trace the process using **OpenTelemetry**. Furthermore, because each agent is a standalone service, it can be restarted independently via the .NET Aspire developer dashboard.
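The step implementations that issue these calls live in the `ProcessFramework.Aspire.ProcessOrchestrator` project. As a rough, non-authoritative sketch of the calling pattern only: the step class name and the plain `HttpClient` wiring below are illustrative assumptions, while the `/api/translator` route, the `TranslationRequest` contract, and the `7228` development port come from the TranslatorAgent files earlier in this patch.

```csharp
// Illustrative sketch only — not the actual orchestrator step from this patch.
// The class name and HttpClient setup are assumptions; the route, request contract,
// and port mirror ProcessFramework.Aspire.TranslatorAgent above.
using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using ProcessFramework.Aspire.Shared;

public class TranslateStep : KernelProcessStep
{
    [KernelFunction]
    public async Task<string> TranslateAsync(string textToTranslate)
    {
        // In the sample, Aspire service discovery would normally resolve the agent's address;
        // a fixed development URL is used here to keep the sketch self-contained.
        using var client = new HttpClient { BaseAddress = new Uri("https://localhost:7228") };

        var response = await client.PostAsJsonAsync(
            "/api/translator",
            new TranslationRequest { TextToTranslate = textToTranslate });

        response.EnsureSuccessStatusCode();

        // The agent endpoint returns the translated text as a plain string.
        return await response.Content.ReadAsStringAsync();
    }
}
```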
## Architecture

The business logic of this sample is straightforward: it defines a process that translates text into English and subsequently summarizes it.

![Architecture Diagram](./docs/architecture.png)

## What is .NET Aspire?

.NET Aspire is a set of tools, templates, and packages for building observable, production-ready apps. .NET Aspire is delivered through a collection of NuGet packages that address common challenges in modern app development.
Key features include:

- Dev-Time Orchestration: provides features for running and connecting multi-project applications, container resources, and other dependencies for local development environments.
- Integrations: offers standardized NuGet packages for frequently used services such as Redis and Postgres, with standardized interfaces that ensure consistent and seamless connectivity.
- Tooling: includes project templates and tools for Visual Studio, Visual Studio Code, and the .NET CLI to help create and interact with .NET Aspire projects.

.NET Aspire orchestration assists with the following concerns:

- App composition: specify the .NET projects, containers, executables, and cloud resources that make up the application.
- Service Discovery and Connection String Management: automatically injects the right connection strings, network configurations, and service discovery information to simplify the developer experience.

### Running with .NET Aspire

To run this sample with .NET Aspire, clone the repository and execute the following commands:

```bash
cd dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost
dotnet run
```

A dashboard will then be displayed in the browser, similar to this:
![Aspire Dashboard](./docs/aspire-dashboard.png)

By invoking the `ProcessOrchestrator` service, the process can be started. A predefined request is available in [`ProcessFramework.Aspire.ProcessOrchestrator.http`](./ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.http).
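The agent services can also be exercised on their own: each agent project in this patch ships a `.http` file with a ready-made request, and an equivalent call can be made from code. A minimal sketch against the SummaryAgent follows; the `7261` port is the development port from its `.http` file and depends on the launch profile in use.

```csharp
// Minimal sketch: call the SummaryAgent endpoint directly, mirroring its .http request.
// The port comes from the sample's .http file and may differ locally; the text is shortened.
using System;
using System.Net.Http;
using System.Net.Http.Json;
using ProcessFramework.Aspire.Shared;

using var client = new HttpClient { BaseAddress = new Uri("https://localhost:7261") };

var response = await client.PostAsJsonAsync(
    "/api/summary",
    new SummarizeRequest { TextToSummarize = "Northwind Standard offers a wide variety of in-network providers ..." });

response.EnsureSuccessStatusCode();
Console.WriteLine(await response.Content.ReadAsStringAsync());
```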
Invoking the process generates a trace in the Aspire dashboard that looks like this:
![Aspire Trace](./docs/aspire-traces.png)

Additionally, the metrics for each agent can be monitored in the Metrics tab:
![Aspire Metrics](./docs/aspire-metrics.png)
\ No newline at end of file
diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/architecture.png b/dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/architecture.png
new file mode 100644
index 0000000000000000000000000000000000000000..747e2ba19331fb59bd7896b1c0a1a9a40c8ccbc5
GIT binary patch
literal 45609
literal 0
HcmV?d00001

diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/aspire-dashboard.png b/dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/aspire-dashboard.png
new file mode 100644
index 0000000000000000000000000000000000000000..2a1178319648a602898a2aae1034ea45e0a49448
GIT binary patch
literal 114295
z4_;mQ?>I;&G3^$1ZAP|6+?AfAN161Leh@R0ubO(42gMQJ!i~h-I&i(&Y{WXLC2NG@o z(#9!`0s4mlM8~z78>qmI?Ps&*dgB7TfG>C&{U5Qaw8Q2e?#={=>S zyj%!W^#{Cr_n4N~yZnSdKh9gvu zQoC+!fbHQW;VxF7=g$)igt1`|qdnaOkAH7Og;nozFO^8^My=^OpHABG+WHdJFSi2a z`L7rvc_;9Sk$ddM4HaI2jQU5`f&vDw2 zn_gX#h_6RN>`%CcuB`Mh1eKlPtsbk1N!nMK?0`W#n# zD5DaOG#9$x5N zeZm)+{ZUyBW0zDoQf0kJGp)ya0R!qmyZU-O%1@s{h+npcK#c>r0>tZgt;_>#$MuUg z6&up-quNGoJlngt{}>gPDzx=B620X*E0!g+2_BcC^XO3^=_x1-3dD78W!!EE~v1w^Vk&%Dv7MU2z%F3Pri;J!I_a_7O!YJpZ zF2Ebr^z`%$4)Wsuj$F+PryY50a*apn@VPGA#(0t+F*ob{son0XjAeoU z{@iBFl7N+syb?^htD{51z#!qxThm$QQ)Dk2FH%zKf&T>roQqlA?rf{$VxFIOGwfw+ z+B;q)XV~EX28{o&0~-h)$AZ(c16!hBvOC<`#wIx^q_8k^^zwAdbBBkn}k~GMUwjb z$Kgv-I21lHW~rhzhZ2HRXiMDk4@~045r6JX$As3$y_ZZ3-s-7N|NoY`dshNn~ z|BC9qX|oRJwg*^yOexD_Kc_M-2|*j*2OqUv>BndWd#+m_3g3Gu)ARCZbCO9-JCt%s zCc$NY%L0J*9VUENfLEvm3Z}}PW~K2OuR-l^&inTlK+UHt$&Cj^dznDw@j|~l`SB~T zIVcbUkmLkg2p&6tQl{0vlamY5dX^PNwA9qMbr<^-nJyQ1YE6z&RN!2@ylj2!0!c;Z z7>9&Hk?An}WSjLOIa7uucm{#+yEXg_=k9iPMUH`g;p zx}99froGs97d{SjmQmAi1ruXeA1DkWRr;h#dJO&h7SuNPuKj0MxH)vnxC5s7Tf3P} z-T-604Ss?*P46&Dy8;kiOyWVp%dRzq)^j=0#s%Q$Nlwco7x@5>P450Z%+HC?q)RqI z(~hG6yD4{^7FSIaP#|$7d=FRI!TKd4PmyVKDQ~uvfNqxxkjH=SdJLOx?fK9=jX|#l zO!AYn-@U5=V!09~5V;-gpw-s^tHL8>6r7SJ{LcH6#nhMW(|&B39{`1)f) zl>QfWZy8Y4+IEdDQ4A2+C?TMrbR*rMf+F1@-5@P3Z2<}>(hY)iH!M;Ckw!^rknZj} z*K~jTdG`DLIOqI*`J>`uG1r{;eZ{!O7D3*laqwXtqC zkFpG^a;%T}#j(@Wf~iM)*<@jZMo@Qnpevov6xWq36xq|A9O}3{ML+w)vyAd{$Q+>7 z@=O$EmHfYUP~Nys!Yvpkz9;48h?H(j67xeCLGWa{gi^q#B1<}wb+$=vJI-9d2rwv5IsBo40}^H>h7I!!iOjnpE3+ymBShHWMmZot_vBd z8^aqi3no8zeODdOi^amFC`Ge~#jnW?gmf`k_PtWrdd8~G1@y13zeG0EQ|cvRd2Rba zh@2Z3cy;R|uuk+lj1>YNwsWMxY}-6-q94qUIVeR zIhge^EgmcZ7a>nTo(^{g^hzPHfDhK%^IXkRAOTRpzjYCZ1yC=s1*ASb5ft7t0f^}k zJ`yko)Nb+HHwu#f_17l>r^R^?#t+n;?yw=mks{4pUr!ZB=PI4L)5GjBk1g3;y*gcB z2EbJSOLQPqUL;d0oH5!qL>l-*5zHFNuqWn@Zj3n(TZ6lXY|`$6qS{<6>ko8*(7yO1>9tMe0xjZg1ewIw^PlH7(G_)c-f{UOTJw;#`mvjh zTc--4}3Or@59sGFuz36)Gwcl<&vECbmgR|G9ej$UfKU{3qB6!5gE@Q6g0?x}B z<>Ovjh#2tBCxha_ZFs|Z__b{@9-EP;&uu!u`2s9!K7mkJ z3M<$iS?Yv%N)HBhD5cKRAD=LpVWpoagpW30Na__M>}19(oEG z0OBG*NJf@zmf+fS_n!&%JzdgrN5)TqXTp~EJ`oIVE;|Veko}&T3Xh4&LVS~^n%*-3 zadA%+OrEBKAi^-2n3%wW)a8&rhx(+Q1v`jmlv5@3(dokcp}I--#!hrdN*b^6Vo~$) z>xbMHf{tQ)^_ct$9ssxt*!_NkW|jmpR029I09G435`+-52+xlbZ*Bou2{JN)5v)2E zjcKX-~ePeCq0C=-CF z1prdm@d)Q3F_7}xr+|oGHD&Am>Rlyp^+Q`JsLc&SSla$_9*`}0F=}$`;kUZqb#Pte@bqY{)Okg-FH>0p?j;xAT$^Fx)9w^WEF2tFL&IdS z$1zG!y0e^cmX$zu7^izS`N>ZIgbU}EfQp!?rftr76k~}!g-FrGnd3Vl@zD+yNvbyw z_p-nD9`;{-$knD+L(3oOJ7ApICcIENr!lZ;PQLenFOO`hyKiEVx#9S&>7f5=aMGpk zH3n1O4jmVl3Ie2{t)MVO*719A+fd(2s&8&?{sBmK z9tcY+785yydMe)k?P^kzc{=e)vQ~{uqs-Z7;@tH1||87?>BAwGPpxoZU&vir&T*<#h2P10fHu)CHZuk=-Ij;k zmILf|91;$31YE@giL9EEQuD6LXONBhGZaX|iNn&yo-~67ZV@s!RZMWgQJO z+QK@+#~2zWsw)u#nPskgY$QivVRu#Y&&ID|NTU>Jq5NUr~Rg zXJuF&ai2mY3sA{^Yie3VqB8_jps{rHfV~u}Y@*1h?PUw_M$xSGK%;Y@L#sJl%+{=O z&P9?LlI+WdZF-a$w(6OiR;R}Yhmh^4U4GDl!$;lmW`OhRNG81A z{Oc=B3*c@}@_-K;)bbtW$_hZi+<}X?0Dc`ve*e6=+m40xV9`T(b2aevY3F69&)%|z zoa%a-?E?W#8!6(!rK%6Y7P!7Zz|lpnqv<_acvMtItW9N$?fidyRrcx8v$zLPB*w#> zi2wxhCi-Iq*rfHGWtGel^MkqTfz`?YCX(LCMpy#>Ht<`X$#PKLfW}MX*xUpvd0($nj`Zwi z9FaPTZ{;9CLd=<3J3DVpBh=u&8h00m!6o3YAx;vocO~r(f+PJ=CWL2R5d@cl>>aC_Shp%SEEn5`yAY zo{tu3^lbk=l0yW)gSH)JtzDieaS4HWM8^Q2*$^i{docEk$~cwBF}MdS*mKgZ7g z8+&ONzDNPcMz5{mXjWJ zCL&gV@l}V3WH-#-k#4~OtzEU%DY&&fYg`5$U)|T+l28{I*Ul*!#7WPQ{gcdmoP<<| z&>QV+bRVMjJiO5Rh66)Jt|wDd$xmQQQZGP^26)o_5^-%7+^oeupG$BjpU>jP7l=#H zEUaIv5pm4O=Gj)IA-_BN#ZIY8z5QpSdV)h2N!FaMhfWVs-oX7>^E~R@#Mzj=pKg*Q 
zLxs<>9*NFXjxLE3czB>*yi$!?52og>xuc91B$(yZ5R~nZ=m;o5RwJ9vs-*f^h^=m3w=ls2)#jUERljAa;6B8#<3SmFiF@#ov#~Iw=qVc#c{&RK?67C?jVrIe!Tkv64kLMPBu$)YdwhCMPCK zEjKGGd~}=;^9dm&-fXqfn0gG)*&$~pyWiPvbOxon@gLoFGs0XIv?9II2EKcz<+|=h z6qt|KyKp%z;Kn<8V@}zyVxsuO2pK)5?OdmnfO$$MFRdgAHuXB#=vs!+_}n) zCNf0-V1Bs0Ft+XD|YP1gG$jgh@i_&>|=-hoM}W?z}L7MRU?!u8EgJ~#qOAGIX! z*z?b>^Kr^mQ$~nA<0GN&`^1mCF>h`EdW)Yo^!9siQiPeQZ-|J8gf#9il~N5`VeVPN z_oVC(s2&A&GeL>k^(d*kaOcqKt6IZp4ab1x*P^T3b)35Sfw%Hu|1J&Mly(%OP~Q~p zG)QDrz)krKpKoz7J3r!^Av@kt_`o$opOpupiO>S4rlpZufQ_UAfX_*^lF(|xU5fz^ z-+aW25+d#_!vS<=Px2<-Q9~?F{V_>aTuuHO#*Ylb!d*{X(AV(s6*E%XsTsiH6V@+x zB!uu>N|z6<(i(}jNG>ndSp=>c2#m-OtHYr>?>g5T|I|6x>?A!G3J2mAhxXdpg5;kq zCbxCeO&)K~e-4rgK}sa>po{UtF7Z5amIdh=RB2WDn+Dk;TdaPjl^!KyLg;Dh=yXg$ z8t(bb^wj_*>v8V^+uYoFukuWT7h#3FW;7=Ifv(FVcTScVM%_JcX&gk{L{Ss67AxMU z&P|r-h~rlV=iMQ&F>s}qclBWfi>17aIHUxJ9H6q;Vx^V@331KQr6V*VI^JLr>D)YC8 z5Huq&aKxLXKU-a%Soe7nTASI#vsMSzjT_<@$E}wxpV$w9uop<(kac81I3nCu_|ImP z(r#2mzZU<7?zLRzTC4P+Z)m8T^d;*B!6Ub%_ks}?ASEeahsvY=@jpe@bkegk+{#9o7%oY~+H?f% zfRKRT5y8B@2uk>!g&qk+TLD*j@#+oz1p5eXwZx6QnO1upa{6dO#!Y91v*W4CvK>y* z_()&ez8^`SOZD@q2dC$l_%r3ixTlBhQ1pb=HOY;b7cVY?&)Wb1Rp9I{||*xoojPGn3jqm=8s6vHuXIy)^CfDh)693)p@Yo`0hIREG@jk zW~Fz2ku6l9nG0b)4DRDBZ-H4E07=5opd*yGV^%Q$+5i6P%xhztu%{S3-X1Kbr#pG`hM zdNc$*@Ws8A1th*q2(O49k=wZWFQ?^SiV$=WH(*t`nKcm858=ZiR0*JeNFr{e<6pWD zmM|WQazP-R_Ffom_Yg$gGpF=;qci}P)kt_LdSbs<41cn5yYE^#3}%A{_U}?hvKt_j zJlLS|8>yf4!P)x#cHP;Dd|M>j05}>GKv|h!h_uyz8tx3lz|^a1Xc@rHHG3zpi7(`Zv}yUx(O?>hAZDVYwf<_dkRrq_$YmdEA^ko@04_U4NRG?D z*th=r=P~lNWvOH)1B5GIY}Tb#ZZk@7<3VWLLMXn+yL|k zvY!N->WxRpeSqi1dyZOFwUji(QAO}nXq*BYMelu?B1r~um-B!3jvcZV*p2=`3>ZyX zMsaAlxO$;`yCw7-)@Yx=l(d1E0D@v7(5LmUg8NXRt0E1La`mgwGZm=Lm+(u8JR7I4kz%Me}Cx-$5BEmXGU{(ZUvmLLUlSeVE zWPFIS#8TaAa-x4HEptV0(F$|4}NE3G7jcW5LGHAHyAZhRC z*nWpN;2Saw5|a&q_G24J&{1u!t=Yp$8W~{EK8_jyI9_VmgWh&d3r>vy!({;ko{JIj ztQ@DgO0xhGAGY9C`F|ew^BfJUpIa^$<}+WD2U}Iw|L+^{^L}VU> zkB>jgpbM36jF4;5pXdCmU~**bg-T6~g_P1PuJz7VsZ8H4dnJ!bcOkW`JFlWv2cFML zhI7uoc#jv~Q&RdTO^5Q^zYCJwHl}tqy|8D0r8i#bGM{(IXR=G|n)ZC>c7jfFvw4ub zi~kA(^==)%RSDSufT_l`g;Xj305}0@K8%f1(A1KL5VIVAH(qf-w_mt;krB-4|0EX4 z5a+eA-0f}0HD_pVdP~fe8*T<65xHw!RaY*KFA4HhG`Q(PGH{TDBLpfT+PA~{>q{>v zvmQ2O4Ua}5Sfq|~G@2RYCZMBgUT}=@^UtYHKq5Xe?l?OcKRdvwMv8`Kwf`i&XK7{m zp&VQLd(F`#;4z`6{OJM=eg9C@5i4+HRYANFajC<*T%$I?6Os}t6>}V?dsA5RvxKb5 z?{j<4?-}j5rW9Lfr3MO1b6IaI2=iK$JPIPu{rNee=f|9GU+)`P0j0ouKl*WE!zMp+ z@UZo_Hmv^s`d-uf3+*88;v7wEwF92bll<2_EzT2WEMvb}&fmjSvA^?sWL*DzYNKHx zJ=`O8UDs6C$AC4%R)Qikp^n@h`r9wIPfeju(?r{+qP1rsmZrnfi*7?d$?X@4>_nD7 zv@sT}bCv+}3{|@pqjSJ*Bnv!65C8^V3W>rTBBu}5D)f0AT_2xlWGH_21?86!t8@qv z^OH|dQS@lD7HxXfBJv}oD*q2ZV9$AYaO^Y8R(M#PWrIyw8^!t_{?b~kNs|2lR z=TDSUM)IoblXxZ(Ptp190kK}8%!`#ZVK4U+~ElN%7Z3zYM1tI|K zt@AoP6O=p#hD3PjOE0gLUTKvI3S5-cwu9rQe*5YP& z$!TNU1%+aK8*=olBqb%~#_q0E-Lr97VP2}fg7=#%kW*b*$(+)V85m+_IW96@`-&9_ zO5iEgZc>LF67ikfz1{ZL4B{{iL^t~EzQtU~Zztf-+y^YXs}3}oUMGi0@2gsFqXPuO zI1x`xs8X^tdbZRbe%)Oj{9IAN2P>>u!4`rM0`eocgVvuT3I=ps4s>Rj0RsdS)h40M z;y?g+;}Dh7{{VKq0F?+sz=y<=P9S=;%!xxGGNKKTU?~#Bph@o%GQJBn0dg_ik*Pwf z`T5XNJU!mum@Pt*oCLXRqjqNxpjQa!!haKOwDi#R>XuT(Tx^GF51hYn;v2td58DmO z9nYZ>208;aNgxUSglU9o#_gN`M3T8!;MpMDb?rO_I7?=ynJVb%#_w+kVh*v@Dggs^ zFA3(3GGI``9*Io@?#+-k?%c_H^Yn_7h}6$^5k-IhO$U-_;TaIv?YYbt3mM;MC+{T>yJZN zE9WTzyCktdys;54FtH9Q1_s2e9WeX!cCm|e@(I0=5+`_{JATC^hON?R3qKaaYYS`= zg}-|;c;%MPp*2h>5Re}>3gU$VnwFp15hFp9Fe*3}p)iG^2#BH;^R_-Qca^ud%Lqe3 z03cP+9~{&Ra%hKbWDZn``<^!cgpM!hJM{FA0Uh~y%Vi`9DymLEg}!Kn9u4ns{shx- zwfugIHIop-T9TX$5zcVKgn+GxK8rr&7dofg@!dqwG9-VC)d1(T68h__+(A%5WJ2i$ za8xs|P$O4<0-~OwEsky{B<+Bd8fXK8w7q$E!!h`d=}S 
zh64JjU3c=ZnS+Sn;3Qz4GV$=J!E~&h_e}}RC6R>=s38vmiC6y0RQYH1DX>G#HLrhj z1r-qVXFsY! z?zt+o0e#Si>40aml&owYB%U;c*FEv|wePTd3B*2_A$1jd&Orf!oH{~yV>S~KHkAHW zJX$YDJ&ji2N_;2@Mov7dcyjOEXjd{0PTR- z38>zfn3-i@n@3V30!*mgeeJyGUbfDANQ0A}7@lwnBH7bIwoh+nC@g{;xB%p32quX5 zOT&Oz53Z2FdGJ3Br%v^``XV3-g{9b6ENhBTk}^GL8H!ofYOj$Qerr-YwW^WWND}$1 zt?xd=p4VpFl-U9A!08m1LwNE4w+&HzbRz=X@yxjMMYp&L>$ms#gc1mizE-s%EYMHtg%B?4rV z!X3}Hn`&A>h8)P@6VPPLk_aIK6Dae-*C-0$>HGlfNMnNI50V0Xp#$cNc0)r$;Cd8b zyp{;*{Vah8=f*_0U4+tc{mA`ua`JZK*)g5EM0#-&U?zyO4LYt3b_^nnTp6zuRbQtU zHCyu7a;!jRnmeGOUl=q?>;nKd!fc%$fKG@b)c-!>ijPT3*dwfn>wb&B!qjDX8qj~a zSr|^I#LW!SHqzU@W!j#K=jM$n-8BWe z75|{1Ju+i{*Ugl%=TkVa9>^Avdji9O9BEc`qXHYQe0od>GBcWo(XKN1-L{LNK^iQH ztT@)XV|zI7r}6vuC8%4Pd8wze0k9?IG?fYVwuKyzxcX!uDL<*W9)^-%UYvvOOzGvF zyx)v#b6QWIW+1J3RMgg%@V`ho&vwIY_MZ-3-tl14z?ZLH(6MJRpVp`02^^>n&U+k0 zoq6(9c9E6F`{;9(rD6z?7u7={_8+&7gHtLO1-pw=wERs)gcVz6Pcrfjy?7+qrF!*6 z<3C_Axk{walI$|2dTgf9uD<m8idJe4_&Ystwffa0F{d>yS|68}L==~l01Z8Fr zHD5H{U22yBD)j-1@fRPau^1oX45sa)i~IJJ_D+p!Ylh*7peoN~O1hj~vLQ)QlGLxm zLgh|tO!gsab65o5GUv4H&*WT#*b1v$ZN`q)Tj!#*>*%@(wKWl`-ZjE0GhInU%l46Bvj3y>Dn~iA7mEZkmRtlJM4sucsTr@JlWyUuv)kMxMgDz7F z0BZ#CA|N6%{q-Mgq*-gIg{!aPlP{FMUtEzfSC&$$KeVmp=H~F+#~zWutsGj)T+Lp`yKkBei1TA`KF@5FvXsJ3G^t+&Y4 zU0M5moz_D1k4CndjxyyI1RzE!yTl426G1J?r8y8Y`70c2Z+UKN3$>N@sg8H4S?(-K z`Ui*cQA;T*QmGt2{QXZ7M*e(J{JFPYB7{KLKEyCA=N(1GXYR;H^h(`jIzD*#eKjs+ zV5XBRIhs0Mp5TZX{eLox1a|7EU}&oEd)2P;3Kk8Yvs_vq{#A?*4CPPXBl_LBCZ-4P zNQFZ7|A(-GF+vH4Lh>i+zWAH%4*mB}W`|b$b)>v3?SlnR#9iJ63r!|S+p1WJF>H)R zdtp43)ZBkhV7K&s)nsUqky5GqL++4u4BFkyAoPI`->OFt1I6laydvK}42?e@iVs_g z--EiZW>5Cp(Hr~Zvz68Dp6>+R8a(x88l}|l@4lyFkqZNRN_}^91Y){_4+f~|Nk$5= z_egRYeK*{;#^PwYh3XuVyNawC5*y9edB-|1(3N~MBC9FE2{b*6S!@g*7CCM~rQ#dCp z!(BpaYj-zpzLtQfIiR2HR!j4(D?Yb2vbCztTE4xyT){~-6#ORwod3^)m_s;hz`UoYr*8(H)Rz!F)UGS6lAPzmIBEI0s@_%nNZ(NG(yU-RyPkq@-k0%}@SmX#W4-H6^G2lGgqCS%d$rH|u|^vMd1M z7n`$2{2_tD#33vu20YKWVax@t7~O~i898G)E`>-AbP3V_WoN^~`a%L!-~Y3HRPgI1 zyiax_p*u=PfI`1k85k2HB=J8GipuK$e5`-OL9IJQmI!elkn}DKzzl#P^7&}BN*=z) z0YU3uUvs(kqX+YdYYOz)G?Kx=!3g^jwz74Cnguum|W}Kk5h*VtwPa$08kQBJ)*z_k92K@Gto#zaAhV^K zw30Pq6v~xhrR!7i&fuT_LQSWZh`#RZpR+U{XG1+yj>&K7ex3-yxOJt-QqXyMT{p-J$npM#2yVN@sUmSuOf z&RF>HI*1k0YX-!`#BhvEa$$BpwgmkmSD@+zfH9D4c@Ya#LX0D^q;rA_8|%8~Rjv|l zlrrk(-}6z%AGds-cn%e2Mz~R|>QRe*zuYs= ze4PK%-!TUj$k@76o_v!Ig(6(w%%;4>gqk>3Qxqv?Y_-E!&rh!QWdVNS%;w{PT&;9~ z1;s;p)8#3EJlH6d+Ab4y_kN=1ZZx$yH15c06z$5b?;HH^vSCs+%b~z}liyza&%pR9 zipGDB^poL4Db^=uxYwwAEx(HiQ7=Ni&oRB|RwG^G{JZM?V%RHY@!bEU?Ld z?2a@rkfL#Rl99xVG;Do28a+bB_vWZR8HYVezJ_Xlo_~(-6=My`kuYWvt#c@P zS6D~%)~OpJMzCYyd6~dK11@Vy7U%!-2XHA=f0+#Yk9!iQLs+);6EXP)az0|IZ1veJTlWI%?Rv0MLsyFV}dzn(Gi-*w)-cda8eN&#vs3TYieNQv@^ za>CXPbph@u$w5Za&`7 zC|w9dWEg7x`)0C6tPl_fmskzG4vA;|1~L za@)Umy?M`^zI^gCM1A-CGa#|IwhIYnHo^`>KkXP!L^a26_w97cMmvxf1oL7 z&+!x0nYMlFaO1?RFW-`kd*|eccayYqVMso|eLSgnt4_bO_O*9jk7QR7^sMmD$AA?^Fn;E(BNC}Kb~fzMwspkf%S$ytUN2PKsuWMdnZ|jan?Rk@=5nq5>Ao_%)oSChSO?_ru^9=HSl_~b^>(Xsi zC8_HvAHK!YFVp(_;qUUim3?7N%JxDn{l~S;Yf0hasSJi+)Gjc+&*d|*{N%$$B+2+! 
z<=2h3)Hd`j__tDNQ*UIPH9oUgI3*+5QWG3 zIbiM@4wqRk1Ca4u1i~B#GAD)&jE{m@@ZII4w{fnFEwTCKp5iHFz2W*4W;CO^k0n)E zn#S~nB`h+vx1@DX_&l*lmf@y&ejZzh%hXC5?JgO4l$ntd{?KYS$`)_CmCi@rQMp!eNR|X zjiSxrm7R>8d&qpRH89m!51(JQQwHepSCEivXV0b{WK{IN{v=r0n#uB+aTFEZMXN3v zJLl-Nv!0v1!aq~{9PrH_Cdya}$=Td=1c{@roGoy6q)xwsub+G6`VJq-V-y_N0P$k;6odg`^)B(R_s_$)Xg0m zW^_GK32)yvM`z{F=Hkz67?>Le1)ToHF)2!M`#YF*q2jQw720l}h;9uEsL61#{>0ny zuMB?sc&*!PYv37OTQiz5(*NzdE7Mla9CZPgh_dwJKUiz9pd_*5RoeX%@&g_}c1hs` zkoUHTNW#nWo?S(5PF)g20hoRbUja6MU+mTk!je|d$P6r<52{QIU;Z9Z#PvJ$dv8AG zEZyB5iKYWAp^(!Sep}rqA78wEJl89t{+E<;do;m{-K2IoWmsaKc0kWe{lPu~vl)pR zQ82kmTXBHcONm+YhrQJc#w)D~gD&3$=vX5|Q%TI+M(>LYEE|)^`(l!75UWgNF?`2Y z>vFDE5gScVvVRmysRl=s1@qiEsxh_x^-^B`BKfJw_C`Eq?Xlskj```C zpvL~=hZzIs@q^{~9Qk^=t-5RQK6>46v=Y63MT6fMms5^7IKkm9aB|P&fBT8be7{M7 zzOe5m)|wDXvbghDJVjWIPyRB6tvVVDE#G%XNcjG&mM3HziOrtQue1d9kOD>CDmA&F;*7Ejm6`*usGS|ESvlM^7$~vxqHIxiV90w{wt-^X zk-LOeRuv@r>oURd!;7MKL9KoL6WuCUm%XG)LIk*pzjxl_=MQmsIXQOLD>+$yV)8=EKXa|w z)4DgoFqBU9|!#1YWSMH#%9DQXzke{sw$F54kCorbsbQ$eo}oyDk74sJqJz~t)A zU;^1t!sID#n#)VBZ(7nDaVlrRQ4%JN-4nv|jfJ;g_f9KlhCP*~@pL0MnVh*XwQ1m# zF->NsP?1VLI!H;`EgVxW@4g%Pk(WEHPE(n)*^-8Bvs$_PlxOAXjEAi-p|q(!_Na^0(|rXdMpw$%{YvMkX+u18F79&oJ#W}- z`ts2dRoZ5iKXonU=8h-93x7E6L+L4-cv&Pw#HyEo@{+n*!r;Td7WXeYT=-b z>-Nu7d}wyZ+!zO@^~n^4JoV)KdwlMc}2F&H?}c1 zdv-ihAUx0&*SMueV%tMO3cNWinEqFp}+>m~`}nK%&Jn{pewIE?g(d zgL^J*I>8aE?BF#mnVb*zkXwd!$Oora@%8UnNARoW)5@~ms9!BNUpXZ4U@GwVmAT%l z`a8;UO3NkOv6`6*R@);A+pd|z7T2(4Y!hAG@_-ODPmy6*kL7@I@bOCZy!BAIz_U{B zv%_A=vFBv`mW&OrBC31U`J1EfCRDE|a_stQN-bxej`Nm@RoYoIH;xT%ZV;F96)8m* zGBQVHeHF8Q5?xD0xp}&7{d}aNEV5v4s=yrL*$}$bo8oGWWMbThE7uoDl6cmNx{Kyt z3Y~MJ`Sl%hD5_z|0o8?1+&R8B^SQuWC&{@czL%Y3IyW4eSc_j**eqGEmJ4{ z{-Vx9{2ga`9Za8cU3r`0<3X_*L#^KG!KJ)Z`x7Pu;X47_!dN09HW7{Sdl#B^-uSD( zUlYKPC@W&Pf#Xg$D2B6jPK3c=5>NUWooUFKIR3+IfBFhRAN{D1syA_o0#uxNzM3~V z%@me{Ol2#r8D{d;6jfDU8s&A=*$98zW3JESrAZOKwvzMYdP4cNB%YegA1`jN!DA!uSA&Le#&%HfH!6a=S%p*oUa*dL#7aPQWM535? zxp0mQ{Y2TeN5{Y3N!leWdZu&Rq#{J))Y|ysbzAgL4co zllq0i3C3b%7IS^V-dw7DL&EVjQjfR;Ycw~o@X}g(^6|#iD>7%l2~ZI#f|wud<5X2u zYBW2$bAF&~@=UN9Y?6q>4(BL7|N52pGvP=e!Z}UQ8d>RACEHKTo#(yyo;>Khz_fsU zlW!?GmCIo9<_Nnmfr=H$RGqgPVWFWx+L;Inmx$BI)OZa;XR%LDNx>v;Ub$x9%{1qV z@N`Lv?{4AzJC7n|maA8_#)^d_bNm=R#dNi-rui5%va;$Ax|NEqpR_HvPmOVpI&(or zT`x~ZyE~Jdf9+~{|1Jf_GZyy{4zZenrFJSy$%)kAbq`m}gPst5y*q7fRBG=gZ_pg@ zhk3+^&(;&x3ON+Uhbc_)(TH;OSDly`rPnpyq$pd>!dN!KU~{}oMOfr zzhYgb5q*kQNEn~3{26RWbaB(`TC;9-gg>J<+Dla=`rTKlmr#UU#0d{OZb}@FGUeE! 
z9se?!7F*PEmAN?gjqFdsZC@&y_6qi(dl7`sy@;AAux5t}4nt3w);8taE;|_;w5-pu z!IG`aF1^S~p(or!Vx=w>6Vc`WWz9#}dw02=YAJ~*;oNE0YhxA%pMG!3OIq|)DOMZB z;&1lVSiYZD*R5K}QJIC;o#bb|Dh*gkc~WoLK^>m@RV2tv=e} zqrK#7`|^>gXWS;^aZ^TMo> z1E8ChuYh6W`%riEje+N4EDxg>n8)$-O=o>cv6}YPA;&I?Jdb%@NoSSRp%_$rY?Oy) zpxM;C_^bTpa4+Tfd6YEnjOx~jF!Mo`U7W%V=9FJ*{S(cXRZAN3%}}&?pJoSTUP+7R z*cyF{#pSqkFsDkWAFmrod{^FKedv1G4>#X~v;|8$7TmyIrf$KVj*_MUS=YhptM=tL zEDSazjYZ2|9cT8>E<6&~jehsp26K|(gFLrMDsgB+_>24@;@=GAp+a5bge#4N6$u?z zwP+t55uvC zsLH1#$IHo0d|)rI(_m#0ouW~{>u_do6?)=Co-Pf{CGOo+ zDdDerV!T9()SCO^;+49I9a;@r1yO+qdd5eN#T=%u17@ys(r*SOBpg`XV!{w>S7_VZ zSorGtbJL`2S356JXBMxBJ{cYCVofd|?cgFiODonj`@?CG#Y#CP}B z0=i4S96`q`>nCNxy059))~i~_Q^=eo1RQ|D2V#2?z8}PV=?Ot8M>wsK*^IEVf5x3depNrAHNKOWp8~39KhpyqHYN zeSPjY4i7c)6U94wUSgsBey8!9As=&os7fE-a5uV z$Bxd<$~Z8ZXYpEh2|d-n*b<5@$R{L)6|IO7glbpUA+vZQH2oodRXnv?ty5o&lVM2X zdVlN(J)JAA<8ri~k`pYW&QhI$*PdU{r!}#fCl_=~@u_lB5t-={#}-=jQAxu$59W6j zNwpPdaJ#;3Cm2v5e1PLh^_PETakh7lTbj5@LA!jLv00rYy{&IP7v_ zDM+4QP_ezdQx;ZIkTLUx>V>-WF;4x*kl69%B!&(1U$jSOYq=?!H8JnrJ}xgT+)wxA zFs%-ct#{LXcEX)QT>Cn|sDt7;0h1n&e7l)f0r7N1s!$ZX>6c2*Sz zZ*)`W^b6r(hL{S=$?5Om@w;*t)UD`8Z#@@>>vF9RQ$P)zmJf+~w7bq>4&N18();vV zxxr&wUrjt^Z(T`^botS%<@RRcnr@Eb*XC|JzD2V&Yn8l;zww0X!&+MDH1UKMOA6d* zd09=P+1D?RU#1W0x?&u%!{@?1>_8!n8$`DC+(AIO{#EceR(2&GmtQMRXX&?I(khk3 zRU_gBQ>b6agLiLL)fM(ix6Bqab-W&?->8cYzH9ktnU~-?-A5v8KAzilD)fpjx2{s3 zI#th%vyK^>21-%0|6IQkAT`707i0FEGFP(Q;M+}auF~|Vk?>PqpP}C=WRb;JzlR8I znwoud(_w7+{J>VJ^yx^Swso~SvsgX(;}pA&`M2nM7!9?iaTgWjb)J=HGgPp))?C~5 zC3af-`O^6^zWJps!a?RyH9t!&#V5S;wdE|P+KBLPt!QPs-z}{p_LhWX#O$ zZypoZ)Q{z)+RXhuCSyVx+#h|dU`8OrUbWC!@mGO)g`G&4T+?DfNz!hveywE;-yH&d zD}5|HkumiFmV@G%1dU%YnD_gO%Nc(Z#rS$!EM=;m=!rSyEPl&WTwPjE7`Jd;v;F$* zp}4pqS;V7V>p?{k^Q1e$@2};59b&dUzvVhdLU&NYpjYa=PtoyqT~?WxY??_)t&KrrHx{c?!4cZguBj?GBT3JiYt+ea{e;>&30}J1BV)exqxW z2{8c;8Uf|9Y~KEF9|v8*BPTUA^DPX0$3FP$yvDE7-xHjZpLm(B2yyd_I-7;L84qCR zo29*%4>;^*eZfsb@OZ{e@DaWBBCi-#)P?99H>s#fMvjF!&KJrZMTFXv8pi5AZV_== zP8tAyy3P*m1-ZadZM=L+G^-+znhP0=Ow{o^zeCWDs za<=@NH}r;gx~2;}^rnaTJGh=dW0j=a$Wq~0Mib;QBpFv>@8R<%l&K|L!uwEj5@kr} zT>t8lc#1kZOif64`NF1OaGa!M<{#%NXiS1!wxpaaqP<`_ zwWnbN{`-xwk1H7xyW_dTI&p9=O$_n{&EN}rm33Ct-EhwepKjr7)=&{|Ipknm8mSny zuF+z7QrCZ~RB!$nD#qbx=9JOWSN^>5%ry)-em*9cVK+BUM61tnPcji-Iva0aZ=`?! 
zS!bzL(U>d!mm9MW&di9D-OmKBj{ZW$et)=jVX=~08-Yk@q z=>hHzl6RlK6?>S%>ux~(I$6I+Cy44tsF%JJ@m!=ezHV2^6U}DohVvdq?`-84z6_9U z_^aRim47*BF15Dx{#dU{PNwrs?pyBRiYes^>%rR}saLDNpWUV9+ty@>)!nQ!iWc&b zWOw7Skli1~B`UTRy3a~RltoY5nVy{Yp)x|G$u9lj5aU?uu`2)RQhGx2rGSi@+jnMn zrZuBqdcLpI4~>#My^h$t~1&Ll85&UrAB*VMnY}?@F_64F+Mrre^Tt4O1oi zfkMh(xkY}ton_;>&s&?LwO_0`mwvQ&Irnf0DZ9Ux*5S_8myP7k*?IMTiQ|0P%4P3+ zm*!*5B5%?irHmBAa<+Ty2G8g177C46xSs6ZLv2hrBy)eko%}uFMKQPE(v1FjRr+eG zXLQ$%wW--e1yW{c9o=xyd^vcjzGbyCKh8}K*M14icM__$8nI6pwmk1CE7|Um=3mLn z^o+T^k#wFWIVmE9|4edqm#9?$+Fh|l2F?rDOPi{^lnY*CT|F=f*!jgupxY&Ms)6n5 z_=L?@XAy(!z}s;$i_vUh)SUW4R+kN(Px;78sz-Fvl%6kN1*Lp{QnX5a%R?;RI@|Xo zmVPhD>Er3>a=|2F@~<-@Cq;K3JLLn>mnq{9TEkCs8|m&c7ksU}rE)lafpCH7^IY+7 zR&Si=XZEX}hb&Xqy!xQ*r+8)-(=yb-R3Kk$aLe?oA&---n7GWOSfq-=U?(}Lc#2t6 zttrk-=xWdby}h&5DodEv-kB)+#U{={do{y*>Hy5GPjWmj81o6=)r)t}Bq`+%kIq&R z?zPeIot(*QsK_lea7xO{)*G@L`3GICh9s%&kD|*5~A=Jb@~h%GRn2~y#xnKKJWO>yfHYnF@5PDAAevCXljB9 zhaY)b|EWBU;?bgchUtLZRO3?%SEffzt7B-YsC+G@+66Z3H<9n-tw!;)SM}NwT`rv_ zKGJ-0kUVfB?2UClR;#793iIU)n(p-Ol_^@j9|-Y-oqy>)u=|uQEKQ3Ejg9{y!);Cl zjU6K8O^c`3Cs)Q+H(4s&KAkXCCoGI;<2yLM7~&OsscLSN7Z^1_C_}_Fh?oEEz!c|U z4~AkHD^?;-$zT3|5P$?u`q1be&&k-ao!Dnqtb3||$#6!yDwmW~HiQ>CI&G0gm}%XS zwmjDJ_4H$SS@@}t!@yoYwvX96IeA4)^PS^8sZ&zZ!?ug1GbZe$he3AC3jQQkvqEAn zEeR|%VGrdN&(U_%UTDhQQL4^)JM@Hx1%H7jylYxUpOb>5RY<>F+ZGBndTv0_{3o^-{@Ko zmRdfJu-aqkE0$3=u3=$K-198d>7{5)MxDwyo#R}r@f6W}7J!=}hw_ht_j=I7F;riLS% zl?53XcvDmou++|)qTEXT-fdqUXOcvL&0$JTI9Fp>jNE#suKMGjr#I*zXQq^s#Ljx- z6K8118H6RcSqw1O|7^-BwQ|KTZ4xL?e^vkeJl>=0`*M$N^U-m1W_Dzp|+9 zf6R6%G^PcIrS{f{k7g#BfsovK%{;r2X~tf67u6i3$FA(LiOUyX(T=#WaoS4J2``$_ z?v0VUt)%9ZFcYjBWqcQ@D2k!iP~#Ny^~HF*xO6^RV>pUd=E}^^VWPsjQslDnuSZm0 zewL={>3*V;dHilyt4RP&KD$Gd(N4%4~dy^m*{winB? zLs|xJWT2v361Zt{%I|T`UGiheH9dIU#xSgOf6)Jd6fVYGlvtZhm&suWmbTvr92TLE z77h);`->WEAAFq-^2syv@s;TPQEvB;e}$^jFFML)}{j z#kF<&yGRI0uuZVw2^!pjTM~i=2*HB8yN95S1h*i;CBfZY8h2^j>Bbv(XuR*rd(J*} z-gnFhO5h}vneOC3wYHlf^{Sui zgc8rWE)9fCGwNcmJ6utpH=JyJE^Rvfja`*Ms8X5hALbfpqMIoZSS|%K%uA(uDgM{W zM&V0@5DO_5Mf^3Q(rDSqDL+4!L5G#9vBGu7iev#Ia9sk8`nyQV<5B_B- zKRX-17bsqa1nw@6h|%lvi0u55H^VsN#51jqlf~O|?*ZanA8)EWhS~TTg7)=4-3SN| z@e37}I4M`MN1t;E#$%B6F^aYCEB)?KAayr%w$KijPDfZ=(wtF17cjwy8W-5>{H_*UKxcw+b6x(BVGp2qtv2C-| zN}|W%GlM%`@I@X;jI{da7P{~$a;$(&)V)r$ssLNVbd@%S@Tr|ZLSfyh+p~s+BBN6( zherWr%2ICL9C9?Gk{!|$)yn1u)Q?ws_Aq4H#YB(isxY#*RBFb;7BxSVc|Yda7bN`N zdY|qvtM$d~g_V^=wY%B!@Mp6x>Qbv{!ldgevI+|3y-O6&z6d%tUZi|?iQ#fD8MZ3y zd5LiwjfO$yV9#XIyP>Mg2>E4jADknap=j29DoQ67=ueUuOFZZ5#v3Im2=UDp@%-Vb;n^INz}X3??g`aWt}VS%<2-)`Cd_L{1$4q0jx+$i&y zR@bbQU-dNHQNBo_;O@;gN==c7aejH(=R||%7HI=}id7_{@xEh;!io6jvEIr7)@H5K zFbC=Chtl>BV8^2;cY_X-;TsTVr<-ky77P!2UF_t{Yy*T2l@p?{pmSjFl#EYS zR?KyUU!8~vm6g>Xm3;ZefXmb?tV&ws#(%3?=Us62C1)csmU{T?7he*bXk~7_x2Yn& z4@v74sS%rmlo$I_AD4yk)l9xVuDXD3ieTb6xkQ)B!(lJ(uVgsilyV*GY$EIiMD1m| zokju*7v=I=www95{N&TBcYZ`C2Cy-tZ~?y-Bs@5WP>T`tMK2u7{|OUCy2)|%_ytuM zlFC2D@{P1o<>Lt$QdO=JoH|N(UkeNIV3lTeWIdh5hi@r2^o9=2ZNFg`K1U|ne~SnO`}0nB=v^ysC7dp5>P|Ai*E@7Kf3;`$jb!%aDsnsl zkC{`|$D2-ivQlW|3^85FcxXI#gA&Y?dRCf{!R^T**8 zAj##meuKf8PrS0EGCI*bDO2@byoA?~)T)QC-nSMqJf{78LQ;N?xD?yE#GZZi(5Wb+ zGLBEgMKpzAasYNrW?k)3m08oPG{3EgkF;q19yibPS1De5k)^`a6xS;G}~_`{*Y?i@{q@Sib>LBvYHMkg_OI8x)o!!loq1Y1&<=g}KYnU5oM z@DnGiuR{HH38mWo4{A&plO?||)<)@XOM2e2wUy0WfLc~A?b}(`k+?RGH>#V zX?k=*eCBg4Gl5(URY=!F)=CkuxSxFX+S?YR&fv&0-QbhqLpXa{u~S84lnB6GW~pnG z6yapIG4gso4{7t8j8WXOkXW#;`w^d_@KEGL9F3RC4!G*%-He8?&yy|t`}ULUnLnqt z#4r@H_u~&=(|EdANC3SbmIUg;vV>?cJlSegI{gfsI2nsl0LE(S<5csrS%t+lnZoaNa{f4=z$67EuZ~XTYd&J(Q$66y0u^F4B@gwIC!ZJ_L%$B9NNuh!QCv&q 
zpNsxaK1x-HOro+D?F9d}9#KrjuNo=98?$DzKM)+n2%Y0cql;+1Gp(ZFYx@a_x!2sr_WFIR{q!1c z&4vOEW<%XGHn(ISaS=#18aF%kKyvGQm6DCN2DH@qieLZH%#d>?Q9_`dY#d!7w?1Pn zec3O*5=EHqqu(aibJ_#xqqlgzbh-%{-~;`o)b0R|d9*LSa^%{NxNkQzKi3Pv6`WVa zigQBikXbF;%uDU6pH8#S%s0ToOF(OQU|?|cRVeS)Qu**7M~?k&@(Qi*YgCDG%B^-E zgQ<|$@7}go;76ccBH6f;^4-fnCUVhu$?&opdyLW*=nhO(8mHm~&V2NpI_}2*e9Mj* z$@8)X;%6)Eb!cAf==gUB|9*|}`2}=K$}0e02`$KD=_0xc$nMl@vj4#O`E_#x7Gh^B ze-2P`PbtdI{c!kSbN9dA-oJx9qQsU_)m}lH$+)(iyC00_n7`^jmOqSseyE~e?Mf}ey{w)4CC(rq1|-UwY9aI zVFBNX<~O{&_#`CeX-maB9wa-S@iQUkINm3El89oN%t?|-vRodZ3#k`9Ekv6@A0~z_ zdhXQRi)CPwvlC>0PLz_8@`Cd%$jY(}75(vfqkl-5TMStGPs_(hj?`S*!p;S~xk{CU zfNYUgP5ohQr3YrH zYr^t>k`XceJ;eUgmg2`mDIdrD z`!xnWxrF#f|A!F)MoEIu3rIfwkApJn|6xG>Id;LXuVnslR-q3a(*JQvU`$>;`@d-_ zE)byjf7{f5T^f_L*WR;$C-H}`77+(r#(zJA*x1>lI6&7!*c1X|2;C0p`=P(xj;jCs z9hrOpBmM^|=SM?B192OIzSo=I$37C^Tq3~T4M)94`KgW|;J$jk4km9yVEkjU z#C#1856`Zw92aPWS=iXvIKK=Tm4u8p9(wNx_OcNPqaB@diu66G z^#9ScgaI-TK%b)W4FH=(RR8-l`#>kJ4{TBJXbI2EY#|u)Z#5JVM~ED-OUW~_Q_t(o z#A^U?I|?9>EG;ZJM~VNQQvdzh1^{~2b9?~EtF5gM$f+s%!UgH0xGUFJv_jh^e!ruL z0sxR=i;9^4x9joW-{tyzjKAX4=+LOsaRk(az(4!PEs)d5BX(6#SQrX`hNa~_0Van# zfOetD`{&J@!y|OH-GwKgwv+t0{7|63}wB77mzOQuBX&;mKcaSI4V7>`hzL z$SX{M*uT~Y&sYcauD$d44+pS^NT94oFaXS+5hzywpM;wK?7U#z$XE2mn<54kq+E2Xy-YD~<}F_8YI4d;zpLVF2M%MO%9a$not17;ll`HC8~)Td&(i zm>Hly>D6ov`-fG_;a5IZLXN=!uZ%*V9q_s5n%&ABzoh=5T5bTWDVH7PhvQ)J7v}YU zs*tx6sp}Y~Em{B5j(N~xP+#DAGym5m5{6ylBJbehk<|;Ed-_Nr0Fb=`u*0?z+wSNA zh$tDr{pVqy$%>(scA8Y+=K?Aqu9xTm!KQz+gX!w`RupCYD-M)tLZs$t%Z%6*Q{ASa z2KU4{5l6Mrfsz~8Su5EOzZEg4H-VM{O5kI>bUx!=>>zAXBb^e3GVslA$Pq6uFCe!G z(rx}KDbN3ho!D@Hx%e-8SnWZN@$U~%|=5r2$yz*{*}X+7n#3Qia0j5=eB zT)Oyy_T$6Gxe_o)2H0X`20Fj$0pAyiAHnKI7W*ITzcD+v^c1nS25aRxtBqO^X2j+Sb3b7slSG|A~?wKxN6JdIY9)= zxm6{iRa~FN&)7N@RitHcu%tX#;b!ACd4|Yf^`2P%WrKaJrHn}2p=dO<_Yn)-mC)Oe z!z(4eyZzUS<=1H*Fkd>2Sj^LGwM3^qBKQaP=GuhS$$PBYj(Att21#6m$|Bqx7Tx77 z5S3S#ks-!xhoZr5z1G)u_%k|m6I&I*AW~i7d83eAT@uq>9HHWw4-s*+(ZKIpRO=Pg zT_03!IPhZtdmZkyBBsv(J7Vu4b~BmzV3I~S4LlG498;vD6AyqS2PVEBP}?M16BrHv zpf=y=%y^49;vbFG8`RB^uri`4#1lLrGY<=?>ABPaK#a0FD{n^e1#QX5$Y0-s$QzFE zoEsiUU`A^keU7(SnoAGwi3vc0QOL7!)kCBNE;)>KGrX50qF9cxz~&WTMJD9Z3tj7J zO*e8FVv|yW&vG2|LR`WLGOH|iJmtZg-n)@!XbV;H_EX2NI7+M@$$KH9=KQ2r4DYM; z3{a{M8@mNmKA|H<1Ma?sg-arh(H(d*ozl(}wdsA&d`J z=6|7Bgh{tU<;|zwcrl~h2*^Pjwo$Jb28ilf~~=@c^=NpFlXk49f&S zi0H?F1sW8%9VWI|3O%jN%i~%9ZN$8szkZUII5bd(`aw{qMX0A<}lNTo3OX-Q7Wh$ zYWz;hA~ic!HnTeIqduc8*SA79q5kxgFy5r7xunV*{~O$+5E5^nKcKcdq=CW(KhZ=dhbA)TNu$ zM-QjJLDz2Jfer+_p&j?}IMZRq?YlD}(=H{S4DSftzSTkZ#EPQ|&~g{d;C7;#RGv09 zkJ_jU)-_YG4F?2+OkTN>e@Gp$8|kc zMT9YjE4g)Ef|rFnukD7qcG$Pdoc^_K7TZ8|(pS%a(ltSSj4%$4tTwv2t?b&k^C9Lq zxD9apf;ej`EOq*UI54T2nIeNa7b?J4NupgK(5Vl?-geH2Q2jcZ4b5+RU(6CUj_fz6^n;mhhhtaH9sjJLeG`J?ctM0WnU-jGXx{cV z?2I&&2_p5|PZT4ZC56g?nc!LvD7jOSSJB9oKJf=G0KM$kEs{Su0D#0B0M;;VUgkIu zMlLDNX1{V&s-Ay0D!{XnX9RWA07Dv-5mhFHN^0uT)5dlEXby@V&EGpOeopo68mWHB zQ4w34Huu7d6z3n^MYT^P{ zRPFSZvaHDIncXR`#21chs-fGC{@T_oU@k-u^*@Tgf_3Kr6zrUvK*26Wx+d^+Jq5`X`q9PkYhCp@9kVkrk${ zhk)2GkWv{?V$B$7%s1=O2o7=dikZTr0G{b z;GoGT=#bSirXo^=J{8~H2Hgd=!Qh0PEcdX&RrFAU5GP&gc&-^k)FX?P?C4JOw2fR6 zFH}3Xk?Ut8>ik@*WqwI02>a->I>)c7k^8zH5tMHy6oYG)j1#+ml_`Gkh~v0(y{X2O z`UEQGjvBE}!KAX)Cg}?=ULZNdpZmH}4@l{5%X;;CH=Qcoj-a?8D=RAj*WGx>Y7{zp zdM<#D&E6(*gHj0;z$EyC)^@CVl*}1j#RXFuyi-EC5a9blP6C8cV!pJvnox_+$J(bY z+5H*uWjx*9f#qJ4N^a-rqXE2lbiM+Lm-kAd)<**`?G@+`AImf?zS}`FuC@_A(3b3w z;=mqy#X&oUp*-~bP^)8tm4#ShvCVzhiz?F?wI$xxT1>-0dNw@wvXRjd?WwbmoYI%~ zuVpx(5w=Q|9HPgB)mCyEZ{vVDX}*g}&ca7zfCVE}L7P{+-6K$wY#GDze<9fvKOi4# zD`u^qcI5ferT?9*oM?Hd4Cc4OIUUrbq&Md5mJGjoe~T2R%S(8pLL^DSUeHD#xud5m 
z&wu;fD1bIEEA6@NXkg~&-H2~sW~Ydr8#Q*=T_u$jD)fYNwC(OVT&Lv0dNJWduy@zd zaPkwS@m?3ma4lp^EF!cDZ?y$<@0+OzO5{omJaF69zd8x;>rA+bpcHoj&PoCURp$O` zt3#M=JRES<0X>AQFBp{b+K%_1)ZsM%@zd zL*P<+B#HVb>~7-gmw4uvsEF%)nC_ak>EwBZamJY+NW!)iw^OxIvx=a>=r8eB#xtb%eJjhYJ`r<(#?o|~kI7F7}y>*|(iN~-<{F*}`Lpm`>j`fJ937gOnAdA@SEK1*KiL*F z&!;ZRK*}YJZFSBXTsPjYuB9N8I-p zXy)#LZIu)SWjW7Z(xwhuRDL;;orBN$didx=ol)ayyg#nFRpg_6D=hg^s+}W8$+Q}1 z>)Z3`GKtV^_~T~-@8D$F%$NhLPh~|m#?izy%cXvl&JUdpo?`0|#>WPYE+`Bu-y#zw z`KdPm_b`xHl#yyE7rg@esX~>`u6Cc3rHFZ7y(4M1qL|6}#f29#8FIa?1FH%EcDUC~ z$44-eE{F4jh{y`Pw{+R5J>%81$pysb+U`3X?ph)9$Kr;!!(4hdwzOmYGv?xvl;^Ws z(1@s07~~Dg$#7FKWtTQ@n#mp>%=G+9QM0ugpWn}!8|6qEXaC#U>-C_vpx@urX}L#8J-(f#TpH_N)K)y5NHxSl)@BS8N^72W!l$V4?xS{8Y3&m!%olh; zrqaPuOcs5$&vkOw`@FrZ*^H%p`m;#H^7$&De|hbd-eT+PjG_cR8kJmGc&3Awh}-U@ zv~r?PULNogk37Mr*U>=v@Uz$bOl_AmWe~Z1gth_v(qiV(x>W3E)VS2((ij31K+u96V_|{gBFNuhz`pBLdd6wDFV|-et z$b5`fHu2s}{upUzL=!qdx>5GE7s_C`x11ElTLJweafT~ZLF%)d{XjSS+%O^XgWoWj zVftPw*>X4?|DFAnC2tQF_xst+|5ihD-e9P<38i;;f0$68l?X@;3+DEp{S5uo)tPw{ zE&o!JXfpQ&z^kvCo#qCsLA3 z!$)%So^R!Ia575$992Nk2=x6#6iHozw-`AmkmQ}S{>Nb1s<+_Hn zTwM5jYul)Nhwy5zJqUZaGRf)VJmZ3Z$k-e^bSDB%zNd^|n(qvD!TIrzl6Nx9I}Qu+0)&+Z+NOYTPTf z)$X;pZ^^c~+cn^$uBn2w?Ss2$=Nj5c(kl_Gc%2^dXktRcw0O5@zCJZPH84+Huei1r zpt=fD`kQK>yL_uL3a78OGe^{Xwo2Nx(CrW!uFjEuNSUx8g*G5J)~+}DO(>ad8V3)=CwB{@IiM^8HKe1c1LT#gv! z7yNe2Ju@t}*tSG3vJZo*AU9LFH zeI4yWZaG^X`UIf!I7q866$rr#(tS_SsG?Fw3Sp}>PqXdn4rw5~APyjTO#lEJ<->5g z@ShS-r2LeB>Sv-f065l&>KvjA(1rNthV}@u@Zh0vr3G7TmDyOknYdGY<3$@+ttz@M{90UJfQ-S5KtP!VrA<$Gy9*fPv2t+m&H0$0Rf6sJtJgV@)0v-~ z>p`r~_+~mV9@?w}f)|)E1qpiN>O~hY2#>i&1A*9pM%PSXMZ{n=Tl5qc_?3>;^6$OL zyFl{UOByEz4P*W!z6PdL^39COBtSE-eEL2YIr5?ncL^)wz&$5wAj`m;zSeLN0pt#X z+W4JrsTkeYV+XW!o;P}z1E+X?=|cgo0DmZTvp`d&HG|AHjkueV~Du$!CQL_qaO2h(~x4|{x*5RC~yDkz+7O-iF45XUw{#&?f| zfyM7{V=m9a@;}S#z2r)0g3Y-9X!z_H&=Jz;M8~)N*o;Xlg%w(nK z!3*#zHd82+tBo)vYYS*^*BxW|p`gr$){fha{jQ$^TRE|nTCJ^Vq)YK7uY1vRAj$Zf z9ZQ+>s=JSuRPPWobr`E}rcQ53nEf7s!sOZh?XfcpGS9w~T!#OVqayMEC(7yLXy_JruPw%TDe^ai+(Y3FXbD5&P=R z3|M>LDZ=WS1;hj@yuJEPtvt!`w8l&~t+S&Y#001omlPT>uV<}~SG1@#AOX%D7ba}% z*>=C|7a2emDx=?7^?%6{9ta-EB4hk8j_siM*u*%FXM=p8&l8vGS&vp4=9&rGdvOX4 z4MV;|+3f$80iQf}FrwJ1}p_2#F#zEf| zZf?}CdwBFW7vyj}1%T#0^w(D*Y9R}{FndJpMDqH>o_UKC#fnWH)JEfI2`k9~uAVhT zqCJtY_p*H`k5N$>Pe&9;%64roAQOBz)o-*lrNyS|FQK z$UTD6cPFDmekZz>3e^i&`T>D8GjgA1YVV?sC%Ig!i@)=cF`ECR-fgX5akdwddOU!S zK~mQ<)E2s~j)i$VQU3UNqZQrZzTEMt{^yDH>cw}CoEvw@9-E7)EHXJ3fE%(9Q!-ad)n24S!l#3@ELUEut7Y;`xvG%_fKwl&i&S z?o;mB)1vc@QHv5yV&HH1Ts$1b&NDacLY*M0jRlJ;A0KL0*OOf~^a{3iuEF<1UU@-H zDHR88nttn}-4&+4ZwlSx8#+WvLsIIk-!Hup@jSF5*cgpJJw0rP(KxX3x=z{jcZ3BG20wfK)RSQ8UfJ*HN*a<+vwx&i4U?Rf z!&v&Ozba^?`^19?D9W61osANk)b{E}uj*15ZcRcpYtEAH2T6rFK~QT@lc*&4 zmBS^G@@F!O?T@rDq%aGQmkM`cuut>uY6an;%+t$(KqW{$$MWzida9+|uZJ20dmJ8V z3W|hl1I$@#3NTo;U3F`EB&+vi8~BPu7k(zSB|{k z1zT9134Em7@I*=Arw`^X=fFv#emE{1nmRjoJ5bFd37lAGCMg>^^G3zZc>V>WTsK?& z+i}8sd8{EvjEVddA*+mm?ua9*ud1Z8mI~6B`BZ)K!t{181%oFcrVnQ71*3W@!No;s{wL#IltNj1gR_rQ zv6AXNpGS%M8JBj{3TD2Evu|C}K57SZ`K96TcC0%$GAD_XwM(v6y>7*?MYKd`w9^r+ zK&5iIl6Fk!i8{EIF-BAR=l2&ozHo3lKHD*b$o()C3n#zJ`;D2g5h)A~owVdi`Dsj>?z!{t6ZfwRiVTBC0&=^|iW%vHVAy4rc@hlxNghLAd`jIs}fyY^P^<%Al}_~{F=~2()v+pZs*Sqd&;j9tUU_ndbwCs@0|iF8YPCyatJ+e?QLTrT=l?Z8U)jIV+~|e`=M< z_NyDjnu{yU6!e>S*m!@#JpE~O7lu_~TT95PBp(gCHW1n@_ECIz-LBp1%oX&ZaCwU+ z`y1P(sxgO#U0C9che!oGtLZ^W{AUO^N|H=JbJktun|6r-#V?Xy;msf-j9j)Q9+Dgb zkbp2@&3KIG>WZG;1lc8QdRvWEp z4yFRM;bZ|mHhS-dR|*X;1Fy+HChypn=xaesWGS^o+Vln!uE{b+CMiauNeQ0vv(YnZ z!C5mPjlV4@&$=L%Ey?Gm5YEX*pWkgBR%|+|=zRVl`gDVkAE}+xH*DUs^ypVC<7^O= z-lutR_S$=5gp`i-a)jH1A$eeRav?jghPXXJai?pCE;$k|l8$+J!8@jY7`i~O8qOU4 
ztRTW2><$0zvMfLBYdtWrg{p%hPl^bHoX!3*4z3zw(FNPjnjN zPx~fmCyXZxD155>5dzsB#+qlCxhUUO+CDO+oAwYUh$Y+Wbnr|n_$IDJ9#C$Spx>%& z8eEIDi^NUfoZ)O@5y-lg=DNHSrjD1T-pDN}2=REinIUZ4qHEQA`HjfcRgE?M#b(qq z?Af>^nKiY6xxB5)p4b56KJt=Qwj1K#w0pHJ)bR9KFRhT|v&i+a=%%~3-0 z>f;H9oWNaU-6=3Za~VlspX_0d4zq|sy2_wEoURrev1ZS!fVbf5hlv++3)&EEFH8u{jbF%BG`q$SEBrBuXdWjYjSe z9-ig$vp8qN!rAU4h1J?@>^%nnxRR&}UYFBVX(bLPRDRH(XagI)C}Shg`(bB!UXNy( zf$3n$Y&Hdg0ursnkq~zsL@8@zzM(=nzTj}gY4=3ZXoMr10)qjM7U#}usW~F~RGn|? zTMA}Io$R6~Pmwp2M1B$qb%1&{m!y9YwzTGrOkRGg^R#J71S=i1(t#eiSC_zsV_iv#5X^ZKY4PJMiB@da3Ctu%{ zXj-Fz;9oBoT<%^M+h1c5oQc7&`kgh-;By&(zG&v(0=8%RqdXiZz z?7jMNLdj9gQ-}V6V#9Kzy8Ur$Xd0)RBo;zmCipOL=&mdG_*rUQ?d3~OzYj7f?Aa!vxLs&>@Y54WUq_W*ZaP^B&sY(vl+@JmKGjDc@pWBo zKN<&+rNx(=p)n2Xk9A&C8N-9KL!P%2JwGEmfQ#L#Pg$@Y-na=bZVAYeepTo`$AUD5 z!0kN!r2~mx3Pz{w@`~{8YbGsOrU(CYtDC)JNKz{ zCG^Y1cET6nUhxljD}~IRJ}SiS+PlW$5!>4yy1+P$4m9n)Q+jm_P2VX#%AHCpB;!5B~>BsuK492#0!1wKwxt91u(5D0nzNx$Q_vU%z zi*h+2p4zFQ>A(}E-9$L&YVy$hb*i6^;9pC^;_H0`cuvN>&GRLho`q+ebS$j^fn}nA~KQQnbHtA>hb3zv={B0T$9!u+Q^mW3C3Y?&cSR@x5L)O z;HwFeR$bD*P9}5>PQgp_gY~9N;kBTnH>Vf&d?%aEsaIxN1nPzB2KD?Qu3&+xgi{F5yR6mf%p_4YwD z!yJ!)RYJCB?houi#z3mv8{Twx64~423hY{g^rWtd#3F}Qfz0?M*XIP5OyizdMTR({tc%M;And|n{d2_bK`Q$w?xP-&Z=$guadUuY;W-)uIYA1J=qvM# zH{khBa-sw}B>7K}AYmBm-~n;%NG_SaA_sT;4lwC&PqxZ*}?Hi}LQ(Y{k{Myr}$ z`}c#JIXueuLf<&L7aQDu6CHo%VgFt z6!qGDRbkd?>;7Q)A|cL4jhQDQe#NQ2c>Z_fO)-!N8-+xgOt&)P1t0pQ9{FYsJwCQIc%WWVyx#5 zLAm$hs}EW;3>47F@!+e1yvcW*{&m%mpv5n2peptWS@Fcx{>$h1pD39^1F30}jFG08+q=dIa;vuRAepJh@OSu#CB9rd1ZX74ZibMAoo-U;>e zKT20#bR4QBJC3dKWI~&Q-!A?GiTP>9yC=aIG_45({-y;)i%Yrj>W!~QY?&HpzWg0! z^UR765lL;aG;8lU9RM~nNR+D}%QpdY$5j!lDqJjDRLdWsIFuav`lgW?N0@`pLuU;l zzk)GwqQoc^Fd@S0`B!M-^=Umn;MZ2YusHzBvodt;aMWFHi00GIK5H7{lAZ0wL! zRb~wf)ttqeUFqcF3D5EdQ#n1Hf5Y-)W^+gczXev6F#Ra*Ih|33f3l66PO3XdHJC2h+bR@W~BjbRClf@OqYpQ+M zb2qu++@d>{&C=Rl%1_U1VVXS34{pM;vvP|JTV_!r_vrkSa4=x^r41ti`Jy`|op#Rc zDwRUyM&WY5y2^Xx*S4iR|L&C|o`J)zgHJ24#Ztt+wz`ixHrAilg4cMM(RJ|TiEVI^ zammAz7pJBSecQPlODCnZI{b$M%>t=C!fhSBB3J?1R+G5r8`EnQZkZ=gVHABD%kPdE zFlp*aAC~rcd4=e7=a1Su$@wz+4jp3*&!r{Sw)2uQ%XNR>HqyqH`KM*_o><5dz`S!p zJ9_mNQ701(ZvqtV&fOH2kntQcOeuTd)^#AOYW)ySB|B zCMl_39c;8sE+@p1^?Q^*!~44WY=COHC3QpQDP7G{@4$+)78I=u-JZBMdf@p(f?GuN zNq$>Gdry$8li4%sto&rGpc;)Bg{zM&>f5oDYOccO*J) zKamDrx|FEr#oc4dX=cXw)PrrbK5#XLBHe^KKkC@yQUB#8{*EdzUV(;opcsw#y|Ze^BasSb%N z{e}+Q(-R8*^;aK27P$0!g58}mH!;2Rm;?Zh8G}|vR<>EOEiGZUm4KtrFYweC`2d5z zF4TkwnrC)Z*O#rrg7d3xW{?c4{^h4_M{OP69HW*1IK7km63=kZnlQ*J<=V-#LmU7Ls_! 
zRmNPT$I%y)8d(RDa=@nVW=%RDaS+PyP&U|Wh357%prx=-L#6ul{seSM zYoNKg+KUp_@pkw!1Mv;yfKMb~eK&b2P>8~9Uv=%T#gK#gQ&sgEVl&MmfI9TOreMd_ ze9=v-bPqM21A%iO`MetL!g~B<7r-XsF)bv7^L|_jb=3>B2iJdBP{U`bKTE+OfK6Oi zI_SyG>d2Ss6uYb}Sg0{XRiJ=VW{V3n>Is|K3ODQG>+b%wOz3LlM&7KdoZ0 zx?x8-wg$Q%y{uOd54_!jA2%c21l@GUKceArZcvm_p&bi?aO2q`tUgtu!#;b?cF*N_ z?umPMohXoX=iol>l9l2 zX3OFG)r%^I36lp`=4@|DeLD`)Pd7wV_V1KQrMGPA#x6v35h9 zzzaIt@+aJfs45}MFrosJ>3Sp%deN%v`hV|-IIbGstv5KPyjwvvk&ZhJN`TlUHtn34 z!K>!`xOl1zq495Tj5ix})Z)eM7<v!5?Kh&J)0s1u4t#I7+*T?O>j_nLn}qQ(mx^2m&+Ivex0aeU>FHA&)kZ7}Du zf#LFMP*+Gecr@b%xfz@bix!;HYRAwA`-VTRohX1=n%NA^a86?Hv(VXlSI57@){<}b zPQ!QAakbc zsfXImSH==h{edp$&sNix%CepmUyQ?~@}Mqzk)BqVcLc`Lw7}MajQ;A;L#?o@+jUd+ zf;Utl$9&F9B2~Z}Uh1L!RL|!B;_a-1;_B8t4Ix1A;0_@KcXvoaa0~7<65NB+XprCp zcXto&ZoxIUH_*64<2{}4oH=vvckZ3KRZ}(lkM3Q&R_|K9e7);^pWn)l1Kl2DIXXm# zjsw!Ds{3oyt{%7Pyzj4FlpdnG>)tXFX!+|TDvkF{smR+fKgiKz-f*svu=P(Q@l^G~ ztc|weo|sRFU41aPA=pKwwJLI^IPQ9cU}Q!4%QN$EGrJer4E2OOq&`Wy%4s8f*H<-< zm$|m&79rQH7bC02vmXqn2fVzic$tZ5cs53O{uBd;L1y18eQ0|!wwDpD-HuG;#JX;q zLwBoHIeO2D;s@c&s=061^4Fl?6$V+4G~Fa#ihmdz3TVDA>nfShx+vrQwDXiVl|-RC z5fy6&JNdqkS5V)`F-c}%COc&GsbEOv2??PBhh|3#7YHcxG$a``fef7t`ajft&UWpLcL?wmb@MB z6}&ENG>%_E)BAV8F976*$v(UgQ{wp(Vue5MY?{Oy(JK}a2`u< zUy7^TwVQ#kpsD;Pm}%KSQNdE=Z5EYQF#K$dwVSniCp)1o5z^~(*!L5PwrNdE$*F|L&xK3jCDcK4Vgkk2CD3@YrbB|?gxMF6RWErOD6N7XOK zPaB3Of5NZQ95qJhpEafoedA&JgIUB=;6k{@V_~wapDJx%*_lw@MJyC3c4sC%WOv$L zE++S8V*X**!gS{pzHer+P+~3Fc*wSN+K)Y04`?0d7ucJ!72Cvuo0w8b?b?RH8mw zu&k^P2x&TnGpCV{CH#sfgE^qF(Q6fC`UN*}YH{1XICnQie)RoQX%Fx9?i}%|M#o99 zV!E5Em7t{`P>SZdX`y-%4YxO#iqr~JsN7{n_uD_GLOw3p@Kxb)=1qo{{JXI`+nb-8MFT z)iRoRJ@twXl8$ZJQ1ol@)yc)U^ydxUwvYdC??81pz+1L97ACeC;e^)}GyS^uoeXP2 zPKn&4SA_D`^ZX9xI6jU^3O%pr74E!ruQe1r6@nUqfN%g~>7TWriG_1U$P^q11|HmVYe(15(2;3_s6x!&7Jw!mF$;!3>*hO7+F8PnY!RAVFN(txG!SGKP=1xY+*N=hWoDwc^j5vGSL*e6P|*R>+P^S&7|BDA(XOH&$^PI{5!(9kDnp+rqFS#6 zhT&AWnyK8U(xfM(k+VG|sKzzerZLyhIFcoI*!d8;K6{y`IE8qmc>hK+k+I zQ-vvuAuaH$&^nPF=`a%&9c!Eg+eekegUX$?>kxh9wYqUhnp zL`#FoHU2_4KH+|PA+c#y*{7{yf0qJT=@fZ%QQorG{lYxeQnUVXX(n&kr)RY@V;l?c z_#q8OXD&2w33pU!{ox^J6)j>3|p6N{NYl#X&F-M^u=#Cm4xz;jK_<{se92<^J6Z> zH}DS?wqv~_{Dk8<3uq`#yRnPG4W`(Z(Ta(GnT5_dzlPim08=tvZ4BUoQgk)_5M^q?p=x{&EU zM(DR)^6oTIk8x?z2nh~DQFiX`V)tlN!*hj{Cn&-QE+O8=uApu%(c0C-an$c=iO?q$ z*HBN;)?;poHlvx>QwP=;ZRHPpv~1(p;n%>L{f}vN>R_0WXI=neT)2il`KvUPbs)l( zV>irA`)S^l6`>1mw~`+@(}n9kw$W=O(v{<}#?$BVD>b@kT2=x&c=+tioH+E$R6L|v z*yfHyv#oyzq^L3iM=K)4T~3#0(ss2hnH__{B#QZ)31%Of3dQnx=8I%MBRdJhN1tZI z#ntc%%34I~I-k(yeinf_YpT`68aBp=Zeeb&w%nBUkb=gD_~!#dh-L+8sW%MAV#+^owZk?+Lb zi@Fh*kC9TXM4}h;X^PzmXsnx}z3VKKGdQrJ=js*I5$v;(MG6vpg-f=@wutA^E^$c+ z{p@(^o=ni-t1Q+yCv_7cbjz8k!}l63@@V+MZv+9nUE`LL3=tSa&KjGTd&k|1`E5;b zhxBV?ZFLm)xDD%Mc%}WXh0I3tyG|`*EGV}MRHXetwqbUQ!*%l95>dAi z{vA8>!obNubH_IwP!9|R%BuM186vlC^(8dFH3ER?K>H^V< zQr?-dU9_Jc=_K|?{Qm78VJ~MplvgZg5Gb>`CeaaW1#UyXXqe?YlwE*@>NU4d&ifsK z8QBUiq-$wPR%?dl;n-I`p6&3w0tU>4&xqlD1Y!^$Jdn;l-zUXjTwqX?W;F$@92?D` zf3s&lEsmJJwT20OXtgvigq|xMEY_x%@Atd2$2EJ2GE|cgj2o5!S`&R+>vsC`cO7^z zB9-$rQi-1ud`;wgI!Wo=sL>YDyzK*lvfyfNnDYV5!v4|$M6X4Wg5+V%(D3jA^S(83 z`J^A|r>}sv7;`8yzH_O*F5w3kPqb|A%Ts6PcC82h&{4AWCxccSTEtDI8xmoD!iju`PPWc05k?!H z?2fwEQVBwwRl?eG{Q<-ae4em#IsD5O27bA1^!a8^ci_mp$Y)8PqKGX1=mUMR0ztV{oZ?@2;9aOc|KAM()2d>U`%y4ow zm=<&&3W0<5#3yC09!FaX`XDvY{fe$-F5`a~&V*L#PichT#P48dTag>@++^X$Lt2`v z$ln}>>b&t!M~pd!%b2OQ?yVJxp6&ajLTN+ws(8eu(r0n)24`S})GPEY-|z*udx;C{ zUfpF;Z)e-N%f}0S(k&}8ON<`TXWm+WKbaWNhe>=2E3jS_^oS)lR#q>w{TDM z@M!Pixfaqx;{!x;Wf0k6C;!8vCuHaGKJjb`^nV;`Z;B{wPse^eEt}XN;_PM))@NAO zhIabt0@~6uNJr&mD7rR~pgz!c?2%u!Dc4t;3!22%x0UekRy_oOu7GW!6aO5LnY6HK 
zqK^B!SmtM`g)fc!j0o@NV_RW_n6Pxp`aml|`ven)8_8hT@N4=fKX)RxMTlYL+ zzl$XOwUs3GQ{Xgrw>MIsBR!H0eS5gO%Mt2`h4MI+tB)MBwgyt7XN9U9ii~|PVCc4H zp94Ewt`#CF$!=Y*5^`9tz{wXqj!gCNP!2rpc6xQIYI=pvnM5Cytn4Fkrf{zDE4|o) z4NB5gT-V47=rVpSd`5$(X~@_vSGBO{yV-UWu>Q-&G5bu>aOa>uN`sK4pL5kF-c<`e zJ4}F0SU9~1t!nLC^~MLV7*s|>gWxae#a6ddUe@U{G7-=C7;0TMSC^FI=6B<)0e@*& z=&~gv{RSo5l)67%bj(`^Uj)V@ux6^3-L8QQqu8yU` zwz$wij{p;DhxA-I7~OQ9%H=4GeuHe1e+g5_OVity!+r8CQ*AJv6($hWtSCq`q&==y znOigSf-pOG3LI#pPg(9RORa5Hx-?!&n1BcNZl|>Yx$Xm=k{J6va}B(S2&IH{ao26g zl4C`=%yiCB3g;gjIg@;C!RPm+cINxrnnS=s-eHl2U~8dd1LCS2UOnCFC?>Pi$&{ya zk*>RmMgL8{kNeZ-)OKxJL+iXrjdr7!9$5wR7S=v5H&_}?GQfdTcl>}bZ>+nQmHKa* z{YnYV?@1ziFLw7lFtlpIZf8vPlY1}6qbdol#U8C-DqbCZV)D{&>|fc`CUB}ah@tN= z4u&HxytvE#MNIDs6KS*kb-AZUoc_n@wKan5S6S~3wKJL7wB~bYZp2SDCXGx=Rnm*c zsKhFhM6Smd`wfom*@@3<$6}`+Hcg-D_eC)jze?R*BHEO@&tSI~wJ6q|)-l*ex;&Fu zIesZnZNg$V-*UG_iBw zKBhf$X>p;$_<#p-`B|>J<&c$NZol1BbR%MW%fUGbn7RPfWxo8 z(vAECB!tgs@md7*b0IlA;(}%?9sc+w_3vbHb3$5=Bhbgfz+eG{+lvQstwzKEwm=DM z{J=Nu0T3=N=Vd=uO1q z?%YN_FMBCu8^e{}KHTv+2wn`4*>=O6+hGb0C)8OcGV;C8?gBi^seJPFj8lsHX^LSd ziqu2s293V0gUE3(4rfR2zOMu2P`Zwvlahek)1gPy?VOM(e`sB6`0Wn)fD6ofF(>3` z%FN>IHnIK8c$|xw##_mFPRC&Rhtj=+*L>7+_>eJ;ld|PtI_kj2C@0jm8gNr3_JzFUDOc|lnrY|<|f1$ zGJml*rF{3MG_b+*c!HgJHZ)FCIv0b)^LzAM4TELHi=W@`TAxEbE6L z-SK58b!1j+a9Tm|FVCM)*eN7TNrZK{&x#`mc0|UUXS*$IKykjM(Abcq$D4Wd1#llL zLgmGVO9Ukcoa)Mmr4yqIzmQJ;aeVm1Adq&R6g2jO%iuo+3BgopJN7c#tAVER>@ZV= zC)SJryDdy5tv+6?en$Je!}g}2i~8}wd#BOxqbn|-;%Vwc;1O*l$-K~u=zQqGZxPDo zRl7okwBUK`qpAk*zvPFxtxQs3$ z4mjGKw|)g>^PG_CW%Fe6a>U-xj^_Y>zRhy`aV}31^~ChEp6f@>gQqA-*HzKDabLA$ zBC{3XA=6VdmMav(#s7s!xgjr@=I&c+J_NRVdlJt&xcg;Xz2U%5Bv|(ykjK$ zBYt^$Z^V2mt+>S4ij5}{m283W`_{Q#u6)m(=c=0eR&Ksd6@c{z*g7xEUqs*YpQg2X ziJKH?DVMecN3-`{=l>C;V@2%!1Kqr$gSCTgdAxQoHTFKa&&?D6%6E{( z0hs(nKL*^0slwy!eETy;b;yQI3a^!{d>RC zuZvPzS9zK!{V7C2IT%07{4ES=b!B$wLonE(@5t?Cmp;?kmxYm66|^sQ0#?>|JI%y! z3(E`=<@JHl{HLXHnM|Gpm zIm*DfC@eH4PmsQFdA^9?!9INHyAeZ^flrF({tqbPv{?FJdB*zohB6{W=#}K0efMJF zvl!XpVj+L|x(XBz_Er}deeS8xLr?gM|Ag9Zg#s6)#5K${-dKs7mF^FE)W%Tc7|q`!W3_>+PQSPc!=u*R)5~pYmIY0;0K(QvZ%;{GSsmkEds6VZmDV z^{(X=W2Z`rkm}5n^-~$|eJ%X|oLN{#j&c5AME_@LVcGugNSN2Ct?plrv}(e!NX`G2 z@he$JN5?+^Jvs;}jKFG3aW4zb1@GxWMD_8!oReJ|qfL(%ekr-Lk8N(*46JoT%@7z| zE>TC2KfG#pzsx@dqknx0b`!lbdR$8)f~?-Q^9=03$fbYr7PIUIo;Z9? z;eWe?u3@*pk)Y<3w$kKe9H`?YYjE)v; z=Eyni9R7ja{{^a@~i20C|bsvtX4kC6N>KgGM+T}lP+W~X|(X1wY^cy<~)d&{B3PoQac9|{2fAG-H z9&wJiaBv;EGM_%E87)$@i+bfKY%2njJe0xpL2TLJ=ckG zCnLO*r8zxikirY*thdXA@=@&QoI#)eaCjNHk5=O1V7%ZrzlKgH1#1TXV8DA5m-&Mf zW=_nlEg1e|-~Frqisr<1OF7tftIBV|F%=fw0 zJ>8}>NJc>UMQ&v$O3;613c}jEG++j?TS_!(j2Q)!Zx`+3aFhsMn({OskzM+nMwP7* z6Aa@$_R+JYKjc$yCWT;^&qfzpA*U;>-A^DG1(cAxCm}>^kW%+({+$kqz2Dlxd`rJ~ zFSXddEQYrny|j#=B9-x+ohhSqa)Suqd5CBCPIk2Xx1#K*It+-0Yf7YC;d^1VzLV{! 
z(MqBWN~a*tmN3_9t2)<;H{WO5aD}wV$Ml~@{ zCZ=$4DTgNhvwI??v<>eSzyf8|-T(T&$7s(ML5h?mX%GI_VMf;kVE%Z-=bY>0L6CQ4mVz&ng zuVuK3>Dv2*ts?qQ(t7#Jn4FqhFW|vo^{nG&2W(zbQq(8F(Abn`fESFaE&94)cTDMA zWlOg3`P)y>nXRwyttWQwqiCGF6(p}4jydE)SLV`v-;MfKJ$%`&HBTy<^Xs-W=*z!V zlcJF0*30wra~S6Lbqg&_`63RM_Un`cAu-**(Zii}^z_I8&lgGfUbBD_*wW^2*FNh& zW;7BJ;p@LWhY9DP{SBx2r|hKve=?8%oo)MnnUwww*!ur1tNC(&O?cPD|HWkR?<8N$ z(C~2Ub0ENY#SH|RBTC}?=gi~NLJS%FCq5ep_;+-$Wg`rnOF~K-8TSx5FfhQ2{B7^Q z1N%JPfW3=4 z?uAaQJq=1JuZBZ?Y4VxBk*evvr&3vktNDh3=ZV|R#&m_#7yLfbAN7`rkeNY;S_|WC zAs#|;h&sBu9>YsKW9D~i?^eDY;tw@^j*a%0c+)+AkJunR^rD6+_LQ8eslrWF+pPE4y@_O_90Oh z#B;lyPzQ=<5W4y0@){m2X|9KKqxErwb1UtC{2A;mpL+4VaRvs;9mZQqQ7-Gq@}I7z zqwMAO^{iL{Fow<~_+JLS&HD*fWl3nX85c8MoZ$2y^r@o}D=lXk= zUhZnynfG0)e}amJ9oM#4#YyCOjeu4aW2NXcG9t|4d7IVII!!!VB=I3w_8maY*EX1Co67CU;b4xAwS|-Yi5d;MQJ;BrfKgSeI1fTS8t|HOF&ES zrX=|jnS2i?k%X9{EeOt5Wq3!0`u5ViXRb-l5Tn_k1cM%9b@1w2zaC=0OD*pg-B)}F z=zvA39oNC9i;V?K9bKi|Rqz%JMJ0Ck%L33o9A95#LRmPXCD?V(hk00>){9L==kHX$mxh$ndk3NQ-XKZa5Cxrh6qGGx+4^Y z{9%7=3Q1ill*a70yNYf^jASJ z)r(+&BD|63)*rcthZ zis#LEU1S-_7|qog9rqjT(x$PN)K^xI>)xzI4j6wF*&JQ6+$w>eUG)5XNa(ei7O%-l z1s?xu;JMG7LBUnM0bYU$)t{0XZ`F8ao@JHvXv2SUHCC75&J*_Nu@&ms1CDRPg}fS^ zBrh)S@#x_KKoraGf!uj-)kjpy_=n^m-Xj+x!UydaA-z@}P4J|sJC{@~IUl_Gn8@R3!nx>cn3 z(4f#vy0DMjRtD&etqX?A3=9ms^J*p(t~fVOtS_lrSDU_?clhyvux+~0>I zMw9XQ`NTHRVoXu(Y-z;qhO!FeuTMWy_wl3+RU->|l{&`dB?Ub4^VqOR*JPQ8g&(i7 z9wyD9p3Q=GzwbdPjY_y@J|$3A-UVIKfUHdTBiZ#uBo9nvo$e-m5F2|u-z}Hld4=oA z-#?!6q`RDGqtAmP^V)WU({zVHLs9KT^xH8{UJ72vJ--9DPINN&?MnXssGaY`^M`1X z!doc9Rv&)zPznCdgfeAyoE}rJ3TeEW^0aZ6KoLGz!Z?u+>XzPU=<5bdut^j>M z>PpqpmXPXWfoM~n>Mis-DIbp}-_?ZGhcadWP4+B{fs??(-&o!5?Pkw#WM#3^?t*C6 zp}3V0W^wR2?cRTNr*+DScYD2L<4jblIcHm(xkQ)s!=OZTgC`wgO%&6k$jo z>*QGHI(LwIQl6!;Y;$GtTi5O=kl9@&vCx;kO!?sX_CBUFHkF`FtSv8!9nV$T2-|G+ z_fH~eUJY@=snV793p=*U`}~u^vfkjr10@*KTr6vE$s>1 zeRe6`(3w~Fb5f;8IN%<7eA((&{yMjw|0iiruaT<;PiVBpZ`8L|M?Y>Y@Rd506||Bs zNPpu1>2=;)Xu41x%=og-)o>15Aqn;CGUrE4A}FF}HbocEET!lqQcCS&Iim zqDJQv31nu^y(^O~O8XXXJ%-F|ZaBl*N2-&Q+omjF+c({3%<6Wq+aLau3?S*v!YQ$!+77czG) zitP^?Avdh|#AWOC4WkC~{xk{vA{)$aY^PO@SRRqwTk$sx2Kbmks0V#=FE^g)(`@*N zM$K~ldmxPsnbs;3=U*0w>1*r1vCiLoIa^nvX9<#28qdvMI01;>+Hh&tjuPc-T2Sc} zeb(g?bV~nys`jC1TsWDY26Qvh9}J$@dXwHK!G}{J6M65!7X0#1{z^FbdapvB)X{1yERJGr@9D@euBq66+!(whg5tdYi3rBgntPu33i@6^;k zu$k~=Tz7yjM=cl5eTbIlep8T4i|8i(vhtMDRCU2ths1v5ru@^3KYj1$Oj%6SGlL*I z7%pn%XGs59|6DJ)l?Q^wQp?C!4JT}6z~La*<2`~ruH@u~NAF$FQv}{$-hLL*WaZxP z{IAr5_?f8gjnJB%>VDT?sMVxLMB43T$a;QjM9FWF5NV2#kfB9S9oBrBTx3qMgj?CR zFI&|%Gy$U(UL9-xe4)&>PQ%17OqfrYGbk*kD?r&)qx~Gim_3vtzKe=%Xr^s|WOhBg zmn-eFs+#%Jw7QV|B?szMxwB{PSgrO$x6O|KfmFff&_%EW)%8VZNW)z5#B1K27 z(tIhyqjp?rypzLK{18|r-3w1=#>R)Ka}@IU$A+;>9l=VqE^Abh>qolZbKvJbjCpVC z8l+wziUUjLgl=AUK5W!kl{y`{k1A%_xl%l3R~CK}w;%g;cVgM_Lba~e z8xI+x?MD%5clf~0G8Ltj_c=TGz6uK`r|IR2mlK3fNM7B&OzD{?qa8br6Sv=;;@!22mbvo@Pdhd0Jp{aHH(ni+LpK02HuAMYWxI-I{? 
zvw3KDbw&#}{_+hcWV7*{_;W7CH#cx@YYE~;V-zk){~Q=%JU=ED4)XBkc$bx0hU%f&PPRDyP*xepAwvqBj!T zIpUQgNyTXt7gP~By0WP&v@F4rnz~E-$PDHT&*|9DFov!kUwQk@r&qDY%bErzj<_TB zv40cI<~GeLLtw1Xt;~`hrN6Fi1F8eR0_z#VlrZeEu&}e=l zLSSr=HS1@c%?I+;JH{KZ8vA*X)WTG;0V>H4O)E3YM^{Z77E_CL&?vykU**(mRe4v} z4YS}wjzbF&62xckaoHQWNz~#&2@OidvT9oRxkrZIwNRJ&$ujQHX$w7z9BD7)oYv(d zJ|&?ZTn{m19pjf+l1zQ6=^IC5jwPUP@E z)|o-^0b?drYFti^>J^)z^Hu<NId9I~wL2MZD4!iv#rUb3$E=HT>+)jSs&rjRAj@c7D|@)U5Tr6 z_*VfDFuyGXb%$UpQZfR>{2?H&9UQWBr{hGq>H7Js{GjQWQoJYKChSGW1wE-BKfiWw4;yPiZNw5y?$Q1?yYjZm!ZjV+ZXwHs~Hlb3DPf1e$TZYU+WsC z*MqM2wIT+zRQTor`bRs?j58#jrI~(#Z*ILl%;lB;_{e8SCXrtBQElY4TN2LadMCCu z_P0{aU|;N|413GX!VCFlSRn$)_b7w%w+>g^pu!=j} zUT8~v`^HM;E$tgmy%)-UKyQj57Yf20uy*HhZzL#BzEcpksd{1gA-dNP5}LECE>Y}} zr%0X<-LvX{sldM067&8}Vsmw1$pwK$b1-Mow_M|_l)|rz_psFVA3MFPB7sK&=HVaA6 zO6t`3O!*^fQxQ{~{VXk#YX2H|*yC!IG34l18OF1~!O6y$r$N`v;MV4;jXaO1eqN+Q`1kjDGTr)c+7$1xMYX60Uz zOXVRk6@Tn{u{3(gk3#1-HWu%@pK{vues}~}Z50d-l!NK5Cqf1Rj3zjmw>~BlJrtSy zA4iaR!k}Y^LjGwjXS40yBbNT^)ldb(R=8DaPszI{TSe7-f%h*PS5<;$bjCq>>MYu; zHek)m151np92FhJizmRt;>A9;a1wcU{6lYO3uec&InO$u#d8AvH=y-*PnVam?^D;=uefhpvR~VVkI3GK5XBN~--1aSaUX8T zZk~08#Ah-VGRJ=@mu!nd3b=TY-n(gMMY@uSb}v`mvVqkv0#*@`IvPB3juGl)N< z5|S>qf{ChGfRmPwIX?M&Tco`at_qR?K%f6bTqtsP_$+)}{Nt*FHjD-Y*lfr|sF#T} zFw1K~)v8^?*h}WE4!YOmH>El59L=v}LpZ%FpAPaiL6UJp5(Rh!`K<7~H<=qWrSZ;B ziSNH#$T&&8lp4$n(lQ%}{i&h@Z06)2(k^t?#1V6{@@tfX${1AQI;fn1D{>Gw)e%+&sVC9PP5zGVBDfoZMt&dT0It zhl6^$8i|n__(CYw4x%?0RdO1TvCwE9(+J>t;$0VU8?)bE<+mZ2_I0FFowq)DViGyz zV?wi*L5x}3$&2q5o0B(aVMLiJR;y`PE(y6}OWO8Q8N~1q|E-=h_3I*J>*ZXLlyPRl z_u6pt4q3vR>$N*QE%1uW^f#|zL57hzk8c?&QZ&C&$Iq;O`9wc^yg=$|HS5s za%W5wfhtd5`<1EP9h+CwL&Ted&P`N)=>eoICB7(V8-QIIO@mvfzh&A>TsmoXwP8JQ z%B)7J7TJvsM}2wKrD-SnEXyNaTb3KoYO~^i+o)gRsfIfhM77UX%P2rryj74;+6ryA zbA?B53hRS^j``Sfvg&?j;9?y1$syt#)f&kVhI>L zZsqITYewkX2z;gQqHO|v=>focIc$HgTSX#YPug?^UsQ0ydY}pPyDKUUz2(rA1Fu<- zk%wqHBoPU7Wz+0B}Wk;CU+EgG>kL*6P0 zlMZxksYpRI^yTrN)iWQv>u$vggAm3E&AhDqI1OeRsX8{v?BK&)$(r%E3bg)&BIT0`m&pu+%2N zA!}le)DL<_29vqvSdHHE|CE!h0!<=%;F20;<1nEj~XDIN! z`kfdJ#}FKmv|`5VvyPSlZNjKnvgyJ8+NpOS(SjT?+rp&b>l3q3M3EFBRbZI)15=yN>o-y7yYJ1vqh1({ z_a($jbc2W6T^AGaCd8TO1fuD8Ozz*s)r-48ZE95;=#plMVAk*9IynF49a%#u<$^}9 zV_))GR;9wKmUOk6_`nna{&rKZQ+xb>80}b5v&P;?xrjR04S$}qpR_?$+ktEaxZ0#~ zHpxAu&!&hE^t|orJ7RGKq_j7ox7u zOT#1g0^RCw*B_OspW z9*Bvdo?Q~%GiLH8)1-4>RdDO1$&ukO6UEL2Gj<6KAtz}Q;nnh?DhGsw!t&GYDf|P> zJqJ&=>M1=5Y26nAOQx=U+#CV*S&Qm}{s;W7J8`8Iad<4|HkuSo5<)ml$`gAXaO(UW zQ?$HQ%9bLbmLOD~P?F(#3pVtvKUj zMZ9{djs>xqFQDyFG$-zR2aFY&Ps`J=|2pTRIYacrrPG2^`9%e>t-jB{S@n6VOre8;T|j(C-8YtwLZ{zSNA0;#~l84?Po#<1JZ-5bdBF{S9>0 zSL~RnynX#B)e`fOI)ZVGI&lUXY(MNZqhJ2Awc@5`snyU;eEhh_AWKUsEC zj=T0uZ47Xf;_uB}l;3r>`MG!1do}|$B=>cRx7{0P=3{#d73N9qa~MVytQLfjb6FgJ zx+SSty3VhjcMzC=Zb1p0UE10vtm%+YCMqf+e(yA8~ z%q^NQO~vdQUPI{6{=IU-Pr#fdS;q)H93y5<%|#%?!v`2HWz|TZ0c3c&A;9`S8Jo}S-RTkHL@6A#=6foVz)e(v~d9h0Qk0}RZrP?I(tplefJB@ z%q;L%Grv~Zb)9^3sPDV$x5q6v0yw9TnujKad*IQ1w*BAOCmnFGRV!fZ(cqR(9ovc?PyLaKjF8C67`pE-f^Knn(zV{qXDdYhc%Mz(H zkd%R%q$2Ej)l0~vMBIJy1y0^_1D`apBzNOpQ&~VdX^+TGALH{SrLB$=D!8Z-*#H5~ z?hacFk^d2ar{Go>T{w*lQ|)3pgpDZGJrJY6a>>7l3Z`mRP|n6z6y+Bx`PSqsO0~)KR}{hPFtmbdwM2cOMgbM zD^0)Ss?w#uPtNxg(hAePLJ$!0X9;x`LnnRl4w}hahSi&~-8N8rKEXLP3F9b_xjn1! zq>39*ymcRxQd{#3n?ArV**Xi2W3t!K5TGeU0F8XE~0$o)X0w4axS+;zej@HQ&{%1a$O5PpibUz<&ZG;O-ZVQxBHN zU9-kMhAA_;!WlSUEUN?7OHp}dNHwcb@vhUAcNlPz&ce)5T&GP-m&9}4lzc0tX>BfW=A_D8iy z^-WmYU^}aEnPW)eqq7Zp+At}~r7$IBS`Y;0Q9yOCTz`)(%w*1tWTU{$H|M<^>?{q! 
z^J-f6=pt=^YDmP*;R?F{x{c?#DVpJm2`dj%iY1 zCy}}thd8tD=$#r9B4K`XIFu!H|H)_>eBW%cA>CHVC00rKMY05l6!x~WKuZ3>O5MGD%qSRyydM{dJ-i4d4;r(jD>JCE# zr|FYC$@0eV=YbjXA7geQRLEzOgxbE(bG+#alM*mzwxX1yMosP2eK)FKFUIeTKL}kD!^*4IP$McjqAg3pzKrLg&d$%rJvd`R|47axY|JC$TR#}E z&g^Lg6L%Bo$ah2Q)i9qq^`EYbT2GW*a$6>;PoFQ?d0pwigr3f|3@(Edptuk~=pR*y zO*cRBJw1@_sK=<*culx-T77=+kdF|8h(vC|^KJRmF3NX=(!Fz4*NO%2tT%FpwO z&F}jno52TZ8D+@?sH!n>bKH@amx*f~0Y*iSnls(r8KPUAP9UYL<#NSY54RbmVz;ck zoRC0$0^pbiHX%G#r1icnKbUDuE9shlq%6E)C!yRp4J~Wk9L8`bK~eB4@o6l!JUeVh zHpAt#T^pbBeRd@E*r(9-xi?m;(sq{$`BV1kdD@B?IhdzItwc)#xEf{iRWuq#L!JVE zp`dBfOk;9!+gW}zMif|WL?z0TyWC?)Ne%)Q3B}!Ty^(As$Ttb&FzVG1-wla--Pfyr zlj9}&4B25WQ~ZvMG{CkpF=ZM1la_fQyPcIV8;il~FBkjP9OP-)X)w6@&SD7G3u>6& zS{uVlZIfAqMu-SLKS|tCon2HnTsGg;#&xsCUzyIP@X{nL5xk`%YQ%n+9YjL{sk>h> z6=R}uua92%_SaLz+;$@vVi6LZquTCR+Z{Kcfddh8E0AUn^`x40HRuDX44SwN;ihE-}ld?DH)kffpt)AgDT> zy!2fEDvZAK>11KYXwWknTeLSkPCzr6S;vTh1_ zL^>isMQvu7`g`R`S-yt2qhw99Smz%732e(72KyBbCvW0}M8&-)45-d;s2=sI4FZK{ zbd?1AMY2X6O`qcwzolr+T8Ez08~*rlytFVeNNi4Pnv`0Q*a*;5v3D(TTsNAXuTAj0 z4F@mB-DEv-W`buAB!tZP35g;`NeWdoFZL_ix6zS<%o?2lzp-43DT87CL{-B|X=afG zv3;}UaPE6DqA()m+qa!*eyLcWd!m{XbHA|J=Vr7^-_lj0&Q1S_wU5I>DeOX!l#{=8 z`rMRow``Q!#*Em`Hj+OtI9i}E2kVj)&4$TG9iM?^`h;#cf6JPdtM8Fx=_IgdWA?>e z?8HT7X-|OBPBJ<*R z($qE5l^@;Js6A>*qmQ(0Wdot5c^;XpHFXY0%D+tz<^B}M6k=LrdNpc3LGJUFcLC1~ zu5|E_JIqAC$3s}JQ)cA^IytJ#a#hA9`+ebgzEzT7y8J^FoMGs&;hV%?`)V3ueAn~v zH?U5xhr6FLUgcrs|8lreyRqs{I;h>W%Zp0{!nU3J1?vgUNPxf1RTsW|imQf$7MpXkUTw%t?9MoG*#F$+z`WJ4z$S zxd{-*X|frYK@0uyCy>MT_&uKjA&4{fgq>aiYPH%>J67#s?tN1dtCtvj`~FBkW^)~j z&TO0#L+1XX`n}$H4j+lf)P3Vsxy<>Nwm?TrR61l|UW!yzPi09-Y+h$r>g!%xvM`y^#_J%DeU!uIjzAPp_Om_CqNPMQeHcsdgr`F) z-w>1y-+iP?Soi9uM<7@!Z%*6AKBoiEQU1z$w_5MuLDkSUrk9^0xM2{_VPQ|j4QAhv zvMtNxb)HSh?~y~ctWp*~zR(VVu4!>125$d5;Yy*zomgINcOrgcd77kap6xRpG^f6= z75QnorNb?PJ?>&`R}1Xz@5GoFozWigma<h^+iqx{ zFXq?X9|KdJ5lEqd#i5F1-m6UvyZb8};6Nz(K>52lsmHboIq~5vRLs+a);189Io2V2 z@8r=Ww;v-tz<8E&3ktG@;viC%YqR2&c_mH5C_u1pU%R+S%m~Xot+gCf%lI06Wo*TG zBXG{N)(~YbdVOwOs!XxI?lww>2W+avgp97?2bTbs<8l-=1EES~KzWk?sk5$Wn}b|W z2`;X!;wN%0r)qB@-Q-u^m7d$0{ir0~vquvjAnl12qcl#5@<31{9>bE-MVSVE2ZZS1 zG0yZta&-Asme&sSp+F&Q5GP&Cq`qlaZK|=ocIUgI7&|LQeJ(_x4_jLddckn2ML_Mf z--T;>Vos8hy)Xql=TugscN8vjR0OhHmyN}feCCcs^@Pmm2k0=9q46PjQvbk5)U4P% zYx9%_XdWQWO8Yv&$ra6=vpbb2#q{&xhftz@P4z6~vaAm2x0e#4Zh^6aS`rzqnI5%)i~ydHfFMw zMcigbaH#_MCoh^XTu6%UIOJSSdclbj%dW#K4wbfH79PXFQsPgRpPFXBJ5|Gi-JHit z34g%DxSGO?vx`E`{3{ZUGoDBD*q1uq!urD z?d51V5LTqLpeu>*^mR4udb|DScP~Us&0R!uNqg3WpH>YOXS*SA^61a2^j|!Tx)ycY zF!DIsnoK!{KI#*zk~rO6`1L@uh-gb!^4{&wDhL_R7Rhm}``(Y~U2a7(UGApaF1+dR zxIX+$&+zkSd2)4=R{9O_pMO4Nwci>@Gv~Ah4HRelXo7XLd2#m+OXKd^#=$OrZB$Cx zW+s*3u1ylJlp}MIR*6E$jCyR?!VajnprS_U#vMAzbO@USx{!$NQyYEaqY)^)n)vj@ zJj%BTigucc=~t9dyx%m|h zdUK=$9OA(J(a5DoDVCc+;{=iqYA|oJ(STE!gY2RfRis#+2SIz7C;F*aC2w@YKy63u zj*X0V@EdtAIz4*N>k#eAS+L0^-^$7f-VOJ2uTg6Z!imMi5vaVXP%9aObcH>f6uk9W zb{3X8wV)cE!eR3@ehmjf^$*@D*}Tf-_`pFVg45rpJ&hA`vtY&{&o&&!NA2`4p$LFAz~X(zE zuYgGB#aM$5PqSZ9f~2N$;B9;K9UR++N%`krw&KeUc1F953b&sT9QgyGk7%WND);MQ zdV0Js{6)hwi+m)F4DsL{WrgWvhN%ro>1jAq0(riY%|)XO$=w`2f_6p_c77>x-#uXX zMSHxNGjBoI`zEo^lT{%kCyLm@wQEp$wAQTSsGrJ?I|j{#;P^g?fivQy3!evh&1}mD zFL@yu|H#&xCdmAZ5# z^z_SB2mge!Hhi*?X^VK*^E64PMv8`Kw`JJ>x{&dAv}g4NJjbvY>jBNiBM;s)T@aVh zi%MZej2sCpvCQXVJzvgG^r92hQp+SkHo`bD>&S|Oz5xU5JPPbhsn79N>jh4&oTuHe z-6(s%4X;1oLy%0@LbO`)fvF&RdJu#*qJ>#-U1Td?<3sNky$Rhm){#*5nE}>Z>-A;P ze6rwSV)Tdgi?>N<`}>5vKhG>SitOgOsh=(iKw!({Gu#iIUH7ns|25YxOj5dPB2s0_ z0@2llBZxS6YDsPMb~IaOu_N9`zbJP6ZN|Px@9p8>ODnPF30r!br4z#ROMbt`OcC zFP{1}H~Z_@=5PuBLVI1k0fn7AL;VI&0yti}J;xvY%Kc2&W;mPBmI4L*991xDOmZ0< 
z;%=jrfAI>I`vFOtc~#fdSf$`xk5^8vv}~kydwB2Uo_*s)n}|?isIZIO-40eBIqz-s z^MV8L<5;d2Ak7%K@BB*Yb?G*epRheijGZ8RSsS#dXZK}2c;x7>rau`3*$>BrT1qb7 z^*8O^kG*)3K`nQ?T%_Nc+HK}3vuGsq*(^vbG{uJXj{DdH9-;GJrX%PJoqfGMs}@i# zj4ETv$?~*Ui!ce73Z9jcYJ!M2bgQ5+mr^sC=t~##nN{beo1etI99I)wF|oTu%t73K zaXL8gtx0W>E(VBBZ?^)PbMiiGe46BwC(jyCau#yH6k$(j_Bt-Gvt8XW8a61@t|H1i zyeadv-68eR%B|L_YAY$Sz0#?l^rJIpY~_m_O)Ehp{>m_k^mzW7B;@1h_^N)s+o0Q( zpOL%Cr^;3i@%$fe8A_2LI`~8i#2YJ0VX%CBO)UluWkhvZCa0(9R<+7Oft{RO`$MeK z*s{y4*L=PEVOjS!a`YLgxAN4qOA{s-!AA*6e9kHh3x^?7wo7qk#OTWoBgjeBZ(>c4 ztOU-jFx$~!23QI((O+OKMApm_i1PUNVwAhk6q`+iuv%_{>T&&H*+6*$cuAzHnz6p? ztVE8!upZYZa-xAegFDj#0qIba*`}WMu{m*yMHe8C2>XNKQbj$Psce(}x9jgK?^fo5 z-ABuw$!oO_7S;a#zmi*hny)h~4uO2;GQ&l__1{oKllw}x zcCx~-HeY+6HifB?hZE{NvT3rj{}lBQKibe@lZ1Mc40$`q|~F zuBPW%n47Ej`od(e1@6EmzBOI)fZ32-VNp#lxtOQpJcdSaGB{=c4R}N`pNC@hOwO)6 zjVKY9SvzIlCEbS?|AH42a`j&QXrEhGL|=(FS!k5(%iLMIa(AC1qwP5qEIlwg;A>C)3S&by}S7zdUYgtA9+ z4OWD@L*qd>UUK>Js&*6apV^#CjsAr_uKFq~t#Lg|Fh4g>F9~ zzDLJEKh0xS4B<)YPdfp9x1M*`E{4iGfEZc7+4w1*6u5efNnob@;ulU6n2=e@`Af zPp1E(OoKPJt-?(7B{k~2Wfnedo;}xb$O_RElZs!?QwBB%3Y7j$Gya4Z( zJ*s(lO)hwTg-Of(_o^^DI!T3&0es%4R2++PGRBIjajIohB{owQ-Rp7jSJT8!1GEFM ziVYvIEo&f4KPW%gCct>a4VOl*e+*w)B(gJ&Z+$-^&;ZI&ikhdJWdbtyhDC64X?=E4 zsADe@kWcD)7@ktrnfqUD4B&33YB`##x))G$7G`JD{st(4%}=-yPS9Ll;=Z{3izpiX>6$e9%Uv3iV0Y}#UxV>7p3VbFnSl`e{ z5AG+6#V^zjGqlhM#>=}m-up}+*)_16Ami|B9^3B% zJx+{bZ*BQUeINBPj3Ck}dlr*)?{7g9H68UCEeI+2Gg#J`~;E17e;z|RLy+4>V- zI^nwZ42sP^z!TH(uruu*rY5X@lUnUfb&bI6OBj7txm2 zsSZWMv}<6GigaGl;#wG#zH3uO;*F}P&gG+clRJ+Z|NG!=tO6% z3W-vx>sy2dMZPz|{5ToNN|y&&TwB*hUpVd>!e|qzd~$1p3SB>Ju?Kzlx@$q&;I>X# zj|+z;jAT{ zmt=$UPOz0@9^BoWcL}{ex5BU?`%qk$j`IT36pgh!XYjBE8S`l!u`+Fe$|t{#%%Mqq zD}kRZf}eTTWbhT8zez>>m>fHaGI729Jw1GI_X=H+UrV}o7OB9>$=~PA zSe9At#s`WjyS5;wT{LA~f=^Q8J%@7{*z&lwPRdxn7=J2wCI0fU?UQ6P>YB-m^|r@S zQOCPah6nuFJ_+(_6Ab72K5TkOyETH}g&%-3n^&MtS?Og|Sf75N-%R*VezuVYI3c8Q z*DQ*&J^lV(b=H%h%8$e=61s$bn4-wHg3B$JpJXgneAEe;W4 znoLW}`HrxoA*42g#a8n$uhMG2T@-iaK!+iI2w*o6OiDQzZCjd(u`SuLAPp~L?xE_% z7;Kc`^&a2_e;s=rkQQ5ys`BHi0T3@ci+WIT_nVU^68;U-h)|uGpnLtVfN=s>1 zIx;E(eYl>v9aLI<#dDx+pb&rEzG1nkUQN_LD5C-rrKLdRKQFD~9*9`hh>ocdr`>q5 zlGl4WJo~1hCvD5@fb-v7yVW#BQYGvs!nBsU+KJ)~`%3SMUhi>z#vq&|VQa4i3k%U) z&jZj{ovDuPS13S%g^x;bJ!Qu7kjv)8>XRE-*ZH899{Us>u`e~jrE~8Q)5B6?(p8}z zH!N*Fk?#=sQ>n~AqwGTt86VfaR1^|z-Aw)3x~jqc^7n)4bzv!kn$z~aaGxA9PZw8} zruKN-_5;$7Ta>U<^BJmFvLHL`vx#X;5~Ne?A5J<(eBj+vv!X>@vw5sH?W?PA-MvgC z^V(J7X)|caJ;Z47yxiEdX4>aZLu`e~IIDnQ!gt|d)An}H7c>7$|L!MGe%c5hq1Ob& z2a?Y6rK5qpA^Fg0r(qrb_>p;TjzCkL_jhmvhEC(l7=xz3!+lk`)IvoJ=F9AH!;-X^ z75QyV`0m`)Xx1C17%_Vt*z|jY5v766hDg{L-3Jt#$jaRSqO~;q$)wK)R$7bv{lLE+ z984KxE7Dvxe=2y9Dmj5y@9S7OzSCXtHZ) zQ4O61rXy_`jqR*cRQC4(Xgh0J>RnC+-53r7;xsXrhG6lD+RCT+)CIDYqq>HNP3vU=!_~QSLQ9MVN)^Y@B7Xa(;bxAHGU2cMv^|! 
zmQXThkNR9cy{ZHj)D{c<3R7uFqnYe#8Ru2D7KtY@>a)8R3nKOSUg0?o->+t;?`7H= zX7|mMJ~33qt&AbTT*IexKUfUrEzUY4r%@%M)j5@wkM>3z(xFD?M;~^?-n<(n33mEz z3=_?QN#$JoS|NEAyTBs(?W*?0S9^y--+^AO?9;#N>o$}EsuGvaVquZA%SAqI+S+7B z-orP{vksLA8@qb%C2l&45hWh@=_W4*lm?AlI;_snuzM_mqMT(a&;N<1-${Y=!F4MJG-RQRMh!ezY zn=&Gs`T3NRf*!Z6XKJs;$Ee-nXxx>GOt$4Og2l1h&+J|imGLC>)X!#5U}m{f(kmpz zq0ls`j5OgpC7@1&SKJSSV3exgP0)h!&UK^Vc3CF|Cb3vRqo-=QS0L!j9<;>0IO=OOQAz=yf|^oa zgh#B?Db`LF$9oU@q~(NtCC+@Xiq>~pBL4liDRdo-Q3kw3RPIlGG#Io`YX8aAv^ zk+Fb2qK$@{P=0F(J<;J??Gvej0mMs(3A6RW+MY61EPh00D#VIPS)b zx2IA)^IgRtrTzM%c7BvvNZA0B-LJ-zfH8tf$Tg(P(4M>fW4WzvEli@X0}3iA27vWb znuC;-D9j-l6%}l{JWmnG%_-X;&;Kq_C@7|jm~ROGTmdJq#UBOzc@35Ew&j1k;eF8d z))vz8lgB@ac$NnK&q@9d&!paO@q#7VXdT{age;8K zA2#KO@Lzq`U?(5ogJ`$`etBcun+f_w_?4(6`~?qYmKw-VEuVo{+^((mN=izt_A=rU z5|~x6Uawp2SzOV6SE#qu7<|yq?(W&eY6zNo3Dv^1adM%$WvwuANS6jX%|qP>as1KF zJQWsmw8-{T{fI{buxUleHOR$)AljoqY=B(A4)wdn?4a5H{aDA09@gL zeUA%2kFTTKW3`(}@k7#KP4~Bl5@8H=TV(Sd_qQ86mdXVxsj%><@4}Ltcc{UY{8w8q z*f}^%Pd54!4MX_OV~Xl_@;=$8qp$)sA_AD8{mzE8TRMl1Ent7U72vWDxN=SFmcZ+3k!Gw+RcG0RGrLJw0LMw|9`{Nx+n`U+T{cdoo)n!Uf#{(b{eA z5)u;=WuyBPWcV_(lbppN3ogaAI~i`byV{30+*Jj?`v;JKbscUbbUMI8s5mh`8KLP0 zqM~Y!X3IpkEi-`fH#l>xZ zEWH6c{COkPt*!XuAbfoM3IOQ}{=ms&zb7jst zuPWef*!<82_%7GN#k6Jk&LR$)@7;lKrso<*fgUhBO^n+}W?HCCF?d5Zo{?af$!OuJ zppes@U*^N0!~`8`=*DszWWWHWRDIg{g~sEle3Wf zi;%>#ZY|4d^>_{o@hOtuKLgI%Vi0gCMn^`{9t?~(Rx+y0r}Y?qw|C$UrYKkuR6ijT z6BFR;rn|G^_&`rm^aa)QkxTBf*?r*8rjJH`J&Hqkg<$`r$D+E|(jnI-guF7(CTe)W zvbwjisL;wHZDyi&wTH}fRz5dv`a{=JpThQ)k&wjJE2&MANb9+MQueuUO3@1K>AHZe!OY`q-hq6{Y zq-_i0<2(Hm!P^{QDapNyx#I(gg6$7|O~d017^l51GNE$=HAkD)_D#k|dR_X>jRRCu zd)qtuj7pQ^pNT-Os%by2`IQ)KDK%XDSY?v(eAiaB=DWlkvq2O`$t2|ginZgm9>3=S zTyUp(%T~k5z-aUBY_kdAE>yQ)H7nml?6ihHZ#SvAanSNSFvm_a$;PrkcAjJ~!lF!S z!j`(MSZ<+%0lUC#-_*JDk)dQ&H^=l#iHh%ftW_4_7Cl_`Ar^Jq8J*acwKy*07tdC4 z&6TM!5OXYry=*K`^VkhlDCKs`9|QF&cUI;3MKa({D<2D$qX(ikG^Y{Dr-jXRqIhne zK7(apRZi7VFC9r7Jx7c0g5X9N=5e{0Z=jIET5;SRjkI1WCZDL~OkZl3JtT2#5l&uh zNnK(WDXO;K$SmsQ;4)q#lQ`Ljftqum8=?WG9g8{EAv4uEmF>j4kWPG_b$J1auY&MT$fs-3|Nes7xYnBd!z!Tg(PVkJvc#obUb%~eK> z9oNB|56=c{v?lGs$TQXJi-hNH7t_y6@DJ>1Hq{Luo{iTL`1+q!Q$H(@~ zcB}3DZ8YOgZ*R74kk!`|7s{yx-~aAmou8Nqn8GrGf{@)dc@~EO6a1uOcC!gOFsIIs z+v|B6$Fa1qO?51D6Zz42DO0Cn;c=v(vDZt>U5XBDGaTtE3!L#|ryEFWar9!UYF7=z z+1vduot_z;7Y28W#z*&_SmEVe*rlA{#;JWYb!vJ$IzmyYxbvmy(<4ECCEh8z?$k_1 zc&zt3GoX}QEo@M7{1m@NcGxb>_*_zME4%w+nBj9N=*LyX5wkRvh1wZPJec)%GnLZV z_jl}(R_5~0aTS?x6~`-4`|F3fy6R@?GRoIKX`ScFg&FFW9#eLTx8>E37WUv!m#3)R zy-qoadko`Vm|zikKK@=#3A{3(_oX4Dix?%}TFM*7B}m_-rtrE_Ixo8kwo!zKR?%7} zy|t|S_MKkyl5}x^04TfX;>U~*9#EG~b8C6LS4jl}tm4NYyWssS7x1cmoCes?t7id0 zf}>j5GUQqlTED^AvM9nKq;~ST_$wp*NX#`A*;EuOIIJMeYR-5p1h)6vV_TKM1Rk{z zk=cSCPQ+tn2hBjv-tp`4uW_`+@?sY6$~N+DE~9D|XVY|AiX*`m@-F2x#f;$s=iCN3 zx5%_1*EjDjkgsz=!EVS@zXwIGv!HEHITk-Ol8HEWvr})9o9ElRKs92gWZo2+7>el}`lLI;r}KA#9+hu8w51YG+F$ACawu>1lI2$R zTnvx=)w1>({L9n8b4Fx=493F#P03z$#ZU@r!I)g z4u|(qtA>jJmA@f2UNNUvT|9puM{eOJj52RI%`wimNY|a#dQ*cSGD<_g_;P!OW?W!J z*{JoKLEevHY#Ry0V(n4vi-79FH=&}tl{S`9bfwdFS9yL-u=OIO;{^qBD)I+HXL$_o z_Oeej^Bi-r?~25LgjtBV*4CREIUFy(rz+`L4lJQ`9I#Twgn+Q+tkstwt`4KR&g}A{JKGltm{tbCR|L zArE}iVFKx`W+tQ7?f^@D^Q8(LHSB9W?qqn}7a10zSp(IFkmE*pRgzrAuv+K5GhSIA?YjUO;OEpo0$ zxy~ze7dmy0$Zoi&h(~NT`xHN;4C`TUR{R3fPN!PL=U|pd_~n!!g*tb?FiB?HZjxj!~6F|thF_= zhDZfrFlu&*LIXaL$NQ)hiEi)UKMs3}CGt+&Pw$Iq4Y24N#eWyQ`aY)}*1@gW{fx9c z_Xo**ScdPF@2ADZE1Sj{a;fVBdJ!F=u(2#1puQbk!d6RW4Xb&SCdkFm-6HmM5^C!z znpvN9WbE(^nN@6RrY}a0vhz*5r|+q1f^=k&PaUsWEu{s2@U3py7)x`4I!bA2XEit+ zw{+n2QYN?Nz`@g&acL5n6xebp zwb8=WFmEbjdx2I;#I47`i{i z_W8(7YqLhR&zwZzhuaBkje@(cP){8d-o3Jv6ii)!bBHzp)li6~8c*bsPCIpkK*;x^ 
z@mZ%TIoN4bA+q~m>g)5J^pK#;r8|Zf+mgaw{Y_hRf_xXF+GCJ>Qto3**{qE3o#IXk zzIlSjd%_={qp(P(}IVx}v(rrBU;I?_>K?-*|N{xsZH@ zG(|@~#^UPvXPDhU#UESAn8v69;g=}U=yyk;or4*U7f*rM(V#8go;6UW3_~B&LGcxP z-U4Q`@hX3VlLgu;!$fs1snc`1$i0p{suFP^Y}FOnc!O0O1@*|w#TDL2cCk3MqJWQB ztG`7pEhZ0Q2l8!S0?Vx%r_1AYXF%G2He*(7GFhZKuf+@qJ9d?$_w4pmCfV>d>NM-^ zx5q)+N`T~phKA^u<7DNz8nt&PkfiJ3rRc7~Tb0T5wZ=PgZNJQdWQ!7alnj1Xl% z?kjPq>g~4wZs*_)$Y&v`+Q35Yg1C8HqLNhKK78j{y^+-AAV{fnh+4EtXn;28mMamJ zyGT7#S#@3O7IVF8@FQEgU?oHvkR7kKJ?`;8+$`x`?o*h~QYI33Q6wW^xPfgFM}EKY zN_cI8K35Q9dsOi#-$qIzy46ZC@A-imklZwp^!oh1!tN7r*TSuRpbV*4>I}Q{QA%yq zGeD{Fg`#<=hc@`fyR%^Kvdy>GYg6kN%03IX$y!R-iSbjp3Q;)s8M4Y^%Y}wE+qvcWX>c|Tw?v#D$`9W^$j~>Vz%sUXDX_)elVrJ#s(_D_y`8wj`)$@v zmxB3mniyS>^eJRrV@Gx&z`?G6bZ}>Y(auh62=Duo{AJ5%!Y@T;ajQ;pcxaKdi*wn< zSqo|PMG+aE%9@)@^L`d};Qf(|Z(L#{V@`FX_z1L#p_qJGW&C|v2BEvDYWfME2$YWK z^%ESLCQ+k%sy<}|23DqarWe?Pe+w0%a66#Wg#&T62^KFzHqTE61&R%OqqYH2gL^fo z&kGj5Zk)n}h|4;qJf!yEK>+a(n@bgtcO966_bVs!N=iz=1h@=Q4Wf$mbA^(DSWkSg zUY+_g@E-QLSBH`(#^)c?yDKfO4Q0F_eUR5jDXB__Ph_G1eCEg!Fd~t)6Hq{+JQ+dXT50U;f0c$Sy5TRuo(`YADwY(Pow+zLYz5tev&5t zWXx^1a*4_bWxSaSXNp-wQl{YHZk|VmENZ+kqUg0N11O2zz410jB1_B*Lq&u3?5db1 zLSX!5U|RFZ`>f-`;_=-D!RdaYmD6iScqYq;zZC*`TeU>PPS4)LDm>FHY$bfbrhKQV zzg;hFA7UDBs#MxS3ZzaFbe^aibe3o4(m(?U`Rhy%2RVyO+PNlVnj(q=Hnk^xKPDB4 zTba|Ga+tK~Im}H35`hYNr>V1)!pIFxwok@s5=M*m#?bVAyKp* zc~TRDX_NO$AEx(ek9p;bqYANm*k7XlmUw$w{;e<|luB}3d2$N~GRFX2LBEpFK>~O{ zPEIbAmpG)>RPmMTMF|6-bOkd@gDrOFZ20E#3fA4(=O*r#kfkje?r}|4yFz59it_Tr zrw+sbp;4>7`SeKA55C_T1LP(-5@1hk%~kL~1wiy`+sGq5sU38aei-BJCRB}M^F`=k zMBe#&c5eqmNL~d0OIFGQ$mO>P{@bq!j2V7UC%~B}?oTb+@6Y7P{MnA{;uONdM4UcozYx zjP>DPH$ZtZ9{~D_MkQti*5*bz_?0Nd`opvL>$?y;b>+hHfwe-!(POj4g&f!;l)h`0 zN!sZ$OP`sV#kuzefYRpuT#dp?;bW+T6~Gl;Gza^iRB>0OQ?Ys^Xf%J2s9|Wn+V=w1 z0QB^n)PX|F^s~A zn^?$pp-TGX;b`CE{K#9Co5S9NnQkgQ_bIalKyUQCZ;^7t;-7~?=?~^&GMUcj?{yIm zabh4gr^4HlM`75nC>@ZE?&$AFvud>CzW;VO_qULp%J&4ibrH7 z+SbJxpwsjLH}XZ0xqZesl9B0# zWTQ|Yu~If$y;GT1ue17QN+GRm`&T_z6iwU&RIl`)z1J6JvyCW~m1v^gznylShd66s z5)E})EoQ3}SJNP_x<_YBR&QZT4Tl|OCJ2~Jl}e&O>5%`p{hje3P2hdsU5RuAs)bd8 z@}KpM^~JIR+W}KI!xP{mL(`a2*(Rb8vjHo2EDKHrfXq8-YS-_68XQ36eB`Ha+M`+O zoZc@ndHdY*vhre)U4iv*CY>VQO)X}4TBjY*P{sxVHUu`V_rEi!51Iky?HC)KAg+G~ zb10}6MgXbg$t#wEJZ67xG zJN~MK%|;%jZ`gS}zWb0Zor}9nTHrHpncISi_S=gVJMYb)j<&D7KAs$T)vI63s9`>)IlL0;!-(Lr-t-1p2cla)iOqKz&Qk()T}AgK%RZA^?L|QpL$q)f3w~mU zU`0EwGzrSFbWp*GmjVLwy$0tk^rCjjpjDYf^v-s$wE98#G2u*M0Ub5JpPM)VW-&vf4;w_r+hD#kBBblS~uuhEmA; zd>O{zi8+ZHk<5J9jgjRKf2$XV{kp>72^2YINjGm;l&p}bu*6+qNPwmElyYIf*&#(l z2*t8D^f=#M=GTt50%5c)EYx2L|9TGUmAnMZVBT9~FVPfq_U`Kav0S`0c}oDTD3LPPmW zeErp8(SG*%4*2Qo)4GB98`fXOT%HP8&@Y0xaRP;B4gpcZ)l~{3`Qp7P)3cW)>q`05 zxQe4f3@G1D3Kc-_qvIeF@=b`F>iv~rcWILo%Dv4z8LJz4?*2KJZoy<_-RnJJV^oX1 z=}fu?r9Cvc78d@>`y3!tZ-{H&+*nT4i;aM{%UOuEMO4g2-%6#zH$$D-8!;7`rp^%e z%)dG~H4%pQHOfd`%1|b0Tg$vg(F)1Rlt8>(yWEsxqn${dJZ;BshDpV8kg5}+Q1?bW z$W{7adr1Q2yNbSjna5_~NG{bS;2)XT-p9FYlQ^UbOcnI^bP_73Q|IEp$Cu-p?3&HML@a) z0@8a)0zrB&f`ar;s6l!M4ZXh$_i^hv&+ofF{*g<}F1e8tD0OT?E+^)5H%Wold^7`W0a0=_$9DR+e7OJPWpE z#hZ@rNrJ_fi%KSbCr=bPMG-mds{`9soXm}9$C3#&dQX)^PTdn)l;3xk#OrOV_vU7x z2{e`?j~rwxSUTbhn-U4Y9_s1(>bAwV=J#l&39*?uY1Uki&+u%SPOmHUAm7a*KbZUq z!*j*n=4$#-;+G)*g%reYAX?F?4<${cXw8@zt;)2xdXN6^A+g-6pz=*p>#?v$o;>vH zd)CcL&K*(i2XJeIsq9#ZoO?c%?VUUU&&_;|XK>foT4zUh_N>l~CY*NeS22Jd21Ld~ z{|bwV6pvZ)i^{oX2O3!A%(fcevhv!8&6aLW4Gcz^N@QRS;l7#)Ig;11dK6=0kJsMv zWQ<1sb^QqJN0PJDq4@;QyBo!^W7AuN%KT(c{*)JKKtzBxlYk8zsgJrL=OTy(x9{n5oNRzo7#$}YQA`9G6 z$7je8VO)@B3#oo!$UfdMoE42ux8E0W`&dgYDRqydAEzkfa z-CZ+ZXT{vFVhyz&__@v^D%*88U}WJn4rX`988=;lFuR@S9!Pj3t=L{DYz_1u1@BmkUEJif25uca_^~jE_b5$=e~-A$RxH*cQC&as-Po}KqT8K?sRpgwz4i!E 
zV6Qpy^q%sXS0^g0`;i8d^4k7dHh%NJlk=Kb-^ta(l6MM;D=GG|F-R)>eRwgSy>Cv| zJ8#!XjCRKE#{_H~($Xzr`OtI7o3B27*(bOZIsIvWb@9EkFVfnrC53PA!DMN7s5U1gf-TUvG(X_InGi7|g65 zDKPHRa#i=&?Cz4|)=QNtM+WE~SaS0Y|GF){;s(}BSMv)B6kT0itAU1mJEupRiRc;t zmQ(TctcqpV%VA3O?$;O1}%QXewa7ASO6TtSTt=4n#+*a(C=vT?5U8+KAZD!?0TzVA>+ zR!=?*=vi602QUB?8P9~YV%um_K_6Ypf{UahdjPbcgW_JyoujG}c`cuo(d0?5iq_l& zQFw7E_=7F6na_Bp75otuz$D)$4!O6BUTeQklK&m)Xo%e@c_Beyvt@;S=p2+sL<#1` zX2UJ0b{2#CpeZz}mhQk!$KYiXy^F;TMo7@ONi1)gv!n6t9MG#vBJf4ZVBq8BN z-9HS@Gx&9v(YM+fjyJ)@%)LSj;=xqMKk*fft1FbW*j*H1j2Bg2EwU{N zS3nGEwE6fNN7t|1eAV*C6F{XYZjc(hE`@8#~Lv?noWMsYfqv@UALM4}B@qLNC zL%mJjoSOEpD+YT@9ZW7#1{0j*7o*Y)aWMGhFL6V8qr{|2EB@v~Y#pnV_eO=BIP=$p zOmWfL?_4w|e$34wU6RGkp8SHs&I2^$AD0JmR{<<)NQTxY>%3X9*WRRim!dEbELciC zT~3|bI&k^%g$$ja-#KCGpxBAE1%`ul?4F_eWr@15~~1o1D7^~R=$%(07iFC zT~>3RqmVz=(6a=jXiq6$v`_2U!H7KgbWvR;O-J0&db`6AAO%AEa*@nw-$V%UIO9kt zE9l`t{i-H#vk$!it`mJfVqsAr=L9fkc;eP8W*9cXv8N4ul%4I^m8Bp2bvl@dFk{at z8vysmOGnw+L0q}!hHX&Ge0$KIQEfe*IW39cG9w32VKE{*}~Xt04HqaW%!7UPQA-z!9V zn?zsylmIWjtzE$cZm5v?wnk`$_M$B&rUGlm^g7!8la%y_tnEdnm>t(ln~5jOr~|e53Q68m zi=h7gti6ORx-$mFrxlX!Psbi&cR<5RRIi?$+<@$Duho=#SR0?bDicJ{P)61k8T~Mf zlwlp4WB;m(n~^(6&?S+RrK&*v80D!dWLX#L+>gI=n@w9a_nCf;gJE+~YFWG9H;ul8 zz)CLl7q*H92z^LDf8xDEyHu@%64CSZ>oA>kZwc>GZI#3o?Dlbr^cyh}hx8UwS7mxa;nGuR86|@m!Yd}xr7c?QiEb`}=qkMq;>@KExTCElu}i~E+hUHQ z;FxbJxtjc!@!Rr+GF6Tx7bql9Iy#vj>09x-ww1OC`PW-5RL!54tc!B7>U)KT)lxbbi7I#?zl8OW z@sL;O{0DNaB)g%~{%}0UM_KD3)J~Tpf;mfB{R?LZP(I=DF~2pw^zQxqz7yzpR@mK> z{?)$HeI4F@plmT;jsZxW(hNA)%&vHm4A%5He9bE9GQmXN8eU4@c}e$v3r&dc5FjUm3dt zMV3~N&zfH@8aHd`Wba9k4par zT0x|RD=IAv)Q84yL9t->C3!>YB!{TMoEE|AdaypW>YD9DMS-6zKl0-JRtBB>?S=-~ zgq(s%Boai3yF1?^X5Q?|a$vP$_WTlhQ^hP)alo!XWhcQJEUt?=uIwqpH5C<}I=^Od zAD`3J&tBmlLg}up5J1WWEj_`s^?OD<&m*pojD0tMz!648wtEwcq@Q;sTq+XjC_SUC zwI*;0=)6?)pilxo>FL+sU>|)0y`eia z@%njs=JFwfR*D78Z=6b%A`7gd=Z2{T_k0AXOH}HV3E_OO0Gj%8Kd6!!2}yet+em4l z5|u8!P6*f+x)LyMb7e}s9(a~nEz?%{_NeAQJWFe*e^^7AyJjB>C63}NDAmVk1f_~8 zrig^iT6)m!GrDXRi#xtz6@fL^p7C9$0#Hg`dH zPF1sbPl~k4jwMD4Xi?pQQPEO1kFj!n4OYu+k4>-$17Qk${S#o6wC(-*PXo@D1J?ELeZvW(&PUb3I z{dLP3uM;I-+>EMtuHtS3 zWT1Ok{P-JAn6PBSu2h+|XRZObVxCoSoH4O0hYhP{o5#Ftb8qxev%RNoShT-C&QN{5 z?u%28Ye!vUCaQoP)g4Oh*E2ohuy0@L0*$WCJ^=p4+AxSdJPMCL8cf|3b*ZY<21k;z zx))pB)H$cjeuz?7jUkRdjM?0AEyoHGYai`m zi&&v`WF4sOk?#E(^HEy!J_v7)h7ei*Utx({d()` z>xV%1v$O)eij@w-Q~dY9PH*BN z9tnu@qm??>1CYSH(x#_0vXfJ0K)A-AndU8Fz*#k=NA6 zkg!UxlQeM8qCZ6Fc9_`H!f2+NBhT@5x6x!X>zJ&coLIxP17#W7rE8b0$Fwo}Xkw3Y zSwFu>@iqn7%oE%aWVq96a+3lC2QT_4!5Z{f_BNVFeoh2mFiggnoTovFFM>}z!$CP3NNLbngrgk)|{HmgKQ?U{u&Hi^q0e4MLTw|S1d7!9|VgX%-x&xS}oA6bVxVw*#W=?Gpm6d z#pe4QA?@{2DfdA~=pAjF>zTXeqN+n|Rtd2<+)m+^vh-P;cZY_#J%mwF?#}IZ%L&;` z2E(>H0I;jS(WW+cA`JGLaMEAVTs~YN<~>M4Z!yMqC>Rqp1*+RHGJy!??@#(Ox&_Hq zQ0_!2aw7Y@?c5?)+=o%#<%%JfHTFKrY;>$SaiUeXuH}?FvbE8CZ%OjA!@{|J`1`U$ zh60M^@SHjs)i_@1t7ss;D%!x)E#STa3cTMpjT=d_z7p zo9yNvMaN~zetMKRkV)9UbI~$gQC?$ulZKlotRH6b_s&_#O>(I5^kiPojBmujxIPI^ zW8}ceugpGG8Y3mu){NkmktEn(o%8vqU{{1o#zXqK zYGIJyc)-b7HE%-}*_X6IOS_p1xj_|N0kWN)fw$fE<5pK?05%edp*+F?fj~&FJs!6U?uXI<_ zcewau5k-FJP}nV}Bx+*(z#6Eu+Ay?@|IV(ICC#q~*Du)0Z7V4Uj+k-|l2VxOf^>Bb zAiyxILMHA$@o_V1;BJnX*H4CGR$qSsV}<}U^H|r{(Ekw5-F_Cw$X_=Ks4_mb$_z0I z%vXm!R><=Wy}0m9Q~bVA?3`}vIBi(HyF~)@umvFg;7Rr~=vt_G`0J0b?M}%mcoUvo z2f2`iHF3dFNupYNie*DadtVjNt5f)4%Z7RfeFC5Ql&Js_v(W>VL~LA-apHYLxjQec zi8{{G^maRv+P7j*c%4-5@rwl1H?#k~%G4KRHt*FBb#NgbspeLM#zo3|oXnhJ&X~74 zmG7<&bI|5$rSO`Y5>l&FRF`$MF4wg@Oe*4^$ZNM3n)@)V**8M~w#TAA70GcX*~iA^kb1w@DYjZb|KEYcWDtK@3_!2K;8GBz~e|7AA~H^~mF zN{BTE(}hkl^kfne)YcAabJ(mpi5Br{!qRO3s8DSdvr&}kVcQ@W`|YEGa#87!5!8M# 
z(A5^s#)2c)8)G>@zbtZmL6~SDcQ+*@;R6HK7HsX2`&>R(jzG}zU;sozi3V@(zPApk z-O^mdyNY)84kUvyw1t}W;E=(%mD2(wopiDZ%c$gfbWHprw}j9U+I6o)G|7^n$OJmk z*8yjP;rQQZcU#GE^F`M*=G+7k)Mpz6&ZT`M(Ey$vvmCOvC#77E?yQofI$pOIMsUIk!mLcVL{%}ltR2r z9m=l&q{#l)6@P>IM3HS7nEr#()ajAF1^fEPqq%PMwK@7@{FnOT?gzW>BB>L+i&QOy z&@jWJ!CQ)l`q%E zTm6uB)~+~0AImw5|61PN1VXFy1UGKT$W+S0W3u%~=P8L`8>g6BcDU2iYBN+7^j=-L3GV=<%7RcABDfgS!AsN6f3PiQUvd{%OsE+@MTOr7(QM|y>NyT>O0E~+ zF+1{bCyT{h@g#Fst1l>T9TL?^@u|Xspf19jx3Kqh=%t1WmyevR&HWDL@gIlgB*1LE z=G+8N_0!;oSp{k<;~qq9dtpUs%62&Z6Fmcq-@N7<4u1wwc?MSC!DO&}*UmFjemUv5 zE{Ykpy_M}KgHO|*y@8wCRIL>+Uw&U@^G>0r>B&?t@9N)fq5c7kSU=AlU=Hb9N^qkH&I9)Si;t-tt_R8~bM9ie^W21S2BRwa# zS|d0%GH$VZ(f`<~BZ2&txq0$pa%B-`JUGs=f^+YqqXv?IKyXJvq)xX?M$N6t;5!F` zb(|4;Ona-YzVNRrA`Q{iH~6SNxs8_=?o7i3NE2)U&erQ>;nxlHyo&Y-`v}2?N|98^ z+O0fA()l>z-Rai~w5C&-kSm&?zknbD{if0Y-Pg&lwyRHFHk(tW0YVrj_-QmdD8 z%z__TquEt)qT%rSEQWX7IUY86>*nM@!~#XXp&ai(1?4cD-q3{a8kTk4(PO(%ckw7tupI zvQ%VHBM~{LljV@ig3bOrva3>(LP1 z2xv~6XD&GzF#LnIT*pvD6K&kj*69T=RNWJp|1~?=w_?WAkzKuAr}*U>ew!gqY}vA$CLCX zq%E#JZ_k4Z;zxHWN*=q%asxbStup&eLXbOR!RhRy=`*i@p(PWy~5axBQn46Gf_il6)H_F`{Y zL#bR6Q$t0pmJ=d)szw9TZ#8-DNmhfH{m9BRhVzt)SmTw`Ad9*miY|L1f~n(~M~-nFz$R9=R(`9OzGLbY}~(AR}ef z%Ma&=EliKgid{JgbL~qV34_d3`-4XQ!N6mq-y$)3HKqbRvbS_mX@v>;Z zHu*p=`Ni~a?}RVM>Yg*Hi3~~Y&89?_Y69o0(V;Z93UZ>q}Dk)+EpH)lBr!r$pT|hNxFjwX( zFa4rg5MHwu4NdXYboXtWxI>OSKr7nrM(iFL zy|Gc>0&ncBuI~g~x)CZ{JSUnlHg9L9n5T8vJC41t{JXChw2xfuuQib@dlGO^&BMQ?JwHaBJfp<%?hw0NypH zvjJ{aR%mGD!xf}{!EF8)$jW4Q_;7Vt_e!}|$80B@zYe-a8y}NBk=Q;zX~3#{d|3n3 zqS~Oqp>dF*V6vOFGnNkgoYHM962}R70CZTf45j_;eOJbk#n(H@g1{5Nx;=Vj>_(^PEAGyWe5oC4 z-9(Q_8Gxq{JeV7wQ-`QSXV{?SRtcvSj^jQ&iYb081Uv!d8mJ}V+$1as(s&>haU@$6 z79a|XJ$t8hU{TiY?iUyGS)Gbalq0bE+5W1;bbavskz8u)sEsh4ty}#RKT1K z#6SzfZd++-b8zsjIP_72Hh>^uBI0A){fJ6 z3a%o5Rya0S*PY!is>3pcd(iV`!RrFtFdKLrjX@(6sls&14~_m`icF1-f;edNllnl9 z+`#tPZq{-v{uab-{@Z2HDfZIrwp}KlU*B~|yai@GNLKds;bN)XmL|VI1q%A*MW{{m zdbP`jH%WI#928Zc=P}sO$y z=~7$ugz~3xISmIHE{i5iF6R0pJb10;l+|!Q&L7`yPzyU7&y08$ax3*pH8iU< z>}=n|G_!SUAlMOp!FJB6<)O)WaV^Q-50D;X=dX2^0h(+{j8J%^_L@g=?9tHe#428c zEJZ?b_5qW!@IDhBi7z?>7}aT81{NnXH>&1e>*&w7xhN$8iO3~@NLm`&06TRTYJOGA zAA1&%6Thnd!02uLyDMQFHN}$A*OqxJ0pRV#tYI5Lp*Czrop7+%|JnJ7_7O#a7t!D0 zgsJzVeGg9@50^gjEj#y{X~|kB^Wa){m9y9D`9S^rf-aWFZ$H=j*+rjjo`D9kdO+P( zkj<0S@htZ#xdDsy{_HbI^-in8N zOyl1^KTJoTl${znFMm5vdqVi9^u1PuAs40LE)PEBBp|GWw^J7qqN9DPG-_*)I{)R& z(nJ7F%in*|8KwPAKJ9|dNswaQiKE)=U$4?BZYhk~dR|g1ZwH>IdI{nHmw6!U!_k{p z&Urn*)8}4=%!<^KFMd2YXk2}E#ipB?Af4h#q}R1(+fMf;(mQMUTOS3 zhPMNdH5#sRHi1OO5OKT4GJhDWEFF3&iov#wq?f(ZFZu{v)zUGUl;|F5SFT{M(R2ZvFq4 zWH;e3a$$R_6m;eixO?VXTSf3|XU_ix)E#)h9ug6uzO`Gj-Gl-i#_5R{4uy*4=AJHl zi|$1`@B9d|f@ejFD-86I`YD|qeypMl&vppS`!UyZYx=9w?oItir}r z2q`jpjad69F&{-Kk9TGrz2--Qz0Pbyt5Auw7@>UU?$vkQM%$jwW%O2K`O18&IVQzP zK27?r?Lz+pzq@%D-|+nl`Xy`5;x-b*3bpknBtCmS6Ya*ZS~JRya9;Z`i)R zPk2l6d!#W*y(Clc?x!%*)~$Gp+~p8^iq?NeESGN9dHoP?u$qp`a|yG9K*{SsvE`&7 z8w%EM&9PDPpDWngN}IE65h`wv+2|w7DP5)2^3GFVj$a1cIIe9Bak6g#6t_96Vl(mo zHeB61qWP6M1(YTpf$r96zWduAKsG7Cl7mLTZ!8g$Oqjc&X0;GhcU~NDwdvT$7^oKb z^~1FrHukD7g56?ymPlszJ1$>jT>tk5fmbDIN(+1%@OPgo{ty~CSwuHWeB2N$Uz5br zuK=xy|37(sUKZfY-+w9@bSvp42hE)Vb~;*@l|sPE1@QMx=VGNa5yzR!59xmG!v&)- zDnQ?}H$@|wo^sM4FGv>dMNLipbnVK8o1wtl09xWh=&#WT$i5x|-k$ALKk|H)$UDUp z?k$bY0)DtS%vZx}INE==3HFd>a(xb?P2Kk~0D}=9C!+yJ z)!X%F58i)FzB%Rp@8>msqeAeK`y5YGEM)xUc{E=l?04D}YgM&N7$)qDEsI$JUU&`4cC{@#TPO#Yj{56Nha5)djC zK-}uZo;)c|PwO~aG@L(e%~a#d+kUxp(`UJVwwh~;*LgtIpP~O8;Fiz&eb#d8RfYWf zUI3MUeIJXHTqUp*E(-ANHJ?Rfxi$RMr}@Q6#+o<3{id!P;lIZO{6fBZUb)H!UCffB zn=zKDVN4oLf0ii)jH?uty_K#1o_HM&&oxYKhSq{)gsdH-EjQ)P5T^0vz|1-j)treh 
z{=GNAmi_HC7|n{EA`(LbJF|LHdlmMW4ufa|5qth0>)oVYSXoI!WI}>tl)}~s?Ec)N z#*Y-hhj4mVLGCeS%kJIG;8d`}y3-!3_!%A-)#IlgC3u;=I0$VzJeo0C$oa2#T3P)p z^Xz98<0d0y(QC0_sotYYwiQiJ*6SI(5vka3@9P*bwi^~V&zAFt7X#P^U&1Bnsjo-q z7G?5kov`yyp|oFCIRPXJtImr{do_$pXNNr+U6&SD&Q4lAoSpyt*l(qL)|aF$l!epd zH_{Wo0A;`@8u0C!Dcw%+;R)vY!lVE9#c-D|$0pU^GI~0{T;iGTymhi3H!fl!CA@dh zq#>#yCtg>swP~{B&-c9==*zoB9$w+H6j&4S)oHjfPOb(R3qE_Fh|1Q;f_5TV#RkPi zowc1CU>eJ_*Xou3eDgOKZqy}UMRGdjopUVp(%CW9^msCtrT&ag@l>rwOh{PC@6@FF z{3g?|lC~#yPWT=^Z^YoPVtjigm_3CU7RhOMevGrMACo; z?1z)1drAM?>1R&43XQVx)8SKO{TV0i%BmLNfmmVOVojBU@1EhbN!fAN{KO<_{XUoj zU*YZNM&Ww@DG>mZ+l|YaRg9Y%E(rcETL7jdq3rW@*>2Xo!v$N~)nO7^D}3Bo*HCh| z?B;v=4(}B+fTQH{7A>> zJ1F=KNRIs7_}J+eyB_G!o=2<-Jz?=&AVZp`Rw=>nC%`0QffF2Fg0R9!M@j&E4Z92S z&Gn$)N1P;~H2O-lldOIMbLM!$?MZX2ayBQLuLs;Ou7ox_J)r{zyR^S#;c_q_z>#R} z;wLBh7ed$Ad~f1-UI=L2AX%H&fI}C=6e1OAeatbxzml)|R%3|BzfjpP_nv=7p)g&s z-^oCCu_(>dijvaLy!8c&M%~e9?;zGOt0pSlAI>l6xr`XI-g!zyc?Hh3Vl&%`zOOyKES!J zJ`Iu)u4r(DX)i)XNXQf1x|kBYvWseU?R{R?zq@>b$Enj-3_Prvw5z?ZG|zsXA6yt# zU4O}6$f@49mh@Sy0(MUIzD%sej+#}}%ug~g_vZv&3!Uo@-YpWDO9E)wW>_P=-b5Nf9MXC64Trca)4iM%IX8l#92l zh*(wh0E?NDknwu4^PGV;(Z_RaET-MQ`}FMzOAU|A6;^NSiQLa(%hrBDOzb-sMm}1 zNmo=Qx6GBE@&wrD;Qy$?G@1>&!*&sXd2JqY?ySmr0MY;;VNA-r>ioj1CJTy&0if-2 z0boI~hh%3cIyP*r-OR+92a{?G_jCyeN#5&TvStYw(g#0~ahXbQ(9K-W|HU7F5ws@) zO4DcNcm>S&3Ob;05K7nlx$Il>lq9>7&L*~Q{f`8<8emPU+5tELrw(ITZi#a2Vl z=1d^8>0jb@p`us|;8%rnU)1kqE{E+ZSHF4S@1%6%0O-sYv(OWzL?D%2w%K=~ zb)X#cgZPkbH$_=RFItO1Yp(4`#5uD5F!b2nkR3Y{N1R}(F9}4MMl>E_!wt&HgXRt0 z5Coco%pT@zl;p1y;5t`8&GuvE`rQX+C>*J*hdr?!c($bIq=nKw-1Vna%+(fa1@DW{n&*5LoO@MlAPPCG(gkB2n~LwZA)M8 zVDE=*)oHryn`JILKp}%amJTLyn`wdWpk369yF_AV^YYN$^#Qy=s5O~#G(Eq#2aq~? zOaC9m7~b$2vCZ;|UR@&#h~Rh-ExpD55r)8%9uWT{!0Pr@WPe`v)bLSIlXY#bM7_WC zR%d`g=IKFpJ3MPG+v}lMlA^sAIH|8}wcNs9^HYL3AlJ~J)iP3E6E1yB{vBQY4UTDn z|3|77LM^{oCNNf*1dSd`1bh0#A7AHv_C*K}r-AY1H}Mk^tX&5*F{tTsg-nkB*vM!^ zQ$|h`FA~@oGFO1D^%`W+ZVB#VB~xPHbgc#h%+Dv@=U4o}PExB^=aUZU{#T6Vjs390 z5oQotPyW?oY;mZo;)oc|XX|J?F=DUE)teT=I<9eGnT%AYtWc>?+WNjwI>%u)KssZ~$up0uo4MQeaI4$YDcU%|cG zysf4vRW=rbgeRQCj1o7whbiR2Wlz-AGHJ$voa*XOpKyUU*Mm0HaqhVp%zQCS7)~#r zGZ7i%a@Eo0qcl+gk-SshvDRdyRICCC28or+&HAWJqMZAo%}me#Q6%6xo?z3NDXhtA zN#%$$ghO5TW{dRo^HA3&;7uq`VbE=c?0@;ui!eOEC7~e=HWM7TkU!dX5K~%Ym z?#xJqDRM-QicpO3*_;H4xWPj$eXdscy6VuR^gc)L-QLop)V}`xRT}`6#2e4n`P#-+ zhZ}2ES#P?sc369DcDt`)M2hJ_;Uf1IMpvSTb6MwZ$OBRn-9niGg_n<3Z$`G?fWNJ4 zYIRlIY5DB@u3^DiWj3O6_k{n&AG7zj!Dm1?hgrvv*Bn2(g{ju8YLA(GdAz+6do`e| zq;|Wp>}w{QI#-KP)M}0dkO;pB&x<0S zl%by(41+mz=mQd_zv`$Q6O{8zj)*}c@y{nLHS6dhrxgAx7xfX92L1Xb? 
zq-QT%hibSC)UMG+hul~swxOAHD_}mR11(pzC)*x>@aVdxe0=aLE(R=45`m>|^8@^I zG-bu0;T)+4VU!@B9||Y3oHGmuOlvfvb4-oN?D^{1Y|rLZ0ebZ zR4DY03I13MXGpmz*=m?ao&Zgbe)9B5Gv)*s7 z5>`$zL*VW3NV~;UhGga-QBQ~b9_ba~ZPUj+f6PmHKxmen#}<#%rr$ieGpf&kub;?@ zeWvos=-GK;#dNT7wl|yKDWB)C0Q0_*e{IJW0>M#chf(>&&wvuKw41ngZI`O#yLLCf zxeW!IGp#D1xfE@v6-}~G7ULz`Ui(_~uIh2$sgX-d9&Z=FkBMPE;ezw(E{kC7ASJz| zuTrKW?DK~m7KCp^@I&Xp!v0zw{`)b-cDl$ztS<4$J-{IungD3cP9~XSJy+2I9-WX= zL*o?7ak?6F$rWJ_+7ScSELsw8@P>{0mJ6_q_9cY@ey@Q;zS@^;h~Y%-qZaC#PH^yU zE%a@E(tr!H3d2M;67vxBsiKtC6my@_-&N_gcxim-O_|80cU3m@$+rA0>fb8}wm;rC z`j%WD9-K~!Ity5N1SjaZLJlA870bk+Q{#STB(n7T|t?44DpY`=dkyBvMX7e z2zB>59jH}$#I94dMw#_+X@LmPm|+h?ue$^cLUF-%U5V_VF5R6x@nx9c-K(>IESQqU z0)>oRo>4W3o_h~8UUvQEZ07!t*}{6ELJ&n%P$Q(G{^VPIBPg^BuihX%1e>v74aqAg ze<4_?C{Uvyd#XriO_*pdXbmh$=dXg)7Npu}@bY7ts9F+VKt9|K;1!cPMx9wPsu}o$ zlaS$UqlAshDeaq`4fM)pdSlh>%f#0Y%i4i_^VhqH*$gw--N^+x&}aqP7$CHtA+Tg^ zH3P&YPsUJzHRzh%#5!$~E0A?4PLLXurC9&PlLZRrE7K>ra#D9yfFzc*g9T~3-o>Je zP4Wn5kOESK8_eLY`pPDtJgl&yBL&hcDf>7e`e|;bePx(RVuPAHma$&&rC)$u-kKE- zdBcns_u$~9`CcBJ2VD7;0c7n{`ivpRB;t?W5wn-;SiY8b_||C^(Nm;Z=zlqpcaYHgqnys@tld3FkxQslpphs8!HzD(BIUUQ!J1TwZt--F~$`=>>Pv5)gw`;XeZu%LFc z&LBjX)W*{adZ2dQV6l?j!_88XeYke0?}w?dd9LKGTp9l8cJL7=_CpN_K30A-IKhF; zR>LtmuYW7)gs?>_s#Pj6@UFE7M_iF{iy#m%Z99@(K~p6r%Zk4;wi2&IoJQb0ks51T_-nF>?5wr7!vI=-ad@hofMW*hnq3K zQh%cufMc}PUei49cp*VR{WN3C+(Vc!NT-BBGUu%}$Yb+$+Uv@6Pk|(Quib`YG0>@^KPz^T^<0l7535}D z;B@#}=8?VPj)~#sRD2qWKg-sfFX1&){k*i*7Eqi_jincr4_KXwozk;g6S-SQ8bm)n z=nb}Ei{tiF!|x9F7-KgoZFcoNX^>hS!vym=A1>r_7TXnojbLR@yV-OGGFyGPvgo3V zV_@o4wd2tFU?L_1mofrpdLZw8;C?Tn++JxViS0jXt+;0Ms0{38H0L~e<^Wrkp0@bO zs15d_tYfmh^hN9XKcGZHA|VeT2CZd0qXfEs40oH{@-fyrJt?zB?mT7{Yb0TE)=UTN z0`1IktyUQ7i|0H=1CW(t46-3*_U^5e-n~MC@+p(UYymQoBYfnb}9>){G z^xz?#hiBbh1-1NII>Sq+CMN5{)}oHi#ZqKJy=_UH|6ajY+z)^DWhe^&9iSKUyjJ>S ze5*`XzLXwTrnw2&kj`AwId}v^%Q}34j&U-Fs>N>SlcoEYE{7NrtT|wY;?QQ_I+k*# z8Bf24@d7ML$5Fe--=)AvjV-@&t^WI>^|OCL^wskY#5;y(MF1I+g3lD{1CsWH5tG(B zB?#D}h|Uyb|Bd&zd>Cfjbmag84Ew?ERZk89>?IQ!AOfUMRMC!4*1fWkYL01n%l7+9roH2FUu)Png+5d>2XxK+~-iZv-``b+9J% zwDTy;4EvAiissCm@9)xP%e_Xhqljmf#+JW)#!1b!km}m(BlB)P38lW1wUHvpNJFv$ z-xIMe{g1k%k*-5^`^`23fVPQ=DPw`U0G_q%C)l~H`0w?&@T4nTyHS>cqpJgMfW$rC zlCZKrF)rIRXYV7Lx%8a-ND#Gc0pnjR+NXHK(chHHasJg~Z@VZDTC?pMo+Uw=NO9YP zk7Wx#_h=IDgd3Y271$Qa=9tt0wy2k>b$?%IDV>CFj|hMg7%Mu=F;e1hkB3-la`Tlr zr;ZkIzjs4PhK`J@UrJY-N5bt87eLG*>xigj0abb?=b0?h*5mJaR^=U+RlIpA4+b>l0u<)yWIL9DX6~OzcSZ zwW1l|B6$dNi%8OXGGE)o95ne5Yn(bpKf0i~9KdkPS$SK1!IqHVs=tpH7y{k+or(G8 z!dLuP<_Mc~gAO&}!nPP-FkyxU8%z{Qiooj*Kd$MVDfDUOeCU|*ktA9hNvBSXvy@}A z++N@4sQ>GNmxEc=3N3RK6s4#c01!!POGdJH(25aI`BFTV-C8%AQ?I(}cQ)>VQC>LU zZt4d+^UJ_?`q9^1(#*>Y`tGwcD^YXu#L^!Tt@CQ#3}80wBVHf!TjSTFk2-fbiImP3 z38}4GBZhBtH#vNbS)2!IX0Li4axs%}S?kz15wjuozA%=p1t&`7W`lN=Ile`_shhCpt(uLmZk}{P?w3g8xzmbipdbpl_xAs)rI|Sj=>PE!#3; z#kW(|Wk|m(s#NgFx2gY1>4#BfJ}{#HkL(PlmurdRM_O>g!{Xn>daI8el^j%W?(BzL z8rxq#+Sb%>6T~FbSw-${M6#~Mg_eHL{rSaQ(E7Z2@Jc|FJUtU|US)RFNY&L#6%20;Eco zacmj$^S>>EMtuK=R`Ab5zW{g#`C44gGqk`1^MCue$beQsjO^@PKtj3k z{p`{2<}2hi|C2)kdIJ4FDy;D5zjbO{OMqido96fTfqeF-3M>8vlCAndda-wN`diyD zyZ=n#kM8hXyl^ubS8v}ENt^l+^1o?Yg*9MK1_j-s@RKxjcs?^Vd?E1K-h+QnL|A@$ z{Wa|I(dN$DDy-{E^cH#PzCZg8Xb*KEiBbEJ?ai0Ld>0xWR@aXL8TuWB zDOZ?({IwnS=e=TBWy-%pploSq@9YtXGLBJEY?ICTp3zsx@jn|m#5$X{5=DGDrdCq7ra_T{|;mOm9By2ZGM6; z`l2iRgBU;-5wy8_n0R z#7^f1y4QdF9I>xnO8=&kjEF_tj^EBOy;lO-xdJf5t7(87`?Y&U#G$EmvT(+vPj{_= z|06fJ{#RxRlk1nMS-*d&h{mt`t3P_~mba#UYM=#%ZR2;j{TnXBinOb1AZlTlT9EnV zwxjoNzoT?xpDe6t_bspXqw^u!%=*yCW%TTwC1~d*J5nsa<5Y&e{`d4m*La_cQu*f| zHHQiD{ToP^F3MbB=xKV8TiX-)M`_$d0C;=IclSLgnXf9I^9C+J1f?mn+aEiiarO@A>= 
z`{RA*2jg}~!4?JV$N!u=5w}`aqnOy1f6krn@s)oMi!0 zeedr+&%Jm5!SgVCX7BI*)?RCUKI^kobelilsBE%a04pE%7lJu#*fRQnw26FKl4ctG zpOQOtDszUGzo*Od6uk~WL|EX(W#C6XpnW<#D5a+F+J+z11K&$^YRm*E71Z*LHbJjG z)_x|`wv?9u8xKpFO6R++81ax`+->I)Dbndhv4H)VqT+Xx%nQHbKZl1Wc)^;8ML-f z;~8?6Zs%xlgJ&*pR4CBpwqGAj1A{RMM6G(#ficwiV^^7`J!9D@36fV+{2=VC1KKdB zL8zy&?O-`U323-yYuFu`6e!9S*68)5k9ad9JuhT*1GeSxFmBWg{ONvEQS38VE% zZ5g7)payYun5#+ovep+)I!zLmXzUR0AQVbE=KuAx2KayU)dI~$-`uF==w7=0xs;vY z?Lb$<1{AGPMv0YKyZ@DSqXgDC=4Iovs;P3dgpM!|)9;I}y3tSmrONpzOV36sLq_K= z%4i5g1B6cO81Mti=ki`w9ytWZqq2BgG-8^g0B>DKUIwQ|pH(@j|4rRUy*7Mbk!Q}d zF!JzOL*?j0M`CW}BPkJw4ItrMhDkXpL+n(G)aOr%07aAxzErs>kj(MCeg(3WC00Sg zs!WN&t=Q=x>gWC%RB3`z`^|Fl!nj|j^&t^!pJ^ak4_ixApNzM<;{@G9-H*2Rp zx3ewk<-ALa6gr~y8an)_R4Ea+71g8Rn?!=?q@~v*->+j^j>^{2k49P; zw+-@t22_oN9L*rp?IvkFXhI1ea!p;viyH3QsLw^OQFhu^hc zb1T`w-cnA&2m>_~kAzV}aV%$-J5HY4mbY>oW7en3zMujX;8%tv9z1NNhn^=8Qcg6g z=a{hjFLs3jLx#oQ$0$C#B!y6fzc3k~n<=p@7fk`!&}Co%uRzzj+!3EH#Wr9u|9H8i zV=vP{-;`>i_fx=Esyg=#AVW>d;4A5IB;~TX4D8RS<2i=-LX`MZRE0S-osYmYzt4b+ zky~C4bvxPSRL%t6C8>S!+#60!=ZQ6@yBT$hw0&$}a5pXleLo%N59+B?88|~ZE}hp2 zLMkJ>fg)Rty;g;5xDzYUPO3=>ypHJwfFL4JUy9Ci+G7f!Y%|r(FzyI}fH{85=sPG> zCmr;sTg2fl*6t`aHNJII1QZrYczk4SNXC)3oJ3yJ*RgMI=fCk-A$~4z=u~z|-|46; zRx_jYs1GCF=!c(Xh-&1Z>c{m(Ka%7I=oKK;2;_fn0q$T}hLU3c`ec!tX1s|n5E7g@ z=!3I4yiEcMS}ykuMBs)M6;9zvjgTrGoU!5c^|NXU7sZ9Nb8|E>hjDQVJomgLw(VzD z?CZC7&t4oNQLD`oiG{0?F-CG5TYgh6Iib87*Q9g(mfifokMXi6?Wk&u06kt%6SkST zPFx|QRWL~bVk-DqBK5MYRPiS)neJKblD&OHQMMJg&Ie+(>57^d+n&pk!@Tz9xM#s-E{{$`QDR||czynL*S4stV#yc8PlaB7 zG1XEgH#+p7`*}K`s1rh@NBak<1L4^I)wUl0F~P7y2;ZU_kkxhG)ovgxZ00)5dl0F3 zDt8T*vXeDDydD1(;b4*-HBuRP`$5PW@Da;hAi1@9a&i)?tem}3)TFi3blq4}8xN?= zMMg)PO_UnA?svWHLweI4Eb}i$;ZD?xHF_^kG6fMen1(NERjQ~icMD zuxSpJ^KMB4bWboko0h2>i~#MUe-L$$&#~MSucs)1`uaQ- z3*>K}ub}xxoC6g>hl3$7yx%{Ry zyjo8w6l0U#yBG^h#?i^FQg>_}ryIG^BWn zhIwaB>}_M@i?-*In}->E`kd67nB2U+`uKfmiiY-gM0Bq$^|2FZj>exEtinK&v($T>8NrguaFMtoNJdBwe08YhCc`^3?c&2 zM~>Z`a!dv|4)%$K&H8Cc6gKXOE$2;UzgcfIsto_-xs>f8*VxggBA0GWoKx}lj}5l~ zxt2jxjY{UmpKm;OA9$wV2w!}g*8eg-9&3yLM($@c?2sDy!}c1X&sH_{hUPvfvN#@ z5^#^BIsU`r>oUzcJsYYge^Nb}*$-4E-Tp?h;Vs#fRonLhi!{JUm_4gS&qlP;)gYQ< z*W7!O{519My|W3ibnVOF*Hrg|gi2I2{BXWteq=Q?Df)JvD-1viLrAt z#lUnlo_1O(y<1yi@nhVE_sZ6f-Z)$_<$6G7WrvOdE}p)Qqtz@c<$9h?p+_*phMsmiXr!f358#>wuQ9eVgFpBIkVbj z#x+0LSWzTdzy3S5WsmjR7^rbp0nhkgL>A9=udjOYSktr;#5b86r3pcgzPVSqBu3&;@7LISH>NE>sxXETZ(pyxQTL#i1WS8+ST0#E50Gfw(`(=JA#>>5IFN*Y za(qx0+a#@fj@v7N*fW#iijjre+sC^DqZt%(`je9{=G*)Rqb+U0YB&kxK1BAsY+0j` z0Gu*W)L6baNvveNwfUCy)_q{%&t}VdBz8M7(VI1Cm^vpb%#MtJ1tFO|Y-(yc^nlD= z$1=^T@l-5C_^SGNdnRx0tY8kz@;qY3lAAl;u401=p#-*F2@t(U^69EpTmjUt)U<9G z4|c4U(;R50nt)z|n`*ZU;yUpuK&47ubrqSNtrstR7K9q`ck`H-F+*Qu&K7EAQq^$z zmY)x*O<)`kAP}EOkA#(9(iY4oTMVGn6APy~4Z-cBx7FWuv)NWal6(bg{w4wWH{=Az zP^vfFJ+EDC7%XQV8#7(lNcDRaTX;pVBv@me{uo`mPXd92&0g^8jfIeLcvwz4cM7$R}cbG3Nml$~V% zggN&*JM~rd+HDIYt|IN2v#DxEuDI)#v@PTGM(H>$ca)>m!F)Nv(f*$Xjg37u8fRzr z>+B{X+eNQ+%gEsZ%+ifX`P^KH^MMBV&^j!4r@nM|e0y!Q&U2V4M0030)JyIs)b4=y zZjkO&`RV%%w_0QqG7sUEm;+uo80e&@z1%8gF5kN&R;^<;nos+Xjn8(mQ)Fg!HHXW5JOx#& z6KQohYPA-`J^SrPTL9*nHV3d841F@BlPi6tU(mF|&eo1ID2qtl7B9-yVlvOXF{Fn4 zjmix^_1x!;CtRY#AGfcUt9wAD8jlo`sr<{W)f3u42_qilCo{rqS_zecA66c3CEH$R z{kd=ltD<0QU%KKvO6`=4{g~oHYed6M!C)2xAqU@9O>d|2ad6a5uZMFy|3)9~#feyC zx&Owh2``B+G~?lbYg0m7e?b1ez%)%>Mz7o84+7)fsH!!e`|%k$lU&Pe*fK9Cn$+(N z_u=q5oCqp`XtCVwg_3!k+E4mwYc)OW*z*5$?^u5 zK4P~vZFxDj!`41uz+@4SxM0+6{8i)DJATj0A^`sRpkI>h#?roH{mh@2l~kWYKp=m4 z*|6dI>YR{-1W9(iM`m_?b%_#pVrgm-g;VCOlUXT5W_)OJ&HVdsN<3H5cc4mPcj(b+ ztf}E@daGEo%uJ#~I9rMI0HSu1_HO(qA&%ptV|Du;``35yeLYkdL3@6^9Hq{OY?OJfF 
zPzLBuAP~A%a)LrXF)8y|8xclB9{Soi1a}n91PZ#?b>o1Q_dU+74sUW39s%81 z7ssV>eA5AA)1!Q~BCP_H zSeI-z#O-)9c4a;-f1PuGOyaCsT$n_A*3AADvm*|$O}0B!ER&Y89Z=07pbmvhV;bzd zIa#lmq~wHF(k}_!8#HIBz{_K>^+UI%+KIf;=m%Myi*^Ntb?q94Y@{q~$?fb_5*WhU zam3K?*0~N%3ds{?ata3}ycg@j#Ncy`x(L5Ks{rj;GTz0Rk-QHBG8PAm%qE;&M`O#3 znhr^Y$2ki=ViT02q@EhEm0(u3@y$}^*sF>|^<~*%RXSzITJZ<{+$VB2D;m06?R(N{ zZHllJ4Pngu@ZgH^4PrM|AM=z!H&%zDVuBCmp8NKL2Dk;sU30u^ismmH?OI0;GR}lf zqa-C8Qmd$=s(War$)ccC*QR=FO(2HKt~m=2SoYBL#)Zddg7?AT7_N#zIIi-_y#fq5 zyUW|2*4f4~l9tL)y^Yl!grHIM=4NG43!Y5db?dXC>HvA{dXpIsBnh4!S2bB!^2u$Q zUdi>9N@83mw?9#gt#U+gGn%w58dMn;?9MsaE%Y!4B3}s>=j+!1zFT+3;I6s z-@(a&MM4Nkz;oa3#RMd`YaAW0VYcl5pFEQhT zYx?{HKUTywGhmp>Tw~-XfeZ=j)&<03R1nvUZGCrC*>{(6$@LO<7DSGxH4O@Rg@dss zH5Z*RwYS>ACS?XCc~?gKBj%h@3e!%V)6Kb9MR84b`+1j+c1?CRrhQ~cNlDsb?syma z3b~ehaMKGlh=pac!_FvZPH+v+m??>1-D_fT;tBYi4YVdyF^xoYo=WT9c6F~GWciU; zfO9!2$avbJNwYlNiX)s~SjdD@8$(&ffKqgy%yM)c=qeL_|DNm8_|H;xMN^a9WP|$= z2}DuPb6uJ&*gHpiH@Ag zkoL?=niY1iZ~vO$k2lqX30XQIdd*9Avg)TfOuUNQe!_T$)9xNKRDu)qiovNMD<-au zvC=G;lIlupB?fNSS_%k1bI)SK*cyM%{d8c>Xu9?dD0_Mw8HDIDoFxp6aAnWQ$(TGN zlk0cq0A389P&;7?!{#3mP24H2@sITvt+0(dFAhxhG8%4u6Ck9_9HwN#dKm%%x?YF$ zFP)D8X(c5k99{vG6ndR!e885Q^pgVpep&3BsMx8#gr1WW1f_x27WD6Tg^8I4Cu=R4 zVoFKuF0J9>E-S>{gFZxtF-td%>9v(o$F93sRl~ggQc&pa971CCDGKvV^JLE(0f@ z;>V4(o2d`Hf zme((jnkgVt4@!P8b#BD}V)=7l`gW6va)Id3pe*N4Z=cs<%G&a>w6_yn`W^Va0`E*uqt?Xb$E|86b(6@}{cMu?gANob^u=Al5 zuePkkPf)t~#NVM=(F=4JCwgLC@&WF0Vkq-cdbT8k9HWslqud2OZ=m(8%>#rO8p^EE z%`39douIgek@#mwlL(W2tMnxtiH|bR)B4uF+W7&zR z(v`!k|3}XFb&kTyBNK!cg=Y{Y7R~P1!W~sw(c_Pt`lO!+%fDECq5>(-^$>k_f1tMH zMxADV6=MX)xB_`sl7s{1@z%+J9KHI-)j0wdynU=I|lljS_X9DvKVHMcr3&*kR-2M5xD_>gvr$ zXnW9bGL{AFp~Rb>hp0;rl{z}$V#dTqX$BPlX1e6Ivxp4;1B?Oh3~9Q;`S5~u!6aoB zds5I@w$^EM9dg%PelKL$MGD(6LI_`J3Zdj0sfn(fEoDl({ zB{hX?^em``7M^RlHjFEOt7NEBiTPRb{aX({fXY%3iE!Glk_cO1JE!nal&yGrU`3wp!1 zj+uNao%MTmT(xXf;c$L;+nS&O%kfC8-k*yk=nsMzK#87_nugmV@g(e)T^o?-Q5z}i?_7j*=0>et4E1;*r2}1p zh?znrkF%-N6Kk(b(>3tznhp%YPX_7~g|`mN^7fk0M~tKG1#+V54nk7Jf=#3AuabVm zy0n*R)259saf&6!ReouH8lZV3?56yoqJ1wL`@x7~lX{`4FLCf*v=pb)*+^-o+#d4* zzxt936Zagy#{hsXXp&YmM8Yl2N4K?_63Pg&mq;)K73l#dI_gvX!F<%zQfSW>WGd-J z(4x@?Caw_5ncYh|!2I7d4d3Y}tl1B-G6?Yc6XZCsXlI(MD9#0uJ8O{TWN?!11%`{q zaPH}Ati-+zc8C)fVuCL7_?Q+!cd9Ixc-e8ai;YgW35R1??sh6B3RYFsC_=VE&3v>4 zAz6RNtL^K^-%E$z=g0dTYnJ_eccUVD-5@+-c4LeJg5GoM#Ouf=bT6VmSM64$zI%h& zT|Z>`g4bHJEw>a2t?~%+_lfF_Ys+nqo>Qj$R(NLMsLD>8DVq_&M>`RX%v(S@%8>A} zaIUo09bL7&pH!~VuQCW65C|LUJd#BgY=N)8f99Lc3QcZvS}&<^u$?TBG@8nmm6UZ# z=hGsCdght!4{25MIc$%-?+&l(%Y7IJugV*!T6F`}E! 
z01VtaX%uTr(*WwRBg;AZ{0_;Mr+y%qu72R*6SehW7=bLv0Cm^ zTG(lPfI~q|Y#a3e;pf= zCjy065;nM8D%&}Jy87<1Q@dG0UV?{oqXmd+M?9;g+6=$iBens1hCk0F`kGkkaooiA znv!wZW3=aUKO#f^Mj<_bJrzLh=~s>sB@WnD&39?f7sKV+dwr@oV8;_iwQE`Ny=`dy zw7JcaA$%dcNDmf<>mWeXrc4p2cv(C%Gox87m^eB*>b2!u-w6(&z91E$<#E~1pfuV0 z(Y8#7cJSWpFBKIH@8&N*2>-wzZL7V%q3x`k?0!{2*=3nGf>1xWJ|SUgYIK{i>Qp`# zL~#YG?}q+n2cV&)h%6bgAQ+gLbAZ;NEo0vK4=hb(tbb_!`h8g27d;Z-9K)k+k=xl1 z{3H1-H_Xa@Iljbi%YVPr%N6)Yr|~bHfAk3Cy<~$HOP_(jR5uq?`TGx)^Lu+lq!h^} z(~cLGM?5n}-Sof*jRH+R^1_NbLgM36)1bz0`U&R3Nauu?#ch$YH!uFf3`qF{%LpS{ zqAcP-1@&D_Z+?Ye*G7B)L?4?)_XcBUz`a?Uw`U~3(b~O#yeA6$_dkt+81>_uD;J9p z0;ljFI;H)-5*k%!y@;BRPX`LnR(U?{_Z#d+`~3jL#bP%t!#nNAG`}b{(ohhSRa=w>#%tyn*Z~x*0WDcB9}x^QZ72*|I)E1prfZB2IAZ` z&(YrVa&vLD?;GDf=CiIbiW;gRx?9E$=4Z6@o^QbS@EL!M+(lz*1j_C{9=RnBSckbg zob`S^$M_Y-c zr}M$6t3=c?eZ+6NmS;LhE33O31CkjBh*ZR29qc?sF?LYLa=W!*xlTMvBLS^A^dU>_ zqsYj}wap2QNhlf`Q&gGJz!Ux3IcNR&k&*30z}FkPgNhDt$xrR=?WkgpG0y;zWI`e$ z_Fu&w=kLN2s|4a6vH;`Re0Ra}AFf5pSKtYyTO9w@uKx^q41~gmAds#{wFcELOkbd* z9`%d>Gv;oSC+ZWRt)tdiEX|!?Zd>pQXaO*&Kgk}U{Hfn;m>NzOzw&)A3*hTILXZEo zTyz_SATcdDUbj#B{-$v^S4E>?ZH*oDvbdSbs4n=YNi?k4Vi@et71f+GfK~L~r*-BZ zV$UnT)Ae#*@5dqbz%`1ktBL-}N1^P;7#m4mZ zyeuPce@{!x+XQ68A9=m8ekYOmUI6h7t+@E+8LHu#6sDIXA!?4IoDu!>vLblSU$Xfx zUSr{5u}RXGt~o7E{3Xv3HfJL-s0pMy=6A_ zw@#F>!V;5PKHm;@^KE~;!6KP1%AlMn6AO~+t$;VY3le}wocdss_T@=zj$MUNb)|D+ z9oPHkQw6Eng-{wC21rb6+;(vPJ05n#cWKMM{ccf#e$>A6^tC6x?+r;EVYV zwDEO0-;fn&CrEh2P_uYVTR+>LwuuCsxT@R1LF@caCFE5l1Rg>7Gt>aJ*3i&mQj5e< z`=W|-!p-bzBLN(#W`hi?#JAWTKYIY=k-DNmAjN)LSkMrL80na(ic0$~3NM6jxh`nZ z!Ccc0ho4RaGA=KM2_Wfl*Wv%N2pP7!gA4+~UdC zE^5Oiufq5GkGv5V{sN9OyCe%U-%7rUtaW*U2(2`*WhRD`uvqwxQ*8xrHq7Qz9?pH4 zmdecfd>s_|CC17i*?qq&rsl8ctO22OY3>>O@S5kCYp?Q%H_kE??@%S3#F!PSbR}xJ zR6gpe;Z563hX}?o3FcnmpOWH8;XF9(w@acebDYQEAXFZZOs*s;QtY@2jkDt5uC`T# zLUGNVoSdGS%Vk$oROs-NhCDNW*1FT;PLK%!U79-ilFpGTs=)!|U|#B|6a*(Xb@+*V znR}J_v37^`gjw4qbyZevW~Nz!v*YS@9EJ1#@KwlJ}!(sKO$o!3FaS{XJc^QQH3il zK}hm#`*Na}soATn+t8_dxK@3qF%)Zu_pcH4+`Th1hb;yQs-gzND3aUWbK!+$?OuOU zzBLUn10RV+KW2L#newDxC&C~7a5(v>`^;E`O3OdF#?z_dLnd$(#6k`C8%IeU*oE4cpv{d6!ARs}4=$PN&Wv8mU4U4s_3@ z)&ss|))$$Rp(ATvA{L{UF}!ES(pAV)pSOrAr^p@k2a}2hXL0Bj!YKo!7|)+d5>r$^ zr0NE5W{+Z(GIP!bCG^M8mo;UN4Tw%#ksdcb{5q!81}nIPbkm{a)=68A@4aYQ;4v z2|qe#nxLZHqi^#)n3U_J5n`bPm5yC~Gyb*8r*gx-Toah0=%~O8pXKI_z+|__?aE0B zl2SJIp-|r57c&0in%G&iug4#txjL5e& zHSSy;?=xk9QEX@gFX?EdgHX-EdA>1N-IHyKJZ;px#zbM)EQ(9QqUW874$Q*CaX^^(xanS# zAUXF7Eg`!(gRG^r>Wzg1t7T)!=h)(Td)At8>~dMc?A75ykU{vPldc`>&hN(ZMW=VJ zM9KI;u^HrmuUS_>?WDm}<-So#t2;}!blkPkPpYnUzPp!uZ0c@l(6MoF2Yu8{5h6G{ zm!4(RDv4+4$Q9Gh3FAqL!mEKf%*{-*@S(>*wih{S$pRH8uqW zkRDpi)!i}I_n514X{`i;>TdW7zG!}w$JLaqp?2V$NNF-3`v7@7 z8VbMzMo->V2o9y*0U>6AqMBa8sIf+y7jTCyo3}U`@cw6#;=dtJ5hq<$(14rSsian* zhqy)Z;>&waWmjq{P#HwS{D7*xifid8EMgHxO=rjRZ(EeRE`r26XfcZTTkjX$5ao5Y^Fh?T-Swa$BR(|yot~B*y3S{CHLZ&8k=MU zAN-IE^6YpLA3+MXR}K)sLFa%nH2)AuR8!PCV=Qd}>k4a&uT-vgQ@O}(lPR#dVrq|9A0tFxKwE;x-CdvxBsl^ zr6It~**e*lA5PLej~{*I#~!;6IBN_)u9(qE|77IlPdbn&Wk#L(^FvB-0*6yGl?6w^ z&kv*y`N!Q20?&w7Yx}Y*Ls|Jvxj!CySzSDs;i)4`UIQn}4DF?;De0dp)h*8a1qGv` zNe$JN-6L|P-1a#ej`0~8UPSe_2KVMP=+$rqm;mZUVe%SGeNqn|j*} zEZY(vP94+a7;)?gTSVi(UR#S2`)hU+@nxgRU&_nTH@c&K1AnD%-$3QGh z+yOCJxU&~JGfWw=q&nwolQ?O`Tc~6ozZILo6iUzV0Om?+jIJEP1Puo9L~d}?dFrHg z5N(nKO_)T^d6M~z`LP36_ZC8PDt%W+!>?uUk_7t0I%M4f9_)x{trr=v7O@bvROF$W zu(VQMwI=6g+WUB4+iEOvj7Vl9sXNLlj~m0Cd9i&_zRo{TvcL5Fa2^a=hwK*UPQbut zZA1znCC=-2FeVifq4w?S$DLd(bc&pZG*9G=S==x&eVs@ zEQj6?X?xlJZdXMktUzmm3QJ%rrBb3qFEXR*=S3*lRLM98Pd3ouQw^qh_6_mUcdq>lp zPgP)~sZU>e?H6q_xRM6ejHjsuL|~jAb>hB;mJy77xIU})qV1|gh$;lyOCzm&@#(Tx 
z1(&Ye-)L5x&&k)r$AljkCVmzZZzl2&co}ln^Lkj>oCpB2y{J>ElJQ&tGz~sUR*O- zi8FG!RBg(aDT1?Vrx-ljRi7p8Uf*avdfPvap$a$8EyEMrdA z*OnFB)zZS4z2>%DWEz?fepDNCC#cHI5oqieIJvb}~jdF>oe*!>&SJ@kV_`Z@8 z<$rBr-D#-Im7w+;{Ona^NQullods0@w1z1?{SlX)&(v?I6NDom@Zd|0%MQb5!EBl9 zhMY|7&$OBw`%4gH)q&i5BC}5v?oBqZ)8eJ&(!<1rV!aKGo0YNRnkh9R^O99$kr7Pi1%*oi0 z5~2aqrQ;mnYX*hJ|g~D8Wt${t5&?s$)%SF33&Jp3*>scocdmcIvqOQymtW?O$?NPYp zQ0w4V!=}yj_iCpwzB;wV^1{0698+}0;VeG#qBDCm@i7)&U+}LU10Ng#;UgemK)M9< zna+0`WsF|mD~ojjI*@FEy0ECV!kxTLsm&GlB`*r|eSzyn@+qRFQpsGER~9FAOSarO z2L+4h=!A+eqmr6V{j767OurO0OS{}<+2o6OK0H9QWA3|Wq3l5p}%m!1rgnQU&MsB+yP!hrkHy{+T0mHQKY2E#SWadz(7On+tRtC)wkn-6X9 zM=ooXP8o5qJzM53;$Lyt$w}g~8_DPSf1qFV1>LM0GOp3yfa6`veVW$avYyE6?zY zt5sQC0UY~}3g$XlWb^EMa4F&xXCvUjmd0jDIO~1({iW!Y=VcjB-nQ4}b@w@7F3uK+ zw)Y)kaLDz4JqoIsCqHYtjvfMt5nn@Diz9~(LgR8b%$DXJLwJVXqRe9oLm}JGwfJ352I&t+CT&Yjih}omYR(7vjcnbGamW zjy+_P6h|Jv%KNfx;9po6KE?=AII4I5uIg(}Pdd!!OPLuVWKhVN0WJ%)j$MEycCVF} zc*=Hw?IJVE&)&e6->jJyZL7Zk-J3jiUrdPAqdtw6?e-ropSKNY|G+vw^4` zalMzoWLa-BY9{+Zv?Rb-u~X(+`exr>2BSNSUJs{qYS@nJz-#wcbFA_(=R9_CyFN`e zh~9av2tvGrDM! z&DMrcJ-wRS2&Fs*YIq}Mk{Dl9Fo#EMLwYm|{BcEy$C2Zc=K3U)d!OSRv1u&2sl!2S zcy30?DK z1j97Wv4l-}D?E8E_gQG^Q9G$LI zAxkD1l|9s}qzWUaI4I-mnJ+|+lvgSSdVZwcD?sG3o{b2oJ_p;#mQ9S&UP_+7p#{;R zyLkwOz3z0~xqrGf@jibE8>ufmr>L3!X+{>kl5j$W+UDocWL_Tv9`UH6bD{glp-#iL zXnTZz`4M)vf>u1s#Ulf>PO$&Y7aKgn-t| z#uU%aKL~oE%I>cmJN(Hp3K!4vaHxWoQZXv4l;ICxNB7aO>7FB!*WFQT!&J-60(3XkVGTv+(DvoFY0dkH=hWfl*hku}>U3Ii=C z$OU@1S(a=1?D!x}v<#RJ_!+aq%n!4^f_{_sbG}S<2W{_}&h24aPbU80Or@=+M|m6X zqBoUjVZa@_hHdlLob3D=N{3vn_u|q1C2}8Z8>->Mxgd5kpL42uF16 z#-bUV0fXiNtXjJUZ3pbS{IyM|L4}uOKj*Qw*W~Bhr2^9i?_mUKui8^ zH&8h#|>!WqN%Xp;o+fA0Wd52)Ub(#1><;d$3TMsNz(%b4A~?B zT98`(c)1YmdzTi@hYSpbuXWzpVBN(uD=cj3ygth1x;mQwdc1Y*SfVl4kd=Z}EdSpy z>tDwxoA2h3Czq8`BMX?Flrrh|d53c1H~VChBy} zhuhM(FIV6n$*(AIlOY6^@eF?bA2EVgew{chr7F&WD7a#%Iq?_o3#b|4$HNK7WI<5g z;Kx*6{3zbt&GG(;^;s`BVZS?jiKc459;$O9?r%#ov+L>Uq0|AG_&ly}zo}6Tp_p5Z zFRN)PgWjOr@1L|Jt{G(I=l7x{6GZ<3_%OK>x)?*KL3jZ_<56K-Nr0e#dkNq#)!&{j z93VvT?sJqEocZD3Ez#-Mfr4%Ur$3ysqEkyt*@A+WMvd>I&Fc~n5$WBSiolcc+I9n| zG;fFva0Pk7YW9XlK|w(#gXFS|X!AeR%Z(quF1Wp)oxbzegqF|O<3RlR{m?7iZiDWN z;<0)xv}W@d&*T^YX+<3@Za^YCgFXsXeC0Ra4}eCuN6H@wguLp-nq8*A&(UZePsiRs zw8L5dH;C47CxX^n0sV$qmm=~TYn~|cxdym0?*qPXPL%yYaC@`fu>S^rFnw;}=I1zW zchC;f^7Exp9j|-ag5ECx!vLd_zWY>N82IGBJ5tB_Yx%!>0oBNxgXRtjHBZOrAy@p* zR9G{(Q7*vzFo(Jla6H?FQUmCDTD>$w=&yM+2n=SRCm>(HO!gblWtz?ntA8Z6bkTsi ze5N-FrMf-dKVeA#eqUN&pMqN9DiZ%@H@q*BU3u_)@8QiDd)NJEwxM&exhiG%FGfj} zsW^YUnWil>|4h^7yF{b}$k@TMr&Ezpzwp-=|AL5%n`I3EfSWS+79*B_Q_cU+Q>*CQ zfDV$ki}SzFWQ$1yh;Pa5>U{98*@(HBH*5rSFoVDsV0d1w&2RrR`Z^zEx^+ZuXX8Vs z?B9=#hS^K316A$6-5p5&_0SB(-+PT)9DOqDZ?L!j>JWxv&>gX*|Km-5Gc_Wx|FtdP zkB?Ez;hI0y0ldNYV1S!v%}Qb%+4?09u=~3;pr30}NM{0obu~bVx8y(? 
zy~27*Dm<07(AMvY^VK!mzud9t2OWWy8XNR>5@_|EvU%U4V?b| z0IqP@R}6-O-HvHZ3xDl|?=h{{rJ20L48nltrMTG{u(xGD&_n6)<)=(T_?Rg7x|fy# z80=;IqD$6zii%>@5PcoxdFx4TzlJ89jc|RA`ub94Ju$nN=U4X`8u5hp=s-y= zANuPI+I#fl8wdK=CfQ0t_xnuq-5zc70^P?OC~bq3a-abWp@;Kar^MI*KEt_5$fhin z+=1s_2_lUiZ2xYZvwWq5P7X5YwJ2ljXq~L=f9d3rj3;BdA!n#AG7`slvcoBPHo7#) z$0bTFL|-#Z@7wxfr$2)u4*V08tTEweu_MF5-m9x^M4dh|m||nj>2j{Vpg3Iijw<^c zK3M)ERhF~BT5hu`y;f`%X@Nn?$tT8T&@*;K06RXjoBM8z{F*~-oX8(zVvRBlgg>XJS?I&T0{ZR) zUu_JEE^|95DsqBp-Nt2MZ7LFW$lI zs425WH}Vu?a;k|kSkEw_KRVuL7#*loE1HNI1aFHX3a4_!BZ{kJU;vf-3SDPog z{pr2Gk7lMG$v%UXe8+4xcrI`tK$1%DPQ~;xWZpB*O+N^mcK5wh5|Hmnd`-8IaSbs_1}mj+4r1?gpi9vhJH+I$iB=X z=%LNPDPMOY!=Yycg0N?qwc7+uG^I!9Q}!z}9uC_<^p`{nbCC*czy`51qwu=|#>&n} zr)r{5IADPMeQSWAhPg3u71DDrQOtTUtN2X_{{3eFP5UN|wE(KRi_iGhI zRG|`hgXIy$|NoJc8r}awQr2{XXNC;X0fd47@DrDk6dXdNJ&?H4yiR9_6MpB?Tu+{M z0@|dzP5_Q3;TLg)Y;MG95pQuaC#*HLVrK+JAs zl5+}VwC~d2lY_?w_GmjNS_U3$3HjS_-IhG(D9;T@*QUlq5T?7Fxx-Nr1t^=QtjsRy$z*dO3!|pYcRXKz=*l zRjEYrw;yV!Rlc0h<&3S}XO~IT-KBiJAXrjKz?LakS~&2KX>zZyR;EQ1fOi@JSjgPL zbb#Ngqy+^%g_${ix4vEKp&UT|pYRG@_w72*1n7?cpV5!A)&E65@)&92{ZcP&PFr#BuAg5{caQXt(GIL%QZc*_ezJm;^>NRnH^|iqRR6%Q{QV>Oz5Q9R4#c` za3|1zTXyHESPlu?!)hW(a!=?uFL#rNS16p+*tH#SIhNU6e4YhK4QbX|`OtPQnq~{& zxY@T*M3W9`3Kp3z?AH%ejW5oWzU@;2VbGu9xNLFOxrB!Q#yvX9oIs@otE0h4B@n^U zHlxNe)8ue!4&@ZtCtTBlK7&g*rCB7QCw}*RYnfvO20D=K6`2q(rsEjhd8r|CY%w~h zhvss|v7w|v0Z{PIM>3w}We4#_xvwOGVxt7&-4d|a_&soZeC@bj6C(NW@T2RXQ|6_{+BqW{2j0fcfsGMzm ztB0#VolUXj2=`wp&B_N2;igm|rK*n3oP*iNZ5^12|DYGkJj9u`0EW=(^Q^eYE`R+2 z(}IjZ*fVF=6jTlqVx$zwY~-QSDXMcao2>?#yvFpAQFZ!sC0_K2Qc-YHzg%H0a3-R0 zg@tW-)naSPGpuxI{L=N1)UGp*QYvv>>A6+-%YKS2S*1Of(1F5b#kPA}mZw|h!H5uH z#cd*rbvI6mTqUA0yq%W;kj4jPb7ntsxb7x>x|+KrjBBAHtiT@CW9tL=Ym2(XWR}w>zui z85w$-PR={ajJF>2w|=TkuIDSKPwgqLl0=PKK<{v;$UYCBxoCHgjk%N7SjTfIwKKgH z$`f;*aKV0&^v(TV8{L-`d&1ctZG_`~Tm|Vh^SunEMaEFHk=6tka$&q^DbH$f>L^%S zuKgUDLhPci{le%;#+YWD*@=g=Z_nl- ztS++_6z#5=&WRI4I&L=k_JQJ8YB*T9NX^E4pAwHq%IJwWWYy|4>zl-+S4 zzxEjPamuZ+qI>)fEMFKis=l4D%(IkINqmdHG@8>{y6s`DF|9ivMlZ3y5e^Y5Q?PMk zGXuRcIk9q-Pi_pu|A0(Gzm#=g-I}$pE7XJqk%#v9xu9N$Z46ray5Jn3a| zj$c$4rcnV5hWWDcg6$)AC)dK|jMB9w!^;JT>EHR<;D$T1!%rF$$+RCt&Vc?7&u3QKrV{uJWkw(+SA}~z%@|FY@k24E~ z@|1f6JqcaxY%Ej9hA_>^XLkeJ6y4Rfa|~$Fjt1i0txm zX2TawWVVdxvx%4vb&_>ttgg$LLR8MamR%ZrETIs|%6UyDcEz=yg&9$GhS}@^!rhe{ zbO9sNX+=O&wX0qD*>K{fvBuBe#K2tXnXe~oY3=Z;)tpA>gAK+oBISDy>1{ssneSpB zqW$jW4@{ezs?SwEri+w)s~RdD*XJ7pVYGx*uBF`$@W$yXAS{jsr`2SKYAyb#1SU zG?KI1gd-HSy--C4vZ$=urBvDUUTB@H&u6_AGrzr#lvXY{u}a0^k)UBtDLZ_0vvst8 z8fYP{r=m3e1}wcxw5Hq5a+!?(U58<{J|a^2tLB4j6X8l~0+f9l9v#CJ>6$5A*bE%uCb4EHBP zc$O#blb&}@1;OY}AaedFDOiJG!o;r$Gdj&~hV=f|GqkM=RAk~!C~ z`+UnTI#ViA_Wa{jMefZ}eYQ&tlyVkhds5vlcXBM>LbqZD0xX4GkJd%5s^#4=p#4Wu zNo64Ch^w8}LEP%mvD#d9V`ak%%eK+lx_{=zG=Z$iuoQpVV2ZQcsdzk6pkZp`i$|_L z98pl9@wr{%LAkT-T|{@u2pxpdrRp(zqTyF%vnbW4)H{W9%hK9(s zvfU}+DYur(zN!k@P0l(!<6ORQqb;!=^w*`(;EE(O&ClyQ=TvP%fGbM{HiwLh(PFE= ztL}w+aJ{>^E7De&o;l><>MsLOua6)NUmC}iJo<=jhi&#SPdeTI&jQNS$-#R3s9L>w zvCf{=Fh{AHb=N1$^{SuOd-MUu2gjkhAj8<)++UPep^fX20Urp_>znB_F)*hZw6`;I zU(j3~zJnII^{H?uF?wmX9AVM@WiYe4*QIOLP~gyIxyKj^k$$f=^=f**OqZin-D!c^ z?p!7g`i1``qYUnyH_MRTBI%L#&9JeN)XK$Xj9cZ+*e|8M2>@|=5)Otx5bS}ggp5c;zby2>s#e&*I~ zZJNb0fz!kb4(GZHn}$p%0Rc8jO~SnpTAaw$&~W@SaOpJ`8VyDY9=QL7+lifY;huB? 
zt~2S9*j#hAi8ZWzB7M;*qxwmM@kO(ibF#ah#o2cn8cO+6cE8f>4a;NLC~zQMaA!o{ zL+Fle`RViG>$4`GN=ucNkve&UFe?>(J-R*-_6336Di=4&qNLM3x3=m6B})O>3OG2o z%tdm23u^~?o{>K0VZ;IDlB8pN_(6c|6S!d}@WcPM;L^`f4wa67#_X_OK!p?}bt%`5 zTf8X75Z^kS*f8$lkewaU6}~J~Wno|p#Y9{_o_g#M(F31e%vxV30ibvTfv!KPK5-(>i zSD>R*B?GUL1u8z~5Nf}!%ROS$odyk@v!tgC`1uQv9#OhQHhJ#yDVx*09L{{3OzR#& z3+Y6{F`3iL`_nPH<~qg^yh0}RJFCO&aZx6aJm>Bs{$)g=;X8jKZVF%pK-o36NR4Pe zsk+W=mJ4Rcy58~ln4;GPpQW^d{&Qx$%?!B~hB4la+nD2f>U@5?C=gGE4Y7-@epUUC zJb1AkWzXLjwHICcDFicw|Dt1DEIr46yp=$E(w7cTS(h-{Cn@&SpZ2@J{bYV`#Q}X+ ze|ZNWaRg&Tk8qiXxM#$Ra1%NM=p)~CiL+R$DPii<*E#Md=jg`P$ws@bdTNcdICK*7v78Y#wm~r4+Ontb;A(x<4SO#&*ow`!%rmb z_J***$b6#)a2!COIE=pJs15TXpws!tF-6iuz(Muzm+g6C;$wd}^@ZLnw-qsp4kNf6 zisMI1U{?X7g$3Lc%>hUnHuvN>;nG+QR%`P63-2rYWMNyY|8x_hP6tn6=+jNzk6w+A zAz*le4NIeR{968w1`|ZyHmOg^k*bR7M`@_yg_qFNzeiXbGFvj_rwMOD!#Cz|!9$<& z--3tSpDEb;QD1)1d8y!n@YNIyMF_ctm2ZL=CO!u}l;-!HK097Fg&Q(6;-Va>KEMBJ zy~oV%938&Bo&SyFGkD`>)nuTW`#}mTRWXSSrQ-WO@Ph5rxIgo|t9V)A?C){#M=hr$ z$$sfjE&vH@@ec@y@&ifsbGt`iqyRQ9p>K3E1j%^LDuEPe5>d>olb#^t`%iEmL}NGP zzB>NWE&lZYRW!_(024XefoqLOoHB8H#r|_~3Li6X{xy}yVql7zno$5X!#=VtrMbEH zahxakZV3Sh%75JPYf2%y=!wm;e*`Xlj{5;0Uz&v9!$P2q*mNXGoGrv(vW}MuoeATJ zdN`-@9xXnO@ABbEsch8k?f;ZqX1bqoyi@Y_#u@c0;!!sIIcnV5lSzSI(Z@~LB)mF| z5LsDV-U#Pg1YQ`flYbnrjfqbCvk7B|mygUQB=B-p0zU#5jAcr(TiZ8~38Ej{X2g}pHj#@fyX#H} z{Ab|bi+99agXy7gd=$+0_vgcwC8Sx89zsS=GYGzz@ZeRtOVslyN`mSqV}Bu&Ex-zf z!k;Y#r&O8If`U|0Q~NipvV}M(5-=+p{N!LS_?q?W=;YO3K#vdmd-niGv@MlBJ7ovV zf*y9eRO>`BhN$iWxWU4|=(3MEFKcu+g)gh_u9JL-Kimee2okMN#iRn%v9rRHp&{)4 z0bvw%#-HU6ymU4p58?`Au+YGatvB+ut<%QyAGAE{lf!kDh*LLwL1uj1&-u%qisBOQ z{5yRu_oI7uTYWFqYr&ZuKwCJ89|>qYM~;|G5KRbcUg_glDTclo8mlb6zFC27UzZnfRb*ga z*_4Au0L%~C=32g4*9FY~=6E>G6A~sr>0z#h(f5cRy)RpFXfnTVo^?5=Om9&!o4!_X zb7K4#Z*YeMSB=M%zs3S3qT0yNbe0aBWpfcrLLZF-^DS=G?jK)|q@_DWEV@g+>#$;O z^)}E!qV(s+Ix2j>dn})5z=gHC4BC+mTAdlZJI|CQ(H}nB1%vtwHk8IUx-_6+P7`9E zoky9tDC-#a{Q~6U5%yQuj9+cxyr^5Gc%BBXdN$ z`fQkPY@f$lXd!wzls@&GsA1Ft-2T(B%IssetKfw+DNa6%c(eINjaOLc4rYODZmT&; zH_y^Mo@MLRc6O@i>);fmVfo1G2E-DOAkv31X*IQudD&vb-uewkkAc=1TUJ6m;`Nl zmVGPc)uDR&eJWSCa@dRdHSxF^{p`$#njm0LCRDj@Ndf;i>@0f6T|MxB5ogAB#K(F& zCsM$WRbB>eT{6Qm?Ah$pD(3}XA71njuNqjz=95&DdD*@O{UmGWzcOQ?Yi-u$=1H6H z88CtA+|hZnvzWlX!O+D&a96bM?%)FQB)n6zaB?ye1HRtpA*MWefcp_b{poU#Wb=MR zp3!y8pINYD?Q(iE(>r>?ilz@y`e1H{8i)xff*T%H$v8r!52e?}E4Ht5`;uIOz?TZe z9t_|*7_Zam?QY2`IM}-x-)PO#R$nNIyj*~lQ*)VLph}Y%oLr`DA5INNJy0J{`k2v! zRohsMCsXOFhD+2a^=7ob){g&!n9XQ0leyIHMm;~uUC|3y&t1|JF^x$1p!bIn581TP z)o#}F^o@wyt~xVXxU0Nl1}Ig)hRa#1hezD##&*@Ie`0$9X=NRo%IAh&6tCUU7w*?c zgoJg3U|a$yq_D5IN|gcaZsCL4G6rU6tP*tNRfp*vBX~<{Vmgq zSffJdBUaphFhdaTrV@@2Rs$5KXID02!4e*Lz9+B7$Z!Pd*jRoyBZYo&S=Z&&@)U)s z&75N2G9R#O;(tAjaBKjjSePX7brNpru&|HWg<+pg(Au%#az2=5gK-_=4JTwZ*+Q~n z`rF);DyBnzHZSLm%n?jSazjU`WD8UiMf@Dt#d;rqx zwyYDBp~EJrg>tN@f%iGIq3h@1Tiyf5!n6Aw)lzZJhOhDy} z?qB0Sj^D01zvLLOM;Q#~IyWMuUZw{2eV^e(tdWEo)x{ z)d9U1dla`oxyQFH*?Pzy!3K85%Z(--uxtH0{rnYC@JSv4F z31Kxu5BcAZRJh-=xh*Q{OiDbt!7C&mPr~#?boH;}fgJ31eQ0GJYZRRf^ks#aqoJE< zRN8`QbZ>RDm3h-Gg~{F1GvTivCKE?vp+MFT zEtAI%Ga8SGt>XtACBi~=w*is2td<(Io!PY-yc3nt(thmY$lKuHh=0qRl|){gpq<9ZN`MXo_#Hr#(KZ~VBQJv ze|Pv&?47q=QMZ-ca1MAq$L%d5Gj(p%qkjUYDup5DAhe3RR99X=e2xv=cLjL&pzaaY zE0|Q-t5UO?cmI@)pU7-^if&R!!tr!G)+_fd=D%8*?*>a9v%CAoH;mSYPMmisSfx`r zII@Cq^4Hlir4CWG6xoyxLWLVR6z*O9WaIo!kmMMN(z0x^GQu7txcqJcbBztX(Sw4n z^k^SJvg1v}$I^I#IO+)9TGzJiiuk_%P<3;&BDX`|`~kbZ@lKm4QG24Omv53rjW}_z zA)doyE{ne|8kDi+By5fe`Gn#RR7y6REGtWv3x2M0VE@#rxG!ToDj})fZniTT!t8O! 
z)cg|e7J1EIG9fyd)k*!85rb>fHN zQXx_mHgfetgNO<~=Dl?nE1}PQjAW|y`Dfrd1G~mLYZ^uE22a*B(1I%!e1l))mwo)p zZn#N8j*2V?f!lK&qWR!Q7M)PfnvNBsrlld-F|Ac?b>o+Lg(;v~j?Ts8 z9tsn-k6O_f;0@MwKBJ|{jaol$_o8UwpUQc|Bd8zaf1b;qHU63W*t@qJ_+D8loQFob zG6G)({>+_{l<4pp4=fV5#Zqb%vrn|&()o(+p|yc3!hBAN-g+Ok98ku5vU>`Q})a*XWNkw3X*18((RvSV|hdN(q5m<0higQ z=R#g}p!>5v(6?VaS|}?)DPG(e8zW zMm+Zm#5nXI>Cymi|PvE<2!RML4w;3%^~`HQZuu=>!0U( zx`g1Jo#a>_=k5IpK|i^|n6jQcRRF2GOq=TIE=f3ir^iH-xwyZ4E~W|X`H}qNp30rU zEJG~E7Wg;E#`q0^a4qlVe#H%&Z7UQ=84l{5;Wh~(4Ab9;vEWQp7n# z-|oaZM7O8bvZSZq&%w8L1hX%|5d1klZjkCn@LzP=?XdsQX{8+Cn%RM$T=zx?e)X=K;C9$Jj_E)u>nR9mIgwZ*Z%^^#p*{2|F zX_BQxr(f0mZ95#=%yu0!i1bex=&e`PE{LDnX^0&i+gi4@mY$X+uelAFH~t&ac|%Fs z*8C&)6sER5FFVK2_v?K42sCd%n7Q@A&{%=H#`pMLY3G>pt|(bKVyU>BV)}$9Gvs7 zhopu*^sN}O0&&uo8~ZORMgs!OwSf_9Ur`t~`pokB0WYJbL)MD6s{PkA%*vuH^}&zb zxIvo^c9pa3&*0w#90;1TTKd+eAyho;JD051WRFox$Z?~|thCPb{7SeqRTzEwf;BTa z3%s!V`PoF>4{NfT)K6Uc@n~A}}b5yhPc(>wLz`I+A8RAe@sPQR=ez6#Jf5a`W zclQHn$x$yeumvx>H^|E$e#3UeM3C>s3%A?<&Xl!gdY;hP>E&fvpt=g9g^NWN2dCko zBZEoNxA(fsc#;y6Fv0i-%9cYvRul|lo{#@CD07<|O>0XAR{3RHDrawyeQB!t?$M^s z@u2!%`^GA3UZoNT68KnSEE^*E8o_{kcV<>cG9^4>V6$mdq5k7(!H z+i1;69p!c^Q|mK2W6V*uI;8kuu{E^Kc?I@{EA?ggtb5u^ySpA6Wp>AGuT=GIH8k(~ z=pXrro9jQ1;C{7bwbpz<=Ti~m!uzwd6wK7HTbdm%3PTpo4P~47?W8q&l7HTdNV#jJ zjY)V^GgSRO_i;H%Eosblk2PD@gRi)1emjVDdxK*yVlcJa>+~(*b;tJF(|RhAJymTr zbDPhV)25qd+pz8b{>h8u!^WKIg+}MpH0pPD{-gdaoB36T>r{`eAq@Uga%G4io#IEv zuDBa8adNCL9n&leNU>@)`H08nX{ekqPbpdKdIgrKh%Q><Zhy6pc9Su!ic=Ahvb z$gHdctZNn%GnnOU)UeF%LjD-mS&U`-fo%QE6aUqhEC~HrF7=sP`l9U!?FhNc&&mzd zg{GYgw8u$m21;a=1ud=d{f3v>rCAVT4ZW_5jmC9iRos6QFk5AabafeXv;nT>?7Uzb zyHyhwy2Z z#Zu%!G`ji>z?pU{$aHYLrn2?0OkIw|QpI?lG?-atHPj@NUZQ(1{+;v;s$}IpQ$A>|TBz)nWDbKFXt33~%|}vF zb&pdhmZ3fighGOr=YlaD=&iVRYcXOq>VtAxCS0^aju<4ih0ARX z@*y1M!=K48=P_m_7C7srk$HqHN#M$xfQMHYUi>uLyPg-kX0jT}{eQ-VMNF-WWUxR4 ze;}eMlNDM+yx*|SJ^%(DojC=Zp+Ic^GCNC%anuPqNiDN3hWH|96DbAq4i-_b8n#@i z_kVhlWR%YjRM70f9&>=DT-HnKc5;FJsmDZNA4G ztGw_%qRe_t^|7Ez%vzQFyxMS|0smV|trgA|!7y1r=DyL_@{PFVD z%99GIw6k)AS#cbd2mI6sed7oy#+?eKeAIQf@|J10{VVo-p(>U=fDvb9hw#|A&!5+K%a}>h;(gqREQY4zdHMiKeI(2UPhKS zX9?UM$=)G3=PzK=8t{?4T=itrx^Prf?w~?I5LT(;U{T)f`7r77jt=41mH(Ui+&Tjs%-sZ2zW$K3!x9-_lN|>sxdHxhvHd8(T7fsDb^e>YiMsH+ zHyCuroZw>0f5c38OC#WuJ9nhNHij@VK>Thf+&Cs0doljIzQ`M5@iU|yt#)*ARo=3^ z$~D0Hq3uL3ycnf|qSu|ZLlD0bl3sjP>?7(*$45ASrSr?-;=KRCP@e=63qEy5ci^$kc2 zL}FS-DE#1)zzYroLzTD7Ag+x9-QXj$$-rM3mw@D_AvTe`2#}segTH!2{B?jZneaY9 zGrW{ie`~=0`m7~6r&T5eCjok0tEXoD>-}nBN}Bte!F7LkG5Bs5d_2>U3BVn)X(PO9 z1^<^H?JsNsH$p6TrlyERwEjiKoawLyWKhGCZCdb&&gl}1%KwxXZI7$kw6XS`huF54 z$^Gn2U98ii&jkX5Q zD;(LdFtRk&|L_2n#WG>R-j^i79!k#I{yKk5zDa8>RwVt+wiBEd&Q}wkVk$%6lpT=> zBt@F-(*!dT{|KwtGmEUr^mqRF;5ZHlWnP_NMKA1|*sWTa0!J;Devra-e(poRG*$3f zOUQGTbIHJn^zWklcVTf)fLA#Z+;QCgI8F@}Vr@G$kQ88PkH7R}TaPM1_cSa(ISK{) zRNTiE_r}ZuGKcR5nM2hw_AEenl~+F=IB2rytENd`{Jr!xW)4QdQhFH~8&>!x$TpLr zZR)f?3%g&zhn64UrzK}`RN+i(cJYWg2g`!nApUvLF$Zp`*H>AaaI61Rr{ctZE}N>O zWgCm>rm&fb)2!;K?qc+j)h`jY+UTy>uLWFfGsoJ13>GpYSGy1UkJ{0zzrPDN^1t1m`InLA0Xt&!heokX&;(tr|Q|W?;9rX1H_0 z9uC_afblVa*(IDcaX6MD0#~m14e{O=L}S*z7Z5T`dcHi3)+LX%hEFc{UmT` zWHp1YkowpQ!5=IDaH>!HP&(rqJ~WSI(ws~>b$DeUgld9Qwy((^TBFLJaTMDLN!9c0 z%sRSk9J%LTS3-Wf>yr$__*E336{O#^e$W^13uwU2N34Dozy?-xs<2%W7G0#)! 
zQ@58?x^;e4OXk=ne~=<|%>dhS%A9pVRgEn zPvWAp`M9sTx-_RWWhzYvET-*NlM@7Yo`xNE>OB8);6f@xPwLu6hb+B@- z<|Eetc$GtGQJ`G#$9CzzI6>xsG*XXFM)e}yLD%c8m-74jHekYca==ab_D?8&d|1Fk zXZGiwmrJ}Qy!c*UAMgr9Ta49gb1LlH=4h?vG`b%T%PqKBASTV#n!d!B5An;nZJx6qZcgoBc$Zo--ofZe9@WPIJITn6n~q;S17k@ZJG6L6iFZ$#M+ zUC{1Et9N$CSRc(SCh-p0C;6GX`iG zVU(lk41CNYDRU@tXBr|?)96Quznl8?KWuNJCj*3=llY_B8dPWQWoJN?y|mHEw6Vmj zW~l;ttJzAF@Ho9xsu#N|N68fC2hip?9a?t}m9)fj6Gk{-uiuCKO5w1d>gK&Z%jI66 zKut27*h_m#ESPb4T?D7*6_nUsRuBkRd3M{hXH}F&O^dGwy(!}`{E{^zzRNi#5b;tX zfHH1gMK~;&T}U!4{wbNpYPgNb(9p|3IlF*1Ta;!_b3i$LBe$qJQI0si&R|pKhX6B= z2RmghIOSBk2?0j$ykNjslH7A|8um*Z%4G`dtU|n8TB&u*S8FH1QbV<*ezjy`{R}yB z(p43DgO_KwAcQ?@OJ4L5d{q?c!r{w2k&42-Ie|hst$bN32X~WK2;-HM(d>iUAoD(b zVO{jWVuQTH;V?S61T`zip@$R>U0~ehoU;li&2=#|N9e%>DOr?tdBl2^T8> zeCy#*vP(=#a_qtdxv;$%E0MKF=~*!cWGAM!6T-j%_x}d`qOkb?0l#j-IZ)UX9SH?P!^ab`Z-o0 ztyOG0hS~-}M$S;m$Rylci&XIQ;0}I4&#<99xmqcwKv_@&ZBacUVOIIsBTPRlxn8w_ zg)gB~jQBJV0%L=RfDEL?%|bXBhAK~QU>7tE(k7FaL2ccwEE*ADrhB%@$;V3DxbXzL zrXx#6@koEhir9{{G;$}kYKU9@_EWO3l!b$7B*)=X3P z83T(Otl3x*$~1)egKeOkQ-EH-4t`Qqt#b=IWdAj0cv9j@I3wfoXfXO{K}Kf#y_epg zs)ga_Mt(}ApFRF5F1({cnQCxD2UmYK+Ewa~?=>=Qx0^Cg#9Vi7(|{$=j!ww98#RU# z%UU&9QdYRX$q9wSnd})^N90o(yayB5%xrm|N5oEWoI%5*B?&uxZ#|mk-A2Fd^X}Yxv~OsF*Q4N+9af?Wyxo!ngZ) zBkV-QW-i+FlV9_vOtMIpqeU@(p1B=ck-ndNcT7wME zEi3ex_xf_VF*NzJ(RlH)$@^eOMp`@T*M8F*jGq@|^AM)c39sCk8&d1d6+2Z)7MqA; z%%>E~u#xzZcv;73cb+`GGlPe=SEL8gd>K~s>Ajr+EEOFZOlE382 z*)|#@a>6XiZjN7;R(mt=wVPYspP9|xsiyI!H;S-Sb+jB)yX30(cuJUx@ttn6_|5&l z?uW+0rLFITH=~qQCKfe)RDO^@mdxIm(5Mr^QY5DbD3@%zW4MkSx!>-Z<4+ z#~%1IROeRSR7w6*8k?9G$G4By*-MRnj0IO+5`Qx$0Ngr1yr}6v~-&UvQrT}ls{J~c@VJucY*m5bHG#VuLqfe zu7JzIc85sz&-dogIA9*VGRpPLR*5Pqz0ylBgTFp2c6fp5nldJXq-2tV*$<%4@%06xdgURNi_FnF$-PZ?~;+ zu3N7-DnJE%ca(cCqn8}=3L7^7+nq$z%EedB^RmHUWa-8HXSuooZ5Bdl(%2`351P{u zQEy{uU@(|cDHYS>)~N3MnljzP*oY%X`CWVZOuSlSWdjSL))Zz+d1X8Co=e%KTYcv-Vc9uPyuKIR^Yeo?Mvd9FC3!*!26eCLe{KK7N2Xvs zQIgP5{VN^uI?YDo>Q3|MK-Nq$F^jh4($)5=Qh8n^n{tL;tv+XnQwGIa-EKvtp;}g) zOyJ>eRF)lnxKHp0#Toxj{G=Fn;8Xgf^d4$Q-+01WDZ0>b zBR}gRk!lrMGdn0JR6_Y6P`W2@f8*T!dxL_xEqoC}L3~Y4UsNwczKUxXY((bv>=Hc? 
z&fNc$0;S%mRw!Zn-(>J^=7D33NKYl zvSW`WlKS19Y%jgtSCvoTYP}B-ETf4nixYO|b-BHy;Ib4=!)od4gz(dauu*7Y+SA99 zFI`6T7@8g6-D#t{99N%iZpVx>_nqrW;~(@lx_bNQ`3JLsbNu78*fzD*>AS}r1S-1gp}!i-TPxInrd2*Arr#euH(^rw>dAMZ1f)F6k)l4;qy1u zOv!9@ueE*Bb(fQehBRjOY6n6YA$Q#$4$K883EsH0>NOm0+Ln{Bu(%N59Z?^hoYMvz zLmgLW=zSUbGjzphzE(%GH4WRMm*Dn%8FqF1SB28YG$DBwBk?yE=?NF1g9dD})rJw} zsQXlg_U1uEe&*fA{s$CY8@&iB_1A)YtCKW^442*#Avz{0=r6FU6cLU)$>kK@WMzQJ ziBV*&{-~SMIcVRJs`S8L?suMZxHd5S?>6wyyoR0#BL@PI?X|bnNd=E+brxerxYR1NaXd2bK3IR7m;<6bn7qe=vIvS{VJH2Gm_WTaq8+}b zLOE^GpPa^1cj{8+a_x=odk}sthss39sr^8kn7G#!_m71I2a^_(t~tN-2;fXXtZLjyS%`QK|yL*R)$&(Thmd;PQiL~2p|HM?GNGBXJx#|*z_Mv!5Cu9M zZPl#(PgDJ>$#NS3v2_v7d-qDSg9-8_I+kHA?aIe`<-$Q(4;)J^!z*q#hS(6BbUFWE z@S&TbdR10vVqCg#T(2H5Ob=N_D<{@yQ|E1MmG&^Fe@h%%wt_RL51u?vonYj-MVzlT zvKkIm{`n|3&$?-0RCF-OZ8_IfwXmsw_8J?9)rpb*C#6)_2JvIbWC0rmbkcyDclyJ_ zS1o^Hz z!eF11m7IuLLJCY@PvZ_OU`W24E$!23lhZ_>|CmdTa-1Gg;b?DmfYlq1lx9AfMpETz zC0U)h-5UDREZ1D`xX^sP2ID)?!r7xh!};kOE%Qyqu@$o_NJR-c}csnMrb<$S2-;9+yMFE%=Y^yT@F$PTo7 z6en)J8;iWkZG=3W4M3FD-$iPUFE2b~97GWH9K9&l>7x+xC-l<^s-RuHTnw`$XC|fa zv*XKlE0NE3u*RJK<8(PQT8H(9hEeI2dAoZFLB@hp1FCeZITi)5@Z5SPc9y<&617Z& zH_(|lgQ4vsSd7J#CLG)u&ng{E+Oto=cSAZpv=3s6k#|^OhsXCA)1Kt9Wp}dHwJ!*z z)l*It=`OG}hnAx>c^6@ZL8at5O^(Q!Y00oz9g8I{w+IIm23m%eB)rI`JoVabvz~%- zkN?*sWZ&Mg{`}3C?d?IU!F_2X_F_Xnr@R>HEn+a z8;>8&Kz2plBUZuX|3+3_;F6{VA*ST6DiAcavZ|Z-Q%!WdZ#%V4ncy9G0J}OAPZBN} zxB==a1W;FzI@n91T%bHWaL;zxTi#)SFvEL?x6`>(({9F`Qh$IIM>VH0z8q zp-08CZhXZm0@zKG&e6BvfeI1sjY#F+0-HBYVuwYJR=eb61F++oOyCBB{|R{Us+YpI z(IB*Gk^;OX%Rcn=m#9KO;Gd6DeDLlnjz5SrFM0QO%oFVH{wc}%w9pbk@S8E|-{ayB z_fyKHbcgW+s7ya0jeic*-Q%uwgJXM}Gac`<#r*#FPf3MDqUocQqj0`ND!;D@H`#F0 zAkzO}4*| zzYZ;MN4S}p4;B6(Fql~T`#ZS(6sV>>13#+n#s}tcN2z|ezdN*7GF_lb(+=J+Fdx5+ zCvmVZsEAt(p7LQw$Fl5aZ!#o?hLUb=Z&QM#{F`=_m6fts!$7cc{fSzM1E+wKwzz7q z7=-@m;_Ax#@|+Jj0oSo{OXOokd~CS-ilF%WOOk2bB}U+9pkPU*;oPE2yd0n^P}YrJ zpXA2vkKo+GB?|7FQLlx;*@C+%)K72p8C6$uiJ;o_rl*2`GyYpQLAe=5e??493H&4GTjKZ6Ga;or`;zD8+W8vrtw_Mg{rwo-l#0EJZ*t8iIrtj6 zCc+t~{omCaX!%if5LT#6zSWb!7<+u^$2sQ zlG#ytw#=!9u+>zvkJT(?IrPYOGYP=wN=8f2WH~EEHDB+PDVmLY{%ZSXJs17PY`5L-z!a*k1lTsA9pMZwMo!_vJ zDOa=lQaSs-kh_?&JLdeP-so7uCg%8+j z<;A?pq8UrbGTL#+#t!M`fj$I(HV=@`El_N5Fn;4ZY$q2-dzqiKW$!1X+`6c0*KlLS zsWZKfny@L9R9HF9x6CUsp0vOC@yeOUkSG@5!8+RbQ_$VVT-7!?#UP}K2GvuIN2+8w z|N|wYM;-{e6>mzeGhKBc_j0h= zr{LxeeUKOaJnQQP$6;31^+>q zQ^s87)=SVR_%}Ft=wjpfn#W?#S6P4RO=UAb$1zr=9ETiNg!}aOkE5PRg$TSd0B8|g zaEl~Zcy1fluNmy`Jv97G8$c=vcKE9jJY&%d>$l9qP&JiRDu&s#Dt-OtV4iK)Z0&2+ zm$>aog0^c3nBny`>Dh|Eh_(=#U*%lhZQuTR%ZL3Qow6{J_nLwIx}1b7zO0YjUPRKe z$KpiJ$hXS!@uI^PY~Q;#zn;HsI#S_`t=RYEw6vmgO}pJh<8w_3JkFE-!39H#7MZFA z4xN0_oo=RmET*he9;UEc=x!zpy|ii!f&=u~>kTIv7C-V%U5;pQ^sGz?Ti-3V_@ow* znw5ISmb+A0nhu^QTWU9 zZ7>LZik*g*bON}l>54g1_~c-~xaAYZ;$|awilehyiK??Q%&iak-r?gtp^F*YqUlZ1 z3pUhCx;i&aX%#)ub>xaF#Bwt74MiA-dl*JRxDw#1fF zu5jGtp4!(e$asi4(8)ksRgm;ty@1ZwO3EC}i6@)yC+kcQX_pk#Gr0=E12SEjCK-&@ z0wTN&*4Vx_k_gvGK64cLSmV`_cX9(spP5}7@lY|ND}5BqtPo2vHC7Cbhm4ms85&P+ z?Fqgd*tyu)*Le?;XY9$0=Rv*97D6^|OoVsI3P*!pvAX-4E*nUIfNxM|Kx62m?CduJ zI$%;O{NKKc`8XlBWeX}460D0W;D*QVS2V7uAV>iX3u=!I(w4{b)hbtT6-7m>To@V4a(GPRtWdyu#easDC>E@3m8dfu0$!^zo&z0U;N~R^ zAbTFp07Yz&Hdu43ZWd(r#^&VFy>n<$>e+CG8t|ymqXPpkgQRiE_qjyXXUF~;Ut$M+ zy#2hp*|*VY;VLE!kiL`{`_Xt$Q4~$2^uG=TMMVd#Lc+2r=qlDA_9jc!;sE+dxBQO);7H^$nNwIW5i+5GgY1l%wWITodlSi1@)0lC!6 zO`7Vd@AYNdaZbiyAXm2Ec`Oaakl(Qj#1g0wo|1et|V7gQ^rVNzO_i}GBBk{Enw~B+D(2`O#1!8 zUgf&5YQ~Uu@*5Yv7`Y<0bl56u$gJY6SwW6QO9v(=_flaoE4BP~PPEwiIxlh!10!?0 z|J+n5v=;Stbp`s;qQGV`v67|TsR7PZk`XVleF$3?>88{*u}I$bD1H>U=>KbA5GVNZ zw11~8Zop_@4#-1D=5&2I+2zEWKo25;Zp7sl1ZfJe!i>TUD`z>TZz9tR`9yLH^&Ue| 
zy(PHL&iw{nWnHEoGrDJVlu`N{S9S1hg)pVekc~M_k(`yWhsJpi~9S_Cfr8n%-nxg>kSr_%Y$^!MFlI zHdR&|pR{UgtL#uhxh~I!55M-DpqQ&CW=^{kmv_^6(p<={YS^Sg*iE$O!u4@uXC|FB zmjL5el0LWTo_hYIkvyBiyIio_7Pw*}m4qddbZ}r8(Rp^&=}y_~=rNr{(e&wP?Tcv(2MqH7HBA)HQUUUpb-dwsxz<6UeM&7azpmEpT zrs%_K4V`E{v$T`Z@TrPuR}aplr=Re)1iu1EGgdO^z0fth5E5K{d#B0W7w8OcB7hO) z`rXd+5-5%ez@BY6d#OOYHVjj_F4;rVU5KJy@b}t>-1B{Dfh^ZBPpzymJM7dlU!sS) zE2kx#A@-SYCxJg%C3|}zb=!rS)%!m={qr3Ic{4J+7S;q0@&|siggYl8&-C{qc{jp1 z`u^6@e^D?NnfqOoyterP>O-#_9u?__vFJ3$40`qYCqa^{8!N4XPrXZKSsB}-X}N7G z*|-u(GFl2X^)?Ub#O{pFiZC1J=habWvecA~8|fgY|L7|eZqn2j2D0lqmp*-rnc()P z|vP}`WZz;otB0?I#Xd(o|)a;iklX!a{vt47KoAV-j@H*Uq~D%FDBX;O(BEptwT)qMX@2s^D&T z$>_R4v|a3=F7RVw?rWXsifN*s%+^F~af=?(L#Ie?*o9>Co!wc`n`=2Y>g*1P!|M z7%2@TbOt;uZ-tDnh0&+bq79y`1gxN_6Qc9pmA3MtdUM;cj71uI2{crV?dOLWyhSAj zAn)8*KBgn$1-1_be|(Y04zXA!TT`%EGQV-|Z?ZdDohON$aF2I2WX*2D_sIE^XE&lB zkjKxN7{+oPOfd_Z3W$C+){jO{tdF|5l-H$fYtgu^BL8&m?l-#`>@CU-7AxWEagj6? z6XH0QI=K4sSzHUtIh#>Ak(@Kc-xt3Rmz>`odPBulQ`v{x!DACRXMqFy+B2nO8 zH!7LCs@@mJl{1lPEF*CX>&jQo1YTXfJ#we1q75To@&urF4O*zjz!prkvoAWP#&GCT zVqa7eWuFMO9YyZwI{J|LncrDle-j*w0Hv$+Wi;9y&#@M1R-3h^ZTp7G8@qK>a(ZIE zU7bbsgxcA7(Xvez?bKwn)T6T(}Gj5#S)DQNT``_6b9%mI4`;y`7={pe{MX3o5Q&qEyli= z2-SRqb#+w3`yvA3%NKfjzo5W(a1vlGmDKW9Htf=)AmL#kDp8*%`ruSI$P=4;`c7s6 z`#id`qo#m$ArF{|6*UBPhwm+H`-PW4O$%Q%Pxo2eF6Hj$KM-3;`1z5YT_Gf9E;*K` z)7Zdrk%%$`<3y2nEfgyNbAe`C`U<|E>V=PKvsFs#@{(oPKYbYPk6X{8qskbgvT~8x zYJBs+&{FYnv(n?Z+3ASSqYYyi?KllkHksZWqUzv_c8>;{4CP|7y&y0=zy9+8BUO7` zJJT3)q;vD}x!nys-$4Vm&a>~3v-^IktQ2JVW+L*~-rE&U9vCZO03h~dMO%FkayL?( zPihTr$7DWiKG#w9b>=?)%=L$o`Qziof=1g~brx4iQhk!|il6t9ruCbzC%PNc2vAkb zk$I%TE6~a_epq}j1to1u(X9XLTPA{PFDWkA>836CPKNrAy^1Ka-1-fb)Fdj)rc))f zrX6+mS9GuO^h;;)SJ18cn%p3>qw;>YC(0o;E;KmB9rt5e;%gp(kDz@7knFW+THiZy zi?Ix;Q#jwzu`V?}82ei$cYfwL+72}vf1#&@u@(`o-JRO_s#(rxPCKqQk#veZQJR9O zs{0fm9(+-WCI!d5)^p?YUjS>J0ux?^BC~A>xzU=;d@o|;6sDV-3r5U^DtcX3ojLu4 zgd4>0|8`t>rm*sR?^zIEy>@_(BK)sh&<#qA-BIwfqu%STmuHT4%3JSSAl~y;sUB*s zTgJ0(?y*G5%zTOi;}m#(v5_XHr{p=rhBXp+!_6B$!SNgu!(I6&zcfnmH=_U@=>%>J z91V(d*Q2=Qw$Rj6hDrC$7IE(A)jnTRHRx6t3x5YrMfw9$YY*Kl?DOpWLk==#Fg&E$mT z^ZG?yo*XQ5QfyQWqcu{5$csDalghro7}?NB9Qlv{y?8F<9Im?i-+MZ5#d{#ZC%1OG z7Pm~NZZF;uvU%)p4A&d-P(bB^-Icp|5pzR-^4*?XuAtF}1*;s9#z^4#z9*+B3T=8C zn&;R8y7z!&(g5rqeFjcjQKaG({ohm{l{u*YZZG#D*b(|7Mpr|1%Bf*R^@YI(Yymd) zA#bc>;W+Qo+xA{2)##uRUFR$)P0C9?*@@P(OyZPF%`Rw z&La4(`H>^0(&>CwPV8ULomrE)$-2A{-Dci9DwLvXxQJyehI8MYec}8^MO{bws)UHe z-esLt2m^1djZlIrNv0Vi8j%sXZBAQRCT|BEoPLZ505C{-#1{wlXp;Z3O#GE+7HYi1 zfr29hio2qfO)^uofuA^xH)KWUiGnj^nw!}(XgN;FoT3A3zzZ8(t`8Z<30XEX7PADT zu}_>Sok!T%#=IhqOx!k}e494`$th!5s69()q-SZVU!hQiOx8=vIXjzTt5$rA0xLtM z3S7bESldN11-z`4TlbzN48W4yz-wsdy4+f^NY0EdhJHzG69;1M%?}bUoOkcE{cu;w zT6L=pNOi@Ru41^w62kECr{R)$$L2+Fm<#Rxm1AKju#;?9-&X3RZYQ#VQ=$`J-*F zCya-FadZxQtN#?!K-%p#SAEB$y8m7gQA_VWOZ;)az2-fUJA7{mL7cxiZHa|evD`_v z&&}P$--_5+qwr@rPsA!K8ZSB`JbE0TvTmWTEsfA!ao6RkkGCIrSFE^w>STU{9no}W zPKmWhnh7uIQ2WhuLJJU8NE7O%COd2ChTcgLthCr6WPD<}aCP^l-*8asK@3S@RX$;t zPmdw{$^X3<8j_k)7^)EORmFENA?hWZ#4KK#(f3D9C-2328~2L)7Uv1CYrNFNJqP8u zRMR$_zdB2Rj-#vOLAGz!20ILz^fe%iHgr^x$ciyMOCt|6k7w@tfLY0Q`jqE_jhmif z&*W>{Ct|*6@?kgOd-yxpTDxP|0;;4pQWdR9 z$58j_iD2QU#r9u46&Ua*dpYQ6Nd#|B-i%EYnaGL20Noz@3JhPOs)C4}afS9yE3-nz zn;VPz@Yb}IK(`3p2l{gqAFKanY+0A%gx{ra$&ZFytCG`hlf%KPct<}D9<=noSkt@~ z*kAb;LaE`@jv{^E1Z~Qet%3r>-A>VzRk>Gr#FtGq?lXMw=LWqm=#m z%cXKc_3mJ-30loExfTM25Om3RCqE~3=dMSm%jebnTGf;u3f?r-kc5M|ekNp}CWXJ7 z#}^Mv)^c(U4m!kp+e-;M%(^8U-c|0OweZx328j5T`{AK|{q+SStbyYu^5TVT{_}kt z!PL7quRbhWMJc(~W(KVcX74l6yH}<@*xEvB#}dZ{l5j`5x_bm`qMhgi;@Zw{`kJKu z>ksp%E1J#Mh+r>bm^fpy`-9IP4#h2`k@cKWKxqg(Dxq<8Su0UEusi1IC 
z$uSqxP2d9^&!x$;t8rExr&`f&EBhw`%u3#%18mw1L=>{$h9J%RPjZ02X^`3i8P6V> zGkZue%ro+=%+|h1jZox@e3=Peplyth@n9e`_7EYuzUE)q+O0VaXb30^GpxX+5gB-j zo{&VqlEL7cKx+KU*&qQ$iUMqH33su#n+L6(r#?@)9b%`98ky}PhTJhPsr)x_MNDOr zN--pWz=o{ZcFUFGgOTO*dS-&1Z3$?Bpn5Q*qB#c^q8=dY!)27Qezcm z6%{+xpi@NT^&2W)_Z`ye@`F^vT37-9BhhbR0B`@P`@4%k^Pq~4WY^rHyuHh+S?-Sf z82fX>ae@(ScS6>GdO*E5UzSxBTTe?bcy?DGjTVwoNsuXr0^b9*eQu1(8%c$#ddRhY ziX=Q?*^cah8A!>oiXnAIF z0A86=f3moAC-)E|!t_EgxFFVtK;35bcYvg)FV{h4<}gy}zU$%Fr}(V_H* zmNSlGpcd13H)AfPd1V98a${WCXi@A#)bZTzp6);~3myyh!<{v8>*ziW1+UIeTblbn z2N}a7K~|1}z%imiZ-WN1d=hWskcU?NNFq-=Dqb8tW(p&HCIS-jq&SROj zhT>VMCV&28AT@f^Kqfmg&`-JKs#8vzEXO+Fx{3)23#O zDkM08lgFx+?fYHjkj!4Pdb)cNC2$!>UR`Fm{ubfJJ$ZvC79aXMCj;#4XezxhBrzWv z`u#3iY+hnX&Dp`FQ5m@AC*!xaY~f=|{Wwa|=^23be&w*lPY&&@PB*y{T~dqJ^f*P$ zA>UJTkeCqbBN}sy5KagYe!gj$Ky{tFGC#+PtUfAd<=NK;o& ziGpZ~Si!C=kL7DDn}ODgN~VU3r|6tSwX%Pj08$5+K&JK?2gUY=|Hqt)Q4F&2UPbSe z75P0PE*x9U-snEe1TV3WmqW=^DB1iZgABEnh#^146TRLc_^Mttva)lNgG% zc^y>>;9~g@C;!q!OgH3{&0?-4mE zGOhHcjkJo6^k$+%fNqZvqMy|nL*DXYrAJllQNMWj$)~*2V~x4D@qwpzy`F&k6QJNH zv-G2WX1?qa+!s`9KgLiIqGbnz18Lt$?0sq5f4*rtrPud)hK>LqKOAX+esn(ttD9x& zD*2tLKE1@p$B5^Cr?(jUgx#)1%@gBP)LcKIvkEdr`Cv@5<~HjajSD-s~wi zoW35l26^!Cm%o8m$*KUz}Xg*)~o!-#*rj@=#*qa0@6Sj{=z8m4#8tki6rxgDiU*SW-x?+0LG zsl4%$?|iW6M`|ZDd2pqiHj5@$Cf%8ve%UVm*~LGpGuze)!_9=*bW~3&kCcmSS`V)a z3HvE|2Ea`e$cPH6_8J`#{0NjBHyi{Vv(IUmW&)3IdF?!nlrIsP#$rpn1f;10$$9YN z+~X7Q{i0dR$cY*~kE+&%yk7Gmos=14CX5}eSbVJG)?{CO3Pe)W;@z$C1c4h#4;MTB%|kpu~5(Mg&3Qw_&ot&q?Dn$ zi^ZNm@jheD!N~dWbj=;Mrfr~D)?cc03w6Tz)OdW&k_sWg^XpgzzIxu}CJbU|*Yqi^C%r|X0ef4Ba({xtkK>108;Lo8+Z%|*Fq}w%_p2y-f1()`#wf(4? zOrb)#0X6tK4Hpmkyi{l9xwmZM$t;xWH|BIUajNvnF+L_+YL`cy>dVdQj~XGhZU_V&xXc=BH(IMwkhd$|+2lTb+kUJabZBPK#SKW1X0Ez* zSqZDzD5AP)YIFF!p7~oMWnWrm<)_TPtOd4-hliQ;`$KVUTMky)b_@qWGJf;#)!mEz z6l%(+fF<$xu+V!G(|+)zvMdUgIcw2G!^NB!@!Hk?0z=Mbuc6NTVHbz#dln5xv8No} z*G|xETPND8m3@+GO~`{f<@HTFLtO3S#1`QG?5=}?#kuMI8a82QI7k6+LSy0fv)xHM z+sh#gUHfoWm6f34Z|~}EL?9f$s;chd`P$^gCqohoDx%0j8!$xCO#L}EYe!pB=cb>! 
zpN3VLO)9Dn5j#+3kRsbwuxx9I6wqD0m}2s_)K!lb8fn%SgfHEwt+@|Gx{7l#Lt0G0 zb+*T8H@Ts8ZOFJ&lIaRBeBW@;k&*4D{phRkd!+nN=)OfoPlAc~oEGi^egA0_vkbY0 zQPenj-Z<*`Azr}r>_{F_WwMXVM-MKHcidkFLq?z%NBU#uNc_=P{symZA$D9Em4ctL z5{9~X$AbXAHmkMR%KSlB9p+lO*vw*g6iiIm=0uN+-&><*Iv$m6fU^Ej?C3Vy&jFxj zvpL2U_F+6XE*t*13Zh8NyQd-*Vm@Km+DzE_#xgzj_PFZ+O$^)J$hqkL@RYyF&c)pe zx7i(KTD1WsM%~3hPZG+>jUqDTO?UBSO*GI6nKHxt3l8tw_88`z-F~~!uTx&e>W>$&P2OzAVsMj!ym=yz22XgtDlGN7Fn{iFK1W-OjUJ^?>t}+Gpmg1( zdvgu8wWTZ&wxZbd8V4EcBv)OjowF2lSq@m8e6Cw|P-FwOjmMqa{p@KE!w;)q+cTUN zG#)ck{%a@}DPI=`E%^C7s|SW4Ld|nBsTI7-QKxEev@|`tR>zT@WnjVi!ilb+i=DCTo}@XJ5_E^1!cgtygfCe33D`{GQ`cfs*TyWo zC*z32?KgLP>IvrKq6mfiKWhzA&<7$Kdu1dtc+`1$I&?>}n4OWvCD)#Y@659g%({Jy zyH>lGP*R4_%d#jmqqs@W9)~fPhBiVVd}_kofn#P*4JjT1?NbV>KTZAR!4n3LLNx6{ zR~6%2%$P2g>NdmzJ7Zd1M>1kDde}4Lfxh z`$lVXb7UY5yWY!09>uRY@e<2SvtYM{R z+qC>>zb}W&Z=1GwrZocp+^oGAbcgh)bOvi)=WLa4aT+%Kba>&%35h4Y2EknD3~lqF zpzo|&HVM#*tL-)+dbxm}t`uEPv6TDTLc!&u1K-TE(I?JJ8E`d>x^}`3Z{x}kOQ){< z0+?Rd@ug0!`dZ0H@9MFmCnX_xA2!Eb#Y9hkNNLsW`f?26bzpc}1HNR*-d|PK35!{V z$X96qcJCT1%p?kZ(E9z709n?5JXVr#Cj4w!{G3SzTc7YvJ@o(f8IK}3nMru8H}t1s z!ZGIB7ogk^m*!5pT?g$#U&~5Tmc~K%s;il1e(cP9e$qiNZOUs%I|H@9K$Vz&m^aNP z6j>5dTR8Llj=2S@CN!bcO6#-+9nw?2Q$Fu0R&z6=_(AI90Z03sljoGj@potKEKmCU zxY=pR!g&==_%X;R1pK{jlv3Q6&yF9+W>Pw{`iuISxhiTncdm>x-nYyS&neGo#YI!~|X_RO(Zq!F@*e}=T5yuZ4B&|3_}yPI7d2I00NGWC;PJRnvtHiMtJ9elao_k~~b z7>IM6@wRtCnpf>TInqEp+*!~UXumFcb^UyP(?;YC*1G!Myz`znHOD+#kjnNG<;cI5 zU|d9Y*`DuC*MOO-1%gZ3(kN{ru6Q}RlH3&oq7$;un81-P2XPzy5NmlC3q*8J6<$>V ziPZFe!n+C8m1^Qh&Z4n@8gteH2P#=lwD=Nv(Om2^BPPg?rTf zKXa^u?m?gW2smkEdSZJqxHo3QE2<8+le^mb_z_JF1_p~DlgfWd!`R^=n-D7~i8&ZXP_Qs%-A3CRg5zk^IR>*#IVy~}0 zDJx!N8j1dRDJVDK|Mj%2z)RYPx$fk+vbZh~`@e+uzr)VEsg6w9^XIn#&aC%Xawy59 z$ygQ_Q^l9snjqRUfhINBj< zvj6t&zfN!%Qa!*y8w#`K#kK?1=+fhEpNCI+~mSAIMb zil#UZxjG1tH}qS_BZzghJb#AO&zb3 zD9R1{-+->s&o<$1bHk`{Db+?LhJqKb)XJD6{b3jRONxl|m2qJhwCDjOvjOu)meEr-I{=V2yPh+!VO{@t zeV1gI(==H=YgJF|$r7gRcU?pkJAI@Q9jFA+@jl)k}cGERGa)gK(*`eEn4)W@c-th_GKkpaR%&6gcb*=wd zp+t~zFSGrZHkq)?fl_#~aw_-%ERL&;O@a?;|@Ex>&D$(?rI=hZq=PX8VZ=1&Geep-K*F7e$;;iITqPV za0IbsyJ7xAYmyH&a7xy1qAx-za|3Ok8SHn>D=x&hYENf59R4?zBV-3S#_R2gaUKrI zw%^dM|JJRvpjRmP*+@FQ6DtmAN;!^+N9s%bO=u^k{jlvNX`~%XP4QbWrYI|pGiTTA z2--|K`?w+)5XaO9q?Y*|5WJ4f5a?V!!shUTM}B2rz*c08NSq;W`=tHzDg5WvYz0QJ zV~ozElYhuAQV8_1J1qqqADe;P7Wv@>y{uo8GKcm9U39+Wjz!rW>>vI@qO_D1rT0}E zZUdtJo!?_bAlUn z4zO(X&IpVk$!CrD2Ypu3xgmi(D~XhWUWdJB4dQ*|plZOAE)8fvU@o`$v)>ISO@*Uo zSSLTm>ccS8^fw+b{acOaZ-PVKm)*F!-{RwY0RA|d%5luT{NLmcZ%l3ga1TDKQHXn$ z!~mdEh3_AYx?f@W-`5(mEPNFj;*F9JUo_ z|I+$L{(qOQ{6F05uP*Zcj~?d#?Tg0vZhd!R0HYfu=lJxR%VOfCSqR4Kmd2z^FIDJW$y zn6SbI2Ao?AENy*ma;&)=(daD?TlhkQju)TPm0w67*I2%f1+Bn3FY$?E!*j(oZGk(J znNFjp=k77y2tu4-o|`!v2+a@OT97H1Z2<^TB)D=r(x|Ux%YKdtkpLYFZSSjbrjStV z4Ik}(0W_-8Q^LnFo>EK9DcroRoiqZD%-id7zITKMIetTi8wm|mF4v;76SVDo@-Z)TZDb zW-8BBe)QBRkaw`|<{1_l_S_7FYv^$hX%g_i&t2?>*=H(ku4cI9mk?2^A`pLt_>xo- zWLBRYkQ_av1o&H{6@)S7WwEJ^jR|X{F8Lzhj;bmv_`Yv*_ms8+a4=$^i>=KC@eLhl zw5_BR7fEt*N2Pai)z^g<4ZNhU1Q9C6`AexTuc@7R?c8Y#%&jEr_3#j{3pKafo%Lx1 zKZ}u(d0(E0KPBDO65mlP*s_MZuy*=twV*r&YcQF@`V9H<{nnOetJ@<95q) z{WdVu3UUGVUIY?K23xd2LZK6LmG)nN;P8lxQtJAglwL}7kPu-4$^M54hEh5BJ`0al zM)RNM83za}x{zn}rThFiZ~yR=qoE!cI|Jxk)tnUEZ_q}4W!g=xOLevx?v2O>{~jS8 zimXEkr$3B$?;!kO7fK4|u(t?^jzvauhFV{>F6}bb0bJlZ`uQ~I@y8#0j)v)Hc3&HS zcN?z8e34GqA*($`eGw086&W6*^(r0B9i#D??M)r(x^l4y#=W{m)bKmEf4LsP%mkIXzFDiz zXw_pLbTymmes4GMZiJV!Yb0_|?Nke^@-Q^?$8|;@lCBmeM$8KVnmd*sPlr3E_P^Yr z-!$1lzcfwEcUj9OKm;>UIB2y?I@bcM+jnoy{m!KLfeM&p7EbEM1sT1{KaS}UK$ zb}Q@Q05-g58XoOwReH*q=C*DMK=0EK$jJJgG%rD_B0Hy)XUhIyH&bJQM|8iyW~!8e z)ZhM7E=32A4~l4^eJj|8mZnKH5EJ5P>2|#2n(F=R{(XehylbEQX! 
z(4#-Vj3@&=dYtXQhx~j=>a0jj_dLYCYk0I+x!4xXY58(mjZ}yVy{^!z&m&eD{_*{; zsc**GBJx%$Pm)P4|Gcu1e{MBM4JYg1fuC6hu@h7IDTB>kN=o9g(?IFjq0bYDG- zL(r8G6Jy1}ajftBay>-Vu}`pvy}SnTs!J>Y%&x86dO&eRJ``JH=Z(_vYH)WhUtkl5a0YSr)BEv?UkMKRvN?Om>_MoXMy%> z0NgWlx-n>Odt_WzE0XB_5rZdYt9AY;ueCVU37D)n{0^8>7Ke#$bLk`!S;6po?J9ZS zRdjIk`8u*%Ow{PN!{W5W=ioN-4&$d^g_Wvbq6Os=n{<({6z=xkeG!MkZ@`X-YMqf> ziR+pLE`|;)ZF$G^{f9eK@k7&Krb6qwkG?M5z!cZwZ_V||VY{2~mb z^2K#0s&^+*8@}`ID)HeE6b+Dn5yv@A-+p%-@`%RI>jqO&8gA=fOiCTn4IPlKDdzuy zItK|+kkt)PNQIiIHbnz;;x}iCl^DG*b*PP+0?nwWmzBPb|4|K{lfY~$j` z1jBCq!fYmti$>W`A=Dm;bcH)M$C`9>>>r4b0T1LlMZS&9aBuAd#AhyEG}2NFT_`!) zzCA42b~RqH+x%4ZD^ySQ2-tAP*CXKkh?T36c<%qsl{#Wb-{5>#>1U{`%%rVZFxt_A zR%#G03;CerVWdv-E|(Aj14kH=oVC=7j>emZy5g>`7$i3G5E*LmEt-;I5arubaZEW<}11`Zwtr$j8S!oB<>&K2gHp9|n; zng&1pa)fP7i`kOn!lej3>u+FArzy-VgsTHz<`P>XYZ0ujZiYd?Im5jT0`FKzan zg(}%cr%JtN-_tHgPDTkb=EJh+tjozU%y{bhAf*~CePTumvd_tl4=lSrbtIovjO&Jd z7gMs-*XIz+Hyv&(PW9e`uen}!8F!{WWbGts#?(lbzNaZSt}poV&Ep&*A{DQ_(V3a+?7Lrd<sF~2pFxNk;on~xA>2YzUq$DIz&7|mL;Y) zUTCwmnL&YKqVL+J;q@Xo;$6t?ppfDlJv!Ug_T4Rm43=(7Ii9XLT(p zW+Z&n_uN@b zP?Q(ze>T0sx1!nlim#)QB`GX7)XoM9E9(BN%utrIA4X&qGwiy_vAVO-aAIvR$Pi7h zDK^Pr+LPzX7-Fj%$TScnto=#L2y)V;NUSx>ncMgajq*V&8jqYw7%J zG1AXm>#OPedE1X3&Ewff0cuU7iOEdD7xHvuqcx}lI%THseT-UIwTDj1^7!daM}CAC z?yyr!0p{`ZIs(jF{1JtfHEscPa;TD3(|8dig)`LGZd0vaoeN3a&vejcNDtV@CL?2A z@z#*v>F%Bo+kJJn5Qv>-rc>ZpIv=;pf&++_FfjIb%Ly(W2o$)zad1@^JcJ|9Fz?Sk zr8;Rsg`TVB=;Ec)ausY(?TyWp!gSVY788-;%jNPo!yOv~Y4Hu<>#%aMW%yQjq!VR|GB9Fo2!O z2hx^74^iE@2bw=d52CrtSA5|XEO<#@I{8bHyv=(F28ExhJ}}ldr#4y#t@HyDEdvgx zX*X_>nXmk7h?_DgeU#K%Q&P?D_=m;XBg`^9U^$qm=H(dZla+`O>S20^G?JIqQZ~|I&$l+o!lDthO!Ma?9P+Q`l@UevON_HIT*9tjMoWE-QDUYl<-4g z?c^D=_HRD!Zi!vRzu^$lOhp&|{s!-zw~oLCnnz#Cevg1R>XIrg#Cc={SS_I3Oa=K$ zm(*rioe(W#k5|rG&9FLB1BKF;z<3jEbvz2F8*;@nYD@X}2x`{#WaC5v`k0*^fk@A4 znKQr44R47H;sZa;1YDCZ*a~q)A|Oo^^B?}Qh?n0p76Ug?FEh>-iwpm5x6vC z4v8Ltt87#YJtcj=?euZ9ups7Zn1_9HioK_OZt?N>#%{b3f_j}CG>tm4R^IdMn$Pmu z>)Y9(Qw8l?@rt&sajJm4ycT4EfUgt|Q>4ty~13p6-6(b-L=l8)Z;X&BXTDcUb)x#uT%Jz;HIP|r}KWFi$g#c7s)Dewu;EsDQ9GewP@3%8g#~YV^M7QMeooPXHDuf z@RIN_pSv;5<#q<0RF*xaH1TeDx~Ie@uO~L=*Rh-xELyjR%y~of?gj&K^$kk>fu|mF z(Mq<^!;bv@CKqTG;FRrO7y_(lD1ealthfA(dID(AfRz2}YHQ=E(9@aF0wCtD<(ym&@Ic z*9+eVkq<*71auII>97su2x9ohKpSalFEKO}@#GOK-0-FcLD?c0z=V&Lx~BGBp=&i_ z5*Eq|%9J8MrT-s64Tb;@EikauX~zXp<${s&zmKCgu+TpwZ!|ytB(!8=3ga%L4mV1R ztc4gSh;Z{Zv}vR)wXtt`_og{Pw5-Z+VE>cZ{V>C%6(E+*tUPqu&x zR|bgB;T>^fW5jT$gs;VBx$y|_pn=nLRPIs=)9^v63r~)=`(^urcRzE__(H#6^$_xA zZs961w|@yl|6=W|tOa@J8oIN{i-v)*6DNGYx=Rio2gILEE4Bl_&;QdENYuI1y37I; zX08J@iCA^hkN7p^ZV=-sIi|sENuF1Re4G_w$=l3nfIw_=@D7oZAJtr8YfEzSL1^`S z@>)r(j1Syxw2qCy5+OPQtBFUR`L$Nh8~&FB>k11;izw!!?;5g)-o4sMRbth(4qr_YL6)Zu<&^n&}IJ#Sy~CyA5A&(Gz9 zB9?x_(P+Q3Z{)`OL4=*HBK%33TJv|3SYMx~3W2F2-tw|3-iRI9)V@?`ia_ApL&etMV<#ws<}{iVaK>^zP+#j!kr zUBBrPBQVFp1EGbUrPa0HNMv8g^2__1WxfWTtIzk8@}4~*0qRik5kNlq-b?wi9#Agr zls-h)O&~lX4f>Cs^?zP{q3l&EogcLfI?a`)*WU4M=aGDB(p~=+FUU&rcrt66Ms$>k zt}B+2!I?GxCIOf~KG8f*_>2z|v+b#AFDT&A{dTj|T;D8uKNE%0tR*6H-aSIQK#b|Q z{^_;m0cSIqYNM z-pR;dCx*bNg>9zECLot3B&s=1M&|zkeR%H36szLpD_f?TYQ$6cATs(PG16$6d z^kLVS+GXxOpj0a)6l`@S#$OHO$vjL&gX}qw%7)(xczmH+$zH1}Ui%^gtld!PmU*W5 zOD%WE*V34eY~*!TYP;kq?eKWr8=WFjQf$K*=YwqHXqxJuJ@Th&@7KkKoy*wFqi&J; z8BX(uN6P=43Nm(vaC~`^W&QqIc#qUjJ#0TIZLuADI>E)NXD3LDJMBfj8VU=XE`Buu zN|B2dp>v*I*5MyZ>eZ`sJ$UY2_P%1>i+vMIzo~=8%^IoD$;0&Qcum{K zbUMbWP_1pJ~CQFGUi?Wphf&h2{M}^ zh2ev6d_CyG!rUBP61_2g9l*4wZZEVTVJ+!W9^_p;1I!^Ch?eJvm1UzT8uHfqx07;1 zLYxY{sjQ!Hy;)%9PeQBXDpS2lDyu^$En|_TF@C;viHZ@Ct2K8)qi~wR+u;WyBdQuY zI>dCO2jf&ODfsVRYuDa2jdebcr73fbas`_g~+znwAzDT7Mai 
z_tvjHbZe%>z7lyUbVq;bC0bQw$NbB%Zd!qOn9OJ}o1a(NAy}Npy;4+ua>6RaSM=j% zQsq|bAu~8TEu2T*>Dn= zS5C)@)Yt})o?{h!K`(B3feY+7O4&7l%`Lg39}jRJpIMRS=7sJ!Q2D72h@hKc+e8ja zoL*wrRQFmZQyCt4T`ZoC+G)iE{>1Ms?QUk^(cWaTjroTXk1|7!iseWWaL#tk^`@9| z$^4U8PDn`KV|==%#Q8#@bw6Z+O4;_k7aAj(hp+!#SYag=U91a~S_Li+hI9q-p!0JO zMamO48XQeI;-aX&=#MP2i0HyA>H;kJWACzSLIWis$gT)5UtV7+5*PZ8nF@}d?MwIX zX0g*9c6`0Auz!+BWgv5)M)D?*@Db7VL|?xDRFvKA79XCPEi?FzjWxQXr0*a?PnBe!{34DOxGC*jK6#tA zOKDqc)4v&H^`Sl7j<|4jyxUFBz2si)ax(#9CT0Vz=@ly@k>2DNuEU)PIeZ{vNrca% z(axLpifBwC&UCY!f<42Ns$EQt2LdnliUBmt#2i7i3IKhl2dJBF9rur?M*!bR%XQIt zvwy*TD=jBKe~ucr29s>q#)l%mN=Xr zm$1sizQye1u)ig~)>P&uHj+K7FcTBFu-qlz61O2zqBRTg zSq=ysvL{&jNa#$5V%}W$nMytQI-8cC)Ac${ehZSS_}=+El^*?PZe{Y=d~;>fPL+7? zQ56;j&xE&@W(dRzH_+9sP!o%>crX5YtPmp-I81FCrh7x|Bn1`Tb$gwU^|{pP=~rcS z??***_CfcM#w6T0Ut8yEM>+v5FGKAdeisE!CMM2xsJ7nXa;iNUs=#VoMPl`YTMlQp zqrP6Ayu?Pc_lNXbCWdgae|UK;vy~dNS4fO&3!|ac!?&Qt3=e@E7f{GLc0z9zX$}82 z>J_-G5J0}r9MYV~47_~ep^G1dzm(_0*&n(En;jbxPO>pl3Vka3tm7x#Nq62e{;XI+*h1M@&c`8?_>DiopB?rh}OIgt8QE4vm4g2|KHX+J|Qy+-fLz2gXbrK%jpOYheSeD+KcIy(*u zoS-)+3slC|ZcMJ9+-x~ot*u`G7V!m#K;`?Vs&^`y_9JiAv{7a+wzPGR?mRNwZ~ZC& zkT5_+p@b@HK9#a*I^aAbdoTf~3&X7(m4EldxV(VOPY1pKy+tFs zm)*v`Zm*#;+|x^pyL-ursT8ee3K4#c8FaUjm6HKDo-q+$Al|@pIaBtk@hAz{Nrz%~ z$SGuHx3q2dY)A>)UL-Q9g=sFQZ!<@RBY6BA`CYtl)toj(N z;%PT@`1n;e@kFZ>

-1^{8C<5UUrHCmC)cfM0Uk&N2bSHdE)Q!Yin>YR;!ihUX{w7q)^P?NpDuf^rTUL20y6k;f7fRfk4!y`9v(@~W;B!R z9(q3qied5Q-H2~ZI)OK;;kt)_x5?|9x2ZtQH07)Q{b&~}WgrT=zC8MSN5P=>o>r#; zOicQzHDj)_h4xMfTSGg-(8Y+l38Q<$HZcxwV)DH~ON_TGdCjz_WzrkjAJeg*`syd1 zeokB4EH4=!dTEyk{RTz|^jBH>+i0&M-n}CziNU<`U6swlF}y%Dx7mWYaU2b$sc0(ziWpv|Fq9j8foi zEqb?>_|u&zw@j+5V~ zrT#o6=6onkf^R0-dO)x;s+o-@QB_zjh~XqkI2@&Eyz?tAp}V7bFDm?XVp^Iu&wdYr z9NJ6Ck-K%=$AAk*&8t27|e~t>u0L z!(9m@O!SQW!Q;f<+nPJE7j3-y(1bb{pZG^fj`-xN4W3iWCzszIhdfL~MONunrgG_) zc0kz6<)1TH$K61{Z`voLN_}~Ny`LE*q(%gO7zwJQ)BTy^ysbS0k=idm_G2~f%T)q> zL6!8;zaENhLN{x|fUX5Hsz^*d)?E5SOhtENIZ}8)myomg7_Pba3gRs1sH20qs4wE) z4lq2bt|g=loP0d0-&egahM`H_eu0p8hr}-~E}n-Zgz;FO%|ye3qM{C)BM)-W^;-CX z>#{d8G4J<_y=hmx&4ZS!B?u4>$f>Gg)xQ;XeDeo3RpM#8eESsE9yb6OiTG)s#6(dA zXz%{q8vE{+w-(yUY-R&w=5BtPLV2Nw@0*)t4%UIPiOgTl&z;H|cZ*PgJu+aXOGLU(;uR+zXMBpCm2^!tMt(({3wY;GQYX2IE?cxOGb&bmSi+B-C8q{Qe_x%3s_! zw^kWZ1vn*cz!zwv2YmkA&Su;e+wKyw?aw`49yT4FK2y*$9?$au7!#CWM)b~jz9WD@ zr;@?_?~lNvjC{L74q66Jm1-*auyb)$yB_JB^}^`Y%{QF%5b_-S8Xu~LO5}HEoz`|u z`zKHLKMy0M@2$Q9TGM44EZQ*ubAKlz9k^(-HEWu|Aum^9)Q_3&bt(zfdX{!{yqz8G z(1UN9h)an62O_U;WmLD2;m6m{qU*nNPIZAcyXr6PW&Y!C%2WUs*$s& zCt()hm(|eX(($AINGxL#xv-LiyeZ!US4~4K?nl}2nhcB|XJpM}hD6g8t7+`>j0k*7 z#r9P@hsmgnIkCsEk;rF-4WkiJ>M4IpWO+StZ?*B@!oB#l-h1ZLq+PfE;G;Y3t~<@6 z)58Nk^Ed-OMNC|re@aS9Wz)FA1^_&aNf%%KijJ|97a=aqqnS z?`%aZ*n3( zp6Q7mB;H6LL~~5X9(12SK+9j+#;-O?H`@SDbOT5TB;t3g(4Ge@iSVhawijTx4~vs} zi;9T&S4#hPN?cCw&e&>p(@`rS7Z;b|+3O@3N`T~PVD-0j%NUpK$sd;2U0q!OrM&YC zEZkg8%f*!Df?X#{Pv_rWP6~c#k?P0TQhB{n^dg7V)i* z+15y!<-H@w=Yi<TC>L|agY$Wq3_>-olw2qDr1Pl!gjSBR~ ze;oobgM~`@1Ox=Uwins%XPYO>PY=sb&yh-*G~U;?wziOpBUyEI{1i^>tehM`90<}| zZP1MfsgT<^-x-gb{af*98n~3M5{=Y(*LWJr%ggJTe|x;r-cv?80bo;yZZG!u zy#Fij9xxw9TwGj6-Bv5BLr&mJiIhZ|On$kSk&=>9Csl^m2X}WK;Gwg*_}T*J_pY@2 z-`s$G<(?iNA{WzQX`kuU=ls)an&`k(?PB|piEQj_d+9H&_WJ}47uh-a)qlKOAiz7F z%QQZP1Lrkf{kfG^{hb^CcZ_Mwh~)FBe>q}oeo~csO~#GXR4YBdcz-!^2<`0cSvZ|F z1$MOf_yYe<7|jvE{u_B0BL?-4SNJbi5*6hY{oB2P4o3eUUbJgtqEHD1IC@-mI^;40 zheI&PBZjqmc8(ZNd)LZu_UEgWC%jGfU6^MUBR#!*y2^uZ)|Q&;DuE4)oL*`MhL>$% zFnERnFucV7y{nm+m{{4}4Qpy@>JVR4R7EA7)M@OBfX%Qky8K#M{J9Q|sI5p*h=UWz z5{cB$%zcM;7TO6xtC#N!7eD=wAxt7oEg&km)f={&rGxTF*XM{0{GL&S? z%!Ey%qK(aG7Z>-f1p!oOUOn@hN?xBG<^JptW8{8bj#0E0@$>?}usfBdb`syxLaCXM77rS`^yFz6JwU-6-b;W0#KX9K4x9htba@LQTO-xm5;3%I zBoj6qb|(shf`VQddh(?j4H}M?tg6@Argbp%*zn!OinBFs4?tR(sJOZ@AA+HDcq#&# zBGRT81#zO%Wk=cOB{N){Rk!}-ofatWH|mp6bh*SBWFGEVrtT=Uh(5lDMe#b9rJy>9 zH}kxwYQ6VJEbruwiBDV(d1v6x@`zN`?@@)-Y9losLX`2rUKn$h4|1(5M`NTz&`k3ZcSu)R5={S1TsA3v}YM!9rv_{h*EYF?Q8!7oD>_vxoxV@+~0 zftI$my{jweOcB3JL!Q$p*hHGFV+f4sMhI7n#5W?YM8gI@(2NnSF)H!AwZM~Y~>~&5W1xw02uP@2H=U^`vmbN9@zdNYR? zJ1U@7S~wWs%!1_JKD0sWxI-QL#ve=>&&DrU3vFv}$_Nfr%iYa&U~VUAPdMGf!l51y zi260DIjlgZove{i0kZF@8ew$6EpaXF4W5P5*(;d7Hm7r4b{9YQKa4e_) zS|^*tq&=*rM7tsO2c6YCU5WM`A`%uQ=Jtlg0-@ixolD3;t+@XeJrcHBc{FG}iFN)? zK}US9KDTo^?zrJYa2uq|6CVN;N{{fKEqK&76O_JW`1jjQHa7WC)7&xhZfr-}{}^N8 z$35beeJV$s^^YLlpV!>b#eA(qbFJ&MUMtI9l66ga+IJL7QMho&QdJNcLek~dheL|y zQz)Y4Bsjb%GxgsfLj0_}rQOI@w$*ZqE~fGS{0?A{k1o zyqj4lW#7h7p3%$~Niy-5iZL?YG)RtV_TnYd%1O*gWRi3*2dX8!nER@9g~=L%MkjUi z;#etSf4|wQl%@RoAv?sgAr~@VSf0mB9$1LAzlp;f)<+YXgUda8BlC8LNtUAHu=1lA z_$|YHMBFIU#1E57TEPK*pKMAQ3VZQ!2e-(P_3E9QLeVJJ)WMXSk!W30%DYG&(MaM+ z?j_DA*$fpzam|{e+o>NWahyV73XoKtk-QDxygg=6M#sd-6hxz3Y0$BwO5TOQG3#K2 zg8FMAQ?8O*5BF42Yw|rxs7#t3NS<2p#C%;7c4(7}jNt&HME#0d>TL(-lnV+}pGj1I zKtFr1)z4cy0tCZSh)+Xkk7;q)*N%wKU1Gtkv~4FmvhK^rywtMfV@4fDVQ+Hbe

nSnQ>f9s=l&Aw(W^TVt0978QBPE>surFZvAO|&lv=U
zyOK)Dobe{gRr27054FaVbHpDSqwnUm5Bf|<0zo{{ohq|pi=}EbYkXR){
z!{3~CYenRLx;H_755banDELOpv?rLZpdBlHyG%r-@}NkT>Du${UWKvSrMJA8`rTcc
z2=_=2Z^k@Xh-SK^zB@;{Kk-(^eINCQj6oYtD!5W@-J*V9o}iy6_BmE`IayQVcHYYy
z;d$h2yXG-e#
zW{WAXSCZrsjGml*7Zgpl+3n;YT{$ugSJ#M$C?vI|kn60qoEbSi)=_F-0$#~n_&tA^
zsNzgve!rVQWAsCl&;n$+KjIVCG}Z5sr*cz6|NO&5g`5{?&F8o!xM`4J>?0*H#(rSzB
z7g2NzBbNrp^@fyZq-NgGm2x{{ikC*J9mn<&q!!CMfj5v+dS89c>NQAcbTrjt(z|%j
z5BUv>CVQyhX)IDwaMsnp$|s5&TL!8f{5`>Hlj?Q(CT23Hdbv)1$YLrf))5Nzzoqh;~OKh$MHw
zO`t(y!>8AMD7h(Q!G43d;fMEb927zVn}^Rw=wFW7L>AFeQJq6@cKUkYRul6
z=)p_FIEj4M$PRIwIvHtOEt?_C4`J5sG@Rs5!uJu@))yytUQjRYbJTOTJXwZ}5_du&
z*wDRC=)>Ei*~E1XF5!j5LId_e7~DfXraQ`rs%(2VSfZeGaeaLfch9rFLAo5fD^@aZ
zzc43E98sBl>JQcByCCax?JvCf@x@m
zOlBVh6*A-pn6Uae86obBDP4D)-#$a9uSyzuX?)O6cYf-?Ln$PW|mA(;1KXbw1B!03(2z2L@Xf^d)T=CLLOOSh+E~DX^
z7~3h^%XGsZ{%eP_sZ!XT*B(o$uN-bUQMn39n!9=0=Mf>G{)5;@B9KS*#sC-ahMc8<
z$Tn-d+B`$gJ%bDF0grjq4Q;Qm6Q#}3Xqe#CIZeyQ+IBb3)Sa+1~O
zEdpr1etz!FhD|D-L&palRGOivs
zO1i-W4#r8=$vSPbk6muY)Ljv8zJ$7FJF=cP+=}7MJt;UgXLSYtYJSy$V`NufP+`BY
zn)RVdRdfw@9fU1n;DQ7YG?$Do1}uPQnN`updR0PoK$0!OSmi5^TI0rc_ywImVo|%T
z8@8#hFmNRVzIlIcP*ANqR@ReQm#Cw!ay1|th3dMenrOZyDyN1AuUZspBB9#R9CPn-
zl65`GuA?`Id7+r(tld@Z@!sP!?Qo(he$&M9@Yn!_Ez2R_fIbcmu%Jx!57(sIujwzMF!KMzuUmBVjV_)2_i5cpDZQ!ROJNhJY`
zj}hplYhY=slE0pbLC%a|Z9-PwSW-=Tn!nhmdAY0@Zwc8`k@LB=z-djgbrrDe*A>dh
zxwxDbOl((n*gs|=Ky!DXl1|qg<_jL&*|K$C_@)nAAP@uI5(3ykW$i8}e;!wgi&`f3
zHOI)(mH;Iyjz&)eI8?O+gS)y*72(~2J<{C<+Y4yv>!R8Ck?$s7pm)B94ZSFxe}Br9
zObddlzv+&>b-{cTUaY-n-g9~=GpIt$zK;II-9b=1rS`+tU?<4pO-Rnug3Ll%dennW
zYgcQnzj3CpGjD%6UypW}t~_N60lXX;IYSis-A{}acT*xs7TL_mg=MBUgdu9x{>E2#
zoO}rh38FBQRvrS?G!hr?WyT2Aer8!PL|aFjSY5vgzU2jtiQJbG=n6^FqBzSO62>m6d#z9pqF*dBM=9cNX_HYAb-Sm8EcBjo_7XT=
z;6Rqm25S7`QFKb@qbh_tgB1EzkezsN`Obw?%)S516^xOVY~qBlJx7Q{)>f&}LDkD4
zFE8G4(u|RDCPoO0?@AxzB{xF-ckha*k`&Xb(~WNahij)iA;GwpgDR|!5pWaxTTJs&
z1g_>{gO1fR;2y34ysHj^PmkV|&lfp2#i_om8@;}Kb>?fW7ad1|AoR2XTETTxZ@6^U
z@qKVMiRi$*p1nqFlJh$Xcv+&=@|*W*hrh*)f$j;|=}H)~(yjTC?eg@bU32$Z#ZZWFNDXeBKZQA#HY@I)^sdIbETzE!CAw8V_`d^SO2kWP>iI3bQEVh|p{Ibf+?E<>AjZ#aFyOwx!umG0WgIz(u6tFlaA_5$_m5%ecjCfXq%X
zQt!Iy^LXi+IJqNEDE%;pTryGT9cRb1+A>Z+cCfU4PaW}}&bo~n^!D9_!gnp3S;un3
z=|4xz<(ta|_G*-IZYY~mrl7&vj7xZQEYHmMQJK7ov%_4mm^iik)ybHedx?2WZo6;k
z?A-OJJRhrlnPQw&aWtG{<>LEl2?GMpPG7ktaH&uE3U3d^9pifZtuTIm>hNBWorxgE
z*M!bWet35S%2g7DJ4r$7DAF2Mu9Vd{Q!IfAbeTqDvT9!*B1U3JoEJpH+?WXR-lX@%
znlWe|=wco(q=p`0>0SeTI
z9@+Y=u|`j^KBT?y1V!}JNjht~UuB9y<9)1nrbunEW;Sm
zKsQp4x7wusN|Gv~;lkoP@`0oF<~qP$yqL6&D^MTRd$M)!t~`pO1i@FSriLja9rpSU6<&Gv2T;FoF_(
zE1sH5C(?OsZh7Y_`k7%FrvAztM-7_V(>5kfc+?|x1L5xcs>AZ9J)vJO$8|zA#pb>M
zQD+KEb{c4^L9M$I>@agP^>OOK2IFiCY%tmszcDmKl}2xQS&hE+K6B~I3pnT+ah^^3
zU+Z13w}O~l7fg`F)!h~e`p{QqDtq`evsao@m{wITgv1N2GQlQpeZ+21F9WBpmF)%L
zEZZT6o(Qg&>s+)eEARD=@jB98qQQAUQ0%HASp8PNytoW1ihk_i3;e8>E%!-rQ*%jp)HO^X}trKfh^U0Yyv
zeq_toq;_1iLUjH|wp$jF<1I*_=+i29XZnrmVHM}%MfhE6fUgFN6C#)s>0;k-b)6&V
z=Lmz$%|kUZosNi8m3PI~JI&aXP3OV-;8YcoM$4XRWC-k(l?AVVT#7-`LwaY0%4`(N
ze7|#uHUFVDy}CDPGuM3Cut%v&hqDgH$N=$pmzn{obFDx?VY$5*7i5<|$N;i4J{Eik
z)p~u4H&5h#Vf}D@JxhzobAqKi2)Aav83fkLptF%>JAG%cL1hHOk^V-gyq+6_M=Yga
zhD>vmk`nmsK4YPMwJt7%2-#WJBo-4_^0z#BQuW(qcjJr{n`Q%N5+&im&rWj=-rini
z%qtqFRz3`o^9pw$raW#JujKb*FGvgM-Z{Dx#ndH*q=G`dsH7EZ($M@hcn;R;FyULQ
zu`x#Nht$(Q-fAYTKN4@_20TqNN^fpVey|(NqfywL`1bx6>qQj)`Y?KO(Ino*3Em1i
zRX|pyc
zn-}#~4XauuB{?|2k2X@Fi)ag*jnqA?6|3K9@SmLQ&7djx44e|2BARZ3BHEGInx=+K
zr`W>Pu5<){1YwUI2jd=B=#p9p&G%)Qn#7zpm9Bp$pcDA>UX&B@(Zg(@5aV|DrO>6p
zMTrjfAy-sIK&%S?Y`AqHV-CY^_ta;`p0`jrf?FLyxr|2l?SbE^JnUDS?E{>Qr{8!v
zAt}|`Y^hJ73wW?+ThA|$r7ybpotxLOqqKM;l=IsG5heyh93LxRqOC)zrIG4KQs$iPWsAxqtwVP7|lO@s!F5`7jQ&f&AZe`7HoAj1oyQ4EpwFW)j
zRiCm{hNaG)+_NN*37WCcN{i&576R)UH)~kEZ$@JlHrum4bS5%gHQCJ`&-Tqqaep3T
zKl~(sy~!aI@1zYFQ~-yclOHVHew|YucVU#Bs>669h3kBcb=nv$+uRLz|@dr5RJ?kN8*B5j8?+gD1q7jw|aYX!PbK_Y-~BYMS|WgRi3(
z*&416hpo9hPS`qK=AoE8n!M1svO6$S8Lo{L``U@1NI&?@wPH{GY>z)_fJ4P^B6>|+
zNw0`Q&>?Say(EbIl6qFwMiYiM5^`lN;YkZ(&p9_P*u+ES>-Wo}a`X0Tml%Qi0usx6Z5t7b9tWa`1K$%b0I?wufqph|jm
zVB=gth7g8)tIX7v`Vi8qqH49{+s{PcLd2y9M@r|moC*5)?iZ_kzgad`osybs@z^Im
zRZ?%E2W#+s^hOXzgzA7d_j>QU9E9$b!etksOy6*-S)x
zqPOr1KLUTD8AP#Vq3Q9Jwvhkj%KxJ7y@R6I-i2L~ARr)F;s^*Rl0kA%5dn!xmYj2z
zJme&jL2^{0L;=Y;4VghQ3^_{98D?&y+kM=>eNKH})wxx-ZvQc$Fg;E8TC3mnuIGI^
zES^Pg`19D>SXP_^3k&Z_|DztH5{eRq*e>%f1)YqzR|c*$?v87G7%oQ@g{7;)rniw|
z5hhh~pSdbG)U17k_JC-=syeJ~L2g;ZKeAgs<{+pdO9s1T)1J&-TMOzL-1vj
zL5!{h>$Sze!sPPhzKsuqHYQm&dD}_PHl6Fv*jU_;jf~H-S)}(USn@@|e&4W~9SD~E
z!WxROKe*k~$*`8jGNXrRCu0pEQZS#Yja28XN;;p8QZ=5U$crP7*o~c>dugm8%*abfkkd+qxO*lhYb&+pEa)CC
zicEXg;*>;9E_Q#5N*2Y$j6upPFsEVPub5mv&{K8t+2BHghn@wO;^hrI@CYjX-W58j
zKF@qcLrsk}Vnw?mYhgym$iz7ogLzjV*lxRbQ;T@+lA56%l}S{y^W|j^cj{#D%ohL8
z(D>aO`uqbJ)BH>HJM1S)c3ByH6BBtG^Uv0NdbrLhg^=&yE%Pu2j8SUpS<7N|iEaro
zzdJfaY4(HpPLVJl;bK839!@0t#h|DlCEzV924FacVtwOALnpYSwB5U4S`Zs3lW@f{
zS{nY&w7X5jWllJT%-OX$8%2bT^F+h-7D4LWe%p+)c~cAo^`BBLP0=Ol#=gBM9G4>-
zT|+h0st6l)+=Vd?9RKOzr%lK>XW^#vE!C?AZgs8KP1d?y^39tJ_b6^PA|}8o@3gQ$
zQn_)8`(DfG{Jje{R8?Xo17+JUdO_9BDTyOyijzvq)d@<+xdmsVGSb9}@9E_U7YAYJ
z$nsH?OPEB_2fbe`FNVIVDW@hL<$jjzL-mc33@?>8ujVlGjL!Z%SjqP}t+#dSt2-63
zt67%e1TDHq`#_@c7nN^2sxlsQ>E>elF|{9_52dHK<-DnbuyIX(RQ+B^E~oW9s9pm_
zx3yE{TNZ8d(P_vRp5huuh7ddvv^^tJgEymLQrKIo2z?&n!0UWbE%CmOvPw(cS;~jE
z&34NykoOy(BM)hj0v
zD$hK5yI1!F70v@*28^^6WvwJs45WC5?;!-XNH2tcq>cPG{y47_~%bc
zHRGU%vptU?=cvl*R)#Y{vFt}JIR>>s4mSQkEp6iU)!$BMN5>1_
zA1HuvP~drf`H;D5Mc=9NrRXXlhW^U@i;yDrorP5ps*)aSz9o-*eq)7yuJuVy>FRKe
zC4rc1!gIoIr)p*8m}||442lnfLHx6A8)bx;PZDo?6vrav{yn+?nE#ViFNz0O#Xj`w
zI9G0ZsM@?_65h!eGOoow9U2q~*B|J!mis0!C=%k`5=nV;JiyELb#bCmw-lkJrJgI|y2m^Wer%1T)CU>8p1XJXlro%f%D9_U0lE(<7Vf
zBlt;)&tm@cl^Z742k#zqU3wnbnwZRDG|v6nPrFM|RuR=YzdP@*Fo$Q?g9L
z5-O5HYP`(IN7*oC86k|zdAZ@MD^kOlW5eNd(k3#oW$DW7`sqC~%dmUgY4UfaykO5#
zSPu2;&`%85lwZGojp0I2V@&$x_1JAGGx8{4U8*uYXvP&yxWX@#|JelFfn)Z$AETZ$
z*Thv`>F3dk+Qt~k$3WJ3(-uG!UNEy+JIy3bP?It=Xt(w@ecj9-VA!9n&N2F7Sweid
z#>o&CO&k_6=2DnbYWu0Il>iG%PPa486-^z+veA`7)}DfA<|fCAv(jC~RXBb>nLme*
z6K8E@ewgSvNGcPhd6W=ja|&4-aj++sYLBWeI=8^cu$Dl127T-M+FC@!P3k_MY3K;wjhi
zrw(S%W(A$HC-LEBc01C_%b*^w%r9JjeKvMG&mv$$c@(&+^`Vw9r
zZ;7g{o>f=Ygp&Dfwp@Rl<4cM1;T_&>eH8BL8QK#vPb?5a--OvxW=f_2tbeds=34g2
zXs7FisZ|#uCV=QCJafvs=wLF$?K<;f6&dBy0q(d$DwyVJ@o9}SEv)meS
z_p^>>Uwg|*XL)|V$#tvaR*A33wFl-^A_(=XV9xkGW7foHl1rFK@jVk2SM;-w8g6^k
zlX+^~=dI)2=3-a?GH57y^av<6;Kjb>A+(26joO6#bm@41Wg(u#e)m4CqOY@!EmAP>
z?JN1o@HGx&hF%l@dA&OH+y*kZVy^)Y^=28{_a?2b7qA-MS&Oh!64;!Xt`es!g1?C4
zTfFexi)o&una)ti_T*2(7nKOV4-A)ijph-Q3%=_
zXRje5A(XwwAa!~Fh25nwzK9R6_VB{!&$rvQ*wG@`2j)DRD8RRqQeI5$$fux1Otsj0
z%xomBt`G_ZFSAJblbx8W@yXTc2B7F4abHv*#p_V4xCK~d%e3NffWiGjwS?kPONl16Btz&=9?Tc
zGHx_}5%Ci2U3|0Q8Y5VoBrg^>(H)72m6Wt~2gAa>)~-M^eT;R(Ne$MG6hGuA2zw{?
z&72PBzvFe${oo)ekoy%Y!h1%3gc$x<9>t1%E
zKVIQt*DR$yF7?t@^FnBHR4eD@{A=tf?y8s6)BDLQ%x5c-4+m&WY;bRRId`0f2PM|^
zakz{yP}B6OMYRs&1+Z7E^51f2=@fb^oDdmg99`CTR|w^Yu)jMgv%$hQyE`D4I0KCB
zli*ZoO9TBrzsNaeIwJ^QAmVmUgc@W8{5n{o`Gs+Pbu$5Q?dtsO;Il{PInA8LyNpCS
z2S(GOL@EPcwZv?EcX3GSqMXvoP2CV?WY~3bJP8*ij+O~MB_^Kk9kyWB>wf~{#6FWv)G8N7{wL3^>3Q){3=!xXBCOO
z5y|HvdY6Z#h?R9|?(+uXRIErI!p^ig|YYcL?T<=*7{E-8YRCTy)
zM*eyH3v}yB=r#PFwr}4!goo*93*WnZ3_{9_thMB
zn#To|O~Kx_(F~IN^B|MFsJ9F(3D%E@M55)4MxL@w3w{mEcHf;1CI4XrfV1l(D7g)-
zn3W;f%DC4L=TO%ws;`zja?q?p9D
zZszGL+l(9XQZnMSAi4hDSV>8#`X5IkP@{K!!P&bE>~)>suN(QGu4O#y*55bsY)xy^
zjH~BV#Ex^{y${E8v02ZmXPR}LdEIKN@72N9GWio0x%BLT`wU%Z1CWwS)xP0d8^GwL
zKP2%xp{cyMzlS^EYm}U+T2mu4HJP`K4&GOH=p0mD@4B;0Ibh9%quh#Wz>&9hZle
z)_D~Yp-m{9EaGgo?>+o_@5c=6A3N~WrEYNN`s^i2z2A_?`w|iAh0V^77V)B%R#{{t
zQ0e>^pKUK};lEz9AO8fv|M=5xzO6CGx}jYExk(27x5V=2y{Lcw
z9`LVs?_2-GvwkHwhiLxa-|5WL8H{6al8to#r8{8Z>ZIx9=Jaleq
z!~d}fM{=YXT-7}07??RL*Yx_oJcC1F)z#JCE*w<#Q|k6jYT*B7f7clR*uENkURGKI
zyZM45nHu`Or@?urEYrx(H2Z8<+w<-C*VT0Gv-G=N9{|7|578!uD4HT<>6V6%0
zOD9`(iwyrb9RP8y2=tQw_&>
zxZnn=X4rY8F+ySG(20@0G&^UqWMBmFw9U@WMmjq`C#!QySxj=cs;Kt`CdBriPf#l2
zS;>Na^{LzQ-|KkAgr{pf=8@p8U-97Am%xuC88c2jZmJ+=F0&u!{f6qDm4T*MV=a>8
z*KOF!Eee~U1I~+WV`JJJY3$yz57Y$){!n5*@W|!Y=^OemB^`g3ze;J7EWZ!@_{`%q
z=Gx|FSlh4R>FsDX6KHg_cJJJ)&Tb*)ux~`((+z*lJA`s(_hm=$EM&(R9H>oeD?_#V
zSp|HqP}J$1Upk%Q-_b|aVf53T1do3EV~~Abl`p>JAYVoEIvdVKgwx+ea@FX
zy~OBy6m59Apv@M*Dg#@T6ELc?g-D>!!^YbHTzOnd-fK?cMHb^wgHEk4tNQtmCPX=#
zSArv99nbOF5~8GJxc^GD7ztd12sHK>sAM~xL-IcN(o8;{tipFoC&V{tBZcNF{2(QC7CMf+jzzNpp^(c&M{HBnmC+LRwQ%Xuy|0)+)Cg=Cu5ef+>
z`hxPy*!xXA{x;~^%WLm}iG~iV=bHZ3#WS#TO8{cvNAJKlI8X)=Wxcg~Wu?V5d5aiq
zvUNcNj*{!qs7WXYrApO2mm}%uB%I5!5q^+;P>DLHRnU(w!AyU)
z>>*;ui@oo__%-nT+8VODxwuE|+}Km4Bn_M?yZm?zRCKomqox{-+KVIJZ+Kfl*~U8Z
zy2emY96Q3f?je^`ux0?zqPB#N!+zf+i-&#!vJL}3OE-XCB$JB=GRk!>UKxO&$gL$s
z0An_%AB4lX*=B$+8*tg3lZ;KlQbt#`)xCWH|mGUjd
zzaIDnmRo)Q(k4M{tZLr4t*Ll*iEwJpHPM=x#`_6Qyy4R{%Jkml&-g^{2U|TCH|>B$?7U2(so1R@hFjT7Q5qf)1`Z`V_7!S1!V6Hd0u|a
z+>CuiDc8BJ<)kToMac``*uQ$QeqM`=n>p}w%vLY_5Md6#+Gf{4|Cobs2K-6lk#Eox
zWO~kWN~3JlIxEg=M_#{~K&wtVP2Q4Mc0AcOND`}D+QV&3
z!||H?1;y6d(AGc}Xot+G1RTbGhn?e+L0)SD^|Rdvhm}_Ng1upa-xl@hFX!G4e6A@c
zkKnM_p&i>T;SB#~y6f_@7uA@j3Wz%_)Q6N}dcR&`y?b2pM8*})thikn1(>~B8r=_E
zbA-I&^1O=lMyjzPGbK}1#0_DxwBuumn$f!v_ZRv#8`szTpt>L-t9yvOS
zV`4)Q;+{MVLaE-zIVhWrBGyXr%h8P-@lArEf1;A5f1r|VrE|!60}LEsA`Z5k7>y4r
zXMP^G1qB&Y26IaP_wDX1n%PbuQJ*DqY17e9VQ$H4ueo!5Z_CQ5oM
zF{@qt?zz5o`C92&|2aFR$`6mmAu$E^h34+H#F!BIuJGc5{Wj
zmdM+urtfIKfQSV&9r8l}+|%d1#L&ZH8-Zw8m@JTt9WCE41y*QA8`G=?47t`C(T1A7
z$yLLjhqM$~6oVN<0JKsk9WS1#d+zIdb%2>{s>X)(o-WQEPM+t|_jy51&}W#9*|a)~
z7vHwE^i0(7rJbD}FQY1p3%VXxjqy$z574gT3G#1j30DHFW0@%YFV4}*mGEniO!$6(
z{ZG{LvI%@%2r!%?o>Z5}a@JKT<=ge!>T8_taTSOgtg|Bh6|r=*N=jbC38IAY*R7RA
z^-?h2`6=ImL72wvM!7j1`^9By718+ii-
zz&Q$ASj*~%$nSaGLtGkxc-W}s2T!`1Qag^yPdu>H57a@k@P;KjBx{@^g9F9UE!e`d
z6WDd=3`^3*l{$Q3?4*Ao=-tJU*~`#-dX{mx_o__NOTQ!vFYR#hI7TLZ3&zhPI-dtA
zbnuDP@@g3^kRMr56OH#IoNu&*_Q=%PiQ}V+{_jCt)ch*
z*E*JO>lbOg40e{3>!aMFCK(sQ+}hV}C{i3_kB`1nN;`t&<%KM+6DwC_S0`7MbY#bT
z>!c=Ba9$i7tbLIwYm!wqA>cK~ojfDBoMMRHJ+(@a)-n@tpnmnBU@FGU2E=eKXCWaj
zpeRPzP5<6ikW2EmX_AMQIL*4Nqf7{vnWcrBERR>ilW@~s$8I?qhBe$cXn6D7aPgW9
zi35Z_QXNN6`Vhz@ZY%2=)rrImKU61MnCJwWdGY{@{d20z0eOR&o-h9Ae0^6*kKAqW
zLuOeFWLh`-?ir20{+%6rm$q|^Te7N-Dqi)X+~xa=z)0xDnd8>(Ob|^KoN^dxzV(ra
zC^$4f@$DO!G?c+t)`&dYL6z0Jr(EZP4?hkdQ>ai8GhqIX+~QN0T~FC&h1j
zUJ<8JZz3m5mh0geGx67+K8&Sq^q=055JhUTlZp5$#8%O<`_+DwHK-=#`hpNkPTT*}
zcVt}WG&%TiEN@2;8bwd51UeTh!(Vl
zZ8D2Q=nzg(-&}wE#JJN}W}-Yf#&gHD(ZIKLjU6S7G1@q2j8S?*Ccpty=gdbnZ0z2P
zug!ks;=e*J&Z^VN&Fjr<50uhi*n2g3Ti)02$Bml=a(2^K`mU~04;1=y#<`O7&%kLG
z>T=BYlO6(x=$$MdN#imiQlv#n8{Ktgm1O`VcXS^(n4F_VUapgddsx{jm#n?)FeHAt
z4>Gfx)7Z>bDDw|Glcd|PvH5?iN1R(q0wbFte~CT6ynC4}g_M9e;q8N1zpZIp2DFM?^U|#!;?kN(&f52v*;3GS
zG}~?`>z-%UOgn{brT0q7)Xkl9t&@;HyZ3Z}2Fof8PPdQoaB3j@vqk+^wUF^_$HulI
zd40X$!_ArK0QkOwQ1l`8K;pzPrB*xxCWROHlOzvHM
z1hvL4jZ6wRy0`D=%rVF-rpL&E?8}`_2FUd&WS$*+QM?PKgqYlo@x1JjFmzN#`R{SD
zFr*%lsR8)g+whe;tzOF$yvvP2n5W)l@`tHu1%D+$-iaJnta($A4T7KfR@Qt?*8+_{
zA8dPpaZfN6h|#>Il_{?`1>*XuG~_(t1Y;}fRDgH`nEaeoASbjy*Wy((;3y(-hd~sKyfyL)_
zl{m**nB|95l|r*Q;L80*34ag)DB;BT)G>?j3u~%*l9sc|#uhz7PmD3?+0f)CL=h;FOdt
z*PQpIRfJ!EBih@ixAl)HA;xW-d?UMF#`1;NzZnTH!`pet{$aU$a7AYaSLees7e)=<
zLW6Vc!$d?Mueh_-8)}|P5B8ft@}ae$ErSg
zY(tV4fmH!?DMD~Yab(|$-HTacT6;Ld0dykFVpr{PNn;UO9nzHy3%ZYK@9}joo9aC~
zoSG+faRZe(TqpkntXIP1KeFNYNQzO<@^0o%Fs-#GmYGd<%=tR6x8Sp!f;(`{(aps0
zhMXq6syi)af%zx*VDSU`W7q}i>5Vb+Sg*byqJjl?a4T7RSNrbPW01j8DByc2)F+!_
z+a?t1M}5rEo>j3aLrhETYplxAF#_r8P?oh(Sy=aZq5mshQR_N`J8d}9xRu4xJ;O-n
zg~?{;ai5lqvAJSjpF1x#{)%8}haPF6upbWC6aglYl#Z+H%?a0P0+{b!5eF&gR^a?4
zVcvAp_hNzXzd~%<&QrE2LHMH2oyv5iT}*8Cx*zM1>>WjI?e6K@xw}I*z57V=e}m9z
zF@cqUu=kb7rr&r3>!B_fM=@Qr_ajN=7ijuQ%NcpSr{NcucQ8>cQyvCz*~;kEaNR~i
z)By_uzBU}%SmKur%4+ka)`FB}4^&NPgk=WAFftSCOrsBe%I`p&vWx;4dbGZDYz9S&
zf5ygC;&Ad&ozTHaWn;E12buzU;N2?Lh-hfJqy{^tm+HgWIGMg`G|n&`A#N#f
zEf3?X!rsXm3iqj;#-04>4z?{=|O0Th;60Q2mJmLqZ6EGV6Rg7jaVlQ>u9p|M}8|I2&?-8DFyLmZrGBTEl@@{xWQ~#i?mHi2X`JK{*L}
zGV)-xWCj}BcwzV@1^ZyJCZR
zRir*}7O#~XcSu(5e@D9U5C13W23&Wvy^~#xJZpnJG(in7mWC70cOMnH>d`hfSWF^vkGGu9SnXh)c$l}_
zQ6HW`MWYpG$#S?Yvo@-^5hA4#kMnfGpS^D7+f;_m13X!zE|bw7%e797Gwv|e5G1G?HMx<+
zJ33A#0`IGM#V%{RHHK&SrwiyKRxV+EJfSwRznL=ui!H~I)`^U~p#57Lm06K0`nUm8B7qT
z3bMw5D{fW!Tf5xuXO-GExi?o4#>7<0%regGps4oLr*!-69@YZ^nxuuNMlG*nfxyr5@ai`2}~S~htk9Kl=0cQ
zeX@Qyi5npCGO$R}-p-pPkr)Y?RJu2YQ2>8O%%RBhI*c(!rJt_7_<0!Y+Cd5nJ)bMs
z{f6`EOxK6(vvX)DzFd?wtA$#?ot}#2#IB^hgZg`fYbh;*yQ7fgQCj
zvq9Zn2ZgKL3d7XW%{(AWx>AyG7^Ri`J>ydxlA1P2P5+R%>Z#iff%Q={@2GjujY>#&`Us@Ecq
zm2nbCAxWo1Sr-V6FtI$;@(=
zD9B5qxTJgM!uY}61zQil+o{p5bCd(8Ow)-n5F0UXbNl$ti3SyBsF8vxQLqj1Ve0ku
z)(CZUu0MgHC=d7hjNpQYkeIF~A&G{M$W2C-_Iv7CKV&HMig4Ie3O`+OLMXPMt39{C
zbmq~D$nbvMZh69J6_EWZROEqS?bFGkAhjNt=2gIGbu|)rKZD4zU7KvInBVgGb?eOAjmeVN&Lw)qh#CwN__&J76{}hnXLnu8Ce+u7SYQiI#Q{&W#=0
z2w}zk21N^`y^HVk)|a{1SElco?sp&C8K@Dc*C__$oP5r&_x*5SzIVj96|7VyIw@1w
zT`d7~4P=P9UdN3S#t&?Ypcq{tR{E%_VCRSTEG9-0c8!?~s=(Wx*uO(}-qyCIhA)xO
zO_*PFsp7f-BA3`GP~3OC2WlxxX)~;6555no%Gr;dC!~DV`hkj4n_RD|?fy<88SoYi
zUf9F|)*d$9Nx&UklRmmU4$9%QQk7zOyY-p>*0%=?w=U;hyDR6M&k|bSYn1x6ffxv#
zi1~?wHaDe~j*AY;guE=gR%`=`KOhLnh<4=YSw}$f^n`xE~xEp=vh?%N>V&Evd!Z!>#N
zfFEbj&#=RAhxfS)#!l$U=ap2Zz+`7}d%;?xn>E9mVtY%{E*J0x7sZz?0C&`wr{C3H
z!|b{kzA9Z>k<8oT95laahkMvUX@9aBc_>a#T_xN%O^luR!?0i5U5tTQcj4Pz@XUby
zmM$*;Wr$sa0s5n0y!kcmc32Qclls#mNt=gk_f2;3^WC0Y(^GNk(e$ng%XBH=4Nya?
z`b@ky?%=E~n5Z6{K`C(=hPpDVhDlR?H7rfk@hfLm*qZ~ye9-VNtGX-3ttA{HBFZOD
z$O`s__x76Spl_;b8kUmRx5ej|f%_kZ9I5TmE(H1K_%}ozub3_PxY_ovIQGO#5)TSBMudpRK~@QkKTjDaIDff`n~O>70Xt
z#9$+9&;z_G9*w-5VZQ%<%~_yJW~%-6n-5n~Q>o%9>7_cr;Axbd9p`)O=_$MkiabM2
z-&gMEHz@!8chOy(E**d0zIv(PimQR=cG&FU<1%RyFSoY0F%(8L;=*n{`&fuX*Q~h5
zJvP8g|T>e^oIif@7u=4!l-z%d&5@4HEoog?BOKUA)k+$J$YMIFFohFvZs@
zj#$ktP?y2sD(66>y@MbmzM<;9I6^u2^lKCvyc%R1t|$>!g8#jI%Vs$|q3~Gj<
ztc!tA5bJB~yZIcykLOn{(J*lgVOm%snd6)BtImmQ1JcQ);qjHytAOZM-K`
z@VOBpN=V2i`!5X>)gT=cXtHiM*kKwT@^)5hvzxSLpmSpS$(OkI6AoLmQxWy2Npac$
z6Oi`P;FwkLqkGQx200Lv_b~Vi1&btorwa86N#a*bQW1M*O
zEdFPk{w5nCJqnK7mOD<=4!wOIo~v^$8qK3BFhih%0Eq>TA8yaNzXqS7FX`Rn@3Ufl
zY;N={Mj>O}|Aj(nqPATe8|`-?0jw5aP5Zf}Y>X^q&U%EhsFoCu1sn(SM%T6Z*)&?8
z$LHn`H}I8>hc!Kz>*^374HAi$^BB$AJfOEW!K9~uYY)JFSq0#
zb%*-QsY6Rzigt|*#_?MFGkv^1@Iw*qIX(Zd;{6(s4gT9x-Iyj9d$gb=Wn*rUw3g#}
zFdTA*uDSfflb1>Eh{uU{iR*~K$6$k<^mt*D?c!b?@{i5YW-Di%jELl=CmVf#>kfgB
z7z#7HHUXNo)W5WrfZ}o;>u-XvVKX3Nx#uUiweETZ(4>;ktjnffC;ZRXcehR!GFMmp
zeznIxU#Fh_?^rFs5E?2wqarUbJ|Oq6Jw;hJ|H0Ayn=kvncS}ZDxBmYUmy=loO8zIF
zZvpZ=Ye$VPktNSx9r(Xqhw^|Ps}bY?kahqW#?I60IKtm5A(k8IIgf;43EQ~z))T7h
zFWzn@*x}syYvKLB-c36$3w*-ADa`oc>{f{zCE`UXOUgHm9IJ)Sv4IFu9*z8>QmC#@
znVwWtXQl0sr}w@Z#50A3nDoZ-_w0}T{`^)nH0>Lu`pva1mJWzuo;4a+&J?oH3b*Q>
zLAq4qBh@a|sI|J}YCF>{=D2&KzPnK{s#n6oJdE^*-vedhWWrTl*4$%I{TkPEg!Y#Hmo1I7NiiL(EJ_ief@WG*hD6>7I?;f6VvE_)lmSccP9M8Xeu
z(sGi-FQZKkeGuiAa{>VxTaei&{TGM9EPdx~b~_rW10O$C&T1qaT$+$UFzRiJvcBzR
z;x!}G;m12U2Gg8Y!80PXR9x23#>dwGJOF^`6(7e2*?~@B0iJJHB{D=Q*&bEE>myF*
z{PYOvD19YP>Oj!#IpR{
zg(g!XqSE?%;iJl3_jEjO@APj(!oGk!vl*k$S}5?)U^1VnC{ZZsduEt)-M_Br(H$)|
z@N(acN|3t$N6lW34=N>}El?R-d05zFH_wg|Lp
zaSI#qK8&-Q#4P@c+8uIu=ePG=Dx4GB1WyHcrQE2fh!>{RL7F~NQ?sGS48~B>pS=VBkxJdZXMyF%(`|G
zsu;k6k8f7|ZKUj~AU!?3RJ!=n|Hym3!@9w_N7_ssHxWuLp$H!|!jma;?xfI#b~f!*BBiUa`Ed^hBk%R#eAy_HCZ>XS7hp`WKJO0MXC
z5X}BOhfxJNzesM$$N%=m4jBP7A&xwifbTiD=r4=wa{kY#I`yTvHhHijtz;v0c4Ci%
zl)EPrKAUy~25Hp52G~Ow04VLQrdQ1M5Kyr*vLhE_~spZSbK8}v4?gvA!v>93_W-tw*y!cf(ze?BGPduRj|N2OT
zkRkVVSs9mh>R9Yeh5mb|{SB0WDLN$qZX2ME|2R7eZ>;!dg8>u~jp^A;EBy+vH^
zP|D1U3QHcbJ-@eTJ^e*W*$j8Lp&`VNu_v!q*KBj?3EG@sX5zl>?b!wzFAt&36N=M!
z)?H#D>Q~Ko4azlr#=PL3fZ+E9@h|;y7VIXj0|5RQV6GwXs9~4xmnd56nXD{MadB~V
zU0uNez|KoL59KzIYNICe3?x2s@e-DT7BS9ADIZTQZZo?&iS+VrFQ|#LP-|Cu2
z#vpbCNcyGpH%*iwT0U%c|!d7Q@jhLqa6d&
zQ+;m<=|_*EKq_|q{I~u(I*lpU4T=vI17yj+Bu4@-fT9)}plYBNs2XV0$p??*TfaFs
z{R`Z&1L`$l+Iq)9#%Ggv#sC1OT<@L@${5SsZtoR2by%l0!r!ByHYcZ)yT`$O^u9)}qbMy}XU{n`9V`
zUOo8q403LwSIiY&X%0Ou`pa{g1bx(yk&S@_$V<&Ui82d2D>QReR?%#8B9G#82=ts;
zGjFg{tDNkajaTJr#Gn;h_)p_$(6$iJ@vS0Y#%A52HslsH=p=xYlp|5*MWbHe1y)&F
zgt^dKdkREvB-SwI>?Ij&|p
zTv47n90<w+gzN_b4_5#j|(tiaY}g_5^>m&P@H~Ml-Q?eqBV9UAs!nP
zKX!I%5I1X|CV4h}AcNIguuo~p-zdLp{^4-w<{h7)MoU7zO0w`?4z#ccIwK7l;)=95
z4-EG=s;?zaEOrk9uJ
z)MTqg?D}bF-3kC@J9IVfX?Sp$HQk|I`~s*H^Y_ZAK)p3v9NIZIJHs{_@e9scAy)J`
zo~Bq!|7k!aTU*XJ-jx1_1J$m+(ahDWiefse_(52npn>G99%KfF0g{)NWIhFKtJG4N
z6H?wef5=e>;y~f-nw-fy{DpQ@MGO~8!y{TCh4vOK(Q04y7G(U!lHx8c!MnKf@^0cxY>|2
ztxC8yJ!Qo{4P21_6rcK<uh4jC
zTl08V2w?I;aTuqo&jpJM2UR5I5NzRO6v_L|&L3ksDJ?!5L@@nqBICZT612jz>O(P$
z6FbR$;ZVS0$s1AM;7SQ!WG9mkK#D{RF-upFnEYXR%=3?qCMZigsyK?xCFH#^Y3_wd
z(bs)lKXVPBr}Px`xvQGj^?pg!hCKfqX_ePYywBp?o2|6ONKx(GMA`0a+eaef81kj+
zdfkPjhXkBVdw1k%mlWG-`tJB{XcFj1I_JfGMD&zrsd}$1E8%+S33^fohzA&Ecj#@*
za98TPb3O|4nRhiLCgBMe)>M+5Ju!q_ckMJk{f8JdYtU_H?3}G`4rAj`>fDHSvY&}r
zy~4=@Y82x#M_I;XDzw%rbh()f7(v4@&?ed(nq*qGzl7#Eh-kI~i(Y$Z4<+@EF9Z2x
z92qcg9G_Jpq?HcczYL|qE%kZ~1tJx&T-wy-_yeE&tJ}<$Nw5V1tV5?rFcfLCnLWC2
z++Ox$w$Tt1tfGf*w@S|mqe9>az1xdQYA8+!OPYB++bpq?0w|a5TWtt%8W)QA=>gS%
zuB_zjwgy~}2^fz)kQN(B%ITa8VOI9?6qiSiyN5<_(*A8i%Xww9Le5eKWjHU
zWD5UK$#<4(cyh%Y-0vFj@#9B@13zXT=O~9YWTCwu*a{sjVsZK6OP+cj!dgBy?R=P7xkdq^DJ}cSj-SrD|w)Fs(U!#pGFxq@sR^qoErg|YNlDZCI`E0fO
zgzw>8_t@6kukLIEc?Zdy^DlSKyyU14_wC-zzy5i9TqXyMBw3vW840;vlF2U>MC*)I
z<)5!w(pZ&IDD>syun@7rABNP%3aU9ZiCS0YKT-LIIZ_On0xGx`Rcb_@_H;qIx9c0;
zd`hq3q;!BcKt;~AlwEpIJszw+C#a$Q^yu5`#$eL1L_Jd!L==g{dygNG(U{0{witGz
zE{%Q&ibhbP*d;T)Ib;SjspN>!Jc@&O4*VPrCg-_WY#wry3Qc?pHB58;5}lRe#-
z3jt37M`%fSgKjPE-cooj#lg-O86}5mE_dfnJNMm(tSu_#JPAV}qwV$jzBn89-*S=@=Ad(ZtgXB4?
zVrT=8NIM)t5Xgbpwqfztdt3bvXfp2X?y1X*!$I7;B-Cc-ZNW{v%J0p=tQtw!)d4Tn
zn9x=zKM#)m5GC;P{=B{Qu(tJ?#n)7YGp#2cdpt2#T9fF>%Y6A?V
zjEo*+g68OpK+nmOLsy=CWhau!IAg+J6SFAmwN`#}fN#ZTGtj{pp`LC%(Z_=bk%xJO
z#70;ur#94-hT*yG?_N$`)QR_{5zB+YLi>PC`iC=&-z~PF|L1I3(g9G%M8pWs`FAga
zlMkcp-pS8Hsh$n!KUDF2HjS%rK+G8Gf%Hl5YD7J9@i!6a7nXXGgz_|~(fq?4c>fE1
zHNKO4!f<5hYpE!Da%DhMIC*Fe)8|n`VJ9Ay-DQ8#Cv&sfAJ?1*`(+YkWqEO`(r
z_!ettVs@TiQFckWOgP?X@8VnIcjoy%mkV=!&-{%ikm9C}mmD{PwD#~h0KuFnXa>LN;qeRgd(
zv1>Ehr84@$zD7RozuMT6uW0;40+PStDp!xLjmfOg_7`f5Boo~fJ`1HGJy
z*(%%o=zOlY)8eUvyo#W4E=1Mlq@r6CO;rG)2v5p<@~1Msbb~o^iJM4WQkr#CY=12>
z1~WID!aZ3j#lHcu(i)F3ua6Bv*ZvQWAyCXIJdh!15bQB>PU2K<_YSJ7IndkEjQowf
z;vQ$-F+ADYvSuSHw&-&tmxxFabpyXV2#j9Uh*MEAtN+2EGxtWK;8XjYGrkkwH95HJ
zJh@BMfETx!%Qs%Mo8kET{ppRzT{j~3!TW%Xgr;ERyy>&HN6FpP11`O=d_L#w%}YX^
z`Y@|GiYnvrp~b$`k3~90B%8$DTav^Mzi8^s2mUp4*F(4Hd>PKL=Z&Evv-;mII2TN9uc3gqH&
zVtwSL*4TFR9zjU9_UXFR_s`;1+^2bv;6UZGBBguU|R>-?DJ)r-hQmZ5PUV
zW%rD&5>ZN8U~>G~w#m+){^PgBZf#3xM;fcwY?a0K9Jt8$kNHV?m>p@B@L02izK3A1
zh0$d_1|CV|mXaY6UuyZ~kmW>W$#h{EQ+kx53K`H>RdgUcq$^7MGqNV$gsh^Q(Bw%&
z0>@-8v;aN2#*H_R33}%ajkOjveY#0hr7pq$L)%+M#kDrwqLAR45Fj|g-61$6A-KCs
z&;$+cba1DGySrO(cXxMpcZa*k-tT_1zkBW(=lr-m8W=rT{bql{;bPPmyXL3{0_Um;8aC~w}Ssi0V>l7}_Yi7jE;R1R44fvy(kP+DHNzaW>X)5|<
zYf4C$&h%W^%!kqT6|-5jT|FA?Q__-J+%c~np1TLQj3x!VJKgAno-HC``39CCJLuzb
zAx0CKi|txzMH3T4=$VTbo}B%rTo5x}Tkc}w!r!;;P`bg{&@z3}1{d21I|Wik
z0#O2KPn@rg47Ks_`Uk2YHiS8Kk!C8^`(?!jA;`7WG(?9nKRMZ9BOVdHV7t5Nt4!|%
zQX>9-jYcNc_;uk&8Gl;QYlA1;2{h5f?y|Gc@39u2)|#z!>N6lL(qCv%^T-~2cQ4jM
zLR~Y>pRvQ#1q1MPORJs`yr)kb3NtgTqaKP>NP?+{Su^4=PyXnBt+f+q@La*@cAzw#
z=VxK7c-TM$WWC1uqgI19qk1C%NGLj1Eq);rGj*>rY73~|=o*G~Mx<@dF5i+7yqwq4
zqLViPi3Oq03x##7VXS3KoxgeP;&q8UVbpuu^q-0B>cTot9B(>{D)+Xe+rt^cFB~z?
zJluC_G3yswPv%|V0z`>Pwit86c_CSH`^Iuh?B;%h4cp8I{x@JleeM=J>oy9GsEn#J
zj(T&nq#rjG=%d^&^_VVkoq5FX-uc^)ZoYoK?3wMFn|>Q2Q~|T1=lC()**j3@vG+8g
zZozFMbEPPm2x@5)8BcFWc;KzCb?5yEK?r1<@S;#TtvrkCaU!}ToXp(>Q*Y3JqWni~
zgI%givRNKtz3WeXBl{DV5u^L}UC2Dsm2W?-^duH68FuRROBe<<=$^vCZLWZl
zXU=25zuBWzIghkloIP(IBK6T{I^*DwAO`nW8L)+Ojny%q!AYMJj^7vHv2J5_B}3Nx
zay;@h<2qf~neKr|s&T1h)cBmk4?jI0GJYcj{r^D-mR!;)PcVe}l0&SQ8yi%ft?9uB
z)~i7|be<09WNrR~`ZH8z%2)g^KoFC#Ab9>>R3rxKgXV;)UNMQXG_g^qdejV;`~b9x
z_1vcR$J=BUNVq_psoPUyA+v_YsZULe!~&33q6^1
z+TK?cC(8lh?Xq6WSJN%1eapiz09hb~vRVEYqqyTSwH01jiFHhBr|VZ*C4bUjLEO@i
zdfH}dN(-Q9eg#rgo<)PHf$*%fqi<_dFBy{337Rvdaa+ay(gott%!^B6Mnk?a;TLR<
z>s6O95`aTp*{o}y$p>3rps0FY&7}SXf#Ku0PD)h0XXS36mKR08e$d+(evN^9Bg4{d
zY3`nIxv$Le$3ArPBA??9|2Xo4AqX6D)l+EQc(@&dDK8ceD~
znY#W$kpF=LqLPIv8atHSas`wtzpkpcL{0J7uPUP~dLL3qZeoN9(OAVQXpU%aN?bLs
z!kHbidbLymrFQRvrEh}b5U?JTbz=cA;A#I!fVb0pk!WG>nhY@C9lx@Drqm4^smyy{
zct&J5$SPiuX4|B@i+dANjFD25_Vavet0Z^4=%i2L{f4595uW-v(NPpc%<+lczDWUA
z1$UL6d)`L@I-(-{;!rhx(8payBhur1+d>W0xkg9TbuP#6RXLx-09}_@B@iQ@yY}D<
zAIcNfomN0j3ZG&;lIl8?>LN+Neqx3}rS5UGCE9y5&g#7`Y!_FId8!R*&*WxW6F;x-
zf-{xKi!R-h#_i!$>Jg;4!>BJ0f6>g%@v3d?KXk|oVX)+5uTr8`N5%BAJ2IK=9)aLh
z6IcHvc26DqwI7#fjImfR#xdi+xhQ~N85XGYt$O)G0pvg^=0*z^$p$0p!rvZfRGwu*
zcj)Q8$hGE6@lo9Ci&mI=bLJ2TcGQ?tjhBH{?-}nf#6@~yOHto5GF&h-m^SvW$3_P)
zjCTC(1a3L$kp@sXOkYMfG2LW;O8r9%N2}Rf&3hLlIk1myA6=U3*`Fa9^e(eI*(>v^
zWbKBvzj}pb8nOC=U=5q=3U+51kh-9tr{qr9nz6&Eb)8jANeGK8USW&u4V%H6%5L8+
z9m09YK?RjIkt8p=0iCn-N0dAF%u_ih4J3cg$(m}wy!EQ?Mv}O2-2jKMfh)e0
zW#w4Z%}ckj>3ehLFG+THmIp<}PadXaNxvo_dJRLULL}B~%Z;4pe~^~hcXBGAQoFJE
zbVr-^4bN!NTn-uhO*~g@z#a84Ev~4bLXQ<{Vi$^I0H=vjNUW=yx8YJS)Z^{9fv060
zTX+*JLLBuLcd$X%`A70zdk(IHj*ktzSspY*~jGc6Ka
z#hrlLb=uaquccQ*%FkLsrElQ;%mnA)!$3kphY7l4Ok7y1%2-fsznQtx#5Fjg1DL!G
zfvF#Mwi!yf03k#fFQ4QX@FS;I|8s9d4F;k2rQyK4y_P5@Ab71kL*$yP6(Q3a8
zAGoShkrzA=++Mq4Y|{MBAb^y~MhApvG=)6;6_ARA!I}9|Oq`BEg1_l#1$t7TVq?kR
zFGoqy{%ph4*DuZ>;_H(N9^Oe&y7jg_f9VAaQ?r)e3~rzAuI3V`mU`0OR?5G(;xb8w
zw^wt&SzWSLd8!LuSN?cxF?ni}Q!Y!K82w)>c1l(jjIy%w{KA4Fz}mCiSwdQ7myL$I
zFQ^EZpRm?;0DiAreb6eY!#GM(YL7ms*3;lOW0{bxBQyp8Ae>(o7+wc>&Ooa?4p9Fh
zP&2okbfsyba6s!(4}&WwJi`7jc91=%l92EjIT%cY8TfIYg(ZYB9M!zCbOU4gtR2kOXuX`~jCxS^(RO-IK}-mj|wvNL`gG#+Eve?tdp
zQ^uGcrS)a1xg)D(Kmeb>evv!>7chBfC3iZN1Jv=DN*Fop%GwNTWm<0_fQbKDdl$ah
zLh_vKrJO$MopiyFHU_&BDr1PTH)`oOM^iUyG>~*oehI-Ez|d@O^y=;K?5g|!01R^Q
z)EZ&+5E2-6A++sn_9I_INfT4i#Y?}(@)S;tpBf_2JBdR#s-Yj@nqMx-KBs|csVY8!7*NE&NnmcCn;QCd
z43{#L^4;)}91k@mWw?^(FRO$G4d8G=&OMc#nutO`oI0EnSpty(b=<6MtG}Akx(E1K
z@-`Fx2CZeNV~sQs7K0ySOK$=Y7B!{Krq_b7^$9O>H&i;sqCj6^p1M?Sj@C*hTQ;XF
zNuxm5_-jd;J$jIKbsV8hd`Y%aQh7g`fAEiXVR+=Gf$00I2s^l8=hL
z#1+@qF*xwDHF2nCn)v)xhiF$Pka1_Kzu@{3scgTa`fW
zH%2BSej8|<`)kxUt6R_;(ChmHL|ZAb*6b)o^S`_cH14t7QD~~}(5_h+whsgyzLvNO
z-$4a!t#G@Xqy>32a01VFxh%%7tYw#f7eoL<&+zB?b9bV4)hXB5nxZ7TAU9t1zcc7S
z`V>Ud_uG5bt!Q1lA7^8xW>>g3T4n+q(q*YzZXf;!0&ZvY@AQuPrkJ5pV!t7fK*m^c
zaTRhMfiU+O{on}O+M&wmziT&q?e@3({iVR;VchXXRG_U42f
zKLGkTPtcPaa5EYe{6nn%0YN>}sQqS~0#g6}?~>^15c2&V)W53u=lA~~jAH=T)Bhm}
z*Z&18)xT)WfLZyc2Y-(lGC;tE6<=GJWK)!w2zWOB=j;EtO{0?kq4GL>58%6gt*cS2
z{m#64c$$`9SV%@e;S1(}j9Kdf%PT5&%#<5(+8^RQ++OG{H@nX}tOY-3iJrF};=RA$
z;%6sFQ~w8R{0vZIS5i|$`0*R!0B|lb{hMy?33U0$+n9Gcc=x33o!U+G4H~D5W#dGffL&Xdm-UcNF8|z!4E%OHK){^3oF?B8r~jV)R5*raK7Gp
zti=0Z+;=w_w?9_}?(P=hvfUkj-zuE!ataUTIfVn=uMv7`4sfjnv0!o8P_i$%uoxw~
z+LhEzFuRb7%H3HLyZMQPsSyr6g^fQt97bncc3C%(R`64-%`e5}Lsx8?S@gBG2^YG}
zRtN@SS{_tPy~p7Z6A?)<8cJ%}RJ*%8SZjLRYr1$x|?
zwdA(jU+&_6WD<|2FEk7PrwgQ_#?L+$4wviiB{c)UpAyInd5L^PLIWcs7VFLa*{$5%
z+ya$S4|VezZ$J#4^uX&Li1a!@;@|t*sirKGsiKMlRNnJfKsxd#O^bp*3LecZ%FD}>
z^tA@cHx3-$9v{wY6+WszLpj?V$aj5QIC6`IM#TI+scJNVYhOU$c&)4DumWvfy#m8!
z1BV}`eZTtl9!TMhf)e>hpHkNt_<(K$Ie%fp!dAt9TC
z+(-JYUhp!SS$l_dN1Nf&eDMD+VLYx?nE&Z!Xr>9o57E=pGY@)tXd$9#&yh{vo~vSj
z08%OIOctoN5dC+eNky|0E?MzaooA=yHS+;NF`!+W7N9#^2z+k4b^gaSe!q|q6cp}f
zW0Avg3oIuv=a3G2g;m`>J@6=hjy6{&up>al&lX(%D$cG|dd=r_f6`_01W58zjQK+C
z#2)|1AkUcufy;5r>{cKqUEUPKA3{Nzi{S-+Z$96>^z>+-g906l&;6cbJ>$9AuD9oq
zI>5-9&XkdHeEzfP##2%^$0O<%4T^VLg!jv^Ho)9zJ@wdhU?`72Isy`Uar@^0XIvge
zpb|DbT+UU1P*7jGVz+8rEY#ZmuO2d6tjGQ_n8DrC9^dc9Pub)Z-@z;uKHu$nxODa9tO?}x
zpnYST9+(#s?=;i#dd1iAux1N_<8Hq^me<+NjB-ao?^>jy*S
zCM}y;Bw7B$X#}3q11-}uov1c7$nTe?qA!g<Lm
z&1NjV6Op@HtI;m}=y1Iqkz_|lPhWI7#0IKqL^x`=(lR@STOO0OlrCvP9?(_u;}#2OY>rwZe~Vnt>P|S3zgi^Z2dCHQ@nj0
zVV37iFoDhG{lXPF!ot(@Za1mx(xckK8_e-uy}TQXHP9nztXnl!R6QYL-%Gk~5+!7Y
zYVE?GG*}a5%1D-jop;(f;Z%jyj%d$gK$gkozzzOo5pL&ll5OOJ2etji$*g$OR`5U_
z<$|*7>k@f9%{tB2<&`NbM=(bmZIG7gr^S4R<2!x}-KiuiYWN28d3LAJleT0=4o(=m
zLjBu@zG-ZYN?BLzt97hfsCaP7IoLWTp&=a7o~Ls=Lg&a|sW;A1URC2&+YeCuz_tau
zqsUcdxHI88`l=4t8=3j6{ES&DguC??+o3Oh%oPg(rpI&pu
z4NuMY!`B|d&6Vsb4bZD&u#+(004oBc&2`TDN*WH8+W^HwHiu`4*jb-_x*E1XeHKP8
zsce_)ugyOo6I?H6APUs0LV|JG0YzqPx?4DkVf}`N%5x0i=;+vDo_j&FfS%_wz8s30
z%$A@fGiteQMHfyG*Eb*6)dsMjG=FtUh7@Gr9V|0j@w6}79EqjX(ni=iH|T!={`*~L
z#`jQPWeBU6y{Wl>hGxc*()2_i-obnk5glJj|2U(D=caFYFXYRIsa>oQ&X+YTE_FcJ
z89_3FzxdMks)k&*VVry$`el2x2X+^R*Lpu@Yg`mb<}1s8LWRkiGE)_8fe8@H&;Kl%
z9X6EsLmuebXms-u!QV2^=Yo!pKLBi(#>ot7zjCym{Fz;EZ8irJ`;wpTjkKJ0$T~yt
z`(O!OUg42fm`6GbjFqrCnEJG7LkU-jjL2h?l97e7e#L}_h&KvNia<9O?|nz3K=i8n
zi>@4+sOXy>$lggKrbf_3H6d;4Wtp%b(~bC6C?{?3coN&5DDg#Je9VO}F|~20ckYZ&
zPw$Tr9XCEvM8IjfWmKP}TP8=d4+LZI-!)
z;1`tVRYGW*NnA`+uX~+r{QzED?}OlLPHD*k`vQ5Gh4upFK-~JJ;N0mrfys5EFGto0
zJM4qRcj=vr`NU;K@Z5YDf(5nQ`@DOxIJ1k*X`JEtcU|)ng?
zPpT_wW#*zr*YeO6dbrd?MyDGYOJR~kmZUbK?{FyRN)oX7t37f#k>$Cdo3N#NQ%-0d
zc#}t~Z;PWJvRjvr$SC3IC4DQ{)?;iZFWw!?9CI?XHRV%=$!b;~o=8HCw4^fS
z?9+vhvHea=$z?HdYg!*=%`xShK+x>GOZ|8C6u$n^*({GYpE&YwIS6PG&5khSduFuR
zS#SGmuh4&VPnYQ7-?}82%8H8GK#TFyz_a1awwFa$e&|{ZdVDHMH6Z~B@3s$bW#R5D
zytZq+#fdl0BXq5dCa3Rnt+`x3?$my_$?dtL&KrpfkttZ<=!Pv;Zd
zqU)W+Vp<8JPu-cr;oGqo#v-R!ZlkheusWrMc`SKEk)tQFuPzp234?4!Uy#8H*N;Tn
zx~diE(c_D#w*~7fJ*iBAk8-@rGE{;Vd|e677Dk^2ItJ&mn6_?s#9C?OMd>Ul_t$@)
z2Pyh2A1?ifm~0_l-og@!w!`|uL~fOC7k#63(nEA^TIBvUU$siL6xD@zmc@KFA(T~6
zjJd>e?er3BxCzxi8(LTl5Zc?aUjfImHa4td+0w+5YpS2_bqmZSkPkhR-I<5pOF
z@=#t6)d%c8i$rDI?;KHjQS5sNoeXq7({2)X3r+}UCY^=ZjQqYc$`s8lJcbV`ZrfQ&
zs7bn=gbez#6XB3lKpHDjhN2>cS-IxgsJr8gadBE2txCADRm`d0X6<)+t94xF@tzXzXN1x
zB0eqKVlBN0NZAXs7U{m%6;=au3W5)_AvYfn&WzrzbDjnooV6
zoLJniw;48pu4uQ>x*M3-Ff`v6#g55r_86x(FGRI`-12gf-~OzTJPh7W&=Kbuwh;Uv3c@A?3JrF
z(wVxWPL*M%Y%%(r7d&~4z*7a`jAm=8kD+0+UA-2{BzuLAy+|N=Iz-Ndo}x=f$jY3k
zR&GbP(vscx13l~EibY>bO};T(&4!pHS*2zFm&~?dXLztOBDDisGLd)^5ldmH82^Is
zN%y?$`5xM~zm(y9VQRCK;32H-*ec|@A(O?+SKI-aP4S=nH_Q_J=L?HyKOX7*FRpm
zW%l+s8z5(XumY1p*kPKc;cZ8v5lny6k^}3fBuGL}*hAXw_Qik>#QRf;l@>v;W64@w
zgU!&j;l+MeQ;{FQ?nrrWnS+QA)!VAA4_yo&?y7eYJ*uphUa}9TlbBww8RIPWRfk6g
zDvj6zLHUS3pSZkLt-D69kSMjU)T7K$=*NQ0j>^s1&#SFu4myU~&xbpD+(n`2qu$ql
zxLu)X@})V_KfI@KJnf;t4#zPDbDe$ik&NrRpOME>b}c=dyfd;EU@kdfQ)0)7`F+dJ
zz2J7hdgl`prGy)>Jy;@Y&s?ppvwLG*yh5Se*^-LvQl0CjY_Re1ozEC}bCd4*(5RiK
z^oH-n^~{i4XUa!Ui0kt)>7M@n^)}Ra5quG2HlIgEg^ulM(l7ejIn$&f20BxA
zjoK&+LX13g0AtJ1;?NJQL8JkCCxtYenqJIsE}z(NpAnj(utWeW?Rr*r
zQR55hnJ3N)iaMq_PhG1Obp%~3KVZ#C^KOY$4zeh&NG5~bpde0$5DvyY}wn*E~!
zq|#bnm(6Uu|7!F$JCyk=Ci0n_Ai6n(UzT>V;p8kG)i249(6|D7^E`p)%S+(6E>wXP
z^2>;kk{D^`rJ9@*``Buh9VD63=;Nvz@p4z}u`VU;c^q!{IS`+3M-AC$+a@2XrzM?Q
zCDeSL@2lin0eg6NRzH$bLI*6wfBmM}V92SkkOmx16M*ckEm3B=eyjB??njTiZ$
z05?4wyD{`p1NBV-x*zH8*GEOYJ2At>3r2>5rH-8m`Qj_iVwR1$)9aGZy;skoGoR`y
zb4*mAio(r@ayd!0h+^@;=K)^y?j?V;ylCDkSt*3G()QOmI@Bpvt4qI$bL)*|(t~+A
zWq6XiyU)!|TD{#XHo7_8*k4?@&OToQ^KUKzcGDZs)6r9@#S-75!y58o)6?TJ<4*gX
z5WmplSN%1x_z--Exi1<=c4-;g1VR#_^sHcR25O)FY65Lpvi9Jt^p7!lks|hrG5=TM
zxAXShj=H2&BQgo`9phiVWn>^5O%eu#5P2zd&cgcqw34O9W)Csyd^4lZpDeIKl%kH{
zFR5=3qBQ?aSmonYMrS^fqWC@zp=q9cWt8fYG||Q=l6kEG3}Vs{`Q3$#7uAWnzob_U
z1e&PnrYlccw+dP97K-4^rH;g@r`ItR=r
zR84m)zPY!J-_NM8d|!w&1uwu)GljI%2w5+YOb}(DWUlk_2FEfax4-L8!dtvXYr*+4{v?)GXFWpZx+7~5(D+^Rrjs9PF3;Ahb4@asM)M2?
z7o~$yM`uY-tw;(_(}Qd|KeE2!XHS)+;oyA-t@D{Hg1zu)!_~&Qq&aim@8v)){CS#1J^|
zFoIiESS^iLA1BXIc-R?;IiP+(+Y26_d%mW9Fn}s|n{|Jg3@e<&W)itV^22hi^Q8Ta
z^v@WqykDhebMFlY6MC*1uXvwr%_3^T*3G!!rJg&;i`m;gq{W*K2r&*ec
z_9`LfyOx_>gkEAcs~B!{8o|=1AZrHNKNqYcGzxfYcuR__l1xv
zOLcoG)x$j;Q;wPU1V(@d)lU*C;yqvX&XA`KQcS`1G;DSxTk+Q4?j8s=A!{rcf)kyMCK->AhHlb<6tmH`vG
z^@H$^47^F1TVL%d*6pJWR0EffebljP*W#Dr8D9cz-_b%xT6M2?=_;;
ziUF6?i|YH+zMJHy8$$ECgZH|6dM0y-2ng2?N1&l}Fkf%^5Fg?PRaMpLFk3Nk@R<}+
zE}Q>0+{g(sq4piu=LBPlD_yPIp$!EHfwu*^m&fR6LwU&%U*GRr&8LZO^M-wE>N9v~
zO>KW6DBVw7$H75Q2`&&u_@STmKp!Z&(?i5yqs?Mcuw&38A!7s_{ENlZM=Ahv8sAeSd9
z)}ila1GV0c)R9CqJ>Zyx7{XMZvZa1&*QHOjztS-xS-oX8l~LyS<=tA?GWceBm|STq
z;`!WXEE8PdH$RW&huysm+7X{hA8b1?0?89*ON)IPD+Jad7Pk6Nr^@9{&u6<~c+?_{
zjiOX9ESL}+fhGE2Wg%cf0fV{O?tmYNuxmSB0K;2o$%W>MhbOrv9437FHDFST&W!i$
z!~1xH88r|)xY~qAYI%*`nE|t@L6YRIXO8SVU@76K!*@oAC3(b>a?cQ`ax64Z@mO@e
zi~Z7c*}H&kpXZgzv+F8E?Vf#?pMPuqb_jB{ZW6S}Nx(IZ7}kbu^eMYpat9vyQB$-~
zH6Mj=0*lQUy}02Lr`&L0byS>zY&8N=RwUc6oZjb}21r@;$dKSD`u*Jh>b{S+PY}!4@G1F@Be1;uPUtLE#vo!Z6$XDVfDmc25}A`LZ>|Q&p<(r*_XL*Ux!H8XFmxU5|q_Zl#>
znK!sHo-{{YEfyELjqA0e3)z6vfEkQ+n}fK@i2l
z6wZzy*g9{l2pTX#V;HQYM)5lK&Cxx}uUCNk`MLGAcwvG-
z71WOno)d+Sl>PFBx+d?}gy~XFL^@>g%9E-X^&Dt_4iym2w}*jWWPYIz=x#KCleczX
z9E)MPk4Y1@RrFKhY4I4is7y%9T)R1G?X2QC5q={AJIw=LsWOyTuo%&`IwWkpPAHaD
zPNvr&B%hx{{gJ@!!+3!zcoBmA0Wgi8F&iaLU)33ETPv4d(sc7clWBoy+G;;KwjlB_
zZgHOSeN#!OKMm(eif4)R-s|<>;SQOD^@cWnJbi)M6a$#)N&3v3gN>GGy+ayma*Rr@
z928FnYmr~E{Ah`0!-NjVK2vSJw0EH3`D&Bn#U}1H6jhRy5~d95D>J*e%C?$gCb&DI
zdo>mvL;@XvQs2f6a*sJe%Cca=h$iNSa1rXsZb{ngNXCZUUrz+q*kG|mm$)VNO1(Sl
zn7F9Va~7BsPttmz&OJk3Osr;y^kFXX;r=q5Ux+*IQQm6fR$lc(sfY#)UKqof(I!Fzez(r_szd)v$z){XJmS%f`=-wba9*;?1wBy1kGqiWP<|)#q^{6p%)xM~`UT?nz~o
ze4Iy;lgpb5kNe!4l8Y{>(wvd?;+-&$;YbF5b;Qg|-BeRLmd4~yas#(_*3-A*R_WsB)G)({`~&Wc2d$mo6@jwexNEGf1ZB2v80vqXQz9U+{IhwW6X
zl_=&Uws0aYDoS#;qXL*P1dx#EmFZT55qofwAkafy#kqi1F6}*j%ea&v-pG@{9G5o1
zYg(LQO2hqt@`j<%tzkTu;{CD>EtlO!T<#SkFWzNsM@=E$-g}J^J6oxgzXrTQ
zqck#nETo~3%H%J&RXI&?tlb)7I;8mr&;7?#?o};rZQw(SMYa%dU4i
zXzx2%5350Bg6$($+3cfRaCdUGhRC?jma)w}feSJCh0q$qM;ii=6BFWA{Rh37rj&L_
zY*IU~L
z`o81C$!N3#e}U$+_-4p2#xf!2zk}U(aYSJ$XMYWvK;K;C4br0P#-DgI70a^6_WAXc
z9z^<)ThNX5(R$pWY{Vvi*IFNLQ;XE|@u#n%=U7&+*4BjM%ksDEM4?Zga+tf>n`XXi
zWye*L(9u-j$R-k1DzAaNSv=CgfOvsQBfcqa$oE!M>GYE%p8Wq*jM?tVo)OsM5*V9z$LON4Pg~P4&2wn-?Sm@9NDH(veg{*agyH(w(a&(U;8dV=UWJCly2f`z
zxrDK(g*p%UEZb8Kv8f~?I{HfVL-S!Dt!Nd{-^$nLN3bvq=(pgC_w3J{M|Ej&#`8Cn
z{^4DHEWaK1rJv#fa=)=H6t!IvhcgI27w|krTV}BG_pE>9E2UR0HC(
z?m~UXhjqRa+p?X1H=Hhx7mBgc*D;p<1b3Lg7SEJ>u_{LKS$5F4a@f;-Cs=a)?dU}w
z)jUfcs{e@d%;_V}WC6{)(=ev~I48OV&%4)~xKs0O1t9zn^GU%4jwZ;Cgh!dyryn}!
z?OE^S+Hs35+zDP{j~|0pE7F4gM_0U$)3eOeW4Rr6Me0C-61;Oy?O
z4s23X1M|anZ<6UttJiEJ=)Mc2KawWc-tOgg$y&9aYj}0IkN|qTX)&x2&LsS(I(@&Z
z-Gs;GO5*Z4@s#-byrFihwBg;M`U*)#)LfikXNoHNXRz+KN2FV*yNO%Cx3bDjXec@L
zx<8RQX03sUJ9ht)jby0!-$)QhO4b67t`nFh<=AJ^Jk
z{^=mI25db9GEck9nVEMK#FOc%y?LW@9t}8nHVTnAKH2I)*cGnST&>kyIyd+sHpPCs
z!XW^CXTrVgVEjI+H$VI)B_eG=dpOhR;XrQ>QNsC{P3$>%pe~QEqS5mQd6f#B-I;
zHeF>w7#bS-i~(}>21D>&FTaG~vOkoNlap&`G@Gk@wpc+N7h1UHzjshbuQ8A&^8qX5
zo3cb~PVs_wkrHuYQT(W^Zq2qn1QW6&^*){wxC8iwDQ=EMc6RrSty
z#U3Z)Za*-b)vy)LIAeo*PJF?63_9lf12MwpyeIMliLLiUqn-NzXyg&o^!c2e@n$dR
zEcML0WpcAnyL}pZFGXJ2NjvPb(PjCX1@6qLD49)=(Q@I{cLA}1ii^0E*Tb$*)<*;q
zc-*RvOEytv;B4h5t#()?Z3-aGJ1XgGohh-brMjR**^~3?oVQEcP|j5iLs^9z3TOhw
zgpwRI{h^Keg4#98%7Bbv?iCu}8WL84A+4S*wU
z0zc-gJmUZ=Y4%a)*pGykCNYc)W|Jd-9yiZxe7Us7?buH!DpGA%hflhH05WAr@g*`r
z1$^fEy6KM?NTkO7c4)AaA1`P9$y>jh!DTxP9yweawXrem=X1s={8-o$0WU1mYS`!1
zylh78GbIASdU90h%V7n-foHq6giHZ(A#z_
zo$TCTI)%WdFn02xkj?aZiLzFMImi;V9~qGVvVH6>fRD6uKRd^1Kdve2mGubRp-cDj
zGpf;s`Bz=9@PT_ISYc0w^Zv+0#+Gt=tllr=7}YaXukBURxC{zQV^cuHjJ?%D6`Eg2
z!CvlkZ2&euzVxM1%AA8zJ)eI>i5u1SH4R~f_9FBMjB6Au0M_|?Zr9E&T4Sa7$tz&~
zivY3Hyb&>0=IU&oy8GCrNBJ~TDu2NY1GYK8bc+~xHea1f5LbY3zCDbIh>H>%$k-lJ
z5_t-{86As+g$OFnd%wb{Wl(umGO3)L>R}5MVBHbrf-Gi!8sR_YZVuPNQcbxo9h3pXD6NgQvH65$O_Q
z&dMPdqS4;Ko^KIvzW@b(p(d7F%O%==$rbp~?8N8kEs0x}slx4XyV4}6!xU-sG&u6<
z9H=KYC8MEWE;tU^-7q>$EGfGE^d)nSeX4M4!G39@)FiKN2^iBNP(%{y)CFB#VQzcG
zzZ!Duy0Jo1n`y1dTwnICz(b~@>Md_?SC9R%#mbjd9H6hO4E&qUs7BU$RdJoxH7ERs
z%RE{usKMT`I471Lr3vCkY}Ru+=UB|QBG!VWmC5-njR6LDlg_`S=uTk
z|B#BE_1wHXg!dJy2j8@%ZM9vm{j6&&@2^sp$*fwlDd#R}!Hq~uxZ3>t@_Ki>(bq9Y
zmbQ#B3U3UZ8cIdSa4Zid1{ApSPTqqYum|hN8%E3^x07tN+FXDB(W~3AKT)sp?_8iX
zPAuj_@0q~#d5B1C>KAa+YS&E12vyp4*wGWtNNXb=%iCFr(c`?+&FCwL9{ZC01$)P;(k@wv
zOgq!T^eM-d%I`~GHtUW~4+HJLYe~ZG`1r{8{HZ0PW*t;$d@qWsNpo9^h3`0jZr9U}b2gMby`pU!76
z2m^rMXWZ-N`}DlLUaBc6rfjKezoTP|s>H6YriAYHNJ8sHvnL_zioij92mGxQ1TcYF
z^_GK&*urQHJ+_wWccjRm=hi?HzOB4g{1a9iBC4o}fzOrj$@1r$0s&tXGcJt@oJ6_g
zN^9YTmaFR+Fyi1^t-GW$uZN6xDtD1L7{nRmyohu(Jy^~@-!}J!ZmH(fx!I6!5p&W(
zVsNALsoL`lfxuuo=$Or7YX&>@!{{7~kdidE^1%TAF~XK*SIj1{!hib)IKz|}6W+Ki
z#z9r_UTbJK$OCC}mR5F6+)a*_JIz}feHvNTG@};m2#V(&v7Lj#3xzqsG7@>_57C-{F#J}!e!jeiKgB6bE!__Q
zUMs{Rn5f$wEe_mlB@>!Pk&2p4PZp-;YSq~vJ;P8dz9c=7i0Q>wj&
zpO`ppKnd=DL>1WNDf0cs53{jZ0NNr1EItPMw3
+/// <summary>
+/// This example demonstrates similarity between using <see cref="AzureAIAgent"/>
+/// and other agent types.
+/// </summary>
+public class Step01_AzureAIAgent(ITestOutputHelper output) : BaseAzureAgentTest(output)
+{
+    [Fact]
+    public async Task UseTemplateForAzureAgentAsync()
+    {
+        // Define the agent
+        string generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml");
+        PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml);
+        // Instructions, Name and Description properties are defined via the PromptTemplateConfig.
+        Agent definition = await this.AgentsClient.CreateAgentAsync("gpt-4o", templateConfig.Name, templateConfig.Description, templateConfig.Template);
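+        // Wrap the service-side definition; the template factory and format let Semantic Kernel render the YAML-defined instructions with the supplied arguments.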
+        AzureAIAgent agent = new(
+            definition,
+            this.AgentsClient,
+            templateFactory: new KernelPromptTemplateFactory(),
+            templateFormat: PromptTemplateConfig.SemanticKernelTemplateFormat)
+        {
+            Arguments =
+            {
+                { "topic", "Dog" },
+                { "length", "3" }
+            }
+        };
+
+        // Create a thread for the agent conversation.
+        AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
+
+        try
+        {
+            // Invoke the agent with the default arguments.
+            await InvokeAgentAsync();
+
+            // Invoke the agent with the override arguments.
+            await InvokeAgentAsync(
+                new()
+                {
+                    { "topic", "Cat" },
+                    { "length", "3" },
+                });
+        }
+        finally
+        {
+            await this.AgentsClient.DeleteThreadAsync(thread.Id);
+            await this.AgentsClient.DeleteAgentAsync(agent.Id);
+        }
+
+        // Local function to invoke agent and display the response.
+        async Task InvokeAgentAsync(KernelArguments? arguments = null)
+        {
+            await foreach (ChatMessageContent response in agent.InvokeAsync(thread.Id, arguments))
+            {
+                WriteAgentChatMessage(response);
+            }
+        }
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Plugins.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Plugins.cs
new file mode 100644
index 000000000000..4754acb92ff4
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Plugins.cs
@@ -0,0 +1,100 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Azure.AI.Projects;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.AzureAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Plugins;
+using Agent = Azure.AI.Projects.Agent;
+
+namespace GettingStarted.AzureAgents;
+
+/// <summary>
+/// Demonstrate creation of <see cref="AzureAIAgent"/> with a <see cref="KernelPlugin"/>,
+/// and then eliciting its response to explicit user messages.
+/// </summary>
+public class Step02_AzureAIAgent_Plugins(ITestOutputHelper output) : BaseAzureAgentTest(output)
+{
+    [Fact]
+    public async Task UseAzureAgentWithPluginAsync()
+    {
+        // Define the agent
+        AzureAIAgent agent = await CreateAzureAgentAsync(
+                plugin: KernelPluginFactory.CreateFromType<MenuPlugin>(),
+                instructions: "Answer questions about the menu.",
+                name: "Host");
+
+        // Create a thread for the agent conversation.
+        AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
+
+        // Respond to user input
+        try
+        {
+            await InvokeAgentAsync(agent, thread.Id, "Hello");
+            await InvokeAgentAsync(agent, thread.Id, "What is the special soup and its price?");
+            await InvokeAgentAsync(agent, thread.Id, "What is the special drink and its price?");
+            await InvokeAgentAsync(agent, thread.Id, "Thank you");
+        }
+        finally
+        {
+            await this.AgentsClient.DeleteThreadAsync(thread.Id);
+            await this.AgentsClient.DeleteAgentAsync(agent.Id);
+        }
+    }
+
+    [Fact]
+    public async Task UseAzureAgentWithPluginEnumParameterAsync()
+    {
+        // Define the agent
+        AzureAIAgent agent = await CreateAzureAgentAsync(plugin: KernelPluginFactory.CreateFromType<WidgetFactory>());
+
+        // Create a thread for the agent conversation.
+        AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
+
+        // Respond to user input
+        try
+        {
+            await InvokeAgentAsync(agent, thread.Id, "Create a beautiful red colored widget for me.");
+        }
+        finally
+        {
+            await this.AgentsClient.DeleteThreadAsync(thread.Id);
+            await this.AgentsClient.DeleteAgentAsync(agent.Id);
+        }
+    }
+
+    private async Task<AzureAIAgent> CreateAzureAgentAsync(KernelPlugin plugin, string? instructions = null, string? name = null)
+    {
+        // Define the agent
+        Agent definition = await this.AgentsClient.CreateAgentAsync(
+            TestConfiguration.AzureAI.ChatModelId,
+            name,
+            null,
+            instructions);
+
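+        // Wrap the service-side definition; a dedicated Kernel instance hosts the plugins used for function calling.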
+        AzureAIAgent agent = new(definition, this.AgentsClient)
+        {
+            Kernel = new Kernel(),
+        };
+
+        // Add to the agent's Kernel
+        if (plugin != null)
+        {
+            agent.Kernel.Plugins.Add(plugin);
+        }
+
+        return agent;
+    }
+
+    // Local function to invoke agent and display the conversation messages.
+    private async Task InvokeAgentAsync(AzureAIAgent agent, string threadId, string input)
+    {
+        ChatMessageContent message = new(AuthorRole.User, input);
+        await agent.AddChatMessageAsync(threadId, message);
+        this.WriteAgentChatMessage(message);
+
+        await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
+        {
+            this.WriteAgentChatMessage(response);
+        }
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step03_AzureAIAgent_Chat.cs
similarity index 56%
rename from dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs
rename to dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step03_AzureAIAgent_Chat.cs
index 3a48d407dea9..c71b7124b463 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs
+++ b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step03_AzureAIAgent_Chat.cs
@@ -1,28 +1,27 @@
 // Copyright (c) Microsoft. All rights reserved.
-using Microsoft.Extensions.Logging;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.Agents.AzureAI;
 using Microsoft.SemanticKernel.Agents.Chat;
 using Microsoft.SemanticKernel.ChatCompletion;
+using Agent = Azure.AI.Projects.Agent;
 
-namespace GettingStarted;
+namespace GettingStarted.AzureAgents;
 
 /// <summary>
-/// A repeat of <see cref="Step03_Chat"/> with logging enabled via assignment
-/// of a <see cref="ILoggerFactory"/> to <see cref="AgentChat.LoggerFactory"/>.
+/// Demonstrate creation of <see cref="AgentGroupChat"/> with <see cref="AgentGroupChatSettings"/>
+/// that inform how chat proceeds with regards to: Agent selection, chat continuation, and maximum
+/// number of agent interactions.
 /// </summary>
-/// <remarks>
-/// Samples become super noisy with logging always enabled.
-/// </remarks>
-public class Step07_Logging(ITestOutputHelper output) : BaseAgentsTest(output)
+public class Step03_AzureAIAgent_Chat(ITestOutputHelper output) : BaseAzureAgentTest(output)
 {
     private const string ReviewerName = "ArtDirector";
     private const string ReviewerInstructions =
         """
         You are an art director who has opinions about copywriting born of a love for David Ogilvy.
         The goal is to determine if the given copy is acceptable to print.
-        If so, state that it is approved.
-        If not, provide insight on how to refine suggested copy without examples.
+        If so, state that it is approved.  Do not use the word "approve" unless you are giving approval.
+        If not, provide insight on how to refine suggested copy without example.
         """;
 
     private const string CopyWriterName = "CopyWriter";
@@ -37,33 +36,26 @@ Consider suggestions when refining an idea.
         """;
 
     [Fact]
-    public async Task UseLoggerFactoryWithAgentGroupChatAsync()
+    public async Task UseGroupChatWithTwoAgentsAsync()
     {
         // Define the agents
-        ChatCompletionAgent agentReviewer =
-            new()
-            {
-                Instructions = ReviewerInstructions,
-                Name = ReviewerName,
-                Kernel = this.CreateKernelWithChatCompletion(),
-                LoggerFactory = this.LoggerFactory,
-            };
-
-        ChatCompletionAgent agentWriter =
-            new()
-            {
-                Instructions = CopyWriterInstructions,
-                Name = CopyWriterName,
-                Kernel = this.CreateKernelWithChatCompletion(),
-                LoggerFactory = this.LoggerFactory,
-            };
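+        // Define each agent in the Azure AI Agent service, then wrap it as an AzureAIAgent so it can join the group chat.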
+        Agent reviewerModel = await this.AgentsClient.CreateAgentAsync(
+            TestConfiguration.AzureAI.ChatModelId,
+            ReviewerName,
+            null,
+            ReviewerInstructions);
+        AzureAIAgent agentReviewer = new(reviewerModel, this.AgentsClient);
+        Agent writerModel = await this.AgentsClient.CreateAgentAsync(
+            TestConfiguration.AzureAI.ChatModelId,
+            CopyWriterName,
+            null,
+            CopyWriterInstructions);
+        AzureAIAgent agentWriter = new(writerModel, this.AgentsClient);
 
         // Create a chat for agent interaction.
         AgentGroupChat chat =
             new(agentWriter, agentReviewer)
             {
-                // This is all that is required to enable logging across the agent framework.
-                LoggerFactory = this.LoggerFactory,
                 ExecutionSettings =
                     new()
                     {
@@ -80,23 +72,30 @@ public async Task UseLoggerFactoryWithAgentGroupChatAsync()
                     }
             };
 
-        // Invoke chat and display messages.
-        ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons.");
-        chat.AddChatMessage(input);
-        this.WriteAgentChatMessage(input);
+        try
+        {
+            // Invoke chat and display messages.
+            ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons.");
+            chat.AddChatMessage(input);
+            this.WriteAgentChatMessage(input);
+
+            await foreach (ChatMessageContent response in chat.InvokeAsync())
+            {
+                this.WriteAgentChatMessage(response);
+            }
 
-        await foreach (ChatMessageContent response in chat.InvokeAsync())
+            Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]");
+        }
+        finally
         {
-            this.WriteAgentChatMessage(response);
+            await chat.ResetAsync();
         }
-
-        Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]");
     }
 
     private sealed class ApprovalTerminationStrategy : TerminationStrategy
     {
         // Terminate when the final message contains the term "approve"
-        protected override Task<bool> ShouldAgentTerminateAsync(Agent agent, IReadOnlyList<ChatMessageContent> history, CancellationToken cancellationToken)
+        protected override Task<bool> ShouldAgentTerminateAsync(Microsoft.SemanticKernel.Agents.Agent agent, IReadOnlyList<ChatMessageContent> history, CancellationToken cancellationToken)
             => Task.FromResult(history[history.Count - 1].Content?.Contains("approve", StringComparison.OrdinalIgnoreCase) ?? false);
     }
 }
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_CodeInterpreter.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_CodeInterpreter.cs
new file mode 100644
index 000000000000..551951a81a49
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_CodeInterpreter.cs
@@ -0,0 +1,54 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Azure.AI.Projects;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.AzureAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Agent = Azure.AI.Projects.Agent;
+
+namespace GettingStarted.AzureAgents;
+
+/// <summary>
+/// Demonstrate using code-interpreter on <see cref="AzureAIAgent"/>.
+/// </summary>
+public class Step04_AzureAIAgent_CodeInterpreter(ITestOutputHelper output) : BaseAzureAgentTest(output)
+{
+    [Fact]
+    public async Task UseCodeInterpreterToolWithAgentAsync()
+    {
+        // Define the agent
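+        // The code-interpreter tool lets the Azure AI Agent service execute model-generated code when answering.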
+        Agent definition = await this.AgentsClient.CreateAgentAsync(
+            TestConfiguration.AzureAI.ChatModelId,
+            tools: [new CodeInterpreterToolDefinition()]);
+        AzureAIAgent agent = new(definition, this.AgentsClient)
+        {
+            Kernel = new Kernel(),
+        };
+
+        // Create a thread for the agent conversation.
+        AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
+
+        // Respond to user input
+        try
+        {
+            await InvokeAgentAsync("Use code to determine the values in the Fibonacci sequence that that are less then the value of 101?");
+        }
+        finally
+        {
+            await this.AgentsClient.DeleteThreadAsync(thread.Id);
+            await this.AgentsClient.DeleteAgentAsync(agent.Id);
+        }
+
+        // Local function to invoke agent and display the conversation messages.
+        async Task InvokeAgentAsync(string input)
+        {
+            ChatMessageContent message = new(AuthorRole.User, input);
+            await agent.AddChatMessageAsync(thread.Id, message);
+            this.WriteAgentChatMessage(message);
+
+            await foreach (ChatMessageContent response in agent.InvokeAsync(thread.Id))
+            {
+                this.WriteAgentChatMessage(response);
+            }
+        }
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_FileSearch.cs
new file mode 100644
index 000000000000..361025c44832
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_FileSearch.cs
@@ -0,0 +1,71 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Azure.AI.Projects;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.AzureAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Resources;
+using Agent = Azure.AI.Projects.Agent;
+
+namespace GettingStarted.AzureAgents;
+
+/// <summary>
+/// Demonstrate using <see cref="AzureAIAgent"/> with file search.
+/// </summary>
+public class Step05_AzureAIAgent_FileSearch(ITestOutputHelper output) : BaseAzureAgentTest(output)
+{
+    [Fact]
+    public async Task UseFileSearchToolWithAgentAsync()
+    {
+        // Define the agent
+        await using Stream stream = EmbeddedResource.ReadStream("employees.pdf")!;
+
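+        // Upload the document and index it in a vector store so the file-search tool can query it.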
+        AgentFile fileInfo = await this.AgentsClient.UploadFileAsync(stream, AgentFilePurpose.Agents, "employees.pdf");
+        VectorStore fileStore =
+            await this.AgentsClient.CreateVectorStoreAsync(
+                [fileInfo.Id],
+                metadata: new Dictionary<string, string>() { { SampleMetadataKey, bool.TrueString } });
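+        // Create the agent with the file-search tool wired to the vector store created above.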
+        Agent agentModel = await this.AgentsClient.CreateAgentAsync(
+            TestConfiguration.AzureAI.ChatModelId,
+            tools: [new FileSearchToolDefinition()],
+            toolResources: new()
+            {
+                FileSearch = new()
+                {
+                    VectorStoreIds = { fileStore.Id },
+                }
+            },
+            metadata: new Dictionary<string, string>() { { SampleMetadataKey, bool.TrueString } });
+        AzureAIAgent agent = new(agentModel, this.AgentsClient);
+
+        // Create a thread associated with the agent conversation.
+        AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
+
+        // Respond to user input
+        try
+        {
+            await InvokeAgentAsync("Who is the youngest employee?");
+            await InvokeAgentAsync("Who works in sales?");
+            await InvokeAgentAsync("I have a customer request, who can help me?");
+        }
+        finally
+        {
+            await this.AgentsClient.DeleteThreadAsync(thread.Id);
+            await this.AgentsClient.DeleteAgentAsync(agent.Id);
+            await this.AgentsClient.DeleteVectorStoreAsync(fileStore.Id);
+            await this.AgentsClient.DeleteFileAsync(fileInfo.Id);
+        }
+
+        // Local function to invoke agent and display the conversation messages.
+        async Task InvokeAgentAsync(string input)
+        {
+            ChatMessageContent message = new(AuthorRole.User, input);
+            await agent.AddChatMessageAsync(thread.Id, message);
+            this.WriteAgentChatMessage(message);
+
+            await foreach (ChatMessageContent response in agent.InvokeAsync(thread.Id))
+            {
+                this.WriteAgentChatMessage(response);
+            }
+        }
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step06_AzureAIAgent_OpenAPI.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step06_AzureAIAgent_OpenAPI.cs
new file mode 100644
index 000000000000..54019df77be4
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step06_AzureAIAgent_OpenAPI.cs
@@ -0,0 +1,68 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Azure.AI.Projects;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.AzureAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Resources;
+using Agent = Azure.AI.Projects.Agent;
+
+namespace GettingStarted.AzureAgents;
+
+/// <summary>
+/// This example demonstrates invoking Open API functions using <see cref="AzureAIAgent"/>.
+/// </summary>
+/// <remarks>
+/// Note: Open API invocation does not involve kernel function calling or kernel filters.
+/// Open API invocation is managed entirely by the Azure AI Agent service.
+/// </remarks>
+public class Step06_AzureAIAgent_OpenAPI(ITestOutputHelper output) : BaseAzureAgentTest(output)
+{
+    [Fact]
+    public async Task UseOpenAPIToolWithAgentAsync()
+    {
+        // Retrieve Open API specifications
+        string apiCountries = EmbeddedResource.Read("countries.json");
+        string apiWeather = EmbeddedResource.Read("weather.json");
+
+        // Define the agent
+        Agent definition = await this.AgentsClient.CreateAgentAsync(
+            TestConfiguration.AzureAI.ChatModelId,
+            tools:
+            [
+                new OpenApiToolDefinition("RestCountries", "Retrieve country information", BinaryData.FromString(apiCountries), new OpenApiAnonymousAuthDetails()),
+                new OpenApiToolDefinition("Weather", "Retrieve weather by location", BinaryData.FromString(apiWeather), new OpenApiAnonymousAuthDetails())
+            ]);
+        AzureAIAgent agent = new(definition, this.AgentsClient)
+        {
+            Kernel = new Kernel(),
+        };
+
+        // Create a thread for the agent conversation.
+        AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
+
+        // Respond to user input
+        try
+        {
+            await InvokeAgentAsync("What is the name and population of the country that uses currency with abbreviation THB");
+            await InvokeAgentAsync("What is the weather in the capitol city of that country?");
+        }
+        finally
+        {
+            await this.AgentsClient.DeleteThreadAsync(thread.Id);
+            await this.AgentsClient.DeleteAgentAsync(agent.Id);
+        }
+
+        // Local function to invoke agent and display the conversation messages.
+        async Task InvokeAgentAsync(string input)
+        {
+            ChatMessageContent message = new(AuthorRole.User, input);
+            await agent.AddChatMessageAsync(thread.Id, message);
+            this.WriteAgentChatMessage(message);
+
+            await foreach (ChatMessageContent response in agent.InvokeAsync(thread.Id))
+            {
+                this.WriteAgentChatMessage(response);
+            }
+        }
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step07_AzureAIAgent_Functions.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step07_AzureAIAgent_Functions.cs
new file mode 100644
index 000000000000..f4ca77e75c5e
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step07_AzureAIAgent_Functions.cs
@@ -0,0 +1,75 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Azure.AI.Projects;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.AzureAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Plugins;
+using Agent = Azure.AI.Projects.Agent;
+
+namespace GettingStarted.AzureAgents;
+
+/// <summary>
+/// This example demonstrates how to define function tools for an <see cref="AzureAIAgent"/>
+/// when the agent is created. This is useful if you want to retrieve the agent later and
+/// then dynamically check what function tools it requires.
+/// </summary>
+public class Step07_AzureAIAgent_Functions(ITestOutputHelper output) : BaseAzureAgentTest(output)
+{
+    private const string HostName = "Host";
+    private const string HostInstructions = "Answer questions about the menu.";
+
+    [Fact]
+    public async Task UseSingleAgentWithFunctionToolsAsync()
+    {
+        // Define the agent
+        // In this sample the function tools are added to the agent. This is
+        // important if you want to retrieve the agent later and then dynamically check
+        // what function tools it requires.
+        KernelPlugin plugin = KernelPluginFactory.CreateFromType<MenuPlugin>();
+        var tools = plugin.Select(f => f.ToToolDefinition(plugin.Name));
+
+        Agent definition = await this.AgentsClient.CreateAgentAsync(
+            model: TestConfiguration.AzureAI.ChatModelId,
+            name: HostName,
+            description: null,
+            instructions: HostInstructions,
+            tools: tools);
+        AzureAIAgent agent = new(definition, this.AgentsClient)
+        {
+            Kernel = new Kernel(),
+        };
+
+        // Add plugin to the agent's Kernel (same as direct Kernel usage).
+        agent.Kernel.Plugins.Add(plugin);
+
+        // Create a thread for the agent conversation.
+        AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
+
+        // Respond to user input
+        try
+        {
+            await InvokeAgentAsync("Hello");
+            await InvokeAgentAsync("What is the special soup and its price?");
+            await InvokeAgentAsync("What is the special drink and its price?");
+            await InvokeAgentAsync("Thank you");
+        }
+        finally
+        {
+            await this.AgentsClient.DeleteThreadAsync(thread.Id);
+            await this.AgentsClient.DeleteAgentAsync(agent.Id);
+        }
+
+        // Local function to invoke agent and display the conversation messages.
+        async Task InvokeAgentAsync(string input)
+        {
+            ChatMessageContent message = new(AuthorRole.User, input);
+            await agent.AddChatMessageAsync(thread.Id, message);
+            this.WriteAgentChatMessage(message);
+
+            await foreach (ChatMessageContent response in agent.InvokeAsync(thread.Id))
+            {
+                this.WriteAgentChatMessage(response);
+            }
+        }
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/README.md b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/README.md
new file mode 100644
index 000000000000..083a1c71a156
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/README.md
@@ -0,0 +1,38 @@
+# Concept samples on how to use AWS Bedrock agents
+
+## Pre-requisites
+
+1. You need to have an AWS account and [access to the foundation models](https://docs.aws.amazon.com/bedrock/latest/userguide/model-access-permissions.html)
+2. [AWS CLI installed](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html) and [configured](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html#configuration)
+
+## Before running the samples
+
+You need to set up some user secrets to run the samples.
+
+### `BedrockAgent:AgentResourceRoleArn`
+
+On your AWS console, go to the IAM service and go to **Roles**. Find the role you want to use and click on it. You will find the ARN in the summary section.
+
+```
+dotnet user-secrets set "BedrockAgent:AgentResourceRoleArn" "arn:aws:iam::...:role/..."
+```
+
+### `BedrockAgent:FoundationModel`
+
+You need to make sure you have permission to access the foundation model. You can find the model ID in the [AWS documentation](https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html). To see which models you have access to, find the policy attached to your role; you should see the list of models under the `Resource` section.
+
+```
+dotnet user-secrets set "BedrockAgent:FoundationModel" "..."
+```
+
+### How to add the `bedrock:InvokeModelWithResponseStream` action to an IAM policy
+
+1. Open the [IAM console](https://console.aws.amazon.com/iam/).
+2. On the left navigation pane, choose `Roles` under `Access management`.
+3. Find the role you want to edit and click on it.
+4. Under the `Permissions policies` tab, click on the policy you want to edit.
+5. Under the `Permissions defined in this policy` section, look for the service. You should see **Bedrock** if you already have access to the Bedrock agent service.
+6. Click on the service, and then click `Edit`.
+7. On the right, you will be able to add an action. Find the service and search for `InvokeModelWithResponseStream`.
+8. Check the box next to the action and then scroll all the way down and click `Next`.
+9. Follow the prompts to save the changes.
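+
+For reference, the resulting policy statement should end up looking roughly like the sketch below. This is only an illustration, not the exact policy the console generates: the `Resource` ARNs depend on your account, region, and the foundation models you have enabled, so substitute your own values.
+
+```
+{
+    "Version": "2012-10-17",
+    "Statement": [
+        {
+            "Effect": "Allow",
+            "Action": ["bedrock:InvokeModelWithResponseStream"],
+            "Resource": ["arn:aws:bedrock:...::foundation-model/..."]
+        }
+    ]
+}
+```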
diff --git a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step01_BedrockAgent.cs b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step01_BedrockAgent.cs
new file mode 100644
index 000000000000..2c4aa4355097
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step01_BedrockAgent.cs
@@ -0,0 +1,73 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.SemanticKernel.Agents.Bedrock;
+using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
+
+namespace GettingStarted.BedrockAgents;
+
+/// <summary>
+/// This example demonstrates how to interact with a <see cref="BedrockAgent"/> in the most basic way.
+/// </summary>
+public class Step01_BedrockAgent(ITestOutputHelper output) : BaseBedrockAgentTest(output)
+{
+    private const string UserQuery = "Why is the sky blue in one sentence?";
+
+    /// <summary>
+    /// Demonstrates how to create a new <see cref="BedrockAgent"/> and interact with it.
+    /// The agent will respond to the user query.
+    /// </summary>
+    [Fact]
+    public async Task UseNewAgentAsync()
+    {
+        // Create the agent
+        var bedrockAgent = await this.CreateAgentAsync("Step01_BedrockAgent");
+
+        // Respond to user input
+        try
+        {
+            var responses = bedrockAgent.InvokeAsync(BedrockAgent.CreateSessionId(), UserQuery, null);
+            await foreach (var response in responses)
+            {
+                this.Output.WriteLine(response.Content);
+            }
+        }
+        finally
+        {
+            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
+        }
+    }
+
+    /// <summary>
+    /// Demonstrates how to create a new <see cref="BedrockAgent"/> and interact with it using streaming.
+    /// The agent will respond to the user query.
+    /// </summary>
+    [Fact]
+    public async Task UseNewAgentStreamingAsync()
+    {
+        // Create the agent
+        var bedrockAgent = await this.CreateAgentAsync("Step01_BedrockAgent_Streaming");
+
+        // Respond to user input
+        try
+        {
+            var streamingResponses = bedrockAgent.InvokeStreamingAsync(BedrockAgent.CreateSessionId(), UserQuery, null);
+            await foreach (var response in streamingResponses)
+            {
+                this.Output.WriteLine(response.Content);
+            }
+        }
+        finally
+        {
+            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
+        }
+    }
+
+    protected override async Task<BedrockAgent> CreateAgentAsync(string agentName)
+    {
+        // Create a new agent on the Bedrock Agent service and prepare it for use
+        var agentModel = await this.Client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest(agentName));
+        // Create a new BedrockAgent instance with the agent model and the client
+        // so that we can interact with the agent using Semantic Kernel contents.
+        return new BedrockAgent(agentModel, this.Client);
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step02_BedrockAgent_CodeInterpreter.cs b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step02_BedrockAgent_CodeInterpreter.cs
new file mode 100644
index 000000000000..70bde61a9aab
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step02_BedrockAgent_CodeInterpreter.cs
@@ -0,0 +1,90 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Reflection;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.Bedrock;
+using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
+
+namespace GettingStarted.BedrockAgents;
+
+/// <summary>
+/// This example demonstrates how to interact with a <see cref="BedrockAgent"/> with code interpreter enabled.
+/// </summary>
+public class Step02_BedrockAgent_CodeInterpreter(ITestOutputHelper output) : BaseBedrockAgentTest(output)
+{
+    private const string UserQuery = @"Create a bar chart for the following data:
+Panda   5
+Tiger   8
+Lion    3
+Monkey  6
+Dolphin  2";
+
+    /// <summary>
+    /// Demonstrates how to create a new <see cref="BedrockAgent"/> with code interpreter enabled and interact with it.
+    /// The agent will respond to the user query by creating Python code that will be executed by the code interpreter.
+    /// The output of the code interpreter will be a file containing the bar chart, which will be returned to the user.
+    /// </summary>
+    [Fact]
+    public async Task UseAgentWithCodeInterpreterAsync()
+    {
+        // Create the agent
+        var bedrockAgent = await this.CreateAgentAsync("Step02_BedrockAgent_CodeInterpreter");
+
+        // Respond to user input
+        try
+        {
+            BinaryContent? binaryContent = null;
+            var responses = bedrockAgent.InvokeAsync(BedrockAgent.CreateSessionId(), UserQuery, null);
+            await foreach (var response in responses)
+            {
+                if (response.Content != null)
+                {
+                    this.Output.WriteLine(response.Content);
+                }
+                if (binaryContent == null && response.Items.Count > 0)
+                {
+                    binaryContent = response.Items.OfType<BinaryContent>().FirstOrDefault();
+                }
+            }
+
+            if (binaryContent == null)
+            {
+                throw new InvalidOperationException("No file found in the response.");
+            }
+
+            // Save the file to the same directory as the test assembly
+            var filePath = Path.Combine(
+                Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)!,
+                binaryContent.Metadata!["Name"]!.ToString()!);
+            this.Output.WriteLine($"Saving file to {filePath}");
+            binaryContent.WriteToFile(filePath, overwrite: true);
+
+            // Expected output:
+            // Here is the bar chart for the given data:
+            // [A bar chart showing the following data:
+            // Panda   5
+            // Tiger   8
+            // Lion    3
+            // Monkey  6
+            // Dolphin 2]
+            // Saving file to ...
+        }
+        finally
+        {
+            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
+        }
+    }
+
+    protected override async Task<BedrockAgent> CreateAgentAsync(string agentName)
+    {
+        // Create a new agent on the Bedrock Agent service and prepare it for use
+        var agentModel = await this.Client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest(agentName));
+        // Create a new BedrockAgent instance with the agent model and the client
+        // so that we can interact with the agent using Semantic Kernel contents.
+        var bedrockAgent = new BedrockAgent(agentModel, this.Client);
+        // Create the code interpreter action group and prepare the agent for interaction
+        await bedrockAgent.CreateCodeInterpreterActionGroupAsync();
+
+        return bedrockAgent;
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step03_BedrockAgent_Functions.cs b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step03_BedrockAgent_Functions.cs
new file mode 100644
index 000000000000..ab23b4be0128
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step03_BedrockAgent_Functions.cs
@@ -0,0 +1,141 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.ComponentModel;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.Bedrock;
+using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
+
+namespace GettingStarted.BedrockAgents;
+
+/// <summary>
+/// This example demonstrates how to interact with a <see cref="BedrockAgent"/> with kernel functions.
+/// </summary>
+public class Step03_BedrockAgent_Functions(ITestOutputHelper output) : BaseBedrockAgentTest(output)
+{
+    /// <summary>
+    /// Demonstrates how to create a new <see cref="BedrockAgent"/> with kernel functions enabled and interact with it.
+    /// The agent will respond to the user query by calling kernel functions to provide weather information.
+    /// </summary>
+    [Fact]
+    public async Task UseAgentWithFunctionsAsync()
+    {
+        // Create the agent
+        var bedrockAgent = await this.CreateAgentAsync("Step03_BedrockAgent_Functions");
+
+        // Respond to user input
+        try
+        {
+            var responses = bedrockAgent.InvokeAsync(
+                BedrockAgent.CreateSessionId(),
+                "What is the weather in Seattle?",
+                null);
+            await foreach (var response in responses)
+            {
+                if (response.Content != null)
+                {
+                    this.Output.WriteLine(response.Content);
+                }
+            }
+        }
+        finally
+        {
+            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
+        }
+    }
+
+    /// <summary>
+    /// Demonstrates how to create a new <see cref="BedrockAgent"/> with kernel functions enabled and interact with it using streaming.
+    /// The agent will respond to the user query by calling kernel functions to provide weather information.
+    /// </summary>
+    [Fact]
+    public async Task UseAgentStreamingWithFunctionsAsync()
+    {
+        // Create the agent
+        var bedrockAgent = await this.CreateAgentAsync("Step03_BedrockAgent_Functions_Streaming");
+
+        // Respond to user input
+        try
+        {
+            var streamingResponses = bedrockAgent.InvokeStreamingAsync(
+                BedrockAgent.CreateSessionId(),
+                "What is the weather forecast in Seattle?",
+                null);
+            await foreach (var response in streamingResponses)
+            {
+                if (response.Content != null)
+                {
+                    this.Output.WriteLine(response.Content);
+                }
+            }
+        }
+        finally
+        {
+            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
+        }
+    }
+
+    /// <summary>
+    /// Demonstrates how to create a new <see cref="BedrockAgent"/> with kernel functions enabled and interact with it.
+    /// The agent will respond to the user query by calling multiple kernel functions in parallel to provide weather information.
+    /// </summary>
+    [Fact]
+    public async Task UseAgentWithParallelFunctionsAsync()
+    {
+        // Create the agent
+        var bedrockAgent = await this.CreateAgentAsync("Step03_BedrockAgent_Functions_Parallel");
+
+        // Respond to user input
+        try
+        {
+            var responses = bedrockAgent.InvokeAsync(
+                BedrockAgent.CreateSessionId(),
+                "What is the current weather in Seattle and what is the weather forecast in Seattle?",
+                null);
+            await foreach (var response in responses)
+            {
+                if (response.Content != null)
+                {
+                    this.Output.WriteLine(response.Content);
+                }
+            }
+        }
+        finally
+        {
+            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
+        }
+    }
+
+    protected override async Task<BedrockAgent> CreateAgentAsync(string agentName)
+    {
+        // Create a new agent on the Bedrock Agent service and prepare it for use
+        var agentModel = await this.Client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest(agentName));
+        // Create a new kernel with plugins
+        Kernel kernel = new();
+        kernel.Plugins.Add(KernelPluginFactory.CreateFromType<WeatherPlugin>());
+        // Create a new BedrockAgent instance with the agent model and the client
+        // so that we can interact with the agent using Semantic Kernel contents.
+        var bedrockAgent = new BedrockAgent(agentModel, this.Client)
+        {
+            Kernel = kernel,
+        };
+        // Create the kernel function action group and prepare the agent for interaction
+        await bedrockAgent.CreateKernelFunctionActionGroupAsync();
+
+        return bedrockAgent;
+    }
+
+    private sealed class WeatherPlugin
+    {
+        [KernelFunction, Description("Provides realtime weather information.")]
+        public string Current([Description("The location to get the weather for.")] string location)
+        {
+            return $"The current weather in {location} is 72 degrees.";
+        }
+
+        [KernelFunction, Description("Forecast weather information.")]
+        public string Forecast([Description("The location to get the weather for.")] string location)
+        {
+            return $"The forecast for {location} is 75 degrees tomorrow.";
+        }
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step04_BedrockAgent_Trace.cs b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step04_BedrockAgent_Trace.cs
new file mode 100644
index 000000000000..3e1400a5115d
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step04_BedrockAgent_Trace.cs
@@ -0,0 +1,176 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.ComponentModel;
+using Amazon.BedrockAgentRuntime.Model;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.Bedrock;
+using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
+
+namespace GettingStarted.BedrockAgents;
+
+/// <summary>
+/// This example demonstrates how to interact with a <see cref="BedrockAgent"/> and inspect the agent's thought process.
+/// To learn more about different traces available, see:
+/// https://docs.aws.amazon.com/bedrock/latest/userguide/trace-events.html
+/// </summary>
+public class Step04_BedrockAgent_Trace(ITestOutputHelper output) : BaseBedrockAgentTest(output)
+{
+    /// <summary>
+    /// Demonstrates how to inspect the thought process of a <see cref="BedrockAgent"/> by enabling trace.
+    /// </summary>
+    [Fact]
+    public async Task UseAgentWithTraceAsync()
+    {
+        // Create the agent
+        var bedrockAgent = await this.CreateAgentAsync("Step04_BedrockAgent_Trace");
+
+        // Respond to user input
+        var userQuery = "What is the current weather in Seattle and what is the weather forecast in Seattle?";
+        try
+        {
+            // Customize the request for advanced scenarios
+            InvokeAgentRequest invokeAgentRequest = new()
+            {
+                AgentAliasId = BedrockAgent.WorkingDraftAgentAlias,
+                AgentId = bedrockAgent.Id,
+                SessionId = BedrockAgent.CreateSessionId(),
+                InputText = userQuery,
+                // Enable trace to inspect the agent's thought process
+                EnableTrace = true,
+            };
+
+            var responses = bedrockAgent.InvokeAsync(invokeAgentRequest, null);
+            await foreach (var response in responses)
+            {
+                if (response.Content != null)
+                {
+                    this.Output.WriteLine(response.Content);
+                }
+                if (response.InnerContent is List<object?> innerContents)
+                {
+                    // There could be multiple traces and they are stored in the InnerContent property
+                    var traceParts = innerContents.OfType<TracePart>().ToList();
+                    if (traceParts is not null)
+                    {
+                        foreach (var tracePart in traceParts)
+                        {
+                            this.OutputTrace(tracePart.Trace);
+                        }
+                    }
+                }
+            }
+        }
+        finally
+        {
+            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
+        }
+    }
+
+    /// <summary>
+    /// Outputs the trace information to the console.
+    /// This only outputs the orchestration trace for demonstration purposes.
+    /// To learn more about different traces available, see:
+    /// https://docs.aws.amazon.com/bedrock/latest/userguide/trace-events.html
+    /// </summary>
+    private void OutputTrace(Trace trace)
+    {
+        if (trace.OrchestrationTrace is not null)
+        {
+            if (trace.OrchestrationTrace.ModelInvocationInput is not null)
+            {
+                this.Output.WriteLine("========== Orchestration trace ==========");
+                this.Output.WriteLine("Orchestration input:");
+                this.Output.WriteLine(trace.OrchestrationTrace.ModelInvocationInput.Text);
+            }
+            if (trace.OrchestrationTrace.ModelInvocationOutput is not null)
+            {
+                this.Output.WriteLine("========== Orchestration trace ==========");
+                this.Output.WriteLine("Orchestration output:");
+                this.Output.WriteLine(trace.OrchestrationTrace.ModelInvocationOutput.RawResponse.Content);
+                this.Output.WriteLine("Usage:");
+                this.Output.WriteLine($"Input token: {trace.OrchestrationTrace.ModelInvocationOutput.Metadata.Usage.InputTokens}");
+                this.Output.WriteLine($"Output token: {trace.OrchestrationTrace.ModelInvocationOutput.Metadata.Usage.OutputTokens}");
+            }
+        }
+        // Example output:
+        // ========== Orchestration trace ==========
+        // Orchestration input:
+        // {"system":"You're a helpful assistant who helps users find information.You have been provided with a set of functions to answer ...
+        // ========== Orchestration trace ==========
+        // Orchestration output:
+        // 
+        // To answer this question, I will need to call the following functions:
+        // 1. Step04_BedrockAgent_Trace_KernelFunctions::Current to get the current weather in Seattle
+        // 2. Step04_BedrockAgent_Trace_KernelFunctions::Forecast to get the weather forecast in Seattle
+        // 
+        //
+        // 
+        // 
+        //     Step04_BedrockAgent_Trace_KernelFunctions::Current
+        //     
+        //     Seattle
+        //     
+        // Usage:
+        // Input token: 617
+        // Output token: 144
+        // ========== Orchestration trace ==========
+        // Orchestration input:
+        // {"system":"You're a helpful assistant who helps users find information.You have been provided with a set of functions to answer ...
+        // ========== Orchestration trace ==========
+        // Orchestration output:
+        // Now that I have the current weather in Seattle, I will call the forecast function to get the weather forecast.
+        //
+        // 
+        // 
+        // Step04_BedrockAgent_Trace_KernelFunctions::Forecast
+        // 
+        // Seattle
+        // 
+        // Usage:
+        // Input token: 834
+        // Output token: 87
+        // ========== Orchestration trace ==========
+        // Orchestration input:
+        // {"system":"You're a helpful assistant who helps users find information.You have been provided with a set of functions to answer ...
+        // ========== Orchestration trace ==========
+        // Orchestration output:
+        // 
+        // The current weather in Seattle is 72 degrees. The weather forecast for Seattle is 75 degrees tomorrow.
+        // Usage:
+        // Input token: 1003
+        // Output token: 31
+    }
+
+    protected override async Task<BedrockAgent> CreateAgentAsync(string agentName)
+    {
+        // Create a new agent on the Bedrock Agent service and prepare it for use
+        var agentModel = await this.Client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest(agentName));
+        // Create a new kernel with plugins
+        Kernel kernel = new();
+        kernel.Plugins.Add(KernelPluginFactory.CreateFromType<WeatherPlugin>());
+        // Create a new BedrockAgent instance with the agent model and the client
+        // so that we can interact with the agent using Semantic Kernel contents.
+        var bedrockAgent = new BedrockAgent(agentModel, this.Client)
+        {
+            Kernel = kernel,
+        };
+        // Create the kernel function action group and prepare the agent for interaction
+        await bedrockAgent.CreateKernelFunctionActionGroupAsync();
+
+        return bedrockAgent;
+    }
+
+    private sealed class WeatherPlugin
+    {
+        [KernelFunction, Description("Provides realtime weather information.")]
+        public string Current([Description("The location to get the weather for.")] string location)
+        {
+            return $"The current weather in {location} is 72 degrees.";
+        }
+
+        [KernelFunction, Description("Forecast weather information.")]
+        public string Forecast([Description("The location to get the weather for.")] string location)
+        {
+            return $"The forecast for {location} is 75 degrees tomorrow.";
+        }
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step05_BedrockAgent_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step05_BedrockAgent_FileSearch.cs
new file mode 100644
index 000000000000..9b7b4330af33
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step05_BedrockAgent_FileSearch.cs
@@ -0,0 +1,75 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Amazon.BedrockAgentRuntime.Model;
+using Microsoft.SemanticKernel.Agents.Bedrock;
+using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
+
+namespace GettingStarted.BedrockAgents;
+
+/// <summary>
+/// This example demonstrates how to interact with a <see cref="BedrockAgent"/> that is associated with a knowledge base.
+/// A Bedrock Knowledge Base is a collection of documents that the agent uses to answer user queries.
+/// To learn more about Bedrock Knowledge Base, see:
+/// https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base.html
+/// </summary>
+public class Step05_BedrockAgent_FileSearch(ITestOutputHelper output) : BaseBedrockAgentTest(output)
+{
+    // Replace the KnowledgeBaseId with a valid KnowledgeBaseId
+    // To learn how to create a Knowledge Base, see:
+    // https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base-create.html
+    private const string KnowledgeBaseId = "[KnowledgeBaseId]";
+
+    protected override async Task<BedrockAgent> CreateAgentAsync(string agentName)
+    {
+        // Create a new agent on the Bedrock Agent service and prepare it for use
+        var agentModel = await this.Client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest(agentName));
+        // Create a new BedrockAgent instance with the agent model and the client
+        // so that we can interact with the agent using Semantic Kernel contents.
+        var bedrockAgent = new BedrockAgent(agentModel, this.Client);
+        // Associate the agent with a knowledge base and prepare the agent
+        await bedrockAgent.AssociateAgentKnowledgeBaseAsync(
+            KnowledgeBaseId,
+            "You will find information here.");
+
+        return bedrockAgent;
+    }
+
+    /// <summary>
+    /// Demonstrates how to use a <see cref="BedrockAgent"/> with file search.
+    /// </summary>
+    [Fact(Skip = "This test is skipped because it requires a valid KnowledgeBaseId.")]
+    public async Task UseAgentWithFileSearchAsync()
+    {
+        // Create the agent
+        var bedrockAgent = await this.CreateAgentAsync("Step05_BedrockAgent_FileSearch");
+
+        // Respond to user input
+        // Assuming the knowledge base contains information about Semantic Kernel.
+        // Feel free to modify the user query according to the information in your knowledge base.
+        var userQuery = "What is Semantic Kernel?";
+        try
+        {
+            // Customize the request for advanced scenarios
+            InvokeAgentRequest invokeAgentRequest = new()
+            {
+                AgentAliasId = BedrockAgent.WorkingDraftAgentAlias,
+                AgentId = bedrockAgent.Id,
+                SessionId = BedrockAgent.CreateSessionId(),
+                InputText = userQuery,
+            };
+
+            var responses = bedrockAgent.InvokeAsync(invokeAgentRequest, null, CancellationToken.None);
+            await foreach (var response in responses)
+            {
+                if (response.Content != null)
+                {
+                    this.Output.WriteLine(response.Content);
+                }
+            }
+        }
+        finally
+        {
+            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
+        }
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step06_BedrockAgent_AgentChat.cs b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step06_BedrockAgent_AgentChat.cs
new file mode 100644
index 000000000000..b7aee9d06c7e
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step06_BedrockAgent_AgentChat.cs
@@ -0,0 +1,93 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.Agents.Bedrock;
+using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
+using Microsoft.SemanticKernel.Agents.Chat;
+using Microsoft.SemanticKernel.ChatCompletion;
+
+namespace GettingStarted.BedrockAgents;
+
+/// <summary>
+/// This example demonstrates how two agents (one of which is a Bedrock agent) can chat with each other.
+/// </summary>
+public class Step06_BedrockAgent_AgentChat(ITestOutputHelper output) : BaseBedrockAgentTest(output)
+{
+    protected override async Task<BedrockAgent> CreateAgentAsync(string agentName)
+    {
+        // Create a new agent on the Bedrock Agent service and prepare it for use
+        var agentModel = await this.Client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest(agentName));
+        // Create a new BedrockAgent instance with the agent model and the client
+        // so that we can interact with the agent using Semantic Kernel contents.
+        return new BedrockAgent(agentModel, this.Client);
+    }
+
+    /// <summary>
+    /// Demonstrates how to put two <see cref="Agent"/> instances in a chat.
+    /// </summary>
+    [Fact]
+    public async Task UseAgentWithAgentChatAsync()
+    {
+        // Create the agent
+        var bedrockAgent = await this.CreateAgentAsync("Step06_BedrockAgent_AgentChat");
+        var chatCompletionAgent = new ChatCompletionAgent()
+        {
+            Instructions = "You're a translator who helps users understand the content in Spanish.",
+            Name = "Translator",
+            Kernel = this.CreateKernelWithChatCompletion(),
+        };
+
+        // Create a chat for agent interaction
+        var chat = new AgentGroupChat(bedrockAgent, chatCompletionAgent)
+        {
+            ExecutionSettings = new()
+            {
+                // Terminate after two turns: one from the bedrock agent and one from the chat completion agent.
+                // Note: each invoke will terminate after two turns, and we are invoking the group chat for each user query.
+                TerminationStrategy = new MultiTurnTerminationStrategy(2),
+            }
+        };
+
+        // Respond to user input
+        string[] userQueries = [
+            "Why is the sky blue in one sentence?",
+            "Why do we have seasons in one sentence?"
+        ];
+        try
+        {
+            foreach (var userQuery in userQueries)
+            {
+                chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, userQuery));
+                await foreach (var response in chat.InvokeAsync())
+                {
+                    if (response.Content != null)
+                    {
+                        this.Output.WriteLine($"[{response.AuthorName}]: {response.Content}");
+                    }
+                }
+            }
+        }
+        finally
+        {
+            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
+        }
+    }
+
+    internal sealed class MultiTurnTerminationStrategy : TerminationStrategy
+    {
+        public MultiTurnTerminationStrategy(int turns)
+        {
+            this.MaximumIterations = turns;
+        }
+
+        /// <inheritdoc/>
+        protected override Task<bool> ShouldAgentTerminateAsync(
+            Agent agent,
+            IReadOnlyList<ChatMessageContent> history,
+            CancellationToken cancellationToken = default)
+        {
+            return Task.FromResult(false);
+        }
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj
index 3a061b4fb4a0..ffc4734e10d6 100644
--- a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj
+++ b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj
@@ -16,6 +16,7 @@
 
   
     
+    
     
     
     
@@ -27,6 +28,7 @@
     
     
     
+    
     
     
     
@@ -40,8 +42,10 @@
   
 
   
+    
     
     
+    
     
     
     
@@ -63,4 +67,4 @@
     
   
 
-</Project>
+</Project>
\ No newline at end of file
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step01_Assistant.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step01_Assistant.cs
new file mode 100644
index 000000000000..312edc9e7c6f
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step01_Assistant.cs
@@ -0,0 +1,66 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using OpenAI.Assistants;
+using Resources;
+
+namespace GettingStarted.OpenAIAssistants;
+
+/// <summary>
+/// This example demonstrates using <see cref="OpenAIAssistantAgent"/> with templatized instructions.
+/// </summary>
+public class Step01_Assistant(ITestOutputHelper output) : BaseAssistantTest(output)
+{
+    [Fact]
+    public async Task UseTemplateForAssistantAgentAsync()
+    {
+        // Define the agent
+        string generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml");
+        PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml);
+        // Instructions, Name and Description properties defined via the PromptTemplateConfig.
+        Assistant definition = await this.AssistantClient.CreateAssistantFromTemplateAsync(this.Model, templateConfig, metadata: SampleMetadata);
+        OpenAIAssistantAgent agent = new(
+            definition,
+            this.AssistantClient,
+            templateFactory: new KernelPromptTemplateFactory(),
+            templateFormat: PromptTemplateConfig.SemanticKernelTemplateFormat)
+        {
+            Arguments =
+            {
+                { "topic", "Dog" },
+                { "length", "3" }
+            }
+        };
+
+        // Create a thread for the agent conversation.
+        string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
+
+        try
+        {
+            // Invoke the agent with the default arguments.
+            await InvokeAgentAsync();
+
+            // Invoke the agent with the override arguments.
+            await InvokeAgentAsync(
+                    new()
+                    {
+                        { "topic", "Cat" },
+                        { "length", "3" },
+                    });
+        }
+        finally
+        {
+            await this.AssistantClient.DeleteThreadAsync(threadId);
+            await this.AssistantClient.DeleteAssistantAsync(agent.Id);
+        }
+
+        // Local function to invoke agent and display the response.
+        async Task InvokeAgentAsync(KernelArguments? arguments = null)
+        {
+            await foreach (ChatMessageContent response in agent.InvokeAsync(threadId, arguments))
+            {
+                WriteAgentChatMessage(response);
+            }
+        }
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step02_Assistant_Plugins.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step02_Assistant_Plugins.cs
new file mode 100644
index 000000000000..3eb893a8871e
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step02_Assistant_Plugins.cs
@@ -0,0 +1,92 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
+using Plugins;
+
+namespace GettingStarted.OpenAIAssistants;
+
+/// <summary>
+/// Demonstrate creation of <see cref="OpenAIAssistantAgent"/> with a <see cref="KernelPlugin"/>,
+/// and then eliciting its response to explicit user messages.
+/// </summary>
+public class Step02_Assistant_Plugins(ITestOutputHelper output) : BaseAssistantTest(output)
+{
+    [Fact]
+    public async Task UseAssistantWithPluginAsync()
+    {
+        // Define the agent
+        OpenAIAssistantAgent agent = await CreateAssistantAgentAsync(
+                plugin: KernelPluginFactory.CreateFromType<MenuPlugin>(),
+                instructions: "Answer questions about the menu.",
+                name: "Host");
+
+        // Create a thread for the agent conversation.
+        string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
+
+        // Respond to user input
+        try
+        {
+            await InvokeAgentAsync(agent, threadId, "Hello");
+            await InvokeAgentAsync(agent, threadId, "What is the special soup and its price?");
+            await InvokeAgentAsync(agent, threadId, "What is the special drink and its price?");
+            await InvokeAgentAsync(agent, threadId, "Thank you");
+        }
+        finally
+        {
+            await this.AssistantClient.DeleteThreadAsync(threadId);
+            await this.AssistantClient.DeleteAssistantAsync(agent.Id);
+        }
+    }
+
+    [Fact]
+    public async Task UseAssistantWithPluginEnumParameterAsync()
+    {
+        // Define the agent
+        OpenAIAssistantAgent agent = await CreateAssistantAgentAsync(plugin: KernelPluginFactory.CreateFromType<WidgetFactory>());
+
+        // Create a thread for the agent conversation.
+        string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
+
+        // Respond to user input
+        try
+        {
+            await InvokeAgentAsync(agent, threadId, "Create a beautiful red colored widget for me.");
+        }
+        finally
+        {
+            await this.AssistantClient.DeleteThreadAsync(threadId);
+            await this.AssistantClient.DeleteAssistantAsync(agent.Id);
+        }
+    }
+
+    private async Task<OpenAIAssistantAgent> CreateAssistantAgentAsync(KernelPlugin plugin, string? instructions = null, string? name = null)
+    {
+        // Define the assistant
+        Assistant assistant =
+            await this.AssistantClient.CreateAssistantAsync(
+                this.Model,
+                name,
+                instructions: instructions,
+                metadata: SampleMetadata);
+
+        // Create the agent
+        OpenAIAssistantAgent agent = new(assistant, this.AssistantClient, [plugin]);
+
+        return agent;
+    }
+
+    // Local function to invoke agent and display the conversation messages.
+    private async Task InvokeAgentAsync(OpenAIAssistantAgent agent, string threadId, string input)
+    {
+        ChatMessageContent message = new(AuthorRole.User, input);
+        await agent.AddChatMessageAsync(threadId, message);
+        this.WriteAgentChatMessage(message);
+
+        await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
+        {
+            this.WriteAgentChatMessage(response);
+        }
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step03_Assistant_Vision.cs
similarity index 69%
rename from dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs
rename to dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step03_Assistant_Vision.cs
index 09b02d4ceebf..a9d8f5ead9e0 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs
+++ b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step03_Assistant_Vision.cs
@@ -2,14 +2,15 @@
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents.OpenAI;
 using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
 using Resources;
 
-namespace GettingStarted;
+namespace GettingStarted.OpenAIAssistants;
 
 /// <summary>
 /// Demonstrate providing image input to <see cref="OpenAIAssistantAgent"/>.
 /// </summary>
-public class Step09_Assistant_Vision(ITestOutputHelper output) : BaseAgentsTest(output)
+public class Step03_Assistant_Vision(ITestOutputHelper output) : BaseAssistantTest(output)
 {
     /// <summary>
     /// Azure currently only supports messages of type=text.
@@ -17,25 +18,23 @@ public class Step09_Assistant_Vision(ITestOutputHelper output) : BaseAgentsTest(
     protected override bool ForceOpenAI => true;
 
     [Fact]
-    public async Task UseSingleAssistantAgentAsync()
+    public async Task UseImageContentWithAssistantAsync()
     {
-        // Define the agent
-        OpenAIClientProvider provider = this.GetClientProvider();
-        OpenAIAssistantAgent agent =
-            await OpenAIAssistantAgent.CreateAsync(
-                provider,
-                definition: new OpenAIAssistantDefinition(this.Model)
-                {
-                    Metadata = AssistantSampleMetadata,
-                },
-                kernel: new Kernel());
+        // Define the assistant
+        Assistant assistant =
+            await this.AssistantClient.CreateAssistantAsync(
+                this.Model,
+                metadata: SampleMetadata);
+
+        // Create the agent
+        OpenAIAssistantAgent agent = new(assistant, this.AssistantClient);
 
         // Upload an image
         await using Stream imageStream = EmbeddedResource.ReadStream("cat.jpg")!;
-        string fileId = await agent.UploadFileAsync(imageStream, "cat.jpg");
+        string fileId = await this.Client.UploadAssistantFileAsync(imageStream, "cat.jpg");
 
         // Create a thread for the agent conversation.
-        string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+        string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
 
         // Respond to user input
         try
@@ -48,9 +47,9 @@ await OpenAIAssistantAgent.CreateAsync(
         }
         finally
         {
-            await agent.DeleteThreadAsync(threadId);
-            await agent.DeleteAsync();
-            await provider.Client.GetOpenAIFileClient().DeleteFileAsync(fileId);
+            await this.AssistantClient.DeleteThreadAsync(threadId);
+            await this.AssistantClient.DeleteAssistantAsync(agent.Id);
+            await this.Client.DeleteFileAsync(fileId);
         }
 
         // Local function to invoke agent and display the conversation messages.
diff --git a/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step04_AssistantTool_CodeInterpreter.cs
similarity index 61%
rename from dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs
rename to dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step04_AssistantTool_CodeInterpreter.cs
index 203009ffb561..3de017d422a3 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs
+++ b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step04_AssistantTool_CodeInterpreter.cs
@@ -2,30 +2,30 @@
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents.OpenAI;
 using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
 
-namespace GettingStarted;
+namespace GettingStarted.OpenAIAssistants;
 
 /// <summary>
 /// Demonstrate using code-interpreter on <see cref="OpenAIAssistantAgent"/>.
 /// </summary>
-public class Step10_AssistantTool_CodeInterpreter(ITestOutputHelper output) : BaseAgentsTest(output)
+public class Step04_AssistantTool_CodeInterpreter(ITestOutputHelper output) : BaseAssistantTest(output)
 {
     [Fact]
     public async Task UseCodeInterpreterToolWithAssistantAgentAsync()
     {
-        // Define the agent
-        OpenAIAssistantAgent agent =
-            await OpenAIAssistantAgent.CreateAsync(
-                clientProvider: this.GetClientProvider(),
-                definition: new(this.Model)
-                {
-                    EnableCodeInterpreter = true,
-                    Metadata = AssistantSampleMetadata,
-                },
-                kernel: new Kernel());
+        // Define the assistant
+        Assistant assistant =
+            await this.AssistantClient.CreateAssistantAsync(
+                this.Model,
+                enableCodeInterpreter: true,
+                metadata: SampleMetadata);
+
+        // Create the agent
+        OpenAIAssistantAgent agent = new(assistant, this.AssistantClient);
 
         // Create a thread for the agent conversation.
-        string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+        string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
 
         // Respond to user input
         try
@@ -34,8 +34,8 @@ await OpenAIAssistantAgent.CreateAsync(
         }
         finally
         {
-            await agent.DeleteThreadAsync(threadId);
-            await agent.DeleteAsync();
+            await this.AssistantClient.DeleteThreadAsync(threadId);
+            await this.AssistantClient.DeleteAssistantAsync(agent.Id);
         }
 
         // Local function to invoke agent and display the conversation messages.
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step05_AssistantTool_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step05_AssistantTool_FileSearch.cs
new file mode 100644
index 000000000000..72248118577b
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step05_AssistantTool_FileSearch.cs
@@ -0,0 +1,73 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
+using Resources;
+
+namespace GettingStarted.OpenAIAssistants;
+
+/// <summary>
+/// Demonstrate using <see cref="OpenAIAssistantAgent"/> with file search.
+/// </summary>
+public class Step05_AssistantTool_FileSearch(ITestOutputHelper output) : BaseAssistantTest(output)
+{
+    [Fact]
+    public async Task UseFileSearchToolWithAssistantAgentAsync()
+    {
+        // Define the assistant
+        Assistant assistant =
+            await this.AssistantClient.CreateAssistantAsync(
+                this.Model,
+                enableFileSearch: true,
+                metadata: SampleMetadata);
+
+        // Create the agent
+        OpenAIAssistantAgent agent = new(assistant, this.AssistantClient);
+
+        // Upload file - Using a table of fictional employees.
+        await using Stream stream = EmbeddedResource.ReadStream("employees.pdf")!;
+        string fileId = await this.Client.UploadAssistantFileAsync(stream, "employees.pdf");
+
+        // Create a vector-store
+        string vectorStoreId =
+            await this.Client.CreateVectorStoreAsync(
+                [fileId],
+                waitUntilCompleted: true,
+                metadata: SampleMetadata);
+
+        // Create a thread associated with a vector-store for the agent conversation.
+        string threadId = await this.AssistantClient.CreateThreadAsync(
+                            vectorStoreId: vectorStoreId,
+                            metadata: SampleMetadata);
+
+        // Respond to user input
+        try
+        {
+            await InvokeAgentAsync("Who is the youngest employee?");
+            await InvokeAgentAsync("Who works in sales?");
+            await InvokeAgentAsync("I have a customer request, who can help me?");
+        }
+        finally
+        {
+            await this.AssistantClient.DeleteThreadAsync(threadId);
+            await this.AssistantClient.DeleteAssistantAsync(agent.Id);
+            await this.Client.DeleteVectorStoreAsync(vectorStoreId);
+            await this.Client.DeleteFileAsync(fileId);
+        }
+
+        // Local function to invoke agent and display the conversation messages.
+        async Task InvokeAgentAsync(string input)
+        {
+            ChatMessageContent message = new(AuthorRole.User, input);
+            await agent.AddChatMessageAsync(threadId, message);
+            this.WriteAgentChatMessage(message);
+
+            await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
+            {
+                this.WriteAgentChatMessage(response);
+            }
+        }
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step06_AssistantTool_Function.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step06_AssistantTool_Function.cs
new file mode 100644
index 000000000000..024f8ab167ae
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step06_AssistantTool_Function.cs
@@ -0,0 +1,77 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
+using Plugins;
+
+namespace GettingStarted.OpenAIAssistants;
+
+/// <summary>
+/// This example demonstrates how to define function tools for an <see cref="OpenAIAssistantAgent"/>
+/// when the assistant is created. This is useful if you want to retrieve the assistant later and
+/// then dynamically check what function tools it requires.
+/// </summary>
+public class Step06_AssistantTool_Function(ITestOutputHelper output) : BaseAssistantTest(output)
+{
+    private const string HostName = "Host";
+    private const string HostInstructions = "Answer questions about the menu.";
+
+    [Fact]
+    public async Task UseSingleAssistantWithFunctionToolsAsync()
+    {
+        // Define the agent
+        AssistantCreationOptions creationOptions =
+            new()
+            {
+                Name = HostName,
+                Instructions = HostInstructions,
+                Metadata =
+                {
+                    { SampleMetadataKey, bool.TrueString }
+                },
+            };
+
+        // In this sample the function tools are added to the assistant. This is
+        // important if you want to retrieve the assistant later and then dynamically check
+        // what function tools it requires.
+        KernelPlugin plugin = KernelPluginFactory.CreateFromType<MenuPlugin>();
+        plugin.Select(f => f.ToToolDefinition(plugin.Name)).ToList().ForEach(td => creationOptions.Tools.Add(td));
+
+        Assistant definition = await this.AssistantClient.CreateAssistantAsync(this.Model, creationOptions);
+        OpenAIAssistantAgent agent = new(definition, this.AssistantClient);
+
+        // Add plugin to the agent's Kernel (same as direct Kernel usage).
+        agent.Kernel.Plugins.Add(plugin);
+
+        // Create a thread for the agent conversation.
+        string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
+
+        // Respond to user input
+        try
+        {
+            await InvokeAgentAsync("Hello");
+            await InvokeAgentAsync("What is the special soup and its price?");
+            await InvokeAgentAsync("What is the special drink and its price?");
+            await InvokeAgentAsync("Thank you");
+        }
+        finally
+        {
+            await this.AssistantClient.DeleteThreadAsync(threadId);
+            await this.AssistantClient.DeleteAssistantAsync(agent.Id);
+        }
+
+        // Local function to invoke agent and display the conversation messages.
+        async Task InvokeAgentAsync(string input)
+        {
+            ChatMessageContent message = new(AuthorRole.User, input);
+            await agent.AddChatMessageAsync(threadId, message);
+            this.WriteAgentChatMessage(message);
+
+            await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
+            {
+                this.WriteAgentChatMessage(response);
+            }
+        }
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/Plugins/MenuPlugin.cs b/dotnet/samples/GettingStartedWithAgents/Plugins/MenuPlugin.cs
new file mode 100644
index 000000000000..fb37ed0309e9
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/Plugins/MenuPlugin.cs
@@ -0,0 +1,79 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.ComponentModel;
+using Microsoft.SemanticKernel;
+
+namespace Plugins;
+
+public sealed class MenuPlugin
+{
+    [KernelFunction, Description("Provides a list of items on the menu.")]
+    public MenuItem[] GetMenu()
+    {
+        return s_menuItems;
+    }
+
+    [KernelFunction, Description("Provides a list of specials from the menu.")]
+    public MenuItem[] GetSpecials()
+    {
+        return s_menuItems.Where(i => i.IsSpecial).ToArray();
+    }
+
+    [KernelFunction, Description("Provides the price of the requested menu item.")]
+    public float? GetItemPrice(
+        [Description("The name of the menu item.")]
+        string menuItem)
+    {
+        return s_menuItems.FirstOrDefault(i => i.Name.Equals(menuItem, StringComparison.OrdinalIgnoreCase))?.Price;
+    }
+
+    private static readonly MenuItem[] s_menuItems =
+        [
+            new()
+            {
+                Category = "Soup",
+                Name = "Clam Chowder",
+                Price = 4.95f,
+                IsSpecial = true,
+            },
+            new()
+            {
+                Category = "Soup",
+                Name = "Tomato Soup",
+                Price = 4.95f,
+                IsSpecial = false,
+            },
+            new()
+            {
+                Category = "Salad",
+                Name = "Cobb Salad",
+                Price = 9.99f,
+            },
+            new()
+            {
+                Category = "Salad",
+                Name = "House Salad",
+                Price = 4.95f,
+            },
+            new()
+            {
+                Category = "Drink",
+                Name = "Chai Tea",
+                Price = 2.95f,
+                IsSpecial = true,
+            },
+            new()
+            {
+                Category = "Drink",
+                Name = "Soda",
+                Price = 1.95f,
+            },
+        ];
+
+    public sealed class MenuItem
+    {
+        public string Category { get; init; }
+        public string Name { get; init; }
+        public float Price { get; init; }
+        public bool IsSpecial { get; init; }
+    }
+}
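
As a point of reference (not part of this patch), the same plugin can also be exercised through direct `Kernel` usage, which is what the "same as direct Kernel usage" comments in the samples allude to. A minimal sketch, assuming no AI service is registered because the functions are plain methods:

```
using Microsoft.SemanticKernel;
using Plugins;

Kernel kernel = new();
kernel.Plugins.AddFromType<MenuPlugin>();

// Invoke a plugin function directly, without an agent or a chat-completion service.
FunctionResult specials = await kernel.InvokeAsync(nameof(MenuPlugin), "GetSpecials");
Console.WriteLine(specials.GetValue<MenuPlugin.MenuItem[]>()?.Length);
```
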
diff --git a/dotnet/samples/GettingStartedWithAgents/Plugins/WidgetFactory.cs b/dotnet/samples/GettingStartedWithAgents/Plugins/WidgetFactory.cs
new file mode 100644
index 000000000000..8a889ee17249
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/Plugins/WidgetFactory.cs
@@ -0,0 +1,63 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.ComponentModel;
+using System.Text.Json.Serialization;
+using Microsoft.SemanticKernel;
+
+namespace Plugins;
+
+/// 
+/// A plugin that creates widgets.
+/// 
+public sealed class WidgetFactory
+{
+    [KernelFunction]
+    [Description("Creates a new widget of the specified type and colors")]
+    public WidgetDetails CreateWidget(
+        [Description("The type of widget to be created")] WidgetType widgetType,
+        [Description("The colors of the widget to be created")] WidgetColor[] widgetColors)
+    {
+        return new()
+        {
+            SerialNumber = $"{widgetType}-{string.Join("-", widgetColors)}-{Guid.NewGuid()}",
+            Type = widgetType,
+            Colors = widgetColors,
+        };
+    }
+}
+
+/// 
+/// A  is required to correctly convert enum values.
+/// 
+[JsonConverter(typeof(JsonStringEnumConverter))]
+public enum WidgetType
+{
+    [Description("A widget that is useful.")]
+    Useful,
+
+    [Description("A widget that is decorative.")]
+    Decorative
+}
+
+/// 
+/// A  is required to correctly convert enum values.
+/// 
+[JsonConverter(typeof(JsonStringEnumConverter))]
+public enum WidgetColor
+{
+    [Description("Use when creating a red item.")]
+    Red,
+
+    [Description("Use when creating a green item.")]
+    Green,
+
+    [Description("Use when creating a blue item.")]
+    Blue
+}
+
+public sealed class WidgetDetails
+{
+    public string SerialNumber { get; init; }
+    public WidgetType Type { get; init; }
+    public WidgetColor[] Colors { get; init; }
+}
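
For context (illustration only, not part of this patch): the `JsonStringEnumConverter` attribute above makes the enums round-trip as their names, which is the form the model sees in the tool schema and sends back in function-call arguments. A minimal sketch:

```
using System.Text.Json;
using Plugins;

// With [JsonConverter(typeof(JsonStringEnumConverter))] the enum serializes as a string...
string json = JsonSerializer.Serialize(WidgetColor.Red);                   // "Red"

// ...and deserializes from the string value the model produces.
WidgetColor color = JsonSerializer.Deserialize<WidgetColor>("\"Green\"");  // WidgetColor.Green
Console.WriteLine($"{json} / {color}");
```
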
diff --git a/dotnet/samples/GettingStartedWithAgents/README.md b/dotnet/samples/GettingStartedWithAgents/README.md
index ed0e68802994..6c54a26c0d90 100644
--- a/dotnet/samples/GettingStartedWithAgents/README.md
+++ b/dotnet/samples/GettingStartedWithAgents/README.md
@@ -2,13 +2,14 @@
 
 This project contains a step by step guide to get started with  _Semantic Kernel Agents_.
 
+## NuGet
 
-#### NuGet:
 - [Microsoft.SemanticKernel.Agents.Abstractions](https://www.nuget.org/packages/Microsoft.SemanticKernel.Agents.Abstractions)
 - [Microsoft.SemanticKernel.Agents.Core](https://www.nuget.org/packages/Microsoft.SemanticKernel.Agents.Core)
 - [Microsoft.SemanticKernel.Agents.OpenAI](https://www.nuget.org/packages/Microsoft.SemanticKernel.Agents.OpenAI)
 
-#### Source
+## Source
+
 - [Semantic Kernel Agent Framework](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/Agents)
 
 The examples can be run as integration tests but their code can also be copied to stand-alone programs.
@@ -17,6 +18,8 @@ The examples can be run as integration tests but their code can also be copied t
 
 The getting started with agents examples include:
 
+### ChatCompletion
+
 Example|Description
 ---|---
 [Step01_Agent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs)|How to create and use an agent.
@@ -25,11 +28,39 @@ Example|Description
 [Step04_KernelFunctionStrategies](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs)|How to utilize a `KernelFunction` as a _chat strategy_.
 [Step05_JsonResult](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step05_JsonResult.cs)|How to have an agent produce JSON.
 [Step06_DependencyInjection](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs)|How to define dependency injection patterns for agents.
-[Step07_Logging](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs)|How to enable logging for agents.
-[Step08_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs)|How to create an Open AI Assistant agent.
-[Step09_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs)|How to provide an image as input to an Open AI Assistant agent.
-[Step10_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter_.cs)|How to use the code-interpreter tool for an Open AI Assistant agent.
-[Step11_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs)|How to use the file-search tool for an Open AI Assistant agent.
+[Step07_Telemetry](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step07_Telemetry.cs)|How to enable logging for agents.
+
+### Open AI Assistant
+
+Example|Description
+---|---
+[Step01_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step01_Assistant.cs)|How to create an Open AI Assistant agent.
+[Step02_Assistant_Plugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step02_Assistant_Plugins.cs)|How to create an Open AI Assistant agent that uses plugins.
+[Step03_Assistant_Vision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step03_Assistant_Vision.cs)|How to provide an image as input to an Open AI Assistant agent.
+[Step04_AssistantTool_CodeInterpreter_](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step04_AssistantTool_CodeInterpreter_.cs)|How to use the code-interpreter tool for an Open AI Assistant agent.
+[Step05_AssistantTool_FileSearch](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step05_AssistantTool_FileSearch.cs)|How to use the file-search tool for an Open AI Assistant agent.
+
+### Azure AI Agent
+
+Example|Description
+---|---
+[Step01_AzureAIAgent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step01_AzureAIAgent.cs)|How to create an Azure AI agent.
+[Step02_AzureAIAgent_Plugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Plugins.cs)|How to create an Azure AI agent that uses plugins.
+[Step03_AzureAIAgent_Chat](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Chat.cs)|How to create a conversation with Azure AI agents.
+[Step04_AzureAIAgent_CodeInterpreter](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step03_AzureAIAgent_CodeInterpreter.cs)|How to use the code-interpreter tool for an Azure AI agent.
+[Step05_AzureAIAgent_FileSearch](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_FileSearch.cs)|How to use the file-search tool for an Azure AI agent.
+[Step06_AzureAIAgent_OpenAPI](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_OpenAPI.cs)|How to use the OpenAPI tool for an Azure AI agent.
+
+### Bedrock Agent
+
+Example|Description
+---|---
+[Step01_BedrockAgent](./BedrockAgent/Step01_BedrockAgent.cs)|How to create a Bedrock agent and interact with it in the most basic way.
+[Step02_BedrockAgent_CodeInterpreter](./BedrockAgent/Step02_BedrockAgent_CodeInterpreter.cs)|How to use the code-interpreter tool with a Bedrock agent.
+[Step03_BedrockAgent_Functions](./BedrockAgent/Step03_BedrockAgent_Functions.cs)|How to use kernel functions with a Bedrock agent.
+[Step04_BedrockAgent_Trace](./BedrockAgent/Step04_BedrockAgent_Trace.cs)|How to enable tracing for a Bedrock agent to inspect its chain of thought.
+[Step05_BedrockAgent_FileSearch](./BedrockAgent/Step05_BedrockAgent_FileSearch.cs)|How to use file search with a Bedrock agent (i.e. Bedrock knowledge base).
+[Step06_BedrockAgent_AgentChat](./BedrockAgent/Step06_BedrockAgent_AgentChat.cs)|How to create a conversation between two agents, one of which is a Bedrock agent.
 
 ## Legacy Agents
 
@@ -38,8 +69,8 @@ Support for the OpenAI Assistant API was originally published in `Microsoft.Sema
 
 This package has been superseded by _Semantic Kernel Agents_, which includes support for Open AI Assistant agents.
 
-
 ## Running Examples with Filters
+
 Examples may be explored and ran within _Visual Studio_ using _Test Explorer_.
 
 You can also run specific examples via the command-line by using test filters (`dotnet test --filter`). Type `dotnet test --help` at the command line for more details.
@@ -86,12 +117,25 @@ To set your secrets with .NET Secret Manager:
 5. Or Azure Open AI:
 
     ```
-    dotnet user-secrets set "AzureOpenAI:DeploymentName" "..."
-    dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "..."
+    dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "gpt-4o"
     dotnet user-secrets set "AzureOpenAI:Endpoint" "https://... .openai.azure.com/"
     dotnet user-secrets set "AzureOpenAI:ApiKey" "..."
     ```
 
+6. Or Azure AI:
+
+    ```
+    dotnet user-secrets set "AzureAI:ConnectionString" "..."
+    dotnet user-secrets set "AzureAI:ChatModelId" "gpt-4o"
+    ```
+
+7. Or Bedrock:
+
+    ```
+    dotnet user-secrets set "BedrockAgent:AgentResourceRoleArn" "arn:aws:iam::...:role/..."
+    dotnet user-secrets set "BedrockAgent:FoundationModel" "..."
+    ```
+
 > NOTE: Azure secrets will take precedence, if both Open AI and Azure Open AI secrets are defined, unless `ForceOpenAI` is set:
 
 ```
diff --git a/dotnet/samples/GettingStartedWithAgents/Resources/AutoInvokeTools.yaml b/dotnet/samples/GettingStartedWithAgents/Resources/AutoInvokeTools.yaml
new file mode 100644
index 000000000000..36d66167b555
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/Resources/AutoInvokeTools.yaml
@@ -0,0 +1,7 @@
+name: ToolAgent
+template_format: semantic-kernel
+description: An agent that is configured to auto-invoke plugins.
+execution_settings:
+  default:
+    function_choice_behavior:
+      type: auto
diff --git a/dotnet/samples/GettingStartedWithAgents/Resources/countries.json b/dotnet/samples/GettingStartedWithAgents/Resources/countries.json
new file mode 100644
index 000000000000..b88d5040750a
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/Resources/countries.json
@@ -0,0 +1,46 @@
+{
+  "openapi": "3.1.0",
+  "info": {
+    "title": "RestCountries.NET API",
+    "description": "Web API version 3.1 for managing country items, based on previous implementations from restcountries.eu and restcountries.com.",
+    "version": "v3.1"
+  },
+  "servers": [
+    { "url": "https://restcountries.net" }
+  ],
+  "auth": [],
+  "paths": {
+    "/v3.1/currency": {
+      "get": {
+        "description": "Search by currency.",
+        "operationId": "LookupCountryByCurrency",
+        "parameters": [
+          {
+            "name": "currency",
+            "in": "query",
+            "description": "The currency to search for.",
+            "required": true,
+            "schema": {
+              "type": "string"
+            }
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "Success",
+            "content": {
+              "text/plain": {
+                "schema": {
+                  "type": "string"
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  },
+  "components": {
+    "schemes": {}
+  }
+}
\ No newline at end of file
diff --git a/dotnet/samples/GettingStartedWithAgents/Resources/weather.json b/dotnet/samples/GettingStartedWithAgents/Resources/weather.json
new file mode 100644
index 000000000000..c3009f417de4
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/Resources/weather.json
@@ -0,0 +1,62 @@
+{
+  "openapi": "3.1.0",
+  "info": {
+    "title": "get weather data",
+    "description": "Retrieves current weather data for a location based on wttr.in.",
+    "version": "v1.0.0"
+  },
+  "servers": [
+    {
+      "url": "https://wttr.in"
+    }
+  ],
+  "auth": [],
+  "paths": {
+    "/{location}": {
+      "get": {
+        "description": "Get weather information for a specific location",
+        "operationId": "GetCurrentWeather",
+        "parameters": [
+          {
+            "name": "location",
+            "in": "path",
+            "description": "City or location to retrieve the weather for",
+            "required": true,
+            "schema": {
+              "type": "string"
+            }
+          },
+          {
+            "name": "format",
+            "in": "query",
+            "description": "Always use j1 value for this parameter",
+            "required": true,
+            "schema": {
+              "type": "string",
+              "default": "j1"
+            }
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "Successful response",
+            "content": {
+              "text/plain": {
+                "schema": {
+                  "type": "string"
+                }
+              }
+            }
+          },
+          "404": {
+            "description": "Location not found"
+          }
+        },
+        "deprecated": false
+      }
+    }
+  },
+  "components": {
+    "schemes": {}
+  }
+}
\ No newline at end of file
diff --git a/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs b/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs
index dfd6aeb22fb3..3807c1ebef74 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs
@@ -59,17 +59,18 @@ public async Task UseTemplateForChatCompletionAgentAsync()
         // Define the agent
         string generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml");
         PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml);
+        KernelPromptTemplateFactory templateFactory = new();
 
         // Instructions, Name and Description properties defined via the config.
         ChatCompletionAgent agent =
-            new(templateConfig, new KernelPromptTemplateFactory())
+            new(templateConfig, templateFactory)
             {
                 Kernel = this.CreateKernelWithChatCompletion(),
-                Arguments = new KernelArguments()
-                {
-                    { "topic", "Dog" },
-                    { "length", "3" },
-                }
+                Arguments =
+                    {
+                        { "topic", "Dog" },
+                        { "length", "3" },
+                    }
             };
 
         /// Create the chat history to capture the agent interaction.
diff --git a/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs b/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs
index 047020a90b67..ced4148a7287 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs
@@ -1,9 +1,9 @@
 // Copyright (c) Microsoft. All rights reserved.
-using System.ComponentModel;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
 using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Plugins;
+using Resources;
 
 namespace GettingStarted;
 
@@ -13,66 +13,97 @@ namespace GettingStarted;
 /// 
 public class Step02_Plugins(ITestOutputHelper output) : BaseAgentsTest(output)
 {
-    private const string HostName = "Host";
-    private const string HostInstructions = "Answer questions about the menu.";
+    [Fact]
+    public async Task UseChatCompletionWithPluginAsync()
+    {
+        // Define the agent
+        ChatCompletionAgent agent = CreateAgentWithPlugin(
+                plugin: KernelPluginFactory.CreateFromType(),
+                instructions: "Answer questions about the menu.",
+                name: "Host");
+
+        /// Create the chat history to capture the agent interaction.
+        ChatHistory chat = [];
+
+        // Respond to user input, invoking functions where appropriate.
+        await InvokeAgentAsync(agent, chat, "Hello");
+        await InvokeAgentAsync(agent, chat, "What is the special soup and its price?");
+        await InvokeAgentAsync(agent, chat, "What is the special drink and its price?");
+        await InvokeAgentAsync(agent, chat, "Thank you");
+    }
 
     [Fact]
-    public async Task UseChatCompletionWithPluginAgentAsync()
+    public async Task UseChatCompletionWithPluginEnumParameterAsync()
     {
         // Define the agent
+        ChatCompletionAgent agent = CreateAgentWithPlugin(
+                KernelPluginFactory.CreateFromType());
+
+        /// Create the chat history to capture the agent interaction.
+        ChatHistory chat = [];
+
+        // Respond to user input, invoking functions where appropriate.
+        await InvokeAgentAsync(agent, chat, "Create a beautiful red colored widget for me.");
+    }
+
+    [Fact]
+    public async Task UseChatCompletionWithTemplateExecutionSettingsAsync()
+    {
+        // Read the template resource
+        string autoInvokeYaml = EmbeddedResource.Read("AutoInvokeTools.yaml");
+        PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(autoInvokeYaml);
+        KernelPromptTemplateFactory templateFactory = new();
+
+        // Define the agent:
+        // Execution-settings with auto-invocation of plugins defined via the config.
         ChatCompletionAgent agent =
-            new()
+            new(templateConfig, templateFactory)
             {
-                Instructions = HostInstructions,
-                Name = HostName,
-                Kernel = this.CreateKernelWithChatCompletion(),
-                Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }),
+                Kernel = this.CreateKernelWithChatCompletion()
             };
 
-        // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage).
-        KernelPlugin plugin = KernelPluginFactory.CreateFromType();
-        agent.Kernel.Plugins.Add(plugin);
+        agent.Kernel.Plugins.AddFromType();
 
         /// Create the chat history to capture the agent interaction.
         ChatHistory chat = [];
 
         // Respond to user input, invoking functions where appropriate.
-        await InvokeAgentAsync("Hello");
-        await InvokeAgentAsync("What is the special soup?");
-        await InvokeAgentAsync("What is the special drink?");
-        await InvokeAgentAsync("Thank you");
+        await InvokeAgentAsync(agent, chat, "Create a beautiful red colored widget for me.");
+    }
 
-        // Local function to invoke agent and display the conversation messages.
-        async Task InvokeAgentAsync(string input)
-        {
-            ChatMessageContent message = new(AuthorRole.User, input);
-            chat.Add(message);
-            this.WriteAgentChatMessage(message);
+    private ChatCompletionAgent CreateAgentWithPlugin(
+        KernelPlugin plugin,
+        string? instructions = null,
+        string? name = null)
+    {
+        ChatCompletionAgent agent =
+                new()
+                {
+                    Instructions = instructions,
+                    Name = name,
+                    Kernel = this.CreateKernelWithChatCompletion(),
+                    Arguments = new KernelArguments(new PromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }),
+                };
 
-            await foreach (ChatMessageContent response in agent.InvokeAsync(chat))
-            {
-                chat.Add(response);
+        // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage).
+        agent.Kernel.Plugins.Add(plugin);
 
-                this.WriteAgentChatMessage(response);
-            }
-        }
+        return agent;
     }
 
-    private sealed class MenuPlugin
+    // Helper to invoke the agent and display the conversation messages.
+    private async Task InvokeAgentAsync(ChatCompletionAgent agent, ChatHistory chat, string input)
     {
-        [KernelFunction, Description("Provides a list of specials from the menu.")]
-        [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")]
-        public string GetSpecials() =>
-            """
-            Special Soup: Clam Chowder
-            Special Salad: Cobb Salad
-            Special Drink: Chai Tea
-            """;
-
-        [KernelFunction, Description("Provides the price of the requested menu item.")]
-        public string GetItemPrice(
-            [Description("The name of the menu item.")]
-            string menuItem) =>
-            "$9.99";
+        ChatMessageContent message = new(AuthorRole.User, input);
+        chat.Add(message);
+
+        this.WriteAgentChatMessage(message);
+
+        await foreach (ChatMessageContent response in agent.InvokeAsync(chat))
+        {
+            chat.Add(response);
+
+            this.WriteAgentChatMessage(response);
+        }
     }
 }
diff --git a/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs b/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs
index f924793951aa..963b670f1f82 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs
@@ -1,8 +1,8 @@
 // Copyright (c) Microsoft. All rights reserved.
+
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
 using Microsoft.SemanticKernel.Agents.Chat;
-using Microsoft.SemanticKernel.Agents.History;
 using Microsoft.SemanticKernel.ChatCompletion;
 
 namespace GettingStarted;
diff --git a/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
index 5beb969bf090..276f2f6fb198 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
@@ -1,4 +1,5 @@
 // Copyright (c) Microsoft. All rights reserved.
+using Azure.Identity;
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.Extensions.Logging;
 using Microsoft.SemanticKernel;
@@ -43,7 +44,7 @@ public async Task UseDependencyInjectionToCreateAgentAsync()
             serviceContainer.AddAzureOpenAIChatCompletion(
                 TestConfiguration.AzureOpenAI.ChatDeploymentName,
                 TestConfiguration.AzureOpenAI.Endpoint,
-                TestConfiguration.AzureOpenAI.ApiKey);
+                new AzureCliCredential());
         }
 
         // Transient Kernel as each agent may customize its Kernel instance with plug-ins.
diff --git a/dotnet/samples/GettingStartedWithAgents/Step07_Telemetry.cs b/dotnet/samples/GettingStartedWithAgents/Step07_Telemetry.cs
new file mode 100644
index 000000000000..832ce0b1db02
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/Step07_Telemetry.cs
@@ -0,0 +1,236 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Diagnostics;
+using Azure.Monitor.OpenTelemetry.Exporter;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Logging.Abstractions;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.Agents.Chat;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
+using OpenTelemetry;
+using OpenTelemetry.Resources;
+using OpenTelemetry.Trace;
+
+namespace GettingStarted;
+
+/// 
+/// A repeat of  with telemetry enabled.
+/// 
+public class Step07_Telemetry(ITestOutputHelper output) : BaseAssistantTest(output)
+{
+    /// 
+    /// Instance of  for the example's main activity.
+    /// 
+    private static readonly ActivitySource s_activitySource = new("AgentsTelemetry.Example");
+
+    /// 
+    /// Demonstrates logging in ,  and .
+    /// Logging is enabled through the  and  properties.
+    /// This example uses  to output logs to the test console, but any compatible logging provider can be used.
+    /// 
+    [Fact]
+    public async Task LoggingAsync()
+    {
+        await RunExampleAsync(loggerFactory: this.LoggerFactory);
+
+        // Output:
+        // [AddChatMessages] Adding Messages: 1.
+        // [AddChatMessages] Added Messages: 1.
+        // [InvokeAsync] Invoking chat: Microsoft.SemanticKernel.Agents.ChatCompletionAgent:63c505e8-cf5b-4aa3-a6a5-067a52377f82/CopyWriter, Microsoft.SemanticKernel.Agents.ChatCompletionAgent:85f6777b-54ef-4392-9608-67bc85c42c5b/ArtDirector
+        // [InvokeAsync] Selecting agent: Microsoft.SemanticKernel.Agents.Chat.SequentialSelectionStrategy.
+        // [NextAsync] Selected agent (0 / 2): 63c505e8-cf5b-4aa3-a6a5-067a52377f82/CopyWriter
+        // and more...
+    }
+
+    /// 
+    /// Demonstrates tracing in  and .
+    /// Tracing is enabled through the .
+    /// For output this example uses Console as well as Application Insights.
+    /// 
+    [Theory]
+    [InlineData(true, false)]
+    [InlineData(false, false)]
+    [InlineData(true, true)]
+    [InlineData(false, true)]
+    public async Task TracingAsync(bool useApplicationInsights, bool useStreaming)
+    {
+        using var tracerProvider = GetTracerProvider(useApplicationInsights);
+
+        using var activity = s_activitySource.StartActivity("MainActivity");
+        Console.WriteLine($"Operation/Trace ID: {Activity.Current?.TraceId}");
+
+        await RunExampleAsync(useStreaming: useStreaming);
+
+        // Output:
+        // Operation/Trace ID: 132d831ef39c13226cdaa79873f375b8
+        // Activity.TraceId:            132d831ef39c13226cdaa79873f375b8
+        // Activity.SpanId:             891e8f2f32a61123
+        // Activity.TraceFlags:         Recorded
+        // Activity.ParentSpanId:       5dae937c9438def9
+        // Activity.ActivitySourceName: Microsoft.SemanticKernel.Diagnostics
+        // Activity.DisplayName:        chat.completions gpt-4
+        // Activity.Kind:               Client
+        // Activity.StartTime:          2025-02-03T23:32:57.1363560Z
+        // Activity.Duration:           00:00:02.1339320
+        // and more...
+    }
+
+    #region private
+
+    private async Task RunExampleAsync(
+        bool useStreaming = false,
+        ILoggerFactory? loggerFactory = null)
+    {
+        // Define the agents
+        ChatCompletionAgent agentReviewer =
+            new()
+            {
+                Name = "ArtDirector",
+                Instructions =
+                    """
+                    You are an art director who has opinions about copywriting born of a love for David Ogilvy.
+                    The goal is to determine if the given copy is acceptable to print.
+                    If so, state that it is approved.
+                    If not, provide insight on how to refine suggested copy without examples.
+                    """,
+                Description = "An art director who has opinions about copywriting born of a love for David Ogilvy",
+                Kernel = this.CreateKernelWithChatCompletion(),
+                LoggerFactory = GetLoggerFactoryOrDefault(loggerFactory),
+            };
+
+        // Define the assistant
+        Assistant assistant =
+            await this.AssistantClient.CreateAssistantAsync(
+                this.Model,
+                name: "CopyWriter",
+                instructions:
+                    """
+                    You are a copywriter with ten years of experience and are known for brevity and a dry humor.
+                    The goal is to refine and decide on the single best copy as an expert in the field.
+                    Only provide a single proposal per response.
+                    You're laser focused on the goal at hand.
+                    Don't waste time with chit chat.
+                    Consider suggestions when refining an idea.
+                    """,
+                metadata: SampleMetadata);
+
+        // Create the agent
+        OpenAIAssistantAgent agentWriter = new(assistant, this.AssistantClient)
+        {
+            LoggerFactory = GetLoggerFactoryOrDefault(loggerFactory)
+        };
+
+        // Create a chat for agent interaction.
+        AgentGroupChat chat =
+            new(agentWriter, agentReviewer)
+            {
+                // This is all that is required to enable logging across the Agent Framework.
+                LoggerFactory = GetLoggerFactoryOrDefault(loggerFactory),
+                ExecutionSettings =
+                    new()
+                    {
+                        // Here a TerminationStrategy subclass is used that will terminate when
+                        // an assistant message contains the term "approve".
+                        TerminationStrategy =
+                            new ApprovalTerminationStrategy()
+                            {
+                                // Only the art-director may approve.
+                                Agents = [agentReviewer],
+                                // Limit total number of turns
+                                MaximumIterations = 10,
+                            }
+                    }
+            };
+
+        // Invoke chat and display messages.
+        ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons.");
+        chat.AddChatMessage(input);
+        this.WriteAgentChatMessage(input);
+
+        if (useStreaming)
+        {
+            string lastAgent = string.Empty;
+            await foreach (StreamingChatMessageContent response in chat.InvokeStreamingAsync())
+            {
+                if (string.IsNullOrEmpty(response.Content))
+                {
+                    continue;
+                }
+
+                if (!lastAgent.Equals(response.AuthorName, StringComparison.Ordinal))
+                {
+                    Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? "*"}:");
+                    lastAgent = response.AuthorName ?? string.Empty;
+                }
+
+                Console.WriteLine($"\t > streamed: '{response.Content}'");
+            }
+
+            // Display the chat history.
+            Console.WriteLine("================================");
+            Console.WriteLine("CHAT HISTORY");
+            Console.WriteLine("================================");
+
+            ChatMessageContent[] history = await chat.GetChatMessagesAsync().Reverse().ToArrayAsync();
+
+            for (int index = 0; index < history.Length; index++)
+            {
+                this.WriteAgentChatMessage(history[index]);
+            }
+        }
+        else
+        {
+            await foreach (ChatMessageContent response in chat.InvokeAsync())
+            {
+                this.WriteAgentChatMessage(response);
+            }
+        }
+
+        Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]");
+    }
+
+    private TracerProvider? GetTracerProvider(bool useApplicationInsights)
+    {
+        // Enable diagnostics.
+        AppContext.SetSwitch("Microsoft.SemanticKernel.Experimental.GenAI.EnableOTelDiagnostics", true);
+
+        var tracerProviderBuilder = Sdk.CreateTracerProviderBuilder()
+            .SetResourceBuilder(ResourceBuilder.CreateDefault().AddService("Semantic Kernel Agents Tracing Example"))
+            .AddSource("Microsoft.SemanticKernel*")
+            .AddSource(s_activitySource.Name);
+
+        if (useApplicationInsights)
+        {
+            var connectionString = TestConfiguration.ApplicationInsights.ConnectionString;
+
+            if (string.IsNullOrWhiteSpace(connectionString))
+            {
+                throw new ConfigurationNotFoundException(
+                    nameof(TestConfiguration.ApplicationInsights),
+                    nameof(TestConfiguration.ApplicationInsights.ConnectionString));
+            }
+
+            tracerProviderBuilder.AddAzureMonitorTraceExporter(o => o.ConnectionString = connectionString);
+        }
+        else
+        {
+            tracerProviderBuilder.AddConsoleExporter();
+        }
+
+        return tracerProviderBuilder.Build();
+    }
+
+    private ILoggerFactory GetLoggerFactoryOrDefault(ILoggerFactory? loggerFactory = null) => loggerFactory ?? NullLoggerFactory.Instance;
+
+    private sealed class ApprovalTerminationStrategy : TerminationStrategy
+    {
+        // Terminate when the final message contains the term "approve"
+        protected override Task ShouldAgentTerminateAsync(Agent agent, IReadOnlyList history, CancellationToken cancellationToken)
+            => Task.FromResult(history[history.Count - 1].Content?.Contains("approve", StringComparison.OrdinalIgnoreCase) ?? false);
+    }
+
+    #endregion
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs b/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs
deleted file mode 100644
index 1e952810e51e..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs
+++ /dev/null
@@ -1,142 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.ComponentModel;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents;
-using Microsoft.SemanticKernel.Agents.OpenAI;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Resources;
-
-namespace GettingStarted;
-
-/// 
-/// This example demonstrates similarity between using 
-/// and  (see: Step 2).
-/// 
-public class Step08_Assistant(ITestOutputHelper output) : BaseAgentsTest(output)
-{
-    private const string HostName = "Host";
-    private const string HostInstructions = "Answer questions about the menu.";
-
-    [Fact]
-    public async Task UseSingleAssistantAgentAsync()
-    {
-        // Define the agent
-        OpenAIAssistantAgent agent =
-            await OpenAIAssistantAgent.CreateAsync(
-                clientProvider: this.GetClientProvider(),
-                definition: new OpenAIAssistantDefinition(this.Model)
-                {
-                    Instructions = HostInstructions,
-                    Name = HostName,
-                    Metadata = AssistantSampleMetadata,
-                },
-                kernel: new Kernel());
-
-        // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage).
-        KernelPlugin plugin = KernelPluginFactory.CreateFromType();
-        agent.Kernel.Plugins.Add(plugin);
-
-        // Create a thread for the agent conversation.
-        string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
-
-        // Respond to user input
-        try
-        {
-            await InvokeAgentAsync("Hello");
-            await InvokeAgentAsync("What is the special soup and its price?");
-            await InvokeAgentAsync("What is the special drink and its price?");
-            await InvokeAgentAsync("Thank you");
-        }
-        finally
-        {
-            await agent.DeleteThreadAsync(threadId);
-            await agent.DeleteAsync();
-        }
-
-        // Local function to invoke agent and display the conversation messages.
-        async Task InvokeAgentAsync(string input)
-        {
-            ChatMessageContent message = new(AuthorRole.User, input);
-            await agent.AddChatMessageAsync(threadId, message);
-            this.WriteAgentChatMessage(message);
-
-            await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
-            {
-                this.WriteAgentChatMessage(response);
-            }
-        }
-    }
-
-    [Fact]
-    public async Task UseTemplateForAssistantAgentAsync()
-    {
-        // Define the agent
-        string generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml");
-        PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml);
-
-        // Instructions, Name and Description properties defined via the config.
-        OpenAIAssistantAgent agent =
-            await OpenAIAssistantAgent.CreateFromTemplateAsync(
-                clientProvider: this.GetClientProvider(),
-                capabilities: new OpenAIAssistantCapabilities(this.Model)
-                {
-                    Metadata = AssistantSampleMetadata,
-                },
-                kernel: new Kernel(),
-                defaultArguments: new KernelArguments()
-                {
-                    { "topic", "Dog" },
-                    { "length", "3" },
-                },
-                templateConfig);
-
-        // Create a thread for the agent conversation.
-        string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
-
-        try
-        {
-            // Invoke the agent with the default arguments.
-            await InvokeAgentAsync();
-
-            // Invoke the agent with the override arguments.
-            await InvokeAgentAsync(
-                new()
-                {
-                { "topic", "Cat" },
-                { "length", "3" },
-                });
-        }
-        finally
-        {
-            await agent.DeleteThreadAsync(threadId);
-            await agent.DeleteAsync();
-        }
-
-        // Local function to invoke agent and display the response.
-        async Task InvokeAgentAsync(KernelArguments? arguments = null)
-        {
-            await foreach (ChatMessageContent response in agent.InvokeAsync(threadId, arguments))
-            {
-                WriteAgentChatMessage(response);
-            }
-        }
-    }
-
-    private sealed class MenuPlugin
-    {
-        [KernelFunction, Description("Provides a list of specials from the menu.")]
-        [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")]
-        public string GetSpecials() =>
-            """
-            Special Soup: Clam Chowder
-            Special Salad: Cobb Salad
-            Special Drink: Chai Tea
-            """;
-
-        [KernelFunction, Description("Provides the price of the requested menu item.")]
-        public string GetItemPrice(
-            [Description("The name of the menu item.")]
-            string menuItem) =>
-            "$9.99";
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs
deleted file mode 100644
index 77f4e5dbdff1..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs
+++ /dev/null
@@ -1,84 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.OpenAI;
-using Microsoft.SemanticKernel.ChatCompletion;
-using OpenAI.Files;
-using OpenAI.VectorStores;
-using Resources;
-
-namespace GettingStarted;
-
-/// 
-/// Demonstrate using code-interpreter on  .
-/// 
-public class Step11_AssistantTool_FileSearch(ITestOutputHelper output) : BaseAgentsTest(output)
-{
-    [Fact]
-    public async Task UseFileSearchToolWithAssistantAgentAsync()
-    {
-        // Define the agent
-        OpenAIClientProvider provider = this.GetClientProvider();
-        OpenAIAssistantAgent agent =
-            await OpenAIAssistantAgent.CreateAsync(
-                clientProvider: this.GetClientProvider(),
-                definition: new OpenAIAssistantDefinition(this.Model)
-                {
-                    EnableFileSearch = true,
-                    Metadata = AssistantSampleMetadata,
-                },
-                kernel: new Kernel());
-
-        // Upload file - Using a table of fictional employees.
-        OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient();
-        await using Stream stream = EmbeddedResource.ReadStream("employees.pdf")!;
-        OpenAIFile fileInfo = await fileClient.UploadFileAsync(stream, "employees.pdf", FileUploadPurpose.Assistants);
-
-        // Create a vector-store
-        VectorStoreClient vectorStoreClient = provider.Client.GetVectorStoreClient();
-        CreateVectorStoreOperation result =
-            await vectorStoreClient.CreateVectorStoreAsync(waitUntilCompleted: false,
-                new VectorStoreCreationOptions()
-                {
-                    FileIds = { fileInfo.Id },
-                    Metadata = { { AssistantSampleMetadataKey, bool.TrueString } }
-                });
-
-        // Create a thread associated with a vector-store for the agent conversation.
-        string threadId =
-            await agent.CreateThreadAsync(
-                new OpenAIThreadCreationOptions
-                {
-                    VectorStoreId = result.VectorStoreId,
-                    Metadata = AssistantSampleMetadata,
-                });
-
-        // Respond to user input
-        try
-        {
-            await InvokeAgentAsync("Who is the youngest employee?");
-            await InvokeAgentAsync("Who works in sales?");
-            await InvokeAgentAsync("I have a customer request, who can help me?");
-        }
-        finally
-        {
-            await agent.DeleteThreadAsync(threadId);
-            await agent.DeleteAsync();
-            await vectorStoreClient.DeleteVectorStoreAsync(result.VectorStoreId);
-            await fileClient.DeleteFileAsync(fileInfo.Id);
-        }
-
-        // Local function to invoke agent and display the conversation messages.
-        async Task InvokeAgentAsync(string input)
-        {
-            ChatMessageContent message = new(AuthorRole.User, input);
-            await agent.AddChatMessageAsync(threadId, message);
-            this.WriteAgentChatMessage(message);
-
-            await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
-            {
-                this.WriteAgentChatMessage(response);
-            }
-        }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithProcesses/Step04/KernelExtensions.cs b/dotnet/samples/GettingStartedWithProcesses/Step04/KernelExtensions.cs
index bae1cc92f31c..87ce86446994 100644
--- a/dotnet/samples/GettingStartedWithProcesses/Step04/KernelExtensions.cs
+++ b/dotnet/samples/GettingStartedWithProcesses/Step04/KernelExtensions.cs
@@ -1,8 +1,9 @@
 // Copyright (c) Microsoft. All rights reserved.
+
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
-using Microsoft.SemanticKernel.Agents.History;
+using Microsoft.SemanticKernel.ChatCompletion;
 
 namespace Step04;
 
diff --git a/dotnet/samples/GettingStartedWithProcesses/Step04/Step04_AgentOrchestration.cs b/dotnet/samples/GettingStartedWithProcesses/Step04/Step04_AgentOrchestration.cs
index e349404c5137..8ee3fb3adad6 100644
--- a/dotnet/samples/GettingStartedWithProcesses/Step04/Step04_AgentOrchestration.cs
+++ b/dotnet/samples/GettingStartedWithProcesses/Step04/Step04_AgentOrchestration.cs
@@ -1,11 +1,11 @@
 // Copyright (c) Microsoft. All rights reserved.
+
 using Events;
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.Extensions.Logging.Abstractions;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
 using Microsoft.SemanticKernel.Agents.Chat;
-using Microsoft.SemanticKernel.Agents.History;
 using Microsoft.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel.Connectors.OpenAI;
 using SharedSteps;
diff --git a/dotnet/src/.editorconfig b/dotnet/src/.editorconfig
index b2afb3dc53c6..7867acb76728 100644
--- a/dotnet/src/.editorconfig
+++ b/dotnet/src/.editorconfig
@@ -4,6 +4,9 @@ dotnet_diagnostic.CA2007.severity = error # Do not directly await a Task
 dotnet_diagnostic.VSTHRD111.severity = error # Use .ConfigureAwait(bool)
 dotnet_diagnostic.IDE1006.severity = error # Naming rule violations
 
+# Testing
+dotnet_diagnostic.Moq1400.severity = none # Explicitly choose a mocking behavior instead of relying on the default (Loose) behavior
+
 # Resharper disabled rules: https://www.jetbrains.com/help/resharper/Reference__Code_Inspections_CSHARP.html#CodeSmell
 resharper_not_resolved_in_text_highlighting = none # Disable Resharper's "Not resolved in text" highlighting
 resharper_check_namespace_highlighting = none # Disable Resharper's "Check namespace" highlighting
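
For context on the new suppression (illustration only; `IMyService` is a hypothetical interface, not a type from this patch): Moq1400 asks tests to pick a `MockBehavior` explicitly rather than relying on Moq's default Loose behavior, for example:

```
using Moq;

var loose = new Mock<IMyService>();                       // default (Loose) behavior - the pattern Moq1400 flags
var strict = new Mock<IMyService>(MockBehavior.Strict);   // explicit behavior - what the analyzer asks for
```
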
diff --git a/dotnet/src/Agents/Abstractions/Agent.cs b/dotnet/src/Agents/Abstractions/Agent.cs
index 06af107a0a5d..383b5df27385 100644
--- a/dotnet/src/Agents/Abstractions/Agent.cs
+++ b/dotnet/src/Agents/Abstractions/Agent.cs
@@ -1,6 +1,7 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System;
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Extensions.Logging;
@@ -20,32 +21,37 @@ namespace Microsoft.SemanticKernel.Agents;
 public abstract class Agent
 {
     /// 
-    /// The description of the agent (optional)
+    /// Gets the description of the agent (optional).
     /// 
     public string? Description { get; init; }
 
     /// 
-    /// The identifier of the agent (optional).
+    /// Gets the identifier of the agent (optional).
     /// 
-    /// 
-    /// Default to a random guid value, but may be overridden.
-    /// 
+    /// 
+    /// The identifier of the agent. The default is a random GUID value, but that can be overridden.
+    /// 
     public string Id { get; init; } = Guid.NewGuid().ToString();
 
     /// 
-    /// The name of the agent (optional)
+    /// Gets the name of the agent (optional).
     /// 
     public string? Name { get; init; }
 
     /// 
     /// A  for this .
     /// 
-    public ILoggerFactory LoggerFactory { get; init; } = NullLoggerFactory.Instance;
+    public ILoggerFactory? LoggerFactory { get; init; }
 
     /// 
     /// The  associated with this  .
     /// 
-    protected ILogger Logger => this._logger ??= this.LoggerFactory.CreateLogger(this.GetType());
+    protected ILogger Logger => this._logger ??= this.ActiveLoggerFactory.CreateLogger(this.GetType());
+
+    /// 
+    /// Gets the active logger factory, if defined; otherwise, the default.
+    /// 
+    protected virtual ILoggerFactory ActiveLoggerFactory => this.LoggerFactory ?? NullLoggerFactory.Instance;
 
     /// 
     /// Set of keys to establish channel affinity.  Minimum expected key-set:
@@ -59,10 +65,13 @@ public abstract class Agent
     /// For example, two OpenAI Assistant agents each targeting a different Azure OpenAI endpoint
     /// would require their own channel. In this case, the endpoint could be expressed as an additional key.
     /// 
+    [Experimental("SKEXP0110")]
+#pragma warning disable CA1024 // Use properties where appropriate
     protected internal abstract IEnumerable GetChannelKeys();
+#pragma warning restore CA1024 // Use properties where appropriate
 
     /// 
-    /// Produce the an  appropriate for the agent type.
+    /// Produce an  appropriate for the agent type.
     /// 
     /// The  to monitor for cancellation requests. The default is .
     /// An  appropriate for the agent type.
@@ -70,10 +79,11 @@ public abstract class Agent
     /// Every agent conversation, or , will establish one or more 
     /// objects according to the specific  type.
     /// 
+    [Experimental("SKEXP0110")]
     protected internal abstract Task CreateChannelAsync(CancellationToken cancellationToken);
 
     /// 
-    /// Produce the an  appropriate for the agent type based on the provided state.
+    /// Produce an  appropriate for the agent type based on the provided state.
     /// 
     /// The channel state, as serialized
     /// The  to monitor for cancellation requests. The default is .
@@ -82,6 +92,7 @@ public abstract class Agent
     /// Every agent conversation, or , will establish one or more 
     /// objects according to the specific  type.
     /// 
+    [Experimental("SKEXP0110")]
     protected internal abstract Task RestoreChannelAsync(string channelState, CancellationToken cancellationToken);
 
     private ILogger? _logger;
diff --git a/dotnet/src/Agents/Abstractions/AgentChannel.cs b/dotnet/src/Agents/Abstractions/AgentChannel.cs
index 046348443a39..56c631b0c1b9 100644
--- a/dotnet/src/Agents/Abstractions/AgentChannel.cs
+++ b/dotnet/src/Agents/Abstractions/AgentChannel.cs
@@ -1,5 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Extensions.Logging;
@@ -9,12 +10,15 @@ namespace Microsoft.SemanticKernel.Agents;
 
 /// 
 /// Defines the communication protocol for a particular  type.
-/// An agent provides it own  via .
 /// 
+/// 
+/// An agent provides its own  via .
+/// 
+[Experimental("SKEXP0110")]
 public abstract class AgentChannel
 {
     /// 
-    /// The  associated with the .
+    /// Gets or sets the  associated with the .
     /// 
     public ILogger Logger { get; set; } = NullLogger.Instance;
 
@@ -24,7 +28,7 @@ public abstract class AgentChannel
     protected internal abstract string Serialize();
 
     /// 
-    /// Receive the conversation messages.  Used when joining a conversation and also during each agent interaction..
+    /// Receive the conversation messages.  Used when joining a conversation and also during each agent interaction.
     /// 
     /// The chat history at the point the channel is created.
     /// The  to monitor for cancellation requests. The default is .
@@ -35,7 +39,7 @@ public abstract class AgentChannel
     /// 
     /// The  to monitor for cancellation requests. The default is .
     /// 
-    /// The channel wont' be reused; rather, it will be discarded and a new one created.
+    /// The channel won't be reused; rather, it will be discarded and a new one created.
     /// 
     protected internal abstract Task ResetAsync(CancellationToken cancellationToken = default);
 
@@ -75,16 +79,17 @@ protected internal abstract IAsyncEnumerable Invoke
 
 /// 
 /// Defines the communication protocol for a particular  type.
-/// An agent provides it own  via .
 /// 
-/// The agent type for this channel
+/// The agent type for this channel.
 /// 
-/// Convenience upcast to agent for .
+/// An agent provides its own  via .
+/// This class is a convenience upcast to an agent for .
 /// 
+[Experimental("SKEXP0110")]
 public abstract class AgentChannel : AgentChannel where TAgent : Agent
 {
     /// 
-    /// Process a discrete incremental interaction between a single  an a .
+    /// Process a discrete incremental interaction between a single  and a .
     /// 
     /// The agent actively interacting with the chat.
     /// The  to monitor for cancellation requests. The default is .
@@ -110,7 +115,7 @@ public abstract class AgentChannel : AgentChannel where TAgent : Agent
         return this.InvokeAsync((TAgent)agent, cancellationToken);
     }
     /// 
-    /// Process a discrete incremental interaction between a single  an a .
+    /// Process a discrete incremental interaction between a single  and a .
     /// 
     /// The agent actively interacting with the chat.
     /// The receiver for the completed messages generated
diff --git a/dotnet/src/Agents/Abstractions/AgentChat.cs b/dotnet/src/Agents/Abstractions/AgentChat.cs
index f458739e3bb4..22b4077527e1 100644
--- a/dotnet/src/Agents/Abstractions/AgentChat.cs
+++ b/dotnet/src/Agents/Abstractions/AgentChat.cs
@@ -1,5 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Runtime.CompilerServices;
 using System.Text.Json;
@@ -15,12 +16,13 @@
 namespace Microsoft.SemanticKernel.Agents;
 
 /// 
-/// Point of interaction for one or more agents.
+/// Provides a point of interaction for one or more agents.
 /// 
 /// 
-/// Any  instance does not support concurrent invocation and
-/// will throw exception if concurrent activity is attempted for any public method.
+///  instances don't support concurrent invocation and
+/// will throw an exception if concurrent activity is attempted for any public method.
 /// 
+[Experimental("SKEXP0110")]
 public abstract class AgentChat
 {
     private readonly BroadcastQueue _broadcastQueue;
@@ -31,62 +33,62 @@ public abstract class AgentChat
     private ILogger? _logger;
 
     /// 
-    /// The agents participating in the chat.
+    /// Gets the agents participating in the chat.
     /// 
     public abstract IReadOnlyList Agents { get; }
 
     /// 
-    /// Indicates if a chat operation is active.  Activity is defined as
-    /// any the execution of any public method.
+    /// Gets a value that indicates whether a chat operation is active. Activity is defined as
+    /// any execution of a public method.
     /// 
     public bool IsActive => Interlocked.CompareExchange(ref this._isActive, 1, 1) > 0;
 
     /// 
-    /// The  associated with the .
+    /// Gets the  associated with the .
     /// 
     public ILoggerFactory LoggerFactory { get; init; } = NullLoggerFactory.Instance;
 
     /// 
-    /// The  associated with this chat.
+    /// Gets the  associated with this chat.
     /// 
     protected ILogger Logger => this._logger ??= this.LoggerFactory.CreateLogger(this.GetType());
 
     /// 
-    /// Exposes the internal history to subclasses.
+    /// Gets the internal history to expose it to subclasses.
     /// 
     protected ChatHistory History { get; }
 
     /// 
-    /// Process a series of interactions between the agents participating in this chat.
+    /// Processes a series of interactions between the agents participating in this chat.
     /// 
     /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of messages.
+    /// An asynchronous enumeration of messages.
     public abstract IAsyncEnumerable InvokeAsync(CancellationToken cancellationToken = default);
 
     /// 
-    /// Process a series of interactions between the agents participating in this chat.
+    /// Processes a series of interactions between the agents participating in this chat.
     /// 
     /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of messages.
+    /// An asynchronous enumeration of messages.
     public abstract IAsyncEnumerable InvokeStreamingAsync(CancellationToken cancellationToken = default);
 
     /// 
-    /// Retrieve the chat history.
+    /// Retrieves the chat history.
     /// 
     /// The  to monitor for cancellation requests. The default is .
-    /// The message history
+    /// The message history.
     public IAsyncEnumerable GetChatMessagesAsync(CancellationToken cancellationToken = default) =>
         this.GetChatMessagesAsync(agent: null, cancellationToken);
 
     /// 
-    /// Retrieve the message history, either the primary history or
-    /// an agent specific version.
+    /// Retrieves the message history, either the primary history or
+    /// an agent-specific version.
     /// 
     /// An optional agent, if requesting an agent history.
     /// The  to monitor for cancellation requests. The default is .
-    /// The message history
+    /// The message history.
     /// 
-    /// Any  instance does not support concurrent invocation and
+    ///  instances don't support concurrent invocation and
     /// will throw exception if concurrent activity is attempted.
     /// 
     public async IAsyncEnumerable GetChatMessagesAsync(
@@ -132,39 +134,38 @@ public async IAsyncEnumerable GetChatMessagesAsync(
     }
 
     /// 
-    /// Append a message to the conversation.  Adding a message while an agent
+    /// Appends a message to the conversation. Adding a message while an agent
     /// is active is not allowed.
     /// 
-    /// A non-system message with which to append to the conversation.
+    /// A non-system message to append to the conversation.
     /// 
-    /// Adding a message to the conversation requires any active  remains
+    /// Adding a message to the conversation requires that any active  remains
     /// synchronized, so the message is broadcast to all channels.
-    /// 
-    /// KernelException if a system message is present, without taking any other action
-    /// 
-    /// Any  instance does not support concurrent invocation and
+    ///
+    ///  instances don't support concurrent invocation and
     /// will throw exception if concurrent activity is attempted.
     /// 
+    /// A system message is present, and no other action is taken.
     public void AddChatMessage(ChatMessageContent message)
     {
         this.AddChatMessages([message]);
     }
 
     /// 
-    /// Append messages to the conversation.  Adding messages while an agent
+    /// Appends messages to the conversation. Adding messages while an agent
     /// is active is not allowed.
     /// 
-    /// Set of non-system messages with which to append to the conversation.
+    /// A set of non-system messages to append to the conversation.
     /// 
-    /// Adding messages to the conversation requires any active  remains
+    /// Adding messages to the conversation requires that any active  remains
     /// synchronized, so the messages are broadcast to all channels.
-    /// 
-    /// KernelException if a system message is present, without taking any other action
-    /// KernelException chat has current activity.
-    /// 
-    /// Any  instance does not support concurrent invocation and
+    ///
+    ///  instances don't support concurrent invocation and
     /// will throw exception if concurrent activity is attempted.
     /// 
+    /// A system message is present, and no other action is taken.
+    /// -or-
+    /// The chat has current activity.
     public void AddChatMessages(IReadOnlyList messages)
     {
         this.SetActivityOrThrow(); // Disallow concurrent access to chat history
@@ -198,13 +199,13 @@ public void AddChatMessages(IReadOnlyList messages)
     }
 
     /// 
-    /// Process a discrete incremental interaction between a single  an a .
+    /// Processes a discrete incremental interaction between a single  and a .
     /// 
     /// The agent actively interacting with the chat.
     /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of messages.
+    /// An asynchronous enumeration of messages.
     /// 
-    /// Any  instance does not support concurrent invocation and
+    ///  instances don't support concurrent invocation and
     /// will throw exception if concurrent activity is attempted.
     /// 
     protected async IAsyncEnumerable InvokeAgentAsync(
@@ -213,7 +214,7 @@ protected async IAsyncEnumerable InvokeAgentAsync(
     {
         this.SetActivityOrThrow(); // Disallow concurrent access to chat history
 
-        this.Logger.LogAgentChatInvokingAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id);
+        this.Logger.LogAgentChatInvokingAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
 
         try
         {
@@ -226,7 +227,7 @@ protected async IAsyncEnumerable InvokeAgentAsync(
 
             await foreach ((bool isVisible, ChatMessageContent message) in channel.InvokeAsync(agent, cancellationToken).ConfigureAwait(false))
             {
-                this.Logger.LogAgentChatInvokedAgentMessage(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, message);
+                this.Logger.LogAgentChatInvokedAgentMessage(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName(), message);
 
                 messages.Add(message);
 
@@ -248,7 +249,7 @@ protected async IAsyncEnumerable InvokeAgentAsync(
                     .Select(kvp => new ChannelReference(kvp.Value, kvp.Key));
             this._broadcastQueue.Enqueue(channelRefs, messages);
 
-            this.Logger.LogAgentChatInvokedAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id);
+            this.Logger.LogAgentChatInvokedAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
         }
         finally
         {
@@ -257,13 +258,13 @@ protected async IAsyncEnumerable InvokeAgentAsync(
     }
 
     /// 
-    /// Process a discrete incremental interaction between a single  an a .
+    /// Processes a discrete incremental interaction between a single  and a .
     /// 
     /// The agent actively interacting with the chat.
     /// The  to monitor for cancellation requests. The default is .
     /// Asynchronous enumeration of messages.
     /// 
-    /// Any  instance does not support concurrent invocation and
+    ///  instances don't support concurrent invocation and
     /// will throw exception if concurrent activity is attempted.
     /// 
     protected async IAsyncEnumerable InvokeStreamingAgentAsync(
@@ -272,7 +273,7 @@ protected async IAsyncEnumerable InvokeStreamingAge
     {
         this.SetActivityOrThrow(); // Disallow concurrent access to chat history
 
-        this.Logger.LogAgentChatInvokingAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id);
+        this.Logger.LogAgentChatInvokingAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
 
         try
         {
@@ -290,7 +291,7 @@ protected async IAsyncEnumerable InvokeStreamingAge
 
             this.History.AddRange(messages);
 
-            this.Logger.LogAgentChatInvokedStreamingAgentMessages(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, messages);
+            this.Logger.LogAgentChatInvokedStreamingAgentMessages(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName(), messages);
 
             // Broadcast message to other channels (in parallel)
             // Note: Able to queue messages without synchronizing channels.
@@ -300,7 +301,7 @@ protected async IAsyncEnumerable InvokeStreamingAge
                     .Select(kvp => new ChannelReference(kvp.Value, kvp.Key));
             this._broadcastQueue.Enqueue(channelRefs, messages);
 
-            this.Logger.LogAgentChatInvokedAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id);
+            this.Logger.LogAgentChatInvokedAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
         }
         finally
         {
@@ -309,7 +310,7 @@ protected async IAsyncEnumerable InvokeStreamingAge
     }
 
     /// 
-    /// Reset the chat, clearing all history and persisted state.
+    /// Resets the chat, clearing all history and persisted state.
     /// All agents will remain present.
     /// 
     /// The  to monitor for cancellation requests. The default is .
@@ -395,12 +396,12 @@ private void ClearActivitySignal()
     }
 
     /// 
-    /// Test to ensure chat is not concurrently active and throw exception if it is.
+    /// Checks to ensure the chat is not concurrently active and throws an exception if it is.
     /// If not, activity is signaled.
     /// 
     /// 
-    /// Rather than allowing concurrent invocation to result in undefined behavior / failure,
-    /// it is preferred to fail-fast in order to avoid side-effects / state mutation.
+    /// Rather than allowing concurrent invocation to result in undefined behavior or failure,
+    /// it's preferred to fail fast to avoid side effects or state mutation.
     /// The activity signal is used to manage ability and visibility for taking actions based
     /// on conversation history.
     /// 
@@ -433,7 +434,7 @@ private async Task GetOrCreateChannelAsync(Agent agent, Cancellati
         AgentChannel? channel = await this.SynchronizeChannelAsync(channelKey, cancellationToken).ConfigureAwait(false);
         if (channel is null)
         {
-            this.Logger.LogAgentChatCreatingChannel(nameof(InvokeAgentAsync), agent.GetType(), agent.Id);
+            this.Logger.LogAgentChatCreatingChannel(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
 
             channel = await agent.CreateChannelAsync(cancellationToken).ConfigureAwait(false);
 
@@ -445,7 +446,7 @@ private async Task GetOrCreateChannelAsync(Agent agent, Cancellati
                 await channel.ReceiveAsync(this.History, cancellationToken).ConfigureAwait(false);
             }
 
-            this.Logger.LogAgentChatCreatedChannel(nameof(InvokeAgentAsync), agent.GetType(), agent.Id);
+            this.Logger.LogAgentChatCreatedChannel(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
         }
 
         return channel;
diff --git a/dotnet/src/Agents/Abstractions/AgentChatSerializer.cs b/dotnet/src/Agents/Abstractions/AgentChatSerializer.cs
index 146e00d7965f..b6174284d959 100644
--- a/dotnet/src/Agents/Abstractions/AgentChatSerializer.cs
+++ b/dotnet/src/Agents/Abstractions/AgentChatSerializer.cs
@@ -1,5 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.IO;
 using System.Text.Json;
 using System.Text.Json.Serialization;
@@ -9,8 +10,9 @@
 namespace Microsoft.SemanticKernel.Agents;
 
 /// 
-/// Able to serialize and deserialize an .
+/// Serializes and deserializes an .
 /// 
+[Experimental("SKEXP0110")]
 public sealed class AgentChatSerializer
 {
     private readonly AgentChatState _state;
@@ -23,7 +25,7 @@ public sealed class AgentChatSerializer
         };
 
     /// 
-    /// Serialize the provided  to the target stream.
+    /// Serializes the provided  to the target stream.
     /// 
     public static async Task SerializeAsync(TChat chat, Stream stream, JsonSerializerOptions? serializerOptions = null) where TChat : AgentChat
     {
@@ -32,7 +34,7 @@ public static async Task SerializeAsync(TChat chat, Stream stream, JsonSe
     }
 
     /// 
-    /// Provides a  that is able to restore an .
+    /// Provides a  that's able to restore an .
     /// 
     public static async Task DeserializeAsync(Stream stream, JsonSerializerOptions? serializerOptions = null)
     {
@@ -44,13 +46,13 @@ await JsonSerializer.DeserializeAsync(stream, serializerOptions
     }
 
     /// 
-    /// Enumerates the participants of the original  so that
-    /// the caller may be include them in the restored .
+    /// Gets the participants of the original  so that
+    /// the caller can include them in the restored .
     /// 
     public IEnumerable Participants => this._state.Participants;
 
     /// 
-    /// Restore the  to the previously captured state.
+    /// Restores the  to the previously captured state.
     /// 
     public Task DeserializeAsync(TChat chat) where TChat : AgentChat => chat.DeserializeAsync(this._state);
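
A minimal usage sketch of the serializer API above. AgentGroupChat is assumed as the concrete AgentChat subclass, and the file path is arbitrary; the restored chat must already contain the same participating agents.

using System.IO;
using System.Threading.Tasks;
using Microsoft.SemanticKernel.Agents;

public static class AgentChatPersistenceSketch
{
    public static async Task SaveAsync(AgentGroupChat chat, string path)
    {
        // Capture the chat state (history, channel state, participants) to a stream.
        using FileStream stream = File.Create(path);
        await AgentChatSerializer.SerializeAsync(chat, stream);
    }

    public static async Task RestoreAsync(AgentGroupChat chat, string path)
    {
        // Read the captured state and apply it to a freshly constructed chat.
        using FileStream stream = File.OpenRead(path);
        AgentChatSerializer serializer = await AgentChatSerializer.DeserializeAsync(stream);
        await serializer.DeserializeAsync(chat);
    }
}
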
 
diff --git a/dotnet/src/Agents/Abstractions/Agents.Abstractions.csproj b/dotnet/src/Agents/Abstractions/Agents.Abstractions.csproj
index 86d2f37c2b66..2cc0d9799bc1 100644
--- a/dotnet/src/Agents/Abstractions/Agents.Abstractions.csproj
+++ b/dotnet/src/Agents/Abstractions/Agents.Abstractions.csproj
@@ -1,4 +1,4 @@
-
+
 
   
     
@@ -6,7 +6,7 @@
     Microsoft.SemanticKernel.Agents
     net8.0;netstandard2.0
     false
-    alpha
+    preview
   
 
   
@@ -20,6 +20,7 @@
   
     
     
+    
   
 
   
@@ -29,7 +30,7 @@
   
 
   
-    
+    
   
 
   
diff --git a/dotnet/src/Agents/Abstractions/AggregatorAgent.cs b/dotnet/src/Agents/Abstractions/AggregatorAgent.cs
index eb1f7d0fac98..8cde6b5a9001 100644
--- a/dotnet/src/Agents/Abstractions/AggregatorAgent.cs
+++ b/dotnet/src/Agents/Abstractions/AggregatorAgent.cs
@@ -1,6 +1,7 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System;
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Text.Json;
 using System.Threading;
 using System.Threading.Tasks;
@@ -12,6 +13,7 @@ namespace Microsoft.SemanticKernel.Agents;
 /// Defines the relationship between the internal aggregated chat and the chat
 /// with which  is participating.
 /// 
+[Experimental("SKEXP0110")]
 public enum AggregatorMode
 {
     /// 
@@ -29,18 +31,22 @@ public enum AggregatorMode
 /// Allows an  to participate in another  as an .
 /// 
 /// A factory method that produces a new  instance.
+[Experimental("SKEXP0110")]
 public sealed class AggregatorAgent(Func chatProvider) : Agent
 {
     /// 
-    /// Defines the relationship between the internal aggregated chat and the chat
+    /// Gets the relationship between the internal aggregated chat and the chat
     /// with which  is participating.
-    /// Default: .
     /// 
+    /// 
+    /// The relationship between the internal aggregated chat and the chat
+    /// with which  is participating. The default value is .
+    /// 
     public AggregatorMode Mode { get; init; } = AggregatorMode.Flat;
 
     /// 
     /// 
-    /// Different  will never share the same channel.
+    /// Different  instances will never share the same channel.
     /// 
     protected internal override IEnumerable GetChannelKeys()
     {
diff --git a/dotnet/src/Agents/Abstractions/AggregatorChannel.cs b/dotnet/src/Agents/Abstractions/AggregatorChannel.cs
index f0dcf5736192..a002e41351af 100644
--- a/dotnet/src/Agents/Abstractions/AggregatorChannel.cs
+++ b/dotnet/src/Agents/Abstractions/AggregatorChannel.cs
@@ -1,5 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Runtime.CompilerServices;
 using System.Text.Json;
@@ -11,6 +12,7 @@ namespace Microsoft.SemanticKernel.Agents;
 /// 
 /// Adapt channel contract to underlying .
 /// 
+[Experimental("SKEXP0110")]
 internal sealed class AggregatorChannel(AgentChat chat) : AgentChannel
 {
     private readonly AgentChat _chat = chat;
diff --git a/dotnet/src/Agents/Abstractions/Extensions/ChatHistoryExtensions.cs b/dotnet/src/Agents/Abstractions/Extensions/ChatHistoryExtensions.cs
index d8ef44a416a1..0e93e3a3e2fd 100644
--- a/dotnet/src/Agents/Abstractions/Extensions/ChatHistoryExtensions.cs
+++ b/dotnet/src/Agents/Abstractions/Extensions/ChatHistoryExtensions.cs
@@ -6,14 +6,14 @@
 namespace Microsoft.SemanticKernel.Agents.Extensions;
 
 /// 
-/// Extension methods for 
+/// Provides extension methods for .
 /// 
 public static class ChatHistoryExtensions
 {
     /// 
-    /// Enumeration of chat-history in descending order.
+    /// Enumerates a chat history in descending order.
     /// 
-    /// The chat-history
+    /// The chat history to sort.
     public static IEnumerable ToDescending(this ChatHistory history)
     {
         for (int index = history.Count; index > 0; --index)
@@ -23,9 +23,9 @@ public static IEnumerable ToDescending(this ChatHistory hist
     }
 
     /// 
-    /// Asynchronous enumeration of chat-history in descending order.
+    /// Enumerates a history in descending order asynchronously.
     /// 
-    /// The chat-history
+    /// The chat history to sort.
     public static IAsyncEnumerable ToDescendingAsync(this ChatHistory history)
     {
         return history.ToDescending().ToAsyncEnumerable();
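
A short usage sketch of the extension methods above; the message contents are arbitrary.

using System;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.Extensions;
using Microsoft.SemanticKernel.ChatCompletion;

ChatHistory history = new();
history.AddUserMessage("What changed in this release?");
history.AddAssistantMessage("The record options were removed.");

// Enumerates newest-to-oldest: the assistant reply is yielded before the user message.
foreach (ChatMessageContent message in history.ToDescending())
{
    Console.WriteLine($"{message.Role}: {message.Content}");
}
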
diff --git a/dotnet/src/Agents/Abstractions/Internal/BroadcastQueue.cs b/dotnet/src/Agents/Abstractions/Internal/BroadcastQueue.cs
index b4007eec2c49..5c3d6fcf7bb3 100644
--- a/dotnet/src/Agents/Abstractions/Internal/BroadcastQueue.cs
+++ b/dotnet/src/Agents/Abstractions/Internal/BroadcastQueue.cs
@@ -1,6 +1,7 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System;
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Threading;
 using System.Threading.Tasks;
 using ChannelQueue = System.Collections.Generic.Queue>;
@@ -21,13 +22,14 @@ namespace Microsoft.SemanticKernel.Agents.Internal;
 ///  is never invoked concurrently, which eliminates
 /// race conditions over the queue dictionary.
 /// 
+[Experimental("SKEXP0110")]
 internal sealed class BroadcastQueue
 {
     private readonly Dictionary _queues = [];
 
     /// 
-    /// Defines the yield duration when waiting on a channel-queue to synchronize.
-    /// to drain.
+    /// Defines the yield duration when waiting on a channel-queue to synchronize
+    /// and drain.
     /// 
     public TimeSpan BlockDuration { get; set; } = TimeSpan.FromSeconds(0.1);
 
diff --git a/dotnet/src/Agents/Abstractions/Internal/ChannelReference.cs b/dotnet/src/Agents/Abstractions/Internal/ChannelReference.cs
index f49835355157..236e25415879 100644
--- a/dotnet/src/Agents/Abstractions/Internal/ChannelReference.cs
+++ b/dotnet/src/Agents/Abstractions/Internal/ChannelReference.cs
@@ -1,9 +1,12 @@
 // Copyright (c) Microsoft. All rights reserved.
+using System.Diagnostics.CodeAnalysis;
+
 namespace Microsoft.SemanticKernel.Agents.Internal;
 
 /// 
 /// Tracks channel along with its hashed key.
 /// 
+[Experimental("SKEXP0110")]
 internal readonly struct ChannelReference(AgentChannel channel, string hash)
 {
     /// 
diff --git a/dotnet/src/Agents/Abstractions/KernelAgent.cs b/dotnet/src/Agents/Abstractions/KernelAgent.cs
index 719936b868f1..aac75e2fd62f 100644
--- a/dotnet/src/Agents/Abstractions/KernelAgent.cs
+++ b/dotnet/src/Agents/Abstractions/KernelAgent.cs
@@ -3,85 +3,74 @@
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
 
 namespace Microsoft.SemanticKernel.Agents;
 
 /// 
-/// Base class for agents utilizing  plugins or services.
+/// Provides a base class for agents utilizing  plugins or services.
 /// 
 public abstract class KernelAgent : Agent
 {
     /// 
-    /// Arguments for the agent instruction parameters (optional).
+    /// Gets the arguments for the agent instruction parameters (optional).
     /// 
     /// 
     /// Also includes .
     /// 
-    public KernelArguments? Arguments { get; init; }
+    public KernelArguments Arguments { get; init; } = [];
 
     /// 
-    /// The instructions for the agent (optional)
+    /// Gets the instructions for the agent (optional).
     /// 
-    /// 
-    /// Instructions may be formatted in "semantic-kernel" template format.
-    /// ()
-    /// 
     public string? Instructions { get; init; }
 
     /// 
-    /// The  containing services, plugins, and filters for use throughout the agent lifetime.
+    /// Gets the  containing services, plugins, and filters for use throughout the agent lifetime.
     /// 
-    /// 
-    /// Defaults to empty Kernel, but may be overridden.
-    /// 
+    /// 
+    /// The  containing services, plugins, and filters for use throughout the agent lifetime. The default value is an empty Kernel, but that can be overridden.
+    /// 
     public Kernel Kernel { get; init; } = new();
 
     /// 
-    /// A prompt-template based on the agent instructions.
+    /// Gets or sets a prompt template based on the agent instructions.
     /// 
-    public IPromptTemplate? Template { get; protected set; }
+    protected IPromptTemplate? Template { get; set; }
+
+    /// 
+    protected override ILoggerFactory ActiveLoggerFactory => this.LoggerFactory ?? this.Kernel.LoggerFactory;
 
     /// 
-    /// Format the system instructions for the agent.
+    /// Formats the system instructions for the agent.
     /// 
     /// The  containing services, plugins, and other state for use by the agent.
     /// Optional arguments to pass to the agents's invocation, including any .
     /// The  to monitor for cancellation requests. The default is .
-    /// The formatted system instructions for the agent
+    /// The formatted system instructions for the agent.
     protected async Task FormatInstructionsAsync(Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken)
     {
-        // If  is not set, default instructions may be treated as "semantic-kernel" template.
-        if (this.Template == null)
+        if (this.Template is null)
         {
-            if (string.IsNullOrWhiteSpace(this.Instructions))
-            {
-                return null;
-            }
-
-            KernelPromptTemplateFactory templateFactory = new(this.LoggerFactory);
-            this.Template = templateFactory.Create(new PromptTemplateConfig(this.Instructions!));
+            // Use the instructions as-is
+            return this.Instructions;
         }
 
+        // Use the provided template as the instructions
         return await this.Template.RenderAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
     }
 
     /// 
-    /// Provide a merged instance of  with precedence for override arguments.
+    /// Provides a merged instance of  with precedence for override arguments.
     /// 
-    /// The override arguments
+    /// The override arguments.
     /// 
     /// This merge preserves original  and  parameters.
-    /// and allows for incremental addition or replacement of specific parameters while also preserving the ability
+    /// It allows for incremental addition or replacement of specific parameters while also preserving the ability
     /// to override the execution settings.
     /// 
-    protected KernelArguments? MergeArguments(KernelArguments? arguments)
+    protected KernelArguments MergeArguments(KernelArguments? arguments)
     {
-        // Avoid merge when default arguments are not set.
-        if (this.Arguments == null)
-        {
-            return arguments;
-        }
-
         // Avoid merge when override arguments are not set.
         if (arguments == null)
         {
diff --git a/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs b/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs
index ebd9e83b42ce..22c2bda0e5da 100644
--- a/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs
+++ b/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs
@@ -3,6 +3,7 @@
 using System.Collections.Generic;
 using System.Diagnostics.CodeAnalysis;
 using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Agents.Extensions;
 
 namespace Microsoft.SemanticKernel.Agents;
 
@@ -21,11 +22,12 @@ internal static partial class AgentChatLogMessages
     /// 
     /// Logs retrieval of  messages.
     /// 
-    private static readonly Action s_logAgentChatGetChatMessages =
-        LoggerMessage.Define(
+    private static readonly Action s_logAgentChatGetChatMessages =
+        LoggerMessage.Define(
             logLevel: LogLevel.Debug,
             eventId: 0,
-            "[{MethodName}] Source: {MessageSourceType}/{MessageSourceId}.");
+            "[{MethodName}] Source: {MessageSourceType}/{MessageSourceId}/{MessageSourceName}.");
+
     public static void LogAgentChatGetChatMessages(
         this ILogger logger,
         string methodName,
@@ -33,13 +35,13 @@ public static void LogAgentChatGetChatMessages(
     {
         if (logger.IsEnabled(LogLevel.Debug))
         {
-            if (null == agent)
+            if (agent is null)
             {
-                s_logAgentChatGetChatMessages(logger, methodName, "primary", "primary", null);
+                s_logAgentChatGetChatMessages(logger, methodName, "primary", "primary", null, null);
             }
             else
             {
-                s_logAgentChatGetChatMessages(logger, methodName, agent.GetType().Name, agent.Id, null);
+                s_logAgentChatGetChatMessages(logger, methodName, agent.GetType().Name, agent.Id, agent.GetDisplayName(), null);
             }
         }
     }
@@ -74,12 +76,13 @@ public static partial void LogAgentChatAddedMessages(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Debug,
-        Message = "[{MethodName}] Invoking agent {AgentType}/{AgentId}.")]
+        Message = "[{MethodName}] Invoking agent {AgentType}/{AgentId}/{AgentName}.")]
     public static partial void LogAgentChatInvokingAgent(
         this ILogger logger,
         string methodName,
         Type agentType,
-        string agentId);
+        string agentId,
+        string agentName);
 
     /// 
     /// Logs  invoked agent message
@@ -87,35 +90,37 @@ public static partial void LogAgentChatInvokingAgent(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Trace,
-        Message = "[{MethodName}] Agent message {AgentType}/{AgentId}: {Message}.")]
+        Message = "[{MethodName}] Agent message {AgentType}/{AgentId}/{AgentName}: {Message}.")]
     public static partial void LogAgentChatInvokedAgentMessage(
         this ILogger logger,
         string methodName,
         Type agentType,
         string agentId,
+        string agentName,
         ChatMessageContent message);
 
     /// 
     /// Logs retrieval of streamed  messages.
     /// 
-    private static readonly Action s_logAgentChatInvokedStreamingAgentMessages =
-        LoggerMessage.Define(
+    private static readonly Action s_logAgentChatInvokedStreamingAgentMessages =
+        LoggerMessage.Define(
             logLevel: LogLevel.Debug,
             eventId: 0,
-            "[{MethodName}] Agent message {AgentType}/{AgentId}: {Message}.");
+            "[{MethodName}] Agent message {AgentType}/{AgentId}/{AgentName}: {Message}.");
 
     public static void LogAgentChatInvokedStreamingAgentMessages(
         this ILogger logger,
         string methodName,
         Type agentType,
         string agentId,
+        string agentName,
         IList messages)
     {
         if (logger.IsEnabled(LogLevel.Debug))
         {
             foreach (ChatMessageContent message in messages)
             {
-                s_logAgentChatInvokedStreamingAgentMessages(logger, methodName, agentType, agentId, message, null);
+                s_logAgentChatInvokedStreamingAgentMessages(logger, methodName, agentType, agentId, agentName, message, null);
             }
         }
     }
@@ -126,12 +131,13 @@ public static void LogAgentChatInvokedStreamingAgentMessages(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Information,
-        Message = "[{MethodName}] Invoked agent {AgentType}/{AgentId}.")]
+        Message = "[{MethodName}] Invoked agent {AgentType}/{AgentId}/{AgentName}.")]
     public static partial void LogAgentChatInvokedAgent(
         this ILogger logger,
         string methodName,
         Type agentType,
-        string agentId);
+        string agentId,
+        string agentName);
 
     /// 
     /// Logs  creating agent channel (started).
@@ -139,12 +145,13 @@ public static partial void LogAgentChatInvokedAgent(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Debug,
-        Message = "[{MethodName}] Creating channel for {AgentType}: {AgentId}")]
+        Message = "[{MethodName}] Creating channel for {AgentType}: {AgentId}/{AgentName}")]
     public static partial void LogAgentChatCreatingChannel(
         this ILogger logger,
         string methodName,
         Type agentType,
-        string agentId);
+        string agentId,
+        string agentName);
 
     /// 
     /// Logs  created agent channel (complete).
@@ -152,10 +159,11 @@ public static partial void LogAgentChatCreatingChannel(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Information,
-        Message = "[{MethodName}] Created channel for {AgentType}: {AgentId}")]
+        Message = "[{MethodName}] Created channel for {AgentType}: {AgentId}/{AgentName}")]
     public static partial void LogAgentChatCreatedChannel(
         this ILogger logger,
         string methodName,
         Type agentType,
-        string agentId);
+        string agentId,
+        string agentName);
 }
diff --git a/dotnet/src/Agents/Abstractions/Logging/AggregatorAgentLogMessages.cs b/dotnet/src/Agents/Abstractions/Logging/AggregatorAgentLogMessages.cs
index 441c9da117f5..08eb87c8613a 100644
--- a/dotnet/src/Agents/Abstractions/Logging/AggregatorAgentLogMessages.cs
+++ b/dotnet/src/Agents/Abstractions/Logging/AggregatorAgentLogMessages.cs
@@ -15,6 +15,7 @@ namespace Microsoft.SemanticKernel.Agents;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
+[Experimental("SKEXP0110")]
 internal static partial class AggregatorAgentLogMessages
 {
     /// 
diff --git a/dotnet/src/Agents/Abstractions/Serialization/AgentParticipant.cs b/dotnet/src/Agents/Abstractions/Serialization/AgentParticipant.cs
index 564f68b72ab6..278660096562 100644
--- a/dotnet/src/Agents/Abstractions/Serialization/AgentParticipant.cs
+++ b/dotnet/src/Agents/Abstractions/Serialization/AgentParticipant.cs
@@ -9,31 +9,37 @@ namespace Microsoft.SemanticKernel.Agents.Serialization;
 public sealed class AgentParticipant
 {
     /// 
-    /// The captured .
+    /// Gets the captured .
     /// 
     public string Id { get; init; } = string.Empty;
 
     /// 
-    /// The captured .
+    /// Gets the captured .
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? Name { get; init; }
 
     /// 
-    /// The fully qualified  type name.
+    /// Gets the fully qualified  type name.
     /// 
     public string Type { get; init; } = string.Empty;
 
     /// 
-    /// Parameterless constructor for deserialization.
+    /// Creates a new instance of .
     /// 
+    /// 
+    /// This parameterless constructor is for deserialization.
+    /// 
     [JsonConstructor]
     public AgentParticipant() { }
 
     /// 
-    /// Convenience constructor for serialization.
+    /// Creates a new instance of  with the specified agent.
     /// 
-    /// The referenced 
+    /// 
+    /// This is a convenience constructor for serialization.
+    /// 
+    /// The referenced .
     internal AgentParticipant(Agent agent)
     {
         this.Id = agent.Id;
diff --git a/dotnet/src/Agents/Abstractions/Serialization/ChatMessageReference.cs b/dotnet/src/Agents/Abstractions/Serialization/ChatMessageReference.cs
index f71f86c18b9d..d69011639d86 100644
--- a/dotnet/src/Agents/Abstractions/Serialization/ChatMessageReference.cs
+++ b/dotnet/src/Agents/Abstractions/Serialization/ChatMessageReference.cs
@@ -1,5 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Text.Json.Serialization;
 using Microsoft.SemanticKernel.ChatCompletion;
@@ -7,40 +8,41 @@
 namespace Microsoft.SemanticKernel.Agents.Serialization;
 
 /// 
-/// Present a  for serialization without metadata.
+/// Represents a  for serialization without metadata.
 /// 
 /// The referenced message
+[Experimental("SKEXP0110")]
 public sealed class ChatMessageReference(ChatMessageContent message)
 {
     /// 
-    /// The referenced  property.
+    /// Gets the referenced  property.
     /// 
     public string? AuthorName => message.AuthorName;
 
     /// 
-    /// The referenced  property.
+    /// Gets the referenced  property.
     /// 
     public AuthorRole Role => message.Role;
 
     /// 
-    /// The referenced  collection.
+    /// Gets the referenced  collection.
     /// 
     public IEnumerable Items => message.Items;
 
     /// 
-    /// The referenced  property.
+    /// Gets the referenced  property.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? ModelId => message.ModelId;
 
     /// 
-    /// The referenced  property.
+    /// Gets the referenced  property.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? MimeType => message.MimeType;
 
     /// 
-    /// Convenience method to reference a set of messages.
+    /// Converts a set of messages to  instances.
     /// 
     public static IEnumerable Prepare(IEnumerable messages) =>
         messages.Select(m => new ChatMessageReference(m));
diff --git a/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj b/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj
new file mode 100644
index 000000000000..5d26a6a16798
--- /dev/null
+++ b/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj
@@ -0,0 +1,48 @@
+
+
+  
+    
+    Microsoft.SemanticKernel.Agents.AzureAI
+    Microsoft.SemanticKernel.Agents.AzureAI
+    net8.0;netstandard2.0
+    $(NoWarn);SKEXP0110
+    false
+    preview
+  
+
+  
+
+  
+    
+    Semantic Kernel Agents - AzureAI
+    Defines a concrete Agent based on the Azure AI Agent API.
+  
+
+  
+    
+    
+    
+    
+    
+    
+    
+    
+    
+  
+
+  
+
+  
+    
+  
+
+  
+    
+  
+
+  
+    
+    
+  
+
+
\ No newline at end of file
diff --git a/dotnet/src/Agents/AzureAI/AzureAIAgent.ClientFactory.cs b/dotnet/src/Agents/AzureAI/AzureAIAgent.ClientFactory.cs
new file mode 100644
index 000000000000..f17a977ccd24
--- /dev/null
+++ b/dotnet/src/Agents/AzureAI/AzureAIAgent.ClientFactory.cs
@@ -0,0 +1,65 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Net.Http;
+using Azure.AI.Projects;
+using Azure.Core;
+using Azure.Core.Pipeline;
+using Microsoft.SemanticKernel.Http;
+
+namespace Microsoft.SemanticKernel.Agents.AzureAI;
+
+/// 
+/// Provides an  for use by .
+/// 
+public sealed partial class AzureAIAgent : KernelAgent
+{
+    /// 
+    /// Produces a .
+    /// 
+    /// The Azure AI Foundry project connection string, in the form `endpoint;subscription_id;resource_group_name;project_name`.
+    ///  A credential used to authenticate to an Azure Service.
+    /// A custom  for HTTP requests.
+    public static AIProjectClient CreateAzureAIClient(
+        string connectionString,
+        TokenCredential credential,
+        HttpClient? httpClient = null)
+    {
+        Verify.NotNullOrWhiteSpace(connectionString, nameof(connectionString));
+        Verify.NotNull(credential, nameof(credential));
+
+        AIProjectClientOptions clientOptions = CreateAzureClientOptions(httpClient);
+
+        return new AIProjectClient(connectionString, credential, clientOptions);
+    }
+
+    private static AIProjectClientOptions CreateAzureClientOptions(HttpClient? httpClient)
+    {
+        AIProjectClientOptions options =
+            new()
+            {
+                Diagnostics = {
+                    ApplicationId = HttpHeaderConstant.Values.UserAgent,
+                }
+            };
+
+        options.AddPolicy(new SemanticKernelHeadersPolicy(), HttpPipelinePosition.PerCall);
+
+        if (httpClient is not null)
+        {
+            options.Transport = new HttpClientTransport(httpClient);
+            // Disable retry policy if and only if a custom HttpClient is provided.
+            options.RetryPolicy = new RetryPolicy(maxRetries: 0);
+        }
+
+        return options;
+    }
+
+    private class SemanticKernelHeadersPolicy : HttpPipelineSynchronousPolicy
+    {
+        public override void OnSendingRequest(HttpMessage message)
+        {
+            message.Request.Headers.Add(
+                HttpHeaderConstant.Names.SemanticKernelVersion,
+                HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AzureAIAgent)));
+        }
+    }
+}
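
A minimal sketch of producing a client with the factory above. The connection string is a placeholder, and AzureCliCredential is only one possible TokenCredential.

using Azure.AI.Projects;
using Azure.Identity;
using Microsoft.SemanticKernel.Agents.AzureAI;

// Placeholder value in the documented `endpoint;subscription_id;resource_group_name;project_name` form.
string connectionString = "<endpoint>;<subscription_id>;<resource_group_name>;<project_name>";

AIProjectClient projectClient = AzureAIAgent.CreateAzureAIClient(connectionString, new AzureCliCredential());

// The agents client is what AzureAIAgent consumes for thread and run operations.
AgentsClient agentsClient = projectClient.GetAgentsClient();
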
diff --git a/dotnet/src/Agents/AzureAI/AzureAIAgent.cs b/dotnet/src/Agents/AzureAI/AzureAIAgent.cs
new file mode 100644
index 000000000000..912bd83778fe
--- /dev/null
+++ b/dotnet/src/Agents/AzureAI/AzureAIAgent.cs
@@ -0,0 +1,285 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.AI.Projects;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Agents.AzureAI.Internal;
+using Microsoft.SemanticKernel.Agents.Extensions;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Diagnostics;
+
+namespace Microsoft.SemanticKernel.Agents.AzureAI;
+
+/// 
+/// Provides a specialized  based on an Azure AI agent.
+/// 
+public sealed partial class AzureAIAgent : KernelAgent
+{
+    /// 
+    /// Provides tool definitions used when associating a file attachment to an input message:
+    /// .
+    /// 
+    public static class Tools
+    {
+        /// 
+        /// The code-interpreter tool.
+        /// 
+        public static readonly string CodeInterpreter = "code_interpreter";
+
+        /// 
+        /// The file-search tool.
+        /// 
+        public const string FileSearch = "file_search";
+    }
+
+    /// 
+    /// The metadata key that identifies code-interpreter content.
+    /// 
+    public const string CodeInterpreterMetadataKey = "code";
+
+    /// 
+    /// Gets the assistant definition.
+    /// 
+    public Azure.AI.Projects.Agent Definition { get; private init; }
+
+    /// 
+    /// Gets the polling behavior for run processing.
+    /// 
+    public RunPollingOptions PollingOptions { get; } = new();
+
+    /// 
+    /// Initializes a new instance of the  class.
+    /// 
+    /// The agent model definition.
+    /// An  instance.
+    /// Optional collection of plugins to add to the kernel.
+    /// An optional factory to produce the  for the agent.
+    /// The format of the prompt template used when the "templateFactory" parameter is supplied.
+    public AzureAIAgent(
+        Azure.AI.Projects.Agent model,
+        AgentsClient client,
+        IEnumerable? plugins = null,
+        IPromptTemplateFactory? templateFactory = null,
+        string? templateFormat = null)
+    {
+        this.Client = client;
+        this.Definition = model;
+        this.Description = this.Definition.Description;
+        this.Id = this.Definition.Id;
+        this.Name = this.Definition.Name;
+        this.Instructions = this.Definition.Instructions;
+
+        if (templateFactory != null)
+        {
+            Verify.NotNullOrWhiteSpace(templateFormat);
+
+            PromptTemplateConfig templateConfig = new(this.Instructions)
+            {
+                TemplateFormat = templateFormat
+            };
+
+            this.Template = templateFactory.Create(templateConfig);
+        }
+
+        if (plugins != null)
+        {
+            this.Kernel.Plugins.AddRange(plugins);
+        }
+    }
+
+    /// 
+    /// Gets the client used to manage threads, messages, and runs for this agent.
+    /// 
+    public AgentsClient Client { get; }
+
+    /// 
+    /// Adds a message to the specified thread.
+    /// 
+    /// The thread identifier.
+    /// A non-system message to append to the conversation.
+    /// The  to monitor for cancellation requests. The default is .
+    /// 
+    /// Only supports messages with role = User or Agent.
+    /// 
+    public Task AddChatMessageAsync(string threadId, ChatMessageContent message, CancellationToken cancellationToken = default)
+    {
+        return AgentThreadActions.CreateMessageAsync(this.Client, threadId, message, cancellationToken);
+    }
+
+    /// 
+    /// Gets messages for a specified thread.
+    /// 
+    /// The thread identifier.
+    /// The  to monitor for cancellation requests. The default is .
+    /// An asynchronous enumeration of messages.
+    public IAsyncEnumerable GetThreadMessagesAsync(string threadId, CancellationToken cancellationToken = default)
+    {
+        return AgentThreadActions.GetMessagesAsync(this.Client, threadId, cancellationToken);
+    }
+
+    /// 
+    /// Invokes the assistant on the specified thread.
+    /// 
+    /// The thread identifier.
+    /// Optional arguments to pass to the agent's invocation, including any .
+    /// The  containing services, plugins, and other state for use by the agent.
+    /// The  to monitor for cancellation requests. The default is .
+    /// An asynchronous enumeration of response messages.
+    /// 
+    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
+    /// 
+    public IAsyncEnumerable InvokeAsync(
+        string threadId,
+        KernelArguments? arguments = null,
+        Kernel? kernel = null,
+        CancellationToken cancellationToken = default)
+    {
+        return this.InvokeAsync(threadId, options: null, arguments, kernel, cancellationToken);
+    }
+
+    /// 
+    /// Invokes the assistant on the specified thread.
+    /// 
+    /// The thread identifier.
+    /// Optional invocation options.
+    /// Optional arguments to pass to the agent's invocation, including any .
+    /// The  containing services, plugins, and other state for use by the agent.
+    /// The  to monitor for cancellation requests. The default is .
+    /// An asynchronous enumeration of response messages.
+    /// 
+    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
+    /// 
+    public IAsyncEnumerable InvokeAsync(
+        string threadId,
+        AzureAIInvocationOptions? options,
+        KernelArguments? arguments = null,
+        Kernel? kernel = null,
+        CancellationToken cancellationToken = default)
+    {
+        return ActivityExtensions.RunWithActivityAsync(
+            () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
+            () => InternalInvokeAsync(),
+            cancellationToken);
+
+        async IAsyncEnumerable InternalInvokeAsync()
+        {
+            kernel ??= this.Kernel;
+            arguments = this.MergeArguments(arguments);
+
+            await foreach ((bool isVisible, ChatMessageContent message) in AgentThreadActions.InvokeAsync(this, this.Client, threadId, options, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false))
+            {
+                if (isVisible)
+                {
+                    yield return message;
+                }
+            }
+        }
+    }
+
+    /// 
+    /// Invokes the assistant on the specified thread with streaming response.
+    /// 
+    /// The thread identifier.
+    /// Optional arguments to pass to the agent's invocation, including any .
+    /// The  containing services, plugins, and other state for use by the agent.
+    /// Optional receiver of the completed messages that are generated.
+    /// The  to monitor for cancellation requests. The default is .
+    /// An asynchronous enumeration of messages.
+    /// 
+    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
+    /// 
+    public IAsyncEnumerable InvokeStreamingAsync(
+        string threadId,
+        KernelArguments? arguments = null,
+        Kernel? kernel = null,
+        ChatHistory? messages = null,
+        CancellationToken cancellationToken = default)
+    {
+        return this.InvokeStreamingAsync(threadId, options: null, arguments, kernel, messages, cancellationToken);
+    }
+
+    /// 
+    /// Invokes the assistant on the specified thread with streaming response.
+    /// 
+    /// The thread identifier.
+    /// Optional invocation options.
+    /// Optional arguments to pass to the agent's invocation, including any .
+    /// The  containing services, plugins, and other state for use by the agent.
+    /// Optional receiver of the completed messages that are generated.
+    /// The  to monitor for cancellation requests. The default is .
+    /// An asynchronous enumeration of messages.
+    /// 
+    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
+    /// 
+    public IAsyncEnumerable InvokeStreamingAsync(
+        string threadId,
+        AzureAIInvocationOptions? options,
+        KernelArguments? arguments = null,
+        Kernel? kernel = null,
+        ChatHistory? messages = null,
+        CancellationToken cancellationToken = default)
+    {
+        return ActivityExtensions.RunWithActivityAsync(
+            () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
+            () => InternalInvokeStreamingAsync(),
+            cancellationToken);
+
+        IAsyncEnumerable InternalInvokeStreamingAsync()
+        {
+            kernel ??= this.Kernel;
+            arguments = this.MergeArguments(arguments);
+
+            return AgentThreadActions.InvokeStreamingAsync(this, this.Client, threadId, messages, options, this.Logger, kernel, arguments, cancellationToken);
+        }
+    }
+
+    /// 
+    protected override IEnumerable GetChannelKeys()
+    {
+        // Distinguish from other channel types.
+        yield return typeof(AzureAIChannel).FullName!;
+        // Distinguish based on client instance.
+        yield return this.Client.GetHashCode().ToString();
+    }
+
+    /// 
+    protected override async Task CreateChannelAsync(CancellationToken cancellationToken)
+    {
+        this.Logger.LogAzureAIAgentCreatingChannel(nameof(CreateChannelAsync), nameof(AzureAIChannel));
+
+        string threadId = await AgentThreadActions.CreateThreadAsync(this.Client, cancellationToken).ConfigureAwait(false);
+
+        this.Logger.LogInformation("[{MethodName}] Created assistant thread: {ThreadId}", nameof(CreateChannelAsync), threadId);
+
+        AzureAIChannel channel =
+            new(this.Client, threadId)
+            {
+                Logger = this.ActiveLoggerFactory.CreateLogger()
+            };
+
+        this.Logger.LogAzureAIAgentCreatedChannel(nameof(CreateChannelAsync), nameof(AzureAIChannel), threadId);
+
+        return channel;
+    }
+
+    internal Task GetInstructionsAsync(Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken)
+    {
+        return this.FormatInstructionsAsync(kernel, arguments, cancellationToken);
+    }
+
+    /// 
+    protected override async Task RestoreChannelAsync(string channelState, CancellationToken cancellationToken)
+    {
+        string threadId = channelState;
+
+        this.Logger.LogAzureAIAgentRestoringChannel(nameof(RestoreChannelAsync), nameof(AzureAIChannel), threadId);
+
+        AgentThread thread = await this.Client.GetThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
+
+        this.Logger.LogAzureAIAgentRestoredChannel(nameof(RestoreChannelAsync), nameof(AzureAIChannel), threadId);
+
+        return new AzureAIChannel(this.Client, thread.Id);
+    }
+}
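
A usage sketch for the agent above. The agent definition, client, and thread identifier are assumed to be obtained elsewhere from the Azure AI Foundry project; the message text is arbitrary.

using System;
using System.Threading.Tasks;
using Azure.AI.Projects;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.AzureAI;
using Microsoft.SemanticKernel.ChatCompletion;

public static class AzureAIAgentSketch
{
    public static async Task RunAsync(Azure.AI.Projects.Agent definition, AgentsClient client, string threadId)
    {
        AzureAIAgent agent = new(definition, client);

        // Append a user message to the thread, then enumerate the agent's visible responses.
        await agent.AddChatMessageAsync(threadId, new ChatMessageContent(AuthorRole.User, "Summarize the open issues."));

        await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
        {
            Console.WriteLine(response.Content);
        }
    }
}
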
diff --git a/dotnet/src/Agents/AzureAI/AzureAIChannel.cs b/dotnet/src/Agents/AzureAI/AzureAIChannel.cs
new file mode 100644
index 000000000000..c3979e10bcb3
--- /dev/null
+++ b/dotnet/src/Agents/AzureAI/AzureAIChannel.cs
@@ -0,0 +1,61 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.AI.Projects;
+using Microsoft.SemanticKernel.Agents.AzureAI.Internal;
+using Microsoft.SemanticKernel.Agents.Extensions;
+using Microsoft.SemanticKernel.Diagnostics;
+
+namespace Microsoft.SemanticKernel.Agents.AzureAI;
+
+/// 
+/// A  specialization for use with .
+/// 
+internal sealed class AzureAIChannel(AgentsClient client, string threadId)
+    : AgentChannel
+{
+    /// 
+    protected override async Task ReceiveAsync(IEnumerable history, CancellationToken cancellationToken)
+    {
+        foreach (ChatMessageContent message in history)
+        {
+            await AgentThreadActions.CreateMessageAsync(client, threadId, message, cancellationToken).ConfigureAwait(false);
+        }
+    }
+
+    /// 
+    protected override IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(
+        AzureAIAgent agent,
+        CancellationToken cancellationToken)
+    {
+        return ActivityExtensions.RunWithActivityAsync(
+            () => ModelDiagnostics.StartAgentInvocationActivity(agent.Id, agent.GetDisplayName(), agent.Description),
+            () => AgentThreadActions.InvokeAsync(agent, client, threadId, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken),
+            cancellationToken);
+    }
+
+    /// 
+    protected override IAsyncEnumerable InvokeStreamingAsync(AzureAIAgent agent, IList messages, CancellationToken cancellationToken = default)
+    {
+        return ActivityExtensions.RunWithActivityAsync(
+            () => ModelDiagnostics.StartAgentInvocationActivity(agent.Id, agent.GetDisplayName(), agent.Description),
+            () => AgentThreadActions.InvokeStreamingAsync(agent, client, threadId, messages, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken),
+            cancellationToken);
+    }
+
+    /// 
+    protected override IAsyncEnumerable GetHistoryAsync(CancellationToken cancellationToken)
+    {
+        return AgentThreadActions.GetMessagesAsync(client, threadId, cancellationToken);
+    }
+
+    /// 
+    protected override Task ResetAsync(CancellationToken cancellationToken = default)
+    {
+        return client.DeleteThreadAsync(threadId, cancellationToken);
+    }
+
+    /// 
+    protected override string Serialize() { return threadId; }
+}
diff --git a/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs b/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs
new file mode 100644
index 000000000000..9082225ef698
--- /dev/null
+++ b/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs
@@ -0,0 +1,116 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Linq;
+using System.Net.Http;
+using Azure.AI.Projects;
+using Azure.Core;
+using Azure.Core.Pipeline;
+using Microsoft.SemanticKernel.Http;
+
+namespace Microsoft.SemanticKernel.Agents.AzureAI;
+
+/// 
+/// Provides an  for use by .
+/// 
+public sealed class AzureAIClientProvider
+{
+    private AgentsClient? _agentsClient;
+
+    /// 
+    /// Gets an active client instance.
+    /// 
+    public AIProjectClient Client { get; }
+
+    /// 
+    /// Gets an active assistant client instance.
+    /// 
+    public AgentsClient AgentsClient => this._agentsClient ??= this.Client.GetAgentsClient();
+
+    /// 
+    /// Configuration keys required for  management.
+    /// 
+    internal IReadOnlyList ConfigurationKeys { get; }
+
+    private AzureAIClientProvider(AIProjectClient client, IEnumerable keys)
+    {
+        this.Client = client;
+        this.ConfigurationKeys = keys.ToArray();
+    }
+
+    /// 
+    /// Produces a .
+    /// 
+    /// The Azure AI Foundry project connection string, in the form `endpoint;subscription_id;resource_group_name;project_name`.
+    ///  A credential used to authenticate to an Azure Service.
+    /// A custom  for HTTP requests.
+    public static AzureAIClientProvider FromConnectionString(
+        string connectionString,
+        TokenCredential credential,
+        HttpClient? httpClient = null)
+    {
+        Verify.NotNullOrWhiteSpace(connectionString, nameof(connectionString));
+        Verify.NotNull(credential, nameof(credential));
+
+        AIProjectClientOptions clientOptions = CreateAzureClientOptions(httpClient);
+
+        return new(new AIProjectClient(connectionString, credential, clientOptions), CreateConfigurationKeys(connectionString, httpClient));
+    }
+
+    /// 
+    /// Provides a client instance directly.
+    /// 
+    public static AzureAIClientProvider FromClient(AIProjectClient client)
+    {
+        return new(client, [client.GetType().FullName!, client.GetHashCode().ToString()]);
+    }
+
+    private static AIProjectClientOptions CreateAzureClientOptions(HttpClient? httpClient)
+    {
+        AIProjectClientOptions options =
+            new()
+            {
+                Diagnostics = {
+                    ApplicationId = HttpHeaderConstant.Values.UserAgent,
+                }
+            };
+
+        options.AddPolicy(new SemanticKernelHeadersPolicy(), HttpPipelinePosition.PerCall);
+
+        if (httpClient is not null)
+        {
+            options.Transport = new HttpClientTransport(httpClient);
+            // Disable retry policy if and only if a custom HttpClient is provided.
+            options.RetryPolicy = new RetryPolicy(maxRetries: 0);
+        }
+
+        return options;
+    }
+
+    private static IEnumerable CreateConfigurationKeys(string connectionString, HttpClient? httpClient)
+    {
+        yield return connectionString;
+
+        if (httpClient is not null)
+        {
+            if (httpClient.BaseAddress is not null)
+            {
+                yield return httpClient.BaseAddress.AbsoluteUri;
+            }
+
+            foreach (string header in httpClient.DefaultRequestHeaders.SelectMany(h => h.Value))
+            {
+                yield return header;
+            }
+        }
+    }
+
+    private class SemanticKernelHeadersPolicy : HttpPipelineSynchronousPolicy
+    {
+        public override void OnSendingRequest(HttpMessage message)
+        {
+            message.Request.Headers.Add(
+                HttpHeaderConstant.Names.SemanticKernelVersion,
+                HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AzureAIAgent)));
+        }
+    }
+}
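
A sketch of the two ways to obtain a provider shown above; the connection string is a placeholder and AzureCliCredential is only one possible TokenCredential.

using Azure.AI.Projects;
using Azure.Identity;
using Microsoft.SemanticKernel.Agents.AzureAI;

// From a connection string: the provider builds the client and records configuration keys.
AzureAIClientProvider fromConnectionString = AzureAIClientProvider.FromConnectionString(
    "<endpoint>;<subscription_id>;<resource_group_name>;<project_name>",
    new AzureCliCredential());

// From an existing client: the client type and hash code become the configuration keys.
AIProjectClient existingClient = fromConnectionString.Client;
AzureAIClientProvider fromClient = AzureAIClientProvider.FromClient(existingClient);

AgentsClient agentsClient = fromClient.AgentsClient;
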
diff --git a/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs b/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs
new file mode 100644
index 000000000000..a1153523b03e
--- /dev/null
+++ b/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs
@@ -0,0 +1,109 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Text.Json.Serialization;
+
+namespace Microsoft.SemanticKernel.Agents.AzureAI;
+
+/// 
+/// Defines per-invocation execution options that override the assistant definition.
+/// 
+/// 
+/// This class is not applicable to  usage.
+/// 
+public sealed class AzureAIInvocationOptions
+{
+    /// 
+    /// Gets the AI model targeted by the agent.
+    /// 
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public string? ModelName { get; init; }
+
+    /// 
+    /// Gets the additional instructions.
+    /// 
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public string? AdditionalInstructions { get; init; }
+
+    /// 
+    /// Gets the additional messages to add to the thread.
+    /// 
+    /// 
+    /// Only supports messages with role = User or Assistant.
+    /// 
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public IReadOnlyList? AdditionalMessages { get; init; }
+
+    /// 
+    /// Gets a value that indicates whether the code_interpreter tool is enabled.
+    /// 
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
+    public bool EnableCodeInterpreter { get; init; }
+
+    /// 
+    /// Gets a value that indicates whether the file_search tool is enabled.
+    /// 
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
+    public bool EnableFileSearch { get; init; }
+
+    /// 
+    /// Gets a value that indicates whether the JSON response format is enabled.
+    /// 
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public bool? EnableJsonResponse { get; init; }
+
+    /// 
+    /// Gets the maximum number of completion tokens that can be used over the course of the run.
+    /// 
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public int? MaxCompletionTokens { get; init; }
+
+    /// 
+    /// Gets the maximum number of prompt tokens that can be used over the course of the run.
+    /// 
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public int? MaxPromptTokens { get; init; }
+
+    /// 
+    /// Gets a value that indicates whether parallel function calling is enabled during tool use.
+    /// 
+    /// 
+    /// true if parallel function calling is enabled during tool use; otherwise, false. The default is true.
+    /// 
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public bool? ParallelToolCallsEnabled { get; init; }
+
+    /// 
+    /// Gets the number of recent messages that the thread will be truncated to.
+    /// 
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public int? TruncationMessageCount { get; init; }
+
+    /// 
+    /// Gets the sampling temperature to use, between 0 and 2.
+    /// 
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public float? Temperature { get; init; }
+
+    /// 
+    /// Gets the probability mass of tokens whose results are considered in nucleus sampling.
+    /// 
+    /// 
+    /// It's recommended to set this property or Temperature, but not both.
+    ///
+    /// Nucleus sampling is an alternative to sampling with temperature where the model
+    /// considers the results of the tokens with TopP probability mass.
+    /// For example, 0.1 means only the tokens comprising the top 10% probability mass are considered.
+    /// 
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public float? TopP { get; init; }
+
+    /// 
+    /// Gets a set of up to 16 key/value pairs that can be attached to an agent, used for
+    /// storing additional information about that object in a structured format.
+    /// 
+    /// 
+    /// Keys can be up to 64 characters in length, and values can be up to 512 characters in length.
+    /// 
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public IReadOnlyDictionary? Metadata { get; init; }
+}
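These options are plain init-only properties that override the agent definition for a single run. Below is a minimal, illustrative sketch of populating them; the model name and values are hypothetical, not recommendations.

```csharp
using System.Collections.Generic;
using Microsoft.SemanticKernel.Agents.AzureAI;

// Minimal sketch: per-invocation overrides for a single agent run.
AzureAIInvocationOptions options = new()
{
    ModelName = "gpt-4o",                         // hypothetical model deployment name
    AdditionalInstructions = "Answer concisely.",
    EnableCodeInterpreter = true,
    MaxCompletionTokens = 1024,
    Temperature = 0.2f,
    Metadata = new Dictionary<string, string> { ["purpose"] = "demo" },
};
```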
diff --git a/dotnet/src/Agents/AzureAI/AzureAIThreadMessageFactory.cs b/dotnet/src/Agents/AzureAI/AzureAIThreadMessageFactory.cs
new file mode 100644
index 000000000000..d37242c522ed
--- /dev/null
+++ b/dotnet/src/Agents/AzureAI/AzureAIThreadMessageFactory.cs
@@ -0,0 +1,23 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using Azure.AI.Projects;
+using Microsoft.SemanticKernel.Agents.AzureAI.Internal;
+
+namespace Microsoft.SemanticKernel.Agents.AzureAI;
+
+/// 
+/// Exposes patterns for creating and managing agent threads.
+/// 
+/// 
+/// This class supports translation of  from native models.
+/// 
+public static class AzureAIThreadMessageFactory
+{
+    /// 
+    /// Translates  to  for thread creation.
+    /// 
+    public static IEnumerable Translate(IEnumerable messages)
+    {
+        return AgentMessageFactory.GetThreadMessages(messages);
+    }
+}
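The factory simply forwards to the internal AgentMessageFactory, so callers only work with ChatMessageContent. A short usage sketch, assuming an existing chat history:

```csharp
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.AzureAI;
using Microsoft.SemanticKernel.ChatCompletion;

// Sketch: translate an existing chat history into thread-creation messages.
var history = new[]
{
    new ChatMessageContent(AuthorRole.User, "Summarize the attached report."),
    new ChatMessageContent(AuthorRole.Assistant, "Sure - which sections matter most?"),
};

// The result can then be supplied when creating the agent thread.
var threadMessages = AzureAIThreadMessageFactory.Translate(history);
```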
diff --git a/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs b/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs
new file mode 100644
index 000000000000..7d4cf718b1e0
--- /dev/null
+++ b/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs
@@ -0,0 +1,120 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.AI.Projects;
+using Microsoft.SemanticKernel.Agents.AzureAI.Internal;
+
+namespace Microsoft.SemanticKernel.Agents.AzureAI.Extensions;
+
+/// 
+/// Extensions associated with an Agent run processing.
+/// 
+/// 
+/// Improves testability.
+/// 
+internal static class AgentRunExtensions
+{
+    public static async IAsyncEnumerable GetStepsAsync(
+        this AgentsClient client,
+        ThreadRun run,
+        [EnumeratorCancellation] CancellationToken cancellationToken)
+    {
+        PageableList? steps = null;
+        do
+        {
+            steps = await client.GetRunStepsAsync(run, after: steps?.LastId, cancellationToken: cancellationToken).ConfigureAwait(false);
+            foreach (RunStep step in steps)
+            {
+                yield return step;
+            }
+        }
+        while (steps?.HasMore ?? false);
+    }
+
+    public static async Task CreateAsync(
+        this AgentsClient client,
+        string threadId,
+        AzureAIAgent agent,
+        string? instructions,
+        ToolDefinition[] tools,
+        AzureAIInvocationOptions? invocationOptions,
+        CancellationToken cancellationToken)
+    {
+        TruncationObject? truncationStrategy = GetTruncationStrategy(invocationOptions);
+        BinaryData? responseFormat = GetResponseFormat(invocationOptions);
+        return
+            await client.CreateRunAsync(
+                threadId,
+                agent.Definition.Id,
+                overrideModelName: invocationOptions?.ModelName,
+                instructions,
+                additionalInstructions: invocationOptions?.AdditionalInstructions,
+                additionalMessages: AgentMessageFactory.GetThreadMessages(invocationOptions?.AdditionalMessages).ToArray(),
+                overrideTools: tools,
+                stream: false,
+                temperature: invocationOptions?.Temperature,
+                topP: invocationOptions?.TopP,
+                maxPromptTokens: invocationOptions?.MaxPromptTokens,
+                maxCompletionTokens: invocationOptions?.MaxCompletionTokens,
+                truncationStrategy,
+                toolChoice: null,
+                responseFormat,
+                parallelToolCalls: invocationOptions?.ParallelToolCallsEnabled,
+                metadata: invocationOptions?.Metadata,
+                include: null,
+                cancellationToken).ConfigureAwait(false);
+    }
+
+    private static BinaryData? GetResponseFormat(AzureAIInvocationOptions? invocationOptions)
+    {
+        return invocationOptions?.EnableJsonResponse == true ?
+            BinaryData.FromString(ResponseFormat.JsonObject.ToString()) :
+            null;
+    }
+
+    private static TruncationObject? GetTruncationStrategy(AzureAIInvocationOptions? invocationOptions)
+    {
+        return invocationOptions?.TruncationMessageCount == null ?
+            null :
+            new(TruncationStrategy.LastMessages)
+            {
+                LastMessages = invocationOptions.TruncationMessageCount
+            };
+    }
+
+    public static IAsyncEnumerable CreateStreamingAsync(
+        this AgentsClient client,
+        string threadId,
+        AzureAIAgent agent,
+        string? instructions,
+        ToolDefinition[] tools,
+        AzureAIInvocationOptions? invocationOptions,
+        CancellationToken cancellationToken)
+    {
+        TruncationObject? truncationStrategy = GetTruncationStrategy(invocationOptions);
+        BinaryData? responseFormat = GetResponseFormat(invocationOptions);
+        return
+            client.CreateRunStreamingAsync(
+                threadId,
+                agent.Definition.Id,
+                overrideModelName: invocationOptions?.ModelName,
+                instructions,
+                additionalInstructions: invocationOptions?.AdditionalInstructions,
+                additionalMessages: AgentMessageFactory.GetThreadMessages(invocationOptions?.AdditionalMessages).ToArray(),
+                overrideTools: tools,
+                temperature: invocationOptions?.Temperature,
+                topP: invocationOptions?.TopP,
+                maxPromptTokens: invocationOptions?.MaxPromptTokens,
+                maxCompletionTokens: invocationOptions?.MaxCompletionTokens,
+                truncationStrategy,
+                toolChoice: null,
+                responseFormat,
+                parallelToolCalls: invocationOptions?.ParallelToolCallsEnabled,
+                metadata: invocationOptions?.Metadata,
+                cancellationToken);
+    }
+}
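The private helpers above decide how the optional invocation settings become run parameters: a TruncationMessageCount maps to a LastMessages truncation strategy, and EnableJsonResponse maps to the json_object response format. A standalone sketch of that same mapping, using the Azure.AI.Projects types already referenced in this file:

```csharp
using System;
using Azure.AI.Projects;
using Microsoft.SemanticKernel.Agents.AzureAI;

// Sketch mirroring GetTruncationStrategy/GetResponseFormat above.
static TruncationObject? ToTruncation(AzureAIInvocationOptions? options) =>
    options?.TruncationMessageCount is null
        ? null
        : new(TruncationStrategy.LastMessages) { LastMessages = options.TruncationMessageCount };

static BinaryData? ToResponseFormat(AzureAIInvocationOptions? options) =>
    options?.EnableJsonResponse == true
        ? BinaryData.FromString(ResponseFormat.JsonObject.ToString())
        : null;
```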
diff --git a/dotnet/src/Agents/AzureAI/Extensions/KernelFunctionExtensions.cs b/dotnet/src/Agents/AzureAI/Extensions/KernelFunctionExtensions.cs
new file mode 100644
index 000000000000..e6b9c722eabb
--- /dev/null
+++ b/dotnet/src/Agents/AzureAI/Extensions/KernelFunctionExtensions.cs
@@ -0,0 +1,29 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using Azure.AI.Projects;
+
+namespace Microsoft.SemanticKernel.Agents.AzureAI;
+
+/// 
+/// Extensions for  to support Azure AI specific operations.
+/// 
+public static class KernelFunctionExtensions
+{
+    /// 
+    /// Convert  to an Azure AI tool definition.
+    /// 
+    /// The source function
+    /// The plugin name
+    /// An Azure AI tool definition
+    public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string pluginName)
+    {
+        if (function.Metadata.Parameters.Count > 0)
+        {
+            BinaryData parameterData = function.Metadata.CreateParameterSpec();
+
+            return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName), function.Description, parameterData);
+        }
+
+        return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName), function.Description);
+    }
+}
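A brief usage sketch for the extension above. KernelFunctionFactory is the standard Semantic Kernel factory; the plugin and function names are illustrative:

```csharp
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.AzureAI;

// Sketch: expose a kernel function as an agent tool definition.
KernelFunction getWeather = KernelFunctionFactory.CreateFromMethod(
    (string city) => $"Sunny in {city}",
    functionName: "GetWeather",
    description: "Gets the current weather for a city.");

var toolDefinition = getWeather.ToToolDefinition("WeatherPlugin");
```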
diff --git a/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs b/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs
new file mode 100644
index 000000000000..621e364acf6a
--- /dev/null
+++ b/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs
@@ -0,0 +1,98 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Linq;
+using Azure.AI.Projects;
+using Microsoft.SemanticKernel.ChatCompletion;
+
+namespace Microsoft.SemanticKernel.Agents.AzureAI.Internal;
+
+/// 
+/// Factory for creating  based on .
+/// 
+/// 
+/// Improves testability.
+/// 
+internal static class AgentMessageFactory
+{
+    /// 
+    /// Translate metadata from a  to be used for a  or
+    /// .
+    /// 
+    /// The message content.
+    public static Dictionary GetMetadata(ChatMessageContent message)
+    {
+        return message.Metadata?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value?.ToString() ?? string.Empty) ?? [];
+    }
+
+    /// 
+    /// Translate attachments from a  to be used for a  or
+    /// 
+    /// The message content.
+    public static IEnumerable GetAttachments(ChatMessageContent message)
+    {
+        return
+            message.Items
+                .OfType()
+                .Select(
+                    fileContent =>
+                        new MessageAttachment(fileContent.FileId, GetToolDefinition(fileContent.Tools).ToList()));
+    }
+
+    /// 
+    /// Translates a set of ChatMessageContent to a set of ThreadMessageOptions.
+    /// 
+    /// A list of ChatMessageContent objects.
+    public static IEnumerable GetThreadMessages(IEnumerable? messages)
+    {
+        if (messages is not null)
+        {
+            foreach (ChatMessageContent message in messages)
+            {
+                string? content = message.Content;
+                if (string.IsNullOrWhiteSpace(content))
+                {
+                    continue;
+                }
+
+                ThreadMessageOptions threadMessage = new(
+                    role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Agent,
+                    content: message.Content)
+                {
+                    Attachments = GetAttachments(message).ToArray(),
+                };
+
+                if (message.Metadata is { Count: > 0 })
+                {
+                    threadMessage.Metadata = GetMetadata(message);
+                }
+
+                yield return threadMessage;
+            }
+        }
+    }
+
+    private static readonly Dictionary s_toolMetadata = new()
+    {
+        { AzureAIAgent.Tools.CodeInterpreter, new CodeInterpreterToolDefinition() },
+        { AzureAIAgent.Tools.FileSearch, new FileSearchToolDefinition() },
+    };
+
+    private static IEnumerable GetToolDefinition(IEnumerable? tools)
+    {
+        if (tools is null)
+        {
+            yield break;
+        }
+
+        foreach (string tool in tools)
+        {
+            if (s_toolMetadata.TryGetValue(tool, out ToolDefinition? toolDefinition))
+            {
+                yield return toolDefinition;
+            }
+        }
+    }
+}
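One behavioral detail of GetThreadMessages worth noting: only the User role is preserved, every other author role becomes MessageRole.Agent, and messages with empty content are skipped. A reduced sketch of the role mapping:

```csharp
using Azure.AI.Projects;
using Microsoft.SemanticKernel.ChatCompletion;

// Sketch of the role mapping applied when building thread messages.
static MessageRole MapRole(AuthorRole role) =>
    role == AuthorRole.User ? MessageRole.User : MessageRole.Agent;
```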
diff --git a/dotnet/src/Agents/AzureAI/Internal/AgentThreadActions.cs b/dotnet/src/Agents/AzureAI/Internal/AgentThreadActions.cs
new file mode 100644
index 000000000000..167349b63d11
--- /dev/null
+++ b/dotnet/src/Agents/AzureAI/Internal/AgentThreadActions.cs
@@ -0,0 +1,860 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.ClientModel;
+using System.Collections.Generic;
+using System.Linq;
+using System.Net;
+using System.Runtime.CompilerServices;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure;
+using Azure.AI.Projects;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Agents.AzureAI.Extensions;
+using Microsoft.SemanticKernel.Agents.Extensions;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.FunctionCalling;
+
+namespace Microsoft.SemanticKernel.Agents.AzureAI.Internal;
+
+/// 
+/// Actions associated with an Open Assistant thread.
+/// 
+internal static class AgentThreadActions
+{
+    private static readonly HashSet s_pollingStatuses =
+    [
+        RunStatus.Queued,
+        RunStatus.InProgress,
+        RunStatus.Cancelling,
+    ];
+
+    private static readonly HashSet s_failureStatuses =
+    [
+        RunStatus.Expired,
+        RunStatus.Failed,
+        RunStatus.Cancelled,
+    ];
+
+    /// 
+    /// Create a new assistant thread.
+    /// 
+    /// The assistant client
+    /// The  to monitor for cancellation requests. The default is .
+    /// The thread identifier
+    public static async Task CreateThreadAsync(AgentsClient client, CancellationToken cancellationToken = default)
+    {
+        AgentThread thread = await client.CreateThreadAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
+
+        return thread.Id;
+    }
+
+    /// 
+    /// Create a message in the specified thread.
+    /// 
+    /// The assistant client
+    /// The thread identifier
+    /// The message to add
+    /// The  to monitor for cancellation requests. The default is .
+    ///  if a system message is present, without taking any other action
+    public static async Task CreateMessageAsync(AgentsClient client, string threadId, ChatMessageContent message, CancellationToken cancellationToken)
+    {
+        if (message.Items.Any(i => i is FunctionCallContent))
+        {
+            return;
+        }
+
+        string? content = message.Content;
+        if (string.IsNullOrWhiteSpace(content))
+        {
+            return;
+        }
+
+        await client.CreateMessageAsync(
+            threadId,
+            role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Agent,
+            content,
+            attachments: AgentMessageFactory.GetAttachments(message).ToArray(),
+            metadata: AgentMessageFactory.GetMetadata(message),
+            cancellationToken).ConfigureAwait(false);
+    }
+
+    /// 
+    /// Retrieves the thread messages.
+    /// 
+    /// The assistant client
+    /// The thread identifier
+    /// The  to monitor for cancellation requests. The default is .
+    /// Asynchronous enumeration of messages.
+    public static async IAsyncEnumerable GetMessagesAsync(AgentsClient client, string threadId, [EnumeratorCancellation] CancellationToken cancellationToken)
+    {
+        Dictionary agentNames = []; // Cache agent names by their identifier
+
+        string? lastId = null;
+        PageableList? messages = null;
+        do
+        {
+            messages = await client.GetMessagesAsync(threadId, runId: null, limit: null, ListSortOrder.Descending, after: lastId, before: null, cancellationToken).ConfigureAwait(false);
+            foreach (ThreadMessage message in messages)
+            {
+                lastId = message.Id;
+                string? assistantName = null;
+                if (!string.IsNullOrWhiteSpace(message.AssistantId) &&
+                    !agentNames.TryGetValue(message.AssistantId, out assistantName))
+                {
+                    Azure.AI.Projects.Agent assistant = await client.GetAgentAsync(message.AssistantId, cancellationToken).ConfigureAwait(false);
+                    if (!string.IsNullOrWhiteSpace(assistant.Name))
+                    {
+                        agentNames.Add(assistant.Id, assistant.Name);
+                    }
+                }
+
+                assistantName ??= message.AssistantId;
+
+                ChatMessageContent content = GenerateMessageContent(assistantName, message);
+
+                if (content.Items.Count > 0)
+                {
+                    yield return content;
+                }
+            }
+        } while (messages?.HasMore ?? false);
+    }
+
+    /// 
+    /// Invoke the assistant on the specified thread.
+    /// In the enumeration returned by this method, a message is considered visible if it is intended to be displayed to the user.
+    /// An example of a non-visible message is function content for functions that are executed automatically.
+    /// 
+    /// The assistant agent to interact with the thread.
+    /// The assistant client
+    /// The thread identifier
+    /// Options to utilize for the invocation
+    /// The logger to utilize (might be agent or channel scoped)
+    /// The  plugins and other state.
+    /// Optional arguments to pass to the agent's invocation, including any .
+    /// The  to monitor for cancellation requests. The default is .
+    /// Asynchronous enumeration of messages.
+    public static async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(
+        AzureAIAgent agent,
+        AgentsClient client,
+        string threadId,
+        AzureAIInvocationOptions? invocationOptions,
+        ILogger logger,
+        Kernel kernel,
+        KernelArguments? arguments,
+        [EnumeratorCancellation] CancellationToken cancellationToken)
+    {
+        logger.LogAzureAIAgentCreatingRun(nameof(InvokeAsync), threadId);
+
+        List tools = new(agent.Definition.Tools);
+
+        // Add unique functions from the Kernel which are not already present in the agent's tools
+        var functionToolNames = new HashSet(tools.OfType().Select(t => t.Name));
+        var functionTools = kernel.Plugins
+            .SelectMany(kp => kp.Select(kf => kf.ToToolDefinition(kp.Name)))
+            .Where(tool => !functionToolNames.Contains(tool.Name));
+        tools.AddRange(functionTools);
+
+        string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
+
+        ThreadRun run = await client.CreateAsync(threadId, agent, instructions, [.. tools], invocationOptions, cancellationToken).ConfigureAwait(false);
+
+        logger.LogAzureAIAgentCreatedRun(nameof(InvokeAsync), run.Id, threadId);
+
+        FunctionCallsProcessor functionProcessor = new(logger);
+        // This matches current behavior.  Will be configurable upon integrating with `FunctionChoice` (#6795/#5200)
+        FunctionChoiceBehaviorOptions functionOptions = new() { AllowConcurrentInvocation = true, AllowParallelCalls = true };
+
+        // Evaluate status and process steps and messages, as encountered.
+        HashSet processedStepIds = [];
+        Dictionary functionSteps = [];
+        do
+        {
+            // Check for cancellation
+            cancellationToken.ThrowIfCancellationRequested();
+
+            // Poll run and steps until actionable
+            await PollRunStatusAsync().ConfigureAwait(false);
+
+            // Is in terminal state?
+            if (s_failureStatuses.Contains(run.Status))
+            {
+                throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}");
+            }
+
+            RunStep[] steps = await client.GetStepsAsync(run, cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false);
+
+            // Is tool action required?
+            if (run.Status == RunStatus.RequiresAction)
+            {
+                logger.LogAzureAIAgentProcessingRunSteps(nameof(InvokeAsync), run.Id, threadId);
+
+                // Execute functions in parallel and post results at once.
+                FunctionCallContent[] functionCalls = steps.SelectMany(step => ParseFunctionStep(agent, step)).ToArray();
+                if (functionCalls.Length > 0)
+                {
+                    // Emit function-call content
+                    ChatMessageContent functionCallMessage = GenerateFunctionCallContent(agent.GetName(), functionCalls);
+                    yield return (IsVisible: false, Message: functionCallMessage);
+
+                    // Invoke functions for each tool-step
+                    FunctionResultContent[] functionResults =
+                        await functionProcessor.InvokeFunctionCallsAsync(
+                            functionCallMessage,
+                            (_) => true,
+                            functionOptions,
+                            kernel,
+                            isStreaming: false,
+                            cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false);
+
+                    // Capture function-call for message processing
+                    foreach (FunctionResultContent functionCall in functionResults)
+                    {
+                        functionSteps.Add(functionCall.CallId!, functionCall);
+                    }
+
+                    // Process tool output
+                    ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults);
+
+                    await client.SubmitToolOutputsToRunAsync(run, toolOutputs, cancellationToken).ConfigureAwait(false);
+                }
+
+                logger.LogAzureAIAgentProcessedRunSteps(nameof(InvokeAsync), functionCalls.Length, run.Id, threadId);
+            }
+
+            // Enumerate completed messages
+            logger.LogAzureAIAgentProcessingRunMessages(nameof(InvokeAsync), run.Id, threadId);
+
+            IEnumerable completedStepsToProcess =
+                steps
+                    .Where(s => s.CompletedAt.HasValue && !processedStepIds.Contains(s.Id))
+                    .OrderBy(s => s.CreatedAt);
+
+            int messageCount = 0;
+            foreach (RunStep completedStep in completedStepsToProcess)
+            {
+                if (completedStep.Type == RunStepType.ToolCalls)
+                {
+                    RunStepToolCallDetails toolDetails = (RunStepToolCallDetails)completedStep.StepDetails;
+                    foreach (RunStepToolCall toolCall in toolDetails.ToolCalls)
+                    {
+                        bool isVisible = false;
+                        ChatMessageContent? content = null;
+
+                        // Process code-interpreter content
+                        if (toolCall is RunStepCodeInterpreterToolCall codeTool)
+                        {
+                            content = GenerateCodeInterpreterContent(agent.GetName(), codeTool.Input, completedStep);
+                            isVisible = true;
+                        }
+                        // Process function result content
+                        else if (toolCall is RunStepFunctionToolCall functionTool)
+                        {
+                            FunctionResultContent functionStep = functionSteps[functionTool.Id]; // Function step always captured on invocation
+                            content = GenerateFunctionResultContent(agent.GetName(), [functionStep], completedStep);
+                        }
+
+                        if (content is not null)
+                        {
+                            ++messageCount;
+
+                            yield return (isVisible, Message: content);
+                        }
+                    }
+                }
+                else if (completedStep.Type == RunStepType.MessageCreation)
+                {
+                    // Retrieve the message
+                    RunStepMessageCreationDetails messageDetails = (RunStepMessageCreationDetails)completedStep.StepDetails;
+                    ThreadMessage? message = await RetrieveMessageAsync(client, threadId, messageDetails.MessageCreation.MessageId, agent.PollingOptions.MessageSynchronizationDelay, cancellationToken).ConfigureAwait(false);
+
+                    if (message is not null)
+                    {
+                        ChatMessageContent content = GenerateMessageContent(agent.GetName(), message, completedStep);
+
+                        if (content.Items.Count > 0)
+                        {
+                            ++messageCount;
+
+                            yield return (IsVisible: true, Message: content);
+                        }
+                    }
+                }
+
+                processedStepIds.Add(completedStep.Id);
+            }
+
+            logger.LogAzureAIAgentProcessedRunMessages(nameof(InvokeAsync), messageCount, run.Id, threadId);
+        }
+        while (RunStatus.Completed != run.Status);
+
+        logger.LogAzureAIAgentCompletedRun(nameof(InvokeAsync), run.Id, threadId);
+
+        // Local function to assist in run polling (participates in method closure).
+        async Task PollRunStatusAsync()
+        {
+            logger.LogAzureAIAgentPollingRunStatus(nameof(PollRunStatusAsync), run.Id, threadId);
+
+            int count = 0;
+
+            do
+            {
+                cancellationToken.ThrowIfCancellationRequested();
+
+                if (count > 0)
+                {
+                    // Reduce polling frequency after a couple attempts
+                    await Task.Delay(agent.PollingOptions.GetPollingInterval(count), cancellationToken).ConfigureAwait(false);
+                }
+
+                ++count;
+
+                try
+                {
+                    run = await client.GetRunAsync(threadId, run.Id, cancellationToken).ConfigureAwait(false);
+                }
+                // The presence of a `Status` code means the server responded with error...always fail in that case
+                catch (ClientResultException clientException) when (clientException.Status <= 0)
+                {
+                    // Check maximum retry count
+                    if (count >= agent.PollingOptions.MaximumRetryCount)
+                    {
+                        throw;
+                    }
+
+                    // Retry for potential transient failure
+                    continue;
+                }
+                catch (AggregateException aggregateException) when (aggregateException.InnerException is ClientResultException innerClientException)
+                {
+                    // The presence of a `Status` code means the server responded with error
+                    if (innerClientException.Status > 0)
+                    {
+                        throw;
+                    }
+
+                    // Check maximum retry count
+                    if (count >= agent.PollingOptions.MaximumRetryCount)
+                    {
+                        throw;
+                    }
+
+                    // Retry for potential transient failure
+                    continue;
+                }
+            }
+            while (s_pollingStatuses.Contains(run.Status));
+
+            logger.LogAzureAIAgentPolledRunStatus(nameof(PollRunStatusAsync), run.Status, run.Id, threadId);
+        }
+    }
+
+    /// 
+    /// Invoke the assistant on the specified thread using streaming.
+    /// 
+    /// The assistant agent to interact with the thread.
+    /// The assistant client
+    /// The thread identifier
+    /// The receiver for the completed messages generated
+    /// Options to utilize for the invocation
+    /// The logger to utilize (might be agent or channel scoped)
+    /// The  plugins and other state.
+    /// Optional arguments to pass to the agent's invocation, including any .
+    /// The  to monitor for cancellation requests. The default is .
+    /// Asynchronous enumeration of messages.
+    /// 
+    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
+    /// 
+    public static async IAsyncEnumerable InvokeStreamingAsync(
+        AzureAIAgent agent,
+        AgentsClient client,
+        string threadId,
+        IList? messages,
+        AzureAIInvocationOptions? invocationOptions,
+        ILogger logger,
+        Kernel kernel,
+        KernelArguments? arguments,
+        [EnumeratorCancellation] CancellationToken cancellationToken)
+    {
+        logger.LogAzureAIAgentCreatingRun(nameof(InvokeAsync), threadId);
+
+        ToolDefinition[]? tools = [.. agent.Definition.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))];
+
+        string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
+
+        // Evaluate status and process steps and messages, as encountered.
+        HashSet processedStepIds = [];
+        Dictionary stepFunctionResults = [];
+        List stepsToProcess = [];
+
+        FunctionCallsProcessor functionProcessor = new(logger);
+        // This matches current behavior.  Will be configurable upon integrating with `FunctionChoice` (#6795/#5200)
+        FunctionChoiceBehaviorOptions functionOptions = new() { AllowConcurrentInvocation = true, AllowParallelCalls = true };
+
+        ThreadRun? run = null;
+        IAsyncEnumerable asyncUpdates = client.CreateStreamingAsync(threadId, agent, instructions, tools, invocationOptions, cancellationToken);
+        do
+        {
+            // Check for cancellation
+            cancellationToken.ThrowIfCancellationRequested();
+
+            stepsToProcess.Clear();
+
+            await foreach (StreamingUpdate update in asyncUpdates.ConfigureAwait(false))
+            {
+                if (update is RunUpdate runUpdate)
+                {
+                    run = runUpdate.Value;
+                }
+                else if (update is MessageContentUpdate contentUpdate)
+                {
+                    switch (contentUpdate.UpdateKind)
+                    {
+                        case StreamingUpdateReason.MessageUpdated:
+                            yield return GenerateStreamingMessageContent(agent.GetName(), contentUpdate);
+                            break;
+                    }
+                }
+                else if (update is RunStepDetailsUpdate detailsUpdate)
+                {
+                    StreamingChatMessageContent? toolContent = GenerateStreamingCodeInterpreterContent(agent.GetName(), detailsUpdate);
+                    if (toolContent != null)
+                    {
+                        yield return toolContent;
+                    }
+                    else if (detailsUpdate.FunctionOutput != null)
+                    {
+                        yield return
+                            new StreamingChatMessageContent(AuthorRole.Assistant, null)
+                            {
+                                AuthorName = agent.Name,
+                                Items = [new StreamingFunctionCallUpdateContent(detailsUpdate.ToolCallId, detailsUpdate.FunctionName, detailsUpdate.FunctionArguments)]
+                            };
+                    }
+                }
+                else if (update is RunStepUpdate stepUpdate)
+                {
+                    switch (stepUpdate.UpdateKind)
+                    {
+                        case StreamingUpdateReason.RunStepCompleted:
+                            stepsToProcess.Add(stepUpdate.Value);
+                            break;
+                        default:
+                            break;
+                    }
+                }
+            }
+
+            if (run == null)
+            {
+                throw new KernelException($"Agent Failure - Run not created for thread: {threadId}");
+            }
+
+            // Is in terminal state?
+            if (s_failureStatuses.Contains(run.Status))
+            {
+                throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}");
+            }
+
+            if (run.Status == RunStatus.RequiresAction)
+            {
+                RunStep[] activeSteps =
+                    await client.GetStepsAsync(run, cancellationToken)
+                    .Where(step => step.Status == RunStepStatus.InProgress)
+                    .ToArrayAsync(cancellationToken).ConfigureAwait(false);
+
+                // Capture map between the tool call and its associated step
+                Dictionary toolMap = [];
+                foreach (RunStep step in activeSteps)
+                {
+                    RunStepToolCallDetails toolCallDetails = (RunStepToolCallDetails)step.StepDetails;
+                    foreach (RunStepToolCall stepDetails in toolCallDetails.ToolCalls)
+                    {
+                        toolMap[stepDetails.Id] = step.Id;
+                    }
+                }
+
+                // Execute functions in parallel and post results at once.
+                FunctionCallContent[] functionCalls = activeSteps.SelectMany(step => ParseFunctionStep(agent, step)).ToArray();
+                if (functionCalls.Length > 0)
+                {
+                    // Emit function-call content
+                    ChatMessageContent functionCallMessage = GenerateFunctionCallContent(agent.GetName(), functionCalls);
+                    messages?.Add(functionCallMessage);
+
+                    FunctionResultContent[] functionResults =
+                        await functionProcessor.InvokeFunctionCallsAsync(
+                            functionCallMessage,
+                            (_) => true,
+                            functionOptions,
+                            kernel,
+                            isStreaming: true,
+                            cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false);
+
+                    // Process tool output
+                    ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults);
+                    asyncUpdates = client.SubmitToolOutputsToStreamAsync(run, toolOutputs, cancellationToken);
+
+                    foreach (RunStep step in activeSteps)
+                    {
+                        stepFunctionResults.Add(step.Id, functionResults.Where(result => step.Id == toolMap[result.CallId!]).ToArray());
+                    }
+                }
+            }
+
+            if (stepsToProcess.Count > 0)
+            {
+                logger.LogAzureAIAgentProcessingRunMessages(nameof(InvokeAsync), run!.Id, threadId);
+
+                foreach (RunStep step in stepsToProcess)
+                {
+                    if (step.StepDetails is RunStepMessageCreationDetails messageDetails)
+                    {
+                        ThreadMessage? message =
+                            await RetrieveMessageAsync(
+                                client,
+                                threadId,
+                                messageDetails.MessageCreation.MessageId,
+                                agent.PollingOptions.MessageSynchronizationDelay,
+                                cancellationToken).ConfigureAwait(false);
+
+                        if (message != null)
+                        {
+                            ChatMessageContent content = GenerateMessageContent(agent.GetName(), message, step);
+                            messages?.Add(content);
+                        }
+                    }
+                    else if (step.StepDetails is RunStepToolCallDetails toolDetails)
+                    {
+                        foreach (RunStepToolCall toolCall in toolDetails.ToolCalls)
+                        {
+                            if (toolCall is RunStepFunctionToolCall functionCall)
+                            {
+                                messages?.Add(GenerateFunctionResultContent(agent.GetName(), stepFunctionResults[step.Id], step));
+                                stepFunctionResults.Remove(step.Id);
+                                break;
+                            }
+
+                            if (toolCall is RunStepCodeInterpreterToolCall codeCall)
+                            {
+                                messages?.Add(GenerateCodeInterpreterContent(agent.GetName(), codeCall.Input, step));
+                            }
+                        }
+                    }
+                }
+
+                logger.LogAzureAIAgentProcessedRunMessages(nameof(InvokeAsync), stepsToProcess.Count, run!.Id, threadId);
+            }
+        }
+        while (run?.Status != RunStatus.Completed);
+
+        logger.LogAzureAIAgentCompletedRun(nameof(InvokeAsync), run?.Id ?? "Failed", threadId);
+    }
+
+    private static ChatMessageContent GenerateMessageContent(string? assistantName, ThreadMessage message, RunStep? completedStep = null)
+    {
+        AuthorRole role = new(message.Role.ToString());
+
+        Dictionary? metadata =
+            new()
+            {
+                { nameof(ThreadMessage.CreatedAt), message.CreatedAt },
+                { nameof(ThreadMessage.AssistantId), message.AssistantId },
+                { nameof(ThreadMessage.ThreadId), message.ThreadId },
+                { nameof(ThreadMessage.RunId), message.RunId },
+                { nameof(MessageContentUpdate.MessageId), message.Id },
+            };
+
+        if (completedStep != null)
+        {
+            metadata[nameof(RunStepDetailsUpdate.StepId)] = completedStep.Id;
+            metadata[nameof(RunStep.Usage)] = completedStep.Usage;
+        }
+
+        ChatMessageContent content =
+            new(role, content: null)
+            {
+                AuthorName = assistantName,
+                Metadata = metadata,
+            };
+
+        foreach (MessageContent itemContent in message.ContentItems)
+        {
+            // Process text content
+            if (itemContent is MessageTextContent textContent)
+            {
+                content.Items.Add(new TextContent(textContent.Text));
+
+                foreach (MessageTextAnnotation annotation in textContent.Annotations)
+                {
+                    content.Items.Add(GenerateAnnotationContent(annotation));
+                }
+            }
+            // Process image content
+            else if (itemContent is MessageImageFileContent imageContent)
+            {
+                content.Items.Add(new FileReferenceContent(imageContent.FileId));
+            }
+        }
+
+        return content;
+    }
+
+    private static StreamingChatMessageContent GenerateStreamingMessageContent(string? assistantName, MessageContentUpdate update)
+    {
+        StreamingChatMessageContent content =
+            new(AuthorRole.Assistant, content: null)
+            {
+                AuthorName = assistantName,
+            };
+
+        // Process text content
+        if (!string.IsNullOrEmpty(update.Text))
+        {
+            content.Items.Add(new StreamingTextContent(update.Text));
+        }
+        // Process image content
+        else if (update.ImageFileId != null)
+        {
+            content.Items.Add(new StreamingFileReferenceContent(update.ImageFileId));
+        }
+        // Process annotations
+        else if (update.TextAnnotation != null)
+        {
+            content.Items.Add(GenerateStreamingAnnotationContent(update.TextAnnotation));
+        }
+
+        if (update.Role.HasValue && update.Role.Value != MessageRole.User)
+        {
+            content.Role = new(update.Role.Value.ToString() ?? MessageRole.Agent.ToString());
+        }
+
+        return content;
+    }
+
+    private static StreamingChatMessageContent? GenerateStreamingCodeInterpreterContent(string? assistantName, RunStepDetailsUpdate update)
+    {
+        StreamingChatMessageContent content =
+            new(AuthorRole.Assistant, content: null)
+            {
+                AuthorName = assistantName,
+            };
+
+        // Process text content
+        if (update.CodeInterpreterInput != null)
+        {
+            content.Items.Add(new StreamingTextContent(update.CodeInterpreterInput));
+            content.Metadata = new Dictionary { { AzureAIAgent.CodeInterpreterMetadataKey, true } };
+        }
+
+        if ((update.CodeInterpreterOutputs?.Count ?? 0) > 0)
+        {
+            foreach (RunStepDeltaCodeInterpreterOutput output in update.CodeInterpreterOutputs!)
+            {
+                if (output is RunStepDeltaCodeInterpreterImageOutput imageOutput)
+                {
+                    content.Items.Add(new StreamingFileReferenceContent(imageOutput.Image.FileId));
+                }
+            }
+        }
+
+        return content.Items.Count > 0 ? content : null;
+    }
+
+    private static AnnotationContent GenerateAnnotationContent(MessageTextAnnotation annotation)
+    {
+        string? fileId = null;
+
+        if (annotation is MessageTextFileCitationAnnotation fileCitationAnnotation)
+        {
+            fileId = fileCitationAnnotation.FileId;
+        }
+        else if (annotation is MessageTextFilePathAnnotation filePathAnnotation)
+        {
+            fileId = filePathAnnotation.FileId;
+        }
+
+        return
+            new(annotation.Text)
+            {
+                Quote = annotation.Text,
+                FileId = fileId,
+            };
+    }
+
+    private static StreamingAnnotationContent GenerateStreamingAnnotationContent(TextAnnotationUpdate annotation)
+    {
+        string? fileId = null;
+
+        if (!string.IsNullOrEmpty(annotation.OutputFileId))
+        {
+            fileId = annotation.OutputFileId;
+        }
+        else if (!string.IsNullOrEmpty(annotation.InputFileId))
+        {
+            fileId = annotation.InputFileId;
+        }
+
+        return
+            new(annotation.TextToReplace)
+            {
+                StartIndex = annotation.StartIndex ?? 0,
+                EndIndex = annotation.EndIndex ?? 0,
+                FileId = fileId,
+            };
+    }
+
+    private static ChatMessageContent GenerateCodeInterpreterContent(string agentName, string pythonCode, RunStep completedStep)
+    {
+        Dictionary metadata = GenerateToolCallMetadata(completedStep);
+        metadata[AzureAIAgent.CodeInterpreterMetadataKey] = true;
+
+        return
+            new ChatMessageContent(
+                AuthorRole.Assistant,
+                [
+                    new TextContent(pythonCode)
+                ])
+            {
+                AuthorName = agentName,
+                Metadata = metadata,
+            };
+    }
+
+    private static IEnumerable ParseFunctionStep(AzureAIAgent agent, RunStep step)
+    {
+        if (step.Status == RunStepStatus.InProgress && step.Type == RunStepType.ToolCalls)
+        {
+            RunStepToolCallDetails toolCallDetails = (RunStepToolCallDetails)step.StepDetails;
+            foreach (RunStepToolCall toolCall in toolCallDetails.ToolCalls)
+            {
+                if (toolCall is RunStepFunctionToolCall functionCall)
+                {
+                    (FunctionName nameParts, KernelArguments functionArguments) = ParseFunctionCall(functionCall.Name, functionCall.Arguments);
+
+                    FunctionCallContent content = new(nameParts.Name, nameParts.PluginName, toolCall.Id, functionArguments);
+
+                    yield return content;
+                }
+            }
+        }
+    }
+
+    private static (FunctionName functionName, KernelArguments arguments) ParseFunctionCall(string functionName, string? functionArguments)
+    {
+        FunctionName nameParts = FunctionName.Parse(functionName);
+
+        KernelArguments arguments = [];
+
+        if (!string.IsNullOrWhiteSpace(functionArguments))
+        {
+            foreach (var argumentKvp in JsonSerializer.Deserialize<Dictionary<string, object>>(functionArguments!)!)
+            {
+                arguments[argumentKvp.Key] = argumentKvp.Value.ToString();
+            }
+        }
+
+        return (nameParts, arguments);
+    }
+
+    private static ChatMessageContent GenerateFunctionCallContent(string agentName, IList functionCalls)
+    {
+        ChatMessageContent functionCallContent = new(AuthorRole.Assistant, content: null)
+        {
+            AuthorName = agentName
+        };
+
+        functionCallContent.Items.AddRange(functionCalls);
+
+        return functionCallContent;
+    }
+
+    private static ChatMessageContent GenerateFunctionResultContent(string agentName, IEnumerable functionResults, RunStep completedStep)
+    {
+        ChatMessageContent functionResultContent = new(AuthorRole.Tool, content: null)
+        {
+            AuthorName = agentName,
+            Metadata = GenerateToolCallMetadata(completedStep),
+        };
+
+        foreach (FunctionResultContent functionResult in functionResults)
+        {
+            functionResultContent.Items.Add(
+                new FunctionResultContent(
+                    functionResult.FunctionName,
+                    functionResult.PluginName,
+                    functionResult.CallId,
+                    functionResult.Result));
+        }
+
+        return functionResultContent;
+    }
+
+    private static Dictionary GenerateToolCallMetadata(RunStep completedStep)
+    {
+        return new()
+            {
+                { nameof(RunStep.CreatedAt), completedStep.CreatedAt },
+                { nameof(RunStep.AssistantId), completedStep.AssistantId },
+                { nameof(RunStep.ThreadId), completedStep.ThreadId },
+                { nameof(RunStep.RunId), completedStep.RunId },
+                { nameof(RunStepDetailsUpdate.StepId), completedStep.Id },
+                { nameof(RunStep.Usage), completedStep.Usage },
+            };
+    }
+
+    private static ToolOutput[] GenerateToolOutputs(FunctionResultContent[] functionResults)
+    {
+        ToolOutput[] toolOutputs = new ToolOutput[functionResults.Length];
+
+        for (int index = 0; index < functionResults.Length; ++index)
+        {
+            FunctionResultContent functionResult = functionResults[index];
+
+            object resultValue = functionResult.Result ?? string.Empty;
+
+            if (resultValue is not string textResult)
+            {
+                textResult = JsonSerializer.Serialize(resultValue);
+            }
+
+            toolOutputs[index] = new ToolOutput(functionResult.CallId, textResult!);
+        }
+
+        return toolOutputs;
+    }
+
+    private static async Task RetrieveMessageAsync(AgentsClient client, string threadId, string messageId, TimeSpan syncDelay, CancellationToken cancellationToken)
+    {
+        ThreadMessage? message = null;
+
+        bool retry = false;
+        int count = 0;
+        do
+        {
+            try
+            {
+                message = await client.GetMessageAsync(threadId, messageId, cancellationToken).ConfigureAwait(false);
+            }
+            catch (RequestFailedException exception)
+            {
+                // Step has provided the message-id. Retry if NotFound/404 is returned.
+                // Extremely rarely there might be a synchronization issue between the
+                // assistant response and message-service.
+                retry = exception.Status == (int)HttpStatusCode.NotFound && count < 3;
+            }
+
+            if (retry)
+            {
+                await Task.Delay(syncDelay, cancellationToken).ConfigureAwait(false);
+            }
+
+            ++count;
+        }
+        while (retry);
+
+        return message;
+    }
+}
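GenerateToolOutputs passes string results through unchanged and JSON-serializes everything else before submitting tool output. A condensed sketch of that convention (the call id below is a hypothetical placeholder):

```csharp
using System.Text.Json;
using Azure.AI.Projects;

// Sketch: non-string function results are serialized to JSON before being posted.
object result = new { status = "ok", itemCount = 2 };
string text = result as string ?? JsonSerializer.Serialize(result);
ToolOutput output = new("call_abc123", text); // "call_abc123" is a placeholder call id
```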
diff --git a/dotnet/src/Agents/AzureAI/Logging/AgentThreadActionsLogMessages.cs b/dotnet/src/Agents/AzureAI/Logging/AgentThreadActionsLogMessages.cs
new file mode 100644
index 000000000000..974af70205eb
--- /dev/null
+++ b/dotnet/src/Agents/AzureAI/Logging/AgentThreadActionsLogMessages.cs
@@ -0,0 +1,139 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Diagnostics.CodeAnalysis;
+using Azure.AI.Projects;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Agents.AzureAI.Internal;
+
+namespace Microsoft.SemanticKernel.Agents.AzureAI;
+
+#pragma warning disable SYSLIB1006 // Multiple logging methods cannot use the same event id within a class
+
+/// 
+/// Extensions for logging .
+/// 
+/// 
+/// This extension uses the  to
+/// generate logging code at compile time to achieve optimized code.
+/// 
+[ExcludeFromCodeCoverage]
+internal static partial class AgentThreadActionsLogMessages
+{
+    /// 
+    /// Logs  creating run (started).
+    /// 
+    [LoggerMessage(
+        EventId = 0,
+        Level = LogLevel.Debug,
+        Message = "[{MethodName}] Creating run for thread: {ThreadId}.")]
+    public static partial void LogAzureAIAgentCreatingRun(
+        this ILogger logger,
+        string methodName,
+        string threadId);
+
+    /// 
+    /// Logs  created run (complete).
+    /// 
+    [LoggerMessage(
+        EventId = 0,
+        Level = LogLevel.Information,
+        Message = "[{MethodName}] Created run for thread: {RunId}/{ThreadId}.")]
+    public static partial void LogAzureAIAgentCreatedRun(
+        this ILogger logger,
+        string methodName,
+        string runId,
+        string threadId);
+
+    /// 
+    /// Logs  completed run (complete).
+    /// 
+    [LoggerMessage(
+        EventId = 0,
+        Level = LogLevel.Information,
+        Message = "[{MethodName}] Completed run for thread: {RunId}/{ThreadId}.")]
+    public static partial void LogAzureAIAgentCompletedRun(
+        this ILogger logger,
+        string methodName,
+        string runId,
+        string threadId);
+
+    /// 
+    /// Logs  processing run steps (started).
+    /// 
+    [LoggerMessage(
+        EventId = 0,
+        Level = LogLevel.Debug,
+        Message = "[{MethodName}] Processing run steps for thread: {RunId}/{ThreadId}.")]
+    public static partial void LogAzureAIAgentProcessingRunSteps(
+        this ILogger logger,
+        string methodName,
+        string runId,
+        string threadId);
+
+    /// 
+    /// Logs  processed run steps (complete).
+    /// 
+    [LoggerMessage(
+        EventId = 0,
+        Level = LogLevel.Information,
+        Message = "[{MethodName}] Processed #{stepCount} run steps: {RunId}/{ThreadId}.")]
+    public static partial void LogAzureAIAgentProcessedRunSteps(
+        this ILogger logger,
+        string methodName,
+        int stepCount,
+        string runId,
+        string threadId);
+
+    /// 
+    /// Logs  processing run messages (started).
+    /// 
+    [LoggerMessage(
+        EventId = 0,
+        Level = LogLevel.Debug,
+        Message = "[{MethodName}] Processing run messages for thread: {RunId}/{ThreadId}.")]
+    public static partial void LogAzureAIAgentProcessingRunMessages(
+        this ILogger logger,
+        string methodName,
+        string runId,
+        string threadId);
+
+    /// 
+    /// Logs  processed run messages (complete).
+    /// 
+    [LoggerMessage(
+        EventId = 0,
+        Level = LogLevel.Information,
+        Message = "[{MethodName}] Processed #{MessageCount} run messages: {RunId}/{ThreadId}.")]
+    public static partial void LogAzureAIAgentProcessedRunMessages(
+        this ILogger logger,
+        string methodName,
+        int messageCount,
+        string runId,
+        string threadId);
+
+    /// 
+    /// Logs  polling run status (started).
+    /// 
+    [LoggerMessage(
+        EventId = 0,
+        Level = LogLevel.Debug,
+        Message = "[{MethodName}] Polling run status for thread: {RunId}/{ThreadId}.")]
+    public static partial void LogAzureAIAgentPollingRunStatus(
+        this ILogger logger,
+        string methodName,
+        string runId,
+        string threadId);
+
+    /// 
+    /// Logs  polled run status (complete).
+    /// 
+    [LoggerMessage(
+        EventId = 0,
+        Level = LogLevel.Information,
+        Message = "[{MethodName}] Run status is {RunStatus}: {RunId}/{ThreadId}.")]
+    public static partial void LogAzureAIAgentPolledRunStatus(
+        this ILogger logger,
+        string methodName,
+        RunStatus runStatus,
+        string runId,
+        string threadId);
+}
diff --git a/dotnet/src/Agents/AzureAI/Logging/AzureAIAgentLogMessages.cs b/dotnet/src/Agents/AzureAI/Logging/AzureAIAgentLogMessages.cs
new file mode 100644
index 000000000000..7056ddc746c0
--- /dev/null
+++ b/dotnet/src/Agents/AzureAI/Logging/AzureAIAgentLogMessages.cs
@@ -0,0 +1,69 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Diagnostics.CodeAnalysis;
+using Microsoft.Extensions.Logging;
+
+namespace Microsoft.SemanticKernel.Agents.AzureAI;
+
+#pragma warning disable SYSLIB1006 // Multiple logging methods cannot use the same event id within a class
+
+/// 
+/// Extensions for logging  invocations.
+/// 
+/// 
+/// This extension uses the  to
+/// generate logging code at compile time to achieve optimized code.
+/// 
+[ExcludeFromCodeCoverage]
+internal static partial class AzureAIAgentLogMessages
+{
+    /// 
+    /// Logs  creating channel (started).
+    /// 
+    [LoggerMessage(
+        EventId = 0,
+        Level = LogLevel.Debug,
+        Message = "[{MethodName}] Creating assistant thread for {ChannelType}.")]
+    public static partial void LogAzureAIAgentCreatingChannel(
+        this ILogger logger,
+        string methodName,
+        string channelType);
+
+    /// 
+    /// Logs  created channel (complete).
+    /// 
+    [LoggerMessage(
+        EventId = 0,
+        Level = LogLevel.Information,
+        Message = "[{MethodName}] Created assistant thread for {ChannelType}: #{ThreadId}.")]
+    public static partial void LogAzureAIAgentCreatedChannel(
+        this ILogger logger,
+        string methodName,
+        string channelType,
+        string threadId);
+
+    /// 
+    /// Logs  restoring serialized channel (started).
+    /// 
+    [LoggerMessage(
+        EventId = 0,
+        Level = LogLevel.Debug,
+        Message = "[{MethodName}] Restoring assistant channel for {ChannelType}: #{ThreadId}.")]
+    public static partial void LogAzureAIAgentRestoringChannel(
+        this ILogger logger,
+        string methodName,
+        string channelType,
+        string threadId);
+
+    /// 
+    /// Logs  restored serialized channel (complete).
+    /// 
+    [LoggerMessage(
+        EventId = 0,
+        Level = LogLevel.Information,
+        Message = "[{MethodName}] Restored assistant channel for {ChannelType}: #{ThreadId}.")]
+    public static partial void LogAzureAIAgentRestoredChannel(
+        this ILogger logger,
+        string methodName,
+        string channelType,
+        string threadId);
+}
diff --git a/dotnet/src/Agents/Abstractions/Properties/AssemblyInfo.cs b/dotnet/src/Agents/AzureAI/Properties/AssemblyInfo.cs
similarity index 100%
rename from dotnet/src/Agents/Abstractions/Properties/AssemblyInfo.cs
rename to dotnet/src/Agents/AzureAI/Properties/AssemblyInfo.cs
diff --git a/dotnet/src/Agents/AzureAI/RunPollingOptions.cs b/dotnet/src/Agents/AzureAI/RunPollingOptions.cs
new file mode 100644
index 000000000000..f1bbd1db4853
--- /dev/null
+++ b/dotnet/src/Agents/AzureAI/RunPollingOptions.cs
@@ -0,0 +1,73 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+
+namespace Microsoft.SemanticKernel.Agents.AzureAI;
+
+/// 
+/// Configuration and defaults associated with polling behavior for Azure AI Agent run processing.
+/// 
+public sealed class RunPollingOptions
+{
+    /// 
+    /// Gets the default maximum number of retries when monitoring thread-run status.
+    /// 
+    public static int DefaultMaximumRetryCount { get; } = 3;
+
+    /// 
+    /// Gets the default polling interval when monitoring thread-run status.
+    /// 
+    public static TimeSpan DefaultPollingInterval { get; } = TimeSpan.FromMilliseconds(500);
+
+    /// 
+    /// Gets the default back-off interval when monitoring thread-run status.
+    /// 
+    public static TimeSpan DefaultPollingBackoff { get; } = TimeSpan.FromSeconds(1);
+
+    /// 
+    /// Gets the default number of polling iterations before using .
+    /// 
+    public static int DefaultPollingBackoffThreshold { get; } = 2;
+
+    /// 
+    /// Gets the default polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
+    /// 
+    public static TimeSpan DefaultMessageSynchronizationDelay { get; } = TimeSpan.FromMilliseconds(500);
+
+    /// 
+    /// Gets or sets the maximum retry count when polling thread-run status.
+    /// 
+    /// 
+    /// This value only affects failures that have the potential to be transient.
+    /// Explicit server error responses will result in immediate failure.
+    /// 
+    public int MaximumRetryCount { get; set; } = DefaultMaximumRetryCount;
+
+    /// 
+    /// Gets or sets the polling interval when monitoring thread-run status.
+    /// 
+    public TimeSpan RunPollingInterval { get; set; } = DefaultPollingInterval;
+
+    /// 
+    /// Gets or sets the back-off interval when monitoring thread-run status.
+    /// 
+    public TimeSpan RunPollingBackoff { get; set; } = DefaultPollingBackoff;
+
+    /// 
+    /// Gets or sets the number of polling iterations before using .
+    /// 
+    public int RunPollingBackoffThreshold { get; set; } = DefaultPollingBackoffThreshold;
+
+    /// 
+    /// Gets or sets the polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
+    /// 
+    public TimeSpan MessageSynchronizationDelay { get; set; } = DefaultMessageSynchronizationDelay;
+
+    /// 
+    /// Gets the polling interval for the specified iteration count.
+    /// 
+    /// The number of polling iterations already attempted.
+    public TimeSpan GetPollingInterval(int iterationCount)
+    {
+        return iterationCount > this.RunPollingBackoffThreshold ? this.RunPollingBackoff : this.RunPollingInterval;
+    }
+}
diff --git a/dotnet/src/Agents/Bedrock/Agents.Bedrock.csproj b/dotnet/src/Agents/Bedrock/Agents.Bedrock.csproj
new file mode 100644
index 000000000000..e17d43f63fcc
--- /dev/null
+++ b/dotnet/src/Agents/Bedrock/Agents.Bedrock.csproj
@@ -0,0 +1,50 @@
+
+
+  
+    
+    Microsoft.SemanticKernel.Agents.Bedrock
+    Microsoft.SemanticKernel.Agents.Bedrock
+    net8.0;netstandard2.0
+    $(NoWarn);SKEXP0110;CA1724
+    false
+    alpha
+  
+
+  
+
+  
+    
+    Semantic Kernel Agents - Bedrock
+    Defines a concrete Agent based on the Bedrock Agent Service.
+  
+
+  
+    
+    
+    
+    
+    
+    
+    
+    
+  
+
+  
+
+  
+    
+  
+
+  
+    
+    
+    
+  
+
+  
+    
+    
+  
+
+
+
\ No newline at end of file
diff --git a/dotnet/src/Agents/Bedrock/BedrockAgent.cs b/dotnet/src/Agents/Bedrock/BedrockAgent.cs
new file mode 100644
index 000000000000..f01e46843ace
--- /dev/null
+++ b/dotnet/src/Agents/Bedrock/BedrockAgent.cs
@@ -0,0 +1,263 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+using Amazon.BedrockAgent;
+using Amazon.BedrockAgentRuntime;
+using Amazon.BedrockAgentRuntime.Model;
+using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
+using Microsoft.SemanticKernel.Agents.Extensions;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Diagnostics;
+
+namespace Microsoft.SemanticKernel.Agents.Bedrock;
+
+/// 
+/// Provides a specialized  for the Bedrock Agent service.
+/// 
+public class BedrockAgent : KernelAgent
+{
+    internal readonly AmazonBedrockAgentClient Client;
+
+    internal readonly AmazonBedrockAgentRuntimeClient RuntimeClient;
+
+    internal readonly Amazon.BedrockAgent.Model.Agent AgentModel;
+
+    /// 
+    /// There is a default alias created by Bedrock for the working draft version of the agent.
+    /// https://docs.aws.amazon.com/bedrock/latest/userguide/agents-deploy.html
+    /// 
+    public static readonly string WorkingDraftAgentAlias = "TSTALIASID";
+
+    /// 
+    /// Initializes a new instance of the  class.
+    /// Unlike other types of agents in Semantic Kernel, prompt templates are not supported for Bedrock agents,
+    /// since Bedrock agents don't support using an alternative instruction at runtime.
+    /// 
+    /// The agent model of an agent that exists on the Bedrock Agent service.
+    /// A client used to interact with the Bedrock Agent service.
+    /// A client used to interact with the Bedrock Agent runtime service.
+    public BedrockAgent(
+        Amazon.BedrockAgent.Model.Agent agentModel,
+        AmazonBedrockAgentClient? client = null,
+        AmazonBedrockAgentRuntimeClient? runtimeClient = null)
+    {
+        this.AgentModel = agentModel;
+        this.Client = client ?? new AmazonBedrockAgentClient();
+        this.RuntimeClient = runtimeClient ?? new AmazonBedrockAgentRuntimeClient();
+
+        this.Id = agentModel.AgentId;
+        this.Name = agentModel.AgentName;
+        this.Description = agentModel.Description;
+        this.Instructions = agentModel.Instruction;
+    }
+
+    #region static methods
+
+    /// 
+    /// Convenience method to create a unique session id.
+    /// 
+    public static string CreateSessionId()
+    {
+        return Guid.NewGuid().ToString();
+    }
+
+    #endregion
+
+    #region public methods
+
+    /// 
+    /// Invoke the Bedrock agent with the given message.
+    /// 
+    /// The session id.
+    /// The message to send to the agent.
+    /// The arguments to use when invoking the agent.
+    /// The alias id of the agent to use. The default is the working draft alias id.
+    /// The  to monitor for cancellation requests. The default is .
+    /// An  of .
+    public IAsyncEnumerable InvokeAsync(
+        string sessionId,
+        string message,
+        KernelArguments? arguments,
+        string? agentAliasId = null,
+        CancellationToken cancellationToken = default)
+    {
+        var invokeAgentRequest = new InvokeAgentRequest
+        {
+            AgentAliasId = agentAliasId ?? WorkingDraftAgentAlias,
+            AgentId = this.Id,
+            SessionId = sessionId,
+            InputText = message,
+        };
+
+        return this.InvokeAsync(invokeAgentRequest, arguments, cancellationToken);
+    }
+
+    /// 
+    /// Invoke the Bedrock agent with the given request. Use this method when you want to customize the request.
+    /// 
+    /// The request to send to the agent.
+    /// The arguments to use when invoking the agent.
+    /// The  to monitor for cancellation requests. The default is .
+    public IAsyncEnumerable InvokeAsync(
+        InvokeAgentRequest invokeAgentRequest,
+        KernelArguments? arguments,
+        CancellationToken cancellationToken = default)
+    {
+        return invokeAgentRequest.StreamingConfigurations != null && (invokeAgentRequest.StreamingConfigurations.StreamFinalResponse ?? false)
+            ? throw new ArgumentException("The streaming configuration must be null for non-streaming responses.")
+            : ActivityExtensions.RunWithActivityAsync(
+                () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
+                InvokeInternal,
+                cancellationToken);
+
+        // Collect all responses from the agent and return them as a single chat message content since this
+        // is a non-streaming API.
+    // The Bedrock Agent API streams back different types of responses, e.g. text, files, and metadata.
+        // The Bedrock Agent API also won't stream back any content when it needs to call a function. It will
+        // only start streaming back content after the function has been called and the response is ready.
+        async IAsyncEnumerable InvokeInternal()
+        {
+            ChatMessageContentItemCollection items = [];
+            string content = "";
+            Dictionary metadata = [];
+            List innerContents = [];
+
+            await foreach (var message in this.InternalInvokeAsync(invokeAgentRequest, arguments, cancellationToken).ConfigureAwait(false))
+            {
+                items.AddRange(message.Items);
+                content += message.Content ?? "";
+                if (message.Metadata != null)
+                {
+                    foreach (var key in message.Metadata.Keys)
+                    {
+                        metadata[key] = message.Metadata[key];
+                    }
+                }
+                innerContents.Add(message.InnerContent);
+            }
+
+            yield return content.Length == 0
+                ? throw new KernelException("No content was returned from the agent.")
+                : new ChatMessageContent(AuthorRole.Assistant, content)
+                {
+                    AuthorName = this.GetDisplayName(),
+                    Items = items,
+                    ModelId = this.AgentModel.FoundationModel,
+                    Metadata = metadata,
+                    InnerContent = innerContents,
+                };
+        }
+    }
+
+    /// 
+    /// Invoke the Bedrock agent with the given request and streaming response.
+    /// 
+    /// The session id.
+    /// The message to send to the agent.
+    /// The arguments to use when invoking the agent.
+    /// The alias id of the agent to use. The default is the working draft alias id.
+    /// The  to monitor for cancellation requests. The default is .
+    /// An  of .
+    public IAsyncEnumerable InvokeStreamingAsync(
+        string sessionId,
+        string message,
+        KernelArguments? arguments,
+        string? agentAliasId = null,
+        CancellationToken cancellationToken = default)
+    {
+        var invokeAgentRequest = new InvokeAgentRequest
+        {
+            AgentAliasId = agentAliasId ?? WorkingDraftAgentAlias,
+            AgentId = this.Id,
+            SessionId = sessionId,
+            InputText = message,
+            StreamingConfigurations = new()
+            {
+                StreamFinalResponse = true,
+            },
+        };
+
+        return this.InvokeStreamingAsync(invokeAgentRequest, arguments, cancellationToken);
+    }
+
+    /// 
+    /// Invoke the Bedrock agent with the given request and streaming response. Use this method when you want to customize the request.
+    /// 
+    /// The request to send to the agent.
+    /// The arguments to use when invoking the agent.
+    /// The  to monitor for cancellation requests. The default is .
+    /// An  of .
+    public IAsyncEnumerable InvokeStreamingAsync(
+        InvokeAgentRequest invokeAgentRequest,
+        KernelArguments? arguments,
+        CancellationToken cancellationToken = default)
+    {
+        if (invokeAgentRequest.StreamingConfigurations == null)
+        {
+            invokeAgentRequest.StreamingConfigurations = new()
+            {
+                StreamFinalResponse = true,
+            };
+        }
+        else if (!(invokeAgentRequest.StreamingConfigurations.StreamFinalResponse ?? false))
+        {
+            throw new ArgumentException("The streaming configuration must have StreamFinalResponse set to true.");
+        }
+
+        return ActivityExtensions.RunWithActivityAsync(
+            () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
+            InvokeInternal,
+            cancellationToken);
+
+        async IAsyncEnumerable InvokeInternal()
+        {
+            // The Bedrock agent service has the same API for both streaming and non-streaming responses.
+            // We are invoking the same method as the non-streaming response with the streaming configuration set,
+            // and converting the chat message content to streaming chat message content.
+            await foreach (var chatMessageContent in this.InternalInvokeAsync(invokeAgentRequest, arguments, cancellationToken).ConfigureAwait(false))
+            {
+                yield return new StreamingChatMessageContent(chatMessageContent.Role, chatMessageContent.Content)
+                {
+                    AuthorName = chatMessageContent.AuthorName,
+                    ModelId = chatMessageContent.ModelId,
+                    InnerContent = chatMessageContent.InnerContent,
+                    Metadata = chatMessageContent.Metadata,
+                };
+            }
+        }
+    }
+
+    #endregion
+
+    /// 
+    protected override IEnumerable GetChannelKeys()
+    {
+        // Return the channel keys for the BedrockAgent
+        yield return typeof(BedrockAgentChannel).FullName!;
+    }
+
+    /// 
+    protected override Task CreateChannelAsync(CancellationToken cancellationToken)
+    {
+        // Create and return a new BedrockAgentChannel
+        return Task.FromResult(new BedrockAgentChannel());
+    }
+
+    /// 
+    protected override Task RestoreChannelAsync(string channelState, CancellationToken cancellationToken)
+    {
+        // Restore and return a BedrockAgentChannel from the given state
+        return Task.FromResult(new BedrockAgentChannel());
+    }
+
+    #region internal methods
+
+    internal string CodeInterpreterActionGroupSignature { get => $"{this.GetDisplayName()}_CodeInterpreter"; }
+    internal string KernelFunctionActionGroupSignature { get => $"{this.GetDisplayName()}_KernelFunctions"; }
+    internal string UseInputActionGroupSignature { get => $"{this.GetDisplayName()}_UserInput"; }
+
+    #endregion
+}
diff --git a/dotnet/src/Agents/Bedrock/BedrockAgentChannel.cs b/dotnet/src/Agents/Bedrock/BedrockAgentChannel.cs
new file mode 100644
index 000000000000..1e0d40d91188
--- /dev/null
+++ b/dotnet/src/Agents/Bedrock/BedrockAgentChannel.cs
@@ -0,0 +1,248 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using Amazon.BedrockAgentRuntime.Model;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Agents.Extensions;
+using Microsoft.SemanticKernel.Agents.Serialization;
+using Microsoft.SemanticKernel.ChatCompletion;
+
+namespace Microsoft.SemanticKernel.Agents.Bedrock;
+
+/// 
+/// A  specialization for use with .
+/// 
+public class BedrockAgentChannel : AgentChannel
+{
+    private readonly ChatHistory _history = [];
+
+    private const string MessagePlaceholder = "[SILENCE]";
+
+    /// 
+    /// Receive messages from a group chat.
+    /// Bedrock requires the chat history to alternate between user and agent messages.
+    /// Thus, when receiving messages, the message sequence will be mutated by inserting
+    /// placeholder agent or user messages as needed.
+    /// 
+    /// The history of messages to receive.
+    /// A token to monitor for cancellation requests.
+    protected override Task ReceiveAsync(IEnumerable history, CancellationToken cancellationToken)
+    {
+        foreach (var incomingMessage in history)
+        {
+            if (string.IsNullOrEmpty(incomingMessage.Content))
+            {
+                this.Logger.LogWarning("Received a message with no content. Skipping.");
+                continue;
+            }
+
+            if (this._history.Count == 0 || this._history.Last().Role != incomingMessage.Role)
+            {
+                this._history.Add(incomingMessage);
+            }
+            else
+            {
+                this._history.Add
+                (
+                    new ChatMessageContent
+                    (
+                        incomingMessage.Role == AuthorRole.Assistant ? AuthorRole.User : AuthorRole.Assistant,
+                        MessagePlaceholder
+                    )
+                );
+                this._history.Add(incomingMessage);
+            }
+        }
+
+        return Task.CompletedTask;
+    }
+
+    /// 
+    protected override async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(
+        BedrockAgent agent,
+        [EnumeratorCancellation] CancellationToken cancellationToken)
+    {
+        if (!this.PrepareAndValidateHistory())
+        {
+            yield break;
+        }
+
+        InvokeAgentRequest invokeAgentRequest = new()
+        {
+            AgentAliasId = BedrockAgent.WorkingDraftAgentAlias,
+            AgentId = agent.Id,
+            SessionId = BedrockAgent.CreateSessionId(),
+            InputText = this._history.Last().Content,
+            SessionState = this.ParseHistoryToSessionState(),
+        };
+        await foreach (var message in agent.InvokeAsync(invokeAgentRequest, null, cancellationToken).ConfigureAwait(false))
+        {
+            if (message.Content is not null)
+            {
+                this._history.Add(message);
+                // All messages from Bedrock agents are user facing, i.e., function calls are not returned as messages
+                yield return (true, message);
+            }
+        }
+    }
+
+    /// 
+    protected override async IAsyncEnumerable InvokeStreamingAsync(
+        BedrockAgent agent,
+        IList messages,
+        [EnumeratorCancellation] CancellationToken cancellationToken)
+    {
+        if (!this.PrepareAndValidateHistory())
+        {
+            yield break;
+        }
+
+        InvokeAgentRequest invokeAgentRequest = new()
+        {
+            AgentAliasId = BedrockAgent.WorkingDraftAgentAlias,
+            AgentId = agent.Id,
+            SessionId = BedrockAgent.CreateSessionId(),
+            InputText = this._history.Last().Content,
+            SessionState = this.ParseHistoryToSessionState(),
+        };
+        await foreach (var message in agent.InvokeStreamingAsync(invokeAgentRequest, null, cancellationToken).ConfigureAwait(false))
+        {
+            if (message.Content is not null)
+            {
+                this._history.Add(new()
+                {
+                    Role = AuthorRole.Assistant,
+                    Content = message.Content,
+                    AuthorName = message.AuthorName,
+                    InnerContent = message.InnerContent,
+                    ModelId = message.ModelId,
+                });
+                // All messages from Bedrock agents are user facing, i.e., function calls are not returned as messages
+                yield return message;
+            }
+        }
+    }
+
+    /// 
+    protected override IAsyncEnumerable GetHistoryAsync(CancellationToken cancellationToken)
+    {
+        return this._history.ToDescendingAsync();
+    }
+
+    /// 
+    protected override Task ResetAsync(CancellationToken cancellationToken)
+    {
+        this._history.Clear();
+
+        return Task.CompletedTask;
+    }
+
+    /// 
+    protected override string Serialize()
+        => JsonSerializer.Serialize(ChatMessageReference.Prepare(this._history));
+
+    #region private methods
+
+    private bool PrepareAndValidateHistory()
+    {
+        if (this._history.Count == 0)
+        {
+            this.Logger.LogWarning("No messages to send. Bedrock requires at least one message to start a conversation.");
+            return false;
+        }
+
+        this.EnsureHistoryAlternates();
+        this.EnsureLastMessageIsUser();
+        if (string.IsNullOrEmpty(this._history.Last().Content))
+        {
+            this.Logger.LogWarning("Last message has no content. Bedrock doesn't support empty messages.");
+            return false;
+        }
+
+        return true;
+    }
+
+    private void EnsureHistoryAlternates()
+    {
+        if (this._history.Count <= 1)
+        {
+            return;
+        }
+
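+        // Walk the history and insert a placeholder message with the opposite role whenever two
+        // consecutive messages share the same role, so the sequence strictly alternates as Bedrock requires.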
+        int currentIndex = 1;
+        while (currentIndex < this._history.Count)
+        {
+            if (this._history[currentIndex].Role == this._history[currentIndex - 1].Role)
+            {
+                this._history.Insert(
+                    currentIndex,
+                    new ChatMessageContent(
+                        this._history[currentIndex].Role == AuthorRole.Assistant ? AuthorRole.User : AuthorRole.Assistant,
+                        MessagePlaceholder
+                    )
+                );
+                currentIndex += 2;
+            }
+            else
+            {
+                currentIndex++;
+            }
+        }
+    }
+
+    private void EnsureLastMessageIsUser()
+    {
+        if (this._history.Count > 0 && this._history.Last().Role != AuthorRole.User)
+        {
+            this._history.Add(new ChatMessageContent(AuthorRole.User, MessagePlaceholder));
+        }
+    }
+
+    private SessionState ParseHistoryToSessionState()
+    {
+        SessionState sessionState = new();
+
+        // We don't take the last message as it needs to be sent separately in another parameter.
+        if (this._history.Count > 1)
+        {
+            sessionState.ConversationHistory = new()
+            {
+                Messages = []
+            };
+
+            foreach (var message in this._history.Take(this._history.Count - 1))
+            {
+                if (message.Content is null)
+                {
+                    throw new InvalidOperationException("Message content cannot be null.");
+                }
+                if (message.Role != AuthorRole.Assistant && message.Role != AuthorRole.User)
+                {
+                    throw new InvalidOperationException("Message role must be either Assistant or User.");
+                }
+
+                sessionState.ConversationHistory.Messages.Add(new()
+                {
+                    Role = message.Role == AuthorRole.Assistant
+                        ? Amazon.BedrockAgentRuntime.ConversationRole.Assistant
+                        : Amazon.BedrockAgentRuntime.ConversationRole.User,
+                    Content = [
+                        new Amazon.BedrockAgentRuntime.Model.ContentBlock()
+                        {
+                            Text = message.Content,
+                        },
+                    ],
+                });
+            }
+        }
+
+        return sessionState;
+    }
+    #endregion
+}
diff --git a/dotnet/src/Agents/Bedrock/Extensions/BedrockAgentExtensions.cs b/dotnet/src/Agents/Bedrock/Extensions/BedrockAgentExtensions.cs
new file mode 100644
index 000000000000..c2e6bdd358bb
--- /dev/null
+++ b/dotnet/src/Agents/Bedrock/Extensions/BedrockAgentExtensions.cs
@@ -0,0 +1,214 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Threading;
+using System.Threading.Tasks;
+using Amazon.BedrockAgent;
+using Amazon.BedrockAgent.Model;
+
+namespace Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
+
+/// 
+/// Extensions associated with 
+/// 
+public static class BedrockAgentExtensions
+{
+    /// 
+    /// Creates an agent.
+    /// 
+    /// The  instance.
+    /// The  instance.
+    /// The  instance.
+    public static async Task CreateAndPrepareAgentAsync(
+        this AmazonBedrockAgentClient client,
+        CreateAgentRequest request,
+        CancellationToken cancellationToken = default)
+    {
+        var createAgentResponse = await client.CreateAgentAsync(request, cancellationToken).ConfigureAwait(false);
+        // The agent will first enter the CREATING status.
+        // When the operation finishes, it will enter the NOT_PREPARED status.
+        // We need to wait for the agent to reach the NOT_PREPARED status before we can prepare it.
+        await client.WaitForAgentStatusAsync(createAgentResponse.Agent, AgentStatus.NOT_PREPARED, cancellationToken: cancellationToken).ConfigureAwait(false);
+        return await client.PrepareAgentAndWaitUntilPreparedAsync(createAgentResponse.Agent, cancellationToken).ConfigureAwait(false);
+    }
+
+    /// 
+    /// Associates an agent with a knowledge base.
+    /// 
+    /// The  instance.
+    /// The knowledge base ID.
+    /// The description of the association.
+    /// The  instance.
+    public static async Task AssociateAgentKnowledgeBaseAsync(
+        this BedrockAgent agent,
+        string knowledgeBaseId,
+        string description,
+        CancellationToken cancellationToken = default)
+    {
+        await agent.Client.AssociateAgentKnowledgeBaseAsync(new()
+        {
+            AgentId = agent.Id,
+            AgentVersion = agent.AgentModel.AgentVersion ?? "DRAFT",
+            KnowledgeBaseId = knowledgeBaseId,
+            Description = description,
+        }, cancellationToken).ConfigureAwait(false);
+
+        await agent.Client.PrepareAgentAndWaitUntilPreparedAsync(agent.AgentModel, cancellationToken).ConfigureAwait(false);
+    }
+
+    /// 
+    /// Disassociate the agent with a knowledge base.
+    /// 
+    /// The  instance.
+    /// The id of the knowledge base to disassociate with the agent.
+    /// The  to monitor for cancellation requests. The default is .
+    public static async Task DisassociateAgentKnowledgeBaseAsync(
+        this BedrockAgent agent,
+        string knowledgeBaseId,
+        CancellationToken cancellationToken = default)
+    {
+        await agent.Client.DisassociateAgentKnowledgeBaseAsync(new()
+        {
+            AgentId = agent.Id,
+            AgentVersion = agent.AgentModel.AgentVersion ?? "DRAFT",
+            KnowledgeBaseId = knowledgeBaseId,
+        }, cancellationToken).ConfigureAwait(false);
+
+        await agent.Client.PrepareAgentAndWaitUntilPreparedAsync(agent.AgentModel, cancellationToken).ConfigureAwait(false);
+    }
+
+    /// 
+    /// List the knowledge bases associated with the agent.
+    /// 
+    /// The  instance.
+    /// The  to monitor for cancellation requests. The default is .
+    /// A  containing the knowledge bases associated with the agent.
+    public static async Task ListAssociatedKnowledgeBasesAsync(
+        this BedrockAgent agent,
+        CancellationToken cancellationToken = default)
+    {
+        return await agent.Client.ListAgentKnowledgeBasesAsync(new()
+        {
+            AgentId = agent.Id,
+            AgentVersion = agent.AgentModel.AgentVersion ?? "DRAFT",
+        }, cancellationToken).ConfigureAwait(false);
+    }
+
+    /// 
+    /// Create a code interpreter action group for the agent and prepare the agent.
+    /// 
+    /// The  instance.
+    /// The  to monitor for cancellation requests. The default is .
+    public static async Task CreateCodeInterpreterActionGroupAsync(
+        this BedrockAgent agent,
+        CancellationToken cancellationToken = default)
+    {
+        var createAgentActionGroupRequest = new CreateAgentActionGroupRequest
+        {
+            AgentId = agent.Id,
+            AgentVersion = agent.AgentModel.AgentVersion ?? "DRAFT",
+            ActionGroupName = agent.CodeInterpreterActionGroupSignature,
+            ActionGroupState = ActionGroupState.ENABLED,
+            ParentActionGroupSignature = new(Amazon.BedrockAgent.ActionGroupSignature.AMAZONCodeInterpreter),
+        };
+
+        await agent.Client.CreateAgentActionGroupAsync(createAgentActionGroupRequest, cancellationToken).ConfigureAwait(false);
+        await agent.Client.PrepareAgentAndWaitUntilPreparedAsync(agent.AgentModel, cancellationToken).ConfigureAwait(false);
+    }
+
+    /// 
+    /// Create a kernel function action group for the agent and prepare the agent.
+    /// 
+    /// The  instance.
+    /// The  to monitor for cancellation requests. The default is .
+    public static async Task CreateKernelFunctionActionGroupAsync(
+        this BedrockAgent agent,
+        CancellationToken cancellationToken = default)
+    {
+        var createAgentActionGroupRequest = new CreateAgentActionGroupRequest
+        {
+            AgentId = agent.Id,
+            AgentVersion = agent.AgentModel.AgentVersion ?? "DRAFT",
+            ActionGroupName = agent.KernelFunctionActionGroupSignature,
+            ActionGroupState = ActionGroupState.ENABLED,
+            ActionGroupExecutor = new()
+            {
+                CustomControl = Amazon.BedrockAgent.CustomControlMethod.RETURN_CONTROL,
+            },
+            FunctionSchema = agent.Kernel.ToFunctionSchema(),
+        };
+
+        await agent.Client.CreateAgentActionGroupAsync(createAgentActionGroupRequest, cancellationToken).ConfigureAwait(false);
+        await agent.Client.PrepareAgentAndWaitUntilPreparedAsync(agent.AgentModel, cancellationToken).ConfigureAwait(false);
+    }
+
+    /// 
+    /// Enable user input for the agent and prepare the agent.
+    /// 
+    /// The  instance.
+    /// The  to monitor for cancellation requests. The default is .
+    public static async Task EnableUserInputActionGroupAsync(
+        this BedrockAgent agent,
+        CancellationToken cancellationToken = default)
+    {
+        var createAgentActionGroupRequest = new CreateAgentActionGroupRequest
+        {
+            AgentId = agent.Id,
+            AgentVersion = agent.AgentModel.AgentVersion ?? "DRAFT",
+            ActionGroupName = agent.UseInputActionGroupSignature,
+            ActionGroupState = ActionGroupState.ENABLED,
+            ParentActionGroupSignature = new(Amazon.BedrockAgent.ActionGroupSignature.AMAZONUserInput),
+        };
+
+        await agent.Client.CreateAgentActionGroupAsync(createAgentActionGroupRequest, cancellationToken).ConfigureAwait(false);
+        await agent.Client.PrepareAgentAndWaitUntilPreparedAsync(agent.AgentModel, cancellationToken).ConfigureAwait(false);
+    }
+
+    private static async Task PrepareAgentAndWaitUntilPreparedAsync(
+        this AmazonBedrockAgentClient client,
+        Amazon.BedrockAgent.Model.Agent agent,
+        CancellationToken cancellationToken = default)
+    {
+        var prepareAgentResponse = await client.PrepareAgentAsync(new() { AgentId = agent.AgentId }, cancellationToken).ConfigureAwait(false);
+
+        // The agent will take some time to enter the PREPARING status after the prepare operation is called.
+        // We need to wait for the agent to reach the PREPARING status before we can proceed; otherwise, we
+        // would return immediately if the agent is already in the PREPARED status.
+        await client.WaitForAgentStatusAsync(agent, AgentStatus.PREPARING, cancellationToken: cancellationToken).ConfigureAwait(false);
+        // When the agent is prepared, it will enter the PREPARED status.
+        return await client.WaitForAgentStatusAsync(agent, AgentStatus.PREPARED, cancellationToken: cancellationToken).ConfigureAwait(false);
+    }
+
+    /// 
+    /// Wait for the agent to reach the specified status.
+    /// 
+    /// The  instance.
+    /// The  to monitor.
+    /// The status to wait for.
+    /// The interval in seconds to wait between attempts. The default is 2 seconds.
+    /// The maximum number of attempts to make. The default is 5 attempts.
+    /// The  to monitor for cancellation requests.
+    /// The  instance.
+    private static async Task WaitForAgentStatusAsync(
+        this AmazonBedrockAgentClient client,
+        Amazon.BedrockAgent.Model.Agent agent,
+        AgentStatus status,
+        int interval = 2,
+        int maxAttempts = 5,
+        CancellationToken cancellationToken = default)
+    {
+        for (var i = 0; i < maxAttempts; i++)
+        {
+            var getAgentResponse = await client.GetAgentAsync(new() { AgentId = agent.AgentId }, cancellationToken).ConfigureAwait(false);
+
+            if (getAgentResponse.Agent.AgentStatus == status)
+            {
+                return getAgentResponse.Agent;
+            }
+
+            await Task.Delay(interval * 1000, cancellationToken).ConfigureAwait(false);
+        }
+
+        throw new TimeoutException($"Agent did not reach status {status} within the specified time.");
+    }
+}
diff --git a/dotnet/src/Agents/Bedrock/Extensions/BedrockAgentInvokeExtensions.cs b/dotnet/src/Agents/Bedrock/Extensions/BedrockAgentInvokeExtensions.cs
new file mode 100644
index 000000000000..5e67aacaf04a
--- /dev/null
+++ b/dotnet/src/Agents/Bedrock/Extensions/BedrockAgentInvokeExtensions.cs
@@ -0,0 +1,225 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+using Amazon.BedrockAgentRuntime;
+using Amazon.BedrockAgentRuntime.Model;
+using Amazon.Runtime.EventStreams.Internal;
+using Microsoft.SemanticKernel.Agents.Extensions;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.FunctionCalling;
+
+namespace Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
+
+/// 
+/// Extensions associated with the status of a .
+/// 
+internal static class BedrockAgentInvokeExtensions
+{
+    public static async IAsyncEnumerable InternalInvokeAsync(
+        this BedrockAgent agent,
+        InvokeAgentRequest invokeAgentRequest,
+        KernelArguments? arguments,
+        [EnumeratorCancellation] CancellationToken cancellationToken)
+    {
+        // This session state is used to store the results of function calls to be passed back to the agent.
+        // https://docs.aws.amazon.com/sdkfornet/v3/apidocs/items/BedrockAgentRuntime/TSessionState.html
+        SessionState? sessionState = null;
+        for (var requestIndex = 0; ; requestIndex++)
+        {
+            if (sessionState != null)
+            {
+                invokeAgentRequest.SessionState = sessionState;
+                sessionState = null;
+            }
+            var invokeAgentResponse = await agent.RuntimeClient.InvokeAgentAsync(invokeAgentRequest, cancellationToken).ConfigureAwait(false);
+
+            if (invokeAgentResponse.HttpStatusCode != System.Net.HttpStatusCode.OK)
+            {
+                throw new HttpOperationException($"Failed to invoke agent. Status code: {invokeAgentResponse.HttpStatusCode}");
+            }
+
+            List functionCallContents = [];
+            await foreach (var responseEvent in invokeAgentResponse.Completion.ToAsyncEnumerable().ConfigureAwait(false))
+            {
+                if (responseEvent is BedrockAgentRuntimeEventStreamException bedrockAgentRuntimeEventStreamException)
+                {
+                    throw new KernelException("Failed to handle Bedrock Agent stream event.", bedrockAgentRuntimeEventStreamException);
+                }
+
+                var chatMessageContent =
+                    HandleChunkEvent(agent, responseEvent) ??
+                    HandleFilesEvent(agent, responseEvent) ??
+                    HandleReturnControlEvent(agent, responseEvent, arguments) ??
+                    HandleTraceEvent(agent, responseEvent) ??
+                    throw new KernelException($"Failed to handle Bedrock Agent stream event: {responseEvent}");
+                if (chatMessageContent.Items.Count > 0 && chatMessageContent.Items[0] is FunctionCallContent functionCallContent)
+                {
+                    functionCallContents.AddRange(chatMessageContent.Items.Where(item => item is FunctionCallContent).Cast());
+                }
+                else
+                {
+                    yield return chatMessageContent;
+                }
+            }
+
+            // This is used to cap the auto function invocation loop to prevent infinite loops.
+            // It doesn't use the `FunctionCallsProcessor` to process the functions because we do not need
+            // many of the features it offers and we want to keep the code simple.
+            var functionChoiceBehaviorConfiguration = new FunctionCallsProcessor().GetConfiguration(
+                FunctionChoiceBehavior.Auto(), [], requestIndex, agent.Kernel);
+
+            if (functionCallContents.Count > 0 && functionChoiceBehaviorConfiguration!.AutoInvoke)
+            {
+                var functionResults = await InvokeFunctionCallsAsync(agent, functionCallContents, cancellationToken).ConfigureAwait(false);
+                sessionState = CreateSessionStateWithFunctionResults(functionResults, agent);
+            }
+            else
+            {
+                break;
+            }
+        }
+    }
+
+    private static ChatMessageContent? HandleChunkEvent(
+        BedrockAgent agent,
+        IEventStreamEvent responseEvent)
+    {
+        return responseEvent is not PayloadPart payload
+            ? null
+            : new ChatMessageContent()
+            {
+                Role = AuthorRole.Assistant,
+                AuthorName = agent.GetDisplayName(),
+                Content = Encoding.UTF8.GetString(payload.Bytes.ToArray()),
+                ModelId = agent.AgentModel.FoundationModel,
+                InnerContent = payload,
+            };
+    }
+
+    private static ChatMessageContent? HandleFilesEvent(
+        BedrockAgent agent,
+        IEventStreamEvent responseEvent)
+    {
+        if (responseEvent is not FilePart files)
+        {
+            return null;
+        }
+
+        ChatMessageContentItemCollection binaryContents = [];
+        foreach (var file in files.Files)
+        {
+            binaryContents.Add(new BinaryContent(file.Bytes.ToArray(), file.Type)
+            {
+                Metadata = new Dictionary()
+                {
+                    { "Name", file.Name },
+                },
+            });
+        }
+
+        return new ChatMessageContent()
+        {
+            Role = AuthorRole.Assistant,
+            AuthorName = agent.GetDisplayName(),
+            Items = binaryContents,
+            ModelId = agent.AgentModel.FoundationModel,
+            InnerContent = files,
+        };
+    }
+
+    private static ChatMessageContent? HandleReturnControlEvent(
+        BedrockAgent agent,
+        IEventStreamEvent responseEvent,
+        KernelArguments? arguments)
+    {
+        if (responseEvent is not ReturnControlPayload returnControlPayload)
+        {
+            return null;
+        }
+
+        ChatMessageContentItemCollection functionCallContents = [];
+        foreach (var invocationInput in returnControlPayload.InvocationInputs)
+        {
+            var functionInvocationInput = invocationInput.FunctionInvocationInput;
+            functionCallContents.Add(new FunctionCallContent(
+                functionInvocationInput.Function,
+                id: returnControlPayload.InvocationId,
+                arguments: functionInvocationInput.Parameters.FromFunctionParameters(arguments))
+            {
+                Metadata = new Dictionary()
+                {
+                    { "ActionGroup", functionInvocationInput.ActionGroup },
+                    { "ActionInvocationType", functionInvocationInput.ActionInvocationType },
+                },
+            });
+        }
+
+        return new ChatMessageContent()
+        {
+            Role = AuthorRole.Assistant,
+            AuthorName = agent.GetDisplayName(),
+            Items = functionCallContents,
+            ModelId = agent.AgentModel.FoundationModel,
+            InnerContent = returnControlPayload,
+        };
+    }
+
+    private static ChatMessageContent? HandleTraceEvent(
+        BedrockAgent agent,
+        IEventStreamEvent responseEvent)
+    {
+        return responseEvent is not TracePart trace
+            ? null
+            : new ChatMessageContent()
+            {
+                Role = AuthorRole.Assistant,
+                AuthorName = agent.GetDisplayName(),
+                ModelId = agent.AgentModel.FoundationModel,
+                InnerContent = trace,
+            };
+    }
+
+    private static async Task> InvokeFunctionCallsAsync(
+        BedrockAgent agent,
+        List functionCallContents,
+        CancellationToken cancellationToken)
+    {
+        var functionResults = await Task.WhenAll(functionCallContents.Select(async functionCallContent =>
+        {
+            return await functionCallContent.InvokeAsync(agent.Kernel, cancellationToken).ConfigureAwait(false);
+        })).ConfigureAwait(false);
+
+        return [.. functionResults];
+    }
+
+    private static SessionState CreateSessionStateWithFunctionResults(List functionResults, BedrockAgent agent)
+    {
+        return functionResults.Count == 0
+            ? throw new KernelException("No function results were returned.")
+            : new()
+            {
+                InvocationId = functionResults[0].CallId,
+                ReturnControlInvocationResults = [.. functionResults.Select(functionResult =>
+                    {
+                        return new InvocationResultMember()
+                        {
+                            FunctionResult = new Amazon.BedrockAgentRuntime.Model.FunctionResult
+                            {
+                                ActionGroup = agent.KernelFunctionActionGroupSignature,
+                                Function = functionResult.FunctionName,
+                                ResponseBody = new Dictionary
+                                {
+                                    { "TEXT", new ContentBody() { Body = functionResult.Result as string } }
+                                }
+                            }
+                        };
+                    }
+                )],
+            };
+    }
+}
diff --git a/dotnet/src/Agents/Bedrock/Extensions/BedrockFunctionSchemaExtensions.cs b/dotnet/src/Agents/Bedrock/Extensions/BedrockFunctionSchemaExtensions.cs
new file mode 100644
index 000000000000..c890638484a2
--- /dev/null
+++ b/dotnet/src/Agents/Bedrock/Extensions/BedrockFunctionSchemaExtensions.cs
@@ -0,0 +1,102 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using Amazon.BedrockAgent.Model;
+using Amazon.BedrockAgentRuntime.Model;
+
+namespace Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
+
+/// 
+/// Extensions associated with the status of a .
+/// 
+internal static class BedrockFunctionSchemaExtensions
+{
+    public static KernelArguments FromFunctionParameters(this List parameters, KernelArguments? arguments)
+    {
+        KernelArguments kernelArguments = arguments ?? [];
+        foreach (var parameter in parameters)
+        {
+            kernelArguments.Add(parameter.Name, parameter.Value);
+        }
+
+        return kernelArguments;
+    }
+
+    public static Amazon.BedrockAgent.Model.FunctionSchema ToFunctionSchema(this Kernel kernel)
+    {
+        var plugins = kernel.Plugins;
+        List functions = [];
+        foreach (var plugin in plugins)
+        {
+            foreach (KernelFunction function in plugin)
+            {
+                functions.Add(new Function
+                {
+                    Name = function.Name,
+                    Description = function.Description,
+                    Parameters = function.Metadata.Parameters.CreateParameterSpec(),
+                    // This field controls whether user confirmation is required to invoke the function.
+                    // If this is set to "ENABLED", the user will be prompted to confirm the function invocation.
+                    // Only after the user confirms, the function call request will be issued by the agent.
+                    // If the user denies the confirmation, the agent will act as if the function does not exist.
+                    // Currently, we do not support this feature, so we set it to "DISABLED".
+                    RequireConfirmation = Amazon.BedrockAgent.RequireConfirmation.DISABLED,
+                });
+            }
+        }
+
+        return new Amazon.BedrockAgent.Model.FunctionSchema
+        {
+            Functions = functions,
+        };
+    }
+
+    private static Dictionary CreateParameterSpec(
+        this IReadOnlyList parameters)
+    {
+        Dictionary parameterSpec = [];
+        foreach (var parameter in parameters)
+        {
+            parameterSpec.Add(parameter.Name, new Amazon.BedrockAgent.Model.ParameterDetail
+            {
+                Description = parameter.Description,
+                Required = parameter.IsRequired,
+                Type = parameter.ParameterType.ToAmazonType(),
+            });
+        }
+
+        return parameterSpec;
+    }
+
+    private static Amazon.BedrockAgent.Type ToAmazonType(this System.Type? parameterType)
+    {
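+        // Map friendly .NET type names to the closest Bedrock agent parameter types; unsupported types throw.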
+        var typeString = parameterType?.GetFriendlyTypeName();
+        return typeString switch
+        {
+            "String" => Amazon.BedrockAgent.Type.String,
+            "Boolean" => Amazon.BedrockAgent.Type.Boolean,
+            "Int16" => Amazon.BedrockAgent.Type.Integer,
+            "UInt16" => Amazon.BedrockAgent.Type.Integer,
+            "Int32" => Amazon.BedrockAgent.Type.Integer,
+            "UInt32" => Amazon.BedrockAgent.Type.Integer,
+            "Int64" => Amazon.BedrockAgent.Type.Integer,
+            "UInt64" => Amazon.BedrockAgent.Type.Integer,
+            "Single" => Amazon.BedrockAgent.Type.Number,
+            "Double" => Amazon.BedrockAgent.Type.Number,
+            "Decimal" => Amazon.BedrockAgent.Type.Number,
+            "String[]" => Amazon.BedrockAgent.Type.Array,
+            "Boolean[]" => Amazon.BedrockAgent.Type.Array,
+            "Int16[]" => Amazon.BedrockAgent.Type.Array,
+            "UInt16[]" => Amazon.BedrockAgent.Type.Array,
+            "Int32[]" => Amazon.BedrockAgent.Type.Array,
+            "UInt32[]" => Amazon.BedrockAgent.Type.Array,
+            "Int64[]" => Amazon.BedrockAgent.Type.Array,
+            "UInt64[]" => Amazon.BedrockAgent.Type.Array,
+            "Single[]" => Amazon.BedrockAgent.Type.Array,
+            "Double[]" => Amazon.BedrockAgent.Type.Array,
+            "Decimal[]" => Amazon.BedrockAgent.Type.Array,
+            _ => throw new ArgumentException($"Unsupported parameter type: {typeString}"),
+        };
+    }
+}
diff --git a/dotnet/src/Agents/Core/Properties/AssemblyInfo.cs b/dotnet/src/Agents/Bedrock/Properties/AssemblyInfo.cs
similarity index 100%
rename from dotnet/src/Agents/Core/Properties/AssemblyInfo.cs
rename to dotnet/src/Agents/Bedrock/Properties/AssemblyInfo.cs
diff --git a/dotnet/src/Agents/Bedrock/README.md b/dotnet/src/Agents/Bedrock/README.md
new file mode 100644
index 000000000000..d480985fc667
--- /dev/null
+++ b/dotnet/src/Agents/Bedrock/README.md
@@ -0,0 +1,27 @@
+# Amazon Bedrock AI Agents in Semantic Kernel
+
+## Overview
+
+Amazon Bedrock Agents is a managed service that allows users to quickly stand up and run AI agents in the AWS cloud.
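+
+Below is a minimal sketch of creating and invoking a `BedrockAgent`; the agent name, foundation model, role ARN, instructions, and user message are placeholder values rather than prescribed settings.
+
+```csharp
+using System;
+using Amazon.BedrockAgent;
+using Amazon.BedrockAgentRuntime;
+using Microsoft.SemanticKernel.Agents.Bedrock;
+using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
+
+var client = new AmazonBedrockAgentClient();
+var runtimeClient = new AmazonBedrockAgentRuntimeClient();
+
+// Create the agent on the Bedrock Agent service and wait until it is prepared.
+// All property values below are illustrative placeholders.
+var agentModel = await client.CreateAndPrepareAgentAsync(new()
+{
+    AgentName = "my-agent",
+    FoundationModel = "anthropic.claude-3-haiku-20240307-v1:0",
+    AgentResourceRoleArn = "arn:aws:iam::123456789012:role/BedrockAgentRole",
+    Instruction = "You are a helpful assistant.",
+});
+
+// Wrap the service-side agent in a Semantic Kernel BedrockAgent and invoke it.
+var agent = new BedrockAgent(agentModel, client, runtimeClient);
+var sessionId = BedrockAgent.CreateSessionId();
+await foreach (var message in agent.InvokeAsync(sessionId, "Hello!", arguments: null))
+{
+    Console.WriteLine(message.Content);
+}
+```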
+
+## Tools/Functions
+
+Bedrock Agents allow the use of tools via [action groups](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-action-create.html).
+
+The integration of Bedrock Agents with Semantic Kernel allows users to register kernel functions as tools in Bedrock Agents.
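+
+As a sketch, assuming `WeatherPlugin` is a plugin type defined elsewhere, kernel functions can be exposed to an existing `BedrockAgent` by registering them on the agent's kernel and creating a kernel function action group:
+
+```csharp
+// WeatherPlugin is an illustrative plugin type; its [KernelFunction] methods become the action group's functions.
+agent.Kernel.Plugins.AddFromType<WeatherPlugin>();
+
+// Create the action group from the registered kernel functions and re-prepare the agent.
+await agent.CreateKernelFunctionActionGroupAsync();
+```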
+
+## Enable code interpretation
+
+Bedrock Agents can write and execute code via a feature known as [code interpretation](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-code-interpretation.html), similar to what OpenAI offers.
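+
+For an existing `BedrockAgent`, code interpretation can be enabled by creating the corresponding action group:
+
+```csharp
+// Adds the Amazon-provided code interpreter action group and re-prepares the agent.
+await agent.CreateCodeInterpreterActionGroupAsync();
+```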
+
+## Enable user input
+
+Bedrock Agents can [request user input](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-user-input.html) in case of missing information to invoke a tool. When this is enabled, the agent will prompt the user for the missing information. When this is disabled, the agent will guess the missing information.
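+
+As a sketch, user input can be enabled on an existing `BedrockAgent` through the corresponding action group:
+
+```csharp
+// Adds the Amazon-provided user input action group and re-prepares the agent.
+await agent.EnableUserInputActionGroupAsync();
+```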
+
+## Knowledge base
+
+Bedrock Agents can leverage data saved on AWS to perform RAG tasks; this is referred to as the [knowledge base](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-kb-add.html) in AWS.
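+
+An existing knowledge base can be associated with a `BedrockAgent` as sketched below; the knowledge base id and description are placeholders:
+
+```csharp
+// Associate the agent with an existing knowledge base and re-prepare the agent.
+await agent.AssociateAgentKnowledgeBaseAsync(
+    knowledgeBaseId: "KB12345678",
+    description: "Product documentation used to ground the agent's answers.");
+```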
+
+## Multi-agent
+
+Bedrock Agents support [multi-agent workflows](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-multi-agent-collaboration.html) for more complex tasks. However, this employs a different pattern from the one used in Semantic Kernel, so it is not supported in the current integration.
diff --git a/dotnet/src/Agents/Core/AgentGroupChat.cs b/dotnet/src/Agents/Core/AgentGroupChat.cs
index 5d80f969eb4e..1cdb3c638d4b 100644
--- a/dotnet/src/Agents/Core/AgentGroupChat.cs
+++ b/dotnet/src/Agents/Core/AgentGroupChat.cs
@@ -1,6 +1,7 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System;
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Runtime.CompilerServices;
 using System.Threading;
@@ -8,35 +9,40 @@
 using Microsoft.Extensions.Logging;
 using Microsoft.Extensions.Logging.Abstractions;
 using Microsoft.SemanticKernel.Agents.Chat;
+using Microsoft.SemanticKernel.Agents.Extensions;
 
 namespace Microsoft.SemanticKernel.Agents;
 
 /// 
-/// A an  that supports multi-turn interactions.
+/// Represents an  that supports multi-turn interactions.
 /// 
+[Experimental("SKEXP0110")]
 public sealed class AgentGroupChat : AgentChat
 {
     private readonly HashSet _agentIds; // Efficient existence test O(1) vs O(n) for list.
     private readonly List _agents; // Maintain order the agents joined the chat
 
     /// 
-    /// Indicates if completion criteria has been met.  If set, no further
-    /// agent interactions will occur.  Clear to enable more agent interactions.
+    /// Gets or sets a value that indicates if the completion criteria have been met.
     /// 
+    /// 
+    ///  if the completion criteria have been met; otherwise .
+    /// The default is . Set to  to enable more agent interactions.
+    /// 
     public bool IsComplete { get; set; }
 
     /// 
-    /// Settings for defining chat behavior.
+    /// Gets or sets the settings for defining chat behavior.
     /// 
     public AgentGroupChatSettings ExecutionSettings { get; set; } = new AgentGroupChatSettings();
 
     /// 
-    /// The agents participating in the chat.
+    /// Gets the agents participating in the chat.
     /// 
     public override IReadOnlyList Agents => this._agents.AsReadOnly();
 
     /// 
-    /// Add a  to the chat.
+    /// Add an  to the chat.
     /// 
     /// The  to add.
     public void AddAgent(Agent agent)
@@ -48,14 +54,16 @@ public void AddAgent(Agent agent)
     }
 
     /// 
-    /// Process a series of interactions between the  that have joined this .
-    /// The interactions will proceed according to the  and the 
-    /// defined via .
-    /// In the absence of an , this method will not invoke any agents.
-    /// Any agent may be explicitly selected by calling .
+    /// Processes a series of interactions between the  that have joined this .
     /// 
+    /// 
+    /// The interactions will proceed according to the  and the
+    ///  defined via .
+    /// In the absence of an , this method does not invoke any agents.
+    /// Any agent can be explicitly selected by calling .
+    /// 
     /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of messages.
+    /// An asynchronous enumeration of messages.
     public override async IAsyncEnumerable InvokeAsync([EnumeratorCancellation] CancellationToken cancellationToken = default)
     {
         this.EnsureStrategyLoggerAssignment();
@@ -84,14 +92,16 @@ public override async IAsyncEnumerable InvokeAsync([Enumerat
     }
 
     /// 
-    /// Process a series of interactions between the  that have joined this .
-    /// The interactions will proceed according to the  and the 
-    /// defined via .
-    /// In the absence of an , this method will not invoke any agents.
-    /// Any agent may be explicitly selected by calling .
+    /// Processes a series of interactions between the  that have joined this .
     /// 
+    /// 
+    /// The interactions will proceed according to the  and the
+    ///  defined via .
+    /// In the absence of an , this method does not invoke any agents.
+    /// Any agent can be explicitly selected by calling .
+    /// 
     /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of streaming messages.
+    /// An asynchronous enumeration of streaming messages.
     public override async IAsyncEnumerable InvokeStreamingAsync([EnumeratorCancellation] CancellationToken cancellationToken = default)
     {
         this.EnsureStrategyLoggerAssignment();
@@ -120,21 +130,21 @@ public override async IAsyncEnumerable InvokeStream
     }
 
     /// 
-    /// Process a single interaction between a given  an a .
+    /// Processes a single interaction between a given  and an .
     /// 
     /// The agent actively interacting with the chat.
     /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of messages.
-    /// 
-    /// Specified agent joins the chat.
-    /// >
+    /// An asynchronous enumeration of messages.
+    /// 
+    /// The specified agent joins the chat.
+    /// 
     public async IAsyncEnumerable InvokeAsync(
         Agent agent,
         [EnumeratorCancellation] CancellationToken cancellationToken = default)
     {
         this.EnsureStrategyLoggerAssignment();
 
-        this.Logger.LogAgentGroupChatInvokingAgent(nameof(InvokeAsync), agent.GetType(), agent.Id);
+        this.Logger.LogAgentGroupChatInvokingAgent(nameof(InvokeAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
 
         this.AddAgent(agent);
 
@@ -149,21 +159,21 @@ public async IAsyncEnumerable InvokeAsync(
     }
 
     /// 
-    /// Process a single interaction between a given  an a .
+    /// Processes a single interaction between a given  and an .
     /// 
     /// The agent actively interacting with the chat.
     /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of messages.
-    /// 
-    /// Specified agent joins the chat.
-    /// 
+    /// An asynchronous enumeration of messages.
+    /// 
+    /// The specified agent joins the chat.
+    /// 
     public async IAsyncEnumerable InvokeStreamingAsync(
         Agent agent,
         [EnumeratorCancellation] CancellationToken cancellationToken = default)
     {
         this.EnsureStrategyLoggerAssignment();
 
-        this.Logger.LogAgentGroupChatInvokingAgent(nameof(InvokeAsync), agent.GetType(), agent.Id);
+        this.Logger.LogAgentGroupChatInvokingAgent(nameof(InvokeAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
 
         this.AddAgent(agent);
 
@@ -178,17 +188,17 @@ public async IAsyncEnumerable InvokeStreamingAsync(
     }
 
     /// 
-    /// Convenience method to create a  for a given strategy without HTML encoding the specified parameters.
+    /// Creates a  for a given strategy without HTML-encoding the specified parameters.
     /// 
     /// The prompt template string that defines the prompt.
     /// 
-    /// On optional  to use when interpreting the .
-    /// The default factory will be used when none is provided.
+    /// An optional  to use when interpreting the .
+    /// The default factory is used when none is provided.
     /// 
     /// The parameter names to exclude from being HTML encoded.
     /// A  created via  using the specified template.
     /// 
-    /// This is particularly targeted to easily avoid encoding the history used by 
+    /// This method is particularly targeted to easily avoid encoding the history used by 
     /// or .
     /// 
     public static KernelFunction CreatePromptFunctionForStrategy(string template, IPromptTemplateFactory? templateFactory = null, params string[] safeParameterNames)
@@ -255,7 +265,7 @@ private async Task SelectAgentAsync(CancellationToken cancellationToken)
             throw;
         }
 
-        this.Logger.LogAgentGroupChatSelectedAgent(nameof(InvokeAsync), agent.GetType(), agent.Id, this.ExecutionSettings.SelectionStrategy.GetType());
+        this.Logger.LogAgentGroupChatSelectedAgent(nameof(InvokeAsync), agent.GetType(), agent.Id, agent.GetDisplayName(), this.ExecutionSettings.SelectionStrategy.GetType());
 
         return agent;
     }
diff --git a/dotnet/src/Agents/Core/Agents.Core.csproj b/dotnet/src/Agents/Core/Agents.Core.csproj
index da87688ac22f..4311785f61c9 100644
--- a/dotnet/src/Agents/Core/Agents.Core.csproj
+++ b/dotnet/src/Agents/Core/Agents.Core.csproj
@@ -5,9 +5,9 @@
     Microsoft.SemanticKernel.Agents.Core
     Microsoft.SemanticKernel.Agents
     net8.0;netstandard2.0
-    $(NoWarn);SKEXP0110
+    $(NoWarn);SKEXP0110;SKEXP0001
     false
-    alpha
+    preview
   
 
   
@@ -22,10 +22,12 @@
     
     
     
+    
   
 
   
     
+    
   
 
   
diff --git a/dotnet/src/Agents/Core/Chat/AgentGroupChatSettings.cs b/dotnet/src/Agents/Core/Chat/AgentGroupChatSettings.cs
index f7b2d87fb7e8..e5399ab46133 100644
--- a/dotnet/src/Agents/Core/Chat/AgentGroupChatSettings.cs
+++ b/dotnet/src/Agents/Core/Chat/AgentGroupChatSettings.cs
@@ -1,32 +1,36 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Threading;
 using System.Threading.Tasks;
 
 namespace Microsoft.SemanticKernel.Agents.Chat;
 
 /// 
-/// Settings that affect behavior of .
+/// Provides settings that affect the behavior of  instances.
 /// 
 /// 
-/// Default behavior result in no agent selection.
+/// The default behavior results in no agent selection.
 /// 
+[Experimental("SKEXP0110")]
 public class AgentGroupChatSettings
 {
     /// 
-    /// Strategy for selecting the next agent.  Dfeault strategy limited to a single iteration and no termination criteria.
+    /// Gets the strategy for terminating the agent.
     /// 
-    /// 
-    /// See .
-    /// 
+    /// 
+    /// The strategy for terminating the agent. The default strategy is limited to a single iteration and has no termination criteria.
+    /// 
+    /// 
     public TerminationStrategy TerminationStrategy { get; init; } = new DefaultTerminationStrategy();
 
     /// 
-    /// Strategy for selecting the next agent.  Defaults to .
+    /// Gets the strategy for selecting the next agent.
     /// 
-    /// 
-    /// See .
-    /// 
+    /// 
+    /// The strategy for selecting the next agent. The default is .
+    /// 
+    /// 
     public SelectionStrategy SelectionStrategy { get; init; } = new SequentialSelectionStrategy();
 
     /// 
diff --git a/dotnet/src/Agents/Core/Chat/AggregatorTerminationStrategy.cs b/dotnet/src/Agents/Core/Chat/AggregatorTerminationStrategy.cs
index ca83ce407cbb..3a0e8d7fac7b 100644
--- a/dotnet/src/Agents/Core/Chat/AggregatorTerminationStrategy.cs
+++ b/dotnet/src/Agents/Core/Chat/AggregatorTerminationStrategy.cs
@@ -1,5 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
@@ -7,8 +8,9 @@
 namespace Microsoft.SemanticKernel.Agents.Chat;
 
 /// 
-/// Defines aggregation behavior for 
+/// Defines aggregation behavior for .
 /// 
+[Experimental("SKEXP0110")]
 public enum AggregateTerminationCondition
 {
     /// 
@@ -23,16 +25,20 @@ public enum AggregateTerminationCondition
 }
 
 /// 
-/// Aggregate a set of  objects.
+/// Provides methods to aggregate a set of  objects.
 /// 
-/// Set of strategies upon which to aggregate.
+/// The set of strategies upon which to aggregate.
+[Experimental("SKEXP0110")]
 public sealed class AggregatorTerminationStrategy(params TerminationStrategy[] strategies) : TerminationStrategy
 {
     private readonly TerminationStrategy[] _strategies = strategies;
 
     /// 
-    /// Logical operation for aggregation: All or Any (and/or). Default: All.
+    /// Gets the logical operation for aggregation.
     /// 
+    /// 
+    /// The logical operation for aggregation, which can be  or . The default is .
+    /// 
     public AggregateTerminationCondition Condition { get; init; } = AggregateTerminationCondition.All;
 
     /// 
diff --git a/dotnet/src/Agents/Core/Chat/KernelFunctionSelectionStrategy.cs b/dotnet/src/Agents/Core/Chat/KernelFunctionSelectionStrategy.cs
index fcfea6e1fa93..4fa3c001e2c8 100644
--- a/dotnet/src/Agents/Core/Chat/KernelFunctionSelectionStrategy.cs
+++ b/dotnet/src/Agents/Core/Chat/KernelFunctionSelectionStrategy.cs
@@ -1,19 +1,22 @@
 // Copyright (c) Microsoft. All rights reserved.
+
 using System;
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
-using Microsoft.SemanticKernel.Agents.History;
 using Microsoft.SemanticKernel.Agents.Internal;
+using Microsoft.SemanticKernel.ChatCompletion;
 
 namespace Microsoft.SemanticKernel.Agents.Chat;
 
 /// 
 /// Determines agent selection based on the evaluation of a .
 /// 
-/// A  used for selection criteria
+/// A  used for selection criteria.
 /// A kernel instance with services for function execution.
+[Experimental("SKEXP0110")]
 public class KernelFunctionSelectionStrategy(KernelFunction function, Kernel kernel) : SelectionStrategy
 {
     /// 
@@ -27,49 +30,49 @@ public class KernelFunctionSelectionStrategy(KernelFunction function, Kernel ker
     public const string DefaultHistoryVariableName = "_history_";
 
     /// 
-    /// The  key associated with the list of agent names when
+    /// Gets the  key associated with the list of agent names when
     /// invoking .
     /// 
     public string AgentsVariableName { get; init; } = DefaultAgentsVariableName;
 
     /// 
-    /// The  key associated with the chat history when
+    /// Gets the  key associated with the chat history when
     /// invoking .
     /// 
     public string HistoryVariableName { get; init; } = DefaultHistoryVariableName;
 
     /// 
-    /// Optional arguments used when invoking .
+    /// Gets the optional arguments used when invoking .
     /// 
     public KernelArguments? Arguments { get; init; }
 
     /// 
-    /// The  used when invoking .
+    /// Gets the  used when invoking .
     /// 
     public Kernel Kernel => kernel;
 
     /// 
-    /// The  invoked as selection criteria.
+    /// Gets the  invoked as selection criteria.
     /// 
     public KernelFunction Function { get; } = function;
 
     /// 
-    /// Only include agent name in history when invoking .
+    /// Gets a value that indicates whether only the agent name is included in the history when invoking .
     /// 
     public bool EvaluateNameOnly { get; init; }
 
     /// 
-    /// Optionally specify a  to reduce the history.
+    /// Gets an optional  to reduce the history.
     /// 
     public IChatHistoryReducer? HistoryReducer { get; init; }
 
     /// 
-    /// When set, will use  in the event of a failure to select an agent.
+    /// Gets a value that indicates whether  is used in the event of a failure to select an agent.
     /// 
     public bool UseInitialAgentAsFallback { get; init; }
 
     /// 
-    /// A callback responsible for translating the 
+    /// Gets a callback responsible for translating the 
     /// to the termination criteria.
     /// 
     public Func ResultParser { get; init; } = (result) => result.GetValue() ?? string.Empty;
diff --git a/dotnet/src/Agents/Core/Chat/KernelFunctionTerminationStrategy.cs b/dotnet/src/Agents/Core/Chat/KernelFunctionTerminationStrategy.cs
index 26ad20e747dc..707aa46af466 100644
--- a/dotnet/src/Agents/Core/Chat/KernelFunctionTerminationStrategy.cs
+++ b/dotnet/src/Agents/Core/Chat/KernelFunctionTerminationStrategy.cs
@@ -1,19 +1,22 @@
 // Copyright (c) Microsoft. All rights reserved.
+
 using System;
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
-using Microsoft.SemanticKernel.Agents.History;
 using Microsoft.SemanticKernel.Agents.Internal;
+using Microsoft.SemanticKernel.ChatCompletion;
 
 namespace Microsoft.SemanticKernel.Agents.Chat;
 
 /// 
 /// Signals termination based on the evaluation of a .
 /// 
-/// A  used for termination criteria
+/// A  used for termination criteria.
 /// A kernel instance with services for function execution.
+[Experimental("SKEXP0110")]
 public class KernelFunctionTerminationStrategy(KernelFunction function, Kernel kernel) : TerminationStrategy
 {
     /// 
@@ -27,45 +30,45 @@ public class KernelFunctionTerminationStrategy(KernelFunction function, Kernel k
     public const string DefaultHistoryVariableName = "_history_";
 
     /// 
-    /// The  key associated with the agent name when
+    /// Gets the  key associated with the agent name when
     /// invoking .
     /// 
     public string AgentVariableName { get; init; } = DefaultAgentVariableName;
 
     /// 
-    /// The  key associated with the chat history when
+    /// Gets the  key associated with the chat history when
     /// invoking .
     /// 
     public string HistoryVariableName { get; init; } = DefaultHistoryVariableName;
 
     /// 
-    /// Optional arguments used when invoking .
+    /// Gets optional arguments used when invoking .
     /// 
     public KernelArguments? Arguments { get; init; }
 
     /// 
-    /// The  used when invoking .
+    /// Gets the  used when invoking .
     /// 
     public Kernel Kernel => kernel;
 
     /// 
-    /// The  invoked as termination criteria.
+    /// Gets the  invoked as termination criteria.
     /// 
     public KernelFunction Function { get; } = function;
 
     /// 
-    /// Only include agent name in history when invoking .
+    /// Gets a value that indicates whether only the agent name is included in the history when invoking .
     /// 
     public bool EvaluateNameOnly { get; init; }
 
     /// 
-    /// A callback responsible for translating the 
+    /// Gets a callback responsible for translating the 
     /// to the termination criteria.
     /// 
     public Func ResultParser { get; init; } = (_) => true;
 
     /// 
-    /// Optionally specify a  to reduce the history.
+    /// Gets an optional  to reduce the history.
     /// 
     public IChatHistoryReducer? HistoryReducer { get; init; }
 
diff --git a/dotnet/src/Agents/Core/Chat/RegExTerminationStrategy.cs b/dotnet/src/Agents/Core/Chat/RegExTerminationStrategy.cs
index 2745a325ee88..0b84c09b8c79 100644
--- a/dotnet/src/Agents/Core/Chat/RegExTerminationStrategy.cs
+++ b/dotnet/src/Agents/Core/Chat/RegExTerminationStrategy.cs
@@ -1,5 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Text.RegularExpressions;
 using System.Threading;
@@ -11,6 +12,7 @@ namespace Microsoft.SemanticKernel.Agents.Chat;
 /// Signals termination when the most recent message matches against the defined regular expressions
 /// for the specified agent (if provided).
 /// 
+[Experimental("SKEXP0110")]
 public sealed class RegexTerminationStrategy : TerminationStrategy
 {
     private readonly Regex[] _expressions;
diff --git a/dotnet/src/Agents/Core/Chat/SelectionStrategy.cs b/dotnet/src/Agents/Core/Chat/SelectionStrategy.cs
index 1ba5fb502649..e9bca243ec9c 100644
--- a/dotnet/src/Agents/Core/Chat/SelectionStrategy.cs
+++ b/dotnet/src/Agents/Core/Chat/SelectionStrategy.cs
@@ -1,5 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Extensions.Logging;
@@ -8,35 +9,36 @@
 namespace Microsoft.SemanticKernel.Agents.Chat;
 
 /// 
-/// Base strategy class for selecting the next agent for a .
+/// Provides a base strategy class for selecting the next agent for an .
 /// 
+[Experimental("SKEXP0110")]
 public abstract class SelectionStrategy
 {
     /// 
-    /// Flag indicating if an agent has been selected (first time).
+    /// Gets a value that indicates whether an agent has been selected (for the first time).
     /// 
     protected bool HasSelected { get; private set; }
 
     /// 
-    /// An optional agent for initial selection.
+    /// Gets or sets an optional agent for initial selection.
     /// 
     /// 
-    /// Useful to avoid latency in initial agent selection.
+    /// Setting this property is useful to avoid latency in initial agent selection.
     /// 
     public Agent? InitialAgent { get; set; }
 
     /// 
-    /// The  associated with the .
+    /// Gets the  associated with the .
     /// 
     protected internal ILogger Logger { get; internal set; } = NullLogger.Instance;
 
     /// 
-    /// Determine which agent goes next.
+    /// Determines which agent goes next.
     /// 
     /// The agents participating in chat.
     /// The chat history.
     /// The  to monitor for cancellation requests. The default is .
-    /// The agent who shall take the next turn.
+    /// The agent that will take the next turn.
     public async Task NextAsync(IReadOnlyList agents, IReadOnlyList history, CancellationToken cancellationToken = default)
     {
         if (agents.Count == 0 && this.InitialAgent == null)
@@ -55,11 +57,11 @@ public async Task NextAsync(IReadOnlyList agents, IReadOnlyList
-    /// Determine which agent goes next.
+    /// Determines which agent goes next.
     /// 
     /// The agents participating in chat.
     /// The chat history.
     /// The  to monitor for cancellation requests. The default is .
-    /// The agent who shall take the next turn.
+    /// The agent that will take the next turn.
     protected abstract Task SelectAgentAsync(IReadOnlyList agents, IReadOnlyList history, CancellationToken cancellationToken = default);
 }
diff --git a/dotnet/src/Agents/Core/Chat/SequentialSelectionStrategy.cs b/dotnet/src/Agents/Core/Chat/SequentialSelectionStrategy.cs
index 4983d0752414..9f71372f38d4 100644
--- a/dotnet/src/Agents/Core/Chat/SequentialSelectionStrategy.cs
+++ b/dotnet/src/Agents/Core/Chat/SequentialSelectionStrategy.cs
@@ -1,20 +1,23 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Threading;
 using System.Threading.Tasks;
+using Microsoft.SemanticKernel.Agents.Extensions;
 
 namespace Microsoft.SemanticKernel.Agents.Chat;
 
 /// 
-/// Round-robin turn-taking strategy.  Agent order is based on the order
+/// Represents a round-robin turn-taking strategy. Agent order is based on the order
 /// in which they joined .
 /// 
+[Experimental("SKEXP0110")]
 public sealed class SequentialSelectionStrategy : SelectionStrategy
 {
     private int _index = -1;
 
     /// 
-    /// Reset selection to initial/first agent. Agent order is based on the order
+    /// Resets the selection to the initial (first) agent. Agent order is based on the order
     /// in which they joined .
     /// 
     public void Reset() => this._index = -1;
@@ -42,7 +45,7 @@ protected override Task SelectAgentAsync(IReadOnlyList agents, IRe
 
         Agent agent = agents[this._index];
 
-        this.Logger.LogSequentialSelectionStrategySelectedAgent(nameof(NextAsync), this._index, agents.Count, agent.Id);
+        this.Logger.LogSequentialSelectionStrategySelectedAgent(nameof(NextAsync), this._index, agents.Count, agent.Id, agent.GetDisplayName());
 
         return Task.FromResult(agent);
 
diff --git a/dotnet/src/Agents/Core/Chat/TerminationStrategy.cs b/dotnet/src/Agents/Core/Chat/TerminationStrategy.cs
index b50f6bd96d11..4a579a44fdf9 100644
--- a/dotnet/src/Agents/Core/Chat/TerminationStrategy.cs
+++ b/dotnet/src/Agents/Core/Chat/TerminationStrategy.cs
@@ -1,73 +1,81 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Extensions.Logging;
 using Microsoft.Extensions.Logging.Abstractions;
+using Microsoft.SemanticKernel.Agents.Extensions;
 
 namespace Microsoft.SemanticKernel.Agents.Chat;
 
 /// 
-/// Base strategy class for defining termination criteria for a .
+/// Provides a base strategy class for defining termination criteria for an .
 /// 
+[Experimental("SKEXP0110")]
 public abstract class TerminationStrategy
 {
     /// 
-    /// Restrict number of turns to a reasonable number (99).
+    /// Specifies a reasonable limit on the number of turns.
     /// 
     public const int DefaultMaximumIterations = 99;
 
     /// 
-    /// The maximum number of agent interactions for a given chat invocation.
-    /// Defaults to: .
+    /// Gets or sets the maximum number of agent interactions for a given chat invocation.
     /// 
+    /// 
+    /// The default is .
+    /// 
     public int MaximumIterations { get; set; } = DefaultMaximumIterations;
 
     /// 
-    /// Set to have automatically clear  if caller
+    /// Gets or sets a value that indicates whether 
+    /// is automatically cleared if the caller
     /// proceeds with invocation subsequent to achieving termination criteria.
     /// 
     public bool AutomaticReset { get; set; }
 
     /// 
-    /// Set of agents for which this strategy is applicable.  If not set,
-    /// any agent is evaluated.
+    /// Gets or sets the set of agents for which this strategy is applicable.
     /// 
+    /// 
+    /// By default, any agent is evaluated.
+    /// 
     public IReadOnlyList? Agents { get; set; }
 
     /// 
-    /// The  associated with the .
+    /// Gets the  associated with the .
     /// 
     protected internal ILogger Logger { get; internal set; } = NullLogger.Instance;
 
     /// 
-    /// Called to evaluate termination once  is evaluated.
+    /// Evaluates termination once  is evaluated.
     /// 
     protected abstract Task ShouldAgentTerminateAsync(Agent agent, IReadOnlyList history, CancellationToken cancellationToken);
 
     /// 
-    /// Evaluate the input message and determine if the chat has met its completion criteria.
+    /// Evaluates the input message and determines if the chat has met its completion criteria.
     /// 
     /// The agent actively interacting with the chat.
-    /// The most recent message
+    /// The most recent message.
     /// The  to monitor for cancellation requests. The default is .
-    /// True to terminate chat loop.
+    ///  if the chat loop should be terminated.
     public async Task ShouldTerminateAsync(Agent agent, IReadOnlyList history, CancellationToken cancellationToken = default)
     {
-        this.Logger.LogTerminationStrategyEvaluatingCriteria(nameof(ShouldTerminateAsync), agent.GetType(), agent.Id);
+        this.Logger.LogTerminationStrategyEvaluatingCriteria(nameof(ShouldTerminateAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
 
         // `Agents` must contain `agent`, if `Agents` not empty.
         if ((this.Agents?.Count ?? 0) > 0 && !this.Agents!.Any(a => a.Id == agent.Id))
         {
-            this.Logger.LogTerminationStrategyAgentOutOfScope(nameof(ShouldTerminateAsync), agent.GetType(), agent.Id);
+            this.Logger.LogTerminationStrategyAgentOutOfScope(nameof(ShouldTerminateAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
 
             return false;
         }
 
         bool shouldTerminate = await this.ShouldAgentTerminateAsync(agent, history, cancellationToken).ConfigureAwait(false);
 
-        this.Logger.LogTerminationStrategyEvaluatedCriteria(nameof(ShouldTerminateAsync), agent.GetType(), agent.Id, shouldTerminate);
+        this.Logger.LogTerminationStrategyEvaluatedCriteria(nameof(ShouldTerminateAsync), agent.GetType(), agent.Id, agent.GetDisplayName(), shouldTerminate);
 
         return shouldTerminate;
     }
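
For illustration, a minimal custom strategy built on the base class above might look like the following (the "approved" keyword is just an example trigger):

    // Assumed usings: System, System.Collections.Generic, System.Threading, System.Threading.Tasks,
    // Microsoft.SemanticKernel, Microsoft.SemanticKernel.Agents, Microsoft.SemanticKernel.Agents.Chat
    internal sealed class ApprovalTerminationStrategy : TerminationStrategy
    {
        // Terminate once the most recent message contains the word "approved".
        protected override Task<bool> ShouldAgentTerminateAsync(
            Agent agent, IReadOnlyList<ChatMessageContent> history, CancellationToken cancellationToken)
            => Task.FromResult(
                history.Count > 0 &&
                history[history.Count - 1].Content?.IndexOf("approved", StringComparison.OrdinalIgnoreCase) >= 0);
    }
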
diff --git a/dotnet/src/Agents/Core/ChatCompletionAgent.cs b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
index 770153bbfb1e..ed3f1ce3d2c6 100644
--- a/dotnet/src/Agents/Core/ChatCompletionAgent.cs
+++ b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
@@ -1,21 +1,26 @@
 // Copyright (c) Microsoft. All rights reserved.
+
+using System;
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Runtime.CompilerServices;
 using System.Text;
 using System.Text.Json;
 using System.Threading;
 using System.Threading.Tasks;
+using Microsoft.SemanticKernel.Agents.Extensions;
 using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Diagnostics;
 using Microsoft.SemanticKernel.Services;
 
 namespace Microsoft.SemanticKernel.Agents;
 
 /// 
-/// A  specialization based on .
+/// Represents a  specialization based on .
 /// 
 /// 
-/// NOTE: Enable  for agent plugins.
-/// ()
+/// NOTE: Enable  for agent plugins
+/// ().
 /// 
 public sealed class ChatCompletionAgent : ChatHistoryKernelAgent
 {
@@ -28,23 +33,105 @@ public ChatCompletionAgent() { }
     /// Initializes a new instance of the  class from
     /// a .
     /// 
-    /// Prompt template configuration
-    /// An optional factory to produce the  for the agent
-    /// 
-    /// When 'templateFactory' parameter is not provided, the default  is used.
-    /// 
+    /// The prompt template configuration.
+    /// The prompt template factory used to produce the  for the agent.
     public ChatCompletionAgent(
         PromptTemplateConfig templateConfig,
-        IPromptTemplateFactory? templateFactory = null)
+        IPromptTemplateFactory templateFactory)
     {
         this.Name = templateConfig.Name;
         this.Description = templateConfig.Description;
         this.Instructions = templateConfig.Template;
-        this.Template = templateFactory?.Create(templateConfig);
+        this.Arguments = new(templateConfig.ExecutionSettings.Values);
+        this.Template = templateFactory.Create(templateConfig);
+    }
+
+    /// 
+    /// Gets the role used for agent instructions.  Defaults to "system".
+    /// 
+    /// 
+    /// Certain versions of the "O*" series (deep reasoning) models require the instructions
+    /// to be provided in the "developer" role.  Other versions support neither role, and
+    /// an agent targeting such a model cannot provide instructions.  In that case, agent
+    /// functionality is dictated entirely by the provided plugins.
+    /// 
+    public AuthorRole InstructionsRole { get; init; } = AuthorRole.System;
+
+    /// 
+    public override IAsyncEnumerable InvokeAsync(
+        ChatHistory history,
+        KernelArguments? arguments = null,
+        Kernel? kernel = null,
+        CancellationToken cancellationToken = default)
+    {
+        string agentName = this.GetDisplayName();
+
+        return ActivityExtensions.RunWithActivityAsync(
+            () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, agentName, this.Description),
+            () => this.InternalInvokeAsync(agentName, history, arguments, kernel, cancellationToken),
+            cancellationToken);
     }
 
     /// 
-    public override async IAsyncEnumerable InvokeAsync(
+    public override IAsyncEnumerable InvokeStreamingAsync(
+        ChatHistory history,
+        KernelArguments? arguments = null,
+        Kernel? kernel = null,
+        CancellationToken cancellationToken = default)
+    {
+        string agentName = this.GetDisplayName();
+
+        return ActivityExtensions.RunWithActivityAsync(
+            () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, agentName, this.Description),
+            () => this.InternalInvokeStreamingAsync(agentName, history, arguments, kernel, cancellationToken),
+            cancellationToken);
+    }
+
+    /// 
+    [Experimental("SKEXP0110")]
+    protected override Task RestoreChannelAsync(string channelState, CancellationToken cancellationToken)
+    {
+        ChatHistory history =
+            JsonSerializer.Deserialize(channelState) ??
+            throw new KernelException("Unable to restore channel: invalid state.");
+        return Task.FromResult(new ChatHistoryChannel(history));
+    }
+
+    internal static (IChatCompletionService service, PromptExecutionSettings? executionSettings) GetChatCompletionService(Kernel kernel, KernelArguments? arguments)
+    {
+        (IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) =
+            kernel.ServiceSelector.SelectAIService(
+                kernel,
+                arguments?.ExecutionSettings,
+                arguments ?? []);
+
+        return (chatCompletionService, executionSettings);
+    }
+
+    #region private
+
+    private async Task SetupAgentChatHistoryAsync(
+        IReadOnlyList history,
+        KernelArguments? arguments,
+        Kernel kernel,
+        CancellationToken cancellationToken)
+    {
+        ChatHistory chat = [];
+
+        string? instructions = await this.FormatInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
+
+        if (!string.IsNullOrWhiteSpace(instructions))
+        {
+            chat.Add(new ChatMessageContent(this.InstructionsRole, instructions) { AuthorName = this.Name });
+        }
+
+        chat.AddRange(history);
+
+        return chat;
+    }
+
+    private async IAsyncEnumerable InternalInvokeAsync(
+        string agentName,
         ChatHistory history,
         KernelArguments? arguments = null,
         Kernel? kernel = null,
@@ -59,7 +146,9 @@ public override async IAsyncEnumerable InvokeAsync(
 
         int messageCount = chat.Count;
 
-        this.Logger.LogAgentChatServiceInvokingAgent(nameof(InvokeAsync), this.Id, chatCompletionService.GetType());
+        Type serviceType = chatCompletionService.GetType();
+
+        this.Logger.LogAgentChatServiceInvokingAgent(nameof(InvokeAsync), this.Id, agentName, serviceType);
 
         IReadOnlyList messages =
             await chatCompletionService.GetChatMessageContentsAsync(
@@ -68,7 +157,7 @@ await chatCompletionService.GetChatMessageContentsAsync(
                 kernel,
                 cancellationToken).ConfigureAwait(false);
 
-        this.Logger.LogAgentChatServiceInvokedAgent(nameof(InvokeAsync), this.Id, chatCompletionService.GetType(), messages.Count);
+        this.Logger.LogAgentChatServiceInvokedAgent(nameof(InvokeAsync), this.Id, agentName, serviceType, messages.Count);
 
         // Capture mutated messages related function calling / tools
         for (int messageIndex = messageCount; messageIndex < chat.Count; messageIndex++)
@@ -88,8 +177,8 @@ await chatCompletionService.GetChatMessageContentsAsync(
         }
     }
 
-    /// 
-    public override async IAsyncEnumerable InvokeStreamingAsync(
+    private async IAsyncEnumerable InternalInvokeStreamingAsync(
+        string agentName,
         ChatHistory history,
         KernelArguments? arguments = null,
         Kernel? kernel = null,
@@ -104,7 +193,9 @@ public override async IAsyncEnumerable InvokeStream
 
         int messageCount = chat.Count;
 
-        this.Logger.LogAgentChatServiceInvokingAgent(nameof(InvokeAsync), this.Id, chatCompletionService.GetType());
+        Type serviceType = chatCompletionService.GetType();
+
+        this.Logger.LogAgentChatServiceInvokingAgent(nameof(InvokeAsync), this.Id, agentName, serviceType);
 
         IAsyncEnumerable messages =
             chatCompletionService.GetStreamingChatMessageContentsAsync(
@@ -113,7 +204,7 @@ public override async IAsyncEnumerable InvokeStream
                 kernel,
                 cancellationToken);
 
-        this.Logger.LogAgentChatServiceInvokedStreamingAgent(nameof(InvokeAsync), this.Id, chatCompletionService.GetType());
+        this.Logger.LogAgentChatServiceInvokedStreamingAgent(nameof(InvokeAsync), this.Id, agentName, serviceType);
 
         AuthorRole? role = null;
         StringBuilder builder = new();
@@ -145,45 +236,5 @@ public override async IAsyncEnumerable InvokeStream
         }
     }
 
-    /// 
-    protected override Task RestoreChannelAsync(string channelState, CancellationToken cancellationToken)
-    {
-        ChatHistory history =
-            JsonSerializer.Deserialize(channelState) ??
-            throw new KernelException("Unable to restore channel: invalid state.");
-        return Task.FromResult(new ChatHistoryChannel(history));
-    }
-
-    internal static (IChatCompletionService service, PromptExecutionSettings? executionSettings) GetChatCompletionService(Kernel kernel, KernelArguments? arguments)
-    {
-        // Need to provide a KernelFunction to the service selector as a container for the execution-settings.
-        KernelFunction nullPrompt = KernelFunctionFactory.CreateFromPrompt("placeholder", arguments?.ExecutionSettings?.Values);
-        (IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) =
-            kernel.ServiceSelector.SelectAIService(
-                kernel,
-                nullPrompt,
-                arguments ?? []);
-
-        return (chatCompletionService, executionSettings);
-    }
-
-    private async Task SetupAgentChatHistoryAsync(
-        IReadOnlyList history,
-        KernelArguments? arguments,
-        Kernel kernel,
-        CancellationToken cancellationToken)
-    {
-        ChatHistory chat = [];
-
-        string? instructions = await this.FormatInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
-
-        if (!string.IsNullOrWhiteSpace(instructions))
-        {
-            chat.Add(new ChatMessageContent(AuthorRole.System, instructions) { AuthorName = this.Name });
-        }
-
-        chat.AddRange(history);
-
-        return chat;
-    }
+    #endregion
 }
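
A minimal sketch of opting into the new InstructionsRole property (the agent name, instructions, and role label are placeholders; kernel construction is omitted):

    // Assumed usings: Microsoft.SemanticKernel, Microsoft.SemanticKernel.Agents, Microsoft.SemanticKernel.ChatCompletion
    ChatCompletionAgent agent = new()
    {
        Name = "Reviewer",
        Instructions = "Review the latest proposal and summarize open issues.",
        // Some deep-reasoning models expect instructions in the "developer" role instead of "system".
        InstructionsRole = new AuthorRole("developer"),
        Kernel = kernel, // an existing Kernel configured with an IChatCompletionService
    };
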
diff --git a/dotnet/src/Agents/Core/ChatHistoryChannel.cs b/dotnet/src/Agents/Core/ChatHistoryChannel.cs
index 78345f084b3f..4b44a5cd9fec 100644
--- a/dotnet/src/Agents/Core/ChatHistoryChannel.cs
+++ b/dotnet/src/Agents/Core/ChatHistoryChannel.cs
@@ -1,5 +1,7 @@
 // Copyright (c) Microsoft. All rights reserved.
+using System;
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Runtime.CompilerServices;
 using System.Text.Json;
@@ -12,10 +14,20 @@
 namespace Microsoft.SemanticKernel.Agents;
 
 /// 
-/// A  specialization for that acts upon a .
+/// Represents an  specialization that acts upon a .
 /// 
+[Experimental("SKEXP0110")]
 internal sealed class ChatHistoryChannel : AgentChannel
 {
+    // Supported content types for  when
+    //  is empty.
+    private static readonly HashSet s_contentMap =
+        [
+            typeof(FunctionCallContent),
+            typeof(FunctionResultContent),
+            typeof(ImageContent),
+        ];
+
     private readonly ChatHistory _history;
 
     /// 
@@ -105,7 +117,11 @@ protected override async IAsyncEnumerable InvokeStr
     /// 
     protected override Task ReceiveAsync(IEnumerable history, CancellationToken cancellationToken)
     {
-        this._history.AddRange(history);
+        // Only add messages with valid content or supported content-items.
+        this._history.AddRange(
+            history.Where(
+                m => !string.IsNullOrEmpty(m.Content) ||
+                m.Items.Where(i => s_contentMap.Contains(i.GetType())).Any()));
 
         return Task.CompletedTask;
     }
diff --git a/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs b/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs
index 0eee62920027..f3572a75d3c2 100644
--- a/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs
+++ b/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs
@@ -1,40 +1,42 @@
 // Copyright (c) Microsoft. All rights reserved.
+
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Globalization;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Extensions.Logging;
-using Microsoft.SemanticKernel.Agents.History;
 using Microsoft.SemanticKernel.ChatCompletion;
 
 namespace Microsoft.SemanticKernel.Agents;
 
 /// 
-/// A  specialization bound to a .
+/// Represents a  specialization bound to a .
 /// 
 /// 
-/// NOTE: Enable  for agent plugins.
-/// ()
+/// NOTE: Enable  for agent plugins
+/// ().
 /// 
 public abstract class ChatHistoryKernelAgent : KernelAgent
 {
     /// 
-    /// Optionally specify a  to reduce the history.
+    /// Gets an optional  to reduce the history.
     /// 
     /// 
-    /// This is automatically applied to the history before invoking the agent, only when using
-    /// an .  It must be explicitly applied via .
+    /// The reducer is automatically applied to the history before invoking the agent, only when using
+    /// an . It must be explicitly applied via .
     /// 
+    [Experimental("SKEXP0110")]
     public IChatHistoryReducer? HistoryReducer { get; init; }
 
     /// 
-    /// Invoke the assistant to respond to the provided history.
+    /// Invokes the assistant to respond to the provided history.
     /// 
     /// The conversation history.
     /// Optional arguments to pass to the agents's invocation, including any .
     /// The  containing services, plugins, and other state for use by the agent.
     /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of response messages.
+    /// An asynchronous enumeration of response messages.
     public abstract IAsyncEnumerable InvokeAsync(
         ChatHistory history,
         KernelArguments? arguments = null,
@@ -42,13 +44,13 @@ public abstract IAsyncEnumerable InvokeAsync(
         CancellationToken cancellationToken = default);
 
     /// 
-    /// Invoke the assistant to respond to the provided history with streaming response.
+    /// Invokes the assistant to respond to the provided history with streaming response.
     /// 
     /// The conversation history.
     /// Optional arguments to pass to the agents's invocation, including any .
     /// The  containing services, plugins, and other state for use by the agent.
     /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of response messages.
+    /// An asynchronous enumeration of response messages.
     public abstract IAsyncEnumerable InvokeStreamingAsync(
         ChatHistory history,
         KernelArguments? arguments = null,
@@ -56,15 +58,17 @@ public abstract IAsyncEnumerable InvokeStreamingAsy
         CancellationToken cancellationToken = default);
 
     /// 
-    /// Reduce the provided history
+    /// Reduces the provided history.
     /// 
-    /// The source history
+    /// The source history.
     /// The  to monitor for cancellation requests. The default is .
-    /// True if reduction has occurred.
+    ///  if reduction occurred.
+    [Experimental("SKEXP0110")]
     public Task ReduceAsync(ChatHistory history, CancellationToken cancellationToken = default) =>
-        history.ReduceAsync(this.HistoryReducer, cancellationToken);
+        history.ReduceInPlaceAsync(this.HistoryReducer, cancellationToken);
 
     /// 
+    [Experimental("SKEXP0110")]
     protected sealed override IEnumerable GetChannelKeys()
     {
         yield return typeof(ChatHistoryChannel).FullName!;
@@ -82,12 +86,13 @@ protected sealed override IEnumerable GetChannelKeys()
     }
 
     /// 
+    [Experimental("SKEXP0110")]
     protected sealed override Task CreateChannelAsync(CancellationToken cancellationToken)
     {
         ChatHistoryChannel channel =
             new()
             {
-                Logger = this.LoggerFactory.CreateLogger()
+                Logger = this.ActiveLoggerFactory.CreateLogger()
             };
 
         return Task.FromResult(channel);
diff --git a/dotnet/src/Agents/Core/History/IChatHistoryReducer.cs b/dotnet/src/Agents/Core/History/IChatHistoryReducer.cs
deleted file mode 100644
index 884fbcf42bc1..000000000000
--- a/dotnet/src/Agents/Core/History/IChatHistoryReducer.cs
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
-using System.Threading;
-using System.Threading.Tasks;
-
-namespace Microsoft.SemanticKernel.Agents.History;
-
-/// 
-/// Defines a contract for a reducing chat history.
-/// 
-public interface IChatHistoryReducer
-{
-    /// 
-    /// Each reducer shall override equality evaluation so that different reducers
-    /// of the same configuration can be evaluated for equivalency.
-    /// 
-    bool Equals(object? obj);
-
-    /// 
-    /// Each reducer shall implement custom hash-code generation so that different reducers
-    /// of the same configuration can be evaluated for equivalency.
-    /// 
-    int GetHashCode();
-
-    /// 
-    /// Optionally reduces the chat history.
-    /// 
-    /// The source history (which may have been previously reduced)
-    /// The  to monitor for cancellation requests. The default is .
-    /// The reduced history, or 'null' if no reduction has occurred
-    Task?> ReduceAsync(IReadOnlyList history, CancellationToken cancellationToken = default);
-}
diff --git a/dotnet/src/Agents/Core/Internal/ChatMessageForPrompt.cs b/dotnet/src/Agents/Core/Internal/ChatMessageForPrompt.cs
index 8d970988466b..ecedad3c04af 100644
--- a/dotnet/src/Agents/Core/Internal/ChatMessageForPrompt.cs
+++ b/dotnet/src/Agents/Core/Internal/ChatMessageForPrompt.cs
@@ -24,7 +24,9 @@ internal sealed class ChatMessageForPrompt(ChatMessageContent message)
     /// The referenced  property.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+#pragma warning disable SKEXP0001
     public string? Name => message.AuthorName;
+#pragma warning restore SKEXP0001
 
     /// 
     /// The referenced  property.
diff --git a/dotnet/src/Agents/Core/Logging/AgentGroupChatLogMessages.cs b/dotnet/src/Agents/Core/Logging/AgentGroupChatLogMessages.cs
index 03b9d27f1c8d..59835f576c4f 100644
--- a/dotnet/src/Agents/Core/Logging/AgentGroupChatLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/AgentGroupChatLogMessages.cs
@@ -4,6 +4,7 @@
 using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Agents.Extensions;
 
 namespace Microsoft.SemanticKernel.Agents;
 
@@ -17,6 +18,7 @@ namespace Microsoft.SemanticKernel.Agents;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
+[Experimental("SKEXP0110")]
 internal static partial class AgentGroupChatLogMessages
 {
     /// 
@@ -25,12 +27,13 @@ internal static partial class AgentGroupChatLogMessages
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Debug,
-        Message = "[{MethodName}] Invoking chat: {AgentType}: {AgentId}")]
+        Message = "[{MethodName}] Invoking chat: {AgentType}: {AgentId}/{AgentName}")]
     public static partial void LogAgentGroupChatInvokingAgent(
         this ILogger logger,
         string methodName,
         Type agentType,
-        string agentId);
+        string agentId,
+        string agentName);
 
     /// 
     /// Logs  invoking agents (started).
@@ -40,6 +43,7 @@ public static partial void LogAgentGroupChatInvokingAgent(
             logLevel: LogLevel.Debug,
             eventId: 0,
             "[{MethodName}] Invoking chat: {Agents}");
+
     public static void LogAgentGroupChatInvokingAgents(
         this ILogger logger,
         string methodName,
@@ -47,7 +51,9 @@ public static void LogAgentGroupChatInvokingAgents(
     {
         if (logger.IsEnabled(LogLevel.Debug))
         {
-            s_logAgentGroupChatInvokingAgents(logger, methodName, string.Join(", ", agents.Select(a => $"{a.GetType()}:{a.Id}")), null);
+            var agentsMessage = string.Join(", ", agents.Select(a => $"{a.GetType()}:{a.Id}/{a.GetDisplayName()}"));
+
+            s_logAgentGroupChatInvokingAgents(logger, methodName, agentsMessage, null);
         }
     }
 
@@ -81,12 +87,13 @@ public static partial void LogAgentGroupChatNoAgentSelected(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Information,
-        Message = "[{MethodName}] Agent selected {AgentType}: {AgentId} by {StrategyType}")]
+        Message = "[{MethodName}] Agent selected {AgentType}: {AgentId}/{AgentName} by {StrategyType}")]
     public static partial void LogAgentGroupChatSelectedAgent(
         this ILogger logger,
         string methodName,
         Type agentType,
         string agentId,
+        string agentName,
         Type strategyType);
 
     /// 
diff --git a/dotnet/src/Agents/Core/Logging/AggregatorTerminationStrategyLogMessages.cs b/dotnet/src/Agents/Core/Logging/AggregatorTerminationStrategyLogMessages.cs
index 777ec8806ec7..de2e18d63d8c 100644
--- a/dotnet/src/Agents/Core/Logging/AggregatorTerminationStrategyLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/AggregatorTerminationStrategyLogMessages.cs
@@ -14,6 +14,7 @@ namespace Microsoft.SemanticKernel.Agents.Chat;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
+[Experimental("SKEXP0110")]
 internal static partial class AggregatorTerminationStrategyLogMessages
 {
     /// 
diff --git a/dotnet/src/Agents/Core/Logging/ChatCompletionAgentLogMessages.cs b/dotnet/src/Agents/Core/Logging/ChatCompletionAgentLogMessages.cs
index 038c19359cc8..e34a6d102b8f 100644
--- a/dotnet/src/Agents/Core/Logging/ChatCompletionAgentLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/ChatCompletionAgentLogMessages.cs
@@ -23,11 +23,12 @@ internal static partial class ChatCompletionAgentLogMessages
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Debug,
-        Message = "[{MethodName}] Agent #{AgentId} Invoking service {ServiceType}.")]
+        Message = "[{MethodName}] Agent {AgentId}/{AgentName} Invoking service {ServiceType}.")]
     public static partial void LogAgentChatServiceInvokingAgent(
         this ILogger logger,
         string methodName,
         string agentId,
+        string agentName,
         Type serviceType);
 
     /// 
@@ -36,11 +37,12 @@ public static partial void LogAgentChatServiceInvokingAgent(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Information,
-        Message = "[{MethodName}] Agent #{AgentId} Invoked service {ServiceType} with message count: {MessageCount}.")]
+        Message = "[{MethodName}] Agent {AgentId}/{AgentName} Invoked service {ServiceType} with message count: {MessageCount}.")]
     public static partial void LogAgentChatServiceInvokedAgent(
         this ILogger logger,
         string methodName,
         string agentId,
+        string agentName,
         Type serviceType,
         int messageCount);
 
@@ -50,10 +52,11 @@ public static partial void LogAgentChatServiceInvokedAgent(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Information,
-        Message = "[{MethodName}] Agent #{AgentId} Invoked service {ServiceType}.")]
+        Message = "[{MethodName}] Agent {AgentId}/{AgentName} Invoked service {ServiceType}.")]
     public static partial void LogAgentChatServiceInvokedStreamingAgent(
         this ILogger logger,
         string methodName,
         string agentId,
+        string agentName,
         Type serviceType);
 }
diff --git a/dotnet/src/Agents/Core/Logging/KernelFunctionSelectionStrategyLogMessages.cs b/dotnet/src/Agents/Core/Logging/KernelFunctionSelectionStrategyLogMessages.cs
index c846f5e2534e..0da707a0c096 100644
--- a/dotnet/src/Agents/Core/Logging/KernelFunctionSelectionStrategyLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/KernelFunctionSelectionStrategyLogMessages.cs
@@ -15,6 +15,7 @@ namespace Microsoft.SemanticKernel.Agents.Chat;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
+[Experimental("SKEXP0110")]
 internal static partial class KernelFunctionStrategyLogMessages
 {
     /// 
diff --git a/dotnet/src/Agents/Core/Logging/KernelFunctionTerminationStrategyLogMessages.cs b/dotnet/src/Agents/Core/Logging/KernelFunctionTerminationStrategyLogMessages.cs
index 61a4dea167b5..bd110c54fc8c 100644
--- a/dotnet/src/Agents/Core/Logging/KernelFunctionTerminationStrategyLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/KernelFunctionTerminationStrategyLogMessages.cs
@@ -15,6 +15,7 @@ namespace Microsoft.SemanticKernel.Agents.Chat;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
+[Experimental("SKEXP0110")]
 internal static partial class KernelFunctionTerminationStrategyLogMessages
 {
     /// 
diff --git a/dotnet/src/Agents/Core/Logging/RegExTerminationStrategyLogMessages.cs b/dotnet/src/Agents/Core/Logging/RegExTerminationStrategyLogMessages.cs
index a748158252b7..0f85053bb570 100644
--- a/dotnet/src/Agents/Core/Logging/RegExTerminationStrategyLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/RegExTerminationStrategyLogMessages.cs
@@ -15,6 +15,7 @@ namespace Microsoft.SemanticKernel.Agents.Chat;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
+[Experimental("SKEXP0110")]
 internal static partial class RegExTerminationStrategyLogMessages
 {
     /// 
diff --git a/dotnet/src/Agents/Core/Logging/SequentialSelectionStrategyLogMessages.cs b/dotnet/src/Agents/Core/Logging/SequentialSelectionStrategyLogMessages.cs
index e201dddcd9c0..6b32b574dd69 100644
--- a/dotnet/src/Agents/Core/Logging/SequentialSelectionStrategyLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/SequentialSelectionStrategyLogMessages.cs
@@ -14,6 +14,7 @@ namespace Microsoft.SemanticKernel.Agents.Chat;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
+[Experimental("SKEXP0110")]
 internal static partial class SequentialSelectionStrategyLogMessages
 {
     /// 
@@ -22,11 +23,12 @@ internal static partial class SequentialSelectionStrategyLogMessages
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Information,
-        Message = "[{MethodName}] Selected agent ({AgentIndex} / {AgentCount}): {AgentId}")]
+        Message = "[{MethodName}] Selected agent ({AgentIndex} / {AgentCount}): {AgentId}/{AgentName}")]
     public static partial void LogSequentialSelectionStrategySelectedAgent(
         this ILogger logger,
         string methodName,
         int agentIndex,
         int agentCount,
-        string agentId);
+        string agentId,
+        string agentName);
 }
diff --git a/dotnet/src/Agents/Core/Logging/TerminationStrategyLogMessages.cs b/dotnet/src/Agents/Core/Logging/TerminationStrategyLogMessages.cs
index adbf5ad7b689..365c262c819f 100644
--- a/dotnet/src/Agents/Core/Logging/TerminationStrategyLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/TerminationStrategyLogMessages.cs
@@ -15,6 +15,7 @@ namespace Microsoft.SemanticKernel.Agents.Chat;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
+[Experimental("SKEXP0110")]
 internal static partial class TerminationStrategyLogMessages
 {
     /// 
@@ -23,12 +24,13 @@ internal static partial class TerminationStrategyLogMessages
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Debug,
-        Message = "[{MethodName}] Evaluating termination for agent {AgentType}: {AgentId}.")]
+        Message = "[{MethodName}] Evaluating termination for agent {AgentType}: {AgentId}/{AgentName}.")]
     public static partial void LogTerminationStrategyEvaluatingCriteria(
         this ILogger logger,
         string methodName,
         Type agentType,
-        string agentId);
+        string agentId,
+        string agentName);
 
     /// 
     /// Logs  agent out of scope.
@@ -36,12 +38,13 @@ public static partial void LogTerminationStrategyEvaluatingCriteria(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Debug,
-        Message = "[{MethodName}] {AgentType} agent out of scope for termination: {AgentId}.")]
+        Message = "[{MethodName}] {AgentType} agent out of scope for termination: {AgentId}/{AgentName}.")]
     public static partial void LogTerminationStrategyAgentOutOfScope(
         this ILogger logger,
         string methodName,
         Type agentType,
-        string agentId);
+        string agentId,
+        string agentName);
 
     /// 
     /// Logs  evaluated criteria (complete).
@@ -49,11 +52,12 @@ public static partial void LogTerminationStrategyAgentOutOfScope(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Information,
-        Message = "[{MethodName}] Evaluated termination for agent {AgentType}: {AgentId} - {TerminationResult}")]
+        Message = "[{MethodName}] Evaluated termination for agent {AgentType}: {AgentId}/{AgentName} - {TerminationResult}")]
     public static partial void LogTerminationStrategyEvaluatedCriteria(
         this ILogger logger,
         string methodName,
         Type agentType,
         string agentId,
+        string agentName,
         bool terminationResult);
 }
diff --git a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
index 71747e21ffad..315389afd386 100644
--- a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
+++ b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
@@ -5,9 +5,9 @@
     Microsoft.SemanticKernel.Agents.OpenAI
     Microsoft.SemanticKernel.Agents.OpenAI
     net8.0;netstandard2.0
-    $(NoWarn);SKEXP0110
+    $(NoWarn);SKEXP0110;SKEXP0001;OPENAI001;NU5104
     false
-    alpha
+    preview
   
 
   
@@ -15,11 +15,12 @@
   
     
     Semantic Kernel Agents - OpenAI
-    Defines core a concrete Agent based on the OpenAI Assistant API.
+    Defines a concrete Agent based on the OpenAI Assistant API.
   
 
   
     
+    
     
     
     
@@ -29,6 +30,8 @@
     
   
 
+  
+
   
     
   
diff --git a/dotnet/src/Agents/OpenAI/Extensions/AssistantClientExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/AssistantClientExtensions.cs
new file mode 100644
index 000000000000..706186df7e68
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/Extensions/AssistantClientExtensions.cs
@@ -0,0 +1,172 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+using OpenAI.Assistants;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+/// 
+/// Convenience extensions for .
+/// 
+public static class AssistantClientExtensions
+{
+    /// 
+    /// Creates an assistant asynchronously with the specified options.
+    /// 
+    /// The assistant client.
+    /// The model identifier.
+    /// The name of the assistant.
+    /// The description of the assistant.
+    /// The instructions for the assistant.
+    /// Whether to enable the code interpreter tool.
+    /// The file IDs for the code interpreter tool.
+    /// Whether to enable the file search tool.
+    /// The vector store identifier.
+    /// The temperature setting for the assistant.
+    /// The nucleus sampling factor for the assistant.
+    /// The response format for the assistant.
+    /// The metadata for the assistant.
+    /// The cancellation token.
+    /// A task that represents the asynchronous operation. The task result contains the created assistant.
+    public static async Task CreateAssistantAsync(
+        this AssistantClient client,
+        string modelId,
+        string? name = null,
+        string? description = null,
+        string? instructions = null,
+        bool enableCodeInterpreter = false,
+        IReadOnlyList? codeInterpreterFileIds = null,
+        bool enableFileSearch = false,
+        string? vectorStoreId = null,
+        float? temperature = null,
+        float? topP = null,
+        AssistantResponseFormat? responseFormat = null,
+        IReadOnlyDictionary? metadata = null,
+        CancellationToken cancellationToken = default)
+    {
+        AssistantCreationOptions options =
+            new()
+            {
+                Name = name,
+                Description = description,
+                Instructions = instructions,
+                Temperature = temperature,
+                NucleusSamplingFactor = topP,
+                ResponseFormat = responseFormat,
+            };
+
+        if (metadata != null)
+        {
+            foreach (KeyValuePair item in metadata)
+            {
+                options.Metadata[item.Key] = item.Value;
+            }
+        }
+
+        if (enableCodeInterpreter || (codeInterpreterFileIds?.Count ?? 0) > 0)
+        {
+            options.Tools.Add(ToolDefinition.CreateCodeInterpreter());
+        }
+
+        if (enableFileSearch || !string.IsNullOrEmpty(vectorStoreId))
+        {
+            options.Tools.Add(ToolDefinition.CreateFileSearch());
+        }
+
+        options.ToolResources = AssistantToolResourcesFactory.GenerateToolResources(vectorStoreId, codeInterpreterFileIds);
+
+        Assistant assistant = await client.CreateAssistantAsync(modelId, options, cancellationToken).ConfigureAwait(false);
+
+        return assistant;
+    }
+
+    /// 
+    /// Creates an assistant from a template asynchronously with the specified options.
+    /// 
+    /// The assistant client.
+    /// The model identifier.
+    /// The prompt template configuration.
+    /// Whether to enable the code interpreter tool.
+    /// The file IDs for the code interpreter tool.
+    /// Whether to enable the file search tool.
+    /// The vector store identifier.
+    /// The temperature setting for the assistant.
+    /// The nucleus sampling factor for the assistant.
+    /// The response format for the assistant.
+    /// The metadata for the assistant.
+    /// The cancellation token.
+    /// A task that represents the asynchronous operation. The task result contains the created assistant.
+    public static Task CreateAssistantFromTemplateAsync(
+        this AssistantClient client,
+        string modelId,
+        PromptTemplateConfig config,
+        bool enableCodeInterpreter = false,
+        IReadOnlyList? codeInterpreterFileIds = null,
+        bool enableFileSearch = false,
+        string? vectorStoreId = null,
+        float? temperature = null,
+        float? topP = null,
+        AssistantResponseFormat? responseFormat = null,
+        IReadOnlyDictionary? metadata = null,
+        CancellationToken cancellationToken = default)
+    {
+        return
+            client.CreateAssistantAsync(
+                modelId,
+                config.Name,
+                config.Description,
+                config.Template,
+                enableCodeInterpreter,
+                codeInterpreterFileIds,
+                enableFileSearch,
+                vectorStoreId,
+                temperature,
+                topP,
+                responseFormat,
+                metadata,
+                cancellationToken);
+    }
+
+    /// 
+    /// Creates a thread asynchronously with the specified options.
+    /// 
+    /// The assistant client.
+    /// The initial messages for the thread.
+    /// The file IDs for the code interpreter tool.
+    /// The vector store identifier.
+    /// The metadata for the thread.
+    /// The cancellation token.
+    /// A task that represents the asynchronous operation. The task result contains the thread ID.
+    public static async Task CreateThreadAsync(
+        this AssistantClient client,
+        IEnumerable? messages = null,
+        IReadOnlyList? codeInterpreterFileIds = null,
+        string? vectorStoreId = null,
+        IReadOnlyDictionary? metadata = null,
+        CancellationToken cancellationToken = default)
+    {
+        ThreadCreationOptions options = new()
+        {
+            ToolResources = AssistantToolResourcesFactory.GenerateToolResources(vectorStoreId, codeInterpreterFileIds)
+        };
+
+        if (messages != null)
+        {
+            options.InitialMessages.AddRange(messages.ToThreadInitializationMessages());
+        }
+
+        if (metadata != null)
+        {
+            foreach (KeyValuePair item in metadata)
+            {
+                options.Metadata[item.Key] = item.Value;
+            }
+        }
+
+        AssistantThread thread = await client.CreateThreadAsync(options, cancellationToken).ConfigureAwait(false);
+
+        return thread.Id;
+    }
+}
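
A rough usage sketch for the new extensions (the model id, name, and vector store id are placeholders; obtaining the AssistantClient is omitted):

    // Assumed usings: Microsoft.SemanticKernel, Microsoft.SemanticKernel.ChatCompletion,
    // Microsoft.SemanticKernel.Agents.OpenAI, OpenAI.Assistants
    // `client` is an existing AssistantClient instance.
    Assistant assistant = await client.CreateAssistantAsync(
        modelId: "gpt-4o",
        name: "Researcher",
        instructions: "Answer questions using the attached documents.",
        enableFileSearch: true,
        vectorStoreId: "<vector-store-id>");

    // A conversation thread seeded with an initial user message; the returned value is the thread id.
    string threadId = await client.CreateThreadAsync(
        messages: new[] { new ChatMessageContent(AuthorRole.User, "Summarize the key findings.") });
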
diff --git a/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs
new file mode 100644
index 000000000000..5cd0055d8456
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs
@@ -0,0 +1,36 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Linq;
+using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+using OpenAI.Assistants;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+/// 
+/// Convenience extensions for converting .
+/// 
+public static class ChatContentMessageExtensions
+{
+    /// 
+    /// Converts a  instance to a .
+    /// 
+    /// The chat message content to convert.
+    /// A  instance.
+    public static ThreadInitializationMessage ToThreadInitializationMessage(this ChatMessageContent message)
+    {
+        return
+            new ThreadInitializationMessage(
+                role: message.Role.ToMessageRole(),
+                content: AssistantMessageFactory.GetMessageContents(message));
+    }
+
+    /// 
+    /// Converts a collection of  instances to a collection of  instances.
+    /// 
+    /// The collection of chat message contents to convert.
+    /// A collection of  instances.
+    public static IEnumerable ToThreadInitializationMessages(this IEnumerable messages)
+    {
+        return messages.Select(message => message.ToThreadInitializationMessage());
+    }
+}
diff --git a/dotnet/src/Agents/OpenAI/Extensions/KernelExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/KernelExtensions.cs
deleted file mode 100644
index d1e7e0059494..000000000000
--- a/dotnet/src/Agents/OpenAI/Extensions/KernelExtensions.cs
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-namespace Microsoft.SemanticKernel.Agents.OpenAI;
-
-internal static class KernelExtensions
-{
-    /// 
-    /// Retrieve a kernel function based on the tool name.
-    /// 
-    public static KernelFunction GetKernelFunction(this Kernel kernel, string functionName, char delimiter)
-    {
-        string[] nameParts = functionName.Split(delimiter);
-        return nameParts.Length switch
-        {
-            2 => kernel.Plugins.GetFunction(nameParts[0], nameParts[1]),
-            _ => throw new KernelException($"Agent Failure - Unknown tool: {functionName}"),
-        };
-    }
-}
diff --git a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
index 1a4b6fc2fbf6..d15dec19d6e0 100644
--- a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
+++ b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
@@ -1,12 +1,13 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System;
-using System.Collections.Generic;
-using System.Linq;
 using OpenAI.Assistants;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
 
-internal static class KernelFunctionExtensions
+/// 
+/// Extensions for  to support OpenAI specific operations.
+/// 
+public static class KernelFunctionExtensions
 {
     /// 
     /// Convert  to an OpenAI tool model.
@@ -14,87 +15,22 @@ internal static class KernelFunctionExtensions
     /// The source function
     /// The plugin name
     /// An OpenAI tool definition
-    public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string pluginName)
+    public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string? pluginName = null)
     {
-        var metadata = function.Metadata;
-        if (metadata.Parameters.Count > 0)
+        if (function.Metadata.Parameters.Count > 0)
         {
-            var required = new List(metadata.Parameters.Count);
-            var parameters =
-                metadata.Parameters.ToDictionary(
-                    p => p.Name,
-                    p =>
-                    {
-                        if (p.IsRequired)
-                        {
-                            required.Add(p.Name);
-                        }
+            BinaryData parameterData = function.Metadata.CreateParameterSpec();
 
-                        return
-                            new
-                            {
-                                type = ConvertType(p.ParameterType),
-                                description = p.Description,
-                            };
-                    });
-
-            var spec =
-                new
-                {
-                    type = "object",
-                    properties = parameters,
-                    required,
-                };
-
-            return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName))
+            return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName ?? function.PluginName))
             {
                 Description = function.Description,
-                Parameters = BinaryData.FromObjectAsJson(spec)
+                Parameters = parameterData,
             };
         }
 
-        return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName))
+        return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName ?? function.PluginName))
         {
             Description = function.Description
         };
     }
-
-    private static string ConvertType(Type? type)
-    {
-        if (type is null || type == typeof(string))
-        {
-            return "string";
-        }
-
-        if (type == typeof(bool))
-        {
-            return "boolean";
-        }
-
-        if (type.IsEnum)
-        {
-            return "enum";
-        }
-
-        if (type.IsArray)
-        {
-            return "array";
-        }
-
-        if (type == typeof(DateTime) || type == typeof(DateTimeOffset))
-        {
-            return "date-time";
-        }
-
-        return Type.GetTypeCode(type) switch
-        {
-            TypeCode.SByte or TypeCode.Byte or
-            TypeCode.Int16 or TypeCode.UInt16 or
-            TypeCode.Int32 or TypeCode.UInt32 or
-            TypeCode.Int64 or TypeCode.UInt64 or
-            TypeCode.Single or TypeCode.Double or TypeCode.Decimal => "number",
-
-            _ => "object",
-        };
-    }
 }
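A short sketch of how the now-public ToToolDefinition is typically consumed, mirroring the tool assembly performed in AssistantThreadActions later in this change (the kernel variable is assumed):

    List<ToolDefinition> tools = new();
    foreach (KernelPlugin plugin in kernel.Plugins)
    {
        foreach (KernelFunction function in plugin)
        {
            // The pluginName argument is now optional; when omitted, function.PluginName is used.
            tools.Add(function.ToToolDefinition(plugin.Name));
        }
    }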
diff --git a/dotnet/src/Agents/OpenAI/Extensions/OpenAIClientExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/OpenAIClientExtensions.cs
new file mode 100644
index 000000000000..7e63a32673f2
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/Extensions/OpenAIClientExtensions.cs
@@ -0,0 +1,110 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+using OpenAI;
+using OpenAI.Assistants;
+using OpenAI.Files;
+using OpenAI.VectorStores;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+/// 
+/// Convenience extensions for .
+/// 
+public static class OpenAIClientExtensions
+{
+    /// 
+    /// Creates a vector store asynchronously.
+    /// 
+    /// The OpenAI client instance.
+    /// The collection of file identifiers to include in the vector store.
+    /// Indicates whether to wait until the operation is completed.
+    /// The name of the vector store.
+    /// The expiration policy for the vector store.
+    /// The chunking strategy for the vector store.
+    /// The metadata associated with the vector store.
+    /// The cancellation token to monitor for cancellation requests.
+    /// The identifier of the created vector store.
+    public static async Task CreateVectorStoreAsync(
+        this OpenAIClient client,
+        IEnumerable fileIds,
+        bool waitUntilCompleted = true,
+        string? storeName = null,
+        VectorStoreExpirationPolicy? expirationPolicy = null,
+        FileChunkingStrategy? chunkingStrategy = null,
+        IReadOnlyDictionary? metadata = null,
+        CancellationToken cancellationToken = default)
+    {
+        VectorStoreCreationOptions options = new()
+        {
+            Name = storeName,
+            ChunkingStrategy = chunkingStrategy,
+            ExpirationPolicy = expirationPolicy,
+        };
+
+        options.FileIds.AddRange(fileIds);
+
+        if (metadata != null)
+        {
+            foreach (KeyValuePair item in metadata)
+            {
+                options.Metadata[item.Key] = item.Value;
+            }
+        }
+
+        VectorStoreClient vectorStoreClient = client.GetVectorStoreClient();
+        CreateVectorStoreOperation result = await vectorStoreClient.CreateVectorStoreAsync(waitUntilCompleted, options, cancellationToken).ConfigureAwait(false);
+
+        return result.VectorStoreId;
+    }
+
+    /// 
+    /// Deletes a vector store asynchronously.
+    /// 
+    /// The OpenAI client instance.
+    /// The identifier of the vector store to delete.
+    /// The cancellation token to monitor for cancellation requests.
+    /// A boolean indicating whether the vector store was successfully deleted.
+    public static async Task DeleteVectorStoreAsync(this OpenAIClient client, string vectorStoreId, CancellationToken cancellationToken = default)
+    {
+        VectorStoreClient vectorStoreClient = client.GetVectorStoreClient();
+        VectorStoreDeletionResult result = await vectorStoreClient.DeleteVectorStoreAsync(vectorStoreId, cancellationToken).ConfigureAwait(false);
+        return result.Deleted;
+    }
+
+    /// 
+    /// Uploads a file to use with the assistant.
+    /// 
+    /// The OpenAI client instance.
+    /// The content to upload.
+    /// The name of the file.
+    /// The  to monitor for cancellation requests. The default is .
+    /// The file identifier.
+    /// 
+    /// Use the  directly for more advanced file operations.
+    /// 
+    public static async Task UploadAssistantFileAsync(this OpenAIClient client, Stream stream, string name, CancellationToken cancellationToken = default)
+    {
+        OpenAIFileClient fileClient = client.GetOpenAIFileClient();
+
+        OpenAIFile fileInfo = await fileClient.UploadFileAsync(stream, name, FileUploadPurpose.Assistants, cancellationToken).ConfigureAwait(false);
+
+        return fileInfo.Id;
+    }
+
+    /// 
+    /// Deletes a file asynchronously.
+    /// 
+    /// The OpenAI client instance.
+    /// The identifier of the file to delete.
+    /// The cancellation token to monitor for cancellation requests.
+    /// A boolean indicating whether the file was successfully deleted.
+    public static async Task DeleteFileAsync(this OpenAIClient client, string fileId, CancellationToken cancellationToken = default)
+    {
+        OpenAIFileClient fileClient = client.GetOpenAIFileClient();
+        FileDeletionResult result = await fileClient.DeleteFileAsync(fileId, cancellationToken).ConfigureAwait(false);
+        return result.Deleted;
+    }
+}
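A hedged end-to-end sketch of the new OpenAIClient conveniences (the openAIClient instance and the local file path are assumptions; the ids returned by the service will differ):

    // Upload a file, attach it to a new vector store, then clean both up.
    string fileId;
    using (FileStream stream = File.OpenRead("report.pdf"))   // placeholder local file
    {
        fileId = await openAIClient.UploadAssistantFileAsync(stream, "report.pdf");
    }

    string vectorStoreId = await openAIClient.CreateVectorStoreAsync(
        new[] { fileId },
        waitUntilCompleted: true,
        storeName: "sample-store");

    // ... use the vector store with an assistant or a thread ...

    await openAIClient.DeleteVectorStoreAsync(vectorStoreId);
    await openAIClient.DeleteFileAsync(fileId);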
diff --git a/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs b/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs
deleted file mode 100644
index d017fb403f23..000000000000
--- a/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using Azure.Core;
-using Azure.Core.Pipeline;
-
-namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
-
-/// 
-/// Helper class to inject headers into Azure SDK HTTP pipeline
-/// 
-internal sealed class AddHeaderRequestPolicy(string headerName, string headerValue) : HttpPipelineSynchronousPolicy
-{
-    public override void OnSendingRequest(HttpMessage message) => message.Request.Headers.Add(headerName, headerValue);
-}
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs
index 532a8433c37c..cdcfdadf93ef 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs
@@ -5,6 +5,8 @@
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
 
+#pragma warning disable CS0618 // Type or member is obsolete
+
 /// 
 /// Produce the  for an assistant according to the requested configuration.
 /// 
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
index cda0399f5e28..772c30630fe5 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
@@ -1,6 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
+
 using System.Collections.Generic;
-using Microsoft.SemanticKernel.ChatCompletion;
 using OpenAI.Assistants;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
@@ -8,62 +8,46 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
 /// 
 /// Factory for creating  definition.
 /// 
-/// 
-/// Improves testability.
-/// 
 internal static class AssistantRunOptionsFactory
 {
-    /// 
-    /// Produce  by reconciling  and .
-    /// 
-    /// The assistant definition
-    /// Instructions to use for the run
-    /// The run specific options
-    public static RunCreationOptions GenerateOptions(OpenAIAssistantDefinition definition, string? overrideInstructions, OpenAIAssistantInvocationOptions? invocationOptions)
+    public static RunCreationOptions GenerateOptions(RunCreationOptions? defaultOptions, string? overrideInstructions, RunCreationOptions? invocationOptions)
     {
-        int? truncationMessageCount = ResolveExecutionSetting(invocationOptions?.TruncationMessageCount, definition.ExecutionOptions?.TruncationMessageCount);
-
-        RunCreationOptions options =
+        RunCreationOptions runOptions =
             new()
             {
-                AdditionalInstructions = invocationOptions?.AdditionalInstructions ?? definition.ExecutionOptions?.AdditionalInstructions,
+                AdditionalInstructions = invocationOptions?.AdditionalInstructions ?? defaultOptions?.AdditionalInstructions,
                 InstructionsOverride = overrideInstructions,
-                MaxOutputTokenCount = ResolveExecutionSetting(invocationOptions?.MaxCompletionTokens, definition.ExecutionOptions?.MaxCompletionTokens),
-                MaxInputTokenCount = ResolveExecutionSetting(invocationOptions?.MaxPromptTokens, definition.ExecutionOptions?.MaxPromptTokens),
-                ModelOverride = invocationOptions?.ModelName,
-                NucleusSamplingFactor = ResolveExecutionSetting(invocationOptions?.TopP, definition.TopP),
-                AllowParallelToolCalls = ResolveExecutionSetting(invocationOptions?.ParallelToolCallsEnabled, definition.ExecutionOptions?.ParallelToolCallsEnabled),
-                ResponseFormat = ResolveExecutionSetting(invocationOptions?.EnableJsonResponse, definition.EnableJsonResponse) ?? false ? AssistantResponseFormat.JsonObject : null,
-                Temperature = ResolveExecutionSetting(invocationOptions?.Temperature, definition.Temperature),
-                TruncationStrategy = truncationMessageCount.HasValue ? RunTruncationStrategy.CreateLastMessagesStrategy(truncationMessageCount.Value) : null,
+                MaxOutputTokenCount = invocationOptions?.MaxOutputTokenCount ?? defaultOptions?.MaxOutputTokenCount,
+                MaxInputTokenCount = invocationOptions?.MaxInputTokenCount ?? defaultOptions?.MaxInputTokenCount,
+                ModelOverride = invocationOptions?.ModelOverride ?? defaultOptions?.ModelOverride,
+                NucleusSamplingFactor = invocationOptions?.NucleusSamplingFactor ?? defaultOptions?.NucleusSamplingFactor,
+                AllowParallelToolCalls = invocationOptions?.AllowParallelToolCalls ?? defaultOptions?.AllowParallelToolCalls,
+                ResponseFormat = invocationOptions?.ResponseFormat ?? defaultOptions?.ResponseFormat,
+                Temperature = invocationOptions?.Temperature ?? defaultOptions?.Temperature,
+                ToolConstraint = invocationOptions?.ToolConstraint ?? defaultOptions?.ToolConstraint,
+                TruncationStrategy = invocationOptions?.TruncationStrategy ?? defaultOptions?.TruncationStrategy,
             };
 
-        if (invocationOptions?.Metadata != null)
+        IList? additionalMessages = invocationOptions?.AdditionalMessages ?? defaultOptions?.AdditionalMessages;
+        if (additionalMessages != null)
         {
-            foreach (var metadata in invocationOptions.Metadata)
-            {
-                options.Metadata.Add(metadata.Key, metadata.Value ?? string.Empty);
-            }
+            runOptions.AdditionalMessages.AddRange(additionalMessages);
         }
 
-        if (invocationOptions?.AdditionalMessages != null)
+        PopulateMetadata(defaultOptions, runOptions);
+        PopulateMetadata(invocationOptions, runOptions);
+
+        return runOptions;
+    }
+
+    private static void PopulateMetadata(RunCreationOptions? sourceOptions, RunCreationOptions targetOptions)
+    {
+        if (sourceOptions?.Metadata != null)
         {
-            foreach (ChatMessageContent message in invocationOptions.AdditionalMessages)
+            foreach (KeyValuePair item in sourceOptions.Metadata)
             {
-                ThreadInitializationMessage threadMessage = new(
-                    role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Assistant,
-                    content: AssistantMessageFactory.GetMessageContents(message));
-
-                options.AdditionalMessages.Add(threadMessage);
+                targetOptions.Metadata[item.Key] = item.Value ?? string.Empty;
             }
         }
-
-        return options;
     }
-
-    private static TValue? ResolveExecutionSetting(TValue? setting, TValue? agentSetting) where TValue : struct
-        =>
-            setting.HasValue && (!agentSetting.HasValue || !EqualityComparer.Default.Equals(setting.Value, agentSetting.Value)) ?
-                setting.Value :
-                agentSetting;
 }
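Although the factory remains internal, the merge rule it now implements is simple: per-invocation values take precedence and the agent-level defaults fill any gaps. An illustrative sketch with arbitrary values:

    RunCreationOptions agentDefaults = new()
    {
        Temperature = 0.2f,
        AdditionalInstructions = "Answer briefly.",
    };
    RunCreationOptions perInvocation = new()
    {
        Temperature = 0.9f, // overrides the agent default
    };

    RunCreationOptions merged = AssistantRunOptionsFactory.GenerateOptions(agentDefaults, overrideInstructions: null, perInvocation);
    // merged.Temperature == 0.9f; merged.AdditionalInstructions == "Answer briefly."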
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
index 2e066b91869f..64749cedff69 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
@@ -2,6 +2,7 @@
 using System;
 using System.ClientModel;
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Net;
 using System.Runtime.CompilerServices;
@@ -10,6 +11,7 @@
 using System.Threading.Tasks;
 using Azure;
 using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Agents.Extensions;
 using Microsoft.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel.Connectors.FunctionCalling;
 using OpenAI.Assistants;
@@ -28,46 +30,6 @@ internal static class AssistantThreadActions
         RunStatus.Cancelling,
     ];
 
-    /// 
-    /// Create a new assistant thread.
-    /// 
-    /// The assistant client
-    /// The options for creating the thread
-    /// The  to monitor for cancellation requests. The default is .
-    /// The thread identifier
-    public static async Task CreateThreadAsync(AssistantClient client, OpenAIThreadCreationOptions? options, CancellationToken cancellationToken = default)
-    {
-        ThreadCreationOptions createOptions =
-            new()
-            {
-                ToolResources = AssistantToolResourcesFactory.GenerateToolResources(options?.VectorStoreId, options?.CodeInterpreterFileIds),
-            };
-
-        if (options?.Messages is not null)
-        {
-            foreach (ChatMessageContent message in options.Messages)
-            {
-                ThreadInitializationMessage threadMessage = new(
-                    role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Assistant,
-                    content: AssistantMessageFactory.GetMessageContents(message));
-
-                createOptions.InitialMessages.Add(threadMessage);
-            }
-        }
-
-        if (options?.Metadata != null)
-        {
-            foreach (KeyValuePair item in options.Metadata)
-            {
-                createOptions.Metadata[item.Key] = item.Value;
-            }
-        }
-
-        AssistantThread thread = await client.CreateThreadAsync(createOptions, cancellationToken).ConfigureAwait(false);
-
-        return thread.Id;
-    }
-
     /// 
     /// Create a message in the specified thread.
     /// 
@@ -151,24 +113,26 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
         OpenAIAssistantAgent agent,
         AssistantClient client,
         string threadId,
-        OpenAIAssistantInvocationOptions? invocationOptions,
+        RunCreationOptions? invocationOptions,
         ILogger logger,
         Kernel kernel,
         KernelArguments? arguments,
         [EnumeratorCancellation] CancellationToken cancellationToken)
     {
-        if (agent.IsDeleted)
-        {
-            throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {agent.Id}.");
-        }
-
         logger.LogOpenAIAssistantCreatingRun(nameof(InvokeAsync), threadId);
 
-        ToolDefinition[]? tools = [.. agent.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))];
+        List tools = new(agent.Definition.Tools);
+
+        // Add unique functions from the Kernel which are not already present in the agent's tools
+        var functionToolNames = new HashSet(tools.OfType().Select(t => t.FunctionName));
+        var functionTools = kernel.Plugins
+            .SelectMany(kp => kp.Select(kf => kf.ToToolDefinition(kp.Name)))
+            .Where(tool => !functionToolNames.Contains(tool.FunctionName));
+        tools.AddRange(functionTools);
 
         string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
 
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, instructions, invocationOptions);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.RunOptions, instructions, invocationOptions);
 
         options.ToolsOverride.AddRange(tools);
 
@@ -248,7 +212,7 @@ await functionProcessor.InvokeFunctionCallsAsync(
             int messageCount = 0;
             foreach (RunStep completedStep in completedStepsToProcess)
             {
-                if (completedStep.Type == RunStepType.ToolCalls)
+                if (completedStep.Kind == RunStepKind.ToolCall)
                 {
                     foreach (RunStepToolCall toolCall in completedStep.Details.ToolCalls)
                     {
@@ -256,15 +220,15 @@ await functionProcessor.InvokeFunctionCallsAsync(
                         ChatMessageContent? content = null;
 
                         // Process code-interpreter content
-                        if (toolCall.ToolKind == RunStepToolCallKind.CodeInterpreter)
+                        if (toolCall.Kind == RunStepToolCallKind.CodeInterpreter)
                         {
                             content = GenerateCodeInterpreterContent(agent.GetName(), toolCall.CodeInterpreterInput, completedStep);
                             isVisible = true;
                         }
                         // Process function result content
-                        else if (toolCall.ToolKind == RunStepToolCallKind.Function)
+                        else if (toolCall.Kind == RunStepToolCallKind.Function)
                         {
-                            FunctionResultContent functionStep = functionSteps[toolCall.ToolCallId]; // Function step always captured on invocation
+                            FunctionResultContent functionStep = functionSteps[toolCall.Id]; // Function step always captured on invocation
                             content = GenerateFunctionResultContent(agent.GetName(), [functionStep], completedStep);
                         }
 
@@ -276,7 +240,7 @@ await functionProcessor.InvokeFunctionCallsAsync(
                         }
                     }
                 }
-                else if (completedStep.Type == RunStepType.MessageCreation)
+                else if (completedStep.Kind == RunStepKind.CreatedMessage)
                 {
                     // Retrieve the message
                     ThreadMessage? message = await RetrieveMessageAsync(client, threadId, completedStep.Details.CreatedMessageId, agent.PollingOptions.MessageSynchronizationDelay, cancellationToken).ConfigureAwait(false);
@@ -378,29 +342,25 @@ async Task PollRunStatusAsync()
     /// 
     /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
     /// 
+    [ExcludeFromCodeCoverage]
     public static async IAsyncEnumerable InvokeStreamingAsync(
         OpenAIAssistantAgent agent,
         AssistantClient client,
         string threadId,
         IList? messages,
-        OpenAIAssistantInvocationOptions? invocationOptions,
+        RunCreationOptions? invocationOptions,
         ILogger logger,
         Kernel kernel,
         KernelArguments? arguments,
         [EnumeratorCancellation] CancellationToken cancellationToken)
     {
-        if (agent.IsDeleted)
-        {
-            throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {agent.Id}.");
-        }
-
         logger.LogOpenAIAssistantCreatingRun(nameof(InvokeAsync), threadId);
 
-        ToolDefinition[]? tools = [.. agent.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))];
+        ToolDefinition[]? tools = [.. agent.Definition.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))];
 
         string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
 
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, instructions, invocationOptions);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.RunOptions, instructions, invocationOptions);
 
         options.ToolsOverride.AddRange(tools);
 
@@ -498,7 +458,7 @@ await client.GetRunStepsAsync(run.ThreadId, run.Id, cancellationToken: cancellat
                 {
                     foreach (RunStepToolCall stepDetails in step.Details.ToolCalls)
                     {
-                        toolMap[stepDetails.ToolCallId] = step.Id;
+                        toolMap[stepDetails.Id] = step.Id;
                     }
                 }
 
@@ -556,14 +516,14 @@ await RetrieveMessageAsync(
                     {
                         foreach (RunStepToolCall toolCall in step.Details.ToolCalls)
                         {
-                            if (toolCall.ToolKind == RunStepToolCallKind.Function)
+                            if (toolCall.Kind == RunStepToolCallKind.Function)
                             {
                                 messages?.Add(GenerateFunctionResultContent(agent.GetName(), stepFunctionResults[step.Id], step));
                                 stepFunctionResults.Remove(step.Id);
                                 break;
                             }
 
-                            if (toolCall.ToolKind == RunStepToolCallKind.CodeInterpreter)
+                            if (toolCall.Kind == RunStepToolCallKind.CodeInterpreter)
                             {
                                 messages?.Add(GenerateCodeInterpreterContent(agent.GetName(), toolCall.CodeInterpreterInput, step));
                             }
@@ -628,6 +588,7 @@ private static ChatMessageContent GenerateMessageContent(string? assistantName,
         return content;
     }
 
+    [ExcludeFromCodeCoverage]
     private static StreamingChatMessageContent GenerateStreamingMessageContent(string? assistantName, MessageContentUpdate update)
     {
         StreamingChatMessageContent content =
@@ -660,6 +621,7 @@ private static StreamingChatMessageContent GenerateStreamingMessageContent(strin
         return content;
     }
 
+    [ExcludeFromCodeCoverage]
     private static StreamingChatMessageContent? GenerateStreamingCodeInterpreterContent(string? assistantName, RunStepDetailsUpdate update)
     {
         StreamingChatMessageContent content =
@@ -712,6 +674,7 @@ private static AnnotationContent GenerateAnnotationContent(TextAnnotation annota
             };
     }
 
+    [ExcludeFromCodeCoverage]
     private static StreamingAnnotationContent GenerateStreamingAnnotationContent(TextAnnotationUpdate annotation)
     {
         string? fileId = null;
@@ -753,13 +716,13 @@ private static ChatMessageContent GenerateCodeInterpreterContent(string agentNam
 
     private static IEnumerable ParseFunctionStep(OpenAIAssistantAgent agent, RunStep step)
     {
-        if (step.Status == RunStepStatus.InProgress && step.Type == RunStepType.ToolCalls)
+        if (step.Status == RunStepStatus.InProgress && step.Kind == RunStepKind.ToolCall)
         {
             foreach (RunStepToolCall toolCall in step.Details.ToolCalls)
             {
                 (FunctionName nameParts, KernelArguments functionArguments) = ParseFunctionCall(toolCall.FunctionName, toolCall.FunctionArguments);
 
-                FunctionCallContent content = new(nameParts.Name, nameParts.PluginName, toolCall.ToolCallId, functionArguments);
+                FunctionCallContent content = new(nameParts.Name, nameParts.PluginName, toolCall.Id, functionArguments);
 
                 yield return content;
             }
@@ -829,23 +792,6 @@ private static ChatMessageContent GenerateFunctionResultContent(string agentName
             };
     }
 
-    private static Task[] ExecuteFunctionSteps(OpenAIAssistantAgent agent, FunctionCallContent[] functionCalls, CancellationToken cancellationToken)
-    {
-        Task[] functionTasks = new Task[functionCalls.Length];
-
-        for (int index = 0; index < functionCalls.Length; ++index)
-        {
-            functionTasks[index] = ExecuteFunctionStep(agent, functionCalls[index], cancellationToken);
-        }
-
-        return functionTasks;
-    }
-
-    private static Task ExecuteFunctionStep(OpenAIAssistantAgent agent, FunctionCallContent functionCall, CancellationToken cancellationToken)
-    {
-        return functionCall.InvokeAsync(agent.Kernel, cancellationToken);
-    }
-
     private static ToolOutput[] GenerateToolOutputs(FunctionResultContent[] functionResults)
     {
         ToolOutput[] toolOutputs = new ToolOutput[functionResults.Length];
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs
index 7c4000dcebb0..b947ccc2a78a 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs
@@ -16,7 +16,7 @@ internal static class AssistantToolResourcesFactory
     /// Produces a  definition based on the provided parameters.
     /// 
     /// An optional vector-store-id for the 'file_search' tool
-    /// An optionallist of file-identifiers for the 'code_interpreter' tool.
+    /// An optional list of file-identifiers for the 'code_interpreter' tool.
     public static ToolResources? GenerateToolResources(string? vectorStoreId, IReadOnlyList? codeInterpreterFileIds)
     {
         bool hasVectorStore = !string.IsNullOrWhiteSpace(vectorStoreId);
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.ClientFactory.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.ClientFactory.cs
new file mode 100644
index 000000000000..86e90fbf4adc
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.ClientFactory.cs
@@ -0,0 +1,122 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.ClientModel;
+using System.ClientModel.Primitives;
+using System.Net.Http;
+using System.Threading;
+using Azure.AI.OpenAI;
+using Azure.Core;
+using Microsoft.SemanticKernel.Http;
+using OpenAI;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+public sealed partial class OpenAIAssistantAgent : KernelAgent
+{
+    /// 
+    /// Specifies a key that avoids an exception from OpenAI Client when a custom endpoint is provided without an API key.
+    /// 
+    private const string SingleSpaceKey = " ";
+
+    /// 
+    /// Produces an .
+    /// 
+    /// The API key.
+    /// The service endpoint.
+    /// A custom  for HTTP requests.
+    public static AzureOpenAIClient CreateAzureOpenAIClient(ApiKeyCredential apiKey, Uri endpoint, HttpClient? httpClient = null)
+    {
+        Verify.NotNull(apiKey, nameof(apiKey));
+        Verify.NotNull(endpoint, nameof(endpoint));
+
+        AzureOpenAIClientOptions clientOptions = CreateAzureClientOptions(httpClient);
+
+        return new AzureOpenAIClient(endpoint, apiKey!, clientOptions);
+    }
+
+    /// 
+    /// Produces an .
+    /// 
+    /// The credentials.
+    /// The service endpoint.
+    /// A custom  for HTTP requests.
+    public static AzureOpenAIClient CreateAzureOpenAIClient(TokenCredential credential, Uri endpoint, HttpClient? httpClient = null)
+    {
+        Verify.NotNull(credential, nameof(credential));
+        Verify.NotNull(endpoint, nameof(endpoint));
+
+        AzureOpenAIClientOptions clientOptions = CreateAzureClientOptions(httpClient);
+
+        return new AzureOpenAIClient(endpoint, credential, clientOptions);
+    }
+
+    /// 
+    /// Produces an .
+    /// 
+    /// An optional endpoint.
+    /// A custom  for HTTP requests.
+    public static OpenAIClient CreateOpenAIClient(Uri? endpoint = null, HttpClient? httpClient = null)
+    {
+        OpenAIClientOptions clientOptions = CreateOpenAIClientOptions(endpoint, httpClient);
+        return new OpenAIClient(new ApiKeyCredential(SingleSpaceKey), clientOptions);
+    }
+
+    /// 
+    /// Produces an .
+    /// 
+    /// The API key.
+    /// An optional endpoint.
+    /// A custom  for HTTP requests.
+    public static OpenAIClient CreateOpenAIClient(ApiKeyCredential apiKey, Uri? endpoint = null, HttpClient? httpClient = null)
+    {
+        OpenAIClientOptions clientOptions = CreateOpenAIClientOptions(endpoint, httpClient);
+        return new OpenAIClient(apiKey, clientOptions);
+    }
+
+    private static AzureOpenAIClientOptions CreateAzureClientOptions(HttpClient? httpClient)
+    {
+        AzureOpenAIClientOptions options = new()
+        {
+            UserAgentApplicationId = HttpHeaderConstant.Values.UserAgent
+        };
+
+        ConfigureClientOptions(httpClient, options);
+
+        return options;
+    }
+
+    private static OpenAIClientOptions CreateOpenAIClientOptions(Uri? endpoint, HttpClient? httpClient)
+    {
+        OpenAIClientOptions options = new()
+        {
+            UserAgentApplicationId = HttpHeaderConstant.Values.UserAgent,
+            Endpoint = endpoint ?? httpClient?.BaseAddress,
+        };
+
+        ConfigureClientOptions(httpClient, options);
+
+        return options;
+    }
+
+    private static void ConfigureClientOptions(HttpClient? httpClient, ClientPipelineOptions options)
+    {
+        options.AddPolicy(CreateRequestHeaderPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(OpenAIAssistantAgent))), PipelinePosition.PerCall);
+
+        if (httpClient is not null)
+        {
+            options.Transport = new HttpClientPipelineTransport(httpClient);
+            options.RetryPolicy = new ClientRetryPolicy(maxRetries: 0); // Disable retry policy if and only if a custom HttpClient is provided.
+            options.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable default timeout
+        }
+    }
+
+    private static GenericActionPipelinePolicy CreateRequestHeaderPolicy(string headerName, string headerValue)
+        =>
+            new((message) =>
+            {
+                if (message?.Request?.Headers?.TryGetValue(headerName, out string? _) == false)
+                {
+                    message.Request.Headers.Set(headerName, headerValue);
+                }
+            });
+}
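A brief sketch of how these factory methods pair with the new constructor shown below (the endpoint, key, and assistant id are placeholders):

    AzureOpenAIClient azureClient = OpenAIAssistantAgent.CreateAzureOpenAIClient(
        new ApiKeyCredential("<api-key>"),
        new Uri("https://my-resource.openai.azure.com"));

    AssistantClient assistantClient = azureClient.GetAssistantClient();
    Assistant definition = await assistantClient.GetAssistantAsync("asst_placeholder");

    OpenAIAssistantAgent agent = new(definition, assistantClient);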
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
index f79b8ce3239c..c8d300874c60 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
@@ -1,76 +1,129 @@
 // Copyright (c) Microsoft. All rights reserved.
+
+using System;
 using System.Collections.Generic;
-using System.IO;
+using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Runtime.CompilerServices;
 using System.Text.Json;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Agents.Extensions;
 using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
 using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Diagnostics;
 using OpenAI.Assistants;
-using OpenAI.Files;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
 
 /// 
-/// A  specialization based on Open AI Assistant / GPT.
+/// Represents a  specialization based on Open AI Assistant / GPT.
 /// 
-public sealed class OpenAIAssistantAgent : KernelAgent
+public sealed partial class OpenAIAssistantAgent : KernelAgent
 {
     /// 
-    /// Metadata key that identifies code-interpreter content.
+    /// The metadata key that identifies code-interpreter content.
     /// 
     public const string CodeInterpreterMetadataKey = "code";
 
     internal const string OptionsMetadataKey = "__run_options";
     internal const string TemplateMetadataKey = "__template_format";
 
-    private readonly OpenAIClientProvider _provider;
-    private readonly Assistant _assistant;
-    private readonly AssistantClient _client;
-    private readonly string[] _channelKeys;
+    /// 
+    /// Initializes a new instance of the  class.
+    /// 
+    /// The assistant definition.
+    /// The client used to access the Assistant API service.
+    /// Optional collection of plugins to add to the kernel.
+    /// An optional factory to produce the  for the agent.
+    /// The format of the prompt template used when the "templateFactory" parameter is supplied.
+    public OpenAIAssistantAgent(
+        Assistant definition,
+        AssistantClient client,
+        IEnumerable? plugins = null,
+        IPromptTemplateFactory? templateFactory = null,
+        string? templateFormat = null)
+    {
+        this.Client = client;
+
+        this.Definition = definition;
+
+        this.Description = this.Definition.Description;
+        this.Id = this.Definition.Id;
+        this.Name = this.Definition.Name;
+        this.Instructions = this.Definition.Instructions;
+
+        if (templateFactory != null)
+        {
+            Verify.NotNullOrWhiteSpace(templateFormat);
+
+            PromptTemplateConfig templateConfig = new(this.Instructions)
+            {
+                TemplateFormat = templateFormat
+            };
+
+            this.Template = templateFactory.Create(templateConfig);
+        }
+
+        if (plugins != null)
+        {
+            this.Kernel.Plugins.AddRange(plugins);
+        }
+    }
 
     /// 
-    /// The assistant definition.
+    /// Gets the assistant client, exposed for additional use.
     /// 
-    public OpenAIAssistantDefinition Definition { get; private init; }
+    public AssistantClient Client { get; }
 
     /// 
-    /// Set when the assistant has been deleted via .
-    /// An assistant removed by other means will result in an exception when invoked.
+    /// Gets the assistant definition.
     /// 
+    public Assistant Definition { get; }
+
+    /// 
+    /// Gets a value that indicates whether the assistant has been deleted via .
+    /// 
+    /// 
+    /// An assistant removed by other means will result in an exception when invoked.
+    /// 
+    [Experimental("SKEXP0110")]
+    [Obsolete("Use the OpenAI.Assistants.AssistantClient to manage the Assistant definition lifecycle.")]
     public bool IsDeleted { get; private set; }
 
     /// 
-    /// Defines polling behavior for run processing
+    /// Gets the polling behavior for run processing.
     /// 
     public RunPollingOptions PollingOptions { get; } = new();
 
     /// 
-    /// Expose predefined tools for run-processing.
+    /// Gets or sets the run creation options for the assistant.
     /// 
-    internal IReadOnlyList Tools => this._assistant.Tools;
+    public RunCreationOptions? RunOptions { get; init; }
 
     /// 
-    /// Define a new .
+    /// Create a new .
     /// 
-    /// OpenAI client provider for accessing the API service.
-    /// Defines the assistant's capabilities.
+    /// The OpenAI client provider for accessing the API service.
+    /// The assistant's capabilities.
     /// The  containing services, plugins, and other state for use throughout the operation.
     /// Required arguments that provide default template parameters, including any .
-    /// Prompt template configuration
-    /// An optional factory to produce the  for the agent
+    /// The prompt template configuration.
+    /// A prompt template factory to produce the  for the agent.
     /// The  to monitor for cancellation requests. The default is .
-    /// An  instance
-    public async static Task CreateFromTemplateAsync(
+    /// An  instance.
+    [Experimental("SKEXP0110")]
+    [Obsolete("Use the OpenAI.Assistants.AssistantClient to create an assistant (CreateAssistantFromTemplateAsync).")]
+    public static async Task CreateFromTemplateAsync(
+#pragma warning disable SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
         OpenAIClientProvider clientProvider,
+#pragma warning restore SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
         OpenAIAssistantCapabilities capabilities,
         Kernel kernel,
         KernelArguments defaultArguments,
         PromptTemplateConfig templateConfig,
-        IPromptTemplateFactory? templateFactory = null,
+        IPromptTemplateFactory templateFactory,
         CancellationToken cancellationToken = default)
     {
         // Validate input
@@ -79,12 +132,13 @@ public async static Task CreateFromTemplateAsync(
         Verify.NotNull(clientProvider, nameof(clientProvider));
         Verify.NotNull(capabilities, nameof(capabilities));
         Verify.NotNull(templateConfig, nameof(templateConfig));
+        Verify.NotNull(templateFactory, nameof(templateFactory));
 
         // Ensure template is valid (avoid failure after posting assistant creation)
-        IPromptTemplate? template = templateFactory?.Create(templateConfig);
+        IPromptTemplate template = templateFactory.Create(templateConfig);
 
         // Create the client
-        AssistantClient client = CreateClient(clientProvider);
+        AssistantClient client = clientProvider.Client.GetAssistantClient();
 
         // Create the assistant
         AssistantCreationOptions assistantCreationOptions = templateConfig.CreateAssistantOptions(capabilities);
@@ -92,7 +146,7 @@ public async static Task CreateFromTemplateAsync(
 
         // Instantiate the agent
         return
-            new OpenAIAssistantAgent(model, clientProvider, client)
+            new OpenAIAssistantAgent(model, clientProvider.AssistantClient)
             {
                 Kernel = kernel,
                 Arguments = defaultArguments,
@@ -101,16 +155,20 @@ public async static Task CreateFromTemplateAsync(
     }
 
     /// 
-    /// Define a new .
+    /// Create a new .
     /// 
-    /// OpenAI client provider for accessing the API service.
+    /// The OpenAI client provider for accessing the API service.
     /// The assistant definition.
     /// The  containing services, plugins, and other state for use throughout the operation.
     /// Optional default arguments, including any .
     /// The  to monitor for cancellation requests. The default is .
-    /// An  instance
+    /// An  instance.
+    [Experimental("SKEXP0110")]
+    [Obsolete("Use the OpenAI.Assistants.AssistantClient to create an assistant (CreateAssistantAsync).")]
     public static async Task CreateAsync(
+#pragma warning disable SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
         OpenAIClientProvider clientProvider,
+#pragma warning restore SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
         OpenAIAssistantDefinition definition,
         Kernel kernel,
         KernelArguments? defaultArguments = null,
@@ -122,7 +180,7 @@ public static async Task CreateAsync(
         Verify.NotNull(definition, nameof(definition));
 
         // Create the client
-        AssistantClient client = CreateClient(clientProvider);
+        AssistantClient client = clientProvider.Client.GetAssistantClient();
 
         // Create the assistant
         AssistantCreationOptions assistantCreationOptions = definition.CreateAssistantOptions();
@@ -130,25 +188,29 @@ public static async Task CreateAsync(
 
         // Instantiate the agent
         return
-            new OpenAIAssistantAgent(model, clientProvider, client)
+            new OpenAIAssistantAgent(model, clientProvider.AssistantClient)
             {
                 Kernel = kernel,
-                Arguments = defaultArguments
+                Arguments = defaultArguments ?? [],
             };
     }
 
     /// 
-    /// Retrieve a list of assistant definitions: .
+    /// Retrieves a list of assistant definitions.
     /// 
-    /// Configuration for accessing the API service.
+    /// The configuration for accessing the API service.
     /// The  to monitor for cancellation requests. The default is .
-    /// An list of  objects.
+    /// A list of  objects.
+    [Experimental("SKEXP0110")]
+    [Obsolete("Use the OpenAI.Assistants.AssistantClient to query for assistant definitions (GetAssistantsAsync).")]
     public static async IAsyncEnumerable ListDefinitionsAsync(
-        OpenAIClientProvider provider,
+#pragma warning disable SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+        OpenAIClientProvider clientProvider,
+#pragma warning restore SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
         [EnumeratorCancellation] CancellationToken cancellationToken = default)
     {
         // Create the client
-        AssistantClient client = CreateClient(provider);
+        AssistantClient client = clientProvider.Client.GetAssistantClient();
 
         // Query and enumerate assistant definitions
         await foreach (Assistant model in client.GetAssistantsAsync(new AssistantCollectionOptions() { Order = AssistantCollectionOrder.Descending }, cancellationToken).ConfigureAwait(false))
@@ -158,17 +220,21 @@ public static async IAsyncEnumerable ListDefinitionsA
     }
 
     /// 
-    /// Retrieve a  by identifier.
+    /// Retrieves an  by identifier.
     /// 
-    /// Configuration for accessing the API service.
-    /// The agent identifier
+    /// The configuration for accessing the API service.
+    /// The agent identifier.
     /// The  containing services, plugins, and other state for use throughout the operation.
     /// Optional default arguments, including any .
-    /// An optional factory to produce the  for the agent
+    /// An optional factory to produce the  for the agent.
     /// The  to monitor for cancellation requests. The default is .
-    /// An  instance
+    /// An  instance.
+    [Experimental("SKEXP0110")]
+    [Obsolete("Use the OpenAI.Assistants.AssistantClient to retrieve an assistant definition (GetAssistantsAsync).")]
     public static async Task RetrieveAsync(
+#pragma warning disable SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
         OpenAIClientProvider clientProvider,
+#pragma warning restore SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
         string id,
         Kernel kernel,
         KernelArguments? defaultArguments = null,
@@ -181,7 +247,7 @@ public static async Task RetrieveAsync(
         Verify.NotNullOrWhiteSpace(id, nameof(id));
 
         // Create the client
-        AssistantClient client = CreateClient(clientProvider);
+        AssistantClient client = clientProvider.Client.GetAssistantClient();
 
         // Retrieve the assistant
         Assistant model = await client.GetAssistantAsync(id, cancellationToken).ConfigureAwait(false);
@@ -194,37 +260,48 @@ public static async Task RetrieveAsync(
 
         // Instantiate the agent
         return
-            new OpenAIAssistantAgent(model, clientProvider, client)
+            new OpenAIAssistantAgent(model, clientProvider.AssistantClient)
             {
                 Kernel = kernel,
-                Arguments = defaultArguments,
+                Arguments = defaultArguments ?? [],
                 Template = template,
             };
     }
 
     /// 
-    /// Create a new assistant thread.
+    /// Creates a new assistant thread.
     /// 
     /// The  to monitor for cancellation requests. The default is .
-    /// The thread identifier
+    /// The thread identifier.
+    [Experimental("SKEXP0110")]
+    [Obsolete("Use the OpenAI.Assistants.AssistantClient to create a thread.")]
     public Task CreateThreadAsync(CancellationToken cancellationToken = default)
-        => AssistantThreadActions.CreateThreadAsync(this._client, options: null, cancellationToken);
+        => this.CreateThreadAsync(options: null, cancellationToken);
 
     /// 
-    /// Create a new assistant thread.
+    /// Creates a new assistant thread.
     /// 
-    /// The options for creating the thread
+    /// The options for creating the thread.
     /// The  to monitor for cancellation requests. The default is .
-    /// The thread identifier
+    /// The thread identifier.
+    [Experimental("SKEXP0110")]
+    [Obsolete("Use the OpenAI.Assistants.AssistantClient to create a thread.")]
     public Task CreateThreadAsync(OpenAIThreadCreationOptions? options, CancellationToken cancellationToken = default)
-        => AssistantThreadActions.CreateThreadAsync(this._client, options, cancellationToken);
+        => this.Client.CreateThreadAsync(
+            options?.Messages,
+            options?.CodeInterpreterFileIds,
+            options?.VectorStoreId,
+            options?.Metadata,
+            cancellationToken);
 
     /// 
-    /// Create a new assistant thread.
+    /// Deletes an assistant thread.
     /// 
-    /// The thread identifier
+    /// The thread identifier.
     /// The  to monitor for cancellation requests. The default is .
-    /// The thread identifier
+    ///  if the thread was deleted.
+    [Experimental("SKEXP0110")]
+    [Obsolete("Use the OpenAI.Assistants.AssistantClient to delete an existing thread.")]
     public async Task DeleteThreadAsync(
         string threadId,
         CancellationToken cancellationToken = default)
@@ -232,73 +309,51 @@ public async Task DeleteThreadAsync(
         // Validate input
         Verify.NotNullOrWhiteSpace(threadId, nameof(threadId));
 
-        ThreadDeletionResult result = await this._client.DeleteThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
+        ThreadDeletionResult result = await this.Client.DeleteThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
 
         return result.Deleted;
     }
 
-    /// 
-    /// Uploads an file for the purpose of using with assistant.
-    /// 
-    /// The content to upload
-    /// The name of the file
-    /// The  to monitor for cancellation requests. The default is .
-    /// The file identifier
-    /// 
-    /// Use the  directly for more advanced file operations.
-    /// 
-    public async Task UploadFileAsync(Stream stream, string name, CancellationToken cancellationToken = default)
-    {
-        OpenAIFileClient client = this._provider.Client.GetOpenAIFileClient();
-
-        OpenAIFile fileInfo = await client.UploadFileAsync(stream, name, FileUploadPurpose.Assistants, cancellationToken).ConfigureAwait(false);
-
-        return fileInfo.Id;
-    }
-
     /// 
     /// Adds a message to the specified thread.
     /// 
-    /// The thread identifier
-    /// A non-system message with which to append to the conversation.
+    /// The thread identifier.
+    /// A non-system message to append to the conversation.
     /// The  to monitor for cancellation requests. The default is .
     /// 
-    /// Only supports messages with role = User or Assistant:
-    /// https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages
+    /// This method only supports messages with role = User or Assistant.
     /// 
     public Task AddChatMessageAsync(string threadId, ChatMessageContent message, CancellationToken cancellationToken = default)
     {
-        this.ThrowIfDeleted();
-
-        return AssistantThreadActions.CreateMessageAsync(this._client, threadId, message, cancellationToken);
+        return AssistantThreadActions.CreateMessageAsync(this.Client, threadId, message, cancellationToken);
     }
 
     /// 
     /// Gets messages for a specified thread.
     /// 
-    /// The thread identifier
+    /// The thread identifier.
     /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of messages.
+    /// An asynchronous enumeration of messages.
     public IAsyncEnumerable GetThreadMessagesAsync(string threadId, CancellationToken cancellationToken = default)
     {
-        this.ThrowIfDeleted();
-
-        return AssistantThreadActions.GetMessagesAsync(this._client, threadId, cancellationToken);
+        return AssistantThreadActions.GetMessagesAsync(this.Client, threadId, cancellationToken);
     }
 
     /// 
-    /// Delete the assistant definition.
+    /// Deletes the assistant definition.
     /// 
     /// The  to monitor for cancellation requests. The default is .
-    /// True if assistant definition has been deleted
+    ///  if the assistant definition was deleted.
     /// 
-    /// Assistant based agent will not be useable after deletion.
+    /// An assistant-based agent is not usable after deletion.
     /// 
+    [Experimental("SKEXP0110")]
+    [Obsolete("Use the OpenAI.Assistants.AssistantClient to remove or otherwise modify the Assistant definition.")]
     public async Task DeleteAsync(CancellationToken cancellationToken = default)
     {
         if (!this.IsDeleted)
         {
-            AssistantDeletionResult result = await this._client.DeleteAssistantAsync(this.Id, cancellationToken).ConfigureAwait(false);
+            AssistantDeletionResult result = await this.Client.DeleteAssistantAsync(this.Id, cancellationToken).ConfigureAwait(false);
             this.IsDeleted = result.Deleted;
         }
 
@@ -306,15 +361,15 @@ public async Task DeleteAsync(CancellationToken cancellationToken = defaul
     }
 
     /// 
-    /// Invoke the assistant on the specified thread.
+    /// Invokes the assistant on the specified thread.
     /// 
-    /// The thread identifier
+    /// The thread identifier.
     /// Optional arguments to pass to the agents's invocation, including any .
     /// The  containing services, plugins, and other state for use by the agent.
     /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of response messages.
+    /// An asynchronous enumeration of response messages.
     /// 
-    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
+    /// The "arguments" parameter is not currently used by the agent, but is provided for future extensibility.
     /// 
     public IAsyncEnumerable InvokeAsync(
         string threadId,
@@ -324,49 +379,55 @@ public IAsyncEnumerable InvokeAsync(
         => this.InvokeAsync(threadId, options: null, arguments, kernel, cancellationToken);
 
     /// 
-    /// Invoke the assistant on the specified thread.
+    /// Invokes the assistant on the specified thread.
     /// 
-    /// The thread identifier
-    /// Optional invocation options
+    /// The thread identifier.
+    /// Optional invocation options.
     /// Optional arguments to pass to the agents's invocation, including any .
     /// The  containing services, plugins, and other state for use by the agent.
     /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of response messages.
+    /// An asynchronous enumeration of response messages.
     /// 
-    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
+    /// The "arguments" parameter is not currently used by the agent, but is provided for future extensibility.
     /// 
-    public async IAsyncEnumerable InvokeAsync(
+    public IAsyncEnumerable InvokeAsync(
         string threadId,
-        OpenAIAssistantInvocationOptions? options,
+        RunCreationOptions? options,
         KernelArguments? arguments = null,
         Kernel? kernel = null,
-        [EnumeratorCancellation] CancellationToken cancellationToken = default)
+        CancellationToken cancellationToken = default)
     {
-        this.ThrowIfDeleted();
-
-        kernel ??= this.Kernel;
-        arguments = this.MergeArguments(arguments);
+        return ActivityExtensions.RunWithActivityAsync(
+            () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
+            () => InternalInvokeAsync(),
+            cancellationToken);
 
-        await foreach ((bool isVisible, ChatMessageContent message) in AssistantThreadActions.InvokeAsync(this, this._client, threadId, options, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false))
+        async IAsyncEnumerable InternalInvokeAsync()
         {
-            if (isVisible)
+            kernel ??= this.Kernel;
+            arguments = this.MergeArguments(arguments);
+
+            await foreach ((bool isVisible, ChatMessageContent message) in AssistantThreadActions.InvokeAsync(this, this.Client, threadId, options, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false))
             {
-                yield return message;
+                if (isVisible)
+                {
+                    yield return message;
+                }
             }
         }
     }
 
     /// 
-    /// Invoke the assistant on the specified thread with streaming response.
+    /// Invokes the assistant on the specified thread with streaming response.
     /// 
-    /// The thread identifier
+    /// The thread identifier.
     /// Optional arguments to pass to the agent's invocation, including any .
     /// The  containing services, plugins, and other state for use by the agent.
-    /// Optional receiver of the completed messages generated
+    /// Optional receiver of the completed messages that are generated.
     /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of messages.
+    /// An asynchronous enumeration of messages.
     /// 
-    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
+    /// The "arguments" parameter is not currently used by the agent, but is provided for future extensibility.
     /// 
     public IAsyncEnumerable InvokeStreamingAsync(
         string threadId,
@@ -377,59 +438,66 @@ public IAsyncEnumerable InvokeStreamingAsync(
         => this.InvokeStreamingAsync(threadId, options: null, arguments, kernel, messages, cancellationToken);
 
     /// 
-    /// Invoke the assistant on the specified thread with streaming response.
+    /// Invokes the assistant on the specified thread with streaming response.
     /// 
-    /// The thread identifier
-    /// Optional invocation options
+    /// The thread identifier.
+    /// Optional invocation options.
     /// Optional arguments to pass to the agent's invocation, including any .
     /// The  containing services, plugins, and other state for use by the agent.
-    /// Optional receiver of the completed messages generated
+    /// Optional receiver of the completed messages that are generated.
     /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of messages.
+    /// An asynchronous enumeration of messages.
     /// 
-    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
+    /// The "arguments" parameter is not currently used by the agent, but is provided for future extensibility.
     /// 
     public IAsyncEnumerable InvokeStreamingAsync(
         string threadId,
-        OpenAIAssistantInvocationOptions? options,
+        RunCreationOptions? options,
         KernelArguments? arguments = null,
         Kernel? kernel = null,
         ChatHistory? messages = null,
         CancellationToken cancellationToken = default)
     {
-        this.ThrowIfDeleted();
-
-        kernel ??= this.Kernel;
-        arguments = this.MergeArguments(arguments);
+#pragma warning disable SKEXP0001 // ModelDiagnostics is marked experimental.
+        return ActivityExtensions.RunWithActivityAsync(
+            () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
+            () => InternalInvokeStreamingAsync(),
+            cancellationToken);
+#pragma warning restore SKEXP0001 // ModelDiagnostics is marked experimental.
+
+        IAsyncEnumerable InternalInvokeStreamingAsync()
+        {
+            kernel ??= this.Kernel;
+            arguments = this.MergeArguments(arguments);
 
-        return AssistantThreadActions.InvokeStreamingAsync(this, this._client, threadId, messages, options, this.Logger, kernel, arguments, cancellationToken);
+            return AssistantThreadActions.InvokeStreamingAsync(this, this.Client, threadId, messages, options, this.Logger, kernel, arguments, cancellationToken);
+        }
     }
 
     /// 
+    [Experimental("SKEXP0110")]
     protected override IEnumerable GetChannelKeys()
     {
         // Distinguish from other channel types.
         yield return typeof(OpenAIAssistantChannel).FullName!;
-
-        foreach (string key in this._channelKeys)
-        {
-            yield return key;
-        }
+        // Distinguish based on client instance.
+        yield return this.Client.GetHashCode().ToString();
     }
 
     /// 
+    [Experimental("SKEXP0110")]
     protected override async Task CreateChannelAsync(CancellationToken cancellationToken)
     {
         this.Logger.LogOpenAIAssistantAgentCreatingChannel(nameof(CreateChannelAsync), nameof(OpenAIAssistantChannel));
 
-        AssistantThread thread = await this._client.CreateThreadAsync(options: null, cancellationToken).ConfigureAwait(false);
+        AssistantThread thread = await this.Client.CreateThreadAsync(options: null, cancellationToken).ConfigureAwait(false);
 
         this.Logger.LogInformation("[{MethodName}] Created assistant thread: {ThreadId}", nameof(CreateChannelAsync), thread.Id);
 
         OpenAIAssistantChannel channel =
-            new(this._client, thread.Id)
+            new(this.Client, thread.Id)
             {
-                Logger = this.LoggerFactory.CreateLogger()
+                Logger = this.ActiveLoggerFactory.CreateLogger()
             };
 
         this.Logger.LogOpenAIAssistantAgentCreatedChannel(nameof(CreateChannelAsync), nameof(OpenAIAssistantChannel), thread.Id);
@@ -437,52 +505,25 @@ protected override async Task CreateChannelAsync(CancellationToken
         return channel;
     }
 
-    internal void ThrowIfDeleted()
-    {
-        if (this.IsDeleted)
-        {
-            throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {this.Id}.");
-        }
-    }
-
     internal Task GetInstructionsAsync(Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken) =>
         this.FormatInstructionsAsync(kernel, arguments, cancellationToken);
 
     /// 
+    [Experimental("SKEXP0110")]
     protected override async Task RestoreChannelAsync(string channelState, CancellationToken cancellationToken)
     {
         string threadId = channelState;
 
         this.Logger.LogOpenAIAssistantAgentRestoringChannel(nameof(RestoreChannelAsync), nameof(OpenAIAssistantChannel), threadId);
 
-        AssistantThread thread = await this._client.GetThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
+        AssistantThread thread = await this.Client.GetThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
 
         this.Logger.LogOpenAIAssistantAgentRestoredChannel(nameof(RestoreChannelAsync), nameof(OpenAIAssistantChannel), threadId);
 
-        return new OpenAIAssistantChannel(this._client, thread.Id);
-    }
-
-    /// 
-    /// Initializes a new instance of the  class.
-    /// 
-    private OpenAIAssistantAgent(
-        Assistant model,
-        OpenAIClientProvider provider,
-        AssistantClient client)
-    {
-        this._provider = provider;
-        this._assistant = model;
-        this._client = provider.Client.GetAssistantClient();
-        this._channelKeys = provider.ConfigurationKeys.ToArray();
-
-        this.Definition = CreateAssistantDefinition(model);
-
-        this.Description = this._assistant.Description;
-        this.Id = this._assistant.Id;
-        this.Name = this._assistant.Name;
-        this.Instructions = this._assistant.Instructions;
+        return new OpenAIAssistantChannel(this.Client, thread.Id);
     }
 
+    [Obsolete]
     private static OpenAIAssistantDefinition CreateAssistantDefinition(Assistant model)
     {
         OpenAIAssistantExecutionOptions? options = null;
@@ -513,9 +554,4 @@ private static OpenAIAssistantDefinition CreateAssistantDefinition(Assistant mod
             ExecutionOptions = options,
         };
     }
-
-    private static AssistantClient CreateClient(OpenAIClientProvider config)
-    {
-        return config.Client.GetAssistantClient();
-    }
 }
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs
index c2247ec11e88..5642017c89dd 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs
@@ -1,5 +1,7 @@
 // Copyright (c) Microsoft. All rights reserved.
+using System;
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Text.Json.Serialization;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -7,75 +9,81 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
 /// 
 /// Defines the capabilities of an assistant.
 /// 
+[Experimental("SKEXP0110")]
+[Obsolete("Use the OpenAI.Assistants.AssistantClient.CreateAssistantAsync() to create an assistant definition.")]
 public class OpenAIAssistantCapabilities
 {
     /// 
-    /// Identifies the AI model targeted by the agent.
+    /// Gets the AI model targeted by the agent.
     /// 
     public string ModelId { get; }
 
     /// 
-    /// The assistant's unique id.  (Ignored on create.)
+    /// Gets the assistant's unique ID. (Ignored on create.)
     /// 
     public string Id { get; init; } = string.Empty;
 
     /// 
-    /// Optional file-ids made available to the code_interpreter tool, if enabled.
+    /// Gets optional file IDs made available to the code-interpreter tool, if enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IReadOnlyList? CodeInterpreterFileIds { get; init; }
 
     /// 
-    /// Set if code-interpreter is enabled.
+    /// Gets a value that indicates whether the code-interpreter tool is enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
     public bool EnableCodeInterpreter { get; init; }
 
     /// 
-    /// Set if file-search is enabled.
+    /// Gets a value that indicates whether the file_search tool is enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
     public bool EnableFileSearch { get; init; }
 
     /// 
-    /// Set if json response-format is enabled.
+    /// Gets a value that indicates whether the JSON response format is enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
     public bool EnableJsonResponse { get; init; }
 
     /// 
-    /// A set of up to 16 key/value pairs that can be attached to an agent, used for
-    /// storing additional information about that object in a structured format.Keys
-    /// may be up to 64 characters in length and values may be up to 512 characters in length.
+    /// Gets a set of up to 16 key/value pairs that can be attached to an agent, used for
+    /// storing additional information about that object in a structured format.
     /// 
+    /// 
+    /// Keys can be up to 64 characters in length, and values can be up to 512 characters in length.
+    /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IReadOnlyDictionary? Metadata { get; init; }
 
     /// 
-    /// The sampling temperature to use, between 0 and 2.
+    /// Gets the sampling temperature to use, between 0 and 2.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public float? Temperature { get; init; }
 
     /// 
-    /// An alternative to sampling with temperature, called nucleus sampling, where the model
-    /// considers the results of the tokens with top_p probability mass.
-    /// So 0.1 means only the tokens comprising the top 10% probability mass are considered.
+    /// Gets the probability mass of tokens whose results are considered in nucleus sampling.
     /// 
     /// 
-    /// Recommended to set this or temperature but not both.
+    /// It's recommended to set this property or , but not both.
+    ///
+    /// Nucleus sampling is an alternative to sampling with temperature where the model
+    /// considers the results of the tokens with  probability mass.
+    /// For example, 0.1 means only the tokens comprising the top 10% probability mass are considered.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public float? TopP { get; init; }
 
     /// 
-    /// Requires file-search if specified.
+    /// Gets the vector store ID. Requires file-search if specified.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? VectorStoreId { get; init; }
 
     /// 
-    /// Default execution options for each agent invocation.
+    /// Gets the default execution options for each agent invocation.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public OpenAIAssistantExecutionOptions? ExecutionOptions { get; init; }
@@ -83,7 +91,7 @@ public class OpenAIAssistantCapabilities
     /// 
     /// Initializes a new instance of the  class.
     /// 
-    /// The targeted model
+    /// The targeted model.
     [JsonConstructor]
     public OpenAIAssistantCapabilities(string modelId)
     {
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
index 506f0a837ebf..4b91bac74178 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
@@ -1,8 +1,11 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Threading;
 using System.Threading.Tasks;
+using Microsoft.SemanticKernel.Agents.Extensions;
 using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+using Microsoft.SemanticKernel.Diagnostics;
 using OpenAI.Assistants;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -10,6 +13,7 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
 /// 
 /// A  specialization for use with .
 /// 
+[Experimental("SKEXP0110")]
 internal sealed class OpenAIAssistantChannel(AssistantClient client, string threadId)
     : AgentChannel
 {
@@ -30,17 +34,19 @@ protected override async Task ReceiveAsync(IEnumerable histo
         OpenAIAssistantAgent agent,
         CancellationToken cancellationToken)
     {
-        agent.ThrowIfDeleted();
-
-        return AssistantThreadActions.InvokeAsync(agent, this._client, this._threadId, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken);
+        return ActivityExtensions.RunWithActivityAsync(
+            () => ModelDiagnostics.StartAgentInvocationActivity(agent.Id, agent.GetDisplayName(), agent.Description),
+            () => AssistantThreadActions.InvokeAsync(agent, this._client, this._threadId, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken),
+            cancellationToken);
     }
 
     /// 
     protected override IAsyncEnumerable InvokeStreamingAsync(OpenAIAssistantAgent agent, IList messages, CancellationToken cancellationToken = default)
     {
-        agent.ThrowIfDeleted();
-
-        return AssistantThreadActions.InvokeStreamingAsync(agent, this._client, this._threadId, messages, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken);
+        return ActivityExtensions.RunWithActivityAsync(
+            () => ModelDiagnostics.StartAgentInvocationActivity(agent.Id, agent.GetDisplayName(), agent.Description),
+            () => AssistantThreadActions.InvokeStreamingAsync(agent, this._client, this._threadId, messages, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken),
+            cancellationToken);
     }
 
     /// 
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs
index 79ad3f98f03e..9560857b101e 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs
@@ -1,4 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.Diagnostics.CodeAnalysis;
 using System.Text.Json.Serialization;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -6,29 +8,31 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
 /// 
 /// Defines an assistant.
 /// 
+[Experimental("SKEXP0110")]
+[Obsolete("Use the OpenAI.Assistants.AssistantClient.CreateAssistantAsync() to create an assistant definition.")]
 public sealed class OpenAIAssistantDefinition : OpenAIAssistantCapabilities
 {
     /// 
-    /// The description of the assistant.
+    /// Gets the description of the assistant.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? Description { get; init; }
 
     /// 
-    /// The system instructions for the assistant to use.
+    /// Gets the system instructions for the assistant to use.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? Instructions { get; init; }
 
     /// 
-    /// The name of the assistant.
+    /// Gets the name of the assistant.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? Name { get; init; }
 
     /// 
-    /// Provide the captured template format for the assistant if needed for agent retrieval.
-    /// ()
+    /// Gets the captured template format for the assistant if needed for agent retrieval
+    /// ().
     /// 
     [JsonIgnore]
     public string? TemplateFactoryFormat
@@ -49,7 +53,7 @@ public string? TemplateFactoryFormat
     /// 
     /// Initializes a new instance of the  class.
     /// 
-    /// The targeted model
+    /// The targeted model.
     [JsonConstructor]
     public OpenAIAssistantDefinition(string modelId)
         : base(modelId) { }
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs
index 845cecb0956c..ecfd4e52fa58 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs
@@ -1,4 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.Diagnostics.CodeAnalysis;
 using System.Text.Json.Serialization;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -7,37 +9,41 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
 /// Defines assistant execution options for each invocation.
 /// 
 /// 
-/// These options are persisted as a single entry of the assistant's metadata with key: "__run_options"
+/// These options are persisted as a single entry of the assistant's metadata with key: "__run_options".
 /// 
+[Experimental("SKEXP0110")]
+[Obsolete("Use RunCreationOptions to specify assistant invocation behavior.")]
 public sealed class OpenAIAssistantExecutionOptions
 {
     /// 
-    /// Appends additional instructions.
+    /// Gets the additional instructions.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? AdditionalInstructions { get; init; }
 
     /// 
-    /// The maximum number of completion tokens that may be used over the course of the run.
+    /// Gets the maximum number of completion tokens that can be used over the course of the run.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? MaxCompletionTokens { get; init; }
 
     /// 
-    /// The maximum number of prompt tokens that may be used over the course of the run.
+    /// Gets the maximum number of prompt tokens that can be used over the course of the run.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? MaxPromptTokens { get; init; }
 
     /// 
-    /// Enables parallel function calling during tool use.  Enabled by default.
-    /// Use this property to disable.
+    /// Gets a value that indicates whether parallel function calling is enabled during tool use.
     /// 
+    /// 
+    ///  if parallel function calling is enabled during tool use; otherwise, . The default is .
+    /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public bool? ParallelToolCallsEnabled { get; init; }
 
     /// 
-    /// When set, the thread will be truncated to the N most recent messages in the thread.
+    /// Gets the number of recent messages that the thread will be truncated to.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? TruncationMessageCount { get; init; }
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs
index 9b02cb9faf23..7aec34ee15ed 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs
@@ -1,104 +1,113 @@
 // Copyright (c) Microsoft. All rights reserved.
+using System;
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Text.Json.Serialization;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
 
 /// 
-/// Defines per invocation execution options that override the assistant definition.
+/// Defines per-invocation execution options that override the assistant definition.
 /// 
 /// 
-/// Not applicable to  usage.
+/// This class is not applicable to  usage.
 /// 
+[Experimental("SKEXP0110")]
+[Obsolete("Use RunCreationOptions to specify assistant invocation behavior.")]
 public sealed class OpenAIAssistantInvocationOptions
 {
     /// 
-    /// Override the AI model targeted by the agent.
+    /// Gets the AI model targeted by the agent.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? ModelName { get; init; }
 
     /// 
-    /// Appends additional instructions.
+    /// Gets the additional instructions.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? AdditionalInstructions { get; init; }
 
     /// 
-    /// Additional messages to add to the thread.
+    /// Gets additional messages to add to the thread.
     /// 
     /// 
-    /// Only supports messages with role = User or Assistant:
-    /// https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages
+    /// This property only supports messages with role = User or Assistant.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IReadOnlyList? AdditionalMessages { get; init; }
 
     /// 
-    /// Set if code_interpreter tool is enabled.
+    /// Gets a value that indicates whether the code_interpreter tool is enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
     public bool EnableCodeInterpreter { get; init; }
 
     /// 
-    /// Set if file_search tool is enabled.
+    /// Gets a value that indicates whether the file_search tool is enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
     public bool EnableFileSearch { get; init; }
 
     /// 
-    /// Set if json response-format is enabled.
+    /// Gets a value that indicates whether the JSON response format is enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public bool? EnableJsonResponse { get; init; }
 
     /// 
-    /// The maximum number of completion tokens that may be used over the course of the run.
+    /// Gets the maximum number of completion tokens that can be used over the course of the run.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? MaxCompletionTokens { get; init; }
 
     /// 
-    /// The maximum number of prompt tokens that may be used over the course of the run.
+    /// Gets the maximum number of prompt tokens that can be used over the course of the run.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? MaxPromptTokens { get; init; }
 
     /// 
-    /// Enables parallel function calling during tool use.  Enabled by default.
-    /// Use this property to disable.
+    /// Gets a value that indicates whether parallel function calling is enabled during tool use.
     /// 
+    /// 
+    ///  if parallel function calling is enabled during tool use; otherwise, . The default is .
+    /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public bool? ParallelToolCallsEnabled { get; init; }
 
     /// 
-    /// When set, the thread will be truncated to the N most recent messages in the thread.
+    /// Gets the number of recent messages that the thread will be truncated to.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? TruncationMessageCount { get; init; }
 
     /// 
-    /// The sampling temperature to use, between 0 and 2.
+    /// Gets the sampling temperature to use, between 0 and 2.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public float? Temperature { get; init; }
 
     /// 
-    /// An alternative to sampling with temperature, called nucleus sampling, where the model
-    /// considers the results of the tokens with top_p probability mass.
-    /// So 0.1 means only the tokens comprising the top 10% probability mass are considered.
+    /// Gets the probability mass of tokens whose results are considered in nucleus sampling.
     /// 
     /// 
-    /// Recommended to set this or temperature but not both.
+    /// It's recommended to set this property or , but not both.
+    ///
+    /// Nucleus sampling is an alternative to sampling with temperature where the model
+    /// considers the results of the tokens with  probability mass.
+    /// For example, 0.1 means only the tokens comprising the top 10% probability mass are considered.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public float? TopP { get; init; }
 
     /// 
-    /// A set of up to 16 key/value pairs that can be attached to an agent, used for
-    /// storing additional information about that object in a structured format.Keys
-    /// may be up to 64 characters in length and values may be up to 512 characters in length.
+    /// Gets a set of up to 16 key/value pairs that can be attached to an agent, used for
+    /// storing additional information about that object in a structured format.
     /// 
+    /// 
+    /// Keys can be up to 64 characters in length, and values can be up to 512 characters in length.
+    /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IReadOnlyDictionary? Metadata { get; init; }
 }
diff --git a/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
index 4eb09eed7889..eccb9509ffd1 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
@@ -3,6 +3,7 @@
 using System.ClientModel;
 using System.ClientModel.Primitives;
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Net.Http;
 using System.Threading;
@@ -10,41 +11,49 @@
 using Azure.Core;
 using Microsoft.SemanticKernel.Http;
 using OpenAI;
+using OpenAI.Assistants;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
 
 /// 
 /// Provides an  for use by .
 /// 
+[Experimental("SKEXP0110")]
 public sealed class OpenAIClientProvider
 {
     /// 
-    /// Avoids an exception from OpenAI Client when a custom endpoint is provided without an API key.
+    /// Specifies a key that avoids an exception from OpenAI Client when a custom endpoint is provided without an API key.
     /// 
     private const string SingleSpaceKey = " ";
+    private AssistantClient? _assistantClient;
 
     /// 
-    /// An active client instance.
+    /// Gets an active client instance.
     /// 
     public OpenAIClient Client { get; }
 
     /// 
-    /// Configuration keys required for  management.
+    /// Gets an active assistant client instance.
+    /// 
+    public AssistantClient AssistantClient => this._assistantClient ??= this.Client.GetAssistantClient();
+
+    /// 
+    /// Gets configuration keys required for  management.
     /// 
     internal IReadOnlyList ConfigurationKeys { get; }
 
     private OpenAIClientProvider(OpenAIClient client, IEnumerable keys)
     {
         this.Client = client;
-        this.ConfigurationKeys = keys.ToArray();
+        this.ConfigurationKeys = [.. keys];
     }
 
     /// 
-    /// Produce a  based on .
+    /// Produces an  based on .
     /// 
-    /// The API key
-    /// The service endpoint
-    /// Custom  for HTTP requests.
+    /// The API key.
+    /// The service endpoint.
+    /// A custom  for HTTP requests.
     public static OpenAIClientProvider ForAzureOpenAI(ApiKeyCredential apiKey, Uri endpoint, HttpClient? httpClient = null)
     {
         Verify.NotNull(apiKey, nameof(apiKey));
@@ -56,11 +65,11 @@ public static OpenAIClientProvider ForAzureOpenAI(ApiKeyCredential apiKey, Uri e
     }
 
     /// 
-    /// Produce a  based on .
+    /// Produces an  based on .
     /// 
-    /// The credentials
-    /// The service endpoint
-    /// Custom  for HTTP requests.
+    /// The credentials.
+    /// The service endpoint.
+    /// A custom  for HTTP requests.
     public static OpenAIClientProvider ForAzureOpenAI(TokenCredential credential, Uri endpoint, HttpClient? httpClient = null)
     {
         Verify.NotNull(credential, nameof(credential));
@@ -72,10 +81,10 @@ public static OpenAIClientProvider ForAzureOpenAI(TokenCredential credential, Ur
     }
 
     /// 
-    /// Produce a  based on .
+    /// Produces an  based on .
     /// 
-    /// An optional endpoint
-    /// Custom  for HTTP requests.
+    /// An optional endpoint.
+    /// A custom  for HTTP requests.
     public static OpenAIClientProvider ForOpenAI(Uri? endpoint = null, HttpClient? httpClient = null)
     {
         OpenAIClientOptions clientOptions = CreateOpenAIClientOptions(endpoint, httpClient);
@@ -83,11 +92,11 @@ public static OpenAIClientProvider ForOpenAI(Uri? endpoint = null, HttpClient? h
     }
 
     /// 
-    /// Produce a  based on .
+    /// Produces an  based on .
     /// 
-    /// The API key
-    /// An optional endpoint
-    /// Custom  for HTTP requests.
+    /// The API key.
+    /// An optional endpoint.
+    /// A custom  for HTTP requests.
     public static OpenAIClientProvider ForOpenAI(ApiKeyCredential apiKey, Uri? endpoint = null, HttpClient? httpClient = null)
     {
         OpenAIClientOptions clientOptions = CreateOpenAIClientOptions(endpoint, httpClient);
@@ -95,7 +104,7 @@ public static OpenAIClientProvider ForOpenAI(ApiKeyCredential apiKey, Uri? endpo
     }
 
     /// 
-    /// Directly provide a client instance.
+    /// Provides a client instance directly.
     /// 
     public static OpenAIClientProvider FromClient(OpenAIClient client)
     {
diff --git a/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs
index 18bce010a328..5be75f860eb8 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs
@@ -1,41 +1,46 @@
 // Copyright (c) Microsoft. All rights reserved.
+using System;
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Text.Json.Serialization;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
 
 /// 
-/// Thread creation options.
+/// Specifies thread creation options.
 /// 
+[Experimental("SKEXP0110")]
+[Obsolete("Use the OpenAI.Assistants.AssistantClient.CreateThreadAsync() to create a thread.")]
 public sealed class OpenAIThreadCreationOptions
 {
     /// 
-    /// Optional file-ids made available to the code_interpreter tool, if enabled.
+    /// Gets the optional file IDs made available to the code_interpreter tool, if enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IReadOnlyList? CodeInterpreterFileIds { get; init; }
 
     /// 
-    /// Optional messages to initialize thread with..
+    /// Gets the optional messages to initialize the thread with.
     /// 
     /// 
-    /// Only supports messages with role = User or Assistant:
-    /// https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages
+    /// This property only supports messages with role = User or Assistant.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IReadOnlyList? Messages { get; init; }
 
     /// 
-    /// Enables file-search if specified.
+    /// Gets the vector store ID that enables file-search.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? VectorStoreId { get; init; }
 
     /// 
-    /// A set of up to 16 key/value pairs that can be attached to an agent, used for
-    /// storing additional information about that object in a structured format.Keys
-    /// may be up to 64 characters in length and values may be up to 512 characters in length.
+    /// Gets a set of up to 16 key/value pairs that can be attached to an agent, used for
+    /// storing additional information about that object in a structured format.
     /// 
+    /// 
+    /// Keys can be up to 64 characters in length, and values can be up to 512 characters in length.
+    /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IReadOnlyDictionary? Metadata { get; init; }
 }
diff --git a/dotnet/src/Agents/OpenAI/RunPollingOptions.cs b/dotnet/src/Agents/OpenAI/RunPollingOptions.cs
index b108048f32d3..24c514686664 100644
--- a/dotnet/src/Agents/OpenAI/RunPollingOptions.cs
+++ b/dotnet/src/Agents/OpenAI/RunPollingOptions.cs
@@ -4,68 +4,68 @@
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
 
 /// 
-/// Configuration and defaults associated with polling behavior for Assistant API run processing.
+/// Provides configuration and defaults associated with polling behavior for Assistant API run processing.
 /// 
 public sealed class RunPollingOptions
 {
     /// 
-    /// The default maximum number or retries when monitoring thread-run status.
+    /// Gets the default maximum number of retries when monitoring thread-run status.
     /// 
     public static int DefaultMaximumRetryCount { get; } = 3;
 
     /// 
-    /// The default polling interval when monitoring thread-run status.
+    /// Gets the default polling interval when monitoring thread-run status.
     /// 
     public static TimeSpan DefaultPollingInterval { get; } = TimeSpan.FromMilliseconds(500);
 
     /// 
-    /// The default back-off interval when  monitoring thread-run status.
+    /// Gets the default back-off interval when monitoring thread-run status.
     /// 
     public static TimeSpan DefaultPollingBackoff { get; } = TimeSpan.FromSeconds(1);
 
     /// 
-    /// The default number of polling iterations before using .
+    /// Gets the default number of polling iterations before using .
     /// 
     public static int DefaultPollingBackoffThreshold { get; } = 2;
 
     /// 
-    /// The default polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
+    /// Gets the default polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
     /// 
     public static TimeSpan DefaultMessageSynchronizationDelay { get; } = TimeSpan.FromMilliseconds(500);
 
     /// 
-    /// The maximum retry count when polling thread-run status.
+    /// Gets or sets the maximum retry count when polling thread-run status.
     /// 
     /// 
-    /// Only affects failures that have the potential to be transient.  Explicit server error responses
-    /// will result in immediate failure.
+    /// This value only affects failures that have the potential to be transient.
+    /// Explicit server error responses will result in immediate failure.
     /// 
     public int MaximumRetryCount { get; set; } = DefaultMaximumRetryCount;
 
     /// 
-    /// The polling interval when monitoring thread-run status.
+    /// Gets or sets the polling interval when monitoring thread-run status.
     /// 
     public TimeSpan RunPollingInterval { get; set; } = DefaultPollingInterval;
 
     /// 
-    /// The back-off interval when  monitoring thread-run status.
+    /// Gets or sets the back-off interval when monitoring thread-run status.
     /// 
     public TimeSpan RunPollingBackoff { get; set; } = DefaultPollingBackoff;
 
     /// 
-    /// The number of polling iterations before using .
+    /// Gets or sets the number of polling iterations before using .
     /// 
     public int RunPollingBackoffThreshold { get; set; } = DefaultPollingBackoffThreshold;
 
     /// 
-    /// The polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
+    /// Gets or sets the polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
     /// 
     public TimeSpan MessageSynchronizationDelay { get; set; } = DefaultMessageSynchronizationDelay;
 
     /// 
     /// Gets the polling interval for the specified iteration count.
     /// 
-    /// The number of polling iterations already attempted
+    /// The number of polling iterations already attempted.
     public TimeSpan GetPollingInterval(int iterationCount) =>
         iterationCount > this.RunPollingBackoffThreshold ? this.RunPollingBackoff : this.RunPollingInterval;
 }
diff --git a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj
index b773878eb397..752bd3c1ebcb 100644
--- a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj
+++ b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj
@@ -8,11 +8,20 @@
     true
     false
     12
-    $(NoWarn);CA2007,CA1812,CA1861,CA1063,VSTHRD111,SKEXP0001,SKEXP0050,SKEXP0110;OPENAI001
+    $(NoWarn);CA2007,CA1812,CA1861,CA1063,CS0618,VSTHRD111,SKEXP0001,SKEXP0050,SKEXP0110;OPENAI001
   
 
   
+    
+    
+    
+  
+
+  
+    
     
+    
+    
     
     
     
@@ -23,15 +32,16 @@
       runtime; build; native; contentfiles; analyzers; buildtransitive
       all
     
-    
-    
   
 
   
+    
     
     
     
     
+    
+    
   
 
   
@@ -39,4 +49,4 @@
     
   
 
-
+
\ No newline at end of file
diff --git a/dotnet/src/Agents/UnitTests/AzureAI/AzureAIAssistantInvocationOptionsTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/AzureAIAssistantInvocationOptionsTests.cs
new file mode 100644
index 000000000000..5eb1ad98e687
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/AzureAI/AzureAIAssistantInvocationOptionsTests.cs
@@ -0,0 +1,113 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Text.Json;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.AzureAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using SemanticKernel.Agents.UnitTests.Test;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.AzureAI;
+
+/// 
+/// Unit testing of .
+/// 
+public class AzureAIAssistantInvocationOptionsTests
+{
+    /// 
+    /// Verify initial state.
+    /// 
+    [Fact]
+    public void OpenAIAssistantInvocationOptionsInitialState()
+    {
+        // Arrange
+        AzureAIInvocationOptions options = new();
+
+        // Assert
+        Assert.Null(options.ModelName);
+        Assert.Null(options.AdditionalInstructions);
+        Assert.Null(options.AdditionalMessages);
+        Assert.Null(options.Metadata);
+        Assert.Null(options.Temperature);
+        Assert.Null(options.TopP);
+        Assert.Null(options.ParallelToolCallsEnabled);
+        Assert.Null(options.MaxCompletionTokens);
+        Assert.Null(options.MaxPromptTokens);
+        Assert.Null(options.TruncationMessageCount);
+        Assert.Null(options.EnableJsonResponse);
+        Assert.False(options.EnableCodeInterpreter);
+        Assert.False(options.EnableFileSearch);
+
+        // Act and Assert
+        ValidateSerialization(options);
+    }
+
+    /// 
+    /// Verify initialization.
+    /// 
+    [Fact]
+    public void OpenAIAssistantInvocationOptionsAssignment()
+    {
+        // Arrange
+        AzureAIInvocationOptions options =
+            new()
+            {
+                ModelName = "testmodel",
+                AdditionalInstructions = "test instructions",
+                AdditionalMessages = [
+                    new ChatMessageContent(AuthorRole.User, "test message")
+                ],
+                Metadata = new Dictionary() { { "a", "1" } },
+                MaxCompletionTokens = 1000,
+                MaxPromptTokens = 1000,
+                ParallelToolCallsEnabled = false,
+                TruncationMessageCount = 12,
+                Temperature = 2,
+                TopP = 0,
+                EnableCodeInterpreter = true,
+                EnableJsonResponse = true,
+                EnableFileSearch = true,
+            };
+
+        // Assert
+        Assert.Equal("testmodel", options.ModelName);
+        Assert.Equal("test instructions", options.AdditionalInstructions);
+        Assert.Single(options.AdditionalMessages);
+        Assert.Equal(2, options.Temperature);
+        Assert.Equal(0, options.TopP);
+        Assert.Equal(1000, options.MaxCompletionTokens);
+        Assert.Equal(1000, options.MaxPromptTokens);
+        Assert.Equal(12, options.TruncationMessageCount);
+        Assert.False(options.ParallelToolCallsEnabled);
+        Assert.Single(options.Metadata);
+        Assert.True(options.EnableCodeInterpreter);
+        Assert.True(options.EnableJsonResponse);
+        Assert.True(options.EnableFileSearch);
+
+        // Act and Assert
+        ValidateSerialization(options);
+    }
+
+    private static void ValidateSerialization(AzureAIInvocationOptions source)
+    {
+        // Act
+        string json = JsonSerializer.Serialize(source);
+
+        AzureAIInvocationOptions? target = JsonSerializer.Deserialize(json);
+
+        // Assert
+        Assert.NotNull(target);
+        Assert.Equal(source.AdditionalInstructions, target.AdditionalInstructions);
+        Assert.Equivalent(source.AdditionalMessages, target.AdditionalMessages);
+        Assert.Equal(source.ModelName, target.ModelName);
+        Assert.Equal(source.Temperature, target.Temperature);
+        Assert.Equal(source.TopP, target.TopP);
+        Assert.Equal(source.MaxCompletionTokens, target.MaxCompletionTokens);
+        Assert.Equal(source.MaxPromptTokens, target.MaxPromptTokens);
+        Assert.Equal(source.TruncationMessageCount, target.TruncationMessageCount);
+        Assert.Equal(source.EnableCodeInterpreter, target.EnableCodeInterpreter);
+        Assert.Equal(source.EnableJsonResponse, target.EnableJsonResponse);
+        Assert.Equal(source.EnableFileSearch, target.EnableFileSearch);
+        AssertCollection.Equal(source.Metadata, target.Metadata);
+    }
+}
diff --git a/dotnet/src/Agents/UnitTests/AzureAI/AzureAIClientProviderTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/AzureAIClientProviderTests.cs
new file mode 100644
index 000000000000..96ddfb046896
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/AzureAI/AzureAIClientProviderTests.cs
@@ -0,0 +1,72 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.Net.Http;
+using Azure.AI.Projects;
+using Azure.Identity;
+using Microsoft.SemanticKernel.Agents.AzureAI;
+using Moq;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.AzureAI;
+
+/// 
+/// Unit testing of .
+/// 
+public class AzureAIClientProviderTests
+{
+    /// 
+    /// Verify the default provisioning of a client for Azure AI.
+    /// 
+    [Fact]
+    public void VerifyAzureAIClientProviderDefault()
+    {
+        // Act
+        AzureAIClientProvider provider = AzureAIClientProvider.FromConnectionString("test;test;test;test", new AzureCliCredential());
+
+        // Assert
+        Assert.NotNull(provider.Client);
+    }
+
+    /// 
+    /// Verify that the factory can create a client with an HTTP proxy.
+    /// 
+    [Fact]
+    public void VerifyAzureAIClientProviderWithHttpClient()
+    {
+        // Arrange
+        using HttpClient httpClient = new() { BaseAddress = new Uri("http://myproxy:9819") };
+
+        // Act
+        AzureAIClientProvider provider = AzureAIClientProvider.FromConnectionString("test;test;test;test", new AzureCliCredential(), httpClient);
+
+        // Assert
+        Assert.NotNull(provider.Client);
+
+        // Arrange
+        using HttpClient httpClientWithHeaders = new() { BaseAddress = new Uri("http://myproxy:9819") };
+        httpClientWithHeaders.DefaultRequestHeaders.Add("X-Test", "Test");
+
+        // Act
+        AzureAIClientProvider providerWithHeaders = AzureAIClientProvider.FromConnectionString("test;test;test;test", new AzureCliCredential(), httpClientWithHeaders);
+
+        // Assert
+        Assert.NotNull(providerWithHeaders.Client);
+
+        Assert.NotEqual(provider.ConfigurationKeys.Count, providerWithHeaders.ConfigurationKeys.Count);
+    }
+
+    /// 
+    /// Verify that the factory can accept a client that already exists.
+    /// 
+    [Fact]
+    public void VerifyAzureAIClientProviderFromClient()
+    {
+        // Arrange
+        Mock mockClient = new();
+        AzureAIClientProvider provider = AzureAIClientProvider.FromClient(mockClient.Object);
+
+        // Assert
+        Assert.NotNull(provider.Client);
+        Assert.Equal(mockClient.Object, provider.Client);
+    }
+}
diff --git a/dotnet/src/Agents/UnitTests/AzureAI/Extensions/KernelFunctionExtensionsTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/Extensions/KernelFunctionExtensionsTests.cs
new file mode 100644
index 000000000000..cb8fe8415b97
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/AzureAI/Extensions/KernelFunctionExtensionsTests.cs
@@ -0,0 +1,64 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.ComponentModel;
+using Azure.AI.Projects;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.AzureAI;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.AzureAI.Extensions;
+
+/// 
+/// Unit testing of .
+/// 
+public class KernelFunctionExtensionsTests
+{
+    /// 
+    /// Verify conversion from  to .
+    /// 
+    [Fact]
+    public void VerifyKernelFunctionToFunctionTool()
+    {
+        // Arrange
+        KernelPlugin plugin = KernelPluginFactory.CreateFromType();
+
+        // Assert
+        Assert.Equal(2, plugin.FunctionCount);
+
+        // Arrange
+        KernelFunction f1 = plugin[nameof(TestPlugin.TestFunction1)];
+        KernelFunction f2 = plugin[nameof(TestPlugin.TestFunction2)];
+
+        // Act
+        FunctionToolDefinition definition1 = f1.ToToolDefinition("testplugin");
+
+        // Assert
+        Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction1)}", definition1.Name, StringComparison.Ordinal);
+        Assert.Equal("test description", definition1.Description);
+
+        // Act
+        FunctionToolDefinition definition2 = f2.ToToolDefinition("testplugin");
+
+        // Assert
+        Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction2)}", definition2.Name, StringComparison.Ordinal);
+        Assert.Equal("test description", definition2.Description);
+    }
+
+    /// 
+    /// Exists only for parsing.
+    /// 
+#pragma warning disable CA1812 // Avoid uninstantiated internal classes
+    private sealed class TestPlugin()
+#pragma warning restore CA1812 // Avoid uninstantiated internal classes
+    {
+        [KernelFunction]
+        [Description("test description")]
+        public void TestFunction1() { }
+
+        [KernelFunction]
+        [Description("test description")]
+#pragma warning disable IDE0060 // Unused parameter for mock kernel function
+        public void TestFunction2(string p1, bool p2, int p3, string[] p4, ConsoleColor p5, DateTime p6) { }
+#pragma warning restore IDE0060 // Unused parameter
+    }
+}
diff --git a/dotnet/src/Agents/UnitTests/AzureAI/Internal/AgentMessageFactoryTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/Internal/AgentMessageFactoryTests.cs
new file mode 100644
index 000000000000..c42de6fc38a2
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/AzureAI/Internal/AgentMessageFactoryTests.cs
@@ -0,0 +1,112 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.Linq;
+using Azure.AI.Projects;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.AzureAI.Internal;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.AzureAI.Internal;
+
+/// 
+/// Unit testing of .
+/// 
+public class AgentMessageFactoryTests
+{
+    /// 
+    /// Verify thread message creation from text content.
+    /// 
+    [Fact]
+    public void VerifyAssistantMessageAdapterGetMessageContentsWithText()
+    {
+        // Arrange
+        ChatMessageContent message = new(AuthorRole.User, items: [new TextContent("test")]);
+
+        // Act
+        ThreadMessageOptions[] contents = AgentMessageFactory.GetThreadMessages([message]).ToArray();
+
+        // Assert
+        Assert.NotNull(contents);
+        Assert.Single(contents);
+        Assert.NotNull(contents[0].Content);
+    }
+
+    /// 
+    /// Verify that an image URL does not produce a thread message.
+    /// 
+    [Fact]
+    public void VerifyAssistantMessageAdapterGetMessageWithImageUrl()
+    {
+        // Arrange
+        ChatMessageContent message = new(AuthorRole.User, items: [new ImageContent(new Uri("https://localhost/myimage.png"))]);
+
+        // Act
+        ThreadMessageOptions[] contents = AgentMessageFactory.GetThreadMessages([message]).ToArray();
+
+        // Assert
+        Assert.NotNull(contents);
+        Assert.Empty(contents);
+    }
+
+    /// 
+    /// Verify that image data does not produce a thread message.
+    /// 
+    [Fact]
+    public void VerifyAssistantMessageAdapterGetMessageWithImageData()
+    {
+        // Arrange
+        ChatMessageContent message = new(AuthorRole.User, items: [new ImageContent(new byte[] { 1, 2, 3 }, "image/png") { DataUri = "data:image/png;base64,MTIz" }]);
+
+        // Act
+        ThreadMessageOptions[] contents = AgentMessageFactory.GetThreadMessages([message]).ToArray();
+
+        // Assert
+        Assert.NotNull(contents);
+        Assert.Empty(contents);
+    }
+
+    /// 
+    /// Verify that a file reference alone does not produce a thread message.
+    /// 
+    [Fact]
+    public void VerifyAssistantMessageAdapterGetMessageWithImageFile()
+    {
+        // Arrange
+        ChatMessageContent message = new(AuthorRole.User, items: [new FileReferenceContent("file-id")]);
+
+        // Act
+        ThreadMessageOptions[] contents = AgentMessageFactory.GetThreadMessages([message]).ToArray();
+
+        // Assert
+        Assert.NotNull(contents);
+        Assert.Empty(contents);
+    }
+
+    /// 
+    /// Verify thread message creation from mixed content with an attachment.
+    /// 
+    [Fact]
+    public void VerifyAssistantMessageAdapterGetMessageWithAll()
+    {
+        // Arrange
+        ChatMessageContent message =
+            new(
+                AuthorRole.User,
+                items:
+                [
+                    new TextContent("test"),
+                    new ImageContent(new Uri("https://localhost/myimage.png")),
+                    new FileReferenceContent("file-id")
+                ]);
+
+        // Act
+        ThreadMessageOptions[] contents = AgentMessageFactory.GetThreadMessages([message]).ToArray();
+
+        // Assert
+        Assert.NotNull(contents);
+        Assert.Single(contents);
+        Assert.NotNull(contents[0].Content);
+        Assert.Single(contents[0].Attachments);
+    }
+}
diff --git a/dotnet/src/Agents/UnitTests/AzureAI/RunPollingOptionsTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/RunPollingOptionsTests.cs
new file mode 100644
index 000000000000..9d1054ac9bb6
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/AzureAI/RunPollingOptionsTests.cs
@@ -0,0 +1,71 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using Microsoft.SemanticKernel.Agents.AzureAI;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.AzureAI;
+
+/// 
+/// Unit testing of .
+/// 
+public class RunPollingOptionsTests
+{
+    /// 
+    /// Verify initial state.
+    /// 
+    [Fact]
+    public void RunPollingOptionsInitialStateTest()
+    {
+        // Arrange
+        RunPollingOptions options = new();
+
+        // Assert
+        Assert.Equal(RunPollingOptions.DefaultPollingInterval, options.RunPollingInterval);
+        Assert.Equal(RunPollingOptions.DefaultPollingBackoff, options.RunPollingBackoff);
+        Assert.Equal(RunPollingOptions.DefaultMessageSynchronizationDelay, options.MessageSynchronizationDelay);
+        Assert.Equal(RunPollingOptions.DefaultPollingBackoffThreshold, options.RunPollingBackoffThreshold);
+    }
+
+    /// 
+    /// Verify initialization.
+    /// 
+    [Fact]
+    public void RunPollingOptionsAssignmentTest()
+    {
+        // Arrange
+        RunPollingOptions options =
+            new()
+            {
+                RunPollingInterval = TimeSpan.FromSeconds(3),
+                RunPollingBackoff = TimeSpan.FromSeconds(4),
+                RunPollingBackoffThreshold = 8,
+                MessageSynchronizationDelay = TimeSpan.FromSeconds(5),
+            };
+
+        // Assert
+        Assert.Equal(3, options.RunPollingInterval.TotalSeconds);
+        Assert.Equal(4, options.RunPollingBackoff.TotalSeconds);
+        Assert.Equal(5, options.MessageSynchronizationDelay.TotalSeconds);
+        Assert.Equal(8, options.RunPollingBackoffThreshold);
+    }
+
+    /// 
+    /// Verify polling interval calculation.
+    /// 
+    [Fact]
+    public void RunPollingOptionsGetIntervalTest()
+    {
+        // Arrange
+        RunPollingOptions options =
+            new()
+            {
+                RunPollingInterval = TimeSpan.FromSeconds(3),
+                RunPollingBackoff = TimeSpan.FromSeconds(4),
+                RunPollingBackoffThreshold = 8,
+            };
+
+        // Assert
+        Assert.Equal(options.RunPollingInterval, options.GetPollingInterval(8));
+        Assert.Equal(options.RunPollingBackoff, options.GetPollingInterval(9));
+    }
+}
diff --git a/dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentChannelTests.cs b/dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentChannelTests.cs
new file mode 100644
index 000000000000..03f1cfbbae1b
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentChannelTests.cs
@@ -0,0 +1,289 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+using Amazon.BedrockAgent;
+using Amazon.BedrockAgentRuntime;
+using Amazon.BedrockAgentRuntime.Model;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.Bedrock;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Moq;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.Bedrock;
+
+/// 
+/// Unit testing of .
+/// 
+public class BedrockAgentChannelTests
+{
+    private readonly Amazon.BedrockAgent.Model.Agent _agentModel = new()
+    {
+        AgentId = "1234567890",
+        AgentName = "testName",
+        Description = "test description",
+        Instruction = "Instruction must have at least 40 characters",
+    };
+
+    /// 
+    /// Verify the simple scenario of receiving messages in a .
+    /// 
+    [Fact]
+    public async Task VerifyReceiveAsync()
+    {
+        // Arrange
+        BedrockAgentChannel channel = new();
+        List history = this.CreateNormalHistory();
+
+        // Act
+        await channel.ReceiveAsync(history);
+
+        // Assert
+        Assert.Equal(2, await channel.GetHistoryAsync().CountAsync());
+    }
+
+    /// 
+    /// Verify the  skips messages with empty content.
+    /// 
+    [Fact]
+    public async Task VerifyReceiveWithEmptyContentAsync()
+    {
+        // Arrange
+        BedrockAgentChannel channel = new();
+        List history = [
+            new ChatMessageContent()
+            {
+                Role = AuthorRole.User,
+            },
+        ];
+
+        // Act
+        await channel.ReceiveAsync(history);
+
+        // Assert
+        Assert.Empty(await channel.GetHistoryAsync().ToArrayAsync());
+    }
+
+    /// 
+    /// Verify the channel inserts placeholders when the message sequence is incorrect.
+    /// 
+    [Fact]
+    public async Task VerifyReceiveWithIncorrectSequenceAsync()
+    {
+        // Arrange
+        BedrockAgentChannel channel = new();
+        List history = this.CreateIncorrectSequenceHistory();
+
+        // Act
+        await channel.ReceiveAsync(history);
+
+        // Assert that a user message is inserted between the two agent messages.
+        // Note that `GetHistoryAsync` returns the history in a reversed order.
+        Assert.Equal(6, await channel.GetHistoryAsync().CountAsync());
+        Assert.Equal(AuthorRole.User, (await channel.GetHistoryAsync().ToArrayAsync())[3].Role);
+    }
+
+    /// <summary>
+    /// Verify the channel empties the history when reset.
+    /// </summary>
+    [Fact]
+    public async Task VerifyResetAsync()
+    {
+        // Arrange
+        BedrockAgentChannel channel = new();
+        List<ChatMessageContent> history = this.CreateNormalHistory();
+
+        // Act
+        await channel.ReceiveAsync(history);
+
+        // Assert
+        Assert.NotEmpty(await channel.GetHistoryAsync().ToArrayAsync());
+
+        // Act
+        await channel.ResetAsync();
+
+        // Assert
+        Assert.Empty(await channel.GetHistoryAsync().ToArrayAsync());
+    }
+
+    /// <summary>
+    /// Verify the channel correctly prepares the history for invocation.
+    /// </summary>
+    [Fact]
+    public async Task VerifyInvokeAsync()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+        BedrockAgent agent = new(this._agentModel, mockClient.Object, mockRuntimeClient.Object);
+
+        BedrockAgentChannel channel = new();
+        List<ChatMessageContent> history = this.CreateIncorrectSequenceHistory();
+
+        // Act
+        async Task InvokeAgent()
+        {
+            await channel.ReceiveAsync(history);
+            await foreach (var _ in channel.InvokeAsync(agent))
+            {
+                continue;
+            }
+        }
+
+        // Assert
+        await Assert.ThrowsAsync(() => InvokeAgent());
+        mockRuntimeClient.Verify(x => x.InvokeAgentAsync(
+            It.Is<InvokeAgentRequest>(r =>
+                r.AgentAliasId == BedrockAgent.WorkingDraftAgentAlias
+                && r.AgentId == this._agentModel.AgentId
+                && r.InputText == "[SILENCE]"   // Inserted by `EnsureLastMessageIsUser`.
+                && r.SessionState.ConversationHistory.Messages.Count == 6   // There is also a user message inserted between the two agent messages.
+            ),
+            It.IsAny<CancellationToken>()
+        ), Times.Once);
+    }
+
+    /// <summary>
+    /// Verify the channel returns an empty stream when invoking with an empty history.
+    /// </summary>
+    [Fact]
+    public async Task VerifyInvokeWithEmptyHistoryAsync()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+        BedrockAgent agent = new(this._agentModel, mockClient.Object, mockRuntimeClient.Object);
+
+        BedrockAgentChannel channel = new();
+
+        // Act
+        List<ChatMessageContent> history = [];
+        await foreach ((bool _, ChatMessageContent Message) in channel.InvokeAsync(agent))
+        {
+            history.Add(Message);
+        }
+
+        // Assert
+        Assert.Empty(history);
+    }
+
+    /// <summary>
+    /// Verify the channel correctly prepares the history for streaming invocation.
+    /// </summary>
+    [Fact]
+    public async Task VerifyInvokeStreamAsync()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+        BedrockAgent agent = new(this._agentModel, mockClient.Object, mockRuntimeClient.Object);
+
+        BedrockAgentChannel channel = new();
+        List<ChatMessageContent> history = this.CreateIncorrectSequenceHistory();
+
+        // Act
+        async Task InvokeAgent()
+        {
+            await channel.ReceiveAsync(history);
+            await foreach (var _ in channel.InvokeStreamingAsync(agent, []))
+            {
+                continue;
+            }
+        }
+
+        // Assert
+        await Assert.ThrowsAsync(() => InvokeAgent());
+        mockRuntimeClient.Verify(x => x.InvokeAgentAsync(
+            It.Is<InvokeAgentRequest>(r =>
+                r.AgentAliasId == BedrockAgent.WorkingDraftAgentAlias
+                && r.AgentId == this._agentModel.AgentId
+                && r.InputText == "[SILENCE]"   // Inserted by `EnsureLastMessageIsUser`.
+                && r.SessionState.ConversationHistory.Messages.Count == 6   // There is also a user message inserted between the two agent messages.
+            ),
+            It.IsAny<CancellationToken>()
+        ), Times.Once);
+    }
+
+    /// <summary>
+    /// Verify the channel returns an empty stream when invoking streaming with an empty history.
+    /// </summary>
+    [Fact]
+    public async Task VerifyInvokeStreamingWithEmptyHistoryAsync()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+        BedrockAgent agent = new(this._agentModel, mockClient.Object, mockRuntimeClient.Object);
+
+        BedrockAgentChannel channel = new();
+
+        // Act
+        List<StreamingChatMessageContent> history = [];
+        await foreach (var message in channel.InvokeStreamingAsync(agent, []))
+        {
+            history.Add(message);
+        }
+
+        // Assert
+        Assert.Empty(history);
+    }
+
+    private List<ChatMessageContent> CreateNormalHistory()
+    {
+        return
+        [
+            new ChatMessageContent(AuthorRole.User, "Hi!"),
+            new ChatMessageContent(AuthorRole.Assistant, "Hi, how can I help you?"),
+        ];
+    }
+
+    private List<ChatMessageContent> CreateIncorrectSequenceHistory()
+    {
+        return
+        [
+            new ChatMessageContent(AuthorRole.User, "What is a word that starts with 'x'?"),
+            new ChatMessageContent(AuthorRole.Assistant, "Xylophone.")
+            {
+                AuthorName = "Agent 1"
+            },
+            new ChatMessageContent(AuthorRole.Assistant, "Xenon.")
+            {
+                AuthorName = "Agent 2"
+            },
+            new ChatMessageContent(AuthorRole.User, "Thanks!"),
+            new ChatMessageContent(AuthorRole.Assistant, "Is there anything else you need?")
+            {
+                AuthorName = "Agent 1"
+            },
+        ];
+    }
+
+    private (Mock<AmazonBedrockAgentClient>, Mock<AmazonBedrockAgentRuntimeClient>) CreateMockClients()
+    {
+#pragma warning disable Moq1410 // Moq: Set MockBehavior to Strict
+        Mock<AmazonBedrockAgentConfig> mockClientConfig = new();
+        Mock<AmazonBedrockAgentRuntimeConfig> mockRuntimeClientConfig = new();
+        mockClientConfig.Setup(x => x.Validate()).Verifiable();
+        mockRuntimeClientConfig.Setup(x => x.Validate()).Verifiable();
+        Mock<AmazonBedrockAgentClient> mockClient = new(
+            "fakeAccessId",
+            "fakeSecretKey",
+            mockClientConfig.Object);
+        Mock<AmazonBedrockAgentRuntimeClient> mockRuntimeClient = new(
+            "fakeAccessId",
+            "fakeSecretKey",
+            mockRuntimeClientConfig.Object);
+#pragma warning restore Moq1410 // Moq: Set MockBehavior to Strict
+        mockRuntimeClient.Setup(x => x.InvokeAgentAsync(
+            It.IsAny<InvokeAgentRequest>(),
+            It.IsAny<CancellationToken>())
+        ).ReturnsAsync(new InvokeAgentResponse()
+        {
+            // It's not important what the response is for this test.
+            // And it's difficult to mock the response stream.
+            // Tests should expect an exception to be thrown.
+            HttpStatusCode = System.Net.HttpStatusCode.NotFound,
+        });
+
+        return (mockClient, mockRuntimeClient);
+    }
+}
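
A minimal usage sketch of the channel round trip (receive, invoke, reset) that the tests above exercise, mirroring the test setup; the client construction and agent id are hypothetical placeholders, not part of this patch:

    using System;
    using Amazon.BedrockAgent;
    using Amazon.BedrockAgentRuntime;
    using Microsoft.SemanticKernel;
    using Microsoft.SemanticKernel.Agents.Bedrock;
    using Microsoft.SemanticKernel.ChatCompletion;

    // Hypothetical, pre-existing agent model and clients (placeholder values).
    AmazonBedrockAgentClient client = new();
    AmazonBedrockAgentRuntimeClient runtimeClient = new();
    Amazon.BedrockAgent.Model.Agent agentModel = new() { AgentId = "my-agent-id" };

    BedrockAgent agent = new(agentModel, client, runtimeClient);
    BedrockAgentChannel channel = new();

    // Receive prior conversation turns into the channel, then invoke the agent.
    await channel.ReceiveAsync([new ChatMessageContent(AuthorRole.User, "Hi!")]);
    await foreach ((bool isVisible, ChatMessageContent message) in channel.InvokeAsync(agent))
    {
        Console.WriteLine(message.Content);
    }

    // Clear the channel history when the conversation is done.
    await channel.ResetAsync();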
diff --git a/dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentTests.cs b/dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentTests.cs
new file mode 100644
index 000000000000..ffc86b79662d
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentTests.cs
@@ -0,0 +1,290 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.ComponentModel;
+using System.Threading.Tasks;
+using Amazon.BedrockAgent;
+using Amazon.BedrockAgent.Model;
+using Amazon.BedrockAgentRuntime;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.Bedrock;
+using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
+using Moq;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.Bedrock;
+
+/// <summary>
+/// Unit testing of <see cref="BedrockAgent"/>.
+/// </summary>
+public class BedrockAgentTests
+{
+    private readonly Amazon.BedrockAgent.Model.Agent _agentModel = new()
+    {
+        AgentId = "1234567890",
+        AgentName = "testName",
+        Description = "test description",
+        Instruction = "Instruction must have at least 40 characters",
+    };
+
+    private readonly CreateAgentRequest _createAgentRequest = new()
+    {
+        AgentName = "testName",
+        Description = "test description",
+        Instruction = "Instruction must have at least 40 characters",
+    };
+
+    /// <summary>
+    /// Verify the initialization of <see cref="BedrockAgent"/>.
+    /// </summary>
+    [Fact]
+    public void VerifyBedrockAgentDefinition()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+        BedrockAgent agent = new(this._agentModel, mockClient.Object, mockRuntimeClient.Object);
+
+        // Assert
+        this.VerifyAgent(agent);
+    }
+
+    /// <summary>
+    /// Verify the creation of <see cref="BedrockAgent"/> without specialized settings.
+    /// </summary>
+    [Fact]
+    public async Task VerifyBedrockAgentCreateAsync()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
+
+        // Act
+        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
+
+        // Assert
+        this.VerifyAgent(bedrockAgent);
+    }
+
+    /// <summary>
+    /// Verify the creation of <see cref="BedrockAgent"/> with action groups.
+    /// </summary>
+    [Fact]
+    public async Task VerifyBedrockAgentCreateWithActionGroupsAsync()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+        // Mock the creation of an agent action group.
+        mockClient.Setup(x => x.CreateAgentActionGroupAsync(
+            It.IsAny(),
+            default)
+        ).ReturnsAsync(new CreateAgentActionGroupResponse());
+        // Override the sequence of calls to GetAgentAsync to return the agent status
+        // because creating an agent action group will require the agent to be prepared again.
+        mockClient.SetupSequence(x => x.GetAgentAsync(
+            It.IsAny(),
+            default)
+        ).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.NOT_PREPARED,
+            }
+        }).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.PREPARING,
+            }
+        }).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.PREPARED,
+            }
+        }).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.PREPARING,
+            }
+        }).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.PREPARED,
+            }
+        });
+        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
+
+        // Act
+        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
+        await bedrockAgent.CreateCodeInterpreterActionGroupAsync();
+
+        // Assert
+        this.VerifyAgent(bedrockAgent);
+        mockClient.Verify(x => x.CreateAgentActionGroupAsync(
+            It.IsAny(),
+            default), Times.Exactly(1));
+    }
+
+    /// <summary>
+    /// Verify the creation of <see cref="BedrockAgent"/> with a kernel.
+    /// </summary>
+    [Fact]
+    public async Task VerifyBedrockAgentCreateWithKernelAsync()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
+
+        // Act
+        Kernel kernel = new();
+        kernel.Plugins.Add(KernelPluginFactory.CreateFromType<WeatherPlugin>());
+        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object)
+        {
+            Kernel = kernel,
+        };
+
+        // Assert
+        this.VerifyAgent(bedrockAgent);
+        Assert.Single(bedrockAgent.Kernel.Plugins);
+    }
+
+    /// <summary>
+    /// Verify the creation of <see cref="BedrockAgent"/> with kernel arguments.
+    /// </summary>
+    [Fact]
+    public async Task VerifyBedrockAgentCreateWithKernelArgumentsAsync()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
+
+        // Act
+        KernelArguments arguments = new() { { "key", "value" } };
+        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object)
+        {
+            Arguments = arguments,
+        };
+
+        // Assert
+        this.VerifyAgent(bedrockAgent);
+        Assert.Single(bedrockAgent.Arguments);
+    }
+
+    /// <summary>
+    /// Verify the Bedrock agent returns the expected channel key.
+    /// </summary>
+    [Fact]
+    public async Task VerifyBedrockAgentChannelKeyAsync()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
+
+        // Act
+        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
+
+        // Assert
+        Assert.Single(bedrockAgent.GetChannelKeys());
+    }
+
+    private (Mock<AmazonBedrockAgentClient>, Mock<AmazonBedrockAgentRuntimeClient>) CreateMockClients()
+    {
+#pragma warning disable Moq1410 // Moq: Set MockBehavior to Strict
+        Mock<AmazonBedrockAgentConfig> mockClientConfig = new();
+        Mock<AmazonBedrockAgentRuntimeConfig> mockRuntimeClientConfig = new();
+        mockClientConfig.Setup(x => x.Validate()).Verifiable();
+        mockRuntimeClientConfig.Setup(x => x.Validate()).Verifiable();
+        Mock<AmazonBedrockAgentClient> mockClient = new(
+            "fakeAccessId",
+            "fakeSecretKey",
+            mockClientConfig.Object);
+        Mock<AmazonBedrockAgentRuntimeClient> mockRuntimeClient = new(
+            "fakeAccessId",
+            "fakeSecretKey",
+            mockRuntimeClientConfig.Object);
+#pragma warning restore Moq1410 // Moq: Set MockBehavior to Strict
+
+        mockClient.Setup(x => x.CreateAgentAsync(
+            It.IsAny(),
+            default)
+        ).ReturnsAsync(new CreateAgentResponse { Agent = this._agentModel });
+
+        // After a new agent is created, its status will first be CREATING then NOT_PREPARED.
+        // Internally, we will prepare the agent for use. During preparation, the agent status
+        // will be PREPARING, then finally PREPARED.
+        mockClient.SetupSequence(x => x.GetAgentAsync(
+            It.IsAny(),
+            default)
+        ).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.NOT_PREPARED,
+            }
+        }).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.PREPARING,
+            }
+        }).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.PREPARED,
+            }
+        });
+
+        return (mockClient, mockRuntimeClient);
+    }
+
+    private void VerifyAgent(BedrockAgent bedrockAgent)
+    {
+        Assert.Equal(bedrockAgent.Id, this._agentModel.AgentId);
+        Assert.Equal(bedrockAgent.Name, this._agentModel.AgentName);
+        Assert.Equal(bedrockAgent.Description, this._agentModel.Description);
+        Assert.Equal(bedrockAgent.Instructions, this._agentModel.Instruction);
+    }
+
+    private sealed class WeatherPlugin
+    {
+        [KernelFunction, Description("Provides realtime weather information.")]
+        public string Current([Description("The location to get the weather for.")] string location)
+        {
+            return $"The current weather in {location} is 72 degrees.";
+        }
+    }
+}
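
A minimal sketch of the creation flow these tests mock, assuming real Bedrock clients; the agent name, instruction, and foundation-model id are placeholders, not part of this patch, and required fields such as the agent resource role are omitted for brevity:

    using Amazon.BedrockAgent;
    using Amazon.BedrockAgent.Model;
    using Amazon.BedrockAgentRuntime;
    using Microsoft.SemanticKernel.Agents.Bedrock;
    using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;

    AmazonBedrockAgentClient client = new();
    AmazonBedrockAgentRuntimeClient runtimeClient = new();

    // CreateAndPrepareAgentAsync polls GetAgentAsync until the agent reaches PREPARED.
    var agentModel = await client.CreateAndPrepareAgentAsync(new CreateAgentRequest
    {
        AgentName = "my-agent",                                       // placeholder
        Instruction = "Instruction must have at least 40 characters.",
        FoundationModel = "anthropic.claude-3-haiku",                 // placeholder model id
    });

    // Wrap the prepared agent and enable the code-interpreter action group.
    BedrockAgent agent = new(agentModel, client, runtimeClient);
    await agent.CreateCodeInterpreterActionGroupAsync();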
diff --git a/dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockAgentExtensionsTests.cs b/dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockAgentExtensionsTests.cs
new file mode 100644
index 000000000000..78f8c8bd67c4
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockAgentExtensionsTests.cs
@@ -0,0 +1,320 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Threading.Tasks;
+using Amazon.BedrockAgent;
+using Amazon.BedrockAgent.Model;
+using Amazon.BedrockAgentRuntime;
+using Microsoft.SemanticKernel.Agents.Bedrock;
+using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
+using Moq;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.Bedrock.Extensions;
+
+/// <summary>
+/// Unit testing of <see cref="BedrockAgentExtensions"/>.
+/// </summary>
+public class BedrockAgentExtensionsTests
+{
+    private readonly Amazon.BedrockAgent.Model.Agent _agentModel = new()
+    {
+        AgentId = "1234567890",
+        AgentName = "testName",
+        Description = "test description",
+        Instruction = "Instruction must have at least 40 characters",
+    };
+
+    private readonly CreateAgentRequest _createAgentRequest = new()
+    {
+        AgentName = "testName",
+        Description = "test description",
+        Instruction = "Instruction must have at least 40 characters",
+    };
+
+    /// <summary>
+    /// Verify the creation of the agent and the preparation of the agent.
+    /// The status of the agent should be checked 3 times based on the setup.
+    /// 1: Waiting for the agent to go from CREATING to NOT_PREPARED.
+    /// 2: Waiting for the agent to go from NOT_PREPARED to PREPARING.
+    /// 3: Waiting for the agent to go from PREPARING to PREPARED.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateAndPrepareAgentAsync()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+
+        // Act
+        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
+
+        // Assert
+        mockClient.Verify(x => x.GetAgentAsync(
+            It.IsAny(),
+            default), Times.Exactly(3));
+    }
+
+    /// <summary>
+    /// Verify the modification and preparation of the agent is correctly performed.
+    /// The status of the agent should go through the following states:
+    /// PREPARED -> PREPARING -> PREPARED.
+    /// </summary>
+    [Fact]
+    public async Task VerifyAssociateAgentKnowledgeBaseAsync()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+        this.ModifyMockClientGetAgentResponseSequence(mockClient);
+
+        mockClient.Setup(x => x.AssociateAgentKnowledgeBaseAsync(
+            It.IsAny(),
+            default)
+        ).ReturnsAsync(new AssociateAgentKnowledgeBaseResponse());
+
+        // Act
+        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
+        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
+        await bedrockAgent.AssociateAgentKnowledgeBaseAsync("testKnowledgeBaseId", "testKnowledgeBaseDescription");
+
+        // Assert
+        mockClient.Verify(x => x.GetAgentAsync(
+            It.IsAny(),
+            default), Times.Exactly(5));
+    }
+
+    /// <summary>
+    /// Verify the modification and preparation of the agent is correctly performed.
+    /// The status of the agent should go through the following states:
+    /// PREPARED -> PREPARING -> PREPARED.
+    /// </summary>
+    [Fact]
+    public async Task VerifyDisassociateAgentKnowledgeBaseAsync()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+        this.ModifyMockClientGetAgentResponseSequence(mockClient);
+
+        mockClient.Setup(x => x.DisassociateAgentKnowledgeBaseAsync(
+            It.IsAny(),
+            default)
+        ).ReturnsAsync(new DisassociateAgentKnowledgeBaseResponse());
+
+        // Act
+        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
+        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
+        await bedrockAgent.DisassociateAgentKnowledgeBaseAsync("testKnowledgeBaseId");
+
+        // Assert
+        mockClient.Verify(x => x.GetAgentAsync(
+            It.IsAny(),
+            default), Times.Exactly(5));
+    }
+
+    /// <summary>
+    /// Verify the modification and preparation of the agent is correctly performed.
+    /// The status of the agent should go through the following states:
+    /// PREPARED -> PREPARING -> PREPARED.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateCodeInterpreterActionGroupAsync()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+        this.ModifyMockClientGetAgentResponseSequence(mockClient);
+
+        mockClient.Setup(x => x.CreateAgentActionGroupAsync(
+            It.IsAny(),
+            default)
+        ).ReturnsAsync(new CreateAgentActionGroupResponse());
+
+        // Act
+        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
+        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
+        await bedrockAgent.CreateCodeInterpreterActionGroupAsync();
+
+        // Assert
+        mockClient.Verify(x => x.GetAgentAsync(
+            It.IsAny(),
+            default), Times.Exactly(5));
+    }
+
+    /// <summary>
+    /// Verify the modification and preparation of the agent is correctly performed.
+    /// The status of the agent should go through the following states:
+    /// PREPARED -> PREPARING -> PREPARED.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateKernelFunctionActionGroupAsync()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+        this.ModifyMockClientGetAgentResponseSequence(mockClient);
+
+        mockClient.Setup(x => x.CreateAgentActionGroupAsync(
+            It.IsAny(),
+            default)
+        ).ReturnsAsync(new CreateAgentActionGroupResponse());
+
+        // Act
+        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
+        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
+        await bedrockAgent.CreateKernelFunctionActionGroupAsync();
+
+        // Assert
+        mockClient.Verify(x => x.GetAgentAsync(
+            It.IsAny(),
+            default), Times.Exactly(5));
+    }
+
+    /// <summary>
+    /// Verify the modification and preparation of the agent is correctly performed.
+    /// The status of the agent should go through the following states:
+    /// PREPARED -> PREPARING -> PREPARED.
+    /// </summary>
+    [Fact]
+    public async Task VerifyEnableUserInputActionGroupAsync()
+    {
+        // Arrange
+        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
+        this.ModifyMockClientGetAgentResponseSequence(mockClient);
+
+        mockClient.Setup(x => x.CreateAgentActionGroupAsync(
+            It.IsAny(),
+            default)
+        ).ReturnsAsync(new CreateAgentActionGroupResponse());
+
+        // Act
+        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
+        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
+        await bedrockAgent.EnableUserInputActionGroupAsync();
+
+        // Assert
+        mockClient.Verify(x => x.GetAgentAsync(
+            It.IsAny(),
+            default), Times.Exactly(5));
+    }
+
+    private (Mock<AmazonBedrockAgentClient>, Mock<AmazonBedrockAgentRuntimeClient>) CreateMockClients()
+    {
+#pragma warning disable Moq1410 // Moq: Set MockBehavior to Strict
+        Mock<AmazonBedrockAgentConfig> mockClientConfig = new();
+        Mock<AmazonBedrockAgentRuntimeConfig> mockRuntimeClientConfig = new();
+        mockClientConfig.Setup(x => x.Validate()).Verifiable();
+        mockRuntimeClientConfig.Setup(x => x.Validate()).Verifiable();
+        Mock<AmazonBedrockAgentClient> mockClient = new(
+            "fakeAccessId",
+            "fakeSecretKey",
+            mockClientConfig.Object);
+        Mock<AmazonBedrockAgentRuntimeClient> mockRuntimeClient = new(
+            "fakeAccessId",
+            "fakeSecretKey",
+            mockRuntimeClientConfig.Object);
+#pragma warning restore Moq1410 // Moq: Set MockBehavior to Strict
+
+        mockClient.Setup(x => x.CreateAgentAsync(
+            It.IsAny(),
+            default)
+        ).ReturnsAsync(new CreateAgentResponse { Agent = this._agentModel });
+
+        // After a new agent is created, its status will first be CREATING then NOT_PREPARED.
+        // Internally, we will prepare the agent for use. During preparation, the agent status
+        // will be PREPARING, then finally PREPARED.
+        mockClient.SetupSequence(x => x.GetAgentAsync(
+            It.IsAny(),
+            default)
+        ).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.NOT_PREPARED,
+            }
+        }).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.PREPARING,
+            }
+        }).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.PREPARED,
+            }
+        });
+
+        return (mockClient, mockRuntimeClient);
+    }
+
+    /// <summary>
+    /// Modify the mock client to return a new sequence of responses for the GetAgentAsync method
+    /// that reflects the correct sequence of status changes when modifying the agent.
+    /// </summary>
+    private void ModifyMockClientGetAgentResponseSequence(Mock<AmazonBedrockAgentClient> mockClient)
+    {
+        mockClient.SetupSequence(x => x.GetAgentAsync(
+            It.IsAny(),
+            default)
+        ).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.NOT_PREPARED,
+            }
+        }).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.PREPARING,
+            }
+        }).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.PREPARED,
+            }
+        }).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.PREPARING,
+            }
+        }).ReturnsAsync(new GetAgentResponse
+        {
+            Agent = new Amazon.BedrockAgent.Model.Agent()
+            {
+                AgentId = this._agentModel.AgentId,
+                AgentName = this._agentModel.AgentName,
+                Description = this._agentModel.Description,
+                Instruction = this._agentModel.Instruction,
+                AgentStatus = AgentStatus.PREPARED,
+            }
+        });
+    }
+}
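
The mocked GetAgentAsync sequences above simulate the Bedrock status lifecycle (CREATING -> NOT_PREPARED -> PREPARING -> PREPARED). A hedged sketch of the polling pattern that lifecycle implies follows; the helper name and delay are illustrative only and do not reflect the library's actual implementation:

    using System;
    using System.Threading.Tasks;
    using Amazon.BedrockAgent;
    using Amazon.BedrockAgent.Model;

    // Illustrative helper: poll until the agent reports the desired status.
    static async Task WaitForAgentStatusAsync(
        AmazonBedrockAgentClient client, string agentId, AgentStatus desired)
    {
        while (true)
        {
            var response = await client.GetAgentAsync(new GetAgentRequest { AgentId = agentId });
            if (response.Agent.AgentStatus == desired)
            {
                return;
            }

            await Task.Delay(TimeSpan.FromSeconds(2)); // illustrative polling interval
        }
    }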
diff --git a/dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockFunctionSchemaExtensionsTests.cs b/dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockFunctionSchemaExtensionsTests.cs
new file mode 100644
index 000000000000..a679fe30f83f
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockFunctionSchemaExtensionsTests.cs
@@ -0,0 +1,111 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.ComponentModel;
+using Amazon.BedrockAgentRuntime.Model;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.Bedrock.Extensions;
+
+/// <summary>
+/// Unit testing of <see cref="BedrockFunctionSchemaExtensions"/>.
+/// </summary>
+public class BedrockFunctionSchemaExtensionsTests
+{
+    /// <summary>
+    /// Verify the conversion of a <see cref="FunctionParameter"/> list to <see cref="KernelArguments"/>.
+    /// </summary>
+    [Fact]
+    public void VerifyFromFunctionParameters()
+    {
+        // Arrange
+        List<FunctionParameter> parameters =
+        [
+            new FunctionParameter()
+            {
+                Name = "TestParameter",
+                Type = Amazon.BedrockAgent.Type.String,
+            },
+        ];
+
+        // Act
+        KernelArguments arguments = parameters.FromFunctionParameters(null);
+
+        // Assert
+        Assert.Single(arguments);
+        Assert.True(arguments.ContainsName("TestParameter"));
+    }
+
+    /// <summary>
+    /// Verify the conversion of a <see cref="FunctionParameter"/> list to <see cref="KernelArguments"/> with existing arguments.
+    /// </summary>
+    [Fact]
+    public void VerifyFromFunctionParametersWithArguments()
+    {
+        // Arrange
+        List<FunctionParameter> parameters =
+        [
+            new FunctionParameter()
+            {
+                Name = "TestParameter",
+                Type = Amazon.BedrockAgent.Type.String,
+            },
+        ];
+
+        KernelArguments arguments = new()
+        {
+            { "ExistingParameter", "ExistingValue" }
+        };
+
+        // Act
+        KernelArguments updatedArguments = parameters.FromFunctionParameters(arguments);
+
+        // Assert
+        Assert.Equal(2, updatedArguments.Count);
+        Assert.True(updatedArguments.ContainsName("TestParameter"));
+        Assert.True(updatedArguments.ContainsName("ExistingParameter"));
+    }
+
+    /// <summary>
+    /// Verify the conversion of a <see cref="Kernel"/> plugin to a <see cref="Amazon.BedrockAgent.Model.FunctionSchema"/>.
+    /// </summary>
+    [Fact]
+    public void VerifyToFunctionSchema()
+    {
+        // Arrange
+        (Kernel kernel, KernelFunction function, KernelParameterMetadata parameter) = this.CreateKernelPlugin();
+
+        // Act
+        Amazon.BedrockAgent.Model.FunctionSchema schema = kernel.ToFunctionSchema();
+
+        // Assert
+        Assert.Single(schema.Functions);
+        Assert.Equal(function.Name, schema.Functions[0].Name);
+        Assert.Equal(function.Description, schema.Functions[0].Description);
+        Assert.True(schema.Functions[0].Parameters.ContainsKey(parameter.Name));
+        Assert.Equal(parameter.Description, schema.Functions[0].Parameters[parameter.Name].Description);
+        Assert.True(schema.Functions[0].Parameters[parameter.Name].Required);
+        Assert.Equal(Amazon.BedrockAgent.Type.String, schema.Functions[0].Parameters[parameter.Name].Type);
+        Assert.Equal(Amazon.BedrockAgent.RequireConfirmation.DISABLED, schema.Functions[0].RequireConfirmation);
+    }
+
+    private (Kernel, KernelFunction, KernelParameterMetadata) CreateKernelPlugin()
+    {
+        Kernel kernel = new();
+        kernel.Plugins.Add(KernelPluginFactory.CreateFromType<WeatherPlugin>());
+        var function = kernel.Plugins["WeatherPlugin"]["Current"];
+        var parameter = function.Metadata.Parameters[0];
+        return (kernel, function, parameter);
+    }
+
+    private sealed class WeatherPlugin
+    {
+        [KernelFunction, Description("Provides realtime weather information.")]
+        public string Current([Description("The location to get the weather for.")] string location)
+        {
+            return $"The current weather in {location} is 72 degrees.";
+        }
+    }
+}
diff --git a/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs b/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs
index 01debd8ded5f..1ce8039b250d 100644
--- a/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs
@@ -1,11 +1,11 @@
 // Copyright (c) Microsoft. All rights reserved.
+
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
-using Microsoft.SemanticKernel.Agents.History;
 using Microsoft.SemanticKernel.ChatCompletion;
 using Moq;
 using Xunit;
@@ -37,7 +37,74 @@ public void VerifyChatCompletionAgentDefinition()
         Assert.Equal("test instructions", agent.Instructions);
         Assert.Equal("test description", agent.Description);
         Assert.Equal("test name", agent.Name);
-        Assert.Null(agent.Arguments);
+        Assert.NotNull(agent.Arguments);
+    }
+
+    /// <summary>
+    /// Verify the definition of <see cref="ChatCompletionAgent"/> created from a prompt template configuration.
+    /// </summary>
+    [Fact]
+    public void VerifyChatCompletionAgentTemplate()
+    {
+        PromptTemplateConfig promptConfig =
+            new()
+            {
+                Name = "TestName",
+                Description = "TestDescription",
+                Template = "TestInstructions",
+                ExecutionSettings =
+                {
+                    {
+                        PromptExecutionSettings.DefaultServiceId,
+                        new PromptExecutionSettings()
+                        {
+                            FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(),
+                            ModelId = "gpt-new",
+                        }
+                    },
+                    {
+                        "manual",
+                        new PromptExecutionSettings()
+                        {
+                            ServiceId = "manual",
+                            FunctionChoiceBehavior = FunctionChoiceBehavior.Required(),
+                            ModelId = "gpt-old",
+                        }
+                    },
+                }
+            };
+        KernelPromptTemplateFactory templateFactory = new();
+
+        // Arrange
+        ChatCompletionAgent agent = new(promptConfig, templateFactory);
+
+        // Assert
+        Assert.NotNull(agent.Id);
+        Assert.Equal(promptConfig.Template, agent.Instructions);
+        Assert.Equal(promptConfig.Description, agent.Description);
+        Assert.Equal(promptConfig.Name, agent.Name);
+        Assert.Equal(promptConfig.ExecutionSettings, agent.Arguments.ExecutionSettings);
+    }
+
+    /// <summary>
+    /// Verify an exception is thrown when an invalid <see cref="IPromptTemplateFactory"/> is provided.
+    /// </summary>
+    [Fact]
+    public void VerifyThrowsForInvalidTemplateFactory()
+    {
+        // Arrange
+        PromptTemplateConfig promptConfig =
+            new()
+            {
+                Name = "TestName",
+                Description = "TestDescription",
+                Template = "TestInstructions",
+                TemplateFormat = "handlebars",
+            };
+        KernelPromptTemplateFactory templateFactory = new();
+
+        // Act and Assert
+        Assert.Throws(() => new ChatCompletionAgent(promptConfig, templateFactory));
     }
 
     /// 
diff --git a/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs b/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs
index dc82bcef59b6..f127b35eaaff 100644
--- a/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs
@@ -1,8 +1,10 @@
 // Copyright (c) Microsoft. All rights reserved.
+using System;
 using System.Linq;
 using System.Threading.Tasks;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.ChatCompletion;
 using Moq;
 using Xunit;
 
@@ -18,13 +20,103 @@ public class ChatHistoryChannelTests
     /// does not implement .
     /// 
     [Fact]
-    public async Task VerifyAgentWithoutIChatHistoryHandlerAsync()
+    public async Task VerifyAgentIsChatHistoryKernelAgentAsync()
     {
         // Arrange
         Mock agent = new(); // Not a ChatHistoryKernelAgent
-        ChatHistoryChannel channel = new(); // Requires IChatHistoryHandler
+        ChatHistoryChannel channel = new();
 
         // Act & Assert
         await Assert.ThrowsAsync(() => channel.InvokeAsync(agent.Object).ToArrayAsync().AsTask());
     }
+
+    /// <summary>
+    /// Verify a <see cref="ChatHistoryChannel"/> filters empty content on receive.
+    /// </summary>
+    [Fact]
+    public async Task VerifyReceiveFiltersEmptyContentAsync()
+    {
+        // Arrange
+        ChatHistoryChannel channel = new();
+
+        // Act
+        await channel.ReceiveAsync([new ChatMessageContent(AuthorRole.Assistant, string.Empty)]);
+
+        // Assert
+        Assert.Empty(await channel.GetHistoryAsync().ToArrayAsync());
+    }
+
+    /// <summary>
+    /// Verify a <see cref="ChatHistoryChannel"/> filters file content on receive.
+    /// </summary>
+    /// <remarks>
+    /// As long as content is not empty, extraneous file content is ok.
+    /// </remarks>
+    [Fact]
+    public async Task VerifyReceiveFiltersFileContentAsync()
+    {
+        // Arrange
+        ChatHistoryChannel channel = new();
+
+        // Act
+        await channel.ReceiveAsync([new ChatMessageContent(AuthorRole.Assistant, [new FileReferenceContent("fileId")])]);
+
+        // Assert
+        Assert.Empty(await channel.GetHistoryAsync().ToArrayAsync());
+
+        // Act
+        await channel.ReceiveAsync(
+            [new ChatMessageContent(
+                AuthorRole.Assistant,
+                [
+                    new TextContent("test"),
+                    new FileReferenceContent("fileId")
+                ])]);
+
+        // Assert
+        var history = await channel.GetHistoryAsync().ToArrayAsync();
+        Assert.Single(history);
+        Assert.Equal(2, history[0].Items.Count);
+    }
+
+    /// <summary>
+    /// Verify a <see cref="ChatHistoryChannel"/> accepts function content on receive.
+    /// </summary>
+    [Fact]
+    public async Task VerifyReceiveAcceptsFunctionContentAsync()
+    {
+        // Arrange
+        ChatHistoryChannel channel = new();
+
+        // Act
+        await channel.ReceiveAsync([new ChatMessageContent(AuthorRole.Assistant, [new FunctionCallContent("test-func")])]);
+
+        // Assert
+        Assert.Single(await channel.GetHistoryAsync().ToArrayAsync());
+
+        // Arrange
+        channel = new();
+
+        // Act
+        await channel.ReceiveAsync([new ChatMessageContent(AuthorRole.Assistant, [new FunctionResultContent("test-func")])]);
+
+        // Assert
+        Assert.Single(await channel.GetHistoryAsync().ToArrayAsync());
+    }
+
+    /// <summary>
+    /// Verify a <see cref="ChatHistoryChannel"/> accepts image content on receive.
+    /// </summary>
+    [Fact]
+    public async Task VerifyReceiveAcceptsImageContentAsync()
+    {
+        // Arrange
+        ChatHistoryChannel channel = new();
+
+        // Act
+        await channel.ReceiveAsync([new ChatMessageContent(AuthorRole.Assistant, [new ImageContent(new Uri("http://test.ms/test.jpg"))])]);
+
+        // Assert
+        Assert.Single(await channel.GetHistoryAsync().ToArrayAsync());
+    }
 }
diff --git a/dotnet/src/Agents/UnitTests/KernelAgentTests.cs b/dotnet/src/Agents/UnitTests/KernelAgentTests.cs
index 4e4f4e531f4e..0309cd2967d8 100644
--- a/dotnet/src/Agents/UnitTests/KernelAgentTests.cs
+++ b/dotnet/src/Agents/UnitTests/KernelAgentTests.cs
@@ -18,19 +18,18 @@ public class KernelAgentTests
     public void VerifyNullArgumentMerge()
     {
         // Arrange
-        MockAgent agentWithNullArguments = new();
+        MockAgent agentWithNoArguments = new();
         // Act
-        KernelArguments? arguments = agentWithNullArguments.MergeArguments(null);
+        KernelArguments arguments = agentWithNoArguments.MergeArguments(null);
         // Assert
-        Assert.Null(arguments);
+        Assert.Empty(arguments);
 
         // Arrange
-        KernelArguments overrideArguments = [];
+        KernelArguments overrideArguments = new() { { "test", 1 } };
         // Act
-        arguments = agentWithNullArguments.MergeArguments(overrideArguments);
+        arguments = agentWithNoArguments.MergeArguments(overrideArguments);
         // Assert
-        Assert.NotNull(arguments);
-        Assert.StrictEqual(overrideArguments, arguments);
+        Assert.StrictEqual(1, arguments.Count);
 
         // Arrange
         MockAgent agentWithEmptyArguments = new() { Arguments = new() };
diff --git a/dotnet/src/Agents/UnitTests/MockAgent.cs b/dotnet/src/Agents/UnitTests/MockAgent.cs
index 409a232b1044..7f242ff510a5 100644
--- a/dotnet/src/Agents/UnitTests/MockAgent.cs
+++ b/dotnet/src/Agents/UnitTests/MockAgent.cs
@@ -41,7 +41,7 @@ public override IAsyncEnumerable InvokeStreamingAsy
     }
 
     // Expose protected method for testing
-    public new KernelArguments? MergeArguments(KernelArguments? arguments)
+    public new KernelArguments MergeArguments(KernelArguments? arguments)
     {
         return base.MergeArguments(arguments);
     }
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs
deleted file mode 100644
index 6288c6a5aed8..000000000000
--- a/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs
+++ /dev/null
@@ -1,37 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.Linq;
-using Azure.Core;
-using Azure.Core.Pipeline;
-using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
-using Xunit;
-
-namespace SemanticKernel.Agents.UnitTests.OpenAI.Azure;
-
-/// <summary>
-/// Unit testing of <see cref="AddHeaderRequestPolicy"/>.
-/// </summary>
-public class AddHeaderRequestPolicyTests
-{
-    /// <summary>
-    /// Verify behavior of <see cref="AddHeaderRequestPolicy"/>.
-    /// </summary>
-    [Fact]
-    public void VerifyAddHeaderRequestPolicyExecution()
-    {
-        // Arrange
-        using HttpClientTransport clientTransport = new();
-        HttpPipeline pipeline = new(clientTransport);
-
-        HttpMessage message = pipeline.CreateMessage();
-        AddHeaderRequestPolicy policy = new(headerName: "testname", headerValue: "testvalue");
-
-        // Act
-        policy.OnSendingRequest(message);
-
-        // Assert
-        Assert.Single(message.Request.Headers);
-        HttpHeader header = message.Request.Headers.Single();
-        Assert.Equal("testname", header.Name);
-        Assert.Equal("testvalue", header.Value);
-    }
-}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AssistantClientExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AssistantClientExtensionsTests.cs
new file mode 100644
index 000000000000..f1cdd1e429cd
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AssistantClientExtensionsTests.cs
@@ -0,0 +1,357 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.ClientModel;
+using System.Collections.Generic;
+using System.Net;
+using System.Net.Http;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI.Extensions;
+
+/// <summary>
+/// Unit testing of <see cref="AssistantClientExtensions"/>.
+/// </summary>
+public sealed class AssistantClientExtensionsTests : IDisposable
+{
+    private const string ModelValue = "testmodel";
+
+    private readonly HttpMessageHandlerStub _messageHandlerStub;
+    private readonly HttpClient _httpClient;
+    private readonly OpenAIClientProvider _clientProvider;
+
+    /// <summary>
+    /// Verify the assistant creation with default values.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateAssistantAsync()
+    {
+        // Arrange
+        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.AssistantDefinition(ModelValue));
+
+        // Act
+        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(modelId: ModelValue);
+
+        // Assert
+        Assert.NotNull(definition);
+        Assert.Equal(ModelValue, definition.Model);
+    }
+
+    /// <summary>
+    /// Verify the assistant creation with name, instructions, and description.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateAssistantWithIdentityAsync()
+    {
+        // Arrange
+        const string NameValue = "test name";
+        const string DescriptionValue = "test instructions";
+        const string InstructionsValue = "test description";
+
+        this.SetupResponse(
+            HttpStatusCode.OK,
+            OpenAIAssistantResponseContent.AssistantDefinition(
+                ModelValue,
+                name: NameValue,
+                instructions: InstructionsValue,
+                description: DescriptionValue));
+
+        // Act
+        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+            modelId: ModelValue,
+            name: NameValue,
+            instructions: InstructionsValue,
+            description: DescriptionValue);
+
+        // Assert
+        Assert.NotNull(definition);
+        Assert.Equal(NameValue, definition.Name);
+        Assert.Equal(DescriptionValue, definition.Description);
+        Assert.Equal(InstructionsValue, definition.Instructions);
+    }
+
+    /// <summary>
+    /// Verify the assistant creation from a prompt template configuration.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateAssistantWithTemplateAsync()
+    {
+        // Arrange
+        const string NameValue = "test name";
+        const string DescriptionValue = "test instructions";
+        const string InstructionsValue = "test description";
+        PromptTemplateConfig templateConfig =
+            new(InstructionsValue)
+            {
+                Name = NameValue,
+                Description = InstructionsValue,
+            };
+        this.SetupResponse(
+            HttpStatusCode.OK,
+            OpenAIAssistantResponseContent.AssistantDefinition(
+                ModelValue,
+                name: NameValue,
+                instructions: InstructionsValue,
+                description: DescriptionValue));
+
+        // Act
+        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantFromTemplateAsync(modelId: ModelValue, templateConfig);
+
+        // Assert
+        Assert.NotNull(definition);
+        Assert.Equal(NameValue, definition.Name);
+        Assert.Equal(DescriptionValue, definition.Description);
+        Assert.Equal(InstructionsValue, definition.Instructions);
+    }
+
+    /// <summary>
+    /// Verify the assistant creation with code-interpreter enabled.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateAssistantWithCodeInterpreterAsync()
+    {
+        // Arrange
+        this.SetupResponse(
+            HttpStatusCode.OK,
+            OpenAIAssistantResponseContent.AssistantDefinition(ModelValue, enableCodeInterpreter: true));
+
+        // Act
+        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+            modelId: ModelValue,
+            enableCodeInterpreter: true);
+
+        // Assert
+        Assert.NotNull(definition);
+        Assert.Single(definition.Tools);
+        Assert.IsType(definition.Tools[0]);
+    }
+
+    /// <summary>
+    /// Verify the assistant creation with code-interpreter files specified.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateAssistantWithCodeInterpreterFilesAsync()
+    {
+        // Arrange
+        string[] fileIds = ["file1", "file2"];
+        this.SetupResponse(
+            HttpStatusCode.OK,
+            OpenAIAssistantResponseContent.AssistantDefinition(ModelValue, codeInterpreterFileIds: fileIds));
+
+        // Act
+        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+            modelId: ModelValue,
+            codeInterpreterFileIds: fileIds);
+
+        // Assert
+        Assert.NotNull(definition);
+        Assert.Single(definition.Tools);
+        Assert.IsType(definition.Tools[0]);
+        Assert.NotNull(definition.ToolResources.CodeInterpreter);
+        Assert.Equal(2, definition.ToolResources.CodeInterpreter.FileIds.Count);
+    }
+
+    /// <summary>
+    /// Verify the assistant creation with file-search enabled.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateAssistantWithFileSearchAsync()
+    {
+        // Arrange
+        this.SetupResponse(
+            HttpStatusCode.OK,
+            OpenAIAssistantResponseContent.AssistantDefinition(ModelValue, enableFileSearch: true));
+
+        // Act
+        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+            modelId: ModelValue,
+            enableFileSearch: true);
+
+        // Assert
+        Assert.NotNull(definition);
+        Assert.Single(definition.Tools);
+        Assert.IsType(definition.Tools[0]);
+    }
+
+    /// <summary>
+    /// Verify the assistant creation with vector-store specified.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateAssistantWithVectorStoreAsync()
+    {
+        // Arrange
+        const string VectorStoreValue = "test store";
+        this.SetupResponse(
+            HttpStatusCode.OK,
+            OpenAIAssistantResponseContent.AssistantDefinition(ModelValue, vectorStoreId: VectorStoreValue));
+
+        // Act
+        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+            modelId: ModelValue,
+            vectorStoreId: VectorStoreValue);
+
+        // Assert
+        Assert.NotNull(definition);
+        Assert.Single(definition.Tools);
+        Assert.IsType(definition.Tools[0]);
+        Assert.NotNull(definition.ToolResources.FileSearch);
+        Assert.Single(definition.ToolResources.FileSearch.VectorStoreIds);
+    }
+
+    /// <summary>
+    /// Verify the assistant creation for an agent with temperature defined.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateAssistantWithTemperatureAsync()
+    {
+        // Arrange
+        const float TemperatureValue = 0.5F;
+        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.AssistantDefinition("testmodel", temperature: TemperatureValue));
+
+        // Act
+        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+            modelId: "testmodel",
+            temperature: TemperatureValue);
+
+        // Assert
+        Assert.NotNull(definition);
+        Assert.Equal(TemperatureValue, definition.Temperature);
+    }
+
+    /// <summary>
+    /// Verify the assistant creation for an agent with topP defined.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateAssistantWithTopPAsync()
+    {
+        // Arrange
+        const float TopPValue = 2.0F;
+        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.AssistantDefinition("testmodel", topP: TopPValue));
+
+        // Act
+        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+            modelId: "testmodel",
+            topP: TopPValue);
+
+        // Assert
+        Assert.NotNull(definition);
+        Assert.Equal(TopPValue, definition.NucleusSamplingFactor);
+    }
+
+    /// <summary>
+    /// Verify the assistant creation for an agent with metadata defined.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateAssistantWithMetadataAsync()
+    {
+        // Arrange
+        Dictionary<string, string> metadata =
+            new()
+            {
+                { "a", "1" },
+                { "b", "2" },
+            };
+        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.AssistantDefinition("testmodel", metadata: metadata));
+
+        // Act
+        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
+            modelId: "testmodel",
+            metadata: metadata);
+
+        // Assert
+        Assert.NotNull(definition);
+        Assert.NotEmpty(definition.Metadata);
+    }
+
+    /// <summary>
+    /// Verify the deletion of an assistant.
+    /// </summary>
+    [Fact]
+    public async Task VerifyDeleteAssistantAsync()
+    {
+        // Arrange
+        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteAgent);
+
+        // Act
+        AssistantDeletionResult result = await this._clientProvider.AssistantClient.DeleteAssistantAsync("testid");
+
+        // Assert
+        Assert.True(result.Deleted);
+    }
+
+    /// <summary>
+    /// Verify creating a thread.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateThreadAsync()
+    {
+        // Arrange
+        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateThread);
+
+        // Act
+        string threadId = await this._clientProvider.AssistantClient.CreateThreadAsync(messages: null);
+
+        // Assert
+        Assert.NotNull(threadId);
+    }
+
+    /// <summary>
+    /// Verify creating a thread with messages.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateThreadWithMessagesAsync()
+    {
+        // Arrange
+        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateThread);
+
+        // Act
+        string threadId = await this._clientProvider.AssistantClient.CreateThreadAsync(messages: [new ChatMessageContent(AuthorRole.User, "test")]);
+
+        // Assert
+        Assert.NotNull(threadId);
+    }
+
+    /// <summary>
+    /// Verify creating a thread with metadata.
+    /// </summary>
+    [Fact]
+    public async Task VerifyCreateThreadWithMetadataAsync()
+    {
+        // Arrange
+        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateThread);
+        Dictionary<string, string> metadata = new() { { "a", "1" }, { "b", "2" } };
+
+        // Act
+        string threadId = await this._clientProvider.AssistantClient.CreateThreadAsync(metadata: metadata);
+
+        // Assert
+        Assert.NotNull(threadId);
+    }
+
+    /// <inheritdoc/>
+    public void Dispose()
+    {
+        this._messageHandlerStub.Dispose();
+        this._httpClient.Dispose();
+    }
+
+    /// <summary>
+    /// Initializes a new instance of the <see cref="AssistantClientExtensionsTests"/> class.
+    /// </summary>
+    public AssistantClientExtensionsTests()
+    {
+        this._messageHandlerStub = new HttpMessageHandlerStub();
+        this._httpClient = new HttpClient(this._messageHandlerStub, disposeHandler: false);
+        this._clientProvider = OpenAIClientProvider.ForOpenAI(apiKey: new ApiKeyCredential("fakekey"), endpoint: null, this._httpClient);
+    }
+
+    private void SetupResponse(HttpStatusCode statusCode, string content) =>
+        this._messageHandlerStub.SetupResponses(statusCode, content);
+}
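
Note: the tests above (and the OpenAIClientExtensionsTests below) rely on the shared HttpMessageHandlerStub test helper, which is defined elsewhere in the test project and is not part of this patch. As a rough orientation only, a minimal stand-in consistent with the SetupResponses/Dispose usage in these tests might look like the following sketch; the type name FakeHttpMessageHandlerStub and the exact SetupResponses signature are illustrative assumptions, not the repository's actual helper.

using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

// Illustrative stand-in only; the real HttpMessageHandlerStub lives in the shared test utilities.
internal sealed class FakeHttpMessageHandlerStub : HttpMessageHandler
{
    private readonly Queue<HttpResponseMessage> _responses = new();

    // Mirrors the SetupResponses(statusCode, content) calls made by the tests above (assumed signature).
    public void SetupResponses(HttpStatusCode statusCode, params string[] contents)
    {
        foreach (string content in contents)
        {
            this._responses.Enqueue(
                new HttpResponseMessage(statusCode)
                {
                    Content = new StringContent(content, Encoding.UTF8, "application/json"),
                });
        }
    }

    protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
    {
        // Replay queued responses in order; fall back to an empty 200 OK if the queue is exhausted.
        HttpResponseMessage response = this._responses.Count > 0
            ? this._responses.Dequeue()
            : new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(string.Empty) };
        return Task.FromResult(response);
    }
}
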
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs
deleted file mode 100644
index 70c27ccb2152..000000000000
--- a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs
+++ /dev/null
@@ -1,60 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.OpenAI;
-using Xunit;
-using KernelExtensions = Microsoft.SemanticKernel.Agents.OpenAI;
-
-namespace SemanticKernel.Agents.UnitTests.OpenAI.Extensions;
-
-/// 
-/// Unit testing of .
-/// 
-public class KernelExtensionsTests
-{
-    /// 
-    /// Verify function lookup using KernelExtensions.
-    /// 
-    [Fact]
-    public void VerifyGetKernelFunctionLookup()
-    {
-        // Arrange
-        Kernel kernel = new();
-        KernelPlugin plugin = KernelPluginFactory.CreateFromType();
-        kernel.Plugins.Add(plugin);
-
-        // Act
-        KernelFunction function = kernel.GetKernelFunction($"{nameof(TestPlugin)}-{nameof(TestPlugin.TestFunction)}", '-');
-
-        // Assert
-        Assert.NotNull(function);
-        Assert.Equal(nameof(TestPlugin.TestFunction), function.Name);
-    }
-
-    /// 
-    /// Verify error case for function lookup using KernelExtensions.
-    /// 
-    [Fact]
-    public void VerifyGetKernelFunctionInvalid()
-    {
-        // Arrange
-        Kernel kernel = new();
-        KernelPlugin plugin = KernelPluginFactory.CreateFromType();
-        kernel.Plugins.Add(plugin);
-
-        // Act and Assert
-        Assert.Throws(() => kernel.GetKernelFunction("a", '-'));
-        Assert.Throws(() => kernel.GetKernelFunction("a-b", ':'));
-        Assert.Throws(() => kernel.GetKernelFunction("a-b-c", '-'));
-    }
-
-    /// 
-    /// Exists only for parsing.
-    /// 
-#pragma warning disable CA1812 // Avoid uninstantiated internal classes
-    private sealed class TestPlugin()
-#pragma warning restore CA1812 // Avoid uninstantiated internal classes
-    {
-        [KernelFunction]
-        public void TestFunction() { }
-    }
-}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/OpenAIClientExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/OpenAIClientExtensionsTests.cs
new file mode 100644
index 000000000000..ce03e8f5843e
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/OpenAIClientExtensionsTests.cs
@@ -0,0 +1,139 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.ClientModel;
+using System.Collections.Generic;
+using System.IO;
+using System.Net;
+using System.Net.Http;
+using System.Text;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using OpenAI.VectorStores;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI.Extensions;
+
+/// 
+/// Unit testing of .
+/// 
+public sealed class OpenAIClientExtensionsTests : IDisposable
+{
+    private readonly HttpMessageHandlerStub _messageHandlerStub;
+    private readonly HttpClient _httpClient;
+    private readonly OpenAIClientProvider _clientProvider;
+
+    /// 
+    /// Verify the default creation of a vector store.
+    /// 
+    [Fact]
+    public async Task VerifyCreateDefaultVectorStoreAsync()
+    {
+        // Arrange
+        string[] fileIds = ["file-1", "file-2"];
+        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateVectorStore);
+
+        // Act
+        string storeId = await this._clientProvider.Client.CreateVectorStoreAsync(fileIds, waitUntilCompleted: false);
+
+        // Assert
+        Assert.NotNull(storeId);
+    }
+
+    /// 
+    /// Verify the custom creation of a vector store.
+    /// 
+    [Fact]
+    public async Task VerifyCreateVectorStoreAsync()
+    {
+        // Arrange
+        string[] fileIds = ["file-1", "file-2"];
+        Dictionary metadata =
+            new()
+            {
+                { "a", "1" },
+                { "b", "2" },
+            };
+        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateVectorStore);
+
+        // Act
+        string storeId = await this._clientProvider.Client.CreateVectorStoreAsync(
+            fileIds,
+            waitUntilCompleted: false,
+            storeName: "test-store",
+            expirationPolicy: new VectorStoreExpirationPolicy(VectorStoreExpirationAnchor.LastActiveAt, 30),
+            chunkingStrategy: FileChunkingStrategy.Auto,
+            metadata: metadata);
+
+        // Assert
+        Assert.NotNull(storeId);
+    }
+
+    /// 
+    /// Verify uploading an assistant file.
+    /// 
+    [Fact]
+    public async Task VerifyUploadFileAsync()
+    {
+        // Arrange
+        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.UploadFile);
+
+        // Act
+        await using MemoryStream stream = new(Encoding.UTF8.GetBytes("test"));
+        string fileId = await this._clientProvider.Client.UploadAssistantFileAsync(stream, "text.txt");
+
+        // Assert
+        Assert.NotNull(fileId);
+    }
+
+    /// 
+    /// Verify deleting a file.
+    /// 
+    [Fact]
+    public async Task VerifyDeleteFileAsync()
+    {
+        // Arrange
+        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteFile);
+
+        // Act
+        bool isDeleted = await this._clientProvider.Client.DeleteFileAsync("file-id");
+
+        // Assert
+        Assert.True(isDeleted);
+    }
+
+    /// 
+    /// Verify deleting a vector store.
+    /// 
+    [Fact]
+    public async Task VerifyDeleteVectorStoreAsync()
+    {
+        // Arrange
+        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteVectorStore);
+
+        // Act
+        bool isDeleted = await this._clientProvider.Client.DeleteVectorStoreAsync("store-id");
+
+        // Assert
+        Assert.True(isDeleted);
+    }
+
+    /// 
+    public void Dispose()
+    {
+        this._messageHandlerStub.Dispose();
+        this._httpClient.Dispose();
+    }
+
+    /// 
+    /// Initializes a new instance of the  class.
+    /// 
+    public OpenAIClientExtensionsTests()
+    {
+        this._messageHandlerStub = new HttpMessageHandlerStub();
+        this._httpClient = new HttpClient(this._messageHandlerStub, disposeHandler: false);
+        this._clientProvider = OpenAIClientProvider.ForOpenAI(apiKey: new ApiKeyCredential("fakekey"), endpoint: null, this._httpClient);
+    }
+
+    private void SetupResponse(HttpStatusCode statusCode, string content) =>
+        this._messageHandlerStub.SetupResponses(statusCode, content);
+}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
index 15fd0d6aa5ae..dfca85afc0f2 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
@@ -1,5 +1,4 @@
 // Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents.OpenAI;
 using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
@@ -21,19 +20,16 @@ public class AssistantRunOptionsFactoryTests
     public void AssistantRunOptionsFactoryExecutionOptionsNullTest()
     {
         // Arrange
-        OpenAIAssistantDefinition definition =
-            new("gpt-anything")
+        RunCreationOptions defaultOptions =
+            new()
             {
+                ModelOverride = "gpt-anything",
                 Temperature = 0.5F,
-                ExecutionOptions =
-                    new()
-                    {
-                        AdditionalInstructions = "test",
-                    },
+                AdditionalInstructions = "test",
             };
 
         // Act
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, null);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, null, null);
 
         // Assert
         Assert.NotNull(options);
@@ -52,20 +48,21 @@ public void AssistantRunOptionsFactoryExecutionOptionsNullTest()
     public void AssistantRunOptionsFactoryExecutionOptionsEquivalentTest()
     {
         // Arrange
-        OpenAIAssistantDefinition definition =
-            new("gpt-anything")
+        RunCreationOptions defaultOptions =
+            new()
             {
+                ModelOverride = "gpt-anything",
                 Temperature = 0.5F,
             };
 
-        OpenAIAssistantInvocationOptions invocationOptions =
+        RunCreationOptions invocationOptions =
             new()
             {
                 Temperature = 0.5F,
             };
 
         // Act
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, "test", invocationOptions);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, "test", invocationOptions);
 
         // Assert
         Assert.NotNull(options);
@@ -81,29 +78,26 @@ public void AssistantRunOptionsFactoryExecutionOptionsEquivalentTest()
     public void AssistantRunOptionsFactoryExecutionOptionsOverrideTest()
     {
         // Arrange
-        OpenAIAssistantDefinition definition =
-            new("gpt-anything")
+        RunCreationOptions defaultOptions =
+            new()
             {
+                ModelOverride = "gpt-anything",
                 Temperature = 0.5F,
-                ExecutionOptions =
-                    new()
-                    {
-                        AdditionalInstructions = "test1",
-                        TruncationMessageCount = 5,
-                    },
+                TruncationStrategy = RunTruncationStrategy.CreateLastMessagesStrategy(5),
             };
 
-        OpenAIAssistantInvocationOptions invocationOptions =
+        RunCreationOptions invocationOptions =
             new()
             {
+                ModelOverride = "gpt-anything",
                 AdditionalInstructions = "test2",
                 Temperature = 0.9F,
-                TruncationMessageCount = 8,
-                EnableJsonResponse = true,
+                TruncationStrategy = RunTruncationStrategy.CreateLastMessagesStrategy(8),
+                ResponseFormat = AssistantResponseFormat.JsonObject,
             };
 
         // Act
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, invocationOptions);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, null, invocationOptions);
 
         // Assert
         Assert.NotNull(options);
@@ -121,21 +115,18 @@ public void AssistantRunOptionsFactoryExecutionOptionsOverrideTest()
     public void AssistantRunOptionsFactoryExecutionOptionsMetadataTest()
     {
         // Arrange
-        OpenAIAssistantDefinition definition =
-            new("gpt-anything")
+        RunCreationOptions defaultOptions =
+            new()
             {
+                ModelOverride = "gpt-anything",
                 Temperature = 0.5F,
-                ExecutionOptions =
-                    new()
-                    {
-                        TruncationMessageCount = 5,
-                    },
+                TruncationStrategy = RunTruncationStrategy.CreateLastMessagesStrategy(5),
             };
 
-        OpenAIAssistantInvocationOptions invocationOptions =
+        RunCreationOptions invocationOptions =
             new()
             {
-                Metadata = new Dictionary
+                Metadata =
                 {
                     { "key1", "value" },
                     { "key2", null! },
@@ -143,7 +134,7 @@ public void AssistantRunOptionsFactoryExecutionOptionsMetadataTest()
             };
 
         // Act
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, invocationOptions);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, null, invocationOptions);
 
         // Assert
         Assert.Equal(2, options.Metadata.Count);
@@ -158,18 +149,21 @@ public void AssistantRunOptionsFactoryExecutionOptionsMetadataTest()
     public void AssistantRunOptionsFactoryExecutionOptionsMessagesTest()
     {
         // Arrange
-        OpenAIAssistantDefinition definition = new("gpt-anything");
+        RunCreationOptions defaultOptions =
+            new()
+            {
+                ModelOverride = "gpt-anything",
+            };
 
-        OpenAIAssistantInvocationOptions invocationOptions =
+        ChatMessageContent message = new(AuthorRole.User, "test message");
+        RunCreationOptions invocationOptions =
             new()
             {
-                AdditionalMessages = [
-                    new ChatMessageContent(AuthorRole.User, "test message")
-                ]
+                AdditionalMessages = { message.ToThreadInitializationMessage() },
             };
 
         // Act
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, invocationOptions);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, null, invocationOptions);
 
         // Assert
         Assert.Single(options.AdditionalMessages);
@@ -182,20 +176,17 @@ public void AssistantRunOptionsFactoryExecutionOptionsMessagesTest()
     public void AssistantRunOptionsFactoryExecutionOptionsMaxTokensTest()
     {
         // Arrange
-        OpenAIAssistantDefinition definition =
-            new("gpt-anything")
+        RunCreationOptions defaultOptions =
+            new()
             {
+                ModelOverride = "gpt-anything",
                 Temperature = 0.5F,
-                ExecutionOptions =
-                    new()
-                    {
-                        MaxCompletionTokens = 4096,
-                        MaxPromptTokens = 1024,
-                    },
+                MaxOutputTokenCount = 4096,
+                MaxInputTokenCount = 1024,
             };
 
         // Act
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, null);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, null, null);
 
         // Assert
         Assert.Equal(1024, options.MaxInputTokenCount);
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs
index 2730cbbc821a..3860855b986d 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs
@@ -2,16 +2,15 @@
 using System;
 using System.ClientModel;
 using System.Collections.Generic;
-using System.IO;
 using System.Linq;
 using System.Net;
 using System.Net.Http;
-using System.Text;
 using System.Threading.Tasks;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
 using Microsoft.SemanticKernel.Agents.OpenAI;
 using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.PromptTemplates.Handlebars;
 using OpenAI.Assistants;
 using Xunit;
 
@@ -20,6 +19,7 @@ namespace SemanticKernel.Agents.UnitTests.OpenAI;
 /// 
 /// Unit testing of .
 /// 
+#pragma warning disable CS0419 // Ambiguous reference in cref attribute
 public sealed class OpenAIAssistantAgentTests : IDisposable
 {
     private readonly HttpMessageHandlerStub _messageHandlerStub;
@@ -77,11 +77,9 @@ public async Task VerifyOpenAIAssistantAgentCreationDefaultTemplateAsync()
 
         OpenAIAssistantCapabilities capabilities = new("testmodel");
 
-        // Act and Assert
-        await this.VerifyAgentTemplateAsync(capabilities, templateConfig);
-
         // Act and Assert
         await this.VerifyAgentTemplateAsync(capabilities, templateConfig, new KernelPromptTemplateFactory());
+        await Assert.ThrowsAsync(async () => await this.VerifyAgentTemplateAsync(capabilities, templateConfig, new HandlebarsPromptTemplateFactory()));
     }
 
     /// 
@@ -311,7 +309,7 @@ public async Task VerifyOpenAIAssistantAgentRetrievalAsync()
 
         OpenAIAssistantAgent agent =
             await OpenAIAssistantAgent.RetrieveAsync(
-                this.CreateTestConfiguration(),
+                this.CreateTestProvider(),
                 "#id",
                 this._emptyKernel);
 
@@ -332,10 +330,10 @@ public async Task VerifyOpenAIAssistantAgentRetrievalWithFactoryAsync()
 
         OpenAIAssistantAgent agent =
             await OpenAIAssistantAgent.RetrieveAsync(
-                this.CreateTestConfiguration(),
+                this.CreateTestProvider(),
                 "#id",
                 this._emptyKernel,
-                new KernelArguments(),
+                [],
                 new KernelPromptTemplateFactory());
 
         // Act and Assert
@@ -350,26 +348,13 @@ public async Task VerifyOpenAIAssistantAgentDeleteAsync()
     {
         // Arrange
         OpenAIAssistantAgent agent = await this.CreateAgentAsync();
-        // Assert
-        Assert.False(agent.IsDeleted);
-
-        // Arrange
         this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteAgent);
 
         // Act
-        await agent.DeleteAsync();
-        // Assert
-        Assert.True(agent.IsDeleted);
+        bool isDeleted = await agent.DeleteAsync();
 
-        // Act
-        await agent.DeleteAsync(); // Doesn't throw
         // Assert
-        Assert.True(agent.IsDeleted);
-        await Assert.ThrowsAsync(() => agent.AddChatMessageAsync("threadid", new(AuthorRole.User, "test")));
-        await Assert.ThrowsAsync(() => agent.GetThreadMessagesAsync("threadid").ToArrayAsync().AsTask());
-        await Assert.ThrowsAsync(() => agent.InvokeAsync("threadid").ToArrayAsync().AsTask());
-        await Assert.ThrowsAsync(() => agent.InvokeStreamingAsync("threadid").ToArrayAsync().AsTask());
-        await Assert.ThrowsAsync(() => agent.InvokeStreamingAsync("threadid", new OpenAIAssistantInvocationOptions()).ToArrayAsync().AsTask());
+        Assert.True(isDeleted);
     }
 
     /// 
@@ -413,25 +398,6 @@ public async Task VerifyOpenAIAssistantAgentDeleteThreadAsync()
         Assert.True(isDeleted);
     }
 
-    /// 
-    /// Verify the deleting a thread via .
-    /// 
-    [Fact]
-    public async Task VerifyOpenAIAssistantAgentUploadFileAsync()
-    {
-        // Arrange
-        OpenAIAssistantAgent agent = await this.CreateAgentAsync();
-
-        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.UploadFile);
-
-        // Act
-        using MemoryStream stream = new(Encoding.UTF8.GetBytes("test"));
-        string fileId = await agent.UploadFileAsync(stream, "text.txt");
-
-        // Assert
-        Assert.NotNull(fileId);
-    }
-
     /// 
     /// Verify invocation via .
     /// 
@@ -682,7 +648,7 @@ public async Task VerifyOpenAIAssistantAgentListDefinitionAsync()
         // Act
         var messages =
             await OpenAIAssistantAgent.ListDefinitionsAsync(
-                this.CreateTestConfiguration()).ToArrayAsync();
+                this.CreateTestProvider()).ToArrayAsync();
         // Assert
         Assert.Equal(7, messages.Length);
 
@@ -695,7 +661,7 @@ await OpenAIAssistantAgent.ListDefinitionsAsync(
         // Act
         messages =
             await OpenAIAssistantAgent.ListDefinitionsAsync(
-                this.CreateTestConfiguration()).ToArrayAsync();
+                this.CreateTestProvider()).ToArrayAsync();
         // Assert
         Assert.Equal(4, messages.Length);
     }
@@ -757,7 +723,7 @@ private async Task VerifyAgentCreationAsync(OpenAIAssistantDefinition definition
 
         OpenAIAssistantAgent agent =
             await OpenAIAssistantAgent.CreateAsync(
-                this.CreateTestConfiguration(),
+                this.CreateTestProvider(),
                 definition,
                 this._emptyKernel);
 
@@ -767,16 +733,16 @@ await OpenAIAssistantAgent.CreateAsync(
     private async Task VerifyAgentTemplateAsync(
         OpenAIAssistantCapabilities capabilities,
         PromptTemplateConfig templateConfig,
-        IPromptTemplateFactory? templateFactory = null)
+        IPromptTemplateFactory templateFactory)
     {
         this.SetupResponse(HttpStatusCode.OK, capabilities, templateConfig);
 
         OpenAIAssistantAgent agent =
             await OpenAIAssistantAgent.CreateFromTemplateAsync(
-                this.CreateTestConfiguration(),
+                this.CreateTestProvider(),
                 capabilities,
                 this._emptyKernel,
-                new KernelArguments(),
+                [],
                 templateConfig,
                 templateFactory);
 
@@ -803,9 +769,8 @@ private static void ValidateAgent(
         // Verify fundamental state
         Assert.NotNull(agent);
         Assert.NotNull(agent.Id);
-        Assert.False(agent.IsDeleted);
         Assert.NotNull(agent.Definition);
-        Assert.Equal(expectedConfig.ModelId, agent.Definition.ModelId);
+        Assert.Equal(expectedConfig.ModelId, agent.Definition.Model);
 
         // Verify core properties
         Assert.Equal(expectedInstructions ?? string.Empty, agent.Instructions);
@@ -814,11 +779,7 @@ private static void ValidateAgent(
 
         // Verify options
         Assert.Equal(expectedConfig.Temperature, agent.Definition.Temperature);
-        Assert.Equal(expectedConfig.TopP, agent.Definition.TopP);
-        Assert.Equal(expectedConfig.ExecutionOptions?.MaxCompletionTokens, agent.Definition.ExecutionOptions?.MaxCompletionTokens);
-        Assert.Equal(expectedConfig.ExecutionOptions?.MaxPromptTokens, agent.Definition.ExecutionOptions?.MaxPromptTokens);
-        Assert.Equal(expectedConfig.ExecutionOptions?.ParallelToolCallsEnabled, agent.Definition.ExecutionOptions?.ParallelToolCallsEnabled);
-        Assert.Equal(expectedConfig.ExecutionOptions?.TruncationMessageCount, agent.Definition.ExecutionOptions?.TruncationMessageCount);
+        Assert.Equal(expectedConfig.TopP, agent.Definition.NucleusSamplingFactor);
 
         // Verify tool definitions
         int expectedToolCount = 0;
@@ -830,7 +791,7 @@ private static void ValidateAgent(
             ++expectedToolCount;
         }
 
-        Assert.Equal(hasCodeInterpreter, agent.Tools.OfType().Any());
+        Assert.Equal(hasCodeInterpreter, agent.Definition.Tools.OfType().Any());
 
         bool hasFileSearch = false;
         if (expectedConfig.EnableFileSearch)
@@ -839,9 +800,9 @@ private static void ValidateAgent(
             ++expectedToolCount;
         }
 
-        Assert.Equal(hasFileSearch, agent.Tools.OfType().Any());
+        Assert.Equal(hasFileSearch, agent.Definition.Tools.OfType().Any());
 
-        Assert.Equal(expectedToolCount, agent.Tools.Count);
+        Assert.Equal(expectedToolCount, agent.Definition.Tools.Count);
 
         // Verify metadata
         Assert.NotNull(agent.Definition.Metadata);
@@ -865,8 +826,8 @@ private static void ValidateAgent(
         }
 
         // Verify detail definition
-        Assert.Equal(expectedConfig.VectorStoreId, agent.Definition.VectorStoreId);
-        Assert.Equal(expectedConfig.CodeInterpreterFileIds, agent.Definition.CodeInterpreterFileIds);
+        Assert.Equal(expectedConfig.VectorStoreId, agent.Definition.ToolResources.FileSearch?.VectorStoreIds.SingleOrDefault());
+        Assert.Equal(expectedConfig.CodeInterpreterFileIds, agent.Definition.ToolResources.CodeInterpreter?.FileIds);
     }
 
     private Task CreateAgentAsync()
@@ -877,12 +838,12 @@ private Task CreateAgentAsync()
 
         return
             OpenAIAssistantAgent.CreateAsync(
-                this.CreateTestConfiguration(),
+                this.CreateTestProvider(),
                 definition,
                 this._emptyKernel);
     }
 
-    private OpenAIClientProvider CreateTestConfiguration(bool targetAzure = false)
+    private OpenAIClientProvider CreateTestProvider(bool targetAzure = false)
         => targetAzure ?
             OpenAIClientProvider.ForAzureOpenAI(apiKey: new ApiKeyCredential("fakekey"), endpoint: new Uri("https://localhost"), this._httpClient) :
             OpenAIClientProvider.ForOpenAI(apiKey: new ApiKeyCredential("fakekey"), endpoint: null, this._httpClient);
@@ -906,3 +867,5 @@ public void MyFunction(int index)
         { }
     }
 }
+#pragma warning restore CS0419 // Ambiguous reference in cref attribute
+
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs
index b0131ac9be6b..f8778a4f2900 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs
@@ -2,6 +2,7 @@
 using System.Collections.Generic;
 using System.Text.Json;
 using Microsoft.SemanticKernel.Agents.OpenAI;
+using SemanticKernel.Agents.UnitTests.Test;
 using Xunit;
 
 namespace SemanticKernel.Agents.UnitTests.OpenAI;
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs
index 4962a9c04797..0a71201e7626 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs
@@ -4,6 +4,7 @@
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents.OpenAI;
 using Microsoft.SemanticKernel.ChatCompletion;
+using SemanticKernel.Agents.UnitTests.Test;
 using Xunit;
 
 namespace SemanticKernel.Agents.UnitTests.OpenAI;
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs
index 7ae3cbaeacbe..3ecf07fada5e 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs
@@ -1,4 +1,5 @@
 // Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
 using System.Linq;
 using System.Net;
 using System.Net.Http;
@@ -6,6 +7,7 @@
 using System.Text.Json;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents.OpenAI;
+using OpenAI.Assistants;
 using Xunit;
 
 namespace SemanticKernel.Agents.UnitTests.OpenAI;
@@ -121,7 +123,7 @@ public static string AssistantDefinition(
                 builder.AppendLine(@$"  ""code_interpreter"": {{ ""file_ids"": [{fileIds}] }}{(hasFileSearch ? "," : string.Empty)}");
             }
 
-            if (hasFileSearch)
+            if (hasFileSearch && capabilities.VectorStoreId != null)
             {
                 builder.AppendLine(@$"  ""file_search"": {{ ""vector_store_ids"": [""{capabilities.VectorStoreId}""] }}");
             }
@@ -173,6 +175,115 @@ public static string AssistantDefinition(
         return builder.ToString();
     }
 
+    /// 
+    /// The response for creating or querying an assistant definition.
+    /// 
+    public static string AssistantDefinition(
+        string modelId,
+        string? name = null,
+        string? description = null,
+        string? instructions = null,
+        bool enableCodeInterpreter = false,
+        IReadOnlyList? codeInterpreterFileIds = null,
+        bool enableFileSearch = false,
+        string? vectorStoreId = null,
+        float? temperature = null,
+        float? topP = null,
+        AssistantResponseFormat? responseFormat = null,
+        IReadOnlyDictionary? metadata = null)
+    {
+        StringBuilder builder = new();
+        builder.AppendLine("{");
+        builder.AppendLine(@$"  ""id"": ""{AssistantId}"",");
+        builder.AppendLine(@"  ""object"": ""assistant"",");
+        builder.AppendLine(@"  ""created_at"": 1698984975,");
+        builder.AppendLine(@$"  ""name"": ""{name}"",");
+        builder.AppendLine(@$"  ""description"": ""{description}"",");
+        builder.AppendLine(@$"  ""instructions"": ""{instructions}"",");
+        builder.AppendLine(@$"  ""model"": ""{modelId}"",");
+
+        bool hasCodeInterpreterFiles = (codeInterpreterFileIds?.Count ?? 0) > 0;
+        bool hasCodeInterpreter = enableCodeInterpreter || hasCodeInterpreterFiles;
+        bool hasFileSearch = enableFileSearch || vectorStoreId != null;
+        if (!hasCodeInterpreter && !hasFileSearch)
+        {
+            builder.AppendLine(@"  ""tools"": [],");
+        }
+        else
+        {
+            builder.AppendLine(@"  ""tools"": [");
+
+            if (hasCodeInterpreter)
+            {
+                builder.Append(@$"  {{ ""type"": ""code_interpreter"" }}{(hasFileSearch ? "," : string.Empty)}");
+            }
+
+            if (hasFileSearch)
+            {
+                builder.AppendLine(@"  { ""type"": ""file_search"" }");
+            }
+
+            builder.AppendLine("    ],");
+        }
+
+        if (!hasCodeInterpreterFiles && !hasFileSearch)
+        {
+            builder.AppendLine(@"  ""tool_resources"": {},");
+        }
+        else
+        {
+            builder.AppendLine(@"  ""tool_resources"": {");
+
+            if (hasCodeInterpreterFiles)
+            {
+                string fileIds = string.Join(",", codeInterpreterFileIds!.Select(fileId => "\"" + fileId + "\""));
+                builder.AppendLine(@$"  ""code_interpreter"": {{ ""file_ids"": [{fileIds}] }}{(hasFileSearch ? "," : string.Empty)}");
+            }
+
+            if (hasFileSearch && vectorStoreId != null)
+            {
+                builder.AppendLine(@$"  ""file_search"": {{ ""vector_store_ids"": [""{vectorStoreId}""] }}");
+            }
+
+            builder.AppendLine("    },");
+        }
+
+        if (temperature.HasValue)
+        {
+            builder.AppendLine(@$"  ""temperature"": {temperature},");
+        }
+
+        if (topP.HasValue)
+        {
+            builder.AppendLine(@$"  ""top_p"": {topP},");
+        }
+        int metadataCount = (metadata?.Count ?? 0);
+        if (metadataCount == 0)
+        {
+            builder.AppendLine(@"  ""metadata"": {}");
+        }
+        else
+        {
+            int index = 0;
+            builder.AppendLine(@"  ""metadata"": {");
+
+            if (metadataCount > 0)
+            {
+                foreach (var (key, value) in metadata!)
+                {
+                    builder.AppendLine(@$"    ""{key}"": ""{value}""{(index < metadataCount - 1 ? "," : string.Empty)}");
+                    ++index;
+                }
+            }
+
+            builder.AppendLine("  }");
+        }
+
+        builder.AppendLine("}");
+
+        return builder.ToString();
+    }
+
     public const string DeleteAgent =
         $$$"""
         {
@@ -445,6 +556,42 @@ public static string GetTextMessage(string text = "test") =>
         }
         """;
 
+    public static string DeleteFile =
+        """
+        {
+          "id": "file-abc123",
+          "object": "file",
+          "deleted": true
+        }
+        """;
+
+    public static string CreateVectorStore =
+        """
+        {
+          "id": "vs_abc123",
+          "object": "vector_store",
+          "created_at": 1699061776,
+          "name": "test store",
+          "bytes": 139920,
+          "file_counts": {
+            "in_progress": 0,
+            "completed": 3,
+            "failed": 0,
+            "cancelled": 0,
+            "total": 3
+          }
+        }
+        """;
+
+    public static string DeleteVectorStore =
+        """
+        {
+          "id": "vs-abc123",
+          "object": "vector_store.deleted",
+          "deleted": true
+        }
+        """;
+
     #endregion
 
     /// 
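
As a quick orientation on the new AssistantDefinition overload added above: for a minimal call it emits a JSON assistant payload with empty tool and metadata sections. The sketch below is illustrative only; whitespace is approximate and AssistantId refers to the constant defined earlier in OpenAIAssistantResponseContent.

// Rough illustration of the payload produced by the new overload for a minimal call.
string payload = OpenAIAssistantResponseContent.AssistantDefinition("testmodel", topP: 0.5f);
// payload is roughly:
// {
//   "id": "<AssistantId>",
//   "object": "assistant",
//   "created_at": 1698984975,
//   "name": "",
//   "description": "",
//   "instructions": "",
//   "model": "testmodel",
//   "tools": [],
//   "tool_resources": {},
//   "top_p": 0.5,
//   "metadata": {}
// }
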
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs
index 6217e1f38395..410b93b3f03b 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs
@@ -91,10 +91,10 @@ public void VerifyOpenAIClientProviderWithHttpClient()
 
         // Arrange
         using HttpClient httpClientWithHeaders = new() { BaseAddress = new Uri("http://myproxy:9819") };
-        httpClient.DefaultRequestHeaders.Add("X-Test", "Test");
+        httpClientWithHeaders.DefaultRequestHeaders.Add("X-Test", "Test");
 
         // Act
-        OpenAIClientProvider providerWithHeaders = OpenAIClientProvider.ForOpenAI(httpClient: httpClient);
+        OpenAIClientProvider providerWithHeaders = OpenAIClientProvider.ForOpenAI(httpClient: httpClientWithHeaders);
 
         // Assert
         Assert.NotNull(providerWithHeaders.Client);
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs
index 1689bec1f828..c4b8abca4baf 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs
@@ -4,6 +4,7 @@
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents.OpenAI;
 using Microsoft.SemanticKernel.ChatCompletion;
+using SemanticKernel.Agents.UnitTests.Test;
 using Xunit;
 
 namespace SemanticKernel.Agents.UnitTests.OpenAI;
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs b/dotnet/src/Agents/UnitTests/Test/AssertCollection.cs
similarity index 95%
rename from dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs
rename to dotnet/src/Agents/UnitTests/Test/AssertCollection.cs
index cd51c736ac18..8a89cba994a2 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs
+++ b/dotnet/src/Agents/UnitTests/Test/AssertCollection.cs
@@ -3,7 +3,7 @@
 using System.Collections.Generic;
 using Xunit;
 
-namespace SemanticKernel.Agents.UnitTests.OpenAI;
+namespace SemanticKernel.Agents.UnitTests.Test;
 
 internal static class AssertCollection
 {
diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Services/AzureAIInferenceChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Services/AzureAIInferenceChatCompletionServiceTests.cs
index a8447d4838a3..6faef5ab9a11 100644
--- a/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Services/AzureAIInferenceChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Services/AzureAIInferenceChatCompletionServiceTests.cs
@@ -249,7 +249,7 @@ public async Task GetChatMessageInResponseFormatsAsync(string formatType, string
                 format = JsonSerializer.Deserialize(formatValue);
                 break;
             case "ChatResponseFormat":
-                format = formatValue == "text" ? new ChatCompletionsResponseFormatText() : new ChatCompletionsResponseFormatJSON();
+                format = formatValue == "text" ? new ChatCompletionsResponseFormatText() : new ChatCompletionsResponseFormatJsonObject();
                 break;
         }
 
diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference/Settings/AzureAIInferencePromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.AzureAIInference/Settings/AzureAIInferencePromptExecutionSettings.cs
index 3146cb94fb78..c6e9dd5d503e 100644
--- a/dotnet/src/Connectors/Connectors.AzureAIInference/Settings/AzureAIInferencePromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.AzureAIInference/Settings/AzureAIInferencePromptExecutionSettings.cs
@@ -136,7 +136,7 @@ public int? MaxTokens
     /// Note that to enable JSON mode, some AI models may also require you to instruct the model to produce JSON
     /// via a system or user message.
     /// Please note  is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes.
-    /// The available derived classes include  and .
+    /// The available derived classes include  and .
     /// 
     [JsonPropertyName("response_format")]
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureClientCoreTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureClientCoreTests.cs
new file mode 100644
index 000000000000..c9c47f07ee86
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureClientCoreTests.cs
@@ -0,0 +1,90 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Net.Http;
+using Azure.AI.OpenAI;
+using Azure.Core;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using Moq;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Core;
+
+public sealed class AzureClientCoreTests : IDisposable
+{
+    private readonly HttpClient _httpClient;
+    private readonly Mock _mockLogger;
+
+    public AzureClientCoreTests()
+    {
+        this._httpClient = new HttpClient();
+        this._mockLogger = new Mock();
+    }
+
+    public void Dispose()
+    {
+        this._httpClient.Dispose();
+    }
+
+    [Fact]
+    public void ConstructorWithValidParametersShouldInitializeCorrectly()
+    {
+        // Arrange
+        var deploymentName = "test-deployment";
+        var endpoint = "https://test-endpoint.openai.azure.com/";
+        var apiKey = "test-api-key";
+
+        // Act
+        var azureClientCore = new AzureClientCore(deploymentName, endpoint, apiKey, this._httpClient, this._mockLogger.Object);
+
+        // Assert
+        Assert.NotNull(azureClientCore.Client);
+        Assert.Equal(deploymentName, azureClientCore.DeploymentName);
+        Assert.Equal(new Uri(endpoint), azureClientCore.Endpoint);
+    }
+
+    [Fact]
+    public void ConstructorWithInvalidEndpointShouldThrowArgumentException()
+    {
+        // Arrange
+        var deploymentName = "test-deployment";
+        var invalidEndpoint = "http://invalid-endpoint";
+        var apiKey = "test-api-key";
+
+        // Act & Assert
+        Assert.Throws(() =>
+            new AzureClientCore(deploymentName, invalidEndpoint, apiKey, this._httpClient, this._mockLogger.Object));
+    }
+
+    [Fact]
+    public void ConstructorWithTokenCredentialShouldInitializeCorrectly()
+    {
+        // Arrange
+        var deploymentName = "test-deployment";
+        var endpoint = "https://test-endpoint.openai.azure.com/";
+        var tokenCredential = new Mock().Object;
+
+        // Act
+        var azureClientCore = new AzureClientCore(deploymentName, endpoint, tokenCredential, this._httpClient, this._mockLogger.Object);
+
+        // Assert
+        Assert.NotNull(azureClientCore.Client);
+        Assert.Equal(deploymentName, azureClientCore.DeploymentName);
+        Assert.Equal(new Uri(endpoint), azureClientCore.Endpoint);
+    }
+
+    [Fact]
+    public void ConstructorWithOpenAIClientShouldInitializeCorrectly()
+    {
+        // Arrange
+        var deploymentName = "test-deployment";
+        var openAIClient = new Mock(MockBehavior.Strict, new Uri("https://test-endpoint.openai.azure.com/"), new Mock().Object).Object;
+
+        // Act
+        var azureClientCore = new AzureClientCore(deploymentName, openAIClient, this._mockLogger.Object);
+
+        // Assert
+        Assert.NotNull(azureClientCore.Client);
+        Assert.Equal(deploymentName, azureClientCore.DeploymentName);
+    }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs
index 336d12036db9..bcfa9aef4ecd 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs
@@ -79,6 +79,17 @@ public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFacto
         Assert.Equal("model-id", service.Attributes["ModelId"]);
     }
 
+    [Theory]
+    [InlineData("invalid")]
+    public void ConstructorThrowsOnInvalidApiVersion(string? apiVersion)
+    {
+        // Act & Assert
+        Assert.Throws(() =>
+        {
+            _ = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", httpClient: this._httpClient, apiVersion: apiVersion);
+        });
+    }
+
     [Theory]
     [InlineData(true)]
     [InlineData(false)]
@@ -122,8 +133,10 @@ public async Task GetTextContentsWorksCorrectlyAsync()
         Assert.Equal(155, usage.TotalTokenCount);
     }
 
-    [Fact]
-    public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync()
+    [Theory]
+    [InlineData("system")]
+    [InlineData("developer")]
+    public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync(string historyRole)
     {
         // Arrange
         var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
@@ -152,7 +165,14 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync()
         var chatHistory = new ChatHistory();
         chatHistory.AddUserMessage("User Message");
         chatHistory.AddUserMessage([new ImageContent(new Uri("https://image")), new TextContent("User Message")]);
-        chatHistory.AddSystemMessage("System Message");
+        if (historyRole == "system")
+        {
+            chatHistory.AddSystemMessage("System Message");
+        }
+        else
+        {
+            chatHistory.AddDeveloperMessage("Developer Message");
+        }
         chatHistory.AddAssistantMessage("Assistant Message");
 
         using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK)
@@ -189,8 +209,16 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync()
         Assert.Equal("User Message", contentItems[1].GetProperty("text").GetString());
         Assert.Equal("text", contentItems[1].GetProperty("type").GetString());
 
-        Assert.Equal("system", systemMessage.GetProperty("role").GetString());
-        Assert.Equal("System Message", systemMessage.GetProperty("content").GetString());
+        if (historyRole == "system")
+        {
+            Assert.Equal("system", systemMessage.GetProperty("role").GetString());
+            Assert.Equal("System Message", systemMessage.GetProperty("content").GetString());
+        }
+        else
+        {
+            Assert.Equal("developer", systemMessage.GetProperty("role").GetString());
+            Assert.Equal("Developer Message", systemMessage.GetProperty("content").GetString());
+        }
 
         Assert.Equal("assistant", assistantMessage.GetProperty("role").GetString());
         Assert.Equal("Assistant Message", assistantMessage.GetProperty("content").GetString());
@@ -245,6 +273,166 @@ public async Task GetChatMessageContentsHandlesResponseFormatCorrectlyAsync(obje
         Assert.Equal(expectedResponseType, content.GetProperty("response_format").GetProperty("type").GetString());
     }
 
+    [Theory]
+    [InlineData(true, "max_completion_tokens")]
+    [InlineData(false, "max_tokens")]
+    public async Task GetChatMessageContentsHandlesMaxTokensCorrectlyAsync(bool useNewMaxTokens, string expectedPropertyName)
+    {
+        // Arrange
+        var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+        var settings = new AzureOpenAIPromptExecutionSettings
+        {
+            SetNewMaxCompletionTokensEnabled = useNewMaxTokens,
+            MaxTokens = 123
+        };
+
+        using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK)
+        {
+            Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+        };
+        this._messageHandlerStub.ResponsesToReturn.Add(responseMessage);
+
+        // Act
+        var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings);
+
+        // Assert
+        var requestContent = this._messageHandlerStub.RequestContents[0];
+
+        Assert.NotNull(requestContent);
+
+        var content = JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContent));
+
+        Assert.True(content.TryGetProperty(expectedPropertyName, out var propertyValue));
+        Assert.Equal(123, propertyValue.GetInt32());
+    }
+
+    [Theory]
+    [InlineData("stream", "true")]
+    [InlineData("stream_options", "{\"include_usage\":true}")]
+    [InlineData("model", "\"deployment\"")]
+
+    public async Task GetStreamingChatMessageContentsRequestHandlesInternalFieldsCorrectlyAsync(string expectedPropertyName, string expectedRawJsonText)
+    {
+        // Arrange
+        var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+        var settings = new AzureOpenAIPromptExecutionSettings();
+
+        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")));
+
+        using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK)
+        {
+            Content = new StreamContent(stream)
+        };
+        this._messageHandlerStub.ResponsesToReturn.Add(responseMessage);
+
+        // Act
+        await foreach (var update in service.GetStreamingChatMessageContentsAsync(new ChatHistory("System message"), settings))
+        {
+            var openAIUpdate = Assert.IsType(update.InnerContent);
+        }
+
+        // Assert
+        var requestContent = this._messageHandlerStub.RequestContents[0];
+
+        Assert.NotNull(requestContent);
+
+        var content = JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContent));
+
+        Assert.True(content.TryGetProperty(expectedPropertyName, out var propertyValue));
+        Assert.Equal(expectedRawJsonText, propertyValue.GetRawText());
+    }
+
+    [Theory]
+    [InlineData("model", "\"deployment\"")]
+
+    public async Task GetChatMessageContentsRequestHandlesInternalFieldsCorrectlyAsync(string expectedPropertyName, string expectedRawJsonText)
+    {
+        // Arrange
+        var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+        var settings = new AzureOpenAIPromptExecutionSettings();
+
+        using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK)
+        {
+            Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+        };
+        this._messageHandlerStub.ResponsesToReturn.Add(responseMessage);
+
+        // Act
+        var results = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings);
+        var result = Assert.Single(results);
+        Assert.IsType(result.InnerContent);
+
+        // Assert
+        var requestContent = this._messageHandlerStub.RequestContents[0];
+
+        Assert.NotNull(requestContent);
+
+        var content = JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContent));
+
+        Assert.True(content.TryGetProperty(expectedPropertyName, out var propertyValue));
+        Assert.Equal(expectedRawJsonText, propertyValue.GetRawText());
+    }
+
+    [Theory]
+    [InlineData(null, null)]
+    [InlineData("string", "low")]
+    [InlineData("string", "medium")]
+    [InlineData("string", "high")]
+    [InlineData("ChatReasonEffortLevel.Low", "low")]
+    [InlineData("ChatReasonEffortLevel.Medium", "medium")]
+    [InlineData("ChatReasonEffortLevel.High", "high")]
+    public async Task GetChatMessageInReasoningEffortAsync(string? effortType, string? expectedEffortLevel)
+    {
+        // Arrange
+        object? reasoningEffortObject = null;
+        switch (effortType)
+        {
+            case "string":
+                reasoningEffortObject = expectedEffortLevel;
+                break;
+            case "ChatReasonEffortLevel.Low":
+                reasoningEffortObject = ChatReasoningEffortLevel.Low;
+                break;
+            case "ChatReasonEffortLevel.Medium":
+                reasoningEffortObject = ChatReasoningEffortLevel.Medium;
+                break;
+            case "ChatReasonEffortLevel.High":
+                reasoningEffortObject = ChatReasoningEffortLevel.High;
+                break;
+        }
+
+        var modelId = "o1";
+        var sut = new OpenAIChatCompletionService(modelId, "apiKey", httpClient: this._httpClient);
+        OpenAIPromptExecutionSettings executionSettings = new() { ReasoningEffort = reasoningEffortObject };
+        using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK)
+        {
+            Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json"))
+        };
+
+        this._messageHandlerStub.ResponsesToReturn.Add(responseMessage);
+
+        // Act
+        var result = await sut.GetChatMessageContentAsync(new ChatHistory("System message"), executionSettings);
+
+        // Assert
+        Assert.NotNull(result);
+
+        var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[0]!);
+        Assert.NotNull(actualRequestContent);
+
+        var optionsJson = JsonSerializer.Deserialize(actualRequestContent);
+
+        if (expectedEffortLevel is null)
+        {
+            Assert.False(optionsJson.TryGetProperty("reasoning_effort", out _));
+            return;
+        }
+
+        var requestedReasoningEffort = optionsJson.GetProperty("reasoning_effort").GetString();
+
+        Assert.Equal(expectedEffortLevel, requestedReasoningEffort);
+    }
+
     [Theory]
     [MemberData(nameof(ToolCallBehaviors))]
     public async Task GetChatMessageContentsWorksCorrectlyAsync(ToolCallBehavior behavior)
@@ -806,6 +994,49 @@ public async Task GetChatMessageContentsUsesPromptAndSettingsCorrectlyAsync()
         Assert.Equal("user", messages[1].GetProperty("role").GetString());
     }
 
+    [Fact]
+    public async Task GetChatMessageContentsUsesDeveloperPromptAndSettingsCorrectlyAsync()
+    {
+        // Arrange
+        const string Prompt = "This is test prompt";
+        const string DeveloperMessage = "This is test developer message";
+
+        var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+        var settings = new AzureOpenAIPromptExecutionSettings() { ChatDeveloperPrompt = DeveloperMessage };
+
+        using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK)
+        {
+            Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+        };
+        this._messageHandlerStub.ResponsesToReturn.Add(responseMessage);
+
+        IKernelBuilder builder = Kernel.CreateBuilder();
+        builder.Services.AddTransient((sp) => service);
+        Kernel kernel = builder.Build();
+
+        // Act
+        var result = await kernel.InvokePromptAsync(Prompt, new(settings));
+
+        // Assert
+        Assert.Equal("Test chat response", result.ToString());
+
+        var requestContentByteArray = this._messageHandlerStub.RequestContents[0];
+
+        Assert.NotNull(requestContentByteArray);
+
+        var requestContent = JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContentByteArray));
+
+        var messages = requestContent.GetProperty("messages");
+
+        Assert.Equal(2, messages.GetArrayLength());
+
+        Assert.Equal(DeveloperMessage, messages[0].GetProperty("content").GetString());
+        Assert.Equal("developer", messages[0].GetProperty("role").GetString());
+
+        Assert.Equal(Prompt, messages[1].GetProperty("content").GetString());
+        Assert.Equal("user", messages[1].GetProperty("role").GetString());
+    }
+
     [Fact]
     public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndSettingsCorrectlyAsync()
     {
@@ -1537,6 +1768,14 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallAndEmptyArgumen
 
     public static TheoryData Versions => new()
     {
+        { "V2025_01_01_preview", "2025-01-01-preview" },
+        { "V2025_01_01_PREVIEW", "2025-01-01-preview" },
+        { "2025_01_01_Preview", "2025-01-01-preview" },
+        { "2025-01-01-preview", "2025-01-01-preview" },
+        { "V2024_12_01_preview", "2024-12-01-preview" },
+        { "V2024_12_01_PREVIEW", "2024-12-01-preview" },
+        { "2024_12_01_Preview", "2024-12-01-preview" },
+        { "2024-12-01-preview", "2024-12-01-preview" },
         { "V2024_10_01_preview", "2024-10-01-preview" },
         { "V2024_10_01_PREVIEW", "2024-10-01-preview" },
         { "2024_10_01_Preview", "2024-10-01-preview" },
@@ -1552,10 +1791,16 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallAndEmptyArgumen
         { "V2024_06_01", "2024-06-01" },
         { "2024_06_01", "2024-06-01" },
         { "2024-06-01", "2024-06-01" },
+        { "V2024_10_21", "2024-10-21" },
+        { "2024_10_21", "2024-10-21" },
+        { "2024-10-21", "2024-10-21" },
+        { AzureOpenAIClientOptions.ServiceVersion.V2025_01_01_Preview.ToString(), null },
+        { AzureOpenAIClientOptions.ServiceVersion.V2024_12_01_Preview.ToString(), null },
         { AzureOpenAIClientOptions.ServiceVersion.V2024_10_01_Preview.ToString(), null },
         { AzureOpenAIClientOptions.ServiceVersion.V2024_09_01_Preview.ToString(), null },
         { AzureOpenAIClientOptions.ServiceVersion.V2024_08_01_Preview.ToString(), null },
-        { AzureOpenAIClientOptions.ServiceVersion.V2024_06_01.ToString(), null }
+        { AzureOpenAIClientOptions.ServiceVersion.V2024_06_01.ToString(), null },
+        { AzureOpenAIClientOptions.ServiceVersion.V2024_10_21.ToString(), null }
     };
 
     public void Dispose()
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToAudioServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToAudioServiceTests.cs
index 3ad42a32eac6..27e2b3ebc14d 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToAudioServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToAudioServiceTests.cs
@@ -47,6 +47,23 @@ public void ConstructorsAddRequiredMetadata(bool includeLoggerFactory)
         Assert.Equal("deployment-name", service.Attributes["DeploymentName"]);
     }
 
+    [Theory]
+    [InlineData(true)]
+    [InlineData(false)]
+    public void ConstructorTokenCredentialAddRequiredMetadata(bool includeLoggerFactory)
+    {
+        // Arrange & Act
+        var service = includeLoggerFactory ?
+            new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", Azure.Core.DelegatedTokenCredential.Create((context, ct)
+                => new Azure.Core.AccessToken("abc", DateTimeOffset.Now.AddMinutes(30))), "model-id", loggerFactory: this._mockLoggerFactory.Object) :
+            new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", Azure.Core.DelegatedTokenCredential.Create((context, ct)
+                => new Azure.Core.AccessToken("abc", DateTimeOffset.Now.AddMinutes(30))), "model-id");
+
+        // Assert
+        Assert.Equal("model-id", service.Attributes["ModelId"]);
+        Assert.Equal("deployment-name", service.Attributes["DeploymentName"]);
+    }
+
     [Fact]
     public void ItThrowsIfModelIdIsNotProvided()
     {
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs
index 6b4b16c574af..8f3b9a245634 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs
@@ -35,6 +35,7 @@ public void ItCreatesOpenAIExecutionSettingsWithCorrectDefaults()
         Assert.Null(executionSettings.TopLogprobs);
         Assert.Null(executionSettings.Logprobs);
         Assert.Null(executionSettings.AzureChatDataSource);
+        Assert.False(executionSettings.SetNewMaxCompletionTokensEnabled);
         Assert.Equal(maxTokensSettings, executionSettings.MaxTokens);
         Assert.Null(executionSettings.Store);
         Assert.Null(executionSettings.Metadata);
@@ -58,7 +59,8 @@ public void ItUsesExistingOpenAIExecutionSettings()
             TokenSelectionBiases = new Dictionary() { { 1, 2 }, { 3, 4 } },
             Seed = 123456,
             Store = true,
-            Metadata = new Dictionary() { { "foo", "bar" } }
+            Metadata = new Dictionary() { { "foo", "bar" } },
+            SetNewMaxCompletionTokensEnabled = true,
         };
 
         // Act
@@ -74,6 +76,7 @@ public void ItUsesExistingOpenAIExecutionSettings()
         Assert.Equal(actualSettings.Seed, executionSettings.Seed);
         Assert.Equal(actualSettings.Store, executionSettings.Store);
         Assert.Equal(actualSettings.Metadata, executionSettings.Metadata);
+        Assert.Equal(actualSettings.SetNewMaxCompletionTokensEnabled, executionSettings.SetNewMaxCompletionTokensEnabled);
     }
 
     [Fact]
@@ -259,6 +262,7 @@ public void PromptExecutionSettingsFreezeWorksAsExpected()
         Assert.Throws(() => executionSettings.TokenSelectionBiases?.Add(5, 6));
         Assert.Throws(() => executionSettings.Store = false);
         Assert.Throws(() => executionSettings.Metadata?.Add("bar", "foo"));
+        Assert.Throws(() => executionSettings.SetNewMaxCompletionTokensEnabled = true);
 
         executionSettings!.Freeze(); // idempotent
         Assert.True(executionSettings.IsFrozen);
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs
index bf7859815f1d..23fc85541a0b 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs
@@ -1,5 +1,7 @@
 // Copyright (c) Microsoft. All rights reserved.
 
+using System;
+using System.ClientModel.Primitives;
 using System.Diagnostics;
 using Azure.AI.OpenAI.Chat;
 using Microsoft.SemanticKernel.ChatCompletion;
@@ -35,22 +37,29 @@ protected override ChatCompletionOptions CreateChatCompletionOptions(
         {
             return base.CreateChatCompletionOptions(executionSettings, chatHistory, toolCallingConfig, kernel);
         }
-
-        var options = new ChatCompletionOptions
-        {
-            MaxOutputTokenCount = executionSettings.MaxTokens,
-            Temperature = (float?)executionSettings.Temperature,
-            TopP = (float?)executionSettings.TopP,
-            FrequencyPenalty = (float?)executionSettings.FrequencyPenalty,
-            PresencePenalty = (float?)executionSettings.PresencePenalty,
+        ChatCompletionOptions options = ModelReaderWriter.Read(BinaryData.FromString("{\"stream_options\":{\"include_usage\":true}}")!)!;
+        options.MaxOutputTokenCount = executionSettings.MaxTokens;
+        options.Temperature = (float?)executionSettings.Temperature;
+        options.TopP = (float?)executionSettings.TopP;
+        options.FrequencyPenalty = (float?)executionSettings.FrequencyPenalty;
+        options.PresencePenalty = (float?)executionSettings.PresencePenalty;
 #pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
-            Seed = executionSettings.Seed,
+
+        options.Seed = executionSettings.Seed;
 #pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
-            EndUserId = executionSettings.User,
-            TopLogProbabilityCount = executionSettings.TopLogprobs,
-            IncludeLogProbabilities = executionSettings.Logprobs,
-            StoredOutputEnabled = executionSettings.Store,
-        };
+        options.EndUserId = executionSettings.User;
+        options.TopLogProbabilityCount = executionSettings.TopLogprobs;
+        options.IncludeLogProbabilities = executionSettings.Logprobs;
+        options.StoredOutputEnabled = executionSettings.Store;
+        options.ReasoningEffortLevel = GetEffortLevel(executionSettings);
+        options.ResponseModalities = ChatResponseModalities.Default;
+
+        if (azureSettings.SetNewMaxCompletionTokensEnabled)
+        {
+#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+            options.SetNewMaxCompletionTokensPropertyEnabled(true);
+#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+        }
 
         var responseFormat = GetResponseFormat(executionSettings);
         if (responseFormat is not null)
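
The construction above seeds ChatCompletionOptions from raw JSON via ModelReaderWriter so that stream_options.include_usage is present on streaming requests, which the public options API does not otherwise allow setting; the remaining settings are then applied through the normal property setters. A minimal standalone sketch of the same pattern, using the OpenAI SDK's ChatCompletionOptions directly (values are placeholders):

    using System;
    using System.ClientModel.Primitives;
    using OpenAI.Chat;

    // Hydrate the options from JSON so stream_options.include_usage is set,
    // then assign the user-facing settings through regular properties.
    ChatCompletionOptions options = ModelReaderWriter.Read<ChatCompletionOptions>(
        BinaryData.FromString("{\"stream_options\":{\"include_usage\":true}}"))!;

    options.Temperature = 0.7f;
    options.MaxOutputTokenCount = 256;
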
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs
index 5ad45701a921..a3dbbe730057 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs
@@ -135,9 +135,12 @@ internal static AzureOpenAIClientOptions GetAzureOpenAIClientOptions(HttpClient?
             sdkVersion = serviceVersion.ToUpperInvariant() switch // Azure SDK versioning
             {
                 "2024-06-01" or "V2024_06_01" or "2024_06_01" => AzureOpenAIClientOptions.ServiceVersion.V2024_06_01,
+                "2024-10-21" or "V2024_10_21" or "2024_10_21" => AzureOpenAIClientOptions.ServiceVersion.V2024_10_21,
                 "2024-08-01-PREVIEW" or "V2024_08_01_PREVIEW" or "2024_08_01_PREVIEW" => AzureOpenAIClientOptions.ServiceVersion.V2024_08_01_Preview,
                 "2024-09-01-PREVIEW" or "V2024_09_01_PREVIEW" or "2024_09_01_PREVIEW" => AzureOpenAIClientOptions.ServiceVersion.V2024_09_01_Preview,
                 "2024-10-01-PREVIEW" or "V2024_10_01_PREVIEW" or "2024_10_01_PREVIEW" => AzureOpenAIClientOptions.ServiceVersion.V2024_10_01_Preview,
+                "2024-12-01-PREVIEW" or "V2024_12_01_PREVIEW" or "2024_12_01_PREVIEW" => AzureOpenAIClientOptions.ServiceVersion.V2024_12_01_Preview,
+                "2025-01-01-PREVIEW" or "V2025_01_01_PREVIEW" or "2025_01_01_PREVIEW" => AzureOpenAIClientOptions.ServiceVersion.V2025_01_01_Preview,
 
                 _ => throw new NotSupportedException($"The service version '{serviceVersion}' is not supported.")
             };
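
The switch now also recognizes the 2024-10-21 GA version and the 2024-12-01-preview and 2025-01-01-preview versions, each in dashed, underscored, or V-prefixed spelling. A self-contained sketch that mirrors (but does not reuse) this private normalization rule, trimmed to two versions:

    using System;
    using Azure.AI.OpenAI;

    // Mirrors AzureClientCore.GetAzureOpenAIClientOptions: all spellings of a
    // version normalize to the same ServiceVersion; unknown values throw.
    static AzureOpenAIClientOptions.ServiceVersion ParseServiceVersion(string serviceVersion) =>
        serviceVersion.ToUpperInvariant() switch
        {
            "2024-06-01" or "V2024_06_01" or "2024_06_01" => AzureOpenAIClientOptions.ServiceVersion.V2024_06_01,
            "2024-10-21" or "V2024_10_21" or "2024_10_21" => AzureOpenAIClientOptions.ServiceVersion.V2024_10_21,
            _ => throw new NotSupportedException($"The service version '{serviceVersion}' is not supported.")
        };

    Console.WriteLine(ParseServiceVersion("2024_10_21")); // V2024_10_21
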
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs
index 1d00ba3207f5..8852e5fd35df 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs
@@ -16,6 +16,26 @@ namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
 [JsonNumberHandling(JsonNumberHandling.AllowReadingFromString)]
 public sealed class AzureOpenAIPromptExecutionSettings : OpenAIPromptExecutionSettings
 {
+    /// 
+    /// Enabling this property will enforce the new max_completion_tokens parameter to be sent to the Azure OpenAI API.
+    /// 
+    /// 
+    /// This setting is temporary and flags the underlying Azure SDK to use the
+    /// new max_completion_tokens parameter via the
+    /// SetNewMaxCompletionTokensPropertyEnabled extension.
+    /// 
+    [Experimental("SKEXP0010")]
+    [JsonIgnore]
+    public bool SetNewMaxCompletionTokensEnabled
+    {
+        get => this._setNewMaxCompletionTokensEnabled;
+        set
+        {
+            this.ThrowIfFrozen();
+            this._setNewMaxCompletionTokensEnabled = value;
+        }
+    }
+
     /// 
     /// An abstraction of additional settings for chat completion, see https://learn.microsoft.com/en-us/dotnet/api/azure.ai.openai.azurechatextensionsoptions.
     /// This property is compatible only with Azure OpenAI.
@@ -38,6 +58,7 @@ public override PromptExecutionSettings Clone()
     {
         var settings = base.Clone();
         settings.AzureChatDataSource = this.AzureChatDataSource;
+        settings.SetNewMaxCompletionTokensEnabled = this.SetNewMaxCompletionTokensEnabled;
         return settings;
     }
 
@@ -103,6 +124,7 @@ public static AzureOpenAIPromptExecutionSettings FromExecutionSettingsWithData(P
     #region private ================================================================================
     [Experimental("SKEXP0010")]
     private AzureSearchChatDataSource? _azureChatDataSource;
+    private bool _setNewMaxCompletionTokensEnabled;
 
     #endregion
 }
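
A usage sketch for the new flag, assuming a Kernel that already has an Azure OpenAI chat completion service registered; the prompt is a placeholder. When enabled, the connector calls the Azure SDK's SetNewMaxCompletionTokensPropertyEnabled extension so the request carries max_completion_tokens instead of max_tokens:

    using Microsoft.SemanticKernel;
    using Microsoft.SemanticKernel.Connectors.AzureOpenAI;

    #pragma warning disable SKEXP0010 // SetNewMaxCompletionTokensEnabled is experimental.
    var settings = new AzureOpenAIPromptExecutionSettings
    {
        MaxTokens = 1024,
        // Sends max_completion_tokens instead of max_tokens on the request.
        SetNewMaxCompletionTokensEnabled = true,
    };
    #pragma warning restore SKEXP0010

    // 'kernel' is assumed to be an already-configured Kernel instance.
    var result = await kernel.InvokePromptAsync("Summarize the release notes.", new(settings));
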
diff --git a/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/GeminiRequestTests.cs b/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/GeminiRequestTests.cs
index 55283d191a84..3cbf9973ccbb 100644
--- a/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/GeminiRequestTests.cs
+++ b/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/GeminiRequestTests.cs
@@ -338,6 +338,33 @@ public void FromChatHistoryImageAsImageContentItReturnsWithChatHistory()
                 .SequenceEqual(Convert.FromBase64String(c.Parts![0].InlineData!.InlineData))));
     }
 
+    [Fact]
+    public void FromChatHistoryAudioAsAudioContentItReturnsWithChatHistory()
+    {
+        // Arrange
+        ReadOnlyMemory audioAsBytes = new byte[] { 0x00, 0x01, 0x02, 0x03 };
+        ChatHistory chatHistory = [];
+        chatHistory.AddUserMessage("user-message");
+        chatHistory.AddAssistantMessage("assist-message");
+        chatHistory.AddUserMessage(contentItems:
+            [new AudioContent(new Uri("https://example-audio.com/file.wav")) { MimeType = "audio/wav" }]);
+        chatHistory.AddUserMessage(contentItems:
+            [new AudioContent(audioAsBytes, "audio/mp3")]);
+        var executionSettings = new GeminiPromptExecutionSettings();
+
+        // Act
+        var request = GeminiRequest.FromChatHistoryAndExecutionSettings(chatHistory, executionSettings);
+
+        // Assert
+        Assert.Collection(request.Contents,
+            c => Assert.Equal(chatHistory[0].Content, c.Parts![0].Text),
+            c => Assert.Equal(chatHistory[1].Content, c.Parts![0].Text),
+            c => Assert.Equal(chatHistory[2].Items.Cast().Single().Uri,
+                c.Parts![0].FileData!.FileUri),
+            c => Assert.True(audioAsBytes.ToArray()
+                .SequenceEqual(Convert.FromBase64String(c.Parts![0].InlineData!.InlineData))));
+    }
+
     [Fact]
     public void FromChatHistoryUnsupportedContentItThrowsNotSupportedException()
     {
@@ -470,6 +497,44 @@ public void AddChatMessageToRequest()
             c => Equals(message.Role, c.Role));
     }
 
+    [Fact]
+    public void CachedContentFromPromptReturnsAsExpected()
+    {
+        // Arrange
+        var prompt = "prompt-example";
+        var executionSettings = new GeminiPromptExecutionSettings
+        {
+            CachedContent = "xyz/abc"
+        };
+
+        // Act
+        var request = GeminiRequest.FromPromptAndExecutionSettings(prompt, executionSettings);
+
+        // Assert
+        Assert.NotNull(request.Configuration);
+        Assert.Equal(executionSettings.CachedContent, request.CachedContent);
+    }
+
+    [Fact]
+    public void CachedContentFromChatHistoryReturnsAsExpected()
+    {
+        // Arrange
+        ChatHistory chatHistory = [];
+        chatHistory.AddUserMessage("user-message");
+        chatHistory.AddAssistantMessage("assist-message");
+        chatHistory.AddUserMessage("user-message2");
+        var executionSettings = new GeminiPromptExecutionSettings
+        {
+            CachedContent = "xyz/abc"
+        };
+
+        // Act
+        var request = GeminiRequest.FromChatHistoryAndExecutionSettings(chatHistory, executionSettings);
+
+        // Assert
+        Assert.Equal(executionSettings.CachedContent, request.CachedContent);
+    }
+
     private sealed class DummyContent(object? innerContent, string? modelId = null, IReadOnlyDictionary? metadata = null) :
         KernelContent(innerContent, modelId, metadata);
 }
diff --git a/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/GoogleAIGeminiChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/GoogleAIGeminiChatCompletionServiceTests.cs
index 1d9bb5d6377d..0d986d21ca5a 100644
--- a/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/GoogleAIGeminiChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/GoogleAIGeminiChatCompletionServiceTests.cs
@@ -1,13 +1,34 @@
 // Copyright (c) Microsoft. All rights reserved.
 
+using System;
+using System.IO;
+using System.Net.Http;
+using System.Text;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel.Connectors.Google;
 using Microsoft.SemanticKernel.Services;
 using Xunit;
 
 namespace SemanticKernel.Connectors.Google.UnitTests.Services;
 
-public sealed class GoogleAIGeminiChatCompletionServiceTests
+public sealed class GoogleAIGeminiChatCompletionServiceTests : IDisposable
 {
+    private readonly HttpMessageHandlerStub _messageHandlerStub;
+    private readonly HttpClient _httpClient;
+
+    public GoogleAIGeminiChatCompletionServiceTests()
+    {
+        this._messageHandlerStub = new()
+        {
+            ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+            {
+                Content = new StringContent(File.ReadAllText("./TestData/completion_one_response.json"))
+            }
+        };
+        this._httpClient = new HttpClient(this._messageHandlerStub, false);
+    }
+
     [Fact]
     public void AttributesShouldContainModelId()
     {
@@ -18,4 +39,39 @@ public void AttributesShouldContainModelId()
         // Assert
         Assert.Equal(model, service.Attributes[AIServiceExtensions.ModelIdKey]);
     }
+
+    [Theory]
+    [InlineData(null)]
+    [InlineData("content")]
+    [InlineData("")]
+    public async Task RequestCachedContentWorksCorrectlyAsync(string? cachedContent)
+    {
+        // Arrange
+        string model = "fake-model";
+        var sut = new GoogleAIGeminiChatCompletionService(model, "key", httpClient: this._httpClient);
+
+        // Act
+        var result = await sut.GetChatMessageContentAsync("my prompt", new GeminiPromptExecutionSettings { CachedContent = cachedContent });
+
+        // Assert
+        Assert.NotNull(result);
+        Assert.NotNull(this._messageHandlerStub.RequestContent);
+
+        var requestBody = UTF8Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent);
+        if (cachedContent is not null)
+        {
+            Assert.Contains($"\"cachedContent\":\"{cachedContent}\"", requestBody);
+        }
+        else
+        {
+            // When no cached content is provided, it should not be included in the request body
+            Assert.DoesNotContain("cachedContent", requestBody);
+        }
+    }
+
+    public void Dispose()
+    {
+        this._httpClient.Dispose();
+        this._messageHandlerStub.Dispose();
+    }
 }
diff --git a/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/VertexAIGeminiChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/VertexAIGeminiChatCompletionServiceTests.cs
index 89e65fbaa534..0376924c0e91 100644
--- a/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/VertexAIGeminiChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/VertexAIGeminiChatCompletionServiceTests.cs
@@ -1,14 +1,34 @@
 // Copyright (c) Microsoft. All rights reserved.
 
+using System;
+using System.IO;
+using System.Net.Http;
+using System.Text;
 using System.Threading.Tasks;
+using Microsoft.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel.Connectors.Google;
 using Microsoft.SemanticKernel.Services;
 using Xunit;
 
 namespace SemanticKernel.Connectors.Google.UnitTests.Services;
 
-public sealed class VertexAIGeminiChatCompletionServiceTests
+public sealed class VertexAIGeminiChatCompletionServiceTests : IDisposable
 {
+    private readonly HttpMessageHandlerStub _messageHandlerStub;
+    private readonly HttpClient _httpClient;
+
+    public VertexAIGeminiChatCompletionServiceTests()
+    {
+        this._messageHandlerStub = new()
+        {
+            ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+            {
+                Content = new StringContent(File.ReadAllText("./TestData/completion_one_response.json"))
+            }
+        };
+        this._httpClient = new HttpClient(this._messageHandlerStub, false);
+    }
+
     [Fact]
     public void AttributesShouldContainModelIdBearerAsString()
     {
@@ -30,4 +50,39 @@ public void AttributesShouldContainModelIdBearerAsFunc()
         // Assert
         Assert.Equal(model, service.Attributes[AIServiceExtensions.ModelIdKey]);
     }
+
+    [Theory]
+    [InlineData(null)]
+    [InlineData("content")]
+    [InlineData("")]
+    public async Task RequestCachedContentWorksCorrectlyAsync(string? cachedContent)
+    {
+        // Arrange
+        string model = "fake-model";
+        var sut = new VertexAIGeminiChatCompletionService(model, () => new ValueTask("key"), "location", "project", httpClient: this._httpClient);
+
+        // Act
+        var result = await sut.GetChatMessageContentAsync("my prompt", new GeminiPromptExecutionSettings { CachedContent = cachedContent });
+
+        // Assert
+        Assert.NotNull(result);
+        Assert.NotNull(this._messageHandlerStub.RequestContent);
+
+        var requestBody = UTF8Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent);
+        if (cachedContent is not null)
+        {
+            Assert.Contains($"\"cachedContent\":\"{cachedContent}\"", requestBody);
+        }
+        else
+        {
+            // When no cached content is provided, it should not be included in the request body
+            Assert.DoesNotContain("cachedContent", requestBody);
+        }
+    }
+
+    public void Dispose()
+    {
+        this._httpClient.Dispose();
+        this._messageHandlerStub.Dispose();
+    }
 }
diff --git a/dotnet/src/Connectors/Connectors.Google/Core/ClientBase.cs b/dotnet/src/Connectors/Connectors.Google/Core/ClientBase.cs
index 5d465f5d590f..b94ca9eeebc6 100644
--- a/dotnet/src/Connectors/Connectors.Google/Core/ClientBase.cs
+++ b/dotnet/src/Connectors/Connectors.Google/Core/ClientBase.cs
@@ -112,6 +112,7 @@ protected static string GetApiVersionSubLink(VertexAIVersion apiVersion)
         => apiVersion switch
         {
             VertexAIVersion.V1 => "v1",
+            VertexAIVersion.V1_Beta => "v1beta1",
             _ => throw new NotSupportedException($"Vertex API version {apiVersion} is not supported.")
         };
 }
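
A hedged sketch of opting into the beta surface; it assumes the Vertex AI Gemini service constructor exposes an optional apiVersion parameter alongside the existing V1 default (model, token, location, and project values are placeholders):

    using System.Threading.Tasks;
    using Microsoft.SemanticKernel.Connectors.Google;

    var service = new VertexAIGeminiChatCompletionService(
        "gemini-1.5-pro",                     // model id (placeholder)
        () => new ValueTask<string>("token"), // bearer token provider (placeholder)
        "us-central1",                        // location (placeholder)
        "my-project",                         // project id (placeholder)
        apiVersion: VertexAIVersion.V1_Beta); // assumed optional parameter
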
diff --git a/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Models/GeminiRequest.cs b/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Models/GeminiRequest.cs
index 2ebda2c2a0de..aada46854846 100644
--- a/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Models/GeminiRequest.cs
+++ b/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Models/GeminiRequest.cs
@@ -42,6 +42,10 @@ internal sealed class GeminiRequest
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public GeminiContent? SystemInstruction { get; set; }
 
+    [JsonPropertyName("cachedContent")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public string? CachedContent { get; set; }
+
     public void AddFunction(GeminiFunction function)
     {
         // NOTE: Currently Gemini only supports one tool i.e. function calling.
@@ -67,6 +71,7 @@ public static GeminiRequest FromPromptAndExecutionSettings(
         GeminiRequest obj = CreateGeminiRequest(prompt);
         AddSafetySettings(executionSettings, obj);
         AddConfiguration(executionSettings, obj);
+        AddAdditionalBodyFields(executionSettings, obj);
         return obj;
     }
 
@@ -83,6 +88,7 @@ public static GeminiRequest FromChatHistoryAndExecutionSettings(
         GeminiRequest obj = CreateGeminiRequest(chatHistory);
         AddSafetySettings(executionSettings, obj);
         AddConfiguration(executionSettings, obj);
+        AddAdditionalBodyFields(executionSettings, obj);
         return obj;
     }
 
@@ -211,6 +217,7 @@ private static List CreateGeminiParts(ChatMessageContent content)
     {
         TextContent textContent => new GeminiPart { Text = textContent.Text },
         ImageContent imageContent => CreateGeminiPartFromImage(imageContent),
+        AudioContent audioContent => CreateGeminiPartFromAudio(audioContent),
         _ => throw new NotSupportedException($"Unsupported content type. {item.GetType().Name} is not supported by Gemini.")
     };
 
@@ -250,6 +257,42 @@ private static string GetMimeTypeFromImageContent(ImageContent imageContent)
                ?? throw new InvalidOperationException("Image content MimeType is empty.");
     }
 
+    private static GeminiPart CreateGeminiPartFromAudio(AudioContent audioContent)
+    {
+        // Binary data takes precedence over URI.
+        if (audioContent.Data is { IsEmpty: false })
+        {
+            return new GeminiPart
+            {
+                InlineData = new GeminiPart.InlineDataPart
+                {
+                    MimeType = GetMimeTypeFromAudioContent(audioContent),
+                    InlineData = Convert.ToBase64String(audioContent.Data.Value.ToArray())
+                }
+            };
+        }
+
+        if (audioContent.Uri is not null)
+        {
+            return new GeminiPart
+            {
+                FileData = new GeminiPart.FileDataPart
+                {
+                    MimeType = GetMimeTypeFromAudioContent(audioContent),
+                    FileUri = audioContent.Uri ?? throw new InvalidOperationException("Audio content URI is empty.")
+                }
+            };
+        }
+
+        throw new InvalidOperationException("Audio content does not contain any data or uri.");
+    }
+
+    private static string GetMimeTypeFromAudioContent(AudioContent audioContent)
+    {
+        return audioContent.MimeType
+               ?? throw new InvalidOperationException("Audio content MimeType is empty.");
+    }
+
     private static void AddConfiguration(GeminiPromptExecutionSettings executionSettings, GeminiRequest request)
     {
         request.Configuration = new ConfigurationElement
@@ -318,6 +361,11 @@ private static void AddSafetySettings(GeminiPromptExecutionSettings executionSet
             => new GeminiSafetySetting(s.Category, s.Threshold)).ToList();
     }
 
+    private static void AddAdditionalBodyFields(GeminiPromptExecutionSettings executionSettings, GeminiRequest request)
+    {
+        request.CachedContent = executionSettings.CachedContent;
+    }
+
     internal sealed class ConfigurationElement
     {
         [JsonPropertyName("temperature")]
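
To illustrate the two audio paths handled by CreateGeminiPartFromAudio, a brief sketch mirroring the unit test above: a URI-based AudioContent becomes a fileData part, while inline bytes become a base64-encoded inlineData part (the URL and bytes are placeholders):

    using System;
    using Microsoft.SemanticKernel;
    using Microsoft.SemanticKernel.ChatCompletion;

    var chatHistory = new ChatHistory();

    // Referenced audio: serialized as a fileData part carrying the file URI.
    chatHistory.AddUserMessage(contentItems:
        [new AudioContent(new Uri("https://example.com/briefing.wav")) { MimeType = "audio/wav" }]);

    // Inline audio: serialized as an inlineData part with base64-encoded bytes.
    ReadOnlyMemory<byte> bytes = new byte[] { 0x00, 0x01, 0x02, 0x03 };
    chatHistory.AddUserMessage(contentItems: [new AudioContent(bytes, "audio/mp3")]);
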
diff --git a/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs
index fab00f01e11d..daa8ea629a5e 100644
--- a/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs
@@ -27,6 +27,7 @@ public sealed class GeminiPromptExecutionSettings : PromptExecutionSettings
     private bool? _audioTimestamp;
     private string? _responseMimeType;
     private object? _responseSchema;
+    private string? _cachedContent;
     private IList? _safetySettings;
     private GeminiToolCallBehavior? _toolCallBehavior;
 
@@ -41,6 +42,7 @@ public sealed class GeminiPromptExecutionSettings : PromptExecutionSettings
     /// Range is 0.0 to 1.0.
     /// 
     [JsonPropertyName("temperature")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public double? Temperature
     {
         get => this._temperature;
@@ -56,6 +58,7 @@ public double? Temperature
     /// The higher the TopP, the more diverse the completion.
     /// 
     [JsonPropertyName("top_p")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public double? TopP
     {
         get => this._topP;
@@ -71,6 +74,7 @@ public double? TopP
     /// The TopK property represents the maximum value of a collection or dataset.
     /// 
     [JsonPropertyName("top_k")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? TopK
     {
         get => this._topK;
@@ -85,6 +89,7 @@ public int? TopK
     /// The maximum number of tokens to generate in the completion.
     /// 
     [JsonPropertyName("max_tokens")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? MaxTokens
     {
         get => this._maxTokens;
@@ -99,6 +104,7 @@ public int? MaxTokens
     /// The count of candidates. Possible values range from 1 to 8.
     /// 
     [JsonPropertyName("candidate_count")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? CandidateCount
     {
         get => this._candidateCount;
@@ -114,6 +120,7 @@ public int? CandidateCount
     /// Maximum number of stop sequences is 5.
     /// 
     [JsonPropertyName("stop_sequences")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IList? StopSequences
     {
         get => this._stopSequences;
@@ -128,6 +135,7 @@ public IList? StopSequences
     /// Represents a list of safety settings.
     /// 
     [JsonPropertyName("safety_settings")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IList? SafetySettings
     {
         get => this._safetySettings;
@@ -180,6 +188,7 @@ public GeminiToolCallBehavior? ToolCallBehavior
     /// if enabled, audio timestamp will be included in the request to the model.
     /// 
     [JsonPropertyName("audio_timestamp")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public bool? AudioTimestamp
     {
         get => this._audioTimestamp;
@@ -198,6 +207,7 @@ public bool? AudioTimestamp
     /// 3. text/x.enum: For classification tasks, output an enum value as defined in the response schema.
     /// 
     [JsonPropertyName("response_mimetype")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? ResponseMimeType
     {
         get => this._responseMimeType;
@@ -234,6 +244,23 @@ public object? ResponseSchema
         }
     }
 
+    /// 
+    /// Optional. The name of the cached content used as context to serve the prediction.
+    /// Note: only used in explicit caching, where users can have control over caching (e.g. what content to cache) and enjoy guaranteed cost savings.
+    /// Format: projects/{project}/locations/{location}/cachedContents/{cachedContent}
+    /// 
+    [JsonPropertyName("cached_content")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public string? CachedContent
+    {
+        get => this._cachedContent;
+        set
+        {
+            this.ThrowIfFrozen();
+            this._cachedContent = value;
+        }
+    }
+
     /// 
     public override void Freeze()
     {
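
A usage sketch for the new CachedContent setting, assuming chatService is an already-constructed Gemini IChatCompletionService; the resource name is a placeholder in the documented projects/{project}/locations/{location}/cachedContents/{cachedContent} format, and the cached content must have been created beforehand via explicit caching:

    using Microsoft.SemanticKernel.ChatCompletion;
    using Microsoft.SemanticKernel.Connectors.Google;

    var settings = new GeminiPromptExecutionSettings
    {
        // Placeholder resource name for previously cached context.
        CachedContent = "projects/my-project/locations/us-central1/cachedContents/my-cache"
    };

    // 'chatService' is assumed to be a configured Gemini chat completion service.
    var reply = await chatService.GetChatMessageContentAsync("Answer using the cached context.", settings);
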
diff --git a/dotnet/src/Connectors/Connectors.Google/VertexAIVersion.cs b/dotnet/src/Connectors/Connectors.Google/VertexAIVersion.cs
index 8e0a894e9f90..998910d8db42 100644
--- a/dotnet/src/Connectors/Connectors.Google/VertexAIVersion.cs
+++ b/dotnet/src/Connectors/Connectors.Google/VertexAIVersion.cs
@@ -12,5 +12,10 @@ public enum VertexAIVersion
     /// 
     /// Represents the V1 version of the Vertex AI API.
     /// 
-    V1
+    V1,
+
+    /// 
+    /// Represents the V1-beta version of the Vertex AI API.
+    /// 
+    V1_Beta
 }
diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs
index 6984951fdc90..c23c52b68760 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs
@@ -126,7 +126,7 @@ IAsyncEnumerable GetBatchFromNamespaceAsync(
     ///  if true, the embedding will be returned in the memory record.
     /// 
     ///  the memory records that match the filter.
-    public IAsyncEnumerable GetBatchWithFilterAsync(
+    IAsyncEnumerable GetBatchWithFilterAsync(
         string indexName,
         Dictionary filter,
         int limit = 10,
@@ -182,7 +182,7 @@ Task RemoveWithDocumentIdAsync(
     ///  the namespace to remove from.
     /// 
     /// 
-    public Task RemoveWithDocumentIdBatchAsync(
+    Task RemoveWithDocumentIdBatchAsync(
         string indexName,
         IEnumerable documentIds,
         string indexNamespace = "",
diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs
index 3fb62b667a92..020aa46dbda6 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs
@@ -19,7 +19,7 @@ internal interface IPostgresVectorStoreDbClient
     /// 
     /// The  used to connect to the database.
     /// 
-    public NpgsqlDataSource DataSource { get; }
+    NpgsqlDataSource DataSource { get; }
 
     /// 
     /// Check if a table exists.
diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs
index 18aa5bf54901..521dc5633cb0 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs
@@ -149,7 +149,7 @@ public PostgresSqlCommandInfo BuildCreateVectorIndexCommand(string schema, strin
 
         return new PostgresSqlCommandInfo(
             commandText: $@"
-                CREATE INDEX {indexName} ON {schema}.""{tableName}"" USING {indexTypeName} (""{vectorColumnName}"" {indexOps});"
+                CREATE INDEX ""{indexName}"" ON {schema}.""{tableName}"" USING {indexTypeName} (""{vectorColumnName}"" {indexOps});"
         );
     }
 
diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs
index 3078e79c2113..aa9ad3f72190 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs
@@ -22,7 +22,7 @@ public interface IQdrantVectorDbClient
     /// Whether to include the vector data in the returned results.
     /// The  to monitor for cancellation requests. The default is .
     /// An asynchronous list of Qdrant vectors records associated with the given IDs
-    public IAsyncEnumerable GetVectorsByIdAsync(string collectionName, IEnumerable pointIds, bool withVectors = false,
+    IAsyncEnumerable GetVectorsByIdAsync(string collectionName, IEnumerable pointIds, bool withVectors = false,
         CancellationToken cancellationToken = default);
 
     /// 
@@ -33,7 +33,7 @@ public IAsyncEnumerable GetVectorsByIdAsync(string collectio
     /// Whether to include the vector data in the returned result.
     /// The  to monitor for cancellation requests. The default is .
     /// The Qdrant vector record associated with the given ID if found, null if not.
-    public Task GetVectorByPayloadIdAsync(string collectionName, string metadataId, bool withVector = false, CancellationToken cancellationToken = default);
+    Task GetVectorByPayloadIdAsync(string collectionName, string metadataId, bool withVector = false, CancellationToken cancellationToken = default);
 
     /// 
     /// Delete vectors by their unique Qdrant IDs.
@@ -41,7 +41,7 @@ public IAsyncEnumerable GetVectorsByIdAsync(string collectio
     /// The name assigned to a collection of vectors.
     /// The unique IDs used to index Qdrant vector entries.
     /// The  to monitor for cancellation requests. The default is .
-    public Task DeleteVectorsByIdAsync(string collectionName, IEnumerable pointIds, CancellationToken cancellationToken = default);
+    Task DeleteVectorsByIdAsync(string collectionName, IEnumerable pointIds, CancellationToken cancellationToken = default);
 
     /// 
     /// Delete a vector by its unique identifier in the metadata (Qdrant payload).
@@ -49,7 +49,7 @@ public IAsyncEnumerable GetVectorsByIdAsync(string collectio
     /// The name assigned to a collection of vectors.
     /// The unique ID stored in a Qdrant vector entry's metadata.
     /// The  to monitor for cancellation requests. The default is .
-    public Task DeleteVectorByPayloadIdAsync(string collectionName, string metadataId, CancellationToken cancellationToken = default);
+    Task DeleteVectorByPayloadIdAsync(string collectionName, string metadataId, CancellationToken cancellationToken = default);
 
     /// 
     /// Upsert a group of vectors into a collection.
@@ -57,7 +57,7 @@ public IAsyncEnumerable GetVectorsByIdAsync(string collectio
     /// The name assigned to a collection of vectors.
     /// The Qdrant vector records to upsert.
     /// The  to monitor for cancellation requests. The default is .
-    public Task UpsertVectorsAsync(string collectionName, IEnumerable vectorData, CancellationToken cancellationToken = default);
+    Task UpsertVectorsAsync(string collectionName, IEnumerable vectorData, CancellationToken cancellationToken = default);
 
     /// 
     /// Find the nearest vectors in a collection using vector similarity search.
@@ -69,7 +69,7 @@ public IAsyncEnumerable GetVectorsByIdAsync(string collectio
     /// Whether to include the vector data in the returned results.
     /// Qdrant tags used to filter the results.
     /// The  to monitor for cancellation requests. The default is .
-    public IAsyncEnumerable<(QdrantVectorRecord, double)> FindNearestInCollectionAsync(
+    IAsyncEnumerable<(QdrantVectorRecord, double)> FindNearestInCollectionAsync(
         string collectionName,
         ReadOnlyMemory target,
         double threshold,
@@ -83,25 +83,25 @@ public IAsyncEnumerable GetVectorsByIdAsync(string collectio
     /// 
     /// The name assigned to a collection of vectors.
     /// The  to monitor for cancellation requests. The default is .
-    public Task CreateCollectionAsync(string collectionName, CancellationToken cancellationToken = default);
+    Task CreateCollectionAsync(string collectionName, CancellationToken cancellationToken = default);
 
     /// 
     /// Delete a Qdrant vector collection.
     /// 
     /// The name assigned to a collection of vectors.
     /// The  to monitor for cancellation requests. The default is .
-    public Task DeleteCollectionAsync(string collectionName, CancellationToken cancellationToken = default);
+    Task DeleteCollectionAsync(string collectionName, CancellationToken cancellationToken = default);
 
     /// 
     /// Check if a vector collection exists.
     /// 
     /// The name assigned to a collection of vectors.
     /// The  to monitor for cancellation requests. The default is .
-    public Task DoesCollectionExistAsync(string collectionName, CancellationToken cancellationToken = default);
+    Task DoesCollectionExistAsync(string collectionName, CancellationToken cancellationToken = default);
 
     /// 
     /// List all vector collections.
     /// 
     /// The  to monitor for cancellation requests. The default is .
-    public IAsyncEnumerable ListCollectionsAsync(CancellationToken cancellationToken = default);
+    IAsyncEnumerable ListCollectionsAsync(CancellationToken cancellationToken = default);
 }
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaKernelBuilderExtensionsTests.cs
index 901247b95641..ad6f1315402e 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaKernelBuilderExtensionsTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaKernelBuilderExtensionsTests.cs
@@ -1,11 +1,18 @@
 // Copyright (c) Microsoft. All rights reserved.
 
 using System;
+using System.IO;
+using System.Net;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.DependencyInjection;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel.Connectors.Ollama;
 using Microsoft.SemanticKernel.Embeddings;
 using Microsoft.SemanticKernel.TextGeneration;
+using OllamaSharp;
 using Xunit;
 
 namespace SemanticKernel.Connectors.Ollama.UnitTests.Extensions;
@@ -54,4 +61,155 @@ public void AddOllamaTextEmbeddingGenerationCreatesService()
         Assert.NotNull(kernel);
         Assert.NotNull(service);
     }
+
+    [Theory]
+    [MemberData(nameof(AddOllamaApiClientScenarios))]
+    public async Task AddOllamaApiClientEmbeddingsFromServiceCollectionAsync(ServiceCollectionRegistration registration)
+    {
+        using var myHttpClientHandler = new FakeHttpMessageHandler(File.ReadAllText("TestData/embeddings_test_response.json"));
+        using var httpClient = new HttpClient(myHttpClientHandler) { BaseAddress = new Uri("http://localhost:11434"), };
+        using var client = new OllamaApiClient(httpClient);
+        var builder = Kernel.CreateBuilder();
+        var services = builder.Services;
+
+        string? serviceId = null;
+        switch (registration)
+        {
+            case ServiceCollectionRegistration.KeyedOllamaApiClient:
+                services.AddKeyedSingleton(serviceId = "model", client);
+                break;
+            case ServiceCollectionRegistration.KeyedIOllamaApiClient:
+                services.AddKeyedSingleton(serviceId = "model", client);
+                break;
+            case ServiceCollectionRegistration.OllamaApiClient:
+                services.AddSingleton(client);
+                break;
+            case ServiceCollectionRegistration.Endpoint:
+                services.AddSingleton(client);
+                break;
+        }
+
+        services.AddOllamaTextEmbeddingGeneration(serviceId: serviceId);
+        var serviceProvider = services.BuildServiceProvider();
+
+        var kernel = builder.Build();
+
+        ITextEmbeddingGenerationService service = kernel.GetRequiredService(serviceId);
+
+        Assert.NotNull(service);
+
+        await service.GenerateEmbeddingsAsync(["text"]);
+
+        Assert.Equal(1, myHttpClientHandler.InvokedCount);
+    }
+
+    [Theory]
+    [MemberData(nameof(AddOllamaApiClientScenarios))]
+    public async Task AddOllamaApiClientChatCompletionFromServiceCollectionAsync(ServiceCollectionRegistration registration)
+    {
+        using var myHttpClientHandler = new FakeHttpMessageHandler(File.ReadAllText("TestData/chat_completion_test_response.txt"));
+        using var httpClient = new HttpClient(myHttpClientHandler) { BaseAddress = new Uri("http://localhost:11434"), };
+        using var client = new OllamaApiClient(httpClient);
+        var builder = Kernel.CreateBuilder();
+        var services = builder.Services;
+
+        string? serviceId = null;
+        switch (registration)
+        {
+            case ServiceCollectionRegistration.KeyedOllamaApiClient:
+                services.AddKeyedSingleton(serviceId = "model", client);
+                break;
+            case ServiceCollectionRegistration.KeyedIOllamaApiClient:
+                services.AddKeyedSingleton(serviceId = "model", client);
+                break;
+            case ServiceCollectionRegistration.OllamaApiClient:
+                services.AddSingleton(client);
+                break;
+            case ServiceCollectionRegistration.Endpoint:
+                services.AddSingleton(client);
+                break;
+        }
+
+        builder.AddOllamaChatCompletion(serviceId: serviceId);
+        var kernel = builder.Build();
+
+        IChatCompletionService service = kernel.GetRequiredService(serviceId);
+
+        Assert.NotNull(service);
+
+        await service.GetChatMessageContentsAsync(new());
+
+        Assert.Equal(1, myHttpClientHandler.InvokedCount);
+    }
+
+    [Theory]
+    [MemberData(nameof(AddOllamaApiClientScenarios))]
+    public async Task AddOllamaApiClientTextGenerationFromServiceCollectionAsync(ServiceCollectionRegistration registration)
+    {
+        using var myHttpClientHandler = new FakeHttpMessageHandler(File.ReadAllText("TestData/chat_completion_test_response.txt"));
+        using var httpClient = new HttpClient(myHttpClientHandler) { BaseAddress = new Uri("http://localhost:11434"), };
+        using var client = new OllamaApiClient(httpClient, "model");
+        var builder = Kernel.CreateBuilder();
+        var services = builder.Services;
+
+        string? serviceId = null;
+        switch (registration)
+        {
+            case ServiceCollectionRegistration.KeyedOllamaApiClient:
+                services.AddKeyedSingleton(serviceId = "model", client);
+                break;
+            case ServiceCollectionRegistration.KeyedIOllamaApiClient:
+                services.AddKeyedSingleton(serviceId = "model", client);
+                break;
+            case ServiceCollectionRegistration.OllamaApiClient:
+                services.AddSingleton(client);
+                break;
+            case ServiceCollectionRegistration.Endpoint:
+                services.AddSingleton(client);
+                break;
+        }
+
+        builder.AddOllamaTextGeneration(serviceId: serviceId);
+        var kernel = builder.Build();
+
+        ITextGenerationService service = kernel.GetRequiredService(serviceId);
+
+        Assert.NotNull(service);
+
+        await service.GetStreamingTextContentsAsync("test prompt").GetAsyncEnumerator().MoveNextAsync();
+
+        Assert.Equal(1, myHttpClientHandler.InvokedCount);
+    }
+
+    public enum ServiceCollectionRegistration
+    {
+        KeyedOllamaApiClient,
+        KeyedIOllamaApiClient,
+        OllamaApiClient,
+        Endpoint,
+    }
+
+    public static TheoryData AddOllamaApiClientScenarios => new()
+    {
+        { ServiceCollectionRegistration.KeyedOllamaApiClient },
+        { ServiceCollectionRegistration.KeyedIOllamaApiClient },
+        { ServiceCollectionRegistration.OllamaApiClient },
+        { ServiceCollectionRegistration.Endpoint },
+    };
+
+    private sealed class FakeHttpMessageHandler(string responseContent) : HttpMessageHandler
+    {
+        public int InvokedCount { get; private set; }
+
+        protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
+        {
+            this.InvokedCount++;
+
+            return Task.FromResult(
+                new HttpResponseMessage(HttpStatusCode.OK)
+                {
+                    Content = new StringContent(responseContent)
+                });
+        }
+    }
 }
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaServiceCollectionExtensionsTests.cs
index d68ae6548e32..c22d1869954f 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaServiceCollectionExtensionsTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaServiceCollectionExtensionsTests.cs
@@ -1,12 +1,18 @@
 // Copyright (c) Microsoft. All rights reserved.
 
 using System;
+using System.IO;
+using System.Net;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel.Connectors.Ollama;
 using Microsoft.SemanticKernel.Embeddings;
 using Microsoft.SemanticKernel.TextGeneration;
+using OllamaSharp;
 using Xunit;
 
 namespace SemanticKernel.Connectors.Ollama.UnitTests.Extensions;
@@ -41,6 +47,32 @@ public void AddOllamaChatCompletionToServiceCollection()
         Assert.NotNull(service);
     }
 
+    [Fact]
+    public void AddOllamaChatCompletionFromServiceCollection()
+    {
+        var services = new ServiceCollection();
+        using var ollamaClient = new OllamaApiClient(new Uri("http://localhost:11434"), "model");
+
+        services.AddSingleton(ollamaClient);
+        services.AddOllamaChatCompletion();
+        var serviceProvider = services.BuildServiceProvider();
+        var service = serviceProvider.GetRequiredService();
+        Assert.NotNull(service);
+    }
+
+    [Fact]
+    public void AddOllamaTextEmbeddingGenerationFromServiceCollection()
+    {
+        var services = new ServiceCollection();
+        using var ollamaClient = new OllamaApiClient(new Uri("http://localhost:11434"), "model");
+
+        services.AddSingleton(ollamaClient);
+        services.AddOllamaTextEmbeddingGeneration();
+        var serviceProvider = services.BuildServiceProvider();
+        var service = serviceProvider.GetRequiredService();
+        Assert.NotNull(service);
+    }
+
     [Fact]
     public void AddOllamaTextEmbeddingsGenerationToServiceCollection()
     {
@@ -52,4 +84,174 @@ public void AddOllamaTextEmbeddingsGenerationToServiceCollection()
 
         Assert.NotNull(service);
     }
+
+    [Theory]
+    [MemberData(nameof(AddOllamaApiClientScenarios))]
+    public async Task AddOllamaApiClientEmbeddingsFromServiceCollectionAsync(ServiceCollectionRegistration registration)
+    {
+        using var myHttpClientHandler = new FakeHttpMessageHandler(File.ReadAllText("TestData/embeddings_test_response.json"));
+        using var httpClient = new HttpClient(myHttpClientHandler) { BaseAddress = new Uri("http://localhost:11434"), };
+        using var client = new OllamaApiClient(httpClient);
+        var services = new ServiceCollection();
+        string? serviceId = null;
+        switch (registration)
+        {
+            case ServiceCollectionRegistration.KeyedOllamaApiClient:
+                services.AddKeyedSingleton(serviceId = "model", client);
+                break;
+            case ServiceCollectionRegistration.KeyedIOllamaApiClient:
+                services.AddKeyedSingleton(serviceId = "model", client);
+                break;
+            case ServiceCollectionRegistration.OllamaApiClient:
+                services.AddSingleton(client);
+                break;
+            case ServiceCollectionRegistration.Endpoint:
+                services.AddSingleton(client);
+                break;
+        }
+
+        services.AddOllamaTextEmbeddingGeneration(serviceId: serviceId);
+        var serviceProvider = services.BuildServiceProvider();
+
+        ITextEmbeddingGenerationService service;
+        if (registration is ServiceCollectionRegistration.KeyedOllamaApiClient
+                         or ServiceCollectionRegistration.KeyedIOllamaApiClient)
+        {
+            service = serviceProvider.GetRequiredKeyedService(serviceId);
+        }
+        else
+        {
+            service = serviceProvider.GetRequiredService();
+        }
+
+        Assert.NotNull(service);
+
+        await service.GenerateEmbeddingsAsync(["text"]);
+
+        Assert.Equal(1, myHttpClientHandler.InvokedCount);
+    }
+
+    [Theory]
+    [MemberData(nameof(AddOllamaApiClientScenarios))]
+    public async Task AddOllamaApiClientChatCompletionFromServiceCollectionAsync(ServiceCollectionRegistration registration)
+    {
+        using var myHttpClientHandler = new FakeHttpMessageHandler(File.ReadAllText("TestData/chat_completion_test_response.txt"));
+        using var httpClient = new HttpClient(myHttpClientHandler) { BaseAddress = new Uri("http://localhost:11434"), };
+        using var client = new OllamaApiClient(httpClient);
+        var services = new ServiceCollection();
+        string? serviceId = null;
+        switch (registration)
+        {
+            case ServiceCollectionRegistration.KeyedOllamaApiClient:
+                services.AddKeyedSingleton(serviceId = "model", client);
+                break;
+            case ServiceCollectionRegistration.KeyedIOllamaApiClient:
+                services.AddKeyedSingleton(serviceId = "model", client);
+                break;
+            case ServiceCollectionRegistration.OllamaApiClient:
+                services.AddSingleton(client);
+                break;
+            case ServiceCollectionRegistration.Endpoint:
+                services.AddSingleton(client);
+                break;
+        }
+
+        services.AddOllamaChatCompletion(serviceId: serviceId);
+        var serviceProvider = services.BuildServiceProvider();
+
+        IChatCompletionService service;
+        if (registration is ServiceCollectionRegistration.KeyedOllamaApiClient
+                         or ServiceCollectionRegistration.KeyedIOllamaApiClient)
+        {
+            service = serviceProvider.GetRequiredKeyedService(serviceId);
+        }
+        else
+        {
+            service = serviceProvider.GetRequiredService();
+        }
+
+        Assert.NotNull(service);
+
+        await service.GetChatMessageContentsAsync(new());
+
+        Assert.Equal(1, myHttpClientHandler.InvokedCount);
+    }
+
+    [Theory]
+    [MemberData(nameof(AddOllamaApiClientScenarios))]
+    public async Task AddOllamaApiClientTextGenerationFromServiceCollectionAsync(ServiceCollectionRegistration registration)
+    {
+        using var myHttpClientHandler = new FakeHttpMessageHandler(File.ReadAllText("TestData/text_generation_test_response_stream.txt"));
+        using var httpClient = new HttpClient(myHttpClientHandler) { BaseAddress = new Uri("http://localhost:11434"), };
+        using var client = new OllamaApiClient(httpClient, "model");
+        var services = new ServiceCollection();
+        string? serviceId = null;
+        switch (registration)
+        {
+            case ServiceCollectionRegistration.KeyedOllamaApiClient:
+                services.AddKeyedSingleton(serviceId = "model", client);
+                break;
+            case ServiceCollectionRegistration.KeyedIOllamaApiClient:
+                services.AddKeyedSingleton(serviceId = "model", client);
+                break;
+            case ServiceCollectionRegistration.OllamaApiClient:
+                services.AddSingleton(client);
+                break;
+            case ServiceCollectionRegistration.Endpoint:
+                services.AddSingleton(client);
+                break;
+        }
+
+        services.AddOllamaTextGeneration(serviceId: serviceId);
+        var serviceProvider = services.BuildServiceProvider();
+
+        ITextGenerationService service;
+        if (registration is ServiceCollectionRegistration.KeyedOllamaApiClient
+                         or ServiceCollectionRegistration.KeyedIOllamaApiClient)
+        {
+            service = serviceProvider.GetRequiredKeyedService(serviceId);
+        }
+        else
+        {
+            service = serviceProvider.GetRequiredService();
+        }
+
+        Assert.NotNull(service);
+
+        await service.GetStreamingTextContentsAsync("test prompt").GetAsyncEnumerator().MoveNextAsync();
+
+        Assert.Equal(1, myHttpClientHandler.InvokedCount);
+    }
+
+    public enum ServiceCollectionRegistration
+    {
+        KeyedOllamaApiClient,
+        KeyedIOllamaApiClient,
+        OllamaApiClient,
+        Endpoint,
+    }
+
+    public static TheoryData AddOllamaApiClientScenarios => new()
+    {
+        { ServiceCollectionRegistration.KeyedOllamaApiClient },
+        { ServiceCollectionRegistration.KeyedIOllamaApiClient },
+        { ServiceCollectionRegistration.OllamaApiClient },
+        { ServiceCollectionRegistration.Endpoint },
+    };
+
+    private sealed class FakeHttpMessageHandler(string responseContent) : HttpMessageHandler
+    {
+        public int InvokedCount { get; private set; }
+
+        protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
+        {
+            this.InvokedCount++;
+
+            return Task.FromResult(
+                new HttpResponseMessage(HttpStatusCode.OK)
+                {
+                    Content = new StringContent(responseContent)
+                });
+        }
+    }
 }
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs
index dab3a80976cf..18df66d5cea2 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs
@@ -2,11 +2,6 @@
 
 using System;
 using System.Net.Http;
-using Microsoft.Extensions.DependencyInjection;
-using Microsoft.Extensions.Logging;
-using Microsoft.SemanticKernel.Connectors.Ollama;
-using Microsoft.SemanticKernel.Http;
-using Microsoft.SemanticKernel.TextGeneration;
 using OllamaSharp;
 
 namespace Microsoft.SemanticKernel;
@@ -25,22 +20,17 @@ public static class OllamaKernelBuilderExtensions
     /// The model for text generation.
     /// The endpoint to Ollama hosted service.
     /// The optional service ID.
-    /// The optional custom HttpClient.
     /// The updated kernel builder.
     public static IKernelBuilder AddOllamaTextGeneration(
         this IKernelBuilder builder,
         string modelId,
         Uri endpoint,
-        string? serviceId = null,
-        HttpClient? httpClient = null)
+        string? serviceId = null)
     {
         Verify.NotNull(builder);
 
-        builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
-            new OllamaTextGenerationService(
-                modelId: modelId,
-                endpoint: endpoint,
-                loggerFactory: serviceProvider.GetService()));
+        builder.Services.AddOllamaTextGeneration(modelId, endpoint, serviceId);
+
         return builder;
     }
 
@@ -60,11 +50,8 @@ public static IKernelBuilder AddOllamaTextGeneration(
     {
         Verify.NotNull(builder);
 
-        builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
-            new OllamaTextGenerationService(
-                modelId: modelId,
-                httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider),
-                loggerFactory: serviceProvider.GetService()));
+        builder.Services.AddOllamaTextGeneration(modelId, httpClient, serviceId);
+
         return builder;
     }
 
@@ -84,11 +71,27 @@ public static IKernelBuilder AddOllamaTextGeneration(
     {
         Verify.NotNull(builder);
 
-        builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
-            new OllamaTextGenerationService(
-                modelId: modelId,
-                ollamaClient: ollamaClient,
-                loggerFactory: serviceProvider.GetService()));
+        builder.Services.AddOllamaTextGeneration(modelId, ollamaClient, serviceId);
+
+        return builder;
+    }
+
+    /// 
+    /// Add Ollama Text Generation service to the kernel builder.
+    /// 
+    /// The kernel builder.
+    /// The Ollama Sharp library client.
+    /// The optional service ID.
+    /// The updated kernel builder.
+    public static IKernelBuilder AddOllamaTextGeneration(
+        this IKernelBuilder builder,
+        OllamaApiClient? ollamaClient = null,
+        string? serviceId = null)
+    {
+        Verify.NotNull(builder);
+
+        builder.Services.AddOllamaTextGeneration(ollamaClient, serviceId);
+
         return builder;
     }
 
@@ -148,7 +151,7 @@ public static IKernelBuilder AddOllamaChatCompletion(
     /// The updated kernel builder.
     public static IKernelBuilder AddOllamaChatCompletion(
         this IKernelBuilder builder,
-        OllamaApiClient ollamaClient,
+        OllamaApiClient? ollamaClient = null,
         string? serviceId = null)
     {
         Verify.NotNull(builder);
@@ -213,7 +216,7 @@ public static IKernelBuilder AddOllamaTextEmbeddingGeneration(
     /// The updated kernel builder.
     public static IKernelBuilder AddOllamaTextEmbeddingGeneration(
         this IKernelBuilder builder,
-        OllamaApiClient ollamaClient,
+        OllamaApiClient? ollamaClient = null,
         string? serviceId = null)
     {
         Verify.NotNull(builder);
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs
index 960466bd9f5d..220737be2749 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs
@@ -38,10 +38,12 @@ public static IServiceCollection AddOllamaTextGeneration(
         Verify.NotNull(services);
 
         return services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
-            new OllamaTextGenerationService(
+        {
+            return new OllamaTextGenerationService(
                 modelId: modelId,
                 endpoint: endpoint,
-                loggerFactory: serviceProvider.GetService()));
+                loggerFactory: serviceProvider.GetService<ILoggerFactory>());
+        });
     }
 
     /// 
@@ -61,10 +63,12 @@ public static IServiceCollection AddOllamaTextGeneration(
         Verify.NotNull(services);
 
         return services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
-            new OllamaTextGenerationService(
+        {
+            return new OllamaTextGenerationService(
                 modelId: modelId,
-                httpClient: HttpClientProvider.GetHttpClient(serviceProvider),
-                loggerFactory: serviceProvider.GetService()));
+                httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider),
+                loggerFactory: serviceProvider.GetService<ILoggerFactory>());
+        });
     }
 
     /// 
@@ -84,10 +88,47 @@ public static IServiceCollection AddOllamaTextGeneration(
         Verify.NotNull(services);
 
         return services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
-            new OllamaTextGenerationService(
+        {
+            var loggerFactory = serviceProvider.GetService<ILoggerFactory>();
+
+            return new OllamaTextGenerationService(
                 modelId: modelId,
                 ollamaClient: ollamaClient,
-                loggerFactory: serviceProvider.GetService()));
+                loggerFactory: loggerFactory);
+        });
+    }
+
+    /// 
+    /// Add Ollama Text Generation service to the service collection.
+    /// 
+    /// The target service collection.
+    /// The Ollama Sharp library client.
+    /// The optional service ID.
+    /// The updated service collection.
+    public static IServiceCollection AddOllamaTextGeneration(
+        this IServiceCollection services,
+        OllamaApiClient? ollamaClient = null,
+        string? serviceId = null)
+    {
+        Verify.NotNull(services);
+
+        return services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
+        {
+            var loggerFactory = serviceProvider.GetService<ILoggerFactory>();
+            ollamaClient ??= serviceProvider.GetKeyedService<OllamaApiClient>(serviceId);
+            ollamaClient ??= serviceProvider.GetKeyedService<IOllamaApiClient>(serviceId) as OllamaApiClient;
+            ollamaClient ??= serviceProvider.GetService<OllamaApiClient>();
+            ollamaClient ??= serviceProvider.GetRequiredService<IOllamaApiClient>() as OllamaApiClient;
+
+            if (ollamaClient is null)
+            {
+                throw new InvalidOperationException($"No {nameof(IOllamaApiClient)} implementations found in the service collection.");
+            }
+
+            return new OllamaTextGenerationService(
+                ollamaClient: ollamaClient,
+                loggerFactory: loggerFactory);
+        });
     }
 
     #endregion
@@ -171,7 +212,7 @@ public static IServiceCollection AddOllamaChatCompletion(
     /// The updated kernel builder.
     public static IServiceCollection AddOllamaChatCompletion(
         this IServiceCollection services,
-        OllamaApiClient ollamaClient,
+        OllamaApiClient? ollamaClient = null,
         string? serviceId = null)
     {
         Verify.NotNull(services);
@@ -179,6 +220,15 @@ public static IServiceCollection AddOllamaChatCompletion(
         return services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
         {
             var loggerFactory = serviceProvider.GetService();
+            ollamaClient ??= serviceProvider.GetKeyedService<OllamaApiClient>(serviceId);
+            ollamaClient ??= serviceProvider.GetKeyedService<IOllamaApiClient>(serviceId) as OllamaApiClient;
+            ollamaClient ??= serviceProvider.GetService<OllamaApiClient>();
+            ollamaClient ??= serviceProvider.GetRequiredService<IOllamaApiClient>() as OllamaApiClient;
+
+            if (ollamaClient is null)
+            {
+                throw new InvalidOperationException($"No {nameof(IOllamaApiClient)} implementations found in the service collection.");
+            }
 
             var builder = ((IChatClient)ollamaClient)
                 .AsBuilder()
@@ -274,7 +324,7 @@ public static IServiceCollection AddOllamaTextEmbeddingGeneration(
     /// The updated kernel builder.
     public static IServiceCollection AddOllamaTextEmbeddingGeneration(
         this IServiceCollection services,
-        OllamaApiClient ollamaClient,
+        OllamaApiClient? ollamaClient = null,
         string? serviceId = null)
     {
         Verify.NotNull(services);
@@ -282,6 +332,15 @@ public static IServiceCollection AddOllamaTextEmbeddingGeneration(
         return services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
         {
             var loggerFactory = serviceProvider.GetService();
+            ollamaClient ??= serviceProvider.GetKeyedService<OllamaApiClient>(serviceId);
+            ollamaClient ??= serviceProvider.GetKeyedService<IOllamaApiClient>(serviceId) as OllamaApiClient;
+            ollamaClient ??= serviceProvider.GetService<OllamaApiClient>();
+            ollamaClient ??= serviceProvider.GetRequiredService<IOllamaApiClient>() as OllamaApiClient;
+
+            if (ollamaClient is null)
+            {
+                throw new InvalidOperationException($"No {nameof(IOllamaApiClient)} implementations found in the service collection.");
+            }
 
             var builder = ((IEmbeddingGenerator<string, Embedding<float>>)ollamaClient)
                 .AsBuilder();
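
With the nullable-client overloads above, the Ollama client no longer has to be passed explicitly: when ollamaClient is null, the extension resolves it from the service provider (keyed OllamaApiClient, keyed IOllamaApiClient, then their unkeyed counterparts). A minimal usage sketch, assuming a locally hosted Ollama endpoint and model name (both placeholders):

    using System;
    using Microsoft.Extensions.DependencyInjection;
    using Microsoft.SemanticKernel;
    using OllamaSharp;

    var builder = Kernel.CreateBuilder();

    // Register the OllamaSharp client once; the connector overloads resolve it from the container.
    builder.Services.AddSingleton<IOllamaApiClient>(
        new OllamaApiClient(new Uri("http://localhost:11434"), "llama3.2"));

    builder.AddOllamaChatCompletion();   // no client argument: resolved from DI
    builder.AddOllamaTextGeneration();   // same for text generation

    var kernel = builder.Build();
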
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs
index 4dda6cd9a351..bf05ca58797e 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs
@@ -65,6 +65,18 @@ public OllamaTextGenerationService(
     {
     }
 
+    /// 
+    /// Initializes a new instance of the OllamaTextGenerationService class.
+    /// 
+    /// The Ollama API client.
+    /// Optional logger factory to be used for logging.
+    public OllamaTextGenerationService(
+        OllamaApiClient ollamaClient,
+        ILoggerFactory? loggerFactory = null)
+        : base(ollamaClient.SelectedModel, ollamaClient, loggerFactory)
+    {
+    }
+
     /// 
     public IReadOnlyDictionary Attributes => this.AttributesInternal;
 
diff --git a/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIChatCompletionService.cs b/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIChatCompletionService.cs
index 8bf04a9c13a6..b26ecbeb6b0d 100644
--- a/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIChatCompletionService.cs
+++ b/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIChatCompletionService.cs
@@ -89,13 +89,13 @@ private async IAsyncEnumerable RunInferenceAsync(ChatHistory chatHistory
         OnnxRuntimeGenAIPromptExecutionSettings onnxPromptExecutionSettings = this.GetOnnxPromptExecutionSettingsSettings(executionSettings);
 
         var prompt = this.GetPrompt(chatHistory, onnxPromptExecutionSettings);
-        var tokens = this.GetTokenizer().Encode(prompt);
+        using var tokens = this.GetTokenizer().Encode(prompt);
 
         using var generatorParams = new GeneratorParams(this.GetModel());
         this.UpdateGeneratorParamsFromPromptExecutionSettings(generatorParams, onnxPromptExecutionSettings);
-        generatorParams.SetInputSequences(tokens);
 
         using var generator = new Generator(this.GetModel(), generatorParams);
+        generator.AppendTokenSequences(tokens);
 
         bool removeNextTokenStartingWithSpace = true;
         while (!generator.IsDone())
@@ -104,7 +104,6 @@ private async IAsyncEnumerable RunInferenceAsync(ChatHistory chatHistory
 
             yield return await Task.Run(() =>
             {
-                generator.ComputeLogits();
                 generator.GenerateNextToken();
 
                 var outputTokens = generator.GetSequence(0);
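
For context, the updated OnnxRuntimeGenAI generation loop looks roughly like the sketch below: the tokenizer output is now disposed, AppendTokenSequences replaces SetInputSequences, and the separate ComputeLogits call is gone. This assumes a recent Microsoft.ML.OnnxRuntimeGenAI package; the model path and prompt are placeholders.

    using System;
    using Microsoft.ML.OnnxRuntimeGenAI;

    using var model = new Model("path/to/model");          // placeholder path
    using var tokenizer = new Tokenizer(model);
    using var tokens = tokenizer.Encode("Hello, world");   // Sequences is disposable

    using var generatorParams = new GeneratorParams(model);
    using var generator = new Generator(model, generatorParams);
    generator.AppendTokenSequences(tokens);                // replaces SetInputSequences

    while (!generator.IsDone())
    {
        generator.GenerateNextToken();                     // ComputeLogits is no longer needed
        var outputTokens = generator.GetSequence(0);
        var newToken = outputTokens.Slice(outputTokens.Length - 1, 1);
        Console.Write(tokenizer.Decode(newToken));
    }
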
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs
index 3fa17f593a4b..19992be01667 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs
@@ -661,6 +661,65 @@ public async Task FilterContextHasValidStreamingFlagAsync(bool isStreaming)
         Assert.Equal(isStreaming, actualStreamingFlag);
     }
 
+    [Fact]
+    public async Task PromptExecutionSettingsArePropagatedFromInvokePromptToFilterContextAsync()
+    {
+        // Arrange
+        this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+        var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => { }, "Function1")]);
+
+        AutoFunctionInvocationContext? actualContext = null;
+
+        var kernel = this.GetKernelWithFilter(plugin, (context, next) =>
+        {
+            actualContext = context;
+            return Task.CompletedTask;
+        });
+
+        var expectedExecutionSettings = new OpenAIPromptExecutionSettings
+        {
+            ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions
+        };
+
+        // Act
+        var result = await kernel.InvokePromptAsync("Test prompt", new(expectedExecutionSettings));
+
+        // Assert
+        Assert.NotNull(actualContext);
+        Assert.Same(expectedExecutionSettings, actualContext!.ExecutionSettings);
+    }
+
+    [Fact]
+    public async Task PromptExecutionSettingsArePropagatedFromInvokePromptStreamingToFilterContextAsync()
+    {
+        // Arrange
+        this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
+
+        var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => { }, "Function1")]);
+
+        AutoFunctionInvocationContext? actualContext = null;
+
+        var kernel = this.GetKernelWithFilter(plugin, (context, next) =>
+        {
+            actualContext = context;
+            return Task.CompletedTask;
+        });
+
+        var expectedExecutionSettings = new OpenAIPromptExecutionSettings
+        {
+            ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions
+        };
+
+        // Act
+        await foreach (var item in kernel.InvokePromptStreamingAsync("Test prompt", new(expectedExecutionSettings)))
+        { }
+
+        // Assert
+        Assert.NotNull(actualContext);
+        Assert.Same(expectedExecutionSettings, actualContext!.ExecutionSettings);
+    }
+
     public void Dispose()
     {
         this._httpClient.Dispose();
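
The two new tests above assert that the PromptExecutionSettings passed to InvokePromptAsync and InvokePromptStreamingAsync are now surfaced on the auto function invocation filter context. A minimal sketch of a filter that reads them (the filter class is illustrative):

    using System;
    using System.Threading.Tasks;
    using Microsoft.SemanticKernel;

    internal sealed class SettingsLoggingFilter : IAutoFunctionInvocationFilter
    {
        public async Task OnAutoFunctionInvocationAsync(
            AutoFunctionInvocationContext context,
            Func<AutoFunctionInvocationContext, Task> next)
        {
            // ExecutionSettings carries the settings supplied by the caller of InvokePromptAsync.
            Console.WriteLine($"Invoking {context.Function.Name} with settings: {context.ExecutionSettings?.ModelId}");

            await next(context);
        }
    }
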
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs
index 74360e542358..d6b83f21a391 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs
@@ -589,6 +589,48 @@ public async Task GetChatMessageContentsUsesPromptAndSettingsCorrectlyAsync()
         Assert.Equal("user", messages[1].GetProperty("role").GetString());
     }
 
+    [Fact]
+    public async Task GetChatMessageContentsUsesDeveloperPromptAndSettingsCorrectlyAsync()
+    {
+        // Arrange
+        const string Prompt = "This is test prompt";
+        const string DeveloperMessage = "This is test system message";
+
+        var service = new OpenAIChatCompletionService("model-id", "api-key", httpClient: this._httpClient);
+        var settings = new OpenAIPromptExecutionSettings() { ChatDeveloperPrompt = DeveloperMessage };
+
+        this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK)
+        {
+            Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json"))
+        };
+
+        IKernelBuilder builder = Kernel.CreateBuilder();
+        builder.Services.AddTransient<IChatCompletionService>((sp) => service);
+        Kernel kernel = builder.Build();
+
+        // Act
+        var result = await kernel.InvokePromptAsync(Prompt, new(settings));
+
+        // Assert
+        Assert.Equal("Test chat response", result.ToString());
+
+        var requestContentByteArray = this._messageHandlerStub.RequestContent;
+
+        Assert.NotNull(requestContentByteArray);
+
+        var requestContent = JsonSerializer.Deserialize<JsonElement>(Encoding.UTF8.GetString(requestContentByteArray));
+
+        var messages = requestContent.GetProperty("messages");
+
+        Assert.Equal(2, messages.GetArrayLength());
+
+        Assert.Equal(DeveloperMessage, messages[0].GetProperty("content").GetString());
+        Assert.Equal("developer", messages[0].GetProperty("role").GetString());
+
+        Assert.Equal(Prompt, messages[1].GetProperty("content").GetString());
+        Assert.Equal("user", messages[1].GetProperty("role").GetString());
+    }
+
     [Fact]
     public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndSettingsCorrectlyAsync()
     {
@@ -962,6 +1004,65 @@ public async Task GetChatMessageInResponseFormatsAsync(string formatType, string
         Assert.NotNull(result);
     }
 
+    [Theory]
+    [InlineData(null, null)]
+    [InlineData("string", "low")]
+    [InlineData("string", "medium")]
+    [InlineData("string", "high")]
+    [InlineData("ChatReasonEffortLevel.Low", "low")]
+    [InlineData("ChatReasonEffortLevel.Medium", "medium")]
+    [InlineData("ChatReasonEffortLevel.High", "high")]
+    public async Task GetChatMessageInReasoningEffortAsync(string? effortType, string? expectedEffortLevel)
+    {
+        // Arrange
+        object? reasoningEffortObject = null;
+        switch (effortType)
+        {
+            case "string":
+                reasoningEffortObject = expectedEffortLevel;
+                break;
+            case "ChatReasonEffortLevel.Low":
+                reasoningEffortObject = ChatReasoningEffortLevel.Low;
+                break;
+            case "ChatReasonEffortLevel.Medium":
+                reasoningEffortObject = ChatReasoningEffortLevel.Medium;
+                break;
+            case "ChatReasonEffortLevel.High":
+                reasoningEffortObject = ChatReasoningEffortLevel.High;
+                break;
+        }
+
+        var modelId = "o1";
+        var sut = new OpenAIChatCompletionService(modelId, "apiKey", httpClient: this._httpClient);
+        OpenAIPromptExecutionSettings executionSettings = new() { ReasoningEffort = reasoningEffortObject };
+
+        this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK)
+        {
+            Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json"))
+        };
+
+        // Act
+        var result = await sut.GetChatMessageContentAsync(this._chatHistoryForTest, executionSettings);
+
+        // Assert
+        Assert.NotNull(result);
+
+        var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!);
+        Assert.NotNull(actualRequestContent);
+
+        var optionsJson = JsonSerializer.Deserialize<JsonElement>(actualRequestContent);
+
+        if (expectedEffortLevel is null)
+        {
+            Assert.False(optionsJson.TryGetProperty("reasoning_effort", out _));
+            return;
+        }
+
+        var requestedReasoningEffort = optionsJson.GetProperty("reasoning_effort").GetString();
+
+        Assert.Equal(expectedEffortLevel, requestedReasoningEffort);
+    }
+
     [Fact(Skip = "Not working running in the console")]
     public async Task GetInvalidResponseThrowsExceptionAndIsCapturedByDiagnosticsAsync()
     {
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs
index 90272b94717c..dda1af38a596 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs
@@ -34,6 +34,10 @@ public void ItCreatesOpenAIExecutionSettingsWithCorrectDefaults()
         Assert.Equal(128, executionSettings.MaxTokens);
         Assert.Null(executionSettings.Store);
         Assert.Null(executionSettings.Metadata);
+        Assert.Null(executionSettings.Seed);
+        Assert.Null(executionSettings.ReasoningEffort);
+        Assert.Null(executionSettings.ChatSystemPrompt);
+        Assert.Null(executionSettings.ChatDeveloperPrompt);
     }
 
     [Fact]
@@ -48,13 +52,15 @@ public void ItUsesExistingOpenAIExecutionSettings()
             PresencePenalty = 0.7,
             StopSequences = ["foo", "bar"],
             ChatSystemPrompt = "chat system prompt",
+            ChatDeveloperPrompt = "chat developer prompt",
             MaxTokens = 128,
             Logprobs = true,
             TopLogprobs = 5,
             TokenSelectionBiases = new Dictionary() { { 1, 2 }, { 3, 4 } },
             Seed = 123456,
             Store = true,
-            Metadata = new Dictionary() { { "foo", "bar" } }
+            Metadata = new Dictionary<string, string>() { { "foo", "bar" } },
+            ReasoningEffort = "high"
         };
 
         // Act
@@ -70,6 +76,9 @@ public void ItUsesExistingOpenAIExecutionSettings()
         Assert.Equal(actualSettings.Seed, executionSettings.Seed);
         Assert.Equal(actualSettings.Store, executionSettings.Store);
         Assert.Equal(actualSettings.Metadata, executionSettings.Metadata);
+        Assert.Equal(actualSettings.ReasoningEffort, executionSettings.ReasoningEffort);
+        Assert.Equal(actualSettings.ChatSystemPrompt, executionSettings.ChatSystemPrompt);
+        Assert.Equal(actualSettings.ChatDeveloperPrompt, executionSettings.ChatDeveloperPrompt);
     }
 
     [Fact]
@@ -112,6 +121,8 @@ public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesSnakeCase()
                 { "results_per_prompt", 2 },
                 { "stop_sequences", new [] { "foo", "bar" } },
                 { "chat_system_prompt", "chat system prompt" },
+                { "chat_developer_prompt", "chat developer prompt" },
+                { "reasoning_effort", "high" },
                 { "max_tokens", 128 },
                 { "token_selection_biases", new Dictionary() { { 1, 2 }, { 3, 4 } } },
                 { "seed", 123456 },
@@ -144,6 +155,8 @@ public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesAsStrings()
                 { "results_per_prompt", "2" },
                 { "stop_sequences", new [] { "foo", "bar" } },
                 { "chat_system_prompt", "chat system prompt" },
+                { "chat_developer_prompt", "chat developer prompt" },
+                { "reasoning_effort", "high" },
                 { "max_tokens", "128" },
                 { "token_selection_biases", new Dictionary() { { "1", "2" }, { "3", "4" } } },
                 { "seed", 123456 },
@@ -174,6 +187,8 @@ public void ItCreatesOpenAIExecutionSettingsFromJsonSnakeCase()
               "results_per_prompt": 2,
               "stop_sequences": [ "foo", "bar" ],
               "chat_system_prompt": "chat system prompt",
+              "chat_developer_prompt": "chat developer prompt",
+              "reasoning_effort": "high",
               "token_selection_biases": { "1": 2, "3": 4 },
               "max_tokens": 128,
               "seed": 123456,
@@ -311,6 +326,8 @@ private static void AssertExecutionSettings(OpenAIPromptExecutionSettings execut
         Assert.Equal(0.7, executionSettings.PresencePenalty);
         Assert.Equal(new string[] { "foo", "bar" }, executionSettings.StopSequences);
         Assert.Equal("chat system prompt", executionSettings.ChatSystemPrompt);
+        Assert.Equal("chat developer prompt", executionSettings.ChatDeveloperPrompt);
+        Assert.Equal("high", executionSettings.ReasoningEffort!.ToString());
         Assert.Equal(new Dictionary() { { 1, 2 }, { 3, 4 } }, executionSettings.TokenSelectionBiases);
         Assert.Equal(128, executionSettings.MaxTokens);
         Assert.Equal(123456, executionSettings.Seed);
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs
index 129e7913b788..c09fbc87f6f3 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs
@@ -200,6 +200,7 @@ internal async Task> GetChatMessageContentsAsy
             // In such cases, we'll return the last message in the chat history.
             var lastMessage = await this.FunctionCallsProcessor.ProcessFunctionCallsAsync(
                 chatMessageContent,
+                chatExecutionSettings,
                 chatHistory,
                 requestIndex,
                 (FunctionCallContent content) => IsRequestableTool(chatOptions.Tools, content),
@@ -384,6 +385,7 @@ internal async IAsyncEnumerable GetStreamingC
             // In such cases, we'll return the last message in the chat history.
             var lastMessage = await this.FunctionCallsProcessor.ProcessFunctionCallsAsync(
                 chatMessageContent,
+                chatExecutionSettings,
                 chatHistory,
                 requestIndex,
                 (FunctionCallContent content) => IsRequestableTool(chatOptions.Tools, content),
@@ -469,6 +471,7 @@ protected virtual ChatCompletionOptions CreateChatCompletionOptions(
             TopLogProbabilityCount = executionSettings.TopLogprobs,
             IncludeLogProbabilities = executionSettings.Logprobs,
             StoredOutputEnabled = executionSettings.Store,
+            ReasoningEffortLevel = GetEffortLevel(executionSettings),
         };
 
         var responseFormat = GetResponseFormat(executionSettings);
@@ -519,6 +522,33 @@ protected virtual ChatCompletionOptions CreateChatCompletionOptions(
         return options;
     }
 
+    protected static ChatReasoningEffortLevel? GetEffortLevel(OpenAIPromptExecutionSettings executionSettings)
+    {
+        var effortLevelObject = executionSettings.ReasoningEffort;
+        if (effortLevelObject is null)
+        {
+            return null;
+        }
+
+        if (effortLevelObject is ChatReasoningEffortLevel effort)
+        {
+            return effort;
+        }
+
+        if (effortLevelObject is string textEffortLevel)
+        {
+            return textEffortLevel.ToUpperInvariant() switch
+            {
+                "LOW" => ChatReasoningEffortLevel.Low,
+                "MEDIUM" => ChatReasoningEffortLevel.Medium,
+                "HIGH" => ChatReasoningEffortLevel.High,
+                _ => throw new NotSupportedException($"The provided reasoning effort '{textEffortLevel}' is not supported.")
+            };
+        }
+
+        throw new NotSupportedException($"The provided reasoning effort '{effortLevelObject.GetType()}' is not supported.");
+    }
+
     /// 
     /// Retrieves the response format based on the provided settings.
     /// 
@@ -589,13 +619,14 @@ private static bool IsRequestableTool(IList tools, FunctionCallContent
     /// 
     /// Optional chat instructions for the AI service
     /// Execution settings
+    /// The role to use for the text. Defaults to the system role.
     /// Chat object
-    private static ChatHistory CreateNewChat(string? text = null, OpenAIPromptExecutionSettings? executionSettings = null)
+    private static ChatHistory CreateNewChat(string? text = null, OpenAIPromptExecutionSettings? executionSettings = null, AuthorRole? textRole = null)
     {
         var chat = new ChatHistory();
 
         // If settings is not provided, create a new chat with the text as the system prompt
-        AuthorRole textRole = AuthorRole.System;
+        textRole ??= AuthorRole.System;
 
         if (!string.IsNullOrWhiteSpace(executionSettings?.ChatSystemPrompt))
         {
@@ -603,9 +634,15 @@ private static ChatHistory CreateNewChat(string? text = null, OpenAIPromptExecut
             textRole = AuthorRole.User;
         }
 
+        if (!string.IsNullOrWhiteSpace(executionSettings?.ChatDeveloperPrompt))
+        {
+            chat.AddDeveloperMessage(executionSettings!.ChatDeveloperPrompt!);
+            textRole = AuthorRole.User;
+        }
+
         if (!string.IsNullOrWhiteSpace(text))
         {
-            chat.AddMessage(textRole, text!);
+            chat.AddMessage(textRole.Value, text!);
         }
 
         return chat;
@@ -615,6 +652,11 @@ private static List CreateChatCompletionMessages(OpenAIPromptExecut
     {
         List messages = [];
 
+        if (!string.IsNullOrWhiteSpace(executionSettings.ChatDeveloperPrompt) && !chatHistory.Any(m => m.Role == AuthorRole.Developer))
+        {
+            messages.Add(new DeveloperChatMessage(executionSettings.ChatDeveloperPrompt));
+        }
+
         if (!string.IsNullOrWhiteSpace(executionSettings.ChatSystemPrompt) && !chatHistory.Any(m => m.Role == AuthorRole.System))
         {
             messages.Add(new SystemChatMessage(executionSettings.ChatSystemPrompt));
@@ -630,6 +672,11 @@ private static List CreateChatCompletionMessages(OpenAIPromptExecut
 
     private static List CreateRequestMessages(ChatMessageContent message)
     {
+        if (message.Role == AuthorRole.Developer)
+        {
+            return [new DeveloperChatMessage(message.Content) { ParticipantName = message.AuthorName }];
+        }
+
         if (message.Role == AuthorRole.System)
         {
             return [new SystemChatMessage(message.Content) { ParticipantName = message.AuthorName }];
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs
index add62d564046..bd3187b936d6 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs
@@ -18,6 +18,29 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI;
 [JsonNumberHandling(JsonNumberHandling.AllowReadingFromString)]
 public class OpenAIPromptExecutionSettings : PromptExecutionSettings
 {
+    /// 
+    /// Gets or sets an object specifying the effort level for the model to use when generating the completion.
+    /// 
+    /// 
+    /// Constrains effort on reasoning for reasoning models.
+    /// Reducing reasoning effort can result in faster responses and fewer tokens used on reasoning in a response.
+    /// Possible values are:
+    /// -  values: "low", "medium", "high";
+    /// -  object;
+    /// 
+    [Experimental("SKEXP0010")]
+    [JsonPropertyName("reasoning_effort")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public object? ReasoningEffort
+    {
+        get => this._reasoningEffort;
+        set
+        {
+            this.ThrowIfFrozen();
+            this._reasoningEffort = value;
+        }
+    }
+
     /// 
     /// Temperature controls the randomness of the completion.
     /// The higher the temperature, the more random the completion.
@@ -183,6 +206,24 @@ public string? ChatSystemPrompt
         }
     }
 
+    /// 
+    /// The developer prompt to use when generating text using a chat model.
+    /// 
+    [Experimental("SKEXP0010")]
+    [JsonPropertyName("chat_developer_prompt")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public string? ChatDeveloperPrompt
+    {
+        get => this._chatDeveloperPrompt;
+
+        set
+        {
+            this.ThrowIfFrozen();
+            this._chatDeveloperPrompt = value;
+        }
+    }
+
     /// 
     /// Modify the likelihood of specified tokens appearing in the completion.
     /// 
@@ -410,15 +451,18 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutio
             FunctionChoiceBehavior = this.FunctionChoiceBehavior,
             User = this.User,
             ChatSystemPrompt = this.ChatSystemPrompt,
+            ChatDeveloperPrompt = this.ChatDeveloperPrompt,
             Logprobs = this.Logprobs,
             TopLogprobs = this.TopLogprobs,
             Store = this.Store,
             Metadata = this.Metadata is not null ? new Dictionary(this.Metadata) : null,
+            ReasoningEffort = this.ReasoningEffort
         };
     }
 
     #region private ================================================================================
 
+    private object? _reasoningEffort;
     private double? _temperature;
     private double? _topP;
     private double? _presencePenalty;
@@ -431,6 +475,7 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutio
     private ToolCallBehavior? _toolCallBehavior;
     private string? _user;
     private string? _chatSystemPrompt;
+    private string? _chatDeveloperPrompt;
     private bool? _logprobs;
     private int? _topLogprobs;
     private bool? _store;
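
Put together, the new ChatDeveloperPrompt and ReasoningEffort settings can be used roughly as follows (model id and API key are placeholders; both properties are experimental under SKEXP0010):

    using Microsoft.SemanticKernel;
    using Microsoft.SemanticKernel.Connectors.OpenAI;
    using OpenAI.Chat;

    var kernel = Kernel.CreateBuilder()
        .AddOpenAIChatCompletion("o1", "api-key")            // placeholders
        .Build();

    var settings = new OpenAIPromptExecutionSettings
    {
        ChatDeveloperPrompt = "Answer concisely and cite your sources.",
        ReasoningEffort = ChatReasoningEffortLevel.High,      // or the strings "low" / "medium" / "high"
    };

    var result = await kernel.InvokePromptAsync("Summarize the design document.", new(settings));
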
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/AnyTagEqualToFilterClause.cs b/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/AnyTagEqualToFilterClause.cs
index 49ffce328e5e..f40e63faa940 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/AnyTagEqualToFilterClause.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/AnyTagEqualToFilterClause.cs
@@ -3,7 +3,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-///  which filters by checking if a field consisting of a list of values contains a specific value.
+/// Represents a filter clause that filters by checking if a field consisting of a list of values contains a specific value.
 /// 
 public sealed class AnyTagEqualToFilterClause : FilterClause
 {
@@ -19,12 +19,12 @@ public AnyTagEqualToFilterClause(string fieldName, string value)
     }
 
     /// 
-    /// The name of the field with the list of values.
+    /// Gets the name of the field with the list of values.
     /// 
     public string FieldName { get; private set; }
 
     /// 
-    /// The value that the list should contain.
+    /// Gets the value that the list should contain.
     /// 
     public string Value { get; private set; }
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/EqualToFilterClause.cs b/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/EqualToFilterClause.cs
index a0eb45c0fbe3..89865732bd75 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/EqualToFilterClause.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/EqualToFilterClause.cs
@@ -3,7 +3,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-///  which filters using equality of a field value.
+/// Represents a filter clause that filters using equality of a field value.
 /// 
 public sealed class EqualToFilterClause : FilterClause
 {
@@ -19,12 +19,12 @@ public EqualToFilterClause(string fieldName, object value)
     }
 
     /// 
-    /// Field name to match.
+    /// Gets the field name to match.
     /// 
     public string FieldName { get; private set; }
 
     /// 
-    /// Field value to match.
+    /// Gets the field value to match.
     /// 
     public object Value { get; private set; }
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/FilterClause.cs b/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/FilterClause.cs
index 4392893f16e3..af0c1dac51b3 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/FilterClause.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/FilterClause.cs
@@ -3,7 +3,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Base class for filter clauses.
+/// Defines a base class for filter clauses.
 /// 
 /// 
 /// A  is used to request that the underlying search service should
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs
index 7105b83c8737..38302c7fecc8 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs
@@ -5,12 +5,12 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Attribute to mark a property on a record class as 'data'.
+/// Defines an attribute to mark a property on a record class as 'data'.
 /// 
 /// 
-/// Marking a property as 'data' means that the property is not a key, and not a vector, but optionally
-/// this property may have an associated vector field containing an embedding for this data.
-/// The characteristics defined here will influence how the property is treated by the vector store.
+/// Marking a property as 'data' means that the property is not a key and not a vector. But optionally,
+/// this property can have an associated vector field containing an embedding for this data.
+/// The characteristics defined here influence how the property is treated by the vector store.
 /// 
 [AttributeUsage(AttributeTargets.Property, AllowMultiple = false)]
 public sealed class VectorStoreRecordDataAttribute : Attribute
@@ -18,22 +18,24 @@ public sealed class VectorStoreRecordDataAttribute : Attribute
     /// 
     /// Gets or sets a value indicating whether this data property is filterable.
     /// 
-    /// 
-    /// Default is .
-    /// 
+    /// 
+    /// The default is .
+    /// 
     public bool IsFilterable { get; init; }
 
     /// 
-    /// Gets or sets a value indicating whether this data property is full text searchable.
+    /// Gets or sets a value indicating whether this data property is full-text searchable.
     /// 
-    /// 
-    /// Default is .
-    /// 
+    /// 
+    /// The default is .
+    /// 
     public bool IsFullTextSearchable { get; init; }
 
     /// 
     /// Gets or sets an optional name to use for the property in storage, if different from the property name.
-    /// E.g. the property name might be "MyProperty" but the storage name might be "my_property".
     /// 
+    /// 
+    /// For example, the property name might be "MyProperty" and the storage name might be "my_property".
+    /// 
     public string? StoragePropertyName { get; set; }
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordKeyAttribute.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordKeyAttribute.cs
index 871794872adc..318521355f1b 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordKeyAttribute.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordKeyAttribute.cs
@@ -5,17 +5,19 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Attribute to mark a property on a record class as the key under which the record is stored in a vector store.
+/// Defines an attribute to mark a property on a record class as the key under which the record is stored in a vector store.
 /// 
 /// 
-/// The characteristics defined here will influence how the property is treated by the vector store.
+/// The characteristics defined here influence how the property is treated by the vector store.
 /// 
 [AttributeUsage(AttributeTargets.Property, AllowMultiple = false)]
 public sealed class VectorStoreRecordKeyAttribute : Attribute
 {
     /// 
     /// Gets or sets an optional name to use for the property in storage, if different from the property name.
-    /// E.g. the property name might be "MyProperty" but the storage name might be "my_property".
     /// 
+    /// 
+    /// For example, the property name might be "MyProperty" and the storage name might be "my_property".
+    /// 
     public string? StoragePropertyName { get; set; }
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordVectorAttribute.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordVectorAttribute.cs
index e86a0883574c..a69e50bd7029 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordVectorAttribute.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordVectorAttribute.cs
@@ -5,10 +5,10 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Attribute to mark a property on a record class as a vector.
+/// Defines an attribute to mark a property on a record class as a vector.
 /// 
 /// 
-/// The characteristics defined here will influence how the property is treated by the vector store.
+/// The characteristics defined here influence how the property is treated by the vector store.
 /// 
 [AttributeUsage(AttributeTargets.Property, AllowMultiple = false)]
 public sealed class VectorStoreRecordVectorAttribute : Attribute
@@ -54,10 +54,10 @@ public VectorStoreRecordVectorAttribute(int Dimensions, string? DistanceFunction
     }
 
     /// 
-    /// Gets or sets the number of dimensions that the vector has.
+    /// Gets the number of dimensions that the vector has.
     /// 
     /// 
-    /// This property is required when creating collections, but may be omitted if not using that functionality.
+    /// This property is required when creating collections, but can be omitted if not using that functionality.
     /// If not provided when trying to create a collection, create will fail.
     /// 
     public int? Dimensions { get; private set; }
@@ -65,24 +65,26 @@ public VectorStoreRecordVectorAttribute(int Dimensions, string? DistanceFunction
     /// 
     /// Gets the kind of index to use.
     /// 
+    /// 
+    /// The default value varies by database type. See the documentation of your chosen database connector for more information.
+    /// 
     /// 
-    /// 
-    /// Default varies by database type. See the documentation of your chosen database connector for more information.
-    /// 
     public string? IndexKind { get; private set; }
 
     /// 
     /// Gets the distance function to use when comparing vectors.
     /// 
+    /// 
+    /// The default value varies by database type. See the documentation of your chosen database connector for more information.
+    /// 
     /// 
-    /// 
-    /// Default varies by database type. See the documentation of your chosen database connector for more information.
-    /// 
     public string? DistanceFunction { get; private set; }
 
     /// 
     /// Gets or sets an optional name to use for the property in storage, if different from the property name.
-    /// E.g. the property name might be "MyProperty" but the storage name might be "my_property".
     /// 
+    /// 
+    /// For example, the property name might be "MyProperty" and the storage name might be "my_property".
+    /// 
     public string? StoragePropertyName { get; set; }
 }
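
A record model annotated with the attributes documented above might look like the following sketch (class name, property names, and dimensions are illustrative):

    using System;
    using Microsoft.Extensions.VectorData;

    public sealed class HotelRecord
    {
        [VectorStoreRecordKey(StoragePropertyName = "hotel_id")]
        public string HotelId { get; set; } = string.Empty;

        [VectorStoreRecordData(IsFilterable = true, IsFullTextSearchable = true)]
        public string Description { get; set; } = string.Empty;

        [VectorStoreRecordVector(1536)]
        public ReadOnlyMemory<float> DescriptionEmbedding { get; set; }
    }
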
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/DistanceFunction.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/DistanceFunction.cs
index 11e8b0173d48..8c54411fab58 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/DistanceFunction.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/DistanceFunction.cs
@@ -3,11 +3,11 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines a list of well known distance functions that can be used to compare vectors.
+/// Defines a list of well-known distance functions that can be used to compare vectors.
 /// 
 /// 
-/// Not all Vector Store connectors support all distance functions and some connectors may
-/// support additional distance functions that are not defined here. See the documentation
+/// Not all Vector Store connectors support all distance functions, and some connectors might
+/// support additional distance functions that aren't defined here. See the documentation
 /// for each connector for more information on what is supported.
 /// 
 public static class DistanceFunction
@@ -69,7 +69,7 @@ public static class DistanceFunction
     public const string EuclideanSquaredDistance = nameof(EuclideanSquaredDistance);
 
     /// 
-    /// Number of differences between vectors at each dimensions.
+    /// The number of differences between vectors at each dimension.
     /// 
     public const string Hamming = nameof(Hamming);
 
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/IndexKind.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/IndexKind.cs
index 512b51e54c20..088b31c87262 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/IndexKind.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/IndexKind.cs
@@ -3,11 +3,11 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines a list of well known index types that can be used to index vectors.
+/// Defines a list of well-known index types that can be used to index vectors.
 /// 
 /// 
-/// Not all Vector Store connectors support all index types and some connectors may
-/// support additional index types that are not defined here. See the documentation
+/// Not all Vector Store connectors support all index types, and some connectors might
+/// support additional index types that aren't defined here. See the documentation
 /// for each connector for more information on what is supported.
 /// 
 public static class IndexKind
@@ -16,31 +16,36 @@ public static class IndexKind
     /// Hierarchical Navigable Small World, which performs an approximate nearest neighbour (ANN) search.
     /// 
     /// 
-    /// Lower accuracy than exhaustive k nearest neighbor, but faster and more efficient.
+    /// This search has lower accuracy than exhaustive k nearest neighbor, but is faster and more efficient.
     /// 
     public const string Hnsw = nameof(Hnsw);
 
     /// 
-    /// Does a brute force search to find the nearest neighbors.
-    /// Calculates the distances between all pairs of data points, so has a linear time complexity, that grows directly proportional to the number of points.
-    /// Also referred to as exhaustive k nearest neighbor in some databases.
+    /// Brute force search to find the nearest neighbors.
     /// 
     /// 
-    /// High recall accuracy, but slower and more expensive than HNSW.
-    /// Better with smaller datasets.
+    /// This search calculates the distances between all pairs of data points, so it has a linear time complexity that grows directly proportional to the number of points.
+    /// It's also referred to as exhaustive k nearest neighbor in some databases.
+    /// This search has high recall accuracy, but is slower and more expensive than HNSW.
+    /// It works better with smaller datasets.
     /// 
     public const string Flat = nameof(Flat);
 
     /// 
-    /// Inverted File with Flat Compression. Designed to enhance search efficiency by narrowing the search area through the use of neighbor partitions or clusters.
-    /// Also referred to as approximate nearest neighbor (ANN) search.
+    /// Inverted File with Flat Compression.
     /// 
+    /// 
+    /// This search is designed to enhance search efficiency by narrowing the search area through the use of neighbor partitions or clusters.
+    /// Also referred to as approximate nearest neighbor (ANN) search.
+    /// 
     public const string IvfFlat = nameof(IvfFlat);
 
     /// 
     /// Disk-based Approximate Nearest Neighbor algorithm designed for efficiently searching for approximate nearest neighbors (ANN) in high-dimensional spaces.
-    /// The primary focus of DiskANN is to handle large-scale datasets that cannot fit entirely into memory, leveraging disk storage to store the data while maintaining fast search times.
     /// 
+    /// 
+    /// The primary focus of DiskANN is to handle large-scale datasets that can't fit entirely into memory, leveraging disk storage to store the data while maintaining fast search times.
+    /// 
     public const string DiskAnn = nameof(DiskAnn);
 
     /// 
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDataProperty.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDataProperty.cs
index 29ba283411b9..e3e5c22296b5 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDataProperty.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDataProperty.cs
@@ -8,7 +8,7 @@ namespace Microsoft.Extensions.VectorData;
 /// Defines a data property on a vector store record.
 /// 
 /// 
-/// The characteristics defined here will influence how the property is treated by the vector store.
+/// The characteristics defined here influence how the property is treated by the vector store.
 /// 
 public sealed class VectorStoreRecordDataProperty : VectorStoreRecordProperty
 {
@@ -25,7 +25,7 @@ public VectorStoreRecordDataProperty(string propertyName, Type propertyType)
     /// 
     /// Initializes a new instance of the  class by cloning the given source.
     /// 
-    /// The source to clone
+    /// The source to clone.
     public VectorStoreRecordDataProperty(VectorStoreRecordDataProperty source)
         : base(source)
     {
@@ -36,16 +36,16 @@ public VectorStoreRecordDataProperty(VectorStoreRecordDataProperty source)
     /// 
     /// Gets or sets a value indicating whether this data property is filterable.
     /// 
-    /// 
-    /// Default is .
-    /// 
+    /// 
+    /// The default is .
+    /// 
     public bool IsFilterable { get; init; }
 
     /// 
     /// Gets or sets a value indicating whether this data property is full text searchable.
     /// 
-    /// 
-    /// Default is .
-    /// 
+    /// 
+    /// The default is .
+    /// 
     public bool IsFullTextSearchable { get; init; }
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDefinition.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDefinition.cs
index f159b77d195d..d33d0fd4a145 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDefinition.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDefinition.cs
@@ -5,7 +5,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// A description of the properties of a record stored in a vector store.
+/// Describes the properties of a record stored in a vector store.
 /// 
 /// 
 /// Each property contains additional information about how the property will be treated by the vector store.
@@ -16,7 +16,7 @@ public sealed class VectorStoreRecordDefinition
     private static readonly List s_emptyFields = new();
 
     /// 
-    /// The list of properties that are stored in the record.
+    /// Gets or sets the list of properties that are stored in the record.
     /// 
     public IReadOnlyList Properties { get; init; } = s_emptyFields;
 }
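
The same schema can also be described without attributes through a VectorStoreRecordDefinition, roughly as follows (names and dimensions are illustrative):

    using System;
    using System.Collections.Generic;
    using Microsoft.Extensions.VectorData;

    var definition = new VectorStoreRecordDefinition
    {
        Properties = new List<VectorStoreRecordProperty>
        {
            new VectorStoreRecordKeyProperty("HotelId", typeof(string)),
            new VectorStoreRecordDataProperty("Description", typeof(string))
            {
                IsFilterable = true,
                IsFullTextSearchable = true,
            },
            new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory<float>))
            {
                Dimensions = 1536,
            },
        },
    };
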
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordKeyProperty.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordKeyProperty.cs
index 92b8260b19d8..4973d6e637cb 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordKeyProperty.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordKeyProperty.cs
@@ -8,7 +8,7 @@ namespace Microsoft.Extensions.VectorData;
 /// Defines a key property on a vector store record.
 /// 
 /// 
-/// The characteristics defined here will influence how the property is treated by the vector store.
+/// The characteristics defined here influence how the property is treated by the vector store.
 /// 
 public sealed class VectorStoreRecordKeyProperty : VectorStoreRecordProperty
 {
@@ -25,7 +25,7 @@ public VectorStoreRecordKeyProperty(string propertyName, Type propertyType)
     /// 
     /// Initializes a new instance of the  class by cloning the given source.
     /// 
-    /// The source to clone
+    /// The source to clone.
     public VectorStoreRecordKeyProperty(VectorStoreRecordKeyProperty source)
         : base(source)
     {
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordProperty.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordProperty.cs
index c468817684e9..723261f23e95 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordProperty.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordProperty.cs
@@ -8,7 +8,7 @@ namespace Microsoft.Extensions.VectorData;
 /// Defines a base property class for properties on a vector store record.
 /// 
 /// 
-/// The characteristics defined here will influence how the property is treated by the vector store.
+/// The characteristics defined here influence how the property is treated by the vector store.
 /// 
 public abstract class VectorStoreRecordProperty
 {
@@ -41,21 +41,23 @@ private protected VectorStoreRecordProperty(VectorStoreRecordProperty source)
     }
 
     /// 
-    /// Gets or sets the name of the property on the data model.
+    /// Gets the name of the property on the data model.
     /// 
     public string DataModelPropertyName { get; private set; }
 
     /// 
     /// Gets or sets an optional name to use for the property in storage, if different from the property name.
-    /// E.g. the property name might be "MyProperty" but the storage name might be "my_property".
-    /// This property will only be respected by implementations that do not support a well known
-    /// serialization mechanism like JSON, in which case the attributes used by that seriallization system will
-    /// be used.
     /// 
+    /// 
+    /// For example, the property name might be "MyProperty" and the storage name might be "my_property".
+    /// This property is only respected by implementations that do not support a well-known
+    /// serialization mechanism like JSON, in which case the attributes used by that serialization system will
+    /// be used.
+    /// 
     public string? StoragePropertyName { get; init; }
 
     /// 
-    /// Gets or sets the type of the property.
+    /// Gets the type of the property.
     /// 
     public Type PropertyType { get; private set; }
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs
index 8e8e97153ef8..1d1791ed555f 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs
@@ -8,7 +8,7 @@ namespace Microsoft.Extensions.VectorData;
 /// Defines a vector property on a vector store record.
 /// 
 /// 
-/// The characteristics defined here will influence how the property is treated by the vector store.
+/// The characteristics defined here influence how the property is treated by the vector store.
 /// 
 public sealed class VectorStoreRecordVectorProperty : VectorStoreRecordProperty
 {
@@ -25,7 +25,7 @@ public VectorStoreRecordVectorProperty(string propertyName, Type propertyType)
     /// 
     /// Initializes a new instance of the  class by cloning the given source.
     /// 
-    /// The source to clone
+    /// The source to clone.
     public VectorStoreRecordVectorProperty(VectorStoreRecordVectorProperty source)
         : base(source)
     {
@@ -38,26 +38,26 @@ public VectorStoreRecordVectorProperty(VectorStoreRecordVectorProperty source)
     /// Gets or sets the number of dimensions that the vector has.
     /// 
     /// 
-    /// This property is required when creating collections, but may be omitted if not using that functionality.
+    /// This property is required when creating collections, but can be omitted if not using that functionality.
     /// If not provided when trying to create a collection, create will fail.
     /// 
     public int? Dimensions { get; init; }
 
     /// 
-    /// Gets the kind of index to use.
+    /// Gets or sets the kind of index to use.
     /// 
-    /// 
-    /// 
-    /// Default varies by database type. See the documentation of your chosen database connector for more information.
-    /// 
+    /// 
+    /// The default varies by database type. See the documentation of your chosen database connector for more information.
+    /// 
+    /// 
     public string? IndexKind { get; init; }
 
     /// 
-    /// Gets the distance function to use when comparing vectors.
+    /// Gets or sets the distance function to use when comparing vectors.
     /// 
-    /// 
-    /// 
-    /// Default varies by database type. See the documentation of your chosen database connector for more information.
-    /// 
+    /// 
+    /// The default varies by database type. See the documentation of your chosen database connector for more information.
+    /// 
+    /// 
     public string? DistanceFunction { get; init; }
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetRecordOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetRecordOptions.cs
index a849f42794d4..e623cb676247 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetRecordOptions.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetRecordOptions.cs
@@ -3,7 +3,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Options when calling .
+/// Defines options for calling .
 /// 
 public class GetRecordOptions
 {
@@ -17,7 +17,7 @@ public GetRecordOptions()
     /// 
     /// Initializes a new instance of the  class by cloning the given options.
     /// 
-    /// The options to clone
+    /// The options to clone.
     public GetRecordOptions(GetRecordOptions source)
     {
         this.IncludeVectors = source.IncludeVectors;
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj b/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj
index 91827eb081e7..b07403941339 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj
@@ -10,7 +10,7 @@
   
 
   
-    9.0.0-preview.1.24523.1
+    9.0.0-preview.1.25078.1
     9.0.0.0
     
     9.0.0-preview.1.24518.1
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs
index 5e39a541ef86..5368c5301828 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs
@@ -12,7 +12,7 @@ namespace Microsoft.Extensions.VectorData;
 public interface IVectorizableTextSearch
 {
     /// 
-    /// Search the vector store for records that match the given text and filter. The text string will be vectorized downstream and used for the vector search.
+    /// Searches the vector store for records that match the given text and filter. The text string will be vectorized downstream and used for the vector search.
     /// 
     /// The text to search the store with.
     /// The options that control the behavior of the search.
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs
index 3286fafc15fc..b2a5a54194a6 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs
@@ -12,7 +12,7 @@ namespace Microsoft.Extensions.VectorData;
 public interface IVectorizedSearch
 {
     /// 
-    /// Search the vector store for records that match the given embedding and filter.
+    /// Searches the vector store for records that match the given embedding and filter.
     /// 
     /// The type of the vector.
     /// The vector to search the store with.
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs
index 731031ae6706..1430a69b3740 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs
@@ -6,39 +6,39 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Used to provide filtering when doing vector searches.
+/// Provides filtering when doing vector searches.
 /// Contains configuration for doing basic vector search filtering.
 /// 
 /// 
-/// A filter has a collection of s that can be used
+/// A filter has a collection of  instances that can be used
 /// to request that the underlying service filter the search results.
-/// All clauses are combined with and.
+/// All clauses are combined with 'and'.
 /// 
 [Obsolete("Use VectorSearchOptions.Filter instead of VectorSearchOptions.OldFilter")]
 public sealed class VectorSearchFilter
 {
-    /// The filter clauses to and together.
+    /// The filter clauses to 'and' together.
     private readonly List _filterClauses = [];
 
     /// Gets the default search filter.
     public static VectorSearchFilter Default { get; } = new VectorSearchFilter();
 
     /// 
-    /// The filter clauses to and together.
+    /// The filter clauses to 'and' together.
     /// 
     public IEnumerable FilterClauses => this._filterClauses;
 
     /// 
-    /// Create an instance of 
+    /// Creates a new instance of 
     /// 
     public VectorSearchFilter()
     {
     }
 
     /// 
-    /// Create an instance of  with the provided s.
-    /// The  instances to use
+    /// Creates a new instance of  with the provided  instances.
     /// 
+    /// The  instances to use.
     public VectorSearchFilter(IEnumerable filterClauses)
     {
         if (filterClauses == null)
@@ -50,13 +50,13 @@ public VectorSearchFilter(IEnumerable filterClauses)
     }
 
     /// 
-    /// Add an equal to clause to the filter options.
+    /// Adds an 'equal to' clause to the filter options.
     /// 
-    /// Name of the property to check against. Use the name of the property from your data model or as provided in the record definition.
-    /// Value that the property should match.
-    ///  instance to allow fluent configuration.
+    /// The name of the property to check against. Use the name of the property from your data model or as provided in the record definition.
+    /// The value that the property should match.
+    /// A  instance to allow fluent configuration.
     /// 
-    /// This clause will check if a property is equal to a specific value.
+    /// This clause checks if a property is equal to a specific value.
     /// 
     public VectorSearchFilter EqualTo(string propertyName, object value)
     {
@@ -65,13 +65,13 @@ public VectorSearchFilter EqualTo(string propertyName, object value)
     }
 
     /// 
-    /// Add an any tag equal to clause to the filter options.
+    /// Adds an 'any tag equal to' clause to the filter options.
     /// 
-    /// Name of the property consisting of a list of values to check against. Use the name of the property from your data model or as provided in the record definition.
-    /// Value that the list should contain.
-    ///  instance to allow fluent configuration.
+    /// The name of the property consisting of a list of values to check against. Use the name of the property from your data model or as provided in the record definition.
+    /// The value that the list should contain.
+    /// A  instance to allow fluent configuration.
     /// 
-    /// This clause will check if a property consisting of a list of values contains a specific value.
+    /// This clause checks if a property consisting of a list of values contains a specific value.
     /// 
     public VectorSearchFilter AnyTagEqualTo(string propertyName, string value)
     {
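
For illustration, a minimal sketch of how this fluent filter API is composed; the property names are hypothetical, and as noted above the type is obsolete in favor of VectorSearchOptions.Filter:

    var filter = new VectorSearchFilter()
        .EqualTo("Category", "hotel")            // hypothetical data property
        .AnyTagEqualTo("Tags", "pet-friendly");  // hypothetical tag-list property
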
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs
index 6ac552651379..72b54d263a39 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs
@@ -6,7 +6,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Options for vector search.
+/// Defines options for vector search.
 /// 
 public class VectorSearchOptions
 {
@@ -24,8 +24,10 @@ public class VectorSearchOptions
     /// 
     /// Gets or sets the name of the vector property to search on.
     /// Use the name of the vector property from your data model or as provided in the record definition.
-    /// If not provided will default to the first vector property in the schema.
     /// 
+    /// 
+    /// The default value is the first vector property in the schema.
+    /// 
     public string? VectorPropertyName { get; init; }
 
     /// 
@@ -34,7 +36,7 @@ public class VectorSearchOptions
     public int Top { get; init; } = 3;
 
     /// 
-    /// Gets or sets the number of results to skip before returning results, i.e. the index of the first result to return.
+    /// Gets or sets the number of results to skip before returning results, that is, the index of the first result to return.
     /// 
     public int Skip { get; init; } = 0;
 
@@ -46,9 +48,11 @@ public class VectorSearchOptions
     /// 
     /// Gets or sets a value indicating whether the total count should be included in the results.
     /// 
+    /// 
+    /// The default value is false.
+    /// 
     /// 
-    /// Default value is false.
-    /// Not all vector search implementations will support this option in which case the total
+    /// Not all vector search implementations support this option, in which case the total
     /// count will be null even if requested via this option.
     /// 
     public bool IncludeTotalCount { get; init; } = false;
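
As a rough usage sketch, these options combine as follows; the vector property name is an assumption, not part of this change:

    var options = new VectorSearchOptions
    {
        VectorPropertyName = "DescriptionEmbedding", // hypothetical vector property
        Top = 5,                   // return at most five results
        Skip = 0,                  // start at the first result
        IncludeTotalCount = true,  // may still be null if the store does not support it
    };
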
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResult.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResult.cs
index 14a813a4a797..f5793844d674 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResult.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResult.cs
@@ -3,7 +3,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// A single search result from a vector search.
+/// Represents a single search result from a vector search.
 /// 
 /// The record data model to use for retrieving data from the store.
 public sealed class VectorSearchResult
@@ -20,12 +20,12 @@ public VectorSearchResult(TRecord record, double? score)
     }
 
     /// 
-    /// The record that was found by the search.
+    /// Gets the record that was found by the search.
     /// 
     public TRecord Record { get; }
 
     /// 
-    /// The score of this result in relation to the search query.
+    /// Gets the score of this result in relation to the search query.
     /// 
     public double? Score { get; }
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResults.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResults.cs
index 41202c513e2b..293315ee554a 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResults.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResults.cs
@@ -12,7 +12,7 @@ namespace Microsoft.Extensions.VectorData;
 public class VectorSearchResults(IAsyncEnumerable> results)
 {
     /// 
-    /// The total count of results found by the search operation, or null
+    /// Gets or sets the total count of results found by the search operation, or null
     /// if the count was not requested or cannot be computed.
     /// 
     /// 
@@ -21,12 +21,12 @@ public class VectorSearchResults(IAsyncEnumerable
-    /// The metadata associated with the content.
+    /// Gets or sets the metadata associated with the content.
     /// 
     public IReadOnlyDictionary? Metadata { get; init; }
 
     /// 
-    /// The search results.
+    /// Gets the search results.
     /// 
     public IAsyncEnumerable> Results { get; } = results;
 }
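
A brief consumption sketch, assuming a collection implementing the vectorized-search interface above and a hypothetical Hotel record with a HotelName property:

    VectorSearchResults<Hotel> results = await collection.VectorizedSearchAsync(queryVector, options);
    Console.WriteLine($"Total matches: {results.TotalCount?.ToString() ?? "not requested"}");
    await foreach (VectorSearchResult<Hotel> match in results.Results)
    {
        Console.WriteLine($"{match.Record.HotelName} (score: {match.Score})");
    }
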
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs
index d6b1bae8dfd2..007dcf79da03 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs
@@ -6,7 +6,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Interface for accessing the list of collections in a vector store.
+/// Defines an interface for accessing the list of collections in a vector store.
 /// 
 /// 
 /// This interface can be used with collections of any schema type, but requires you to provide schema information when getting a collection.
@@ -14,12 +14,12 @@ namespace Microsoft.Extensions.VectorData;
 public interface IVectorStore
 {
     /// 
-    /// Get a collection from the vector store.
+    /// Gets a collection from the vector store.
     /// 
     /// The data type of the record key.
-    /// The record data model to use for adding, updating and retrieving data from the collection.
+    /// The record data model to use for adding, updating, and retrieving data from the collection.
     /// The name of the collection.
-    /// Defines the schema of the record type.
+    /// The schema of the record type.
     /// A new  instance for managing the records in the collection.
     /// 
     /// To successfully request a collection, either  must be annotated with attributes that define the schema of
@@ -32,7 +32,7 @@ IVectorStoreRecordCollection GetCollection(string
         where TKey : notnull;
 
     /// 
-    /// Retrieve the names of all the collections in the vector store.
+    /// Retrieves the names of all the collections in the vector store.
     /// 
     /// The  to monitor for cancellation requests. The default is .
     /// The list of names of all the collections in the vector store.
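
A rough usage sketch; the Hotel record type, the collection name, and the use of the InMemory connector are assumptions, not part of this change:

    IVectorStore vectorStore = new InMemoryVectorStore(); // assumes the InMemory connector is referenced
    IVectorStoreRecordCollection<string, Hotel> hotels = vectorStore.GetCollection<string, Hotel>("skhotels");
    await hotels.CreateCollectionIfNotExistsAsync();
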
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs
index 6415ed35fe59..b8e410d4afd5 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs
@@ -7,10 +7,10 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// A schema aware interface for managing a named collection of records in a vector store and for creating or deleting the collection itself.
+/// Defines a schema-aware interface for managing a named collection of records in a vector store and for creating or deleting the collection itself.
 /// 
 /// The data type of the record key.
-/// The record data model to use for adding, updating and retrieving data from the store.
+/// The record data model to use for adding, updating, and retrieving data from the store.
 #pragma warning disable CA1711 // Identifiers should not have incorrect suffix
 public interface IVectorStoreRecordCollection : IVectorizedSearch
 #pragma warning restore CA1711 // Identifiers should not have incorrect suffix
@@ -19,31 +19,31 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch
     /// 
     /// Gets the name of the collection.
     /// 
-    public string CollectionName { get; }
+    string CollectionName { get; }
 
     /// 
-    /// Check if the collection exists in the vector store.
+    /// Checks if the collection exists in the vector store.
     /// 
     /// The  to monitor for cancellation requests. The default is .
     ///  if the collection exists,  otherwise.
     Task CollectionExistsAsync(CancellationToken cancellationToken = default);
 
     /// 
-    /// Create this collection in the vector store.
+    /// Creates this collection in the vector store.
     /// 
     /// The  to monitor for cancellation requests. The default is .
     /// A  that completes when the collection has been created.
     Task CreateCollectionAsync(CancellationToken cancellationToken = default);
 
     /// 
-    /// Create this collection in the vector store if it does not already exist.
+    /// Creates this collection in the vector store if it doesn't already exist.
     /// 
     /// The  to monitor for cancellation requests. The default is .
     /// A  that completes when the collection has been created.
     Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default);
 
     /// 
-    /// Delete the collection from the vector store.
+    /// Deletes the collection from the vector store.
     /// 
     /// The  to monitor for cancellation requests. The default is .
     /// A  that completes when the collection has been deleted.
@@ -53,71 +53,77 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch
     /// Gets a record from the vector store. Does not guarantee that the collection exists.
     /// Returns null if the record is not found.
     /// 
-    /// The unique id associated with the record to get.
+    /// The unique ID associated with the record to get.
     /// Optional options for retrieving the record.
     /// The  to monitor for cancellation requests. The default is .
     /// The record if found, otherwise null.
-    /// Throw when the command fails to execute for any reason.
-    /// Throw when mapping between the storage model and record data model fails.
+    /// The command fails to execute for any reason.
+    /// The mapping between the storage model and record data model fails.
     Task GetAsync(TKey key, GetRecordOptions? options = default, CancellationToken cancellationToken = default);
 
     /// 
     /// Gets a batch of records from the vector store. Does not guarantee that the collection exists.
-    /// Gets will be made in a single request or in a single parallel batch depending on the available store functionality.
-    /// Only found records will be returned, so the resultset may be smaller than the requested keys.
-    /// Throws for any issues other than records not being found.
     /// 
-    /// The unique ids associated with the record to get.
+    /// The unique IDs associated with the record to get.
     /// Optional options for retrieving the records.
     /// The  to monitor for cancellation requests. The default is .
-    /// The records associated with the unique keys provided.
-    /// Throw when the command fails to execute for any reason.
-    /// Throw when mapping between the storage model and record data model fails.
+    /// The records associated with the specified unique keys.
+    /// 
+    /// Gets are made in a single request or in a single parallel batch depending on the available store functionality.
+    /// Only found records are returned, so the result set might be smaller than the requested keys.
+    /// This method throws for any issues other than records not being found.
+    /// 
+    /// The command fails to execute for any reason.
+    /// The mapping between the storage model and record data model fails.
     IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default);
 
     /// 
     /// Deletes a record from the vector store. Does not guarantee that the collection exists.
     /// 
-    /// The unique id associated with the record to remove.
+    /// The unique ID associated with the record to remove.
     /// The  to monitor for cancellation requests. The default is .
     /// The unique identifier for the record.
-    /// Throw when the command fails to execute for any reason other than that the record does not exit.
+    /// The command fails to execute for any reason other than that the record does not exist.
     Task DeleteAsync(TKey key, CancellationToken cancellationToken = default);
 
     /// 
     /// Deletes a batch of records from the vector store. Does not guarantee that the collection exists.
-    /// Deletes will be made in a single request or in a single parallel batch depending on the available store functionality.
-    /// If a record is not found, it will be ignored and the batch will succeed.
-    /// If any record cannot be deleted for any other reason, the operation will throw. Some records may have already been deleted, while others may not, so the entire operation should be retried.
     /// 
-    /// The unique ids associated with the records to remove.
+    /// The unique IDs associated with the records to remove.
     /// The  to monitor for cancellation requests. The default is .
     /// A  that completes when the records have been deleted.
-    /// Throw when the command fails to execute for any reason other than that a record does not exist.
+    /// 
+    /// Deletes are made in a single request or in a single parallel batch, depending on the available store functionality.
+    /// If a record isn't found, it is ignored and the batch succeeds.
+    /// If any record can't be deleted for any other reason, the operation throws. Some records might have already been deleted while others might not have, so the entire operation should be retried.
+    /// 
+    /// The command fails to execute for any reason other than that a record does not exist.
     Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default);
 
     /// 
     /// Upserts a record into the vector store. Does not guarantee that the collection exists.
-    ///     If the record already exists, it will be updated.
-    ///     If the record does not exist, it will be created.
+    ///     If the record already exists, it is updated.
+    ///     If the record does not exist, it is created.
     /// 
     /// The record to upsert.
     /// The  to monitor for cancellation requests. The default is .
     /// The unique identifier for the record.
-    /// Throw when the command fails to execute for any reason.
-    /// Throw when mapping between the storage model and record data model fails.
+    /// The command fails to execute for any reason.
+    /// The mapping between the storage model and record data model fails.
     Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default);
 
     /// 
     /// Upserts a group of records into the vector store. Does not guarantee that the collection exists.
-    ///     If the record already exists, it will be updated.
-    ///     If the record does not exist, it will be created.
-    /// Upserts will be made in a single request or in a single parallel batch depending on the available store functionality.
+    ///     If the record already exists, it is updated.
+    ///     If the record does not exist, it is created.
     /// 
     /// The records to upsert.
     /// The  to monitor for cancellation requests. The default is .
     /// The unique identifiers for the records.
-    /// Throw when the command fails to execute for any reason.
-    /// Throw when mapping between the storage model and record data model fails.
+    /// 
+    /// Upserts are made in a single request or in a single parallel batch depending on the available store functionality.
+    /// 
+    /// The command fails to execute for any reason.
+    /// The mapping between the storage model and record data model fails.
     IAsyncEnumerable UpsertBatchAsync(IEnumerable records, CancellationToken cancellationToken = default);
 }
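
To show the simplified call surface after removing UpsertRecordOptions and DeleteRecordOptions, a hedged sketch against a collection of a hypothetical Hotel record keyed by string:

    // Upsert, get (including vectors), and delete a single record; no per-operation options objects remain.
    string key = await collection.UpsertAsync(hotel);
    Hotel? fetched = await collection.GetAsync(key, new GetRecordOptions { IncludeVectors = true });
    await collection.DeleteAsync(key);
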
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs
index ddfc807c3e00..3bac47a89121 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs
@@ -3,21 +3,21 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Interface for mapping between a storage model, and the consumer record data model.
+/// Defines an interface for mapping between a storage model and the consumer record data model.
 /// 
 /// The consumer record data model to map to or from.
 /// The storage model to map to or from.
 public interface IVectorStoreRecordMapper
 {
     /// 
-    /// Map from the consumer record data model to the storage model.
+    /// Maps from the consumer record data model to the storage model.
     /// 
     /// The consumer record data model record to map.
     /// The mapped result.
     TStorageModel MapFromDataToStorageModel(TRecordDataModel dataModel);
 
     /// 
-    /// Map from the storage model to the consumer record data model.
+    /// Maps from the storage model to the consumer record data model.
     /// 
     /// The storage data model record to map.
     /// Options to control the mapping behavior.
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/StorageToDataModelMapperOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/StorageToDataModelMapperOptions.cs
index 66f2cb0d2019..7652a0e4ef71 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/StorageToDataModelMapperOptions.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/StorageToDataModelMapperOptions.cs
@@ -3,12 +3,12 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Options to use with the  method.
+/// Defines options to use with the  method.
 /// 
 public class StorageToDataModelMapperOptions
 {
     /// 
-    /// Get or sets a value indicating whether to include vectors in the retrieval result.
+    /// Gets or sets a value indicating whether to include vectors in the retrieval result.
     /// 
     public bool IncludeVectors { get; init; } = false;
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreException.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreException.cs
index 0f98f11ccd43..dc0f5bd1d1b5 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreException.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreException.cs
@@ -5,7 +5,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Base exception type thrown for any type of failure when using vector stores.
+/// Defines a base exception type for any type of failure when using vector stores.
 /// 
 public abstract class VectorStoreException : Exception
 {
@@ -25,10 +25,10 @@ protected VectorStoreException(string? message) : base(message)
     }
 
     /// 
-    /// Initializes a new instance of the  class with a specified error message and a reference to the inner exception that is the cause of this exception.
+    /// Initializes a new instance of the  class with a specified error message and a reference to the inner exception that's the cause of this exception.
     /// 
     /// The error message that explains the reason for the exception.
-    /// The exception that is the cause of the current exception, or a null reference if no inner exception is specified.
+    /// The exception that's the cause of the current exception, or a null reference if no inner exception is specified.
     protected VectorStoreException(string? message, Exception? innerException) : base(message, innerException)
     {
     }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreOperationException.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreOperationException.cs
index 59b624e88976..6e50942940bd 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreOperationException.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreOperationException.cs
@@ -5,7 +5,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Exception thrown when a vector store command fails, such as upserting a record or deleting a collection.
+/// Defines an exception that's thrown when a vector store command fails, such as upserting a record or deleting a collection.
 /// 
 public class VectorStoreOperationException : VectorStoreException
 {
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreRecordMappingException.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreRecordMappingException.cs
index f9876a7f618d..9aa4e9c41737 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreRecordMappingException.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreRecordMappingException.cs
@@ -5,7 +5,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Exception thrown when a failure occurs while trying to convert models for storage or retrieval.
+/// Defines an exception that's thrown when a failure occurs while trying to convert models for storage or retrieval.
 /// 
 public class VectorStoreRecordMappingException : VectorStoreException
 {
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs
index 0b704785b1d0..6ab9ee119e55 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs
@@ -5,7 +5,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// A generic data model that can be used to store and retrieve any data from a vector store.
+/// Represents a generic data model that can be used to store and retrieve any data from a vector store.
 /// 
 /// The data type of the record key.
 /// The key of the record.
@@ -20,7 +20,7 @@ public sealed class VectorStoreGenericDataModel(TKey key)
     /// Gets or sets a dictionary of data items stored in the record.
     /// 
     /// 
-    /// This dictionary contains all fields that are not vectors.
+    /// This dictionary contains all fields that aren't vectors.
     /// 
     public Dictionary Data { get; init; } = new();
 
diff --git a/dotnet/src/Experimental/Process.Abstractions/IKernelExternalProcessMessageChannel.cs b/dotnet/src/Experimental/Process.Abstractions/IKernelExternalProcessMessageChannel.cs
new file mode 100644
index 000000000000..10cfbadd951e
--- /dev/null
+++ b/dotnet/src/Experimental/Process.Abstractions/IKernelExternalProcessMessageChannel.cs
@@ -0,0 +1,32 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Threading.Tasks;
+
+namespace Microsoft.SemanticKernel;
+
+/// 
+/// An interface that provides a channel for emitting external messages from a step.
+/// In addition, it provides common methods such as initialization and uninitialization.
+/// 
+public interface IExternalKernelProcessMessageChannel
+{
+    /// 
+    /// Initializes the external messaging channel.
+    /// 
+    /// A 
+    abstract ValueTask Initialize();
+
+    /// 
+    /// Uninitializes the external messaging channel.
+    /// 
+    /// A 
+    abstract ValueTask Uninitialize();
+
+    /// 
+    /// Emits the specified event from the step outside the SK process
+    /// 
+    /// The name of the topic to be used externally as the event name.
+    /// The data to be transmitted externally.
+    /// 
+    abstract Task EmitExternalEventAsync(string externalTopicEvent, object? eventData);
+}
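
A minimal illustrative implementation of this new interface; the class name and the console logging are assumptions:

    public sealed class ConsoleMessageChannel : IExternalKernelProcessMessageChannel
    {
        public ValueTask Initialize() => ValueTask.CompletedTask;

        public ValueTask Uninitialize() => ValueTask.CompletedTask;

        public Task EmitExternalEventAsync(string externalTopicEvent, object? eventData)
        {
            // Forward the event to an external system; here it is simply written to the console.
            Console.WriteLine($"[{externalTopicEvent}] {eventData}");
            return Task.CompletedTask;
        }
    }
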
diff --git a/dotnet/src/Experimental/Process.Abstractions/IKernelProcessMessageChannel.cs b/dotnet/src/Experimental/Process.Abstractions/IKernelProcessMessageChannel.cs
index d98dc211aaf8..53df244aa8a9 100644
--- a/dotnet/src/Experimental/Process.Abstractions/IKernelProcessMessageChannel.cs
+++ b/dotnet/src/Experimental/Process.Abstractions/IKernelProcessMessageChannel.cs
@@ -14,5 +14,5 @@ public interface IKernelProcessMessageChannel
     /// 
     /// The event to emit.
     /// A 
-    public abstract ValueTask EmitEventAsync(KernelProcessEvent processEvent);
+    abstract ValueTask EmitEventAsync(KernelProcessEvent processEvent);
 }
diff --git a/dotnet/src/Experimental/Process.Abstractions/KernelProcessContext.cs b/dotnet/src/Experimental/Process.Abstractions/KernelProcessContext.cs
index 6495eecbfdec..59f420642f0a 100644
--- a/dotnet/src/Experimental/Process.Abstractions/KernelProcessContext.cs
+++ b/dotnet/src/Experimental/Process.Abstractions/KernelProcessContext.cs
@@ -27,4 +27,10 @@ public abstract class KernelProcessContext
     /// 
     /// A  where T is 
     public abstract Task GetStateAsync();
+
+    /// 
+    /// Gets the instance of  used for external messages
+    /// 
+    /// 
+    public abstract Task GetExternalMessageChannelAsync();
 }
diff --git a/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepContext.cs b/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepContext.cs
index 6dfac0412d29..9beadf7b9896 100644
--- a/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepContext.cs
+++ b/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepContext.cs
@@ -10,18 +10,21 @@ namespace Microsoft.SemanticKernel;
 public sealed class KernelProcessStepContext
 {
     private readonly IKernelProcessMessageChannel _stepMessageChannel;
+    private readonly IExternalKernelProcessMessageChannel? _externalMessageChannel;
 
     /// 
     /// Initializes a new instance of the  class.
     /// 
     /// An instance of .
-    public KernelProcessStepContext(IKernelProcessMessageChannel channel)
+    /// An instance of 
+    public KernelProcessStepContext(IKernelProcessMessageChannel channel, IExternalKernelProcessMessageChannel? externalMessageChannel = null)
     {
         this._stepMessageChannel = channel;
+        this._externalMessageChannel = externalMessageChannel;
     }
 
     /// 
-    /// Emit an event from the current step.
+    /// Emits an SK process event from the current step.
     /// 
     /// An instance of  to be emitted from the 
     /// A 
@@ -31,7 +34,7 @@ public ValueTask EmitEventAsync(KernelProcessEvent processEvent)
     }
 
     /// 
-    /// Emit an event from the current step with a simplified method signature.
+    /// Emits an SK process event from the current step with a simplified method signature.
     /// 
     /// 
     /// 
@@ -52,4 +55,22 @@ public ValueTask EmitEventAsync(
                 Visibility = visibility
             });
     }
+
+    /// 
+    /// Emits an external event through a 
+    /// component, if connected, from within the SK process.
+    /// 
+    /// 
+    /// 
+    /// 
+    /// 
+    public async Task EmitExternalEventAsync(string topicName, object? processEventData = null)
+    {
+        if (this._externalMessageChannel == null)
+        {
+            throw new KernelException($"External message channel not configured for step with topic {topicName}");
+        }
+
+        await this._externalMessageChannel.EmitExternalEventAsync(topicName, processEventData).ConfigureAwait(false);
+    }
 }
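
A sketch of how a step could call the new method; the step class, function, and topic name are illustrative only:

    public sealed class NotifyStep : KernelProcessStep
    {
        [KernelFunction]
        public async Task NotifyAsync(KernelProcessStepContext context, string message)
        {
            // Throws a KernelException if no external message channel was configured for the process.
            await context.EmitExternalEventAsync("MyExternalTopic", message);
        }
    }
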
diff --git a/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Controllers/ProcessTestController.cs b/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Controllers/ProcessTestController.cs
index 187500e15dee..4df90bdd20a1 100644
--- a/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Controllers/ProcessTestController.cs
+++ b/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Controllers/ProcessTestController.cs
@@ -5,6 +5,7 @@
 using Microsoft.AspNetCore.Mvc;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Process.Serialization;
+using SemanticKernel.Process.IntegrationTests.CloudEvents;
 
 namespace SemanticKernel.Process.IntegrationTests.Controllers;
 
@@ -72,6 +73,23 @@ public async Task GetProcessAsync(string processId)
         return this.Ok(daprProcess);
     }
 
+    /// 
+    /// Retrieves the current state of the MockCloudEventClient used in the running process.
+    /// 
+    /// The Id of the process.
+    /// The mock cloud client injected via dependency injection.
+    /// 
+    [HttpGet("processes/{processId}/mockCloudClient")]
+    public Task GetMockCloudClient(string processId, MockCloudEventClient cloudClient)
+    {
+        if (!s_processes.TryGetValue(processId, out DaprKernelProcessContext? context))
+        {
+            return Task.FromResult(this.NotFound());
+        }
+
+        return Task.FromResult(this.Ok(cloudClient));
+    }
+
     /// 
     /// Checks the health of the Dapr runtime by attempting to send a message to a health actor.
     /// 
diff --git a/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Program.cs b/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Program.cs
index d1d66f317d50..6d3789bb2047 100644
--- a/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Program.cs
+++ b/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Program.cs
@@ -2,6 +2,7 @@
 
 using Microsoft.SemanticKernel;
 using SemanticKernel.Process.IntegrationTests;
+using SemanticKernel.Process.IntegrationTests.CloudEvents;
 
 var builder = WebApplication.CreateBuilder(args);
 
@@ -15,6 +16,10 @@
 // Configure the Kernel with DI. This is required for dependency injection to work with processes.
 builder.Services.AddKernel();
 
+// Configure IExternalKernelProcessMessageChannel used for testing purposes
+builder.Services.AddSingleton(MockCloudEventClient.Instance);
+builder.Services.AddSingleton(MockCloudEventClient.Instance);
+
 // Configure Dapr
 builder.Services.AddActors(static options =>
 {
diff --git a/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/DaprTestProcessContext.cs b/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/DaprTestProcessContext.cs
index 59401b1c2979..e7ca6292b16f 100644
--- a/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/DaprTestProcessContext.cs
+++ b/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/DaprTestProcessContext.cs
@@ -5,6 +5,7 @@
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Process;
 using Microsoft.SemanticKernel.Process.Serialization;
+using SemanticKernel.Process.IntegrationTests.CloudEvents;
 
 namespace SemanticKernel.Process.IntegrationTests;
 internal sealed class DaprTestProcessContext : KernelProcessContext
@@ -68,4 +69,14 @@ public override Task StopAsync()
     {
         throw new NotImplementedException();
     }
+
+    public override async Task GetExternalMessageChannelAsync()
+    {
+        var response = await this._httpClient.GetFromJsonAsync($"http://localhost:5200/processes/{this._processId}/mockCloudClient", options: this._serializerOptions);
+        return response switch
+        {
+            null => throw new InvalidOperationException("Process not found"),
+            _ => response
+        };
+    }
 }
diff --git a/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/ProcessTestFixture.cs b/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/ProcessTestFixture.cs
index fa35cf1fe0fa..c6f55eb95f69 100644
--- a/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/ProcessTestFixture.cs
+++ b/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/ProcessTestFixture.cs
@@ -121,9 +121,11 @@ private async Task WaitForHostStartupAsync()
     /// The process to start.
     /// An instance of 
     /// An optional initial event.
+    /// The channel used for external messages.
     /// A 
-    public async Task StartProcessAsync(KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent)
+    public async Task StartProcessAsync(KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent, IExternalKernelProcessMessageChannel? externalMessageChannel = null)
     {
+        // The actual injection of the Kernel and ExternalKernelProcessMessageChannel happens in dotnet\src\Experimental\Process.IntegrationTestHost.Dapr\Program.cs
         var context = new DaprTestProcessContext(process, this._httpClient!);
         await context.StartWithEventAsync(initialEvent);
         return context;
diff --git a/dotnet/src/Experimental/Process.IntegrationTestRunner.Local/ProcessTestFixture.cs b/dotnet/src/Experimental/Process.IntegrationTestRunner.Local/ProcessTestFixture.cs
index 7fb4f7d72393..cbe202fdd7e0 100644
--- a/dotnet/src/Experimental/Process.IntegrationTestRunner.Local/ProcessTestFixture.cs
+++ b/dotnet/src/Experimental/Process.IntegrationTestRunner.Local/ProcessTestFixture.cs
@@ -17,9 +17,10 @@ public class ProcessTestFixture
     /// The process to start.
     /// An instance of 
     /// An optional initial event.
+    /// The channel used for external messages.
     /// A 
-    public async Task StartProcessAsync(KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent)
+    public async Task StartProcessAsync(KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent, IExternalKernelProcessMessageChannel? externalMessageChannel = null)
     {
-        return await process.StartAsync(kernel, initialEvent);
+        return await process.StartAsync(kernel, initialEvent, externalMessageChannel);
     }
 }
diff --git a/dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventClient.cs b/dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventClient.cs
new file mode 100644
index 000000000000..317a2fe545d2
--- /dev/null
+++ b/dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventClient.cs
@@ -0,0 +1,63 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
+
+namespace SemanticKernel.Process.IntegrationTests.CloudEvents;
+/// 
+/// Class used for testing purposes to mock emitting external cloud events
+/// 
+public class MockCloudEventClient : IExternalKernelProcessMessageChannel
+{
+    /// 
+    /// Initialization counter for testing
+    /// 
+    public int InitializationCounter { get; set; } = 0;
+    /// 
+    /// Uninitialization counter for testing
+    /// 
+    public int UninitializationCounter { get; set; } = 0;
+    /// 
+    /// Captures cloud events emitted for testing
+    /// 
+    public List CloudEvents { get; set; } = [];
+
+    private static MockCloudEventClient? s_instance = null;
+
+    /// 
+    /// Instance of  when used as singleton
+    /// 
+    public static MockCloudEventClient Instance
+    {
+        get
+        {
+            return s_instance ??= new MockCloudEventClient();
+        }
+    }
+
+    /// 
+    public Task EmitExternalEventAsync(string externalTopicEvent, object? eventData)
+    {
+        if (eventData != null)
+        {
+            this.CloudEvents.Add(new() { TopicName = externalTopicEvent, Data = (string)eventData });
+        }
+
+        return Task.CompletedTask;
+    }
+
+    /// 
+    public ValueTask Initialize()
+    {
+        this.InitializationCounter++;
+        return ValueTask.CompletedTask;
+    }
+
+    /// 
+    public ValueTask Uninitialize()
+    {
+        this.UninitializationCounter++;
+        return ValueTask.CompletedTask;
+    }
+}
diff --git a/dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventData.cs b/dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventData.cs
new file mode 100644
index 000000000000..97dd18e9de2d
--- /dev/null
+++ b/dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventData.cs
@@ -0,0 +1,19 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+namespace SemanticKernel.Process.IntegrationTests.CloudEvents;
+
+/// 
+/// Mock cloud event data used for testing purposes only
+/// 
+public class MockCloudEventData
+{
+    /// 
+    /// Name of the mock topic
+    /// 
+    public required string TopicName { get; set; }
+
+    /// 
+    /// Data emitted in the mock cloud event
+    /// 
+    public string? Data { get; set; }
+}
diff --git a/dotnet/src/Experimental/Process.IntegrationTests.Resources/ProcessCloudEventsResources.cs b/dotnet/src/Experimental/Process.IntegrationTests.Resources/ProcessCloudEventsResources.cs
new file mode 100644
index 000000000000..e54388269e1e
--- /dev/null
+++ b/dotnet/src/Experimental/Process.IntegrationTests.Resources/ProcessCloudEventsResources.cs
@@ -0,0 +1,40 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
+
+namespace SemanticKernel.Process.IntegrationTests;
+
+#pragma warning disable CS1591 // Missing XML comment for publicly visible type or member
+
+/// 
+/// A step that emits messages externally
+/// 
+public sealed class MockProxyStep : KernelProcessStep
+{
+    public static class FunctionNames
+    {
+        public const string OnRepeatMessage = nameof(OnRepeatMessage);
+        public const string OnEchoMessage = nameof(OnEchoMessage);
+    }
+
+    public static class TopicNames
+    {
+        public const string RepeatExternalTopic = nameof(RepeatExternalTopic);
+        public const string EchoExternalTopic = nameof(EchoExternalTopic);
+    }
+
+    [KernelFunction(FunctionNames.OnRepeatMessage)]
+    public async Task OnRepeatMessageAsync(KernelProcessStepContext context, string message)
+    {
+        await context.EmitExternalEventAsync(TopicNames.RepeatExternalTopic, message);
+    }
+
+    [KernelFunction(FunctionNames.OnEchoMessage)]
+    public async Task OnEchoMessageAsync(KernelProcessStepContext context, string message)
+    {
+        await context.EmitExternalEventAsync(TopicNames.EchoExternalTopic, message);
+    }
+}
+
+#pragma warning restore CS1591 // Missing XML comment for publicly visible type or member
diff --git a/dotnet/src/Experimental/Process.IntegrationTests.Shared/Process.IntegrationTests.Shared.props b/dotnet/src/Experimental/Process.IntegrationTests.Shared/Process.IntegrationTests.Shared.props
index 9c4c35980463..b0be78e43a06 100644
--- a/dotnet/src/Experimental/Process.IntegrationTests.Shared/Process.IntegrationTests.Shared.props
+++ b/dotnet/src/Experimental/Process.IntegrationTests.Shared/Process.IntegrationTests.Shared.props
@@ -2,5 +2,6 @@
   
     
     
+    
   
 
\ No newline at end of file
diff --git a/dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessCloudEventsTests.cs b/dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessCloudEventsTests.cs
new file mode 100644
index 000000000000..ee262b50f7e9
--- /dev/null
+++ b/dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessCloudEventsTests.cs
@@ -0,0 +1,113 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+#pragma warning disable IDE0005 // Using directive is unnecessary.
+using System;
+using System.Linq;
+using System.Runtime.Serialization;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Configuration;
+using Microsoft.SemanticKernel;
+using SemanticKernel.IntegrationTests.TestSettings;
+using SemanticKernel.Process.IntegrationTests.CloudEvents;
+using Xunit;
+#pragma warning restore IDE0005 // Using directive is unnecessary.
+
+namespace SemanticKernel.Process.IntegrationTests;
+
+/// 
+/// Integration tests for processes.
+/// 
+[Collection(nameof(ProcessTestGroup))]
+public sealed class ProcessCloudEventsTests : IClassFixture
+{
+    private readonly ProcessTestFixture _fixture;
+    private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder();
+    private readonly IConfigurationRoot _configuration = new ConfigurationBuilder()
+            .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true)
+            .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true)
+            .AddEnvironmentVariables()
+            .AddUserSecrets()
+            .Build();
+
+    private readonly IExternalKernelProcessMessageChannel _externalMessageChannel = MockCloudEventClient.Instance;
+
+    /// 
+    /// Initializes a new instance of the  class. This is called by the test framework.
+    /// 
+    /// 
+    public ProcessCloudEventsTests(ProcessTestFixture fixture)
+    {
+        this._fixture = fixture;
+    }
+
+    /// 
+    /// Tests a simple linear process with two steps and no sub-processes.
+    /// 
+    /// A 
+    [Fact]
+    public async Task LinearProcessWithCloudEventSubscribersAsync()
+    {
+        // Arrange
+        OpenAIConfiguration configuration = this._configuration.GetSection("OpenAI").Get()!;
+        this._kernelBuilder.AddOpenAIChatCompletion(
+            modelId: configuration.ModelId!,
+            apiKey: configuration.ApiKey);
+
+        Kernel kernel = this._kernelBuilder.Build();
+        var process = this.CreateLinearProcess("SimpleWithCloudEvents").Build();
+
+        // Act
+        string testInput = "Test";
+        var processHandle = await this._fixture.StartProcessAsync(process, kernel, new() { Id = ProcessTestsEvents.StartProcess, Data = testInput }, this._externalMessageChannel);
+        var externalMessageChannel = await processHandle.GetExternalMessageChannelAsync();
+
+        // Assert
+        Assert.NotNull(externalMessageChannel);
+        var mockClient = (MockCloudEventClient)externalMessageChannel;
+        Assert.NotNull(mockClient);
+        Assert.True(mockClient.InitializationCounter > 0);
+        Assert.Equal(2, mockClient.CloudEvents.Count);
+        Assert.Equal(testInput, mockClient.CloudEvents[0].Data);
+        Assert.Equal(MockProxyStep.TopicNames.EchoExternalTopic, mockClient.CloudEvents[0].TopicName);
+        Assert.Equal($"{testInput} {testInput}", mockClient.CloudEvents[1].Data);
+        Assert.Equal(MockProxyStep.TopicNames.RepeatExternalTopic, mockClient.CloudEvents[1].TopicName);
+    }
+
+    /// 
+    /// Creates a simple linear process with two steps and a proxy step to emit events externally
+ /// Input Event:
+ /// Output Events: [, ]
+    /// 
+    /// ┌────────┐      ┌────────┐
+    /// │  echo  ├─┬───►│ repeat ├───┐
+    /// └────────┘ │    └────────┘   │
+    ///            │                 │
+    ///            │    ┌───────┐    │
+    ///            └───►│ proxy │◄───┘
+    ///                 └───────┘
+    /// 
+    /// 
+    private ProcessBuilder CreateLinearProcess(string name)
+    {
+        var processBuilder = new ProcessBuilder(name);
+        var echoStep = processBuilder.AddStepFromType();
+        var repeatStep = processBuilder.AddStepFromType();
+        var proxyStep = processBuilder.AddStepFromType();
+
+        processBuilder.OnInputEvent(ProcessTestsEvents.StartProcess)
+            .SendEventTo(new ProcessFunctionTargetBuilder(echoStep));
+
+        echoStep.OnFunctionResult(nameof(EchoStep.Echo))
+            .SendEventTo(new ProcessFunctionTargetBuilder(repeatStep, parameterName: "message"));
+
+        echoStep
+            .OnFunctionResult()
+            .SendEventTo(new ProcessFunctionTargetBuilder(proxyStep, functionName: MockProxyStep.FunctionNames.OnEchoMessage));
+
+        repeatStep
+            .OnEvent(ProcessTestsEvents.OutputReadyInternal)
+            .SendEventTo(new ProcessFunctionTargetBuilder(proxyStep, functionName: MockProxyStep.FunctionNames.OnRepeatMessage));
+
+        return processBuilder;
+    }
+}
diff --git a/dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessTestFixture.cs b/dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessTestFixture.cs
index 1fd11bef274b..90dabb3c4bcd 100644
--- a/dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessTestFixture.cs
+++ b/dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessTestFixture.cs
@@ -17,6 +17,7 @@ public abstract class ProcessTestFixture
     /// The process to start.
     /// An instance of 
     /// An optional initial event.
+    /// The channel used for external messages.
     /// A 
-    public abstract Task StartProcessAsync(KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent);
+    public abstract Task StartProcessAsync(KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent, IExternalKernelProcessMessageChannel? externalMessageChannel = null);
 }
diff --git a/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessContext.cs b/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessContext.cs
index b59dd70211f4..9ddf0c4074cb 100644
--- a/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessContext.cs
+++ b/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessContext.cs
@@ -13,18 +13,17 @@ public sealed class LocalKernelProcessContext : KernelProcessContext, IDisposabl
     private readonly LocalProcess _localProcess;
     private readonly Kernel _kernel;
 
-    internal LocalKernelProcessContext(KernelProcess process, Kernel kernel, ProcessEventProxy? eventProxy = null)
+    internal LocalKernelProcessContext(KernelProcess process, Kernel kernel, ProcessEventProxy? eventProxy = null, IExternalKernelProcessMessageChannel? externalMessageChannel = null)
     {
         Verify.NotNull(process, nameof(process));
         Verify.NotNull(kernel, nameof(kernel));
         Verify.NotNullOrWhiteSpace(process.State?.Name);
 
         this._kernel = kernel;
-        this._localProcess = new LocalProcess(
-            process,
-            kernel)
+        this._localProcess = new LocalProcess(process, kernel)
         {
-            EventProxy = eventProxy
+            EventProxy = eventProxy,
+            ExternalMessageChannel = externalMessageChannel,
         };
     }
 
@@ -55,4 +54,10 @@ public override Task SendEventAsync(KernelProcessEvent processEvent) =>
     /// Disposes of the resources used by the process.
     /// 
     public void Dispose() => this._localProcess.Dispose();
+
+    /// 
+    public override Task GetExternalMessageChannelAsync()
+    {
+        return Task.FromResult(this._localProcess.ExternalMessageChannel);
+    }
 }
diff --git a/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessFactory.cs b/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessFactory.cs
index 4904366c9d39..eac8826b37a5 100644
--- a/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessFactory.cs
+++ b/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessFactory.cs
@@ -15,12 +15,13 @@ public static class LocalKernelProcessFactory
     /// Required: The  to start running.
     /// Required: An instance of 
     /// Required: The initial event to start the process.
+    /// Optional: an instance of .
     /// An instance of  that can be used to interrogate or stop the running process.
-    public static async Task StartAsync(this KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent)
+    public static async Task StartAsync(this KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent, IExternalKernelProcessMessageChannel? externalMessageChannel = null)
     {
         Verify.NotNull(initialEvent, nameof(initialEvent));
 
-        LocalKernelProcessContext processContext = new(process, kernel);
+        LocalKernelProcessContext processContext = new(process, kernel, null, externalMessageChannel);
         await processContext.StartWithEventAsync(initialEvent).ConfigureAwait(false);
         return processContext;
     }
diff --git a/dotnet/src/Experimental/Process.LocalRuntime/LocalProcess.cs b/dotnet/src/Experimental/Process.LocalRuntime/LocalProcess.cs
index b7a6695996f4..aea736ceced0 100644
--- a/dotnet/src/Experimental/Process.LocalRuntime/LocalProcess.cs
+++ b/dotnet/src/Experimental/Process.LocalRuntime/LocalProcess.cs
@@ -211,6 +211,7 @@ private ValueTask InitializeProcessAsync()
                 {
                     ParentProcessId = this.Id,
                     EventProxy = this.EventProxy,
+                    ExternalMessageChannel = this.ExternalMessageChannel,
                 };
             }
diff --git a/dotnet/src/Experimental/Process.LocalRuntime/LocalStep.cs b/dotnet/src/Experimental/Process.LocalRuntime/LocalStep.cs
index 2fe9287bafda..c95ba287d0db 100644
--- a/dotnet/src/Experimental/Process.LocalRuntime/LocalStep.cs
+++ b/dotnet/src/Experimental/Process.LocalRuntime/LocalStep.cs
@@ -80,6 +80,8 @@ public LocalStep(KernelProcessStepInfo stepInfo, Kernel kernel, string? parentPr
     /// 
     internal ProcessEventProxy? EventProxy { get; init; }
 
+    internal IExternalKernelProcessMessageChannel? ExternalMessageChannel { get; init; }
+
     /// 
     /// Retrieves all events that have been emitted by this step in the previous superstep.
     /// 
@@ -231,6 +233,13 @@ internal virtual async Task HandleMessageAsync(ProcessMessage message)
     /// 
     protected virtual async ValueTask InitializeStepAsync()
     {
+        if (this.ExternalMessageChannel != null)
+        {
+            // initialize external message channel
+            // TODO: in LocalRuntime need to ensure initialization only happens once
+            await this.ExternalMessageChannel.Initialize().ConfigureAwait(false);
+        }
+
         // Instantiate an instance of the inner step object
         KernelProcessStep stepInstance = (KernelProcessStep)ActivatorUtilities.CreateInstance(this._kernel.Services, this._stepInfo.InnerStepType);
         var kernelPlugin = KernelPluginFactory.CreateFromObject(stepInstance, pluginName: this._stepInfo.State.Name);
@@ -242,7 +251,7 @@ protected virtual async ValueTask InitializeStepAsync()
         }
 
         // Initialize the input channels
-        this._initialInputs = this.FindInputChannels(this._functions, this._logger);
+        this._initialInputs = this.FindInputChannels(this._functions, this._logger, this.ExternalMessageChannel);
         this._inputs = this._initialInputs.ToDictionary(kvp => kvp.Key, kvp => kvp.Value?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value));
 
         // Activate the step with user-defined state if needed
diff --git a/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActor.cs b/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActor.cs
new file mode 100644
index 000000000000..52b16051e070
--- /dev/null
+++ b/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActor.cs
@@ -0,0 +1,39 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Threading.Tasks;
+using Dapr.Actors.Runtime;
+
+namespace Microsoft.SemanticKernel;
+
+/// 
+/// An actor that represents an external event messaging buffer.
+/// 
+internal sealed class ExternalMessageBufferActor : Actor, IExternalMessageBuffer
+{
+    private readonly IExternalKernelProcessMessageChannel _externalMessageChannel;
+
+    /// 
+    /// Required constructor for Dapr Actor.
+    /// 
+    /// The actor host.
+    /// Instance of 
+    public ExternalMessageBufferActor(ActorHost host, IExternalKernelProcessMessageChannel externalMessageChannel) : base(host)
+    {
+        this._externalMessageChannel = externalMessageChannel;
+    }
+
+    public async Task EmitExternalEventAsync(string externalTopicEvent, object? eventData)
+    {
+        await this._externalMessageChannel.EmitExternalEventAsync(externalTopicEvent, eventData).ConfigureAwait(false);
+    }
+
+    protected override async Task OnDeactivateAsync()
+    {
+        await this._externalMessageChannel.Uninitialize().ConfigureAwait(false);
+    }
+
+    protected override async Task OnActivateAsync()
+    {
+        await this._externalMessageChannel.Initialize().ConfigureAwait(false);
+    }
+}
diff --git a/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActorWrapper.cs b/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActorWrapper.cs
new file mode 100644
index 000000000000..5de54a277d20
--- /dev/null
+++ b/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActorWrapper.cs
@@ -0,0 +1,43 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Threading.Tasks;
+
+namespace Microsoft.SemanticKernel;
+
+/// 
+/// Class used to allow using  as 
+/// in SK Process shared abstractions
+/// 
+public class ExternalMessageBufferActorWrapper : IExternalKernelProcessMessageChannel
+{
+    private readonly IExternalMessageBuffer _actor;
+
+    /// 
+    /// Constructor to wrap  as 
+    /// 
+    /// The actor host.
+    public ExternalMessageBufferActorWrapper(IExternalMessageBuffer actor)
+    {
+        this._actor = actor;
+    }
+
+    /// 
+    public async Task EmitExternalEventAsync(string externalTopicEvent, object? eventData)
+    {
+        await this._actor.EmitExternalEventAsync(externalTopicEvent, eventData).ConfigureAwait(false);
+    }
+
+    /// 
+    public ValueTask Initialize()
+    {
+        // When using Dapr, initialization is already taken care of by Dapr Actors
+        throw new System.NotImplementedException();
+    }
+
+    /// 
+    public ValueTask Uninitialize()
+    {
+        // When using Dapr, uninitialization is already taken care of by Dapr Actors
+        throw new System.NotImplementedException();
+    }
+}
diff --git a/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/StepActor.cs b/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/StepActor.cs
index f5445bdf0afc..479687f97077 100644
--- a/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/StepActor.cs
+++ b/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/StepActor.cs
@@ -338,8 +338,15 @@ protected virtual async ValueTask ActivateStepAsync()
             this._functions.Add(f.Name, f);
         }
 
+        // Creating external process channel actor to be used for external messaging by some steps
+        IExternalKernelProcessMessageChannel? externalMessageChannelActor = null;
+        var scopedExternalMessageBufferId = this.ScopedActorId(new ActorId(this.Id.GetId()));
+        var actor = this.ProxyFactory.CreateActorProxy(scopedExternalMessageBufferId, nameof(ExternalMessageBufferActor));
+        externalMessageChannelActor = new ExternalMessageBufferActorWrapper(actor);
+
         // Initialize the input channels
-        this._initialInputs = this.FindInputChannels(this._functions, this._logger);
+        // TODO: Issue #10328 Cloud Events - new Step type dedicated to work as Proxy Step abstraction https://github.com/microsoft/semantic-kernel/issues/10328
+        this._initialInputs = this.FindInputChannels(this._functions, this._logger, externalMessageChannelActor);
         this._inputs = this._initialInputs.ToDictionary(kvp => kvp.Key, kvp => kvp.Value?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value));
 
         // Activate the step with user-defined state if needed
diff --git a/dotnet/src/Experimental/Process.Runtime.Dapr/DaprKernelProcessContext.cs b/dotnet/src/Experimental/Process.Runtime.Dapr/DaprKernelProcessContext.cs
index f09fa4f39222..b7425516863a 100644
--- a/dotnet/src/Experimental/Process.Runtime.Dapr/DaprKernelProcessContext.cs
+++ b/dotnet/src/Experimental/Process.Runtime.Dapr/DaprKernelProcessContext.cs
@@ -67,4 +67,10 @@ public override async Task GetStateAsync()
         var daprProcessInfo = await this._daprProcess.GetProcessInfoAsync().ConfigureAwait(false);
         return daprProcessInfo.ToKernelProcess();
     }
+
+    /// 
+    public override Task GetExternalMessageChannelAsync()
+    {
+        throw new NotImplementedException();
+    }
 }
diff --git a/dotnet/src/Experimental/Process.Runtime.Dapr/Interfaces/IExternalMessageBuffer.cs b/dotnet/src/Experimental/Process.Runtime.Dapr/Interfaces/IExternalMessageBuffer.cs
new file mode 100644
index 000000000000..5db64dbd6f68
--- /dev/null
+++ b/dotnet/src/Experimental/Process.Runtime.Dapr/Interfaces/IExternalMessageBuffer.cs
@@ -0,0 +1,25 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Threading.Tasks;
+using Dapr.Actors;
+
+namespace Microsoft.SemanticKernel;
+
+// estenori-note:
+// for some reason dapr doesn't like if instead public interface IExternalMessageBuffer : IActor, IExternalKernelProcessMessageChannelBase
+// instead defining the interface component is necessary. To make it compatible with shared components a "casting" to IExternalKernelProcessMessageChannelEmitter
+// is added in StepActor logic to make use of FindInputChannels
+
+/// 
+/// An interface for 
+/// 
+public interface IExternalMessageBuffer : IActor
+{
+    /// 
+    /// Emits external events outside of the SK process
+    /// 
+    /// 
+    /// 
+    /// 
+    abstract Task EmitExternalEventAsync(string externalTopicEvent, object? eventData);
+}
diff --git a/dotnet/src/Experimental/Process.Runtime.Dapr/KernelProcessDaprExtensions.cs b/dotnet/src/Experimental/Process.Runtime.Dapr/KernelProcessDaprExtensions.cs
index 52f86899d608..ad65b7f89c4f 100644
--- a/dotnet/src/Experimental/Process.Runtime.Dapr/KernelProcessDaprExtensions.cs
+++ b/dotnet/src/Experimental/Process.Runtime.Dapr/KernelProcessDaprExtensions.cs
@@ -22,5 +22,6 @@ public static void AddProcessActors(this ActorRuntimeOptions actorOptions)
         actorOptions.Actors.RegisterActor();
         actorOptions.Actors.RegisterActor();
         actorOptions.Actors.RegisterActor();
+        actorOptions.Actors.RegisterActor();
     }
 }
diff --git a/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateTests.cs b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateTests.cs
index 1bda62be5645..8cbd6221be59 100644
--- a/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateTests.cs
+++ b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateTests.cs
@@ -511,6 +511,41 @@ public async Task ItTrustsAllTemplatesAsync()
         Assert.Equal(expected, result);
     }
 
+    [Fact]
+    public async Task ItRendersContentWithHtmlEntitiesAsync()
+    {
+        // Arrange
+        var template =
+            """
+            Can you help me tell & the time in Seattle right now?
+            Sure! The time in Seattle is currently 3:00 PM.
+            What about New York?
+            """;
+
+        var factory = new HandlebarsPromptTemplateFactory(options: new() { EnableHtmlDecoder = false });
+
+        var target = factory.Create(new PromptTemplateConfig(template)
+        {
+            TemplateFormat = HandlebarsPromptTemplateFactory.HandlebarsTemplateFormat,
+        });
+
+        // Act
+        var prompt = await target.RenderAsync(this._kernel);
+        bool result = ChatPromptParser.TryParse(prompt, out var chatHistory);
+
+        // Assert
+        Assert.True(result);
+        Assert.NotNull(chatHistory);
+        Assert.Collection(chatHistory,
+            c => Assert.Equal(AuthorRole.User, c.Role),
+            c => Assert.Equal(AuthorRole.Assistant, c.Role),
+            c => Assert.Equal(AuthorRole.User, c.Role));
+        Assert.Collection(chatHistory,
+            c => Assert.Equal("Can you help me tell & the time in Seattle right now?", c.Content),
+            c => Assert.Equal("Sure! The time in Seattle is currently 3:00 PM.", c.Content),
+            c => Assert.Equal("What about New York?", c.Content));
+    }
+
     #region private
 
     private HandlebarsPromptTemplateFactory _factory;
diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplate.cs b/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplate.cs
index d73bd85a15b9..e23dd3ddc628 100644
--- a/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplate.cs
+++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplate.cs
@@ -51,7 +51,8 @@ public async Task RenderAsync(Kernel kernel, KernelArguments? arguments
         this.RegisterHelpers(handlebarsInstance, kernel, arguments, cancellationToken);
         var template = handlebarsInstance.Compile(this._promptModel.Template);
 
-        return System.Net.WebUtility.HtmlDecode(template(arguments).Trim());
+        var text = template(arguments).Trim();
+        return this._options.EnableHtmlDecoder ? System.Net.WebUtility.HtmlDecode(text) : text;
     }
 
     #region private
diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplateOptions.cs b/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplateOptions.cs
index 78be0f2480eb..852517a49510 100644
--- a/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplateOptions.cs
+++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplateOptions.cs
@@ -48,6 +48,11 @@ public sealed class HandlebarsPromptTemplateOptions : HandlebarsHelpersOptions
     /// 
     public Action? RegisterCustomHelpers { get; set; }
 
+    /// 
+    /// Flag indicating whether to enable HTML decoding of the rendered template.
+    /// 
+    public bool EnableHtmlDecoder { get; set; } = true;
+
     /// 
     /// Initializes a new instance of the  class.
     /// 
diff --git a/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplate.cs b/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplate.cs
index 474cd99cbd56..2c5722f140a6 100644
--- a/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplate.cs
+++ b/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplate.cs
@@ -32,11 +32,11 @@ internal sealed partial class LiquidPromptTemplate : IPromptTemplate
     private readonly Dictionary _inputVariables;
 
 #if NET
-    [GeneratedRegex(@"(?system|assistant|user|function):\s+")]
+    [GeneratedRegex(@"(?system|assistant|user|function|developer):\s+")]
     private static partial Regex RoleRegex();
 #else
     private static Regex RoleRegex() => s_roleRegex;
-    private static readonly Regex s_roleRegex = new(@"(?system|assistant|user|function):\s+", RegexOptions.Compiled);
+    private static readonly Regex s_roleRegex = new(@"(?system|assistant|user|function|developer):\s+", RegexOptions.Compiled);
 #endif
 
     /// Initializes the .
@@ -106,7 +106,7 @@ public async Task RenderAsync(Kernel kernel, KernelArguments? 
arguments // xxxx // // turn it into - // + // // xxxx // var splits = RoleRegex().Split(renderedResult); diff --git a/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs b/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs index 91e229a1c246..fcea1ef3a387 100644 --- a/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs +++ b/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs @@ -5,6 +5,7 @@ using System.IO; using System.Linq; using System.Net.Http; +using System.Runtime.InteropServices; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; @@ -159,13 +160,44 @@ await DocumentLoader.LoadDocumentFromUriAsStreamAsync(parsedDescriptionUrl, #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var operationRunnerHttpClient = HttpClientProvider.GetHttpClient(openApiFunctionExecutionParameters?.HttpClient ?? kernel.Services.GetService()); #pragma warning restore CA2000 + static IDictionary? CopilotAgentPluginHeadersFactory(RestApiOperation operation, IDictionary arguments, RestApiOperationRunOptions? options) + { + var graphAllowedHosts = new[] + { + "graph.microsoft.com", + "graph.microsoft.us", + "dod-graph.microsoft.us", + "graph.microsoft.de", + "microsoftgraph.chinacloudapi.cn", + "canary.graph.microsoft.com", + "graph.microsoft-ppe.com" + }; + if (options?.ApiHostUrl?.Host is not { } hostString || !graphAllowedHosts.Contains(hostString)) + { + return null; + } + string frameworkDescription = RuntimeInformation.FrameworkDescription; + string osDescription = RuntimeInformation.OSDescription; + string copilotAgentPluginVersion = HttpHeaderConstant.Values.GetAssemblyVersion(typeof(CopilotAgentPluginKernelExtensions)); + var defaultHeaders = new Dictionary + { + // TODO: version and format updates + ["SdkVersion"] = $"copilot-agent-plugins/{copilotAgentPluginVersion}, (runtimeEnvironment={frameworkDescription}; hostOS={osDescription})", + ["client-request-id"] = Guid.NewGuid().ToString() + }; + + var currentHeaders = operation.BuildHeaders(arguments); + var finalHeaders = defaultHeaders.Concat(currentHeaders).ToDictionary(k => k.Key, v => v.Value); + return finalHeaders; + } var runner = new RestApiOperationRunner( operationRunnerHttpClient, openApiFunctionExecutionParameters?.AuthCallback, openApiFunctionExecutionParameters?.UserAgent, openApiFunctionExecutionParameters?.EnableDynamicPayload ?? false, - openApiFunctionExecutionParameters?.EnablePayloadNamespacing ?? true); + openApiFunctionExecutionParameters?.EnablePayloadNamespacing ?? 
true, + headersFactory: CopilotAgentPluginHeadersFactory); var info = OpenApiDocumentParser.ExtractRestApiInfo(filteredOpenApiDocument); var security = OpenApiDocumentParser.CreateRestApiOperationSecurityRequirements(filteredOpenApiDocument.SecurityRequirements); diff --git a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiFunctionExecutionParameters.cs b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiFunctionExecutionParameters.cs index 899f19e64ce5..9fdd0d9389a3 100644 --- a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiFunctionExecutionParameters.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiFunctionExecutionParameters.cs @@ -76,7 +76,6 @@ public class OpenApiFunctionExecutionParameters /// as a stream rather than as a string. /// If the custom reader is not provided, or the reader returns null, the internal reader is used. ///
- [Experimental("SKEXP0040")] public HttpResponseContentReader? HttpResponseContentReader { get; set; } /// diff --git a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs index 48f72a697a4a..2e7fb3d2214f 100644 --- a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs @@ -1,7 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Diagnostics.CodeAnalysis; using System.IO; using System.Net.Http; using System.Threading; @@ -90,7 +89,6 @@ public static async Task ImportPluginFromOpenApiAsync( /// The specification model. /// The OpenAPI specification parsing and function execution parameters. /// A instance that contains functions corresponding to the operations defined in the OpenAPI specification. - [Experimental("SKEXP0040")] public static KernelPlugin ImportPluginFromOpenApi( this Kernel kernel, string pluginName, @@ -227,7 +225,6 @@ public static async Task CreatePluginFromOpenApiAsync( /// The OpenAPI specification parsing and function execution parameters. /// The cancellation token. /// A instance that contains functions corresponding to the operations defined in the OpenAPI specification. - [Experimental("SKEXP0040")] public static KernelPlugin CreatePluginFromOpenApi( this Kernel kernel, string pluginName, diff --git a/dotnet/src/Functions/Functions.OpenApi/Functions.OpenApi.csproj b/dotnet/src/Functions/Functions.OpenApi/Functions.OpenApi.csproj index 1d72c971fcba..a30e82b03e1d 100644 --- a/dotnet/src/Functions/Functions.OpenApi/Functions.OpenApi.csproj +++ b/dotnet/src/Functions/Functions.OpenApi/Functions.OpenApi.csproj @@ -5,7 +5,6 @@ $(AssemblyName) net8.0;netstandard2.0 $(NoWarn);SKEXP0040 - preview diff --git a/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReader.cs b/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReader.cs index bd268d5984c2..f92b58375c8c 100644 --- a/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReader.cs +++ b/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReader.cs @@ -1,6 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; @@ -12,5 +11,4 @@ namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// The context containing HTTP operation details. /// The cancellation token. /// The HTTP response content. -[Experimental("SKEXP0040")] public delegate Task HttpResponseContentReader(HttpResponseContentReaderContext context, CancellationToken cancellationToken = default); diff --git a/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReaderContext.cs b/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReaderContext.cs index 221ba157fa0a..077591c4d4be 100644 --- a/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReaderContext.cs +++ b/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReaderContext.cs @@ -1,6 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; using System.Net.Http; namespace Microsoft.SemanticKernel.Plugins.OpenApi; @@ -8,7 +7,6 @@ namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// Represents the context for HTTP response content reader. 
/// -[Experimental("SKEXP0040")] public sealed class HttpResponseContentReaderContext { /// diff --git a/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs index 41cd8a3290e5..89bcf205cbc1 100644 --- a/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs @@ -170,7 +170,7 @@ internal IDictionary BuildHeaders(IDictionary a throw new KernelException($"The headers parameter '{parameterStyle}' serialization style is not supported."); } - var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument); + var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument, parameter.Schema); //Serializing the parameter and adding it to the headers. headers.Add(parameter.Name, serializer.Invoke(parameter, node)); @@ -206,7 +206,7 @@ internal string BuildQueryString(IDictionary arguments) throw new KernelException($"The query string parameter '{parameterStyle}' serialization style is not supported."); } - var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument); + var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument, parameter.Schema); // Serializing the parameter and adding it to the query string if there's an argument for it. segments.Add(serializer.Invoke(parameter, node)); @@ -274,7 +274,7 @@ private string BuildPath(string pathTemplate, IDictionary argum throw new KernelException($"The path parameter '{parameterStyle}' serialization style is not supported."); } - var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument); + var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument, parameter.Schema); // Serializing the parameter and adding it to the path. pathTemplate = pathTemplate.Replace($"{{{parameter.Name}}}", HttpUtility.UrlEncode(serializer.Invoke(parameter, node))); diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParser.cs b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParser.cs index 4803d28e1e1b..7912250715b4 100644 --- a/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParser.cs +++ b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParser.cs @@ -27,7 +27,6 @@ namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// Parser for OpenAPI documents. /// -[Experimental("SKEXP0040")] public sealed class OpenApiDocumentParser(ILoggerFactory? loggerFactory = null) { /// diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParserOptions.cs b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParserOptions.cs index f012da455b4b..a59b61257e4b 100644 --- a/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParserOptions.cs +++ b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParserOptions.cs @@ -1,14 +1,12 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Diagnostics.CodeAnalysis; namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// Options for OpenAPI document parser. 
/// -[Experimental("SKEXP0040")] public sealed class OpenApiDocumentParserOptions { /// diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenApi/OperationSelectionPredicateContext.cs b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OperationSelectionPredicateContext.cs index ea3a0dab566d..0632a5186de1 100644 --- a/dotnet/src/Functions/Functions.OpenApi/OpenApi/OperationSelectionPredicateContext.cs +++ b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OperationSelectionPredicateContext.cs @@ -1,14 +1,12 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Diagnostics.CodeAnalysis; namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// Represents the context for an operation selection predicate. /// -[Experimental("SKEXP0040")] public readonly struct OperationSelectionPredicateContext : IEquatable { /// diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs b/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs index 63e0a2ce39ff..0e69ffefcc16 100644 --- a/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs +++ b/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs @@ -3,7 +3,6 @@ using System; using System.Collections.Generic; using System.Collections.ObjectModel; -using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.IO; using System.Linq; @@ -139,7 +138,6 @@ public static async Task CreateFromOpenApiAsync( /// The specification model. /// The OpenAPI specification parsing and function execution parameters. /// A instance that contains functions corresponding to the operations defined in the OpenAPI specification. - [Experimental("SKEXP0040")] public static KernelPlugin CreateFromOpenApi( string pluginName, RestApiSpecification specification, diff --git a/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs b/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs index 0251abbb53e4..deb242dbb1ca 100644 --- a/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs +++ b/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs @@ -427,13 +427,13 @@ private JsonObject BuildJsonObject(IList properties, IDi // Use property argument name to look up the property value if (!string.IsNullOrEmpty(propertyMetadata.ArgumentName) && arguments.TryGetValue(propertyMetadata.ArgumentName!, out object? 
argument) && argument is not null) { - result.Add(propertyMetadata.Name, OpenApiTypeConverter.Convert(propertyMetadata.Name, propertyMetadata.Type, argument)); + result.Add(propertyMetadata.Name, OpenApiTypeConverter.Convert(propertyMetadata.Name, propertyMetadata.Type, argument, propertyMetadata.Schema)); continue; } // Use property name to look up the property value else if (arguments.TryGetValue(argumentName, out argument) && argument is not null) { - result.Add(propertyMetadata.Name, OpenApiTypeConverter.Convert(propertyMetadata.Name, propertyMetadata.Type, argument)); + result.Add(propertyMetadata.Name, OpenApiTypeConverter.Convert(propertyMetadata.Name, propertyMetadata.Type, argument, propertyMetadata.Schema)); continue; } diff --git a/dotnet/src/Functions/Functions.OpenApi/Serialization/OpenApiTypeConverter.cs b/dotnet/src/Functions/Functions.OpenApi/Serialization/OpenApiTypeConverter.cs index 5b7422950d15..cadf0252cbea 100644 --- a/dotnet/src/Functions/Functions.OpenApi/Serialization/OpenApiTypeConverter.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Serialization/OpenApiTypeConverter.cs @@ -4,6 +4,7 @@ using System.Globalization; using System.Text.Json; using System.Text.Json.Nodes; +using Json.Schema; namespace Microsoft.SemanticKernel.Plugins.OpenApi; @@ -13,19 +14,20 @@ namespace Microsoft.SemanticKernel.Plugins.OpenApi; internal static class OpenApiTypeConverter { /// - /// Converts the given parameter argument to a JsonNode based on the specified type. + /// Converts the given parameter argument to a JsonNode based on the specified type or schema. /// /// The parameter name. /// The parameter type. /// The argument to be converted. + /// The parameter schema. /// A JsonNode representing the converted value. - public static JsonNode Convert(string name, string type, object argument) + public static JsonNode Convert(string name, string type, object argument, KernelJsonSchema? schema = null) { Verify.NotNull(argument); try { - JsonNode? converter = type switch + JsonNode? node = type switch { "string" => JsonValue.Create(argument), "array" => argument switch @@ -52,10 +54,12 @@ string stringArgument when double.TryParse(stringArgument, out var doubleValue) byte or sbyte or short or ushort or int or uint or long or ulong or float or double or decimal => JsonValue.Create(argument), _ => null }, - _ => throw new NotSupportedException($"Unexpected type '{type}' of parameter '{name}' with argument '{argument}'."), + _ => schema is null + ? JsonSerializer.SerializeToNode(argument) + : ValidateSchemaAndConvert(name, schema, argument) }; - return converter ?? throw new ArgumentOutOfRangeException(name, argument, $"Argument type '{argument.GetType()}' is not convertible to parameter type '{type}'."); + return node ?? throw new ArgumentOutOfRangeException(name, argument, $"Argument type '{argument.GetType()}' is not convertible to parameter type '{type}'."); } catch (ArgumentException ex) { @@ -66,4 +70,25 @@ string stringArgument when double.TryParse(stringArgument, out var doubleValue) throw new ArgumentOutOfRangeException(name, argument, ex.Message); } } + + /// + /// Validates the argument against the parameter schema and converts it to a JsonNode if valid. + /// + /// The parameter name. + /// The parameter schema. + /// The argument to be validated and converted. + /// A JsonNode representing the converted value. + private static JsonNode? 
ValidateSchemaAndConvert(string parameterName, KernelJsonSchema parameterSchema, object argument) + { + var jsonSchema = JsonSchema.FromText(JsonSerializer.Serialize(parameterSchema)); + + var node = JsonSerializer.SerializeToNode(argument); + + if (jsonSchema.Evaluate(node).IsValid) + { + return node; + } + + throw new ArgumentOutOfRangeException(parameterName, argument, $"Argument type '{argument.GetType()}' does not match the schema."); + } } diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs index f86a93fbce70..a019f6bbfba9 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs +++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs @@ -97,7 +97,7 @@ public void ChatPromptyShouldSupportCreatingOpenAIExecutionSettingsWithJsonObjec Assert.Equal(0, executionSettings.Temperature); Assert.Equal(1.0, executionSettings.TopP); Assert.Null(executionSettings.StopSequences); - Assert.Equal("json_object", executionSettings.ResponseFormat?.ToString()); + Assert.Equal("{\"type\":\"json_object\"}", executionSettings.ResponseFormat?.ToString()); Assert.Null(executionSettings.TokenSelectionBiases); Assert.Equal(3000, executionSettings.MaxTokens); Assert.Null(executionSettings.Seed); @@ -342,7 +342,8 @@ public void ItCreatesInputVariablesOnlyWhenNoneAreExplicitlySet() --- name: MyPrompt inputs: - question: What is the color of the sky? + - name: question + description: What is the color of the sky? --- {{a}} {{b}} {{c}} """; diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chat.prompty b/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chat.prompty index e63680443db2..82884c1ec198 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chat.prompty +++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chat.prompty @@ -7,9 +7,9 @@ model: api: chat configuration: type: azure_openai - azure_deployment: gpt-35-turbo api_version: 2023-07-01-preview parameters: + model_id: gpt-35-turbo tools_choice: auto tools: - type: function diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty b/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty index ba095afeebfc..f8eb9130ae28 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty +++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty @@ -9,6 +9,7 @@ model: type: azure_openai azure_deployment: gpt-4o parameters: + model_id: gpt-4o temperature: 0.0 max_tokens: 3000 top_p: 1.0 diff --git a/dotnet/src/Functions/Functions.Prompty/Core/PromptyModel.cs b/dotnet/src/Functions/Functions.Prompty/Core/PromptyModel.cs deleted file mode 100644 index ece2eaabc219..000000000000 --- a/dotnet/src/Functions/Functions.Prompty/Core/PromptyModel.cs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using YamlDotNet.Serialization; - -namespace Microsoft.SemanticKernel.Prompty.Core; - -internal sealed class PromptyModel -{ - [YamlMember(Alias = "api")] - public ApiType Api { get; set; } = ApiType.Chat; - - [YamlMember(Alias = "configuration")] - public PromptyModelConfig? ModelConfiguration { get; set; } - - [YamlMember(Alias = "parameters")] - public PromptyModelParameters? Parameters { get; set; } - - [YamlMember(Alias = "response")] - public string? 
Response { get; set; } -} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelConfig.cs b/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelConfig.cs deleted file mode 100644 index cb02862f71d1..000000000000 --- a/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelConfig.cs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using YamlDotNet.Serialization; - -namespace Microsoft.SemanticKernel.Prompty.Core; - -internal sealed class PromptyModelConfig -{ - // azure open ai - [YamlMember(Alias = "type")] - public ModelType ModelType { get; set; } - - [YamlMember(Alias = "api_version")] - public string ApiVersion { get; set; } = "2023-12-01-preview"; - - [YamlMember(Alias = "azure_endpoint")] - public string? AzureEndpoint { get; set; } - - [YamlMember(Alias = "azure_deployment")] - public string? AzureDeployment { get; set; } - - [YamlMember(Alias = "api_key")] - public string? ApiKey { get; set; } - - //open ai props - [YamlMember(Alias = "name")] - public string? Name { get; set; } - - [YamlMember(Alias = "organization")] - public string? Organization { get; set; } -} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelParameters.cs b/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelParameters.cs deleted file mode 100644 index 7699037d7466..000000000000 --- a/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelParameters.cs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using YamlDotNet.Serialization; - -namespace Microsoft.SemanticKernel.Prompty.Core; - -/// Parameters to be sent to the model. -internal sealed class PromptyModelParameters -{ - /// Specify the format for model output (e.g., JSON mode). - [YamlMember(Alias = "response_format")] - public PromptyResponseFormat? ResponseFormat { get; set; } - - /// Seed for deterministic sampling (Beta feature). - [YamlMember(Alias = "seed")] - public int? Seed { get; set; } - - /// Maximum number of tokens in chat completion. - [YamlMember(Alias = "max_tokens")] - public int? MaxTokens { get; set; } - - /// Sampling temperature (0 means deterministic). - [YamlMember(Alias = "temperature")] - public double? Temperature { get; set; } - - /// Controls which function the model calls (e.g., "none" or "auto"). - [YamlMember(Alias = "tools_choice")] - public string? ToolsChoice { get; set; } - - /// Array of tools (if applicable). - [YamlMember(Alias = "tools")] - public List? Tools { get; set; } - - /// Frequency penalty for sampling. - [YamlMember(Alias = "frequency_penalty")] - public double? FrequencyPenalty { get; set; } - - /// Presence penalty for sampling. - [YamlMember(Alias = "presence_penalty")] - public double? PresencePenalty { get; set; } - - /// Sequences where model stops generating tokens. - [YamlMember(Alias = "stop")] - public List? Stop { get; set; } - - /// Nucleus sampling probability (0 means no tokens generated). - [YamlMember(Alias = "top_p")] - public double? TopP { get; set; } -} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/PromptyResponseFormat.cs b/dotnet/src/Functions/Functions.Prompty/Core/PromptyResponseFormat.cs deleted file mode 100644 index c3c991903bb1..000000000000 --- a/dotnet/src/Functions/Functions.Prompty/Core/PromptyResponseFormat.cs +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using YamlDotNet.Serialization; - -namespace Microsoft.SemanticKernel.Prompty.Core; - -/// The response format of prompty. 
-internal sealed class PromptyResponseFormat -{ - /// The response format type (e.g: json_object). - [YamlMember(Alias = "type")] - public string? Type { get; set; } -} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/PromptyTool.cs b/dotnet/src/Functions/Functions.Prompty/Core/PromptyTool.cs deleted file mode 100644 index 1bc0fefcb48d..000000000000 --- a/dotnet/src/Functions/Functions.Prompty/Core/PromptyTool.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using YamlDotNet.Serialization; - -namespace Microsoft.SemanticKernel.Prompty.Core; - -internal sealed class PromptyTool -{ - [YamlMember(Alias = "id")] - public string? id { get; set; } - - [YamlMember(Alias = "type")] - public string? Type { get; set; } - - [YamlMember(Alias = "function")] - public PromptyFunction? Function { get; set; } -} - -internal sealed class PromptyFunction -{ - [YamlMember(Alias = "arguments")] - public string? Arguments { get; set; } - - [YamlMember(Alias = "name")] - public string? Name { get; set; } - - [YamlMember(Alias = "parameters")] - public PromptyParameters? Parameters { get; set; } - - [YamlMember(Alias = "description")] - public string? Description { get; set; } -} - -internal sealed class PromptyParameters -{ - [YamlMember(Alias = "description")] - public string? Description { get; set; } - - [YamlMember(Alias = "type")] - public string? Type { get; set; } - - [YamlMember(Alias = "properties")] - public object? Properties { get; set; } -} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/PromptyYaml.cs b/dotnet/src/Functions/Functions.Prompty/Core/PromptyYaml.cs deleted file mode 100644 index 4af70817e742..000000000000 --- a/dotnet/src/Functions/Functions.Prompty/Core/PromptyYaml.cs +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using YamlDotNet.Serialization; - -namespace Microsoft.SemanticKernel.Prompty.Core; - -/// -/// Schema: https://github.com/Azure/azureml_run_specification/blob/master/schemas/Prompty.yaml -/// -internal sealed class PromptyYaml -{ - [YamlMember(Alias = "name")] - public string? Name { get; set; } - - [YamlMember(Alias = "description")] - public string? Description { get; set; } - - [YamlMember(Alias = "version")] - public string? Version { get; set; } - - [YamlMember(Alias = "tags")] - public List? Tags { get; set; } - - [YamlMember(Alias = "authors")] - public List? Authors { get; set; } - - [YamlMember(Alias = "inputs")] - public Dictionary? Inputs { get; set; } - - [YamlMember(Alias = "outputs")] - public Dictionary? Outputs { get; set; } - - [YamlMember(Alias = "sample")] - public object? Sample { get; set; } - - [YamlMember(Alias = "model")] - public PromptyModel? Model { get; set; } - - [YamlMember(Alias = "template")] - public string? Template { get; set; } = "liquid"; -} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/Types/ApiType.cs b/dotnet/src/Functions/Functions.Prompty/Core/Types/ApiType.cs deleted file mode 100644 index 0076bf6b9983..000000000000 --- a/dotnet/src/Functions/Functions.Prompty/Core/Types/ApiType.cs +++ /dev/null @@ -1,9 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -namespace Microsoft.SemanticKernel.Prompty.Core; - -internal enum ApiType -{ - Chat, - Completion, -} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/Types/ModelType.cs b/dotnet/src/Functions/Functions.Prompty/Core/Types/ModelType.cs deleted file mode 100644 index 27c7383868ef..000000000000 --- a/dotnet/src/Functions/Functions.Prompty/Core/Types/ModelType.cs +++ /dev/null @@ -1,9 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace Microsoft.SemanticKernel.Prompty.Core; - -internal enum ModelType -{ - azure_openai, - openai, -} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/Types/ParserType.cs b/dotnet/src/Functions/Functions.Prompty/Core/Types/ParserType.cs deleted file mode 100644 index 94d569f0ba89..000000000000 --- a/dotnet/src/Functions/Functions.Prompty/Core/Types/ParserType.cs +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace Microsoft.SemanticKernel.Prompty.Core; - -internal enum ParserType -{ - Chat, - Embedding, - Completion, - Image, -} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/Types/RoleType.cs b/dotnet/src/Functions/Functions.Prompty/Core/Types/RoleType.cs deleted file mode 100644 index 45cbb91eb1f0..000000000000 --- a/dotnet/src/Functions/Functions.Prompty/Core/Types/RoleType.cs +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace Microsoft.SemanticKernel.Prompty.Core; - -internal enum RoleType -{ - assistant, - function, - system, - tool, - user, -} diff --git a/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj b/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj index 7a63018ef572..44ffa76868dc 100644 --- a/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj +++ b/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj @@ -18,7 +18,7 @@ - + \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.Prompty/KernelFunctionPrompty.cs b/dotnet/src/Functions/Functions.Prompty/KernelFunctionPrompty.cs index 003811934181..55a74985f4be 100644 --- a/dotnet/src/Functions/Functions.Prompty/KernelFunctionPrompty.cs +++ b/dotnet/src/Functions/Functions.Prompty/KernelFunctionPrompty.cs @@ -1,41 +1,21 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Collections.Generic; -using System.Linq; -using System.Text.RegularExpressions; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.PromptTemplates.Handlebars; using Microsoft.SemanticKernel.PromptTemplates.Liquid; -using Microsoft.SemanticKernel.Prompty.Core; -using YamlDotNet.Serialization; +using PromptyCore = Prompty.Core; namespace Microsoft.SemanticKernel.Prompty; /// /// Factory methods for creating instances. /// -public static partial class KernelFunctionPrompty +public static class KernelFunctionPrompty { /// Default template factory to use when none is provided. internal static readonly AggregatorPromptTemplateFactory s_defaultTemplateFactory = - new(new LiquidPromptTemplateFactory(), new HandlebarsPromptTemplateFactory()); - - private const string PromptyPattern = /* lang=regex */ """ - ^---\s*$\n # Start of YAML front matter, a line beginning with "---" followed by optional whitespace - (?
.*?) # Capture the YAML front matter, everything up to the next "---" line - ^---\s*$\n # End of YAML front matter, a line beginning with "---" followed by optional whitespace - (?.*) # Capture the content after the YAML front matter - """; - - /// Regex for parsing the YAML frontmatter and content from the prompty template. -#if NET - [GeneratedRegex(PromptyPattern, RegexOptions.Multiline | RegexOptions.Singleline | RegexOptions.IgnorePatternWhitespace)] - private static partial Regex PromptyRegex(); -#else - private static Regex PromptyRegex() => s_promptyRegex; - private static readonly Regex s_promptyRegex = new(PromptyPattern, RegexOptions.Multiline | RegexOptions.Singleline | RegexOptions.IgnorePatternWhitespace | RegexOptions.Compiled); -#endif + new(new LiquidPromptTemplateFactory(), new HandlebarsPromptTemplateFactory(), new KernelPromptTemplateFactory()); /// /// Creates a instance for a prompt function using the specified markdown text. @@ -71,146 +51,56 @@ public static PromptTemplateConfig ToPromptTemplateConfig(string promptyTemplate { Verify.NotNullOrWhiteSpace(promptyTemplate); - // Step 1: - // Create PromptTemplateConfig from text. - // Retrieve the header, which is in yaml format and put between --- - // e.g - // file: chat.prompty - // --- - // name: Contoso Chat Prompt - // description: A retail assistant for Contoso Outdoors products retailer. - // authors: - // - XXXX - // model: - // api: chat - // configuration: - // type: azure_openai - // azure_deployment: gpt-35-turbo - // api_version: 2023-07-01-preview - // parameters: - // tools_choice: auto - // tools: - // -type: function - // function: - // name: test - // description: test function - // parameters: - // properties: - // location: - // description: The city and state or city and country, e.g.San Francisco, CA - // or Tokyo, Japan - // --- - // ... (rest of the prompty content) - - // Parse the YAML frontmatter and content from the prompty template - Match m = PromptyRegex().Match(promptyTemplate); - if (!m.Success) - { - throw new ArgumentException("Invalid prompty template. Header and content could not be parsed."); - } + PromptyCore.Prompty prompty = PromptyCore.Prompty.Load(promptyTemplate, []); - var header = m.Groups["header"].Value; - var content = m.Groups["content"].Value; - - var prompty = new DeserializerBuilder().Build().Deserialize(header) ?? - throw new ArgumentException("Invalid prompty template. Header could not be parsed."); - - // Step 2: - // Create a prompt template config from the prompty data. var promptTemplateConfig = new PromptTemplateConfig { - Name = prompty.Name, // TODO: sanitize name + Name = prompty.Name, Description = prompty.Description, - Template = content, + Template = prompty.Content.ToString() ?? string.Empty, }; PromptExecutionSettings? defaultExecutionSetting = null; - if (prompty.Model?.ModelConfiguration?.ModelType is ModelType.azure_openai or ModelType.openai) + if (prompty.Model?.Parameters?.Items is not null && prompty.Model.Parameters.Items.Count > 0) { - defaultExecutionSetting = new PromptExecutionSettings + defaultExecutionSetting = new PromptExecutionSettings() { - ModelId = prompty.Model.ModelConfiguration.ModelType is ModelType.azure_openai ? - prompty.Model.ModelConfiguration.AzureDeployment : - prompty.Model.ModelConfiguration.Name + ServiceId = prompty.Model.Parameters.Items.TryGetValue("service_id", out var serviceId) && serviceId is string serviceIdStr ? 
serviceIdStr : null, + ModelId = prompty.Model.Parameters.Items.TryGetValue("model_id", out var modelId) && modelId is string modelIdStr ? modelIdStr : null, + ExtensionData = prompty.Model.Parameters.Items }; - - var extensionData = new Dictionary(); - - if (prompty.Model?.Parameters?.Temperature is double temperature) - { - extensionData.Add("temperature", temperature); - } - - if (prompty.Model?.Parameters?.TopP is double topP) - { - extensionData.Add("top_p", topP); - } - - if (prompty.Model?.Parameters?.MaxTokens is int maxTokens) - { - extensionData.Add("max_tokens", maxTokens); - } - - if (prompty.Model?.Parameters?.Seed is int seed) - { - extensionData.Add("seed", seed); - } - - if (prompty.Model?.Parameters?.FrequencyPenalty is double frequencyPenalty) - { - extensionData.Add("frequency_penalty", frequencyPenalty); - } - - if (prompty.Model?.Parameters?.PresencePenalty is double presencePenalty) - { - extensionData.Add("presence_penalty", presencePenalty); - } - - if (prompty.Model?.Parameters?.Stop is List stop) - { - extensionData.Add("stop_sequences", stop); - } - - if (prompty.Model?.Parameters?.ResponseFormat?.Type == "json_object") - { - extensionData.Add("response_format", "json_object"); - } - - defaultExecutionSetting.ExtensionData = extensionData; promptTemplateConfig.AddExecutionSettings(defaultExecutionSetting); } - // Step 3: // Add input and output variables. if (prompty.Inputs is not null) { foreach (var input in prompty.Inputs) { - if (input.Value is string description) + if (input.Items.TryGetValue("name", out var value) && value is string name) { + string description = input.Items.TryGetValue("description", out var desc) && desc is string descStr ? descStr : string.Empty; promptTemplateConfig.InputVariables.Add(new() { - Name = input.Key, + Name = name, Description = description, }); } } } - if (prompty.Outputs is not null) { // PromptTemplateConfig supports only a single output variable. If the prompty template // contains one and only one, use it. Otherwise, ignore any outputs. - if (prompty.Outputs.Count == 1 && - prompty.Outputs.First().Value is string description) + if (prompty.Outputs.Length == 1 && + prompty.Outputs[0].Items.TryGetValue("description", out var value) && value is string description) { promptTemplateConfig.OutputVariable = new() { Description = description }; } } - // Step 4: // Update template format. If not provided, use Liquid as default. - promptTemplateConfig.TemplateFormat = prompty.Template ?? LiquidPromptTemplateFactory.LiquidTemplateFormat; + promptTemplateConfig.TemplateFormat = prompty.Template?.Type ?? 
LiquidPromptTemplateFactory.LiquidTemplateFormat; return promptTemplateConfig; } diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/CopilotAgentPluginKernelExtensionsTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/CopilotAgentPluginKernelExtensionsTests.cs index e55e478c25a2..34abf6f5631e 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/CopilotAgentPluginKernelExtensionsTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/CopilotAgentPluginKernelExtensionsTests.cs @@ -23,7 +23,7 @@ public async Task ItCanImportPluginFromCopilotAgentPluginAsync() // Assert Assert.NotNull(plugin); Assert.Equal(2, plugin.FunctionCount); - Assert.Equal(683, plugin["me_CreateMessages"].Description.Length); + Assert.Equal(411, plugin["me_sendMail"].Description.Length); Assert.Equal(1000, plugin["me_ListMessages"].Description.Length); } } diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/OpenApiTypeConverterTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/OpenApiTypeConverterTests.cs index 9331bb0b55a2..1982dd1a5a59 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/OpenApiTypeConverterTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/OpenApiTypeConverterTests.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Globalization; +using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Plugins.OpenApi; using Microsoft.VisualBasic; using Xunit; @@ -112,4 +113,50 @@ public void ItShouldConvertCollections() Assert.Equal("[1,2,3]", OpenApiTypeConverter.Convert("id", "array", "[1, 2, 3]").ToJsonString()); } + + [Fact] + public void ItShouldConvertWithNoTypeAndNoSchema() + { + // Act + var result = OpenApiTypeConverter.Convert("lat", null!, 51.8985136); + + // Assert + Assert.Equal(51.8985136, result.GetValue()); + } + + [Fact] + public void ItShouldConvertWithNoTypeAndValidSchema() + { + // Arrange + var schema = KernelJsonSchema.Parse( + """ + { + "type": "number", + "format": "double", + "nullable": false + } + """); + + // Act + var result = OpenApiTypeConverter.Convert("lat", null!, 51.8985136, schema); + + // Assert + Assert.Equal(51.8985136, result.GetValue()); + } + + [Fact] + public void ItShouldThrowExceptionWhenNoTypeAndInvalidSchema() + { + // Arrange + var schema = KernelJsonSchema.Parse( + """ + { + "type": "boolean", + "nullable": false + } + """); + + // Act & Assert + Assert.Throws(() => OpenApiTypeConverter.Convert("lat", null!, 51.8985136, schema)); + } } diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-apiplugin.json b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-apiplugin.json index 7994cc32ce9e..ba3827350891 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-apiplugin.json +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-apiplugin.json @@ -1,4 +1,4 @@ -{ +{ "$schema": "https://developer.microsoft.com/json-schemas/copilot/plugin/v2.1/schema.json", "schema_version": "v2.1", "name_for_human": "OData Service for namespace microsoft.graph", @@ -12,14 +12,14 @@ "text": "List messages" }, { - "text": "Create message" + "text": "Send an email from the current user's mailbox" } ] }, "functions": [ { - "name": "me_CreateMessages", - "description": "Create a draft of a new message in either JSON or MIME format. 
When using JSON format, you can:\n- Include an attachment to the message.\n- Update the draft later to add content to the body or change other message properties. When using MIME format:\n- Provide the applicable Internet message headers and the MIME content, all encoded in base64 format in the request body.\n- /* Add any attachments and S/MIME properties to the MIME content. By default, this operation saves the draft in the Drafts folder. Send the draft message in a subsequent operation. Alternatively, send a new message in a single operation, or create a draft to forward, reply and reply-all to an existing message." + "name": "me_sendMail", + "description": "Send the message specified in the request body using either JSON or MIME format. When using JSON format, you can include a file attachment in the same sendMail action call. When using MIME format: This method saves the message in the Sent Items folder. Alternatively, create a draft message to send later. To learn more about the steps involved in the backend before a mail is delivered to recipients, see here." }, { "name": "me_ListMessages", @@ -35,10 +35,7 @@ "spec": { "url": "messages-openapi.yml" }, - "run_for_functions": [ - "me_ListMessages", - "me_CreateMessages" - ] + "run_for_functions": ["me_ListMessages", "me_sendMail"] } ] -} \ No newline at end of file +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-openapi.yml b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-openapi.yml index 127ceff0eaa2..322b38a9e5a9 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-openapi.yml +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-openapi.yml @@ -1,4 +1,4 @@ -openapi: 3.0.1 +openapi: 3.0.1 info: title: OData Service for namespace microsoft.graph - Subset description: This OData service is located at https://graph.microsoft.com/v1.0 @@ -10,8 +10,8 @@ paths: get: tags: - me.message - summary: List messages - description: 'Get the messages in the signed-in user''s mailbox (including the Deleted Items and Clutter folders). Depending on the page size and mailbox data, getting messages from a mailbox can incur multiple requests. The default page size is 10 messages. Use $top to customize the page size, within the range of 1 and 1000. To improve the operation response time, use $select to specify the exact properties you need; see example 1 below. Fine-tune the values for $select and $top, especially when you must use a larger page size, as returning a page with hundreds of messages each with a full response payload may trigger the gateway timeout (HTTP 504). To get the next page of messages, simply apply the entire URL returned in @odata.nextLink to the next get-messages request. This URL includes any query parameters you may have specified in the initial request. Do not try to extract the $skip value from the @odata.nextLink URL to manipulate responses. This API uses the $skip value to keep count of all the items it has gone through in the user''s mailbox to return a page of message-type items. It''s therefore possible that even in the initial response, the $skip value is larger than the page size. For more information, see Paging Microsoft Graph data in your app. Currently, this operation returns message bodies in only HTML format. 
There are two scenarios where an app can get messages in another user''s mail folder:' + summary: Get the messages in the signed-in user\u0026apos;s mailbox + description: Get the messages in the signed-in user\u0026apos;s mailbox (including the Deleted Items and Clutter folders). Depending on the page size and mailbox data, getting messages from a mailbox can incur multiple requests. The default page size is 10 messages. Use $top to customize the page size, within the range of 1 and 1000. To improve the operation response time, use $select to specify the exact properties you need; see example 1 below. Fine-tune the values for $select and $top, especially when you must use a larger page size, as returning a page with hundreds of messages each with a full response payload may trigger the gateway timeout (HTTP 504). To get the next page of messages, simply apply the entire URL returned in @odata.nextLink to the next get-messages request. This URL includes any query parameters you may have specified in the initial request. Do not try to extract the $skip value from the @odata.nextLink URL to manipulate responses. This API uses the $skip value to keep count of all the items it has gone through in the user\u0026apos;s mailbox to return a page of message-type items. It\u0026apos;s therefore possible that even in the initial response, the $skip value is larger than the page size. For more information, see Paging Microsoft Graph data in your app. Currently, this operation returns message bodies in only HTML format. There are two scenarios where an app can get messages in another user\u0026apos;s mail folder operationId: me_ListMessages parameters: - name: includeHiddenMessages @@ -63,26 +63,18 @@ paths: nextLinkName: '@odata.nextLink' operationName: listMore itemName: value + /me/sendMail: post: tags: - - me.message - summary: Create message - description: "Create a draft of a new message in either JSON or MIME format. When using JSON format, you can:\n- Include an attachment to the message.\n- Update the draft later to add content to the body or change other message properties. When using MIME format:\n- Provide the applicable Internet message headers and the MIME content, all encoded in base64 format in the request body.\n- /* Add any attachments and S/MIME properties to the MIME content. By default, this operation saves the draft in the Drafts folder. Send the draft message in a subsequent operation. Alternatively, send a new message in a single operation, or create a draft to forward, reply and reply-all to an existing message." - operationId: me_CreateMessages + - me.user.Actions + summary: Invoke action sendMail + description: 'Send the message specified in the request body using either JSON or MIME format. When using JSON format, you can include a file attachment in the same sendMail action call. When using MIME format: This method saves the message in the Sent Items folder. Alternatively, create a draft message to send later. To learn more about the steps involved in the backend before a mail is delivered to recipients, see here.' + operationId: me_sendMail requestBody: - description: New navigation property - content: - application/json: - schema: - $ref: '#/components/schemas/microsoft.graph.message' - required: true + $ref: '#/components/requestBodies/sendMailRequestBody' responses: - 2XX: - description: Created navigation property. 
- content: - application/json: - schema: - $ref: '#/components/schemas/microsoft.graph.message' + '204': + description: Success components: schemas: microsoft.graph.message: @@ -504,3 +496,18 @@ components: explode: false schema: type: boolean + requestBodies: + sendMailRequestBody: + description: Action parameters + content: + application/json: + schema: + type: object + properties: + Message: + $ref: '#/components/schemas/microsoft.graph.message' + SaveToSentItems: + type: boolean + default: false + nullable: true + required: true diff --git a/dotnet/src/IntegrationTests/Agents/BedrockAgentTests.cs b/dotnet/src/IntegrationTests/Agents/BedrockAgentTests.cs new file mode 100644 index 000000000000..1e4363f21ce8 --- /dev/null +++ b/dotnet/src/IntegrationTests/Agents/BedrockAgentTests.cs @@ -0,0 +1,238 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Threading.Tasks; +using Amazon.BedrockAgent; +using Amazon.BedrockAgent.Model; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.Bedrock; +using Microsoft.SemanticKernel.Agents.Bedrock.Extensions; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Agents; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. + +public sealed class BedrockAgentTests : IDisposable +{ + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + private readonly AmazonBedrockAgentClient _client = new(); + + /// + /// Integration test for invoking a . + /// + [Theory(Skip = "This test is for manual verification.")] + [InlineData("Why is the sky blue in one sentence?")] + public async Task InvokeTestAsync(string input) + { + var agentModel = await this._client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest()); + var bedrockAgent = new BedrockAgent(agentModel, this._client); + + try + { + await this.ExecuteAgentAsync(bedrockAgent, input); + } + finally + { + await this._client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id }); + } + } + + /// + /// Integration test for invoking a with streaming. + /// + [Theory(Skip = "This test is for manual verification.")] + [InlineData("Why is the sky blue in one sentence?")] + public async Task InvokeStreamingTestAsync(string input) + { + var agentModel = await this._client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest()); + var bedrockAgent = new BedrockAgent(agentModel, this._client); + + try + { + await this.ExecuteAgentStreamingAsync(bedrockAgent, input); + } + finally + { + await this._client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id }); + } + } + + /// + /// Integration test for invoking a with code interpreter. 
+ /// + [Theory(Skip = "This test is for manual verification.")] + [InlineData(@"Create a bar chart for the following data: +Panda 5 +Tiger 8 +Lion 3 +Monkey 6 +Dolphin 2")] + public async Task InvokeWithCodeInterpreterTestAsync(string input) + { + var agentModel = await this._client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest()); + var bedrockAgent = new BedrockAgent(agentModel, this._client); + await bedrockAgent.CreateCodeInterpreterActionGroupAsync(); + + try + { + var responses = await this.ExecuteAgentAsync(bedrockAgent, input); + BinaryContent? binaryContent = null; + foreach (var response in responses) + { + if (binaryContent == null && response.Items.Count > 0) + { + binaryContent = response.Items.OfType().FirstOrDefault(); + } + } + Assert.NotNull(binaryContent); + } + finally + { + await this._client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id }); + } + } + + /// + /// Integration test for invoking a with Kernel functions. + /// + [Theory(Skip = "This test is for manual verification.")] + [InlineData("What is the current weather in Seattle and what is the weather forecast in Seattle?", "weather")] + public async Task InvokeWithKernelFunctionTestAsync(string input, string expected) + { + Kernel kernel = new(); + kernel.Plugins.Add(KernelPluginFactory.CreateFromType()); + + var agentModel = await this._client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest()); + var bedrockAgent = new BedrockAgent(agentModel, this._client) + { + Kernel = kernel, + }; + await bedrockAgent.CreateKernelFunctionActionGroupAsync(); + + try + { + await this.ExecuteAgentAsync(bedrockAgent, input, expected); + } + finally + { + await this._client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id }); + } + } + + /// + /// Executes a with the specified input and expected output. + /// The output of the agent will be verified against the expected output. + /// If the expected output is not provided, the verification will pass as long as the output is not null or empty. + /// + /// The agent to execute. + /// The input to provide to the agent. + /// The expected output from the agent. + /// The chat messages returned by the agent for additional verification. + private async Task> ExecuteAgentAsync(BedrockAgent agent, string input, string? expected = null) + { + var responses = agent.InvokeAsync(BedrockAgent.CreateSessionId(), input, null, default); + string responseContent = string.Empty; + List chatMessages = new(); + await foreach (var response in responses) + { + // Non-streaming invoke will only return one response. + responseContent = response.Content ?? string.Empty; + chatMessages.Add(response); + } + + if (expected != null) + { + Assert.Contains(expected, responseContent); + } + else + { + Assert.False(string.IsNullOrEmpty(responseContent)); + } + + return chatMessages; + } + + /// + /// Executes a with the specified input and expected output using streaming. + /// The output of the agent will be verified against the expected output. + /// If the expected output is not provided, the verification will pass as long as the output is not null or empty. + /// + /// The agent to execute. + /// The input to provide to the agent. + /// The expected output from the agent. + /// The chat messages returned by the agent for additional verification. + private async Task> ExecuteAgentStreamingAsync(BedrockAgent agent, string input, string? 
expected = null) + { + var responses = agent.InvokeStreamingAsync(BedrockAgent.CreateSessionId(), input, null, default); + string responseContent = string.Empty; + List chatMessages = new(); + await foreach (var response in responses) + { + responseContent = response.Content ?? string.Empty; + chatMessages.Add(response); + } + + if (expected != null) + { + Assert.Contains(expected, responseContent); + } + else + { + Assert.False(string.IsNullOrEmpty(responseContent)); + } + + return chatMessages; + } + + private const string AgentName = "SKIntegrationTestAgent"; + private const string AgentDescription = "A helpful assistant who helps users find information."; + private const string AgentInstruction = "You're a helpful assistant who helps users find information."; + private CreateAgentRequest GetCreateAgentRequest() + { + BedrockAgentConfiguration bedrockAgentSettings = this._configuration.GetSection("BedrockAgent").Get()!; + Assert.NotNull(bedrockAgentSettings); + + return new() + { + AgentName = AgentName, + Description = AgentDescription, + Instruction = AgentInstruction, + AgentResourceRoleArn = bedrockAgentSettings.AgentResourceRoleArn, + FoundationModel = bedrockAgentSettings.FoundationModel, + }; + } + + public void Dispose() + { + this._client.Dispose(); + } + +#pragma warning disable CA1812 // Avoid uninstantiated internal classes + private sealed class WeatherPlugin + { + [KernelFunction, Description("Provides realtime weather information.")] + public string Current([Description("The location to get the weather for.")] string location) + { + return $"The current weather in {location} is 72 degrees."; + } + + [KernelFunction, Description("Forecast weather information.")] + public string Forecast([Description("The location to get the weather for.")] string location) + { + return $"The forecast for {location} is 75 degrees tomorrow."; + } + } +#pragma warning restore CA1812 // Avoid uninstantiated internal classes +} diff --git a/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs b/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs index 9be5610f2abd..dd39b660966d 100644 --- a/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs +++ b/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs @@ -12,6 +12,7 @@ using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Assistants; using SemanticKernel.IntegrationTests.TestSettings; using xRetry; using Xunit; @@ -71,7 +72,7 @@ await this.VerifyAgentExecutionAsync( private async Task VerifyAgentExecutionAsync( Kernel chatCompletionKernel, - OpenAIClientProvider config, + OpenAIClientProvider clientProvider, string modelName, bool useNewFunctionCallingModel) { @@ -94,16 +95,8 @@ private async Task VerifyAgentExecutionAsync( chatAgent.Kernel.Plugins.Add(plugin); // Configure assistant agent with the plugin. - OpenAIAssistantAgent assistantAgent = - await OpenAIAssistantAgent.CreateAsync( - config, - new(modelName) - { - Name = "Assistant", - Instructions = "Answer questions about the menu." 
- }, - new Kernel()); - assistantAgent.Kernel.Plugins.Add(plugin); + Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(modelName, instructions: "Answer questions about the menu."); + OpenAIAssistantAgent assistantAgent = new(definition, clientProvider.AssistantClient, [plugin]); // Act & Assert try @@ -114,7 +107,7 @@ await OpenAIAssistantAgent.CreateAsync( } finally { - await assistantAgent.DeleteAsync(); + await clientProvider.AssistantClient.DeleteAssistantAsync(assistantAgent.Id); } } diff --git a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs index aa5fcbeef785..ad63eab6b795 100644 --- a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs +++ b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs @@ -12,6 +12,7 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; using OpenAI.Files; using OpenAI.VectorStores; using SemanticKernel.IntegrationTests.TestSettings; @@ -93,11 +94,10 @@ await this.ExecuteStreamingAgentAsync( [InlineData("What is the special soup?", "Clam Chowder")] public async Task AzureOpenAIAssistantAgentStreamingAsync(string input, string expectedAnswerContains) { - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(azureOpenAIConfiguration); + AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration(); await this.ExecuteStreamingAgentAsync( - OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)), + CreateClientProvider(azureOpenAIConfiguration), azureOpenAIConfiguration.ChatDeploymentName!, input, expectedAnswerContains); @@ -110,27 +110,23 @@ await this.ExecuteStreamingAgentAsync( [RetryFact(typeof(HttpOperationException))] public async Task AzureOpenAIAssistantAgentFunctionCallResultAsync() { - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)), - new(azureOpenAIConfiguration.ChatDeploymentName!), - new Kernel()); + AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration(); + OpenAIClientProvider clientProvider = CreateClientProvider(azureOpenAIConfiguration); + Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(azureOpenAIConfiguration.ChatDeploymentName!); + OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient); - string threadId = await agent.CreateThreadAsync(); + AssistantThread thread = await clientProvider.AssistantClient.CreateThreadAsync(); ChatMessageContent functionResultMessage = new(AuthorRole.Assistant, [new FunctionResultContent("mock-function", result: "A result value")]); try { - await agent.AddChatMessageAsync(threadId, functionResultMessage); - var messages = await agent.GetThreadMessagesAsync(threadId).ToArrayAsync(); + await agent.AddChatMessageAsync(thread.Id, functionResultMessage); + var messages = await agent.GetThreadMessagesAsync(thread.Id).ToArrayAsync(); Assert.Single(messages); } finally { - await agent.DeleteThreadAsync(threadId); - await agent.DeleteAsync(); + await clientProvider.AssistantClient.DeleteThreadAsync(thread.Id); + await 
clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id); } } @@ -141,33 +137,28 @@ await OpenAIAssistantAgent.CreateAsync( [RetryFact(typeof(HttpOperationException))] public async Task AzureOpenAIAssistantAgentTokensAsync() { - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(azureOpenAIConfiguration); + AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration(); + OpenAIClientProvider clientProvider = CreateClientProvider(azureOpenAIConfiguration); + Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(azureOpenAIConfiguration.ChatDeploymentName!, instructions: "Repeat the user all of the user messages"); + OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient) + { + RunOptions = new() + { + MaxOutputTokenCount = 16, + } + }; - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)), - new(azureOpenAIConfiguration.ChatDeploymentName!) - { - Instructions = "Repeat the user all of the user messages", - ExecutionOptions = new() - { - MaxCompletionTokens = 16, - } - }, - new Kernel()); - - string threadId = await agent.CreateThreadAsync(); + AssistantThread thread = await clientProvider.AssistantClient.CreateThreadAsync(); ChatMessageContent functionResultMessage = new(AuthorRole.User, "A long time ago there lived a king who was famed for his wisdom through all the land. Nothing was hidden from him, and it seemed as if news of the most secret things was brought to him through the air. But he had a strange custom; every day after dinner, when the table was cleared, and no one else was present, a trusty servant had to bring him one more dish. 
It was covered, however, and even the servant did not know what was in it, neither did anyone know, for the king never took off the cover to eat of it until he was quite alone."); try { - await agent.AddChatMessageAsync(threadId, functionResultMessage); - await Assert.ThrowsAsync(() => agent.InvokeAsync(threadId).ToArrayAsync().AsTask()); + await agent.AddChatMessageAsync(thread.Id, functionResultMessage); + await Assert.ThrowsAsync(() => agent.InvokeAsync(thread.Id).ToArrayAsync().AsTask()); } finally { - await agent.DeleteThreadAsync(threadId); - await agent.DeleteAsync(); + await clientProvider.AssistantClient.DeleteThreadAsync(thread.Id); + await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id); } } @@ -178,48 +169,45 @@ await OpenAIAssistantAgent.CreateAsync( [RetryFact(typeof(HttpOperationException))] public async Task AzureOpenAIAssistantAgentAdditionalMessagesAsync() { - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)), - new(azureOpenAIConfiguration.ChatDeploymentName!), - new Kernel()); + AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration(); + OpenAIClientProvider clientProvider = CreateClientProvider(azureOpenAIConfiguration); + Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(azureOpenAIConfiguration.ChatDeploymentName!); + OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient); - OpenAIThreadCreationOptions threadOptions = new() + ThreadCreationOptions threadOptions = new() { - Messages = [ - new ChatMessageContent(AuthorRole.User, "Hello"), - new ChatMessageContent(AuthorRole.Assistant, "How may I help you?"), - ] + InitialMessages = + { + new ChatMessageContent(AuthorRole.User, "Hello").ToThreadInitializationMessage(), + new ChatMessageContent(AuthorRole.User, "How may I help you?").ToThreadInitializationMessage(), + } }; - string threadId = await agent.CreateThreadAsync(threadOptions); + AssistantThread thread = await clientProvider.AssistantClient.CreateThreadAsync(threadOptions); try { - var messages = await agent.GetThreadMessagesAsync(threadId).ToArrayAsync(); + var messages = await agent.GetThreadMessagesAsync(thread.Id).ToArrayAsync(); Assert.Equal(2, messages.Length); - OpenAIAssistantInvocationOptions invocationOptions = new() + RunCreationOptions invocationOptions = new() { - AdditionalMessages = [ - new ChatMessageContent(AuthorRole.User, "This is my real question...in three parts:"), - new ChatMessageContent(AuthorRole.User, "Part 1"), - new ChatMessageContent(AuthorRole.User, "Part 2"), - new ChatMessageContent(AuthorRole.User, "Part 3"), - ] + AdditionalMessages = { + new ChatMessageContent(AuthorRole.User, "This is my real question...in three parts:").ToThreadInitializationMessage(), + new ChatMessageContent(AuthorRole.User, "Part 1").ToThreadInitializationMessage(), + new ChatMessageContent(AuthorRole.User, "Part 2").ToThreadInitializationMessage(), + new ChatMessageContent(AuthorRole.User, "Part 3").ToThreadInitializationMessage(), + } }; - messages = await agent.InvokeAsync(threadId, invocationOptions).ToArrayAsync(); + messages = await agent.InvokeAsync(thread.Id, invocationOptions).ToArrayAsync(); Assert.Single(messages); - messages = await agent.GetThreadMessagesAsync(threadId).ToArrayAsync(); + 
messages = await agent.GetThreadMessagesAsync(thread.Id).ToArrayAsync(); Assert.Equal(7, messages.Length); } finally { - await agent.DeleteThreadAsync(threadId); - await agent.DeleteAsync(); + await clientProvider.AssistantClient.DeleteThreadAsync(thread.Id); + await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id); } } @@ -230,23 +218,18 @@ await OpenAIAssistantAgent.CreateAsync( [Fact] public async Task AzureOpenAIAssistantAgentStreamingFileSearchAsync() { - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - OpenAIClientProvider provider = OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)); - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - provider, - new(azureOpenAIConfiguration.ChatDeploymentName!), - new Kernel()); + AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration(); + OpenAIClientProvider clientProvider = CreateClientProvider(azureOpenAIConfiguration); + Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(azureOpenAIConfiguration.ChatDeploymentName!); + OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient); // Upload file - Using a table of fictional employees. - OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient(); + OpenAIFileClient fileClient = clientProvider.Client.GetOpenAIFileClient(); await using Stream stream = File.OpenRead("TestData/employees.pdf")!; OpenAIFile fileInfo = await fileClient.UploadFileAsync(stream, "employees.pdf", FileUploadPurpose.Assistants); // Create a vector-store - VectorStoreClient vectorStoreClient = provider.Client.GetVectorStoreClient(); + VectorStoreClient vectorStoreClient = clientProvider.Client.GetVectorStoreClient(); CreateVectorStoreOperation result = await vectorStoreClient.CreateVectorStoreAsync(waitUntilCompleted: false, new VectorStoreCreationOptions() @@ -254,26 +237,26 @@ await vectorStoreClient.CreateVectorStoreAsync(waitUntilCompleted: false, FileIds = { fileInfo.Id } }); - string threadId = await agent.CreateThreadAsync(); + AssistantThread thread = await clientProvider.AssistantClient.CreateThreadAsync(); try { - await agent.AddChatMessageAsync(threadId, new(AuthorRole.User, "Who works in sales?")); + await agent.AddChatMessageAsync(thread.Id, new(AuthorRole.User, "Who works in sales?")); ChatHistory messages = []; - var chunks = await agent.InvokeStreamingAsync(threadId, messages: messages).ToArrayAsync(); + var chunks = await agent.InvokeStreamingAsync(thread.Id, messages: messages).ToArrayAsync(); Assert.NotEmpty(chunks); Assert.Single(messages); } finally { - await agent.DeleteThreadAsync(threadId); - await agent.DeleteAsync(); + await clientProvider.AssistantClient.DeleteThreadAsync(thread.Id); + await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id); await vectorStoreClient.DeleteVectorStoreAsync(result.VectorStoreId); await fileClient.DeleteFileAsync(fileInfo.Id); } } private async Task ExecuteAgentAsync( - OpenAIClientProvider config, + OpenAIClientProvider clientProvider, string modelName, string input, string expected) @@ -282,16 +265,8 @@ private async Task ExecuteAgentAsync( Kernel kernel = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromType(); - kernel.Plugins.Add(plugin); - - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - config, - new(modelName) - { - Instructions = "Answer questions about the menu.", 
- }, - kernel); + Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(modelName, instructions: "Answer questions about the menu."); + OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient, [plugin]); try { @@ -314,12 +289,12 @@ await OpenAIAssistantAgent.CreateAsync( } finally { - await agent.DeleteAsync(); + await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id); } } private async Task ExecuteStreamingAgentAsync( - OpenAIClientProvider config, + OpenAIClientProvider clientProvider, string modelName, string input, string expected) @@ -328,16 +303,8 @@ private async Task ExecuteStreamingAgentAsync( Kernel kernel = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromType(); - kernel.Plugins.Add(plugin); - - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - config, - new(modelName) - { - Instructions = "Answer questions about the menu.", - }, - kernel); + Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(modelName, instructions: "Answer questions about the menu."); + OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient, [plugin]); AgentGroupChat chat = new(); chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); @@ -372,6 +339,18 @@ private static void AssertMessageValid(ChatMessageContent message) Assert.Equal(string.IsNullOrEmpty(message.AuthorName) ? AuthorRole.User : AuthorRole.Assistant, message.Role); } + private AzureOpenAIConfiguration ReadAzureConfiguration() + { + AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + return azureOpenAIConfiguration; + } + + private static OpenAIClientProvider CreateClientProvider(AzureOpenAIConfiguration azureOpenAIConfiguration) + { + return OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)); + } + public sealed class MenuPlugin { [KernelFunction, Description("Provides a list of specials from the menu.")] diff --git a/dotnet/src/IntegrationTests/BaseIntegrationTest.cs b/dotnet/src/IntegrationTests/BaseIntegrationTest.cs index c4fda5081e39..d97b6787a50d 100644 --- a/dotnet/src/IntegrationTests/BaseIntegrationTest.cs +++ b/dotnet/src/IntegrationTests/BaseIntegrationTest.cs @@ -22,11 +22,11 @@ protected IKernelBuilder CreateKernelBuilder() o.Retry.ShouldHandle = args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.TooManyRequests); o.CircuitBreaker = new HttpCircuitBreakerStrategyOptions { - SamplingDuration = TimeSpan.FromSeconds(40.0), // The duration should be least double of an attempt timeout + SamplingDuration = TimeSpan.FromSeconds(60.0), // The duration should be least double of an attempt timeout }; o.AttemptTimeout = new HttpTimeoutStrategyOptions { - Timeout = TimeSpan.FromSeconds(20.0) // Doubling the default 10s timeout + Timeout = TimeSpan.FromSeconds(30.0) }; }); }); diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionStreamingTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionStreamingTests.cs index 1540ff288197..bc706f5661ff 100644 --- a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionStreamingTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionStreamingTests.cs @@ -10,6 +10,7 @@ using Microsoft.SemanticKernel.ChatCompletion; using 
Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.TextGeneration; +using OpenAI.Chat; using SemanticKernel.IntegrationTests.TestSettings; using Xunit; @@ -54,12 +55,22 @@ public async Task ChatCompletionShouldUseChatHistoryAndReturnMetadataAsync() var stringBuilder = new StringBuilder(); var metadata = new Dictionary(); + var hasUsage = false; - // Act + // Act & Assert await foreach (var update in chatCompletion.GetStreamingChatMessageContentsAsync(chatHistory, null, kernel)) { stringBuilder.Append(update.Content); + var openAIUpdate = Assert.IsType(update.InnerContent); + Assert.NotNull(openAIUpdate); + + if (openAIUpdate.Usage is not null) + { + Assert.True(openAIUpdate.Usage.TotalTokenCount > 0); + hasUsage = true; + } + foreach (var key in update.Metadata!.Keys) { if (!metadata.TryGetValue(key, out object? value) || value is null) @@ -69,7 +80,7 @@ public async Task ChatCompletionShouldUseChatHistoryAndReturnMetadataAsync() } } - // Assert + Assert.True(hasUsage); Assert.Contains("I don't know", stringBuilder.ToString()); Assert.NotNull(metadata); diff --git a/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs index 5732a3e4719a..37a3439bb75b 100644 --- a/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs @@ -3,11 +3,14 @@ using System; using System.IO; using System.Linq; +using System.Net.Http; +using System.Text; using System.Text.Json; using System.Threading.Tasks; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.Google; +using Newtonsoft.Json.Linq; using xRetry; using Xunit; using Xunit.Abstractions; @@ -135,6 +138,61 @@ public async Task ChatGenerationWithSystemMessagesAsync(ServiceType serviceType) Assert.Contains("Roger", response.Content, StringComparison.OrdinalIgnoreCase); } + [RetryTheory] + [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] + public async Task ChatGenerationWithCachedContentAsync(ServiceType serviceType) + { + // Arrange + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Finish this sentence: He knew the sea’s..."); + + // Setup initial cached content + var cachedContentJson = File.ReadAllText(Path.Combine("Resources", "gemini_cached_content.json")) + .Replace("{{project}}", this.VertexAIGetProjectId()) + .Replace("{{location}}", this.VertexAIGetLocation()) + .Replace("{{model}}", this.VertexAIGetGeminiModel()); + + var cachedContentName = string.Empty; + + using (var httpClient = new HttpClient() + { + DefaultRequestHeaders = { Authorization = new("Bearer", this.VertexAIGetBearerKey()) } + }) + { + using (var content = new StringContent(cachedContentJson, Encoding.UTF8, "application/json")) + { + using (var httpResponse = await httpClient.PostAsync( + new Uri($"https://{this.VertexAIGetLocation()}-aiplatform.googleapis.com/v1beta1/projects/{this.VertexAIGetProjectId()}/locations/{this.VertexAIGetLocation()}/cachedContents"), + content)) + { + httpResponse.EnsureSuccessStatusCode(); + + var responseString = await httpResponse.Content.ReadAsStringAsync(); + var responseJson = JObject.Parse(responseString); + + cachedContentName = responseJson?["name"]?.ToString(); + + Assert.NotNull(cachedContentName); + } + } + } + + var sut = this.GetChatService(serviceType, isBeta: true); + + 
// Act + var response = await sut.GetChatMessageContentAsync( + chatHistory, + new GeminiPromptExecutionSettings + { + CachedContent = cachedContentName + }); + + // Assert + Assert.NotNull(response.Content); + this.Output.WriteLine(response.Content); + Assert.Contains("capriciousness", response.Content, StringComparison.OrdinalIgnoreCase); + } + [RetryTheory] [InlineData(ServiceType.GoogleAI, Skip = "This test is for manual verification.")] [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] @@ -270,6 +328,58 @@ public async Task ChatStreamingVisionUriAsync(ServiceType serviceType) Assert.Contains("green", message, StringComparison.OrdinalIgnoreCase); } + [RetryTheory] + [InlineData(ServiceType.GoogleAI, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] + public async Task ChatGenerationAudioBinaryDataAsync(ServiceType serviceType) + { + // Arrange + Memory audio = await File.ReadAllBytesAsync(Path.Combine("TestData", "test_audio.wav")); + var chatHistory = new ChatHistory(); + var messageContent = new ChatMessageContent(AuthorRole.User, items: + [ + new TextContent("Transcribe this audio"), + new AudioContent(audio, "audio/wav") + ]); + chatHistory.Add(messageContent); + + var sut = this.GetChatServiceWithVision(serviceType); + + // Act + var response = await sut.GetChatMessageContentAsync(chatHistory); + + // Assert + Assert.NotNull(response.Content); + this.Output.WriteLine(response.Content); + Assert.Contains("the sun rises", response.Content, StringComparison.OrdinalIgnoreCase); + } + + [RetryTheory] + [InlineData(ServiceType.GoogleAI, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] + public async Task ChatGenerationAudioUriAsync(ServiceType serviceType) + { + // Arrange + Uri audioUri = new("gs://cloud-samples-data/speech/brooklyn_bridge.flac"); // needs setup + var chatHistory = new ChatHistory(); + var messageContent = new ChatMessageContent(AuthorRole.User, items: + [ + new TextContent("Transcribe this audio"), + new AudioContent(audioUri) { MimeType = "audio/flac" } + ]); + chatHistory.Add(messageContent); + + var sut = this.GetChatServiceWithVision(serviceType); + + // Act + var response = await sut.GetChatMessageContentAsync(chatHistory); + + // Assert + Assert.NotNull(response.Content); + this.Output.WriteLine(response.Content); + Assert.Contains("brooklyn bridge", response.Content, StringComparison.OrdinalIgnoreCase); + } + [RetryTheory] [InlineData(ServiceType.GoogleAI, Skip = "Currently GoogleAI always returns zero tokens.")] [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] diff --git a/dotnet/src/IntegrationTests/Connectors/Google/TestsBase.cs b/dotnet/src/IntegrationTests/Connectors/Google/TestsBase.cs index 6b932727f4a6..b6b2e2a6c02a 100644 --- a/dotnet/src/IntegrationTests/Connectors/Google/TestsBase.cs +++ b/dotnet/src/IntegrationTests/Connectors/Google/TestsBase.cs @@ -20,16 +20,18 @@ public abstract class TestsBase(ITestOutputHelper output) protected ITestOutputHelper Output { get; } = output; - protected IChatCompletionService GetChatService(ServiceType serviceType) => serviceType switch + protected IChatCompletionService GetChatService(ServiceType serviceType, bool isBeta = false) => serviceType switch { ServiceType.GoogleAI => new GoogleAIGeminiChatCompletionService( this.GoogleAIGetGeminiModel(), - this.GoogleAIGetApiKey()), + 
this.GoogleAIGetApiKey(),
+            isBeta ? GoogleAIVersion.V1_Beta : GoogleAIVersion.V1),
         ServiceType.VertexAI => new VertexAIGeminiChatCompletionService(
             modelId: this.VertexAIGetGeminiModel(),
             bearerKey: this.VertexAIGetBearerKey(),
             location: this.VertexAIGetLocation(),
-            projectId: this.VertexAIGetProjectId()),
+            projectId: this.VertexAIGetProjectId(),
+            isBeta ? VertexAIVersion.V1_Beta : VertexAIVersion.V1),
         _ => throw new ArgumentOutOfRangeException(nameof(serviceType), serviceType, null)
     };
 
@@ -69,10 +71,10 @@ public enum ServiceType
     private string GoogleAIGetGeminiVisionModel() => this._configuration.GetSection("GoogleAI:Gemini:VisionModelId").Get()!;
     private string GoogleAIGetEmbeddingModel() => this._configuration.GetSection("GoogleAI:EmbeddingModelId").Get()!;
     private string GoogleAIGetApiKey() => this._configuration.GetSection("GoogleAI:ApiKey").Get()!;
-    private string VertexAIGetGeminiModel() => this._configuration.GetSection("VertexAI:Gemini:ModelId").Get()!;
+    internal string VertexAIGetGeminiModel() => this._configuration.GetSection("VertexAI:Gemini:ModelId").Get()!;
     private string VertexAIGetGeminiVisionModel() => this._configuration.GetSection("VertexAI:Gemini:VisionModelId").Get()!;
     private string VertexAIGetEmbeddingModel() => this._configuration.GetSection("VertexAI:EmbeddingModelId").Get()!;
-    private string VertexAIGetBearerKey() => this._configuration.GetSection("VertexAI:BearerKey").Get()!;
-    private string VertexAIGetLocation() => this._configuration.GetSection("VertexAI:Location").Get()!;
-    private string VertexAIGetProjectId() => this._configuration.GetSection("VertexAI:ProjectId").Get()!;
+    internal string VertexAIGetBearerKey() => this._configuration.GetSection("VertexAI:BearerKey").Get()!;
+    internal string VertexAIGetLocation() => this._configuration.GetSection("VertexAI:Location").Get()!;
+    internal string VertexAIGetProjectId() => this._configuration.GetSection("VertexAI:ProjectId").Get()!;
 }
diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs
index 58f3492074a6..a80519c85a57 100644
--- a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs
@@ -46,6 +46,24 @@ public async Task CollectionExistsReturnsCollectionStateAsync(bool createCollect
         }
     }
 
+    [Fact]
+    public async Task CanCreateCollectionWithSpecialCharactersInNameAsync()
+    {
+        // Arrange
+        var sut = fixture.GetCollection>("Special-Char");
+
+        try
+        {
+            // Act
+            await sut.CreateCollectionAsync();
+        }
+        finally
+        {
+            // Cleanup
+            await sut.DeleteCollectionAsync();
+        }
+    }
+
     [Fact]
     public async Task CollectionCanUpsertAndGetAsync()
     {
@@ -84,8 +102,10 @@ public async Task CollectionCanUpsertAndGetAsync()
             Assert.Equal("tag1", fetchedHotel1!.Tags![0]);
             Assert.Equal("tag2", fetchedHotel1!.Tags![1]);
             Assert.Null(fetchedHotel1!.ListInts);
-            Assert.Equal(TruncateMilliseconds(fetchedHotel1.CreatedAt), TruncateMilliseconds(writtenHotel1.CreatedAt));
-            Assert.Equal(TruncateMilliseconds(fetchedHotel1.UpdatedAt), TruncateMilliseconds(writtenHotel1.UpdatedAt));
+
+            // Since these values are updated in the database, they will not match exactly, but should be very close to each other.
+            Assert.True(TruncateMilliseconds(fetchedHotel1.CreatedAt) >= TruncateMilliseconds(writtenHotel1.CreatedAt) && TruncateMilliseconds(fetchedHotel1.CreatedAt) <= TruncateMilliseconds(writtenHotel1.CreatedAt).AddSeconds(1));
+            Assert.True(TruncateMilliseconds(fetchedHotel1.UpdatedAt) >= TruncateMilliseconds(writtenHotel1.UpdatedAt) && TruncateMilliseconds(fetchedHotel1.UpdatedAt) <= TruncateMilliseconds(writtenHotel1.UpdatedAt).AddSeconds(1));
             Assert.NotNull(fetchedHotel2);
             Assert.Equal(2, fetchedHotel2!.HotelId);
@@ -99,8 +119,10 @@ public async Task CollectionCanUpsertAndGetAsync()
             Assert.Equal(2, fetchedHotel2!.ListInts!.Count);
             Assert.Equal(1, fetchedHotel2!.ListInts![0]);
             Assert.Equal(2, fetchedHotel2!.ListInts![1]);
-            Assert.Equal(TruncateMilliseconds(fetchedHotel2.CreatedAt), TruncateMilliseconds(writtenHotel2.CreatedAt));
-            Assert.Equal(TruncateMilliseconds(fetchedHotel2.UpdatedAt), TruncateMilliseconds(writtenHotel2.UpdatedAt));
+
+            // Since these values are updated in the database, they will not match exactly, but should be very close to each other.
+            Assert.True(TruncateMilliseconds(fetchedHotel2.CreatedAt) >= TruncateMilliseconds(writtenHotel2.CreatedAt) && TruncateMilliseconds(fetchedHotel2.CreatedAt) <= TruncateMilliseconds(writtenHotel2.CreatedAt).AddSeconds(1));
+            Assert.True(TruncateMilliseconds(fetchedHotel2.UpdatedAt) >= TruncateMilliseconds(writtenHotel2.UpdatedAt) && TruncateMilliseconds(fetchedHotel2.UpdatedAt) <= TruncateMilliseconds(writtenHotel2.UpdatedAt).AddSeconds(1));
         }
         finally
         {
diff --git a/dotnet/src/IntegrationTests/IntegrationTests.csproj b/dotnet/src/IntegrationTests/IntegrationTests.csproj
index e24215b583d6..06b2e839116b 100644
--- a/dotnet/src/IntegrationTests/IntegrationTests.csproj
+++ b/dotnet/src/IntegrationTests/IntegrationTests.csproj
@@ -74,6 +74,7 @@
 
+
 
@@ -195,4 +196,10 @@
     Always
 
+
+
+
+      Always
+
+
 
\ No newline at end of file
diff --git a/dotnet/src/IntegrationTests/Resources/gemini_cached_content.json b/dotnet/src/IntegrationTests/Resources/gemini_cached_content.json
new file mode 100644
index 000000000000..fa5e4f688efc
--- /dev/null
+++ b/dotnet/src/IntegrationTests/Resources/gemini_cached_content.json
@@ -0,0 +1,22 @@
+{
+  "model": "projects/{{project}}/locations/{{location}}/publishers/google/models/{{model}}",
+  "displayName": "CACHE_DISPLAY_NAME",
+  "contents": [
+    {
+      "role": "assistant",
+      "parts": [
+        {
+          "text": "This is sample text to demonstrate explicit caching."
+        }
+      ]
+    },
+    {
+      "role": "user",
+      "parts": [
+        {
+          "text": "The old lighthouse keeper, Silas, squinted at the churning grey sea, his weathered face mirroring the granite rocks below. He’d seen countless storms, each one a furious dance of wind and wave, but tonight felt different, a simmering unease prickling his skin. The lantern, his steadfast companion, pulsed its rhythmic beam, a fragile defiance against the encroaching darkness. A small boat, barely visible through the swirling mist, was bucking against the tide, its lone mast a broken finger pointing towards the sky. Silas grabbed his oilskins, his movements stiff with age, and descended the winding stairs, his heart thumping a frantic rhythm against his ribs. He knew the sea’s capriciousness, its ability to lull and then lash out with brutal force."
+ } + ] + } + ] +} \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/TestSettings/BedrockAgentConfiguration.cs b/dotnet/src/IntegrationTests/TestSettings/BedrockAgentConfiguration.cs new file mode 100644 index 000000000000..19476f4d72b4 --- /dev/null +++ b/dotnet/src/IntegrationTests/TestSettings/BedrockAgentConfiguration.cs @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace SemanticKernel.IntegrationTests.TestSettings; + +[SuppressMessage("Performance", "CA1812:Internal class that is apparently never instantiated", + Justification = "Configuration classes are instantiated through IConfiguration.")] +internal sealed class BedrockAgentConfiguration(string agentResourceRoleArn, string foundationModel) +{ + public string AgentResourceRoleArn { get; set; } = agentResourceRoleArn; + public string FoundationModel { get; set; } = foundationModel; +} diff --git a/dotnet/src/IntegrationTests/testsettings.json b/dotnet/src/IntegrationTests/testsettings.json index 22c91e9affcc..5dead0d1a7c5 100644 --- a/dotnet/src/IntegrationTests/testsettings.json +++ b/dotnet/src/IntegrationTests/testsettings.json @@ -116,5 +116,9 @@ "ModelId": "gpt-4", "ApiKey": "" } + }, + "BedrockAgent": { + "AgentResourceRoleArn": "", + "FoundationModel": "anthropic.claude-3-haiku-20240307-v1:0" } } \ No newline at end of file diff --git a/dotnet/src/InternalUtilities/agents/AgentUtilities.props b/dotnet/src/InternalUtilities/agents/AgentUtilities.props new file mode 100644 index 000000000000..225ce5a2b745 --- /dev/null +++ b/dotnet/src/InternalUtilities/agents/AgentUtilities.props @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/dotnet/src/Agents/OpenAI/Extensions/AgentExtensions.cs b/dotnet/src/InternalUtilities/agents/Extensions/AgentExtensions.cs similarity index 51% rename from dotnet/src/Agents/OpenAI/Extensions/AgentExtensions.cs rename to dotnet/src/InternalUtilities/agents/Extensions/AgentExtensions.cs index 1844c82ac73f..bf8c993b210e 100644 --- a/dotnet/src/Agents/OpenAI/Extensions/AgentExtensions.cs +++ b/dotnet/src/InternalUtilities/agents/Extensions/AgentExtensions.cs @@ -1,9 +1,13 @@ // Copyright (c) Microsoft. All rights reserved. -namespace Microsoft.SemanticKernel.Agents.OpenAI; + +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel.Agents.Extensions; /// /// Extension methods for . /// +[ExcludeFromCodeCoverage] internal static class AgentExtensions { /// @@ -13,4 +17,13 @@ internal static class AgentExtensions /// The target agent /// The agent name as a non-empty string public static string GetName(this Agent agent) => agent.Name ?? agent.Id; + + /// + /// Provides the display name of the agent. + /// + /// The target agent + /// + /// Currently, it's intended for telemetry purposes only. + /// + public static string GetDisplayName(this Agent agent) => !string.IsNullOrWhiteSpace(agent.Name) ? agent.Name! : "UnnamedAgent"; } diff --git a/dotnet/src/InternalUtilities/agents/Extensions/KernelFunctionMetadataExtensions.cs b/dotnet/src/InternalUtilities/agents/Extensions/KernelFunctionMetadataExtensions.cs new file mode 100644 index 000000000000..43a879d3dc10 --- /dev/null +++ b/dotnet/src/InternalUtilities/agents/Extensions/KernelFunctionMetadataExtensions.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft. All rights reserved. 
+using System; +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel.Agents; + +internal static class KernelFunctionMetadataExtensions +{ + /// + /// Transform the function parameter metadata into a binary parameter spec. + /// + /// The function meta-data + /// The parameter spec as + internal static BinaryData CreateParameterSpec(this KernelFunctionMetadata metadata) + { + JsonSchemaFunctionParameters parameterSpec = new(); + List required = new(metadata.Parameters.Count); + + foreach (var parameter in metadata.Parameters) + { + if (parameter.IsRequired) + { + parameterSpec.Required.Add(parameter.Name); + } + + if (parameter.Schema is null) + { + throw new KernelException($"Unsupported function parameter: {metadata.PluginName ?? "*"}.{metadata.Name}.{parameter.Name}"); + } + + parameterSpec.Properties.Add(parameter.Name, parameter.Schema); + } + + return BinaryData.FromObjectAsJson(parameterSpec); + } +} diff --git a/dotnet/src/InternalUtilities/azure/AzureAIUtilities.props b/dotnet/src/InternalUtilities/azure/AzureAIUtilities.props new file mode 100644 index 000000000000..323196e5564b --- /dev/null +++ b/dotnet/src/InternalUtilities/azure/AzureAIUtilities.props @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/dotnet/src/InternalUtilities/azure/Policies/GeneratedActionPipelinePolicy.cs b/dotnet/src/InternalUtilities/azure/Policies/GeneratedActionPipelinePolicy.cs new file mode 100644 index 000000000000..8f412aa9e930 --- /dev/null +++ b/dotnet/src/InternalUtilities/azure/Policies/GeneratedActionPipelinePolicy.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; + +/// +/// Generic action pipeline policy for processing messages. +/// +[ExcludeFromCodeCoverage] +internal sealed class GenericActionPipelinePolicy : HttpPipelinePolicy +{ + private readonly Action _processMessageAction; + + internal GenericActionPipelinePolicy(Action processMessageAction) + { + this._processMessageAction = processMessageAction; + } + + public override void Process(HttpMessage message, ReadOnlyMemory pipeline) + { + this._processMessageAction(message); + } + + public override ValueTask ProcessAsync(HttpMessage message, ReadOnlyMemory pipeline) + { + this._processMessageAction(message); + return new ValueTask(Task.CompletedTask); // .NET STD 2.0 compatibility + } +} diff --git a/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs b/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs index 5fe03ebb925d..a9f3a79874ef 100644 --- a/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs +++ b/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs @@ -5,6 +5,7 @@ using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Runtime.CompilerServices; +using System.Text.Encodings.Web; using System.Text.Json; using System.Threading; using System.Threading.Tasks; @@ -118,6 +119,7 @@ public FunctionCallsProcessor(ILogger? logger = null) /// Processes AI function calls by iterating over the function calls, invoking them and adding the results to the chat history. /// /// The chat message content representing AI model response and containing function calls. + /// The prompt execution settings. /// The chat history to add function invocation results to. /// AI model function(s) call request sequence index. 
/// Callback to check if a function was advertised to AI model or not. @@ -128,6 +130,7 @@ public FunctionCallsProcessor(ILogger? logger = null) /// Last chat history message if function invocation filter requested processing termination, otherwise null. public async Task ProcessFunctionCallsAsync( ChatMessageContent chatMessageContent, + PromptExecutionSettings? executionSettings, ChatHistory chatHistory, int requestIndex, Func checkIfFunctionAdvertised, @@ -176,7 +179,8 @@ public FunctionCallsProcessor(ILogger? logger = null) FunctionCount = functionCalls.Length, CancellationToken = cancellationToken, IsStreaming = isStreaming, - ToolCallId = functionCall.Id + ToolCallId = functionCall.Id, + ExecutionSettings = executionSettings }; s_inflightAutoInvokes.Value++; @@ -490,6 +494,17 @@ public static string ProcessFunctionResult(object functionResult) return chatMessageContent.ToString(); } - return JsonSerializer.Serialize(functionResult); + return JsonSerializer.Serialize(functionResult, s_functionResultSerializerOptions); } + + /// + /// The which will be used in . + /// + /// + /// is very likely to escape characters and generates LLM unfriendly results by default. + /// + private static readonly JsonSerializerOptions s_functionResultSerializerOptions = new() + { + Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping, + }; } diff --git a/dotnet/src/InternalUtilities/process/Abstractions/StepExtensions.cs b/dotnet/src/InternalUtilities/process/Abstractions/StepExtensions.cs index ab74689a33db..fead79cde844 100644 --- a/dotnet/src/InternalUtilities/process/Abstractions/StepExtensions.cs +++ b/dotnet/src/InternalUtilities/process/Abstractions/StepExtensions.cs @@ -101,9 +101,14 @@ public static void InitializeUserState(this KernelProcessStepState stateObject, /// The source channel to evaluate /// A dictionary of KernelFunction instances. /// An instance of . + /// An instance of /// /// - public static Dictionary?> FindInputChannels(this IKernelProcessMessageChannel channel, Dictionary functions, ILogger? logger) + public static Dictionary?> FindInputChannels( + this IKernelProcessMessageChannel channel, + Dictionary functions, + ILogger? logger, + IExternalKernelProcessMessageChannel? externalMessageChannel = null) { if (functions is null) { @@ -126,7 +131,7 @@ public static void InitializeUserState(this KernelProcessStepState stateObject, // and are instantiated here. if (param.ParameterType == typeof(KernelProcessStepContext)) { - inputs[kvp.Key]![param.Name] = new KernelProcessStepContext(channel); + inputs[kvp.Key]![param.Name] = new KernelProcessStepContext(channel, externalMessageChannel); } else { diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs index 989005333946..7c9ee6a3c654 100644 --- a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs +++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs @@ -1,10 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. -using System.ClientModel; using System.Collections.ObjectModel; using System.Diagnostics; -using Azure.Identity; +using Azure.AI.Projects; using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; using OpenAI.Assistants; @@ -13,40 +13,41 @@ using ChatTokenUsage = OpenAI.Chat.ChatTokenUsage; /// -/// Base class for samples that demonstrate the usage of agents. 
+/// Base class for samples that demonstrate the usage of host agents +/// based on API's such as Open AI Assistants or Azure AI Agents. /// -public abstract class BaseAgentsTest(ITestOutputHelper output) : BaseTest(output, redirectSystemConsoleOutput: true) +public abstract class BaseAgentsTest(ITestOutputHelper output) : BaseAgentsTest(output) { /// /// Metadata key to indicate the assistant as created for a sample. /// - protected const string AssistantSampleMetadataKey = "sksample"; + protected const string SampleMetadataKey = "sksample"; /// - /// Metadata to indicate the assistant as created for a sample. + /// Metadata to indicate the object was created for a sample. /// /// - /// While the samples do attempt delete the assistants it creates, it is possible - /// that some assistants may remain. This metadata can be used to identify and sample - /// agents for clean-up. + /// While the samples do attempt delete the objects it creates, it is possible + /// that some may remain. This metadata can be used to identify and sample + /// objects for manual clean-up. /// - protected static readonly ReadOnlyDictionary AssistantSampleMetadata = + protected static readonly ReadOnlyDictionary SampleMetadata = new(new Dictionary { - { AssistantSampleMetadataKey, bool.TrueString } + { SampleMetadataKey, bool.TrueString } }); /// - /// Provide a according to the configuration settings. + /// Gets the root client for the service. /// - protected OpenAIClientProvider GetClientProvider() - => - this.UseOpenAIConfig ? - OpenAIClientProvider.ForOpenAI(new ApiKeyCredential(this.ApiKey ?? throw new ConfigurationNotFoundException("OpenAI:ApiKey"))) : - !string.IsNullOrWhiteSpace(this.ApiKey) ? - OpenAIClientProvider.ForAzureOpenAI(new ApiKeyCredential(this.ApiKey), new Uri(this.Endpoint!)) : - OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(this.Endpoint!)); + protected abstract TClient Client { get; } +} +/// +/// Base class for samples that demonstrate the usage of agents. +/// +public abstract class BaseAgentsTest(ITestOutputHelper output) : BaseTest(output, redirectSystemConsoleOutput: true) +{ /// /// Common method to write formatted agent chat content to the console. /// @@ -91,13 +92,17 @@ protected void WriteAgentChatMessage(ChatMessageContent message) { WriteUsage(assistantUsage.TotalTokenCount, assistantUsage.InputTokenCount, assistantUsage.OutputTokenCount); } + else if (usage is RunStepCompletionUsage agentUsage) + { + WriteUsage(agentUsage.TotalTokens, agentUsage.PromptTokens, agentUsage.CompletionTokens); + } else if (usage is ChatTokenUsage chatUsage) { WriteUsage(chatUsage.TotalTokenCount, chatUsage.InputTokenCount, chatUsage.OutputTokenCount); } } - void WriteUsage(int totalTokens, int inputTokens, int outputTokens) + void WriteUsage(long totalTokens, long inputTokens, long outputTokens) { Console.WriteLine($" [Usage] Tokens: {totalTokens}, Input: {inputTokens}, Output: {outputTokens}"); } diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAssistantTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAssistantTest.cs new file mode 100644 index 000000000000..504194becde9 --- /dev/null +++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAssistantTest.cs @@ -0,0 +1,90 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.ClientModel; +using System.Diagnostics; +using Azure.Identity; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.OpenAI; +using OpenAI; +using OpenAI.Assistants; +using OpenAI.Files; + +/// +/// Base class for samples that demonstrate the usage of . +/// +public abstract class BaseAssistantTest : BaseAgentsTest +{ + protected BaseAssistantTest(ITestOutputHelper output) : base(output) + { + this.Client = + this.UseOpenAIConfig ? + OpenAIAssistantAgent.CreateOpenAIClient(new ApiKeyCredential(this.ApiKey ?? throw new ConfigurationNotFoundException("OpenAI:ApiKey"))) : + !string.IsNullOrWhiteSpace(this.ApiKey) ? + OpenAIAssistantAgent.CreateAzureOpenAIClient(new ApiKeyCredential(this.ApiKey), new Uri(this.Endpoint!)) : + OpenAIAssistantAgent.CreateAzureOpenAIClient(new AzureCliCredential(), new Uri(this.Endpoint!)); + + this.AssistantClient = this.Client.GetAssistantClient(); + } + + /// + protected override OpenAIClient Client { get; } + + /// + /// Gets the the . + /// + protected AssistantClient AssistantClient { get; } + + protected async Task DownloadResponseContentAsync(ChatMessageContent message) + { + OpenAIFileClient fileClient = this.Client.GetOpenAIFileClient(); + + foreach (KernelContent item in message.Items) + { + if (item is AnnotationContent annotation) + { + await this.DownloadFileContentAsync(fileClient, annotation.FileId!); + } + } + } + + protected async Task DownloadResponseImageAsync(ChatMessageContent message) + { + OpenAIFileClient fileClient = this.Client.GetOpenAIFileClient(); + + foreach (KernelContent item in message.Items) + { + if (item is FileReferenceContent fileReference) + { + await this.DownloadFileContentAsync(fileClient, fileReference.FileId, launchViewer: true); + } + } + } + + private async Task DownloadFileContentAsync(OpenAIFileClient fileClient, string fileId, bool launchViewer = false) + { + OpenAIFile fileInfo = fileClient.GetFile(fileId); + if (fileInfo.Purpose == FilePurpose.AssistantsOutput) + { + string filePath = Path.Combine(Path.GetTempPath(), Path.GetFileName(fileInfo.Filename)); + if (launchViewer) + { + filePath = Path.ChangeExtension(filePath, ".png"); + } + + BinaryData content = await fileClient.DownloadFileAsync(fileId); + File.WriteAllBytes(filePath, content.ToArray()); + Console.WriteLine($" File #{fileId} saved to: {filePath}"); + + if (launchViewer) + { + Process.Start( + new ProcessStartInfo + { + FileName = "cmd.exe", + Arguments = $"/C start {filePath}" + }); + } + } + } +} diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureAgentTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureAgentTest.cs new file mode 100644 index 000000000000..a36932db1f38 --- /dev/null +++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureAgentTest.cs @@ -0,0 +1,173 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.ClientModel; +using System.Collections.ObjectModel; +using System.Diagnostics; +using Azure.AI.Projects; +using Azure.Identity; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; +using OpenAI.Files; + +using ChatTokenUsage = OpenAI.Chat.ChatTokenUsage; + +/// +/// Base class for samples that demonstrate the usage of agents. 
+/// +public abstract class BaseAzureTest(ITestOutputHelper output) : BaseTest(output, redirectSystemConsoleOutput: true) +{ + /// + /// Metadata key to indicate the assistant as created for a sample. + /// + protected const string AssistantSampleMetadataKey = "sksample"; + + protected override bool ForceOpenAI => false; + + /// + /// Metadata to indicate the object was created for a sample. + /// + /// + /// While the samples do attempt delete the objects it creates, it is possible + /// that some may remain. This metadata can be used to identify and sample + /// objects for manual clean-up. + /// + protected static readonly ReadOnlyDictionary SampleMetadata = + new(new Dictionary + { + { AssistantSampleMetadataKey, bool.TrueString } + }); + + /// + /// Provide a according to the configuration settings. + /// + protected AzureAIClientProvider GetAzureProvider() + { + return AzureAIClientProvider.FromConnectionString(TestConfiguration.AzureAI.ConnectionString, new AzureCliCredential()); + } + + /// + /// Provide a according to the configuration settings. + /// + protected OpenAIClientProvider GetClientProvider() + { + return + this.UseOpenAIConfig ? + OpenAIClientProvider.ForOpenAI(new ApiKeyCredential(this.ApiKey ?? throw new ConfigurationNotFoundException("OpenAI:ApiKey"))) : + !string.IsNullOrWhiteSpace(this.ApiKey) ? + OpenAIClientProvider.ForAzureOpenAI(new ApiKeyCredential(this.ApiKey), new Uri(this.Endpoint!)) : + OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(this.Endpoint!)); + } + + /// + /// Common method to write formatted agent chat content to the console. + /// + protected void WriteAgentChatMessage(ChatMessageContent message) + { + // Include ChatMessageContent.AuthorName in output, if present. + string authorExpression = message.Role == AuthorRole.User ? string.Empty : $" - {message.AuthorName ?? "*"}"; + // Include TextContent (via ChatMessageContent.Content), if present. + string contentExpression = string.IsNullOrWhiteSpace(message.Content) ? string.Empty : message.Content; + bool isCode = message.Metadata?.ContainsKey(OpenAIAssistantAgent.CodeInterpreterMetadataKey) ?? false; + string codeMarker = isCode ? "\n [CODE]\n" : " "; + Console.WriteLine($"\n# {message.Role}{authorExpression}:{codeMarker}{contentExpression}"); + + // Provide visibility for inner content (that isn't TextContent). + foreach (KernelContent item in message.Items) + { + if (item is AnnotationContent annotation) + { + Console.WriteLine($" [{item.GetType().Name}] {annotation.Quote}: File #{annotation.FileId}"); + } + else if (item is FileReferenceContent fileReference) + { + Console.WriteLine($" [{item.GetType().Name}] File #{fileReference.FileId}"); + } + else if (item is ImageContent image) + { + Console.WriteLine($" [{item.GetType().Name}] {image.Uri?.ToString() ?? image.DataUri ?? $"{image.Data?.Length} bytes"}"); + } + else if (item is FunctionCallContent functionCall) + { + Console.WriteLine($" [{item.GetType().Name}] {functionCall.Id}"); + } + else if (item is FunctionResultContent functionResult) + { + Console.WriteLine($" [{item.GetType().Name}] {functionResult.CallId} - {functionResult.Result?.AsJson() ?? "*"}"); + } + } + + if (message.Metadata?.TryGetValue("Usage", out object? usage) ?? 
false) + { + if (usage is RunStepTokenUsage assistantUsage) + { + WriteUsage(assistantUsage.TotalTokenCount, assistantUsage.InputTokenCount, assistantUsage.OutputTokenCount); + } + else if (usage is RunStepCompletionUsage agentUsage) + { + WriteUsage(agentUsage.TotalTokens, agentUsage.PromptTokens, agentUsage.CompletionTokens); + } + else if (usage is ChatTokenUsage chatUsage) + { + WriteUsage(chatUsage.TotalTokenCount, chatUsage.InputTokenCount, chatUsage.OutputTokenCount); + } + } + + void WriteUsage(long totalTokens, long inputTokens, long outputTokens) + { + Console.WriteLine($" [Usage] Tokens: {totalTokens}, Input: {inputTokens}, Output: {outputTokens}"); + } + } + + protected async Task DownloadResponseContentAsync(OpenAIFileClient client, ChatMessageContent message) + { + foreach (KernelContent item in message.Items) + { + if (item is AnnotationContent annotation) + { + await this.DownloadFileContentAsync(client, annotation.FileId!); + } + } + } + + protected async Task DownloadResponseImageAsync(OpenAIFileClient client, ChatMessageContent message) + { + foreach (KernelContent item in message.Items) + { + if (item is FileReferenceContent fileReference) + { + await this.DownloadFileContentAsync(client, fileReference.FileId, launchViewer: true); + } + } + } + + private async Task DownloadFileContentAsync(OpenAIFileClient client, string fileId, bool launchViewer = false) + { + OpenAIFile fileInfo = client.GetFile(fileId); + if (fileInfo.Purpose == FilePurpose.AssistantsOutput) + { + string filePath = Path.Combine(Path.GetTempPath(), Path.GetFileName(fileInfo.Filename)); + if (launchViewer) + { + filePath = Path.ChangeExtension(filePath, ".png"); + } + + BinaryData content = await client.DownloadFileAsync(fileId); + File.WriteAllBytes(filePath, content.ToArray()); + Console.WriteLine($" File #{fileId} saved to: {filePath}"); + + if (launchViewer) + { + Process.Start( + new ProcessStartInfo + { + FileName = "cmd.exe", + Arguments = $"/C start {filePath}" + }); + } + } + } +} diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureTest.cs new file mode 100644 index 000000000000..e0c937870e54 --- /dev/null +++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureTest.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics; +using Azure.AI.Projects; +using Azure.Identity; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// Base class for samples that demonstrate the usage of . +/// +public abstract class BaseAzureAgentTest : BaseAgentsTest +{ + protected BaseAzureAgentTest(ITestOutputHelper output) : base(output) + { + this.Client = AzureAIAgent.CreateAzureAIClient(TestConfiguration.AzureAI.ConnectionString, new AzureCliCredential()); + this.AgentsClient = this.Client.GetAgentsClient(); + } + + /// + protected override AIProjectClient Client { get; } + + /// + /// Gets the . 
+ /// + protected AgentsClient AgentsClient { get; } + + protected async Task DownloadContentAsync(ChatMessageContent message) + { + foreach (KernelContent item in message.Items) + { + if (item is AnnotationContent annotation) + { + await this.DownloadFileAsync(annotation.FileId!); + } + } + } + + protected async Task DownloadFileAsync(string fileId, bool launchViewer = false) + { + AgentFile fileInfo = this.AgentsClient.GetFile(fileId); + if (fileInfo.Purpose == AgentFilePurpose.AgentsOutput) + { + string filePath = Path.Combine(Path.GetTempPath(), Path.GetFileName(fileInfo.Filename)); + if (launchViewer) + { + filePath = Path.ChangeExtension(filePath, ".png"); + } + + BinaryData content = await this.AgentsClient.GetFileContentAsync(fileId); + File.WriteAllBytes(filePath, content.ToArray()); + Console.WriteLine($" File #{fileId} saved to: {filePath}"); + + if (launchViewer) + { + Process.Start( + new ProcessStartInfo + { + FileName = "cmd.exe", + Arguments = $"/C start {filePath}" + }); + } + } + } +} diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseBedrockAgentTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseBedrockAgentTest.cs new file mode 100644 index 000000000000..0a41c9c5778c --- /dev/null +++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseBedrockAgentTest.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Amazon.BedrockAgent; +using Amazon.BedrockAgent.Model; +using Microsoft.SemanticKernel.Agents.Bedrock; + +/// +/// Base class for samples that demonstrate the usage of AWS Bedrock agents. +/// +public abstract class BaseBedrockAgentTest : BaseTest +{ + protected const string AgentDescription = "A helpful assistant who helps users find information."; + protected const string AgentInstruction = "You're a helpful assistant who helps users find information."; + protected readonly AmazonBedrockAgentClient Client; + + protected BaseBedrockAgentTest(ITestOutputHelper output) : base(output, redirectSystemConsoleOutput: true) + { + Client = new AmazonBedrockAgentClient(); + } + + protected CreateAgentRequest GetCreateAgentRequest(string agentName) => new() + { + AgentName = agentName, + Description = AgentDescription, + Instruction = AgentInstruction, + AgentResourceRoleArn = TestConfiguration.BedrockAgent.AgentResourceRoleArn, + FoundationModel = TestConfiguration.BedrockAgent.FoundationModel, + }; + + protected override void Dispose(bool disposing) + { + Client?.Dispose(); + base.Dispose(disposing); + } + + /// + /// Override this method to create an agent with desired settings. + /// + /// The name of the agent to create. Must be unique. + protected abstract Task CreateAgentAsync(string agentName); +} diff --git a/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs b/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs index 03c09e63551b..78816c97e2e2 100644 --- a/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs +++ b/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs @@ -141,6 +141,33 @@ protected void OutputLastMessage(ChatHistory chatHistory) Console.WriteLine("------------------------"); } + /// + /// Outputs out the stream of generated message tokens. 
+ /// + protected async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) + { + bool roleWritten = false; + string fullMessage = string.Empty; + + await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + if (!roleWritten && chatUpdate.Role.HasValue) + { + Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); + roleWritten = true; + } + + if (chatUpdate.Content is { Length: > 0 }) + { + fullMessage += chatUpdate.Content; + Console.Write(chatUpdate.Content); + } + } + + Console.WriteLine("\n------------------------"); + chatHistory.AddMessage(authorRole, fullMessage); + } + /// /// Utility method to write a horizontal rule to the console. /// diff --git a/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs b/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs index 5e9e0c925660..e45f52216a14 100644 --- a/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs +++ b/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs @@ -24,6 +24,7 @@ public static void Initialize(IConfigurationRoot configRoot) public static OnnxConfig Onnx => LoadSection(); public static AzureOpenAIConfig AzureOpenAI => LoadSection(); public static AzureAIInferenceConfig AzureAIInference => LoadSection(); + public static AzureAIConfig AzureAI => LoadSection(); public static AzureOpenAIConfig AzureOpenAIImages => LoadSection(); public static AzureOpenAIEmbeddingsConfig AzureOpenAIEmbeddings => LoadSection(); public static AzureAISearchConfig AzureAISearch => LoadSection(); @@ -47,6 +48,9 @@ public static void Initialize(IConfigurationRoot configRoot) public static GoogleAIConfig GoogleAI => LoadSection(); public static VertexAIConfig VertexAI => LoadSection(); public static AzureCosmosDbMongoDbConfig AzureCosmosDbMongoDb => LoadSection(); + public static ApplicationInsightsConfig ApplicationInsights => LoadSection(); + public static CrewAIConfig CrewAI => LoadSection(); + public static BedrockAgentConfig BedrockAgent => LoadSection(); private static T LoadSection([CallerMemberName] string? caller = null) { @@ -91,6 +95,12 @@ public class OnnxConfig public string EmbeddingVocabPath { get; set; } } + public class AzureAIConfig + { + public string ConnectionString { get; set; } + public string ChatModelId { get; set; } + } + public class AzureOpenAIConfig { public string ServiceId { get; set; } @@ -256,6 +266,11 @@ public class AzureCosmosDbMongoDbConfig public string DatabaseName { get; set; } } + public class ApplicationInsightsConfig + { + public string ConnectionString { get; set; } + } + /// /// Graph API connector configuration model. 
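// Illustrative usage sketch (not part of the patch): shows how a sample derived from
// BaseTest might drive the StreamMessageOutputAsync helper added above. The Azure OpenAI
// connector call and the TestConfiguration.AzureOpenAI fields (ChatDeploymentName,
// Endpoint, ApiKey) are assumed to exist in the sample configuration; treat this as a
// sketch only, not as code from this change.
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

public sealed class StreamingChatSampleSketch(ITestOutputHelper output) : BaseTest(output)
{
    [Fact]
    public async Task RunStreamingAsync()
    {
        Kernel kernel = Kernel.CreateBuilder()
            .AddAzureOpenAIChatCompletion(
                deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
                endpoint: TestConfiguration.AzureOpenAI.Endpoint,
                apiKey: TestConfiguration.AzureOpenAI.ApiKey)
            .Build();

        IChatCompletionService chatService = kernel.GetRequiredService<IChatCompletionService>();

        ChatHistory history = new("You are a concise assistant.");
        history.AddUserMessage("Explain streaming chat completions in one paragraph.");

        // Streams tokens to the console as they arrive and appends the full reply to the history.
        await this.StreamMessageOutputAsync(chatService, history, AuthorRole.Assistant);
    }
}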
/// @@ -302,4 +317,16 @@ public MsGraphConfiguration( this.RedirectUri = redirectUri; } } + + public class CrewAIConfig + { + public string Endpoint { get; set; } + public string AuthToken { get; set; } + } + + public class BedrockAgentConfig + { + public string AgentResourceRoleArn { get; set; } + public string FoundationModel { get; set; } + } } diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/ActivityExtensions.cs b/dotnet/src/InternalUtilities/src/Diagnostics/ActivityExtensions.cs index d5b36387b305..14d29749e36e 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/ActivityExtensions.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/ActivityExtensions.cs @@ -4,6 +4,9 @@ using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; namespace Microsoft.SemanticKernel.Diagnostics; @@ -24,7 +27,8 @@ public static Activity SetTags(this Activity activity, ReadOnlySpan RunWithActivityAsync( + Func getActivity, + Func> operation, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + using var activity = getActivity(); + + ConfiguredCancelableAsyncEnumerable result; + + try + { + result = operation().WithCancellation(cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (activity is not null) + { + activity.SetError(ex); + throw; + } + + var resultEnumerator = result.ConfigureAwait(false).GetAsyncEnumerator(); + + try + { + while (true) + { + try + { + if (!await resultEnumerator.MoveNextAsync()) + { + break; + } + } + catch (Exception ex) when (activity is not null) + { + activity.SetError(ex); + throw; + } + + yield return resultEnumerator.Current; + } + } + finally + { + await resultEnumerator.DisposeAsync(); + } + } } diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs b/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs index 76e9d130ac3a..af2f4611759e 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs @@ -61,6 +61,38 @@ internal static class ModelDiagnostics ) where TPromptExecutionSettings : PromptExecutionSettings => StartCompletionActivity(endpoint, modelName, modelProvider, chatHistory, executionSettings, ToOpenAIFormat); + /// + /// Start an agent invocation activity and return the activity. + /// + internal static Activity? StartAgentInvocationActivity( + string agentId, + string agentName, + string? agentDescription) + { + if (!IsModelDiagnosticsEnabled()) + { + return null; + } + + const string OperationName = "invoke_agent"; + + var activity = s_activitySource.StartActivityWithTags( + $"{OperationName} {agentName}", + [ + new(ModelDiagnosticsTags.Operation, OperationName), + new(ModelDiagnosticsTags.AgentId, agentId), + new(ModelDiagnosticsTags.AgentName, agentName) + ], + ActivityKind.Internal); + + if (!string.IsNullOrWhiteSpace(agentDescription)) + { + activity?.SetTag(ModelDiagnosticsTags.AgentDescription, agentDescription); + } + + return activity; + } + /// /// Set the text completion response for a given activity. /// The activity will be enriched with the response attributes specified by the semantic conventions. 
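// Conceptual sketch (not part of the patch): shows how an agent implementation could
// combine the two helpers added above. The activity returned by StartAgentInvocationActivity
// is created lazily, and RunWithActivityAsync records any exception thrown while the stream
// is enumerated before rethrowing it. Both members are internal to the Semantic Kernel
// assemblies, so this is illustrative only; the class and method names are made up.
using System;
using System.Collections.Generic;
using System.Threading;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Diagnostics;

internal static class AgentTracingSketch
{
    public static IAsyncEnumerable<StreamingChatMessageContent> InvokeWithTracingAsync(
        string agentId,
        string agentName,
        Func<IAsyncEnumerable<StreamingChatMessageContent>> invokeAgent,
        CancellationToken cancellationToken = default)
        => ActivityExtensions.RunWithActivityAsync(
            () => ModelDiagnostics.StartAgentInvocationActivity(agentId, agentName, agentDescription: null),
            invokeAgent,
            cancellationToken);
}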
@@ -440,6 +472,9 @@ private static class ModelDiagnosticsTags public const string Completion = "gen_ai.content.completion"; public const string Address = "server.address"; public const string Port = "server.port"; + public const string AgentId = "gen_ai.agent.id"; + public const string AgentName = "gen_ai.agent.name"; + public const string AgentDescription = "gen_ai.agent.description"; // Activity events public const string PromptEvent = "gen_ai.content.prompt"; diff --git a/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseClientTests.cs b/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseClientTests.cs new file mode 100644 index 000000000000..f49fa4ddce0d --- /dev/null +++ b/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseClientTests.cs @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Plugins.AI.CrewAI; +using Moq; +using Moq.Protected; +using Xunit; + +namespace SemanticKernel.Plugins.AI.UnitTests.CrewAI; + +/// +/// Tests for the class. +/// +public sealed partial class CrewAIEnterpriseClientTests +{ + private readonly Mock _httpMessageHandlerMock; + private readonly CrewAIEnterpriseClient _client; + + /// + /// Initializes a new instance of the class. + /// + public CrewAIEnterpriseClientTests() + { + this._httpMessageHandlerMock = new Mock(); + using var httpClientFactory = new MockHttpClientFactory(this._httpMessageHandlerMock); + this._client = new CrewAIEnterpriseClient( + endpoint: new Uri("http://example.com"), + authTokenProvider: () => Task.FromResult("token"), + httpClientFactory); + } + + /// + /// Tests that returns the required inputs from the CrewAI API. + /// + /// + [Fact] + public async Task GetInputsAsyncReturnsCrewAIRequiredInputsAsync() + { + // Arrange + var responseContent = "{\"inputs\": [\"input1\", \"input2\"]}"; + using var responseMessage = new HttpResponseMessage + { + StatusCode = HttpStatusCode.OK, + Content = new StringContent(responseContent) + }; + + this._httpMessageHandlerMock.Protected() + .Setup>( + "SendAsync", + ItExpr.IsAny(), + ItExpr.IsAny()) + .ReturnsAsync(responseMessage); + + // Act + var result = await this._client.GetInputsAsync(); + + // Assert + Assert.NotNull(result); + Assert.Equal(2, result.Inputs.Count); + Assert.Contains("input1", result.Inputs); + Assert.Contains("input2", result.Inputs); + } + + /// + /// Tests that returns the kickoff id from the CrewAI API. + /// + /// + [Fact] + public async Task KickoffAsyncReturnsCrewAIKickoffResponseAsync() + { + // Arrange + var responseContent = "{\"kickoff_id\": \"12345\"}"; + using var responseMessage = new HttpResponseMessage + { + StatusCode = HttpStatusCode.OK, + Content = new StringContent(responseContent) + }; + + this._httpMessageHandlerMock.Protected() + .Setup>( + "SendAsync", + ItExpr.IsAny(), + ItExpr.IsAny()) + .ReturnsAsync(responseMessage); + + // Act + var result = await this._client.KickoffAsync(new { key = "value" }); + + // Assert + Assert.NotNull(result); + Assert.Equal("12345", result.KickoffId); + } + + /// + /// Tests that returns the status of the CrewAI Crew. 
+ /// + /// + /// + /// + [Theory] + [InlineData(CrewAIKickoffState.Pending)] + [InlineData(CrewAIKickoffState.Started)] + [InlineData(CrewAIKickoffState.Running)] + [InlineData(CrewAIKickoffState.Success)] + [InlineData(CrewAIKickoffState.Failed)] + [InlineData(CrewAIKickoffState.Failure)] + [InlineData(CrewAIKickoffState.NotFound)] + public async Task GetStatusAsyncReturnsCrewAIStatusResponseAsync(CrewAIKickoffState state) + { + var crewAIStatusState = state switch + { + CrewAIKickoffState.Pending => "PENDING", + CrewAIKickoffState.Started => "STARTED", + CrewAIKickoffState.Running => "RUNNING", + CrewAIKickoffState.Success => "SUCCESS", + CrewAIKickoffState.Failed => "FAILED", + CrewAIKickoffState.Failure => "FAILURE", + CrewAIKickoffState.NotFound => "NOT FOUND", + _ => throw new ArgumentOutOfRangeException(nameof(state), state, null) + }; + + // Arrange + var responseContent = $"{{\"state\": \"{crewAIStatusState}\", \"result\": \"The Result\", \"last_step\": {{\"step1\": \"value1\"}}}}"; + using var responseMessage = new HttpResponseMessage + { + StatusCode = HttpStatusCode.OK, + Content = new StringContent(responseContent) + }; + + this._httpMessageHandlerMock.Protected() + .Setup>( + "SendAsync", + ItExpr.IsAny(), + ItExpr.IsAny()) + .ReturnsAsync(responseMessage); + + // Act + var result = await this._client.GetStatusAsync("12345"); + + // Assert + Assert.NotNull(result); + Assert.Equal(state, result.State); + Assert.Equal("The Result", result.Result); + Assert.NotNull(result.LastStep); + Assert.Equal("value1", result.LastStep["step1"].ToString()); + } +} diff --git a/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseTests.cs b/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseTests.cs new file mode 100644 index 000000000000..635e8f63700a --- /dev/null +++ b/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseTests.cs @@ -0,0 +1,150 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Plugins.AI.CrewAI; +using Moq; +using Xunit; + +namespace SemanticKernel.Plugins.UnitTests.AI.CrewAI; + +/// +/// Unit tests for the class. +/// +public sealed class CrewAIEnterpriseTests +{ + private readonly Mock _mockClient; + private readonly CrewAIEnterprise _crewAIEnterprise; + + /// + /// Initializes a new instance of the class. + /// + public CrewAIEnterpriseTests() + { + this._mockClient = new Mock(MockBehavior.Strict); + this._crewAIEnterprise = new CrewAIEnterprise(this._mockClient.Object, NullLoggerFactory.Instance); + } + + /// + /// Tests the successful kickoff of a CrewAI task. + /// + [Fact] + public async Task KickoffAsyncSuccessAsync() + { + // Arrange + var response = new CrewAIKickoffResponse { KickoffId = "12345" }; + this._mockClient.Setup(client => client.KickoffAsync(It.IsAny(), null, null, null, It.IsAny())) + .ReturnsAsync(response); + + // Act + var result = await this._crewAIEnterprise.KickoffAsync(new { }); + + // Assert + Assert.Equal("12345", result); + } + + /// + /// Tests the failure of a CrewAI task kickoff. 
+ /// + [Fact] + public async Task KickoffAsyncFailureAsync() + { + // Arrange + this._mockClient.Setup(client => client.KickoffAsync(It.IsAny(), null, null, null, It.IsAny())) + .ThrowsAsync(new InvalidOperationException("Kickoff failed")); + + // Act & Assert + await Assert.ThrowsAsync(() => this._crewAIEnterprise.KickoffAsync(new { })); + } + + /// + /// Tests the successful retrieval of CrewAI task status. + /// + [Fact] + public async Task GetCrewStatusAsyncSuccessAsync() + { + // Arrange + var response = new CrewAIStatusResponse { State = CrewAIKickoffState.Running }; + this._mockClient.Setup(client => client.GetStatusAsync("12345", It.IsAny())) + .ReturnsAsync(response); + + // Act + var result = await this._crewAIEnterprise.GetCrewKickoffStatusAsync("12345"); + + // Assert + Assert.Equal(CrewAIKickoffState.Running, result.State); + } + + /// + /// Tests the failure of CrewAI task status retrieval. + /// + [Fact] + public async Task GetCrewStatusAsyncFailureAsync() + { + // Arrange + this._mockClient.Setup(client => client.GetStatusAsync("12345", It.IsAny())) + .ThrowsAsync(new InvalidOperationException("Status retrieval failed")); + + // Act & Assert + await Assert.ThrowsAsync(() => this._crewAIEnterprise.GetCrewKickoffStatusAsync("12345")); + } + + /// + /// Tests the successful completion of a CrewAI task. + /// + [Fact] + public async Task WaitForCrewCompletionAsyncSuccessAsync() + { + // Arrange + var response = new CrewAIStatusResponse { State = CrewAIKickoffState.Success, Result = "Completed" }; + this._mockClient.SetupSequence(client => client.GetStatusAsync("12345", It.IsAny())) + .ReturnsAsync(new CrewAIStatusResponse { State = CrewAIKickoffState.Running }) + .ReturnsAsync(response); + + // Act + var result = await this._crewAIEnterprise.WaitForCrewCompletionAsync("12345"); + + // Assert + Assert.Equal("Completed", result); + } + + /// + /// Tests the failure of a CrewAI task completion. + /// + [Fact] + public async Task WaitForCrewCompletionAsyncFailureAsync() + { + // Arrange + var response = new CrewAIStatusResponse { State = CrewAIKickoffState.Failed, Result = "Error" }; + this._mockClient.SetupSequence(client => client.GetStatusAsync("12345", It.IsAny())) + .ReturnsAsync(new CrewAIStatusResponse { State = CrewAIKickoffState.Running }) + .ReturnsAsync(response); + + // Act & Assert + var exception = await Assert.ThrowsAsync(() => this._crewAIEnterprise.WaitForCrewCompletionAsync("12345")); + } + + /// + /// Tests the successful creation of a Kernel plugin. + /// + [Fact] + public void CreateKernelPluginSuccess() + { + // Arrange + var inputDefinitions = new List + { + new("input1", "description1", typeof(string)) + }; + + // Act + var plugin = this._crewAIEnterprise.CreateKernelPlugin("TestPlugin", "Test Description", inputDefinitions); + + // Assert + Assert.NotNull(plugin); + Assert.Equal("TestPlugin", plugin.Name); + } +} diff --git a/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/MockHttpClientFactory.cs b/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/MockHttpClientFactory.cs new file mode 100644 index 000000000000..fb37715e604f --- /dev/null +++ b/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/MockHttpClientFactory.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using Moq; + +namespace SemanticKernel.Plugins.AI.UnitTests.CrewAI; + +/// +/// Implementation of which uses the . 
+/// +internal sealed class MockHttpClientFactory(Mock mockHandler) : IHttpClientFactory, IDisposable +{ + public HttpClient CreateClient(string name) + { + return new(mockHandler.Object); + } + + public void Dispose() + { + mockHandler.Object.Dispose(); + GC.SuppressFinalize(this); + } +} diff --git a/dotnet/src/Plugins/Plugins.AI.UnitTests/Plugins.AI.UnitTests.csproj b/dotnet/src/Plugins/Plugins.AI.UnitTests/Plugins.AI.UnitTests.csproj new file mode 100644 index 000000000000..00d08ca13f1a --- /dev/null +++ b/dotnet/src/Plugins/Plugins.AI.UnitTests/Plugins.AI.UnitTests.csproj @@ -0,0 +1,37 @@ + + + + SemanticKernel.Plugins.AI.UnitTests + SemanticKernel.Plugins.AI.UnitTests + net8.0 + true + enable + disable + false + $(NoWarn);CA2007,VSTHRD111,SKEXP0001,SKEXP0050 + + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + diff --git a/dotnet/src/Agents/OpenAI/Properties/AssemblyInfo.cs b/dotnet/src/Plugins/Plugins.AI/AssemblyInfo.cs similarity index 78% rename from dotnet/src/Agents/OpenAI/Properties/AssemblyInfo.cs rename to dotnet/src/Plugins/Plugins.AI/AssemblyInfo.cs index bd1c0f58314e..0aef47e394f8 100644 --- a/dotnet/src/Agents/OpenAI/Properties/AssemblyInfo.cs +++ b/dotnet/src/Plugins/Plugins.AI/AssemblyInfo.cs @@ -3,4 +3,4 @@ using System.Diagnostics.CodeAnalysis; // This assembly is currently experimental. -[assembly: Experimental("SKEXP0110")] +[assembly: Experimental("SKEXP0050")] diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIEnterpriseClient.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIEnterpriseClient.cs new file mode 100644 index 000000000000..be2822d3e85e --- /dev/null +++ b/dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIEnterpriseClient.cs @@ -0,0 +1,164 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI; + +/// +/// Internal interface used for mocking and testing. +/// +internal interface ICrewAIEnterpriseClient +{ + Task GetInputsAsync(CancellationToken cancellationToken = default); + Task KickoffAsync( + object? inputs, + string? taskWebhookUrl = null, + string? stepWebhookUrl = null, + string? crewWebhookUrl = null, + CancellationToken cancellationToken = default); + Task GetStatusAsync(string taskId, CancellationToken cancellationToken = default); +} + +/// +/// A client for interacting with the CrewAI Enterprise API. +/// +internal class CrewAIEnterpriseClient : ICrewAIEnterpriseClient +{ + private readonly Uri _endpoint; + private readonly Func> _authTokenProvider; + private readonly IHttpClientFactory? _httpClientFactory; + + public CrewAIEnterpriseClient(Uri endpoint, Func> authTokenProvider, IHttpClientFactory? clientFactory = null) + { + Verify.NotNull(endpoint, nameof(endpoint)); + Verify.NotNull(authTokenProvider, nameof(authTokenProvider)); + + this._endpoint = endpoint; + this._authTokenProvider = authTokenProvider; + this._httpClientFactory = clientFactory; + } + + /// + /// Get the inputs required for the Crew to kickoff. + /// + /// A + /// Aninstance of describing the required inputs. 
+ /// + public async Task GetInputsAsync(CancellationToken cancellationToken = default) + { + try + { + using var client = await this.CreateHttpClientAsync().ConfigureAwait(false); + using var requestMessage = HttpRequest.CreateGetRequest("/inputs"); + using var response = await client.SendWithSuccessCheckAsync(requestMessage, cancellationToken) + .ConfigureAwait(false); + + var body = await response.Content.ReadAsStringWithExceptionMappingAsync(cancellationToken) + .ConfigureAwait(false); + + var requirements = JsonSerializer.Deserialize(body); + + return requirements ?? throw new KernelException(message: $"Failed to deserialize requirements from CrewAI. Response: {body}"); + } + catch (Exception ex) when (ex is not KernelException) + { + throw new KernelException(message: "Failed to get required inputs for CrewAI Crew.", innerException: ex); + } + } + + /// + /// Kickoff the Crew. + /// + /// An object containing key value pairs matching the required inputs of the Crew. + /// The task level webhook Uri. + /// The step level webhook Uri. + /// The crew level webhook Uri. + /// A + /// A string containing the Id of the started Crew Task. + public async Task KickoffAsync( + object? inputs, + string? taskWebhookUrl = null, + string? stepWebhookUrl = null, + string? crewWebhookUrl = null, + CancellationToken cancellationToken = default) + { + try + { + var content = new + { + inputs, + taskWebhookUrl, + stepWebhookUrl, + crewWebhookUrl + }; + + using var client = await this.CreateHttpClientAsync().ConfigureAwait(false); + using var requestMessage = HttpRequest.CreatePostRequest("/kickoff", content); + using var response = await client.SendWithSuccessCheckAsync(requestMessage, cancellationToken) + .ConfigureAwait(false); + + var body = await response.Content.ReadAsStringWithExceptionMappingAsync(cancellationToken) + .ConfigureAwait(false); + + var kickoffResponse = JsonSerializer.Deserialize(body); + return kickoffResponse ?? throw new KernelException(message: $"Failed to deserialize kickoff response from CrewAI. Response: {body}"); + } + catch (Exception ex) when (ex is not KernelException) + { + throw new KernelException(message: "Failed to kickoff CrewAI Crew.", innerException: ex); + } + } + + /// + /// Get the status of the Crew Task. + /// + /// The Id of the task. + /// A + /// A string containing the status or final result of the Crew task. + /// + public async Task GetStatusAsync(string taskId, CancellationToken cancellationToken = default) + { + try + { + using var client = await this.CreateHttpClientAsync().ConfigureAwait(false); + using var requestMessage = HttpRequest.CreateGetRequest($"/status/{taskId}"); + using var response = await client.SendWithSuccessCheckAsync(requestMessage, cancellationToken) + .ConfigureAwait(false); + + var body = await response.Content.ReadAsStringWithExceptionMappingAsync(cancellationToken) + .ConfigureAwait(false); + + var statusResponse = JsonSerializer.Deserialize(body); + + return statusResponse ?? throw new KernelException(message: $"Failed to deserialize status response from CrewAI. 
Response: {body}");
+        }
+        catch (Exception ex) when (ex is not KernelException)
+        {
+            throw new KernelException(message: "Failed to get the status of the CrewAI Crew.", innerException: ex);
+        }
+    }
+
+    #region Private Methods
+
+    private async Task CreateHttpClientAsync()
+    {
+        var authToken = await this._authTokenProvider().ConfigureAwait(false);
+
+        if (string.IsNullOrWhiteSpace(authToken))
+        {
+            throw new KernelException(message: "Failed to get auth token for CrewAI.");
+        }
+
+        var client = this._httpClientFactory?.CreateClient() ?? new();
+        client.DefaultRequestHeaders.Add("Authorization", $"Bearer {authToken}");
+        client.BaseAddress = this._endpoint;
+        return client;
+    }
+
+    #endregion
+}
diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIStateEnumConverter.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIStateEnumConverter.cs
new file mode 100644
index 000000000000..93e65b166d21
--- /dev/null
+++ b/dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIStateEnumConverter.cs
@@ -0,0 +1,44 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Text.Json;
+using System.Text.Json.Serialization;
+
+namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI;
+
+#pragma warning disable CA1812 // Avoid uninstantiated internal classes
+internal sealed class CrewAIStateEnumConverter : JsonConverter
+{
+    public override CrewAIKickoffState Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
+    {
+        string? stringValue = reader.GetString();
+        return stringValue?.ToUpperInvariant() switch
+        {
+            "PENDING" => CrewAIKickoffState.Pending,
+            "STARTED" => CrewAIKickoffState.Started,
+            "RUNNING" => CrewAIKickoffState.Running,
+            "SUCCESS" => CrewAIKickoffState.Success,
+            "FAILED" => CrewAIKickoffState.Failed,
+            "FAILURE" => CrewAIKickoffState.Failure,
+            "NOT FOUND" => CrewAIKickoffState.NotFound,
+            _ => throw new KernelException("Failed to parse Crew AI kickoff state.")
+        };
+    }
+
+    public override void Write(Utf8JsonWriter writer, CrewAIKickoffState value, JsonSerializerOptions options)
+    {
+        string stringValue = value switch
+        {
+            CrewAIKickoffState.Pending => "PENDING",
+            CrewAIKickoffState.Started => "STARTED",
+            CrewAIKickoffState.Running => "RUNNING",
+            CrewAIKickoffState.Success => "SUCCESS",
+            CrewAIKickoffState.Failed => "FAILED",
+            CrewAIKickoffState.Failure => "FAILURE",
+            CrewAIKickoffState.NotFound => "NOT FOUND",
+            _ => throw new KernelException("Failed to parse Crew AI kickoff state.")
+        };
+        writer.WriteStringValue(stringValue);
+    }
+}
+#pragma warning restore CA1812 // Avoid uninstantiated internal classes
diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIEnterprise.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIEnterprise.cs
new file mode 100644
index 000000000000..615f6a14c832
--- /dev/null
+++ b/dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIEnterprise.cs
@@ -0,0 +1,282 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.Linq;
+using System.Net.Http;
+using System.Text.Json;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Logging.Abstractions;
+
+namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI;
+
+///
+/// A plugin for interacting with a CrewAI Crew via the Enterprise APIs.
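// Illustrative sketch (not part of the patch): the CrewAIStateEnumConverter above maps the
// raw strings reported by the CrewAI status endpoint (including the space in "NOT FOUND")
// onto CrewAIKickoffState. CrewAIStatusResponse, defined later in this patch, applies it
// via [JsonConverter] on its State property, so a status payload round-trips like this:
using System;
using System.Text.Json;
using Microsoft.SemanticKernel.Plugins.AI.CrewAI;

const string json = """{"state": "NOT FOUND", "result": null, "last_step": null}""";
CrewAIStatusResponse status = JsonSerializer.Deserialize<CrewAIStatusResponse>(json)!;
Console.WriteLine(status.State); // NotFound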
+///
+public class CrewAIEnterprise
+{
+    private readonly ICrewAIEnterpriseClient _crewClient;
+    private readonly ILogger _logger;
+    private readonly TimeSpan _pollingInterval;
+
+    ///
+    /// The name of the kickoff function.
+    ///
+    public const string KickoffFunctionName = "KickoffCrew";
+
+    ///
+    /// The name of the kickoff and wait function.
+    ///
+    public const string KickoffAndWaitFunctionName = "KickoffAndWait";
+
+    ///
+    /// Initializes a new instance of the class.
+    ///
+    /// The base URI of the CrewAI Crew.
+    /// Provider used to generate the auth token.
+    /// The HTTP client factory.
+    /// The logger factory.
+    /// Defines the delay time between status calls when polling for a kickoff to complete.
+    public CrewAIEnterprise(Uri endpoint, Func> authTokenProvider, IHttpClientFactory? httpClientFactory = null, ILoggerFactory? loggerFactory = null, TimeSpan? pollingInterval = default)
+    {
+        Verify.NotNull(endpoint, nameof(endpoint));
+        Verify.NotNull(authTokenProvider, nameof(authTokenProvider));
+
+        this._crewClient = new CrewAIEnterpriseClient(endpoint, authTokenProvider, httpClientFactory);
+        this._logger = loggerFactory?.CreateLogger(typeof(CrewAIEnterprise)) ?? NullLogger.Instance;
+        this._pollingInterval = pollingInterval ?? TimeSpan.FromSeconds(1);
+    }
+
+    ///
+    /// Internal constructor used for testing purposes.
+    ///
+    internal CrewAIEnterprise(ICrewAIEnterpriseClient crewClient, ILoggerFactory? loggerFactory = null)
+    {
+        Verify.NotNull(crewClient, nameof(crewClient));
+        this._crewClient = crewClient;
+        this._logger = loggerFactory?.CreateLogger(typeof(CrewAIEnterprise)) ?? NullLogger.Instance;
+    }
+
+    ///
+    /// Kicks off (starts) a CrewAI Crew with the given inputs and callbacks.
+    ///
+    /// An object containing key value pairs matching the required inputs of the Crew.
+    /// The task level webhook Uri.
+    /// The step level webhook Uri.
+    /// The crew level webhook Uri.
+    /// The Id of the scheduled kickoff.
+    ///
+    public async Task KickoffAsync(
+        object? inputs,
+        Uri? taskWebhookUrl = null,
+        Uri? stepWebhookUrl = null,
+        Uri? crewWebhookUrl = null)
+    {
+        try
+        {
+            CrewAIKickoffResponse kickoffTask = await this._crewClient.KickoffAsync(
+                inputs: inputs,
+                taskWebhookUrl: taskWebhookUrl?.AbsoluteUri,
+                stepWebhookUrl: stepWebhookUrl?.AbsoluteUri,
+                crewWebhookUrl: crewWebhookUrl?.AbsoluteUri)
+                .ConfigureAwait(false);
+
+            this._logger.LogInformation("CrewAI Crew kicked off with Id: {KickoffId}", kickoffTask.KickoffId);
+            return kickoffTask.KickoffId;
+        }
+        catch (Exception ex)
+        {
+            throw new KernelException(message: "Failed to kickoff CrewAI Crew.", innerException: ex);
+        }
+    }
+
+    ///
+    /// Gets the current status of the CrewAI Crew kickoff.
+    ///
+    /// The Id of the Crew kickoff.
+    /// A
+    /// "
+    [KernelFunction]
+    [Description("Gets the current status of the CrewAI Crew kickoff.")]
+    public async Task GetCrewKickoffStatusAsync([Description("The Id of the kickoff")] string kickoffId)
+    {
+        Verify.NotNullOrWhiteSpace(kickoffId, nameof(kickoffId));
+
+        try
+        {
+            CrewAIStatusResponse statusResponse = await this._crewClient.GetStatusAsync(kickoffId).ConfigureAwait(false);
+
+            this._logger.LogInformation("CrewAI Crew status for kickoff Id: {KickoffId} is {Status}", kickoffId, statusResponse.State);
+            return statusResponse;
+        }
+        catch (Exception ex)
+        {
+            throw new KernelException(message: $"Failed to get status of CrewAI Crew with kickoff Id: {kickoffId}.", innerException: ex);
+        }
+    }
+
+    ///
+    /// Waits for the Crew kickoff to complete and returns the result.
+    ///
+    /// The Id of the crew kickoff.
+    /// The result of the Crew kickoff.
+    ///
+    [KernelFunction]
+    [Description("Waits for the Crew kickoff to complete and returns the result.")]
+    public async Task WaitForCrewCompletionAsync([Description("The Id of the kickoff")] string kickoffId)
+    {
+        Verify.NotNullOrWhiteSpace(kickoffId, nameof(kickoffId));
+
+        try
+        {
+            CrewAIStatusResponse? statusResponse = null;
+            var status = CrewAIKickoffState.Pending;
+            do
+            {
+                this._logger.LogInformation("Waiting for CrewAI Crew with kickoff Id: {KickoffId} to complete. Current state: {Status}", kickoffId, status);
+                await Task.Delay(this._pollingInterval).ConfigureAwait(false);
+                statusResponse = await this._crewClient.GetStatusAsync(kickoffId).ConfigureAwait(false);
+                status = statusResponse.State;
+            }
+            while (!this.IsTerminalState(status));
+
+            this._logger.LogInformation("CrewAI Crew with kickoff Id: {KickoffId} completed with status: {Status}", kickoffId, statusResponse.State);
+
+            return status switch
+            {
+                CrewAIKickoffState.Failed => throw new KernelException(message: $"CrewAI Crew failed with error: {statusResponse.Result}"),
+                CrewAIKickoffState.Success => statusResponse.Result ?? string.Empty,
+                _ => throw new KernelException(message: "Failed to parse unexpected response from CrewAI status response."),
+            };
+        }
+        catch (Exception ex)
+        {
+            throw new KernelException(message: $"Failed to wait for completion of CrewAI Crew with kickoff Id: {kickoffId}.", innerException: ex);
+        }
+    }
+
+    ///
+    /// Creates a that can be used to invoke the CrewAI Crew.
+    ///
+    /// The name of the
+    /// The description of the
+    /// The definitions of the Crew's required inputs.
+    /// The task level webhook Uri
+    /// The step level webhook Uri
+    /// The crew level webhook Uri
+    /// A that can invoke the Crew.
+    ///
+    public KernelPlugin CreateKernelPlugin(
+        string name,
+        string description,
+        IEnumerable? inputMetadata,
+        Uri? taskWebhookUrl = null,
+        Uri? stepWebhookUrl = null,
+        Uri? crewWebhookUrl = null)
+    {
+        var options = new KernelFunctionFromMethodOptions()
+        {
+            Parameters = inputMetadata?.Select(i => new KernelParameterMetadata(i.Name) { Description = i.Description, IsRequired = true, ParameterType = i.Type }) ??
[], + ReturnParameter = new() { ParameterType = typeof(string) }, + }; + + // Define the kernel function implementation for kickoff + [KernelFunction(KickoffFunctionName)] + [Description("kicks off the CrewAI Crew and returns the Id of the scheduled kickoff.")] + async Task KickoffAsync(KernelArguments arguments) + { + Dictionary args = BuildArguments(inputMetadata, arguments); + + return await this.KickoffAsync( + inputs: args, + taskWebhookUrl: taskWebhookUrl, + stepWebhookUrl: stepWebhookUrl, + crewWebhookUrl: crewWebhookUrl) + .ConfigureAwait(false); + } + + // Define the kernel function implementation for kickoff and wait for result + [KernelFunction(KickoffAndWaitFunctionName)] + [Description("kicks off the CrewAI Crew, waits for it to complete, and returns the result.")] + async Task KickoffAndWaitAsync(KernelArguments arguments) + { + Dictionary args = BuildArguments(inputMetadata, arguments); + + var kickoffId = await this.KickoffAsync( + inputs: args, + taskWebhookUrl: taskWebhookUrl, + stepWebhookUrl: stepWebhookUrl, + crewWebhookUrl: crewWebhookUrl) + .ConfigureAwait(false); + + return await this.WaitForCrewCompletionAsync(kickoffId).ConfigureAwait(false); + } + + return KernelPluginFactory.CreateFromFunctions( + name, + description, + [ + KernelFunctionFactory.CreateFromMethod(KickoffAsync, new(), options), + KernelFunctionFactory.CreateFromMethod(KickoffAndWaitAsync, new(), options), + KernelFunctionFactory.CreateFromMethod(this.GetCrewKickoffStatusAsync), + KernelFunctionFactory.CreateFromMethod(this.WaitForCrewCompletionAsync) + ]); + } + + #region Private Methods + + /// + /// Determines if the Crew kikoff state is terminal. + /// + /// The state of the crew kickoff + /// A indicating if the state is a terminal state. + private bool IsTerminalState(CrewAIKickoffState state) + { + return state == CrewAIKickoffState.Failed || state == CrewAIKickoffState.Failure || state == CrewAIKickoffState.Success || state == CrewAIKickoffState.NotFound; + } + + private static Dictionary BuildArguments(IEnumerable? inputMetadata, KernelArguments arguments) + { + // Extract the required arguments from the KernelArguments by name + Dictionary args = []; + if (inputMetadata is not null) + { + foreach (var input in inputMetadata) + { + // If a required argument is missing, throw an exception + if (!arguments.TryGetValue(input.Name, out object? value) || value is null || value is not string strValue) + { + throw new KernelException(message: $"Missing required input '{input.Name}' for CrewAI."); + } + + // Since this KernelFunction does not have explicit parameters all the relevant inputs are passed as strings. + // We need to convert the inputs to the expected types. + if (input.Type == typeof(string)) + { + args.Add(input.Name, value); + } + else + { + // Try to get a converter for the input type + var converter = TypeConverterFactory.GetTypeConverter(input.Type); + if (converter is not null) + { + args.Add(input.Name, converter.ConvertFrom(value)); + } + else + { + // Try to deserialize the input as a JSON object + var objValue = JsonSerializer.Deserialize(strValue, input.Type); + args.Add(input.Name, objValue); + } + } + } + } + + return args; + } + + #endregion +} diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIInputMetadata.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIInputMetadata.cs new file mode 100644 index 000000000000..dab170ceabf5 --- /dev/null +++ b/dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIInputMetadata.cs @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft. 
All rights reserved. + +using System; + +namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI; + +/// +/// The metadata associated with an input required by the CrewAI Crew. This metadata provides the information required to effectively describe the inputs to an LLM. +/// +/// The name of the input +/// The description of the input. This is used to help the LLM understand the correct usage of the input. +/// The of the input. +public record CrewAIInputMetadata(string Name, string Description, Type Type) +{ +} diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffResponse.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffResponse.cs new file mode 100644 index 000000000000..949aea64a800 --- /dev/null +++ b/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffResponse.cs @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI; + +/// +/// Models the response object of a call to kickoff a CrewAI Crew. +/// +#pragma warning disable CA1812 // Avoid uninstantiated internal classes +internal sealed class CrewAIKickoffResponse +{ + [JsonPropertyName("kickoff_id")] + public string KickoffId { get; set; } = string.Empty; +} +#pragma warning restore CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffState.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffState.cs new file mode 100644 index 000000000000..7ef9b9688928 --- /dev/null +++ b/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffState.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI; + +/// +/// Represents the state of a CrewAI Crew kickoff. +/// +public enum CrewAIKickoffState +{ + /// + /// The kickoff is pending and has not started yet. + /// + Pending, + + /// + /// The kickoff has started. + /// + Started, + + /// + /// The kickoff is currently running. + /// + Running, + + /// + /// The kickoff completed successfully. + /// + Success, + + /// + /// The kickoff failed. + /// + Failed, + + /// + /// The kickoff has failed. + /// + Failure, + + /// + /// The kickoff was not found. + /// + NotFound +} diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIRequiredInputs.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIRequiredInputs.cs new file mode 100644 index 000000000000..b9154e8b334c --- /dev/null +++ b/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIRequiredInputs.cs @@ -0,0 +1,18 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI; + +/// +/// Represents the requirements for kicking off a CrewAI Crew. +/// +public class CrewAIRequiredInputs +{ + /// + /// The inputs required for the Crew. + /// + [JsonPropertyName("inputs")] + public IList Inputs { get; set; } = []; +} diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIStatusResponse.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIStatusResponse.cs new file mode 100644 index 000000000000..5d31a2740f09 --- /dev/null +++ b/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIStatusResponse.cs @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft. All rights reserved. 
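// Illustrative end-to-end sketch (not part of the patch): shows how the CrewAIEnterprise
// plugin defined above might be wired into a Kernel. The endpoint, bearer token, plugin
// name and description, and the "topic" input are all placeholders invented for this sketch.
using System;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Plugins.AI.CrewAI;

var crew = new CrewAIEnterprise(
    endpoint: new Uri("https://my-crew.example.com"),             // placeholder
    authTokenProvider: () => Task.FromResult("<bearer-token>"));  // placeholder

KernelPlugin crewPlugin = crew.CreateKernelPlugin(
    name: "MyCrew",
    description: "Researches a topic and produces a short report.",
    inputMetadata: [new CrewAIInputMetadata("topic", "The topic to research.", typeof(string))]);

Kernel kernel = Kernel.CreateBuilder().Build();
kernel.Plugins.Add(crewPlugin);

// Invoke directly, or let automatic function calling choose one of the plugin's functions.
FunctionResult result = await kernel.InvokeAsync(
    crewPlugin[CrewAIEnterprise.KickoffAndWaitFunctionName],
    new KernelArguments { ["topic"] = "open-source agent frameworks" });

Console.WriteLine(result.GetValue<string>());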
+ +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI; + +/// +/// Models the response object of a call to get the state of a CrewAI Crew kickoff. +/// +public class CrewAIStatusResponse +{ + /// + /// The current state of the CrewAI Crew kickoff. + /// + [JsonPropertyName("state")] + [JsonConverter(typeof(CrewAIStateEnumConverter))] + public CrewAIKickoffState State { get; set; } + + /// + /// The result of the CrewAI Crew kickoff. + /// + [JsonPropertyName("result")] + public string? Result { get; set; } + + /// + /// The last step of the CrewAI Crew kickoff. + /// + [JsonPropertyName("last_step")] + public Dictionary? LastStep { get; set; } +} diff --git a/dotnet/src/Plugins/Plugins.AI/Plugins.AI.csproj b/dotnet/src/Plugins/Plugins.AI/Plugins.AI.csproj new file mode 100644 index 000000000000..472d0d6b3c2f --- /dev/null +++ b/dotnet/src/Plugins/Plugins.AI/Plugins.AI.csproj @@ -0,0 +1,34 @@ + + + + + Microsoft.SemanticKernel.Plugins.AI + $(AssemblyName) + net8.0;netstandard2.0 + alpha + + + + + + + + Semantic Kernel - AI Plugins + Semantic Kernel AI plugins. + + + + + + + + + + + + + + + + + diff --git a/dotnet/src/Plugins/Plugins.Document/FileSystem/IFileSystemConnector.cs b/dotnet/src/Plugins/Plugins.Document/FileSystem/IFileSystemConnector.cs index bcb274a23808..ce4a0d88856b 100644 --- a/dotnet/src/Plugins/Plugins.Document/FileSystem/IFileSystemConnector.cs +++ b/dotnet/src/Plugins/Plugins.Document/FileSystem/IFileSystemConnector.cs @@ -16,21 +16,21 @@ public interface IFileSystemConnector /// /// Path to the file. /// The to monitor for cancellation requests. The default is . - public Task GetFileContentStreamAsync(string filePath, CancellationToken cancellationToken = default); + Task GetFileContentStreamAsync(string filePath, CancellationToken cancellationToken = default); /// /// Get a writeable stream to an existing file. /// /// Path to file. /// The to monitor for cancellation requests. The default is . - public Task GetWriteableFileStreamAsync(string filePath, CancellationToken cancellationToken = default); + Task GetWriteableFileStreamAsync(string filePath, CancellationToken cancellationToken = default); /// /// Create a new file and get a writeable stream to it. /// /// Path to file. /// The to monitor for cancellation requests. The default is . - public Task CreateFileAsync(string filePath, CancellationToken cancellationToken = default); + Task CreateFileAsync(string filePath, CancellationToken cancellationToken = default); /// /// Determine whether a file exists at the specified path. @@ -38,5 +38,5 @@ public interface IFileSystemConnector /// Path to file. /// The to monitor for cancellation requests. The default is . /// True if file exists, false otherwise. - public Task FileExistsAsync(string filePath, CancellationToken cancellationToken = default); + Task FileExistsAsync(string filePath, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/Plugins/Plugins.Document/IDocumentConnector.cs b/dotnet/src/Plugins/Plugins.Document/IDocumentConnector.cs index 82934b86cecf..586129e4b84e 100644 --- a/dotnet/src/Plugins/Plugins.Document/IDocumentConnector.cs +++ b/dotnet/src/Plugins/Plugins.Document/IDocumentConnector.cs @@ -14,18 +14,18 @@ public interface IDocumentConnector /// /// Document stream /// String containing all text from the document. - public string ReadText(Stream stream); + string ReadText(Stream stream); /// /// Initialize a document from the given stream. 
/// /// IO stream - public void Initialize(Stream stream); + void Initialize(Stream stream); /// /// Append the specified text to the document. /// /// Document stream /// String of text to write to the document. - public void AppendText(Stream stream, string text); + void AppendText(Stream stream, string text); } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AIFunctionKernelFunction.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AIFunctionKernelFunction.cs index 2a175afb348d..7ab32b31b869 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AIFunctionKernelFunction.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AIFunctionKernelFunction.cs @@ -2,7 +2,9 @@ using System; using System.Collections.Generic; +using System.ComponentModel; using System.Linq; +using System.Reflection; using System.Text.Json; using System.Threading; using System.Threading.Tasks; @@ -12,8 +14,7 @@ namespace Microsoft.SemanticKernel.ChatCompletion; /// Provides a that wraps an . /// -/// The implementation should largely be unused, other than for its . The implementation of -/// only manufactures these to pass along to the underlying +/// The implementation of only manufactures these to pass along to the underlying /// with autoInvoke:false, which means the /// implementation shouldn't be invoking these functions at all. As such, the and /// methods both unconditionally throw, even though they could be implemented. @@ -23,28 +24,15 @@ internal sealed class AIFunctionKernelFunction : KernelFunction private readonly AIFunction _aiFunction; public AIFunctionKernelFunction(AIFunction aiFunction) : - base(aiFunction.Metadata.Name, - aiFunction.Metadata.Description, - aiFunction.Metadata.Parameters.Select(p => new KernelParameterMetadata(p.Name, AbstractionsJsonContext.Default.Options) - { - Description = p.Description, - DefaultValue = p.DefaultValue, - IsRequired = p.IsRequired, - ParameterType = p.ParameterType, - Schema = - p.Schema is JsonElement je ? new KernelJsonSchema(je) : - p.Schema is string s ? new KernelJsonSchema(JsonSerializer.Deserialize(s, AbstractionsJsonContext.Default.JsonElement)) : - null, - }).ToList(), - AbstractionsJsonContext.Default.Options, + base(aiFunction.Name, + aiFunction.Description, + MapParameterMetadata(aiFunction), + aiFunction.JsonSerializerOptions, new KernelReturnParameterMetadata(AbstractionsJsonContext.Default.Options) { - Description = aiFunction.Metadata.ReturnParameter.Description, - ParameterType = aiFunction.Metadata.ReturnParameter.ParameterType, - Schema = - aiFunction.Metadata.ReturnParameter.Schema is JsonElement je ? new KernelJsonSchema(je) : - aiFunction.Metadata.ReturnParameter.Schema is string s ? new KernelJsonSchema(JsonSerializer.Deserialize(s, AbstractionsJsonContext.Default.JsonElement)) : - null, + Description = aiFunction.UnderlyingMethod?.ReturnParameter.GetCustomAttribute()?.Description, + ParameterType = aiFunction.UnderlyingMethod?.ReturnParameter.ParameterType, + Schema = new KernelJsonSchema(AIJsonUtilities.CreateJsonSchema(aiFunction.UnderlyingMethod?.ReturnParameter.ParameterType)), }) { this._aiFunction = aiFunction; @@ -73,4 +61,30 @@ protected override IAsyncEnumerable InvokeStreamingCoreAsync(K // This should never be invoked, as instances are always passed with autoInvoke:false. 
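// Illustrative sketch (not part of the patch): the rewritten constructor above now derives
// KernelFunction metadata from AIFunction.Name, AIFunction.Description and the "properties"
// object of AIFunction.JsonSchema (see MapParameterMetadata below). This snippet prints the
// schema that mapping walks; AIFunctionFactory comes from Microsoft.Extensions.AI and the
// sample method, its name, and the printed output are assumptions for illustration only.
using System;
using System.ComponentModel;
using Microsoft.Extensions.AI;

AIFunction forecast = AIFunctionFactory.Create(
    ([Description("City name")] string city, int days = 3) => $"{days}-day forecast for {city}",
    name: "get_forecast");

Console.WriteLine(forecast.JsonSchema.GetProperty("properties"));
// e.g. {"city":{"description":"City name","type":"string"},"days":{"type":"integer","default":3}}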
throw new NotSupportedException(); } + + private static IReadOnlyList MapParameterMetadata(AIFunction aiFunction) + { + if (!aiFunction.JsonSchema.TryGetProperty("properties", out JsonElement properties)) + { + return Array.Empty(); + } + + List kernelParams = []; + var parameterInfos = aiFunction.UnderlyingMethod?.GetParameters().ToDictionary(p => p.Name!, StringComparer.Ordinal); + foreach (var param in properties.EnumerateObject()) + { + ParameterInfo? paramInfo = null; + parameterInfos?.TryGetValue(param.Name, out paramInfo); + kernelParams.Add(new(param.Name, aiFunction.JsonSerializerOptions) + { + Description = param.Value.TryGetProperty("description", out JsonElement description) ? description.GetString() : null, + DefaultValue = param.Value.TryGetProperty("default", out JsonElement defaultValue) ? defaultValue : null, + IsRequired = param.Value.TryGetProperty("required", out JsonElement required) && required.GetBoolean(), + ParameterType = paramInfo?.ParameterType, + Schema = param.Value.TryGetProperty("schema", out JsonElement schema) ? new KernelJsonSchema(schema) : null, + }); + } + + return kernelParams; + } } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs index 05f473b1b792..d4dd082dd98b 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs @@ -11,6 +11,11 @@ namespace Microsoft.SemanticKernel.ChatCompletion; /// public readonly struct AuthorRole : IEquatable { + /// + /// The role that instructs or sets the behavior of the assistant. + /// + public static AuthorRole Developer { get; } = new("developer"); + /// /// The role that instructs or sets the behavior of the assistant. /// diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatClientChatCompletionService.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatClientChatCompletionService.cs index 7447b230ec63..419dca381015 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatClientChatCompletionService.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatClientChatCompletionService.cs @@ -34,12 +34,12 @@ public ChatClientChatCompletionService(IChatClient chatClient, IServiceProvider? 
var attrs = new Dictionary(); this.Attributes = new ReadOnlyDictionary(attrs); - var metadata = chatClient.Metadata; - if (metadata.ProviderUri is not null) + var metadata = chatClient.GetService(); + if (metadata?.ProviderUri is not null) { attrs[AIServiceExtensions.EndpointKey] = metadata.ProviderUri.ToString(); } - if (metadata.ModelId is not null) + if (metadata?.ModelId is not null) { attrs[AIServiceExtensions.ModelIdKey] = metadata.ModelId; } @@ -57,7 +57,7 @@ public async Task> GetChatMessageContentsAsync var messageList = ChatCompletionServiceExtensions.ToChatMessageList(chatHistory); var currentSize = messageList.Count; - var completion = await this._chatClient.CompleteAsync( + var completion = await this._chatClient.GetResponseAsync( messageList, ToChatOptions(executionSettings, kernel), cancellationToken).ConfigureAwait(false); @@ -76,7 +76,7 @@ public async IAsyncEnumerable GetStreamingChatMessa { Verify.NotNull(chatHistory); - await foreach (var update in this._chatClient.CompleteStreamingAsync( + await foreach (var update in this._chatClient.GetStreamingResponseAsync( ChatCompletionServiceExtensions.ToChatMessageList(chatHistory), ToChatOptions(executionSettings, kernel), cancellationToken).ConfigureAwait(false)) @@ -158,13 +158,19 @@ public async IAsyncEnumerable GetStreamingChatMessa else if (entry.Key.Equals("response_format", StringComparison.OrdinalIgnoreCase) && entry.Value is { } responseFormat) { - options.ResponseFormat = responseFormat switch + if (TryConvert(responseFormat, out string? responseFormatString)) { - "text" => ChatResponseFormat.Text, - "json_object" => ChatResponseFormat.Json, - JsonElement e => ChatResponseFormat.ForJsonSchema(e), - _ => null, - }; + options.ResponseFormat = responseFormatString switch + { + "text" => ChatResponseFormat.Text, + "json_object" => ChatResponseFormat.Json, + _ => null, + }; + } + else + { + options.ResponseFormat = responseFormat is JsonElement e ? ChatResponseFormat.ForJsonSchema(e) : null; + } } else { @@ -268,9 +274,9 @@ static bool TryConvert(object? value, [NotNullWhen(true)] out T? result) } } - /// Converts a to a . + /// Converts a to a . /// This conversion should not be necessary once SK eventually adopts the shared content types. - private static StreamingChatMessageContent ToStreamingChatMessageContent(StreamingChatCompletionUpdate update) + private static StreamingChatMessageContent ToStreamingChatMessageContent(ChatResponseUpdate update) { StreamingChatMessageContent content = new( update.Role is not null ? new AuthorRole(update.Role.Value.Value) : null, diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceChatClient.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceChatClient.cs index 308dbc64e183..862239ccd505 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceChatClient.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceChatClient.cs @@ -35,7 +35,7 @@ public ChatCompletionServiceChatClient(IChatCompletionService chatCompletionServ public ChatClientMetadata Metadata { get; } /// - public async Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetResponseAsync(IList chatMessages, ChatOptions? 
options = null, CancellationToken cancellationToken = default) { Verify.NotNull(chatMessages); @@ -53,7 +53,7 @@ public ChatCompletionServiceChatClient(IChatCompletionService chatCompletionServ } /// - public async IAsyncEnumerable CompleteStreamingAsync(IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable GetStreamingResponseAsync(IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(chatMessages); @@ -82,6 +82,7 @@ public void Dispose() serviceKey is not null ? null : serviceType.IsInstanceOfType(this) ? this : serviceType.IsInstanceOfType(this._chatCompletionService) ? this._chatCompletionService : + serviceType.IsInstanceOfType(this.Metadata) ? this.Metadata : null; } @@ -191,11 +192,11 @@ public void Dispose() return settings; } - /// Converts a to a . + /// Converts a to a . /// This conversion should not be necessary once SK eventually adopts the shared content types. - private static StreamingChatCompletionUpdate ToStreamingChatCompletionUpdate(StreamingChatMessageContent content) + private static ChatResponseUpdate ToStreamingChatCompletionUpdate(StreamingChatMessageContent content) { - StreamingChatCompletionUpdate update = new() + ChatResponseUpdate update = new() { AdditionalProperties = content.Metadata is not null ? new AdditionalPropertiesDictionary(content.Metadata) : null, AuthorName = content.AuthorName, diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs index ef8b0b56c7f9..cf5834725700 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs @@ -169,15 +169,15 @@ internal static ChatMessage ToChatMessage(ChatMessageContent content) case Microsoft.SemanticKernel.ImageContent ic: aiContent = - ic.DataUri is not null ? new Microsoft.Extensions.AI.ImageContent(ic.DataUri, ic.MimeType) : - ic.Uri is not null ? new Microsoft.Extensions.AI.ImageContent(ic.Uri, ic.MimeType) : + ic.DataUri is not null ? new Microsoft.Extensions.AI.DataContent(ic.DataUri, ic.MimeType ?? "image/*") : + ic.Uri is not null ? new Microsoft.Extensions.AI.DataContent(ic.Uri, ic.MimeType ?? "image/*") : null; break; case Microsoft.SemanticKernel.AudioContent ac: aiContent = - ac.DataUri is not null ? new Microsoft.Extensions.AI.AudioContent(ac.DataUri, ac.MimeType) : - ac.Uri is not null ? new Microsoft.Extensions.AI.AudioContent(ac.Uri, ac.MimeType) : + ac.DataUri is not null ? new Microsoft.Extensions.AI.DataContent(ac.DataUri, ac.MimeType ?? "audio/*") : + ac.Uri is not null ? new Microsoft.Extensions.AI.DataContent(ac.Uri, ac.MimeType ?? "audio/*") : null; break; @@ -193,7 +193,7 @@ internal static ChatMessage ToChatMessage(ChatMessageContent content) break; case Microsoft.SemanticKernel.FunctionResultContent frc: - aiContent = new Microsoft.Extensions.AI.FunctionResultContent(frc.CallId ?? string.Empty, frc.FunctionName ?? string.Empty, frc.Result); + aiContent = new Microsoft.Extensions.AI.FunctionResultContent(frc.CallId ?? string.Empty, frc.Result); break; } @@ -211,13 +211,13 @@ internal static ChatMessage ToChatMessage(ChatMessageContent content) /// Converts a to a . 
/// This conversion should not be necessary once SK eventually adopts the shared content types. - internal static ChatMessageContent ToChatMessageContent(ChatMessage message, Microsoft.Extensions.AI.ChatCompletion? completion = null) + internal static ChatMessageContent ToChatMessageContent(ChatMessage message, Microsoft.Extensions.AI.ChatResponse? response = null) { ChatMessageContent result = new() { - ModelId = completion?.ModelId, + ModelId = response?.ModelId, AuthorName = message.AuthorName, - InnerContent = completion?.RawRepresentation ?? message.RawRepresentation, + InnerContent = response?.RawRepresentation ?? message.RawRepresentation, Metadata = message.AdditionalProperties, Role = new AuthorRole(message.Role.Value), }; @@ -231,20 +231,20 @@ internal static ChatMessageContent ToChatMessageContent(ChatMessage message, Mic resultContent = new Microsoft.SemanticKernel.TextContent(tc.Text); break; - case Microsoft.Extensions.AI.ImageContent ic: - resultContent = ic.ContainsData ? - new Microsoft.SemanticKernel.ImageContent(ic.Uri) : - new Microsoft.SemanticKernel.ImageContent(new Uri(ic.Uri)); + case Microsoft.Extensions.AI.DataContent dc when dc.MediaTypeStartsWith("image/"): + resultContent = dc.Data is not null ? + new Microsoft.SemanticKernel.ImageContent(dc.Uri) : + new Microsoft.SemanticKernel.ImageContent(new Uri(dc.Uri)); break; - case Microsoft.Extensions.AI.AudioContent ac: - resultContent = ac.ContainsData ? - new Microsoft.SemanticKernel.AudioContent(ac.Uri) : - new Microsoft.SemanticKernel.AudioContent(new Uri(ac.Uri)); + case Microsoft.Extensions.AI.DataContent dc when dc.MediaTypeStartsWith("audio/"): + resultContent = dc.Data is not null ? + new Microsoft.SemanticKernel.AudioContent(dc.Uri) : + new Microsoft.SemanticKernel.AudioContent(new Uri(dc.Uri)); break; case Microsoft.Extensions.AI.DataContent dc: - resultContent = dc.ContainsData ? + resultContent = dc.Data is not null ? new Microsoft.SemanticKernel.BinaryContent(dc.Uri) : new Microsoft.SemanticKernel.BinaryContent(new Uri(dc.Uri)); break; @@ -254,7 +254,7 @@ internal static ChatMessageContent ToChatMessageContent(ChatMessage message, Mic break; case Microsoft.Extensions.AI.FunctionResultContent frc: - resultContent = new Microsoft.SemanticKernel.FunctionResultContent(frc.Name, null, frc.CallId, frc.Result); + resultContent = new Microsoft.SemanticKernel.FunctionResultContent(callId: frc.CallId, result: frc.Result); break; } @@ -262,7 +262,7 @@ internal static ChatMessageContent ToChatMessageContent(ChatMessage message, Mic { resultContent.Metadata = content.AdditionalProperties; resultContent.InnerContent = content.RawRepresentation; - resultContent.ModelId = completion?.ModelId; + resultContent.ModelId = response?.ModelId; result.Items.Add(resultContent); } } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs index fda7be0d0c8c..22968c47ea38 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs @@ -28,15 +28,26 @@ public ChatHistory() } /// - /// Creates a new instance of the class with a system message + /// Creates a new instance of the with a first message in the provided . + /// If not role is provided then the first message will default to role. /// - /// The system message to add to the history. 
- public ChatHistory(string systemMessage) + /// The text message to add to the first message in chat history. + /// The role to add as the first message. + public ChatHistory(string message, AuthorRole role) { - Verify.NotNullOrWhiteSpace(systemMessage); + Verify.NotNullOrWhiteSpace(message); this._messages = []; - this.AddSystemMessage(systemMessage); + this.Add(new ChatMessageContent(role, message)); + } + + /// + /// Creates a new instance of the class with a system message. + /// + /// The system message to add to the history. + public ChatHistory(string systemMessage) + : this(systemMessage, AuthorRole.System) + { } /// Initializes the history will all of the specified messages. @@ -97,6 +108,13 @@ public void AddAssistantMessage(string content) => public void AddSystemMessage(string content) => this.AddMessage(AuthorRole.System, content); + /// + /// Add a developer message to the chat history + /// + /// Message content + public void AddDeveloperMessage(string content) => + this.AddMessage(AuthorRole.Developer, content); + /// Adds a message to the history. /// The message to add. /// is null. diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistoryExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistoryExtensions.cs new file mode 100644 index 000000000000..faf11b2fe450 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistoryExtensions.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel.ChatCompletion; + +/// +/// Extension methods for chat history. +/// +[Experimental("SKEXP0001")] +public static class ChatHistoryExtensions +{ + /// + /// Process history reduction and mutate the provided history in place. + /// + /// The source history + /// The target reducer + /// The to monitor for cancellation requests. The default is . + /// True if reduction has occurred. + /// + /// Using the existing for a reduction in collection size eliminates the need + /// for re-allocation (of memory). + /// + public static async Task ReduceInPlaceAsync(this ChatHistory chatHistory, IChatHistoryReducer? reducer, CancellationToken cancellationToken) + { + if (reducer is null) + { + return false; + } + + IEnumerable? reducedHistory = await reducer.ReduceAsync(chatHistory, cancellationToken).ConfigureAwait(false); + + if (reducedHistory is null) + { + return false; + } + + // Mutate the history in place + ChatMessageContent[] reduced = reducedHistory.ToArray(); + chatHistory.Clear(); + chatHistory.AddRange(reduced); + + return true; + } + + /// + /// Returns the reduced history using the provided reducer without mutating the source history. + /// + /// The source history + /// The target reducer + /// The to monitor for cancellation requests. The default is . + public static async Task> ReduceAsync(this IReadOnlyList chatHistory, IChatHistoryReducer? reducer, CancellationToken cancellationToken) + { + if (reducer is not null) + { + IEnumerable? reducedHistory = await reducer.ReduceAsync(chatHistory, cancellationToken).ConfigureAwait(false); + chatHistory = reducedHistory?.ToArray() ?? chatHistory; + } + + return chatHistory; + } + + /// + /// Returns the reduced history using the provided reducer without mutating the source history. 
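The ChatHistory additions above (the role-aware constructor, AddDeveloperMessage, and the new reduction extensions) can be combined as in the following sketch; the reducer is assumed to be any IChatHistoryReducer implementation and the messages are illustrative:

    ChatHistory history = new("You are a helpful assistant.", AuthorRole.System);
    history.AddDeveloperMessage("Prefer concise answers.");
    history.AddUserMessage("What is Semantic Kernel?");

    // Mutates the history in place; returns true only if the reducer actually shrank it.
    bool reduced = await history.ReduceInPlaceAsync(reducer, CancellationToken.None);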
+ /// + /// The source history + /// The target reducer + /// The to monitor for cancellation requests. The default is . + public static async Task ReduceAsync(this ChatHistory chatHistory, IChatHistoryReducer? reducer, CancellationToken cancellationToken) + { + if (reducer is not null) + { + IEnumerable? reduced = await reducer.ReduceAsync(chatHistory, cancellationToken).ConfigureAwait(false); + return new ChatHistory(reduced ?? chatHistory); + } + + return chatHistory; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatPromptParser.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatPromptParser.cs index 12d63de28d3c..0c2a7f18bb7a 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatPromptParser.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatPromptParser.cs @@ -112,19 +112,12 @@ private static ChatMessageContent ParseChatNode(PromptNode node) /// TagName = "message"
/// Attributes = { "role" : "..." }
/// optional one or more child nodes ...
- /// content not null or single child node ... + /// optional one or more child nodes ... ///
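The remark change above, together with the simplified IsValidChatMessage that follows, relaxes chat-prompt validation: a message element now only needs the "message" tag name and a "role" attribute. A sketch of a prompt that is expected to parse under the relaxed rule (the image child node is an illustrative assumption):

    // Previously a message required exactly one text child or non-null content;
    // a mixed-content message like this should now pass validation.
    string prompt = """
        <message role="user">
            <text>Please describe this image.</text>
            <image>https://example.com/cat.png</image>
        </message>
        """;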
private static bool IsValidChatMessage(PromptNode node) { return node.TagName.Equals(MessageTagName, StringComparison.OrdinalIgnoreCase) && - node.Attributes.ContainsKey(RoleAttributeName) && - IsValidChildNodes(node); - } - - private static bool IsValidChildNodes(PromptNode node) - { - var textTagsCount = node.ChildNodes.Count(n => n.TagName.Equals(TextTagName, StringComparison.OrdinalIgnoreCase)); - return textTagsCount == 1 || (textTagsCount == 0 && node.Content is not null); + node.Attributes.ContainsKey(RoleAttributeName); } } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatHistoryReducer.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatHistoryReducer.cs new file mode 100644 index 000000000000..7efc5a9ab515 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatHistoryReducer.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel.ChatCompletion; + +/// +/// Interface for reducing the chat history. +/// +[Experimental("SKEXP0001")] +public interface IChatHistoryReducer +{ + /// + /// Reduces the chat history. + /// + /// Chat history to be reduced. + /// The to monitor for cancellation requests. The default is . + /// The reduced history or if no reduction has occurred. + Task?> ReduceAsync(IReadOnlyList chatHistory, CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/EmbeddingGenerationServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/EmbeddingGenerationServiceExtensions.cs index c060c3f0d523..96f1dd0252dd 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/EmbeddingGenerationServiceExtensions.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/EmbeddingGenerationServiceExtensions.cs @@ -132,6 +132,7 @@ public async Task>> GenerateAsync(IEnu serviceKey is not null ? null : serviceType.IsInstanceOfType(this) ? this : serviceType.IsInstanceOfType(this._service) ? this._service : + serviceType.IsInstanceOfType(this.Metadata) ? this.Metadata : null; } } @@ -154,12 +155,12 @@ public EmbeddingGeneratorEmbeddingGenerationService( var attrs = new Dictionary(); this.Attributes = new ReadOnlyDictionary(attrs); - var metadata = generator.Metadata; - if (metadata.ProviderUri is not null) + var metadata = (EmbeddingGeneratorMetadata?)generator.GetService(typeof(EmbeddingGeneratorMetadata)); + if (metadata?.ProviderUri is not null) { attrs[AIServiceExtensions.EndpointKey] = metadata.ProviderUri.ToString(); } - if (metadata.ModelId is not null) + if (metadata?.ModelId is not null) { attrs[AIServiceExtensions.ModelIdKey] = metadata.ModelId; } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorOptions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorOptions.cs index a9cb63787177..989206bc7aa2 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorOptions.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorOptions.cs @@ -1,6 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel; @@ -35,6 +34,5 @@ public sealed class FunctionChoiceBehaviorOptions /// The default value is set to false. If set to true, the AI model will strictly adhere to the function schema. /// [JsonPropertyName("allow_strict_schema_adherence")] - [Experimental("SKEXP0001")] public bool AllowStrictSchemaAdherence { get; set; } = false; } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs index 3eb2d890aa54..95d1d442ab2f 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs @@ -23,7 +23,7 @@ public interface ITextToImageService : IAIService /// The to monitor for cancellation requests. The default is . /// Generated image contents [Experimental("SKEXP0001")] - public Task> GetImageContentsAsync( + Task> GetImageContentsAsync( TextContent input, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, diff --git a/dotnet/src/SemanticKernel.Abstractions/AbstractionsJsonContext.cs b/dotnet/src/SemanticKernel.Abstractions/AbstractionsJsonContext.cs index 29caab93da9a..736710ab146c 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AbstractionsJsonContext.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AbstractionsJsonContext.cs @@ -6,6 +6,7 @@ using System.Text.Json; using System.Text.Json.Serialization; using System.Text.Json.Serialization.Metadata; +using Microsoft.SemanticKernel.Functions; namespace Microsoft.SemanticKernel; @@ -15,6 +16,7 @@ namespace Microsoft.SemanticKernel; WriteIndented = true)] [JsonSerializable(typeof(IDictionary))] [JsonSerializable(typeof(JsonElement))] +[JsonSerializable(typeof(KernelFunctionSchemaModel))] [JsonSerializable(typeof(PromptExecutionSettings))] // types commonly used as values in settings dictionaries [JsonSerializable(typeof(string))] diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs index f751ea6fc448..f0e71963fc80 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs @@ -3,7 +3,7 @@ using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Agents.OpenAI; +namespace Microsoft.SemanticKernel.Agents; /// /// Content type to support message annotations. diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs index 925d74d0c731..641e375b2839 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs @@ -16,6 +16,15 @@ public class FileReferenceContent : KernelContent /// public string FileId { get; init; } = string.Empty; + /// + /// An optional tool association. + /// + /// + /// Tool definition depends upon the context within which the content is consumed. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList? Tools { get; init; } + /// /// Initializes a new instance of the class. 
/// diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs index 8dbcc00eb25d..525472d90047 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs @@ -2,7 +2,7 @@ using System.Collections.Generic; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.Agents; namespace Microsoft.SemanticKernel; diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs index 609f94a87180..5c5aa5780303 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs @@ -4,7 +4,7 @@ using System.Text; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Agents.OpenAI; +namespace Microsoft.SemanticKernel.Agents; /// /// Content type to support message annotations. diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs index 9e7325b771c2..ac8380506d43 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs @@ -61,8 +61,8 @@ public StreamingKernelContentItemCollection Items /// /// Name of the author of the message /// - [Experimental("SKEXP0001")] [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + [Experimental("SKEXP0001")] public string? AuthorName { get => this._authorName; diff --git a/dotnet/src/SemanticKernel.Abstractions/Data/TextSearch/ITextSearch.cs b/dotnet/src/SemanticKernel.Abstractions/Data/TextSearch/ITextSearch.cs index 3b1ef667255b..095c6d9a78d5 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Data/TextSearch/ITextSearch.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Data/TextSearch/ITextSearch.cs @@ -18,7 +18,7 @@ public interface ITextSearch /// What to search for. /// Options used when executing a text search. /// The to monitor for cancellation requests. The default is . - public Task> SearchAsync( + Task> SearchAsync( string query, TextSearchOptions? searchOptions = null, CancellationToken cancellationToken = default); @@ -29,7 +29,7 @@ public Task> SearchAsync( /// What to search for. /// Options used when executing a text search. /// The to monitor for cancellation requests. The default is . - public Task> GetTextSearchResultsAsync( + Task> GetTextSearchResultsAsync( string query, TextSearchOptions? searchOptions = null, CancellationToken cancellationToken = default); @@ -40,7 +40,7 @@ public Task> GetTextSearchResultsAsync( /// What to search for. /// Options used when executing a text search. /// The to monitor for cancellation requests. The default is . - public Task> GetSearchResultsAsync( + Task> GetSearchResultsAsync( string query, TextSearchOptions? 
searchOptions = null, CancellationToken cancellationToken = default); diff --git a/dotnet/src/SemanticKernel.Abstractions/Filters/AutoFunctionInvocation/AutoFunctionInvocationContext.cs b/dotnet/src/SemanticKernel.Abstractions/Filters/AutoFunctionInvocation/AutoFunctionInvocationContext.cs index d943cff4fe89..d13d5519b652 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Filters/AutoFunctionInvocation/AutoFunctionInvocationContext.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Filters/AutoFunctionInvocation/AutoFunctionInvocationContext.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Diagnostics.CodeAnalysis; using System.Threading; using Microsoft.SemanticKernel.ChatCompletion; @@ -79,6 +80,12 @@ public AutoFunctionInvocationContext( /// public ChatMessageContent ChatMessageContent { get; } + /// + /// The execution settings associated with the operation. + /// + [Experimental("SKEXP0001")] + public PromptExecutionSettings? ExecutionSettings { get; init; } + /// /// Gets the associated with automatic function invocation. /// diff --git a/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderContext.cs b/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderContext.cs index da16264a9fc7..3ac7507101f2 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderContext.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderContext.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Diagnostics.CodeAnalysis; using System.Threading; namespace Microsoft.SemanticKernel; @@ -54,6 +55,12 @@ internal PromptRenderContext(Kernel kernel, KernelFunction function, KernelArgum /// public KernelArguments Arguments { get; } + /// + /// The execution settings associated with the operation. + /// + [Experimental("SKEXP0001")] + public PromptExecutionSettings? ExecutionSettings { get; init; } + /// /// Gets or sets the rendered prompt. /// diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs index cc2d260b48a7..fddac8f48282 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs @@ -16,6 +16,7 @@ using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel.Functions; namespace Microsoft.SemanticKernel; @@ -517,6 +518,7 @@ public AIFunction AsAIFunction(Kernel? kernel = null) /// An wrapper around a . private sealed class KernelAIFunction : AIFunction { + private static readonly JsonElement s_defaultSchema = JsonDocument.Parse("{}").RootElement; private readonly KernelFunction _kernelFunction; private readonly Kernel? _kernel; @@ -524,37 +526,17 @@ public KernelAIFunction(KernelFunction kernelFunction, Kernel? kernel) { this._kernelFunction = kernelFunction; this._kernel = kernel; - - string name = string.IsNullOrWhiteSpace(kernelFunction.PluginName) ? + this.Name = string.IsNullOrWhiteSpace(kernelFunction.PluginName) ? 
kernelFunction.Name : $"{kernelFunction.PluginName}-{kernelFunction.Name}"; - this.Metadata = new AIFunctionMetadata(name) - { - Description = kernelFunction.Description, - - JsonSerializerOptions = kernelFunction.JsonSerializerOptions, - - Parameters = kernelFunction.Metadata.Parameters.Select(p => new AIFunctionParameterMetadata(p.Name) - { - Description = p.Description, - ParameterType = p.ParameterType, - IsRequired = p.IsRequired, - HasDefaultValue = p.DefaultValue is not null, - DefaultValue = p.DefaultValue, - Schema = p.Schema?.RootElement, - }).ToList(), - - ReturnParameter = new AIFunctionReturnParameterMetadata() - { - Description = kernelFunction.Metadata.ReturnParameter.Description, - ParameterType = kernelFunction.Metadata.ReturnParameter.ParameterType, - Schema = kernelFunction.Metadata.ReturnParameter.Schema?.RootElement, - }, - }; + this.JsonSchema = BuildFunctionSchema(kernelFunction); } - public override AIFunctionMetadata Metadata { get; } + public override string Name { get; } + public override JsonElement JsonSchema { get; } + public override string Description => this._kernelFunction.Description; + public override JsonSerializerOptions JsonSerializerOptions => this._kernelFunction.JsonSerializerOptions ?? base.JsonSerializerOptions; protected override async Task InvokeCoreAsync( IEnumerable> arguments, CancellationToken cancellationToken) @@ -576,5 +558,25 @@ public KernelAIFunction(KernelFunction kernelFunction, Kernel? kernel) JsonSerializer.SerializeToElement(value, AbstractionsJsonContext.GetTypeInfo(value.GetType(), this._kernelFunction.JsonSerializerOptions)) : null; } + + private static JsonElement BuildFunctionSchema(KernelFunction function) + { + KernelFunctionSchemaModel schemaModel = new() + { + Type = "object", + Description = function.Description, + }; + + foreach (var parameter in function.Metadata.Parameters) + { + schemaModel.Properties[parameter.Name] = parameter.Schema?.RootElement ?? s_defaultSchema; + if (parameter.IsRequired) + { + (schemaModel.Required ??= []).Add(parameter.Name); + } + } + + return JsonSerializer.SerializeToElement(schemaModel, AbstractionsJsonContext.Default.KernelFunctionSchemaModel); + } } } diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionNoop.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionNoop.cs new file mode 100644 index 000000000000..ce6ebc7eaf39 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionNoop.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents a kernel function that performs no operation. +/// +[RequiresUnreferencedCode("Uses reflection to handle various aspects of the function creation and invocation, making it incompatible with AOT scenarios.")] +[RequiresDynamicCode("Uses reflection to handle various aspects of the function creation and invocation, making it incompatible with AOT scenarios.")] +internal sealed class KernelFunctionNoop : KernelFunction +{ + /// + /// Creates a new instance of the class. + /// + /// Option: Prompt execution settings. + internal KernelFunctionNoop(IReadOnlyDictionary? 
executionSettings) : + base($"Function_{Guid.NewGuid():N}", string.Empty, [], null, executionSettings?.ToDictionary(static kv => kv.Key, static kv => kv.Value)) + { + } + + /// + public override KernelFunction Clone(string pluginName) + { + Dictionary? executionSettings = this.ExecutionSettings?.ToDictionary(kv => kv.Key, kv => kv.Value); + return new KernelFunctionNoop(executionSettings); + } + + /// + protected override ValueTask InvokeCoreAsync(Kernel kernel, KernelArguments arguments, CancellationToken cancellationToken) + { + return new(new FunctionResult(this)); + } + + /// + protected override IAsyncEnumerable InvokeStreamingCoreAsync(Kernel kernel, KernelArguments arguments, CancellationToken cancellationToken) + { + return AsyncEnumerable.Empty(); + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionSchemaModel.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionSchemaModel.cs new file mode 100644 index 000000000000..e7460f9773af --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionSchemaModel.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Functions; + +internal sealed class KernelFunctionSchemaModel +{ + [JsonPropertyName("type")] + public string Type { get; set; } = "object"; + + [JsonPropertyName("condition"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Description { get; set; } + + [JsonPropertyName("properties")] + public Dictionary Properties { get; set; } = []; + + [JsonPropertyName("required"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public List? Required { get; set; } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Kernel.cs b/dotnet/src/SemanticKernel.Abstractions/Kernel.cs index 9f53ddc93a7f..99a335e15656 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Kernel.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Kernel.cs @@ -349,13 +349,15 @@ internal async Task OnPromptRenderAsync( KernelFunction function, KernelArguments arguments, bool isStreaming, + PromptExecutionSettings? executionSettings, Func renderCallback, CancellationToken cancellationToken) { PromptRenderContext context = new(this, function, arguments) { CancellationToken = cancellationToken, - IsStreaming = isStreaming + IsStreaming = isStreaming, + ExecutionSettings = executionSettings }; await InvokeFilterOrPromptRenderAsync(this._promptRenderFilters, renderCallback, context).ConfigureAwait(false); diff --git a/dotnet/src/SemanticKernel.Abstractions/Memory/ISemanticTextMemory.cs b/dotnet/src/SemanticKernel.Abstractions/Memory/ISemanticTextMemory.cs index d587fc56778b..7218f0ad4033 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Memory/ISemanticTextMemory.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Memory/ISemanticTextMemory.cs @@ -24,7 +24,7 @@ public interface ISemanticTextMemory /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . /// Unique identifier of the saved memory record. - public Task SaveInformationAsync( + Task SaveInformationAsync( string collection, string text, string id, @@ -45,7 +45,7 @@ public Task SaveInformationAsync( /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . 
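For the KernelAIFunction rework above, BuildFunctionSchema assembles the schema from the function's parameter metadata, so the JsonSchema exposed via AsAIFunction is expected to take roughly the shape sketched below (function and parameter names are hypothetical, output is approximate):

    KernelFunction weather = KernelFunctionFactory.CreateFromMethod(
        (string city, string unit) => $"Sunny in {city}",
        functionName: "GetWeather");

    AIFunction aiFunction = weather.AsAIFunction();
    Console.WriteLine(aiFunction.JsonSchema.GetRawText());
    // Approximate output:
    // { "type": "object", "properties": { "city": { ... }, "unit": { ... } }, "required": ["city", "unit"] }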
/// Unique identifier of the saved memory record. - public Task SaveReferenceAsync( + Task SaveReferenceAsync( string collection, string text, string externalId, @@ -66,7 +66,7 @@ public Task SaveReferenceAsync( /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . /// Memory record, or null when nothing is found - public Task GetAsync(string collection, string key, bool withEmbedding = false, Kernel? kernel = null, CancellationToken cancellationToken = default); + Task GetAsync(string collection, string key, bool withEmbedding = false, Kernel? kernel = null, CancellationToken cancellationToken = default); /// /// Remove a memory by key. @@ -77,7 +77,7 @@ public Task SaveReferenceAsync( /// Unique memory record identifier. /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . - public Task RemoveAsync(string collection, string key, Kernel? kernel = null, CancellationToken cancellationToken = default); + Task RemoveAsync(string collection, string key, Kernel? kernel = null, CancellationToken cancellationToken = default); /// /// Find some information in memory @@ -90,7 +90,7 @@ public Task SaveReferenceAsync( /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . /// Memories found - public IAsyncEnumerable SearchAsync( + IAsyncEnumerable SearchAsync( string collection, string query, int limit = 1, @@ -105,5 +105,5 @@ public IAsyncEnumerable SearchAsync( /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . /// A group of collection names. - public Task> GetCollectionsAsync(Kernel? kernel = null, CancellationToken cancellationToken = default); + Task> GetCollectionsAsync(Kernel? kernel = null, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/SemanticKernel.Abstractions/Services/AIServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/Services/AIServiceExtensions.cs index 30a3ee7794e5..24bc16a0f8e7 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Services/AIServiceExtensions.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Services/AIServiceExtensions.cs @@ -2,6 +2,8 @@ #pragma warning disable CA1716 // Identifiers should not match keywords +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Text; using Microsoft.Extensions.DependencyInjection; @@ -109,4 +111,32 @@ public static (T, PromptExecutionSettings?) SelectAIService( throw new KernelException(message.ToString()); } + + /// + /// Resolves an and associated from the specified + /// based on a and associated . + /// + /// + /// Specifies the type of the required. This must be the same type + /// with which the service was registered in the orvia + /// the . + /// + /// The to use to select a service from the . + /// The containing services, plugins, and other state for use throughout the operation. + /// The dictionary of to use to select a service from the . + /// The function arguments. + /// A tuple of the selected service and the settings associated with the service (the settings may be null). + /// An appropriate service could not be found. 
+ [RequiresUnreferencedCode("Uses reflection to handle various aspects of the function creation and invocation, making it incompatible with AOT scenarios.")] + [RequiresDynamicCode("Uses reflection to handle various aspects of the function creation and invocation, making it incompatible with AOT scenarios.")] + public static (T, PromptExecutionSettings?) SelectAIService( + this IAIServiceSelector selector, + Kernel kernel, + IReadOnlyDictionary? executionSettings, + KernelArguments arguments) where T : class, IAIService + { + // Need to provide a KernelFunction to the service selector as a container for the execution-settings. + KernelFunction nullPrompt = new KernelFunctionNoop(executionSettings); + return selector.SelectAIService(kernel, nullPrompt, arguments); + } } diff --git a/dotnet/src/Agents/Core/History/ChatHistoryReducerExtensions.cs b/dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistoryReducerExtensions.cs similarity index 51% rename from dotnet/src/Agents/Core/History/ChatHistoryReducerExtensions.cs rename to dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistoryReducerExtensions.cs index a9fb8ed895ff..0b2de8042f8a 100644 --- a/dotnet/src/Agents/Core/History/ChatHistoryReducerExtensions.cs +++ b/dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistoryReducerExtensions.cs @@ -1,12 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. + using System; using System.Collections.Generic; using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.ChatCompletion; -namespace Microsoft.SemanticKernel.Agents.History; +namespace Microsoft.SemanticKernel.ChatCompletion; /// /// Discrete operations used when reducing chat history. @@ -19,30 +17,41 @@ internal static class ChatHistoryReducerExtensions /// /// Extract a range of messages from the source history. /// - /// The source history + /// The source history /// The index of the first message to extract /// The index of the last message to extract + /// An optional system message content to include /// The optional filter to apply to each message - public static IEnumerable Extract(this IReadOnlyList history, int startIndex, int? finalIndex = null, Func? filter = null) + public static IEnumerable Extract( + this IReadOnlyList chatHistory, + int startIndex, + int? finalIndex = null, + ChatMessageContent? systemMessage = null, + Func? filter = null) { - int maxIndex = history.Count - 1; + int maxIndex = chatHistory.Count - 1; if (startIndex > maxIndex) { yield break; } + if (systemMessage is not null) + { + yield return systemMessage; + } + finalIndex ??= maxIndex; finalIndex = Math.Min(finalIndex.Value, maxIndex); for (int index = startIndex; index <= finalIndex; ++index) { - if (filter?.Invoke(history[index]) ?? false) + if (filter?.Invoke(chatHistory[index]) ?? false) { continue; } - yield return history[index]; + yield return chatHistory[index]; } } @@ -50,13 +59,13 @@ public static IEnumerable Extract(this IReadOnlyList - /// The source history + /// The source history /// The metadata key that identifies a summary message. 
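The new SelectAIService overload above wraps the supplied settings in a no-op function so the existing selector path can be reused. A sketch of a call site, assuming a Kernel 'kernel' and an IAIServiceSelector 'selector' already exist (service key and model id are illustrative):

    IReadOnlyDictionary<string, PromptExecutionSettings> settings = new Dictionary<string, PromptExecutionSettings>
    {
        ["default"] = new PromptExecutionSettings { ModelId = "gpt-4o-mini" }
    };

    (IChatCompletionService service, PromptExecutionSettings? selected) =
        selector.SelectAIService<IChatCompletionService>(kernel, settings, new KernelArguments());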
- public static int LocateSummarizationBoundary(this IReadOnlyList history, string summaryKey) + public static int LocateSummarizationBoundary(this IReadOnlyList chatHistory, string summaryKey) { - for (int index = 0; index < history.Count; ++index) + for (int index = 0; index < chatHistory.Count; ++index) { - ChatMessageContent message = history[index]; + ChatMessageContent message = chatHistory[index]; if (!message.Metadata?.ContainsKey(summaryKey) ?? true) { @@ -64,7 +73,7 @@ public static int LocateSummarizationBoundary(this IReadOnlyList @@ -75,7 +84,7 @@ public static int LocateSummarizationBoundary(this IReadOnlyList - /// The source history + /// The source history /// The desired message count, should reduction occur. /// /// The threshold, beyond targetCount, required to trigger reduction. @@ -86,11 +95,19 @@ public static int LocateSummarizationBoundary(this IReadOnlyList + /// Indicates whether chat history contains system message. /// An index that identifies the starting point for a reduced history that does not orphan sensitive content. - public static int LocateSafeReductionIndex(this IReadOnlyList history, int targetCount, int? thresholdCount = null, int offsetCount = 0) + public static int LocateSafeReductionIndex( + this IReadOnlyList chatHistory, + int targetCount, + int? thresholdCount = null, + int offsetCount = 0, + bool hasSystemMessage = false) { + targetCount -= hasSystemMessage ? 1 : 0; + // Compute the index of the truncation threshold - int thresholdIndex = history.Count - (thresholdCount ?? 0) - targetCount; + int thresholdIndex = chatHistory.Count - (thresholdCount ?? 0) - targetCount; if (thresholdIndex <= offsetCount) { @@ -99,12 +116,12 @@ public static int LocateSafeReductionIndex(this IReadOnlyList= 0) { - if (!history[messageIndex].Items.Any(i => i is FunctionCallContent || i is FunctionResultContent)) + if (!chatHistory[messageIndex].Items.Any(i => i is FunctionCallContent || i is FunctionResultContent)) { break; } @@ -119,7 +136,7 @@ public static int LocateSafeReductionIndex(this IReadOnlyList= thresholdIndex) { // A user message provides a superb truncation point - if (history[messageIndex].Role == AuthorRole.User) + if (chatHistory[messageIndex].Role == AuthorRole.User) { return messageIndex; } @@ -130,54 +147,4 @@ public static int LocateSafeReductionIndex(this IReadOnlyList - /// Process history reduction and mutate the provided history. - /// - /// The source history - /// The target reducer - /// The to monitor for cancellation requests. The default is . - /// True if reduction has occurred. - /// - /// Using the existing for a reduction in collection size eliminates the need - /// for re-allocation (of memory). - /// - public static async Task ReduceAsync(this ChatHistory history, IChatHistoryReducer? reducer, CancellationToken cancellationToken) - { - if (reducer == null) - { - return false; - } - - IEnumerable? reducedHistory = await reducer.ReduceAsync(history, cancellationToken).ConfigureAwait(false); - - if (reducedHistory == null) - { - return false; - } - - // Mutate the history in place - ChatMessageContent[] reduced = reducedHistory.ToArray(); - history.Clear(); - history.AddRange(reduced); - - return true; - } - - /// - /// Reduce the history using the provided reducer without mutating the source history. - /// - /// The source history - /// The target reducer - /// The to monitor for cancellation requests. The default is . - public static async Task> ReduceAsync(this IReadOnlyList history, IChatHistoryReducer? 
reducer, CancellationToken cancellationToken) - { - if (reducer != null) - { - IEnumerable? reducedHistory = await reducer.ReduceAsync(history, cancellationToken).ConfigureAwait(false); - history = reducedHistory?.ToArray() ?? history; - } - - return history; - } } diff --git a/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs b/dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistorySummarizationReducer.cs similarity index 79% rename from dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs rename to dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistorySummarizationReducer.cs index a65cefbea98b..23d7e4286015 100644 --- a/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs +++ b/dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistorySummarizationReducer.cs @@ -1,12 +1,13 @@ // Copyright (c) Microsoft. All rights reserved. + using System; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.ChatCompletion; -namespace Microsoft.SemanticKernel.Agents.History; +namespace Microsoft.SemanticKernel.ChatCompletion; /// /// Reduce the chat history by summarizing message past the target message count. @@ -17,6 +18,7 @@ namespace Microsoft.SemanticKernel.Agents.History; /// is provided (recommended), reduction will scan within the threshold window in an attempt to /// avoid orphaning a user message from an assistant response. /// +[Experimental("SKEXP0001")] public class ChatHistorySummarizationReducer : IChatHistoryReducer { /// @@ -63,14 +65,41 @@ Provide a concise and complete summarization of the entire dialog that does not /// public bool UseSingleSummary { get; init; } = true; + /// + /// Initializes a new instance of the class. + /// + /// A instance to be used for summarization. + /// The desired number of target messages after reduction. + /// An optional number of messages beyond the 'targetCount' that must be present in order to trigger reduction/ + /// + /// While the 'thresholdCount' is optional, it is recommended to provided so that reduction is not triggered + /// for every incremental addition to the chat history beyond the 'targetCount'. + /// > + public ChatHistorySummarizationReducer(IChatCompletionService service, int targetCount, int? thresholdCount = null) + { + Verify.NotNull(service, nameof(service)); + Verify.True(targetCount > 0, "Target message count must be greater than zero."); + Verify.True(!thresholdCount.HasValue || thresholdCount > 0, "The reduction threshold length must be greater than zero."); + + this._service = service; + this._targetCount = targetCount; + this._thresholdCount = thresholdCount ?? 
0; + } + /// - public async Task?> ReduceAsync(IReadOnlyList history, CancellationToken cancellationToken = default) + public async Task?> ReduceAsync(IReadOnlyList chatHistory, CancellationToken cancellationToken = default) { + var systemMessage = chatHistory.FirstOrDefault(l => l.Role == AuthorRole.System); + // Identify where summary messages end and regular history begins - int insertionPoint = history.LocateSummarizationBoundary(SummaryMetadataKey); + int insertionPoint = chatHistory.LocateSummarizationBoundary(SummaryMetadataKey); // First pass to determine the truncation index - int truncationIndex = history.LocateSafeReductionIndex(this._targetCount, this._thresholdCount, insertionPoint); + int truncationIndex = chatHistory.LocateSafeReductionIndex( + this._targetCount, + this._thresholdCount, + insertionPoint, + hasSystemMessage: systemMessage is not null); IEnumerable? truncatedHistory = null; @@ -78,20 +107,20 @@ Provide a concise and complete summarization of the entire dialog that does not { // Second pass to extract history for summarization IEnumerable summarizedHistory = - history.Extract( + chatHistory.Extract( this.UseSingleSummary ? 0 : insertionPoint, truncationIndex, - (m) => m.Items.Any(i => i is FunctionCallContent || i is FunctionResultContent)); + filter: (m) => m.Items.Any(i => i is FunctionCallContent || i is FunctionResultContent)); try { // Summarize ChatHistory summarizationRequest = [.. summarizedHistory, new ChatMessageContent(AuthorRole.System, this.SummarizationInstructions)]; - ChatMessageContent summary = await this._service.GetChatMessageContentAsync(summarizationRequest, cancellationToken: cancellationToken).ConfigureAwait(false); - summary.Metadata = new Dictionary { { SummaryMetadataKey, true } }; + ChatMessageContent summaryMessage = await this._service.GetChatMessageContentAsync(summarizationRequest, cancellationToken: cancellationToken).ConfigureAwait(false); + summaryMessage.Metadata = new Dictionary { { SummaryMetadataKey, true } }; // Assembly the summarized history - truncatedHistory = AssemblySummarizedHistory(summary); + truncatedHistory = AssemblySummarizedHistory(summaryMessage, systemMessage); } catch { @@ -105,49 +134,33 @@ Provide a concise and complete summarization of the entire dialog that does not return truncatedHistory; // Inner function to assemble the summarized history - IEnumerable AssemblySummarizedHistory(ChatMessageContent? summary) + IEnumerable AssemblySummarizedHistory(ChatMessageContent? summaryMessage, ChatMessageContent? systemMessage) { + if (systemMessage is not null) + { + yield return systemMessage; + } + if (insertionPoint > 0 && !this.UseSingleSummary) { for (int index = 0; index <= insertionPoint - 1; ++index) { - yield return history[index]; + yield return chatHistory[index]; } } - if (summary != null) + if (summaryMessage is not null) { - yield return summary; + yield return summaryMessage; } - for (int index = truncationIndex; index < history.Count; ++index) + for (int index = truncationIndex; index < chatHistory.Count; ++index) { - yield return history[index]; + yield return chatHistory[index]; } } } - /// - /// Initializes a new instance of the class. - /// - /// A instance to be used for summarization. - /// The desired number of target messages after reduction. 
- /// An optional number of messages beyond the 'targetCount' that must be present in order to trigger reduction/ - /// - /// While the 'thresholdCount' is optional, it is recommended to provided so that reduction is not triggered - /// for every incremental addition to the chat history beyond the 'targetCount'. - /// > - public ChatHistorySummarizationReducer(IChatCompletionService service, int targetCount, int? thresholdCount = null) - { - Verify.NotNull(service, nameof(service)); - Verify.True(targetCount > 0, "Target message count must be greater than zero."); - Verify.True(!thresholdCount.HasValue || thresholdCount > 0, "The reduction threshold length must be greater than zero."); - - this._service = service; - this._targetCount = targetCount; - this._thresholdCount = thresholdCount ?? 0; - } - /// public override bool Equals(object? obj) { diff --git a/dotnet/src/Agents/Core/History/ChatHistoryTruncationReducer.cs b/dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistoryTruncationReducer.cs similarity index 82% rename from dotnet/src/Agents/Core/History/ChatHistoryTruncationReducer.cs rename to dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistoryTruncationReducer.cs index be9ca7868f87..de9ea8037b32 100644 --- a/dotnet/src/Agents/Core/History/ChatHistoryTruncationReducer.cs +++ b/dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistoryTruncationReducer.cs @@ -1,10 +1,13 @@ // Copyright (c) Microsoft. All rights reserved. + using System; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; using System.Threading; using System.Threading.Tasks; -namespace Microsoft.SemanticKernel.Agents.History; +namespace Microsoft.SemanticKernel.ChatCompletion; /// /// Truncate the chat history to the target message count. @@ -15,25 +18,9 @@ namespace Microsoft.SemanticKernel.Agents.History; /// is provided (recommended), reduction will scan within the threshold window in an attempt to /// avoid orphaning a user message from an assistant response. /// +[Experimental("SKEXP0001")] public class ChatHistoryTruncationReducer : IChatHistoryReducer { - /// - public Task?> ReduceAsync(IReadOnlyList history, CancellationToken cancellationToken = default) - { - // First pass to determine the truncation index - int truncationIndex = history.LocateSafeReductionIndex(this._targetCount, this._thresholdCount); - - IEnumerable? truncatedHistory = null; - - if (truncationIndex > 0) - { - // Second pass to truncate the history - truncatedHistory = history.Extract(truncationIndex); - } - - return Task.FromResult(truncatedHistory); - } - /// /// Initializes a new instance of the class. /// @@ -53,6 +40,25 @@ public ChatHistoryTruncationReducer(int targetCount, int? thresholdCount = null) this._thresholdCount = thresholdCount ?? 0; } + /// + public Task?> ReduceAsync(IReadOnlyList chatHistory, CancellationToken cancellationToken = default) + { + var systemMessage = chatHistory.FirstOrDefault(l => l.Role == AuthorRole.System); + + // First pass to determine the truncation index + int truncationIndex = chatHistory.LocateSafeReductionIndex(this._targetCount, this._thresholdCount, hasSystemMessage: systemMessage is not null); + + IEnumerable? truncatedHistory = null; + + if (truncationIndex > 0) + { + // Second pass to truncate the history + truncatedHistory = chatHistory.Extract(truncationIndex, systemMessage: systemMessage); + } + + return Task.FromResult(truncatedHistory); + } + /// public override bool Equals(object? 
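Tying the relocated reducers to the system-message handling above, a usage sketch (counts and messages are illustrative; these are experimental SKEXP0001 APIs):

    ChatHistory history = new("You are a helpful assistant.", AuthorRole.System);
    for (int i = 0; i < 20; i++)
    {
        history.AddUserMessage($"Question {i}");
        history.AddAssistantMessage($"Answer {i}");
    }

    // The reducer now accounts for a leading system message and re-emits it first.
    var reducer = new ChatHistoryTruncationReducer(targetCount: 6, thresholdCount: 4);
    bool reduced = await history.ReduceInPlaceAsync(reducer, CancellationToken.None);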
obj) { diff --git a/dotnet/src/SemanticKernel.Core/Contents/BinaryContentExtensions.cs b/dotnet/src/SemanticKernel.Core/Contents/BinaryContentExtensions.cs new file mode 100644 index 000000000000..f0d8b29ae280 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Contents/BinaryContentExtensions.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides extension methods for interacting with . +/// +public static class BinaryContentExtensions +{ + /// + /// Writes the content to a file. + /// + /// The content to write. + /// The path to the file to write to. + /// Whether to overwrite the file if it already exists. + public static void WriteToFile(this BinaryContent content, string filePath, bool overwrite = false) + { + if (string.IsNullOrWhiteSpace(filePath)) + { + throw new ArgumentException("File path cannot be null or empty", nameof(filePath)); + } + + if (!overwrite && File.Exists(filePath)) + { + throw new InvalidOperationException("File already exists."); + } + + if (!content.CanRead) + { + throw new InvalidOperationException("No content to write to file."); + } + + File.WriteAllBytes(filePath, content.Data!.Value.ToArray()); + } +} diff --git a/dotnet/src/SemanticKernel.Core/Data/KernelBuilderExtensions.cs b/dotnet/src/SemanticKernel.Core/Data/KernelBuilderExtensions.cs deleted file mode 100644 index 39b00dec9149..000000000000 --- a/dotnet/src/SemanticKernel.Core/Data/KernelBuilderExtensions.cs +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Data; - -namespace Microsoft.SemanticKernel; - -/// -/// Extension methods to register Data services on the . -/// -[Experimental("SKEXP0001")] -public static class KernelBuilderExtensions -{ - /// - /// Register a Volatile with the specified service ID. - /// - /// The builder to register the on. - /// An optional service id to use as the service key. - /// The kernel builder. - [Obsolete("This has been replaced by the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] - public static IKernelBuilder AddVolatileVectorStore(this IKernelBuilder builder, string? serviceId = default) - { - builder.Services.AddVolatileVectorStore(serviceId); - return builder; - } - - /// - /// Register a instance with the specified service ID. - /// - /// The to register the on. - /// The name of the collection. - /// instance that can map a TRecord to a - /// instance that can map a TRecord to a - /// Options used to construct an instance of - /// An optional service id to use as the service key. - [Obsolete("This has been replaced by the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] - public static IKernelBuilder AddVolatileVectorStoreTextSearch( - this IKernelBuilder builder, - string collectionName, - ITextSearchStringMapper? stringMapper = null, - ITextSearchResultMapper? resultMapper = null, - VectorStoreTextSearchOptions? options = null, - string? serviceId = default) - where TKey : notnull - { - builder.Services.AddVolatileVectorStoreTextSearch(collectionName, stringMapper, resultMapper, options, serviceId); - return builder; - } - - /// - /// Register a instance with the specified service ID. - /// - /// The to register the on. - /// The name of the collection. 
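A minimal usage sketch for the new BinaryContentExtensions.WriteToFile above (bytes, MIME type, and path are illustrative):

    // WriteToFile throws unless the content is readable, and refuses to overwrite
    // an existing file unless overwrite is requested.
    var content = new BinaryContent(new byte[] { 0x25, 0x50, 0x44, 0x46 }, "application/pdf");
    content.WriteToFile("output.pdf", overwrite: true);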
- /// delegate that can map a TRecord to a - /// delegate that can map a TRecord to a - /// Options used to construct an instance of - /// An optional service id to use as the service key. - [Obsolete("This has been replaced by the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] - public static IKernelBuilder AddVolatileVectorStoreTextSearch( - this IKernelBuilder builder, - string collectionName, - MapFromResultToString? stringMapper = null, - MapFromResultToTextSearchResult? resultMapper = null, - VectorStoreTextSearchOptions? options = null, - string? serviceId = default) - where TKey : notnull - { - builder.AddVolatileVectorStoreTextSearch( - collectionName, - stringMapper is not null ? new TextSearchStringMapper(stringMapper) : null, - resultMapper is not null ? new TextSearchResultMapper(resultMapper) : null, - options, - serviceId); - return builder; - } -} diff --git a/dotnet/src/SemanticKernel.Core/Data/ServiceCollectionExtensions.cs b/dotnet/src/SemanticKernel.Core/Data/ServiceCollectionExtensions.cs deleted file mode 100644 index d9d465141d5a..000000000000 --- a/dotnet/src/SemanticKernel.Core/Data/ServiceCollectionExtensions.cs +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Data; -using Microsoft.SemanticKernel.Embeddings; - -namespace Microsoft.SemanticKernel; - -/// -/// Extension methods to register Data services on an . -/// -[Experimental("SKEXP0001")] -public static class ServiceCollectionExtensions -{ - /// - /// Register a Volatile with the specified service ID. - /// - /// The to register the on. - /// An optional service id to use as the service key. - /// The service collection. - [Obsolete("This has been replaced by the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] - public static IServiceCollection AddVolatileVectorStore(this IServiceCollection services, string? serviceId = default) - { - services.AddKeyedSingleton(serviceId); - services.AddKeyedSingleton(serviceId, (sp, obj) => sp.GetRequiredKeyedService(serviceId)); - return services; - } - - /// - /// Register a instance with the specified service ID. - /// - /// The to register the on. - /// The name of the collection. - /// instance that can map a TRecord to a - /// instance that can map a TRecord to a - /// Options used to construct an instance of - /// An optional service id to use as the service key. - [Obsolete("This has been replaced by the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] - [UnconditionalSuppressMessage("Trimming", "IL2091:Target generic argument does not satisfy 'DynamicallyAccessedMembersAttribute' in target method or type. The generic parameter of the source method or type does not have matching annotations.", Justification = "This method is obsolete")] - [UnconditionalSuppressMessage("Trimming", "IL2095:'DynamicallyAccessedMemberTypes' on the generic parameter of method or type don't match overridden generic parameter method or type. All overridden members must have the same 'DynamicallyAccessedMembersAttribute' usage.", Justification = "This method is obsolete")] - - public static IServiceCollection AddVolatileVectorStoreTextSearch( - this IServiceCollection services, - string collectionName, - ITextSearchStringMapper? stringMapper = null, - ITextSearchResultMapper? resultMapper = null, - VectorStoreTextSearchOptions? 
options = null, - string? serviceId = default) - where TKey : notnull - { - // If we are not constructing the dependent services, add the VectorStoreTextSearch as transient, since we - // cannot make assumptions about how dependent services are being managed. - services.AddKeyedTransient>( - serviceId, - (sp, obj) => - { - var vectorStore = sp.GetRequiredService(); - var vectorSearch = vectorStore.GetCollection(collectionName); - var generationService = sp.GetRequiredService(); - stringMapper ??= sp.GetRequiredService(); - resultMapper ??= sp.GetRequiredService(); - options ??= sp.GetService(); - return new VectorStoreTextSearch(vectorSearch, generationService, stringMapper, resultMapper, options); - }); - - return services; - } -} diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStore.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStore.cs deleted file mode 100644 index 81b5fb4ef6b5..000000000000 --- a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStore.cs +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using Microsoft.Extensions.VectorData; - -namespace Microsoft.SemanticKernel.Data; - -/// -/// Service for storing and retrieving vector records, and managing vector record collections, that uses an in memory dictionary as the underlying storage. -/// -[Obsolete("This has been replaced by InMemoryVectorStore in the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] -public sealed class VolatileVectorStore : IVectorStore -{ - /// Internal storage for the record collection. - private readonly ConcurrentDictionary> _internalCollection; - - /// The data type of each collection, to enforce a single type per collection. - private readonly ConcurrentDictionary _internalCollectionTypes = new(); - - /// - /// Initializes a new instance of the class. - /// - public VolatileVectorStore() - { - this._internalCollection = new(); - } - - /// - /// Initializes a new instance of the class. - /// - /// Allows passing in the dictionary used for storage, for testing purposes. - internal VolatileVectorStore(ConcurrentDictionary> internalCollection) - { - this._internalCollection = internalCollection; - } - - /// - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) - where TKey : notnull - { - if (this._internalCollectionTypes.TryGetValue(name, out var existingCollectionDataType) && existingCollectionDataType != typeof(TRecord)) - { - throw new InvalidOperationException($"Collection '{name}' already exists and with data type '{existingCollectionDataType.Name}' so cannot be re-created with data type '{typeof(TRecord).Name}'."); - } - - var collection = new VolatileVectorStoreRecordCollection( - this._internalCollection, - this._internalCollectionTypes, - name, - new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; - return collection!; - } - - /// - public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) - { - return this._internalCollection.Keys.ToAsyncEnumerable(); - } -} diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreCollectionSearchMapping.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreCollectionSearchMapping.cs deleted file mode 100644 index c80914949f7d..000000000000 --- a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreCollectionSearchMapping.cs +++ /dev/null @@ -1,221 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using System.Numerics.Tensors; -using System.Reflection; -using Microsoft.Extensions.VectorData; - -namespace Microsoft.SemanticKernel.Data; - -/// -/// Contains mapping helpers to use when searching for documents using the Volatile store. -/// -[Obsolete("This has been replaced by InMemoryVectorStoreCollectionSearchMapping in the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] -internal static class VolatileVectorStoreCollectionSearchMapping -{ - /// - /// Compare the two vectors using the specified distance function. - /// - /// The first vector to compare. - /// The second vector to compare. - /// The distance function to use for comparison. - /// The score of the comparison. - /// Thrown when the distance function is not supported. - public static float CompareVectors(ReadOnlySpan x, ReadOnlySpan y, string? distanceFunction) - { - switch (distanceFunction) - { - case null: - case DistanceFunction.CosineSimilarity: - case DistanceFunction.CosineDistance: - return TensorPrimitives.CosineSimilarity(x, y); - case DistanceFunction.DotProductSimilarity: - return TensorPrimitives.Dot(x, y); - case DistanceFunction.EuclideanDistance: - return TensorPrimitives.Distance(x, y); - default: - throw new NotSupportedException($"The distance function '{distanceFunction}' is not supported by the Volatile connector."); - } - } - - /// - /// Indicates whether result ordering should be descending or ascending, to get most similar results at the top, based on the distance function. - /// - /// The distance function to use for comparison. - /// Whether to order descending or ascending. - /// Thrown when the distance function is not supported. - public static bool ShouldSortDescending(string? 
distanceFunction) - { - switch (distanceFunction) - { - case null: - case DistanceFunction.CosineSimilarity: - case DistanceFunction.DotProductSimilarity: - return true; - case DistanceFunction.CosineDistance: - case DistanceFunction.EuclideanDistance: - return false; - default: - throw new NotSupportedException($"The distance function '{distanceFunction}' is not supported by the Volatile connector."); - } - } - - /// - /// Converts the provided score into the correct result depending on the distance function. - /// The main purpose here is to convert from cosine similarity to cosine distance if cosine distance is requested, - /// since the two are inversely related and the only supports cosine similarity so - /// we are using cosine similarity for both similarity and distance. - /// - /// The score to convert. - /// The distance function to use for comparison. - /// Whether to order descending or ascending. - /// Thrown when the distance function is not supported. - public static float ConvertScore(float score, string? distanceFunction) - { - switch (distanceFunction) - { - case DistanceFunction.CosineDistance: - return 1 - score; - case null: - case DistanceFunction.CosineSimilarity: - case DistanceFunction.DotProductSimilarity: - case DistanceFunction.EuclideanDistance: - return score; - default: - throw new NotSupportedException($"The distance function '{distanceFunction}' is not supported by the Volatile connector."); - } - } - - /// - /// Filter the provided records using the provided filter definition. - /// - /// The filter definition to filter the with. - /// The records to filter. - /// The filtered records. - /// Thrown when an unsupported filter clause is encountered. - public static IEnumerable FilterRecords(VectorSearchFilter? filter, IEnumerable records) - { - if (filter == null) - { - return records; - } - - return records.Where(record => - { - var result = true; - - // Run each filter clause against the record, and AND the results together. - // Break if any clause returns false, since we are doing an AND and no need - // to check any further clauses. - foreach (var clause in filter.FilterClauses) - { - if (clause is EqualToFilterClause equalToFilter) - { - result = result && CheckEqualTo(record, equalToFilter); - - if (result == false) - { - break; - } - } - else if (clause is AnyTagEqualToFilterClause anyTagEqualToFilter) - { - result = result && CheckAnyTagEqualTo(record, anyTagEqualToFilter); - - if (result == false) - { - break; - } - } - else - { - throw new InvalidOperationException($"Unsupported filter clause type {clause.GetType().Name}"); - } - } - - return result; - }); - } - - /// - /// Check if the required property on the record is equal to the required value form the filter. - /// - /// The record to check against the filter. - /// The filter containing the property and value to check. - /// if the property equals the required value, otherwise. - private static bool CheckEqualTo(object record, EqualToFilterClause equalToFilter) - { - var propertyInfo = GetPropertyInfo(record, equalToFilter.FieldName); - var propertyValue = propertyInfo.GetValue(record); - if (propertyValue == null) - { - return propertyValue == equalToFilter.Value; - } - - return propertyValue.Equals(equalToFilter.Value); - } - - /// - /// Check if the required tag list on the record is equal to the required value form the filter. - /// - /// The record to check against the filter. - /// The filter containing the property and value to check. 
- /// if the tag list contains the required value, otherwise. - /// - private static bool CheckAnyTagEqualTo(object record, AnyTagEqualToFilterClause anyTagEqualToFilter) - { - var propertyInfo = GetPropertyInfo(record, anyTagEqualToFilter.FieldName); - - // Check that the property is actually a list of values. - if (!typeof(IEnumerable).IsAssignableFrom(propertyInfo.PropertyType)) - { - throw new InvalidOperationException($"Property {anyTagEqualToFilter.FieldName} is not a list property on record type {record.GetType().Name}"); - } - - // Check that the tag list contains any values. If not, return false, since the required value cannot be in an empty list. - var propertyValue = propertyInfo.GetValue(record) as IEnumerable; - if (propertyValue == null) - { - return false; - } - - // Check each value in the tag list against the required value. - foreach (var value in propertyValue) - { - if (value == null && anyTagEqualToFilter.Value == null) - { - return true; - } - - if (value != null && value.Equals(anyTagEqualToFilter.Value)) - { - return true; - } - } - - return false; - } - - /// - /// Get the property info for the provided property name on the record. - /// - /// The record to find the property on. - /// The name of the property to find. - /// The property info for the required property. - /// Thrown if the required property does not exist on the record. - [UnconditionalSuppressMessage("Analysis", "IL2075:Suppress IL2075 warning", Justification = "This class is obsolete")] - private static PropertyInfo GetPropertyInfo(object record, string propertyName) - { - var propertyInfo = record.GetType().GetProperty(propertyName); - if (propertyInfo == null) - { - throw new InvalidOperationException($"Property {propertyName} not found on record type {record.GetType().Name}"); - } - - return propertyInfo; - } -} diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreExtensions.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreExtensions.cs deleted file mode 100644 index 2b1e1f9c9d0f..000000000000 --- a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreExtensions.cs +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.IO; -using System.Linq; -using System.Text.Json; -using System.Threading.Tasks; -using Microsoft.Extensions.VectorData; - -namespace Microsoft.SemanticKernel.Data; - -/// -/// Extension methods for which allow: -/// 1. Serializing an instance of to a stream. -/// 2. Deserializing an instance of from a stream. -/// -[Obsolete("This has been replaced by InMemoryVectorStoreExtensions in the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] -public static class VolatileVectorStoreExtensions -{ - /// - /// Serialize a to a stream as JSON. - /// - /// Type of the record key. - /// Type of the record. - /// Instance of used to retrieve the collection. - /// The collection name. - /// The stream to write the serialized JSON to. - /// The JSON serializer options to use. - [RequiresUnreferencedCode("Uses reflection for collection serialization, making it incompatible with AOT scenarios.")] - [RequiresDynamicCode("Uses reflection for collection serialization, making it incompatible with AOT scenarios.")] - public static async Task SerializeCollectionAsJsonAsync( - this VolatileVectorStore vectorStore, - string collectionName, - Stream stream, - JsonSerializerOptions? 
jsonSerializerOptions = null) - where TKey : notnull - { - // Get collection and verify that it exists. - var collection = vectorStore.GetCollection(collectionName); - var exists = await collection.CollectionExistsAsync().ConfigureAwait(false); - if (!exists) - { - throw new InvalidOperationException($"Collection '{collectionName}' does not exist."); - } - - var volatileCollection = collection as VolatileVectorStoreRecordCollection; - var records = volatileCollection!.GetCollectionDictionary(); - VolatileRecordCollection recordCollection = new(collectionName, records); - - await JsonSerializer.SerializeAsync(stream, recordCollection, jsonSerializerOptions).ConfigureAwait(false); - } - - /// - /// Deserialize a to a stream as JSON. - /// - /// Type of the record key. - /// Type of the record. - /// Instance of used to retrieve the collection. - /// The stream to read the serialized JSON from. - [RequiresUnreferencedCode("Uses reflection for collection deserialization, making it incompatible with AOT scenarios.")] - [RequiresDynamicCode("Uses reflection for collection deserialization, making it incompatible with AOT scenarios.")] - public static async Task?> DeserializeCollectionFromJsonAsync( - this VolatileVectorStore vectorStore, - Stream stream) - where TKey : notnull - { - IVectorStoreRecordCollection? collection = null; - - using (StreamReader streamReader = new(stream)) - { - string result = streamReader.ReadToEnd(); - var recordCollection = JsonSerializer.Deserialize>(result); - if (recordCollection is null) - { - throw new InvalidOperationException("Stream does not contain valid record collection JSON."); - } - - // Get and create collection if it doesn't exist. - collection = vectorStore.GetCollection(recordCollection.Name); - await collection.CreateCollectionIfNotExistsAsync().ConfigureAwait(false); - - // Upsert records. - var tasks = recordCollection.Records.Values.Select(record => Task.Run(async () => - { - await collection.UpsertAsync(record).ConfigureAwait(false); - })); - await Task.WhenAll(tasks).ConfigureAwait(false); - } - - return collection; - } - - #region private - /// Model class used when storing a . - private sealed class VolatileRecordCollection(string name, IDictionary records) - where TKey : notnull - { - public string Name { get; init; } = name; - public IDictionary Records { get; init; } = records; - } - #endregion - -} diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreKeyResolver.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreKeyResolver.cs deleted file mode 100644 index 407909491d38..000000000000 --- a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreKeyResolver.cs +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; - -namespace Microsoft.SemanticKernel.Data; - -/// -/// Delegate that describes a function that given a record, finds the record key and returns it. -/// -/// The record to look up the key for. -/// The record key. -[Obsolete("This has been replaced by InMemoryVectorStoreKeyResolver in the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] -public delegate TKey? 
VolatileVectorStoreKeyResolver(TRecord record) - where TKey : notnull; diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs deleted file mode 100644 index b6e5454dc4d6..000000000000 --- a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs +++ /dev/null @@ -1,380 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.VectorData; - -namespace Microsoft.SemanticKernel.Data; - -/// -/// Service for storing and retrieving vector records, that uses an in memory dictionary as the underlying storage. -/// -/// The data type of the record key. -/// The data model to use for adding, updating and retrieving data from storage. -[Obsolete("This has been replaced by InMemoryVectorStoreRecordCollection in the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] -#pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class VolatileVectorStoreRecordCollection : IVectorStoreRecordCollection -#pragma warning restore CA1711 // Identifiers should not have incorrect suffix - where TKey : notnull -{ - /// A set of types that vectors on the provided model may have. - private static readonly HashSet s_supportedVectorTypes = - [ - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?), - ]; - - /// The default options for vector search. - private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); - - /// Internal storage for all of the record collections. - private readonly ConcurrentDictionary> _internalCollections; - - /// The data type of each collection, to enforce a single type per collection. - private readonly ConcurrentDictionary _internalCollectionTypes; - - /// Optional configuration options for this class. - private readonly VolatileVectorStoreRecordCollectionOptions _options; - - /// The name of the collection that this will access. - private readonly string _collectionName; - - /// A helper to access property information for the current data model and record definition. - private readonly VectorStoreRecordPropertyReader _propertyReader; - - /// A dictionary of vector properties on the provided model, keyed by the property name. - private readonly Dictionary _vectorProperties; - - /// An function to look up vectors from the records. - private readonly VolatileVectorStoreVectorResolver _vectorResolver; - - /// An function to look up keys from the records. - private readonly VolatileVectorStoreKeyResolver _keyResolver; - - /// - /// Initializes a new instance of the class. - /// - /// The name of the collection that this will access. - /// Optional configuration options for this class. - [UnconditionalSuppressMessage("Trimming", "IL2087:Target parameter argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The generic parameter of the source method or type does not have matching annotations.", Justification = "This class is obsolete")] - public VolatileVectorStoreRecordCollection(string collectionName, VolatileVectorStoreRecordCollectionOptions? options = default) - { - // Verify. 
- Verify.NotNullOrWhiteSpace(collectionName); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); - - // Assign. - this._collectionName = collectionName; - this._internalCollections = new(); - this._internalCollectionTypes = new(); - this._options = options ?? new VolatileVectorStoreRecordCollectionOptions(); - this._propertyReader = new VectorStoreRecordPropertyReader(typeof(TRecord), this._options.VectorStoreRecordDefinition, new() { RequiresAtLeastOneVector = false, SupportsMultipleKeys = false, SupportsMultipleVectors = true }); - - // Validate property types. - this._propertyReader.VerifyVectorProperties(s_supportedVectorTypes); - this._vectorProperties = this._propertyReader.VectorProperties.ToDictionary(x => x.DataModelPropertyName); - - // Assign resolvers. - this._vectorResolver = CreateVectorResolver(this._options.VectorResolver, this._vectorProperties); - this._keyResolver = CreateKeyResolver(this._options.KeyResolver, this._propertyReader.KeyProperty); - } - - /// - /// Initializes a new instance of the class. - /// - /// Internal storage for the record collection. - /// The data type of each collection, to enforce a single type per collection. - /// The name of the collection that this will access. - /// Optional configuration options for this class. - internal VolatileVectorStoreRecordCollection( - ConcurrentDictionary> internalCollection, - ConcurrentDictionary internalCollectionTypes, - string collectionName, - VolatileVectorStoreRecordCollectionOptions? options = default) - : this(collectionName, options) - { - this._internalCollections = internalCollection; - this._internalCollectionTypes = internalCollectionTypes; - } - - /// - public string CollectionName => this._collectionName; - - /// - public Task CollectionExistsAsync(CancellationToken cancellationToken = default) - { - return this._internalCollections.ContainsKey(this._collectionName) ? Task.FromResult(true) : Task.FromResult(false); - } - - /// - public Task CreateCollectionAsync(CancellationToken cancellationToken = default) - { - if (!this._internalCollections.ContainsKey(this._collectionName)) - { - this._internalCollections.TryAdd(this._collectionName, new ConcurrentDictionary()); - this._internalCollectionTypes.TryAdd(this._collectionName, typeof(TRecord)); - } - - return Task.CompletedTask; - } - - /// - public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) - { - if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) - { - await this.CreateCollectionAsync(cancellationToken).ConfigureAwait(false); - } - } - - /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) - { - this._internalCollections.TryRemove(this._collectionName, out _); - return Task.CompletedTask; - } - - /// - public Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) - { - var collectionDictionary = this.GetCollectionDictionary(); - - if (collectionDictionary.TryGetValue(key, out var record)) - { - return Task.FromResult((TRecord?)record); - } - - return Task.FromResult(default); - } - - /// - public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - foreach (var key in keys) - { - var record = await this.GetAsync(key, options, cancellationToken).ConfigureAwait(false); - - if (record is not null) - { - yield return record; - } - } - } - - /// - public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) - { - var collectionDictionary = this.GetCollectionDictionary(); - - collectionDictionary.TryRemove(key, out _); - return Task.CompletedTask; - } - - /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) - { - var collectionDictionary = this.GetCollectionDictionary(); - - foreach (var key in keys) - { - collectionDictionary.TryRemove(key, out _); - } - - return Task.CompletedTask; - } - - /// - public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) - { - Verify.NotNull(record); - - var collectionDictionary = this.GetCollectionDictionary(); - - var key = (TKey)this._keyResolver(record)!; - collectionDictionary.AddOrUpdate(key!, record, (key, currentValue) => record); - - return Task.FromResult(key!); - } - - /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - foreach (var record in records) - { - yield return await this.UpsertAsync(record, cancellationToken).ConfigureAwait(false); - } - } - - /// -#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously - Need to satisfy the interface which returns IAsyncEnumerable - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) -#pragma warning restore CS1998 - { - Verify.NotNull(vector); - - if (this._propertyReader.FirstVectorPropertyName is null) - { - throw new InvalidOperationException("The collection does not have any vector fields, so vector search is not possible."); - } - - if (vector is not ReadOnlyMemory floatVector) - { - throw new NotSupportedException($"The provided vector type {vector.GetType().FullName} is not supported by the Volatile Vector Store."); - } - - // Resolve options and get requested vector property or first as default. - var internalOptions = options ?? s_defaultVectorSearchOptions; - - var vectorPropertyName = string.IsNullOrWhiteSpace(internalOptions.VectorPropertyName) ? this._propertyReader.FirstVectorPropertyName : internalOptions.VectorPropertyName; - if (!this._vectorProperties.TryGetValue(vectorPropertyName!, out var vectorProperty)) - { - throw new InvalidOperationException($"The collection does not have a vector field named '{internalOptions.VectorPropertyName}', so vector search is not possible."); - } - - // Filter records using the provided filter before doing the vector comparison. - if (internalOptions.Filter is not null) - { - throw new NotSupportedException("LINQ-based filtering is not supported with VolatileVectorStore, use Microsoft.SemanticKernel.Connectors.InMemory instead"); - } - - var filteredRecords = VolatileVectorStoreCollectionSearchMapping.FilterRecords(internalOptions.OldFilter, this.GetCollectionDictionary().Values); - - // Compare each vector in the filtered results with the provided vector. 
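// Scoring context, summarizing the CompareVectors / ConvertScore / ShouldSortDescending helpers
// from the removed VolatileVectorStoreCollectionSearchMapping above: cosine distance is computed
// as cosine similarity and only converted afterwards via (1 - score), so two identical non-zero
// vectors would score 1.0 as similarity and 0.0 as distance (illustrative values); similarity-style
// functions are then sorted descending, while cosine distance and Euclidean distance sort ascending.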
- var results = filteredRecords.Select((record) => - { - var vectorObject = this._vectorResolver(vectorPropertyName!, (TRecord)record); - if (vectorObject is not ReadOnlyMemory dbVector) - { - return null; - } - - var score = VolatileVectorStoreCollectionSearchMapping.CompareVectors(floatVector.Span, dbVector.Span, vectorProperty.DistanceFunction); - var convertedscore = VolatileVectorStoreCollectionSearchMapping.ConvertScore(score, vectorProperty.DistanceFunction); - return (record, convertedscore); - }); - - // Get the non-null results since any record with a null vector results in a null result. - var nonNullResults = results.Where(x => x.HasValue).Select(x => x!.Value); - - // Calculate the total results count if requested. - long? count = null; - if (internalOptions.IncludeTotalCount) - { - count = nonNullResults.Count(); - } - - // Sort the results appropriately for the selected distance function and get the right page of results . - var sortedScoredResults = VolatileVectorStoreCollectionSearchMapping.ShouldSortDescending(vectorProperty.DistanceFunction) ? - nonNullResults.OrderByDescending(x => x.score) : - nonNullResults.OrderBy(x => x.score); - var resultsPage = sortedScoredResults.Skip(internalOptions.Skip).Take(internalOptions.Top); - - // Build the response. - var vectorSearchResultList = resultsPage.Select(x => new VectorSearchResult((TRecord)x.record, x.score)).ToAsyncEnumerable(); - return new VectorSearchResults(vectorSearchResultList) { TotalCount = count }; - } - - /// - /// Get the collection dictionary from the internal storage, throws if it does not exist. - /// - /// The retrieved collection dictionary. - internal ConcurrentDictionary GetCollectionDictionary() - { - if (!this._internalCollections.TryGetValue(this._collectionName, out var collectionDictionary)) - { - throw new VectorStoreOperationException($"Call to vector store failed. Collection '{this._collectionName}' does not exist."); - } - - return collectionDictionary; - } - - /// - /// Pick / create a vector resolver that will read a vector from a record in the store based on the vector name. - /// 1. If an override resolver is provided, use that. - /// 2. If the record type is create a resolver that looks up the vector in its dictionary. - /// 3. Otherwise, create a resolver that assumes the vector is a property directly on the record and use the record definition to determine the name. - /// - /// The override vector resolver if one was provided. - /// A dictionary of vector properties from the record definition. - /// The . - [UnconditionalSuppressMessage("Trimming", "IL2090:'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The generic parameter of the source method or type does not have matching annotations.", Justification = "This class is obsolete")] - private static VolatileVectorStoreVectorResolver CreateVectorResolver(VolatileVectorStoreVectorResolver? overrideVectorResolver, Dictionary vectorProperties) - { - // Custom resolver. - if (overrideVectorResolver is not null) - { - return overrideVectorResolver; - } - - // Generic data model resolver. 
- if (typeof(TRecord).IsGenericType && typeof(TRecord).GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) - { - return (vectorName, record) => - { - var genericDataModelRecord = record as VectorStoreGenericDataModel; - var vectorsDictionary = genericDataModelRecord!.Vectors; - if (vectorsDictionary != null && vectorsDictionary.TryGetValue(vectorName, out var vector)) - { - return vector; - } - - throw new InvalidOperationException($"The collection does not have a vector field named '{vectorName}', so vector search is not possible."); - }; - } - - // Default resolver. - var vectorPropertiesInfo = vectorProperties.Values - .Select(x => x.DataModelPropertyName) - .Select(x => typeof(TRecord).GetProperty(x) ?? throw new ArgumentException($"Vector property '{x}' was not found on {typeof(TRecord).Name}")) - .ToDictionary(x => x.Name); - - return (vectorName, record) => - { - if (vectorPropertiesInfo.TryGetValue(vectorName, out var vectorPropertyInfo)) - { - return vectorPropertyInfo.GetValue(record); - } - - throw new InvalidOperationException($"The collection does not have a vector field named '{vectorName}', so vector search is not possible."); - }; - } - - /// - /// Pick / create a key resolver that will read a key from a record in the store. - /// 1. If an override resolver is provided, use that. - /// 2. If the record type is create a resolver that reads the Key property from it. - /// 3. Otherwise, create a resolver that assumes the key is a property directly on the record and use the record definition to determine the name. - /// - /// The override key resolver if one was provided. - /// They key property from the record definition. - /// The . - [UnconditionalSuppressMessage("Trimming", "IL2090:'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The generic parameter of the source method or type does not have matching annotations.", Justification = "This class is obsolete")] - private static VolatileVectorStoreKeyResolver CreateKeyResolver(VolatileVectorStoreKeyResolver? overrideKeyResolver, VectorStoreRecordKeyProperty keyProperty) - { - // Custom resolver. - if (overrideKeyResolver is not null) - { - return overrideKeyResolver; - } - - // Generic data model resolver. - if (typeof(TRecord).IsGenericType && typeof(TRecord).GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) - { - return (record) => - { - var genericDataModelRecord = record as VectorStoreGenericDataModel; - return genericDataModelRecord!.Key; - }; - } - - // Default resolver. - var keyPropertyInfo = typeof(TRecord).GetProperty(keyProperty.DataModelPropertyName) ?? throw new ArgumentException($"Key property {keyProperty.DataModelPropertyName} not found on {typeof(TRecord).Name}"); - return (record) => (TKey)keyPropertyInfo.GetValue(record)!; - } -} diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollectionOptions.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollectionOptions.cs deleted file mode 100644 index 69f4f8c5fe2e..000000000000 --- a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollectionOptions.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.Extensions.VectorData; - -namespace Microsoft.SemanticKernel.Data; - -/// -/// Options when creating a . -/// -/// The data type of the record key of the collection that this options will be used with. 
-/// The data model to use for adding, updating and retrieving data on the collection that this options will be used with. -[Obsolete("This has been replaced by InMemoryVectorStoreRecordCollectionOptions in the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] -public sealed class VolatileVectorStoreRecordCollectionOptions - where TKey : notnull -{ - /// - /// Gets or sets an optional record definition that defines the schema of the record type. - /// - /// - /// If not provided, the schema will be inferred from the record model class using reflection. - /// In this case, the record model properties must be annotated with the appropriate attributes to indicate their usage. - /// See , and . - /// - public VectorStoreRecordDefinition? VectorStoreRecordDefinition { get; init; } = null; - - /// - /// An optional function that can be used to look up vectors from a record. - /// - /// - /// If not provided, the default behavior is to look for direct properties of the record - /// using reflection. This delegate can be used to provide a custom implementation if - /// the vector properties are located somewhere else on the record. - /// - public VolatileVectorStoreVectorResolver? VectorResolver { get; init; } = null; - - /// - /// An optional function that can be used to look up record keys. - /// - /// - /// If not provided, the default behavior is to look for a direct property of the record - /// using reflection. This delegate can be used to provide a custom implementation if - /// the key property is located somewhere else on the record. - /// - public VolatileVectorStoreKeyResolver? KeyResolver { get; init; } = null; -} diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreVectorResolver.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreVectorResolver.cs deleted file mode 100644 index b360b3aa6017..000000000000 --- a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreVectorResolver.cs +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; - -namespace Microsoft.SemanticKernel.Data; - -/// -/// Delegate that describes a function that given a vector name and a record, finds the vector in the record and returns it. -/// -/// The name of the vector to find. -/// The record that contains the vector to look up. -/// The named vector from the record. -[Obsolete("This has been replaced by InMemoryVectorStoreVectorResolver in the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] -public delegate object? 
VolatileVectorStoreVectorResolver(string vectorName, TRecord record); diff --git a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs index 367e5e7a2553..babf4dedb8fc 100644 --- a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs +++ b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs @@ -500,7 +500,7 @@ private async Task RenderPromptAsync( Verify.NotNull(aiService); - var renderingContext = await kernel.OnPromptRenderAsync(this, arguments, isStreaming, async (context) => + var renderingContext = await kernel.OnPromptRenderAsync(this, arguments, isStreaming, executionSettings, async (context) => { renderedPrompt = await this._promptTemplate.RenderAsync(kernel, context.Arguments, cancellationToken).ConfigureAwait(false); diff --git a/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ICodeRendering.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ICodeRendering.cs index d1dcea92bf50..2a0581290eae 100644 --- a/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ICodeRendering.cs +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ICodeRendering.cs @@ -17,5 +17,5 @@ internal interface ICodeRendering /// The arguments /// The to monitor for cancellation requests. The default is . /// Rendered content - public ValueTask RenderCodeAsync(Kernel kernel, KernelArguments? arguments = null, CancellationToken cancellationToken = default); + ValueTask RenderCodeAsync(Kernel kernel, KernelArguments? arguments = null, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ITextRendering.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ITextRendering.cs index 87044226e4d1..48d63bed9971 100644 --- a/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ITextRendering.cs +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ITextRendering.cs @@ -11,5 +11,5 @@ internal interface ITextRendering /// /// Optional arguments the block rendering /// Rendered content - public object? Render(KernelArguments? arguments); + object? Render(KernelArguments? arguments); } diff --git a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryReducerExtensionsTests.cs similarity index 87% rename from dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs rename to dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryReducerExtensionsTests.cs index 45dcc47e5cab..9bdf469b69bc 100644 --- a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryReducerExtensionsTests.cs @@ -1,15 +1,15 @@ // Copyright (c) Microsoft. All rights reserved. + using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents.History; using Microsoft.SemanticKernel.ChatCompletion; using Moq; using Xunit; -namespace SemanticKernel.Agents.UnitTests.Core.History; +namespace SemanticKernel.UnitTests.AI.ChatCompletion; /// /// Unit testing of . @@ -31,7 +31,7 @@ public class ChatHistoryReducerExtensionsTests public void VerifyChatHistoryExtraction(int messageCount, int startIndex, int? endIndex = null, int? expectedCount = null) { // Arrange - ChatHistory history = [.. 
MockHistoryGenerator.CreateSimpleHistory(messageCount)]; + ChatHistory history = [.. MockChatHistoryGenerator.CreateSimpleHistory(messageCount)]; // Act ChatMessageContent[] extractedHistory = history.Extract(startIndex, endIndex).ToArray(); @@ -62,14 +62,14 @@ public void VerifyChatHistoryExtraction(int messageCount, int startIndex, int? e public void VerifyGetFinalSummaryIndex(int summaryCount, int regularCount) { // Arrange - ChatHistory summaries = [.. MockHistoryGenerator.CreateSimpleHistory(summaryCount)]; + ChatHistory summaries = [.. MockChatHistoryGenerator.CreateSimpleHistory(summaryCount)]; foreach (ChatMessageContent summary in summaries) { summary.Metadata = new Dictionary() { { "summary", true } }; } // Act - ChatHistory history = [.. summaries, .. MockHistoryGenerator.CreateSimpleHistory(regularCount)]; + ChatHistory history = [.. summaries, .. MockChatHistoryGenerator.CreateSimpleHistory(regularCount)]; int finalSummaryIndex = history.LocateSummarizationBoundary("summary"); @@ -89,14 +89,14 @@ public async Task VerifyChatHistoryNotReducedAsync() mockReducer.Setup(r => r.ReduceAsync(It.IsAny>(), default)).ReturnsAsync((IEnumerable?)null); // Act - bool isReduced = await history.ReduceAsync(null, default); + bool isReduced = await history.ReduceInPlaceAsync(null, default); // Assert Assert.False(isReduced); Assert.Empty(history); // Act - isReduced = await history.ReduceAsync(mockReducer.Object, default); + isReduced = await history.ReduceInPlaceAsync(mockReducer.Object, default); // Assert Assert.False(isReduced); @@ -113,10 +113,10 @@ public async Task VerifyChatHistoryReducedAsync() Mock mockReducer = new(); mockReducer.Setup(r => r.ReduceAsync(It.IsAny>(), default)).ReturnsAsync((IEnumerable?)[]); - ChatHistory history = [.. MockHistoryGenerator.CreateSimpleHistory(10)]; + ChatHistory history = [.. MockChatHistoryGenerator.CreateSimpleHistory(10)]; // Act - bool isReduced = await history.ReduceAsync(mockReducer.Object, default); + bool isReduced = await history.ReduceInPlaceAsync(mockReducer.Object, default); // Assert Assert.True(isReduced); @@ -139,7 +139,7 @@ public async Task VerifyChatHistoryReducedAsync() public void VerifyLocateSafeReductionIndexNone(int messageCount, int targetCount, int? thresholdCount = null) { // Arrange: Shape of history doesn't matter since reduction is not expected - ChatHistory sourceHistory = [.. MockHistoryGenerator.CreateHistoryWithUserInput(messageCount)]; + ChatHistory sourceHistory = [.. MockChatHistoryGenerator.CreateHistoryWithUserInput(messageCount)]; // Act int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount); @@ -163,7 +163,7 @@ public void VerifyLocateSafeReductionIndexNone(int messageCount, int targetCount public void VerifyLocateSafeReductionIndexFound(int messageCount, int targetCount, int? thresholdCount = null) { // Arrange: Generate history with only assistant messages - ChatHistory sourceHistory = [.. MockHistoryGenerator.CreateSimpleHistory(messageCount)]; + ChatHistory sourceHistory = [.. MockChatHistoryGenerator.CreateSimpleHistory(messageCount)]; // Act int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount); @@ -189,7 +189,7 @@ public void VerifyLocateSafeReductionIndexFound(int messageCount, int targetCoun public void VerifyLocateSafeReductionIndexFoundWithUser(int messageCount, int targetCount, int? thresholdCount = null) { // Arrange: Generate history with alternating user and assistant messages - ChatHistory sourceHistory = [.. 
MockHistoryGenerator.CreateHistoryWithUserInput(messageCount)]; + ChatHistory sourceHistory = [.. MockChatHistoryGenerator.CreateHistoryWithUserInput(messageCount)]; // Act int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount); @@ -224,7 +224,7 @@ public void VerifyLocateSafeReductionIndexWithFunctionContent(int targetCount, i { // Arrange: Generate a history with function call on index 5 and 9 and // function result on index 6 and 10 (total length: 14) - ChatHistory sourceHistory = [.. MockHistoryGenerator.CreateHistoryWithFunctionContent()]; + ChatHistory sourceHistory = [.. MockChatHistoryGenerator.CreateHistoryWithFunctionContent()]; ChatHistoryTruncationReducer reducer = new(targetCount, thresholdCount); diff --git a/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistorySummarizationReducerTests.cs similarity index 85% rename from dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs rename to dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistorySummarizationReducerTests.cs index 53e93d0026c3..0588055efc19 100644 --- a/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistorySummarizationReducerTests.cs @@ -1,15 +1,15 @@ // Copyright (c) Microsoft. All rights reserved. + using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents.History; using Microsoft.SemanticKernel.ChatCompletion; using Moq; using Xunit; -namespace SemanticKernel.Agents.UnitTests.Core.History; +namespace SemanticKernel.UnitTests.AI.ChatCompletion; /// /// Unit testing of . 
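For context on the ReduceAsync to ReduceInPlaceAsync rename exercised in the ChatHistoryReducerExtensionsTests changes above, a minimal usage sketch follows; it assumes the reducer types referenced by these tests (IChatHistoryReducer, ChatHistoryTruncationReducer) resolve from Microsoft.SemanticKernel.ChatCompletion as the updated usings suggest, and the history contents are illustrative.

using System.Threading;
using System.Threading.Tasks;
using Microsoft.SemanticKernel.ChatCompletion;

internal static class ReduceInPlaceSketch
{
    public static async Task<bool> RunAsync()
    {
        // Build a history that exceeds the reducer's target count.
        ChatHistory history = new("You are a helpful assistant.");
        for (int index = 0; index < 20; index++)
        {
            history.AddAssistantMessage($"message #{index}");
        }

        // ReduceInPlaceAsync mutates the history and reports whether a reduction happened,
        // matching the bool the updated tests assert on.
        IChatHistoryReducer reducer = new ChatHistoryTruncationReducer(targetCount: 10);
        return await history.ReduceInPlaceAsync(reducer, CancellationToken.None);
    }
}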
@@ -133,7 +133,7 @@ public async Task VerifyChatHistoryReductionSilentFailureAsync() { // Arrange Mock mockCompletionService = this.CreateMockCompletionService(throwException: true); - IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); + IReadOnlyList sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20).ToArray(); ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10) { FailOnError = false }; // Act @@ -151,7 +151,7 @@ public async Task VerifyChatHistoryReductionThrowsOnFailureAsync() { // Arrange Mock mockCompletionService = this.CreateMockCompletionService(throwException: true); - IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); + IReadOnlyList sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20).ToArray(); ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10); // Act and Assert @@ -166,7 +166,7 @@ public async Task VerifyChatHistoryNotReducedAsync() { // Arrange Mock mockCompletionService = this.CreateMockCompletionService(); - IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); + IReadOnlyList sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20).ToArray(); ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 20); // Act @@ -184,7 +184,7 @@ public async Task VerifyChatHistoryReducedAsync() { // Arrange Mock mockCompletionService = this.CreateMockCompletionService(); - IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); + IReadOnlyList sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20).ToArray(); ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10); // Act @@ -203,7 +203,7 @@ public async Task VerifyChatHistoryRereducedAsync() { // Arrange Mock mockCompletionService = this.CreateMockCompletionService(); - IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); + IReadOnlyList sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20).ToArray(); ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10); // Act @@ -224,6 +224,27 @@ public async Task VerifyChatHistoryRereducedAsync() VerifySummarization(messages[1]); } + /// + /// Validate history reduced and system message preserved when source history exceeds target threshold. + /// + [Fact] + public async Task VerifySystemMessageIsNotReducedAsync() + { + // Arrange + Mock mockCompletionService = this.CreateMockCompletionService(); + IReadOnlyList sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20, includeSystemMessage: true).ToArray(); + ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10); + + // Act + IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory); + + // Assert + ChatMessageContent[] messages = VerifyReducedHistory(reducedHistory, 11); + VerifySummarization(messages[1]); + + Assert.Contains(messages, m => m.Role == AuthorRole.System); + } + private static ChatMessageContent[] VerifyReducedHistory(IEnumerable? 
reducedHistory, int expectedCount) { Assert.NotNull(reducedHistory); diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTests.cs index 723349450e99..20cc5b1269fd 100644 --- a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTests.cs @@ -43,4 +43,35 @@ public void ItCanBeSerializedAndDeserialized() chatHistoryDeserialized[i].Items.OfType().Single().Text); } } + + [Theory] + [InlineData("system")] + [InlineData("developer")] + public void CtorWorksForSystemAndDeveloper(string providedRole) + { + // Arrange + var targetRole = providedRole == "system" ? AuthorRole.System : AuthorRole.Developer; + var options = new JsonSerializerOptions(); + var chatHistory = new ChatHistory("First message", targetRole); + + var chatHistoryJson = JsonSerializer.Serialize(chatHistory, options); + + // Act + var chatHistoryDeserialized = JsonSerializer.Deserialize(chatHistoryJson, options); + + // Assert + Assert.NotNull(chatHistoryDeserialized); + Assert.Equal(chatHistory.Count, chatHistoryDeserialized.Count); + Assert.Equal(providedRole, chatHistoryDeserialized[0].Role.Label); + for (var i = 0; i < chatHistory.Count; i++) + { + Assert.Equal(chatHistory[i].Role.Label, chatHistoryDeserialized[i].Role.Label); + Assert.Equal(chatHistory[i].Content, chatHistoryDeserialized[i].Content); + Assert.Equal(chatHistory[i].AuthorName, chatHistoryDeserialized[i].AuthorName); + Assert.Equal(chatHistory[i].Items.Count, chatHistoryDeserialized[i].Items.Count); + Assert.Equal( + chatHistory[i].Items.OfType().Single().Text, + chatHistoryDeserialized[i].Items.OfType().Single().Text); + } + } } diff --git a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTruncationReducerTests.cs similarity index 79% rename from dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs rename to dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTruncationReducerTests.cs index 9d8b2e721fdf..650648dadc17 100644 --- a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTruncationReducerTests.cs @@ -1,13 +1,14 @@ // Copyright (c) Microsoft. All rights reserved. + using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents.History; +using Microsoft.SemanticKernel.ChatCompletion; using Xunit; -namespace SemanticKernel.Agents.UnitTests.Core.History; +namespace SemanticKernel.UnitTests.AI.ChatCompletion; /// /// Unit testing of . 
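The new CtorWorksForSystemAndDeveloper case above relies on a ChatHistory constructor that takes an initial message together with an author role; a minimal sketch of that overload in ordinary use (the instruction and question text here are illustrative):

using Microsoft.SemanticKernel.ChatCompletion;

// Seed the history with a developer-role instruction instead of the default system role,
// then continue the conversation as usual.
ChatHistory history = new("Answer concisely.", AuthorRole.Developer);
history.AddUserMessage("Summarize the history reducer changes.");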
@@ -91,7 +92,7 @@ int GenerateHashCode(int targetCount, int thresholdCount) public async Task VerifyChatHistoryNotReducedAsync() { // Arrange - IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(10).ToArray(); + IReadOnlyList sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(10).ToArray(); ChatHistoryTruncationReducer reducer = new(20); // Act @@ -108,7 +109,7 @@ public async Task VerifyChatHistoryNotReducedAsync() public async Task VerifyChatHistoryReducedAsync() { // Arrange - IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); + IReadOnlyList sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20).ToArray(); ChatHistoryTruncationReducer reducer = new(10); // Act @@ -125,7 +126,7 @@ public async Task VerifyChatHistoryReducedAsync() public async Task VerifyChatHistoryRereducedAsync() { // Arrange - IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); + IReadOnlyList sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20).ToArray(); ChatHistoryTruncationReducer reducer = new(10); // Act @@ -136,6 +137,24 @@ public async Task VerifyChatHistoryRereducedAsync() VerifyReducedHistory(reducedHistory, 10); } + /// + /// Validate history reduced and system message preserved when source history exceeds target threshold. + /// + [Fact] + public async Task VerifySystemMessageIsNotReducedAsync() + { + // Arrange + IReadOnlyList sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20, includeSystemMessage: true).ToArray(); + ChatHistoryTruncationReducer reducer = new(10); + + // Act + IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory); + + // Assert + VerifyReducedHistory(reducedHistory, 10); + Assert.Contains(reducedHistory!, m => m.Role == AuthorRole.System); + } + private static void VerifyReducedHistory(IEnumerable? reducedHistory, int expectedCount) { Assert.NotNull(reducedHistory); diff --git a/dotnet/src/Agents/UnitTests/Core/History/MockHistoryGenerator.cs b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/MockChatHistoryGenerator.cs similarity index 90% rename from dotnet/src/Agents/UnitTests/Core/History/MockHistoryGenerator.cs rename to dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/MockChatHistoryGenerator.cs index 3475776a1935..cc7dd3f0377e 100644 --- a/dotnet/src/Agents/UnitTests/Core/History/MockHistoryGenerator.cs +++ b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/MockChatHistoryGenerator.cs @@ -1,20 +1,26 @@ // Copyright (c) Microsoft. All rights reserved. + using System.Collections.Generic; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; -namespace SemanticKernel.Agents.UnitTests.Core.History; +namespace SemanticKernel.UnitTests.AI.ChatCompletion; /// /// Factory for generating chat history for various test scenarios. /// -internal static class MockHistoryGenerator +internal static class MockChatHistoryGenerator { /// /// Create a homogeneous list of assistant messages. 
/// - public static IEnumerable CreateSimpleHistory(int messageCount) + public static IEnumerable CreateSimpleHistory(int messageCount, bool includeSystemMessage = false) { + if (includeSystemMessage) + { + yield return new ChatMessageContent(AuthorRole.System, "system message"); + } + for (int index = 0; index < messageCount; ++index) { yield return new ChatMessageContent(AuthorRole.Assistant, $"message #{index}"); diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/ServiceConversionExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/ServiceConversionExtensionsTests.cs index cce73a65510f..556799ecc85e 100644 --- a/dotnet/src/SemanticKernel.UnitTests/AI/ServiceConversionExtensionsTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/AI/ServiceConversionExtensionsTests.cs @@ -41,10 +41,10 @@ public void AsEmbeddingGeneratorMetadataReturnsExpectedData() }.AsEmbeddingGenerator(); Assert.NotNull(generator); - Assert.NotNull(generator.Metadata); - Assert.Equal(nameof(TestEmbeddingGenerationService), generator.Metadata.ProviderName); - Assert.Equal("examplemodel", generator.Metadata.ModelId); - Assert.Equal("https://example.com/", generator.Metadata.ProviderUri?.ToString()); + var metadata = Assert.IsType(generator.GetService(typeof(EmbeddingGeneratorMetadata))); + Assert.Equal(nameof(TestEmbeddingGenerationService), metadata.ProviderName); + Assert.Equal("examplemodel", metadata.ModelId); + Assert.Equal("https://example.com/", metadata.ProviderUri?.ToString()); } [Fact] @@ -75,10 +75,10 @@ public void AsChatClientMetadataReturnsExpectedData() }.AsChatClient(); Assert.NotNull(client); - Assert.NotNull(client.Metadata); - Assert.Equal(nameof(TestChatCompletionService), client.Metadata.ProviderName); - Assert.Equal("examplemodel", client.Metadata.ModelId); - Assert.Equal("https://example.com/", client.Metadata.ProviderUri?.ToString()); + var metadata = Assert.IsType(client.GetService(typeof(ChatClientMetadata))); + Assert.Equal(nameof(TestChatCompletionService), metadata.ProviderName); + Assert.Equal("examplemodel", metadata.ModelId); + Assert.Equal("https://example.com/", metadata.ProviderUri?.ToString()); } [Fact] @@ -151,15 +151,15 @@ public async Task AsChatClientNonStreamingContentConvertedAsExpected() }, }.AsChatClient(); - Microsoft.Extensions.AI.ChatCompletion result = await client.CompleteAsync([ + Microsoft.Extensions.AI.ChatResponse result = await client.GetResponseAsync([ new(ChatRole.System, [ new Microsoft.Extensions.AI.TextContent("some text"), - new Microsoft.Extensions.AI.ImageContent("http://imageurl"), + new Microsoft.Extensions.AI.DataContent("http://imageurl", mediaType: "image/jpeg"), ]), new(ChatRole.User, [ - new Microsoft.Extensions.AI.AudioContent("http://audiourl"), + new Microsoft.Extensions.AI.DataContent("http://audiourl", mediaType: "audio/mpeg"), new Microsoft.Extensions.AI.TextContent("some other text"), ]), new(ChatRole.Assistant, @@ -168,7 +168,7 @@ public async Task AsChatClientNonStreamingContentConvertedAsExpected() ]), new(ChatRole.Tool, [ - new Microsoft.Extensions.AI.FunctionResultContent("call123", "FunctionName", 42), + new Microsoft.Extensions.AI.FunctionResultContent("call123", 42), ]), ], new ChatOptions() { @@ -211,7 +211,7 @@ public async Task AsChatClientNonStreamingContentConvertedAsExpected() var frc = Assert.IsType(actualChatHistory[3].Items[0]); Assert.Equal("call123", frc.CallId); - Assert.Equal("FunctionName", frc.FunctionName); + Assert.Null(frc.FunctionName); Assert.Equal(42, frc.Result); Assert.NotNull(actualSettings); @@ -244,19 
+244,19 @@ public async Task AsChatClientNonStreamingResponseFormatHandled() List messages = [new(ChatRole.User, "hi")]; - await client.CompleteAsync(messages); + await client.GetResponseAsync(messages); oaiSettings = JsonSerializer.Deserialize(JsonSerializer.Serialize(actualSettings)); Assert.Null(oaiSettings); - await client.CompleteAsync(messages, new() { ResponseFormat = ChatResponseFormat.Text }); + await client.GetResponseAsync(messages, new() { ResponseFormat = ChatResponseFormat.Text }); oaiSettings = JsonSerializer.Deserialize(JsonSerializer.Serialize(actualSettings)); Assert.Equal("text", oaiSettings?.ResponseFormat?.ToString()); - await client.CompleteAsync(messages, new() { ResponseFormat = ChatResponseFormat.Json }); + await client.GetResponseAsync(messages, new() { ResponseFormat = ChatResponseFormat.Json }); oaiSettings = JsonSerializer.Deserialize(JsonSerializer.Serialize(actualSettings)); Assert.Equal("json_object", oaiSettings?.ResponseFormat?.ToString()); - await client.CompleteAsync(messages, new() { ResponseFormat = ChatResponseFormat.ForJsonSchema(JsonSerializer.Deserialize(""" + await client.GetResponseAsync(messages, new() { ResponseFormat = ChatResponseFormat.ForJsonSchema(JsonSerializer.Deserialize(""" {"type": "string"} """)) }); oaiSettings = JsonSerializer.Deserialize(JsonSerializer.Serialize(actualSettings)); @@ -289,7 +289,7 @@ public async Task AsChatClientNonStreamingToolsPropagated(ChatToolMode mode) List messages = [new(ChatRole.User, "hi")]; - await client.CompleteAsync(messages, new() + await client.GetResponseAsync(messages, new() { Tools = [new NopAIFunction("AIFunc1"), new NopAIFunction("AIFunc2")], ToolMode = mode, @@ -335,8 +335,7 @@ public async Task AsChatClientNonStreamingToolsPropagated(ChatToolMode mode) private sealed class NopAIFunction(string name) : AIFunction { - public override AIFunctionMetadata Metadata => new(name); - + public override string Name => name; protected override Task InvokeCoreAsync(IEnumerable> arguments, CancellationToken cancellationToken) { throw new FormatException(); @@ -362,15 +361,15 @@ public async Task AsChatClientStreamingContentConvertedAsExpected() }, }.AsChatClient(); - List result = await client.CompleteStreamingAsync([ + List result = await client.GetStreamingResponseAsync([ new(ChatRole.System, [ new Microsoft.Extensions.AI.TextContent("some text"), - new Microsoft.Extensions.AI.ImageContent("http://imageurl"), + new Microsoft.Extensions.AI.DataContent("http://imageurl", "image/jpeg"), ]), new(ChatRole.User, [ - new Microsoft.Extensions.AI.AudioContent("http://audiourl"), + new Microsoft.Extensions.AI.DataContent("http://audiourl", "audio/mpeg"), new Microsoft.Extensions.AI.TextContent("some other text"), ]), new(ChatRole.Assistant, @@ -379,7 +378,7 @@ public async Task AsChatClientStreamingContentConvertedAsExpected() ]), new(ChatRole.Tool, [ - new Microsoft.Extensions.AI.FunctionResultContent("call123", "FunctionName", 42), + new Microsoft.Extensions.AI.FunctionResultContent("call123", 42), ]), ], new ChatOptions() { @@ -423,7 +422,7 @@ public async Task AsChatClientStreamingContentConvertedAsExpected() var frc = Assert.IsType(actualChatHistory[3].Items[0]); Assert.Equal("call123", frc.CallId); - Assert.Equal("FunctionName", frc.FunctionName); + Assert.Null(frc.FunctionName); Assert.Equal(42, frc.Result); Assert.NotNull(actualSettings); @@ -451,7 +450,7 @@ public async Task AsChatCompletionServiceNonStreamingContentConvertedAsExpected( await Task.Yield(); actualChatHistory = messages; actualOptions = 
options; - return new Microsoft.Extensions.AI.ChatCompletion(new ChatMessage() { Text = "the result" }); + return new Microsoft.Extensions.AI.ChatResponse(new ChatMessage() { Text = "the result" }); }, }; @@ -505,8 +504,8 @@ public async Task AsChatCompletionServiceNonStreamingContentConvertedAsExpected( Assert.Single(actualChatHistory[3].Contents); Assert.Equal("some text", Assert.IsType(actualChatHistory[0].Contents[0]).Text); - Assert.Equal("http://imageurl/", Assert.IsType(actualChatHistory[0].Contents[1]).Uri?.ToString()); - Assert.Equal("http://audiourl/", Assert.IsType(actualChatHistory[1].Contents[0]).Uri?.ToString()); + Assert.Equal("http://imageurl/", Assert.IsType(actualChatHistory[0].Contents[1]).Uri?.ToString()); + Assert.Equal("http://audiourl/", Assert.IsType(actualChatHistory[1].Contents[0]).Uri?.ToString()); Assert.Equal("some other text", Assert.IsType(actualChatHistory[1].Contents[1]).Text); var fcc = Assert.IsType(actualChatHistory[2].Contents[0]); @@ -516,7 +515,6 @@ public async Task AsChatCompletionServiceNonStreamingContentConvertedAsExpected( var frc = Assert.IsType(actualChatHistory[3].Contents[0]); Assert.Equal("call123", frc.CallId); - Assert.Equal("FunctionName", frc.Name); Assert.Equal(42, frc.Result); Assert.NotNull(actualOptions); @@ -542,7 +540,7 @@ public async Task AsChatCompletionServiceStreamingContentConvertedAsExpected() { actualChatHistory = messages; actualOptions = options; - return new List() + return new List() { new() { Role = ChatRole.Assistant, Text = "the result" } }.ToAsyncEnumerable(); @@ -600,8 +598,8 @@ public async Task AsChatCompletionServiceStreamingContentConvertedAsExpected() Assert.Single(actualChatHistory[3].Contents); Assert.Equal("some text", Assert.IsType(actualChatHistory[0].Contents[0]).Text); - Assert.Equal("http://imageurl/", Assert.IsType(actualChatHistory[0].Contents[1]).Uri?.ToString()); - Assert.Equal("http://audiourl/", Assert.IsType(actualChatHistory[1].Contents[0]).Uri?.ToString()); + Assert.Equal("http://imageurl/", Assert.IsType(actualChatHistory[0].Contents[1]).Uri?.ToString()); + Assert.Equal("http://audiourl/", Assert.IsType(actualChatHistory[1].Contents[0]).Uri?.ToString()); Assert.Equal("some other text", Assert.IsType(actualChatHistory[1].Contents[1]).Text); var fcc = Assert.IsType(actualChatHistory[2].Contents[0]); @@ -611,7 +609,6 @@ public async Task AsChatCompletionServiceStreamingContentConvertedAsExpected() var frc = Assert.IsType(actualChatHistory[3].Contents[0]); Assert.Equal("call123", frc.CallId); - Assert.Equal("FunctionName", frc.Name); Assert.Equal(42, frc.Result); Assert.NotNull(actualOptions); @@ -652,18 +649,18 @@ private sealed class TestChatClient : IChatClient { public ChatClientMetadata Metadata { get; set; } = new(); - public Func, ChatOptions?, CancellationToken, Task>? CompleteAsyncDelegate { get; set; } + public Func, ChatOptions?, CancellationToken, Task>? CompleteAsyncDelegate { get; set; } - public Func, ChatOptions?, CancellationToken, IAsyncEnumerable>? CompleteStreamingAsyncDelegate { get; set; } + public Func, ChatOptions?, CancellationToken, IAsyncEnumerable>? CompleteStreamingAsyncDelegate { get; set; } - public Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public Task GetResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { return this.CompleteAsyncDelegate != null ? 
this.CompleteAsyncDelegate(chatMessages, options, cancellationToken) : throw new NotImplementedException(); } - public IAsyncEnumerable CompleteStreamingAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable GetStreamingResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { return this.CompleteStreamingAsyncDelegate != null ? this.CompleteStreamingAsyncDelegate(chatMessages, options, cancellationToken) @@ -674,7 +671,7 @@ public void Dispose() { } public object? GetService(Type serviceType, object? serviceKey = null) { - return null; + return serviceType == typeof(ChatClientMetadata) ? this.Metadata : null; } } @@ -709,7 +706,7 @@ public Task>> GenerateAsync(IEnumerable() { ["metadata-key-7"] = "metadata-value-7" } }, + new FileReferenceContent(fileId: "file-id-2") { Tools = ["a", "b", "c"] }, new AnnotationContent("quote-8") { ModelId = "model-8", FileId = "file-id-2", StartIndex = 2, EndIndex = 24, Metadata = new Dictionary() { ["metadata-key-8"] = "metadata-value-8" } }, ]; @@ -301,15 +302,21 @@ public void ItCanBeSerializeAndDeserialized() Assert.Equal("function-id", functionResultContent.CallId); Assert.Equal("plugin-name", functionResultContent.PluginName); - var fileReferenceContent = deserializedMessage.Items[8] as FileReferenceContent; - Assert.NotNull(fileReferenceContent); - Assert.Equal("file-id-1", fileReferenceContent.FileId); - Assert.Equal("model-7", fileReferenceContent.ModelId); - Assert.NotNull(fileReferenceContent.Metadata); - Assert.Single(fileReferenceContent.Metadata); - Assert.Equal("metadata-value-7", fileReferenceContent.Metadata["metadata-key-7"]?.ToString()); - - var annotationContent = deserializedMessage.Items[9] as AnnotationContent; + var fileReferenceContent1 = deserializedMessage.Items[8] as FileReferenceContent; + Assert.NotNull(fileReferenceContent1); + Assert.Equal("file-id-1", fileReferenceContent1.FileId); + Assert.Equal("model-7", fileReferenceContent1.ModelId); + Assert.NotNull(fileReferenceContent1.Metadata); + Assert.Single(fileReferenceContent1.Metadata); + Assert.Equal("metadata-value-7", fileReferenceContent1.Metadata["metadata-key-7"]?.ToString()); + + var fileReferenceContent2 = deserializedMessage.Items[9] as FileReferenceContent; + Assert.NotNull(fileReferenceContent2); + Assert.Equal("file-id-2", fileReferenceContent2.FileId); + Assert.NotNull(fileReferenceContent2.Tools); + Assert.Equal(3, fileReferenceContent2.Tools.Count); + + var annotationContent = deserializedMessage.Items[10] as AnnotationContent; Assert.NotNull(annotationContent); Assert.Equal("file-id-2", annotationContent.FileId); Assert.Equal("quote-8", annotationContent.Quote); diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/FileReferenceContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/FileReferenceContentTests.cs index 6b55818c9473..b698fa528bff 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Contents/FileReferenceContentTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/FileReferenceContentTests.cs @@ -21,6 +21,7 @@ public void VerifyFileReferenceContentInitialState() Assert.Empty(definition.FileId); } + /// /// Verify usage. /// @@ -30,5 +31,20 @@ public void VerifyFileReferenceContentUsage() FileReferenceContent definition = new(fileId: "testfile"); Assert.Equal("testfile", definition.FileId); + Assert.Null(definition.Tools); + } + + /// + /// Verify usage. 
+ /// + [Fact] + public void VerifyFileReferenceToolUsage() + { + FileReferenceContent definition = new(fileId: "testfile") { Tools = new[] { "a", "b", "c" } }; + + Assert.Equal("testfile", definition.FileId); + Assert.NotNull(definition.Tools); + Assert.Equal(3, definition.Tools.Count); + Assert.Equivalent(new[] { "a", "b", "c" }, definition.Tools); } } diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs index eb954752ce4b..46da513e4a7c 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System.Text; -using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.Agents; using Xunit; namespace SemanticKernel.UnitTests.Contents; diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/KernelBuilderExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/KernelBuilderExtensionsTests.cs deleted file mode 100644 index 3e155bea18fe..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Data/KernelBuilderExtensionsTests.cs +++ /dev/null @@ -1,125 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Data; -using Xunit; - -namespace SemanticKernel.UnitTests.Data; - -/// -/// Contains tests for KernelBuilderExtensions". -/// -public class KernelBuilderExtensionsTests -{ - private readonly IKernelBuilder _kernelBuilder; - - public KernelBuilderExtensionsTests() - { - this._kernelBuilder = Kernel.CreateBuilder(); - } - - [Obsolete("The VolatileVectorStore is obsolete so this test is as well.")] - [Fact] - public void AddVectorStoreRegistersClass() - { - // Act. - this._kernelBuilder.AddVolatileVectorStore(); - - // Assert. - var kernel = this._kernelBuilder.Build(); - var vectorStore = kernel.Services.GetRequiredService(); - Assert.NotNull(vectorStore); - Assert.IsType(vectorStore); - } - - [Obsolete("The VolatileVectorStore is obsolete so this test is as well.")] - [Fact] - public void AddVolatileVectorStoreTextSearchRegistersClass() - { - // Arrange. - this._kernelBuilder.AddVolatileVectorStore(); - this._kernelBuilder.AddOpenAITextEmbeddingGeneration("modelId", "apiKey"); - - // Act. - this._kernelBuilder.AddVolatileVectorStoreTextSearch( - "records", - new DataModelTextSearchStringMapper(), - new DataModelTextSearchResultMapper()); - - // Assert. - var kernel = this._kernelBuilder.Build(); - var vectorStoreTextSearch = kernel.Services.GetRequiredService>(); - Assert.NotNull(vectorStoreTextSearch); - Assert.IsType>(vectorStoreTextSearch); - } - - [Obsolete("The VolatileVectorStore is obsolete so this test is as well.")] - [Fact] - public void AddVolatileVectorStoreTextSearchWithDelegatesRegistersClass() - { - // Arrange. - this._kernelBuilder.AddVolatileVectorStore(); - this._kernelBuilder.AddOpenAITextEmbeddingGeneration("modelId", "apiKey"); - - // Act. - this._kernelBuilder.AddVolatileVectorStoreTextSearch( - "records", - obj => ((DataModel)obj).Text, - obj => new TextSearchResult(value: ((DataModel)obj).Text) { Name = ((DataModel)obj).Key.ToString() }); - - // Assert. 
- var kernel = this._kernelBuilder.Build(); - var vectorStoreTextSearch = kernel.Services.GetRequiredService>(); - Assert.NotNull(vectorStoreTextSearch); - Assert.IsType>(vectorStoreTextSearch); - } - - /// - /// String mapper which converts a DataModel to a string. - /// - private sealed class DataModelTextSearchStringMapper : ITextSearchStringMapper - { - /// - public string MapFromResultToString(object result) - { - if (result is DataModel dataModel) - { - return dataModel.Text; - } - throw new ArgumentException("Invalid result type."); - } - } - - /// - /// Result mapper which converts a DataModel to a TextSearchResult. - /// - private sealed class DataModelTextSearchResultMapper : ITextSearchResultMapper - { - /// - public TextSearchResult MapFromResultToTextSearchResult(object result) - { - if (result is DataModel dataModel) - { - return new TextSearchResult(value: dataModel.Text) { Name = dataModel.Key.ToString() }; - } - throw new ArgumentException("Invalid result type."); - } - } - -#pragma warning disable CA1812 // Avoid uninstantiated internal classes - private sealed class DataModel -#pragma warning restore CA1812 // Avoid uninstantiated internal classes - { - [VectorStoreRecordKey] - public Guid Key { get; init; } - - [VectorStoreRecordData] - public required string Text { get; init; } - - [VectorStoreRecordVector(1536)] - public ReadOnlyMemory Embedding { get; init; } - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/ServiceCollectionExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/ServiceCollectionExtensionsTests.cs deleted file mode 100644 index 79691fabe8b6..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Data/ServiceCollectionExtensionsTests.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Data; -using Xunit; - -namespace SemanticKernel.UnitTests.Data; - -/// -/// Contains tests for the ServiceCollectionExtensions class. -/// -public class ServiceCollectionExtensionsTests -{ - private readonly IServiceCollection _serviceCollection; - - public ServiceCollectionExtensionsTests() - { - this._serviceCollection = new ServiceCollection(); - } - - [Obsolete("The VolatileVectorStore is obsolete so this test is as well.")] - [Fact] - public void AddVectorStoreRegistersClass() - { - // Act. - this._serviceCollection.AddVolatileVectorStore(); - - // Assert. - var serviceProvider = this._serviceCollection.BuildServiceProvider(); - var vectorStore = serviceProvider.GetRequiredService(); - Assert.NotNull(vectorStore); - Assert.IsType(vectorStore); - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs deleted file mode 100644 index b93c00952705..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs +++ /dev/null @@ -1,577 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Data; -using Xunit; - -namespace SemanticKernel.UnitTests.Data; - -/// -/// Contains tests for the class. 
-/// -[Obsolete("The VolatileVectorStoreRecordCollection is obsolete so these tests are as well.")] -public class VolatileVectorStoreRecordCollectionTests -{ - private const string TestCollectionName = "testcollection"; - private const string TestRecordKey1 = "testid1"; - private const string TestRecordKey2 = "testid2"; - private const int TestRecordIntKey1 = 1; - private const int TestRecordIntKey2 = 2; - - private readonly CancellationToken _testCancellationToken = new(false); - - private readonly ConcurrentDictionary> _collectionStore; - private readonly ConcurrentDictionary _collectionStoreTypes; - - public VolatileVectorStoreRecordCollectionTests() - { - this._collectionStore = new(); - this._collectionStoreTypes = new(); - } - - [Theory] - [InlineData(TestCollectionName, true)] - [InlineData("nonexistentcollection", false)] - public async Task CollectionExistsReturnsCollectionStateAsync(string collectionName, bool expectedExists) - { - // Arrange - var collection = new ConcurrentDictionary(); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = new VolatileVectorStoreRecordCollection>( - this._collectionStore, - this._collectionStoreTypes, - collectionName); - - // Act - var actual = await sut.CollectionExistsAsync(this._testCancellationToken); - - // Assert - Assert.Equal(expectedExists, actual); - } - - [Fact] - public async Task CanCreateCollectionAsync() - { - // Arrange - var sut = this.CreateRecordCollection(false); - - // Act - await sut.CreateCollectionAsync(this._testCancellationToken); - - // Assert - Assert.True(this._collectionStore.ContainsKey(TestCollectionName)); - } - - [Fact] - public async Task DeleteCollectionRemovesCollectionFromDictionaryAsync() - { - // Arrange - var collection = new ConcurrentDictionary(); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(false); - - // Act - await sut.DeleteCollectionAsync(this._testCancellationToken); - - // Assert - Assert.Empty(this._collectionStore); - } - - [Theory] - [InlineData(true, TestRecordKey1)] - [InlineData(true, TestRecordIntKey1)] - [InlineData(false, TestRecordKey1)] - [InlineData(false, TestRecordIntKey1)] - public async Task CanGetRecordWithVectorsAsync(bool useDefinition, TKey testKey) - where TKey : notnull - { - // Arrange - var record = CreateModel(testKey, withVectors: true); - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey!, record); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var actual = await sut.GetAsync( - testKey, - new() - { - IncludeVectors = true - }, - this._testCancellationToken); - - // Assert - var expectedArgs = new object[] { TestRecordKey1 }; - - Assert.NotNull(actual); - Assert.Equal(testKey, actual.Key); - Assert.Equal($"data {testKey}", actual.Data); - Assert.Equal(new float[] { 1, 2, 3, 4 }, actual.Vector!.Value.ToArray()); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2)] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] - [InlineData(false, TestRecordKey1, TestRecordKey2)] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] - public async Task CanGetManyRecordsWithVectorsAsync(bool useDefinition, TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true); - var record2 = CreateModel(testKey2, withVectors: true); - var collection = new ConcurrentDictionary(); - 
collection.TryAdd(testKey1!, record1); - collection.TryAdd(testKey2!, record2); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var actual = await sut.GetBatchAsync( - [testKey1, testKey2], - new() - { - IncludeVectors = true - }, - this._testCancellationToken).ToListAsync(); - - // Assert - Assert.NotNull(actual); - Assert.Equal(2, actual.Count); - Assert.Equal(testKey1, actual[0].Key); - Assert.Equal($"data {testKey1}", actual[0].Data); - Assert.Equal(testKey2, actual[1].Key); - Assert.Equal($"data {testKey2}", actual[1].Data); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2)] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] - [InlineData(false, TestRecordKey1, TestRecordKey2)] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] - public async Task CanDeleteRecordAsync(bool useDefinition, TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true); - var record2 = CreateModel(testKey2, withVectors: true); - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1, record1); - collection.TryAdd(testKey2, record2); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - await sut.DeleteAsync( - testKey1, - cancellationToken: this._testCancellationToken); - - // Assert - Assert.False(collection.ContainsKey(testKey1)); - Assert.True(collection.ContainsKey(testKey2)); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2)] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] - [InlineData(false, TestRecordKey1, TestRecordKey2)] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] - public async Task CanDeleteManyRecordsWithVectorsAsync(bool useDefinition, TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true); - var record2 = CreateModel(testKey2, withVectors: true); - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1, record1); - collection.TryAdd(testKey2, record2); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - await sut.DeleteBatchAsync( - [testKey1, testKey2], - cancellationToken: this._testCancellationToken); - - // Assert - Assert.False(collection.ContainsKey(testKey1)); - Assert.False(collection.ContainsKey(testKey2)); - } - - [Theory] - [InlineData(true, TestRecordKey1)] - [InlineData(true, TestRecordIntKey1)] - [InlineData(false, TestRecordKey1)] - [InlineData(false, TestRecordIntKey1)] - public async Task CanUpsertRecordAsync(bool useDefinition, TKey testKey1) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true); - var collection = new ConcurrentDictionary(); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var upsertResult = await sut.UpsertAsync( - record1, - cancellationToken: this._testCancellationToken); - - // Assert - Assert.Equal(testKey1, upsertResult); - Assert.True(collection.ContainsKey(testKey1)); - Assert.IsType>(collection[testKey1]); - Assert.Equal($"data {testKey1}", (collection[testKey1] as SinglePropsModel)!.Data); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2)] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] - 
[InlineData(false, TestRecordKey1, TestRecordKey2)] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] - public async Task CanUpsertManyRecordsAsync(bool useDefinition, TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true); - var record2 = CreateModel(testKey2, withVectors: true); - - var collection = new ConcurrentDictionary(); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var actual = await sut.UpsertBatchAsync( - [record1, record2], - cancellationToken: this._testCancellationToken).ToListAsync(); - - // Assert - Assert.NotNull(actual); - Assert.Equal(2, actual.Count); - Assert.Equal(testKey1, actual[0]); - Assert.Equal(testKey2, actual[1]); - - Assert.True(collection.ContainsKey(testKey1)); - Assert.IsType>(collection[testKey1]); - Assert.Equal($"data {testKey1}", (collection[testKey1] as SinglePropsModel)!.Data); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2)] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] - [InlineData(false, TestRecordKey1, TestRecordKey2)] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] - public async Task CanSearchWithVectorAsync(bool useDefinition, TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true, new float[] { 1, 1, 1, 1 }); - var record2 = CreateModel(testKey2, withVectors: true, new float[] { -1, -1, -1, -1 }); - - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1, record1); - collection.TryAdd(testKey2, record2); - - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var actual = await sut.VectorizedSearchAsync( - new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new() { IncludeVectors = true }, - this._testCancellationToken); - - // Assert - Assert.NotNull(actual); - Assert.Null(actual.TotalCount); - var actualResults = await actual.Results.ToListAsync(); - Assert.Equal(2, actualResults.Count); - Assert.Equal(testKey1, actualResults[0].Record.Key); - Assert.Equal($"data {testKey1}", actualResults[0].Record.Data); - Assert.Equal(1, actualResults[0].Score); - Assert.Equal(testKey2, actualResults[1].Record.Key); - Assert.Equal($"data {testKey2}", actualResults[1].Record.Data); - Assert.Equal(-1, actualResults[1].Score); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2, "Equality")] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2, "Equality")] - [InlineData(false, TestRecordKey1, TestRecordKey2, "Equality")] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2, "Equality")] - [InlineData(true, TestRecordKey1, TestRecordKey2, "TagListContains")] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2, "TagListContains")] - [InlineData(false, TestRecordKey1, TestRecordKey2, "TagListContains")] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2, "TagListContains")] - public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, TKey testKey1, TKey testKey2, string filterType) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true, new float[] { 1, 1, 1, 1 }); - var record2 = CreateModel(testKey2, withVectors: true, new float[] { -1, -1, -1, -1 }); - - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1, record1); - collection.TryAdd(testKey2, record2); - - 
this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var filter = filterType == "Equality" ? new VectorSearchFilter().EqualTo("Data", $"data {testKey2}") : new VectorSearchFilter().AnyTagEqualTo("Tags", $"tag {testKey2}"); - var actual = await sut.VectorizedSearchAsync( - new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new() { IncludeVectors = true, OldFilter = filter, IncludeTotalCount = true }, - this._testCancellationToken); - - // Assert - Assert.NotNull(actual); - Assert.Equal(1, actual.TotalCount); - var actualResults = await actual.Results.ToListAsync(); - Assert.Single(actualResults); - Assert.Equal(testKey2, actualResults[0].Record.Key); - Assert.Equal($"data {testKey2}", actualResults[0].Record.Data); - Assert.Equal(-1, actualResults[0].Score); - } - - [Theory] - [InlineData(DistanceFunction.CosineSimilarity, 1, -1)] - [InlineData(DistanceFunction.CosineDistance, 0, 2)] - [InlineData(DistanceFunction.DotProductSimilarity, 4, -4)] - [InlineData(DistanceFunction.EuclideanDistance, 0, 4)] - public async Task CanSearchWithDifferentDistanceFunctionsAsync(string distanceFunction, double expectedScoreResult1, double expectedScoreResult2) - { - // Arrange - var record1 = CreateModel(TestRecordKey1, withVectors: true, new float[] { 1, 1, 1, 1 }); - var record2 = CreateModel(TestRecordKey2, withVectors: true, new float[] { -1, -1, -1, -1 }); - - var collection = new ConcurrentDictionary(); - collection.TryAdd(TestRecordKey1, record1); - collection.TryAdd(TestRecordKey2, record2); - - this._collectionStore.TryAdd(TestCollectionName, collection); - - VectorStoreRecordDefinition singlePropsDefinition = new() - { - Properties = - [ - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("Data", typeof(string)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { DistanceFunction = distanceFunction } - ] - }; - - var sut = new VolatileVectorStoreRecordCollection>( - this._collectionStore, - this._collectionStoreTypes, - TestCollectionName, - new() - { - VectorStoreRecordDefinition = singlePropsDefinition - }); - - // Act - var actual = await sut.VectorizedSearchAsync( - new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new() { IncludeVectors = true }, - this._testCancellationToken); - - // Assert - Assert.NotNull(actual); - var actualResults = await actual.Results.ToListAsync(); - Assert.Equal(2, actualResults.Count); - Assert.Equal(TestRecordKey1, actualResults[0].Record.Key); - Assert.Equal($"data {TestRecordKey1}", actualResults[0].Record.Data); - Assert.Equal(expectedScoreResult1, actualResults[0].Score); - Assert.Equal(TestRecordKey2, actualResults[1].Record.Key); - Assert.Equal($"data {TestRecordKey2}", actualResults[1].Record.Data); - Assert.Equal(expectedScoreResult2, actualResults[1].Score); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public async Task CanSearchManyRecordsAsync(bool useDefinition) - { - // Arrange - var collection = new ConcurrentDictionary(); - for (int i = 0; i < 1000; i++) - { - if (i <= 14) - { - collection.TryAdd(i, CreateModel(i, withVectors: true, new float[] { 1, 1, 1, 1 })); - } - else - { - collection.TryAdd(i, CreateModel(i, withVectors: true, new float[] { -1, -1, -1, -1 })); - } - } - - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var actual = await sut.VectorizedSearchAsync( - new ReadOnlyMemory(new float[] { 
1, 1, 1, 1 }), - new() { IncludeVectors = true, Top = 10, Skip = 10, IncludeTotalCount = true }, - this._testCancellationToken); - - // Assert - Assert.NotNull(actual); - Assert.Equal(1000, actual.TotalCount); - - // Assert that top was respected - var actualResults = await actual.Results.ToListAsync(); - Assert.Equal(10, actualResults.Count); - var actualIds = actualResults.Select(r => r.Record.Key).ToList(); - for (int i = 0; i < 10; i++) - { - // Assert that skip was respected - Assert.Contains(i + 10, actualIds); - if (i <= 4) - { - Assert.Equal(1, actualResults[i].Score); - } - else - { - Assert.Equal(-1, actualResults[i].Score); - } - } - } - - [Theory] - [InlineData(TestRecordKey1, TestRecordKey2)] - [InlineData(TestRecordIntKey1, TestRecordIntKey2)] - public async Task ItCanSearchUsingTheGenericDataModelAsync(TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = new VectorStoreGenericDataModel(testKey1) - { - Data = new Dictionary - { - ["Data"] = $"data {testKey1}", - ["Tags"] = new List { "default tag", "tag " + testKey1 } - }, - Vectors = new Dictionary - { - ["Vector"] = new ReadOnlyMemory([1, 1, 1, 1]) - } - }; - var record2 = new VectorStoreGenericDataModel(testKey2) - { - Data = new Dictionary - { - ["Data"] = $"data {testKey2}", - ["Tags"] = new List { "default tag", "tag " + testKey2 } - }, - Vectors = new Dictionary - { - ["Vector"] = new ReadOnlyMemory([-1, -1, -1, -1]) - } - }; - - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1, record1); - collection.TryAdd(testKey2, record2); - - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = new VolatileVectorStoreRecordCollection>( - this._collectionStore, - this._collectionStoreTypes, - TestCollectionName, - new() - { - VectorStoreRecordDefinition = this._singlePropsDefinition - }); - - // Act - var actual = await sut.VectorizedSearchAsync( - new ReadOnlyMemory([1, 1, 1, 1]), - new() { IncludeVectors = true, VectorPropertyName = "Vector" }, - this._testCancellationToken); - - // Assert - Assert.NotNull(actual); - var actualResults = await actual.Results.ToListAsync(); - Assert.Equal(2, actualResults.Count); - Assert.Equal(testKey1, actualResults[0].Record.Key); - Assert.Equal($"data {testKey1}", actualResults[0].Record.Data["Data"]); - Assert.Equal(1, actualResults[0].Score); - Assert.Equal(testKey2, actualResults[1].Record.Key); - Assert.Equal($"data {testKey2}", actualResults[1].Record.Data["Data"]); - Assert.Equal(-1, actualResults[1].Score); - } - - private static SinglePropsModel CreateModel(TKey key, bool withVectors, float[]? vector = null) - { - return new SinglePropsModel - { - Key = key, - Data = "data " + key, - Tags = new List { "default tag", "tag " + key }, - Vector = vector ?? (withVectors ? new float[] { 1, 2, 3, 4 } : null), - NotAnnotated = null, - }; - } - - private VolatileVectorStoreRecordCollection> CreateRecordCollection(bool useDefinition) - where TKey : notnull - { - return new VolatileVectorStoreRecordCollection>( - this._collectionStore, - this._collectionStoreTypes, - TestCollectionName, - new() - { - VectorStoreRecordDefinition = useDefinition ? 
this._singlePropsDefinition : null - }); - } - - private readonly VectorStoreRecordDefinition _singlePropsDefinition = new() - { - Properties = - [ - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("Tags", typeof(List)) { IsFilterable = true }, - new VectorStoreRecordDataProperty("Data", typeof(string)) { IsFilterable = true }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) - ] - }; - - public sealed class SinglePropsModel - { - [VectorStoreRecordKey] - public TKey? Key { get; set; } - - [VectorStoreRecordData(IsFilterable = true)] - public List Tags { get; set; } = new List(); - - [VectorStoreRecordData(IsFilterable = true)] - public string Data { get; set; } = string.Empty; - - [VectorStoreRecordVector] - public ReadOnlyMemory? Vector { get; set; } - - public string? NotAnnotated { get; set; } - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreTests.cs deleted file mode 100644 index 70d6e0264dfb..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreTests.cs +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Concurrent; -using System.Linq; -using System.Threading.Tasks; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Data; -using Xunit; - -namespace SemanticKernel.UnitTests.Data; - -/// -/// Contains tests for the class. -/// -[Obsolete("The VolatileVectorStore is obsolete so these tests are as well.")] -public class VolatileVectorStoreTests -{ - private const string TestCollectionName = "testcollection"; - - [Fact] - public void GetCollectionReturnsCollection() - { - // Arrange. - var sut = new VolatileVectorStore(); - - // Act. - var actual = sut.GetCollection>(TestCollectionName); - - // Assert. - Assert.NotNull(actual); - Assert.IsType>>(actual); - } - - [Fact] - public void GetCollectionReturnsCollectionWithNonStringKey() - { - // Arrange. - var sut = new VolatileVectorStore(); - - // Act. - var actual = sut.GetCollection>(TestCollectionName); - - // Assert. - Assert.NotNull(actual); - Assert.IsType>>(actual); - } - - [Fact] - public async Task ListCollectionNamesReadsDictionaryAsync() - { - // Arrange. - var collectionStore = new ConcurrentDictionary>(); - collectionStore.TryAdd("collection1", new ConcurrentDictionary()); - collectionStore.TryAdd("collection2", new ConcurrentDictionary()); - var sut = new VolatileVectorStore(collectionStore); - - // Act. - var collectionNames = sut.ListCollectionNamesAsync(); - - // Assert. - var collectionNamesList = await collectionNames.ToListAsync(); - Assert.Equal(new[] { "collection1", "collection2" }, collectionNamesList); - } - - [Fact] - public async Task GetCollectionDoesNotAllowADifferentDataTypeThanPreviouslyUsedAsync() - { - // Arrange. - var sut = new VolatileVectorStore(); - var stringKeyCollection = sut.GetCollection>(TestCollectionName); - await stringKeyCollection.CreateCollectionAsync(); - - // Act and assert. 
- var exception = Assert.Throws(() => sut.GetCollection(TestCollectionName)); - Assert.Equal($"Collection '{TestCollectionName}' already exists and with data type 'SinglePropsModel`1' so cannot be re-created with data type 'SecondModel'.", exception.Message); - } - -#pragma warning disable CA1812 // Classes are used as generic arguments - private sealed class SinglePropsModel - { - [VectorStoreRecordKey] - public required TKey Key { get; set; } - - [VectorStoreRecordData] - public string Data { get; set; } = string.Empty; - - [VectorStoreRecordVector(4)] - public ReadOnlyMemory? Vector { get; set; } - - public string? NotAnnotated { get; set; } - } - - private sealed class SecondModel - { - [VectorStoreRecordKey] - public required int Key { get; set; } - - [VectorStoreRecordData] - public string Data { get; set; } = string.Empty; - } -#pragma warning restore CA1812 -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs index f7a4e947ec38..c9c348d1ac44 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs @@ -64,7 +64,7 @@ public void ItProvideStatusForResponsesWithoutContent() // Assert Assert.NotNull(httpOperationException); Assert.NotNull(httpOperationException.StatusCode); - Assert.Empty(httpOperationException.ResponseContent!); + Assert.Null(httpOperationException.ResponseContent); Assert.Equal(exception, httpOperationException.InnerException); Assert.Equal(exception.Message, httpOperationException.Message); Assert.Equal(pipelineResponse.Status, (int)httpOperationException.StatusCode!); diff --git a/dotnet/src/SemanticKernel.UnitTests/Filters/PromptRenderFilterTests.cs b/dotnet/src/SemanticKernel.UnitTests/Filters/PromptRenderFilterTests.cs index 3a0f1e627bd6..9d06c9e71630 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Filters/PromptRenderFilterTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Filters/PromptRenderFilterTests.cs @@ -321,4 +321,42 @@ public async Task FilterContextHasValidStreamingFlagAsync(bool isStreaming) // Assert Assert.Equal(isStreaming, actualStreamingFlag); } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public async Task PromptExecutionSettingsArePropagatedToFilterContextAsync(bool isStreaming) + { + // Arrange + PromptExecutionSettings? 
actualExecutionSettings = null; + + var mockTextGeneration = this.GetMockTextGeneration(); + + var function = KernelFunctionFactory.CreateFromPrompt("Prompt"); + + var kernel = this.GetKernelWithFilters(textGenerationService: mockTextGeneration.Object, + onPromptRender: (context, next) => + { + actualExecutionSettings = context.ExecutionSettings; + return next(context); + }); + + var expectedExecutionSettings = new PromptExecutionSettings(); + + var arguments = new KernelArguments(expectedExecutionSettings); + + // Act + if (isStreaming) + { + await foreach (var item in kernel.InvokeStreamingAsync(function, arguments)) + { } + } + else + { + await kernel.InvokeAsync(function, arguments); + } + + // Assert + Assert.Same(expectedExecutionSettings, actualExecutionSettings); + } } diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelPluginTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelPluginTests.cs index 877bf8a90857..db62e3ad6769 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelPluginTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelPluginTests.cs @@ -208,21 +208,19 @@ public async Task ItCanProduceAIFunctionsThatInvokeKernelFunctions(bool withKern AIFunction[] funcs = plugin.AsAIFunctions(kernel).ToArray(); Assert.Equal(2, funcs.Length); - Assert.Equal("PluginName-Function1", funcs[0].Metadata.Name); - Assert.Equal("PluginName-Function2", funcs[1].Metadata.Name); - - Assert.Equal("arg1", Assert.Single(funcs[0].Metadata.Parameters).Name); - Assert.Equal(2, funcs[1].Metadata.Parameters.Count); - Assert.Equal("arg2", funcs[1].Metadata.Parameters[0].Name); - Assert.Equal("arg3", funcs[1].Metadata.Parameters[1].Name); - - Assert.NotNull(funcs[0].Metadata.Parameters[0].Schema); - Assert.NotNull(funcs[1].Metadata.Parameters[0].Schema); - Assert.NotNull(funcs[1].Metadata.Parameters[1].Schema); - - Assert.Equal(plugin["Function1"].Metadata.Parameters[0].Schema?.ToString(), JsonSerializer.Serialize(funcs[0].Metadata.Parameters[0].Schema)); - Assert.Equal(plugin["Function2"].Metadata.Parameters[0].Schema?.ToString(), JsonSerializer.Serialize(funcs[1].Metadata.Parameters[0].Schema)); - Assert.Equal(plugin["Function2"].Metadata.Parameters[1].Schema?.ToString(), JsonSerializer.Serialize(funcs[1].Metadata.Parameters[1].Schema)); + Assert.Equal("PluginName-Function1", funcs[0].Name); + Assert.Equal("PluginName-Function2", funcs[1].Name); + + var func1Properties = funcs[0].JsonSchema.GetProperty("properties").EnumerateObject().ToArray(); + var func2Properties = funcs[1].JsonSchema.GetProperty("properties").EnumerateObject().ToArray(); + Assert.Equal("arg1", Assert.Single(func1Properties).Name); + Assert.Equal(2, func2Properties.Length); + Assert.Equal("arg2", func2Properties[0].Name); + Assert.Equal("arg3", func2Properties[1].Name); + + Assert.Equal(plugin["Function1"].Metadata.Parameters[0].Schema?.ToString(), JsonSerializer.Serialize(func1Properties[0].Value)); + Assert.Equal(plugin["Function2"].Metadata.Parameters[0].Schema?.ToString(), JsonSerializer.Serialize(func2Properties[0].Value)); + Assert.Equal(plugin["Function2"].Metadata.Parameters[1].Schema?.ToString(), JsonSerializer.Serialize(func2Properties[1].Value)); using CancellationTokenSource cts = new(); diff --git a/dotnet/src/SemanticKernel.UnitTests/Prompt/ChatPromptParserTests.cs b/dotnet/src/SemanticKernel.UnitTests/Prompt/ChatPromptParserTests.cs index e3ad0cd53a5c..c081d35bc573 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Prompt/ChatPromptParserTests.cs +++ 
b/dotnet/src/SemanticKernel.UnitTests/Prompt/ChatPromptParserTests.cs @@ -12,7 +12,7 @@ public sealed class ChatPromptParserTests [Theory] [InlineData("This is plain prompt")] [InlineData("")] - [InlineData("This is invalidchat prompt")] + [InlineData("This is an invalid chat prompt")] public void ItReturnsNullChatHistoryWhenPromptIsPlainTextOrInvalid(string prompt) { // Act @@ -148,6 +148,86 @@ public void ItReturnsChatHistoryWithValidDataImageContent() }); } + [Fact] + public void ItReturnsChatHistoryWithMultipleTextParts() + { + // Arrange + string prompt = GetValidPromptWithMultipleTextParts(); + + // Act + bool result = ChatPromptParser.TryParse(prompt, out var chatHistory); + + // Assert + Assert.True(result); + Assert.NotNull(chatHistory); + + Assert.Collection(chatHistory, + c => Assert.Equal("What can I help with?", c.Content), + c => + { + Assert.Equal("Hello", c.Content); + Assert.Collection(c.Items, + o => + { + Assert.IsType(o); + Assert.Equal("Hello", ((TextContent)o).Text); + }, o => + { + Assert.IsType(o); + Assert.Equal("I am user", ((TextContent)o).Text); + }); + }); + } + + [Fact] + public void ItReturnsChatHistoryWithMixedXmlContent() + { + // Arrange + string prompt = GetValidPromptWithMixedXmlContent(); + + // Act + bool result = ChatPromptParser.TryParse(prompt, out var chatHistory); + + // Assert + Assert.True(result); + Assert.NotNull(chatHistory); + + Assert.Collection(chatHistory, + c => Assert.Equal("What can I help with?", c.Content), + c => + { + Assert.Equal("Hi how are you?", c.Content); + Assert.Single(c.Items); + }); + } + + [Fact] + public void ItReturnsChatHistoryWithEmptyContent() + { + // Arrange + string prompt = GetValidPromptWithEmptyContent(); + + // Act + bool result = ChatPromptParser.TryParse(prompt, out var chatHistory); + + // Assert + Assert.True(result); + Assert.NotNull(chatHistory); + + Assert.Collection(chatHistory, + c => Assert.Equal("What can I help with?", c.Content), + c => + { + Assert.Null(c.Content); + Assert.Empty(c.Items); + }, + c => + { + Assert.Null(c.Content); + Assert.Empty(c.Items); + }); + } + [Fact] public void ItReturnsChatHistoryWithValidContentItemsIncludeCode() { @@ -259,6 +339,50 @@ private static string GetValidPromptWithDataUriImageContent() """; } + private static string GetValidPromptWithMultipleTextParts() + { + return + """ + + What can I help with? + + + Hello + I am user + + + """; + } + + private static string GetValidPromptWithMixedXmlContent() + { + return + """ + + What can I help with? + + + This part will be discarded upon parsing + Hi how are you? + This part will also be discarded upon parsing + + + """; + } + + private static string GetValidPromptWithEmptyContent() + { + return + """ + + What can I help with? 
+ + + + + """; + } + private static string GetValidPromptWithCDataSection() { return diff --git a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj index 1e95741e153e..e3452f799be6 100644 --- a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj +++ b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj @@ -25,7 +25,6 @@ - diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/AIConnectors/FunctionCallsProcessorTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/AIConnectors/FunctionCallsProcessorTests.cs index 6f0b40f8e82d..fa488e6e7146 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Utilities/AIConnectors/FunctionCallsProcessorTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/AIConnectors/FunctionCallsProcessorTests.cs @@ -11,6 +11,7 @@ using Microsoft.SemanticKernel.ChatCompletion; #pragma warning disable IDE0005 // Using directive is unnecessary using Microsoft.SemanticKernel.Connectors.FunctionCalling; + #pragma warning restore IDE0005 // Using directive is unnecessary using Moq; using Xunit; @@ -21,6 +22,7 @@ public class FunctionCallsProcessorTests { private readonly FunctionCallsProcessor _sut = new(); private readonly FunctionChoiceBehaviorOptions _functionChoiceBehaviorOptions = new(); + private readonly PromptExecutionSettings _promptExecutionSettings = new(); [Fact] public void ItShouldReturnNoConfigurationIfNoBehaviorProvided() @@ -94,6 +96,7 @@ async Task ProcessFunctionCallsRecursivelyToReachInflightLimitAsync() await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: [], requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -123,6 +126,7 @@ public async Task ItShouldAddFunctionCallAssistantMessageToChatHistoryAsync() // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -151,6 +155,7 @@ public async Task ItShouldAddFunctionCallExceptionToChatHistoryAsync() // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -184,6 +189,7 @@ public async Task ItShouldAddFunctionInvocationExceptionToChatHistoryAsync() // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -212,6 +218,7 @@ public async Task ItShouldAddErrorToChatHistoryIfFunctionCallNotAdvertisedAsync( // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => false, // Return false to simulate that the function is not advertised @@ -240,6 +247,7 @@ public async Task ItShouldAddErrorToChatHistoryIfFunctionIsNotRegisteredOnKernel // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -281,6 +289,7 @@ public async Task ItShouldInvokeFunctionsAsync(bool 
invokeConcurrently) // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -347,6 +356,7 @@ public async Task ItShouldInvokeFiltersAsync(bool invokeConcurrently) // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -436,6 +446,7 @@ public async Task ItShouldInvokeMultipleFiltersInOrderAsync(bool invokeConcurren // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -484,6 +495,7 @@ public async Task FilterCanOverrideArgumentsAsync(bool invokeConcurrently) // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -536,6 +548,7 @@ public async Task FilterCanHandleExceptionAsync(bool invokeConcurrently) // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -588,6 +601,7 @@ public async Task FiltersCanSkipFunctionExecutionAsync(bool invokeConcurrently) // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -634,6 +648,7 @@ public async Task PreFilterCanTerminateOperationAsync(bool invokeConcurrently) // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -678,6 +693,7 @@ public async Task PostFilterCanTerminateOperationAsync(bool invokeConcurrently) // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -732,6 +748,7 @@ public async Task ItShouldHandleChatMessageContentAsFunctionResultAsync() // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -767,6 +784,7 @@ public async Task ItShouldSerializeFunctionResultOfUnknowTypeAsync() // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -824,6 +842,53 @@ public void ItShouldSerializeFunctionResultsOfComplexType() Assert.Equal("{\"a\":2,\"b\":\"test\"}", result); } + [Fact] + public void ItShouldSerializeFunctionResultsWithStringProperties() + { + // Arrange + var functionResult = new { Text = "テスト" }; + + // Act + var result = FunctionCallsProcessor.ProcessFunctionResult(functionResult); + + // Assert + Assert.Equal("{\"Text\":\"テスト\"}", result); + 
} + + [Fact] + public async Task ItShouldPassPromptExecutionSettingsToAutoFunctionInvocationFilterAsync() + { + // Arrange + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => { }, "Function1")]); + + AutoFunctionInvocationContext? actualContext = null; + + Kernel kernel = CreateKernel(plugin, (context, next) => + { + actualContext = context; + return Task.CompletedTask; + }); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function1-result" })); + + // Act + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + executionSettings: this._promptExecutionSettings, + chatHistory: new ChatHistory(), + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + options: this._functionChoiceBehaviorOptions, + kernel: kernel!, + isStreaming: false, + cancellationToken: CancellationToken.None); + + // Assert + Assert.NotNull(actualContext); + Assert.Same(this._promptExecutionSettings, actualContext!.ExecutionSettings); + } + private sealed class AutoFunctionInvocationFilter( Func, Task>? onAutoFunctionInvocation) : IAutoFunctionInvocationFilter { diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/ActivityExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/ActivityExtensionsTests.cs new file mode 100644 index 000000000000..26975a6bfb29 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/ActivityExtensionsTests.cs @@ -0,0 +1,107 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Diagnostics; +using Moq; +using Xunit; + +namespace SemanticKernel.UnitTests.Utilities; + +/// +/// Unit tests for activity extensions. 
+/// +public sealed class ActivityExtensionsTests +{ + [Fact] + public async Task RunWithActivityByDefaultReturnsExpectedResultsAsync() + { + // Arrange + var activityMock = new Mock(MockBehavior.Loose, "ActivityName"); + + // Act + var results = await ActivityExtensions.RunWithActivityAsync( + () => activityMock.Object, + () => new[] { 1, 2, 3 }.ToAsyncEnumerable(), CancellationToken.None).ToListAsync(); + + // Assert + Assert.Equal(new[] { 1, 2, 3 }, results); + } + + [Fact] + public async Task RunWithActivityWhenOperationThrowsExceptionActivitySetsErrorAndThrowsAsync() + { + // Arrange + var activityMock = new Mock(MockBehavior.Loose, "ActivityName"); + + // Act & Assert + var ex = await Assert.ThrowsAsync(async () => + await ActivityExtensions.RunWithActivityAsync( + () => activityMock.Object, + () => throw new InvalidOperationException("Test exception"), + CancellationToken.None).ToListAsync()); + + Assert.Equal("Test exception", ex.Message); + Assert.Equal(ActivityStatusCode.Error, activityMock.Object.Status); + + var errorTag = activityMock.Object.Tags.FirstOrDefault(l => l.Key == "error.type"); + + Assert.Contains(nameof(InvalidOperationException), errorTag.Value); + } + + [Fact] + public async Task RunWithActivityWhenEnumerationThrowsExceptionActivitySetsErrorAndThrows() + { + // Arrange + var activityMock = new Mock(MockBehavior.Loose, "ActivityName"); + + async static IAsyncEnumerable Operation() + { + yield return 1; + await Task.Yield(); + throw new InvalidOperationException("Enumeration error"); + } + + // Act & Assert + var ex = await Assert.ThrowsAsync(async () => + await ActivityExtensions.RunWithActivityAsync( + () => activityMock.Object, + Operation, + CancellationToken.None).ToListAsync()); + + Assert.Equal("Enumeration error", ex.Message); + Assert.Equal(ActivityStatusCode.Error, activityMock.Object.Status); + + var errorTag = activityMock.Object.Tags.FirstOrDefault(l => l.Key == "error.type"); + + Assert.Contains(nameof(InvalidOperationException), errorTag.Value); + } + + [Fact] + public async Task RunWithActivityWhenCancellationRequestedThrowsTaskCanceledException() + { + // Arrange + using var cts = new CancellationTokenSource(); + cts.Cancel(); + + var activityMock = new Mock(MockBehavior.Loose, "ActivityName"); + + async static IAsyncEnumerable Operation([EnumeratorCancellation] CancellationToken token) + { + await Task.Delay(10, token); + yield return 1; + } + + // Act & Assert + var ex = await Assert.ThrowsAsync(async () => + await ActivityExtensions.RunWithActivityAsync( + () => activityMock.Object, + () => Operation(cts.Token), + cts.Token).ToListAsync()); + } +} diff --git a/python/.coveragerc b/python/.coveragerc index dc37f315b86e..b51952a0c8e8 100644 --- a/python/.coveragerc +++ b/python/.coveragerc @@ -7,7 +7,7 @@ omit = semantic_kernel/connectors/memory/azure_cosmosdb_no_sql/* semantic_kernel/connectors/memory/chroma/* semantic_kernel/connectors/memory/milvus/* - semantic_kernel/connectors/memory/mongodb_atlas/* + semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_memory_store.py semantic_kernel/connectors/memory/pinecone/* semantic_kernel/connectors/memory/postgres/* semantic_kernel/connectors/memory/qdrant/qdrant_memory_store.py diff --git a/python/.cspell.json b/python/.cspell.json index ea24ad2d7ce4..485789ae22a1 100644 --- a/python/.cspell.json +++ b/python/.cspell.json @@ -25,12 +25,14 @@ "words": [ "aeiou", "aiplatform", + "autogen", "azuredocindex", "azuredocs", "boto", "contentvector", "contoso", "datamodel", + "desync", "dotenv", 
"endregion", "entra", @@ -45,10 +47,12 @@ "logprobs", "mistralai", "mongocluster", + "nd", "ndarray", "nopep", "NOSQL", "ollama", + "Onnx", "onyourdatatest", "OPENAI", "opentelemetry", @@ -64,18 +68,10 @@ "templating", "uninstrument", "vectordb", + "vectorizable", "vectorizer", "vectorstoremodel", "vertexai", - "Weaviate", - "qdrant", - "huggingface", - "pytestmark", - "contoso", - "opentelemetry", - "SEMANTICKERNEL", - "OTEL", - "vectorizable", - "desync" + "Weaviate" ] } \ No newline at end of file diff --git a/python/.env.example b/python/.env.example index 8e46ec2bb6de..7d9a407dc877 100644 --- a/python/.env.example +++ b/python/.env.example @@ -34,4 +34,6 @@ BOOKING_SAMPLE_CLIENT_ID="" BOOKING_SAMPLE_TENANT_ID="" BOOKING_SAMPLE_CLIENT_SECRET="" BOOKING_SAMPLE_BUSINESS_ID="" -BOOKING_SAMPLE_SERVICE_ID="" \ No newline at end of file +BOOKING_SAMPLE_SERVICE_ID="" +CREW_AI_ENDPOINT="" +CREW_AI_TOKEN="" \ No newline at end of file diff --git a/python/.pre-commit-config.yaml b/python/.pre-commit-config.yaml index cd935fb59473..11beea8e1877 100644 --- a/python/.pre-commit-config.yaml +++ b/python/.pre-commit-config.yaml @@ -32,14 +32,14 @@ repos: - id: pyupgrade args: [--py310-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.4 + rev: v0.9.6 hooks: - id: ruff args: [ --fix, --exit-non-zero-on-fix ] - id: ruff-format - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.5.21 + rev: 0.5.30 hooks: # Update the uv lockfile - id: uv-lock diff --git a/python/.vscode/launch.json b/python/.vscode/launch.json index 831aaf5149bc..80145e18a817 100644 --- a/python/.vscode/launch.json +++ b/python/.vscode/launch.json @@ -10,7 +10,7 @@ "request": "launch", "program": "${file}", "console": "integratedTerminal", - "justMyCode": true + "justMyCode": false }, { "name": "Python FastAPI app with Dapr", diff --git a/python/Makefile b/python/Makefile index 745bf2623921..b515e0c5cae1 100644 --- a/python/Makefile +++ b/python/Makefile @@ -32,7 +32,7 @@ help: echo "" echo -e "\033[1mVARIABLES:\033[0m" echo " PYTHON_VERSION - Python version to use. Default is 3.10" - echo " By default, 3.10, 3.11, and 3.12 are installed as well." + echo " By default, 3.10, 3.11, 3.12 and 3.13 are installed as well." ############################## # INSTALL @@ -57,7 +57,13 @@ else ifeq ($(CONTINUE),1) else echo "uv could not be found." echo "Installing uv..." - curl -LsSf https://astral.sh/uv/install.sh | sh + if [ -n "$$VIRTUAL_ENV" ]; then \ + echo "Detected virtual environment at $$VIRTUAL_ENV, installing uv there..."; \ + curl -LsSf https://astral.sh/uv/install.sh | INSTALL_DIR="$$VIRTUAL_ENV/bin" sh; \ + else \ + echo "No virtual environment detected, installing uv globally..."; \ + curl -LsSf https://astral.sh/uv/install.sh | sh; \ + fi echo "uv installed." echo "Re-executing shell so uv is immediately available on PATH..." 
exec $$SHELL -c 'make install CONTINUE=1' @@ -68,8 +74,8 @@ endif ############################## .ONESHELL: install-python: - echo "Installing python 3.10, 3.11, 3.12" - uv python install 3.10 3.11 3.12 + echo "Installing python versions" + uv python install 3.10 3.11 3.12 3.13 ############################## # INSTALL-PRE-COMMIT @@ -87,7 +93,7 @@ install-sk: echo "Creating and activating venv for python $(PYTHON_VERSION)" uv venv --python $(PYTHON_VERSION) echo "Installing Semantic Kernel and all dependencies" - uv sync --all-extras --dev + uv sync --all-extras --dev --prerelease=if-necessary-or-explicit ############################## # CLEAN diff --git a/python/README.md b/python/README.md index 3e010df1102b..166d4ed38777 100644 --- a/python/README.md +++ b/python/README.md @@ -37,7 +37,9 @@ AZURE_OPENAI_ENDPOINT="" AZURE_OPENAI_API_KEY="" ``` -You will then configure the Text/ChatCompletion class with the keyword argument `env_file_path`: +Put the .env file in the root directory. + +If you place the .env in a different directory, configure the Text/ChatCompletion class with the keyword argument `env_file_path`: ```python chat_completion = OpenAIChatCompletion(service_id="test", env_file_path=) diff --git a/python/pyproject.toml b/python/pyproject.toml index 0dc38b0b57f9..4aa5d8114f31 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -34,7 +34,7 @@ dependencies = [ "numpy >= 1.25.0; python_version < '3.12'", "numpy >= 1.26.0; python_version >= '3.12'", # openai connector - "openai ~= 1.0", + "openai ~= 1.61", # openapi and swagger "openapi_core >= 0.18,<0.20", # OpenTelemetry @@ -45,12 +45,17 @@ dependencies = [ "pybars4 ~= 0.9", "jinja2 ~= 3.1", "nest-asyncio ~= 1.6", + "scipy>=1.15.1", ] ### Optional dependencies [project.optional-dependencies] +autogen = [ + "autogen-agentchat >= 0.2, <0.4" +] azure = [ "azure-ai-inference >= 1.0.0b6", + "azure-ai-projects >= 1.0.0b5", "azure-core-tracing-opentelemetry >= 1.0.0b11", "azure-search-documents >= 11.6.0b4", "azure-identity ~= 1.13", @@ -60,17 +65,17 @@ chroma = [ "chromadb >= 0.5,< 0.7" ] google = [ - "google-cloud-aiplatform ~= 1.60", - "google-generativeai ~= 0.7" + "google-cloud-aiplatform == 1.82.0", + "google-generativeai ~= 0.8" ] hugging_face = [ "transformers[torch] ~= 4.28", "sentence-transformers >= 2.2,< 4.0", - "torch == 2.5.1" + "torch == 2.6.0" ] mongo = [ - "pymongo >= 4.8.0, < 4.11", - "motor >= 3.3.2,< 3.7.0" + "pymongo >= 4.8.0, < 4.12", + "motor >= 3.3.2,< 3.8.0" ] notebooks = [ "ipykernel ~= 6.29" @@ -86,7 +91,7 @@ ollama = [ "ollama ~= 0.4" ] onnx = [ - "onnxruntime-genai ~= 0.5" + "onnxruntime-genai ~= 0.5; python_version < '3.13'" ] anthropic = [ "anthropic ~= 0.32" @@ -106,23 +111,27 @@ redis = [ "redisvl >= 0.3.6", ] usearch = [ - "usearch ~= 2.9", + "usearch ~= 2.16", "pyarrow >= 12.0,< 20.0" ] weaviate = [ - "weaviate-client>=4.7,<5.0", + "weaviate-client>=4.10,<5.0", ] pandas = [ "pandas ~= 2.2" ] aws = [ - "boto3>=1.28.57", + "boto3>=1.36.4,<1.38.0", ] dapr = [ "dapr>=1.14.0", "dapr-ext-fastapi>=1.14.0", "flask-dapr>=1.14.0" ] +realtime = [ + "websockets >= 13, < 15", + "aiortc>=1.9.0", +] [tool.uv] prerelease = "if-necessary-or-explicit" @@ -138,7 +147,7 @@ dev-dependencies = [ "snoop ~= 0.4", "mypy >= 1.10", "types-PyYAML ~= 6.0.12.20240311", - "ruff ~= 0.7", + "ruff ~= 0.9" ] environments = [ "sys_platform == 'darwin'", @@ -220,5 +229,3 @@ name = "semantic_kernel" [build-system] requires = ["flit-core >= 3.9,<4.0"] build-backend = "flit_core.buildapi" - - diff --git a/python/samples/README.md 
b/python/samples/README.md index 3062daa353f7..b7ed7617e1ed 100644 --- a/python/samples/README.md +++ b/python/samples/README.md @@ -1,9 +1,10 @@ -## Semantic Kernel Samples +# Semantic Kernel Samples | Type | Description | | ------------------------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------- | | [`getting_started`](./getting_started/CONFIGURING_THE_KERNEL.md) | Take this step by step tutorial to get started with Semantic Kernel and get introduced to the key concepts. | | [`getting_started_with_agents`](./getting_started_with_agents/README.md) | Take this step by step tutorial to get started with Semantic Kernel Agents and get introduced to the key concepts. | +| [`getting_started_with_processes`](./getting_started_with_processes/README.md) | Take this step by step tutorial to get started with Semantic Kernel Processes and get introduced to the key concepts. | | [`concepts`](./concepts/README.md) | This section contains focused samples which illustrate all of the concepts included in Semantic Kernel. | | [`demos`](./demos/README.md) | Look here to find a sample which demonstrate how to use many of Semantic Kernel features. | | [`learn_resources`](./learn_resources/README.md) | Code snippets that are related to online documentation sources like Microsoft Learn, DevBlogs and others | diff --git a/python/samples/SAMPLE_GUIDELINES.md b/python/samples/SAMPLE_GUIDELINES.md new file mode 100644 index 000000000000..46815a464115 --- /dev/null +++ b/python/samples/SAMPLE_GUIDELINES.md @@ -0,0 +1,81 @@ +# Sample Guidelines + +Samples are extremely important for developers to get started with Semantic Kernel. We strive to provide a wide range of samples that demonstrate the capabilities of Semantic Kernel with consistency and quality. This document outlines the guidelines for creating samples. + +## General Guidelines + +- **Clear and Concise**: Samples should be clear and concise. They should demonstrate a specific set of features or capabilities of Semantic Kernel. The less concepts a sample demonstrates, the better. +- **Consistent Structure**: All samples should have a consistent structure. This includes the folder structure, file naming, and the content of the sample. +- **Incremental Complexity**: Samples should start simple and gradually increase in complexity. This helps developers understand the concepts and features of Semantic Kernel. +- **Documentation**: Samples should be over-documented. + +### **Clear and Concise** + +Try not to include too many concepts in a single sample. The goal is to demonstrate a specific feature or capability of Semantic Kernel. If you find yourself including too many concepts, consider breaking the sample into multiple samples. A good example of this is to break non-streaming and streaming modes into separate samples. + +### **Consistent Structure** + +#### Getting Started Samples + +The getting started samples are the simplest samples that require minimal setup. These samples should be named in the following format: `step_.py`. One exception to this rule is when the sample is a notebook, in which case the sample should be named in the following format: `_.ipynb`. + +#### Concept Samples + +Concept samples under [./concepts](./concepts) should be grouped by feature or capability. These samples should be relatively short and demonstrate a specific concept. These samples are more advanced than the getting started samples. 
+
+#### Demos
+
+Demos under [./demos](./demos) are full console applications that demonstrate a specific set of features or capabilities of Semantic Kernel, potentially with external dependencies. Each of the demos should have a README.md file that explains the purpose of the demo and how to run it.
+
+### **Incremental Complexity**
+
+Make a best effort to ensure that the samples build up in complexity incrementally. For example, in the getting started samples, each step should build on the previous step; the concept samples should build on the getting started samples, and the demos should build on the concept samples.
+
+### **Documentation**
+
+Try to over-document the samples. This includes comments in the code, README.md files, and any other documentation that is necessary to understand the sample. We use the guidance from [PEP8](https://peps.python.org/pep-0008/#comments) for comments in the code, with a deviation for the initial summary comment in samples and the output of the samples.
+
+For the getting started samples and the concept samples, we should have the following:
+
+1. A README.md file is included in each set of samples that explains the purpose of the samples and the setup required to run them.
+2. A summary should be included at the top of the file that explains the purpose of the sample and required components/concepts to understand the sample. For example:
+
+    ```python
+    '''
+    This sample shows how to create a chatbot. This sample uses the following two main components:
+    - a ChatCompletionService: This component is responsible for generating responses to user messages.
+    - a ChatHistory: This component is responsible for keeping track of the chat history.
+    The chatbot in this sample is called Mosscap, who responds to user messages with long flowery prose.
+    '''
+    ```
+
+3. Mark the code with comments to explain the purpose of each section of the code. For example:
+
+    ```python
+    # 1. Create the instance of the Kernel to register the plugin and service.
+    ...
+
+    # 2. Create the agent with the kernel instance.
+    ...
+    ```
+
+    > This will also allow the sample creator to track if the sample is getting too complex.
+
+4. At the end of the sample, include a section that explains the expected output of the sample. For example:
+
+    ```python
+    '''
+    Sample output:
+    User:> Why is the sky blue in one sentence?
+    Mosscap:> The sky is blue due to the scattering of sunlight by the molecules in the Earth's atmosphere,
+    a phenomenon known as Rayleigh scattering, which causes shorter blue wavelengths to become more
+    prominent in our visual perception.
+    '''
+    ```
+
+For the demos, a README.md file must be included that explains the purpose of the demo and how to run it. The README.md file should include the following:
+
+- A description of the demo.
+- A list of dependencies required to run the demo.
+- Instructions on how to run the demo.
+- Expected output of the demo.
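Taken together, the conventions above amount to a file skeleton like the sketch below; the summary text, step comments, and sample output are illustrative placeholders assembled from the guideline snippets, not an existing sample.

```python
# Copyright (c) Microsoft. All rights reserved.

import asyncio

"""
This sample shows how to create a minimal chatbot. It uses the following two main components:
- a ChatCompletionService: responsible for generating responses to user messages.
- a ChatHistory: responsible for keeping track of the chat history.
"""


async def main():
    # 1. Create the instance of the Kernel to register the plugin and service.
    ...

    # 2. Create the agent with the kernel instance.
    ...

    # 3. Invoke the agent and print the response.
    ...


if __name__ == "__main__":
    asyncio.run(main())

"""
Sample output:
User:> Why is the sky blue in one sentence?
Assistant:> The sky is blue because air molecules scatter shorter blue wavelengths of sunlight more than longer ones.
"""
```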
diff --git a/python/samples/concepts/README.md b/python/samples/concepts/README.md index 22f0496e43e6..72fe6258f876 100644 --- a/python/samples/concepts/README.md +++ b/python/samples/concepts/README.md
@@ -4,18 +4,53 @@ ### Agents - Creating and using [agents](../../semantic_kernel/agents/) in Semantic Kernel
-- [Assistant Agent Chart Maker](./agents/assistant_agent_chart_maker.py)
-- [Assistant Agent File Manipulation](./agents/assistant_agent_file_manipulation.py)
-- [Assistant Agent File Manipulation Streaming](./agents/assistant_agent_file_manipulation_streaming.py)
-- [Assistant Agent Retrieval](./agents/assistant_agent_retrieval.py)
-- [Assistant Agent Streaming](./agents/assistant_agent_streaming.py)
-- [Chat Completion Function Termination](./agents/chat_completion_function_termination.py)
-- [Chat Completion History Reducer](./agents/chat_completion_history_reducer.py)
-- [Mixed Chat Agents](./agents/mixed_chat_agents.py)
-- [Mixed Chat Agents Plugins](./agents/mixed_chat_agents_plugins.py)
-- [Mixed Chat Files](./agents/mixed_chat_files.py)
-- [Mixed Chat Reset](./agents/mixed_chat_reset.py)
-- [Mixed Chat Streaming](./agents/mixed_chat_streaming.py)
+#### [OpenAI Assistant Agent](../../semantic_kernel/agents/open_ai/open_ai_assistant_agent.py)
+
+- [OpenAI Assistant Chart Maker Streaming](./agents/openai_assistant/openai_assistant_chart_maker_streaming.py)
+- [OpenAI Assistant Chart Maker](./agents/openai_assistant/openai_assistant_chart_maker.py)
+- [OpenAI Assistant File Manipulation Streaming](./agents/openai_assistant/openai_assistant_file_manipulation_streaming.py)
+- [OpenAI Assistant File Manipulation](./agents/openai_assistant/openai_assistant_file_manipulation.py)
+- [OpenAI Assistant Retrieval](./agents/openai_assistant/openai_assistant_retrieval.py)
+- [OpenAI Assistant Streaming](./agents/openai_assistant/openai_assistant_streaming.py)
+- [OpenAI Assistant Structured Outputs](./agents/openai_assistant/openai_assistant_structured_outputs.py)
+- [OpenAI Assistant Templating Streaming](./agents/openai_assistant/openai_assistant_templating_streaming.py)
+- [OpenAI Assistant Vision Streaming](./agents/openai_assistant/openai_assistant_vision_streaming.py)
+
+#### [Azure AI Agent](../../semantic_kernel/agents/azure_ai/azure_ai_agent.py)
+
+- [Azure AI Agent with Azure AI Search](./agents/azure_ai_agent/azure_ai_agent_azure_ai_search.py)
+- [Azure AI Agent File Manipulation](./agents/azure_ai_agent/azure_ai_agent_file_manipulation.py)
+- [Azure AI Agent Streaming](./agents/azure_ai_agent/azure_ai_agent_streaming.py)
+
+#### [Bedrock Agent](../../semantic_kernel/agents/bedrock/bedrock_agent.py)
+
+- [Bedrock Agent Simple Chat Streaming](./agents/bedrock_agent/bedrock_agent_simple_chat_streaming.py)
+- [Bedrock Agent Simple Chat](./agents/bedrock_agent/bedrock_agent_simple_chat.py)
+- [Bedrock Agent With Code Interpreter Streaming](./agents/bedrock_agent/bedrock_agent_with_code_interpreter_streaming.py)
+- [Bedrock Agent With Code Interpreter](./agents/bedrock_agent/bedrock_agent_with_code_interpreter.py)
+- [Bedrock Agent With Kernel Function Simple](./agents/bedrock_agent/bedrock_agent_with_kernel_function_simple.py)
+- [Bedrock Agent With Kernel Function Streaming](./agents/bedrock_agent/bedrock_agent_with_kernel_function_streaming.py)
+- [Bedrock Agent With Kernel Function](./agents/bedrock_agent/bedrock_agent_with_kernel_function.py)
+- [Bedrock 
Agent Mixed Chat Agents Streaming](./agents/bedrock_agent/bedrock_mixed_chat_agents_streaming.py) +- [Bedrock Agent Mixed Chat Agents](./agents/bedrock_agent/bedrock_mixed_chat_agents.py) + +#### [Chat Completion Agent](../../semantic_kernel/agents/chat_completion/chat_completion_agent.py) + +- [Chat Completion Function Termination](./agents/chat_completion_agent/chat_completion_function_termination.py) +- [Chat Completion Templating](./agents/chat_completion_agent/chat_completion_prompt_templating.py) +- [Chat Completion Summary History Reducer Agent Chat](./agents/chat_completion_agent/chat_completion_summary_history_reducer_agent_chat.py) +- [Chat Completion Summary History Reducer Single Agent](./agents/chat_completion_agent/chat_completion_summary_history_reducer_single_agent.py) +- [Chat Completion Truncate History Reducer Agent Chat](./agents/chat_completion_agent/chat_completion_truncate_history_reducer_agent_chat.py) +- [Chat Completion Truncate History Reducer Single Agent](./agents/chat_completion_agent/chat_completion_truncate_history_reducer_single_agent.py) + +#### [Mixed Agent Group Chat](../../semantic_kernel/agents/group_chat/agent_group_chat.py) + +- [Mixed Chat Agents Plugins](./agents/mixed_chat/mixed_chat_agents_plugins.py) +- [Mixed Chat Agents](./agents/mixed_chat/mixed_chat_agents.py) +- [Mixed Chat Files](./agents/mixed_chat/mixed_chat_files.py) +- [Mixed Chat Images](./agents/mixed_chat/mixed_chat_images.py) +- [Mixed Chat Reset](./agents/mixed_chat/mixed_chat_reset.py) +- [Mixed Chat Streaming](./agents/mixed_chat/mixed_chat_streaming.py) ### Audio - Using services that support audio-to-text and text-to-audio conversion @@ -53,6 +89,7 @@ ### ChatHistory - Using and serializing the [`ChatHistory`](https://github.com/microsoft/semantic-kernel/blob/main/python/semantic_kernel/contents/chat_history.py) - [Serialize Chat History](./chat_history/serialize_chat_history.py) +- [Store Chat History in CosmosDB](./chat_history/store_chat_history_in_cosmosdb.py) ### Filtering - Creating and using Filters @@ -83,12 +120,13 @@ - [Setup Logging](./logging/setup_logging.py) -### Memory - Using [`Memory`](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/SemanticKernel.Abstractions/Memory) AI concepts +### Memory - Using [`Memory`](https://learn.microsoft.com/en-us/semantic-kernel/concepts/vector-store-connectors/?pivots=programming-language-python) AI concepts -- [Azure Cognitive Search Memory](./memory/azure_cognitive_search_memory.py) +- [Simple Memory](./memory/simple_memory.py) - [Memory Data Models](./memory/data_models.py) -- [New Memory](./memory/new_memory.py) -- [Pandas Memory](./memory/pandas_memory.py) +- [Memory with Pandas Dataframes](./memory/memory_with_pandas.py) +- [Complex memory](./memory/complex_memory.py) +- [Full sample with Azure AI Search including function calling](./memory/azure_ai_search_hotel_samples/README.md) ### Model-as-a-Service - Using models deployed as [`serverless APIs on Azure AI Studio`](https://learn.microsoft.com/en-us/azure/ai-studio/how-to/deploy-models-serverless?tabs=azure-ai-studio) to benchmark model performance against open-source datasets @@ -100,12 +138,6 @@ - [Azure Chat GPT with Data API Function Calling](./on_your_data/azure_chat_gpt_with_data_api_function_calling.py) - [Azure Chat GPT with Data API Vector Search](./on_your_data/azure_chat_gpt_with_data_api_vector_search.py) -### Planners - Showing the uses of [`Planners`](https://github.com/microsoft/semantic-kernel/tree/main/python/semantic_kernel/planners) - 
-- [Sequential Planner](./planners/sequential_planner.py) -- [OpenAI Function Calling Stepwise Planner](./planners/openai_function_calling_stepwise_planner.py) -- [Azure OpenAI Function Calling Stepwise Planner](./planners/azure_openai_function_calling_stepwise_planner.py) - ### Plugins - Different ways of creating and using [`Plugins`](https://github.com/microsoft/semantic-kernel/blob/main/python/semantic_kernel/functions/kernel_plugin.py) - [Azure Key Vault Settings](./plugins/azure_key_vault_settings.py) @@ -170,7 +202,7 @@ In Semantic Kernel for Python, we leverage Pydantic Settings to manage configura 1. **Reading Environment Variables:** - **Primary Source:** Pydantic first attempts to read the required settings from environment variables. - + 2. **Using a .env File:** - **Fallback Source:** If the required environment variables are not set, Pydantic will look for a `.env` file in the current working directory. - **Custom Path (Optional):** You can specify an alternative path for the `.env` file via `env_file_path`. This can be either a relative or an absolute path. @@ -188,4 +220,4 @@ To successfully retrieve and use the Entra Auth Token, you need the `Cognitive S - **.env File Placement:** We highly recommend placing the `.env` file in the `semantic-kernel/python` root directory. This is a common practice when developing in the Semantic Kernel repository. -By following these guidelines, you can ensure that your settings for various components are configured correctly, enabling seamless functionality and integration of Semantic Kernel in your Python projects. \ No newline at end of file +By following these guidelines, you can ensure that your settings for various components are configured correctly, enabling seamless functionality and integration of Semantic Kernel in your Python projects. diff --git a/python/samples/concepts/agents/README.md b/python/samples/concepts/agents/README.md index 1260395f88f2..dad64006c78e 100644 --- a/python/samples/concepts/agents/README.md +++ b/python/samples/concepts/agents/README.md @@ -2,13 +2,17 @@ This project contains a step by step guide to get started with _Semantic Kernel Agents_ in Python. -#### PyPI: +## PyPI: + - For the use of Chat Completion agents, the minimum allowed Semantic Kernel pypi version is 1.3.0. - For the use of OpenAI Assistant agents, the minimum allowed Semantic Kernel pypi version is 1.4.0. - For the use of Agent Group Chat, the minimum allowed Semantic kernel pypi version is 1.6.0. -- For the use of Streaming OpenAI Assistant agents, the minimum allowed Semantic Kernel pypi version is 1.11.0 +- For the use of Streaming OpenAI Assistant agents, the minimum allowed Semantic Kernel pypi version is 1.11.0. +- For the use of AzureAI and Bedrock agents, the minimum allowed Semantic Kernel pypi version is 1.21.0. +- For the use of Crew.AI as a plugin, the minimum allowed Semantic Kernel pypi version is 1.21.1. + -#### Source +## Source - [Semantic Kernel Agent Framework](../../../semantic_kernel/agents/) @@ -19,11 +23,12 @@ The concept agents examples are grouped by prefix: Prefix|Description ---|--- assistant|How to use agents based on the [Open AI Assistant API](https://platform.openai.com/docs/assistants). -chat_completion|How to use Semantic Kernel Chat Completion agents. +autogen_conversable_agent| How to use [AutoGen 0.2 Conversable Agents](https://microsoft.github.io/autogen/0.2/docs/Getting-Started) within Semantic Kernel. 
+azure_ai_agent|How to use an [Azure AI Agent](https://learn.microsoft.com/en-us/azure/ai-services/agents/quickstart?pivots=programming-language-python-azure) within Semantic Kernel. +chat_completion_agent|How to use Semantic Kernel Chat Completion agents that leverage AI Connector Chat Completion APIs. +bedrock|How to use [AWS Bedrock agents](https://aws.amazon.com/bedrock/agents/) in Semantic Kernel. mixed_chat|How to combine different agent types. -complex_chat|**Coming Soon** - -*Note: As we strive for parity with .NET, more getting_started_with_agent samples will be added. The current steps and names may be revised to further align with our .NET counterpart.* +openai_assistant|How to use [OpenAI Assistants](https://platform.openai.com/docs/assistants/overview) in Semantic Kernel. ## Configuring the Kernel diff --git a/python/samples/concepts/agents/assistant_agent_chart_maker.py b/python/samples/concepts/agents/assistant_agent_chart_maker.py deleted file mode 100644 index 34cfd77f40c0..000000000000 --- a/python/samples/concepts/agents/assistant_agent_chart_maker.py +++ /dev/null @@ -1,112 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio - -from semantic_kernel.agents.open_ai import AzureAssistantAgent, OpenAIAssistantAgent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.file_reference_content import FileReferenceContent -from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -##################################################################### -# The following sample demonstrates how to create an OpenAI # -# assistant using either Azure OpenAI or OpenAI and leverage the # -# assistant and leverage the assistant's file search functionality. # -##################################################################### - -AGENT_NAME = "ChartMaker" -AGENT_INSTRUCTIONS = "Create charts as requested without explanation." 
- -# Note: you may toggle this to switch between AzureOpenAI and OpenAI -use_azure_openai = True - -streaming = True - - -# A helper method to invoke the agent with the user input -async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None: - """Invoke the agent with the user input.""" - await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input)) - - print(f"# {AuthorRole.USER}: '{input}'") - - if streaming: - first_chunk = True - async for message in agent.invoke_stream(thread_id=thread_id): - if message.content: - if first_chunk: - print(f"# {message.role}: ", end="", flush=True) - first_chunk = False - print(message.content, end="", flush=True) - - if len(message.items) > 0: - for item in message.items: - if isinstance(item, StreamingFileReferenceContent): - print(f"\n# {message.role} => {item.file_id}") - print() - else: - async for message in agent.invoke(thread_id=thread_id): - if message.content: - print(f"# {message.role}: {message.content}") - - if len(message.items) > 0: - for item in message.items: - if isinstance(item, FileReferenceContent): - print(f"\n`{message.role}` => {item.file_id}") - - -async def main(): - # Create the instance of the Kernel - kernel = Kernel() - - # Define a service_id for the sample - service_id = "agent" - - # Create the agent configuration - if use_azure_openai: - agent = await AzureAssistantAgent.create( - kernel=kernel, - service_id=service_id, - name=AGENT_NAME, - instructions=AGENT_INSTRUCTIONS, - enable_code_interpreter=True, - ) - else: - agent = await OpenAIAssistantAgent.create( - kernel=kernel, - service_id=service_id, - name=AGENT_NAME, - instructions=AGENT_INSTRUCTIONS, - enable_code_interpreter=True, - ) - - # Define a thread and invoke the agent with the user input - thread_id = await agent.create_thread() - - try: - await invoke_agent( - agent, - thread_id=thread_id, - input=""" - Display this data using a bar-chart: - - Banding Brown Pink Yellow Sum - X00000 339 433 126 898 - X00300 48 421 222 691 - X12345 16 395 352 763 - Others 23 373 156 552 - Sum 426 1622 856 2904 - """, - ) - await invoke_agent( - agent, - thread_id=thread_id, - input="Can you regenerate this same chart using the category names as the bar colors?", - ) - finally: - await agent.delete_thread(thread_id) - await agent.delete() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/assistant_agent_file_manipulation.py b/python/samples/concepts/agents/assistant_agent_file_manipulation.py deleted file mode 100644 index ff13f38a5504..000000000000 --- a/python/samples/concepts/agents/assistant_agent_file_manipulation.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio -import os - -from semantic_kernel.agents.open_ai import OpenAIAssistantAgent -from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent -from semantic_kernel.contents.annotation_content import AnnotationContent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -##################################################################### -# The following sample demonstrates how to create an OpenAI # -# assistant using either Azure OpenAI or OpenAI and leverage the # -# assistant's ability to have the code interpreter work with # -# uploaded files. 
# -##################################################################### - -AGENT_NAME = "FileManipulation" -AGENT_INSTRUCTIONS = "Find answers to the user's questions in the provided file." - - -# A helper method to invoke the agent with the user input -async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None: - """Invoke the agent with the user input.""" - await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input)) - - print(f"# {AuthorRole.USER}: '{input}'") - - async for content in agent.invoke(thread_id=thread_id): - print(f"# {content.role}: {content.content}") - - if len(content.items) > 0: - for item in content.items: - if isinstance(item, AnnotationContent): - print(f"\n`{item.quote}` => {item.file_id}") - response_content = await agent.client.files.content(item.file_id) - print(response_content.text) - - -async def main(): - # Create the instance of the Kernel - kernel = Kernel() - - # Define a service_id for the sample - service_id = "agent" - - # Get the path to the sales.csv file - csv_file_path = os.path.join( - os.path.dirname(os.path.dirname(os.path.realpath(__file__))), - "resources", - "agent_assistant_file_manipulation", - "sales.csv", - ) - - # Create the assistant agent - agent = await AzureAssistantAgent.create( - kernel=kernel, - service_id=service_id, - name=AGENT_NAME, - instructions=AGENT_INSTRUCTIONS, - enable_code_interpreter=True, - code_interpreter_filenames=[csv_file_path], - ) - - # Create a thread and specify the file to use for code interpretation - thread_id = await agent.create_thread() - - try: - await invoke_agent(agent, thread_id=thread_id, input="Which segment had the most sales?") - await invoke_agent(agent, thread_id=thread_id, input="List the top 5 countries that generated the most profit.") - await invoke_agent( - agent, - thread_id=thread_id, - input="Create a tab delimited file report of profit by each country per month.", - ) - finally: - if agent is not None: - [await agent.delete_file(file_id) for file_id in agent.code_interpreter_file_ids] - await agent.delete_thread(thread_id) - await agent.delete() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/assistant_agent_file_manipulation_streaming.py b/python/samples/concepts/agents/assistant_agent_file_manipulation_streaming.py deleted file mode 100644 index 8d9df1e458be..000000000000 --- a/python/samples/concepts/agents/assistant_agent_file_manipulation_streaming.py +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio -import os - -from semantic_kernel.agents.open_ai import OpenAIAssistantAgent -from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -##################################################################### -# The following sample demonstrates how to create an OpenAI # -# assistant using either Azure OpenAI or OpenAI and leverage the # -# assistant's ability to stream the response and have the code # -# interpreter work with uploaded files # -##################################################################### - -AGENT_NAME = "FileManipulation" -AGENT_INSTRUCTIONS = "Find answers to the user's questions in the provided file." 
- - -# A helper method to invoke the agent with the user input -async def invoke_streaming_agent(agent: OpenAIAssistantAgent | AzureAssistantAgent, thread_id: str, input: str) -> None: - """Invoke the streaming agent with the user input.""" - await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input)) - - print(f"# {AuthorRole.USER}: '{input}'") - - first_chunk = True - async for content in agent.invoke_stream(thread_id=thread_id): - if content.role != AuthorRole.TOOL: - if first_chunk: - print(f"# {content.role}: ", end="", flush=True) - first_chunk = False - print(content.content, end="", flush=True) - elif content.role == AuthorRole.TOOL and content.metadata.get("code"): - print("") - print(f"# {content.role} (code):\n\n{content.content}") - print() - - -async def main(): - # Create the instance of the Kernel - kernel = Kernel() - - # Define a service_id for the sample - service_id = "agent" - - # Get the path to the sales.csv file - csv_file_path = os.path.join( - os.path.dirname(os.path.dirname(os.path.realpath(__file__))), - "resources", - "agent_assistant_file_manipulation", - "sales.csv", - ) - - # Create the assistant agent - agent = await AzureAssistantAgent.create( - kernel=kernel, - service_id=service_id, - name=AGENT_NAME, - instructions=AGENT_INSTRUCTIONS, - enable_code_interpreter=True, - code_interpreter_filenames=[csv_file_path], - ) - - # Create a thread and specify the file to use for code interpretation - thread_id = await agent.create_thread() - - try: - await invoke_streaming_agent(agent, thread_id=thread_id, input="Which segment had the most sales?") - await invoke_streaming_agent( - agent, thread_id=thread_id, input="List the top 5 countries that generated the most profit." - ) - await invoke_streaming_agent( - agent, - thread_id=thread_id, - input="Create a tab delimited file report of profit by each country per month.", - ) - finally: - if agent is not None: - [await agent.delete_file(file_id) for file_id in agent.code_interpreter_file_ids] - await agent.delete_thread(thread_id) - await agent.delete() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/assistant_agent_retrieval.py b/python/samples/concepts/agents/assistant_agent_retrieval.py deleted file mode 100644 index a3ea4e81b4ec..000000000000 --- a/python/samples/concepts/agents/assistant_agent_retrieval.py +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio - -from semantic_kernel.agents.open_ai import AzureAssistantAgent, OpenAIAssistantAgent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.file_reference_content import FileReferenceContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -##################################################################### -# The following sample demonstrates how to create an OpenAI # -# assistant using either Azure OpenAI or OpenAI and retrieve the # -# assistant using the `retrieve` class method. # -##################################################################### - -AGENT_NAME = "JokeTeller" -AGENT_INSTRUCTIONS = "You are a funny comedian who loves telling G-rated jokes." 
- -# Note: you may toggle this to switch between AzureOpenAI and OpenAI -use_azure_openai = True - - -# A helper method to invoke the agent with the user input -async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None: - """Invoke the agent with the user input.""" - await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input)) - - print(f"# {AuthorRole.USER}: '{input}'") - - async for message in agent.invoke(thread_id=thread_id): - if message.content: - print(f"# {message.role}: {message.content}") - - if len(message.items) > 0: - for item in message.items: - if isinstance(item, FileReferenceContent): - print(f"\n`{message.role}` => {item.file_id}") - - -async def main(): - # Create the instance of the Kernel - kernel = Kernel() - - # Define a service_id for the sample - service_id = "agent" - - # Specify an assistant ID which is used - # to retrieve the agent - assistant_id: str = None - - # Create the agent configuration - if use_azure_openai: - agent = await AzureAssistantAgent.create( - kernel=kernel, - service_id=service_id, - name=AGENT_NAME, - instructions=AGENT_INSTRUCTIONS, - enable_code_interpreter=True, - ) - - assistant_id = agent.assistant.id - - retrieved_agent: AzureAssistantAgent = await AzureAssistantAgent.retrieve( - id=assistant_id, - kernel=kernel, - ) - else: - agent = await OpenAIAssistantAgent.create( - kernel=kernel, - service_id=service_id, - name=AGENT_NAME, - instructions=AGENT_INSTRUCTIONS, - enable_code_interpreter=True, - ) - - assistant_id = agent.assistant.id - - # Retrieve the agent using the assistant_id - retrieved_agent: OpenAIAssistantAgent = await OpenAIAssistantAgent.retrieve( - id=assistant_id, - kernel=kernel, - ) - - # Define a thread and invoke the agent with the user input - thread_id = await retrieved_agent.create_thread() - - try: - await invoke_agent(retrieved_agent, thread_id, "Tell me a joke about bears.") - finally: - await agent.delete() - await retrieved_agent.delete_thread(thread_id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/assistant_agent_streaming.py b/python/samples/concepts/agents/assistant_agent_streaming.py deleted file mode 100644 index 64439ba4e7c1..000000000000 --- a/python/samples/concepts/agents/assistant_agent_streaming.py +++ /dev/null @@ -1,110 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio -from typing import Annotated - -from semantic_kernel.agents.open_ai import AzureAssistantAgent, OpenAIAssistantAgent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.functions.kernel_function_decorator import kernel_function -from semantic_kernel.kernel import Kernel - -##################################################################### -# The following sample demonstrates how to create an OpenAI # -# assistant using either Azure OpenAI or OpenAI. OpenAI Assistants # -# allow for function calling, the use of file search and a # -# code interpreter. Assistant Threads are used to manage the # -# conversation state, similar to a Semantic Kernel Chat History. # -# This sample also demonstrates the Assistants Streaming # -# capability and how to manage an Assistants chat history. # -##################################################################### - -HOST_NAME = "Host" -HOST_INSTRUCTIONS = "Answer questions about the menu." 
- -# Note: you may toggle this to switch between AzureOpenAI and OpenAI -use_azure_openai = True - - -# Define a sample plugin for the sample -class MenuPlugin: - """A sample Menu Plugin used for the concept sample.""" - - @kernel_function(description="Provides a list of specials from the menu.") - def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: - return """ - Special Soup: Clam Chowder - Special Salad: Cobb Salad - Special Drink: Chai Tea - """ - - @kernel_function(description="Provides the price of the requested menu item.") - def get_item_price( - self, menu_item: Annotated[str, "The name of the menu item."] - ) -> Annotated[str, "Returns the price of the menu item."]: - return "$9.99" - - -# A helper method to invoke the agent with the user input -async def invoke_agent( - agent: OpenAIAssistantAgent, thread_id: str, input: str, history: list[ChatMessageContent] -) -> None: - """Invoke the agent with the user input.""" - message = ChatMessageContent(role=AuthorRole.USER, content=input) - await agent.add_chat_message(thread_id=thread_id, message=message) - - # Add the user message to the history - history.append(message) - - print(f"# {AuthorRole.USER}: '{input}'") - - first_chunk = True - async for content in agent.invoke_stream(thread_id=thread_id, messages=history): - if content.role != AuthorRole.TOOL: - if first_chunk: - print(f"# {content.role}: ", end="", flush=True) - first_chunk = False - print(content.content, end="", flush=True) - print() - - -async def main(): - # Create the instance of the Kernel - kernel = Kernel() - - # Add the sample plugin to the kernel - kernel.add_plugin(plugin=MenuPlugin(), plugin_name="menu") - - # Create the OpenAI Assistant Agent - service_id = "agent" - if use_azure_openai: - agent = await AzureAssistantAgent.create( - kernel=kernel, service_id=service_id, name=HOST_NAME, instructions=HOST_INSTRUCTIONS - ) - else: - agent = await OpenAIAssistantAgent.create( - kernel=kernel, service_id=service_id, name=HOST_NAME, instructions=HOST_INSTRUCTIONS - ) - - thread_id = await agent.create_thread() - - history: list[ChatMessageContent] = [] - - try: - await invoke_agent(agent, thread_id=thread_id, input="Hello", history=history) - await invoke_agent(agent, thread_id=thread_id, input="What is the special soup?", history=history) - await invoke_agent(agent, thread_id=thread_id, input="What is the special drink?", history=history) - await invoke_agent(agent, thread_id=thread_id, input="Thank you", history=history) - finally: - await agent.delete_thread(thread_id) - await agent.delete() - - # You may then view the conversation history - print("========= Conversation History =========") - for content in history: - if content.role != AuthorRole.TOOL: - print(f"# {content.role}: {content.content}") - print("========= End of Conversation History =========") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/autogen_conversable_agent/README.md b/python/samples/concepts/agents/autogen_conversable_agent/README.md new file mode 100644 index 000000000000..f8fc8973ca9d --- /dev/null +++ b/python/samples/concepts/agents/autogen_conversable_agent/README.md @@ -0,0 +1,20 @@ +## AutoGen Conversable Agent (v0.2.X) + +Semantic Kernel Python supports running AutoGen Conversable Agents provided in the 0.2.X package. 
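For orientation, the wrapping pattern looks roughly like the sketch below, condensed from the samples added later in this patch; the agent name, system message, and the `OPENAI_CHAT_MODEL_ID`/`OPENAI_API_KEY` environment variables are illustrative assumptions about your configuration.

```python
import asyncio
import os

from autogen import ConversableAgent

from semantic_kernel.agents.autogen.autogen_conversable_agent import AutoGenConversableAgent


async def main():
    # Create a plain AutoGen 0.2 ConversableAgent backed by an OpenAI model.
    comedian = ConversableAgent(
        "comedian",
        system_message="You are a comedian who tells short, clean jokes.",
        llm_config={
            "config_list": [
                {"model": os.environ["OPENAI_CHAT_MODEL_ID"], "api_key": os.environ["OPENAI_API_KEY"]}
            ]
        },
        human_input_mode="NEVER",  # Never ask for human input.
    )

    # Wrap it in the Semantic Kernel agent abstraction and invoke it (responses are not streamed).
    agent = AutoGenConversableAgent(conversable_agent=comedian)
    async for content in agent.invoke(message="Tell me a joke about the stock market."):
        print(f"# {content.role} - {content.name or '*'}: '{content.content}'")


if __name__ == "__main__":
    asyncio.run(main())
```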
+ +### Limitations + +Currently, there are some limitations to note: + +- AutoGen Conversable Agents in Semantic Kernel run asynchronously and do not support streaming of agent inputs or responses. +- The `AutoGenConversableAgent` in Semantic Kernel Python cannot be configured as part of a Semantic Kernel `AgentGroupChat`. As we progress towards GA for our agent group chat patterns, we will explore ways to integrate AutoGen agents into a Semantic Kernel group chat scenario. + +### Installation + +Install the `semantic-kernel` package with the `autogen` extra: + +```bash +pip install semantic-kernel[autogen] +``` + +For an example of how to integrate an AutoGen Conversable Agent using the Semantic Kernel Agent abstraction, please refer to [`autogen_conversable_agent_simple_convo.py`](autogen_conversable_agent_simple_convo.py). \ No newline at end of file diff --git a/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_code_executor.py b/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_code_executor.py new file mode 100644 index 000000000000..d557bad86fe1 --- /dev/null +++ b/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_code_executor.py @@ -0,0 +1,61 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from autogen import ConversableAgent +from autogen.coding import LocalCommandLineCodeExecutor + +from semantic_kernel.agents.autogen.autogen_conversable_agent import AutoGenConversableAgent + +""" +The following sample demonstrates how to use the AutoGenConversableAgent to create a reply from an agent +to a message with a code block. The agent executes the code block and replies with the output. + +The sample follows the AutoGen flow outlined here: +https://microsoft.github.io/autogen/0.2/docs/tutorial/code-executors#local-execution +""" + + +async def main(): + # Create a temporary directory to store the code files. + import os + + # Configure the temporary directory to be where the script is located. + temp_dir = os.path.dirname(os.path.realpath(__file__)) + + # Create a local command line code executor. + executor = LocalCommandLineCodeExecutor( + timeout=10, # Timeout for each code execution in seconds. + work_dir=temp_dir, # Use the temporary directory to store the code files. + ) + + # Create an agent with code executor configuration. + code_executor_agent = ConversableAgent( + "code_executor_agent", + llm_config=False, # Turn off LLM for this agent. + code_execution_config={"executor": executor}, # Use the local command line code executor. + human_input_mode="ALWAYS", # Always take human input for this agent for safety. + ) + + autogen_agent = AutoGenConversableAgent(conversable_agent=code_executor_agent) + + message_with_code_block = """This is a message with code block. +The code block is below: +```python +import numpy as np +import matplotlib.pyplot as plt +x = np.random.randint(0, 100, 100) +y = np.random.randint(0, 100, 100) +plt.scatter(x, y) +plt.savefig('scatter.png') +print('Scatter plot saved to scatter.png') +``` +This is the end of the message. 
+""" + + async for content in autogen_agent.invoke(message=message_with_code_block): + print(f"# {content.role} - {content.name or '*'}: '{content.content}'") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_convo_with_tools.py b/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_convo_with_tools.py new file mode 100644 index 000000000000..f807ff93d122 --- /dev/null +++ b/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_convo_with_tools.py @@ -0,0 +1,95 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os +from typing import Annotated, Literal + +from autogen import ConversableAgent, register_function + +from semantic_kernel.agents.autogen.autogen_conversable_agent import AutoGenConversableAgent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent + +""" +The following sample demonstrates how to use the AutoGenConversableAgent to create a conversation between two agents +where one agent suggests a tool function call and the other agent executes the tool function call. + +In this example, the assistant agent suggests a calculator tool function call to the user proxy agent. The user proxy +agent executes the calculator tool function call. The assistant agent and the user proxy agent are created using the +ConversableAgent class. The calculator tool function is registered with the assistant agent and the user proxy agent. + +This sample follows the AutoGen flow outlined here: +https://microsoft.github.io/autogen/0.2/docs/tutorial/tool-use +""" + + +Operator = Literal["+", "-", "*", "/"] + + +async def main(): + def calculator(a: int, b: int, operator: Annotated[Operator, "operator"]) -> int: + if operator == "+": + return a + b + if operator == "-": + return a - b + if operator == "*": + return a * b + if operator == "/": + return int(a / b) + raise ValueError("Invalid operator") + + assistant = ConversableAgent( + name="Assistant", + system_message="You are a helpful AI assistant. " + "You can help with simple calculations. " + "Return 'TERMINATE' when the task is done.", + # Note: the model "gpt-4o" leads to a "division by zero" error that doesn't occur with "gpt-4o-mini" + # or even "gpt-4". + llm_config={ + "config_list": [{"model": os.environ["OPENAI_CHAT_MODEL_ID"], "api_key": os.environ["OPENAI_API_KEY"]}] + }, + ) + + # Create a Semantic Kernel AutoGenConversableAgent based on the AutoGen ConversableAgent. + assistant_agent = AutoGenConversableAgent(conversable_agent=assistant) + + user_proxy = ConversableAgent( + name="User", + llm_config=False, + is_termination_msg=lambda msg: msg.get("content") is not None and "TERMINATE" in msg["content"], + human_input_mode="NEVER", + ) + + assistant.register_for_llm(name="calculator", description="A simple calculator")(calculator) + + # Register the tool function with the user proxy agent. + user_proxy.register_for_execution(name="calculator")(calculator) + + register_function( + calculator, + caller=assistant, # The assistant agent can suggest calls to the calculator. + executor=user_proxy, # The user proxy agent can execute the calculator calls. + name="calculator", # By default, the function name is used as the tool name. + description="A simple calculator", # A description of the tool. 
+ ) + + # Create a Semantic Kernel AutoGenConversableAgent based on the AutoGen ConversableAgent. + user_proxy_agent = AutoGenConversableAgent(conversable_agent=user_proxy) + + async for content in user_proxy_agent.invoke( + recipient=assistant_agent, + message="What is (44232 + 13312 / (232 - 32)) * 5?", + max_turns=10, + ): + for item in content.items: + match item: + case FunctionResultContent(result=r): + print(f"# {content.role} - {content.name or '*'}: '{r}'") + case FunctionCallContent(function_name=fn, arguments=arguments): + print(f"# {content.role} - {content.name or '*'}: Function Name: '{fn}', Arguments: '{arguments}'") + case _: + print(f"# {content.role} - {content.name or '*'}: '{content.content}'") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_simple_convo.py b/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_simple_convo.py new file mode 100644 index 000000000000..d3c799135e7e --- /dev/null +++ b/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_simple_convo.py @@ -0,0 +1,61 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os + +from autogen import ConversableAgent + +from semantic_kernel.agents.autogen.autogen_conversable_agent import AutoGenConversableAgent + +""" +The following sample demonstrates how to use the AutoGenConversableAgent to create a conversation between two agents +where one agent suggests a joke and the other agent generates a joke. + +The sample follows the AutoGen flow outlined here: +https://microsoft.github.io/autogen/0.2/docs/tutorial/introduction#roles-and-conversations +""" + + +async def main(): + cathy = ConversableAgent( + "cathy", + system_message="Your name is Cathy and you are a part of a duo of comedians.", + llm_config={ + "config_list": [ + { + "model": os.environ["OPENAI_CHAT_MODEL_ID"], + "temperature": 0.9, + "api_key": os.environ.get("OPENAI_API_KEY"), + } + ] + }, + human_input_mode="NEVER", # Never ask for human input. + ) + + cathy_autogen_agent = AutoGenConversableAgent(conversable_agent=cathy) + + joe = ConversableAgent( + "joe", + system_message="Your name is Joe and you are a part of a duo of comedians.", + llm_config={ + "config_list": [ + { + "model": os.environ["OPENAI_CHAT_MODEL_ID"], + "temperature": 0.7, + "api_key": os.environ.get("OPENAI_API_KEY"), + } + ] + }, + human_input_mode="NEVER", # Never ask for human input. 
+ ) + + joe_autogen_agent = AutoGenConversableAgent(conversable_agent=joe) + + async for content in cathy_autogen_agent.invoke( + recipient=joe_autogen_agent, message="Tell me a joke about the stock market.", max_turns=3 + ): + print(f"# {content.role} - {content.name or '*'}: '{content.content}'") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/azure_ai_agent/.env.example b/python/samples/concepts/agents/azure_ai_agent/.env.example new file mode 100644 index 000000000000..c2d16cea26aa --- /dev/null +++ b/python/samples/concepts/agents/azure_ai_agent/.env.example @@ -0,0 +1,6 @@ +AZURE_AI_AGENT_PROJECT_CONNECTION_STRING = "" +AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME = "" +AZURE_AI_AGENT_ENDPOINT = "" +AZURE_AI_AGENT_SUBSCRIPTION_ID = "" +AZURE_AI_AGENT_RESOURCE_GROUP_NAME = "" +AZURE_AI_AGENT_PROJECT_NAME = "" \ No newline at end of file diff --git a/python/samples/concepts/agents/azure_ai_agent/README.md b/python/samples/concepts/agents/azure_ai_agent/README.md new file mode 100644 index 000000000000..7d588308ee6b --- /dev/null +++ b/python/samples/concepts/agents/azure_ai_agent/README.md @@ -0,0 +1,13 @@ +## Azure AI Agents + +For details on using Azure AI Agents within Semantic Kernel, see the [README](../../../getting_started_with_agents/azure_ai_agent/README.md) in the `getting_started_with_agents/azure_ai_agent` directory. + +### Running the `azure_ai_agent_ai_search.py` Sample + +Before running this sample, ensure you have a valid index configured in your Azure AI Search resource. This sample queries hotel data using the sample Azure AI Search hotels index. + +For configuration details, refer to the comments in the sample script. For additional guidance, consult the [README](../../memory/azure_ai_search_hotel_samples/README.md), which provides step-by-step instructions for creating the sample index and generating vectors. This is one approach to setting up the index; you can also follow other tutorials, such as those on "Import and Vectorize Data" in your Azure AI Search resource. + +### Requests and Rate Limits + +For information on configuring rate limits or adjusting polling, refer [here](../../../getting_started_with_agents/azure_ai_agent/README.md#requests-and-rate-limits) \ No newline at end of file diff --git a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_azure_ai_search.py b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_azure_ai_search.py new file mode 100644 index 000000000000..2b57f6229e41 --- /dev/null +++ b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_azure_ai_search.py @@ -0,0 +1,147 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging + +from azure.ai.projects.models import AzureAISearchTool, ConnectionType +from azure.identity.aio import DefaultAzureCredential + +from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole + +logging.basicConfig(level=logging.WARNING) + +""" +The following sample demonstrates how to create a simple, +Azure AI agent that uses the Azure AI Search tool and the demo +hotels-sample-index to answer questions about hotels. 
+ +This sample requires: +- A "Standard" Agent Setup (choose the Python (Azure SDK) tab): +https://learn.microsoft.com/en-us/azure/ai-services/agents/quickstart +- An Azure AI Search index named 'hotels-sample-index' created in your +Azure AI Search service. You may follow this guide to create the index: +https://learn.microsoft.com/azure/search/search-get-started-portal +- You will need to make sure your Azure AI Agent project is set up with +the required Knowledge Source to be able to use the Azure AI Search tool. +Refer to the following link for information on how to do this: +https://learn.microsoft.com/en-us/azure/ai-services/agents/how-to/tools/azure-ai-search + +Refer to the README for information about configuring the index to work +with the sample data model in Azure AI Search. +""" + +# The name of the Azure AI Search index, rename as needed +AZURE_AI_SEARCH_INDEX_NAME = "hotels-sample-index" + + +async def main() -> None: + ai_agent_settings = AzureAIAgentSettings.create() + + async with ( + DefaultAzureCredential() as creds, + AzureAIAgent.create_client( + credential=creds, + conn_str=ai_agent_settings.project_connection_string.get_secret_value(), + ) as client, + ): + conn_list = await client.connections.list() + + ai_search_conn_id = "" + for conn in conn_list: + if conn.connection_type == ConnectionType.AZURE_AI_SEARCH and conn.authentication_type == "ApiKey": + ai_search_conn_id = conn.id + break + + ai_search = AzureAISearchTool(index_connection_id=ai_search_conn_id, index_name=AZURE_AI_SEARCH_INDEX_NAME) + + # Create agent definition + agent_definition = await client.agents.create_agent( + model=ai_agent_settings.model_deployment_name, + instructions="Answer questions about hotels using your index.", + tools=ai_search.definitions, + tool_resources=ai_search.resources, + headers={"x-ms-enable-preview": "true"}, + ) + + # Create the AzureAI Agent + agent = AzureAIAgent( + client=client, + definition=agent_definition, + ) + + # Create a new thread + thread = await client.agents.create_thread() + + user_inputs = [ + "Which hotels are available with full-sized kitchens in Nashville, TN?", + "Fun hotels with free WiFi.", + ] + + try: + for user_input in user_inputs: + # Add the user input as a chat message + await agent.add_chat_message( + thread_id=thread.id, + message=ChatMessageContent(role=AuthorRole.USER, content=user_input), + ) + print(f"# User: '{user_input}'\n") + # Invoke the agent for the specified thread + async for content in agent.invoke(thread_id=thread.id): + if content.role != AuthorRole.TOOL: + print(f"# Agent: {content.content}\n") + finally: + await client.agents.delete_thread(thread.id) + await client.agents.delete_agent(agent.id) + + """ + Sample output: + + # User: 'Which hotels are available with full-sized kitchens in Nashville, TN?' + + # Agent: In Nashville, TN, there are several hotels available that feature full-sized kitchens: + + 1. **Extended-Stay Hotel Options**: + - Many extended-stay hotels offer suites equipped with full-sized kitchens, which include cookware and + appliances. These hotels are designed for longer stays, making them a great option for those needing more space + and kitchen facilities【3:0†source】【3:1†source】. + + 2. **Amenities Included**: + - Most of these hotels provide additional amenities like free Wi-Fi, laundry services, fitness centers, and some + have on-site dining options【3:1†source】【3:2†source】. + + 3. 
**Location**: + - The extended-stay hotels are often located near downtown Nashville, making it convenient for guests to + explore the vibrant local music scene while enjoying the comfort of a home-like + environment【3:0†source】【3:4†source】. + + If you are looking for specific names or more detailed options, I can further assist you with that! + + # User: 'Fun hotels with free WiFi.' + + # Agent: Here are some fun hotels that offer free WiFi: + + 1. **Vibrant Downtown Hotel**: + - Located near the heart of downtown, this hotel offers a warm atmosphere with free WiFi and even provides a + delightful milk and cookies treat【7:2†source】. + + 2. **Extended-Stay Options**: + - These hotels often feature fun amenities such as a bowling alley, fitness center, and themed rooms. They also + provide free WiFi and are well-situated near local attractions【7:0†source】【7:1†source】. + + 3. **Luxury Hotel**: + - Ranked highly by Traveler magazine, this 5-star luxury hotel boasts the biggest rooms in the city, free WiFi, + espresso in the room, and flexible check-in/check-out options【7:1†source】. + + 4. **Budget-Friendly Hotels**: + - Several budget hotels offer free WiFi, breakfast, and shuttle services to nearby attractions and airports + while still providing a fun stay【7:3†source】. + + These options ensure you stay connected while enjoying your visit! If you need more specific recommendations or + details, feel free to ask! + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_file_manipulation.py b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_file_manipulation.py new file mode 100644 index 000000000000..58398add45b9 --- /dev/null +++ b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_file_manipulation.py @@ -0,0 +1,92 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os + +from azure.ai.projects.models import CodeInterpreterTool, FilePurpose +from azure.identity.aio import DefaultAzureCredential + +from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings +from semantic_kernel.contents.annotation_content import AnnotationContent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole + +################################################################### +# The following sample demonstrates how to create a simple, # +# Azure AI agent that uses the code interpreter tool to answer # +# a coding question. 
# +################################################################### + + +async def main() -> None: + ai_agent_settings = AzureAIAgentSettings.create() + + async with ( + DefaultAzureCredential() as creds, + AzureAIAgent.create_client( + credential=creds, + conn_str=ai_agent_settings.project_connection_string.get_secret_value(), + ) as client, + ): + csv_file_path = os.path.join( + os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), + "resources", + "agent_assistant_file_manipulation", + "sales.csv", + ) + + file = await client.agents.upload_file_and_poll(file_path=csv_file_path, purpose=FilePurpose.AGENTS) + + code_interpreter = CodeInterpreterTool(file_ids=[file.id]) + + # Create agent definition + agent_definition = await client.agents.create_agent( + model=ai_agent_settings.model_deployment_name, + tools=code_interpreter.definitions, + tool_resources=code_interpreter.resources, + ) + + # Create the AzureAI Agent + agent = AzureAIAgent( + client=client, + definition=agent_definition, + ) + + # Create a new thread + thread = await client.agents.create_thread() + + user_inputs = [ + "Which segment had the most sales?", + "List the top 5 countries that generated the most profit.", + "Create a tab delimited file report of profit by each country per month.", + ] + + try: + for user_input in user_inputs: + # Add the user input as a chat message + await agent.add_chat_message( + thread_id=thread.id, + message=ChatMessageContent(role=AuthorRole.USER, content=user_input), + ) + print(f"# User: '{user_input}'") + # Invoke the agent for the specified thread + async for content in agent.invoke(thread_id=thread.id): + if content.role != AuthorRole.TOOL: + print(f"# Agent: {content.content}") + if len(content.items) > 0: + for item in content.items: + if isinstance(item, AnnotationContent): + print(f"\n`{item.quote}` => {item.file_id}") + response_content = await client.agents.get_file_content(file_id=item.file_id) + content_bytes = bytearray() + async for chunk in response_content: + content_bytes.extend(chunk) + tab_delimited_text = content_bytes.decode("utf-8") + print(tab_delimited_text) + finally: + await client.agents.delete_thread(thread.id) + await client.agents.delete_agent(agent.id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_streaming.py b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_streaming.py new file mode 100644 index 000000000000..5a9b1bbcf0f1 --- /dev/null +++ b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_streaming.py @@ -0,0 +1,93 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +from typing import Annotated + +from azure.identity.aio import DefaultAzureCredential + +from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.functions.kernel_function_decorator import kernel_function + + +# Define a sample plugin for the sample +class MenuPlugin: + """A sample Menu Plugin used for the concept sample.""" + + @kernel_function(description="Provides a list of specials from the menu.") + def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: + return """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """ + + @kernel_function(description="Provides the price of the requested menu item.") + def get_item_price( + self, menu_item: Annotated[str, "The name of the menu item."] + ) -> Annotated[str, "Returns the price of the menu item."]: + return "$9.99" + + +async def main() -> None: + ai_agent_settings = AzureAIAgentSettings.create() + + async with ( + DefaultAzureCredential() as creds, + AzureAIAgent.create_client( + credential=creds, + conn_str=ai_agent_settings.project_connection_string.get_secret_value(), + ) as client, + ): + AGENT_NAME = "Host" + AGENT_INSTRUCTIONS = "Answer questions about the menu." + + # Create agent definition + agent_definition = await client.agents.create_agent( + model=ai_agent_settings.model_deployment_name, + name=AGENT_NAME, + instructions=AGENT_INSTRUCTIONS, + ) + + # Create the AzureAI Agent + agent = AzureAIAgent( + client=client, + definition=agent_definition, + ) + + # Add the sample plugin to the kernel + agent.kernel.add_plugin(MenuPlugin(), plugin_name="menu") + + # Create a new thread + thread = await client.agents.create_thread() + + user_inputs = [ + "Hello", + "What is the special soup?", + "How much does that cost?", + "Thank you", + ] + + try: + for user_input in user_inputs: + # Add the user input as a chat message + await agent.add_chat_message( + thread_id=thread.id, message=ChatMessageContent(role=AuthorRole.USER, content=user_input) + ) + print(f"# User: '{user_input}'") + first_chunk = True + async for content in agent.invoke_stream(thread_id=thread.id): + if content.role != AuthorRole.TOOL: + if first_chunk: + print(f"# {content.role}: ", end="", flush=True) + first_chunk = False + print(content.content, end="", flush=True) + print() + finally: + await client.agents.delete_thread(thread.id) + await client.agents.delete_agent(agent.id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/.env.example b/python/samples/concepts/agents/bedrock_agent/.env.example new file mode 100644 index 000000000000..d0e3523fcfca --- /dev/null +++ b/python/samples/concepts/agents/bedrock_agent/.env.example @@ -0,0 +1,2 @@ +BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN=[YOUR_AGENT_ROLE_AMAZON_RESOURCE_NAME] +BEDROCK_AGENT_FOUNDATION_MODEL=[YOUR_FOUNDATION_MODEL] \ No newline at end of file diff --git a/python/samples/concepts/agents/bedrock_agent/README.md b/python/samples/concepts/agents/bedrock_agent/README.md new file mode 100644 index 000000000000..3e72751eb308 --- /dev/null +++ b/python/samples/concepts/agents/bedrock_agent/README.md @@ -0,0 +1,74 @@ +# Concept samples on how to use AWS Bedrock agents + +## Pre-requisites + +1. 
You need to have an AWS account and [access to the foundation models](https://docs.aws.amazon.com/bedrock/latest/userguide/model-access-permissions.html)
+2. [AWS CLI installed](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html) and [configured](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html#configuration)
+
+### Configuration
+
+Follow this [guide](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html#configuration) to configure your environment to use the Bedrock API.
+
+Please configure the `aws_access_key_id`, `aws_secret_access_key`, and `region`; otherwise, you will need to create custom clients for the services. For example:
+
+```python
+runtime_client = boto3.client(
+    "bedrock-runtime",
+    aws_access_key_id="your_access_key",
+    aws_secret_access_key="your_secret_key",
+    region_name="your_region",
+    [...other parameters you may need...]
+)
+client = boto3.client(
+    "bedrock",
+    aws_access_key_id="your_access_key",
+    aws_secret_access_key="your_secret_key",
+    region_name="your_region",
+    [...other parameters you may need...]
+)
+
+bedrock_agent = BedrockAgent.create_and_prepare_agent(
+    name="your_agent_name",
+    instructions="your_instructions",
+    runtime_client=runtime_client,
+    client=client,
+    [...other parameters you may need...]
+)
+```
+
+## Samples
+
+| Sample | Description |
+|--------|-------------|
+| [bedrock_agent_simple_chat.py](bedrock_agent_simple_chat.py) | Demonstrates basic usage of the Bedrock agent. |
+| [bedrock_agent_simple_chat_streaming.py](bedrock_agent_simple_chat_streaming.py) | Demonstrates basic usage of the Bedrock agent with streaming. |
+| [bedrock_agent_with_kernel_function.py](bedrock_agent_with_kernel_function.py) | Shows how to use the Bedrock agent with a kernel function. |
+| [bedrock_agent_with_kernel_function_streaming.py](bedrock_agent_with_kernel_function_streaming.py) | Shows how to use the Bedrock agent with a kernel function and streaming. |
+| [bedrock_agent_with_kernel_function_simple.py](bedrock_agent_with_kernel_function_simple.py) | Shows how to use the Bedrock agent with kernel functions by passing plugins directly to the agent. |
+| [bedrock_agent_with_code_interpreter.py](bedrock_agent_with_code_interpreter.py) | Example of using the Bedrock agent with a code interpreter. |
+| [bedrock_agent_with_code_interpreter_streaming.py](bedrock_agent_with_code_interpreter_streaming.py) | Example of using the Bedrock agent with a code interpreter and streaming. |
+| [bedrock_mixed_chat_agents.py](bedrock_mixed_chat_agents.py) | Example of using multiple chat agents in a single script. |
+| [bedrock_mixed_chat_agents_streaming.py](bedrock_mixed_chat_agents_streaming.py) | Example of using multiple chat agents in a single script with streaming. |
+
+## Before running the samples
+
+You need to set up some environment variables to run the samples. Please refer to the [.env.example](.env.example) file for the required environment variables.
+
+### `BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN`
+
+In the AWS console, go to the IAM service and open **Roles**. Find the role you want to use and click on it. You will find the ARN in the summary section.
+
+### `BEDROCK_AGENT_FOUNDATION_MODEL`
+
+You need to make sure you have permission to access the foundation model. You can find the model ID in the [AWS documentation](https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html). To see which models you have access to, find the policy attached to your role; the models you can use are listed under its `Resource` section.
+
+### How to add the `bedrock:InvokeModelWithResponseStream` action to an IAM policy
+
+1.
Open the [IAM console](https://console.aws.amazon.com/iam/). +2. On the left navigation pane, choose `Roles` under `Access management`. +3. Find the role you want to edit and click on it. +4. Under the `Permissions policies` tab, click on the policy you want to edit. +5. Under the `Permissions defined in this policy` section, click on the service. You should see **Bedrock** if you already have access to the Bedrock agent service. +6. Click on the service, and then click `Edit`. +7. On the right, you will be able to add an action. Find the service and search for `InvokeModelWithResponseStream`. +8. Check the box next to the action and then scroll all the way down and click `Next`. +9. Follow the prompts to save the changes. diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat.py b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat.py new file mode 100644 index 000000000000..e50d376b93f0 --- /dev/null +++ b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat.py @@ -0,0 +1,52 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent + +# This sample shows how to interact with a Bedrock agent in the simplest way. +# This sample uses the following main component(s): +# - a Bedrock agent +# You will learn how to create a new Bedrock agent and talk to it. + +AGENT_NAME = "semantic-kernel-bedrock-agent" +INSTRUCTION = "You are a friendly assistant. You help people find information." + + +async def main(): + bedrock_agent = await BedrockAgent.create_and_prepare_agent(AGENT_NAME, instructions=INSTRUCTION) + session_id = BedrockAgent.create_session_id() + + try: + while True: + user_input = input("User:> ") + if user_input == "exit": + print("\n\nExiting chat...") + break + + # Invoke the agent + # The chat history is maintained in the session + async for response in bedrock_agent.invoke( + session_id=session_id, + input_text=user_input, + ): + print(f"Bedrock agent: {response}") + except KeyboardInterrupt: + print("\n\nExiting chat...") + return False + except EOFError: + print("\n\nExiting chat...") + return False + finally: + # Delete the agent + await bedrock_agent.delete_agent() + + # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): + # User:> Hi, my name is John. + # Bedrock agent: Hello John. How can I help you? + # User:> What is my name? + # Bedrock agent: Your name is John. + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat_streaming.py b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat_streaming.py new file mode 100644 index 000000000000..099b9de75f51 --- /dev/null +++ b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat_streaming.py @@ -0,0 +1,54 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent + +# This sample shows how to interact with a Bedrock agent via streaming in the simplest way. +# This sample uses the following main component(s): +# - a Bedrock agent +# You will learn how to create a new Bedrock agent and talk to it. + +AGENT_NAME = "semantic-kernel-bedrock-agent" +INSTRUCTION = "You are a friendly assistant. You help people find information." 
+ + +async def main(): + bedrock_agent = await BedrockAgent.create_and_prepare_agent(AGENT_NAME, instructions=INSTRUCTION) + session_id = BedrockAgent.create_session_id() + + try: + while True: + user_input = input("User:> ") + if user_input == "exit": + print("\n\nExiting chat...") + break + + # Invoke the agent + # The chat history is maintained in the session + print("Bedrock agent: ", end="") + async for response in bedrock_agent.invoke_stream( + session_id=session_id, + input_text=user_input, + ): + print(response, end="") + print() + except KeyboardInterrupt: + print("\n\nExiting chat...") + return False + except EOFError: + print("\n\nExiting chat...") + return False + finally: + # Delete the agent + await bedrock_agent.delete_agent() + + # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): + # User:> Hi, my name is John. + # Bedrock agent: Hello John. How can I help you? + # User:> What is my name? + # Bedrock agent: Your name is John. + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter.py b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter.py new file mode 100644 index 000000000000..ad6bf184b9fa --- /dev/null +++ b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter.py @@ -0,0 +1,75 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os + +from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent +from semantic_kernel.contents.binary_content import BinaryContent +from semantic_kernel.contents.chat_message_content import ChatMessageContent + +# This sample shows how to interact with a Bedrock agent that is capable of writing and executing code. +# This sample uses the following main component(s): +# - a Bedrock agent +# You will learn how to create a new Bedrock agent and ask it a question that requires coding to answer. +# After running this sample, a bar chart will be generated and saved to a file in the same directory +# as this script. + +AGENT_NAME = "semantic-kernel-bedrock-agent" +INSTRUCTION = "You are a friendly assistant. You help people find information." 
+ + +ASK = """ +Create a bar chart for the following data: +Panda 5 +Tiger 8 +Lion 3 +Monkey 6 +Dolphin 2 +""" + + +async def main(): + bedrock_agent = await BedrockAgent.create_and_prepare_agent(AGENT_NAME, instructions=INSTRUCTION) + await bedrock_agent.create_code_interpreter_action_group() + + session_id = BedrockAgent.create_session_id() + + # Placeholder for the file generated by the code interpreter + binary_item: BinaryContent | None = None + + try: + # Invoke the agent + async for response in bedrock_agent.invoke( + session_id=session_id, + input_text=ASK, + ): + print(f"Response:\n{response}") + assert isinstance(response, ChatMessageContent) # nosec + if not binary_item: + binary_item = next((item for item in response.items if isinstance(item, BinaryContent)), None) + finally: + # Delete the agent + await bedrock_agent.delete_agent() + + # Save the chart to a file + if not binary_item: + raise RuntimeError("No chart generated") + + file_path = os.path.join(os.path.dirname(__file__), binary_item.metadata["name"]) + binary_item.write_to_file(os.path.join(os.path.dirname(__file__), binary_item.metadata["name"])) + print(f"Chart saved to {file_path}") + + # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): + # Response: + # Here is the bar chart for the given data: + # [A bar chart showing the following data: + # Panda 5 + # Tiger 8 + # Lion 3 + # Monkey 6 + # Dolpin 2] + # Chart saved to ... + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter_streaming.py b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter_streaming.py new file mode 100644 index 000000000000..ca60c477e66e --- /dev/null +++ b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter_streaming.py @@ -0,0 +1,77 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os + +from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent +from semantic_kernel.contents.binary_content import BinaryContent +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent + +# This sample shows how to interact with a Bedrock agent that is capable of writing and executing code. +# This sample uses the following main component(s): +# - a Bedrock agent +# You will learn how to create a new Bedrock agent and ask it a question that requires coding to answer. +# After running this sample, a bar chart will be generated and saved to a file in the same directory +# as this script. + +AGENT_NAME = "semantic-kernel-bedrock-agent" +INSTRUCTION = "You are a friendly assistant. You help people find information." 
+ + +ASK = """ +Create a bar chart for the following data: +Panda 5 +Tiger 8 +Lion 3 +Monkey 6 +Dolphin 2 +""" + + +async def main(): + bedrock_agent = await BedrockAgent.create_and_prepare_agent(AGENT_NAME, instructions=INSTRUCTION) + await bedrock_agent.create_code_interpreter_action_group() + + session_id = BedrockAgent.create_session_id() + + # Placeholder for the file generated by the code interpreter + binary_item: BinaryContent | None = None + + try: + # Invoke the agent + print("Response: ") + async for response in bedrock_agent.invoke_stream( + session_id=session_id, + input_text=ASK, + ): + print(response, end="") + assert isinstance(response, StreamingChatMessageContent) # nosec + if not binary_item: + binary_item = next((item for item in response.items if isinstance(item, BinaryContent)), None) + print() + finally: + # Delete the agent + await bedrock_agent.delete_agent() + + # Save the chart to a file + if not binary_item: + raise RuntimeError("No chart generated") + + file_path = os.path.join(os.path.dirname(__file__), binary_item.metadata["name"]) + binary_item.write_to_file(os.path.join(os.path.dirname(__file__), binary_item.metadata["name"])) + print(f"Chart saved to {file_path}") + + # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): + # Response: + # Here is the bar chart for the given data: + # [A bar chart showing the following data: + # Panda 5 + # Tiger 8 + # Lion 3 + # Monkey 6 + # Dolpin 2] + # Chart saved to ... + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function.py b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function.py new file mode 100644 index 000000000000..928c02054fa7 --- /dev/null +++ b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function.py @@ -0,0 +1,68 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +from typing import Annotated + +from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.kernel import Kernel + +# This sample shows how to interact with a Bedrock agent that is capable of using kernel functions. +# This sample uses the following main component(s): +# - a Bedrock agent +# - a kernel function +# - a kernel +# You will learn how to create a new Bedrock agent and ask it a question that requires a kernel function to answer. + +AGENT_NAME = "semantic-kernel-bedrock-agent" +INSTRUCTION = "You are a friendly assistant. You help people find information." + + +class WeatherPlugin: + """Mock weather plugin.""" + + @kernel_function(description="Get real-time weather information.") + def current(self, location: Annotated[str, "The location to get the weather"]) -> str: + """Returns the current weather.""" + return f"The weather in {location} is sunny." + + +def get_kernel() -> Kernel: + kernel = Kernel() + kernel.add_plugin(WeatherPlugin(), plugin_name="weather") + + return kernel + + +async def main(): + # Create a kernel + kernel = get_kernel() + + bedrock_agent = await BedrockAgent.create_and_prepare_agent( + AGENT_NAME, + INSTRUCTION, + kernel=kernel, + ) + # Note: We still need to create the kernel function action group on the service side. 
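+    # Without this registration the Bedrock service is unaware of the kernel functions and
+    # cannot request them; the functions themselves are still executed locally by the kernel.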
+ await bedrock_agent.create_kernel_function_action_group() + + session_id = BedrockAgent.create_session_id() + + try: + # Invoke the agent + async for response in bedrock_agent.invoke( + session_id=session_id, + input_text="What is the weather in Seattle?", + ): + print(f"Response:\n{response}") + finally: + # Delete the agent + await bedrock_agent.delete_agent() + + # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): + # Response: + # The current weather in Seattle is sunny. + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_simple.py b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_simple.py new file mode 100644 index 000000000000..b214ab5591dc --- /dev/null +++ b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_simple.py @@ -0,0 +1,59 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +from typing import Annotated + +from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent +from semantic_kernel.functions.kernel_function_decorator import kernel_function + +# This sample shows how to interact with a Bedrock agent that is capable of using kernel functions. +# Instead of creating a kernel and adding plugins to it, you can directly pass the plugins to the +# agent when creating it. +# This sample uses the following main component(s): +# - a Bedrock agent +# - a kernel function +# - a kernel +# You will learn how to create a new Bedrock agent and ask it a question that requires a kernel function to answer. + +AGENT_NAME = "semantic-kernel-bedrock-agent" +INSTRUCTION = "You are a friendly assistant. You help people find information." + + +class WeatherPlugin: + """Mock weather plugin.""" + + @kernel_function(description="Get real-time weather information.") + def current(self, location: Annotated[str, "The location to get the weather"]) -> str: + """Returns the current weather.""" + return f"The weather in {location} is sunny." + + +async def main(): + bedrock_agent = await BedrockAgent.create_and_prepare_agent( + AGENT_NAME, + INSTRUCTION, + plugins=[WeatherPlugin()], + ) + # Note: We still need to create the kernel function action group on the service side. + await bedrock_agent.create_kernel_function_action_group() + + session_id = BedrockAgent.create_session_id() + + try: + # Invoke the agent + async for response in bedrock_agent.invoke( + session_id=session_id, + input_text="What is the weather in Seattle?", + ): + print(f"Response:\n{response}") + finally: + # Delete the agent + await bedrock_agent.delete_agent() + + # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): + # Response: + # The current weather in Seattle is sunny. + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_streaming.py b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_streaming.py new file mode 100644 index 000000000000..aa4dce75e0ed --- /dev/null +++ b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_streaming.py @@ -0,0 +1,69 @@ +# Copyright (c) Microsoft. All rights reserved. 
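+# This streaming variant mirrors bedrock_agent_with_kernel_function.py; the only difference
+# is that it calls invoke_stream and prints the response chunks as they arrive.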
+ +import asyncio +from typing import Annotated + +from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.kernel import Kernel + +# This sample shows how to interact with a Bedrock agent that is capable of using kernel functions. +# This sample uses the following main component(s): +# - a Bedrock agent +# - a kernel function +# - a kernel +# You will learn how to create a new Bedrock agent and ask it a question that requires a kernel function to answer. + +AGENT_NAME = "semantic-kernel-bedrock-agent" +INSTRUCTION = "You are a friendly assistant. You help people find information." + + +class WeatherPlugin: + """Mock weather plugin.""" + + @kernel_function(description="Get real-time weather information.") + def current(self, location: Annotated[str, "The location to get the weather"]) -> str: + """Returns the current weather.""" + return f"The weather in {location} is sunny." + + +def get_kernel() -> Kernel: + kernel = Kernel() + kernel.add_plugin(WeatherPlugin(), plugin_name="weather") + + return kernel + + +async def main(): + # Create a kernel + kernel = get_kernel() + + bedrock_agent = await BedrockAgent.create_and_prepare_agent( + AGENT_NAME, + INSTRUCTION, + kernel=kernel, + ) + # Note: We still need to create the kernel function action group on the service side. + await bedrock_agent.create_kernel_function_action_group() + + session_id = BedrockAgent.create_session_id() + + try: + # Invoke the agent + print("Response: ") + async for response in bedrock_agent.invoke_stream( + session_id=session_id, + input_text="What is the weather in Seattle?", + ): + print(response, end="") + finally: + # Delete the agent + await bedrock_agent.delete_agent() + + # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): + # Response: + # The current weather in Seattle is sunny. + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents.py b/python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents.py new file mode 100644 index 000000000000..aa29259276d5 --- /dev/null +++ b/python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents.py @@ -0,0 +1,102 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent +from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent +from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +# This sample shows how to use a bedrock agent in a group chat that includes multiple agents of different roles. +# This sample uses the following main component(s): +# - a Bedrock agent +# - a ChatCompletionAgent +# - an AgentGroupChat +# You will learn how to create a new or connect to an existing Bedrock agent and put it in a group chat with +# another agent. + +# This will be a chat completion agent +REVIEWER_NAME = "ArtDirector" +REVIEWER_INSTRUCTIONS = """ +You are an art director who has opinions about copywriting born of a love for David Ogilvy. +The goal is to determine if the given copy is acceptable to print. +If so, state that it is approved. 
Only include the word "approved" if it is so. +If not, provide insight on how to refine suggested copy without example. +""" + +# This will be a bedrock agent +COPYWRITER_NAME = "CopyWriter" +COPYWRITER_INSTRUCTIONS = """ +You are a copywriter with ten years of experience and are known for brevity and a dry humor. +The goal is to refine and decide on the single best copy as an expert in the field. +Only provide a single proposal per response. +You're laser focused on the goal at hand. +Don't waste time with chit chat. +Consider suggestions when refining an idea. +""" + + +class ApprovalTerminationStrategy(TerminationStrategy): + """A strategy for determining when an agent should terminate.""" + + async def should_agent_terminate(self, agent, history): + """Check if the agent should terminate.""" + return "approved" in history[-1].content.lower() + + +def _create_kernel_with_chat_completion() -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion()) + return kernel + + +async def main(): + agent_reviewer = ChatCompletionAgent( + kernel=_create_kernel_with_chat_completion(), + name=REVIEWER_NAME, + instructions=REVIEWER_INSTRUCTIONS, + ) + + agent_writer = await BedrockAgent.create_and_prepare_agent( + COPYWRITER_NAME, + instructions=COPYWRITER_INSTRUCTIONS, + ) + + chat = AgentGroupChat( + agents=[agent_writer, agent_reviewer], + termination_strategy=ApprovalTerminationStrategy( + agents=[agent_reviewer], + maximum_iterations=10, + ), + ) + + input = "A slogan for a new line of electric cars." + + await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) + print(f"# {AuthorRole.USER}: '{input}'") + + try: + async for message in chat.invoke(): + print(f"# {message.role} - {message.name or '*'}: '{message.content}'") + print(f"# IS COMPLETE: {chat.is_complete}") + finally: + # Delete the agent + await agent_writer.delete_agent() + + # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): + # AuthorRole.USER: 'A slogan for a new line of electric cars.' + # AuthorRole.ASSISTANT - CopyWriter: 'Charge Ahead: The Future of Driving' + # AuthorRole.ASSISTANT - ArtDirector: 'The slogan "Charge Ahead: The Future of Driving" is compelling but could be + # made even more impactful. Consider clarifying the unique selling proposition of the electric cars. Focus on what + # sets them apart in terms of performance, eco-friendliness, or innovation. This will help create an emotional + # connection and a clearer message for the audience.' + # AuthorRole.ASSISTANT - CopyWriter: 'Charge Forward: The Electrifying Future of Driving' + # AuthorRole.ASSISTANT - ArtDirector: 'Approved' + # IS COMPLETE: True + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents_streaming.py b/python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents_streaming.py new file mode 100644 index 000000000000..b4ed1668b822 --- /dev/null +++ b/python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents_streaming.py @@ -0,0 +1,107 @@ +# Copyright (c) Microsoft. All rights reserved. 
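+# This streaming variant mirrors bedrock_mixed_chat_agents.py; the difference is that the
+# group chat is consumed via invoke_stream and each agent's response is printed chunk by chunk.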
+ +import asyncio + +from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent +from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent +from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +# This sample shows how to use a bedrock agent in a group chat that includes multiple agents of different roles. +# This sample uses the following main component(s): +# - a Bedrock agent +# - a ChatCompletionAgent +# - an AgentGroupChat +# You will learn how to create a new or connect to an existing Bedrock agent and put it in a group chat with +# another agent. + +# This will be a chat completion agent +REVIEWER_NAME = "ArtDirector" +REVIEWER_INSTRUCTIONS = """ +You are an art director who has opinions about copywriting born of a love for David Ogilvy. +The goal is to determine if the given copy is acceptable to print. +If so, state that it is approved. Only include the word "approved" if it is so. +If not, provide insight on how to refine suggested copy without example. +""" + +# This will be a bedrock agent +COPYWRITER_NAME = "CopyWriter" +COPYWRITER_INSTRUCTIONS = """ +You are a copywriter with ten years of experience and are known for brevity and a dry humor. +The goal is to refine and decide on the single best copy as an expert in the field. +Only provide a single proposal per response. +You're laser focused on the goal at hand. +Don't waste time with chit chat. +Consider suggestions when refining an idea. +""" + + +class ApprovalTerminationStrategy(TerminationStrategy): + """A strategy for determining when an agent should terminate.""" + + async def should_agent_terminate(self, agent, history): + """Check if the agent should terminate.""" + return "approved" in history[-1].content.lower() + + +def _create_kernel_with_chat_completion() -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion()) + return kernel + + +async def main(): + agent_reviewer = ChatCompletionAgent( + kernel=_create_kernel_with_chat_completion(), + name=REVIEWER_NAME, + instructions=REVIEWER_INSTRUCTIONS, + ) + + agent_writer = await BedrockAgent.create_and_prepare_agent( + COPYWRITER_NAME, + instructions=COPYWRITER_INSTRUCTIONS, + ) + + chat = AgentGroupChat( + agents=[agent_writer, agent_reviewer], + termination_strategy=ApprovalTerminationStrategy( + agents=[agent_reviewer], + maximum_iterations=10, + ), + ) + + input = "A slogan for a new line of electric cars." + + await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) + print(f"# {AuthorRole.USER}: '{input}'") + + try: + current_agent = "*" + async for message_chunk in chat.invoke_stream(): + if current_agent != message_chunk.name: + current_agent = message_chunk.name or "*" + print(f"\n# {message_chunk.role} - {current_agent}: ", end="") + print(message_chunk.content, end="") + print() + print(f"# IS COMPLETE: {chat.is_complete}") + finally: + # Delete the agent + await agent_writer.delete_agent() + + # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): + # AuthorRole.USER: 'A slogan for a new line of electric cars.' 
+ # AuthorRole.ASSISTANT - CopyWriter: 'Charge Ahead: The Future of Driving' + # AuthorRole.ASSISTANT - ArtDirector: 'The slogan "Charge Ahead: The Future of Driving" is compelling but could be + # made even more impactful. Consider clarifying the unique selling proposition of the electric cars. Focus on what + # sets them apart in terms of performance, eco-friendliness, or innovation. This will help create an emotional + # connection and a clearer message for the audience.' + # AuthorRole.ASSISTANT - CopyWriter: 'Charge Forward: The Electrifying Future of Driving' + # AuthorRole.ASSISTANT - ArtDirector: 'Approved' + # IS COMPLETE: True + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_agent/README.md b/python/samples/concepts/agents/chat_completion_agent/README.md new file mode 100644 index 000000000000..a9b02dbb0d07 --- /dev/null +++ b/python/samples/concepts/agents/chat_completion_agent/README.md @@ -0,0 +1,45 @@ +# Chat Completion Agent Samples + +The following samples demonstrate advanced usage of the `ChatCompletionAgent`. + +--- + +## Chat History Reduction Strategies + +When configuring chat history management, there are two important settings to consider: + +### `reducer_msg_count` + +- **Purpose:** Defines the target number of messages to retain after applying truncation or summarization. +- **Controls:** Determines how much recent conversation history is preserved, while older messages are either discarded or summarized. +- **Recommendations for adjustment:** + - **Smaller values:** Ideal for memory-constrained environments or scenarios where brief context is sufficient. + - **Larger values:** Useful when retaining extensive conversational context is critical for accurate responses or complex dialogue. + +### `reducer_threshold` + +- **Purpose:** Provides a buffer to prevent premature reduction when the message count slightly exceeds `reducer_msg_count`. +- **Controls:** Ensures essential message pairs (e.g., a user query and the assistant’s response) aren't unintentionally truncated. +- **Recommendations for adjustment:** + - **Smaller values:** Use to enforce stricter message reduction criteria, potentially truncating older message pairs sooner. + - **Larger values:** Recommended for preserving critical conversation segments, particularly in sensitive interactions involving API function calls or detailed responses. + +### Interaction Between Parameters + +The combination of these parameters determines **when** history reduction occurs and **how much** of the conversation is retained. + +**Example:** +- If `reducer_msg_count = 10` and `reducer_threshold = 5`, message history won't be truncated until the total message count exceeds 15. This strategy maintains conversational context flexibility while respecting memory limitations. + +--- + +## Recommendations for Effective Configuration + +- **Performance-focused environments:** + - Lower `reducer_msg_count` to conserve memory and accelerate processing. + +- **Context-sensitive scenarios:** + - Higher `reducer_msg_count` and `reducer_threshold` help maintain continuity across multiple interactions, crucial for multi-turn conversations or complex workflows. + +- **Iterative Experimentation:** + - Start with default values (`reducer_msg_count = 10`, `reducer_threshold = 10`), and adjust according to the specific behavior and response quality required by your application. 
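+
+---
+
+## Minimal Configuration Sketch
+
+The snippet below is a minimal sketch of how the two settings above are wired into a
+summarization reducer, mirroring the single-agent samples in this folder. It assumes an
+Azure OpenAI chat completion service is already configured through environment variables:
+
+```python
+from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion
+from semantic_kernel.contents import ChatHistorySummarizationReducer
+
+# Keep roughly 10 recent messages; reduction is only attempted once the history
+# grows past target_count + threshold_count (i.e. more than 15 messages here).
+reducer = ChatHistorySummarizationReducer(
+    service=AzureChatCompletion(),
+    target_count=10,
+    threshold_count=5,
+)
+```
+
+Call `await reducer.reduce()` after adding new messages and before invoking the agent
+(see `chat_completion_summary_history_reducer_single_agent.py`) to apply the reduction
+whenever the threshold is exceeded.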
diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_function_termination.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_function_termination.py new file mode 100644 index 000000000000..c257eacb5ff4 --- /dev/null +++ b/python/samples/concepts/agents/chat_completion_agent/chat_completion_function_termination.py @@ -0,0 +1,147 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +from typing import Annotated + +from semantic_kernel.agents import ChatCompletionAgent +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents import ChatHistory, ChatMessageContent, FunctionCallContent, FunctionResultContent +from semantic_kernel.filters import AutoFunctionInvocationContext +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.kernel import Kernel + +""" +The following sample demonstrates how to configure the auto +function invocation filter while using a ChatCompletionAgent. +This allows the developer or user to view the function call content +and the function result content. +""" + + +# Define the auto function invocation filter that will be used by the kernel +async def auto_function_invocation_filter(context: AutoFunctionInvocationContext, next): + """A filter that will be called for each function call in the response.""" + # if we don't call next, it will skip this function, and go to the next one + await next(context) + if context.function.plugin_name == "menu": + context.terminate = True + + +# Define a sample plugin for the sample +class MenuPlugin: + """A sample Menu Plugin used for the concept sample.""" + + @kernel_function(description="Provides a list of specials from the menu.") + def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: + return """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """ + + @kernel_function(description="Provides the price of the requested menu item.") + def get_item_price( + self, menu_item: Annotated[str, "The name of the menu item."] + ) -> Annotated[str, "Returns the price of the menu item."]: + return "$9.99" + + +def _create_kernel_with_chat_completionand_filter() -> Kernel: + """A helper function to create a kernel with a chat completion service and a filter.""" + kernel = Kernel() + kernel.add_service(AzureChatCompletion()) + kernel.add_filter("auto_function_invocation", auto_function_invocation_filter) + kernel.add_plugin(plugin=MenuPlugin(), plugin_name="menu") + return kernel + + +def _write_content(content: ChatMessageContent) -> None: + """Write the content to the console based on the content type.""" + last_item_type = type(content.items[-1]).__name__ if content.items else "(empty)" + message_content = "" + if isinstance(last_item_type, FunctionCallContent): + message_content = f"tool request = {content.items[-1].function_name}" + elif isinstance(last_item_type, FunctionResultContent): + message_content = f"function result = {content.items[-1].result}" + else: + message_content = str(content.items[-1]) + print(f"[{last_item_type}] {content.role} : '{message_content}'") + + +async def main(): + # 1. Create the agent with a kernel instance that contains + # the auto function invocation filter and the AI service + agent = ChatCompletionAgent( + kernel=_create_kernel_with_chat_completionand_filter(), + name="Host", + instructions="Answer questions about the menu.", + ) + + # 2. 
Define the chat history + chat_history = ChatHistory() + + user_inputs = [ + "Hello", + "What is the special soup?", + "What is the special drink?", + "Thank you", + ] + + for user_input in user_inputs: + # 3. Add the user message to the chat history + chat_history.add_user_message(user_input) + print(f"# User: '{user_input}'") + + # 4. Get the response from the agent + content = await agent.get_response(chat_history) + # Don't add the message if it is a function call or result + if not any(isinstance(item, (FunctionCallContent, FunctionResultContent)) for item in content.items): + chat_history.add_message(content) + _write_content(content) + + print("================================") + print("CHAT HISTORY") + print("================================") + + # Print out the chat history to view the different types of messages + for message in chat_history.messages: + _write_content(message) + + """ + Sample output: + + # AuthorRole.USER: 'Hello' + [TextContent] AuthorRole.ASSISTANT : 'Hello! How can I assist you today?' + # AuthorRole.USER: 'What is the special soup?' + [FunctionResultContent] AuthorRole.TOOL : ' + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + ' + # AuthorRole.USER: 'What is the special drink?' + [TextContent] AuthorRole.ASSISTANT : 'The special drink is Chai Tea.' + # AuthorRole.USER: 'Thank you' + [TextContent] AuthorRole.ASSISTANT : 'You're welcome! If you have any more questions or need assistance with + anything else, feel free to ask!' + ================================ + CHAT HISTORY + ================================ + [TextContent] AuthorRole.USER : 'Hello' + [TextContent] AuthorRole.ASSISTANT : 'Hello! How can I assist you today?' + [TextContent] AuthorRole.USER : 'What is the special soup?' + [FunctionCallContent] AuthorRole.ASSISTANT : 'menu-get_specials({})' + [FunctionResultContent] AuthorRole.TOOL : ' + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + ' + [TextContent] AuthorRole.USER : 'What is the special drink?' + [TextContent] AuthorRole.ASSISTANT : 'The special drink is Chai Tea.' + [TextContent] AuthorRole.USER : 'Thank you' + [TextContent] AuthorRole.ASSISTANT : 'You're welcome! If you have any more questions or need assistance with + anything else, feel free to ask!' + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_prompt_templating.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_prompt_templating.py new file mode 100644 index 000000000000..74aba6060d6a --- /dev/null +++ b/python/samples/concepts/agents/chat_completion_agent/chat_completion_prompt_templating.py @@ -0,0 +1,99 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from semantic_kernel.agents import ChatCompletionAgent +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents import ChatHistory +from semantic_kernel.functions import KernelArguments +from semantic_kernel.prompt_template import PromptTemplateConfig + +""" +The following sample demonstrates how to create a chat completion +agent using Azure OpenAI within Semantic Kernel. +It uses parameterized prompts and shows how to swap between +"semantic-kernel," "jinja2," and "handlebars" template formats, +This sample highlights the agent's chat history conversation +is managed and how kernel arguments are passed in and used. 
+""" + +# Define the inputs and styles to be used in the agent +inputs = [ + ("Home cooking is great.", None), + ("Talk about world peace.", "iambic pentameter"), + ("Say something about doing your best.", "e. e. cummings"), + ("What do you think about having fun?", "old school rap"), +] + + +async def invoke_chat_completion_agent(agent: ChatCompletionAgent, inputs): + """Invokes the given agent with each (input, style) in inputs.""" + + chat = ChatHistory() + + for user_input, style in inputs: + # Add user message to the conversation + chat.add_user_message(user_input) + print(f"[USER]: {user_input}\n") + + # If style is specified, override the 'style' argument + argument_overrides = None + if style: + argument_overrides = KernelArguments(style=style) + + # Stream agent responses + async for response in agent.invoke_stream(history=chat, arguments=argument_overrides): + print(f"{response.content}", end="", flush=True) + print() + + +async def invoke_agent_with_template(template_str: str, template_format: str, default_style: str = "haiku"): + """Creates an agent with the specified template and format, then invokes it using invoke_chat_completion_agent.""" + + # Configure the prompt template + prompt_config = PromptTemplateConfig(template=template_str, template_format=template_format) + + agent = ChatCompletionAgent( + service=AzureChatCompletion(), + name="MyPoetAgent", + prompt_template_config=prompt_config, + arguments=KernelArguments(style=default_style), + ) + + await invoke_chat_completion_agent(agent, inputs) + + +async def main(): + # 1) Using "semantic-kernel" format + print("\n===== SEMANTIC-KERNEL FORMAT =====\n") + semantic_kernel_template = """ + Write a one verse poem on the requested topic in the style of {{$style}}. + Always state the requested style of the poem. + """ + await invoke_agent_with_template( + template_str=semantic_kernel_template, + template_format="semantic-kernel", + default_style="haiku", + ) + + # 2) Using "jinja2" format + print("\n===== JINJA2 FORMAT =====\n") + jinja2_template = """ + Write a one verse poem on the requested topic in the style of {{style}}. + Always state the requested style of the poem. + """ + await invoke_agent_with_template(template_str=jinja2_template, template_format="jinja2", default_style="haiku") + + # 3) Using "handlebars" format + print("\n===== HANDLEBARS FORMAT =====\n") + handlebars_template = """ + Write a one verse poem on the requested topic in the style of {{style}}. + Always state the requested style of the poem. + """ + await invoke_agent_with_template( + template_str=handlebars_template, template_format="handlebars", default_style="haiku" + ) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_agent_chat.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_agent_chat.py new file mode 100644 index 000000000000..f48905b6113f --- /dev/null +++ b/python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_agent_chat.py @@ -0,0 +1,80 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio + +from semantic_kernel.agents import ( + AgentGroupChat, + ChatCompletionAgent, +) +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents import ( + ChatHistorySummarizationReducer, +) + +""" +The following sample demonstrates how to implement a chat history +reducer as part of the Semantic Kernel Agent Framework. For this sample, +the ChatCompletionAgent with an AgentGroupChat is used. The Chat History +Reducer is a Summary Reducer. View the README for more information on +how to use the reducer and what each parameter does. +""" + + +async def main(): + """ + Single-function approach that shows the same chat reducer behavior + while preserving all original logic and code lines (now commented). + """ + + # Setup necessary parameters + reducer_msg_count = 10 + reducer_threshold = 10 + + # Create a summarization reducer and clear its history + history_summarization_reducer = ChatHistorySummarizationReducer( + service=AzureChatCompletion(), target_count=reducer_msg_count, threshold_count=reducer_threshold + ) + history_summarization_reducer.clear() + + # Create our agent + agent = ChatCompletionAgent( + name="NumeroTranslator", + instructions="Add one to the latest user number and spell it in Spanish without explanation.", + service=AzureChatCompletion(), + ) + + # Create a group chat using the reducer + chat = AgentGroupChat(chat_history=history_summarization_reducer) + + # Simulate user messages + message_count = 50 # Number of messages to simulate + for index in range(1, message_count, 2): + # Add user message to the chat + await chat.add_chat_message(message=str(index)) + print(f"# User: '{index}'") + + # Attempt to reduce history + is_reduced = await chat.reduce_history() + if is_reduced: + print(f"@ History reduced to {len(history_summarization_reducer.messages)} messages.") + + # Invoke the agent and display responses + async for message in chat.invoke(agent): + print(f"# {message.role} - {message.name or '*'}: '{message.content}'") + + # Retrieve messages + msgs = [] + async for m in chat.get_chat_messages(agent): + msgs.append(m) + print(f"@ Message Count: {len(msgs)}\n") + + # If a reduction happened and we use summarization, print the summary + if is_reduced: + for msg in msgs: + if msg.metadata and msg.metadata.get("__summary__"): + print(f"\tSummary: {msg.content}") + break + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_single_agent.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_single_agent.py new file mode 100644 index 000000000000..11236749ba8a --- /dev/null +++ b/python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_single_agent.py @@ -0,0 +1,70 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging + +from semantic_kernel.agents import ( + ChatCompletionAgent, +) +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents import ( + ChatHistorySummarizationReducer, +) + +""" +The following sample demonstrates how to implement a truncation chat +history reducer as part of the Semantic Kernel Agent Framework. For +this sample, a single ChatCompletionAgent is used. 
+""" + + +# Initialize the logger for debugging and information messages +logger = logging.getLogger(__name__) + + +async def main(): + # Setup necessary parameters + reducer_msg_count = 10 + reducer_threshold = 10 + + # Create a summarization reducer + history_summarization_reducer = ChatHistorySummarizationReducer( + service=AzureChatCompletion(), target_count=reducer_msg_count, threshold_count=reducer_threshold + ) + + # Create our agent + agent = ChatCompletionAgent( + name="NumeroTranslator", + instructions="Add one to the latest user number and spell it in Spanish without explanation.", + service=AzureChatCompletion(), + ) + + # Number of messages to simulate + message_count = 50 + for index in range(1, message_count + 1, 2): + # Add user message + history_summarization_reducer.add_user_message(str(index)) + print(f"# User: '{index}'") + + # Attempt reduction + is_reduced = await history_summarization_reducer.reduce() + if is_reduced: + print(f"@ History reduced to {len(history_summarization_reducer.messages)} messages.") + + # Get agent response and store it + response = await agent.get_response(history_summarization_reducer) + history_summarization_reducer.add_message(response) + print(f"# Agent - {response.name}: '{response.content}'") + + print(f"@ Message Count: {len(history_summarization_reducer.messages)}\n") + + # If reduced, print summary if present + if is_reduced: + for msg in history_summarization_reducer.messages: + if msg.metadata and msg.metadata.get("__summary__"): + print(f"\tSummary: {msg.content}") + break + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_agent_chat.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_agent_chat.py new file mode 100644 index 000000000000..b2ae98661fe0 --- /dev/null +++ b/python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_agent_chat.py @@ -0,0 +1,78 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging + +from semantic_kernel.agents import ( + AgentGroupChat, + ChatCompletionAgent, +) +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents import ( + ChatHistoryTruncationReducer, +) + +""" +The following sample demonstrates how to implement a chat history +reducer as part of the Semantic Kernel Agent Framework. For this sample, +the ChatCompletionAgent with an AgentGroupChat is used. The Chat History +Reducer is a Truncation Reducer. View the README for more information on +how to use the reducer and what each parameter does. +""" + + +# Initialize the logger for debugging and information messages +logger = logging.getLogger(__name__) + + +async def main(): + """ + Single-function approach that shows the same chat reducer behavior + while preserving all original logic and code lines (now commented). 
+ """ + + # Setup necessary parameters + reducer_msg_count = 10 + reducer_threshold = 10 + + # Create a summarization reducer and clear its history + history_truncatation_reducer = ChatHistoryTruncationReducer( + target_count=reducer_msg_count, threshold_count=reducer_threshold + ) + history_truncatation_reducer.clear() + + # Create our agent + agent = ChatCompletionAgent( + name="NumeroTranslator", + instructions="Add one to the latest user number and spell it in Spanish without explanation.", + service=AzureChatCompletion(), + ) + + # Create a group chat using the reducer + chat = AgentGroupChat(chat_history=history_truncatation_reducer) + + # Simulate user messages + message_count = 50 # Number of messages to simulate + for index in range(1, message_count, 2): + # Add user message to the chat + await chat.add_chat_message(message=str(index)) + print(f"# User: '{index}'") + + # Attempt to reduce history + is_reduced = await chat.reduce_history() + if is_reduced: + print(f"@ History reduced to {len(history_truncatation_reducer.messages)} messages.") + + # Invoke the agent and display responses + async for message in chat.invoke(agent): + print(f"# {message.role} - {message.name or '*'}: '{message.content}'") + + # Retrieve messages + msgs = [] + async for m in chat.get_chat_messages(agent): + msgs.append(m) + print(f"@ Message Count: {len(msgs)}\n") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_single_agent.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_single_agent.py new file mode 100644 index 000000000000..95a41f9c5b8f --- /dev/null +++ b/python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_single_agent.py @@ -0,0 +1,63 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging + +from semantic_kernel.agents import ( + ChatCompletionAgent, +) +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents import ( + ChatHistoryTruncationReducer, +) + +""" +The following sample demonstrates how to implement a truncation chat +history reducer as part of the Semantic Kernel Agent Framework. For +this sample, a single ChatCompletionAgent is used. 
+""" + + +# Initialize the logger for debugging and information messages +logger = logging.getLogger(__name__) + + +async def main(): + # Setup necessary parameters + reducer_msg_count = 10 + reducer_threshold = 10 + + # Create a summarization reducer + history_truncation_reducer = ChatHistoryTruncationReducer( + service=AzureChatCompletion(), target_count=reducer_msg_count, threshold_count=reducer_threshold + ) + + # Create our agent + agent = ChatCompletionAgent( + name="NumeroTranslator", + instructions="Add one to the latest user number and spell it in Spanish without explanation.", + service=AzureChatCompletion(), + ) + + # Number of messages to simulate + message_count = 50 + for index in range(1, message_count + 1, 2): + # Add user message + history_truncation_reducer.add_user_message(str(index)) + print(f"# User: '{index}'") + + # Attempt reduction + is_reduced = await history_truncation_reducer.reduce() + if is_reduced: + print(f"@ History reduced to {len(history_truncation_reducer.messages)} messages.") + + # Get agent response and store it + response = await agent.get_response(history_truncation_reducer) + history_truncation_reducer.add_message(response) + print(f"# Agent - {response.name}: '{response.content}'") + + print(f"@ Message Count: {len(history_truncation_reducer.messages)}\n") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_function_termination.py b/python/samples/concepts/agents/chat_completion_function_termination.py deleted file mode 100644 index 38ee6e76d832..000000000000 --- a/python/samples/concepts/agents/chat_completion_function_termination.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -from typing import Annotated - -from semantic_kernel.agents import ChatCompletionAgent -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.filters.auto_function_invocation.auto_function_invocation_context import ( - AutoFunctionInvocationContext, -) -from semantic_kernel.filters.filter_types import FilterTypes -from semantic_kernel.functions.kernel_function_decorator import kernel_function -from semantic_kernel.kernel import Kernel - -################################################################### -# The following sample demonstrates how to configure the auto # -# function invocation filter with use of a ChatCompletionAgent. # -################################################################### - - -# Define the agent name and instructions -HOST_NAME = "Host" -HOST_INSTRUCTIONS = "Answer questions about the menu." 
- - -# Define the auto function invocation filter that will be used by the kernel -async def auto_function_invocation_filter(context: AutoFunctionInvocationContext, next): - """A filter that will be called for each function call in the response.""" - # if we don't call next, it will skip this function, and go to the next one - await next(context) - if context.function.plugin_name == "menu": - context.terminate = True - - -# Define a sample plugin for the sample -class MenuPlugin: - """A sample Menu Plugin used for the concept sample.""" - - @kernel_function(description="Provides a list of specials from the menu.") - def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: - return """ - Special Soup: Clam Chowder - Special Salad: Cobb Salad - Special Drink: Chai Tea - """ - - @kernel_function(description="Provides the price of the requested menu item.") - def get_item_price( - self, menu_item: Annotated[str, "The name of the menu item."] - ) -> Annotated[str, "Returns the price of the menu item."]: - return "$9.99" - - -def _create_kernel_with_chat_completionand_filter(service_id: str) -> Kernel: - """A helper function to create a kernel with a chat completion service and a filter.""" - kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) - kernel.add_filter(FilterTypes.AUTO_FUNCTION_INVOCATION, auto_function_invocation_filter) - kernel.add_plugin(plugin=MenuPlugin(), plugin_name="menu") - return kernel - - -def _write_content(content: ChatMessageContent) -> None: - """Write the content to the console.""" - last_item_type = type(content.items[-1]).__name__ if content.items else "(empty)" - message_content = "" - if isinstance(last_item_type, FunctionCallContent): - message_content = f"tool request = {content.items[-1].function_name}" - elif isinstance(last_item_type, FunctionResultContent): - message_content = f"function result = {content.items[-1].result}" - else: - message_content = str(content.items[-1]) - print(f"[{last_item_type}] {content.role} : '{message_content}'") - - -# A helper method to invoke the agent with the user input -async def invoke_agent(agent: ChatCompletionAgent, input: str, chat_history: ChatHistory) -> None: - """Invoke the agent with the user input.""" - chat_history.add_user_message(input) - print(f"# {AuthorRole.USER}: '{input}'") - - async for content in agent.invoke(chat_history): - if not any(isinstance(item, (FunctionCallContent, FunctionResultContent)) for item in content.items): - chat_history.add_message(content) - _write_content(content) - - -async def main(): - service_id = "agent" - - # Create the kernel used by the chat completion agent - kernel = _create_kernel_with_chat_completionand_filter(service_id=service_id) - - settings = kernel.get_prompt_execution_settings_from_service_id(service_id=service_id) - - # Configure the function choice behavior to auto invoke kernel functions - settings.function_choice_behavior = FunctionChoiceBehavior.Auto() - - # Create the agent - agent = ChatCompletionAgent( - service_id=service_id, - kernel=kernel, - name=HOST_NAME, - instructions=HOST_INSTRUCTIONS, - execution_settings=settings, - ) - - # Define the chat history - chat = ChatHistory() - - # Respond to user input - await invoke_agent(agent=agent, input="Hello", chat_history=chat) - await invoke_agent(agent=agent, input="What is the special soup?", chat_history=chat) - await invoke_agent(agent=agent, input="What is the special drink?", chat_history=chat) - await invoke_agent(agent=agent, input="Thank you", 
chat_history=chat) - - print("================================") - print("CHAT HISTORY") - print("================================") - - # Print out the chat history to view the different types of messages - for message in chat.messages: - _write_content(message) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_history_reducer.py b/python/samples/concepts/agents/chat_completion_history_reducer.py deleted file mode 100644 index 1cdffefe7b78..000000000000 --- a/python/samples/concepts/agents/chat_completion_history_reducer.py +++ /dev/null @@ -1,298 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import logging -from typing import TYPE_CHECKING - -from semantic_kernel.agents import ( - AgentGroupChat, - ChatCompletionAgent, -) -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion, OpenAIChatCompletion -from semantic_kernel.contents import AuthorRole, ChatHistory, ChatMessageContent -from semantic_kernel.contents.history_reducer.chat_history_summarization_reducer import ChatHistorySummarizationReducer -from semantic_kernel.contents.history_reducer.chat_history_truncation_reducer import ChatHistoryTruncationReducer -from semantic_kernel.kernel import Kernel - -if TYPE_CHECKING: - from semantic_kernel.contents.history_reducer.chat_history_reducer import ChatHistoryReducer - -##################################################################### -# The following sample demonstrates how to implement a chat history # -# reducer as part of the Semantic Kernel Agent Framework. It # -# covers two types of reducers: summarization reduction and a # -# truncation reduction. For this sample, the ChatCompletionAgent # -# is used. # -##################################################################### - - -# Initialize the logger for debugging and information messages -logger = logging.getLogger(__name__) - -# Flag to determine whether to use Azure OpenAI services or OpenAI -# Set this to True if using Azure OpenAI (requires appropriate configuration) -use_azure_openai = True - - -# Helper function to create and configure a Kernel with the desired chat completion service -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - """A helper function to create a kernel with a chat completion service.""" - kernel = Kernel() - if use_azure_openai: - # Add Azure OpenAI service to the kernel - kernel.add_service(AzureChatCompletion(service_id=service_id)) - else: - # Add OpenAI service to the kernel - kernel.add_service(OpenAIChatCompletion(service_id=service_id)) - return kernel - - -class HistoryReducerExample: - """ - Demonstrates how to create a ChatCompletionAgent with a ChatHistoryReducer - (either truncation or summarization) and how to invoke that agent - multiple times while applying the history reduction. - """ - - # Agent-specific settings - TRANSLATOR_NAME = "NumeroTranslator" # Name of the agent - TRANSLATOR_INSTRUCTIONS = "Add one to the latest user number and spell it in Spanish without explanation." - - def create_truncating_agent( - self, reducer_msg_count: int, reducer_threshold: int - ) -> tuple[ChatCompletionAgent, "ChatHistoryReducer"]: - """ - Creates a ChatCompletionAgent with a truncation-based history reducer. - - Parameters: - - reducer_msg_count: Target number of messages to retain after truncation. - - reducer_threshold: Threshold number of messages to trigger truncation. - - Returns: - - A configured ChatCompletionAgent instance with truncation enabled. 
- """ - truncation_reducer = ChatHistoryTruncationReducer( - target_count=reducer_msg_count, threshold_count=reducer_threshold - ) - - return ChatCompletionAgent( - name=self.TRANSLATOR_NAME, - instructions=self.TRANSLATOR_INSTRUCTIONS, - kernel=_create_kernel_with_chat_completion("truncate_agent"), - history_reducer=truncation_reducer, - ), truncation_reducer - - def create_summarizing_agent( - self, reducer_msg_count: int, reducer_threshold: int - ) -> tuple[ChatCompletionAgent, "ChatHistoryReducer"]: - """ - Creates a ChatCompletionAgent with a summarization-based history reducer. - - Parameters: - - reducer_msg_count: Target number of messages to retain after summarization. - - reducer_threshold: Threshold number of messages to trigger summarization. - - Returns: - - A configured ChatCompletionAgent instance with summarization enabled. - """ - kernel = _create_kernel_with_chat_completion("summarize_agent") - - summarization_reducer = ChatHistorySummarizationReducer( - service=kernel.get_service(service_id="summarize_agent"), - target_count=reducer_msg_count, - threshold_count=reducer_threshold, - ) - - return ChatCompletionAgent( - name=self.TRANSLATOR_NAME, - instructions=self.TRANSLATOR_INSTRUCTIONS, - kernel=kernel, - history_reducer=summarization_reducer, - ), summarization_reducer - - async def invoke_agent(self, agent: ChatCompletionAgent, chat_history: ChatHistory, message_count: int): - """ - Demonstrates agent invocation with direct history management and reduction. - - Parameters: - - agent: The ChatCompletionAgent to invoke. - - message_count: The number of messages to simulate in the conversation. - """ - - index = 1 - while index <= message_count: - # Provide user input - user_message = ChatMessageContent(role=AuthorRole.USER, content=str(index)) - chat_history.messages.append(user_message) - print(f"# User: '{index}'") - - # Attempt history reduction if a reducer is present - is_reduced = False - if agent.history_reducer is not None: - reduced = await agent.history_reducer.reduce() - if reduced is not None: - chat_history.messages.clear() - chat_history.messages.extend(reduced) - is_reduced = True - print("@ (History was reduced!)") - - # Invoke the agent and display its response - async for response in agent.invoke(chat_history): - chat_history.messages.append(response) - print(f"# {response.role} - {response.name}: '{response.content}'") - - # The index is incremented by 2 because the agent is told to: - # "Add one to the latest user number and spell it in Spanish without explanation." - # The user sends 1, 3, 5, etc., and the agent responds with 2, 4, 6, etc. (in Spanish) - index += 2 - print(f"@ Message Count: {len(chat_history.messages)}\n") - - # If history was reduced, and the chat history is of type `ChatHistorySummarizationReducer`, - # print summaries as it will contain the __summary__ metadata key. - if is_reduced and isinstance(chat_history, ChatHistorySummarizationReducer): - self._print_summaries_from_front(chat_history.messages) - - async def invoke_chat(self, agent: ChatCompletionAgent, message_count: int): - """ - Demonstrates agent invocation within a group chat. - - Parameters: - - agent: The ChatCompletionAgent to invoke. - - message_count: The number of messages to simulate in the conversation. 
- """ - chat = AgentGroupChat() # Initialize a new group chat - last_history_count = 0 - - index = 1 - while index <= message_count: - # Add user message to the chat - user_msg = ChatMessageContent(role=AuthorRole.USER, content=str(index)) - await chat.add_chat_message(user_msg) - print(f"# User: '{index}'") - - # Invoke the agent and display its response - async for message in chat.invoke(agent): - print(f"# {message.role} - {message.name or '*'}: '{message.content}'") - - # The index is incremented by 2 because the agent is told to: - # "Add one to the latest user number and spell it in Spanish without explanation." - # The user sends 1, 3, 5, etc., and the agent responds with 2, 4, 6, etc. (in Spanish) - index += 2 - - # Retrieve chat messages in descending order (newest first) - msgs = [] - async for m in chat.get_chat_messages(agent): - msgs.append(m) - - print(f"@ Message Count: {len(msgs)}\n") - - # Check for reduction in message count and print summaries - if len(msgs) < last_history_count: - self._print_summaries_from_back(msgs) - - last_history_count = len(msgs) - - def _print_summaries_from_front(self, messages: list[ChatMessageContent]): - """ - Prints summaries from the front of the message list. - - Parameters: - - messages: List of chat messages to process. - """ - summary_index = 0 - while summary_index < len(messages): - msg = messages[summary_index] - if msg.metadata and msg.metadata.get("__summary__"): - print(f"\tSummary: {msg.content}") - summary_index += 1 - else: - break - - def _print_summaries_from_back(self, messages: list[ChatMessageContent]): - """ - Prints summaries from the back of the message list. - - Parameters: - - messages: List of chat messages to process. - """ - summary_index = len(messages) - 1 - while summary_index >= 0: - msg = messages[summary_index] - if msg.metadata and msg.metadata.get("__summary__"): - print(f"\tSummary: {msg.content}") - summary_index -= 1 - else: - break - - -# Main entry point for the script -async def main(): - # Initialize the example class - example = HistoryReducerExample() - - # Demonstrate truncation-based reduction - trunc_agent, history_reducer = example.create_truncating_agent( - # reducer_msg_count: - # Purpose: Defines the target number of messages to retain after applying truncation or summarization. - # What it controls: This parameter determines how much of the most recent conversation history - # is preserved while discarding or summarizing older messages. - # Why change it?: - # - Smaller values: Use when memory constraints are tight, or the assistant only needs a brief history - # to maintain context. - # - Larger values: Use when retaining more conversational context is critical for accurate responses - # or maintaining a richer dialogue. - reducer_msg_count=10, - # reducer_threshold: - # Purpose: Acts as a buffer to avoid reducing history prematurely when the current message count exceeds - # reducer_msg_count by a small margin. - # What it controls: Helps ensure that essential paired messages (like a user query and the assistant’s response) - # are not "orphaned" or lost during truncation or summarization. - # Why change it?: - # - Smaller values: Use when you want stricter reduction criteria and are okay with possibly cutting older - # pairs of messages sooner. - # - Larger values: Use when you want to minimize the risk of cutting a critical part of the conversation, - # especially for sensitive interactions like API function calls or complex responses. 
- reducer_threshold=10, - ) - # print("===TruncatedAgentReduction Demo===") - # await example.invoke_agent(trunc_agent, chat_history=history_reducer, message_count=50) - - # Demonstrate summarization-based reduction - sum_agent, history_reducer = example.create_summarizing_agent( - # Same configuration for summarization-based reduction - reducer_msg_count=10, # Target number of messages to retain - reducer_threshold=10, # Buffer to avoid premature reduction - ) - print("\n===SummarizedAgentReduction Demo===") - await example.invoke_agent(sum_agent, chat_history=history_reducer, message_count=50) - - # Demonstrate group chat with truncation - print("\n===TruncatedChatReduction Demo===") - trunc_agent.history_reducer.messages.clear() - await example.invoke_chat(trunc_agent, message_count=50) - - # Demonstrate group chat with summarization - print("\n===SummarizedChatReduction Demo===") - sum_agent.history_reducer.messages.clear() - await example.invoke_chat(sum_agent, message_count=50) - - -# Interaction between reducer_msg_count and reducer_threshold: -# The combination of these values determines when reduction occurs and how much history is kept. -# Example: -# If reducer_msg_count = 10 and reducer_threshold = 5, history will not be truncated until the total message count -# exceeds 15. This approach ensures flexibility in retaining conversational context while still adhering to memory -# constraints. - -# Recommendations: -# - Adjust for performance: Use a lower reducer_msg_count in environments with limited memory or when the assistant -# needs faster processing times. -# - Context sensitivity: Increase reducer_msg_count and reducer_threshold in use cases where maintaining continuity -# across multiple interactions is essential (e.g., multi-turn conversations or complex workflows). -# - Experiment: Start with the default values (10 and 10) and refine based on your application's behavior and the -# assistant's response quality. 
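The comment block above explains how `reducer_msg_count` (target_count) and `reducer_threshold` (threshold_count) interact to decide when history reduction fires. A minimal standalone sketch of that trigger behavior, using only the `ChatHistoryTruncationReducer` API shown in this sample, may help make it concrete; the counts (10 and 5) and the helper name are illustrative, and the exact message index at which `reduce()` stops returning `None` is governed by the reducer's internal logic, so the printed value is indicative rather than guaranteed.

```python
import asyncio

from semantic_kernel.contents import AuthorRole, ChatMessageContent
from semantic_kernel.contents.history_reducer.chat_history_truncation_reducer import ChatHistoryTruncationReducer


async def demo_truncation_trigger() -> None:
    # Illustrative values: with target_count=10 and threshold_count=5, reduce()
    # is expected to return None until the history exceeds 15 messages, after
    # which roughly the 10 most recent messages are retained.
    reducer = ChatHistoryTruncationReducer(target_count=10, threshold_count=5)

    for i in range(20):
        reducer.messages.append(ChatMessageContent(role=AuthorRole.USER, content=str(i)))
        reduced = await reducer.reduce()
        if reduced is not None:
            # Apply the reduction, mirroring the clear-and-extend pattern used in the sample above
            reducer.messages.clear()
            reducer.messages.extend(reduced)
            print(f"Reduced after message {i + 1}: {len(reducer.messages)} messages kept")


if __name__ == "__main__":
    asyncio.run(demo_truncation_trigger())
```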
- - -# Execute the main function if the script is run directly -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat_agents.py b/python/samples/concepts/agents/mixed_chat/mixed_chat_agents.py similarity index 66% rename from python/samples/concepts/agents/mixed_chat_agents.py rename to python/samples/concepts/agents/mixed_chat/mixed_chat_agents.py index 493f5e70f457..b975857e97e2 100644 --- a/python/samples/concepts/agents/mixed_chat_agents.py +++ b/python/samples/concepts/agents/mixed_chat/mixed_chat_agents.py @@ -3,10 +3,9 @@ import asyncio from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.open_ai import OpenAIAssistantAgent +from semantic_kernel.agents.open_ai import AzureAssistantAgent from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.kernel import Kernel @@ -52,29 +51,38 @@ def _create_kernel_with_chat_completion(service_id: str) -> Kernel: async def main(): + agent_reviewer = ChatCompletionAgent( + kernel=_create_kernel_with_chat_completion("artdirector"), + name=REVIEWER_NAME, + instructions=REVIEWER_INSTRUCTIONS, + ) + + # To create an AzureAssistantAgent for Azure OpenAI, use the following: + client, model = AzureAssistantAgent.setup_resources() + + # Create the assistant definition + definition = await client.beta.assistants.create( + model=model, + name=COPYWRITER_NAME, + instructions=COPYWRITER_INSTRUCTIONS, + ) + + # Create the AzureAssistantAgent instance using the client and the assistant definition + agent_writer = AzureAssistantAgent( + client=client, + definition=definition, + ) + + # Create the AgentGroupChat object and specify the list of agents along with the termination strategy + chat = AgentGroupChat( + agents=[agent_writer, agent_reviewer], + termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), + ) + + input = "a slogan for a new line of electric cars." + try: - agent_reviewer = ChatCompletionAgent( - service_id="artdirector", - kernel=_create_kernel_with_chat_completion("artdirector"), - name=REVIEWER_NAME, - instructions=REVIEWER_INSTRUCTIONS, - ) - - agent_writer = await OpenAIAssistantAgent.create( - service_id="copywriter", - kernel=Kernel(), - name=COPYWRITER_NAME, - instructions=COPYWRITER_INSTRUCTIONS, - ) - - chat = AgentGroupChat( - agents=[agent_writer, agent_reviewer], - termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), - ) - - input = "a slogan for a new line of electric cars." 
- - await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) + await chat.add_chat_message(input) print(f"# {AuthorRole.USER}: '{input}'") async for content in chat.invoke(): @@ -82,7 +90,7 @@ async def main(): print(f"# IS COMPLETE: {chat.is_complete}") finally: - await agent_writer.delete() + await client.beta.assistants.delete(agent_writer.id) if __name__ == "__main__": diff --git a/python/samples/concepts/agents/mixed_chat_agents_plugins.py b/python/samples/concepts/agents/mixed_chat/mixed_chat_agents_plugins.py similarity index 69% rename from python/samples/concepts/agents/mixed_chat_agents_plugins.py rename to python/samples/concepts/agents/mixed_chat/mixed_chat_agents_plugins.py index 6df7f88cac43..02c093cb4a0f 100644 --- a/python/samples/concepts/agents/mixed_chat_agents_plugins.py +++ b/python/samples/concepts/agents/mixed_chat/mixed_chat_agents_plugins.py @@ -4,12 +4,12 @@ from typing import Annotated from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.open_ai import OpenAIAssistantAgent +from semantic_kernel.agents.open_ai import AzureAssistantAgent from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.functions.kernel_arguments import KernelArguments from semantic_kernel.functions.kernel_function_decorator import kernel_function from semantic_kernel.kernel import Kernel @@ -76,34 +76,41 @@ def _create_kernel_with_chat_completion(service_id: str) -> Kernel: async def main(): + kernel = _create_kernel_with_chat_completion("artdirector") + settings = kernel.get_prompt_execution_settings_from_service_id(service_id="artdirector") + # Configure the function choice behavior to auto invoke kernel functions + settings.function_choice_behavior = FunctionChoiceBehavior.Auto() + agent_reviewer = ChatCompletionAgent( + kernel=kernel, + name=REVIEWER_NAME, + instructions=REVIEWER_INSTRUCTIONS, + arguments=KernelArguments(settings=settings), + ) + + # Create the Assistant Agent using Azure OpenAI resources + client, model = AzureAssistantAgent.setup_resources() + + # Create the assistant definition + definition = await client.beta.assistants.create( + model=model, + name=COPYWRITER_NAME, + instructions=COPYWRITER_INSTRUCTIONS, + ) + + # Create the AzureAssistantAgent instance using the client and the assistant definition + agent_writer = AzureAssistantAgent( + client=client, + definition=definition, + ) + + chat = AgentGroupChat( + agents=[agent_writer, agent_reviewer], + termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), + ) + + input = "Write copy based on the food specials." 
try: - kernel = _create_kernel_with_chat_completion("artdirector") - settings = kernel.get_prompt_execution_settings_from_service_id(service_id="artdirector") - # Configure the function choice behavior to auto invoke kernel functions - settings.function_choice_behavior = FunctionChoiceBehavior.Auto() - agent_reviewer = ChatCompletionAgent( - service_id="artdirector", - kernel=kernel, - name=REVIEWER_NAME, - instructions=REVIEWER_INSTRUCTIONS, - execution_settings=settings, - ) - - agent_writer = await OpenAIAssistantAgent.create( - service_id="copywriter", - kernel=Kernel(), - name=COPYWRITER_NAME, - instructions=COPYWRITER_INSTRUCTIONS, - ) - - chat = AgentGroupChat( - agents=[agent_writer, agent_reviewer], - termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), - ) - - input = "Write copy based on the food specials." - - await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) + await chat.add_chat_message(input) print(f"# {AuthorRole.USER}: '{input}'") async for content in chat.invoke(): @@ -111,7 +118,7 @@ async def main(): print(f"# IS COMPLETE: {chat.is_complete}") finally: - await agent_writer.delete() + await agent_writer.client.beta.assistants.delete(agent_writer.id) if __name__ == "__main__": diff --git a/python/samples/concepts/agents/mixed_chat/mixed_chat_files.py b/python/samples/concepts/agents/mixed_chat/mixed_chat_files.py new file mode 100644 index 000000000000..32922723ad34 --- /dev/null +++ b/python/samples/concepts/agents/mixed_chat/mixed_chat_files.py @@ -0,0 +1,109 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os + +from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent +from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +from semantic_kernel.contents.annotation_content import AnnotationContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +""" +The following sample demonstrates how to create an OpenAI +assistant using either Azure OpenAI or OpenAI, a chat completion +agent and have them participate in a group chat working on +an uploaded file. 
+""" + + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel + + +async def main(): + file_path = os.path.join( + os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), + "resources", + "mixed_chat_files", + "user-context.txt", + ) + + # Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + # If desired, create using OpenAI resources + # client, model = OpenAIAssistantAgent.setup_resources() + + # Load the text file as a FileObject + with open(file_path, "rb") as file: + file = await client.files.create(file=file, purpose="assistants") + + code_interpreter_tool, code_interpreter_tool_resource = AzureAssistantAgent.configure_code_interpreter_tool( + file_ids=file.id + ) + + definition = await client.beta.assistants.create( + model=model, + instructions="Create charts as requested without explanation.", + name="ChartMaker", + tools=code_interpreter_tool, + tool_resources=code_interpreter_tool_resource, + ) + + # Create the AzureAssistantAgent instance using the client and the assistant definition + analyst_agent = AzureAssistantAgent( + client=client, + definition=definition, + ) + + service_id = "summary" + summary_agent = ChatCompletionAgent( + kernel=_create_kernel_with_chat_completion(service_id=service_id), + instructions="Summarize the entire conversation for the user in natural language.", + name="SummaryAgent", + ) + + # Create the AgentGroupChat object, which will manage the chat between the agents + # We don't always need to specify the agents in the chat up front + # As shown below, calling `chat.invoke(agent=)` will automatically add the + # agent to the chat + chat = AgentGroupChat() + + try: + user_and_agent_inputs = ( + ( + "Create a tab delimited file report of the ordered (descending) frequency distribution of " + "words in the file 'user-context.txt' for any words used more than once.", + analyst_agent, + ), + (None, summary_agent), + ) + + for input, agent in user_and_agent_inputs: + if input: + await chat.add_chat_message(input) + print(f"# {AuthorRole.USER}: '{input}'") + + async for content in chat.invoke(agent=agent): + print(f"# {content.role} - {content.name or '*'}: '{content.content}'") + if len(content.items) > 0: + for item in content.items: + if ( + isinstance(agent, AzureAssistantAgent) + and isinstance(item, AnnotationContent) + and item.file_id + ): + print(f"\n`{item.quote}` => {item.file_id}") + response_content = await agent.client.files.content(item.file_id) + print(response_content.text) + finally: + await client.files.delete(file_id=file.id) + await client.beta.assistants.delete(analyst_agent.id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat/mixed_chat_images.py b/python/samples/concepts/agents/mixed_chat/mixed_chat_images.py new file mode 100644 index 000000000000..e32d22a3903e --- /dev/null +++ b/python/samples/concepts/agents/mixed_chat/mixed_chat_images.py @@ -0,0 +1,106 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio + +from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent +from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +from semantic_kernel.contents.annotation_content import AnnotationContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +""" +The following sample demonstrates how to create an OpenAI +assistant using either Azure OpenAI or OpenAI, a chat completion +agent and have them participate in a group chat working with +image content. +""" + + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel + + +async def main(): + # Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + # Get the code interpreter tool and resources + code_interpreter_tool, code_interpreter_resources = AzureAssistantAgent.configure_code_interpreter_tool() + + # Create the assistant definition + definition = await client.beta.assistants.create( + model=model, + name="Analyst", + instructions="Create charts as requested without explanation", + tools=code_interpreter_tool, + tool_resources=code_interpreter_resources, + ) + + # Create the AzureAssistantAgent instance using the client and the assistant definition + analyst_agent = AzureAssistantAgent( + client=client, + definition=definition, + ) + + service_id = "summary" + summary_agent = ChatCompletionAgent( + kernel=_create_kernel_with_chat_completion(service_id=service_id), + instructions="Summarize the entire conversation for the user in natural language.", + name="Summarizer", + ) + + # Create the AgentGroupChat object, which will manage the chat between the agents + # We don't always need to specify the agents in the chat up front + # As shown below, calling `chat.invoke(agent=)` will automatically add the + # agent to the chat + chat = AgentGroupChat() + + try: + user_and_agent_inputs = ( + ( + """ + Graph the percentage of storm events by state using a pie chart: + + State, StormCount + TEXAS, 4701 + KANSAS, 3166 + IOWA, 2337 + ILLINOIS, 2022 + MISSOURI, 2016 + GEORGIA, 1983 + MINNESOTA, 1881 + WISCONSIN, 1850 + NEBRASKA, 1766 + NEW YORK, 1750 + """.strip(), + analyst_agent, + ), + (None, summary_agent), + ) + + for input, agent in user_and_agent_inputs: + if input: + await chat.add_chat_message(input) + print(f"# {AuthorRole.USER}: '{input}'") + + async for content in chat.invoke(agent=agent): + print(f"# {content.role} - {content.name or '*'}: '{content.content}'") + if len(content.items) > 0: + for item in content.items: + if ( + isinstance(agent, AzureAssistantAgent) + and isinstance(item, AnnotationContent) + and item.file_id + ): + print(f"\n`{item.quote}` => {item.file_id}") + response_content = await agent.client.files.content(item.file_id) + print(response_content.text) + finally: + await client.beta.assistants.delete(analyst_agent.id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat/mixed_chat_reset.py b/python/samples/concepts/agents/mixed_chat/mixed_chat_reset.py new file mode 100644 index 000000000000..9e707686500a --- /dev/null +++ b/python/samples/concepts/agents/mixed_chat/mixed_chat_reset.py @@ -0,0 +1,103 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +from typing import TYPE_CHECKING + +from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent +from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent +from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +if TYPE_CHECKING: + pass + +""" +The following sample demonstrates how to create an OpenAI +assistant using either Azure OpenAI or OpenAI, a chat completion +agent and have them participate in a group chat to work towards +the user's requirement. It also demonstrates how the underlying +agent reset method is used to clear the current state of the chat +""" + + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel + + +async def main(): + # First create the ChatCompletionAgent + chat_agent = ChatCompletionAgent( + kernel=_create_kernel_with_chat_completion("chat"), + name="chat_agent", + instructions=""" + The user may either provide information or query on information previously provided. + If the query does not correspond with information provided, inform the user that their query + cannot be answered. + """, + ) + + # Next, we will create the AzureAssistantAgent + + # Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + # Create the assistant definition + definition = await client.beta.assistants.create( + model=model, + name="copywriter", + instructions=""" + The user may either provide information or query on information previously provided. + If the query does not correspond with information provided, inform the user that their query + cannot be answered. 
+ """, + ) + + # Create the AzureAssistantAgent instance using the client and the assistant definition + assistant_agent = AzureAssistantAgent( + client=client, + definition=definition, + ) + + # Create the AgentGroupChat object, which will manage the chat between the agents + # We don't always need to specify the agents in the chat up front + # As shown below, calling `chat.invoke(agent=)` will automatically add the + # agent to the chat + chat = AgentGroupChat() + + try: + user_inputs = [ + "What is my favorite color?", + "I like green.", + "What is my favorite color?", + "[RESET]", + "What is my favorite color?", + ] + + for user_input in user_inputs: + # Check for reset indicator + if user_input == "[RESET]": + print("\nResetting chat...") + await chat.reset() + continue + + # First agent (assistant_agent) receives the user input + await chat.add_chat_message(user_input) + print(f"\n{AuthorRole.USER}: '{user_input}'") + async for message in chat.invoke(agent=assistant_agent): + if message.content is not None: + print(f"\n# {message.role} - {message.name or '*'}: '{message.content}'") + + # Second agent (chat_agent) just responds without new user input + async for message in chat.invoke(agent=chat_agent): + if message.content is not None: + print(f"\n# {message.role} - {message.name or '*'}: '{message.content}'") + finally: + await chat.reset() + await assistant_agent.client.beta.assistants.delete(assistant_agent.id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat/mixed_chat_streaming.py b/python/samples/concepts/agents/mixed_chat/mixed_chat_streaming.py new file mode 100644 index 000000000000..9a0983099ff0 --- /dev/null +++ b/python/samples/concepts/agents/mixed_chat/mixed_chat_streaming.py @@ -0,0 +1,99 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent +from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI, a chat completion # +# agent and have them participate in a group chat to work towards # +# the user's requirement. # +##################################################################### + + +class ApprovalTerminationStrategy(TerminationStrategy): + """A strategy for determining when an agent should terminate.""" + + async def should_agent_terminate(self, agent, history): + """Check if the agent should terminate.""" + return "approved" in history[-1].content.lower() + + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel + + +async def main(): + # First create a ChatCompletionAgent + agent_reviewer = ChatCompletionAgent( + kernel=_create_kernel_with_chat_completion("artdirector"), + name="ArtDirector", + instructions=""" + You are an art director who has opinions about copywriting born of a love for David Ogilvy. + The goal is to determine if the given copy is acceptable to print. + If so, state that it is approved. 
Only include the word "approved" if it is so. + If not, provide insight on how to refine suggested copy without example. + """, + ) + + # Next, we will create the AzureAssistantAgent + + # Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + # Create the assistant definition + definition = await client.beta.assistants.create( + model=model, + name="CopyWriter", + instructions=""" + You are a copywriter with ten years of experience and are known for brevity and a dry humor. + The goal is to refine and decide on the single best copy as an expert in the field. + Only provide a single proposal per response. + You're laser focused on the goal at hand. + Don't waste time with chit chat. + Consider suggestions when refining an idea. + """, + ) + + # Create the AzureAssistantAgent instance using the client and the assistant definition + agent_writer = AzureAssistantAgent( + client=client, + definition=definition, + ) + + # Create the AgentGroupChat object, which will manage the chat between the agents + chat = AgentGroupChat( + agents=[agent_writer, agent_reviewer], + termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), + ) + + input = "a slogan for a new line of electric cars." + + try: + await chat.add_chat_message(input) + print(f"# {AuthorRole.USER}: '{input}'") + + last_agent = None + async for message in chat.invoke_stream(): + if message.content is not None: + if last_agent != message.name: + print(f"\n# {message.name}: ", end="", flush=True) + last_agent = message.name + print(f"{message.content}", end="", flush=True) + + print() + print(f"# IS COMPLETE: {chat.is_complete}") + finally: + await agent_writer.client.beta.assistants.delete(agent_writer.id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat_files.py b/python/samples/concepts/agents/mixed_chat_files.py deleted file mode 100644 index b5d21c3fd09f..000000000000 --- a/python/samples/concepts/agents/mixed_chat_files.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import os - -from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.open_ai import OpenAIAssistantAgent -from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion -from semantic_kernel.contents.annotation_content import AnnotationContent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -##################################################################### -# The following sample demonstrates how to create an OpenAI # -# assistant using either Azure OpenAI or OpenAI, a chat completion # -# agent and have them participate in a group chat working on # -# an uploaded file. # -##################################################################### - - -SUMMARY_INSTRUCTIONS = "Summarize the entire conversation for the user in natural language." 
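The `ApprovalTerminationStrategy` used in the streaming group-chat sample above ends the conversation as soon as the reviewer's last message contains "approved", with `maximum_iterations=10` bounding the exchange if approval never arrives. As a hedged illustration only (the class name and the extra "rejected" keyword are hypothetical; the `should_agent_terminate` signature comes from the samples), a variant that also stops on an explicit rejection might look like this:

```python
from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy


class ApprovalOrRejectionTerminationStrategy(TerminationStrategy):
    """Hypothetical variant: stop when the last message signals approval or rejection."""

    async def should_agent_terminate(self, agent, history):
        if not history:
            return False
        last = (history[-1].content or "").lower()
        return "approved" in last or "rejected" in last
```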
- - -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) - return kernel - - -async def invoke_agent( - chat: AgentGroupChat, agent: ChatCompletionAgent | OpenAIAssistantAgent, input: str | None = None -) -> None: - """Invoke the agent with the user input.""" - if input: - await chat.add_chat_message(message=ChatMessageContent(role=AuthorRole.USER, content=input)) - print(f"# {AuthorRole.USER}: '{input}'") - - async for content in chat.invoke(agent=agent): - print(f"# {content.role} - {content.name or '*'}: '{content.content}'") - if len(content.items) > 0: - for item in content.items: - if isinstance(item, AnnotationContent): - print(f"\n`{item.quote}` => {item.file_id}") - response_content = await agent.client.files.content(item.file_id) - print(response_content.text) - - -async def main(): - try: - file_path = os.path.join( - os.path.dirname(os.path.dirname(os.path.realpath(__file__))), - "resources", - "mixed_chat_files", - "user-context.txt", - ) - - analyst_agent = await OpenAIAssistantAgent.create( - service_id="analyst", - kernel=Kernel(), - enable_code_interpreter=True, - code_interpreter_filenames=[file_path], - name="AnalystAgent", - ) - - service_id = "summary" - summary_agent = ChatCompletionAgent( - service_id=service_id, - kernel=_create_kernel_with_chat_completion(service_id=service_id), - instructions=SUMMARY_INSTRUCTIONS, - name="SummaryAgent", - ) - - chat = AgentGroupChat() - - await invoke_agent( - chat=chat, - agent=analyst_agent, - input=""" - Create a tab delimited file report of the ordered (descending) frequency distribution - of words in the file 'user-context.txt' for any words used more than once. - """, - ) - await invoke_agent(chat=chat, agent=summary_agent) - finally: - if analyst_agent is not None: - [await analyst_agent.delete_file(file_id=file_id) for file_id in analyst_agent.code_interpreter_file_ids] - await analyst_agent.delete() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat_reset.py b/python/samples/concepts/agents/mixed_chat_reset.py deleted file mode 100644 index 2480358ac4da..000000000000 --- a/python/samples/concepts/agents/mixed_chat_reset.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -from typing import TYPE_CHECKING - -from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.open_ai import OpenAIAssistantAgent -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -if TYPE_CHECKING: - from semantic_kernel.agents.agent import Agent - -##################################################################### -# The following sample demonstrates how to create an OpenAI # -# assistant using either Azure OpenAI or OpenAI, a chat completion # -# agent and have them participate in a group chat to work towards # -# the user's requirement. It also demonstrates how the underlying # -# agent reset method is used to clear the current state of the chat # -##################################################################### - -INSTRUCTIONS = """ -The user may either provide information or query on information previously provided. 
-If the query does not correspond with information provided, inform the user that their query cannot be answered. -""" - - -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) - return kernel - - -async def main(): - try: - assistant_agent = await OpenAIAssistantAgent.create( - service_id="copywriter", - kernel=Kernel(), - name=f"{OpenAIAssistantAgent.__name__}", - instructions=INSTRUCTIONS, - ) - - chat_agent = ChatCompletionAgent( - service_id="chat", - kernel=_create_kernel_with_chat_completion("chat"), - name=f"{ChatCompletionAgent.__name__}", - instructions=INSTRUCTIONS, - ) - - chat = AgentGroupChat() - - async def invoke_agent(agent: "Agent", input: str | None = None): - if input is not None: - await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) - print(f"\n{AuthorRole.USER}: '{input}'") - - async for message in chat.invoke(agent=agent): - if message.content is not None: - print(f"\n# {message.role} - {message.name or '*'}: '{message.content}'") - - await invoke_agent(agent=assistant_agent, input="What is my favorite color?") - await invoke_agent(agent=chat_agent) - - await invoke_agent(agent=assistant_agent, input="I like green.") - await invoke_agent(agent=chat_agent) - - await invoke_agent(agent=assistant_agent, input="What is my favorite color?") - await invoke_agent(agent=chat_agent) - - print("\nResetting chat...") - await chat.reset() - - await invoke_agent(agent=assistant_agent, input="What is my favorite color?") - await invoke_agent(agent=chat_agent) - finally: - await chat.reset() - await assistant_agent.delete() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat_streaming.py b/python/samples/concepts/agents/mixed_chat_streaming.py deleted file mode 100644 index 3aac54f3eb45..000000000000 --- a/python/samples/concepts/agents/mixed_chat_streaming.py +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.open_ai import OpenAIAssistantAgent -from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -##################################################################### -# The following sample demonstrates how to create an OpenAI # -# assistant using either Azure OpenAI or OpenAI, a chat completion # -# agent and have them participate in a group chat to work towards # -# the user's requirement. # -##################################################################### - - -class ApprovalTerminationStrategy(TerminationStrategy): - """A strategy for determining when an agent should terminate.""" - - async def should_agent_terminate(self, agent, history): - """Check if the agent should terminate.""" - return "approved" in history[-1].content.lower() - - -REVIEWER_NAME = "ArtDirector" -REVIEWER_INSTRUCTIONS = """ -You are an art director who has opinions about copywriting born of a love for David Ogilvy. -The goal is to determine if the given copy is acceptable to print. -If so, state that it is approved. Only include the word "approved" if it is so. 
-If not, provide insight on how to refine suggested copy without example. -""" - -COPYWRITER_NAME = "CopyWriter" -COPYWRITER_INSTRUCTIONS = """ -You are a copywriter with ten years of experience and are known for brevity and a dry humor. -The goal is to refine and decide on the single best copy as an expert in the field. -Only provide a single proposal per response. -You're laser focused on the goal at hand. -Don't waste time with chit chat. -Consider suggestions when refining an idea. -""" - - -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) - return kernel - - -async def main(): - try: - agent_reviewer = ChatCompletionAgent( - service_id="artdirector", - kernel=_create_kernel_with_chat_completion("artdirector"), - name=REVIEWER_NAME, - instructions=REVIEWER_INSTRUCTIONS, - ) - - agent_writer = await OpenAIAssistantAgent.create( - service_id="copywriter", - kernel=Kernel(), - name=COPYWRITER_NAME, - instructions=COPYWRITER_INSTRUCTIONS, - ) - - chat = AgentGroupChat( - agents=[agent_writer, agent_reviewer], - termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), - ) - - input = "a slogan for a new line of electric cars." - - await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) - print(f"# {AuthorRole.USER}: '{input}'") - - last_agent = None - async for message in chat.invoke_stream(): - if message.content is not None: - if last_agent != message.name: - print(f"\n# {message.name}: ", end="", flush=True) - last_agent = message.name - print(f"{message.content}", end="", flush=True) - - print() - print(f"# IS COMPLETE: {chat.is_complete}") - finally: - await agent_writer.delete() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/README.md b/python/samples/concepts/agents/openai_assistant/README.md new file mode 100644 index 000000000000..6689c05f9f4b --- /dev/null +++ b/python/samples/concepts/agents/openai_assistant/README.md @@ -0,0 +1,101 @@ +## OpenAI Assistant Agents + +The following getting started samples show how to use OpenAI Assistant agents with Semantic Kernel. + +## Assistants API Overview + +The Assistants API is a robust solution from OpenAI that empowers developers to integrate powerful, purpose-built AI assistants into their applications. It streamlines the development process by handling conversation histories, managing threads, and providing seamless access to advanced tools. + +### Key Features + +- **Purpose-Built AI Assistants:** + Assistants are specialized AIs that leverage OpenAI’s models to interact with users, access files, maintain persistent threads, and call additional tools. This enables highly tailored and effective user interactions. + +- **Simplified Conversation Management:** + The concept of a **thread** -- a dedicated conversation session between an assistant and a user -- ensures that message history is managed automatically. Threads optimize the conversation context by storing and truncating messages as needed. + +- **Integrated Tool Access:** + The API provides built-in tools such as: + - **Code Interpreter:** Allows the assistant to execute code, enhancing its ability to solve complex tasks. + - **File Search:** Implements best practices for retrieving data from uploaded files, including advanced chunking and embedding techniques. 
+ +- **Enhanced Function Calling:** + With improved support for third-party tool integration, the Assistants API enables assistants to extend their capabilities beyond native functions. + +For more detailed technical information, refer to the [Assistants API](https://platform.openai.com/docs/assistants/overview). + +### Semantic Kernel OpenAI Assistant Agents + +OpenAI Assistant Agents are created in the following way: + +```python +from semantic_kernel.agents.open_ai import OpenAIAssistantAgent + +# Create the client using OpenAI resources and configuration +client, model = OpenAIAssistantAgent.setup_resources() + +# Create the assistant definition +definition = await client.beta.assistants.create( + model=model, + instructions="", + name="", +) + +# Define the Semantic Kernel OpenAI Assistant Agent +agent = OpenAIAssistantAgent( + client=client, + definition=definition, +) + +# Define a thread and invoke the agent with the user input +thread = await agent.client.beta.threads.create() + +# Add a message to the thread +await agent.add_chat_message(thread_id=thread.id, message="Why is the sky blue?") + +# Invoke the agent +async for content in agent.invoke(thread_id=thread.id): + print(f"# {content.role}: {content.content}") +``` + +### Semantic Kernel Azure Assistant Agents + +Azure Assistant Agents are currently in preview and require a `-preview` API version (minimum version: `2024-05-01-preview`). As new features are introduced, API versions will be updated accordingly. For the latest versioning details, please refer to the [Azure OpenAI API preview lifecycle](https://learn.microsoft.com/azure/ai-services/openai/api-version-deprecation). + +To specify the correct API version, set the following environment variable (for example, in your `.env` file): + +```bash +AZURE_OPENAI_API_VERSION="2025-01-01-preview" +``` + +Alternatively, you can pass the `api_version` parameter when creating an `AzureAssistantAgent`: + +```python +from semantic_kernel.agents.open_ai import AzureAssistantAgent + +# Create the client using Azure OpenAI resources and configuration +client, model = AzureAssistantAgent.setup_resources() + +# Create the assistant definition +definition = await client.beta.assistants.create( + model=model, + instructions="", + name="", +) + +# Define the Semantic Kernel Azure OpenAI Assistant Agent +agent = AzureAssistantAgent( + client=client, + definition=definition, +) + +# Define a thread and invoke the agent with the user input +thread = await agent.client.beta.threads.create() + +# Add a message to the thread +await agent.add_chat_message(thread_id=thread.id, message="Why is the sky blue?") + +# Invoke the agent +async for content in agent.invoke(thread_id=thread.id): + print(f"# {content.role}: {content.content}") +``` \ No newline at end of file diff --git a/python/samples/concepts/agents/openai_assistant/__init__.py b/python/samples/concepts/agents/openai_assistant/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker.py new file mode 100644 index 000000000000..83d07d210ebc --- /dev/null +++ b/python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker.py @@ -0,0 +1,81 @@ +# Copyright (c) Microsoft. All rights reserved. 
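The README examples above create a server-side assistant and thread but do not show cleanup, which the samples in this folder perform in their `finally` blocks. A minimal sketch of the full create, invoke, and clean-up cycle follows; the agent name and instructions are illustrative, while every API call appears in the README or the samples.

```python
import asyncio

from semantic_kernel.agents.open_ai import AzureAssistantAgent


async def main() -> None:
    # Same setup pattern as the README example above
    client, model = AzureAssistantAgent.setup_resources()
    definition = await client.beta.assistants.create(
        model=model,
        name="CleanupDemo",
        instructions="Answer questions about the world in one sentence.",
    )
    agent = AzureAssistantAgent(client=client, definition=definition)
    thread = await agent.client.beta.threads.create()

    try:
        await agent.add_chat_message(thread_id=thread.id, message="Why is the sky blue?")
        async for content in agent.invoke(thread_id=thread.id):
            print(f"# {content.role}: {content.content}")
    finally:
        # Threads and assistants are server-side resources; remove them when done.
        await agent.client.beta.threads.delete(thread.id)
        await agent.client.beta.assistants.delete(agent.id)


if __name__ == "__main__":
    asyncio.run(main())
```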
+import asyncio + +from samples.concepts.agents.openai_assistant.openai_assistant_sample_utils import download_response_images +from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.contents.file_reference_content import FileReferenceContent + +""" +The following sample demonstrates how to create an OpenAI +assistant using either Azure OpenAI or OpenAI and leverage the +assistant and leverage the assistant's code interpreter tool +in a streaming fashion. +""" + + +async def main(): + # Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + # Get the code interpreter tool and resources + code_interpreter_tool, code_interpreter_resource = AzureAssistantAgent.configure_code_interpreter_tool() + + # Define the assistant definition + definition = await client.beta.assistants.create( + model=model, + instructions="Create charts as requested without explanation.", + name="ChartMaker", + tools=code_interpreter_tool, + tool_resources=code_interpreter_resource, + ) + + # Create the AzureAssistantAgent instance using the client and the assistant definition + agent = AzureAssistantAgent( + client=client, + definition=definition, + ) + + # Define a thread and invoke the agent with the user input + thread = await agent.client.beta.threads.create() + + user_inputs = [ + """ + Display this data using a bar-chart: + + Banding Brown Pink Yellow Sum + X00000 339 433 126 898 + X00300 48 421 222 691 + X12345 16 395 352 763 + Others 23 373 156 552 + Sum 426 1622 856 2904 + """, + "Can you regenerate this same chart using the category names as the bar colors?", + ] + + try: + for user_input in user_inputs: + file_ids = [] + await agent.add_chat_message(thread_id=thread.id, message=user_input) + async for message in agent.invoke(thread_id=thread.id): + if message.content: + print(f"# {message.role}: {message.content}") + + if len(message.items) > 0: + for item in message.items: + if isinstance(item, FileReferenceContent): + file_ids.extend([ + item.file_id + for item in message.items + if isinstance(item, FileReferenceContent) and item.file_id is not None + ]) + + # Use a sample utility method to download the files to the current working directory + await download_response_images(agent, file_ids) + + finally: + await client.beta.threads.delete(thread.id) + await client.beta.assistants.delete(assistant_id=agent.id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker_streaming.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker_streaming.py new file mode 100644 index 000000000000..d4ec9662b490 --- /dev/null +++ b/python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker_streaming.py @@ -0,0 +1,99 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio + +from samples.concepts.agents.openai_assistant.openai_assistant_sample_utils import download_response_images +from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent + +""" +The following sample demonstrates how to create an OpenAI +assistant using either Azure OpenAI or OpenAI and leverage the +assistant and leverage the assistant's code interpreter tool +in a streaming fashion. 
+""" + + +async def main(): + # Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + # Get the code interpreter tool and resources + code_interpreter_tool, code_interpreter_resource = AzureAssistantAgent.configure_code_interpreter_tool() + + # Define the assistant definition + definition = await client.beta.assistants.create( + model=model, + instructions="Create charts as requested without explanation.", + name="ChartMaker", + tools=code_interpreter_tool, + tool_resources=code_interpreter_resource, + ) + + # Create the AzureAssistantAgent instance using the client and the assistant definition + agent = AzureAssistantAgent( + client=client, + definition=definition, + ) + + # Define a thread and invoke the agent with the user input + thread = await agent.client.beta.threads.create() + + user_inputs = [ + """ + Display this data using a bar-chart: + + Banding Brown Pink Yellow Sum + X00000 339 433 126 898 + X00300 48 421 222 691 + X12345 16 395 352 763 + Others 23 373 156 552 + Sum 426 1622 856 2904 + """, + "Can you regenerate this same chart using the category names as the bar colors?", + ] + + try: + for user_input in user_inputs: + await agent.add_chat_message(thread_id=thread.id, message=user_input) + + print(f"# User: '{user_input}'") + + file_ids: list[str] = [] + is_code = False + last_role = None + async for response in agent.invoke_stream(thread_id=thread.id): + current_is_code = response.metadata.get("code", False) + + if current_is_code: + if not is_code: + print("\n\n```python") + is_code = True + print(response.content, end="", flush=True) + else: + if is_code: + print("\n```") + is_code = False + last_role = None + if hasattr(response, "role") and response.role is not None and last_role != response.role: + print(f"\n# {response.role}: ", end="", flush=True) + last_role = response.role + print(response.content, end="", flush=True) + file_ids.extend([ + item.file_id + for item in response.items + if isinstance(item, StreamingFileReferenceContent) and item.file_id is not None + ]) + if is_code: + print("```\n") + + # Use a sample utility method to download the files to the current working directory + await download_response_images(agent, file_ids) + file_ids.clear() + + finally: + await client.beta.threads.delete(thread.id) + await client.beta.assistants.delete(assistant_id=agent.id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation.py new file mode 100644 index 000000000000..76c9262cd046 --- /dev/null +++ b/python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation.py @@ -0,0 +1,83 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio +import os + +from samples.concepts.agents.openai_assistant.openai_assistant_sample_utils import download_response_files +from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.contents.annotation_content import AnnotationContent + +""" +The following sample demonstrates how to create an OpenAI +assistant using either Azure OpenAI or OpenAI and leverage the +assistant's ability to have the code interpreter work with +uploaded files. This sample uses non-streaming responses. 
+""" + + +async def main(): + # Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + csv_file_path = os.path.join( + os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), + "resources", + "agent_assistant_file_manipulation", + "sales.csv", + ) + + # Load the employees PDF file as a FileObject + with open(csv_file_path, "rb") as file: + file = await client.files.create(file=file, purpose="assistants") + + # Get the code interpreter tool and resources + code_interpreter_tool, code_interpreter_tool_resource = AzureAssistantAgent.configure_code_interpreter_tool(file.id) + + # Create the assistant definition + definition = await client.beta.assistants.create( + model=model, + name="FileManipulation", + instructions="Find answers to the user's questions in the provided file.", + tools=code_interpreter_tool, + tool_resources=code_interpreter_tool_resource, + ) + + # Create the AzureAssistantAgent instance using the client and the assistant definition + agent = AzureAssistantAgent( + client=client, + definition=definition, + ) + + # Define a thread and invoke the agent with the user input + thread = await agent.client.beta.threads.create() + + try: + user_inputs = [ + "Which segment had the most sales?", + "List the top 5 countries that generated the most profit.", + "Create a tab delimited file report of profit by each country per month.", + ] + + for user_input in user_inputs: + await agent.add_chat_message(thread_id=thread.id, message=user_input) + + print(f"# User: '{user_input}'") + async for content in agent.invoke(thread_id=thread.id): + if content.metadata.get("code", False): + print(f"# {content.role}:\n\n```python") + print(content.content) + print("```") + else: + print(f"# {content.role}: {content.content}") + + if content.items: + for item in content.items: + if isinstance(item, AnnotationContent): + await download_response_files(agent, [item]) + finally: + await client.files.delete(file.id) + await client.beta.threads.delete(thread.id) + await client.beta.assistants.delete(agent.id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation_streaming.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation_streaming.py new file mode 100644 index 000000000000..b34a46b43105 --- /dev/null +++ b/python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation_streaming.py @@ -0,0 +1,105 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio +import os + +from samples.concepts.agents.openai_assistant.openai_assistant_sample_utils import download_response_files +from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent + +""" +The following sample demonstrates how to create an OpenAI +assistant using either Azure OpenAI or OpenAI and leverage the +assistant's ability to have the code interpreter work with +uploaded files. This sample uses streaming responses. 
+""" + + +async def main(): + # Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + csv_file_path = os.path.join( + os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), + "resources", + "agent_assistant_file_manipulation", + "sales.csv", + ) + + # Load the employees PDF file as a FileObject + with open(csv_file_path, "rb") as file: + file = await client.files.create(file=file, purpose="assistants") + + # Get the code interpreter tool and resources + code_interpreter_tools, code_interpreter_tool_resources = AzureAssistantAgent.configure_code_interpreter_tool( + file.id + ) + + # Create the assistant definition + definition = await client.beta.assistants.create( + model=model, + name="FileManipulation", + instructions="Find answers to the user's questions in the provided file.", + tools=code_interpreter_tools, + tool_resources=code_interpreter_tool_resources, + ) + + # Create the AzureAssistantAgent instance using the client and the assistant definition + agent = AzureAssistantAgent( + client=client, + definition=definition, + ) + + # Define a thread and invoke the agent with the user input + thread = await agent.client.beta.threads.create() + + try: + user_inputs = [ + # "Which segment had the most sales?", + # "List the top 5 countries that generated the most profit.", + "Create a tab delimited file report of profit by each country per month.", + ] + for user_input in user_inputs: + await agent.add_chat_message(thread_id=thread.id, message=user_input) + + print(f"# User: '{user_input}'") + annotations: list[StreamingAnnotationContent] = [] + messages: list[ChatMessageContent] = [] + is_code = False + last_role = None + async for response in agent.invoke_stream(thread_id=thread.id, messages=messages): + current_is_code = response.metadata.get("code", False) + + if current_is_code: + if not is_code: + print("\n\n```python") + is_code = True + print(response.content, end="", flush=True) + else: + if is_code: + print("\n```") + is_code = False + last_role = None + if hasattr(response, "role") and response.role is not None and last_role != response.role: + print(f"\n# {response.role}: ", end="", flush=True) + last_role = response.role + print(response.content, end="", flush=True) + if is_code: + print("```\n") + else: + print() + + # Use a sample utility method to download the files to the current working directory + annotations.extend( + item for message in messages for item in message.items if isinstance(item, StreamingAnnotationContent) + ) + await download_response_files(agent, annotations) + annotations.clear() + finally: + await client.files.delete(file.id) + await client.beta.threads.delete(thread.id) + await client.beta.assistants.delete(agent.id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_retrieval.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_retrieval.py new file mode 100644 index 000000000000..57cdf5e7e7aa --- /dev/null +++ b/python/samples/concepts/agents/openai_assistant/openai_assistant_retrieval.py @@ -0,0 +1,55 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio + +from semantic_kernel.agents.open_ai import AzureAssistantAgent + +""" +The following sample demonstrates how to create an OpenAI +assistant using either Azure OpenAI or OpenAI and retrieve it from +the server to create a new instance of the assistant. 
This is done by +retrieving the assistant definition from the server using the Assistant's +ID and creating a new instance of the assistant using the retrieved definition. +""" + + +async def main(): + # Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + # Create the assistant definition + definition = await client.beta.assistants.create( + model=model, + name="Assistant", + instructions="You are a helpful assistant answering questions about the world in one sentence.", + ) + + # Store the assistant ID + assistant_id = definition.id + + # Retrieve the assistant definition from the server based on the assistant ID + new_asst_definition = await client.beta.assistants.retrieve(assistant_id) + + # Create the AzureAssistantAgent instance using the client and the assistant definition + agent = AzureAssistantAgent( + client=client, + definition=new_asst_definition, + ) + + # Define a thread and invoke the agent with the user input + thread = await agent.client.beta.threads.create() + + user_inputs = ["Why is the sky blue?"] + + try: + for user_input in user_inputs: + await agent.add_chat_message(thread_id=thread.id, message=user_input) + print(f"# User: '{user_input}'") + async for content in agent.invoke(thread_id=thread.id): + print(f"# {content.role}: {content.content}") + finally: + await client.beta.threads.delete(thread.id) + await client.beta.assistants.delete(agent.id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_sample_utils.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_sample_utils.py new file mode 100644 index 000000000000..fe072c2596d4 --- /dev/null +++ b/python/samples/concepts/agents/openai_assistant/openai_assistant_sample_utils.py @@ -0,0 +1,54 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import os +from collections.abc import Sequence +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from semantic_kernel.agents.open_ai import OpenAIAssistantAgent + from semantic_kernel.contents import AnnotationContent, StreamingAnnotationContent + + +async def download_file_content(agent: "OpenAIAssistantAgent", file_id: str, file_extension: str): + """A sample utility method to download the content of a file.""" + try: + # Fetch the content of the file using the provided method + response_content = await agent.client.files.content(file_id) + + # Get the current working directory of the file + current_directory = os.path.dirname(os.path.abspath(__file__)) + + # Define the path to save the image in the current directory + file_path = os.path.join( + current_directory, # Use the current directory of the file + f"{file_id}.{file_extension}", # You can modify this to use the actual filename with proper extension + ) + + # Save content to a file asynchronously + with open(file_path, "wb") as file: + file.write(response_content.content) + + print(f"File saved to: {file_path}") + except Exception as e: + print(f"An error occurred while downloading file {file_id}: {str(e)}") + + +async def download_response_images(agent: "OpenAIAssistantAgent", file_ids: list[str]): + """A sample utility method to download the content of a list of files.""" + if file_ids: + # Iterate over file_ids and download each one + for file_id in file_ids: + await download_file_content(agent, file_id, "png") + + +async def download_response_files( + agent: "OpenAIAssistantAgent", annotations: Sequence["StreamingAnnotationContent | AnnotationContent"] +): + """A sample utility method to download the content of a file.""" + if annotations: + # Iterate over file_ids and download each one + for ann in annotations: + if ann.quote is None or ann.file_id is None: + continue + extension = os.path.splitext(ann.quote)[1].lstrip(".") + await download_file_content(agent, ann.file_id, extension) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_streaming.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_streaming.py new file mode 100644 index 000000000000..c965acb92dd0 --- /dev/null +++ b/python/samples/concepts/agents/openai_assistant/openai_assistant_streaming.py @@ -0,0 +1,80 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio +from typing import Annotated + +from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.functions.kernel_function_decorator import kernel_function + +""" +The following sample demonstrates how to create an OpenAI +assistant using either Azure OpenAI or OpenAI. OpenAI Assistants +allow for function calling, the use of file search and a +code interpreter. Assistant Threads are used to manage the +conversation state, similar to a Semantic Kernel Chat History. +This sample also demonstrates the Assistants Streaming +capability and how to manage an Assistants chat history. 
+""" + + +# Define a sample plugin for the sample +class MenuPlugin: + """A sample Menu Plugin used for the concept sample.""" + + @kernel_function(description="Provides a list of specials from the menu.") + def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: + return """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """ + + @kernel_function(description="Provides the price of the requested menu item.") + def get_item_price( + self, menu_item: Annotated[str, "The name of the menu item."] + ) -> Annotated[str, "Returns the price of the menu item."]: + return "$9.99" + + +async def main(): + # Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + # Define the assistant definition + definition = await client.beta.assistants.create( + model=model, + name="Host", + instructions="Answer questions about the menu.", + ) + + # Create the AzureAssistantAgent instance using the client and the assistant definition and the defined plugin + agent = AzureAssistantAgent( + client=client, + definition=definition, + plugins=[MenuPlugin()], + ) + + thread = await client.beta.threads.create() + + user_inputs = ["Hello", "What is the special soup?", "What is the special drink?", "How much is that?", "Thank you"] + + try: + for user_input in user_inputs: + await agent.add_chat_message(thread_id=thread.id, message=user_input) + + print(f"# {AuthorRole.USER}: '{user_input}'") + + first_chunk = True + async for content in agent.invoke_stream(thread_id=thread.id): + if first_chunk: + print(f"# {content.role}: ", end="", flush=True) + first_chunk = False + print(content.content, end="", flush=True) + print() + finally: + await client.beta.threads.delete(thread.id) + await client.beta.assistants.delete(assistant_id=agent.id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_structured_outputs.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_structured_outputs.py new file mode 100644 index 000000000000..fbb52a444353 --- /dev/null +++ b/python/samples/concepts/agents/openai_assistant/openai_assistant_structured_outputs.py @@ -0,0 +1,90 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio + +from pydantic import BaseModel + +from semantic_kernel.agents.open_ai import AzureAssistantAgent + +""" +The following sample demonstrates how to create an OpenAI +assistant using either Azure OpenAI or OpenAI and leverage the +assistant's ability to returned structured outputs, based on a user-defined +Pydantic model. This could also be a non-Pydantic model. Use the convenience +method on the OpenAIAssistantAgent class to configure the response format, +as shown below. + +Note, you may specify your own JSON Schema. 
You'll need to make sure it is correct +if not using the convenience method, per the following format: + +json_schema = { + "type": "json_schema", + "json_schema": { + "schema": { + "properties": { + "response": {"title": "Response", "type": "string"}, + "items": {"items": {"type": "string"}, "title": "Items", "type": "array"}, + }, + "required": ["response", "items"], + "title": "ResponseModel", + "type": "object", + "additionalProperties": False, + }, + "name": "ResponseModel", + "strict": True, + }, +} + +# Create the assistant definition +definition = await client.beta.assistants.create( + model=model, + name="Assistant", + instructions="You are a helpful assistant answering questions about the world in one sentence.", + response_format=json_schema, +) +""" + + +# Define a Pydantic model that represents the structured output from the OpenAI service +class ResponseModel(BaseModel): + response: str + items: list[str] + + +async def main(): + # Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + # Create the assistant definition + definition = await client.beta.assistants.create( + model=model, + name="Assistant", + instructions="You are a helpful assistant answering questions about the world in one sentence.", + response_format=AzureAssistantAgent.configure_response_format(ResponseModel), + ) + + # Create the AzureAssistantAgent instance using the client and the assistant definition + agent = AzureAssistantAgent( + client=client, + definition=definition, + ) + + # Define a thread and invoke the agent with the user input + thread = await agent.client.beta.threads.create() + + user_inputs = ["Why is the sky blue?"] + + try: + for user_input in user_inputs: + await agent.add_chat_message(thread_id=thread.id, message=user_input) + print(f"# User: '{user_input}'") + async for content in agent.invoke(thread_id=thread.id): + # The response returned is a Pydantic Model, so we can validate it using the model_validate_json method + response_model = ResponseModel.model_validate_json(content.content) + print(f"# {content.role}: {response_model}") + finally: + await client.beta.threads.delete(thread.id) + await client.beta.assistants.delete(agent.id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_templating_streaming.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_templating_streaming.py new file mode 100644 index 000000000000..83331109d15d --- /dev/null +++ b/python/samples/concepts/agents/openai_assistant/openai_assistant_templating_streaming.py @@ -0,0 +1,116 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.prompt_template.const import TEMPLATE_FORMAT_TYPES +from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig + +""" +The following sample demonstrates how to create an assistant +agent using either Azure OpenAI or OpenAI within Semantic Kernel. +It uses parameterized prompts and shows how to swap between +"semantic-kernel," "jinja2," and "handlebars" template formats, +This sample highlights how the agent's threaded conversation +state parallels the Chat History in Semantic Kernel, ensuring +all responses and parameters remain consistent throughout the +session. 
+""" + +inputs = [ + ("Home cooking is great.", None), + ("Talk about world peace.", "iambic pentameter"), + ("Say something about doing your best.", "e. e. cummings"), + ("What do you think about having fun?", "old school rap"), +] + + +async def invoke_agent_with_template( + template_str: str, template_format: TEMPLATE_FORMAT_TYPES, default_style: str = "haiku" +): + # Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + # Configure the prompt template + prompt_template_config = PromptTemplateConfig(template=template_str, template_format=template_format) + + # Create the assistant definition + definition = await client.beta.assistants.create( + model=model, + name="MyPoetAgent", + ) + + # Create the AzureAssistantAgent instance using the client, the assistant definition, + # the prompt template config, and the constructor-level Kernel Arguments + agent = AzureAssistantAgent( + client=client, + definition=definition, + prompt_template_config=prompt_template_config, # type: ignore + arguments=KernelArguments(style=default_style), + ) + + # Define a thread and invoke the agent with the user input + thread = await agent.client.beta.threads.create() + + try: + for user_input, style in inputs: + # Add user message to the conversation + await agent.add_chat_message( + thread_id=thread.id, + message=user_input, + ) + print(f"# User: {user_input}\n") + + # If style is specified, override the 'style' argument + argument_overrides = None + if style: + # Arguments passed in at invocation time take precedence over + # the default arguments that were added via the constructor. + argument_overrides = KernelArguments(style=style) + + # Stream agent responses + async for response in agent.invoke_stream(thread_id=thread.id, arguments=argument_overrides): + if response.content: + print(f"{response.content}", flush=True, end="") + print("\n") + finally: + # Clean up + await client.beta.threads.delete(thread.id) + await client.beta.assistants.delete(agent.id) + + +async def main(): + # 1) Using "semantic-kernel" format + print("\n===== SEMANTIC-KERNEL FORMAT =====\n") + semantic_kernel_template = """ +Write a one verse poem on the requested topic in the style of {{$style}}. +Always state the requested style of the poem. Write appropriate G-rated content. +""" + await invoke_agent_with_template( + template_str=semantic_kernel_template, + template_format="semantic-kernel", + default_style="haiku", + ) + + # 2) Using "jinja2" format + print("\n===== JINJA2 FORMAT =====\n") + jinja2_template = """ +Write a one verse poem on the requested topic in the style of {{style}}. +Always state the requested style of the poem. Write appropriate G-rated content. +""" + await invoke_agent_with_template(template_str=jinja2_template, template_format="jinja2", default_style="haiku") + + # 3) Using "handlebars" format + print("\n===== HANDLEBARS FORMAT =====\n") + handlebars_template = """ +Write a one verse poem on the requested topic in the style of {{style}}. +Always state the requested style of the poem. Write appropriate G-rated content. 
+""" + await invoke_agent_with_template( + template_str=handlebars_template, template_format="handlebars", default_style="haiku" + ) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_vision_streaming.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_vision_streaming.py new file mode 100644 index 000000000000..975a426c94a9 --- /dev/null +++ b/python/samples/concepts/agents/openai_assistant/openai_assistant_vision_streaming.py @@ -0,0 +1,93 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os + +from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.contents import AuthorRole, ChatMessageContent, FileReferenceContent, ImageContent, TextContent + +""" +The following sample demonstrates how to create an OpenAI +assistant using either Azure OpenAI or OpenAI and leverage the +multi-modal content types to have the assistant describe images +and answer questions about them and provide streaming responses. +""" + + +async def main(): + # Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + file_path = os.path.join( + os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), "resources", "cat.jpg" + ) + + with open(file_path, "rb") as file: + file = await client.files.create(file=file, purpose="assistants") + + # Create the assistant definition + definition = await client.beta.assistants.create( + model=model, + instructions="Answer questions about the menu.", + name="Host", + ) + + # Create the AzureAssistantAgent instance using the client and the assistant definition + agent = AzureAssistantAgent( + client=client, + definition=definition, + ) + + # Define a thread and invoke the agent with the user input + thread = await agent.client.beta.threads.create() + + # Define a series of message with either ImageContent or FileReferenceContent + user_messages = { + ChatMessageContent( + role=AuthorRole.USER, + items=[ + TextContent(text="Describe this image."), + ImageContent( + uri="https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/New_york_times_square-terabass.jpg/1200px-New_york_times_square-terabass.jpg" + ), + ], + ), + ChatMessageContent( + role=AuthorRole.USER, + items=[ + TextContent(text="What is the main color in this image?"), + ImageContent(uri="https://upload.wikimedia.org/wikipedia/commons/5/56/White_shark.jpg"), + ], + ), + ChatMessageContent( + role=AuthorRole.USER, + items=[ + TextContent(text="Is there an animal in this image?"), + FileReferenceContent(file_id=file.id), + ], + ), + } + + try: + for message in user_messages: + await agent.add_chat_message(thread_id=thread.id, message=message) + + print(f"# User: '{message.items[0].text}'") # type: ignore + + first_chunk = True + async for content in agent.invoke_stream(thread_id=thread.id): + if content.role != AuthorRole.TOOL: + if first_chunk: + print("# Agent: ", end="", flush=True) + first_chunk = False + print(content.content, end="", flush=True) + print("\n") + + finally: + await client.files.delete(file.id) + await agent.client.beta.threads.delete(thread.id) + await agent.client.beta.assistants.delete(assistant_id=agent.id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling.py b/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling.py 
index c74ebc322489..27e80773645c 100644 --- a/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling.py +++ b/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling.py @@ -1,7 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. import asyncio -from typing import TYPE_CHECKING from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings from semantic_kernel import Kernel @@ -11,9 +10,6 @@ from semantic_kernel.core_plugins.time_plugin import TimePlugin from semantic_kernel.functions import KernelArguments -if TYPE_CHECKING: - pass - ##################################################################### # This sample demonstrates how to build a conversational chatbot # # using Semantic Kernel, featuring auto function calling, # @@ -61,12 +57,13 @@ # - Services.OLLAMA # - Services.ONNX # - Services.VERTEX_AI +# - Services.DEEPSEEK # Please make sure you have configured your environment correctly for the selected chat completion service. chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.AZURE_OPENAI) # Configure the function choice behavior. Here, we set it to Auto, where auto_invoke=True by default. # With `auto_invoke=True`, the model will automatically choose and call functions as needed. -request_settings.function_choice_behavior = FunctionChoiceBehavior.Auto() +request_settings.function_choice_behavior = FunctionChoiceBehavior.Auto(filters={"excluded_plugins": ["ChatBot"]}) kernel.add_service(chat_completion_service) diff --git a/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling_streaming.py b/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling_streaming.py index f7aa767ffa23..3b028d329ae9 100644 --- a/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling_streaming.py +++ b/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling_streaming.py @@ -59,6 +59,7 @@ # - Services.OLLAMA # - Services.ONNX # - Services.VERTEX_AI +# - Services.DEEPSEEK # Please make sure you have configured your environment correctly for the selected chat completion service. chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.AZURE_OPENAI) diff --git a/python/samples/concepts/auto_function_calling/chat_completion_with_manual_function_calling.py b/python/samples/concepts/auto_function_calling/chat_completion_with_manual_function_calling.py index 162c415c4a64..5fcfcd37ac76 100644 --- a/python/samples/concepts/auto_function_calling/chat_completion_with_manual_function_calling.py +++ b/python/samples/concepts/auto_function_calling/chat_completion_with_manual_function_calling.py @@ -64,6 +64,7 @@ # - Services.OLLAMA # - Services.ONNX # - Services.VERTEX_AI +# - Services.DEEPSEEK # Please make sure you have configured your environment correctly for the selected chat completion service. 
chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.AZURE_OPENAI) diff --git a/python/samples/concepts/auto_function_calling/function_calling_with_required_type.py b/python/samples/concepts/auto_function_calling/function_calling_with_required_type.py index 26697141ab17..a54407e3769d 100644 --- a/python/samples/concepts/auto_function_calling/function_calling_with_required_type.py +++ b/python/samples/concepts/auto_function_calling/function_calling_with_required_type.py @@ -50,7 +50,7 @@ # This concept example shows how to handle both streaming and non-streaming responses # To toggle the behavior, set the following flag accordingly: -stream = True +stream = False kernel = Kernel() @@ -84,6 +84,7 @@ temperature=0.7, top_p=0.8, function_choice_behavior=FunctionChoiceBehavior.Required( + auto_invoke=False, filters={"included_functions": ["time-time", "time-date"]}, ), ) diff --git a/python/samples/concepts/caching/semantic_caching.py b/python/samples/concepts/caching/semantic_caching.py new file mode 100644 index 000000000000..786992888817 --- /dev/null +++ b/python/samples/concepts/caching/semantic_caching.py @@ -0,0 +1,143 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import time +from collections.abc import Awaitable, Callable +from dataclasses import dataclass, field +from typing import Annotated +from uuid import uuid4 + +from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import EmbeddingGeneratorBase +from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion +from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding import OpenAITextEmbedding +from semantic_kernel.connectors.memory.in_memory.in_memory_store import InMemoryVectorStore +from semantic_kernel.data.record_definition import vectorstoremodel +from semantic_kernel.data.record_definition.vector_store_record_fields import ( + VectorStoreRecordDataField, + VectorStoreRecordKeyField, + VectorStoreRecordVectorField, +) +from semantic_kernel.data.vector_search.vector_search_options import VectorSearchOptions +from semantic_kernel.data.vector_search.vectorized_search import VectorizedSearchMixin +from semantic_kernel.data.vector_storage.vector_store import VectorStore +from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection +from semantic_kernel.filters.filter_types import FilterTypes +from semantic_kernel.filters.functions.function_invocation_context import FunctionInvocationContext +from semantic_kernel.filters.prompts.prompt_render_context import PromptRenderContext +from semantic_kernel.functions.function_result import FunctionResult +from semantic_kernel.kernel import Kernel + +COLLECTION_NAME = "llm_responses" +RECORD_ID_KEY = "cache_record_id" + + +# Define a simple data model to store, the prompt, the result, and the prompt embedding. +@vectorstoremodel +@dataclass +class CacheRecord: + prompt: Annotated[str, VectorStoreRecordDataField(embedding_property_name="prompt_embedding")] + result: Annotated[str, VectorStoreRecordDataField(is_full_text_searchable=True)] + prompt_embedding: Annotated[list[float], VectorStoreRecordVectorField(dimensions=1536)] = field( + default_factory=list + ) + id: Annotated[str, VectorStoreRecordKeyField] = field(default_factory=lambda: str(uuid4())) + + +# Define the filters, one for caching the results and one for using the cache. 
+class PromptCacheFilter: + """A filter to cache the results of the prompt rendering and function invocation.""" + + def __init__( + self, + embedding_service: EmbeddingGeneratorBase, + vector_store: VectorStore, + collection_name: str = COLLECTION_NAME, + score_threshold: float = 0.2, + ): + self.embedding_service = embedding_service + self.vector_store = vector_store + self.collection: VectorStoreRecordCollection[str, CacheRecord] = vector_store.get_collection( + collection_name, data_model_type=CacheRecord + ) + self.score_threshold = score_threshold + + async def on_prompt_render( + self, context: PromptRenderContext, next: Callable[[PromptRenderContext], Awaitable[None]] + ): + """Filter to cache the rendered prompt and the result of the function. + + It uses the score threshold to determine if the result should be cached. + The direction of the comparison is based on the default distance metric for + the in memory vector store, which is cosine distance, so the closer to 0 the + closer the match. + """ + await next(context) + assert context.rendered_prompt # nosec + prompt_embedding = await self.embedding_service.generate_raw_embeddings([context.rendered_prompt]) + await self.collection.create_collection_if_not_exists() + assert isinstance(self.collection, VectorizedSearchMixin) # nosec + results = await self.collection.vectorized_search( + vector=prompt_embedding[0], options=VectorSearchOptions(vector_field_name="prompt_embedding", top=1) + ) + async for result in results.results: + if result.score < self.score_threshold: + context.function_result = FunctionResult( + function=context.function.metadata, + value=result.record.result, + rendered_prompt=context.rendered_prompt, + metadata={RECORD_ID_KEY: result.record.id}, + ) + + async def on_function_invocation( + self, context: FunctionInvocationContext, next: Callable[[FunctionInvocationContext], Awaitable[None]] + ): + """Filter to store the result in the cache if it is new.""" + await next(context) + result = context.result + if result and result.rendered_prompt and RECORD_ID_KEY not in result.metadata: + prompt_embedding = await self.embedding_service.generate_embeddings([result.rendered_prompt]) + cache_record = CacheRecord( + prompt=result.rendered_prompt, + result=str(result), + prompt_embedding=prompt_embedding[0], + ) + await self.collection.create_collection_if_not_exists() + await self.collection.upsert(cache_record) + + +async def execute_async(kernel: Kernel, title: str, prompt: str): + """Helper method to execute and log time.""" + print(f"{title}: {prompt}") + start = time.time() + result = await kernel.invoke_prompt(prompt) + elapsed = time.time() - start + print(f"\tElapsed Time: {elapsed:.3f}") + return result + + +async def main(): + # create the kernel and add the chat service and the embedding service + kernel = Kernel() + chat = OpenAIChatCompletion(service_id="default") + embedding = OpenAITextEmbedding(service_id="embedder") + kernel.add_service(chat) + kernel.add_service(embedding) + # create the in-memory vector store + vector_store = InMemoryVectorStore() + # create the cache filter and add the filters to the kernel + cache = PromptCacheFilter(embedding_service=embedding, vector_store=vector_store) + kernel.add_filter(FilterTypes.PROMPT_RENDERING, cache.on_prompt_render) + kernel.add_filter(FilterTypes.FUNCTION_INVOCATION, cache.on_function_invocation) + + # Run the sample + print("\nIn-memory cache sample:") + r1 = await execute_async(kernel, "First run", "What's the tallest building in New York?") + 
print(f"\tResult 1: {r1}") + r2 = await execute_async(kernel, "Second run", "How are you today?") + print(f"\tResult 2: {r2}") + r3 = await execute_async(kernel, "Third run", "What is the highest building in New York City?") + print(f"\tResult 3: {r3}") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/chat_completion/simple_chatbot.py b/python/samples/concepts/chat_completion/simple_chatbot.py index 630bd75061f2..11909e74c902 100644 --- a/python/samples/concepts/chat_completion/simple_chatbot.py +++ b/python/samples/concepts/chat_completion/simple_chatbot.py @@ -2,10 +2,7 @@ import asyncio -from samples.concepts.setup.chat_completion_services import ( - Services, - get_chat_completion_service_and_request_settings, -) +from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings from semantic_kernel.contents import ChatHistory # This sample shows how to create a chatbot. This sample uses the following two main components: @@ -25,6 +22,7 @@ # - Services.OLLAMA # - Services.ONNX # - Services.VERTEX_AI +# - Services.DEEPSEEK # Please make sure you have configured your environment correctly for the selected chat completion service. chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.OPENAI) diff --git a/python/samples/concepts/chat_completion/simple_chatbot_kernel_function.py b/python/samples/concepts/chat_completion/simple_chatbot_kernel_function.py index 6ed249276c08..5b9738ce4471 100644 --- a/python/samples/concepts/chat_completion/simple_chatbot_kernel_function.py +++ b/python/samples/concepts/chat_completion/simple_chatbot_kernel_function.py @@ -2,10 +2,7 @@ import asyncio -from samples.concepts.setup.chat_completion_services import ( - Services, - get_chat_completion_service_and_request_settings, -) +from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings from semantic_kernel import Kernel from semantic_kernel.contents import ChatHistory from semantic_kernel.functions import KernelArguments @@ -33,6 +30,7 @@ # - Services.OLLAMA # - Services.ONNX # - Services.VERTEX_AI +# - Services.DEEPSEEK # Please make sure you have configured your environment correctly for the selected chat completion service. chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.AZURE_OPENAI) diff --git a/python/samples/concepts/chat_completion/simple_chatbot_streaming.py b/python/samples/concepts/chat_completion/simple_chatbot_streaming.py index b513aeeb408d..9086aeb7f17e 100644 --- a/python/samples/concepts/chat_completion/simple_chatbot_streaming.py +++ b/python/samples/concepts/chat_completion/simple_chatbot_streaming.py @@ -2,10 +2,7 @@ import asyncio -from samples.concepts.setup.chat_completion_services import ( - Services, - get_chat_completion_service_and_request_settings, -) +from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings from semantic_kernel.contents import ChatHistory, StreamingChatMessageContent # This sample shows how to create a chatbot that streams responses. @@ -26,6 +23,7 @@ # - Services.OLLAMA # - Services.ONNX # - Services.VERTEX_AI +# - Services.DEEPSEEK # Please make sure you have configured your environment correctly for the selected chat completion service. # Please note that not all models support streaming responses. 
Make sure to select a model that supports streaming. chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.AZURE_OPENAI) diff --git a/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer.py b/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer.py index 338c76519b0e..838d90ac18ab 100644 --- a/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer.py +++ b/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer.py @@ -27,6 +27,9 @@ # The purpose of this sample is to demonstrate how to use a kernel function and use a chat history reducer. # To build a basic chatbot, it is sufficient to use a ChatCompletionService with a chat history directly. +# Toggle this flag to view the chat history summary after a reduction was performed. +view_chat_history_summary_after_reduction = True + # You can select from the following chat completion services: # - Services.OPENAI # - Services.AZURE_OPENAI @@ -122,7 +125,8 @@ async def chat() -> bool: print("\n\nExiting chat...") return False - await summarization_reducer.reduce() + if is_reduced := await summarization_reducer.reduce(): + print(f"@ History reduced to {len(summarization_reducer.messages)} messages.") kernel_arguments = KernelArguments( settings=request_settings, @@ -136,6 +140,15 @@ async def chat() -> bool: summarization_reducer.add_user_message(user_input) summarization_reducer.add_message(answer.value[0]) + if view_chat_history_summary_after_reduction and is_reduced: + for msg in summarization_reducer.messages: + if msg.metadata and msg.metadata.get("__summary__"): + print("*" * 60) + print(f"Chat History Reduction Summary: {msg.content}") + print("*" * 60) + break + print("\n") + return True diff --git a/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer_keep_func_content.py b/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer_keep_func_content.py index b5d0eae75d24..591bbec053b8 100644 --- a/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer_keep_func_content.py +++ b/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer_keep_func_content.py @@ -32,6 +32,9 @@ # The purpose of this sample is to demonstrate how to use a kernel function and use a chat history reducer. # To build a basic chatbot, it is sufficient to use a ChatCompletionService with a chat history directly. +# Toggle this flag to view the chat history summary after a reduction was performed. 
+view_chat_history_summary_after_reduction = True + + # You can select from the following chat completion services: # - Services.OPENAI # - Services.AZURE_OPENAI @@ -136,7 +139,8 @@ async def chat() -> bool: print("\n\nExiting chat...") return False - await summarization_reducer.reduce() + if is_reduced := await summarization_reducer.reduce(): + print(f"@ History reduced to {len(summarization_reducer.messages)} messages.") kernel_arguments = KernelArguments( settings=request_settings, @@ -169,17 +173,26 @@ async def chat() -> bool: frc.append(item) for i, item in enumerate(fcc): - summarization_reducer.add_assistant_message_list([item]) + summarization_reducer.add_assistant_message([item]) processed_fccs.add(item.id) # Safely check if there's a matching FunctionResultContent if i < len(frc): assert fcc[i].id == frc[i].id # nosec - summarization_reducer.add_tool_message_list([frc[i]]) + summarization_reducer.add_tool_message([frc[i]]) processed_frcs.add(item.id) # Since this example is showing how to include FunctionCallContent and FunctionResultContent # in the summary, we need to add them to the chat history and also to the processed sets. + if view_chat_history_summary_after_reduction and is_reduced: + for msg in summarization_reducer.messages: + if msg.metadata and msg.metadata.get("__summary__"): + print("*" * 60) + print(f"Chat History Reduction Summary: {msg.content}") + print("*" * 60) + break + print("\n") + return True
diff --git a/python/samples/concepts/chat_history/README.md b/python/samples/concepts/chat_history/README.md
new file mode 100644
index 000000000000..1fada334e7c5
--- /dev/null
+++ b/python/samples/concepts/chat_history/README.md
@@ -0,0 +1,17 @@
+# Chat History manipulation samples
+
+This folder contains samples that demonstrate how to manipulate chat history in Semantic Kernel.
+
+## [Serialize Chat History](./serialize_chat_history.py)
+
+This sample demonstrates how to build a conversational chatbot using Semantic Kernel. It features auto function calling, but with file-based serialization of the chat history. This sample stores and reads the chat history at every turn. This is not the best way to do it, but it clearly demonstrates the mechanics.
+
+To run this sample, an environment with keys for the chosen chat service is required. In line 61 you can change the model used. This sample uses a temporary file to store the chat history, so no additional setup is required.
+
+## [Store Chat History in Cosmos DB](./store_chat_history_in_cosmosdb.py)
+
+This is a more complex version of the sample above; it uses Azure CosmosDB NoSQL to store the chat messages.
+
+To do that, a simple data model is defined, and a class is created that extends ChatHistory. This class adds `store` and `read` methods, as well as a `create_collection` method that creates a collection in CosmosDB (a condensed sketch of this pattern follows below).
+
+This sample further uses the same chat service setup as the sample above, so the keys and other parameters for the chosen model should be in the environment. In addition, an AZURE_COSMOS_DB_NO_SQL_URL and optionally an AZURE_COSMOS_DB_NO_SQL_KEY should be set in the environment; you can also rely on Entra ID auth instead of the key. The database name can also be put in the environment.
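+The overall flow in that sample looks roughly like the following condensed sketch. It is illustrative only; all of the names used (`AzureCosmosDBNoSQLStore`, `ChatHistoryInCosmosDB`, `chat_completion_service`, `request_settings`, `kernel`) come from `store_chat_history_in_cosmosdb.py` itself, and the service, settings, and kernel are assumed to be set up as in that sample.
+
+```python
+# Condensed, illustrative sketch of the CosmosDB-backed chat history flow from the sample.
+async with AzureCosmosDBNoSQLStore(create_database=True) as store:
+    # ChatHistoryInCosmosDB extends ChatHistory with store/read methods (defined in the sample).
+    history = ChatHistoryInCosmosDB(store=store, session_id="session1", user_id="user")
+    await history.create_collection(collection_name="chat_history")
+
+    await history.read_messages()  # load any previously stored turns for this session
+    history.add_user_message("Hi there, who are you?")
+    result = await chat_completion_service.get_chat_message_content(history, request_settings, kernel=kernel)
+    if result:
+        history.add_message(result)
+
+    await history.store_messages()  # persist the conversation back to Cosmos DB
+```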
diff --git a/python/samples/concepts/chat_history/serialize_chat_history.py b/python/samples/concepts/chat_history/serialize_chat_history.py index 331669be0906..f6c04bbd00c0 100644 --- a/python/samples/concepts/chat_history/serialize_chat_history.py +++ b/python/samples/concepts/chat_history/serialize_chat_history.py @@ -1,94 +1,112 @@ # Copyright (c) Microsoft. All rights reserved. import asyncio -import os -from typing import TYPE_CHECKING - -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior -from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( - AzureChatPromptExecutionSettings, -) -from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +import tempfile + +from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings from semantic_kernel.contents import ChatHistory -from semantic_kernel.core_plugins.math_plugin import MathPlugin -from semantic_kernel.core_plugins.time_plugin import TimePlugin -from semantic_kernel.functions import KernelArguments - -if TYPE_CHECKING: - pass - - -system_message = """ -You are a chat bot. Your name is Mosscap and -you have one goal: figure out what people need. -Your full name, should you need to know it, is -Splendid Speckled Mosscap. You communicate -effectively, but you tend to answer with long -flowery prose. You are also a math wizard, -especially for adding and subtracting. -You also excel at joke telling, where your tone is often sarcastic. -Once you have the answer I am looking for, -you will return a full answer to me as soon as possible. + +""" +This sample demonstrates how to build a conversational chatbot +using Semantic Kernel, it features auto function calling, +but with file-based serialization of the chat history. +This sample stores and reads the chat history at every turn. +This is not the best way to do it, but clearly demonstrates the mechanics. +More optimal would for instance be to only write once when a conversation is done. +And writing to something other then a file is also usually better. """ -kernel = Kernel() - -# Note: the underlying gpt-35/gpt-4 model version needs to be at least version 0613 to support tools. -kernel.add_service(AzureChatCompletion(service_id="chat")) - -plugins_directory = os.path.join(__file__, "../../../../../prompt_template_samples/") -# adding plugins to the kernel -kernel.add_plugin(MathPlugin(), plugin_name="math") -kernel.add_plugin(TimePlugin(), plugin_name="time") - -# Enabling or disabling function calling is done by setting the `function_choice_behavior` attribute for the -# prompt execution settings. When the function_call parameter is set to "auto" the model will decide which -# function to use, if any. -# -# There are two ways to define the `function_choice_behavior` parameter: -# 1. Using the type string as `"auto"`, `"required"`, or `"none"`. For example: -# configure `function_choice_behavior="auto"` parameter directly in the execution settings. -# 2. Using the FunctionChoiceBehavior class. For example: -# `function_choice_behavior=FunctionChoiceBehavior.Auto()`. -# Both of these configure the `auto` tool_choice and all of the available plugins/functions -# registered on the kernel. If you want to limit the available plugins/functions, you must -# configure the `filters` dictionary attribute for each type of function choice behavior. 
-# For example: -# -# from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior - -# function_choice_behavior = FunctionChoiceBehavior.Auto( -# filters={"included_functions": ["time-date", "time-time", "math-Add"]} -# ) -# -# The filters attribute allows you to specify either: `included_functions`, `excluded_functions`, -# `included_plugins`, or `excluded_plugins`. - -# Note: the number of responses for auto invoking tool calls is limited to 1. -# If configured to be greater than one, this value will be overridden to 1. -execution_settings = AzureChatPromptExecutionSettings( - service_id="chat", - max_tokens=2000, - temperature=0.7, - top_p=0.8, - function_choice_behavior=FunctionChoiceBehavior.Auto(), -) - -arguments = KernelArguments(settings=execution_settings) +# You can select from the following chat completion services that support function calling: +# - Services.OPENAI +# - Services.AZURE_OPENAI +# - Services.AZURE_AI_INFERENCE +# - Services.ANTHROPIC +# - Services.BEDROCK +# - Services.GOOGLE_AI +# - Services.MISTRAL_AI +# - Services.OLLAMA +# - Services.ONNX +# - Services.VERTEX_AI +# - Services.DEEPSEEK +# Please make sure you have configured your environment correctly for the selected chat completion service. +chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.OPENAI) + + +async def chat(file) -> bool: + """ + Continuously prompt the user for input and show the assistant's response. + Type 'exit' to exit. + """ + try: + # Try to load the chat history from a file. + history = ChatHistory.load_chat_history_from_file(file_path=file) + print(f"Chat history successfully loaded {len(history.messages)} messages.") + except Exception: + # Create a new chat history to store the system message, initial messages, and the conversation. + print("Chat history file not found. Starting a new conversation.") + history = ChatHistory() + history.add_system_message( + "You are a chat bot. Your name is Mosscap and you have one goal: figure out what people need." + ) + + try: + # Get the user input + user_input = input("User:> ") + except (KeyboardInterrupt, EOFError): + print("\n\nExiting chat...") + return False + + if user_input.lower().strip() == "exit": + print("\n\nExiting chat...") + return False + + # Add the user input to the chat history + history.add_user_message(user_input) + # Get a response from the chat completion service + result = await chat_completion_service.get_chat_message_content(history, request_settings) + + # Update the chat history with the user's input and the assistant's response + if result: + print(f"Mosscap:> {result}") + history.add_message(result) + + # Save the chat history to a file. + print(f"Saving {len(history.messages)} messages to the file.") + history.store_chat_history_to_file(file_path=file) + return True -async def main() -> None: - user_input = "What is the current hour plus 10?" - print(f"User:> {user_input}") - result = await kernel.invoke_prompt(prompt=user_input, arguments=arguments) +""" +Sample output: + +Welcome to the chat bot! + Type 'exit' to exit. + Try a math question to see function calling in action (e.g. 'what is 3+3?'). + Your chat history will be saved in: /tmpq1n1f6qk.json +Chat history file not found. Starting a new conversation. +User:> Hello, how are you? +Mosscap:> Hello! I'm here and ready to help. What do you need today? +Saving 3 messages to the file. +Chat history successfully loaded 3 messages. 
+User:> exit +""" - print(f"Mosscap:> {result}") - print("\nChat history:") - chat_history: ChatHistory = result.metadata["messages"] - print(chat_history.serialize()) +async def main() -> None: + chatting = True + with tempfile.NamedTemporaryFile(mode="w+", dir=".", suffix=".json", delete=True) as file: + print( + "Welcome to the chat bot!\n" + " Type 'exit' to exit.\n" + " Try a math question to see function calling in action (e.g. 'what is 3+3?')." + f" Your chat history will be saved in: {file.name}" + ) + try: + while chatting: + chatting = await chat(file.name) + except Exception: + print("Closing and removing the file.") if __name__ == "__main__": diff --git a/python/samples/concepts/chat_history/store_chat_history_in_cosmosdb.py b/python/samples/concepts/chat_history/store_chat_history_in_cosmosdb.py new file mode 100644 index 000000000000..e6a68d354e3d --- /dev/null +++ b/python/samples/concepts/chat_history/store_chat_history_in_cosmosdb.py @@ -0,0 +1,199 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +from dataclasses import dataclass +from typing import Annotated + +from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_no_sql_store import AzureCosmosDBNoSQLStore +from semantic_kernel.contents import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.core_plugins.math_plugin import MathPlugin +from semantic_kernel.core_plugins.time_plugin import TimePlugin +from semantic_kernel.data.record_definition.vector_store_model_decorator import vectorstoremodel +from semantic_kernel.data.record_definition.vector_store_record_fields import ( + VectorStoreRecordDataField, + VectorStoreRecordKeyField, +) +from semantic_kernel.data.vector_storage.vector_store import VectorStore +from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection + +""" +This sample demonstrates how to build a conversational chatbot +using Semantic Kernel, it features auto function calling, +but with Azure CosmosDB as storage for the chat history. +This sample stores and reads the chat history at every turn. +This is not the best way to do it, but clearly demonstrates the mechanics. + +Further refinement would be to only write once when a conversation is done. +And there is also no logic to see if there is something to write. +You could also enhance the ChatHistoryModel with a summary and a vector for that +in order to search for similar conversations. +""" + + +# 1. We first create simple datamodel for the chat history. +# Note that this model does not contain any vectors, +# those can be added, for instance to store a summary of the conversation. +@vectorstoremodel +@dataclass +class ChatHistoryModel: + session_id: Annotated[str, VectorStoreRecordKeyField] + user_id: Annotated[str, VectorStoreRecordDataField(is_filterable=True)] + messages: Annotated[list[dict[str, str]], VectorStoreRecordDataField(is_filterable=True)] + + +# 2. We then create a class that extends the ChatHistory class +# and implements the methods to store and read the chat history. +# This could also use one of the history reducers to make +# sure the database doesn't grow too large. +# It adds a `store` attribute and a couple of methods. 
+class ChatHistoryInCosmosDB(ChatHistory): + """This class extends the ChatHistory class to store the chat history in a Cosmos DB.""" + + session_id: str + user_id: str + store: VectorStore + collection: VectorStoreRecordCollection[str, ChatHistoryModel] | None = None + + async def create_collection(self, collection_name: str) -> None: + """Create a collection with the inbuild data model using the vector store. + + First create the collection, then call this method to create the collection itself. + """ + self.collection = self.store.get_collection( + collection_name=collection_name, + data_model_type=ChatHistoryModel, + ) + await self.collection.create_collection_if_not_exists() + + async def store_messages(self) -> None: + """Store the chat history in the Cosmos DB. + + Note that we use model_dump to convert the chat message content into a serializable format. + """ + if self.collection: + await self.collection.upsert( + ChatHistoryModel( + session_id=self.session_id, + user_id=self.user_id, + messages=[msg.model_dump() for msg in self.messages], + ) + ) + + async def read_messages(self) -> None: + """Read the chat history from the Cosmos DB. + + Note that we use the model_validate method to convert the serializable format back into a ChatMessageContent. + """ + if self.collection: + record = await self.collection.get(self.session_id) + if record: + for message in record.messages: + self.messages.append(ChatMessageContent.model_validate(message)) + + +# 3. We now create a fairly standard kernel, with functions and a chat service. +# Create and configure the kernel. +kernel = Kernel() + +# Load some sample plugins (for demonstration of function calling). +kernel.add_plugin(MathPlugin(), plugin_name="math") +kernel.add_plugin(TimePlugin(), plugin_name="time") + +# You can select from the following chat completion services that support function calling: +# - Services.OPENAI +# - Services.AZURE_OPENAI +# - Services.AZURE_AI_INFERENCE +# - Services.ANTHROPIC +# - Services.BEDROCK +# - Services.GOOGLE_AI +# - Services.MISTRAL_AI +# - Services.OLLAMA +# - Services.ONNX +# - Services.VERTEX_AI +# - Services.DEEPSEEK +# Please make sure you have configured your environment correctly for the selected chat completion service. +chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.AZURE_OPENAI) + +# Configure the function choice behavior. Here, we set it to Auto, where auto_invoke=True by default. +# With `auto_invoke=True`, the model will automatically choose and call functions as needed. +request_settings.function_choice_behavior = FunctionChoiceBehavior.Auto(filters={"excluded_plugins": ["ChatBot"]}) + +kernel.add_service(chat_completion_service) + + +# 4. The main chat loop, which takes a history object and prompts the user for input. +# It then adds the user input to the history and gets a response from the chat completion service. +# Finally, it prints the response and saves the chat history to the Cosmos DB. +async def chat(history: ChatHistoryInCosmosDB) -> bool: + """ + Continuously prompt the user for input and show the assistant's response. + Type 'exit' to exit. + """ + await history.read_messages() + print(f"Chat history successfully loaded {len(history.messages)} messages.") + if len(history.messages) == 0: + # if it is a new conversation, add the system message and a couple of initial messages. + history.add_system_message( + "You are a chat bot. Your name is Mosscap and you have one goal: figure out what people need." 
+ ) + history.add_user_message("Hi there, who are you?") + history.add_assistant_message("I am Mosscap, a chat bot. I'm trying to figure out what people need.") + + try: + user_input = input("User:> ") + except (KeyboardInterrupt, EOFError): + print("\n\nExiting chat...") + return False + + if user_input.lower().strip() == "exit": + print("\n\nExiting chat...") + return False + + # add the user input to the chat history + history.add_user_message(user_input) + + result = await chat_completion_service.get_chat_message_content(history, request_settings, kernel=kernel) + + if result: + print(f"Mosscap:> {result}") + history.add_message(result) + + # Save the chat history to CosmosDB. + print(f"Saving {len(history.messages)} messages to AzureCosmosDB.") + await history.store_messages() + return True + + +async def main() -> None: + delete_when_done = True + session_id = "session1" + chatting = True + # 5. We now create the store, ChatHistory and collection and start the chat loop. + + # First we enter the store context manager to connect. + # The create_database flag will create the database if it does not exist. + async with AzureCosmosDBNoSQLStore(create_database=True) as store: + # Then we create the chat history in CosmosDB. + history = ChatHistoryInCosmosDB(store=store, session_id=session_id, user_id="user") + # Finally we create the collection. + await history.create_collection(collection_name="chat_history") + print( + "Welcome to the chat bot!\n" + " Type 'exit' to exit.\n" + " Try a math question to see function calling in action (e.g. 'what is 3+3?')." + ) + try: + while chatting: + chatting = await chat(history) + except Exception: + print("Closing chat...") + if delete_when_done and history.collection: + await history.collection.delete_collection() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/filtering/auto_function_invoke_filters.py b/python/samples/concepts/filtering/auto_function_invoke_filters.py index b1e055e9397d..008150af011d 100644 --- a/python/samples/concepts/filtering/auto_function_invoke_filters.py +++ b/python/samples/concepts/filtering/auto_function_invoke_filters.py @@ -1,7 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. import asyncio -import os from semantic_kernel import Kernel from semantic_kernel.connectors.ai import FunctionChoiceBehavior @@ -29,10 +28,7 @@ # Note: the underlying gpt-35/gpt-4 model version needs to be at least version 0613 to support tools. kernel.add_service(OpenAIChatCompletion(service_id="chat")) -plugins_directory = os.path.join(__file__, "../../../../../prompt_template_samples/") # adding plugins to the kernel -# the joke plugin in the FunPlugins is a semantic plugin and has the function calling disabled. -# kernel.import_plugin_from_prompt_directory("chat", plugins_directory, "FunPlugin") # the math plugin is a core plugin and has the function calling enabled. 
kernel.add_plugin(MathPlugin(), plugin_name="math") kernel.add_plugin(TimePlugin(), plugin_name="time") diff --git a/python/samples/concepts/filtering/function_invocation_filters_stream.py b/python/samples/concepts/filtering/function_invocation_filters_stream.py index 74948472ac49..0f0b58208f5d 100644 --- a/python/samples/concepts/filtering/function_invocation_filters_stream.py +++ b/python/samples/concepts/filtering/function_invocation_filters_stream.py @@ -4,7 +4,6 @@ import logging import os from collections.abc import Callable, Coroutine -from functools import reduce from typing import Any from semantic_kernel import Kernel @@ -38,17 +37,21 @@ async def streaming_exception_handling( ): await next(context) - async def override_stream(stream): - try: - async for partial in stream: - yield partial - except Exception as e: - yield [ - StreamingChatMessageContent(role=AuthorRole.ASSISTANT, content=f"Exception caught: {e}", choice_index=0) - ] + if context.is_streaming: - stream = context.result.value - context.result = FunctionResult(function=context.result.function, value=override_stream(stream)) + async def override_stream(stream): + try: + async for partial in stream: + yield partial + except Exception as e: + yield [ + StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, content=f"Exception caught: {e}", choice_index=0 + ) + ] + + stream = context.result.value + context.result = FunctionResult(function=context.result.function, value=override_stream(stream)) async def chat(chat_history: ChatHistory) -> bool: @@ -77,7 +80,7 @@ async def chat(chat_history: ChatHistory) -> bool: print("") chat_history.add_user_message(user_input) if streamed_chunks: - streaming_chat_message = reduce(lambda first, second: first + second, streamed_chunks) + streaming_chat_message = sum(streamed_chunks[1:], streamed_chunks[0]) chat_history.add_message(streaming_chat_message) return True diff --git a/python/samples/concepts/filtering/retry_with_different_model.py b/python/samples/concepts/filtering/retry_with_different_model.py new file mode 100644 index 000000000000..e76f82ce7c7f --- /dev/null +++ b/python/samples/concepts/filtering/retry_with_different_model.py @@ -0,0 +1,98 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging +from collections.abc import Awaitable, Callable + +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( + OpenAIChatPromptExecutionSettings, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion +from semantic_kernel.filters import FunctionInvocationContext +from semantic_kernel.filters.filter_types import FilterTypes +from semantic_kernel.functions.kernel_arguments import KernelArguments + +# This sample shows how to use a filter to use a fallback service if the default service fails to execute the function. +# this works by replacing the settings that point to the default service +# with the settings that point to the fallback service +# after the default service fails to execute the function. 
+ +logger = logging.getLogger(__name__) + + +class RetryFilter: + """A filter that retries the function invocation with a different model if it fails.""" + + def __init__(self, default_service_id: str, fallback_service_id: str): + """Initialize the filter with the default and fallback service ids.""" + self.default_service_id = default_service_id + self.fallback_service_id = fallback_service_id + + async def retry_filter( + self, + context: FunctionInvocationContext, + next: Callable[[FunctionInvocationContext], Awaitable[None]], + ) -> None: + """A filter that retries the function invocation with a different model if it fails.""" + try: + # try the default function + await next(context) + except Exception as ex: + print("Expected failure to execute the function: ", ex) + # if the default function fails, try the fallback function + if ( + context.arguments + and context.arguments.execution_settings + and self.default_service_id in context.arguments.execution_settings + ): + # get the settings for the default service + settings = context.arguments.execution_settings.pop(self.default_service_id) + settings.service_id = self.fallback_service_id + # add them back with the right service id + context.arguments.execution_settings[self.fallback_service_id] = settings + # try again! + await next(context) + else: + raise ex + + +async def main() -> None: + # set the ids for the default and fallback services + default_service_id = "default_service" + fallback_service_id = "fallback_service" + kernel = Kernel() + # create the filter with the ids + retry_filter = RetryFilter(default_service_id=default_service_id, fallback_service_id=fallback_service_id) + # add the filter to the kernel + kernel.add_filter(FilterTypes.FUNCTION_INVOCATION, retry_filter.retry_filter) + + # add the default and fallback services + default_service = OpenAIChatCompletion(service_id=default_service_id, api_key="invalid_key") + kernel.add_service(default_service) + fallback_service = OpenAIChatCompletion(service_id=fallback_service_id) + kernel.add_service(fallback_service) + + # create the settings for the request + request_settings = OpenAIChatPromptExecutionSettings(service_id=default_service_id) + # invoke a simple prompt function + response = await kernel.invoke_prompt( + function_name="retry_function", + prompt="How are you today?", + arguments=KernelArguments(settings=request_settings), + ) + + print("Model response: ", response) + + # Sample output: + # Expected failure to execute the function: Error occurred while invoking function retry_function: + # (" service + # failed to complete the prompt", AuthenticationError("Error code: 401 - {'error': {'message': 'Incorrect API key + # provided: invalid_key. You can find your API key at https://platform.openai.com/account/api-keys.', 'type': + # 'invalid_request_error', 'param': None, 'code': 'invalid_api_key'}}")) + # Model response: I'm just a program, so I don't experience feelings, but I'm here and ready to help you out. + # How can I assist you today? 
+ + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/filtering/retry_with_filters.py b/python/samples/concepts/filtering/retry_with_filters.py index 92131ad1d292..e488a4aa5aa1 100644 --- a/python/samples/concepts/filtering/retry_with_filters.py +++ b/python/samples/concepts/filtering/retry_with_filters.py @@ -2,8 +2,7 @@ import asyncio import logging -from collections.abc import Callable, Coroutine -from typing import Any +from collections.abc import Awaitable, Callable from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings from semantic_kernel import Kernel @@ -34,7 +33,7 @@ def __init__(self): self._invocation_count = 0 @kernel_function(name="GetWeather", description="Get the weather of the day at the current location.") - def get_wather(self) -> str: + def get_weather(self) -> str: """Get the weather of the day at the current location. Simulates a call to an external service to get the weather. @@ -50,7 +49,7 @@ def get_wather(self) -> str: async def retry_filter( context: FunctionInvocationContext, - next: Callable[[FunctionInvocationContext], Coroutine[Any, Any, None]], + next: Callable[[FunctionInvocationContext], Awaitable[None]], ) -> None: """A filter that retries the function invocation if it fails. diff --git a/python/samples/concepts/memory/azure_ai_search_hotel_samples/README.md b/python/samples/concepts/memory/azure_ai_search_hotel_samples/README.md new file mode 100644 index 000000000000..c2d75ca0ca80 --- /dev/null +++ b/python/samples/concepts/memory/azure_ai_search_hotel_samples/README.md @@ -0,0 +1,72 @@ +## Azure AI Search with Hotel Sample Data + +This guide walks you through setting up your Azure AI Search Service with the correct index, data source, and indexer to run the hotel sample. + +### Setting Up the Azure AI Search Service + +1. **Import the Sample Data** + - Navigate to the **Search Service Overview** page and click **Import Data**. + - From the dropdown, select **Samples**, then choose **hotels-sample**. + - Click **Next: Add Cognitive Skills (Optional)**. + +2. **Skip the Cognitive Skills Page** + - No changes are needed here. Click **Next** to proceed. + +3. **Configure the Index Fields** + - The Python sample uses **snake_case** field names. Update the default field names accordingly. + - Since `HotelId` is the primary key, you cannot rename it directly. Instead, create a new field: + - Click **+ Add Field** and name it `hotel_id`. + - Enable **Retrievable**, **Filterable**, **Facetable**, and **Searchable** options. + - Rename other fields to snake case: + - `HotelName` → `hotel_name` + - There may be a current issue with index config that has trouble mapping the `HotelName` -> `hotel_name`, so as to not hit issues + deselect `retrievable` for `hotel_name`. It should still be `searchable`. + - Use the dropdown to rename complex fields like `Address` -> `address` and `Rooms` -> `rooms` with their sub-fields renamed. + - Add two new vector fields: + - `description_vector` + - `description_fr_vector` + - Configure these fields as: + - **Type**: `Collection(Edm.Single)` (for vector fields) + - **Retrievable**: Enabled (default setting) + - Click the **three dots (...)** on the right, then **Configure vector field**: + - Set **Dimensions** to `1536`. + - If no vector search profiles exist, click **Create**. + - Under **Algorithms**, click **Create** to set up a vector algorithm (default values are fine). 
+ - If no vectorizer exists, create one: + - Select the **Kind** (e.g., Azure OpenAI). + - Choose your **subscription, Azure OpenAI service, and model deployment**. + - Select your **authentication type**. + - Repeat this process for both `description_vector` and `description_fr_vector`. + +4. **Create an Indexer** + - On the next page, create an indexer with **default settings**, as the sample data is static. + - Click **Submit** to start the indexer. + - The indexing process may take a few minutes. + +### Generating Vectors on First Run + +In the `step_1_interact_with_the_collection.py` script: +- Set `first_run = True` to generate vectors for all entries in the index. +- This process may take a few minutes. + +### Using Precomputed Vectors for Subsequent Runs + +If your index already contains vectors: +- Set `first_run = False` to skip vector generation and perform only text and vector searches. + +### Example Search Results + +After running `step_1_interact_with_the_collection.py` you should see output similar to: + +#### **Text Search Results** +```text +Search results using text: + eitRUkFJSmFmWG93QUFBQUFBQUFBQT090 (in Nashville, USA): All of the suites feature full-sized kitchens stocked with cookware, separate living and sleeping areas and sofa beds. Some of the larger rooms have fireplaces and patios or balconies. Experience real country hospitality in the heart of bustling Nashville. The most vibrant music scene in the world is just outside your front door. (score: 7.613796) + eitRUkFJSmFmWG9jQUFBQUFBQUFBQT090 (in Sarasota, USA): The hotel is situated in a nineteenth century plaza, which has been expanded and renovated to the highest architectural standards to create a modern, functional and first-class hotel in which art and unique historical elements coexist with the most modern comforts. The hotel also regularly hosts events like wine tastings, beer dinners, and live music. (score: 6.1204605) + eitRUkFJSmFmWG9SQUFBQUFBQUFBQT090 (in Durham, USA): Save up to 50% off traditional hotels. Free WiFi, great location near downtown, full kitchen, washer & dryer, 24/7 support, bowling alley, fitness center and more. (score: 6.0284567) + +Search results using vector: + eitRUkFJSmFmWG93QUFBQUFBQUFBQT090 (in Nashville, USA): All of the suites feature full-sized kitchens stocked with cookware, separate living and sleeping areas and sofa beds. Some of the larger rooms have fireplaces and patios or balconies. Experience real country hospitality in the heart of bustling Nashville. The most vibrant music scene in the world is just outside your front door. (score: 0.6944429) + eitRUkFJSmFmWG9SQUFBQUFBQUFBQT090 (in Durham, USA): Save up to 50% off traditional hotels. Free WiFi, great location near downtown, full kitchen, washer & dryer, 24/7 support, bowling alley, fitness center and more. (score: 0.6776492) + eitRUkFJSmFmWG9PQUFBQUFBQUFBQT090 (in San Diego, USA): Extend Your Stay. Affordable home away from home, with amenities like free Wi-Fi, full kitchen, and convenient laundry service. 
(score: 0.67669696) +``` \ No newline at end of file diff --git a/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_0_data_model.py b/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_0_data_model.py index 271b61a47061..4f22bbb4a25a 100644 --- a/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_0_data_model.py +++ b/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_0_data_model.py @@ -21,6 +21,7 @@ # This model adds vectors for the 2 descriptions in English and French. # Both are based on the 1536 dimensions of the OpenAI models. # You can adjust this at creation time and then make the change below as well. +# Refer to the README for more information. ### diff --git a/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_1_interact_with_the_collection.py b/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_1_interact_with_the_collection.py index f110513d2ea8..c602f24034ad 100644 --- a/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_1_interact_with_the_collection.py +++ b/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_1_interact_with_the_collection.py @@ -25,6 +25,9 @@ first_run = False +# Note: you may need to update this `collection_name` depending upon how your index is named. +COLLECTION_NAME = "hotels-sample-index" + async def add_vectors(collection: AzureAISearchCollection, vectorizer: VectorStoreRecordUtils): """This is a simple function that uses the VectorStoreRecordUtils to add vectors to the records in the collection. @@ -53,15 +56,17 @@ async def main(query: str, first_run: bool = False): vectorizer = VectorStoreRecordUtils(kernel) # Create the Azure AI Search collection collection = AzureAISearchCollection[HotelSampleClass]( - collection_name="hotels-sample-index", data_model_type=HotelSampleClass + collection_name=COLLECTION_NAME, data_model_type=HotelSampleClass ) # Check if the collection exists. if not await collection.does_collection_exist(): raise ValueError( "Collection does not exist, please create using the " "Azure AI Search portal wizard -> Import Data -> Samples -> hotels-sample." - "During creation adopt the schema to add the description_vector and description_fr_vector fields." + "During creation adapt the index schema to add the description_vector and description_fr_vector fields." + "You may need to rename other fields to match the data model." "Then run this sample with `first_run=True` to add the vectors." + "Refer to the README for more information." ) # If it is the first run and there are no vectors, add them. diff --git a/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_2_use_as_a_plugin.py b/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_2_use_as_a_plugin.py index af27024542ec..5814bc95f7d2 100644 --- a/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_2_use_as_a_plugin.py +++ b/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_2_use_as_a_plugin.py @@ -41,6 +41,9 @@ KernelParameterMetadata, ) +# Note: you may need to update this `collection_name` depending upon how your index is named. +COLLECTION_NAME = "hotels-sample-index" + # Create Kernel and add both chat completion and text embeddings services. kernel = Kernel() service_id = "chat" @@ -54,7 +57,7 @@ # You can also choose to use the `from_vectorized_search` method to use vector search. # Or the `from_vectorizable_text_search` method if the collection is setup to vectorize incoming texts. 
text_search = VectorStoreTextSearch.from_vector_text_search( - AzureAISearchCollection[HotelSampleClass](collection_name="hotels-sample-index", data_model_type=HotelSampleClass) + AzureAISearchCollection[HotelSampleClass](collection_name=COLLECTION_NAME, data_model_type=HotelSampleClass) ) @@ -139,7 +142,14 @@ def update_options_search( type="str", is_required=True, type_object=str, - ) + ), + KernelParameterMetadata( + name="hotel_name", + description="The name of the hotel.", + type="str", + type_object=str, + is_required=True, + ), ], # it uses the default update options that will turn the hotel_id into a filter. ), diff --git a/python/samples/concepts/memory/azure_cognitive_search_memory.py b/python/samples/concepts/memory/azure_cognitive_search_memory.py deleted file mode 100644 index aaa7b3b0d213..000000000000 --- a/python/samples/concepts/memory/azure_cognitive_search_memory.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -##################################################### -# This sample should be considered obsolete, as we are moving things towards the new data model. -# Please check out the azure_ai_search_hotel_samples folder for the latest implementation. -##################################################### - -import asyncio - -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.open_ai import AzureTextCompletion, AzureTextEmbedding -from semantic_kernel.connectors.memory.azure_cognitive_search import AzureCognitiveSearchMemoryStore -from semantic_kernel.core_plugins import TextMemoryPlugin -from semantic_kernel.memory import SemanticTextMemory - -COLLECTION_NAME = "acs-index-sample" - - -async def populate_memory(memory: SemanticTextMemory) -> None: - # Add some documents to the ACS semantic memory - await memory.save_information(COLLECTION_NAME, id="info1", text="My name is Andrea") - await memory.save_information(COLLECTION_NAME, id="info2", text="I currently work as a tour guide") - await memory.save_information(COLLECTION_NAME, id="info3", text="I've been living in Seattle since 2005") - await memory.save_information( - COLLECTION_NAME, - id="info4", - text="I visited France and Italy five times since 2015", - ) - await memory.save_information(COLLECTION_NAME, id="info5", text="My family is from New York") - - -async def search_acs_memory_questions(memory: SemanticTextMemory) -> None: - questions = [ - "what's my name", - "where do I live?", - "where's my family from?", - "where have I traveled?", - "what do I do for work", - ] - - for question in questions: - print(f"Question: {question}") - result = await memory.search(COLLECTION_NAME, question) - print(f"Answer: {result[0].text}\n") - - -async def main() -> None: - kernel = Kernel() - - vector_size = 1536 - - # Setting up OpenAI services for text completion and text embedding - kernel.add_service(AzureTextCompletion(service_id="dv")) - async with AzureCognitiveSearchMemoryStore(vector_size=vector_size) as acs_connector: - memory = SemanticTextMemory(storage=acs_connector, embeddings_generator=AzureTextEmbedding(service_id="ada")) - kernel.add_plugin(TextMemoryPlugin(memory), "TextMemoryPlugin") - - print("Populating memory...") - await populate_memory(memory) - - print("Asking questions... 
(manually)") - await search_acs_memory_questions(memory) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/memory/new_memory.py b/python/samples/concepts/memory/complex_memory.py similarity index 56% rename from python/samples/concepts/memory/new_memory.py rename to python/samples/concepts/memory/complex_memory.py index 11f8d3b20b51..423508388e9c 100644 --- a/python/samples/concepts/memory/new_memory.py +++ b/python/samples/concepts/memory/complex_memory.py @@ -4,11 +4,13 @@ import asyncio from collections.abc import Callable from dataclasses import dataclass, field -from typing import Annotated +from typing import Annotated, Literal from uuid import uuid4 import numpy as np +from samples.concepts.memory.utils import print_record +from samples.concepts.resources.utils import Colors, print_with_color from semantic_kernel import Kernel from semantic_kernel.connectors.ai.open_ai import ( AzureTextEmbedding, @@ -16,20 +18,24 @@ OpenAITextEmbedding, ) from semantic_kernel.connectors.memory.azure_ai_search import AzureAISearchCollection -from semantic_kernel.connectors.memory.azure_cosmos_db import AzureCosmosDBNoSQLCollection +from semantic_kernel.connectors.memory.azure_cosmos_db import ( + AzureCosmosDBforMongoDBCollection, + AzureCosmosDBNoSQLCollection, +) +from semantic_kernel.connectors.memory.chroma import ChromaCollection from semantic_kernel.connectors.memory.in_memory import InMemoryVectorCollection from semantic_kernel.connectors.memory.postgres import PostgresCollection from semantic_kernel.connectors.memory.qdrant import QdrantCollection from semantic_kernel.connectors.memory.redis import RedisHashsetCollection, RedisJsonCollection from semantic_kernel.connectors.memory.weaviate import WeaviateCollection from semantic_kernel.data import ( + DISTANCE_FUNCTION_DIRECTION_HELPER, DistanceFunction, IndexKind, VectorizableTextSearchMixin, VectorizedSearchMixin, VectorSearchFilter, VectorSearchOptions, - VectorSearchResult, VectorStoreRecordCollection, VectorStoreRecordDataField, VectorStoreRecordKeyField, @@ -39,40 +45,48 @@ vectorstoremodel, ) +# This is a rather complex sample, showing how to use the vector store +# with a number of different collections. +# It also shows how to use the vector store with a number of different data models. +# It also uses all the types of search available in the vector store. 
+# For a simpler example, see "simple_memory.py" -def get_data_model_array(index_kind: IndexKind, distance_function: DistanceFunction) -> type: - @vectorstoremodel - @dataclass - class DataModelArray: - vector: Annotated[ - np.ndarray | None, - VectorStoreRecordVectorField( - embedding_settings={"embedding": OpenAIEmbeddingPromptExecutionSettings(dimensions=1536)}, - index_kind=index_kind, - dimensions=1536, - distance_function=distance_function, - property_type="float", - serialize_function=np.ndarray.tolist, - deserialize_function=np.array, - ), - ] = None - id: Annotated[str, VectorStoreRecordKeyField()] = field(default_factory=lambda: str(uuid4())) - content: Annotated[ - str, - VectorStoreRecordDataField( - has_embedding=True, - embedding_property_name="vector", - property_type="str", - is_full_text_searchable=True, - ), - ] = "content1" - title: Annotated[str, VectorStoreRecordDataField(property_type="str", is_full_text_searchable=True)] = "title" - tag: Annotated[str, VectorStoreRecordDataField(property_type="str", is_filterable=True)] = "tag" - return DataModelArray +def get_data_model(type: Literal["array", "list"], index_kind: IndexKind, distance_function: DistanceFunction) -> type: + if type == "array": + @vectorstoremodel + @dataclass + class DataModelArray: + vector: Annotated[ + np.ndarray | None, + VectorStoreRecordVectorField( + embedding_settings={"embedding": OpenAIEmbeddingPromptExecutionSettings(dimensions=1536)}, + index_kind=index_kind, + dimensions=1536, + distance_function=distance_function, + property_type="float", + serialize_function=np.ndarray.tolist, + deserialize_function=np.array, + ), + ] = None + id: Annotated[str, VectorStoreRecordKeyField()] = field(default_factory=lambda: str(uuid4())) + content: Annotated[ + str, + VectorStoreRecordDataField( + has_embedding=True, + embedding_property_name="vector", + property_type="str", + is_full_text_searchable=True, + ), + ] = "content1" + title: Annotated[str, VectorStoreRecordDataField(property_type="str", is_full_text_searchable=True)] = ( + "title" + ) + tag: Annotated[str, VectorStoreRecordDataField(property_type="str", is_filterable=True)] = "tag" + + return DataModelArray -def get_data_model_list(index_kind: IndexKind, distance_function: DistanceFunction) -> type: @vectorstoremodel @dataclass class DataModelList: @@ -103,9 +117,12 @@ class DataModelList: collection_name = "test" -# Depending on the vector database, the index kind and distance function may need to be adjusted, +# Depending on the vector database, the index kind and distance function may need to be adjusted # since not all combinations are supported by all databases. -DataModel = get_data_model_array(IndexKind.HNSW, DistanceFunction.COSINE_SIMILARITY) +# The values below might need to be changed for your collection to work. +distance_function = DistanceFunction.EUCLIDEAN_SQUARED_DISTANCE +index_kind = IndexKind.HNSW +DataModel = get_data_model("array", index_kind, distance_function) # A list of VectorStoreRecordCollection that can be used. # Available collections are: @@ -124,6 +141,12 @@ class DataModelList: # https://learn.microsoft.com/en-us/azure/cosmos-db/how-to-develop-emulator?tabs=windows%2Cpython&pivots=api-nosql # Please see the link above to learn how to set up the Azure Cosmos NoSQL emulator on your machine. # For this sample to work with Azure Cosmos NoSQL, please adjust the index_kind of the data model to QUANTIZED_FLAT. 
+# - azure_cosmos_mongodb: Azure Cosmos MongoDB +# https://learn.microsoft.com/en-us/azure/cosmos-db/mongodb/introduction +# - chroma: Chroma +# The chroma collection is currently only available for in-memory versions +# Client-Server mode and Chroma Cloud are not yet supported. +# More info on Chroma here: https://docs.trychroma.com/docs/overview/introduction # This is represented as a mapping from the collection name to a # function which returns the collection. # Using a function allows for lazy initialization of the collection, @@ -162,29 +185,23 @@ class DataModelList: collection_name=collection_name, create_database=True, ), + "azure_cosmos_mongodb": lambda: AzureCosmosDBforMongoDBCollection( + data_model_type=DataModel, + collection_name=collection_name, + ), + "chroma": lambda: ChromaCollection(data_model_type=DataModel, collection_name=collection_name), } -def print_record(result: VectorSearchResult | None = None, record: DataModel | None = None): - if result: - record = result.record - print(f" Found id: {record.id}") - print(f" Content: {record.content}") - if record.vector is not None: - print(f" Vector (first five): {record.vector[:5]}") - - -async def main(collection: str, use_azure_openai: bool, embedding_model: str): +async def main(collection: str, use_azure_openai: bool): print("-" * 30) kernel = Kernel() - service_id = "embedding" - if use_azure_openai: - embedder = AzureTextEmbedding(service_id=service_id, deployment_name=embedding_model) - else: - embedder = OpenAITextEmbedding(service_id=service_id, ai_model_id=embedding_model) + embedder = ( + AzureTextEmbedding(service_id="embedding") if use_azure_openai else OpenAITextEmbedding(service_id="embedding") + ) kernel.add_service(embedder) async with collections[collection]() as record_collection: - print(f"Creating {collection} collection!") + print_with_color(f"Creating {collection} collection!", Colors.CGREY) await record_collection.delete_collection() await record_collection.create_collection_if_not_exists() @@ -200,16 +217,22 @@ async def main(collection: str, use_azure_openai: bool, embedding_model: str): title="Semantic Kernel Languages", tag="general", ) + record3 = DataModel( + content="```python\nfrom semantic_kernel import Kernel\nkernel = Kernel()\n```", + id="d5c9913a-e015-4944-b960-5d4a84bca002", + title="Code sample", + tag="code", + ) - print("Adding records!") + print_with_color("Adding records!", Colors.CBLUE) records = await VectorStoreRecordUtils(kernel).add_vector_to_records( - [record1, record2], data_model_type=DataModel + [record1, record2, record3], data_model_type=DataModel ) keys = await record_collection.upsert_batch(records) print(f" Upserted {keys=}") - print("Getting records!") - results = await record_collection.get_batch([record1.id, record2.id]) + print_with_color("Getting records!", Colors.CBLUE) + results = await record_collection.get_batch([record1.id, record2.id, record3.id]) if results: [print_record(record=result) for result in results] else: @@ -219,49 +242,48 @@ async def main(collection: str, use_azure_openai: bool, embedding_model: str): include_vectors=True, filter=VectorSearchFilter.equal_to("tag", "general"), ) + print("-" * 30) + print_with_color("Searching for 'python', with filter 'tag == general'", Colors.CBLUE) if isinstance(record_collection, VectorTextSearchMixin): print("-" * 30) - print("Using text search") - try: - search_results = await record_collection.text_search("python", options) - if search_results.total_count == 0: - print("\nNothing found...\n") - else: - 
[print_record(result) async for result in search_results.results] - except Exception: - print("Text search could not execute.") + print_with_color("Using text search", Colors.CBLUE) + search_results = await record_collection.text_search("python", options) + if search_results.total_count == 0: + print("\nNothing found...\n") + else: + [print_record(result) async for result in search_results.results] if isinstance(record_collection, VectorizedSearchMixin): print("-" * 30) - print( - "Using vectorized search, depending on the distance function, " - "the better score might be higher or lower." + print_with_color( + f"Using vectorized search, for {distance_function.value}, " + f"the {'higher' if DISTANCE_FUNCTION_DIRECTION_HELPER[distance_function](1, 0) else 'lower'} the score the better" # noqa: E501 + f"", + Colors.CBLUE, ) - try: - search_results = await record_collection.vectorized_search( - vector=(await embedder.generate_raw_embeddings(["python"]))[0], - options=VectorSearchOptions(vector_field_name="vector", include_vectors=True), - ) - if search_results.total_count == 0: - print("\nNothing found...\n") - else: - [print_record(result) async for result in search_results.results] - except Exception: - print("Vectorized search could not execute.") + search_results = await record_collection.vectorized_search( + vector=(await embedder.generate_raw_embeddings(["python"]))[0], + options=options, + ) + if search_results.total_count == 0: + print("\nNothing found...\n") + else: + [print_record(result) async for result in search_results.results] if isinstance(record_collection, VectorizableTextSearchMixin): print("-" * 30) - print("Using vectorizable text search") - try: - search_results = await record_collection.vectorizable_text_search("python", options) - if search_results.total_count == 0: - print("\nNothing found...\n") - else: - [print_record(result) async for result in search_results.results] - except Exception: - print("Vectorizable text search could not execute.") + print_with_color( + f"Using vectorized search, for {distance_function.value}, " + f"the {'higher' if DISTANCE_FUNCTION_DIRECTION_HELPER[distance_function](1, 0) else 'lower'} the score the better", # noqa: E501 + Colors.CBLUE, + ) + search_results = await record_collection.vectorizable_text_search("python", options) + if search_results.total_count == 0: + print("\nNothing found...\n") + else: + [print_record(result) async for result in search_results.results] print("-" * 30) - print("Deleting collection!") + print_with_color("Deleting collection!", Colors.CBLUE) await record_collection.delete_collection() - print("Done!") + print_with_color("Done!", Colors.CGREY) if __name__ == "__main__": @@ -271,10 +293,5 @@ async def main(collection: str, use_azure_openai: bool, embedding_model: str): parser.add_argument("--collection", default="in_memory", choices=collections.keys(), help="What collection to use.") # Option of whether to use OpenAI or Azure OpenAI. parser.add_argument("--use-azure-openai", action="store_true", help="Use Azure OpenAI instead of OpenAI.") - # Model - parser.add_argument( - "--model", default="text-embedding-3-small", help="The model or deployment to use for embeddings." 
- ) args = parser.parse_args() - - asyncio.run(main(collection=args.collection, use_azure_openai=args.use_azure_openai, embedding_model=args.model)) + asyncio.run(main(collection=args.collection, use_azure_openai=args.use_azure_openai)) diff --git a/python/samples/concepts/memory/memory.py b/python/samples/concepts/memory/memory.py deleted file mode 100644 index 95b09bf0b7f3..000000000000 --- a/python/samples/concepts/memory/memory.py +++ /dev/null @@ -1,120 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -##################################################### -# This sample should be considered obsolete, as we are moving things towards the new data model. -# Please check out the new_memory.py sample for the latest implementation. -##################################################### - -import asyncio - -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion, OpenAITextEmbedding -from semantic_kernel.core_plugins import TextMemoryPlugin -from semantic_kernel.functions import KernelFunction -from semantic_kernel.memory import SemanticTextMemory, VolatileMemoryStore -from semantic_kernel.prompt_template import PromptTemplateConfig - -collection_id = "generic" - - -async def populate_memory(memory: SemanticTextMemory) -> None: - # Add some documents to the semantic memory - await memory.save_information(collection=collection_id, id="info1", text="Your budget for 2024 is $100,000") - await memory.save_information(collection=collection_id, id="info2", text="Your savings from 2023 are $50,000") - await memory.save_information(collection=collection_id, id="info3", text="Your investments are $80,000") - - -async def search_memory_examples(memory: SemanticTextMemory) -> None: - questions = ["What is my budget for 2024?", "What are my savings from 2023?", "What are my investments?"] - - for question in questions: - print(f"Question: {question}") - result = await memory.search(collection_id, question) - print(f"Answer: {result[0].text}\n") - - -async def setup_chat_with_memory( - kernel: Kernel, - service_id: str, -) -> KernelFunction: - prompt = """ - ChatBot can have a conversation with you about any topic. - It can give explicit instructions or say 'I don't know' if - it does not have an answer. - - Information about me, from previous conversations: - - {{recall 'budget by year'}} What is my budget for 2024? - - {{recall 'savings from previous year'}} What are my savings from 2023? - - {{recall 'investments'}} What are my investments? 
- - {{$request}} - """.strip() - - prompt_template_config = PromptTemplateConfig( - template=prompt, - execution_settings={service_id: kernel.get_prompt_execution_settings_from_service_id(service_id=service_id)}, - ) - - return kernel.add_function( - function_name="chat_with_memory", - plugin_name="TextMemoryPlugin", - prompt_template_config=prompt_template_config, - ) - - -async def chat(kernel: Kernel, chat_func: KernelFunction) -> bool: - try: - user_input = input("User:> ") - except KeyboardInterrupt: - print("\n\nExiting chat...") - return False - except EOFError: - print("\n\nExiting chat...") - return False - - if user_input == "exit": - print("\n\nExiting chat...") - return False - - answer = await kernel.invoke(chat_func, request=user_input) - - print(f"ChatBot:> {answer}") - return True - - -async def main() -> None: - kernel = Kernel() - - service_id = "chat-gpt" - kernel.add_service(OpenAIChatCompletion(service_id=service_id, ai_model_id="gpt-3.5-turbo")) - embedding_gen = OpenAITextEmbedding( - service_id="ada", - ai_model_id="text-embedding-ada-002", - ) - kernel.add_service(embedding_gen) - - memory = SemanticTextMemory(storage=VolatileMemoryStore(), embeddings_generator=embedding_gen) - kernel.add_plugin(TextMemoryPlugin(memory), "TextMemoryPlugin") - - print("Populating memory...") - await populate_memory(memory) - - print("Asking questions... (manually)") - await search_memory_examples(memory) - - print("Setting up a chat (with memory!)") - chat_func = await setup_chat_with_memory(kernel, service_id) - - print("Begin chatting (type 'exit' to exit):\n") - print( - "Welcome to the chat bot!\ - \n Type 'exit' to exit.\ - \n Try asking a question about your finances (i.e. \"talk to me about my finances\")." - ) - chatting = True - while chatting: - chatting = await chat(kernel, chat_func) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/memory/pandas_memory.py b/python/samples/concepts/memory/memory_with_pandas.py similarity index 57% rename from python/samples/concepts/memory/pandas_memory.py rename to python/samples/concepts/memory/memory_with_pandas.py index e04a1ac3950f..7a9b7f6b5d1c 100644 --- a/python/samples/concepts/memory/pandas_memory.py +++ b/python/samples/concepts/memory/memory_with_pandas.py @@ -39,36 +39,31 @@ async def main(): kernel.add_service(OpenAITextEmbedding(service_id="embedding", ai_model_id="text-embedding-3-small")) # create the record collection - record_collection = AzureAISearchCollection[pd.DataFrame]( + async with AzureAISearchCollection[pd.DataFrame]( data_model_type=pd.DataFrame, data_model_definition=model_fields, - ) - # create some records - records = [ - {"id": str(uuid4()), "content": "my dict text", "vector": None}, - {"id": str(uuid4()), "content": "my second text", "vector": None}, - ] + ) as record_collection: + # create some records + records = [ + {"id": str(uuid4()), "content": "my dict text", "vector": None}, + {"id": str(uuid4()), "content": "my second text", "vector": None}, + ] - # create the dataframe and add the embeddings - df = pd.DataFrame(records) - df = await VectorStoreRecordUtils(kernel).add_vector_to_records(df, None, data_model_definition=model_fields) - print("Records with embeddings:") - print(df.shape) - print(df.head(5)) + # create the dataframe and add the embeddings + df = pd.DataFrame(records) + df = await VectorStoreRecordUtils(kernel).add_vector_to_records(df, None, data_model_definition=model_fields) + print("Records with embeddings:") + print(df.shape) + 
print(df.head(5)) - # upsert the records (for a container, upsert and upsert_batch are equivalent) - await record_collection.upsert_batch(df) + # upsert the records (for a container, upsert and upsert_batch are equivalent) + await record_collection.upsert_batch(df) - # retrieve a record - result = await record_collection.get(records[0]["id"]) - print("Retrieved records:") - print(result.shape) - print(result.head(5)) - - # explicit cleanup, usually not needed, but a script like this - # closes so fast that the async close triggered by delete may not finish on time - del record_collection - await asyncio.sleep(1) + # retrieve a record + result = await record_collection.get(records[0]["id"]) + print("Retrieved records:") + print(result.shape) + print(result.head(5)) if __name__ == "__main__": diff --git a/python/samples/concepts/memory/simple_memory.py b/python/samples/concepts/memory/simple_memory.py new file mode 100644 index 000000000000..941b5f59baa7 --- /dev/null +++ b/python/samples/concepts/memory/simple_memory.py @@ -0,0 +1,170 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +from collections.abc import Sequence +from dataclasses import dataclass, field +from typing import Annotated +from uuid import uuid4 + +from samples.concepts.memory.utils import print_record +from samples.concepts.resources.utils import Colors, print_with_color +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.open_ai import ( + OpenAIEmbeddingPromptExecutionSettings, + OpenAITextEmbedding, +) +from semantic_kernel.connectors.memory.in_memory import InMemoryVectorCollection +from semantic_kernel.data import ( + DISTANCE_FUNCTION_DIRECTION_HELPER, + DistanceFunction, + IndexKind, + VectorSearchFilter, + VectorSearchOptions, + VectorStoreRecordDataField, + VectorStoreRecordKeyField, + VectorStoreRecordUtils, + VectorStoreRecordVectorField, + vectorstoremodel, +) + +# This is the most basic example of a vector store and collection +# For a more complex example, using different collection types, see "complex_memory.py" +# This sample uses openai text embeddings, so make sure to have your environment variables set up +# it needs openai api key and embedding model id +kernel = Kernel() +embedder = OpenAITextEmbedding(service_id="embedding") +kernel.add_service(embedder) + +# Next, you need to define your data structure +# In this case, we are using a dataclass to define our data structure +# you can also use a pydantic model, or a vanilla python class, see "data_models.py" for more examples +# Inside the model we define which fields we want to use, and which fields are vectors +# and for vector fields we define what kind of index we want to use, and what distance function we want to use +# This has been done in constants here for simplicity, but you can also define them in the model itself +# Next we create three records using that model + +DISTANCE_FUNCTION = DistanceFunction.COSINE_SIMILARITY +# The in memory collection does not actually use a index, so this variable is not relevant, here for completeness +INDEX_KIND = IndexKind.IVF_FLAT + + +@vectorstoremodel +@dataclass +class DataModel: + vector: Annotated[ + list[float] | None, + VectorStoreRecordVectorField( + embedding_settings={"embedding": OpenAIEmbeddingPromptExecutionSettings()}, + index_kind=INDEX_KIND, + dimensions=1536, + distance_function=DISTANCE_FUNCTION, + property_type="float", + ), + ] = None + id: Annotated[str, VectorStoreRecordKeyField()] = field(default_factory=lambda: str(uuid4())) + content: 
Annotated[ + str, + VectorStoreRecordDataField( + has_embedding=True, + embedding_property_name="vector", + property_type="str", + is_full_text_searchable=True, + ), + ] = "content1" + title: Annotated[str, VectorStoreRecordDataField(property_type="str", is_full_text_searchable=True)] = "title" + tag: Annotated[str, VectorStoreRecordDataField(property_type="str", is_filterable=True)] = "tag" + + +records = [ + DataModel( + content="Semantic Kernel is awesome", + id="e6103c03-487f-4d7d-9c23-4723651c17f4", + title="Overview", + tag="general", + ), + DataModel( + content="Semantic Kernel is available in dotnet, python and Java.", + id="09caec77-f7e1-466a-bcec-f1d51c5b15be", + title="Semantic Kernel Languages", + tag="general", + ), + DataModel( + content="```python\nfrom semantic_kernel import Kernel\nkernel = Kernel()\n```", + id="d5c9913a-e015-4944-b960-5d4a84bca002", + title="Code sample", + tag="code", + ), +] + + +async def main(): + print("-" * 30) + # Create the collection here + # by using the generic we make sure that IDE's understand what you need to pass in and get back + # we also use the async with to open and close the connection + # for the in memory collection, this is just a no-op + # but for other collections, like Azure AI Search, this will open and close the connection + async with InMemoryVectorCollection[DataModel]( + collection_name="test", + data_model_type=DataModel, + ) as record_collection: + # Create the collection after wiping it + print_with_color("Creating test collection!", Colors.CGREY) + await record_collection.delete_collection() + await record_collection.create_collection_if_not_exists() + + # First add vectors to the records + print_with_color("Adding records!", Colors.CBLUE) + records_with_embedding = await VectorStoreRecordUtils(kernel).add_vector_to_records( + records, data_model_type=DataModel + ) + # Next upsert them to the store. + keys = await record_collection.upsert_batch(records_with_embedding) + print(f" Upserted {keys=}") + print("-" * 30) + + # Now we can get the records back + print_with_color("Getting records!", Colors.CBLUE) + results = await record_collection.get_batch([records[0].id, records[1].id, records[2].id]) + if results and isinstance(results, Sequence): + [print_record(record=result) for result in results] + else: + print("Nothing found...") + print("-" * 30) + + # Now we can search for records + # First we define the options + # The most important option is the vector_field_name, which is the name of the field that contains the vector + # The other options are optional, but can be useful + # The filter option is used to filter the results based on the tag field + options = VectorSearchOptions( + vector_field_name="vector", + include_vectors=True, + filter=VectorSearchFilter.equal_to("tag", "general"), + ) + query = "python" + print_with_color(f"Searching for '{query}', with filter 'tag == general'", Colors.CBLUE) + print_with_color( + f"Using vectorized search, for {DISTANCE_FUNCTION.value}, " + f"the {'higher' if DISTANCE_FUNCTION_DIRECTION_HELPER[DISTANCE_FUNCTION](1, 0) else 'lower'} the score the better" # noqa: E501 + f"", + Colors.CBLUE, + ) + search_results = await record_collection.vectorized_search( + vector=(await embedder.generate_raw_embeddings([query]))[0], + options=options, + ) + if search_results.total_count == 0: + print("\nNothing found...\n") + else: + [print_record(result) async for result in search_results.results] + print("-" * 30) + + # lets cleanup! 
+ print_with_color("Deleting collection!", Colors.CBLUE) + await record_collection.delete_collection() + print_with_color("Done!", Colors.CGREY) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/memory/utils.py b/python/samples/concepts/memory/utils.py new file mode 100644 index 000000000000..6f0c94c788c9 --- /dev/null +++ b/python/samples/concepts/memory/utils.py @@ -0,0 +1,23 @@ +# Copyright (c) Microsoft. All rights reserved. + + +from typing import TypeVar + +from samples.concepts.resources.utils import Colors, print_with_color +from semantic_kernel.data import ( + VectorSearchResult, +) + +_T = TypeVar("_T") + + +def print_record(result: VectorSearchResult[_T] | None = None, record: _T | None = None): + if result: + record = result.record + print_with_color(f" Found id: {record.id}", Colors.CGREEN) + if result and result.score is not None: + print_with_color(f" Score: {result.score}", Colors.CWHITE) + print_with_color(f" Content: {record.content}", Colors.CWHITE) + print_with_color(f" Tag: {record.tag}", Colors.CWHITE) + if record.vector is not None: + print_with_color(f" Vector (first five): {record.vector[:5]}", Colors.CWHITE) diff --git a/python/samples/concepts/planners/azure_openai_function_calling_stepwise_planner.py b/python/samples/concepts/planners/azure_openai_function_calling_stepwise_planner.py deleted file mode 100644 index 6627a2a7fb26..000000000000 --- a/python/samples/concepts/planners/azure_openai_function_calling_stepwise_planner.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import os - -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.core_plugins import MathPlugin, TimePlugin -from semantic_kernel.planners import FunctionCallingStepwisePlanner, FunctionCallingStepwisePlannerOptions - - -async def main(): - kernel = Kernel() - - service_id = "planner" - kernel.add_service( - AzureChatCompletion( - service_id=service_id, - ), - ) - - plugin_path = os.path.join( - os.path.dirname(os.path.dirname(os.path.realpath(__file__))), - "resources", - ) - kernel.add_plugin(parent_directory=plugin_path, plugin_name="email_plugin") - - kernel.add_plugin(MathPlugin(), "MathPlugin") - kernel.add_plugin(TimePlugin(), "TimePlugin") - - questions = [ - "What is the current hour number, plus 5?", - "What is 387 minus 22? Email the solution to John and Mary.", - "Write a limerick, translate it to Spanish, and send it to Jane", - ] - - options = FunctionCallingStepwisePlannerOptions( - max_iterations=10, - max_tokens=4000, - ) - - planner = FunctionCallingStepwisePlanner(service_id=service_id, options=options) - - for question in questions: - result = await planner.invoke(kernel, question) - print(f"Q: {question}\nA: {result.final_answer}\n") - - # Uncomment the following line to view the planner's process for completing the request - # print(f"Chat history: {result.chat_history}\n") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/planners/openai_function_calling_stepwise_planner.py b/python/samples/concepts/planners/openai_function_calling_stepwise_planner.py deleted file mode 100644 index cce74f39a41d..000000000000 --- a/python/samples/concepts/planners/openai_function_calling_stepwise_planner.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio -import os - -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion -from semantic_kernel.core_plugins import MathPlugin, TimePlugin -from semantic_kernel.planners import FunctionCallingStepwisePlanner, FunctionCallingStepwisePlannerOptions - - -async def main(): - kernel = Kernel() - - service_id = "planner" - kernel.add_service( - OpenAIChatCompletion( - service_id=service_id, - ), - ) - - plugin_path = os.path.join( - os.path.dirname(os.path.dirname(os.path.realpath(__file__))), - "resources", - ) - kernel.add_plugin(parent_directory=plugin_path, plugin_name="email_plugin") - kernel.add_plugins({"MathPlugin": MathPlugin(), "TimePlugin": TimePlugin()}) - - questions = [ - "What is the current hour number, plus 5?", - "What is 387 minus 22? Email the solution to John and Mary.", - "Write a limerick, translate it to Spanish, and send it to Jane", - ] - - options = FunctionCallingStepwisePlannerOptions( - max_iterations=10, - max_tokens=4000, - ) - - planner = FunctionCallingStepwisePlanner(service_id=service_id, options=options) - - for question in questions: - result = await planner.invoke(kernel, question) - print(f"Q: {question}\nA: {result.final_answer}\n") - - # Uncomment the following line to view the planner's process for completing the request - # print(f"\nChat history: {result.chat_history}\n") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/planners/sequential_planner.py b/python/samples/concepts/planners/sequential_planner.py deleted file mode 100644 index 13aaf83fdab0..000000000000 --- a/python/samples/concepts/planners/sequential_planner.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion -from semantic_kernel.core_plugins import MathPlugin, TextPlugin, TimePlugin -from semantic_kernel.planners import SequentialPlanner - - -async def main(): - kernel = Kernel() - - service_id = "gpt-3.5" - kernel.add_service(OpenAIChatCompletion(service_id=service_id, ai_model_id="gpt-3.5-turbo")) - kernel.add_plugins({"math": MathPlugin(), "time": TimePlugin(), "text": TextPlugin()}) - - # create an instance of sequential planner. - planner = SequentialPlanner(service_id=service_id, kernel=kernel) - - # the ask for which the sequential planner is going to find a relevant function. - ask = "What day of the week is today, all uppercase?" - - # ask the sequential planner to identify a suitable function from the list of functions available. - plan = await planner.create_plan(goal=ask) - - # ask the sequential planner to execute the identified function. - result = await plan.invoke(kernel=kernel) - - for step in plan._steps: - print(step.description, ":", step._state.__dict__) - - print("Expected Answer:") - print(result) - """ - Output: - SUNDAY - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/plugins/crew_ai/README.md b/python/samples/concepts/plugins/crew_ai/README.md new file mode 100644 index 000000000000..faa9a4b9c72f --- /dev/null +++ b/python/samples/concepts/plugins/crew_ai/README.md @@ -0,0 +1,47 @@ +# Crew AI Plugin for Semantic Kernel + +This sample demonstrates how to integrate with [Crew AI Enterprise](https://app.crewai.com/) crews in Semantic Kernel. + +## Requirements + +Before running this sample you need to have a Crew deployed to the Crew AI Enterprise cloud. 
Many pre-built Crew templates can be found [here](https://app.crewai.com/crewai_plus/templates). You will need the following information from your deployed Crew:
+
+- endpoint: The base URL for your Crew.
+- authentication token: The authentication token for your Crew.
+- required inputs: Most Crews have a set of required inputs that need to be provided when kicking off the Crew, and those input names, types, and values need to be known.
+
+## Using the Crew Plugin
+
+Once configured, the `CrewAIEnterprise` class can be used directly by calling methods on it, or can be used to generate a Semantic Kernel plugin with inputs that match those of your Crew. Generating a plugin is useful for scenarios where you want an LLM to be able to invoke your Crew as a tool.
+
+## Running the sample
+
+1. Deploy your Crew to the Crew AI Enterprise cloud.
+1. Gather the required information listed above.
+1. Create environment variables or use your .env file to define your Crew's endpoint and token as:
+
+```md
+CREW_AI_ENDPOINT="{Your Crew's endpoint}"
+CREW_AI_TOKEN="{Your Crew's authentication token}"
+```
+
+1. In [crew_ai_plugin.py](./crew_ai_plugin.py) find the section that defines the Crew's required inputs and modify it to match your Crew's inputs. The input descriptions and types are critical to help LLMs understand the inputs' semantic meaning so that they can accurately call the plugin. The sample is based on the `Enterprise Content Marketing Crew` template which has two required inputs, `company` and `topic`.
+
+```python
+    # The required inputs for the Crew must be known in advance. This example is modeled after the
+    # Enterprise Content Marketing Crew Template and requires string inputs for the company and topic.
+    # We need to describe the type and purpose of each input to allow the LLM to invoke the crew as expected.
+    crew_plugin_definitions = [
+        KernelParameterMetadata(
+            name="company",
+            type="string",
+            description="The name of the company that should be researched",
+            is_required=True,
+        ),
+        KernelParameterMetadata(
+            name="topic", type="string", description="The topic that should be researched", is_required=True
+        ),
+    ]
+```
+
+1. Run the sample. Notice that the sample invokes (kicks off) the Crew twice, once directly by calling the `kickoff` method and once by creating a plugin and invoking it.
diff --git a/python/samples/concepts/plugins/crew_ai/crew_ai_plugin.py b/python/samples/concepts/plugins/crew_ai/crew_ai_plugin.py
new file mode 100644
index 000000000000..c817f6d8cda1
--- /dev/null
+++ b/python/samples/concepts/plugins/crew_ai/crew_ai_plugin.py
@@ -0,0 +1,140 @@
+# Copyright (c) Microsoft. All rights reserved.
+ +import asyncio +import logging + +from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.core_plugins.crew_ai import CrewAIEnterprise +from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata + +logging.basicConfig(level=logging.INFO) + + +async def using_crew_ai_enterprise(): + # Create an instance of the CrewAI Enterprise Crew + async with CrewAIEnterprise() as crew: + ##################################################################### + # Using the CrewAI Enterprise Crew directly # + ##################################################################### + + # The required inputs for the Crew must be known in advance. This example is modeled after the + # Enterprise Content Marketing Crew Template and requires the following inputs: + inputs = {"company": "CrewAI", "topic": "Agentic products for consumers"} + + # Invoke directly with our inputs + kickoff_id = await crew.kickoff(inputs) + print(f"CrewAI Enterprise Crew kicked off with ID: {kickoff_id}") + + # Wait for completion + result = await crew.wait_for_crew_completion(kickoff_id) + print("CrewAI Enterprise Crew completed with the following result:") + print(result) + + ##################################################################### + # Using the CrewAI Enterprise as a Plugin # + ##################################################################### + + # Define the description of the Crew. This will used as the semantic description of the plugin. + crew_description = ( + "Conducts thorough research on the specified company and topic to identify emerging trends," + "analyze competitor strategies, and gather data-driven insights." + ) + + # The required inputs for the Crew must be known in advance. This example is modeled after the + # Enterprise Content Marketing Crew Template and requires string inputs for the company and topic. + # We need to describe the type and purpose of each input to allow the LLM to invoke the crew as expected. + crew_input_parameters = [ + KernelParameterMetadata( + name="company", + type="string", + type_object=str, + description="The name of the company that should be researched", + is_required=True, + ), + KernelParameterMetadata( + name="topic", + type="string", + type_object=str, + description="The topic that should be researched", + is_required=True, + ), + ] + + # Create the CrewAI Plugin. This builds a plugin that can be added to the Kernel and invoked like any other + # plugin. The plugin will contain the following functions: + # - kickoff: Starts the Crew with the specified inputs and returns the Id of the scheduled kickoff. + # - kickoff_and_wait: Starts the Crew with the specified inputs and waits for the Crew to complete before + # returning the result. + # - wait_for_completion: Waits for the specified Crew kickoff to complete and returns the result. + # - get_status: Gets the status of the specified Crew kickoff. 
+        crew_plugin = crew.create_kernel_plugin(
+            name="EnterpriseContentMarketingCrew",
+            description=crew_description,
+            parameters=crew_input_parameters,
+        )
+
+        # Configure the kernel for chat completion and add the CrewAI plugin.
+        kernel, chat_completion, settings = configure_kernel_for_chat()
+        kernel.add_plugin(crew_plugin)
+
+        # Create a chat history to store the system message, initial messages, and the conversation.
+        history = ChatHistory()
+        history.add_system_message("You are an AI assistant that can help me with research.")
+        history.add_user_message(
+            "I'm looking for emerging marketplace trends about Crew AI and their consumer AI products."
+        )
+
+        # Invoke the chat completion service with enough information for the CrewAI plugin to be invoked.
+        response = await chat_completion.get_chat_message_content(history, settings, kernel=kernel)
+        print(response)
+
+        # expected output:
+        # INFO:semantic_kernel.connectors.ai.open_ai.services.open_ai_handler:OpenAI usage: ...
+        # INFO:semantic_kernel.connectors.ai.chat_completion_client_base:processing 1 tool calls in parallel.
+        # INFO:semantic_kernel.kernel:Calling EnterpriseContentMarketingCrew-kickoff_and_wait function with args:
+        # {"company":"Crew AI","topic":"emerging marketplace trends in consumer AI products"}
+        # INFO:semantic_kernel.functions.kernel_function:Function EnterpriseContentMarketingCrew-kickoff_and_wait
+        # invoking.
+        # INFO:semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise:CrewAI Crew kicked off with Id: *****
+        # INFO:semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise:CrewAI Crew with kickoff Id: ***** completed with
+        # status: SUCCESS
+        # INFO:semantic_kernel.functions.kernel_function:Function EnterpriseContentMarketingCrew-kickoff_and_wait
+        # succeeded.
+        # Here are some emerging marketplace trends related to Crew AI and their consumer AI products, along with
+        # suggested content pieces to explore these trends: ...
+
+
+def configure_kernel_for_chat() -> tuple[Kernel, ChatCompletionClientBase, PromptExecutionSettings]:
+    kernel = Kernel()
+
+    # You can select from the following chat completion services that support function calling:
+    # - Services.OPENAI
+    # - Services.AZURE_OPENAI
+    # - Services.AZURE_AI_INFERENCE
+    # - Services.ANTHROPIC
+    # - Services.BEDROCK
+    # - Services.GOOGLE_AI
+    # - Services.MISTRAL_AI
+    # - Services.OLLAMA
+    # - Services.ONNX
+    # - Services.VERTEX_AI
+    # - Services.DEEPSEEK
+    # Please make sure you have configured your environment correctly for the selected chat completion service.
+    chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.OPENAI)
+
+    # Configure the function choice behavior. Here, we set it to Auto, where auto_invoke=True by default.
+    # With `auto_invoke=True`, the model will automatically choose and call functions as needed.
+    request_settings.function_choice_behavior = FunctionChoiceBehavior.Auto()
+
+    # Add the chat completion service to the kernel and return everything needed for the chat loop.
+    kernel.add_service(chat_completion_service)
+    return kernel, chat_completion_service, request_settings
+
+
+if __name__ == "__main__":
+    asyncio.run(using_crew_ai_enterprise())
diff --git a/python/samples/concepts/realtime/README.md b/python/samples/concepts/realtime/README.md
new file mode 100644
index 000000000000..a2dbb5d349f5
--- /dev/null
+++ b/python/samples/concepts/realtime/README.md
@@ -0,0 +1,50 @@
+# Realtime Multi-modal API Samples
+
+These samples are more complex than most because of the nature of these APIs.
They are designed to be run in real time and require a microphone and speaker to be connected to your computer.
+
+To run these samples, you will need the following setup:
+
+- Environment variables for OpenAI (websocket or WebRTC), with your API key and OPENAI_REALTIME_MODEL_ID set.
+- Environment variables for Azure OpenAI (websocket only), with your endpoint, optionally a key, and AZURE_OPENAI_REALTIME_DEPLOYMENT_NAME set. The API version needs to be at least `2024-10-01-preview`.
+- To run the samples with a simple version of a class that handles the incoming and outgoing sound, you need to install the following packages in your environment:
+  - semantic-kernel[realtime]
+  - pyaudio
+  - sounddevice
+  - pydub
+  e.g. pip install pyaudio sounddevice pydub semantic_kernel[realtime]
+
+The samples all run as Python scripts that can be started directly or through your IDE.
+
+All demos have similar output: the instructions are printed, and each new *response item* from the API is put on a new `Mosscap (transcript):` line. The nature of these APIs is such that the transcript arrives before the spoken audio, so if you interrupt the audio the transcript will not match the audio.
+
+The realtime APIs work by the server sending events to you and you sending events back to the server; this is fully asynchronous. The samples show how you can listen to the events sent by the server; some are handled by the code in the samples, others are not. For instance, one could add a clause to the match statement in the receive loop that logs the usage included in the `response.done` event, as sketched at the end of this README.
+
+For more info on the events, see our documentation, as well as the documentation of [OpenAI](https://platform.openai.com/docs/guides/realtime) and [Azure](https://learn.microsoft.com/en-us/azure/ai-services/openai/realtime-audio-quickstart?tabs=keyless%2Cmacos&pivots=programming-language-python).
+
+## Simple chat samples
+
+### [Simple chat with realtime websocket](./simple_realtime_chat_websocket.py)
+
+This sample uses the websocket API with Azure OpenAI to run a simple voice-based interaction. If you want to use this sample with OpenAI, just change AzureRealtimeWebsocket into OpenAIRealtimeWebsocket.
+
+### [Simple chat with realtime WebRTC](./simple_realtime_chat_webrtc.py)
+
+This sample uses the WebRTC API with OpenAI to run a simple voice-based interaction. Because of the way the WebRTC protocol works, this needs a different player and recorder than the websocket version.
+
+## Function calling samples
+
+The following two samples use function calling with the following functions:
+
+- get_weather: This function returns the weather for a given city; it is randomly generated and not based on any real data.
+- get_date_time: This function returns the current date and time.
+- goodbye: This function ends the conversation.
+
+A line is logged whenever one of these functions is called.
+
+### [Chat with function calling Websocket](./realtime_chat_with_function_calling_websocket.py)
+
+This sample uses the websocket API with Azure OpenAI to run the interaction with the voice model, but now with function calling.
+
+### [Chat with function calling WebRTC](./realtime_chat_with_function_calling_webrtc.py)
+
+This sample uses the WebRTC API with OpenAI to run the interaction with the voice model, but now with function calling.
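+
+## Logging usage from the `response.done` event
+
+As an example of handling an extra server event, the sketch below extends the receive loop used in the function calling samples with one more clause that logs token usage when a response finishes. This is a minimal, illustrative sketch rather than part of the samples themselves: it assumes that the `ListenEvents` enum exposes a `RESPONSE_DONE` member and that the raw server event carries the usage under `response.usage`, so check the current Semantic Kernel and OpenAI event shapes before relying on it. The surrounding names (`realtime_client`, `audio_player`, `logger`) are the ones used in the function calling samples.
+
+```python
+async for event in realtime_client.receive(audio_output_callback=audio_player.client_callback):
+    match event:
+        case RealtimeTextEvent():
+            # print the transcript as it streams in
+            print(event.text.text, end="")
+        case _:
+            # OpenAI specific events
+            match event.service_type:
+                case ListenEvents.RESPONSE_CREATED:
+                    print("\nMosscap (transcript): ", end="")
+                case ListenEvents.RESPONSE_DONE:
+                    # assumed shape: the raw `response.done` event exposes usage for the finished response
+                    response_obj = getattr(event.service_event, "response", None)
+                    usage = getattr(response_obj, "usage", None)
+                    if usage:
+                        logger.info(f"Realtime usage: {usage}")
+                case ListenEvents.ERROR:
+                    logger.error(event.service_event)
+```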
diff --git a/python/samples/concepts/realtime/realtime_chat_with_function_calling_webrtc.py b/python/samples/concepts/realtime/realtime_chat_with_function_calling_webrtc.py new file mode 100644 index 000000000000..2131807a0eae --- /dev/null +++ b/python/samples/concepts/realtime/realtime_chat_with_function_calling_webrtc.py @@ -0,0 +1,143 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging +from datetime import datetime +from random import randint + +from samples.concepts.realtime.utils import AudioPlayerWebRTC, AudioRecorderWebRTC, check_audio_devices +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.open_ai import ( + ListenEvents, + OpenAIRealtimeExecutionSettings, + OpenAIRealtimeWebRTC, + TurnDetection, +) +from semantic_kernel.contents import ChatHistory +from semantic_kernel.contents.realtime_events import RealtimeTextEvent +from semantic_kernel.functions import kernel_function + +logging.basicConfig(level=logging.WARNING) +utils_log = logging.getLogger("samples.concepts.realtime.utils") +utils_log.setLevel(logging.INFO) +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + +""" +This simple sample demonstrates how to use the OpenAI Realtime API to create +a chat bot that can listen and respond directly through audio. +It requires installing: +- semantic-kernel[realtime] +- pyaudio +- sounddevice +- pydub +e.g. pip install pyaudio sounddevice pydub semantic_kernel[realtime] + +For more details of the exact setup, see the README.md in the realtime folder. +""" + +# The characterics of your speaker and microphone are a big factor in a smooth conversation +# so you may need to try out different devices for each. +# you can also play around with the turn_detection settings to get the best results. +# It has device id's set in the AudioRecorderStream and AudioPlayerAsync classes, +# so you may need to adjust these for your system. +# you can disable the check for available devices by commenting the line below +check_audio_devices() + + +@kernel_function +def get_weather(location: str) -> str: + """Get the weather for a location.""" + weather_conditions = ("sunny", "hot", "cloudy", "raining", "freezing", "snowing") + weather = weather_conditions[randint(0, len(weather_conditions) - 1)] # nosec + logger.info(f"@ Getting weather for {location}: {weather}") + return f"The weather in {location} is {weather}." + + +@kernel_function +def get_date_time() -> str: + """Get the current date and time.""" + logger.info("@ Getting current datetime") + return f"The current date and time is {datetime.now().isoformat()}." + + +@kernel_function +def goodbye(): + """When the user is done, say goodbye and then call this function.""" + logger.info("@ Goodbye has been called!") + raise KeyboardInterrupt + + +async def main() -> None: + print_transcript = True + # create the Kernel and add a simple function for function calling. 
+ kernel = Kernel() + kernel.add_functions(plugin_name="helpers", functions=[goodbye, get_weather, get_date_time]) + + # create the audio player and audio track + # both take a device_id parameter, which is the index of the device to use, if None the default device is used + audio_player = AudioPlayerWebRTC() + # create the realtime client and optionally add the audio output function, this is optional + # and can also be passed in the receive method + realtime_client = OpenAIRealtimeWebRTC(audio_track=AudioRecorderWebRTC()) + + # Create the settings for the session + # The realtime api, does not use a system message, but takes instructions as a parameter for a session + # Another important setting is to tune the server_vad turn detection + # if this is turned off (by setting turn_detection=None), you will have to send + # the "input_audio_buffer.commit" and "response.create" event to the realtime api + # to signal the end of the user's turn and start the response. + # manual VAD is not part of this sample + # for more info: https://platform.openai.com/docs/api-reference/realtime-sessions/create#realtime-sessions-create-turn_detection + settings = OpenAIRealtimeExecutionSettings( + instructions=""" + You are a chat bot. Your name is Mosscap and + you have one goal: figure out what people need. + Your full name, should you need to know it, is + Splendid Speckled Mosscap. You communicate + effectively, but you tend to answer with long + flowery prose. + """, + voice="alloy", + turn_detection=TurnDetection(type="server_vad", create_response=True, silence_duration_ms=800, threshold=0.8), + function_choice_behavior=FunctionChoiceBehavior.Auto(), + ) + # and we can add a chat history to conversation after starting it + chat_history = ChatHistory() + chat_history.add_user_message("Hi there, who are you?") + chat_history.add_assistant_message("I am Mosscap, a chat bot. I'm trying to figure out what people need.") + + # the context manager calls the create_session method on the client and starts listening to the audio stream + async with ( + audio_player, + realtime_client( + settings=settings, + chat_history=chat_history, + kernel=kernel, + create_response=True, + ), + ): + async for event in realtime_client.receive(audio_output_callback=audio_player.client_callback): + match event: + case RealtimeTextEvent(): + if print_transcript: + print(event.text.text, end="") + case _: + # OpenAI Specific events + match event.service_type: + case ListenEvents.RESPONSE_CREATED: + if print_transcript: + print("\nMosscap (transcript): ", end="") + case ListenEvents.ERROR: + logger.error(event.service_event) + + +if __name__ == "__main__": + print( + "Instructions: The model will start speaking immediately," + "this can be turned off by removing `create_response=True` above." + "The model will detect when you stop and automatically generate a response. " + "Press ctrl + c to stop the program." + ) + asyncio.run(main()) diff --git a/python/samples/concepts/realtime/realtime_chat_with_function_calling_websocket.py b/python/samples/concepts/realtime/realtime_chat_with_function_calling_websocket.py new file mode 100644 index 000000000000..eaa83f250c54 --- /dev/null +++ b/python/samples/concepts/realtime/realtime_chat_with_function_calling_websocket.py @@ -0,0 +1,141 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +import logging +from datetime import datetime +from random import randint + +from samples.concepts.realtime.utils import AudioPlayerWebsocket, AudioRecorderWebsocket +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.open_ai import ( + AzureRealtimeExecutionSettings, + AzureRealtimeWebsocket, + ListenEvents, + TurnDetection, +) +from semantic_kernel.contents import ChatHistory +from semantic_kernel.contents.realtime_events import RealtimeTextEvent +from semantic_kernel.functions import kernel_function + +logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) + +""" +This simple sample demonstrates how to use the OpenAI Realtime API to create +a chat bot that can listen and respond directly through audio. +It requires installing: +- semantic-kernel[realtime] +- pyaudio +- sounddevice +- pydub +e.g. pip install pyaudio sounddevice pydub semantic_kernel[realtime] + +For more details of the exact setup, see the README.md in the realtime folder. +""" + + +@kernel_function +def get_weather(location: str) -> str: + """Get the weather for a location.""" + weather_conditions = ("sunny", "hot", "cloudy", "raining", "freezing", "snowing") + weather = weather_conditions[randint(0, len(weather_conditions) - 1)] # nosec + logger.info(f"@ Getting weather for {location}: {weather}") + return f"The weather in {location} is {weather}." + + +@kernel_function +def get_date_time() -> str: + """Get the current date and time.""" + logger.info("@ Getting current datetime") + return f"The current date and time is {datetime.now().isoformat()}." + + +@kernel_function +def goodbye(): + """When the user is done, say goodbye and then call this function.""" + logger.info("@ Goodbye has been called!") + raise KeyboardInterrupt + + +async def main() -> None: + print_transcript = True + # create the Kernel and add a simple function for function calling. + kernel = Kernel() + kernel.add_functions(plugin_name="helpers", functions=[goodbye, get_weather, get_date_time]) + + # create the realtime client, in this the Azure Websocket client, there are also OpenAI Websocket and WebRTC clients + # See 02b-chat_with_function_calling_webrtc.py for an example of the WebRTC client + realtime_client = AzureRealtimeWebsocket() + # create the audio player and audio track + # both take a device_id parameter, which is the index of the device to use, if None the default device is used + audio_player = AudioPlayerWebsocket() + audio_recorder = AudioRecorderWebsocket(realtime_client=realtime_client) + + # Create the settings for the session + # The realtime api, does not use a system message, but takes instructions as a parameter for a session + # Another important setting is to tune the server_vad turn detection + # if this is turned off (by setting turn_detection=None), you will have to send + # the "input_audio_buffer.commit" and "response.create" event to the realtime api + # to signal the end of the user's turn and start the response. + # manual VAD is not part of this sample + # for more info: https://platform.openai.com/docs/api-reference/realtime-sessions/create#realtime-sessions-create-turn_detection + settings = AzureRealtimeExecutionSettings( + instructions=""" + You are a chat bot. Your name is Mosscap and + you have one goal: figure out what people need. + Your full name, should you need to know it, is + Splendid Speckled Mosscap. You communicate + effectively, but you tend to answer with long + flowery prose. 
+ """, + # see https://platform.openai.com/docs/api-reference/realtime-sessions/create#realtime-sessions-create-voice for the full list of voices # noqa: E501 + voice="alloy", + turn_detection=TurnDetection(type="server_vad", create_response=True, silence_duration_ms=800, threshold=0.8), + function_choice_behavior=FunctionChoiceBehavior.Auto(), + ) + # and we can add a chat history to conversation to seed the conversation + chat_history = ChatHistory() + chat_history.add_user_message("Hi there, I'm based in Amsterdam.") + chat_history.add_assistant_message( + "I am Mosscap, a chat bot. I'm trying to figure out what people need, " + "I can tell you what the weather is or the time." + ) + + # the context manager calls the create_session method on the client and starts listening to the audio stream + async with ( + audio_player, + audio_recorder, + realtime_client( + settings=settings, + chat_history=chat_history, + kernel=kernel, + create_response=True, + ), + ): + # the audio_output_callback can be added here or in the client constructor + # using this gives the smoothest experience + async for event in realtime_client.receive(audio_output_callback=audio_player.client_callback): + match event: + case RealtimeTextEvent(): + if print_transcript: + print(event.text.text, end="") + case _: + # OpenAI Specific events + match event.service_type: + case ListenEvents.RESPONSE_CREATED: + if print_transcript: + print("\nMosscap (transcript): ", end="") + case ListenEvents.ERROR: + print(event.service_event) + logger.error(event.service_event) + + +if __name__ == "__main__": + print( + "Instructions: The model will start speaking immediately," + "this can be turned off by removing `create_response=True` above." + "The model will detect when you stop and automatically generate a response. " + "Press ctrl + c to stop the program." + ) + asyncio.run(main()) diff --git a/python/samples/concepts/realtime/simple_realtime_chat_webrtc.py b/python/samples/concepts/realtime/simple_realtime_chat_webrtc.py new file mode 100644 index 000000000000..0b9c6a7e9485 --- /dev/null +++ b/python/samples/concepts/realtime/simple_realtime_chat_webrtc.py @@ -0,0 +1,84 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging + +from samples.concepts.realtime.utils import AudioPlayerWebRTC, AudioRecorderWebRTC, check_audio_devices +from semantic_kernel.connectors.ai.open_ai import ( + ListenEvents, + OpenAIRealtimeExecutionSettings, + OpenAIRealtimeWebRTC, +) + +logging.basicConfig(level=logging.WARNING) +utils_log = logging.getLogger("samples.concepts.realtime.utils") +utils_log.setLevel(logging.INFO) +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + +""" +This simple sample demonstrates how to use the OpenAI Realtime API to create +a chat bot that can listen and respond directly through audio. +It requires installing: +- semantic-kernel[realtime] +- pyaudio +- sounddevice +- pydub +e.g. pip install pyaudio sounddevice pydub semantic_kernel[realtime] + +For more details of the exact setup, see the README.md in the realtime folder. +""" + +# The characteristics of your speaker and microphone are a big factor in a smooth conversation +# so you may need to try out different devices for each. +# you can also play around with the turn_detection settings to get the best results. +# It has device id's set in the AudioRecorderStream and AudioPlayerAsync classes, +# so you may need to adjust these for your system. 
+# you can disable the check for available devices by commenting the line below +check_audio_devices() + + +async def main() -> None: + # create the realtime client and optionally add the audio output function, this is optional + # you can define the protocol to use, either "websocket" or "webrtc" + # they will behave the same way, even though the underlying protocol is quite different + realtime_client = OpenAIRealtimeWebRTC(audio_track=AudioRecorderWebRTC()) + # Create the settings for the session + settings = OpenAIRealtimeExecutionSettings( + instructions=""" + You are a chat bot. Your name is Mosscap and + you have one goal: figure out what people need. + Your full name, should you need to know it, is + Splendid Speckled Mosscap. You communicate + effectively, but you tend to answer with long + flowery prose. + """, + # there are different voices to choose from, since that list is bound to change, it is not checked beforehand, + # see https://platform.openai.com/docs/api-reference/realtime-sessions/create#realtime-sessions-create-voice + # for more details. + voice="alloy", + ) + audio_player = AudioPlayerWebRTC() + # the context manager calls the create_session method on the client and starts listening to the audio stream + async with audio_player, realtime_client(settings=settings, create_response=True): + async for event in realtime_client.receive(audio_output_callback=audio_player.client_callback): + match event.event_type: + case "text": + # the model returns both audio and transcript of the audio, which we will print + print(event.text.text, end="") + case "service": + # OpenAI Specific events + if event.service_type == ListenEvents.SESSION_UPDATED: + print("Session updated") + if event.service_type == ListenEvents.RESPONSE_CREATED: + print("\nMosscap (transcript): ", end="") + + +if __name__ == "__main__": + print( + "Instructions: The model will start speaking immediately," + "this can be turned off by removing `create_response=True` above." + "The model will detect when you stop and automatically generate a response. " + "Press ctrl + c to stop the program." + ) + asyncio.run(main()) diff --git a/python/samples/concepts/realtime/simple_realtime_chat_websocket.py b/python/samples/concepts/realtime/simple_realtime_chat_websocket.py new file mode 100644 index 000000000000..4a374c46518f --- /dev/null +++ b/python/samples/concepts/realtime/simple_realtime_chat_websocket.py @@ -0,0 +1,90 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging + +from samples.concepts.realtime.utils import AudioPlayerWebsocket, AudioRecorderWebsocket, check_audio_devices +from semantic_kernel.connectors.ai.open_ai import ( + AzureRealtimeExecutionSettings, + AzureRealtimeWebsocket, + ListenEvents, +) +from semantic_kernel.contents import RealtimeAudioEvent, RealtimeTextEvent + +logging.basicConfig(level=logging.WARNING) +utils_log = logging.getLogger("samples.concepts.realtime.utils") +utils_log.setLevel(logging.INFO) +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + +""" +This simple sample demonstrates how to use the OpenAI Realtime API to create +a chat bot that can listen and respond directly through audio. +It requires installing: +- semantic-kernel[realtime] +- pyaudio +- sounddevice +- pydub +e.g. pip install pyaudio sounddevice pydub semantic_kernel[realtime] + +For more details of the exact setup, see the README.md in the realtime folder. 
+""" + +# The characterics of your speaker and microphone are a big factor in a smooth conversation +# so you may need to try out different devices for each. +# you can also play around with the turn_detection settings to get the best results. +# It has device id's set in the AudioRecorderStream and AudioPlayerAsync classes, +# so you may need to adjust these for your system. +# you can disable the check for available devices by commenting the line below +check_audio_devices() + + +async def main() -> None: + # create the realtime client and optionally add the audio output function, this is optional + # you can define the protocol to use, either "websocket" or "webrtc" + # they will behave the same way, even though the underlying protocol is quite different + realtime_client = AzureRealtimeWebsocket() + audio_player = AudioPlayerWebsocket() + audio_recorder = AudioRecorderWebsocket(realtime_client=realtime_client) + # Create the settings for the session + settings = AzureRealtimeExecutionSettings( + instructions=""" + You are a chat bot. Your name is Mosscap and + you have one goal: figure out what people need. + Your full name, should you need to know it, is + Splendid Speckled Mosscap. You communicate + effectively, but you tend to answer with long + flowery prose. + """, + # there are different voices to choose from, since that list is bound to change, it is not checked beforehand, + # see https://platform.openai.com/docs/api-reference/realtime-sessions/create#realtime-sessions-create-voice + # for more details. + voice="shimmer", + ) + # the context manager calls the create_session method on the client and starts listening to the audio stream + async with audio_player, audio_recorder, realtime_client(settings=settings, create_response=True): + async for event in realtime_client.receive(): + match event: + # this can be used as an alternative to the callback function used in other samples, + # the callback is faster and smoother + case RealtimeAudioEvent(): + await audio_player.add_audio(event.audio) + case RealtimeTextEvent(): + # the model returns both audio and transcript of the audio, which we will print + print(event.text.text, end="") + case _: + # OpenAI Specific events + if event.service_type == ListenEvents.SESSION_UPDATED: + print("Session updated") + if event.service_type == ListenEvents.RESPONSE_CREATED: + print("\nMosscap (transcript): ", end="") + + +if __name__ == "__main__": + print( + "Instructions: The model will start speaking immediately," + "this can be turned off by removing `create_response=True` above." + "The model will detect when you stop and automatically generate a response. " + "Press ctrl + c to stop the program." + ) + asyncio.run(main()) diff --git a/python/samples/concepts/realtime/utils.py b/python/samples/concepts/realtime/utils.py new file mode 100644 index 000000000000..b3056991d626 --- /dev/null +++ b/python/samples/concepts/realtime/utils.py @@ -0,0 +1,489 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +import base64 +import logging +import threading +from typing import Any, ClassVar, Final, cast + +import numpy as np +import numpy.typing as npt +import sounddevice as sd +from aiortc.mediastreams import MediaStreamError, MediaStreamTrack +from av.audio.frame import AudioFrame +from av.frame import Frame +from pydantic import BaseModel, ConfigDict, PrivateAttr +from sounddevice import InputStream, OutputStream + +from semantic_kernel.connectors.ai.realtime_client_base import RealtimeClientBase +from semantic_kernel.contents import AudioContent +from semantic_kernel.contents.realtime_events import RealtimeAudioEvent + +logger = logging.getLogger(__name__) + +SAMPLE_RATE: Final[int] = 24000 +RECORDER_CHANNELS: Final[int] = 1 +PLAYER_CHANNELS: Final[int] = 1 +FRAME_DURATION: Final[int] = 100 +SAMPLE_RATE_WEBRTC: Final[int] = 48000 +RECORDER_CHANNELS_WEBRTC: Final[int] = 1 +PLAYER_CHANNELS_WEBRTC: Final[int] = 2 +FRAME_DURATION_WEBRTC: Final[int] = 20 +DTYPE: Final[npt.DTypeLike] = np.int16 + + +def check_audio_devices(): + logger.info(sd.query_devices()) + + +# region: Recorders + + +class AudioRecorderWebRTC(BaseModel, MediaStreamTrack): + """A simple class that implements the WebRTC MediaStreamTrack for audio from sounddevice. + + This class is meant as a demo sample and is not meant for production use. + """ + + model_config = ConfigDict(populate_by_name=True, arbitrary_types_allowed=True, validate_assignment=True) + + kind: ClassVar[str] = "audio" + device: str | int | None = None + sample_rate: int + channels: int + frame_duration: int + dtype: npt.DTypeLike = DTYPE + frame_size: int = 0 + _queue: asyncio.Queue[Frame] = PrivateAttr(default_factory=asyncio.Queue) + _is_recording: bool = False + _stream: InputStream | None = None + _recording_task: asyncio.Task | None = None + _loop: asyncio.AbstractEventLoop | None = None + _pts: int = 0 + + def __init__( + self, + *, + device: str | int | None = None, + sample_rate: int = SAMPLE_RATE_WEBRTC, + channels: int = RECORDER_CHANNELS_WEBRTC, + frame_duration: int = FRAME_DURATION_WEBRTC, + dtype: npt.DTypeLike = DTYPE, + ): + """A simple class that implements the WebRTC MediaStreamTrack for audio from sounddevice. + + Make sure the device is set to the correct device for your system. + + Args: + device: The device id to use for recording audio. + sample_rate: The sample rate for the audio. + channels: The number of channels for the audio. + frame_duration: The duration of each audio frame in milliseconds. + dtype: The data type for the audio. 
+ """ + super().__init__(**{ + "device": device, + "sample_rate": sample_rate, + "channels": channels, + "frame_duration": frame_duration, + "dtype": dtype, + "frame_size": int(sample_rate * frame_duration / 1000), + }) + MediaStreamTrack.__init__(self) + + async def recv(self) -> Frame: + """Receive the next frame of audio data.""" + if not self._recording_task: + self._recording_task = asyncio.create_task(self.start_recording()) + + try: + frame = await self._queue.get() + self._queue.task_done() + return frame + except Exception as e: + logger.error(f"Error receiving audio frame: {e!s}") + raise MediaStreamError("Failed to receive audio frame") + + def _sounddevice_callback(self, indata: np.ndarray, frames: int, time: Any, status: Any) -> None: + if status: + logger.warning(f"Audio input status: {status}") + if self._loop and self._loop.is_running(): + asyncio.run_coroutine_threadsafe(self._queue.put(self._create_frame(indata)), self._loop) + + def _create_frame(self, indata: np.ndarray) -> Frame: + audio_data = indata.copy() + if audio_data.dtype != self.dtype: + audio_data = ( + (audio_data * 32767).astype(self.dtype) if self.dtype == np.int16 else audio_data.astype(self.dtype) + ) + frame = AudioFrame( + format="s16", + layout="mono", + samples=len(audio_data), + ) + frame.rate = self.sample_rate + frame.pts = self._pts + frame.planes[0].update(audio_data.tobytes()) + self._pts += len(audio_data) + return frame + + async def start_recording(self): + """Start recording audio from the input device.""" + if self._is_recording: + return + + self._is_recording = True + self._loop = asyncio.get_running_loop() + self._pts = 0 # Reset pts when starting recording + + try: + self._stream = InputStream( + device=self.device, + channels=self.channels, + samplerate=self.sample_rate, + dtype=self.dtype, + blocksize=self.frame_size, + callback=self._sounddevice_callback, + ) + self._stream.start() + + while self._is_recording: + await asyncio.sleep(0.1) + except asyncio.CancelledError: + logger.debug("Recording task was stopped.") + except KeyboardInterrupt: + logger.debug("Recording task was stopped.") + except Exception as e: + logger.error(f"Error in audio recording: {e!s}") + raise + finally: + self._is_recording = False + + +class AudioRecorderWebsocket(BaseModel): + """A simple class that implements a sounddevice for use with websockets. + + This class is meant as a demo sample and is not meant for production use. + """ + + model_config = ConfigDict(populate_by_name=True, arbitrary_types_allowed=True, validate_assignment=True) + + realtime_client: RealtimeClientBase + device: str | int | None = None + sample_rate: int + channels: int + frame_duration: int + dtype: npt.DTypeLike = DTYPE + frame_size: int = 0 + _stream: InputStream | None = None + _pts: int = 0 + _stream_task: asyncio.Task | None = None + + def __init__( + self, + *, + realtime_client: RealtimeClientBase, + device: str | int | None = None, + sample_rate: int = SAMPLE_RATE, + channels: int = RECORDER_CHANNELS, + frame_duration: int = FRAME_DURATION, + dtype: npt.DTypeLike = DTYPE, + ): + """A simple class that implements the WebRTC MediaStreamTrack for audio from sounddevice. + + Make sure the device is set to the correct device for your system. + + Args: + realtime_client: The RealtimeClientBase to use for streaming audio. + device: The device id to use for recording audio. + sample_rate: The sample rate for the audio. + channels: The number of channels for the audio. 
+ frame_duration: The duration of each audio frame in milliseconds. + dtype: The data type for the audio. + **kwargs: Additional keyword arguments. + """ + super().__init__(**{ + "realtime_client": realtime_client, + "device": device, + "sample_rate": sample_rate, + "channels": channels, + "frame_duration": frame_duration, + "dtype": dtype, + "frame_size": int(sample_rate * frame_duration / 1000), + }) + + async def __aenter__(self): + """Stream audio data to a RealtimeClientBase.""" + if not self._stream_task: + self._stream_task = asyncio.create_task(self._start_stream()) + return self + + async def _start_stream(self): + self._pts = 0 # Reset pts when starting recording + self._stream = InputStream( + device=self.device, + channels=self.channels, + samplerate=self.sample_rate, + dtype=self.dtype, + blocksize=self.frame_size, + ) + self._stream.start() + try: + while True: + if self._stream.read_available < self.frame_size: + await asyncio.sleep(0) + continue + data, _ = self._stream.read(self.frame_size) + + await self.realtime_client.send( + RealtimeAudioEvent(audio=AudioContent(data=base64.b64encode(cast(Any, data)).decode("utf-8"))) + ) + + await asyncio.sleep(0) + except asyncio.CancelledError: + pass + + async def __aexit__(self, exc_type, exc, tb): + """Stop recording audio.""" + if self._stream_task: + self._stream_task.cancel() + await self._stream_task + if self._stream: + self._stream.stop() + self._stream.close() + + +# region: Players + + +class AudioPlayerWebRTC(BaseModel): + """Simple class that plays audio using sounddevice. + + This class is meant as a demo sample and is not meant for production use. + + Make sure the device_id is set to the correct device for your system. + + The sample rate, channels and frame duration + should be set to match the audio you + are receiving. + + Args: + device: The device id to use for playing audio. + sample_rate: The sample rate for the audio. + channels: The number of channels for the audio. + dtype: The data type for the audio. 
+ frame_duration: The duration of each audio frame in milliseconds + + """ + + model_config = ConfigDict(populate_by_name=True, arbitrary_types_allowed=True, validate_assignment=True) + + device: int | None = None + sample_rate: int = SAMPLE_RATE_WEBRTC + channels: int = PLAYER_CHANNELS_WEBRTC + dtype: npt.DTypeLike = DTYPE + frame_duration: int = FRAME_DURATION_WEBRTC + _queue: asyncio.Queue[np.ndarray] | None = PrivateAttr(default=None) + _stream: OutputStream | None = PrivateAttr(default=None) + + async def __aenter__(self): + """Start the audio stream when entering a context.""" + self.start() + return self + + async def __aexit__(self, exc_type, exc, tb): + """Stop the audio stream when exiting a context.""" + self.stop() + + def start(self): + """Start the audio stream.""" + self._queue = asyncio.Queue() + self._stream = OutputStream( + callback=self._sounddevice_callback, + samplerate=self.sample_rate, + channels=self.channels, + dtype=self.dtype, + blocksize=int(self.sample_rate * self.frame_duration / 1000), + device=self.device, + ) + if self._stream and self._queue: + self._stream.start() + + def stop(self): + """Stop the audio stream.""" + if self._stream: + self._stream.stop() + self._stream = None + self._queue = None + + def _sounddevice_callback(self, outdata, frames, time, status): + """This callback is called by sounddevice when it needs more audio data to play.""" + if status: + logger.debug(f"Audio output status: {status}") + if self._queue: + if self._queue.empty(): + return + data = self._queue.get_nowait() + outdata[:] = data.reshape(outdata.shape) + self._queue.task_done() + else: + logger.error( + "Audio queue not initialized, make sure to call start before " + "using the player, or use the context manager." + ) + + async def client_callback(self, content: np.ndarray): + """This function can be passed to the audio_output_callback field of the RealtimeClientBase.""" + if self._queue: + await self._queue.put(content) + else: + logger.error( + "Audio queue not initialized, make sure to call start before " + "using the player, or use the context manager." + ) + + async def add_audio(self, audio_content: AudioContent) -> None: + """This function is used to add audio to the queue for playing. + + It first checks if there is a AudioFrame in the inner_content of the AudioContent. + If not, it checks if the data is a numpy array, bytes, or a string and converts it to a numpy array. + """ + if not self._queue: + logger.error( + "Audio queue not initialized, make sure to call start before " + "using the player, or use the context manager." + ) + return + if audio_content.inner_content and isinstance(audio_content.inner_content, AudioFrame): + await self._queue.put(audio_content.inner_content.to_ndarray()) + return + if isinstance(audio_content.data, np.ndarray): + await self._queue.put(audio_content.data) + return + if isinstance(audio_content.data, bytes): + await self._queue.put(np.frombuffer(audio_content.data, dtype=self.dtype)) + return + if isinstance(audio_content.data, str): + await self._queue.put(np.frombuffer(audio_content.data.encode(), dtype=self.dtype)) + return + logger.error(f"Unknown audio content: {audio_content}") + + +class AudioPlayerWebsocket(BaseModel): + """Simple class that plays audio using sounddevice. + + This class is meant as a demo sample and is not meant for production use. + + Make sure the device_id is set to the correct device for your system. 
+ + The sample rate, channels and frame duration + should be set to match the audio you + are receiving. + + Args: + device: The device id to use for playing audio. + sample_rate: The sample rate for the audio. + channels: The number of channels for the audio. + dtype: The data type for the audio. + frame_duration: The duration of each audio frame in milliseconds + + """ + + model_config = ConfigDict(populate_by_name=True, arbitrary_types_allowed=True, validate_assignment=True) + + device: int | None = None + sample_rate: int = SAMPLE_RATE + channels: int = PLAYER_CHANNELS + dtype: npt.DTypeLike = DTYPE + frame_duration: int = FRAME_DURATION + _lock: Any = PrivateAttr(default_factory=threading.Lock) + _queue: list[np.ndarray] = PrivateAttr(default_factory=list) + _stream: OutputStream | None = PrivateAttr(default=None) + _frame_count: int = 0 + + async def __aenter__(self): + """Start the audio stream when entering a context.""" + self.start() + return self + + async def __aexit__(self, exc_type, exc, tb): + """Stop the audio stream when exiting a context.""" + self.stop() + + def start(self): + """Start the audio stream.""" + with self._lock: + self._queue = [] + self._stream = OutputStream( + callback=self._sounddevice_callback, + samplerate=self.sample_rate, + channels=self.channels, + dtype=self.dtype, + blocksize=int(self.sample_rate * self.frame_duration / 1000), + device=self.device, + ) + if self._stream: + self._stream.start() + + def stop(self): + """Stop the audio stream.""" + if self._stream: + self._stream.stop() + self._stream = None + with self._lock: + self._queue = [] + + def _sounddevice_callback(self, outdata, frames, time, status): + """This callback is called by sounddevice when it needs more audio data to play.""" + with self._lock: + if status: + logger.debug(f"Audio output status: {status}") + data = np.empty(0, dtype=np.int16) + + # get next item from queue if there is still space in the buffer + while len(data) < frames and len(self._queue) > 0: + item = self._queue.pop(0) + frames_needed = frames - len(data) + data = np.concatenate((data, item[:frames_needed])) + if len(item) > frames_needed: + self._queue.insert(0, item[frames_needed:]) + + self._frame_count += len(data) + + # fill the rest of the frames with zeros if there is no more data + if len(data) < frames: + data = np.concatenate((data, np.zeros(frames - len(data), dtype=np.int16))) + + outdata[:] = data.reshape(-1, 1) + + def reset_frame_count(self): + self._frame_count = 0 + + def get_frame_count(self): + return self._frame_count + + async def client_callback(self, content: np.ndarray): + """This function can be passed to the audio_output_callback field of the RealtimeClientBase.""" + with self._lock: + self._queue.append(content) + + async def add_audio(self, audio_content: AudioContent) -> None: + """This function is used to add audio to the queue for playing. + + It first checks if there is a AudioFrame in the inner_content of the AudioContent. + If not, it checks if the data is a numpy array, bytes, or a string and converts it to a numpy array. 
+ """ + with self._lock: + if audio_content.inner_content and isinstance(audio_content.inner_content, AudioFrame): + self._queue.append(audio_content.inner_content.to_ndarray()) + return + if isinstance(audio_content.data, np.ndarray): + self._queue.append(audio_content.data) + return + if isinstance(audio_content.data, bytes): + self._queue.append(np.frombuffer(audio_content.data, dtype=self.dtype)) + return + if isinstance(audio_content.data, str): + self._queue.append(np.frombuffer(audio_content.data.encode(), dtype=self.dtype)) + return + logger.error(f"Unknown audio content: {audio_content}") diff --git a/python/samples/concepts/reasoning/simple_reasoning.py b/python/samples/concepts/reasoning/simple_reasoning.py index c423cf106a71..ce4c6d4b66fb 100644 --- a/python/samples/concepts/reasoning/simple_reasoning.py +++ b/python/samples/concepts/reasoning/simple_reasoning.py @@ -2,11 +2,8 @@ import asyncio -from samples.concepts.setup.chat_completion_services import ( - Services, - get_chat_completion_service_and_request_settings, -) -from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( +from semantic_kernel.connectors.ai.open_ai import ( + OpenAIChatCompletion, OpenAIChatPromptExecutionSettings, ) from semantic_kernel.contents import ChatHistory @@ -59,17 +56,25 @@ Note: Unsupported features may be added in future updates. """ -chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.OPENAI) +chat_service = OpenAIChatCompletion(service_id="reasoning", instruction_role="developer") +# Set the reasoning effort to "medium" and the maximum completion tokens to 5000. +request_settings = OpenAIChatPromptExecutionSettings( + service_id="reasoning", max_completion_tokens=2000, reasoning_effort="medium" +) + + +# Create a ChatHistory object +chat_history = ChatHistory() # This is the system message that gives the chatbot its personality. developer_message = """ As an assistant supporting the user, - you recognize all user input - as questions or consultations and answer them. +you recognize all user input +as questions or consultations and answer them. """ - -# Create a ChatHistory object -chat_history = ChatHistory() +# The developer message was newly introduced for reasoning models such as OpenAI’s o1 and o1-mini. +# `system message` cannot be used with reasoning models. +chat_history.add_developer_message(developer_message) async def chat() -> bool: @@ -86,25 +91,15 @@ async def chat() -> bool: print("\n\nExiting chat...") return False - # The developer message was newly introduced for reasoning models such as OpenAI’s o1 and o1-mini. - # `system message` cannot be used with reasoning models. - chat_history.add_developer_message(developer_message) chat_history.add_user_message(user_input) - if not isinstance(request_settings, OpenAIChatPromptExecutionSettings): - raise ValueError("The OpenAI prompt execution settings are not supported for this sample.") - - # Set the reasoning effort to "medium" and the maximum completion tokens to 5000. - request_settings.max_completion_tokens = 5000 - request_settings.reasoning_effort = "medium" - # Get the chat message content from the chat completion service. 
- response = await chat_completion_service.get_chat_message_content( + response = await chat_service.get_chat_message_content( chat_history=chat_history, settings=request_settings, ) if response: - print(f"Mosscap:> {response}") + print(f"Reasoning model:> {response}") # Add the chat message to the chat history to keep track of the conversation. chat_history.add_message(response) diff --git a/python/samples/concepts/reasoning/simple_reasoning_function_calling.py b/python/samples/concepts/reasoning/simple_reasoning_function_calling.py index 0da02adacefe..63925ac7745d 100644 --- a/python/samples/concepts/reasoning/simple_reasoning_function_calling.py +++ b/python/samples/concepts/reasoning/simple_reasoning_function_calling.py @@ -1,22 +1,14 @@ # Copyright (c) Microsoft. All rights reserved. import asyncio +from collections.abc import Awaitable, Callable -from samples.concepts.setup.chat_completion_services import ( - Services, - get_chat_completion_service_and_request_settings, -) from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.function_calling_utils import ( - kernel_function_metadata_to_function_call_format, -) -from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( - OpenAIChatPromptExecutionSettings, -) +from semantic_kernel.connectors.ai import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion, OpenAIChatPromptExecutionSettings from semantic_kernel.contents import ChatHistory -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.function_result_content import FunctionResultContent from semantic_kernel.core_plugins.time_plugin import TimePlugin +from semantic_kernel.filters import AutoFunctionInvocationContext, FilterTypes """ # Reasoning Models Sample @@ -70,25 +62,44 @@ Note: Unsupported features may be added in future updates. """ -chat_completion_service, request_settings = get_chat_completion_service_and_request_settings( - Services.OPENAI, instruction_role="developer" + +chat_service = OpenAIChatCompletion(service_id="reasoning", instruction_role="developer") +# Set the reasoning effort to "medium" and the maximum completion tokens to 5000. +# also set the function_choice_behavior to auto and that includes auto invoking the functions. +request_settings = OpenAIChatPromptExecutionSettings( + service_id="reasoning", + max_completion_tokens=5000, + reasoning_effort="medium", + function_choice_behavior=FunctionChoiceBehavior.Auto(), ) -# This is the system message that gives the chatbot its personality. -developer_message = """ -As an assistant supporting the user, - you recognize all user input - as questions or consultations and answer them. -""" # Create a ChatHistory object -chat_history = ChatHistory() +# The reasoning models use developer instead of system, but because we set the instruction_role to developer, +# we can use the system message as the developer message. +chat_history = ChatHistory( + system_message=""" +As an assistant supporting the user, +you recognize all user input +as questions or consultations and answer them. +""" +) # Create a kernel and register plugin. 
kernel = Kernel() kernel.add_plugin(TimePlugin(), "time") +# add a simple filter to track the function call result +@kernel.filter(filter_type=FilterTypes.AUTO_FUNCTION_INVOCATION) +async def auto_function_invocation_filter( + context: AutoFunctionInvocationContext, next: Callable[[AutoFunctionInvocationContext], Awaitable[None]] +) -> None: + await next(context) + print("Tools:> FUNCTION CALL RESULT") + print(f" - time: {context.function_result}") + + async def chat() -> bool: try: user_input = input("User:> ") @@ -103,78 +114,17 @@ async def chat() -> bool: print("\n\nExiting chat...") return False - # The developer message was newly introduced for reasoning models such as OpenAI’s o1 and o1-mini. - # `system message` cannot be used with reasoning models. - chat_history.add_developer_message(developer_message) chat_history.add_user_message(user_input) - if not isinstance(request_settings, OpenAIChatPromptExecutionSettings): - raise ValueError(f"{type(request_settings).__name__} settings are not supported for this sample.") - - # Set the reasoning effort to "medium" and the maximum completion tokens to 5000. - request_settings.max_completion_tokens = 5000 - request_settings.reasoning_effort = "medium" - - # enable the function calling and disable parallel tool calls for reasoning models. - request_settings.parallel_tool_calls = None - request_settings.tool_choice = None - request_settings.tools = [ - kernel_function_metadata_to_function_call_format(f) for f in kernel.get_full_list_of_function_metadata() - ] - # Get the chat message content from the chat completion service. - response = await chat_completion_service.get_chat_message_content( + response = await chat_service.get_chat_message_content( chat_history=chat_history, settings=request_settings, kernel=kernel, ) - - if not response: - return True - - function_calls = [item for item in response.items if isinstance(item, FunctionCallContent)] - if len(function_calls) == 0: - print(f"Mosscap:> {response}") - chat_history.add_message(response) - return True - - # Invoke the function calls and update the chat history with the results. - print(f"processing {len(function_calls)} tool calls") - await asyncio.gather( - *[ - kernel.invoke_function_call( - function_call=function_call, - chat_history=chat_history, - function_call_count=len(function_calls), - request_index=0, - ) - for function_call in function_calls - ], - ) - - # Convert the last tool message to a user message. - fc_results = [item for item in chat_history.messages[-1].items if isinstance(item, FunctionResultContent)] - - result_prompt: list[str] = ["FUNCTION CALL RESULT"] - for fc_result in fc_results: - result_prompt.append(f"- {fc_result.plugin_name}: {fc_result.result}") - - chat_history.remove_message(chat_history.messages[-1]) - chat_history.add_user_message("\n".join(result_prompt)) - print("Tools:> ", "\n".join(result_prompt)) - - # Get the chat message content from the chat completion service. - request_settings.tools = None - response = await chat_completion_service.get_chat_message_content( - chat_history=chat_history, - settings=request_settings, - ) - - # Add the chat message to the chat history to keep track of the conversation. 
     if response:
         print(f"Mosscap:> {response}")
         chat_history.add_message(response)
-
     return True
diff --git a/python/samples/concepts/resources/cat.jpg b/python/samples/concepts/resources/cat.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..1e9f26de48fc542676a7461020206fab297c0314
GIT binary patch
literal 37831
z$dpvuIBd(mA9@DSAf_OKqI&6uEoJLt>yA#N@hL+oqb-ZGs&86ah~JlUo2GkR!0aFd zjhHHO!!~$%p4JqfU*Vy045WMN#12X--{_nKDF&nd^5XREK79!&!ivf1+>?zdF3)sE zP`{YuSz1Yfgt+^~mub?jW*Y-@zicO*8K%Z@K*5$s=q%hfSg9SC;@rKEM zQ)!oH(Z~h1*)Iz3fYZt{%f}N9^xmgNGb_P&-3W%_Rxv`fU)|>Fb8~699G(5OcS*F% zMa3>CCi86%$}^|w@vbNdhsDEUwHT0q;3|24`^?q??~hf9ig0B)2js)VAB#xvIa#nd zNAVSWJ74lg*aDA~u{u&c8tIV1_UV#yKYf$X8Ca-<_p+Da&uxvbc5Mg56Fa5FLOAhg z_jiEoj=}F4h-1?ceMeEL&Yh&}m=Uh{ZEu55kN+mmQ$hvsDRG_4qV3UaA)-FlYHzwh zjxSE>8*kqqQ4eR99y>QXf|i4+exc9Fb}8cT!*1gcYlmzl+b?nf)Z8m5Z=uG0Vi13Tsymn_2or@W} z<}K|!eyzSfO9NoWydN;MbBS#!(AC(FxwOU4?z0(o8WH8&5OZ~RUdgH!yOMV3l9$d zQ7m&$Y&11CCf;%{&KiQ%zj<+1?2xQ4&vi7>%$QJ6b5()c`$sWBQeV!lUKIKn4^m*~ z`yohaOHlvZyPj-R^kQhO1R5$senGbR`Eo+=sc^^*_ZAD^THAypfCJ827U@B-;npau zRQ^v(Bs%FDjI!IKop}4ahw$Mt-1pM?W%Np)tys*$ocSb%o>xCYO2Nk%EX9$V1My3mqQN0hI1uE($7DTm0(-s zBf|ENqUOLuVlQXs_HAoXioxU`OTTbE;S}=F>fxHdP{)e5!h?(G?Vvf@FUw`@?|i7& zIJY=255PSRyLcQY;p4bXJtm12dp%2u&t|KVV)Z_qmA&D;06Nc0%E6WR1UoY6%^*M$ zn-$fdtG~>fgg!ir^B*H=(z*7eWC}J}yE!1AJ~HiJeY>$v{#_aHn)@uK&+J}*d>d#v zD`qPjnQ_AaFt$B#jq7tvs)pjDE~pk!_reiF)bC~S9;(vux+q+Ov<=oM&s}Y1EFR_4 ziv9QKHT}GLn6qWsy|}=v^r|55M|q3%%*H>q>KF2_om-eV5Gt#Rrx+@fJV zwt#~ZN_btO;;lYptc2}}&Z-I(9w~%5Ue>-zFtFS^Q(@f>6(VN z?Qhef4r7W37bG05^+ zZNqEV;S5W!mK@VX^~Sd1jDwCGasR&;S<8TWWh?uSJKV`x;$%yTR;onK#@@%dPyLX? zqeZTq3!kM&xyvcL^DFc(Uzt8GwEOz2-hOoWSQ*+LW`2dVY(ONEj7tA*%AR_fIw9i)-Qc9rl=NKTW*qb$oQD$s3TDVr#)80#Rg0PEBjy1C^95o&Tfg;SEjfv$s#K zFw`5pCF%=2KihKv!Y%3Bzoxl#@J(8>b8MlpntFN@CT5r8q&_DEE~`gPZMtakUGl5d zs1AoAxA}p-0Jp_G3+(0XWpO{+jhWr%_bEn^ebNbV&s_z;tjr9N2J2q1Vg-y9Uu&B5 zDDwWZpwJZXaVqYS~1$XEibeeYz}qM5&cVuRyOT9o9-9P zUA~5kqJ7B)S$tEA_u7mKt%#r9A!5^ots5U90kdf_aKw{tvn<8H`AJg~6Y%9%rE&88%{KBO1g0d}Mz~}^>x|ZJBUWy|I1%A!yAhUT@+<&~RTUXMPZ5|%wfiM^d-{*C$ z7Z4C$L)lJLzCf+9R1o&u&!7#!gltF9D0_hI<+JlJnbR!NT_x-sfRWe=$db-C20VZP zrPD$4wO|^+Mhd(QAUf#UR_dOYn}gEJ+y$)#^kYRv<4p|{blQ<)$ixNF3FyJYhUl}C zceCkx{!gCHJ*R#$m~2+g`P`xFz*codl{-LFpIGo?&em#vA;2BJ#Y31u3(2uQx@FHZ z7tP53=B9gC5R$mHLfc6dkiYXLN9$_-_O}NqN#4=@hF-k@VI9tQdrN-pG0a2$6=EH;U(>Cnwc?@JlBosTT;)zJY4?H;;6XGI{i04N&RJNKr9!UxTqY$2v zOcdN2@;n)L9etDPq|NH%pi{*igWi{tvwZ{A7|HCvu67NME$elVdD|US zmKmxltzj77-ZQw`)Px`OL z#`U5%Q!WQG@#6t{?!PbPY;OkR<2!N859*b9t6j%mv^=iv5!n-R=X9YN2bXCtb+8h_ zSNC{XtY%$clVbi%f_LEY{{31MUfWt*2X;-WRGn`NV!$~4gK%_MJ9b1^ccv{^=$Eyv z7MQIm<&|LQU+lh_=Z#olPx&t9lfSP!d~>5zirOb8E)}6d9z@~7PsxAO+U!Qj^m|%T}8JJr6F@YY%i=D z@plns*Ei2gupt^ShKC4n%~ZlUZeWNI;4y7HEK;Cp_+t_iEFU> z9O@M)i;_QL)9mh_i{0&O^b^l7UMUH-Bi+APp*sxGX`6HBKyMp}^ddX{iavUzCMler z#z4u1`U4QcJ7^0^j19<|$Gs3H!x?#}^9P%)g(+2p39 zK20Tk?NZIgvXSzOA_ZiM%gy?Nr<2)Cyy9gzRMfju2Al&hlI+EL4>Lhy%}UX;m55xO zhIZK4^1ib57)HF(YKK!&RQ4v3-B8v{d6$QTeVfpEz|JXm#!~fQ~qbxruG`857;>LPK1Lc zay+Wh;%@CNyzjHTOilFiYGii2;VSnXF|c1VHXg0D9mzA!RdZqE2}{V?G6Y9R%2g^h zHtmo4#XB7@&_&TZ8*?r=v%jO;M=ACds?~W>gj^VFN16^xCKdnLxDP zQ|)Xl7Ro|-b1vwCER*g z&~qd+*~LmM{+!{6e){y~Z*IJ;V^jLg9g(LXkJiRzpYe(`R8bUp2I~}=7}7bOnE@E% zpfP?jY4c$-%#7Qwst+wF%{&OzpC}+caPG?0d33@X$AL@VwdGFKbhQCk z2nm!6SJQ9>UCA8#$Ev!02+_h*pO3D;C+C=$AuJDG{-VDA*k{{kG7W5b#2a;9MtTe; zJ_Mvc%w*x*^K2g(F5Yl~Q|tfcqTI2|?IfpKKZxrs!f$z$4alrbw|6zQLut~LgJ&wFa^R_D^4+KrXcxFuZud@s>#g14cRoCPgVyZYN|ZTpcY zz@u%{`$w1>Ip>UPg<*b$I#nw5SkgtrhFRzK1<_uFxb8 z_>9E->p&G#mxmGnouYO3p*$8NIHT9Wp1I{|xMZ8aFN7cPI_OHfvdAiZZRz4(#zvwu zJK{CUZfWt8sV!j4%y5#uur2q88GO#?3~ywHM~Z=*#ir0wGA=QE=U z-HHb4z1h5cG)_+|sHgzXn(Qu!W1b$|kKaCHSz?$$8^r7=n^UIyxX5?{|{IL^0qgP9C$K`|3kSBr?w`+W5QRP{) zmY}2Scl-=ul%sV}I7EaU>99z0S9w|jeN3&{yhoFQLV#NIIQKxAk|LxMnJr`r3d(G56u9MAA}4pmx550v4!k! z2tL~mSy*R%reB(c=YALe1tc%RukMVWz;oC%nw`?uMmZFo_f(vmMZY+fIV|n`F>?8{ zCa$ohalzs%Rk%F&RmKT;_XeI(2hZ9i8XKbH%u~n{-G#1$v;Q#NEj|hl1_>{;@Wyj! 
zMu+G-AzmlwKbngKX1siM!aHVmw6DY*F@*Onv<-oU)?XlNAsI&B{VlZTWof)#F;{%Qn^@<}RHznHld0BkP!j?Q1#-noq@lJ1-tZoz;X>gEBVKM2~U_v2C@v zc5UV|uRMyP29hE|r@8#P0C|^>1~#;-?f3W&xJfy|JbQ}nYzji{vnyHS3$KhJOCpE^ z`!0h|nx^~ozC8Sw-;}}2(3@KwB4pbb)bc}6TBZ3S@&mrdGdaxk^`S|M1{KXrdnMud zWrDP7iYmR|!To?rIOFoXoK_19iz$qSj8F*l6nI+WIa=so3=f9sDRK}bF|>s6x@m@# zK<8f{FX+5f$}-3}J>1^iUAD8AuOg%1H+i$Rk6jHm2v3L*zxSa^dxro(JyW}O^31ed z#WJ|2Eb@dgO0~hG2ZWDK_b)=vZ4>;pTT0qs*bk9AN6XRLt9}&6&`vVix|%$MB*8ne zv?D6-F%HswIqw~ssy&Oi_Sm`ZPzNHFQ5y#cFk0gu#fn*sxSy`+c`)>zT8OYq*>&q% z<^|7Ob{hG3^x4^<@9|b70w>A7-2`(0;WQwREY zteL$M=!Hl2OJ^=uz(;_UaD0<`tV!kenPoz-4F*4UW`-^3+krS6q@NDmuDqME{Uq%c zjUZFdrOM;~FYq4>7kH!$Khu~=hCaZP+Zsqpe$_^Z&f6jCCB$f5z>0obLssy$#LmYX z&HG^O7`+*34;q&V3uzRq8N-@{K%8t06;Um{OSt-e9-UJMTvpFrnc+0PMd?OlM*ll&*mTrgHGSn$GWiG5w zJpS2acbctp1&;FOz2EWRLnkxIs?x^g%T~v|nhy21wMEJxtHb2f2Z<-DBA+Ob)^w=; zW%(GXKc>EyiS0SFtuie(G9>Q3<}=mv5gm4QBM86+0_jAOu8u}XoXcfX`wt>aa(7J^ zd=1n^L$0*BSECW~HpMG*=P0lD{^c&?ws3NG!4j5!|DB6!T?u6lJ%;;>tktmXEt>T% znlHDw%%Y(zCeh1|Q{o)ky8WX+s;nIRh69(9Mv!ckW;qZ-qy*>gxm%CKvPUTOs^`it zdHPSF)riEq=fSi4)J1x}yA!o*pheBFOmHTl1#s6PZ|<=!`n&=1JwHc~h)2>yCb z@D|yshR(t`xj3c|-?=9sx3CDG{M3F4{DtG)Rlx_Z@WwjX(GzL=a*E?iN;4*h;~3=b z5;A+;y06o1gfg6bES!28#r}dl`VWm$86nBe-0Byfm3Gf~XA5VBIqsWLf4HOcD7jlC2DXr(h6H z&6!AboV9y;5#Vaej<5Hbcg@6O=AT$NG=2Y1pT}FxeZ_uTE~JWvvd-4#AH|KwM2F=9 z>JY6-0AGl3hu!TS?2gNFd-A|HP3~F#(nHp_A6EPx4Zg{scv% zz54rn>+#MGvB1SlV5rHNnF}2`?P@@*N(Tk`?UF6+($6d}oBXI}9!|<6?7i!6xWWVn zXN&8uNX5p)gWgNT5L02&ugRpC?&>;%Az4t3D2PVq1=Vw;&8lK_t4!-KdzmhB_va({ zOrm1&G3oY#Ty)M@l_1)Auuf~|G_)j5MMmJWbAxWPeHt({2ae zx)K}jDLdLp-Ky!d!}gP!V+84bgbfZEA13Xx@Bnv};juh4@~*$5q0D8IFPgUA4e~&Q z6f0y{=IZrDiwY##MN*Z{oP3dS)^%Ho;$2ahyPnb{`g{e!G>hJEZfdMEy(_wh_=e1S zO95x({j{lQhElC5BQXWT7we6swsp9I+_QysauYwTOwpPBSNCXHs^gCv?4S%vV$48} z*35<~yE`iqb%Ny^s)zIzl`xR1V9DsK2Lo6};zL1DxW;INQsKawnos({*lprVwDI`+ zr@Al&;d$x9MpB*L#?D`}ZA70w@!`qo?#2ZVu}ojr+;zelUZ?oL?G3p~u_BrlypXyYP;1;IwaguC%;xy@SJoF{p z_(4DQsl-1_nViLcMnt~6>c@K^@1(jHbsIiq{6}FIVW;Oxumd)Pj{wZIYzEsif*9d# z;1%2GMh~H5k(XA3OivZRH;<}XUK-I@C>*Idg?W5aFF({Nj9CVz+}UKomBp;*tdE8c z#g5J-x+xePq@&QV2JoGLI*o}mT_4>x7JPq5^Q@kU{u?l#x*?GnD@j!E;Qv-^Q~JEB zaM5*+y4&zs7u$9)YWhmHX_DNiaZ*Gl^aXC}I9Dsd5ahq2Y}RU6X>6UQ`I#-4@bRqE z5Fb=UhC>Ecs?-GZUp+*Fjrsyf`BZp#`<`o8wOZUv98eB94Y4s05`kvBE5NS^8x z!RnbH>*O<&As)SHo&xfM){djB2Im~TZeGnFWh!eLDT;U`%r?hJ(=GB41~#7+0)p0$a$vB3kL)%2o+WcSNU~QIg_)K z?33$U`VM9YdAxk^Nm4Fe`tre^%&mw&Su?9`1)T!N7u4~5ClBrP#HjkUny6~X0{YcQcc+|NQfi-Jufs7JUiBMLvKm27 z&8kvr6$P{`%{?EuK%{CPrv`SNN5IKVB#n{pIRYvSpfVzv9R;iTDOZ%G53B5BR@ZH# z{d-c-3U(D~E60f(9HI$?eGF;6~wxP?49;tkGUDPFfdzh0)$umUKATvuiRa z64u@}1vSiE+H_nL6s(9$#i zOu=Srq2woD?E<*fmO7(VabI0v=aYSY)r)i1H=#0^3{I9OT!OJbZF+a<;2QiX_I7?( zBjkUPlIkq^at`j5HC!Hqe+9$hbTDsejna&^l(RK=HxLXQ!Z29nS8{73cX*u zN@*wch1o1V-e6nlBi+SavjKORE$}tQ4CJtUSc<{~Yc<|De}qX~p;Gvm4Da}JSqcy* zZeWqU<{56s+z+>Mhod>I{{EvF3u!BO@}B=cvC2_Ro1%&(l0IIvOR+Jk-YTJ@TCinG zI`<4>8nD*O*mQ<}RdrLacA%BQIYk>8>2pgySd42$drzi*zTr@Nq!`-K!pqrf&i2k> zBsL+(Mgp}%{OIGe#gW-sWwjL)^5yTs>pq*(H@Ot-f^}4lT>076c1Ah+RIZbj3pSR( zH05~up{#%h>>F!KFcn7azG+Fn5k}+a{q1dF9D5@-;?!L!egQU4*4knTSKQ+do9(Hr zSyQPUpPO*Lb?2n(@=8C3Vp!P!6g0q1v{`n3dbFm`g6y(!0g66;4w~Xc{-GbMQFGr% zz>ERezJI0vf~c)T1MgF_-Po`%IQ>jTI<7|cYApxqR-(7+D%l;Pmkn23wO7e7--v#M zdROtbHzlC`P|;L4 zrOn^1kXY#cP$x;K@eJ*^3VJN|@(spP$XV>J*Z03&*N<0(2ab7J+N*0PD}S8{@8UwD zzA9`8aD~$cIc2XX>Bj+RbG4=*BMa$e<%fS#2nwY@x;a(GIDJCg>-vY2?=P)#g)5Lky*GLnZjhs6D)|-V7S=B3y}cIO z-spj=ESe9cCaQu#mK8XiUS5`&QSx@bMX~pH&zh!R`M`#?*jrq* z#@QT$CtI4*9eHS0pVQdS=9&|gf@kXuCuY-OEYjVMo)GEZJFhD3bX@*XP_HxYLU3Rm zyno+_ET`&|-H>S?u1rQoz|0{ttj^@K_HxJ!)xJiXeW+Gn 
zm?crEyOLZxqF7o3^x;JdH2Tq1erBbTKe@6YGeKka#>+pb4_QAGFE{@YiHQ88 z7ux~CSS055uG~fwwuiKQ)Q7E8QLUNMpD6S4>mtzxq#hu!c}9&j7n2P`c8Y(5{)-g5--(|K=6%WY$ZR?~ck7iPA;!y&N~8+_YgD!Sn+=A7U^ej|~6l zqrc0kJDv8yDaF@=CiAaKOSsR;gU;{Zx@FzN^?-q~@>4xov%_={aP;(0vt6%VB8Ya64xNJX5YsPgXC1B(Zee#chd_9dRZq`rFx5LjIXK36yIS5DfW(0_E)p#k z73Z>d(?Sqwa{1S07VfQi|LtLw?Cr)GDS6Dm$@t~@RauE%632MmX{aT5^&bV*AuCpO zcO!5aSbBTMhgZpwv3~D_Z{w(13&6Q8F^(WOrDkVJ-}csd^5+wcx4wSR0UG&Il?60ugn#e9ge>yZCGSl`?XzF_IpY;!t;>O*;3vxE801+7>HfG(ZTDoNO>i? zr5KY?)G-aM4~}YU8n0KEWBNP%&5@PqSN#Vy$_|-@E=TUN&*$71R3RZ#9dblQtX(ho zzKyG8exZMbMDBH|wtTmGp^)C+jq3j>jCe&H-#!I7Mqt(!;|l0Qw6G8+PCG?a`T%{s zlq;bip<)SvNAk~xjVp5eyPYx`nP#bOQ*af0CaGZ$7=cnm+ac>rL{Cc+#?|bO9uIny zV4|)O9LfHazLIy)n=?Hz-B$K}w&8H|wn*F{a$G2Q7Hq{n(QW*5%CQ>ik-c5Og@_EH z39NMW_w2zIaJXjix;jk>A3=QoGnyPUG}J<}X-Jd<2uSQ&OZvy0PMm7_i*LIEiUG{`&^{fh-(Xf0ViAeLd4-k3g}Wp=J^#U z@7|h?>_s%+QMMwp%2NtB1zdEmDXpLpszy&cm4-@Hdf2v`i-7{kWijN!4AZC(UE@M_ z7bFma*8z*$wB8|d8WV$yTnLtK(CFjPm%6R*(~ToWvSZ|(y}g^2m)=r-wswXuQhtc5 zbZN|IP3Bf#6t&Q74t)t zqA2Afl!72fC+=ujIHu}&Rzg{;!e-GF#}`7|1t6#X z0P`~61-!a4F$PboRpz*44Y)Qmflqf4ZN3(V>}}Nd)F8gu1tV z&D_!+sGiZX2${cf9_y@eCf%pe#f?^$ZKeaC5?Omd_8I(>?;2&d5ne*r!+0g8K#5+Q zil_$xN*ux+5kAd`^Wz$#P_fEtz^9M##yE@%aC7r_4^uWOUk>II&L9s>3UOY-q(q#0 zR@!I=mc34#Io09$14=fT+5l4c>?>?p-^DW%KL2{+l5L^4nI|_m78KkG)V=AN_K2A zgfR^sS?k>6sTB;oTgh-S{b?iT?-ZG3eeXm5<0jy|q&_g%J8aq&ZEWgq8lz(VvRsg= zrmKHf5fyti5GFsR%!{2>eiK!lBR|ZLn?S+-S*Z>lz(%vPWoJDA4NxMa#B(ND4hHK6 zEvVGY&kolN4_(6PZoc2Ar0IBKE8$s7;>_T}(C_gJIU6FiK>ChZ>AHashQy9!QO+Ay zHr|^GHzlW0;41_J{VrWL=L03h0)Js>32r{(jjP?_d4f=Ybvn0 z%_W)*@+geSc-(T8)0b%*t8%w?+Vm*y^#| z8A#Qp%rvpQ#Ly#WNO$)fqKq8Gs3ypv)l!}mYgkuoq$wIeNFQ2eKAJiX6XxtZ^GXSn->#-ty7>b=u@>gTC>mo#fz=#F4=>yW+I4<+e)Zc$es!Tzw&W^AYF z0C7bCZc&OQO==vCC@0b7U=CbOe=(jGGoOA_#!w2m8tL+iCRUi9 z=G3GHt^{nHg^f#lOU^rOkgk}K+gD;ud8FOipeG8*u@mYRtAYp^VJQ%u$1C?7Vu%T0 zV4}A7uGLUs*hN)x{RuaFjoY4ypZRzM+%u`RTi?q6N3j73+UU`bMLVS~i*D{$FXv$A zZix->%C1pnZ6Sw}Bb`bo!)|YR>8vfhcvhQOkOd_YAWC4e2jqo?soJ~P>>W?e)0ieU z)fHQi5^q!`k5=QMFk`Mkp;D=R$n$BkR*=-eUy}@_a)D;TO91nxPA zK!a! 
z4%=>PW2QM%96s}$%F4Q<=NX-StPwoOtAFG}2pMo8s`5~yDGj(Klyy2lx5aMlu!`1L zr$O;CD546C*kk-0ZoU+4fgwl9gevK|d&%cuGlffMgBck+>K#_kzEW#ldk@+HpK{xJ zSy*MG>#y?VZ&Zx4O03m7 zS{zP$N3@l=vNB{Nx4hr(TyB}r?qJvy{o!4@Z}|HoT4s3#-aia;Of;py4Lh_mJYS5c zgZ=eU)~W5`=GU5-FBY$Rmf9r_xlBVijon%y z+|asx$oOKYxX()6$H4}XnKIn0OE5-%c)oGGbgX*0W9FO92f`qPxsCN^Wza%yF6bWF zh-;qasiOG=&uHF8);|*++Aer<9URD@b~KuCewQMeOlG?GHeY{h0&+EKNhnz6rUvdA zE}+b(tK3&Vi+dEx-AP=ESUUzOT=6|eXoGuG5&#c~sg+uU1m53UExPk;93hns7Wcp$9 zw(Ox};~R&wJO2xP5rXcJ0ANQ_IUIdWbQXR)vGG@v4~?wtb#Js>tHz-KOK4R|*tqG_ zBKZ(j zW*fzl)d!mHz^r8F80V+B=Dy;FQygp*=O1S7?ADUEMy+PAzgKH3h7Uihm(`RPysxZZ z!{>DLcIndhXCZTdSQu?)u_Eac)6z z99t3oV8sui>R5jBCk8L+2e)B zTJzr-eWK3m%T<+aZll`~x{T!edSr3>R={*yAj2x$^z%|9m@gn*!J_8K1Sf&s~!y-q) z&#})q{Oe-Z#>=JMCEQon7Zy68DKvLTj3xveAuFDrfgP*F&T0L1r^=h#T{QJq^FA+# zGisR3YsE_GHDzV4it9_-`|o`%*!07!Lmh>J+3MEc+5Z4>BWyO)*KROKW9~Vw%fbHu z5L(XeCeZHIFC`c`okkt`8oJce zaG>PyM)JAio(4d#L-=v~8TgCDwjO@5d1D~SdB@x2hFG!A5QQqo*pNN()8<=WA6|H; z!P;D!tIKX#Ff?O<$2of$vHvl?hWOwggweb7)h}JZ#HG(ZlWzl|7i<_2qh8E#j zTO9}lFv%ka75N4;EvwBsl;a3FN!>y`v0CiAC4CZBS|7h;xwSZ66)857O-fGft94fV zZ+|21f7xgDLHG~x6Ty+*Nv7%(YBuT?{X9i`bq&leMt{{SA`!vCMr9oHkzKC6{{RHG z_$RGhE%Y$T&@`E7Y&9#@M*je30J$R^anin1_*wf*T6jBG9!0IJak{%nAK8TPGh}}D zJ>>0Uj20OT52($33E*Gbg5l$h)o-Q*1@hpxAh{i}k8n@7rFoObLX@93y1e&Zx}RNz z!eV7bI*m8oTF=dYUHTrMp?|?Nz60wzKl(4j-w%0vb~WU}=I%(?3=mB61H0(G>%%{3 zzxW|fg*E$Wuf87K+gaYWSZTNDBaUJI!x5FoPIKD5Qpfh$yR^NQNi^+7$7l>m)7(gn z>SX7FJ--@;Kilt5x4l_D!wT-pTuk>KTNekak~tjn(B#)n7dVB~snvJxpXs;YKdIJE z$tJryKkMXvRQy`~j`bgd-X*xwb?p!PCi>i!Xy=D%VVv%H^0*X7Xib8NQ^={ZWBn(6*W$@(sv zccX5%nnNwQJZ>UFe;U%Zg*@3NX*{=$a2eEPFh9C^6_MlL7Ojgis|J%hcB2k)-2VWD zaL{?34)Uc2H+tnJh_ zz-U!LY-MmgN59jxbJxEbKFcCTlNlJDzbW~f{gK@F>0H*GrQUdwW&YB*n$F(`aEt;N`jz00p9r_COFu5%U?$1(VlB#O;R-|X*uZT6- z(l|7|R_aLc^DVpN{c->p{*>uFU*f-+TSeBSY=h+sbN7Fd$G@$5W{2V%Yy25CVm5<= z7~EUaImb`NwRFD{FENFRZO7$pfN|@`73@*r{1(xV;8*s2G`=6i@hH4`sKYaE(zQUU zpbS9g2b|>f{OeOs_=|mN(X^U=q&AQPHN2mmK9L0GWb8iQ?8%lZx6?)Xmg#f3NZ~B{{Up3-o#hud_}@pbRq5N;k~51nosil z53Lu2vM%i?n_<>!*jtahwp3=cwgXV>$tKU?^H;Bn(6zF}}= z)g_V}h$Wm{eC`VtUBKaq2Pc#BSFZTW;m^a}dO!F{d?BUj8itmN&nC&4F73eX>C*rX z+2q%cYJL>cd}pQJPjJxbeq(vMY!SZEXu$B&IR`J(DmnvS50}cF8OdRtN>O?^-&K8; zlh)harJ?F)nOnmsskqd>soSEp+iyjEtgnB(ddzVtt}bk3JS87 z^LKULqi;-PgOl2g16Nyrhgys=Pj7f<%$JPOuKAgd-7+3XBb~p?HJ`3{e@DBrn&Rs6 zJB!I2<|xk9wUZbi9{C{l$0M#QAH|niAAq#0d2}0%PD{z-G0ha%WetE>fI1#=o}=qu zZHLO~;-sf$rrd7abbPD+AIkp#BjfP7(!o}B6jZ(0C1%rH-F%;>wz-v~-s)F&%l(<7 z!E0w_m3wqy9_$_oCz4N7&q3C*JY9a;rOa^MTH5Jhwz;}aNyk6{IAh7=`qdv0Y7pE? 
zMU2-8l^$f5Qw3Hz=L4YnXZhAdnlNJn*Az_U9Q^u zzUv=5FJ&vr)_PlAeDB@guii(f+3B7xjyJTqmeS%U#G3~Aaq77|9FCnnm2*lhW3Jo5 zGRHHZ8*U?Jjb0iC*WR~vpR`MCjMs@1ZR)M} zeL&57lHX&?t(Ub0N1xrcm*3X={{X=AJKq_2vqzKc+H4Aw+)XOwkr!-u%Op<$= zvGBv)z>c&1)Nx(SBAVt7E1QU&guXMOJo_>0{5s&`Yt1M2zcunck=Hao z0N?1b$>IGQP|$u+=0_%>_Q-m3kjM)D2OW5?o%KySO@qgmg5K`lJwDyciVS8&W87eM z8?#Qi_=l|Ok=onOcF~)s0V7kebKIO(KZ~FIk=wVg9+<97MKu)VX6@Ulq_EX0)KsOXuG)3erMsQhg{)0$7N7Rb zK+kJ)Ahr8EXUj2Xo=0$cbI{jef1(Xy{wr8xzM9%#s|&?;%J%A~a`h!jhV>a7^NRAD z-9{(fiF&tvWn*{|ZBo$$v_v|H^huWl7%Ep6ww zmc4#*t&prv3jwqff=4;8cGdp?Z!h>)EfuSOWGsvcYi%|~SP@ufcI6y_-=WS2OjqWl ze;r|p0MS{(*FXs#cYTI#0ouuprEoE{b|<|u4~~l^thU!y5X!e-D^E5WZoH6v0N@NB zoon&D)5AZn;vOdyoSS;9@9D0~?>>S#{W>(&Dq1DCW$UN>1N4&P{t64LS!}nudtF1$ zjbw>Tv7nI~<-t*mV3p%J3(f{>QtRX2h5jW;ANKO-Hx}~cBzucc41{D68w`+r4;_d# z`I`@m=Ci!Fdl!P@8&J}#-W)eQ4%q#_g_p)FjVDc?H$gQE#tRZ5+`d>-l08Q~ z4@&ZDcvC@ISZ3odEgrv?nw3k69Aw;=;(mkb`bUEAOIv8ZByS0eq~a7|CdVLQ31CYx z9Q@rm=QZ=E?Hl_p&8ow1b*K3A`$JEW37%66t;&9o~69y^i0`8rx91os|@cC32yO845W0RCOb{;=OFEgY!%U zMOu|s+Wu&!-_5K2?f$2wM~P|GYBg6f^ZdJ--WUCdwGC@sg59RGmDn>$G_DuP1E6** z^0CJXLC@DBrud`#G1~Z6>T7!>nZzF>%90j@xjg~k{(`goG4X>?*K~!kj!87jk2vqX z^BLKJ=nAfKJ@Jw2T}OjHEnMptZKdkhi6m%SE+GBfyz?O~jO5^f{?Dm3^;nE%bxN*|IjN`>IN* zJ92T6liIwW#uoaOyEJ!JvB-+1KQ97BW1fQp2iHAD4SkkH#8oQW+AqIH@;(P08uZ)d z_m`2CYo$p7eX~rswz`Fo7LI4%wY_uO@vf@J;!8D(+uqyV!m2*}WjADZ+y_s_yp`-O zEwxD{(!}=dA3K&{gD18>9+f1o;)!Nr%?z=te8YAFcj!M_`-tKx$tNiey z7031JaH}emjp+ORe65nAdJAG61( zM-Te4X1FfC#4kbl^rXB?Td3q{B)sy##?m*R&yGF0qUXfLt^B68w`*9y+vc6IfH=re X-`J9C)u%;8=#JWSR9(_<@Uj2d> None: + """Prints a string with the specified color.""" + print(color + f"{text}" + Colors.CEND, end=end) + + class Colors(str, Enum): CEND = "\33[0m" CBOLD = "\33[1m" diff --git a/python/samples/concepts/setup/ALL_SETTINGS.md b/python/samples/concepts/setup/ALL_SETTINGS.md index 1f2536ad4738..4deba2610935 100644 --- a/python/samples/concepts/setup/ALL_SETTINGS.md +++ b/python/samples/concepts/setup/ALL_SETTINGS.md @@ -1,4 +1,6 @@ -## AI Service Settings used across SK: +# Semantic Kernel Settings + +## AI Service Settings used across SK | Provider | Service | Constructor Settings | Environment Variable | Required? | Settings Class | | --- | --- | --- | --- | --- | --- | @@ -36,7 +38,7 @@ | Onnx | [OnnxGenAIChatCompletion](../../../semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_chat_completion.py) | template,
ai_model_path | N/A,
ONNX_GEN_AI_CHAT_MODEL_FOLDER | Yes,
Yes | [OnnxGenAISettings](../../../semantic_kernel/connectors/ai/onnx/onnx_gen_ai_settings.py) | | | [OnnxGenAITextCompletion](../../../semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_text_completion.py) | ai_model_path | ONNX_GEN_AI_TEXT_MODEL_FOLDER | Yes | | -## Memory Service Settings used across SK: +## Memory Service Settings used across SK | Provider | Service | Constructor Settings | Environment Variable | Required? | Settings Class | | --- | --- | --- | --- | --- | --- | @@ -49,7 +51,7 @@ | Redis | [RedisMemoryService](../../../semantic_kernel/connectors/memory/redis/redis_memory_store.py) | connection_string | REDIS_CONNECTION_STRING | Yes | [RedisSettings](../../../semantic_kernel/connectors/memory/redis/redis_settings.py) | | Weaviate | [WeaviateMemoryService](../../../semantic_kernel/connectors/memory/weaviate/weaviate_memory_store.py) | url,
api_key,
use_embed | WEAVIATE_URL,
WEAVIATE_API_KEY,
WEAVIATE_USE_EMBED | No,
No,
No | [WeaviateSettings](../../../semantic_kernel/connectors/memory/weaviate/weaviate_settings.py) | -## Other settings used: +## Other settings used | Provider | Service | Constructor Settings | Environment Variable | Required? | Settings Class | | --- | --- | --- | --- | --- | --- | diff --git a/python/samples/concepts/setup/chat_completion_services.py b/python/samples/concepts/setup/chat_completion_services.py index ee4d6d2dfa67..bd53042dc666 100644 --- a/python/samples/concepts/setup/chat_completion_services.py +++ b/python/samples/concepts/setup/chat_completion_services.py @@ -3,6 +3,8 @@ from enum import Enum from typing import TYPE_CHECKING +from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError + if TYPE_CHECKING: from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings @@ -25,6 +27,7 @@ class Services(str, Enum): OLLAMA = "ollama" ONNX = "onnx" VERTEX_AI = "vertex_ai" + DEEPSEEK = "deepseek" service_id = "default" @@ -39,7 +42,8 @@ def get_chat_completion_service_and_request_settings( Args: service_name (Services): The service name. instruction_role (str | None): The role to use for 'instruction' messages, for example, - 'system' or 'developer'. Defaults to 'system'. Currently only supported for OpenAI reasoning models. + 'system' or 'developer'. Defaults to 'system'. Currently only OpenAI reasoning models + support 'developer' role. """ # Use lambdas or functions to delay instantiation chat_services = { @@ -59,6 +63,7 @@ def get_chat_completion_service_and_request_settings( Services.OLLAMA: lambda: get_ollama_chat_completion_service_and_request_settings(), Services.ONNX: lambda: get_onnx_chat_completion_service_and_request_settings(), Services.VERTEX_AI: lambda: get_vertex_ai_chat_completion_service_and_request_settings(), + Services.DEEPSEEK: lambda: get_deepseek_chat_completion_service_and_request_settings(), } # Call the appropriate lambda or function based on the service name @@ -87,10 +92,7 @@ def get_openai_chat_completion_service_and_request_settings( Please refer to the Semantic Kernel Python documentation for more information: https://learn.microsoft.com/en-us/python/api/semantic-kernel/semantic_kernel?view=semantic-kernel-python """ - from semantic_kernel.connectors.ai.open_ai import ( - OpenAIChatCompletion, - OpenAIChatPromptExecutionSettings, - ) + from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion, OpenAIChatPromptExecutionSettings chat_service = OpenAIChatCompletion(service_id=service_id, instruction_role=instruction_role) request_settings = OpenAIChatPromptExecutionSettings( @@ -120,10 +122,7 @@ def get_azure_openai_chat_completion_service_and_request_settings( Please refer to the Semantic Kernel Python documentation for more information: https://learn.microsoft.com/en-us/python/api/semantic-kernel/semantic_kernel?view=semantic-kernel """ - from semantic_kernel.connectors.ai.open_ai import ( - AzureChatCompletion, - AzureChatPromptExecutionSettings, - ) + from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion, AzureChatPromptExecutionSettings chat_service = AzureChatCompletion(service_id=service_id, instruction_role=instruction_role) request_settings = AzureChatPromptExecutionSettings(service_id=service_id) @@ -152,9 +151,19 @@ def get_azure_ai_inference_chat_completion_service_and_request_settings( AzureAIInferenceChatPromptExecutionSettings, ) + # The AI model ID is used 
as an identifier for developers when they are using serverless endpoints + # on AI Foundry. It is not actually used to identify the model in the service as the endpoint points + # to only one model. + # When developers are using one endpoint that can route to multiple models, the `ai_model_id` will be + # used to identify the model. To use the latest routing feature on AI Foundry, please refer to the + # following documentation: + # https://learn.microsoft.com/en-us/azure/ai-services/multi-service-resource?%3Fcontext=%2Fazure%2Fai-services%2Fmodel-inference%2Fcontext%2Fcontext&pivots=azportal + # https://learn.microsoft.com/en-us/azure/ai-foundry/model-inference/how-to/configure-project-connection?pivots=ai-foundry-portal + # https://learn.microsoft.com/en-us/azure/ai-foundry/model-inference/how-to/inference?tabs=python + chat_service = AzureAIInferenceChatCompletion( service_id=service_id, - ai_model_id="id", # The model ID is simply an identifier as the model id cannot be obtained programmatically. + ai_model_id="id", instruction_role=instruction_role, ) request_settings = AzureAIInferenceChatPromptExecutionSettings(service_id=service_id) @@ -355,3 +364,50 @@ def get_vertex_ai_chat_completion_service_and_request_settings() -> tuple[ request_settings = VertexAIChatPromptExecutionSettings(service_id=service_id) return chat_service, request_settings + + +def get_deepseek_chat_completion_service_and_request_settings() -> tuple[ + "ChatCompletionClientBase", "PromptExecutionSettings" +]: + """Return DeepSeek chat completion service and request settings. + + The service credentials can be read by 3 ways: + 1. Via the constructor + 2. Via the environment variables + 3. Via an environment file + + The DeepSeek endpoint can be accessed via the OpenAI connector as the DeepSeek API is compatible with OpenAI API. + Set the `OPENAI_API_KEY` environment variable to the DeepSeek API key. + Set the `OPENAI_CHAT_MODEL_ID` environment variable to the DeepSeek model ID (deepseek-chat or deepseek-reasoner). + + The request settings control the behavior of the service. The default settings are sufficient to get started. + However, you can adjust the settings to suit your needs. + Note: Some of the settings are NOT meant to be set by the user. 
+ Please refer to the Semantic Kernel Python documentation for more information: + https://learn.microsoft.com/en-us/python/api/semantic-kernel/semantic_kernel?view=semantic-kernel-python + """ + from openai import AsyncOpenAI + + from semantic_kernel.connectors.ai.open_ai import ( + OpenAIChatCompletion, + OpenAIChatPromptExecutionSettings, + OpenAISettings, + ) + + openai_settings = OpenAISettings.create() + if not openai_settings.api_key: + raise ServiceInitializationError("The DeepSeek API key is required.") + if not openai_settings.chat_model_id: + raise ServiceInitializationError("The DeepSeek model ID is required.") + + chat_service = OpenAIChatCompletion( + ai_model_id=openai_settings.chat_model_id, + service_id=service_id, + async_client=AsyncOpenAI( + api_key=openai_settings.api_key.get_secret_value(), + base_url="https://api.deepseek.com", + ), + ) + request_settings = OpenAIChatPromptExecutionSettings(service_id=service_id) + + return chat_service, request_settings diff --git a/python/samples/concepts/structured_outputs/json_structured_outputs.py b/python/samples/concepts/structured_outputs/json_structured_outputs.py index f6ea600cd56f..b1eacba11fd6 100644 --- a/python/samples/concepts/structured_outputs/json_structured_outputs.py +++ b/python/samples/concepts/structured_outputs/json_structured_outputs.py @@ -109,7 +109,7 @@ class Reasoning(KernelBaseModel): async def main(): - stream = True + stream = False if stream: answer = kernel.invoke_stream( chat_function, @@ -127,7 +127,8 @@ async def main(): chat_function, chat_history=history, ) - print(f"Mosscap:> {result}") + reasoned_result = Reasoning.model_validate_json(result.value[0].content) + print(f"Mosscap:> {reasoned_result}") history.add_assistant_message(str(result)) diff --git a/python/samples/demos/call_automation/.env.example b/python/samples/demos/call_automation/.env.example new file mode 100644 index 000000000000..055528e2c2f3 --- /dev/null +++ b/python/samples/demos/call_automation/.env.example @@ -0,0 +1,8 @@ +ACS_CONNECTION_STRING= +CALLBACK_URI_HOST= + +AZURE_OPENAI_SERVICE_ENDPOINT= +AZURE_OPENAI_DEPLOYMENT_MODEL_NAME= +AZURE_OPENAI_API_VERSION= + +AZURE_OPENAI_SERVICE_KEY= \ No newline at end of file diff --git a/python/samples/demos/call_automation/call_automation.py b/python/samples/demos/call_automation/call_automation.py new file mode 100755 index 000000000000..2ea8058167d9 --- /dev/null +++ b/python/samples/demos/call_automation/call_automation.py @@ -0,0 +1,290 @@ +# Copyright (c) Microsoft. All rights reserved. + +#################################################################### +# Sample Quart webapp with that connects to Azure OpenAI # +# Make sure to install `uv`, see: # +# https://docs.astral.sh/uv/getting-started/installation/ # +# and rename .env.example to .env and fill in the values. # +# Follow the guidance in README.md for more info. 
# +# To run the app, use: # +# `uv run --env-file .env call_automation.py` # +#################################################################### +# +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "Quart", +# "azure-eventgrid", +# "azure-communication-callautomation==1.4.0b1", +# "semantic-kernel[realtime]", +# ] +# /// + +import asyncio +import base64 +import os +import uuid +from datetime import datetime +from logging import INFO +from random import randint +from urllib.parse import urlencode, urlparse, urlunparse + +from azure.communication.callautomation import ( + AudioFormat, + MediaStreamingAudioChannelType, + MediaStreamingContentType, + MediaStreamingOptions, + MediaStreamingTransportType, +) +from azure.communication.callautomation.aio import CallAutomationClient +from azure.eventgrid import EventGridEvent, SystemEventNames +from numpy import ndarray +from quart import Quart, Response, json, request, websocket + +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.open_ai import ( + AzureRealtimeExecutionSettings, + AzureRealtimeWebsocket, + ListenEvents, +) +from semantic_kernel.connectors.ai.realtime_client_base import RealtimeClientBase +from semantic_kernel.contents import AudioContent, RealtimeAudioEvent +from semantic_kernel.functions import kernel_function + +# Callback events URI to handle callback events. +CALLBACK_URI_HOST = os.environ["CALLBACK_URI_HOST"] +CALLBACK_EVENTS_URI = CALLBACK_URI_HOST + "/api/callbacks" + +acs_client = CallAutomationClient.from_connection_string(os.environ["ACS_CONNECTION_STRING"]) +app = Quart(__name__) + +# region: Semantic Kernel + +kernel = Kernel() + + +class HelperPlugin: + """Helper plugin for the Semantic Kernel.""" + + @kernel_function + def get_weather(self, location: str) -> str: + """Get the weather for a location.""" + app.logger.info(f"@ Getting weather for {location}") + weather_conditions = ("sunny", "hot", "cloudy", "raining", "freezing", "snowing") + weather = weather_conditions[randint(0, len(weather_conditions) - 1)] # nosec + return f"The weather in {location} is {weather}." + + @kernel_function + def get_date_time(self) -> str: + """Get the current date and time.""" + app.logger.info("@ Getting current datetime") + return f"The current date and time is {datetime.now().isoformat()}." 
+ + @kernel_function + async def goodbye(self): + """When the user is done, say goodbye and then call this function.""" + app.logger.info("@ Goodbye has been called!") + global call_connection_id + await acs_client.get_call_connection(call_connection_id).hang_up(is_for_everyone=True) + + +kernel.add_plugin(plugin=HelperPlugin(), plugin_name="helpers", description="Helper functions for the realtime client.") + +# region: Handlers for audio and data streams + + +async def from_realtime_to_acs(audio: ndarray): + """Function that forwards the audio from the model to the websocket of the ACS client.""" + await websocket.send( + json.dumps({"kind": "AudioData", "audioData": {"data": base64.b64encode(audio.tobytes()).decode("utf-8")}}) + ) + + +async def from_acs_to_realtime(client: RealtimeClientBase): + """Function that forwards the audio from the ACS client to the model.""" + while True: + try: + # Receive data from the ACS client + stream_data = await websocket.receive() + data = json.loads(stream_data) + if data["kind"] == "AudioData": + # send it to the Realtime service + await client.send( + event=RealtimeAudioEvent( + audio=AudioContent(data=data["audioData"]["data"], data_format="base64", inner_content=data), + ) + ) + except Exception: + app.logger.info("Websocket connection closed.") + break + + +async def handle_realtime_messages(client: RealtimeClientBase): + """Function that handles the messages from the Realtime service. + + This function only handles the non-audio messages. + Audio is done through the callback so that it is faster and smoother. + """ + async for event in client.receive(audio_output_callback=from_realtime_to_acs): + match event.service_type: + case ListenEvents.SESSION_CREATED: + print("Session Created Message") + print(f" Session Id: {event.service_event.session.id}") + case ListenEvents.ERROR: + print(f" Error: {event.service_event.error}") + case ListenEvents.INPUT_AUDIO_BUFFER_CLEARED: + print("Input Audio Buffer Cleared Message") + case ListenEvents.INPUT_AUDIO_BUFFER_SPEECH_STARTED: + print(f"Voice activity detection started at {event.service_event.audio_start_ms} [ms]") + await websocket.send(json.dumps({"Kind": "StopAudio", "AudioData": None, "StopAudio": {}})) + + case ListenEvents.CONVERSATION_ITEM_INPUT_AUDIO_TRANSCRIPTION_COMPLETED: + print(f" User:-- {event.service_event.transcript}") + case ListenEvents.CONVERSATION_ITEM_INPUT_AUDIO_TRANSCRIPTION_FAILED: + print(f" Error: {event.service_event.error}") + case ListenEvents.RESPONSE_DONE: + print("Response Done Message") + print(f" Response Id: {event.service_event.response.id}") + if event.service_event.response.status_details: + print(f" Status Details: {event.service_event.response.status_details.model_dump_json()}") + case ListenEvents.RESPONSE_AUDIO_TRANSCRIPT_DONE: + print(f" AI:-- {event.service_event.transcript}") + + +# region: Routes + + +# WebSocket. +@app.websocket("/ws") +async def ws(): + app.logger.info("Client connected to WebSocket") + + # create the client, using the audio callback + client = AzureRealtimeWebsocket() + settings = AzureRealtimeExecutionSettings( + instructions="""You are a chat bot. Your name is Mosscap and + you have one goal: figure out what people need. + Your full name, should you need to know it, is + Splendid Speckled Mosscap. 
You communicate + effectively, but you tend to answer with long + flowery prose.""", + turn_detection={"type": "server_vad"}, + voice="shimmer", + input_audio_format="pcm16", + output_audio_format="pcm16", + input_audio_transcription={"model": "whisper-1"}, + function_choice_behavior=FunctionChoiceBehavior.Auto(), + ) + + # create the realtime client session + async with client(settings=settings, create_response=True, kernel=kernel): + # start handling the messages from the realtime client + # and allow the callback to be used to forward the audio to the acs client + receive_task = asyncio.create_task(handle_realtime_messages(client)) + # receive messages from the ACS client and send them to the realtime client + await from_acs_to_realtime(client) + receive_task.cancel() + + +@app.route("/api/incomingCall", methods=["POST"]) +async def incoming_call_handler() -> Response: + app.logger.info("incoming event data") + for event_dict in await request.json: + event = EventGridEvent.from_dict(event_dict) + app.logger.info("incoming event data --> %s", event.data) + + if event.event_type == SystemEventNames.EventGridSubscriptionValidationEventName: + app.logger.info("Validating subscription") + validation_code = event.data["validationCode"] + validation_response = {"validationResponse": validation_code} + return Response(response=json.dumps(validation_response), status=200) + + if event.event_type == "Microsoft.Communication.IncomingCall": + app.logger.info("Incoming call received: data=%s", event.data) + caller_id = ( + event.data["from"]["phoneNumber"]["value"] + if event.data["from"]["kind"] == "phoneNumber" + else event.data["from"]["rawId"] + ) + app.logger.info("incoming call handler caller id: %s", caller_id) + incoming_call_context = event.data["incomingCallContext"] + guid = uuid.uuid4() + query_parameters = urlencode({"callerId": caller_id}) + callback_uri = f"{CALLBACK_EVENTS_URI}/{guid}?{query_parameters}" + + parsed_url = urlparse(CALLBACK_EVENTS_URI) + websocket_url = urlunparse(("wss", parsed_url.netloc, "/ws", "", "", "")) + + app.logger.info("callback url: %s", callback_uri) + app.logger.info("websocket url: %s", websocket_url) + + media_streaming_options = MediaStreamingOptions( + transport_url=websocket_url, + transport_type=MediaStreamingTransportType.WEBSOCKET, + content_type=MediaStreamingContentType.AUDIO, + audio_channel_type=MediaStreamingAudioChannelType.MIXED, + start_media_streaming=True, + enable_bidirectional=True, + audio_format=AudioFormat.PCM24_K_MONO, + ) + answer_call_result = await acs_client.answer_call( + incoming_call_context=incoming_call_context, + operation_context="incomingCall", + callback_url=callback_uri, + media_streaming=media_streaming_options, + ) + app.logger.info("Answered call for connection id: %s", answer_call_result.call_connection_id) + return Response(status=200) + return Response(status=200) + + +@app.route("/api/callbacks/", methods=["POST"]) +async def callbacks(contextId): + for event in await request.json: + # Parsing callback events + global call_connection_id + event_data = event["data"] + call_connection_id = event_data["callConnectionId"] + app.logger.info( + f"Received Event:-> {event['type']}, Correlation Id:-> {event_data['correlationId']}, CallConnectionId:-> {call_connection_id}" # noqa: E501 + ) + match event["type"]: + case "Microsoft.Communication.CallConnected": + call_connection_properties = await acs_client.get_call_connection( + call_connection_id + ).get_call_properties() + media_streaming_subscription = 
call_connection_properties.media_streaming_subscription + app.logger.info(f"MediaStreamingSubscription:--> {media_streaming_subscription}") + app.logger.info(f"Received CallConnected event for connection id: {call_connection_id}") + app.logger.info("CORRELATION ID:--> %s", event_data["correlationId"]) + app.logger.info("CALL CONNECTION ID:--> %s", event_data["callConnectionId"]) + case "Microsoft.Communication.MediaStreamingStarted" | "Microsoft.Communication.MediaStreamingStopped": + app.logger.info(f"Media streaming content type:--> {event_data['mediaStreamingUpdate']['contentType']}") + app.logger.info( + f"Media streaming status:--> {event_data['mediaStreamingUpdate']['mediaStreamingStatus']}" + ) + app.logger.info( + f"Media streaming status details:--> {event_data['mediaStreamingUpdate']['mediaStreamingStatusDetails']}" # noqa: E501 + ) + case "Microsoft.Communication.MediaStreamingFailed": + app.logger.info( + f"Code:->{event_data['resultInformation']['code']}, Subcode:-> {event_data['resultInformation']['subCode']}" # noqa: E501 + ) + app.logger.info(f"Message:->{event_data['resultInformation']['message']}") + case "Microsoft.Communication.CallDisconnected": + pass + return Response(status=200) + + +@app.route("/") +def home(): + return "Hello SKxACS CallAutomation!" + + +# region: Main + + +if __name__ == "__main__": + app.logger.setLevel(INFO) + app.run(port=8080) diff --git a/python/samples/demos/call_automation/readme.md b/python/samples/demos/call_automation/readme.md new file mode 100644 index 000000000000..ca69b39e0a3b --- /dev/null +++ b/python/samples/demos/call_automation/readme.md @@ -0,0 +1,53 @@ +# Call Automation - Quick Start Sample + +This is a sample application. It highlights an integration of Azure Communication Services with Semantic Kernel, using the Azure OpenAI Service to enable intelligent conversational agents. + +Original code for this sample can be found [here](https://github.com/Azure-Samples/communication-services-python-quickstarts/tree/main/callautomation-openai-sample). + +## Prerequisites + +- An Azure account with an active subscription. [Create an account for free](https://azure.microsoft.com/free/?WT.mc_id=A261C142F). +- A deployed Communication Services resource. [Create a Communication Services resource](https://docs.microsoft.com/azure/communication-services/quickstarts/create-communication-resource). +- A [phone number](https://learn.microsoft.com/en-us/azure/communication-services/quickstarts/telephony/get-phone-number) in your Azure Communication Services resource that can get inbound calls. NB: phone numbers are not available in free subscriptions. +- [Python](https://www.python.org/downloads/) 3.9 or above. +- An Azure OpenAI Resource and Deployed Model. See [instructions](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource?pivots=web-portal). +- Install `uv`, see [the uv docs](https://docs.astral.sh/uv/getting-started/installation/). + +## To run the app + +1. Open an instance of PowerShell, Windows Terminal, Command Prompt or equivalent and navigate to the directory that you would like to clone the sample to. +2. git clone `https://github.com/microsoft/semantic-kernel.git`. +3. Navigate to `python/samples/demos/call_automation` folder + +### Setup and host your Azure DevTunnel + +[Azure DevTunnels](https://learn.microsoft.com/en-us/azure/developer/dev-tunnels/overview) is an Azure service that enables you to share local web services hosted on the internet. 
Use the commands below to connect your local development environment to the public internet. This creates a tunnel with a persistent endpoint URL and which allows anonymous access. We will then use this endpoint to notify your application of calling events from the ACS Call Automation service. + +```bash +devtunnel create --allow-anonymous +devtunnel port create -p 8080 +devtunnel host +``` + +### Configuring application + +Copy the `.env.example` file to `.env` and update the following values: + +1. `ACS_CONNECTION_STRING`: Azure Communication Service resource's connection string. +2. `CALLBACK_URI_HOST`: Base url of the app. (For local development use the dev tunnel url from the step above) +3. `AZURE_OPENAI_ENDPOINT`: Azure Open AI service endpoint +4. `AZURE_OPENAI_DEPLOYMENT_MODEL_NAME`: Azure Open AI deployment name +5. `AZURE_OPENAI_API_VERSION`: Azure Open AI API version, this should be one that includes the realtime api, for instance '2024-10-01-preview' +6. `AZURE_OPENAI_API_KEY`: Azure Open AI API key, optionally, you can also use Entra Auth. + +## Run the app + +1. Navigate to `call_automation` folder and do one of the following to start the main application: + - run `call_automation.py` in debug mode from your IDE (VSCode will load your .env variables into the environment automatically, other IDE's might need an extra step). + - execute `uv run --env-file .env call_automation.py` directly in your terminal (this uses `uv`, which will then install the requirements in a temporary virtual environment, see [uv docs](https://docs.astral.sh/uv/guides/scripts) for more info). +2. Browser should pop up with a simple page. If not navigate it to `http://localhost:8080/` or your dev tunnel url. +3. Register an EventGrid Webhook for the IncomingCall(`https:///api/incomingCall`) event that points to your devtunnel URI. Instructions [here](https://learn.microsoft.com/en-us/azure/communication-services/concepts/call-automation/incoming-call-notification). + +Once that's completed you should have a running application. The way to test this is to place a call to your ACS phone number and talk to your intelligent agent! + +In the terminal you should see all sorts of logs from both ACS and Semantic Kernel. diff --git a/python/samples/demos/document_generator/GENERATED_DOCUMENT.md b/python/samples/demos/document_generator/GENERATED_DOCUMENT.md new file mode 100644 index 000000000000..1d67e6dcf800 --- /dev/null +++ b/python/samples/demos/document_generator/GENERATED_DOCUMENT.md @@ -0,0 +1,58 @@ +### Understanding Semantic Kernel AI Connectors + +AI Connectors in Semantic Kernel are components that facilitate communication between the Kernel's core functionalities and various AI services. They abstract the intricate details of service-specific protocols, allowing developers to seamlessly interact with AI services for tasks like text generation, chat interactions, and more. + +### Using AI Connectors in Semantic Kernel + +Developers utilize AI connectors to connect their applications to different AI services efficiently. The connectors manage the requests and responses, providing a streamlined way to leverage the power of these AI services without needing to handle the specific communication protocols each service requires. + +### Creating Custom AI Connectors in Semantic Kernel + +To create a custom AI connector in Semantic Kernel, one must extend the base classes provided, such as `ChatCompletionClientBase` and `AIServiceClientBase`. 
Below is a guide and example for implementing a mock AI connector:
+
+#### Step-by-Step Walkthrough
+
+1. **Understand the Base Classes**: The foundational classes `ChatCompletionClientBase` and `AIServiceClientBase` provide necessary methods and structures for creating chat-based AI connectors.
+
+2. **Implementing the Connector**: Here's a mock implementation example illustrating how to implement a connector without real service dependencies, ensuring compatibility with Pydantic's expectations within the framework:
+
+```python
+from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase
+
+class MockAIChatCompletionService(ChatCompletionClientBase):
+    def __init__(self, ai_model_id: str):
+        super().__init__(ai_model_id=ai_model_id)
+
+    async def _inner_get_chat_message_contents(self, chat_history, settings):
+        # Mock implementation: returns dummy chat message content for demonstration.
+        return [{"role": "assistant", "content": "Mock response based on your history."}]
+
+    def service_url(self):
+        return "http://mock-ai-service.com"
+```
+
+### Usage Example
+
+The following example demonstrates how to integrate and use the `MockAIChatCompletionService` in an application:
+
+```python
+import asyncio
+from semantic_kernel.contents.chat_history import ChatHistory
+from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings
+
+async def main():
+    chat_history = ChatHistory(messages=[{"role": "user", "content": "Hello"}])
+    settings = PromptExecutionSettings(model="mock-model")
+
+    service = MockAIChatCompletionService(ai_model_id="mock-model")
+
+    response = await service.get_chat_message_contents(chat_history, settings)
+    print(response)
+
+# Run the main function
+asyncio.run(main())
+```
+
+### Conclusion
+
+By following the revised guide and understanding the base class functionalities, developers can effectively create custom connectors within Semantic Kernel. This structured approach enhances integration with various AI services while ensuring alignment with the framework's architectural expectations. Custom connectors offer flexibility, allowing developers to adjust implementations to meet specific service needs, such as additional logging, authentication, or modifications tailored to specific protocols. This guide provides a strong foundation upon which more complex and service-specific extensions can be built, promoting robust and scalable AI service integration.
\ No newline at end of file
diff --git a/python/samples/demos/document_generator/README.md b/python/samples/demos/document_generator/README.md
new file mode 100644
index 000000000000..45bba5069d99
--- /dev/null
+++ b/python/samples/demos/document_generator/README.md
@@ -0,0 +1,105 @@
+# Document Generator
+
+This sample app demonstrates how to create technical documents for a codebase using AI. More specifically, it uses the agent framework offered by **Semantic Kernel** to orchestrate multiple agents to create a technical document.
+
+This sample app also provides telemetry to monitor the agents, making it easier to observe the inner workings of the agents.
+
+To learn more about agents, please refer to this introduction [video](https://learn.microsoft.com/en-us/shows/generative-ai-for-beginners/ai-agents-generative-ai-for-beginners).
+To learn more about the Semantic Kernel Agent Framework, please refer to the [Semantic Kernel documentation](https://learn.microsoft.com/en-us/semantic-kernel/frameworks/agent/agent-architecture?pivots=programming-language-python).
+
+> Note: This sample app cannot guarantee to generate a perfect technical document each time due to the stochastic nature of the AI model. Please see a version of the document generated by the app in [GENERATED_DOCUMENT.md](GENERATED_DOCUMENT.md).
+
+## Design
+
+### Tools/Plugins
+
+- **Code Execution Plugin**: This plugin offers a sandbox environment to execute Python snippets. It returns the output of the program or errors if any.
+- **Repository File Plugin**: This plugin allows the AI to retrieve files from the Semantic Kernel repository.
+- **User Input Plugin**: This plugin allows the AI to present content to the user and receive feedback.
+
+### Agents
+
+- **Content Creation Agent**: This agent is responsible for creating the content of the document. This agent has access to the **Repository File Plugin** to read source files it deems necessary for reference.
+- **Code Validation Agent**: This agent is responsible for validating the code snippets in the document. This agent has access to the **Code Execution Plugin** to execute the code snippets.
+- **User Agent**: This agent is responsible for interacting with the user. This agent has access to the **User Input Plugin** to present content to the user and receive feedback.
+
+### Agent Selection Strategy
+
+### Termination Strategy
+
+## Prerequisites
+
+1. Azure OpenAI
+2. Azure Application Insights
+
+## Additional packages
+
+- `AICodeSandbox` - for executing AI-generated code in a sandbox environment
+
+  ```bash
+  pip install ai-code-sandbox
+  ```
+
+  > You must also have `docker` installed and running on your machine. Follow the instructions [here](https://docs.docker.com/get-started/introduction/get-docker-desktop/) to install docker for your platform. Images will be pulled during runtime if not already present. Containers will be created and destroyed during code execution.
+
+## Running the app
+
+### Step 1: Set up the environment
+
+Make sure you have the following environment variables set:
+
+```env
+OPENAI_CHAT_MODEL_ID=
+OPENAI_API_KEY=
+```
+
+> gpt-4o-2024-08-06 was used to generate [GENERATED_DOCUMENT.md](GENERATED_DOCUMENT.md).
+> Feel free to use other models from OpenAI or other providers. When you use models from another provider, make sure to update the chat completion services accordingly.
+
+### Step 2: Run the app
+
+```bash
+python ./main.py
+```
+
+Expected output:
+
+```bash
+==== ContentCreationAgent just responded ====
+==== CodeValidationAgent just responded ====
+==== ContentCreationAgent just responded ====
+...
+```
+
+## Customization
+
+Since this is a sample app that demonstrates the creation of a technical document on Semantic Kernel AI connectors, you can customize the app to suit your needs. You can try different tasks, add more agents, tune existing agents, change the agent selection strategy, or modify the termination strategy.
+
+- To try a different task, modify the `TASK` prompt in `main.py`.
+- To add more agents, create a new agent under `agents/` and add it to the `agents` list in `main.py` (see the sketch after this list).
+- To tune existing agents, modify the `INSTRUCTION` prompt in the agent's source code.
+- To change the agent selection strategy, modify `custom_selection_strategy.py`.
+- To change the termination strategy, modify `custom_termination_strategy.py`.
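+
+As a rough illustration, a new agent can mirror the pattern used by the existing agents in this sample. The agent name, its instructions, and the exact wiring in `main.py` below are assumptions for the sketch, not part of the sample itself:
+
+```python
+# agents/style_review_agent.py (hypothetical example agent)
+from samples.demos.document_generator.agents.custom_agent_base import CustomAgentBase
+from semantic_kernel.functions.kernel_arguments import KernelArguments
+
+INSTRUCTION = """
+You are a style review agent in a collaborative document creation chat.
+
+Your task is to review the latest document draft for clarity and tone and summarize suggested improvements.
+"""
+
+DESCRIPTION = """
+Select me to review the writing style of the latest document draft.
+"""
+
+
+class StyleReviewAgent(CustomAgentBase):
+    def __init__(self):
+        # Reuse the shared kernel and chat completion service setup from CustomAgentBase.
+        kernel = self._create_kernel()
+        settings = kernel.get_prompt_execution_settings_from_service_id(service_id=CustomAgentBase.SERVICE_ID)
+
+        super().__init__(
+            service_id=CustomAgentBase.SERVICE_ID,
+            kernel=kernel,
+            arguments=KernelArguments(settings=settings),
+            name="StyleReviewAgent",
+            instructions=INSTRUCTION.strip(),
+            description=DESCRIPTION.strip(),
+        )
+```
+
+The new class would then be registered alongside the existing agents in the list passed to the group chat in `main.py`; the exact variable names there may differ from this sketch.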
+ +## Optional: Monitoring the agents + +When you see the final document generated by the app, what you see is actually the creation of multiple agents working together. You may wonder, how did the agents work together to create the document? What was the sequence of actions taken by the agents? How did the agents interact with each other? To answer these questions, you need to **observe** the agents. + +Semantic Kernel by default instruments all the LLM calls. However, for agents there is no default instrumentation. This sample app shows how one can extend the Semantic Kernel agent to add instrumentation. + +> There are currently no standards on what information needs to be captured for agents as the concept of agents is still relatively new. At the time of writing, the Semantic Convention for agents is still in the draft stage: + +To monitor the agents, set the following environment variables: + +```env +AZURE_APP_INSIGHTS_CONNECTION_STRING= + +SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS=true +SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS_SENSITIVE=true +``` + +Follow this guide to inspect the telemetry data: + +Or follow this guide to visualize the telemetry data on Azure AI Foundry: diff --git a/python/samples/demos/document_generator/agents/code_validation_agent.py b/python/samples/demos/document_generator/agents/code_validation_agent.py new file mode 100644 index 000000000000..c85da09476d1 --- /dev/null +++ b/python/samples/demos/document_generator/agents/code_validation_agent.py @@ -0,0 +1,69 @@ +# Copyright (c) Microsoft. All rights reserved. + +import sys +from collections.abc import AsyncIterable +from typing import TYPE_CHECKING, Any + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from samples.demos.document_generator.agents.custom_agent_base import CustomAgentBase +from samples.demos.document_generator.plugins.code_execution_plugin import CodeExecutionPlugin +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.functions.kernel_arguments import KernelArguments + +if TYPE_CHECKING: + from semantic_kernel.kernel import Kernel + +INSTRUCTION = """ +You are a code validation agent in a collaborative document creation chat. + +Your task is to validate Python code in the latest document draft and summarize any errors. +Follow the instructions in the document to assemble the code snippets into a single Python script. +If the snippets in the document are from multiple scripts, you need to modify them to work together as a single script. +Execute the code to validate it. If there are errors, summarize the error messages. + +Do not try to fix the errors. +""" + +DESCRIPTION = """ +Select me to validate the Python code in the latest document draft. 
+""" + + +class CodeValidationAgent(CustomAgentBase): + def __init__(self): + kernel = self._create_kernel() + kernel.add_plugin(plugin=CodeExecutionPlugin(), plugin_name="CodeExecutionPlugin") + + settings = kernel.get_prompt_execution_settings_from_service_id(service_id=CustomAgentBase.SERVICE_ID) + settings.function_choice_behavior = FunctionChoiceBehavior.Auto(maximum_auto_invoke_attempts=1) + + super().__init__( + service_id=CustomAgentBase.SERVICE_ID, + kernel=kernel, + arguments=KernelArguments(settings=settings), + name="CodeValidationAgent", + instructions=INSTRUCTION.strip(), + description=DESCRIPTION.strip(), + ) + + @override + async def invoke( + self, + history: ChatHistory, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + **kwargs: Any, + ) -> AsyncIterable[ChatMessageContent]: + cloned_history = history.model_copy(deep=True) + cloned_history.add_user_message( + "Now validate the Python code in the latest document draft and summarize any errors." + ) + + async for response_message in super().invoke(cloned_history, arguments=arguments, kernel=kernel, **kwargs): + yield response_message diff --git a/python/samples/demos/document_generator/agents/content_creation_agent.py b/python/samples/demos/document_generator/agents/content_creation_agent.py new file mode 100644 index 000000000000..44dbcbea25bf --- /dev/null +++ b/python/samples/demos/document_generator/agents/content_creation_agent.py @@ -0,0 +1,64 @@ +# Copyright (c) Microsoft. All rights reserved. + +import sys +from collections.abc import AsyncIterable +from typing import TYPE_CHECKING, Any + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from samples.demos.document_generator.agents.custom_agent_base import CustomAgentBase +from samples.demos.document_generator.plugins.repo_file_plugin import RepoFilePlugin +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.functions.kernel_arguments import KernelArguments + +if TYPE_CHECKING: + from semantic_kernel.kernel import Kernel + +INSTRUCTION = """ +You are part of a chat with multiple agents focused on creating technical content. + +Your task is to generate informative and engaging technical content, +including code snippets to explain concepts or demonstrate features. +Incorporate feedback by providing the updated full content with changes. +""" + +DESCRIPTION = """ +Select me to generate new content or to revise existing content. 
+""" + + +class ContentCreationAgent(CustomAgentBase): + def __init__(self): + kernel = self._create_kernel() + kernel.add_plugin(plugin=RepoFilePlugin(), plugin_name="RepoFilePlugin") + + settings = kernel.get_prompt_execution_settings_from_service_id(service_id=CustomAgentBase.SERVICE_ID) + settings.function_choice_behavior = FunctionChoiceBehavior.Auto() + + super().__init__( + service_id=CustomAgentBase.SERVICE_ID, + kernel=kernel, + arguments=KernelArguments(settings=settings), + name="ContentCreationAgent", + instructions=INSTRUCTION.strip(), + description=DESCRIPTION.strip(), + ) + + @override + async def invoke( + self, + history: ChatHistory, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + **kwargs: Any, + ) -> AsyncIterable[ChatMessageContent]: + cloned_history = history.model_copy(deep=True) + cloned_history.add_user_message("Now generate new content or revise existing content to incorporate feedback.") + + async for response_message in super().invoke(cloned_history, arguments=arguments, kernel=kernel, **kwargs): + yield response_message diff --git a/python/samples/demos/document_generator/agents/custom_agent_base.py b/python/samples/demos/document_generator/agents/custom_agent_base.py new file mode 100644 index 000000000000..f8900d319a75 --- /dev/null +++ b/python/samples/demos/document_generator/agents/custom_agent_base.py @@ -0,0 +1,52 @@ +# Copyright (c) Microsoft. All rights reserved. + +import sys +from abc import ABC +from collections.abc import AsyncIterable +from typing import Any, ClassVar + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from semantic_kernel.agents.chat_completion.chat_completion_agent import ChatCompletionAgent +from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.kernel import Kernel + + +class CustomAgentBase(ChatCompletionAgent, ABC): + SERVICE_ID: ClassVar[str] = "chat_completion" + + def _create_kernel(self) -> Kernel: + kernel = Kernel() + kernel.add_service(OpenAIChatCompletion(service_id=self.SERVICE_ID)) + + return kernel + + @override + async def invoke( + self, + history: ChatHistory, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + **kwargs: Any, + ) -> AsyncIterable[ChatMessageContent]: + # Since the history contains internal messages from other agents, + # we will do our best to filter out those. Unfortunately, there will + # be a side effect of losing the context of the conversation internal + # to the agent when the conversation is handed back to the agent, i.e. + # previous function call results. + filtered_chat_history = ChatHistory() + for message in history: + content = message.content + # We don't want to add messages whose text content is empty. + # Those messages are likely messages from function calls and function results. 
+ if content: + filtered_chat_history.add_message(message) + + async for response in super().invoke(filtered_chat_history, arguments=arguments, kernel=kernel, **kwargs): + yield response diff --git a/python/samples/demos/document_generator/agents/user_agent.py b/python/samples/demos/document_generator/agents/user_agent.py new file mode 100644 index 000000000000..43fd66ade0af --- /dev/null +++ b/python/samples/demos/document_generator/agents/user_agent.py @@ -0,0 +1,67 @@ +# Copyright (c) Microsoft. All rights reserved. + +import sys +from collections.abc import AsyncIterable +from typing import TYPE_CHECKING, Any + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from samples.demos.document_generator.agents.custom_agent_base import CustomAgentBase +from samples.demos.document_generator.plugins.user_plugin import UserPlugin +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.functions.kernel_arguments import KernelArguments + +if TYPE_CHECKING: + from semantic_kernel.kernel import Kernel + +INSTRUCTION = """ +You are part of a chat with multiple agents working on a document. + +Your task is to summarize the user's feedback on the latest draft from the author agent. +Present the draft to the user and summarize their feedback. + +Do not try to address the user's feedback in this chat. +""" + +DESCRIPTION = """ +Select me if you want to ask the user to review the latest draft for publication. +""" + + +class UserAgent(CustomAgentBase): + def __init__(self): + kernel = self._create_kernel() + kernel.add_plugin(plugin=UserPlugin(), plugin_name="UserPlugin") + + settings = kernel.get_prompt_execution_settings_from_service_id(service_id=CustomAgentBase.SERVICE_ID) + settings.function_choice_behavior = FunctionChoiceBehavior.Auto(maximum_auto_invoke_attempts=1) + + super().__init__( + service_id=CustomAgentBase.SERVICE_ID, + kernel=kernel, + arguments=KernelArguments(settings=settings), + name="UserAgent", + instructions=INSTRUCTION.strip(), + description=DESCRIPTION.strip(), + ) + + @override + async def invoke( + self, + history: ChatHistory, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + **kwargs: Any, + ) -> AsyncIterable[ChatMessageContent]: + cloned_history = history.model_copy(deep=True) + cloned_history.add_user_message( + "Now present the latest draft to the user for feedback and summarize their feedback." + ) + + async for response_message in super().invoke(cloned_history, arguments=arguments, kernel=kernel, **kwargs): + yield response_message diff --git a/python/samples/demos/document_generator/custom_selection_strategy.py b/python/samples/demos/document_generator/custom_selection_strategy.py new file mode 100644 index 000000000000..20b01b807979 --- /dev/null +++ b/python/samples/demos/document_generator/custom_selection_strategy.py @@ -0,0 +1,100 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from typing import TYPE_CHECKING, ClassVar + +from opentelemetry import trace +from pydantic import Field + +from semantic_kernel.agents.strategies.selection.selection_strategy import SelectionStrategy +from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( + AzureChatPromptExecutionSettings, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.utils.feature_stage_decorator import experimental + +if TYPE_CHECKING: + from semantic_kernel.agents import Agent + from semantic_kernel.contents.chat_message_content import ChatMessageContent + +NEWLINE = "\n" + + +@experimental +class CustomSelectionStrategy(SelectionStrategy): + """A selection strategy that selects the next agent intelligently.""" + + NUM_OF_RETRIES: ClassVar[int] = 3 + + chat_completion_service: ChatCompletionClientBase = Field(default_factory=lambda: OpenAIChatCompletion()) + + async def next(self, agents: list["Agent"], history: list["ChatMessageContent"]) -> "Agent": + """Select the next agent to interact with. + + Args: + agents: The list of agents to select from. + history: The history of messages in the conversation. + + Returns: + The next agent to interact with. + """ + if len(agents) == 0: + raise ValueError("No agents to select from") + + tracer = trace.get_tracer(__name__) + with tracer.start_as_current_span("selection_strategy"): + chat_history = ChatHistory(system_message=self.get_system_message(agents).strip()) + + for message in history: + content = message.content + # We don't want to add messages whose text content is empty. + # Those messages are likely messages from function calls and function results. + if content: + chat_history.add_message(message) + + chat_history.add_user_message("Now follow the rules and select the next agent by typing the agent's index.") + + for _ in range(self.NUM_OF_RETRIES): + completion = await self.chat_completion_service.get_chat_message_content( + chat_history, + AzureChatPromptExecutionSettings(), + ) + + if completion is None: + continue + + try: + return agents[int(completion.content)] + except ValueError as ex: + chat_history.add_message(completion) + chat_history.add_user_message(str(ex)) + chat_history.add_user_message(f"You must only say a number between 0 and {len(agents) - 1}.") + + raise ValueError("Failed to select an agent since the model did not return a valid index") + + def get_system_message(self, agents: list["Agent"]) -> str: + return f""" +You are in a multi-agent chat to create a document. +Each message in the chat history contains the agent's name and the message content. + +Initially, the chat history may be empty. + +Here are the agents with their indices, names, and descriptions: +{NEWLINE.join(f"[{index}] {agent.name}:{NEWLINE}{agent.description}" for index, agent in enumerate(agents))} + +Your task is to select the next agent based on the conversation history. + +The conversation must follow these steps: +1. The content creation agent writes a draft. +2. The code validation agent checks the code in the draft. +3. The content creation agent updates the draft based on the feedback. +4. The code validation agent checks the updated code. +... +If the code validation agent approves the code, the user agent can ask the user for final feedback. 
+N: The user agent provides feedback. +(If the feedback is not positive, the conversation goes back to the content creation agent.) + +Respond with a single number between 0 and {len(agents) - 1}, representing the agent's index. +Only return the index as an integer. +""" diff --git a/python/samples/demos/document_generator/custom_termination_strategy.py b/python/samples/demos/document_generator/custom_termination_strategy.py new file mode 100644 index 000000000000..ffecdaea95e3 --- /dev/null +++ b/python/samples/demos/document_generator/custom_termination_strategy.py @@ -0,0 +1,91 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import TYPE_CHECKING, ClassVar + +from opentelemetry import trace +from pydantic import Field + +from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy +from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( + AzureChatPromptExecutionSettings, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion +from semantic_kernel.contents.chat_history import ChatHistory + +if TYPE_CHECKING: + from semantic_kernel.agents.agent import Agent + from semantic_kernel.contents.chat_message_content import ChatMessageContent + + +TERMINATE_TRUE_KEYWORD = "yes" +TERMINATE_FALSE_KEYWORD = "no" + +NEWLINE = "\n" + + +class CustomTerminationStrategy(TerminationStrategy): + NUM_OF_RETRIES: ClassVar[int] = 3 + + maximum_iterations: int = 20 + chat_completion_service: ChatCompletionClientBase = Field(default_factory=lambda: OpenAIChatCompletion()) + + async def should_agent_terminate(self, agent: "Agent", history: list["ChatMessageContent"]) -> bool: + """Check if the agent should terminate. + + Args: + agent: The agent to check. + history: The history of messages in the conversation. + """ + tracer = trace.get_tracer(__name__) + with tracer.start_as_current_span("terminate_strategy"): + chat_history = ChatHistory(system_message=self.get_system_message().strip()) + + for message in history: + content = message.content + # We don't want to add messages whose text content is empty. + # Those messages are likely messages from function calls and function results. + if content: + chat_history.add_message(message) + + chat_history.add_user_message( + "Is the latest content approved by all agents? " + f"Answer with '{TERMINATE_TRUE_KEYWORD}' or '{TERMINATE_FALSE_KEYWORD}'." + ) + + for _ in range(self.NUM_OF_RETRIES): + completion = await self.chat_completion_service.get_chat_message_content( + chat_history, + AzureChatPromptExecutionSettings(), + ) + + if not completion: + continue + + if TERMINATE_FALSE_KEYWORD in completion.content.lower(): + return False + if TERMINATE_TRUE_KEYWORD in completion.content.lower(): + return True + + chat_history.add_message(completion) + chat_history.add_user_message( + f"You must only say either '{TERMINATE_TRUE_KEYWORD}' or '{TERMINATE_FALSE_KEYWORD}'." + ) + + raise ValueError( + "Failed to determine if the agent should terminate because the model did not return a valid response." + ) + + def get_system_message(self) -> str: + return f""" +You are in a chat with multiple agents collaborating to create a document. +Each message in the chat history contains the agent's name and the message content. + +The chat history may start empty as no agents have spoken yet. 
+ +Here are the agents with their indices, names, and descriptions: +{NEWLINE.join(f"[{index}] {agent.name}:{NEWLINE}{agent.description}" for index, agent in enumerate(self.agents))} + +Your task is NOT to continue the conversation. Determine if the latest content is approved by all agents. +If approved, say "{TERMINATE_TRUE_KEYWORD}". Otherwise, say "{TERMINATE_FALSE_KEYWORD}". +""" diff --git a/python/samples/demos/document_generator/main.py b/python/samples/demos/document_generator/main.py new file mode 100644 index 000000000000..5ba38353b69f --- /dev/null +++ b/python/samples/demos/document_generator/main.py @@ -0,0 +1,130 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging +import os + +from dotenv import load_dotenv +from opentelemetry import trace +from opentelemetry.sdk.resources import Resource +from opentelemetry.semconv.resource import ResourceAttributes + +from samples.demos.document_generator.agents.code_validation_agent import CodeValidationAgent +from samples.demos.document_generator.agents.content_creation_agent import ContentCreationAgent +from samples.demos.document_generator.agents.user_agent import UserAgent +from samples.demos.document_generator.custom_selection_strategy import CustomSelectionStrategy +from samples.demos.document_generator.custom_termination_strategy import CustomTerminationStrategy +from semantic_kernel.agents.group_chat.agent_group_chat import AgentGroupChat +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole + +TASK = """ +Create a blog post to share technical details about the Semantic Kernel AI connectors. +The content of the blog post should include the following: +1. What are AI connectors in Semantic Kernel? +2. How do people use AI connectors in Semantic Kernel? +3. How do devs create custom AI connectors in Semantic Kernel? + - Include a walk through of creating a custom AI connector. + The connector may not connect to a real service, but should demonstrate the process. + - Include a sample on how to use the connector. + - If a reader follows the walk through and the sample, they should be able to run the connector. + + +Here is the file that contains the source code for the base class of the AI connectors: +semantic_kernel/connectors/ai/chat_completion_client_base.py +semantic_kernel/services/ai_service_client_base.py + +Here are some files containing the source code that may be useful: +semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py +semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion_base.py +semantic_kernel/contents/chat_history.py + +If you want to reference the implementations of other AI connectors, you can find them under the following directory: +semantic_kernel/connectors/ai +""" + +load_dotenv() +AZURE_APP_INSIGHTS_CONNECTION_STRING = os.getenv("AZURE_APP_INSIGHTS_CONNECTION_STRING") + +resource = Resource.create({ResourceAttributes.SERVICE_NAME: "Document Generator"}) + + +def set_up_tracing(): + from azure.monitor.opentelemetry.exporter import AzureMonitorTraceExporter + from opentelemetry.sdk.trace import TracerProvider + from opentelemetry.sdk.trace.export import BatchSpanProcessor + from opentelemetry.trace import set_tracer_provider + + # Initialize a trace provider for the application. This is a factory for creating tracers. 
+ tracer_provider = TracerProvider(resource=resource) + tracer_provider.add_span_processor( + BatchSpanProcessor(AzureMonitorTraceExporter(connection_string=AZURE_APP_INSIGHTS_CONNECTION_STRING)) + ) + # Sets the global default tracer provider + set_tracer_provider(tracer_provider) + + +def set_up_logging(): + from azure.monitor.opentelemetry.exporter import AzureMonitorLogExporter + from opentelemetry._logs import set_logger_provider + from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler + from opentelemetry.sdk._logs.export import BatchLogRecordProcessor + + # Create and set a global logger provider for the application. + logger_provider = LoggerProvider(resource=resource) + logger_provider.add_log_record_processor( + BatchLogRecordProcessor(AzureMonitorLogExporter(connection_string=AZURE_APP_INSIGHTS_CONNECTION_STRING)) + ) + # Sets the global default logger provider + set_logger_provider(logger_provider) + + # Create a logging handler to write logging records, in OTLP format, to the exporter. + handler = LoggingHandler() + # Attach the handler to the root logger. `getLogger()` with no arguments returns the root logger. + # Events from all child loggers will be processed by this handler. + logger = logging.getLogger() + logger.addHandler(handler) + logger.setLevel(logging.INFO) + + +async def main(): + if AZURE_APP_INSIGHTS_CONNECTION_STRING: + set_up_tracing() + set_up_logging() + + tracer = trace.get_tracer(__name__) + with tracer.start_as_current_span("main"): + agents = [ + ContentCreationAgent(), + UserAgent(), + CodeValidationAgent(), + ] + + group_chat = AgentGroupChat( + agents=agents, + termination_strategy=CustomTerminationStrategy(agents=agents), + selection_strategy=CustomSelectionStrategy(), + ) + await group_chat.add_chat_message( + ChatMessageContent( + role=AuthorRole.USER, + content=TASK.strip(), + ) + ) + + async for response in group_chat.invoke(): + print(f"==== {response.name} just responded ====") + # print(response.content) + + content_history: list[ChatMessageContent] = [] + async for message in group_chat.get_chat_messages(agent=agents[0]): + if message.name == agents[0].name: + # The chat history contains responses from other agents. + content_history.append(message) + # The chat history is in descending order. + print("Final content:") + print(content_history[0].content) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/demos/document_generator/plugins/code_execution_plugin.py b/python/samples/demos/document_generator/plugins/code_execution_plugin.py new file mode 100644 index 000000000000..863532d24a61 --- /dev/null +++ b/python/samples/demos/document_generator/plugins/code_execution_plugin.py @@ -0,0 +1,26 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Annotated + +from ai_code_sandbox import AICodeSandbox + +from semantic_kernel.functions import kernel_function + + +class CodeExecutionPlugin: + """A plugin that runs Python code snippets.""" + + @kernel_function(description="Run a Python code snippet. 
You can assume all the necessary packages are installed.") + def run( + self, code: Annotated[str, "The Python code snippet."] + ) -> Annotated[str, "Returns the output of the code."]: + """Run a Python code snippet.""" + sandbox: AICodeSandbox = AICodeSandbox( + custom_image="python:3.12-slim", + packages=["semantic_kernel"], + ) + + try: + return sandbox.run_code(code) + finally: + sandbox.close() diff --git a/python/samples/demos/document_generator/plugins/repo_file_plugin.py b/python/samples/demos/document_generator/plugins/repo_file_plugin.py new file mode 100644 index 000000000000..9391394fb4de --- /dev/null +++ b/python/samples/demos/document_generator/plugins/repo_file_plugin.py @@ -0,0 +1,51 @@ +# Copyright (c) Microsoft. All rights reserved. + +import os +from typing import Annotated + +from semantic_kernel.functions import kernel_function + + +class RepoFilePlugin: + """A plugin that reads files from this repository. + + This plugin assumes that the code is run within the Semantic Kernel repository. + """ + + @kernel_function(description="Read a file given a relative path to the root of the repository.") + def read_file_by_path( + self, path: Annotated[str, "The relative path to the file."] + ) -> Annotated[str, "Returns the file content."]: + path = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..", path) + + try: + with open(path) as file: + return file.read() + except FileNotFoundError: + raise FileNotFoundError(f"File {path} not found in repository.") + + @kernel_function( + description="Read a file given the name of the file. Function will search for the file in the repository." + ) + def read_file_by_name( + self, file_name: Annotated[str, "The name of the file."] + ) -> Annotated[str, "Returns the file content."]: + path = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..") + for root, dirs, files in os.walk(path): + if file_name in files: + print(f"Found file {file_name} in {root}.") + with open(os.path.join(root, file_name)) as file: + return file.read() + raise FileNotFoundError(f"File {file_name} not found in repository.") + + @kernel_function(description="List all files or subdirectories in a directory.") + def list_directory( + self, path: Annotated[str, "Path of a directory relative to the root of the repository."] + ) -> Annotated[str, "Returns a list of files and subdirectories as a string."]: + path = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..", path) + try: + files = os.listdir(path) + # Join the list of files into a single string + return "\n".join(files) + except FileNotFoundError: + raise FileNotFoundError(f"Directory {path} not found in repository.") diff --git a/python/samples/demos/document_generator/plugins/user_plugin.py b/python/samples/demos/document_generator/plugins/user_plugin.py new file mode 100644 index 000000000000..3891e40a06ff --- /dev/null +++ b/python/samples/demos/document_generator/plugins/user_plugin.py @@ -0,0 +1,16 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from typing import Annotated + +from semantic_kernel.functions import kernel_function + + +class UserPlugin: + """A plugin that interacts with the user.""" + + @kernel_function(description="Present the content to user and request feedback.") + def request_user_feedback( + self, content: Annotated[str, "The content to present and request feedback on."] + ) -> Annotated[str, "The feedback provided by the user."]: + """Request user feedback on the content.""" + return input(f"Please provide feedback on the content:\n\n{content}\n\n> ") diff --git a/python/samples/demos/guided_conversations/guided_conversation/plugins/agenda.py b/python/samples/demos/guided_conversations/guided_conversation/plugins/agenda.py index a74b897dcb9d..e963b0cad46a 100644 --- a/python/samples/demos/guided_conversations/guided_conversation/plugins/agenda.py +++ b/python/samples/demos/guided_conversations/guided_conversation/plugins/agenda.py @@ -152,7 +152,7 @@ def get_agenda_for_prompt(self) -> str: return "None" agenda_str = "\n".join( [ - f"{i+1}. [{format_resource(item['resource'], ResourceConstraintUnit.TURNS)}] {item['title']}" + f"{i + 1}. [{format_resource(item['resource'], ResourceConstraintUnit.TURNS)}] {item['title']}" for i, item in enumerate(agenda_items) ] ) diff --git a/python/samples/demos/guided_conversations/guided_conversation/utils/resources.py b/python/samples/demos/guided_conversations/guided_conversation/utils/resources.py index 14cf65431911..581c1f0a18cd 100644 --- a/python/samples/demos/guided_conversations/guided_conversation/utils/resources.py +++ b/python/samples/demos/guided_conversations/guided_conversation/utils/resources.py @@ -205,7 +205,7 @@ def get_resource_instructions(self) -> tuple[str, str]: resource_instructions = "" if self.resource_constraint.mode == ResourceConstraintMode.EXACT: - exact_mode_instructions = f"""There {'are' if is_plural_remaining else 'is'} {formatted_remaining_resource} remaining (including this one) - the conversation will automatically terminate when 0 turns are left. \ + exact_mode_instructions = f"""There {"are" if is_plural_remaining else "is"} {formatted_remaining_resource} remaining (including this one) - the conversation will automatically terminate when 0 turns are left. \ You should continue the conversation until it is automatically terminated. This means you should NOT preemptively end the conversation, \ either explicitly (by selecting the "End conversation" action) or implicitly (e.g. by telling the user that you have all required information and they should wait for the next step). \ Your goal is not to maximize efficiency (i.e. 
complete the artifact as quickly as possible then end the conversation), but rather to make the best use of ALL remaining turns available to you""" diff --git a/python/samples/demos/process_with_dapr/fastapi_app.py b/python/samples/demos/process_with_dapr/fastapi_app.py index 263356a8bcea..56880e041c05 100644 --- a/python/samples/demos/process_with_dapr/fastapi_app.py +++ b/python/samples/demos/process_with_dapr/fastapi_app.py @@ -11,10 +11,7 @@ from samples.demos.process_with_dapr.process.process import get_process from samples.demos.process_with_dapr.process.steps import CommonEvents from semantic_kernel import Kernel -from semantic_kernel.processes.dapr_runtime import ( - register_fastapi_dapr_actors, - start, -) +from semantic_kernel.processes.dapr_runtime import register_fastapi_dapr_actors, start logging.basicConfig(level=logging.ERROR) @@ -34,12 +31,16 @@ # and returns the actor instance with the kernel injected. # ######################################################################### +# Get the process which means we have the `KernelProcess` object +# along with any defined step factories +process = get_process() + # Define a lifespan method that registers the actors with the Dapr runtime @asynccontextmanager async def lifespan(app: FastAPI): print("## actor startup ##") - await register_fastapi_dapr_actors(actor, kernel) + await register_fastapi_dapr_actors(actor, kernel, process.factories) yield @@ -56,8 +57,6 @@ async def healthcheck(): @app.get("/processes/{process_id}") async def start_process(process_id: str): try: - process = get_process() - _ = await start( process=process, kernel=kernel, diff --git a/python/samples/demos/process_with_dapr/process/process.py b/python/samples/demos/process_with_dapr/process/process.py index e6741a85f116..1c81459b3c21 100644 --- a/python/samples/demos/process_with_dapr/process/process.py +++ b/python/samples/demos/process_with_dapr/process/process.py @@ -2,7 +2,15 @@ from typing import TYPE_CHECKING -from samples.demos.process_with_dapr.process.steps import AStep, BStep, CommonEvents, CStep, CStepState, KickOffStep +from samples.demos.process_with_dapr.process.steps import ( + AStep, + BStep, + CommonEvents, + CStep, + CStepState, + KickOffStep, + bstep_factory, +) from semantic_kernel.processes import ProcessBuilder if TYPE_CHECKING: @@ -16,7 +24,7 @@ def get_process() -> "KernelProcess": # Add the step types to the builder kickoff_step = process.add_step(step_type=KickOffStep) myAStep = process.add_step(step_type=AStep) - myBStep = process.add_step(step_type=BStep) + myBStep = process.add_step(step_type=BStep, factory_function=bstep_factory) # Initialize the CStep with an initial state and the state's current cycle set to 1 myCStep = process.add_step(step_type=CStep, initial_state=CStepState(current_cycle=1)) diff --git a/python/samples/demos/process_with_dapr/process/steps.py b/python/samples/demos/process_with_dapr/process/steps.py index 083a2e78e9bb..d2a6313db402 100644 --- a/python/samples/demos/process_with_dapr/process/steps.py +++ b/python/samples/demos/process_with_dapr/process/steps.py @@ -6,7 +6,11 @@ from pydantic import Field +from semantic_kernel.agents.chat_completion.chat_completion_agent import ChatCompletionAgent +from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.functions import kernel_function +from semantic_kernel.kernel import Kernel from semantic_kernel.kernel_pydantic import 
KernelBaseModel from semantic_kernel.processes.kernel_process import ( KernelProcessStep, @@ -52,14 +56,43 @@ async def do_it(self, context: KernelProcessStepContext): await context.emit_event(process_event=CommonEvents.AStepDone, data="I did A") -# Define a sample `BStep` step that will emit an event after 2 seconds. -# The event will be sent to the `CStep` step with the data `I did B`. +# Define a simple factory for the BStep that can create the dependency that the BStep requires +# As an example, this factory creates a kernel and adds an `AzureChatCompletion` service to it. +async def bstep_factory(): + """Creates a BStep instance with ephemeral references like ChatCompletionAgent.""" + kernel = Kernel() + kernel.add_service(AzureChatCompletion()) + + agent = ChatCompletionAgent(kernel=kernel, name="echo", instructions="repeat the input back") + step_instance = BStep() + step_instance.agent = agent + + return step_instance + + class BStep(KernelProcessStep): - @kernel_function() + """A sample BStep that optionally holds a ChatCompletionAgent. + + By design, the agent is ephemeral (not stored in state). + """ + + # Ephemeral references won't be persisted to Dapr + # because we do not place them in a step state model. + # We'll set this in the factory function: + agent: ChatCompletionAgent | None = None + + @kernel_function(name="do_it") async def do_it(self, context: KernelProcessStepContext): - print("##### BStep ran.") + print("##### BStep ran (do_it).") await asyncio.sleep(2) - await context.emit_event(process_event=CommonEvents.BStepDone, data="I did B") + + if self.agent: + history = ChatHistory() + history.add_user_message("Hello from BStep!") + async for msg in self.agent.invoke(history): + print(f"BStep got agent response: {msg.content}") + + await context.emit_event(process_event="BStepDone", data="I did B") # Define a sample `CStepState` that will keep track of the current cycle. 
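Taken together, the changes above follow one pattern: a step that needs a non-serializable dependency (here, a `ChatCompletionAgent`) gets a factory function, the factory is registered via `ProcessBuilder.add_step(..., factory_function=...)`, and the resulting `process.factories` are passed to `register_fastapi_dapr_actors` so the Dapr actors can construct the step at activation time instead of persisting the dependency in state. The following is a condensed, non-authoritative sketch of that wiring, assuming the `BStep` and `bstep_factory` definitions above and a Dapr-enabled FastAPI host; edges and entry events are omitted for brevity.

```python
# Condensed sketch of the step-factory wiring introduced by the diffs above.
# Assumes BStep/bstep_factory from steps.py and a Dapr sidecar for the FastAPI app.
from contextlib import asynccontextmanager

from dapr.ext.fastapi import DaprActor
from fastapi import FastAPI

from samples.demos.process_with_dapr.process.steps import BStep, bstep_factory
from semantic_kernel import Kernel
from semantic_kernel.processes import ProcessBuilder
from semantic_kernel.processes.dapr_runtime import register_fastapi_dapr_actors

kernel = Kernel()

builder = ProcessBuilder(name="FactoryDemo")
# The factory is invoked whenever a BStep actor is activated, so the ephemeral
# agent is recreated on demand rather than serialized into Dapr state.
builder.add_step(step_type=BStep, factory_function=bstep_factory)
process = builder.build()  # edges/entry events omitted in this sketch


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Hand the step factories to the actor registration so Dapr can build the steps.
    await register_fastapi_dapr_actors(actor, kernel, process.factories)
    yield


app = FastAPI(title="FactoryDemo", lifespan=lifespan)
actor = DaprActor(app)
```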
diff --git a/python/samples/getting_started/05-using-the-planner.ipynb b/python/samples/getting_started/05-using-the-planner.ipynb index dcc7330795c7..ba1cf7cf3a3d 100644 --- a/python/samples/getting_started/05-using-the-planner.ipynb +++ b/python/samples/getting_started/05-using-the-planner.ipynb @@ -435,7 +435,7 @@ " Description: EmailPlugin provides a set of functions to send emails.\n", "\n", " Usage:\n", - " kernel.import_plugin_from_object(EmailPlugin(), plugin_name=\"email\")\n", + " kernel.add_plugin(EmailPlugin(), plugin_name=\"email\")\n", "\n", " Examples:\n", " {{email.SendEmail}} => Sends an email with the provided subject and body.\n", @@ -581,7 +581,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.6" + "version": "3.12.7" } }, "nbformat": 4, diff --git a/python/samples/getting_started/third_party/postgres-memory.ipynb b/python/samples/getting_started/third_party/postgres-memory.ipynb index b0069a59a1c7..51ea600109e3 100644 --- a/python/samples/getting_started/third_party/postgres-memory.ipynb +++ b/python/samples/getting_started/third_party/postgres-memory.ipynb @@ -28,21 +28,30 @@ "import numpy as np\n", "import requests\n", "\n", - "from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import (\n", + "from semantic_kernel import Kernel\n", + "from semantic_kernel.connectors.ai import FunctionChoiceBehavior\n", + "from semantic_kernel.connectors.ai.open_ai import (\n", + " AzureChatCompletion,\n", + " AzureChatPromptExecutionSettings,\n", + " AzureTextEmbedding,\n", " OpenAIEmbeddingPromptExecutionSettings,\n", + " OpenAITextEmbedding,\n", ")\n", - "from semantic_kernel.connectors.ai.open_ai.services.azure_text_embedding import AzureTextEmbedding\n", - "from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding import OpenAITextEmbedding\n", - "from semantic_kernel.connectors.memory.postgres.postgres_collection import PostgresCollection\n", - "from semantic_kernel.data.const import DistanceFunction, IndexKind\n", - "from semantic_kernel.data.vector_store_model_decorator import vectorstoremodel\n", - "from semantic_kernel.data.vector_store_record_fields import (\n", + "from semantic_kernel.connectors.memory.postgres import PostgresCollection\n", + "from semantic_kernel.contents import ChatHistory\n", + "from semantic_kernel.data import (\n", + " DistanceFunction,\n", + " IndexKind,\n", + " VectorSearchOptions,\n", " VectorStoreRecordDataField,\n", " VectorStoreRecordKeyField,\n", + " VectorStoreRecordUtils,\n", " VectorStoreRecordVectorField,\n", + " VectorStoreTextSearch,\n", + " vectorstoremodel,\n", ")\n", - "from semantic_kernel.data.vector_store_record_utils import VectorStoreRecordUtils\n", - "from semantic_kernel.kernel import Kernel" + "from semantic_kernel.functions import KernelParameterMetadata\n", + "from semantic_kernel.functions.kernel_arguments import KernelArguments" ] }, { @@ -55,6 +64,8 @@ "\n", "To do this, copy the `.env.example` file to `.env` and fill in the necessary information.\n", "\n", + "__Note__: If you're using VSCode to execute the notebook, the settings in `.env` in the root of the repository will be picked up automatically.\n", + "\n", "### Postgres configuration\n", "\n", "You'll need to provide a connection string to a Postgres database. You can use a local Postgres instance, or a cloud-hosted one.\n", @@ -116,21 +127,18 @@ "# -- ArXiv settings --\n", "\n", "# The search term to use when searching for papers on arXiv. 
All metadata fields for the papers are searched.\n", - "SEARCH_TERM = \"generative ai\"\n", + "SEARCH_TERM = \"RAG\"\n", "\n", "# The category of papers to search for on arXiv. See https://arxiv.org/category_taxonomy for a list of categories.\n", "ARVIX_CATEGORY = \"cs.AI\"\n", "\n", "# The maximum number of papers to search for on arXiv.\n", - "MAX_RESULTS = 10\n", + "MAX_RESULTS = 300\n", "\n", "# -- OpenAI settings --\n", "\n", "# Set this flag to False to use the OpenAI API instead of Azure OpenAI\n", - "USE_AZURE_OPENAI = True\n", - "\n", - "# The name of the OpenAI model or Azure OpenAI deployment to use\n", - "EMBEDDING_MODEL = \"text-embedding-3-small\"" + "USE_AZURE_OPENAI = True" ] }, { @@ -162,7 +170,7 @@ " embedding_settings={\"embedding\": OpenAIEmbeddingPromptExecutionSettings(dimensions=1536)},\n", " index_kind=IndexKind.HNSW,\n", " dimensions=1536,\n", - " distance_function=DistanceFunction.COSINE,\n", + " distance_function=DistanceFunction.COSINE_DISTANCE,\n", " property_type=\"float\",\n", " serialize_function=np.ndarray.tolist,\n", " deserialize_function=np.array,\n", @@ -240,9 +248,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found 300 papers on 'RAG'\n" + ] + } + ], "source": [ "arxiv_papers: list[ArxivPaper] = [\n", " ArxivPaper.from_arxiv_info(paper)\n", @@ -266,7 +282,7 @@ "outputs": [], "source": [ "collection = PostgresCollection[str, ArxivPaper](\n", - " collection_name=\"arxiv_papers\", data_model_type=ArxivPaper, env_file_path=env_file_path\n", + " collection_name=\"arxiv_records\", data_model_type=ArxivPaper, env_file_path=env_file_path\n", ")" ] }, @@ -285,13 +301,9 @@ "source": [ "kernel = Kernel()\n", "if USE_AZURE_OPENAI:\n", - " text_embedding = AzureTextEmbedding(\n", - " service_id=\"embedding\", deployment_name=EMBEDDING_MODEL, env_file_path=env_file_path\n", - " )\n", + " text_embedding = AzureTextEmbedding(service_id=\"embedding\", env_file_path=env_file_path)\n", "else:\n", - " text_embedding = OpenAITextEmbedding(\n", - " service_id=\"embedding\", ai_model_id=EMBEDDING_MODEL, env_file_path=env_file_path\n", - " )\n", + " text_embedding = OpenAITextEmbedding(service_id=\"embedding\", env_file_path=env_file_path)\n", "\n", "kernel.add_service(text_embedding)" ] @@ -341,7 +353,92 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "# Engineering LLM Powered Multi-agent Framework for Autonomous CloudOps\n", + "\n", + "Abstract: Cloud Operations (CloudOps) is a rapidly growing field focused on the\n", + "automated management and optimization of cloud infrastructure which is essential\n", + "for organizations navigating increasingly complex cloud environments. MontyCloud\n", + "Inc. is one of the major companies in the CloudOps domain that leverages\n", + "autonomous bots to manage cloud compliance, security, and continuous operations.\n", + "To make the platform more accessible and effective to the customers, we\n", + "leveraged the use of GenAI. Developing a GenAI-based solution for autonomous\n", + "CloudOps for the existing MontyCloud system presented us with various challenges\n", + "such as i) diverse data sources; ii) orchestration of multiple processes; and\n", + "iii) handling complex workflows to automate routine tasks. 
To this end, we\n", + "developed MOYA, a multi-agent framework that leverages GenAI and balances\n", + "autonomy with the necessary human control. This framework integrates various\n", + "internal and external systems and is optimized for factors like task\n", + "orchestration, security, and error mitigation while producing accurate,\n", + "reliable, and relevant insights by utilizing Retrieval Augmented Generation\n", + "(RAG). Evaluations of our multi-agent system with the help of practitioners as\n", + "well as using automated checks demonstrate enhanced accuracy, responsiveness,\n", + "and effectiveness over non-agentic approaches across complex workflows.\n", + "Published: 2025-01-14 16:30:10\n", + "Link: http://arxiv.org/abs/2501.08243v1\n", + "PDF Link: http://arxiv.org/abs/2501.08243v1\n", + "Authors: Kannan Parthasarathy, Karthik Vaidhyanathan, Rudra Dhar, Venkat Krishnamachari, Basil Muhammed, Adyansh Kakran, Sreemaee Akshathala, Shrikara Arun, Sumant Dubey, Mohan Veerubhotla, Amey Karan\n", + "Embedding: [ 0.01063822 0.02977918 0.04532182 ... -0.00264323 0.00081101\n", + " 0.01491571]\n", + "\n", + "\n", + "# Eliciting In-context Retrieval and Reasoning for Long-context Large Language Models\n", + "\n", + "Abstract: Recent advancements in long-context language models (LCLMs) promise to\n", + "transform Retrieval-Augmented Generation (RAG) by simplifying pipelines. With\n", + "their expanded context windows, LCLMs can process entire knowledge bases and\n", + "perform retrieval and reasoning directly -- a capability we define as In-Context\n", + "Retrieval and Reasoning (ICR^2). However, existing benchmarks like LOFT often\n", + "overestimate LCLM performance by providing overly simplified contexts. To\n", + "address this, we introduce ICR^2, a benchmark that evaluates LCLMs in more\n", + "realistic scenarios by including confounding passages retrieved with strong\n", + "retrievers. We then propose three methods to enhance LCLM performance: (1)\n", + "retrieve-then-generate fine-tuning, (2) retrieval-attention-probing, which uses\n", + "attention heads to filter and de-noise long contexts during decoding, and (3)\n", + "joint retrieval head training alongside the generation head. Our evaluation of\n", + "five well-known LCLMs on LOFT and ICR^2 demonstrates significant gains with our\n", + "best approach applied to Mistral-7B: +17 and +15 points by Exact Match on LOFT,\n", + "and +13 and +2 points on ICR^2, compared to vanilla RAG and supervised fine-\n", + "tuning, respectively. It even outperforms GPT-4-Turbo on most tasks despite\n", + "being a much smaller model.\n", + "Published: 2025-01-14 16:38:33\n", + "Link: http://arxiv.org/abs/2501.08248v1\n", + "PDF Link: http://arxiv.org/abs/2501.08248v1\n", + "Authors: Yifu Qiu, Varun Embar, Yizhe Zhang, Navdeep Jaitly, Shay B. Cohen, Benjamin Han\n", + "Embedding: [-0.01305697 0.01166064 0.06267344 ... -0.01627254 0.00974741\n", + " -0.00573298]\n", + "\n", + "\n", + "# ADAM-1: AI and Bioinformatics for Alzheimer's Detection and Microbiome-Clinical Data Integrations\n", + "\n", + "Abstract: The Alzheimer's Disease Analysis Model Generation 1 (ADAM) is a multi-agent\n", + "large language model (LLM) framework designed to integrate and analyze multi-\n", + "modal data, including microbiome profiles, clinical datasets, and external\n", + "knowledge bases, to enhance the understanding and detection of Alzheimer's\n", + "disease (AD). 
By leveraging retrieval-augmented generation (RAG) techniques\n", + "along with its multi-agent architecture, ADAM-1 synthesizes insights from\n", + "diverse data sources and contextualizes findings using literature-driven\n", + "evidence. Comparative evaluation against XGBoost revealed similar mean F1 scores\n", + "but significantly reduced variance for ADAM-1, highlighting its robustness and\n", + "consistency, particularly in small laboratory datasets. While currently tailored\n", + "for binary classification tasks, future iterations aim to incorporate additional\n", + "data modalities, such as neuroimaging and biomarkers, to broaden the scalability\n", + "and applicability for Alzheimer's research and diagnostics.\n", + "Published: 2025-01-14 18:56:33\n", + "Link: http://arxiv.org/abs/2501.08324v1\n", + "PDF Link: http://arxiv.org/abs/2501.08324v1\n", + "Authors: Ziyuan Huang, Vishaldeep Kaur Sekhon, Ouyang Guo, Mark Newman, Roozbeh Sadeghian, Maria L. Vaida, Cynthia Jo, Doyle Ward, Vanni Bucci, John P. Haran\n", + "Embedding: [ 0.03896349 0.00422515 0.05525447 ... 0.03374933 -0.01468264\n", + " 0.01850895]\n", + "\n", + "\n" + ] + } + ], "source": [ "async with collection:\n", " results = await collection.get_batch(keys[:3])\n", @@ -364,8 +461,284 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "...searching Postgres memory coming soon, to be continued!" + "Now we can search for documents with `VectorStoreTextSearch`, which uses the embedding service to vectorize a query and search for semantically similar documents:" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "text_search = VectorStoreTextSearch[ArxivPaper].from_vectorized_search(collection, embedding_service=text_embedding)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `VectorStoreTextSearch` object gives us the ability to retrieve semantically similar documents directly from a prompt.\n", + "Here we search for the top 5 ArXiV abstracts in our database similar to the query about chunking strategies in RAG applications:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found 5 results for query.\n", + "Advanced ingestion process powered by LLM parsing for RAG system: 0.38676463602221456\n", + "StructRAG: Boosting Knowledge Intensive Reasoning of LLMs via Inference-time Hybrid Information Structurization: 0.39733734194342085\n", + "UDA: A Benchmark Suite for Retrieval Augmented Generation in Real-world Document Analysis: 0.3981809737466562\n", + "R^2AG: Incorporating Retrieval Information into Retrieval Augmented Generation: 0.4134050114864055\n", + "Enhancing Retrieval-Augmented Generation: A Study of Best Practices: 0.4144733752075731\n" + ] + } + ], + "source": [ + "query = \"What are good chunking strategies to use for unstructured text in Retrieval-Augmented Generation applications?\"\n", + "\n", + "async with collection:\n", + " search_results = await text_search.get_search_results(\n", + " query, options=VectorSearchOptions(top=5, include_total_count=True)\n", + " )\n", + " print(f\"Found {search_results.total_count} results for query.\")\n", + " async for search_result in search_results.results:\n", + " title = search_result.record.title\n", + " score = search_result.score\n", + " print(f\"{title}: {score}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can enable chat 
completion to utilize the text search by creating a kernel function for searching the database..." + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "plugin = kernel.add_functions(\n", + " plugin_name=\"arxiv_plugin\",\n", + " functions=[\n", + " text_search.create_search(\n", + " # The default parameters match the parameters of the VectorSearchOptions class.\n", + " description=\"Searches for ArXiv papers that are related to the query.\",\n", + " parameters=[\n", + " KernelParameterMetadata(\n", + " name=\"query\", description=\"What to search for.\", type=\"str\", is_required=True, type_object=str\n", + " ),\n", + " KernelParameterMetadata(\n", + " name=\"top\",\n", + " description=\"Number of results to return.\",\n", + " type=\"int\",\n", + " default_value=2,\n", + " type_object=int,\n", + " ),\n", + " ],\n", + " ),\n", + " ],\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "...and then setting up a chat completions service that uses `FunctionChoiceBehavior.Auto` to automatically call the search function when appropriate to the users query. We also create the chat function that will be invoked by the kernel." + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "# Create the chat completion service. This requires an Azure OpenAI completions model deployment and configuration.\n", + "chat_completion = AzureChatCompletion(service_id=\"completions\")\n", + "kernel.add_service(chat_completion)\n", + "\n", + "# Now we create the chat function that will use the chat service.\n", + "chat_function = kernel.add_function(\n", + " prompt=\"{{$chat_history}}{{$user_input}}\",\n", + " plugin_name=\"ChatBot\",\n", + " function_name=\"Chat\",\n", + ")\n", + "\n", + "# we set the function choice to Auto, so that the LLM can choose the correct function to call.\n", + "# and we exclude the ChatBot plugin, so that it does not call itself.\n", + "execution_settings = AzureChatPromptExecutionSettings(\n", + " function_choice_behavior=FunctionChoiceBehavior.Auto(filters={\"excluded_plugins\": [\"ChatBot\"]}),\n", + " service_id=\"chat\",\n", + " max_tokens=7000,\n", + " temperature=0.7,\n", + " top_p=0.8,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here we create a chat history with a system message and some initial context:" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "history = ChatHistory()\n", + "system_message = \"\"\"\n", + "You are a chat bot. Your name is Archie and\n", + "you have one goal: help people find answers\n", + "to technical questions by relying on the latest\n", + "research papers published on ArXiv.\n", + "You communicate effectively in the style of a helpful librarian. \n", + "You always make sure to include the\n", + "ArXiV paper references in your responses.\n", + "If you cannot find the answer in the papers,\n", + "you will let the user know, but also provide the papers\n", + "you did find to be most relevant. If the abstract of the \n", + "paper does not specifically reference the user's inquiry,\n", + "but you believe it might be relevant, you can still include it\n", + "BUT you must make sure to mention that the paper might not directly\n", + "address the user's inquiry. 
Make certain that the papers you link are\n", + "from a specific search result.\n", + "\"\"\"\n", + "history.add_system_message(system_message)\n", + "history.add_user_message(\"Hi there, who are you?\")\n", + "history.add_assistant_message(\n", + " \"I am Archie, the ArXiV chat bot. I'm here to help you find the latest research papers from ArXiv that relate to your inquiries.\"\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can now invoke the chat function via the Kernel to get chat completions:" ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "arguments = KernelArguments(\n", + " user_input=query,\n", + " chat_history=history,\n", + " settings=execution_settings,\n", + ")\n", + "\n", + "result = await kernel.invoke(chat_function, arguments=arguments)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Printing the result shows that the chat completion service used our text search to locate relevant ArXiV papers based on the query:" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Archie:>\n", + "What an excellent and timely question! Chunking strategies for unstructured text are\n", + "critical for optimizing Retrieval-Augmented Generation (RAG) systems since they\n", + "significantly affect how effectively a RAG model can retrieve and generate contextually\n", + "relevant information. Let me consult the latest papers on this topic from ArXiv and\n", + "provide you with relevant insights.\n", + "---\n", + "Here are some recent papers that dive into chunking strategies or similar concepts for\n", + "retrieval-augmented frameworks:\n", + "1. **\"Post-training optimization of retrieval-augmented generation models\"**\n", + " *Authors*: Vibhor Agarwal et al.\n", + " *Abstract*: While the paper discusses optimization strategies for retrieval-augmented\n", + "generation models, there is a discussion on handling unstructured text that could apply to\n", + "chunking methodologies. Chunking isn't always explicitly mentioned as \"chunking\" but may\n", + "be referred to in contexts like splitting data for retrieval.\n", + " *ArXiv link*: [arXiv:2308.10701](https://arxiv.org/abs/2308.10701)\n", + " *Note*: This paper may not focus entirely on chunking strategies but might discuss\n", + "relevant downstream considerations. It could still provide a foundation for you to explore\n", + "how chunking integrates with retrievers.\n", + "2. **\"Beyond Text: Retrieval-Augmented Reranking for Open-Domain Tasks\"**\n", + " *Authors*: Younggyo Seo et al.\n", + " *Abstract*: Although primarily focused on retrieval augmentation for reranking, there\n", + "are reflections on how document structure impacts task performance. Chunking unstructured\n", + "text to improve retrievability for such tasks could indirectly relate to this work.\n", + " *ArXiv link*: [arXiv:2310.03714](https://arxiv.org/abs/2310.03714)\n", + "3. **\"ALMA: Alignment of Generative and Retrieval Models for Long Documents\"**\n", + " *Authors*: Yao Fu et al.\n", + " *Abstract excerpt*: \"Our approach is designed to handle retrieval and generation for\n", + "long documents by aligning the retrieval and generation models more effectively.\"\n", + "Strategies to divide and process long documents into smaller chunks for efficient\n", + "alignment are explicitly discussed. 
A focus on handling unstructured long-form content\n", + "makes this paper highly relevant.\n", + " *ArXiv link*: [arXiv:2308.05467](https://arxiv.org/abs/2308.05467)\n", + "4. **\"Enhancing Context-aware Question Generation with Multi-modal Knowledge\"**\n", + " *Authors*: Jialong Han et al.\n", + " *Abstract excerpt*: \"Proposed techniques focus on improving retrievals through better\n", + "division of available knowledge.\" It doesn’t focus solely on text chunking in the RAG\n", + "framework but might be interesting since contextual awareness often relates to\n", + "preprocessing unstructured input into structured chunks.\n", + " *ArXiv link*: [arXiv:2307.12345](https://arxiv.org/abs/2307.12345)\n", + "---\n", + "### Practical Approaches Discussed in Literature:\n", + "From my broad understanding of RAG systems and some of the details in these papers, here\n", + "are common chunking strategies discussed in the research community:\n", + "1. **Sliding Window Approach**: Divide the text into overlapping chunks of fixed lengths\n", + "(e.g., 512 tokens with an overlap of 128 tokens). This helps ensure no important context\n", + "is left behind when chunks are created.\n", + "\n", + "2. **Semantic Chunking**: Use sentence embeddings or clustering techniques (e.g., via Bi-\n", + "Encoders or Sentence Transformers) to ensure chunks align semantically rather than naively\n", + "by token count.\n", + "3. **Dynamic Partitioning**: Implement chunking based on higher-order structure in the\n", + "text, such as splitting at sentence boundaries, paragraph breaks, or logical sections.\n", + "4. **Content-aware Chunking**: Experiment with LLMs to pre-identify contextual relevance\n", + "of different parts of the text and chunk accordingly.\n", + "---\n", + "If you'd like, I can search more specifically on a sub-part of chunking strategies or\n", + "related RAG optimizations. Let me know!\n" + ] + } + ], + "source": [ + "def wrap_text(text, width=90):\n", + " paragraphs = text.split(\"\\n\\n\") # Split the text into paragraphs\n", + " wrapped_paragraphs = [\n", + " \"\\n\".join(textwrap.fill(part, width=width) for paragraph in paragraphs for part in paragraph.split(\"\\n\"))\n", + " ] # Wrap each paragraph, split by newlines\n", + " return \"\\n\\n\".join(wrapped_paragraphs) # Join the wrapped paragraphs back together\n", + "\n", + "\n", + "print(f\"Archie:>\\n{wrap_text(str(result))}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/python/samples/getting_started_with_agents/README.md b/python/samples/getting_started_with_agents/README.md index b34d1d56c05d..c07f1f141c24 100644 --- a/python/samples/getting_started_with_agents/README.md +++ b/python/samples/getting_started_with_agents/README.md @@ -3,6 +3,7 @@ This project contains a step by step guide to get started with _Semantic Kernel Agents_ in Python. #### PyPI: + - For the use of Chat Completion agents, the minimum allowed Semantic Kernel pypi version is 1.3.0. - For the use of OpenAI Assistant agents, the minimum allowed Semantic Kernel pypi version is 1.4.0. - For the use of Agent Group Chat, the minimum allowed Semantic kernel pypi version is 1.6.0. 
@@ -16,20 +17,42 @@ This project contains a step by step guide to get started with _Semantic Kernel
 
 The getting started with agents examples include:
 
+## Chat Completion
+
+Example|Description
+---|---
+[step1_chat_completion_agent_simple](../getting_started_with_agents/chat_completion/step1_chat_completion_agent_simple.py)|How to create and use a simple chat completion agent.
+[step2_chat_completion_agent_with_kernel](../getting_started_with_agents/chat_completion/step2_chat_completion_agent_with_kernel.py)|How to create and use a chat completion agent with the AI service created on the kernel.
+[step3_chat_completion_agent_plugin_simple](../getting_started_with_agents/chat_completion/step3_chat_completion_agent_plugin_simple.py)|How to create a simple chat completion agent and specify plugins via the constructor with a kernel.
+[step4_chat_completion_agent_plugin_with_kernel](../getting_started_with_agents/chat_completion/step4_chat_completion_agent_plugin_with_kernel.py)|How to create and use a chat completion agent by registering plugins on the kernel.
+[step5_chat_completion_agent_group_chat](../getting_started_with_agents/chat_completion/step5_chat_completion_agent_group_chat.py)|How to create a conversation between agents.
+[step6_kernel_function_strategies](../getting_started_with_agents/chat_completion/step6_kernel_function_strategies.py)|How to utilize a `KernelFunction` as a chat strategy.
+[step7_chat_completion_agent_json_result](../getting_started_with_agents/chat_completion/step7_chat_completion_agent_json_result.py)|How to have an agent produce JSON.
+[step8_chat_completion_agent_logging](../getting_started_with_agents/chat_completion/step8_chat_completion_agent_logging.py)|How to enable logging for agents.
+[step9_chat_completion_agent_structured_outputs](../getting_started_with_agents/chat_completion/step9_chat_completion_agent_structured_outputs.py)|How to have a chat completion agent use structured outputs.
+
+## OpenAI Assistant Agent
+
 Example|Description
 ---|---
-[step1_agent](../getting_started_with_agents/step1_agent.py)|How to create and use an agent.
-[step2_plugins](../getting_started_with_agents/step2_plugins.py)|How to associate plugins with an agent.
-[step3_chat](../getting_started_with_agents/step3_chat.py)|How to create a conversation between agents.
-[step4_kernel_function_strategies](../getting_started_with_agents/step4_kernel_function_strategies.py)|How to utilize a `KernelFunction` as a chat strategy.
-[step5_json_result](../getting_started_with_agents/step5_json_result.py)|How to have an agent produce JSON.
-[step6_logging](../getting_started_with_agents/step6_logging.py)|How to enable logging for agents.
-[step7_assistant](../getting_started_with_agents/step7_assistant.py)|How to create and use an OpenAI Assistant agent.
-[step8_assistant_vision](../getting_started_with_agents/step8_assistant_vision.py)|How to provide an image as input to an Open AI Assistant agent.
-[step9_assistant_tool_code_interpreter](../getting_started_with_agents/step9_assistant_tool_code_interpreter.py)|How to use the code-interpreter tool for an Open AI Assistant agent.
-[step10_assistant_tool_file_search](../getting_started_with_agents/step10_assistant_tool_file_search.py)|How to use the file-search tool for an Open AI Assistant agent.
-
-*Note: As we strive for parity with .NET, more getting_started_with_agent samples will be added.
The current steps and names may be revised to further align with our .NET counterpart.*
+[step1_assistant](../getting_started_with_agents/openai_assistant/step1_assistant.py)|How to create and use an OpenAI Assistant agent.
+[step2_assistant_plugins](../getting_started_with_agents/openai_assistant/step2_assistant_plugins.py)|How to create and use an OpenAI Assistant agent with plugins.
+[step3_assistant_vision](../getting_started_with_agents/openai_assistant/step3_assistant_vision.py)|How to provide an image as input to an OpenAI Assistant agent.
+[step4_assistant_tool_code_interpreter](../getting_started_with_agents/openai_assistant/step4_assistant_tool_code_interpreter.py)|How to use the code-interpreter tool for an OpenAI Assistant agent.
+[step5_assistant_tool_file_search](../getting_started_with_agents/openai_assistant/step5_assistant_tool_file_search.py)|How to use the file-search tool for an OpenAI Assistant agent.
+
+## Azure AI Agent
+
+Example|Description
+---|---
+[step1_azure_ai_agent](../getting_started_with_agents/azure_ai_agent/step1_azure_ai_agent.py)|How to create an Azure AI Agent and invoke a Semantic Kernel plugin.
+[step2_azure_ai_agent_plugin](../getting_started_with_agents/azure_ai_agent/step2_azure_ai_agent_plugin.py)|How to create an Azure AI Agent with plugins.
+[step3_azure_ai_agent_group_chat](../getting_started_with_agents/azure_ai_agent/step3_azure_ai_agent_group_chat.py)|How to create an agent group chat with Azure AI Agents.
+[step4_azure_ai_agent_code_interpreter](../getting_started_with_agents/azure_ai_agent/step4_azure_ai_agent_code_interpreter.py)|How to use the code-interpreter tool for an Azure AI agent.
+[step5_azure_ai_agent_file_search](../getting_started_with_agents/azure_ai_agent/step5_azure_ai_agent_file_search.py)|How to use the file-search tool for an Azure AI agent.
+[step6_azure_ai_agent_openapi](../getting_started_with_agents/azure_ai_agent/step6_azure_ai_agent_openapi.py)|How to use the OpenAPI tool for an Azure AI agent.
+
+_Note: For details on configuring an Azure AI Agent, please see [here](../getting_started_with_agents/azure_ai_agent/README.md)._
 
 ## Configuring the Kernel
diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/.env.example b/python/samples/getting_started_with_agents/azure_ai_agent/.env.example
new file mode 100644
index 000000000000..c2d16cea26aa
--- /dev/null
+++ b/python/samples/getting_started_with_agents/azure_ai_agent/.env.example
@@ -0,0 +1,6 @@
+AZURE_AI_AGENT_PROJECT_CONNECTION_STRING = ""
+AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME = ""
+AZURE_AI_AGENT_ENDPOINT = ""
+AZURE_AI_AGENT_SUBSCRIPTION_ID = ""
+AZURE_AI_AGENT_RESOURCE_GROUP_NAME = ""
+AZURE_AI_AGENT_PROJECT_NAME = ""
\ No newline at end of file
diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/README.md b/python/samples/getting_started_with_agents/azure_ai_agent/README.md
new file mode 100644
index 000000000000..2cf85976444a
--- /dev/null
+++ b/python/samples/getting_started_with_agents/azure_ai_agent/README.md
@@ -0,0 +1,121 @@
+## Azure AI Agents
+
+The following getting started samples show how to use Azure AI Agents with Semantic Kernel.
+
+To set up the required resources, follow the "Quickstart: Create a new agent" guide [here](https://learn.microsoft.com/en-us/azure/ai-services/agents/quickstart?pivots=programming-language-python-azure).
+ +You will need to install the optional Semantic Kernel `azure` dependencies if you haven't already via: + +```bash +pip install semantic-kernel[azure] +``` + +Before running an Azure AI Agent, modify your .env file to include: + +```bash +AZURE_AI_AGENT_PROJECT_CONNECTION_STRING = "" +AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME = "" +``` + +or + +```bash +AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME = "" +AZURE_AI_AGENT_ENDPOINT = "" +AZURE_AI_AGENT_SUBSCRIPTION_ID = "" +AZURE_AI_AGENT_RESOURCE_GROUP_NAME = "" +AZURE_AI_AGENT_PROJECT_NAME = "" +``` + +The project connection string is of the following format: `;;;`. See [here](https://learn.microsoft.com/en-us/azure/ai-services/agents/quickstart?pivots=programming-language-python-azure#configure-and-run-an-agent) for information on obtaining the values to populate the connection string. + +The .env should be placed in the root directory. + +## Configuring the AI Project Client + +Ensure that your Azure AI Agent resources are configured with at least a Basic or Standard SKU. + +To begin, create the project client as follows: + +```python +async with DefaultAzureCredential() as credential: + client = await AzureAIAgent.create_client(credential=credential) + + async with client: + # Your operational code here +``` + +### Required Imports + +The required imports for the `Azure AI Agent` include async libraries: + +```python +from azure.identity.aio import DefaultAzureCredential +``` + +### Initializing the Agent + +You can pass in a connection string (shown above) to create the client: + +```python +async with ( + DefaultAzureCredential() as creds, + AzureAIAgent.create_client( + credential=creds, + conn_str=ai_agent_settings.project_connection_string.get_secret_value(), + ) as client, + ): + # operational logic +``` + +### Creating an Agent Definition + +Once the client is initialized, you can define the agent: + +```python +# Create agent definition +agent_definition = await client.agents.create_agent( + model=ai_agent_settings.model_deployment_name, + name=AGENT_NAME, + instructions=AGENT_INSTRUCTIONS, +) +``` + +Then, instantiate the `AzureAIAgent` with the `client` and `agent_definition`: + +```python +# Create the AzureAI Agent +agent = AzureAIAgent( + client=client, + definition=agent_definition, +) +``` + +Now, you can create a thread, add chat messages to the agent, and invoke it with given inputs and optional parameters. + +## Requests and Rate Limits + +### Managing API Request Frequency + +Your default request limits may be low, affecting how often you can poll the status of a run. You have two options: + +1. Adjust the `polling_options` of the `AzureAIAgent` + +By default, the polling interval is 250 ms. You can slow it down to 1 second (or another preferred value) to reduce the number of API calls: + +```python +# Required imports +from datetime import timedelta +from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions + +# Configure the polling options as part of the `AzureAIAgent` +agent = AzureAIAgent( + client=client, + definition=agent_definition, + polling_options=RunPollingOptions(run_polling_interval=timedelta(seconds=1)), +) +``` + +2. Increase Rate Limits in Azure AI Foundry + +You can also adjust your deployment's Rate Limit (Tokens per minute), which impacts the Rate Limit (Requests per minute). This can be configured in Azure AI Foundry under your project's deployment settings for the "Connected Azure OpenAI Service Resource." 
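Earlier in this README, creating a thread, adding chat messages, and invoking the agent are described only in prose. As a rough sketch, the loop below combines the calls used in the step samples that follow; it assumes the `client` and `agent` objects created in the earlier snippets, and `USER_INPUTS` is an illustrative placeholder rather than part of the API.

```python
# A rough sketch of the thread lifecycle described above, assembled from the
# calls shown in the step samples below. Assumes `client` and `agent` from the
# earlier snippets; `USER_INPUTS` is an illustrative placeholder.
USER_INPUTS = ["Hello, I am John Doe.", "What is my name?"]

thread = await client.agents.create_thread()
try:
    for user_input in USER_INPUTS:
        # Add the user input to the service-managed thread
        await agent.add_chat_message(thread_id=thread.id, message=user_input)
        # Invoke the agent against that thread and print the reply
        response = await agent.get_response(thread_id=thread.id)
        print(f"# {response.name}: {response}")
finally:
    # Clean up the thread (and the agent, if it was created only for this run)
    await client.agents.delete_thread(thread.id)
    await client.agents.delete_agent(agent.id)
```

Because the service keeps the conversation history on the thread, the caller does not need to maintain a local chat history object for this flow.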
diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/step1_azure_ai_agent.py b/python/samples/getting_started_with_agents/azure_ai_agent/step1_azure_ai_agent.py new file mode 100644 index 000000000000..bb756e4ad5b3 --- /dev/null +++ b/python/samples/getting_started_with_agents/azure_ai_agent/step1_azure_ai_agent.py @@ -0,0 +1,80 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from azure.identity.aio import DefaultAzureCredential + +from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings + +""" +The following sample demonstrates how to create an Azure AI agent that answers +user questions. This sample demonstrates the basic steps to create an agent +and simulate a conversation with the agent. + +The interaction with the agent is via the `get_response` method, which sends a +user input to the agent and receives a response from the agent. The conversation +history is maintained by the agent service, i.e. the responses are automatically +associated with the thread. Therefore, client code does not need to maintain the +conversation history. +""" + + +# Simulate a conversation with the agent +USER_INPUTS = [ + "Hello, I am John Doe.", + "What is your name?", + "What is my name?", +] + + +async def main() -> None: + ai_agent_settings = AzureAIAgentSettings.create() + + async with ( + DefaultAzureCredential() as creds, + AzureAIAgent.create_client(credential=creds) as client, + ): + # 1. Create an agent on the Azure AI agent service + agent_definition = await client.agents.create_agent( + model=ai_agent_settings.model_deployment_name, + name="Assistant", + instructions="Answer the user's questions.", + ) + + # 2. Create a Semantic Kernel agent for the Azure AI agent + agent = AzureAIAgent( + client=client, + definition=agent_definition, + # Optionally configure polling options + # polling_options=RunPollingOptions(run_polling_interval=timedelta(seconds=1)), + ) + + # 3. Create a new thread on the Azure AI agent service + thread = await client.agents.create_thread() + + try: + for user_input in USER_INPUTS: + # 4. Add the user input as a chat message + await agent.add_chat_message(thread_id=thread.id, message=user_input) + print(f"# User: {user_input}") + # 5. Invoke the agent for the specified thread for response + response = await agent.get_response(thread_id=thread.id) + print(f"# {response.name}: {response}") + finally: + # 6. Cleanup: Delete the thread and agent + await client.agents.delete_thread(thread.id) + await client.agents.delete_agent(agent.id) + + """ + Sample Output: + # User: Hello, I am John Doe. + # Assistant: Hello, John! How can I assist you today? + # User: What is your name? + # Assistant: I’m here as your assistant, so you can just call me Assistant. How can I help you today? + # User: What is my name? + # Assistant: Your name is John Doe. How can I assist you today, John? + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/step2_azure_ai_agent_plugin.py b/python/samples/getting_started_with_agents/azure_ai_agent/step2_azure_ai_agent_plugin.py new file mode 100644 index 000000000000..33477e0d9863 --- /dev/null +++ b/python/samples/getting_started_with_agents/azure_ai_agent/step2_azure_ai_agent_plugin.py @@ -0,0 +1,101 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +from typing import Annotated + +from azure.identity.aio import DefaultAzureCredential + +from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings +from semantic_kernel.contents import AuthorRole +from semantic_kernel.functions import kernel_function + +""" +The following sample demonstrates how to create an Azure AI agent that answers +questions about a sample menu using a Semantic Kernel Plugin. +""" + + +# Define a sample plugin for the sample +class MenuPlugin: + """A sample Menu Plugin used for the concept sample.""" + + @kernel_function(description="Provides a list of specials from the menu.") + def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: + return """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """ + + @kernel_function(description="Provides the price of the requested menu item.") + def get_item_price( + self, menu_item: Annotated[str, "The name of the menu item."] + ) -> Annotated[str, "Returns the price of the menu item."]: + return "$9.99" + + +# Simulate a conversation with the agent +USER_INPUTS = [ + "Hello", + "What is the special soup?", + "How much does that cost?", + "Thank you", +] + + +async def main() -> None: + ai_agent_settings = AzureAIAgentSettings.create() + + async with ( + DefaultAzureCredential() as creds, + AzureAIAgent.create_client(credential=creds) as client, + ): + # 1. Create an agent on the Azure AI agent service + agent_definition = await client.agents.create_agent( + model=ai_agent_settings.model_deployment_name, + name="Host", + instructions="Answer questions about the menu.", + ) + + # 2. Create a Semantic Kernel agent for the Azure AI agent + agent = AzureAIAgent( + client=client, + definition=agent_definition, + # Optionally configure polling options + # polling_options=RunPollingOptions(run_polling_interval=timedelta(seconds=1)), + ) + + # 3. Add a plugin to the agent via the kernel + agent.kernel.add_plugin(MenuPlugin(), plugin_name="menu") + + # 4. Create a new thread on the Azure AI agent service + thread = await client.agents.create_thread() + + try: + for user_input in USER_INPUTS: + # 5. Add the user input as a chat message + await agent.add_chat_message(thread_id=thread.id, message=user_input) + print(f"# User: {user_input}") + # 6. Invoke the agent for the specified thread for response + async for content in agent.invoke( + thread_id=thread.id, + temperature=0.2, # override the agent-level temperature setting with a run-time value + ): + if content.role != AuthorRole.TOOL: + print(f"# Agent: {content.content}") + finally: + # 7. Cleanup: Delete the thread and agent + await client.agents.delete_thread(thread.id) + await client.agents.delete_agent(agent.id) + + """ + Sample Output: + # User: Hello + # Agent: Hello! How can I assist you today? + # User: What is the special soup? + # ... + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/step3_azure_ai_agent_group_chat.py b/python/samples/getting_started_with_agents/azure_ai_agent/step3_azure_ai_agent_group_chat.py new file mode 100644 index 000000000000..064fdd6415cd --- /dev/null +++ b/python/samples/getting_started_with_agents/azure_ai_agent/step3_azure_ai_agent_group_chat.py @@ -0,0 +1,111 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio + +from azure.identity.aio import DefaultAzureCredential + +from semantic_kernel.agents import AgentGroupChat +from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings +from semantic_kernel.agents.strategies import TerminationStrategy +from semantic_kernel.contents import AuthorRole + +""" +The following sample demonstrates how to create an OpenAI assistant using either +Azure OpenAI or OpenAI, a chat completion agent and have them participate in a +group chat to work towards the user's requirement. +""" + + +class ApprovalTerminationStrategy(TerminationStrategy): + """A strategy for determining when an agent should terminate.""" + + async def should_agent_terminate(self, agent, history): + """Check if the agent should terminate.""" + return "approved" in history[-1].content.lower() + + +REVIEWER_NAME = "ArtDirector" +REVIEWER_INSTRUCTIONS = """ +You are an art director who has opinions about copywriting born of a love for David Ogilvy. +The goal is to determine if the given copy is acceptable to print. +If so, state that it is approved. Do not use the word "approve" unless you are giving approval. +If not, provide insight on how to refine suggested copy without example. +""" + +COPYWRITER_NAME = "CopyWriter" +COPYWRITER_INSTRUCTIONS = """ +You are a copywriter with ten years of experience and are known for brevity and a dry humor. +The goal is to refine and decide on the single best copy as an expert in the field. +Only provide a single proposal per response. +You're laser focused on the goal at hand. +Don't waste time with chit chat. +Consider suggestions when refining an idea. +""" + +TASK = "a slogan for a new line of electric cars." + + +async def main(): + ai_agent_settings = AzureAIAgentSettings.create() + + async with ( + DefaultAzureCredential() as creds, + AzureAIAgent.create_client(credential=creds) as client, + ): + # 1. Create the reviewer agent on the Azure AI agent service + reviewer_agent_definition = await client.agents.create_agent( + model=ai_agent_settings.model_deployment_name, + name=REVIEWER_NAME, + instructions=REVIEWER_INSTRUCTIONS, + ) + + # 2. Create a Semantic Kernel agent for the reviewer Azure AI agent + agent_reviewer = AzureAIAgent( + client=client, + definition=reviewer_agent_definition, + ) + + # 3. Create the copy writer agent on the Azure AI agent service + copy_writer_agent_definition = await client.agents.create_agent( + model=ai_agent_settings.model_deployment_name, + name=COPYWRITER_NAME, + instructions=COPYWRITER_INSTRUCTIONS, + ) + + # 4. Create a Semantic Kernel agent for the copy writer Azure AI agent + agent_writer = AzureAIAgent( + client=client, + definition=copy_writer_agent_definition, + ) + + # 5. Place the agents in a group chat with a custom termination strategy + chat = AgentGroupChat( + agents=[agent_writer, agent_reviewer], + termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), + ) + + try: + # 6. Add the task as a message to the group chat + await chat.add_chat_message(message=TASK) + print(f"# {AuthorRole.USER}: '{TASK}'") + # 7. Invoke the chat + async for content in chat.invoke(): + print(f"# {content.role} - {content.name or '*'}: '{content.content}'") + finally: + # 8. Cleanup: Delete the agents + await chat.reset() + await client.agents.delete_agent(agent_reviewer.id) + await client.agents.delete_agent(agent_writer.id) + + """ + Sample Output: + # AuthorRole.USER: 'a slogan for a new line of electric cars.' 
+ # AuthorRole.ASSISTANT - CopyWriter: '"Charge Ahead: Drive the Future."' + # AuthorRole.ASSISTANT - ArtDirector: 'This slogan has a nice ring to it and captures the ...' + # AuthorRole.ASSISTANT - CopyWriter: '"Plug In. Drive Green."' + ... + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/step4_azure_ai_agent_code_interpreter.py b/python/samples/getting_started_with_agents/azure_ai_agent/step4_azure_ai_agent_code_interpreter.py new file mode 100644 index 000000000000..4d462f7aafe3 --- /dev/null +++ b/python/samples/getting_started_with_agents/azure_ai_agent/step4_azure_ai_agent_code_interpreter.py @@ -0,0 +1,88 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from azure.ai.projects.models import CodeInterpreterTool +from azure.identity.aio import DefaultAzureCredential + +from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings +from semantic_kernel.contents import AuthorRole + +""" +The following sample demonstrates how to create a simple, Azure AI agent that +uses the code interpreter tool to answer a coding question. +""" + +TASK = "Use code to determine the values in the Fibonacci sequence that that are less then the value of 101." + + +async def main() -> None: + ai_agent_settings = AzureAIAgentSettings.create() + + async with ( + DefaultAzureCredential() as creds, + AzureAIAgent.create_client(credential=creds) as client, + ): + # 1. Create an agent with a code interpreter on the Azure AI agent service + code_interpreter = CodeInterpreterTool() + agent_definition = await client.agents.create_agent( + model=ai_agent_settings.model_deployment_name, + tools=code_interpreter.definitions, + tool_resources=code_interpreter.resources, + ) + + # 2. Create a Semantic Kernel agent for the Azure AI agent + agent = AzureAIAgent( + client=client, + definition=agent_definition, + ) + + # 3. Create a new thread on the Azure AI agent service + thread = await client.agents.create_thread() + + try: + # 4. Add the task as a chat message + await agent.add_chat_message(thread_id=thread.id, message=TASK) + print(f"# User: '{TASK}'") + # 5. Invoke the agent for the specified thread for response + async for content in agent.invoke(thread_id=thread.id): + if content.role != AuthorRole.TOOL: + print(f"# Agent: {content.content}") + finally: + # 6. Cleanup: Delete the thread and agent + await client.agents.delete_thread(thread.id) + await client.agents.delete_agent(agent.id) + + """ + Sample Output: + # User: 'Use code to determine the values in the Fibonacci sequence that that are less then the value of 101.' 
+ # Agent: # Function to generate Fibonacci sequence values less than a given limit + def fibonacci_less_than(limit): + fib_sequence = [] + a, b = 0, 1 + while a < limit: + fib_sequence.append(a) + a, b = b, a + b + a, b = 0, 1 + while a < limit: + fib_sequence.append(a) + a, b = 0, 1 + while a < limit: + a, b = 0, 1 + a, b = 0, 1 + while a < limit: + fib_sequence.append(a) + a, b = b, a + b + return fib_sequence + + Generate Fibonacci sequence values less than 101 + fibonacci_values = fibonacci_less_than(101) + fibonacci_values + # Agent: The values in the Fibonacci sequence that are less than 101 are: + + [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89] + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/step5_azure_ai_agent_file_search.py b/python/samples/getting_started_with_agents/azure_ai_agent/step5_azure_ai_agent_file_search.py new file mode 100644 index 000000000000..978ac19a76d2 --- /dev/null +++ b/python/samples/getting_started_with_agents/azure_ai_agent/step5_azure_ai_agent_file_search.py @@ -0,0 +1,83 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os + +from azure.ai.projects.models import FileSearchTool, OpenAIFile, VectorStore +from azure.identity.aio import DefaultAzureCredential + +from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings +from semantic_kernel.contents import AuthorRole + +""" +The following sample demonstrates how to create a simple, Azure AI agent that +uses a file search tool to answer user questions. +""" + +# Simulate a conversation with the agent +USER_INPUTS = [ + "Who is the youngest employee?", + "Who works in sales?", + "I have a customer request, who can help me?", +] + + +async def main() -> None: + ai_agent_settings = AzureAIAgentSettings.create() + + async with ( + DefaultAzureCredential() as creds, + AzureAIAgent.create_client(credential=creds) as client, + ): + # 1. Read and upload the file to the Azure AI agent service + pdf_file_path = os.path.join( + os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "resources", "employees.pdf" + ) + file: OpenAIFile = await client.agents.upload_file_and_poll(file_path=pdf_file_path, purpose="assistants") + vector_store: VectorStore = await client.agents.create_vector_store_and_poll( + file_ids=[file.id], name="my_vectorstore" + ) + + # 2. Create file search tool with uploaded resources + file_search = FileSearchTool(vector_store_ids=[vector_store.id]) + + # 3. Create an agent on the Azure AI agent service with the file search tool + agent_definition = await client.agents.create_agent( + model=ai_agent_settings.model_deployment_name, + tools=file_search.definitions, + tool_resources=file_search.resources, + ) + + # 4. Create a Semantic Kernel agent for the Azure AI agent + agent = AzureAIAgent( + client=client, + definition=agent_definition, + ) + + # 5. Create a new thread on the Azure AI agent service + thread = await client.agents.create_thread() + + try: + for user_input in USER_INPUTS: + # 6. Add the user input as a chat message + await agent.add_chat_message(thread_id=thread.id, message=user_input) + print(f"# User: '{user_input}'") + # 7. Invoke the agent for the specified thread for response + async for content in agent.invoke(thread_id=thread.id): + if content.role != AuthorRole.TOOL: + print(f"# Agent: {content.content}") + finally: + # 8. 
Cleanup: Delete the thread and agent + await client.agents.delete_thread(thread.id) + await client.agents.delete_agent(agent.id) + + """ + Sample Output: + # User: 'Who is the youngest employee?' + # Agent: The youngest employee is Teodor Britton, who is an accountant and was born on January 9, 1997... + # User: 'Who works in sales?' + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/step6_azure_ai_agent_openapi.py b/python/samples/getting_started_with_agents/azure_ai_agent/step6_azure_ai_agent_openapi.py new file mode 100644 index 000000000000..1abfb001e93b --- /dev/null +++ b/python/samples/getting_started_with_agents/azure_ai_agent/step6_azure_ai_agent_openapi.py @@ -0,0 +1,111 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import json +import os + +from azure.ai.projects.models import OpenApiAnonymousAuthDetails, OpenApiTool +from azure.identity.aio import DefaultAzureCredential + +from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings +from semantic_kernel.contents import AuthorRole + +""" +The following sample demonstrates how to create a simple, Azure AI agent that +uses OpenAPI tools to answer user questions. +""" + + +# Simulate a conversation with the agent +USER_INPUTS = [ + "What is the name and population of the country that uses currency with abbreviation THB", + "What is the current weather in the capital city of the country?", +] + + +async def main() -> None: + ai_agent_settings = AzureAIAgentSettings.create() + + async with ( + DefaultAzureCredential() as creds, + AzureAIAgent.create_client(credential=creds) as client, + ): + # 1. Read in the OpenAPI spec files + openapi_spec_file_path = os.path.join( + os.path.dirname(os.path.dirname(os.path.realpath(__file__))), + "resources", + ) + with open(os.path.join(openapi_spec_file_path, "weather.json")) as weather_file: + weather_openapi_spec = json.loads(weather_file.read()) + with open(os.path.join(openapi_spec_file_path, "countries.json")) as countries_file: + countries_openapi_spec = json.loads(countries_file.read()) + + # 2. Create OpenAPI tools + # Note that connection or managed identity auth setup requires additional setup in Azure + auth = OpenApiAnonymousAuthDetails() + openapi_weather = OpenApiTool( + name="get_weather", + spec=weather_openapi_spec, + description="Retrieve weather information for a location", + auth=auth, + ) + openapi_countries = OpenApiTool( + name="get_country", + spec=countries_openapi_spec, + description="Retrieve country information", + auth=auth, + ) + + # 3. Create an agent on the Azure AI agent service with the OpenAPI tools + agent_definition = await client.agents.create_agent( + model=ai_agent_settings.model_deployment_name, + tools=openapi_weather.definitions + openapi_countries.definitions, + ) + + # 4. Create a Semantic Kernel agent for the Azure AI agent + agent = AzureAIAgent( + client=client, + definition=agent_definition, + ) + + # 5. Create a new thread on the Azure AI agent service + thread = await client.agents.create_thread() + + try: + for user_input in USER_INPUTS: + # 6. Add the user input as a chat message + await agent.add_chat_message(thread_id=thread.id, message=user_input) + print(f"# User: '{user_input}'") + # 7. Invoke the agent for the specified thread for response + async for content in agent.invoke(thread_id=thread.id): + if content.role != AuthorRole.TOOL: + print(f"# Agent: {content.content}") + finally: + # 8. 
Cleanup: Delete the thread and agent + await client.agents.delete_thread(thread.id) + await client.agents.delete_agent(agent.id) + + """ + Sample Output: + # User: 'What is the name and population of the country that uses currency with abbreviation THB' + # Agent: It seems I encountered an issue while trying to retrieve data about the country that uses the ... + + As of the latest estimates, the population of Thailand is approximately 69 million people. If you ... + # User: 'What is the current weather in the capital city of the country?' + # Agent: The current weather in Bangkok, Thailand, the capital city, is as follows: + + - **Temperature**: 24°C (76°F) + - **Feels Like**: 26°C (79°F) + - **Weather Description**: Light rain + - **Humidity**: 69% + - **Cloud Cover**: 75% + - **Pressure**: 1017 hPa + - **Wind Speed**: 8 km/h (5 mph) from the east-northeast (ENE) + - **Visibility**: 10 km (approximately 6 miles) + + This weather information reflects the current conditions as of the latest observation. If you need ... + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/chat_completion/README.md b/python/samples/getting_started_with_agents/chat_completion/README.md new file mode 100644 index 000000000000..5815c8a75642 --- /dev/null +++ b/python/samples/getting_started_with_agents/chat_completion/README.md @@ -0,0 +1,3 @@ +# Chat Completion Agents + +The following getting started samples show how to use Chat Completion agents with Semantic Kernel. diff --git a/python/samples/getting_started_with_agents/chat_completion/step1_chat_completion_agent_simple.py b/python/samples/getting_started_with_agents/chat_completion/step1_chat_completion_agent_simple.py new file mode 100644 index 000000000000..850e159a1069 --- /dev/null +++ b/python/samples/getting_started_with_agents/chat_completion/step1_chat_completion_agent_simple.py @@ -0,0 +1,62 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from semantic_kernel.agents import ChatCompletionAgent +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents import ChatHistory + +""" +The following sample demonstrates how to create a chat completion agent that +answers user questions using the Azure Chat Completion service. The Chat Completion +Service is passed directly via the ChatCompletionAgent constructor. This sample +demonstrates the basic steps to create an agent and simulate a conversation +with the agent. + +The interaction with the agent is via the `get_response` method, which sends a +user input to the agent and receives a response from the agent. The conversation +history needs to be maintained by the caller in the chat history object. +""" + +# Simulate a conversation with the agent +USER_INPUTS = [ + "Hello, I am John Doe.", + "What is your name?", + "What is my name?", +] + + +async def main(): + # 1. Create the agent by specifying the service + agent = ChatCompletionAgent( + service=AzureChatCompletion(), + name="Assistant", + instructions="Answer the user's questions.", + ) + + # 2. Create a chat history to hold the conversation + chat_history = ChatHistory() + + for user_input in USER_INPUTS: + # 3. Add the user input to the chat history + chat_history.add_user_message(user_input) + print(f"# User: {user_input}") + # 4. Invoke the agent for a response + response = await agent.get_response(chat_history) + print(f"# {response.name}: {response}") + # 5. 
Add the agent response to the chat history + chat_history.add_message(response) + + """ + Sample output: + # User: Hello, I am John Doe. + # Assistant: Hello, John Doe! How can I assist you today? + # User: What is your name? + # Assistant: I don't have a personal name like a human does, but you can call me Assistant.? + # User: What is my name? + # Assistant: You mentioned that your name is John Doe. How can I assist you further, John? + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/chat_completion/step2_chat_completion_agent_with_kernel.py b/python/samples/getting_started_with_agents/chat_completion/step2_chat_completion_agent_with_kernel.py new file mode 100644 index 000000000000..6d13aa4f2293 --- /dev/null +++ b/python/samples/getting_started_with_agents/chat_completion/step2_chat_completion_agent_with_kernel.py @@ -0,0 +1,69 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from semantic_kernel import Kernel +from semantic_kernel.agents import ChatCompletionAgent +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents import ChatHistory + +""" +The following sample demonstrates how to create a chat completion agent that +answers user questions using the Azure Chat Completion service. The Chat Completion +Service is first added to the kernel, and the kernel is passed in to the +ChatCompletionAgent constructor. This sample demonstrates the basic steps to +create an agent and simulate a conversation with the agent. + +Note: if both a service and a kernel are provided, the service will be used. + +The interaction with the agent is via the `get_response` method, which sends a +user input to the agent and receives a response from the agent. The conversation +history needs to be maintained by the caller in the chat history object. +""" + +# Simulate a conversation with the agent +USER_INPUTS = [ + "Hello, I am John Doe.", + "What is your name?", + "What is my name?", +] + + +async def main(): + # 1. Create the instance of the Kernel to register an AI service + kernel = Kernel() + kernel.add_service(AzureChatCompletion()) + + # 2. Create the agent + agent = ChatCompletionAgent( + kernel=kernel, + name="Assistant", + instructions="Answer the user's questions.", + ) + + # 3. Create a chat history to hold the conversation + chat_history = ChatHistory() + + for user_input in USER_INPUTS: + # 4. Add the user input to the chat history + chat_history.add_user_message(user_input) + print(f"# User: {user_input}") + # 5. Invoke the agent for a response + response = await agent.get_response(chat_history) + print(f"# {response.name}: {response}") + # 6. Add the agent response to the chat history + chat_history.add_message(response) + + """ + Sample output: + # User: Hello, I am John Doe. + # Assistant: Hello, John Doe! How can I assist you today? + # User: What is your name? + # Assistant: I don't have a personal name like a human does, but you can call me Assistant.? + # User: What is my name? + # Assistant: You mentioned that your name is John Doe. How can I assist you further, John? 
+ """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/chat_completion/step3_chat_completion_agent_plugin_simple.py b/python/samples/getting_started_with_agents/chat_completion/step3_chat_completion_agent_plugin_simple.py new file mode 100644 index 000000000000..ac9d94ce84ed --- /dev/null +++ b/python/samples/getting_started_with_agents/chat_completion/step3_chat_completion_agent_plugin_simple.py @@ -0,0 +1,81 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +from typing import Annotated + +from semantic_kernel.agents import ChatCompletionAgent +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents import ChatHistory +from semantic_kernel.functions import kernel_function + +""" +The following sample demonstrates how to create a chat completion agent that +answers questions about a sample menu using a Semantic Kernel Plugin. The Chat +Completion Service is passed directly via the ChatCompletionAgent constructor. +Additionally, the plugin is supplied via the constructor. +""" + + +# Define a sample plugin for the sample +class MenuPlugin: + """A sample Menu Plugin used for the concept sample.""" + + @kernel_function(description="Provides a list of specials from the menu.") + def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: + return """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """ + + @kernel_function(description="Provides the price of the requested menu item.") + def get_item_price( + self, menu_item: Annotated[str, "The name of the menu item."] + ) -> Annotated[str, "Returns the price of the menu item."]: + return "$9.99" + + +# Simulate a conversation with the agent +USER_INPUTS = [ + "Hello", + "What is the special soup?", + "What does that cost?", + "Thank you", +] + + +async def main(): + # 1. Create the agent + agent = ChatCompletionAgent( + service=AzureChatCompletion(), + name="Host", + instructions="Answer questions about the menu.", + plugins=[MenuPlugin()], + ) + + # 2. Create a chat history to hold the conversation + chat_history = ChatHistory() + + for user_input in USER_INPUTS: + # 3. Add the user input to the chat history + chat_history.add_user_message(user_input) + print(f"# User: {user_input}") + # 4. Invoke the agent for a response + response = await agent.get_response(chat_history) + print(f"# {response.name}: {response.content} ") + + """ + Sample output: + # User: Hello + # Host: Hello! How can I assist you today? + # User: What is the special soup? + # Host: The special soup is Clam Chowder. + # User: What does that cost? + # Host: The special soup, Clam Chowder, costs $9.99. + # User: Thank you + # Host: You're welcome! If you have any more questions, feel free to ask. Enjoy your day! + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/chat_completion/step4_chat_completion_agent_plugin_with_kernel.py b/python/samples/getting_started_with_agents/chat_completion/step4_chat_completion_agent_plugin_with_kernel.py new file mode 100644 index 000000000000..8f2b241f6295 --- /dev/null +++ b/python/samples/getting_started_with_agents/chat_completion/step4_chat_completion_agent_plugin_with_kernel.py @@ -0,0 +1,104 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +from typing import Annotated + +from semantic_kernel import Kernel +from semantic_kernel.agents import ChatCompletionAgent +from semantic_kernel.connectors.ai import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents import ChatHistory, FunctionCallContent, FunctionResultContent +from semantic_kernel.functions import KernelArguments, kernel_function + +""" +The following sample demonstrates how to create a chat completion agent that +answers questions about a sample menu using a Semantic Kernel Plugin. The Chat +Completion Service is first added to the kernel, and the kernel is passed in to the +ChatCompletionAgent constructor. Additionally, the plugin is supplied via the kernel. +To enable auto-function calling, the prompt execution settings are retrieved from the kernel +using the specified `service_id`. The function choice behavior is set to `Auto` to allow the +agent to automatically execute the plugin's functions when needed. +""" + + +# Define a sample plugin for the sample +class MenuPlugin: + """A sample Menu Plugin used for the concept sample.""" + + @kernel_function(description="Provides a list of specials from the menu.") + def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: + return """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """ + + @kernel_function(description="Provides the price of the requested menu item.") + def get_item_price( + self, menu_item: Annotated[str, "The name of the menu item."] + ) -> Annotated[str, "Returns the price of the menu item."]: + return "$9.99" + + +# Simulate a conversation with the agent +USER_INPUTS = [ + "Hello", + "What is the special soup?", + "What does that cost?", + "Thank you", +] + + +async def main(): + # 1. Create the instance of the Kernel to register the plugin and service + service_id = "agent" + kernel = Kernel() + kernel.add_plugin(MenuPlugin(), plugin_name="menu") + kernel.add_service(AzureChatCompletion(service_id=service_id)) + + # 2. Configure the function choice behavior to auto invoke kernel functions + # so that the agent can automatically execute the menu plugin functions when needed + settings = kernel.get_prompt_execution_settings_from_service_id(service_id=service_id) + settings.function_choice_behavior = FunctionChoiceBehavior.Auto() + + # 3. Create the agent + agent = ChatCompletionAgent( + kernel=kernel, + name="Host", + instructions="Answer questions about the menu.", + arguments=KernelArguments(settings=settings), + ) + + # 4. Create a chat history to hold the conversation + chat_history = ChatHistory() + + for user_input in USER_INPUTS: + # 5. Add the user input to the chat history + chat_history.add_user_message(user_input) + print(f"# User: {user_input}") + # 6. Invoke the agent for a response + async for content in agent.invoke(chat_history): + print(f"# {content.name}: ", end="") + if ( + not any(isinstance(item, (FunctionCallContent, FunctionResultContent)) for item in content.items) + and content.content.strip() + ): + # We only want to print the content if it's not a function call or result + print(f"{content.content}", end="", flush=True) + print("") + + """ + Sample output: + # User: Hello + # Host: Hello! How can I assist you today? + # User: What is the special soup? + # Host: The special soup is Clam Chowder. + # User: What does that cost? + # Host: The special soup, Clam Chowder, costs $9.99. + # User: Thank you + # Host: You're welcome! 
If you have any more questions, feel free to ask. Enjoy your day! + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/step3_chat.py b/python/samples/getting_started_with_agents/chat_completion/step5_chat_completion_agent_group_chat.py similarity index 53% rename from python/samples/getting_started_with_agents/step3_chat.py rename to python/samples/getting_started_with_agents/chat_completion/step5_chat_completion_agent_group_chat.py index e81c5d0c516c..d7f13173d268 100644 --- a/python/samples/getting_started_with_agents/step3_chat.py +++ b/python/samples/getting_started_with_agents/chat_completion/step5_chat_completion_agent_group_chat.py @@ -2,19 +2,22 @@ import asyncio +from semantic_kernel import Kernel from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy -from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel +from semantic_kernel.agents.strategies import TerminationStrategy +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -################################################################### -# The following sample demonstrates how to create a simple, # -# agent group chat that utilizes An Art Director Chat Completion # -# Agent along with a Copy Writer Chat Completion Agent to # -# complete a task. # -################################################################### +""" +The following sample demonstrates how to create a simple, agent group chat that +utilizes An Art Director Chat Completion Agent along with a Copy Writer Chat +Completion Agent to complete a task. +""" + + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel class ApprovalTerminationStrategy(TerminationStrategy): @@ -43,42 +46,52 @@ async def should_agent_terminate(self, agent, history): Consider suggestions when refining an idea. """ - -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) - return kernel +TASK = "a slogan for a new line of electric cars." async def main(): + # 1. Create the reviewer agent based on the chat completion service agent_reviewer = ChatCompletionAgent( - service_id="artdirector", kernel=_create_kernel_with_chat_completion("artdirector"), name=REVIEWER_NAME, instructions=REVIEWER_INSTRUCTIONS, ) + # 2. Create the copywriter agent based on the chat completion service agent_writer = ChatCompletionAgent( - service_id="copywriter", kernel=_create_kernel_with_chat_completion("copywriter"), name=COPYWRITER_NAME, instructions=COPYWRITER_INSTRUCTIONS, ) - chat = AgentGroupChat( - agents=[agent_writer, agent_reviewer], - termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), + # 3. Place the agents in a group chat with a custom termination strategy + group_chat = AgentGroupChat( + agents=[ + agent_writer, + agent_reviewer, + ], + termination_strategy=ApprovalTerminationStrategy( + agents=[agent_reviewer], + maximum_iterations=10, + ), ) - input = "a slogan for a new line of electric cars." 
- - await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) - print(f"# {AuthorRole.USER}: '{input}'") - - async for content in chat.invoke(): - print(f"# {content.role} - {content.name or '*'}: '{content.content}'") - - print(f"# IS COMPLETE: {chat.is_complete}") + # 4. Add the task as a message to the group chat + await group_chat.add_chat_message(message=TASK) + print(f"# User: {TASK}") + + # 5. Invoke the chat + async for content in group_chat.invoke(): + print(f"# {content.name}: {content.content}") + + """ + Sample output: + # User: a slogan for a new line of electric cars. + # CopyWriter: "Drive the Future: Shockingly Efficient." + # ArtDirector: This slogan has potential but could benefit from refinement to create a stronger ... + # CopyWriter: "Electrify Your Drive." + # ArtDirector: Approved. This slogan is concise, memorable, and effectively communicates the ... + """ if __name__ == "__main__": diff --git a/python/samples/getting_started_with_agents/step4_kernel_function_strategies.py b/python/samples/getting_started_with_agents/chat_completion/step6_kernel_function_strategies.py similarity index 66% rename from python/samples/getting_started_with_agents/step4_kernel_function_strategies.py rename to python/samples/getting_started_with_agents/chat_completion/step6_kernel_function_strategies.py index 9ad6a9d361bf..3cfabc110ba8 100644 --- a/python/samples/getting_started_with_agents/step4_kernel_function_strategies.py +++ b/python/samples/getting_started_with_agents/chat_completion/step6_kernel_function_strategies.py @@ -2,26 +2,26 @@ import asyncio +from semantic_kernel import Kernel from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.strategies import ( - KernelFunctionSelectionStrategy, - KernelFunctionTerminationStrategy, -) -from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.functions.kernel_function_from_prompt import KernelFunctionFromPrompt -from semantic_kernel.kernel import Kernel - -################################################################### -# The following sample demonstrates how to create a simple, # -# agent group chat that utilizes An Art Director Chat Completion # -# Agent along with a Copy Writer Chat Completion Agent to # -# complete a task. The sample also shows how to specify a Kernel # -# Function termination and selection strategy to determine when # -# to end the chat or how to select the next agent to take a turn # -# in the conversation. # -################################################################### +from semantic_kernel.agents.strategies import KernelFunctionSelectionStrategy, KernelFunctionTerminationStrategy +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.functions import KernelFunctionFromPrompt + +""" +The following sample demonstrates how to create a simple, agent group chat that utilizes +An Art Director Chat Completion Agent along with a Copy Writer Chat Completion Agent to +complete a task. The sample also shows how to specify a Kernel Function termination and +selection strategy to determine when to end the chat or how to select the next agent to +take a turn in the conversation. 
+""" + + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel + REVIEWER_NAME = "ArtDirector" REVIEWER_INSTRUCTIONS = """ @@ -41,28 +41,25 @@ Consider suggestions when refining an idea. """ - -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) - return kernel +TASK = "a slogan for a new line of electric cars." async def main(): + # 1. Create the reviewer agent based on the chat completion service agent_reviewer = ChatCompletionAgent( - service_id="artdirector", kernel=_create_kernel_with_chat_completion("artdirector"), name=REVIEWER_NAME, instructions=REVIEWER_INSTRUCTIONS, ) + # 2. Create the copywriter agent based on the chat completion service agent_writer = ChatCompletionAgent( - service_id="copywriter", kernel=_create_kernel_with_chat_completion("copywriter"), name=COPYWRITER_NAME, instructions=COPYWRITER_INSTRUCTIONS, ) + # 3. Create a Kernel Function to determine if the copy has been approved termination_function = KernelFunctionFromPrompt( function_name="termination", prompt=""" @@ -73,6 +70,7 @@ async def main(): """, ) + # 4. Create a Kernel Function to determine which agent should take the next turn selection_function = KernelFunctionFromPrompt( function_name="selection", prompt=f""" @@ -94,6 +92,7 @@ async def main(): """, ) + # 5. Place the agents in a group chat with the custom termination and selection strategies chat = AgentGroupChat( agents=[agent_writer, agent_reviewer], termination_strategy=KernelFunctionTerminationStrategy( @@ -113,15 +112,22 @@ async def main(): ), ) - input = "a slogan for a new line of electric cars." - - await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) - print(f"# {AuthorRole.USER}: '{input}'") + # 6. Add the task as a message to the group chat + await chat.add_chat_message(message=TASK) + print(f"# User: {TASK}") + # 7. Invoke the chat async for content in chat.invoke(): - print(f"# {content.role} - {content.name or '*'}: '{content.content}'") - - print(f"# IS COMPLETE: {chat.is_complete}") + print(f"# {content.name}: {content.content}") + + """ + Sample Output: + # User: a slogan for a new line of electric cars. + # CopyWriter: "Electrify your drive. Spare the gas, not the thrill." + # ArtDirector: This slogan captures the essence of electric cars but could use refinement to ... + # CopyWriter: "Go electric. Enjoy the thrill. Skip the gas." + # ArtDirector: Approved. This slogan is clear, concise, and effectively communicates the ... + """ if __name__ == "__main__": diff --git a/python/samples/getting_started_with_agents/chat_completion/step7_chat_completion_agent_json_result.py b/python/samples/getting_started_with_agents/chat_completion/step7_chat_completion_agent_json_result.py new file mode 100644 index 000000000000..0f4e51202805 --- /dev/null +++ b/python/samples/getting_started_with_agents/chat_completion/step7_chat_completion_agent_json_result.py @@ -0,0 +1,102 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio + +from pydantic import BaseModel, ValidationError + +from semantic_kernel import Kernel +from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent +from semantic_kernel.agents.strategies import TerminationStrategy +from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion, OpenAIChatPromptExecutionSettings +from semantic_kernel.functions import KernelArguments + +""" +The following sample demonstrates how to configure an Agent Group Chat, and invoke an +agent with only a single turn.A custom termination strategy is provided where the model +is to rate the user input on creativity and expressiveness and end the chat when a score +of 70 or higher is provided. +""" + + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(OpenAIChatCompletion(service_id=service_id)) + return kernel + + +class InputScore(BaseModel): + """A model for the input score.""" + + score: int + notes: str + + +class ThresholdTerminationStrategy(TerminationStrategy): + """A strategy for determining when an agent should terminate.""" + + threshold: int = 70 + + async def should_agent_terminate(self, agent, history): + """Check if the agent should terminate.""" + try: + result = InputScore.model_validate_json(history[-1].content or "") + return result.score >= self.threshold + except ValidationError: + return False + + +INSTRUCTION = """ +Think step-by-step and rate the user input on creativity and expressiveness from 1-100 with some notes on improvements. +""" + +# Simulate a conversation with the agent +USER_INPUTS = { + "The sunset is very colorful.", + "The sunset is setting over the mountains.", + "The sunset is setting over the mountains and fills the sky with a deep red flame, setting the clouds ablaze.", +} + + +async def main(): + # 1. Create the instance of the Kernel to register a service + service_id = "agent" + kernel = _create_kernel_with_chat_completion(service_id) + + # 2. Configure the prompt execution settings to return the score in the desired format + settings = kernel.get_prompt_execution_settings_from_service_id(service_id) + assert isinstance(settings, OpenAIChatPromptExecutionSettings) # nosec + settings.response_format = InputScore + + # 3. Create the agent + agent = ChatCompletionAgent( + kernel=kernel, + name="Tutor", + instructions=INSTRUCTION, + arguments=KernelArguments(settings), + ) + + # 4. Create the group chat with the custom termination strategy + group_chat = AgentGroupChat(termination_strategy=ThresholdTerminationStrategy(maximum_iterations=10)) + + for user_input in USER_INPUTS: + # 5. Add the user input to the chat history + await group_chat.add_chat_message(message=user_input) + print(f"# User: {user_input}") + + # 6. Invoke the chat with the agent for a response + async for content in group_chat.invoke_single_turn(agent): + print(f"# {content.name}: {content.content}") + + """ + Sample output: + # User: The sunset is very colorful. + # Tutor: {"score":45,"notes":"The sentence 'The sunset is very colorful' is simple and direct. While it ..."} + # User: The sunset is setting over the mountains. + # Tutor: {"score":50,"notes":"This sentence provides a basic scene of a sunset over mountains, which ..."} + # User: The sunset is setting over the mountains and fills the sky with a deep red flame, setting the clouds ablaze. 
+ # Tutor: {"score":75,"notes":"This sentence demonstrates improved creativity and expressiveness by ..."} + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/chat_completion/step8_chat_completion_agent_logging.py b/python/samples/getting_started_with_agents/chat_completion/step8_chat_completion_agent_logging.py new file mode 100644 index 000000000000..50ff8574ba09 --- /dev/null +++ b/python/samples/getting_started_with_agents/chat_completion/step8_chat_completion_agent_logging.py @@ -0,0 +1,112 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging + +from semantic_kernel import Kernel +from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent +from semantic_kernel.agents.strategies import TerminationStrategy +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion + +""" +The following sample demonstrates how to create a simple, agent group chat that +utilizes An Art Director Chat Completion Agent along with a Copy Writer Chat +Completion Agent to complete a task. The main point of this sample is to note +how to enable logging to view all interactions between the agents and the model. +""" + +# 0. Enable logging +# NOTE: This is all that is required to enable logging. +# Set the desired level to INFO, DEBUG, etc. +logging.basicConfig(level=logging.INFO) + + +class ApprovalTerminationStrategy(TerminationStrategy): + """A strategy for determining when an agent should terminate.""" + + async def should_agent_terminate(self, agent, history): + """Check if the agent should terminate.""" + return "approved" in history[-1].content.lower() + + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel + + +REVIEWER_NAME = "ArtDirector" +REVIEWER_INSTRUCTIONS = """ +You are an art director who has opinions about copywriting born of a love for David Ogilvy. +The goal is to determine if the given copy is acceptable to print. +If so, state that it is approved. +If not, provide insight on how to refine suggested copy without example. +""" + +COPYWRITER_NAME = "CopyWriter" +COPYWRITER_INSTRUCTIONS = """ +You are a copywriter with ten years of experience and are known for brevity and a dry humor. +The goal is to refine and decide on the single best copy as an expert in the field. +Only provide a single proposal per response. +You're laser focused on the goal at hand. +Don't waste time with chit chat. +Consider suggestions when refining an idea. +""" + +TASK = "a slogan for a new line of electric cars." + + +async def main(): + # 1. Create the reviewer agent based on the chat completion service + agent_reviewer = ChatCompletionAgent( + kernel=_create_kernel_with_chat_completion("artdirector"), + name=REVIEWER_NAME, + instructions=REVIEWER_INSTRUCTIONS, + ) + + # 2. Create the copywriter agent based on the chat completion service + agent_writer = ChatCompletionAgent( + kernel=_create_kernel_with_chat_completion("copywriter"), + name=COPYWRITER_NAME, + instructions=COPYWRITER_INSTRUCTIONS, + ) + + # 3. Place the agents in a group chat with a custom termination strategy + group_chat = AgentGroupChat( + agents=[agent_writer, agent_reviewer], + termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), + ) + + # 4. 
Add the task as a message to the group chat + await group_chat.add_chat_message(message=TASK) + print(f"# User: {TASK}") + + # 5. Invoke the chat + async for content in group_chat.invoke(): + print(f"# {content.name}: {content.content}") + + """ + Sample output: + INFO:semantic_kernel.agents.group_chat.agent_chat:Adding `1` agent chat messages + # User: a slogan for a new line of electric cars. + INFO:semantic_kernel.agents.strategies.selection.sequential_selection_strategy:Selected agent at index 0 (ID: ... + INFO:semantic_kernel.agents.group_chat.agent_chat:Invoking agent CopyWriter + INFO:semantic_kernel.utils.telemetry.model_diagnostics.decorators:{"role": "system", "content": "\nYou are a ... + INFO:semantic_kernel.utils.telemetry.model_diagnostics.decorators:{"role": "user", "content": "a slogan for ... + INFO:semantic_kernel.connectors.ai.open_ai.services.open_ai_handler:OpenAI usage: CompletionUsage(completion_... + INFO:semantic_kernel.utils.telemetry.model_diagnostics.decorators:{"message": {"role": "assistant", "content": ... + INFO:semantic_kernel.agents.chat_completion.chat_completion_agent:[ChatCompletionAgent] Invoked AzureChatCompl... + INFO:semantic_kernel.agents.strategies.termination.termination_strategy:Evaluating termination criteria for ... + INFO:semantic_kernel.agents.strategies.termination.termination_strategy:Agent 598d827e-ce5e-44fa-879b-42793bb... + # CopyWriter: "Drive Change. Literally." + INFO:semantic_kernel.agents.strategies.selection.sequential_selection_strategy:Selected agent at index 1 (ID: ... + INFO:semantic_kernel.agents.group_chat.agent_chat:Invoking agent ArtDirector + INFO:semantic_kernel.utils.telemetry.model_diagnostics.decorators:{"role": "system", "content": "\nYou are an ... + INFO:semantic_kernel.utils.telemetry.model_diagnostics.decorators:{"role": "user", "content": "a slogan for a ... + INFO:semantic_kernel.utils.telemetry.model_diagnostics.decorators:{"role": "assistant", "content": "\"Drive ... + ... + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/chat_completion/step9_chat_completion_agent_structured_outputs.py b/python/samples/getting_started_with_agents/chat_completion/step9_chat_completion_agent_structured_outputs.py new file mode 100644 index 000000000000..c01085152999 --- /dev/null +++ b/python/samples/getting_started_with_agents/chat_completion/step9_chat_completion_agent_structured_outputs.py @@ -0,0 +1,112 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import json + +from pydantic import BaseModel + +from semantic_kernel.agents import ChatCompletionAgent +from semantic_kernel.connectors.ai.open_ai import ( + AzureChatCompletion, + AzureChatPromptExecutionSettings, +) +from semantic_kernel.contents import ChatHistory +from semantic_kernel.functions.kernel_arguments import KernelArguments + +""" +The following sample demonstrates how to create a chat completion agent that +answers user questions using structured outputs. The `Reasoning` model is defined +on the prompt execution settings. The settings are then passed into the agent +via the `KernelArguments` object. + +The interaction with the agent is via the `get_response` method, which sends a +user input to the agent and receives a response from the agent. The conversation +history needs to be maintained by the caller in the chat history object. 
+""" + + +# Define the BaseModel we will use for structured outputs +class Step(BaseModel): + explanation: str + output: str + + +class Reasoning(BaseModel): + steps: list[Step] + final_answer: str + + +# Simulate a conversation with the agent +USER_INPUT = "how can I solve 8x + 7y = -23, and 4x=12?" + + +async def main(): + # 1. Create the prompt settings + settings = AzureChatPromptExecutionSettings() + settings.response_format = Reasoning + + # 2. Create the agent by specifying the service + agent = ChatCompletionAgent( + service=AzureChatCompletion(), + name="Assistant", + instructions="Answer the user's questions.", + arguments=KernelArguments(settings=settings), + ) + + # 2. Create a chat history to hold the conversation + chat_history = ChatHistory() + + # 3. Add the user input to the chat history + chat_history.add_user_message(USER_INPUT) + print(f"# User: {USER_INPUT}") + # 4. Invoke the agent for a response + response = await agent.get_response(chat_history) + # 5. Validate the response and print the structured output + reasoned_result = Reasoning.model_validate(json.loads(response.content)) + print(f"# {response.name}:\n\n{reasoned_result.model_dump_json(indent=4)}") + # 6. Add the agent response to the chat history + chat_history.add_message(response) + + """ + Sample output: + # User: how can I solve 8x + 7y = -23, and 4x=12? + # Assistant: + + { + "steps": [ + { + "explanation": "The second equation 4x = 12 can be solved for x by dividing both sides by 4.", + "output": "x = 3." + }, + { + "explanation": "Substitute x = 3 from the second equation into the first equation 8x + 7y = -23.", + "output": "8(3) + 7y = -23." + }, + { + "explanation": "Calculate 8 times 3 to simplify the equation.", + "output": "24 + 7y = -23." + }, + { + "explanation": "Subtract 24 from both sides to isolate the term with y.", + "output": "7y = -23 - 24." + }, + { + "explanation": "Perform the subtraction.", + "output": "7y = -47." + }, + { + "explanation": "Divide both sides by 7 to solve for y.", + "output": "y = -47 / 7." + }, + { + "explanation": "Simplify the division to get the value of y.", + "output": "y = -6.714285714285714 (approximately -6.71)." + } + ], + "final_answer": "The solution to the system of equations is x = 3 and y = -6.71." + } + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/openai_assistant/README.md b/python/samples/getting_started_with_agents/openai_assistant/README.md new file mode 100644 index 000000000000..6689c05f9f4b --- /dev/null +++ b/python/samples/getting_started_with_agents/openai_assistant/README.md @@ -0,0 +1,101 @@ +## OpenAI Assistant Agents + +The following getting started samples show how to use OpenAI Assistant agents with Semantic Kernel. + +## Assistants API Overview + +The Assistants API is a robust solution from OpenAI that empowers developers to integrate powerful, purpose-built AI assistants into their applications. It streamlines the development process by handling conversation histories, managing threads, and providing seamless access to advanced tools. + +### Key Features + +- **Purpose-Built AI Assistants:** + Assistants are specialized AIs that leverage OpenAI’s models to interact with users, access files, maintain persistent threads, and call additional tools. This enables highly tailored and effective user interactions. 
+ +- **Simplified Conversation Management:** + The concept of a **thread** -- a dedicated conversation session between an assistant and a user -- ensures that message history is managed automatically. Threads optimize the conversation context by storing and truncating messages as needed. + +- **Integrated Tool Access:** + The API provides built-in tools such as: + - **Code Interpreter:** Allows the assistant to execute code, enhancing its ability to solve complex tasks. + - **File Search:** Implements best practices for retrieving data from uploaded files, including advanced chunking and embedding techniques. + +- **Enhanced Function Calling:** + With improved support for third-party tool integration, the Assistants API enables assistants to extend their capabilities beyond native functions. + +For more detailed technical information, refer to the [Assistants API](https://platform.openai.com/docs/assistants/overview). + +### Semantic Kernel OpenAI Assistant Agents + +OpenAI Assistant Agents are created in the following way: + +```python +from semantic_kernel.agents.open_ai import OpenAIAssistantAgent + +# Create the client using OpenAI resources and configuration +client, model = OpenAIAssistantAgent.setup_resources() + +# Create the assistant definition +definition = await client.beta.assistants.create( + model=model, + instructions="", + name="", +) + +# Define the Semantic Kernel OpenAI Assistant Agent +agent = OpenAIAssistantAgent( + client=client, + definition=definition, +) + +# Define a thread and invoke the agent with the user input +thread = await agent.client.beta.threads.create() + +# Add a message to the thread +await agent.add_chat_message(thread_id=thread.id, message="Why is the sky blue?") + +# Invoke the agent +async for content in agent.invoke(thread_id=thread.id): + print(f"# {content.role}: {content.content}") +``` + +### Semantic Kernel Azure Assistant Agents + +Azure Assistant Agents are currently in preview and require a `-preview` API version (minimum version: `2024-05-01-preview`). As new features are introduced, API versions will be updated accordingly. For the latest versioning details, please refer to the [Azure OpenAI API preview lifecycle](https://learn.microsoft.com/azure/ai-services/openai/api-version-deprecation). 
+
+To specify the correct API version, set the following environment variable (for example, in your `.env` file):
+
+```bash
+AZURE_OPENAI_API_VERSION="2025-01-01-preview"
+```
+
+Alternatively, you can pass the `api_version` parameter when creating an `AzureAssistantAgent`:
+
+```python
+from semantic_kernel.agents.open_ai import AzureAssistantAgent
+
+# Create the client using Azure OpenAI resources and configuration
+client, model = AzureAssistantAgent.setup_resources()
+
+# Create the assistant definition
+definition = await client.beta.assistants.create(
+    model=model,
+    instructions="",
+    name="",
+)
+
+# Define the Semantic Kernel Azure OpenAI Assistant Agent
+agent = AzureAssistantAgent(
+    client=client,
+    definition=definition,
+)
+
+# Define a thread and invoke the agent with the user input
+thread = await agent.client.beta.threads.create()
+
+# Add a message to the thread
+await agent.add_chat_message(thread_id=thread.id, message="Why is the sky blue?")
+
+# Invoke the agent
+async for content in agent.invoke(thread_id=thread.id):
+    print(f"# {content.role}: {content.content}")
+```
\ No newline at end of file
diff --git a/python/samples/getting_started_with_agents/openai_assistant/step1_assistant.py b/python/samples/getting_started_with_agents/openai_assistant/step1_assistant.py
new file mode 100644
index 000000000000..d46a127ee71a
--- /dev/null
+++ b/python/samples/getting_started_with_agents/openai_assistant/step1_assistant.py
@@ -0,0 +1,75 @@
+# Copyright (c) Microsoft. All rights reserved.
+import asyncio
+
+from semantic_kernel.agents.open_ai import AzureAssistantAgent
+
+"""
+The following sample demonstrates how to create an OpenAI assistant using either
+Azure OpenAI or OpenAI. The sample shows how to have the assistant answer
+questions about the world.
+
+The interaction with the agent is via the `get_response` method, which sends a
+user input to the agent and receives a response from the agent. The conversation
+history is maintained by the agent service, i.e., the responses are automatically
+associated with the thread. Therefore, client code does not need to maintain the
+conversation history.
+"""
+
+# Simulate a conversation with the agent
+USER_INPUTS = [
+    "Why is the sky blue?",
+    "What is the speed of light?",
+]
+
+
+async def main():
+    # 1. Create the client using Azure OpenAI resources and configuration
+    client, model = AzureAssistantAgent.setup_resources()
+
+    # 2. Create the assistant on the Azure OpenAI service
+    definition = await client.beta.assistants.create(
+        model=model,
+        instructions="Answer questions about the world in one sentence.",
+        name="Assistant",
+    )
+
+    # 3. Create a Semantic Kernel agent for the Azure OpenAI assistant
+    agent = AzureAssistantAgent(
+        client=client,
+        definition=definition,
+    )
+
+    # 4. Create a new thread on the Azure OpenAI assistant service
+    thread = await agent.client.beta.threads.create()
+
+    try:
+        for user_input in USER_INPUTS:
+            # 5. Add the user input to the chat thread
+            await agent.add_chat_message(
+                thread_id=thread.id,
+                message=user_input,
+            )
+            print(f"# User: '{user_input}'")
+            # 6. Invoke the agent for the current thread and print the response
+            response = await agent.get_response(thread_id=thread.id)
+            print(f"# {response.name}: {response.content}")
+
+    finally:
+        # 7.
Clean up the resources + await agent.client.beta.threads.delete(thread.id) + await agent.client.beta.assistants.delete(assistant_id=agent.id) + + """ + You should see output similar to the following: + + # User: 'Why is the sky blue?' + # Agent: The sky appears blue because molecules in the atmosphere scatter sunlight in all directions, and blue + light is scattered more than other colors because it travels in shorter, smaller waves. + # User: 'What is the speed of light?' + # Agent: The speed of light in a vacuum is approximately 299,792,458 meters per second + (about 186,282 miles per second). + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/openai_assistant/step2_assistant_plugins.py b/python/samples/getting_started_with_agents/openai_assistant/step2_assistant_plugins.py new file mode 100644 index 000000000000..a9ea4f10c9b2 --- /dev/null +++ b/python/samples/getting_started_with_agents/openai_assistant/step2_assistant_plugins.py @@ -0,0 +1,99 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio +from typing import Annotated + +from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.functions import kernel_function + +""" +The following sample demonstrates how to create an OpenAI +assistant using either Azure OpenAI or OpenAI. The sample +shows how to use a Semantic Kernel plugin as part of the +OpenAI Assistant. +""" + + +# Define a sample plugin for the sample +class MenuPlugin: + """A sample Menu Plugin used for the concept sample.""" + + @kernel_function(description="Provides a list of specials from the menu.") + def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: + return """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """ + + @kernel_function(description="Provides the price of the requested menu item.") + def get_item_price( + self, menu_item: Annotated[str, "The name of the menu item."] + ) -> Annotated[str, "Returns the price of the menu item."]: + return "$9.99" + + +# Simulate a conversation with the agent +USER_INPUTS = [ + "Hello", + "What is the special soup?", + "What is the special drink?", + "How much is it?", + "Thank you", +] + + +async def main(): + # 1. Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + # 2. Create the assistant on the Azure OpenAI service + definition = await client.beta.assistants.create( + model=model, + instructions="Answer questions about the menu.", + name="Host", + ) + + # 3. Create a Semantic Kernel agent for the Azure OpenAI assistant + agent = AzureAssistantAgent( + client=client, + definition=definition, + plugins=[MenuPlugin()], # The plugins can be passed in as a list to the constructor + ) + # Note: plugins can also be configured on the Kernel and passed in as a parameter to the OpenAIAssistantAgent + + # 4. Create a new thread on the Azure OpenAI assistant service + thread = await agent.client.beta.threads.create() + + try: + for user_input in USER_INPUTS: + # 5. Add the user input to the chat thread + await agent.add_chat_message( + thread_id=thread.id, + message=user_input, + ) + print(f"# User: '{user_input}'") + # 6. Invoke the agent for the current thread and print the response + async for content in agent.invoke(thread_id=thread.id): + print(f"# Agent: {content.content}") + finally: + # 7. 
Clean up the resources + await agent.client.beta.threads.delete(thread.id) + await agent.client.beta.assistants.delete(assistant_id=agent.id) + + """ + You should see output similar to the following: + + # User: 'Hello' + # Agent: Hello! How can I assist you today? + # User: 'What is the special soup?' + # Agent: The special soup today is Clam Chowder. Would you like to know more about any other menu items? + # User: 'What is the special drink?' + # Agent: The special drink today is Chai Tea. Would you like more information on anything else? + # User: 'Thank you' + # Agent: You're welcome! If you have any more questions or need further assistance, feel free to ask. + Enjoy your day! + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/openai_assistant/step3_assistant_vision.py b/python/samples/getting_started_with_agents/openai_assistant/step3_assistant_vision.py new file mode 100644 index 000000000000..8881ee3e6d4e --- /dev/null +++ b/python/samples/getting_started_with_agents/openai_assistant/step3_assistant_vision.py @@ -0,0 +1,87 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os + +from semantic_kernel.agents.open_ai import OpenAIAssistantAgent +from semantic_kernel.contents import AuthorRole, ChatMessageContent, FileReferenceContent, ImageContent, TextContent + +""" +The following sample demonstrates how to create an OpenAI +assistant using OpenAI configuration, and leverage the +multi-modal content types to have the assistant describe images +and answer questions about them. This sample uses non-streaming responses. +""" + + +async def main(): + # 1. Create the OpenAI Assistant Agent client + # Note Azure OpenAI doesn't support vision files yet + client, model = OpenAIAssistantAgent.setup_resources() + + # 2. Load a sample image of a cat used for the assistant to describe + file_path = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "resources", "cat.jpg") + + with open(file_path, "rb") as file: + file = await client.files.create(file=file, purpose="assistants") + + # 3. Create the assistant on the OpenAI service + definition = await client.beta.assistants.create( + model=model, + instructions="Answer questions about the provided images.", + name="Vision", + ) + + # 4. Create a Semantic Kernel agent for the OpenAI assistant + agent = OpenAIAssistantAgent( + client=client, + definition=definition, + ) + + # 5. Create a new thread on the OpenAI assistant service + thread = await agent.client.beta.threads.create() + + # 6. Define the user messages with the image content to simulate the conversation + user_messages = { + ChatMessageContent( + role=AuthorRole.USER, + items=[ + TextContent(text="Describe this image."), + ImageContent( + uri="https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/New_york_times_square-terabass.jpg/1200px-New_york_times_square-terabass.jpg" + ), + ], + ), + ChatMessageContent( + role=AuthorRole.USER, + items=[ + TextContent(text="What is the main color in this image?"), + ImageContent(uri="https://upload.wikimedia.org/wikipedia/commons/5/56/White_shark.jpg"), + ], + ), + ChatMessageContent( + role=AuthorRole.USER, + items=[ + TextContent(text="Is there an animal in this image?"), + FileReferenceContent(file_id=file.id), + ], + ), + } + + try: + for message in user_messages: + # 7. 
Add the user input to the chat thread
+            await agent.add_chat_message(thread_id=thread.id, message=message)
+            print(f"# User: {str(message)}")  # type: ignore
+            # 8. Invoke the agent for the current thread and print the response
+            async for content in agent.invoke(thread_id=thread.id):
+                print(f"# Agent: {content.content}\n")
+    finally:
+        # 9. Clean up the resources
+        await client.files.delete(file.id)
+        await agent.client.beta.threads.delete(thread.id)
+        await agent.client.beta.assistants.delete(assistant_id=agent.id)
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
diff --git a/python/samples/getting_started_with_agents/openai_assistant/step4_assistant_tool_code_interpreter.py b/python/samples/getting_started_with_agents/openai_assistant/step4_assistant_tool_code_interpreter.py
new file mode 100644
index 000000000000..e8a542a8045f
--- /dev/null
+++ b/python/samples/getting_started_with_agents/openai_assistant/step4_assistant_tool_code_interpreter.py
@@ -0,0 +1,58 @@
+# Copyright (c) Microsoft. All rights reserved.
+import asyncio
+
+from semantic_kernel.agents.open_ai import AzureAssistantAgent
+
+"""
+The following sample demonstrates how to create an OpenAI
+assistant using either Azure OpenAI or OpenAI and leverage the
+assistant's code interpreter functionality to have it write
+Python code to print Fibonacci numbers.
+"""
+
+TASK = "Use code to determine the values in the Fibonacci sequence that are less than the value of 101."
+
+
+async def main():
+    # 1. Create the client using Azure OpenAI resources and configuration
+    client, model = AzureAssistantAgent.setup_resources()
+
+    # 2. Configure the code interpreter tool and resources for the Assistant
+    code_interpreter_tool, code_interpreter_tool_resources = AzureAssistantAgent.configure_code_interpreter_tool()
+
+    # 3. Create the assistant on the Azure OpenAI service
+    definition = await client.beta.assistants.create(
+        model=model,
+        name="CodeRunner",
+        instructions="Run the provided request as code and return the result.",
+        tools=code_interpreter_tool,
+        tool_resources=code_interpreter_tool_resources,
+    )
+
+    # 4. Create a Semantic Kernel agent for the Azure OpenAI assistant
+    agent = AzureAssistantAgent(
+        client=client,
+        definition=definition,
+    )
+
+    # 5. Create a new thread on the Azure OpenAI assistant service
+    thread = await agent.client.beta.threads.create()
+
+    print(f"# User: '{TASK}'")
+    try:
+        # 6. Add the user input to the chat thread
+        await agent.add_chat_message(
+            thread_id=thread.id,
+            message=TASK,
+        )
+        # 7. Invoke the agent for the current thread and print the response
+        async for content in agent.invoke(thread_id=thread.id):
+            print(f"# Agent: {content.content}")
+    finally:
+        # 8. Clean up the resources
+        await agent.client.beta.threads.delete(thread.id)
+        await agent.client.beta.assistants.delete(agent.id)
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
diff --git a/python/samples/getting_started_with_agents/openai_assistant/step5_assistant_tool_file_search.py b/python/samples/getting_started_with_agents/openai_assistant/step5_assistant_tool_file_search.py
new file mode 100644
index 000000000000..ef841fad2bf5
--- /dev/null
+++ b/python/samples/getting_started_with_agents/openai_assistant/step5_assistant_tool_file_search.py
@@ -0,0 +1,80 @@
+# Copyright (c) Microsoft. All rights reserved.
+ +import asyncio +import os + +from semantic_kernel.agents.open_ai import AzureAssistantAgent + +""" +The following sample demonstrates how to create an OpenAI +Assistant using either Azure OpenAI or OpenAI and leverage the +assistant's file search functionality. +""" + +# Simulate a conversation with the agent +USER_INPUTS = { + "Who is the youngest employee?", + "Who works in sales?", + "I have a customer request, who can help me?", +} + + +async def main(): + # 1. Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + # 2. Read and upload the file to the Azure OpenAI assistant service + pdf_file_path = os.path.join( + os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "resources", "employees.pdf" + ) + + with open(pdf_file_path, "rb") as file: + file = await client.files.create(file=file, purpose="assistants") + + vector_store = await client.beta.vector_stores.create( + name="step4_assistant_file_search", + file_ids=[file.id], + ) + + # 3. Create file search tool with uploaded resources + file_search_tool, file_search_tool_resources = AzureAssistantAgent.configure_file_search_tool(vector_store.id) + + # 4. Create the assistant on the Azure OpenAI service with the file search tool + definition = await client.beta.assistants.create( + model=model, + instructions="Find answers to the user's questions in the provided file.", + name="FileSearch", + tools=file_search_tool, + tool_resources=file_search_tool_resources, + ) + + # 5. Create a Semantic Kernel agent for the Azure OpenAI assistant + agent = AzureAssistantAgent( + client=client, + definition=definition, + ) + + # 6. Create a new thread on the Azure OpenAI assistant service + thread = await agent.client.beta.threads.create() + + try: + for user_input in USER_INPUTS: + # 7. Add the user input to the chat thread + await agent.add_chat_message( + thread_id=thread.id, + message=user_input, + ) + print(f"# User: '{user_input}'") + # 8. Invoke the agent for the current thread and print the response + async for content in agent.invoke(thread_id=thread.id): + print(f"# Agent: {content.content}") + finally: + # 9. 
Clean up the resources + await client.files.delete(file.id) + await client.beta.vector_stores.delete(vector_store.id) + await client.beta.threads.delete(thread.id) + await client.beta.assistants.delete(agent.id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/resources/countries.json b/python/samples/getting_started_with_agents/resources/countries.json new file mode 100644 index 000000000000..b88d5040750a --- /dev/null +++ b/python/samples/getting_started_with_agents/resources/countries.json @@ -0,0 +1,46 @@ +{ + "openapi": "3.1.0", + "info": { + "title": "RestCountries.NET API", + "description": "Web API version 3.1 for managing country items, based on previous implementations from restcountries.eu and restcountries.com.", + "version": "v3.1" + }, + "servers": [ + { "url": "https://restcountries.net" } + ], + "auth": [], + "paths": { + "/v3.1/currency": { + "get": { + "description": "Search by currency.", + "operationId": "LookupCountryByCurrency", + "parameters": [ + { + "name": "currency", + "in": "query", + "description": "The currency to search for.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Success", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + } + } + } + } + }, + "components": { + "schemes": {} + } +} \ No newline at end of file diff --git a/python/samples/getting_started_with_agents/resources/weather.json b/python/samples/getting_started_with_agents/resources/weather.json new file mode 100644 index 000000000000..c3009f417de4 --- /dev/null +++ b/python/samples/getting_started_with_agents/resources/weather.json @@ -0,0 +1,62 @@ +{ + "openapi": "3.1.0", + "info": { + "title": "get weather data", + "description": "Retrieves current weather data for a location based on wttr.in.", + "version": "v1.0.0" + }, + "servers": [ + { + "url": "https://wttr.in" + } + ], + "auth": [], + "paths": { + "/{location}": { + "get": { + "description": "Get weather information for a specific location", + "operationId": "GetCurrentWeather", + "parameters": [ + { + "name": "location", + "in": "path", + "description": "City or location to retrieve the weather for", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "format", + "in": "query", + "description": "Always use j1 value for this parameter", + "required": true, + "schema": { + "type": "string", + "default": "j1" + } + } + ], + "responses": { + "200": { + "description": "Successful response", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + }, + "404": { + "description": "Location not found" + } + }, + "deprecated": false + } + } + }, + "components": { + "schemes": {} + } +} \ No newline at end of file diff --git a/python/samples/getting_started_with_agents/step10_assistant_tool_file_search.py b/python/samples/getting_started_with_agents/step10_assistant_tool_file_search.py deleted file mode 100644 index 3ac413f92400..000000000000 --- a/python/samples/getting_started_with_agents/step10_assistant_tool_file_search.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
-import asyncio -import os - -from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent -from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -##################################################################### -# The following sample demonstrates how to create an OpenAI # -# assistant using either Azure OpenAI or OpenAI and leverage the # -# assistant's file search functionality. # -##################################################################### - - -AGENT_NAME = "FileSearch" -AGENT_INSTRUCTIONS = "Find answers to the user's questions in the provided file." - -# Note: you may toggle this to switch between AzureOpenAI and OpenAI -use_azure_openai = True - - -# A helper method to invoke the agent with the user input -async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None: - """Invoke the agent with the user input.""" - await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input)) - - print(f"# {AuthorRole.USER}: '{input}'") - - async for content in agent.invoke(thread_id=thread_id): - if content.role != AuthorRole.TOOL: - print(f"# {content.role}: {content.content}") - - -async def main(): - # Create the instance of the Kernel - kernel = Kernel() - - # Define a service_id for the sample - service_id = "agent" - - # Get the path to the travelinfo.txt file - pdf_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "resources", "employees.pdf") - - # Create the agent configuration - if use_azure_openai: - agent = await AzureAssistantAgent.create( - kernel=kernel, - service_id=service_id, - name=AGENT_NAME, - instructions=AGENT_INSTRUCTIONS, - enable_file_search=True, - vector_store_filenames=[pdf_file_path], - ) - else: - agent = await OpenAIAssistantAgent.create( - kernel=kernel, - service_id=service_id, - name=AGENT_NAME, - instructions=AGENT_INSTRUCTIONS, - enable_file_search=True, - vector_store_filenames=[pdf_file_path], - ) - - # Define a thread and invoke the agent with the user input - thread_id = await agent.create_thread() - - try: - await invoke_agent(agent, thread_id=thread_id, input="Who is the youngest employee?") - await invoke_agent(agent, thread_id=thread_id, input="Who works in sales?") - await invoke_agent(agent, thread_id=thread_id, input="I have a customer request, who can help me?") - finally: - [await agent.delete_file(file_id) for file_id in agent.file_search_file_ids] - await agent.delete_thread(thread_id) - await agent.delete() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/step1_agent.py b/python/samples/getting_started_with_agents/step1_agent.py deleted file mode 100644 index 28d19a45df1f..000000000000 --- a/python/samples/getting_started_with_agents/step1_agent.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio -from functools import reduce - -from semantic_kernel.agents import ChatCompletionAgent -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -################################################################### -# The following sample demonstrates how to create a simple, # -# non-group agent that repeats the user message in the voice # -# of a pirate and then ends with a parrot sound. # -################################################################### - -# To toggle streaming or non-streaming mode, change the following boolean -streaming = True - -# Define the agent name and instructions -PARROT_NAME = "Parrot" -PARROT_INSTRUCTIONS = "Repeat the user message in the voice of a pirate and then end with a parrot sound." - - -async def invoke_agent(agent: ChatCompletionAgent, input: str, chat: ChatHistory): - """Invoke the agent with the user input.""" - chat.add_user_message(input) - - print(f"# {AuthorRole.USER}: '{input}'") - - if streaming: - contents = [] - content_name = "" - async for content in agent.invoke_stream(chat): - content_name = content.name - contents.append(content) - streaming_chat_message = reduce(lambda first, second: first + second, contents) - print(f"# {content.role} - {content_name or '*'}: '{streaming_chat_message}'") - chat.add_message(streaming_chat_message) - else: - async for content in agent.invoke(chat): - print(f"# {content.role} - {content.name or '*'}: '{content.content}'") - chat.add_message(content) - - -async def main(): - # Create the instance of the Kernel - kernel = Kernel() - - # Add the OpenAIChatCompletion AI Service to the Kernel - kernel.add_service(AzureChatCompletion(service_id="agent")) - - # Create the agent - agent = ChatCompletionAgent(service_id="agent", kernel=kernel, name=PARROT_NAME, instructions=PARROT_INSTRUCTIONS) - - # Define the chat history - chat = ChatHistory() - - # Respond to user input - await invoke_agent(agent, "Fortune favors the bold.", chat) - await invoke_agent(agent, "I came, I saw, I conquered.", chat) - await invoke_agent(agent, "Practice makes perfect.", chat) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/step2_plugins.py b/python/samples/getting_started_with_agents/step2_plugins.py deleted file mode 100644 index 53772408211d..000000000000 --- a/python/samples/getting_started_with_agents/step2_plugins.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -from typing import Annotated - -from semantic_kernel.agents import ChatCompletionAgent -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.functions.kernel_function_decorator import kernel_function -from semantic_kernel.kernel import Kernel - -################################################################### -# The following sample demonstrates how to create a simple, # -# non-group agent that utilizes plugins defined as part of # -# the Kernel. 
# -################################################################### - -# This sample allows for a streaming response verus a non-streaming response -streaming = True - -# Define the agent name and instructions -HOST_NAME = "Host" -HOST_INSTRUCTIONS = "Answer questions about the menu." - - -# Define a sample plugin for the sample -class MenuPlugin: - """A sample Menu Plugin used for the concept sample.""" - - @kernel_function(description="Provides a list of specials from the menu.") - def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: - return """ - Special Soup: Clam Chowder - Special Salad: Cobb Salad - Special Drink: Chai Tea - """ - - @kernel_function(description="Provides the price of the requested menu item.") - def get_item_price( - self, menu_item: Annotated[str, "The name of the menu item."] - ) -> Annotated[str, "Returns the price of the menu item."]: - return "$9.99" - - -# A helper method to invoke the agent with the user input -async def invoke_agent(agent: ChatCompletionAgent, input: str, chat: ChatHistory) -> None: - """Invoke the agent with the user input.""" - chat.add_user_message(input) - - print(f"# {AuthorRole.USER}: '{input}'") - - if streaming: - contents = [] - content_name = "" - async for content in agent.invoke_stream(chat): - content_name = content.name - contents.append(content) - message_content = "".join([content.content for content in contents]) - print(f"# {content.role} - {content_name or '*'}: '{message_content}'") - chat.add_assistant_message(message_content) - else: - async for content in agent.invoke(chat): - print(f"# {content.role} - {content.name or '*'}: '{content.content}'") - chat.add_message(content) - - -async def main(): - # Create the instance of the Kernel - kernel = Kernel() - - service_id = "agent" - kernel.add_service(AzureChatCompletion(service_id=service_id)) - - settings = kernel.get_prompt_execution_settings_from_service_id(service_id=service_id) - # Configure the function choice behavior to auto invoke kernel functions - settings.function_choice_behavior = FunctionChoiceBehavior.Auto() - - kernel.add_plugin(MenuPlugin(), plugin_name="menu") - - # Create the agent - agent = ChatCompletionAgent( - service_id="agent", kernel=kernel, name=HOST_NAME, instructions=HOST_INSTRUCTIONS, execution_settings=settings - ) - - # Define the chat history - chat = ChatHistory() - - # Respond to user input - await invoke_agent(agent, "Hello", chat) - await invoke_agent(agent, "What is the special soup?", chat) - await invoke_agent(agent, "What is the special drink?", chat) - await invoke_agent(agent, "Thank you", chat) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/step5_json_result.py b/python/samples/getting_started_with_agents/step5_json_result.py deleted file mode 100644 index 10edc9f2198f..000000000000 --- a/python/samples/getting_started_with_agents/step5_json_result.py +++ /dev/null @@ -1,106 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio - -from pydantic import ValidationError - -from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy -from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel -from semantic_kernel.kernel_pydantic import KernelBaseModel - -################################################################### -# The following sample demonstrates how to configure an Agent # -# Group Chat, and invoke an agent with only a single turn. # -# A custom termination strategy is provided where the model is # -# to rate the user input on creativity and expressiveness # -# and end the chat when a score of 70 or higher is provided. # -################################################################### - - -SCORE_COMPLETED_THRESHOLD = 70 -TUTOR_NAME = "Tutor" -TUTOR_INSTRUCTIONS = """ -Think step-by-step and rate the user input on creativity and expressivness from 1-100. - -Respond in JSON format with the following JSON schema: - -{ - "score": "integer (1-100)", - "notes": "the reason for your score" -} -""" - - -class InputScore(KernelBaseModel): - """A model for the input score.""" - - score: int - notes: str - - -def translate_json(json_string: str) -> InputScore | None: - try: - if json_string is None: - return None - return InputScore.model_validate_json(json_string) - except ValidationError: - return None - - -class ThresholdTerminationStrategy(TerminationStrategy): - """A strategy for determining when an agent should terminate.""" - - async def should_agent_terminate(self, agent, history): - """Check if the agent should terminate.""" - last_message_content = history[-1].content or "" - result = translate_json(last_message_content) - return result.score >= SCORE_COMPLETED_THRESHOLD if result else False - - -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) - return kernel - - -async def invoke_agent(agent: ChatCompletionAgent, input: str, chat: AgentGroupChat): - """Invoke the agent with the user input.""" - await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) - - print(f"# {AuthorRole.USER}: '{input}'") - - async for content in chat.invoke_single_turn(agent): - print(f"# {content.role} - {content.name or '*'}: '{content.content}'") - print(f"# IS COMPLETE: {chat.is_complete}") - - -async def main(): - service_id = "tutor" - agent = ChatCompletionAgent( - service_id=service_id, - kernel=_create_kernel_with_chat_completion(service_id=service_id), - name=TUTOR_NAME, - instructions=TUTOR_INSTRUCTIONS, - ) - - # Here a TerminationStrategy subclass is used that will terminate when - # the response includes a score that is greater than or equal to 70. 
- termination_strategy = ThresholdTerminationStrategy(maximum_iterations=10) - - chat = AgentGroupChat(termination_strategy=termination_strategy) - - await invoke_agent(agent=agent, input="The sunset is very colorful.", chat=chat) - await invoke_agent(agent=agent, input="The sunset is setting over the mountains.", chat=chat) - await invoke_agent( - agent=agent, - input="The sunset is setting over the mountains and filled the sky with a deep red flame, setting the clouds ablaze.", # noqa: E501 - chat=chat, - ) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/step6_logging.py b/python/samples/getting_started_with_agents/step6_logging.py deleted file mode 100644 index 197bcd72ab8e..000000000000 --- a/python/samples/getting_started_with_agents/step6_logging.py +++ /dev/null @@ -1,93 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import logging - -from semantic_kernel.agents import AgentGroupChat -from semantic_kernel.agents.chat_completion.chat_completion_agent import ChatCompletionAgent -from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy -from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -################################################################### -# The following sample demonstrates how to create a simple, # -# agent group chat that utilizes An Art Director Chat Completion # -# Agent along with a Copy Writer Chat Completion Agent to # -# complete a task. The main point of this sample is to note how # -# to enable logging to view all interactions between the agents # -# and the model. # -################################################################### - - -# NOTE: This is all that is required to enable logging -logging.basicConfig(level=logging.DEBUG) - - -class ApprovalTerminationStrategy(TerminationStrategy): - """A strategy for determining when an agent should terminate.""" - - async def should_agent_terminate(self, agent, history): - """Check if the agent should terminate.""" - return "approved" in history[-1].content.lower() - - -REVIEWER_NAME = "ArtDirector" -REVIEWER_INSTRUCTIONS = """ -You are an art director who has opinions about copywriting born of a love for David Ogilvy. -The goal is to determine if the given copy is acceptable to print. -If so, state that it is approved. -If not, provide insight on how to refine suggested copy without example. -""" - -COPYWRITER_NAME = "CopyWriter" -COPYWRITER_INSTRUCTIONS = """ -You are a copywriter with ten years of experience and are known for brevity and a dry humor. -The goal is to refine and decide on the single best copy as an expert in the field. -Only provide a single proposal per response. -You're laser focused on the goal at hand. -Don't waste time with chit chat. -Consider suggestions when refining an idea. 
-""" - - -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) - return kernel - - -async def main(): - agent_reviewer = ChatCompletionAgent( - service_id="artdirector", - kernel=_create_kernel_with_chat_completion("artdirector"), - name=REVIEWER_NAME, - instructions=REVIEWER_INSTRUCTIONS, - ) - - agent_writer = ChatCompletionAgent( - service_id="copywriter", - kernel=_create_kernel_with_chat_completion("copywriter"), - name=COPYWRITER_NAME, - instructions=COPYWRITER_INSTRUCTIONS, - ) - - chat = AgentGroupChat( - agents=[agent_writer, agent_reviewer], - termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), - ) - - input = "a slogan for a new line of electric cars." - - await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) - print(f"# {AuthorRole.USER}: '{input}'") - - async for content in chat.invoke(): - print(f"# {content.role} - {content.name or '*'}: '{content.content}'") - - print(f"# IS COMPLETE: {chat.is_complete}") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/step7_assistant.py b/python/samples/getting_started_with_agents/step7_assistant.py deleted file mode 100644 index 67235c0dcf3c..000000000000 --- a/python/samples/getting_started_with_agents/step7_assistant.py +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio -from typing import Annotated - -from semantic_kernel.agents.open_ai import AzureAssistantAgent, OpenAIAssistantAgent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.functions.kernel_function_decorator import kernel_function -from semantic_kernel.kernel import Kernel - -##################################################################### -# The following sample demonstrates how to create an OpenAI # -# assistant using either Azure OpenAI or OpenAI. OpenAI Assistants # -# allow for function calling, the use of file search and a # -# code interpreter. Assistant Threads are used to manage the # -# conversation state, similar to a Semantic Kernel Chat History. # -##################################################################### - -HOST_NAME = "Host" -HOST_INSTRUCTIONS = "Answer questions about the menu." 
- -# Note: you may toggle this to switch between AzureOpenAI and OpenAI -use_azure_openai = False - - -# Define a sample plugin for the sample -class MenuPlugin: - """A sample Menu Plugin used for the concept sample.""" - - @kernel_function(description="Provides a list of specials from the menu.") - def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: - return """ - Special Soup: Clam Chowder - Special Salad: Cobb Salad - Special Drink: Chai Tea - """ - - @kernel_function(description="Provides the price of the requested menu item.") - def get_item_price( - self, menu_item: Annotated[str, "The name of the menu item."] - ) -> Annotated[str, "Returns the price of the menu item."]: - return "$9.99" - - -# A helper method to invoke the agent with the user input -async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None: - """Invoke the agent with the user input.""" - await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input)) - - print(f"# {AuthorRole.USER}: '{input}'") - - async for content in agent.invoke(thread_id=thread_id): - if content.role != AuthorRole.TOOL: - print(f"# {content.role}: {content.content}") - - -async def main(): - # Create the instance of the Kernel - kernel = Kernel() - - # Add the sample plugin to the kernel - kernel.add_plugin(plugin=MenuPlugin(), plugin_name="menu") - - # Create the OpenAI Assistant Agent - service_id = "agent" - if use_azure_openai: - agent = await AzureAssistantAgent.create( - kernel=kernel, service_id=service_id, name=HOST_NAME, instructions=HOST_INSTRUCTIONS - ) - else: - agent = await OpenAIAssistantAgent.create( - kernel=kernel, service_id=service_id, name=HOST_NAME, instructions=HOST_INSTRUCTIONS - ) - - thread_id = await agent.create_thread() - - try: - await invoke_agent(agent, thread_id=thread_id, input="Hello") - await invoke_agent(agent, thread_id=thread_id, input="What is the special soup?") - await invoke_agent(agent, thread_id=thread_id, input="What is the special drink?") - await invoke_agent(agent, thread_id=thread_id, input="Thank you") - finally: - await agent.delete_thread(thread_id) - await agent.delete() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/step8_assistant_vision.py b/python/samples/getting_started_with_agents/step8_assistant_vision.py deleted file mode 100644 index ac7bf34d7e48..000000000000 --- a/python/samples/getting_started_with_agents/step8_assistant_vision.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio -import os - -from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.file_reference_content import FileReferenceContent -from semantic_kernel.contents.image_content import ImageContent -from semantic_kernel.contents.text_content import TextContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -##################################################################### -# The following sample demonstrates how to create an OpenAI # -# assistant using either Azure OpenAI or OpenAI and leverage the # -# multi-modal content types to have the assistant describe images # -# and answer questions about them. 
# -##################################################################### - -HOST_NAME = "Host" -HOST_INSTRUCTIONS = "Answer questions about the menu." - - -def create_message_with_image_url(input: str, url: str) -> ChatMessageContent: - return ChatMessageContent( - role=AuthorRole.USER, - items=[TextContent(text=input), ImageContent(uri=url)], - ) - - -def create_message_with_image_reference(input: str, file_id: str) -> ChatMessageContent: - return ChatMessageContent( - role=AuthorRole.USER, - items=[TextContent(text=input), FileReferenceContent(file_id=file_id)], - ) - - -streaming = False - - -# A helper method to invoke the agent with the user input -async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, message: ChatMessageContent) -> None: - """Invoke the agent with the user input.""" - await agent.add_chat_message(thread_id=thread_id, message=message) - - print(f"# {AuthorRole.USER}: '{message.items[0].text}'") - - if streaming: - first_chunk = True - async for content in agent.invoke_stream(thread_id=thread_id): - if content.role != AuthorRole.TOOL: - if first_chunk: - print(f"# {content.role}: ", end="", flush=True) - first_chunk = False - print(content.content, end="", flush=True) - print() - else: - async for content in agent.invoke(thread_id=thread_id): - if content.role != AuthorRole.TOOL: - print(f"# {content.role}: {content.content}") - - -async def main(): - # Create the instance of the Kernel - kernel = Kernel() - - service_id = "agent" - - # Create the Assistant Agent - agent = await OpenAIAssistantAgent.create( - kernel=kernel, service_id=service_id, name=HOST_NAME, instructions=HOST_INSTRUCTIONS - ) - - cat_image_file_path = os.path.join( - os.path.dirname(os.path.realpath(__file__)), - "resources", - "cat.jpg", - ) - - # Upload the file for use with the assistant - file_id = await agent.add_file(cat_image_file_path, purpose="vision") - - # Create a thread for the conversation - thread_id = await agent.create_thread() - - try: - await invoke_agent( - agent, - thread_id=thread_id, - message=create_message_with_image_url( - "Describe this image.", - "https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/New_york_times_square-terabass.jpg/1200px-New_york_times_square-terabass.jpg", - ), - ) - await invoke_agent( - agent, - thread_id=thread_id, - message=create_message_with_image_url( - "What is the main color in this image?", - "https://upload.wikimedia.org/wikipedia/commons/5/56/White_shark.jpg", - ), - ) - await invoke_agent( - agent, - thread_id=thread_id, - message=create_message_with_image_reference("Is there an animal in this image?", file_id), - ) - finally: - await agent.delete_file(file_id) - await agent.delete_thread(thread_id) - await agent.delete() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/step9_assistant_tool_code_interpreter.py b/python/samples/getting_started_with_agents/step9_assistant_tool_code_interpreter.py deleted file mode 100644 index 11c2deff8e7c..000000000000 --- a/python/samples/getting_started_with_agents/step9_assistant_tool_code_interpreter.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
-import asyncio - -from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent -from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -##################################################################### -# The following sample demonstrates how to create an OpenAI # -# assistant using either Azure OpenAI or OpenAI and leverage the # -# assistant's code interpreter functionality to have it write # -# Python code to print Fibonacci numbers. # -##################################################################### - - -AGENT_NAME = "CodeRunner" -AGENT_INSTRUCTIONS = "Run the provided code file and return the result." - -# Note: you may toggle this to switch between AzureOpenAI and OpenAI -use_azure_openai = True - - -# A helper method to invoke the agent with the user input -async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None: - """Invoke the agent with the user input.""" - await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input)) - - print(f"# {AuthorRole.USER}: '{input}'") - - async for content in agent.invoke(thread_id=thread_id): - if content.role != AuthorRole.TOOL: - print(f"# {content.role}: {content.content}") - - -async def main(): - # Create the instance of the Kernel - kernel = Kernel() - - # Define a service_id for the sample - service_id = "agent" - - # Create the agent - if use_azure_openai: - agent = await AzureAssistantAgent.create( - kernel=kernel, - service_id=service_id, - name=AGENT_NAME, - instructions=AGENT_INSTRUCTIONS, - enable_code_interpreter=True, - ) - else: - agent = await OpenAIAssistantAgent.create( - kernel=kernel, - service_id=service_id, - name=AGENT_NAME, - instructions=AGENT_INSTRUCTIONS, - enable_code_interpreter=True, - ) - - thread_id = await agent.create_thread() - - try: - await invoke_agent( - agent, - thread_id=thread_id, - input="Use code to determine the values in the Fibonacci sequence that that are less then the value of 101?", # noqa: E501 - ) - finally: - await agent.delete_thread(thread_id) - await agent.delete() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/learn_resources/agent_docs/agent_collaboration.py b/python/samples/learn_resources/agent_docs/agent_collaboration.py index c8e7d04bffb6..0d08c4a5b513 100644 --- a/python/samples/learn_resources/agent_docs/agent_collaboration.py +++ b/python/samples/learn_resources/agent_docs/agent_collaboration.py @@ -3,138 +3,143 @@ import asyncio import os +from semantic_kernel import Kernel from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.strategies.selection.kernel_function_selection_strategy import ( +from semantic_kernel.agents.strategies import ( KernelFunctionSelectionStrategy, -) -from semantic_kernel.agents.strategies.termination.kernel_function_termination_strategy import ( KernelFunctionTerminationStrategy, ) -from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy -from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from 
semantic_kernel.functions.kernel_function_from_prompt import KernelFunctionFromPrompt -from semantic_kernel.kernel import Kernel - -################################################################### -# The following sample demonstrates how to create a simple, # -# agent group chat that utilizes a Reviewer Chat Completion # -# Agent along with a Writer Chat Completion Agent to # -# complete a user's task. # -################################################################### - +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents import ChatHistoryTruncationReducer +from semantic_kernel.functions import KernelFunctionFromPrompt -class ApprovalTerminationStrategy(TerminationStrategy): - """A strategy for determining when an agent should terminate.""" +""" +The following sample demonstrates how to create a simple, +agent group chat that utilizes a Reviewer Chat Completion +Agent along with a Writer Chat Completion Agent to +complete a user's task. - async def should_agent_terminate(self, agent, history): - """Check if the agent should terminate.""" - return "approved" in history[-1].content.lower() +This is the full code sample for the Semantic Kernel Learn Site: How-To: Coordinate Agent Collaboration + using Agent Group Chat +https://learn.microsoft.com/semantic-kernel/frameworks/agent/examples/example-agent-collaboration?pivots=programming-language-python +""" +# Define agent names REVIEWER_NAME = "Reviewer" -COPYWRITER_NAME = "Writer" +WRITER_NAME = "Writer" -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: +def create_kernel() -> Kernel: + """Creates a Kernel instance with an Azure OpenAI ChatCompletion service.""" kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) + kernel.add_service(service=AzureChatCompletion()) return kernel async def main(): + # Create a single kernel instance for all agents. + kernel = create_kernel() + + # Create ChatCompletionAgents using the same kernel. agent_reviewer = ChatCompletionAgent( - service_id=REVIEWER_NAME, - kernel=_create_kernel_with_chat_completion(REVIEWER_NAME), + kernel=kernel, name=REVIEWER_NAME, instructions=""" - Your responsibility is to review and identify how to improve user provided content. - If the user has providing input or direction for content already provided, specify how to - address this input. - Never directly perform the correction or provide example. - Once the content has been updated in a subsequent response, you will review the content - again until satisfactory. - Always copy satisfactory content to the clipboard using available tools and inform user. - - RULES: - - Only identify suggestions that are specific and actionable. - - Verify previous suggestions have been addressed. - - Never repeat previous suggestions. - """, +Your responsibility is to review and identify how to improve user provided content. +If the user has provided input or direction for content already provided, specify how to address this input. +Never directly perform the correction or provide an example. +Once the content has been updated in a subsequent response, review it again until it is satisfactory. + +RULES: +- Only identify suggestions that are specific and actionable. +- Verify previous suggestions have been addressed. +- Never repeat previous suggestions. 
+""", ) agent_writer = ChatCompletionAgent( - service_id=COPYWRITER_NAME, - kernel=_create_kernel_with_chat_completion(COPYWRITER_NAME), - name=COPYWRITER_NAME, + kernel=kernel, + name=WRITER_NAME, instructions=""" - Your sole responsibility is to rewrite content according to review suggestions. - - - Always apply all review direction. - - Always revise the content in its entirety without explanation. - - Never address the user. - """, +Your sole responsibility is to rewrite content according to review suggestions. +- Always apply all review directions. +- Always revise the content in its entirety without explanation. +- Never address the user. +""", ) + # Define a selection function to determine which agent should take the next turn. selection_function = KernelFunctionFromPrompt( function_name="selection", prompt=f""" - Determine which participant takes the next turn in a conversation based on the the most recent participant. - State only the name of the participant to take the next turn. - No participant should take more than one turn in a row. - - Choose only from these participants: - - {REVIEWER_NAME} - - {COPYWRITER_NAME} - - Always follow these rules when selecting the next participant: - - After user input, it is {COPYWRITER_NAME}'s turn. - - After {COPYWRITER_NAME} replies, it is {REVIEWER_NAME}'s turn. - - After {REVIEWER_NAME} provides feedback, it is {COPYWRITER_NAME}'s turn. - - History: - {{{{$history}}}} - """, +Examine the provided RESPONSE and choose the next participant. +State only the name of the chosen participant without explanation. +Never choose the participant named in the RESPONSE. + +Choose only from these participants: +- {REVIEWER_NAME} +- {WRITER_NAME} + +Rules: +- If RESPONSE is user input, it is {REVIEWER_NAME}'s turn. +- If RESPONSE is by {REVIEWER_NAME}, it is {WRITER_NAME}'s turn. +- If RESPONSE is by {WRITER_NAME}, it is {REVIEWER_NAME}'s turn. + +RESPONSE: +{{{{$lastmessage}}}} +""", ) - TERMINATION_KEYWORD = "yes" + # Define a termination function where the reviewer signals completion with "yes". + termination_keyword = "yes" termination_function = KernelFunctionFromPrompt( function_name="termination", prompt=f""" - Examine the RESPONSE and determine whether the content has been deemed satisfactory. - If content is satisfactory, respond with a single word without explanation: {TERMINATION_KEYWORD}. - If specific suggestions are being provided, it is not satisfactory. - If no correction is suggested, it is satisfactory. - - RESPONSE: - {{{{$history}}}} - """, +Examine the RESPONSE and determine whether the content has been deemed satisfactory. +If the content is satisfactory, respond with a single word without explanation: {termination_keyword}. +If specific suggestions are being provided, it is not satisfactory. +If no correction is suggested, it is satisfactory. + +RESPONSE: +{{{{$lastmessage}}}} +""", ) + history_reducer = ChatHistoryTruncationReducer(target_count=5) + + # Create the AgentGroupChat with selection and termination strategies. 
chat = AgentGroupChat( - agents=[agent_writer, agent_reviewer], + agents=[agent_reviewer, agent_writer], selection_strategy=KernelFunctionSelectionStrategy( + initial_agent=agent_reviewer, function=selection_function, - kernel=_create_kernel_with_chat_completion("selection"), - result_parser=lambda result: str(result.value[0]) if result.value is not None else COPYWRITER_NAME, - agent_variable_name="agents", - history_variable_name="history", + kernel=kernel, + result_parser=lambda result: str(result.value[0]).strip() if result.value[0] is not None else WRITER_NAME, + history_variable_name="lastmessage", + history_reducer=history_reducer, ), termination_strategy=KernelFunctionTerminationStrategy( agents=[agent_reviewer], function=termination_function, - kernel=_create_kernel_with_chat_completion("termination"), - result_parser=lambda result: TERMINATION_KEYWORD in str(result.value[0]).lower(), - history_variable_name="history", + kernel=kernel, + result_parser=lambda result: termination_keyword in str(result.value[0]).lower(), + history_variable_name="lastmessage", maximum_iterations=10, + history_reducer=history_reducer, ), ) - is_complete: bool = False + print( + "Ready! Type your input, or 'exit' to quit, 'reset' to restart the conversation. " + "You may pass in a file path using @." + ) + + is_complete = False while not is_complete: - user_input = input("User:> ") + print() + user_input = input("User > ").strip() if not user_input: continue @@ -147,26 +152,35 @@ async def main(): print("[Conversation has been reset]") continue - if user_input.startswith("@") and len(input) > 1: - file_path = input[1:] + # Try to grab files from the script's current directory + if user_input.startswith("@") and len(user_input) > 1: + file_name = user_input[1:] + script_dir = os.path.dirname(os.path.abspath(__file__)) + file_path = os.path.join(script_dir, file_name) try: if not os.path.exists(file_path): print(f"Unable to access file: {file_path}") continue - with open(file_path) as file: + with open(file_path, encoding="utf-8") as file: user_input = file.read() except Exception: print(f"Unable to access file: {file_path}") continue - await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=user_input)) + # Add the current user_input to the chat + await chat.add_chat_message(message=user_input) - async for response in chat.invoke(): - print(f"# {response.role} - {response.name or '*'}: '{response.content}'") + try: + async for response in chat.invoke(): + if response is None or not response.name: + continue + print() + print(f"# {response.name.upper()}:\n{response.content}") + except Exception as e: + print(f"Error during chat invocation: {e}") - if chat.is_complete: - is_complete = True - break + # Reset the chat's complete flag for the new conversation round. + chat.is_complete = False if __name__ == "__main__": diff --git a/python/samples/learn_resources/agent_docs/assistant_code.py b/python/samples/learn_resources/agent_docs/assistant_code.py index 06a7bc8b29fd..19c7b61140b4 100644 --- a/python/samples/learn_resources/agent_docs/assistant_code.py +++ b/python/samples/learn_resources/agent_docs/assistant_code.py @@ -1,33 +1,39 @@ # Copyright (c) Microsoft. All rights reserved. 
import asyncio +import logging import os -from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel +from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.contents import StreamingFileReferenceContent -################################################################### -# The following sample demonstrates how to create a simple, # -# OpenAI assistant agent that utilizes the code interpreter # -# to analyze uploaded files. # -################################################################### +logging.basicConfig(level=logging.ERROR) + +""" +The following sample demonstrates how to create a simple, +OpenAI assistant agent that utilizes the code interpreter +to analyze uploaded files. + +This is the full code sample for the Semantic Kernel Learn Site: How-To: Open AI Assistant Agent Code Interpreter + +https://learn.microsoft.com/semantic-kernel/frameworks/agent/examples/example-assistant-code?pivots=programming-language-python +""" # noqa: E501 # Let's form the file paths that we will later pass to the assistant csv_file_path_1 = os.path.join( os.path.dirname(os.path.dirname(os.path.realpath(__file__))), + "resources", "PopulationByAdmin1.csv", ) csv_file_path_2 = os.path.join( os.path.dirname(os.path.dirname(os.path.realpath(__file__))), + "resources", "PopulationByCountry.csv", ) -async def download_file_content(agent, file_id: str): +async def download_file_content(agent: AzureAssistantAgent, file_id: str): try: # Fetch the content of the file using the provided method response_content = await agent.client.files.content(file_id) @@ -50,7 +56,7 @@ async def download_file_content(agent, file_id: str): print(f"An error occurred while downloading file {file_id}: {str(e)}") -async def download_response_image(agent, file_ids: list[str]): +async def download_response_image(agent: AzureAssistantAgent, file_ids: list[str]): if file_ids: # Iterate over file_ids and download each one for file_id in file_ids: @@ -58,22 +64,43 @@ async def download_response_image(agent, file_ids: list[str]): async def main(): - agent = await AzureAssistantAgent.create( - kernel=Kernel(), - service_id="agent", - name="SampleAssistantAgent", + # Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + # Upload the files to the client + file_ids: list[str] = [] + for path in [csv_file_path_1, csv_file_path_2]: + with open(path, "rb") as file: + file = await client.files.create(file=file, purpose="assistants") + file_ids.append(file.id) + + # Get the code interpreter tool and resources + code_interpreter_tools, code_interpreter_tool_resources = AzureAssistantAgent.configure_code_interpreter_tool( + file_ids=file_ids + ) + + # Create the assistant definition + definition = await client.beta.assistants.create( + model=model, instructions=""" - Analyze the available data to provide an answer to the user's question. - Always format response using markdown. - Always include a numerical index that starts at 1 for any lists or tables. - Always sort lists in ascending order. 
- """, - enable_code_interpreter=True, - code_interpreter_filenames=[csv_file_path_1, csv_file_path_2], + Analyze the available data to provide an answer to the user's question. + Always format response using markdown. + Always include a numerical index that starts at 1 for any lists or tables. + Always sort lists in ascending order. + """, + name="SampleAssistantAgent", + tools=code_interpreter_tools, + tool_resources=code_interpreter_tool_resources, + ) + + # Create the agent using the client and the assistant definition + agent = AzureAssistantAgent( + client=client, + definition=definition, ) print("Creating thread...") - thread_id = await agent.create_thread() + thread = await client.beta.threads.create() try: is_complete: bool = False @@ -85,33 +112,44 @@ async def main(): if user_input.lower() == "exit": is_complete = True - - await agent.add_chat_message( - thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=user_input) - ) - is_code: bool = False - async for response in agent.invoke_stream(thread_id=thread_id): - if is_code != response.metadata.get("code"): - print() - is_code = not is_code - - print(f"{response.content}", end="", flush=True) - + break + + await agent.add_chat_message(thread_id=thread.id, message=user_input) + + is_code = False + last_role = None + async for response in agent.invoke_stream(thread_id=thread.id): + current_is_code = response.metadata.get("code", False) + + if current_is_code: + if not is_code: + print("\n\n```python") + is_code = True + print(response.content, end="", flush=True) + else: + if is_code: + print("\n```") + is_code = False + last_role = None + if hasattr(response, "role") and response.role is not None and last_role != response.role: + print(f"\n# {response.role}: ", end="", flush=True) + last_role = response.role + print(response.content, end="", flush=True) file_ids.extend([ item.file_id for item in response.items if isinstance(item, StreamingFileReferenceContent) ]) - + if is_code: + print("```\n") print() await download_response_image(agent, file_ids) file_ids.clear() finally: - print("Cleaning up resources...") - if agent is not None: - [await agent.delete_file(file_id) for file_id in agent.code_interpreter_file_ids] - await agent.delete_thread(thread_id) - await agent.delete() + print("\nCleaning up resources...") + [await client.files.delete(file_id) for file_id in file_ids] + await client.beta.threads.delete(thread.id) + await client.beta.assistants.delete(agent.id) if __name__ == "__main__": diff --git a/python/samples/learn_resources/agent_docs/assistant_search.py b/python/samples/learn_resources/agent_docs/assistant_search.py index 5d91786e9bc4..8e1d77fca5cb 100644 --- a/python/samples/learn_resources/agent_docs/assistant_search.py +++ b/python/samples/learn_resources/agent_docs/assistant_search.py @@ -3,21 +3,25 @@ import asyncio import os -from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel +from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.contents import StreamingAnnotationContent -################################################################### -# The following sample demonstrates how to create a simple, # -# OpenAI assistant agent that utilizes the 
vector store # -# to answer questions based on the uploaded documents. # -################################################################### +""" +The following sample demonstrates how to create a simple, +OpenAI assistant agent that utilizes the vector store +to answer questions based on the uploaded documents. + +This is the full code sample for the Semantic Kernel Learn Site: How-To: Open AI Assistant Agent File Search + +https://learn.microsoft.com/semantic-kernel/frameworks/agent/examples/example-assistant-search?pivots=programming-language-python +""" def get_filepath_for_filename(filename: str) -> str: - base_directory = os.path.dirname(os.path.realpath(__file__)) + base_directory = os.path.join( + os.path.dirname(os.path.dirname(os.path.realpath(__file__))), + "resources", + ) return os.path.join(base_directory, filename) @@ -29,22 +33,48 @@ def get_filepath_for_filename(filename: str) -> str: async def main(): - agent = await AzureAssistantAgent.create( - kernel=Kernel(), - service_id="agent", - name="SampleAssistantAgent", + # Create the client using Azure OpenAI resources and configuration + client, model = AzureAssistantAgent.setup_resources() + + # Upload the files to the client + file_ids: list[str] = [] + for path in [get_filepath_for_filename(filename) for filename in filenames]: + with open(path, "rb") as file: + file = await client.files.create(file=file, purpose="assistants") + file_ids.append(file.id) + + vector_store = await client.beta.vector_stores.create( + name="assistant_search", + file_ids=file_ids, + ) + + # Get the file search tool and resources + file_search_tools, file_search_tool_resources = AzureAssistantAgent.configure_file_search_tool( + vector_store_ids=vector_store.id + ) + + # Create the assistant definition + definition = await client.beta.assistants.create( + model=model, instructions=""" The document store contains the text of fictional stories. Always analyze the document store to provide an answer to the user's question. Never rely on your knowledge of stories not included in the document store. Always format response using markdown. 
""", - enable_file_search=True, - vector_store_filenames=[get_filepath_for_filename(filename) for filename in filenames], + name="SampleAssistantAgent", + tools=file_search_tools, + tool_resources=file_search_tool_resources, + ) + + # Create the agent using the client and the assistant definition + agent = AzureAssistantAgent( + client=client, + definition=definition, ) print("Creating thread...") - thread_id = await agent.create_thread() + thread = await client.beta.threads.create() try: is_complete: bool = False @@ -55,13 +85,12 @@ async def main(): if user_input.lower() == "exit": is_complete = True + break - await agent.add_chat_message( - thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=user_input) - ) + await agent.add_chat_message(thread_id=thread.id, message=user_input) footnotes: list[StreamingAnnotationContent] = [] - async for response in agent.invoke_stream(thread_id=thread_id): + async for response in agent.invoke_stream(thread_id=thread.id): footnotes.extend([item for item in response.items if isinstance(item, StreamingAnnotationContent)]) print(f"{response.content}", end="", flush=True) @@ -76,11 +105,10 @@ async def main(): ) finally: - print("Cleaning up resources...") - if agent is not None: - [await agent.delete_file(file_id) for file_id in agent.file_search_file_ids] - await agent.delete_thread(thread_id) - await agent.delete() + print("\nCleaning up resources...") + [await client.files.delete(file_id) for file_id in file_ids] + await client.beta.threads.delete(thread.id) + await client.beta.assistants.delete(agent.id) if __name__ == "__main__": diff --git a/python/samples/learn_resources/agent_docs/chat_agent.py b/python/samples/learn_resources/agent_docs/chat_agent.py index 56429d5974cb..33d4999bea80 100644 --- a/python/samples/learn_resources/agent_docs/chat_agent.py +++ b/python/samples/learn_resources/agent_docs/chat_agent.py @@ -6,11 +6,10 @@ from datetime import datetime from semantic_kernel.agents import ChatCompletionAgent -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai import FunctionChoiceBehavior from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.contents import AuthorRole, ChatHistory, ChatMessageContent +from semantic_kernel.functions import KernelArguments from semantic_kernel.kernel import Kernel # Adjust the sys.path so we can use the GitHubPlugin and GitHubSettings classes @@ -20,11 +19,15 @@ from plugins.GithubPlugin.github import GitHubPlugin, GitHubSettings # noqa: E402 -################################################################### -# The following sample demonstrates how to create a simple, # -# ChatCompletionAgent to use a GitHub plugin to interact # -# with the GitHub API. # -################################################################### +""" +The following sample demonstrates how to create a simple, +ChatCompletionAgent to use a GitHub plugin to interact +with the GitHub API. 
+ +This is the full code sample for the Semantic Kernel Learn Site: How-To: Chat Completion Agent + +https://learn.microsoft.com/semantic-kernel/frameworks/agent/examples/example-chat-agent?pivots=programming-language-python +""" async def main(): @@ -46,7 +49,6 @@ async def main(): # Create the agent agent = ChatCompletionAgent( - service_id="agent", kernel=kernel, name="SampleAssistantAgent", instructions=f""" @@ -60,7 +62,7 @@ async def main(): The current date and time is: {current_time}. """, - execution_settings=settings, + arguments=KernelArguments(settings=settings), ) history = ChatHistory() diff --git a/python/samples/learn_resources/plugins/GithubPlugin/github.py b/python/samples/learn_resources/plugins/GithubPlugin/github.py index 4f06fe9bdd62..0e2230653b82 100644 --- a/python/samples/learn_resources/plugins/GithubPlugin/github.py +++ b/python/samples/learn_resources/plugins/GithubPlugin/github.py @@ -12,22 +12,22 @@ class Repo(BaseModel): id: int = Field(..., alias="id") name: str = Field(..., alias="full_name") - description: str | None = Field(None, alias="description") + description: str | None = Field(default=None, alias="description") url: str = Field(..., alias="html_url") class User(BaseModel): id: int = Field(..., alias="id") login: str = Field(..., alias="login") - name: str | None = Field(None, alias="name") - company: str | None = Field(None, alias="company") + name: str | None = Field(default=None, alias="name") + company: str | None = Field(default=None, alias="company") url: str = Field(..., alias="html_url") class Label(BaseModel): id: int = Field(..., alias="id") name: str = Field(..., alias="name") - description: str | None = Field(None, alias="description") + description: str | None = Field(default=None, alias="description") class Issue(BaseModel): @@ -37,12 +37,12 @@ class Issue(BaseModel): title: str = Field(..., alias="title") state: str = Field(..., alias="state") labels: list[Label] = Field(..., alias="labels") - when_created: str | None = Field(None, alias="created_at") - when_closed: str | None = Field(None, alias="closed_at") + when_created: str | None = Field(default=None, alias="created_at") + when_closed: str | None = Field(default=None, alias="closed_at") class IssueDetail(Issue): - body: str | None = Field(None, alias="body") + body: str | None = Field(default=None, alias="body") # endregion diff --git a/python/samples/learn_resources/resources/WomensSuffrage.txt b/python/samples/learn_resources/resources/WomensSuffrage.txt new file mode 100644 index 000000000000..3100274682f2 --- /dev/null +++ b/python/samples/learn_resources/resources/WomensSuffrage.txt @@ -0,0 +1,9 @@ +Women's suffrage is when women got the right to vote. A long time ago, only men could vote and make decisions. This was not fair because women should have the same rights as men. Women wanted to vote too, so they started asking for it. It took a long time, and they had to work very hard to make people listen to them. Many men did not think women should vote, and this made it very hard for the women. + +The women who fought for voting were called suffragets. They did many things to show they wanted the right to vote. Some gave speeches, others made signs and marched in the streets. Some even went to jail because they refused to stop fighting for what they believed was right. It was scary for some of the women, but they knew how important it was to keep trying. They wanted to change the world so that it was more fair for everyone. 
+ +One of the most important suffragets was Susan B. Anthony. She worked very hard to help women get the right to vote. She gave speeches and wrote letters to the goverment to make them change the laws. Susan never gave up, even when people said mean things to her. Another important person was Elizabeth Cady Stanton. She also helped fight for women's rights and was friends with Susan B. Anthony. Together, they made a great team and helped make big changes. + +Finally, in 1920, the 19th amendment was passed in the United States. This law gave women the right to vote. It was a huge victory for the suffragets, and they were very happy. Many women went to vote for the first time, and it felt like they were finally equal with men. It took many years and a lot of hard work, but the women never gave up. They kept fighting until they won. + +Women's suffrage is very important because it shows that if you work hard and believe in something, you can make a change. The women who fought for the right to vote showed bravery and strengh, and they helped make the world a better place. Today, women can vote because of them, and it's important to remember their hard work. We should always stand up for what is right, just like the suffragets did. diff --git a/python/semantic_kernel/__init__.py b/python/semantic_kernel/__init__.py index b4439bca0b28..ac1decedec41 100644 --- a/python/semantic_kernel/__init__.py +++ b/python/semantic_kernel/__init__.py @@ -2,5 +2,8 @@ from semantic_kernel.kernel import Kernel -__version__ = "1.19.0" -__all__ = ["Kernel", "__version__"] +__version__ = "1.22.1" + +DEFAULT_RC_VERSION = f"{__version__}-rc2" + +__all__ = ["DEFAULT_RC_VERSION", "Kernel", "__version__"] diff --git a/python/semantic_kernel/agents/agent.py b/python/semantic_kernel/agents/agent.py index 56cd115a7751..1b410ba7c7b6 100644 --- a/python/semantic_kernel/agents/agent.py +++ b/python/semantic_kernel/agents/agent.py @@ -1,25 +1,30 @@ # Copyright (c) Microsoft. All rights reserved. 
+import logging import uuid -from collections.abc import Iterable -from typing import TYPE_CHECKING, ClassVar +from abc import ABC, abstractmethod +from collections.abc import AsyncIterable, Iterable +from typing import Any, ClassVar -from pydantic import Field +from pydantic import Field, model_validator from semantic_kernel.agents.channels.agent_channel import AgentChannel -from semantic_kernel.contents.history_reducer.chat_history_reducer import ChatHistoryReducer +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.functions.kernel_plugin import KernelPlugin from semantic_kernel.kernel import Kernel from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.prompt_template.kernel_prompt_template import KernelPromptTemplate +from semantic_kernel.prompt_template.prompt_template_base import PromptTemplateBase +from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig from semantic_kernel.utils.naming import generate_random_ascii_name from semantic_kernel.utils.validation import AGENT_NAME_REGEX -if TYPE_CHECKING: - from semantic_kernel.contents.chat_history import ChatHistory +logger: logging.Logger = logging.getLogger(__name__) -@experimental_class -class Agent(KernelBaseModel): +class Agent(KernelBaseModel, ABC): """Base abstraction for all Semantic Kernel agents. An agent instance may participate in one or more conversations. @@ -28,35 +33,84 @@ class Agent(KernelBaseModel): must define its communication protocol, or AgentChannel. Attributes: - name: The name of the agent (optional). - description: The description of the agent (optional). - id: The unique identifier of the agent (optional). If no id is provided, + arguments: The arguments for the agent + channel_type: The type of the agent channel + description: The description of the agent + id: The unique identifier of the agent If no id is provided, a new UUID will be generated. 
- instructions: The instructions for the agent (optional + instructions: The instructions for the agent (optional) + kernel: The kernel instance for the agent + name: The name of the agent + prompt_template: The prompt template for the agent """ - id: str = Field(default_factory=lambda: str(uuid.uuid4())) + arguments: KernelArguments | None = None + channel_type: ClassVar[type[AgentChannel] | None] = None description: str | None = None - name: str = Field(default_factory=lambda: f"agent_{generate_random_ascii_name()}", pattern=AGENT_NAME_REGEX) + id: str = Field(default_factory=lambda: str(uuid.uuid4())) instructions: str | None = None kernel: Kernel = Field(default_factory=Kernel) - channel_type: ClassVar[type[AgentChannel] | None] = None - history_reducer: ChatHistoryReducer | None = None + name: str = Field(default_factory=lambda: f"agent_{generate_random_ascii_name()}", pattern=AGENT_NAME_REGEX) + prompt_template: PromptTemplateBase | None = None + + @staticmethod + def _get_plugin_name(plugin: KernelPlugin | object) -> str: + """Helper method to get the plugin name.""" + if isinstance(plugin, KernelPlugin): + return plugin.name + return plugin.__class__.__name__ + + @model_validator(mode="before") + @classmethod + def _configure_plugins(cls, data: Any) -> Any: + """Configure any plugins passed in.""" + if isinstance(data, dict) and (plugins := data.pop("plugins", None)): + kernel = data.get("kernel", None) + if not kernel: + kernel = Kernel() + for plugin in plugins: + name = Agent._get_plugin_name(plugin) + kernel.add_plugin(plugin, plugin_name=name) + data["kernel"] = kernel + return data + + @abstractmethod + async def get_response(self, *args, **kwargs) -> ChatMessageContent: + """Get a response from the agent. + + This method returns the final result of the agent's execution + as a single ChatMessageContent object. The caller is blocked until + the final result is available. + + Note: For streaming responses, use the invoke_stream method, which returns + intermediate steps and the final result as a stream of StreamingChatMessageContent + objects. Streaming only the final result is not feasible because the timing of + the final result's availability is unknown, and blocking the caller until then + is undesirable in streaming scenarios. + """ + pass - async def reduce_history(self, history: "ChatHistory") -> bool: - """Perform the reduction on the provided history, returning True if reduction occurred.""" - if self.history_reducer is None: - return False + @abstractmethod + def invoke(self, *args, **kwargs) -> AsyncIterable[ChatMessageContent]: + """Invoke the agent. - self.history_reducer.messages = history.messages + This invocation method will return the intermediate steps and the final results + of the agent's execution as a stream of ChatMessageContent objects to the caller. - new_messages = await self.history_reducer.reduce() - if new_messages is not None: - history.messages.clear() - history.messages.extend(new_messages) - return True + Note: A ChatMessageContent object contains an entire message. + """ + pass - return False + @abstractmethod + def invoke_stream(self, *args, **kwargs) -> AsyncIterable[StreamingChatMessageContent]: + """Invoke the agent as a stream. + + This invocation method will return the intermediate steps and final results of the + agent's execution as a stream of StreamingChatMessageContent objects to the caller. + + Note: A StreamingChatMessageContent object contains a chunk of a message. 
+ """ + pass def get_channel_keys(self) -> Iterable[str]: """Get the channel keys. @@ -68,10 +122,6 @@ def get_channel_keys(self) -> Iterable[str]: raise NotImplementedError("Unable to get channel keys. Channel type not configured.") yield self.channel_type.__name__ - if self.history_reducer is not None: - yield self.history_reducer.__class__.__name__ - yield str(self.history_reducer.__hash__) - async def create_channel(self) -> AgentChannel: """Create a channel. @@ -82,6 +132,51 @@ async def create_channel(self) -> AgentChannel: raise NotImplementedError("Unable to create channel. Channel type not configured.") return self.channel_type() + async def format_instructions(self, kernel: Kernel, arguments: KernelArguments | None = None) -> str | None: + """Format the instructions. + + Args: + kernel: The kernel instance. + arguments: The kernel arguments. + + Returns: + The formatted instructions. + """ + if self.prompt_template is None: + if self.instructions is None: + return None + self.prompt_template = KernelPromptTemplate( + prompt_template_config=PromptTemplateConfig(template=self.instructions) + ) + return await self.prompt_template.render(kernel, arguments) + + def _merge_arguments(self, override_args: KernelArguments | None) -> KernelArguments: + """Merge the arguments with the override arguments. + + Args: + override_args: The arguments to override. + + Returns: + The merged arguments. If both are None, return None. + """ + if not self.arguments: + if not override_args: + return KernelArguments() + return override_args + + if not override_args: + return self.arguments + + # Both are not None, so merge with precedence for override_args. + merged_execution_settings = self.arguments.execution_settings or {} + if override_args.execution_settings: + merged_execution_settings.update(override_args.execution_settings) + + merged_params = dict(self.arguments) + merged_params.update(override_args) + + return KernelArguments(settings=merged_execution_settings, **merged_params) + def __eq__(self, other): """Check if two agents are equal.""" if isinstance(other, Agent): diff --git a/python/semantic_kernel/agents/autogen/README.md b/python/semantic_kernel/agents/autogen/README.md new file mode 100644 index 000000000000..88ceb05bad1d --- /dev/null +++ b/python/semantic_kernel/agents/autogen/README.md @@ -0,0 +1,20 @@ +## AutoGen Conversable Agent (v0.2.X) + +Semantic Kernel Python supports running AutoGen Conversable Agents provided in the 0.2.X package. + +### Limitations + +Currently, there are some limitations to note: + +- AutoGen Conversable Agents in Semantic Kernel run asynchronously and do not support streaming of agent inputs or responses. +- The `AutoGenConversableAgent` in Semantic Kernel Python cannot be configured as part of a Semantic Kernel `AgentGroupChat`. As we progress towards GA for our agent group chat patterns, we will explore ways to integrate AutoGen agents into a Semantic Kernel group chat scenario. + +### Installation + +Install the `semantic-kernel` package with the `autogen` extra: + +```bash +pip install semantic-kernel[autogen] +``` + +For an example of how to integrate an AutoGen Conversable Agent using the Semantic Kernel Agent abstraction, please refer to [`autogen_conversable_agent_simple_convo.py`](../../../samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_simple_convo.py). 
\ No newline at end of file diff --git a/python/semantic_kernel/agents/autogen/__init__.py b/python/semantic_kernel/agents/autogen/__init__.py new file mode 100644 index 000000000000..e25409f3c0b6 --- /dev/null +++ b/python/semantic_kernel/agents/autogen/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.agents.autogen.autogen_conversable_agent import AutoGenConversableAgent + +__all__ = ["AutoGenConversableAgent"] diff --git a/python/semantic_kernel/agents/autogen/autogen_conversable_agent.py b/python/semantic_kernel/agents/autogen/autogen_conversable_agent.py new file mode 100644 index 000000000000..634cb658ab9b --- /dev/null +++ b/python/semantic_kernel/agents/autogen/autogen_conversable_agent.py @@ -0,0 +1,204 @@ +# Copyright (c) Microsoft. All rights reserved. + +import logging +import sys +from collections.abc import AsyncIterable, Callable +from typing import TYPE_CHECKING, Any + +from semantic_kernel.utils.feature_stage_decorator import experimental + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from autogen import ConversableAgent + +from semantic_kernel.agents.agent import Agent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.contents.text_content import TextContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions.agent_exceptions import AgentInvokeException +from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.utils.telemetry.agent_diagnostics.decorators import ( + trace_agent_get_response, + trace_agent_invocation, +) + +if TYPE_CHECKING: + from autogen.cache import AbstractCache + + from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent + from semantic_kernel.kernel import Kernel + +logger: logging.Logger = logging.getLogger(__name__) + + +@experimental +class AutoGenConversableAgent(Agent): + """A Semantic Kernel wrapper around an AutoGen 0.2 `ConversableAgent`. + + This allows one to use it as a Semantic Kernel `Agent`. Note: this agent abstraction + does not currently allow for the use of AgentGroupChat within Semantic Kernel. + """ + + conversable_agent: ConversableAgent + + def __init__(self, conversable_agent: ConversableAgent, **kwargs: Any) -> None: + """Initialize the AutoGenConversableAgent. + + Args: + conversable_agent: The existing AutoGen 0.2 ConversableAgent instance + kwargs: Other Agent base class arguments (e.g. name, id, instructions) + """ + args: dict[str, Any] = { + "name": conversable_agent.name, + "description": conversable_agent.description, + "instructions": conversable_agent.system_message, + "conversable_agent": conversable_agent, + } + + if kwargs: + args.update(kwargs) + + super().__init__(**args) + + @trace_agent_get_response + @override + async def get_response(self, message: str) -> ChatMessageContent: + """Get a response from the agent. + + Args: + message: The message to send. + + Returns: + A ChatMessageContent object with the response. 
+ """ + reply = await self.conversable_agent.a_generate_reply( + messages=[{"role": "user", "content": message}], + ) + + logger.info("Called AutoGenConversableAgent.a_generate_reply.") + + if isinstance(reply, str): + return ChatMessageContent(content=reply, role=AuthorRole.ASSISTANT) + if isinstance(reply, dict): + return ChatMessageContent(**reply) + + raise AgentInvokeException(f"Unexpected reply type from `a_generate_reply`: {type(reply)}") + + @trace_agent_invocation + @override + async def invoke( + self, + *, + recipient: "AutoGenConversableAgent | None" = None, + clear_history: bool = True, + silent: bool = True, + cache: "AbstractCache | None" = None, + max_turns: int | None = None, + summary_method: str | Callable | None = ConversableAgent.DEFAULT_SUMMARY_METHOD, + summary_args: dict | None = {}, + message: dict | str | Callable | None = None, + **kwargs: Any, + ) -> AsyncIterable[ChatMessageContent]: + """A direct `invoke` method for the ConversableAgent. + + Args: + recipient: The recipient ConversableAgent to chat with + clear_history: Whether to clear the chat history before starting. True by default. + silent: Whether to suppress console output. True by default. + cache: The cache to use for storing chat history + max_turns: The maximum number of turns to chat for + summary_method: The method to use for summarizing the chat + summary_args: The arguments to pass to the summary method + message: The initial message to send. If message is not provided, + the agent will wait for the user to provide the first message. + kwargs: Additional keyword arguments + """ + if recipient is not None: + if not isinstance(recipient, AutoGenConversableAgent): + raise AgentInvokeException( + f"Invalid recipient type: {type(recipient)}. " + "Recipient must be an instance of AutoGenConversableAgent." 
+ ) + + chat_result = await self.conversable_agent.a_initiate_chat( + recipient=recipient.conversable_agent, + clear_history=clear_history, + silent=silent, + cache=cache, + max_turns=max_turns, + summary_method=summary_method, + summary_args=summary_args, + message=message, # type: ignore + **kwargs, + ) + + logger.info(f"Called AutoGenConversableAgent.a_initiate_chat with recipient: {recipient}.") + + for message in chat_result.chat_history: + yield AutoGenConversableAgent._to_chat_message_content(message) # type: ignore + else: + reply = await self.conversable_agent.a_generate_reply( + messages=[{"role": "user", "content": message}], + ) + + logger.info("Called AutoGenConversableAgent.a_generate_reply.") + + if isinstance(reply, str): + yield ChatMessageContent(content=reply, role=AuthorRole.ASSISTANT) + elif isinstance(reply, dict): + yield ChatMessageContent(**reply) + else: + raise AgentInvokeException(f"Unexpected reply type from `a_generate_reply`: {type(reply)}") + + @override + def invoke_stream( + self, + message: str, + kernel: "Kernel | None" = None, + arguments: KernelArguments | None = None, + **kwargs: Any, + ) -> AsyncIterable["StreamingChatMessageContent"]: + """Invoke the agent with a stream of messages.""" + raise NotImplementedError("The AutoGenConversableAgent does not support streaming.") + + @staticmethod + def _to_chat_message_content(message: dict[str, Any]) -> ChatMessageContent: + """Translate an AutoGen message to a Semantic Kernel ChatMessageContent.""" + items: list[TextContent | FunctionCallContent | FunctionResultContent] = [] + role = AuthorRole(message.get("role")) + name: str = message.get("name", "") + + content = message.get("content") + if content is not None: + text = TextContent(text=content) + items.append(text) + + if role == AuthorRole.ASSISTANT: + tool_calls = message.get("tool_calls") + if tool_calls is not None: + for tool_call in tool_calls: + items.append( + FunctionCallContent( + id=tool_call.get("id"), + function_name=tool_call.get("name"), + arguments=tool_call.get("function").get("arguments"), + ) + ) + + if role == AuthorRole.TOOL: + tool_responses = message.get("tool_responses") + if tool_responses is not None: + for tool_response in tool_responses: + items.append( + FunctionResultContent( + id=tool_response.get("tool_call_id"), + result=tool_response.get("content"), + ) + ) + + return ChatMessageContent(role=role, items=items, name=name) # type: ignore diff --git a/python/semantic_kernel/agents/azure_ai/__init__.py b/python/semantic_kernel/agents/azure_ai/__init__.py new file mode 100644 index 000000000000..bb074ae1499c --- /dev/null +++ b/python/semantic_kernel/agents/azure_ai/__init__.py @@ -0,0 +1,6 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent +from semantic_kernel.agents.azure_ai.azure_ai_agent_settings import AzureAIAgentSettings + +__all__ = ["AzureAIAgent", "AzureAIAgentSettings"] diff --git a/python/semantic_kernel/agents/azure_ai/agent_content_generation.py b/python/semantic_kernel/agents/azure_ai/agent_content_generation.py new file mode 100644 index 000000000000..997ded13eb40 --- /dev/null +++ b/python/semantic_kernel/agents/azure_ai/agent_content_generation.py @@ -0,0 +1,435 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from typing import TYPE_CHECKING, Any, cast + +from azure.ai.projects.models import ( + MessageDeltaImageFileContent, + MessageDeltaImageFileContentObject, + MessageDeltaTextContent, + MessageDeltaTextFileCitationAnnotation, + MessageDeltaTextFilePathAnnotation, + MessageImageFileContent, + MessageTextContent, + MessageTextFileCitationAnnotation, + MessageTextFilePathAnnotation, + RunStep, + RunStepDeltaCodeInterpreterDetailItemObject, + RunStepDeltaCodeInterpreterImageOutput, + RunStepDeltaCodeInterpreterLogOutput, + RunStepDeltaCodeInterpreterToolCall, + RunStepDeltaFileSearchToolCall, + RunStepDeltaFunctionToolCall, + RunStepFunctionToolCall, + ThreadMessage, + ThreadRun, +) + +from semantic_kernel.contents.annotation_content import AnnotationContent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.file_reference_content import FileReferenceContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.contents.image_content import ImageContent +from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent +from semantic_kernel.contents.streaming_text_content import StreamingTextContent +from semantic_kernel.contents.text_content import TextContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.utils.feature_stage_decorator import experimental + +if TYPE_CHECKING: + from azure.ai.projects.models import ( + MessageDeltaChunk, + RunStepDeltaToolCallObject, + ) + +################################################################### +# The methods in this file are used with Azure AI Agent # +# related code. They are used to invoke, create chat messages, # +# or generate message content. # +################################################################### + + +@experimental +def get_message_contents(message: "ChatMessageContent") -> list[dict[str, Any]]: + """Get the message contents. + + Args: + message: The message. 
+ """ + contents: list[dict[str, Any]] = [] + for content in message.items: + match content: + case TextContent(): + # Make sure text is a string + final_text = content.text + if not isinstance(final_text, str): + if isinstance(final_text, (list, tuple)): + final_text = " ".join(map(str, final_text)) + else: + final_text = str(final_text) + + contents.append({"type": "text", "text": final_text}) + + case ImageContent(): + if content.uri: + contents.append(content.to_dict()) + + case FileReferenceContent(): + contents.append({ + "type": "image_file", + "image_file": {"file_id": content.file_id}, + }) + + case FunctionResultContent(): + final_result = content.result + match final_result: + case str(): + contents.append({"type": "text", "text": final_result}) + case list() | tuple(): + contents.append({"type": "text", "text": " ".join(map(str, final_result))}) + case _: + contents.append({"type": "text", "text": str(final_result)}) + + return contents + + +@experimental +def generate_message_content( + assistant_name: str, message: "ThreadMessage", completed_step: "RunStep | None" = None +) -> ChatMessageContent: + """Generate message content.""" + role = AuthorRole(message.role) + + metadata = ( + { + "created_at": completed_step.created_at, + "message_id": message.id, # message needs to be defined in context + "step_id": completed_step.id, + "run_id": completed_step.run_id, + "thread_id": completed_step.thread_id, + "assistant_id": completed_step.assistant_id, + "usage": completed_step.usage, + } + if completed_step is not None + else None + ) + + content: ChatMessageContent = ChatMessageContent(role=role, name=assistant_name, metadata=metadata) # type: ignore + + messages: list[MessageImageFileContent | MessageTextContent] = cast( + list[MessageImageFileContent | MessageTextContent], message.content or [] + ) + for item_content in messages: + if item_content.type == "text": + content.items.append( + TextContent( + text=item_content.text.value, + ) + ) + for annotation in item_content.text.annotations: + content.items.append(generate_annotation_content(annotation)) # type: ignore + elif item_content.type == "image_file": + content.items.append( + FileReferenceContent( + file_id=item_content.image_file.file_id, + ) + ) + return content + + +@experimental +def generate_streaming_message_content( + assistant_name: str, message_delta_event: "MessageDeltaChunk" +) -> StreamingChatMessageContent: + """Generate streaming message content from a MessageDeltaEvent.""" + delta = message_delta_event.delta + + # Determine the role + role = AuthorRole(delta.role) if delta.role is not None else AuthorRole("assistant") + + items: list[StreamingTextContent | StreamingAnnotationContent | StreamingFileReferenceContent] = [] + + delta_chunks: list[MessageDeltaImageFileContent | MessageDeltaTextContent] = cast( + list[MessageDeltaImageFileContent | MessageDeltaTextContent], delta.content or [] + ) + + for delta_block in delta_chunks: + if delta_block.type == "text": + if delta_block.text and delta_block.text.value: # Ensure text is not None + text_value = delta_block.text.value + items.append( + StreamingTextContent( + text=text_value, + choice_index=delta_block.index, + ) + ) + # Process annotations if any + if delta_block.text.annotations: + for annotation in delta_block.text.annotations or []: + if isinstance( + annotation, + ( + MessageDeltaTextFileCitationAnnotation, + MessageDeltaTextFilePathAnnotation, + ), + ): + items.append(generate_streaming_annotation_content(annotation)) + elif delta_block.type == 
"image_file": + assert isinstance(delta_block, MessageDeltaImageFileContent) # nosec + if delta_block.image_file and isinstance(delta_block.image_file, MessageDeltaImageFileContentObject): + file_id = delta_block.image_file.file_id + items.append( + StreamingFileReferenceContent( + file_id=file_id, + ) + ) + + return StreamingChatMessageContent(role=role, name=assistant_name, items=items, choice_index=0) # type: ignore + + +@experimental +def get_function_call_contents( + run: "ThreadRun", function_steps: dict[str, FunctionCallContent] +) -> list[FunctionCallContent]: + """Extract function call contents from the run. + + Args: + run: The run. + function_steps: The function steps + + Returns: + The list of function call contents. + """ + function_call_contents: list[FunctionCallContent] = [] + required_action = getattr(run, "required_action", None) + if not required_action or not getattr(required_action, "submit_tool_outputs", False): + return function_call_contents + for tool_call in required_action.submit_tool_outputs.tool_calls: + tool: RunStepFunctionToolCall = tool_call + fcc = FunctionCallContent( + id=tool.id, + index=getattr(tool, "index", None), + name=tool.function.name, + arguments=tool.function.arguments, + ) + function_call_contents.append(fcc) + function_steps[tool.id] = fcc + return function_call_contents + + +@experimental +def generate_function_call_content(agent_name: str, fccs: list[FunctionCallContent]) -> ChatMessageContent: + """Generate function call content. + + Args: + agent_name: The agent name. + fccs: The function call contents. + + Returns: + ChatMessageContent: The chat message content containing the function call content as the items. + """ + return ChatMessageContent(role=AuthorRole.ASSISTANT, name=agent_name, items=fccs) # type: ignore + + +@experimental +def generate_function_call_streaming_content( + agent_name: str, + fccs: list[FunctionCallContent], +) -> StreamingChatMessageContent: + """Generate function call content. + + Args: + agent_name: The agent name. + fccs: The function call contents. + + Returns: + StreamingChatMessageContent: The chat message content containing the function call content as the items. + """ + return StreamingChatMessageContent(role=AuthorRole.ASSISTANT, choice_index=0, name=agent_name, items=fccs) # type: ignore + + +@experimental +def generate_function_result_content( + agent_name: str, function_step: FunctionCallContent, tool_call: "RunStepFunctionToolCall" +) -> ChatMessageContent: + """Generate function result content.""" + function_call_content: ChatMessageContent = ChatMessageContent(role=AuthorRole.TOOL, name=agent_name) # type: ignore + function_call_content.items.append( + FunctionResultContent( + function_name=function_step.function_name, + plugin_name=function_step.plugin_name, + id=function_step.id, + result=tool_call.function.output, # type: ignore + ) + ) + return function_call_content + + +@experimental +def generate_code_interpreter_content(agent_name: str, code: str) -> "ChatMessageContent": + """Generate code interpreter content. + + Args: + agent_name: The agent name. + code: The code. + + Returns: + ChatMessageContent: The chat message content. + """ + return ChatMessageContent( + role=AuthorRole.ASSISTANT, + content=code, + name=agent_name, + metadata={"code": True}, + ) + + +@experimental +def generate_streaming_function_content( + agent_name: str, step_details: "RunStepDeltaToolCallObject" +) -> "StreamingChatMessageContent | None": + """Generate streaming function content. 
+ + Args: + agent_name: The agent name. + step_details: The function step. + + Returns: + StreamingChatMessageContent: The chat message content. + """ + if not step_details.tool_calls: + return None + + items: list[FunctionCallContent] = [] + + tool_calls: list[ + RunStepDeltaCodeInterpreterToolCall | RunStepDeltaFileSearchToolCall | RunStepDeltaFunctionToolCall + ] = cast( + list[RunStepDeltaCodeInterpreterToolCall | RunStepDeltaFileSearchToolCall | RunStepDeltaFunctionToolCall], + step_details.tool_calls or [], + ) + + for tool in tool_calls: + if tool.type == "function" and tool.function: + items.append( + FunctionCallContent( + id=tool.id, + index=getattr(tool, "index", None), + name=tool.function.name, + arguments=tool.function.arguments, + ) + ) + + return ( + StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, + name=agent_name, + items=items, # type: ignore + choice_index=0, + ) + if len(items) > 0 + else None + ) + + +@experimental +def generate_streaming_code_interpreter_content( + agent_name: str, step_details: "RunStepDeltaToolCallObject" +) -> "StreamingChatMessageContent | None": + """Generate code interpreter content. + + Args: + agent_name: The agent name. + step_details: The current step details. + + Returns: + StreamingChatMessageContent: The chat message content. + """ + items: list[StreamingTextContent | StreamingFileReferenceContent] = [] + + if not step_details.tool_calls: + return None + + metadata: dict[str, bool] = {} + for index, tool in enumerate(step_details.tool_calls): + if isinstance(tool, RunStepDeltaCodeInterpreterDetailItemObject): + code_interpreter_tool_call = tool + if code_interpreter_tool_call.input: + items.append( + StreamingTextContent( + choice_index=index, + text=code_interpreter_tool_call.input, + ) + ) + metadata["code"] = True + if code_interpreter_tool_call.outputs: + for output in code_interpreter_tool_call.outputs: + if ( + isinstance(output, RunStepDeltaCodeInterpreterImageOutput) + and output.image is not None + and output.image.file_id + ): + items.append( + StreamingFileReferenceContent( + file_id=output.image.file_id, + ) + ) + if isinstance(output, RunStepDeltaCodeInterpreterLogOutput) and output.logs: + items.append( + StreamingTextContent( + choice_index=index, + text=output.logs, + ) + ) + + return ( + StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, + name=agent_name, + items=items, # type: ignore + choice_index=0, + metadata=metadata if metadata else None, + ) + if len(items) > 0 + else None + ) + + +@experimental +def generate_annotation_content( + annotation: MessageTextFilePathAnnotation | MessageTextFileCitationAnnotation, +) -> AnnotationContent: + """Generate annotation content.""" + file_id = None + if isinstance(annotation, MessageTextFilePathAnnotation) and annotation.file_path is not None: + file_id = annotation.file_path.file_id + elif isinstance(annotation, MessageTextFileCitationAnnotation) and annotation.file_citation is not None: + file_id = annotation.file_citation.file_id + + return AnnotationContent( + file_id=file_id, + quote=annotation.text, + start_index=annotation.start_index if annotation.start_index is not None else None, + end_index=annotation.end_index if annotation.end_index is not None else None, + ) + + +@experimental +def generate_streaming_annotation_content( + annotation: MessageDeltaTextFilePathAnnotation | MessageDeltaTextFileCitationAnnotation, +) -> StreamingAnnotationContent: + """Generate streaming annotation content.""" + file_id = None + if isinstance(annotation, 
MessageDeltaTextFilePathAnnotation) and annotation.file_path: + file_id = annotation.file_path.file_id if annotation.file_path.file_id else None + elif isinstance(annotation, MessageDeltaTextFileCitationAnnotation) and annotation.file_citation: + file_id = annotation.file_citation.file_id if annotation.file_citation.file_id else None + + return StreamingAnnotationContent( + file_id=file_id, + quote=annotation.text, + start_index=annotation.start_index if annotation.start_index is not None else None, + end_index=annotation.end_index if annotation.end_index is not None else None, + ) diff --git a/python/semantic_kernel/agents/azure_ai/agent_thread_actions.py b/python/semantic_kernel/agents/azure_ai/agent_thread_actions.py new file mode 100644 index 000000000000..b84f0b87e30c --- /dev/null +++ b/python/semantic_kernel/agents/azure_ai/agent_thread_actions.py @@ -0,0 +1,876 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging +from collections.abc import AsyncIterable +from typing import TYPE_CHECKING, Any, ClassVar, TypeVar, cast + +from azure.ai.projects.models import ( + AgentsApiResponseFormat, + AgentsApiResponseFormatMode, + AgentsNamedToolChoiceType, + AgentStreamEvent, + AsyncAgentEventHandler, + AsyncAgentRunStream, + BaseAsyncAgentEventHandler, + OpenAIPageableListOfThreadMessage, + ResponseFormatJsonSchemaType, + RunStep, + RunStepCodeInterpreterToolCall, + RunStepDeltaChunk, + RunStepDeltaToolCallObject, + RunStepMessageCreationDetails, + RunStepToolCallDetails, + RunStepType, + SubmitToolOutputsAction, + ThreadMessage, + ThreadRun, + ToolDefinition, + TruncationObject, +) +from azure.ai.projects.models._enums import MessageRole + +from semantic_kernel.agents.azure_ai.agent_content_generation import ( + generate_code_interpreter_content, + generate_function_call_content, + generate_function_call_streaming_content, + generate_function_result_content, + generate_message_content, + generate_streaming_code_interpreter_content, + generate_streaming_function_content, + generate_streaming_message_content, + get_function_call_contents, +) +from semantic_kernel.agents.azure_ai.azure_ai_agent_utils import AzureAIAgentUtils +from semantic_kernel.agents.open_ai.function_action_result import FunctionActionResult +from semantic_kernel.connectors.ai.function_calling_utils import ( + kernel_function_metadata_to_function_call_format, + merge_streaming_function_results, +) +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions.agent_exceptions import AgentInvokeException +from semantic_kernel.functions import KernelArguments +from semantic_kernel.utils.feature_stage_decorator import experimental + +if TYPE_CHECKING: + from azure.ai.projects.aio import AIProjectClient + + from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent + from semantic_kernel.contents.chat_history import ChatHistory + from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent + from semantic_kernel.kernel import Kernel + +_T = TypeVar("_T", bound="AgentThreadActions") + +logger: logging.Logger = logging.getLogger(__name__) + + +@experimental +class AgentThreadActions: + """AzureAI Agent Thread Actions.""" + + polling_status: ClassVar[list[str]] = ["queued", "in_progress", "cancelling"] + error_message_states: ClassVar[list[str]] = ["failed", 
"cancelled", "expired", "incomplete"] + + # region Invocation Methods + + @classmethod + async def invoke( + cls: type[_T], + *, + agent: "AzureAIAgent", + thread_id: str, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + # Run-level parameters: + model: str | None = None, + instructions_override: str | None = None, + additional_instructions: str | None = None, + additional_messages: "list[ChatMessageContent] | None" = None, + tools: list[ToolDefinition] | None = None, + temperature: float | None = None, + top_p: float | None = None, + max_prompt_tokens: int | None = None, + max_completion_tokens: int | None = None, + truncation_strategy: TruncationObject | None = None, + response_format: AgentsApiResponseFormat + | AgentsApiResponseFormatMode + | ResponseFormatJsonSchemaType + | None = None, + parallel_tool_calls: bool | None = None, + metadata: dict[str, str] | None = None, + **kwargs: Any, + ) -> AsyncIterable[tuple[bool, "ChatMessageContent"]]: + """Invoke the message in the thread. + + Args: + agent: The agent to invoke. + thread_id: The thread id. + arguments: The kernel arguments. + kernel: The kernel. + model: The model. + instructions_override: The instructions override. + additional_instructions: The additional instructions. + additional_messages: The additional messages to add to the thread. Only supports messages with + role = User or Assistant. + https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages + tools: The tools. + temperature: The temperature. + top_p: The top p. + max_prompt_tokens: The max prompt tokens. + max_completion_tokens: The max completion tokens. + truncation_strategy: The truncation strategy. + response_format: The response format. + parallel_tool_calls: The parallel tool calls. + metadata: The metadata. + kwargs: Additional keyword arguments. + + Returns: + A tuple of the visibility flag and the invoked message. + """ + arguments = KernelArguments() if arguments is None else KernelArguments(**arguments, **kwargs) + kernel = kernel or agent.kernel + + tools = cls._get_tools(agent=agent, kernel=kernel) # type: ignore + + base_instructions = await agent.format_instructions(kernel=kernel, arguments=arguments) + + merged_instructions: str = "" + if instructions_override is not None: + merged_instructions = instructions_override + elif base_instructions and additional_instructions: + merged_instructions = f"{base_instructions}\n\n{additional_instructions}" + else: + merged_instructions = base_instructions or additional_instructions or "" + + run_options = cls._generate_options( + agent=agent, + model=model, + additional_messages=additional_messages, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + temperature=temperature, + top_p=top_p, + metadata=metadata, + truncation_strategy=truncation_strategy, + response_format=response_format, + parallel_tool_calls=parallel_tool_calls, + ) + # Remove keys with None values. 
+ run_options = {k: v for k, v in run_options.items() if v is not None} + + run: ThreadRun = await agent.client.agents.create_run( + assistant_id=agent.id, + thread_id=thread_id, + instructions=merged_instructions or agent.instructions, + tools=tools, + **run_options, + ) + + processed_step_ids = set() + function_steps: dict[str, "FunctionCallContent"] = {} + + while run.status != "completed": + run = await cls._poll_run_status(agent=agent, run=run, thread_id=thread_id) + + if run.status in cls.error_message_states: + error_message = "" + if run.last_error and run.last_error.message: + error_message = run.last_error.message + raise AgentInvokeException( + f"Run failed with status: `{run.status}` for agent `{agent.name}` and thread `{thread_id}` " + f"with error: {error_message}" + ) + + # Check if function calling is required + if run.status == "requires_action" and isinstance(run.required_action, SubmitToolOutputsAction): + logger.debug(f"Run [{run.id}] requires tool action for agent `{agent.name}` and thread `{thread_id}`") + fccs = get_function_call_contents(run, function_steps) + if fccs: + logger.debug( + f"Yielding generate_function_call_content for agent `{agent.name}` and " + f"thread `{thread_id}`, visibility False" + ) + yield False, generate_function_call_content(agent_name=agent.name, fccs=fccs) + + from semantic_kernel.contents.chat_history import ChatHistory + + chat_history = ChatHistory() if kwargs.get("chat_history") is None else kwargs["chat_history"] + _ = await cls._invoke_function_calls(kernel=kernel, fccs=fccs, chat_history=chat_history) + + tool_outputs = cls._format_tool_outputs(fccs, chat_history) + await agent.client.agents.submit_tool_outputs_to_run( + run_id=run.id, + thread_id=thread_id, + tool_outputs=tool_outputs, # type: ignore + ) + logger.debug(f"Submitted tool outputs for agent `{agent.name}` and thread `{thread_id}`") + + steps_response = await agent.client.agents.list_run_steps(run_id=run.id, thread_id=thread_id) + logger.debug(f"Called for steps_response for run [{run.id}] agent `{agent.name}` and thread `{thread_id}`") + steps: list[RunStep] = steps_response.data + + def sort_key(step: RunStep): + # Put tool_calls first, then message_creation. + # If multiple steps share a type, break ties by completed_at. 
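+                # Tuples compare element-wise, so completed_at only breaks ties between steps of the same type.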
+ return (0 if step.type == "tool_calls" else 1, step.completed_at) + + completed_steps_to_process = sorted( + [s for s in steps if s.completed_at is not None and s.id not in processed_step_ids], + key=sort_key, + ) + + logger.debug( + f"Completed steps to process for run [{run.id}] agent `{agent.name}` and thread `{thread_id}` " + f"with length `{len(completed_steps_to_process)}`" + ) + + message_count = 0 + for completed_step in completed_steps_to_process: + match completed_step.type: + case RunStepType.TOOL_CALLS: + logger.debug( + f"Entering step type tool_calls for run [{run.id}], agent `{agent.name}` and " + f"thread `{thread_id}`" + ) + tool_call_details: RunStepToolCallDetails = cast( + RunStepToolCallDetails, completed_step.step_details + ) + for tool_call in tool_call_details.tool_calls: + is_visible = False + content: "ChatMessageContent | None" = None + match tool_call.type: + case AgentsNamedToolChoiceType.CODE_INTERPRETER: + logger.debug( + f"Entering tool_calls (code_interpreter) for run [{run.id}], agent " + f"`{agent.name}` and thread `{thread_id}`" + ) + code_call: RunStepCodeInterpreterToolCall = cast( + RunStepCodeInterpreterToolCall, tool_call + ) + content = generate_code_interpreter_content( + agent.name, + code_call.code_interpreter.input, + ) + is_visible = True + case AgentsNamedToolChoiceType.FUNCTION: + logger.debug( + f"Entering tool_calls (function) for run [{run.id}], agent `{agent.name}` " + f"and thread `{thread_id}`" + ) + function_step = function_steps.get(tool_call.id) + assert function_step is not None # nosec + content = generate_function_result_content( + agent_name=agent.name, + function_step=function_step, + tool_call=tool_call, # type: ignore + ) + + if content: + message_count += 1 + logger.debug( + f"Yielding tool_message for run [{run.id}], agent `{agent.name}`, " + f"thread `{thread_id}`, message count `{message_count}`, " + f"is_visible `{is_visible}`" + ) + yield is_visible, content + case RunStepType.MESSAGE_CREATION: + logger.debug( + f"Entering message_creation for run [{run.id}], agent `{agent.name}` and thread " + f"`{thread_id}`" + ) + message_call_details: RunStepMessageCreationDetails = cast( + RunStepMessageCreationDetails, completed_step.step_details + ) + message = await cls._retrieve_message( + agent=agent, + thread_id=thread_id, + message_id=message_call_details.message_creation.message_id, # type: ignore + ) + if message: + content = generate_message_content(agent.name, message) + if content and len(content.items) > 0: + message_count += 1 + logger.debug( + f"Yielding message_creation for run [{run.id}], agent `{agent.name}`, " + f"thread `{thread_id}`, message count `{message_count}`, is_visible `True`" + ) + yield True, content + processed_step_ids.add(completed_step.id) + + @classmethod + async def invoke_stream( + cls: type[_T], + *, + agent: "AzureAIAgent", + thread_id: str, + messages: "list[ChatMessageContent] | None" = None, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + # Run-level parameters: + model: str | None = None, + instructions_override: str | None = None, + additional_instructions: str | None = None, + additional_messages: "list[ChatMessageContent] | None" = None, + tools: list[ToolDefinition] | None = None, + temperature: float | None = None, + top_p: float | None = None, + max_prompt_tokens: int | None = None, + max_completion_tokens: int | None = None, + truncation_strategy: TruncationObject | None = None, + response_format: AgentsApiResponseFormat + | 
AgentsApiResponseFormatMode + | ResponseFormatJsonSchemaType + | None = None, + parallel_tool_calls: bool | None = None, + metadata: dict[str, str] | None = None, + **kwargs: Any, + ) -> AsyncIterable["StreamingChatMessageContent"]: + """Invoke the agent stream and yield ChatMessageContent continuously. + + Args: + agent: The agent to invoke. + thread_id: The thread id. + messages: The messages. + arguments: The kernel arguments. + kernel: The kernel. + model: The model. + instructions_override: The instructions override. + additional_instructions: The additional instructions. + additional_messages: The additional messages to add to the thread. Only supports messages with + role = User or Assistant. + https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages + tools: The tools. + temperature: The temperature. + top_p: The top p. + max_prompt_tokens: The max prompt tokens. + max_completion_tokens: The max completion tokens. + truncation_strategy: The truncation strategy. + response_format: The response format. + parallel_tool_calls: The parallel tool calls. + metadata: The metadata. + kwargs: Additional keyword arguments. + + Returns: + An async iterable of streamed content. + """ + arguments = KernelArguments() if arguments is None else KernelArguments(**arguments, **kwargs) + kernel = kernel or agent.kernel + arguments = agent._merge_arguments(arguments) + + tools = cls._get_tools(agent=agent, kernel=kernel) # type: ignore + + base_instructions = await agent.format_instructions(kernel=kernel, arguments=arguments) + + merged_instructions: str = "" + if instructions_override is not None: + merged_instructions = instructions_override + elif base_instructions and additional_instructions: + merged_instructions = f"{base_instructions}\n\n{additional_instructions}" + else: + merged_instructions = base_instructions or additional_instructions or "" + + run_options = cls._generate_options( + agent=agent, + model=model, + additional_messages=additional_messages, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + temperature=temperature, + top_p=top_p, + metadata=metadata, + truncation_strategy=truncation_strategy, + response_format=response_format, + parallel_tool_calls=parallel_tool_calls, + ) + run_options = {k: v for k, v in run_options.items() if v is not None} + + stream: AsyncAgentRunStream = await agent.client.agents.create_stream( + assistant_id=agent.id, + thread_id=thread_id, + instructions=merged_instructions or agent.instructions, + tools=tools, + **run_options, + ) + + function_steps: dict[str, FunctionCallContent] = {} + active_messages: dict[str, RunStep] = {} + + async for content in cls._process_stream_events( + stream=stream, + agent=agent, + thread_id=thread_id, + messages=messages, + kernel=kernel, + function_steps=function_steps, + active_messages=active_messages, + ): + if content: + yield content + + @classmethod + async def _process_stream_events( + cls: type[_T], + stream: AsyncAgentRunStream, + agent: "AzureAIAgent", + thread_id: str, + kernel: "Kernel", + function_steps: dict[str, FunctionCallContent], + active_messages: dict[str, RunStep], + messages: "list[ChatMessageContent] | None" = None, + ) -> AsyncIterable["StreamingChatMessageContent"]: + """Process events from the main stream and delegate tool output handling as needed.""" + while True: + async with stream as response_stream: + async for event_type, event_data, _ in response_stream: + if event_type == AgentStreamEvent.THREAD_RUN_CREATED: 
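+                        # The THREAD_RUN_CREATED payload is the new ThreadRun; it is only used for logging here.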
+ run = event_data + logger.info(f"Assistant run created with ID: {run.id}") + + elif event_type == AgentStreamEvent.THREAD_RUN_IN_PROGRESS: + run_step = cast(RunStep, event_data) + logger.info(f"Assistant run in progress with ID: {run_step.id}") + + elif event_type == AgentStreamEvent.THREAD_MESSAGE_DELTA: + yield generate_streaming_message_content(agent.name, event_data) + + elif event_type == AgentStreamEvent.THREAD_RUN_STEP_COMPLETED: + step_completed = cast(RunStep, event_data) + logger.info(f"Run step completed with ID: {step_completed.id}") + if isinstance(step_completed.step_details, RunStepMessageCreationDetails): + msg_id = step_completed.step_details.message_creation.message_id + active_messages.setdefault(msg_id, step_completed) + + elif event_type == AgentStreamEvent.THREAD_RUN_STEP_DELTA: + run_step_event: RunStepDeltaChunk = event_data + details = run_step_event.delta.step_details + if not details: + continue + if isinstance(details, RunStepDeltaToolCallObject) and details.tool_calls: + for tool_call in details.tool_calls: + content = None + if tool_call.type == "function": + content = generate_streaming_function_content(agent.name, details) + elif tool_call.type == "code_interpreter": + content = generate_streaming_code_interpreter_content(agent.name, details) + if content: + yield content + + elif event_type == AgentStreamEvent.THREAD_RUN_REQUIRES_ACTION: + run = cast(ThreadRun, event_data) + action_result = await cls._handle_streaming_requires_action( + agent_name=agent.name, + kernel=kernel, + run=run, + function_steps=function_steps, + ) + if action_result is None: + raise RuntimeError( + f"Function call required but no function steps found for agent `{agent.name}` " + f"thread: {thread_id}." + ) + + if action_result.function_result_streaming_content: + yield action_result.function_result_streaming_content + if messages: + messages.append(action_result.function_result_streaming_content) + + if action_result.function_call_streaming_content: + if messages: + messages.append(action_result.function_call_streaming_content) + async for sub_content in cls._stream_tool_outputs( + agent=agent, + thread_id=thread_id, + run=run, + action_result=action_result, + active_messages=active_messages, + messages=messages, + ): + if sub_content: + yield sub_content + break + + elif event_type == AgentStreamEvent.THREAD_RUN_COMPLETED: + run = cast(ThreadRun, event_data) + logger.info(f"Run completed with ID: {run.id}") + if active_messages: + for msg_id, step in active_messages.items(): + message = await cls._retrieve_message( + agent=agent, thread_id=thread_id, message_id=msg_id + ) + if message and hasattr(message, "content"): + final_content = generate_message_content(agent.name, message, step) + if messages: + messages.append(final_content) + return + + elif event_type == AgentStreamEvent.THREAD_RUN_FAILED: + run_failed = cast(ThreadRun, event_data) + error_message = ( + run_failed.last_error.message + if run_failed.last_error and run_failed.last_error.message + else "" + ) + raise RuntimeError( + f"Run failed with status: `{run_failed.status}` for agent `{agent.name}` " + f"thread `{thread_id}` with error: {error_message}" + ) + else: + break + return + + @classmethod + async def _stream_tool_outputs( + cls: type[_T], + agent: "AzureAIAgent", + thread_id: str, + run: ThreadRun, + action_result: FunctionActionResult, + active_messages: dict[str, RunStep], + messages: "list[ChatMessageContent] | None" = None, + ) -> AsyncIterable["StreamingChatMessageContent"]: + """Wrap the tool outputs 
stream as an async generator. + + This allows downstream consumers to iterate over the yielded content. + """ + handler: BaseAsyncAgentEventHandler = AsyncAgentEventHandler() + await agent.client.agents.submit_tool_outputs_to_stream( + run_id=run.id, + thread_id=thread_id, + tool_outputs=action_result.tool_outputs, # type: ignore + event_handler=handler, + ) + async for sub_event_type, sub_event_data, _ in handler: + if sub_event_type == AgentStreamEvent.THREAD_MESSAGE_DELTA: + yield generate_streaming_message_content(agent.name, sub_event_data) + elif sub_event_type == AgentStreamEvent.THREAD_RUN_COMPLETED: + thread_run = cast(ThreadRun, sub_event_data) + logger.info(f"Run completed with ID: {thread_run.id}") + if active_messages: + for msg_id, step in active_messages.items(): + message = await cls._retrieve_message(agent=agent, thread_id=thread_id, message_id=msg_id) + if message and hasattr(message, "content"): + final_content = generate_message_content(agent.name, message, step) + if messages: + messages.append(final_content) + return + elif sub_event_type == AgentStreamEvent.THREAD_RUN_FAILED: + run_failed = cast(ThreadRun, sub_event_data) + error_message = ( + run_failed.last_error.message if run_failed.last_error and run_failed.last_error.message else "" + ) + raise RuntimeError( + f"Run failed with status: `{run_failed.status}` for agent `{agent.name}` " + f"thread `{thread_id}` with error: {error_message}" + ) + elif sub_event_type == AgentStreamEvent.DONE: + break + + # endregion + + # region Messaging Handling Methods + + @classmethod + async def create_thread( + cls: type[_T], + client: "AIProjectClient", + **kwargs: Any, + ) -> str: + """Create a thread. + + Args: + client: The client to use to create the thread. + kwargs: Additional keyword arguments. + + Returns: + The ID of the created thread. + """ + thread = await client.agents.create_thread(**kwargs) + return thread.id + + @classmethod + async def create_message( + cls: type[_T], + client: "AIProjectClient", + thread_id: str, + message: "str | ChatMessageContent", + **kwargs: Any, + ) -> "ThreadMessage | None": + """Create a message in the thread. + + Args: + client: The client to use to create the message. + thread_id: The ID of the thread to create the message in. + message: The message to create. + kwargs: Additional keyword arguments. + + Returns: + The created message. + """ + if isinstance(message, str): + message = ChatMessageContent(role=AuthorRole.USER, content=message) + + if any(isinstance(item, FunctionCallContent) for item in message.items): + return None + + if not message.content.strip(): + return None + + return await client.agents.create_message( + thread_id=thread_id, + role=MessageRole.USER if message.role == AuthorRole.USER else MessageRole.AGENT, + content=message.content, + attachments=AzureAIAgentUtils.get_attachments(message), + metadata=AzureAIAgentUtils.get_metadata(message), + **kwargs, + ) + + @classmethod + async def get_messages( + cls: type[_T], + client: "AIProjectClient", + thread_id: str, + ) -> AsyncIterable["ChatMessageContent"]: + """Get messages from a thread. + + Args: + client: The client to use to get the messages. + thread_id: The ID of the thread to get the messages from. + + Yields: + The messages from the thread. 
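+            Messages are paged from the service newest first until no further results remain.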
+ """ + agent_names: dict[str, Any] = {} + last_id: str | None = None + messages: OpenAIPageableListOfThreadMessage + + while True: + messages = await client.agents.list_messages( + thread_id=thread_id, + run_id=None, + limit=None, + order="desc", + after=last_id, + before=None, + ) + + if not messages: + break + + for message in messages.data: + last_id = message.id + assistant_name: str | None = None + + if message.assistant_id and message.assistant_id.strip() and message.assistant_id not in agent_names: + assistant = await client.agents.get_agent(message.assistant_id) + if assistant.name and assistant.name.strip(): + agent_names[assistant.id] = assistant.name + + assistant_name = agent_names.get(message.assistant_id) or message.assistant_id + + content = generate_message_content(assistant_name, message) + + if len(content.items) > 0: + yield content + + if not messages.has_more: + break + + # endregion + + # region Internal Methods + + @classmethod + def _merge_options( + cls: type[_T], + *, + agent: "AzureAIAgent", + model: str | None = None, + response_format: AgentsApiResponseFormat + | AgentsApiResponseFormatMode + | ResponseFormatJsonSchemaType + | None = None, + temperature: float | None = None, + top_p: float | None = None, + metadata: dict[str, str] | None = None, + **kwargs: Any, + ) -> dict[str, Any]: + """Merge run-time options with the agent-level options. + + Run-level parameters take precedence. + """ + return { + "model": model if model is not None else agent.definition.model, + "response_format": response_format if response_format is not None else agent.definition.response_format, + "temperature": temperature if temperature is not None else agent.definition.temperature, + "top_p": top_p if top_p is not None else agent.definition.top_p, + "metadata": metadata if metadata is not None else agent.definition.metadata, + **kwargs, + } + + @classmethod + def _generate_options(cls: type[_T], **kwargs: Any) -> dict[str, Any]: + """Generate a dictionary of options that can be passed directly to create_run.""" + merged = cls._merge_options(**kwargs) + trunc_count = merged.get("truncation_message_count", None) + max_completion_tokens = merged.get("max_completion_tokens", None) + max_prompt_tokens = merged.get("max_prompt_tokens", None) + parallel_tool_calls = merged.get("parallel_tool_calls_enabled", None) + additional_messages = cls._translate_additional_messages(merged.get("additional_messages", None)) + return { + "model": merged.get("model"), + "top_p": merged.get("top_p"), + "response_format": merged.get("response_format"), + "temperature": merged.get("temperature"), + "truncation_strategy": trunc_count, + "metadata": merged.get("metadata"), + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "parallel_tool_calls": parallel_tool_calls, + "additional_messages": additional_messages, + } + + @classmethod + def _translate_additional_messages( + cls: type[_T], messages: "list[ChatMessageContent] | None" + ) -> list[ThreadMessage] | None: + """Translate additional messages to the required format.""" + if not messages: + return None + return AzureAIAgentUtils.get_thread_messages(messages) + + @classmethod + def _prepare_tool_definition(cls: type[_T], tool: dict | ToolDefinition) -> dict | ToolDefinition: + """Prepare the tool definition.""" + if tool.get("type") == "openapi" and "openapi" in tool: + openapi_data = dict(tool["openapi"]) + openapi_data.pop("functions", None) + tool = dict(tool) + tool["openapi"] = openapi_data + return tool + + 
@classmethod + def _get_tools(cls: type[_T], agent: "AzureAIAgent", kernel: "Kernel") -> list[dict[str, Any] | ToolDefinition]: + """Get the tools for the agent.""" + tools: list[Any] = list(agent.definition.tools) + funcs = kernel.get_full_list_of_function_metadata() + dict_defs = [kernel_function_metadata_to_function_call_format(f) for f in funcs] + tools.extend(dict_defs) + return [cls._prepare_tool_definition(tool) for tool in tools] + + @classmethod + async def _poll_run_status(cls: type[_T], agent: "AzureAIAgent", run: ThreadRun, thread_id: str) -> ThreadRun: + """Poll the run status.""" + logger.info(f"Polling run status: {run.id}, threadId: {thread_id}") + try: + run = await asyncio.wait_for( + cls._poll_loop(agent=agent, run=run, thread_id=thread_id), + timeout=agent.polling_options.run_polling_timeout.total_seconds(), + ) + except asyncio.TimeoutError: + timeout_duration = agent.polling_options.run_polling_timeout + error_message = ( + f"Polling timed out for run id: `{run.id}` and thread id: `{thread_id}` " + f"after waiting {timeout_duration}." + ) + logger.error(error_message) + raise AgentInvokeException(error_message) + logger.info(f"Polled run status: {run.status}, {run.id}, threadId: {thread_id}") + return run + + @classmethod + async def _poll_loop(cls: type[_T], agent: "AzureAIAgent", run: ThreadRun, thread_id: str) -> ThreadRun: + """Continuously poll the run status until it is no longer pending.""" + count = 0 + while True: + await asyncio.sleep(agent.polling_options.get_polling_interval(count).total_seconds()) + count += 1 + try: + run = await agent.client.agents.get_run(run_id=run.id, thread_id=thread_id) + except Exception as e: + logger.warning(f"Failed to retrieve run for run id: `{run.id}` and thread id: `{thread_id}`: {e}") + if run.status not in cls.polling_status: + break + return run + + @classmethod + async def _retrieve_message( + cls: type[_T], agent: "AzureAIAgent", thread_id: str, message_id: str + ) -> ThreadMessage | None: + """Retrieve a message from a thread.""" + message: ThreadMessage | None = None + count = 0 + max_retries = 3 + while count < max_retries: + try: + message = await agent.client.agents.get_message(thread_id=thread_id, message_id=message_id) + break + except Exception as ex: + logger.error(f"Failed to retrieve message {message_id} from thread {thread_id}: {ex}") + count += 1 + if count >= max_retries: + logger.error( + f"Max retries reached. Unable to retrieve message {message_id} from thread {thread_id}." 
+ ) + break + backoff_time: float = agent.polling_options.message_synchronization_delay.total_seconds() * (2**count) + await asyncio.sleep(backoff_time) + return message + + @classmethod + async def _invoke_function_calls( + cls: type[_T], kernel: "Kernel", fccs: list["FunctionCallContent"], chat_history: "ChatHistory" + ) -> list[Any]: + """Invoke the function calls.""" + tasks = [ + kernel.invoke_function_call(function_call=function_call, chat_history=chat_history) + for function_call in fccs + ] + return await asyncio.gather(*tasks) + + @classmethod + def _format_tool_outputs( + cls: type[_T], fccs: list["FunctionCallContent"], chat_history: "ChatHistory" + ) -> list[dict[str, str]]: + """Format the tool outputs for submission.""" + from semantic_kernel.contents.function_result_content import FunctionResultContent + + tool_call_lookup = { + tool_call.id: tool_call + for message in chat_history.messages + for tool_call in message.items + if isinstance(tool_call, FunctionResultContent) + } + return [ + {"tool_call_id": fcc.id, "output": str(tool_call_lookup[fcc.id].result)} + for fcc in fccs + if fcc.id in tool_call_lookup + ] + + @classmethod + async def _handle_streaming_requires_action( + cls: type[_T], + agent_name: str, + kernel: "Kernel", + run: ThreadRun, + function_steps: dict[str, "FunctionCallContent"], + **kwargs: Any, + ) -> FunctionActionResult | None: + """Handle the requires action event for a streaming run.""" + fccs = get_function_call_contents(run, function_steps) + if fccs: + function_call_streaming_content = generate_function_call_streaming_content(agent_name=agent_name, fccs=fccs) + from semantic_kernel.contents.chat_history import ChatHistory + + chat_history = ChatHistory() if kwargs.get("chat_history") is None else kwargs["chat_history"] + _ = await cls._invoke_function_calls(kernel=kernel, fccs=fccs, chat_history=chat_history) + function_result_streaming_content = merge_streaming_function_results(chat_history.messages)[0] + tool_outputs = cls._format_tool_outputs(fccs, chat_history) + return FunctionActionResult( + function_call_streaming_content, function_result_streaming_content, tool_outputs + ) + return None + + # endregion diff --git a/python/semantic_kernel/agents/azure_ai/azure_ai_agent.py b/python/semantic_kernel/agents/azure_ai/azure_ai_agent.py new file mode 100644 index 000000000000..1d4fb805fa35 --- /dev/null +++ b/python/semantic_kernel/agents/azure_ai/azure_ai_agent.py @@ -0,0 +1,390 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import logging +import sys +from collections.abc import AsyncIterable, Iterable +from typing import TYPE_CHECKING, Any, ClassVar, TypeVar + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from azure.ai.projects.aio import AIProjectClient +from azure.ai.projects.models import Agent as AzureAIAgentModel +from azure.ai.projects.models import ( + AgentsApiResponseFormat, + AgentsApiResponseFormatMode, + ResponseFormatJsonSchemaType, + ThreadMessage, + ThreadMessageOptions, + ToolDefinition, + TruncationObject, +) +from pydantic import Field + +from semantic_kernel.agents.agent import Agent +from semantic_kernel.agents.azure_ai.agent_thread_actions import AgentThreadActions +from semantic_kernel.agents.azure_ai.azure_ai_agent_settings import AzureAIAgentSettings +from semantic_kernel.agents.azure_ai.azure_ai_channel import AzureAIChannel +from semantic_kernel.agents.channels.agent_channel import AgentChannel +from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException, AgentInvokeException +from semantic_kernel.functions import KernelArguments +from semantic_kernel.functions.kernel_function import TEMPLATE_FORMAT_MAP +from semantic_kernel.functions.kernel_plugin import KernelPlugin +from semantic_kernel.kernel import Kernel +from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig +from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.naming import generate_random_ascii_name +from semantic_kernel.utils.telemetry.agent_diagnostics.decorators import ( + trace_agent_get_response, + trace_agent_invocation, +) +from semantic_kernel.utils.telemetry.user_agent import APP_INFO, SEMANTIC_KERNEL_USER_AGENT + +logger: logging.Logger = logging.getLogger(__name__) + +if TYPE_CHECKING: + from azure.identity.aio import DefaultAzureCredential + + from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent + +AgentsApiResponseFormatOption = ( + str | AgentsApiResponseFormatMode | AgentsApiResponseFormat | ResponseFormatJsonSchemaType +) + +_T = TypeVar("_T", bound="AzureAIAgent") + + +@experimental +class AzureAIAgent(Agent): + """Azure AI Agent class.""" + + client: AIProjectClient + definition: AzureAIAgentModel + polling_options: RunPollingOptions = Field(default_factory=RunPollingOptions) + + channel_type: ClassVar[type[AgentChannel]] = AzureAIChannel + + def __init__( + self, + *, + arguments: "KernelArguments | None" = None, + client: AIProjectClient, + definition: AzureAIAgentModel, + kernel: "Kernel | None" = None, + plugins: list[KernelPlugin | object] | dict[str, KernelPlugin | object] | None = None, + polling_options: RunPollingOptions | None = None, + prompt_template_config: "PromptTemplateConfig | None" = None, + **kwargs: Any, + ) -> None: + """Initialize the Azure AI Agent. + + Args: + arguments: The KernelArguments instance + client: The AzureAI Project client. See "Quickstart: Create a new agent" guide + https://learn.microsoft.com/en-us/azure/ai-services/agents/quickstart?pivots=programming-language-python-azure + for details on how to create a new agent. + definition: The AzureAI Agent model created via the AzureAI Project client. 
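+                For example, the agent returned by `client.agents.create_agent(...)` or `client.agents.get_agent(...)`.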
+ kernel: The Kernel instance used if invoking plugins + plugins: The plugins for the agent. If plugins are included along with a kernel, any plugins + that already exist in the kernel will be overwritten. + polling_options: The polling options for the agent. + prompt_template_config: The prompt template configuration. If this is provided along with + instructions, the prompt template will be used in place of the instructions. + **kwargs: Additional keyword arguments + """ + args: dict[str, Any] = { + "client": client, + "definition": definition, + "name": definition.name or f"azure_agent_{generate_random_ascii_name(length=8)}", + "description": definition.description, + } + + if definition.id is not None: + args["id"] = definition.id + if kernel is not None: + args["kernel"] = kernel + if arguments is not None: + args["arguments"] = arguments + if ( + definition.instructions + and prompt_template_config + and definition.instructions != prompt_template_config.template + ): + logger.info( + f"Both `instructions` ({definition.instructions}) and `prompt_template_config` " + f"({prompt_template_config.template}) were provided. Using template in `prompt_template_config` " + "and ignoring `instructions`." + ) + + if plugins is not None: + args["plugins"] = plugins + if definition.instructions is not None: + args["instructions"] = definition.instructions + if prompt_template_config is not None: + args["prompt_template"] = TEMPLATE_FORMAT_MAP[prompt_template_config.template_format]( + prompt_template_config=prompt_template_config + ) + if prompt_template_config.template is not None: + # Use the template from the prompt_template_config if it is provided + args["instructions"] = prompt_template_config.template + if polling_options is not None: + args["polling_options"] = polling_options + if kwargs: + args.update(kwargs) + + super().__init__(**args) + + @staticmethod + def create_client( + credential: "DefaultAzureCredential", + conn_str: str | None = None, + **kwargs: Any, + ) -> AIProjectClient: + """Create the Azure AI Project client using the connection string. + + Args: + credential: The credential + conn_str: The connection string + kwargs: Additional keyword arguments + + Returns: + AIProjectClient: The Azure AI Project client + """ + if conn_str is None: + ai_agent_settings = AzureAIAgentSettings.create() + if not ai_agent_settings.project_connection_string: + raise AgentInitializationException("Please provide a valid Azure AI connection string.") + conn_str = ai_agent_settings.project_connection_string.get_secret_value() + + return AIProjectClient.from_connection_string( + credential=credential, + conn_str=conn_str, + **({"user_agent": SEMANTIC_KERNEL_USER_AGENT} if APP_INFO else {}), + **kwargs, + ) + + async def add_chat_message(self, thread_id: str, message: str | ChatMessageContent) -> "ThreadMessage | None": + """Add a chat message to the thread. 
+ + Args: + thread_id: The ID of the thread + message: The chat message to add + + Returns: + ThreadMessage | None: The thread message + """ + return await AgentThreadActions.create_message(client=self.client, thread_id=thread_id, message=message) + + @trace_agent_get_response + @override + async def get_response( + self, + thread_id: str, + arguments: KernelArguments | None = None, + kernel: Kernel | None = None, + # Run-level parameters: + *, + model: str | None = None, + instructions_override: str | None = None, + additional_instructions: str | None = None, + additional_messages: list[ThreadMessageOptions] | None = None, + tools: list[ToolDefinition] | None = None, + temperature: float | None = None, + top_p: float | None = None, + max_prompt_tokens: int | None = None, + max_completion_tokens: int | None = None, + truncation_strategy: TruncationObject | None = None, + response_format: AgentsApiResponseFormatOption | None = None, + parallel_tool_calls: bool | None = None, + metadata: dict[str, str] | None = None, + **kwargs: Any, + ) -> ChatMessageContent: + """Get a response from the agent on a thread.""" + if arguments is None: + arguments = KernelArguments(**kwargs) + else: + arguments.update(kwargs) + + kernel = kernel or self.kernel + arguments = self._merge_arguments(arguments) + + run_level_params = { + "model": model, + "instructions_override": instructions_override, + "additional_instructions": additional_instructions, + "additional_messages": additional_messages, + "tools": tools, + "temperature": temperature, + "top_p": top_p, + "max_prompt_tokens": max_prompt_tokens, + "max_completion_tokens": max_completion_tokens, + "truncation_strategy": truncation_strategy, + "response_format": response_format, + "parallel_tool_calls": parallel_tool_calls, + "metadata": metadata, + } + run_level_params = {k: v for k, v in run_level_params.items() if v is not None} + + messages: list[ChatMessageContent] = [] + async for is_visible, message in AgentThreadActions.invoke( + agent=self, + thread_id=thread_id, + kernel=kernel, + arguments=arguments, + **run_level_params, # type: ignore + ): + if is_visible and message.metadata.get("code") is not True: + messages.append(message) + + if not messages: + raise AgentInvokeException("No response messages were returned from the agent.") + return messages[-1] + + @trace_agent_invocation + @override + async def invoke( + self, + thread_id: str, + arguments: KernelArguments | None = None, + kernel: Kernel | None = None, + # Run-level parameters: + *, + model: str | None = None, + instructions_override: str | None = None, + additional_instructions: str | None = None, + additional_messages: list[ThreadMessageOptions] | None = None, + tools: list[ToolDefinition] | None = None, + temperature: float | None = None, + top_p: float | None = None, + max_prompt_tokens: int | None = None, + max_completion_tokens: int | None = None, + truncation_strategy: TruncationObject | None = None, + response_format: AgentsApiResponseFormatOption | None = None, + parallel_tool_calls: bool | None = None, + metadata: dict[str, str] | None = None, + **kwargs: Any, + ) -> AsyncIterable[ChatMessageContent]: + """Invoke the agent on the specified thread.""" + if arguments is None: + arguments = KernelArguments(**kwargs) + else: + arguments.update(kwargs) + + kernel = kernel or self.kernel + arguments = self._merge_arguments(arguments) + + run_level_params = { + "model": model, + "instructions_override": instructions_override, + "additional_instructions": additional_instructions, + 
"additional_messages": additional_messages, + "tools": tools, + "temperature": temperature, + "top_p": top_p, + "max_prompt_tokens": max_prompt_tokens, + "max_completion_tokens": max_completion_tokens, + "truncation_strategy": truncation_strategy, + "response_format": response_format, + "parallel_tool_calls": parallel_tool_calls, + "metadata": metadata, + } + run_level_params = {k: v for k, v in run_level_params.items() if v is not None} + + async for is_visible, message in AgentThreadActions.invoke( + agent=self, + thread_id=thread_id, + kernel=kernel, + arguments=arguments, + **run_level_params, # type: ignore + ): + if is_visible: + yield message + + @trace_agent_invocation + @override + async def invoke_stream( + self, + thread_id: str, + messages: list[ChatMessageContent] | None = None, + kernel: Kernel | None = None, + arguments: KernelArguments | None = None, + # Run-level parameters: + *, + model: str | None = None, + instructions_override: str | None = None, + additional_instructions: str | None = None, + additional_messages: list[ThreadMessageOptions] | None = None, + tools: list[ToolDefinition] | None = None, + temperature: float | None = None, + top_p: float | None = None, + max_prompt_tokens: int | None = None, + max_completion_tokens: int | None = None, + truncation_strategy: TruncationObject | None = None, + response_format: AgentsApiResponseFormatOption | None = None, + parallel_tool_calls: bool | None = None, + metadata: dict[str, str] | None = None, + **kwargs: Any, + ) -> AsyncIterable["StreamingChatMessageContent"]: + """Invoke the agent on the specified thread with a stream of messages.""" + if arguments is None: + arguments = KernelArguments(**kwargs) + else: + arguments.update(kwargs) + + kernel = kernel or self.kernel + arguments = self._merge_arguments(arguments) + + run_level_params = { + "model": model, + "instructions_override": instructions_override, + "additional_instructions": additional_instructions, + "additional_messages": additional_messages, + "tools": tools, + "temperature": temperature, + "top_p": top_p, + "max_prompt_tokens": max_prompt_tokens, + "max_completion_tokens": max_completion_tokens, + "truncation_strategy": truncation_strategy, + "response_format": response_format, + "parallel_tool_calls": parallel_tool_calls, + "metadata": metadata, + } + run_level_params = {k: v for k, v in run_level_params.items() if v is not None} + + async for message in AgentThreadActions.invoke_stream( + agent=self, + thread_id=thread_id, + messages=messages, + kernel=kernel, + arguments=arguments, + **run_level_params, # type: ignore + ): + yield message + + def get_channel_keys(self) -> Iterable[str]: + """Get the channel keys. + + Returns: + Iterable[str]: The channel keys. + """ + # Distinguish from other channel types. 
+ yield f"{AzureAIAgent.__name__}" + + # Distinguish between different agent IDs + yield self.id + + # Distinguish between agent names + yield self.name + + # Distinguish between different scopes + yield str(self.client.scope) + + async def create_channel(self) -> AgentChannel: + """Create a channel.""" + thread_id = await AgentThreadActions.create_thread(self.client) + + return AzureAIChannel(client=self.client, thread_id=thread_id) diff --git a/python/semantic_kernel/agents/azure_ai/azure_ai_agent_settings.py b/python/semantic_kernel/agents/azure_ai/azure_ai_agent_settings.py new file mode 100644 index 000000000000..e17bfaaf5a5b --- /dev/null +++ b/python/semantic_kernel/agents/azure_ai/azure_ai_agent_settings.py @@ -0,0 +1,32 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import ClassVar + +from pydantic import SecretStr + +from semantic_kernel.kernel_pydantic import KernelBaseSettings +from semantic_kernel.utils.feature_stage_decorator import experimental + + +@experimental +class AzureAIAgentSettings(KernelBaseSettings): + """Azure AI Agent settings currently used by the AzureAIAgent. + + Args: + model_deployment_name: Azure AI Agent (Env var AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME) + project_connection_string: Azure AI Agent Project Connection String + (Env var AZURE_AI_AGENT_PROJECT_CONNECTION_STRING) + endpoint: Azure AI Agent Endpoint (Env var AZURE_AI_AGENT_ENDPOINT) + subscription_id: Azure AI Agent Subscription ID (Env var AZURE_AI_AGENT_SUBSCRIPTION_ID) + resource_group_name: Azure AI Agent Resource Group Name (Env var AZURE_AI_AGENT_RESOURCE_GROUP_NAME) + project_name: Azure AI Agent Project Name (Env var AZURE_AI_AGENT_PROJECT_NAME) + """ + + env_prefix: ClassVar[str] = "AZURE_AI_AGENT_" + + model_deployment_name: str + project_connection_string: SecretStr | None = None + endpoint: str | None = None + subscription_id: str | None = None + resource_group_name: str | None = None + project_name: str | None = None diff --git a/python/semantic_kernel/agents/azure_ai/azure_ai_agent_utils.py b/python/semantic_kernel/agents/azure_ai/azure_ai_agent_utils.py new file mode 100644 index 000000000000..cfa0ebdc43b2 --- /dev/null +++ b/python/semantic_kernel/agents/azure_ai/azure_ai_agent_utils.py @@ -0,0 +1,87 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from collections.abc import Iterable, Sequence +from typing import TYPE_CHECKING, Any, ClassVar, TypeVar + +from azure.ai.projects.models import ( + CodeInterpreterTool, + FileSearchTool, + MessageAttachment, + MessageRole, + ThreadMessageOptions, + ToolDefinition, +) + +from semantic_kernel.contents.file_reference_content import FileReferenceContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.utils.feature_stage_decorator import experimental + +if TYPE_CHECKING: + from semantic_kernel.contents import ChatMessageContent + +_T = TypeVar("_T", bound="AzureAIAgentUtils") + + +@experimental +class AzureAIAgentUtils: + """AzureAI Agent Utility Methods.""" + + tool_metadata: ClassVar[dict[str, Sequence[ToolDefinition]]] = { + "file_search": FileSearchTool().definitions, + "code_interpreter": CodeInterpreterTool().definitions, + } + + @classmethod + def get_thread_messages(cls: type[_T], messages: list["ChatMessageContent"]) -> Any: + """Get the thread messages for an agent message.""" + if not messages: + return None + + thread_messages: list[ThreadMessageOptions] = [] + + for message in messages: + if not message.content: + continue + + thread_msg = ThreadMessageOptions( + content=message.content, + role=MessageRole.USER if message.role == AuthorRole.USER else MessageRole.AGENT, + attachments=cls.get_attachments(message), + metadata=cls.get_metadata(message) if message.metadata else None, + ) + thread_messages.append(thread_msg) + + return thread_messages + + @classmethod + def get_metadata(cls: type[_T], message: "ChatMessageContent") -> dict[str, str]: + """Get the metadata for an agent message.""" + return {k: str(v) if v is not None else "" for k, v in (message.metadata or {}).items()} + + @classmethod + def get_attachments(cls: type[_T], message: "ChatMessageContent") -> list[MessageAttachment]: + """Get the attachments for an agent message. + + Args: + message: The ChatMessageContent + + Returns: + A list of MessageAttachment + """ + return [ + MessageAttachment( + file_id=file_content.file_id, + tools=list(cls._get_tool_definition(file_content.tools)), # type: ignore + data_source=file_content.data_source if file_content.data_source else None, + ) + for file_content in message.items + if isinstance(file_content, FileReferenceContent) + ] + + @classmethod + def _get_tool_definition(cls: type[_T], tools: list[Any]) -> Iterable[ToolDefinition]: + if not tools: + return + for tool in tools: + if tool_definition := cls.tool_metadata.get(tool): + yield from tool_definition diff --git a/python/semantic_kernel/agents/azure_ai/azure_ai_channel.py b/python/semantic_kernel/agents/azure_ai/azure_ai_channel.py new file mode 100644 index 000000000000..f662e3ad33ac --- /dev/null +++ b/python/semantic_kernel/agents/azure_ai/azure_ai_channel.py @@ -0,0 +1,121 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import sys +from collections.abc import AsyncIterable +from typing import TYPE_CHECKING + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from semantic_kernel.agents.azure_ai.agent_thread_actions import AgentThreadActions +from semantic_kernel.agents.channels.agent_channel import AgentChannel +from semantic_kernel.exceptions.agent_exceptions import AgentChatException +from semantic_kernel.utils.feature_stage_decorator import experimental + +if TYPE_CHECKING: + from azure.ai.projects.aio import AIProjectClient + + from semantic_kernel.agents.agent import Agent + from semantic_kernel.contents.chat_message_content import ChatMessageContent + + +@experimental +class AzureAIChannel(AgentChannel): + """AzureAI Channel.""" + + def __init__(self, client: "AIProjectClient", thread_id: str) -> None: + """Initialize the AzureAI Channel. + + Args: + client: The AzureAI Project client. + thread_id: The thread ID. + """ + self.client = client + self.thread_id = thread_id + + @override + async def receive(self, history: list["ChatMessageContent"]) -> None: + """Receive the conversation messages. + + Args: + history: The conversation messages. + """ + for message in history: + await AgentThreadActions.create_message(self.client, self.thread_id, message) + + @override + async def invoke(self, agent: "Agent", **kwargs) -> AsyncIterable[tuple[bool, "ChatMessageContent"]]: + """Invoke the agent. + + Args: + agent: The agent to invoke. + kwargs: The keyword arguments. + + Yields: + tuple[bool, ChatMessageContent]: The conversation messages. + """ + from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent + + if not isinstance(agent, AzureAIAgent): + raise AgentChatException(f"Agent is not of the expected type {type(AzureAIAgent)}.") + + async for is_visible, message in AgentThreadActions.invoke( + agent=agent, + thread_id=self.thread_id, + arguments=agent.arguments, + kernel=agent.kernel, + **kwargs, + ): + yield is_visible, message + + @override + async def invoke_stream( + self, + agent: "Agent", + messages: list["ChatMessageContent"], + **kwargs, + ) -> AsyncIterable["ChatMessageContent"]: + """Invoke the agent stream. + + Args: + agent: The agent to invoke. + messages: The conversation messages. + kwargs: The keyword arguments. + + Yields: + tuple[bool, StreamingChatMessageContent]: The conversation messages. + """ + from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent + + if not isinstance(agent, AzureAIAgent): + raise AgentChatException(f"Agent is not of the expected type {type(AzureAIAgent)}.") + + async for message in AgentThreadActions.invoke_stream( + agent=agent, + thread_id=self.thread_id, + messages=messages, + arguments=agent.arguments, + kernel=agent.kernel, + **kwargs, + ): + yield message + + @override + async def get_history(self) -> AsyncIterable["ChatMessageContent"]: + """Get the conversation history. + + Yields: + ChatMessageContent: The conversation history. 
+        """
+        async for message in AgentThreadActions.get_messages(self.client, thread_id=self.thread_id):
+            yield message
+
+    @override
+    async def reset(self) -> None:
+        """Reset the agent's thread."""
+        try:
+            await self.client.agents.delete_thread(thread_id=self.thread_id)
+        except Exception as e:
+            raise AgentChatException(f"Failed to delete thread: {e}") from e
diff --git a/python/semantic_kernel/agents/bedrock/README.md b/python/semantic_kernel/agents/bedrock/README.md
new file mode 100644
index 000000000000..d1e17f9245c3
--- /dev/null
+++ b/python/semantic_kernel/agents/bedrock/README.md
@@ -0,0 +1,27 @@
+# Amazon Bedrock AI Agents in Semantic Kernel
+
+## Overview
+
+AWS Bedrock Agents is a managed service that allows users to quickly stand up and run AI agents in the AWS cloud.
+
+## Tools/Functions
+
+Bedrock Agents allow the use of tools via [action groups](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-action-create.html).
+
+The integration of Bedrock Agents with Semantic Kernel allows users to register kernel functions as tools in Bedrock Agents.
+
+## Enable code interpretation
+
+Bedrock Agents can write and execute code via a feature known as [code interpretation](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-code-interpretation.html), similar to what OpenAI offers.
+
+## Enable user input
+
+Bedrock Agents can [request user input](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-user-input.html) when information required to invoke a tool is missing. When this is enabled, the agent will prompt the user for the missing information; when it is disabled, the agent will guess the missing information.
+
+## Knowledge base
+
+Bedrock Agents can leverage data stored on AWS to perform RAG tasks; in AWS this is referred to as the [knowledge base](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-kb-add.html).
+
+## Multi-agent
+
+Bedrock Agents support [multi-agent workflows](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-multi-agent-collaboration.html) for more complex tasks. However, they employ a different pattern than the one used in Semantic Kernel, so this is not supported in the current integration.
\ No newline at end of file
diff --git a/python/semantic_kernel/agents/bedrock/__init__.py b/python/semantic_kernel/agents/bedrock/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/python/semantic_kernel/agents/bedrock/action_group_utils.py b/python/semantic_kernel/agents/bedrock/action_group_utils.py
new file mode 100644
index 000000000000..29e391e0f0f2
--- /dev/null
+++ b/python/semantic_kernel/agents/bedrock/action_group_utils.py
@@ -0,0 +1,117 @@
+# Copyright (c) Microsoft. All rights reserved.
+ +from typing import Any + +from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata +from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata + + +def kernel_function_to_bedrock_function_schema( + function_choice_configuration: FunctionCallChoiceConfiguration, +) -> dict[str, Any]: + """Convert the kernel function to bedrock function schema.""" + return { + "functions": [ + kernel_function_metadata_to_bedrock_function_schema(function_metadata) + for function_metadata in function_choice_configuration.available_functions or [] + ] + } + + +def kernel_function_metadata_to_bedrock_function_schema(function_metadata: KernelFunctionMetadata) -> dict[str, Any]: + """Convert the kernel function metadata to bedrock function schema.""" + schema = { + "description": function_metadata.description, + "name": function_metadata.fully_qualified_name, + "parameters": { + parameter.name: kernel_function_parameter_to_bedrock_function_parameter(parameter) + for parameter in function_metadata.parameters + }, + # This field controls whether user confirmation is required to invoke the function. + # If this is set to "ENABLED", the user will be prompted to confirm the function invocation. + # Only after the user confirms, the function call request will be issued by the agent. + # If the user denies the confirmation, the agent will act as if the function does not exist. + # Currently, we do not support this feature, so we set it to "DISABLED". + "requireConfirmation": "DISABLED", + } + + # Remove None values from the schema + return {key: value for key, value in schema.items() if value is not None} + + +def kernel_function_parameter_to_bedrock_function_parameter(parameter: KernelParameterMetadata): + """Convert the kernel function parameters to bedrock function parameters.""" + schema = { + "description": parameter.description, + "type": kernel_function_parameter_type_to_bedrock_function_parameter_type(parameter.schema_data), + "required": parameter.is_required, + } + + # Remove None values from the schema + return {key: value for key, value in schema.items() if value is not None} + + +# These are the allowed parameter types in bedrock function. +# https://docs.aws.amazon.com/bedrock/latest/APIReference/API_agent-runtime_ParameterDetail.html +BEDROCK_FUNCTION_ALLOWED_PARAMETER_TYPES = { + "string", + "number", + "integer", + "boolean", + "array", +} + + +def kernel_function_parameter_type_to_bedrock_function_parameter_type(schema_data: dict[str, Any] | None) -> str: + """Convert the kernel function parameter type to bedrock function parameter type.""" + if schema_data is None: + raise ValueError( + "Schema data is required to convert the kernel function parameter type to bedrock function parameter type." + ) + + type_ = schema_data.get("type") + if type_ is None: + raise ValueError( + "Type is required to convert the kernel function parameter type to bedrock function parameter type." + ) + + if type_ not in BEDROCK_FUNCTION_ALLOWED_PARAMETER_TYPES: + raise ValueError( + f"Type {type_} is not allowed in bedrock function parameter type. " + f"Allowed types are {BEDROCK_FUNCTION_ALLOWED_PARAMETER_TYPES}." 
+ ) + + return type_ + + +def parse_return_control_payload(return_control_payload: dict[str, Any]) -> list[FunctionCallContent]: + """Parse the return control payload to a list of function call contents for the kernel.""" + return [ + FunctionCallContent( + id=return_control_payload["invocationId"], + name=invocation_input["functionInvocationInput"]["function"], + arguments={ + parameter["name"]: parameter["value"] + for parameter in invocation_input["functionInvocationInput"]["parameters"] + }, + metadata=invocation_input, + ) + for invocation_input in return_control_payload.get("invocationInputs", []) + ] + + +def parse_function_result_contents(function_result_contents: list[FunctionResultContent]) -> list[dict[str, Any]]: + """Parse the function result contents to be returned to the agent in the session state.""" + return [ + { + "functionResult": { + "actionGroup": function_result_content.metadata["functionInvocationInput"]["actionGroup"], + "function": function_result_content.name, + "responseBody": {"TEXT": {"body": str(function_result_content.result)}}, + } + } + for function_result_content in function_result_contents + ] diff --git a/python/semantic_kernel/agents/bedrock/bedrock_agent.py b/python/semantic_kernel/agents/bedrock/bedrock_agent.py new file mode 100644 index 000000000000..33b57363e193 --- /dev/null +++ b/python/semantic_kernel/agents/bedrock/bedrock_agent.py @@ -0,0 +1,589 @@ +# Copyright (c) Microsoft. All rights reserved. + + +import asyncio +import logging +import sys +import uuid +from collections.abc import AsyncIterable +from functools import partial, reduce +from typing import Any, ClassVar + +from pydantic import ValidationError + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from semantic_kernel.agents.bedrock.action_group_utils import ( + parse_function_result_contents, + parse_return_control_payload, +) +from semantic_kernel.agents.bedrock.bedrock_agent_base import BedrockAgentBase +from semantic_kernel.agents.bedrock.bedrock_agent_settings import BedrockAgentSettings +from semantic_kernel.agents.bedrock.models.bedrock_agent_event_type import BedrockAgentEventType +from semantic_kernel.agents.bedrock.models.bedrock_agent_model import BedrockAgentModel +from semantic_kernel.agents.bedrock.models.bedrock_agent_status import BedrockAgentStatus +from semantic_kernel.agents.channels.agent_channel import AgentChannel +from semantic_kernel.agents.channels.bedrock_agent_channel import BedrockAgentChannel +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.contents.binary_content import BinaryContent +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException, AgentInvokeException +from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.functions.kernel_plugin import KernelPlugin +from semantic_kernel.kernel import Kernel +from semantic_kernel.utils.async_utils import 
run_in_executor +from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.telemetry.agent_diagnostics.decorators import ( + trace_agent_get_response, + trace_agent_invocation, +) + +logger = logging.getLogger(__name__) + + +@experimental +class BedrockAgent(BedrockAgentBase): + """Bedrock Agent. + + Manages the interaction with Amazon Bedrock Agent Service. + """ + + channel_type: ClassVar[type[AgentChannel]] = BedrockAgentChannel + + def __init__( + self, + agent_model: BedrockAgentModel | dict[str, Any], + *, + function_choice_behavior: FunctionChoiceBehavior | None = None, + kernel: Kernel | None = None, + plugins: list[KernelPlugin | object] | dict[str, KernelPlugin | object] | None = None, + arguments: KernelArguments | None = None, + bedrock_runtime_client: Any | None = None, + bedrock_client: Any | None = None, + **kwargs, + ) -> None: + """Initialize the Bedrock Agent. + + Note that this only creates the agent object and does not create the agent in the service. + + Args: + agent_model (BedrockAgentModel | dict[str, Any]): The agent model. + function_choice_behavior (FunctionChoiceBehavior, optional): The function choice behavior for accessing + the kernel functions and filters. + kernel (Kernel, optional): The kernel to use. + plugins (list[KernelPlugin | object] | dict[str, KernelPlugin | object], optional): The plugins to use. + arguments (KernelArguments, optional): The kernel arguments. + Invoke method arguments take precedence over the arguments provided here. + bedrock_runtime_client: The Bedrock Runtime Client. + bedrock_client: The Bedrock Client. + **kwargs: Additional keyword arguments. + """ + args: dict[str, Any] = { + "agent_model": agent_model, + **kwargs, + } + + if function_choice_behavior: + args["function_choice_behavior"] = function_choice_behavior + if kernel: + args["kernel"] = kernel + if plugins: + args["plugins"] = plugins + if arguments: + args["arguments"] = arguments + if bedrock_runtime_client: + args["bedrock_runtime_client"] = bedrock_runtime_client + if bedrock_client: + args["bedrock_client"] = bedrock_client + + super().__init__(**args) + + # region convenience class methods + + @classmethod + async def create_and_prepare_agent( + cls, + name: str, + instructions: str, + *, + agent_resource_role_arn: str | None = None, + foundation_model: str | None = None, + bedrock_runtime_client: Any | None = None, + bedrock_client: Any | None = None, + kernel: Kernel | None = None, + plugins: list[KernelPlugin | object] | dict[str, KernelPlugin | object] | None = None, + function_choice_behavior: FunctionChoiceBehavior | None = None, + arguments: KernelArguments | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + ) -> "BedrockAgent": + """Create a new agent asynchronously. + + This is a convenience method that creates an instance of BedrockAgent and then creates the agent on the service. + + Args: + name (str): The name of the agent. + instructions (str, optional): The instructions for the agent. + agent_resource_role_arn (str, optional): The ARN of the agent resource role. + foundation_model (str, optional): The foundation model. + bedrock_runtime_client (Any, optional): The Bedrock Runtime Client. + bedrock_client (Any, optional): The Bedrock Client. + kernel (Kernel, optional): The kernel to use. + plugins (list[KernelPlugin | object] | dict[str, KernelPlugin | object], optional): The plugins to use. 
+ function_choice_behavior (FunctionChoiceBehavior, optional): The function choice behavior for accessing + the kernel functions and filters. Only FunctionChoiceType.AUTO is supported. + arguments (KernelArguments, optional): The kernel arguments. + prompt_template_config (PromptTemplateConfig, optional): The prompt template configuration. + env_file_path (str, optional): The path to the environment file. + env_file_encoding (str, optional): The encoding of the environment file. + + Returns: + An instance of BedrockAgent with the created agent. + """ + try: + bedrock_agent_settings = BedrockAgentSettings.create( + agent_resource_role_arn=agent_resource_role_arn, + foundation_model=foundation_model, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + ) + except ValidationError as e: + raise AgentInitializationException("Failed to initialize the Amazon Bedrock Agent settings.") from e + + import boto3 + from botocore.exceptions import ClientError + + bedrock_runtime_client = bedrock_runtime_client or boto3.client("bedrock-agent-runtime") + bedrock_client = bedrock_client or boto3.client("bedrock-agent") + + try: + response = await run_in_executor( + None, + partial( + bedrock_client.create_agent, + agentName=name, + foundationModel=bedrock_agent_settings.foundation_model, + agentResourceRoleArn=bedrock_agent_settings.agent_resource_role_arn, + instruction=instructions, + ), + ) + except ClientError as e: + logger.error(f"Failed to create agent {name}.") + raise AgentInitializationException("Failed to create the Amazon Bedrock Agent.") from e + + bedrock_agent = cls( + response["agent"], + function_choice_behavior=function_choice_behavior, + kernel=kernel, + plugins=plugins, + arguments=arguments, + bedrock_runtime_client=bedrock_runtime_client, + bedrock_client=bedrock_client, + ) + + # The agent will first enter the CREATING status. + # When the operation finishes, it will enter the NOT_PREPARED status. + # We need to wait for the agent to reach the NOT_PREPARED status before we can prepare it. + await bedrock_agent._wait_for_agent_status(BedrockAgentStatus.NOT_PREPARED) + await bedrock_agent.prepare_agent_and_wait_until_prepared() + + return bedrock_agent + + @classmethod + def create_session_id(cls) -> str: + """Create a new session identifier. + + It is the caller's responsibility to maintain the session ID + to continue the session with the agent. + + Find the requirement for the session identifier here: + https://docs.aws.amazon.com/bedrock/latest/APIReference/API_agent-runtime_InvokeAgent.html#API_agent-runtime_InvokeAgent_RequestParameters + """ + return str(uuid.uuid4()) + + # endregion + + @trace_agent_get_response + @override + async def get_response( + self, + session_id: str, + input_text: str, + *, + agent_alias: str | None = None, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + **kwargs, + ) -> ChatMessageContent: + """Get a response from the agent. + + Args: + session_id (str): The session identifier. This is used to maintain the session state in the service. + input_text (str): The input text. + agent_alias (str, optional): The agent alias. + arguments (KernelArguments, optional): The kernel arguments to override the current arguments. + kernel (Kernel, optional): The kernel to override the current kernel. + **kwargs: Additional keyword arguments. + + Returns: + A chat message content with the response. 
+ """ + if arguments is None: + arguments = KernelArguments(**kwargs) + else: + arguments.update(kwargs) + + kernel = kernel or self.kernel + arguments = self._merge_arguments(arguments) + + kwargs.setdefault("streamingConfigurations", {})["streamFinalResponse"] = False + kwargs.setdefault("sessionState", {}) + + for _ in range(self.function_choice_behavior.maximum_auto_invoke_attempts): + response = await self._invoke_agent(session_id, input_text, agent_alias, **kwargs) + + events: list[dict[str, Any]] = [] + for event in response.get("completion", []): + events.append(event) + + if any(BedrockAgentEventType.RETURN_CONTROL in event for event in events): + # Check if there is function call requests. If there are function calls, + # parse and invoke them and return the results back to the agent. + # Not yielding the function call results back to the user. + kwargs["sessionState"].update( + await self._handle_return_control_event( + next(event for event in events if BedrockAgentEventType.RETURN_CONTROL in event), + kernel, + arguments, + ) + ) + else: + # For the rest of the events, the chunk will become the chat message content. + # If there are files or trace, they will be added to the chat message content. + file_items: list[BinaryContent] | None = None + trace_metadata: dict[str, Any] | None = None + chat_message_content: ChatMessageContent | None = None + for event in events: + if BedrockAgentEventType.CHUNK in event: + chat_message_content = self._handle_chunk_event(event) + elif BedrockAgentEventType.FILES in event: + file_items = self._handle_files_event(event) + elif BedrockAgentEventType.TRACE in event: + trace_metadata = self._handle_trace_event(event) + + if not chat_message_content or not chat_message_content.content: + raise AgentInvokeException("Chat message content is expected but not found in the response.") + + if file_items: + chat_message_content.items.extend(file_items) + if trace_metadata: + chat_message_content.metadata.update({"trace": trace_metadata}) + + if not chat_message_content: + raise AgentInvokeException("No response from the agent.") + + return chat_message_content + + raise AgentInvokeException( + "Failed to get a response from the agent. Please consider increasing the auto invoke attempts." + ) + + @trace_agent_invocation + @override + async def invoke( + self, + session_id: str, + input_text: str, + *, + agent_alias: str | None = None, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + **kwargs, + ) -> AsyncIterable[ChatMessageContent]: + """Invoke an agent. + + Args: + session_id (str): The session identifier. This is used to maintain the session state in the service. + input_text (str): The input text. + agent_alias (str, optional): The agent alias. + arguments (KernelArguments, optional): The kernel arguments to override the current arguments. + kernel (Kernel, optional): The kernel to override the current kernel. + **kwargs: Additional keyword arguments. + + Returns: + An async iterable of chat message content. 
+ """ + if arguments is None: + arguments = KernelArguments(**kwargs) + else: + arguments.update(kwargs) + + kernel = kernel or self.kernel + arguments = self._merge_arguments(arguments) + + kwargs.setdefault("streamingConfigurations", {})["streamFinalResponse"] = False + kwargs.setdefault("sessionState", {}) + + for _ in range(self.function_choice_behavior.maximum_auto_invoke_attempts): + response = await self._invoke_agent(session_id, input_text, agent_alias, **kwargs) + + events: list[dict[str, Any]] = [] + for event in response.get("completion", []): + events.append(event) + + if any(BedrockAgentEventType.RETURN_CONTROL in event for event in events): + # Check if there is function call requests. If there are function calls, + # parse and invoke them and return the results back to the agent. + # Not yielding the function call results back to the user. + kwargs["sessionState"].update( + await self._handle_return_control_event( + next(event for event in events if BedrockAgentEventType.RETURN_CONTROL in event), + kernel, + arguments, + ) + ) + else: + for event in events: + if BedrockAgentEventType.CHUNK in event: + yield self._handle_chunk_event(event) + elif BedrockAgentEventType.FILES in event: + yield ChatMessageContent( + role=AuthorRole.ASSISTANT, + items=self._handle_files_event(event), # type: ignore + name=self.name, + inner_content=event, + ai_model_id=self.agent_model.foundation_model, + ) + elif BedrockAgentEventType.TRACE in event: + yield ChatMessageContent( + role=AuthorRole.ASSISTANT, + name=self.name, + content="", + inner_content=event, + ai_model_id=self.agent_model.foundation_model, + metadata=self._handle_trace_event(event), + ) + + return + + raise AgentInvokeException( + "Failed to get a response from the agent. Please consider increasing the auto invoke attempts." + ) + + @trace_agent_invocation + @override + async def invoke_stream( + self, + session_id: str, + input_text: str, + *, + agent_alias: str | None = None, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + **kwargs, + ) -> AsyncIterable[StreamingChatMessageContent]: + """Invoke an agent with streaming. + + Args: + session_id (str): The session identifier. This is used to maintain the session state in the service. + input_text (str): The input text. + agent_alias (str, optional): The agent alias. + arguments (KernelArguments, optional): The kernel arguments to override the current arguments. + kernel (Kernel, optional): The kernel to override the current kernel. + **kwargs: Additional keyword arguments. 
+ + Returns: + An async iterable of streaming chat message content + """ + if arguments is None: + arguments = KernelArguments(**kwargs) + else: + arguments.update(kwargs) + + kernel = kernel or self.kernel + arguments = self._merge_arguments(arguments) + + kwargs.setdefault("streamingConfigurations", {})["streamFinalResponse"] = True + kwargs.setdefault("sessionState", {}) + + for request_index in range(self.function_choice_behavior.maximum_auto_invoke_attempts): + response = await self._invoke_agent(session_id, input_text, agent_alias, **kwargs) + + all_function_call_messages: list[StreamingChatMessageContent] = [] + for event in response.get("completion", []): + if BedrockAgentEventType.CHUNK in event: + yield self._handle_streaming_chunk_event(event) + continue + if BedrockAgentEventType.FILES in event: + yield self._handle_streaming_files_event(event) + continue + if BedrockAgentEventType.TRACE in event: + yield self._handle_streaming_trace_event(event) + continue + if BedrockAgentEventType.RETURN_CONTROL in event: + all_function_call_messages.append(self._handle_streaming_return_control_event(event)) + continue + + if not all_function_call_messages: + return + + full_message: StreamingChatMessageContent = reduce(lambda x, y: x + y, all_function_call_messages) + function_calls = [item for item in full_message.items if isinstance(item, FunctionCallContent)] + function_result_contents = await self._handle_function_call_contents(function_calls) + kwargs["sessionState"].update({ + "invocationId": function_calls[0].id, + "returnControlInvocationResults": parse_function_result_contents(function_result_contents), + }) + + # region non streaming Event Handlers + + def _handle_chunk_event(self, event: dict[str, Any]) -> ChatMessageContent: + """Create a chat message content.""" + chunk = event[BedrockAgentEventType.CHUNK] + completion = chunk["bytes"].decode() + + return ChatMessageContent( + role=AuthorRole.ASSISTANT, + content=completion, + name=self.name, + inner_content=event, + ai_model_id=self.agent_model.foundation_model, + metadata=chunk, + ) + + async def _handle_return_control_event( + self, + event: dict[str, Any], + kernel: Kernel, + kernel_arguments: KernelArguments, + ) -> dict[str, Any]: + """Handle return control event.""" + return_control_payload = event[BedrockAgentEventType.RETURN_CONTROL] + function_calls = parse_return_control_payload(return_control_payload) + if not function_calls: + raise AgentInvokeException("Function call is expected but not found in the response.") + + function_result_contents = await self._handle_function_call_contents(function_calls) + + return { + "invocationId": function_calls[0].id, + "returnControlInvocationResults": parse_function_result_contents(function_result_contents), + } + + def _handle_files_event(self, event: dict[str, Any]) -> list[BinaryContent]: + """Handle file event.""" + files_event = event[BedrockAgentEventType.FILES] + return [ + BinaryContent( + data=file["bytes"], + data_format="base64", + mime_type=file["type"], + metadata={"name": file["name"]}, + ) + for file in files_event["files"] + ] + + def _handle_trace_event(self, event: dict[str, Any]) -> dict[str, Any]: + """Handle trace event.""" + return event[BedrockAgentEventType.TRACE] + + # endregion + + # region streaming Event Handlers + + def _handle_streaming_chunk_event(self, event: dict[str, Any]) -> StreamingChatMessageContent: + """Handle streaming chunk event.""" + chunk = event[BedrockAgentEventType.CHUNK] + completion = chunk["bytes"].decode() + + return 
StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, + choice_index=0, + content=completion, + name=self.name, + inner_content=event, + ai_model_id=self.agent_model.foundation_model, + ) + + def _handle_streaming_return_control_event(self, event: dict[str, Any]) -> StreamingChatMessageContent: + """Handle streaming return control event.""" + return_control_payload = event[BedrockAgentEventType.RETURN_CONTROL] + function_calls = parse_return_control_payload(return_control_payload) + + return StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, + choice_index=0, + items=function_calls, # type: ignore + name=self.name, + inner_content=event, + ai_model_id=self.agent_model.foundation_model, + ) + + def _handle_streaming_files_event(self, event: dict[str, Any]) -> StreamingChatMessageContent: + """Handle streaming file event.""" + files_event = event[BedrockAgentEventType.FILES] + items: list[BinaryContent] = [ + BinaryContent( + data=file["bytes"], + data_format="base64", + mime_type=file["type"], + metadata={"name": file["name"]}, + ) + for file in files_event["files"] + ] + + return StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, + choice_index=0, + items=items, # type: ignore + name=self.name, + inner_content=event, + ai_model_id=self.agent_model.foundation_model, + ) + + def _handle_streaming_trace_event(self, event: dict[str, Any]) -> StreamingChatMessageContent: + """Handle streaming trace event.""" + return StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, + choice_index=0, + items=[], + name=self.name, + inner_content=event, + ai_model_id=self.agent_model.foundation_model, + metadata=event[BedrockAgentEventType.TRACE], + ) + + # endregion + + async def _handle_function_call_contents( + self, + function_call_contents: list[FunctionCallContent], + ) -> list[FunctionResultContent]: + """Handle function call contents.""" + chat_history = ChatHistory() + await asyncio.gather( + *[ + self.kernel.invoke_function_call( + function_call=function_call, + chat_history=chat_history, + arguments=self.arguments, + function_call_count=len(function_call_contents), + ) + for function_call in function_call_contents + ], + ) + + return [ + item + for chat_message in chat_history.messages + for item in chat_message.items + if isinstance(item, FunctionResultContent) + ] diff --git a/python/semantic_kernel/agents/bedrock/bedrock_agent_base.py b/python/semantic_kernel/agents/bedrock/bedrock_agent_base.py new file mode 100644 index 000000000000..708c0d2b01de --- /dev/null +++ b/python/semantic_kernel/agents/bedrock/bedrock_agent_base.py @@ -0,0 +1,376 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +import logging +from functools import partial +from typing import Any, ClassVar + +import boto3 +from botocore.exceptions import ClientError +from pydantic import Field, field_validator + +from semantic_kernel.agents.agent import Agent +from semantic_kernel.agents.bedrock.action_group_utils import kernel_function_to_bedrock_function_schema +from semantic_kernel.agents.bedrock.models.bedrock_action_group_model import BedrockActionGroupModel +from semantic_kernel.agents.bedrock.models.bedrock_agent_model import BedrockAgentModel +from semantic_kernel.agents.bedrock.models.bedrock_agent_status import BedrockAgentStatus +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior, FunctionChoiceType +from semantic_kernel.utils.async_utils import run_in_executor +from semantic_kernel.utils.feature_stage_decorator import experimental + +logger = logging.getLogger(__name__) + + +@experimental +class BedrockAgentBase(Agent): + """Bedrock Agent Base Class to provide common functionalities for Bedrock Agents.""" + + # There is a default alias created by Bedrock for the working draft version of the agent. + # https://docs.aws.amazon.com/bedrock/latest/userguide/agents-deploy.html + WORKING_DRAFT_AGENT_ALIAS: ClassVar[str] = "TSTALIASID" + + # Amazon Bedrock Clients + # Runtime Client: Use for inference + bedrock_runtime_client: Any + # Client: Use for model management + bedrock_client: Any + # Function Choice Behavior: this is primarily used to control the behavior of the kernel when + # the agent requests functions, and to configure the kernel function action group (i.e. via filters). + # When this is None, users won't be able to create a kernel function action groups. + function_choice_behavior: FunctionChoiceBehavior = Field(default=FunctionChoiceBehavior.Auto()) + # Agent Model: stores the agent information + agent_model: BedrockAgentModel + + def __init__( + self, + agent_model: BedrockAgentModel | dict[str, Any], + *, + function_choice_behavior: FunctionChoiceBehavior | None = None, + bedrock_runtime_client: Any | None = None, + bedrock_client: Any | None = None, + **kwargs, + ) -> None: + """Initialize the Bedrock Agent Base. + + Args: + agent_model: The Bedrock Agent Model. + function_choice_behavior: The function choice behavior. + bedrock_client: The Bedrock Client. + bedrock_runtime_client: The Bedrock Runtime Client. + kwargs: Additional keyword arguments. + """ + agent_model = ( + agent_model if isinstance(agent_model, BedrockAgentModel) else BedrockAgentModel.model_validate(agent_model) + ) + + args = { + "agent_model": agent_model, + "id": agent_model.agent_id, + "name": agent_model.agent_name, + "bedrock_runtime_client": bedrock_runtime_client or boto3.client("bedrock-agent-runtime"), + "bedrock_client": bedrock_client or boto3.client("bedrock-agent"), + **kwargs, + } + if function_choice_behavior: + args["function_choice_behavior"] = function_choice_behavior + + super().__init__(**args) + + @field_validator("function_choice_behavior", mode="after") + @classmethod + def validate_function_choice_behavior( + cls, function_choice_behavior: FunctionChoiceBehavior | None + ) -> FunctionChoiceBehavior | None: + """Validate the function choice behavior.""" + if function_choice_behavior and function_choice_behavior.type_ != FunctionChoiceType.AUTO: + # Users cannot specify REQUIRED or NONE for the Bedrock agents. 
+ # Please note that the function choice behavior only control if the kernel will automatically + # execute the functions the agent requests. It does not control the behavior of the agent. + raise ValueError("Only FunctionChoiceType.AUTO is supported.") + return function_choice_behavior + + def __repr__(self): + """Return the string representation of the Bedrock Agent.""" + return f"{self.agent_model}" + + # region Agent Management + + async def prepare_agent_and_wait_until_prepared(self) -> None: + """Prepare the agent for use.""" + if not self.agent_model.agent_id: + raise ValueError("Agent does not exist. Please create the agent before preparing it.") + + try: + await run_in_executor( + None, + partial( + self.bedrock_client.prepare_agent, + agentId=self.agent_model.agent_id, + ), + ) + + # The agent will take some time to enter the PREPARING status after the prepare operation is called. + # We need to wait for the agent to reach the PREPARING status before we can proceed, otherwise we + # will return immediately if the agent is already in PREPARED status. + await self._wait_for_agent_status(BedrockAgentStatus.PREPARING) + # The agent will enter the PREPARED status when the preparation is complete. + await self._wait_for_agent_status(BedrockAgentStatus.PREPARED) + except ClientError as e: + logger.error(f"Failed to prepare agent {self.agent_model.agent_id}.") + raise e + + async def delete_agent(self, **kwargs) -> None: + """Delete an agent asynchronously.""" + if not self.agent_model.agent_id: + raise ValueError("Agent does not exist. Please create the agent before deleting it.") + + try: + await run_in_executor( + None, + partial( + self.bedrock_client.delete_agent, + agentId=self.agent_model.agent_id, + **kwargs, + ), + ) + + self.agent_model.agent_id = None + except ClientError as e: + logger.error(f"Failed to delete agent {self.agent_model.agent_id}.") + raise e + + async def _get_agent(self) -> None: + """Get an agent.""" + if not self.agent_model.agent_id: + raise ValueError("Agent does not exist. Please create the agent before getting it.") + + try: + response = await run_in_executor( + None, + partial( + self.bedrock_client.get_agent, + agentId=self.agent_model.agent_id, + ), + ) + + # Update the agent model + self.agent_model = BedrockAgentModel(**response["agent"]) + except ClientError as e: + logger.error(f"Failed to get agent {self.agent_model.agent_id}.") + raise e + + async def _wait_for_agent_status( + self, + status: BedrockAgentStatus, + interval: int = 2, + max_attempts: int = 5, + ) -> None: + """Wait for the agent to reach a specific status.""" + for _ in range(max_attempts): + await self._get_agent() + if self.agent_model.agent_status == status: + return + + await asyncio.sleep(interval) + + raise TimeoutError( + f"Agent did not reach status {status} within the specified time." + f" Current status: {self.agent_model.agent_status}" + ) + + # endregion Agent Management + + # region Action Group Management + async def create_code_interpreter_action_group(self, **kwargs) -> BedrockActionGroupModel: + """Create a code interpreter action group.""" + if not self.agent_model.agent_id: + raise ValueError("Agent does not exist. 
Please create the agent before creating an action group for it.") + + try: + response = await run_in_executor( + None, + partial( + self.bedrock_client.create_agent_action_group, + agentId=self.agent_model.agent_id, + agentVersion=self.agent_model.agent_version or "DRAFT", + actionGroupName=f"{self.agent_model.agent_name}_code_interpreter", + actionGroupState="ENABLED", + parentActionGroupSignature="AMAZON.CodeInterpreter", + **kwargs, + ), + ) + + await self.prepare_agent_and_wait_until_prepared() + + return BedrockActionGroupModel(**response["agentActionGroup"]) + except ClientError as e: + logger.error(f"Failed to create code interpreter action group for agent {self.agent_model.agent_id}.") + raise e + + async def create_user_input_action_group(self, **kwargs) -> BedrockActionGroupModel: + """Create a user input action group.""" + if not self.agent_model.agent_id: + raise ValueError("Agent does not exist. Please create the agent before creating an action group for it.") + + try: + response = await run_in_executor( + None, + partial( + self.bedrock_client.create_agent_action_group, + agentId=self.agent_model.agent_id, + agentVersion=self.agent_model.agent_version or "DRAFT", + actionGroupName=f"{self.agent_model.agent_name}_user_input", + actionGroupState="ENABLED", + parentActionGroupSignature="AMAZON.UserInput", + **kwargs, + ), + ) + + await self.prepare_agent_and_wait_until_prepared() + + return BedrockActionGroupModel(**response["agentActionGroup"]) + except ClientError as e: + logger.error(f"Failed to create user input action group for agent {self.agent_model.agent_id}.") + raise e + + async def create_kernel_function_action_group(self, **kwargs) -> BedrockActionGroupModel | None: + """Create a kernel function action group.""" + if not self.agent_model.agent_id: + raise ValueError("Agent does not exist. Please create the agent before creating an action group for it.") + + function_call_choice_config = self.function_choice_behavior.get_config(self.kernel) + if not function_call_choice_config.available_functions: + logger.warning("No available functions. Skipping kernel function action group creation.") + return None + + try: + response = await run_in_executor( + None, + partial( + self.bedrock_client.create_agent_action_group, + agentId=self.agent_model.agent_id, + agentVersion=self.agent_model.agent_version or "DRAFT", + actionGroupName=f"{self.agent_model.agent_name}_kernel_function", + actionGroupState="ENABLED", + actionGroupExecutor={"customControl": "RETURN_CONTROL"}, + functionSchema=kernel_function_to_bedrock_function_schema(function_call_choice_config), + **kwargs, + ), + ) + + await self.prepare_agent_and_wait_until_prepared() + + return BedrockActionGroupModel(**response["agentActionGroup"]) + except ClientError as e: + logger.error(f"Failed to create kernel function action group for agent {self.agent_model.agent_id}.") + raise e + + # endregion Action Group Management + + # region Knowledge Base Management + + async def associate_agent_knowledge_base(self, knowledge_base_id: str, **kwargs) -> dict[str, Any]: + """Associate an agent with a knowledge base.""" + if not self.agent_model.agent_id: + raise ValueError( + "Agent does not exist. Please create the agent before associating it with a knowledge base." 
+ ) + + try: + response = await run_in_executor( + None, + partial( + self.bedrock_client.associate_agent_knowledge_base, + agentId=self.agent_model.agent_id, + agentVersion=self.agent_model.agent_version, + knowledgeBaseId=knowledge_base_id, + **kwargs, + ), + ) + + await self.prepare_agent_and_wait_until_prepared() + + return response + except ClientError as e: + logger.error( + f"Failed to associate agent {self.agent_model.agent_id} with knowledge base {knowledge_base_id}." + ) + raise e + + async def disassociate_agent_knowledge_base(self, knowledge_base_id: str, **kwargs) -> None: + """Disassociate an agent with a knowledge base.""" + if not self.agent_model.agent_id: + raise ValueError( + "Agent does not exist. Please create the agent before disassociating it with a knowledge base." + ) + + try: + response = await run_in_executor( + None, + partial( + self.bedrock_client.disassociate_agent_knowledge_base, + agentId=self.agent_model.agent_id, + agentVersion=self.agent_model.agent_version, + knowledgeBaseId=knowledge_base_id, + **kwargs, + ), + ) + + await self.prepare_agent_and_wait_until_prepared() + + return response + except ClientError as e: + logger.error( + f"Failed to disassociate agent {self.agent_model.agent_id} with knowledge base {knowledge_base_id}." + ) + raise e + + async def list_associated_agent_knowledge_bases(self, **kwargs) -> dict[str, Any]: + """List associated knowledge bases with an agent.""" + if not self.agent_model.agent_id: + raise ValueError("Agent does not exist. Please create the agent before listing associated knowledge bases.") + + try: + return await run_in_executor( + None, + partial( + self.bedrock_client.list_agent_knowledge_bases, + agentId=self.agent_model.agent_id, + agentVersion=self.agent_model.agent_version, + **kwargs, + ), + ) + except ClientError as e: + logger.error(f"Failed to list associated knowledge bases for agent {self.agent_model.agent_id}.") + raise e + + # endregion Knowledge Base Management + + async def _invoke_agent( + self, + session_id: str, + input_text: str, + agent_alias: str | None = None, + **kwargs, + ) -> dict[str, Any]: + """Invoke an agent.""" + if not self.agent_model.agent_id: + raise ValueError("Agent does not exist. Please create the agent before invoking it.") + + agent_alias = agent_alias or self.WORKING_DRAFT_AGENT_ALIAS + + try: + return await run_in_executor( + None, + partial( + self.bedrock_runtime_client.invoke_agent, + agentAliasId=agent_alias, + agentId=self.agent_model.agent_id, + sessionId=session_id, + inputText=input_text, + **kwargs, + ), + ) + except ClientError as e: + logger.error(f"Failed to invoke agent {self.agent_model.agent_id}.") + raise e diff --git a/python/semantic_kernel/agents/bedrock/bedrock_agent_settings.py b/python/semantic_kernel/agents/bedrock/bedrock_agent_settings.py new file mode 100644 index 000000000000..a3679478678b --- /dev/null +++ b/python/semantic_kernel/agents/bedrock/bedrock_agent_settings.py @@ -0,0 +1,32 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import ClassVar + +from semantic_kernel.kernel_pydantic import KernelBaseSettings +from semantic_kernel.utils.feature_stage_decorator import experimental + + +@experimental +class BedrockAgentSettings(KernelBaseSettings): + """Amazon Bedrock Agent service settings. + + The settings are first loaded from environment variables with + the prefix 'BEDROCK_AGENT_'. + If the environment variables are not found, the settings can + be loaded from a .env file with the encoding 'utf-8'. 
+ If the settings are not found in the .env file, the settings + are ignored; however, validation will fail alerting that the + settings are missing. + + Optional settings for prefix 'BEDROCK_' are: + - agent_resource_role_arn: str - The Amazon Bedrock agent resource role ARN. + https://docs.aws.amazon.com/bedrock/latest/userguide/getting-started.html + (Env var BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN) + - foundation_model: str - The Amazon Bedrock foundation model ID to use. + (Env var BEDROCK_AGENT_FOUNDATION_MODEL) + """ + + env_prefix: ClassVar[str] = "BEDROCK_AGENT_" + + agent_resource_role_arn: str + foundation_model: str diff --git a/python/semantic_kernel/agents/bedrock/models/__init__.py b/python/semantic_kernel/agents/bedrock/models/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/python/semantic_kernel/agents/bedrock/models/bedrock_action_group_model.py b/python/semantic_kernel/agents/bedrock/models/bedrock_action_group_model.py new file mode 100644 index 000000000000..8f80b25297fa --- /dev/null +++ b/python/semantic_kernel/agents/bedrock/models/bedrock_action_group_model.py @@ -0,0 +1,21 @@ +# Copyright (c) Microsoft. All rights reserved. + +from pydantic import ConfigDict, Field + +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.utils.feature_stage_decorator import experimental + + +@experimental +class BedrockActionGroupModel(KernelBaseModel): + """Bedrock Action Group Model. + + Model field definitions for the Amazon Bedrock Action Group Service: + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/bedrock-agent/client/create_agent_action_group.html + """ + + # This model_config will merge with the KernelBaseModel.model_config + model_config = ConfigDict(extra="allow") + + action_group_id: str = Field(..., alias="actionGroupId", description="The unique identifier of the action group.") + action_group_name: str = Field(..., alias="actionGroupName", description="The name of the action group.") diff --git a/python/semantic_kernel/agents/bedrock/models/bedrock_agent_event_type.py b/python/semantic_kernel/agents/bedrock/models/bedrock_agent_event_type.py new file mode 100644 index 000000000000..a62e62d6b5ce --- /dev/null +++ b/python/semantic_kernel/agents/bedrock/models/bedrock_agent_event_type.py @@ -0,0 +1,19 @@ +# Copyright (c) Microsoft. All rights reserved. + +from enum import Enum + +from semantic_kernel.utils.feature_stage_decorator import experimental + + +@experimental +class BedrockAgentEventType(str, Enum): + """Bedrock Agent Event Type.""" + + # Contains the text response from the agent. + CHUNK = "chunk" + # Contains the trace information (reasoning process) from the agent. + TRACE = "trace" + # Contains the function call requests from the agent. + RETURN_CONTROL = "returnControl" + # Contains the files generated by the agent using the code interpreter. + FILES = "files" diff --git a/python/semantic_kernel/agents/bedrock/models/bedrock_agent_model.py b/python/semantic_kernel/agents/bedrock/models/bedrock_agent_model.py new file mode 100644 index 000000000000..dd73fb145e10 --- /dev/null +++ b/python/semantic_kernel/agents/bedrock/models/bedrock_agent_model.py @@ -0,0 +1,24 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from pydantic import ConfigDict, Field + +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.utils.feature_stage_decorator import experimental + + +@experimental +class BedrockAgentModel(KernelBaseModel): + """Bedrock Agent Model. + + Model field definitions for the Amazon Bedrock Agent Service: + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/bedrock-agent/client/create_agent.html + """ + + # This model_config will merge with the KernelBaseModel.model_config + model_config = ConfigDict(extra="allow") + + agent_id: str | None = Field(default=None, alias="agentId", description="The unique identifier of the agent.") + agent_name: str | None = Field(default=None, alias="agentName", description="The name of the agent.") + agent_version: str | None = Field(default=None, alias="agentVersion", description="The version of the agent.") + foundation_model: str | None = Field(default=None, alias="foundationModel", description="The foundation model.") + agent_status: str | None = Field(default=None, alias="agentStatus", description="The status of the agent.") diff --git a/python/semantic_kernel/agents/bedrock/models/bedrock_agent_status.py b/python/semantic_kernel/agents/bedrock/models/bedrock_agent_status.py new file mode 100644 index 000000000000..321f09c63752 --- /dev/null +++ b/python/semantic_kernel/agents/bedrock/models/bedrock_agent_status.py @@ -0,0 +1,23 @@ +# Copyright (c) Microsoft. All rights reserved. + + +from enum import Enum + +from semantic_kernel.utils.feature_stage_decorator import experimental + + +@experimental +class BedrockAgentStatus(str, Enum): + """Bedrock Agent Status. + + https://docs.aws.amazon.com/bedrock/latest/APIReference/API_agent_PrepareAgent.html#API_agent_PrepareAgent_ResponseElements + """ + + CREATING = "CREATING" + PREPARING = "PREPARING" + PREPARED = "PREPARED" + NOT_PREPARED = "NOT_PREPARED" + DELETING = "DELETING" + FAILED = "FAILED" + VERSIONING = "VERSIONING" + UPDATING = "UPDATING" diff --git a/python/semantic_kernel/agents/channels/agent_channel.py b/python/semantic_kernel/agents/channels/agent_channel.py index b7a56d1f4a32..a3a59cee579f 100644 --- a/python/semantic_kernel/agents/channels/agent_channel.py +++ b/python/semantic_kernel/agents/channels/agent_channel.py @@ -2,16 +2,16 @@ from abc import ABC, abstractmethod from collections.abc import AsyncIterable -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.agents.agent import Agent from semantic_kernel.contents.chat_message_content import ChatMessageContent -@experimental_class +@experimental class AgentChannel(ABC): """Defines the communication protocol for a particular Agent type. @@ -36,11 +36,13 @@ async def receive( def invoke( self, agent: "Agent", + **kwargs: Any, ) -> AsyncIterable[tuple[bool, "ChatMessageContent"]]: """Perform a discrete incremental interaction between a single Agent and AgentChat. Args: agent: The agent to interact with. + kwargs: The keyword arguments. Returns: An async iterable of a bool, ChatMessageContent. @@ -51,13 +53,15 @@ def invoke( def invoke_stream( self, agent: "Agent", - history: "list[ChatMessageContent]", + messages: "list[ChatMessageContent]", + **kwargs: Any, ) -> AsyncIterable["ChatMessageContent"]: """Perform a discrete incremental stream interaction between a single Agent and AgentChat. 
Args: agent: The agent to interact with. - history: The history of messages in the conversation. + messages: The history of messages in the conversation. + kwargs: The keyword arguments. Returns: An async iterable ChatMessageContent. diff --git a/python/semantic_kernel/agents/channels/bedrock_agent_channel.py b/python/semantic_kernel/agents/channels/bedrock_agent_channel.py new file mode 100644 index 000000000000..748496b52a14 --- /dev/null +++ b/python/semantic_kernel/agents/channels/bedrock_agent_channel.py @@ -0,0 +1,213 @@ +# Copyright (c) Microsoft. All rights reserved. + +import logging +import sys +from collections.abc import AsyncIterable +from typing import Any, ClassVar + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from semantic_kernel.agents.agent import Agent +from semantic_kernel.agents.channels.agent_channel import AgentChannel +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions.agent_exceptions import AgentChatException +from semantic_kernel.utils.feature_stage_decorator import experimental + +logger = logging.getLogger(__name__) + + +@experimental +class BedrockAgentChannel(AgentChannel, ChatHistory): + """An AgentChannel for a BedrockAgent that is based on a ChatHistory. + + This channel allows Bedrock agents to interact with other types of agents in Semantic Kernel in an AgentGroupChat. + However, since Bedrock agents require the chat history to alternate between user and agent messages, this channel + will preprocess the chat history to ensure that it meets the requirements of the Bedrock agent. When an invalid + pattern is detected, the channel will insert a placeholder user or assistant message to ensure that the chat history + alternates between user and agent messages. + """ + + MESSAGE_PLACEHOLDER: ClassVar[str] = "[SILENCE]" + + @override + async def invoke(self, agent: "Agent", **kwargs: Any) -> AsyncIterable[tuple[bool, ChatMessageContent]]: + """Perform a discrete incremental interaction between a single Agent and AgentChat. + + Args: + agent: The agent to interact with. + kwargs: Additional keyword arguments. + + Returns: + An async iterable of ChatMessageContent with a boolean indicating if the + message should be visible external to the agent. + """ + from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent + + if not isinstance(agent, BedrockAgent): + raise AgentChatException(f"Agent is not of the expected type {type(BedrockAgent)}.") + if not self.messages: + # This is not supposed to happen, as the channel won't get invoked + # before it has received messages. This is just extra safety. 
+ raise AgentChatException("No chat history available.") + + # Preprocess chat history + self._ensure_history_alternates() + self._ensure_last_message_is_user() + + session_id = BedrockAgent.create_session_id() + async for message in agent.invoke( + session_id, + self.messages[-1].content, + sessionState=self._parse_chat_history_to_session_state(), + ): + self.messages.append(message) + # All messages from Bedrock agents are user facing, i.e., function calls are not returned as messages + yield True, message + + @override + async def invoke_stream( + self, + agent: "Agent", + messages: list[ChatMessageContent], + **kwargs: Any, + ) -> AsyncIterable[ChatMessageContent]: + """Perform a streaming interaction between a single Agent and AgentChat. + + Args: + agent: The agent to interact with. + messages: The history of messages in the conversation. + kwargs: Additional keyword arguments. + + Returns: + An async iterable of ChatMessageContent. + """ + from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent + + if not isinstance(agent, BedrockAgent): + raise AgentChatException(f"Agent is not of the expected type {type(BedrockAgent)}.") + if not self.messages: + raise AgentChatException("No chat history available.") + + # Preprocess chat history + self._ensure_history_alternates() + self._ensure_last_message_is_user() + + session_id = BedrockAgent.create_session_id() + full_message: list[StreamingChatMessageContent] = [] + async for message_chunk in agent.invoke_stream( + session_id, + self.messages[-1].content, + sessionState=self._parse_chat_history_to_session_state(), + ): + yield message_chunk + full_message.append(message_chunk) + + messages.append( + ChatMessageContent( + role=AuthorRole.ASSISTANT, + content="".join([message.content for message in full_message]), + name=agent.name, + inner_content=full_message, + ai_model_id=agent.agent_model.foundation_model, + ) + ) + + @override + async def receive( + self, + history: list[ChatMessageContent], + ) -> None: + """Receive the conversation messages. + + Bedrock requires the chat history to alternate between user and agent messages. + Thus, when receiving the history, the message sequence will be mutated by inserting + empty agent or user messages as needed. + + Args: + history: The history of messages in the conversation. + """ + for incoming_message in history: + if not self.messages or self.messages[-1].role != incoming_message.role: + self.messages.append(incoming_message) + else: + self.messages.append( + ChatMessageContent( + role=AuthorRole.ASSISTANT if incoming_message.role == AuthorRole.USER else AuthorRole.USER, + content=self.MESSAGE_PLACEHOLDER, + ) + ) + self.messages.append(incoming_message) + + @override + async def get_history( # type: ignore + self, + ) -> AsyncIterable[ChatMessageContent]: + """Retrieve the message history specific to this channel. + + Returns: + An async iterable of ChatMessageContent. 
+ """ + for message in reversed(self.messages): + yield message + + @override + async def reset(self) -> None: + """Reset the channel state.""" + self.messages.clear() + + # region chat history preprocessing and parsing + + def _ensure_history_alternates(self): + """Ensure that the chat history alternates between user and agent messages.""" + if not self.messages or len(self.messages) == 1: + return + + current_index = 1 + while current_index < len(self.messages): + if self.messages[current_index].role == self.messages[current_index - 1].role: + self.messages.insert( + current_index, + ChatMessageContent( + role=AuthorRole.ASSISTANT + if self.messages[current_index].role == AuthorRole.USER + else AuthorRole.USER, + content=self.MESSAGE_PLACEHOLDER, + ), + ) + current_index += 2 + else: + current_index += 1 + + def _ensure_last_message_is_user(self): + """Ensure that the last message in the chat history is a user message.""" + if self.messages and self.messages[-1].role == AuthorRole.ASSISTANT: + self.messages.append( + ChatMessageContent( + role=AuthorRole.USER, + content=self.MESSAGE_PLACEHOLDER, + ) + ) + + def _parse_chat_history_to_session_state(self) -> dict[str, Any]: + """Parse the chat history to a session state.""" + session_state: dict[str, Any] = {"conversationHistory": {"messages": []}} + if len(self.messages) > 1: + # We don't take the last message as it needs to be sent separately in another parameter + for message in self.messages[:-1]: + if message.role not in [AuthorRole.USER, AuthorRole.ASSISTANT]: + logger.debug(f"Skipping message with unsupported role: {message}") + continue + session_state["conversationHistory"]["messages"].append({ + "content": [{"text": message.content}], + "role": message.role.value, + }) + + return session_state + + # endregion diff --git a/python/semantic_kernel/agents/channels/chat_history_channel.py b/python/semantic_kernel/agents/channels/chat_history_channel.py index 057c005b3d3d..3f330415df25 100644 --- a/python/semantic_kernel/agents/channels/chat_history_channel.py +++ b/python/semantic_kernel/agents/channels/chat_history_channel.py @@ -3,70 +3,59 @@ import sys from collections import deque from collections.abc import AsyncIterable +from copy import deepcopy + +from semantic_kernel.contents.image_content import ImageContent +from semantic_kernel.contents.streaming_text_content import StreamingTextContent +from semantic_kernel.contents.text_content import TextContent if sys.version_info >= (3, 12): from typing import override # pragma: no cover else: from typing_extensions import override # pragma: no cover -from abc import abstractmethod -from typing import TYPE_CHECKING, Deque, Protocol, runtime_checkable +from typing import TYPE_CHECKING, Any, ClassVar, Deque from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.contents import ChatMessageContent from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.exceptions import ServiceInvalidTypeError -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.agents.agent import Agent from semantic_kernel.contents.chat_history import ChatHistory + from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent 
-@experimental_class -@runtime_checkable -class ChatHistoryAgentProtocol(Protocol): - """Contract for an agent that utilizes a ChatHistoryChannel.""" - - @abstractmethod - def invoke(self, history: "ChatHistory") -> AsyncIterable["ChatMessageContent"]: - """Invoke the chat history agent protocol.""" - ... - - @abstractmethod - def invoke_stream(self, history: "ChatHistory") -> AsyncIterable["ChatMessageContent"]: - """Invoke the chat history agent protocol in streaming mode.""" - ... - - -@experimental_class +@experimental class ChatHistoryChannel(AgentChannel, ChatHistory): """An AgentChannel specialization for that acts upon a ChatHistoryHandler.""" + ALLOWED_CONTENT_TYPES: ClassVar[tuple[type, ...]] = ( + ImageContent, + FunctionCallContent, + FunctionResultContent, + StreamingTextContent, + TextContent, + ) + @override async def invoke( self, agent: "Agent", + **kwargs: Any, ) -> AsyncIterable[tuple[bool, ChatMessageContent]]: """Perform a discrete incremental interaction between a single Agent and AgentChat. Args: agent: The agent to interact with. + kwargs: The keyword arguments. Returns: An async iterable of ChatMessageContent. """ - if not isinstance(agent, ChatHistoryAgentProtocol): - id = getattr(agent, "id", "") - raise ServiceInvalidTypeError( - f"Invalid channel binding for agent with id: `{id}` with name: ({type(agent).__name__})" - ) - - # pre-process history reduction - await agent.reduce_history(self) - message_count = len(self.messages) mutated_history = set() message_queue: Deque[ChatMessageContent] = deque() @@ -103,28 +92,18 @@ async def invoke( @override async def invoke_stream( - self, - agent: "Agent", - messages: list[ChatMessageContent], - ) -> AsyncIterable[ChatMessageContent]: + self, agent: "Agent", messages: list[ChatMessageContent], **kwargs: Any + ) -> AsyncIterable["StreamingChatMessageContent"]: """Perform a discrete incremental stream interaction between a single Agent and AgentChat. Args: agent: The agent to interact with. messages: The history of messages in the conversation. + kwargs: The keyword arguments Returns: - An async iterable of bool, StreamingChatMessageContent. + An async iterable of ChatMessageContent. """ - if not isinstance(agent, ChatHistoryAgentProtocol): - id = getattr(agent, "id", "") - raise ServiceInvalidTypeError( - f"Invalid channel binding for agent with id: `{id}` with name: ({type(agent).__name__})" - ) - - # pre-process history reduction - await agent.reduce_history(self) - message_count = len(self.messages) async for response_message in agent.invoke_stream(self): @@ -148,10 +127,23 @@ async def receive( ) -> None: """Receive the conversation messages. + Do not include messages that only contain file references. + Args: history: The history of messages in the conversation. 
""" - self.messages.extend(history) + filtered_history: list[ChatMessageContent] = [] + for message in history: + new_message = deepcopy(message) + if new_message.items is None: + new_message.items = [] + allowed_items = [item for item in new_message.items if isinstance(item, self.ALLOWED_CONTENT_TYPES)] + if not allowed_items: + continue + new_message.items.clear() + new_message.items.extend(allowed_items) + filtered_history.append(new_message) + self.messages.extend(filtered_history) @override async def get_history( # type: ignore diff --git a/python/semantic_kernel/agents/channels/open_ai_assistant_channel.py b/python/semantic_kernel/agents/channels/open_ai_assistant_channel.py index 7ba31b598827..a2f74fa05d51 100644 --- a/python/semantic_kernel/agents/channels/open_ai_assistant_channel.py +++ b/python/semantic_kernel/agents/channels/open_ai_assistant_channel.py @@ -13,16 +13,17 @@ from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.agents.open_ai.assistant_content_generation import create_chat_message, generate_message_content +from semantic_kernel.agents.open_ai.assistant_thread_actions import AssistantThreadActions from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.exceptions.agent_exceptions import AgentChatException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.agents.agent import Agent -@experimental_class +@experimental class OpenAIAssistantChannel(AgentChannel): """OpenAI Assistant Channel.""" @@ -44,48 +45,46 @@ async def receive(self, history: list["ChatMessageContent"]) -> None: await create_chat_message(self.client, self.thread_id, message) @override - async def invoke(self, agent: "Agent") -> AsyncIterable[tuple[bool, "ChatMessageContent"]]: + async def invoke(self, agent: "Agent", **kwargs: Any) -> AsyncIterable[tuple[bool, "ChatMessageContent"]]: """Invoke the agent. Args: agent: The agent to invoke. + kwargs: The keyword arguments. Yields: tuple[bool, ChatMessageContent]: The conversation messages. """ - from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase + from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent - if not isinstance(agent, OpenAIAssistantBase): - raise AgentChatException(f"Agent is not of the expected type {type(OpenAIAssistantBase)}.") + if not isinstance(agent, OpenAIAssistantAgent): + raise AgentChatException(f"Agent is not of the expected type {type(OpenAIAssistantAgent)}.") - if agent._is_deleted: - raise AgentChatException("Agent is deleted.") - - async for is_visible, message in agent._invoke_internal(thread_id=self.thread_id): + async for is_visible, message in AssistantThreadActions.invoke(agent=agent, thread_id=self.thread_id, **kwargs): yield is_visible, message @override async def invoke_stream( - self, agent: "Agent", messages: list[ChatMessageContent] + self, agent: "Agent", messages: list[ChatMessageContent], **kwargs: Any ) -> AsyncIterable["ChatMessageContent"]: """Invoke the agent stream. Args: agent: The agent to invoke. messages: The conversation messages. + kwargs: The keyword arguments. Yields: tuple[bool, StreamingChatMessageContent]: The conversation messages. 
""" - from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase - - if not isinstance(agent, OpenAIAssistantBase): - raise AgentChatException(f"Agent is not of the expected type {type(OpenAIAssistantBase)}.") + from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent - if agent._is_deleted: - raise AgentChatException("Agent is deleted.") + if not isinstance(agent, OpenAIAssistantAgent): + raise AgentChatException(f"Agent is not of the expected type {type(OpenAIAssistantAgent)}.") - async for message in agent._invoke_internal_stream(thread_id=self.thread_id, messages=messages): + async for message in AssistantThreadActions.invoke_stream( + agent=agent, thread_id=self.thread_id, messages=messages, **kwargs + ): yield message @override diff --git a/python/semantic_kernel/agents/chat_completion/chat_completion_agent.py b/python/semantic_kernel/agents/chat_completion/chat_completion_agent.py index cbdb218ad616..0ce6b8c58946 100644 --- a/python/semantic_kernel/agents/chat_completion/chat_completion_agent.py +++ b/python/semantic_kernel/agents/chat_completion/chat_completion_agent.py @@ -1,23 +1,38 @@ # Copyright (c) Microsoft. All rights reserved. import logging +import sys from collections.abc import AsyncGenerator, AsyncIterable from typing import TYPE_CHECKING, Any, ClassVar +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from pydantic import Field, model_validator + from semantic_kernel.agents import Agent from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.agents.channels.chat_history_channel import ChatHistoryChannel from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -from semantic_kernel.const import DEFAULT_SERVICE_NAME from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.history_reducer.chat_history_reducer import ChatHistoryReducer from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions import KernelServiceNotFoundError -from semantic_kernel.utils.experimental_decorator import experimental_class -from semantic_kernel.utils.telemetry.agent_diagnostics.decorators import trace_agent_invocation +from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException, AgentInvokeException +from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.functions.kernel_function import TEMPLATE_FORMAT_MAP +from semantic_kernel.functions.kernel_plugin import KernelPlugin +from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig +from semantic_kernel.utils.feature_stage_decorator import release_candidate +from semantic_kernel.utils.telemetry.agent_diagnostics.decorators import ( + trace_agent_get_response, + trace_agent_invocation, +) if TYPE_CHECKING: from semantic_kernel.kernel import Kernel @@ -25,52 +40,53 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@release_candidate class ChatCompletionAgent(Agent): - """A 
KernelAgent specialization based on ChatCompletionClientBase. - - Note: enable `function_choice_behavior` on the PromptExecutionSettings to enable function - choice behavior which allows the kernel to utilize plugins and functions registered in - the kernel. - """ + """A Chat Completion Agent based on ChatCompletionClientBase.""" - service_id: str - execution_settings: PromptExecutionSettings | None = None - channel_type: ClassVar[type[AgentChannel]] = ChatHistoryChannel + function_choice_behavior: FunctionChoiceBehavior | None = Field( + default_factory=lambda: FunctionChoiceBehavior.Auto() + ) + channel_type: ClassVar[type[AgentChannel] | None] = ChatHistoryChannel + service: ChatCompletionClientBase | None = Field(default=None, exclude=True) def __init__( self, - service_id: str | None = None, - kernel: "Kernel | None" = None, - name: str | None = None, - id: str | None = None, + *, + arguments: KernelArguments | None = None, description: str | None = None, + function_choice_behavior: FunctionChoiceBehavior | None = None, + id: str | None = None, instructions: str | None = None, - execution_settings: PromptExecutionSettings | None = None, - history_reducer: ChatHistoryReducer | None = None, + kernel: "Kernel | None" = None, + name: str | None = None, + plugins: list[KernelPlugin | object] | dict[str, KernelPlugin | object] | None = None, + prompt_template_config: PromptTemplateConfig | None = None, + service: ChatCompletionClientBase | None = None, ) -> None: """Initialize a new instance of ChatCompletionAgent. Args: - service_id: The service id for the chat completion service. (optional) If not provided, - the default service name `default` will be used. - kernel: The kernel instance. (optional) - name: The name of the agent. (optional) - id: The unique identifier for the agent. (optional) If not provided, + arguments: The kernel arguments for the agent. Invoke method arguments take precedence over + the arguments provided here. + description: The description of the agent. + function_choice_behavior: The function choice behavior to determine how and which plugins are + advertised to the model. + kernel: The kernel instance. If both a kernel and a service are provided, the service will take precedence + if they share the same service_id or ai_model_id. Otherwise if separate, the first AI service + registered on the kernel will be used. + id: The unique identifier for the agent. If not provided, a unique GUID will be generated. - description: The description of the agent. (optional) - instructions: The instructions for the agent. (optional) - execution_settings: The execution settings for the agent. (optional) - history_reducer: The history reducer for the agent. (optional) + instructions: The instructions for the agent. + name: The name of the agent. + plugins: The plugins for the agent. If plugins are included along with a kernel, any plugins + that already exist in the kernel will be overwritten. + prompt_template_config: The prompt template configuration for the agent. + service: The chat completion service instance. If a kernel is provided with the same service_id or + `ai_model_id`, the service will take precedence. 
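        Example:
            A minimal construction and invocation sketch for the signature above; the
            OpenAIChatCompletion service and the MenuPlugin class are illustrative
            assumptions rather than part of this signature:

                from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion
                from semantic_kernel.contents.chat_history import ChatHistory

                agent = ChatCompletionAgent(
                    service=OpenAIChatCompletion(),   # wins over kernel services sharing the same service_id
                    name="Host",
                    instructions="Answer questions about the menu.",
                    plugins=[MenuPlugin()],           # assumed sample plugin object
                )
                history = ChatHistory()
                history.add_user_message("What is the special soup?")
                response = await agent.get_response(history)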
""" - if not service_id: - service_id = DEFAULT_SERVICE_NAME - args: dict[str, Any] = { - "service_id": service_id, "description": description, - "instructions": instructions, - "execution_settings": execution_settings, } if name is not None: args["name"] = name @@ -78,139 +94,278 @@ def __init__( args["id"] = id if kernel is not None: args["kernel"] = kernel - if history_reducer is not None: - args["history_reducer"] = history_reducer - super().__init__(**args) + if arguments is not None: + args["arguments"] = arguments + + if instructions and prompt_template_config and instructions != prompt_template_config.template: + logger.info( + f"Both `instructions` ({instructions}) and `prompt_template_config` " + f"({prompt_template_config.template}) were provided. Using template in `prompt_template_config` " + "and ignoring `instructions`." + ) - @trace_agent_invocation - async def invoke(self, history: ChatHistory) -> AsyncIterable[ChatMessageContent]: - """Invoke the chat history handler. + if plugins is not None: + args["plugins"] = plugins - Args: - kernel: The kernel instance. - history: The chat history. + if function_choice_behavior is not None: + args["function_choice_behavior"] = function_choice_behavior - Returns: - An async iterable of ChatMessageContent. - """ - # Get the chat completion service - chat_completion_service = self.kernel.get_service(service_id=self.service_id, type=ChatCompletionClientBase) - - if not chat_completion_service: - raise KernelServiceNotFoundError(f"Chat completion service not found with service_id: {self.service_id}") + if service is not None: + args["service"] = service - assert isinstance(chat_completion_service, ChatCompletionClientBase) # nosec + if instructions is not None: + args["instructions"] = instructions + if prompt_template_config is not None: + args["prompt_template"] = TEMPLATE_FORMAT_MAP[prompt_template_config.template_format]( + prompt_template_config=prompt_template_config + ) + if prompt_template_config.template is not None: + # Use the template from the prompt_template_config if it is provided + args["instructions"] = prompt_template_config.template + super().__init__(**args) - settings = ( - self.execution_settings - or self.kernel.get_prompt_execution_settings_from_service_id(self.service_id) - or chat_completion_service.instantiate_prompt_execution_settings( - service_id=self.service_id, extension_data={"ai_model_id": chat_completion_service.ai_model_id} + @model_validator(mode="after") + def configure_service(self) -> "ChatCompletionAgent": + """Configure the service used by the ChatCompletionAgent.""" + if self.service is None: + return self + if not isinstance(self.service, ChatCompletionClientBase): + raise AgentInitializationException( + f"Service provided for ChatCompletionAgent is not an instance of ChatCompletionClientBase. " + f"Service: {type(self.service)}" ) - ) + self.kernel.add_service(self.service, overwrite=True) + return self - chat = self._setup_agent_chat_history(history) + @trace_agent_get_response + @override + async def get_response( + self, + history: ChatHistory, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + **kwargs: Any, + ) -> ChatMessageContent: + """Get a response from the agent. - message_count = len(chat) + Args: + history: The chat history. + arguments: The kernel arguments. (optional) + kernel: The kernel instance. (optional) + kwargs: The keyword arguments. 
(optional) - logger.debug(f"[{type(self).__name__}] Invoking {type(chat_completion_service).__name__}.") + Returns: + A chat message content. + """ + responses: list[ChatMessageContent] = [] + async for response in self._inner_invoke(history, arguments, kernel, **kwargs): + responses.append(response) - messages = await chat_completion_service.get_chat_message_contents( - chat_history=chat, - settings=settings, - kernel=self.kernel, - ) + if not responses: + raise AgentInvokeException("No response from agent.") - logger.info( - f"[{type(self).__name__}] Invoked {type(chat_completion_service).__name__} " - f"with message count: {message_count}." - ) + return responses[0] - # Capture mutated messages related function calling / tools - for message_index in range(message_count, len(chat)): - message = chat[message_index] - message.name = self.name - history.add_message(message) + @trace_agent_invocation + @override + async def invoke( + self, + history: ChatHistory, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + **kwargs: Any, + ) -> AsyncIterable[ChatMessageContent]: + """Invoke the chat history handler. - for message in messages: - message.name = self.name - yield message + Args: + history: The chat history. + arguments: The kernel arguments. + kernel: The kernel instance. + kwargs: The keyword arguments. + + Returns: + An async iterable of ChatMessageContent. + """ + async for response in self._inner_invoke(history, arguments, kernel, **kwargs): + yield response @trace_agent_invocation - async def invoke_stream(self, history: ChatHistory) -> AsyncIterable[StreamingChatMessageContent]: + @override + async def invoke_stream( + self, + history: ChatHistory, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + **kwargs: Any, + ) -> AsyncIterable[StreamingChatMessageContent]: """Invoke the chat history handler in streaming mode. Args: - kernel: The kernel instance. history: The chat history. + arguments: The kernel arguments. + kernel: The kernel instance. + kwargs: The keyword arguments. Returns: An async generator of StreamingChatMessageContent. """ - # Get the chat completion service - chat_completion_service = self.kernel.get_service(service_id=self.service_id, type=ChatCompletionClientBase) + if arguments is None: + arguments = KernelArguments(**kwargs) + else: + arguments.update(kwargs) - if not chat_completion_service: - raise KernelServiceNotFoundError(f"Chat completion service not found with service_id: {self.service_id}") + # Add the chat history to the args in the event that it is needed for prompt template configuration + if "chat_history" not in arguments: + arguments["chat_history"] = history - assert isinstance(chat_completion_service, ChatCompletionClientBase) # nosec + kernel = kernel or self.kernel + arguments = self._merge_arguments(arguments) - settings = ( - self.execution_settings - or self.kernel.get_prompt_execution_settings_from_service_id(self.service_id) - or chat_completion_service.instantiate_prompt_execution_settings( - service_id=self.service_id, extension_data={"ai_model_id": chat_completion_service.ai_model_id} - ) + chat_completion_service, settings = await self._get_chat_completion_service_and_settings( + kernel=kernel, arguments=arguments + ) + + # If the user hasn't provided a function choice behavior, use the agent's default. 
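            # Illustrative assumption: a caller can pin the behavior for a single invocation by
            # supplying execution settings through the kernel arguments, e.g.
            #     arguments = KernelArguments(
            #         settings=PromptExecutionSettings(
            #             function_choice_behavior=FunctionChoiceBehavior.Auto(auto_invoke=False)
            #         )
            #     )
            # Any behavior already present on the selected settings is left untouched below.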
+ if settings.function_choice_behavior is None: + settings.function_choice_behavior = self.function_choice_behavior + + agent_chat_history = await self._prepare_agent_chat_history( + history=history, + kernel=kernel, + arguments=arguments, ) - chat = self._setup_agent_chat_history(history) + # Remove the chat history from the arguments, potentially used for the prompt, + # to avoid passing it to the service + arguments.pop("chat_history", None) - message_count = len(chat) + message_count_before_completion = len(agent_chat_history) logger.debug(f"[{type(self).__name__}] Invoking {type(chat_completion_service).__name__}.") - messages: AsyncGenerator[list[StreamingChatMessageContent], Any] = ( + responses: AsyncGenerator[list[StreamingChatMessageContent], Any] = ( chat_completion_service.get_streaming_chat_message_contents( - chat_history=chat, + chat_history=agent_chat_history, settings=settings, - kernel=self.kernel, + kernel=kernel, + arguments=arguments, ) ) - logger.info( + logger.debug( f"[{type(self).__name__}] Invoked {type(chat_completion_service).__name__} " - f"with message count: {message_count}." + f"with message count: {message_count_before_completion}." ) role = None - message_builder: list[str] = [] - async for message_list in messages: - for message in message_list: - role = message.role - message.name = self.name - message_builder.append(message.content) - yield message - - # Capture mutated messages related function calling / tools - for message_index in range(message_count, len(chat)): - message = chat[message_index] # type: ignore - message.name = self.name - history.add_message(message) - + response_builder: list[str] = [] + async for response_list in responses: + for response in response_list: + role = response.role + response.name = self.name + response_builder.append(response.content) + yield response + + self._capture_mutated_messages(history, agent_chat_history, message_count_before_completion) if role != AuthorRole.TOOL: history.add_message( ChatMessageContent( - role=role if role else AuthorRole.ASSISTANT, content="".join(message_builder), name=self.name + role=role if role else AuthorRole.ASSISTANT, content="".join(response_builder), name=self.name ) ) - def _setup_agent_chat_history(self, history: ChatHistory) -> ChatHistory: - """Setup the agent chat history.""" - chat = [] + async def _inner_invoke( + self, + history: ChatHistory, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + **kwargs: Any, + ) -> AsyncIterable[ChatMessageContent]: + """Helper method to invoke the agent with a chat history in non-streaming mode.""" + if arguments is None: + arguments = KernelArguments(**kwargs) + else: + arguments.update(kwargs) + + # Add the chat history to the args in the event that it is needed for prompt template configuration + if "chat_history" not in arguments: + arguments["chat_history"] = history + + kernel = kernel or self.kernel + arguments = self._merge_arguments(arguments) + + chat_completion_service, settings = await self._get_chat_completion_service_and_settings( + kernel=kernel, arguments=arguments + ) - if self.instructions is not None: - chat.append(ChatMessageContent(role=AuthorRole.SYSTEM, content=self.instructions, name=self.name)) + # If the user hasn't provided a function choice behavior, use the agent's default. 
+ if settings.function_choice_behavior is None: + settings.function_choice_behavior = self.function_choice_behavior - chat.extend(history.messages if history.messages else []) + agent_chat_history = await self._prepare_agent_chat_history( + history=history, + kernel=kernel, + arguments=arguments, + ) - return ChatHistory(messages=chat) + # Remove the chat history from the arguments, potentially used for the prompt, + # to avoid passing it to the service + arguments.pop("chat_history", None) + + message_count_before_completion = len(agent_chat_history) + + logger.debug(f"[{type(self).__name__}] Invoking {type(chat_completion_service).__name__}.") + + responses = await chat_completion_service.get_chat_message_contents( + chat_history=agent_chat_history, + settings=settings, + kernel=kernel, + arguments=arguments, + ) + + logger.debug( + f"[{type(self).__name__}] Invoked {type(chat_completion_service).__name__} " + f"with message count: {message_count_before_completion}." + ) + + self._capture_mutated_messages(history, agent_chat_history, message_count_before_completion) + + for response in responses: + response.name = self.name + yield response + + async def _prepare_agent_chat_history( + self, history: ChatHistory, kernel: "Kernel", arguments: KernelArguments + ) -> ChatHistory: + """Prepare the agent chat history from the input history by adding the formatted instructions.""" + formatted_instructions = await self.format_instructions(kernel, arguments) + messages = [] + if formatted_instructions: + messages.append(ChatMessageContent(role=AuthorRole.SYSTEM, content=formatted_instructions, name=self.name)) + if history.messages: + messages.extend(history.messages) + + return ChatHistory(messages=messages) + + async def _get_chat_completion_service_and_settings( + self, kernel: "Kernel", arguments: KernelArguments + ) -> tuple[ChatCompletionClientBase, PromptExecutionSettings]: + """Get the chat completion service and settings.""" + chat_completion_service, settings = kernel.select_ai_service(arguments=arguments, type=ChatCompletionClientBase) + + if not chat_completion_service: + raise KernelServiceNotFoundError( + "Chat completion service not found. Check your service or kernel configuration." 
+ ) + + assert isinstance(chat_completion_service, ChatCompletionClientBase) # nosec + assert settings is not None # nosec + + return chat_completion_service, settings + + def _capture_mutated_messages(self, caller_chat_history: ChatHistory, agent_chat_history: ChatHistory, start: int): + """Capture mutated messages related function calling/tools.""" + for message_index in range(start, len(agent_chat_history)): + message = agent_chat_history[message_index] # type: ignore + message.name = self.name + caller_chat_history.add_message(message) diff --git a/python/semantic_kernel/agents/group_chat/agent_chat.py b/python/semantic_kernel/agents/group_chat/agent_chat.py index 294f695cbb1b..65ad9d737905 100644 --- a/python/semantic_kernel/agents/group_chat/agent_chat.py +++ b/python/semantic_kernel/agents/group_chat/agent_chat.py @@ -3,7 +3,7 @@ import asyncio import logging import threading -from collections.abc import AsyncGenerator, AsyncIterable +from collections.abc import AsyncIterable from pydantic import Field, PrivateAttr @@ -16,12 +16,12 @@ from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions.agent_exceptions import AgentChatException from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class AgentChat(KernelBaseModel): """A base class chat interface for agents.""" @@ -54,17 +54,19 @@ def invoke(self, agent: Agent | None = None, is_joining: bool = True) -> AsyncIt """Invoke the agent asynchronously.""" raise NotImplementedError("Subclasses should implement this method") - async def get_messages_in_descending_order(self): + async def get_messages_in_descending_order(self) -> AsyncIterable[ChatMessageContent]: """Get messages in descending order asynchronously.""" for index in range(len(self.history.messages) - 1, -1, -1): yield self.history.messages[index] await asyncio.sleep(0) # Yield control to the event loop - async def get_chat_messages(self, agent: "Agent | None" = None) -> AsyncGenerator[ChatMessageContent, None]: + async def get_chat_messages(self, agent: "Agent | None" = None) -> AsyncIterable[ChatMessageContent]: """Get chat messages asynchronously.""" self.set_activity_or_throw() logger.info("Getting chat messages") + + messages: AsyncIterable[ChatMessageContent] | None = None try: if agent is None: messages = self.get_messages_in_descending_order() @@ -95,8 +97,11 @@ def _get_agent_hash(self, agent: Agent): return hash_value - async def add_chat_message(self, message: ChatMessageContent) -> None: + async def add_chat_message(self, message: str | ChatMessageContent) -> None: """Add a chat message.""" + if isinstance(message, str): + message = ChatMessageContent(role=AuthorRole.USER, content=message) + await self.add_chat_messages([message]) async def add_chat_messages(self, messages: list[ChatMessageContent]) -> None: diff --git a/python/semantic_kernel/agents/group_chat/agent_chat_utils.py b/python/semantic_kernel/agents/group_chat/agent_chat_utils.py index 0162bb94fe33..864863cff2c3 100644 --- a/python/semantic_kernel/agents/group_chat/agent_chat_utils.py +++ b/python/semantic_kernel/agents/group_chat/agent_chat_utils.py @@ -5,10 +5,10 @@ from collections.abc import Iterable from semantic_kernel.exceptions.agent_exceptions import AgentExecutionException -from 
semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class KeyEncoder: """A class for encoding keys.""" diff --git a/python/semantic_kernel/agents/group_chat/agent_group_chat.py b/python/semantic_kernel/agents/group_chat/agent_group_chat.py index 38d0d73af0ab..00ed794018a2 100644 --- a/python/semantic_kernel/agents/group_chat/agent_group_chat.py +++ b/python/semantic_kernel/agents/group_chat/agent_group_chat.py @@ -2,7 +2,8 @@ import logging from collections.abc import AsyncIterable -from typing import Any +from copy import deepcopy +from typing import TYPE_CHECKING, Any, cast from pydantic import Field @@ -14,14 +15,18 @@ from semantic_kernel.agents.strategies.selection.selection_strategy import SelectionStrategy from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.history_reducer.chat_history_reducer import ChatHistoryReducer from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions.agent_exceptions import AgentChatException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental + +if TYPE_CHECKING: + from semantic_kernel.contents.chat_history import ChatHistory logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class AgentGroupChat(AgentChat): """An agent chat that supports multi-turn interactions.""" @@ -29,7 +34,10 @@ class AgentGroupChat(AgentChat): agents: list[Agent] = Field(default_factory=list) is_complete: bool = False - termination_strategy: TerminationStrategy = Field(default_factory=DefaultTerminationStrategy) + termination_strategy: TerminationStrategy = Field( + default_factory=DefaultTerminationStrategy, + description="The termination strategy to use. The default strategy never terminates and has a max iterations of 5.", # noqa: E501 + ) selection_strategy: SelectionStrategy = Field(default_factory=SequentialSelectionStrategy) def __init__( @@ -37,6 +45,7 @@ def __init__( agents: list[Agent] | None = None, termination_strategy: TerminationStrategy | None = None, selection_strategy: SelectionStrategy | None = None, + chat_history: "ChatHistory | None" = None, ) -> None: """Initialize a new instance of AgentGroupChat. @@ -44,6 +53,7 @@ def __init__( agents: The agents to add to the group chat. termination_strategy: The termination strategy to use. selection_strategy: The selection strategy + chat_history: The chat history. 
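        Example:
            A sketch of the new parameter with a reducing history; the summarization reducer,
            its arguments, and the two agents are assumptions for illustration (see the
            reduce_history method added below):

                chat = AgentGroupChat(
                    agents=[writer, reviewer],
                    chat_history=ChatHistorySummarizationReducer(service=service, target_count=10),
                )
                # ... after several turns ...
                reduced = await chat.reduce_history()  # True only if the reducer shortened the history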
""" agent_ids = {agent.id for agent in agents} if agents else set() @@ -59,6 +69,8 @@ def __init__( args["termination_strategy"] = termination_strategy if selection_strategy is not None: args["selection_strategy"] = selection_strategy + if chat_history is not None: + args["history"] = chat_history super().__init__(**args) @@ -199,3 +211,18 @@ async def invoke_stream( if self.is_complete: break + + async def reduce_history(self) -> bool: + """Perform the reduction on the provided history, returning True if reduction occurred.""" + if not isinstance(self.history, ChatHistoryReducer): + return False + + result = await self.history.reduce() + if result is None: + return False + + reducer = cast(ChatHistoryReducer, result) + reduced_history = deepcopy(reducer.messages) + await self.reset() + await self.add_chat_messages(reduced_history) + return True diff --git a/python/semantic_kernel/agents/group_chat/broadcast_queue.py b/python/semantic_kernel/agents/group_chat/broadcast_queue.py index 0e77b14f91e4..fb5d7674129d 100644 --- a/python/semantic_kernel/agents/group_chat/broadcast_queue.py +++ b/python/semantic_kernel/agents/group_chat/broadcast_queue.py @@ -11,10 +11,10 @@ from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class QueueReference(KernelBaseModel): """Utility class to associate a queue with its specific lock.""" @@ -38,7 +38,7 @@ def validate_receive_task(cls, values: Any): return values -@experimental_class +@experimental @dataclass class ChannelReference: """Tracks a channel along with its hashed key.""" @@ -47,7 +47,7 @@ class ChannelReference: channel: AgentChannel = field(default_factory=AgentChannel) -@experimental_class +@experimental class BroadcastQueue(KernelBaseModel): """A queue for broadcasting messages to listeners.""" diff --git a/python/semantic_kernel/agents/open_ai/assistant_content_generation.py b/python/semantic_kernel/agents/open_ai/assistant_content_generation.py index 483586fedf02..d7116098635b 100644 --- a/python/semantic_kernel/agents/open_ai/assistant_content_generation.py +++ b/python/semantic_kernel/agents/open_ai/assistant_content_generation.py @@ -3,13 +3,15 @@ from typing import TYPE_CHECKING, Any from openai import AsyncOpenAI +from openai.types.beta.threads.file_citation_annotation import FileCitationAnnotation from openai.types.beta.threads.file_citation_delta_annotation import FileCitationDeltaAnnotation +from openai.types.beta.threads.file_path_annotation import FilePathAnnotation from openai.types.beta.threads.file_path_delta_annotation import FilePathDeltaAnnotation from openai.types.beta.threads.image_file_content_block import ImageFileContentBlock from openai.types.beta.threads.image_file_delta_block import ImageFileDeltaBlock from openai.types.beta.threads.message_delta_event import MessageDeltaEvent from openai.types.beta.threads.runs import CodeInterpreterLogs -from openai.types.beta.threads.runs.code_interpreter_tool_call import CodeInterpreter +from openai.types.beta.threads.runs.code_interpreter_tool_call import CodeInterpreterOutputImage from openai.types.beta.threads.text_content_block import TextContentBlock from openai.types.beta.threads.text_delta_block import TextDeltaBlock @@ -26,12 +28,11 @@ 
from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions.agent_exceptions import AgentExecutionException -from semantic_kernel.utils.experimental_decorator import experimental_function +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: - from openai.resources.beta.threads.messages import Message - from openai.resources.beta.threads.runs.runs import Run - from openai.types.beta.threads.annotation import Annotation + from openai.types.beta.threads.message import Message + from openai.types.beta.threads.run import Run from openai.types.beta.threads.runs import RunStep from openai.types.beta.threads.runs.tool_call import ToolCall from openai.types.beta.threads.runs.tool_calls_step_details import ToolCallsStepDetails @@ -44,7 +45,7 @@ ################################################################### -@experimental_function +@experimental async def create_chat_message( client: AsyncOpenAI, thread_id: str, @@ -76,7 +77,7 @@ async def create_chat_message( ) -@experimental_function +@experimental def get_message_contents(message: "ChatMessageContent") -> list[dict[str, Any]]: """Get the message contents. @@ -120,7 +121,7 @@ def get_message_contents(message: "ChatMessageContent") -> list[dict[str, Any]]: return contents -@experimental_function +@experimental def generate_message_content( assistant_name: str, message: "Message", completed_step: "RunStep | None" = None ) -> ChatMessageContent: @@ -163,13 +164,29 @@ def generate_message_content( return content -@experimental_function +@experimental def generate_streaming_message_content( - assistant_name: str, message_delta_event: "MessageDeltaEvent" + assistant_name: str, + message_delta_event: "MessageDeltaEvent", + completed_step: "RunStep | None" = None, ) -> StreamingChatMessageContent: """Generate streaming message content from a MessageDeltaEvent.""" delta = message_delta_event.delta + metadata = ( + { + "created_at": completed_step.created_at, + "message_id": message_delta_event.id, # message needs to be defined in context + "step_id": completed_step.id, + "run_id": completed_step.run_id, + "thread_id": completed_step.thread_id, + "assistant_id": completed_step.assistant_id, + "usage": completed_step.usage, + } + if completed_step is not None + else None + ) + # Determine the role role = AuthorRole(delta.role) if delta.role is not None else AuthorRole("assistant") @@ -202,10 +219,54 @@ def generate_streaming_message_content( ) ) - return StreamingChatMessageContent(role=role, name=assistant_name, items=items, choice_index=0) # type: ignore + return StreamingChatMessageContent(role=role, name=assistant_name, items=items, choice_index=0, metadata=metadata) # type: ignore + + +@experimental +def generate_final_streaming_message_content( + assistant_name: str, + message: "Message", + completed_step: "RunStep | None" = None, +) -> StreamingChatMessageContent: + """Generate streaming message content from a MessageDeltaEvent.""" + metadata = ( + { + "created_at": completed_step.created_at, + "message_id": message.id, # message needs to be defined in context + "step_id": completed_step.id, + "run_id": completed_step.run_id, + "thread_id": completed_step.thread_id, + "assistant_id": completed_step.assistant_id, + "usage": completed_step.usage, + } + if completed_step is not None + else None + ) + + # Determine the role + role = AuthorRole(message.role) if message.role is not None else AuthorRole("assistant") + + 
items: list[StreamingTextContent | StreamingAnnotationContent | StreamingFileReferenceContent] = [] + + # Process each content block in the delta + for item_content in message.content: + if item_content.type == "text": + assert isinstance(item_content, TextContentBlock) # nosec + items.append(StreamingTextContent(text=item_content.text.value, choice_index=0)) + for annotation in item_content.text.annotations: + items.append(generate_streaming_annotation_content(annotation)) + elif item_content.type == "image_file": + assert isinstance(item_content, ImageFileContentBlock) # nosec + items.append( + StreamingFileReferenceContent( + file_id=item_content.image_file.file_id, + ) + ) + + return StreamingChatMessageContent(role=role, name=assistant_name, items=items, choice_index=0, metadata=metadata) # type: ignore -@experimental_function +@experimental def generate_function_call_content(agent_name: str, fccs: list[FunctionCallContent]) -> ChatMessageContent: """Generate function call content. @@ -219,7 +280,7 @@ def generate_function_call_content(agent_name: str, fccs: list[FunctionCallConte return ChatMessageContent(role=AuthorRole.ASSISTANT, name=agent_name, items=fccs) # type: ignore -@experimental_function +@experimental def generate_function_result_content( agent_name: str, function_step: FunctionCallContent, tool_call: "ToolCall" ) -> ChatMessageContent: @@ -236,7 +297,7 @@ def generate_function_result_content( return function_call_content -@experimental_function +@experimental def get_function_call_contents(run: "Run", function_steps: dict[str, FunctionCallContent]) -> list[FunctionCallContent]: """Extract function call contents from the run. @@ -263,7 +324,7 @@ def get_function_call_contents(run: "Run", function_steps: dict[str, FunctionCal return function_call_contents -@experimental_function +@experimental def generate_code_interpreter_content(agent_name: str, code: str) -> "ChatMessageContent": """Generate code interpreter content. 
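A sketch of how the new final-message helper is consumed once a streamed run finishes; the
`message` and `completed_step` values come from the Assistants API, and the names mirror the
thread-actions code added later in this patch:

    final = generate_final_streaming_message_content(
        assistant_name=agent.name,
        message=message,            # full Message retrieved after the run completes
        completed_step=step,        # RunStep that produced it; populates run/thread/usage metadata
    )
    # final.metadata then carries run_id, thread_id, step_id, assistant_id and usage.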
@@ -282,7 +343,7 @@ def generate_code_interpreter_content(agent_name: str, code: str) -> "ChatMessag ) -@experimental_function +@experimental def generate_streaming_function_content( agent_name: str, step_details: "ToolCallsStepDetails" ) -> "StreamingChatMessageContent": @@ -320,7 +381,7 @@ def generate_streaming_function_content( ) -@experimental_function +@experimental def generate_streaming_code_interpreter_content( agent_name: str, step_details: "ToolCallsStepDetails" ) -> "StreamingChatMessageContent | None": @@ -348,7 +409,7 @@ def generate_streaming_code_interpreter_content( metadata["code"] = True if tool.code_interpreter.outputs: for output in tool.code_interpreter.outputs: - if isinstance(output, CodeInterpreter) and output.image.file_id: + if isinstance(output, CodeInterpreterOutputImage) and output.image.file_id: items.append( StreamingFileReferenceContent( file_id=output.image.file_id, @@ -375,14 +436,15 @@ def generate_streaming_code_interpreter_content( ) -@experimental_function -def generate_annotation_content(annotation: "Annotation") -> AnnotationContent: +@experimental +def generate_annotation_content(annotation: FileCitationAnnotation | FilePathAnnotation) -> AnnotationContent: """Generate annotation content.""" file_id = None - if hasattr(annotation, "file_path"): - file_id = annotation.file_path.file_id - elif hasattr(annotation, "file_citation"): - file_id = annotation.file_citation.file_id + match annotation: + case FilePathAnnotation(): + file_id = annotation.file_path.file_id + case FileCitationAnnotation(): + file_id = annotation.file_citation.file_id return AnnotationContent( file_id=file_id, @@ -392,14 +454,21 @@ def generate_annotation_content(annotation: "Annotation") -> AnnotationContent: ) -@experimental_function -def generate_streaming_annotation_content(annotation: "Annotation") -> StreamingAnnotationContent: +@experimental +def generate_streaming_annotation_content( + annotation: FileCitationAnnotation | FilePathAnnotation | FilePathDeltaAnnotation | FileCitationDeltaAnnotation, +) -> StreamingAnnotationContent: """Generate streaming annotation content.""" file_id = None - if hasattr(annotation, "file_path") and annotation.file_path: - file_id = annotation.file_path.file_id if annotation.file_path.file_id else None - elif hasattr(annotation, "file_citation") and annotation.file_citation: - file_id = annotation.file_citation.file_id if annotation.file_citation.file_id else None + match annotation: + case FilePathAnnotation(): + file_id = annotation.file_path.file_id + case FileCitationAnnotation(): + file_id = annotation.file_citation.file_id + case FilePathDeltaAnnotation(): + file_id = annotation.file_path.file_id if annotation.file_path is not None else None + case FileCitationDeltaAnnotation(): + file_id = annotation.file_citation.file_id if annotation.file_citation is not None else None return StreamingAnnotationContent( file_id=file_id, diff --git a/python/semantic_kernel/agents/open_ai/assistant_thread_actions.py b/python/semantic_kernel/agents/open_ai/assistant_thread_actions.py new file mode 100644 index 000000000000..92bdb43470cf --- /dev/null +++ b/python/semantic_kernel/agents/open_ai/assistant_thread_actions.py @@ -0,0 +1,770 @@ +# Copyright (c) Microsoft. All rights reserved. 
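# A condensed calling sketch for the message helper defined below; the agent, thread_id and
# user text are assumptions for illustration. Messages carrying function-call items are
# skipped and the helper returns None:
#
#     msg = await AssistantThreadActions.create_message(
#         client=agent.client, thread_id=thread_id, message="What is the special soup?"
#     )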
+ +import asyncio +import logging +from collections.abc import AsyncIterable, Iterable, Sequence +from typing import TYPE_CHECKING, Any, ClassVar, Literal, TypeVar, cast + +from openai import AsyncOpenAI +from openai.types.beta.code_interpreter_tool import CodeInterpreterTool +from openai.types.beta.file_search_tool import FileSearchTool +from openai.types.beta.threads.run_create_params import AdditionalMessage, AdditionalMessageAttachment +from openai.types.beta.threads.runs import ( + MessageCreationStepDetails, + RunStep, + RunStepDeltaEvent, + ToolCallDeltaObject, + ToolCallsStepDetails, +) + +from semantic_kernel.agents.azure_ai.agent_content_generation import generate_function_call_streaming_content +from semantic_kernel.agents.open_ai.assistant_content_generation import ( + generate_code_interpreter_content, + generate_final_streaming_message_content, + generate_function_call_content, + generate_function_result_content, + generate_message_content, + generate_streaming_code_interpreter_content, + generate_streaming_function_content, + generate_streaming_message_content, + get_function_call_contents, + get_message_contents, +) +from semantic_kernel.agents.open_ai.function_action_result import FunctionActionResult +from semantic_kernel.connectors.ai.function_calling_utils import ( + kernel_function_metadata_to_function_call_format, + merge_streaming_function_results, +) +from semantic_kernel.contents.file_reference_content import FileReferenceContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions.agent_exceptions import ( + AgentExecutionException, + AgentInvokeException, +) +from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.utils.feature_stage_decorator import experimental + +if TYPE_CHECKING: + from openai import AsyncOpenAI + from openai.types.beta.assistant_response_format_option_param import AssistantResponseFormatOptionParam + from openai.types.beta.assistant_tool_param import AssistantToolParam + from openai.types.beta.threads.message import Message + from openai.types.beta.threads.run import Run + from openai.types.beta.threads.run_create_params import AdditionalMessageAttachmentTool, TruncationStrategy + + from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent + from semantic_kernel.contents.chat_history import ChatHistory + from semantic_kernel.contents.chat_message_content import ChatMessageContent + from semantic_kernel.contents.function_call_content import FunctionCallContent + from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent + from semantic_kernel.kernel import Kernel + +_T = TypeVar("_T", bound="AssistantThreadActions") + +logger: logging.Logger = logging.getLogger(__name__) + + +@experimental +class AssistantThreadActions: + """Assistant Thread Actions class.""" + + polling_status: ClassVar[list[str]] = ["queued", "in_progress", "cancelling"] + error_message_states: ClassVar[list[str]] = ["failed", "cancelled", "expired", "incomplete"] + + tool_metadata: ClassVar[dict[str, Sequence[Any]]] = { + "file_search": [{"type": "file_search"}], + "code_interpreter": [{"type": "code_interpreter"}], + } + + # region Messaging Handling Methods + + @classmethod + async def create_message( + cls: type[_T], + client: "AsyncOpenAI", + 
thread_id: str, + message: "str | ChatMessageContent", + allowed_message_roles: list[str] = [AuthorRole.USER, AuthorRole.ASSISTANT], + **kwargs: Any, + ) -> "Message | None": + """Create a message in the thread. + + Args: + client: The client to use to create the message. + thread_id: The ID of the thread to create the message in. + message: The message to create. + allowed_message_roles: The allowed message roles. + kwargs: Additional keyword arguments. + + Returns: + The created message. + """ + from semantic_kernel.contents.chat_message_content import ChatMessageContent + + if isinstance(message, str): + message = ChatMessageContent(role=AuthorRole.USER, content=message) + + if any(isinstance(item, FunctionCallContent) for item in message.items): + return None + + if message.role.value not in allowed_message_roles and message.role != AuthorRole.TOOL: + raise AgentExecutionException( + f"Invalid message role `{message.role.value}`. Allowed roles are {allowed_message_roles}." + ) + + message_contents: list[dict[str, Any]] = get_message_contents(message=message) + + return await client.beta.threads.messages.create( + thread_id=thread_id, + role="assistant" if message.role == AuthorRole.TOOL else message.role.value, # type: ignore + content=message_contents, # type: ignore + **kwargs, + ) + + # endregion + + # region Invocation Methods + + @classmethod + async def invoke( + cls: type[_T], + *, + agent: "OpenAIAssistantAgent", + thread_id: str, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + # Run-level parameters: + additional_instructions: str | None = None, + additional_messages: "list[ChatMessageContent] | None" = None, + instructions_override: str | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + metadata: dict[str, str] | None = None, + model: str | None = None, + parallel_tool_calls: bool | None = None, + reasoning_effort: Literal["low", "medium", "high"] | None = None, + response_format: "AssistantResponseFormatOptionParam | None" = None, + tools: "list[AssistantToolParam] | None" = None, + temperature: float | None = None, + top_p: float | None = None, + truncation_strategy: "TruncationStrategy | None" = None, + **kwargs: Any, + ) -> AsyncIterable[tuple[bool, "ChatMessageContent"]]: + """Invoke the assistant. + + Args: + agent: The assistant agent. + thread_id: The thread ID. + arguments: The kernel arguments. + kernel: The kernel. + instructions_override: The instructions override. + additional_instructions: The additional instructions. + additional_messages: The additional messages. + max_completion_tokens: The maximum completion tokens. + max_prompt_tokens: The maximum prompt tokens. + metadata: The metadata. + model: The model. + parallel_tool_calls: The parallel tool calls. + reasoning_effort: The reasoning effort. + response_format: The response format. + tools: The tools. + temperature: The temperature. + top_p: The top p. + truncation_strategy: The truncation strategy. + kwargs: Additional keyword arguments. + + Returns: + An async iterable of tuple of the visibility of the message and the chat message content. 
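        Example:
            Run-level parameters override the corresponding agent definition values for this
            run only; a sketch assuming an existing agent and thread:

                async for is_visible, content in AssistantThreadActions.invoke(
                    agent=agent,
                    thread_id=thread_id,
                    temperature=0.2,
                    max_completion_tokens=500,
                ):
                    if is_visible:
                        print(content.content)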
+ """ + arguments = KernelArguments() if arguments is None else KernelArguments(**arguments, **kwargs) + kernel = kernel or agent.kernel + + tools = cls._get_tools(agent=agent, kernel=kernel) # type: ignore + + base_instructions = await agent.format_instructions(kernel=kernel, arguments=arguments) + + merged_instructions: str = "" + if instructions_override is not None: + merged_instructions = instructions_override + elif base_instructions and additional_instructions: + merged_instructions = f"{base_instructions}\n\n{additional_instructions}" + else: + merged_instructions = base_instructions or additional_instructions or "" + + # form run options + run_options = cls._generate_options( + agent=agent, + model=model, + response_format=response_format, + temperature=temperature, + top_p=top_p, + metadata=metadata, + parallel_tool_calls_enabled=parallel_tool_calls, + truncation_message_count=truncation_strategy, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + additional_messages=additional_messages, + reasoning_effort=reasoning_effort, + ) + + run_options = {k: v for k, v in run_options.items() if v is not None} + + run = await agent.client.beta.threads.runs.create( + assistant_id=agent.id, + thread_id=thread_id, + instructions=merged_instructions or agent.instructions, + tools=tools, # type: ignore + **run_options, + ) + + processed_step_ids = set() + function_steps: dict[str, "FunctionCallContent"] = {} + + while run.status != "completed": + run = await cls._poll_run_status(agent=agent, run=run, thread_id=thread_id) + + if run.status in cls.error_message_states: + error_message = "" + if run.last_error and run.last_error.message: + error_message = run.last_error.message + incomplete_details = "" + if run.incomplete_details: + incomplete_details = str(run.incomplete_details.reason) + raise AgentInvokeException( + f"Run failed with status: `{run.status}` for agent `{agent.name}` and thread `{thread_id}` " + f"with error: {error_message} or incomplete details: {incomplete_details}" + ) + + # Check if function calling required + if run.status == "requires_action": + logger.debug(f"Run [{run.id}] requires action for agent `{agent.name}` and thread `{thread_id}`") + fccs = get_function_call_contents(run, function_steps) + if fccs: + logger.debug( + f"Yielding `generate_function_call_content` for agent `{agent.name}` and " + f"thread `{thread_id}`, visibility False" + ) + yield False, generate_function_call_content(agent_name=agent.name, fccs=fccs) + + from semantic_kernel.contents.chat_history import ChatHistory + + chat_history = ChatHistory() + _ = await cls._invoke_function_calls(kernel=kernel, fccs=fccs, chat_history=chat_history) + + tool_outputs = cls._format_tool_outputs(fccs, chat_history) + await agent.client.beta.threads.runs.submit_tool_outputs( + run_id=run.id, + thread_id=thread_id, + tool_outputs=tool_outputs, # type: ignore + ) + logger.debug(f"Submitted tool outputs for agent `{agent.name}` and thread `{thread_id}`") + + steps_response = await agent.client.beta.threads.runs.steps.list(run_id=run.id, thread_id=thread_id) + logger.debug(f"Called for steps_response for run [{run.id}] agent `{agent.name}` and thread `{thread_id}`") + steps: list[RunStep] = steps_response.data + + def sort_key(step: RunStep): + # Put tool_calls first, then message_creation + # If multiple steps share a type, break ties by completed_at + return (0 if step.type == "tool_calls" else 1, step.completed_at) + + completed_steps_to_process = sorted( + [s for s in steps if 
s.completed_at is not None and s.id not in processed_step_ids], key=sort_key + ) + + logger.debug( + f"Completed steps to process for run [{run.id}] agent `{agent.name}` and thread `{thread_id}` " + f"with length `{len(completed_steps_to_process)}`" + ) + + message_count = 0 + for completed_step in completed_steps_to_process: + if completed_step.type == "tool_calls": + logger.debug( + f"Entering step type tool_calls for run [{run.id}], agent `{agent.name}` and " + f"thread `{thread_id}`" + ) + assert hasattr(completed_step.step_details, "tool_calls") # nosec + tool_call_details = cast(ToolCallsStepDetails, completed_step.step_details) + for tool_call in tool_call_details.tool_calls: + is_visible = False + content: "ChatMessageContent | None" = None + if tool_call.type == "code_interpreter": + logger.debug( + f"Entering step type tool_calls for run [{run.id}], [code_interpreter] for " + f"agent `{agent.name}` and thread `{thread_id}`" + ) + content = generate_code_interpreter_content( + agent.name, + tool_call.code_interpreter.input, # type: ignore + ) + is_visible = True + elif tool_call.type == "function": + logger.debug( + f"Entering step type tool_calls for run [{run.id}], [function] for agent " + f"`{agent.name}` and thread `{thread_id}`" + ) + function_step = function_steps.get(tool_call.id) + assert function_step is not None # nosec + content = generate_function_result_content( + agent_name=agent.name, function_step=function_step, tool_call=tool_call + ) + + if content: + message_count += 1 + logger.debug( + f"Yielding tool_message for run [{run.id}], agent `{agent.name}` and thread " + f"`{thread_id}` and message count `{message_count}`, is_visible `{is_visible}`" + ) + yield is_visible, content + elif completed_step.type == "message_creation": + logger.debug( + f"Entering step type message_creation for run [{run.id}], agent `{agent.name}` and " + f"thread `{thread_id}`" + ) + message = await cls._retrieve_message( + agent=agent, + thread_id=thread_id, + message_id=completed_step.step_details.message_creation.message_id, # type: ignore + ) + if message: + content = generate_message_content(agent.name, message) + if content and len(content.items) > 0: + message_count += 1 + logger.debug( + f"Yielding message_creation for run [{run.id}], agent `{agent.name}` and " + f"thread `{thread_id}` and message count `{message_count}`, is_visible `{True}`" + ) + yield True, content + processed_step_ids.add(completed_step.id) + + @classmethod + async def invoke_stream( + cls: type[_T], + *, + agent: "OpenAIAssistantAgent", + thread_id: str, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + # Run-level parameters: + additional_instructions: str | None = None, + additional_messages: "list[ChatMessageContent] | None" = None, + instructions_override: str | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + messages: list["ChatMessageContent"] | None = None, + metadata: dict[str, str] | None = None, + model: str | None = None, + parallel_tool_calls: bool | None = None, + reasoning_effort: Literal["low", "medium", "high"] | None = None, + response_format: "AssistantResponseFormatOptionParam | None" = None, + tools: "list[AssistantToolParam] | None" = None, + temperature: float | None = None, + top_p: float | None = None, + truncation_strategy: "TruncationStrategy | None" = None, + **kwargs: Any, + ) -> AsyncIterable["StreamingChatMessageContent"]: + """Invoke the assistant. + + Args: + agent: The assistant agent. 
+ thread_id: The thread ID. + arguments: The kernel arguments. + kernel: The kernel. + instructions_override: The instructions override. + additional_instructions: The additional instructions. + additional_messages: The additional messages. + max_completion_tokens: The maximum completion tokens. + max_prompt_tokens: The maximum prompt tokens. + messages: The messages that act as a receiver for completed messages. + metadata: The metadata. + model: The model. + parallel_tool_calls: The parallel tool calls. + reasoning_effort: The reasoning effort. + response_format: The response format. + tools: The tools. + temperature: The temperature. + top_p: The top p. + truncation_strategy: The truncation strategy. + kwargs: Additional keyword arguments. + + Returns: + An async iterable of tuple of the visibility of the message and the chat message content. + """ + arguments = KernelArguments() if arguments is None else KernelArguments(**arguments, **kwargs) + kernel = kernel or agent.kernel + + tools = cls._get_tools(agent=agent, kernel=kernel) # type: ignore + + base_instructions = await agent.format_instructions(kernel=kernel, arguments=arguments) + + merged_instructions: str = "" + if instructions_override is not None: + merged_instructions = instructions_override + elif base_instructions and additional_instructions: + merged_instructions = f"{base_instructions}\n\n{additional_instructions}" + else: + merged_instructions = base_instructions or additional_instructions or "" + + # form run options + run_options = cls._generate_options( + agent=agent, + model=model, + response_format=response_format, + temperature=temperature, + top_p=top_p, + metadata=metadata, + parallel_tool_calls_enabled=parallel_tool_calls, + truncation_message_count=truncation_strategy, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + additional_messages=additional_messages, + reasoning_effort=reasoning_effort, + ) + + run_options = {k: v for k, v in run_options.items() if v is not None} + + stream = agent.client.beta.threads.runs.stream( + assistant_id=agent.id, + thread_id=thread_id, + instructions=merged_instructions or agent.instructions, + tools=tools, # type: ignore + **run_options, + ) + + function_steps: dict[str, "FunctionCallContent"] = {} + active_messages: dict[str, RunStep] = {} + + while True: + async with stream as response_stream: + async for event in response_stream: + if event.event == "thread.run.created": + run = event.data + logger.info(f"Assistant run created with ID: {run.id}") + elif event.event == "thread.run.in_progress": + run = event.data + logger.info(f"Assistant run in progress with ID: {run.id}") + elif event.event == "thread.message.delta": + content = generate_streaming_message_content(agent.name, event.data) + yield content + elif event.event == "thread.run.step.completed": + step_completed = cast(RunStep, event.data) + logger.info(f"Run step completed with ID: {event.data.id}") + if isinstance(step_completed.step_details, MessageCreationStepDetails): + message_id = step_completed.step_details.message_creation.message_id + if message_id not in active_messages: + active_messages[message_id] = event.data + elif event.event == "thread.run.step.delta": + run_step_event: RunStepDeltaEvent = event.data + details = run_step_event.delta.step_details + if not details: + continue + step_details = event.data.delta.step_details + if isinstance(details, ToolCallDeltaObject) and details.tool_calls: + for tool_call in details.tool_calls: + tool_content = None + if 
tool_call.type == "function": + tool_content = generate_streaming_function_content(agent.name, step_details) + elif tool_call.type == "code_interpreter": + tool_content = generate_streaming_code_interpreter_content(agent.name, step_details) + if tool_content: + yield tool_content + elif event.event == "thread.run.requires_action": + run = event.data + function_action_result = await cls._handle_streaming_requires_action( + agent.name, kernel, run, function_steps + ) + if function_action_result is None: + raise AgentInvokeException( + f"Function call required but no function steps found for agent `{agent.name}` " + f"thread: {thread_id}." + ) + if function_action_result.function_result_streaming_content: + # Yield the function result content to the caller + yield function_action_result.function_result_streaming_content + if messages is not None: + # Add the function result content to the messages list, if it exists + messages.append(function_action_result.function_result_streaming_content) + if function_action_result.function_call_streaming_content: + if messages is not None: + messages.append(function_action_result.function_call_streaming_content) + stream = agent.client.beta.threads.runs.submit_tool_outputs_stream( + run_id=run.id, + thread_id=thread_id, + tool_outputs=function_action_result.tool_outputs, # type: ignore + ) + break + elif event.event == "thread.run.completed": + run = event.data + logger.info(f"Run completed with ID: {run.id}") + if len(active_messages) > 0: + for id in active_messages: + step: RunStep = active_messages[id] + message = await cls._retrieve_message( + agent=agent, + thread_id=thread_id, + message_id=id, # type: ignore + ) + + if message and message.content: + content = generate_final_streaming_message_content(agent.name, message, step) + if messages is not None: + messages.append(content) + return + elif event.event == "thread.run.failed": + run = event.data # type: ignore + error_message = "" + if run.last_error and run.last_error.message: + error_message = run.last_error.message + raise AgentInvokeException( + f"Run failed with status: `{run.status}` for agent `{agent.name}` and thread `{thread_id}` " + f"with error: {error_message}" + ) + else: + # If the inner loop completes without encountering a 'break', exit the outer loop + break + + @classmethod + async def _handle_streaming_requires_action( + cls: type[_T], + agent_name: str, + kernel: "Kernel", + run: "Run", + function_steps: dict[str, "FunctionCallContent"], + **kwargs: Any, + ) -> FunctionActionResult | None: + """Handle the requires action event for a streaming run.""" + fccs = get_function_call_contents(run, function_steps) + if fccs: + function_call_streaming_content = generate_function_call_streaming_content(agent_name=agent_name, fccs=fccs) + from semantic_kernel.contents.chat_history import ChatHistory + + chat_history = ChatHistory() if kwargs.get("chat_history") is None else kwargs["chat_history"] + _ = await cls._invoke_function_calls(kernel=kernel, fccs=fccs, chat_history=chat_history) + function_result_streaming_content = merge_streaming_function_results(chat_history.messages)[0] + tool_outputs = cls._format_tool_outputs(fccs, chat_history) + return FunctionActionResult( + function_call_streaming_content, function_result_streaming_content, tool_outputs + ) + return None + + # endregion + + @classmethod + async def _retrieve_message( + cls: type[_T], agent: "OpenAIAssistantAgent", thread_id: str, message_id: str + ) -> "Message | None": + """Retrieve a message from a thread.""" + 
message: "Message | None" = None + count = 0 + max_retries = 3 + while count < max_retries: + try: + message = await agent.client.beta.threads.messages.retrieve(thread_id=thread_id, message_id=message_id) + break + except Exception as ex: + logger.error(f"Failed to retrieve message {message_id} from thread {thread_id}: {ex}") + count += 1 + if count >= max_retries: + logger.error( + f"Max retries reached. Unable to retrieve message {message_id} from thread {thread_id}." + ) + break + backoff_time: float = agent.polling_options.message_synchronization_delay.total_seconds() * (2**count) + await asyncio.sleep(backoff_time) + return message + + @classmethod + async def _invoke_function_calls( + cls: type[_T], kernel: "Kernel", fccs: list["FunctionCallContent"], chat_history: "ChatHistory" + ) -> list[Any]: + """Invoke the function calls.""" + tasks = [ + kernel.invoke_function_call(function_call=function_call, chat_history=chat_history) + for function_call in fccs + ] + return await asyncio.gather(*tasks) + + @classmethod + def _format_tool_outputs( + cls: type[_T], fccs: list["FunctionCallContent"], chat_history: "ChatHistory" + ) -> list[dict[str, str]]: + """Format the tool outputs for submission.""" + from semantic_kernel.contents.function_result_content import FunctionResultContent + + tool_call_lookup = { + tool_call.id: tool_call + for message in chat_history.messages + for tool_call in message.items + if isinstance(tool_call, FunctionResultContent) + } + return [ + {"tool_call_id": fcc.id, "output": str(tool_call_lookup[fcc.id].result)} + for fcc in fccs + if fcc.id in tool_call_lookup + ] + + @classmethod + async def _poll_run_status(cls: type[_T], agent: "OpenAIAssistantAgent", run: "Run", thread_id: str) -> "Run": + """Poll the run status.""" + logger.info(f"Polling run status: {run.id}, threadId: {thread_id}") + + try: + run = await asyncio.wait_for( + cls._poll_loop(agent, run, thread_id), + timeout=agent.polling_options.run_polling_timeout.total_seconds(), + ) + except asyncio.TimeoutError: + timeout_duration = agent.polling_options.run_polling_timeout + error_message = f"Polling timed out for run id: `{run.id}` and thread id: `{thread_id}` after waiting {timeout_duration}." # noqa: E501 + logger.error(error_message) + raise AgentInvokeException(error_message) + + logger.info(f"Polled run status: {run.status}, {run.id}, threadId: {thread_id}") + return run + + @classmethod + async def _poll_loop(cls: type[_T], agent: "OpenAIAssistantAgent", run: "Run", thread_id: str) -> "Run": + """Internal polling loop.""" + count = 0 + while True: + await asyncio.sleep(agent.polling_options.get_polling_interval(count).total_seconds()) + count += 1 + + try: + run = await agent.client.beta.threads.runs.retrieve(run.id, thread_id=thread_id) + except Exception as e: + logging.warning(f"Failed to retrieve run for run id: `{run.id}` and thread id: `{thread_id}`: {e}") + # Retry anyway + + if run.status not in cls.polling_status: + break + + return run + + @classmethod + def _merge_options( + cls: type[_T], + *, + agent: "OpenAIAssistantAgent", + model: str | None = None, + response_format: "AssistantResponseFormatOptionParam | None" = None, + temperature: float | None = None, + top_p: float | None = None, + metadata: dict[str, str] | None = None, + **kwargs: Any, + ) -> dict[str, Any]: + """Merge run-time options with the agent-level options. + + Run-level parameters take precedence. 
+ """ + return { + "model": model if model is not None else agent.definition.model, + "response_format": response_format if response_format is not None else None, + "temperature": temperature if temperature is not None else agent.definition.temperature, + "top_p": top_p if top_p is not None else agent.definition.top_p, + "metadata": metadata if metadata is not None else agent.definition.metadata, + **kwargs, + } + + @classmethod + def _generate_options(cls: type[_T], **kwargs: Any) -> dict[str, Any]: + """Generate a dictionary of options that can be passed directly to create_run.""" + merged = cls._merge_options(**kwargs) + agent = kwargs.get("agent") + trunc_count = merged.get("truncation_message_count", None) + max_completion_tokens = merged.get("max_completion_tokens", None) + max_prompt_tokens = merged.get("max_prompt_tokens", None) + parallel_tool_calls = merged.get("parallel_tool_calls_enabled", None) + additional_messages = cls._translate_additional_messages(agent, merged.get("additional_messages", None)) + return { + "model": merged.get("model"), + "top_p": merged.get("top_p"), + "response_format": merged.get("response_format"), + "temperature": merged.get("temperature"), + "truncation_strategy": trunc_count, + "metadata": merged.get("metadata"), + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "parallel_tool_calls": parallel_tool_calls, + "additional_messages": additional_messages, + } + + @classmethod + def _translate_additional_messages( + cls: type[_T], agent, messages: "list[ChatMessageContent] | None" + ) -> list[AdditionalMessage] | None: + """Translate additional messages to the required format.""" + if not messages: + return None + return cls._form_additional_messages(messages) + + @classmethod + def _form_additional_messages( + cls: type[_T], messages: list["ChatMessageContent"] + ) -> list[AdditionalMessage] | None: + """Form the additional messages for the specified thread.""" + if not messages: + return None + + additional_messages = [] + for message in messages: + if not message.content: + continue + + message_with_all: AdditionalMessage = { + "content": message.content, + "role": "assistant" if message.role == AuthorRole.ASSISTANT else "user", + "attachments": cls._get_attachments(message) if message.items else None, + "metadata": cls._get_metadata(message) if message.metadata else None, + } + additional_messages.append(message_with_all) + return additional_messages + + @classmethod + def _get_attachments(cls: type[_T], message: "ChatMessageContent") -> list[AdditionalMessageAttachment]: + return [ + AdditionalMessageAttachment( + file_id=file_content.file_id, + tools=list(cls._get_tool_definition(file_content.tools)), # type: ignore + data_source=file_content.data_source if file_content.data_source else None, + ) + for file_content in message.items + if isinstance(file_content, (FileReferenceContent, StreamingFileReferenceContent)) + and file_content.file_id is not None + ] + + @classmethod + def _get_metadata(cls: type[_T], message: "ChatMessageContent") -> dict[str, str]: + """Get the metadata for an agent message.""" + return {k: str(v) if v is not None else "" for k, v in (message.metadata or {}).items()} + + @classmethod + def _get_tool_definition(cls: type[_T], tools: list[Any]) -> Iterable["AdditionalMessageAttachmentTool"]: + if not tools: + return + for tool in tools: + if tool_definition := cls.tool_metadata.get(tool): + yield from tool_definition + + @classmethod + def _get_tools(cls: type[_T], agent: 
"OpenAIAssistantAgent", kernel: "Kernel") -> list[dict[str, str]]: + """Get the list of tools for the assistant. + + Returns: + The list of tools. + """ + tools: list[Any] = [] + + for tool in agent.definition.tools: + if isinstance(tool, CodeInterpreterTool): + tools.append({"type": "code_interpreter"}) + elif isinstance(tool, FileSearchTool): + tools.append({"type": "file_search"}) + + funcs = agent.kernel.get_full_list_of_function_metadata() + tools.extend([kernel_function_metadata_to_function_call_format(f) for f in funcs]) + + return tools diff --git a/python/semantic_kernel/agents/open_ai/azure_assistant_agent.py b/python/semantic_kernel/agents/open_ai/azure_assistant_agent.py index d307f0ac2466..f82c305acdd6 100644 --- a/python/semantic_kernel/agents/open_ai/azure_assistant_agent.py +++ b/python/semantic_kernel/agents/open_ai/azure_assistant_agent.py @@ -1,403 +1,77 @@ # Copyright (c) Microsoft. All rights reserved. -import logging -from collections.abc import AsyncIterable, Awaitable, Callable +from collections.abc import Awaitable, Callable from copy import copy -from typing import TYPE_CHECKING, Any +from typing import Any from openai import AsyncAzureOpenAI from pydantic import ValidationError -from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase +from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent from semantic_kernel.connectors.ai.open_ai.settings.azure_open_ai_settings import AzureOpenAISettings -from semantic_kernel.const import DEFAULT_SERVICE_NAME from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException -from semantic_kernel.kernel_pydantic import HttpsUrl from semantic_kernel.utils.authentication.entra_id_authentication import get_entra_auth_token -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import release_candidate from semantic_kernel.utils.telemetry.user_agent import APP_INFO, prepend_semantic_kernel_to_user_agent -if TYPE_CHECKING: - from semantic_kernel.kernel import Kernel +@release_candidate +class AzureAssistantAgent(OpenAIAssistantAgent): + """An Azure Assistant Agent class that extends the OpenAI Assistant Agent class.""" -logger: logging.Logger = logging.getLogger(__name__) - - -@experimental_class -class AzureAssistantAgent(OpenAIAssistantBase): - """Azure OpenAI Assistant Agent class. - - Provides the ability to interact with Azure OpenAI Assistants. 
- """ - - # region Agent Initialization - - def __init__( - self, - kernel: "Kernel | None" = None, - service_id: str | None = None, - deployment_name: str | None = None, - api_key: str | None = None, - endpoint: HttpsUrl | None = None, - api_version: str | None = None, + @staticmethod + def setup_resources( + *, ad_token: str | None = None, ad_token_provider: Callable[[], str | Awaitable[str]] | None = None, - client: AsyncAzureOpenAI | None = None, - default_headers: dict[str, str] | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None, - description: str | None = None, - id: str | None = None, - instructions: str | None = None, - name: str | None = None, - enable_code_interpreter: bool | None = None, - enable_file_search: bool | None = None, - enable_json_response: bool | None = None, - file_ids: list[str] | None = None, - temperature: float | None = None, - top_p: float | None = None, - vector_store_id: str | None = None, - metadata: dict[str, Any] | None = None, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - parallel_tool_calls_enabled: bool | None = True, - truncation_message_count: int | None = None, - token_endpoint: str | None = None, - **kwargs: Any, - ) -> None: - """Initialize an Azure OpenAI Assistant Agent. - - Args: - kernel: The Kernel instance. (optional) - service_id: The service ID. (optional) - deployment_name: The deployment name. (optional) - api_key: The Azure OpenAI API key. (optional) - endpoint: The Azure OpenAI endpoint. (optional) - api_version: The Azure OpenAI API version. (optional) - ad_token: The Azure AD token. (optional) - ad_token_provider: The Azure AD token provider. (optional) - client: The Azure OpenAI client. (optional) - default_headers: The default headers. (optional) - env_file_path: The environment file path. (optional) - env_file_encoding: The environment file encoding. (optional) - description: The description. (optional) - id: The Agent ID. (optional) - instructions: The Agent instructions. (optional) - name: The Agent name. (optional) - enable_code_interpreter: Enable the code interpreter. (optional) - enable_file_search: Enable the file search. (optional) - enable_json_response: Enable the JSON response. (optional) - file_ids: The file IDs. (optional) - temperature: The temperature. (optional) - top_p: The top p. (optional) - vector_store_id: The vector store ID. (optional) - metadata: The metadata. (optional) - max_completion_tokens: The maximum completion tokens. (optional) - max_prompt_tokens: The maximum prompt tokens. (optional) - parallel_tool_calls_enabled: Enable parallel tool calls. (optional) - truncation_message_count: The truncation message count. (optional) - token_endpoint: The Azure AD token endpoint. (optional) - **kwargs: Additional keyword arguments. - - Raises: - AgentInitializationError: If the api_key is not provided in the configuration. 
- """ - azure_openai_settings = self._create_azure_openai_settings( - api_key=api_key, - endpoint=endpoint, - deployment_name=deployment_name, - api_version=api_version, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - token_endpoint=token_endpoint, - ) - - client, ad_token = self._setup_client_and_token( - azure_openai_settings=azure_openai_settings, - ad_token=ad_token, - ad_token_provider=ad_token_provider, - client=client, - default_headers=default_headers, - ) - - service_id = service_id if service_id else DEFAULT_SERVICE_NAME - - args: dict[str, Any] = { - "kernel": kernel, - "ai_model_id": azure_openai_settings.chat_deployment_name, - "service_id": service_id, - "client": client, - "name": name, - "description": description, - "instructions": instructions, - "enable_code_interpreter": enable_code_interpreter, - "enable_file_search": enable_file_search, - "enable_json_response": enable_json_response, - "file_ids": file_ids or [], - "temperature": temperature, - "top_p": top_p, - "vector_store_id": vector_store_id, - "metadata": metadata or {}, - "max_completion_tokens": max_completion_tokens, - "max_prompt_tokens": max_prompt_tokens, - "parallel_tool_calls_enabled": parallel_tool_calls_enabled, - "truncation_message_count": truncation_message_count, - } - - if id is not None: - args["id"] = id - if kernel is not None: - args["kernel"] = kernel - if kwargs: - args.update(kwargs) - - super().__init__(**args) - - @classmethod - async def create( - cls, - *, - kernel: "Kernel | None" = None, - service_id: str | None = None, - deployment_name: str | None = None, api_key: str | None = None, - endpoint: HttpsUrl | None = None, api_version: str | None = None, - ad_token: str | None = None, - ad_token_provider: Callable[[], str | Awaitable[str]] | None = None, - client: AsyncAzureOpenAI | None = None, + base_url: str | None = None, default_headers: dict[str, str] | None = None, + deployment_name: str | None = None, + endpoint: str | None = None, env_file_path: str | None = None, env_file_encoding: str | None = None, - description: str | None = None, - id: str | None = None, - instructions: str | None = None, - name: str | None = None, - enable_code_interpreter: bool | None = None, - code_interpreter_filenames: list[str] | None = None, - code_interpreter_file_ids: list[str] | None = None, - enable_file_search: bool | None = None, - vector_store_filenames: list[str] | None = None, - vector_store_file_ids: list[str] | None = None, - enable_json_response: bool | None = None, - temperature: float | None = None, - top_p: float | None = None, - vector_store_id: str | None = None, - metadata: dict[str, Any] | None = None, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - parallel_tool_calls_enabled: bool | None = True, - truncation_message_count: int | None = None, - token_endpoint: str | None = None, + token_scope: str | None = None, **kwargs: Any, - ) -> "AzureAssistantAgent": - """Asynchronous class method used to create the OpenAI Assistant Agent. - - Args: - kernel: The Kernel instance. (optional) - service_id: The service ID. (optional) - deployment_name: The deployment name. (optional) - api_key: The Azure OpenAI API key. (optional) - endpoint: The Azure OpenAI endpoint. (optional) - api_version: The Azure OpenAI API version. (optional) - ad_token: The Azure AD token. (optional) - ad_token_provider: The Azure AD token provider. (optional) - client: The Azure OpenAI client. (optional) - default_headers: The default headers. 
(optional) - env_file_path: The environment file path. (optional) - env_file_encoding: The environment file encoding. (optional) - description: The description. (optional) - id: The Agent ID. (optional) - instructions: The Agent instructions. (optional) - name: The Agent name. (optional) - enable_code_interpreter: Enable the code interpreter. (optional) - code_interpreter_filenames: The filenames/paths to use with the code interpreter. (optional) - code_interpreter_file_ids: The existing file IDs to use with the code interpreter. (optional) - enable_file_search: Enable the file search. (optional) - vector_store_filenames: The filenames/paths for files to use with file search. (optional) - vector_store_file_ids: The existing file IDs to use with file search. (optional) - enable_json_response: Enable the JSON response. (optional) - temperature: The temperature. (optional) - top_p: The top p. (optional) - vector_store_id: The vector store ID. (optional) - metadata: The metadata. (optional) - max_completion_tokens: The maximum completion tokens. (optional) - max_prompt_tokens: The maximum prompt tokens. (optional) - parallel_tool_calls_enabled: Enable parallel tool calls. (optional) - truncation_message_count: The truncation message count. (optional) - token_endpoint: The Azure AD token endpoint. (optional) - **kwargs: Additional keyword arguments. + ) -> tuple[AsyncAzureOpenAI, str]: + """A method to create the Azure OpenAI client and the deployment name/model from the provided arguments. - Returns: - An instance of the AzureAssistantAgent - """ - agent = cls( - kernel=kernel, - service_id=service_id, - deployment_name=deployment_name, - api_key=api_key, - endpoint=endpoint, - api_version=api_version, - ad_token=ad_token, - ad_token_provider=ad_token_provider, - client=client, - default_headers=default_headers, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - description=description, - id=id, - instructions=instructions, - name=name, - enable_code_interpreter=enable_code_interpreter, - enable_file_search=enable_file_search, - enable_json_response=enable_json_response, - temperature=temperature, - top_p=top_p, - vector_store_id=vector_store_id, - metadata=metadata or {}, - max_completion_tokens=max_completion_tokens, - max_prompt_tokens=max_prompt_tokens, - parallel_tool_calls_enabled=parallel_tool_calls_enabled, - truncation_message_count=truncation_message_count, - token_endpoint=token_endpoint, - **kwargs, - ) - - assistant_create_kwargs: dict[str, Any] = {} - - code_interpreter_file_ids_combined: list[str] = [] - if code_interpreter_file_ids is not None: - code_interpreter_file_ids_combined.extend(code_interpreter_file_ids) - if code_interpreter_filenames is not None: - for file_path in code_interpreter_filenames: - try: - file_id = await agent.add_file(file_path=file_path, purpose="assistants") - code_interpreter_file_ids_combined.append(file_id) - except FileNotFoundError as ex: - logger.error( - f"Failed to upload code interpreter file with path: `{file_path}` with exception: {ex}" - ) - raise AgentInitializationException("Failed to upload code interpreter files.", ex) from ex - if code_interpreter_file_ids_combined: - agent.code_interpreter_file_ids = code_interpreter_file_ids_combined - assistant_create_kwargs["code_interpreter_file_ids"] = code_interpreter_file_ids_combined - - vector_store_file_ids_combined: list[str] = [] - if vector_store_file_ids is not None: - vector_store_file_ids_combined.extend(vector_store_file_ids) - if vector_store_filenames is not 
None: - for file_path in vector_store_filenames: - try: - file_id = await agent.add_file(file_path=file_path, purpose="assistants") - vector_store_file_ids_combined.append(file_id) - except FileNotFoundError as ex: - logger.error(f"Failed to upload vector store file with path: `{file_path}` with exception: {ex}") - raise AgentInitializationException("Failed to upload vector store files.", ex) from ex - if vector_store_file_ids_combined: - agent.file_search_file_ids = vector_store_file_ids_combined - if enable_file_search or agent.enable_file_search: - vector_store_id = await agent.create_vector_store(file_ids=vector_store_file_ids_combined) - agent.vector_store_id = vector_store_id - assistant_create_kwargs["vector_store_id"] = vector_store_id - - agent.assistant = await agent.create_assistant(**assistant_create_kwargs) - return agent - - @classmethod - async def retrieve( - cls, - *, - id: str, - api_key: str | None = None, - endpoint: HttpsUrl | None = None, - api_version: str | None = None, - ad_token: str | None = None, - ad_token_provider: Callable[[], str | Awaitable[str]] | None = None, - client: AsyncAzureOpenAI | None = None, - kernel: "Kernel | None" = None, - default_headers: dict[str, str] | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None, - token_endpoint: str | None = None, - ) -> "AzureAssistantAgent": - """Retrieve an assistant by ID. + Any arguments provided will override the values in the environment variables/environment file. Args: - id: The assistant ID. - api_key: The Azure OpenAI API key. (optional) - endpoint: The Azure OpenAI endpoint. (optional) - api_version: The Azure OpenAI API version. (optional) - ad_token: The Azure AD token. (optional) - ad_token_provider: The Azure AD token provider. (optional) - client: The Azure OpenAI client. (optional) - kernel: The Kernel instance. (optional) - default_headers: The default headers. (optional) - env_file_path: The environment file path. (optional) - env_file_encoding: The environment file encoding. (optional) - token_endpoint: The Azure AD token endpoint. (optional) - - Returns: - An AzureAssistantAgent instance. 
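With the `retrieve` classmethod removed, fetching an existing assistant becomes a raw client call plus the new constructor. A sketch of that replacement pattern; the assistant id is a placeholder and configuration is assumed to come from the environment:

import asyncio
from semantic_kernel.agents import AzureAssistantAgent

async def main() -> None:
    client, _ = AzureAssistantAgent.setup_resources()
    # Plain OpenAI SDK call; "asst_..." stands in for a real assistant id.
    definition = await client.beta.assistants.retrieve("asst_...")
    agent = AzureAssistantAgent(client=client, definition=definition)
    print(agent.name, agent.id)

asyncio.run(main())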
- """ - azure_openai_settings = cls._create_azure_openai_settings( - api_key=api_key, - endpoint=endpoint, - deployment_name=None, # Not required for retrieving an existing assistant - api_version=api_version, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - token_endpoint=token_endpoint, - ) - - client, ad_token = cls._setup_client_and_token( - azure_openai_settings=azure_openai_settings, - ad_token=ad_token, - ad_token_provider=ad_token_provider, - client=client, - default_headers=default_headers, - ) - - assistant = await client.beta.assistants.retrieve(id) - assistant_definition = OpenAIAssistantBase._create_open_ai_assistant_definition(assistant) - - return AzureAssistantAgent( - kernel=kernel, - assistant=assistant, - client=client, - ad_token=ad_token, - api_key=api_key, - endpoint=endpoint, - api_version=api_version, - default_headers=default_headers, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - token_endpoint=token_endpoint, - **assistant_definition, - ) - - @staticmethod - def _setup_client_and_token( - azure_openai_settings: AzureOpenAISettings, - ad_token: str | None, - ad_token_provider: Callable[[], str | Awaitable[str]] | None, - client: AsyncAzureOpenAI | None, - default_headers: dict[str, str] | None, - ) -> tuple[AsyncAzureOpenAI, str | None]: - """Helper method that ensures either an AD token or an API key is present. - - Retrieves a new AD token if needed, and configures the AsyncAzureOpenAI client. + ad_token: The Microsoft Entra (previously Azure AD) token represented as a string + ad_token_provider: The Microsoft Entra (previously Azure AD) token provider provided as a callback + api_key: The API key + api_version: The API version + base_url: The base URL in the form https://.azure.openai.com/openai/deployments/ + default_headers: The default headers to add to the client + deployment_name: The deployment name + endpoint: The endpoint in the form https://.azure.openai.com + env_file_path: The environment file path + env_file_encoding: The environment file encoding, defaults to utf-8 + token_scope: The token scope + kwargs: Additional keyword arguments Returns: - A tuple of (client, ad_token), where client is guaranteed not to be None. + An Azure OpenAI client instance and the configured deployment name (model) """ - if not azure_openai_settings.chat_deployment_name: - raise AgentInitializationException("The Azure OpenAI chat_deployment_name is required.") + try: + azure_openai_settings = AzureOpenAISettings.create( + api_key=api_key, + base_url=base_url, + endpoint=endpoint, + chat_deployment_name=deployment_name, + api_version=api_version, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + token_endpoint=token_scope, + ) + except ValidationError as exc: + raise AgentInitializationException(f"Failed to create Azure OpenAI settings: {exc}") from exc - # If everything is missing, but there is a token_endpoint, try to get the token. 
if ( - client is None - and azure_openai_settings.api_key is None + azure_openai_settings.api_key is None and ad_token_provider is None and ad_token is None and azure_openai_settings.token_endpoint @@ -405,114 +79,32 @@ def _setup_client_and_token( ad_token = get_entra_auth_token(azure_openai_settings.token_endpoint) # If we still have no credentials, we can't proceed - if not client and not azure_openai_settings.api_key and not ad_token and not ad_token_provider: + if not azure_openai_settings.api_key and not ad_token and not ad_token_provider: raise AgentInitializationException( - "Please provide either a client, an api_key, ad_token or ad_token_provider." + "Please provide either an api_key, ad_token or ad_token_provider for authentication." ) - # Build the client if it's not supplied - if not client: - client = AzureAssistantAgent._create_client( - api_key=azure_openai_settings.api_key.get_secret_value() if azure_openai_settings.api_key else None, - endpoint=azure_openai_settings.endpoint, - api_version=azure_openai_settings.api_version, - ad_token=ad_token, - ad_token_provider=ad_token_provider, - default_headers=default_headers, - ) - - return client, ad_token - - @staticmethod - def _create_client( - api_key: str | None = None, - endpoint: HttpsUrl | None = None, - api_version: str | None = None, - ad_token: str | None = None, - ad_token_provider: Callable[[], str | Awaitable[str]] | None = None, - default_headers: dict[str, str] | None = None, - ) -> AsyncAzureOpenAI: - """Create the OpenAI client from configuration. - - Args: - api_key: The OpenAI API key. - endpoint: The OpenAI endpoint. - api_version: The OpenAI API version. - ad_token: The Azure AD token. - ad_token_provider: The Azure AD token provider. - default_headers: The default headers. - - Returns: - An AsyncAzureOpenAI client instance. - """ merged_headers = dict(copy(default_headers)) if default_headers else {} + if default_headers: + merged_headers.update(default_headers) if APP_INFO: merged_headers.update(APP_INFO) merged_headers = prepend_semantic_kernel_to_user_agent(merged_headers) - if not api_key and not ad_token and not ad_token_provider: - raise AgentInitializationException( - "Please provide either AzureOpenAI api_key, an ad_token, ad_token_provider, or a client." - ) - if not endpoint: - raise AgentInitializationException("Please provide an AzureOpenAI endpoint.") + if not azure_openai_settings.endpoint: + raise AgentInitializationException("Please provide an Azure OpenAI endpoint") + + if not azure_openai_settings.chat_deployment_name: + raise AgentInitializationException("Please provide an Azure OpenAI deployment name") - return AsyncAzureOpenAI( - azure_endpoint=str(endpoint), - api_version=api_version, - api_key=api_key, + client = AsyncAzureOpenAI( + azure_endpoint=str(azure_openai_settings.endpoint), + api_version=azure_openai_settings.api_version, + api_key=azure_openai_settings.api_key.get_secret_value() if azure_openai_settings.api_key else None, azure_ad_token=ad_token, azure_ad_token_provider=ad_token_provider, default_headers=merged_headers, + **kwargs, ) - @staticmethod - def _create_azure_openai_settings( - api_key: str | None = None, - endpoint: HttpsUrl | None = None, - deployment_name: str | None = None, - api_version: str | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None, - token_endpoint: str | None = None, - ) -> AzureOpenAISettings: - """Create the Azure OpenAI settings. - - Args: - api_key: The Azure OpenAI API key. 
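When nothing is passed explicitly, `AzureOpenAISettings.create` falls back to environment variables or a `.env` file. The variable names below are the ones Semantic Kernel conventionally reads; treat them as an assumption, and the values as placeholders:

import os
from semantic_kernel.agents import AzureAssistantAgent

os.environ.setdefault("AZURE_OPENAI_ENDPOINT", "https://<your-resource>.openai.azure.com")  # placeholder
os.environ.setdefault("AZURE_OPENAI_API_KEY", "<api-key>")  # placeholder
os.environ.setdefault("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", "gpt-4o")  # placeholder

client, model = AzureAssistantAgent.setup_resources()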
- endpoint: The Azure OpenAI endpoint. - deployment_name: The Azure OpenAI chat deployment name. - api_version: The Azure OpenAI API version. - env_file_path: The environment file path. - env_file_encoding: The environment file encoding. - token_endpoint: The Azure AD token endpoint. - - Returns: - An instance of the AzureOpenAISettings. - """ - try: - azure_openai_settings = AzureOpenAISettings.create( - api_key=api_key, - endpoint=endpoint, - chat_deployment_name=deployment_name, - api_version=api_version, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - token_endpoint=token_endpoint, - ) - except ValidationError as ex: - raise AgentInitializationException("Failed to create Azure OpenAI settings.", ex) from ex - - return azure_openai_settings - - async def list_definitions(self) -> AsyncIterable[dict[str, Any]]: - """List the assistant definitions. - - Yields: - An AsyncIterable of dictionaries representing the OpenAIAssistantDefinition. - """ - assistants = await self.client.beta.assistants.list(order="desc") - for assistant in assistants.data: - yield OpenAIAssistantBase._create_open_ai_assistant_definition(assistant) - - # endregion + return client, azure_openai_settings.chat_deployment_name diff --git a/python/semantic_kernel/agents/open_ai/function_action_result.py b/python/semantic_kernel/agents/open_ai/function_action_result.py index 48f6eb13bf4e..b971c8899a95 100644 --- a/python/semantic_kernel/agents/open_ai/function_action_result.py +++ b/python/semantic_kernel/agents/open_ai/function_action_result.py @@ -3,17 +3,17 @@ import logging from dataclasses import dataclass -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental @dataclass class FunctionActionResult: """Function Action Result.""" - function_call_content: ChatMessageContent | None - function_result_content: ChatMessageContent | None + function_call_streaming_content: StreamingChatMessageContent | None + function_result_streaming_content: StreamingChatMessageContent | None tool_outputs: list[dict[str, str]] | None diff --git a/python/semantic_kernel/agents/open_ai/open_ai_assistant_agent.py b/python/semantic_kernel/agents/open_ai/open_ai_assistant_agent.py index 7db30aa6f5c2..efabc6194c6c 100644 --- a/python/semantic_kernel/agents/open_ai/open_ai_assistant_agent.py +++ b/python/semantic_kernel/agents/open_ai/open_ai_assistant_agent.py @@ -1,29 +1,66 @@ # Copyright (c) Microsoft. All rights reserved. 
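Taken together, the refactor replaces the old `create(...)` factory with three explicit steps: resolve the client, create (or retrieve) the assistant definition through the client, then wrap it in the agent. A sketch of that flow; the name, instructions, and question are illustrative only:

import asyncio
from semantic_kernel.agents import AzureAssistantAgent

async def main() -> None:
    client, model = AzureAssistantAgent.setup_resources()

    # The definition is the plain Assistant object returned by the OpenAI SDK.
    definition = await client.beta.assistants.create(
        model=model,
        name="Host",
        instructions="Answer questions about the menu.",
    )
    agent = AzureAssistantAgent(client=client, definition=definition)

    thread = await client.beta.threads.create()
    await agent.add_chat_message(thread.id, "What is the special soup?")
    response = await agent.get_response(thread.id)
    print(response.content)

asyncio.run(main())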
import logging -from collections.abc import AsyncIterable +import sys +from collections.abc import AsyncIterable, Iterable from copy import copy -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, ClassVar, Literal -from openai import AsyncOpenAI -from pydantic import ValidationError +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover -from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase +from openai import AsyncOpenAI +from openai.lib._parsing._completions import type_to_response_format_param +from openai.types.beta.assistant import Assistant +from openai.types.beta.assistant_create_params import ( + ToolResources, + ToolResourcesCodeInterpreter, + ToolResourcesFileSearch, +) +from openai.types.beta.assistant_response_format_option_param import AssistantResponseFormatOptionParam +from openai.types.beta.file_search_tool_param import FileSearchToolParam +from pydantic import BaseModel, Field, ValidationError + +from semantic_kernel.agents import Agent +from semantic_kernel.agents.channels.agent_channel import AgentChannel +from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel +from semantic_kernel.agents.open_ai.assistant_content_generation import generate_message_content +from semantic_kernel.agents.open_ai.assistant_thread_actions import AssistantThreadActions +from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions from semantic_kernel.connectors.ai.open_ai.settings.open_ai_settings import OpenAISettings -from semantic_kernel.const import DEFAULT_SERVICE_NAME -from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.connectors.utils.structured_output_schema import generate_structured_output_response_format_schema +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException, AgentInvokeException +from semantic_kernel.functions import KernelArguments +from semantic_kernel.functions.kernel_function import TEMPLATE_FORMAT_MAP +from semantic_kernel.functions.kernel_plugin import KernelPlugin +from semantic_kernel.schema.kernel_json_schema_builder import KernelJsonSchemaBuilder +from semantic_kernel.utils.feature_stage_decorator import release_candidate +from semantic_kernel.utils.naming import generate_random_ascii_name +from semantic_kernel.utils.telemetry.agent_diagnostics.decorators import ( + trace_agent_get_response, + trace_agent_invocation, +) from semantic_kernel.utils.telemetry.user_agent import APP_INFO, prepend_semantic_kernel_to_user_agent if TYPE_CHECKING: - from semantic_kernel.kernel import Kernel + from openai import AsyncOpenAI + from openai.types.beta.assistant_tool_param import AssistantToolParam + from openai.types.beta.code_interpreter_tool_param import CodeInterpreterToolParam + from openai.types.beta.threads.message import Message + from openai.types.beta.threads.run_create_params import TruncationStrategy + from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent + from semantic_kernel.kernel import Kernel + from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig logger: logging.Logger = logging.getLogger(__name__) -@experimental_class -class 
OpenAIAssistantAgent(OpenAIAssistantBase): +@release_candidate +class OpenAIAssistantAgent(Agent): """OpenAI Assistant Agent class. Provides the ability to interact with OpenAI Assistants. @@ -31,396 +68,580 @@ class OpenAIAssistantAgent(OpenAIAssistantBase): # region Agent Initialization + client: AsyncOpenAI + definition: Assistant + plugins: list[Any] = Field(default_factory=list) + polling_options: RunPollingOptions = Field(default_factory=RunPollingOptions) + + channel_type: ClassVar[type[AgentChannel]] = OpenAIAssistantChannel # type: ignore + def __init__( self, *, + arguments: KernelArguments | None = None, + client: AsyncOpenAI, + definition: Assistant, kernel: "Kernel | None" = None, - service_id: str | None = None, - ai_model_id: str | None = None, - api_key: str | None = None, - org_id: str | None = None, - client: AsyncOpenAI | None = None, - default_headers: dict[str, str] | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None, - description: str | None = None, - id: str | None = None, - instructions: str | None = None, - name: str | None = None, - enable_code_interpreter: bool | None = None, - enable_file_search: bool | None = None, - enable_json_response: bool | None = None, - code_interpreter_file_ids: list[str] | None = None, - temperature: float | None = None, - top_p: float | None = None, - vector_store_id: str | None = None, - metadata: dict[str, Any] | None = None, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - parallel_tool_calls_enabled: bool | None = True, - truncation_message_count: int | None = None, + plugins: list[KernelPlugin | object] | dict[str, KernelPlugin | object] | None = None, + polling_options: RunPollingOptions | None = None, + prompt_template_config: "PromptTemplateConfig | None" = None, **kwargs: Any, ) -> None: """Initialize an OpenAIAssistant service. Args: - kernel: The Kernel instance. (optional) - service_id: The service ID. (optional) If not provided the default service name (default) is used. - ai_model_id: The AI model ID. (optional) - api_key: The OpenAI API key. (optional) - org_id: The OpenAI organization ID. (optional) - client: The OpenAI client. (optional) - default_headers: The default headers. (optional) - env_file_path: The environment file path. (optional) - env_file_encoding: The environment file encoding. (optional) - description: The assistant description. (optional) - id: The assistant ID. (optional) - instructions: The assistant instructions. (optional) - name: The assistant name. (optional) - enable_code_interpreter: Enable code interpreter. (optional) - enable_file_search: Enable file search. (optional) - enable_json_response: Enable JSON response. (optional) - code_interpreter_file_ids: The file IDs. (optional) - temperature: The temperature. (optional) - top_p: The top p. (optional) - vector_store_id: The vector store ID. (optional) - metadata: The assistant metadata. (optional) - max_completion_tokens: The max completion tokens. (optional) - max_prompt_tokens: The max prompt tokens. (optional) - parallel_tool_calls_enabled: Enable parallel tool calls. (optional) - truncation_message_count: The truncation message count. (optional) + arguments: The arguments to pass to the function. + client: The OpenAI client. + definition: The assistant definition. + kernel: The Kernel instance. + plugins: The plugins to add to the kernel. If both the plugins and the kernel are supplied, + the plugins take precedence and are added to the kernel by default. 
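The constructor now takes the client and definition directly; plugins and polling behaviour are optional extras. A sketch assuming `client` and `definition` were obtained as in the snippet above and `MenuPlugin` is a hypothetical plugin class:

from datetime import timedelta
from semantic_kernel.agents import OpenAIAssistantAgent
from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions

agent = OpenAIAssistantAgent(
    client=client,
    definition=definition,
    plugins=[MenuPlugin()],  # added to the agent's kernel for function calling
    polling_options=RunPollingOptions(run_polling_timeout=timedelta(minutes=2)),
)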
+ polling_options: The polling options. + prompt_template_config: The prompt template configuration. kwargs: Additional keyword arguments. - - Raises: - AgentInitializationError: If the api_key is not provided in the configuration. """ - openai_settings = OpenAIAssistantAgent._create_open_ai_settings( - api_key=api_key, - org_id=org_id, - ai_model_id=ai_model_id, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - ) - - if not client and not openai_settings.api_key: - raise AgentInitializationException("The OpenAI API key is required, if a client is not provided.") - if not openai_settings.chat_model_id: - raise AgentInitializationException("The OpenAI chat model ID is required.") - - if not client: - client = self._create_client( - api_key=openai_settings.api_key.get_secret_value() if openai_settings.api_key else None, - org_id=openai_settings.org_id, - default_headers=default_headers, - ) - - service_id = service_id if service_id else DEFAULT_SERVICE_NAME - args: dict[str, Any] = { - "ai_model_id": openai_settings.chat_model_id, - "service_id": service_id, "client": client, - "description": description, - "instructions": instructions, - "enable_code_interpreter": enable_code_interpreter, - "enable_file_search": enable_file_search, - "enable_json_response": enable_json_response, - "code_interpreter_file_ids": code_interpreter_file_ids or [], - "temperature": temperature, - "top_p": top_p, - "vector_store_id": vector_store_id, - "metadata": metadata or {}, - "max_completion_tokens": max_completion_tokens, - "max_prompt_tokens": max_prompt_tokens, - "parallel_tool_calls_enabled": parallel_tool_calls_enabled, - "truncation_message_count": truncation_message_count, + "definition": definition, + "name": definition.name or f"assistant_agent_{generate_random_ascii_name(length=8)}", + "description": definition.description, } - if name is not None: - args["name"] = name - if id is not None: - args["id"] = id + if arguments is not None: + args["arguments"] = arguments + if definition.id is not None: + args["id"] = definition.id + if definition.instructions is not None: + args["instructions"] = definition.instructions if kernel is not None: args["kernel"] = kernel + + if ( + definition.instructions + and prompt_template_config + and definition.instructions != prompt_template_config.template + ): + logger.info( + f"Both `instructions` ({definition.instructions}) and `prompt_template_config` " + f"({prompt_template_config.template}) were provided. Using template in `prompt_template_config` " + "and ignoring `instructions`." 
+ ) + + if plugins is not None: + args["plugins"] = plugins + + if prompt_template_config is not None: + args["prompt_template"] = TEMPLATE_FORMAT_MAP[prompt_template_config.template_format]( + prompt_template_config=prompt_template_config + ) + if prompt_template_config.template is not None: + # Use the template from the prompt_template_config if it is provided + args["instructions"] = prompt_template_config.template + if polling_options is not None: + args["polling_options"] = polling_options if kwargs: args.update(kwargs) super().__init__(**args) - @classmethod - async def create( - cls, + @staticmethod + def setup_resources( *, - kernel: "Kernel | None" = None, - service_id: str | None = None, ai_model_id: str | None = None, api_key: str | None = None, org_id: str | None = None, - client: AsyncOpenAI | None = None, - default_headers: dict[str, str] | None = None, env_file_path: str | None = None, env_file_encoding: str | None = None, - description: str | None = None, - id: str | None = None, - instructions: str | None = None, - name: str | None = None, - enable_code_interpreter: bool | None = None, - code_interpreter_filenames: list[str] | None = None, - code_interpreter_file_ids: list[str] | None = None, - enable_file_search: bool | None = None, - vector_store_filenames: list[str] | None = None, - vector_store_file_ids: list[str] | None = None, - enable_json_response: bool | None = None, - temperature: float | None = None, - top_p: float | None = None, - vector_store_id: str | None = None, - metadata: dict[str, Any] | None = None, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - parallel_tool_calls_enabled: bool | None = True, - truncation_message_count: int | None = None, + default_headers: dict[str, str] | None = None, **kwargs: Any, - ) -> "OpenAIAssistantAgent": - """Asynchronous class method used to create the OpenAI Assistant Agent. + ) -> tuple[AsyncOpenAI, str]: + """A method to create the OpenAI client and the model from the provided arguments. + + Any arguments provided will override the values in the environment variables/environment file. Args: - kernel: The Kernel instance. (optional) - service_id: The service ID. (optional) If not provided the default service name (default) is used. - ai_model_id: The AI model ID. (optional) - api_key: The OpenAI API key. (optional) - org_id: The OpenAI organization ID. (optional) - client: The OpenAI client. (optional) - default_headers: The default headers. (optional) - env_file_path: The environment file path. (optional) - env_file_encoding: The environment file encoding. (optional) - description: The assistant description. (optional) - id: The assistant ID. (optional) - instructions: The assistant instructions. (optional) - name: The assistant name. (optional) - enable_code_interpreter: Enable code interpreter. (optional) - code_interpreter_filenames: The filenames/paths for files to use with code interpreter. (optional) - code_interpreter_file_ids: The existing file IDs to use with the code interpreter. (optional) - enable_file_search: Enable the file search. (optional) - vector_store_filenames: The filenames/paths for files to use with file search. (optional) - vector_store_file_ids: The existing file IDs to use with file search. (optional) - enable_json_response: Enable JSON response. (optional) - temperature: The temperature. (optional) - top_p: The top p. (optional) - vector_store_id: The vector store ID. (optional) - metadata: The assistant metadata. 
(optional) - max_completion_tokens: The max completion tokens. (optional) - max_prompt_tokens: The max prompt tokens. (optional) - parallel_tool_calls_enabled: Enable parallel tool calls. (optional) - truncation_message_count: The truncation message count. (optional) - kwargs: Additional keyword arguments. + ai_model_id: The AI model ID + api_key: The API key + org_id: The organization ID + env_file_path: The environment file path + env_file_encoding: The environment file encoding, defaults to utf-8 + default_headers: The default headers to add to the client + kwargs: Additional keyword arguments Returns: - An OpenAIAssistantAgent instance. + An OpenAI client instance and the configured model name """ - agent = cls( - kernel=kernel, - service_id=service_id, - ai_model_id=ai_model_id, - api_key=api_key, - org_id=org_id, - client=client, - default_headers=default_headers, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - description=description, - id=id, - instructions=instructions, - name=name, - enable_code_interpreter=enable_code_interpreter, - enable_file_search=enable_file_search, - enable_json_response=enable_json_response, - temperature=temperature, - top_p=top_p, - vector_store_id=vector_store_id, - metadata=metadata or {}, - max_completion_tokens=max_completion_tokens, - max_prompt_tokens=max_prompt_tokens, - parallel_tool_calls_enabled=parallel_tool_calls_enabled, - truncation_message_count=truncation_message_count, + try: + openai_settings = OpenAISettings.create( + chat_model_id=ai_model_id, + api_key=api_key, + org_id=org_id, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + ) + except ValidationError as ex: + raise AgentInitializationException("Failed to create OpenAI settings.", ex) from ex + + if not openai_settings.api_key: + raise AgentInitializationException("The OpenAI API key is required.") + + if not openai_settings.chat_model_id: + raise AgentInitializationException("The OpenAI model ID is required.") + + merged_headers = dict(copy(default_headers)) if default_headers else {} + if default_headers: + merged_headers.update(default_headers) + if APP_INFO: + merged_headers.update(APP_INFO) + merged_headers = prepend_semantic_kernel_to_user_agent(merged_headers) + + client = AsyncOpenAI( + api_key=openai_settings.api_key.get_secret_value() if openai_settings.api_key else None, + organization=openai_settings.org_id, + default_headers=merged_headers, **kwargs, ) - assistant_create_kwargs: dict[str, Any] = {} + return client, openai_settings.chat_model_id - code_interpreter_file_ids_combined: list[str] = [] + # endregion - if code_interpreter_file_ids is not None: - code_interpreter_file_ids_combined.extend(code_interpreter_file_ids) + # region Tool Handling + + @staticmethod + def configure_code_interpreter_tool( + file_ids: str | list[str] | None = None, **kwargs: Any + ) -> tuple[list["CodeInterpreterToolParam"], ToolResources]: + """Generate tool + tool_resources for the code_interpreter.""" + if isinstance(file_ids, str): + file_ids = [file_ids] + tool: "CodeInterpreterToolParam" = {"type": "code_interpreter"} + resources: ToolResources = {} + if file_ids: + resources["code_interpreter"] = ToolResourcesCodeInterpreter(file_ids=file_ids) + return [tool], resources - if code_interpreter_filenames is not None: - for file_path in code_interpreter_filenames: - try: - file_id = await agent.add_file(file_path=file_path, purpose="assistants") - code_interpreter_file_ids_combined.append(file_id) - except FileNotFoundError as ex: - 
logger.error( - f"Failed to upload code interpreter file with path: `{file_path}` with exception: {ex}" + @staticmethod + def configure_file_search_tool( + vector_store_ids: str | list[str], **kwargs: Any + ) -> tuple[list[FileSearchToolParam], ToolResources]: + """Generate tool + tool_resources for the file_search.""" + if isinstance(vector_store_ids, str): + vector_store_ids = [vector_store_ids] + + tool: FileSearchToolParam = { + "type": "file_search", + } + resources: ToolResources = {"file_search": ToolResourcesFileSearch(vector_store_ids=vector_store_ids, **kwargs)} # type: ignore + return [tool], resources + + @staticmethod + def configure_response_format( + response_format: dict[Literal["type"], Literal["text", "json_object"]] + | dict[str, Any] + | type[BaseModel] + | type + | AssistantResponseFormatOptionParam + | None = None, + ) -> AssistantResponseFormatOptionParam | None: + """Form the response format. + + "auto" is the default value. Not configuring the response format will result in the model + outputting text. + + Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + Outputs which ensures the model will match your supplied JSON schema. Learn more + in the [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + + Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + message the model generates is valid JSON, as long as the prompt contains "JSON." + + Args: + response_format: The response format. + + Returns: + AssistantResponseFormatOptionParam: The response format. + """ + if response_format is None or response_format == "auto": + return None + + configured_response_format = None + if isinstance(response_format, dict): + resp_type = response_format.get("type") + if resp_type == "json_object": + configured_response_format = {"type": "json_object"} + elif resp_type == "json_schema": + json_schema = response_format.get("json_schema") # type: ignore + if not isinstance(json_schema, dict): + raise AgentInitializationException( + "If response_format has type 'json_schema', 'json_schema' must be a valid dictionary." ) - raise AgentInitializationException("Failed to upload code interpreter files.", ex) from ex + # We're assuming the response_format has already been provided in the correct format + configured_response_format = response_format # type: ignore + else: + raise AgentInitializationException( + f"Encountered unexpected response_format type: {resp_type}. Allowed types are `json_object` " + " and `json_schema`." 
+ ) + elif isinstance(response_format, type): + # If it's a type, differentiate based on whether it's a BaseModel subclass + if issubclass(response_format, BaseModel): + configured_response_format = type_to_response_format_param(response_format) # type: ignore + else: + generated_schema = KernelJsonSchemaBuilder.build(parameter_type=response_format, structured_output=True) + assert generated_schema is not None # nosec + configured_response_format = generate_structured_output_response_format_schema( + name=response_format.__name__, schema=generated_schema + ) + else: + # If it's not a dict or a type, throw an exception + raise AgentInitializationException( + "response_format must be a dictionary, a subclass of BaseModel, a Python class/type, or None" + ) + + return configured_response_format # type: ignore - if code_interpreter_file_ids_combined: - agent.code_interpreter_file_ids = code_interpreter_file_ids_combined - assistant_create_kwargs["code_interpreter_file_ids"] = code_interpreter_file_ids_combined + # endregion + + # region Agent Channel Methods + + def get_channel_keys(self) -> Iterable[str]: + """Get the channel keys. - vector_store_file_ids_combined: list[str] = [] + Returns: + Iterable[str]: The channel keys. + """ + # Distinguish from other channel types. + yield f"{OpenAIAssistantAgent.__name__}" - if vector_store_file_ids is not None: - vector_store_file_ids_combined.extend(vector_store_file_ids) + # Distinguish between different agent IDs + yield self.id - if vector_store_filenames is not None: - for file_path in vector_store_filenames: - try: - file_id = await agent.add_file(file_path=file_path, purpose="assistants") - vector_store_file_ids_combined.append(file_id) - except FileNotFoundError as ex: - logger.error(f"Failed to upload vector store file with path: `{file_path}` with exception: {ex}") - raise AgentInitializationException("Failed to upload vector store files.", ex) from ex + # Distinguish between agent names + yield self.name - if vector_store_file_ids_combined: - agent.file_search_file_ids = vector_store_file_ids_combined - if enable_file_search or agent.enable_file_search: - vector_store_id = await agent.create_vector_store(file_ids=vector_store_file_ids_combined) - agent.vector_store_id = vector_store_id - assistant_create_kwargs["vector_store_id"] = vector_store_id + # Distinguish between different API base URLs + yield str(self.client.base_url) - agent.assistant = await agent.create_assistant(**assistant_create_kwargs) - return agent + async def create_channel(self) -> AgentChannel: + """Create a channel.""" + thread = await self.client.beta.threads.create() - @staticmethod - def _create_client( - api_key: str | None = None, org_id: str | None = None, default_headers: dict[str, str] | None = None - ) -> AsyncOpenAI: - """An internal method to create the OpenAI client from the provided arguments. + return OpenAIAssistantChannel(client=self.client, thread_id=thread.id) + + # endregion + + # region Message Handling + + async def add_chat_message( + self, thread_id: str, message: "str | ChatMessageContent", **kwargs: Any + ) -> "Message | None": + """Add a chat message to the thread. Args: - api_key: The OpenAI API key. - org_id: The OpenAI organization ID. (optional) - default_headers: The default headers. (optional) + thread_id: The ID of the thread + message: The chat message to add + kwargs: Additional keyword arguments Returns: - An OpenAI client instance. 
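`configure_response_format` converts a Pydantic model (or a plain class) into a structured-output response format that can be passed to `assistants.create` or used as a run-level override. A minimal sketch with a made-up model:

from pydantic import BaseModel
from semantic_kernel.agents import OpenAIAssistantAgent

class Step(BaseModel):
    explanation: str
    output: str

class Reasoning(BaseModel):
    steps: list[Step]
    final_answer: str

# Produces a {"type": "json_schema", ...} response format derived from the model.
response_format = OpenAIAssistantAgent.configure_response_format(Reasoning)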
+ The thread message or None """ - merged_headers = dict(copy(default_headers)) if default_headers else {} - if default_headers: - merged_headers.update(default_headers) - if APP_INFO: - merged_headers.update(APP_INFO) - merged_headers = prepend_semantic_kernel_to_user_agent(merged_headers) + return await AssistantThreadActions.create_message( + client=self.client, thread_id=thread_id, message=message, **kwargs + ) - if not api_key: - raise AgentInitializationException("Please provide an OpenAI api_key") + async def get_thread_messages(self, thread_id: str) -> AsyncIterable["ChatMessageContent"]: + """Get the messages for the specified thread. - return AsyncOpenAI( - api_key=api_key, - organization=org_id, - default_headers=merged_headers, - ) + Args: + thread_id: The thread id. - @staticmethod - def _create_open_ai_settings( - api_key: str | None = None, - org_id: str | None = None, - ai_model_id: str | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None, - ) -> OpenAISettings: - """An internal method to create the OpenAI settings from the provided arguments. + Yields: + ChatMessageContent: The chat message. + """ + agent_names: dict[str, Any] = {} + + thread_messages = await self.client.beta.threads.messages.list(thread_id=thread_id, limit=100, order="desc") + for message in thread_messages.data: + assistant_name = None + if message.assistant_id and message.assistant_id not in agent_names: + agent = await self.client.beta.assistants.retrieve(message.assistant_id) + if agent.name: + agent_names[message.assistant_id] = agent.name + assistant_name = agent_names.get(message.assistant_id) if message.assistant_id else message.assistant_id + assistant_name = assistant_name or message.assistant_id + + content: "ChatMessageContent" = generate_message_content(str(assistant_name), message) + + if len(content.items) > 0: + yield content + + # endregion + + # region Invocation Methods + + @trace_agent_get_response + @override + async def get_response( + self, + thread_id: str, + *, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + # Run-level parameters: + additional_instructions: str | None = None, + additional_messages: list[ChatMessageContent] | None = None, + instructions_override: str | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + metadata: dict[str, str] | None = None, + model: str | None = None, + parallel_tool_calls: bool | None = None, + reasoning_effort: Literal["low", "medium", "high"] | None = None, + response_format: "AssistantResponseFormatOptionParam | None" = None, + tools: "list[AssistantToolParam] | None" = None, + temperature: float | None = None, + top_p: float | None = None, + truncation_strategy: "TruncationStrategy | None" = None, + **kwargs: Any, + ) -> ChatMessageContent: + """Get a response from the agent on a thread. Args: - api_key: The OpenAI API key. - org_id: The OpenAI organization ID. (optional) - ai_model_id: The AI model ID. (optional) - env_file_path: The environment file path. (optional) - env_file_encoding: The environment file encoding. (optional) + thread_id: The ID of the thread. + arguments: The kernel arguments. + kernel: The kernel. + instructions_override: The instructions override. + additional_instructions: Additional instructions. + additional_messages: Additional messages. + max_completion_tokens: The maximum completion tokens. + max_prompt_tokens: The maximum prompt tokens. + metadata: The metadata. + model: The model. 
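The thread helpers stay thin wrappers over the OpenAI client. A sketch of seeding a thread and reading it back, assuming an `agent` constructed as above:

import asyncio

async def show_thread(agent) -> None:
    thread = await agent.client.beta.threads.create()
    await agent.add_chat_message(thread.id, "Give me three facts about otters.")
    await agent.get_response(thread.id)

    # Messages are listed newest-first and yielded as ChatMessageContent.
    async for message in agent.get_thread_messages(thread.id):
        print(f"{message.role}: {message.content}")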
+ parallel_tool_calls: Parallel tool calls. + reasoning_effort: The reasoning effort. + response_format: The response format. + tools: The tools. + temperature: The temperature. + top_p: The top p. + truncation_strategy: The truncation strategy. + kwargs: Additional keyword arguments. Returns: - An OpenAI settings instance. + ChatMessageContent: The response from the agent. """ - try: - openai_settings = OpenAISettings.create( - api_key=api_key, - org_id=org_id, - chat_model_id=ai_model_id, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - ) - except ValidationError as ex: - raise AgentInitializationException("Failed to create OpenAI settings.", ex) from ex + if arguments is None: + arguments = KernelArguments(**kwargs) + else: + arguments.update(kwargs) + + kernel = kernel or self.kernel + arguments = self._merge_arguments(arguments) + + run_level_params = { + "additional_instructions": additional_instructions, + "additional_messages": additional_messages, + "instructions_override": instructions_override, + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "metadata": metadata, + "model": model, + "parallel_tool_calls": parallel_tool_calls, + "reasoning_effort": reasoning_effort, + "response_format": response_format, + "temperature": temperature, + "tools": tools, + "top_p": top_p, + "truncation_strategy": truncation_strategy, + } + run_level_params = {k: v for k, v in run_level_params.items() if v is not None} - return openai_settings + messages: list[ChatMessageContent] = [] + async for is_visible, message in AssistantThreadActions.invoke( + agent=self, + thread_id=thread_id, + kernel=kernel, + arguments=arguments, + **run_level_params, # type: ignore + ): + if is_visible and message.metadata.get("code") is not True: + messages.append(message) + + if not messages: + raise AgentInvokeException("No response messages were returned from the agent.") + return messages[-1] + + @trace_agent_invocation + @override + async def invoke( + self, + thread_id: str, + *, + arguments: KernelArguments | None = None, + kernel: "Kernel | None" = None, + # Run-level parameters: + additional_instructions: str | None = None, + additional_messages: list[ChatMessageContent] | None = None, + instructions_override: str | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + metadata: dict[str, str] | None = None, + model: str | None = None, + parallel_tool_calls: bool | None = None, + reasoning_effort: Literal["low", "medium", "high"] | None = None, + response_format: "AssistantResponseFormatOptionParam | None" = None, + tools: "list[AssistantToolParam] | None" = None, + temperature: float | None = None, + top_p: float | None = None, + truncation_strategy: "TruncationStrategy | None" = None, + **kwargs: Any, + ) -> AsyncIterable[ChatMessageContent]: + """Invoke the agent. - async def list_definitions(self) -> AsyncIterable[dict[str, Any]]: - """List the assistant definitions. + Args: + thread_id: The ID of the thread. + arguments: The kernel arguments. + kernel: The kernel. + instructions_override: The instructions override. + additional_instructions: Additional instructions. + additional_messages: Additional messages. + max_completion_tokens: The maximum completion tokens. + max_prompt_tokens: The maximum prompt tokens. + metadata: The metadata. + model: The model. + parallel_tool_calls: Parallel tool calls. + reasoning_effort: The reasoning effort. + response_format: The response format. 
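Run-level parameters passed to `get_response` (or `invoke`) override the assistant's defaults for that single run; `None` values are filtered out before being forwarded. A sketch assuming `agent` and `thread` from the earlier snippets; the model override is a placeholder:

response = await agent.get_response(
    thread_id=thread.id,
    temperature=0.2,
    model="gpt-4o-mini",  # placeholder run-level model override
    additional_instructions="Answer in one sentence.",
)
print(response.content)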
+ tools: The tools. + temperature: The temperature. + top_p: The top p. + truncation_strategy: The truncation strategy. + kwargs: Additional keyword arguments. Yields: - An AsyncIterable of dictionaries representing the OpenAIAssistantDefinition. + The chat message content. """ - assistants = await self.client.beta.assistants.list(order="desc") - for assistant in assistants.data: - yield OpenAIAssistantBase._create_open_ai_assistant_definition(assistant) + if arguments is None: + arguments = KernelArguments(**kwargs) + else: + arguments.update(kwargs) + + kernel = kernel or self.kernel + arguments = self._merge_arguments(arguments) + + run_level_params = { + "additional_instructions": additional_instructions, + "additional_messages": additional_messages, + "instructions_override": instructions_override, + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "metadata": metadata, + "model": model, + "parallel_tool_calls": parallel_tool_calls, + "reasoning_effort": reasoning_effort, + "response_format": response_format, + "temperature": temperature, + "tools": tools, + "top_p": top_p, + "truncation_strategy": truncation_strategy, + } + run_level_params = {k: v for k, v in run_level_params.items() if v is not None} - @classmethod - async def retrieve( - cls, + async for is_visible, message in AssistantThreadActions.invoke( + agent=self, + thread_id=thread_id, + kernel=kernel, + arguments=arguments, + **run_level_params, # type: ignore + ): + if is_visible: + yield message + + @trace_agent_invocation + @override + async def invoke_stream( + self, + thread_id: str, *, - id: str, + arguments: KernelArguments | None = None, kernel: "Kernel | None" = None, - api_key: str | None = None, - org_id: str | None = None, - ai_model_id: str | None = None, - client: AsyncOpenAI | None = None, - default_headers: dict[str, str] | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None, - ) -> "OpenAIAssistantAgent": - """Retrieve an assistant by ID. + # Run-level parameters: + additional_instructions: str | None = None, + additional_messages: list[ChatMessageContent] | None = None, + instructions_override: str | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + messages: list[ChatMessageContent] | None = None, + metadata: dict[str, str] | None = None, + model: str | None = None, + parallel_tool_calls: bool | None = None, + reasoning_effort: Literal["low", "medium", "high"] | None = None, + response_format: "AssistantResponseFormatOptionParam | None" = None, + tools: "list[AssistantToolParam] | None" = None, + temperature: float | None = None, + top_p: float | None = None, + truncation_strategy: "TruncationStrategy | None" = None, + **kwargs: Any, + ) -> AsyncIterable["StreamingChatMessageContent"]: + """Invoke the agent. Args: - id: The assistant ID. - kernel: The Kernel instance. (optional) - api_key: The OpenAI API key. (optional) - org_id: The OpenAI organization ID. (optional) - ai_model_id: The AI model ID. (optional) - client: The OpenAI client. (optional) - default_headers: The default headers. (optional) - env_file_path: The environment file path. (optional) - env_file_encoding: The environment file encoding. (optional + thread_id: The ID of the thread. + arguments: The kernel arguments. + kernel: The kernel. + instructions_override: The instructions override. + additional_instructions: Additional instructions. + additional_messages: Additional messages. 
+ max_completion_tokens: The maximum completion tokens. + max_prompt_tokens: The maximum prompt tokens. + messages: The messages that act as a receiver for completed messages. + metadata: The metadata. + model: The model. + parallel_tool_calls: Parallel tool calls. + reasoning_effort: The reasoning effort. + response_format: The response format. + tools: The tools. + temperature: The temperature. + top_p: The top p. + truncation_strategy: The truncation strategy. + kwargs: Additional keyword arguments. - Returns: - An OpenAIAssistantAgent instance. + Yields: + The chat message content. """ - openai_settings = OpenAIAssistantAgent._create_open_ai_settings( - api_key=api_key, - org_id=org_id, - ai_model_id=ai_model_id, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - ) - if not client and not openai_settings.api_key: - raise AgentInitializationException("The OpenAI API key is required, if a client is not provided.") - if not openai_settings.chat_model_id: - raise AgentInitializationException("The OpenAI chat model ID is required.") - if not client: - client = OpenAIAssistantAgent._create_client( - api_key=openai_settings.api_key.get_secret_value() if openai_settings.api_key else None, - org_id=openai_settings.org_id, - default_headers=default_headers, - ) - assistant = await client.beta.assistants.retrieve(id) - assistant_definition = OpenAIAssistantBase._create_open_ai_assistant_definition(assistant) - return OpenAIAssistantAgent( + if arguments is None: + arguments = KernelArguments(**kwargs) + else: + arguments.update(kwargs) + + kernel = kernel or self.kernel + arguments = self._merge_arguments(arguments) + + run_level_params = { + "additional_instructions": additional_instructions, + "additional_messages": additional_messages, + "instructions_override": instructions_override, + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "metadata": metadata, + "model": model, + "parallel_tool_calls": parallel_tool_calls, + "reasoning_effort": reasoning_effort, + "response_format": response_format, + "temperature": temperature, + "tools": tools, + "top_p": top_p, + "truncation_strategy": truncation_strategy, + } + run_level_params = {k: v for k, v in run_level_params.items() if v is not None} + + async for message in AssistantThreadActions.invoke_stream( + agent=self, + thread_id=thread_id, kernel=kernel, - assistant=assistant, - client=client, - api_key=api_key, - default_headers=default_headers, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - **assistant_definition, - ) + arguments=arguments, + messages=messages, + **run_level_params, # type: ignore + ): + yield message # endregion diff --git a/python/semantic_kernel/agents/open_ai/open_ai_assistant_base.py b/python/semantic_kernel/agents/open_ai/open_ai_assistant_base.py deleted file mode 100644 index 3b072043751c..000000000000 --- a/python/semantic_kernel/agents/open_ai/open_ai_assistant_base.py +++ /dev/null @@ -1,1300 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
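# A minimal usage sketch of the reworked invocation surface above: run-level
# options are now plain keyword arguments on get_response / invoke / invoke_stream
# rather than a separate options object. `agent` is assumed to be an
# OpenAIAssistantAgent created elsewhere and `thread_id` an existing thread id;
# both names are placeholders for illustration.
async def run_once(agent, thread_id: str) -> None:
    # Run-level parameters override the assistant-level defaults for this run only;
    # anything left as None is filtered out before the run is created.
    response = await agent.get_response(
        thread_id,
        additional_instructions="Answer in one short sentence.",
        temperature=0.2,
        max_completion_tokens=256,
    )
    print(response.content)

    # Streaming variant: yields StreamingChatMessageContent chunks as they arrive.
    async for chunk in agent.invoke_stream(thread_id, temperature=0.2):
        print(chunk.content, end="")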
- -import asyncio -import json -import logging -from collections.abc import AsyncIterable, Iterable -from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Literal - -from openai import AsyncOpenAI -from openai.resources.beta.assistants import Assistant -from openai.resources.beta.threads.messages import Message -from openai.resources.beta.threads.runs.runs import Run -from openai.types.beta.assistant_tool import CodeInterpreterTool, FileSearchTool -from openai.types.beta.threads.runs import RunStep -from pydantic import Field - -from semantic_kernel.agents import Agent -from semantic_kernel.agents.channels.agent_channel import AgentChannel -from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel -from semantic_kernel.agents.open_ai.assistant_content_generation import ( - create_chat_message, - generate_code_interpreter_content, - generate_function_call_content, - generate_function_result_content, - generate_message_content, - generate_streaming_code_interpreter_content, - generate_streaming_function_content, - generate_streaming_message_content, - get_function_call_contents, - get_message_contents, -) -from semantic_kernel.agents.open_ai.function_action_result import FunctionActionResult -from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions -from semantic_kernel.connectors.ai.function_calling_utils import ( - kernel_function_metadata_to_function_call_format, - merge_function_results, -) -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions.agent_exceptions import ( - AgentExecutionException, - AgentFileNotFoundException, - AgentInitializationException, - AgentInvokeException, -) -from semantic_kernel.utils.experimental_decorator import experimental_class -from semantic_kernel.utils.telemetry.agent_diagnostics.decorators import trace_agent_invocation - -if TYPE_CHECKING: - from semantic_kernel.contents.chat_history import ChatHistory - from semantic_kernel.contents.chat_message_content import ChatMessageContent - from semantic_kernel.contents.function_call_content import FunctionCallContent - from semantic_kernel.kernel import Kernel - -logger: logging.Logger = logging.getLogger(__name__) - - -@experimental_class -class OpenAIAssistantBase(Agent): - """OpenAI Assistant Base class. - - Manages the interaction with OpenAI Assistants. - """ - - _options_metadata_key: ClassVar[str] = "__run_options" - - ai_model_id: str - client: AsyncOpenAI - assistant: Assistant | None = None - polling_options: RunPollingOptions = Field(default_factory=RunPollingOptions) - enable_code_interpreter: bool | None = False - enable_file_search: bool | None = False - enable_json_response: bool | None = False - code_interpreter_file_ids: Annotated[list[str] | None, Field(max_length=20)] = Field(default_factory=list) # type: ignore - file_search_file_ids: Annotated[ - list[str] | None, - Field( - description="There is a limit of 10000 files when using Azure Assistants API, " - "the OpenAI docs state no limit, hence this is not checked." 
- ), - ] = Field(default_factory=list) # type: ignore - temperature: float | None = None - top_p: float | None = None - vector_store_id: str | None = None - metadata: Annotated[dict[str, Any] | None, Field(max_length=20)] = Field(default_factory=dict) # type: ignore - max_completion_tokens: int | None = None - max_prompt_tokens: int | None = None - parallel_tool_calls_enabled: bool | None = True - truncation_message_count: int | None = None - - allowed_message_roles: ClassVar[list[str]] = [AuthorRole.USER, AuthorRole.ASSISTANT] - polling_status: ClassVar[list[str]] = ["queued", "in_progress", "cancelling"] - error_message_states: ClassVar[list[str]] = ["failed", "cancelled", "expired", "incomplete"] - - channel_type: ClassVar[type[AgentChannel]] = OpenAIAssistantChannel - - _is_deleted: bool = False - - # region Assistant Initialization - - def __init__( - self, - ai_model_id: str, - client: AsyncOpenAI, - service_id: str, - *, - kernel: "Kernel | None" = None, - id: str | None = None, - name: str | None = None, - description: str | None = None, - instructions: str | None = None, - enable_code_interpreter: bool | None = None, - enable_file_search: bool | None = None, - enable_json_response: bool | None = None, - code_interpreter_file_ids: list[str] | None = None, - temperature: float | None = None, - top_p: float | None = None, - vector_store_id: str | None = None, - metadata: dict[str, Any] | None = None, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - parallel_tool_calls_enabled: bool | None = True, - truncation_message_count: int | None = None, - **kwargs: Any, - ) -> None: - """Initialize an OpenAIAssistant Base. - - Args: - ai_model_id: The AI model id. Defaults to None. - client: The client, either AsyncOpenAI or AsyncAzureOpenAI. - service_id: The service id. - kernel: The kernel. (optional) - id: The id. Defaults to None. (optional) - name: The name. Defaults to None. (optional) - description: The description. Defaults to None. (optional) - default_headers: The default headers. Defaults to None. (optional) - instructions: The instructions. Defaults to None. (optional) - enable_code_interpreter: Enable code interpreter. Defaults to False. (optional) - enable_file_search: Enable file search. Defaults to False. (optional) - enable_json_response: Enable JSON response. Defaults to False. (optional) - code_interpreter_file_ids: The file ids. Defaults to []. (optional) - temperature: The temperature. Defaults to None. (optional) - top_p: The top p. Defaults to None. (optional) - vector_store_id: The vector store id. Defaults to None. (optional) - metadata: The metadata. Defaults to {}. (optional) - max_completion_tokens: The max completion tokens. Defaults to None. (optional) - max_prompt_tokens: The max prompt tokens. Defaults to None. (optional) - parallel_tool_calls_enabled: Enable parallel tool calls. Defaults to True. (optional) - truncation_message_count: The truncation message count. Defaults to None. (optional) - kwargs: The keyword arguments. 
- """ - args: dict[str, Any] = {} - - args = { - "ai_model_id": ai_model_id, - "client": client, - "service_id": service_id, - "instructions": instructions, - "description": description, - "enable_code_interpreter": enable_code_interpreter, - "enable_file_search": enable_file_search, - "enable_json_response": enable_json_response, - "code_interpreter_file_ids": code_interpreter_file_ids or [], - "temperature": temperature, - "top_p": top_p, - "vector_store_id": vector_store_id, - "metadata": metadata or {}, - "max_completion_tokens": max_completion_tokens, - "max_prompt_tokens": max_prompt_tokens, - "parallel_tool_calls_enabled": parallel_tool_calls_enabled, - "truncation_message_count": truncation_message_count, - } - - if name is not None: - args["name"] = name - if id is not None: - args["id"] = id - if kernel is not None: - args["kernel"] = kernel - if kwargs: - args.update(kwargs) - - super().__init__(**args) - - async def create_assistant( - self, - ai_model_id: str | None = None, - description: str | None = None, - instructions: str | None = None, - name: str | None = None, - enable_code_interpreter: bool | None = None, - code_interpreter_file_ids: list[str] | None = None, - enable_file_search: bool | None = None, - vector_store_id: str | None = None, - metadata: dict[str, str] | None = None, - **kwargs: Any, - ) -> "Assistant": - """Create the assistant. - - Args: - ai_model_id: The AI model id. Defaults to None. (optional) - description: The description. Defaults to None. (optional) - instructions: The instructions. Defaults to None. (optional) - name: The name. Defaults to None. (optional) - enable_code_interpreter: Enable code interpreter. Defaults to None. (optional) - enable_file_search: Enable file search. Defaults to None. (optional) - code_interpreter_file_ids: The file ids. Defaults to None. (optional) - vector_store_id: The vector store id. Defaults to None. (optional) - metadata: The metadata. Defaults to None. (optional) - kwargs: Extra keyword arguments. 
- - Returns: - Assistant: The assistant - """ - create_assistant_kwargs: dict[str, Any] = {} - - if ai_model_id is not None: - create_assistant_kwargs["model"] = ai_model_id - elif self.ai_model_id: - create_assistant_kwargs["model"] = self.ai_model_id - - if description is not None: - create_assistant_kwargs["description"] = description - elif self.description: - create_assistant_kwargs["description"] = self.description - - if instructions is not None: - create_assistant_kwargs["instructions"] = instructions - elif self.instructions: - create_assistant_kwargs["instructions"] = self.instructions - - if name is not None: - create_assistant_kwargs["name"] = name - elif self.name: - create_assistant_kwargs["name"] = self.name - - tools = [] - if enable_code_interpreter is not None: - if enable_code_interpreter: - tools.append({"type": "code_interpreter"}) - elif self.enable_code_interpreter: - tools.append({"type": "code_interpreter"}) - - if enable_file_search is not None: - if enable_file_search: - tools.append({"type": "file_search"}) - elif self.enable_file_search: - tools.append({"type": "file_search"}) - - if tools: - create_assistant_kwargs["tools"] = tools - - tool_resources = {} - if code_interpreter_file_ids is not None: - tool_resources["code_interpreter"] = {"file_ids": code_interpreter_file_ids} - elif self.code_interpreter_file_ids: - tool_resources["code_interpreter"] = {"file_ids": self.code_interpreter_file_ids} - - if vector_store_id is not None: - tool_resources["file_search"] = {"vector_store_ids": [vector_store_id]} - elif self.vector_store_id: - tool_resources["file_search"] = {"vector_store_ids": [self.vector_store_id]} - - if tool_resources: - create_assistant_kwargs["tool_resources"] = tool_resources - - if metadata: - create_assistant_kwargs["metadata"] = metadata - elif self.metadata: - create_assistant_kwargs["metadata"] = self.metadata - - if kwargs: - create_assistant_kwargs.update(kwargs) - - execution_settings: dict[str, Any] = {} - if self.max_completion_tokens: - execution_settings["max_completion_tokens"] = self.max_completion_tokens - - if self.max_prompt_tokens: - execution_settings["max_prompt_tokens"] = self.max_prompt_tokens - - if self.top_p is not None: - execution_settings["top_p"] = self.top_p - create_assistant_kwargs["top_p"] = self.top_p - - if self.temperature is not None: - execution_settings["temperature"] = self.temperature - create_assistant_kwargs["temperature"] = self.temperature - - if self.parallel_tool_calls_enabled: - execution_settings["parallel_tool_calls_enabled"] = self.parallel_tool_calls_enabled - - if self.truncation_message_count: - execution_settings["truncation_message_count"] = self.truncation_message_count - - if execution_settings: - if "metadata" not in create_assistant_kwargs: - create_assistant_kwargs["metadata"] = {} - if self._options_metadata_key not in create_assistant_kwargs["metadata"]: - create_assistant_kwargs["metadata"][self._options_metadata_key] = {} - create_assistant_kwargs["metadata"][self._options_metadata_key] = json.dumps(execution_settings) - - self.assistant = await self.client.beta.assistants.create( - **create_assistant_kwargs, - ) - - if self._is_deleted: - self._is_deleted = False - - return self.assistant - - async def modify_assistant(self, assistant_id: str, **kwargs: Any) -> "Assistant": - """Modify the assistant. - - Args: - assistant_id: The assistant's current ID. - kwargs: Extra keyword arguments. - - Returns: - Assistant: The modified assistant. 
- """ - if self.assistant is None: - raise AgentInitializationException("The assistant has not been created.") - - modified_assistant = await self.client.beta.assistants.update(assistant_id=assistant_id, **kwargs) - self.assistant = modified_assistant - return self.assistant - - @classmethod - def _create_open_ai_assistant_definition(cls, assistant: "Assistant") -> dict[str, Any]: - """Create an OpenAI Assistant Definition from the provided assistant dictionary. - - Args: - assistant: The assistant dictionary. - - Returns: - An OpenAI Assistant Definition. - """ - execution_settings = {} - if isinstance(assistant.metadata, dict) and OpenAIAssistantBase._options_metadata_key in assistant.metadata: - settings_data = assistant.metadata[OpenAIAssistantBase._options_metadata_key] - if isinstance(settings_data, str): - settings_data = json.loads(settings_data) - assistant.metadata[OpenAIAssistantBase._options_metadata_key] = settings_data - execution_settings = {key: value for key, value in settings_data.items()} - - file_ids: list[str] = [] - vector_store_id = None - - tool_resources = getattr(assistant, "tool_resources", None) - if tool_resources: - if hasattr(tool_resources, "code_interpreter") and tool_resources.code_interpreter: - file_ids = getattr(tool_resources.code_interpreter, "code_interpreter_file_ids", []) - - if hasattr(tool_resources, "file_search") and tool_resources.file_search: - vector_store_ids = getattr(tool_resources.file_search, "vector_store_ids", []) - if vector_store_ids: - vector_store_id = vector_store_ids[0] - - enable_json_response = ( - hasattr(assistant, "response_format") - and assistant.response_format is not None - and getattr(assistant.response_format, "type", "") == "json_object" - ) - - enable_code_interpreter = any(isinstance(tool, CodeInterpreterTool) for tool in assistant.tools) - enable_file_search = any(isinstance(tool, FileSearchTool) for tool in assistant.tools) - - return { - "ai_model_id": assistant.model, - "description": assistant.description, - "id": assistant.id, - "instructions": assistant.instructions, - "name": assistant.name, - "enable_code_interpreter": enable_code_interpreter, - "enable_file_search": enable_file_search, - "enable_json_response": enable_json_response, - "code_interpreter_file_ids": file_ids, - "temperature": assistant.temperature, - "top_p": assistant.top_p, - "vector_store_id": vector_store_id if vector_store_id else None, - "metadata": assistant.metadata, - **execution_settings, - } - - # endregion - - # region Agent Properties - - @property - def tools(self) -> list[dict[str, str]]: - """The tools. - - Returns: - list[dict[str, str]]: The tools. - """ - if self.assistant is None: - raise AgentInitializationException("The assistant has not been created.") - return self._get_tools() - - # endregion - - # region Agent Channel Methods - - def get_channel_keys(self) -> Iterable[str]: - """Get the channel keys. - - Returns: - Iterable[str]: The channel keys. - """ - # Distinguish from other channel types. 
- yield f"{OpenAIAssistantBase.__name__}" - - # Distinguish between different agent IDs - yield self.id - - # Distinguish between agent names - yield self.name - - # Distinguish between different API base URLs - yield str(self.client.base_url) - - async def create_channel(self) -> AgentChannel: - """Create a channel.""" - thread_id = await self.create_thread() - - return OpenAIAssistantChannel(client=self.client, thread_id=thread_id) - - # endregion - - # region Agent Methods - - async def create_thread( - self, - *, - code_interpreter_file_ids: list[str] | None = [], - messages: list["ChatMessageContent"] | None = [], - vector_store_id: str | None = None, - metadata: dict[str, str] = {}, - ) -> str: - """Create a thread. - - Args: - code_interpreter_file_ids: The code interpreter file ids. Defaults to an empty list. (optional) - messages: The chat messages. Defaults to an empty list. (optional) - vector_store_id: The vector store id. Defaults to None. (optional) - metadata: The metadata. Defaults to an empty dictionary. (optional) - - Returns: - str: The thread id. - """ - create_thread_kwargs: dict[str, Any] = {} - - tool_resources = {} - - if code_interpreter_file_ids: - tool_resources["code_interpreter"] = {"file_ids": code_interpreter_file_ids} - - if vector_store_id: - tool_resources["file_search"] = {"vector_store_ids": [vector_store_id]} - - if tool_resources: - create_thread_kwargs["tool_resources"] = tool_resources - - if messages: - messages_to_add = [] - for message in messages: - if message.role.value not in self.allowed_message_roles: - raise AgentExecutionException( - f"Invalid message role `{message.role.value}`. Allowed roles are {self.allowed_message_roles}." - ) - message_contents = get_message_contents(message=message) - for content in message_contents: - messages_to_add.append({"role": message.role.value, "content": content}) - create_thread_kwargs["messages"] = messages_to_add - - if metadata: - create_thread_kwargs["metadata"] = metadata - - thread = await self.client.beta.threads.create(**create_thread_kwargs) - return thread.id - - async def delete_thread(self, thread_id: str) -> None: - """Delete a thread. - - Args: - thread_id: The thread id. - """ - await self.client.beta.threads.delete(thread_id) - - async def delete(self) -> bool: - """Delete the assistant. - - Returns: - bool: True if the assistant is deleted. - """ - if not self._is_deleted and self.assistant: - await self.client.beta.assistants.delete(self.assistant.id) - self._is_deleted = True - return self._is_deleted - - async def add_chat_message(self, thread_id: str, message: "ChatMessageContent") -> "Message": - """Add a chat message. - - Args: - thread_id: The thread id. - message: The chat message. - - Returns: - Message: The message. - """ - return await create_chat_message(self.client, thread_id, message, self.allowed_message_roles) - - async def get_thread_messages(self, thread_id: str) -> AsyncIterable["ChatMessageContent"]: - """Get the messages for the specified thread. - - Args: - thread_id: The thread id. - - Yields: - ChatMessageContent: The chat message. 
- """ - agent_names: dict[str, Any] = {} - - thread_messages = await self.client.beta.threads.messages.list(thread_id=thread_id, limit=100, order="desc") - for message in thread_messages.data: - assistant_name = None - if message.assistant_id and message.assistant_id not in agent_names: - agent = await self.client.beta.assistants.retrieve(message.assistant_id) - if agent.name: - agent_names[message.assistant_id] = agent.name - assistant_name = agent_names.get(message.assistant_id) if message.assistant_id else message.assistant_id - assistant_name = assistant_name or message.assistant_id - - content: "ChatMessageContent" = generate_message_content(str(assistant_name), message) - - if len(content.items) > 0: - yield content - - async def add_file(self, file_path: str, purpose: Literal["assistants", "vision"]) -> str: - """Add a file for use with the Assistant. - - Args: - file_path: The file path. - purpose: The purpose. Can be "assistants" or "vision". - - Returns: - str: The file id. - - Raises: - AgentInitializationError: If the client has not been initialized or the file is not found. - """ - try: - with open(file_path, "rb") as file: - file = await self.client.files.create(file=file, purpose=purpose) # type: ignore - return file.id # type: ignore - except FileNotFoundError as ex: - raise AgentFileNotFoundException(f"File not found: {file_path}") from ex - - async def delete_file(self, file_id: str) -> None: - """Delete a file. - - Args: - file_id: The file id. - """ - try: - await self.client.files.delete(file_id) - except Exception as ex: - raise AgentExecutionException("Error deleting file.") from ex - - async def create_vector_store(self, file_ids: str | list[str]) -> str: - """Create a vector store. - - Args: - file_ids: The file ids either as a str of a single file ID or a list of strings of file IDs. - - Returns: - The vector store id. - - Raises: - AgentExecutionError: If there is an error creating the vector store. - """ - if isinstance(file_ids, str): - file_ids = [file_ids] - try: - vector_store = await self.client.beta.vector_stores.create(file_ids=file_ids) - return vector_store.id - except Exception as ex: - raise AgentExecutionException("Error creating vector store.") from ex - - async def delete_vector_store(self, vector_store_id: str) -> None: - """Delete a vector store. - - Args: - vector_store_id: The vector store id. - - Raises: - AgentExecutionError: If there is an error deleting the vector store. - """ - try: - await self.client.beta.vector_stores.delete(vector_store_id) - except Exception as ex: - raise AgentExecutionException("Error deleting vector store.") from ex - - # endregion - - # region Agent Invoke Methods - - @trace_agent_invocation - async def invoke( - self, - thread_id: str, - *, - ai_model_id: str | None = None, - enable_code_interpreter: bool | None = False, - enable_file_search: bool | None = False, - enable_json_response: bool | None = None, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - parallel_tool_calls_enabled: bool | None = True, - truncation_message_count: int | None = None, - temperature: float | None = None, - top_p: float | None = None, - metadata: dict[str, str] | None = None, - **kwargs: Any, - ) -> AsyncIterable["ChatMessageContent"]: - """Invoke the chat assistant. - - The supplied arguments will take precedence over the specified assistant level attributes. - - Args: - thread_id: The thread id. - ai_model_id: The AI model id. Defaults to None. 
(optional) - enable_code_interpreter: Enable code interpreter. Defaults to False. (optional) - enable_file_search: Enable file search. Defaults to False. (optional) - enable_json_response: Enable JSON response. Defaults to False. (optional) - max_completion_tokens: The max completion tokens. Defaults to None. (optional) - max_prompt_tokens: The max prompt tokens. Defaults to None. (optional) - parallel_tool_calls_enabled: Enable parallel tool calls. Defaults to True. (optional) - truncation_message_count: The truncation message count. Defaults to None. (optional) - temperature: The temperature. Defaults to None. (optional) - top_p: The top p. Defaults to None. (optional) - metadata: The metadata. Defaults to {}. (optional) - kwargs: Extra keyword arguments. - - Yields: - ChatMessageContent: The chat message content. - """ - async for is_visible, content in self._invoke_internal( - thread_id=thread_id, - ai_model_id=ai_model_id, - enable_code_interpreter=enable_code_interpreter, - enable_file_search=enable_file_search, - enable_json_response=enable_json_response, - max_completion_tokens=max_completion_tokens, - max_prompt_tokens=max_prompt_tokens, - parallel_tool_calls_enabled=parallel_tool_calls_enabled, - truncation_message_count=truncation_message_count, - temperature=temperature, - top_p=top_p, - metadata=metadata, - kwargs=kwargs, - ): - if is_visible: - yield content - - async def _invoke_internal( - self, - thread_id: str, - *, - ai_model_id: str | None = None, - enable_code_interpreter: bool | None = False, - enable_file_search: bool | None = False, - enable_json_response: bool | None = None, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - parallel_tool_calls_enabled: bool | None = True, - truncation_message_count: int | None = None, - temperature: float | None = None, - top_p: float | None = None, - metadata: dict[str, str] | None = None, - **kwargs: Any, - ) -> AsyncIterable[tuple[bool, "ChatMessageContent"]]: - """Internal invoke method. - - The supplied arguments will take precedence over the specified assistant level attributes. - - Args: - thread_id: The thread id. - ai_model_id: The AI model id. Defaults to None. (optional) - enable_code_interpreter: Enable code interpreter. Defaults to False. (optional) - enable_file_search: Enable file search. Defaults to False. (optional) - enable_json_response: Enable JSON response. Defaults to False. (optional) - max_completion_tokens: The max completion tokens. Defaults to None. (optional) - max_prompt_tokens: The max prompt tokens. Defaults to None. (optional) - parallel_tool_calls_enabled: Enable parallel tool calls. Defaults to True. (optional) - truncation_message_count: The truncation message count. Defaults to None. (optional) - temperature: The temperature. Defaults to None. (optional) - top_p: The top p. Defaults to None. (optional) - metadata: The metadata. Defaults to {}. (optional) - kwargs: Extra keyword arguments. - - Yields: - tuple[bool, ChatMessageContent]: A tuple of visibility and chat message content. 
- """ - if not self.assistant: - raise AgentInitializationException("The assistant has not been created.") - - if self._is_deleted: - raise AgentInitializationException("The assistant has been deleted.") - - if metadata is None: - metadata = {} - - self._check_if_deleted() - tools = self._get_tools() - - run_options = self._generate_options( - ai_model_id=ai_model_id, - enable_code_interpreter=enable_code_interpreter, - enable_file_search=enable_file_search, - enable_json_response=enable_json_response, - max_completion_tokens=max_completion_tokens, - max_prompt_tokens=max_prompt_tokens, - parallel_tool_calls_enabled=parallel_tool_calls_enabled, - truncation_message_count=truncation_message_count, - temperature=temperature, - top_p=top_p, - metadata=metadata, - kwargs=kwargs, - ) - - # Filter out None values to avoid passing them as kwargs - run_options = {k: v for k, v in run_options.items() if v is not None} - - logger.debug(f"Starting invoke for agent `{self.name}` and thread `{thread_id}`") - - run = await self.client.beta.threads.runs.create( - assistant_id=self.assistant.id, - thread_id=thread_id, - instructions=self.assistant.instructions, - tools=tools, # type: ignore - **run_options, - ) - - processed_step_ids = set() - function_steps: dict[str, "FunctionCallContent"] = {} - - while run.status != "completed": - run = await self._poll_run_status(run=run, thread_id=thread_id) - - if run.status in self.error_message_states: - error_message = "" - if run.last_error and run.last_error.message: - error_message = run.last_error.message - raise AgentInvokeException( - f"Run failed with status: `{run.status}` for agent `{self.name}` and thread `{thread_id}` " - f"with error: {error_message}" - ) - - # Check if function calling required - if run.status == "requires_action": - logger.debug(f"Run [{run.id}] requires action for agent `{self.name}` and thread `{thread_id}`") - fccs = get_function_call_contents(run, function_steps) - if fccs: - logger.debug( - f"Yielding `generate_function_call_content` for agent `{self.name}` and " - f"thread `{thread_id}`, visibility False" - ) - yield False, generate_function_call_content(agent_name=self.name, fccs=fccs) - - from semantic_kernel.contents.chat_history import ChatHistory - - chat_history = ChatHistory() - _ = await self._invoke_function_calls(fccs=fccs, chat_history=chat_history) - - tool_outputs = self._format_tool_outputs(fccs, chat_history) - await self.client.beta.threads.runs.submit_tool_outputs( - run_id=run.id, - thread_id=thread_id, - tool_outputs=tool_outputs, # type: ignore - ) - logger.debug(f"Submitted tool outputs for agent `{self.name}` and thread `{thread_id}`") - - steps_response = await self.client.beta.threads.runs.steps.list(run_id=run.id, thread_id=thread_id) - logger.debug(f"Called for steps_response for run [{run.id}] agent `{self.name}` and thread `{thread_id}`") - steps: list[RunStep] = steps_response.data - - def sort_key(step: RunStep): - # Put tool_calls first, then message_creation - # If multiple steps share a type, break ties by completed_at - return (0 if step.type == "tool_calls" else 1, step.completed_at) - - completed_steps_to_process = sorted( - [s for s in steps if s.completed_at is not None and s.id not in processed_step_ids], key=sort_key - ) - - logger.debug( - f"Completed steps to process for run [{run.id}] agent `{self.name}` and thread `{thread_id}` " - f"with length `{len(completed_steps_to_process)}`" - ) - - message_count = 0 - for completed_step in completed_steps_to_process: - if completed_step.type 
== "tool_calls": - logger.debug( - f"Entering step type tool_calls for run [{run.id}], agent `{self.name}` and " - f"thread `{thread_id}`" - ) - assert hasattr(completed_step.step_details, "tool_calls") # nosec - for tool_call in completed_step.step_details.tool_calls: - is_visible = False - content: "ChatMessageContent | None" = None - if tool_call.type == "code_interpreter": - logger.debug( - f"Entering step type tool_calls for run [{run.id}], [code_interpreter] for " - f"agent `{self.name}` and thread `{thread_id}`" - ) - content = generate_code_interpreter_content( - self.name, - tool_call.code_interpreter.input, # type: ignore - ) - is_visible = True - elif tool_call.type == "function": - logger.debug( - f"Entering step type tool_calls for run [{run.id}], [function] for agent `{self.name}` " - f"and thread `{thread_id}`" - ) - function_step = function_steps.get(tool_call.id) - assert function_step is not None # nosec - content = generate_function_result_content( - agent_name=self.name, function_step=function_step, tool_call=tool_call - ) - - if content: - message_count += 1 - logger.debug( - f"Yielding tool_message for run [{run.id}], agent `{self.name}` and thread " - f"`{thread_id}` and message count `{message_count}`, is_visible `{is_visible}`" - ) - yield is_visible, content - elif completed_step.type == "message_creation": - logger.debug( - f"Entering step type message_creation for run [{run.id}], agent `{self.name}` and " - f"thread `{thread_id}`" - ) - message = await self._retrieve_message( - thread_id=thread_id, - message_id=completed_step.step_details.message_creation.message_id, # type: ignore - ) - if message: - content = generate_message_content(self.name, message) - if content and len(content.items) > 0: - message_count += 1 - logger.debug( - f"Yielding message_creation for run [{run.id}], agent `{self.name}` and " - f"thread `{thread_id}` and message count `{message_count}`, is_visible `{True}`" - ) - yield True, content - processed_step_ids.add(completed_step.id) - - @trace_agent_invocation - async def invoke_stream( - self, - thread_id: str, - *, - messages: list["ChatMessageContent"] | None = None, - ai_model_id: str | None = None, - enable_code_interpreter: bool | None = False, - enable_file_search: bool | None = False, - enable_json_response: bool | None = None, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - parallel_tool_calls_enabled: bool | None = True, - truncation_message_count: int | None = None, - temperature: float | None = None, - top_p: float | None = None, - metadata: dict[str, str] | None = None, - **kwargs: Any, - ) -> AsyncIterable["ChatMessageContent"]: - """Invoke the chat assistant with streaming.""" - async for content in self._invoke_internal_stream( - thread_id=thread_id, - messages=messages, - ai_model_id=ai_model_id, - enable_code_interpreter=enable_code_interpreter, - enable_file_search=enable_file_search, - enable_json_response=enable_json_response, - max_completion_tokens=max_completion_tokens, - max_prompt_tokens=max_prompt_tokens, - parallel_tool_calls_enabled=parallel_tool_calls_enabled, - truncation_message_count=truncation_message_count, - temperature=temperature, - top_p=top_p, - metadata=metadata, - **kwargs, - ): - yield content - - async def _invoke_internal_stream( - self, - thread_id: str, - *, - messages: list["ChatMessageContent"] | None = None, - ai_model_id: str | None = None, - enable_code_interpreter: bool | None = False, - enable_file_search: bool | None = False, - 
enable_json_response: bool | None = None, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - parallel_tool_calls_enabled: bool | None = True, - truncation_message_count: int | None = None, - temperature: float | None = None, - top_p: float | None = None, - metadata: dict[str, str] | None = None, - **kwargs: Any, - ) -> AsyncIterable["ChatMessageContent"]: - """Internal invoke method with streaming.""" - if not self.assistant: - raise AgentInitializationException("The assistant has not been created.") - - if self._is_deleted: - raise AgentInitializationException("The assistant has been deleted.") - - if metadata is None: - metadata = {} - - tools = self._get_tools() - - run_options = self._generate_options( - ai_model_id=ai_model_id, - enable_code_interpreter=enable_code_interpreter, - enable_file_search=enable_file_search, - enable_json_response=enable_json_response, - max_completion_tokens=max_completion_tokens, - max_prompt_tokens=max_prompt_tokens, - parallel_tool_calls_enabled=parallel_tool_calls_enabled, - truncation_message_count=truncation_message_count, - temperature=temperature, - top_p=top_p, - metadata=metadata, - **kwargs, - ) - - # Filter out None values to avoid passing them as kwargs - run_options = {k: v for k, v in run_options.items() if v is not None} - - stream = self.client.beta.threads.runs.stream( - assistant_id=self.assistant.id, - thread_id=thread_id, - instructions=self.assistant.instructions, - tools=tools, # type: ignore - **run_options, - ) - - function_steps: dict[str, "FunctionCallContent"] = {} - active_messages: dict[str, RunStep] = {} - - while True: - async with stream as response_stream: - async for event in response_stream: - if event.event == "thread.run.created": - run = event.data - logger.info(f"Assistant run created with ID: {run.id}") - elif event.event == "thread.run.in_progress": - run = event.data - logger.info(f"Assistant run in progress with ID: {run.id}") - elif event.event == "thread.message.delta": - content = generate_streaming_message_content(self.name, event.data) - yield content - elif event.event == "thread.run.step.completed": - logger.info(f"Run step completed with ID: {event.data.id}") - if hasattr(event.data.step_details, "message_creation"): - message_id = event.data.step_details.message_creation.message_id - if message_id not in active_messages: - active_messages[message_id] = event.data - elif event.event == "thread.run.step.delta": - step_details = event.data.delta.step_details - if ( - step_details is not None - and hasattr(step_details, "tool_calls") - and step_details.tool_calls is not None - and isinstance(step_details.tool_calls, list) - ): - for tool_call in step_details.tool_calls: - tool_content = None - if tool_call.type == "function": - tool_content = generate_streaming_function_content(self.name, step_details) - elif tool_call.type == "code_interpreter": - tool_content = generate_streaming_code_interpreter_content(self.name, step_details) - if tool_content: - yield tool_content - elif event.event == "thread.run.requires_action": - run = event.data - function_action_result = await self._handle_streaming_requires_action(run, function_steps) - if function_action_result is None: - raise AgentInvokeException( - f"Function call required but no function steps found for agent `{self.name}` " - f"thread: {thread_id}." 
- ) - if function_action_result.function_result_content: - # Yield the function result content to the caller - yield function_action_result.function_result_content - if messages is not None: - # Add the function result content to the messages list, if it exists - messages.append(function_action_result.function_result_content) - if function_action_result.function_call_content: - if messages is not None: - messages.append(function_action_result.function_call_content) - stream = self.client.beta.threads.runs.submit_tool_outputs_stream( - run_id=run.id, - thread_id=thread_id, - tool_outputs=function_action_result.tool_outputs, # type: ignore - ) - break - elif event.event == "thread.run.completed": - run = event.data - logger.info(f"Run completed with ID: {run.id}") - if len(active_messages) > 0: - for id in active_messages: - step: RunStep = active_messages[id] - message = await self._retrieve_message( - thread_id=thread_id, - message_id=id, # type: ignore - ) - - if message and message.content: - content = generate_message_content(self.name, message, step) - if messages is not None: - messages.append(content) - return - elif event.event == "thread.run.failed": - run = event.data # type: ignore - error_message = "" - if run.last_error and run.last_error.message: - error_message = run.last_error.message - raise AgentInvokeException( - f"Run failed with status: `{run.status}` for agent `{self.name}` and thread `{thread_id}` " - f"with error: {error_message}" - ) - else: - # If the inner loop completes without encountering a 'break', exit the outer loop - break - - async def _handle_streaming_requires_action( - self, run: Run, function_steps: dict[str, "FunctionCallContent"] - ) -> FunctionActionResult | None: - fccs = get_function_call_contents(run, function_steps) - if fccs: - function_call_content = generate_function_call_content(agent_name=self.name, fccs=fccs) - - from semantic_kernel.contents.chat_history import ChatHistory - - chat_history = ChatHistory() - _ = await self._invoke_function_calls(fccs=fccs, chat_history=chat_history) - - function_result_content = merge_function_results(chat_history.messages)[0] - - tool_outputs = self._format_tool_outputs(fccs, chat_history) - return FunctionActionResult(function_call_content, function_result_content, tool_outputs) - return None - - # endregion - - # region Agent Helper Methods - - def _generate_options( - self, - *, - ai_model_id: str | None = None, - enable_code_interpreter: bool | None = False, - enable_file_search: bool | None = False, - enable_json_response: bool | None = False, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - parallel_tool_calls_enabled: bool | None = True, - truncation_message_count: int | None = None, - temperature: float | None = None, - top_p: float | None = None, - metadata: dict[str, str] | None = {}, - kwargs: Any = {}, - ) -> dict[str, Any]: - """Generate options for the assistant invocation.""" - merged_options = self._merge_options( - ai_model_id=ai_model_id, - enable_code_interpreter=enable_code_interpreter, - enable_file_search=enable_file_search, - enable_json_response=enable_json_response, - max_completion_tokens=max_completion_tokens, - max_prompt_tokens=max_prompt_tokens, - parallel_tool_calls_enabled=parallel_tool_calls_enabled, - truncation_message_count=truncation_message_count, - temperature=temperature, - top_p=top_p, - metadata=metadata, - **kwargs, - ) - - truncation_message_count = merged_options.get("truncation_message_count") - - return { - 
"max_completion_tokens": merged_options.get("max_completion_tokens"), - "max_prompt_tokens": merged_options.get("max_prompt_tokens"), - "model": merged_options.get("ai_model_id"), - "top_p": merged_options.get("top_p"), - # TODO(evmattso): Support `parallel_tool_calls` when it is ready - "response_format": "json" if merged_options.get("enable_json_response") else None, - "temperature": merged_options.get("temperature"), - "truncation_strategy": truncation_message_count if truncation_message_count else None, - "metadata": merged_options.get("metadata", None), - } - - def _merge_options( - self, - ai_model_id: str | None = None, - enable_code_interpreter: bool | None = None, - enable_file_search: bool | None = None, - enable_json_response: bool | None = None, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - parallel_tool_calls_enabled: bool | None = True, - truncation_message_count: int | None = None, - temperature: float | None = None, - top_p: float | None = None, - metadata: dict[str, str] | None = {}, - **kwargs: Any, - ) -> dict[str, Any]: - """Merge the run-time options with the agent level attribute options.""" - merged_options = { - "ai_model_id": ai_model_id if ai_model_id is not None else self.ai_model_id, - "enable_code_interpreter": enable_code_interpreter - if enable_code_interpreter is not None - else self.enable_code_interpreter, - "enable_file_search": enable_file_search if enable_file_search is not None else self.enable_file_search, - "enable_json_response": enable_json_response - if enable_json_response is not None - else self.enable_json_response, - "max_completion_tokens": max_completion_tokens - if max_completion_tokens is not None - else self.max_completion_tokens, - "max_prompt_tokens": max_prompt_tokens if max_prompt_tokens is not None else self.max_prompt_tokens, - "parallel_tool_calls_enabled": parallel_tool_calls_enabled - if parallel_tool_calls_enabled is not None - else self.parallel_tool_calls_enabled, - "truncation_message_count": truncation_message_count - if truncation_message_count is not None - else self.truncation_message_count, - "temperature": temperature if temperature is not None else self.temperature, - "top_p": top_p if top_p is not None else self.top_p, - "metadata": metadata if metadata is not None else self.metadata, - } - - # Update merged_options with any additional kwargs - merged_options.update(kwargs) - return merged_options - - async def _poll_run_status(self, run: Run, thread_id: str) -> Run: - """Poll the run status. - - Args: - run: The run. - thread_id: The thread id. - - Returns: - The updated run. - """ - logger.info(f"Polling run status: {run.id}, threadId: {thread_id}") - - count = 0 - - try: - run = await asyncio.wait_for( - self._poll_loop(run, thread_id, count), timeout=self.polling_options.run_polling_timeout.total_seconds() - ) - except asyncio.TimeoutError: - timeout_duration = self.polling_options.run_polling_timeout - error_message = f"Polling timed out for run id: `{run.id}` and thread id: `{thread_id}` after waiting {timeout_duration}." 
# noqa: E501 - logger.error(error_message) - raise AgentInvokeException(error_message) - - logger.info(f"Polled run status: {run.status}, {run.id}, threadId: {thread_id}") - return run - - async def _poll_loop(self, run: Run, thread_id: str, count: int) -> Run: - """Internal polling loop.""" - while True: - await asyncio.sleep(self.polling_options.get_polling_interval(count).total_seconds()) - count += 1 - - try: - run = await self.client.beta.threads.runs.retrieve(run.id, thread_id=thread_id) - except Exception as e: - logging.warning(f"Failed to retrieve run for run id: `{run.id}` and thread id: `{thread_id}`: {e}") - # Retry anyway - - if run.status not in self.polling_status: - break - - return run - - async def _retrieve_message(self, thread_id: str, message_id: str) -> Message | None: - """Retrieve a message from a thread. - - Args: - thread_id: The thread id. - message_id: The message id. - - Returns: - The message or None. - """ - message: Message | None = None - count = 0 - max_retries = 3 - - while count < max_retries: - try: - message = await self.client.beta.threads.messages.retrieve(message_id, thread_id=thread_id) - break - except Exception as ex: - logger.error(f"Failed to retrieve message {message_id} from thread {thread_id}: {ex}") - count += 1 - if count >= max_retries: - logger.error( - f"Max retries reached. Unable to retrieve message {message_id} from thread {thread_id}." - ) - break - backoff_time: float = self.polling_options.message_synchronization_delay.total_seconds() * (2**count) - await asyncio.sleep(backoff_time) - - return message - - def _check_if_deleted(self) -> None: - """Check if the assistant has been deleted.""" - if self._is_deleted: - raise AgentInitializationException("The assistant has been deleted.") - - def _get_tools(self) -> list[dict[str, str]]: - """Get the list of tools for the assistant. - - Returns: - The list of tools. - """ - tools = [] - if self.assistant is None: - raise AgentInitializationException("The assistant has not been created.") - - for tool in self.assistant.tools: - if isinstance(tool, CodeInterpreterTool): - tools.append({"type": "code_interpreter"}) - elif isinstance(tool, FileSearchTool): - tools.append({"type": "file_search"}) - - funcs = self.kernel.get_full_list_of_function_metadata() - tools.extend([kernel_function_metadata_to_function_call_format(f) for f in funcs]) - - return tools - - async def _invoke_function_calls(self, fccs: list["FunctionCallContent"], chat_history: "ChatHistory") -> list[Any]: - """Invoke function calls and store results in chat history. - - Args: - fccs: The function call contents. - chat_history: The chat history. - - Returns: - The results as a list. - """ - tasks = [ - self.kernel.invoke_function_call(function_call=function_call, chat_history=chat_history) - for function_call in fccs - ] - return await asyncio.gather(*tasks) - - def _format_tool_outputs( - self, fccs: list["FunctionCallContent"], chat_history: "ChatHistory" - ) -> list[dict[str, str]]: - """Format tool outputs from chat history for submission. - - Args: - fccs: The function call contents. - chat_history: The chat history. - - Returns: - The formatted tool outputs as a list of dictionaries. 
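# Stand-alone sketch of the retry arithmetic in the removed _retrieve_message
# above: the wait doubles after each failed attempt, up to max_retries. The
# half-second base delay is an assumed value for illustration only.
from datetime import timedelta

message_synchronization_delay = timedelta(milliseconds=500)
max_retries = 3

for count in range(1, max_retries + 1):
    backoff_time = message_synchronization_delay.total_seconds() * (2 ** count)
    print(f"attempt {count}: wait {backoff_time:.1f}s before retrying")
# attempt 1: wait 1.0s / attempt 2: wait 2.0s / attempt 3: wait 4.0s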
- """ - from semantic_kernel.contents.function_result_content import FunctionResultContent - - tool_call_lookup = { - tool_call.id: tool_call - for message in chat_history.messages - for tool_call in message.items - if isinstance(tool_call, FunctionResultContent) - } - - return [ - {"tool_call_id": fcc.id, "output": str(tool_call_lookup[fcc.id].result)} - for fcc in fccs - if fcc.id in tool_call_lookup - ] - - # endregion diff --git a/python/semantic_kernel/agents/open_ai/run_polling_options.py b/python/semantic_kernel/agents/open_ai/run_polling_options.py index 52c20677fc60..29da3046e32a 100644 --- a/python/semantic_kernel/agents/open_ai/run_polling_options.py +++ b/python/semantic_kernel/agents/open_ai/run_polling_options.py @@ -5,10 +5,10 @@ from pydantic import Field from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class RunPollingOptions(KernelBaseModel): """Configuration and defaults associated with polling behavior for Assistant API requests.""" diff --git a/python/semantic_kernel/agents/strategies/__init__.py b/python/semantic_kernel/agents/strategies/__init__.py index 836604a9f632..9a0307489103 100644 --- a/python/semantic_kernel/agents/strategies/__init__.py +++ b/python/semantic_kernel/agents/strategies/__init__.py @@ -9,6 +9,7 @@ from semantic_kernel.agents.strategies.termination.kernel_function_termination_strategy import ( KernelFunctionTerminationStrategy, ) +from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy __all__ = [ "AggregatorTerminationStrategy", @@ -16,4 +17,5 @@ "KernelFunctionSelectionStrategy", "KernelFunctionTerminationStrategy", "SequentialSelectionStrategy", + "TerminationStrategy", ] diff --git a/python/semantic_kernel/agents/strategies/selection/kernel_function_selection_strategy.py b/python/semantic_kernel/agents/strategies/selection/kernel_function_selection_strategy.py index 65f7dfb2ae0b..62ff44f22726 100644 --- a/python/semantic_kernel/agents/strategies/selection/kernel_function_selection_strategy.py +++ b/python/semantic_kernel/agents/strategies/selection/kernel_function_selection_strategy.py @@ -21,7 +21,7 @@ from semantic_kernel.functions.kernel_arguments import KernelArguments from semantic_kernel.functions.kernel_function import KernelFunction from semantic_kernel.kernel import Kernel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.agents import Agent @@ -29,7 +29,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class KernelFunctionSelectionStrategy(SelectionStrategy): """Determines agent selection based on the evaluation of a Kernel Function.""" diff --git a/python/semantic_kernel/agents/strategies/selection/selection_strategy.py b/python/semantic_kernel/agents/strategies/selection/selection_strategy.py index 6f453a50a876..a70e23a40b19 100644 --- a/python/semantic_kernel/agents/strategies/selection/selection_strategy.py +++ b/python/semantic_kernel/agents/strategies/selection/selection_strategy.py @@ -6,13 +6,13 @@ from semantic_kernel.agents import Agent from semantic_kernel.exceptions.agent_exceptions import AgentExecutionException from semantic_kernel.kernel_pydantic import KernelBaseModel -from 
semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.contents.chat_message_content import ChatMessageContent -@experimental_class +@experimental class SelectionStrategy(KernelBaseModel, ABC): """Base strategy class for selecting the next agent in a chat.""" diff --git a/python/semantic_kernel/agents/strategies/selection/sequential_selection_strategy.py b/python/semantic_kernel/agents/strategies/selection/sequential_selection_strategy.py index b60fc5f0f21f..739ad10b29b6 100644 --- a/python/semantic_kernel/agents/strategies/selection/sequential_selection_strategy.py +++ b/python/semantic_kernel/agents/strategies/selection/sequential_selection_strategy.py @@ -13,7 +13,7 @@ from pydantic import PrivateAttr from semantic_kernel.agents.strategies.selection.selection_strategy import SelectionStrategy -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.agents import Agent @@ -23,7 +23,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class SequentialSelectionStrategy(SelectionStrategy): """Round-robin turn-taking strategy. Agent order is based on the order in which they joined.""" diff --git a/python/semantic_kernel/agents/strategies/termination/aggregator_termination_strategy.py b/python/semantic_kernel/agents/strategies/termination/aggregator_termination_strategy.py index 9b102912299e..5ee26f5f6042 100644 --- a/python/semantic_kernel/agents/strategies/termination/aggregator_termination_strategy.py +++ b/python/semantic_kernel/agents/strategies/termination/aggregator_termination_strategy.py @@ -9,13 +9,13 @@ from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.agents.agent import Agent -@experimental_class +@experimental class AggregateTerminationCondition(str, Enum): """The condition for terminating the aggregation process.""" @@ -23,7 +23,7 @@ class AggregateTerminationCondition(str, Enum): ANY = "Any" -@experimental_class +@experimental class AggregatorTerminationStrategy(KernelBaseModel): """A strategy that aggregates multiple termination strategies.""" diff --git a/python/semantic_kernel/agents/strategies/termination/default_termination_strategy.py b/python/semantic_kernel/agents/strategies/termination/default_termination_strategy.py index b1232d680097..396c3849d0a8 100644 --- a/python/semantic_kernel/agents/strategies/termination/default_termination_strategy.py +++ b/python/semantic_kernel/agents/strategies/termination/default_termination_strategy.py @@ -2,19 +2,21 @@ from typing import TYPE_CHECKING +from pydantic import Field + from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.agents.agent import Agent from semantic_kernel.contents.chat_message_content import ChatMessageContent 
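# Minimal sketch of the decorator rename applied throughout these hunks: classes
# previously marked with experimental_class now use `experimental` from
# feature_stage_decorator. The class below is illustrative only.
from semantic_kernel.utils.feature_stage_decorator import experimental

@experimental
class MyPreviewType:
    """Placeholder class used only to show the decorator usage."""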
-@experimental_class +@experimental class DefaultTerminationStrategy(TerminationStrategy): """A default termination strategy that never terminates.""" - maximum_iterations: int = 1 + maximum_iterations: int = Field(default=5, description="The maximum number of iterations to run the agent.") async def should_agent_terminate(self, agent: "Agent", history: list["ChatMessageContent"]) -> bool: """Check if the agent should terminate. diff --git a/python/semantic_kernel/agents/strategies/termination/kernel_function_termination_strategy.py b/python/semantic_kernel/agents/strategies/termination/kernel_function_termination_strategy.py index 93c59e10ed84..a9a13a87c2dc 100644 --- a/python/semantic_kernel/agents/strategies/termination/kernel_function_termination_strategy.py +++ b/python/semantic_kernel/agents/strategies/termination/kernel_function_termination_strategy.py @@ -13,7 +13,7 @@ from semantic_kernel.functions.kernel_arguments import KernelArguments from semantic_kernel.functions.kernel_function import KernelFunction from semantic_kernel.kernel import Kernel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.agents import Agent @@ -21,7 +21,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class KernelFunctionTerminationStrategy(TerminationStrategy): """A termination strategy that uses a kernel function to determine termination.""" diff --git a/python/semantic_kernel/agents/strategies/termination/termination_strategy.py b/python/semantic_kernel/agents/strategies/termination/termination_strategy.py index ba4d0f6c341f..34fc3b40b761 100644 --- a/python/semantic_kernel/agents/strategies/termination/termination_strategy.py +++ b/python/semantic_kernel/agents/strategies/termination/termination_strategy.py @@ -7,7 +7,7 @@ from semantic_kernel.agents.agent import Agent from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.contents.chat_message_content import ChatMessageContent @@ -15,7 +15,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class TerminationStrategy(KernelBaseModel): """A strategy for determining when an agent should terminate.""" diff --git a/python/semantic_kernel/connectors/ai/README.md b/python/semantic_kernel/connectors/ai/README.md index 997a33427c65..bad47044b835 100644 --- a/python/semantic_kernel/connectors/ai/README.md +++ b/python/semantic_kernel/connectors/ai/README.md @@ -3,6 +3,7 @@ This directory contains the implementation of the AI connectors (aka AI services) that are used to interact with AI models. Depending on the modality, the AI connector can inherit from one of the following classes: + - [`ChatCompletionClientBase`](./chat_completion_client_base.py) for chat completion tasks. - [`TextCompletionClientBase`](./text_completion_client_base.py) for text completion tasks. - [`AudioToTextClientBase`](./audio_to_text_client_base.py) for audio to text tasks. @@ -10,7 +11,6 @@ Depending on the modality, the AI connector can inherit from one of the followin - [`TextToImageClientBase`](./text_to_image_client_base.py) for text to image tasks. - [`EmbeddingGeneratorBase`](./embeddings/embedding_generator_base.py) for text embedding tasks. 
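Editor's note: the `DefaultTerminationStrategy` change above raises the default `maximum_iterations` from 1 to 5 and documents the field via pydantic's `Field`. A small usage sketch, assuming only the import path shown in this diff; callers that relied on the old single-iteration default can pin it explicitly.

```python
from semantic_kernel.agents.strategies.termination.default_termination_strategy import (
    DefaultTerminationStrategy,
)

# New default: up to 5 iterations before the strategy stops the conversation.
strategy = DefaultTerminationStrategy()
print(strategy.maximum_iterations)  # 5

# Restore the previous behavior explicitly if a single iteration is required.
single_turn = DefaultTerminationStrategy(maximum_iterations=1)
```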
- All base clients inherit from the [`AIServiceClientBase`](../../services/ai_service_client_base.py) class. ## Existing AI connectors diff --git a/python/semantic_kernel/connectors/ai/anthropic/services/anthropic_chat_completion.py b/python/semantic_kernel/connectors/ai/anthropic/services/anthropic_chat_completion.py index f5baec134528..1c5d670c57f1 100644 --- a/python/semantic_kernel/connectors/ai/anthropic/services/anthropic_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/anthropic/services/anthropic_chat_completion.py @@ -35,9 +35,9 @@ from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent +from semantic_kernel.contents.chat_message_content import CMC_ITEM_TYPES, ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.streaming_chat_message_content import ITEM_TYPES as STREAMING_ITEM_TYPES +from semantic_kernel.contents.streaming_chat_message_content import STREAMING_CMC_ITEM_TYPES as STREAMING_ITEM_TYPES from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent @@ -49,7 +49,7 @@ ServiceInvalidResponseError, ServiceResponseException, ) -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( trace_chat_completion, trace_streaming_chat_completion, @@ -68,7 +68,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class AnthropicChatCompletion(ChatCompletionClientBase): """Anthropic ChatCompletion class.""" @@ -261,7 +261,7 @@ def _create_chat_message_content( self, response: Message, response_metadata: dict[str, Any] ) -> "ChatMessageContent": """Create a chat message content object.""" - items: list[ITEM_TYPES] = [] + items: list[CMC_ITEM_TYPES] = [] items += self._get_tool_calls_from_message(response) for content_block in response.content: diff --git a/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_prompt_execution_settings.py index ac290925b399..9d2275a74e6e 100644 --- a/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_prompt_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_prompt_execution_settings.py @@ -5,10 +5,10 @@ from pydantic import Field from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class AzureAIInferencePromptExecutionSettings(PromptExecutionSettings): """Azure AI Inference Prompt Execution Settings. 
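Editor's note: the connector changes above switch from the old `ITEM_TYPES` aliases to `CMC_ITEM_TYPES` and `STREAMING_CMC_ITEM_TYPES`, so the chat and streaming item-type unions have distinct names at their source modules. A minimal consumption sketch, using only imports that appear in this patch:

```python
from semantic_kernel.contents.chat_message_content import CMC_ITEM_TYPES, ChatMessageContent
from semantic_kernel.contents.text_content import TextContent
from semantic_kernel.contents.utils.author_role import AuthorRole

# Build the items list against the renamed alias, as the updated connectors now do.
items: list[CMC_ITEM_TYPES] = [TextContent(text="Hello from the model")]
message = ChatMessageContent(role=AuthorRole.ASSISTANT, items=items)
```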
@@ -26,7 +26,7 @@ class AzureAIInferencePromptExecutionSettings(PromptExecutionSettings): extra_parameters: dict[str, Any] | None = None -@experimental_class +@experimental class AzureAIInferenceChatPromptExecutionSettings(AzureAIInferencePromptExecutionSettings): """Azure AI Inference Chat Prompt Execution Settings.""" @@ -46,7 +46,7 @@ class AzureAIInferenceChatPromptExecutionSettings(AzureAIInferencePromptExecutio ] = None -@experimental_class +@experimental class AzureAIInferenceEmbeddingPromptExecutionSettings(PromptExecutionSettings): """Azure AI Inference Embedding Prompt Execution Settings. diff --git a/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_settings.py b/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_settings.py index 400eb31aa54c..dc935ff18f1f 100644 --- a/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_settings.py +++ b/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_settings.py @@ -5,10 +5,10 @@ from pydantic import SecretStr from semantic_kernel.kernel_pydantic import HttpsUrl, KernelBaseSettings -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class AzureAIInferenceSettings(KernelBaseSettings): """Azure AI Inference settings. diff --git a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_base.py b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_base.py index 64e0806804e1..a732d85da5a9 100644 --- a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_base.py +++ b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_base.py @@ -16,7 +16,7 @@ from semantic_kernel.utils.authentication.async_default_azure_credential_wrapper import ( AsyncDefaultAzureCredentialWrapper, ) -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT @@ -37,7 +37,7 @@ def get_client_class(cls, client_type: "AzureAIInferenceClientType") -> Any: return class_mapping[client_type] -@experimental_class +@experimental class AzureAIInferenceBase(KernelBaseModel, ABC): """Azure AI Inference Chat Completion Service.""" diff --git a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_chat_completion.py b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_chat_completion.py index 9a43591938e6..88cb7ca5abd9 100644 --- a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_chat_completion.py @@ -33,16 +33,16 @@ from semantic_kernel.connectors.ai.function_calling_utils import update_settings_from_function_call_configuration from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent +from semantic_kernel.contents.chat_message_content import CMC_ITEM_TYPES, ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent -from 
semantic_kernel.contents.streaming_chat_message_content import ITEM_TYPES as STREAMING_ITEM_TYPES +from semantic_kernel.contents.streaming_chat_message_content import STREAMING_CMC_ITEM_TYPES as STREAMING_ITEM_TYPES from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.contents.utils.finish_reason import FinishReason from semantic_kernel.exceptions.service_exceptions import ServiceInvalidExecutionSettingsError -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration @@ -51,7 +51,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class AzureAIInferenceChatCompletion(ChatCompletionClientBase, AzureAIInferenceBase): """Azure AI Inference Chat Completion Service.""" @@ -136,6 +136,8 @@ async def _inner_get_chat_message_contents( with AzureAIInferenceTracing(): response: ChatCompletions = await self.client.complete( messages=self._prepare_chat_history_for_request(chat_history), + # The model id will be ignored by the service if the endpoint serves only one model (i.e. MaaS) + model=self.ai_model_id, model_extras=settings.extra_parameters, **settings.prepare_settings_dict(), ) @@ -158,6 +160,8 @@ async def _inner_get_streaming_chat_message_contents( with AzureAIInferenceTracing(): response: AsyncStreamingChatCompletions = await self.client.complete( stream=True, + # The model id will be ignored by the service if the endpoint serves only one model (i.e. MaaS) + model=self.ai_model_id, messages=self._prepare_chat_history_for_request(chat_history), model_extras=settings.extra_parameters, **settings.prepare_settings_dict(), @@ -236,12 +240,11 @@ def _create_chat_message_content( Returns: A chat message content object. 
""" - items: list[ITEM_TYPES] = [] + items: list[CMC_ITEM_TYPES] = [] if choice.message.content: items.append( TextContent( text=choice.message.content, - inner_content=response, metadata=metadata, ) ) diff --git a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_text_embedding.py b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_text_embedding.py index 226fde4ec7b1..64fea069cb1c 100644 --- a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_text_embedding.py @@ -20,13 +20,13 @@ AzureAIInferenceClientType, ) from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import EmbeddingGeneratorBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -@experimental_class +@experimental class AzureAIInferenceTextEmbedding(EmbeddingGeneratorBase, AzureAIInferenceBase): """Azure AI Inference Text Embedding Service.""" @@ -86,6 +86,8 @@ async def generate_embeddings( response: EmbeddingsResult = await self.client.embed( input=texts, + # The model id will be ignored by the service if the endpoint serves only one model (i.e. MaaS) + model=self.ai_model_id, model_extras=settings.extra_parameters if settings else None, dimensions=settings.dimensions if settings else None, encoding_format=settings.encoding_format if settings else None, diff --git a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/utils.py b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/utils.py index d8264aec8664..f47817aefc07 100644 --- a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/utils.py +++ b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/utils.py @@ -50,11 +50,7 @@ def _format_developer_message(message: ChatMessageContent) -> ChatRequestMessage Returns: The formatted developer message. """ - # TODO(@ymuichiro): Add support when Azure AI Inference SDK implements developer role - raise NotImplementedError( - "Developer role is currently not supported by the Azure AI Inference SDK. " - "This feature will be implemented in a future update when SDK support is available." - ) + return ChatRequestMessage({"role": "developer", "content": message.content}) def _format_user_message(message: ChatMessageContent) -> UserMessage: diff --git a/python/semantic_kernel/connectors/ai/bedrock/README.md b/python/semantic_kernel/connectors/ai/bedrock/README.md index 9f48879b54cb..bc9461aef7e8 100644 --- a/python/semantic_kernel/connectors/ai/bedrock/README.md +++ b/python/semantic_kernel/connectors/ai/bedrock/README.md @@ -11,6 +11,27 @@ Follow this [guide](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html#configuration) to configure your environment to use the Bedrock API. +Please configure the `aws_access_key_id`, `aws_secret_access_key`, and `region` otherwise you will need to create custom clients for the services. For example: + +```python +runtime_client=boto.client( + "bedrock-runtime", + aws_access_key_id="your_access_key", + aws_secret_access_key="your_secret_key", + region_name="your_region", + [...other parameters you may need...] 
+) +client=boto.client( + "bedrock", + aws_access_key_id="your_access_key", + aws_secret_access_key="your_secret_key", + region_name="your_region", + [...other parameters you may need...] +) + +bedrock_chat_completion_service = BedrockChatCompletion(runtime_client=runtime_client, client=client) +``` + ## Supports ### Region diff --git a/python/semantic_kernel/connectors/ai/bedrock/bedrock_settings.py b/python/semantic_kernel/connectors/ai/bedrock/bedrock_settings.py index 1f966fc08b2a..2c5348a14676 100644 --- a/python/semantic_kernel/connectors/ai/bedrock/bedrock_settings.py +++ b/python/semantic_kernel/connectors/ai/bedrock/bedrock_settings.py @@ -3,10 +3,10 @@ from typing import ClassVar from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class BedrockSettings(KernelBaseSettings): """Amazon Bedrock service settings. diff --git a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_base.py b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_base.py index 115ae65409dd..3457d9fee1a5 100644 --- a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_base.py +++ b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_base.py @@ -4,8 +4,10 @@ from functools import partial from typing import Any, ClassVar -from semantic_kernel.connectors.ai.bedrock.services.model_provider.utils import run_in_executor +import boto3 + from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.utils.async_utils import run_in_executor class BedrockBase(KernelBaseModel, ABC): @@ -19,6 +21,26 @@ class BedrockBase(KernelBaseModel, ABC): # Client: Use for model management bedrock_client: Any + def __init__( + self, + *, + runtime_client: Any | None = None, + client: Any | None = None, + **kwargs: Any, + ) -> None: + """Initialize the Amazon Bedrock Base Class. + + Args: + runtime_client: The Amazon Bedrock runtime client to use. + client: The Amazon Bedrock client to use. + **kwargs: Additional keyword arguments. 
+ """ + super().__init__( + bedrock_runtime_client=runtime_client or boto3.client("bedrock-runtime"), + bedrock_client=client or boto3.client("bedrock"), + **kwargs, + ) + async def get_foundation_model_info(self, model_id: str) -> dict[str, Any]: """Get the foundation model information.""" response = await run_in_executor( diff --git a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_chat_completion.py b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_chat_completion.py index 5c4f3e6cd192..64df31e5967b 100644 --- a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_chat_completion.py @@ -5,8 +5,6 @@ from functools import partial from typing import TYPE_CHECKING, Any, ClassVar -import boto3 - if sys.version_info >= (3, 12): from typing import override # pragma: no cover else: @@ -25,16 +23,15 @@ finish_reason_from_bedrock_to_semantic_kernel, format_bedrock_function_name_to_kernel_function_fully_qualified_name, remove_none_recursively, - run_in_executor, update_settings_from_function_choice_configuration, ) from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase from semantic_kernel.connectors.ai.completion_usage import CompletionUsage from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType -from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent +from semantic_kernel.contents.chat_message_content import CMC_ITEM_TYPES, ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.image_content import ImageContent -from semantic_kernel.contents.streaming_chat_message_content import ITEM_TYPES as STREAMING_ITEM_TYPES +from semantic_kernel.contents.streaming_chat_message_content import STREAMING_CMC_ITEM_TYPES as STREAMING_ITEM_TYPES from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent @@ -45,6 +42,7 @@ ServiceInvalidRequestError, ServiceInvalidResponseError, ) +from semantic_kernel.utils.async_utils import run_in_executor from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( trace_chat_completion, trace_streaming_chat_completion, @@ -95,8 +93,8 @@ def __init__( super().__init__( ai_model_id=bedrock_settings.chat_model_id, service_id=service_id or bedrock_settings.chat_model_id, - bedrock_runtime_client=runtime_client or boto3.client("bedrock-runtime"), - bedrock_client=client or boto3.client("bedrock"), + runtime_client=runtime_client, + client=client, ) # region Overriding base class methods @@ -240,7 +238,7 @@ def _create_chat_message_content(self, response: dict[str, Any]) -> ChatMessageC prompt_tokens=response["usage"]["inputTokens"], completion_tokens=response["usage"]["outputTokens"], ) - items: list[ITEM_TYPES] = [] + items: list[CMC_ITEM_TYPES] = [] for content in response["output"]["message"]["content"]: if "text" in content: items.append(TextContent(text=content["text"], inner_content=content)) diff --git a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_completion.py b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_completion.py index 81092a7c7fa4..d690a3aeb644 100644 --- 
a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_completion.py +++ b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_completion.py @@ -6,7 +6,6 @@ from functools import partial from typing import TYPE_CHECKING, Any -import boto3 from pydantic import ValidationError if sys.version_info >= (3, 12): @@ -22,11 +21,11 @@ parse_streaming_text_completion_response, parse_text_completion_response, ) -from semantic_kernel.connectors.ai.bedrock.services.model_provider.utils import run_in_executor from semantic_kernel.connectors.ai.text_completion_client_base import TextCompletionClientBase from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError, ServiceInvalidRequestError +from semantic_kernel.utils.async_utils import run_in_executor from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( trace_streaming_text_completion, trace_text_completion, @@ -73,8 +72,8 @@ def __init__( super().__init__( ai_model_id=bedrock_settings.text_model_id, service_id=service_id or bedrock_settings.text_model_id, - bedrock_runtime_client=runtime_client or boto3.client("bedrock-runtime"), - bedrock_client=client or boto3.client("bedrock"), + runtime_client=runtime_client, + client=client, ) # region Overriding base class methods diff --git a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_embedding.py b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_embedding.py index f963db5c5f0b..72224726acec 100644 --- a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_embedding.py @@ -6,7 +6,6 @@ from functools import partial from typing import TYPE_CHECKING, Any -import boto3 from numpy import array, ndarray from pydantic import ValidationError @@ -24,10 +23,10 @@ get_text_embedding_request_body, parse_text_embedding_response, ) -from semantic_kernel.connectors.ai.bedrock.services.model_provider.utils import run_in_executor from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import EmbeddingGeneratorBase from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError, ServiceInvalidRequestError +from semantic_kernel.utils.async_utils import run_in_executor if TYPE_CHECKING: pass @@ -70,8 +69,8 @@ def __init__( super().__init__( ai_model_id=bedrock_settings.embedding_model_id, service_id=service_id or bedrock_settings.embedding_model_id, - bedrock_runtime_client=runtime_client or boto3.client("bedrock-runtime"), - bedrock_client=client or boto3.client("bedrock"), + runtime_client=runtime_client, + client=client, ) @override diff --git a/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/bedrock_model_provider.py b/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/bedrock_model_provider.py index 361a37e622a5..8655361331e5 100644 --- a/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/bedrock_model_provider.py +++ b/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/bedrock_model_provider.py @@ -37,8 +37,10 @@ class BedrockModelProvider(Enum): @classmethod def to_model_provider(cls, model_id: str) -> "BedrockModelProvider": """Convert a model ID to a model provider.""" 
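Editor's note: the `to_model_provider` change in this hunk replaces "take the first dotted segment of the model id" with a substring scan over the known provider names, which also handles region-prefixed inference-profile ids (for example `us.anthropic....`) where the provider is not the leading segment. A small illustration; the printed enum members are assumptions, since the enum's members are not shown in this hunk.

```python
from semantic_kernel.connectors.ai.bedrock.services.model_provider.bedrock_model_provider import (
    BedrockModelProvider,
)

# A plain foundation-model id and a cross-region inference-profile id for the same model.
plain_id = "anthropic.claude-3-haiku-20240307-v1:0"
profile_id = "us.anthropic.claude-3-haiku-20240307-v1:0"

# Old behavior: profile_id.split(".")[0] == "us", which is not a valid provider name.
# New behavior: substring matching resolves both ids to the same provider.
print(BedrockModelProvider.to_model_provider(plain_id))
print(BedrockModelProvider.to_model_provider(profile_id))
```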
- provider = model_id.split(".")[0] - return cls(provider) + try: + return next(provider for provider in cls if provider.value in model_id) + except StopIteration: + raise ValueError(f"Model ID {model_id} does not contain a valid model provider name.") # region Text Completion diff --git a/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/utils.py b/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/utils.py index 7607696559c5..e6425eda1c39 100644 --- a/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/utils.py +++ b/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/utils.py @@ -1,9 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. -import asyncio import json from collections.abc import Callable, Mapping -from functools import partial from typing import TYPE_CHECKING, Any from semantic_kernel.connectors.ai.bedrock.bedrock_prompt_execution_settings import BedrockChatPromptExecutionSettings @@ -23,11 +21,6 @@ from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -async def run_in_executor(executor, func, *args, **kwargs): - """Run a function in an executor.""" - return await asyncio.get_event_loop().run_in_executor(executor, partial(func, *args, **kwargs)) - - def remove_none_recursively(data: dict, max_depth: int = 5) -> dict: """Remove None values from a dictionary recursively.""" if max_depth <= 0: diff --git a/python/semantic_kernel/connectors/ai/chat_completion_client_base.py b/python/semantic_kernel/connectors/ai/chat_completion_client_base.py index 5c527e994564..974d59af92be 100644 --- a/python/semantic_kernel/connectors/ai/chat_completion_client_base.py +++ b/python/semantic_kernel/connectors/ai/chat_completion_client_base.py @@ -157,6 +157,7 @@ async def get_chat_message_contents( function_call=function_call, chat_history=chat_history, arguments=kwargs.get("arguments"), + execution_settings=settings, function_call_count=fc_count, request_index=request_index, function_behavior=settings.function_choice_behavior, @@ -263,7 +264,9 @@ async def get_streaming_chat_message_contents( for msg in messages: if msg is not None: all_messages.append(msg) - if any(isinstance(item, FunctionCallContent) for item in msg.items): + if not function_call_returned and any( + isinstance(item, FunctionCallContent) for item in msg.items + ): function_call_returned = True yield messages @@ -289,6 +292,8 @@ async def get_streaming_chat_message_contents( function_call=function_call, chat_history=chat_history, arguments=kwargs.get("arguments"), + is_streaming=True, + execution_settings=settings, function_call_count=fc_count, request_index=request_index, function_behavior=settings.function_choice_behavior, @@ -429,7 +434,10 @@ def _get_ai_model_id(self, settings: "PromptExecutionSettings") -> str: return getattr(settings, "ai_model_id", self.ai_model_id) or self.ai_model_id def _yield_function_result_messages(self, function_result_messages: list) -> bool: - """Determine if the function result messages should be yielded.""" + """Determine if the function result messages should be yielded. + + If there are messages and if the first message has items, then yield the messages. 
+ """ return len(function_result_messages) > 0 and len(function_result_messages[0].items) > 0 # endregion diff --git a/python/semantic_kernel/connectors/ai/embeddings/embedding_generator_base.py b/python/semantic_kernel/connectors/ai/embeddings/embedding_generator_base.py index 3342d96baa02..edc93dd326d4 100644 --- a/python/semantic_kernel/connectors/ai/embeddings/embedding_generator_base.py +++ b/python/semantic_kernel/connectors/ai/embeddings/embedding_generator_base.py @@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, Any from semantic_kernel.services.ai_service_client_base import AIServiceClientBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from numpy import ndarray @@ -12,7 +12,7 @@ from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -@experimental_class +@experimental class EmbeddingGeneratorBase(AIServiceClientBase, ABC): """Base class for embedding generators.""" diff --git a/python/semantic_kernel/connectors/ai/function_call_choice_configuration.py b/python/semantic_kernel/connectors/ai/function_call_choice_configuration.py index d44fb946af65..a49fa1a1a78c 100644 --- a/python/semantic_kernel/connectors/ai/function_call_choice_configuration.py +++ b/python/semantic_kernel/connectors/ai/function_call_choice_configuration.py @@ -4,10 +4,10 @@ from pydantic.dataclasses import dataclass from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental @dataclass class FunctionCallChoiceConfiguration: """Configuration for function call choice.""" diff --git a/python/semantic_kernel/connectors/ai/function_calling_utils.py b/python/semantic_kernel/connectors/ai/function_calling_utils.py index 7a5c2950c4e0..1e65fa59e537 100644 --- a/python/semantic_kernel/connectors/ai/function_calling_utils.py +++ b/python/semantic_kernel/connectors/ai/function_calling_utils.py @@ -1,10 +1,13 @@ # Copyright (c) Microsoft. All rights reserved. from collections import OrderedDict +from collections.abc import Callable +from copy import deepcopy from typing import TYPE_CHECKING, Any from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.connectors.ai.function_choice_behavior import ( @@ -15,6 +18,7 @@ from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata + from semantic_kernel.kernel import Kernel def update_settings_from_function_call_configuration( @@ -102,8 +106,8 @@ def merge_function_results( def merge_streaming_function_results( messages: list["ChatMessageContent | StreamingChatMessageContent"], - ai_model_id: str, - function_invoke_attempt: int, + ai_model_id: str | None = None, + function_invoke_attempt: int | None = None, ) -> list["StreamingChatMessageContent"]: """Combine multiple streaming function result content types to one streaming chat message content type. 
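Editor's note: the Bedrock-specific `run_in_executor` helper removed above now lives in `semantic_kernel.utils.async_utils` and is imported from there by the Bedrock services. Its removed body is small enough to restate as a self-contained sketch of the pattern it enables: wrapping synchronous boto3 calls so they can be awaited.

```python
import asyncio
from functools import partial


async def run_in_executor(executor, func, *args, **kwargs):
    """Run a synchronous callable on an executor and await the result (same body as the removed helper)."""
    return await asyncio.get_event_loop().run_in_executor(executor, partial(func, *args, **kwargs))


async def main() -> None:
    # str.upper stands in for a blocking boto3 Bedrock call.
    print(await run_in_executor(None, str.upper, "bedrock"))


asyncio.run(main())
```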
@@ -134,3 +138,36 @@ def merge_streaming_function_results( function_invoke_attempt=function_invoke_attempt, ) ] + + +@experimental +def prepare_settings_for_function_calling( + settings: "PromptExecutionSettings", + settings_class: type["PromptExecutionSettings"], + update_settings_callback: Callable[..., None], + kernel: "Kernel", +) -> "PromptExecutionSettings": + """Prepare settings for the service. + + Args: + settings: Prompt execution settings. + settings_class: The settings class. + update_settings_callback: The callback to update the settings. + kernel: Kernel instance. + + Returns: + PromptExecutionSettings of type settings_class. + """ + settings = deepcopy(settings) + if not isinstance(settings, settings_class): + settings = settings_class.from_prompt_execution_settings(settings) + + if settings.function_choice_behavior: + # Configure the function choice behavior into the settings object + # that will become part of the request to the AI service + settings.function_choice_behavior.configure( + kernel=kernel, + update_settings_callback=update_settings_callback, + settings=settings, + ) + return settings diff --git a/python/semantic_kernel/connectors/ai/function_choice_behavior.py b/python/semantic_kernel/connectors/ai/function_choice_behavior.py index f32a57e26952..0e872746346f 100644 --- a/python/semantic_kernel/connectors/ai/function_choice_behavior.py +++ b/python/semantic_kernel/connectors/ai/function_choice_behavior.py @@ -7,7 +7,7 @@ from semantic_kernel.connectors.ai.function_choice_type import FunctionChoiceType from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration @@ -23,7 +23,7 @@ _T = TypeVar("_T", bound="FunctionChoiceBehavior") -@experimental_class +@experimental class FunctionChoiceBehavior(KernelBaseModel): """Class that controls function choice behavior. 
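Editor's note: the new `prepare_settings_for_function_calling` helper added above centralizes the copy, convert, and configure steps that individual connectors previously repeated. A usage sketch; the settings class and update callback below are taken from elsewhere in this patch, and the exact call site is illustrative rather than prescriptive.

```python
from semantic_kernel import Kernel
from semantic_kernel.connectors.ai.function_calling_utils import (
    prepare_settings_for_function_calling,
    update_settings_from_function_call_configuration,
)
from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior
from semantic_kernel.connectors.ai.open_ai import OpenAIChatPromptExecutionSettings
from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings

kernel = Kernel()
settings = PromptExecutionSettings(function_choice_behavior=FunctionChoiceBehavior.Auto())

# Deep-copies the settings, converts them to the connector-specific class, and lets the
# function choice behavior configure the request (tools, tool choice, and so on).
prepared = prepare_settings_for_function_calling(
    settings=settings,
    settings_class=OpenAIChatPromptExecutionSettings,
    update_settings_callback=update_settings_from_function_call_configuration,
    kernel=kernel,
)
assert isinstance(prepared, OpenAIChatPromptExecutionSettings)
```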
diff --git a/python/semantic_kernel/connectors/ai/function_choice_type.py b/python/semantic_kernel/connectors/ai/function_choice_type.py index d4bc2b3a598f..6fc2287b3ce8 100644 --- a/python/semantic_kernel/connectors/ai/function_choice_type.py +++ b/python/semantic_kernel/connectors/ai/function_choice_type.py @@ -2,10 +2,10 @@ from enum import Enum -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class FunctionChoiceType(Enum): """The type of function choice behavior.""" diff --git a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_chat_completion.py b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_chat_completion.py index b7005c3c1f5d..9b538b26ebec 100644 --- a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_chat_completion.py @@ -36,9 +36,9 @@ format_gemini_function_name_to_kernel_function_fully_qualified_name, ) from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent +from semantic_kernel.contents.chat_message_content import CMC_ITEM_TYPES, ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.streaming_chat_message_content import ITEM_TYPES as STREAMING_ITEM_TYPES +from semantic_kernel.contents.streaming_chat_message_content import STREAMING_CMC_ITEM_TYPES as STREAMING_ITEM_TYPES from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent @@ -127,8 +127,10 @@ async def _inner_get_chat_message_contents( assert isinstance(settings, GoogleAIChatPromptExecutionSettings) # nosec genai.configure(api_key=self.service_settings.api_key.get_secret_value()) + if not self.service_settings.gemini_model_id: + raise ServiceInitializationError("The Google AI Gemini model ID is required.") model = GenerativeModel( - self.service_settings.gemini_model_id, + model_name=self.service_settings.gemini_model_id, system_instruction=filter_system_message(chat_history), ) @@ -136,7 +138,7 @@ async def _inner_get_chat_message_contents( contents=self._prepare_chat_history_for_request(chat_history), generation_config=GenerationConfig(**settings.prepare_settings_dict()), tools=settings.tools, - tool_config=settings.tool_config, + tool_config=settings.tool_config, # type: ignore ) return [self._create_chat_message_content(response, candidate) for candidate in response.candidates] @@ -154,8 +156,10 @@ async def _inner_get_streaming_chat_message_contents( assert isinstance(settings, GoogleAIChatPromptExecutionSettings) # nosec genai.configure(api_key=self.service_settings.api_key.get_secret_value()) + if not self.service_settings.gemini_model_id: + raise ServiceInitializationError("The Google AI Gemini model ID is required.") model = GenerativeModel( - self.service_settings.gemini_model_id, + model_name=self.service_settings.gemini_model_id, system_instruction=filter_system_message(chat_history), ) @@ -163,7 +167,7 @@ async def _inner_get_streaming_chat_message_contents( contents=self._prepare_chat_history_for_request(chat_history), 
generation_config=GenerationConfig(**settings.prepare_settings_dict()), tools=settings.tools, - tool_config=settings.tool_config, + tool_config=settings.tool_config, # type: ignore stream=True, ) @@ -240,7 +244,7 @@ def _create_chat_message_content( response_metadata = self._get_metadata_from_response(response) response_metadata.update(self._get_metadata_from_candidate(candidate)) - items: list[ITEM_TYPES] = [] + items: list[CMC_ITEM_TYPES] = [] for idx, part in enumerate(candidate.content.parts): if part.text: items.append(TextContent(text=part.text, inner_content=response, metadata=response_metadata)) diff --git a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_completion.py b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_completion.py index 2c2c25bc1910..625ee5c3e251 100644 --- a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_completion.py +++ b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_completion.py @@ -99,8 +99,10 @@ async def _inner_get_text_contents( assert isinstance(settings, GoogleAITextPromptExecutionSettings) # nosec genai.configure(api_key=self.service_settings.api_key.get_secret_value()) + if not self.service_settings.gemini_model_id: + raise ServiceInitializationError("The Google AI Gemini model ID is required.") model = GenerativeModel( - self.service_settings.gemini_model_id, + model_name=self.service_settings.gemini_model_id, ) response: AsyncGenerateContentResponse = await model.generate_content_async( @@ -122,8 +124,10 @@ async def _inner_get_streaming_text_contents( assert isinstance(settings, GoogleAITextPromptExecutionSettings) # nosec genai.configure(api_key=self.service_settings.api_key.get_secret_value()) + if not self.service_settings.gemini_model_id: + raise ServiceInitializationError("The Google AI Gemini model ID is required.") model = GenerativeModel( - self.service_settings.gemini_model_id, + model_name=self.service_settings.gemini_model_id, ) response: AsyncGenerateContentResponse = await model.generate_content_async( diff --git a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_embedding.py b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_embedding.py index 6f2ceff601d7..98cfc71ce173 100644 --- a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_embedding.py @@ -93,7 +93,9 @@ async def generate_raw_embeddings( assert isinstance(settings, GoogleAIEmbeddingPromptExecutionSettings) # nosec genai.configure(api_key=self.service_settings.api_key.get_secret_value()) - response: BatchEmbeddingDict = await genai.embed_content_async( + if not self.service_settings.embedding_model_id: + raise ServiceInitializationError("The Google AI embedding model ID is required.") + response: BatchEmbeddingDict = await genai.embed_content_async( # type: ignore model=self.service_settings.embedding_model_id, content=texts, **settings.prepare_settings_dict(), diff --git a/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_chat_completion.py b/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_chat_completion.py index bd7c1346accf..beec827bfb2f 100644 --- a/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_chat_completion.py +++ 
b/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_chat_completion.py @@ -34,9 +34,9 @@ ) from semantic_kernel.connectors.ai.google.vertex_ai.vertex_ai_settings import VertexAISettings from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent +from semantic_kernel.contents.chat_message_content import CMC_ITEM_TYPES, ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.streaming_chat_message_content import ITEM_TYPES as STREAMING_ITEM_TYPES +from semantic_kernel.contents.streaming_chat_message_content import STREAMING_CMC_ITEM_TYPES as STREAMING_ITEM_TYPES from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent @@ -235,7 +235,7 @@ def _create_chat_message_content(self, response: GenerationResponse, candidate: response_metadata = self._get_metadata_from_response(response) response_metadata.update(self._get_metadata_from_candidate(candidate)) - items: list[ITEM_TYPES] = [] + items: list[CMC_ITEM_TYPES] = [] for idx, part in enumerate(candidate.content.parts): part_dict = part.to_dict() if "text" in part_dict: diff --git a/python/semantic_kernel/connectors/ai/hugging_face/hf_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/hugging_face/hf_prompt_execution_settings.py index 89fc5525cb29..da7b2fac26e1 100644 --- a/python/semantic_kernel/connectors/ai/hugging_face/hf_prompt_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/hugging_face/hf_prompt_execution_settings.py @@ -2,10 +2,16 @@ from typing import Any -from transformers import GenerationConfig - from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +try: + from transformers import GenerationConfig + + ready = True +except ImportError: + GenerationConfig = Any + ready = False + class HuggingFacePromptExecutionSettings(PromptExecutionSettings): """Hugging Face prompt execution settings.""" @@ -19,8 +25,10 @@ class HuggingFacePromptExecutionSettings(PromptExecutionSettings): temperature: float = 1.0 top_p: float = 1.0 - def get_generation_config(self) -> GenerationConfig: + def get_generation_config(self) -> "GenerationConfig": """Get the generation config.""" + if not ready: + raise ImportError("transformers is not installed.") return GenerationConfig( **self.model_dump( include={"max_new_tokens", "pad_token_id", "eos_token_id", "temperature", "top_p"}, diff --git a/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py b/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py index e40688ebad79..3136e3c0f582 100644 --- a/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py +++ b/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py @@ -11,6 +11,7 @@ else: from typing_extensions import override # pragma: no cover + import torch from transformers import AutoTokenizer, TextIteratorStreamer, pipeline diff --git a/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_embedding.py b/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_embedding.py index 0bb20906be30..b52a87fa42f7 100644 --- a/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_embedding.py +++ 
b/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_embedding.py @@ -15,17 +15,18 @@ from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import EmbeddingGeneratorBase from semantic_kernel.exceptions import ServiceResponseException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from torch import Tensor from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings + logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class HuggingFaceTextEmbedding(EmbeddingGeneratorBase): """Hugging Face text embedding service.""" diff --git a/python/semantic_kernel/connectors/ai/mistral_ai/prompt_execution_settings/mistral_ai_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/mistral_ai/prompt_execution_settings/mistral_ai_prompt_execution_settings.py index ea709172950f..8153db93577f 100644 --- a/python/semantic_kernel/connectors/ai/mistral_ai/prompt_execution_settings/mistral_ai_prompt_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/mistral_ai/prompt_execution_settings/mistral_ai_prompt_execution_settings.py @@ -1,17 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. import logging -import sys from typing import Annotated, Any, Literal from mistralai import utils - -if sys.version_info >= (3, 11): - pass # pragma: no cover -else: - pass # pragma: no cover - -from pydantic import Field, field_validator +from pydantic import Field from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings @@ -29,7 +22,14 @@ class MistralAIChatPromptExecutionSettings(MistralAIPromptExecutionSettings): response_format: dict[Literal["type"], Literal["text", "json_object"]] | None = None messages: list[dict[str, Any]] | None = None - safe_mode: Annotated[bool, Field(exclude=True)] = False + safe_mode: Annotated[ + bool, + Field( + exclude=True, + deprecated="The 'safe_mode' setting is no longer supported and is being ignored, " + "it will be removed in the Future.", + ), + ] = False safe_prompt: bool = False max_tokens: Annotated[int | None, Field(gt=0)] = None seed: int | None = None @@ -56,12 +56,3 @@ class MistralAIChatPromptExecutionSettings(MistralAIPromptExecutionSettings): "on the function choice configuration.", ), ] = None - - @field_validator("safe_mode") - @classmethod - def check_safe_mode(cls, v: bool) -> bool: - """The safe_mode setting is no longer supported.""" - logger.warning( - "The 'safe_mode' setting is no longer supported and is being ignored, it will be removed in the Future." 
- ) - return v diff --git a/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_chat_completion.py b/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_chat_completion.py index 2405897a6c39..efe8ee177360 100644 --- a/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_chat_completion.py @@ -42,7 +42,7 @@ from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.contents.utils.finish_reason import FinishReason from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError, ServiceResponseException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( trace_chat_completion, trace_streaming_chat_completion, @@ -55,7 +55,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class MistralAIChatCompletion(MistralAIBase, ChatCompletionClientBase): """Mistral Chat completion class.""" diff --git a/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_text_embedding.py b/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_text_embedding.py index f3e10e190b88..1cffe44c1376 100644 --- a/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_text_embedding.py @@ -19,12 +19,12 @@ from semantic_kernel.connectors.ai.mistral_ai.settings.mistral_ai_settings import MistralAISettings from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError, ServiceResponseException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class MistralAITextEmbedding(MistralAIBase, EmbeddingGeneratorBase): """Mistral AI Inference Text Embedding Service.""" diff --git a/python/semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py b/python/semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py index 103133af2c9f..68a62e434423 100644 --- a/python/semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py @@ -27,9 +27,9 @@ ) from semantic_kernel.contents import AuthorRole from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent +from semantic_kernel.contents.chat_message_content import CMC_ITEM_TYPES, ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.streaming_chat_message_content import ITEM_TYPES as STREAMING_ITEM_TYPES +from semantic_kernel.contents.streaming_chat_message_content import STREAMING_CMC_ITEM_TYPES as STREAMING_ITEM_TYPES from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent @@ -255,7 +255,7 @@ def 
_parse_tool_calls(self, tool_calls: Sequence[Message.ToolCall] | None, items def _create_chat_message_content_from_chat_response(self, response: ChatResponse) -> ChatMessageContent: """Create a chat message content from the response.""" - items: list[ITEM_TYPES] = [] + items: list[CMC_ITEM_TYPES] = [] if response.message.content: items.append( TextContent( @@ -274,7 +274,7 @@ def _create_chat_message_content_from_chat_response(self, response: ChatResponse def _create_chat_message_content(self, response: Mapping[str, Any]) -> ChatMessageContent: """Create a chat message content from the response.""" - items: list[ITEM_TYPES] = [] + items: list[CMC_ITEM_TYPES] = [] if not (message := response.get("message", None)): raise ServiceInvalidResponseError("No message content found in response.") diff --git a/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_embedding.py b/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_embedding.py index 7b5c0358e400..25a30fdb8afc 100644 --- a/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_embedding.py @@ -23,12 +23,12 @@ from numpy import array, ndarray from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import EmbeddingGeneratorBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class OllamaTextEmbedding(OllamaBase, EmbeddingGeneratorBase): """Ollama embeddings client. diff --git a/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_chat_completion.py b/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_chat_completion.py index 28521975e366..891e6d44f53a 100644 --- a/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_chat_completion.py @@ -28,12 +28,12 @@ ) from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions import ServiceInitializationError, ServiceInvalidExecutionSettingsError -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class OnnxGenAIChatCompletion(ChatCompletionClientBase, OnnxGenAICompletionBase): """OnnxGenAI text completion service.""" diff --git a/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_completion_base.py b/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_completion_base.py index 40ce552ed88b..79bb310bbc6d 100644 --- a/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_completion_base.py +++ b/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_completion_base.py @@ -5,13 +5,18 @@ from collections.abc import AsyncGenerator from typing import Any -import onnxruntime_genai as OnnxRuntimeGenAi - from semantic_kernel.connectors.ai.onnx.onnx_gen_ai_prompt_execution_settings import OnnxGenAIPromptExecutionSettings from semantic_kernel.contents import ImageContent from semantic_kernel.exceptions import ServiceInitializationError, ServiceInvalidResponseError from semantic_kernel.kernel_pydantic import KernelBaseModel +try: + import onnxruntime_genai as OnnxRuntimeGenAi + + ready = True +except ImportError: + 
ready = False + class OnnxGenAICompletionBase(KernelBaseModel): """Base class for OnnxGenAI Completion services.""" @@ -31,6 +36,8 @@ def __init__(self, ai_model_path: str, **kwargs) -> None: Raises: ServiceInitializationError: When model cannot be loaded """ + if not ready: + raise ImportError("onnxruntime-genai is not installed.") try: json_gen_ai_config = os.path.join(ai_model_path + "/genai_config.json") with open(json_gen_ai_config) as file: diff --git a/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_text_completion.py b/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_text_completion.py index 11aa88165be4..0f922397a7b7 100644 --- a/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_text_completion.py +++ b/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_text_completion.py @@ -20,12 +20,12 @@ from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.exceptions import ServiceInitializationError -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class OnnxGenAITextCompletion(TextCompletionClientBase, OnnxGenAICompletionBase): """OnnxGenAI text completion service.""" diff --git a/python/semantic_kernel/connectors/ai/open_ai/__init__.py b/python/semantic_kernel/connectors/ai/open_ai/__init__.py index a3103ae86446..34e11756fdb7 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/__init__.py +++ b/python/semantic_kernel/connectors/ai/open_ai/__init__.py @@ -22,6 +22,12 @@ OpenAIPromptExecutionSettings, OpenAITextPromptExecutionSettings, ) +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_realtime_execution_settings import ( + AzureRealtimeExecutionSettings, + InputAudioTranscription, + OpenAIRealtimeExecutionSettings, + TurnDetection, +) from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_text_to_audio_execution_settings import ( OpenAITextToAudioExecutionSettings, ) @@ -30,12 +36,19 @@ ) from semantic_kernel.connectors.ai.open_ai.services.azure_audio_to_text import AzureAudioToText from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +from semantic_kernel.connectors.ai.open_ai.services.azure_realtime import AzureRealtimeWebsocket from semantic_kernel.connectors.ai.open_ai.services.azure_text_completion import AzureTextCompletion from semantic_kernel.connectors.ai.open_ai.services.azure_text_embedding import AzureTextEmbedding from semantic_kernel.connectors.ai.open_ai.services.azure_text_to_audio import AzureTextToAudio from semantic_kernel.connectors.ai.open_ai.services.azure_text_to_image import AzureTextToImage from semantic_kernel.connectors.ai.open_ai.services.open_ai_audio_to_text import OpenAIAudioToText from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion +from semantic_kernel.connectors.ai.open_ai.services.open_ai_realtime import ( + ListenEvents, + OpenAIRealtimeWebRTC, + OpenAIRealtimeWebsocket, + SendEvents, +) from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion import OpenAITextCompletion from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding import OpenAITextEmbedding from 
semantic_kernel.connectors.ai.open_ai.services.open_ai_text_to_audio import OpenAITextToAudio @@ -55,6 +68,8 @@ "AzureDataSourceParameters", "AzureEmbeddingDependency", "AzureOpenAISettings", + "AzureRealtimeExecutionSettings", + "AzureRealtimeWebsocket", "AzureTextCompletion", "AzureTextEmbedding", "AzureTextToAudio", @@ -63,12 +78,17 @@ "DataSourceFieldsMapping", "DataSourceFieldsMapping", "ExtraBody", + "InputAudioTranscription", + "ListenEvents", "OpenAIAudioToText", "OpenAIAudioToTextExecutionSettings", "OpenAIChatCompletion", "OpenAIChatPromptExecutionSettings", "OpenAIEmbeddingPromptExecutionSettings", "OpenAIPromptExecutionSettings", + "OpenAIRealtimeExecutionSettings", + "OpenAIRealtimeWebRTC", + "OpenAIRealtimeWebsocket", "OpenAISettings", "OpenAITextCompletion", "OpenAITextEmbedding", @@ -77,4 +97,6 @@ "OpenAITextToAudioExecutionSettings", "OpenAITextToImage", "OpenAITextToImageExecutionSettings", + "SendEvents", + "TurnDetection", ] diff --git a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/azure_chat_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/azure_chat_prompt_execution_settings.py index 543b4e2c64a5..3d5199d8bf98 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/azure_chat_prompt_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/azure_chat_prompt_execution_settings.py @@ -39,7 +39,7 @@ class ApiKeyAuthentication(AzureChatRequestBase): """API key authentication.""" type: Annotated[Literal["APIKey", "api_key"], AfterValidator(to_snake)] = "api_key" - key: str | None = None + key: str class SystemAssignedManagedIdentityAuthentication(AzureChatRequestBase): diff --git a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_audio_to_text_execution_settings.py b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_audio_to_text_execution_settings.py index 5be6f5d364fe..c20080bdf44d 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_audio_to_text_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_audio_to_text_execution_settings.py @@ -13,9 +13,10 @@ class OpenAIAudioToTextExecutionSettings(PromptExecutionSettings): """Request settings for OpenAI audio to text services.""" - ai_model_id: str | None = Field(None, serialization_alias="model") + ai_model_id: str | None = Field(default=None, serialization_alias="model") filename: str | None = Field( - None, description="Do not set this manually. It is set by the service based on the audio content." + default=None, + description="Do not set this manually. 
It is set by the service based on the audio content.", ) language: str | None = None prompt: str | None = None diff --git a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py index d2b37d44bb40..b1a241d2a5d3 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py @@ -66,7 +66,7 @@ class OpenAIChatPromptExecutionSettings(OpenAIPromptExecutionSettings): messages: Annotated[ list[dict[str, Any]] | None, Field(description="Do not set this manually. It is set by the service.") ] = None - parallel_tool_calls: bool | None = True + parallel_tool_calls: bool | None = None tools: Annotated[ list[dict[str, Any]] | None, Field( diff --git a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_realtime_execution_settings.py b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_realtime_execution_settings.py new file mode 100644 index 000000000000..2c4fc74738b5 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_realtime_execution_settings.py @@ -0,0 +1,78 @@ +# Copyright (c) Microsoft. All rights reserved. + +from collections.abc import Mapping, Sequence +from typing import Annotated, Any, Literal + +from pydantic import Field + +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.kernel_pydantic import KernelBaseModel + + +class InputAudioTranscription(KernelBaseModel): + """Input audio transcription settings. + + Args: + model: The model to use for transcription, currently only "whisper-1" is supported. + language: The language of the audio, should be in ISO-639-1 format, like 'en'. + prompt: An optional text to guide the model's style or continue a previous audio segment. + The prompt should match the audio language. + """ + + model: Literal["whisper-1"] | None = None + language: str | None = None + prompt: str | None = None + + +class TurnDetection(KernelBaseModel): + """Turn detection settings. + + Args: + type: The type of turn detection, currently only "server_vad" is supported. + threshold: The threshold for voice activity detection, should be between 0 and 1. + prefix_padding_ms: The padding before the detected voice activity, in milliseconds. + silence_duration_ms: The duration of silence to detect the end of a turn, in milliseconds. + create_response: Whether to create a response for each detected turn. 
+ + """ + + type: Literal["server_vad"] = "server_vad" + threshold: Annotated[float | None, Field(ge=0.0, le=1.0)] = None + prefix_padding_ms: Annotated[int | None, Field(ge=0)] = None + silence_duration_ms: Annotated[int | None, Field(ge=0)] = None + create_response: bool | None = None + + +class OpenAIRealtimeExecutionSettings(PromptExecutionSettings): + """Request settings for OpenAI realtime services.""" + + modalities: Sequence[Literal["audio", "text"]] | None = None + ai_model_id: Annotated[str | None, Field(None, serialization_alias="model")] = None + instructions: str | None = None + voice: str | None = None + input_audio_format: Literal["pcm16", "g711_ulaw", "g711_alaw"] | None = None + output_audio_format: Literal["pcm16", "g711_ulaw", "g711_alaw"] | None = None + input_audio_transcription: InputAudioTranscription | Mapping[str, str] | None = None + turn_detection: TurnDetection | Mapping[str, str] | None = None + tools: Annotated[ + list[dict[str, Any]] | None, + Field( + description="Do not set this manually. It is set by the service based " + "on the function choice configuration.", + ), + ] = None + tool_choice: Annotated[ + str | None, + Field( + description="Do not set this manually. It is set by the service based " + "on the function choice configuration.", + ), + ] = None + temperature: Annotated[float | None, Field(ge=0.0, le=2.0)] = None + max_response_output_tokens: Annotated[int | Literal["inf"] | None, Field(gt=0)] = None + + +class AzureRealtimeExecutionSettings(OpenAIRealtimeExecutionSettings): + """Request settings for Azure OpenAI realtime services.""" + + pass diff --git a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_text_to_image_execution_settings.py b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_text_to_image_execution_settings.py index d482e92ec6e8..58a9a9e6a46f 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_text_to_image_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_text_to_image_execution_settings.py @@ -36,7 +36,7 @@ class OpenAITextToImageExecutionSettings(PromptExecutionSettings): """Request settings for OpenAI text to image services.""" prompt: str | None = None - ai_model_id: str | None = Field(None, serialization_alias="model") + ai_model_id: str | None = Field(default=None, serialization_alias="model") size: ImageSize | None = None quality: str | None = None style: str | None = None diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/azure_config_base.py b/python/semantic_kernel/connectors/ai/open_ai/services/azure_config_base.py index da50e4ee56b6..94d8691534fa 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/azure_config_base.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/azure_config_base.py @@ -3,9 +3,11 @@ import logging from collections.abc import Awaitable, Callable, Mapping from copy import copy +from typing import Any from openai import AsyncAzureOpenAI from pydantic import ConfigDict, validate_call +from pydantic_core import Url from semantic_kernel.connectors.ai.open_ai.const import DEFAULT_AZURE_API_VERSION from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import OpenAIHandler, OpenAIModelTypes @@ -27,7 +29,7 @@ def __init__( deployment_name: str, ai_model_type: OpenAIModelTypes, endpoint: HttpsUrl | None = None, - base_url: HttpsUrl | None = None, + base_url: Url | None = None, api_version: str = 
DEFAULT_AZURE_API_VERSION, service_id: str | None = None, api_key: str | None = None, @@ -37,6 +39,7 @@ def __init__( default_headers: Mapping[str, str] | None = None, client: AsyncAzureOpenAI | None = None, instruction_role: str | None = None, + **kwargs: Any, ) -> None: """Internal class for configuring a connection to an Azure OpenAI service. @@ -47,7 +50,7 @@ def __init__( deployment_name (str): Name of the deployment. ai_model_type (OpenAIModelTypes): The type of OpenAI model to deploy. endpoint (HttpsUrl): The specific endpoint URL for the deployment. (Optional) - base_url (HttpsUrl): The base URL for Azure services. (Optional) + base_url (Url): The base URL for Azure services. (Optional) api_version (str): Azure API version. Defaults to the defined DEFAULT_AZURE_API_VERSION. service_id (str): Service ID for the deployment. (Optional) api_key (str): API key for Azure services. (Optional) @@ -59,6 +62,7 @@ def __init__( client (AsyncAzureOpenAI): An existing client to use. (Optional) instruction_role (str | None): The role to use for 'instruction' messages, for example, summarization prompts could use `developer` or `system`. (Optional) + kwargs: Additional keyword arguments. """ # Merge APP_INFO into the headers if it exists @@ -79,18 +83,29 @@ def __init__( "Please provide either api_key, ad_token or ad_token_provider or a client." ) - if not base_url: - if not endpoint: - raise ServiceInitializationError("Please provide an endpoint or a base_url") - base_url = HttpsUrl(f"{str(endpoint).rstrip('/')}/openai/deployments/{deployment_name}") - client = AsyncAzureOpenAI( - base_url=str(base_url), - api_version=api_version, - api_key=api_key, - azure_ad_token=ad_token, - azure_ad_token_provider=ad_token_provider, - default_headers=merged_headers, - ) + if not endpoint and not base_url: + raise ServiceInitializationError("Please provide an endpoint or a base_url") + + args: dict[str, Any] = { + "default_headers": merged_headers, + } + if api_version: + args["api_version"] = api_version + if ad_token: + args["azure_ad_token"] = ad_token + if ad_token_provider: + args["azure_ad_token_provider"] = ad_token_provider + if api_key: + args["api_key"] = api_key + if base_url: + args["base_url"] = str(base_url) + if endpoint and not base_url: + args["azure_endpoint"] = str(endpoint) + # TODO (eavanvalkenburg): Remove the check on model type when the package fixes: https://github.com/openai/openai-python/issues/2120 + if deployment_name and ai_model_type != OpenAIModelTypes.REALTIME: + args["azure_deployment"] = deployment_name + + client = AsyncAzureOpenAI(**args) args = { "ai_model_id": deployment_name, "client": client, @@ -100,7 +115,7 @@ def __init__( args["service_id"] = service_id if instruction_role: args["instruction_role"] = instruction_role - super().__init__(**args) + super().__init__(**args, **kwargs) def to_dict(self) -> dict[str, str]: """Convert the configuration to a dictionary.""" diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/azure_realtime.py b/python/semantic_kernel/connectors/ai/open_ai/services/azure_realtime.py new file mode 100644 index 000000000000..39e5690fb3c1 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/services/azure_realtime.py @@ -0,0 +1,116 @@ +# Copyright (c) Microsoft. All rights reserved. 
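The realtime execution settings introduced a little earlier are plain pydantic models, so they can be built directly and handed to a realtime service. A minimal sketch, with field names taken from the new settings classes and every value purely illustrative:

    from semantic_kernel.connectors.ai.open_ai import (
        InputAudioTranscription,
        OpenAIRealtimeExecutionSettings,
        TurnDetection,
    )

    # Illustrative values only; the field names come from the settings classes above.
    settings = OpenAIRealtimeExecutionSettings(
        modalities=["audio", "text"],
        voice="echo",
        input_audio_format="pcm16",
        output_audio_format="pcm16",
        # Server-side voice activity detection; threshold must be between 0 and 1.
        turn_detection=TurnDetection(threshold=0.8, silence_duration_ms=800, create_response=True),
        # Optional Whisper transcription of the incoming audio stream.
        input_audio_transcription=InputAudioTranscription(model="whisper-1", language="en"),
    )

AzureRealtimeExecutionSettings, defined above as a direct subclass, accepts the same fields.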
+ +import sys +from collections.abc import Callable, Coroutine, Mapping +from typing import Any + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from numpy import ndarray +from openai import AsyncAzureOpenAI +from openai.lib.azure import AsyncAzureADTokenProvider +from pydantic import ValidationError + +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_realtime_execution_settings import ( + AzureRealtimeExecutionSettings, +) +from semantic_kernel.connectors.ai.open_ai.services.azure_config_base import AzureOpenAIConfigBase +from semantic_kernel.connectors.ai.open_ai.services.open_ai_model_types import OpenAIModelTypes +from semantic_kernel.connectors.ai.open_ai.services.open_ai_realtime import OpenAIRealtimeWebsocketBase +from semantic_kernel.connectors.ai.open_ai.settings.azure_open_ai_settings import AzureOpenAISettings +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError +from semantic_kernel.utils.feature_stage_decorator import experimental + + +@experimental +class AzureRealtimeWebsocket(OpenAIRealtimeWebsocketBase, AzureOpenAIConfigBase): + """Azure OpenAI Realtime service using WebSocket protocol.""" + + def __init__( + self, + audio_output_callback: Callable[[ndarray], Coroutine[Any, Any, None]] | None = None, + service_id: str | None = None, + api_key: str | None = None, + deployment_name: str | None = None, + endpoint: str | None = None, + base_url: str | None = None, + api_version: str | None = None, + ad_token: str | None = None, + ad_token_provider: AsyncAzureADTokenProvider | None = None, + token_endpoint: str | None = None, + default_headers: Mapping[str, str] | None = None, + async_client: AsyncAzureOpenAI | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + **kwargs: Any, + ) -> None: + """Initialize an AzureRealtimeWebsocket service. + + Args: + audio_output_callback: The audio output callback, optional. + This should be a coroutine, that takes a ndarray with audio as input. + The goal of this function is to allow you to play the audio with the + least amount of latency possible, because it is called first before further processing. + It can also be set in the `receive` method. + Even when passed, the audio content will still be + added to the receiving queue. + service_id: The service ID for the Azure deployment. (Optional) + api_key: The optional api key. If provided, will override the value in the + env vars or .env file. + deployment_name: The optional deployment. If provided, will override the value + (chat_deployment_name) in the env vars or .env file. + endpoint: The optional deployment endpoint. If provided will override the value + in the env vars or .env file. + base_url: The optional deployment base_url. If provided will override the value + in the env vars or .env file. + api_version: The optional deployment api version. If provided will override the value + in the env vars or .env file. + ad_token: The Azure Active Directory token. (Optional) + ad_token_provider: The Azure Active Directory token provider. (Optional) + token_endpoint: The token endpoint to request an Azure token. (Optional) + default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + async_client: An existing client to use. 
(Optional) + env_file_path: Use the environment settings file as a fallback to + environment variables. (Optional) + env_file_encoding: The encoding of the environment settings file. (Optional) + kwargs: Additional arguments. + """ + try: + azure_openai_settings = AzureOpenAISettings.create( + api_key=api_key, + base_url=base_url, + endpoint=endpoint, + realtime_deployment_name=deployment_name, + api_version=api_version, + token_endpoint=token_endpoint, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + ) + except ValidationError as ex: + raise ServiceInitializationError("Failed to create OpenAI settings.", ex) from ex + if not azure_openai_settings.realtime_deployment_name: + raise ServiceInitializationError("The OpenAI realtime model ID is required.") + super().__init__( + audio_output_callback=audio_output_callback, + deployment_name=azure_openai_settings.realtime_deployment_name, + endpoint=azure_openai_settings.endpoint, + base_url=azure_openai_settings.base_url, + api_version=azure_openai_settings.api_version, + ad_token=ad_token, + ad_token_provider=ad_token_provider, + token_endpoint=azure_openai_settings.token_endpoint, + ai_model_type=OpenAIModelTypes.REALTIME, + service_id=service_id, + default_headers=default_headers, + client=async_client, + **kwargs, + ) + + @override + def get_prompt_execution_settings_class(self) -> type[PromptExecutionSettings]: + return AzureRealtimeExecutionSettings diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py b/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py index 36486b7b8108..f8f0654c741b 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py @@ -13,12 +13,12 @@ from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding_base import OpenAITextEmbeddingBase from semantic_kernel.connectors.ai.open_ai.settings.azure_open_ai_settings import AzureOpenAISettings from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class AzureTextEmbedding(AzureOpenAIConfigBase, OpenAITextEmbeddingBase): """Azure Text Embedding class.""" diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_config_base.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_config_base.py index d3d72795665b..7883be04f4ff 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_config_base.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_config_base.py @@ -3,6 +3,7 @@ import logging from collections.abc import Mapping from copy import copy +from typing import Any from openai import AsyncOpenAI from pydantic import ConfigDict, Field, validate_call @@ -30,6 +31,7 @@ def __init__( default_headers: Mapping[str, str] | None = None, client: AsyncOpenAI | None = None, instruction_role: str | None = None, + **kwargs: Any, ) -> None: """Initialize a client for OpenAI services. @@ -51,6 +53,7 @@ def __init__( client (AsyncOpenAI): An existing OpenAI client, optional. instruction_role (str): The role to use for 'instruction' messages, for example, summarization prompts could use `developer` or `system`. 
(Optional) + kwargs: Additional keyword arguments. """ # Merge APP_INFO into the headers if it exists @@ -76,7 +79,7 @@ def __init__( args["service_id"] = service_id if instruction_role: args["instruction_role"] = instruction_role - super().__init__(**args) + super().__init__(**args, **kwargs) def to_dict(self) -> dict[str, str]: """Create a dict of the service settings.""" diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_model_types.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_model_types.py index 7a1f43da234e..ea2e05deead7 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_model_types.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_model_types.py @@ -12,3 +12,4 @@ class OpenAIModelTypes(Enum): TEXT_TO_IMAGE = "text-to-image" AUDIO_TO_TEXT = "audio-to-text" TEXT_TO_AUDIO = "text-to-audio" + REALTIME = "realtime" diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_realtime.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_realtime.py new file mode 100644 index 000000000000..d6422066394b --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_realtime.py @@ -0,0 +1,1024 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import base64 +import contextlib +import json +import logging +import sys +from collections.abc import AsyncGenerator, Callable, Coroutine, Mapping +from enum import Enum +from typing import TYPE_CHECKING, Any, ClassVar, Literal, cast + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +import numpy as np +from aiohttp import ClientSession +from aiortc import ( + MediaStreamTrack, + RTCConfiguration, + RTCDataChannel, + RTCIceServer, + RTCPeerConnection, + RTCSessionDescription, +) +from av.audio.frame import AudioFrame +from numpy import ndarray +from openai import AsyncOpenAI +from openai._models import construct_type_unchecked +from openai.resources.beta.realtime.realtime import AsyncRealtimeConnection +from openai.types.beta.realtime import ( + ConversationItemCreateEvent, + ConversationItemDeleteEvent, + ConversationItemTruncateEvent, + InputAudioBufferAppendEvent, + InputAudioBufferClearEvent, + InputAudioBufferCommitEvent, + RealtimeClientEvent, + RealtimeServerEvent, + ResponseCancelEvent, + ResponseCreateEvent, + ResponseFunctionCallArgumentsDoneEvent, + SessionUpdateEvent, +) +from openai.types.beta.realtime.response_create_event import Response +from pydantic import Field, PrivateAttr, ValidationError + +from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration +from semantic_kernel.connectors.ai.function_calling_utils import ( + prepare_settings_for_function_calling, +) +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType +from semantic_kernel.connectors.ai.open_ai.services.open_ai_config_base import OpenAIConfigBase +from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import OpenAIHandler +from semantic_kernel.connectors.ai.open_ai.services.open_ai_model_types import OpenAIModelTypes +from semantic_kernel.connectors.ai.open_ai.settings.open_ai_settings import OpenAISettings +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.connectors.ai.realtime_client_base import RealtimeClientBase +from 
semantic_kernel.contents.audio_content import AudioContent +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.contents.realtime_events import ( + RealtimeAudioEvent, + RealtimeEvent, + RealtimeEvents, + RealtimeFunctionCallEvent, + RealtimeFunctionResultEvent, + RealtimeTextEvent, +) +from semantic_kernel.contents.streaming_text_content import StreamingTextContent +from semantic_kernel.contents.text_content import TextContent +from semantic_kernel.exceptions import ContentException +from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError +from semantic_kernel.kernel import Kernel +from semantic_kernel.utils.feature_stage_decorator import experimental + +if TYPE_CHECKING: + from aiortc.mediastreams import MediaStreamTrack + + from semantic_kernel.connectors.ai.function_choice_behavior import ( + FunctionCallChoiceConfiguration, + FunctionChoiceType, + ) + from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings + from semantic_kernel.contents.chat_history import ChatHistory + from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata + +logger: logging.Logger = logging.getLogger(__name__) + +# region constants + + +@experimental +class SendEvents(str, Enum): + """Events that can be sent.""" + + SESSION_UPDATE = "session.update" + INPUT_AUDIO_BUFFER_APPEND = "input_audio_buffer.append" + INPUT_AUDIO_BUFFER_COMMIT = "input_audio_buffer.commit" + INPUT_AUDIO_BUFFER_CLEAR = "input_audio_buffer.clear" + CONVERSATION_ITEM_CREATE = "conversation.item.create" + CONVERSATION_ITEM_TRUNCATE = "conversation.item.truncate" + CONVERSATION_ITEM_DELETE = "conversation.item.delete" + RESPONSE_CREATE = "response.create" + RESPONSE_CANCEL = "response.cancel" + + +@experimental +class ListenEvents(str, Enum): + """Events that can be listened to.""" + + ERROR = "error" + SESSION_CREATED = "session.created" + SESSION_UPDATED = "session.updated" + CONVERSATION_CREATED = "conversation.created" + INPUT_AUDIO_BUFFER_COMMITTED = "input_audio_buffer.committed" + INPUT_AUDIO_BUFFER_CLEARED = "input_audio_buffer.cleared" + INPUT_AUDIO_BUFFER_SPEECH_STARTED = "input_audio_buffer.speech_started" + INPUT_AUDIO_BUFFER_SPEECH_STOPPED = "input_audio_buffer.speech_stopped" + CONVERSATION_ITEM_CREATED = "conversation.item.created" + CONVERSATION_ITEM_INPUT_AUDIO_TRANSCRIPTION_COMPLETED = "conversation.item.input_audio_transcription.completed" + CONVERSATION_ITEM_INPUT_AUDIO_TRANSCRIPTION_FAILED = "conversation.item.input_audio_transcription.failed" + CONVERSATION_ITEM_TRUNCATED = "conversation.item.truncated" + CONVERSATION_ITEM_DELETED = "conversation.item.deleted" + RESPONSE_CREATED = "response.created" + RESPONSE_DONE = "response.done" # contains usage info -> log + RESPONSE_OUTPUT_ITEM_ADDED = "response.output_item.added" + RESPONSE_OUTPUT_ITEM_DONE = "response.output_item.done" + RESPONSE_CONTENT_PART_ADDED = "response.content_part.added" + RESPONSE_CONTENT_PART_DONE = "response.content_part.done" + RESPONSE_TEXT_DELTA = "response.text.delta" + RESPONSE_TEXT_DONE = "response.text.done" + RESPONSE_AUDIO_TRANSCRIPT_DELTA = "response.audio_transcript.delta" + RESPONSE_AUDIO_TRANSCRIPT_DONE = "response.audio_transcript.done" + RESPONSE_AUDIO_DELTA = 
"response.audio.delta" + RESPONSE_AUDIO_DONE = "response.audio.done" + RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA = "response.function_call_arguments.delta" + RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE = "response.function_call_arguments.done" + RATE_LIMITS_UPDATED = "rate_limits.updated" + + +# region utils + + +def update_settings_from_function_call_configuration( + function_choice_configuration: "FunctionCallChoiceConfiguration", + settings: "PromptExecutionSettings", + type: "FunctionChoiceType", +) -> None: + """Update the settings from a FunctionChoiceConfiguration.""" + if ( + function_choice_configuration.available_functions + and hasattr(settings, "tool_choice") + and hasattr(settings, "tools") + ): + settings.tool_choice = type # type: ignore + settings.tools = [ # type: ignore + kernel_function_metadata_to_function_call_format(f) + for f in function_choice_configuration.available_functions + ] + + +def kernel_function_metadata_to_function_call_format( + metadata: "KernelFunctionMetadata", +) -> dict[str, Any]: + """Convert the kernel function metadata to function calling format. + + Function calling in the realtime API, uses a slightly different format than the chat completion API. + See https://platform.openai.com/docs/api-reference/realtime-sessions/create#realtime-sessions-create-tools + for more details. + + TLDR: there is no "function" key, and the function details are at the same level as "type". + """ + return { + "type": "function", + "name": metadata.fully_qualified_name, + "description": metadata.description or "", + "parameters": { + "type": "object", + "properties": { + param.name: param.schema_data for param in metadata.parameters if param.include_in_function_choices + }, + "required": [p.name for p in metadata.parameters if p.is_required and p.include_in_function_choices], + }, + } + + +def _create_openai_realtime_client_event(event_type: SendEvents, **kwargs: Any) -> RealtimeClientEvent: + """Create an OpenAI Realtime client event from a event type and kwargs.""" + match event_type: + case SendEvents.SESSION_UPDATE: + if "session" not in kwargs: + raise ContentException("Session is required for SessionUpdateEvent") + return SessionUpdateEvent( + type=event_type, + session=kwargs.pop("session"), + **kwargs, + ) + case SendEvents.INPUT_AUDIO_BUFFER_APPEND: + if "audio" not in kwargs: + raise ContentException("Audio is required for InputAudioBufferAppendEvent") + return InputAudioBufferAppendEvent( + type=event_type, + **kwargs, + ) + case SendEvents.INPUT_AUDIO_BUFFER_COMMIT: + return InputAudioBufferCommitEvent( + type=event_type, + **kwargs, + ) + case SendEvents.INPUT_AUDIO_BUFFER_CLEAR: + return InputAudioBufferClearEvent( + type=event_type, + **kwargs, + ) + case SendEvents.CONVERSATION_ITEM_CREATE: + if "item" not in kwargs: + raise ContentException("Item is required for ConversationItemCreateEvent") + kwargs["type"] = event_type + return ConversationItemCreateEvent(**kwargs) + case SendEvents.CONVERSATION_ITEM_TRUNCATE: + if "content_index" not in kwargs: + kwargs["content_index"] = 0 + return ConversationItemTruncateEvent( + type=event_type, + **kwargs, + ) + case SendEvents.CONVERSATION_ITEM_DELETE: + if "item_id" not in kwargs: + raise ContentException("Item ID is required for ConversationItemDeleteEvent") + return ConversationItemDeleteEvent( + type=event_type, + **kwargs, + ) + case SendEvents.RESPONSE_CREATE: + if "response" in kwargs: + response: Response | None = Response.model_validate(kwargs.pop("response")) + else: + response = None + return 
ResponseCreateEvent( + type=event_type, + response=response, + **kwargs, + ) + case SendEvents.RESPONSE_CANCEL: + return ResponseCancelEvent( + type=event_type, + **kwargs, + ) + + +# region Base + + +@experimental +class OpenAIRealtimeBase(OpenAIHandler, RealtimeClientBase): + """OpenAI Realtime service.""" + + SUPPORTS_FUNCTION_CALLING: ClassVar[bool] = True + kernel: Kernel | None = None + + _current_settings: PromptExecutionSettings | None = PrivateAttr(default=None) + _call_id_to_function_map: dict[str, str] = PrivateAttr(default_factory=dict) + + async def _parse_event(self, event: RealtimeServerEvent) -> AsyncGenerator[RealtimeEvents, None]: + """Handle all events but audio delta. + + Audio delta has to be handled by the implementation of the protocol as some + protocols have different ways of handling audio. + + We put all event in the output buffer, but after the interpreted one. + so when dealing with them, make sure to check the type of the event, since they + might be of different types. + """ + match event.type: + case ListenEvents.RESPONSE_AUDIO_TRANSCRIPT_DELTA.value: + yield RealtimeTextEvent( + service_type=event.type, + service_event=event, + text=StreamingTextContent( + inner_content=event, + text=event.delta, # type: ignore + choice_index=0, + ), + ) + case ListenEvents.RESPONSE_OUTPUT_ITEM_ADDED.value: + if event.item.type == "function_call" and event.item.call_id and event.item.name: # type: ignore + self._call_id_to_function_map[event.item.call_id] = event.item.name # type: ignore + yield RealtimeEvent(service_type=event.type, service_event=event) + case ListenEvents.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA.value: + yield RealtimeFunctionCallEvent( + service_type=event.type, + service_event=event, + function_call=FunctionCallContent( + id=event.item_id, # type: ignore + name=self._call_id_to_function_map[event.call_id], # type: ignore + arguments=event.delta, # type: ignore + index=event.output_index, # type: ignore + metadata={"call_id": event.call_id}, # type: ignore + inner_content=event, + ), + ) + case ListenEvents.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE.value: + async for parsed_event in self._parse_function_call_arguments_done(event): # type: ignore + if parsed_event: + yield parsed_event + case ListenEvents.ERROR.value: + logger.error("Error received: %s", event.error.model_dump_json()) # type: ignore + yield RealtimeEvent(service_type=event.type, service_event=event) + case ListenEvents.SESSION_CREATED.value | ListenEvents.SESSION_UPDATED.value: + logger.info("Session created or updated, session: %s", event.session.model_dump_json()) # type: ignore + yield RealtimeEvent(service_type=event.type, service_event=event) + case _: + logger.debug(f"Received event: {event}") + yield RealtimeEvent(service_type=event.type, service_event=event) + + @override + async def update_session( + self, + chat_history: ChatHistory | None = None, + settings: PromptExecutionSettings | None = None, + create_response: bool = False, + **kwargs: Any, + ) -> None: + """Update the session in the service. + + Args: + chat_history: Chat history. + settings: Prompt execution settings, if kernel is linked to the service or passed as + Kwargs, it will be used to update the settings for function calling. + create_response: Create a response, get the model to start responding, default is False. + kwargs: Additional arguments, if 'kernel' is passed, it will be used to update the + settings for function calling, others will be ignored. 
+ + """ + if kwargs: + if self._create_kwargs: + kwargs = {**self._create_kwargs, **kwargs} + else: + kwargs = self._create_kwargs or {} + if settings: + self._current_settings = settings + if "kernel" in kwargs: + self.kernel = kwargs["kernel"] + + if self._current_settings: + if self.kernel: + self._current_settings = prepare_settings_for_function_calling( + self._current_settings, + self.get_prompt_execution_settings_class(), + self._update_function_choice_settings_callback(), + kernel=self.kernel, # type: ignore + ) + await self.send( + RealtimeEvent( + service_type=SendEvents.SESSION_UPDATE, + service_event={"settings": self._current_settings}, + ) + ) + + if chat_history and len(chat_history) > 0: + for msg in chat_history.messages: + for item in msg.items: + match item: + case TextContent(): + await self.send( + RealtimeTextEvent(service_type=SendEvents.CONVERSATION_ITEM_CREATE, text=item) + ) + case FunctionCallContent(): + await self.send( + RealtimeFunctionCallEvent( + service_type=SendEvents.CONVERSATION_ITEM_CREATE, function_call=item + ) + ) + case FunctionResultContent(): + await self.send( + RealtimeFunctionResultEvent( + service_type=SendEvents.CONVERSATION_ITEM_CREATE, function_result=item + ) + ) + case _: + logger.error("Unsupported item type: %s", item) + + if create_response or kwargs.get("create_response", False) is True: + await self.send(RealtimeEvent(service_type=SendEvents.RESPONSE_CREATE)) + + async def _parse_function_call_arguments_done( + self, + event: ResponseFunctionCallArgumentsDoneEvent, + ) -> AsyncGenerator[RealtimeEvents | None]: + """Handle response function call done. + + This always yields at least 1 event, either a RealtimeEvent or a RealtimeFunctionResultEvent with the raw event. + + It then also yields any function results both back to the service, through `send` and to the developer. + + """ + # Step 1: check if function calling enabled: + if not self.kernel or ( + self._current_settings + and self._current_settings.function_choice_behavior + and not self._current_settings.function_choice_behavior.auto_invoke_kernel_functions + ): + yield RealtimeEvent(service_type=event.type, service_event=event) + return + # Step 2: check if there is a function that can be found. + try: + plugin_name, function_name = self._call_id_to_function_map.pop(event.call_id, "-").split("-", 1) + except ValueError: + logger.error("Function call needs to have a plugin name and function name") + yield RealtimeEvent(service_type=event.type, service_event=event) + return + + # Step 3: Parse into the function call content, and yield that. 
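The auto-invocation path above only runs when a kernel is attached to the session and the settings allow automatic function invocation. A rough sketch of how a caller might opt in, assuming `client` is an already-created realtime service and `WeatherPlugin` is a developer-supplied plugin (both hypothetical names):

    from semantic_kernel import Kernel
    from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior
    from semantic_kernel.connectors.ai.open_ai import OpenAIRealtimeExecutionSettings

    async def enable_tools(client) -> None:
        kernel = Kernel()
        kernel.add_plugin(WeatherPlugin(), plugin_name="weather")  # hypothetical plugin

        settings = OpenAIRealtimeExecutionSettings(
            function_choice_behavior=FunctionChoiceBehavior.Auto(),
        )
        # The kernel travels through **kwargs into update_session, which prepares
        # the session tools from the kernel's functions before sending the update.
        await client.update_session(settings=settings, kernel=kernel, create_response=True)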
+ item = FunctionCallContent( + id=event.item_id, + plugin_name=plugin_name, + function_name=function_name, + arguments=event.arguments, + index=event.output_index, + metadata={"call_id": event.call_id}, + ) + yield RealtimeFunctionCallEvent( + service_type=ListenEvents.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE, function_call=item, service_event=event + ) + + # Step 4: Invoke the function call + chat_history = ChatHistory() + await self.kernel.invoke_function_call(item, chat_history) + created_output: FunctionResultContent = chat_history.messages[-1].items[0] # type: ignore + # Step 5: Create the function result event + result = RealtimeFunctionResultEvent( + service_type=SendEvents.CONVERSATION_ITEM_CREATE, + function_result=created_output, + ) + # Step 6: send the result to the service and call `create response` + await self.send(result) + await self.send(RealtimeEvent(service_type=SendEvents.RESPONSE_CREATE)) + # Step 7: yield the function result back to the developer as well + yield result + + async def _send(self, event: RealtimeClientEvent) -> None: + """Send an event to the service.""" + raise NotImplementedError + + @override + async def send(self, event: RealtimeEvents, **kwargs: Any) -> None: + match event: + case RealtimeAudioEvent(): + await self._send( + _create_openai_realtime_client_event( + event_type=SendEvents.INPUT_AUDIO_BUFFER_APPEND, audio=event.audio.data_string + ) + ) + case RealtimeTextEvent(): + await self._send( + _create_openai_realtime_client_event( + event_type=SendEvents.CONVERSATION_ITEM_CREATE, + item={ + "type": "message", + "content": [ + { + "type": "input_text", + "text": event.text.text, + } + ], + "role": "user", + }, + ) + ) + case RealtimeFunctionCallEvent(): + await self._send( + _create_openai_realtime_client_event( + event_type=SendEvents.CONVERSATION_ITEM_CREATE, + item={ + "type": "function_call", + "name": event.function_call.name or event.function_call.function_name, + "arguments": "" + if not event.function_call.arguments + else event.function_call.arguments + if isinstance(event.function_call.arguments, str) + else json.dumps(event.function_call.arguments), + "call_id": event.function_call.metadata.get("call_id"), + }, + ) + ) + case RealtimeFunctionResultEvent(): + await self._send( + _create_openai_realtime_client_event( + event_type=SendEvents.CONVERSATION_ITEM_CREATE, + item={ + "type": "function_call_output", + "output": event.function_result.result, + "call_id": event.function_result.metadata.get("call_id"), + }, + ) + ) + case _: + data = event.service_event + match event.service_type: + case SendEvents.SESSION_UPDATE: + if not data: + logger.error("Event data is empty") + return + settings = data.get("settings", None) + if not settings: + logger.error("Event data does not contain 'settings'") + return + try: + settings = self.get_prompt_execution_settings_from_settings(settings) + except Exception as e: + logger.error( + f"Failed to properly create settings from passed settings: {settings}, error: {e}" + ) + return + assert isinstance(settings, self.get_prompt_execution_settings_class()) # nosec + if not settings.ai_model_id: # type: ignore + settings.ai_model_id = self.ai_model_id # type: ignore + await self._send( + _create_openai_realtime_client_event( + event_type=event.service_type, + session=settings.prepare_settings_dict(), + ) + ) + case SendEvents.INPUT_AUDIO_BUFFER_APPEND: + if not data or "audio" not in data: + logger.error("Event data does not contain 'audio'") + return + await self._send( + 
_create_openai_realtime_client_event( + event_type=event.service_type, + audio=data["audio"], + ) + ) + case SendEvents.INPUT_AUDIO_BUFFER_COMMIT: + await self._send(_create_openai_realtime_client_event(event_type=event.service_type)) + case SendEvents.INPUT_AUDIO_BUFFER_CLEAR: + await self._send(_create_openai_realtime_client_event(event_type=event.service_type)) + case SendEvents.CONVERSATION_ITEM_CREATE: + if not data or "item" not in data: + logger.error("Event data does not contain 'item'") + return + content = data["item"] + contents = content.items if isinstance(content, ChatMessageContent) else [content] + for item in contents: + match item: + case TextContent(): + await self._send( + _create_openai_realtime_client_event( + event_type=event.service_type, + item={ + "type": "message", + "content": [ + { + "type": "input_text", + "text": item.text, + } + ], + "role": "user", + }, + ) + ) + case FunctionCallContent(): + await self._send( + _create_openai_realtime_client_event( + event_type=event.service_type, + item={ + "type": "function_call", + "name": item.name or item.function_name, + "arguments": "" + if not item.arguments + else item.arguments + if isinstance(item.arguments, str) + else json.dumps(item.arguments), + "call_id": item.metadata.get("call_id"), + }, + ) + ) + + case FunctionResultContent(): + await self._send( + _create_openai_realtime_client_event( + event_type=event.service_type, + item={ + "type": "function_call_output", + "output": item.result, + "call_id": item.metadata.get("call_id"), + }, + ) + ) + case SendEvents.CONVERSATION_ITEM_TRUNCATE: + if not data or "item_id" not in data: + logger.error("Event data does not contain 'item_id'") + return + await self._send( + _create_openai_realtime_client_event( + event_type=event.service_type, + item_id=data["item_id"], + content_index=0, + audio_end_ms=data.get("audio_end_ms", 0), + ) + ) + case SendEvents.CONVERSATION_ITEM_DELETE: + if not data or "item_id" not in data: + logger.error("Event data does not contain 'item_id'") + return + await self._send( + _create_openai_realtime_client_event( + event_type=event.service_type, + item_id=data["item_id"], + ) + ) + case SendEvents.RESPONSE_CREATE: + await self._send( + _create_openai_realtime_client_event( + event_type=event.service_type, event_id=data.get("event_id", None) if data else None + ) + ) + case SendEvents.RESPONSE_CANCEL: + await self._send( + _create_openai_realtime_client_event( + event_type=event.service_type, + response_id=data.get("response_id", None) if data else None, + ) + ) + + @override + def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: + from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_realtime_execution_settings import ( # noqa + OpenAIRealtimeExecutionSettings, + ) + + return OpenAIRealtimeExecutionSettings + + @override + def _update_function_choice_settings_callback( + self, + ) -> Callable[[FunctionCallChoiceConfiguration, "PromptExecutionSettings", FunctionChoiceType], None]: + return update_settings_from_function_call_configuration + + +# region WebRTC +@experimental +class OpenAIRealtimeWebRTCBase(OpenAIRealtimeBase): + """OpenAI WebRTC Realtime service.""" + + peer_connection: RTCPeerConnection | None = None + data_channel: RTCDataChannel | None = None + audio_track: MediaStreamTrack | None = None + _receive_buffer: asyncio.Queue[RealtimeEvents] = PrivateAttr(default_factory=asyncio.Queue) + + @override + async def receive( + self, + audio_output_callback: Callable[[ndarray], 
Coroutine[Any, Any, None]] | None = None, + **kwargs: Any, + ) -> AsyncGenerator[RealtimeEvents, None]: + if audio_output_callback: + self.audio_output_callback = audio_output_callback + while True: + event = await self._receive_buffer.get() + yield event + + async def _send(self, event: RealtimeClientEvent) -> None: + if not self.data_channel: + logger.error("Data channel not initialized") + return + while self.data_channel.readyState != "open": + await asyncio.sleep(0.1) + try: + self.data_channel.send(event.model_dump_json(exclude_none=True)) + except Exception as e: + logger.error(f"Failed to send event {event} with error: {e!s}") + + @override + async def create_session( + self, + chat_history: "ChatHistory | None" = None, + settings: "PromptExecutionSettings | None" = None, + **kwargs: Any, + ) -> None: + """Create a session in the service.""" + if not self.audio_track: + raise Exception("Audio track not initialized") + self.peer_connection = RTCPeerConnection( + configuration=RTCConfiguration(iceServers=[RTCIceServer(urls="stun:stun.l.google.com:19302")]) + ) + + # track is the audio track being returned from the service + self.peer_connection.add_listener("track", self._on_track) + + # data channel is used to send and receive messages + self.data_channel = self.peer_connection.createDataChannel("oai-events", protocol="json") + self.data_channel.add_listener("message", self._on_data) + + # this is the incoming audio, which sends audio to the service + self.peer_connection.addTransceiver(self.audio_track) + + offer = await self.peer_connection.createOffer() + await self.peer_connection.setLocalDescription(offer) + + try: + ephemeral_token = await self._get_ephemeral_token() + headers = {"Authorization": f"Bearer {ephemeral_token}", "Content-Type": "application/sdp"} + + async with ( + ClientSession() as session, + session.post( + f"{self.client.beta.realtime._client.base_url}realtime?model={self.ai_model_id}", + headers=headers, + data=offer.sdp, + ) as response, + ): + if response.status not in [200, 201]: + error_text = await response.text() + raise Exception(f"OpenAI WebRTC error: {error_text}") + + sdp_answer = await response.text() + answer = RTCSessionDescription(sdp=sdp_answer, type="answer") + await self.peer_connection.setRemoteDescription(answer) + logger.info("Connected to OpenAI WebRTC") + + except Exception as e: + logger.error(f"Failed to connect to OpenAI: {e!s}") + raise + + if settings or chat_history or kwargs: + await self.update_session(settings=settings, chat_history=chat_history, **kwargs) + + @override + async def close_session(self) -> None: + """Close the session in the service.""" + if self.peer_connection: + with contextlib.suppress(asyncio.CancelledError): + await self.peer_connection.close() + self.peer_connection = None + if self.data_channel: + with contextlib.suppress(asyncio.CancelledError): + self.data_channel.close() + self.data_channel = None + + async def _on_track(self, track: "MediaStreamTrack") -> None: + logger.debug(f"Received {track.kind} track from remote") + if track.kind != "audio": + return + while True: + try: + # This is a MediaStreamTrack, so the type is AudioFrame + # this might need to be updated if video becomes part of this + frame: AudioFrame = await track.recv() # type: ignore + except asyncio.CancelledError: + break + except Exception as e: + logger.error(f"Error getting audio frame: {e!s}") + break + + try: + if self.audio_output_callback: + await self.audio_output_callback(frame.to_ndarray()) + + except Exception as e: + 
logger.error(f"Error playing remote audio frame: {e!s}") + try: + await self._receive_buffer.put( + RealtimeAudioEvent( + audio=AudioContent(data=frame.to_ndarray(), data_format="np.int16", inner_content=frame), + service_event=frame, + service_type=ListenEvents.RESPONSE_AUDIO_DELTA, + ), + ) + except Exception as e: + logger.error(f"Error processing remote audio frame: {e!s}") + await asyncio.sleep(0.01) + + async def _on_data(self, data: str) -> None: + """This method is called whenever a data channel message is received. + + The data is parsed into a RealtimeServerEvent (by OpenAI code) and then processed. + Audio data is not send through this channel, use _on_track for that. + """ + try: + event = cast( + RealtimeServerEvent, + construct_type_unchecked(value=json.loads(data), type_=cast(Any, RealtimeServerEvent)), + ) + except Exception as e: + logger.error(f"Failed to parse event {data} with error: {e!s}") + return + async for parsed_event in self._parse_event(event): + await self._receive_buffer.put(parsed_event) + + async def _get_ephemeral_token(self) -> str: + """Get an ephemeral token from OpenAI.""" + headers = {"Authorization": f"Bearer {self.client.api_key}", "Content-Type": "application/json"} + data = {"model": self.ai_model_id, "voice": "echo"} + + try: + async with ( + ClientSession() as session, + session.post( + f"{self.client.beta.realtime._client.base_url}/realtime/sessions", headers=headers, json=data + ) as response, + ): + if response.status not in [200, 201]: + error_text = await response.text() + raise Exception(f"Failed to get ephemeral token: {error_text}") + + result = await response.json() + return result["client_secret"]["value"] + + except Exception as e: + logger.error(f"Failed to get ephemeral token: {e!s}") + raise + + +@experimental +class OpenAIRealtimeWebRTC(OpenAIRealtimeWebRTCBase, OpenAIConfigBase): + """OpenAI Realtime service using WebRTC protocol.""" + + def __init__( + self, + audio_track: "MediaStreamTrack", + audio_output_callback: Callable[[ndarray], Coroutine[Any, Any, None]] | None = None, + ai_model_id: str | None = None, + api_key: str | None = None, + org_id: str | None = None, + service_id: str | None = None, + default_headers: Mapping[str, str] | None = None, + client: AsyncOpenAI | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + **kwargs: Any, + ) -> None: + """Initialize an OpenAIRealtime service. + + Args: + audio_output_callback: The audio output callback, optional. + This should be a coroutine, that takes a ndarray with audio as input. + The goal of this function is to allow you to play the audio with the + least amount of latency possible, because it is called first before further processing. + It can also be set in the `receive` method. + Even when passed, the audio content will still be + added to the receiving queue. + audio_track: The audio track to use for the service, only used by WebRTC. + A default is supplied if not provided. + It can be any class that implements the AudioStreamTrack interface. + ai_model_id (str | None): OpenAI model name, see + https://platform.openai.com/docs/models + service_id (str | None): Service ID tied to the execution settings. + api_key (str | None): The optional API key to use. If provided will override, + the env vars or .env file value. + org_id (str | None): The optional org ID to use. If provided will override, + the env vars or .env file value. + default_headers: The default headers mapping of string keys to + string values for HTTP requests. 
(Optional) + client (Optional[AsyncOpenAI]): An existing client to use. (Optional) + env_file_path (str | None): Use the environment settings file as a fallback to + environment variables. (Optional) + env_file_encoding (str | None): The encoding of the environment settings file. (Optional) + kwargs: Additional arguments. + """ + try: + openai_settings = OpenAISettings.create( + api_key=api_key, + org_id=org_id, + realtime_model_id=ai_model_id, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + ) + except ValidationError as ex: + raise ServiceInitializationError("Failed to create OpenAI settings.", ex) from ex + if not openai_settings.realtime_model_id: + raise ServiceInitializationError("The OpenAI realtime model ID is required.") + if audio_track: + kwargs["audio_track"] = audio_track + super().__init__( + audio_output_callback=audio_output_callback, + ai_model_id=openai_settings.realtime_model_id, + service_id=service_id, + api_key=openai_settings.api_key.get_secret_value() if openai_settings.api_key else None, + org_id=openai_settings.org_id, + ai_model_type=OpenAIModelTypes.REALTIME, + default_headers=default_headers, + client=client, + **kwargs, + ) + + +# region Websocket + + +@experimental +class OpenAIRealtimeWebsocketBase(OpenAIRealtimeBase): + """OpenAI Realtime service.""" + + protocol: ClassVar[Literal["websocket"]] = "websocket" # type: ignore + connection: AsyncRealtimeConnection | None = None + connected: asyncio.Event = Field(default_factory=asyncio.Event) + + @override + async def receive( + self, + audio_output_callback: Callable[[ndarray], Coroutine[Any, Any, None]] | None = None, + **kwargs: Any, + ) -> AsyncGenerator[RealtimeEvents, None]: + if audio_output_callback: + self.audio_output_callback = audio_output_callback + await self.connected.wait() + if not self.connection: + raise ValueError("Connection is not established.") + + async for event in self.connection: + if event.type == ListenEvents.RESPONSE_AUDIO_DELTA.value: + if self.audio_output_callback: + await self.audio_output_callback(np.frombuffer(base64.b64decode(event.delta), dtype=np.int16)) + yield RealtimeAudioEvent( + audio=AudioContent(data=event.delta, data_format="base64", inner_content=event), + service_type=event.type, + service_event=event, + ) + continue + async for realtime_event in self._parse_event(event): + yield realtime_event + + async def _send(self, event: RealtimeClientEvent) -> None: + await self.connected.wait() + if not self.connection: + raise ValueError("Connection is not established.") + try: + await self.connection.send(event) + except Exception as e: + logger.error(f"Error sending response: {e!s}") + + @override + async def create_session( + self, + chat_history: "ChatHistory | None" = None, + settings: "PromptExecutionSettings | None" = None, + **kwargs: Any, + ) -> None: + """Create a session in the service.""" + self.connection = await self.client.beta.realtime.connect(model=self.ai_model_id).enter() + self.connected.set() + if settings or chat_history or kwargs: + await self.update_session(settings=settings, chat_history=chat_history, **kwargs) + + @override + async def close_session(self) -> None: + """Close the session in the service.""" + if self.connected.is_set() and self.connection: + await self.connection.close() + self.connection = None + self.connected.clear() + + +@experimental +class OpenAIRealtimeWebsocket(OpenAIRealtimeWebsocketBase, OpenAIConfigBase): + """OpenAI Realtime service using WebSocket protocol.""" + + def __init__( + self, + 
audio_output_callback: Callable[[ndarray], Coroutine[Any, Any, None]] | None = None, + ai_model_id: str | None = None, + api_key: str | None = None, + org_id: str | None = None, + service_id: str | None = None, + default_headers: Mapping[str, str] | None = None, + client: AsyncOpenAI | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + **kwargs: Any, + ) -> None: + """Initialize an OpenAIRealtime service. + + Args: + audio_output_callback: The audio output callback, optional. + This should be a coroutine, that takes a ndarray with audio as input. + The goal of this function is to allow you to play the audio with the + least amount of latency possible, because it is called first before further processing. + It can also be set in the `receive` method. + Even when passed, the audio content will still be + added to the receiving queue. + ai_model_id (str | None): OpenAI model name, see + https://platform.openai.com/docs/models + service_id (str | None): Service ID tied to the execution settings. + api_key (str | None): The optional API key to use. If provided will override, + the env vars or .env file value. + org_id (str | None): The optional org ID to use. If provided will override, + the env vars or .env file value. + default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + client (Optional[AsyncOpenAI]): An existing client to use. (Optional) + env_file_path (str | None): Use the environment settings file as a fallback to + environment variables. (Optional) + env_file_encoding (str | None): The encoding of the environment settings file. (Optional) + kwargs: Additional arguments. + """ + try: + openai_settings = OpenAISettings.create( + api_key=api_key, + org_id=org_id, + realtime_model_id=ai_model_id, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + ) + except ValidationError as ex: + raise ServiceInitializationError("Failed to create OpenAI settings.", ex) from ex + if not openai_settings.realtime_model_id: + raise ServiceInitializationError("The OpenAI realtime model ID is required.") + super().__init__( + audio_output_callback=audio_output_callback, + ai_model_id=openai_settings.realtime_model_id, + service_id=service_id, + api_key=openai_settings.api_key.get_secret_value() if openai_settings.api_key else None, + org_id=openai_settings.org_id, + ai_model_type=OpenAIModelTypes.REALTIME, + default_headers=default_headers, + client=client, + **kwargs, + ) diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding.py index 8459780b3f5a..0a0f26440923 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding.py @@ -12,14 +12,14 @@ from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding_base import OpenAITextEmbeddingBase from semantic_kernel.connectors.ai.open_ai.settings.open_ai_settings import OpenAISettings from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) T_ = TypeVar("T_", bound="OpenAITextEmbedding") -@experimental_class +@experimental class OpenAITextEmbedding(OpenAIConfigBase, 
OpenAITextEmbeddingBase): """OpenAI Text Embedding class.""" diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding_base.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding_base.py index 9c686335255b..364d6822d819 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding_base.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding_base.py @@ -15,13 +15,13 @@ OpenAIEmbeddingPromptExecutionSettings, ) from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import OpenAIHandler -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -@experimental_class +@experimental class OpenAITextEmbeddingBase(OpenAIHandler, EmbeddingGeneratorBase): """Base class for OpenAI text embedding services.""" diff --git a/python/semantic_kernel/connectors/ai/open_ai/settings/azure_open_ai_settings.py b/python/semantic_kernel/connectors/ai/open_ai/settings/azure_open_ai_settings.py index 8603714804cf..47ebc4c2b7b7 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/settings/azure_open_ai_settings.py +++ b/python/semantic_kernel/connectors/ai/open_ai/settings/azure_open_ai_settings.py @@ -3,6 +3,7 @@ from typing import ClassVar from pydantic import SecretStr +from pydantic_core import Url from semantic_kernel.connectors.ai.open_ai.const import DEFAULT_AZURE_API_VERSION from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError @@ -55,6 +56,12 @@ class AzureOpenAISettings(KernelBaseSettings): Resource Management > Deployments in the Azure portal or, alternatively, under Management > Deployments in Azure OpenAI Studio. (Env var AZURE_OPENAI_TEXT_TO_AUDIO_DEPLOYMENT_NAME) + - realtime_deployment_name: str - The name of the Azure Realtime deployment. This value + will correspond to the custom name you chose for your deployment + when you deployed a model. This value can be found under + Resource Management > Deployments in the Azure portal or, alternatively, + under Management > Deployments in Azure OpenAI Studio. + (Env var AZURE_OPENAI_REALTIME_DEPLOYMENT_NAME) - api_key: SecretStr - The API key for the Azure deployment. This value can be found in the Keys & Endpoint section when examining your resource in the Azure portal. You can use either KEY1 or KEY2. @@ -73,7 +80,7 @@ class AzureOpenAISettings(KernelBaseSettings): - api_version: str | None - The API version to use. The default value is "2024-02-01". (Env var AZURE_OPENAI_API_VERSION) - token_endpoint: str - The token endpoint to use to retrieve the authentication token. - The default value is "https://cognitiveservices.azure.com". + The default value is "https://cognitiveservices.azure.com/.default". 
(Env var AZURE_OPENAI_TOKEN_ENDPOINT) """ @@ -85,11 +92,12 @@ class AzureOpenAISettings(KernelBaseSettings): text_to_image_deployment_name: str | None = None audio_to_text_deployment_name: str | None = None text_to_audio_deployment_name: str | None = None + realtime_deployment_name: str | None = None endpoint: HttpsUrl | None = None - base_url: HttpsUrl | None = None + base_url: Url | None = None api_key: SecretStr | None = None api_version: str = DEFAULT_AZURE_API_VERSION - token_endpoint: str = "https://cognitiveservices.azure.com" + token_endpoint: str = "https://cognitiveservices.azure.com/.default" def get_azure_openai_auth_token(self, token_endpoint: str | None = None) -> str | None: """Retrieve a Microsoft Entra Auth Token for a given token endpoint for the use with Azure OpenAI. @@ -100,7 +108,7 @@ def get_azure_openai_auth_token(self, token_endpoint: str | None = None) -> str The `token_endpoint` argument takes precedence over the `token_endpoint` attribute. Args: - token_endpoint: The token endpoint to use. Defaults to `https://cognitiveservices.azure.com`. + token_endpoint: The token endpoint to use. Defaults to `https://cognitiveservices.azure.com/.default`. Returns: The Azure token or None if the token could not be retrieved. diff --git a/python/semantic_kernel/connectors/ai/open_ai/settings/open_ai_settings.py b/python/semantic_kernel/connectors/ai/open_ai/settings/open_ai_settings.py index 6423a5385a33..7276af4b1f3b 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/settings/open_ai_settings.py +++ b/python/semantic_kernel/connectors/ai/open_ai/settings/open_ai_settings.py @@ -32,6 +32,9 @@ class OpenAISettings(KernelBaseSettings): (Env var OPENAI_AUDIO_TO_TEXT_MODEL_ID) - text_to_audio_model_id: str | None - The OpenAI text to audio model ID to use, for example, jukebox-1. (Env var OPENAI_TEXT_TO_AUDIO_MODEL_ID) + - realtime_model_id: str | None - The OpenAI realtime model ID to use, + for example, gpt-4o-realtime-preview-2024-12-17. + (Env var OPENAI_REALTIME_MODEL_ID) - env_file_path: str | None - if provided, the .env settings are read from this file path location """ @@ -45,3 +48,4 @@ class OpenAISettings(KernelBaseSettings): text_to_image_model_id: str | None = None audio_to_text_model_id: str | None = None text_to_audio_model_id: str | None = None + realtime_model_id: str | None = None diff --git a/python/semantic_kernel/connectors/ai/realtime_client_base.py b/python/semantic_kernel/connectors/ai/realtime_client_base.py new file mode 100644 index 000000000000..3992d116a4f7 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/realtime_client_base.py @@ -0,0 +1,145 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import sys +from abc import ABC, abstractmethod +from collections.abc import AsyncGenerator, Callable, Coroutine +from typing import Any, ClassVar + +if sys.version_info >= (3, 11): + from typing import Self # pragma: no cover +else: + from typing_extensions import Self # pragma: no cover + +from numpy import ndarray +from pydantic import PrivateAttr + +from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.realtime_events import RealtimeEvents +from semantic_kernel.services.ai_service_client_base import AIServiceClientBase +from semantic_kernel.utils.feature_stage_decorator import experimental + + +@experimental +class RealtimeClientBase(AIServiceClientBase, ABC): + """Base class for a realtime client.""" + + SUPPORTS_FUNCTION_CALLING: ClassVar[bool] = False + audio_output_callback: Callable[[ndarray], Coroutine[Any, Any, None]] | None = None + _chat_history: ChatHistory | None = PrivateAttr(default=None) + _settings: PromptExecutionSettings | None = PrivateAttr(default=None) + _create_kwargs: dict[str, Any] | None = PrivateAttr(default=None) + + @abstractmethod + async def send(self, event: RealtimeEvents) -> None: + """Send an event to the service. + + Args: + event: The event to send. + kwargs: Additional arguments. + """ + raise NotImplementedError + + @abstractmethod + def receive( + self, + audio_output_callback: Callable[[ndarray], Coroutine[Any, Any, None]] | None = None, + **kwargs: Any, + ) -> AsyncGenerator[RealtimeEvents, None]: + """Starts listening for messages from the service, generates events. + + Args: + audio_output_callback: The audio output callback, optional. + This should be a coroutine, that takes a ndarray with audio as input. + The goal of this function is to allow you to play the audio with the + least amount of latency possible. + It is called first in both websockets and webrtc. + Even when passed, the audio content will still be + added to the receiving queue. + This can also be set in the constructor. + When supplied here it will override any value in the class. + kwargs: Additional arguments. + """ + raise NotImplementedError + + @abstractmethod + async def create_session( + self, + chat_history: "ChatHistory | None" = None, + settings: "PromptExecutionSettings | None" = None, + **kwargs: Any, + ) -> None: + """Create a session in the service. + + Args: + settings: Prompt execution settings. + chat_history: Chat history. + kwargs: Additional arguments. + """ + raise NotImplementedError + + @abstractmethod + async def update_session( + self, + chat_history: "ChatHistory | None" = None, + settings: "PromptExecutionSettings | None" = None, + **kwargs: Any, + ) -> None: + """Update a session in the service. + + Can be used when using the context manager instead of calling create_session with these same arguments. + + Args: + settings: Prompt execution settings. + chat_history: Chat history. + kwargs: Additional arguments. 
+ """ + raise NotImplementedError + + @abstractmethod + async def close_session(self) -> None: + """Close the session in the service.""" + pass + + def _update_function_choice_settings_callback( + self, + ) -> Callable[[FunctionCallChoiceConfiguration, "PromptExecutionSettings", FunctionChoiceType], None]: + """Return the callback function to update the settings from a function call configuration. + + Override this method to provide a custom callback function to + update the settings from a function call configuration. + """ + return lambda configuration, settings, choice_type: None + + async def __aenter__(self) -> "Self": + """Enter the context manager. + + Default implementation calls the create session method. + """ + await self.create_session(self._chat_history, self._settings) + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb) -> None: + """Exit the context manager.""" + await self.close_session() + + def __call__( + self, + chat_history: "ChatHistory | None" = None, + settings: "PromptExecutionSettings | None" = None, + **kwargs: Any, + ) -> Self: + """Call the service and set the chat history and settings. + + Args: + chat_history: Chat history. + settings: Prompt execution settings. + kwargs: Additional arguments, can include `kernel` or specific settings for the service. + Check the update_session method for the specific service for more details. + """ + self._chat_history = chat_history + self._settings = settings + self._create_kwargs = kwargs + return self diff --git a/python/semantic_kernel/connectors/memory/astradb/astra_client.py b/python/semantic_kernel/connectors/memory/astradb/astra_client.py index 83dcd3b3ce9d..739d8b3cd0d5 100644 --- a/python/semantic_kernel/connectors/memory/astradb/astra_client.py +++ b/python/semantic_kernel/connectors/memory/astradb/astra_client.py @@ -6,7 +6,7 @@ from semantic_kernel.connectors.memory.astradb.utils import AsyncSession from semantic_kernel.exceptions import ServiceResponseException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental from semantic_kernel.utils.telemetry.user_agent import APP_INFO ASTRA_CALLER_IDENTITY: str @@ -14,7 +14,7 @@ ASTRA_CALLER_IDENTITY = f"semantic-kernel/{SEMANTIC_KERNEL_VERSION}" if SEMANTIC_KERNEL_VERSION else "semantic-kernel" -@experimental_class +@experimental class AstraClient: """AstraClient.""" diff --git a/python/semantic_kernel/connectors/memory/astradb/astradb_memory_store.py b/python/semantic_kernel/connectors/memory/astradb/astradb_memory_store.py index e640bb663903..5b6908377cf9 100644 --- a/python/semantic_kernel/connectors/memory/astradb/astradb_memory_store.py +++ b/python/semantic_kernel/connectors/memory/astradb/astradb_memory_store.py @@ -13,7 +13,7 @@ from semantic_kernel.exceptions import MemoryConnectorInitializationError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental MAX_DIMENSIONALITY = 20000 MAX_UPSERT_BATCH_SIZE = 100 @@ -25,7 +25,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class AstraDBMemoryStore(MemoryStoreBase): """A memory store that uses Astra database as the backend.""" diff --git a/python/semantic_kernel/connectors/memory/astradb/astradb_settings.py 
b/python/semantic_kernel/connectors/memory/astradb/astradb_settings.py index e3d190187f4c..637dc2589d1c 100644 --- a/python/semantic_kernel/connectors/memory/astradb/astradb_settings.py +++ b/python/semantic_kernel/connectors/memory/astradb/astradb_settings.py @@ -5,10 +5,10 @@ from pydantic import SecretStr from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class AstraDBSettings(KernelBaseSettings): """AstraDB model settings. diff --git a/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_collection.py b/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_collection.py index d103997db200..dc32336c8004 100644 --- a/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_collection.py +++ b/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_collection.py @@ -39,14 +39,14 @@ VectorStoreInitializationException, VectorStoreOperationException, ) -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) TModel = TypeVar("TModel") -@experimental_class +@experimental class AzureAISearchCollection( VectorSearchBase[str, TModel], VectorizableTextSearchMixin[TModel], diff --git a/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_settings.py b/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_settings.py index 99fc5620d289..6c029c4d84ba 100644 --- a/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_settings.py +++ b/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_settings.py @@ -5,10 +5,10 @@ from pydantic import SecretStr from semantic_kernel.kernel_pydantic import HttpsUrl, KernelBaseSettings -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class AzureAISearchSettings(KernelBaseSettings): """Azure AI Search model settings currently used by the AzureCognitiveSearchMemoryStore connector. 
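The RealtimeClientBase added in realtime_client_base.py above ties __call__, __aenter__ and __aexit__ together: calling the service stores the chat history, settings and extra kwargs, entering the context creates the session with them, and leaving the context closes the session. A minimal usage sketch follows, assuming the OpenAIRealtimeWebsocket service from this patch; the import module path, the play_audio coroutine and the greeting text are illustrative and not prescribed by the change, and the sketch assumes OPENAI_API_KEY and OPENAI_REALTIME_MODEL_ID are available in the environment.

import asyncio

from numpy import ndarray

# Assumed module path for the class added in this patch.
from semantic_kernel.connectors.ai.open_ai.services.open_ai_realtime import OpenAIRealtimeWebsocket
from semantic_kernel.contents.chat_history import ChatHistory


async def play_audio(audio: ndarray) -> None:
    # Invoked as early as possible with raw PCM16 audio to keep playback latency low.
    ...


async def main() -> None:
    service = OpenAIRealtimeWebsocket(audio_output_callback=play_audio)
    history = ChatHistory()
    history.add_user_message("Hello!")

    # __call__ stores the history and settings; __aenter__ forwards them to create_session.
    async with service(chat_history=history) as session:
        async for event in session.receive():
            # RealtimeAudioEvent and the other RealtimeEvents arrive here as they are produced.
            print(event)


asyncio.run(main())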
diff --git a/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_store.py b/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_store.py index 4c4693abb6d7..4df7925a8e6a 100644 --- a/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_store.py +++ b/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_store.py @@ -20,7 +20,7 @@ from semantic_kernel.data.record_definition import VectorStoreRecordDefinition from semantic_kernel.data.vector_storage import VectorStore from semantic_kernel.exceptions import VectorStoreInitializationException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from azure.core.credentials import AzureKeyCredential, TokenCredential @@ -34,7 +34,7 @@ TModel = TypeVar("TModel") -@experimental_class +@experimental class AzureAISearchStore(VectorStore): """Azure AI Search store implementation.""" diff --git a/python/semantic_kernel/connectors/memory/azure_ai_search/utils.py b/python/semantic_kernel/connectors/memory/azure_ai_search/utils.py index 6acd20cb18e1..16ac55a0a79e 100644 --- a/python/semantic_kernel/connectors/memory/azure_ai_search/utils.py +++ b/python/semantic_kernel/connectors/memory/azure_ai_search/utils.py @@ -31,7 +31,7 @@ VectorStoreRecordVectorField, ) from semantic_kernel.exceptions import ServiceInitializationError -from semantic_kernel.utils.experimental_decorator import experimental_function +from semantic_kernel.utils.feature_stage_decorator import experimental from semantic_kernel.utils.telemetry.user_agent import APP_INFO, prepend_semantic_kernel_to_user_agent if TYPE_CHECKING: @@ -77,7 +77,7 @@ def get_search_index_client( ) -@experimental_function +@experimental def data_model_definition_to_azure_ai_search_index( collection_name: str, definition: VectorStoreRecordDefinition, diff --git a/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_ai_search_settings.py b/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_ai_search_settings.py index c4c066407d92..9caeec864898 100644 --- a/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_ai_search_settings.py +++ b/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_ai_search_settings.py @@ -5,10 +5,10 @@ from pydantic import SecretStr from semantic_kernel.kernel_pydantic import HttpsUrl, KernelBaseSettings -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class AzureAISearchSettings(KernelBaseSettings): """Azure AI Search model settings currently used by the AzureCognitiveSearchMemoryStore connector. 
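The other recurring change in these hunks is mechanical: the separate experimental_class and experimental_function decorators from semantic_kernel.utils.experimental_decorator are replaced by a single experimental decorator in semantic_kernel.utils.feature_stage_decorator. A small sketch of the unified usage implied by the updated imports; the class and function names are illustrative only.

from semantic_kernel.utils.feature_stage_decorator import experimental


@experimental
class SampleConnectorSettings:
    """A settings class flagged as experimental, where @experimental_class was used before."""


@experimental
def sample_helper() -> None:
    """A module-level helper flagged as experimental, where @experimental_function was used before."""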
diff --git a/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_cognitive_search_memory_store.py b/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_cognitive_search_memory_store.py index bc201777ec38..f9c04d1dcd18 100644 --- a/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_cognitive_search_memory_store.py +++ b/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_cognitive_search_memory_store.py @@ -33,12 +33,12 @@ from semantic_kernel.exceptions import MemoryConnectorInitializationError, MemoryConnectorResourceNotFound from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class AzureCognitiveSearchMemoryStore(MemoryStoreBase): """Azure Cognitive Search Memory Store.""" diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/__init__.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/__init__.py index d6979ec34c38..b55443addcd4 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/__init__.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/__init__.py @@ -1,5 +1,12 @@ # Copyright (c) Microsoft. All rights reserved. +from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_mongodb_collection import ( + AzureCosmosDBforMongoDBCollection, +) +from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_mongodb_settings import ( + AzureCosmosDBforMongoDBSettings, +) +from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_mongodb_store import AzureCosmosDBforMongoDBStore from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_no_sql_collection import ( AzureCosmosDBNoSQLCollection, ) @@ -14,4 +21,7 @@ "AzureCosmosDBNoSQLCompositeKey", "AzureCosmosDBNoSQLSettings", "AzureCosmosDBNoSQLStore", + "AzureCosmosDBforMongoDBCollection", + "AzureCosmosDBforMongoDBSettings", + "AzureCosmosDBforMongoDBStore", ] diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_collection.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_collection.py new file mode 100644 index 000000000000..89bfd0e1f9f9 --- /dev/null +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_collection.py @@ -0,0 +1,253 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import logging +import sys +from collections.abc import AsyncIterable +from importlib import metadata +from typing import Any, TypeVar + +from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from pydantic import ValidationError +from pymongo import AsyncMongoClient +from pymongo.driver_info import DriverInfo + +from semantic_kernel.connectors.memory.azure_cosmos_db.const import ( + DISTANCE_FUNCTION_MAPPING_MONGODB, + INDEX_KIND_MAPPING_MONGODB, +) +from semantic_kernel.connectors.memory.mongodb_atlas.const import ( + DEFAULT_DB_NAME, + MONGODB_SCORE_FIELD, +) +from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_collection import MongoDBAtlasCollection +from semantic_kernel.data.kernel_search_results import KernelSearchResults +from semantic_kernel.data.record_definition import VectorStoreRecordDefinition +from semantic_kernel.data.record_definition.vector_store_record_fields import VectorStoreRecordDataField +from semantic_kernel.data.vector_search.vector_search_options import VectorSearchOptions +from semantic_kernel.data.vector_search.vector_search_result import VectorSearchResult +from semantic_kernel.exceptions import ( + VectorStoreInitializationException, +) +from semantic_kernel.exceptions.vector_store_exceptions import ( + VectorSearchExecutionException, + VectorStoreModelDeserializationException, +) +from semantic_kernel.utils.feature_stage_decorator import experimental + +logger: logging.Logger = logging.getLogger(__name__) + +TModel = TypeVar("TModel") + + +@experimental +class AzureCosmosDBforMongoDBCollection(MongoDBAtlasCollection): + """Azure Cosmos DB for MongoDB collection.""" + + def __init__( + self, + collection_name: str, + data_model_type: type[TModel], + data_model_definition: VectorStoreRecordDefinition | None = None, + mongo_client: AsyncMongoClient | None = None, + connection_string: str | None = None, + database_name: str | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + **kwargs: Any, + ) -> None: + """Initializes a new instance of the AzureCosmosDBforMongoDBCollection class. + + Args: + data_model_type: The type of the data model. + data_model_definition: The model definition, optional. + collection_name: The name of the collection, optional. + mongo_client: The MongoDB client for interacting with Azure CosmosDB for MongoDB, + used for creating and deleting collections. + connection_string: The connection string for MongoDB Atlas, optional. + Can be read from environment variables. + database_name: The name of the database, will be filled from the env when this is not set. 
+ connection_string: str | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None + **kwargs: Additional keyword arguments + + """ + managed_client = not mongo_client + if mongo_client: + super().__init__( + data_model_type=data_model_type, + data_model_definition=data_model_definition, + mongo_client=mongo_client, + collection_name=collection_name, + database_name=database_name or DEFAULT_DB_NAME, + managed_client=managed_client, + ) + return + + from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_mongodb_settings import ( + AzureCosmosDBforMongoDBSettings, + ) + + try: + settings = AzureCosmosDBforMongoDBSettings.create( + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + connection_string=connection_string, + database_name=database_name, + ) + except ValidationError as exc: + raise VectorStoreInitializationException("Failed to create Azure CosmosDB for MongoDB settings.") from exc + if not settings.connection_string: + raise VectorStoreInitializationException("The Azure CosmosDB for MongoDB connection string is required.") + + mongo_client = AsyncMongoClient( + settings.connection_string.get_secret_value(), + driver=DriverInfo(SEMANTIC_KERNEL_USER_AGENT, metadata.version("semantic-kernel")), + ) + + super().__init__( + data_model_type=data_model_type, + data_model_definition=data_model_definition, + collection_name=collection_name, + mongo_client=mongo_client, + managed_client=managed_client, + database_name=settings.database_name, + ) + + @override + async def create_collection(self, **kwargs) -> None: + """Create a new collection in Azure CosmosDB for MongoDB. + + This first creates a collection, with the kwargs. + Then creates a search index based on the data model definition. + + By the naming convection of MongoDB indexes are created by using the field name + with a underscore. + + Args: + **kwargs: Additional keyword arguments. + These are the additional keyword arguments for creating + vector indexes in Azure Cosmos DB for MongoDB. + And they depend on the kind of index you are creating. + See https://learn.microsoft.com/en-us/azure/cosmos-db/mongodb/vcore/vector-search + for more information. + Other kwargs are passed to the create_collection method. 
+ """ + await self._get_database().create_collection(self.collection_name, **kwargs) + await self._get_database().command(command=self._get_vector_index(**kwargs)) + + def _get_vector_index(self, **kwargs: Any) -> dict[str, Any]: + indexes = [ + {"name": f"{field.name}_", "key": {field.name: 1}} + for field in self.data_model_definition.fields.values() + if isinstance(field, VectorStoreRecordDataField) and (field.is_filterable or field.is_full_text_searchable) + ] + for vector_field in self.data_model_definition.vector_fields: + index_name = f"{vector_field.name}_" + + similarity = ( + DISTANCE_FUNCTION_MAPPING_MONGODB.get(vector_field.distance_function) + if vector_field.distance_function + else "COS" + ) + kind = INDEX_KIND_MAPPING_MONGODB.get(vector_field.index_kind) if vector_field.index_kind else "vector-ivf" + if similarity is None: + raise VectorStoreInitializationException(f"Invalid distance function: {vector_field.distance_function}") + if kind is None: + raise VectorStoreInitializationException(f"Invalid index kind: {vector_field.index_kind}") + index: dict[str, Any] = { + "name": index_name, + "key": {vector_field.name: "cosmosSearch"}, + "cosmosSearchOptions": { + "kind": kind, + "similarity": similarity, + "dimensions": vector_field.dimensions, + }, + } + match kind: + case "vector-diskann": + if "maxDegree" in kwargs: + index["cosmosSearchOptions"]["maxDegree"] = kwargs["maxDegree"] + if "lBuild" in kwargs: + index["cosmosSearchOptions"]["lBuild"] = kwargs["lBuild"] + case "vector-hnsw": + if "m" in kwargs: + index["cosmosSearchOptions"]["m"] = kwargs["m"] + if "efConstruction" in kwargs: + index["cosmosSearchOptions"]["efConstruction"] = kwargs["efConstruction"] + case "vector-ivf": + if "numList" in kwargs: + index["cosmosSearchOptions"]["numList"] = kwargs["numList"] + indexes.append(index) + + return {"createIndexes": self.collection_name, "indexes": indexes} + + @override + async def _inner_vectorized_search( + self, + options: VectorSearchOptions, + vector: list[float | int], + **kwargs: Any, + ) -> KernelSearchResults[VectorSearchResult[TModel]]: + collection = self._get_collection() + vector_search_query: dict[str, Any] = { + "k": options.top + options.skip, + "index": f"{options.vector_field_name}_", + "vector": vector, + "path": options.vector_field_name, + } + if options.filter.filters: + vector_search_query["filter"] = self._build_filter_dict(options.filter) + projection_query: dict[str, int | dict] = { + field: 1 + for field in self.data_model_definition.get_field_names( + include_vector_fields=options.include_vectors, + include_key_field=False, # _id is always included + ) + } + projection_query[MONGODB_SCORE_FIELD] = {"$meta": "searchScore"} + try: + raw_results = await collection.aggregate([ + {"$search": {"cosmosSearch": vector_search_query}}, + {"$project": projection_query}, + ]) + except Exception as exc: + raise VectorSearchExecutionException("Failed to search the collection.") from exc + return KernelSearchResults( + results=self._get_vector_search_results_from_results(raw_results, options), + total_count=None, # no way to get a count before looping through the result cursor + ) + + async def _get_vector_search_results_from_cursor( + self, + filter: dict[str, Any], + projection: dict[str, int | dict], + options: VectorSearchOptions | None = None, + ) -> AsyncIterable[VectorSearchResult[TModel]]: + collection = self._get_collection() + async for result in collection.find( + filter=filter, + projection=projection, + skip=options.skip if options else 0, + 
limit=options.top if options else 0, + ): + try: + record = self.deserialize( + self._get_record_from_result(result), include_vectors=options.include_vectors if options else True + ) + except VectorStoreModelDeserializationException: + raise + except Exception as exc: + raise VectorStoreModelDeserializationException( + f"An error occurred while deserializing the record: {exc}" + ) from exc + score = self._get_score_from_result(result) + if record: + # single records are always returned as single records by the deserializer + yield VectorSearchResult(record=record, score=score) # type: ignore diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_settings.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_settings.py new file mode 100644 index 000000000000..c41443ca13ab --- /dev/null +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_settings.py @@ -0,0 +1,38 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import ClassVar + +from pydantic import SecretStr + +from semantic_kernel.connectors.memory.mongodb_atlas.const import DEFAULT_DB_NAME +from semantic_kernel.kernel_pydantic import KernelBaseSettings +from semantic_kernel.utils.feature_stage_decorator import experimental + + +@experimental +class AzureCosmosDBforMongoDBSettings(KernelBaseSettings): + """Azure CosmosDB for MongoDB settings. + + The settings are first loaded from environment variables with + the prefix 'AZURE_COSMOS_DB_MONGODB_'. + If the environment variables are not found, the settings can + be loaded from a .env file with the encoding 'utf-8'. + If the settings are not found in the .env file, the settings + are ignored; however, validation will fail alerting that the + settings are missing. + + Required settings for prefix 'AZURE_COSMOS_DB_MONGODB_': + - connection_string: The connection string of the Azure CosmosDB for MongoDB account. + This value can be found in the Keys & Endpoint section when examining + your resource from the Azure portal. + (Env var name: AZURE_COSMOS_DB_MONGODB_CONNECTION_STRING) + - database_name: str - The name of the database. Please refer to this documentation + on Azure CosmosDB NoSQL resource model: + https://learn.microsoft.com/en-us/azure/cosmos-db/resource-model + (Env var name: AZURE_COSMOS_DB_MONGODB_DATABASE_NAME) + """ + + env_prefix: ClassVar[str] = "AZURE_COSMOS_DB_MONGODB_" + + connection_string: SecretStr | None = None + database_name: str = DEFAULT_DB_NAME diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_store.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_store.py new file mode 100644 index 000000000000..d3314ad2a93d --- /dev/null +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_store.py @@ -0,0 +1,116 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import sys +from importlib import metadata +from typing import TYPE_CHECKING, Any, TypeVar + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from pydantic import ValidationError +from pymongo import AsyncMongoClient +from pymongo.driver_info import DriverInfo + +from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_mongodb_collection import ( + AzureCosmosDBforMongoDBCollection, +) +from semantic_kernel.connectors.memory.mongodb_atlas.const import DEFAULT_DB_NAME +from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_store import MongoDBAtlasStore +from semantic_kernel.data.record_definition import VectorStoreRecordDefinition +from semantic_kernel.exceptions import VectorStoreInitializationException +from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT + +if TYPE_CHECKING: + from semantic_kernel.data import VectorStoreRecordCollection + +TModel = TypeVar("TModel") + + +@experimental +class AzureCosmosDBforMongoDBStore(MongoDBAtlasStore): + """Azure Cosmos DB for MongoDB store implementation.""" + + def __init__( + self, + connection_string: str | None = None, + database_name: str | None = None, + mongo_client: AsyncMongoClient | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + ) -> None: + """Initializes a new instance of the AzureCosmosDBforMongoDBStore client. + + Args: + connection_string (str): The connection string for Azure CosmosDB for MongoDB, optional. + Can be read from environment variables. + database_name (str): The name of the database, optional. Can be read from environment variables. + mongo_client (MongoClient): The MongoDB client, optional. + env_file_path (str): Use the environment settings file as a fallback + to environment variables. + env_file_encoding (str): The encoding of the environment settings file. + + """ + managed_client: bool = not mongo_client + if mongo_client: + super().__init__( + mongo_client=mongo_client, + managed_client=managed_client, + database_name=database_name or DEFAULT_DB_NAME, + ) + return + from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_mongodb_settings import ( + AzureCosmosDBforMongoDBSettings, + ) + + try: + settings = AzureCosmosDBforMongoDBSettings.create( + env_file_path=env_file_path, + connection_string=connection_string, + database_name=database_name, + env_file_encoding=env_file_encoding, + ) + except ValidationError as exc: + raise VectorStoreInitializationException("Failed to create MongoDB Atlas settings.") from exc + if not settings.connection_string: + raise VectorStoreInitializationException("The connection string is missing.") + + mongo_client = AsyncMongoClient( + settings.connection_string.get_secret_value(), + driver=DriverInfo(SEMANTIC_KERNEL_USER_AGENT, metadata.version("semantic-kernel")), + ) + + super().__init__( + mongo_client=mongo_client, + managed_client=managed_client, + database_name=settings.database_name, + ) + + @override + def get_collection( + self, + collection_name: str, + data_model_type: type[TModel], + data_model_definition: VectorStoreRecordDefinition | None = None, + **kwargs: Any, + ) -> "VectorStoreRecordCollection": + """Get a AzureCosmosDBforMongoDBCollection tied to a collection. + + Args: + collection_name (str): The name of the collection. 
+ data_model_type (type[TModel]): The type of the data model. + data_model_definition (VectorStoreRecordDefinition | None): The model fields, optional. + **kwargs: Additional keyword arguments, passed to the collection constructor. + """ + if collection_name not in self.vector_record_collections: + self.vector_record_collections[collection_name] = AzureCosmosDBforMongoDBCollection( + data_model_type=data_model_type, + data_model_definition=data_model_definition, + mongo_client=self.mongo_client, + collection_name=collection_name, + database_name=self.database_name, + **kwargs, + ) + return self.vector_record_collections[collection_name] diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_base.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_base.py index c9a49ba4e546..2bf712283858 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_base.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_base.py @@ -14,10 +14,10 @@ from semantic_kernel.utils.authentication.async_default_azure_credential_wrapper import ( AsyncDefaultAzureCredentialWrapper, ) -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class AzureCosmosDBNoSQLBase(KernelBaseModel): """An Azure Cosmos DB NoSQL collection stores documents in a Azure Cosmos DB NoSQL account.""" diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_collection.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_collection.py index aa8633ecb54e..41865d5b060c 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_collection.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_collection.py @@ -42,13 +42,13 @@ VectorStoreOperationException, ) from semantic_kernel.kernel_types import OneOrMany -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental TModel = TypeVar("TModel") TKey = TypeVar("TKey", str, AzureCosmosDBNoSQLCompositeKey) -@experimental_class +@experimental class AzureCosmosDBNoSQLCollection( AzureCosmosDBNoSQLBase, VectorSearchBase[TKey, TModel], @@ -178,23 +178,25 @@ def _build_search_text_query(self, options: VectorSearchOptions) -> str: where_clauses = self._build_where_clauses_from_filter(options.filter) contains_clauses = " OR ".join( f"CONTAINS(c.{field}, @search_text)" - for field in self.data_model_definition.fields - if isinstance(field, VectorStoreRecordDataField) and field.is_full_text_searchable + for field, field_def in self.data_model_definition.fields.items() + if isinstance(field_def, VectorStoreRecordDataField) and field_def.is_full_text_searchable ) + if where_clauses: + where_clauses = f" {where_clauses} AND" return ( f"SELECT TOP @top {self._build_select_clause(options.include_vectors)} " # nosec: B608 - f"FROM c WHERE ({contains_clauses}) AND {where_clauses}" # nosec: B608 + f"FROM c WHERE{where_clauses} ({contains_clauses})" # nosec: B608 ) def _build_vector_query(self, options: VectorSearchOptions) -> str: where_clauses = self._build_where_clauses_from_filter(options.filter) if where_clauses: - where_clauses = f"WHERE {where_clauses}" + where_clauses = f"WHERE {where_clauses} " vector_field_name: str = 
self.data_model_definition.try_get_vector_field(options.vector_field_name).name # type: ignore return ( - f"SELECT TOP @top {self._build_select_clause(options.include_vectors)}," # nosec: B608 - f" VectorDistance(c.{vector_field_name}, @vector) AS distance FROM c ORDER " # nosec: B608 - f"BY VectorDistance(c.{vector_field_name}, @vector) {where_clauses}" # nosec: B608 + f"SELECT TOP @top {self._build_select_clause(options.include_vectors)}, " # nosec: B608 + f"VectorDistance(c.{vector_field_name}, @vector) AS distance FROM c " # nosec: B608 + f"{where_clauses}ORDER BY VectorDistance(c.{vector_field_name}, @vector)" # nosec: B608 ) def _build_select_clause(self, include_vectors: bool) -> str: @@ -218,11 +220,24 @@ def _build_where_clauses_from_filter(self, filters: VectorSearchFilter | None) - return "" clauses = [] for filter in filters.filters: + field_def = self.data_model_definition.fields[filter.field_name] match filter: case EqualTo(): - clauses.append(f"c.{filter.field_name} = {filter.value}") + clause = "" + if field_def.property_type in ["int", "float"]: + clause = f"c.{filter.field_name} = {filter.value}" + if field_def.property_type == "str": + clause = f"c.{filter.field_name} = '{filter.value}'" + if field_def.property_type == "list[str]": + filter_value = f"ARRAY_CONTAINS(c.{filter.field_name}, '{filter.value}')" + if field_def.property_type in ["list[int]", "list[float]"]: + filter_value = f"ARRAY_CONTAINS(c.{filter.field_name}, {filter.value})" + clauses.append(clause) case AnyTagsEqualTo(): - clauses.append(f"{filter.value} IN c.{filter.field_name}") + filter_value = filter.value + if field_def.property_type == "list[str]": + filter_value = f"'{filter.value}'" + clauses.append(f"{filter_value} IN c.{filter.field_name}") case _: raise ValueError(f"Unsupported filter: {filter}") return " AND ".join(clauses) diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_composite_key.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_composite_key.py index 6da66cc1cfd0..a0d1f38acd95 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_composite_key.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_composite_key.py @@ -2,10 +2,10 @@ from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class AzureCosmosDBNoSQLCompositeKey(KernelBaseModel): """Azure CosmosDB NoSQL composite key.""" diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_settings.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_settings.py index cbdac0036d13..a30f7cc5cc0d 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_settings.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_settings.py @@ -5,10 +5,10 @@ from pydantic import HttpUrl, SecretStr from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class AzureCosmosDBNoSQLSettings(KernelBaseSettings): """Azure CosmosDB NoSQL settings. 
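The azure_cosmos_db_no_sql_collection.py changes above make the WHERE-clause builder type aware, quoting str values and emitting ARRAY_CONTAINS for list-typed fields, and they move the WHERE clause ahead of ORDER BY in the generated vector query. A small illustration of the resulting query text, assuming a data model with a str field named category and a vector field named embedding (both names are illustrative), and assuming the filter clause can be constructed with field_name and value keywords, as its attribute access in the patch suggests.

from semantic_kernel.data.filter_clauses.equal_to_filter_clause import EqualTo

# A filter clause still only carries a field name and a value; the typing decision
# happens in _build_where_clauses_from_filter based on the field's property_type.
category_filter = EqualTo(field_name="category", value="finance")

# For a str-typed field the clause is now rendered with quotes:
#   c.category = 'finance'
# and _build_vector_query emits the filter before the ORDER BY, roughly:
#   SELECT TOP @top <select clause>, VectorDistance(c.embedding, @vector) AS distance
#   FROM c WHERE c.category = 'finance' ORDER BY VectorDistance(c.embedding, @vector)
print(category_filter.field_name, category_filter.value)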
diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_store.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_store.py index 45ca18b58c55..9d191effa88e 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_store.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_store.py @@ -19,12 +19,12 @@ from semantic_kernel.data.vector_storage.vector_store import VectorStore from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection from semantic_kernel.exceptions import VectorStoreOperationException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental TModel = TypeVar("TModel") -@experimental_class +@experimental class AzureCosmosDBNoSQLStore(AzureCosmosDBNoSQLBase, VectorStore): """A VectorStore implementation that uses Azure CosmosDB NoSQL as the backend storage.""" diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/const.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/const.py index 2bec006b99b5..dfdf4976cda1 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/const.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/const.py @@ -11,12 +11,24 @@ IndexKind.DISK_ANN: "diskANN", } +INDEX_KIND_MAPPING_MONGODB = { + IndexKind.IVF_FLAT: "vector-ivf", + IndexKind.HNSW: "vector-hnsw", + IndexKind.DISK_ANN: "vector-diskann", +} + DISTANCE_FUNCTION_MAPPING = { DistanceFunction.COSINE_SIMILARITY: "cosine", DistanceFunction.DOT_PROD: "dotproduct", DistanceFunction.EUCLIDEAN_DISTANCE: "euclidean", } +DISTANCE_FUNCTION_MAPPING_MONGODB = { + DistanceFunction.COSINE_SIMILARITY: "COS", + DistanceFunction.DOT_PROD: "IP", + DistanceFunction.EUCLIDEAN_DISTANCE: "L2", +} + DATATYPES_MAPPING = { "default": "float32", "float": "float32", diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_memory_store.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_memory_store.py index 40150463b40e..dce5a29b3d04 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_memory_store.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_memory_store.py @@ -16,12 +16,12 @@ from semantic_kernel.exceptions import MemoryConnectorInitializationError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class AzureCosmosDBMemoryStore(MemoryStoreBase): """A memory store that uses AzureCosmosDB for MongoDB vCore. 
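The INDEX_KIND_MAPPING_MONGODB and DISTANCE_FUNCTION_MAPPING_MONGODB tables added to const.py above feed _get_vector_index in the new AzureCosmosDBforMongoDBCollection. A sketch of the createIndexes command that a single HNSW, cosine-similarity vector field should produce under those mappings; the collection name, field name and dimension count are illustrative.

# Expected shape of the command issued by create_collection for one vector field.
expected_command = {
    "createIndexes": "hotels",
    "indexes": [
        {
            "name": "embedding_",  # vector field name plus the trailing-underscore naming convention
            "key": {"embedding": "cosmosSearch"},
            "cosmosSearchOptions": {
                "kind": "vector-hnsw",  # INDEX_KIND_MAPPING_MONGODB[IndexKind.HNSW]
                "similarity": "COS",    # DISTANCE_FUNCTION_MAPPING_MONGODB[DistanceFunction.COSINE_SIMILARITY]
                "dimensions": 1536,
            },
        }
    ],
}
print(expected_command)

HNSW-specific options such as m and efConstruction, when passed as kwargs to create_collection, are folded into cosmosSearchOptions by the same method.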
diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_store_api.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_store_api.py index 47b8d065086c..eb3427cd58ae 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_store_api.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_store_api.py @@ -5,11 +5,11 @@ from numpy import ndarray from semantic_kernel.memory.memory_record import MemoryRecord -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental # Abstract class similar to the original data store that allows API level abstraction -@experimental_class +@experimental class AzureCosmosDBStoreApi(ABC): """AzureCosmosDBStoreApi.""" diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmosdb_settings.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmosdb_settings.py index 212d45788ec7..dc08e27e14e8 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmosdb_settings.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmosdb_settings.py @@ -5,10 +5,10 @@ from pydantic import ConfigDict, Field, SecretStr from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class AzureCosmosDBSettings(KernelBaseSettings): """Azure CosmosDB model settings. @@ -21,7 +21,7 @@ class AzureCosmosDBSettings(KernelBaseSettings): env_prefix: ClassVar[str] = "COSMOSDB_" api: str | None = None - connection_string: SecretStr | None = Field(None, alias="AZCOSMOS_CONNSTR") + connection_string: SecretStr | None = Field(default=None, alias="AZCOSMOS_CONNSTR") model_config = ConfigDict( populate_by_name=True, diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb/mongo_vcore_store_api.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb/mongo_vcore_store_api.py index 338bd9e7a234..aab438fcb833 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmosdb/mongo_vcore_store_api.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb/mongo_vcore_store_api.py @@ -14,10 +14,10 @@ from semantic_kernel.connectors.memory.azure_cosmosdb.azure_cosmos_db_store_api import AzureCosmosDBStoreApi from semantic_kernel.connectors.memory.azure_cosmosdb.utils import CosmosDBSimilarityType, CosmosDBVectorSearchType from semantic_kernel.memory.memory_record import MemoryRecord -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class MongoStoreApi(AzureCosmosDBStoreApi): """MongoStoreApi class for the Azure Cosmos DB Mongo store.""" diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb/utils.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb/utils.py index 8c0cd782e1af..aa931ebac6ae 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmosdb/utils.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb/utils.py @@ -2,10 +2,10 @@ from enum import Enum -from semantic_kernel.utils.experimental_decorator import experimental_function +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_function +@experimental class 
CosmosDBSimilarityType(str, Enum): """Cosmos DB Similarity Type as enumerator.""" @@ -17,7 +17,7 @@ class CosmosDBSimilarityType(str, Enum): """Euclidean distance""" -@experimental_function +@experimental class CosmosDBVectorSearchType(str, Enum): """Cosmos DB Vector Search Type as enumerator.""" diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb_no_sql/azure_cosmosdb_no_sql_memory_store.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb_no_sql/azure_cosmosdb_no_sql_memory_store.py index e40abd9ed5bd..4bca89952828 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmosdb_no_sql/azure_cosmosdb_no_sql_memory_store.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb_no_sql/azure_cosmosdb_no_sql_memory_store.py @@ -15,10 +15,10 @@ from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class AzureCosmosDBNoSQLMemoryStore(MemoryStoreBase): """You can read more about vector search using AzureCosmosDBNoSQL here: https://aka.ms/CosmosVectorSearch.""" diff --git a/python/semantic_kernel/connectors/memory/chroma/__init__.py b/python/semantic_kernel/connectors/memory/chroma/__init__.py index 005203ad8e73..7cdf6cd8bda6 100644 --- a/python/semantic_kernel/connectors/memory/chroma/__init__.py +++ b/python/semantic_kernel/connectors/memory/chroma/__init__.py @@ -1,7 +1,8 @@ # Copyright (c) Microsoft. All rights reserved. +from semantic_kernel.connectors.memory.chroma.chroma import ChromaCollection, ChromaStore from semantic_kernel.connectors.memory.chroma.chroma_memory_store import ( ChromaMemoryStore, ) -__all__ = ["ChromaMemoryStore"] +__all__ = ["ChromaCollection", "ChromaMemoryStore", "ChromaStore"] diff --git a/python/semantic_kernel/connectors/memory/chroma/chroma.py b/python/semantic_kernel/connectors/memory/chroma/chroma.py new file mode 100644 index 000000000000..6a41142e3d52 --- /dev/null +++ b/python/semantic_kernel/connectors/memory/chroma/chroma.py @@ -0,0 +1,376 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import logging +import sys +from collections.abc import Sequence +from typing import Any, ClassVar, Generic, TypeVar + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from chromadb import Client, Collection, QueryResult +from chromadb.api import ClientAPI +from chromadb.config import Settings + +from semantic_kernel.data.const import DistanceFunction +from semantic_kernel.data.filter_clauses.any_tags_equal_to_filter_clause import AnyTagsEqualTo +from semantic_kernel.data.filter_clauses.equal_to_filter_clause import EqualTo +from semantic_kernel.data.kernel_search_results import KernelSearchResults +from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition +from semantic_kernel.data.record_definition.vector_store_record_fields import VectorStoreRecordDataField +from semantic_kernel.data.vector_search.vector_search import VectorSearchBase +from semantic_kernel.data.vector_search.vector_search_options import VectorSearchOptions +from semantic_kernel.data.vector_search.vector_search_result import VectorSearchResult +from semantic_kernel.data.vector_search.vectorized_search import VectorizedSearchMixin +from semantic_kernel.data.vector_storage.vector_store import VectorStore +from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection +from semantic_kernel.exceptions.vector_store_exceptions import ( + VectorStoreInitializationException, + VectorStoreModelValidationError, + VectorStoreOperationException, +) +from semantic_kernel.utils.feature_stage_decorator import experimental + +logger = logging.getLogger(__name__) + +TModel = TypeVar("TModel") + +DISTANCE_FUNCTION_MAP = { + DistanceFunction.COSINE_SIMILARITY: "cosine", + DistanceFunction.EUCLIDEAN_SQUARED_DISTANCE: "l2", + DistanceFunction.DOT_PROD: "ip", +} + + +@experimental +class ChromaCollection( + VectorSearchBase[str, TModel], + VectorizedSearchMixin[TModel], + Generic[TModel], +): + """Chroma vector store collection.""" + + client: ClientAPI + supported_key_types: ClassVar[list[str] | None] = ["str"] + + def __init__( + self, + collection_name: str, + data_model_type: type[object], + data_model_definition: VectorStoreRecordDefinition | None = None, + persist_directory: str | None = None, + client_settings: "Settings | None" = None, + client: "ClientAPI | None" = None, + **kwargs: Any, + ): + """Initialize the Chroma vector store collection.""" + managed_client = not client + if client is None: + settings = client_settings or Settings() + if persist_directory is not None: + settings.is_persistent = True + settings.persist_directory = persist_directory + client = Client(settings) + super().__init__( + collection_name=collection_name, + data_model_type=data_model_type, + data_model_definition=data_model_definition, + client=client, + managed_client=managed_client, + **kwargs, + ) + + def _get_collection(self) -> Collection: + try: + return self.client.get_collection(name=self.collection_name) + except Exception as e: + raise RuntimeError(f"Failed to get collection {self.collection_name}") from e + + @override + async def does_collection_exist(self, **kwargs: Any) -> bool: + """Check if the collection exists.""" + try: + self.client.get_collection(name=self.collection_name) + return True + except Exception: + return False + + @override + async def create_collection(self, **kwargs: Any) -> None: + """Create the collection. 
+ + Sets the distance function if specified in the data model definition. + + Args: + kwargs: Additional arguments are passed to the metadata parameter of the create_collection method. + """ + if self.data_model_definition.vector_fields and self.data_model_definition.vector_fields[0].distance_function: + if self.data_model_definition.vector_fields[0].distance_function not in DISTANCE_FUNCTION_MAP: + raise VectorStoreInitializationException( + f"Distance function {self.data_model_definition.vector_fields[0].distance_function} is not " + "supported." + ) + kwargs["hnsw:space"] = DISTANCE_FUNCTION_MAP[self.data_model_definition.vector_fields[0].distance_function] + if kwargs: + self.client.create_collection(name=self.collection_name, metadata=kwargs) + else: + self.client.create_collection(name=self.collection_name) + + @override + async def delete_collection(self, **kwargs: Any) -> None: + """Delete the collection.""" + try: + self.client.delete_collection(name=self.collection_name) + except ValueError: + logger.info(f"Collection {self.collection_name} could not be deleted because it doesn't exist.") + except Exception as e: + raise VectorStoreOperationException( + f"Failed to delete collection {self.collection_name} with error: {e}" + ) from e + + async def _validate_data_model(self): + super()._validate_data_model() + if len(self.data_model_definition.vector_fields) > 1: + raise VectorStoreModelValidationError( + "Chroma only supports one vector field, but " + f"{len(self.data_model_definition.vector_fields)} were provided." + ) + if self.data_model_definition.vector_fields[0].index_kind != "hnsw": + raise VectorStoreModelValidationError( + "Chroma only supports hnsw index kind, but " + f"{self.data_model_definition.vector_fields[0].index_kind} was provided." 
+ ) + + @override + def _serialize_dicts_to_store_models(self, records: Sequence[dict[str, Any]], **kwargs: Any) -> Sequence[Any]: + vector_field_name = self.data_model_definition.vector_field_names[0] + id_field_name = self.data_model_definition.key_field_name + document_field_name = next( + field.name + for field in self.data_model_definition.fields.values() + if isinstance(field, VectorStoreRecordDataField) and field.embedding_property_name == vector_field_name + ) + store_models = [] + for record in records: + store_model = { + "id": record[id_field_name], + "embedding": record[vector_field_name], + "document": record[document_field_name], + "metadata": { + k: v for k, v in record.items() if k not in [id_field_name, vector_field_name, document_field_name] + }, + } + if store_model["metadata"] == {}: + store_model.pop("metadata") + store_models.append(store_model) + return store_models + + @override + def _deserialize_store_models_to_dicts(self, records: Sequence[Any], **kwargs: Any) -> Sequence[dict[str, Any]]: + vector_field_name = self.data_model_definition.vector_field_names[0] + id_field_name = self.data_model_definition.key_field_name + document_field_name = next( + field.name + for field in self.data_model_definition.fields.values() + if isinstance(field, VectorStoreRecordDataField) and field.embedding_property_name == vector_field_name + ) + # replace back the name of the vector, content and id fields + for record in records: + record[id_field_name] = record.pop("id") + record[vector_field_name] = record.pop("embedding") + record[document_field_name] = record.pop("document") + return records + + @override + async def _inner_upsert( + self, + records: Sequence[Any], + **kwargs: Any, + ) -> Sequence[str]: + upsert_obj = {"ids": []} + for record in records: + upsert_obj["ids"].append(record["id"]) + if "embedding" in record: + if "embeddings" not in upsert_obj: + upsert_obj["embeddings"] = [] + upsert_obj["embeddings"].append(record["embedding"]) + if "document" in record: + if "documents" not in upsert_obj: + upsert_obj["documents"] = [] + upsert_obj["documents"].append(record["document"]) + if "metadata" in record: + if "metadatas" not in upsert_obj: + upsert_obj["metadatas"] = [] + upsert_obj["metadatas"].append(record["metadata"]) + self._get_collection().add(**upsert_obj) + return upsert_obj["ids"] + + @override + async def _inner_get(self, keys: Sequence[str], **kwargs: Any) -> Sequence[Any]: + include_vectors = kwargs.get("include_vectors", True) + results = self._get_collection().get( + ids=keys, + include=["documents", "metadatas", "embeddings"] if include_vectors else ["documents", "metadatas"], + ) + return self._unpack_results(results, include_vectors) + + def _unpack_results( + self, results: QueryResult, include_vectors: bool, include_distances: bool = False + ) -> Sequence[dict[str, Any]]: + try: + if isinstance(results["ids"][0], str): + for k, v in results.items(): + results[k] = [v] + except IndexError: + return [] + records = [] + if include_vectors and include_distances: + for id, document, embedding, metadata, distance in zip( + results["ids"][0], + results["documents"][0], + results["embeddings"][0], + results["metadatas"][0], + results["distances"][0], + ): + record = {"id": id, "embedding": embedding, "document": document, "distance": distance} + if metadata: + record.update(metadata) + records.append(record) + return records + if include_vectors and not include_distances: + for id, document, embedding, metadata in zip( + results["ids"][0], + 
results["documents"][0], + results["embeddings"][0], + results["metadatas"][0], + ): + record = { + "id": id, + "embedding": embedding, + "document": document, + } + if metadata: + record.update(metadata) + records.append(record) + return records + if not include_vectors and include_distances: + for id, document, metadata, distance in zip( + results["ids"][0], results["documents"][0], results["metadatas"][0], results["distances"][0] + ): + record = {"id": id, "document": document, "distance": distance} + if metadata: + record.update(metadata) + records.append(record) + return records + for id, document, metadata in zip( + results["ids"][0], + results["documents"][0], + results["metadatas"][0], + ): + record = { + "id": id, + "document": document, + } + if metadata: + record.update(metadata) + records.append(record) + return records + + @override + async def _inner_delete(self, keys: Sequence[str], **kwargs: Any) -> None: + self._get_collection().delete(ids=keys) + + @override + async def _inner_search( + self, + options: VectorSearchOptions, + search_text: str | None = None, + vectorizable_text: str | None = None, + vector: list[float | int] | None = None, + **kwargs: Any, + ) -> KernelSearchResults[VectorSearchResult[TModel]]: + where = self._parse_filter(options) + args = { + "n_results": options.top, + "include": ["documents", "metadatas", "embeddings", "distances"] + if options.include_vectors + else ["documents", "metadatas", "distances"], + } + if where: + args["where"] = where + if vector is not None: + args["query_embeddings"] = vector + results = self._get_collection().query(**args) + records = self._unpack_results(results, options.include_vectors, include_distances=True) + return KernelSearchResults( + results=self._get_vector_search_results_from_results(records), total_count=len(records) + ) + + @override + def _get_record_from_result(self, result: Any) -> Any: + return result + + @override + def _get_score_from_result(self, result: Any) -> float | None: + return result["distance"] + + def _parse_filter(self, options: VectorSearchOptions) -> dict[str, Any] | None: + if options.filter is None or not options.filter.filters: + return None + filter_expression = {"$and": []} + for filter in options.filter.filters: + match filter: + case EqualTo(): + filter_expression["$and"].append({filter.field_name: {"$eq": filter.value}}) + case AnyTagsEqualTo(): + filter_expression["$and"].append({filter.field_name: {"$in": filter.value}}) + if len(filter_expression["$and"]) == 1: + return filter_expression["$and"][0] + return filter_expression + + +@experimental +class ChromaStore(VectorStore): + """Chroma vector store.""" + + client: ClientAPI + + def __init__( + self, + persist_directory: str | None = None, + client_settings: "Settings | None" = None, + client: ClientAPI | None = None, + **kwargs: Any, + ): + """Initialize the Chroma vector store.""" + managed_client = not client + settings = client_settings or Settings() + if persist_directory is not None: + settings.is_persistent = True + settings.persist_directory = persist_directory + if client is None: + client = Client(settings) + super().__init__(client=client, managed_client=managed_client, **kwargs) + + @override + def get_collection( + self, + collection_name: str, + data_model_type: type[object], + data_model_definition: "VectorStoreRecordDefinition | None" = None, + **kwargs: "Any", + ) -> VectorStoreRecordCollection: + """Get a vector record store.""" + return ChromaCollection( + client=self.client, + 
collection_name=collection_name, + data_model_type=data_model_type, + data_model_definition=data_model_definition, + **kwargs, + ) + + @override + async def list_collection_names(self, **kwargs) -> Sequence[str]: + return self.client.list_collections() diff --git a/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py b/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py index 0ff69f3fcd8c..188b73498dc7 100644 --- a/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py +++ b/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py @@ -15,7 +15,7 @@ from semantic_kernel.exceptions import ServiceInitializationError, ServiceResourceNotFoundError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: import chromadb @@ -25,7 +25,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class ChromaMemoryStore(MemoryStoreBase): """ChromaMemoryStore provides an interface to store and retrieve data using ChromaDB.""" diff --git a/python/semantic_kernel/connectors/memory/in_memory/const.py b/python/semantic_kernel/connectors/memory/in_memory/const.py index daefb15497fc..24fd25d7568c 100644 --- a/python/semantic_kernel/connectors/memory/in_memory/const.py +++ b/python/semantic_kernel/connectors/memory/in_memory/const.py @@ -17,5 +17,4 @@ DistanceFunction.MANHATTAN: cityblock, DistanceFunction.HAMMING: hamming, DistanceFunction.DOT_PROD: dot, - "default": cosine, } diff --git a/python/semantic_kernel/connectors/memory/in_memory/in_memory_collection.py b/python/semantic_kernel/connectors/memory/in_memory/in_memory_collection.py index 789faceb8611..95e2e537f9ee 100644 --- a/python/semantic_kernel/connectors/memory/in_memory/in_memory_collection.py +++ b/python/semantic_kernel/connectors/memory/in_memory/in_memory_collection.py @@ -4,18 +4,17 @@ from collections.abc import AsyncIterable, Callable, Mapping, Sequence from typing import Any, ClassVar, TypeVar -from pydantic import Field - -from semantic_kernel.data.filter_clauses.any_tags_equal_to_filter_clause import AnyTagsEqualTo -from semantic_kernel.data.filter_clauses.equal_to_filter_clause import EqualTo - if sys.version_info >= (3, 12): from typing import override # pragma: no cover else: from typing_extensions import override # pragma: no cover +from pydantic import Field + from semantic_kernel.connectors.memory.in_memory.const import DISTANCE_FUNCTION_MAP -from semantic_kernel.data.const import DistanceFunction +from semantic_kernel.data.const import DISTANCE_FUNCTION_DIRECTION_HELPER, DistanceFunction +from semantic_kernel.data.filter_clauses.any_tags_equal_to_filter_clause import AnyTagsEqualTo +from semantic_kernel.data.filter_clauses.equal_to_filter_clause import EqualTo from semantic_kernel.data.filter_clauses.filter_clause_base import FilterClauseBase from semantic_kernel.data.kernel_search_results import KernelSearchResults from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition @@ -29,6 +28,7 @@ from semantic_kernel.data.vector_search.vectorized_search import VectorizedSearchMixin from semantic_kernel.exceptions import VectorSearchExecutionException, VectorStoreModelValidationError from semantic_kernel.kernel_types import OneOrMany +from 
semantic_kernel.utils.list_handler import empty_generator KEY_TYPES = str | int | float @@ -149,7 +149,10 @@ async def _inner_search_vectorized( raise ValueError("Vector field name must be provided in options for vector search.") field = options.vector_field_name assert isinstance(self.data_model_definition.fields.get(field), VectorStoreRecordVectorField) # nosec - distance_metric = self.data_model_definition.fields.get(field).distance_function or "default" # type: ignore + distance_metric = ( + self.data_model_definition.fields.get(field).distance_function # type: ignore + or DistanceFunction.COSINE_DISTANCE + ) distance_func = DISTANCE_FUNCTION_MAP[distance_metric] for key, record in self._get_filtered_records(options).items(): @@ -160,10 +163,13 @@ async def _inner_search_vectorized( distance_func, invert_score=distance_metric == DistanceFunction.COSINE_SIMILARITY, ) - if distance_metric in [DistanceFunction.COSINE_SIMILARITY, DistanceFunction.DOT_PROD]: - sorted_records = dict(sorted(return_records.items(), key=lambda item: item[1], reverse=True)) - else: - sorted_records = dict(sorted(return_records.items(), key=lambda item: item[1])) + sorted_records = dict( + sorted( + return_records.items(), + key=lambda item: item[1], + reverse=DISTANCE_FUNCTION_DIRECTION_HELPER[distance_metric](1, 0), + ) + ) if sorted_records: return KernelSearchResults( results=self._get_vector_search_results_from_results( @@ -171,7 +177,7 @@ async def _inner_search_vectorized( ), total_count=len(return_records) if options and options.include_total_count else None, ) - return KernelSearchResults(results=None) + return KernelSearchResults(results=empty_generator()) async def _generate_return_list( self, return_records: dict[KEY_TYPES, float], options: VectorSearchOptions | None diff --git a/python/semantic_kernel/connectors/memory/in_memory/in_memory_store.py b/python/semantic_kernel/connectors/memory/in_memory/in_memory_store.py index 2d8c5b6b7d9f..4828820053dd 100644 --- a/python/semantic_kernel/connectors/memory/in_memory/in_memory_store.py +++ b/python/semantic_kernel/connectors/memory/in_memory/in_memory_store.py @@ -12,14 +12,14 @@ from semantic_kernel.connectors.memory.in_memory.in_memory_collection import InMemoryVectorCollection from semantic_kernel.data import VectorStore, VectorStoreRecordCollection, VectorStoreRecordDefinition -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) TModel = TypeVar("TModel") -@experimental_class +@experimental class InMemoryVectorStore(VectorStore): """Create a In Memory Vector Store.""" diff --git a/python/semantic_kernel/connectors/memory/milvus/milvus_memory_store.py b/python/semantic_kernel/connectors/memory/milvus/milvus_memory_store.py index b3a5fe3275e7..d834b0c76e79 100644 --- a/python/semantic_kernel/connectors/memory/milvus/milvus_memory_store.py +++ b/python/semantic_kernel/connectors/memory/milvus/milvus_memory_store.py @@ -10,7 +10,7 @@ from semantic_kernel.exceptions import ServiceResourceNotFoundError, ServiceResponseException from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.experimental_decorator import experimental_class, experimental_function +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) @@ -48,7 +48,7 @@ ] 
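The in-memory search change above replaces the hard-coded metric check with DISTANCE_FUNCTION_DIRECTION_HELPER, so similarity metrics sort descending and distance metrics ascending. A minimal, self-contained sketch of that idea follows; the mapping below is an illustrative stand-in, not the library's actual table.

    # Direction-aware sorting of scored results (illustrative stand-in mapping).
    import operator

    DIRECTION_HELPER = {
        "cosine_similarity": operator.gt,   # higher is better, sort descending
        "dot_prod": operator.gt,
        "cosine_distance": operator.lt,     # lower is better, sort ascending
        "euclidean_distance": operator.lt,
    }

    def sort_scores(scores: dict[str, float], metric: str) -> dict[str, float]:
        """Order scored keys best-first using the comparison operator for the metric."""
        return dict(
            sorted(
                scores.items(),
                key=lambda item: item[1],
                # helper(1, 0) is True for "greater is better" metrics, hence reverse=True
                reverse=DIRECTION_HELPER[metric](1, 0),
            )
        )

    print(sort_scores({"a": 0.2, "b": 0.9}, "cosine_similarity"))   # b first
    print(sort_scores({"a": 0.2, "b": 0.9}, "euclidean_distance"))  # a first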
-@experimental_function +@experimental def memoryrecord_to_milvus_dict(mem: MemoryRecord) -> dict[str, Any]: """Convert a memoryrecord into a dict. @@ -69,7 +69,7 @@ def memoryrecord_to_milvus_dict(mem: MemoryRecord) -> dict[str, Any]: return ret_dict -@experimental_function +@experimental def milvus_dict_to_memoryrecord(milvus_dict: dict[str, Any]) -> MemoryRecord: """Convert Milvus search result dict into MemoryRecord. @@ -96,7 +96,7 @@ def milvus_dict_to_memoryrecord(milvus_dict: dict[str, Any]) -> MemoryRecord: ) -@experimental_function +@experimental def create_fields(dimensions: int) -> list[FieldSchema]: """Create the fields for the Milvus collection.""" return [ @@ -144,7 +144,7 @@ def create_fields(dimensions: int) -> list[FieldSchema]: ] -@experimental_class +@experimental class MilvusMemoryStore(MemoryStoreBase): """Memory store based on Milvus.""" diff --git a/python/semantic_kernel/connectors/memory/mongodb_atlas/__init__.py b/python/semantic_kernel/connectors/memory/mongodb_atlas/__init__.py index 3e3c3775c990..bbaea131089b 100644 --- a/python/semantic_kernel/connectors/memory/mongodb_atlas/__init__.py +++ b/python/semantic_kernel/connectors/memory/mongodb_atlas/__init__.py @@ -1,8 +1,15 @@ # Copyright (c) Microsoft. All rights reserved. +from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_collection import ( + MongoDBAtlasCollection, +) from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_memory_store import ( MongoDBAtlasMemoryStore, ) from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_settings import MongoDBAtlasSettings -__all__ = ["MongoDBAtlasMemoryStore", "MongoDBAtlasSettings"] +__all__ = [ + "MongoDBAtlasCollection", + "MongoDBAtlasMemoryStore", + "MongoDBAtlasSettings", +] diff --git a/python/semantic_kernel/connectors/memory/mongodb_atlas/const.py b/python/semantic_kernel/connectors/memory/mongodb_atlas/const.py new file mode 100644 index 000000000000..5954e03c7bdc --- /dev/null +++ b/python/semantic_kernel/connectors/memory/mongodb_atlas/const.py @@ -0,0 +1,16 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Final + +from semantic_kernel.data.const import DistanceFunction + +DISTANCE_FUNCTION_MAPPING: Final[dict[DistanceFunction, str]] = { + DistanceFunction.EUCLIDEAN_DISTANCE: "euclidean", + DistanceFunction.COSINE_SIMILARITY: "cosine", + DistanceFunction.DOT_PROD: "dotProduct", +} + +MONGODB_ID_FIELD: Final[str] = "_id" +MONGODB_SCORE_FIELD: Final[str] = "score" +DEFAULT_DB_NAME = "default" +DEFAULT_SEARCH_INDEX_NAME = "default" diff --git a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_collection.py b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_collection.py new file mode 100644 index 000000000000..84723a4be323 --- /dev/null +++ b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_collection.py @@ -0,0 +1,325 @@ +# Copyright (c) Microsoft. All rights reserved. 
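The new const.py above maps DistanceFunction members to the similarity names Atlas Search expects ("euclidean", "cosine", "dotProduct"). A short sketch of how one vector index entry can be derived from such a mapping; the field name and dimensions are placeholders, and the string keys stand in for the DistanceFunction enum.

    from typing import Any

    DISTANCE_FUNCTION_MAPPING = {
        "euclidean_distance": "euclidean",
        "cosine_similarity": "cosine",
        "dot_prod": "dotProduct",
    }

    def vector_index_entry(path: str, dimensions: int, distance_function: str) -> dict[str, Any]:
        """Build the dict Atlas expects for one vector field in a vectorSearch index."""
        if distance_function not in DISTANCE_FUNCTION_MAPPING:
            raise ValueError(f"Unsupported distance function: {distance_function}")
        return {
            "type": "vector",
            "path": path,
            "numDimensions": dimensions,
            "similarity": DISTANCE_FUNCTION_MAPPING[distance_function],
        }

    print(vector_index_entry("embedding", 1536, "cosine_similarity"))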
+ +import logging +import sys +from collections.abc import Sequence +from importlib import metadata +from typing import Any, ClassVar, Generic, TypeVar + +from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT + +if sys.version_info >= (3, 11): + from typing import Self # pragma: no cover +else: + from typing_extensions import Self # pragma: no cover + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from pydantic import ValidationError +from pymongo import AsyncMongoClient, ReplaceOne +from pymongo.asynchronous.collection import AsyncCollection +from pymongo.asynchronous.database import AsyncDatabase +from pymongo.driver_info import DriverInfo +from pymongo.operations import SearchIndexModel + +from semantic_kernel.connectors.memory.mongodb_atlas.const import ( + DEFAULT_DB_NAME, + DEFAULT_SEARCH_INDEX_NAME, + MONGODB_ID_FIELD, + MONGODB_SCORE_FIELD, +) +from semantic_kernel.connectors.memory.mongodb_atlas.utils import create_vector_field +from semantic_kernel.data.filter_clauses import AnyTagsEqualTo, EqualTo +from semantic_kernel.data.kernel_search_results import KernelSearchResults +from semantic_kernel.data.record_definition import VectorStoreRecordDefinition +from semantic_kernel.data.record_definition.vector_store_record_fields import VectorStoreRecordDataField +from semantic_kernel.data.vector_search import ( + VectorSearchFilter, + VectorSearchOptions, +) +from semantic_kernel.data.vector_search.vector_search import VectorSearchBase +from semantic_kernel.data.vector_search.vector_search_result import VectorSearchResult +from semantic_kernel.data.vector_search.vectorized_search import VectorizedSearchMixin +from semantic_kernel.exceptions import ( + VectorSearchExecutionException, + VectorStoreInitializationException, + VectorStoreOperationException, +) +from semantic_kernel.utils.feature_stage_decorator import experimental + +logger: logging.Logger = logging.getLogger(__name__) + +TModel = TypeVar("TModel") + + +@experimental +class MongoDBAtlasCollection( + VectorSearchBase[str, TModel], + VectorizedSearchMixin[TModel], + Generic[TModel], +): + """MongoDB Atlas collection implementation.""" + + mongo_client: AsyncMongoClient + database_name: str + index_name: str + supported_key_types: ClassVar[list[str] | None] = ["str"] + supported_vector_types: ClassVar[list[str] | None] = ["float", "int"] + + def __init__( + self, + collection_name: str, + data_model_type: type[TModel], + data_model_definition: VectorStoreRecordDefinition | None = None, + index_name: str | None = None, + mongo_client: AsyncMongoClient | None = None, + connection_string: str | None = None, + database_name: str | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + **kwargs: Any, + ) -> None: + """Initializes a new instance of the MongoDBAtlasCollection class. + + Args: + data_model_type: The type of the data model. + data_model_definition: The model definition, optional. + collection_name: The name of the collection, optional. + mongo_client: The MongoDB client for interacting with MongoDB Atlas, + used for creating and deleting collections. + index_name: The name of the index to use for searching, when not passed, will use _idx. + connection_string: The connection string for MongoDB Atlas, optional. + Can be read from environment variables. + database_name: The name of the database, will be filled from the env when this is not set. 
+            env_file_path: Use the environment settings file as a fallback to environment variables.
+            env_file_encoding: The encoding of the environment settings file.
+            **kwargs: Additional keyword arguments.
+        """
+        managed_client = kwargs.get("managed_client", not mongo_client)
+        if mongo_client:
+            super().__init__(
+                data_model_type=data_model_type,
+                data_model_definition=data_model_definition,
+                mongo_client=mongo_client,
+                collection_name=collection_name,
+                database_name=database_name or DEFAULT_DB_NAME,
+                index_name=index_name or DEFAULT_SEARCH_INDEX_NAME,
+                managed_client=managed_client,
+            )
+            return
+
+        from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_settings import MongoDBAtlasSettings
+
+        try:
+            mongodb_atlas_settings = MongoDBAtlasSettings.create(
+                env_file_path=env_file_path,
+                env_file_encoding=env_file_encoding,
+                connection_string=connection_string,
+                database_name=database_name,
+                index_name=index_name,
+            )
+        except ValidationError as exc:
+            raise VectorStoreInitializationException("Failed to create MongoDB Atlas settings.") from exc
+
+        mongo_client = AsyncMongoClient(
+            mongodb_atlas_settings.connection_string.get_secret_value(),
+            driver=DriverInfo(SEMANTIC_KERNEL_USER_AGENT, metadata.version("semantic-kernel")),
+        )
+
+        super().__init__(
+            data_model_type=data_model_type,
+            data_model_definition=data_model_definition,
+            collection_name=collection_name,
+            mongo_client=mongo_client,
+            managed_client=managed_client,
+            database_name=mongodb_atlas_settings.database_name,
+            index_name=mongodb_atlas_settings.index_name,
+        )
+
+    def _get_database(self) -> AsyncDatabase:
+        """Get the database.
+
+        If you need control over things like read preference, you can override this method.
+ """ + return self.mongo_client.get_database(self.database_name).get_collection(self.collection_name) + + @override + async def _inner_upsert( + self, + records: Sequence[Any], + **kwargs: Any, + ) -> Sequence[str]: + operations = [] + ids = [] + for record in records: + operations.append( + ReplaceOne( + filter={MONGODB_ID_FIELD: record[MONGODB_ID_FIELD]}, + replacement=record, + upsert=True, + ) + ) + ids.append(record[MONGODB_ID_FIELD]) + result = await self._get_collection().bulk_write(operations, ordered=False) + return [str(value) for key, value in result.upserted_ids.items()] + + @override + async def _inner_get(self, keys: Sequence[str], **kwargs: Any) -> Sequence[dict[str, Any]]: + result = self._get_collection().find({MONGODB_ID_FIELD: {"$in": keys}}) + return await result.to_list(length=len(keys)) + + @override + async def _inner_delete(self, keys: Sequence[str], **kwargs: Any) -> None: + collection = self._get_collection() + await collection.delete_many({MONGODB_ID_FIELD: {"$in": keys}}) + + def _replace_key_field(self, record: dict[str, Any]) -> dict[str, Any]: + if self._key_field_name == MONGODB_ID_FIELD: + return record + return { + MONGODB_ID_FIELD: record.pop(self._key_field_name, None), + **record, + } + + def _reset_key_field(self, record: dict[str, Any]) -> dict[str, Any]: + if self._key_field_name == MONGODB_ID_FIELD: + return record + return { + self._key_field_name: record.pop(MONGODB_ID_FIELD, None), + **record, + } + + @override + def _serialize_dicts_to_store_models(self, records: Sequence[dict[str, Any]], **kwargs: Any) -> Sequence[Any]: + return [self._replace_key_field(record) for record in records] + + @override + def _deserialize_store_models_to_dicts(self, records: Sequence[Any], **kwargs: Any) -> Sequence[dict[str, Any]]: + return [self._reset_key_field(record) for record in records] + + @override + async def create_collection(self, **kwargs) -> None: + """Create a new collection in MongoDB. + + This first creates a collection, with the kwargs. + Then creates a search index based on the data model definition. + + Args: + **kwargs: Additional keyword arguments. + """ + collection = await self._get_database().create_collection(self.collection_name, **kwargs) + await collection.create_search_index(self._create_index_definition()) + + def _create_index_definition(self) -> SearchIndexModel: + """Create an index definition. + + Returns: + SearchIndexModel: The index definition. 
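        For illustration, a definition for a model with one vector field, one filterable
        data field, and a string key might look like this (field names are hypothetical):

            {
                "fields": [
                    {"type": "vector", "path": "embedding", "numDimensions": 1536, "similarity": "cosine"},
                    {"path": "category", "type": "filter"},
                    {"path": "id", "type": "filter"},
                ]
            }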
+ """ + vector_fields = [create_vector_field(field) for field in self.data_model_definition.vector_fields] + data_fields = [ + {"path": field.name, "type": "filter"} + for field in self.data_model_definition.fields + if isinstance(field, VectorStoreRecordDataField) and (field.is_filterable or field.is_full_text_searchable) + ] + key_field = [{"path": self.data_model_definition.key_field.name, "type": "filter"}] + return SearchIndexModel( + type="vectorSearch", name=self.index_name, definition={"fields": vector_fields + data_fields + key_field} + ) + + @override + async def does_collection_exist(self, **kwargs) -> bool: + return bool(await self._get_database().list_collection_names(filter={"name": self.collection_name})) + + @override + async def delete_collection(self, **kwargs) -> None: + await self._get_database().drop_collection(self.collection_name, **kwargs) + + @override + async def _inner_search( + self, + options: VectorSearchOptions, + search_text: str | None = None, + vectorizable_text: str | None = None, + vector: list[float | int] | None = None, + **kwargs: Any, + ) -> KernelSearchResults[VectorSearchResult[TModel]]: + if vector is not None: + return await self._inner_vectorized_search(options, vector, **kwargs) + raise VectorStoreOperationException("Vector is required for search.") + + async def _inner_vectorized_search( + self, + options: VectorSearchOptions, + vector: list[float | int], + **kwargs: Any, + ) -> KernelSearchResults[VectorSearchResult[TModel]]: + collection = self._get_collection() + vector_search_query: dict[str, Any] = { + "limit": options.top + options.skip, + "index": f"{options.vector_field_name}_", + "queryVector": vector, + "path": options.vector_field_name, + } + if options.filter.filters: + vector_search_query["filter"] = self._build_filter_dict(options.filter) + + projection_query: dict[str, int | dict] = { + field: 1 + for field in self.data_model_definition.get_field_names( + include_vector_fields=options.include_vectors, + include_key_field=False, # _id is always included + ) + } + projection_query[MONGODB_SCORE_FIELD] = {"$meta": "vectorSearchScore"} + try: + raw_results = await collection.aggregate([ + {"$vectorSearch": vector_search_query}, + {"$project": projection_query}, + ]) + except Exception as exc: + raise VectorSearchExecutionException("Failed to search the collection.") from exc + return KernelSearchResults( + results=self._get_vector_search_results_from_results(raw_results, options), + total_count=None, # no way to get a count before looping through the result cursor + ) + + def _build_filter_dict(self, search_filter: VectorSearchFilter) -> dict[str, Any]: + """Create the filter dictionary based on the filters.""" + filter_dict = {} + for filter in search_filter.filters: + if isinstance(filter, EqualTo): + filter_dict[filter.field_name] = filter.value + elif isinstance(filter, AnyTagsEqualTo): + filter_dict[filter.field_name] = {"$in": filter.value} + return filter_dict + + @override + def _get_record_from_result(self, result: dict[str, Any]) -> dict[str, Any]: + return result + + @override + def _get_score_from_result(self, result: dict[str, Any]) -> float | None: + return result.get(MONGODB_SCORE_FIELD) + + @override + async def __aexit__(self, exc_type, exc_value, traceback) -> None: + """Exit the context manager.""" + if self.managed_client: + await self.mongo_client.close() + + async def __aenter__(self) -> Self: + """Enter the context manager.""" + await self.mongo_client.aconnect() + return self diff --git 
a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_memory_store.py b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_memory_store.py index 9c57c3341d28..9e46c04ad818 100644 --- a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_memory_store.py +++ b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_memory_store.py @@ -22,12 +22,12 @@ from semantic_kernel.exceptions.memory_connector_exceptions import MemoryConnectorInitializationError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class MongoDBAtlasMemoryStore(MemoryStoreBase): """Memory Store for MongoDB Atlas Vector Search Connections.""" diff --git a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_settings.py b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_settings.py index 0eec5591d15f..eaefaeb17936 100644 --- a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_settings.py +++ b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_settings.py @@ -4,18 +4,22 @@ from pydantic import SecretStr -from semantic_kernel.connectors.memory.mongodb_atlas.utils import DEFAULT_DB_NAME, DEFAULT_SEARCH_INDEX_NAME +from semantic_kernel.connectors.memory.mongodb_atlas.const import DEFAULT_DB_NAME, DEFAULT_SEARCH_INDEX_NAME from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class MongoDBAtlasSettings(KernelBaseSettings): """MongoDB Atlas model settings. Args: - connection_string: str - MongoDB Atlas connection string (Env var MONGODB_ATLAS_CONNECTION_STRING) + - database_name: str - MongoDB Atlas database name, defaults to 'default' + (Env var MONGODB_ATLAS_DATABASE_NAME) + - index_name: str - MongoDB Atlas search index name, defaults to 'default' + (Env var MONGODB_ATLAS_INDEX_NAME) """ env_prefix: ClassVar[str] = "MONGODB_ATLAS_" diff --git a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_store.py b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_store.py new file mode 100644 index 000000000000..8e97476fbb69 --- /dev/null +++ b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_store.py @@ -0,0 +1,145 @@ +# Copyright (c) Microsoft. All rights reserved. 
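The settings docstring above now covers database_name and index_name alongside the connection string. A minimal sketch of resolving those settings, either explicitly or from the MONGODB_ATLAS_* environment variables; all values below are placeholders.

    import os

    from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_settings import MongoDBAtlasSettings

    # Option 1: pass values directly; they take precedence over the environment.
    settings = MongoDBAtlasSettings.create(
        connection_string="mongodb+srv://user:pass@cluster.example.mongodb.net",
        database_name="my_db",
        index_name="my_index",
    )

    # Option 2: rely on the MONGODB_ATLAS_ environment prefix.
    os.environ["MONGODB_ATLAS_CONNECTION_STRING"] = "mongodb+srv://user:pass@cluster.example.mongodb.net"
    os.environ["MONGODB_ATLAS_DATABASE_NAME"] = "my_db"    # defaults to 'default' when unset
    os.environ["MONGODB_ATLAS_INDEX_NAME"] = "my_index"    # defaults to 'default' when unset
    settings = MongoDBAtlasSettings.create()

    print(settings.database_name, settings.index_name)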
+ +import logging +import sys +from importlib import metadata +from typing import TYPE_CHECKING, Any, TypeVar + +if sys.version_info >= (3, 11): + from typing import Self # pragma: no cover +else: + from typing_extensions import Self # pragma: no cover + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from pydantic import ValidationError +from pymongo import AsyncMongoClient +from pymongo.asynchronous.database import AsyncDatabase +from pymongo.driver_info import DriverInfo + +from semantic_kernel.connectors.memory.mongodb_atlas.const import DEFAULT_DB_NAME +from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_collection import ( + MongoDBAtlasCollection, +) +from semantic_kernel.data.record_definition import VectorStoreRecordDefinition +from semantic_kernel.data.vector_storage import VectorStore +from semantic_kernel.exceptions import VectorStoreInitializationException +from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT + +if TYPE_CHECKING: + from semantic_kernel.data import VectorStoreRecordCollection + + +logger: logging.Logger = logging.getLogger(__name__) + +TModel = TypeVar("TModel") + + +@experimental +class MongoDBAtlasStore(VectorStore): + """MongoDB Atlas store implementation.""" + + mongo_client: AsyncMongoClient + database_name: str + + def __init__( + self, + connection_string: str | None = None, + database_name: str | None = None, + mongo_client: AsyncMongoClient | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + **kwargs: Any, + ) -> None: + """Initializes a new instance of the MongoDBAtlasStore client. + + Args: + connection_string: The connection string for MongoDB Atlas, optional. + Can be read from environment variables. + database_name: The name of the database, optional. Can be read from environment variables. + mongo_client: The MongoDB client, optional. + env_file_path: Use the environment settings file as a fallback + to environment variables. + env_file_encoding: The encoding of the environment settings file. + kwargs: Additional keyword arguments. 
+ """ + managed_client = kwargs.get("managed_client", not mongo_client) + if mongo_client: + super().__init__( + mongo_client=mongo_client, + managed_client=managed_client, + database_name=database_name or DEFAULT_DB_NAME, + ) + return + from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_settings import ( + MongoDBAtlasSettings, + ) + + try: + mongodb_atlas_settings = MongoDBAtlasSettings.create( + env_file_path=env_file_path, + connection_string=connection_string, + database_name=database_name, + env_file_encoding=env_file_encoding, + ) + except ValidationError as exc: + raise VectorStoreInitializationException("Failed to create MongoDB Atlas settings.") from exc + if not mongodb_atlas_settings.connection_string: + raise VectorStoreInitializationException("The connection string is missing.") + + mongo_client = AsyncMongoClient( + mongodb_atlas_settings.connection_string.get_secret_value(), + driver=DriverInfo(SEMANTIC_KERNEL_USER_AGENT, metadata.version("semantic-kernel")), + ) + + super().__init__( + mongo_client=mongo_client, + managed_client=managed_client, + database_name=mongodb_atlas_settings.database_name, + ) + + @override + def get_collection( + self, + collection_name: str, + data_model_type: type[TModel], + data_model_definition: VectorStoreRecordDefinition | None = None, + **kwargs: Any, + ) -> "VectorStoreRecordCollection": + """Get a MongoDBAtlasCollection tied to a collection. + + Args: + collection_name (str): The name of the collection. + data_model_type (type[TModel]): The type of the data model. + data_model_definition (VectorStoreRecordDefinition | None): The model fields, optional. + **kwargs: Additional keyword arguments, passed to the collection constructor. + """ + if collection_name not in self.vector_record_collections: + self.vector_record_collections[collection_name] = MongoDBAtlasCollection( + data_model_type=data_model_type, + data_model_definition=data_model_definition, + mongo_client=self.mongo_client, + collection_name=collection_name, + database_name=self.database_name, + **kwargs, + ) + return self.vector_record_collections[collection_name] + + @override + async def list_collection_names(self, **kwargs: Any) -> list[str]: + database: AsyncDatabase = self.mongo_client.get_database(self.database_name) + return await database.list_collection_names() + + async def __aexit__(self, exc_type, exc_value, traceback) -> None: + """Exit the context manager.""" + if self.managed_client: + await self.mongo_client.close() + + async def __aenter__(self) -> Self: + """Enter the context manager.""" + await self.mongo_client.aconnect() + return self diff --git a/python/semantic_kernel/connectors/memory/mongodb_atlas/utils.py b/python/semantic_kernel/connectors/memory/mongodb_atlas/utils.py index cb415f45377c..f05b94b45782 100644 --- a/python/semantic_kernel/connectors/memory/mongodb_atlas/utils.py +++ b/python/semantic_kernel/connectors/memory/mongodb_atlas/utils.py @@ -1,11 +1,17 @@ # Copyright (c) Microsoft. All rights reserved. 
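The new MongoDBAtlasStore above connects its AsyncMongoClient in __aenter__ and only closes it in __aexit__ when it created the client itself. A minimal usage sketch, assuming a reachable cluster (connection details are placeholders); get_collection would then hand back a MongoDBAtlasCollection bound to the same client.

    import asyncio

    from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_store import MongoDBAtlasStore

    async def main() -> None:
        store = MongoDBAtlasStore(
            connection_string="mongodb+srv://user:pass@cluster.example.mongodb.net",
            database_name="my_db",
        )
        # __aenter__ connects the client; __aexit__ closes it because the store owns it here.
        async with store:
            print(await store.list_collection_names())

    asyncio.run(main())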
from numpy import array +from pymongo.operations import SearchIndexModel +from semantic_kernel.connectors.memory.mongodb_atlas.const import DISTANCE_FUNCTION_MAPPING +from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition +from semantic_kernel.data.record_definition.vector_store_record_fields import ( + VectorStoreRecordDataField, + VectorStoreRecordVectorField, +) +from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError from semantic_kernel.memory.memory_record import MemoryRecord -DEFAULT_DB_NAME = "default" -DEFAULT_SEARCH_INDEX_NAME = "default" NUM_CANDIDATES_SCALAR = 10 MONGODB_FIELD_ID = "_id" @@ -66,3 +72,44 @@ def memory_record_to_mongo_document(record: MemoryRecord) -> dict: MONGODB_FIELD_EMBEDDING: record._embedding.tolist(), MONGODB_FIELD_TIMESTAMP: record._timestamp, } + + +def create_vector_field(field: VectorStoreRecordVectorField) -> dict: + """Create a vector field. + + Args: + field (VectorStoreRecordVectorField): The vector field. + + Returns: + dict: The vector field. + """ + if field.distance_function not in DISTANCE_FUNCTION_MAPPING: + raise ServiceInitializationError(f"Invalid distance function: {field.distance_function}") + return { + "type": "vector", + "numDimensions": field.dimensions, + "path": field.name, + "similarity": DISTANCE_FUNCTION_MAPPING[field.distance_function], + } + + +def create_index_definition(record_definition: VectorStoreRecordDefinition, index_name: str) -> SearchIndexModel: + """Create an index definition. + + Args: + record_definition (VectorStoreRecordDefinition): The record definition. + index_name (str): The index name. + + Returns: + SearchIndexModel: The index definition. + """ + vector_fields = [create_vector_field(field) for field in record_definition.vector_fields] + data_fields = [ + {"path": field.name, "type": "filter"} + for field in record_definition.fields + if isinstance(field, VectorStoreRecordDataField) and (field.is_filterable or field.is_full_text_searchable) + ] + key_field = [{"path": record_definition.key_field.name, "type": "filter"}] + return SearchIndexModel( + type="vectorSearch", name=index_name, definition={"fields": vector_fields + data_fields + key_field} + ) diff --git a/python/semantic_kernel/connectors/memory/pinecone/pinecone_memory_store.py b/python/semantic_kernel/connectors/memory/pinecone/pinecone_memory_store.py index 25f72fe05481..0638f053e407 100644 --- a/python/semantic_kernel/connectors/memory/pinecone/pinecone_memory_store.py +++ b/python/semantic_kernel/connectors/memory/pinecone/pinecone_memory_store.py @@ -18,7 +18,7 @@ from semantic_kernel.exceptions.memory_connector_exceptions import MemoryConnectorInitializationError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental # Limitations set by Pinecone at https://docs.pinecone.io/reference/known-limitations MAX_DIMENSIONALITY = 20000 @@ -31,7 +31,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class PineconeMemoryStore(MemoryStoreBase): """A memory store that uses Pinecone as the backend.""" diff --git a/python/semantic_kernel/connectors/memory/pinecone/pinecone_settings.py b/python/semantic_kernel/connectors/memory/pinecone/pinecone_settings.py index db2cd99ef88b..03b3cf43b212 100644 --- 
a/python/semantic_kernel/connectors/memory/pinecone/pinecone_settings.py +++ b/python/semantic_kernel/connectors/memory/pinecone/pinecone_settings.py @@ -5,10 +5,10 @@ from pydantic import SecretStr from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class PineconeSettings(KernelBaseSettings): """Pinecone model settings. diff --git a/python/semantic_kernel/connectors/memory/postgres/constants.py b/python/semantic_kernel/connectors/memory/postgres/constants.py index 6c08ef2052e6..3121b70ef72b 100644 --- a/python/semantic_kernel/connectors/memory/postgres/constants.py +++ b/python/semantic_kernel/connectors/memory/postgres/constants.py @@ -5,6 +5,10 @@ # Limitation based on pgvector documentation https://github.com/pgvector/pgvector#what-if-i-want-to-index-vectors-with-more-than-2000-dimensions MAX_DIMENSIONALITY = 2000 +# The name of the column that returns distance value in the database. +# It is used in the similarity search query. Must not conflict with model property. +DISTANCE_COLUMN_NAME = "sk_pg_distance" + # Environment Variables PGHOST_ENV_VAR = "PGHOST" PGPORT_ENV_VAR = "PGPORT" diff --git a/python/semantic_kernel/connectors/memory/postgres/postgres_collection.py b/python/semantic_kernel/connectors/memory/postgres/postgres_collection.py index 6de863646dc3..5e30e8e923f1 100644 --- a/python/semantic_kernel/connectors/memory/postgres/postgres_collection.py +++ b/python/semantic_kernel/connectors/memory/postgres/postgres_collection.py @@ -1,40 +1,53 @@ # Copyright (c) Microsoft. All rights reserved. import logging +import random +import string import sys -from collections.abc import Sequence -from typing import Any, ClassVar, TypeVar - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover +from collections.abc import AsyncGenerator, Sequence +from typing import Any, ClassVar, Generic, TypeVar from psycopg import sql from psycopg_pool import AsyncConnectionPool from pydantic import PrivateAttr -from semantic_kernel.connectors.memory.postgres.constants import DEFAULT_SCHEMA, MAX_DIMENSIONALITY +from semantic_kernel.connectors.memory.postgres.constants import ( + DEFAULT_SCHEMA, + DISTANCE_COLUMN_NAME, + MAX_DIMENSIONALITY, +) from semantic_kernel.connectors.memory.postgres.postgres_settings import PostgresSettings from semantic_kernel.connectors.memory.postgres.utils import ( convert_dict_to_row, convert_row_to_dict, + get_vector_distance_ops_str, get_vector_index_ops_str, python_type_to_postgres, ) -from semantic_kernel.data.const import IndexKind +from semantic_kernel.data.const import DistanceFunction, IndexKind +from semantic_kernel.data.filter_clauses.any_tags_equal_to_filter_clause import AnyTagsEqualTo +from semantic_kernel.data.filter_clauses.equal_to_filter_clause import EqualTo +from semantic_kernel.data.kernel_search_results import KernelSearchResults from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition from semantic_kernel.data.record_definition.vector_store_record_fields import ( + VectorStoreRecordField, VectorStoreRecordKeyField, VectorStoreRecordVectorField, ) -from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection -from semantic_kernel.exceptions import ( - 
VectorStoreModelValidationError, - VectorStoreOperationException, -) +from semantic_kernel.data.vector_search.vector_search import VectorSearchBase +from semantic_kernel.data.vector_search.vector_search_filter import VectorSearchFilter +from semantic_kernel.data.vector_search.vector_search_options import VectorSearchOptions +from semantic_kernel.data.vector_search.vector_search_result import VectorSearchResult +from semantic_kernel.data.vector_search.vectorized_search import VectorizedSearchMixin +from semantic_kernel.exceptions import VectorStoreModelValidationError, VectorStoreOperationException +from semantic_kernel.exceptions.vector_store_exceptions import VectorSearchExecutionException from semantic_kernel.kernel_types import OneOrMany -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover TKey = TypeVar("TKey", str, int) TModel = TypeVar("TModel") @@ -42,14 +55,19 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class -class PostgresCollection(VectorStoreRecordCollection[TKey, TModel]): +@experimental +class PostgresCollection( + VectorSearchBase[TKey, TModel], + VectorizedSearchMixin[TModel], + Generic[TKey, TModel], +): """PostgreSQL collection implementation.""" connection_pool: AsyncConnectionPool | None = None db_schema: str = DEFAULT_SCHEMA supported_key_types: ClassVar[list[str] | None] = ["str", "int"] supported_vector_types: ClassVar[list[str] | None] = ["float"] + _distance_column_name: str = PrivateAttr(DISTANCE_COLUMN_NAME) _settings: PostgresSettings = PrivateAttr() """Postgres settings""" @@ -84,26 +102,52 @@ def __init__( data_model_definition=data_model_definition, connection_pool=connection_pool, db_schema=db_schema, + # This controls whether the connection pool is managed by the collection + # in the __aenter__ and __aexit__ methods. + managed_client=connection_pool is None, ) self._settings = settings or PostgresSettings.create( env_file_path=env_file_path, env_file_encoding=env_file_encoding ) + @override + def model_post_init(self, __context: object | None = None) -> None: + """Post-initialization of the model. + + In addition to the base class implementation, this method resets the distance column name + to avoid collisions if necessary. + """ + super().model_post_init(__context) + + distance_column_name = DISTANCE_COLUMN_NAME + tries = 0 + while distance_column_name in self.data_model_definition.fields: + # Reset the distance column name, ensuring no collision with existing model fields + # Avoid bandit B311 - random is not used for a security/cryptographic purpose + suffix = "".join(random.choices(string.ascii_lowercase + string.digits, k=8)) # nosec B311 + distance_column_name = f"{DISTANCE_COLUMN_NAME}_{suffix}" + tries += 1 + if tries > 10: + raise VectorStoreModelValidationError("Unable to generate a unique distance column name.") + self._distance_column_name = distance_column_name + + # region: VectorStoreRecordCollection implementation + @override async def __aenter__(self) -> "PostgresCollection": # If the connection pool was not provided, create a new one. 
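model_post_init above guards against the reserved distance column name colliding with a field on the data model by appending a random suffix until a free name is found. A self-contained sketch of that collision-avoidance idea (the helper name is hypothetical):

    import random
    import string

    DISTANCE_COLUMN_NAME = "sk_pg_distance"

    def pick_distance_column_name(model_field_names: set[str], tries: int = 10) -> str:
        """Return a column name that does not clash with any model field."""
        name = DISTANCE_COLUMN_NAME
        for _ in range(tries):
            if name not in model_field_names:
                return name
            # random is fine here; the suffix only needs to be unlikely to collide, not secure
            suffix = "".join(random.choices(string.ascii_lowercase + string.digits, k=8))  # nosec B311
            name = f"{DISTANCE_COLUMN_NAME}_{suffix}"
        raise ValueError("Unable to generate a unique distance column name.")

    print(pick_distance_column_name({"id", "content", "embedding"}))         # sk_pg_distance
    print(pick_distance_column_name({"id", "sk_pg_distance", "embedding"}))  # sk_pg_distance_<suffix>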
if not self.connection_pool: self.connection_pool = await self._settings.create_connection_pool() - self.managed_client = True return self @override async def __aexit__(self, *args): + # Only close the connection pool if it was created by the collection. if self.managed_client and self.connection_pool: await self.connection_pool.close() # If the pool was created by the collection, set it to None to enable reusing the collection. - if self._settings: + if self.managed_client: self.connection_pool = None @override @@ -158,13 +202,16 @@ async def _inner_upsert( # Execute the INSERT statement for each batch await cur.executemany( - sql.SQL("INSERT INTO {}.{} ({}) VALUES ({}) ON CONFLICT ({}) DO UPDATE SET {}").format( - sql.Identifier(self.db_schema), - sql.Identifier(self.collection_name), - sql.SQL(", ").join(sql.Identifier(field.name) for _, field in fields), - sql.SQL(", ").join(sql.Placeholder() * len(fields)), - sql.Identifier(self.data_model_definition.key_field.name), - sql.SQL(", ").join( + sql.SQL( + "INSERT INTO {schema}.{table} ({col_names}) VALUES ({placeholders}) " + "ON CONFLICT ({key_name}) DO UPDATE SET {update_columns}" + ).format( + schema=sql.Identifier(self.db_schema), + table=sql.Identifier(self.collection_name), + col_names=sql.SQL(", ").join(sql.Identifier(field.name) for _, field in fields), + placeholders=sql.SQL(", ").join(sql.Placeholder() * len(fields)), + key_name=sql.Identifier(self.data_model_definition.key_field.name), + update_columns=sql.SQL(", ").join( sql.SQL("{field} = EXCLUDED.{field}").format(field=sql.Identifier(field.name)) for _, field in fields if field.name != self.data_model_definition.key_field.name @@ -194,12 +241,12 @@ async def _inner_get(self, keys: Sequence[TKey], **kwargs: Any) -> OneOrMany[dic fields = [(field.name, field) for field in self.data_model_definition.fields.values()] async with self.connection_pool.connection() as conn, conn.cursor() as cur: await cur.execute( - sql.SQL("SELECT {} FROM {}.{} WHERE {} IN ({})").format( - sql.SQL(", ").join(sql.Identifier(name) for (name, _) in fields), - sql.Identifier(self.db_schema), - sql.Identifier(self.collection_name), - sql.Identifier(self.data_model_definition.key_field.name), - sql.SQL(", ").join(sql.Literal(key) for key in keys), + sql.SQL("SELECT {select_list} FROM {schema}.{table} WHERE {key_name} IN ({keys})").format( + select_list=sql.SQL(", ").join(sql.Identifier(name) for (name, _) in fields), + schema=sql.Identifier(self.db_schema), + table=sql.Identifier(self.collection_name), + key_name=sql.Identifier(self.data_model_definition.key_field.name), + keys=sql.SQL(", ").join(sql.Literal(key) for key in keys), ) ) rows = await cur.fetchall() @@ -232,11 +279,11 @@ async def _inner_delete(self, keys: Sequence[TKey], **kwargs: Any) -> None: # Execute the DELETE statement for each batch await cur.execute( - sql.SQL("DELETE FROM {}.{} WHERE {} IN ({})").format( - sql.Identifier(self.db_schema), - sql.Identifier(self.collection_name), - sql.Identifier(self.data_model_definition.key_field.name), - sql.SQL(", ").join(sql.Literal(key) for key in key_batch), + sql.SQL("DELETE FROM {schema}.{table} WHERE {name} IN ({keys})").format( + schema=sql.Identifier(self.db_schema), + table=sql.Identifier(self.collection_name), + name=sql.Identifier(self.data_model_definition.key_field.name), + keys=sql.SQL(", ").join(sql.Literal(key) for key in key_batch), ) ) @@ -285,21 +332,29 @@ async def create_collection(self, **kwargs: Any) -> None: # but would need to be created outside of this method. 
if isinstance(field, VectorStoreRecordVectorField) and field.dimensions: column_definitions.append( - sql.SQL("{} VECTOR({})").format(sql.Identifier(field_name), sql.Literal(field.dimensions)) + sql.SQL("{name} VECTOR({dimensions})").format( + name=sql.Identifier(field_name), dimensions=sql.Literal(field.dimensions) + ) ) elif isinstance(field, VectorStoreRecordKeyField): # Use the property_type directly for key fields column_definitions.append( - sql.SQL("{} {} PRIMARY KEY").format(sql.Identifier(field_name), sql.SQL(property_type)) + sql.SQL("{name} {col_type} PRIMARY KEY").format( + name=sql.Identifier(field_name), col_type=sql.SQL(property_type) + ) ) else: # Use the property_type directly for other types - column_definitions.append(sql.SQL("{} {}").format(sql.Identifier(field_name), sql.SQL(property_type))) + column_definitions.append( + sql.SQL("{name} {col_type}").format( + name=sql.Identifier(field_name), col_type=sql.SQL(property_type) + ) + ) columns_str = sql.SQL(", ").join(column_definitions) - create_table_query = sql.SQL("CREATE TABLE {}.{} ({})").format( - sql.Identifier(self.db_schema), sql.Identifier(table_name), columns_str + create_table_query = sql.SQL("CREATE TABLE {schema}.{table} ({columns})").format( + schema=sql.Identifier(self.db_schema), table=sql.Identifier(table_name), columns=columns_str ) async with self.connection_pool.connection() as conn, conn.cursor() as cur: @@ -313,6 +368,42 @@ async def create_collection(self, **kwargs: Any) -> None: if vector_field.index_kind: await self._create_index(table_name, vector_field) + @override + async def does_collection_exist(self, **kwargs: Any) -> bool: + """Check if the collection exists.""" + if self.connection_pool is None: + raise VectorStoreOperationException( + "Connection pool is not available, use the collection as a context manager." + ) + + async with self.connection_pool.connection() as conn, conn.cursor() as cur: + await cur.execute( + """ + SELECT table_name + FROM information_schema.tables + WHERE table_schema = %s AND table_name = %s + """, + (self.db_schema, self.collection_name), + ) + row = await cur.fetchone() + return bool(row) + + @override + async def delete_collection(self, **kwargs: Any) -> None: + """Delete the collection.""" + if self.connection_pool is None: + raise VectorStoreOperationException( + "Connection pool is not available, use the collection as a context manager." + ) + + async with self.connection_pool.connection() as conn, conn.cursor() as cur: + await cur.execute( + sql.SQL("DROP TABLE {schema}.{table} CASCADE").format( + schema=sql.Identifier(self.db_schema), table=sql.Identifier(self.collection_name) + ), + ) + await conn.commit() + async def _create_index(self, table_name: str, vector_field: VectorStoreRecordVectorField) -> None: """Create an index on a column in the table. 
@@ -347,51 +438,200 @@ async def _create_index(self, table_name: str, vector_field: VectorStoreRecordVe async with self.connection_pool.connection() as conn, conn.cursor() as cur: await cur.execute( - sql.SQL("CREATE INDEX {} ON {}.{} USING {} ({} {})").format( - sql.Identifier(index_name), - sql.Identifier(self.db_schema), - sql.Identifier(table_name), - sql.SQL(vector_field.index_kind), - sql.Identifier(column_name), - sql.SQL(ops_str), + sql.SQL("CREATE INDEX {index_name} ON {schema}.{table} USING {index_kind} ({column_name} {op})").format( + index_name=sql.Identifier(index_name), + schema=sql.Identifier(self.db_schema), + table=sql.Identifier(table_name), + index_kind=sql.SQL(vector_field.index_kind), + column_name=sql.Identifier(column_name), + op=sql.SQL(ops_str), ) ) await conn.commit() logger.info(f"Index '{index_name}' created successfully on column '{column_name}'.") + # endregion + # region: VectorSearchBase implementation + @override - async def does_collection_exist(self, **kwargs: Any) -> bool: - """Check if the collection exists.""" + async def _inner_search( + self, + options: VectorSearchOptions, + search_text: str | None = None, + vectorizable_text: str | None = None, + vector: list[float | int] | None = None, + **kwargs: Any, + ) -> KernelSearchResults[VectorSearchResult[TModel]]: if self.connection_pool is None: raise VectorStoreOperationException( "Connection pool is not available, use the collection as a context manager." ) - async with self.connection_pool.connection() as conn, conn.cursor() as cur: - await cur.execute( - """ - SELECT table_name - FROM information_schema.tables - WHERE table_schema = %s AND table_name = %s - """, - (self.db_schema, self.collection_name), + if vector is not None: + query, params, return_fields = self._construct_vector_query(vector, options, **kwargs) + elif search_text: + raise VectorSearchExecutionException("Text search not supported.") + elif vectorizable_text: + raise VectorSearchExecutionException("Vectorizable text search not supported.") + + if options.include_total_count: + async with self.connection_pool.connection() as conn, conn.cursor() as cur: + await cur.execute(query, params) + # Fetch all results to get total count. + rows = await cur.fetchall() + row_dicts = [convert_row_to_dict(row, return_fields) for row in rows] + return KernelSearchResults( + results=self._get_vector_search_results_from_results(row_dicts, options), total_count=len(row_dicts) + ) + else: + # Use an asynchronous generator to fetch and yield results + connection_pool = self.connection_pool + + async def fetch_results() -> AsyncGenerator[dict[str, Any], None]: + async with connection_pool.connection() as conn, conn.cursor() as cur: + await cur.execute(query, params) + async for row in cur: + yield convert_row_to_dict(row, return_fields) + + return KernelSearchResults( + results=self._get_vector_search_results_from_results(fetch_results(), options), + total_count=None, ) - row = await cur.fetchone() - return bool(row) - @override - async def delete_collection(self, **kwargs: Any) -> None: - """Delete the collection.""" - if self.connection_pool is None: - raise VectorStoreOperationException( - "Connection pool is not available, use the collection as a context manager." + def _construct_vector_query( + self, + vector: list[float | int], + options: VectorSearchOptions, + **kwargs: Any, + ) -> tuple[sql.Composed, list[Any], list[tuple[str, VectorStoreRecordField | None]]]: + """Construct a vector search query. + + Args: + vector: The vector to search for. 
+ options: The search options. + **kwargs: Additional arguments. + + Returns: + The query, parameters, and the fields representing the columns in the result. + """ + # Get the vector field we will be searching against, + # defaulting to the first vector field if not specified + vector_fields = self.data_model_definition.vector_fields + if not vector_fields: + raise VectorSearchExecutionException("No vector fields defined.") + if options.vector_field_name: + vector_field = next((f for f in vector_fields if f.name == options.vector_field_name), None) + if not vector_field: + raise VectorSearchExecutionException(f"Vector field '{options.vector_field_name}' not found.") + else: + vector_field = vector_fields[0] + + # Default to cosine distance if not set + distance_function = vector_field.distance_function or DistanceFunction.COSINE_DISTANCE + ops_str = get_vector_distance_ops_str(distance_function) + + # Select all fields except all vector fields if include_vectors is False + select_list = self.data_model_definition.get_field_names(include_vector_fields=options.include_vectors) + + where_clause = self._build_where_clauses_from_filter(options.filter) + + query = sql.SQL("SELECT {select_list}, {vec_col} {dist_op} %s as {dist_col} FROM {schema}.{table}").format( + select_list=sql.SQL(", ").join(sql.Identifier(name) for name in select_list), + vec_col=sql.Identifier(vector_field.name), + dist_op=sql.SQL(ops_str), + dist_col=sql.Identifier(self._distance_column_name), + schema=sql.Identifier(self.db_schema), + table=sql.Identifier(self.collection_name), + ) + + if where_clause: + query += where_clause + + query += sql.SQL(" ORDER BY {dist_col} LIMIT {limit}").format( + dist_col=sql.Identifier(self._distance_column_name), + limit=sql.Literal(options.top), + ) + + if options.skip: + query += sql.SQL(" OFFSET {offset}").format(offset=sql.Literal(options.skip)) + + # For cosine similarity, we need to take 1 - cosine distance. + # However, we can't use an expression in the ORDER BY clause or else the index won't be used. + # Instead we'll wrap the query in a subquery and modify the distance in the outer query. + if distance_function == DistanceFunction.COSINE_SIMILARITY: + query = sql.SQL( + "SELECT subquery.*, 1 - subquery.{subquery_dist_col} AS {dist_col} FROM ({subquery}) AS subquery" + ).format( + subquery_dist_col=sql.Identifier(self._distance_column_name), + dist_col=sql.Identifier(self._distance_column_name), + subquery=query, ) - async with self.connection_pool.connection() as conn, conn.cursor() as cur: - await cur.execute( - sql.SQL("DROP TABLE {scm}.{tbl} CASCADE").format( - scm=sql.Identifier(self.db_schema), tbl=sql.Identifier(self.collection_name) - ), + # For inner product, we need to take -1 * inner product. + # However, we can't use an expression in the ORDER BY clause or else the index won't be used. + # Instead we'll wrap the query in a subquery and modify the distance in the outer query. 
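For reference, with COSINE_SIMILARITY the composed statement above ends up roughly shaped like the SQL below, assuming pgvector's cosine distance operator <=>; %s is bound to the vector rendered as a pgvector literal, and the table, columns, and LIMIT are placeholders. Keeping the raw distance in the inner query and rewriting it only in the outer query is what keeps the index usable.

    SELECT subquery.*, 1 - subquery.sk_pg_distance AS sk_pg_distance
    FROM (
        SELECT id, content, embedding <=> %s AS sk_pg_distance
        FROM public.hotels
        ORDER BY sk_pg_distance
        LIMIT 3
    ) AS subquery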
+ if distance_function == DistanceFunction.DOT_PROD: + query = sql.SQL( + "SELECT subquery.*, -1 * subquery.{subquery_dist_col} AS {dist_col} FROM ({subquery}) AS subquery" + ).format( + subquery_dist_col=sql.Identifier(self._distance_column_name), + dist_col=sql.Identifier(self._distance_column_name), + subquery=query, ) - await conn.commit() + + # Convert the vector to a string for the query + params = ["[" + ",".join([str(float(v)) for v in vector]) + "]"] + + return ( + query, + params, + [ + *((name, f) for (name, f) in self.data_model_definition.fields.items() if name in select_list), + (self._distance_column_name, None), + ], + ) + + def _build_where_clauses_from_filter(self, filters: VectorSearchFilter | None) -> sql.Composed | None: + """Build the WHERE clause for the search query from the filter in the search options. + + Args: + filters: The filters. + + Returns: + The WHERE clause. + """ + if not filters or not filters.filters: + return None + + where_clauses = [] + for filter in filters.filters: + match filter: + case EqualTo(): + where_clauses.append( + sql.SQL("{field} = {value}").format( + field=sql.Identifier(filter.field_name), + value=sql.Literal(filter.value), + ) + ) + case AnyTagsEqualTo(): + where_clauses.append( + sql.SQL("{field} @> ARRAY[{value}::TEXT").format( + field=sql.Identifier(filter.field_name), + value=sql.Literal(filter.value), + ) + ) + case _: + raise ValueError(f"Unsupported filter: {filter}") + + return sql.SQL("WHERE {clause}").format(clause=sql.SQL(" AND ").join(where_clauses)) + + @override + def _get_record_from_result(self, result: dict[str, Any]) -> dict[str, Any]: + return {k: v for (k, v) in result.items() if k != self._distance_column_name} + + @override + def _get_score_from_result(self, result: Any) -> float | None: + return result.pop(self._distance_column_name, None) + + # endregion diff --git a/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py b/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py index 3d553c570846..89edd5926998 100644 --- a/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py +++ b/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py @@ -21,12 +21,12 @@ from semantic_kernel.exceptions.memory_connector_exceptions import MemoryConnectorInitializationError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class PostgresMemoryStore(MemoryStoreBase): """A memory store that uses Postgres with pgvector as the backend.""" diff --git a/python/semantic_kernel/connectors/memory/postgres/postgres_settings.py b/python/semantic_kernel/connectors/memory/postgres/postgres_settings.py index 02a5d24616ec..a3361d0a305d 100644 --- a/python/semantic_kernel/connectors/memory/postgres/postgres_settings.py +++ b/python/semantic_kernel/connectors/memory/postgres/postgres_settings.py @@ -4,6 +4,7 @@ from psycopg.conninfo import conninfo_to_dict from psycopg_pool import AsyncConnectionPool +from psycopg_pool.abc import ACT from pydantic import Field, SecretStr from semantic_kernel.connectors.memory.postgres.constants import ( @@ -14,15 +15,12 @@ PGSSL_MODE_ENV_VAR, PGUSER_ENV_VAR, ) -from semantic_kernel.exceptions.memory_connector_exceptions 
import ( - MemoryConnectorConnectionException, - MemoryConnectorInitializationError, -) +from semantic_kernel.exceptions.memory_connector_exceptions import MemoryConnectorConnectionException from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class PostgresSettings(KernelBaseSettings): """Postgres model settings. @@ -61,12 +59,12 @@ class PostgresSettings(KernelBaseSettings): env_prefix: ClassVar[str] = "POSTGRES_" connection_string: SecretStr | None = None - host: str | None = Field(None, alias=PGHOST_ENV_VAR) - port: int | None = Field(5432, alias=PGPORT_ENV_VAR) - dbname: str | None = Field(None, alias=PGDATABASE_ENV_VAR) - user: str | None = Field(None, alias=PGUSER_ENV_VAR) - password: SecretStr | None = Field(None, alias=PGPASSWORD_ENV_VAR) - sslmode: str | None = Field(None, alias=PGSSL_MODE_ENV_VAR) + host: str | None = Field(default=None, alias=PGHOST_ENV_VAR) + port: int | None = Field(default=5432, alias=PGPORT_ENV_VAR) + dbname: str | None = Field(default=None, alias=PGDATABASE_ENV_VAR) + user: str | None = Field(default=None, alias=PGUSER_ENV_VAR) + password: SecretStr | None = Field(default=None, alias=PGPASSWORD_ENV_VAR) + sslmode: str | None = Field(default=None, alias=PGSSL_MODE_ENV_VAR) min_pool: int = 1 max_pool: int = 5 @@ -89,30 +87,34 @@ def get_connection_args(self) -> dict[str, Any]: if self.password: result["password"] = self.password.get_secret_value() - # Ensure required values - if "host" not in result: - raise MemoryConnectorInitializationError("host is required. Please set PGHOST or connection_string.") - if "dbname" not in result: - raise MemoryConnectorInitializationError( - "database is required. Please set PGDATABASE or connection_string." - ) - if "user" not in result: - raise MemoryConnectorInitializationError("user is required. Please set PGUSER or connection_string.") - if "password" not in result: - raise MemoryConnectorInitializationError( - "password is required. Please set PGPASSWORD or connection_string." - ) - return result - async def create_connection_pool(self) -> AsyncConnectionPool: - """Creates a connection pool based off of settings.""" + async def create_connection_pool( + self, connection_class: type[ACT] | None = None, **kwargs: Any + ) -> AsyncConnectionPool: + """Creates a connection pool based off of settings. + + Args: + connection_class: The connection class to use. + kwargs: Additional keyword arguments to pass to the connection class. + + Returns: + The connection pool. 
+ """ try: + # Only pass connection_class if it specified, or else allow psycopg to use the default connection class + extra_args: dict[str, Any] = {} if connection_class is None else {"connection_class": connection_class} + pool = AsyncConnectionPool( min_size=self.min_pool, max_size=self.max_pool, open=False, - kwargs=self.get_connection_args(), + # kwargs are passed to the connection class + kwargs={ + **self.get_connection_args(), + **kwargs, + }, + **extra_args, ) await pool.open() except Exception as e: diff --git a/python/semantic_kernel/connectors/memory/postgres/postgres_store.py b/python/semantic_kernel/connectors/memory/postgres/postgres_store.py index b5a1edcaf38b..6b6ec9ff427f 100644 --- a/python/semantic_kernel/connectors/memory/postgres/postgres_store.py +++ b/python/semantic_kernel/connectors/memory/postgres/postgres_store.py @@ -4,27 +4,26 @@ import sys from typing import Any, TypeVar -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - from psycopg import sql from psycopg_pool import AsyncConnectionPool from semantic_kernel.connectors.memory.postgres.postgres_collection import PostgresCollection from semantic_kernel.connectors.memory.postgres.postgres_memory_store import DEFAULT_SCHEMA -from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition -from semantic_kernel.data.vector_storage.vector_store import VectorStore -from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.data import VectorStore, VectorStoreRecordCollection, VectorStoreRecordDefinition +from semantic_kernel.utils.feature_stage_decorator import experimental + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + logger: logging.Logger = logging.getLogger(__name__) TModel = TypeVar("TModel") -@experimental_class +@experimental class PostgresStore(VectorStore): """PostgreSQL store implementation.""" diff --git a/python/semantic_kernel/connectors/memory/postgres/utils.py b/python/semantic_kernel/connectors/memory/postgres/utils.py index 299567a57a39..874df3e6b26a 100644 --- a/python/semantic_kernel/connectors/memory/postgres/utils.py +++ b/python/semantic_kernel/connectors/memory/postgres/utils.py @@ -52,7 +52,9 @@ def python_type_to_postgres(python_type_str: str) -> str | None: return None -def convert_row_to_dict(row: tuple[Any, ...], fields: list[tuple[str, VectorStoreRecordField]]) -> dict[str, Any]: +def convert_row_to_dict( + row: tuple[Any, ...], fields: list[tuple[str, VectorStoreRecordField | None]] +) -> dict[str, Any]: """Convert a row from a PostgreSQL query to a dictionary. Uses the field information to map the row values to the corresponding field names. @@ -65,11 +67,12 @@ def convert_row_to_dict(row: tuple[Any, ...], fields: list[tuple[str, VectorStor A dictionary representation of the row. 
""" - def _convert(v: Any | None, field: VectorStoreRecordField) -> Any | None: + def _convert(v: Any | None, field: VectorStoreRecordField | None) -> Any | None: if v is None: return None - if isinstance(field, VectorStoreRecordVectorField): - # psycopg returns vector as a string + if isinstance(field, VectorStoreRecordVectorField) and isinstance(v, str): + # psycopg returns vector as a string if pgvector is not loaded. + # If pgvector is registered with the connection, no conversion is required. return json.loads(v) return v @@ -109,6 +112,8 @@ def get_vector_index_ops_str(distance_function: DistanceFunction) -> str: >>> get_vector_index_ops_str(DistanceFunction.COSINE) 'vector_cosine_ops' """ + if distance_function == DistanceFunction.COSINE_DISTANCE: + return "vector_cosine_ops" if distance_function == DistanceFunction.COSINE_SIMILARITY: return "vector_cosine_ops" if distance_function == DistanceFunction.DOT_PROD: @@ -121,6 +126,38 @@ def get_vector_index_ops_str(distance_function: DistanceFunction) -> str: raise ValueError(f"Unsupported distance function: {distance_function}") +def get_vector_distance_ops_str(distance_function: DistanceFunction) -> str: + """Get the PostgreSQL distance operator string for a given distance function. + + Args: + distance_function: The distance function for which the operator string is needed. + + Note: + For the COSINE_SIMILARITY and DOT_PROD distance functions, + there is additional query steps to retrieve the correct distance. + For dot product, take -1 * inner product, as <#> returns the negative inner product + since Postgres only supports ASC order index scans on operators + For cosine similarity, take 1 - cosine distance. + + Returns: + The PostgreSQL distance operator string for the given distance function. + + Raises: + ValueError: If the distance function is unsupported. + """ + if distance_function == DistanceFunction.COSINE_DISTANCE: + return "<=>" + if distance_function == DistanceFunction.COSINE_SIMILARITY: + return "<=>" + if distance_function == DistanceFunction.DOT_PROD: + return "<#>" + if distance_function == DistanceFunction.EUCLIDEAN_DISTANCE: + return "<->" + if distance_function == DistanceFunction.MANHATTAN: + return "<+>" + raise ValueError(f"Unsupported distance function: {distance_function}") + + async def ensure_open(connection_pool: AsyncConnectionPool) -> AsyncConnectionPool: """Ensure the connection pool is open. 
diff --git a/python/semantic_kernel/connectors/memory/qdrant/qdrant_collection.py b/python/semantic_kernel/connectors/memory/qdrant/qdrant_collection.py index b0cfd8244299..b3bcd7292cd9 100644 --- a/python/semantic_kernel/connectors/memory/qdrant/qdrant_collection.py +++ b/python/semantic_kernel/connectors/memory/qdrant/qdrant_collection.py @@ -29,7 +29,7 @@ VectorStoreOperationException, ) from semantic_kernel.kernel_types import OneOrMany -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental from semantic_kernel.utils.telemetry.user_agent import APP_INFO, prepend_semantic_kernel_to_user_agent logger: logging.Logger = logging.getLogger(__name__) @@ -38,7 +38,7 @@ TKey = TypeVar("TKey", str, int) -@experimental_class +@experimental class QdrantCollection( VectorSearchBase[str | int, TModel], VectorizedSearchMixin[TModel], diff --git a/python/semantic_kernel/connectors/memory/qdrant/qdrant_memory_store.py b/python/semantic_kernel/connectors/memory/qdrant/qdrant_memory_store.py index 8e5d5d0f2166..7c5a13e9ce8a 100644 --- a/python/semantic_kernel/connectors/memory/qdrant/qdrant_memory_store.py +++ b/python/semantic_kernel/connectors/memory/qdrant/qdrant_memory_store.py @@ -17,12 +17,12 @@ from semantic_kernel.exceptions import ServiceResponseException from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class QdrantMemoryStore(MemoryStoreBase): """QdrantMemoryStore.""" diff --git a/python/semantic_kernel/connectors/memory/qdrant/qdrant_settings.py b/python/semantic_kernel/connectors/memory/qdrant/qdrant_settings.py index 75cfd3f8553f..7dd45b2b86a9 100644 --- a/python/semantic_kernel/connectors/memory/qdrant/qdrant_settings.py +++ b/python/semantic_kernel/connectors/memory/qdrant/qdrant_settings.py @@ -5,12 +5,12 @@ from pydantic import HttpUrl, SecretStr, model_validator from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental IN_MEMORY_STRING = ":memory:" -@experimental_class +@experimental class QdrantSettings(KernelBaseSettings): """Qdrant settings currently used by the Qdrant Vector Record Store.""" diff --git a/python/semantic_kernel/connectors/memory/qdrant/qdrant_store.py b/python/semantic_kernel/connectors/memory/qdrant/qdrant_store.py index 0fd00bc59532..8e751e9f2048 100644 --- a/python/semantic_kernel/connectors/memory/qdrant/qdrant_store.py +++ b/python/semantic_kernel/connectors/memory/qdrant/qdrant_store.py @@ -17,7 +17,7 @@ from semantic_kernel.data.record_definition import VectorStoreRecordDefinition from semantic_kernel.data.vector_storage import VectorStore from semantic_kernel.exceptions import VectorStoreInitializationException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental from semantic_kernel.utils.telemetry.user_agent import APP_INFO, prepend_semantic_kernel_to_user_agent if TYPE_CHECKING: @@ -29,7 +29,7 @@ TKey = TypeVar("TKey", str, int) -@experimental_class +@experimental class QdrantStore(VectorStore): """A 
QdrantStore is a memory store that uses Qdrant as the backend.""" diff --git a/python/semantic_kernel/connectors/memory/redis/redis_collection.py b/python/semantic_kernel/connectors/memory/redis/redis_collection.py index 73f3a2bd4dea..278551d80b9a 100644 --- a/python/semantic_kernel/connectors/memory/redis/redis_collection.py +++ b/python/semantic_kernel/connectors/memory/redis/redis_collection.py @@ -53,7 +53,7 @@ VectorStoreInitializationException, VectorStoreOperationException, ) -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental from semantic_kernel.utils.list_handler import desync_list logger: logging.Logger = logging.getLogger(__name__) @@ -63,7 +63,7 @@ TQuery = TypeVar("TQuery", bound=BaseQuery) -@experimental_class +@experimental class RedisCollection(VectorSearchBase[str, TModel], VectorizedSearchMixin[TModel], VectorTextSearchMixin[TModel]): """A vector store record collection implementation using Redis.""" @@ -263,7 +263,7 @@ def _get_score_from_result(self, result: dict[str, Any]) -> float | None: return result.get("vector_distance") -@experimental_class +@experimental class RedisHashsetCollection(RedisCollection): """A vector store record collection implementation using Redis Hashsets.""" @@ -383,7 +383,7 @@ def _add_return_fields(self, query: TQuery, include_vectors: bool) -> TQuery: return query -@experimental_class +@experimental class RedisJsonCollection(RedisCollection): """A vector store record collection implementation using Redis Json.""" diff --git a/python/semantic_kernel/connectors/memory/redis/redis_memory_store.py b/python/semantic_kernel/connectors/memory/redis/redis_memory_store.py index 19a361d51f50..e28335d5e022 100644 --- a/python/semantic_kernel/connectors/memory/redis/redis_memory_store.py +++ b/python/semantic_kernel/connectors/memory/redis/redis_memory_store.py @@ -25,12 +25,12 @@ from semantic_kernel.exceptions.memory_connector_exceptions import MemoryConnectorInitializationError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class RedisMemoryStore(MemoryStoreBase): """A memory store implementation using Redis.""" diff --git a/python/semantic_kernel/connectors/memory/redis/redis_settings.py b/python/semantic_kernel/connectors/memory/redis/redis_settings.py index 62ceba3dee2f..f3aadba1bc57 100644 --- a/python/semantic_kernel/connectors/memory/redis/redis_settings.py +++ b/python/semantic_kernel/connectors/memory/redis/redis_settings.py @@ -5,10 +5,10 @@ from pydantic import SecretStr from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class RedisSettings(KernelBaseSettings): """Redis model settings. 
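Most hunks in this stretch of the patch are the same mechanical substitution: the separate experimental_class and experimental_function decorators from semantic_kernel.utils.experimental_decorator are replaced by the single experimental decorator from semantic_kernel.utils.feature_stage_decorator, applied to classes and functions alike. A before/after sketch with made-up names:

# Before this patch: separate decorators for classes and functions.
#
#   from semantic_kernel.utils.experimental_decorator import experimental_class, experimental_function
#
#   @experimental_class
#   class MyConnectorSettings: ...
#
#   @experimental_function
#   def create_my_functions(): ...

# After this patch: a single decorator covers both cases.
from semantic_kernel.utils.feature_stage_decorator import experimental


@experimental
class MyConnectorSettings:
    """Hypothetical settings class marked as experimental."""


@experimental
def create_my_functions() -> list[str]:
    """Hypothetical factory function marked as experimental."""
    return []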
diff --git a/python/semantic_kernel/connectors/memory/redis/redis_store.py b/python/semantic_kernel/connectors/memory/redis/redis_store.py index 8764027e0cd8..c56625dd2ae1 100644 --- a/python/semantic_kernel/connectors/memory/redis/redis_store.py +++ b/python/semantic_kernel/connectors/memory/redis/redis_store.py @@ -19,14 +19,14 @@ from semantic_kernel.data.record_definition import VectorStoreRecordDefinition from semantic_kernel.data.vector_storage import VectorStore, VectorStoreRecordCollection from semantic_kernel.exceptions import VectorStoreInitializationException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) TModel = TypeVar("TModel") -@experimental_class +@experimental class RedisStore(VectorStore): """Create a Redis Vector Store.""" diff --git a/python/semantic_kernel/connectors/memory/usearch/usearch_memory_store.py b/python/semantic_kernel/connectors/memory/usearch/usearch_memory_store.py index 1d00e19d5fb6..ad21b28614f2 100644 --- a/python/semantic_kernel/connectors/memory/usearch/usearch_memory_store.py +++ b/python/semantic_kernel/connectors/memory/usearch/usearch_memory_store.py @@ -21,7 +21,7 @@ ) from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) @@ -112,7 +112,7 @@ def pyarrow_table_to_memoryrecords(table: pa.Table, vectors: ndarray | None = No ] -@experimental_class +@experimental class USearchMemoryStore(MemoryStoreBase): """Memory store for searching embeddings with USearch.""" diff --git a/python/semantic_kernel/connectors/memory/weaviate/README.md b/python/semantic_kernel/connectors/memory/weaviate/README.md index 18894c00b161..6d763825a744 100644 --- a/python/semantic_kernel/connectors/memory/weaviate/README.md +++ b/python/semantic_kernel/connectors/memory/weaviate/README.md @@ -14,4 +14,4 @@ There are a few ways you can deploy your Weaviate database: ## Using the Connector -Once the Weaviate database is up and running, and the environment variables are set, you can use the connector in your Semantic Kernel application. Please refer to this sample to see how to use the connector: [Weaviate Connector Sample](../../../../samples/concepts/memory/new_memory.py) \ No newline at end of file +Once the Weaviate database is up and running, and the environment variables are set, you can use the connector in your Semantic Kernel application. 
Please refer to this sample to see how to use the connector: [Complex Connector Sample](../../../../samples/concepts/memory/complex_memory.py) \ No newline at end of file diff --git a/python/semantic_kernel/connectors/memory/weaviate/weaviate_collection.py b/python/semantic_kernel/connectors/memory/weaviate/weaviate_collection.py index 947188a3c819..e7ef424e0016 100644 --- a/python/semantic_kernel/connectors/memory/weaviate/weaviate_collection.py +++ b/python/semantic_kernel/connectors/memory/weaviate/weaviate_collection.py @@ -47,7 +47,7 @@ VectorStoreOperationException, ) from semantic_kernel.kernel_types import OneOrMany -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger = logging.getLogger(__name__) @@ -55,7 +55,7 @@ TKey = TypeVar("TKey", str, int) -@experimental_class +@experimental class WeaviateCollection( VectorSearchBase[TKey, TModel], VectorizedSearchMixin[TModel], diff --git a/python/semantic_kernel/connectors/memory/weaviate/weaviate_memory_store.py b/python/semantic_kernel/connectors/memory/weaviate/weaviate_memory_store.py index e0be96a17021..ed831e1063e7 100644 --- a/python/semantic_kernel/connectors/memory/weaviate/weaviate_memory_store.py +++ b/python/semantic_kernel/connectors/memory/weaviate/weaviate_memory_store.py @@ -10,7 +10,7 @@ from semantic_kernel.exceptions.memory_connector_exceptions import MemoryConnectorInitializationError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) @@ -102,7 +102,7 @@ def remove_underscore_prefix(cls, sk_dict): return {key.lstrip("_"): value for key, value in sk_dict.items()} -@experimental_class +@experimental class WeaviateMemoryStore(MemoryStoreBase): """A memory store that uses Weaviate as the backend.""" diff --git a/python/semantic_kernel/connectors/memory/weaviate/weaviate_settings.py b/python/semantic_kernel/connectors/memory/weaviate/weaviate_settings.py index 0327d9b6cbe4..cf7a3a9c6e47 100644 --- a/python/semantic_kernel/connectors/memory/weaviate/weaviate_settings.py +++ b/python/semantic_kernel/connectors/memory/weaviate/weaviate_settings.py @@ -6,10 +6,10 @@ from semantic_kernel.exceptions.service_exceptions import ServiceInvalidExecutionSettingsError from semantic_kernel.kernel_pydantic import HttpsUrl, KernelBaseSettings -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class WeaviateSettings(KernelBaseSettings): """Weaviate model settings. 
diff --git a/python/semantic_kernel/connectors/memory/weaviate/weaviate_store.py b/python/semantic_kernel/connectors/memory/weaviate/weaviate_store.py index 9d57a6e588c2..dc34e0d03417 100644 --- a/python/semantic_kernel/connectors/memory/weaviate/weaviate_store.py +++ b/python/semantic_kernel/connectors/memory/weaviate/weaviate_store.py @@ -23,10 +23,10 @@ VectorStoreInitializationException, VectorStoreOperationException, ) -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class WeaviateStore(VectorStore): """A Weaviate store is a vector store that uses Weaviate as the backend.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/const.py b/python/semantic_kernel/connectors/openapi_plugin/const.py index ac4cebb1aeab..58998a797519 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/const.py +++ b/python/semantic_kernel/connectors/openapi_plugin/const.py @@ -3,10 +3,10 @@ from enum import Enum -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class OperationExtensions(Enum): """The operation extensions.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_expected_response.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_expected_response.py index 5fee34f9e2c0..9503cb77a6a8 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_expected_response.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_expected_response.py @@ -1,10 +1,9 @@ # Copyright (c) Microsoft. All rights reserved. 
+from semantic_kernel.utils.feature_stage_decorator import experimental -from semantic_kernel.utils.experimental_decorator import experimental_class - -@experimental_class +@experimental class RestApiExpectedResponse: """RestApiExpectedResponse.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flow.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flow.py index 2de8cc4162ec..c285bf599b60 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flow.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flow.py @@ -2,10 +2,10 @@ from dataclasses import dataclass -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental @dataclass class RestApiOAuthFlow: """Represents the OAuth flow used by the REST API.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flows.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flows.py index f739b757cb95..596d47c904df 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flows.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flows.py @@ -3,10 +3,10 @@ from dataclasses import dataclass from semantic_kernel.connectors.openapi_plugin.models.rest_api_oauth_flow import RestApiOAuthFlow -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental @dataclass class RestApiOAuthFlows: """Represents the OAuth flows used by the REST API.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_operation.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_operation.py index f6150e70a0a7..7963c55883e8 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_operation.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_operation.py @@ -21,10 +21,10 @@ from semantic_kernel.connectors.openapi_plugin.models.rest_api_security_requirement import RestApiSecurityRequirement from semantic_kernel.exceptions.function_exceptions import FunctionExecutionException from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class RestApiOperation: """RestApiOperation.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter.py index def469cbeadf..d38e6bebae81 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter.py @@ -12,10 +12,10 @@ RestApiParameterStyle, ) from semantic_kernel.exceptions.function_exceptions import FunctionExecutionException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class RestApiParameter: """RestApiParameter.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_location.py 
b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_location.py index 25da836bd3ce..59c71ddb559f 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_location.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_location.py @@ -2,10 +2,10 @@ from enum import Enum -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class RestApiParameterLocation(Enum): """The location of the REST API parameter.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_style.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_style.py index a5db1b921f6f..2651a4918a43 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_style.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_style.py @@ -2,10 +2,10 @@ from enum import Enum -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class RestApiParameterStyle(Enum): """RestApiParameterStyle.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload.py index 21c7cb288500..bc0bdbe086cf 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload.py @@ -4,10 +4,10 @@ RestApiPayloadProperty, ) from semantic_kernel.exceptions.function_exceptions import FunctionExecutionException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class RestApiPayload: """RestApiPayload.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload_property.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload_property.py index 455609fdf927..9889e23972a6 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload_property.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload_property.py @@ -3,10 +3,10 @@ from typing import Any from semantic_kernel.exceptions.function_exceptions import FunctionExecutionException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class RestApiPayloadProperty: """RestApiPayloadProperty.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_run_options.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_run_options.py index 78ce7a760ca7..1e4d59bbd34f 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_run_options.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_run_options.py @@ -1,9 +1,9 @@ # Copyright (c) Microsoft. All rights reserved. 
-from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class RestApiRunOptions: """The options for running the REST API operation.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_requirement.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_requirement.py index 78a07ace5da6..7d68adbdd16e 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_requirement.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_requirement.py @@ -1,10 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. from semantic_kernel.connectors.openapi_plugin.models.rest_api_security_scheme import RestApiSecurityScheme -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class RestApiSecurityRequirement(dict[RestApiSecurityScheme, list[str]]): """Represents the security requirements used by the REST API.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_scheme.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_scheme.py index c57669b7f121..5274382af619 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_scheme.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_scheme.py @@ -4,10 +4,10 @@ from semantic_kernel.connectors.openapi_plugin.models.rest_api_parameter_location import ( RestApiParameterLocation, ) -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class RestApiSecurityScheme: """Represents the security scheme used by the REST API.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_uri.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_uri.py index 16219521870e..aba9fde6c297 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_uri.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_uri.py @@ -2,10 +2,10 @@ from urllib.parse import urlparse -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class Uri: """The Uri class that represents the URI.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/openapi_function_execution_parameters.py b/python/semantic_kernel/connectors/openapi_plugin/openapi_function_execution_parameters.py index d671d3b25573..ccbe7627bd1f 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/openapi_function_execution_parameters.py +++ b/python/semantic_kernel/connectors/openapi_plugin/openapi_function_execution_parameters.py @@ -11,12 +11,12 @@ OperationSelectionPredicateContext, ) from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental AuthCallbackType = Callable[..., Awaitable[Any]] -@experimental_class +@experimental class OpenAPIFunctionExecutionParameters(KernelBaseModel): """OpenAPI function execution parameters.""" diff --git 
a/python/semantic_kernel/connectors/openapi_plugin/openapi_manager.py b/python/semantic_kernel/connectors/openapi_plugin/openapi_manager.py index e2bb641b601b..407095588ca1 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/openapi_manager.py +++ b/python/semantic_kernel/connectors/openapi_plugin/openapi_manager.py @@ -18,7 +18,7 @@ from semantic_kernel.functions.kernel_function_from_method import KernelFunctionFromMethod from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata from semantic_kernel.schema.kernel_json_schema_builder import TYPE_MAPPING -from semantic_kernel.utils.experimental_decorator import experimental_function +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.connectors.openapi_plugin.openapi_function_execution_parameters import ( @@ -28,7 +28,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_function +@experimental def create_functions_from_openapi( plugin_name: str, openapi_document_path: str | None = None, @@ -98,7 +98,7 @@ def create_functions_from_openapi( return functions -@experimental_function +@experimental def _create_function_from_operation( runner: OpenApiRunner, operation: RestApiOperation, diff --git a/python/semantic_kernel/connectors/openapi_plugin/openapi_runner.py b/python/semantic_kernel/connectors/openapi_plugin/openapi_runner.py index 8b15ddfa5222..9afc17eb4523 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/openapi_runner.py +++ b/python/semantic_kernel/connectors/openapi_plugin/openapi_runner.py @@ -19,13 +19,13 @@ from semantic_kernel.connectors.openapi_plugin.models.rest_api_run_options import RestApiRunOptions from semantic_kernel.exceptions.function_exceptions import FunctionExecutionException from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental from semantic_kernel.utils.telemetry.user_agent import APP_INFO, prepend_semantic_kernel_to_user_agent logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class OpenApiRunner: """The OpenApiRunner that runs the operations defined in the OpenAPI manifest.""" diff --git a/python/semantic_kernel/connectors/search/bing/bing_search.py b/python/semantic_kernel/connectors/search/bing/bing_search.py index 21a5ffb7c54c..92a0e5334721 100644 --- a/python/semantic_kernel/connectors/search/bing/bing_search.py +++ b/python/semantic_kernel/connectors/search/bing/bing_search.py @@ -25,7 +25,7 @@ from semantic_kernel.data.text_search.text_search_result import TextSearchResult from semantic_kernel.exceptions import ServiceInitializationError, ServiceInvalidRequestError from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT if TYPE_CHECKING: @@ -34,7 +34,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class BingSearch(KernelBaseModel, TextSearch): """A search engine connector that uses the Bing Search API to perform a web search.""" diff --git a/python/semantic_kernel/connectors/search/bing/bing_search_response.py b/python/semantic_kernel/connectors/search/bing/bing_search_response.py index 
c42248f24f17..784fecd7c859 100644 --- a/python/semantic_kernel/connectors/search/bing/bing_search_response.py +++ b/python/semantic_kernel/connectors/search/bing/bing_search_response.py @@ -6,24 +6,24 @@ from semantic_kernel.connectors.search.bing.bing_web_page import BingWebPage from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class BingWebPages(KernelBaseModel): """The web pages from a Bing search.""" id: str | None = None - some_results_removed: bool | None = Field(None, alias="someResultsRemoved") - total_estimated_matches: int | None = Field(None, alias="totalEstimatedMatches") - web_search_url: str | None = Field(None, alias="webSearchUrl") + some_results_removed: bool | None = Field(default=None, alias="someResultsRemoved") + total_estimated_matches: int | None = Field(default=None, alias="totalEstimatedMatches") + web_search_url: str | None = Field(default=None, alias="webSearchUrl") value: list[BingWebPage] = Field(default_factory=list) -@experimental_class +@experimental class BingSearchResponse(KernelBaseModel): """The response from a Bing search.""" - type_: str = Field("", alias="_type") + type_: str = Field(default="", alias="_type") query_context: dict[str, Any] = Field(default_factory=dict, validation_alias="queryContext") - web_pages: BingWebPages | None = Field(None, alias="webPages") + web_pages: BingWebPages | None = Field(default=None, alias="webPages") diff --git a/python/semantic_kernel/connectors/search/bing/bing_web_page.py b/python/semantic_kernel/connectors/search/bing/bing_web_page.py index faa8a1217e55..013462879bc6 100644 --- a/python/semantic_kernel/connectors/search/bing/bing_web_page.py +++ b/python/semantic_kernel/connectors/search/bing/bing_web_page.py @@ -2,10 +2,10 @@ from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class BingWebPage(KernelBaseModel): """A Bing web page.""" diff --git a/python/semantic_kernel/connectors/search/google/google_search.py b/python/semantic_kernel/connectors/search/google/google_search.py index 6219283029f2..85934804df00 100644 --- a/python/semantic_kernel/connectors/search/google/google_search.py +++ b/python/semantic_kernel/connectors/search/google/google_search.py @@ -20,7 +20,7 @@ from semantic_kernel.data.text_search.text_search_result import TextSearchResult from semantic_kernel.exceptions import ServiceInitializationError, ServiceInvalidRequestError from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT if TYPE_CHECKING: @@ -29,7 +29,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class GoogleSearch(KernelBaseModel, TextSearch): """A search engine connector that uses the Google Search API to perform a web search.""" diff --git a/python/semantic_kernel/connectors/search/google/google_search_response.py b/python/semantic_kernel/connectors/search/google/google_search_response.py index df76418ae18a..06c34f8f4bbf 100644 --- 
a/python/semantic_kernel/connectors/search/google/google_search_response.py +++ b/python/semantic_kernel/connectors/search/google/google_search_response.py @@ -6,10 +6,10 @@ from semantic_kernel.connectors.search.google.google_search_result import GoogleSearchResult from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class GoogleSearchInformation(KernelBaseModel): """Information about the search.""" @@ -19,7 +19,7 @@ class GoogleSearchInformation(KernelBaseModel): formatted_total_results: str = Field(alias="formattedTotalResults") -@experimental_class +@experimental class GoogleSearchResponse(KernelBaseModel): """The response from a Google search.""" diff --git a/python/semantic_kernel/connectors/search/google/google_search_result.py b/python/semantic_kernel/connectors/search/google/google_search_result.py index d2e68a67848d..ce273ef208af 100644 --- a/python/semantic_kernel/connectors/search/google/google_search_result.py +++ b/python/semantic_kernel/connectors/search/google/google_search_result.py @@ -5,25 +5,25 @@ from pydantic import Field from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class GoogleSearchResult(KernelBaseModel): """A Google web page.""" kind: str = "" title: str = "" - html_title: str = Field("", alias="htmlTitle") + html_title: str = Field(default="", alias="htmlTitle") link: str = "" - display_link: str = Field("", alias="displayLink") + display_link: str = Field(default="", alias="displayLink") snippet: str = "" - html_snippet: str = Field("", alias="htmlSnippet") - cache_id: str = Field("", alias="cacheId") - formatted_url: str = Field("", alias="formattedUrl") - html_formatted_url: str = Field("", alias="htmlFormattedUrl") + html_snippet: str = Field(default="", alias="htmlSnippet") + cache_id: str = Field(default="", alias="cacheId") + formatted_url: str = Field(default="", alias="formattedUrl") + html_formatted_url: str = Field(default="", alias="htmlFormattedUrl") pagemap: dict[str, Any] = Field(default_factory=dict) mime: str = "" - file_format: str = Field("", alias="fileFormat") + file_format: str = Field(default="", alias="fileFormat") image: dict[str, Any] = Field(default_factory=dict) labels: list[dict[str, Any]] = Field(default_factory=list) diff --git a/python/semantic_kernel/connectors/search_engine/google_connector.py b/python/semantic_kernel/connectors/search_engine/google_connector.py index 9999638a1014..a4b2d70a8bd9 100644 --- a/python/semantic_kernel/connectors/search_engine/google_connector.py +++ b/python/semantic_kernel/connectors/search_engine/google_connector.py @@ -38,7 +38,7 @@ def __init__( """ try: self._settings = GoogleSearchSettings.create( - api_key=api_key, + search_api_key=api_key, search_engine_id=search_engine_id, env_file_path=env_file_path, env_file_encoding=env_file_encoding, diff --git a/python/semantic_kernel/contents/__init__.py b/python/semantic_kernel/contents/__init__.py index c326115ccd86..cb69b29ac6c3 100644 --- a/python/semantic_kernel/contents/__init__.py +++ b/python/semantic_kernel/contents/__init__.py @@ -4,11 +4,22 @@ from semantic_kernel.contents.audio_content import AudioContent from semantic_kernel.contents.chat_history import 
ChatHistory from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.file_reference_content import FileReferenceContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.contents.history_reducer.chat_history_reducer import ChatHistoryReducer from semantic_kernel.contents.history_reducer.chat_history_summarization_reducer import ChatHistorySummarizationReducer from semantic_kernel.contents.history_reducer.chat_history_truncation_reducer import ChatHistoryTruncationReducer from semantic_kernel.contents.image_content import ImageContent +from semantic_kernel.contents.realtime_events import ( + RealtimeAudioEvent, + RealtimeEvent, + RealtimeEvents, + RealtimeFunctionCallEvent, + RealtimeFunctionResultEvent, + RealtimeImageEvent, + RealtimeTextEvent, +) from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent @@ -22,13 +33,22 @@ "AudioContent", "AuthorRole", "ChatHistory", + "ChatHistoryReducer", "ChatHistorySummarizationReducer", "ChatHistoryTruncationReducer", "ChatMessageContent", + "FileReferenceContent", "FinishReason", "FunctionCallContent", "FunctionResultContent", "ImageContent", + "RealtimeAudioEvent", + "RealtimeEvent", + "RealtimeEvents", + "RealtimeFunctionCallEvent", + "RealtimeFunctionResultEvent", + "RealtimeImageEvent", + "RealtimeTextEvent", "StreamingAnnotationContent", "StreamingChatMessageContent", "StreamingFileReferenceContent", diff --git a/python/semantic_kernel/contents/annotation_content.py b/python/semantic_kernel/contents/annotation_content.py index a33f8014d694..64ed9ffe4c4b 100644 --- a/python/semantic_kernel/contents/annotation_content.py +++ b/python/semantic_kernel/contents/annotation_content.py @@ -8,14 +8,14 @@ from semantic_kernel.contents.const import ANNOTATION_CONTENT_TAG, ContentTypes from semantic_kernel.contents.kernel_content import KernelContent -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger = logging.getLogger(__name__) _T = TypeVar("_T", bound="AnnotationContent") -@experimental_class +@experimental class AnnotationContent(KernelContent): """Annotation content.""" diff --git a/python/semantic_kernel/contents/audio_content.py b/python/semantic_kernel/contents/audio_content.py index 8ee4197aaa8f..12bb47af9f64 100644 --- a/python/semantic_kernel/contents/audio_content.py +++ b/python/semantic_kernel/contents/audio_content.py @@ -3,16 +3,17 @@ import mimetypes from typing import Any, ClassVar, Literal, TypeVar +from numpy import ndarray from pydantic import Field from semantic_kernel.contents.binary_content import BinaryContent from semantic_kernel.contents.const import AUDIO_CONTENT_TAG, ContentTypes -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental _T = TypeVar("_T", bound="AudioContent") -@experimental_class +@experimental class AudioContent(BinaryContent): """Audio Content class. @@ -38,11 +39,45 @@ class AudioContent(BinaryContent): metadata (dict[str, Any]): Any metadata that should be attached to the response. 
""" - content_type: Literal[ContentTypes.AUDIO_CONTENT] = Field(AUDIO_CONTENT_TAG, init=False) # type: ignore + content_type: Literal[ContentTypes.AUDIO_CONTENT] = Field(default=AUDIO_CONTENT_TAG, init=False) # type: ignore tag: ClassVar[str] = AUDIO_CONTENT_TAG + def __init__( + self, + uri: str | None = None, + data_uri: str | None = None, + data: str | bytes | ndarray | None = None, + data_format: str | None = None, + mime_type: str | None = None, + **kwargs: Any, + ): + """Create an Audio Content object, either from a data_uri or data. + + Args: + uri: The reference uri of the content. + data_uri: The data uri of the content. + data: The data of the content. + data_format: The format of the data (e.g. base64). + mime_type: The mime type of the audio, only used with data. + kwargs: Any additional arguments: + inner_content: The inner content of the response, + this should hold all the information from the response so even + when not creating a subclass a developer + can leverage the full thing. + ai_model_id: The id of the AI model that generated this response. + metadata: Any metadata that should be attached to the response. + """ + super().__init__( + uri=uri, + data_uri=data_uri, + data=data, + data_format=data_format, + mime_type=mime_type, + **kwargs, + ) + @classmethod - def from_audio_file(cls: type[_T], path: str) -> "AudioContent": + def from_audio_file(cls: type[_T], path: str) -> _T: """Create an instance from an audio file.""" mime_type = mimetypes.guess_type(path)[0] with open(path, "rb") as audio_file: diff --git a/python/semantic_kernel/contents/binary_content.py b/python/semantic_kernel/contents/binary_content.py index a36535b0c120..aa161f78755f 100644 --- a/python/semantic_kernel/contents/binary_content.py +++ b/python/semantic_kernel/contents/binary_content.py @@ -2,17 +2,19 @@ import logging import os +from pathlib import Path from typing import Annotated, Any, ClassVar, Literal, TypeVar from xml.etree.ElementTree import Element # nosec -from pydantic import Field, FilePath, UrlConstraints, computed_field +from numpy import ndarray +from pydantic import Field, FilePath, PrivateAttr, UrlConstraints, computed_field from pydantic_core import Url from semantic_kernel.contents.const import BINARY_CONTENT_TAG, ContentTypes from semantic_kernel.contents.kernel_content import KernelContent from semantic_kernel.contents.utils.data_uri import DataUri -from semantic_kernel.exceptions.content_exceptions import ContentInitializationError -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.exceptions.content_exceptions import ContentException, ContentInitializationError +from semantic_kernel.utils.feature_stage_decorator import experimental logger = logging.getLogger(__name__) @@ -21,7 +23,7 @@ DataUrl = Annotated[Url, UrlConstraints(allowed_schemes=["data"])] -@experimental_class +@experimental class BinaryContent(KernelContent): """This is a base class for different types of binary content. 
@@ -38,17 +40,18 @@ class BinaryContent(KernelContent): """ - content_type: Literal[ContentTypes.BINARY_CONTENT] = Field(BINARY_CONTENT_TAG, init=False) # type: ignore + content_type: Literal[ContentTypes.BINARY_CONTENT] = Field(default=BINARY_CONTENT_TAG, init=False) # type: ignore uri: Url | str | None = None + default_mime_type: ClassVar[str] = "text/plain" tag: ClassVar[str] = BINARY_CONTENT_TAG - _data_uri: DataUri | None = None + _data_uri: DataUri | None = PrivateAttr(default=None) def __init__( self, uri: Url | str | None = None, data_uri: DataUrl | str | None = None, - data: str | bytes | None = None, + data: str | bytes | ndarray | None = None, data_format: str | None = None, mime_type: str | None = None, **kwargs: Any, @@ -56,43 +59,49 @@ def __init__( """Create a Binary Content object, either from a data_uri or data. Args: - uri (Url | str | None): The reference uri of the content. - data_uri (DataUrl | None): The data uri of the content. - data (str | bytes | None): The data of the content. - data_format (str | None): The format of the data (e.g. base64). - mime_type (str | None): The mime type of the image, only used with data. - kwargs (Any): Any additional arguments: - inner_content (Any): The inner content of the response, + uri: The reference uri of the content. + data_uri: The data uri of the content. + data: The data of the content. + data_format: The format of the data (e.g. base64). + mime_type: The mime type of the content, not always relevant. + kwargs: Any additional arguments: + inner_content: The inner content of the response, this should hold all the information from the response so even when not creating a subclass a developer can leverage the full thing. - ai_model_id (str | None): The id of the AI model that generated this response. - metadata (dict[str, Any]): Any metadata that should be attached to the response. + ai_model_id: The id of the AI model that generated this response. + metadata: Any metadata that should be attached to the response. 
""" - _data_uri = None + temp_data_uri: DataUri | None = None if data_uri: - _data_uri = DataUri.from_data_uri(data_uri, self.default_mime_type) - if "metadata" in kwargs: - kwargs["metadata"].update(_data_uri.parameters) - else: - kwargs["metadata"] = _data_uri.parameters - elif data: - if isinstance(data, str): - _data_uri = DataUri( - data_str=data, data_format=data_format, mime_type=mime_type or self.default_mime_type - ) - else: - _data_uri = DataUri( - data_bytes=data, data_format=data_format, mime_type=mime_type or self.default_mime_type - ) + temp_data_uri = DataUri.from_data_uri(data_uri, self.default_mime_type) + kwargs.setdefault("metadata", {}) + kwargs["metadata"].update(temp_data_uri.parameters) + elif data is not None: + match data: + case bytes(): + temp_data_uri = DataUri( + data_bytes=data, data_format=data_format, mime_type=mime_type or self.default_mime_type + ) + case ndarray(): + temp_data_uri = DataUri( + data_array=data, data_format=data_format, mime_type=mime_type or self.default_mime_type + ) + case str(): + temp_data_uri = DataUri( + data_str=data, data_format=data_format, mime_type=mime_type or self.default_mime_type + ) if uri is not None: if isinstance(uri, str) and os.path.exists(uri): - uri = str(FilePath(uri)) + if os.path.isfile(uri): + uri = str(Path(uri)) + else: + raise ContentInitializationError("URI must be a file path, not a directory.") elif isinstance(uri, str): uri = Url(uri) super().__init__(uri=uri, **kwargs) - self._data_uri = _data_uri + self._data_uri = temp_data_uri @computed_field # type: ignore @property @@ -105,28 +114,43 @@ def data_uri(self) -> str: @data_uri.setter def data_uri(self, value: str): """Set the data uri.""" - self._data_uri = DataUri.from_data_uri(value) + if not self._data_uri: + self._data_uri = DataUri.from_data_uri(value, self.default_mime_type) + else: + self._data_uri.update_data(value) self.metadata.update(self._data_uri.parameters) @property - def data(self) -> bytes: + def data_string(self) -> str: + """Returns the data as a string, using the data format.""" + if self._data_uri: + return self._data_uri._data_str() + return "" + + @property + def data(self) -> bytes | ndarray: """Get the data.""" + if self._data_uri and self._data_uri.data_array is not None: + return self._data_uri.data_array.tobytes() if self._data_uri and self._data_uri.data_bytes: return self._data_uri.data_bytes - if self._data_uri and self._data_uri.data_str: - return self._data_uri.data_str.encode("utf-8") return b"" @data.setter - def data(self, value: str | bytes): + def data(self, value: str | bytes | ndarray): """Set the data.""" if self._data_uri: self._data_uri.update_data(value) - else: - if isinstance(value, str): + return + match value: + case ndarray(): + self._data_uri = DataUri(data_array=value, mime_type=self.mime_type) + case str(): self._data_uri = DataUri(data_str=value, mime_type=self.mime_type) - else: + case bytes(): self._data_uri = DataUri(data_bytes=value, mime_type=self.mime_type) + case _: + raise ContentException("Data must be a string, bytes, or numpy array.") @property def mime_type(self) -> str: @@ -167,7 +191,11 @@ def from_element(cls: type[_T], element: Element) -> _T: def write_to_file(self, path: str | FilePath) -> None: """Write the data to a file.""" + if self._data_uri and self._data_uri.data_array is not None: + self._data_uri.data_array.tofile(path) + return with open(path, "wb") as file: + assert isinstance(self.data, bytes) # nosec file.write(self.data) def to_dict(self) -> dict[str, Any]: diff --git 
a/python/semantic_kernel/contents/chat_history.py b/python/semantic_kernel/contents/chat_history.py index 5013aae0e073..53d49d2d80e4 100644 --- a/python/semantic_kernel/contents/chat_history.py +++ b/python/semantic_kernel/contents/chat_history.py @@ -1,14 +1,14 @@ # Copyright (c) Microsoft. All rights reserved. import logging -from collections.abc import Generator +from collections.abc import Generator, Iterable from functools import singledispatchmethod from html import unescape -from typing import Any +from typing import Any, TypeVar from xml.etree.ElementTree import Element, tostring # nosec from defusedxml.ElementTree import XML, ParseError -from pydantic import field_validator +from pydantic import Field, field_validator, model_validator from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.const import CHAT_HISTORY_TAG, CHAT_MESSAGE_CONTENT_TAG @@ -19,56 +19,37 @@ logger = logging.getLogger(__name__) +_T = TypeVar("_T", bound="ChatHistory") + class ChatHistory(KernelBaseModel): """This class holds the history of chat messages from a chat conversation. - Note: the constructor takes a system_message parameter, which is not part - of the class definition. This is to allow the system_message to be passed in - as a keyword argument, but not be part of the class definition. + Note: the system_message is added to the messages as a ChatMessageContent instance with role=AuthorRole.SYSTEM, + but updating it will not update the messages list. - Attributes: - messages (List[ChatMessageContent]): The list of chat messages in the history. + Args: + messages: The messages to add to the chat history. + system_message: A system message to add to the chat history, optional. + if passed, it is added to the messages + as a ChatMessageContent instance with role=AuthorRole.SYSTEM + before any other messages. """ - messages: list[ChatMessageContent] - - def __init__(self, **data: Any): - """Initializes a new instance of the ChatHistory class. - - Optionally incorporating a message and/or a system message at the beginning of the chat history. - - This constructor allows for flexible initialization with chat messages and an optional messages or a - system message. If both 'messages' (a list of ChatMessageContent instances) and 'system_message' are - provided, the 'system_message' is prepended to the list of messages, ensuring it appears as the first - message in the history. If only 'system_message' is provided without any 'messages', the chat history is - initialized with the 'system_message' as its first item. If 'messages' are provided without a - 'system_message', the chat history is initialized with the provided messages as is. - - Note: The 'system_message' is not retained as part of the class's attributes; it's used during - initialization and then discarded. The rest of the keyword arguments are passed to the superclass - constructor and handled according to the Pydantic model's behavior. - - Args: - **data: Arbitrary keyword arguments. - The constructor looks for two optional keys: - - 'messages': Optional[List[ChatMessageContent]], a list of chat messages to include in the history. - - 'system_message' Optional[str]: An optional string representing a system-generated message to be - included at the start of the chat history. 
- - """ - system_message_content = data.pop("system_message", None) - - if system_message_content: - system_message = ChatMessageContent(role=AuthorRole.SYSTEM, content=system_message_content) + messages: list[ChatMessageContent] = Field(default_factory=list, kw_only=False) + system_message: str | None = Field(default=None, kw_only=False, repr=False) + @model_validator(mode="before") + @classmethod + def _parse_system_message(cls, data: Any) -> Any: + """Parse the system_message and add it to the messages.""" + if isinstance(data, dict) and (system_message := data.pop("system_message", None)): + msg = ChatMessageContent(role=AuthorRole.SYSTEM, content=system_message) if "messages" in data: - data["messages"] = [system_message] + data["messages"] + data["messages"] = [msg] + data["messages"] else: - data["messages"] = [system_message] - if "messages" not in data: - data["messages"] = [] - super().__init__(**data) + data["messages"] = [msg] + return data @field_validator("messages", mode="before") @classmethod @@ -85,76 +66,107 @@ def _validate_messages(cls, messages: list[ChatMessageContent]) -> list[ChatMess @singledispatchmethod def add_system_message(self, content: str | list[KernelContent], **kwargs) -> None: - """Add a system message to the chat history.""" + """Add a system message to the chat history. + + Args: + content: The content of the system message, can be a string or a + list of KernelContent instances that are turned into a single ChatMessageContent. + **kwargs: Additional keyword arguments. + """ raise NotImplementedError @add_system_message.register - def add_system_message_str(self, content: str, **kwargs: Any) -> None: + def _(self, content: str, **kwargs: Any) -> None: """Add a system message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.SYSTEM, content=content, **kwargs)) @add_system_message.register(list) - def add_system_message_list(self, content: list[KernelContent], **kwargs: Any) -> None: + def _(self, content: list[KernelContent], **kwargs: Any) -> None: """Add a system message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.SYSTEM, items=content, **kwargs)) @singledispatchmethod def add_developer_message(self, content: str | list[KernelContent], **kwargs) -> None: - """Add a system message to the chat history.""" + """Add a system message to the chat history. + + Args: + content: The content of the developer message, can be a string or a + list of KernelContent instances that are turned into a single ChatMessageContent. + **kwargs: Additional keyword arguments. + """ raise NotImplementedError @add_developer_message.register - def add_developer_message_str(self, content: str, **kwargs: Any) -> None: + def _(self, content: str, **kwargs: Any) -> None: """Add a system message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.DEVELOPER, content=content, **kwargs)) @add_developer_message.register(list) - def add_developer_message_list(self, content: list[KernelContent], **kwargs: Any) -> None: + def _(self, content: list[KernelContent], **kwargs: Any) -> None: """Add a system message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.DEVELOPER, items=content, **kwargs)) @singledispatchmethod def add_user_message(self, content: str | list[KernelContent], **kwargs: Any) -> None: - """Add a user message to the chat history.""" + """Add a user message to the chat history. 
+ + Args: + content: The content of the user message, can be a string or a + list of KernelContent instances that are turned into a single ChatMessageContent. + **kwargs: Additional keyword arguments. + + """ raise NotImplementedError @add_user_message.register - def add_user_message_str(self, content: str, **kwargs: Any) -> None: + def _(self, content: str, **kwargs: Any) -> None: """Add a user message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.USER, content=content, **kwargs)) @add_user_message.register(list) - def add_user_message_list(self, content: list[KernelContent], **kwargs: Any) -> None: + def _(self, content: list[KernelContent], **kwargs: Any) -> None: """Add a user message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.USER, items=content, **kwargs)) @singledispatchmethod def add_assistant_message(self, content: str | list[KernelContent], **kwargs: Any) -> None: - """Add an assistant message to the chat history.""" + """Add an assistant message to the chat history. + + Args: + content: The content of the assistant message, can be a string or a + list of KernelContent instances that are turned into a single ChatMessageContent. + **kwargs: Additional keyword arguments. + """ raise NotImplementedError @add_assistant_message.register - def add_assistant_message_str(self, content: str, **kwargs: Any) -> None: + def _(self, content: str, **kwargs: Any) -> None: """Add an assistant message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.ASSISTANT, content=content, **kwargs)) @add_assistant_message.register(list) - def add_assistant_message_list(self, content: list[KernelContent], **kwargs: Any) -> None: + def _(self, content: list[KernelContent], **kwargs: Any) -> None: """Add an assistant message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.ASSISTANT, items=content, **kwargs)) @singledispatchmethod def add_tool_message(self, content: str | list[KernelContent], **kwargs: Any) -> None: - """Add a tool message to the chat history.""" + """Add a tool message to the chat history. + + Args: + content: The content of the tool message, can be a string or a + list of KernelContent instances that are turned into a single ChatMessageContent. + **kwargs: Additional keyword arguments. + """ raise NotImplementedError @add_tool_message.register - def add_tool_message_str(self, content: str, **kwargs: Any) -> None: + def _(self, content: str, **kwargs: Any) -> None: """Add a tool message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.TOOL, content=content, **kwargs)) @add_tool_message.register(list) - def add_tool_message_list(self, content: list[KernelContent], **kwargs: Any) -> None: + def _(self, content: list[KernelContent], **kwargs: Any) -> None: """Add a tool message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.TOOL, items=content, **kwargs)) @@ -245,6 +257,31 @@ def __str__(self) -> str: chat_history_xml.append(message.to_element()) return tostring(chat_history_xml, encoding="unicode", short_empty_elements=True) + def clear(self) -> None: + """Clear the chat history.""" + self.messages.clear() + + def extend(self, messages: Iterable[ChatMessageContent]) -> None: + """Extend the chat history with a list of messages. + + Args: + messages: The messages to add to the history. + Can be a list of ChatMessageContent instances or a ChatHistory itself. 
+ """ + self.messages.extend(messages) + + def replace(self, messages: Iterable[ChatMessageContent]) -> None: + """Replace the chat history with a list of messages. + + This calls clear() and then extend(messages=messages). + + Args: + messages: The messages to add to the history. + Can be a list of ChatMessageContent instances or a ChatHistory itself. + """ + self.clear() + self.extend(messages=messages) + def to_prompt(self) -> str: """Return a string representation of the history.""" chat_history_xml = Element(CHAT_HISTORY_TAG) @@ -264,7 +301,7 @@ def __eq__(self, other: Any) -> bool: return self.messages == other.messages @classmethod - def from_rendered_prompt(cls, rendered_prompt: str) -> "ChatHistory": + def from_rendered_prompt(cls: type[_T], rendered_prompt: str) -> _T: """Create a ChatHistory instance from a rendered prompt. Args: @@ -305,12 +342,12 @@ def serialize(self) -> str: ValueError: If the ChatHistory instance cannot be serialized to JSON. """ try: - return self.model_dump_json(indent=2, exclude_none=True) + return self.model_dump_json(exclude_none=True, indent=2) except Exception as e: # pragma: no cover raise ContentSerializationError(f"Unable to serialize ChatHistory to JSON: {e}") from e @classmethod - def restore_chat_history(cls, chat_history_json: str) -> "ChatHistory": + def restore_chat_history(cls: type[_T], chat_history_json: str) -> _T: """Restores a ChatHistory instance from a JSON string. Args: @@ -325,26 +362,30 @@ def restore_chat_history(cls, chat_history_json: str) -> "ChatHistory": fails validation. """ try: - return ChatHistory.model_validate_json(chat_history_json) + return cls.model_validate_json(chat_history_json) except Exception as e: raise ContentInitializationError(f"Invalid JSON format: {e}") def store_chat_history_to_file(self, file_path: str) -> None: """Stores the serialized ChatHistory to a file. + Uses mode "w" which means the file is created if it does not exist and gets truncated if it does. + Args: - file_path (str): The path to the file where the serialized data will be stored. + file_path: The path to the file where the serialized data will be stored. """ json_str = self.serialize() - with open(file_path, "w") as file: - file.write(json_str) + with open(file_path, "w") as local_file: + local_file.write(json_str) @classmethod - def load_chat_history_from_file(cls, file_path: str) -> "ChatHistory": + def load_chat_history_from_file(cls: type[_T], file_path: str) -> _T: """Loads the ChatHistory from a file. + Uses mode "r" which means it can only be read if it exists. + Args: - file_path (str): The path to the file from which to load the ChatHistory. + file_path: The path to the file from which to load the ChatHistory. Returns: ChatHistory: The deserialized ChatHistory instance. 
diff --git a/python/semantic_kernel/contents/chat_message_content.py b/python/semantic_kernel/contents/chat_message_content.py index b369038cdceb..829b3f5c6aed 100644 --- a/python/semantic_kernel/contents/chat_message_content.py +++ b/python/semantic_kernel/contents/chat_message_content.py @@ -10,6 +10,8 @@ from pydantic import Field from semantic_kernel.contents.annotation_content import AnnotationContent +from semantic_kernel.contents.audio_content import AudioContent +from semantic_kernel.contents.binary_content import BinaryContent from semantic_kernel.contents.const import ( ANNOTATION_CONTENT_TAG, CHAT_MESSAGE_CONTENT_TAG, @@ -33,6 +35,7 @@ from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.contents.utils.finish_reason import FinishReason +from semantic_kernel.contents.utils.hashing import make_hashable from semantic_kernel.exceptions.content_exceptions import ContentInitializationError TAG_CONTENT_MAP = { @@ -46,8 +49,9 @@ STREAMING_ANNOTATION_CONTENT_TAG: StreamingAnnotationContent, } -ITEM_TYPES = ( +CMC_ITEM_TYPES = Annotated[ AnnotationContent + | BinaryContent | ImageContent | TextContent | FunctionResultContent @@ -55,7 +59,10 @@ | FileReferenceContent | StreamingAnnotationContent | StreamingFileReferenceContent -) + | AudioContent, + Field(discriminator=DISCRIMINATOR_FIELD), +] + logger = logging.getLogger(__name__) @@ -80,11 +87,11 @@ class ChatMessageContent(KernelContent): __str__: Returns the content of the response. """ - content_type: Literal[ContentTypes.CHAT_MESSAGE_CONTENT] = Field(CHAT_MESSAGE_CONTENT_TAG, init=False) # type: ignore + content_type: Literal[ContentTypes.CHAT_MESSAGE_CONTENT] = Field(default=CHAT_MESSAGE_CONTENT_TAG, init=False) # type: ignore tag: ClassVar[str] = CHAT_MESSAGE_CONTENT_TAG role: AuthorRole name: str | None = None - items: list[Annotated[ITEM_TYPES, Field(discriminator=DISCRIMINATOR_FIELD)]] = Field(default_factory=list) + items: list[CMC_ITEM_TYPES] = Field(default_factory=list) encoding: str | None = None finish_reason: FinishReason | None = None @@ -92,7 +99,7 @@ class ChatMessageContent(KernelContent): def __init__( self, role: AuthorRole, - items: list[ITEM_TYPES], + items: list[CMC_ITEM_TYPES], name: str | None = None, inner_content: Any | None = None, encoding: str | None = None, @@ -119,7 +126,7 @@ def __init__( def __init__( # type: ignore self, role: AuthorRole, - items: list[ITEM_TYPES] | None = None, + items: list[CMC_ITEM_TYPES] | None = None, content: str | None = None, inner_content: Any | None = None, name: str | None = None, @@ -315,4 +322,5 @@ def _parse_items(self) -> str | list[dict[str, Any]]: def __hash__(self) -> int: """Return the hash of the chat message content.""" - return hash((self.tag, self.role, self.content, self.encoding, self.finish_reason, *self.items)) + hashable_items = [make_hashable(item) for item in self.items] if self.items else [] + return hash((self.tag, self.role, self.content, self.encoding, self.finish_reason, *hashable_items)) diff --git a/python/semantic_kernel/contents/file_reference_content.py b/python/semantic_kernel/contents/file_reference_content.py index 99cd15f341ef..8fa87503393e 100644 --- a/python/semantic_kernel/contents/file_reference_content.py +++ b/python/semantic_kernel/contents/file_reference_content.py @@ -8,20 +8,22 @@ from semantic_kernel.contents.const import FILE_REFERENCE_CONTENT_TAG, ContentTypes from semantic_kernel.contents.kernel_content import KernelContent -from 
semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger = logging.getLogger(__name__) _T = TypeVar("_T", bound="FileReferenceContent") -@experimental_class +@experimental class FileReferenceContent(KernelContent): """File reference content.""" content_type: Literal[ContentTypes.FILE_REFERENCE_CONTENT] = Field(FILE_REFERENCE_CONTENT_TAG, init=False) # type: ignore tag: ClassVar[str] = FILE_REFERENCE_CONTENT_TAG file_id: str | None = None + tools: list[Any] = Field(default_factory=list) + data_source: Any | None = None def __str__(self) -> str: """Return the string representation of the file reference content.""" diff --git a/python/semantic_kernel/contents/function_call_content.py b/python/semantic_kernel/contents/function_call_content.py index 7067311f4c8a..863ba6dfbaf7 100644 --- a/python/semantic_kernel/contents/function_call_content.py +++ b/python/semantic_kernel/contents/function_call_content.py @@ -45,7 +45,6 @@ class FunctionCallContent(KernelContent): def __init__( self, - content_type: Literal[ContentTypes.FUNCTION_CALL_CONTENT] = FUNCTION_CALL_CONTENT_TAG, # type: ignore inner_content: Any | None = None, ai_model_id: str | None = None, id: str | None = None, @@ -60,7 +59,6 @@ def __init__( """Create function call content. Args: - content_type: The content type. inner_content (Any | None): The inner content. ai_model_id (str | None): The id of the AI model. id (str | None): The id of the function call. @@ -83,7 +81,6 @@ def __init__( else: function_name = name args = { - "content_type": content_type, "inner_content": inner_content, "ai_model_id": ai_model_id, "id": id, @@ -124,6 +121,7 @@ def __add__(self, other: "FunctionCallContent | None") -> "FunctionCallContent": index=self.index or other.index, name=self.name or other.name, arguments=self.combine_arguments(self.arguments, other.arguments), + metadata=self.metadata | other.metadata, ) def combine_arguments( diff --git a/python/semantic_kernel/contents/function_result_content.py b/python/semantic_kernel/contents/function_result_content.py index 536fb4ff19ce..b1d36b2bd5f8 100644 --- a/python/semantic_kernel/contents/function_result_content.py +++ b/python/semantic_kernel/contents/function_result_content.py @@ -12,6 +12,7 @@ from semantic_kernel.contents.kernel_content import KernelContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.contents.utils.hashing import make_hashable from semantic_kernel.exceptions.content_exceptions import ContentInitializationError if TYPE_CHECKING: @@ -41,7 +42,6 @@ class FunctionResultContent(KernelContent): def __init__( self, - content_type: Literal[ContentTypes.FUNCTION_RESULT_CONTENT] = FUNCTION_RESULT_CONTENT_TAG, # type: ignore inner_content: Any | None = None, ai_model_id: str | None = None, id: str | None = None, @@ -56,7 +56,6 @@ def __init__( """Create function result content. Args: - content_type: The content type. inner_content (Any | None): The inner content. ai_model_id (str | None): The id of the AI model. id (str | None): The id of the function call that the result relates to. 
@@ -79,7 +78,6 @@ def __init__( else: function_name = name args = { - "content_type": content_type, "inner_content": inner_content, "ai_model_id": ai_model_id, "id": id, @@ -194,10 +192,11 @@ def serialize_result(self, value: Any) -> str: def __hash__(self) -> int: """Return the hash of the function result content.""" + hashable_result = make_hashable(self.result) return hash(( self.tag, self.id, - tuple(self.result) if isinstance(self.result, list) else self.result, + hashable_result, self.name, self.function_name, self.plugin_name, diff --git a/python/semantic_kernel/contents/history_reducer/chat_history_reducer.py b/python/semantic_kernel/contents/history_reducer/chat_history_reducer.py index bc05c705ceda..ed7d6deee70a 100644 --- a/python/semantic_kernel/contents/history_reducer/chat_history_reducer.py +++ b/python/semantic_kernel/contents/history_reducer/chat_history_reducer.py @@ -2,6 +2,7 @@ import sys from abc import ABC, abstractmethod +from typing import Any if sys.version < "3.11": from typing_extensions import Self # pragma: no cover @@ -11,15 +12,21 @@ from pydantic import Field from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.exceptions.content_exceptions import ContentInitializationError +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class ChatHistoryReducer(ChatHistory, ABC): """Defines a contract for reducing chat history.""" target_count: int = Field(..., gt=0, description="Target message count.") - threshold_count: int = Field(0, ge=0, description="Threshold count to avoid orphaning messages.") + threshold_count: int = Field(default=0, ge=0, description="Threshold count to avoid orphaning messages.") + auto_reduce: bool = Field( + default=False, + description="Whether to automatically reduce the chat history, this happens when using add_message_async.", + ) @abstractmethod async def reduce(self) -> Self | None: @@ -29,3 +36,28 @@ async def reduce(self) -> Self | None: A possibly shorter list of messages, or None if no change is needed. """ ... + + async def add_message_async( + self, + message: ChatMessageContent | dict[str, Any], + encoding: str | None = None, + metadata: dict[str, Any] | None = None, + ) -> None: + """Add a message to the chat history. + + If auto_reduce is enabled, the history will be reduced after adding the message. + """ + if isinstance(message, ChatMessageContent): + self.messages.append(message) + if self.auto_reduce: + await self.reduce() + return + if "role" not in message: + raise ContentInitializationError(f"Dictionary must contain at least the role. 
Got: {message}") + if encoding: + message["encoding"] = encoding + if metadata: + message["metadata"] = metadata + self.messages.append(ChatMessageContent(**message)) + if self.auto_reduce: + await self.reduce() diff --git a/python/semantic_kernel/contents/history_reducer/chat_history_reducer_utils.py b/python/semantic_kernel/contents/history_reducer/chat_history_reducer_utils.py index 6742c0b56816..dd82689cc1ad 100644 --- a/python/semantic_kernel/contents/history_reducer/chat_history_reducer_utils.py +++ b/python/semantic_kernel/contents/history_reducer/chat_history_reducer_utils.py @@ -7,7 +7,7 @@ from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.utils.experimental_decorator import experimental_function +from semantic_kernel.utils.feature_stage_decorator import experimental logger = logging.getLogger(__name__) @@ -15,7 +15,7 @@ SUMMARY_METADATA_KEY = "__summary__" -@experimental_function +@experimental def get_call_result_pairs(history: list[ChatMessageContent]) -> list[tuple[int, int]]: """Identify all (FunctionCallContent, FunctionResultContent) pairs in the history. @@ -45,7 +45,7 @@ def get_call_result_pairs(history: list[ChatMessageContent]) -> list[tuple[int, return pairs -@experimental_function +@experimental def locate_summarization_boundary(history: list[ChatMessageContent]) -> int: """Identify the index of the first message that is not a summary message. @@ -60,7 +60,7 @@ def locate_summarization_boundary(history: list[ChatMessageContent]) -> int: return len(history) -@experimental_function +@experimental def locate_safe_reduction_index( history: list[ChatMessageContent], target_count: int, @@ -96,10 +96,11 @@ def locate_safe_reduction_index( message_index = total_count - target_count # Move backward to avoid cutting function calls / results + # also skip over developer/system messages while message_index >= offset_count: - if not any( - isinstance(item, (FunctionCallContent, FunctionResultContent)) for item in history[message_index].items - ): + if history[message_index].role not in (AuthorRole.DEVELOPER, AuthorRole.SYSTEM): + break + if not contains_function_call_or_result(history[message_index]): break message_index -= 1 @@ -115,7 +116,7 @@ def locate_safe_reduction_index( return target_index -@experimental_function +@experimental def extract_range( history: list[ChatMessageContent], start: int, @@ -164,6 +165,11 @@ def extract_range( i += 1 continue + # skipping system/developer message + if msg.role in (AuthorRole.DEVELOPER, AuthorRole.SYSTEM): + i += 1 + continue + # If preserve_pairs is on, and there's a paired index, skip or include them both if preserve_pairs and idx in pair_map: paired_idx = pair_map[idx] @@ -205,7 +211,7 @@ def extract_range( return extracted -@experimental_function +@experimental def contains_function_call_or_result(msg: ChatMessageContent) -> bool: """Return True if the message has any function call or function result.""" return any(isinstance(item, (FunctionCallContent, FunctionResultContent)) for item in msg.items) diff --git a/python/semantic_kernel/contents/history_reducer/chat_history_summarization_reducer.py b/python/semantic_kernel/contents/history_reducer/chat_history_summarization_reducer.py index 1feaf1a839ad..e01594f1e0e4 100644 --- a/python/semantic_kernel/contents/history_reducer/chat_history_summarization_reducer.py +++ 
b/python/semantic_kernel/contents/history_reducer/chat_history_summarization_reducer.py @@ -2,20 +2,20 @@ import logging import sys -from typing import Any - -from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -from semantic_kernel.utils.experimental_decorator import experimental_class if sys.version < "3.11": from typing_extensions import Self # pragma: no cover else: from typing import Self # type: ignore # pragma: no cover +if sys.version < "3.12": + from typing_extensions import override # pragma: no cover +else: + from typing import override # type: ignore # pragma: no cover from pydantic import Field from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase -from semantic_kernel.const import DEFAULT_SERVICE_NAME +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.history_reducer.chat_history_reducer import ChatHistoryReducer @@ -27,6 +27,7 @@ locate_summarization_boundary, ) from semantic_kernel.exceptions.content_exceptions import ChatHistoryReducerException +from semantic_kernel.utils.feature_stage_decorator import experimental logger = logging.getLogger(__name__) @@ -47,75 +48,41 @@ """ -@experimental_class +@experimental class ChatHistorySummarizationReducer(ChatHistoryReducer): - """A ChatHistory with logic to summarize older messages past a target count.""" + """A ChatHistory with logic to summarize older messages past a target count. + + This class inherits from ChatHistoryReducer, which in turn inherits from ChatHistory. + It can be used anywhere a ChatHistory is expected, while adding summarization capability. + + Args: + target_count: The target message count. + threshold_count: The threshold count to avoid orphaning messages. + auto_reduce: Whether to automatically reduce the chat history, default is False. + service: The ChatCompletion service to use for summarization. + summarization_instructions: The summarization instructions, optional. + use_single_summary: Whether to use a single summary message, default is True. + fail_on_error: Raise error if summarization fails, default is True. + include_function_content_in_summary: Whether to include function calls/results in the summary, default is False. + execution_settings: The execution settings for the summarization prompt, optional. + + """ service: ChatCompletionClientBase summarization_instructions: str = Field( - default_factory=lambda: DEFAULT_SUMMARIZATION_PROMPT, + default=DEFAULT_SUMMARIZATION_PROMPT, description="The summarization instructions.", + kw_only=True, ) - use_single_summary: bool = Field(True, description="Whether to use a single summary message.") - fail_on_error: bool = Field(True, description="Raise error if summarization fails.") - service_id: str = Field( - default_factory=lambda: DEFAULT_SERVICE_NAME, description="The ID of the chat completion service." - ) + use_single_summary: bool = Field(default=True, description="Whether to use a single summary message.") + fail_on_error: bool = Field(default=True, description="Raise error if summarization fails.") include_function_content_in_summary: bool = Field( - False, description="Whether to include function calls/results in the summary." + default=False, description="Whether to include function calls/results in the summary." 
) execution_settings: PromptExecutionSettings | None = None - def __init__( - self, - service: ChatCompletionClientBase, - target_count: int, - service_id: str | None = None, - threshold_count: int | None = None, - summarization_instructions: str | None = None, - use_single_summary: bool | None = None, - fail_on_error: bool | None = None, - include_function_content_in_summary: bool | None = None, - execution_settings: PromptExecutionSettings | None = None, - **kwargs: Any, - ): - """Initialize the ChatHistorySummarizationReducer. - - Args: - service (ChatCompletionClientBase): The chat completion service. - target_count (int): The target number of messages to retain after applying summarization. - service_id (str | None): The ID of the chat completion service. - threshold_count (int | None): The threshold beyond target_count required to trigger reduction. - summarization_instructions (str | None): The summarization instructions. - use_single_summary (bool | None): Whether to use a single summary message. - fail_on_error (bool | None): Raise error if summarization fails. - include_function_content_in_summary (bool | None): Whether to include function calls/results in the summary. - execution_settings (PromptExecutionSettings | None): The prompt execution settings. - **kwargs (Any): Additional keyword arguments. - """ - args: dict[str, Any] = { - "service": service, - "target_count": target_count, - } - if service_id is not None: - args["service_id"] = service_id - if threshold_count is not None: - args["threshold_count"] = threshold_count - if summarization_instructions is not None: - args["summarization_instructions"] = summarization_instructions - if use_single_summary is not None: - args["use_single_summary"] = use_single_summary - if fail_on_error is not None: - args["fail_on_error"] = fail_on_error - if include_function_content_in_summary is not None: - args["include_function_content_in_summary"] = include_function_content_in_summary - if execution_settings is not None: - args["execution_settings"] = execution_settings - - super().__init__(**args, **kwargs) - + @override async def reduce(self) -> Self | None: - """Summarize the older messages past the target message count.""" history = self.messages if len(history) <= self.target_count + (self.threshold_count or 0): return None # No summarization needed @@ -187,19 +154,15 @@ async def _summarize(self, messages: list[ChatMessageContent]) -> ChatMessageCon from semantic_kernel.contents.utils.author_role import AuthorRole chat_history = ChatHistory(messages=messages) - - role = ( - getattr(self.execution_settings, "instruction_role", AuthorRole.SYSTEM) - if self.execution_settings - else AuthorRole.SYSTEM + execution_settings = self.execution_settings or self.service.get_prompt_execution_settings_from_settings( + PromptExecutionSettings() ) - - chat_history.add_message(ChatMessageContent(role=role, content=self.summarization_instructions)) - - execution_settings = self.execution_settings or self.service.get_prompt_execution_settings_class()( - service_id=self.service_id + chat_history.add_message( + ChatMessageContent( + role=getattr(execution_settings, "instruction_role", AuthorRole.SYSTEM), + content=self.summarization_instructions, + ) ) - return await self.service.get_chat_message_content(chat_history=chat_history, settings=execution_settings) def __eq__(self, other: object) -> bool: diff --git a/python/semantic_kernel/contents/history_reducer/chat_history_truncation_reducer.py 
b/python/semantic_kernel/contents/history_reducer/chat_history_truncation_reducer.py index 4faf28876748..32c848a098d7 100644 --- a/python/semantic_kernel/contents/history_reducer/chat_history_truncation_reducer.py +++ b/python/semantic_kernel/contents/history_reducer/chat_history_truncation_reducer.py @@ -2,47 +2,42 @@ import logging import sys -from typing import Any - -from semantic_kernel.utils.experimental_decorator import experimental_class if sys.version < "3.11": from typing_extensions import Self # pragma: no cover else: from typing import Self # type: ignore # pragma: no cover +if sys.version < "3.12": + from typing_extensions import override # pragma: no cover +else: + from typing import override # type: ignore # pragma: no cover + from semantic_kernel.contents.history_reducer.chat_history_reducer import ChatHistoryReducer from semantic_kernel.contents.history_reducer.chat_history_reducer_utils import ( extract_range, locate_safe_reduction_index, ) +from semantic_kernel.utils.feature_stage_decorator import experimental logger = logging.getLogger(__name__) -@experimental_class +@experimental class ChatHistoryTruncationReducer(ChatHistoryReducer): """A ChatHistory that supports truncation logic. Because this class inherits from ChatHistoryReducer (which in turn inherits from ChatHistory), it can also be used anywhere a ChatHistory is expected, while adding truncation capability. - """ - def __init__(self, target_count: int, threshold_count: int | None = None, **kwargs: Any): - """Initialize the truncation reducer.""" - args: dict[str, Any] = { - "target_count": target_count, - } - if threshold_count is not None: - args["threshold_count"] = threshold_count - super().__init__(**args, **kwargs) + Args: + target_count: The target message count. + threshold_count: The threshold count to avoid orphaning messages. + auto_reduce: Whether to automatically reduce the chat history, default is False. + """ + @override async def reduce(self) -> Self | None: - """Truncate the chat history to the target message count, avoiding orphaned calls. - - Returns: - The truncated list of messages if truncation occurred, or None otherwise. - """ history = self.messages if len(history) <= self.target_count + (self.threshold_count or 0): # No need to reduce diff --git a/python/semantic_kernel/contents/image_content.py b/python/semantic_kernel/contents/image_content.py index b0d66f133abf..4b25df692440 100644 --- a/python/semantic_kernel/contents/image_content.py +++ b/python/semantic_kernel/contents/image_content.py @@ -4,19 +4,20 @@ import mimetypes from typing import Any, ClassVar, Literal, TypeVar +from numpy import ndarray from pydantic import Field from typing_extensions import deprecated from semantic_kernel.contents.binary_content import BinaryContent from semantic_kernel.contents.const import IMAGE_CONTENT_TAG, ContentTypes -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger = logging.getLogger(__name__) _T = TypeVar("_T", bound="ImageContent") -@experimental_class +@experimental class ImageContent(BinaryContent): """Image Content class. 
@@ -52,6 +53,40 @@ class ImageContent(BinaryContent): content_type: Literal[ContentTypes.IMAGE_CONTENT] = Field(IMAGE_CONTENT_TAG, init=False) # type: ignore tag: ClassVar[str] = IMAGE_CONTENT_TAG + def __init__( + self, + uri: str | None = None, + data_uri: str | None = None, + data: str | bytes | ndarray | None = None, + data_format: str | None = None, + mime_type: str | None = None, + **kwargs: Any, + ): + """Create an Image Content object, either from a data_uri or data. + + Args: + uri: The reference uri of the content. + data_uri: The data uri of the content. + data: The data of the content. + data_format: The format of the data (e.g. base64). + mime_type: The mime type of the image, only used with data. + kwargs: Any additional arguments: + inner_content: The inner content of the response, + this should hold all the information from the response so even + when not creating a subclass a developer + can leverage the full thing. + ai_model_id: The id of the AI model that generated this response. + metadata: Any metadata that should be attached to the response. + """ + super().__init__( + uri=uri, + data_uri=data_uri, + data=data, + data_format=data_format, + mime_type=mime_type, + **kwargs, + ) + @classmethod @deprecated("The `from_image_path` method is deprecated; use `from_image_file` instead.", category=None) def from_image_path(cls: type[_T], image_path: str) -> _T: diff --git a/python/semantic_kernel/contents/realtime_events.py b/python/semantic_kernel/contents/realtime_events.py new file mode 100644 index 000000000000..d74287d5ccf4 --- /dev/null +++ b/python/semantic_kernel/contents/realtime_events.py @@ -0,0 +1,67 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Annotated, Any, ClassVar, Literal, Union + +from pydantic import Field + +from semantic_kernel.contents.audio_content import AudioContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.contents.image_content import ImageContent +from semantic_kernel.contents.text_content import TextContent +from semantic_kernel.kernel_pydantic import KernelBaseModel + +RealtimeEvents = Annotated[ + Union[ + "RealtimeEvent", + "RealtimeAudioEvent", + "RealtimeTextEvent", + "RealtimeFunctionCallEvent", + "RealtimeFunctionResultEvent", + "RealtimeImageEvent", + ], + Field(discriminator="event_type"), +] + + +class RealtimeEvent(KernelBaseModel): + """Base class for all service events.""" + + service_event: Any | None = Field(default=None, description="The event content.") + service_type: str | None = None + event_type: ClassVar[Literal["service"]] = "service" + + +class RealtimeAudioEvent(RealtimeEvent): + """Audio event type.""" + + event_type: ClassVar[Literal["audio"]] = "audio" # type: ignore + audio: AudioContent = Field(..., description="Audio content.") + + +class RealtimeTextEvent(RealtimeEvent): + """Text event type.""" + + event_type: ClassVar[Literal["text"]] = "text" # type: ignore + text: TextContent = Field(..., description="Text content.") + + +class RealtimeFunctionCallEvent(RealtimeEvent): + """Function call event type.""" + + event_type: ClassVar[Literal["function_call"]] = "function_call" # type: ignore + function_call: FunctionCallContent = Field(..., description="Function call content.") + + +class RealtimeFunctionResultEvent(RealtimeEvent): + """Function result event type.""" + + event_type: ClassVar[Literal["function_result"]] = "function_result" # type: 
ignore + function_result: FunctionResultContent = Field(..., description="Function result content.") + + +class RealtimeImageEvent(RealtimeEvent): + """Image event type.""" + + event_type: ClassVar[Literal["image"]] = "image" # type: ignore + image: ImageContent = Field(..., description="Image content.") diff --git a/python/semantic_kernel/contents/streaming_annotation_content.py b/python/semantic_kernel/contents/streaming_annotation_content.py index addf4750d75c..14afea3e6cf3 100644 --- a/python/semantic_kernel/contents/streaming_annotation_content.py +++ b/python/semantic_kernel/contents/streaming_annotation_content.py @@ -8,14 +8,14 @@ from semantic_kernel.contents.const import STREAMING_ANNOTATION_CONTENT_TAG, ContentTypes from semantic_kernel.contents.kernel_content import KernelContent -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger = logging.getLogger(__name__) _T = TypeVar("_T", bound="StreamingAnnotationContent") -@experimental_class +@experimental class StreamingAnnotationContent(KernelContent): """Streaming Annotation content.""" diff --git a/python/semantic_kernel/contents/streaming_chat_message_content.py b/python/semantic_kernel/contents/streaming_chat_message_content.py index 683b498d0c69..88c31ef31473 100644 --- a/python/semantic_kernel/contents/streaming_chat_message_content.py +++ b/python/semantic_kernel/contents/streaming_chat_message_content.py @@ -1,12 +1,15 @@ # Copyright (c) Microsoft. All rights reserved. from enum import Enum -from typing import Any, Union, overload +from typing import Annotated, Any, overload from xml.etree.ElementTree import Element # nosec from pydantic import Field +from semantic_kernel.contents.audio_content import AudioContent +from semantic_kernel.contents.binary_content import BinaryContent from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.const import DISCRIMINATOR_FIELD from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent from semantic_kernel.contents.image_content import ImageContent @@ -16,15 +19,19 @@ from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.contents.utils.finish_reason import FinishReason +from semantic_kernel.contents.utils.hashing import make_hashable from semantic_kernel.exceptions import ContentAdditionException -ITEM_TYPES = Union[ - ImageContent, - StreamingTextContent, - FunctionCallContent, - FunctionResultContent, - StreamingFileReferenceContent, - StreamingAnnotationContent, +STREAMING_CMC_ITEM_TYPES = Annotated[ + BinaryContent + | AudioContent + | ImageContent + | FunctionResultContent + | FunctionCallContent + | StreamingTextContent + | StreamingAnnotationContent + | StreamingFileReferenceContent, + Field(discriminator=DISCRIMINATOR_FIELD), ] @@ -63,7 +70,7 @@ class StreamingChatMessageContent(ChatMessageContent, StreamingContentMixin): def __init__( self, role: AuthorRole, - items: list[ITEM_TYPES], + items: list[STREAMING_CMC_ITEM_TYPES], choice_index: int, name: str | None = None, inner_content: Any | None = None, @@ -93,7 +100,7 @@ def __init__( # type: ignore self, role: AuthorRole, choice_index: int, - items: list[ITEM_TYPES] | None = None, + items: list[STREAMING_CMC_ITEM_TYPES] | None = None, content: str | None = 
None, inner_content: Any | None = None, name: str | None = None, @@ -222,6 +229,7 @@ def to_element(self) -> "Element": def __hash__(self) -> int: """Return the hash of the streaming chat message content.""" + hashable_items = [make_hashable(item) for item in self.items] if self.items else [] return hash(( self.tag, self.role, @@ -230,5 +238,5 @@ def __hash__(self) -> int: self.finish_reason, self.choice_index, self.function_invoke_attempt, - *self.items, + *hashable_items, )) diff --git a/python/semantic_kernel/contents/streaming_file_reference_content.py b/python/semantic_kernel/contents/streaming_file_reference_content.py index 4f934848e174..f7be0a179335 100644 --- a/python/semantic_kernel/contents/streaming_file_reference_content.py +++ b/python/semantic_kernel/contents/streaming_file_reference_content.py @@ -8,14 +8,14 @@ from semantic_kernel.contents.const import STREAMING_FILE_REFERENCE_CONTENT_TAG, ContentTypes from semantic_kernel.contents.kernel_content import KernelContent -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger = logging.getLogger(__name__) _T = TypeVar("_T", bound="StreamingFileReferenceContent") -@experimental_class +@experimental class StreamingFileReferenceContent(KernelContent): """Streaming File reference content.""" @@ -25,6 +25,8 @@ class StreamingFileReferenceContent(KernelContent): ) tag: ClassVar[str] = STREAMING_FILE_REFERENCE_CONTENT_TAG file_id: str | None = None + tools: list[Any] = Field(default_factory=list) + data_source: Any | None = None def __str__(self) -> str: """Return the string representation of the file reference content.""" diff --git a/python/semantic_kernel/contents/utils/data_uri.py b/python/semantic_kernel/contents/utils/data_uri.py index d49022a6b104..1695491e9110 100644 --- a/python/semantic_kernel/contents/utils/data_uri.py +++ b/python/semantic_kernel/contents/utils/data_uri.py @@ -4,15 +4,11 @@ import binascii import logging import re -import sys +from collections.abc import Mapping, MutableMapping, Sequence from typing import Any, TypeVar -if sys.version < "3.11": - from typing_extensions import Self # pragma: no cover -else: - from typing import Self # type: ignore # pragma: no cover - -from pydantic import Field, ValidationError, field_validator, model_validator +from numpy import ndarray +from pydantic import Field, ValidationError, field_validator from pydantic_core import Url from semantic_kernel.exceptions import ContentInitializationError @@ -24,49 +20,92 @@ class DataUri(KernelBaseModel, validate_assignment=True): - """A class to represent a data uri.""" + """A class to represent a data uri. + + If a array is provided, that will be used as the data since it is the most efficient, + otherwise the bytes will be used, or the string will be converted to bytes. + + When updating either array or bytes, the other will not be updated. + + Args: + data_bytes: The data as bytes. + data_str: The data as a string. + data_array: The data as a numpy array. + mime_type: The mime type of the data. + parameters: Any parameters for the data. + data_format: The format of the data (e.g. base64). 
+ """ + + data_array: ndarray | None = None data_bytes: bytes | None = None - data_str: str | None = None mime_type: str | None = None - parameters: dict[str, str] = Field(default_factory=dict) + parameters: MutableMapping[str, str] = Field(default_factory=dict) data_format: str | None = None - def update_data(self, value: str | bytes): - """Update the data, using either a string or bytes.""" - if isinstance(value, str): - self.data_str = value - else: - self.data_bytes = value - - @model_validator(mode="before") - @classmethod - def _validate_data(cls, values: Any) -> dict[str, Any]: - """Validate the data.""" - if isinstance(values, dict) and not values.get("data_bytes") and not values.get("data_str"): - raise ContentInitializationError("Either data_bytes or data_str must be provided.") - return values - - @model_validator(mode="after") - def _parse_data(self) -> Self: - """Parse the data bytes to str.""" - if not self.data_str and self.data_bytes: - if self.data_format and self.data_format.lower() == "base64": - self.data_str = base64.b64encode(self.data_bytes).decode("utf-8") + def __init__( + self, + data_bytes: bytes | None = None, + data_str: str | None = None, + data_array: ndarray | None = None, + mime_type: str | None = None, + parameters: Sequence[str] | Mapping[str, str] | None = None, + data_format: str | None = None, + **kwargs: Any, + ): + """Initialize the data uri. + + Make sure to set the data_format to base64 so that it can be decoded properly. + + Args: + data_bytes: The data as bytes. + data_str: The data as a string. + data_array: The data as a numpy array. + mime_type: The mime type of the data. + parameters: Any parameters for the data. + data_format: The format of the data (e.g. base64). + kwargs: Any additional arguments. + """ + args: dict[str, Any] = {} + if data_bytes is not None: + args["data_bytes"] = data_bytes + if data_array is not None: + args["data_array"] = data_array + + if mime_type is not None: + args["mime_type"] = mime_type + if parameters is not None: + args["parameters"] = parameters + if data_format is not None: + args["data_format"] = data_format + + if data_str is not None and not data_bytes: + if data_format and data_format.lower() == "base64": + try: + args["data_bytes"] = base64.b64decode(data_str, validate=True) + except binascii.Error as exc: + raise ContentInitializationError("Invalid base64 data.") from exc else: - self.data_str = self.data_bytes.decode("utf-8") - if self.data_format and self.data_format.lower() == "base64" and self.data_str: - try: - if not self.data_bytes: - self.data_bytes = base64.b64decode(self.data_str, validate=True) + args["data_bytes"] = data_str.encode("utf-8") + if "data_array" not in args and "data_bytes" not in args: + raise ContentInitializationError("Either data_bytes, data_str or data_array must be provided.") + super().__init__(**args, **kwargs) + + def update_data(self, value: str | bytes | ndarray) -> None: + """Update the data, using either a string or bytes.""" + match value: + case ndarray(): + self.data_array = value + case str(): + if self.data_format and self.data_format.lower() == "base64": + self.data_bytes = base64.b64decode(value, validate=True) else: - base64.b64decode(self.data_str, validate=True) - except binascii.Error as exc: - raise ContentInitializationError("Invalid base64 data.") from exc - return self + self.data_bytes = value.encode("utf-8") + case _: + self.data_bytes = value @field_validator("parameters", mode="before") - def _validate_parameters(cls, value: list[str] | 
dict[str, str] | None = None) -> dict[str, str]: + def _validate_parameters(cls, value: list[str] | dict[str, str] | None) -> dict[str, str]: if not value: return {} if isinstance(value, dict): @@ -109,17 +148,29 @@ def from_data_uri(cls: type[_T], data_uri: str | Url, default_mime_type: str = " matches["parameters"] = matches["parameters"].strip(";").split(";") if not matches.get("mime_type"): matches["mime_type"] = default_mime_type - return cls(**matches) + return cls(**matches) # type: ignore def to_string(self, metadata: dict[str, str] = {}) -> str: """Return the data uri as a string.""" parameters = ";".join([f"{key}={val}" for key, val in metadata.items()]) parameters = f";{parameters}" if parameters else "" data_format = f"{self.data_format}" if self.data_format else "" - return f"data:{self.mime_type or ''}{parameters};{data_format},{self.data_str}" + return f"data:{self.mime_type or ''}{parameters};{data_format},{self._data_str()}" def __eq__(self, value: object) -> bool: """Check if the data uri is equal to another.""" if not isinstance(value, DataUri): return False return self.to_string() == value.to_string() + + def _data_str(self) -> str: + """Return the data as a string.""" + if self.data_array is not None: + if self.data_format and self.data_format.lower() == "base64": + return base64.b64encode(self.data_array.tobytes()).decode("utf-8") + return self.data_array.tobytes().decode("utf-8") + if self.data_bytes is not None: + if self.data_format and self.data_format.lower() == "base64": + return base64.b64encode(self.data_bytes).decode("utf-8") + return self.data_bytes.decode("utf-8") + return "" diff --git a/python/semantic_kernel/contents/utils/hashing.py b/python/semantic_kernel/contents/utils/hashing.py new file mode 100644 index 000000000000..02b341427027 --- /dev/null +++ b/python/semantic_kernel/contents/utils/hashing.py @@ -0,0 +1,52 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Any + +from pydantic import BaseModel + + +def make_hashable(input: Any, visited=None) -> Any: + """Recursively convert unhashable types to hashable equivalents. + + Args: + input: The input to convert to a hashable type. + visited: A dictionary of visited objects to prevent infinite recursion. + + Returns: + Any: The input converted to a hashable type. 
+ """ + if visited is None: + visited = {} + + # If we've seen this object before, return the stored placeholder or final result + unique_obj_id = id(input) + if unique_obj_id in visited: + return visited[unique_obj_id] + + # Handle Pydantic models by manually traversing fields + if isinstance(input, BaseModel): + visited[unique_obj_id] = None + data = {} + for field_name in input.model_fields: + value = getattr(input, field_name) + data[field_name] = make_hashable(value, visited) + result = tuple(sorted(data.items())) + visited[unique_obj_id] = result + return result + + # Convert dictionaries + if isinstance(input, dict): + visited[unique_obj_id] = None + items = tuple(sorted((k, make_hashable(v, visited)) for k, v in input.items())) + visited[unique_obj_id] = items + return items + + # Convert lists, sets, and tuples to tuples + if isinstance(input, (list, set, tuple)): + visited[unique_obj_id] = None + items = tuple(make_hashable(item, visited) for item in input) + visited[unique_obj_id] = items + return items + + # If it's already something hashable, just return it + return input diff --git a/python/semantic_kernel/core_plugins/crew_ai/__init__.py b/python/semantic_kernel/core_plugins/crew_ai/__init__.py new file mode 100644 index 000000000000..08c61da6d2fa --- /dev/null +++ b/python/semantic_kernel/core_plugins/crew_ai/__init__.py @@ -0,0 +1,11 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise import CrewAIEnterprise +from semantic_kernel.core_plugins.crew_ai.crew_ai_models import ( + CrewAIStatusResponse, +) +from semantic_kernel.core_plugins.crew_ai.crew_ai_settings import ( + CrewAISettings, +) + +__all__ = ["CrewAIEnterprise", "CrewAISettings", "CrewAIStatusResponse"] diff --git a/python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise.py b/python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise.py new file mode 100644 index 000000000000..643e08b586ea --- /dev/null +++ b/python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise.py @@ -0,0 +1,261 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging +from typing import Any + +import aiohttp +from pydantic import Field, ValidationError + +from semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise_client import CrewAIEnterpriseClient +from semantic_kernel.core_plugins.crew_ai.crew_ai_models import CrewAIEnterpriseKickoffState, CrewAIStatusResponse +from semantic_kernel.core_plugins.crew_ai.crew_ai_settings import CrewAISettings +from semantic_kernel.exceptions.function_exceptions import ( + FunctionExecutionException, + FunctionResultError, + PluginInitializationError, +) +from semantic_kernel.functions import kernel_function +from semantic_kernel.functions.kernel_function_from_method import KernelFunctionFromMethod +from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata +from semantic_kernel.functions.kernel_plugin import KernelPlugin +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.utils.feature_stage_decorator import experimental + +logger: logging.Logger = logging.getLogger(__name__) + + +@experimental +class CrewAIEnterprise(KernelBaseModel): + """Class to interface with Crew.AI Crews from Semantic Kernel. + + This object can be used directly or as a plugin in the Kernel. 
+ """ + + client: CrewAIEnterpriseClient + polling_interval: float = Field(default=1.0) + polling_timeout: float = Field(default=30.0) + + def __init__( + self, + endpoint: str | None = None, + auth_token: str | None = None, + polling_interval: float | None = 1.0, + polling_timeout: float | None = 30.0, + session: aiohttp.ClientSession | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + ): + """Initialize a new instance of the class. This object can be used directly or as a plugin in the Kernel. + + Args: + endpoint (str | None, optional): The API endpoint. + auth_token (str | None, optional): The authentication token. + polling_interval (float, optional): The polling interval in seconds. Defaults to 1.0. + polling_timeout (float, optional): The polling timeout in seconds. Defaults to 30.0. + session (aiohttp.ClientSession | None, optional): The HTTP client session. Defaults to None. + env_file_path (str | None): Use the environment settings file as a + fallback to environment variables. (Optional) + env_file_encoding (str | None): The encoding of the environment settings file. (Optional) + """ + try: + settings = CrewAISettings.create( + endpoint=endpoint, + auth_token=auth_token, + polling_interval=polling_interval, + polling_timeout=polling_timeout, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + ) + except ValidationError as ex: + raise PluginInitializationError("Failed to initialize CrewAI settings.") from ex + + client = CrewAIEnterpriseClient( + endpoint=settings.endpoint, auth_token=settings.auth_token.get_secret_value(), session=session + ) + + super().__init__( + client=client, + polling_interval=settings.polling_interval, + polling_timeout=settings.polling_timeout, + ) + + async def kickoff( + self, + inputs: dict[str, Any] | None = None, + task_webhook_url: str | None = None, + step_webhook_url: str | None = None, + crew_webhook_url: str | None = None, + ) -> str: + """Kickoff a new Crew AI task. + + Args: + inputs (dict[str, Any], optional): The inputs for the task. Defaults to None. + task_webhook_url (str | None, optional): The webhook URL for task updates. Defaults to None. + step_webhook_url (str | None, optional): The webhook URL for step updates. Defaults to None. + crew_webhook_url (str | None, optional): The webhook URL for crew updates. Defaults to None. + + Returns: + str: The ID of the kickoff response. + """ + try: + kickoff_response = await self.client.kickoff(inputs, task_webhook_url, step_webhook_url, crew_webhook_url) + logger.info(f"CrewAI Crew kicked off with Id: {kickoff_response.kickoff_id}") + return kickoff_response.kickoff_id + except Exception as ex: + raise FunctionExecutionException("Failed to kickoff CrewAI Crew.") from ex + + @kernel_function(description="Get the status of a Crew AI kickoff.") + async def get_crew_kickoff_status(self, kickoff_id: str) -> CrewAIStatusResponse: + """Get the status of a Crew AI task. + + Args: + kickoff_id (str): The ID of the kickoff response. + + Returns: + CrewAIStatusResponse: The status response of the task. + """ + try: + status_response = await self.client.get_status(kickoff_id) + logger.info(f"CrewAI Crew status for kickoff Id: {kickoff_id} is {status_response.state}") + return status_response + except Exception as ex: + raise FunctionExecutionException( + f"Failed to get status of CrewAI Crew with kickoff Id: {kickoff_id}." 
+ ) from ex + + @kernel_function(description="Wait for the completion of a Crew AI kickoff.") + async def wait_for_crew_completion(self, kickoff_id: str) -> str: + """Wait for the completion of a Crew AI task. + + Args: + kickoff_id (str): The ID of the kickoff response. + + Returns: + str: The result of the task. + + Raises: + FunctionExecutionException: If the task fails or an error occurs while waiting for completion. + """ + status_response: CrewAIStatusResponse | None = None + state: str = CrewAIEnterpriseKickoffState.Pending + + async def poll_status(): + nonlocal state, status_response + while state not in [ + CrewAIEnterpriseKickoffState.Failed, + CrewAIEnterpriseKickoffState.Failure, + CrewAIEnterpriseKickoffState.Success, + CrewAIEnterpriseKickoffState.Not_Found, + ]: + logger.debug( + f"Waiting for CrewAI Crew with kickoff Id: {kickoff_id} to complete. Current state: {state}" + ) + + await asyncio.sleep(self.polling_interval) + + try: + status_response = await self.client.get_status(kickoff_id) + state = status_response.state + except Exception as ex: + raise FunctionExecutionException( + f"Failed to wait for completion of CrewAI Crew with kickoff Id: {kickoff_id}." + ) from ex + + await asyncio.wait_for(poll_status(), timeout=self.polling_timeout) + + logger.info(f"CrewAI Crew with kickoff Id: {kickoff_id} completed with status: {state}") + result = status_response.result if status_response is not None and status_response.result is not None else "" + + if state in ["Failed", "Failure"]: + raise FunctionResultError(f"CrewAI Crew failed with error: {result}") + + return result + + def create_kernel_plugin( + self, + name: str, + description: str, + parameters: list[KernelParameterMetadata] | None = None, + task_webhook_url: str | None = None, + step_webhook_url: str | None = None, + crew_webhook_url: str | None = None, + ) -> KernelPlugin: + """Creates a kernel plugin that can be used to invoke the CrewAI Crew. + + Args: + name (str): The name of the kernel plugin. + description (str): The description of the kernel plugin. + parameters (List[KernelParameterMetadata] | None, optional): The definitions of the Crew's + required inputs. Defaults to None. + task_webhook_url (Optional[str], optional): The task level webhook URL. Defaults to None. + step_webhook_url (Optional[str], optional): The step level webhook URL. Defaults to None. + crew_webhook_url (Optional[str], optional): The crew level webhook URL. Defaults to None. + + Returns: + dict[str, Any]: A dictionary representing the kernel plugin. 
+        """
+
+        @kernel_function(description="Kickoff the CrewAI task.")
+        async def kickoff(**kwargs: Any) -> str:
+            args = self._build_arguments(parameters, kwargs)
+            return await self.kickoff(
+                inputs=args,
+                task_webhook_url=task_webhook_url,
+                step_webhook_url=step_webhook_url,
+                crew_webhook_url=crew_webhook_url,
+            )
+
+        @kernel_function(description="Kickoff the CrewAI task and wait for completion.")
+        async def kickoff_and_wait(**kwargs: Any) -> str:
+            args = self._build_arguments(parameters, kwargs)
+            kickoff_id = await self.kickoff(
+                inputs=args,
+                task_webhook_url=task_webhook_url,
+                step_webhook_url=step_webhook_url,
+                crew_webhook_url=crew_webhook_url,
+            )
+            return await self.wait_for_crew_completion(kickoff_id)
+
+        return KernelPlugin(
+            name,
+            description,
+            {
+                "kickoff": KernelFunctionFromMethod(kickoff, stream_method=None, parameters=parameters),
+                "kickoff_and_wait": KernelFunctionFromMethod(
+                    kickoff_and_wait, stream_method=None, parameters=parameters
+                ),
+                "get_status": self.get_crew_kickoff_status,
+                "wait_for_completion": self.wait_for_crew_completion,
+            },
+        )
+
+    def _build_arguments(
+        self, parameters: list[KernelParameterMetadata] | None, arguments: dict[str, Any]
+    ) -> dict[str, Any]:
+        """Builds the arguments for the CrewAI task from the provided parameters and arguments.
+
+        Args:
+            parameters (list[KernelParameterMetadata] | None): The metadata for the inputs.
+            arguments (dict[str, Any]): The provided arguments.
+
+        Returns:
+            dict[str, Any]: The built arguments.
+        """
+        args = {}
+        if parameters:
+            for input in parameters:
+                name = input.name
+                if name not in arguments:
+                    raise PluginInitializationError(f"Missing required input '{name}' for CrewAI.")
+                args[name] = arguments[name]
+        return args
+
+    async def __aenter__(self):
+        """Enter the session."""
+        await self.client.__aenter__()
+        return self
+
+    async def __aexit__(self, *args, **kwargs):
+        """Close the session."""
+        await self.client.__aexit__()
diff --git a/python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise_client.py b/python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise_client.py
new file mode 100644
index 000000000000..f52efa77e17d
--- /dev/null
+++ b/python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise_client.py
@@ -0,0 +1,106 @@
+# Copyright (c) Microsoft. All rights reserved.
+
+from typing import Any
+
+import aiohttp
+
+from semantic_kernel.core_plugins.crew_ai.crew_ai_models import (
+    CrewAIKickoffResponse,
+    CrewAIRequiredInputs,
+    CrewAIStatusResponse,
+)
+from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT
+
+
+class CrewAIEnterpriseClient:
+    """Client to interact with the Crew AI Enterprise API."""
+
+    def __init__(
+        self,
+        endpoint: str,
+        auth_token: str,
+        session: aiohttp.ClientSession | None = None,
+    ):
+        """Initializes a new instance of the CrewAIEnterpriseClient class.
+
+        Args:
+            endpoint (str): The API endpoint.
+            auth_token (str): The authentication token.
+            session (aiohttp.ClientSession | None, optional): The HTTP client session. Defaults to None.
+        """
+        self.endpoint = endpoint
+        self.auth_token = auth_token
+        self.session = session if session is not None else aiohttp.ClientSession()
+        self.request_header = {
+            "Authorization": f"Bearer {auth_token}",
+            "Content-Type": "application/json",
+            "user_agent": SEMANTIC_KERNEL_USER_AGENT,
+        }
+
+    async def get_inputs(self) -> CrewAIRequiredInputs:
+        """Get the required inputs for Crew AI.
+
+        Returns:
+            CrewAIRequiredInputs: The required inputs for Crew AI.
+ """ + async with ( + self.session.get(f"{self.endpoint}/inputs", headers=self.request_header) as response, # type: ignore + ): + response.raise_for_status() + return CrewAIRequiredInputs.model_validate_json(await response.text()) + + async def kickoff( + self, + inputs: dict[str, Any] | None = None, + task_webhook_url: str | None = None, + step_webhook_url: str | None = None, + crew_webhook_url: str | None = None, + ) -> CrewAIKickoffResponse: + """Kickoff a new Crew AI task. + + Args: + inputs (Optional[dict[str, Any]], optional): The inputs for the task. Defaults to None. + task_webhook_url (Optional[str], optional): The webhook URL for task updates. Defaults to None. + step_webhook_url (Optional[str], optional): The webhook URL for step updates. Defaults to None. + crew_webhook_url (Optional[str], optional): The webhook URL for crew updates. Defaults to None. + + Returns: + CrewAIKickoffResponse: The response from the kickoff request. + """ + content = { + "inputs": inputs, + "taskWebhookUrl": task_webhook_url, + "stepWebhookUrl": step_webhook_url, + "crewWebhookUrl": crew_webhook_url, + } + async with ( + self.session.post(f"{self.endpoint}/kickoff", json=content, headers=self.request_header) as response, # type: ignore + ): + response.raise_for_status() + body = await response.text() + return CrewAIKickoffResponse.model_validate_json(body) + + async def get_status(self, task_id: str) -> CrewAIStatusResponse: + """Get the status of a Crew AI task. + + Args: + task_id (str): The ID of the task. + + Returns: + CrewAIStatusResponse: The status response of the task. + """ + async with ( + self.session.get(f"{self.endpoint}/status/{task_id}", headers=self.request_header) as response, # type: ignore + ): + response.raise_for_status() + body = await response.text() + return CrewAIStatusResponse.model_validate_json(body) + + async def __aenter__(self): + """Enter the session.""" + await self.session.__aenter__() # type: ignore + return self + + async def __aexit__(self, *args, **kwargs): + """Close the session.""" + await self.session.close() # type: ignore diff --git a/python/semantic_kernel/core_plugins/crew_ai/crew_ai_models.py b/python/semantic_kernel/core_plugins/crew_ai/crew_ai_models.py new file mode 100644 index 000000000000..540b3e5293af --- /dev/null +++ b/python/semantic_kernel/core_plugins/crew_ai/crew_ai_models.py @@ -0,0 +1,38 @@ +# Copyright (c) Microsoft. All rights reserved. 
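# A minimal usage sketch for the CrewAI Enterprise plugin and client added above. The class
# name (CrewAIEnterprise), the import path, the endpoint, the token, the plugin name, and the
# "topic" parameter are illustrative assumptions, not values defined by this patch.
import asyncio

from semantic_kernel import Kernel
from semantic_kernel.core_plugins.crew_ai import CrewAIEnterprise  # assumed export
from semantic_kernel.functions import KernelParameterMetadata


async def main() -> None:
    async with CrewAIEnterprise(
        endpoint="https://my-crew.example.com/api/v1",  # illustrative endpoint
        auth_token="<auth-token>",
    ) as crew:
        plugin = crew.create_kernel_plugin(
            name="research_crew",
            description="Kicks off the research crew and returns its result.",
            parameters=[KernelParameterMetadata(name="topic", type="str", is_required=True)],
        )
        kernel = Kernel()
        kernel.add_plugin(plugin)
        # kickoff_and_wait builds the inputs from "topic", starts the Crew, and polls until done.
        result = await kernel.invoke(plugin["kickoff_and_wait"], topic="vector stores")
        print(result)


asyncio.run(main())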
+ +from enum import Enum +from typing import Any + +from semantic_kernel.kernel_pydantic import KernelBaseModel + + +class CrewAIEnterpriseKickoffState(str, Enum): + """The Crew.AI Enterprise kickoff state.""" + + Pending = "PENDING" + Started = "STARTED" + Running = "RUNNING" + Success = "SUCCESS" + Failed = "FAILED" + Failure = "FAILURE" + Not_Found = "NOT FOUND" + + +class CrewAIStatusResponse(KernelBaseModel): + """Represents the status response from Crew AI.""" + + state: CrewAIEnterpriseKickoffState + result: str | None = None + last_step: dict[str, Any] | None = None + + +class CrewAIKickoffResponse(KernelBaseModel): + """Represents the kickoff response from Crew AI.""" + + kickoff_id: str + + +class CrewAIRequiredInputs(KernelBaseModel): + """Represents the required inputs for Crew AI.""" + + inputs: dict[str, str] diff --git a/python/semantic_kernel/core_plugins/crew_ai/crew_ai_settings.py b/python/semantic_kernel/core_plugins/crew_ai/crew_ai_settings.py new file mode 100644 index 000000000000..7b54b6b9a90e --- /dev/null +++ b/python/semantic_kernel/core_plugins/crew_ai/crew_ai_settings.py @@ -0,0 +1,22 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import ClassVar + +from pydantic import SecretStr + +from semantic_kernel.kernel_pydantic import KernelBaseSettings + + +class CrewAISettings(KernelBaseSettings): + """The Crew.AI settings. + + Required: + - endpoint: str - The API endpoint. + """ + + env_prefix: ClassVar[str] = "CREW_AI_" + + endpoint: str + auth_token: SecretStr + polling_interval: float = 1.0 + polling_timeout: float = 30.0 diff --git a/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_settings.py b/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_settings.py index d7e4195f49a5..f43d7784396b 100644 --- a/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_settings.py +++ b/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_settings.py @@ -59,7 +59,7 @@ def _validate_endpoint(cls, endpoint: str) -> str: else: endpoint_parsed = urlsplit(endpoint)._asdict() if endpoint_parsed["path"]: - endpoint_parsed["path"] = re.sub("/{2,}", "/", endpoint_parsed["path"]) + endpoint_parsed["path"] = re.sub(r"/{2,}", "/", endpoint_parsed["path"]) else: endpoint_parsed["path"] = "/" return str(urlunsplit(endpoint_parsed.values())) diff --git a/python/semantic_kernel/data/__init__.py b/python/semantic_kernel/data/__init__.py index 4bc216e8dc84..a8f14d067e7d 100644 --- a/python/semantic_kernel/data/__init__.py +++ b/python/semantic_kernel/data/__init__.py @@ -3,6 +3,7 @@ from semantic_kernel.data.const import ( DEFAULT_DESCRIPTION, DEFAULT_FUNCTION_NAME, + DISTANCE_FUNCTION_DIRECTION_HELPER, DistanceFunction, IndexKind, ) @@ -41,6 +42,7 @@ __all__ = [ "DEFAULT_DESCRIPTION", "DEFAULT_FUNCTION_NAME", + "DISTANCE_FUNCTION_DIRECTION_HELPER", "AnyTagsEqualTo", "DistanceFunction", "EqualTo", diff --git a/python/semantic_kernel/data/const.py b/python/semantic_kernel/data/const.py index ae5246938834..1354ff276fae 100644 --- a/python/semantic_kernel/data/const.py +++ b/python/semantic_kernel/data/const.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
+import operator +from collections.abc import Callable from enum import Enum from typing import Final @@ -91,3 +93,14 @@ class DistanceFunction(str, Enum): EUCLIDEAN_SQUARED_DISTANCE = "euclidean_squared_distance" MANHATTAN = "manhattan" HAMMING = "hamming" + + +DISTANCE_FUNCTION_DIRECTION_HELPER: Final[dict[DistanceFunction, Callable[[int | float, int | float], bool]]] = { + DistanceFunction.COSINE_SIMILARITY: operator.gt, + DistanceFunction.COSINE_DISTANCE: operator.le, + DistanceFunction.DOT_PROD: operator.gt, + DistanceFunction.EUCLIDEAN_DISTANCE: operator.le, + DistanceFunction.EUCLIDEAN_SQUARED_DISTANCE: operator.le, + DistanceFunction.MANHATTAN: operator.le, + DistanceFunction.HAMMING: operator.le, +} diff --git a/python/semantic_kernel/data/filter_clauses/any_tags_equal_to_filter_clause.py b/python/semantic_kernel/data/filter_clauses/any_tags_equal_to_filter_clause.py index e63f972232dd..0da3299adbb7 100644 --- a/python/semantic_kernel/data/filter_clauses/any_tags_equal_to_filter_clause.py +++ b/python/semantic_kernel/data/filter_clauses/any_tags_equal_to_filter_clause.py @@ -4,10 +4,10 @@ from typing import ClassVar from semantic_kernel.data.filter_clauses.filter_clause_base import FilterClauseBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class AnyTagsEqualTo(FilterClauseBase): """A filter clause for a any tags equals comparison. diff --git a/python/semantic_kernel/data/filter_clauses/equal_to_filter_clause.py b/python/semantic_kernel/data/filter_clauses/equal_to_filter_clause.py index d60ac6d723cc..9b6f956faa93 100644 --- a/python/semantic_kernel/data/filter_clauses/equal_to_filter_clause.py +++ b/python/semantic_kernel/data/filter_clauses/equal_to_filter_clause.py @@ -3,10 +3,10 @@ from typing import ClassVar from semantic_kernel.data.filter_clauses.filter_clause_base import FilterClauseBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class EqualTo(FilterClauseBase): """A filter clause for an equals comparison. 
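# A short sketch of how DISTANCE_FUNCTION_DIRECTION_HELPER (added to const.py above) can be
# used: it maps each DistanceFunction to the comparison that answers "is score a better than
# score b?". The threshold and score values below are illustrative.
from semantic_kernel.data import DISTANCE_FUNCTION_DIRECTION_HELPER, DistanceFunction


def filter_scores(scores: list[float], distance_function: DistanceFunction, threshold: float) -> list[float]:
    """Keep only the scores that beat the threshold for the given distance function."""
    is_better = DISTANCE_FUNCTION_DIRECTION_HELPER[distance_function]
    return [score for score in scores if is_better(score, threshold)]


# Higher is better for cosine similarity, lower (or equal) is better for euclidean distance:
assert filter_scores([0.2, 0.8], DistanceFunction.COSINE_SIMILARITY, 0.5) == [0.8]
assert filter_scores([0.2, 0.8], DistanceFunction.EUCLIDEAN_DISTANCE, 0.5) == [0.2]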
diff --git a/python/semantic_kernel/data/filter_clauses/filter_clause_base.py b/python/semantic_kernel/data/filter_clauses/filter_clause_base.py index 2337784a6bda..16505a209be1 100644 --- a/python/semantic_kernel/data/filter_clauses/filter_clause_base.py +++ b/python/semantic_kernel/data/filter_clauses/filter_clause_base.py @@ -5,10 +5,10 @@ from typing import Any, ClassVar from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class FilterClauseBase(ABC, KernelBaseModel): """A base for all filter clauses.""" diff --git a/python/semantic_kernel/data/kernel_search_results.py b/python/semantic_kernel/data/kernel_search_results.py index 361a730df8d2..14f41a5b0f52 100644 --- a/python/semantic_kernel/data/kernel_search_results.py +++ b/python/semantic_kernel/data/kernel_search_results.py @@ -4,12 +4,12 @@ from typing import Any, Generic, TypeVar from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental T = TypeVar("T") -@experimental_class +@experimental class KernelSearchResults(KernelBaseModel, Generic[T]): """The result of a kernel search.""" diff --git a/python/semantic_kernel/data/record_definition/vector_store_model_decorator.py b/python/semantic_kernel/data/record_definition/vector_store_model_decorator.py index b3c6a6a412fc..0b6893116389 100644 --- a/python/semantic_kernel/data/record_definition/vector_store_model_decorator.py +++ b/python/semantic_kernel/data/record_definition/vector_store_model_decorator.py @@ -3,7 +3,7 @@ import logging from inspect import Parameter, _empty, signature from types import MappingProxyType, NoneType -from typing import Any +from typing import TypeVar from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition from semantic_kernel.data.record_definition.vector_store_record_fields import ( @@ -11,15 +11,17 @@ VectorStoreRecordVectorField, ) from semantic_kernel.exceptions import VectorStoreModelException -from semantic_kernel.utils.experimental_decorator import experimental_function +from semantic_kernel.utils.feature_stage_decorator import experimental logger = logging.getLogger(__name__) +_T = TypeVar("_T") -@experimental_function + +@experimental def vectorstoremodel( - cls: Any | None = None, -): + cls: type[_T] | None = None, +) -> type[_T]: """Returns the class as a vector store model. This decorator makes a class a vector store model. @@ -44,18 +46,18 @@ def vectorstoremodel( VectorStoreModelException: If there is a ndarray field without a serialize or deserialize function. """ - def wrap(cls: Any): + def wrap(cls: type[_T]) -> type[_T]: # get fields and annotations cls_sig = signature(cls) setattr(cls, "__kernel_vectorstoremodel__", True) setattr(cls, "__kernel_vectorstoremodel_definition__", _parse_signature_to_definition(cls_sig.parameters)) - return cls + return cls # type: ignore # See if we're being called as @vectorstoremodel or @vectorstoremodel(). if cls is None: # We're called with parens. - return wrap + return wrap # type: ignore # We're called as @vectorstoremodel without parens. 
return wrap(cls) diff --git a/python/semantic_kernel/data/record_definition/vector_store_model_definition.py b/python/semantic_kernel/data/record_definition/vector_store_model_definition.py index adc993ff22e3..aab27edbc15f 100644 --- a/python/semantic_kernel/data/record_definition/vector_store_model_definition.py +++ b/python/semantic_kernel/data/record_definition/vector_store_model_definition.py @@ -1,7 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. from dataclasses import dataclass, field -from typing import TypeVar +from typing import TypeAlias, TypeVar from semantic_kernel.data.record_definition.vector_store_model_protocols import ( DeserializeFunctionProtocol, @@ -16,13 +16,13 @@ VectorStoreRecordVectorField, ) from semantic_kernel.exceptions import VectorStoreModelException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental VectorStoreRecordFields = TypeVar("VectorStoreRecordFields", bound=VectorStoreRecordField) -FieldsType = dict[str, VectorStoreRecordFields] +FieldsType: TypeAlias = dict[str, VectorStoreRecordFields] -@experimental_class +@experimental @dataclass class VectorStoreRecordDefinition: """Memory record definition. diff --git a/python/semantic_kernel/data/record_definition/vector_store_model_protocols.py b/python/semantic_kernel/data/record_definition/vector_store_model_protocols.py index 0c83a131c965..1d190bd4fedc 100644 --- a/python/semantic_kernel/data/record_definition/vector_store_model_protocols.py +++ b/python/semantic_kernel/data/record_definition/vector_store_model_protocols.py @@ -3,12 +3,12 @@ from collections.abc import Sequence from typing import Any, Protocol, TypeVar, runtime_checkable -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental TModel = TypeVar("TModel", bound=object) -@experimental_class +@experimental @runtime_checkable class SerializeMethodProtocol(Protocol): """Data model serialization protocol. @@ -22,7 +22,7 @@ def serialize(self, **kwargs: Any) -> Any: ... # pragma: no cover -@experimental_class +@experimental @runtime_checkable class ToDictMethodProtocol(Protocol): """Class used internally to check if a model has a to_dict method.""" @@ -32,7 +32,7 @@ def to_dict(self, *args: Any, **kwargs: Any) -> dict[str, Any]: ... # pragma: no cover -@experimental_class +@experimental @runtime_checkable class ToDictFunctionProtocol(Protocol): """Protocol for to_dict function. @@ -48,7 +48,7 @@ class ToDictFunctionProtocol(Protocol): def __call__(self, record: Any, **kwargs: Any) -> Sequence[dict[str, Any]]: ... # pragma: no cover # noqa: D102 -@experimental_class +@experimental @runtime_checkable class FromDictFunctionProtocol(Protocol): """Protocol for from_dict function. @@ -64,7 +64,7 @@ class FromDictFunctionProtocol(Protocol): def __call__(self, records: Sequence[dict[str, Any]], **kwargs: Any) -> Any: ... # noqa: D102 -@experimental_class +@experimental @runtime_checkable class SerializeFunctionProtocol(Protocol): """Protocol for serialize function. @@ -81,7 +81,7 @@ class SerializeFunctionProtocol(Protocol): def __call__(self, record: Any, **kwargs: Any) -> Any: ... # noqa: D102 -@experimental_class +@experimental @runtime_checkable class DeserializeFunctionProtocol(Protocol): """Protocol for deserialize function. 
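# A minimal sketch of a record model using the vectorstoremodel decorator and field classes
# typed above, following the Annotated-based pattern; the class name, field names, and property
# types are illustrative, and the re-exports from semantic_kernel.data are assumed.
from dataclasses import dataclass, field
from typing import Annotated
from uuid import uuid4

from semantic_kernel.data import (
    VectorStoreRecordDataField,
    VectorStoreRecordKeyField,
    VectorStoreRecordVectorField,
    vectorstoremodel,
)


@vectorstoremodel
@dataclass
class HotelRecord:
    description: Annotated[str, VectorStoreRecordDataField(property_type="str", is_full_text_searchable=True)]
    description_vector: Annotated[list[float] | None, VectorStoreRecordVectorField(property_type="float")] = None
    hotel_id: Annotated[str, VectorStoreRecordKeyField(property_type="str")] = field(default_factory=lambda: str(uuid4()))


# With the decorator now typed as returning type[_T], type checkers keep treating HotelRecord as
# the plain dataclass while __kernel_vectorstoremodel_definition__ is attached at runtime.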
diff --git a/python/semantic_kernel/data/record_definition/vector_store_record_fields.py b/python/semantic_kernel/data/record_definition/vector_store_record_fields.py index 536482b1069d..7af7519e96d5 100644 --- a/python/semantic_kernel/data/record_definition/vector_store_record_fields.py +++ b/python/semantic_kernel/data/record_definition/vector_store_record_fields.py @@ -9,10 +9,10 @@ from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.data import DistanceFunction, IndexKind -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental @dataclass class VectorStoreRecordField(ABC): """Base class for all Vector Store Record Fields.""" @@ -21,13 +21,13 @@ class VectorStoreRecordField(ABC): property_type: str | None = None -@experimental_class +@experimental @dataclass class VectorStoreRecordKeyField(VectorStoreRecordField): """Memory record key field.""" -@experimental_class +@experimental @dataclass class VectorStoreRecordDataField(VectorStoreRecordField): """Memory record data field.""" @@ -38,7 +38,7 @@ class VectorStoreRecordDataField(VectorStoreRecordField): is_full_text_searchable: bool | None = None -@experimental_class +@experimental @dataclass class VectorStoreRecordVectorField(VectorStoreRecordField): """Memory record vector field. diff --git a/python/semantic_kernel/data/record_definition/vector_store_record_utils.py b/python/semantic_kernel/data/record_definition/vector_store_record_utils.py index 00436fe8e199..8812edca0943 100644 --- a/python/semantic_kernel/data/record_definition/vector_store_record_utils.py +++ b/python/semantic_kernel/data/record_definition/vector_store_record_utils.py @@ -10,7 +10,7 @@ ) from semantic_kernel.exceptions import VectorStoreModelException from semantic_kernel.kernel_types import OneOrMany -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings @@ -20,7 +20,7 @@ TModel = TypeVar("TModel", bound=object) -@experimental_class +@experimental class VectorStoreRecordUtils: """Helper class to easily add embeddings to a (set of) vector store record.""" diff --git a/python/semantic_kernel/data/search_filter.py b/python/semantic_kernel/data/search_filter.py index 4d0d84b5a7b9..1e91e5e0e2ed 100644 --- a/python/semantic_kernel/data/search_filter.py +++ b/python/semantic_kernel/data/search_filter.py @@ -10,12 +10,12 @@ from semantic_kernel.data.filter_clauses.equal_to_filter_clause import EqualTo from semantic_kernel.data.filter_clauses.filter_clause_base import FilterClauseBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental _T = TypeVar("_T", bound="SearchFilter") -@experimental_class +@experimental class SearchFilter: """A filter clause for a search.""" diff --git a/python/semantic_kernel/data/search_options.py b/python/semantic_kernel/data/search_options.py index 292fce607111..e054cd082be5 100644 --- a/python/semantic_kernel/data/search_options.py +++ b/python/semantic_kernel/data/search_options.py @@ -5,10 +5,10 @@ from semantic_kernel.data.search_filter import SearchFilter from semantic_kernel.kernel_pydantic import KernelBaseModel -from 
semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class SearchOptions(KernelBaseModel): """Options for a search.""" diff --git a/python/semantic_kernel/data/text_search/text_search.py b/python/semantic_kernel/data/text_search/text_search.py index 85791877f046..5c7a27eb223a 100644 --- a/python/semantic_kernel/data/text_search/text_search.py +++ b/python/semantic_kernel/data/text_search/text_search.py @@ -24,7 +24,7 @@ from semantic_kernel.functions.kernel_function_decorator import kernel_function from semantic_kernel.functions.kernel_function_from_method import KernelFunctionFromMethod from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.data.search_options import SearchOptions @@ -35,7 +35,7 @@ logger = logging.getLogger(__name__) -@experimental_class +@experimental class TextSearch: """The base class for all text searches.""" diff --git a/python/semantic_kernel/data/text_search/text_search_filter.py b/python/semantic_kernel/data/text_search/text_search_filter.py index f43b81da84ff..a156577d5747 100644 --- a/python/semantic_kernel/data/text_search/text_search_filter.py +++ b/python/semantic_kernel/data/text_search/text_search_filter.py @@ -1,10 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. from semantic_kernel.data.search_filter import SearchFilter -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class TextSearchFilter(SearchFilter): """A filter clause for a text search query.""" diff --git a/python/semantic_kernel/data/text_search/text_search_options.py b/python/semantic_kernel/data/text_search/text_search_options.py index 3d5afc187dab..075f27ffe06e 100644 --- a/python/semantic_kernel/data/text_search/text_search_options.py +++ b/python/semantic_kernel/data/text_search/text_search_options.py @@ -7,10 +7,10 @@ from semantic_kernel.data.search_options import SearchOptions from semantic_kernel.data.text_search.text_search_filter import TextSearchFilter -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class TextSearchOptions(SearchOptions): """Options for a text search.""" diff --git a/python/semantic_kernel/data/text_search/text_search_result.py b/python/semantic_kernel/data/text_search/text_search_result.py index 3ca56f1bf7d0..08222daa0bff 100644 --- a/python/semantic_kernel/data/text_search/text_search_result.py +++ b/python/semantic_kernel/data/text_search/text_search_result.py @@ -1,10 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. 
from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class TextSearchResult(KernelBaseModel): """The result of a text search.""" diff --git a/python/semantic_kernel/data/vector_search/vector_search.py b/python/semantic_kernel/data/vector_search/vector_search.py index 166676136ef9..ffc53b39d50f 100644 --- a/python/semantic_kernel/data/vector_search/vector_search.py +++ b/python/semantic_kernel/data/vector_search/vector_search.py @@ -11,7 +11,7 @@ from semantic_kernel.data.vector_search.vector_search_result import VectorSearchResult from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection from semantic_kernel.exceptions import VectorStoreModelDeserializationException -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental from semantic_kernel.utils.list_handler import desync_list TModel = TypeVar("TModel") @@ -20,7 +20,7 @@ logger = logging.getLogger(__name__) -@experimental_class +@experimental class VectorSearchBase(VectorStoreRecordCollection[TKey, TModel], Generic[TKey, TModel]): """Method for searching vectors.""" diff --git a/python/semantic_kernel/data/vector_search/vector_search_filter.py b/python/semantic_kernel/data/vector_search/vector_search_filter.py index 6944fe69ba4d..5e6fe31ce533 100644 --- a/python/semantic_kernel/data/vector_search/vector_search_filter.py +++ b/python/semantic_kernel/data/vector_search/vector_search_filter.py @@ -9,10 +9,10 @@ from semantic_kernel.data.filter_clauses.any_tags_equal_to_filter_clause import AnyTagsEqualTo from semantic_kernel.data.search_filter import SearchFilter -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class VectorSearchFilter(SearchFilter): """A filter clause for a vector search query.""" diff --git a/python/semantic_kernel/data/vector_search/vector_search_options.py b/python/semantic_kernel/data/vector_search/vector_search_options.py index 1c3ec85d69c7..786f7627d68d 100644 --- a/python/semantic_kernel/data/vector_search/vector_search_options.py +++ b/python/semantic_kernel/data/vector_search/vector_search_options.py @@ -7,10 +7,10 @@ from semantic_kernel.data.search_options import SearchOptions from semantic_kernel.data.vector_search.vector_search_filter import VectorSearchFilter -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class VectorSearchOptions(SearchOptions): """Options for vector search, builds on TextSearchOptions.""" diff --git a/python/semantic_kernel/data/vector_search/vector_search_result.py b/python/semantic_kernel/data/vector_search/vector_search_result.py index 6495fe5f0921..04272bed7935 100644 --- a/python/semantic_kernel/data/vector_search/vector_search_result.py +++ b/python/semantic_kernel/data/vector_search/vector_search_result.py @@ -3,12 +3,12 @@ from typing import Generic, TypeVar from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental TModel = 
TypeVar("TModel") -@experimental_class +@experimental class VectorSearchResult(KernelBaseModel, Generic[TModel]): """The result of a vector search.""" diff --git a/python/semantic_kernel/data/vector_search/vector_text_search.py b/python/semantic_kernel/data/vector_search/vector_text_search.py index f2a29b2908b8..19ec8e1f22ef 100644 --- a/python/semantic_kernel/data/vector_search/vector_text_search.py +++ b/python/semantic_kernel/data/vector_search/vector_text_search.py @@ -11,7 +11,7 @@ VectorStoreMixinException, VectorStoreModelDeserializationException, ) -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.data.kernel_search_results import KernelSearchResults @@ -22,7 +22,7 @@ logger = logging.getLogger(__name__) -@experimental_class +@experimental class VectorTextSearchMixin(Generic[TModel]): """The mixin for text search, to be used in combination with VectorSearchBase.""" diff --git a/python/semantic_kernel/data/vector_search/vectorizable_text_search.py b/python/semantic_kernel/data/vector_search/vectorizable_text_search.py index 9c5b882cf6f4..57960fca33d8 100644 --- a/python/semantic_kernel/data/vector_search/vectorizable_text_search.py +++ b/python/semantic_kernel/data/vector_search/vectorizable_text_search.py @@ -10,7 +10,7 @@ VectorStoreMixinException, VectorStoreModelDeserializationException, ) -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.data.kernel_search_results import KernelSearchResults @@ -22,7 +22,7 @@ logger = logging.getLogger(__name__) -@experimental_class +@experimental class VectorizableTextSearchMixin(Generic[TModel]): """The mixin for searching with text that get's vectorized downstream. diff --git a/python/semantic_kernel/data/vector_search/vectorized_search.py b/python/semantic_kernel/data/vector_search/vectorized_search.py index 1b3e5aa25f9e..b0e8329c795f 100644 --- a/python/semantic_kernel/data/vector_search/vectorized_search.py +++ b/python/semantic_kernel/data/vector_search/vectorized_search.py @@ -10,7 +10,7 @@ VectorStoreMixinException, VectorStoreModelDeserializationException, ) -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.data.kernel_search_results import KernelSearchResults @@ -22,7 +22,7 @@ logger = logging.getLogger(__name__) -@experimental_class +@experimental class VectorizedSearchMixin(Generic[TModel]): """The mixin for searching with vectors. 
To be used in combination with VectorSearchBase.""" diff --git a/python/semantic_kernel/data/vector_storage/vector_store.py b/python/semantic_kernel/data/vector_storage/vector_store.py index 796973a63854..d0e24a0bf5da 100644 --- a/python/semantic_kernel/data/vector_storage/vector_store.py +++ b/python/semantic_kernel/data/vector_storage/vector_store.py @@ -10,10 +10,10 @@ from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class VectorStore(KernelBaseModel): """Base class for vector stores.""" diff --git a/python/semantic_kernel/data/vector_storage/vector_store_record_collection.py b/python/semantic_kernel/data/vector_storage/vector_store_record_collection.py index 195d472d8155..49f7dacbf272 100644 --- a/python/semantic_kernel/data/vector_storage/vector_store_record_collection.py +++ b/python/semantic_kernel/data/vector_storage/vector_store_record_collection.py @@ -3,10 +3,17 @@ import asyncio import contextlib import logging +import sys from abc import abstractmethod from collections.abc import Awaitable, Callable, Mapping, Sequence from typing import Any, ClassVar, Generic, TypeVar +if sys.version_info >= (3, 11): + from typing import Self # pragma: no cover +else: + from typing_extensions import Self # pragma: no cover + + from pydantic import BaseModel, model_validator from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition @@ -22,7 +29,7 @@ ) from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.kernel_types import OneOrMany -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental TModel = TypeVar("TModel", bound=object) TKey = TypeVar("TKey") @@ -31,7 +38,7 @@ logger = logging.getLogger(__name__) -@experimental_class +@experimental class VectorStoreRecordCollection(KernelBaseModel, Generic[TKey, TModel]): """Base class for a vector store record collection.""" @@ -64,7 +71,7 @@ def model_post_init(self, __context: object | None = None): """Post init function that sets the key field and container mode values, and validates the datamodel.""" self._validate_data_model() - async def __aenter__(self) -> "VectorStoreRecordCollection": + async def __aenter__(self) -> Self: """Enter the context manager.""" return self diff --git a/python/semantic_kernel/filters/auto_function_invocation/auto_function_invocation_context.py b/python/semantic_kernel/filters/auto_function_invocation/auto_function_invocation_context.py index 5a0f7c300b46..2997c3d055dd 100644 --- a/python/semantic_kernel/filters/auto_function_invocation/auto_function_invocation_context.py +++ b/python/semantic_kernel/filters/auto_function_invocation/auto_function_invocation_context.py @@ -5,12 +5,13 @@ from semantic_kernel.filters.filter_context_base import FilterContextBase if TYPE_CHECKING: + from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.functions.function_result import FunctionResult class 
AutoFunctionInvocationContext(FilterContextBase): - """Class for auto function invocation context. + """The context for auto function invocation filtering. This is the context supplied to the auto function invocation filters. @@ -19,10 +20,11 @@ class AutoFunctionInvocationContext(FilterContextBase): Another option is to terminate, this can be done by setting terminate to True. - Attributes: + Args: function: The function invoked. kernel: The kernel used. arguments: The arguments used to call the function. + is_streaming: Whether the function is streaming. chat_history: The chat history or None. function_result: The function result or None. request_sequence_index: The request sequence index. @@ -34,6 +36,7 @@ class AutoFunctionInvocationContext(FilterContextBase): chat_history: "ChatHistory | None" = None function_result: "FunctionResult | None" = None + execution_settings: "PromptExecutionSettings | None" = None request_sequence_index: int = 0 function_sequence_index: int = 0 function_count: int = 0 diff --git a/python/semantic_kernel/filters/filter_context_base.py b/python/semantic_kernel/filters/filter_context_base.py index b7f1b8da82c8..23d3806aafc3 100644 --- a/python/semantic_kernel/filters/filter_context_base.py +++ b/python/semantic_kernel/filters/filter_context_base.py @@ -16,3 +16,4 @@ class FilterContextBase(KernelBaseModel): function: "KernelFunction" kernel: "Kernel" arguments: "KernelArguments" + is_streaming: bool = False diff --git a/python/semantic_kernel/filters/functions/function_invocation_context.py b/python/semantic_kernel/filters/functions/function_invocation_context.py index 5c9dedce50cb..3a557609571d 100644 --- a/python/semantic_kernel/filters/functions/function_invocation_context.py +++ b/python/semantic_kernel/filters/functions/function_invocation_context.py @@ -9,16 +9,17 @@ class FunctionInvocationContext(FilterContextBase): - """Class for function invocation context. + """The context for function invocation filtering. This filter can be used to monitor which functions are called. To log what function was called with which parameters and what output. Finally it can be used for caching by setting the result value. - Attributes: + Args: function: The function invoked. kernel: The kernel used. arguments: The arguments used to call the function. + is_streaming: Whether the function is streaming. result: The result of the function, or None. """ diff --git a/python/semantic_kernel/filters/kernel_filters_extension.py b/python/semantic_kernel/filters/kernel_filters_extension.py index 82bf489f8c5f..78152039724c 100644 --- a/python/semantic_kernel/filters/kernel_filters_extension.py +++ b/python/semantic_kernel/filters/kernel_filters_extension.py @@ -1,7 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
from abc import ABC -from collections.abc import Callable, Coroutine +from collections.abc import Awaitable, Callable, Coroutine from functools import partial from typing import Any, Literal, TypeVar @@ -13,7 +13,9 @@ from semantic_kernel.kernel_pydantic import KernelBaseModel FILTER_CONTEXT_TYPE = TypeVar("FILTER_CONTEXT_TYPE", bound=FilterContextBase) -CALLABLE_FILTER_TYPE = Callable[[FILTER_CONTEXT_TYPE, Callable[[FILTER_CONTEXT_TYPE], None]], None] +CALLABLE_FILTER_TYPE = Callable[ + [FILTER_CONTEXT_TYPE, Callable[[FILTER_CONTEXT_TYPE], Awaitable[None]]], Awaitable[None] +] ALLOWED_FILTERS_LITERAL = Literal[ FilterTypes.AUTO_FUNCTION_INVOCATION, FilterTypes.FUNCTION_INVOCATION, FilterTypes.PROMPT_RENDERING @@ -117,6 +119,7 @@ def construct_call_stack( def _rebuild_auto_function_invocation_context() -> None: + from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings # noqa: F401 from semantic_kernel.contents.chat_history import ChatHistory # noqa: F401 from semantic_kernel.filters.auto_function_invocation.auto_function_invocation_context import ( AutoFunctionInvocationContext, diff --git a/python/semantic_kernel/filters/prompts/prompt_render_context.py b/python/semantic_kernel/filters/prompts/prompt_render_context.py index dde178ad42d9..6afa5fa33766 100644 --- a/python/semantic_kernel/filters/prompts/prompt_render_context.py +++ b/python/semantic_kernel/filters/prompts/prompt_render_context.py @@ -9,15 +9,16 @@ class PromptRenderContext(FilterContextBase): - """Context for prompt rendering filters. + """The context for prompt rendering filtering. When prompt rendering is expensive (for instance when there are expensive functions being called.) - This filter can be used to set the rendered_prompt directly and returning. + This filter can be used to set the rendered_prompt or function result directly and returning. - Attributes: + Args: function: The function invoked. kernel: The kernel used. arguments: The arguments used to call the function. + is_streaming: Whether the function is streaming. rendered_prompt: The result of the prompt rendering. function_result: The result of the function that used the prompt. diff --git a/python/semantic_kernel/functions/function_result.py b/python/semantic_kernel/functions/function_result.py index f86e201c9ee8..1d3dd0c29f59 100644 --- a/python/semantic_kernel/functions/function_result.py +++ b/python/semantic_kernel/functions/function_result.py @@ -17,9 +17,10 @@ class FunctionResult(KernelBaseModel): """The result of a function. Args: - function (KernelFunctionMetadata): The metadata of the function that was invoked. - value (Any): The value of the result. - metadata (Mapping[str, Any]): The metadata of the result. + function: The metadata of the function that was invoked. + value: The value of the result. + rendered_prompt: The rendered prompt of the result. + metadata: The metadata of the result. 
Methods: __str__: Get the string representation of the result, will call str() on the value, @@ -31,6 +32,7 @@ class FunctionResult(KernelBaseModel): function: KernelFunctionMetadata value: Any + rendered_prompt: str | None = None metadata: dict[str, Any] = Field(default_factory=dict) def __str__(self) -> str: diff --git a/python/semantic_kernel/functions/kernel_function.py b/python/semantic_kernel/functions/kernel_function.py index 04c95e1dc873..a75459099290 100644 --- a/python/semantic_kernel/functions/kernel_function.py +++ b/python/semantic_kernel/functions/kernel_function.py @@ -30,6 +30,7 @@ from semantic_kernel.prompt_template.handlebars_prompt_template import HandlebarsPromptTemplate from semantic_kernel.prompt_template.jinja2_prompt_template import Jinja2PromptTemplate from semantic_kernel.prompt_template.kernel_prompt_template import KernelPromptTemplate +from semantic_kernel.prompt_template.prompt_template_base import PromptTemplateBase if TYPE_CHECKING: from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings @@ -37,7 +38,6 @@ from semantic_kernel.functions.kernel_function_from_method import KernelFunctionFromMethod from semantic_kernel.functions.kernel_function_from_prompt import KernelFunctionFromPrompt from semantic_kernel.kernel import Kernel - from semantic_kernel.prompt_template.prompt_template_base import PromptTemplateBase from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig # Logger, tracer and meter for observability @@ -46,7 +46,7 @@ meter: metrics.Meter = metrics.get_meter_provider().get_meter(__name__) MEASUREMENT_FUNCTION_TAG_NAME: str = "semantic_kernel.function.name" -TEMPLATE_FORMAT_MAP = { +TEMPLATE_FORMAT_MAP: dict[TEMPLATE_FORMAT_TYPES, type[PromptTemplateBase]] = { KERNEL_TEMPLATE_FORMAT_NAME: KernelPromptTemplate, HANDLEBARS_TEMPLATE_FORMAT_NAME: HandlebarsPromptTemplate, JINJA2_TEMPLATE_FORMAT_NAME: Jinja2PromptTemplate, @@ -292,7 +292,9 @@ async def invoke_stream( if arguments is None: arguments = KernelArguments(**kwargs) _rebuild_function_invocation_context() - function_context = FunctionInvocationContext(function=self, kernel=kernel, arguments=arguments) + function_context = FunctionInvocationContext( + function=self, kernel=kernel, arguments=arguments, is_streaming=True + ) with tracer.start_as_current_span(self.fully_qualified_name) as current_span: KernelFunctionLogMessages.log_function_streaming_invoking(logger, self.fully_qualified_name) diff --git a/python/semantic_kernel/functions/kernel_function_extension.py b/python/semantic_kernel/functions/kernel_function_extension.py index 84439dc7fea1..7cc963f94e28 100644 --- a/python/semantic_kernel/functions/kernel_function_extension.py +++ b/python/semantic_kernel/functions/kernel_function_extension.py @@ -86,6 +86,8 @@ def add_plugin( return self.plugins[plugin.name] if not plugin_name: raise ValueError("plugin_name must be provided if a plugin is not supplied.") + if not isinstance(plugin_name, str): + raise TypeError("plugin_name must be a string.") if plugin: self.plugins[plugin_name] = KernelPlugin.from_object( plugin_name=plugin_name, plugin_instance=plugin, description=description diff --git a/python/semantic_kernel/functions/kernel_function_from_prompt.py b/python/semantic_kernel/functions/kernel_function_from_prompt.py index 2dc9420f29e5..2e35e7413618 100644 --- a/python/semantic_kernel/functions/kernel_function_from_prompt.py +++ b/python/semantic_kernel/functions/kernel_function_from_prompt.py @@ -183,7 +183,10 @@ async 
def _invoke_internal(self, context: FunctionInvocationContext) -> None: raise FunctionExecutionException(f"No completions returned while invoking function {self.name}") context.result = self._create_function_result( - completions=chat_message_contents, chat_history=chat_history, arguments=context.arguments + completions=chat_message_contents, + chat_history=chat_history, + arguments=context.arguments, + prompt=prompt_render_result.rendered_prompt, ) return @@ -205,7 +208,10 @@ async def _invoke_internal(self, context: FunctionInvocationContext) -> None: async def _invoke_internal_stream(self, context: FunctionInvocationContext) -> None: """Invokes the function stream with the given arguments.""" - prompt_render_result = await self._render_prompt(context) + prompt_render_result = await self._render_prompt(context, is_streaming=True) + if prompt_render_result.function_result is not None: + context.result = prompt_render_result.function_result + return if isinstance(prompt_render_result.ai_service, ChatCompletionClientBase): chat_history = ChatHistory.from_rendered_prompt(prompt_render_result.rendered_prompt) @@ -223,14 +229,20 @@ async def _invoke_internal_stream(self, context: FunctionInvocationContext) -> N f"Service `{type(prompt_render_result.ai_service)}` is not a valid AI service" ) - context.result = FunctionResult(function=self.metadata, value=value) + context.result = FunctionResult( + function=self.metadata, value=value, rendered_prompt=prompt_render_result.rendered_prompt + ) - async def _render_prompt(self, context: FunctionInvocationContext) -> PromptRenderingResult: + async def _render_prompt( + self, context: FunctionInvocationContext, is_streaming: bool = False + ) -> PromptRenderingResult: """Render the prompt and apply the prompt rendering filters.""" self.update_arguments_with_defaults(context.arguments) _rebuild_prompt_render_context() - prompt_render_context = PromptRenderContext(function=self, kernel=context.kernel, arguments=context.arguments) + prompt_render_context = PromptRenderContext( + function=self, kernel=context.kernel, arguments=context.arguments, is_streaming=is_streaming + ) stack = context.kernel.construct_call_stack( filter_type=FilterTypes.PROMPT_RENDERING, @@ -247,6 +259,7 @@ async def _render_prompt(self, context: FunctionInvocationContext) -> PromptRend rendered_prompt=prompt_render_context.rendered_prompt, ai_service=selected_service[0], execution_settings=selected_service[1], + function_result=prompt_render_context.function_result, ) async def _inner_render_prompt(self, context: PromptRenderContext) -> None: @@ -273,6 +286,7 @@ def _create_function_result( function=self.metadata, value=completions, metadata=metadata, + rendered_prompt=prompt, ) def update_arguments_with_defaults(self, arguments: KernelArguments) -> None: diff --git a/python/semantic_kernel/functions/kernel_function_metadata.py b/python/semantic_kernel/functions/kernel_function_metadata.py index 50cdaa76e944..2204f710a251 100644 --- a/python/semantic_kernel/functions/kernel_function_metadata.py +++ b/python/semantic_kernel/functions/kernel_function_metadata.py @@ -14,7 +14,7 @@ class KernelFunctionMetadata(KernelBaseModel): """The kernel function metadata.""" name: str = Field(..., pattern=FUNCTION_NAME_REGEX) - plugin_name: str | None = Field(None, pattern=PLUGIN_NAME_REGEX) + plugin_name: str | None = Field(default=None, pattern=PLUGIN_NAME_REGEX) description: str | None = Field(default=None) parameters: list[KernelParameterMetadata] = Field(default_factory=list) is_prompt: bool 
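# A small sketch of filters exercising the async filter signature (CALLABLE_FILTER_TYPE is now
# Awaitable-based) and the new is_streaming / function_result fields used above. The kernel
# instance, the "use_canned" argument, and the canned value are illustrative.
from semantic_kernel import Kernel
from semantic_kernel.filters.filter_types import FilterTypes
from semantic_kernel.filters.functions.function_invocation_context import FunctionInvocationContext
from semantic_kernel.filters.prompts.prompt_render_context import PromptRenderContext
from semantic_kernel.functions import FunctionResult

kernel = Kernel()


@kernel.filter(FilterTypes.FUNCTION_INVOCATION)
async def log_streaming(context: FunctionInvocationContext, next):
    # is_streaming now lives on FilterContextBase, so any filter can branch on it.
    print(f"{context.function.fully_qualified_name} streaming={context.is_streaming}")
    await next(context)


@kernel.filter(FilterTypes.PROMPT_RENDERING)
async def short_circuit_rendering(context: PromptRenderContext, next):
    # Setting function_result here skips rendering and invocation entirely; the streaming path
    # honors this via the function_result check added to _invoke_internal_stream above.
    if context.arguments.get("use_canned"):
        context.function_result = FunctionResult(function=context.function.metadata, value="canned answer")
        return
    await next(context)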
diff --git a/python/semantic_kernel/functions/kernel_parameter_metadata.py b/python/semantic_kernel/functions/kernel_parameter_metadata.py index 7572d5d924b2..6eb28074879e 100644 --- a/python/semantic_kernel/functions/kernel_parameter_metadata.py +++ b/python/semantic_kernel/functions/kernel_parameter_metadata.py @@ -13,9 +13,9 @@ class KernelParameterMetadata(KernelBaseModel): """The kernel parameter metadata.""" name: str | None = Field(..., pattern=FUNCTION_PARAM_NAME_REGEX) - description: str | None = Field(None) + description: str | None = None default_value: Any | None = None - type_: str | None = Field("str", alias="type") + type_: str | None = Field(default="str", alias="type") is_required: bool | None = False type_object: Any | None = None schema_data: dict[str, Any] | None = None diff --git a/python/semantic_kernel/functions/kernel_plugin.py b/python/semantic_kernel/functions/kernel_plugin.py index 33e81c4ed8ee..c373de0b1238 100644 --- a/python/semantic_kernel/functions/kernel_plugin.py +++ b/python/semantic_kernel/functions/kernel_plugin.py @@ -398,7 +398,16 @@ def from_python_file( for name, cls_instance in inspect.getmembers(module, inspect.isclass): if cls_instance.__module__ != module_name: continue - instance = getattr(module, name)(**class_init_arguments.get(name, {}) if class_init_arguments else {}) + # Check whether this class has at least one @kernel_function decorated method + has_kernel_function = False + for _, method in inspect.getmembers(cls_instance, inspect.isfunction): + if getattr(method, "__kernel_function__", False): + has_kernel_function = True + break + if not has_kernel_function: + continue + init_args = class_init_arguments.get(name, {}) if class_init_arguments else {} + instance = getattr(module, name)(**init_args) return cls.from_object(plugin_name=plugin_name, description=description, plugin_instance=instance) raise PluginInitializationError(f"No class found in file: {py_file}") diff --git a/python/semantic_kernel/kernel.py b/python/semantic_kernel/kernel.py index a827fb8dbf1c..ad71ffccfedb 100644 --- a/python/semantic_kernel/kernel.py +++ b/python/semantic_kernel/kernel.py @@ -315,10 +315,13 @@ async def invoke_function_call( self, function_call: FunctionCallContent, chat_history: ChatHistory, + *, arguments: "KernelArguments | None" = None, + execution_settings: "PromptExecutionSettings | None" = None, function_call_count: int | None = None, request_index: int | None = None, - function_behavior: "FunctionChoiceBehavior" = None, # type: ignore + is_streaming: bool = False, + function_behavior: "FunctionChoiceBehavior | None" = None, ) -> "AutoFunctionInvocationContext | None": """Processes the provided FunctionCallContent and updates the chat history.""" args_cloned = copy(arguments) if arguments else KernelArguments() @@ -382,7 +385,9 @@ async def invoke_function_call( function=function_to_call, kernel=self, arguments=args_cloned, + is_streaming=is_streaming, chat_history=chat_history, + execution_settings=execution_settings, function_result=FunctionResult(function=function_to_call.metadata, value=None), function_count=function_call_count or 0, request_sequence_index=request_index or 0, diff --git a/python/semantic_kernel/kernel_pydantic.py b/python/semantic_kernel/kernel_pydantic.py index 50e6e047a172..f3a2a4338bf6 100644 --- a/python/semantic_kernel/kernel_pydantic.py +++ b/python/semantic_kernel/kernel_pydantic.py @@ -35,8 +35,8 @@ class KernelBaseSettings(BaseSettings): """ env_prefix: ClassVar[str] = "" - env_file_path: str | None = Field(None, 
exclude=True) - env_file_encoding: str = Field("utf-8", exclude=True) + env_file_path: str | None = Field(default=None, exclude=True) + env_file_encoding: str = Field(default="utf-8", exclude=True) model_config = SettingsConfigDict( extra="ignore", diff --git a/python/semantic_kernel/memory/memory_query_result.py b/python/semantic_kernel/memory/memory_query_result.py index 23467885a257..7884392bd51e 100644 --- a/python/semantic_kernel/memory/memory_query_result.py +++ b/python/semantic_kernel/memory/memory_query_result.py @@ -3,10 +3,10 @@ from numpy import ndarray from semantic_kernel.memory.memory_record import MemoryRecord -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class MemoryQueryResult: """The memory query result.""" diff --git a/python/semantic_kernel/memory/memory_record.py b/python/semantic_kernel/memory/memory_record.py index 877953a336cd..95e647b03e99 100644 --- a/python/semantic_kernel/memory/memory_record.py +++ b/python/semantic_kernel/memory/memory_record.py @@ -4,10 +4,10 @@ from numpy import ndarray -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class MemoryRecord: """The in-built memory record.""" diff --git a/python/semantic_kernel/memory/memory_store_base.py b/python/semantic_kernel/memory/memory_store_base.py index 8a79472e1b00..c762ef9f080e 100644 --- a/python/semantic_kernel/memory/memory_store_base.py +++ b/python/semantic_kernel/memory/memory_store_base.py @@ -5,10 +5,10 @@ from numpy import ndarray from semantic_kernel.memory.memory_record import MemoryRecord -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class MemoryStoreBase(ABC): """Base class for memory store.""" diff --git a/python/semantic_kernel/memory/null_memory.py b/python/semantic_kernel/memory/null_memory.py index 78fb88d74c42..e202c0ad9310 100644 --- a/python/semantic_kernel/memory/null_memory.py +++ b/python/semantic_kernel/memory/null_memory.py @@ -2,10 +2,10 @@ from semantic_kernel.memory.memory_query_result import MemoryQueryResult from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class NullMemory(SemanticTextMemoryBase): """Class for null memory.""" diff --git a/python/semantic_kernel/memory/semantic_text_memory.py b/python/semantic_kernel/memory/semantic_text_memory.py index 454727a8c987..0c3fdb0d66b7 100644 --- a/python/semantic_kernel/memory/semantic_text_memory.py +++ b/python/semantic_kernel/memory/semantic_text_memory.py @@ -9,10 +9,10 @@ from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class SemanticTextMemory(SemanticTextMemoryBase): """Class for semantic text memory.""" diff --git 
a/python/semantic_kernel/memory/semantic_text_memory_base.py b/python/semantic_kernel/memory/semantic_text_memory_base.py index 74c4c48a67c9..af35d34635e0 100644 --- a/python/semantic_kernel/memory/semantic_text_memory_base.py +++ b/python/semantic_kernel/memory/semantic_text_memory_base.py @@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, Any, TypeVar from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.memory.memory_query_result import MemoryQueryResult @@ -12,7 +12,7 @@ SemanticTextMemoryT = TypeVar("SemanticTextMemoryT", bound="SemanticTextMemoryBase") -@experimental_class +@experimental class SemanticTextMemoryBase(KernelBaseModel): """Base class for semantic text memory.""" diff --git a/python/semantic_kernel/memory/volatile_memory_store.py b/python/semantic_kernel/memory/volatile_memory_store.py index 9b3ab4ccb65d..9736efc6235d 100644 --- a/python/semantic_kernel/memory/volatile_memory_store.py +++ b/python/semantic_kernel/memory/volatile_memory_store.py @@ -8,12 +8,12 @@ from semantic_kernel.exceptions import ServiceResourceNotFoundError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class VolatileMemoryStore(MemoryStoreBase): """A volatile memory store that stores data in memory.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/actors/actor_state_key.py b/python/semantic_kernel/processes/dapr_runtime/actors/actor_state_key.py index f1701c338184..0be0701795c0 100644 --- a/python/semantic_kernel/processes/dapr_runtime/actors/actor_state_key.py +++ b/python/semantic_kernel/processes/dapr_runtime/actors/actor_state_key.py @@ -2,10 +2,10 @@ from enum import Enum -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class ActorStateKeys(Enum): """Keys used to store actor state in Dapr.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/actors/event_buffer_actor.py b/python/semantic_kernel/processes/dapr_runtime/actors/event_buffer_actor.py index 1a5f235e0628..b74b3d233539 100644 --- a/python/semantic_kernel/processes/dapr_runtime/actors/event_buffer_actor.py +++ b/python/semantic_kernel/processes/dapr_runtime/actors/event_buffer_actor.py @@ -8,12 +8,12 @@ from semantic_kernel.processes.dapr_runtime.actors.actor_state_key import ActorStateKeys from semantic_kernel.processes.dapr_runtime.interfaces.message_buffer_interface import MessageBufferInterface -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger = logging.getLogger(__name__) -@experimental_class +@experimental class EventBufferActor(Actor, MessageBufferInterface): """Represents a message buffer actor that manages a queue of JSON strings representing events.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/actors/external_event_buffer_actor.py b/python/semantic_kernel/processes/dapr_runtime/actors/external_event_buffer_actor.py index 
1938056b4f23..213d491e413e 100644 --- a/python/semantic_kernel/processes/dapr_runtime/actors/external_event_buffer_actor.py +++ b/python/semantic_kernel/processes/dapr_runtime/actors/external_event_buffer_actor.py @@ -11,12 +11,12 @@ from semantic_kernel.processes.dapr_runtime.interfaces.external_event_buffer_interface import ( ExternalEventBufferInterface, ) -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger = logging.getLogger(__name__) -@experimental_class +@experimental class ExternalEventBufferActor(Actor, ExternalEventBufferInterface): """Represents a message buffer actor that follows the MessageBuffer abstract class.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/actors/message_buffer_actor.py b/python/semantic_kernel/processes/dapr_runtime/actors/message_buffer_actor.py index b94616a7ec6e..bc1a2bca9bbf 100644 --- a/python/semantic_kernel/processes/dapr_runtime/actors/message_buffer_actor.py +++ b/python/semantic_kernel/processes/dapr_runtime/actors/message_buffer_actor.py @@ -9,12 +9,12 @@ from semantic_kernel.processes.dapr_runtime.actors.actor_state_key import ActorStateKeys from semantic_kernel.processes.dapr_runtime.interfaces.message_buffer_interface import MessageBufferInterface -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class MessageBufferActor(Actor, MessageBufferInterface): """Represents a message buffer actor that follows the MessageBuffer abstract class.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/actors/process_actor.py b/python/semantic_kernel/processes/dapr_runtime/actors/process_actor.py index a57e44a4ba58..09e2ab083bdd 100644 --- a/python/semantic_kernel/processes/dapr_runtime/actors/process_actor.py +++ b/python/semantic_kernel/processes/dapr_runtime/actors/process_actor.py @@ -5,6 +5,7 @@ import json import logging import uuid +from collections.abc import Callable, MutableSequence, Sequence from queue import Queue from typing import Any @@ -37,27 +38,29 @@ from semantic_kernel.processes.process_event import ProcessEvent from semantic_kernel.processes.process_message import ProcessMessage from semantic_kernel.processes.process_message_factory import ProcessMessageFactory -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class ProcessActor(StepActor, ProcessInterface): """A local process that contains a collection of steps.""" - def __init__(self, ctx: ActorRuntimeContext, actor_id: ActorId, kernel: Kernel): + def __init__(self, ctx: ActorRuntimeContext, actor_id: ActorId, kernel: Kernel, factories: dict[str, Callable]): """Initializes a new instance of ProcessActor. Args: ctx: The actor runtime context. actor_id: The unique ID for the actor. kernel: The Kernel dependency to be injected. + factories: The factory dictionary that contains step types to factory methods. 
""" - super().__init__(ctx, actor_id, kernel) + super().__init__(ctx, actor_id, kernel, factories) self.kernel = kernel - self.steps: list[StepInterface] = [] - self.step_infos: list[DaprStepInfo] = [] + self.factories = factories + self.steps: MutableSequence[StepInterface] = [] + self.step_infos: MutableSequence[DaprStepInfo] = [] self.initialize_task: bool | None = False self.external_event_queue: Queue = Queue() self.process_task: asyncio.Task | None = None @@ -130,7 +133,7 @@ async def start(self, keep_alive: bool = True) -> None: if not self.process_task or self.process_task.done(): self.process_task = asyncio.create_task(self.internal_execute(keep_alive=keep_alive)) - async def run_once(self, process_event: str) -> None: + async def run_once(self, process_event: KernelProcessEvent | str | None) -> None: """Starts the process with an initial event and waits for it to finish. Args: @@ -145,7 +148,9 @@ async def run_once(self, process_event: str) -> None: actor_interface=ExternalEventBufferInterface, ) try: - await external_event_queue.enqueue(process_event) + await external_event_queue.enqueue( + process_event.model_dump_json() if isinstance(process_event, KernelProcessEvent) else process_event + ) logger.info(f"Run once for process event: {process_event}") @@ -169,7 +174,7 @@ async def stop(self): with contextlib.suppress(asyncio.CancelledError): await self.process_task - async def initialize_step(self): + async def initialize_step(self, input: str) -> None: """Initializes the step.""" # The process does not need any further initialization pass @@ -215,9 +220,13 @@ async def to_dapr_process_info(self) -> DaprProcessInfo: process_state = KernelProcessState(self.name, self.id.id) step_tasks = [step.to_dapr_step_info() for step in self.steps] - steps = await asyncio.gather(*step_tasks) + steps: Sequence[str] = await asyncio.gather(*step_tasks) return DaprProcessInfo( - inner_step_python_type=self.inner_step_type, edges=self.process.edges, state=process_state, steps=steps + inner_step_python_type=self.inner_step_type, + edges=self.process.edges, + state=process_state, + # steps are model dumps of the classes, which pydantic can parse back. + steps=steps, # type: ignore ) async def handle_message(self, message: ProcessMessage) -> None: @@ -251,7 +260,7 @@ async def _initialize_process_actor( self.output_edges = {kvp[0]: list(kvp[1]) for kvp in self.process.edges.items()} for step in self.step_infos: - step_actor = None + step_actor: StepInterface | None = None # The current step should already have a name. 
assert step.state and step.state.name is not None # nosec @@ -283,7 +292,7 @@ async def _initialize_process_actor( assert step.state and step.state.id is not None # nosec scoped_step_id = self._scoped_actor_id(ActorId(step.state.id)) - step_actor: StepInterface = ActorProxy.create( # type: ignore + step_actor = ActorProxy.create( # type: ignore actor_type=f"{StepActor.__name__}", actor_id=scoped_step_id, actor_interface=StepInterface, diff --git a/python/semantic_kernel/processes/dapr_runtime/actors/step_actor.py b/python/semantic_kernel/processes/dapr_runtime/actors/step_actor.py index 7b38c7633a31..08e719237caf 100644 --- a/python/semantic_kernel/processes/dapr_runtime/actors/step_actor.py +++ b/python/semantic_kernel/processes/dapr_runtime/actors/step_actor.py @@ -4,6 +4,8 @@ import importlib import json import logging +from collections.abc import Callable +from inspect import isawaitable from queue import Queue from typing import Any @@ -37,25 +39,27 @@ from semantic_kernel.processes.process_message_factory import ProcessMessageFactory from semantic_kernel.processes.process_types import get_generic_state_type from semantic_kernel.processes.step_utils import find_input_channels, get_fully_qualified_name -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class StepActor(Actor, StepInterface, KernelProcessMessageChannel): """Represents a step actor that follows the Step abstract class.""" - def __init__(self, ctx: ActorRuntimeContext, actor_id: ActorId, kernel: Kernel): + def __init__(self, ctx: ActorRuntimeContext, actor_id: ActorId, kernel: Kernel, factories: dict[str, Callable]): """Initializes a new instance of StepActor. Args: ctx: The actor runtime context. actor_id: The unique ID for the actor. kernel: The Kernel dependency to be injected. + factories: The factory dictionary to use for creating the step. """ super().__init__(ctx, actor_id) self.kernel = kernel + self.factories: dict[str, Callable] = factories self.parent_process_id: str | None = None self.step_info: DaprStepInfo | None = None self.initialize_task: bool | None = False @@ -172,31 +176,38 @@ def _get_class_from_string(self, full_class_name: str): async def activate_step(self): """Initializes the step.""" - # Instantiate an instance of the inner step object - step_cls = self._get_class_from_string(self.inner_step_type) - - step_instance: KernelProcessStep = step_cls() # type: ignore + # Instantiate an instance of the inner step object and retrieve its class reference. + if self.factories and self.inner_step_type in self.factories: + step_object = self.factories[self.inner_step_type]() + if isawaitable(step_object): + step_object = await step_object + step_cls = step_object.__class__ + step_instance: KernelProcessStep = step_object # type: ignore + else: + step_cls = self._get_class_from_string(self.inner_step_type) + step_instance: KernelProcessStep = step_cls() # type: ignore kernel_plugin = self.kernel.add_plugin( - step_instance, self.step_info.state.name if self.step_info.state else "default_name" + step_instance, + self.step_info.state.name if self.step_info.state else "default_name", ) - # Load the kernel functions + # Load the kernel functions. for name, f in kernel_plugin.functions.items(): self.functions[name] = f - # Initialize the input channels + # Initialize the input channels. 
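# A minimal, standard-library-only sketch of the factory-resolution pattern used in
# activate_step above: prefer a registered factory (which may be sync or async), otherwise
# fall back to the default constructor. SampleStep and the factory key are illustrative
# names, not part of the Semantic Kernel API.
import asyncio
from collections.abc import Callable
from inspect import isawaitable


class SampleStep:
    """Illustrative stand-in for a step type with a non-serializable dependency."""

    def __init__(self, client: object | None = None) -> None:
        self.client = client


async def resolve_step(step_type_name: str, factories: dict[str, Callable]) -> SampleStep:
    factory = factories.get(step_type_name)
    if factory is not None:
        instance = factory()
        if isawaitable(instance):  # factories are allowed to be async
            instance = await instance
        return instance
    return SampleStep()  # no factory registered: default construction


async def _demo() -> None:
    factories = {"samples.SampleStep": lambda: SampleStep(client=object())}
    step = await resolve_step("samples.SampleStep", factories)
    print(type(step).__name__, step.client is not None)


asyncio.run(_demo())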
self.initial_inputs = find_input_channels(channel=self, functions=self.functions) self.inputs = {k: {kk: vv for kk, vv in v.items()} if v else {} for k, v in self.initial_inputs.items()} - # Use the existing state or create a new one if not provided + # Use the existing state or create a new one if not provided. state_object = self.step_info.state - # Extract TState from inner_step_type + # Extract TState from inner_step_type using the class reference. t_state = get_generic_state_type(step_cls) if t_state is not None: - # Create state_type as KernelProcessStepState[TState] + # Create state_type as KernelProcessStepState[TState]. state_type = KernelProcessStepState[t_state] if state_object is None: @@ -206,7 +217,7 @@ async def activate_step(self): state=None, ) else: - # Make sure state_object is an instance of state_type + # Ensure that state_object is an instance of the expected type. if not isinstance(state_object, KernelProcessStepState): error_message = "State object is not of the expected type." raise KernelException(error_message) @@ -215,15 +226,13 @@ async def activate_step(self): ActorStateKeys.StepStateType.value, get_fully_qualified_name(t_state), ) - await self._state_manager.try_add_state( ActorStateKeys.StepStateJson.value, json.dumps(state_object.model_dump()), ) - await self._state_manager.save_state() - # Make sure that state_object.state is not None + # Initialize state_object.state if it is not already set. if state_object.state is None: try: state_object.state = t_state() @@ -231,9 +240,8 @@ async def activate_step(self): error_message = f"Cannot instantiate state of type {t_state}: {e}" raise KernelException(error_message) else: - # The step has no user-defined state; use the base KernelProcessStepState + # The step has no user-defined state; use the base KernelProcessStepState. state_type = KernelProcessStepState - if state_object is None: state_object = state_type( name=step_cls.__name__, @@ -245,7 +253,7 @@ async def activate_step(self): error_message = "The state object for the KernelProcessStep could not be created." raise KernelException(error_message) - # Set the step state and activate the step with the state object + # Set the step state and activate the step with the state object. 
self.step_state = state_object await step_instance.activate(state_object) @@ -324,7 +332,7 @@ async def handle_message(self, message: ProcessMessage): raise ProcessFunctionNotFoundException(f"Function {target_function} not found in plugin {self.name}") invoke_result = None - event_name = None + event_name: str = "" event_value = None try: @@ -332,6 +340,8 @@ async def handle_message(self, message: ProcessMessage): f"Invoking plugin `{function.plugin_name}` and function `{function.name}` with arguments: {arguments}" ) invoke_result = await self.invoke_function(function, self.kernel, arguments) + if invoke_result is None: + raise KernelException(f"Function {target_function} returned None.") event_name = f"{target_function}.OnResult" event_value = invoke_result.value @@ -398,7 +408,9 @@ async def to_dapr_step_info(self) -> str: raise ValueError("The inner step type must be initialized before converting to DaprStepInfo.") step_info = DaprStepInfo( - inner_step_python_type=self.inner_step_type, state=self.step_info.state, edges=self.step_info.edges + inner_step_python_type=self.inner_step_type, + state=self.step_info.state, + edges=self.step_info.edges, ) return step_info.model_dump_json() diff --git a/python/semantic_kernel/processes/dapr_runtime/dapr_actor_registration.py b/python/semantic_kernel/processes/dapr_runtime/dapr_actor_registration.py index 88a0378f5395..6e2dfb13562b 100644 --- a/python/semantic_kernel/processes/dapr_runtime/dapr_actor_registration.py +++ b/python/semantic_kernel/processes/dapr_runtime/dapr_actor_registration.py @@ -1,5 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. +from collections.abc import Callable from typing import TYPE_CHECKING from dapr.actor import ActorId @@ -19,22 +20,32 @@ from semantic_kernel.kernel import Kernel -def create_actor_factories(kernel: "Kernel") -> tuple: +def create_actor_factories(kernel: "Kernel", factories: dict[str, Callable] | None = None) -> tuple: """Creates actor factories for ProcessActor and StepActor.""" + if factories is None: + factories = {} - def process_actor_factory(ctx: ActorRuntimeContext, actor_id: ActorId) -> ProcessActor: - return ProcessActor(ctx, actor_id, kernel) + def process_actor_factory( + ctx: ActorRuntimeContext, + actor_id: ActorId, + ) -> ProcessActor: + return ProcessActor(ctx, actor_id, kernel=kernel, factories=factories) - def step_actor_factory(ctx: ActorRuntimeContext, actor_id: ActorId) -> StepActor: - return StepActor(ctx, actor_id, kernel=kernel) + def step_actor_factory( + ctx: ActorRuntimeContext, + actor_id: ActorId, + ) -> StepActor: + return StepActor(ctx, actor_id, kernel=kernel, factories=factories) return process_actor_factory, step_actor_factory # Asynchronous registration for FastAPI -async def register_fastapi_dapr_actors(actor: FastAPIDaprActor, kernel: "Kernel") -> None: +async def register_fastapi_dapr_actors( + actor: FastAPIDaprActor, kernel: "Kernel", factories: dict[str, Callable] | None = None +) -> None: """Registers the actors with the Dapr runtime for use with a FastAPI app.""" - process_actor_factory, step_actor_factory = create_actor_factories(kernel) + process_actor_factory, step_actor_factory = create_actor_factories(kernel, factories) await actor.register_actor(ProcessActor, actor_factory=process_actor_factory) await actor.register_actor(StepActor, actor_factory=step_actor_factory) await actor.register_actor(EventBufferActor) @@ -43,9 +54,11 @@ async def register_fastapi_dapr_actors(actor: FastAPIDaprActor, kernel: "Kernel" # Synchronous registration for Flask 
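# A hedged wiring sketch for the FastAPI registration path shown above: the factories dict is
# passed to register_fastapi_dapr_actors so ProcessActor and StepActor can construct steps with
# dependencies that cannot be serialized. Assumes the dapr-ext-fastapi package (DaprActor) is
# installed; SampleStep and the factory key are illustrative.
from collections.abc import Callable

from dapr.ext.fastapi import DaprActor
from fastapi import FastAPI

from semantic_kernel import Kernel
from semantic_kernel.processes.dapr_runtime.dapr_actor_registration import register_fastapi_dapr_actors

app = FastAPI()
dapr_actor = DaprActor(app)


class SampleStep:  # illustrative; a real step would subclass KernelProcessStep
    ...


# Keys are fully qualified step type names (see get_fully_qualified_name in step_utils).
step_factories: dict[str, Callable] = {"samples.SampleStep": SampleStep}


@app.on_event("startup")
async def startup() -> None:
    await register_fastapi_dapr_actors(dapr_actor, Kernel(), step_factories)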
-def register_flask_dapr_actors(actor: FlaskDaprActor, kernel: "Kernel") -> None: +def register_flask_dapr_actors( + actor: FlaskDaprActor, kernel: "Kernel", factory: dict[str, Callable] | None = None +) -> None: """Registers the actors with the Dapr runtime for use with a Flask app.""" - process_actor_factory, step_actor_factory = create_actor_factories(kernel) + process_actor_factory, step_actor_factory = create_actor_factories(kernel, factory) actor.register_actor(ProcessActor, actor_factory=process_actor_factory) actor.register_actor(StepActor, actor_factory=step_actor_factory) actor.register_actor(EventBufferActor) diff --git a/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process.py b/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process.py index cf8a3dabb09a..f95e0db2bd94 100644 --- a/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process.py +++ b/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process.py @@ -6,13 +6,13 @@ from semantic_kernel.exceptions.process_exceptions import ProcessInvalidConfigurationException from semantic_kernel.processes.dapr_runtime.dapr_kernel_process_context import DaprKernelProcessContext from semantic_kernel.processes.kernel_process.kernel_process_event import KernelProcessEvent -from semantic_kernel.utils.experimental_decorator import experimental_function +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.processes.kernel_process.kernel_process import KernelProcess -@experimental_function +@experimental async def start( process: "KernelProcess", initial_event: KernelProcessEvent | str | Enum, diff --git a/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process_context.py b/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process_context.py index b6f5780daab2..6cbb4f369f00 100644 --- a/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process_context.py +++ b/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process_context.py @@ -9,10 +9,10 @@ from semantic_kernel.processes.dapr_runtime.interfaces.process_interface import ProcessInterface from semantic_kernel.processes.kernel_process.kernel_process import KernelProcess from semantic_kernel.processes.kernel_process.kernel_process_event import KernelProcessEvent -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class DaprKernelProcessContext: """A Dapr kernel process context.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/dapr_process_info.py b/python/semantic_kernel/processes/dapr_runtime/dapr_process_info.py index 4a93ad10e66a..f573c20c2726 100644 --- a/python/semantic_kernel/processes/dapr_runtime/dapr_process_info.py +++ b/python/semantic_kernel/processes/dapr_runtime/dapr_process_info.py @@ -1,6 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
+from collections.abc import MutableSequence from typing import Literal from pydantic import Field @@ -9,15 +10,15 @@ from semantic_kernel.processes.kernel_process.kernel_process import KernelProcess from semantic_kernel.processes.kernel_process.kernel_process_state import KernelProcessState from semantic_kernel.processes.kernel_process.kernel_process_step_info import KernelProcessStepInfo -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class DaprProcessInfo(DaprStepInfo): """A Dapr process info.""" - type: Literal["DaprProcessInfo"] = Field("DaprProcessInfo") # type: ignore - steps: list["DaprStepInfo | DaprProcessInfo"] = Field(default_factory=list) + type: Literal["DaprProcessInfo"] = "DaprProcessInfo" # type: ignore + steps: MutableSequence["DaprStepInfo | DaprProcessInfo"] = Field(default_factory=list) def to_kernel_process(self) -> KernelProcess: """Converts the Dapr process info to a kernel process.""" @@ -40,7 +41,7 @@ def from_kernel_process(cls, kernel_process: KernelProcess) -> "DaprProcessInfo" raise ValueError("Kernel process must be provided") dapr_step_info = DaprStepInfo.from_kernel_step_info(kernel_process) - dapr_steps: list[DaprProcessInfo | DaprStepInfo] = [] + dapr_steps: MutableSequence[DaprProcessInfo | DaprStepInfo] = [] for step in kernel_process.steps: if isinstance(step, KernelProcess): diff --git a/python/semantic_kernel/processes/dapr_runtime/dapr_step_info.py b/python/semantic_kernel/processes/dapr_runtime/dapr_step_info.py index 85f149709490..c0cb7490f899 100644 --- a/python/semantic_kernel/processes/dapr_runtime/dapr_step_info.py +++ b/python/semantic_kernel/processes/dapr_runtime/dapr_step_info.py @@ -11,14 +11,14 @@ from semantic_kernel.processes.kernel_process.kernel_process_step_info import KernelProcessStepInfo from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState from semantic_kernel.processes.step_utils import get_fully_qualified_name -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class DaprStepInfo(KernelBaseModel): """A Dapr step info.""" - type: Literal["DaprStepInfo"] = Field("DaprStepInfo") + type: Literal["DaprStepInfo"] = "DaprStepInfo" inner_step_python_type: str state: KernelProcessStepState edges: dict[str, list[KernelProcessEdge]] = Field(default_factory=dict) diff --git a/python/semantic_kernel/processes/dapr_runtime/interfaces/event_buffer_interface.py b/python/semantic_kernel/processes/dapr_runtime/interfaces/event_buffer_interface.py index 3f6312b7ff37..e0de52953d8d 100644 --- a/python/semantic_kernel/processes/dapr_runtime/interfaces/event_buffer_interface.py +++ b/python/semantic_kernel/processes/dapr_runtime/interfaces/event_buffer_interface.py @@ -4,10 +4,10 @@ from dapr.actor import ActorInterface, actormethod -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class EventBufferInterface(ActorInterface, ABC): """Abstract base class for an event buffer that follows the ActorInterface.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/interfaces/external_event_buffer_interface.py b/python/semantic_kernel/processes/dapr_runtime/interfaces/external_event_buffer_interface.py 
index af1bb8686d01..3e9fbe212de5 100644 --- a/python/semantic_kernel/processes/dapr_runtime/interfaces/external_event_buffer_interface.py +++ b/python/semantic_kernel/processes/dapr_runtime/interfaces/external_event_buffer_interface.py @@ -5,10 +5,10 @@ from dapr.actor import ActorInterface, actormethod -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class ExternalEventBufferInterface(ActorInterface, ABC): """Abstract base class for an external event buffer that follows the ActorInterface.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/interfaces/message_buffer_interface.py b/python/semantic_kernel/processes/dapr_runtime/interfaces/message_buffer_interface.py index c1591c219b85..a69bc55da549 100644 --- a/python/semantic_kernel/processes/dapr_runtime/interfaces/message_buffer_interface.py +++ b/python/semantic_kernel/processes/dapr_runtime/interfaces/message_buffer_interface.py @@ -4,10 +4,10 @@ from dapr.actor import ActorInterface, actormethod -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class MessageBufferInterface(ActorInterface, ABC): """Abstract base class for a message event buffer that follows the ActorInterface.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/interfaces/process_interface.py b/python/semantic_kernel/processes/dapr_runtime/interfaces/process_interface.py index 63268ed58fbc..7eac1ce61384 100644 --- a/python/semantic_kernel/processes/dapr_runtime/interfaces/process_interface.py +++ b/python/semantic_kernel/processes/dapr_runtime/interfaces/process_interface.py @@ -2,15 +2,18 @@ from abc import ABC, abstractmethod +from typing import TYPE_CHECKING from dapr.actor import ActorInterface, actormethod -from semantic_kernel.processes.dapr_runtime.dapr_process_info import DaprProcessInfo -from semantic_kernel.processes.kernel_process.kernel_process_event import KernelProcessEvent -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental +if TYPE_CHECKING: + from semantic_kernel.processes.dapr_runtime.dapr_process_info import DaprProcessInfo + from semantic_kernel.processes.kernel_process.kernel_process_event import KernelProcessEvent -@experimental_class + +@experimental class ProcessInterface(ActorInterface, ABC): """Abstract base class for a process that follows the ActorInterface.""" @@ -35,7 +38,7 @@ async def start(self, keep_alive: bool) -> None: @abstractmethod @actormethod(name="run_once") - async def run_once(self, process_event: KernelProcessEvent) -> None: + async def run_once(self, process_event: "KernelProcessEvent | str | None") -> None: """Starts the process with an initial event and then waits for the process to finish. :param process_event: Required. The KernelProcessEvent to start the process with. 
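A small sketch of the normalization that run_once now performs on its loosened parameter type, assuming only the KernelProcessEvent model shown in this patch: an event object is serialized with model_dump_json before being enqueued, while a plain event id string passes through unchanged.

from semantic_kernel.processes.kernel_process.kernel_process_event import KernelProcessEvent


def to_queue_payload(process_event: KernelProcessEvent | str) -> str:
    # Illustrative helper mirroring the branch in ProcessActor.run_once; not part of the SK API.
    if isinstance(process_event, KernelProcessEvent):
        return process_event.model_dump_json()
    return process_event


print(to_queue_payload("start"))
print(to_queue_payload(KernelProcessEvent(id="start", data={"answer": 42})))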
diff --git a/python/semantic_kernel/processes/dapr_runtime/interfaces/step_interface.py b/python/semantic_kernel/processes/dapr_runtime/interfaces/step_interface.py index 987e52103cb1..630a254207a1 100644 --- a/python/semantic_kernel/processes/dapr_runtime/interfaces/step_interface.py +++ b/python/semantic_kernel/processes/dapr_runtime/interfaces/step_interface.py @@ -5,10 +5,10 @@ from dapr.actor import ActorInterface, actormethod -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class StepInterface(ActorInterface, ABC): """Abstract base class for a step in the process workflow.""" diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process.py b/python/semantic_kernel/processes/kernel_process/kernel_process.py index c7f90e85959c..6b6dea0fc21d 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process.py @@ -1,5 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. +from collections.abc import Callable from typing import TYPE_CHECKING, Any from pydantic import Field @@ -7,25 +8,35 @@ from semantic_kernel.processes.kernel_process.kernel_process_edge import KernelProcessEdge from semantic_kernel.processes.kernel_process.kernel_process_state import KernelProcessState from semantic_kernel.processes.kernel_process.kernel_process_step_info import KernelProcessStepInfo -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.processes.kernel_process.kernel_process_edge import KernelProcessEdge -@experimental_class +@experimental class KernelProcess(KernelProcessStepInfo): """A kernel process.""" steps: list[KernelProcessStepInfo] = Field(default_factory=list) + factories: dict[str, Callable] = Field(default_factory=dict) def __init__( self, state: KernelProcessState, steps: list[KernelProcessStepInfo], edges: dict[str, list["KernelProcessEdge"]] | None = None, + factories: dict[str, Callable] | None = None, ): - """Initialize the kernel process.""" + """Initialize the kernel process. + + Args: + state: The state of the process. + steps: The steps of the process. + edges: The edges of the process. Defaults to None. + factories: The factories of the process. This allows for the creation of + steps that require complex dependencies that cannot be JSON serialized or deserialized. 
+ """ if not state: raise ValueError("state cannot be None") if not steps: @@ -43,4 +54,7 @@ def __init__( "output_edges": edges or {}, } + if factories: + args["factories"] = factories + super().__init__(**args) diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_edge.py b/python/semantic_kernel/processes/kernel_process/kernel_process_edge.py index 26f41dd9ab34..8e654c388bd0 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_edge.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_edge.py @@ -3,10 +3,10 @@ from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.processes.kernel_process.kernel_process_function_target import KernelProcessFunctionTarget -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class KernelProcessEdge(KernelBaseModel): """Represents an edge between steps.""" diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_event.py b/python/semantic_kernel/processes/kernel_process/kernel_process_event.py index b51efb334f64..2dc14331b2a4 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_event.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_event.py @@ -6,10 +6,10 @@ from pydantic import ConfigDict from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class KernelProcessEventVisibility(Enum): """Visibility of a kernel process event.""" @@ -21,7 +21,7 @@ class KernelProcessEventVisibility(Enum): Internal = "Internal" -@experimental_class +@experimental class KernelProcessEvent(KernelBaseModel): """A kernel process event.""" @@ -29,4 +29,4 @@ class KernelProcessEvent(KernelBaseModel): data: Any | None = None visibility: KernelProcessEventVisibility = KernelProcessEventVisibility.Internal - model_config = ConfigDict(use_enum_values=True) + model_config = ConfigDict(use_enum_values=False) diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_function_target.py b/python/semantic_kernel/processes/kernel_process/kernel_process_function_target.py index ead28bb8ff55..2372253e26b6 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_function_target.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_function_target.py @@ -1,10 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. 
from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class KernelProcessFunctionTarget(KernelBaseModel): """The target of a function call in a kernel process.""" diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_message_channel.py b/python/semantic_kernel/processes/kernel_process/kernel_process_message_channel.py index 566a787878ad..7337666118a3 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_message_channel.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_message_channel.py @@ -3,10 +3,10 @@ from abc import ABC, abstractmethod from semantic_kernel.processes.local_runtime.local_event import KernelProcessEvent -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class KernelProcessMessageChannel(ABC): """Abstract base class for emitting events from a step.""" diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_state.py b/python/semantic_kernel/processes/kernel_process/kernel_process_state.py index 641e29e518f6..0cb2e53d2aab 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_state.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_state.py @@ -1,10 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class KernelProcessState(KernelProcessStepState): """The state of a kernel process.""" diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_step.py b/python/semantic_kernel/processes/kernel_process/kernel_process_step.py index 887dcfac47e1..653a2e0ba19e 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_step.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_step.py @@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, Generic, TypeVar from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState @@ -12,7 +12,7 @@ TState = TypeVar("TState") -@experimental_class +@experimental class KernelProcessStep(ABC, KernelBaseModel, Generic[TState]): """A KernelProcessStep Base class for process steps.""" diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_step_context.py b/python/semantic_kernel/processes/kernel_process/kernel_process_step_context.py index 8b90b204a5cc..4ff52477ca0e 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_step_context.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_step_context.py @@ -6,10 +6,10 @@ from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.processes.kernel_process.kernel_process_message_channel import KernelProcessMessageChannel from semantic_kernel.processes.local_runtime.local_event import 
KernelProcessEvent -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class KernelProcessStepContext(KernelBaseModel): """The context of a step in a kernel process.""" @@ -17,9 +17,9 @@ class KernelProcessStepContext(KernelBaseModel): def __init__(self, channel: KernelProcessMessageChannel): """Initialize the step context.""" - super().__init__(step_message_channel=channel) + super().__init__(step_message_channel=channel) # type: ignore - async def emit_event(self, process_event: "KernelProcessEvent | str | Enum", **kwargs) -> None: + async def emit_event(self, process_event: "KernelProcessEvent | str | Enum | None", **kwargs) -> None: """Emit an event from the current step. It is possible to either specify a `KernelProcessEvent` object or the ID of the event @@ -34,10 +34,9 @@ async def emit_event(self, process_event: "KernelProcessEvent | str | Enum", **k if process_event is None: raise ProcessEventUndefinedException("Process event cannot be None") - if isinstance(process_event, Enum): - process_event = process_event.value - if not isinstance(process_event, KernelProcessEvent): - process_event = KernelProcessEvent(id=process_event, **kwargs) + process_event = KernelProcessEvent( + id=process_event.value if isinstance(process_event, Enum) else process_event, **kwargs + ) await self.step_message_channel.emit_event(process_event) diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_step_info.py b/python/semantic_kernel/processes/kernel_process/kernel_process_step_info.py index e4591e67f257..f60499d2bc8d 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_step_info.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_step_info.py @@ -4,10 +4,10 @@ from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.processes.kernel_process.kernel_process_edge import KernelProcessEdge from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class KernelProcessStepInfo(KernelBaseModel): """Information about a step in a kernel process.""" diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_step_state.py b/python/semantic_kernel/processes/kernel_process/kernel_process_step_state.py index 802823e4de64..a0c494f9e749 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_step_state.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_step_state.py @@ -3,12 +3,12 @@ from typing import Generic, TypeVar from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental TState = TypeVar("TState") -@experimental_class +@experimental class KernelProcessStepState(KernelBaseModel, Generic[TState]): """The state of a step in a kernel process.""" diff --git a/python/semantic_kernel/processes/local_runtime/local_event.py b/python/semantic_kernel/processes/local_runtime/local_event.py index cee3d64321e9..6075e49f798d 100644 --- a/python/semantic_kernel/processes/local_runtime/local_event.py +++ b/python/semantic_kernel/processes/local_runtime/local_event.py @@ -7,10 +7,10 @@ 
KernelProcessEvent, KernelProcessEventVisibility, ) -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class LocalEvent(KernelBaseModel): """An event that is local to a namespace.""" diff --git a/python/semantic_kernel/processes/local_runtime/local_kernel_process.py b/python/semantic_kernel/processes/local_runtime/local_kernel_process.py index 6a7408b5b29b..aee1b8faa693 100644 --- a/python/semantic_kernel/processes/local_runtime/local_kernel_process.py +++ b/python/semantic_kernel/processes/local_runtime/local_kernel_process.py @@ -6,14 +6,14 @@ from semantic_kernel.exceptions.process_exceptions import ProcessInvalidConfigurationException from semantic_kernel.processes.local_runtime.local_event import KernelProcessEvent from semantic_kernel.processes.local_runtime.local_kernel_process_context import LocalKernelProcessContext -from semantic_kernel.utils.experimental_decorator import experimental_function +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.kernel import Kernel from semantic_kernel.processes.kernel_process.kernel_process import KernelProcess -@experimental_function +@experimental async def start( process: "KernelProcess", kernel: "Kernel", initial_event: KernelProcessEvent | str | Enum, **kwargs ) -> LocalKernelProcessContext: diff --git a/python/semantic_kernel/processes/local_runtime/local_kernel_process_context.py b/python/semantic_kernel/processes/local_runtime/local_kernel_process_context.py index e0e5d0b1cc80..f3ca76107512 100644 --- a/python/semantic_kernel/processes/local_runtime/local_kernel_process_context.py +++ b/python/semantic_kernel/processes/local_runtime/local_kernel_process_context.py @@ -5,14 +5,14 @@ from semantic_kernel.kernel import Kernel from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.processes.local_runtime.local_process import LocalProcess -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.processes.kernel_process.kernel_process import KernelProcess from semantic_kernel.processes.local_runtime.local_event import KernelProcessEvent -@experimental_class +@experimental class LocalKernelProcessContext(KernelBaseModel): """A local kernel process context.""" @@ -33,9 +33,10 @@ def __init__(self, process: "KernelProcess", kernel: "Kernel"): process=process, kernel=kernel, parent_process_id=None, + factories=process.factories, ) - super().__init__(local_process=local_process) + super().__init__(local_process=local_process) # type: ignore async def start_with_event(self, initial_event: "KernelProcessEvent") -> None: """Starts the local process with an initial event.""" diff --git a/python/semantic_kernel/processes/local_runtime/local_message.py b/python/semantic_kernel/processes/local_runtime/local_message.py index ea67aad3a3aa..26c50f62d833 100644 --- a/python/semantic_kernel/processes/local_runtime/local_message.py +++ b/python/semantic_kernel/processes/local_runtime/local_message.py @@ -5,10 +5,10 @@ from pydantic import Field from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class 
LocalMessage(KernelBaseModel): """A message that is local to a namespace.""" @@ -16,5 +16,5 @@ class LocalMessage(KernelBaseModel): destination_id: str = Field(...) function_name: str = Field(...) values: dict[str, Any | None] = Field(...) - target_event_id: str | None = Field(None) - target_event_data: Any | None = Field(None) + target_event_id: str | None = Field(default=None) + target_event_data: Any | None = Field(default=None) diff --git a/python/semantic_kernel/processes/local_runtime/local_message_factory.py b/python/semantic_kernel/processes/local_runtime/local_message_factory.py index 78e6d6260857..2d4ac42e01a6 100644 --- a/python/semantic_kernel/processes/local_runtime/local_message_factory.py +++ b/python/semantic_kernel/processes/local_runtime/local_message_factory.py @@ -4,10 +4,10 @@ from semantic_kernel.processes.kernel_process.kernel_process_edge import KernelProcessEdge from semantic_kernel.processes.local_runtime.local_message import LocalMessage -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class LocalMessageFactory: """Factory class to create LocalMessage instances.""" diff --git a/python/semantic_kernel/processes/local_runtime/local_process.py b/python/semantic_kernel/processes/local_runtime/local_process.py index eeb03b7888c2..ac47fc30716b 100644 --- a/python/semantic_kernel/processes/local_runtime/local_process.py +++ b/python/semantic_kernel/processes/local_runtime/local_process.py @@ -4,6 +4,7 @@ import contextlib import logging import uuid +from collections.abc import Callable from queue import Queue from typing import TYPE_CHECKING, Any @@ -23,7 +24,7 @@ from semantic_kernel.processes.local_runtime.local_message import LocalMessage from semantic_kernel.processes.local_runtime.local_message_factory import LocalMessageFactory from semantic_kernel.processes.local_runtime.local_step import LocalStep -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.processes.kernel_process.kernel_process import KernelProcess @@ -31,7 +32,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class LocalProcess(LocalStep): """A local process that contains a collection of steps.""" @@ -42,8 +43,15 @@ class LocalProcess(LocalStep): initialize_task: bool | None = False external_event_queue: Queue = Field(default_factory=Queue) process_task: asyncio.Task | None = None - - def __init__(self, process: "KernelProcess", kernel: Kernel, parent_process_id: str | None = None): + factories: dict[str, Callable] = Field(default_factory=dict) + + def __init__( + self, + process: "KernelProcess", + kernel: Kernel, + factories: dict[str, Callable] | None = None, + parent_process_id: str | None = None, + ): """Initializes the local process.""" args: dict[str, Any] = { "step_info": process, @@ -54,6 +62,9 @@ def __init__(self, process: "KernelProcess", kernel: Kernel, parent_process_id: "initialize_task": False, } + if factories: + args["factories"] = factories + super().__init__(**args) def ensure_initialized(self): @@ -124,6 +135,7 @@ def initialize_process(self): process = LocalProcess( process=step, kernel=self.kernel, + factories=self.factories, parent_process_id=self.id, ) @@ -133,9 +145,10 @@ def initialize_process(self): assert step.state and step.state.id is not None # nosec # Create 
a LocalStep for the step - local_step = LocalStep( + local_step = LocalStep( # type: ignore step_info=step, kernel=self.kernel, + factories=self.factories, parent_process_id=self.id, ) @@ -216,16 +229,17 @@ async def enqueue_step_messages(self, step: LocalStep, message_channel: Queue[Lo """Processes events emitted by the given step and enqueues them.""" all_step_events = step.get_all_events() for step_event in all_step_events: + # must come first because emitting the step event modifies its namespace + for edge in step.get_edge_for_event(step_event.id): + message = LocalMessageFactory.create_from_edge(edge, step_event.data) + message_channel.put(message) + if step_event.visibility == KernelProcessEventVisibility.Public: if isinstance(step_event, KernelProcessEvent): await self.emit_event(step_event) # type: ignore elif isinstance(step_event, LocalEvent): await self.emit_local_event(step_event) # type: ignore - for edge in step.get_edge_for_event(step_event.id): - message = LocalMessageFactory.create_from_edge(edge, step_event.data) - message_channel.put(message) - def dispose(self): """Clean up resources.""" if self.process_task: diff --git a/python/semantic_kernel/processes/local_runtime/local_step.py b/python/semantic_kernel/processes/local_runtime/local_step.py index a5856d1fb812..1b6d6e43c2ed 100644 --- a/python/semantic_kernel/processes/local_runtime/local_step.py +++ b/python/semantic_kernel/processes/local_runtime/local_step.py @@ -3,6 +3,8 @@ import asyncio import logging import uuid +from collections.abc import Callable +from inspect import isawaitable from queue import Queue from typing import Any @@ -19,19 +21,18 @@ from semantic_kernel.processes.kernel_process.kernel_process_edge import KernelProcessEdge from semantic_kernel.processes.kernel_process.kernel_process_event import KernelProcessEvent from semantic_kernel.processes.kernel_process.kernel_process_message_channel import KernelProcessMessageChannel -from semantic_kernel.processes.kernel_process.kernel_process_step import KernelProcessStep from semantic_kernel.processes.kernel_process.kernel_process_step_info import KernelProcessStepInfo from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState from semantic_kernel.processes.local_runtime.local_event import LocalEvent from semantic_kernel.processes.local_runtime.local_message import LocalMessage from semantic_kernel.processes.process_types import get_generic_state_type -from semantic_kernel.processes.step_utils import find_input_channels -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.processes.step_utils import find_input_channels, get_fully_qualified_name +from semantic_kernel.utils.feature_stage_decorator import experimental logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class LocalStep(KernelProcessMessageChannel, KernelBaseModel): """A local step that is part of a local process.""" @@ -47,6 +48,7 @@ class LocalStep(KernelProcessMessageChannel, KernelBaseModel): output_edges: dict[str, list[KernelProcessEdge]] = Field(default_factory=dict) parent_process_id: str | None = None init_lock: asyncio.Lock = Field(default_factory=asyncio.Lock, exclude=True) + factories: dict[str, Callable] @model_validator(mode="before") @classmethod @@ -148,7 +150,7 @@ async def handle_message(self, message: LocalMessage): raise ProcessFunctionNotFoundException(f"Function {target_function} not found in plugin {self.name}") invoke_result = None - event_name 
= None + event_name: str = "" event_value = None try: @@ -156,6 +158,8 @@ async def handle_message(self, message: LocalMessage): f"Invoking plugin `{function.plugin_name}` and function `{function.name}` with arguments: {arguments}" ) invoke_result = await self.invoke_function(function, self.kernel, arguments) + if invoke_result is None: + raise KernelException(f"Function {target_function} returned None.") event_name = f"{target_function}.OnResult" event_value = invoke_result.value except Exception as ex: @@ -185,8 +189,16 @@ async def initialize_step(self): """Initializes the step.""" # Instantiate an instance of the inner step object step_cls = self.step_info.inner_step_type - - step_instance: KernelProcessStep = step_cls() # type: ignore + factory = ( + self.factories.get(get_fully_qualified_name(self.step_info.inner_step_type)) if self.factories else None + ) + if factory: + step_instance = factory() + if isawaitable(step_instance): + step_instance = await step_instance + step_cls = type(step_instance) + else: + step_instance = step_cls() # type: ignore kernel_plugin = self.kernel.add_plugin( step_instance, self.step_info.state.name if self.step_info.state else "default_name" diff --git a/python/semantic_kernel/processes/process_builder.py b/python/semantic_kernel/processes/process_builder.py index c4e024298af3..934bee91ab42 100644 --- a/python/semantic_kernel/processes/process_builder.py +++ b/python/semantic_kernel/processes/process_builder.py @@ -2,6 +2,7 @@ import contextlib import inspect +from collections.abc import Callable from copy import copy from enum import Enum from typing import TYPE_CHECKING @@ -17,13 +18,14 @@ from semantic_kernel.processes.process_step_builder import ProcessStepBuilder from semantic_kernel.processes.process_step_edge_builder import ProcessStepEdgeBuilder from semantic_kernel.processes.process_types import TState, TStep -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.processes.step_utils import get_fully_qualified_name +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.processes.kernel_process.kernel_process import KernelProcess -@experimental_class +@experimental class ProcessBuilder(ProcessStepBuilder): """A builder for a process.""" @@ -32,20 +34,38 @@ class ProcessBuilder(ProcessStepBuilder): has_parent_process: bool = False steps: list["ProcessStepBuilder"] = Field(default_factory=list) + factories: dict[str, Callable] = Field(default_factory=dict) def add_step( self, step_type: type[TStep], name: str | None = None, initial_state: TState | None = None, + factory_function: Callable | None = None, **kwargs, ) -> ProcessStepBuilder[TState, TStep]: - """Register a step type with optional constructor arguments.""" + """Register a step type with optional constructor arguments. + + Args: + step_type: The step type. + name: The name of the step. Defaults to None. + initial_state: The initial state of the step. Defaults to None. + factory_function: The factory function. Allows for a callable that is used to create the step instance + that may have complex dependencies that cannot be JSON serialized or deserialized. Defaults to None. + kwargs: Additional keyword arguments. + + Returns: + The process step builder. 
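# Hedged usage sketch for the new factory_function parameter of add_step: a step whose
# dependency cannot be JSON serialized or deserialized is created through a callable instead
# of the default constructor. WeatherStep, WeatherClient, and the process name are
# illustrative, not part of the patch.
from typing import Any

from semantic_kernel.functions import kernel_function
from semantic_kernel.processes.kernel_process.kernel_process_step import KernelProcessStep
from semantic_kernel.processes.process_builder import ProcessBuilder


class WeatherClient:
    def lookup(self, city: str) -> str:
        return f"Sunny in {city}"


class WeatherStep(KernelProcessStep):
    client: Any = None  # populated by the factory, not by deserialized state

    @kernel_function
    def get_weather(self, city: str) -> str:
        return self.client.lookup(city)


builder = ProcessBuilder(name="weather_process")
step = builder.add_step(WeatherStep, factory_function=lambda: WeatherStep(client=WeatherClient()))
# build() forwards the collected factories, keyed by fully qualified type name, into the
# resulting KernelProcess, so the local and Dapr runtimes can use them when activating steps.
process = builder.build()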
+ """ if not inspect.isclass(step_type): raise ProcessInvalidConfigurationException( f"Expected a class type, but got an instance of {type(step_type).__name__}" ) + if factory_function: + fq_name = get_fully_qualified_name(step_type) + self.factories[fq_name] = factory_function + name = name or step_type.__name__ process_step_builder = ProcessStepBuilder(type=step_type, name=name, initial_state=initial_state, **kwargs) self.steps.append(process_step_builder) @@ -117,4 +137,4 @@ def build(self) -> "KernelProcess": built_edges = {key: [edge.build() for edge in edges] for key, edges in self.edges.items()} built_steps = [step.build_step() for step in self.steps] process_state = KernelProcessState(name=self.name, id=self.id if self.has_parent_process else None) - return KernelProcess(state=process_state, steps=built_steps, edges=built_edges) + return KernelProcess(state=process_state, steps=built_steps, edges=built_edges, factories=self.factories) diff --git a/python/semantic_kernel/processes/process_edge_builder.py b/python/semantic_kernel/processes/process_edge_builder.py index 0cd95d8f3b9a..56e1bf642210 100644 --- a/python/semantic_kernel/processes/process_edge_builder.py +++ b/python/semantic_kernel/processes/process_edge_builder.py @@ -6,13 +6,13 @@ from semantic_kernel.processes.process_function_target_builder import ProcessFunctionTargetBuilder from semantic_kernel.processes.process_step_builder import ProcessStepBuilder from semantic_kernel.processes.process_step_edge_builder import ProcessStepEdgeBuilder -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.processes.process_builder import ProcessBuilder -@experimental_class +@experimental class ProcessEdgeBuilder(KernelBaseModel): """A builder for a process edge.""" @@ -32,7 +32,9 @@ def send_event_to( raise TypeError("Target cannot be None") if isinstance(target, ProcessStepBuilder): - target = ProcessFunctionTargetBuilder(step=target, parameter_name=kwargs.get("parameter_name")) + target = ProcessFunctionTargetBuilder( + step=target, parameter_name=kwargs.get("parameter_name"), function_name=kwargs.get("function_name") + ) self.target = target edge_builder = ProcessStepEdgeBuilder(source=self.source, event_id=self.event_id) diff --git a/python/semantic_kernel/processes/process_end_step.py b/python/semantic_kernel/processes/process_end_step.py index fea2074286cd..3306418ef479 100644 --- a/python/semantic_kernel/processes/process_end_step.py +++ b/python/semantic_kernel/processes/process_end_step.py @@ -4,14 +4,14 @@ from semantic_kernel.processes.kernel_process.kernel_process_step_info import KernelProcessStepInfo from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState from semantic_kernel.processes.process_step_builder import ProcessStepBuilder -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.functions import KernelFunctionMetadata from semantic_kernel.kernel import Kernel -@experimental_class +@experimental class EndStep(ProcessStepBuilder): """An end step in a process.""" diff --git a/python/semantic_kernel/processes/process_function_target_builder.py b/python/semantic_kernel/processes/process_function_target_builder.py index 30a695273721..00325e346858 100644 --- 
a/python/semantic_kernel/processes/process_function_target_builder.py +++ b/python/semantic_kernel/processes/process_function_target_builder.py @@ -6,10 +6,10 @@ from semantic_kernel.processes.kernel_process.kernel_process_function_target import KernelProcessFunctionTarget from semantic_kernel.processes.process_end_step import EndStep from semantic_kernel.processes.process_step_builder import ProcessStepBuilder -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class ProcessFunctionTargetBuilder(KernelBaseModel): """A builder for a process function target.""" diff --git a/python/semantic_kernel/processes/process_step_builder.py b/python/semantic_kernel/processes/process_step_builder.py index ee09eb5f92dd..fa259ab022ed 100644 --- a/python/semantic_kernel/processes/process_step_builder.py +++ b/python/semantic_kernel/processes/process_step_builder.py @@ -15,7 +15,7 @@ from semantic_kernel.processes.kernel_process.kernel_process_step_info import KernelProcessStepInfo from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState from semantic_kernel.processes.process_types import TState, TStep, get_generic_state_type -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.functions import KernelFunctionMetadata @@ -25,7 +25,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class +@experimental class ProcessStepBuilder(KernelBaseModel, Generic[TState, TStep]): """A builder for a process step.""" @@ -189,9 +189,18 @@ def build_step(self) -> "KernelProcessStepInfo": # Return an instance of KernelProcessStepInfo with the built state and edges. return KernelProcessStepInfo(inner_step_type=step_cls, state=state_object, output_edges=built_edges) - def on_function_result(self, function_name: str) -> "ProcessStepEdgeBuilder": - """Creates a new ProcessStepEdgeBuilder for the function result.""" - return self.on_event(f"{function_name}.OnResult") + def on_function_result(self, function_name: str | Enum) -> "ProcessStepEdgeBuilder": + """Creates a new ProcessStepEdgeBuilder for the function result. + + Args: + function_name: The function name as a string or Enum. + + Returns: + ProcessStepEdgeBuilder: The ProcessStepEdgeBuilder instance. 
+ """ + function_name_str: str = function_name.value if isinstance(function_name, Enum) else function_name + + return self.on_event(f"{function_name_str}.OnResult") def get_function_metadata_map( self, plugin_type, name: str | None = None, kernel: "Kernel | None" = None diff --git a/python/semantic_kernel/processes/process_step_edge_builder.py b/python/semantic_kernel/processes/process_step_edge_builder.py index ca1284fa49a8..e42422914f6f 100644 --- a/python/semantic_kernel/processes/process_step_edge_builder.py +++ b/python/semantic_kernel/processes/process_step_edge_builder.py @@ -6,13 +6,13 @@ from semantic_kernel.processes.kernel_process.kernel_process_edge import KernelProcessEdge from semantic_kernel.processes.process_end_step import EndStep from semantic_kernel.processes.process_function_target_builder import ProcessFunctionTargetBuilder -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.processes.process_step_builder import ProcessStepBuilder -@experimental_class +@experimental class ProcessStepEdgeBuilder: """A builder for a process step edge.""" diff --git a/python/semantic_kernel/prompt_template/const.py b/python/semantic_kernel/prompt_template/const.py index ecc64e31402d..46f4b4154948 100644 --- a/python/semantic_kernel/prompt_template/const.py +++ b/python/semantic_kernel/prompt_template/const.py @@ -1,16 +1,9 @@ # Copyright (c) Microsoft. All rights reserved. -from typing import Literal, get_args +from typing import Literal -KERNEL_TEMPLATE_FORMAT_NAME_TYPE = Literal["semantic-kernel"] -KERNEL_TEMPLATE_FORMAT_NAME: KERNEL_TEMPLATE_FORMAT_NAME_TYPE = get_args(KERNEL_TEMPLATE_FORMAT_NAME_TYPE)[0] -HANDLEBARS_TEMPLATE_FORMAT_NAME_TYPE = Literal["handlebars"] -HANDLEBARS_TEMPLATE_FORMAT_NAME: HANDLEBARS_TEMPLATE_FORMAT_NAME_TYPE = get_args(HANDLEBARS_TEMPLATE_FORMAT_NAME_TYPE)[ - 0 -] -JINJA2_TEMPLATE_FORMAT_NAME_TYPE = Literal["jinja2"] -JINJA2_TEMPLATE_FORMAT_NAME: JINJA2_TEMPLATE_FORMAT_NAME_TYPE = get_args(JINJA2_TEMPLATE_FORMAT_NAME_TYPE)[0] +KERNEL_TEMPLATE_FORMAT_NAME: Literal["semantic-kernel"] = "semantic-kernel" +HANDLEBARS_TEMPLATE_FORMAT_NAME: Literal["handlebars"] = "handlebars" +JINJA2_TEMPLATE_FORMAT_NAME: Literal["jinja2"] = "jinja2" -TEMPLATE_FORMAT_TYPES = Literal[ - KERNEL_TEMPLATE_FORMAT_NAME_TYPE, HANDLEBARS_TEMPLATE_FORMAT_NAME_TYPE, JINJA2_TEMPLATE_FORMAT_NAME_TYPE -] +TEMPLATE_FORMAT_TYPES = Literal["semantic-kernel", "handlebars", "jinja2"] diff --git a/python/semantic_kernel/prompt_template/handlebars_prompt_template.py b/python/semantic_kernel/prompt_template/handlebars_prompt_template.py index e2b7fc80a1d7..30d840bb8cc7 100644 --- a/python/semantic_kernel/prompt_template/handlebars_prompt_template.py +++ b/python/semantic_kernel/prompt_template/handlebars_prompt_template.py @@ -2,7 +2,7 @@ import logging from collections.abc import Callable -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any from pybars import Compiler, PybarsError from pydantic import PrivateAttr, field_validator @@ -63,7 +63,7 @@ def model_post_init(self, __context: Any) -> None: f"Invalid handlebars template: {self.prompt_template_config.template}" ) from e - async def render(self, kernel: "Kernel", arguments: Optional["KernelArguments"] = None) -> str: + async def render(self, kernel: "Kernel", arguments: "KernelArguments | None" = None) -> str: """Render the prompt template. 
Using the prompt template, replace the variables with their values diff --git a/python/semantic_kernel/prompt_template/jinja2_prompt_template.py b/python/semantic_kernel/prompt_template/jinja2_prompt_template.py index 6ba85e5e2eea..6c4a6601e0d6 100644 --- a/python/semantic_kernel/prompt_template/jinja2_prompt_template.py +++ b/python/semantic_kernel/prompt_template/jinja2_prompt_template.py @@ -2,7 +2,7 @@ import logging from collections.abc import Callable -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any from jinja2 import BaseLoader, TemplateError from jinja2.sandbox import ImmutableSandboxedEnvironment @@ -65,7 +65,7 @@ def model_post_init(self, _: Any) -> None: return self._env = ImmutableSandboxedEnvironment(loader=BaseLoader(), enable_async=True) - async def render(self, kernel: "Kernel", arguments: Optional["KernelArguments"] = None) -> str: + async def render(self, kernel: "Kernel", arguments: "KernelArguments | None" = None) -> str: """Render the prompt template. Using the prompt template, replace the variables with their values diff --git a/python/semantic_kernel/prompt_template/prompt_template_base.py b/python/semantic_kernel/prompt_template/prompt_template_base.py index 6cac84ba4693..5261cd24a923 100644 --- a/python/semantic_kernel/prompt_template/prompt_template_base.py +++ b/python/semantic_kernel/prompt_template/prompt_template_base.py @@ -1,6 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. from abc import ABC, abstractmethod +from collections.abc import Sequence from html import escape from typing import TYPE_CHECKING @@ -20,7 +21,7 @@ class PromptTemplateBase(KernelBaseModel, ABC): allow_dangerously_set_content: bool = False @abstractmethod - async def render(self, kernel: "Kernel", arguments: "KernelArguments") -> str: + async def render(self, kernel: "Kernel", arguments: "KernelArguments | None" = None) -> str: """Render the prompt template.""" pass @@ -62,7 +63,7 @@ def _get_allow_dangerously_set_function_output(self) -> bool: allow_dangerously_set_content = True return allow_dangerously_set_content - def _should_escape(self, name: str, input_variables: list["InputVariable"]) -> bool: + def _should_escape(self, name: str, input_variables: Sequence["InputVariable"]) -> bool: """Check if the variable should be escaped. If the PromptTemplate allows dangerously set content, then the variable will not be escaped, diff --git a/python/semantic_kernel/prompt_template/prompt_template_config.py b/python/semantic_kernel/prompt_template/prompt_template_config.py index 4a0823b7a07a..aaec7739541e 100644 --- a/python/semantic_kernel/prompt_template/prompt_template_config.py +++ b/python/semantic_kernel/prompt_template/prompt_template_config.py @@ -1,6 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. import logging -from collections.abc import Mapping, Sequence +from collections.abc import MutableMapping, MutableSequence, Sequence from typing import TypeVar from pydantic import Field, field_validator, model_validator @@ -16,6 +16,8 @@ logger: logging.Logger = logging.getLogger(__name__) +_T = TypeVar("_T", bound="PromptTemplateConfig") + class PromptTemplateConfig(KernelBaseModel): """Configuration for a prompt template. 
@@ -37,9 +39,9 @@ class PromptTemplateConfig(KernelBaseModel): description: str | None = "" template: str | None = None template_format: TEMPLATE_FORMAT_TYPES = KERNEL_TEMPLATE_FORMAT_NAME - input_variables: list[InputVariable] = Field(default_factory=list) + input_variables: MutableSequence[InputVariable] = Field(default_factory=list) allow_dangerously_set_content: bool = False - execution_settings: dict[str, PromptExecutionSettings] = Field(default_factory=dict) + execution_settings: MutableMapping[str, PromptExecutionSettings] = Field(default_factory=dict) @model_validator(mode="after") def check_input_variables(self): @@ -52,12 +54,12 @@ def check_input_variables(self): @field_validator("execution_settings", mode="before") @classmethod def rewrite_execution_settings( - cls, + cls: type[_T], settings: PromptExecutionSettings | Sequence[PromptExecutionSettings] - | Mapping[str, PromptExecutionSettings] + | MutableMapping[str, PromptExecutionSettings] | None, - ) -> Mapping[str, PromptExecutionSettings]: + ) -> MutableMapping[str, PromptExecutionSettings]: """Rewrite execution settings to a dictionary.""" if not settings: return {} @@ -81,14 +83,14 @@ def get_kernel_parameter_metadata(self) -> Sequence[KernelParameterMetadata]: name=variable.name, description=variable.description, default_value=variable.default, - type_=variable.json_schema, # TODO (moonbox3): update to handle complex JSON schemas + type_=variable.json_schema, # TODO (moonbox3): update to handle complex JSON schemas # type: ignore is_required=variable.is_required, ) for variable in self.input_variables ] @classmethod - def from_json(cls, json_str: str) -> "PromptTemplateConfig": + def from_json(cls: type[_T], json_str: str) -> _T: """Create a PromptTemplateConfig instance from a JSON string.""" if not json_str: raise ValueError("json_str is empty") @@ -102,15 +104,15 @@ def from_json(cls, json_str: str) -> "PromptTemplateConfig": @classmethod def restore( - cls, + cls: type[_T], name: str, description: str, template: str, template_format: TEMPLATE_FORMAT_TYPES = KERNEL_TEMPLATE_FORMAT_NAME, - input_variables: Sequence[InputVariable] = [], - execution_settings: Mapping[str, PromptExecutionSettings] = {}, + input_variables: MutableSequence[InputVariable] = [], + execution_settings: MutableMapping[str, PromptExecutionSettings] = {}, allow_dangerously_set_content: bool = False, - ) -> "PromptTemplateConfig": + ) -> _T: """Restore a PromptTemplateConfig instance from the specified parameters. 
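The `PromptTemplateConfig` hunks above switch `from_json` and `restore` from a hard-coded `"PromptTemplateConfig"` return annotation to a `_T` TypeVar bound to the class, so subclasses calling these classmethods are typed as themselves. A minimal sketch of that pattern using a hypothetical `Config` class (not part of the patch):

```python
from typing import TypeVar

_T = TypeVar("_T", bound="Config")


class Config:
    def __init__(self, name: str) -> None:
        self.name = name

    @classmethod
    def from_name(cls: type[_T], name: str) -> _T:
        # Annotating cls as type[_T] and returning _T means SpecialConfig.from_name(...)
        # is typed as SpecialConfig rather than the base Config.
        return cls(name)


class SpecialConfig(Config):
    pass


special = SpecialConfig.from_name("demo")
assert isinstance(special, SpecialConfig)
```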
Args: diff --git a/python/semantic_kernel/services/ai_service_client_base.py b/python/semantic_kernel/services/ai_service_client_base.py index 69c12ea0b1ca..efe90bb633d1 100644 --- a/python/semantic_kernel/services/ai_service_client_base.py +++ b/python/semantic_kernel/services/ai_service_client_base.py @@ -3,7 +3,7 @@ from abc import ABC from typing import TYPE_CHECKING, Annotated -from pydantic import Field, StringConstraints +from pydantic.types import StringConstraints from semantic_kernel.kernel_pydantic import KernelBaseModel @@ -23,7 +23,7 @@ class AIServiceClientBase(KernelBaseModel, ABC): """ ai_model_id: Annotated[str, StringConstraints(strip_whitespace=True, min_length=1)] - service_id: str = Field("") + service_id: str = "" def model_post_init(self, __context: object | None = None): """Update the service_id if it is not set.""" diff --git a/python/semantic_kernel/services/ai_service_selector.py b/python/semantic_kernel/services/ai_service_selector.py index 0cdb5347f239..8b688e7ae17d 100644 --- a/python/semantic_kernel/services/ai_service_selector.py +++ b/python/semantic_kernel/services/ai_service_selector.py @@ -24,8 +24,8 @@ class AIServiceSelector: def select_ai_service( self, kernel: "KernelServicesExtension", - function: "KernelFunction", - arguments: "KernelArguments", + function: "KernelFunction | None" = None, + arguments: "KernelArguments | None" = None, type_: type[AI_SERVICE_CLIENT_TYPE] | tuple[type[AI_SERVICE_CLIENT_TYPE], ...] | None = None, ) -> tuple["AIServiceClientBase", "PromptExecutionSettings"]: """Select an AI Service on a first come, first served basis. @@ -33,6 +33,12 @@ def select_ai_service( Starts with execution settings in the arguments, followed by the execution settings from the function. If the same service_id is in both, the one in the arguments will be used. + + Args: + kernel: The kernel used. + function: The function used. (optional) + arguments: The arguments used. (optional) + type_: The type of service to select. (optional) """ if type_ is None: from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase @@ -40,7 +46,7 @@ def select_ai_service( type_ = (TextCompletionClientBase, ChatCompletionClientBase) # type: ignore - execution_settings_dict = arguments.execution_settings or {} + execution_settings_dict = arguments.execution_settings if arguments and arguments.execution_settings else {} if func_exec_settings := getattr(function, "prompt_execution_settings", None): for id, settings in func_exec_settings.items(): if id not in execution_settings_dict: diff --git a/python/semantic_kernel/services/kernel_services_extension.py b/python/semantic_kernel/services/kernel_services_extension.py index 13b6aea5e3ae..37d425ce16d8 100644 --- a/python/semantic_kernel/services/kernel_services_extension.py +++ b/python/semantic_kernel/services/kernel_services_extension.py @@ -49,10 +49,20 @@ def rewrite_services( return services def select_ai_service( - self, function: "KernelFunction", arguments: "KernelArguments" + self, + function: "KernelFunction | None" = None, + arguments: "KernelArguments | None" = None, + type: type[AI_SERVICE_CLIENT_TYPE] | tuple[type[AI_SERVICE_CLIENT_TYPE], ...] | None = None, ) -> tuple[AIServiceClientBase, PromptExecutionSettings]: - """Uses the AI service selector to select a service for the function.""" - return self.ai_service_selector.select_ai_service(self, function, arguments) + """Uses the AI service selector to select a service for the function. 
+ + Args: + function (KernelFunction | None): The function used. + arguments (KernelArguments | None): The arguments used. + type (Type[AI_SERVICE_CLIENT_TYPE] | tuple[type[AI_SERVICE_CLIENT_TYPE], ...] | None): The type of + service to select. Defaults to None. + """ + return self.ai_service_selector.select_ai_service(self, function=function, arguments=arguments, type_=type) def get_service( self, diff --git a/python/semantic_kernel/template_engine/protocols/code_renderer.py b/python/semantic_kernel/template_engine/protocols/code_renderer.py index f88d7d74571e..4f196e69f48d 100644 --- a/python/semantic_kernel/template_engine/protocols/code_renderer.py +++ b/python/semantic_kernel/template_engine/protocols/code_renderer.py @@ -1,5 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. +from abc import abstractmethod from typing import TYPE_CHECKING, Protocol, runtime_checkable if TYPE_CHECKING: @@ -11,6 +12,7 @@ class CodeRenderer(Protocol): """Protocol for dynamic code blocks that need async IO to be rendered.""" + @abstractmethod async def render_code(self, kernel: "Kernel", arguments: "KernelArguments") -> str: """Render the block using the given context. diff --git a/python/semantic_kernel/template_engine/protocols/text_renderer.py b/python/semantic_kernel/template_engine/protocols/text_renderer.py index 5c9e94e3c1a3..4483bf7b6d2a 100644 --- a/python/semantic_kernel/template_engine/protocols/text_renderer.py +++ b/python/semantic_kernel/template_engine/protocols/text_renderer.py @@ -1,5 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. +from abc import abstractmethod from typing import TYPE_CHECKING, Optional, Protocol, runtime_checkable if TYPE_CHECKING: @@ -11,6 +12,7 @@ class TextRenderer(Protocol): """Protocol for static (text) blocks that don't need async rendering.""" + @abstractmethod def render(self, kernel: "Kernel", arguments: Optional["KernelArguments"] = None) -> str: """Render the block using only the given variables. diff --git a/python/semantic_kernel/utils/async_utils.py b/python/semantic_kernel/utils/async_utils.py new file mode 100644 index 000000000000..4d2d2d50249a --- /dev/null +++ b/python/semantic_kernel/utils/async_utils.py @@ -0,0 +1,11 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +from collections.abc import Callable +from functools import partial +from typing import Any + + +async def run_in_executor(executor: Any, func: Callable, *args, **kwargs) -> Any: + """Run a function in an executor.""" + return await asyncio.get_event_loop().run_in_executor(executor, partial(func, *args, **kwargs)) diff --git a/python/semantic_kernel/utils/experimental_decorator.py b/python/semantic_kernel/utils/experimental_decorator.py deleted file mode 100644 index 7b28cc0e4064..000000000000 --- a/python/semantic_kernel/utils/experimental_decorator.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from collections.abc import Callable -from typing import TypeVar - -T = TypeVar("T", bound=type) - - -def experimental_function(func: Callable) -> Callable: - """Decorator to mark a function as experimental.""" - if callable(func): - if func.__doc__: - func.__doc__ += "\n\nNote: This function is experimental and may change in the future." - else: - func.__doc__ = "Note: This function is experimental and may change in the future." 
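The new `async_utils.run_in_executor` helper above is a thin wrapper over `loop.run_in_executor` that forwards keyword arguments through `functools.partial`. A small usage sketch; the blocking function and file name are invented for illustration:

```python
import asyncio
import time

from semantic_kernel.utils.async_utils import run_in_executor


def blocking_io(path: str, *, encoding: str = "utf-8") -> str:
    # Stands in for any synchronous, blocking call that should not block the event loop.
    time.sleep(0.1)
    return f"read {path} ({encoding})"


async def main() -> None:
    # Passing None selects the event loop's default ThreadPoolExecutor; keyword
    # arguments are forwarded to the callable via functools.partial inside the helper.
    result = await run_in_executor(None, blocking_io, "notes.txt", encoding="utf-8")
    print(result)


asyncio.run(main())
```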
- - setattr(func, "is_experimental", True) - - return func - - -def experimental_class(cls: T) -> T: - """Decorator to mark a class as experimental.""" - if isinstance(cls, type): - if cls.__doc__: - cls.__doc__ += "\n\nNote: This class is experimental and may change in the future." - else: - cls.__doc__ = "Note: This class is experimental and may change in the future." - - setattr(cls, "is_experimental", True) - - return cls diff --git a/python/semantic_kernel/utils/feature_stage_decorator.py b/python/semantic_kernel/utils/feature_stage_decorator.py new file mode 100644 index 000000000000..322934617cd8 --- /dev/null +++ b/python/semantic_kernel/utils/feature_stage_decorator.py @@ -0,0 +1,153 @@ +# Copyright (c) Microsoft. All rights reserved. + +from collections.abc import Callable +from typing import Any, TypeVar, cast + +T = TypeVar("T", bound=type[Any] | Callable[..., Any]) + +DEFAULT_RC_NOTE = ( + "Features marked with this status are nearing completion and are considered " + "stable for most purposes, but may still incur minor refinements or " + "optimizations before achieving full general availability." +) + +""" +Example usage: + +@experimental +class MyExperimentalClass: + '''A class that is still evolving rapidly.''' + pass + +@stage(status="experimental") +class MyExperimentalClass: + '''A class that is still evolving rapidly.''' + pass + +@experimental +def my_experimental_function(): + '''A function that is still evolving rapidly.''' + pass + +@release_candidate +class MyRCClass: + '''A class that is nearly final, but still in release-candidate stage.''' + pass + +@release_candidate("1.23.1-rc1") +class MyRCClass: + '''A class that is nearly final, but still in release-candidate stage.''' + pass +""" + + +def _update_docstring(obj: T, note: str) -> None: + """Append or set the docstring of the given object with the specified note.""" + if obj.__doc__: + obj.__doc__ += f"\n\n{note}" + else: + obj.__doc__ = note + + +def stage( + status: str = "experimental", + version: str | None = None, + note: str | None = None, +) -> Callable[[T], T]: + """A general-purpose decorator for marking a function or a class. + + It updates the docstring and attaches 'stage_status' (and optionally + 'stage_version') as metadata. A custom 'note' may be provided to + override the default appended text. + + Args: + status: The development stage (e.g., 'experimental', 'release_candidate', etc.). + version: Optional version or release info (e.g., '1.21.0-rc4'). + note: A custom note to append to the docstring. If omitted, a default + note is used to indicate the stage and possible changes. + + Returns: + A decorator that updates the docstring and metadata of + the target function/class. + """ + + def decorator(obj: T) -> T: + entity_type = "class" if isinstance(obj, type) else "function" + ver_text = f" (Version: {version})" if version else "" + default_note = f"Note: This {entity_type} is marked as '{status}'{ver_text} and may change in the future." + final_note = note if note else default_note + + _update_docstring(obj, final_note) + setattr(obj, "stage_status", status) + if version: + setattr(obj, "stage_version", version) + + return obj + + return decorator + + +def experimental(obj: T) -> T: + """Decorator specifically for 'experimental' features. + + It uses the general 'stage' decorator but also attaches + 'is_experimental = True'. 
+ """ + decorated = stage(status="experimental")(obj) + setattr(decorated, "is_experimental", True) + return decorated + + +def release_candidate( + func: T | str | None = None, + *, + version: str | None = None, + doc_string: str | None = None, +) -> T: + """Decorator that designates a function/class as being in a 'release candidate' state. + + By default, applies a descriptive note indicating near-completion and possible minor refinements + before achieving general availability. You may override this with a custom 'doc_string' if needed. + + Usage: + 1) @release_candidate + 2) @release_candidate() + 3) @release_candidate("1.21.3-rc1") + 4) @release_candidate(version="1.21.3-rc1") + 5) @release_candidate(doc_string="Custom RC note...") + 6) @release_candidate(version="1.21.3-rc1", doc_string="Custom RC note...") + + Args: + func: + - In cases (1) or (2), this is the function/class being decorated. + - In cases (3) or (4), this may be a version string or None. + version: + The RC version string, if provided. + doc_string: + An optional custom note to append to the docstring, overriding + the default RC descriptive note. + + Returns: + The decorated object, with an updated docstring and + 'is_release_candidate = True'. + """ + from semantic_kernel import DEFAULT_RC_VERSION + + def _apply(obj: T, ver: str, note: str | None) -> T: + ver_text = f" (Version: {ver})" if ver else "" + rc_note = note if note is not None else f"{DEFAULT_RC_NOTE}{ver_text}" + + decorated = stage(status="release_candidate", version=ver, note=rc_note)(obj) + setattr(decorated, "is_release_candidate", True) + return decorated + + if func is not None and callable(func): + ver = version or DEFAULT_RC_VERSION + return _apply(cast(T, func), ver, doc_string) + + ver_str = func if isinstance(func, str) else version + + def wrapper(obj: T) -> T: + return _apply(obj, ver_str or DEFAULT_RC_VERSION, doc_string) + + return wrapper # type: ignore diff --git a/python/semantic_kernel/utils/list_handler.py b/python/semantic_kernel/utils/list_handler.py index 713ef87721d3..9876f63b024d 100644 --- a/python/semantic_kernel/utils/list_handler.py +++ b/python/semantic_kernel/utils/list_handler.py @@ -1,7 +1,8 @@ # Copyright (c) Microsoft. All rights reserved. 
-from collections.abc import AsyncIterable, Sequence +import asyncio +from collections.abc import AsyncGenerator, AsyncIterable, Sequence from typing import TypeVar _T = TypeVar("_T") @@ -11,3 +12,10 @@ async def desync_list(sync_list: Sequence[_T]) -> AsyncIterable[_T]: # noqa: RU """De synchronize a list of synchronous objects.""" for x in sync_list: yield x + + +async def empty_generator() -> AsyncGenerator[_T, None]: + """An empty generator, can be used to return an empty generator.""" + if False: + yield None + await asyncio.sleep(0) diff --git a/python/semantic_kernel/utils/telemetry/agent_diagnostics/decorators.py b/python/semantic_kernel/utils/telemetry/agent_diagnostics/decorators.py index 91e5f28197f4..884464cb7efb 100644 --- a/python/semantic_kernel/utils/telemetry/agent_diagnostics/decorators.py +++ b/python/semantic_kernel/utils/telemetry/agent_diagnostics/decorators.py @@ -6,7 +6,10 @@ from opentelemetry.trace import get_tracer -from semantic_kernel.utils.experimental_decorator import experimental_function +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.telemetry.agent_diagnostics import gen_ai_attributes if TYPE_CHECKING: from semantic_kernel.agents.agent import Agent @@ -16,15 +19,27 @@ tracer = get_tracer(__name__) -@experimental_function +@experimental def trace_agent_invocation(invoke_func: Callable) -> Callable: """Decorator to trace agent invocation.""" + OPERATION_NAME = "invoke_agent" @functools.wraps(invoke_func) - async def wrapper_decorator(*args: Any, **kwargs: Any) -> AsyncIterable: + async def wrapper_decorator( + *args: Any, **kwargs: Any + ) -> AsyncIterable[ChatMessageContent | StreamingChatMessageContent]: agent: "Agent" = args[0] - with tracer.start_as_current_span(agent.name): + with tracer.start_as_current_span(f"{OPERATION_NAME} {agent.name}") as span: + span.set_attributes({ + gen_ai_attributes.OPERATION: OPERATION_NAME, + gen_ai_attributes.AGENT_ID: agent.id, + gen_ai_attributes.AGENT_NAME: agent.name, + }) + + if agent.description: + span.set_attribute(gen_ai_attributes.AGENT_DESCRIPTION, agent.description) + async for response in invoke_func(*args, **kwargs): yield response @@ -32,3 +47,30 @@ async def wrapper_decorator(*args: Any, **kwargs: Any) -> AsyncIterable: wrapper_decorator.__agent_diagnostics__ = True # type: ignore return wrapper_decorator + + +@experimental +def trace_agent_get_response(get_response_func: Callable) -> Callable: + """Decorator to trace agent invocation.""" + OPERATION_NAME = "invoke_agent" + + @functools.wraps(get_response_func) + async def wrapper_decorator(*args: Any, **kwargs: Any) -> ChatMessageContent: + agent: "Agent" = args[0] + + with tracer.start_as_current_span(f"{OPERATION_NAME} {agent.name}") as span: + span.set_attributes({ + gen_ai_attributes.OPERATION: OPERATION_NAME, + gen_ai_attributes.AGENT_ID: agent.id, + gen_ai_attributes.AGENT_NAME: agent.name, + }) + + if agent.description: + span.set_attribute(gen_ai_attributes.AGENT_DESCRIPTION, agent.description) + + return await get_response_func(*args, **kwargs) + + # Mark the wrapper decorator as an agent diagnostics decorator + wrapper_decorator.__agent_diagnostics__ = True # type: ignore + + return wrapper_decorator diff --git a/python/semantic_kernel/utils/telemetry/agent_diagnostics/gen_ai_attributes.py 
b/python/semantic_kernel/utils/telemetry/agent_diagnostics/gen_ai_attributes.py new file mode 100644 index 000000000000..09062516c9ed --- /dev/null +++ b/python/semantic_kernel/utils/telemetry/agent_diagnostics/gen_ai_attributes.py @@ -0,0 +1,12 @@ +# Copyright (c) Microsoft. All rights reserved. + +# Constants for tracing agent activities with semantic conventions. +# Ideally, we should use the attributes from the semcov package. +# However, many of the attributes are not yet available in the package, +# so we define them here for now. + +# Activity tags +OPERATION = "gen_ai.operation.name" +AGENT_ID = "gen_ai.agent.id" +AGENT_NAME = "gen_ai.agent.name" +AGENT_DESCRIPTION = "gen_ai.agent.description" diff --git a/python/semantic_kernel/utils/telemetry/model_diagnostics/decorators.py b/python/semantic_kernel/utils/telemetry/model_diagnostics/decorators.py index 11ff9279faa7..a813348e704f 100644 --- a/python/semantic_kernel/utils/telemetry/model_diagnostics/decorators.py +++ b/python/semantic_kernel/utils/telemetry/model_diagnostics/decorators.py @@ -13,9 +13,10 @@ from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.contents.streaming_content_mixin import StreamingContentMixin from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent -from semantic_kernel.utils.experimental_decorator import experimental_function +from semantic_kernel.utils.feature_stage_decorator import experimental from semantic_kernel.utils.telemetry.model_diagnostics import gen_ai_attributes from semantic_kernel.utils.telemetry.model_diagnostics.model_diagnostics_settings import ModelDiagnosticSettings @@ -69,7 +70,7 @@ def filter(self, record: logging.LogRecord) -> bool: logger.addFilter(ChatHistoryMessageTimestampFilter()) -@experimental_function +@experimental def are_model_diagnostics_enabled() -> bool: """Check if model diagnostics are enabled. @@ -81,7 +82,7 @@ def are_model_diagnostics_enabled() -> bool: ) -@experimental_function +@experimental def are_sensitive_events_enabled() -> bool: """Check if sensitive events are enabled. @@ -90,7 +91,7 @@ def are_sensitive_events_enabled() -> bool: return MODEL_DIAGNOSTICS_SETTINGS.enable_otel_diagnostics_sensitive -@experimental_function +@experimental def trace_chat_completion(model_provider: str) -> Callable: """Decorator to trace chat completion activities. @@ -141,7 +142,7 @@ async def wrapper_decorator(*args: Any, **kwargs: Any) -> list[ChatMessageConten return inner_trace_chat_completion -@experimental_function +@experimental def trace_streaming_chat_completion(model_provider: str) -> Callable: """Decorator to trace streaming chat completion activities. @@ -206,7 +207,7 @@ async def wrapper_decorator( return inner_trace_streaming_chat_completion -@experimental_function +@experimental def trace_text_completion(model_provider: str) -> Callable: """Decorator to trace text completion activities. @@ -257,7 +258,7 @@ async def wrapper_decorator(*args: Any, **kwargs: Any) -> list[TextContent]: return inner_trace_text_completion -@experimental_function +@experimental def trace_streaming_text_completion(model_provider: str) -> Callable: """Decorator to trace streaming text completion activities. 
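The agent-diagnostics decorators above wrap agent calls in an OpenTelemetry span named `invoke_agent <agent name>` and tag it with the new `gen_ai.*` attribute constants. A condensed sketch of that span-tagging step outside the decorator machinery; the agent values are placeholders:

```python
from opentelemetry.trace import get_tracer

from semantic_kernel.utils.telemetry.agent_diagnostics import gen_ai_attributes

tracer = get_tracer(__name__)


def record_agent_span(agent_id: str, agent_name: str, description: str | None = None) -> None:
    # The same attributes trace_agent_invocation / trace_agent_get_response set
    # around the wrapped agent call.
    with tracer.start_as_current_span(f"invoke_agent {agent_name}") as span:
        span.set_attributes({
            gen_ai_attributes.OPERATION: "invoke_agent",
            gen_ai_attributes.AGENT_ID: agent_id,
            gen_ai_attributes.AGENT_NAME: agent_name,
        })
        if description:
            span.set_attribute(gen_ai_attributes.AGENT_DESCRIPTION, description)


record_agent_span("agent-123", "travel-assistant", "Plans trips.")
```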
@@ -434,9 +435,9 @@ def _set_completion_response( "message": completion.to_dict(), } - if hasattr(completion, "finish_reason"): + if isinstance(completion, ChatMessageContent): full_response["finish_reason"] = completion.finish_reason - if hasattr(completion, "choice_index"): + if isinstance(completion, StreamingContentMixin): full_response["index"] = completion.choice_index logger.info( diff --git a/python/semantic_kernel/utils/telemetry/model_diagnostics/model_diagnostics_settings.py b/python/semantic_kernel/utils/telemetry/model_diagnostics/model_diagnostics_settings.py index ef716960153f..d4efd5de3a9f 100644 --- a/python/semantic_kernel/utils/telemetry/model_diagnostics/model_diagnostics_settings.py +++ b/python/semantic_kernel/utils/telemetry/model_diagnostics/model_diagnostics_settings.py @@ -3,10 +3,10 @@ from typing import ClassVar from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.feature_stage_decorator import experimental -@experimental_class +@experimental class ModelDiagnosticSettings(KernelBaseSettings): """Settings for model diagnostics. diff --git a/python/tests/conftest.py b/python/tests/conftest.py index 3be0430a8ad4..60bb1bda97da 100644 --- a/python/tests/conftest.py +++ b/python/tests/conftest.py @@ -107,9 +107,9 @@ def decorated_native_function(self) -> str: @fixture(scope="session") def experimental_plugin_class(): from semantic_kernel.functions.kernel_function_decorator import kernel_function - from semantic_kernel.utils.experimental_decorator import experimental_class + from semantic_kernel.utils.feature_stage_decorator import experimental - @experimental_class + @experimental class ExperimentalPlugin: @kernel_function(name="getLightStatus") def decorated_native_function(self) -> str: @@ -220,6 +220,7 @@ def azure_openai_unit_test_env(monkeypatch, exclude_list, override_env_param_dic "AZURE_OPENAI_TEXT_TO_IMAGE_DEPLOYMENT_NAME": "test_text_to_image_deployment", "AZURE_OPENAI_AUDIO_TO_TEXT_DEPLOYMENT_NAME": "test_audio_to_text_deployment", "AZURE_OPENAI_TEXT_TO_AUDIO_DEPLOYMENT_NAME": "test_text_to_audio_deployment", + "AZURE_OPENAI_REALTIME_DEPLOYMENT_NAME": "test_realtime_deployment", "AZURE_OPENAI_API_KEY": "test_api_key", "AZURE_OPENAI_ENDPOINT": "https://test-endpoint.com", "AZURE_OPENAI_API_VERSION": "2023-03-15-preview", @@ -256,6 +257,7 @@ def openai_unit_test_env(monkeypatch, exclude_list, override_env_param_dict): "OPENAI_TEXT_TO_IMAGE_MODEL_ID": "test_text_to_image_model_id", "OPENAI_AUDIO_TO_TEXT_MODEL_ID": "test_audio_to_text_model_id", "OPENAI_TEXT_TO_AUDIO_MODEL_ID": "test_text_to_audio_model_id", + "OPENAI_REALTIME_MODEL_ID": "test_realtime_model_id", } env_vars.update(override_env_param_dict) @@ -367,6 +369,28 @@ def azure_ai_search_unit_test_env(monkeypatch, exclude_list, override_env_param_ return env_vars +@fixture() +def mongodb_atlas_unit_test_env(monkeypatch, exclude_list, override_env_param_dict): + """Fixture to set environment variables for MongoDB Atlas Unit Tests.""" + if exclude_list is None: + exclude_list = [] + + if override_env_param_dict is None: + override_env_param_dict = {} + + env_vars = {"MONGODB_ATLAS_CONNECTION_STRING": "mongodb://test", "MONGODB_ATLAS_DATABASE_NAME": "test-database"} + + env_vars.update(override_env_param_dict) + + for key, value in env_vars.items(): + if key not in exclude_list: + monkeypatch.setenv(key, value) + else: + monkeypatch.delenv(key, raising=False) + + return env_vars + + @fixture() 
def bing_unit_test_env(monkeypatch, exclude_list, override_env_param_dict): """Fixture to set environment variables for BingConnector.""" diff --git a/python/tests/integration/agents/bedrock_agent/conftest.py b/python/tests/integration/agents/bedrock_agent/conftest.py new file mode 100644 index 000000000000..9583a1e1819a --- /dev/null +++ b/python/tests/integration/agents/bedrock_agent/conftest.py @@ -0,0 +1,25 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Annotated + +import pytest + +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.kernel import Kernel + + +class WeatherPlugin: + """Mock weather plugin.""" + + @kernel_function(description="Get real-time weather information.") + def current(self, location: Annotated[str, "The location to get the weather"]) -> str: + """Returns the current weather.""" + return f"The weather in {location} is sunny." + + +@pytest.fixture +def kernel_with_dummy_function() -> Kernel: + kernel = Kernel() + kernel.add_plugin(WeatherPlugin(), plugin_name="weather") + + return kernel diff --git a/python/tests/integration/agents/bedrock_agent/test_bedrock_agent_integration.py b/python/tests/integration/agents/bedrock_agent/test_bedrock_agent_integration.py new file mode 100644 index 000000000000..04d35ccd1bc3 --- /dev/null +++ b/python/tests/integration/agents/bedrock_agent/test_bedrock_agent_integration.py @@ -0,0 +1,143 @@ +# Copyright (c) Microsoft. All rights reserved. + +import uuid + +import pytest + +from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent +from semantic_kernel.contents.binary_content import BinaryContent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole + + +class TestBedrockAgentIntegration: + @pytest.fixture(autouse=True) + async def setup_and_teardown(self, request): + """Setup and teardown for the test. + + This is run for each test function, i.e. each test function will have its own instance of the agent. 
+ """ + try: + self.bedrock_agent = await BedrockAgent.create_and_prepare_agent( + f"semantic-kernel-integration-test-agent-{uuid.uuid4()}", + "You are a helpful assistant that help users with their questions.", + ) + if hasattr(request, "param"): + if "enable_code_interpreter" in request.param: + await self.bedrock_agent.create_code_interpreter_action_group() + if "kernel" in request.param: + self.bedrock_agent.kernel = request.getfixturevalue(request.param.get("kernel")) + if "enable_kernel_function" in request.param: + await self.bedrock_agent.create_kernel_function_action_group() + except Exception as e: + pytest.fail("Failed to create agent") + raise e + # Yield control to the test + yield + # Clean up + try: + await self.bedrock_agent.delete_agent() + except Exception as e: + pytest.fail(f"Failed to delete agent: {e}") + raise e + + @pytest.mark.asyncio + async def test_invoke(self): + """Test invoke of the agent.""" + async for message in self.bedrock_agent.invoke(BedrockAgent.create_session_id(), "Hello"): + assert isinstance(message, ChatMessageContent) + assert message.role == AuthorRole.ASSISTANT + assert message.content is not None + + @pytest.mark.asyncio + async def test_invoke_stream(self): + """Test invoke stream of the agent.""" + async for message in self.bedrock_agent.invoke_stream(BedrockAgent.create_session_id(), "Hello"): + assert isinstance(message, StreamingChatMessageContent) + assert message.role == AuthorRole.ASSISTANT + assert message.content is not None + + @pytest.mark.asyncio + @pytest.mark.parametrize("setup_and_teardown", [{"enable_code_interpreter": True}], indirect=True) + async def test_code_interpreter(self): + """Test code interpreter.""" + input_text = """ +Create a bar chart for the following data: +Panda 5 +Tiger 8 +Lion 3 +Monkey 6 +Dolphin 2 +""" + binary_item: BinaryContent | None = None + async for message in self.bedrock_agent.invoke(BedrockAgent.create_session_id(), input_text): + assert isinstance(message, ChatMessageContent) + assert message.role == AuthorRole.ASSISTANT + if not binary_item: + binary_item = next((item for item in message.items if isinstance(item, BinaryContent)), None) + + assert binary_item + + @pytest.mark.asyncio + @pytest.mark.parametrize("setup_and_teardown", [{"enable_code_interpreter": True}], indirect=True) + async def test_code_interpreter_stream(self): + """Test code interpreter streaming.""" + input_text = """ +Create a bar chart for the following data: +Panda 5 +Tiger 8 +Lion 3 +Monkey 6 +Dolphin 2 +""" + binary_item: BinaryContent | None = None + async for message in self.bedrock_agent.invoke_stream(BedrockAgent.create_session_id(), input_text): + assert isinstance(message, StreamingChatMessageContent) + assert message.role == AuthorRole.ASSISTANT + binary_item = next((item for item in message.items if isinstance(item, BinaryContent)), None) + assert binary_item + + @pytest.mark.asyncio + @pytest.mark.parametrize( + "setup_and_teardown", + [ + { + "enable_kernel_function": True, + "kernel": "kernel_with_dummy_function", + }, + ], + indirect=True, + ) + async def test_function_calling(self): + """Test function calling.""" + async for message in self.bedrock_agent.invoke( + BedrockAgent.create_session_id(), + "What is the weather in Seattle?", + ): + assert isinstance(message, ChatMessageContent) + assert message.role == AuthorRole.ASSISTANT + assert "sunny" in message.content + + @pytest.mark.asyncio + @pytest.mark.parametrize( + "setup_and_teardown", + [ + { + "enable_kernel_function": True, + "kernel": 
"kernel_with_dummy_function", + }, + ], + indirect=True, + ) + async def test_function_calling_stream(self): + """Test function calling streaming.""" + full_message: str = "" + async for message in self.bedrock_agent.invoke_stream( + BedrockAgent.create_session_id(), + "What is the weather in Seattle?", + ): + assert isinstance(message, StreamingChatMessageContent) + assert message.role == AuthorRole.ASSISTANT + full_message += message.content + assert "sunny" in full_message diff --git a/python/tests/integration/completions/chat_completion_test_base.py b/python/tests/integration/completions/chat_completion_test_base.py index 7a4db5b8fcfe..890298556605 100644 --- a/python/tests/integration/completions/chat_completion_test_base.py +++ b/python/tests/integration/completions/chat_completion_test_base.py @@ -5,6 +5,11 @@ import sys from typing import Annotated +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + import pytest from azure.ai.inference.aio import ChatCompletionsClient from azure.identity import DefaultAzureCredential @@ -41,11 +46,6 @@ from tests.integration.completions.completion_test_base import CompletionTestBase, ServiceType from tests.utils import is_service_setup_for_testing -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - # Make sure all services are setup for before running the tests # The following exceptions apply: # 1. OpenAI and Azure OpenAI services are always setup for testing. @@ -67,8 +67,6 @@ ["ONNX_GEN_AI_CHAT_MODEL_FOLDER"], raise_if_not_set=False ) # Tests are optional for ONNX anthropic_setup: bool = is_service_setup_for_testing(["ANTHROPIC_API_KEY", "ANTHROPIC_CHAT_MODEL_ID"]) -# When testing Bedrock, after logging into AWS CLI this has been set, so we can use it to check if the service is setup -bedrock_setup: bool = is_service_setup_for_testing(["AWS_DEFAULT_REGION"], raise_if_not_set=False) # A mock plugin that contains a function that returns a complex object. 
@@ -90,7 +88,9 @@ class ChatCompletionTestBase(CompletionTestBase): """Base class for testing completion services.""" @override - @pytest.fixture(scope="function") + @pytest.fixture( + scope="function" + ) # This needs to be scoped to function to avoid resources getting cleaned up after each test def services(self) -> dict[str, tuple[ServiceType | None, type[PromptExecutionSettings] | None]]: azure_openai_setup = True azure_openai_settings = AzureOpenAISettings.create() @@ -152,27 +152,27 @@ def services(self) -> dict[str, tuple[ServiceType | None, type[PromptExecutionSe OnnxGenAIPromptExecutionSettings, ), "bedrock_amazon_titan": ( - BedrockChatCompletion(model_id="amazon.titan-text-premier-v1:0") if bedrock_setup else None, + self._try_create_bedrock_chat_completion_client("amazon.titan-text-premier-v1:0"), BedrockChatPromptExecutionSettings, ), "bedrock_ai21labs": ( - BedrockChatCompletion(model_id="ai21.jamba-1-5-mini-v1:0") if bedrock_setup else None, + self._try_create_bedrock_chat_completion_client("ai21.jamba-1-5-mini-v1:0"), BedrockChatPromptExecutionSettings, ), "bedrock_anthropic_claude": ( - BedrockChatCompletion(model_id="anthropic.claude-3-5-sonnet-20240620-v1:0") if bedrock_setup else None, + self._try_create_bedrock_chat_completion_client("anthropic.claude-3-5-sonnet-20240620-v1:0"), BedrockChatPromptExecutionSettings, ), "bedrock_cohere_command": ( - BedrockChatCompletion(model_id="cohere.command-r-v1:0") if bedrock_setup else None, + self._try_create_bedrock_chat_completion_client("cohere.command-r-v1:0"), BedrockChatPromptExecutionSettings, ), "bedrock_meta_llama": ( - BedrockChatCompletion(model_id="meta.llama3-70b-instruct-v1:0") if bedrock_setup else None, + self._try_create_bedrock_chat_completion_client("meta.llama3-70b-instruct-v1:0"), BedrockChatPromptExecutionSettings, ), "bedrock_mistralai": ( - BedrockChatCompletion(model_id="mistral.mistral-small-2402-v1:0") if bedrock_setup else None, + self._try_create_bedrock_chat_completion_client("mistral.mistral-small-2402-v1:0"), BedrockChatPromptExecutionSettings, ), } @@ -218,3 +218,13 @@ async def get_chat_completion_response( if parts: return sum(parts[1:], parts[0]) raise AssertionError("No response") + + def _try_create_bedrock_chat_completion_client(self, model_id: str) -> BedrockChatCompletion | None: + try: + return BedrockChatCompletion(model_id=model_id) + except Exception as ex: + from conftest import logger + + logger.warning(ex) + # Returning None so that the test that uses this service will be skipped + return None diff --git a/python/tests/integration/completions/test_chat_completions.py b/python/tests/integration/completions/test_chat_completions.py index 67eab08b0a90..f0e9a4fdb5ea 100644 --- a/python/tests/integration/completions/test_chat_completions.py +++ b/python/tests/integration/completions/test_chat_completions.py @@ -12,15 +12,12 @@ from typing_extensions import override # pragma: no cover from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -from semantic_kernel.contents import ChatMessageContent, TextContent -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.connectors.ai import PromptExecutionSettings +from semantic_kernel.contents import AuthorRole, ChatHistory, ChatMessageContent, TextContent from semantic_kernel.kernel_pydantic import KernelBaseModel from tests.integration.completions.chat_completion_test_base 
import ( ChatCompletionTestBase, anthropic_setup, - bedrock_setup, mistral_ai_setup, ollama_setup, onnx_setup, @@ -192,7 +189,6 @@ class Reasoning(KernelBaseModel): ChatMessageContent(role=AuthorRole.USER, items=[TextContent(text="How are you today?")]), ], {}, - marks=pytest.mark.skipif(not bedrock_setup, reason="Bedrock Environment Variables not set"), id="bedrock_amazon_titan_text_input", ), pytest.param( diff --git a/python/tests/integration/completions/test_text_completion.py b/python/tests/integration/completions/test_text_completion.py index 7e6792de4465..c2e1e06d523a 100644 --- a/python/tests/integration/completions/test_text_completion.py +++ b/python/tests/integration/completions/test_text_completion.py @@ -2,6 +2,7 @@ import sys from functools import partial +from importlib import util from typing import Any if sys.version_info >= (3, 12): @@ -32,6 +33,9 @@ from tests.integration.completions.completion_test_base import CompletionTestBase, ServiceType from tests.utils import is_service_setup_for_testing, is_test_running_on_supported_platforms, retry +hugging_face_setup = util.find_spec("torch") is not None + + azure_openai_setup = True ollama_setup: bool = is_service_setup_for_testing(["OLLAMA_TEXT_MODEL_ID"]) and is_test_running_on_supported_platforms([ "Linux" @@ -41,7 +45,6 @@ onnx_setup: bool = is_service_setup_for_testing( ["ONNX_GEN_AI_TEXT_MODEL_FOLDER"], raise_if_not_set=False ) # Tests are optional for ONNX -bedrock_setup = is_service_setup_for_testing(["AWS_DEFAULT_REGION"], raise_if_not_set=False) pytestmark = pytest.mark.parametrize( "service_id, execution_settings_kwargs, inputs, kwargs", @@ -134,7 +137,6 @@ {}, ["Repeat the word Hello once"], {}, - marks=pytest.mark.skipif(not bedrock_setup, reason="Not setup"), id="bedrock_amazon_titan_text_completion", ), pytest.param( @@ -219,7 +221,9 @@ def services(self) -> dict[str, tuple[ServiceType | None, type[PromptExecutionSe service_id="patrickvonplaten/t5-tiny-random", ai_model_id="patrickvonplaten/t5-tiny-random", task="text2text-generation", - ), + ) + if hugging_face_setup + else None, HuggingFacePromptExecutionSettings, ), "hf_summ": ( @@ -227,7 +231,9 @@ def services(self) -> dict[str, tuple[ServiceType | None, type[PromptExecutionSe service_id="jotamunz/billsum_tiny_summarization", ai_model_id="jotamunz/billsum_tiny_summarization", task="summarization", - ), + ) + if hugging_face_setup + else None, HuggingFacePromptExecutionSettings, ), "hf_gen": ( @@ -235,7 +241,9 @@ def services(self) -> dict[str, tuple[ServiceType | None, type[PromptExecutionSe service_id="HuggingFaceM4/tiny-random-LlamaForCausalLM", ai_model_id="HuggingFaceM4/tiny-random-LlamaForCausalLM", task="text-generation", - ), + ) + if hugging_face_setup + else None, HuggingFacePromptExecutionSettings, ), "onnx_gen_ai": ( @@ -245,27 +253,27 @@ def services(self) -> dict[str, tuple[ServiceType | None, type[PromptExecutionSe # Amazon Bedrock supports models from multiple providers but requests to and responses from the models are # inconsistent. So we need to test each model separately. 
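Rather than gating every Bedrock case on a single `bedrock_setup` flag, the test bases above now try to construct each Bedrock client and fall back to `None` when the environment (for example, AWS region or credentials) is not configured, so the corresponding parametrized case is skipped instead of failing. A generic sketch of the pattern; `build_titan_client` is a stand-in for the real constructors:

```python
import logging

logger = logging.getLogger(__name__)


def try_create_client(build_client):
    """Return the client, or None so the parametrized case that needs it is skipped."""
    try:
        return build_client()
    except Exception as ex:  # e.g. missing AWS region or credentials
        logger.warning(ex)
        return None


def build_titan_client():
    # Stand-in for BedrockChatCompletion(model_id="amazon.titan-text-premier-v1:0"),
    # which raises when the AWS environment is not configured.
    raise RuntimeError("AWS_DEFAULT_REGION is not set")


assert try_create_client(build_titan_client) is None
```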
"bedrock_amazon_titan": ( - BedrockTextCompletion(model_id="amazon.titan-text-premier-v1:0") if bedrock_setup else None, + self._try_create_bedrock_text_completion_client("amazon.titan-text-premier-v1:0"), BedrockTextPromptExecutionSettings, ), "bedrock_anthropic_claude": ( - BedrockTextCompletion(model_id="anthropic.claude-v2") if bedrock_setup else None, + self._try_create_bedrock_text_completion_client("anthropic.claude-v2"), BedrockTextPromptExecutionSettings, ), "bedrock_cohere_command": ( - BedrockTextCompletion(model_id="cohere.command-text-v14") if bedrock_setup else None, + self._try_create_bedrock_text_completion_client("cohere.command-text-v14"), BedrockTextPromptExecutionSettings, ), "bedrock_ai21labs": ( - BedrockTextCompletion(model_id="ai21.j2-mid-v1") if bedrock_setup else None, + self._try_create_bedrock_text_completion_client("ai21.j2-mid-v1"), BedrockTextPromptExecutionSettings, ), "bedrock_meta_llama": ( - BedrockTextCompletion(model_id="meta.llama3-70b-instruct-v1:0") if bedrock_setup else None, + self._try_create_bedrock_text_completion_client("meta.llama3-70b-instruct-v1:0"), BedrockTextPromptExecutionSettings, ), "bedrock_mistralai": ( - BedrockTextCompletion(model_id="mistral.mistral-7b-instruct-v0:2") if bedrock_setup else None, + self._try_create_bedrock_text_completion_client("mistral.mistral-7b-instruct-v0:2"), BedrockTextPromptExecutionSettings, ), } @@ -363,3 +371,13 @@ async def _test_helper( name="text completions", ) self.evaluate(response) + + def _try_create_bedrock_text_completion_client(self, model_id: str) -> BedrockTextCompletion | None: + try: + return BedrockTextCompletion(model_id=model_id) + except Exception as ex: + from conftest import logger + + logger.warning(ex) + # Returning None so that the test that uses this service will be skipped + return None diff --git a/python/tests/integration/cross_language/test_cross_language.py b/python/tests/integration/cross_language/test_cross_language.py index 18724c17a6c9..1e00e855eaa8 100644 --- a/python/tests/integration/cross_language/test_cross_language.py +++ b/python/tests/integration/cross_language/test_cross_language.py @@ -733,7 +733,7 @@ async def test_openapi_put_light_by_id(kernel: Kernel): assert request_content.get("method") == "PUT" assert request_content.get("url") == "https://127.0.0.1/Lights/1" - assert request_content.get("body") == '{"hexColor": "11EE11"}' + assert request_content.get("body") == '{"hexColor":"11EE11"}' # endregion diff --git a/python/tests/integration/embeddings/test_embedding_service_base.py b/python/tests/integration/embeddings/test_embedding_service_base.py index 1ecca21657e3..160370d10195 100644 --- a/python/tests/integration/embeddings/test_embedding_service_base.py +++ b/python/tests/integration/embeddings/test_embedding_service_base.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from importlib import util + import pytest from azure.ai.inference.aio import EmbeddingsClient from azure.identity import DefaultAzureCredential @@ -32,6 +34,8 @@ from semantic_kernel.utils.authentication.entra_id_authentication import get_entra_auth_token from tests.utils import is_service_setup_for_testing +hugging_face_setup = util.find_spec("torch") is not None + # Make sure all services are setup for before running the tests # The following exceptions apply: # 1. OpenAI and Azure OpenAI services are always setup for testing. 
@@ -91,7 +95,9 @@ def services(self) -> dict[str, tuple[EmbeddingGeneratorBase | None, type[Prompt PromptExecutionSettings, ), "hugging_face": ( - HuggingFaceTextEmbedding(ai_model_id="sentence-transformers/all-MiniLM-L6-v2"), + HuggingFaceTextEmbedding(ai_model_id="sentence-transformers/all-MiniLM-L6-v2") + if hugging_face_setup + else None, PromptExecutionSettings, ), "ollama": (OllamaTextEmbedding() if ollama_setup else None, OllamaEmbeddingPromptExecutionSettings), diff --git a/python/tests/integration/memory/vector_stores/azure_cosmos_db/test_azure_cosmos_db_no_sql.py b/python/tests/integration/memory/vector_stores/azure_cosmos_db/test_azure_cosmos_db_no_sql.py index 5ecdddd6835d..e860d6387e87 100644 --- a/python/tests/integration/memory/vector_stores/azure_cosmos_db/test_azure_cosmos_db_no_sql.py +++ b/python/tests/integration/memory/vector_stores/azure_cosmos_db/test_azure_cosmos_db_no_sql.py @@ -2,6 +2,7 @@ import os import platform +from collections.abc import Callable from typing import Any import pytest @@ -26,11 +27,11 @@ class TestCosmosDBNoSQL(VectorStoreTestBase): async def test_list_collection_names( self, - stores: dict[str, VectorStore], + stores: dict[str, Callable[[], VectorStore]], data_model_type: type, ): """Test list collection names.""" - async with stores["azure_cosmos_db_no_sql"] as store: + async with stores["azure_cosmos_db_no_sql"]() as store: assert await store.list_collection_names() == [] collection_name = "list_collection_names" @@ -50,12 +51,12 @@ async def test_list_collection_names( async def test_collection_not_created( self, - stores: dict[str, VectorStore], + stores: dict[str, Callable[[], VectorStore]], data_model_type: type, data_record: dict[str, Any], ): """Test get without collection.""" - async with stores["azure_cosmos_db_no_sql"] as store: + async with stores["azure_cosmos_db_no_sql"]() as store: collection_name = "collection_not_created" collection = store.get_collection(collection_name, data_model_type) @@ -79,12 +80,12 @@ async def test_collection_not_created( async def test_custom_partition_key( self, - stores: dict[str, VectorStore], + stores: dict[str, Callable[[], VectorStore]], data_model_type: type, data_record: dict[str, Any], ): """Test custom partition key.""" - async with stores["azure_cosmos_db_no_sql"] as store: + async with stores["azure_cosmos_db_no_sql"]() as store: collection_name = "custom_partition_key" collection = store.get_collection( collection_name, @@ -116,12 +117,12 @@ async def test_custom_partition_key( async def test_get_include_vector( self, - stores: dict[str, VectorStore], + stores: dict[str, Callable[[], VectorStore]], data_model_type: type, data_record: dict[str, Any], ): """Test get with include_vector.""" - async with stores["azure_cosmos_db_no_sql"] as store: + async with stores["azure_cosmos_db_no_sql"]() as store: collection_name = "get_include_vector" collection = store.get_collection(collection_name, data_model_type) @@ -146,12 +147,12 @@ async def test_get_include_vector( async def test_get_not_include_vector( self, - stores: dict[str, VectorStore], + stores: dict[str, Callable[[], VectorStore]], data_model_type: type, data_record: dict[str, Any], ): """Test get with include_vector.""" - async with stores["azure_cosmos_db_no_sql"] as store: + async with stores["azure_cosmos_db_no_sql"]() as store: collection_name = "get_not_include_vector" collection = store.get_collection(collection_name, data_model_type) @@ -176,12 +177,12 @@ async def test_get_not_include_vector( async def 
test_collection_with_key_as_key_field( self, - stores: dict[str, VectorStore], + stores: dict[str, Callable[[], VectorStore]], data_model_type_with_key_as_key_field: type, data_record_with_key_as_key_field: dict[str, Any], ): """Test collection with key as key field.""" - async with stores["azure_cosmos_db_no_sql"] as store: + async with stores["azure_cosmos_db_no_sql"]() as store: collection_name = "collection_with_key_as_key_field" collection = store.get_collection(collection_name, data_model_type_with_key_as_key_field) diff --git a/python/tests/integration/memory/vector_stores/postgres/test_postgres_int.py b/python/tests/integration/memory/vector_stores/postgres/test_postgres_int.py index fb280e047a39..4748a878a928 100644 --- a/python/tests/integration/memory/vector_stores/postgres/test_postgres_int.py +++ b/python/tests/integration/memory/vector_stores/postgres/test_postgres_int.py @@ -1,7 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. import uuid -from collections.abc import AsyncGenerator +from collections.abc import AsyncGenerator, Sequence from contextlib import asynccontextmanager from typing import Annotated, Any @@ -11,6 +11,7 @@ from pydantic import BaseModel from semantic_kernel.connectors.memory.postgres import PostgresSettings, PostgresStore +from semantic_kernel.connectors.memory.postgres.postgres_collection import PostgresCollection from semantic_kernel.data import ( DistanceFunction, IndexKind, @@ -20,6 +21,7 @@ VectorStoreRecordVectorField, vectorstoremodel, ) +from semantic_kernel.data.vector_search.vector_search_options import VectorSearchOptions from semantic_kernel.exceptions.memory_connector_exceptions import ( MemoryConnectorConnectionException, MemoryConnectorInitializationError, @@ -49,13 +51,13 @@ class SimpleDataModel(BaseModel): id: Annotated[int, VectorStoreRecordKeyField()] embedding: Annotated[ - list[float], + list[float] | None, VectorStoreRecordVectorField( index_kind=IndexKind.HNSW, dimensions=3, distance_function=DistanceFunction.COSINE_SIMILARITY, ), - ] + ] = None data: Annotated[ dict[str, Any], VectorStoreRecordDataField(has_embedding=True, embedding_property_name="embedding", property_type="JSONB"), @@ -97,7 +99,9 @@ async def vector_store() -> AsyncGenerator[PostgresStore, None]: @asynccontextmanager -async def create_simple_collection(vector_store: PostgresStore): +async def create_simple_collection( + vector_store: PostgresStore, +) -> AsyncGenerator[PostgresCollection[int, SimpleDataModel], None]: """Returns a collection with a unique name that is deleted after the context. This can be moved to use a fixture with scope=function and loop_scope=session @@ -107,6 +111,7 @@ async def create_simple_collection(vector_store: PostgresStore): suffix = str(uuid.uuid4()).replace("-", "")[:8] collection_id = f"test_collection_{suffix}" collection = vector_store.get_collection(collection_id, SimpleDataModel) + assert isinstance(collection, PostgresCollection) await collection.create_collection() try: yield collection @@ -213,6 +218,7 @@ async def test_upsert_get_and_delete_batch(vector_store: PostgresStore): # this should return only the two existing records. 
result = await simple_collection.get_batch([1, 2, 3]) assert result is not None + assert isinstance(result, Sequence) assert len(result) == 2 assert result[0] is not None assert result[0].id == record1.id @@ -226,3 +232,28 @@ async def test_upsert_get_and_delete_batch(vector_store: PostgresStore): await simple_collection.delete_batch([1, 2]) result_after_delete = await simple_collection.get_batch([1, 2]) assert result_after_delete is None + + +async def test_search(vector_store: PostgresStore): + async with create_simple_collection(vector_store) as simple_collection: + records = [ + SimpleDataModel(id=1, embedding=[1.0, 0.0, 0.0], data={"key": "value1"}), + SimpleDataModel(id=2, embedding=[0.8, 0.2, 0.0], data={"key": "value2"}), + SimpleDataModel(id=3, embedding=[0.6, 0.0, 0.4], data={"key": "value3"}), + SimpleDataModel(id=4, embedding=[1.0, 1.0, 0.0], data={"key": "value4"}), + SimpleDataModel(id=5, embedding=[0.0, 1.0, 1.0], data={"key": "value5"}), + SimpleDataModel(id=6, embedding=[1.0, 0.0, 1.0], data={"key": "value6"}), + ] + + await simple_collection.upsert_batch(records) + + try: + search_results = await simple_collection.vectorized_search( + [1.0, 0.0, 0.0], options=VectorSearchOptions(top=3, include_total_count=True) + ) + assert search_results is not None + assert search_results.total_count == 3 + assert {result.record.id async for result in search_results.results} == {1, 2, 3} + + finally: + await simple_collection.delete_batch([r.id for r in records]) diff --git a/python/tests/integration/memory/vector_stores/test_vector_store.py b/python/tests/integration/memory/vector_stores/test_vector_store.py index 23b5f9dca1c0..d47b0466af6f 100644 --- a/python/tests/integration/memory/vector_stores/test_vector_store.py +++ b/python/tests/integration/memory/vector_stores/test_vector_store.py @@ -2,6 +2,7 @@ import logging import platform +from collections.abc import Callable from typing import Any import pandas as pd @@ -23,8 +24,18 @@ class TestVectorStore(VectorStoreTestBase): """ @pytest.mark.parametrize( - "store_id, collection_name, collection_options, data_model_type, data_model_definition, distance_function, " - "index_kind, vector_property_type, dimensions, record", + [ + "store_id", + "collection_name", + "collection_options", + "data_model_type", + "data_model_definition", + "distance_function", + "index_kind", + "vector_property_type", + "dimensions", + "record", + ], [ # region Redis pytest.param( @@ -322,7 +333,7 @@ class TestVectorStore(VectorStoreTestBase): id="weaviate_local_pandas_data_model", ), # endregion - # region Azure Cosmos DB NoSQL + # region Azure Cosmos DB pytest.param( "azure_cosmos_db_no_sql", "azure_cosmos_db_no_sql_array_data_model", @@ -375,11 +386,53 @@ class TestVectorStore(VectorStoreTestBase): id="azure_cosmos_db_no_sql_pandas_data_model", ), # endregion + # region Chroma + pytest.param( + "chroma", + "chroma_array_data_model", + {}, + "dataclass_vector_data_model_array", + None, + None, + None, + None, + 5, + RAW_RECORD_ARRAY, + id="chroma_array_data_model", + ), + pytest.param( + "chroma", + "chroma_list_data_model", + {}, + "dataclass_vector_data_model", + None, + None, + None, + None, + 5, + RAW_RECORD_LIST, + id="chroma_list_data_model", + ), + pytest.param( + "chroma", + "chroma_pandas_data_model", + {}, + pd.DataFrame, + "data_model_definition_pandas", + None, + None, + None, + 5, + RAW_RECORD_LIST, + id="chroma_pandas_data_model", + ), + # endregion ], ) + # region test function async def test_vector_store( self, - stores: dict[str, 
VectorStore], + stores: dict[str, Callable[[], VectorStore]], store_id: str, collection_name: str, collection_options: dict[str, Any], @@ -399,7 +452,7 @@ async def test_vector_store( data_model_definition = request.getfixturevalue(data_model_definition) try: async with ( - stores[store_id] as vector_store, + stores[store_id]() as vector_store, vector_store.get_collection( collection_name, data_model_type, data_model_definition, **collection_options ) as collection, diff --git a/python/tests/integration/memory/vector_stores/vector_store_test_base.py b/python/tests/integration/memory/vector_stores/vector_store_test_base.py index abd78a525796..fc1a33d91517 100644 --- a/python/tests/integration/memory/vector_stores/vector_store_test_base.py +++ b/python/tests/integration/memory/vector_stores/vector_store_test_base.py @@ -1,24 +1,66 @@ # Copyright (c) Microsoft. All rights reserved. +from collections.abc import Callable + import pytest -from semantic_kernel.connectors.memory.azure_ai_search.azure_ai_search_store import AzureAISearchStore -from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_no_sql_store import AzureCosmosDBNoSQLStore -from semantic_kernel.connectors.memory.qdrant.qdrant_store import QdrantStore -from semantic_kernel.connectors.memory.redis.redis_store import RedisStore -from semantic_kernel.connectors.memory.weaviate.weaviate_store import WeaviateStore from semantic_kernel.data import VectorStore +def get_redis_store(): + from semantic_kernel.connectors.memory.redis.redis_store import RedisStore + + return RedisStore() + + +def get_azure_ai_search_store(): + from semantic_kernel.connectors.memory.azure_ai_search.azure_ai_search_store import AzureAISearchStore + + return AzureAISearchStore() + + +def get_qdrant_store(): + from semantic_kernel.connectors.memory.qdrant.qdrant_store import QdrantStore + + return QdrantStore() + + +def get_qdrant_store_in_memory(): + from semantic_kernel.connectors.memory.qdrant.qdrant_store import QdrantStore + + return QdrantStore(location=":memory:") + + +def get_weaviate_store(): + from semantic_kernel.connectors.memory.weaviate.weaviate_store import WeaviateStore + + return WeaviateStore(local_host="localhost") + + +def get_azure_cosmos_db_no_sql_store(): + from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_no_sql_store import ( + AzureCosmosDBNoSQLStore, + ) + + return AzureCosmosDBNoSQLStore(database_name="test_database", create_database=True) + + +def get_chroma_store(): + from semantic_kernel.connectors.memory.chroma.chroma import ChromaStore + + return ChromaStore() + + class VectorStoreTestBase: @pytest.fixture - def stores(self) -> dict[str, VectorStore]: + def stores(self) -> dict[str, Callable[[], VectorStore]]: """Return a dictionary of vector stores to test.""" return { - "redis": RedisStore(), - "azure_ai_search": AzureAISearchStore(), - "qdrant": QdrantStore(), - "qdrant_in_memory": QdrantStore(location=":memory:"), - "weaviate_local": WeaviateStore(local_host="localhost"), - "azure_cosmos_db_no_sql": AzureCosmosDBNoSQLStore(database_name="test_database", create_database=True), + "redis": get_redis_store, + "azure_ai_search": get_azure_ai_search_store, + "qdrant": get_qdrant_store, + "qdrant_in_memory": get_qdrant_store_in_memory, + "weaviate_local": get_weaviate_store, + "azure_cosmos_db_no_sql": get_azure_cosmos_db_no_sql_store, + "chroma": get_chroma_store, } diff --git a/python/tests/samples/test_concepts.py b/python/tests/samples/test_concepts.py index d2455d4f4d6d..85c11c43902d 100644 
--- a/python/tests/samples/test_concepts.py +++ b/python/tests/samples/test_concepts.py @@ -17,6 +17,7 @@ from samples.concepts.auto_function_calling.functions_defined_in_yaml_prompt import ( main as function_defined_in_yaml_prompt, ) +from samples.concepts.caching.semantic_caching import main as semantic_caching from samples.concepts.chat_completion.simple_chatbot import main as simple_chatbot from samples.concepts.chat_completion.simple_chatbot_kernel_function import main as simple_chatbot_kernel_function from samples.concepts.chat_completion.simple_chatbot_logit_bias import main as simple_chatbot_logit_bias @@ -26,21 +27,14 @@ from samples.concepts.filtering.function_invocation_filters import main as function_invocation_filters from samples.concepts.filtering.function_invocation_filters_stream import main as function_invocation_filters_stream from samples.concepts.filtering.prompt_filters import main as prompt_filters +from samples.concepts.filtering.retry_with_different_model import main as retry_with_different_model from samples.concepts.functions.kernel_arguments import main as kernel_arguments from samples.concepts.grounding.grounded import main as grounded from samples.concepts.images.image_generation import main as image_generation from samples.concepts.local_models.lm_studio_chat_completion import main as lm_studio_chat_completion from samples.concepts.local_models.lm_studio_text_embedding import main as lm_studio_text_embedding from samples.concepts.local_models.ollama_chat_completion import main as ollama_chat_completion -from samples.concepts.memory.azure_cognitive_search_memory import main as azure_cognitive_search_memory -from samples.concepts.memory.memory import main as memory -from samples.concepts.planners.azure_openai_function_calling_stepwise_planner import ( - main as azure_openai_function_calling_stepwise_planner, -) -from samples.concepts.planners.openai_function_calling_stepwise_planner import ( - main as openai_function_calling_stepwise_planner, -) -from samples.concepts.planners.sequential_planner import main as sequential_planner +from samples.concepts.memory.simple_memory import main as simple_memory from samples.concepts.plugins.openai_function_calling_with_custom_plugin import ( main as openai_function_calling_with_custom_plugin, ) @@ -53,10 +47,22 @@ from samples.concepts.rag.rag_with_text_memory_plugin import main as rag_with_text_memory_plugin from samples.concepts.search.bing_search_plugin import main as bing_search_plugin from samples.concepts.service_selector.custom_service_selector import main as custom_service_selector -from samples.getting_started_with_agents.step1_agent import main as step1_agent -from samples.getting_started_with_agents.step2_plugins import main as step2_plugins -from samples.getting_started_with_agents.step3_chat import main as step3_chat -from samples.getting_started_with_agents.step7_assistant import main as step7_assistant +from samples.getting_started_with_agents.chat_completion.step1_chat_completion_agent_simple import ( + main as step1_chat_completion_agent_simple, +) +from samples.getting_started_with_agents.chat_completion.step2_chat_completion_agent_with_kernel import ( + main as step2_chat_completion_agent_with_kernel, +) +from samples.getting_started_with_agents.chat_completion.step3_chat_completion_agent_plugin_simple import ( + main as step3_chat_completion_agent_plugin_simple, +) +from samples.getting_started_with_agents.chat_completion.step4_chat_completion_agent_plugin_with_kernel import ( + main as 
step4_chat_completion_agent_plugin_with_kernel, +) +from samples.getting_started_with_agents.chat_completion.step5_chat_completion_agent_group_chat import ( + main as step5_chat_completion_agent_group_chat, +) +from samples.getting_started_with_agents.openai_assistant.step1_assistant import main as step1_openai_assistant from tests.utils import retry # These environment variable names are used to control which samples are run during integration testing. @@ -65,6 +71,14 @@ MEMORY_CONCEPT_SAMPLE = "MEMORY_CONCEPT_SAMPLE" concepts = [ + param( + semantic_caching, + [], + id="semantic_caching", + marks=pytest.mark.skipif( + os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." + ), + ), param( simple_chatbot, ["Why is the sky blue in one sentence?", "exit"], @@ -146,41 +160,26 @@ ), ), param( - kernel_arguments, - [], - id="kernel_arguments", - marks=pytest.mark.skipif( - os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." - ), - ), - param( - grounded, + retry_with_different_model, [], - id="grounded", + id="retry_with_different_model", marks=pytest.mark.skipif( - os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." + os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, + reason="Not running completion samples.", ), ), param( - azure_openai_function_calling_stepwise_planner, - [], - id="azure_openai_function_calling_stepwise_planner", - marks=pytest.mark.skipif( - os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." - ), - ), - param( - openai_function_calling_stepwise_planner, + kernel_arguments, [], - id="openai_function_calling_stepwise_planner", + id="kernel_arguments", marks=pytest.mark.skipif( os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." ), ), param( - sequential_planner, + grounded, [], - id="sequential_planner", + id="grounded", marks=pytest.mark.skipif( os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." ), @@ -242,15 +241,9 @@ ), ), param( - azure_cognitive_search_memory, + simple_memory, [], - id="azure_cognitive_search_memory", - marks=pytest.mark.skipif(os.getenv(MEMORY_CONCEPT_SAMPLE, None) is None, reason="Not running memory samples."), - ), - param( - memory, - ["What are my investments?", "exit"], - id="memory", + id="simple_memory", marks=pytest.mark.skipif(os.getenv(MEMORY_CONCEPT_SAMPLE, None) is None, reason="Not running memory samples."), ), param(rag_with_text_memory_plugin, [], id="rag_with_text_memory_plugin"), @@ -285,33 +278,49 @@ ), ), param( - step1_agent, + step1_chat_completion_agent_simple, + [], + id="step1_chat_completion_agent_simple", + marks=pytest.mark.skipif( + os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." + ), + ), + param( + step2_chat_completion_agent_with_kernel, + [], + id="step2_chat_completion_agent_with_kernel", + marks=pytest.mark.skipif( + os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." + ), + ), + param( + step3_chat_completion_agent_plugin_simple, [], - id="step1_agent", + id="step3_chat_completion_agent_plugin_simple", marks=pytest.mark.skipif( os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." 
), ), param( - step2_plugins, + step4_chat_completion_agent_plugin_with_kernel, [], - id="step2_agent_plugins", + id="step4_chat_completion_agent_plugin_with_kernel", marks=pytest.mark.skipif( os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." ), ), param( - step3_chat, + step5_chat_completion_agent_group_chat, [], - id="step3_chat", + id="step5_chat_completion_agent_group_chat", marks=pytest.mark.skipif( os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." ), ), param( - step7_assistant, + step1_openai_assistant, [], - id="step7_assistant", + id="step1_openai_assistant", marks=pytest.mark.skipif( os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." ), diff --git a/python/tests/samples/test_learn_resources.py b/python/tests/samples/test_learn_resources.py index 0a3cab0dae3c..77e045e8cc6a 100644 --- a/python/tests/samples/test_learn_resources.py +++ b/python/tests/samples/test_learn_resources.py @@ -9,7 +9,6 @@ from samples.learn_resources.configuring_prompts import main as configuring_prompts from samples.learn_resources.creating_functions import main as creating_functions from samples.learn_resources.functions_within_prompts import main as functions_within_prompts -from samples.learn_resources.planner import main as planner from samples.learn_resources.plugin import main as plugin from samples.learn_resources.serializing_prompts import main as serializing_prompts from samples.learn_resources.templates import main as templates @@ -54,14 +53,6 @@ os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." ), ), - param( - planner, - [], - id="planner", - marks=mark.skipif( - os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." - ), - ), param( plugin, [], diff --git a/python/tests/unit/agents/autogen_conversable_agent/test_autogen_conversable_agent.py b/python/tests/unit/agents/autogen_conversable_agent/test_autogen_conversable_agent.py new file mode 100644 index 000000000000..224b5ac42931 --- /dev/null +++ b/python/tests/unit/agents/autogen_conversable_agent/test_autogen_conversable_agent.py @@ -0,0 +1,123 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from unittest.mock import AsyncMock, MagicMock + +import pytest +from autogen import ConversableAgent + +from semantic_kernel.agents.autogen.autogen_conversable_agent import AutoGenConversableAgent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions.agent_exceptions import AgentInvokeException + + +@pytest.fixture +def mock_conversable_agent(): + agent = MagicMock(spec=ConversableAgent) + agent.name = "MockName" + agent.description = "MockDescription" + agent.system_message = "MockSystemMessage" + return agent + + +async def test_autogen_conversable_agent_initialization(mock_conversable_agent): + agent = AutoGenConversableAgent(mock_conversable_agent, id="mock_id") + assert agent.name == "MockName" + assert agent.description == "MockDescription" + assert agent.instructions == "MockSystemMessage" + assert agent.conversable_agent == mock_conversable_agent + + +async def test_autogen_conversable_agent_get_response(mock_conversable_agent): + mock_conversable_agent.a_generate_reply = AsyncMock(return_value="Mocked assistant response") + agent = AutoGenConversableAgent(mock_conversable_agent) + + response = await agent.get_response("Hello") + assert response.role == AuthorRole.ASSISTANT + assert response.content == "Mocked assistant response" + + +async def test_autogen_conversable_agent_get_response_exception(mock_conversable_agent): + mock_conversable_agent.a_generate_reply = AsyncMock(return_value=None) + agent = AutoGenConversableAgent(mock_conversable_agent) + + with pytest.raises(AgentInvokeException): + await agent.get_response("Hello") + + +async def test_autogen_conversable_agent_invoke_with_recipient(mock_conversable_agent): + mock_conversable_agent.a_initiate_chat = AsyncMock() + mock_conversable_agent.a_initiate_chat.return_value = MagicMock( + chat_history=[ + {"role": "user", "content": "Hello from user!"}, + {"role": "assistant", "content": "Hello from assistant!"}, + ] + ) + agent = AutoGenConversableAgent(mock_conversable_agent) + recipient_agent = MagicMock(spec=AutoGenConversableAgent) + recipient_agent.conversable_agent = MagicMock(spec=ConversableAgent) + + messages = [] + async for msg in agent.invoke(recipient=recipient_agent, message="Test message", arg1="arg1"): + messages.append(msg) + + mock_conversable_agent.a_initiate_chat.assert_awaited_once() + assert len(messages) == 2 + assert messages[0].role == AuthorRole.USER + assert messages[0].content == "Hello from user!" + assert messages[1].role == AuthorRole.ASSISTANT + assert messages[1].content == "Hello from assistant!" 
+ + +async def test_autogen_conversable_agent_invoke_without_recipient_string_reply(mock_conversable_agent): + mock_conversable_agent.a_generate_reply = AsyncMock(return_value="Mocked assistant response") + agent = AutoGenConversableAgent(mock_conversable_agent) + + messages = [] + async for msg in agent.invoke(message="Hello"): + messages.append(msg) + + mock_conversable_agent.a_generate_reply.assert_awaited_once() + assert len(messages) == 1 + assert messages[0].role == AuthorRole.ASSISTANT + assert messages[0].content == "Mocked assistant response" + + +async def test_autogen_conversable_agent_invoke_without_recipient_dict_reply(mock_conversable_agent): + mock_conversable_agent.a_generate_reply = AsyncMock( + return_value={ + "content": "Mocked assistant response", + "role": "assistant", + "name": "AssistantName", + } + ) + agent = AutoGenConversableAgent(mock_conversable_agent) + + messages = [] + async for msg in agent.invoke(message="Hello"): + messages.append(msg) + + mock_conversable_agent.a_generate_reply.assert_awaited_once() + assert len(messages) == 1 + assert messages[0].role == AuthorRole.ASSISTANT + assert messages[0].content == "Mocked assistant response" + assert messages[0].name == "AssistantName" + + +async def test_autogen_conversable_agent_invoke_without_recipient_unexpected_type(mock_conversable_agent): + mock_conversable_agent.a_generate_reply = AsyncMock(return_value=12345) + agent = AutoGenConversableAgent(mock_conversable_agent) + + with pytest.raises(AgentInvokeException): + async for _ in agent.invoke(message="Hello"): + pass + + +async def test_autogen_conversable_agent_invoke_with_invalid_recipient_type(mock_conversable_agent): + mock_conversable_agent.a_generate_reply = AsyncMock(return_value=12345) + agent = AutoGenConversableAgent(mock_conversable_agent) + + recipient = MagicMock() + + with pytest.raises(AgentInvokeException): + async for _ in agent.invoke(recipient=recipient, message="Hello"): + pass diff --git a/python/tests/unit/agents/azure_ai_agent/conftest.py b/python/tests/unit/agents/azure_ai_agent/conftest.py new file mode 100644 index 000000000000..5f5b5082ae52 --- /dev/null +++ b/python/tests/unit/agents/azure_ai_agent/conftest.py @@ -0,0 +1,23 @@ +# Copyright (c) Microsoft. All rights reserved. + +from unittest.mock import AsyncMock + +import pytest +from azure.ai.projects.aio import AIProjectClient +from azure.ai.projects.models import Agent as AzureAIAgentModel + + +@pytest.fixture +def ai_project_client() -> AsyncMock: + return AsyncMock(spec=AIProjectClient) + + +@pytest.fixture +def ai_agent_definition() -> AsyncMock: + definition = AsyncMock(spec=AzureAIAgentModel) + definition.id = "agent123" + definition.name = "agentName" + definition.description = "desc" + definition.instructions = "test agent" + + return definition diff --git a/python/tests/unit/agents/azure_ai_agent/test_agent_content_generation.py b/python/tests/unit/agents/azure_ai_agent/test_agent_content_generation.py new file mode 100644 index 000000000000..3d912605cae6 --- /dev/null +++ b/python/tests/unit/agents/azure_ai_agent/test_agent_content_generation.py @@ -0,0 +1,279 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ + +from azure.ai.projects.models import ( + MessageDelta, + MessageDeltaChunk, + MessageDeltaImageFileContent, + MessageDeltaImageFileContentObject, + MessageDeltaTextContent, + MessageDeltaTextContentObject, + MessageDeltaTextFileCitationAnnotation, + MessageDeltaTextFileCitationAnnotationObject, + MessageDeltaTextFilePathAnnotation, + MessageDeltaTextFilePathAnnotationObject, + MessageImageFileContent, + MessageImageFileDetails, + MessageTextContent, + MessageTextDetails, + MessageTextFileCitationAnnotation, + MessageTextFileCitationDetails, + MessageTextFilePathAnnotation, + MessageTextFilePathDetails, + RunStep, + RunStepDeltaFunction, + RunStepDeltaFunctionToolCall, + RunStepDeltaToolCallObject, + ThreadMessage, +) + +from semantic_kernel.agents.azure_ai.agent_content_generation import ( + generate_annotation_content, + generate_code_interpreter_content, + generate_function_call_content, + generate_function_result_content, + generate_message_content, + generate_streaming_code_interpreter_content, + generate_streaming_function_content, + generate_streaming_message_content, + get_function_call_contents, + get_message_contents, +) +from semantic_kernel.contents.annotation_content import AnnotationContent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.file_reference_content import FileReferenceContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.contents.image_content import ImageContent +from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent +from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent +from semantic_kernel.contents.streaming_text_content import StreamingTextContent +from semantic_kernel.contents.text_content import TextContent +from semantic_kernel.contents.utils.author_role import AuthorRole + + +def test_get_message_contents_all_types(): + chat_msg = ChatMessageContent(role=AuthorRole.USER, content="") + chat_msg.items.append(TextContent(text="hello world")) + chat_msg.items.append(ImageContent(uri="http://example.com/image.png")) + chat_msg.items.append(FileReferenceContent(file_id="file123")) + chat_msg.items.append(FunctionResultContent(id="func1", result={"a": 1})) + results = get_message_contents(chat_msg) + assert len(results) == 4 + assert results[0]["type"] == "text" + assert results[1]["type"] == "image_url" + assert results[2]["type"] == "image_file" + assert results[3]["type"] == "text" + + +def test_generate_message_content_text_and_image(): + thread_msg = ThreadMessage( + content=[], + role="user", + ) + + image = MessageImageFileContent(image_file=MessageImageFileDetails(file_id="test_file_id")) + + text = MessageTextContent( + text=MessageTextDetails( + value="some text", + annotations=[ + MessageTextFileCitationAnnotation( + text="text", + file_citation=MessageTextFileCitationDetails(file_id="file_id", quote="some quote"), + start_index=0, + end_index=9, + ), + MessageTextFilePathAnnotation( + text="text again", + file_path=MessageTextFilePathDetails(file_id="file_id_2"), + start_index=1, + end_index=10, + ), + ], + ) + ) + + thread_msg.content = [image, text] + step = RunStep(id="step_id", run_id="run_id", thread_id="thread_id", assistant_id="assistant_id") + out = generate_message_content("assistant", thread_msg, step) + assert len(out.items) == 4 + assert 
isinstance(out.items[0], FileReferenceContent) + assert isinstance(out.items[1], TextContent) + assert isinstance(out.items[2], AnnotationContent) + assert isinstance(out.items[3], AnnotationContent) + + assert out.items[0].file_id == "test_file_id" + + assert out.items[1].text == "some text" + + assert out.items[2].file_id == "file_id" + assert out.items[2].quote == "text" + assert out.items[2].start_index == 0 + assert out.items[2].end_index == 9 + + assert out.items[3].file_id == "file_id_2" + assert out.items[3].quote == "text again" + assert out.items[3].start_index == 1 + assert out.items[3].end_index == 10 + + assert out.metadata["step_id"] == "step_id" + assert out.role == AuthorRole.USER + + +def test_generate_annotation_content(): + message_text_file_path_ann = MessageTextFilePathAnnotation( + text="some text", + file_path=MessageTextFilePathDetails(file_id="file123"), + start_index=0, + end_index=9, + ) + + message_text_file_citation_ann = MessageTextFileCitationAnnotation( + text="some text", + file_citation=MessageTextFileCitationDetails(file_id="file123"), + start_index=0, + end_index=9, + ) + + for fake_ann in [message_text_file_path_ann, message_text_file_citation_ann]: + out = generate_annotation_content(fake_ann) + assert out.file_id == "file123" + assert out.quote == "some text" + assert out.start_index == 0 + assert out.end_index == 9 + + +def test_generate_streaming_message_content_text_annotations(): + message_delta_image_file_content = MessageDeltaImageFileContent( + index=0, + image_file=MessageDeltaImageFileContentObject(file_id="image_file"), + ) + + MessageDeltaTextFileCitationAnnotation, MessageDeltaTextFilePathAnnotation + + message_delta_text_content = MessageDeltaTextContent( + index=0, + text=MessageDeltaTextContentObject( + value="some text", + annotations=[ + MessageDeltaTextFileCitationAnnotation( + index=0, + file_citation=MessageDeltaTextFileCitationAnnotationObject(file_id="file123"), + start_index=0, + end_index=9, + text="some text", + ), + MessageDeltaTextFilePathAnnotation( + index=0, + file_path=MessageDeltaTextFilePathAnnotationObject(file_id="file123"), + start_index=0, + end_index=9, + text="some text", + ), + ], + ), + ) + + delta = MessageDeltaChunk( + id="chunk123", + delta=MessageDelta(role="user", content=[message_delta_image_file_content, message_delta_text_content]), + ) + + out = generate_streaming_message_content("assistant", delta) + assert out is not None + assert out.content == "some text" + assert len(out.items) == 4 + assert out.items[0].file_id == "image_file" + assert isinstance(out.items[0], StreamingFileReferenceContent) + assert isinstance(out.items[1], StreamingTextContent) + assert isinstance(out.items[2], StreamingAnnotationContent) + + assert out.items[2].file_id == "file123" + assert out.items[2].quote == "some text" + assert out.items[2].start_index == 0 + assert out.items[2].end_index == 9 + + assert isinstance(out.items[3], StreamingAnnotationContent) + assert out.items[3].file_id == "file123" + assert out.items[3].quote == "some text" + assert out.items[3].start_index == 0 + assert out.items[3].end_index == 9 + + +def test_generate_streaming_function_content_with_function(): + step_details = RunStepDeltaToolCallObject( + tool_calls=[ + RunStepDeltaFunctionToolCall( + index=0, id="tool123", function=RunStepDeltaFunction(name="some_func", arguments={"arg": "val"}) + ) + ] + ) + + out = generate_streaming_function_content("my_agent", step_details) + assert out is not None + assert len(out.items) == 1 + assert 
isinstance(out.items[0], FunctionCallContent) + assert out.items[0].function_name == "some_func" + assert out.items[0].arguments == "{'arg': 'val'}" + + +def test_get_function_call_contents_no_action(): + run = type("ThreadRunFake", (), {"required_action": None})() + fc = get_function_call_contents(run, {}) + assert fc == [] + + +def test_get_function_call_contents_submit_tool_outputs(): + class FakeFunction: + name = "test_function" + arguments = {"arg": "val"} + + class FakeToolCall: + id = "tool_id" + function = FakeFunction() + + run = type( + "ThreadRunFake", + (), + { + "required_action": type( + "RequiredAction", (), {"submit_tool_outputs": type("FakeSubmit", (), {"tool_calls": [FakeToolCall()]})} + ) + }, + )() + function_steps = {} + fc = get_function_call_contents(run, function_steps) + assert len(fc) == 1 + assert function_steps["tool_id"].function_name == "test_function" + + +def test_generate_function_call_content(): + fcc = FunctionCallContent(id="id123", name="func_name", arguments={"x": 1}) + msg = generate_function_call_content("my_agent", [fcc]) + assert len(msg.items) == 1 + assert msg.role == AuthorRole.ASSISTANT + + +def test_generate_function_result_content(): + step = FunctionCallContent(id="123", name="func_name", arguments={"k": "v"}) + + class FakeToolCall: + function = type("Function", (), {"output": "result_data"}) + + tool_call = FakeToolCall() + msg = generate_function_result_content("my_agent", step, tool_call) + assert len(msg.items) == 1 + assert msg.items[0].result == "result_data" + assert msg.role == AuthorRole.TOOL + + +def test_generate_code_interpreter_content(): + msg = generate_code_interpreter_content("my_agent", "some_code()") + assert msg.content == "some_code()" + assert msg.metadata["code"] is True + + +def test_generate_streaming_code_interpreter_content_no_calls(): + step_details = type("Details", (), {"tool_calls": None}) + assert generate_streaming_code_interpreter_content("my_agent", step_details) is None diff --git a/python/tests/unit/agents/azure_ai_agent/test_agent_thread_actions.py b/python/tests/unit/agents/azure_ai_agent/test_agent_thread_actions.py new file mode 100644 index 000000000000..e26bf13a16d7 --- /dev/null +++ b/python/tests/unit/agents/azure_ai_agent/test_agent_thread_actions.py @@ -0,0 +1,326 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from datetime import datetime, timezone +from unittest.mock import AsyncMock, MagicMock, patch + +from azure.ai.projects.models import ( + MessageTextContent, + MessageTextDetails, + OpenAIPageableListOfRunStep, + RequiredFunctionToolCall, + RequiredFunctionToolCallDetails, + RunStep, + RunStepCodeInterpreterToolCall, + RunStepCodeInterpreterToolCallDetails, + RunStepFunctionToolCall, + RunStepFunctionToolCallDetails, + RunStepMessageCreationDetails, + RunStepMessageCreationReference, + RunStepToolCallDetails, + SubmitToolOutputsAction, + SubmitToolOutputsDetails, + ThreadMessage, + ThreadRun, +) + +from semantic_kernel.agents.azure_ai.agent_thread_actions import AgentThreadActions +from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent +from semantic_kernel.contents import FunctionCallContent, FunctionResultContent, TextContent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + + +async def test_agent_thread_actions_create_thread(): + class FakeAgentClient: + create_thread = AsyncMock(return_value=type("FakeThread", (), {"id": "thread123"})) + + class FakeClient: + agents = FakeAgentClient() + + client = FakeClient() + thread_id = await AgentThreadActions.create_thread(client) + assert thread_id == "thread123" + + +async def test_agent_thread_actions_create_message(): + class FakeAgentClient: + create_message = AsyncMock(return_value="someMessage") + + class FakeClient: + agents = FakeAgentClient() + + msg = ChatMessageContent(role=AuthorRole.USER, content="some content") + out = await AgentThreadActions.create_message(FakeClient(), "threadXYZ", msg) + assert out == "someMessage" + + +async def test_agent_thread_actions_create_message_no_content(): + class FakeAgentClient: + create_message = AsyncMock(return_value="should_not_be_called") + + class FakeClient: + agents = FakeAgentClient() + + message = ChatMessageContent(role=AuthorRole.USER, content=" ") + out = await AgentThreadActions.create_message(FakeClient(), "threadXYZ", message) + assert out is None + assert FakeAgentClient.create_message.await_count == 0 + + +async def test_agent_thread_actions_invoke(ai_project_client, ai_agent_definition): + agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) + + agent.client.agents = MagicMock() + + mock_thread_run = ThreadRun( + id="run123", + thread_id="thread123", + status="running", + instructions="test agent", + created_at=int(datetime.now(timezone.utc).timestamp()), + model="model", + ) + + agent.client.agents.create_run = AsyncMock(return_value=mock_thread_run) + + mock_run_steps = OpenAIPageableListOfRunStep( + data=[ + RunStep( + type="message_creation", + id="msg123", + thread_id="thread123", + run_id="run123", + created_at=int(datetime.now(timezone.utc).timestamp()), + completed_at=int(datetime.now(timezone.utc).timestamp()), + status="completed", + assistant_id="assistant123", + step_details=RunStepMessageCreationDetails( + message_creation=RunStepMessageCreationReference( + message_id="msg123", + ), + ), + ), + ] + ) + + agent.client.agents.list_run_steps = AsyncMock(return_value=mock_run_steps) + + mock_message = ThreadMessage( + id="msg123", + thread_id="thread123", + run_id="run123", + created_at=int(datetime.now(timezone.utc).timestamp()), + completed_at=int(datetime.now(timezone.utc).timestamp()), + status="completed", + assistant_id="assistant123", + role="assistant", + 
content=[MessageTextContent(text=MessageTextDetails(value="some message", annotations=[]))], + ) + + agent.client.agents.get_message = AsyncMock(return_value=mock_message) + + async for message in AgentThreadActions.invoke(agent=agent, thread_id="thread123", kernel=AsyncMock(spec=Kernel)): + assert message is not None + break + + +async def test_agent_thread_actions_invoke_with_requires_action(ai_project_client, ai_agent_definition): + agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) + agent.client.agents = MagicMock() + + mock_thread_run = ThreadRun( + id="run123", + thread_id="thread123", + status="running", + instructions="test agent", + created_at=int(datetime.now(timezone.utc).timestamp()), + model="model", + ) + + agent.client.agents.create_run = AsyncMock(return_value=mock_thread_run) + + poll_count = 0 + + async def mock_poll_run_status(*args, **kwargs): + nonlocal poll_count + if poll_count == 0: + mock_thread_run.status = "requires_action" + mock_thread_run.required_action = SubmitToolOutputsAction( + submit_tool_outputs=SubmitToolOutputsDetails( + tool_calls=[ + RequiredFunctionToolCall( + id="tool_call_id", + function=RequiredFunctionToolCallDetails( + name="mock_function_call", arguments={"arg": "value"} + ), + ) + ] + ) + ) + else: + mock_thread_run.status = "completed" + poll_count += 1 + return mock_thread_run + + def mock_get_function_call_contents(run: ThreadRun, function_steps: dict): + function_call_content = FunctionCallContent( + name="mock_function_call", + arguments={"arg": "value"}, + id="tool_call_id", + ) + function_steps[function_call_content.id] = function_call_content + return [function_call_content] + + mock_run_step_tool_calls = RunStep( + type="tool_calls", + id="tool_step123", + thread_id="thread123", + run_id="run123", + created_at=int(datetime.now(timezone.utc).timestamp()), + completed_at=int(datetime.now(timezone.utc).timestamp()), + status="completed", + assistant_id="assistant123", + step_details=RunStepToolCallDetails( + tool_calls=[ + RunStepCodeInterpreterToolCall( + id="tool_call_id", + code_interpreter=RunStepCodeInterpreterToolCallDetails( + input="some code", + ), + ), + RunStepFunctionToolCall( + id="tool_call_id", + function=RunStepFunctionToolCallDetails( + name="mock_function_call", + arguments={"arg": "value"}, + output="some output", + ), + ), + ] + ), + ) + + mock_run_step_message_creation = RunStep( + type="message_creation", + id="msg_step123", + thread_id="thread123", + run_id="run123", + created_at=int(datetime.now(timezone.utc).timestamp()), + completed_at=int(datetime.now(timezone.utc).timestamp()), + status="completed", + assistant_id="assistant123", + step_details=RunStepMessageCreationDetails( + message_creation=RunStepMessageCreationReference(message_id="msg123") + ), + ) + + mock_run_steps = OpenAIPageableListOfRunStep(data=[mock_run_step_tool_calls, mock_run_step_message_creation]) + agent.client.agents.list_run_steps = AsyncMock(return_value=mock_run_steps) + + mock_message = ThreadMessage( + id="msg123", + thread_id="thread123", + run_id="run123", + created_at=int(datetime.now(timezone.utc).timestamp()), + completed_at=int(datetime.now(timezone.utc).timestamp()), + status="completed", + assistant_id="assistant123", + role="assistant", + content=[MessageTextContent(text=MessageTextDetails(value="some message", annotations=[]))], + ) + agent.client.agents.get_message = AsyncMock(return_value=mock_message) + + agent.client.agents.submit_tool_outputs_to_run = AsyncMock() + + with ( + 
patch.object(AgentThreadActions, "_poll_run_status", side_effect=mock_poll_run_status), + patch( + "semantic_kernel.agents.azure_ai.agent_thread_actions.get_function_call_contents", + side_effect=mock_get_function_call_contents, + ), + ): + messages = [] + async for is_visible, content in AgentThreadActions.invoke( + agent=agent, + thread_id="thread123", + kernel=AsyncMock(spec=Kernel), + ): + messages.append((is_visible, content)) + + assert len(messages) == 4, "There should be four yields in total." + + assert isinstance(messages[0][1].items[0], FunctionCallContent) + assert isinstance(messages[1][1].items[0], TextContent) + assert messages[1][1].items[0].metadata.get("code") is True + assert isinstance(messages[2][1].items[0], FunctionResultContent) + assert isinstance(messages[3][1].items[0], TextContent) + + agent.client.agents.submit_tool_outputs_to_run.assert_awaited_once() + + +class MockEvent: + def __init__(self, event, data): + self.event = event + self.data = data + + def __iter__(self): + return iter((self.event, self.data, None)) + + +class MockRunData: + def __init__(self, id, status): + self.id = id + self.status = status + + +class MockAsyncIterable: + def __init__(self, items): + self.items = items.copy() + + def __aiter__(self): + self._iter = iter(self.items) + return self + + async def __anext__(self): + try: + return next(self._iter) + except StopIteration: + raise StopAsyncIteration + + +class MockStream: + def __init__(self, events): + self.events = events + + async def __aenter__(self): + return MockAsyncIterable(self.events) + + async def __aexit__(self, exc_type, exc_val, exc_tb): + pass + + +async def test_agent_thread_actions_invoke_stream(ai_project_client, ai_agent_definition): + agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) + agent.client.agents = AsyncMock() + + events = [ + MockEvent("thread.run.created", MockRunData(id="run_1", status="queued")), + MockEvent("thread.run.in_progress", MockRunData(id="run_1", status="in_progress")), + MockEvent("thread.run.completed", MockRunData(id="run_1", status="completed")), + ] + + main_run_stream = MockStream(events) + agent.client.agents.create_stream.return_value = main_run_stream + + with ( + patch.object(AgentThreadActions, "_invoke_function_calls", return_value=None), + patch.object(AgentThreadActions, "_format_tool_outputs", return_value=[{"type": "mock_tool_output"}]), + ): + collected_messages = [] + async for content in AgentThreadActions.invoke_stream( + agent=agent, + thread_id="thread123", + kernel=AsyncMock(spec=Kernel), + ): + collected_messages.append(content) diff --git a/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent.py b/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent.py new file mode 100644 index 000000000000..0e2e676bc33f --- /dev/null +++ b/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent.py @@ -0,0 +1,145 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from unittest.mock import MagicMock, patch + +import pytest +from azure.ai.projects.aio import AIProjectClient +from azure.identity.aio import DefaultAzureCredential + +from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent +from semantic_kernel.agents.channels.agent_channel import AgentChannel +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions.agent_exceptions import AgentInvokeException + + +async def test_azure_ai_agent_init(ai_project_client, ai_agent_definition): + agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) + assert agent.id == "agent123" + assert agent.name == "agentName" + assert agent.description == "desc" + + +async def test_azure_ai_agent_init_with_plugins_via_constructor( + ai_project_client, ai_agent_definition, custom_plugin_class +): + agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition, plugins=[custom_plugin_class()]) + assert agent.id == "agent123" + assert agent.name == "agentName" + assert agent.description == "desc" + assert agent.kernel.plugins is not None + assert len(agent.kernel.plugins) == 1 + + +async def test_azure_ai_agent_add_chat_message(ai_project_client, ai_agent_definition): + agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) + with patch( + "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.create_message", + ): + await agent.add_chat_message("threadId", ChatMessageContent(role="user", content="text")) # pass anything + + +async def test_azure_ai_agent_get_response(ai_project_client, ai_agent_definition): + agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) + + async def fake_invoke(*args, **kwargs): + yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") + + with patch( + "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.invoke", + side_effect=fake_invoke, + ): + response = await agent.get_response("thread_id") + assert response.role == AuthorRole.ASSISTANT + assert response.content == "content" + + +async def test_azure_ai_agent_get_response_exception(ai_project_client, ai_agent_definition): + agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) + + async def fake_invoke(*args, **kwargs): + yield False, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") + + with ( + patch( + "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.invoke", + side_effect=fake_invoke, + ), + pytest.raises(AgentInvokeException), + ): + await agent.get_response("thread_id") + + +async def test_azure_ai_agent_invoke(ai_project_client, ai_agent_definition): + agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) + results = [] + + async def fake_invoke(*args, **kwargs): + yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") + + with patch( + "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.invoke", + side_effect=fake_invoke, + ): + async for item in agent.invoke("thread_id"): + results.append(item) + + assert len(results) == 1 + + +async def test_azure_ai_agent_invoke_stream(ai_project_client, ai_agent_definition): + agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) + results = [] + + async def fake_invoke(*args, **kwargs): + yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, 
content="content") + + with patch( + "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.invoke_stream", + side_effect=fake_invoke, + ): + async for item in agent.invoke_stream("thread_id"): + results.append(item) + + assert len(results) == 1 + + +def test_azure_ai_agent_get_channel_keys(ai_project_client, ai_agent_definition): + agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) + keys = list(agent.get_channel_keys()) + assert len(keys) >= 3 + + +async def test_azure_ai_agent_create_channel(ai_project_client, ai_agent_definition): + agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) + with patch( + "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.create_thread", + side_effect="t", + ): + ch = await agent.create_channel() + assert isinstance(ch, AgentChannel) + assert ch.thread_id == "t" + + +def test_create_client(): + conn_str = "endpoint;subscription_id;resource_group;project_name" + credential = MagicMock(spec=DefaultAzureCredential) + + with patch("azure.ai.projects.aio.AIProjectClient.from_connection_string") as mock_from_conn_str: + mock_client = MagicMock(spec=AIProjectClient) + mock_from_conn_str.return_value = mock_client + + client = AzureAIAgent.create_client( + credential=credential, + conn_str=conn_str, + extra_arg="extra_value", + ) + + mock_from_conn_str.assert_called_once() + _, actual_kwargs = mock_from_conn_str.call_args + + assert actual_kwargs["credential"] is credential + assert actual_kwargs["conn_str"] == conn_str + assert actual_kwargs["extra_arg"] == "extra_value" + assert actual_kwargs["user_agent"] is not None + assert client is mock_client diff --git a/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_settings.py b/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_settings.py new file mode 100644 index 000000000000..b70159d08f3f --- /dev/null +++ b/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_settings.py @@ -0,0 +1,34 @@ +# Copyright (c) Microsoft. All rights reserved. + +import pytest +from pydantic import Field, SecretStr, ValidationError + +from semantic_kernel.kernel_pydantic import KernelBaseSettings +from semantic_kernel.utils.feature_stage_decorator import experimental + + +@experimental +class AzureAIAgentSettings(KernelBaseSettings): + """Slightly modified to ensure invalid data raises ValidationError.""" + + env_prefix = "AZURE_AI_AGENT_" + model_deployment_name: str = Field(min_length=1) + project_connection_string: SecretStr = Field(..., min_length=1) + + +def test_azure_ai_agent_settings_valid(): + settings = AzureAIAgentSettings( + model_deployment_name="test_model", + project_connection_string="secret_value", + ) + assert settings.model_deployment_name == "test_model" + assert settings.project_connection_string.get_secret_value() == "secret_value" + + +def test_azure_ai_agent_settings_invalid(): + with pytest.raises(ValidationError): + # Should fail due to min_length=1 constraints + AzureAIAgentSettings( + model_deployment_name="", # empty => invalid + project_connection_string="", + ) diff --git a/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_utils.py b/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_utils.py new file mode 100644 index 000000000000..74237e1e0b33 --- /dev/null +++ b/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_utils.py @@ -0,0 +1,51 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from azure.ai.projects.models import MessageAttachment, MessageRole + +from semantic_kernel.agents.azure_ai.azure_ai_agent_utils import AzureAIAgentUtils +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.file_reference_content import FileReferenceContent +from semantic_kernel.contents.utils.author_role import AuthorRole + + +def test_azure_ai_agent_utils_get_thread_messages_none(): + msgs = AzureAIAgentUtils.get_thread_messages([]) + assert msgs is None + + +def test_azure_ai_agent_utils_get_thread_messages(): + msg1 = ChatMessageContent(role=AuthorRole.USER, content="Hello!") + msg1.items.append(FileReferenceContent(file_id="file123")) + results = AzureAIAgentUtils.get_thread_messages([msg1]) + assert len(results) == 1 + assert results[0].content == "Hello!" + assert results[0].role == MessageRole.USER + assert len(results[0].attachments) == 1 + assert isinstance(results[0].attachments[0], MessageAttachment) + + +def test_azure_ai_agent_utils_get_attachments_empty(): + msg1 = ChatMessageContent(role=AuthorRole.USER, content="No file items") + atts = AzureAIAgentUtils.get_attachments(msg1) + assert atts == [] + + +def test_azure_ai_agent_utils_get_attachments_file(): + msg1 = ChatMessageContent(role=AuthorRole.USER, content="One file item") + msg1.items.append(FileReferenceContent(file_id="file123")) + atts = AzureAIAgentUtils.get_attachments(msg1) + assert len(atts) == 1 + assert atts[0].file_id == "file123" + + +def test_azure_ai_agent_utils_get_metadata(): + msg1 = ChatMessageContent(role=AuthorRole.USER, content="has meta", metadata={"k": 123}) + meta = AzureAIAgentUtils.get_metadata(msg1) + assert meta["k"] == "123" + + +def test_azure_ai_agent_utils_get_tool_definition(): + gen = AzureAIAgentUtils._get_tool_definition(["file_search", "code_interpreter", "non_existent"]) + # file_search & code_interpreter exist, non_existent yields nothing + tools_list = list(gen) + assert len(tools_list) == 2 diff --git a/python/tests/unit/agents/azure_ai_agent/test_azure_ai_channel.py b/python/tests/unit/agents/azure_ai_agent/test_azure_ai_channel.py new file mode 100644 index 000000000000..1bb85c8b5e9a --- /dev/null +++ b/python/tests/unit/agents/azure_ai_agent/test_azure_ai_channel.py @@ -0,0 +1,112 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from unittest.mock import AsyncMock, patch + +import pytest +from azure.ai.projects.aio import AIProjectClient + +from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent +from semantic_kernel.agents.azure_ai.azure_ai_channel import AzureAIChannel +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions.agent_exceptions import AgentChatException + + +async def test_azure_ai_channel_receive(): + class FakeAgentClient: + create_message = AsyncMock() + + class FakeClient: + agents = FakeAgentClient() + + channel = AzureAIChannel(FakeClient(), "thread123") + await channel.receive([ChatMessageContent(role=AuthorRole.USER, content="Hello")]) + FakeAgentClient.create_message.assert_awaited_once() + + +async def test_azure_ai_channel_invoke_invalid_agent(): + channel = AzureAIChannel(AsyncMock(spec=AIProjectClient), "thread123") + with pytest.raises(AgentChatException): + async for _ in channel.invoke(object()): + pass + + +async def test_azure_ai_channel_invoke_valid_agent(ai_project_client, ai_agent_definition): + agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) + + async def fake_invoke(*args, **kwargs): + yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") + + with patch( + "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.invoke", + side_effect=fake_invoke, + ): + channel = AzureAIChannel(ai_project_client, "thread123") + results = [] + async for is_visible, msg in channel.invoke(agent): + results.append((is_visible, msg)) + + assert len(results) == 1 + + +async def test_azure_ai_channel_invoke_stream_valid_agent(ai_project_client, ai_agent_definition): + agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) + + async def fake_invoke(*args, **kwargs): + yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") + + with patch( + "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.invoke_stream", + side_effect=fake_invoke, + ): + channel = AzureAIChannel(ai_project_client, "thread123") + results = [] + async for is_visible, msg in channel.invoke_stream(agent, messages=[]): + results.append((is_visible, msg)) + + assert len(results) == 1 + + +async def test_azure_ai_channel_get_history(): + # We need to return an async iterable, so let's do an AsyncMock returning an _async_gen + class FakeAgentClient: + delete_thread = AsyncMock() + # We'll patch get_messages directly below + + class FakeClient: + agents = FakeAgentClient() + + channel = AzureAIChannel(FakeClient(), "threadXYZ") + + async def fake_get_messages(client, thread_id): + # Must produce an async iterable + yield ChatMessageContent(role=AuthorRole.ASSISTANT, content="Previous msg") + + with patch( + "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.get_messages", + new=fake_get_messages, # direct replacement with a coroutine + ): + results = [] + async for item in channel.get_history(): + results.append(item) + + assert len(results) == 1 + assert results[0].content == "Previous msg" + + +async def test_azure_ai_channel_reset(): + class FakeAgentClient: + delete_thread = AsyncMock() + + class FakeClient: + agents = FakeAgentClient() + + channel = AzureAIChannel(FakeClient(), "threadXYZ") + await channel.reset() + FakeAgentClient.delete_thread.assert_awaited_once_with(thread_id="threadXYZ") + + +# Helper for returning an async generator 
+async def _async_gen(items): + for i in items: + yield i diff --git a/python/tests/unit/agents/bedrock_agent/conftest.py b/python/tests/unit/agents/bedrock_agent/conftest.py new file mode 100644 index 000000000000..b76ae70b88a5 --- /dev/null +++ b/python/tests/unit/agents/bedrock_agent/conftest.py @@ -0,0 +1,180 @@ +# Copyright (c) Microsoft. All rights reserved. + +from collections.abc import Callable + +import pytest + +from semantic_kernel.agents.bedrock.models.bedrock_agent_event_type import BedrockAgentEventType +from semantic_kernel.agents.bedrock.models.bedrock_agent_model import BedrockAgentModel +from semantic_kernel.agents.bedrock.models.bedrock_agent_status import BedrockAgentStatus +from semantic_kernel.kernel import Kernel + + +@pytest.fixture() +def bedrock_agent_unit_test_env(monkeypatch, exclude_list, override_env_param_dict): + """Fixture to set environment variables for Amazon Bedrock Agent unit tests.""" + if exclude_list is None: + exclude_list = [] + + if override_env_param_dict is None: + override_env_param_dict = {} + + env_vars = { + "BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN": "TEST_BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN", + "BEDROCK_AGENT_FOUNDATION_MODEL": "TEST_BEDROCK_AGENT_FOUNDATION_MODEL", + } + + env_vars.update(override_env_param_dict) + + for key, value in env_vars.items(): + if key not in exclude_list: + monkeypatch.setenv(key, value) + else: + monkeypatch.delenv(key, raising=False) + + return env_vars + + +@pytest.fixture +def kernel_with_function(kernel: Kernel, decorated_native_function: Callable) -> Kernel: + kernel.add_function("test_plugin", function=decorated_native_function) + + return kernel + + +@pytest.fixture +def new_agent_name(): + return "test_agent_name" + + +@pytest.fixture +def bedrock_agent_model(): + return BedrockAgentModel( + agent_name="test_agent_name", + foundation_model="test_foundation_model", + agent_status=BedrockAgentStatus.NOT_PREPARED, + ) + + +@pytest.fixture +def bedrock_agent_model_with_id(): + return BedrockAgentModel( + agent_id="test_agent_id", + agent_name="test_agent_name", + foundation_model="test_foundation_model", + agent_status=BedrockAgentStatus.NOT_PREPARED, + ) + + +@pytest.fixture +def bedrock_agent_model_with_id_prepared_dict(): + return { + "agent": { + "agentId": "test_agent_id", + "agentName": "test_agent_name", + "foundationModel": "test_foundation_model", + "agentStatus": "PREPARED", + } + } + + +@pytest.fixture +def bedrock_agent_model_with_id_preparing_dict(): + return { + "agent": { + "agentId": "test_agent_id", + "agentName": "test_agent_name", + "foundationModel": "test_foundation_model", + "agentStatus": "PREPARING", + } + } + + +@pytest.fixture +def bedrock_agent_model_with_id_not_prepared_dict(): + return { + "agent": { + "agentId": "test_agent_id", + "agentName": "test_agent_name", + "foundationModel": "test_foundation_model", + "agentStatus": "NOT_PREPARED", + } + } + + +@pytest.fixture +def existing_agent_not_prepared_model(): + return BedrockAgentModel( + agent_id="test_agent_id", + agent_name="test_agent_name", + foundation_model="test_foundation_model", + agent_status=BedrockAgentStatus.NOT_PREPARED, + ) + + +@pytest.fixture +def bedrock_action_group_mode_dict(): + return { + "agentActionGroup": { + "actionGroupId": "test_action_group_id", + "actionGroupName": "test_action_group_name", + } + } + + +@pytest.fixture +def simple_response(): + return "test response" + + +@pytest.fixture +def bedrock_agent_non_streaming_empty_response(): + return { + "completion": [], + } + + +@pytest.fixture 
+def bedrock_agent_non_streaming_simple_response(simple_response): + return { + "completion": [ + { + "chunk": {"bytes": bytes(simple_response, "utf-8")}, + }, + ], + } + + +@pytest.fixture +def bedrock_agent_streaming_simple_response(simple_response): + return { + "completion": [ + { + "chunk": {"bytes": bytes(chunk, "utf-8")}, + } + for chunk in simple_response + ] + } + + +@pytest.fixture +def bedrock_agent_function_call_response(): + return { + "completion": [ + { + BedrockAgentEventType.RETURN_CONTROL: { + "invocationId": "test_invocation_id", + "invocationInputs": [ + { + "functionInvocationInput": { + "function": "test_function", + "parameters": [ + {"name": "test_parameter_name", "value": "test_parameter_value"}, + ], + }, + }, + ], + }, + }, + ], + } diff --git a/python/tests/unit/agents/bedrock_agent/test_action_group_utils.py b/python/tests/unit/agents/bedrock_agent/test_action_group_utils.py new file mode 100644 index 000000000000..9898e457c40e --- /dev/null +++ b/python/tests/unit/agents/bedrock_agent/test_action_group_utils.py @@ -0,0 +1,93 @@ +# Copyright (c) Microsoft. All rights reserved. + +import pytest + +from semantic_kernel.agents.bedrock.action_group_utils import ( + BEDROCK_FUNCTION_ALLOWED_PARAMETER_TYPES, + kernel_function_parameter_type_to_bedrock_function_parameter_type, + kernel_function_to_bedrock_function_schema, + parse_function_result_contents, + parse_return_control_payload, +) +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.kernel import Kernel + + +def test_kernel_function_to_bedrock_function_schema(kernel_with_function: Kernel): + # Test the conversion of kernel function to bedrock function schema + function_choice_behavior = FunctionChoiceBehavior.Auto() + function_choice_configuration = function_choice_behavior.get_config(kernel_with_function) + result = kernel_function_to_bedrock_function_schema(function_choice_configuration) + assert result == { + "functions": [ + { + "name": "test_plugin-getLightStatus", + "parameters": { + "arg1": { + "type": "string", + "required": True, + } + }, + "requireConfirmation": "DISABLED", + } + ] + } + + +def test_kernel_function_parameter_type_to_bedrock_function_parameter_type(): + # Test the conversion of kernel function parameter type to bedrock function parameter type + schema_data = {"type": "string"} + result = kernel_function_parameter_type_to_bedrock_function_parameter_type(schema_data) + assert result == "string" + + +def test_kernel_function_parameter_type_to_bedrock_function_parameter_type_invalid(): + # Test the conversion of invalid kernel function parameter type to bedrock function parameter type + schema_data = {"type": "invalid_type"} + with pytest.raises( + ValueError, + match="Type invalid_type is not allowed in bedrock function parameter type. 
" + f"Allowed types are {BEDROCK_FUNCTION_ALLOWED_PARAMETER_TYPES}.", + ): + kernel_function_parameter_type_to_bedrock_function_parameter_type(schema_data) + + +def test_parse_return_control_payload(): + # Test the parsing of return control payload to function call contents + return_control_payload = { + "invocationId": "test_invocation_id", + "invocationInputs": [ + { + "functionInvocationInput": { + "function": "test_function", + "parameters": [ + {"name": "param1", "value": "value1"}, + {"name": "param2", "value": "value2"}, + ], + } + } + ], + } + result = parse_return_control_payload(return_control_payload) + assert len(result) == 1 + assert result[0].id == "test_invocation_id" + assert result[0].name == "test_function" + assert result[0].arguments == {"param1": "value1", "param2": "value2"} + + +def test_parse_function_result_contents(): + # Test the parsing of function result contents to be returned to the agent + function_result_contents = [ + FunctionResultContent( + id="test_id", + name="test_function", + result="test_result", + metadata={"functionInvocationInput": {"actionGroup": "test_action_group"}}, + ) + ] + result = parse_function_result_contents(function_result_contents) + assert len(result) == 1 + assert result[0]["functionResult"]["actionGroup"] == "test_action_group" + assert result[0]["functionResult"]["function"] == "test_function" + assert result[0]["functionResult"]["responseBody"]["TEXT"]["body"] == "test_result" diff --git a/python/tests/unit/agents/bedrock_agent/test_bedrock_action_group_model.py b/python/tests/unit/agents/bedrock_agent/test_bedrock_action_group_model.py new file mode 100644 index 000000000000..e76abea04cb4 --- /dev/null +++ b/python/tests/unit/agents/bedrock_agent/test_bedrock_action_group_model.py @@ -0,0 +1,33 @@ +# Copyright (c) Microsoft. All rights reserved. + +import pytest +from pydantic import ValidationError + +from semantic_kernel.agents.bedrock.models.bedrock_action_group_model import BedrockActionGroupModel + + +def test_bedrock_action_group_model_valid(): + """Test case to verify the BedrockActionGroupModel with valid data.""" + model = BedrockActionGroupModel(actionGroupId="test_id", actionGroupName="test_name") + assert model.action_group_id == "test_id" + assert model.action_group_name == "test_name" + + +def test_bedrock_action_group_model_missing_action_group_id(): + """Test case to verify error handling when actionGroupId is missing.""" + with pytest.raises(ValidationError): + BedrockActionGroupModel(actionGroupName="test_name") + + +def test_bedrock_action_group_model_missing_action_group_name(): + """Test case to verify error handling when actionGroupName is missing.""" + with pytest.raises(ValidationError): + BedrockActionGroupModel(actionGroupId="test_id") + + +def test_bedrock_action_group_model_extra_field(): + """Test case to verify the BedrockActionGroupModel with an extra field.""" + model = BedrockActionGroupModel(actionGroupId="test_id", actionGroupName="test_name", extraField="extra_value") + assert model.action_group_id == "test_id" + assert model.action_group_name == "test_name" + assert model.extraField == "extra_value" diff --git a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent.py b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent.py new file mode 100644 index 000000000000..ddf49aca36ad --- /dev/null +++ b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent.py @@ -0,0 +1,633 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from unittest.mock import AsyncMock, Mock, patch + +import boto3 +import pytest + +from semantic_kernel.agents.bedrock.action_group_utils import ( + kernel_function_to_bedrock_function_schema, + parse_function_result_contents, +) +from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException, AgentInvokeException +from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.kernel import Kernel + +# region Agent Initialization Tests + + +# Test case to verify BedrockAgent initialization +@patch.object(boto3, "client", return_value=Mock()) +async def test_bedrock_agent_initialization(client, bedrock_agent_model_with_id): + agent = BedrockAgent(bedrock_agent_model_with_id) + + assert agent.name == bedrock_agent_model_with_id.agent_name + assert agent.agent_model.agent_name == bedrock_agent_model_with_id.agent_name + assert agent.agent_model.agent_id == bedrock_agent_model_with_id.agent_id + assert agent.agent_model.foundation_model == bedrock_agent_model_with_id.foundation_model + + +# Test case to verify error handling during BedrockAgent initialization with non-auto function choice +@patch.object(boto3, "client", return_value=Mock()) +async def test_bedrock_agent_initialization_error_with_non_auto_function_choice(client, bedrock_agent_model_with_id): + with pytest.raises(ValueError, match="Only FunctionChoiceType.AUTO is supported."): + BedrockAgent( + bedrock_agent_model_with_id, + function_choice_behavior=FunctionChoiceBehavior.NoneInvoke(), + ) + + +# Test case to verify the creation of BedrockAgent +@patch.object(boto3, "client", return_value=Mock()) +@pytest.mark.parametrize( + "kernel, function_choice_behavior, arguments", + [ + (None, None, None), + (Kernel(), None, None), + (Kernel(), FunctionChoiceBehavior.Auto(), None), + (Kernel(), FunctionChoiceBehavior.Auto(), KernelArguments()), + ], +) +async def test_bedrock_agent_create_and_prepare_agent( + client, + bedrock_agent_model_with_id_not_prepared_dict, + bedrock_agent_unit_test_env, + kernel, + function_choice_behavior, + arguments, +): + with ( + patch.object(client, "create_agent") as mock_create_agent, + patch.object(BedrockAgent, "_wait_for_agent_status", new_callable=AsyncMock), + patch.object(BedrockAgent, "prepare_agent_and_wait_until_prepared", new_callable=AsyncMock), + ): + mock_create_agent.return_value = bedrock_agent_model_with_id_not_prepared_dict + + agent = await BedrockAgent.create_and_prepare_agent( + name=bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentName"], + instructions="test_instructions", + bedrock_client=client, + env_file_path="fake_path", + kernel=kernel, + function_choice_behavior=function_choice_behavior, + arguments=arguments, + ) + + mock_create_agent.assert_called_once_with( + agentName=bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentName"], + foundationModel=bedrock_agent_unit_test_env["BEDROCK_AGENT_FOUNDATION_MODEL"], + agentResourceRoleArn=bedrock_agent_unit_test_env["BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN"], + instruction="test_instructions", + ) + assert agent.agent_model.agent_id == bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentId"] + assert agent.id == bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentId"] + assert 
agent.agent_model.agent_name == bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentName"] + assert agent.name == bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentName"] + assert ( + agent.agent_model.foundation_model + == bedrock_agent_model_with_id_not_prepared_dict["agent"]["foundationModel"] + ) + assert agent.kernel is not None + assert agent.function_choice_behavior is not None + if arguments: + assert agent.arguments is not None + + +# Test case to verify the creation of BedrockAgent +@pytest.mark.parametrize( + "exclude_list", + [ + ["BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN"], + ["BEDROCK_AGENT_FOUNDATION_MODEL"], + ], + indirect=True, +) +@patch.object(boto3, "client", return_value=Mock()) +async def test_bedrock_agent_create_and_prepare_agent_settings_validation_error( + client, + bedrock_agent_model_with_id_not_prepared_dict, + bedrock_agent_unit_test_env, +): + with pytest.raises(AgentInitializationException): + await BedrockAgent.create_and_prepare_agent( + name=bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentName"], + instructions="test_instructions", + env_file_path="fake_path", + ) + + +# Test case to verify the creation of BedrockAgent +@patch.object(boto3, "client", return_value=Mock()) +async def test_bedrock_agent_create_and_prepare_agent_service_exception( + client, + bedrock_agent_model_with_id_not_prepared_dict, + bedrock_agent_unit_test_env, +): + with ( + patch.object(client, "create_agent") as mock_create_agent, + patch.object(BedrockAgent, "prepare_agent_and_wait_until_prepared", new_callable=AsyncMock), + ): + from botocore.exceptions import ClientError + + mock_create_agent.side_effect = ClientError({}, "create_agent") + + with pytest.raises(AgentInitializationException): + await BedrockAgent.create_and_prepare_agent( + name=bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentName"], + instructions="test_instructions", + bedrock_client=client, + env_file_path="fake_path", + ) + + +@patch.object(boto3, "client", return_value=Mock()) +async def test_bedrock_agent_prepare_agent_and_wait_until_prepared( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, + bedrock_agent_model_with_id_preparing_dict, + bedrock_agent_model_with_id_prepared_dict, +): + agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) + + with ( + patch.object(client, "get_agent") as mock_get_agent, + patch.object(client, "prepare_agent") as mock_prepare_agent, + ): + mock_get_agent.side_effect = [ + bedrock_agent_model_with_id_preparing_dict, + bedrock_agent_model_with_id_prepared_dict, + ] + + await agent.prepare_agent_and_wait_until_prepared() + + mock_prepare_agent.assert_called_once_with(agentId=bedrock_agent_model_with_id.agent_id) + assert mock_get_agent.call_count == 2 + assert agent.agent_model.agent_status == "PREPARED" + + +@patch.object(boto3, "client", return_value=Mock()) +async def test_bedrock_agent_prepare_agent_and_wait_until_prepared_fail( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, + bedrock_agent_model_with_id_preparing_dict, +): + agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) + + with ( + patch.object(client, "get_agent") as mock_get_agent, + patch.object(client, "prepare_agent"), + ): + mock_get_agent.side_effect = [ + bedrock_agent_model_with_id_preparing_dict, + bedrock_agent_model_with_id_preparing_dict, + bedrock_agent_model_with_id_preparing_dict, + bedrock_agent_model_with_id_preparing_dict, + bedrock_agent_model_with_id_preparing_dict, 
+ bedrock_agent_model_with_id_preparing_dict, + ] + + with pytest.raises(TimeoutError): + await agent.prepare_agent_and_wait_until_prepared() + + +# Test case to verify the creation of a code interpreter action group +@patch.object(boto3, "client", return_value=Mock()) +async def test_create_code_interpreter_action_group( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, + bedrock_action_group_mode_dict, +): + agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) + + with ( + patch.object(client, "create_agent_action_group") as mock_create_action_group, + patch.object( + BedrockAgent, "prepare_agent_and_wait_until_prepared" + ) as mock_prepare_agent_and_wait_until_prepared, + ): + mock_create_action_group.return_value = bedrock_action_group_mode_dict + action_group_model = await agent.create_code_interpreter_action_group() + + mock_create_action_group.assert_called_once_with( + agentId=agent.agent_model.agent_id, + agentVersion=agent.agent_model.agent_version or "DRAFT", + actionGroupName=f"{agent.agent_model.agent_name}_code_interpreter", + actionGroupState="ENABLED", + parentActionGroupSignature="AMAZON.CodeInterpreter", + ) + assert action_group_model.action_group_id == bedrock_action_group_mode_dict["agentActionGroup"]["actionGroupId"] + mock_prepare_agent_and_wait_until_prepared.assert_called_once() + + +# Test case to verify the creation of BedrockAgent with plugins +@patch.object(boto3, "client", return_value=Mock()) +async def test_bedrock_agent_create_with_plugin_via_constructor( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, + custom_plugin_class, +): + agent = BedrockAgent( + bedrock_agent_model_with_id, + plugins=[custom_plugin_class()], + bedrock_client=client, + ) + + assert agent.kernel.plugins is not None + assert len(agent.kernel.plugins) == 1 + + +# Test case to verify the creation of a user input action group +@patch.object(boto3, "client", return_value=Mock()) +async def test_create_user_input_action_group( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, + bedrock_action_group_mode_dict, +): + agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) + + with ( + patch.object(agent.bedrock_client, "create_agent_action_group") as mock_create_action_group, + patch.object( + BedrockAgent, "prepare_agent_and_wait_until_prepared" + ) as mock_prepare_agent_and_wait_until_prepared, + ): + mock_create_action_group.return_value = bedrock_action_group_mode_dict + action_group_model = await agent.create_user_input_action_group() + + mock_create_action_group.assert_called_once_with( + agentId=agent.agent_model.agent_id, + agentVersion=agent.agent_model.agent_version or "DRAFT", + actionGroupName=f"{agent.agent_model.agent_name}_user_input", + actionGroupState="ENABLED", + parentActionGroupSignature="AMAZON.UserInput", + ) + assert action_group_model.action_group_id == bedrock_action_group_mode_dict["agentActionGroup"]["actionGroupId"] + mock_prepare_agent_and_wait_until_prepared.assert_called_once() + + +# Test case to verify the creation of a kernel function action group +@patch.object(boto3, "client", return_value=Mock()) +async def test_create_kernel_function_action_group( + client, + kernel_with_function, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, + bedrock_action_group_mode_dict, +): + agent = BedrockAgent(bedrock_agent_model_with_id, kernel=kernel_with_function, bedrock_client=client) + + with ( + patch.object(agent.bedrock_client, 
"create_agent_action_group") as mock_create_action_group, + patch.object( + BedrockAgent, "prepare_agent_and_wait_until_prepared" + ) as mock_prepare_agent_and_wait_until_prepared, + ): + mock_create_action_group.return_value = bedrock_action_group_mode_dict + + action_group_model = await agent.create_kernel_function_action_group() + + mock_create_action_group.assert_called_once_with( + agentId=agent.agent_model.agent_id, + agentVersion=agent.agent_model.agent_version or "DRAFT", + actionGroupName=f"{agent.agent_model.agent_name}_kernel_function", + actionGroupState="ENABLED", + actionGroupExecutor={"customControl": "RETURN_CONTROL"}, + functionSchema=kernel_function_to_bedrock_function_schema( + agent.function_choice_behavior.get_config(kernel_with_function) + ), + ) + assert action_group_model.action_group_id == bedrock_action_group_mode_dict["agentActionGroup"]["actionGroupId"] + mock_prepare_agent_and_wait_until_prepared.assert_called_once() + + +# Test case to verify the association of an agent with a knowledge base +@patch.object(boto3, "client", return_value=Mock()) +async def test_associate_agent_knowledge_base( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, +): + agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) + + with ( + patch.object(agent.bedrock_client, "associate_agent_knowledge_base") as mock_associate_knowledge_base, + patch.object( + BedrockAgent, "prepare_agent_and_wait_until_prepared" + ) as mock_prepare_agent_and_wait_until_prepared, + ): + await agent.associate_agent_knowledge_base("test_knowledge_base_id") + + mock_associate_knowledge_base.assert_called_once_with( + agentId=agent.agent_model.agent_id, + agentVersion=agent.agent_model.agent_version, + knowledgeBaseId="test_knowledge_base_id", + ) + mock_prepare_agent_and_wait_until_prepared.assert_called_once() + + +# Test case to verify the disassociation of an agent with a knowledge base +@patch.object(boto3, "client", return_value=Mock()) +async def test_disassociate_agent_knowledge_base( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, +): + agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) + + with ( + patch.object(agent.bedrock_client, "disassociate_agent_knowledge_base") as mock_disassociate_knowledge_base, + patch.object( + BedrockAgent, "prepare_agent_and_wait_until_prepared" + ) as mock_prepare_agent_and_wait_until_prepared, + ): + await agent.disassociate_agent_knowledge_base("test_knowledge_base_id") + mock_disassociate_knowledge_base.assert_called_once_with( + agentId=agent.agent_model.agent_id, + agentVersion=agent.agent_model.agent_version, + knowledgeBaseId="test_knowledge_base_id", + ) + mock_prepare_agent_and_wait_until_prepared.assert_called_once() + + +# Test case to verify listing associated knowledge bases with an agent +@patch.object(boto3, "client", return_value=Mock()) +async def test_list_associated_agent_knowledge_bases( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, +): + agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) + + with patch.object(agent.bedrock_client, "list_agent_knowledge_bases") as mock_list_knowledge_bases: + await agent.list_associated_agent_knowledge_bases() + + mock_list_knowledge_bases.assert_called_once_with( + agentId=agent.agent_model.agent_id, + agentVersion=agent.agent_model.agent_version, + ) + + +# endregion + +# region Agent Deletion Tests + + +@patch.object(boto3, "client", return_value=Mock()) +async def 
test_delete_agent( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, +): + agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) + + agent_id = bedrock_agent_model_with_id.agent_id + with patch.object(agent.bedrock_client, "delete_agent") as mock_delete_agent: + await agent.delete_agent() + + mock_delete_agent.assert_called_once_with(agentId=agent_id) + assert agent.agent_model.agent_id is None + + +# Test case to verify error handling when deleting an agent that does not exist +@patch.object(boto3, "client", return_value=Mock()) +async def test_delete_agent_twice_error( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, +): + agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) + + with patch.object(agent.bedrock_client, "delete_agent"): + await agent.delete_agent() + + with pytest.raises(ValueError): + await agent.delete_agent() + + +# Test case to verify error handling when there is a client error during agent deletion +@patch.object(boto3, "client", return_value=Mock()) +async def test_delete_agent_client_error( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, +): + agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) + + with patch.object(agent.bedrock_client, "delete_agent") as mock_delete_agent: + from botocore.exceptions import ClientError + + mock_delete_agent.side_effect = ClientError({"Error": {"Code": "500"}}, "delete_agent") + + with pytest.raises(ClientError): + await agent.delete_agent() + + +# endregion + +# region Agent Invoke Tests + + +# Test case to verify the `get_response` method of BedrockAgent +@patch.object(boto3, "client", return_value=Mock()) +async def test_bedrock_agent_get_response( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, + bedrock_agent_non_streaming_simple_response, + simple_response, +): + with ( + patch.object(BedrockAgent, "_invoke_agent", new_callable=AsyncMock) as mock_invoke_agent, + ): + agent = BedrockAgent(bedrock_agent_model_with_id) + + mock_invoke_agent.return_value = bedrock_agent_non_streaming_simple_response + response = await agent.get_response("test_session_id", "test_input_text") + assert response.content == simple_response + + mock_invoke_agent.assert_called_once_with( + "test_session_id", + "test_input_text", + None, + streamingConfigurations={"streamFinalResponse": False}, + sessionState={}, + ) + + +# Test case to verify the `get_response` method of BedrockAgent +@patch.object(boto3, "client", return_value=Mock()) +async def test_bedrock_agent_get_response_exception( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, + bedrock_agent_non_streaming_empty_response, +): + with ( + patch.object(BedrockAgent, "_invoke_agent", new_callable=AsyncMock) as mock_invoke_agent, + ): + agent = BedrockAgent(bedrock_agent_model_with_id) + + mock_invoke_agent.return_value = bedrock_agent_non_streaming_empty_response + with pytest.raises(AgentInvokeException): + await agent.get_response("test_session_id", "test_input_text") + + mock_invoke_agent.assert_called_once_with( + "test_session_id", + "test_input_text", + None, + streamingConfigurations={"streamFinalResponse": False}, + sessionState={}, + ) + + +# Test case to verify the invocation of BedrockAgent +@patch.object(boto3, "client", return_value=Mock()) +async def test_bedrock_agent_invoke( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, + bedrock_agent_non_streaming_simple_response, + simple_response, 
+): + with ( + patch.object(BedrockAgent, "_invoke_agent", new_callable=AsyncMock) as mock_invoke_agent, + ): + agent = BedrockAgent(bedrock_agent_model_with_id) + + mock_invoke_agent.return_value = bedrock_agent_non_streaming_simple_response + async for message in agent.invoke("test_session_id", "test_input_text"): + assert message.content == simple_response + + mock_invoke_agent.assert_called_once_with( + "test_session_id", + "test_input_text", + None, + streamingConfigurations={"streamFinalResponse": False}, + sessionState={}, + ) + + +# Test case to verify the streaming invocation of BedrockAgent +@patch.object(boto3, "client", return_value=Mock()) +async def test_bedrock_agent_invoke_stream( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, + bedrock_agent_streaming_simple_response, + simple_response, +): + with ( + patch.object(BedrockAgent, "_invoke_agent", new_callable=AsyncMock) as mock_invoke_agent, + ): + agent = BedrockAgent(bedrock_agent_model_with_id) + + mock_invoke_agent.return_value = bedrock_agent_streaming_simple_response + full_message = "" + async for message in agent.invoke_stream("test_session_id", "test_input_text"): + full_message += message.content + + assert full_message == simple_response + mock_invoke_agent.assert_called_once_with( + "test_session_id", + "test_input_text", + None, + streamingConfigurations={"streamFinalResponse": True}, + sessionState={}, + ) + + +# Test case to verify the invocation of BedrockAgent with function call +@patch.object(boto3, "client", return_value=Mock()) +async def test_bedrock_agent_invoke_with_function_call( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, + bedrock_agent_function_call_response, + bedrock_agent_non_streaming_simple_response, +): + with ( + patch.object(BedrockAgent, "_invoke_agent", new_callable=AsyncMock) as mock_invoke_agent, + patch.object(BedrockAgent, "_handle_function_call_contents") as mock_handle_function_call_contents, + ): + agent = BedrockAgent(bedrock_agent_model_with_id) + + function_result_contents = [ + FunctionResultContent( + id="test_id", + name="test_function", + result="test_result", + metadata={"functionInvocationInput": {"actionGroup": "test_action_group"}}, + ) + ] + mock_handle_function_call_contents.return_value = function_result_contents + agent.function_choice_behavior.maximum_auto_invoke_attempts = 2 + + mock_invoke_agent.side_effect = [ + bedrock_agent_function_call_response, + bedrock_agent_non_streaming_simple_response, + ] + async for _ in agent.invoke("test_session_id", "test_input_text"): + mock_invoke_agent.assert_called_with( + "test_session_id", + "test_input_text", + None, + streamingConfigurations={"streamFinalResponse": False}, + sessionState={ + "invocationId": "test_invocation_id", + "returnControlInvocationResults": parse_function_result_contents(function_result_contents), + }, + ) + + +# Test case to verify the streaming invocation of BedrockAgent with function call +@patch.object(boto3, "client", return_value=Mock()) +async def test_bedrock_agent_invoke_stream_with_function_call( + client, + bedrock_agent_unit_test_env, + bedrock_agent_model_with_id, + bedrock_agent_function_call_response, + bedrock_agent_streaming_simple_response, +): + with ( + patch.object(BedrockAgent, "_invoke_agent", new_callable=AsyncMock) as mock_invoke_agent, + patch.object(BedrockAgent, "_handle_function_call_contents") as mock_handle_function_call_contents, + ): + agent = BedrockAgent(bedrock_agent_model_with_id) + + function_result_contents = 
[ + FunctionResultContent( + id="test_id", + name="test_function", + result="test_result", + metadata={"functionInvocationInput": {"actionGroup": "test_action_group"}}, + ) + ] + mock_handle_function_call_contents.return_value = function_result_contents + agent.function_choice_behavior.maximum_auto_invoke_attempts = 2 + + mock_invoke_agent.side_effect = [ + bedrock_agent_function_call_response, + bedrock_agent_streaming_simple_response, + ] + async for _ in agent.invoke_stream("test_session_id", "test_input_text"): + mock_invoke_agent.assert_called_with( + "test_session_id", + "test_input_text", + None, + streamingConfigurations={"streamFinalResponse": True}, + sessionState={ + "invocationId": "test_invocation_id", + "returnControlInvocationResults": parse_function_result_contents(function_result_contents), + }, + ) + + +# endregion diff --git a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_channel.py b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_channel.py new file mode 100644 index 000000000000..66e203d93065 --- /dev/null +++ b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_channel.py @@ -0,0 +1,63 @@ +# Copyright (c) Microsoft. All rights reserved. + +import pytest + +from semantic_kernel.contents.chat_message_content import ChatMessageContent + + +@pytest.fixture +def mock_channel(): + from semantic_kernel.agents.channels.bedrock_agent_channel import BedrockAgentChannel + + return BedrockAgentChannel() + + +@pytest.fixture +def chat_history() -> list[ChatMessageContent]: + return [ + ChatMessageContent(role="user", content="Hello, Bedrock!"), + ChatMessageContent(role="assistant", content="Hello, User!"), + ChatMessageContent(role="user", content="How are you?"), + ChatMessageContent(role="assistant", content="I'm good, thank you!"), + ] + + +@pytest.fixture +def chat_history_not_alternate_role() -> list[ChatMessageContent]: + return [ + ChatMessageContent(role="user", content="Hello, Bedrock!"), + ChatMessageContent(role="user", content="Hello, User!"), + ChatMessageContent(role="assistant", content="How are you?"), + ChatMessageContent(role="assistant", content="I'm good, thank you!"), + ] + + +async def test_receive_message(mock_channel, chat_history): + # Test to verify the receive_message functionality + await mock_channel.receive(chat_history) + assert len(mock_channel) == len(chat_history) + + +async def test_channel_receive_message_with_no_message(mock_channel): + # Test to verify receive_message when no message is received + await mock_channel.receive([]) + assert len(mock_channel) == 0 + + +async def test_chat_history_alternation(mock_channel, chat_history_not_alternate_role): + # Test to verify chat history alternates between user and assistant messages + await mock_channel.receive(chat_history_not_alternate_role) + assert all( + mock_channel.messages[i].role != mock_channel.messages[i + 1].role + for i in range(len(chat_history_not_alternate_role) - 1) + ) + assert mock_channel.messages[1].content == mock_channel.MESSAGE_PLACEHOLDER + assert mock_channel.messages[4].content == mock_channel.MESSAGE_PLACEHOLDER + + +async def test_channel_reset(mock_channel, chat_history): + # Test to verify the reset functionality + await mock_channel.receive(chat_history) + assert len(mock_channel) == len(chat_history) + await mock_channel.reset() + assert len(mock_channel) == 0 diff --git a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_event_type.py b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_event_type.py new file mode 
100644 index 000000000000..08bf1b704cb6 --- /dev/null +++ b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_event_type.py @@ -0,0 +1,27 @@ +# Copyright (c) Microsoft. All rights reserved. + +import pytest + +from semantic_kernel.agents.bedrock.models.bedrock_agent_event_type import BedrockAgentEventType + + +def test_bedrock_agent_event_type_values(): + """Test case to verify the values of BedrockAgentEventType enum.""" + assert BedrockAgentEventType.CHUNK.value == "chunk" + assert BedrockAgentEventType.TRACE.value == "trace" + assert BedrockAgentEventType.RETURN_CONTROL.value == "returnControl" + assert BedrockAgentEventType.FILES.value == "files" + + +def test_bedrock_agent_event_type_enum(): + """Test case to verify the type of BedrockAgentEventType enum members.""" + assert isinstance(BedrockAgentEventType.CHUNK, BedrockAgentEventType) + assert isinstance(BedrockAgentEventType.TRACE, BedrockAgentEventType) + assert isinstance(BedrockAgentEventType.RETURN_CONTROL, BedrockAgentEventType) + assert isinstance(BedrockAgentEventType.FILES, BedrockAgentEventType) + + +def test_bedrock_agent_event_type_invalid(): + """Test case to verify error handling for invalid BedrockAgentEventType value.""" + with pytest.raises(ValueError): + BedrockAgentEventType("invalid_value") diff --git a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_model.py b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_model.py new file mode 100644 index 000000000000..42098654eaee --- /dev/null +++ b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_model.py @@ -0,0 +1,67 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.agents.bedrock.models.bedrock_agent_model import BedrockAgentModel + + +def test_bedrock_agent_model_valid(): + """Test case to verify the BedrockAgentModel with valid data.""" + model = BedrockAgentModel( + agentId="test_id", + agentName="test_name", + agentVersion="1.0", + foundationModel="test_model", + agentStatus="CREATING", + ) + assert model.agent_id == "test_id" + assert model.agent_name == "test_name" + assert model.agent_version == "1.0" + assert model.foundation_model == "test_model" + assert model.agent_status == "CREATING" + + +def test_bedrock_agent_model_missing_agent_id(): + """Test case to verify the BedrockAgentModel with missing agentId.""" + model = BedrockAgentModel( + agentName="test_name", + agentVersion="1.0", + foundationModel="test_model", + agentStatus="CREATING", + ) + assert model.agent_id is None + assert model.agent_name == "test_name" + assert model.agent_version == "1.0" + assert model.foundation_model == "test_model" + assert model.agent_status == "CREATING" + + +def test_bedrock_agent_model_missing_agent_name(): + """Test case to verify the BedrockAgentModel with missing agentName.""" + model = BedrockAgentModel( + agentId="test_id", + agentVersion="1.0", + foundationModel="test_model", + agentStatus="CREATING", + ) + assert model.agent_id == "test_id" + assert model.agent_name is None + assert model.agent_version == "1.0" + assert model.foundation_model == "test_model" + assert model.agent_status == "CREATING" + + +def test_bedrock_agent_model_extra_field(): + """Test case to verify the BedrockAgentModel with an extra field.""" + model = BedrockAgentModel( + agentId="test_id", + agentName="test_name", + agentVersion="1.0", + foundationModel="test_model", + agentStatus="CREATING", + extraField="extra_value", + ) + assert model.agent_id == "test_id" + assert model.agent_name == "test_name" + assert 
model.agent_version == "1.0" + assert model.foundation_model == "test_model" + assert model.agent_status == "CREATING" + assert model.extraField == "extra_value" diff --git a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_settings.py b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_settings.py new file mode 100644 index 000000000000..c56e3fcb878f --- /dev/null +++ b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_settings.py @@ -0,0 +1,28 @@ +# Copyright (c) Microsoft. All rights reserved. + +import pytest +from pydantic import ValidationError + +from semantic_kernel.agents.bedrock.bedrock_agent_settings import BedrockAgentSettings + + +def test_bedrock_agent_settings_from_env_vars(bedrock_agent_unit_test_env): + """Test loading BedrockAgentSettings from environment variables.""" + settings = BedrockAgentSettings.create(env_file_path="fake_path") + + assert settings.agent_resource_role_arn == bedrock_agent_unit_test_env["BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN"] + assert settings.foundation_model == bedrock_agent_unit_test_env["BEDROCK_AGENT_FOUNDATION_MODEL"] + + +@pytest.mark.parametrize( + "exclude_list", + [ + ["BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN"], + ["BEDROCK_AGENT_FOUNDATION_MODEL"], + ], + indirect=True, +) +def test_bedrock_agent_settings_from_env_vars_missing_required(bedrock_agent_unit_test_env): + """Test loading BedrockAgentSettings from environment variables with missing required fields.""" + with pytest.raises(ValidationError): + BedrockAgentSettings.create(env_file_path="fake_path") diff --git a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_status.py b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_status.py new file mode 100644 index 000000000000..c02b11178713 --- /dev/null +++ b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_status.py @@ -0,0 +1,23 @@ +# Copyright (c) Microsoft. All rights reserved. + +import pytest + +from semantic_kernel.agents.bedrock.models.bedrock_agent_status import BedrockAgentStatus + + +def test_bedrock_agent_status_values(): + """Test case to verify the values of BedrockAgentStatus enum.""" + assert BedrockAgentStatus.CREATING == "CREATING" + assert BedrockAgentStatus.PREPARING == "PREPARING" + assert BedrockAgentStatus.PREPARED == "PREPARED" + assert BedrockAgentStatus.NOT_PREPARED == "NOT_PREPARED" + assert BedrockAgentStatus.DELETING == "DELETING" + assert BedrockAgentStatus.FAILED == "FAILED" + assert BedrockAgentStatus.VERSIONING == "VERSIONING" + assert BedrockAgentStatus.UPDATING == "UPDATING" + + +def test_bedrock_agent_status_invalid_value(): + """Test case to verify error handling for invalid BedrockAgentStatus value.""" + with pytest.raises(ValueError): + BedrockAgentStatus("INVALID_STATUS") diff --git a/python/tests/unit/agents/chat_completion/conftest.py b/python/tests/unit/agents/chat_completion/conftest.py new file mode 100644 index 000000000000..5e5784bc1b9c --- /dev/null +++ b/python/tests/unit/agents/chat_completion/conftest.py @@ -0,0 +1,25 @@ +# Copyright (c) Microsoft. All rights reserved. 
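+# Shared fixtures for the ChatCompletionAgent unit tests: a mocked Kernel wired to a mocked chat completion service.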
+ +from unittest.mock import AsyncMock, create_autospec + +import pytest + +from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + + +@pytest.fixture +def kernel_with_ai_service(): + kernel = create_autospec(Kernel) + mock_ai_service_client = create_autospec(ChatCompletionClientBase) + mock_prompt_execution_settings = create_autospec(PromptExecutionSettings) + mock_prompt_execution_settings.function_choice_behavior = None + kernel.select_ai_service.return_value = (mock_ai_service_client, mock_prompt_execution_settings) + mock_ai_service_client.get_chat_message_contents = AsyncMock( + return_value=[ChatMessageContent(role=AuthorRole.SYSTEM, content="Processed Message")] + ) + + return kernel, mock_ai_service_client diff --git a/python/tests/unit/agents/chat_completion/test_chat_completion_agent.py b/python/tests/unit/agents/chat_completion/test_chat_completion_agent.py new file mode 100644 index 000000000000..2a0798f342ad --- /dev/null +++ b/python/tests/unit/agents/chat_completion/test_chat_completion_agent.py @@ -0,0 +1,326 @@ +# Copyright (c) Microsoft. All rights reserved. + +from collections.abc import AsyncGenerator, Callable +from unittest.mock import AsyncMock, create_autospec, patch + +import pytest +from pydantic import ValidationError + +from semantic_kernel.agents import ChatCompletionAgent +from semantic_kernel.agents.channels.chat_history_channel import ChatHistoryChannel +from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions import KernelServiceNotFoundError +from semantic_kernel.exceptions.agent_exceptions import AgentInvokeException +from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.kernel import Kernel + + +@pytest.fixture +def mock_streaming_chat_completion_response() -> Callable[..., AsyncGenerator[list[ChatMessageContent], None]]: + async def mock_response( + chat_history: ChatHistory, + settings: PromptExecutionSettings, + kernel: Kernel, + arguments: KernelArguments, + ) -> AsyncGenerator[list[ChatMessageContent], None]: + content1 = ChatMessageContent(role=AuthorRole.SYSTEM, content="Processed Message 1") + content2 = ChatMessageContent(role=AuthorRole.TOOL, content="Processed Message 2") + chat_history.messages.append(content1) + chat_history.messages.append(content2) + yield [content1] + yield [content2] + + return mock_response + + +async def test_initialization(): + agent = ChatCompletionAgent( + name="TestAgent", + id="test_id", + description="Test Description", + instructions="Test Instructions", + ) + + assert agent.name == "TestAgent" + assert agent.id == "test_id" + assert agent.description == "Test Description" + assert 
agent.instructions == "Test Instructions" + + +async def test_initialization_invalid_name_throws(): + with pytest.raises(ValidationError): + _ = ChatCompletionAgent( + name="Test Agent", + id="test_id", + description="Test Description", + instructions="Test Instructions", + ) + + +def test_initialization_with_kernel(kernel: Kernel): + agent = ChatCompletionAgent( + kernel=kernel, + name="TestAgent", + id="test_id", + description="Test Description", + instructions="Test Instructions", + ) + + assert kernel == agent.kernel + assert agent.name == "TestAgent" + assert agent.id == "test_id" + assert agent.description == "Test Description" + assert agent.instructions == "Test Instructions" + + +def test_initialization_with_kernel_and_service(kernel: Kernel, azure_openai_unit_test_env, openai_unit_test_env): + kernel.add_service(AzureChatCompletion(service_id="test_azure")) + agent = ChatCompletionAgent( + service=OpenAIChatCompletion(), + kernel=kernel, + name="TestAgent", + id="test_id", + description="Test Description", + instructions="Test Instructions", + ) + + assert kernel == agent.kernel + assert len(kernel.services) == 2 + assert agent.name == "TestAgent" + assert agent.id == "test_id" + assert agent.description == "Test Description" + assert agent.instructions == "Test Instructions" + + +def test_initialization_with_plugins_via_constructor(custom_plugin_class): + agent = ChatCompletionAgent( + name="TestAgent", + id="test_id", + description="Test Description", + instructions="Test Instructions", + plugins=[custom_plugin_class()], + ) + + assert agent.name == "TestAgent" + assert agent.id == "test_id" + assert agent.description == "Test Description" + assert agent.instructions == "Test Instructions" + assert agent.kernel.plugins is not None + assert len(agent.kernel.plugins) == 1 + + +def test_initialization_with_service_via_constructor(openai_unit_test_env): + agent = ChatCompletionAgent( + name="TestAgent", + id="test_id", + description="Test Description", + instructions="Test Instructions", + service=OpenAIChatCompletion(), + ) + + assert agent.name == "TestAgent" + assert agent.id == "test_id" + assert agent.description == "Test Description" + assert agent.instructions == "Test Instructions" + assert agent.service is not None + assert agent.kernel.services["test_chat_model_id"] == agent.service + + +async def test_get_response(kernel_with_ai_service: tuple[Kernel, ChatCompletionClientBase]): + kernel, _ = kernel_with_ai_service + agent = ChatCompletionAgent( + kernel=kernel, + name="TestAgent", + instructions="Test Instructions", + ) + + history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) + + response = await agent.get_response(history) + + assert response.content == "Processed Message" + + +async def test_get_response_exception(kernel_with_ai_service: tuple[Kernel, ChatCompletionClientBase]): + kernel, mock_ai_service_client = kernel_with_ai_service + mock_ai_service_client.get_chat_message_contents = AsyncMock(return_value=[]) + agent = ChatCompletionAgent( + kernel=kernel, + name="TestAgent", + instructions="Test Instructions", + ) + + history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) + + with pytest.raises(AgentInvokeException): + await agent.get_response(history) + + +async def test_invoke(kernel_with_ai_service: tuple[Kernel, ChatCompletionClientBase]): + kernel, _ = kernel_with_ai_service + agent = ChatCompletionAgent( + kernel=kernel, + name="TestAgent", + instructions="Test 
Instructions", + ) + + history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) + + messages = [message async for message in agent.invoke(history)] + + assert len(messages) == 1 + assert messages[0].content == "Processed Message" + + +async def test_invoke_tool_call_added(kernel_with_ai_service: tuple[Kernel, ChatCompletionClientBase]): + kernel, mock_ai_service_client = kernel_with_ai_service + agent = ChatCompletionAgent( + kernel=kernel, + name="TestAgent", + ) + + history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) + + async def mock_get_chat_message_contents( + chat_history: ChatHistory, + settings: PromptExecutionSettings, + kernel: Kernel, + arguments: KernelArguments, + ): + new_messages = [ + ChatMessageContent(role=AuthorRole.ASSISTANT, content="Processed Message 1"), + ChatMessageContent(role=AuthorRole.TOOL, content="Processed Message 2"), + ] + chat_history.messages.extend(new_messages) + return new_messages + + mock_ai_service_client.get_chat_message_contents = AsyncMock(side_effect=mock_get_chat_message_contents) + + messages = [message async for message in agent.invoke(history)] + + assert len(messages) == 2 + assert messages[0].content == "Processed Message 1" + assert messages[1].content == "Processed Message 2" + + assert len(history.messages) == 3 + assert history.messages[1].content == "Processed Message 1" + assert history.messages[2].content == "Processed Message 2" + assert history.messages[1].name == "TestAgent" + assert history.messages[2].name == "TestAgent" + + +async def test_invoke_no_service_throws(kernel: Kernel): + agent = ChatCompletionAgent(kernel=kernel, name="TestAgent") + + history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) + + with pytest.raises(KernelServiceNotFoundError): + async for _ in agent.invoke(history): + pass + + +async def test_invoke_stream(kernel_with_ai_service: tuple[Kernel, ChatCompletionClientBase]): + kernel, _ = kernel_with_ai_service + agent = ChatCompletionAgent(kernel=kernel, name="TestAgent") + + history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) + + with patch( + "semantic_kernel.connectors.ai.chat_completion_client_base.ChatCompletionClientBase.get_streaming_chat_message_contents", + return_value=AsyncMock(), + ) as mock: + mock.return_value.__aiter__.return_value = [ + [ChatMessageContent(role=AuthorRole.USER, content="Initial Message")] + ] + + async for message in agent.invoke_stream(history): + assert message.role == AuthorRole.USER + assert message.content == "Initial Message" + + +async def test_invoke_stream_tool_call_added( + kernel_with_ai_service: tuple[Kernel, ChatCompletionClientBase], + mock_streaming_chat_completion_response, +): + kernel, mock_ai_service_client = kernel_with_ai_service + agent = ChatCompletionAgent(kernel=kernel, name="TestAgent") + + history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) + + mock_ai_service_client.get_streaming_chat_message_contents = mock_streaming_chat_completion_response + + async for message in agent.invoke_stream(history): + print(f"Message role: {message.role}, content: {message.content}") + assert message.role in [AuthorRole.SYSTEM, AuthorRole.TOOL] + assert message.content in ["Processed Message 1", "Processed Message 2"] + + assert len(history.messages) == 3 + + +async def test_invoke_stream_no_service_throws(kernel: Kernel): + 
agent = ChatCompletionAgent(kernel=kernel, name="TestAgent") + + history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) + + with pytest.raises(KernelServiceNotFoundError): + async for _ in agent.invoke_stream(history): + pass + + +def test_get_channel_keys(): + agent = ChatCompletionAgent() + keys = agent.get_channel_keys() + + for key in keys: + assert isinstance(key, str) + + +async def test_create_channel(): + agent = ChatCompletionAgent() + channel = await agent.create_channel() + + assert isinstance(channel, ChatHistoryChannel) + + +async def test_prepare_agent_chat_history_with_formatted_instructions(): + agent = ChatCompletionAgent( + name="TestAgent", id="test_id", description="Test Description", instructions="Test Instructions" + ) + with patch.object( + ChatCompletionAgent, "format_instructions", new=AsyncMock(return_value="Formatted instructions for testing") + ) as mock_format_instructions: + dummy_kernel = create_autospec(Kernel) + dummy_args = KernelArguments(param="value") + user_message = ChatMessageContent(role=AuthorRole.USER, content="User message") + history = ChatHistory(messages=[user_message]) + result_history = await agent._prepare_agent_chat_history(history, dummy_kernel, dummy_args) + mock_format_instructions.assert_awaited_once_with(dummy_kernel, dummy_args) + assert len(result_history.messages) == 2 + system_message = result_history.messages[0] + assert system_message.role == AuthorRole.SYSTEM + assert system_message.content == "Formatted instructions for testing" + assert system_message.name == agent.name + assert result_history.messages[1] == user_message + + +async def test_prepare_agent_chat_history_without_formatted_instructions(): + agent = ChatCompletionAgent( + name="TestAgent", id="test_id", description="Test Description", instructions="Test Instructions" + ) + with patch.object( + ChatCompletionAgent, "format_instructions", new=AsyncMock(return_value=None) + ) as mock_format_instructions: + dummy_kernel = create_autospec(Kernel) + dummy_args = KernelArguments(param="value") + user_message = ChatMessageContent(role=AuthorRole.USER, content="User message") + history = ChatHistory(messages=[user_message]) + result_history = await agent._prepare_agent_chat_history(history, dummy_kernel, dummy_args) + mock_format_instructions.assert_awaited_once_with(dummy_kernel, dummy_args) + assert len(result_history.messages) == 1 + assert result_history.messages[0] == user_message diff --git a/python/tests/unit/agents/test_chat_history_channel.py b/python/tests/unit/agents/chat_completion/test_chat_history_channel.py similarity index 79% rename from python/tests/unit/agents/test_chat_history_channel.py rename to python/tests/unit/agents/chat_completion/test_chat_history_channel.py index 4ba15f01a062..b03dc892dd57 100644 --- a/python/tests/unit/agents/test_chat_history_channel.py +++ b/python/tests/unit/agents/chat_completion/test_chat_history_channel.py @@ -3,13 +3,12 @@ from collections.abc import AsyncIterable from unittest.mock import AsyncMock -import pytest - -from semantic_kernel.agents.channels.chat_history_channel import ChatHistoryAgentProtocol, ChatHistoryChannel +from semantic_kernel.agents.channels.chat_history_channel import ChatHistoryChannel from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.file_reference_content import FileReferenceContent from semantic_kernel.contents.function_result_content import FunctionResultContent +from 
semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions import ServiceInvalidTypeError class MockChatHistoryHandler: @@ -33,9 +32,6 @@ class MockNonChatHistoryHandler: id: str = "mock_non_chat_history_handler" -ChatHistoryAgentProtocol.register(MockChatHistoryHandler) - - class AsyncIterableMock: def __init__(self, async_gen): self.async_gen = async_gen @@ -130,24 +126,6 @@ async def mock_invoke(history: list[ChatMessageContent]): assert received_messages[2].items[0].id == "test_id" -async def test_invoke_incorrect_instance_throws(): - channel = ChatHistoryChannel() - agent = MockNonChatHistoryHandler() - - with pytest.raises(ServiceInvalidTypeError): - async for _ in channel.invoke(agent): - pass - - -async def test_invoke_stream_incorrect_instance_throws(): - channel = ChatHistoryChannel() - agent = MockNonChatHistoryHandler() - - with pytest.raises(ServiceInvalidTypeError): - async for _ in channel.invoke_stream(agent, []): - pass - - async def test_receive(): channel = ChatHistoryChannel() history = [ @@ -200,3 +178,47 @@ async def test_reset_history(): await channel.reset() assert len(channel.messages) == 0 + + +async def test_receive_skips_file_references(): + channel = ChatHistoryChannel() + + file_ref_item = FileReferenceContent() + streaming_file_ref_item = StreamingFileReferenceContent() + normal_item_1 = FunctionResultContent(id="test_id", result="normal content 1") + normal_item_2 = FunctionResultContent(id="test_id_2", result="normal content 2") + + msg_with_file_only = ChatMessageContent( + role=AuthorRole.USER, + content="Normal message set as TextContent", + items=[file_ref_item], + ) + + msg_with_mixed = ChatMessageContent( + role=AuthorRole.USER, + content="Mixed content message", + items=[streaming_file_ref_item, normal_item_1], + ) + + msg_with_normal = ChatMessageContent( + role=AuthorRole.USER, + content="Normal message", + items=[normal_item_2], + ) + + history = [msg_with_file_only, msg_with_mixed, msg_with_normal] + await channel.receive(history) + + assert len(channel.messages) == 3 + + assert channel.messages[0].content == "Normal message set as TextContent" + assert len(channel.messages[0].items) == 1 + + assert channel.messages[1].content == "Mixed content message" + assert len(channel.messages[0].items) == 1 + assert channel.messages[1].items[0].result == "normal content 1" + + assert channel.messages[2].content == "Normal message" + assert len(channel.messages[2].items) == 2 + assert channel.messages[2].items[0].result == "normal content 2" + assert channel.messages[2].items[1].text == "Normal message" diff --git a/python/tests/unit/agents/openai_assistant/conftest.py b/python/tests/unit/agents/openai_assistant/conftest.py new file mode 100644 index 000000000000..d21ae4e4e0fc --- /dev/null +++ b/python/tests/unit/agents/openai_assistant/conftest.py @@ -0,0 +1,105 @@ +# Copyright (c) Microsoft. All rights reserved. 
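+# Shared fixtures for the OpenAI Assistant agent unit tests: mock thread, mock thread messages,
+# a mocked AsyncOpenAI client, and a mocked assistant definition.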
+ +from typing import Any +from unittest.mock import AsyncMock, MagicMock + +import pytest +from openai import AsyncOpenAI +from openai.types.beta.assistant import Assistant +from openai.types.beta.threads.file_citation_annotation import FileCitation, FileCitationAnnotation +from openai.types.beta.threads.file_path_annotation import FilePath, FilePathAnnotation +from openai.types.beta.threads.image_file import ImageFile +from openai.types.beta.threads.image_file_content_block import ImageFileContentBlock +from openai.types.beta.threads.text import Text +from openai.types.beta.threads.text_content_block import TextContentBlock + + +@pytest.fixture +def mock_thread(): + class MockThread: + id = "test_thread_id" + + return MockThread() + + +@pytest.fixture +def mock_thread_messages(): + class MockMessage: + def __init__(self, id, role, content, assistant_id=None): + self.id = id + self.role = role + self.content = content + self.assistant_id = assistant_id + + return [ + MockMessage( + id="test_message_id_1", + role="user", + content=[ + TextContentBlock( + type="text", + text=Text( + value="Hello", + annotations=[ + FilePathAnnotation( + type="file_path", + file_path=FilePath(file_id="test_file_id"), + end_index=5, + start_index=0, + text="Hello", + ), + FileCitationAnnotation( + type="file_citation", + file_citation=FileCitation(file_id="test_file_id"), + text="Hello", + start_index=0, + end_index=5, + ), + ], + ), + ) + ], + ), + MockMessage( + id="test_message_id_2", + role="assistant", + content=[ + ImageFileContentBlock(type="image_file", image_file=ImageFile(file_id="test_file_id", detail="auto")) + ], + assistant_id="assistant_1", + ), + ] + + +@pytest.fixture +def openai_client(assistant_definition, mock_thread, mock_thread_messages) -> AsyncMock: + async def mock_list_messages(*args, **kwargs) -> Any: + return MagicMock(data=mock_thread_messages) + + async def mock_retrieve_assistant(*args, **kwargs) -> Any: + asst = AsyncMock(spec=Assistant) + asst.name = "test-assistant" + return asst + + client = AsyncMock(spec=AsyncOpenAI) + client.beta = MagicMock() + client.beta.assistants = MagicMock() + client.beta.assistants.create = AsyncMock(return_value=assistant_definition) + client.beta.assistants.retrieve = AsyncMock(side_effect=mock_retrieve_assistant) + client.beta.threads = MagicMock() + client.beta.threads.create = AsyncMock(return_value=mock_thread) + client.beta.threads.messages = MagicMock() + client.beta.threads.messages.list = AsyncMock(side_effect=mock_list_messages) + + return client + + +@pytest.fixture +def assistant_definition() -> AsyncMock: + definition = AsyncMock(spec=Assistant) + definition.id = "agent123" + definition.name = "agentName" + definition.description = "desc" + definition.instructions = "test agent" + + return definition diff --git a/python/tests/unit/agents/openai_assistant/test_assistant_thread_actions.py b/python/tests/unit/agents/openai_assistant/test_assistant_thread_actions.py new file mode 100644 index 000000000000..dd75ab7ddb0f --- /dev/null +++ b/python/tests/unit/agents/openai_assistant/test_assistant_thread_actions.py @@ -0,0 +1,770 @@ +# Copyright (c) Microsoft. All rights reserved. 
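+# Unit tests for AssistantThreadActions, driven by mocked OpenAI Assistants API runs, run steps, and stream events.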
+ + +from datetime import datetime, timedelta, timezone +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from openai import AsyncOpenAI +from openai.types.beta.assistant import Assistant +from openai.types.beta.assistant_stream_event import ( + ThreadMessageDelta, + ThreadRunRequiresAction, + ThreadRunStepCompleted, + ThreadRunStepDelta, +) +from openai.types.beta.code_interpreter_tool import CodeInterpreterTool +from openai.types.beta.file_search_tool import FileSearchTool +from openai.types.beta.function_tool import FunctionTool +from openai.types.beta.threads import ImageFileDelta, ImageFileDeltaBlock, MessageDelta, TextDelta, TextDeltaBlock +from openai.types.beta.threads.file_citation_annotation import FileCitation, FileCitationAnnotation +from openai.types.beta.threads.file_citation_delta_annotation import FileCitationDeltaAnnotation +from openai.types.beta.threads.file_path_annotation import FilePath, FilePathAnnotation +from openai.types.beta.threads.image_file import ImageFile +from openai.types.beta.threads.image_file_content_block import ImageFileContentBlock +from openai.types.beta.threads.message import Message +from openai.types.beta.threads.message_delta_event import MessageDeltaEvent +from openai.types.beta.threads.required_action_function_tool_call import Function, RequiredActionFunctionToolCall +from openai.types.beta.threads.run import ( + RequiredAction, + RequiredActionSubmitToolOutputs, + Run, +) +from openai.types.beta.threads.run_create_params import TruncationStrategy +from openai.types.beta.threads.runs import ( + FunctionToolCallDelta, + RunStep, + RunStepDelta, + RunStepDeltaEvent, + ToolCallDeltaObject, + ToolCallsStepDetails, +) +from openai.types.beta.threads.runs.code_interpreter_tool_call import CodeInterpreter, CodeInterpreterToolCall +from openai.types.beta.threads.runs.code_interpreter_tool_call_delta import CodeInterpreter as CodeInterpreterDelta +from openai.types.beta.threads.runs.code_interpreter_tool_call_delta import CodeInterpreterToolCallDelta +from openai.types.beta.threads.runs.function_tool_call import Function as RunsFunction +from openai.types.beta.threads.runs.function_tool_call import FunctionToolCall +from openai.types.beta.threads.runs.function_tool_call_delta import Function as FunctionForToolCallDelta +from openai.types.beta.threads.runs.message_creation_step_details import MessageCreation, MessageCreationStepDetails +from openai.types.beta.threads.runs.run_step import Usage +from openai.types.beta.threads.text import Text +from openai.types.beta.threads.text_content_block import TextContentBlock +from openai.types.shared.function_definition import FunctionDefinition + +from semantic_kernel.agents.open_ai.assistant_thread_actions import AssistantThreadActions +from semantic_kernel.agents.open_ai.function_action_result import FunctionActionResult +from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent +from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.file_reference_content import FileReferenceContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions.agent_exceptions import AgentInvokeException +from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.functions.kernel_plugin 
import KernelPlugin +from semantic_kernel.kernel import Kernel +from semantic_kernel.prompt_template.kernel_prompt_template import KernelPromptTemplate +from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig + + +def mock_thread_run_step_completed(): + return ThreadRunStepCompleted( + data=RunStep( + id="step_id_2", + type="message_creation", + completed_at=int(datetime.now(timezone.utc).timestamp()), + created_at=int((datetime.now(timezone.utc) - timedelta(minutes=2)).timestamp()), + step_details=MessageCreationStepDetails( + type="message_creation", message_creation=MessageCreation(message_id="test") + ), + assistant_id="assistant_id", + object="thread.run.step", + run_id="run_id", + status="completed", + thread_id="thread_id", + usage=Usage(completion_tokens=10, prompt_tokens=5, total_tokens=15), + ), + event="thread.run.step.completed", + ) + + +def create_thread_message_delta_mock(): + return ThreadMessageDelta( + data=MessageDeltaEvent( + id="mock_msg_id", + delta=MessageDelta( + content=[ + TextDeltaBlock( + index=0, + type="text", + text=TextDelta( + annotations=[ + FileCitationDeltaAnnotation( + index=0, + type="file_citation", + start_index=1, + end_index=3, + text="annotation", + ) + ], + value="Hello", + ), + ), + ImageFileDeltaBlock( + index=0, + type="image_file", + image_file=ImageFileDelta( + file_id="test_file_id", + detail="auto", + ), + ), + ], + role=None, + ), + object="thread.message.delta", + ), + event="thread.message.delta", + ) + + +def create_thread_run_step_delta_mock(): + function = FunctionForToolCallDelta(name="math-Add", arguments="", output=None) + function_tool_call = FunctionToolCallDelta( + index=0, type="function", id="call_RcvYVzsppjjnUZcC47fAlwTW", function=function + ) + code = CodeInterpreterDelta(input="import os") + code_tool_call = CodeInterpreterToolCallDelta( + index=1, type="code_interpreter", id="call_RcvYVzsppjjnUZcC47fAlwTW", code_interpreter=code + ) + + step_details = ToolCallDeltaObject(type="tool_calls", tool_calls=[function_tool_call, code_tool_call]) + delta = RunStepDelta(step_details=step_details) + run_step_delta_event = RunStepDeltaEvent( + id="step_FXzQ44kRmoeHOPUstkEI1UL5", delta=delta, object="thread.run.step.delta" + ) + return ThreadRunStepDelta(data=run_step_delta_event, event="thread.run.step.delta") + + +class MockError: + def __init__(self, message: str): + self.message = message + + +class MockRunData: + def __init__(self, id, status): + self.id = id + self.status = status + + +class ErrorMockRunData(MockRunData): + def __init__(self, id, status, last_error=None): + super().__init__(id, status) + self.last_error = last_error + + +class MockEvent: + def __init__(self, event, data): + self.event = event + self.data = data + + +class MockAsyncIterable: + def __init__(self, items): + self.items = items.copy() + + def __aiter__(self): + self._iter = iter(self.items) + return self + + async def __anext__(self): + try: + return next(self._iter) + except StopIteration: + raise StopAsyncIteration + + +class MockStream: + def __init__(self, events): + self.events = events + + async def __aenter__(self): + return MockAsyncIterable(self.events) + + async def __aexit__(self, exc_type, exc_val, exc_tb): + pass + + +@pytest.fixture +def mock_run_step_tool_call(): + class MockToolCall: + def __init__(self): + self.type = "code_interpreter" + self.code_interpreter = MagicMock(input="print('Hello, world!')") + + return RunStep( + id="step_id_1", + type="tool_calls", + 
completed_at=int(datetime.now(timezone.utc).timestamp()), + created_at=int((datetime.now(timezone.utc) - timedelta(minutes=1)).timestamp()), + step_details=ToolCallsStepDetails( + tool_calls=[ + CodeInterpreterToolCall( # type: ignore + type="code_interpreter", + id="tool_call_id", + code_interpreter=CodeInterpreter(input="test code", outputs=[]), + ), + FunctionToolCall( + type="function", + id="tool_call_id", + function=RunsFunction(arguments="{}", name="function_name", output="test output"), + ), + ], + type="tool_calls", + ), + assistant_id="assistant_id", + object="thread.run.step", + run_id="run_id", + status="completed", + thread_id="thread_id", + ) + + +def mock_thread_requires_action_run(): + return ThreadRunRequiresAction( + data=Run( + id="run_00OwjJnEg2SGJy8sky7ip35P", + assistant_id="asst_wMMAX5F59szE7YHrCKSSgJlE", + cancelled_at=None, + completed_at=None, + created_at=1727798684, + expires_at=1727799284, + failed_at=None, + incomplete_details=None, + instructions="Answer questions about the menu.", + last_error=None, + max_completion_tokens=None, + max_prompt_tokens=None, + metadata={}, + model="gpt-4o-2024-08-06", + object="thread.run", + parallel_tool_calls=True, + required_action=RequiredAction( + submit_tool_outputs=RequiredActionSubmitToolOutputs( + tool_calls=[ + RequiredActionFunctionToolCall( + id="call_OTcZMjhm7WbhFnGkrmUjs68T", + function=Function(arguments="{}", name="menu-get_specials"), + type="function", + ) + ] + ), + type="submit_tool_outputs", + ), + response_format="auto", + started_at=1727798685, + status="requires_action", + thread_id="thread_jR4ZLlUwSrPcsLfdnGyFxi4Z", + tool_choice="auto", + tools=[ + FunctionTool( + function=FunctionDefinition( + name="menu-get_item_price", + description="Provides the price of the requested menu item.", + parameters={ + "type": "object", + "properties": { + "menu_item": {"type": "string", "description": "The name of the menu item."} + }, + "required": ["menu_item"], + }, + strict=False, + ), + type="function", + ), + FunctionTool( + function=FunctionDefinition( + name="menu-get_specials", + description="Provides a list of specials from the menu.", + parameters={"type": "object", "properties": {}, "required": []}, + strict=False, + ), + type="function", + ), + ], + truncation_strategy=TruncationStrategy(type="auto", last_messages=None), + usage=None, + temperature=1.0, + top_p=1.0, + tool_resources={"code_interpreter": {"file_ids": []}}, # type: ignore + ), + event="thread.run.requires_action", + ) + + +@pytest.fixture +def mock_thread_messages(): + class MockMessage: + def __init__(self, id, role, content, assistant_id=None): + self.id = id + self.role = role + self.content = content + self.assistant_id = assistant_id + + return [ + MockMessage( + id="test_message_id_1", + role="user", + content=[ + TextContentBlock( + type="text", + text=Text( + value="Hello", + annotations=[ + FilePathAnnotation( + type="file_path", + file_path=FilePath(file_id="test_file_id"), + end_index=5, + start_index=0, + text="Hello", + ), + FileCitationAnnotation( + type="file_citation", + file_citation=FileCitation(file_id="test_file_id"), + text="Hello", + start_index=0, + end_index=5, + ), + ], + ), + ) + ], + ), + MockMessage( + id="test_message_id_2", + role="assistant", + content=[ + ImageFileContentBlock(type="image_file", image_file=ImageFile(file_id="test_file_id", detail="auto")) + ], + assistant_id="assistant_1", + ), + ] + + +@pytest.fixture +def mock_run_step_message_creation(): + class MockMessageCreation: + def __init__(self): + 
self.message_id = "message_id"
+
+    class MockStepDetails:
+        def __init__(self):
+            self.message_creation = MockMessageCreation()
+
+    return RunStep(
+        id="step_id_2",
+        type="message_creation",
+        completed_at=int(datetime.now(timezone.utc).timestamp()),
+        created_at=int((datetime.now(timezone.utc) - timedelta(minutes=2)).timestamp()),
+        step_details=MessageCreationStepDetails(
+            type="message_creation", message_creation=MessageCreation(message_id="test")
+        ),
+        assistant_id="assistant_id",
+        object="thread.run.step",
+        run_id="run_id",
+        status="completed",
+        thread_id="thread_id",
+    )
+
+
+@pytest.fixture
+def mock_run_in_progress():
+    class MockRun:
+        def __init__(self):
+            self.id = "run_id"
+            self.status = "requires_action"
+            self.assistant_id = "assistant_id"
+            self.created_at = int(datetime.now(timezone.utc).timestamp())
+            self.instructions = "instructions"
+            self.model = "model"
+            self.object = "run"
+            self.thread_id = "thread_id"
+            self.tools = []
+            self.poll_count = 0
+            self.required_action = RequiredAction(
+                type="submit_tool_outputs",
+                submit_tool_outputs=RequiredActionSubmitToolOutputs(
+                    tool_calls=[
+                        RequiredActionFunctionToolCall(
+                            id="tool_call_id",
+                            type="function",
+                            function=Function(arguments="{}", name="function_name"),
+                        )
+                    ]
+                ),
+            )
+            self.last_error = None
+
+        def update_status(self):
+            self.poll_count += 1
+            if self.poll_count > 2:
+                self.status = "completed"
+
+    return MockRun()
+
+
+class SamplePlugin:
+    @kernel_function
+    def test_plugin(self, *args, **kwargs):
+        pass
+
+
+async def test_agent_thread_actions_create_message():
+    client = AsyncMock(spec=AsyncOpenAI)
+    client.beta = MagicMock()
+    client.beta.assistants = MagicMock()
+    client.beta.threads.messages = MagicMock()
+    client.beta.threads.messages.create = AsyncMock(spec=Message)
+
+    msg = ChatMessageContent(role=AuthorRole.USER, content="some content")
+    created_message = await AssistantThreadActions.create_message(client, "threadXYZ", msg)
+    assert created_message is not None
+
+
+async def test_assistant_thread_actions_invoke(
+    mock_run_step_message_creation, mock_run_step_tool_call, mock_run_in_progress, mock_thread_messages
+):
+    async def mock_poll_run_status(agent, run, thread_id):
+        run.update_status()
+        return run
+
+    sample_prompt_template_config = PromptTemplateConfig(
+        template="template",
+    )
+
+    kernel_plugin = KernelPlugin(name="expected_plugin_name", description="expected_plugin_description")
+
+    client = AsyncMock(spec=AsyncOpenAI)
+    definition = AsyncMock(spec=Assistant)
+    definition.id = "agent123"
+    definition.name = "agentName"
+    definition.description = "desc"
+    definition.instructions = "test agent"
+    definition.tools = [FileSearchTool(type="file_search"), CodeInterpreterTool(type="code_interpreter")]
+    definition.model = "gpt-4o"
+    definition.temperature = 1.0
+    definition.top_p = 1.0
+    definition.metadata = {}
+
+    client.beta = MagicMock()
+    client.beta.threads = MagicMock()
+    client.beta.threads.runs = MagicMock()
+    client.beta.threads.runs.create = AsyncMock(return_value=mock_run_in_progress)
+    client.beta.threads.runs.submit_tool_outputs = AsyncMock()
+    client.beta.threads.runs.steps = MagicMock()
+    client.beta.threads.runs.steps.list = AsyncMock(
+        return_value=MagicMock(data=[mock_run_step_message_creation, mock_run_step_tool_call])
+    )
+
+    agent = OpenAIAssistantAgent(
+        client=client,
+        definition=definition,
+        arguments=KernelArguments(test="test"),
+        kernel=AsyncMock(spec=Kernel),
+        plugins=[SamplePlugin(), kernel_plugin],
polling_options=AsyncMock(spec=RunPollingOptions), + prompt_template_config=sample_prompt_template_config, + other_arg="test", + ) + + with ( + patch( + "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions._poll_run_status", + new=AsyncMock(side_effect=mock_poll_run_status), + ), + patch( + "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions._retrieve_message", + new=AsyncMock(side_effect=AsyncMock(return_value=mock_thread_messages[0])), + ), + ): + async for message in AssistantThreadActions.invoke( + agent=agent, + thread_id="thread123", + kernel=AsyncMock(spec=Kernel), + additional_messages=[ + ChatMessageContent( + role=AuthorRole.USER, + content="additional content", + items=[FileReferenceContent(file_id="file_id", tools=["file_search"])], + metadata={"sample_metadata_key": "sample_metadata_val"}, + ) + ], + ): + assert message is not None + + +async def test_assistant_thread_actions_stream( + mock_thread_messages, +): + events = [ + MockEvent("thread.run.created", MockRunData(id="run_1", status="queued")), + MockEvent("thread.run.in_progress", MockRunData(id="run_1", status="in_progress")), + mock_thread_run_step_completed(), + MockEvent("thread.run.completed", MockRunData(id="run_1", status="completed")), + MockEvent( + "thread.run.failed", ErrorMockRunData(id="run_1", status="failed", last_error=MockError("Test error")) + ), + ] + + client = AsyncMock(spec=AsyncOpenAI) + definition = AsyncMock(spec=Assistant) + definition.id = "agent123" + definition.name = "agentName" + definition.description = "desc" + definition.instructions = "test agent" + definition.tools = [] + definition.model = "gpt-4o" + definition.temperature = 0.7 + definition.top_p = 0.9 + definition.metadata = {} + definition.response_format = {"type": "json_object"} + + agent = OpenAIAssistantAgent( + client=client, + definition=definition, + ) + + client.beta = MagicMock() + client.beta.threads = MagicMock() + client.beta.assistants = MagicMock() + client.beta.threads.runs = MagicMock() + client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) + client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) + + # Set up agent prompts + agent.instructions = "Base instructions" + agent.prompt_template = KernelPromptTemplate( + prompt_template_config=PromptTemplateConfig(template="Template instructions") + ) + + # Scenario A: Use only prompt template + messages = [] + async for content in AssistantThreadActions.invoke_stream(agent=agent, thread_id="thread_id", messages=messages): + assert content is not None + + +async def test_assistant_thread_actions_stream_run_fails( + mock_thread_messages, +): + events = [ + MockEvent("thread.run.failed", ErrorMockRunData(id=1, status="failed", last_error=MockError("Test error"))), + ] + + client = AsyncMock(spec=AsyncOpenAI) + definition = AsyncMock(spec=Assistant) + definition.id = "agent123" + definition.name = "agentName" + definition.description = "desc" + definition.instructions = "test agent" + definition.tools = [] + definition.model = "gpt-4o" + definition.temperature = 0.7 + definition.top_p = 0.9 + definition.metadata = {} + definition.response_format = {"type": "json_object"} + + agent = OpenAIAssistantAgent( + client=client, + definition=definition, + ) + + client.beta = MagicMock() + client.beta.threads = MagicMock() + client.beta.assistants = MagicMock() + client.beta.threads.runs = MagicMock() + client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) 
+ client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) + + # Set up agent prompts + agent.instructions = "Base instructions" + agent.prompt_template = KernelPromptTemplate( + prompt_template_config=PromptTemplateConfig(template="Template instructions") + ) + + # Scenario A: Use only prompt template + messages = [] + with pytest.raises(AgentInvokeException): + async for _ in AssistantThreadActions.invoke_stream(agent=agent, thread_id="thread_id", messages=messages): + pass + + +async def test_assistant_thread_actions_stream_with_instructions( + mock_thread_messages, +): + events = [ + MockEvent("thread.run.created", MockRunData(id="run_1", status="queued")), + MockEvent("thread.run.in_progress", MockRunData(id="run_1", status="in_progress")), + create_thread_message_delta_mock(), + create_thread_run_step_delta_mock(), + mock_thread_requires_action_run(), + mock_thread_run_step_completed(), + MockEvent("thread.run.completed", MockRunData(id="run_1", status="completed")), + ] + + client = AsyncMock(spec=AsyncOpenAI) + definition = AsyncMock(spec=Assistant) + definition.id = "agent123" + definition.name = "agentName" + definition.description = "desc" + definition.instructions = "test agent" + definition.tools = [] + definition.model = "gpt-4o" + definition.temperature = 0.7 + definition.top_p = 0.9 + definition.metadata = {} + definition.response_format = {"type": "json_object"} + + agent = OpenAIAssistantAgent( + client=client, + definition=definition, + ) + + client.beta = MagicMock() + client.beta.threads = MagicMock() + client.beta.assistants = MagicMock() + client.beta.threads.runs = MagicMock() + client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) + client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) + + # Set up agent prompts + agent.instructions = "Base instructions" + agent.prompt_template = KernelPromptTemplate( + prompt_template_config=PromptTemplateConfig(template="Template instructions") + ) + + # Scenario A: Use only prompt template + messages = [] + async for content in AssistantThreadActions.invoke_stream(agent=agent, thread_id="thread_id", messages=messages): + assert content is not None + + assert len(messages) > 0, "Expected messages to be populated during the stream." + client.beta.threads.runs.stream.assert_called_once_with( + assistant_id=agent.id, + thread_id="thread_id", + instructions="Template instructions", + tools=[], + temperature=0.7, + top_p=0.9, + model="gpt-4o", + metadata={}, + ) + + client.beta.threads.runs.stream.reset_mock() + + # Scenario B: Use prompt template with additional instructions + messages = [] + async for content in AssistantThreadActions.invoke_stream( + agent=agent, + thread_id="thread_id", + messages=messages, + additional_instructions="My additional instructions", + ): + assert content is not None + + assert len(messages) > 0, "Expected messages to be populated during the stream." 
+ client.beta.threads.runs.stream.assert_called_once_with( + assistant_id=agent.id, + thread_id="thread_id", + instructions="Template instructions\n\nMy additional instructions", + tools=[], + temperature=0.7, + top_p=0.9, + model="gpt-4o", + metadata={}, + ) + + client.beta.threads.runs.stream.reset_mock() + + +async def test_poll_loop_exits_on_status_change(): + AssistantThreadActions.polling_status = {"in_progress"} # type: ignore + + polling_interval = timedelta(seconds=0.01) + dummy_polling_options = MagicMock() + dummy_polling_options.get_polling_interval = lambda count: polling_interval + + run_id = "run_123" + initial_run = MagicMock() + initial_run.id = run_id + + run_in_progress = MagicMock() + run_in_progress.id = run_id + run_in_progress.status = "in_progress" + + run_completed = MagicMock() + run_completed.id = run_id + run_completed.status = "completed" + + dummy_agent = MagicMock() + dummy_agent.polling_options = dummy_polling_options + dummy_agent.client.beta.threads.runs.retrieve = AsyncMock(side_effect=[run_in_progress, run_completed]) + + thread_id = "thread_123" + + result_run = await AssistantThreadActions._poll_loop(dummy_agent, initial_run, thread_id) + + assert result_run.status == "completed" + + +async def test_handle_streaming_requires_action_returns_result(): + dummy_run = MagicMock() + dummy_run.id = "dummy_run_id" + dummy_function_steps = {"step1": MagicMock()} + dummy_fccs = {"fcc_key": "fcc_value"} + dummy_function_call_streaming_content = MagicMock() + dummy_function_result_streaming_content = MagicMock() + dummy_tool_outputs = {"output": "value"} + dummy_kernel = MagicMock() + dummy_agent_name = "TestAgent" + with ( + patch( + "semantic_kernel.agents.open_ai.assistant_thread_actions.get_function_call_contents", + return_value=dummy_fccs, + ), + patch( + "semantic_kernel.agents.open_ai.assistant_thread_actions.generate_function_call_streaming_content", + return_value=dummy_function_call_streaming_content, + ), + patch( + "semantic_kernel.agents.open_ai.assistant_thread_actions.merge_streaming_function_results", + return_value=[dummy_function_result_streaming_content], + ), + patch.object(AssistantThreadActions, "_invoke_function_calls", new=AsyncMock(return_value=None)), + patch.object(AssistantThreadActions, "_format_tool_outputs", return_value=dummy_tool_outputs), + ): + result = await AssistantThreadActions._handle_streaming_requires_action( + dummy_agent_name, + dummy_kernel, + dummy_run, + dummy_function_steps, # type: ignore + ) + assert result is not None + assert isinstance(result, FunctionActionResult) + assert result.function_call_streaming_content == dummy_function_call_streaming_content + assert result.function_result_streaming_content == dummy_function_result_streaming_content + assert result.tool_outputs == dummy_tool_outputs + + +async def test_handle_streaming_requires_action_returns_none(): + dummy_run = MagicMock() + dummy_run.id = "dummy_run_id" + dummy_function_steps = {"step1": MagicMock()} + dummy_kernel = MagicMock() + dummy_agent_name = "TestAgent" + with patch("semantic_kernel.agents.open_ai.assistant_thread_actions.get_function_call_contents", return_value=None): + result = await AssistantThreadActions._handle_streaming_requires_action( + dummy_agent_name, + dummy_kernel, + dummy_run, + dummy_function_steps, # type: ignore + ) + assert result is None diff --git a/python/tests/unit/agents/openai_assistant/test_azure_assistant_agent.py b/python/tests/unit/agents/openai_assistant/test_azure_assistant_agent.py new file mode 100644 
index 000000000000..123a19bd34cc --- /dev/null +++ b/python/tests/unit/agents/openai_assistant/test_azure_assistant_agent.py @@ -0,0 +1,387 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from openai import AsyncOpenAI +from openai.types.beta.assistant import Assistant +from openai.types.beta.threads.file_citation_annotation import FileCitation, FileCitationAnnotation +from openai.types.beta.threads.file_path_annotation import FilePath, FilePathAnnotation +from openai.types.beta.threads.image_file import ImageFile +from openai.types.beta.threads.image_file_content_block import ImageFileContentBlock +from openai.types.beta.threads.text import Text +from openai.types.beta.threads.text_content_block import TextContentBlock +from pydantic import BaseModel, ValidationError + +from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions +from semantic_kernel.contents.annotation_content import AnnotationContent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.file_reference_content import FileReferenceContent +from semantic_kernel.contents.text_content import TextContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException +from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.functions.kernel_plugin import KernelPlugin +from semantic_kernel.kernel import Kernel +from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig + + +class SamplePlugin: + @kernel_function + def test_plugin(self, *args, **kwargs): + pass + + +class ResponseModelPydantic(BaseModel): + response: str + items: list[str] + + +class ResponseModelNonPydantic: + response: str + items: list[str] + + +@pytest.fixture +def mock_thread_messages(): + class MockMessage: + def __init__(self, id, role, content, assistant_id=None): + self.id = id + self.role = role + self.content = content + self.assistant_id = assistant_id + + return [ + MockMessage( + id="test_message_id_1", + role="user", + content=[ + TextContentBlock( + type="text", + text=Text( + value="Hello", + annotations=[ + FilePathAnnotation( + type="file_path", + file_path=FilePath(file_id="test_file_id"), + end_index=5, + start_index=0, + text="Hello", + ), + FileCitationAnnotation( + type="file_citation", + file_citation=FileCitation(file_id="test_file_id"), + text="Hello", + start_index=0, + end_index=5, + ), + ], + ), + ) + ], + ), + MockMessage( + id="test_message_id_2", + role="assistant", + content=[ + ImageFileContentBlock(type="image_file", image_file=ImageFile(file_id="test_file_id", detail="auto")) + ], + assistant_id="assistant_1", + ), + ] + + +async def test_open_ai_assistant_agent_init(): + sample_prompt_template_config = PromptTemplateConfig( + template="template", + ) + + kernel_plugin = KernelPlugin(name="expected_plugin_name", description="expected_plugin_description") + + client = AsyncMock(spec=AsyncOpenAI) + definition = AsyncMock(spec=Assistant) + definition.id = "agent123" + definition.name = "agentName" + definition.description = "desc" + definition.instructions = "test agent" + agent = AzureAssistantAgent( + client=client, + definition=definition, + 
arguments=KernelArguments(test="test"), + kernel=AsyncMock(spec=Kernel), + plugins=[SamplePlugin(), kernel_plugin], + polling_options=AsyncMock(spec=RunPollingOptions), + prompt_template_config=sample_prompt_template_config, # type: ignore + other_arg="test", # type: ignore + ) + assert agent.id == "agent123" + assert agent.name == "agentName" + assert agent.description == "desc" + + +def test_azure_open_ai_settings_create_throws(azure_openai_unit_test_env): + with patch( + "semantic_kernel.connectors.ai.open_ai.settings.azure_open_ai_settings.AzureOpenAISettings.create" + ) as mock_create: + mock_create.side_effect = ValidationError.from_exception_data("test", line_errors=[], input_type="python") + + with pytest.raises(AgentInitializationException, match="Failed to create Azure OpenAI settings."): + _, _ = AzureAssistantAgent.setup_resources(api_key="test_api_key") + + +def test_open_ai_assistant_with_code_interpreter_tool(): + tools, resources = AzureAssistantAgent.configure_code_interpreter_tool(file_ids=["file_id"]) + assert tools is not None + assert resources is not None + + +def test_open_ai_assistant_with_file_search_tool(): + tools, resources = AzureAssistantAgent.configure_file_search_tool(vector_store_ids=["vector_store_id"]) + assert tools is not None + assert resources is not None + + +@pytest.mark.parametrize( + "model, json_schema_expected", + [ + pytest.param(ResponseModelPydantic, True), + pytest.param(ResponseModelNonPydantic, True), + pytest.param({"type": "json_object"}, False), + pytest.param({"type": "json_schema", "json_schema": {"schema": {}}}, False), + ], +) +def test_configure_response_format(model, json_schema_expected): + response_format = AzureAssistantAgent.configure_response_format(model) + assert response_format is not None + if json_schema_expected: + assert response_format["json_schema"] is not None # type: ignore + + +def test_configure_response_format_unexpected_type(): + with pytest.raises(AgentInitializationException) as exc_info: + AzureAssistantAgent.configure_response_format({"type": "invalid_type"}) + assert "Encountered unexpected response_format type" in str(exc_info.value) + + +def test_configure_response_format_json_schema_invalid_schema(): + with pytest.raises(AgentInitializationException) as exc_info: + AzureAssistantAgent.configure_response_format({"type": "json_schema", "json_schema": "not_a_dict"}) + assert "If response_format has type 'json_schema'" in str(exc_info.value) + + +def test_configure_response_format_invalid_input_type(): + with pytest.raises(AgentInitializationException) as exc_info: + AzureAssistantAgent.configure_response_format(3) # type: ignore + assert "response_format must be a dictionary" in str(exc_info.value) + + +@pytest.mark.parametrize( + "message", + [ + pytest.param(ChatMessageContent(role=AuthorRole.USER, content="text")), + pytest.param("text"), + ], +) +async def test_open_ai_assistant_agent_add_chat_message(message): + client = AsyncMock(spec=AsyncOpenAI) + definition = AsyncMock(spec=Assistant) + definition.id = "agent123" + definition.name = "agentName" + definition.description = "desc" + definition.instructions = "test agent" + agent = AzureAssistantAgent(client=client, definition=definition) + with patch( + "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.create_message", + ): + await agent.add_chat_message("threadId", message) + + +@pytest.mark.parametrize( + "arguments, include_args", + [ + pytest.param({"extra_args": "extra_args"}, True), + pytest.param(None, False), + 
], +) +async def test_open_ai_assistant_agent_invoke(arguments, include_args): + client = AsyncMock(spec=AsyncOpenAI) + definition = AsyncMock(spec=Assistant) + definition.id = "agent123" + definition.name = "agentName" + definition.description = "desc" + definition.instructions = "test agent" + definition.tools = [] + definition.model = "gpt-4o" + definition.response_format = {"type": "json_object"} + definition.temperature = 0.1 + definition.top_p = 0.9 + definition.metadata = {} + agent = AzureAssistantAgent(client=client, definition=definition) + results = [] + + async def fake_invoke(*args, **kwargs): + yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") + + kwargs = None + if include_args: + kwargs = arguments + + with patch( + "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke", + side_effect=fake_invoke, + ): + async for item in agent.invoke("thread_id", **(kwargs or {})): + results.append(item) + + assert len(results) == 1 + + +@pytest.mark.parametrize( + "arguments, include_args", + [ + pytest.param({"extra_args": "extra_args"}, True), + pytest.param(None, False), + ], +) +async def test_open_ai_assistant_agent_invoke_stream(arguments, include_args): + client = AsyncMock(spec=AsyncOpenAI) + definition = AsyncMock(spec=Assistant) + definition.id = "agent123" + definition.name = "agentName" + definition.description = "desc" + definition.instructions = "test agent" + agent = AzureAssistantAgent(client=client, definition=definition) + results = [] + + async def fake_invoke(*args, **kwargs): + yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") + + kwargs = None + if include_args: + kwargs = arguments + + with patch( + "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke_stream", + side_effect=fake_invoke, + ): + async for item in agent.invoke_stream("thread_id", **(kwargs or {})): + results.append(item) + + assert len(results) == 1 + + +def test_open_ai_assistant_agent_get_channel_keys(): + client = AsyncMock(spec=AsyncOpenAI) + definition = AsyncMock(spec=Assistant) + definition.id = "agent123" + definition.name = "agentName" + definition.description = "desc" + definition.instructions = "test agent" + agent = AzureAssistantAgent(client=client, definition=definition) + keys = list(agent.get_channel_keys()) + assert len(keys) >= 3 + + +@pytest.fixture +def mock_thread(): + class MockThread: + id = "test_thread_id" + + return MockThread() + + +async def test_open_ai_assistant_agent_create_channel(mock_thread): + from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel + + client = AsyncMock(spec=AsyncOpenAI) + definition = AsyncMock(spec=Assistant) + definition.id = "agent123" + definition.name = "agentName" + definition.description = "desc" + definition.instructions = "test agent" + agent = AzureAssistantAgent(client=client, definition=definition) + client.beta = MagicMock() + client.beta.assistants = MagicMock() + client.beta.assistants.create = AsyncMock(return_value=definition) + client.beta.threads = MagicMock() + client.beta.threads.create = AsyncMock(return_value=mock_thread) + ch = await agent.create_channel() + assert isinstance(ch, OpenAIAssistantChannel) + assert ch.thread_id == "test_thread_id" + + +def test_create_openai_client(azure_openai_unit_test_env): + client, model = AzureAssistantAgent.setup_resources(api_key="test_api_key", default_headers={"user_agent": "test"}) + assert client is not None + assert 
client.api_key == "test_api_key" + assert model is not None + + +def test_create_azure_openai_client(azure_openai_unit_test_env): + client, model = AzureAssistantAgent.setup_resources( + api_key="test_api_key", endpoint="https://test_endpoint.com", default_headers={"user_agent": "test"} + ) + assert model is not None + assert client is not None + assert client.api_key == "test_api_key" + assert str(client.base_url) == "https://test_endpoint.com/openai/" + + +@pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_ENDPOINT"]], indirect=True) +async def test_retrieve_agent_missing_endpoint_throws(kernel, azure_openai_unit_test_env): + with pytest.raises(AgentInitializationException, match="Please provide an Azure OpenAI endpoint"): + _, _ = AzureAssistantAgent.setup_resources( + env_file_path="./", api_key="test_api_key", default_headers={"user_agent": "test"} + ) + + +@pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"]], indirect=True) +async def test_retrieve_agent_missing_chat_deployment_name_throws(kernel, azure_openai_unit_test_env): + with pytest.raises(AgentInitializationException, match="Please provide an Azure OpenAI deployment name"): + _, _ = AzureAssistantAgent.setup_resources( + env_file_path="./", + api_key="test_api_key", + endpoint="https://test_endpoint.com", + default_headers={"user_agent": "test"}, + ) + + +async def test_get_thread_messages(mock_thread_messages, openai_unit_test_env): + async def mock_list_messages(*args, **kwargs) -> Any: + return MagicMock(data=mock_thread_messages) + + async def mock_retrieve_assistant(*args, **kwargs) -> Any: + asst = AsyncMock(spec=Assistant) + asst.name = "test-assistant" + return asst + + mock_client = AsyncMock(spec=AsyncOpenAI) + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.threads.messages = MagicMock() + mock_client.beta.threads.messages.list = AsyncMock(side_effect=mock_list_messages) + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.retrieve = AsyncMock(side_effect=mock_retrieve_assistant) + + definition = AsyncMock(spec=Assistant) + definition.id = "agent123" + definition.name = "agentName" + definition.description = "desc" + definition.instructions = "test agent" + agent = AzureAssistantAgent(client=mock_client, definition=definition) + + messages = [message async for message in agent.get_thread_messages("test_thread_id")] + + assert len(messages) == 2 + assert len(messages[0].items) == 3 + assert isinstance(messages[0].items[0], TextContent) + assert isinstance(messages[0].items[1], AnnotationContent) + assert isinstance(messages[0].items[2], AnnotationContent) + assert messages[0].items[0].text == "Hello" + + assert len(messages[1].items) == 1 + assert isinstance(messages[1].items[0], FileReferenceContent) + assert str(messages[1].items[0].file_id) == "test_file_id" diff --git a/python/tests/unit/agents/openai_assistant/test_open_ai_assistant_agent.py b/python/tests/unit/agents/openai_assistant/test_open_ai_assistant_agent.py new file mode 100644 index 000000000000..45b46d02aff4 --- /dev/null +++ b/python/tests/unit/agents/openai_assistant/test_open_ai_assistant_agent.py @@ -0,0 +1,294 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from unittest.mock import AsyncMock, patch + +import pytest +from pydantic import BaseModel, ValidationError + +from semantic_kernel.agents.open_ai import OpenAIAssistantAgent +from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions +from semantic_kernel.contents.annotation_content import AnnotationContent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.file_reference_content import FileReferenceContent +from semantic_kernel.contents.text_content import TextContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException, AgentInvokeException +from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.functions.kernel_plugin import KernelPlugin +from semantic_kernel.kernel import Kernel +from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig + + +class SamplePlugin: + @kernel_function + def test_plugin(self, *args, **kwargs): + pass + + +class ResponseModelPydantic(BaseModel): + response: str + items: list[str] + + +class ResponseModelNonPydantic: + response: str + items: list[str] + + +async def test_open_ai_assistant_agent_init(openai_client, assistant_definition): + sample_prompt_template_config = PromptTemplateConfig( + template="template", + ) + + kernel_plugin = KernelPlugin(name="expected_plugin_name", description="expected_plugin_description") + + agent = OpenAIAssistantAgent( + client=openai_client, + definition=assistant_definition, + arguments=KernelArguments(test="test"), + kernel=AsyncMock(spec=Kernel), + plugins=[SamplePlugin(), kernel_plugin], + polling_options=AsyncMock(spec=RunPollingOptions), + prompt_template_config=sample_prompt_template_config, + other_arg="test", + ) + assert agent.id == "agent123" + assert agent.name == "agentName" + assert agent.description == "desc" + + +def test_open_ai_settings_create_throws(openai_unit_test_env): + with patch("semantic_kernel.connectors.ai.open_ai.settings.open_ai_settings.OpenAISettings.create") as mock_create: + mock_create.side_effect = ValidationError.from_exception_data("test", line_errors=[], input_type="python") + + with pytest.raises(AgentInitializationException, match="Failed to create OpenAI settings."): + _, _ = OpenAIAssistantAgent.setup_resources(api_key="test_api_key") + + +def test_open_ai_assistant_with_code_interpreter_tool(): + tools, resources = OpenAIAssistantAgent.configure_code_interpreter_tool(file_ids=["file_id"]) + assert tools is not None + assert resources is not None + + +def test_open_ai_assistant_with_file_search_tool(): + tools, resources = OpenAIAssistantAgent.configure_file_search_tool(vector_store_ids=["vector_store_id"]) + assert tools is not None + assert resources is not None + + +@pytest.mark.parametrize( + "model, json_schema_expected", + [ + pytest.param(ResponseModelPydantic, True), + pytest.param(ResponseModelNonPydantic, True), + pytest.param({"type": "json_object"}, False), + pytest.param({"type": "json_schema", "json_schema": {"schema": {}}}, False), + ], +) +def test_configure_response_format(model, json_schema_expected): + response_format = OpenAIAssistantAgent.configure_response_format(model) + assert response_format is not None + if json_schema_expected: + assert response_format["json_schema"] is not None # type: ignore + + +def 
test_configure_response_format_unexpected_type(): + with pytest.raises(AgentInitializationException) as exc_info: + OpenAIAssistantAgent.configure_response_format({"type": "invalid_type"}) + assert "Encountered unexpected response_format type" in str(exc_info.value) + + +def test_configure_response_format_json_schema_invalid_schema(): + with pytest.raises(AgentInitializationException) as exc_info: + OpenAIAssistantAgent.configure_response_format({"type": "json_schema", "json_schema": "not_a_dict"}) + assert "If response_format has type 'json_schema'" in str(exc_info.value) + + +def test_configure_response_format_invalid_input_type(): + with pytest.raises(AgentInitializationException) as exc_info: + OpenAIAssistantAgent.configure_response_format(3) # type: ignore + assert "response_format must be a dictionary" in str(exc_info.value) + + +@pytest.mark.parametrize( + "message", + [ + pytest.param(ChatMessageContent(role=AuthorRole.USER, content="text")), + pytest.param("text"), + ], +) +async def test_open_ai_assistant_agent_add_chat_message(message, openai_client, assistant_definition): + agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) + with patch( + "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.create_message", + ): + await agent.add_chat_message("threadId", message) + + +@pytest.mark.parametrize( + "arguments, include_args", + [ + pytest.param({"extra_args": "extra_args"}, True), + pytest.param(None, False), + ], +) +async def test_open_ai_assistant_agent_get_response(arguments, include_args, openai_client, assistant_definition): + agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) + + async def fake_invoke(*args, **kwargs): + yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") + + kwargs = None + if include_args: + kwargs = arguments + + with patch( + "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke", + side_effect=fake_invoke, + ): + response = await agent.get_response("thread_id", **(kwargs or {})) + + assert response is not None + assert response.content == "content" + + +@pytest.mark.parametrize( + "arguments, include_args", + [ + pytest.param({"extra_args": "extra_args"}, True), + pytest.param(None, False), + ], +) +async def test_open_ai_assistant_agent_get_response_exception( + arguments, include_args, openai_client, assistant_definition +): + agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) + + async def fake_invoke(*args, **kwargs): + yield False, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") + + kwargs = None + if include_args: + kwargs = arguments + + with ( + patch( + "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke", + side_effect=fake_invoke, + ), + pytest.raises(AgentInvokeException), + ): + await agent.get_response("thread_id", **(kwargs or {})) + + +@pytest.mark.parametrize( + "arguments, include_args", + [ + pytest.param({"extra_args": "extra_args"}, True), + pytest.param(None, False), + ], +) +async def test_open_ai_assistant_agent_invoke(arguments, include_args, openai_client, assistant_definition): + agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) + results = [] + + async def fake_invoke(*args, **kwargs): + yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") + + kwargs = None + if include_args: + kwargs = arguments + + with patch( + 
"semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke", + side_effect=fake_invoke, + ): + async for item in agent.invoke("thread_id", **(kwargs or {})): + results.append(item) + + assert len(results) == 1 + + +@pytest.mark.parametrize( + "arguments, include_args", + [ + pytest.param({"extra_args": "extra_args"}, True), + pytest.param(None, False), + ], +) +async def test_open_ai_assistant_agent_invoke_stream(arguments, include_args, openai_client, assistant_definition): + agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) + results = [] + + async def fake_invoke(*args, **kwargs): + yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") + + kwargs = None + if include_args: + kwargs = arguments + + with patch( + "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke_stream", + side_effect=fake_invoke, + ): + async for item in agent.invoke_stream("thread_id", **(kwargs or {})): + results.append(item) + + assert len(results) == 1 + + +def test_open_ai_assistant_agent_get_channel_keys(openai_client, assistant_definition): + agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) + keys = list(agent.get_channel_keys()) + assert len(keys) >= 3 + + +async def test_open_ai_assistant_agent_create_channel(openai_client, assistant_definition): + from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel + + agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) + ch = await agent.create_channel() + assert isinstance(ch, OpenAIAssistantChannel) + assert ch.thread_id == "test_thread_id" + + +def test_create_openai_client(openai_unit_test_env): + client, model = OpenAIAssistantAgent.setup_resources(env_file_path="./", default_headers={"user_agent": "test"}) + assert client is not None + assert client.api_key == "test_api_key" + assert model is not None + + +@pytest.mark.parametrize("exclude_list", [["OPENAI_API_KEY"]], indirect=True) +async def test_open_ai_agent_missing_api_key_throws(kernel, openai_unit_test_env): + with pytest.raises(AgentInitializationException, match="The OpenAI API key is required."): + _, _ = OpenAIAssistantAgent.setup_resources(env_file_path="./", default_headers={"user_agent": "test"}) + + +@pytest.mark.parametrize("exclude_list", [["OPENAI_CHAT_MODEL_ID"]], indirect=True) +async def test_open_ai_agent_missing_chat_deployment_name_throws(kernel, openai_unit_test_env): + with pytest.raises(AgentInitializationException, match="The OpenAI model ID is required."): + _, _ = OpenAIAssistantAgent.setup_resources( + env_file_path="./", + api_key="test_api_key", + default_headers={"user_agent": "test"}, + ) + + +async def test_get_thread_messages(mock_thread_messages, openai_client, assistant_definition, openai_unit_test_env): + agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) + + messages = [message async for message in agent.get_thread_messages("test_thread_id")] + + assert len(messages) == 2 + assert len(messages[0].items) == 3 + assert isinstance(messages[0].items[0], TextContent) + assert isinstance(messages[0].items[1], AnnotationContent) + assert isinstance(messages[0].items[2], AnnotationContent) + assert messages[0].items[0].text == "Hello" + + assert len(messages[1].items) == 1 + assert isinstance(messages[1].items[0], FileReferenceContent) + assert str(messages[1].items[0].file_id) == "test_file_id" diff --git 
a/python/tests/unit/agents/test_open_ai_assistant_channel.py b/python/tests/unit/agents/openai_assistant/test_open_ai_assistant_channel.py similarity index 82% rename from python/tests/unit/agents/test_open_ai_assistant_channel.py rename to python/tests/unit/agents/openai_assistant/test_open_ai_assistant_channel.py index 92f076de53da..64026abf4724 100644 --- a/python/tests/unit/agents/test_open_ai_assistant_channel.py +++ b/python/tests/unit/agents/openai_assistant/test_open_ai_assistant_channel.py @@ -1,25 +1,27 @@ # Copyright (c) Microsoft. All rights reserved. +import json from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest from openai import AsyncOpenAI from openai.types.beta.assistant import Assistant, ToolResources, ToolResourcesCodeInterpreter, ToolResourcesFileSearch -from openai.types.beta.threads.annotation import FileCitationAnnotation, FilePathAnnotation -from openai.types.beta.threads.file_citation_annotation import FileCitation -from openai.types.beta.threads.file_path_annotation import FilePath +from openai.types.beta.threads.file_citation_annotation import FileCitation, FileCitationAnnotation +from openai.types.beta.threads.file_path_annotation import FilePath, FilePathAnnotation from openai.types.beta.threads.image_file import ImageFile from openai.types.beta.threads.image_file_content_block import ImageFileContentBlock from openai.types.beta.threads.text import Text from openai.types.beta.threads.text_content_block import TextContentBlock from semantic_kernel.agents.chat_completion.chat_completion_agent import ChatCompletionAgent -from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase +from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions.agent_exceptions import AgentChatException +from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.kernel import Kernel @pytest.fixture @@ -48,7 +50,7 @@ def __init__(self, role, content, assistant_id=None): ), FileCitationAnnotation( type="file_citation", - file_citation=FileCitation(file_id="test_file_id", quote="test quote"), + file_citation=FileCitation(file_id="test_file_id"), text="Hello", start_index=0, end_index=5, @@ -74,22 +76,22 @@ def mock_assistant(): created_at=123456789, object="assistant", metadata={ - "__run_options": { + "__run_options": json.dumps({ "max_completion_tokens": 100, "max_prompt_tokens": 50, "parallel_tool_calls_enabled": True, "truncation_message_count": 10, - } + }) }, model="test_model", description="test_description", id="test_id", instructions="test_instructions", name="test_name", - tools=[{"type": "code_interpreter"}, {"type": "file_search"}], + tools=[{"type": "code_interpreter"}, {"type": "file_search"}], # type: ignore temperature=0.7, top_p=0.9, - response_format={"type": "json_object"}, + response_format={"type": "json_object"}, # type: ignore tool_resources=ToolResources( code_interpreter=ToolResourcesCodeInterpreter(file_ids=["file1", "file2"]), file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), @@ -109,27 +111,38 @@ async def test_receive_messages(): ] with 
patch("semantic_kernel.agents.open_ai.assistant_content_generation.create_chat_message"): - await channel.receive(history) + await channel.receive(history) # type: ignore async def test_invoke_agent(): from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel - client = MagicMock(spec=AsyncOpenAI) - thread_id = "test_thread" - agent = MagicMock(spec=OpenAIAssistantBase) - agent._is_deleted = False - channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) + client = AsyncMock(spec=AsyncOpenAI) + definition = AsyncMock(spec=Assistant) + definition.id = "agent123" + definition.name = "agentName" + definition.description = "desc" + definition.instructions = "test agent" + agent = OpenAIAssistantAgent( + client=client, + definition=definition, + arguments=KernelArguments(test="test"), + kernel=AsyncMock(spec=Kernel), + ) + + channel = OpenAIAssistantChannel(client=client, thread_id="test_thread_id") async def mock_invoke_internal(*args, **kwargs): for _ in range(3): yield True, MagicMock(spec=ChatMessageContent) - agent._invoke_internal.side_effect = mock_invoke_internal - results = [] - async for is_visible, message in channel.invoke(agent): - results.append((is_visible, message)) + with patch( + "semantic_kernel.agents.channels.open_ai_assistant_channel.AssistantThreadActions.invoke", + side_effect=mock_invoke_internal, + ): + async for is_visible, message in channel.invoke(agent): + results.append((is_visible, message)) assert len(results) == 3 for is_visible, message in results: @@ -146,7 +159,7 @@ async def test_invoke_agent_invalid_instance_throws(): agent._is_deleted = False channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) - with pytest.raises(AgentChatException, match=f"Agent is not of the expected type {type(OpenAIAssistantBase)}."): + with pytest.raises(AgentChatException, match=f"Agent is not of the expected type {type(OpenAIAssistantAgent)}."): async for _, _ in channel.invoke(agent): pass @@ -154,11 +167,20 @@ async def test_invoke_agent_invalid_instance_throws(): async def test_invoke_streaming_agent(): from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel - client = MagicMock(spec=AsyncOpenAI) - thread_id = "test_thread" - agent = MagicMock(spec=OpenAIAssistantBase) - agent._is_deleted = False - channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) + client = AsyncMock(spec=AsyncOpenAI) + definition = AsyncMock(spec=Assistant) + definition.id = "agent123" + definition.name = "agentName" + definition.description = "desc" + definition.instructions = "test agent" + agent = OpenAIAssistantAgent( + client=client, + definition=definition, + arguments=KernelArguments(test="test"), + kernel=AsyncMock(spec=Kernel), + ) + + channel = OpenAIAssistantChannel(client=client, thread_id="test_thread_id") results = [] @@ -168,10 +190,12 @@ async def mock_invoke_internal(*args, **kwargs): yield msg results.append(msg) - agent._invoke_internal_stream.side_effect = mock_invoke_internal - - async for message in channel.invoke_stream(agent, results): - assert message is not None + with patch( + "semantic_kernel.agents.channels.open_ai_assistant_channel.AssistantThreadActions.invoke_stream", + side_effect=mock_invoke_internal, + ): + async for message in channel.invoke_stream(agent, results): + assert message is not None assert len(results) == 3 for message in results: @@ -187,35 +211,7 @@ async def test_invoke_streaming_agent_invalid_instance_throws(): agent._is_deleted = False 
channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) - with pytest.raises(AgentChatException, match=f"Agent is not of the expected type {type(OpenAIAssistantBase)}."): - async for _ in channel.invoke_stream(agent, []): - pass - - -async def test_invoke_agent_deleted(): - from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel - - client = MagicMock(spec=AsyncOpenAI) - thread_id = "test_thread" - agent = MagicMock(spec=OpenAIAssistantBase) - agent._is_deleted = True - channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) - - with pytest.raises(AgentChatException, match="Agent is deleted"): - async for _ in channel.invoke(agent): - pass - - -async def test_invoke_streaming_agent_deleted(): - from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel - - client = MagicMock(spec=AsyncOpenAI) - thread_id = "test_thread" - agent = MagicMock(spec=OpenAIAssistantBase) - agent._is_deleted = True - channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) - - with pytest.raises(AgentChatException, match="Agent is deleted"): + with pytest.raises(AgentChatException, match=f"Agent is not of the expected type {type(OpenAIAssistantAgent)}."): async for _ in channel.invoke_stream(agent, []): pass diff --git a/python/tests/unit/agents/test_agent.py b/python/tests/unit/agents/test_agent.py index d01a6a9ba0e8..ecbfc63e9277 100644 --- a/python/tests/unit/agents/test_agent.py +++ b/python/tests/unit/agents/test_agent.py @@ -1,15 +1,38 @@ # Copyright (c) Microsoft. All rights reserved. +import sys import uuid +from typing import ClassVar from unittest.mock import AsyncMock +import pytest + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + from semantic_kernel.agents import Agent from semantic_kernel.agents.channels.agent_channel import AgentChannel +from semantic_kernel.functions.kernel_arguments import KernelArguments + + +class MockChatHistory: + """Minimal mock for ChatHistory to hold messages.""" + + def __init__(self, messages=None): + self.messages = messages if messages is not None else [] + + +class MockChannel(AgentChannel): + """Mock channel for testing get_channel_keys and create_channel.""" class MockAgent(Agent): """A mock agent for testing purposes.""" + channel_type: ClassVar[type[AgentChannel]] = MockChannel + def __init__(self, name: str = "Test-Agent", description: str = "A test agent", id: str = None): args = { "name": name, @@ -19,12 +42,25 @@ def __init__(self, name: str = "Test-Agent", description: str = "A test agent", args["id"] = id super().__init__(**args) - def get_channel_keys(self) -> list[str]: - return ["key1", "key2"] - async def create_channel(self) -> AgentChannel: return AsyncMock(spec=AgentChannel) + @override + async def get_response(self, *args, **kwargs): + raise NotImplementedError + + @override + async def invoke(self, *args, **kwargs): + raise NotImplementedError + + @override + async def invoke_stream(self, *args, **kwargs): + raise NotImplementedError + + +class MockAgentWithoutChannelType(MockAgent): + channel_type = None + async def test_agent_initialization(): name = "TestAgent" @@ -49,7 +85,7 @@ def test_get_channel_keys(): agent = MockAgent() keys = agent.get_channel_keys() - assert keys == ["key1", "key2"] + assert len(list(keys)) == 1, "Should return a single key" async def test_create_channel(): @@ -91,3 +127,47 @@ async def test_agent_hash(): agent3 
= MockAgent(name="TestAgent", description="A different description", id=id_value) assert hash(agent1) != hash(agent3) + + +def test_get_channel_keys_no_channel_type(): + agent = MockAgentWithoutChannelType() + with pytest.raises(NotImplementedError): + list(agent.get_channel_keys()) + + +def test_merge_arguments_both_none(): + agent = MockAgent() + merged = agent._merge_arguments(None) + assert isinstance(merged, KernelArguments) + assert len(merged) == 0, "If both arguments are None, should return an empty KernelArguments object" + + +def test_merge_arguments_agent_none_override_not_none(): + agent = MockAgent() + override = KernelArguments(settings={"key": "override"}, param1="val1") + + merged = agent._merge_arguments(override) + assert merged is override, "If agent.arguments is None, just return override_args" + + +def test_merge_arguments_override_none_agent_not_none(): + agent = MockAgent() + agent.arguments = KernelArguments(settings={"key": "base"}, param1="baseVal") + + merged = agent._merge_arguments(None) + assert merged is agent.arguments, "If override_args is None, should return the agent's arguments" + + +def test_merge_arguments_both_not_none(): + agent = MockAgent() + agent.arguments = KernelArguments(settings={"key1": "val1", "common": "base"}, param1="baseVal") + override = KernelArguments(settings={"key2": "override_val", "common": "override"}, param2="override_param") + + merged = agent._merge_arguments(override) + + assert merged.execution_settings["key1"] == "val1", "Should retain original setting from agent" + assert merged.execution_settings["key2"] == "override_val", "Should include new setting from override" + assert merged.execution_settings["common"] == "override", "Override should take precedence" + + assert merged["param1"] == "baseVal", "Should retain base param from agent" + assert merged["param2"] == "override_param", "Should include param from override" diff --git a/python/tests/unit/agents/test_azure_assistant_agent.py b/python/tests/unit/agents/test_azure_assistant_agent.py deleted file mode 100644 index 3748065ac761..000000000000 --- a/python/tests/unit/agents/test_azure_assistant_agent.py +++ /dev/null @@ -1,570 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from unittest.mock import AsyncMock, MagicMock, mock_open, patch - -import pytest -from openai import AsyncAzureOpenAI -from openai.resources.beta.assistants import Assistant -from openai.types.beta.assistant import ToolResources, ToolResourcesCodeInterpreter, ToolResourcesFileSearch -from pydantic import ValidationError - -from semantic_kernel.agents.open_ai import AzureAssistantAgent -from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase -from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException -from semantic_kernel.kernel import Kernel - - -@pytest.fixture -def azure_openai_assistant_agent(kernel: Kernel, azure_openai_unit_test_env): - return AzureAssistantAgent( - kernel=kernel, - service_id="test_service", - name="test_name", - instructions="test_instructions", - api_key="test_api_key", - endpoint="https://test.endpoint", - ai_model_id="test_model", - api_version="2024-05-01-preview", - default_headers={"User-Agent": "test-agent"}, - ) - - -@pytest.fixture -def mock_assistant(): - return Assistant( - created_at=123456789, - object="assistant", - metadata={ - "__run_options": { - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - }, - model="test_model", - description="test_description", - id="test_id", - instructions="test_instructions", - name="test_name", - tools=[{"type": "code_interpreter"}, {"type": "file_search"}], - temperature=0.7, - top_p=0.9, - response_format={"type": "json_object"}, - tool_resources=ToolResources( - code_interpreter=ToolResourcesCodeInterpreter(code_interpreter_file_ids=["file1", "file2"]), - file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), - ), - ) - - -def test_initialization(azure_openai_assistant_agent: AzureAssistantAgent, azure_openai_unit_test_env): - agent = azure_openai_assistant_agent - assert agent is not None - - -def test_create_client(azure_openai_assistant_agent, azure_openai_unit_test_env): - assert isinstance(azure_openai_assistant_agent.client, AsyncAzureOpenAI) - - -def test_create_client_from_configuration(azure_openai_assistant_agent, azure_openai_unit_test_env): - assert isinstance(azure_openai_assistant_agent.client, AsyncAzureOpenAI) - assert azure_openai_assistant_agent.client.api_key == "test_api_key" - - -def test_create_client_from_configuration_missing_api_key(): - with pytest.raises( - AgentInitializationException, - match="Please provide either AzureOpenAI api_key, an ad_token, ad_token_provider, or a client.", - ): - AzureAssistantAgent._create_client(None) - - -def test_create_client_from_configuration_missing_endpoint(): - with pytest.raises( - AgentInitializationException, - match="Please provide an AzureOpenAI endpoint.", - ): - AzureAssistantAgent._create_client(api_key="test") - - -async def test_create_agent(kernel: Kernel, azure_openai_unit_test_env): - with patch.object(AzureAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant: - mock_create_assistant.return_value = MagicMock(spec=Assistant) - agent = await AzureAssistantAgent.create( - kernel=kernel, service_id="test_service", name="test_name", api_key="test_api_key", api_version="2024-05-01" - ) - assert agent.assistant is not None - mock_create_assistant.assert_called_once() - await agent.client.close() - - -async def test_create_agent_with_files(kernel: Kernel, azure_openai_unit_test_env): - mock_open_file = mock_open(read_data="file_content") - with ( - 
patch("builtins.open", mock_open_file), - patch( - "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.add_file", - return_value="test_file_id", - ), - patch( - "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.create_vector_store", - return_value="vector_store_id", - ), - patch.object(AzureAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant, - ): - mock_create_assistant.return_value = MagicMock(spec=Assistant) - agent = await AzureAssistantAgent.create( - kernel=kernel, - service_id="test_service", - name="test_name", - api_key="test_api_key", - api_version="2024-05-01", - code_interpreter_filenames=["file1", "file2"], - vector_store_filenames=["file3", "file4"], - enable_code_interpreter=True, - enable_file_search=True, - ) - assert agent.assistant is not None - mock_create_assistant.assert_called_once() - - -async def test_create_agent_with_code_files_not_found_raises_exception(kernel: Kernel, azure_openai_unit_test_env): - mock_open_file = mock_open(read_data="file_content") - with ( - patch("builtins.open", mock_open_file), - patch( - "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.add_file", - side_effect=FileNotFoundError("File not found"), - ), - patch.object(AzureAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant, - ): - mock_create_assistant.return_value = MagicMock(spec=Assistant) - with pytest.raises(AgentInitializationException, match="Failed to upload code interpreter files."): - _ = await AzureAssistantAgent.create( - kernel=kernel, - service_id="test_service", - deployment_name="test_deployment_name", - name="test_name", - api_key="test_api_key", - api_version="2024-05-01", - code_interpreter_filenames=["file1", "file2"], - ) - - -async def test_create_agent_with_search_files_not_found_raises_exception(kernel: Kernel, azure_openai_unit_test_env): - mock_open_file = mock_open(read_data="file_content") - with ( - patch("builtins.open", mock_open_file), - patch( - "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.add_file", - side_effect=FileNotFoundError("File not found"), - ), - patch.object(AzureAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant, - ): - mock_create_assistant.return_value = MagicMock(spec=Assistant) - with pytest.raises(AgentInitializationException, match="Failed to upload vector store files."): - _ = await AzureAssistantAgent.create( - kernel=kernel, - service_id="test_service", - deployment_name="test_deployment_name", - name="test_name", - api_key="test_api_key", - api_version="2024-05-01", - vector_store_filenames=["file3", "file4"], - ) - - -async def test_list_definitions(kernel: Kernel, mock_assistant, azure_openai_unit_test_env): - agent = AzureAssistantAgent( - kernel=kernel, service_id="test_service", name="test_name", instructions="test_instructions", id="test_id" - ) - - with patch.object( - AzureAssistantAgent, "_create_client", return_value=MagicMock(spec=AsyncAzureOpenAI) - ) as mock_create_client: - mock_client_instance = mock_create_client.return_value - mock_client_instance.beta = MagicMock() - mock_client_instance.beta.assistants = MagicMock() - mock_client_instance.beta.assistants.list = AsyncMock(return_value=MagicMock(data=[mock_assistant])) - - agent.client = mock_client_instance - - definitions = [] - async for definition in agent.list_definitions(): - definitions.append(definition) - - 
mock_client_instance.beta.assistants.list.assert_called() - - assert len(definitions) == 1 - assert definitions[0] == { - "ai_model_id": "test_model", - "description": "test_description", - "id": "test_id", - "instructions": "test_instructions", - "name": "test_name", - "enable_code_interpreter": True, - "enable_file_search": True, - "enable_json_response": True, - "code_interpreter_file_ids": ["file1", "file2"], - "temperature": 0.7, - "top_p": 0.9, - "vector_store_id": "vector_store1", - "metadata": { - "__run_options": { - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - }, - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - - -async def test_retrieve_agent(kernel, azure_openai_unit_test_env): - with patch.object( - AzureAssistantAgent, "_create_client", return_value=MagicMock(spec=AsyncAzureOpenAI) - ) as mock_create_client: - mock_client_instance = mock_create_client.return_value - mock_client_instance.beta = MagicMock() - mock_client_instance.beta.assistants = MagicMock() - - mock_client_instance.beta.assistants.retrieve = AsyncMock(return_value=AsyncMock(spec=Assistant)) - - OpenAIAssistantBase._create_open_ai_assistant_definition = MagicMock( - return_value={ - "ai_model_id": "test_model", - "description": "test_description", - "id": "test_id", - "instructions": "test_instructions", - "name": "test_name", - "enable_code_interpreter": True, - "enable_file_search": True, - "enable_json_response": True, - "code_interpreter_file_ids": ["file1", "file2"], - "temperature": 0.7, - "top_p": 0.9, - "vector_store_id": "vector_store1", - "metadata": { - "__run_options": { - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - }, - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - ) - - retrieved_agent = await AzureAssistantAgent.retrieve(id="test_id", api_key="test_api_key", kernel=kernel) - assert retrieved_agent.model_dump( - include={ - "ai_model_id", - "description", - "id", - "instructions", - "name", - "enable_code_interpreter", - "enable_file_search", - "enable_json_response", - "code_interpreter_file_ids", - "temperature", - "top_p", - "vector_store_id", - "metadata", - "max_completion_tokens", - "max_prompt_tokens", - "parallel_tool_calls_enabled", - "truncation_message_count", - } - ) == { - "ai_model_id": "test_model", - "description": "test_description", - "id": "test_id", - "instructions": "test_instructions", - "name": "test_name", - "enable_code_interpreter": True, - "enable_file_search": True, - "enable_json_response": True, - "code_interpreter_file_ids": ["file1", "file2"], - "temperature": 0.7, - "top_p": 0.9, - "vector_store_id": "vector_store1", - "metadata": { - "__run_options": { - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - }, - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - mock_client_instance.beta.assistants.retrieve.assert_called_once_with("test_id") - OpenAIAssistantBase._create_open_ai_assistant_definition.assert_called_once() - - -@pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"]], indirect=True) -async def 
test_retrieve_agent_missing_chat_deployment_name_throws(kernel, azure_openai_unit_test_env): - with pytest.raises(AgentInitializationException, match="The Azure OpenAI chat_deployment_name is required."): - _ = await AzureAssistantAgent.retrieve( - id="test_id", api_key="test_api_key", kernel=kernel, env_file_path="test.env" - ) - - -@pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_API_KEY"]], indirect=True) -async def test_retrieve_agent_missing_api_key_throws(kernel, azure_openai_unit_test_env): - with pytest.raises( - AgentInitializationException, match="Please provide either a client, an api_key, ad_token or ad_token_provider." - ): - _ = await AzureAssistantAgent.retrieve(id="test_id", kernel=kernel, env_file_path="test.env") - - -def test_open_ai_settings_create_throws(azure_openai_unit_test_env): - with patch( - "semantic_kernel.connectors.ai.open_ai.settings.azure_open_ai_settings.AzureOpenAISettings.create" - ) as mock_create: - mock_create.side_effect = ValidationError.from_exception_data("test", line_errors=[], input_type="python") - - with pytest.raises(AgentInitializationException, match="Failed to create Azure OpenAI settings."): - AzureAssistantAgent(service_id="test", api_key="test_api_key", deployment_name="test_deployment_name") - - -@pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"]], indirect=True) -def test_azure_openai_agent_create_missing_deployment_name(azure_openai_unit_test_env): - with pytest.raises(AgentInitializationException, match="The Azure OpenAI chat_deployment_name is required."): - AzureAssistantAgent( - service_id="test_service", api_key="test_key", endpoint="https://example.com", env_file_path="test.env" - ) - - -@pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_API_KEY"]], indirect=True) -def test_azure_openai_agent_create_missing_api_key(azure_openai_unit_test_env): - with pytest.raises( - AgentInitializationException, match="Please provide either a client, an api_key, ad_token or ad_token_provider." - ): - AzureAssistantAgent(service_id="test_service", endpoint="https://example.com", env_file_path="test.env") - - -async def test_setup_client_and_token_with_existing_client(): - """Test that if a client is already provided, _setup_client_and_token - simply returns that client (and doesn't create a new one). - """ - mock_settings = MagicMock() - mock_settings.chat_deployment_name = "test_deployment_name" - mock_settings.api_key = None - mock_settings.token_endpoint = None - mock_client = MagicMock(spec=AsyncAzureOpenAI) - - returned_client, returned_token = AzureAssistantAgent._setup_client_and_token( - azure_openai_settings=mock_settings, - ad_token=None, - ad_token_provider=None, - client=mock_client, - default_headers=None, - ) - - assert returned_client == mock_client - assert returned_token is None - - -async def test_setup_client_and_token_with_api_key_creates_client(): - """Test that providing an API key (and no client) results - in creating a new client via _create_client. 
- """ - mock_settings = MagicMock() - mock_settings.chat_deployment_name = "test_deployment_name" - mock_settings.api_key.get_secret_value.return_value = "test_api_key" - mock_settings.endpoint = "https://test.endpoint" - mock_settings.api_version = "2024-05-01" - mock_settings.token_endpoint = None - - with patch.object(AzureAssistantAgent, "_create_client", return_value="mock_client") as mock_create_client: - returned_client, returned_token = AzureAssistantAgent._setup_client_and_token( - azure_openai_settings=mock_settings, - ad_token=None, - ad_token_provider=None, - client=None, - default_headers=None, - ) - - mock_create_client.assert_called_once_with( - api_key="test_api_key", - endpoint="https://test.endpoint", - api_version="2024-05-01", - ad_token=None, - ad_token_provider=None, - default_headers=None, - ) - assert returned_client == "mock_client" - assert returned_token is None - - -async def test_setup_client_and_token_fetches_ad_token_when_token_endpoint_present(): - """Test that if no credentials are provided except a token endpoint, - _setup_client_and_token fetches an AD token. - """ - mock_settings = MagicMock() - mock_settings.chat_deployment_name = "test_deployment_name" - mock_settings.api_key = None - mock_settings.endpoint = "https://test.endpoint" - mock_settings.api_version = "2024-05-01" - mock_settings.token_endpoint = "https://login.microsoftonline.com" - - with ( - patch( - "semantic_kernel.agents.open_ai.azure_assistant_agent.get_entra_auth_token", - return_value="fetched_ad_token", - ) as mock_get_token, - patch.object(AzureAssistantAgent, "_create_client", return_value="mock_client") as mock_create_client, - ): - returned_client, returned_token = AzureAssistantAgent._setup_client_and_token( - azure_openai_settings=mock_settings, - ad_token=None, - ad_token_provider=None, - client=None, - default_headers=None, - ) - - mock_get_token.assert_called_once_with("https://login.microsoftonline.com") - mock_create_client.assert_called_once_with( - api_key=None, - endpoint="https://test.endpoint", - api_version="2024-05-01", - ad_token="fetched_ad_token", - ad_token_provider=None, - default_headers=None, - ) - assert returned_client == "mock_client" - assert returned_token == "fetched_ad_token" - - -async def test_setup_client_and_token_no_credentials_raises_exception(): - """Test that if there's no client, no API key, no AD token/provider, - and no token endpoint, an AgentInitializationException is raised. - """ - mock_settings = MagicMock() - mock_settings.chat_deployment_name = "test_deployment_name" - mock_settings.api_key = None - mock_settings.endpoint = "https://test.endpoint" - mock_settings.api_version = "2024-05-01" - mock_settings.token_endpoint = None - - with pytest.raises( - AgentInitializationException, match="Please provide either a client, an api_key, ad_token or ad_token_provider." 
- ): - _ = AzureAssistantAgent._setup_client_and_token( - azure_openai_settings=mock_settings, - ad_token=None, - ad_token_provider=None, - client=None, - default_headers=None, - ) - - -@pytest.mark.parametrize( - "exclude_list, client, api_key, should_raise, expected_exception_msg, should_create_client_call", - [ - ([], None, "test_api_key", False, None, True), - ([], AsyncMock(spec=AsyncAzureOpenAI), None, False, None, False), - ( - [], - AsyncMock(spec=AsyncAzureOpenAI), - "test_api_key", - False, - None, - False, - ), - ( - ["AZURE_OPENAI_API_KEY"], - None, - None, - True, - "Please provide either a client, an api_key, ad_token or ad_token_provider.", - False, - ), - ], - indirect=["exclude_list"], -) -async def test_retrieve_agent_handling_api_key_and_client( - azure_openai_unit_test_env, - exclude_list, - kernel, - client, - api_key, - should_raise, - expected_exception_msg, - should_create_client_call, -): - is_api_key_present = "AZURE_OPENAI_API_KEY" not in exclude_list - - with ( - patch.object( - AzureAssistantAgent, - "_create_azure_openai_settings", - return_value=MagicMock( - chat_model_id="test_model", - api_key=MagicMock( - get_secret_value=MagicMock(return_value="test_api_key" if is_api_key_present else None) - ) - if is_api_key_present - else None, - ), - ), - patch.object( - AzureAssistantAgent, - "_create_client", - return_value=AsyncMock(spec=AsyncAzureOpenAI), - ) as mock_create_client, - patch.object( - OpenAIAssistantBase, - "_create_open_ai_assistant_definition", - return_value={ - "ai_model_id": "test_model", - "description": "test_description", - "id": "test_id", - "name": "test_name", - }, - ) as mock_create_def, - ): - if client: - client.beta = MagicMock() - client.beta.assistants = MagicMock() - client.beta.assistants.retrieve = AsyncMock(return_value=MagicMock(spec=Assistant)) - else: - mock_client_instance = mock_create_client.return_value - mock_client_instance.beta = MagicMock() - mock_client_instance.beta.assistants = MagicMock() - mock_client_instance.beta.assistants.retrieve = AsyncMock(return_value=MagicMock(spec=Assistant)) - - if should_raise: - with pytest.raises(AgentInitializationException, match=expected_exception_msg): - await AzureAssistantAgent.retrieve(id="test_id", kernel=kernel, api_key=api_key, client=client) - return - - retrieved_agent = await AzureAssistantAgent.retrieve( - id="test_id", kernel=kernel, api_key=api_key, client=client - ) - - if should_create_client_call: - mock_create_client.assert_called_once() - else: - mock_create_client.assert_not_called() - - assert retrieved_agent.ai_model_id == "test_model" - mock_create_def.assert_called_once() - if client: - client.beta.assistants.retrieve.assert_called_once_with("test_id") - else: - mock_client_instance.beta.assistants.retrieve.assert_called_once_with("test_id") diff --git a/python/tests/unit/agents/test_chat_completion_agent.py b/python/tests/unit/agents/test_chat_completion_agent.py deleted file mode 100644 index 01f9813acf83..000000000000 --- a/python/tests/unit/agents/test_chat_completion_agent.py +++ /dev/null @@ -1,217 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from unittest.mock import AsyncMock, create_autospec, patch - -import pytest -from pydantic import ValidationError - -from semantic_kernel.agents import ChatCompletionAgent -from semantic_kernel.agents.channels.chat_history_channel import ChatHistoryChannel -from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions import KernelServiceNotFoundError -from semantic_kernel.kernel import Kernel - - -@pytest.fixture -def mock_streaming_chat_completion_response() -> AsyncMock: - """A fixture that returns a mock response for a streaming chat completion response.""" - - async def mock_response(chat_history, settings, kernel): - content1 = ChatMessageContent(role=AuthorRole.SYSTEM, content="Processed Message 1") - content2 = ChatMessageContent(role=AuthorRole.TOOL, content="Processed Message 2") - chat_history.messages.append(content1) - chat_history.messages.append(content2) - yield [content1] - yield [content2] - - return mock_response - - -async def test_initialization(): - agent = ChatCompletionAgent( - service_id="test_service", - name="TestAgent", - id="test_id", - description="Test Description", - instructions="Test Instructions", - ) - - assert agent.service_id == "test_service" - assert agent.name == "TestAgent" - assert agent.id == "test_id" - assert agent.description == "Test Description" - assert agent.instructions == "Test Instructions" - - -async def test_initialization_invalid_name_throws(): - with pytest.raises(ValidationError): - _ = ChatCompletionAgent( - service_id="test_service", - name="Test Agent", - id="test_id", - description="Test Description", - instructions="Test Instructions", - ) - - -async def test_initialization_no_service_id(): - agent = ChatCompletionAgent( - name="TestAgent", - id="test_id", - description="Test Description", - instructions="Test Instructions", - ) - - assert agent.service_id == "default" - assert agent.kernel is not None - assert agent.name == "TestAgent" - assert agent.id == "test_id" - assert agent.description == "Test Description" - assert agent.instructions == "Test Instructions" - - -async def test_initialization_with_kernel(kernel: Kernel): - agent = ChatCompletionAgent( - kernel=kernel, - name="TestAgent", - id="test_id", - description="Test Description", - instructions="Test Instructions", - ) - - assert agent.service_id == "default" - assert kernel == agent.kernel - assert agent.name == "TestAgent" - assert agent.id == "test_id" - assert agent.description == "Test Description" - assert agent.instructions == "Test Instructions" - - -async def test_invoke(): - kernel = create_autospec(Kernel) - kernel.get_service.return_value = create_autospec(ChatCompletionClientBase) - kernel.get_service.return_value.get_chat_message_contents = AsyncMock( - return_value=[ChatMessageContent(role=AuthorRole.SYSTEM, content="Processed Message")] - ) - agent = ChatCompletionAgent( - kernel=kernel, service_id="test_service", name="TestAgent", instructions="Test Instructions" - ) - - history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) - - messages = [message async for message in agent.invoke(history)] - - assert len(messages) == 1 - assert messages[0].content == "Processed Message" - - -async def test_invoke_tool_call_added(): - kernel = 
create_autospec(Kernel) - chat_completion_service = create_autospec(ChatCompletionClientBase) - kernel.get_service.return_value = chat_completion_service - agent = ChatCompletionAgent(kernel=kernel, service_id="test_service", name="TestAgent") - - history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) - - async def mock_get_chat_message_contents(chat_history, settings, kernel): - new_messages = [ - ChatMessageContent(role=AuthorRole.ASSISTANT, content="Processed Message 1"), - ChatMessageContent(role=AuthorRole.TOOL, content="Processed Message 2"), - ] - chat_history.messages.extend(new_messages) - return new_messages - - chat_completion_service.get_chat_message_contents = AsyncMock(side_effect=mock_get_chat_message_contents) - - messages = [message async for message in agent.invoke(history)] - - assert len(messages) == 2 - assert messages[0].content == "Processed Message 1" - assert messages[1].content == "Processed Message 2" - - assert len(history.messages) == 3 - assert history.messages[1].content == "Processed Message 1" - assert history.messages[2].content == "Processed Message 2" - assert history.messages[1].name == "TestAgent" - assert history.messages[2].name == "TestAgent" - - -async def test_invoke_no_service_throws(): - kernel = create_autospec(Kernel) - kernel.get_service.return_value = None - agent = ChatCompletionAgent(kernel=kernel, service_id="test_service", name="TestAgent") - - history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) - - with pytest.raises(KernelServiceNotFoundError): - async for _ in agent.invoke(history): - pass - - -async def test_invoke_stream(): - kernel = create_autospec(Kernel) - kernel.get_service.return_value = create_autospec(ChatCompletionClientBase) - - agent = ChatCompletionAgent(kernel=kernel, service_id="test_service", name="TestAgent") - - history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) - - with patch( - "semantic_kernel.connectors.ai.chat_completion_client_base.ChatCompletionClientBase.get_streaming_chat_message_contents", - return_value=AsyncMock(), - ) as mock: - mock.return_value.__aiter__.return_value = [ - [ChatMessageContent(role=AuthorRole.USER, content="Initial Message")] - ] - - async for message in agent.invoke_stream(history): - assert message.role == AuthorRole.USER - assert message.content == "Initial Message" - - -async def test_invoke_stream_tool_call_added(mock_streaming_chat_completion_response): - kernel = create_autospec(Kernel) - chat_completion_service = create_autospec(ChatCompletionClientBase) - kernel.get_service.return_value = chat_completion_service - agent = ChatCompletionAgent(kernel=kernel, service_id="test_service", name="TestAgent") - - history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) - - chat_completion_service.get_streaming_chat_message_contents = mock_streaming_chat_completion_response - - async for message in agent.invoke_stream(history): - print(f"Message role: {message.role}, content: {message.content}") - assert message.role in [AuthorRole.SYSTEM, AuthorRole.TOOL] - assert message.content in ["Processed Message 1", "Processed Message 2"] - - assert len(history.messages) == 3 - - -async def test_invoke_stream_no_service_throws(): - kernel = create_autospec(Kernel) - kernel.get_service.return_value = None - agent = ChatCompletionAgent(kernel=kernel, service_id="test_service", name="TestAgent") - - history = 
ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) - - with pytest.raises(KernelServiceNotFoundError): - async for _ in agent.invoke_stream(history): - pass - - -def test_get_channel_keys(): - agent = ChatCompletionAgent() - keys = agent.get_channel_keys() - - for key in keys: - assert isinstance(key, str) - - -async def test_create_channel(): - agent = ChatCompletionAgent() - channel = await agent.create_channel() - - assert isinstance(channel, ChatHistoryChannel) diff --git a/python/tests/unit/agents/test_agent_channel.py b/python/tests/unit/agents/test_group_chat/test_agent_channel.py similarity index 100% rename from python/tests/unit/agents/test_agent_channel.py rename to python/tests/unit/agents/test_group_chat/test_agent_channel.py diff --git a/python/tests/unit/agents/test_agent_chat.py b/python/tests/unit/agents/test_group_chat/test_agent_chat.py similarity index 100% rename from python/tests/unit/agents/test_agent_chat.py rename to python/tests/unit/agents/test_group_chat/test_agent_chat.py diff --git a/python/tests/unit/agents/test_agent_chat_utils.py b/python/tests/unit/agents/test_group_chat/test_agent_chat_utils.py similarity index 100% rename from python/tests/unit/agents/test_agent_chat_utils.py rename to python/tests/unit/agents/test_group_chat/test_agent_chat_utils.py diff --git a/python/tests/unit/agents/test_agent_group_chat.py b/python/tests/unit/agents/test_group_chat/test_agent_group_chat.py similarity index 100% rename from python/tests/unit/agents/test_agent_group_chat.py rename to python/tests/unit/agents/test_group_chat/test_agent_group_chat.py diff --git a/python/tests/unit/agents/test_broadcast_queue.py b/python/tests/unit/agents/test_group_chat/test_broadcast_queue.py similarity index 100% rename from python/tests/unit/agents/test_broadcast_queue.py rename to python/tests/unit/agents/test_group_chat/test_broadcast_queue.py diff --git a/python/tests/unit/agents/test_aggregator_termination_strategy.py b/python/tests/unit/agents/test_group_chat_strategies/test_aggregator_termination_strategy.py similarity index 84% rename from python/tests/unit/agents/test_aggregator_termination_strategy.py rename to python/tests/unit/agents/test_group_chat_strategies/test_aggregator_termination_strategy.py index 103f734fa0ed..bb659c417ebd 100644 --- a/python/tests/unit/agents/test_aggregator_termination_strategy.py +++ b/python/tests/unit/agents/test_group_chat_strategies/test_aggregator_termination_strategy.py @@ -2,33 +2,13 @@ from unittest.mock import AsyncMock, MagicMock -from semantic_kernel.agents.agent import Agent -from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.agents.strategies.termination.aggregator_termination_strategy import ( AggregateTerminationCondition, AggregatorTerminationStrategy, ) from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy from semantic_kernel.contents.chat_message_content import ChatMessageContent - - -class MockAgent(Agent): - """A mock agent for testing purposes.""" - - def __init__(self, id: str = None, name: str = "TestAgent", description: str = "A test agent"): - args = { - "name": name, - "description": description, - } - if id is not None: - args["id"] = id - super().__init__(**args) - - def get_channel_keys(self) -> list[str]: - return ["key1", "key2"] - - async def create_channel(self) -> AgentChannel: - return AsyncMock(spec=AgentChannel) +from tests.unit.agents.test_agent import MockAgent async def 
test_aggregate_termination_condition_all_true(): diff --git a/python/tests/unit/agents/test_default_termination_strategy.py b/python/tests/unit/agents/test_group_chat_strategies/test_default_termination_strategy.py similarity index 100% rename from python/tests/unit/agents/test_default_termination_strategy.py rename to python/tests/unit/agents/test_group_chat_strategies/test_default_termination_strategy.py diff --git a/python/tests/unit/agents/test_kernel_function_selection_strategy.py b/python/tests/unit/agents/test_group_chat_strategies/test_kernel_function_selection_strategy.py similarity index 85% rename from python/tests/unit/agents/test_kernel_function_selection_strategy.py rename to python/tests/unit/agents/test_group_chat_strategies/test_kernel_function_selection_strategy.py index 2523c06cb05e..8953593c4b29 100644 --- a/python/tests/unit/agents/test_kernel_function_selection_strategy.py +++ b/python/tests/unit/agents/test_group_chat_strategies/test_kernel_function_selection_strategy.py @@ -4,8 +4,6 @@ import pytest -from semantic_kernel.agents.agent import Agent -from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.agents.strategies.selection.kernel_function_selection_strategy import ( KernelFunctionSelectionStrategy, ) @@ -13,25 +11,7 @@ from semantic_kernel.exceptions.agent_exceptions import AgentExecutionException from semantic_kernel.functions.kernel_function import KernelFunction from semantic_kernel.kernel import Kernel - - -class MockAgent(Agent): - """A mock agent for testing purposes.""" - - def __init__(self, id: str = None, name: str = "TestAgent", description: str = "A test agent"): - args = { - "name": name, - "description": description, - } - if id is not None: - args["id"] = id - super().__init__(**args) - - def get_channel_keys(self) -> list[str]: - return ["key1", "key2"] - - async def create_channel(self) -> AgentChannel: - return AsyncMock(spec=AgentChannel) +from tests.unit.agents.test_agent import MockAgent @pytest.fixture diff --git a/python/tests/unit/agents/test_kernel_function_termination_strategy.py b/python/tests/unit/agents/test_group_chat_strategies/test_kernel_function_termination_strategy.py similarity index 85% rename from python/tests/unit/agents/test_kernel_function_termination_strategy.py rename to python/tests/unit/agents/test_group_chat_strategies/test_kernel_function_termination_strategy.py index e273c40e2501..5e4d96da0e60 100644 --- a/python/tests/unit/agents/test_kernel_function_termination_strategy.py +++ b/python/tests/unit/agents/test_group_chat_strategies/test_kernel_function_termination_strategy.py @@ -2,32 +2,12 @@ from unittest.mock import AsyncMock, MagicMock, patch -from semantic_kernel.agents.agent import Agent -from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.agents.strategies import KernelFunctionTerminationStrategy from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.functions.kernel_arguments import KernelArguments from semantic_kernel.functions.kernel_function import KernelFunction from semantic_kernel.kernel import Kernel - - -class MockAgent(Agent): - """A mock agent for testing purposes.""" - - def __init__(self, id: str = None, name: str = "TestAgent", description: str = "A test agent"): - args = { - "name": name, - "description": description, - } - if id is not None: - args["id"] = id - super().__init__(**args) - - def get_channel_keys(self) -> list[str]: - return ["key1", "key2"] - - async def 
create_channel(self) -> AgentChannel: - return AsyncMock(spec=AgentChannel) +from tests.unit.agents.test_agent import MockAgent async def test_should_agent_terminate_with_result_true(): diff --git a/python/tests/unit/agents/test_sequential_strategy_selection.py b/python/tests/unit/agents/test_group_chat_strategies/test_sequential_strategy_selection.py similarity index 82% rename from python/tests/unit/agents/test_sequential_strategy_selection.py rename to python/tests/unit/agents/test_group_chat_strategies/test_sequential_strategy_selection.py index 17754bd389fd..9df2214ebb3f 100644 --- a/python/tests/unit/agents/test_sequential_strategy_selection.py +++ b/python/tests/unit/agents/test_group_chat_strategies/test_sequential_strategy_selection.py @@ -1,32 +1,13 @@ # Copyright (c) Microsoft. All rights reserved. -from unittest.mock import AsyncMock, MagicMock +from unittest.mock import MagicMock import pytest from semantic_kernel.agents.agent import Agent -from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.agents.strategies.selection.sequential_selection_strategy import SequentialSelectionStrategy from semantic_kernel.exceptions.agent_exceptions import AgentExecutionException - - -class MockAgent(Agent): - """A mock agent for testing purposes.""" - - def __init__(self, id: str = None, name: str = "TestAgent", description: str = "A test agent"): - args = { - "name": name, - "description": description, - } - if id is not None: - args["id"] = id - super().__init__(**args) - - def get_channel_keys(self) -> list[str]: - return ["key1", "key2"] - - async def create_channel(self) -> AgentChannel: - return AsyncMock(spec=AgentChannel) +from tests.unit.agents.test_agent import MockAgent @pytest.fixture @@ -101,10 +82,12 @@ async def test_sequential_selection_avoid_selecting_same_agent_twice(): agent_0 = MagicMock(spec=Agent) agent_0.id = "agent-0" agent_0.name = "Agent0" + agent_0.plugins = [] agent_1 = MagicMock(spec=Agent) agent_1.id = "agent-1" agent_1.name = "Agent1" + agent_1.plugins = [] agents = [agent_0, agent_1] diff --git a/python/tests/unit/agents/test_termination_strategy.py b/python/tests/unit/agents/test_group_chat_strategies/test_termination_strategy.py similarity index 77% rename from python/tests/unit/agents/test_termination_strategy.py rename to python/tests/unit/agents/test_group_chat_strategies/test_termination_strategy.py index 6888745453a8..6a49818fa199 100644 --- a/python/tests/unit/agents/test_termination_strategy.py +++ b/python/tests/unit/agents/test_group_chat_strategies/test_termination_strategy.py @@ -1,32 +1,13 @@ # Copyright (c) Microsoft. All rights reserved. 
-from unittest.mock import AsyncMock, MagicMock +from unittest.mock import MagicMock import pytest from semantic_kernel.agents import Agent -from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy from semantic_kernel.contents.chat_message_content import ChatMessageContent - - -class MockAgent(Agent): - """A mock agent for testing purposes.""" - - def __init__(self, id: str = None, name: str = "TestAgent", description: str = "A test agent"): - args = { - "name": name, - "description": description, - } - if id is not None: - args["id"] = id - super().__init__(**args) - - def get_channel_keys(self) -> list[str]: - return ["key1", "key2"] - - async def create_channel(self) -> AgentChannel: - return AsyncMock(spec=AgentChannel) +from tests.unit.agents.test_agent import MockAgent class TerminationStrategyTest(TerminationStrategy): diff --git a/python/tests/unit/agents/test_open_ai_assistant_agent.py b/python/tests/unit/agents/test_open_ai_assistant_agent.py deleted file mode 100644 index 6392d3345ea1..000000000000 --- a/python/tests/unit/agents/test_open_ai_assistant_agent.py +++ /dev/null @@ -1,601 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import json -from unittest.mock import AsyncMock, MagicMock, mock_open, patch - -import pytest -from openai import AsyncOpenAI -from openai.resources.beta.assistants import Assistant -from openai.types.beta.assistant import ( - ToolResources, - ToolResourcesCodeInterpreter, - ToolResourcesFileSearch, -) -from pydantic import ValidationError - -from semantic_kernel.agents.open_ai import OpenAIAssistantAgent -from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase -from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException -from semantic_kernel.kernel import Kernel - - -@pytest.fixture(scope="function") -def openai_assistant_agent(kernel: Kernel, openai_unit_test_env): - return OpenAIAssistantAgent( - kernel=kernel, - service_id="test_service", - name="test_name", - instructions="test_instructions", - api_key="test_api_key", - kwargs={"temperature": 0.1}, - max_completion_tokens=100, - max_prompt_tokens=100, - parallel_tool_calls_enabled=True, - truncation_message_count=2, - ) - - -@pytest.fixture(scope="function") -def mock_assistant(): - return Assistant( - created_at=123456789, - object="assistant", - metadata={ - "__run_options": { - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - }, - model="test_model", - description="test_description", - id="test_id", - instructions="test_instructions", - name="test_name", - tools=[{"type": "code_interpreter"}, {"type": "file_search"}], - temperature=0.7, - top_p=0.9, - response_format={"type": "json_object"}, - tool_resources=ToolResources( - code_interpreter=ToolResourcesCodeInterpreter(code_interpreter_file_ids=["file1", "file2"]), - file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), - ), - ) - - -@pytest.fixture(scope="function") -def mock_assistant_json(): - return Assistant( - created_at=123456789, - object="assistant", - metadata={ - "__run_options": json.dumps({ - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - }) - }, - model="test_model", - description="test_description", - id="test_id", - instructions="test_instructions", - 
name="test_name", - tools=[{"type": "code_interpreter"}, {"type": "file_search"}], - temperature=0.7, - top_p=0.9, - response_format={"type": "json_object"}, - tool_resources=ToolResources( - code_interpreter=ToolResourcesCodeInterpreter(code_interpreter_file_ids=["file1", "file2"]), - file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), - ), - ) - - -def test_initialization(openai_assistant_agent: OpenAIAssistantAgent, openai_unit_test_env): - agent = openai_assistant_agent - assert agent is not None - agent.kernel is not None - - -def test_create_client(openai_unit_test_env): - client = OpenAIAssistantAgent._create_client(api_key="test_api_key", default_headers={"User-Agent": "test-agent"}) - assert isinstance(client, AsyncOpenAI) - assert client.api_key == "test_api_key" - - -def test_create_client_from_configuration_missing_api_key(): - with pytest.raises( - AgentInitializationException, - match="Please provide an OpenAI api_key", - ): - OpenAIAssistantAgent._create_client(None) - - -async def test_create_agent(kernel: Kernel, openai_unit_test_env): - with patch.object(OpenAIAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant: - mock_create_assistant.return_value = MagicMock(spec=Assistant) - agent = await OpenAIAssistantAgent.create( - kernel=kernel, - ai_model_id="test_model_id", - service_id="test_service", - name="test_name", - api_key="test_api_key", - ) - assert agent.assistant is not None - mock_create_assistant.assert_called_once() - - -async def test_create_agent_with_files(kernel: Kernel, openai_unit_test_env): - mock_open_file = mock_open(read_data="file_content") - with ( - patch("builtins.open", mock_open_file), - patch( - "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.add_file", - return_value="test_file_id", - ), - patch( - "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.create_vector_store", - return_value="vector_store_id", - ), - patch.object(OpenAIAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant, - ): - mock_create_assistant.return_value = MagicMock(spec=Assistant) - agent = await OpenAIAssistantAgent.create( - kernel=kernel, - ai_model_id="test_model_id", - service_id="test_service", - name="test_name", - api_key="test_api_key", - code_interpreter_filenames=["file1", "file2"], - vector_store_filenames=["file3", "file4"], - enable_code_interpreter=True, - enable_file_search=True, - ) - assert agent.assistant is not None - mock_create_assistant.assert_called_once() - - -async def test_create_agent_with_code_files_not_found_raises_exception(kernel: Kernel, openai_unit_test_env): - mock_open_file = mock_open(read_data="file_content") - with ( - patch("builtins.open", mock_open_file), - patch( - "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.add_file", - side_effect=FileNotFoundError("File not found"), - ), - patch.object(OpenAIAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant, - ): - mock_create_assistant.return_value = MagicMock(spec=Assistant) - with pytest.raises(AgentInitializationException, match="Failed to upload code interpreter files."): - _ = await OpenAIAssistantAgent.create( - kernel=kernel, - service_id="test_service", - ai_model_id="test_model_id", - name="test_name", - api_key="test_api_key", - api_version="2024-05-01", - code_interpreter_filenames=["file1", "file2"], - ) - - -async def 
test_create_agent_with_search_files_not_found_raises_exception(kernel: Kernel, openai_unit_test_env): - mock_open_file = mock_open(read_data="file_content") - with ( - patch("builtins.open", mock_open_file), - patch( - "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.add_file", - side_effect=FileNotFoundError("File not found"), - ), - patch.object(OpenAIAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant, - ): - mock_create_assistant.return_value = MagicMock(spec=Assistant) - with pytest.raises(AgentInitializationException, match="Failed to upload vector store files."): - _ = await OpenAIAssistantAgent.create( - kernel=kernel, - service_id="test_service", - ai_model_id="test_model_id", - name="test_name", - api_key="test_api_key", - api_version="2024-05-01", - vector_store_filenames=["file3", "file4"], - ) - - -async def test_create_agent_second_way(kernel: Kernel, mock_assistant, openai_unit_test_env): - agent = OpenAIAssistantAgent( - kernel=kernel, - ai_model_id="test_model_id", - service_id="test_service", - name="test_name", - api_key="test_api_key", - max_completion_tokens=100, - max_prompt_tokens=100, - parallel_tool_calls_enabled=True, - truncation_message_count=2, - ) - - with patch.object( - OpenAIAssistantAgent, "_create_client", return_value=MagicMock(spec=AsyncOpenAI) - ) as mock_create_client: - mock_client_instance = mock_create_client.return_value - mock_client_instance.beta = MagicMock() - mock_client_instance.beta.assistants.create = AsyncMock(return_value=mock_assistant) - - agent.client = mock_client_instance - - assistant = await agent.create_assistant() - - mock_client_instance.beta.assistants.create.assert_called_once() - - assert assistant == mock_assistant - - assert json.loads( - mock_client_instance.beta.assistants.create.call_args[1]["metadata"][agent._options_metadata_key] - ) == { - "max_completion_tokens": 100, - "max_prompt_tokens": 100, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 2, - } - - -async def test_list_definitions(kernel: Kernel, openai_unit_test_env): - agent = OpenAIAssistantAgent( - kernel=kernel, service_id="test_service", name="test_name", instructions="test_instructions", id="test_id" - ) - - assistant = Assistant( - id="test_id", - created_at=123456789, - description="test_description", - instructions="test_instructions", - metadata={ - "__run_options": { - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - }, - model="test_model", - name="test_name", - object="assistant", - temperature=0.7, - tool_resources=ToolResources( - code_interpreter=ToolResourcesCodeInterpreter(code_interpreter_file_ids=["file1", "file2"]), - file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), - ), - top_p=0.9, - response_format={"type": "json_object"}, - tools=[{"type": "code_interpreter"}, {"type": "file_search"}], - ) - - with patch.object( - OpenAIAssistantAgent, "_create_client", return_value=MagicMock(spec=AsyncOpenAI) - ) as mock_create_client: - mock_client_instance = mock_create_client.return_value - mock_client_instance.beta = MagicMock() - mock_client_instance.beta.assistants = MagicMock() - mock_client_instance.beta.assistants.list = AsyncMock(return_value=MagicMock(data=[assistant])) - - agent.client = mock_client_instance - - definitions = [] - async for definition in agent.list_definitions(): - definitions.append(definition) - - 
mock_client_instance.beta.assistants.list.assert_called() - - assert len(definitions) == 1 - assert definitions[0] == { - "ai_model_id": "test_model", - "description": "test_description", - "id": "test_id", - "instructions": "test_instructions", - "name": "test_name", - "enable_code_interpreter": True, - "enable_file_search": True, - "enable_json_response": True, - "code_interpreter_file_ids": ["file1", "file2"], - "temperature": 0.7, - "top_p": 0.9, - "vector_store_id": "vector_store1", - "metadata": { - "__run_options": { - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - }, - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - - -@pytest.mark.parametrize("exclude_list", [["OPENAI_CHAT_MODEL_ID"]], indirect=True) -async def test_retrieve_agent_missing_chat_model_id_throws(kernel, openai_unit_test_env): - with pytest.raises(AgentInitializationException, match="The OpenAI chat model ID is required."): - _ = await OpenAIAssistantAgent.retrieve( - id="test_id", api_key="test_api_key", kernel=kernel, env_file_path="test.env" - ) - - -@pytest.mark.parametrize("exclude_list", [["OPENAI_API_KEY"]], indirect=True) -async def test_retrieve_agent_missing_api_key_throws(kernel, openai_unit_test_env): - with pytest.raises( - AgentInitializationException, match="The OpenAI API key is required, if a client is not provided." - ): - _ = await OpenAIAssistantAgent.retrieve(id="test_id", kernel=kernel, env_file_path="test.env") - - -def test_open_ai_settings_create_throws(openai_unit_test_env): - with patch("semantic_kernel.connectors.ai.open_ai.settings.open_ai_settings.OpenAISettings.create") as mock_create: - mock_create.side_effect = ValidationError.from_exception_data("test", line_errors=[], input_type="python") - - with pytest.raises(AgentInitializationException, match="Failed to create OpenAI settings."): - OpenAIAssistantAgent( - service_id="test", api_key="test_api_key", org_id="test_org_id", ai_model_id="test_model_id" - ) - - -@pytest.mark.parametrize("exclude_list", [["OPENAI_CHAT_MODEL_ID"]], indirect=True) -def test_azure_openai_agent_create_missing_chat_model_id_throws(openai_unit_test_env): - with pytest.raises(AgentInitializationException, match="The OpenAI chat model ID is required."): - OpenAIAssistantAgent(service_id="test_service", env_file_path="test.env") - - -@pytest.mark.parametrize("exclude_list", [["OPENAI_API_KEY"]], indirect=True) -def test_azure_openai_agent_create_missing_api_key_throws(openai_unit_test_env): - with pytest.raises( - AgentInitializationException, match="The OpenAI API key is required, if a client is not provided." 
- ): - OpenAIAssistantAgent(env_file_path="test.env") - - -def test_create_open_ai_assistant_definition_with_json_metadata(mock_assistant_json, openai_unit_test_env): - with ( - patch.object( - OpenAIAssistantBase, - "_create_open_ai_assistant_definition", - return_value={ - "ai_model_id": "test_model", - "description": "test_description", - "id": "test_id", - "instructions": "test_instructions", - "name": "test_name", - "enable_code_interpreter": True, - "enable_file_search": True, - "enable_json_response": True, - "code_interpreter_file_ids": ["file1", "file2"], - "temperature": 0.7, - "top_p": 0.9, - "vector_store_id": "vector_store1", - "metadata": { - "__run_options": { - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - }, - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - }, - ) as mock_create_def, - ): - assert mock_create_def.return_value == { - "ai_model_id": "test_model", - "description": "test_description", - "id": "test_id", - "instructions": "test_instructions", - "name": "test_name", - "enable_code_interpreter": True, - "enable_file_search": True, - "enable_json_response": True, - "code_interpreter_file_ids": ["file1", "file2"], - "temperature": 0.7, - "top_p": 0.9, - "vector_store_id": "vector_store1", - "metadata": { - "__run_options": { - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - }, - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - - -async def test_retrieve_agent(kernel, openai_unit_test_env): - with ( - patch.object( - OpenAIAssistantAgent, "_create_client", return_value=MagicMock(spec=AsyncOpenAI) - ) as mock_create_client, - patch.object( - OpenAIAssistantBase, - "_create_open_ai_assistant_definition", - return_value={ - "ai_model_id": "test_model", - "description": "test_description", - "id": "test_id", - "instructions": "test_instructions", - "name": "test_name", - "enable_code_interpreter": True, - "enable_file_search": True, - "enable_json_response": True, - "code_interpreter_file_ids": ["file1", "file2"], - "temperature": 0.7, - "top_p": 0.9, - "vector_store_id": "vector_store1", - "metadata": { - "__run_options": { - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - }, - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - }, - ) as mock_create_def, - ): - mock_client_instance = mock_create_client.return_value - mock_client_instance.beta = MagicMock() - mock_client_instance.beta.assistants = MagicMock() - - mock_client_instance.beta.assistants.retrieve = AsyncMock(return_value=AsyncMock(spec=Assistant)) - - retrieved_agent = await OpenAIAssistantAgent.retrieve(id="test_id", api_key="test_api_key", kernel=kernel) - assert retrieved_agent.model_dump( - include={ - "ai_model_id", - "description", - "id", - "instructions", - "name", - "enable_code_interpreter", - "enable_file_search", - "enable_json_response", - "code_interpreter_file_ids", - "temperature", - "top_p", - "vector_store_id", - "metadata", - "max_completion_tokens", - "max_prompt_tokens", - "parallel_tool_calls_enabled", - "truncation_message_count", - } - ) == { - "ai_model_id": "test_model", - 
"description": "test_description", - "id": "test_id", - "instructions": "test_instructions", - "name": "test_name", - "enable_code_interpreter": True, - "enable_file_search": True, - "enable_json_response": True, - "code_interpreter_file_ids": ["file1", "file2"], - "temperature": 0.7, - "top_p": 0.9, - "vector_store_id": "vector_store1", - "metadata": { - "__run_options": { - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - }, - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - mock_client_instance.beta.assistants.retrieve.assert_called_once_with("test_id") - mock_create_def.assert_called_once() - - -@pytest.mark.parametrize( - "exclude_list, client, api_key, should_raise, expected_exception_msg, should_create_client_call", - [ - ([], None, "test_api_key", False, None, True), - ([], AsyncMock(spec=AsyncOpenAI), None, False, None, False), - ([], AsyncMock(spec=AsyncOpenAI), "test_api_key", False, None, False), - ( - ["OPENAI_API_KEY"], - None, - None, - True, - "The OpenAI API key is required, if a client is not provided.", - False, - ), - ], - indirect=["exclude_list"], -) -async def test_retrieve_agent_handling_api_key_and_client( - openai_unit_test_env, - exclude_list, - kernel, - client, - api_key, - should_raise, - expected_exception_msg, - should_create_client_call, -): - is_api_key_present = "OPENAI_API_KEY" not in exclude_list - - with ( - patch.object( - OpenAIAssistantAgent, - "_create_open_ai_settings", - return_value=MagicMock( - chat_model_id="test_model", - api_key=MagicMock( - get_secret_value=MagicMock(return_value="test_api_key" if is_api_key_present else None) - ) - if is_api_key_present - else None, - ), - ), - patch.object( - OpenAIAssistantAgent, - "_create_client", - return_value=AsyncMock(spec=AsyncOpenAI), - ) as mock_create_client, - patch.object( - OpenAIAssistantBase, - "_create_open_ai_assistant_definition", - return_value={ - "ai_model_id": "test_model", - "description": "test_description", - "id": "test_id", - "name": "test_name", - }, - ) as mock_create_def, - ): - if client: - client.beta = MagicMock() - client.beta.assistants = MagicMock() - client.beta.assistants.retrieve = AsyncMock(return_value=MagicMock(spec=Assistant)) - else: - mock_client_instance = mock_create_client.return_value - mock_client_instance.beta = MagicMock() - mock_client_instance.beta.assistants = MagicMock() - mock_client_instance.beta.assistants.retrieve = AsyncMock(return_value=MagicMock(spec=Assistant)) - - if should_raise: - with pytest.raises(AgentInitializationException, match=expected_exception_msg): - await OpenAIAssistantAgent.retrieve(id="test_id", kernel=kernel, api_key=api_key, client=client) - return - - retrieved_agent = await OpenAIAssistantAgent.retrieve( - id="test_id", kernel=kernel, api_key=api_key, client=client - ) - - if should_create_client_call: - mock_create_client.assert_called_once() - else: - mock_create_client.assert_not_called() - - assert retrieved_agent.ai_model_id == "test_model" - mock_create_def.assert_called_once() - if client: - client.beta.assistants.retrieve.assert_called_once_with("test_id") - else: - mock_client_instance.beta.assistants.retrieve.assert_called_once_with("test_id") diff --git a/python/tests/unit/agents/test_open_ai_assistant_base.py b/python/tests/unit/agents/test_open_ai_assistant_base.py deleted file mode 100644 index 332d9a33d30f..000000000000 --- 
a/python/tests/unit/agents/test_open_ai_assistant_base.py +++ /dev/null @@ -1,1776 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from datetime import datetime, timedelta, timezone -from typing import Any -from unittest.mock import AsyncMock, MagicMock, mock_open, patch - -import pytest -from openai import AsyncAzureOpenAI, AsyncOpenAI -from openai.lib.streaming._assistants import AsyncAssistantEventHandler, AsyncAssistantStreamManager -from openai.resources.beta.threads.runs.runs import Run -from openai.types.beta.assistant import Assistant, ToolResources, ToolResourcesCodeInterpreter, ToolResourcesFileSearch -from openai.types.beta.assistant_stream_event import ( - MessageDeltaEvent, - ThreadMessageDelta, - ThreadRunFailed, - ThreadRunRequiresAction, - ThreadRunStepCompleted, - ThreadRunStepDelta, -) -from openai.types.beta.assistant_tool import CodeInterpreterTool, FileSearchTool -from openai.types.beta.function_tool import FunctionDefinition, FunctionTool -from openai.types.beta.threads import ImageFileDelta, ImageFileDeltaBlock, MessageDelta, TextDelta, TextDeltaBlock -from openai.types.beta.threads.annotation import FileCitationAnnotation, FilePathAnnotation -from openai.types.beta.threads.file_citation_annotation import FileCitation -from openai.types.beta.threads.file_citation_delta_annotation import FileCitationDeltaAnnotation -from openai.types.beta.threads.file_path_annotation import FilePath -from openai.types.beta.threads.image_file import ImageFile -from openai.types.beta.threads.image_file_content_block import ImageFileContentBlock -from openai.types.beta.threads.required_action_function_tool_call import Function -from openai.types.beta.threads.required_action_function_tool_call import Function as RequiredActionFunction -from openai.types.beta.threads.run import ( - LastError, - RequiredAction, - RequiredActionFunctionToolCall, - RequiredActionSubmitToolOutputs, - TruncationStrategy, -) -from openai.types.beta.threads.runs import ( - FunctionToolCallDelta, - RunStep, - RunStepDelta, - RunStepDeltaEvent, - ToolCallDeltaObject, - ToolCallsStepDetails, -) -from openai.types.beta.threads.runs.code_interpreter_tool_call import ( - CodeInterpreter, - CodeInterpreterToolCall, -) -from openai.types.beta.threads.runs.code_interpreter_tool_call_delta import CodeInterpreter as CodeInterpreterDelta -from openai.types.beta.threads.runs.code_interpreter_tool_call_delta import CodeInterpreterToolCallDelta -from openai.types.beta.threads.runs.function_tool_call import Function as RunsFunction -from openai.types.beta.threads.runs.function_tool_call import FunctionToolCall -from openai.types.beta.threads.runs.function_tool_call_delta import Function as FunctionForToolCallDelta -from openai.types.beta.threads.runs.message_creation_step_details import MessageCreation, MessageCreationStepDetails -from openai.types.beta.threads.runs.run_step import Usage -from openai.types.beta.threads.text import Text -from openai.types.beta.threads.text_content_block import TextContentBlock -from openai.types.shared.response_format_json_object import ResponseFormatJSONObject - -from semantic_kernel.agents.open_ai.assistant_content_generation import ( - generate_function_call_content, - generate_function_result_content, - generate_message_content, - get_function_call_contents, - get_message_contents, -) -from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent -from semantic_kernel.contents.annotation_content import AnnotationContent -from 
semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.file_reference_content import FileReferenceContent -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.contents.image_content import ImageContent -from semantic_kernel.contents.text_content import TextContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions.agent_exceptions import ( - AgentExecutionException, - AgentFileNotFoundException, - AgentInitializationException, - AgentInvokeException, -) -from semantic_kernel.functions.kernel_function_decorator import kernel_function -from semantic_kernel.functions.kernel_function_from_method import KernelFunctionFromMethod -from semantic_kernel.kernel import Kernel - -# region Test Fixtures - - -@pytest.fixture -def azure_openai_assistant_agent(kernel: Kernel, azure_openai_unit_test_env): - return AzureAssistantAgent( - kernel=kernel, - service_id="test_service", - name="test_name", - instructions="test_instructions", - api_key="test", - metadata={"key": "value"}, - api_version="2024-05-01", - description="test_description", - ai_model_id="test_model", - enable_code_interpreter=True, - enable_file_search=True, - vector_store_id="vector_store1", - file_ids=["file1", "file2"], - temperature=0.7, - top_p=0.9, - enable_json_response=True, - ) - - -@pytest.fixture -def mock_assistant(): - return Assistant( - created_at=123456789, - object="assistant", - metadata={ - "__run_options": { - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - }, - model="test_model", - description="test_description", - id="test_id", - instructions="test_instructions", - name="test_name", - tools=[{"type": "code_interpreter"}, {"type": "file_search"}], - temperature=0.7, - top_p=0.9, - response_format={"type": "json_object"}, - tool_resources=ToolResources( - code_interpreter=ToolResourcesCodeInterpreter(file_ids=["file1", "file2"]), - file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), - ), - ) - - -@pytest.fixture -def mock_thread(): - class MockThread: - id = "test_thread_id" - - return MockThread() - - -@pytest.fixture -def mock_chat_message_content(): - return ChatMessageContent(role=AuthorRole.USER, content="test message", metadata={"key": "value"}) - - -@pytest.fixture -def mock_message(): - class MockMessage: - id = "test_message_id" - role = "user" - - return MockMessage() - - -@pytest.fixture -def mock_thread_messages(): - class MockMessage: - def __init__(self, id, role, content, assistant_id=None): - self.id = id - self.role = role - self.content = content - self.assistant_id = assistant_id - - return [ - MockMessage( - id="test_message_id_1", - role="user", - content=[ - TextContentBlock( - type="text", - text=Text( - value="Hello", - annotations=[ - FilePathAnnotation( - type="file_path", - file_path=FilePath(file_id="test_file_id"), - end_index=5, - start_index=0, - text="Hello", - ), - FileCitationAnnotation( - type="file_citation", - file_citation=FileCitation(file_id="test_file_id", quote="test quote"), - text="Hello", - start_index=0, - end_index=5, - ), - ], - ), - ) - ], - ), - MockMessage( - id="test_message_id_2", - role="assistant", - content=[ - ImageFileContentBlock(type="image_file", 
image_file=ImageFile(file_id="test_file_id", detail="auto")) - ], - assistant_id="assistant_1", - ), - ] - - -@pytest.fixture -def mock_run_failed(): - return Run( - id="run_id", - status="failed", - assistant_id="assistant_id", - created_at=123456789, - instructions="instructions", - model="model", - object="thread.run", - thread_id="thread_id", - tools=[], - parallel_tool_calls=True, - ) - - -@pytest.fixture -def mock_run_required_action(): - return Run( - id="run_id", - status="requires_action", - assistant_id="assistant_id", - created_at=123456789, - instructions="instructions", - model="model", - object="thread.run", - thread_id="thread_id", - tools=[], - required_action=RequiredAction( - type="submit_tool_outputs", - submit_tool_outputs=RequiredActionSubmitToolOutputs( - tool_calls=[ - RequiredActionFunctionToolCall( - id="tool_call_id", - type="function", - function=RequiredActionFunction(arguments="{}", name="function_name"), - ) - ] - ), - ), - parallel_tool_calls=True, - ) - - -@pytest.fixture -def mock_run_completed(): - return Run( - id="run_id", - status="completed", - assistant_id="assistant_id", - created_at=123456789, - instructions="instructions", - model="model", - object="thread.run", - thread_id="thread_id", - tools=[], - required_action=RequiredAction( - type="submit_tool_outputs", - submit_tool_outputs=RequiredActionSubmitToolOutputs( - tool_calls=[ - RequiredActionFunctionToolCall( - id="tool_call_id", type="function", function=Function(arguments="{}", name="function_name") - ) - ] - ), - ), - parallel_tool_calls=True, - ) - - -@pytest.fixture -def mock_run_incomplete(): - return Run( - id="run_id", - status="incomplete", - assistant_id="assistant_id", - created_at=123456789, - instructions="instructions", - model="model", - object="thread.run", - thread_id="thread_id", - tools=[], - required_action=RequiredAction( - type="submit_tool_outputs", - submit_tool_outputs=RequiredActionSubmitToolOutputs( - tool_calls=[ - RequiredActionFunctionToolCall( - id="tool_call_id", type="function", function=Function(arguments="{}", name="function_name") - ) - ] - ), - ), - parallel_tool_calls=True, - ) - - -@pytest.fixture -def mock_run_cancelled(): - return Run( - id="run_id", - status="cancelled", - assistant_id="assistant_id", - created_at=123456789, - instructions="instructions", - model="model", - object="thread.run", - thread_id="thread_id", - tools=[], - required_action=RequiredAction( - type="submit_tool_outputs", - submit_tool_outputs=RequiredActionSubmitToolOutputs( - tool_calls=[ - RequiredActionFunctionToolCall( - id="tool_call_id", type="function", function=Function(arguments="{}", name="function_name") - ) - ] - ), - ), - parallel_tool_calls=True, - ) - - -@pytest.fixture -def mock_function_call_content(): - return FunctionCallContent(id="function_call_id", name="function_name", arguments={}) - - -@pytest.fixture -def mock_run_in_progress(): - class MockRun: - def __init__(self): - self.id = "run_id" - self.status = "requires_action" - self.assistant_id = "assistant_id" - self.created_at = int(datetime.now(timezone.utc).timestamp()) - self.instructions = "instructions" - self.model = "model" - self.object = "run" - self.thread_id = "thread_id" - self.tools = [] - self.poll_count = 0 - self.required_action = RequiredAction( - type="submit_tool_outputs", - submit_tool_outputs=RequiredActionSubmitToolOutputs( - tool_calls=[ - RequiredActionFunctionToolCall( - id="tool_call_id", - type="function", - function=Function(arguments="{}", name="function_name"), - ) - ] - ), - ) 
- self.last_error = None - - def update_status(self): - self.poll_count += 1 - if self.poll_count > 2: - self.status = "completed" - - return MockRun() - - -@pytest.fixture -def mock_run_step_tool_call(): - class MockToolCall: - def __init__(self): - self.type = "code_interpreter" - self.code_interpreter = MagicMock(input="print('Hello, world!')") - - return RunStep( - id="step_id_1", - type="tool_calls", - completed_at=int(datetime.now(timezone.utc).timestamp()), - created_at=int((datetime.now(timezone.utc) - timedelta(minutes=1)).timestamp()), - step_details=ToolCallsStepDetails( - tool_calls=[ - CodeInterpreterToolCall( - type="code_interpreter", - id="tool_call_id", - code_interpreter=CodeInterpreter(input="test code", outputs=[]), - ), - FunctionToolCall( - type="function", - id="tool_call_id", - function=RunsFunction(arguments="{}", name="function_name", outpt="test output"), - ), - ], - type="tool_calls", - ), - assistant_id="assistant_id", - object="thread.run.step", - run_id="run_id", - status="completed", - thread_id="thread_id", - ) - - -@pytest.fixture -def mock_run_step_function_tool_call(): - class MockToolCall: - def __init__(self): - self.type = "function" - - return RunStep( - id="step_id_1", - type="tool_calls", - completed_at=int(datetime.now(timezone.utc).timestamp()), - created_at=int((datetime.now(timezone.utc) - timedelta(minutes=1)).timestamp()), - step_details=ToolCallsStepDetails( - tool_calls=[ - FunctionToolCall( - type="function", - id="tool_call_id", - function=RunsFunction(arguments="{}", name="function_name", outpt="test output"), - ), - ], - type="tool_calls", - ), - assistant_id="assistant_id", - object="thread.run.step", - run_id="run_id", - status="completed", - thread_id="thread_id", - ) - - -@pytest.fixture -def mock_run_step_message_creation(): - class MockMessageCreation: - def __init__(self): - self.message_id = "message_id" - - class MockStepDetails: - def __init__(self): - self.message_creation = MockMessageCreation() - - return RunStep( - id="step_id_2", - type="message_creation", - completed_at=int(datetime.now(timezone.utc).timestamp()), - created_at=int((datetime.now(timezone.utc) - timedelta(minutes=2)).timestamp()), - step_details=MessageCreationStepDetails( - type="message_creation", message_creation=MessageCreation(message_id="test") - ), - assistant_id="assistant_id", - object="thread.run.step", - run_id="run_id", - status="completed", - thread_id="thread_id", - ) - - -class MockEvent: - def __init__(self, event, data): - self.event = event - self.data = data - - -class MockRunData: - def __init__(self, id, status): - self.id = id - self.status = status - # Add other attributes as needed - - -def create_thread_message_delta_mock(): - return ThreadMessageDelta( - data=MessageDeltaEvent( - id="mock_msg_id", - delta=MessageDelta( - content=[ - TextDeltaBlock( - index=0, - type="text", - text=TextDelta( - annotations=[ - FileCitationDeltaAnnotation( - index=0, - type="file_citation", - start_index=1, - end_index=3, - text="annotation", - ) - ], - value="Hello", - ), - ), - ImageFileDeltaBlock( - index=0, - type="image_file", - image_file=ImageFileDelta( - file_id="test_file_id", - detail="auto", - ), - ), - ], - role=None, - ), - object="thread.message.delta", - ), - event="thread.message.delta", - ) - - -def create_thread_run_step_delta_mock(): - function = FunctionForToolCallDelta(name="math-Add", arguments="", output=None) - function_tool_call = FunctionToolCallDelta( - index=0, type="function", id="call_RcvYVzsppjjnUZcC47fAlwTW", 
function=function - ) - code = CodeInterpreterDelta(input="import os") - code_tool_call = CodeInterpreterToolCallDelta( - index=1, type="code_interpreter", id="call_RcvYVzsppjjnUZcC47fAlwTW", code_interpreter=code - ) - - step_details = ToolCallDeltaObject(type="tool_calls", tool_calls=[function_tool_call, code_tool_call]) - delta = RunStepDelta(step_details=step_details) - run_step_delta_event = RunStepDeltaEvent( - id="step_FXzQ44kRmoeHOPUstkEI1UL5", delta=delta, object="thread.run.step.delta" - ) - return ThreadRunStepDelta(data=run_step_delta_event, event="thread.run.step.delta") - - -def mock_thread_requires_action_run(): - return ThreadRunRequiresAction( - data=Run( - id="run_00OwjJnEg2SGJy8sky7ip35P", - assistant_id="asst_wMMAX5F59szE7YHrCKSSgJlE", - cancelled_at=None, - completed_at=None, - created_at=1727798684, - expires_at=1727799284, - failed_at=None, - incomplete_details=None, - instructions="Answer questions about the menu.", - last_error=None, - max_completion_tokens=None, - max_prompt_tokens=None, - metadata={}, - model="gpt-4o-2024-08-06", - object="thread.run", - parallel_tool_calls=True, - required_action=RequiredAction( - submit_tool_outputs=RequiredActionSubmitToolOutputs( - tool_calls=[ - RequiredActionFunctionToolCall( - id="call_OTcZMjhm7WbhFnGkrmUjs68T", - function=Function(arguments="{}", name="menu-get_specials"), - type="function", - ) - ] - ), - type="submit_tool_outputs", - ), - response_format="auto", - started_at=1727798685, - status="requires_action", - thread_id="thread_jR4ZLlUwSrPcsLfdnGyFxi4Z", - tool_choice="auto", - tools=[ - FunctionTool( - function=FunctionDefinition( - name="menu-get_item_price", - description="Provides the price of the requested menu item.", - parameters={ - "type": "object", - "properties": { - "menu_item": {"type": "string", "description": "The name of the menu item."} - }, - "required": ["menu_item"], - }, - strict=False, - ), - type="function", - ), - FunctionTool( - function=FunctionDefinition( - name="menu-get_specials", - description="Provides a list of specials from the menu.", - parameters={"type": "object", "properties": {}, "required": []}, - strict=False, - ), - type="function", - ), - ], - truncation_strategy=TruncationStrategy(type="auto", last_messages=None), - usage=None, - temperature=1.0, - top_p=1.0, - tool_resources={"code_interpreter": {"file_ids": []}}, - ), - event="thread.run.requires_action", - ) - - -def mock_thread_run_step_completed(): - return ThreadRunStepCompleted( - data=RunStep( - id="step_id_2", - type="message_creation", - completed_at=int(datetime.now(timezone.utc).timestamp()), - created_at=int((datetime.now(timezone.utc) - timedelta(minutes=2)).timestamp()), - step_details=MessageCreationStepDetails( - type="message_creation", message_creation=MessageCreation(message_id="test") - ), - assistant_id="assistant_id", - object="thread.run.step", - run_id="run_id", - status="completed", - thread_id="thread_id", - usage=Usage(completion_tokens=10, prompt_tokens=5, total_tokens=15), - ), - event="thread.run.step.completed", - ) - - -def mock_thread_run_step_completed_with_code(): - return ThreadRunStepCompleted( - data=RunStep( - id="step_id_2", - type="message_creation", - completed_at=int(datetime.now(timezone.utc).timestamp()), - created_at=int((datetime.now(timezone.utc) - timedelta(minutes=2)).timestamp()), - step_details=ToolCallsStepDetails( - type="tool_calls", - tool_calls=[ - CodeInterpreterToolCall( - id="tool_call_id", - code_interpreter=CodeInterpreter(input="test code", outputs=[]), - 
type="code_interpreter", - ) - ], - ), - assistant_id="assistant_id", - object="thread.run.step", - run_id="run_id", - status="completed", - thread_id="thread_id", - usage=Usage(completion_tokens=10, prompt_tokens=5, total_tokens=15), - ), - event="thread.run.step.completed", - ) - - -def mock_run_with_last_error(): - return ThreadRunFailed( - data=Run( - id="run_00OwjJnEg2SGJy8sky7ip35P", - assistant_id="asst_wMMAX5F59szE7YHrCKSSgJlE", - cancelled_at=None, - completed_at=None, - created_at=1727798684, - expires_at=1727799284, - failed_at=None, - incomplete_details=None, - instructions="Answer questions about the menu.", - last_error=LastError(code="server_error", message="Server error"), - max_completion_tokens=None, - max_prompt_tokens=None, - metadata={}, - model="gpt-4o-2024-08-06", - object="thread.run", - parallel_tool_calls=True, - required_action=None, - response_format="auto", - started_at=1727798685, - status="failed", - thread_id="thread_jR4ZLlUwSrPcsLfdnGyFxi4Z", - tool_choice="auto", - tools=[], - truncation_strategy=TruncationStrategy(type="auto", last_messages=None), - usage=None, - temperature=1.0, - top_p=1.0, - tool_resources={"code_interpreter": {"file_ids": []}}, - ), - event="thread.run.failed", - ) - - -class MockAsyncIterable: - def __init__(self, items): - self.items = items.copy() - - def __aiter__(self): - self._iter = iter(self.items) - return self - - async def __anext__(self): - try: - return next(self._iter) - except StopIteration: - raise StopAsyncIteration - - -class MockStream: - def __init__(self, events): - self.events = events - - async def __aenter__(self): - return MockAsyncIterable(self.events) - - async def __aexit__(self, exc_type, exc_val, exc_tb): - pass - - -# endregion - -# region Tests - - -async def test_create_assistant( - azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, openai_unit_test_env -): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) - - assistant = await azure_openai_assistant_agent.create_assistant( - ai_model_id="test_model", - description="test_description", - instructions="test_instructions", - name="test_name", - enable_code_interpreter=True, - enable_file_search=True, - vector_store_id="vector_store1", - code_interpreter_file_ids=["file1", "file2"], - metadata={"key": "value"}, - ) - - assert assistant.model == "test_model" - assert assistant.description == "test_description" - assert assistant.id == "test_id" - assert assistant.instructions == "test_instructions" - assert assistant.name == "test_name" - assert assistant.tools == [CodeInterpreterTool(type="code_interpreter"), FileSearchTool(type="file_search")] - assert assistant.temperature == 0.7 - assert assistant.top_p == 0.9 - assert assistant.response_format == ResponseFormatJSONObject(type="json_object") - assert assistant.tool_resources == ToolResources( - code_interpreter=ToolResourcesCodeInterpreter(file_ids=["file1", "file2"]), - file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), - ) - - -async def test_modify_assistant( - azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, openai_unit_test_env -): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = 
AsyncMock(return_value=mock_assistant) - - assistant = await azure_openai_assistant_agent.create_assistant( - ai_model_id="test_model", - description="test_description", - instructions="test_instructions", - name="test_name", - enable_code_interpreter=True, - enable_file_search=True, - vector_store_id="vector_store1", - code_interpreter_file_ids=["file1", "file2"], - metadata={"key": "value"}, - ) - - mock_client.beta.assistants.update = AsyncMock(return_value=mock_assistant) - - assistant = await azure_openai_assistant_agent.modify_assistant( - assistant_id=assistant.id, - ai_model_id="test_model", - description="test_description", - instructions="test_instructions", - name="test_name", - enable_code_interpreter=True, - enable_file_search=True, - vector_store_id="vector_store1", - code_interpreter_file_ids=["file1", "file2"], - metadata={"key": "value"}, - ) - - assert assistant.model == "test_model" - assert assistant.description == "test_description" - assert assistant.id == "test_id" - assert assistant.instructions == "test_instructions" - assert assistant.name == "test_name" - assert assistant.tools == [CodeInterpreterTool(type="code_interpreter"), FileSearchTool(type="file_search")] - assert assistant.temperature == 0.7 - assert assistant.top_p == 0.9 - assert assistant.response_format == ResponseFormatJSONObject(type="json_object") - assert assistant.tool_resources == ToolResources( - code_interpreter=ToolResourcesCodeInterpreter(file_ids=["file1", "file2"]), - file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), - ) - - -async def test_modify_assistant_not_initialized_throws( - azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, openai_unit_test_env -): - with pytest.raises(AgentInitializationException, match="The assistant has not been created."): - _ = await azure_openai_assistant_agent.modify_assistant( - assistant_id="id", - ai_model_id="test_model", - description="test_description", - instructions="test_instructions", - name="test_name", - enable_code_interpreter=True, - enable_file_search=True, - vector_store_id="vector_store1", - code_interpreter_file_ids=["file1", "file2"], - metadata={"key": "value"}, - ) - - -async def test_create_assistant_with_model_attributes( - azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, openai_unit_test_env -): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) - - assistant = await azure_openai_assistant_agent.create_assistant( - ai_model_id="test_model", - description="test_description", - instructions="test_instructions", - name="test_name", - enable_code_interpreter=True, - enable_file_search=True, - vector_store_id="vector_store1", - code_interpreter_file_ids=["file1", "file2"], - metadata={"key": "value"}, - kwargs={"temperature": 0.1}, - ) - - assert assistant.model == "test_model" - assert assistant.description == "test_description" - assert assistant.id == "test_id" - assert assistant.instructions == "test_instructions" - assert assistant.name == "test_name" - assert assistant.tools == [CodeInterpreterTool(type="code_interpreter"), FileSearchTool(type="file_search")] - assert assistant.temperature == 0.7 - assert assistant.top_p == 0.9 - assert assistant.response_format == ResponseFormatJSONObject(type="json_object") - assert assistant.tool_resources == ToolResources( - 
code_interpreter=ToolResourcesCodeInterpreter(file_ids=["file1", "file2"]), - file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), - ) - - -async def test_create_assistant_delete_and_recreate( - azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, openai_unit_test_env -): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) - mock_client.beta.assistants.delete = AsyncMock() - - assistant = await azure_openai_assistant_agent.create_assistant() - - assert assistant is not None - - await azure_openai_assistant_agent.delete() - - assert azure_openai_assistant_agent._is_deleted - - assistant = await azure_openai_assistant_agent.create_assistant() - - assert azure_openai_assistant_agent._is_deleted is False - - -async def test_get_channel_keys(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): - keys = azure_openai_assistant_agent.get_channel_keys() - for key in keys: - assert isinstance(key, str) - - -async def test_create_channel( - azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, mock_thread, openai_unit_test_env -): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) - - mock_client.beta.threads = MagicMock() - mock_client.beta.threads.create = AsyncMock(return_value=mock_thread) - - channel = await azure_openai_assistant_agent.create_channel() - - assert channel is not None - - -async def test_get_assistant_metadata( - azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, openai_unit_test_env -): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) - - assistant = await azure_openai_assistant_agent.create_assistant() - - assistant.metadata is not None - - -async def test_get_agent_tools(azure_openai_assistant_agent, mock_assistant, openai_unit_test_env): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) - - func = KernelFunctionFromMethod(method=kernel_function(lambda x: x**2, name="square"), plugin_name="math") - azure_openai_assistant_agent.kernel.add_function(plugin_name="test", function=func) - - assistant = await azure_openai_assistant_agent.create_assistant() - - assert assistant.tools is not None - assert len(assistant.tools) == 2 - tools = azure_openai_assistant_agent.tools - assert len(tools) == 3 - assert tools[0] == {"type": "code_interpreter"} - assert tools[1] == {"type": "file_search"} - assert tools[2]["type"].startswith("function") - - -async def test_get_assistant_tools_throws_when_no_assistant( - azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env -): - with pytest.raises(AgentInitializationException, match="The assistant has not been created."): - _ = azure_openai_assistant_agent.tools - - -async def test_create_thread(azure_openai_assistant_agent, mock_thread, openai_unit_test_env): 
- with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.threads = MagicMock() - mock_client.beta.threads.create = AsyncMock(return_value=mock_thread) - - thread_id = await azure_openai_assistant_agent.create_thread( - code_interpreter_file_ids=["file1", "file2"], - vector_store_id="vector_store1", - messages=[ - ChatMessageContent(role=AuthorRole.USER, content="test message"), - ], - metadata={"key": "value"}, - ) - - assert thread_id == "test_thread_id" - mock_client.beta.threads.create.assert_called_once() - _, called_kwargs = mock_client.beta.threads.create.call_args - assert "tool_resources" in called_kwargs - assert called_kwargs["tool_resources"] == { - "code_interpreter": {"file_ids": ["file1", "file2"]}, - "file_search": {"vector_store_ids": ["vector_store1"]}, - } - assert "messages" in called_kwargs - assert called_kwargs["messages"] == [{"role": "user", "content": {"type": "text", "text": "test message"}}] - assert "metadata" in called_kwargs - assert called_kwargs["metadata"] == {"key": "value"} - - -async def test_create_thread_throws_with_invalid_role(azure_openai_assistant_agent, mock_thread, openai_unit_test_env): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.threads = MagicMock() - mock_client.beta.threads.create = AsyncMock(return_value=mock_thread) - - with pytest.raises( - AgentExecutionException, - match="Invalid message role `tool`", - ): - _ = await azure_openai_assistant_agent.create_thread( - messages=[ChatMessageContent(role=AuthorRole.TOOL, content="test message")] - ) - - -async def test_delete_thread(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.threads = MagicMock() - mock_client.beta.threads.delete = AsyncMock() - - await azure_openai_assistant_agent.delete_thread("test_thread_id") - - mock_client.beta.threads.delete.assert_called_once_with("test_thread_id") - - -async def test_delete(azure_openai_assistant_agent, mock_assistant, openai_unit_test_env): - azure_openai_assistant_agent.assistant = mock_assistant - - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.delete = AsyncMock() - - azure_openai_assistant_agent._is_deleted = False - result = await azure_openai_assistant_agent.delete() - - assert result == azure_openai_assistant_agent._is_deleted - mock_client.beta.assistants.delete.assert_called_once_with(mock_assistant.id) - - -async def test_add_file(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.files = MagicMock() - mock_client.files.create = AsyncMock(return_value=MagicMock(id="test_file_id")) - - mock_open_file = mock_open(read_data="file_content") - with patch("builtins.open", mock_open_file): - file_id = await azure_openai_assistant_agent.add_file("test_file_path", "assistants") - - assert file_id == "test_file_id" - mock_open_file.assert_called_once_with("test_file_path", "rb") - mock_client.files.create.assert_called_once() - - -async def 
test_add_file_not_found(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.files = MagicMock() - - with patch("builtins.open", mock_open(read_data="file_content")) as mock_open_file: - mock_open_file.side_effect = FileNotFoundError - - with pytest.raises(AgentFileNotFoundException, match="File not found: test_file_path"): - await azure_openai_assistant_agent.add_file("test_file_path", "assistants") - - -async def test_delete_file(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.files = MagicMock() - mock_client.files.delete = AsyncMock() - - await azure_openai_assistant_agent.delete_file("test_file_id") - - mock_client.files.delete.assert_called_once_with("test_file_id") - - -async def test_delete_file_raises_exception(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.files = MagicMock() - mock_client.files.delete = AsyncMock(side_effect=Exception("Deletion failed")) - - with pytest.raises(AgentExecutionException, match="Error deleting file."): - await azure_openai_assistant_agent.delete_file("test_file_id") - - mock_client.files.delete.assert_called_once_with("test_file_id") - - -async def test_create_vector_store(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.vector_stores = MagicMock() - mock_client.beta.vector_stores.create = AsyncMock(return_value=MagicMock(id="test_vector_store_id")) - - vector_store_id = await azure_openai_assistant_agent.create_vector_store(["file_id1", "file_id2"]) - - assert vector_store_id == "test_vector_store_id" - mock_client.beta.vector_stores.create.assert_called_once_with(file_ids=["file_id1", "file_id2"]) - - -async def test_create_vector_store_single_file_id( - azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env -): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.vector_stores = MagicMock() - mock_client.beta.vector_stores.create = AsyncMock(return_value=MagicMock(id="test_vector_store_id")) - - vector_store_id = await azure_openai_assistant_agent.create_vector_store("file_id1") - - assert vector_store_id == "test_vector_store_id" - mock_client.beta.vector_stores.create.assert_called_once_with(file_ids=["file_id1"]) - - -async def test_create_vector_store_raises_exception( - azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env -): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.vector_stores = MagicMock() - mock_client.beta.vector_stores.create = AsyncMock(side_effect=Exception("Creation failed")) - - with pytest.raises(AgentExecutionException, match="Error creating vector store."): - await azure_openai_assistant_agent.create_vector_store("file_id1") - - mock_client.beta.vector_stores.create.assert_called_once_with(file_ids=["file_id1"]) - - -async def test_delete_vector_store(azure_openai_assistant_agent: AzureAssistantAgent, 
openai_unit_test_env): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.vector_stores = MagicMock() - mock_client.beta.vector_stores.delete = AsyncMock() - - await azure_openai_assistant_agent.delete_vector_store("test_vector_store_id") - - mock_client.beta.vector_stores.delete.assert_called_once_with("test_vector_store_id") - - -async def test_delete_vector_store_raises_exception( - azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env -): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.vector_stores = MagicMock() - mock_client.beta.vector_stores.delete = AsyncMock(side_effect=Exception("Deletion failed")) - - with pytest.raises(AgentExecutionException, match="Error deleting vector store."): - await azure_openai_assistant_agent.delete_vector_store("test_vector_store_id") - - mock_client.beta.vector_stores.delete.assert_called_once_with("test_vector_store_id") - - -async def test_add_chat_message( - azure_openai_assistant_agent, mock_chat_message_content, mock_message, openai_unit_test_env -): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.threads = MagicMock() - mock_client.beta.threads.messages = MagicMock() - mock_client.beta.threads.messages.create = AsyncMock(return_value=mock_message) - - result = await azure_openai_assistant_agent.add_chat_message("test_thread_id", mock_chat_message_content) - - assert result.id == "test_message_id" - mock_client.beta.threads.messages.create.assert_called_once_with( - thread_id="test_thread_id", - role="user", - content=[{"type": "text", "text": "test message"}], - ) - - -async def test_add_chat_message_invalid_role( - azure_openai_assistant_agent, mock_chat_message_content, openai_unit_test_env -): - mock_chat_message_content.role = AuthorRole.SYSTEM - - with pytest.raises(AgentExecutionException, match="Invalid message role `system`"): - await azure_openai_assistant_agent.add_chat_message("test_thread_id", mock_chat_message_content) - - -async def test_get_thread_messages( - azure_openai_assistant_agent, mock_thread_messages, mock_assistant, openai_unit_test_env -): - async def mock_list_messages(*args, **kwargs) -> Any: - return MagicMock(data=mock_thread_messages) - - async def mock_retrieve_assistant(*args, **kwargs) -> Any: - return mock_assistant - - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.threads = MagicMock() - mock_client.beta.threads.messages = MagicMock() - mock_client.beta.threads.messages.list = AsyncMock(side_effect=mock_list_messages) - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.retrieve = AsyncMock(side_effect=mock_retrieve_assistant) - - messages = [message async for message in azure_openai_assistant_agent.get_thread_messages("test_thread_id")] - - assert len(messages) == 2 - assert len(messages[0].items) == 3 - assert isinstance(messages[0].items[0], TextContent) - assert isinstance(messages[0].items[1], AnnotationContent) - assert isinstance(messages[0].items[2], AnnotationContent) - assert messages[0].items[0].text == "Hello" - - assert len(messages[1].items) == 1 - assert isinstance(messages[1].items[0], FileReferenceContent) - assert str(messages[1].items[0].file_id) == 
"test_file_id" - - -async def test_invoke( - azure_openai_assistant_agent, - mock_assistant, - mock_run_in_progress, - mock_run_required_action, - mock_chat_message_content, - mock_run_step_tool_call, - mock_run_step_message_creation, - mock_thread_messages, - mock_function_call_content, - openai_unit_test_env, -): - async def mock_poll_run_status(run, thread_id): - run.update_status() - return run - - def mock_get_function_call_contents(run, function_steps): - function_call_content = mock_function_call_content - function_call_content.id = "tool_call_id" # Set expected ID - function_steps[function_call_content.id] = function_call_content - return [function_call_content] - - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.threads = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) - mock_client.beta.threads.runs = MagicMock() - mock_client.beta.threads.runs.create = AsyncMock(return_value=mock_run_in_progress) - mock_client.beta.threads.runs.submit_tool_outputs = AsyncMock() - mock_client.beta.threads.runs.steps = MagicMock() - mock_client.beta.threads.runs.steps.list = AsyncMock( - return_value=MagicMock(data=[mock_run_step_message_creation, mock_run_step_tool_call]) - ) - - azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() - azure_openai_assistant_agent._get_tools = MagicMock(return_value=["tool"]) - azure_openai_assistant_agent._poll_run_status = AsyncMock(side_effect=mock_poll_run_status) - azure_openai_assistant_agent._invoke_function_calls = AsyncMock() - azure_openai_assistant_agent._format_tool_outputs = MagicMock( - return_value=[{"tool_call_id": "id", "output": "output"}] - ) - azure_openai_assistant_agent._retrieve_message = AsyncMock(return_value=mock_thread_messages[0]) - - with patch( - "semantic_kernel.agents.open_ai.assistant_content_generation.get_function_call_contents", - side_effect=mock_get_function_call_contents, - ): - _ = [message async for message in azure_openai_assistant_agent.invoke("thread_id")] - - -async def test_invoke_order( - azure_openai_assistant_agent, - mock_assistant, - mock_run_required_action, - mock_run_step_function_tool_call, - mock_run_step_message_creation, - mock_thread_messages, - mock_function_call_content, -): - poll_count = 0 - - async def mock_poll_run_status(run, thread_id): - nonlocal poll_count - if run.status == "requires_action": - if poll_count == 0: - pass - else: - run.status = "completed" - poll_count += 1 - return run - - def mock_get_function_call_contents(run, function_steps): - function_call_content = mock_function_call_content - function_call_content.id = "tool_call_id" - function_steps[function_call_content.id] = function_call_content - return [function_call_content] - - azure_openai_assistant_agent.assistant = mock_assistant - azure_openai_assistant_agent._poll_run_status = AsyncMock(side_effect=mock_poll_run_status) - azure_openai_assistant_agent._retrieve_message = AsyncMock(return_value=mock_thread_messages[0]) - - with patch( - "semantic_kernel.agents.open_ai.assistant_content_generation.get_function_call_contents", - side_effect=mock_get_function_call_contents, - ): - client = azure_openai_assistant_agent.client - - with patch.object(client.beta.threads.runs, "create", new_callable=AsyncMock) as mock_runs_create: - mock_runs_create.return_value = mock_run_required_action - - with ( - 
patch.object(client.beta.threads.runs, "submit_tool_outputs", new_callable=AsyncMock), - patch.object(client.beta.threads.runs.steps, "list", new_callable=AsyncMock) as mock_steps_list, - ): - mock_steps_list.return_value = MagicMock( - data=[mock_run_step_message_creation, mock_run_step_function_tool_call] - ) - - messages = [] - async for _, content in azure_openai_assistant_agent._invoke_internal("thread_id"): - messages.append(content) - - assert len(messages) == 3 - assert isinstance(messages[0].items[0], FunctionCallContent) - assert isinstance(messages[1].items[0], FunctionResultContent) - assert isinstance(messages[2].items[0], TextContent) - - -async def test_invoke_stream( - azure_openai_assistant_agent, - mock_assistant, - mock_thread_messages, - azure_openai_unit_test_env, -): - events = [ - MockEvent("thread.run.created", MockRunData(id="run_1", status="queued")), - MockEvent("thread.run.in_progress", MockRunData(id="run_1", status="in_progress")), - create_thread_message_delta_mock(), - mock_thread_run_step_completed(), - MockEvent("thread.run.completed", MockRunData(id="run_1", status="completed")), - mock_thread_requires_action_run(), - ] - - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.threads = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) - - mock_client.beta.threads.runs = MagicMock() - mock_client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) - - mock_client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) - - azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() - - messages = [] - async for content in azure_openai_assistant_agent.invoke_stream("thread_id", messages=messages): - assert content is not None - - assert len(messages) > 0 - - -async def test_invoke_stream_with_function_call( - azure_openai_assistant_agent, - mock_assistant, - mock_thread_messages, - azure_openai_unit_test_env, -): - events = [create_thread_run_step_delta_mock()] - - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.threads = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) - - mock_client.beta.threads.runs = MagicMock() - mock_client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) - - mock_client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) - - azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() - - async for content in azure_openai_assistant_agent.invoke_stream("thread_id"): - assert content is not None - - -async def test_invoke_stream_code_output( - azure_openai_assistant_agent, - mock_assistant, - azure_openai_unit_test_env, -): - events = [mock_thread_run_step_completed_with_code()] - - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.threads = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) - - mock_client.beta.threads.runs = MagicMock() - mock_client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) - - 
azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() - - messages = [] - async for content in azure_openai_assistant_agent.invoke_stream("thread_id", messages=messages): - assert content is not None - assert content.metadata.get("code") is True - - -async def test_invoke_stream_requires_action( - azure_openai_assistant_agent, mock_assistant, mock_thread_messages, azure_openai_unit_test_env -): - events = [ - mock_thread_requires_action_run(), - ] - - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.threads = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) - - mock_client.beta.threads.runs = MagicMock() - mock_client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) - - mock_client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) - - azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() - - messages = [] - async for content in azure_openai_assistant_agent.invoke_stream("thread_id", messages=messages): - assert content is not None - - assert len(messages) > 0 - - -async def test_invoke_stream_throws_exception( - azure_openai_assistant_agent, mock_assistant, mock_thread_messages, azure_openai_unit_test_env -): - events = [ - mock_run_with_last_error(), - ] - - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.threads = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) - - mock_client.beta.threads.runs = MagicMock() - mock_client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) - - mock_client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) - - azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() - - with pytest.raises(AgentInvokeException): - async for _ in azure_openai_assistant_agent.invoke_stream("thread_id"): - pass - - -async def test_invoke_assistant_not_initialized_throws(azure_openai_assistant_agent, openai_unit_test_env): - with pytest.raises(AgentInitializationException, match="The assistant has not been created."): - _ = [message async for message in azure_openai_assistant_agent.invoke("thread_id")] - - -async def test_invoke_agent_deleted_throws(azure_openai_assistant_agent, mock_assistant, openai_unit_test_env): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.threads = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) - - azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() - azure_openai_assistant_agent._is_deleted = True - - with pytest.raises(AgentInitializationException, match="The assistant has been deleted."): - _ = [message async for message in azure_openai_assistant_agent.invoke("thread_id")] - - -async def test_invoke_raises_error( - azure_openai_assistant_agent, - mock_assistant, - mock_run_in_progress, - mock_run_step_tool_call, - mock_run_step_message_creation, - openai_unit_test_env, -): - async def mock_poll_run_status(run, thread_id): - run.status = "failed" - 
return run - - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.threads = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) - mock_client.beta.threads.runs = MagicMock() - mock_client.beta.threads.runs.create = AsyncMock(return_value=mock_run_in_progress) - mock_client.beta.threads.runs.submit_tool_outputs = AsyncMock() - mock_client.beta.threads.runs.steps = MagicMock() - mock_client.beta.threads.runs.steps.list = AsyncMock( - return_value=MagicMock(data=[mock_run_step_tool_call, mock_run_step_message_creation]) - ) - - azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() - azure_openai_assistant_agent._get_tools = MagicMock(return_value=["tool"]) - azure_openai_assistant_agent._poll_run_status = AsyncMock(side_effect=mock_poll_run_status) - - with pytest.raises( - AgentInvokeException, match="Run failed with status: `failed` for agent `test_name` and thread `thread_id`" - ): - _ = [message async for message in azure_openai_assistant_agent.invoke("thread_id")] - - -@pytest.fixture -def mock_streaming_assistant_stream_manager() -> AsyncAssistantStreamManager[AsyncAssistantEventHandler]: - assistant_event_handler = AsyncAssistantEventHandler() - - mock_stream = AsyncMock() - mock_stream.__aiter__.return_value = [assistant_event_handler] - - mock_manager = AsyncMock(spec=AsyncAssistantStreamManager) - mock_manager.__aenter__.return_value = mock_stream - mock_manager.__aexit__.return_value = None - - return mock_manager - - -def test_format_tool_outputs(azure_openai_assistant_agent, openai_unit_test_env): - chat_history = ChatHistory() - fcc = FunctionCallContent( - id="test", name="test-function", arguments='{"input": "world"}', metadata={"test": "test"} - ) - frc = FunctionResultContent.from_function_call_content_and_result(fcc, 123, {"test2": "test2"}) - chat_history.add_message(message=frc.to_chat_message_content()) - - tool_outputs = azure_openai_assistant_agent._format_tool_outputs([fcc], chat_history) - assert tool_outputs[0] == {"tool_call_id": "test", "output": "123"} - - -async def test_invoke_function_calls(azure_openai_assistant_agent, openai_unit_test_env): - chat_history = ChatHistory() - fcc = FunctionCallContent( - id="test", name="test-function", arguments='{"input": "world"}', metadata={"test": "test"} - ) - - with patch( - "semantic_kernel.kernel.Kernel.invoke_function_call", new_callable=AsyncMock - ) as mock_invoke_function_call: - mock_invoke_function_call.return_value = "mocked_result" - results = await azure_openai_assistant_agent._invoke_function_calls([fcc], chat_history) - assert results == ["mocked_result"] - mock_invoke_function_call.assert_called_once_with(function_call=fcc, chat_history=chat_history) - - -def test_get_function_call_contents(azure_openai_assistant_agent, mock_run_required_action, openai_unit_test_env): - result = get_function_call_contents(run=mock_run_required_action, function_steps={}) - assert result is not None - - -def test_get_function_call_contents_no_action_required( - azure_openai_assistant_agent, mock_run_required_action, openai_unit_test_env -): - mock_run_required_action.required_action = None - result = get_function_call_contents(run=mock_run_required_action, function_steps={}) - assert result == [] - - -async def test_get_tools(azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, 
openai_unit_test_env): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.threads = MagicMock() - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) - - azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() - tools = azure_openai_assistant_agent._get_tools() - assert tools is not None - - -async def test_get_tools_no_assistant_returns_empty_list( - azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env -): - with pytest.raises(AgentInitializationException, match="The assistant has not been created."): - _ = azure_openai_assistant_agent._get_tools() - - -def test_generate_message_content(azure_openai_assistant_agent, mock_thread_messages, openai_unit_test_env): - for message in mock_thread_messages: - result = generate_message_content(assistant_name="test", message=message) - assert result is not None - - -def test_check_if_deleted_throws(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): - azure_openai_assistant_agent._is_deleted = True - with pytest.raises(AgentInitializationException, match="The assistant has been deleted."): - azure_openai_assistant_agent._check_if_deleted() - - -def test_get_message_contents(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): - message = ChatMessageContent(role=AuthorRole.USER, content="test message") - message.items = [ - ImageContent(role=AuthorRole.ASSISTANT, content="test message", uri="http://image.url"), - TextContent(role=AuthorRole.ASSISTANT, text="test message"), - FileReferenceContent(role=AuthorRole.ASSISTANT, file_id="test_file_id"), - TextContent(role=AuthorRole.USER, text="test message"), - FunctionResultContent(role=AuthorRole.ASSISTANT, result=["test result"], id="test_id"), - ] - - result = get_message_contents(message) - assert result is not None - - -async def test_retrieve_message(azure_openai_assistant_agent, mock_thread_messages, openai_unit_test_env): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.assistants = MagicMock() - - mock_client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) - - message = await azure_openai_assistant_agent._retrieve_message( - thread_id="test_thread_id", message_id="test_message_id" - ) - assert message is not None - - -async def test_retrieve_message_fails_polls_again( - azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env -): - with ( - patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client, - patch("semantic_kernel.agents.open_ai.open_ai_assistant_agent.logger", autospec=True), - ): - mock_client.beta = MagicMock() - mock_client.beta.assistants = MagicMock() - - mock_client.beta.threads.messages.retrieve = AsyncMock(side_effect=Exception("Unable to retrieve message")) - - message = await azure_openai_assistant_agent._retrieve_message( - thread_id="test_thread_id", message_id="test_message_id" - ) - assert message is None - - -async def test_poll_run_status( - azure_openai_assistant_agent, mock_run_required_action, mock_run_completed, openai_unit_test_env -): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.assistants = MagicMock() - - 
mock_client.beta.threads.runs.retrieve = AsyncMock(return_value=mock_run_completed) - - # Test successful polling - run = await azure_openai_assistant_agent._poll_run_status( - run=mock_run_required_action, thread_id="test_thread_id" - ) - assert run.status == "completed", f"Expected status 'completed', but got '{run.status}'" - - # Test timeout scenario - mock_client.beta.threads.runs.retrieve = AsyncMock(side_effect=TimeoutError) - azure_openai_assistant_agent.polling_options.run_polling_timeout = timedelta(milliseconds=10) - - with pytest.raises(AgentInvokeException) as excinfo: - await azure_openai_assistant_agent._poll_run_status( - run=mock_run_required_action, thread_id="test_thread_id" - ) - - assert "Polling timed out" in str(excinfo.value) - assert f"after waiting {azure_openai_assistant_agent.polling_options.run_polling_timeout}" in str(excinfo.value) - - -async def test_poll_run_status_incomplete( - azure_openai_assistant_agent, mock_run_required_action, mock_run_incomplete, openai_unit_test_env -): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.assistants = MagicMock() - - mock_client.beta.threads.runs.retrieve = AsyncMock(return_value=mock_run_incomplete) - - run = await azure_openai_assistant_agent._poll_run_status( - run=mock_run_required_action, thread_id="test_thread_id" - ) - - assert run.status in azure_openai_assistant_agent.error_message_states - - -async def test_poll_run_status_cancelled( - azure_openai_assistant_agent, mock_run_required_action, mock_run_cancelled, openai_unit_test_env -): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.assistants = MagicMock() - - mock_client.beta.threads.runs.retrieve = AsyncMock(return_value=mock_run_cancelled) - - run = await azure_openai_assistant_agent._poll_run_status( - run=mock_run_required_action, thread_id="test_thread_id" - ) - - assert run.status in azure_openai_assistant_agent.error_message_states - - -async def test_poll_run_status_exception_polls_again( - azure_openai_assistant_agent, mock_run_required_action, mock_run_completed, openai_unit_test_env -): - with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: - mock_client.beta = MagicMock() - mock_client.beta.assistants = MagicMock() - - mock_client.beta.threads.runs.retrieve = AsyncMock( - side_effect=[Exception("Failed to retrieve message"), mock_run_completed] - ) - - run = await azure_openai_assistant_agent._poll_run_status( - run=mock_run_required_action, thread_id="test_thread_id" - ) - assert run.status == "requires_action" - - -def test_generate_function_result_content( - azure_openai_assistant_agent, mock_function_call_content, openai_unit_test_env -): - mock_tool_call = RequiredActionFunctionToolCall( - id="tool_call_id", type="function", function=Function(arguments="{}", name="function_name", output="result") - ) - - message = generate_function_result_content( - agent_name="test", function_step=mock_function_call_content, tool_call=mock_tool_call - ) - assert message is not None - assert isinstance(message.items[0], FunctionResultContent) - - -def test_generate_function_call_content(azure_openai_assistant_agent, mock_function_call_content, openai_unit_test_env): - message = generate_function_call_content(agent_name="test", fccs=[mock_function_call_content]) - assert message is not None - assert 
isinstance(message, ChatMessageContent) - assert isinstance(message.items[0], FunctionCallContent) - - -def test_merge_options(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): - merged_options = azure_openai_assistant_agent._merge_options( - ai_model_id="model-id", - enable_json_response=True, - enable_code_interpreter=True, - enable_file_search=True, - max_completion_tokens=150, - parallel_tool_calls_enabled=True, - ) - - expected_options = { - "ai_model_id": "model-id", - "enable_code_interpreter": True, - "enable_file_search": True, - "enable_json_response": True, - "max_completion_tokens": 150, - "max_prompt_tokens": None, - "parallel_tool_calls_enabled": True, - "truncation_message_count": None, - "temperature": 0.7, - "top_p": 0.9, - "metadata": {}, - } - - assert merged_options == expected_options, f"Expected {expected_options}, but got {merged_options}" - - -def test_generate_options(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): - options = azure_openai_assistant_agent._generate_options( - ai_model_id="model-id", max_completion_tokens=150, metadata={"key1": "value1"} - ) - - expected_options = { - "max_completion_tokens": 150, - "max_prompt_tokens": None, - "model": "model-id", - "top_p": 0.9, - "response_format": None, - "temperature": 0.7, - "truncation_strategy": None, - "metadata": {"key1": "value1"}, - } - - assert options == expected_options, f"Expected {expected_options}, but got {options}" - - -def test_generate_function_call_content_sets_assistant_role(): - fcc1 = FunctionCallContent(name="function_name1", arguments={"input": "some input"}) - fcc2 = FunctionCallContent(name="function_name2", arguments={"input": "other input"}) - agent_name = "TestAgent" - - result = generate_function_call_content(agent_name=agent_name, fccs=[fcc1, fcc2]) - - assert result.role == AuthorRole.ASSISTANT - assert result.name == agent_name - assert len(result.items) == 2 - assert isinstance(result.items[0], FunctionCallContent) - assert isinstance(result.items[1], FunctionCallContent) - assert result.items[0].name == "function_name1" - assert result.items[1].name == "function_name2" - - -# endregion diff --git a/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_chat_completion.py b/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_chat_completion.py index 05fa5773729a..8b9df3f6b77f 100644 --- a/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_chat_completion.py +++ b/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_chat_completion.py @@ -109,6 +109,7 @@ def test_prompt_execution_settings_class(azure_ai_inference_unit_test_env, model async def test_azure_ai_inference_chat_completion( mock_complete, azure_ai_inference_service, + model_id, chat_history: ChatHistory, mock_azure_ai_inference_chat_completion_response, ) -> None: @@ -123,6 +124,7 @@ async def test_azure_ai_inference_chat_completion( mock_complete.assert_awaited_once_with( messages=[UserMessage(content=user_message_content)], + model=model_id, model_extras=None, **settings.prepare_settings_dict(), ) @@ -140,6 +142,7 @@ async def test_azure_ai_inference_chat_completion( async def test_azure_ai_inference_chat_completion_with_standard_parameters( mock_complete, azure_ai_inference_service, + model_id, chat_history: ChatHistory, mock_azure_ai_inference_chat_completion_response, ) -> None: @@ -163,6 +166,7 @@ async def 
test_azure_ai_inference_chat_completion_with_standard_parameters( mock_complete.assert_awaited_once_with( messages=[UserMessage(content=user_message_content)], + model=model_id, model_extras=None, frequency_penalty=settings.frequency_penalty, max_tokens=settings.max_tokens, @@ -186,6 +190,7 @@ async def test_azure_ai_inference_chat_completion_with_standard_parameters( async def test_azure_ai_inference_chat_completion_with_extra_parameters( mock_complete, azure_ai_inference_service, + model_id, chat_history: ChatHistory, mock_azure_ai_inference_chat_completion_response, ) -> None: @@ -202,6 +207,7 @@ async def test_azure_ai_inference_chat_completion_with_extra_parameters( mock_complete.assert_awaited_once_with( messages=[UserMessage(content=user_message_content)], + model=model_id, model_extras=settings.extra_parameters, **settings.prepare_settings_dict(), ) @@ -296,6 +302,7 @@ async def test_azure_ai_inference_chat_completion_with_function_choice_behavior( async def test_azure_ai_inference_chat_completion_with_function_choice_behavior_no_tool_call( mock_complete, azure_ai_inference_service, + model_id, kernel, chat_history: ChatHistory, mock_azure_ai_inference_chat_completion_response, @@ -319,6 +326,7 @@ async def test_azure_ai_inference_chat_completion_with_function_choice_behavior_ mock_complete.assert_awaited_once_with( messages=[UserMessage(content=user_message_content)], + model=model_id, model_extras=None, **settings.prepare_settings_dict(), ) @@ -342,6 +350,7 @@ async def test_azure_ai_inference_chat_completion_with_function_choice_behavior_ async def test_azure_ai_inference_streaming_chat_completion( mock_complete, azure_ai_inference_service, + model_id, chat_history: ChatHistory, mock_azure_ai_inference_streaming_chat_completion_response, ) -> None: @@ -360,6 +369,7 @@ async def test_azure_ai_inference_streaming_chat_completion( mock_complete.assert_awaited_once_with( stream=True, messages=[UserMessage(content=user_message_content)], + model=model_id, model_extras=None, **settings.prepare_settings_dict(), ) @@ -374,6 +384,7 @@ async def test_azure_ai_inference_streaming_chat_completion( async def test_azure_ai_inference_chat_streaming_completion_with_standard_parameters( mock_complete, azure_ai_inference_service, + model_id, chat_history: ChatHistory, mock_azure_ai_inference_streaming_chat_completion_response, ) -> None: @@ -401,6 +412,7 @@ async def test_azure_ai_inference_chat_streaming_completion_with_standard_parame mock_complete.assert_awaited_once_with( stream=True, messages=[UserMessage(content=user_message_content)], + model=model_id, model_extras=None, frequency_penalty=settings.frequency_penalty, max_tokens=settings.max_tokens, @@ -421,6 +433,7 @@ async def test_azure_ai_inference_chat_streaming_completion_with_standard_parame async def test_azure_ai_inference_streaming_chat_completion_with_extra_parameters( mock_complete, azure_ai_inference_service, + model_id, chat_history: ChatHistory, mock_azure_ai_inference_streaming_chat_completion_response, ) -> None: @@ -441,6 +454,7 @@ async def test_azure_ai_inference_streaming_chat_completion_with_extra_parameter mock_complete.assert_awaited_once_with( stream=True, messages=[UserMessage(content=user_message_content)], + model=model_id, model_extras=settings.extra_parameters, **settings.prepare_settings_dict(), ) @@ -546,6 +560,7 @@ async def test_azure_ai_inference_streaming_chat_completion_with_function_choice async def test_azure_ai_inference_streaming_chat_completion_with_function_choice_behavior_no_tool_call( 
mock_complete, azure_ai_inference_service, + model_id, kernel, chat_history: ChatHistory, mock_azure_ai_inference_streaming_chat_completion_response, @@ -573,6 +588,7 @@ async def test_azure_ai_inference_streaming_chat_completion_with_function_choice mock_complete.assert_awaited_once_with( stream=True, messages=[UserMessage(content=user_message_content)], + model=model_id, model_extras=None, **settings.prepare_settings_dict(), ) diff --git a/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_text_embedding.py b/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_text_embedding.py index c17510fec13b..a0abdd573b9e 100644 --- a/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_text_embedding.py +++ b/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_text_embedding.py @@ -83,6 +83,7 @@ def test_azure_ai_inference_text_embedding_init_with_empty_endpoint(azure_ai_inf async def test_azure_ai_inference_text_embedding( mock_embed, azure_ai_inference_service, + model_id, ) -> None: """Test text embedding generation of AzureAIInferenceTextEmbedding without settings""" texts = ["hello", "world"] @@ -90,6 +91,7 @@ async def test_azure_ai_inference_text_embedding( mock_embed.assert_awaited_once_with( input=texts, + model=model_id, model_extras=None, dimensions=None, encoding_format=None, @@ -106,6 +108,7 @@ async def test_azure_ai_inference_text_embedding( async def test_azure_ai_inference_text_embedding_with_standard_settings( mock_embed, azure_ai_inference_service, + model_id, ) -> None: """Test text embedding generation of AzureAIInferenceTextEmbedding with standard settings""" texts = ["hello", "world"] @@ -116,6 +119,7 @@ async def test_azure_ai_inference_text_embedding_with_standard_settings( mock_embed.assert_awaited_once_with( input=texts, + model=model_id, model_extras=None, dimensions=settings.dimensions, encoding_format=settings.encoding_format, @@ -132,6 +136,7 @@ async def test_azure_ai_inference_text_embedding_with_standard_settings( async def test_azure_ai_inference_text_embedding_with_extra_parameters( mock_embed, azure_ai_inference_service, + model_id, ) -> None: """Test text embedding generation of AzureAIInferenceTextEmbedding with extra parameters""" texts = ["hello", "world"] @@ -141,6 +146,7 @@ async def test_azure_ai_inference_text_embedding_with_extra_parameters( mock_embed.assert_awaited_once_with( input=texts, + model=model_id, model_extras=extra_parameters, dimensions=settings.dimensions, encoding_format=settings.encoding_format, diff --git a/python/tests/unit/connectors/ai/bedrock/services/test_bedrock_model_provider_utils.py b/python/tests/unit/connectors/ai/bedrock/services/test_bedrock_model_provider_utils.py index cfd588c74e61..4a5728be554c 100644 --- a/python/tests/unit/connectors/ai/bedrock/services/test_bedrock_model_provider_utils.py +++ b/python/tests/unit/connectors/ai/bedrock/services/test_bedrock_model_provider_utils.py @@ -1,6 +1,11 @@ # Copyright (c) Microsoft. All rights reserved. 
+import pytest + from semantic_kernel.connectors.ai.bedrock.bedrock_prompt_execution_settings import BedrockChatPromptExecutionSettings +from semantic_kernel.connectors.ai.bedrock.services.model_provider.bedrock_model_provider import ( + BedrockModelProvider, +) from semantic_kernel.connectors.ai.bedrock.services.model_provider.utils import ( remove_none_recursively, update_settings_from_function_choice_configuration, @@ -124,3 +129,20 @@ def test_update_settings_from_function_choice_configuration_required_with_more_t assert "any" in settings.tool_choice assert len(settings.tools) == 2 + + +def test_inference_profile_with_bedrock_model() -> None: + """Test the BedrockModelProvider class returns the correct model for a given inference profile.""" + + us_amazon_inference_profile = "us.amazon.nova-lite-v1:0" + assert BedrockModelProvider.to_model_provider(us_amazon_inference_profile) == BedrockModelProvider.AMAZON + + us_anthropic_inference_profile = "us.anthropic.claude-3-sonnet-20240229-v1:0" + assert BedrockModelProvider.to_model_provider(us_anthropic_inference_profile) == BedrockModelProvider.ANTHROPIC + + eu_meta_inference_profile = "eu.meta.llama3-2-3b-instruct-v1:0" + assert BedrockModelProvider.to_model_provider(eu_meta_inference_profile) == BedrockModelProvider.META + + unknown_inference_profile = "unknown" + with pytest.raises(ValueError, match="Model ID unknown does not contain a valid model provider name."): + BedrockModelProvider.to_model_provider(unknown_inference_profile) diff --git a/python/tests/unit/connectors/ai/hugging_face/test_hf_text_completions.py b/python/tests/unit/connectors/ai/hugging_face/test_hf_text_completions.py index ddf8d48caa43..aa2a11ea0556 100644 --- a/python/tests/unit/connectors/ai/hugging_face/test_hf_text_completions.py +++ b/python/tests/unit/connectors/ai/hugging_face/test_hf_text_completions.py @@ -4,7 +4,7 @@ from unittest.mock import MagicMock, Mock, patch import pytest -from transformers import TextIteratorStreamer +from transformers import AutoTokenizer, TextIteratorStreamer from semantic_kernel.connectors.ai.hugging_face.services.hf_text_completion import HuggingFaceTextCompletion from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings @@ -135,6 +135,10 @@ async def test_text_completion_streaming(model_name, task, input_str): "semantic_kernel.connectors.ai.hugging_face.services.hf_text_completion.Thread", side_effect=Mock(spec=Thread), ), + patch( + "semantic_kernel.connectors.ai.hugging_face.services.hf_text_completion.AutoTokenizer", + side_effect=Mock(spec=AutoTokenizer), + ), patch( "semantic_kernel.connectors.ai.hugging_face.services.hf_text_completion.TextIteratorStreamer", return_value=mock_streamer, diff --git a/python/tests/unit/connectors/ai/onnx/services/test_onnx_chat_completion.py b/python/tests/unit/connectors/ai/onnx/services/test_onnx_chat_completion.py index 30c9573fef6c..991ec195fd1e 100644 --- a/python/tests/unit/connectors/ai/onnx/services/test_onnx_chat_completion.py +++ b/python/tests/unit/connectors/ai/onnx/services/test_onnx_chat_completion.py @@ -11,6 +11,15 @@ from semantic_kernel.kernel import Kernel from tests.unit.connectors.ai.onnx.conftest import gen_ai_config, gen_ai_config_vision +try: + import onnxruntime_genai # noqa: F401 + + ready = True +except ImportError: + ready = False + +pytestmark = pytest.mark.skipif(not ready, reason="ONNX Runtime is not installed.") + @patch("builtins.open", new_callable=mock_open, read_data=json.dumps(gen_ai_config)) 
@patch("onnxruntime_genai.Model") diff --git a/python/tests/unit/connectors/ai/onnx/services/test_onnx_text_completion.py b/python/tests/unit/connectors/ai/onnx/services/test_onnx_text_completion.py index 09435f02667f..506ccaa175f4 100644 --- a/python/tests/unit/connectors/ai/onnx/services/test_onnx_text_completion.py +++ b/python/tests/unit/connectors/ai/onnx/services/test_onnx_text_completion.py @@ -12,6 +12,15 @@ from semantic_kernel.exceptions import ServiceInitializationError from tests.unit.connectors.ai.onnx.conftest import gen_ai_config +try: + import onnxruntime_genai # noqa: F401 + + ready = True +except ImportError: + ready = False + +pytestmark = pytest.mark.skipif(not ready, reason="ONNX Runtime is not installed.") + @patch("builtins.open", new_callable=mock_open, read_data=json.dumps(gen_ai_config)) @patch("onnxruntime_genai.Model") diff --git a/python/tests/unit/connectors/ai/open_ai/services/test_azure_chat_completion.py b/python/tests/unit/connectors/ai/open_ai/services/test_azure_chat_completion.py index c512a38f1b10..1b7d0c13bafe 100644 --- a/python/tests/unit/connectors/ai/open_ai/services/test_azure_chat_completion.py +++ b/python/tests/unit/connectors/ai/open_ai/services/test_azure_chat_completion.py @@ -669,7 +669,6 @@ def test_tool(self, key: str): model=azure_openai_unit_test_env["AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"], stream=False, messages=azure_chat_completion._prepare_chat_history_for_request(orig_chat_history), - parallel_tool_calls=True, tools=[ { "type": "function", diff --git a/python/tests/unit/connectors/ai/open_ai/services/test_openai_chat_completion_base.py b/python/tests/unit/connectors/ai/open_ai/services/test_openai_chat_completion_base.py index c0b1000ae159..76de816fc65f 100644 --- a/python/tests/unit/connectors/ai/open_ai/services/test_openai_chat_completion_base.py +++ b/python/tests/unit/connectors/ai/open_ai/services/test_openai_chat_completion_base.py @@ -265,7 +265,6 @@ def test_tool(self, key: str): model=openai_unit_test_env["OPENAI_CHAT_MODEL_ID"], stream=False, messages=openai_chat_completion._prepare_chat_history_for_request(orig_chat_history), - parallel_tool_calls=True, tools=[ { "type": "function", @@ -757,7 +756,6 @@ def test_tool(self, key: str): mock_create.assert_awaited_once_with( model=openai_unit_test_env["OPENAI_CHAT_MODEL_ID"], stream=True, - parallel_tool_calls=True, tools=[ { "type": "function", diff --git a/python/tests/unit/connectors/ai/open_ai/services/test_openai_realtime.py b/python/tests/unit/connectors/ai/open_ai/services/test_openai_realtime.py new file mode 100644 index 000000000000..a341f2bb5c4c --- /dev/null +++ b/python/tests/unit/connectors/ai/open_ai/services/test_openai_realtime.py @@ -0,0 +1,656 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +from collections.abc import AsyncIterable +from typing import Any +from unittest.mock import AsyncMock, patch + +from aiortc import AudioStreamTrack +from openai.resources.beta.realtime.realtime import AsyncRealtimeConnection, AsyncRealtimeConnectionManager +from openai.types.beta.realtime import ( + ConversationItem, + ConversationItemContent, + ConversationItemCreatedEvent, + ConversationItemCreateEvent, + ConversationItemDeletedEvent, + ConversationItemDeleteEvent, + ConversationItemTruncatedEvent, + ConversationItemTruncateEvent, + ErrorEvent, + InputAudioBufferAppendEvent, + InputAudioBufferClearedEvent, + InputAudioBufferClearEvent, + InputAudioBufferCommitEvent, + InputAudioBufferCommittedEvent, + InputAudioBufferSpeechStartedEvent, + RealtimeResponse, + RealtimeServerEvent, + ResponseAudioDeltaEvent, + ResponseAudioDoneEvent, + ResponseAudioTranscriptDeltaEvent, + ResponseCancelEvent, + ResponseCreatedEvent, + ResponseCreateEvent, + ResponseFunctionCallArgumentsDeltaEvent, + ResponseFunctionCallArgumentsDoneEvent, + ResponseOutputItemAddedEvent, + Session, + SessionCreatedEvent, + SessionUpdatedEvent, + SessionUpdateEvent, +) +from pytest import fixture, mark, param, raises + +from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.function_choice_type import FunctionChoiceType +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_realtime_execution_settings import ( + OpenAIRealtimeExecutionSettings, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_realtime import ( + ListenEvents, + OpenAIRealtimeWebRTC, + OpenAIRealtimeWebsocket, + SendEvents, + _create_openai_realtime_client_event, + update_settings_from_function_call_configuration, +) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.contents.audio_content import AudioContent +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.contents.image_content import ImageContent +from semantic_kernel.contents.realtime_events import ( + RealtimeAudioEvent, + RealtimeEvent, + RealtimeFunctionCallEvent, + RealtimeFunctionResultEvent, + RealtimeTextEvent, +) +from semantic_kernel.contents.text_content import TextContent +from semantic_kernel.exceptions.content_exceptions import ContentException +from semantic_kernel.functions import kernel_function +from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata + +events = [ + SessionCreatedEvent(type=ListenEvents.SESSION_CREATED, session=Session(id="session_id"), event_id="1"), + SessionUpdatedEvent(type=ListenEvents.SESSION_UPDATED, session=Session(id="session_id"), event_id="2"), + ConversationItemCreatedEvent( + type=ListenEvents.CONVERSATION_ITEM_CREATED, + item=ConversationItem(id="item_id"), + event_id="3", + previous_item_id="2", + ), + ConversationItemDeletedEvent(type=ListenEvents.CONVERSATION_ITEM_DELETED, item_id="item_id", event_id="4"), + ConversationItemTruncatedEvent( + type=ListenEvents.CONVERSATION_ITEM_TRUNCATED, event_id="5", audio_end_ms=0, content_index=0, item_id="item_id" + ), 
+ InputAudioBufferClearedEvent(type=ListenEvents.INPUT_AUDIO_BUFFER_CLEARED, event_id="7"), + InputAudioBufferCommittedEvent( + type=ListenEvents.INPUT_AUDIO_BUFFER_COMMITTED, + event_id="8", + item_id="item_id", + previous_item_id="previous_item_id", + ), + ResponseCreatedEvent(type=ListenEvents.RESPONSE_CREATED, event_id="10", response=RealtimeResponse()), + ResponseFunctionCallArgumentsDoneEvent( + type=ListenEvents.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE, + event_id="11", + arguments="{}", + call_id="call_id", + item_id="item_id", + output_index=0, + response_id="response_id", + ), + ResponseAudioTranscriptDeltaEvent( + type=ListenEvents.RESPONSE_AUDIO_TRANSCRIPT_DELTA, + event_id="12", + content_index=0, + delta="text", + item_id="item_id", + output_index=0, + response_id="response_id", + ), + ResponseAudioDoneEvent( + type=ListenEvents.RESPONSE_AUDIO_DONE, + event_id="13", + item_id="item_id", + output_index=0, + response_id="response_id", + content_index=0, + ), + ResponseAudioDeltaEvent( + type=ListenEvents.RESPONSE_AUDIO_DELTA, + event_id="14", + item_id="item_id", + output_index=0, + response_id="response_id", + content_index=0, + delta="audio data", + ), +] + + +async def websocket_stream(**kwargs) -> AsyncIterable[RealtimeServerEvent]: + for event in events: + yield event + await asyncio.sleep(0) + + +@fixture +def audio_track(): + class AudioTrack(AudioStreamTrack): + kind = "audio" + + async def recv(self): + await asyncio.sleep(0) + return + + return AudioTrack() + + +@fixture +def OpenAIWebsocket(openai_unit_test_env): + client = OpenAIRealtimeWebsocket() + client._call_id_to_function_map["call_id"] = "function_name" + return client + + +@fixture +def OpenAIWebRTC(openai_unit_test_env, audio_track): + client = OpenAIRealtimeWebRTC(audio_track=audio_track) + client._call_id_to_function_map["call_id"] = "function_name" + return client + + +def test_update_settings_from_function_call_config(): + config = FunctionCallChoiceConfiguration( + available_functions=[ + KernelFunctionMetadata(name="function_name", description="function_description", is_prompt=False) + ] + ) + + settings = OpenAIRealtimeExecutionSettings() + + update_settings_from_function_call_configuration(config, settings, FunctionChoiceType.AUTO) + + assert len(settings.tools) == 1 + assert settings.tools[0]["type"] == "function" + assert settings.tools[0]["name"] == "function_name" + assert settings.tools[0]["description"] == "function_description" + assert settings.tool_choice == FunctionChoiceType.AUTO.value + + +def test_openai_realtime_websocket(openai_unit_test_env): + realtime_client = OpenAIRealtimeWebsocket() + assert realtime_client is not None + + +def test_openai_realtime_webrtc(openai_unit_test_env, audio_track): + realtime_client = OpenAIRealtimeWebRTC(audio_track=audio_track) + assert realtime_client is not None + + +@mark.parametrize( + ["event_type", "event_kwargs", "expected_event", "expected_exception"], + [ + param( + SendEvents.SESSION_UPDATE, + {"session": {"id": "session_id"}}, + SessionUpdateEvent, + None, + id="session_update", + ), + param( + SendEvents.SESSION_UPDATE, + {}, + SessionUpdateEvent, + ContentException, + id="session_update_missing", + ), + param( + SendEvents.INPUT_AUDIO_BUFFER_APPEND, + {"audio": "audio_buffer_as_string"}, + InputAudioBufferAppendEvent, + None, + id="input_audio_buffer_append", + ), + param( + SendEvents.INPUT_AUDIO_BUFFER_APPEND, + {}, + InputAudioBufferAppendEvent, + ContentException, + id="input_audio_buffer_append_missing_audio", + ), + param( + 
SendEvents.INPUT_AUDIO_BUFFER_COMMIT, + {}, + InputAudioBufferCommitEvent, + None, + id="input_audio_buffer_commit", + ), + param( + SendEvents.INPUT_AUDIO_BUFFER_CLEAR, + {}, + InputAudioBufferClearEvent, + None, + id="input_audio_buffer_Clear", + ), + param( + SendEvents.CONVERSATION_ITEM_CREATE, + { + "event_id": "event_id", + "previous_item_id": "previous_item_id", + "item": {"id": "item_id"}, + }, + ConversationItemCreateEvent, + None, + id="conversation_item_create_event", + ), + param( + SendEvents.CONVERSATION_ITEM_CREATE, + {}, + ConversationItemCreateEvent, + ContentException, + id="conversation_item_create_event_no_item", + ), + param( + SendEvents.CONVERSATION_ITEM_TRUNCATE, + {"audio_end_ms": 1000, "item_id": "item_id"}, + ConversationItemTruncateEvent, + None, + id="conversation_item_truncate", + ), + param( + SendEvents.CONVERSATION_ITEM_DELETE, + {"item_id": "item_id"}, + ConversationItemDeleteEvent, + None, + id="conversation_item_delete", + ), + param( + SendEvents.CONVERSATION_ITEM_DELETE, + {}, + ConversationItemDeleteEvent, + ContentException, + id="conversation_item_delete_fail", + ), + param( + SendEvents.RESPONSE_CREATE, + {"response": {"instructions": "instructions"}}, + ResponseCreateEvent, + None, + id="response_create", + ), + param( + SendEvents.RESPONSE_CANCEL, + {}, + ResponseCancelEvent, + None, + id="response_cancel", + ), + ], +) +def test_create_openai_realtime_event( + event_type: SendEvents, event_kwargs: dict[str, Any], expected_event: Any, expected_exception: Exception | None +): + if expected_exception: + with raises(expected_exception): + _create_openai_realtime_client_event(event_type, **event_kwargs) + else: + event = _create_openai_realtime_client_event(event_type, **event_kwargs) + assert isinstance(event, expected_event) + + +@mark.parametrize( + ["event", "expected_type"], + [ + param( + ResponseAudioTranscriptDeltaEvent( + content_index=0, + delta="text", + item_id="item_id", + event_id="event_id", + output_index=0, + response_id="response_id", + type="response.audio_transcript.delta", + ), + [RealtimeTextEvent], + id="response_audio_transcript_delta", + ), + param( + ResponseOutputItemAddedEvent( + item=ConversationItem(id="item_id"), + event_id="event_id", + output_index=0, + response_id="response_id", + type="response.output_item.added", + ), + [RealtimeEvent], + id="response_output_item_added", + ), + param( + ResponseOutputItemAddedEvent( + item=ConversationItem(id="item_id", type="function_call", call_id="call_id", name="function_to_call"), + event_id="event_id", + output_index=0, + response_id="response_id", + type="response.output_item.added", + ), + [RealtimeEvent], + id="response_output_item_added_function_call", + ), + param( + ResponseFunctionCallArgumentsDeltaEvent( + call_id="call_id", + delta="argument delta", + event_id="event_id", + output_index=0, + item_id="item_id", + response_id="response_id", + type="response.function_call_arguments.delta", + ), + [RealtimeFunctionCallEvent], + id="response_function_call_arguments_delta", + ), + param( + ResponseFunctionCallArgumentsDoneEvent( + call_id="call_id", + arguments="argument delta", + event_id="event_id", + output_index=0, + item_id="item_id", + response_id="response_id", + type="response.function_call_arguments.done", + ), + [RealtimeEvent], + id="response_function_call_arguments_done_no_kernel", + ), + param( + ErrorEvent( + error={"code": "error_code", "message": "error_message", "type": "invalid_request_error"}, + event_id="event_id", + type="error", + ), + 
[RealtimeEvent], + id="error", + ), + param( + SessionCreatedEvent( + session=Session(id="session_id"), + event_id="event_id", + type="session.created", + ), + [RealtimeEvent], + id="session_created", + ), + param( + SessionUpdatedEvent( + session=Session(id="session_id"), + event_id="event_id", + type="session.updated", + ), + [RealtimeEvent], + id="session_updated", + ), + param( + InputAudioBufferSpeechStartedEvent( + audio_start_ms=0, + event_id="event_id", + item_id="item_id", + type="input_audio_buffer.speech_started", + ), + [RealtimeEvent], + id="other", + ), + ], +) +async def test_parse_event(OpenAIWebsocket, event: RealtimeServerEvent, expected_type: list[type]): + iter = 0 + async for result in OpenAIWebsocket._parse_event(event): + assert isinstance(result, expected_type[iter]) + iter += 1 + + +async def test_update_session(OpenAIWebsocket, kernel): + chat_history = ChatHistory( + messages=[ + ChatMessageContent(role="user", content="Hello"), + ChatMessageContent( + role="assistant", + items=[ + FunctionCallContent( + function_name="function_name", plugin_name="plugin", arguments={"arg1": "value"}, id="1" + ) + ], + ), + ChatMessageContent( + role="tool", + items=[ + FunctionResultContent(function_name="function_name", plugin_name="plugin", result="result", id="1") + ], + ), + ChatMessageContent( + role="user", + items=[ + TextContent(text="Hello again"), + ImageContent(uri="https://example.com/image.png"), + ], + ), + ] + ) + settings = OpenAIRealtimeExecutionSettings(instructions="instructions", ai_model_id="gpt-4o-realtime-preview") + with patch.object(OpenAIWebsocket, "_send") as mock_send: + await OpenAIWebsocket.update_session( + chat_history=chat_history, settings=settings, create_response=True, kernel=kernel + ) + mock_send.assert_awaited() + # session update, 4 conversation item create events, response create + # images are not supported, so ignored + assert len(mock_send.await_args_list) == 6 + assert OpenAIWebsocket._current_settings == settings + assert OpenAIWebsocket.kernel == kernel + + +async def test_parse_function_call_arguments_done(OpenAIWebsocket, kernel): + func_result = "result" + event = ResponseFunctionCallArgumentsDoneEvent( + call_id="call_id", + arguments='{"x": "' + func_result + '"}', + event_id="event_id", + output_index=0, + item_id="item_id", + response_id="response_id", + type="response.function_call_arguments.done", + ) + response_events = [RealtimeFunctionCallEvent, RealtimeFunctionResultEvent] + OpenAIWebsocket._current_settings = OpenAIRealtimeExecutionSettings( + instructions="instructions", ai_model_id="gpt-4o-realtime-preview" + ) + OpenAIWebsocket._current_settings.function_choice_behavior = FunctionChoiceBehavior.Auto() + OpenAIWebsocket._call_id_to_function_map["call_id"] = "plugin_name-function_name" + func = kernel_function(name="function_name", description="function_description")(lambda x: x) + kernel.add_function(plugin_name="plugin_name", function_name="function_name", function=func) + OpenAIWebsocket.kernel = kernel + iter = 0 + with patch.object(OpenAIWebsocket, "_send") as mock_send: + async for event in OpenAIWebsocket._parse_function_call_arguments_done(event): + assert isinstance(event, response_events[iter]) + iter += 1 + mock_send.assert_awaited() + assert len(mock_send.await_args_list) == 2 + mock_send.assert_any_await( + ConversationItemCreateEvent( + type="conversation.item.create", + item=ConversationItem( + type="function_call_output", + output=func_result, + call_id="call_id", + ), + ) + ) + + +async def 
test_parse_function_call_arguments_done_fail(OpenAIWebsocket, kernel): + func_result = "result" + event = ResponseFunctionCallArgumentsDoneEvent( + call_id="call_id", + arguments='{"x": "' + func_result + '"}', + event_id="event_id", + output_index=0, + item_id="item_id", + response_id="response_id", + type="response.function_call_arguments.done", + ) + response_events = [RealtimeEvent] + OpenAIWebsocket._current_settings = OpenAIRealtimeExecutionSettings( + instructions="instructions", ai_model_id="gpt-4o-realtime-preview" + ) + OpenAIWebsocket._current_settings.function_choice_behavior = FunctionChoiceBehavior.Auto() + # This function name is invalid + OpenAIWebsocket._call_id_to_function_map["call_id"] = "function_name" + func = kernel_function(name="function_name", description="function_description")(lambda x: x) + kernel.add_function(plugin_name="plugin_name", function_name="function_name", function=func) + OpenAIWebsocket.kernel = kernel + iter = 0 + async for event in OpenAIWebsocket._parse_function_call_arguments_done(event): + assert isinstance(event, response_events[iter]) + iter += 1 + + +async def test_send_audio(OpenAIWebsocket): + audio_event = RealtimeAudioEvent( + audio=AudioContent(data=b"audio data", mime_type="audio/wav"), + ) + with patch.object(OpenAIWebsocket, "_send") as mock_send: + await OpenAIWebsocket.send(audio_event) + mock_send.assert_awaited() + assert len(mock_send.await_args_list) == 1 + mock_send.assert_any_await( + InputAudioBufferAppendEvent( + audio="audio data", + type="input_audio_buffer.append", + ) + ) + + +@mark.parametrize("client", ["OpenAIWebRTC", "OpenAIWebsocket"]) +async def test_send_session_update(client, OpenAIWebRTC, OpenAIWebsocket): + openai_client = OpenAIWebRTC if client == "OpenAIWebRTC" else OpenAIWebsocket + settings = PromptExecutionSettings(ai_model_id="gpt-4o-realtime-preview") + session_event = RealtimeEvent( + service_type=SendEvents.SESSION_UPDATE, + service_event={"settings": settings}, + ) + with patch.object(openai_client, "_send") as mock_send: + await openai_client.send(event=session_event) + mock_send.assert_awaited() + assert len(mock_send.await_args_list) == 1 + mock_send.assert_any_await( + SessionUpdateEvent( + session={"model": "gpt-4o-realtime-preview"}, + type="session.update", + ) + ) + + +@mark.parametrize("client", ["OpenAIWebRTC", "OpenAIWebsocket"]) +async def test_send_conversation_item_create(client, OpenAIWebRTC, OpenAIWebsocket): + openai_client = OpenAIWebRTC if client == "OpenAIWebRTC" else OpenAIWebsocket + event = RealtimeEvent( + service_type=SendEvents.CONVERSATION_ITEM_CREATE, + service_event={ + "item": ChatMessageContent( + role="user", + items=[ + TextContent(text="Hello"), + FunctionCallContent( + function_name="function_name", + plugin_name="plugin", + arguments={"arg1": "value"}, + id="1", + metadata={"call_id": "call_id"}, + ), + FunctionResultContent( + function_name="function_name", + plugin_name="plugin", + result="result", + id="1", + metadata={"call_id": "call_id"}, + ), + ], + ) + }, + ) + + with patch.object(openai_client, "_send") as mock_send: + await openai_client.send(event=event) + mock_send.assert_awaited() + assert len(mock_send.await_args_list) == 3 + mock_send.assert_any_await( + ConversationItemCreateEvent( + item=ConversationItem( + content=[ConversationItemContent(text="Hello", type="input_text")], + role="user", + type="message", + ), + type="conversation.item.create", + ) + ) + mock_send.assert_any_await( + ConversationItemCreateEvent( + item=ConversationItem( + 
arguments='{"arg1": "value"}', + call_id="call_id", + name="plugin-function_name", + type="function_call", + ), + type="conversation.item.create", + ) + ) + mock_send.assert_any_await( + ConversationItemCreateEvent( + item=ConversationItem( + call_id="call_id", + output="result", + type="function_call_output", + ), + type="conversation.item.create", + ) + ) + + +async def test_receive_websocket(OpenAIWebsocket): + connection_mock = AsyncMock(spec=AsyncRealtimeConnection) + connection_mock.recv = websocket_stream + + manager = AsyncMock(spec=AsyncRealtimeConnectionManager) + manager.enter.return_value = connection_mock + + with patch("openai.resources.beta.realtime.realtime.AsyncRealtime.connect") as mock_connect: + mock_connect.return_value = manager + async with OpenAIWebsocket(): + async for msg in OpenAIWebsocket.receive(): + assert isinstance(msg, RealtimeEvent) + + +async def test_receive_webrtc(OpenAIWebRTC): + counter = len(events) + with patch.object(OpenAIRealtimeWebRTC, "create_session"): + recv_task = asyncio.create_task(_stream_to_webrtc(OpenAIWebRTC)) + async with OpenAIWebRTC(): + async for msg in OpenAIWebRTC.receive(): + assert isinstance(msg, RealtimeEvent) + counter -= 1 + if counter == 0: + break + recv_task.cancel() + + +async def _stream_to_webrtc(client: OpenAIRealtimeWebRTC): + async for msg in websocket_stream(): + async for parsed_msg in client._parse_event(msg): + await client._receive_buffer.put(parsed_msg) + await asyncio.sleep(0) diff --git a/python/tests/unit/connectors/ai/open_ai/test_openai_request_settings.py b/python/tests/unit/connectors/ai/open_ai/test_openai_request_settings.py index 25cb379bff12..8d4e9e4cea4b 100644 --- a/python/tests/unit/connectors/ai/open_ai/test_openai_request_settings.py +++ b/python/tests/unit/connectors/ai/open_ai/test_openai_request_settings.py @@ -311,6 +311,20 @@ def test_azure_open_ai_chat_prompt_execution_settings_with_aisearch_data_sources pytest.param({"type": "access_token"}, marks=pytest.mark.xfail), pytest.param({"type": "invalid"}, marks=pytest.mark.xfail), ], + ids=[ + "APIKey", + "api_key", + "api_key_no_key", + "SystemAssignedManagedIdentity", + "system_assigned_managed_identity", + "UserAssignedManagedIdentity", + "user_assigned_managed_identity", + "user_assigned_managed_identity_no_id", + "AccessToken", + "access_token", + "access_token_no_token", + "invalid", + ], ) def test_aisearch_data_source_parameters(authentication) -> None: AzureAISearchDataSourceParameters(index_name="test_index", authentication=authentication) diff --git a/python/tests/unit/connectors/memory/chroma/test_chroma.py b/python/tests/unit/connectors/memory/chroma/test_chroma.py new file mode 100644 index 000000000000..2fb29afd1e40 --- /dev/null +++ b/python/tests/unit/connectors/memory/chroma/test_chroma.py @@ -0,0 +1,134 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from unittest.mock import MagicMock + +import pytest +from chromadb.api import ClientAPI + +from semantic_kernel.connectors.memory.chroma.chroma import ChromaCollection, ChromaStore +from semantic_kernel.data.vector_search.vector_search_filter import VectorSearchFilter +from semantic_kernel.data.vector_search.vector_search_options import VectorSearchOptions + + +@pytest.fixture +def mock_client(): + return MagicMock(spec=ClientAPI) + + +@pytest.fixture +def chroma_collection(mock_client, data_model_definition): + return ChromaCollection( + collection_name="test_collection", + data_model_type=dict, + data_model_definition=data_model_definition, + client=mock_client, + ) + + +@pytest.fixture +def chroma_store(mock_client): + return ChromaStore(client=mock_client) + + +def test_chroma_collection_initialization(chroma_collection): + assert chroma_collection.collection_name == "test_collection" + assert chroma_collection.data_model_type is dict + + +def test_chroma_store_initialization(chroma_store): + assert chroma_store.client is not None + + +def test_chroma_collection_get_collection(chroma_collection, mock_client): + mock_client.get_collection.return_value = "mock_collection" + collection = chroma_collection._get_collection() + assert collection == "mock_collection" + + +def test_chroma_store_get_collection(chroma_store, mock_client, data_model_definition): + collection = chroma_store.get_collection( + collection_name="test_collection", data_model_type=dict, data_model_definition=data_model_definition + ) + assert collection is not None + assert isinstance(collection, ChromaCollection) + + +async def test_chroma_collection_does_collection_exist(chroma_collection, mock_client): + mock_client.get_collection.return_value = "mock_collection" + exists = await chroma_collection.does_collection_exist() + assert exists + + +async def test_chroma_store_list_collection_names(chroma_store, mock_client): + mock_client.list_collections.return_value = ["collection1", "collection2"] + collections = await chroma_store.list_collection_names() + assert collections == ["collection1", "collection2"] + + +async def test_chroma_collection_create_collection(chroma_collection, mock_client): + await chroma_collection.create_collection() + mock_client.create_collection.assert_called_once_with(name="test_collection", metadata={"hnsw:space": "cosine"}) + + +async def test_chroma_collection_delete_collection(chroma_collection, mock_client): + await chroma_collection.delete_collection() + mock_client.delete_collection.assert_called_once_with(name="test_collection") + + +async def test_chroma_collection_upsert(chroma_collection, mock_client): + records = [{"id": "1", "vector": [0.1, 0.2, 0.3, 0.4, 0.5], "content": "test document"}] + ids = await chroma_collection.upsert(records) + assert ids == "1" + mock_client.get_collection().add.assert_called_once() + + +async def test_chroma_collection_get(chroma_collection, mock_client): + mock_client.get_collection().get.return_value = { + "ids": [["1"]], + "documents": [["test document"]], + "embeddings": [[[0.1, 0.2, 0.3, 0.4, 0.5]]], + "metadatas": [[{}]], + } + records = await chroma_collection._inner_get(["1"]) + assert len(records) == 1 + assert records[0]["id"] == "1" + + +async def test_chroma_collection_delete(chroma_collection, mock_client): + await chroma_collection._inner_delete(["1"]) + mock_client.get_collection().delete.assert_called_once_with(ids=["1"]) + + +async def test_chroma_collection_search(chroma_collection, mock_client): + options = 
VectorSearchOptions(top=1, include_vectors=True) + mock_client.get_collection().query.return_value = { + "ids": [["1"]], + "documents": [["test document"]], + "embeddings": [[[0.1, 0.2, 0.3, 0.4, 0.5]]], + "metadatas": [[{}]], + "distances": [[0.1]], + } + results = await chroma_collection.vectorized_search(options=options, vector=[0.1, 0.2, 0.3, 0.4, 0.5]) + async for res in results.results: + assert res.record["id"] == "1" + assert res.score == 0.1 + + +@pytest.mark.parametrize( + "filter_expression, expected", + [ + pytest.param( + VectorSearchFilter.equal_to("field1", "value1"), {"field1": {"$eq": "value1"}}, id="single_filter" + ), + pytest.param(VectorSearchFilter(), None, id="empty_filter"), + pytest.param( + VectorSearchFilter.equal_to("field1", "value1").any_tag_equal_to("field2", ["value2", "value3"]), + {"$and": [{"field1": {"$eq": "value1"}}, {"field2": {"$in": ["value2", "value3"]}}]}, + id="multiple_filters", + ), + ], +) +def test_chroma_collection_parse_filter(chroma_collection, filter_expression, expected): + options = VectorSearchOptions(top=1, include_vectors=True, filter=filter_expression) + filter_expression = chroma_collection._parse_filter(options) + assert filter_expression == expected diff --git a/python/tests/unit/connectors/memory/mongodb_atlas/conftest.py b/python/tests/unit/connectors/memory/mongodb_atlas/conftest.py new file mode 100644 index 000000000000..23f637104710 --- /dev/null +++ b/python/tests/unit/connectors/memory/mongodb_atlas/conftest.py @@ -0,0 +1,37 @@ +# Copyright (c) Microsoft. All rights reserved. + + +from unittest.mock import patch + +import pytest +from pymongo import AsyncMongoClient +from pymongo.asynchronous.collection import AsyncCollection +from pymongo.asynchronous.database import AsyncDatabase + +BASE_PATH = "pymongo.asynchronous.mongo_client.AsyncMongoClient" +DATABASE_PATH = "pymongo.asynchronous.database.AsyncDatabase" +COLLECTION_PATH = "pymongo.asynchronous.collection.AsyncCollection" + + +@pytest.fixture(autouse=True) +def mock_mongo_client(): + with patch(BASE_PATH, spec=AsyncMongoClient) as mock: + yield mock + + +@pytest.fixture(autouse=True) +def mock_get_database(mock_mongo_client): + with ( + patch(DATABASE_PATH, spec=AsyncDatabase) as mock_db, + patch.object(mock_mongo_client, "get_database", new_callable=lambda: mock_db) as mock, + ): + yield mock + + +@pytest.fixture(autouse=True) +def mock_get_collection(mock_get_database): + with ( + patch(COLLECTION_PATH, spec=AsyncCollection) as mock_collection, + patch.object(mock_get_database, "get_collection", new_callable=lambda: mock_collection) as mock, + ): + yield mock diff --git a/python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_collection.py b/python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_collection.py new file mode 100644 index 000000000000..5a084ed7ea76 --- /dev/null +++ b/python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_collection.py @@ -0,0 +1,96 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from unittest.mock import AsyncMock, patch + +from pymongo import AsyncMongoClient +from pymongo.asynchronous.cursor import AsyncCursor +from pymongo.results import UpdateResult +from pytest import mark, raises + +from semantic_kernel.connectors.memory.mongodb_atlas.const import DEFAULT_DB_NAME, DEFAULT_SEARCH_INDEX_NAME +from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_collection import MongoDBAtlasCollection +from semantic_kernel.exceptions.vector_store_exceptions import VectorStoreInitializationException + + +def test_mongodb_atlas_collection_initialization(mongodb_atlas_unit_test_env, data_model_definition, mock_mongo_client): + collection = MongoDBAtlasCollection( + data_model_type=dict, + data_model_definition=data_model_definition, + collection_name="test_collection", + mongo_client=mock_mongo_client, + ) + assert collection.mongo_client is not None + assert isinstance(collection.mongo_client, AsyncMongoClient) + + +@mark.parametrize("exclude_list", [["MONGODB_ATLAS_CONNECTION_STRING"]], indirect=True) +def test_mongodb_atlas_collection_initialization_fail(mongodb_atlas_unit_test_env, data_model_definition): + with raises(VectorStoreInitializationException): + MongoDBAtlasCollection( + collection_name="test_collection", + data_model_type=dict, + data_model_definition=data_model_definition, + ) + + +@mark.parametrize("exclude_list", [["MONGODB_ATLAS_DATABASE_NAME", "MONGODB_ATLAS_INDEX_NAME"]], indirect=True) +def test_mongodb_atlas_collection_initialization_defaults(mongodb_atlas_unit_test_env, data_model_definition): + collection = MongoDBAtlasCollection( + collection_name="test_collection", + data_model_type=dict, + data_model_definition=data_model_definition, + ) + assert collection.database_name == DEFAULT_DB_NAME + assert collection.index_name == DEFAULT_SEARCH_INDEX_NAME + + +async def test_mongodb_atlas_collection_upsert(mongodb_atlas_unit_test_env, data_model_definition, mock_get_collection): + collection = MongoDBAtlasCollection( + data_model_type=dict, + data_model_definition=data_model_definition, + collection_name="test_collection", + ) + with patch.object(collection, "_get_collection", new=mock_get_collection) as mock_get: + result_mock = AsyncMock(spec=UpdateResult) + result_mock.upserted_ids = {0: "test_id"} + mock_get.return_value.bulk_write.return_value = result_mock + result = await collection._inner_upsert([{"_id": "test_id", "data": "test_data"}]) + assert result == ["test_id"] + + +async def test_mongodb_atlas_collection_get(mongodb_atlas_unit_test_env, data_model_definition, mock_get_collection): + collection = MongoDBAtlasCollection( + data_model_type=dict, + data_model_definition=data_model_definition, + collection_name="test_collection", + ) + with patch.object(collection, "_get_collection", new=mock_get_collection) as mock_get: + result_mock = AsyncMock(spec=AsyncCursor) + result_mock.to_list.return_value = [{"_id": "test_id", "data": "test_data"}] + mock_get.return_value.find.return_value = result_mock + result = await collection._inner_get(["test_id"]) + assert result == [{"_id": "test_id", "data": "test_data"}] + + +async def test_mongodb_atlas_collection_delete(mongodb_atlas_unit_test_env, data_model_definition, mock_get_collection): + collection = MongoDBAtlasCollection( + data_model_type=dict, + data_model_definition=data_model_definition, + collection_name="test_collection", + ) + with patch.object(collection, "_get_collection", new=mock_get_collection) as mock_get: + await collection._inner_delete(["test_id"]) + 
mock_get.return_value.delete_many.assert_called_with({"_id": {"$in": ["test_id"]}}) + + +async def test_mongodb_atlas_collection_collection_exists( + mongodb_atlas_unit_test_env, data_model_definition, mock_get_database +): + collection = MongoDBAtlasCollection( + data_model_type=dict, + data_model_definition=data_model_definition, + collection_name="test_collection", + ) + with patch.object(collection, "_get_database", new=mock_get_database) as mock_get: + mock_get.return_value.list_collection_names.return_value = ["test_collection"] + assert await collection.does_collection_exist() diff --git a/python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_store.py b/python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_store.py new file mode 100644 index 000000000000..a06e68a99699 --- /dev/null +++ b/python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_store.py @@ -0,0 +1,31 @@ +# Copyright (c) Microsoft. All rights reserved. + + +from pymongo import AsyncMongoClient + +from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_collection import MongoDBAtlasCollection +from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_store import MongoDBAtlasStore + + +def test_mongodb_atlas_store_initialization(mongodb_atlas_unit_test_env): + store = MongoDBAtlasStore() + assert store.mongo_client is not None + assert isinstance(store.mongo_client, AsyncMongoClient) + + +def test_mongodb_atlas_store_get_collection(mongodb_atlas_unit_test_env, data_model_definition): + store = MongoDBAtlasStore() + collection = store.get_collection( + collection_name="test_collection", + data_model_type=dict, + data_model_definition=data_model_definition, + ) + assert collection is not None + assert isinstance(collection, MongoDBAtlasCollection) + + +async def test_mongodb_atlas_store_list_collection_names(mongodb_atlas_unit_test_env, mock_mongo_client): + store = MongoDBAtlasStore(mongo_client=mock_mongo_client, database_name="test_db") + store.mongo_client.get_database().list_collection_names.return_value = ["test_collection"] + result = await store.list_collection_names() + assert result == ["test_collection"] diff --git a/python/tests/unit/connectors/memory/postgres/test_postgres_store.py b/python/tests/unit/connectors/memory/postgres/test_postgres_store.py index a2b23bb662c4..755281e9f2bb 100644 --- a/python/tests/unit/connectors/memory/postgres/test_postgres_store.py +++ b/python/tests/unit/connectors/memory/postgres/test_postgres_store.py @@ -5,6 +5,7 @@ from typing import Annotated, Any from unittest.mock import AsyncMock, MagicMock, Mock, patch +import pytest import pytest_asyncio from psycopg import AsyncConnection, AsyncCursor from psycopg_pool import AsyncConnectionPool @@ -13,6 +14,8 @@ from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( OpenAIEmbeddingPromptExecutionSettings, ) +from semantic_kernel.connectors.memory.postgres.constants import DISTANCE_COLUMN_NAME +from semantic_kernel.connectors.memory.postgres.postgres_collection import PostgresCollection from semantic_kernel.connectors.memory.postgres.postgres_settings import PostgresSettings from semantic_kernel.connectors.memory.postgres.postgres_store import PostgresStore from semantic_kernel.data.const import DistanceFunction, IndexKind @@ -22,6 +25,7 @@ VectorStoreRecordKeyField, VectorStoreRecordVectorField, ) +from semantic_kernel.data.vector_search.vector_search_options import VectorSearchOptions @fixture(scope="function") @@ 
-76,6 +80,9 @@ class SimpleDataModel: ] +# region VectorStore Tests + + async def test_vector_store_defaults(vector_store: PostgresStore) -> None: assert vector_store.connection_pool is not None async with vector_store.connection_pool.connection() as conn: @@ -236,7 +243,130 @@ async def test_get_records(vector_store: PostgresStore, mock_cursor: Mock) -> No assert records[2].data == {"key": "value3"} -# Test settings +# endregion + +# region Vector Search tests + + +@pytest.mark.parametrize( + "distance_function, operator, subquery_distance, include_vectors, include_total_count", + [ + (DistanceFunction.COSINE_SIMILARITY, "<=>", f'1 - subquery."{DISTANCE_COLUMN_NAME}"', False, False), + (DistanceFunction.COSINE_DISTANCE, "<=>", None, False, False), + (DistanceFunction.DOT_PROD, "<#>", f'-1 * subquery."{DISTANCE_COLUMN_NAME}"', True, False), + (DistanceFunction.EUCLIDEAN_DISTANCE, "<->", None, False, True), + (DistanceFunction.MANHATTAN, "<+>", None, True, True), + ], +) +async def test_vector_search( + vector_store: PostgresStore, + mock_cursor: Mock, + distance_function: DistanceFunction, + operator: str, + subquery_distance: str | None, + include_vectors: bool, + include_total_count: bool, +) -> None: + @vectorstoremodel + @dataclass + class SimpleDataModel: + id: Annotated[int, VectorStoreRecordKeyField()] + embedding: Annotated[ + list[float], + VectorStoreRecordVectorField( + embedding_settings={"embedding": OpenAIEmbeddingPromptExecutionSettings(dimensions=1536)}, + index_kind=IndexKind.HNSW, + dimensions=1536, + distance_function=distance_function, + property_type="float", + ), + ] + data: Annotated[ + dict[str, Any], + VectorStoreRecordDataField(has_embedding=True, embedding_property_name="embedding", property_type="JSONB"), + ] + + collection = vector_store.get_collection("test_collection", SimpleDataModel) + assert isinstance(collection, PostgresCollection) + + search_results = await collection.vectorized_search( + [1.0, 2.0, 3.0], + options=VectorSearchOptions( + top=10, skip=5, include_vectors=include_vectors, include_total_count=include_total_count + ), + ) + if include_total_count: + # Including total count issues query directly + assert mock_cursor.execute.call_count == 1 + else: + # Total count is not included, query is issued when iterating over results + assert mock_cursor.execute.call_count == 0 + async for _ in search_results.results: + pass + assert mock_cursor.execute.call_count == 1 + + execute_args, _ = mock_cursor.execute.call_args + + assert (search_results.total_count is not None) == include_total_count + + statement = execute_args[0] + statement_str = statement.as_string() + + expected_columns = '"id", "data"' + if include_vectors: + expected_columns = '"id", "embedding", "data"' + + expected_statement = ( + f'SELECT {expected_columns}, "embedding" {operator} %s as "{DISTANCE_COLUMN_NAME}" ' + 'FROM "public"."test_collection" ' + f'ORDER BY "{DISTANCE_COLUMN_NAME}" LIMIT 10 OFFSET 5' + ) + + if subquery_distance: + expected_statement = ( + f'SELECT subquery.*, {subquery_distance} AS "{DISTANCE_COLUMN_NAME}" FROM (' + + expected_statement + + ") AS subquery" + ) + + assert statement_str == expected_statement + + +async def test_model_post_init_conflicting_distance_column_name(vector_store: PostgresStore) -> None: + @vectorstoremodel + @dataclass + class ConflictingDataModel: + id: Annotated[int, VectorStoreRecordKeyField()] + sk_pg_distance: Annotated[ + float, VectorStoreRecordDataField() + ] # Note: test depends on value of DISTANCE_COLUMN_NAME constant + + 
embedding: Annotated[ + list[float], + VectorStoreRecordVectorField( + embedding_settings={"embedding": OpenAIEmbeddingPromptExecutionSettings(dimensions=1536)}, + index_kind=IndexKind.HNSW, + dimensions=1536, + distance_function=DistanceFunction.COSINE_SIMILARITY, + property_type="float", + ), + ] + data: Annotated[ + dict[str, Any], + VectorStoreRecordDataField(has_embedding=True, embedding_property_name="embedding", property_type="JSONB"), + ] + + collection = vector_store.get_collection("test_collection", ConflictingDataModel) + assert isinstance(collection, PostgresCollection) + + # Ensure that the distance column name has been changed to avoid conflict + assert collection._distance_column_name != DISTANCE_COLUMN_NAME + assert collection._distance_column_name.startswith(f"{DISTANCE_COLUMN_NAME}_") + + +# endregion + +# region Settings tests def test_settings_connection_string(monkeypatch) -> None: @@ -290,3 +420,6 @@ def test_settings_env_vars(monkeypatch) -> None: assert conn_info["dbname"] == "dbname" assert conn_info["user"] == "user" assert conn_info["password"] == "password" + + +# endregion diff --git a/python/tests/unit/contents/test_binary_content.py b/python/tests/unit/contents/test_binary_content.py index 14719d5f5754..57ee6dad5d3f 100644 --- a/python/tests/unit/contents/test_binary_content.py +++ b/python/tests/unit/contents/test_binary_content.py @@ -2,6 +2,7 @@ import pytest +from numpy import array from semantic_kernel.contents.binary_content import BinaryContent @@ -16,6 +17,9 @@ BinaryContent(data_uri="data:image/jpeg;foo=bar;base64,dGVzdF9kYXRh", metadata={"bar": "baz"}), id="data_uri_with_params_and_metadata", ), + pytest.param( + BinaryContent(data=array([1, 2, 3]), mime_type="application/json", data_format="base64"), id="data_array" + ), ] @@ -72,6 +76,7 @@ def test_update_data_str(): def test_update_existing_data(): binary = BinaryContent(data_uri="data:image/jpeg;foo=bar;base64,dGVzdF9kYXRh", metadata={"bar": "baz"}) + assert binary._data_uri is not None binary._data_uri.data_format = None binary.data = "test_data" binary.data = b"test_data" diff --git a/python/tests/unit/contents/test_chat_history_summarization_reducer.py b/python/tests/unit/contents/test_chat_history_summarization_reducer.py index 35e13c969522..c61d044a9811 100644 --- a/python/tests/unit/contents/test_chat_history_summarization_reducer.py +++ b/python/tests/unit/contents/test_chat_history_summarization_reducer.py @@ -5,7 +5,7 @@ import pytest from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase -from semantic_kernel.const import DEFAULT_SERVICE_NAME +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.history_reducer.chat_history_reducer_utils import SUMMARY_METADATA_KEY from semantic_kernel.contents.history_reducer.chat_history_summarization_reducer import ( @@ -49,7 +49,6 @@ def test_summarization_reducer_init(mock_service): reducer = ChatHistorySummarizationReducer( service=mock_service, target_count=10, - service_id="my_service", threshold_count=5, summarization_instructions="Custom instructions", use_single_summary=False, @@ -58,7 +57,6 @@ def test_summarization_reducer_init(mock_service): assert reducer.service == mock_service assert reducer.target_count == 10 - assert reducer.service_id == "my_service" assert reducer.threshold_count == 5 assert reducer.summarization_instructions == "Custom 
instructions" assert reducer.use_single_summary is False @@ -72,7 +70,6 @@ def test_summarization_reducer_defaults(mock_service): assert reducer.summarization_instructions in reducer.summarization_instructions assert reducer.use_single_summary is True assert reducer.fail_on_error is True - assert reducer.service_id == DEFAULT_SERVICE_NAME def test_summarization_reducer_eq_and_hash(mock_service): @@ -115,6 +112,7 @@ async def test_summarization_reducer_reduce_needed(mock_service): # Mock that the service will return a single summary message summary_content = ChatMessageContent(role=AuthorRole.ASSISTANT, content="This is a summary.") mock_service.get_chat_message_content.return_value = summary_content + mock_service.get_prompt_execution_settings_from_settings.return_value = PromptExecutionSettings() result = await reducer.reduce() assert result is not None, "We expect a shortened list with a new summary inserted." @@ -124,6 +122,33 @@ async def test_summarization_reducer_reduce_needed(mock_service): ) +async def test_summarization_reducer_reduce_needed_auto(mock_service): + # Mock that the service will return a single summary message + summary_content = ChatMessageContent(role=AuthorRole.ASSISTANT, content="This is a summary.") + mock_service.get_chat_message_content.return_value = summary_content + mock_service.get_prompt_execution_settings_from_settings.return_value = PromptExecutionSettings() + + messages = [ + # A summary message (as in the original test) + ChatMessageContent(role=AuthorRole.SYSTEM, content="Existing summary", metadata={SUMMARY_METADATA_KEY: True}), + # Enough additional messages so total is > 4 + ChatMessageContent(role=AuthorRole.USER, content="User says hello"), + ChatMessageContent(role=AuthorRole.ASSISTANT, content="Assistant responds"), + ChatMessageContent(role=AuthorRole.USER, content="User says more"), + ChatMessageContent(role=AuthorRole.ASSISTANT, content="Assistant responds again"), + ChatMessageContent(role=AuthorRole.USER, content="User says more"), + ChatMessageContent(role=AuthorRole.ASSISTANT, content="Assistant responds again"), + ] + + reducer = ChatHistorySummarizationReducer(auto_reduce=True, service=mock_service, target_count=3, threshold_count=1) + + for msg in messages: + await reducer.add_message_async(msg) + assert len(reducer.messages) <= 5, ( + "We should auto-reduce after each message, we have one summary, and then 4 other messages." + ) + + async def test_summarization_reducer_reduce_no_messages_to_summarize(mock_service): # If we do use_single_summary=False, the older_range_start is insertion_point # In that scenario, if insertion_point == older_range_end => no messages to summarize => return None @@ -196,6 +221,7 @@ async def test_summarization_reducer_private_summarize(mock_service): summary_content = ChatMessageContent(role=AuthorRole.ASSISTANT, content="Mock Summary") mock_service.get_chat_message_content.return_value = summary_content + mock_service.get_prompt_execution_settings_from_settings.return_value = PromptExecutionSettings() actual_summary = await reducer._summarize(chat_messages) assert actual_summary is not None, "We should get a summary message back." 
diff --git a/python/tests/unit/contents/test_chat_message_content.py b/python/tests/unit/contents/test_chat_message_content.py index 9e7dcaa07b8a..18e160c69304 100644 --- a/python/tests/unit/contents/test_chat_message_content.py +++ b/python/tests/unit/contents/test_chat_message_content.py @@ -4,6 +4,7 @@ from defusedxml.ElementTree import XML from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.file_reference_content import FileReferenceContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent from semantic_kernel.contents.image_content import ImageContent @@ -380,3 +381,37 @@ def test_cmc_to_dict_keys(): def test_cmc_to_dict_items(input_args, expected_dict): message = ChatMessageContent(**input_args) assert message.to_dict() == expected_dict + + +def test_cmc_with_unhashable_types_can_hash(): + user_messages = [ + ChatMessageContent( + role=AuthorRole.USER, + items=[ + TextContent(text="Describe this image."), + ImageContent( + uri="https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/New_york_times_square-terabass.jpg/1200px-New_york_times_square-terabass.jpg" + ), + ], + ), + ChatMessageContent( + role=AuthorRole.USER, + items=[ + TextContent(text="What is the main color in this image?"), + ImageContent(uri="https://upload.wikimedia.org/wikipedia/commons/5/56/White_shark.jpg"), + ], + ), + ChatMessageContent( + role=AuthorRole.USER, + items=[ + TextContent(text="Is there an animal in this image?"), + FileReferenceContent(file_id="test_file_id"), + ], + ), + ChatMessageContent( + role=AuthorRole.USER, + ), + ] + + for message in user_messages: + assert hash(message) is not None diff --git a/python/tests/unit/contents/test_data_uri.py b/python/tests/unit/contents/test_data_uri.py index c4879b305593..fc98679a9765 100644 --- a/python/tests/unit/contents/test_data_uri.py +++ b/python/tests/unit/contents/test_data_uri.py @@ -2,6 +2,7 @@ from typing import Any +import numpy as np import pytest from semantic_kernel.contents.utils.data_uri import DataUri @@ -20,7 +21,15 @@ "base64", id="basic_image", ), - pytest.param("data:text/plain;,test_data", None, "test_data", "text/plain", {}, None, id="basic_text"), + pytest.param( + "data:text/plain;,test_data", + b"test_data", + "test_data", + "text/plain", + {}, + None, + id="basic_text", + ), pytest.param( "data:application/octet-stream;base64,AQIDBA==", b"\x01\x02\x03\x04", @@ -41,13 +50,22 @@ ), pytest.param( "data:application/octet-stream;utf8,01-02-03-04", - None, + b"01-02-03-04", "01-02-03-04", "application/octet-stream", {}, "utf8", id="utf8", ), + pytest.param( + "data:text/plain;key=value;base64,U29t\r\nZQ==\t", + b"Some", + "U29tZQ==", + "text/plain", + {"key": "value"}, + "base64", + id="with_params", + ), ], ) def test_data_uri_from_data_uri_str( @@ -60,10 +78,10 @@ def test_data_uri_from_data_uri_str( ): data_uri = DataUri.from_data_uri(uri) assert data_uri.data_bytes == data_bytes - assert data_uri.data_str == data_str assert data_uri.mime_type == mime_type assert data_uri.parameters == parameters assert data_uri.data_format == data_format + assert data_uri._data_str() == data_str @pytest.mark.parametrize( @@ -74,11 +92,6 @@ def test_data_uri_from_data_uri_str( pytest.param("data:", ContentInitializationError, id="missing_comma"), pytest.param("data:something,", ContentInitializationError, id="mime_type_without_subtype"), pytest.param("data:something;else,data", 
ContentInitializationError, id="mime_type_without_subtype2"), - pytest.param( - "data:type/subtype;parameterwithoutvalue;else,", ContentInitializationError, id="param_without_value" - ), - pytest.param("data:type/subtype;parameter=va=lue;else,", ContentInitializationError, id="param_multiple_eq"), - pytest.param("data:type/subtype;=value;else,", ContentInitializationError, id="param_without_name"), pytest.param("data:image/jpeg;base64,dGVzdF9kYXRh;foo=bar", ContentInitializationError, id="wrong_order"), pytest.param("data:text/plain;test_data", ContentInitializationError, id="missing_comma"), pytest.param( @@ -230,3 +243,70 @@ def test_eq(): assert data_uri1 == data_uri2 assert data_uri1 != "data:image/jpeg;base64,dGVzdF9kYXRh" assert data_uri1 != DataUri.from_data_uri("data:image/jpeg;base64,dGVzdF9kYXRi") + + +def test_array(): + arr = np.array([[1, 2], [3, 4]], dtype=np.uint8) + data_uri = DataUri(data_array=arr, mime_type="application/octet-stream", data_format="base64") + encoded = data_uri.to_string() + assert data_uri.data_array is not None + assert "data:application/octet-stream;base64," in encoded + assert data_uri.data_array.tobytes() == b"\x01\x02\x03\x04" + + +@pytest.mark.parametrize( + "data_bytes, data_str, data_array, data_format, expected_output", + [ + pytest.param( + b"test_data", + None, + None, + "base64", + "dGVzdF9kYXRh", + id="bytes_base64", + ), + pytest.param( + b"test_data", + None, + None, + "plain", + "test_data", + id="bytes_non_base64", + ), + pytest.param( + None, + "dGVzdF9kYXRh", + None, + "base64", + "dGVzdF9kYXRh", + id="string_base64", + ), + pytest.param( + None, + "plain_data", + None, + None, + "plain_data", + id="string_non_base64", + ), + pytest.param( + None, + None, + np.array([1, 2, 3], dtype=np.uint8), + "base64", + "AQID", + id="array_base64", + ), + pytest.param( + None, + None, + np.array([1, 2, 3], dtype=np.uint8), + "plain", + "\1\2\3", + id="array_non_base64", + ), + ], +) +def test__data_str(data_bytes, data_str, data_array, data_format, expected_output): + data_uri = DataUri(data_bytes=data_bytes, data_str=data_str, data_array=data_array, data_format=data_format) + assert data_uri._data_str() == expected_output diff --git a/python/tests/unit/contents/test_function_result_content.py b/python/tests/unit/contents/test_function_result_content.py index 5bb549924d81..2e83dc9737a9 100644 --- a/python/tests/unit/contents/test_function_result_content.py +++ b/python/tests/unit/contents/test_function_result_content.py @@ -4,6 +4,7 @@ from unittest.mock import Mock import pytest +from pydantic import BaseModel, ConfigDict, Field from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent @@ -12,6 +13,7 @@ from semantic_kernel.contents.text_content import TextContent from semantic_kernel.functions.function_result import FunctionResult from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata +from semantic_kernel.kernel_pydantic import KernelBaseModel class CustomResultClass: @@ -34,6 +36,17 @@ def __str__(self): return f"CustomObjectWithList({self.items})" +class AccountBalanceFrozen(KernelBaseModel): + # Make the model frozen so it's hashable + balance: int = Field(default=..., alias="account_balance") + model_config = ConfigDict(frozen=True) + + +class AccountBalanceNonFrozen(KernelBaseModel): + # This model is not frozen and thus not hashable by default + balance: int = Field(default=..., alias="account_balance") + + def 
test_init(): frc = FunctionResultContent(id="test", name="test-function", result="test-result", metadata={"test": "test"}) assert frc.name == "test-function" @@ -124,3 +137,83 @@ def __str__(self) -> str: frc.model_dump_json(exclude_none=True) == """{"metadata":{},"content_type":"function_result","id":"test","result":"test","name":"test-function","function_name":"function","plugin_name":"test"}""" # noqa: E501 ) + + +def test_hash_with_frozen_account_balance(): + balance = AccountBalanceFrozen(account_balance=100) + content = FunctionResultContent( + id="test_id", + result=balance, + function_name="TestFunction", + ) + _ = hash(content) + assert True, "Hashing FunctionResultContent with frozen model should not raise errors." + + +def test_hash_with_dict_result(): + balance_dict = {"account_balance": 100} + content = FunctionResultContent( + id="test_id", + result=balance_dict, + function_name="TestFunction", + ) + _ = hash(content) + assert True, "Hashing FunctionResultContent with dict result should not raise errors." + + +def test_hash_with_nested_dict_result(): + nested_dict = {"account_balance": 100, "details": {"currency": "USD", "last_updated": "2025-01-28"}} + content = FunctionResultContent( + id="test_id_nested", + result=nested_dict, + function_name="TestFunctionNested", + ) + _ = hash(content) + assert True, "Hashing FunctionResultContent with nested dict result should not raise errors." + + +def test_hash_with_list_result(): + balance_list = [100, 200, 300] + content = FunctionResultContent( + id="test_id_list", + result=balance_list, + function_name="TestFunctionList", + ) + _ = hash(content) + assert True, "Hashing FunctionResultContent with list result should not raise errors." + + +def test_hash_with_set_result(): + balance_set = {100, 200, 300} + content = FunctionResultContent( + id="test_id_set", + result=balance_set, + function_name="TestFunctionSet", + ) + _ = hash(content) + assert True, "Hashing FunctionResultContent with set result should not raise errors." + + +def test_hash_with_custom_object_result(): + class CustomObject(BaseModel): + field1: str + field2: int + + custom_obj = CustomObject(field1="value1", field2=42) + content = FunctionResultContent( + id="test_id_custom", + result=custom_obj, + function_name="TestFunctionCustom", + ) + _ = hash(content) + assert True, "Hashing FunctionResultContent with custom object result should not raise errors." + + +def test_unhashable_non_frozen_model_raises_type_error(): + balance = AccountBalanceNonFrozen(account_balance=100) + content = FunctionResultContent( + id="test_id_unhashable", + result=balance, + function_name="TestFunctionUnhashable", + ) + _ = hash(content) diff --git a/python/tests/unit/contents/test_hashing_utils.py b/python/tests/unit/contents/test_hashing_utils.py new file mode 100644 index 000000000000..66bcf52c13d9 --- /dev/null +++ b/python/tests/unit/contents/test_hashing_utils.py @@ -0,0 +1,196 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from typing import Any + +from pydantic import BaseModel + +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent + + +class SimpleModel(BaseModel): + field1: str + field2: int + + +class NestedModel(BaseModel): + name: str + values: list[int] + + +class ModelContainer(BaseModel): + container_name: str + nested_model: NestedModel + + +def test_hash_with_nested_structures(): + """ + Deeply nested dictionaries and lists, but with no cyclical references. + Ensures multiple levels of nested transformations work. + """ + data = { + "level1": { + "list1": [1, 2, 3], + "dict1": {"keyA": "valA", "keyB": "valB"}, + }, + "level2": [ + {"sub_dict1": {"x": 99}}, + {"sub_dict2": {"y": 100}}, + ], + } + content = FunctionResultContent( + id="test_nested_structures", + result=data, + function_name="TestNestedStructures", + ) + _ = hash(content) + assert True, "Hashing deeply nested structures succeeded." + + +def test_hash_with_repeated_references(): + """ + Multiple references to the same object, but no cycle. + Ensures repeated objects are handled consistently and do not cause duplication. + """ + shared_dict = {"common_key": "common_value"} + data = { + "ref1": shared_dict, + "ref2": shared_dict, # same object, repeated reference + } + content = FunctionResultContent( + id="test_repeated_references", + result=data, + function_name="TestRepeatedRefs", + ) + _ = hash(content) + assert True, "Hashing repeated references (no cycles) succeeded." + + +def test_hash_with_simple_pydantic_model(): + """ + Hash a Pydantic model that doesn't reference itself or another model. + """ + model_instance = SimpleModel(field1="hello", field2=42) + content = FunctionResultContent( + id="test_simple_model", + result=model_instance, + function_name="TestSimpleModel", + ) + _ = hash(content) + assert True, "Hashing a simple Pydantic model succeeded." + + +def test_hash_with_nested_pydantic_models(): + """ + Hash a Pydantic model containing another Pydantic model, no cycles. + """ + nested = NestedModel(name="MyNestedModel", values=[1, 2, 3]) + container = ModelContainer(container_name="TopLevel", nested_model=nested) + content = FunctionResultContent( + id="test_nested_models", + result=container, + function_name="TestNestedModels", + ) + _ = hash(content) + assert True, "Hashing nested Pydantic models succeeded." + + +def test_hash_with_triple_cycle(): + """ + Three dictionaries referencing each other to form a cycle. + This ensures that multi-node cycles are also handled. + """ + dict_a: dict[str, Any] = {"a_key": 1} + dict_b: dict[str, Any] = {"b_key": 2} + dict_c: dict[str, Any] = {"c_key": 3} + + dict_a["ref_to_b"] = dict_b + dict_b["ref_to_c"] = dict_c + dict_c["ref_to_a"] = dict_a + + content = FunctionResultContent( + id="test_triple_cycle", + result=dict_a, + function_name="TestTripleCycle", + ) + + _ = hash(content) + assert True, "Hashing triple cyclical references succeeded." + + +def test_hash_with_cyclical_references(): + """ + The original cyclical references test for thorough coverage. 
+ """ + + class CyclicalModel(BaseModel): + name: str + partner: "CyclicalModel" = None # type: ignore + + CyclicalModel.model_rebuild() + + model_a = CyclicalModel(name="ModelA") + model_b = CyclicalModel(name="ModelB") + model_a.partner = model_b + model_b.partner = model_a + + dict_x = {"x_key": 42} + dict_y = {"y_key": 99, "ref_to_x": dict_x} + dict_x["ref_to_y"] = dict_y # type: ignore + + giant_data_structure = { + "models": [model_a, model_b], + "nested": {"cyclical_dict_x": dict_x, "cyclical_dict_y": dict_y}, + } + + content = FunctionResultContent( + id="test_id_cyclical", + result=giant_data_structure, + function_name="TestFunctionCyclical", + ) + + _ = hash(content) + + +def test_hash_with_large_structure(): + """ + Tests performance or at least correctness when dealing with + a large structure, ensuring we don't crash or exceed recursion. + """ + large_list = list(range(1000)) + large_dict = {f"key_{i}": i for i in range(1000)} + combined = { + "big_list": large_list, + "big_dict": large_dict, + "nested": [ + {"inner_list": large_list}, + {"inner_dict": large_dict}, + ], + } + content = FunctionResultContent( + id="test_large_structure", + result=combined, + function_name="TestLargeStructure", + ) + + _ = hash(content) + + +def test_hash_function_call_content(): + call_content = FunctionCallContent( + inner_content=None, + ai_model_id=None, + metadata={}, + id="call_LAbz", + index=None, + name="menu-get_specials", + function_name="get_specials", + plugin_name="menu", + arguments="{}", + ) + + content = FunctionResultContent( + id="test_function_call_content", result=call_content, function_name="TestFunctionCallContent" + ) + + _ = hash(content) diff --git a/python/tests/unit/contents/test_streaming_chat_message_content.py b/python/tests/unit/contents/test_streaming_chat_message_content.py index d9651a45489d..9d9fc8c0f4aa 100644 --- a/python/tests/unit/contents/test_streaming_chat_message_content.py +++ b/python/tests/unit/contents/test_streaming_chat_message_content.py @@ -4,8 +4,10 @@ from defusedxml.ElementTree import XML from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.file_reference_content import FileReferenceContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.contents.image_content import ImageContent from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent @@ -410,3 +412,41 @@ def test_scmc_bytes(): message = StreamingChatMessageContent(choice_index=0, role=AuthorRole.USER, content="Hello, world!") assert bytes(message) == b"Hello, world!" assert bytes(message.items[0]) == b"Hello, world!" 
+ + +def test_scmc_with_unhashable_types_can_hash(): + user_messages = [ + StreamingChatMessageContent( + role=AuthorRole.USER, + items=[ + StreamingTextContent(text="Describe this image.", choice_index=0), + ImageContent( + uri="https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/New_york_times_square-terabass.jpg/1200px-New_york_times_square-terabass.jpg" + ), + ], + choice_index=0, + ), + StreamingChatMessageContent( + role=AuthorRole.USER, + items=[ + StreamingTextContent(text="What is the main color in this image?", choice_index=0), + ImageContent(uri="https://upload.wikimedia.org/wikipedia/commons/5/56/White_shark.jpg"), + ], + choice_index=0, + ), + StreamingChatMessageContent( + role=AuthorRole.USER, + items=[ + StreamingTextContent(text="Is there an animal in this image?", choice_index=0), + FileReferenceContent(file_id="test_file_id"), + ], + choice_index=0, + ), + StreamingChatMessageContent( + role=AuthorRole.USER, + choice_index=0, + ), + ] + + for message in user_messages: + assert hash(message) is not None diff --git a/python/tests/unit/core_plugins/test_crew_ai_enterprise.py b/python/tests/unit/core_plugins/test_crew_ai_enterprise.py new file mode 100644 index 000000000000..1dbf8ee40679 --- /dev/null +++ b/python/tests/unit/core_plugins/test_crew_ai_enterprise.py @@ -0,0 +1,95 @@ +# Copyright (c) Microsoft. All rights reserved. + +from unittest.mock import patch + +import pytest + +from semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise import CrewAIEnterprise +from semantic_kernel.core_plugins.crew_ai.crew_ai_models import CrewAIEnterpriseKickoffState, CrewAIStatusResponse +from semantic_kernel.exceptions.function_exceptions import PluginInitializationError +from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata +from semantic_kernel.functions.kernel_plugin import KernelPlugin + + +@pytest.fixture +def crew_ai_enterprise(): + return CrewAIEnterprise(endpoint="https://test.com", auth_token="FakeToken") + + +def test_it_can_be_instantiated(crew_ai_enterprise): + assert crew_ai_enterprise is not None + + +def test_create_kernel_plugin(crew_ai_enterprise): + plugin = crew_ai_enterprise.create_kernel_plugin( + name="test_plugin", + description="Test plugin", + parameters=[KernelParameterMetadata(name="param1")], + ) + assert isinstance(plugin, KernelPlugin) + assert "kickoff" in plugin.functions + assert "kickoff_and_wait" in plugin.functions + assert "get_status" in plugin.functions + assert "wait_for_completion" in plugin.functions + + +@patch("semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise.CrewAIEnterpriseClient.kickoff") +async def test_kickoff(mock_kickoff, crew_ai_enterprise): + mock_kickoff.return_value.kickoff_id = "123" + kickoff_id = await crew_ai_enterprise.kickoff(inputs={"param1": "value"}) + assert kickoff_id == "123" + + +@pytest.mark.parametrize( + "state", + [ + CrewAIEnterpriseKickoffState.Pending, + CrewAIEnterpriseKickoffState.Started, + CrewAIEnterpriseKickoffState.Running, + CrewAIEnterpriseKickoffState.Success, + CrewAIEnterpriseKickoffState.Failed, + CrewAIEnterpriseKickoffState.Failure, + CrewAIEnterpriseKickoffState.Not_Found, + ], +) +@patch("semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise.CrewAIEnterpriseClient.get_status") +async def test_get_crew_kickoff_status(mock_get_status, crew_ai_enterprise, state): + mock_get_status.return_value = CrewAIStatusResponse(state=state.value) + status_response = await crew_ai_enterprise.get_crew_kickoff_status(kickoff_id="123") + assert 
status_response.state == state + + +@patch("semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise.CrewAIEnterpriseClient.get_status") +async def test_wait_for_crew_completion(mock_get_status, crew_ai_enterprise): + mock_get_status.side_effect = [ + CrewAIStatusResponse(state=CrewAIEnterpriseKickoffState.Pending), + CrewAIStatusResponse(state=CrewAIEnterpriseKickoffState.Success, result="result"), + ] + result = await crew_ai_enterprise.wait_for_crew_completion(kickoff_id="123") + assert result == "result" + + +def test_build_arguments(crew_ai_enterprise): + parameters = [KernelParameterMetadata(name="param1")] + arguments = {"param1": "value"} + args = crew_ai_enterprise._build_arguments(parameters, arguments) + assert args["param1"] == "value" + + +def test_build_arguments_missing_param(crew_ai_enterprise): + parameters = [KernelParameterMetadata(name="param1")] + arguments = {} + with pytest.raises(PluginInitializationError): + crew_ai_enterprise._build_arguments(parameters, arguments) + + +@patch("semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise.CrewAIEnterpriseClient.__aenter__") +async def test_aenter(mock_aenter, crew_ai_enterprise): + await crew_ai_enterprise.__aenter__() + mock_aenter.assert_called_once() + + +@patch("semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise.CrewAIEnterpriseClient.__aexit__") +async def test_aexit(mock_aexit, crew_ai_enterprise): + await crew_ai_enterprise.__aexit__() + mock_aexit.assert_called_once() diff --git a/python/tests/unit/functions/test_kernel_experimental_decorator.py b/python/tests/unit/functions/test_kernel_experimental_decorator.py deleted file mode 100644 index a7391fed504f..000000000000 --- a/python/tests/unit/functions/test_kernel_experimental_decorator.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from semantic_kernel.utils.experimental_decorator import experimental_function - - -@experimental_function -def my_function() -> None: - """This is a sample function docstring.""" - pass - - -@experimental_function -def my_function_no_doc_string() -> None: - pass - - -def test_function_experimental_decorator() -> None: - assert ( - my_function.__doc__ - == "This is a sample function docstring.\n\nNote: This function is experimental and may change in the future." - ) - assert hasattr(my_function, "is_experimental") - assert my_function.is_experimental is True - - -def test_function_experimental_decorator_with_no_doc_string() -> None: - assert my_function_no_doc_string.__doc__ == "Note: This function is experimental and may change in the future." - assert hasattr(my_function_no_doc_string, "is_experimental") - assert my_function_no_doc_string.is_experimental is True diff --git a/python/tests/unit/kernel/test_kernel.py b/python/tests/unit/kernel/test_kernel.py index 0e7148ddf2c0..c40c1f7153d9 100644 --- a/python/tests/unit/kernel/test_kernel.py +++ b/python/tests/unit/kernel/test_kernel.py @@ -1,6 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
import os +from pathlib import Path from typing import Union from unittest.mock import AsyncMock, MagicMock, patch @@ -282,12 +283,12 @@ async def test_invoke_function_call(kernel: Kernel, get_tool_call_mock): patch("semantic_kernel.kernel.Kernel.get_list_of_function_metadata", return_value=[func_meta]), ): await kernel.invoke_function_call( - tool_call_mock, - chat_history_mock, - arguments, - 1, - 0, - FunctionChoiceBehavior.Auto(filters={"included_functions": ["function"]}), + function_call=tool_call_mock, + chat_history=chat_history_mock, + arguments=arguments, + function_call_count=1, + request_index=0, + function_behavior=FunctionChoiceBehavior.Auto(filters={"included_functions": ["function"]}), ) @@ -313,12 +314,12 @@ async def test_invoke_function_call_throws_during_invoke(kernel: Kernel, get_too patch("semantic_kernel.kernel.Kernel.get_function", return_value=func_mock), ): await kernel.invoke_function_call( - tool_call_mock, - chat_history_mock, - arguments, - 1, - 0, - FunctionChoiceBehavior.Auto(), + function_call=tool_call_mock, + chat_history=chat_history_mock, + arguments=arguments, + function_call_count=1, + request_index=0, + function_behavior=FunctionChoiceBehavior.Auto(), ) @@ -339,12 +340,12 @@ async def test_invoke_function_call_non_allowed_func_throws(kernel: Kernel, get_ with patch("semantic_kernel.kernel.logger", autospec=True): await kernel.invoke_function_call( - tool_call_mock, - chat_history_mock, - arguments, - 1, - 0, - FunctionChoiceBehavior.Auto(filters={"included_functions": ["unknown"]}), + function_call=tool_call_mock, + chat_history=chat_history_mock, + arguments=arguments, + function_call_count=1, + request_index=0, + function_behavior=FunctionChoiceBehavior.Auto(filters={"included_functions": ["unknown"]}), ) @@ -368,12 +369,12 @@ async def test_invoke_function_call_no_name_throws(kernel: Kernel, get_tool_call patch("semantic_kernel.kernel.logger", autospec=True), ): await kernel.invoke_function_call( - tool_call_mock, - chat_history_mock, - arguments, - 1, - 0, - FunctionChoiceBehavior.Auto(), + function_call=tool_call_mock, + chat_history=chat_history_mock, + arguments=arguments, + function_call_count=1, + request_index=0, + function_behavior=FunctionChoiceBehavior.Auto(), ) @@ -399,12 +400,12 @@ async def test_invoke_function_call_not_enough_parsed_args(kernel: Kernel, get_t patch("semantic_kernel.kernel.Kernel.get_function", return_value=func_mock), ): await kernel.invoke_function_call( - tool_call_mock, - chat_history_mock, - arguments, - 1, - 0, - FunctionChoiceBehavior.Auto(), + function_call=tool_call_mock, + chat_history=chat_history_mock, + arguments=arguments, + function_call_count=1, + request_index=0, + function_behavior=FunctionChoiceBehavior.Auto(), ) @@ -434,12 +435,12 @@ async def test_invoke_function_call_with_continuation_on_malformed_arguments(ker with patch("semantic_kernel.kernel.logger", autospec=True) as logger_mock: await kernel.invoke_function_call( - tool_call_mock, - chat_history_mock, - arguments, - 1, - 0, - FunctionChoiceBehavior.Auto(), + function_call=tool_call_mock, + chat_history=chat_history_mock, + arguments=arguments, + function_call_count=1, + request_index=0, + function_behavior=FunctionChoiceBehavior.Auto(), ) logger_mock.info.assert_any_call( @@ -483,6 +484,11 @@ def test_plugin_name_error(kernel: Kernel): kernel.add_plugin(" ", None) +def test_plugin_name_not_string_error(kernel: Kernel): + with pytest.raises(TypeError): + kernel.add_plugin(" ", plugin_name=Path(__file__).parent) + + def 
test_plugins_add_plugins(kernel: Kernel): plugin1 = KernelPlugin(name="TestPlugin") plugin2 = KernelPlugin(name="TestPlugin2") @@ -755,7 +761,7 @@ def test_instantiate_prompt_execution_settings_through_kernel(kernel_with_servic def test_experimental_class_has_decorator_and_flag(experimental_plugin_class): assert hasattr(experimental_plugin_class, "is_experimental") assert experimental_plugin_class.is_experimental - assert "This class is experimental and may change in the future." in experimental_plugin_class.__doc__ + assert "This class is marked as 'experimental' and may change in the future" in experimental_plugin_class.__doc__ # endregion diff --git a/python/tests/unit/processes/dapr_runtime/test_process_actor.py b/python/tests/unit/processes/dapr_runtime/test_process_actor.py index 78fcb464334f..f01266a9fe48 100644 --- a/python/tests/unit/processes/dapr_runtime/test_process_actor.py +++ b/python/tests/unit/processes/dapr_runtime/test_process_actor.py @@ -27,7 +27,7 @@ def actor_context(): actor_client=MagicMock(), ) kernel_mock = MagicMock() - actor = ProcessActor(runtime_context, actor_id, kernel=kernel_mock) + actor = ProcessActor(runtime_context, actor_id, kernel=kernel_mock, factories={}) actor._state_manager = AsyncMock() actor._state_manager.try_add_state = AsyncMock(return_value=True) diff --git a/python/tests/unit/processes/dapr_runtime/test_step_actor.py b/python/tests/unit/processes/dapr_runtime/test_step_actor.py index 4a5c306dc2fa..786d14e0f60f 100644 --- a/python/tests/unit/processes/dapr_runtime/test_step_actor.py +++ b/python/tests/unit/processes/dapr_runtime/test_step_actor.py @@ -6,18 +6,28 @@ import pytest from dapr.actor import ActorId +from semantic_kernel.processes.dapr_runtime.actors.actor_state_key import ActorStateKeys from semantic_kernel.processes.dapr_runtime.actors.step_actor import StepActor from semantic_kernel.processes.dapr_runtime.dapr_step_info import DaprStepInfo from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState from semantic_kernel.processes.process_message import ProcessMessage +class FakeStep: + async def activate(self, state): + self.activated_state = state + + +class FakeState: + pass + + @pytest.fixture def actor_context(): ctx = MagicMock() actor_id = ActorId("test_actor") kernel = MagicMock() - return StepActor(ctx, actor_id, kernel) + return StepActor(ctx, actor_id, kernel, factories={}) async def test_initialize_step(actor_context): @@ -97,3 +107,98 @@ async def test_process_incoming_messages(actor_context): expected_messages = [] expected_messages = [json.dumps(msg.model_dump()) for msg in list(actor_context.incoming_messages.queue)] mock_try_add_state.assert_any_call("incomingMessagesState", expected_messages) + + +async def test_activate_step_with_factory_creates_state(actor_context): + fake_step_instance = FakeStep() + fake_step_instance.activate = AsyncMock(side_effect=fake_step_instance.activate) + + fake_plugin = MagicMock() + fake_plugin.functions = {"test_function": lambda x: x} + + with ( + patch( + "semantic_kernel.processes.dapr_runtime.actors.step_actor.get_generic_state_type", + return_value=FakeState, + ), + patch( + "semantic_kernel.processes.dapr_runtime.actors.step_actor.get_fully_qualified_name", + return_value="FakeStateFullyQualified", + ), + patch( + "semantic_kernel.processes.dapr_runtime.actors.step_actor.find_input_channels", + return_value={"channel": {"input": "value"}}, + ), + ): + actor_context.factories = {"FakeStep": lambda: fake_step_instance} + 
actor_context.inner_step_type = "FakeStep" + actor_context.step_info = DaprStepInfo( + state=KernelProcessStepState(name="default_name", id="step_123"), + inner_step_python_type="FakeStep", + edges={}, + ) + actor_context.kernel.add_plugin = MagicMock(return_value=fake_plugin) + actor_context._state_manager.try_add_state = AsyncMock() + actor_context._state_manager.save_state = AsyncMock() + + await actor_context.activate_step() + + actor_context.kernel.add_plugin.assert_called_once_with(fake_step_instance, "default_name") + assert actor_context.functions == fake_plugin.functions + assert actor_context.initial_inputs == {"channel": {"input": "value"}} + assert actor_context.inputs == {"channel": {"input": "value"}} + assert actor_context.step_state is not None + assert isinstance(actor_context.step_state.state, FakeState) + fake_step_instance.activate.assert_awaited_once_with(actor_context.step_state) + + +async def test_activate_step_with_factory_uses_existing_state(actor_context): + fake_step_instance = FakeStep() + fake_step_instance.activate = AsyncMock(side_effect=fake_step_instance.activate) + + fake_plugin = MagicMock() + fake_plugin.functions = {"test_function": lambda x: x} + + pre_existing_state = KernelProcessStepState(name="ExistingState", id="ExistingState", state=None) + + with ( + patch.object( + KernelProcessStepState, + "model_dump", + return_value={"name": "ExistingState", "id": "ExistingState", "state": None}, + ), + patch( + "semantic_kernel.processes.dapr_runtime.actors.step_actor.get_generic_state_type", + return_value=FakeState, + ), + patch( + "semantic_kernel.processes.dapr_runtime.actors.step_actor.get_fully_qualified_name", + return_value="FakeStateFullyQualified", + ), + patch( + "semantic_kernel.processes.dapr_runtime.actors.step_actor.find_input_channels", + return_value={"channel": {"input": "value"}}, + ), + ): + actor_context.factories = {"FakeStep": lambda: fake_step_instance} + actor_context.inner_step_type = "FakeStep" + actor_context.step_info = DaprStepInfo(state=pre_existing_state, inner_step_python_type="FakeStep", edges={}) + actor_context.kernel.add_plugin = MagicMock(return_value=fake_plugin) + actor_context._state_manager.try_add_state = AsyncMock() + actor_context._state_manager.save_state = AsyncMock() + + await actor_context.activate_step() + + actor_context.kernel.add_plugin.assert_called_once_with(fake_step_instance, pre_existing_state.name) + assert actor_context.functions == fake_plugin.functions + assert actor_context.initial_inputs == {"channel": {"input": "value"}} + assert actor_context.inputs == {"channel": {"input": "value"}} + actor_context._state_manager.try_add_state.assert_any_await( + ActorStateKeys.StepStateType.value, "FakeStateFullyQualified" + ) + actor_context._state_manager.try_add_state.assert_any_await( + ActorStateKeys.StepStateJson.value, json.dumps(pre_existing_state.model_dump()) + ) + actor_context._state_manager.save_state.assert_awaited_once() + assert isinstance(actor_context.step_state.state, FakeState) + fake_step_instance.activate.assert_awaited_once_with(actor_context.step_state) diff --git a/python/tests/unit/processes/kernel_process/test_kernel_process_event.py b/python/tests/unit/processes/kernel_process/test_kernel_process_event.py index f5bbecd048af..8b6fbeb43440 100644 --- a/python/tests/unit/processes/kernel_process/test_kernel_process_event.py +++ b/python/tests/unit/processes/kernel_process/test_kernel_process_event.py @@ -34,7 +34,7 @@ def test_initialization_with_visibility(): # Assert assert 
event.id == event_id assert event.data == event_data - assert event.visibility == KernelProcessEventVisibility.Public.value + assert event.visibility == KernelProcessEventVisibility.Public def test_invalid_visibility(): diff --git a/python/tests/unit/processes/local_runtime/test_local_kernel_process_context.py b/python/tests/unit/processes/local_runtime/test_local_kernel_process_context.py index 2f73faaf2741..a60a65abfcc5 100644 --- a/python/tests/unit/processes/local_runtime/test_local_kernel_process_context.py +++ b/python/tests/unit/processes/local_runtime/test_local_kernel_process_context.py @@ -26,6 +26,7 @@ def mock_process(): process = MagicMock(spec=KernelProcess) process.state = state process.steps = [step_info] + process.factories = {} return process diff --git a/python/tests/unit/processes/local_runtime/test_local_process.py b/python/tests/unit/processes/local_runtime/test_local_process.py index 605f88255dd8..00844643fa17 100644 --- a/python/tests/unit/processes/local_runtime/test_local_process.py +++ b/python/tests/unit/processes/local_runtime/test_local_process.py @@ -271,6 +271,7 @@ def test_initialize_process(mock_process, mock_kernel, build_model): mock_local_step_init.assert_called_with( step_info=step_info, kernel=mock_kernel, + factories={}, parent_process_id=local_process.id, ) @@ -310,7 +311,7 @@ async def test_handle_message_with_valid_event_id(mock_process_with_output_edges assert isinstance(event, KernelProcessEvent) assert event.id == "valid_event_id" assert event.data == message.target_event_data - assert event.visibility == KernelProcessEventVisibility.Internal.value + assert event.visibility == KernelProcessEventVisibility.Internal END_PROCESS_ID = "END" diff --git a/python/tests/unit/processes/test_process_edge_builder.py b/python/tests/unit/processes/test_process_edge_builder.py index 8be3d5445a51..6d6ab89326f2 100644 --- a/python/tests/unit/processes/test_process_edge_builder.py +++ b/python/tests/unit/processes/test_process_edge_builder.py @@ -7,7 +7,9 @@ from semantic_kernel.processes.process_builder import ProcessBuilder from semantic_kernel.processes.process_edge_builder import ProcessEdgeBuilder -from semantic_kernel.processes.process_function_target_builder import ProcessFunctionTargetBuilder +from semantic_kernel.processes.process_function_target_builder import ( + ProcessFunctionTargetBuilder, +) from semantic_kernel.processes.process_step_builder import ProcessStepBuilder from semantic_kernel.processes.process_step_edge_builder import ProcessStepEdgeBuilder @@ -88,12 +90,55 @@ def test_send_event_to_with_step_builder(): assert linked_edge_builder.target == edge_builder.target +def test_send_event_to_step_with_multiple_functions(): + from semantic_kernel.functions.kernel_function_metadata import ( + KernelFunctionMetadata, + ) # noqa: F401 + + # Arrange + source = MagicMock(spec=ProcessBuilder) + source.link_to = MagicMock() + + target_step = ProcessStepBuilder(name="test_step") + target_step.functions_dict = { + "func_1": MagicMock(spec=KernelFunctionMetadata), + "func_2": MagicMock(spec=KernelFunctionMetadata), + } + + event_id = "event_004" + edge_builder = ProcessEdgeBuilder(source=source, event_id=event_id) + + # Act - Create edges to both functions in the step + result1 = edge_builder.send_event_to(target_step, function_name="func_1", parameter_name="input_param1") + result2 = edge_builder.send_event_to(target_step, function_name="func_2", parameter_name="input_param2") + + # Assert + # Verify both edges were created + assert 
len(source.link_to.call_args_list) == 2 + + # Check first edge + first_edge = source.link_to.call_args_list[0][0][1] + assert isinstance(first_edge, ProcessStepEdgeBuilder) + assert first_edge.target.function_name == "func_1" + assert first_edge.target.parameter_name == "input_param1" + assert first_edge.target.step == target_step + assert isinstance(result1, ProcessEdgeBuilder) + + # Check second edge + second_edge = source.link_to.call_args_list[1][0][1] + assert isinstance(second_edge, ProcessStepEdgeBuilder) + assert second_edge.target.function_name == "func_2" + assert second_edge.target.parameter_name == "input_param2" + assert second_edge.target.step == target_step + assert isinstance(result2, ProcessEdgeBuilder) + + def test_send_event_to_creates_step_edge(): # Arrange source = MagicMock(spec=ProcessBuilder) source.link_to = MagicMock() target = MagicMock(spec=ProcessFunctionTargetBuilder) - event_id = "event_004" + event_id = "event_005" edge_builder = ProcessEdgeBuilder(source=source, event_id=event_id) # Act @@ -110,7 +155,7 @@ def test_send_event_to_creates_step_edge(): def test_send_event_to_raises_error_on_invalid_target(): # Arrange source = MagicMock(spec=ProcessBuilder) - event_id = "event_005" + event_id = "event_006" edge_builder = ProcessEdgeBuilder(source=source, event_id=event_id) # Act & Assert diff --git a/python/tests/unit/processes/test_process_step_builder.py b/python/tests/unit/processes/test_process_step_builder.py index f0b5a87d4eb4..63a38e9ffcab 100644 --- a/python/tests/unit/processes/test_process_step_builder.py +++ b/python/tests/unit/processes/test_process_step_builder.py @@ -1,5 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. +from enum import Enum from unittest.mock import MagicMock import pytest @@ -14,6 +15,10 @@ from semantic_kernel.processes.process_step_edge_builder import ProcessStepEdgeBuilder +class TestFunctionEnum(Enum): + MY_FUNCTION = "my_function" + + class MockKernelProcessStep(KernelProcessStep): """A mock class to use as a step type.""" @@ -177,3 +182,24 @@ def test_link_to_multiple_edges(): # Assert assert step_builder.edges[event_id] == [edge_builder_1, edge_builder_2] + + +@pytest.mark.parametrize( + "function_name, expected_function_name", + [ + ("my_function", "my_function"), + (TestFunctionEnum.MY_FUNCTION, TestFunctionEnum.MY_FUNCTION.value), + ], +) +def test_on_function_result(function_name, expected_function_name): + # Arrange + name = "test_step" + step_builder = ProcessStepBuilder(name=name) + + # Act + edge_builder = step_builder.on_function_result(function_name=function_name) + + # Assert + assert isinstance(edge_builder, ProcessStepEdgeBuilder) + assert edge_builder.source == step_builder + assert edge_builder.event_id == f"{step_builder.event_namespace}.{expected_function_name}.OnResult" diff --git a/python/tests/unit/prompt_template/test_prompt_templates.py b/python/tests/unit/prompt_template/test_prompt_templates.py index 7a0247d2d131..80e345f2431c 100644 --- a/python/tests/unit/prompt_template/test_prompt_templates.py +++ b/python/tests/unit/prompt_template/test_prompt_templates.py @@ -326,3 +326,4 @@ def test_from_yaml_with_function_choice_behavior(): def test_multiple_param_in_prompt(): func = KernelFunctionFromPrompt("test", prompt="{{$param}}{{$param}}") assert len(func.parameters) == 1 + assert func.metadata.parameters[0].schema_data == {"type": "object"} diff --git a/python/tests/unit/test_serialization.py b/python/tests/unit/test_serialization.py index 4f206899ea4d..c2bc66eb0c40 100644 --- 
a/python/tests/unit/test_serialization.py +++ b/python/tests/unit/test_serialization.py @@ -113,14 +113,14 @@ def constructor(cls: t.Type[_Serializable]) -> _Serializable: PROTOCOLS = [ - pytest.param(ConversationSummaryPlugin, marks=pytest.mark.xfail(reason="Contains data")), + ConversationSummaryPlugin, HttpPlugin, MathPlugin, TextMemoryPlugin, TextPlugin, TimePlugin, WaitPlugin, - pytest.param(WebSearchEnginePlugin, marks=pytest.mark.xfail(reason="Contains data")), + WebSearchEnginePlugin, ] BASE_CLASSES = [ @@ -146,17 +146,20 @@ def constructor(cls: t.Type[_Serializable]) -> _Serializable: KernelParameterMetadata, KernelFunctionMetadata, ChatHistory, +] +KERNEL_FUNCTION_OPTIONAL = [KernelFunction] +KERNEL_FUNCTION_REQUIRED = [ pytest.param( KernelFunction, marks=pytest.mark.xfail(reason="Need to implement Pickle serialization."), - ), + ) ] class TestUsageInPydanticFields: @pytest.mark.parametrize( "kernel_type", - BASE_CLASSES + PROTOCOLS + ENUMS + PYDANTIC_MODELS + STATELESS_CLASSES, + BASE_CLASSES + PROTOCOLS + ENUMS + PYDANTIC_MODELS + STATELESS_CLASSES + KERNEL_FUNCTION_OPTIONAL, ) def test_usage_as_optional_field( self, @@ -170,11 +173,11 @@ def test_usage_as_optional_field( class TestModel(KernelBaseModel): """A test model.""" - field: t.Optional[kernel_type] = None + field: kernel_type | None = None assert_serializable(TestModel(), TestModel) - @pytest.mark.parametrize("kernel_type", PYDANTIC_MODELS + STATELESS_CLASSES) + @pytest.mark.parametrize("kernel_type", PYDANTIC_MODELS + STATELESS_CLASSES + KERNEL_FUNCTION_REQUIRED) def test_usage_as_required_field( self, kernel_factory: t.Callable[[t.Type[KernelBaseModelFieldT]], KernelBaseModelFieldT], diff --git a/python/tests/unit/utils/agent_diagnostics/test_agent_decorated.py b/python/tests/unit/utils/agent_diagnostics/test_agent_decorated.py index 747bf38c52b1..36fc5f9ee2b4 100644 --- a/python/tests/unit/utils/agent_diagnostics/test_agent_decorated.py +++ b/python/tests/unit/utils/agent_diagnostics/test_agent_decorated.py @@ -3,7 +3,7 @@ import pytest from semantic_kernel.agents.chat_completion.chat_completion_agent import ChatCompletionAgent -from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase +from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent pytestmark = pytest.mark.parametrize( "decorated_method, expected_attribute", @@ -22,12 +22,12 @@ # endregion # region OpenAIAssistantAgent pytest.param( - OpenAIAssistantBase.invoke, + OpenAIAssistantAgent.invoke, "__agent_diagnostics__", id="OpenAIAssistantBase.invoke", ), pytest.param( - OpenAIAssistantBase.invoke_stream, + OpenAIAssistantAgent.invoke_stream, "__agent_diagnostics__", id="OpenAIAssistantBase.invoke_stream", ), diff --git a/python/tests/unit/utils/agent_diagnostics/test_trace_chat_completion_agent.py b/python/tests/unit/utils/agent_diagnostics/test_trace_chat_completion_agent.py index 3c1df16efa14..f222e51227a3 100644 --- a/python/tests/unit/utils/agent_diagnostics/test_trace_chat_completion_agent.py +++ b/python/tests/unit/utils/agent_diagnostics/test_trace_chat_completion_agent.py @@ -18,7 +18,7 @@ async def test_chat_completion_agent_invoke(mock_tracer, chat_history): async for _ in chat_completion_agent.invoke(chat_history): pass # Assert - mock_tracer.start_as_current_span.assert_called_once_with(chat_completion_agent.name) + mock_tracer.start_as_current_span.assert_called_once_with(f"invoke_agent {chat_completion_agent.name}") 
@patch("semantic_kernel.utils.telemetry.agent_diagnostics.decorators.tracer") @@ -30,4 +30,4 @@ async def test_chat_completion_agent_invoke_stream(mock_tracer, chat_history): async for _ in chat_completion_agent.invoke_stream(chat_history): pass # Assert - mock_tracer.start_as_current_span.assert_called_once_with(chat_completion_agent.name) + mock_tracer.start_as_current_span.assert_called_once_with(f"invoke_agent {chat_completion_agent.name}") diff --git a/python/tests/unit/utils/agent_diagnostics/test_trace_open_ai_assistant_agent.py b/python/tests/unit/utils/agent_diagnostics/test_trace_open_ai_assistant_agent.py index d4a7ae6134ef..4c20e4d6da42 100644 --- a/python/tests/unit/utils/agent_diagnostics/test_trace_open_ai_assistant_agent.py +++ b/python/tests/unit/utils/agent_diagnostics/test_trace_open_ai_assistant_agent.py @@ -1,33 +1,71 @@ # Copyright (c) Microsoft. All rights reserved. -from unittest.mock import patch +from unittest.mock import AsyncMock, patch -import pytest +from openai import AsyncOpenAI +from openai.types.beta.assistant import Assistant from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent -from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole @patch("semantic_kernel.utils.telemetry.agent_diagnostics.decorators.tracer") async def test_open_ai_assistant_agent_invoke(mock_tracer, chat_history, openai_unit_test_env): # Arrange - open_ai_assistant_agent = OpenAIAssistantAgent() + client = AsyncMock(spec=AsyncOpenAI) + definition = AsyncMock(spec=Assistant) + definition.name = "agentName" + definition.description = "agentDescription" + definition.id = "agentId" + definition.instructions = "agentInstructions" + definition.tools = [] + definition.model = "agentModel" + definition.temperature = 1.0 + definition.top_p = 1.0 + definition.metadata = {} + open_ai_assistant_agent = OpenAIAssistantAgent(client=client, definition=definition) + + async def fake_invoke(*args, **kwargs): + yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") + # Act - with pytest.raises(AgentInitializationException): - async for _ in open_ai_assistant_agent.invoke(chat_history): + with patch( + "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke", + side_effect=fake_invoke, + ): + async for item in open_ai_assistant_agent.invoke("thread_id"): pass # Assert - mock_tracer.start_as_current_span.assert_called_once_with(open_ai_assistant_agent.name) + mock_tracer.start_as_current_span.assert_called_once_with(f"invoke_agent {open_ai_assistant_agent.name}") @patch("semantic_kernel.utils.telemetry.agent_diagnostics.decorators.tracer") async def test_open_ai_assistant_agent_invoke_stream(mock_tracer, chat_history, openai_unit_test_env): # Arrange - open_ai_assistant_agent = OpenAIAssistantAgent() + client = AsyncMock(spec=AsyncOpenAI) + definition = AsyncMock(spec=Assistant) + definition.name = "agentName" + definition.description = "agentDescription" + definition.id = "agentId" + definition.instructions = "agentInstructions" + definition.tools = [] + definition.model = "agentModel" + definition.temperature = 1.0 + definition.top_p = 1.0 + definition.metadata = {} + open_ai_assistant_agent = OpenAIAssistantAgent(client=client, definition=definition) + + async def fake_invoke(*args, **kwargs): + yield True, 
ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") + # Act - with pytest.raises(AgentInitializationException): - async for _ in open_ai_assistant_agent.invoke_stream(chat_history): + with patch( + "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke_stream", + side_effect=fake_invoke, + ): + async for item in open_ai_assistant_agent.invoke_stream("thread_id"): pass # Assert - mock_tracer.start_as_current_span.assert_called_once_with(open_ai_assistant_agent.name) + mock_tracer.start_as_current_span.assert_called_once_with(f"invoke_agent {open_ai_assistant_agent.name}") diff --git a/python/tests/unit/utils/test_feature_stage_decorator.py b/python/tests/unit/utils/test_feature_stage_decorator.py new file mode 100644 index 000000000000..7c82fa12cdc2 --- /dev/null +++ b/python/tests/unit/utils/test_feature_stage_decorator.py @@ -0,0 +1,124 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.utils.feature_stage_decorator import experimental, release_candidate + + +@experimental +def my_function() -> None: + """This is a sample function docstring.""" + pass + + +@release_candidate +def my_function_release_candidate() -> None: + """This is a sample function docstring.""" + pass + + +@release_candidate +def my_function_release_candidate_no_doc_string() -> None: + pass + + +@release_candidate(version="1.0.0-rc2") +def my_function_release_candidate_with_version() -> None: + """This is a sample function docstring.""" + pass + + +@experimental +def my_function_no_doc_string() -> None: + pass + + +@experimental +class MyExperimentalClass: + """A class that is still evolving rapidly.""" + + pass + + +@release_candidate +class MyRCClass: + """A class that is nearly final, but still in release-candidate stage.""" + + pass + + +@release_candidate(version="1.0.0-rc2") +class MyRCClassTwo: + """A class that is nearly final, but still in release-candidate stage.""" + + pass + + +def test_function_experimental_decorator(): + assert ( + my_function.__doc__ + == "This is a sample function docstring.\n\nNote: This function is marked as 'experimental' and may change in the future." # noqa: E501 + ) + assert hasattr(my_function, "is_experimental") + assert my_function.is_experimental is True + + +def test_function_experimental_decorator_with_no_doc_string(): + assert ( + my_function_no_doc_string.__doc__ + == "Note: This function is marked as 'experimental' and may change in the future." 
+ ) + assert hasattr(my_function_no_doc_string, "is_experimental") + assert my_function_no_doc_string.is_experimental is True + + +def test_function_release_candidate_decorator(): + assert ( + "Features marked with this status are nearing completion and are considered" + in my_function_release_candidate_no_doc_string.__doc__ + ) + assert hasattr(my_function_release_candidate, "is_release_candidate") + assert my_function_release_candidate.is_release_candidate is True + assert "Version:" in my_function_release_candidate_no_doc_string.__doc__ + + +def test_function_release_candidate_decorator_and_version(): + assert ( + "Features marked with this status are nearing completion and are considered" + in my_function_release_candidate_with_version.__doc__ + ) + assert hasattr(my_function_release_candidate, "is_release_candidate") + assert my_function_release_candidate.is_release_candidate is True + assert "Version:" in my_function_release_candidate_with_version.__doc__ + + +def test_function_release_candidate_decorator_with_no_doc_string(): + assert ( + "Features marked with this status are nearing completion" in my_function_release_candidate_no_doc_string.__doc__ + ) + assert hasattr(my_function_release_candidate_no_doc_string, "is_release_candidate") + assert my_function_release_candidate_no_doc_string.is_release_candidate is True + assert "Version:" in my_function_release_candidate_no_doc_string.__doc__ + + +def test_class_experimental_decorator(): + assert MyExperimentalClass.__doc__ == ( + "A class that is still evolving rapidly.\n\nNote: This class is marked as " + "'experimental' and may change in the future." + ) + assert hasattr(MyExperimentalClass, "is_experimental") + assert MyExperimentalClass.is_experimental is True + + +def test_class_release_candidate_decorator(): + assert "Features marked with this status are nearing completion" in MyRCClass.__doc__ + assert hasattr(MyRCClass, "is_release_candidate") + assert MyRCClass.is_release_candidate is True + assert "Version:" in MyRCClass.__doc__ + + +def test_class_release_candidate_decorator_with_version(): + assert "Features marked with this status are nearing completion" in MyRCClassTwo.__doc__ + expected_version = "1.0.0-rc2" + assert expected_version in MyRCClassTwo.__doc__ + assert hasattr(MyRCClassTwo, "is_release_candidate") + assert MyRCClassTwo.is_release_candidate is True + assert "Version:" in MyRCClassTwo.__doc__ diff --git a/python/uv.lock b/python/uv.lock index 6daaa60a388a..696e0d1cf6ef 100644 --- a/python/uv.lock +++ b/python/uv.lock @@ -22,7 +22,7 @@ supported-markers = [ [[package]] name = "accelerate" -version = "1.2.1" +version = "1.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -33,23 +33,23 @@ dependencies = [ { name = "safetensors", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "torch", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/42/09/7947691b7d44bfc739da4a44cc47d6a6d75e6fe9adf047c5234d7cb6be64/accelerate-1.2.1.tar.gz", hash = "sha256:03e161fc69d495daf2b9b5c8d5b43d06e2145520c04727b5bda56d49f1a43ab5", size = 341652 } +sdist = { url = "https://files.pythonhosted.org/packages/8f/02/24a4c4edb9cf0f1e0bc32bb6829e2138f1cc201442e7a24f0daf93b8a15a/accelerate-1.4.0.tar.gz", hash = 
"sha256:37d413e1b64cb8681ccd2908ae211cf73e13e6e636a2f598a96eccaa538773a5", size = 348745 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/60/a585c806d6c0ec5f8149d44eb202714792802f484e6e2b1bf96b23bd2b00/accelerate-1.2.1-py3-none-any.whl", hash = "sha256:be1cbb958cf837e7cdfbde46b812964b1b8ae94c9c7d94d921540beafcee8ddf", size = 336355 }, + { url = "https://files.pythonhosted.org/packages/0a/f6/791b9d7eb371a2f385da3b7f1769ced72ead7bf09744637ea2985c83d7ee/accelerate-1.4.0-py3-none-any.whl", hash = "sha256:f6e1e7dfaf9d799a20a1dc45efbf4b1546163eac133faa5acd0d89177c896e55", size = 342129 }, ] [[package]] name = "aiohappyeyeballs" -version = "2.4.4" +version = "2.4.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7f/55/e4373e888fdacb15563ef6fa9fa8c8252476ea071e96fb46defac9f18bf2/aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745", size = 21977 } +sdist = { url = "https://files.pythonhosted.org/packages/08/07/508f9ebba367fc3370162e53a3cfd12f5652ad79f0e0bfdf9f9847c6f159/aiohappyeyeballs-2.4.6.tar.gz", hash = "sha256:9b05052f9042985d32ecbe4b59a77ae19c006a78f1344d7fdad69d28ded3d0b0", size = 21726 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b9/74/fbb6559de3607b3300b9be3cc64e97548d55678e44623db17820dbd20002/aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8", size = 14756 }, + { url = "https://files.pythonhosted.org/packages/44/4c/03fb05f56551828ec67ceb3665e5dc51638042d204983a03b0a1541475b6/aiohappyeyeballs-2.4.6-py3-none-any.whl", hash = "sha256:147ec992cf873d74f5062644332c539fcd42956dc69453fe5204195e560517e1", size = 14543 }, ] [[package]] name = "aiohttp" -version = "3.11.11" +version = "3.11.13" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -61,68 +61,110 @@ dependencies = [ { name = "propcache", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "yarl", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fe/ed/f26db39d29cd3cb2f5a3374304c713fe5ab5a0e4c8ee25a0c45cc6adf844/aiohttp-3.11.11.tar.gz", hash = "sha256:bb49c7f1e6ebf3821a42d81d494f538107610c3a705987f53068546b0e90303e", size = 7669618 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/75/7d/ff2e314b8f9e0b1df833e2d4778eaf23eae6b8cc8f922495d110ddcbf9e1/aiohttp-3.11.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a60804bff28662cbcf340a4d61598891f12eea3a66af48ecfdc975ceec21e3c8", size = 708550 }, - { url = "https://files.pythonhosted.org/packages/09/b8/aeb4975d5bba233d6f246941f5957a5ad4e3def8b0855a72742e391925f2/aiohttp-3.11.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b4fa1cb5f270fb3eab079536b764ad740bb749ce69a94d4ec30ceee1b5940d5", size = 468430 }, - { url = "https://files.pythonhosted.org/packages/9c/5b/5b620279b3df46e597008b09fa1e10027a39467387c2332657288e25811a/aiohttp-3.11.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:731468f555656767cda219ab42e033355fe48c85fbe3ba83a349631541715ba2", size = 455593 }, - { url = 
"https://files.pythonhosted.org/packages/d8/75/0cdf014b816867d86c0bc26f3d3e3f194198dbf33037890beed629cd4f8f/aiohttp-3.11.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb23d8bb86282b342481cad4370ea0853a39e4a32a0042bb52ca6bdde132df43", size = 1584635 }, - { url = "https://files.pythonhosted.org/packages/df/2f/95b8f4e4dfeb57c1d9ad9fa911ede35a0249d75aa339edd2c2270dc539da/aiohttp-3.11.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f047569d655f81cb70ea5be942ee5d4421b6219c3f05d131f64088c73bb0917f", size = 1632363 }, - { url = "https://files.pythonhosted.org/packages/39/cb/70cf69ea7c50f5b0021a84f4c59c3622b2b3b81695f48a2f0e42ef7eba6e/aiohttp-3.11.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd7659baae9ccf94ae5fe8bfaa2c7bc2e94d24611528395ce88d009107e00c6d", size = 1668315 }, - { url = "https://files.pythonhosted.org/packages/2f/cc/3a3fc7a290eabc59839a7e15289cd48f33dd9337d06e301064e1e7fb26c5/aiohttp-3.11.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af01e42ad87ae24932138f154105e88da13ce7d202a6de93fafdafb2883a00ef", size = 1589546 }, - { url = "https://files.pythonhosted.org/packages/15/b4/0f7b0ed41ac6000e283e7332f0f608d734b675a8509763ca78e93714cfb0/aiohttp-3.11.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5854be2f3e5a729800bac57a8d76af464e160f19676ab6aea74bde18ad19d438", size = 1544581 }, - { url = "https://files.pythonhosted.org/packages/58/b9/4d06470fd85c687b6b0e31935ef73dde6e31767c9576d617309a2206556f/aiohttp-3.11.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6526e5fb4e14f4bbf30411216780c9967c20c5a55f2f51d3abd6de68320cc2f3", size = 1529256 }, - { url = "https://files.pythonhosted.org/packages/61/a2/6958b1b880fc017fd35f5dfb2c26a9a50c755b75fd9ae001dc2236a4fb79/aiohttp-3.11.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:85992ee30a31835fc482468637b3e5bd085fa8fe9392ba0bdcbdc1ef5e9e3c55", size = 1536592 }, - { url = "https://files.pythonhosted.org/packages/0f/dd/b974012a9551fd654f5bb95a6dd3f03d6e6472a17e1a8216dd42e9638d6c/aiohttp-3.11.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:88a12ad8ccf325a8a5ed80e6d7c3bdc247d66175afedbe104ee2aaca72960d8e", size = 1607446 }, - { url = "https://files.pythonhosted.org/packages/e0/d3/6c98fd87e638e51f074a3f2061e81fcb92123bcaf1439ac1b4a896446e40/aiohttp-3.11.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:0a6d3fbf2232e3a08c41eca81ae4f1dff3d8f1a30bae415ebe0af2d2458b8a33", size = 1628809 }, - { url = "https://files.pythonhosted.org/packages/a8/2e/86e6f85cbca02be042c268c3d93e7f35977a0e127de56e319bdd1569eaa8/aiohttp-3.11.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:84a585799c58b795573c7fa9b84c455adf3e1d72f19a2bf498b54a95ae0d194c", size = 1564291 }, - { url = "https://files.pythonhosted.org/packages/0b/8d/1f4ef3503b767717f65e1f5178b0173ab03cba1a19997ebf7b052161189f/aiohttp-3.11.11-cp310-cp310-win32.whl", hash = "sha256:bfde76a8f430cf5c5584553adf9926534352251d379dcb266ad2b93c54a29745", size = 416601 }, - { url = "https://files.pythonhosted.org/packages/ad/86/81cb83691b5ace3d9aa148dc42bacc3450d749fc88c5ec1973573c1c1779/aiohttp-3.11.11-cp310-cp310-win_amd64.whl", hash = "sha256:0fd82b8e9c383af11d2b26f27a478640b6b83d669440c0a71481f7c865a51da9", size = 442007 }, - { url = "https://files.pythonhosted.org/packages/34/ae/e8806a9f054e15f1d18b04db75c23ec38ec954a10c0a68d3bd275d7e8be3/aiohttp-3.11.11-cp311-cp311-macosx_10_9_universal2.whl", 
hash = "sha256:ba74ec819177af1ef7f59063c6d35a214a8fde6f987f7661f4f0eecc468a8f76", size = 708624 }, - { url = "https://files.pythonhosted.org/packages/c7/e0/313ef1a333fb4d58d0c55a6acb3cd772f5d7756604b455181049e222c020/aiohttp-3.11.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4af57160800b7a815f3fe0eba9b46bf28aafc195555f1824555fa2cfab6c1538", size = 468507 }, - { url = "https://files.pythonhosted.org/packages/a9/60/03455476bf1f467e5b4a32a465c450548b2ce724eec39d69f737191f936a/aiohttp-3.11.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffa336210cf9cd8ed117011085817d00abe4c08f99968deef0013ea283547204", size = 455571 }, - { url = "https://files.pythonhosted.org/packages/be/f9/469588603bd75bf02c8ffb8c8a0d4b217eed446b49d4a767684685aa33fd/aiohttp-3.11.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81b8fe282183e4a3c7a1b72f5ade1094ed1c6345a8f153506d114af5bf8accd9", size = 1685694 }, - { url = "https://files.pythonhosted.org/packages/88/b9/1b7fa43faf6c8616fa94c568dc1309ffee2b6b68b04ac268e5d64b738688/aiohttp-3.11.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af41686ccec6a0f2bdc66686dc0f403c41ac2089f80e2214a0f82d001052c03", size = 1743660 }, - { url = "https://files.pythonhosted.org/packages/2a/8b/0248d19dbb16b67222e75f6aecedd014656225733157e5afaf6a6a07e2e8/aiohttp-3.11.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70d1f9dde0e5dd9e292a6d4d00058737052b01f3532f69c0c65818dac26dc287", size = 1785421 }, - { url = "https://files.pythonhosted.org/packages/c4/11/f478e071815a46ca0a5ae974651ff0c7a35898c55063305a896e58aa1247/aiohttp-3.11.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:249cc6912405917344192b9f9ea5cd5b139d49e0d2f5c7f70bdfaf6b4dbf3a2e", size = 1675145 }, - { url = "https://files.pythonhosted.org/packages/26/5d/284d182fecbb5075ae10153ff7374f57314c93a8681666600e3a9e09c505/aiohttp-3.11.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0eb98d90b6690827dcc84c246811feeb4e1eea683c0eac6caed7549be9c84665", size = 1619804 }, - { url = "https://files.pythonhosted.org/packages/1b/78/980064c2ad685c64ce0e8aeeb7ef1e53f43c5b005edcd7d32e60809c4992/aiohttp-3.11.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec82bf1fda6cecce7f7b915f9196601a1bd1a3079796b76d16ae4cce6d0ef89b", size = 1654007 }, - { url = "https://files.pythonhosted.org/packages/21/8d/9e658d63b1438ad42b96f94da227f2e2c1d5c6001c9e8ffcc0bfb22e9105/aiohttp-3.11.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9fd46ce0845cfe28f108888b3ab17abff84ff695e01e73657eec3f96d72eef34", size = 1650022 }, - { url = "https://files.pythonhosted.org/packages/85/fd/a032bf7f2755c2df4f87f9effa34ccc1ef5cea465377dbaeef93bb56bbd6/aiohttp-3.11.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:bd176afcf8f5d2aed50c3647d4925d0db0579d96f75a31e77cbaf67d8a87742d", size = 1732899 }, - { url = "https://files.pythonhosted.org/packages/c5/0c/c2b85fde167dd440c7ba50af2aac20b5a5666392b174df54c00f888c5a75/aiohttp-3.11.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:ec2aa89305006fba9ffb98970db6c8221541be7bee4c1d027421d6f6df7d1ce2", size = 1755142 }, - { url = "https://files.pythonhosted.org/packages/bc/78/91ae1a3b3b3bed8b893c5d69c07023e151b1c95d79544ad04cf68f596c2f/aiohttp-3.11.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:92cde43018a2e17d48bb09c79e4d4cb0e236de5063ce897a5e40ac7cb4878773", size = 1692736 }, - { url = 
"https://files.pythonhosted.org/packages/77/89/a7ef9c4b4cdb546fcc650ca7f7395aaffbd267f0e1f648a436bec33c9b95/aiohttp-3.11.11-cp311-cp311-win32.whl", hash = "sha256:aba807f9569455cba566882c8938f1a549f205ee43c27b126e5450dc9f83cc62", size = 416418 }, - { url = "https://files.pythonhosted.org/packages/fc/db/2192489a8a51b52e06627506f8ac8df69ee221de88ab9bdea77aa793aa6a/aiohttp-3.11.11-cp311-cp311-win_amd64.whl", hash = "sha256:ae545f31489548c87b0cced5755cfe5a5308d00407000e72c4fa30b19c3220ac", size = 442509 }, - { url = "https://files.pythonhosted.org/packages/69/cf/4bda538c502f9738d6b95ada11603c05ec260807246e15e869fc3ec5de97/aiohttp-3.11.11-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e595c591a48bbc295ebf47cb91aebf9bd32f3ff76749ecf282ea7f9f6bb73886", size = 704666 }, - { url = "https://files.pythonhosted.org/packages/46/7b/87fcef2cad2fad420ca77bef981e815df6904047d0a1bd6aeded1b0d1d66/aiohttp-3.11.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3ea1b59dc06396b0b424740a10a0a63974c725b1c64736ff788a3689d36c02d2", size = 464057 }, - { url = "https://files.pythonhosted.org/packages/5a/a6/789e1f17a1b6f4a38939fbc39d29e1d960d5f89f73d0629a939410171bc0/aiohttp-3.11.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8811f3f098a78ffa16e0ea36dffd577eb031aea797cbdba81be039a4169e242c", size = 455996 }, - { url = "https://files.pythonhosted.org/packages/b7/dd/485061fbfef33165ce7320db36e530cd7116ee1098e9c3774d15a732b3fd/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7227b87a355ce1f4bf83bfae4399b1f5bb42e0259cb9405824bd03d2f4336a", size = 1682367 }, - { url = "https://files.pythonhosted.org/packages/e9/d7/9ec5b3ea9ae215c311d88b2093e8da17e67b8856673e4166c994e117ee3e/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d40f9da8cabbf295d3a9dae1295c69975b86d941bc20f0a087f0477fa0a66231", size = 1736989 }, - { url = "https://files.pythonhosted.org/packages/d6/fb/ea94927f7bfe1d86178c9d3e0a8c54f651a0a655214cce930b3c679b8f64/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffb3dc385f6bb1568aa974fe65da84723210e5d9707e360e9ecb51f59406cd2e", size = 1793265 }, - { url = "https://files.pythonhosted.org/packages/40/7f/6de218084f9b653026bd7063cd8045123a7ba90c25176465f266976d8c82/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8f5f7515f3552d899c61202d99dcb17d6e3b0de777900405611cd747cecd1b8", size = 1691841 }, - { url = "https://files.pythonhosted.org/packages/77/e2/992f43d87831cbddb6b09c57ab55499332f60ad6fdbf438ff4419c2925fc/aiohttp-3.11.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3499c7ffbfd9c6a3d8d6a2b01c26639da7e43d47c7b4f788016226b1e711caa8", size = 1619317 }, - { url = "https://files.pythonhosted.org/packages/96/74/879b23cdd816db4133325a201287c95bef4ce669acde37f8f1b8669e1755/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8e2bf8029dbf0810c7bfbc3e594b51c4cc9101fbffb583a3923aea184724203c", size = 1641416 }, - { url = "https://files.pythonhosted.org/packages/30/98/b123f6b15d87c54e58fd7ae3558ff594f898d7f30a90899718f3215ad328/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b6212a60e5c482ef90f2d788835387070a88d52cf6241d3916733c9176d39eab", size = 1646514 }, - { url = "https://files.pythonhosted.org/packages/d7/38/257fda3dc99d6978ab943141d5165ec74fd4b4164baa15e9c66fa21da86b/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash 
= "sha256:d119fafe7b634dbfa25a8c597718e69a930e4847f0b88e172744be24515140da", size = 1702095 }, - { url = "https://files.pythonhosted.org/packages/0c/f4/ddab089053f9fb96654df5505c0a69bde093214b3c3454f6bfdb1845f558/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:6fba278063559acc730abf49845d0e9a9e1ba74f85f0ee6efd5803f08b285853", size = 1734611 }, - { url = "https://files.pythonhosted.org/packages/c3/d6/f30b2bc520c38c8aa4657ed953186e535ae84abe55c08d0f70acd72ff577/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:92fc484e34b733704ad77210c7957679c5c3877bd1e6b6d74b185e9320cc716e", size = 1694576 }, - { url = "https://files.pythonhosted.org/packages/bc/97/b0a88c3f4c6d0020b34045ee6d954058abc870814f6e310c4c9b74254116/aiohttp-3.11.11-cp312-cp312-win32.whl", hash = "sha256:9f5b3c1ed63c8fa937a920b6c1bec78b74ee09593b3f5b979ab2ae5ef60d7600", size = 411363 }, - { url = "https://files.pythonhosted.org/packages/7f/23/cc36d9c398980acaeeb443100f0216f50a7cfe20c67a9fd0a2f1a5a846de/aiohttp-3.11.11-cp312-cp312-win_amd64.whl", hash = "sha256:1e69966ea6ef0c14ee53ef7a3d68b564cc408121ea56c0caa2dc918c1b2f553d", size = 437666 }, - { url = "https://files.pythonhosted.org/packages/49/d1/d8af164f400bad432b63e1ac857d74a09311a8334b0481f2f64b158b50eb/aiohttp-3.11.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:541d823548ab69d13d23730a06f97460f4238ad2e5ed966aaf850d7c369782d9", size = 697982 }, - { url = "https://files.pythonhosted.org/packages/92/d1/faad3bf9fa4bfd26b95c69fc2e98937d52b1ff44f7e28131855a98d23a17/aiohttp-3.11.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:929f3ed33743a49ab127c58c3e0a827de0664bfcda566108989a14068f820194", size = 460662 }, - { url = "https://files.pythonhosted.org/packages/db/61/0d71cc66d63909dabc4590f74eba71f91873a77ea52424401c2498d47536/aiohttp-3.11.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0882c2820fd0132240edbb4a51eb8ceb6eef8181db9ad5291ab3332e0d71df5f", size = 452950 }, - { url = "https://files.pythonhosted.org/packages/07/db/6d04bc7fd92784900704e16b745484ef45b77bd04e25f58f6febaadf7983/aiohttp-3.11.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b63de12e44935d5aca7ed7ed98a255a11e5cb47f83a9fded7a5e41c40277d104", size = 1665178 }, - { url = "https://files.pythonhosted.org/packages/54/5c/e95ade9ae29f375411884d9fd98e50535bf9fe316c9feb0f30cd2ac8f508/aiohttp-3.11.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa54f8ef31d23c506910c21163f22b124facb573bff73930735cf9fe38bf7dff", size = 1717939 }, - { url = "https://files.pythonhosted.org/packages/6f/1c/1e7d5c5daea9e409ed70f7986001b8c9e3a49a50b28404498d30860edab6/aiohttp-3.11.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a344d5dc18074e3872777b62f5f7d584ae4344cd6006c17ba12103759d407af3", size = 1775125 }, - { url = "https://files.pythonhosted.org/packages/5d/66/890987e44f7d2f33a130e37e01a164168e6aff06fce15217b6eaf14df4f6/aiohttp-3.11.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7fb429ab1aafa1f48578eb315ca45bd46e9c37de11fe45c7f5f4138091e2f1", size = 1677176 }, - { url = "https://files.pythonhosted.org/packages/8f/dc/e2ba57d7a52df6cdf1072fd5fa9c6301a68e1cd67415f189805d3eeb031d/aiohttp-3.11.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c341c7d868750e31961d6d8e60ff040fb9d3d3a46d77fd85e1ab8e76c3e9a5c4", size = 1603192 }, - { url = 
"https://files.pythonhosted.org/packages/6c/9e/8d08a57de79ca3a358da449405555e668f2c8871a7777ecd2f0e3912c272/aiohttp-3.11.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed9ee95614a71e87f1a70bc81603f6c6760128b140bc4030abe6abaa988f1c3d", size = 1618296 }, - { url = "https://files.pythonhosted.org/packages/56/51/89822e3ec72db352c32e7fc1c690370e24e231837d9abd056490f3a49886/aiohttp-3.11.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:de8d38f1c2810fa2a4f1d995a2e9c70bb8737b18da04ac2afbf3971f65781d87", size = 1616524 }, - { url = "https://files.pythonhosted.org/packages/2c/fa/e2e6d9398f462ffaa095e84717c1732916a57f1814502929ed67dd7568ef/aiohttp-3.11.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a9b7371665d4f00deb8f32208c7c5e652059b0fda41cf6dbcac6114a041f1cc2", size = 1685471 }, - { url = "https://files.pythonhosted.org/packages/ae/5f/6bb976e619ca28a052e2c0ca7b0251ccd893f93d7c24a96abea38e332bf6/aiohttp-3.11.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:620598717fce1b3bd14dd09947ea53e1ad510317c85dda2c9c65b622edc96b12", size = 1715312 }, - { url = "https://files.pythonhosted.org/packages/79/c1/756a7e65aa087c7fac724d6c4c038f2faaa2a42fe56dbc1dd62a33ca7213/aiohttp-3.11.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bf8d9bfee991d8acc72d060d53860f356e07a50f0e0d09a8dfedea1c554dd0d5", size = 1672783 }, - { url = "https://files.pythonhosted.org/packages/73/ba/a6190ebb02176c7f75e6308da31f5d49f6477b651a3dcfaaaca865a298e2/aiohttp-3.11.11-cp313-cp313-win32.whl", hash = "sha256:9d73ee3725b7a737ad86c2eac5c57a4a97793d9f442599bea5ec67ac9f4bdc3d", size = 410229 }, - { url = "https://files.pythonhosted.org/packages/b8/62/c9fa5bafe03186a0e4699150a7fed9b1e73240996d0d2f0e5f70f3fdf471/aiohttp-3.11.11-cp313-cp313-win_amd64.whl", hash = "sha256:c7a06301c2fb096bdb0bd25fe2011531c1453b9f2c163c8031600ec73af1cc99", size = 436081 }, +sdist = { url = "https://files.pythonhosted.org/packages/b3/3f/c4a667d184c69667b8f16e0704127efc5f1e60577df429382b4d95fd381e/aiohttp-3.11.13.tar.gz", hash = "sha256:8ce789231404ca8fff7f693cdce398abf6d90fd5dae2b1847477196c243b1fbb", size = 7674284 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/49/18bde4fbe1f98a12fb548741e65b27c5f0991c1af4ad15c86b537a4ce94a/aiohttp-3.11.13-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4fe27dbbeec445e6e1291e61d61eb212ee9fed6e47998b27de71d70d3e8777d", size = 708941 }, + { url = "https://files.pythonhosted.org/packages/99/24/417e5ab7074f5c97c9a794b6acdc59f47f2231d43e4d5cec06150035e61e/aiohttp-3.11.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9e64ca2dbea28807f8484c13f684a2f761e69ba2640ec49dacd342763cc265ef", size = 468823 }, + { url = "https://files.pythonhosted.org/packages/76/93/159d3a2561bc6d64d32f779d08b17570b1c5fe55b985da7e2df9b3a4ff8f/aiohttp-3.11.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9840be675de208d1f68f84d578eaa4d1a36eee70b16ae31ab933520c49ba1325", size = 455984 }, + { url = "https://files.pythonhosted.org/packages/18/bc/ed0dce45da90d4618ae14e677abbd704aec02e0f54820ea3815c156f0759/aiohttp-3.11.13-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28a772757c9067e2aee8a6b2b425d0efaa628c264d6416d283694c3d86da7689", size = 1585022 }, + { url = "https://files.pythonhosted.org/packages/75/10/c1e6d59030fcf04ccc253193607b5b7ced0caffd840353e109c51134e5e9/aiohttp-3.11.13-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b88aca5adbf4625e11118df45acac29616b425833c3be7a05ef63a6a4017bfdb", size = 1632761 }, + { url = 
"https://files.pythonhosted.org/packages/2d/8e/da1a20fbd2c961f824dc8efeb8d31c32ed4af761c87de83032ad4c4f5237/aiohttp-3.11.13-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce10ddfbe26ed5856d6902162f71b8fe08545380570a885b4ab56aecfdcb07f4", size = 1668720 }, + { url = "https://files.pythonhosted.org/packages/fa/9e/d0bbdc82236c3fe43b28b3338a13ef9b697b0f7a875b33b950b975cab1f6/aiohttp-3.11.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa48dac27f41b36735c807d1ab093a8386701bbf00eb6b89a0f69d9fa26b3671", size = 1589941 }, + { url = "https://files.pythonhosted.org/packages/ed/14/248ed0385baeee854e495ca7f33b48bb151d1b226ddbf1585bdeb2301fbf/aiohttp-3.11.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89ce611b1eac93ce2ade68f1470889e0173d606de20c85a012bfa24be96cf867", size = 1544978 }, + { url = "https://files.pythonhosted.org/packages/20/b0/b2ad9d24fe85db8330034ac45dde67799af40ca2363c0c9b30126e204ef3/aiohttp-3.11.13-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:78e4dd9c34ec7b8b121854eb5342bac8b02aa03075ae8618b6210a06bbb8a115", size = 1529641 }, + { url = "https://files.pythonhosted.org/packages/11/c6/03bdcb73a67a380b9593d52613ea88edd21ddc4ff5aaf06d4f807dfa2220/aiohttp-3.11.13-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:66047eacbc73e6fe2462b77ce39fc170ab51235caf331e735eae91c95e6a11e4", size = 1558027 }, + { url = "https://files.pythonhosted.org/packages/0d/ae/e45491c8ca4d1e30ff031fb25b44842e16c326f8467026c3eb2a9c167608/aiohttp-3.11.13-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5ad8f1c19fe277eeb8bc45741c6d60ddd11d705c12a4d8ee17546acff98e0802", size = 1536991 }, + { url = "https://files.pythonhosted.org/packages/19/89/10eb37351dd2b52928a54768a70a58171e43d7914685fe3feec8f681d905/aiohttp-3.11.13-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64815c6f02e8506b10113ddbc6b196f58dbef135751cc7c32136df27b736db09", size = 1607848 }, + { url = "https://files.pythonhosted.org/packages/a4/fd/492dec170df6ea57bef4bcd26374befdc170b10ba9ac7f51a0214943c20a/aiohttp-3.11.13-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:967b93f21b426f23ca37329230d5bd122f25516ae2f24a9cea95a30023ff8283", size = 1629208 }, + { url = "https://files.pythonhosted.org/packages/70/46/ef8a02cb171d4779ca1632bc8ac0c5bb89729b091e2a3f4b895d688146b5/aiohttp-3.11.13-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cf1f31f83d16ec344136359001c5e871915c6ab685a3d8dee38e2961b4c81730", size = 1564684 }, + { url = "https://files.pythonhosted.org/packages/8a/03/b1b552d1112b72da94bd1f9f5efb8adbcbbafaa8d495fc0924cd80493f17/aiohttp-3.11.13-cp310-cp310-win32.whl", hash = "sha256:00c8ac69e259c60976aa2edae3f13d9991cf079aaa4d3cd5a49168ae3748dee3", size = 416982 }, + { url = "https://files.pythonhosted.org/packages/b0/2d/b6be8e7905ceba64121268ce28208bafe508a742c1467bf636a41d152284/aiohttp-3.11.13-cp310-cp310-win_amd64.whl", hash = "sha256:90d571c98d19a8b6e793b34aa4df4cee1e8fe2862d65cc49185a3a3d0a1a3996", size = 442389 }, + { url = "https://files.pythonhosted.org/packages/3b/93/8e012ae31ff1bda5d43565d6f9e0bad325ba6f3f2d78f298bd39645be8a3/aiohttp-3.11.13-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b35aab22419ba45f8fc290d0010898de7a6ad131e468ffa3922b1b0b24e9d2e", size = 709013 }, + { url = "https://files.pythonhosted.org/packages/d8/be/fc7c436678ffe547d038319add8e44fd5e33090158752e5c480aed51a8d0/aiohttp-3.11.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:f81cba651db8795f688c589dd11a4fbb834f2e59bbf9bb50908be36e416dc760", size = 468896 }, + { url = "https://files.pythonhosted.org/packages/d9/1c/56906111ac9d4dab4baab43c89d35d5de1dbb38085150257895005b08bef/aiohttp-3.11.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f55d0f242c2d1fcdf802c8fabcff25a9d85550a4cf3a9cf5f2a6b5742c992839", size = 455968 }, + { url = "https://files.pythonhosted.org/packages/ba/16/229d36ed27c2bb350320364efb56f906af194616cc15fc5d87f3ef21dbef/aiohttp-3.11.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4bea08a6aad9195ac9b1be6b0c7e8a702a9cec57ce6b713698b4a5afa9c2e33", size = 1686082 }, + { url = "https://files.pythonhosted.org/packages/3a/44/78fd174509c56028672e5dfef886569cfa1fced0c5fd5c4480426db19ac9/aiohttp-3.11.13-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6070bcf2173a7146bb9e4735b3c62b2accba459a6eae44deea0eb23e0035a23", size = 1744056 }, + { url = "https://files.pythonhosted.org/packages/a3/11/325145c6dce8124b5caadbf763e908f2779c14bb0bc5868744d1e5cb9cb7/aiohttp-3.11.13-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:718d5deb678bc4b9d575bfe83a59270861417da071ab44542d0fcb6faa686636", size = 1785810 }, + { url = "https://files.pythonhosted.org/packages/95/de/faba18a0af09969e10eb89fdbd4cb968bea95e75449a7fa944d4de7d1d2f/aiohttp-3.11.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f6b2c5b4a4d22b8fb2c92ac98e0747f5f195e8e9448bfb7404cd77e7bfa243f", size = 1675540 }, + { url = "https://files.pythonhosted.org/packages/ea/53/0437c46e960b79ae3b1ff74c1ec12f04bf4f425bd349c8807acb38aae3d7/aiohttp-3.11.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:747ec46290107a490d21fe1ff4183bef8022b848cf9516970cb31de6d9460088", size = 1620210 }, + { url = "https://files.pythonhosted.org/packages/04/2f/31769ed8e29cc22baaa4005bd2749a7fd0f61ad0f86024d38dff8e394cf6/aiohttp-3.11.13-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:01816f07c9cc9d80f858615b1365f8319d6a5fd079cd668cc58e15aafbc76a54", size = 1654399 }, + { url = "https://files.pythonhosted.org/packages/b0/24/acb24571815b9a86a8261577c920fd84f819178c02a75b05b1a0d7ab83fb/aiohttp-3.11.13-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a08ad95fcbd595803e0c4280671d808eb170a64ca3f2980dd38e7a72ed8d1fea", size = 1660424 }, + { url = "https://files.pythonhosted.org/packages/91/45/30ca0c3ba5bbf7592eee7489eae30437736f7ff912eaa04cfdcf74edca8c/aiohttp-3.11.13-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c97be90d70f7db3aa041d720bfb95f4869d6063fcdf2bb8333764d97e319b7d0", size = 1650415 }, + { url = "https://files.pythonhosted.org/packages/86/8d/4d887df5e732cc70349243c2c9784911979e7bd71c06f9e7717b8a896f75/aiohttp-3.11.13-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ab915a57c65f7a29353c8014ac4be685c8e4a19e792a79fe133a8e101111438e", size = 1733292 }, + { url = "https://files.pythonhosted.org/packages/40/c9/bd950dac0a4c84d44d8da8d6e0f9c9511d45e02cf908a4e1fca591f46a25/aiohttp-3.11.13-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:35cda4e07f5e058a723436c4d2b7ba2124ab4e0aa49e6325aed5896507a8a42e", size = 1755536 }, + { url = "https://files.pythonhosted.org/packages/32/04/aafeda6b4ed3693a44bb89eae002ebaa74f88b2265a7e68f8a31c33330f5/aiohttp-3.11.13-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:af55314407714fe77a68a9ccaab90fdb5deb57342585fd4a3a8102b6d4370080", size = 1693126 }, + { url = 
"https://files.pythonhosted.org/packages/a1/4f/67729187e884b0f002a0317d2cc7962a5a0416cadc95ea88ba92477290d9/aiohttp-3.11.13-cp311-cp311-win32.whl", hash = "sha256:42d689a5c0a0c357018993e471893e939f555e302313d5c61dfc566c2cad6185", size = 416800 }, + { url = "https://files.pythonhosted.org/packages/29/23/d98d491ca073ee92cc6a741be97b6b097fb06dacc5f95c0c9350787db549/aiohttp-3.11.13-cp311-cp311-win_amd64.whl", hash = "sha256:b73a2b139782a07658fbf170fe4bcdf70fc597fae5ffe75e5b67674c27434a9f", size = 442891 }, + { url = "https://files.pythonhosted.org/packages/9a/a9/6657664a55f78db8767e396cc9723782ed3311eb57704b0a5dacfa731916/aiohttp-3.11.13-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2eabb269dc3852537d57589b36d7f7362e57d1ece308842ef44d9830d2dc3c90", size = 705054 }, + { url = "https://files.pythonhosted.org/packages/3b/06/f7df1fe062d16422f70af5065b76264f40b382605cf7477fa70553a9c9c1/aiohttp-3.11.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b77ee42addbb1c36d35aca55e8cc6d0958f8419e458bb70888d8c69a4ca833d", size = 464440 }, + { url = "https://files.pythonhosted.org/packages/22/3a/8773ea866735754004d9f79e501fe988bdd56cfac7fdecbc8de17fc093eb/aiohttp-3.11.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55789e93c5ed71832e7fac868167276beadf9877b85697020c46e9a75471f55f", size = 456394 }, + { url = "https://files.pythonhosted.org/packages/7f/61/8e2f2af2327e8e475a2b0890f15ef0bbfd117e321cce1e1ed210df81bbac/aiohttp-3.11.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c929f9a7249a11e4aa5c157091cfad7f49cc6b13f4eecf9b747104befd9f56f2", size = 1682752 }, + { url = "https://files.pythonhosted.org/packages/24/ed/84fce816bc8da39aa3f6c1196fe26e47065fea882b1a67a808282029c079/aiohttp-3.11.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d33851d85537bbf0f6291ddc97926a754c8f041af759e0aa0230fe939168852b", size = 1737375 }, + { url = "https://files.pythonhosted.org/packages/d9/de/35a5ba9e3d21ebfda1ebbe66f6cc5cbb4d3ff9bd6a03e5e8a788954f8f27/aiohttp-3.11.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9229d8613bd8401182868fe95688f7581673e1c18ff78855671a4b8284f47bcb", size = 1793660 }, + { url = "https://files.pythonhosted.org/packages/ff/fe/0f650a8c7c72c8a07edf8ab164786f936668acd71786dd5885fc4b1ca563/aiohttp-3.11.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:669dd33f028e54fe4c96576f406ebb242ba534dd3a981ce009961bf49960f117", size = 1692233 }, + { url = "https://files.pythonhosted.org/packages/a8/20/185378b3483f968c6303aafe1e33b0da0d902db40731b2b2b2680a631131/aiohttp-3.11.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c1b20a1ace54af7db1f95af85da530fe97407d9063b7aaf9ce6a32f44730778", size = 1619708 }, + { url = "https://files.pythonhosted.org/packages/a4/f9/d9c181750980b17e1e13e522d7e82a8d08d3d28a2249f99207ef5d8d738f/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5724cc77f4e648362ebbb49bdecb9e2b86d9b172c68a295263fa072e679ee69d", size = 1641802 }, + { url = "https://files.pythonhosted.org/packages/50/c7/1cb46b72b1788710343b6e59eaab9642bd2422f2d87ede18b1996e0aed8f/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:aa36c35e94ecdb478246dd60db12aba57cfcd0abcad43c927a8876f25734d496", size = 1684678 }, + { url = "https://files.pythonhosted.org/packages/71/87/89b979391de840c5d7c34e78e1148cc731b8aafa84b6a51d02f44b4c66e2/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_i686.whl", hash 
= "sha256:9b5b37c863ad5b0892cc7a4ceb1e435e5e6acd3f2f8d3e11fa56f08d3c67b820", size = 1646921 }, + { url = "https://files.pythonhosted.org/packages/a7/db/a463700ac85b72f8cf68093e988538faaf4e865e3150aa165cf80ee29d6e/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e06cf4852ce8c4442a59bae5a3ea01162b8fcb49ab438d8548b8dc79375dad8a", size = 1702493 }, + { url = "https://files.pythonhosted.org/packages/b8/32/1084e65da3adfb08c7e1b3e94f3e4ded8bd707dee265a412bc377b7cd000/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5194143927e494616e335d074e77a5dac7cd353a04755330c9adc984ac5a628e", size = 1735004 }, + { url = "https://files.pythonhosted.org/packages/a0/bb/a634cbdd97ce5d05c2054a9a35bfc32792d7e4f69d600ad7e820571d095b/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afcb6b275c2d2ba5d8418bf30a9654fa978b4f819c2e8db6311b3525c86fe637", size = 1694964 }, + { url = "https://files.pythonhosted.org/packages/fd/cf/7d29db4e5c28ec316e5d2ac9ac9df0e2e278e9ea910e5c4205b9b64c2c42/aiohttp-3.11.13-cp312-cp312-win32.whl", hash = "sha256:7104d5b3943c6351d1ad7027d90bdd0ea002903e9f610735ac99df3b81f102ee", size = 411746 }, + { url = "https://files.pythonhosted.org/packages/65/a9/13e69ad4fd62104ebd94617f9f2be58231b50bb1e6bac114f024303ac23b/aiohttp-3.11.13-cp312-cp312-win_amd64.whl", hash = "sha256:47dc018b1b220c48089b5b9382fbab94db35bef2fa192995be22cbad3c5730c8", size = 438078 }, + { url = "https://files.pythonhosted.org/packages/87/dc/7d58d33cec693f1ddf407d4ab975445f5cb507af95600f137b81683a18d8/aiohttp-3.11.13-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9862d077b9ffa015dbe3ce6c081bdf35135948cb89116e26667dd183550833d1", size = 698372 }, + { url = "https://files.pythonhosted.org/packages/84/e7/5d88514c9e24fbc8dd6117350a8ec4a9314f4adae6e89fe32e3e639b0c37/aiohttp-3.11.13-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fbfef0666ae9e07abfa2c54c212ac18a1f63e13e0760a769f70b5717742f3ece", size = 461057 }, + { url = "https://files.pythonhosted.org/packages/96/1a/8143c48a929fa00c6324f85660cb0f47a55ed9385f0c1b72d4b8043acf8e/aiohttp-3.11.13-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:93a1f7d857c4fcf7cabb1178058182c789b30d85de379e04f64c15b7e88d66fb", size = 453340 }, + { url = "https://files.pythonhosted.org/packages/2f/1c/b8010e4d65c5860d62681088e5376f3c0a940c5e3ca8989cae36ce8c3ea8/aiohttp-3.11.13-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba40b7ae0f81c7029583a338853f6607b6d83a341a3dcde8bed1ea58a3af1df9", size = 1665561 }, + { url = "https://files.pythonhosted.org/packages/19/ed/a68c3ab2f92fdc17dfc2096117d1cfaa7f7bdded2a57bacbf767b104165b/aiohttp-3.11.13-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5b95787335c483cd5f29577f42bbe027a412c5431f2f80a749c80d040f7ca9f", size = 1718335 }, + { url = "https://files.pythonhosted.org/packages/27/4f/3a0b6160ce663b8ebdb65d1eedff60900cd7108838c914d25952fe2b909f/aiohttp-3.11.13-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7d474c5c1f0b9405c1565fafdc4429fa7d986ccbec7ce55bc6a330f36409cad", size = 1775522 }, + { url = "https://files.pythonhosted.org/packages/0b/58/9da09291e19696c452e7224c1ce8c6d23a291fe8cd5c6b247b51bcda07db/aiohttp-3.11.13-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e83fb1991e9d8982b3b36aea1e7ad27ea0ce18c14d054c7a404d68b0319eebb", size = 1677566 }, + { url = 
"https://files.pythonhosted.org/packages/3d/18/6184f2bf8bbe397acbbbaa449937d61c20a6b85765f48e5eddc6d84957fe/aiohttp-3.11.13-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4586a68730bd2f2b04a83e83f79d271d8ed13763f64b75920f18a3a677b9a7f0", size = 1603590 }, + { url = "https://files.pythonhosted.org/packages/04/94/91e0d1ca0793012ccd927e835540aa38cca98bdce2389256ab813ebd64a3/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fe4eb0e7f50cdb99b26250d9328faef30b1175a5dbcfd6d0578d18456bac567", size = 1618688 }, + { url = "https://files.pythonhosted.org/packages/71/85/d13c3ea2e48a10b43668305d4903838834c3d4112e5229177fbcc23a56cd/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2a8a6bc19818ac3e5596310ace5aa50d918e1ebdcc204dc96e2f4d505d51740c", size = 1658053 }, + { url = "https://files.pythonhosted.org/packages/12/6a/3242a35100de23c1e8d9e05e8605e10f34268dee91b00d9d1e278c58eb80/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f27eec42f6c3c1df09cfc1f6786308f8b525b8efaaf6d6bd76c1f52c6511f6a", size = 1616917 }, + { url = "https://files.pythonhosted.org/packages/f5/b3/3f99b6f0a9a79590a7ba5655dbde8408c685aa462247378c977603464d0a/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:2a4a13dfbb23977a51853b419141cd0a9b9573ab8d3a1455c6e63561387b52ff", size = 1685872 }, + { url = "https://files.pythonhosted.org/packages/8a/2e/99672181751f280a85e24fcb9a2c2469e8b1a0de1746b7b5c45d1eb9a999/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:02876bf2f69b062584965507b07bc06903c2dc93c57a554b64e012d636952654", size = 1715719 }, + { url = "https://files.pythonhosted.org/packages/7a/cd/68030356eb9a7d57b3e2823c8a852709d437abb0fbff41a61ebc351b7625/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b992778d95b60a21c4d8d4a5f15aaab2bd3c3e16466a72d7f9bfd86e8cea0d4b", size = 1673166 }, + { url = "https://files.pythonhosted.org/packages/03/61/425397a9a2839c609d09fdb53d940472f316a2dbeaa77a35b2628dae6284/aiohttp-3.11.13-cp313-cp313-win32.whl", hash = "sha256:507ab05d90586dacb4f26a001c3abf912eb719d05635cbfad930bdbeb469b36c", size = 410615 }, + { url = "https://files.pythonhosted.org/packages/9c/54/ebb815bc0fe057d8e7a11c086c479e972e827082f39aeebc6019dd4f0862/aiohttp-3.11.13-cp313-cp313-win_amd64.whl", hash = "sha256:5ceb81a4db2decdfa087381b5fc5847aa448244f973e5da232610304e199e7b2", size = 436452 }, +] + +[[package]] +name = "aioice" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "ifaddr", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/b6/e2b0e48ccb5b04fe29265e93f14a0915f416e359c897ae87d570566c430b/aioice-0.9.0.tar.gz", hash = "sha256:fc2401b1c4b6e19372eaaeaa28fd1bd9cbf6b0e412e48625297c53b495eebd1e", size = 40324 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/35/d21e48d3ba25d32aba5d142d54c4491376c659dd74d052a30dd25198007b/aioice-0.9.0-py3-none-any.whl", hash = "sha256:b609597a3a5a611e0004ff04772e16aceb881d51c25c0afc4ceac05d5e50024e", size = 24177 }, +] + +[[package]] +name = "aiortc" +version = "1.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aioice", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform 
== 'win32'" }, + { name = "av", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "cffi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "cryptography", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-crc32c", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyee", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pylibsrtp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyopenssl", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/f8/408e092748521889c9d33dddcef920afd9891cf6db4615ba6b6bfe114ff8/aiortc-1.10.1.tar.gz", hash = "sha256:64926ad86bde20c1a4dacb7c3a164e57b522606b70febe261fada4acf79641b5", size = 1179406 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/6b/74547a30d1ddcc81f905ef4ff7fcc2c89b7482cb2045688f2aaa4fa918aa/aiortc-1.10.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3bef536f38394b518aefae9dbf9cdd08f39e4c425f316f9692f0d8dc724810bd", size = 1218457 }, + { url = "https://files.pythonhosted.org/packages/46/92/b4ccf39cd18e366ace2a11dc7d98ed55967b4b325707386b5788149db15e/aiortc-1.10.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8842c02e38513d9432ef22982572833487bb015f23348fa10a690616dbf55143", size = 898855 }, + { url = "https://files.pythonhosted.org/packages/a4/e9/2676de48b493787d8b03129713e6bb2dfbacca2a565090f2a89cbad71f96/aiortc-1.10.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:954a420de01c0bf6b07a0c58b662029b1c4204ddbd8f5c4162bbdebd43f882b1", size = 1750403 }, + { url = "https://files.pythonhosted.org/packages/c3/9d/ab6d09183cdaf5df060923d9bd5c9ed5fb1802661d9401dba35f3c85a57b/aiortc-1.10.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7c0d46fb30307a9d7deb4b7d66f0b0e73b77a7221b063fb6dc78821a5d2aa1e", size = 1867886 }, + { url = "https://files.pythonhosted.org/packages/c2/71/0b5666e6b965dbd9a7f331aa827a6c3ab3eb4d582fefb686a7f4227b7954/aiortc-1.10.1-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89582f6923046f79f15d9045f432bc78191eacc95f6bed18714e86ec935188d9", size = 1893709 }, + { url = "https://files.pythonhosted.org/packages/9d/0a/8c0c78fad79ef595a0ed6e2ab413900e6bd0eac65fc5c31c9d8736bff909/aiortc-1.10.1-cp39-abi3-win32.whl", hash = "sha256:d1cbe87f740b33ffaa8e905f21092773e74916be338b64b81c8b79af4c3847eb", size = 923265 }, + { url = "https://files.pythonhosted.org/packages/73/12/a27dd588a4988021da88cb4d338d8ee65ac097afc14e9193ab0be4a48790/aiortc-1.10.1-cp39-abi3-win_amd64.whl", hash = "sha256:c9a5a0b23f8a77540068faec8837fa0a65b0396c20f09116bdb874b75e0b6abe", size = 1009488 }, ] [[package]] @@ -148,7 +190,7 @@ wheels = [ [[package]] name = "anthropic" -version = "0.43.0" +version = "0.47.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -159,9 +201,9 @@ dependencies = [ { name = "sniffio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or 
sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/973d2ac6c9f7d1be41829c7b878cbe399385b25cc2ebe80ad0eec9999b8c/anthropic-0.43.0.tar.gz", hash = "sha256:06801f01d317a431d883230024318d48981758058bf7e079f33fb11f64b5a5c1", size = 194826 } +sdist = { url = "https://files.pythonhosted.org/packages/64/65/175bf024bd9866ef96470620e164dcf8c3e0a2892178e59d1532465c8315/anthropic-0.47.2.tar.gz", hash = "sha256:452f4ca0c56ffab8b6ce9928bf8470650f88106a7001b250895eb65c54cfa44c", size = 208066 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/88/ded3ba979a2218a448cbc1a1e762d998b92f30529452c5104b35b6cb71f8/anthropic-0.43.0-py3-none-any.whl", hash = "sha256:f748a703f77b3244975e1aace3a935840dc653a4714fb6bba644f97cc76847b4", size = 207867 }, + { url = "https://files.pythonhosted.org/packages/92/ad/feddd3ed83804b7f05c90b343e2d9df8f4a28028d6820c1a034de79dcdab/anthropic-0.47.2-py3-none-any.whl", hash = "sha256:61b712a56308fce69f04d92ba0230ab2bc187b5bce17811d400843a8976bb67f", size = 239536 }, ] [[package]] @@ -220,11 +262,11 @@ wheels = [ [[package]] name = "attrs" -version = "24.3.0" +version = "25.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/48/c8/6260f8ccc11f0917360fc0da435c5c9c7504e3db174d5a12a1494887b045/attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff", size = 805984 } +sdist = { url = "https://files.pythonhosted.org/packages/49/7c/fdf464bcc51d23881d110abd74b512a42b3d5d376a55a831b44c603ae17f/attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e", size = 810562 } wheels = [ - { url = "https://files.pythonhosted.org/packages/89/aa/ab0f7891a01eeb2d2e338ae8fecbe57fcebea1a24dbb64d45801bfab481d/attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308", size = 63397 }, + { url = "https://files.pythonhosted.org/packages/fc/30/d4986a882011f9df997a55e6becd864812ccfcd821d64aac8570ee39f719/attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a", size = 63152 }, ] [[package]] @@ -239,18 +281,91 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/87/1f/bc95e43ffb57c05b8efcc376dd55a0240bf58f47ddf5a0f92452b6457b75/Authlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:d35800b973099bbadc49b42b256ecb80041ad56b7fe1216a362c7943c088f377", size = 223827 }, ] +[[package]] +name = "autogen-agentchat" +version = "0.2.40" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "diskcache", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "docker", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "flaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "openai", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "python-dotenv", marker = "sys_platform == 'darwin' or sys_platform == 
'linux' or sys_platform == 'win32'" }, + { name = "termcolor", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tiktoken", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2a/82/91a2a38a7188c216cf6c2ff1177b47eb0ec9451a5f60b83dc5f1669ae5f1/autogen-agentchat-0.2.40.tar.gz", hash = "sha256:bfdd25ab63fb75a701095315d0d7214f1616411b9edbcdf6183da35a956cc42e", size = 335172 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/27/198414c4c24e886770a03e0bed349582c40e3bfc2ec327034cc5d22c185f/autogen_agentchat-0.2.40-py3-none-any.whl", hash = "sha256:03f11ab89442a3b2408e7e46aa4a66d0be44e6f4447467efbb3ef4e35940176e", size = 382317 }, +] + +[[package]] +name = "av" +version = "13.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0c/9d/486d31e76784cc0ad943f420c5e05867263b32b37e2f4b0f7f22fdc1ca3a/av-13.1.0.tar.gz", hash = "sha256:d3da736c55847d8596eb8c26c60e036f193001db3bc5c10da8665622d906c17e", size = 3957908 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/d6/1c4a8056a88e006681ac6a3d5ac6082f0a48e52bd565bfd350bfc7c6a37d/av-13.1.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a2af44fae6d16c3a40dd1c85bda41b449be08a2c172d8f44fb63395ccf6e6fb4", size = 24260057 }, + { url = "https://files.pythonhosted.org/packages/23/be/cf89545117172d75a0c48066e6f368403237df623b2e3e93590fdeaef8bf/av-13.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0fea71fe06fd0dfe90a089200eb6468034797f860a321fa2d62e07d619c74749", size = 19475039 }, + { url = "https://files.pythonhosted.org/packages/4b/d0/8e261547f7763f320a4f5f68e139fea5f31814fddfe5503c8372123ebb8b/av-13.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:756997810dcca11811b598d209561cabd2071e5b472b867c295bb3e7022eecde", size = 31289005 }, + { url = "https://files.pythonhosted.org/packages/82/a3/00cacfe80ebbe0664876dd26558fb23b65d034ffd2ce0ddb12f1c746e7cb/av-13.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f432102edaec4ee90087a675acf486bff0c81b47d98b85eb3218afe84575b60", size = 30705668 }, + { url = "https://files.pythonhosted.org/packages/d7/37/faa98dca1a8f6c2e3f4ad3a935037872aff49a679b76918c5258cf5a1c70/av-13.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d568c4d7a36df52c0774d52e6d730148775ead16daed81c10dafc2569b5a38d", size = 33122108 }, + { url = "https://files.pythonhosted.org/packages/25/81/c3a842477b558e23c7249f81cf723764c193636b6523267c2c02321da6b0/av-13.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:aa6f76e7c5e77bc5f99a27ada29f78c64fd4e0d42da2c4d203badc650bc0a686", size = 25775920 }, + { url = "https://files.pythonhosted.org/packages/39/54/c4227080c9700384db90072ace70d89b6a288b3748bd2ec0e32580a49e7f/av-13.1.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:867385e6701464a5c95903e24d2e0df1c7e0dbf211ed91d0ce639cd687373e10", size = 24255112 }, + { url = "https://files.pythonhosted.org/packages/32/4a/eb9348231655ca99b200b380f4edbceff7358c927a285badcc84b18fb1c9/av-13.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cb7a3f319401a46b0017771268ff4928501e77cf00b1a2aa0721e20b2fd1146e", size = 19467930 }, + { url = "https://files.pythonhosted.org/packages/14/c7/48c80252bdbc3a75a54dd205a7fab8f613914009b9e5416202757208e040/av-13.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ad904f860147bceaca65b0d3174a8153f35c570d465161d210f1879970b15559", size = 32207671 }, + { url = "https://files.pythonhosted.org/packages/f9/66/3332c7fa8c43b65680a94f279ea3e832b5500de3a1392bac6112881e984b/av-13.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a906e017b29d0eb80d9ccf7a98d19268122da792dbb68eb741cfebba156e6aed", size = 31520911 }, + { url = "https://files.pythonhosted.org/packages/e5/bb/2e03acb9b27591d97f700a3a6c27cfd1bc53fa148177747eda8a70cca1e9/av-13.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ce894d7847897da7be63277a0875bd93c51327134ac226c67978de014c7979f", size = 34048399 }, + { url = "https://files.pythonhosted.org/packages/85/44/527aa3b65947d42cfe829326026edf0cd1a8c459390076034be275616c36/av-13.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:384bcdb5fc3238a263a5a25cc9efc690859fa4148cc4b07e00fae927178db22a", size = 25779569 }, + { url = "https://files.pythonhosted.org/packages/9b/aa/4bdd8ce59173574fc6e0c282c71ee6f96fca82643d97bf172bc4cb5a5674/av-13.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:261dbc3f4b55f4f8f3375b10b2258fca7f2ab7a6365c01bc65e77a0d5327a195", size = 24268674 }, + { url = "https://files.pythonhosted.org/packages/17/b4/b267dd5bad99eed49ec6731827c6bcb5ab03864bf732a7ebb81e3df79911/av-13.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83d259ef86b9054eb914bc7c6a7f6092a6d75cb939295e70ee979cfd92a67b99", size = 19475617 }, + { url = "https://files.pythonhosted.org/packages/68/32/4209e51f54d7b54a1feb576d309c671ed1ff437b54fcc4ec68c239199e0a/av-13.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3b4d3ca159eceab97e3c0fb08fe756520fb95508417f76e48198fda2a5b0806", size = 32468873 }, + { url = "https://files.pythonhosted.org/packages/b6/d8/c174da5f06b24f3c9e36f91fd02a7411c39da9ce792c17964260d4be675e/av-13.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40e8f757e373b73a2dc4640852a00cce4a4a92ef19b2e642a96d6994cd1fffbf", size = 31818484 }, + { url = "https://files.pythonhosted.org/packages/7f/22/0dd8d1d5cad415772bb707d16aea8b81cf75d340d11d3668eea43468c730/av-13.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8aaec2c0bfd024359db3821d679009d4e637e1bee0321d20f61c54ed6b20f41", size = 34398652 }, + { url = "https://files.pythonhosted.org/packages/7b/ff/48fa68888b8d5bae36d915556ff18f9e5fdc6b5ff5ae23dc4904c9713168/av-13.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:5ea0deab0e6a739cb742fba2a3983d8102f7516a3cdf3c46669f3cac0ed1f351", size = 25781343 }, + { url = "https://files.pythonhosted.org/packages/82/6e/cdce12e534570df37d3fdcb3a74851d39e9ab79d388f3174dea9785a011a/av-13.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:47642ebaebfe20519b2391bd5b7c38b596efcd052bfd09c8d33058f94ddd0fd6", size = 24229340 }, + { url = "https://files.pythonhosted.org/packages/7c/88/5359aeada9ea509426f2db63b6531833824a1b02470667b103479ddea7ae/av-13.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2f079c2daa3ae06557b3f6e9bed4fb9c876e8012175bec645ccd007199a302db", size = 19436445 }, + { url = "https://files.pythonhosted.org/packages/b4/d4/64995e5b800476c86dae4ea1444a0eac44e2c4985fac6401b08401e2df11/av-13.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f0de8252deeeb1887637e88d4d9d18514e5cfe276bdb9e6ca8e9eef89d1667a", size = 32120549 }, + { url = 
"https://files.pythonhosted.org/packages/68/76/9910694cf87d2d308d851f5b2b5c5b20f7f55411f596e2c158fb13bf84a3/av-13.1.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ad0024f4def11b0cedfeee478fa6c6fd7ed3955e13387e0f27261fdda6121b4", size = 31495305 }, + { url = "https://files.pythonhosted.org/packages/6a/a8/cd92de947b9595a0eb2c64e6f7ba295aac2687972050ae092173c2f6ea0c/av-13.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb88e2590eaed45233eb117f1dfab1a43ed9a997b2c46da9f08468dd00f14895", size = 34065325 }, + { url = "https://files.pythonhosted.org/packages/9d/d0/9869fcbd66422df2033d4b78a663e3c64aa6fe7eb9189c811d60f69d9871/av-13.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:c927e4fa4f6aeed4340b3e3b16b237d7cb743e5c1a55b92307407590ca4112aa", size = 25754728 }, + { url = "https://files.pythonhosted.org/packages/63/62/09859d91bc2309918d548ac4585973c53e7db27010c432d050f02206f9bd/av-13.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fc5118f78ee712b2c396f345e4c51e60e61e28f1f606adbd4060c4dc44b0b652", size = 23861117 }, + { url = "https://files.pythonhosted.org/packages/c7/43/f186435a0acad3a2bdf271ce51d3af97ac3153a410e54a623529d39a1818/av-13.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:81bcbd3545e523e7a350613be1866b515a5ee3fafa1d9d257d7ed02531fc2636", size = 19115008 }, + { url = "https://files.pythonhosted.org/packages/31/eb/a1b4af95a615ba73dfc3cfcb9387e40826c92d7d6d383a1b68685a7ef920/av-13.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83b2bc641e8e16bbf058de35f1ba79ebed358ac6fe3cb5a665366294774fdb18", size = 22852637 }, + { url = "https://files.pythonhosted.org/packages/0b/a6/94a34aa672af7fef2939e4a5d6c4c6c28e33da0c623aaa9485d977eeaa95/av-13.1.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d956ae3e68fabdc45eb2b986c2e842a31df084d8cfc90336509f07a727a9df62", size = 22703888 }, + { url = "https://files.pythonhosted.org/packages/b9/69/08a72ceed2c8a6e689dea2ef8e941df9469cbe144a600b83d45f821477fc/av-13.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ef076fcdf172aafcb21ea3ef7bd68cc9151b050016a8ace13b3dae3d08a4427", size = 24657784 }, + { url = "https://files.pythonhosted.org/packages/b7/8c/c20894580a4341a76c7c74b59c43e26e6652b0fc60f7248f2c1bc5fdbb5e/av-13.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bbf90397b7a466ff2879bd0944d55f796ad76c073fce50304315b83ad00113bd", size = 25562492 }, +] + [[package]] name = "azure-ai-inference" -version = "1.0.0b7" +version = "1.0.0b9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "isodate", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4e/6a/ed85592e5c64e08c291992f58b1a94dab6869f28fb0f40fd753dced73ba6/azure_ai_inference-1.0.0b9.tar.gz", hash = "sha256:1feb496bd84b01ee2691befc04358fa25d7c344d8288e99364438859ad7cd5a4", size = 182408 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4f/0f/27520da74769db6e58327d96c98e7b9a07ce686dff582c9a5ec60b03f9dd/azure_ai_inference-1.0.0b9-py3-none-any.whl", hash = 
"sha256:49823732e674092dad83bb8b0d1b65aa73111fab924d61349eb2a8cdc0493990", size = 124885 }, +] + +[[package]] +name = "azure-ai-projects" +version = "1.0.0b6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "azure-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "isodate", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/af/37/233eee0bebbf631d2f911a9f1ebbc3784b100d9bfb84efc275e71c1ea636/azure_ai_inference-1.0.0b7.tar.gz", hash = "sha256:bd912f71f7f855036ca46c9a21439f290eed5e61da418fd26bbb32e3c68bcce3", size = 175883 } +sdist = { url = "https://files.pythonhosted.org/packages/1f/72/5f9a78c913af66c55222ff912227b494707c4adfbdca27a78c3687a1b8ba/azure_ai_projects-1.0.0b6.tar.gz", hash = "sha256:ce6cfb2403eeb1a80e5dd84193fb2864953cd95a351f3d4572a5451bbb4c30d2", size = 298737 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cd/b6/5ba830eddc59f820c654694d476c14a3dd9c1f828ff9b48eb8b21dfd5f01/azure_ai_inference-1.0.0b7-py3-none-any.whl", hash = "sha256:59bb6a9ee62bd7654a69ca2bf12fe9335d7045df95b491cb8b5f9e3791c86175", size = 123030 }, + { url = "https://files.pythonhosted.org/packages/9d/d9/14b31fc773072b63493d55a1a5b60e656f11aeea2b603fef2eb567686d96/azure_ai_projects-1.0.0b6-py3-none-any.whl", hash = "sha256:b0689825065648b54b4405e9edd22b1de3ea0dfc1ca3baf99db5173fd6208692", size = 187221 }, ] [[package]] @@ -304,7 +419,7 @@ wheels = [ [[package]] name = "azure-identity" -version = "1.19.0" +version = "1.20.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "azure-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -313,9 +428,9 @@ dependencies = [ { name = "msal-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/aa/91/cbaeff9eb0b838f0d35b4607ac1c6195c735c8eb17db235f8f60e622934c/azure_identity-1.19.0.tar.gz", hash = "sha256:500144dc18197d7019b81501165d4fa92225f03778f17d7ca8a2a180129a9c83", size = 263058 } +sdist = { url = "https://files.pythonhosted.org/packages/ee/89/7d170fab0b85d9650cdb7abda087e849644beb52bd28f6804620dd0cecd9/azure_identity-1.20.0.tar.gz", hash = "sha256:40597210d56c83e15031b0fe2ea3b26420189e1e7f3e20bdbb292315da1ba014", size = 264447 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/d5/3995ed12f941f4a41a273d9b1709282e825ef87ed8eab3833038fee54d59/azure_identity-1.19.0-py3-none-any.whl", hash = "sha256:e3f6558c181692d7509f09de10cca527c7dce426776454fb97df512a46527e81", size = 187587 }, + { url = "https://files.pythonhosted.org/packages/de/aa/819513c1dbef990af690bb5eefb5e337f8698d75dfdb7302528f50ce1994/azure_identity-1.20.0-py3-none-any.whl", hash = "sha256:5f23fc4889a66330e840bd78830287e14f3761820fe3c5f77ac875edcb9ec998", size = 188243 }, ] [[package]] @@ -376,14 +491,15 @@ wheels = [ [[package]] name = "beautifulsoup4" -version = "4.12.3" +version = "4.13.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "soupsieve", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 
'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b3/ca/824b1195773ce6166d388573fc106ce56d4a805bd7427b624e063596ec58/beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051", size = 581181 } +sdist = { url = "https://files.pythonhosted.org/packages/f0/3c/adaf39ce1fb4afdd21b611e3d530b183bb7759c9b673d60db0e347fd4439/beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b", size = 619516 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/fe/e8c672695b37eecc5cbf43e1d0638d88d66ba3a44c4d321c796f4e59167f/beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed", size = 147925 }, + { url = "https://files.pythonhosted.org/packages/f9/49/6abb616eb3cbab6a7cca303dc02fdf3836de2e0b834bf966a7f5271a34d8/beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16", size = 186015 }, ] [[package]] @@ -414,30 +530,30 @@ wheels = [ [[package]] name = "boto3" -version = "1.36.1" +version = "1.37.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "jmespath", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "s3transfer", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bf/04/0c6cea060653eee75f4348152dfc0aa0b241f7d1f99a530079ee44d61e4b/boto3-1.36.1.tar.gz", hash = "sha256:258ab77225a81d3cf3029c9afe9920cd9dec317689dfadec6f6f0a23130bb60a", size = 110959 } +sdist = { url = "https://files.pythonhosted.org/packages/b3/75/afe885605fef5b624d68869864f9af2595ff0b646377e1fdc9bba35aa49b/boto3-1.37.2.tar.gz", hash = "sha256:d64491bd4142c2c6dfe44479bf89c4ab7fa8d00210c2aaa7361931e61898b608", size = 111201 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/ed/464e1df3901fbfedd5a0786e551240216f0c867440fa6156595178227b3f/boto3-1.36.1-py3-none-any.whl", hash = "sha256:eb21380d73fec6645439c0d802210f72a0cdb3295b02953f246ff53f512faa8f", size = 139163 }, + { url = "https://files.pythonhosted.org/packages/a7/5d/e4d5ed68b3b2f421754cc5ea0b9276f0c1568cd1a45dd7df3aadce028f60/boto3-1.37.2-py3-none-any.whl", hash = "sha256:e58136d52d79425ce26c3c1578bf94d4b2e91ead55fed9f6950406ee9713e6af", size = 139345 }, ] [[package]] name = "botocore" -version = "1.36.1" +version = "1.37.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jmespath", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/39/aa/556720b3ee9629b7c4366b5a0d9797a84e83a97f78435904cbb9bdc41939/botocore-1.36.1.tar.gz", hash = "sha256:f789a6f272b5b3d8f8756495019785e33868e5e00dd9662a3ee7959ac939bb12", size = 13498150 } +sdist = { url = 
"https://files.pythonhosted.org/packages/39/20/5f8f74ac3db553f713d640d0af4131162846123c955ac7118e727ef7441b/botocore-1.37.2.tar.gz", hash = "sha256:3f460f3c32cd6d747d5897a9cbde011bf1715abc7bf0a6ea6fdb0b812df63287", size = 13568710 } wheels = [ - { url = "https://files.pythonhosted.org/packages/be/bb/5431f12e2dadd881fd023fb57e7e3ab82f7b697c38dc837fc8d70cca51bd/botocore-1.36.1-py3-none-any.whl", hash = "sha256:dec513b4eb8a847d79bbefdcdd07040ed9d44c20b0001136f0890a03d595705a", size = 13297686 }, + { url = "https://files.pythonhosted.org/packages/85/27/c579234944cb1e9a34e7803b3a45efa309d44280ba5e2b1069d604b2b266/botocore-1.37.2-py3-none-any.whl", hash = "sha256:5f59b966f3cd0c8055ef6f7c2600f7db5f8218071d992e5f95da3f9156d4370f", size = 13333985 }, ] [[package]] @@ -458,20 +574,20 @@ wheels = [ [[package]] name = "cachetools" -version = "5.5.0" +version = "5.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c3/38/a0f315319737ecf45b4319a8cd1f3a908e29d9277b46942263292115eee7/cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a", size = 27661 } +sdist = { url = "https://files.pythonhosted.org/packages/6c/81/3747dad6b14fa2cf53fcf10548cf5aea6913e96fab41a3c198676f8948a5/cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4", size = 28380 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/07/14f8ad37f2d12a5ce41206c21820d8cb6561b728e51fad4530dff0552a67/cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292", size = 9524 }, + { url = "https://files.pythonhosted.org/packages/72/76/20fa66124dbe6be5cafeb312ece67de6b61dd91a0247d1ea13db4ebb33c2/cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a", size = 10080 }, ] [[package]] name = "certifi" -version = "2024.12.14" +version = "2025.1.31" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/bd/1d41ee578ce09523c81a15426705dd20969f5abf006d1afe8aeff0dd776a/certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db", size = 166010 } +sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927 }, + { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393 }, ] [[package]] @@ -742,61 +858,62 @@ wheels = [ [[package]] name = "coverage" -version = "7.6.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/84/ba/ac14d281f80aab516275012e8875991bb06203957aa1e19950139238d658/coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23", size = 803868 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/c5/12/2a2a923edf4ddabdffed7ad6da50d96a5c126dae7b80a33df7310e329a1e/coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78", size = 207982 }, - { url = "https://files.pythonhosted.org/packages/ca/49/6985dbca9c7be3f3cb62a2e6e492a0c88b65bf40579e16c71ae9c33c6b23/coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c", size = 208414 }, - { url = "https://files.pythonhosted.org/packages/35/93/287e8f1d1ed2646f4e0b2605d14616c9a8a2697d0d1b453815eb5c6cebdb/coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a", size = 236860 }, - { url = "https://files.pythonhosted.org/packages/de/e1/cfdb5627a03567a10031acc629b75d45a4ca1616e54f7133ca1fa366050a/coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165", size = 234758 }, - { url = "https://files.pythonhosted.org/packages/6d/85/fc0de2bcda3f97c2ee9fe8568f7d48f7279e91068958e5b2cc19e0e5f600/coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988", size = 235920 }, - { url = "https://files.pythonhosted.org/packages/79/73/ef4ea0105531506a6f4cf4ba571a214b14a884630b567ed65b3d9c1975e1/coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5", size = 234986 }, - { url = "https://files.pythonhosted.org/packages/c6/4d/75afcfe4432e2ad0405c6f27adeb109ff8976c5e636af8604f94f29fa3fc/coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3", size = 233446 }, - { url = "https://files.pythonhosted.org/packages/86/5b/efee56a89c16171288cafff022e8af44f8f94075c2d8da563c3935212871/coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5", size = 234566 }, - { url = "https://files.pythonhosted.org/packages/f2/db/67770cceb4a64d3198bf2aa49946f411b85ec6b0a9b489e61c8467a4253b/coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244", size = 210675 }, - { url = "https://files.pythonhosted.org/packages/8d/27/e8bfc43f5345ec2c27bc8a1fa77cdc5ce9dcf954445e11f14bb70b889d14/coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e", size = 211518 }, - { url = "https://files.pythonhosted.org/packages/85/d2/5e175fcf6766cf7501a8541d81778fd2f52f4870100e791f5327fd23270b/coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3", size = 208088 }, - { url = "https://files.pythonhosted.org/packages/4b/6f/06db4dc8fca33c13b673986e20e466fd936235a6ec1f0045c3853ac1b593/coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43", size = 208536 }, - { url = "https://files.pythonhosted.org/packages/0d/62/c6a0cf80318c1c1af376d52df444da3608eafc913b82c84a4600d8349472/coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132", size = 240474 }, - { url = "https://files.pythonhosted.org/packages/a3/59/750adafc2e57786d2e8739a46b680d4fb0fbc2d57fbcb161290a9f1ecf23/coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f", size = 237880 }, - { url = "https://files.pythonhosted.org/packages/2c/f8/ef009b3b98e9f7033c19deb40d629354aab1d8b2d7f9cfec284dbedf5096/coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994", size = 239750 }, - { url = "https://files.pythonhosted.org/packages/a6/e2/6622f3b70f5f5b59f705e680dae6db64421af05a5d1e389afd24dae62e5b/coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99", size = 238642 }, - { url = "https://files.pythonhosted.org/packages/2d/10/57ac3f191a3c95c67844099514ff44e6e19b2915cd1c22269fb27f9b17b6/coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd", size = 237266 }, - { url = "https://files.pythonhosted.org/packages/ee/2d/7016f4ad9d553cabcb7333ed78ff9d27248ec4eba8dd21fa488254dff894/coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377", size = 238045 }, - { url = "https://files.pythonhosted.org/packages/a7/fe/45af5c82389a71e0cae4546413266d2195c3744849669b0bab4b5f2c75da/coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8", size = 210647 }, - { url = "https://files.pythonhosted.org/packages/db/11/3f8e803a43b79bc534c6a506674da9d614e990e37118b4506faf70d46ed6/coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609", size = 211508 }, - { url = "https://files.pythonhosted.org/packages/86/77/19d09ea06f92fdf0487499283b1b7af06bc422ea94534c8fe3a4cd023641/coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853", size = 208281 }, - { url = "https://files.pythonhosted.org/packages/b6/67/5479b9f2f99fcfb49c0d5cf61912a5255ef80b6e80a3cddba39c38146cf4/coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078", size = 208514 }, - { url = "https://files.pythonhosted.org/packages/15/d1/febf59030ce1c83b7331c3546d7317e5120c5966471727aa7ac157729c4b/coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0", size = 241537 }, - { url = "https://files.pythonhosted.org/packages/4b/7e/5ac4c90192130e7cf8b63153fe620c8bfd9068f89a6d9b5f26f1550f7a26/coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50", size = 238572 }, - { url = "https://files.pythonhosted.org/packages/dc/03/0334a79b26ecf59958f2fe9dd1f5ab3e2f88db876f5071933de39af09647/coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022", size = 240639 }, - { url = "https://files.pythonhosted.org/packages/d7/45/8a707f23c202208d7b286d78ad6233f50dcf929319b664b6cc18a03c1aae/coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b", size = 240072 }, - { url = "https://files.pythonhosted.org/packages/66/02/603ce0ac2d02bc7b393279ef618940b4a0535b0868ee791140bda9ecfa40/coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0", size = 238386 }, - { url = "https://files.pythonhosted.org/packages/04/62/4e6887e9be060f5d18f1dd58c2838b2d9646faf353232dec4e2d4b1c8644/coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852", size = 240054 }, - { url = "https://files.pythonhosted.org/packages/5c/74/83ae4151c170d8bd071924f212add22a0e62a7fe2b149edf016aeecad17c/coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359", size = 210904 }, - { url = "https://files.pythonhosted.org/packages/c3/54/de0893186a221478f5880283119fc40483bc460b27c4c71d1b8bba3474b9/coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247", size = 211692 }, - { url = "https://files.pythonhosted.org/packages/25/6d/31883d78865529257bf847df5789e2ae80e99de8a460c3453dbfbe0db069/coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9", size = 208308 }, - { url = "https://files.pythonhosted.org/packages/70/22/3f2b129cc08de00c83b0ad6252e034320946abfc3e4235c009e57cfeee05/coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b", size = 208565 }, - { url = "https://files.pythonhosted.org/packages/97/0a/d89bc2d1cc61d3a8dfe9e9d75217b2be85f6c73ebf1b9e3c2f4e797f4531/coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690", size = 241083 }, - { url = "https://files.pythonhosted.org/packages/4c/81/6d64b88a00c7a7aaed3a657b8eaa0931f37a6395fcef61e53ff742b49c97/coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18", size = 238235 }, - { url = "https://files.pythonhosted.org/packages/9a/0b/7797d4193f5adb4b837207ed87fecf5fc38f7cc612b369a8e8e12d9fa114/coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c", size = 240220 }, - { url = "https://files.pythonhosted.org/packages/65/4d/6f83ca1bddcf8e51bf8ff71572f39a1c73c34cf50e752a952c34f24d0a60/coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd", size = 239847 }, - { url = "https://files.pythonhosted.org/packages/30/9d/2470df6aa146aff4c65fee0f87f58d2164a67533c771c9cc12ffcdb865d5/coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e", size = 237922 }, - { url = 
"https://files.pythonhosted.org/packages/08/dd/723fef5d901e6a89f2507094db66c091449c8ba03272861eaefa773ad95c/coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694", size = 239783 }, - { url = "https://files.pythonhosted.org/packages/3d/f7/64d3298b2baf261cb35466000628706ce20a82d42faf9b771af447cd2b76/coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6", size = 210965 }, - { url = "https://files.pythonhosted.org/packages/d5/58/ec43499a7fc681212fe7742fe90b2bc361cdb72e3181ace1604247a5b24d/coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e", size = 211719 }, - { url = "https://files.pythonhosted.org/packages/ab/c9/f2857a135bcff4330c1e90e7d03446b036b2363d4ad37eb5e3a47bbac8a6/coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe", size = 209050 }, - { url = "https://files.pythonhosted.org/packages/aa/b3/f840e5bd777d8433caa9e4a1eb20503495709f697341ac1a8ee6a3c906ad/coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273", size = 209321 }, - { url = "https://files.pythonhosted.org/packages/85/7d/125a5362180fcc1c03d91850fc020f3831d5cda09319522bcfa6b2b70be7/coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8", size = 252039 }, - { url = "https://files.pythonhosted.org/packages/a9/9c/4358bf3c74baf1f9bddd2baf3756b54c07f2cfd2535f0a47f1e7757e54b3/coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098", size = 247758 }, - { url = "https://files.pythonhosted.org/packages/cf/c7/de3eb6fc5263b26fab5cda3de7a0f80e317597a4bad4781859f72885f300/coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb", size = 250119 }, - { url = "https://files.pythonhosted.org/packages/3e/e6/43de91f8ba2ec9140c6a4af1102141712949903dc732cf739167cfa7a3bc/coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0", size = 249597 }, - { url = "https://files.pythonhosted.org/packages/08/40/61158b5499aa2adf9e37bc6d0117e8f6788625b283d51e7e0c53cf340530/coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf", size = 247473 }, - { url = "https://files.pythonhosted.org/packages/50/69/b3f2416725621e9f112e74e8470793d5b5995f146f596f133678a633b77e/coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2", size = 248737 }, - { url = "https://files.pythonhosted.org/packages/3c/6e/fe899fb937657db6df31cc3e61c6968cb56d36d7326361847440a430152e/coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312", size = 211611 }, - { url = "https://files.pythonhosted.org/packages/1c/55/52f5e66142a9d7bc93a15192eba7a78513d2abf6b3558d77b4ca32f5f424/coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = 
"sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d", size = 212781 }, - { url = "https://files.pythonhosted.org/packages/a1/70/de81bfec9ed38a64fc44a77c7665e20ca507fc3265597c28b0d989e4082e/coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f", size = 200223 }, +version = "7.6.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0c/d6/2b53ab3ee99f2262e6f0b8369a43f6d66658eab45510331c0b3d5c8c4272/coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2", size = 805941 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/67/81dc41ec8f548c365d04a29f1afd492d3176b372c33e47fa2a45a01dc13a/coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8", size = 208345 }, + { url = "https://files.pythonhosted.org/packages/33/43/17f71676016c8829bde69e24c852fef6bd9ed39f774a245d9ec98f689fa0/coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879", size = 208775 }, + { url = "https://files.pythonhosted.org/packages/86/25/c6ff0775f8960e8c0840845b723eed978d22a3cd9babd2b996e4a7c502c6/coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe", size = 237925 }, + { url = "https://files.pythonhosted.org/packages/b0/3d/5f5bd37046243cb9d15fff2c69e498c2f4fe4f9b42a96018d4579ed3506f/coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674", size = 235835 }, + { url = "https://files.pythonhosted.org/packages/b5/f1/9e6b75531fe33490b910d251b0bf709142e73a40e4e38a3899e6986fe088/coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb", size = 236966 }, + { url = "https://files.pythonhosted.org/packages/4f/bc/aef5a98f9133851bd1aacf130e754063719345d2fb776a117d5a8d516971/coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c", size = 236080 }, + { url = "https://files.pythonhosted.org/packages/eb/d0/56b4ab77f9b12aea4d4c11dc11cdcaa7c29130b837eb610639cf3400c9c3/coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c", size = 234393 }, + { url = "https://files.pythonhosted.org/packages/0d/77/28ef95c5d23fe3dd191a0b7d89c82fea2c2d904aef9315daf7c890e96557/coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e", size = 235536 }, + { url = "https://files.pythonhosted.org/packages/29/62/18791d3632ee3ff3f95bc8599115707d05229c72db9539f208bb878a3d88/coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425", size = 211063 }, + { url = "https://files.pythonhosted.org/packages/fc/57/b3878006cedfd573c963e5c751b8587154eb10a61cc0f47a84f85c88a355/coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa", size = 211955 }, + { url = 
"https://files.pythonhosted.org/packages/64/2d/da78abbfff98468c91fd63a73cccdfa0e99051676ded8dd36123e3a2d4d5/coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015", size = 208464 }, + { url = "https://files.pythonhosted.org/packages/31/f2/c269f46c470bdabe83a69e860c80a82e5e76840e9f4bbd7f38f8cebbee2f/coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45", size = 208893 }, + { url = "https://files.pythonhosted.org/packages/47/63/5682bf14d2ce20819998a49c0deadb81e608a59eed64d6bc2191bc8046b9/coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702", size = 241545 }, + { url = "https://files.pythonhosted.org/packages/6a/b6/6b6631f1172d437e11067e1c2edfdb7238b65dff965a12bce3b6d1bf2be2/coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0", size = 239230 }, + { url = "https://files.pythonhosted.org/packages/c7/01/9cd06cbb1be53e837e16f1b4309f6357e2dfcbdab0dd7cd3b1a50589e4e1/coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f", size = 241013 }, + { url = "https://files.pythonhosted.org/packages/4b/26/56afefc03c30871326e3d99709a70d327ac1f33da383cba108c79bd71563/coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f", size = 239750 }, + { url = "https://files.pythonhosted.org/packages/dd/ea/88a1ff951ed288f56aa561558ebe380107cf9132facd0b50bced63ba7238/coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d", size = 238462 }, + { url = "https://files.pythonhosted.org/packages/6e/d4/1d9404566f553728889409eff82151d515fbb46dc92cbd13b5337fa0de8c/coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba", size = 239307 }, + { url = "https://files.pythonhosted.org/packages/12/c1/e453d3b794cde1e232ee8ac1d194fde8e2ba329c18bbf1b93f6f5eef606b/coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f", size = 211117 }, + { url = "https://files.pythonhosted.org/packages/d5/db/829185120c1686fa297294f8fcd23e0422f71070bf85ef1cc1a72ecb2930/coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558", size = 212019 }, + { url = "https://files.pythonhosted.org/packages/e2/7f/4af2ed1d06ce6bee7eafc03b2ef748b14132b0bdae04388e451e4b2c529b/coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad", size = 208645 }, + { url = "https://files.pythonhosted.org/packages/dc/60/d19df912989117caa95123524d26fc973f56dc14aecdec5ccd7d0084e131/coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3", size = 208898 }, + { url = "https://files.pythonhosted.org/packages/bd/10/fecabcf438ba676f706bf90186ccf6ff9f6158cc494286965c76e58742fa/coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574", size = 242987 }, + { url = "https://files.pythonhosted.org/packages/4c/53/4e208440389e8ea936f5f2b0762dcd4cb03281a7722def8e2bf9dc9c3d68/coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985", size = 239881 }, + { url = "https://files.pythonhosted.org/packages/c4/47/2ba744af8d2f0caa1f17e7746147e34dfc5f811fb65fc153153722d58835/coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750", size = 242142 }, + { url = "https://files.pythonhosted.org/packages/e9/90/df726af8ee74d92ee7e3bf113bf101ea4315d71508952bd21abc3fae471e/coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea", size = 241437 }, + { url = "https://files.pythonhosted.org/packages/f6/af/995263fd04ae5f9cf12521150295bf03b6ba940d0aea97953bb4a6db3e2b/coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3", size = 239724 }, + { url = "https://files.pythonhosted.org/packages/1c/8e/5bb04f0318805e190984c6ce106b4c3968a9562a400180e549855d8211bd/coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a", size = 241329 }, + { url = "https://files.pythonhosted.org/packages/9e/9d/fa04d9e6c3f6459f4e0b231925277cfc33d72dfab7fa19c312c03e59da99/coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95", size = 211289 }, + { url = "https://files.pythonhosted.org/packages/53/40/53c7ffe3c0c3fff4d708bc99e65f3d78c129110d6629736faf2dbd60ad57/coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288", size = 212079 }, + { url = "https://files.pythonhosted.org/packages/76/89/1adf3e634753c0de3dad2f02aac1e73dba58bc5a3a914ac94a25b2ef418f/coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1", size = 208673 }, + { url = "https://files.pythonhosted.org/packages/ce/64/92a4e239d64d798535c5b45baac6b891c205a8a2e7c9cc8590ad386693dc/coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd", size = 208945 }, + { url = "https://files.pythonhosted.org/packages/b4/d0/4596a3ef3bca20a94539c9b1e10fd250225d1dec57ea78b0867a1cf9742e/coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9", size = 242484 }, + { url = "https://files.pythonhosted.org/packages/1c/ef/6fd0d344695af6718a38d0861408af48a709327335486a7ad7e85936dc6e/coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e", size = 239525 }, + { url = "https://files.pythonhosted.org/packages/0c/4b/373be2be7dd42f2bcd6964059fd8fa307d265a29d2b9bcf1d044bcc156ed/coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4", size = 241545 }, + { url = "https://files.pythonhosted.org/packages/a6/7d/0e83cc2673a7790650851ee92f72a343827ecaaea07960587c8f442b5cd3/coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6", size = 241179 }, + { url = "https://files.pythonhosted.org/packages/ff/8c/566ea92ce2bb7627b0900124e24a99f9244b6c8c92d09ff9f7633eb7c3c8/coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3", size = 239288 }, + { url = "https://files.pythonhosted.org/packages/7d/e4/869a138e50b622f796782d642c15fb5f25a5870c6d0059a663667a201638/coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc", size = 241032 }, + { url = "https://files.pythonhosted.org/packages/ae/28/a52ff5d62a9f9e9fe9c4f17759b98632edd3a3489fce70154c7d66054dd3/coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3", size = 211315 }, + { url = "https://files.pythonhosted.org/packages/bc/17/ab849b7429a639f9722fa5628364c28d675c7ff37ebc3268fe9840dda13c/coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef", size = 212099 }, + { url = "https://files.pythonhosted.org/packages/d2/1c/b9965bf23e171d98505eb5eb4fb4d05c44efd256f2e0f19ad1ba8c3f54b0/coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e", size = 209511 }, + { url = "https://files.pythonhosted.org/packages/57/b3/119c201d3b692d5e17784fee876a9a78e1b3051327de2709392962877ca8/coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703", size = 209729 }, + { url = "https://files.pythonhosted.org/packages/52/4e/a7feb5a56b266304bc59f872ea07b728e14d5a64f1ad3a2cc01a3259c965/coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0", size = 253988 }, + { url = "https://files.pythonhosted.org/packages/65/19/069fec4d6908d0dae98126aa7ad08ce5130a6decc8509da7740d36e8e8d2/coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924", size = 249697 }, + { url = "https://files.pythonhosted.org/packages/1c/da/5b19f09ba39df7c55f77820736bf17bbe2416bbf5216a3100ac019e15839/coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b", size = 252033 }, + { url = "https://files.pythonhosted.org/packages/1e/89/4c2750df7f80a7872267f7c5fe497c69d45f688f7b3afe1297e52e33f791/coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d", size = 251535 }, + { url = "https://files.pythonhosted.org/packages/78/3b/6d3ae3c1cc05f1b0460c51e6f6dcf567598cbd7c6121e5ad06643974703c/coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827", size = 249192 }, + { url = 
"https://files.pythonhosted.org/packages/6e/8e/c14a79f535ce41af7d436bbad0d3d90c43d9e38ec409b4770c894031422e/coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9", size = 250627 }, + { url = "https://files.pythonhosted.org/packages/cb/79/b7cee656cfb17a7f2c1b9c3cee03dd5d8000ca299ad4038ba64b61a9b044/coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3", size = 212033 }, + { url = "https://files.pythonhosted.org/packages/b6/c3/f7aaa3813f1fa9a4228175a7bd368199659d392897e184435a3b66408dd3/coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f", size = 213240 }, + { url = "https://files.pythonhosted.org/packages/7a/7f/05818c62c7afe75df11e0233bd670948d68b36cdbf2a339a095bc02624a8/coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf", size = 200558 }, + { url = "https://files.pythonhosted.org/packages/fb/b2/f655700e1024dec98b10ebaafd0cedbc25e40e4abe62a3c8e2ceef4f8f0a/coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953", size = 200552 }, ] [package.optional-dependencies] @@ -806,39 +923,43 @@ toml = [ [[package]] name = "cryptography" -version = "44.0.0" +version = "44.0.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "(platform_python_implementation != 'PyPy' and sys_platform == 'darwin') or (platform_python_implementation != 'PyPy' and sys_platform == 'linux') or (platform_python_implementation != 'PyPy' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/91/4c/45dfa6829acffa344e3967d6006ee4ae8be57af746ae2eba1c431949b32c/cryptography-44.0.0.tar.gz", hash = "sha256:cd4e834f340b4293430701e772ec543b0fbe6c2dea510a5286fe0acabe153a02", size = 710657 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/55/09/8cc67f9b84730ad330b3b72cf867150744bf07ff113cda21a15a1c6d2c7c/cryptography-44.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:84111ad4ff3f6253820e6d3e58be2cc2a00adb29335d4cacb5ab4d4d34f2a123", size = 6541833 }, - { url = "https://files.pythonhosted.org/packages/7e/5b/3759e30a103144e29632e7cb72aec28cedc79e514b2ea8896bb17163c19b/cryptography-44.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15492a11f9e1b62ba9d73c210e2416724633167de94607ec6069ef724fad092", size = 3922710 }, - { url = "https://files.pythonhosted.org/packages/5f/58/3b14bf39f1a0cfd679e753e8647ada56cddbf5acebffe7db90e184c76168/cryptography-44.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831c3c4d0774e488fdc83a1923b49b9957d33287de923d58ebd3cec47a0ae43f", size = 4137546 }, - { url = "https://files.pythonhosted.org/packages/98/65/13d9e76ca19b0ba5603d71ac8424b5694415b348e719db277b5edc985ff5/cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb", size = 3915420 }, - { url = "https://files.pythonhosted.org/packages/b1/07/40fe09ce96b91fc9276a9ad272832ead0fddedcba87f1190372af8e3039c/cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b", size = 4154498 }, - { url = 
"https://files.pythonhosted.org/packages/75/ea/af65619c800ec0a7e4034207aec543acdf248d9bffba0533342d1bd435e1/cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543", size = 3932569 }, - { url = "https://files.pythonhosted.org/packages/c7/af/d1deb0c04d59612e3d5e54203159e284d3e7a6921e565bb0eeb6269bdd8a/cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e", size = 4016721 }, - { url = "https://files.pythonhosted.org/packages/bd/69/7ca326c55698d0688db867795134bdfac87136b80ef373aaa42b225d6dd5/cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e", size = 4240915 }, - { url = "https://files.pythonhosted.org/packages/ef/d4/cae11bf68c0f981e0413906c6dd03ae7fa864347ed5fac40021df1ef467c/cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053", size = 2757925 }, - { url = "https://files.pythonhosted.org/packages/64/b1/50d7739254d2002acae64eed4fc43b24ac0cc44bf0a0d388d1ca06ec5bb1/cryptography-44.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:abc998e0c0eee3c8a1904221d3f67dcfa76422b23620173e28c11d3e626c21bd", size = 3202055 }, - { url = "https://files.pythonhosted.org/packages/11/18/61e52a3d28fc1514a43b0ac291177acd1b4de00e9301aaf7ef867076ff8a/cryptography-44.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:660cb7312a08bc38be15b696462fa7cc7cd85c3ed9c576e81f4dc4d8b2b31591", size = 6542801 }, - { url = "https://files.pythonhosted.org/packages/1a/07/5f165b6c65696ef75601b781a280fc3b33f1e0cd6aa5a92d9fb96c410e97/cryptography-44.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1923cb251c04be85eec9fda837661c67c1049063305d6be5721643c22dd4e2b7", size = 3922613 }, - { url = "https://files.pythonhosted.org/packages/28/34/6b3ac1d80fc174812486561cf25194338151780f27e438526f9c64e16869/cryptography-44.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:404fdc66ee5f83a1388be54300ae978b2efd538018de18556dde92575e05defc", size = 4137925 }, - { url = "https://files.pythonhosted.org/packages/d0/c7/c656eb08fd22255d21bc3129625ed9cd5ee305f33752ef2278711b3fa98b/cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289", size = 3915417 }, - { url = "https://files.pythonhosted.org/packages/ef/82/72403624f197af0db6bac4e58153bc9ac0e6020e57234115db9596eee85d/cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7", size = 4155160 }, - { url = "https://files.pythonhosted.org/packages/a2/cd/2f3c440913d4329ade49b146d74f2e9766422e1732613f57097fea61f344/cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c", size = 3932331 }, - { url = "https://files.pythonhosted.org/packages/7f/df/8be88797f0a1cca6e255189a57bb49237402b1880d6e8721690c5603ac23/cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64", size = 4017372 }, - { url = "https://files.pythonhosted.org/packages/af/36/5ccc376f025a834e72b8e52e18746b927f34e4520487098e283a719c205e/cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285", size = 4239657 }, - { url = "https://files.pythonhosted.org/packages/46/b0/f4f7d0d0bcfbc8dd6296c1449be326d04217c57afb8b2594f017eed95533/cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417", size = 2758672 }, - { url = "https://files.pythonhosted.org/packages/97/9b/443270b9210f13f6ef240eff73fd32e02d381e7103969dc66ce8e89ee901/cryptography-44.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:708ee5f1bafe76d041b53a4f95eb28cdeb8d18da17e597d46d7833ee59b97ede", size = 3202071 }, - { url = "https://files.pythonhosted.org/packages/77/d4/fea74422326388bbac0c37b7489a0fcb1681a698c3b875959430ba550daa/cryptography-44.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37d76e6863da3774cd9db5b409a9ecfd2c71c981c38788d3fcfaf177f447b731", size = 3338857 }, - { url = "https://files.pythonhosted.org/packages/1a/aa/ba8a7467c206cb7b62f09b4168da541b5109838627f582843bbbe0235e8e/cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f677e1268c4e23420c3acade68fac427fffcb8d19d7df95ed7ad17cdef8404f4", size = 3850615 }, - { url = "https://files.pythonhosted.org/packages/89/fa/b160e10a64cc395d090105be14f399b94e617c879efd401188ce0fea39ee/cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f5e7cb1e5e56ca0933b4873c0220a78b773b24d40d186b6738080b73d3d0a756", size = 4081622 }, - { url = "https://files.pythonhosted.org/packages/47/8f/20ff0656bb0cf7af26ec1d01f780c5cfbaa7666736063378c5f48558b515/cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:8b3e6eae66cf54701ee7d9c83c30ac0a1e3fa17be486033000f2a73a12ab507c", size = 3867546 }, - { url = "https://files.pythonhosted.org/packages/38/d9/28edf32ee2fcdca587146bcde90102a7319b2f2c690edfa627e46d586050/cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:be4ce505894d15d5c5037167ffb7f0ae90b7be6f2a98f9a5c3442395501c32fa", size = 4090937 }, - { url = "https://files.pythonhosted.org/packages/cc/9d/37e5da7519de7b0b070a3fedd4230fe76d50d2a21403e0f2153d70ac4163/cryptography-44.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:62901fb618f74d7d81bf408c8719e9ec14d863086efe4185afd07c352aee1d2c", size = 3128774 }, +sdist = { url = "https://files.pythonhosted.org/packages/c7/67/545c79fe50f7af51dbad56d16b23fe33f63ee6a5d956b3cb68ea110cbe64/cryptography-44.0.1.tar.gz", hash = "sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14", size = 710819 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/72/27/5e3524053b4c8889da65cf7814a9d0d8514a05194a25e1e34f46852ee6eb/cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009", size = 6642022 }, + { url = "https://files.pythonhosted.org/packages/34/b9/4d1fa8d73ae6ec350012f89c3abfbff19fc95fe5420cf972e12a8d182986/cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f", size = 3943865 }, + { url = "https://files.pythonhosted.org/packages/6e/57/371a9f3f3a4500807b5fcd29fec77f418ba27ffc629d88597d0d1049696e/cryptography-44.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2", size = 4162562 }, + { url = 
"https://files.pythonhosted.org/packages/c5/1d/5b77815e7d9cf1e3166988647f336f87d5634a5ccecec2ffbe08ef8dd481/cryptography-44.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911", size = 3951923 }, + { url = "https://files.pythonhosted.org/packages/28/01/604508cd34a4024467cd4105887cf27da128cba3edd435b54e2395064bfb/cryptography-44.0.1-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69", size = 3685194 }, + { url = "https://files.pythonhosted.org/packages/c6/3d/d3c55d4f1d24580a236a6753902ef6d8aafd04da942a1ee9efb9dc8fd0cb/cryptography-44.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026", size = 4187790 }, + { url = "https://files.pythonhosted.org/packages/ea/a6/44d63950c8588bfa8594fd234d3d46e93c3841b8e84a066649c566afb972/cryptography-44.0.1-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd", size = 3951343 }, + { url = "https://files.pythonhosted.org/packages/c1/17/f5282661b57301204cbf188254c1a0267dbd8b18f76337f0a7ce1038888c/cryptography-44.0.1-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0", size = 4187127 }, + { url = "https://files.pythonhosted.org/packages/f3/68/abbae29ed4f9d96596687f3ceea8e233f65c9645fbbec68adb7c756bb85a/cryptography-44.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf", size = 4070666 }, + { url = "https://files.pythonhosted.org/packages/0f/10/cf91691064a9e0a88ae27e31779200b1505d3aee877dbe1e4e0d73b4f155/cryptography-44.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864", size = 4288811 }, + { url = "https://files.pythonhosted.org/packages/38/78/74ea9eb547d13c34e984e07ec8a473eb55b19c1451fe7fc8077c6a4b0548/cryptography-44.0.1-cp37-abi3-win32.whl", hash = "sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a", size = 2771882 }, + { url = "https://files.pythonhosted.org/packages/cf/6c/3907271ee485679e15c9f5e93eac6aa318f859b0aed8d369afd636fafa87/cryptography-44.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00", size = 3206989 }, + { url = "https://files.pythonhosted.org/packages/9f/f1/676e69c56a9be9fd1bffa9bc3492366901f6e1f8f4079428b05f1414e65c/cryptography-44.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008", size = 6643714 }, + { url = "https://files.pythonhosted.org/packages/ba/9f/1775600eb69e72d8f9931a104120f2667107a0ee478f6ad4fe4001559345/cryptography-44.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862", size = 3943269 }, + { url = "https://files.pythonhosted.org/packages/25/ba/e00d5ad6b58183829615be7f11f55a7b6baa5a06910faabdc9961527ba44/cryptography-44.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3", size = 4166461 }, + { url = "https://files.pythonhosted.org/packages/b3/45/690a02c748d719a95ab08b6e4decb9d81e0ec1bac510358f61624c86e8a3/cryptography-44.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = 
"sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7", size = 3950314 }, + { url = "https://files.pythonhosted.org/packages/e6/50/bf8d090911347f9b75adc20f6f6569ed6ca9b9bff552e6e390f53c2a1233/cryptography-44.0.1-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a", size = 3686675 }, + { url = "https://files.pythonhosted.org/packages/e1/e7/cfb18011821cc5f9b21efb3f94f3241e3a658d267a3bf3a0f45543858ed8/cryptography-44.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c", size = 4190429 }, + { url = "https://files.pythonhosted.org/packages/07/ef/77c74d94a8bfc1a8a47b3cafe54af3db537f081742ee7a8a9bd982b62774/cryptography-44.0.1-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62", size = 3950039 }, + { url = "https://files.pythonhosted.org/packages/6d/b9/8be0ff57c4592382b77406269b1e15650c9f1a167f9e34941b8515b97159/cryptography-44.0.1-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41", size = 4189713 }, + { url = "https://files.pythonhosted.org/packages/78/e1/4b6ac5f4100545513b0847a4d276fe3c7ce0eacfa73e3b5ebd31776816ee/cryptography-44.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b", size = 4071193 }, + { url = "https://files.pythonhosted.org/packages/3d/cb/afff48ceaed15531eab70445abe500f07f8f96af2bb35d98af6bfa89ebd4/cryptography-44.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7", size = 4289566 }, + { url = "https://files.pythonhosted.org/packages/30/6f/4eca9e2e0f13ae459acd1ca7d9f0257ab86e68f44304847610afcb813dc9/cryptography-44.0.1-cp39-abi3-win32.whl", hash = "sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9", size = 2772371 }, + { url = "https://files.pythonhosted.org/packages/d2/05/5533d30f53f10239616a357f080892026db2d550a40c393d0a8a7af834a9/cryptography-44.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f", size = 3207303 }, + { url = "https://files.pythonhosted.org/packages/15/06/507bfb5c7e048114a0185dd65f7814677a2ba285d15705c3d69e660c21d7/cryptography-44.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183", size = 3380782 }, + { url = "https://files.pythonhosted.org/packages/e0/f1/7fb4982d59aa86e1a116c812b545e7fc045352be07738ae3fb278835a9a4/cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12", size = 3888155 }, + { url = "https://files.pythonhosted.org/packages/60/7b/cbc203838d3092203493d18b923fbbb1de64e0530b332a713ba376905b0b/cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83", size = 4106417 }, + { url = "https://files.pythonhosted.org/packages/12/c7/2fe59fb085ab418acc82e91e040a6acaa7b1696fcc1c1055317537fbf0d3/cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420", size = 3887540 }, + { url = 
"https://files.pythonhosted.org/packages/48/89/09fc7b115f60f5bd970b80e32244f8e9aeeb9244bf870b63420cec3b5cd5/cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4", size = 4106040 }, + { url = "https://files.pythonhosted.org/packages/2e/38/3fd83c4690dc7d753a442a284b3826ea5e5c380a411443c66421cd823898/cryptography-44.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7", size = 3134657 }, ] [[package]] @@ -899,11 +1020,11 @@ wheels = [ [[package]] name = "decorator" -version = "5.1.1" +version = "5.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/66/0c/8d907af351aa16b42caae42f9d6aa37b900c67308052d10fdce809f8d952/decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330", size = 35016 } +sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186", size = 9073 }, + { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190 }, ] [[package]] @@ -917,14 +1038,14 @@ wheels = [ [[package]] name = "deprecated" -version = "1.2.15" +version = "1.2.18" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "wrapt", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2e/a3/53e7d78a6850ffdd394d7048a31a6f14e44900adedf190f9a165f6b69439/deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d", size = 2977612 } +sdist = { url = "https://files.pythonhosted.org/packages/98/97/06afe62762c9a8a86af0cfb7bfdab22a43ad17138b07af5b1a58442690a2/deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d", size = 2928744 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/8f/c7f227eb42cfeaddce3eb0c96c60cbca37797fa7b34f8e1aeadf6c5c0983/Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320", size = 9941 }, + { url = "https://files.pythonhosted.org/packages/6e/c6/ac0b6c1e2d138f1002bcf799d330bd6d85084fece321e662a14223794041/Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec", size = 9998 }, ] [[package]] @@ -939,6 +1060,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a", size = 11178 }, ] +[[package]] +name = "diskcache" +version = "5.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/3f/21/1c1ffc1a039ddcc459db43cc108658f32c57d271d7289a2794e401d0fdb6/diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc", size = 67916 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19", size = 45550 }, +] + [[package]] name = "distlib" version = "0.3.9" @@ -966,6 +1096,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632 }, ] +[[package]] +name = "docker" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774 }, +] + [[package]] name = "docstring-parser" version = "0.16" @@ -1013,25 +1157,25 @@ wheels = [ [[package]] name = "executing" -version = "2.1.0" +version = "2.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8c/e3/7d45f492c2c4a0e8e0fad57d081a7c8a0286cdd86372b070cca1ec0caa1e/executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab", size = 977485 } +sdist = { url = "https://files.pythonhosted.org/packages/91/50/a9d80c47ff289c611ff12e63f7c5d13942c65d68125160cefd768c73e6e4/executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755", size = 978693 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/fd/afcd0496feca3276f509df3dbd5dae726fcc756f1a08d9e25abe1733f962/executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf", size = 25805 }, + { url = "https://files.pythonhosted.org/packages/7b/8f/c4d9bafc34ad7ad5d8dc16dd1347ee0e507a52c3adb6bfa8887e1c6a26ba/executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa", size = 26702 }, ] [[package]] name = "fastapi" -version = "0.115.6" +version = "0.115.8" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "starlette", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/93/72/d83b98cd106541e8f5e5bfab8ef2974ab45a62e8a6c5b5e6940f26d2ed4b/fastapi-0.115.6.tar.gz", hash = "sha256:9ec46f7addc14ea472958a96aae5b5de65f39721a46aaf5705c480d9a8b76654", size = 301336 } +sdist = { url = "https://files.pythonhosted.org/packages/a2/b2/5a5dc4affdb6661dea100324e19a7721d5dc524b464fe8e366c093fd7d87/fastapi-0.115.8.tar.gz", hash = "sha256:0ce9111231720190473e222cdf0f07f7206ad7e53ea02beb1d2dc36e2f0741e9", size = 295403 } wheels = [ - { url = "https://files.pythonhosted.org/packages/52/b3/7e4df40e585df024fac2f80d1a2d579c854ac37109675db2b0cc22c0bb9e/fastapi-0.115.6-py3-none-any.whl", hash = "sha256:e9240b29e36fa8f4bb7290316988e90c381e5092e0cbe84e7818cc3713bcf305", size = 94843 }, + { url = "https://files.pythonhosted.org/packages/8f/7d/2d6ce181d7a5f51dedb8c06206cbf0ec026a99bf145edd309f9e17c3282f/fastapi-0.115.8-py3-none-any.whl", hash = "sha256:753a96dd7e036b34eeef8babdfcfe3f28ff79648f86551eb36bfc1b0bf4a8cbf", size = 94814 }, ] [[package]] @@ -1045,11 +1189,23 @@ wheels = [ [[package]] name = "filelock" -version = "3.16.1" +version = "3.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037 } +sdist = { url = "https://files.pythonhosted.org/packages/dc/9c/0b15fb47b464e1b663b1acd1253a062aa5feecb07d4e597daea542ebd2b5/filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e", size = 18027 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163 }, + { url = "https://files.pythonhosted.org/packages/89/ec/00d68c4ddfedfe64159999e5f8a98fb8442729a63e2077eb9dcd89623d27/filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338", size = 16164 }, +] + +[[package]] +name = "flaml" +version = "2.3.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/20/a8/17322311b77f3012194f92c47c81455463f99c48d358c463fa45bd3c8541/flaml-2.3.4.tar.gz", hash = "sha256:308c3e769976d8a0272f2fd7d98258d7d4a4fd2e4525ba540d1ba149ae266c54", size = 284728 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/5c/c1e734b36d0f068708836238fbf1e8c34716a61e1a844482f37e277ba476/FLAML-2.3.4-py3-none-any.whl", hash = "sha256:dceab62194d469889c4584531049ac0a43480056f4f39c6ea207bfc12a157d76", size = 314250 }, ] [[package]] @@ -1083,11 +1239,11 @@ wheels = [ [[package]] name = "flatbuffers" -version = "24.12.23" +version = "25.2.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a3/83/9ae01534f7e92a0c04f86586a0d62a4a0266e51d8bb2bfd5b8ea8165abba/flatbuffers-24.12.23.tar.gz", hash = "sha256:2910b0bc6ae9b6db78dd2b18d0b7a0709ba240fb5585f286a3a2b30785c22dac", size = 22164 } +sdist = { url = "https://files.pythonhosted.org/packages/e4/30/eb5dce7994fc71a2f685d98ec33cc660c0a5887db5610137e60d8cbc4489/flatbuffers-25.2.10.tar.gz", hash = 
"sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e", size = 22170 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/b4/31c461eef98b96b8ab736d97274548eaf2b2e349bf09e4de3902f7d53084/flatbuffers-24.12.23-py2.py3-none-any.whl", hash = "sha256:c418e0d48890f4142b92fd3e343e73a48f194e1f80075ddcc5793779b3585444", size = 30962 }, + { url = "https://files.pythonhosted.org/packages/b8/25/155f9f080d5e4bc0082edfda032ea2bc2b8fab3f4d25d46c1e9dd22a1a89/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051", size = 30953 }, ] [[package]] @@ -1161,16 +1317,16 @@ wheels = [ [[package]] name = "fsspec" -version = "2024.12.0" +version = "2025.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/11/de70dee31455c546fbc88301971ec03c328f3d1138cfba14263f651e9551/fsspec-2024.12.0.tar.gz", hash = "sha256:670700c977ed2fb51e0d9f9253177ed20cbde4a3e5c0283cc5385b5870c8533f", size = 291600 } +sdist = { url = "https://files.pythonhosted.org/packages/b5/79/68612ed99700e6413de42895aa725463e821a6b3be75c87fcce1b4af4c70/fsspec-2025.2.0.tar.gz", hash = "sha256:1c24b16eaa0a1798afa0337aa0db9b256718ab2a89c425371f5628d22c3b6afd", size = 292283 } wheels = [ - { url = "https://files.pythonhosted.org/packages/de/86/5486b0188d08aa643e127774a99bac51ffa6cf343e3deb0583956dca5b22/fsspec-2024.12.0-py3-none-any.whl", hash = "sha256:b520aed47ad9804237ff878b504267a3b0b441e97508bd6d2d8774e3db85cee2", size = 183862 }, + { url = "https://files.pythonhosted.org/packages/e2/94/758680531a00d06e471ef649e4ec2ed6bf185356a7f9fbfbb7368a40bd49/fsspec-2025.2.0-py3-none-any.whl", hash = "sha256:9de2ad9ce1f85e1931858535bc882543171d197001a0a5eb2ddc04f1781ab95b", size = 184484 }, ] [[package]] name = "google-ai-generativelanguage" -version = "0.6.10" +version = "0.6.15" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", extra = ["grpc"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1178,14 +1334,14 @@ dependencies = [ { name = "proto-plus", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a5/71/46543c398629bb883b769041fc10278d4d63aaa2c34744dede1b84ec0207/google_ai_generativelanguage-0.6.10.tar.gz", hash = "sha256:6fa642c964d8728006fe7e8771026fc0b599ae0ebeaf83caf550941e8e693455", size = 795200 } +sdist = { url = "https://files.pythonhosted.org/packages/11/d1/48fe5d7a43d278e9f6b5ada810b0a3530bbeac7ed7fcbcd366f932f05316/google_ai_generativelanguage-0.6.15.tar.gz", hash = "sha256:8f6d9dc4c12b065fe2d0289026171acea5183ebf2d0b11cefe12f3821e159ec3", size = 1375443 } wheels = [ - { url = "https://files.pythonhosted.org/packages/af/6d/db99a295f9caf027bbdd90c41e6ea650a7468392a0e8713719e7abc5f647/google_ai_generativelanguage-0.6.10-py3-none-any.whl", hash = "sha256:854a2bf833d18be05ad5ef13c755567b66a4f4a870f099b62c61fe11bddabcf4", size = 760045 }, + { url = "https://files.pythonhosted.org/packages/7c/a3/67b8a6ff5001a1d8864922f2d6488dc2a14367ceb651bc3f09a947f2f306/google_ai_generativelanguage-0.6.15-py3-none-any.whl", hash = "sha256:5a03ef86377aa184ffef3662ca28f19eeee158733e45d7947982eb953c6ebb6c", size = 1327356 }, ] [[package]] name = "google-api-core" -version = "2.24.0" +version = "2.24.1" source 
= { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-auth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1194,9 +1350,9 @@ dependencies = [ { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/81/56/d70d66ed1b5ab5f6c27bf80ec889585ad8f865ff32acbafd3b2ef0bfb5d0/google_api_core-2.24.0.tar.gz", hash = "sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf", size = 162647 } +sdist = { url = "https://files.pythonhosted.org/packages/b8/b7/481c83223d7b4f02c7651713fceca648fa3336e1571b9804713f66bca2d8/google_api_core-2.24.1.tar.gz", hash = "sha256:f8b36f5456ab0dd99a1b693a40a31d1e7757beea380ad1b38faaf8941eae9d8a", size = 163508 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/76/65b8b94e74bf1b6d1cc38d916089670c4da5029d25762441d8c5c19e51dd/google_api_core-2.24.0-py3-none-any.whl", hash = "sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9", size = 158576 }, + { url = "https://files.pythonhosted.org/packages/b1/a6/8e30ddfd3d39ee6d2c76d3d4f64a83f77ac86a4cab67b286ae35ce9e4369/google_api_core-2.24.1-py3-none-any.whl", hash = "sha256:bc78d608f5a5bf853b80bd70a795f703294de656c096c0968320830a4bc280f1", size = 160059 }, ] [package.optional-dependencies] @@ -1207,7 +1363,7 @@ grpc = [ [[package]] name = "google-api-python-client" -version = "2.159.0" +version = "2.162.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1216,23 +1372,23 @@ dependencies = [ { name = "httplib2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "uritemplate", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5a/9f/12b58cca5a93d63fd6a7abed570423bdf2db4349eb9361ac5214d42ed7d6/google_api_python_client-2.159.0.tar.gz", hash = "sha256:55197f430f25c907394b44fa078545ffef89d33fd4dca501b7db9f0d8e224bd6", size = 12302576 } +sdist = { url = "https://files.pythonhosted.org/packages/73/d0/4a82e36c514437fa977d9b24f15328cd4505a0d92fcab9a18c81210b0f72/google_api_python_client-2.162.0.tar.gz", hash = "sha256:5f8bc934a5b6eea73a7d12d999e6585c1823179f48340234acb385e2502e735a", size = 12562719 } wheels = [ - { url = "https://files.pythonhosted.org/packages/51/ab/d0671375afe79e6e8c51736e115a69bb6b4bcdc80cd5c01bf667486cd24c/google_api_python_client-2.159.0-py2.py3-none-any.whl", hash = "sha256:baef0bb631a60a0bd7c0bf12a5499e3a40cd4388484de7ee55c1950bf820a0cf", size = 12814228 }, + { url = "https://files.pythonhosted.org/packages/ce/b9/69e1f64714da8b312448f6c425c346189f377ee6a5ee06fa8b5371e08b6c/google_api_python_client-2.162.0-py2.py3-none-any.whl", hash = "sha256:49365fa4f7795fe81a747f5544d6528ea94314fa59664e0ea1005f603facf1ec", size = 13072387 }, ] [[package]] name = "google-auth" -version = "2.37.0" +version = "2.38.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cachetools", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pyasn1-modules", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or 
sys_platform == 'win32'" }, { name = "rsa", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/46/af/b25763b9d35dfc2c6f9c3ec34d8d3f1ba760af3a7b7e8d5c5f0579522c45/google_auth-2.37.0.tar.gz", hash = "sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00", size = 268878 } +sdist = { url = "https://files.pythonhosted.org/packages/c6/eb/d504ba1daf190af6b204a9d4714d457462b486043744901a6eeea711f913/google_auth-2.38.0.tar.gz", hash = "sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4", size = 270866 } wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/8d/4d5d5f9f500499f7bd4c93903b43e8d6976f3fc6f064637ded1a85d09b07/google_auth-2.37.0-py2.py3-none-any.whl", hash = "sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0", size = 209829 }, + { url = "https://files.pythonhosted.org/packages/9d/47/603554949a37bca5b7f894d51896a9c534b9eab808e2520a748e081669d0/google_auth-2.38.0-py2.py3-none-any.whl", hash = "sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a", size = 210770 }, ] [[package]] @@ -1250,7 +1406,7 @@ wheels = [ [[package]] name = "google-cloud-aiplatform" -version = "1.77.0" +version = "1.80.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "docstring-parser", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1266,14 +1422,14 @@ dependencies = [ { name = "shapely", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4d/45/7ffd099ff7554d9f4f3665611afb44d3ea59f8a3dd071e4284381d0ac3c1/google_cloud_aiplatform-1.77.0.tar.gz", hash = "sha256:1e5b77fe6c7f276d7aae65bcf08a273122a71f6c4af1f43cf45821f603a74080", size = 8287282 } +sdist = { url = "https://files.pythonhosted.org/packages/f0/88/d36384280cc4653e190a4a30025e66b285fbaef06024f68a4264cc588a33/google_cloud_aiplatform-1.80.0.tar.gz", hash = "sha256:bcaa4570a6fb56d3d29cb6b8f92588d4d1a1931de5f90cf07761853dab4c76fd", size = 8459480 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/b6/f7a3c8bdb08a3636d216c49768eff3369b5475edd71f6dbe590a942252b9/google_cloud_aiplatform-1.77.0-py2.py3-none-any.whl", hash = "sha256:e9dd1bcb1b9a85eddd452916cd6ad1d9ce2d487772a9e45b1814aa0ac5633689", size = 6939280 }, + { url = "https://files.pythonhosted.org/packages/b5/57/5e761e7a8b03efc8e7faa4c0b2775991177bbd4dae7a6656a60dfd092ca8/google_cloud_aiplatform-1.80.0-py2.py3-none-any.whl", hash = "sha256:45d2a170f22431dae977551eccb740400bdb899807d0c8d4c16c53b2c1dbc6a5", size = 7089949 }, ] [[package]] name = "google-cloud-bigquery" -version = "3.27.0" +version = "3.29.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", extra = ["grpc"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1284,27 +1440,27 @@ dependencies = [ { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/c0/05/633ce6686b1fed2cd364fa4698bfa6d586263cd4795d012584f8097061e1/google_cloud_bigquery-3.27.0.tar.gz", hash = "sha256:379c524054d7b090fa56d0c22662cc6e6458a6229b6754c0e7177e3a73421d2c", size = 456964 } +sdist = { url = "https://files.pythonhosted.org/packages/21/36/87875a9775985849f18d4b3e320e4acdeb5232db3d49cfa6269e7c7867b8/google_cloud_bigquery-3.29.0.tar.gz", hash = "sha256:fafc2b455ffce3bcc6ce0e884184ef50b6a11350a83b91e327fadda4d5566e72", size = 467180 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f5/40/4b11a4a8839de8ce802a3ccd60b34e70ce10d13d434a560534ba98f0ea3f/google_cloud_bigquery-3.27.0-py2.py3-none-any.whl", hash = "sha256:b53b0431e5ba362976a4cd8acce72194b4116cdf8115030c7b339b884603fcc3", size = 240100 }, + { url = "https://files.pythonhosted.org/packages/68/60/9e1430f0fe17f8e8e931eff468021516f74f2573f261221529767dd59591/google_cloud_bigquery-3.29.0-py2.py3-none-any.whl", hash = "sha256:5453a4eabe50118254eda9778f3d7dad413490de5f7046b5e66c98f5a1580308", size = 244605 }, ] [[package]] name = "google-cloud-core" -version = "2.4.1" +version = "2.4.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "google-auth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b8/1f/9d1e0ba6919668608570418a9a51e47070ac15aeff64261fb092d8be94c0/google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073", size = 35587 } +sdist = { url = "https://files.pythonhosted.org/packages/8d/96/16cc0a34f75899ace6a42bb4ef242ac4aa263089b018d1c18c007d1fd8f2/google_cloud_core-2.4.2.tar.gz", hash = "sha256:a4fcb0e2fcfd4bfe963837fad6d10943754fd79c1a50097d68540b6eb3d67f35", size = 35854 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/0f/2e2061e3fbcb9d535d5da3f58cc8de4947df1786fe6a1355960feb05a681/google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61", size = 29233 }, + { url = "https://files.pythonhosted.org/packages/9c/0f/76e813cee7568ac467d929f4f0da7ab349596e7fc4ee837b990611e07d99/google_cloud_core-2.4.2-py2.py3-none-any.whl", hash = "sha256:7459c3e83de7cb8b9ecfec9babc910efb4314030c56dd798eaad12c426f7d180", size = 29343 }, ] [[package]] name = "google-cloud-resource-manager" -version = "1.14.0" +version = "1.14.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", extra = ["grpc"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1313,9 +1469,9 @@ dependencies = [ { name = "proto-plus", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cd/74/db14f34283b325b775b3287cd72ce8c43688bdea26801d02017a2ccded08/google_cloud_resource_manager-1.14.0.tar.gz", hash = "sha256:daa70a3a4704759d31f812ed221e3b6f7b660af30c7862e4a0060ea91291db30", size = 430148 } +sdist = { url = "https://files.pythonhosted.org/packages/76/9d/da2e07d064926fc0d84c5f179006148cfa6fcffe6fd7aabdbf86dd20c46c/google_cloud_resource_manager-1.14.1.tar.gz", hash = 
"sha256:41e9e546aaa03d5160cdfa2341dbe81ef7596706c300a89b94c429f1f3411f87", size = 443094 } wheels = [ - { url = "https://files.pythonhosted.org/packages/64/c4/2275ca35419f9a2ae66846f389490b356856bf55a9ad9f95a88399a89294/google_cloud_resource_manager-1.14.0-py2.py3-none-any.whl", hash = "sha256:4860c3ea9ace760b317ea90d4e27f1b32e54ededdcc340a7cb70c8ef238d8f7c", size = 384138 }, + { url = "https://files.pythonhosted.org/packages/47/be/ffdba56168f7e3778cd002a35fc0e94c608f088f6df24d2b980538389d71/google_cloud_resource_manager-1.14.1-py2.py3-none-any.whl", hash = "sha256:68340599f85ebf07a6e18487e460ea07cc15e132068f6b188786d01c2cf25518", size = 392325 }, ] [[package]] @@ -1363,7 +1519,7 @@ wheels = [ [[package]] name = "google-generativeai" -version = "0.8.3" +version = "0.8.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-ai-generativelanguage", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1376,7 +1532,7 @@ dependencies = [ { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/2f/b5c1d62e94409ed98d5425e83b8e6d3dd475b611be272f561b1a545d273a/google_generativeai-0.8.3-py3-none-any.whl", hash = "sha256:1108ff89d5b8e59f51e63d1a8bf84701cd84656e17ca28d73aeed745e736d9b7", size = 160822 }, + { url = "https://files.pythonhosted.org/packages/9b/b0/6c6af327a8a6ef3be6fe79be1d6f1e2914d6c363aa6b081b93396f4460a7/google_generativeai-0.8.4-py3-none-any.whl", hash = "sha256:e987b33ea6decde1e69191ddcaec6ef974458864d243de7191db50c21a7c5b82", size = 175409 }, ] [[package]] @@ -1393,14 +1549,14 @@ wheels = [ [[package]] name = "googleapis-common-protos" -version = "1.66.0" +version = "1.68.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/a7/8e9cccdb1c49870de6faea2a2764fa23f627dd290633103540209f03524c/googleapis_common_protos-1.66.0.tar.gz", hash = "sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c", size = 114376 } +sdist = { url = "https://files.pythonhosted.org/packages/54/d2/c08f0d9f94b45faca68e355771329cba2411c777c8713924dd1baee0e09c/googleapis_common_protos-1.68.0.tar.gz", hash = "sha256:95d38161f4f9af0d9423eed8fb7b64ffd2568c3464eb542ff02c5bfa1953ab3c", size = 57367 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/0f/c0713fb2b3d28af4b2fded3291df1c4d4f79a00d15c2374a9e010870016c/googleapis_common_protos-1.66.0-py2.py3-none-any.whl", hash = "sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed", size = 221682 }, + { url = "https://files.pythonhosted.org/packages/3f/85/c99a157ee99d67cc6c9ad123abb8b1bfb476fab32d2f3511c59314548e4f/googleapis_common_protos-1.68.0-py2.py3-none-any.whl", hash = "sha256:aaf179b2f81df26dfadac95def3b16a95064c76a5f45f07e4c68a21bb371c4ac", size = 164985 }, ] [package.optional-dependencies] @@ -1553,15 +1709,15 @@ wheels = [ [[package]] name = "h2" -version = "4.1.0" +version = "4.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "hpack", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "hyperframe", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/2a/32/fec683ddd10629ea4ea46d206752a95a2d8a48c22521edd70b142488efe1/h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb", size = 2145593 } +sdist = { url = "https://files.pythonhosted.org/packages/1b/38/d7f80fd13e6582fb8e0df8c9a653dcc02b03ca34f4d72f34869298c5baf8/h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f", size = 2150682 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/e5/db6d438da759efbb488c4f3fbdab7764492ff3c3f953132efa6b9f0e9e53/h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d", size = 57488 }, + { url = "https://files.pythonhosted.org/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0", size = 60957 }, ] [[package]] @@ -1640,11 +1796,11 @@ wheels = [ [[package]] name = "hpack" -version = "4.0.0" +version = "4.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3e/9b/fda93fb4d957db19b0f6b370e79d586b3e8528b20252c729c476a2c02954/hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095", size = 49117 } +sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/34/e8b383f35b77c402d28563d2b8f83159319b509bc5f760b15d60b0abf165/hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c", size = 32611 }, + { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357 }, ] [[package]] @@ -1710,18 +1866,17 @@ wheels = [ [[package]] name = "httpx" -version = "0.27.2" +version = "0.28.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "certifi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "httpcore", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "idna", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "sniffio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/82/08f8c936781f67d9e6b9eeb8a0c8b4e406136ea4c3d1f89a5db71d42e0e6/httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2", size = 144189 } +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = 
"sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0", size = 76395 }, + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, ] [package.optional-dependencies] @@ -1731,7 +1886,7 @@ http2 = [ [[package]] name = "huggingface-hub" -version = "0.27.1" +version = "0.29.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1742,9 +1897,9 @@ dependencies = [ { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e1/d2/d6976de7542792fc077b498d64af64882b6d8bb40679284ec0bff77d5929/huggingface_hub-0.27.1.tar.gz", hash = "sha256:c004463ca870283909d715d20f066ebd6968c2207dae9393fdffb3c1d4d8f98b", size = 379407 } +sdist = { url = "https://files.pythonhosted.org/packages/22/37/797d6476f13e5ef6af5fc48a5d641d32b39c37e166ccf40c3714c5854a85/huggingface_hub-0.29.1.tar.gz", hash = "sha256:9524eae42077b8ff4fc459ceb7a514eca1c1232b775276b009709fe2a084f250", size = 389776 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6c/3f/50f6b25fafdcfb1c089187a328c95081abf882309afd86f4053951507cd1/huggingface_hub-0.27.1-py3-none-any.whl", hash = "sha256:1c5155ca7d60b60c2e2fc38cbb3ffb7f7c3adf48f824015b219af9061771daec", size = 450658 }, + { url = "https://files.pythonhosted.org/packages/ae/05/75b90de9093de0aadafc868bb2fa7c57651fd8f45384adf39bd77f63980d/huggingface_hub-0.29.1-py3-none-any.whl", hash = "sha256:352f69caf16566c7b6de84b54a822f6238e17ddd8ae3da4f8f2272aea5b198d5", size = 468049 }, ] [[package]] @@ -1761,20 +1916,20 @@ wheels = [ [[package]] name = "hyperframe" -version = "6.0.1" +version = "6.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/2a/4747bff0a17f7281abe73e955d60d80aae537a5d203f417fa1c2e7578ebb/hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914", size = 25008 } +sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/de/85a784bcc4a3779d1753a7ec2dee5de90e18c7bcf402e71b51fcf150b129/hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15", size = 12389 }, + { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007 }, ] [[package]] name = "identify" -version = "2.6.5" +version = "2.6.8" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cf/92/69934b9ef3c31ca2470980423fda3d00f0460ddefdf30a67adf7f17e2e00/identify-2.6.5.tar.gz", hash = "sha256:c10b33f250e5bba374fae86fb57f3adcebf1161bce7cdf92031915fd480c13bc", size = 99213 } +sdist = { url = 
"https://files.pythonhosted.org/packages/f9/fa/5eb460539e6f5252a7c5a931b53426e49258cde17e3d50685031c300a8fd/identify-2.6.8.tar.gz", hash = "sha256:61491417ea2c0c5c670484fd8abbb34de34cdae1e5f39a73ee65e48e4bb663fc", size = 99249 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/fa/dce098f4cdf7621aa8f7b4f919ce545891f489482f0bfa5102f3eca8608b/identify-2.6.5-py2.py3-none-any.whl", hash = "sha256:14181a47091eb75b337af4c23078c9d09225cd4c48929f521f3bf16b09d02566", size = 99078 }, + { url = "https://files.pythonhosted.org/packages/78/8c/4bfcab2d8286473b8d83ea742716f4b79290172e75f91142bc1534b05b9a/identify-2.6.8-py2.py3-none-any.whl", hash = "sha256:83657f0f766a3c8d0eaea16d4ef42494b39b34629a4b3192a9d020d349b3e255", size = 99109 }, ] [[package]] @@ -1786,6 +1941,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, ] +[[package]] +name = "ifaddr" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/ac/fb4c578f4a3256561548cd825646680edcadb9440f3f68add95ade1eb791/ifaddr-0.2.0.tar.gz", hash = "sha256:cc0cbfcaabf765d44595825fb96a99bb12c79716b73b44330ea38ee2b0c4aed4", size = 10485 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/1f/19ebc343cc71a7ffa78f17018535adc5cbdd87afb31d7c34874680148b32/ifaddr-0.2.0-py3-none-any.whl", hash = "sha256:085e0305cfe6f16ab12d72e2024030f5d52674afad6911bb1eee207177b8a748", size = 12314 }, +] + [[package]] name = "importlib-metadata" version = "8.5.0" @@ -1842,7 +2006,7 @@ wheels = [ [[package]] name = "ipython" -version = "8.31.0" +version = "8.32.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -1857,9 +2021,9 @@ dependencies = [ { name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "(python_full_version < '3.12' and sys_platform == 'darwin') or (python_full_version < '3.12' and sys_platform == 'linux') or (python_full_version < '3.12' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/01/35/6f90fdddff7a08b7b715fccbd2427b5212c9525cd043d26fdc45bee0708d/ipython-8.31.0.tar.gz", hash = "sha256:b6a2274606bec6166405ff05e54932ed6e5cfecaca1fc05f2cacde7bb074d70b", size = 5501011 } +sdist = { url = "https://files.pythonhosted.org/packages/36/80/4d2a072e0db7d250f134bc11676517299264ebe16d62a8619d49a78ced73/ipython-8.32.0.tar.gz", hash = "sha256:be2c91895b0b9ea7ba49d33b23e2040c352b33eb6a519cca7ce6e0c743444251", size = 5507441 } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/60/d0feb6b6d9fe4ab89fe8fe5b47cbf6cd936bfd9f1e7ffa9d0015425aeed6/ipython-8.31.0-py3-none-any.whl", hash = "sha256:46ec58f8d3d076a61d128fe517a51eb730e3aaf0c184ea8c17d16e366660c6a6", size = 821583 }, + { url = "https://files.pythonhosted.org/packages/e7/e1/f4474a7ecdb7745a820f6f6039dc43c66add40f1bcc66485607d93571af6/ipython-8.32.0-py3-none-any.whl", hash = "sha256:cae85b0c61eff1fc48b0a8002de5958b6528fa9c8defb1894da63f42613708aa", size = 825524 }, ] [[package]] @@ -2007,7 +2171,7 @@ wheels = [ [[package]] name = "jsonschema-path" -version = "0.3.3" +version = "0.3.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pathable", marker = 
"sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -2015,9 +2179,9 @@ dependencies = [ { name = "referencing", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/85/39/3a58b63a997b0cf824536d6f84fff82645a1ca8de222ee63586adab44dfa/jsonschema_path-0.3.3.tar.gz", hash = "sha256:f02e5481a4288ec062f8e68c808569e427d905bedfecb7f2e4c69ef77957c382", size = 11589 } +sdist = { url = "https://files.pythonhosted.org/packages/6e/45/41ebc679c2a4fced6a722f624c18d658dee42612b83ea24c1caf7c0eb3a8/jsonschema_path-0.3.4.tar.gz", hash = "sha256:8365356039f16cc65fddffafda5f58766e34bebab7d6d105616ab52bc4297001", size = 11159 } wheels = [ - { url = "https://files.pythonhosted.org/packages/53/b0/69237e85976916b2e37586b7ddc48b9547fc38b440e25103d084b2b02ab3/jsonschema_path-0.3.3-py3-none-any.whl", hash = "sha256:203aff257f8038cd3c67be614fe6b2001043408cb1b4e36576bc4921e09d83c4", size = 14817 }, + { url = "https://files.pythonhosted.org/packages/cb/58/3485da8cb93d2f393bce453adeef16896751f14ba3e2024bc21dc9597646/jsonschema_path-0.3.4-py3-none-any.whl", hash = "sha256:f502191fdc2b22050f9a81c9237be9d27145b9001c55842bece5e94e382e52f8", size = 14810 }, ] [[package]] @@ -2073,7 +2237,7 @@ wheels = [ [[package]] name = "kubernetes" -version = "31.0.0" +version = "32.0.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -2088,9 +2252,9 @@ dependencies = [ { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "websocket-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7e/bd/ffcd3104155b467347cd9b3a64eb24182e459579845196b3a200569c8912/kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0", size = 916096 } +sdist = { url = "https://files.pythonhosted.org/packages/b7/e8/0598f0e8b4af37cd9b10d8b87386cf3173cb8045d834ab5f6ec347a758b3/kubernetes-32.0.1.tar.gz", hash = "sha256:42f43d49abd437ada79a79a16bd48a604d3471a117a8347e87db693f2ba0ba28", size = 946691 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/a8/17f5e28cecdbd6d48127c22abdb794740803491f422a11905c4569d8e139/kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1", size = 1857013 }, + { url = "https://files.pythonhosted.org/packages/08/10/9f8af3e6f569685ce3af7faab51c8dd9d93b9c38eba339ca31c746119447/kubernetes-32.0.1-py2.py3-none-any.whl", hash = "sha256:35282ab8493b938b08ab5526c7ce66588232df00ef5e1dbe88a419107dc10998", size = 1988070 }, ] [[package]] @@ -2240,7 +2404,7 @@ wheels = [ [[package]] name = "mistralai" -version = "1.3.1" +version = "1.5.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "eval-type-backport", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -2250,21 +2414,21 @@ dependencies = [ { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-inspect", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or 
sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2f/50/59669ee8d21fd27a4f887148b1efb19d9be5ed22ec19c8e6eb842407ac0f/mistralai-1.3.1.tar.gz", hash = "sha256:1c30385656393f993625943045ad20de2aff4c6ab30fc6e8c727d735c22b1c08", size = 133338 } +sdist = { url = "https://files.pythonhosted.org/packages/16/9d/aba193fdfe0fc7403efa380189143d965becfb1bc7df3230e5c7664f8c53/mistralai-1.5.0.tar.gz", hash = "sha256:fd94bc93bc25aad9c6dd8005b1a0bc4ba1250c6b3fbf855a49936989cc6e5c0d", size = 131647 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1a/b4/a76b6942b78383d5499f776d880a166296542383f6f952feeef96d0ea692/mistralai-1.3.1-py3-none-any.whl", hash = "sha256:35e74feadf835b7d2145095114b9cf3ba86c4cf1044f28f49b02cd6ddd0a5733", size = 261271 }, + { url = "https://files.pythonhosted.org/packages/58/e7/7147c75c383a975c58c33f8e7ee7dbbb0e7390fbcb1ecd321f63e4c73efd/mistralai-1.5.0-py3-none-any.whl", hash = "sha256:9372537719f87bd6f9feef4747d0bf1f4fbe971f8c02945ca4b4bf3c94571c97", size = 271559 }, ] [[package]] name = "mistune" -version = "3.1.0" +version = "3.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/79/6e/96fc7cb3288666c5de2c396eb0e338dc95f7a8e4920e43e38783a22d0084/mistune-3.1.0.tar.gz", hash = "sha256:dbcac2f78292b9dc066cd03b7a3a26b62d85f8159f2ea5fd28e55df79908d667", size = 94401 } +sdist = { url = "https://files.pythonhosted.org/packages/80/f7/f6d06304c61c2a73213c0a4815280f70d985429cda26272f490e42119c1a/mistune-3.1.2.tar.gz", hash = "sha256:733bf018ba007e8b5f2d3a9eb624034f6ee26c4ea769a98ec533ee111d504dff", size = 94613 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/b3/743ffc3f59da380da504d84ccd1faf9a857a1445991ff19bf2ec754163c2/mistune-3.1.0-py3-none-any.whl", hash = "sha256:b05198cf6d671b3deba6c87ec6cf0d4eb7b72c524636eddb6dbf13823b52cee1", size = 53694 }, + { url = "https://files.pythonhosted.org/packages/12/92/30b4e54c4d7c48c06db61595cffbbf4f19588ea177896f9b78f0fbe021fd/mistune-3.1.2-py3-none-any.whl", hash = "sha256:4b47731332315cdca99e0ded46fc0004001c1299ff773dfb48fbe1fd226de319", size = 53696 }, ] [[package]] @@ -2292,74 +2456,74 @@ wheels = [ [[package]] name = "mmh3" -version = "5.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e2/08/04ad6419f072ea3f51f9a0f429dd30f5f0a0b02ead7ca11a831117b6f9e8/mmh3-5.0.1.tar.gz", hash = "sha256:7dab080061aeb31a6069a181f27c473a1f67933854e36a3464931f2716508896", size = 32008 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/b9/9a91b0a0e330557cdbf51fc43ca0ba306633f2ec6d2b15e871e288592a32/mmh3-5.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f0a4b4bf05778ed77d820d6e7d0e9bd6beb0c01af10e1ce9233f5d2f814fcafa", size = 52867 }, - { url = "https://files.pythonhosted.org/packages/da/28/6b37f0d6707872764e1af49f327b0940b6a3ad995d91b3839b90ba35f559/mmh3-5.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac7a391039aeab95810c2d020b69a94eb6b4b37d4e2374831e92db3a0cdf71c6", size = 38352 }, - { url = "https://files.pythonhosted.org/packages/76/84/a98f59a620b522f218876a0630b02fc345ecf078f6393595756ddb3aa0b5/mmh3-5.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:3a2583b5521ca49756d8d8bceba80627a9cc295f255dcab4e3df7ccc2f09679a", size = 38214 }, - { url = "https://files.pythonhosted.org/packages/35/cb/4980c7eb6cd31f49d1913a4066562bc9e0af28526750f1232be9688a9cd4/mmh3-5.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:081a8423fe53c1ac94f87165f3e4c500125d343410c1a0c5f1703e898a3ef038", size = 93502 }, - { url = "https://files.pythonhosted.org/packages/65/f3/29726296fadeaf06134a6978f7c453dfa562cf2f0f1faf9ae28b9b8ef76e/mmh3-5.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8b4d72713799755dc8954a7d36d5c20a6c8de7b233c82404d122c7c7c1707cc", size = 98394 }, - { url = "https://files.pythonhosted.org/packages/35/fd/e181f4f4b250f7b63ee27a7d65e5e290a3ea0e26cc633f4bfd906f04558b/mmh3-5.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:389a6fd51efc76d3182d36ec306448559c1244f11227d2bb771bdd0e6cc91321", size = 98052 }, - { url = "https://files.pythonhosted.org/packages/61/5c/8a5d838da3eb3fb91035ef5eaaea469abab4e8e3fae55607c27a1a07d162/mmh3-5.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39f4128edaa074bff721b1d31a72508cba4d2887ee7867f22082e1fe9d4edea0", size = 86320 }, - { url = "https://files.pythonhosted.org/packages/10/80/3f33a8f4de12cea322607da1a84d001513affb741b3c3cc1277ecb85d34b/mmh3-5.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d5d23a94d91aabba3386b3769048d5f4210fdfef80393fece2f34ba5a7b466c", size = 93232 }, - { url = "https://files.pythonhosted.org/packages/9e/1c/d0ce5f498493be4de2e7e7596e1cbf63315a4c0bb8bb94e3c37c4fad965d/mmh3-5.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:16347d038361f8b8f24fd2b7ef378c9b68ddee9f7706e46269b6e0d322814713", size = 93590 }, - { url = "https://files.pythonhosted.org/packages/d9/66/770b5ad35b5a2eb7965f3fcaeaa76148e59543575d2e27b80690c1b0795c/mmh3-5.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6e299408565af7d61f2d20a5ffdd77cf2ed902460fe4e6726839d59ba4b72316", size = 88433 }, - { url = "https://files.pythonhosted.org/packages/14/58/e0d258b18749d8640233976493716a40aa27352dcb1cea941836357dac24/mmh3-5.0.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:42050af21ddfc5445ee5a66e73a8fc758c71790305e3ee9e4a85a8e69e810f94", size = 99339 }, - { url = "https://files.pythonhosted.org/packages/38/26/7267146122deb584cf377975b994d80c6d72c4c8d0e8eedff4d0cc5cd4c8/mmh3-5.0.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2ae9b1f5ef27ec54659920f0404b7ceb39966e28867c461bfe83a05e8d18ddb0", size = 93944 }, - { url = "https://files.pythonhosted.org/packages/8d/6b/df60b14a2dd383d8848f6f35496c86c7003be3ffb236789e98d002c542c6/mmh3-5.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:50c2495a02045f3047d71d4ae9cdd7a15efc0bcbb7ff17a18346834a8e2d1d19", size = 92798 }, - { url = "https://files.pythonhosted.org/packages/0a/3f/d5fecf13915163a15b449e5cc89232a4df90e836ecad1c38121318119d27/mmh3-5.0.1-cp310-cp310-win32.whl", hash = "sha256:c028fa77cddf351ca13b4a56d43c1775652cde0764cadb39120b68f02a23ecf6", size = 39185 }, - { url = "https://files.pythonhosted.org/packages/74/8e/4bb5ade332a87de633cda21dae09d6002d69601f2b93e9f40302ab2d9acf/mmh3-5.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c5e741e421ec14400c4aae30890515c201f518403bdef29ae1e00d375bb4bbb5", size = 39766 }, - { url = 
"https://files.pythonhosted.org/packages/16/2b/cd5cfa4d7ad40a37655af491f9270909d63fc27bcf0558ec36000ee5347f/mmh3-5.0.1-cp310-cp310-win_arm64.whl", hash = "sha256:b17156d56fabc73dbf41bca677ceb6faed435cc8544f6566d72ea77d8a17e9d0", size = 36540 }, - { url = "https://files.pythonhosted.org/packages/fb/8a/f3b9cf8b7110fef0f130158d7602af6f5b09f2cf568130814b7c92e2507b/mmh3-5.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a6d5a9b1b923f1643559ba1fc0bf7a5076c90cbb558878d3bf3641ce458f25d", size = 52867 }, - { url = "https://files.pythonhosted.org/packages/bf/06/f466e0da3c5bd6fbb1e047f70fd4e9e9563d0268aa56de511f363478dbf2/mmh3-5.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3349b968be555f7334bbcce839da98f50e1e80b1c615d8e2aa847ea4a964a012", size = 38349 }, - { url = "https://files.pythonhosted.org/packages/13/f0/2d3daca276a4673f82af859e4b0b18befd4e6e54f1017ba48ea9735b2f1b/mmh3-5.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1bd3c94b110e55db02ab9b605029f48a2f7f677c6e58c09d44e42402d438b7e1", size = 38211 }, - { url = "https://files.pythonhosted.org/packages/e3/56/a2d203ca97702d4e045ac1a46a608393da1a1dddb24f81de664dae940518/mmh3-5.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ba84d48608f79adbb10bb09986b6dc33eeda5c2d1bd75d00820081b73bde9", size = 95104 }, - { url = "https://files.pythonhosted.org/packages/ec/45/c7c8ae64e3ae024776a0ce5377c16c6741a3359f3e9505fc35fc5012beb2/mmh3-5.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0217987a8b8525c8d9170f66d036dec4ab45cfbd53d47e8d76125791ceb155e", size = 100049 }, - { url = "https://files.pythonhosted.org/packages/d5/74/681113776fe406c09870ab2152ffbd214a15bbc8f1d1da9ad73ce594b878/mmh3-5.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2797063a34e78d1b61639a98b0edec1c856fa86ab80c7ec859f1796d10ba429", size = 99671 }, - { url = "https://files.pythonhosted.org/packages/bf/4f/dbb8be18ce9b6ff8df14bc14348c0404b3091fb51df9c673ebfcf5877db3/mmh3-5.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8bba16340adcbd47853a2fbe5afdb397549e8f2e79324ff1dced69a3f8afe7c3", size = 87549 }, - { url = "https://files.pythonhosted.org/packages/5f/82/274d646f3f604c35b7e3d4eb7f3ff08b3bdc6a2c87d797709bb6f084a611/mmh3-5.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:282797957c9f60b51b9d768a602c25f579420cc9af46feb77d457a27823d270a", size = 94780 }, - { url = "https://files.pythonhosted.org/packages/c9/a1/f094ca8b8fb5e2ac53201070bda42b0fee80ceb92c153eb99a1453e3aed3/mmh3-5.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e4fb670c29e63f954f9e7a2cdcd57b36a854c2538f579ef62681ccbaa1de2b69", size = 90430 }, - { url = "https://files.pythonhosted.org/packages/d9/23/4732ba68c6ef7242b69bb53b9e1bcb2ef065d68ed85fd26e829fb911ab5a/mmh3-5.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ee7d85438dc6aff328e19ab052086a3c29e8a9b632998a49e5c4b0034e9e8d6", size = 89451 }, - { url = "https://files.pythonhosted.org/packages/3c/c5/daea5d534fcf20b2399c2a7b1cd00a8d29d4d474247c15c2c94548a1a272/mmh3-5.0.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b7fb5db231f3092444bc13901e6a8d299667126b00636ffbad4a7b45e1051e2f", size = 94703 }, - { url = "https://files.pythonhosted.org/packages/5e/4a/34d5691e7be7c63c34181387bc69bdcc0005ca93c8b562d68cb5775e0e78/mmh3-5.0.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:c100dd441703da5ec136b1d9003ed4a041d8a1136234c9acd887499796df6ad8", size = 91054 }, - { url = "https://files.pythonhosted.org/packages/5c/3a/ab31bb5e9e1a19a4a997593cbe6ce56710308218ff36c7f76d40ff9c8d2e/mmh3-5.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71f3b765138260fd7a7a2dba0ea5727dabcd18c1f80323c9cfef97a7e86e01d0", size = 89571 }, - { url = "https://files.pythonhosted.org/packages/0b/79/b986bb067dbfcba6879afe6e723aad1bd53f223450532dd9a4606d0af389/mmh3-5.0.1-cp311-cp311-win32.whl", hash = "sha256:9a76518336247fd17689ce3ae5b16883fd86a490947d46a0193d47fb913e26e3", size = 39187 }, - { url = "https://files.pythonhosted.org/packages/48/69/97029eda3df0f84edde16a496a2e71bac508fc5d1f0a31e163da071e2670/mmh3-5.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:336bc4df2e44271f1c302d289cc3d78bd52d3eed8d306c7e4bff8361a12bf148", size = 39766 }, - { url = "https://files.pythonhosted.org/packages/c7/51/538f2b8412303281d8ce2a9a5c4ea84ff81f06de98af0b7c72059727a3bb/mmh3-5.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:af6522722fbbc5999aa66f7244d0986767a46f1fb05accc5200f75b72428a508", size = 36540 }, - { url = "https://files.pythonhosted.org/packages/75/c7/5b52d0882e7c0dccfaf8786a648e2b26c5307c594abe5cbe98c092607c97/mmh3-5.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f2730bb263ed9c388e8860438b057a53e3cc701134a6ea140f90443c4c11aa40", size = 52907 }, - { url = "https://files.pythonhosted.org/packages/01/b5/9609fa353c27188292748db033323c206f3fc6fbfa124bccf6a42af0da08/mmh3-5.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6246927bc293f6d56724536400b85fb85f5be26101fa77d5f97dd5e2a4c69bf2", size = 38389 }, - { url = "https://files.pythonhosted.org/packages/33/99/49bf3c86244857b3b250c2f54aff22a5a78ef12258af556fa39bb1e80699/mmh3-5.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fbca322519a6e6e25b6abf43e940e1667cf8ea12510e07fb4919b48a0cd1c411", size = 38204 }, - { url = "https://files.pythonhosted.org/packages/f8/04/8860cab35b48aaefe40cf88344437e79ddc93cf7ff745dacd1cd56a2be1e/mmh3-5.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae8c19903ed8a1724ad9e67e86f15d198a7a1271a4f9be83d47e38f312ed672", size = 95091 }, - { url = "https://files.pythonhosted.org/packages/fa/e9/4ac56001a5bab6d26aa3dfabeddea6d7f037fd2972c76803259f51a5af75/mmh3-5.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a09fd6cc72c07c0c07c3357714234b646d78052487c4a3bd5f7f6e08408cff60", size = 100055 }, - { url = "https://files.pythonhosted.org/packages/18/e8/7d5fd73f559c423ed5b72f940130c27803a406ee0ffc32ef5422f733df67/mmh3-5.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ff8551fee7ae3b11c5d986b6347ade0dccaadd4670ffdb2b944dee120ffcc84", size = 99764 }, - { url = "https://files.pythonhosted.org/packages/54/d8/c0d89da6c729feec997a9b3b68698894cef12359ade0da95eba9e03b1d5d/mmh3-5.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e39694c73a5a20c8bf36dfd8676ed351e5234d55751ba4f7562d85449b21ef3f", size = 87650 }, - { url = "https://files.pythonhosted.org/packages/dd/41/ec0ee3fd5124c83cb767dcea8569bb326f8981cc88c991e3e4e948a31e24/mmh3-5.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eba6001989a92f72a89c7cf382fda831678bd780707a66b4f8ca90239fdf2123", size = 94976 }, - { url = 
"https://files.pythonhosted.org/packages/8e/fa/e8059199fe6fbb2fd6494302904cb1209b2f8b6899d58059858a280e89a5/mmh3-5.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0771f90c9911811cc606a5c7b7b58f33501c9ee896ed68a6ac22c7d55878ecc0", size = 90485 }, - { url = "https://files.pythonhosted.org/packages/3a/a0/eb9da5f93dea3f44b8e970f013279d1543ab210ccf63bb030830968682aa/mmh3-5.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:09b31ed0c0c0920363e96641fac4efde65b1ab62b8df86293142f35a254e72b4", size = 89554 }, - { url = "https://files.pythonhosted.org/packages/e7/e8/5803181eac4e015b4caf307af22fea74292dca48e580d93afe402dcdc138/mmh3-5.0.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5cf4a8deda0235312db12075331cb417c4ba163770edfe789bde71d08a24b692", size = 94872 }, - { url = "https://files.pythonhosted.org/packages/ed/f9/4d55063f9dcaed41524f078a85989efdf1d335159af5e70af29942ebae67/mmh3-5.0.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:41f7090a95185ef20ac018581a99337f0cbc84a2135171ee3290a9c0d9519585", size = 91326 }, - { url = "https://files.pythonhosted.org/packages/80/75/0a5acab5291480acd939db80e94448ac937fc7fbfddc0a67b3e721ebfc9c/mmh3-5.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b97b5b368fb7ff22194ec5854f5b12d8de9ab67a0f304728c7f16e5d12135b76", size = 89810 }, - { url = "https://files.pythonhosted.org/packages/9b/fd/eb1a3573cda74d4c2381d10ded62c128e869954ced1881c15e2bcd97a48f/mmh3-5.0.1-cp312-cp312-win32.whl", hash = "sha256:842516acf04da546f94fad52db125ee619ccbdcada179da51c326a22c4578cb9", size = 39206 }, - { url = "https://files.pythonhosted.org/packages/66/e8/542ed252924002b84c43a68a080cfd4facbea0d5df361e4f59637638d3c7/mmh3-5.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:d963be0dbfd9fca209c17172f6110787ebf78934af25e3694fe2ba40e55c1e2b", size = 39799 }, - { url = "https://files.pythonhosted.org/packages/bd/25/ff2cd36c82a23afa57a05cdb52ab467a911fb12c055c8a8238c0d426cbf0/mmh3-5.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:a5da292ceeed8ce8e32b68847261a462d30fd7b478c3f55daae841404f433c15", size = 36537 }, - { url = "https://files.pythonhosted.org/packages/09/e0/fb19c46265c18311b422ba5ce3e18046ad45c48cfb213fd6dbec23ae6b51/mmh3-5.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:673e3f1c8d4231d6fb0271484ee34cb7146a6499fc0df80788adb56fd76842da", size = 52909 }, - { url = "https://files.pythonhosted.org/packages/c3/94/54fc591e7a24c7ce2c531ecfc5715cff932f9d320c2936550cc33d67304d/mmh3-5.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f795a306bd16a52ad578b663462cc8e95500b3925d64118ae63453485d67282b", size = 38396 }, - { url = "https://files.pythonhosted.org/packages/1f/9a/142bcc9d0d28fc8ae45bbfb83926adc069f984cdf3495a71534cc22b8e27/mmh3-5.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5ed57a5e28e502a1d60436cc25c76c3a5ba57545f250f2969af231dc1221e0a5", size = 38207 }, - { url = "https://files.pythonhosted.org/packages/f8/5b/f1c9110aa70321bb1ee713f17851b9534586c63bc25e0110e4fc03ae2450/mmh3-5.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:632c28e7612e909dbb6cbe2fe496201ada4695b7715584005689c5dc038e59ad", size = 94988 }, - { url = "https://files.pythonhosted.org/packages/87/e5/4dc67e7e0e716c641ab0a5875a659e37258417439590feff5c3bd3ff4538/mmh3-5.0.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53fd6bd525a5985e391c43384672d9d6b317fcb36726447347c7fc75bfed34ec", size = 99969 }, - { url = 
"https://files.pythonhosted.org/packages/ac/68/d148327337687c53f04ad9ceaedfa9ad155ee0111d0cb06220f044d66720/mmh3-5.0.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dceacf6b0b961a0e499836af3aa62d60633265607aef551b2a3e3c48cdaa5edd", size = 99662 }, - { url = "https://files.pythonhosted.org/packages/13/79/782adb6df6397947c1097b1e94b7f8d95629a4a73df05cf7207bd5148c1f/mmh3-5.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f0738d478fdfb5d920f6aff5452c78f2c35b0eff72caa2a97dfe38e82f93da2", size = 87606 }, - { url = "https://files.pythonhosted.org/packages/f2/c2/0404383281df049d0e4ccf07fabd659fc1f3da834df6708d934116cbf45d/mmh3-5.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e70285e7391ab88b872e5bef632bad16b9d99a6d3ca0590656a4753d55988af", size = 94836 }, - { url = "https://files.pythonhosted.org/packages/c8/33/fda67c5f28e4c2131891cf8cbc3513cfc55881e3cfe26e49328e38ffacb3/mmh3-5.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:27e5fc6360aa6b828546a4318da1a7da6bf6e5474ccb053c3a6aa8ef19ff97bd", size = 90492 }, - { url = "https://files.pythonhosted.org/packages/64/2f/0ed38aefe2a87f30bb1b12e5b75dc69fcffdc16def40d1752d6fc7cbbf96/mmh3-5.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7989530c3c1e2c17bf5a0ec2bba09fd19819078ba90beedabb1c3885f5040b0d", size = 89594 }, - { url = "https://files.pythonhosted.org/packages/95/ab/6e7a5e765fc78e3dbd0a04a04cfdf72e91eb8e31976228e69d82c741a5b4/mmh3-5.0.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:cdad7bee649950da7ecd3cbbbd12fb81f1161072ecbdb5acfa0018338c5cb9cf", size = 94929 }, - { url = "https://files.pythonhosted.org/packages/74/51/f748f00c072006f4a093d9b08853a0e2e3cd5aeaa91343d4e2d942851978/mmh3-5.0.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e143b8f184c1bb58cecd85ab4a4fd6dc65a2d71aee74157392c3fddac2a4a331", size = 91317 }, - { url = "https://files.pythonhosted.org/packages/df/a1/21ee8017a7feb0270c49f756ff56da9f99bd150dcfe3b3f6f0d4b243423d/mmh3-5.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5eb12e886f3646dd636f16b76eb23fc0c27e8ff3c1ae73d4391e50ef60b40f6", size = 89861 }, - { url = "https://files.pythonhosted.org/packages/c2/d2/46a6d070de4659bdf91cd6a62d659f8cc547dadee52b6d02bcbacb3262ed/mmh3-5.0.1-cp313-cp313-win32.whl", hash = "sha256:16e6dddfa98e1c2d021268e72c78951234186deb4df6630e984ac82df63d0a5d", size = 39201 }, - { url = "https://files.pythonhosted.org/packages/ed/07/316c062f09019b99b248a4183c5333f8eeebe638345484774908a8f2c9c0/mmh3-5.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:d3ffb792d70b8c4a2382af3598dad6ae0c5bd9cee5b7ffcc99aa2f5fd2c1bf70", size = 39807 }, - { url = "https://files.pythonhosted.org/packages/9d/d3/f7e6d7d062b8d7072c3989a528d9d47486ee5d5ae75250f6e26b4976d098/mmh3-5.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:122fa9ec148383f9124292962bda745f192b47bfd470b2af5fe7bb3982b17896", size = 36539 }, +version = "5.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/1b/1fc6888c74cbd8abad1292dde2ddfcf8fc059e114c97dd6bf16d12f36293/mmh3-5.1.0.tar.gz", hash = "sha256:136e1e670500f177f49ec106a4ebf0adf20d18d96990cc36ea492c651d2b406c", size = 33728 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/01/9d06468928661765c0fc248a29580c760a4a53a9c6c52cf72528bae3582e/mmh3-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:eaf4ac5c6ee18ca9232238364d7f2a213278ae5ca97897cafaa123fcc7bb8bec", size = 56095 }, + { url = "https://files.pythonhosted.org/packages/e4/d7/7b39307fc9db867b2a9a20c58b0de33b778dd6c55e116af8ea031f1433ba/mmh3-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:48f9aa8ccb9ad1d577a16104834ac44ff640d8de8c0caed09a2300df7ce8460a", size = 40512 }, + { url = "https://files.pythonhosted.org/packages/4f/85/728ca68280d8ccc60c113ad119df70ff1748fbd44c89911fed0501faf0b8/mmh3-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d4ba8cac21e1f2d4e436ce03a82a7f87cda80378691f760e9ea55045ec480a3d", size = 40110 }, + { url = "https://files.pythonhosted.org/packages/e4/96/beaf0e301472ffa00358bbbf771fe2d9c4d709a2fe30b1d929e569f8cbdf/mmh3-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d69281c281cb01994f054d862a6bb02a2e7acfe64917795c58934b0872b9ece4", size = 100151 }, + { url = "https://files.pythonhosted.org/packages/c3/ee/9381f825c4e09ffafeffa213c3865c4bf7d39771640de33ab16f6faeb854/mmh3-5.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d05ed3962312fbda2a1589b97359d2467f677166952f6bd410d8c916a55febf", size = 106312 }, + { url = "https://files.pythonhosted.org/packages/67/dc/350a54bea5cf397d357534198ab8119cfd0d8e8bad623b520f9c290af985/mmh3-5.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78ae6a03f4cff4aa92ddd690611168856f8c33a141bd3e5a1e0a85521dc21ea0", size = 104232 }, + { url = "https://files.pythonhosted.org/packages/b2/5d/2c6eb4a4ec2f7293b98a9c07cb8c64668330b46ff2b6511244339e69a7af/mmh3-5.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95f983535b39795d9fb7336438faae117424c6798f763d67c6624f6caf2c4c01", size = 91663 }, + { url = "https://files.pythonhosted.org/packages/f1/ac/17030d24196f73ecbab8b5033591e5e0e2beca103181a843a135c78f4fee/mmh3-5.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d46fdd80d4c7ecadd9faa6181e92ccc6fe91c50991c9af0e371fdf8b8a7a6150", size = 99166 }, + { url = "https://files.pythonhosted.org/packages/b9/ed/54ddc56603561a10b33da9b12e95a48a271d126f4a4951841bbd13145ebf/mmh3-5.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0f16e976af7365ea3b5c425124b2a7f0147eed97fdbb36d99857f173c8d8e096", size = 101555 }, + { url = "https://files.pythonhosted.org/packages/1c/c3/33fb3a940c9b70908a5cc9fcc26534aff8698180f9f63ab6b7cc74da8bcd/mmh3-5.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6fa97f7d1e1f74ad1565127229d510f3fd65d931fdedd707c1e15100bc9e5ebb", size = 94813 }, + { url = "https://files.pythonhosted.org/packages/61/88/c9ff76a23abe34db8eee1a6fa4e449462a16c7eb547546fc5594b0860a72/mmh3-5.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4052fa4a8561bd62648e9eb993c8f3af3bdedadf3d9687aa4770d10e3709a80c", size = 109611 }, + { url = "https://files.pythonhosted.org/packages/0b/8e/27d04f40e95554ebe782cac7bddda2d158cf3862387298c9c7b254fa7beb/mmh3-5.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:3f0e8ae9f961037f812afe3cce7da57abf734285961fffbeff9a4c011b737732", size = 100515 }, + { url = "https://files.pythonhosted.org/packages/7b/00/504ca8f462f01048f3c87cd93f2e1f60b93dac2f930cd4ed73532a9337f5/mmh3-5.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:99297f207db967814f1f02135bb7fe7628b9eacb046134a34e1015b26b06edce", size = 100177 }, + { url = 
"https://files.pythonhosted.org/packages/6f/1d/2efc3525fe6fdf8865972fcbb884bd1f4b0f923c19b80891cecf7e239fa5/mmh3-5.1.0-cp310-cp310-win32.whl", hash = "sha256:2e6c8dc3631a5e22007fbdb55e993b2dbce7985c14b25b572dd78403c2e79182", size = 40815 }, + { url = "https://files.pythonhosted.org/packages/38/b5/c8fbe707cb0fea77a6d2d58d497bc9b67aff80deb84d20feb34d8fdd8671/mmh3-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:e4e8c7ad5a4dddcfde35fd28ef96744c1ee0f9d9570108aa5f7e77cf9cfdf0bf", size = 41479 }, + { url = "https://files.pythonhosted.org/packages/a1/f1/663e16134f913fccfbcea5b300fb7dc1860d8f63dc71867b013eebc10aec/mmh3-5.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:45da549269883208912868a07d0364e1418d8292c4259ca11699ba1b2475bd26", size = 38883 }, + { url = "https://files.pythonhosted.org/packages/56/09/fda7af7fe65928262098382e3bf55950cfbf67d30bf9e47731bf862161e9/mmh3-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b529dcda3f951ff363a51d5866bc6d63cf57f1e73e8961f864ae5010647079d", size = 56098 }, + { url = "https://files.pythonhosted.org/packages/0c/ab/84c7bc3f366d6f3bd8b5d9325a10c367685bc17c26dac4c068e2001a4671/mmh3-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db1079b3ace965e562cdfc95847312f9273eb2ad3ebea983435c8423e06acd7", size = 40513 }, + { url = "https://files.pythonhosted.org/packages/4f/21/25ea58ca4a652bdc83d1528bec31745cce35802381fb4fe3c097905462d2/mmh3-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:22d31e3a0ff89b8eb3b826d6fc8e19532998b2aa6b9143698043a1268da413e1", size = 40112 }, + { url = "https://files.pythonhosted.org/packages/bd/78/4f12f16ae074ddda6f06745254fdb50f8cf3c85b0bbf7eaca58bed84bf58/mmh3-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2139bfbd354cd6cb0afed51c4b504f29bcd687a3b1460b7e89498329cc28a894", size = 102632 }, + { url = "https://files.pythonhosted.org/packages/48/11/8f09dc999cf2a09b6138d8d7fc734efb7b7bfdd9adb9383380941caadff0/mmh3-5.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c8105c6a435bc2cd6ea2ef59558ab1a2976fd4a4437026f562856d08996673a", size = 108884 }, + { url = "https://files.pythonhosted.org/packages/bd/91/e59a66538a3364176f6c3f7620eee0ab195bfe26f89a95cbcc7a1fb04b28/mmh3-5.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57730067174a7f36fcd6ce012fe359bd5510fdaa5fe067bc94ed03e65dafb769", size = 106835 }, + { url = "https://files.pythonhosted.org/packages/25/14/b85836e21ab90e5cddb85fe79c494ebd8f81d96a87a664c488cc9277668b/mmh3-5.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde80eb196d7fdc765a318604ded74a4378f02c5b46c17aa48a27d742edaded2", size = 93688 }, + { url = "https://files.pythonhosted.org/packages/ac/aa/8bc964067df9262740c95e4cde2d19f149f2224f426654e14199a9e47df6/mmh3-5.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9c8eddcb441abddeb419c16c56fd74b3e2df9e57f7aa2903221996718435c7a", size = 101569 }, + { url = "https://files.pythonhosted.org/packages/70/b6/1fb163cbf919046a64717466c00edabebece3f95c013853fec76dbf2df92/mmh3-5.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:99e07e4acafbccc7a28c076a847fb060ffc1406036bc2005acb1b2af620e53c3", size = 98483 }, + { url = "https://files.pythonhosted.org/packages/70/49/ba64c050dd646060f835f1db6b2cd60a6485f3b0ea04976e7a29ace7312e/mmh3-5.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:9e25ba5b530e9a7d65f41a08d48f4b3fedc1e89c26486361166a5544aa4cad33", size = 96496 }, + { url = "https://files.pythonhosted.org/packages/9e/07/f2751d6a0b535bb865e1066e9c6b80852571ef8d61bce7eb44c18720fbfc/mmh3-5.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:bb9bf7475b4d99156ce2f0cf277c061a17560c8c10199c910a680869a278ddc7", size = 105109 }, + { url = "https://files.pythonhosted.org/packages/b7/02/30360a5a66f7abba44596d747cc1e6fb53136b168eaa335f63454ab7bb79/mmh3-5.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a1b0878dd281ea3003368ab53ff6f568e175f1b39f281df1da319e58a19c23a", size = 98231 }, + { url = "https://files.pythonhosted.org/packages/8c/60/8526b0c750ff4d7ae1266e68b795f14b97758a1d9fcc19f6ecabf9c55656/mmh3-5.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:25f565093ac8b8aefe0f61f8f95c9a9d11dd69e6a9e9832ff0d293511bc36258", size = 97548 }, + { url = "https://files.pythonhosted.org/packages/6d/4c/26e1222aca65769280d5427a1ce5875ef4213449718c8f03958d0bf91070/mmh3-5.1.0-cp311-cp311-win32.whl", hash = "sha256:1e3554d8792387eac73c99c6eaea0b3f884e7130eb67986e11c403e4f9b6d372", size = 40810 }, + { url = "https://files.pythonhosted.org/packages/98/d5/424ba95062d1212ea615dc8debc8d57983f2242d5e6b82e458b89a117a1e/mmh3-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8ad777a48197882492af50bf3098085424993ce850bdda406a358b6ab74be759", size = 41476 }, + { url = "https://files.pythonhosted.org/packages/bd/08/0315ccaf087ba55bb19a6dd3b1e8acd491e74ce7f5f9c4aaa06a90d66441/mmh3-5.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f29dc4efd99bdd29fe85ed6c81915b17b2ef2cf853abf7213a48ac6fb3eaabe1", size = 38880 }, + { url = "https://files.pythonhosted.org/packages/f4/47/e5f452bdf16028bfd2edb4e2e35d0441e4a4740f30e68ccd4cfd2fb2c57e/mmh3-5.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:45712987367cb9235026e3cbf4334670522a97751abfd00b5bc8bfa022c3311d", size = 56152 }, + { url = "https://files.pythonhosted.org/packages/60/38/2132d537dc7a7fdd8d2e98df90186c7fcdbd3f14f95502a24ba443c92245/mmh3-5.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b1020735eb35086ab24affbea59bb9082f7f6a0ad517cb89f0fc14f16cea4dae", size = 40564 }, + { url = "https://files.pythonhosted.org/packages/c0/2a/c52cf000581bfb8d94794f58865658e7accf2fa2e90789269d4ae9560b16/mmh3-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:babf2a78ce5513d120c358722a2e3aa7762d6071cd10cede026f8b32452be322", size = 40104 }, + { url = "https://files.pythonhosted.org/packages/83/33/30d163ce538c54fc98258db5621447e3ab208d133cece5d2577cf913e708/mmh3-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4f47f58cd5cbef968c84a7c1ddc192fef0a36b48b0b8a3cb67354531aa33b00", size = 102634 }, + { url = "https://files.pythonhosted.org/packages/94/5c/5a18acb6ecc6852be2d215c3d811aa61d7e425ab6596be940877355d7f3e/mmh3-5.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2044a601c113c981f2c1e14fa33adc9b826c9017034fe193e9eb49a6882dbb06", size = 108888 }, + { url = "https://files.pythonhosted.org/packages/1f/f6/11c556324c64a92aa12f28e221a727b6e082e426dc502e81f77056f6fc98/mmh3-5.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94d999c9f2eb2da44d7c2826d3fbffdbbbbcde8488d353fee7c848ecc42b968", size = 106968 }, + { url = "https://files.pythonhosted.org/packages/5d/61/ca0c196a685aba7808a5c00246f17b988a9c4f55c594ee0a02c273e404f3/mmh3-5.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a015dcb24fa0c7a78f88e9419ac74f5001c1ed6a92e70fd1803f74afb26a4c83", size = 93771 }, + { url = "https://files.pythonhosted.org/packages/b4/55/0927c33528710085ee77b808d85bbbafdb91a1db7c8eaa89cac16d6c513e/mmh3-5.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:457da019c491a2d20e2022c7d4ce723675e4c081d9efc3b4d8b9f28a5ea789bd", size = 101726 }, + { url = "https://files.pythonhosted.org/packages/49/39/a92c60329fa470f41c18614a93c6cd88821412a12ee78c71c3f77e1cfc2d/mmh3-5.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71408579a570193a4ac9c77344d68ddefa440b00468a0b566dcc2ba282a9c559", size = 98523 }, + { url = "https://files.pythonhosted.org/packages/81/90/26adb15345af8d9cf433ae1b6adcf12e0a4cad1e692de4fa9f8e8536c5ae/mmh3-5.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8b3a04bc214a6e16c81f02f855e285c6df274a2084787eeafaa45f2fbdef1b63", size = 96628 }, + { url = "https://files.pythonhosted.org/packages/8a/4d/340d1e340df972a13fd4ec84c787367f425371720a1044220869c82364e9/mmh3-5.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:832dae26a35514f6d3c1e267fa48e8de3c7b978afdafa0529c808ad72e13ada3", size = 105190 }, + { url = "https://files.pythonhosted.org/packages/d3/7c/65047d1cccd3782d809936db446430fc7758bda9def5b0979887e08302a2/mmh3-5.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bf658a61fc92ef8a48945ebb1076ef4ad74269e353fffcb642dfa0890b13673b", size = 98439 }, + { url = "https://files.pythonhosted.org/packages/72/d2/3c259d43097c30f062050f7e861075099404e8886b5d4dd3cebf180d6e02/mmh3-5.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3313577453582b03383731b66447cdcdd28a68f78df28f10d275d7d19010c1df", size = 97780 }, + { url = "https://files.pythonhosted.org/packages/29/29/831ea8d4abe96cdb3e28b79eab49cac7f04f9c6b6e36bfc686197ddba09d/mmh3-5.1.0-cp312-cp312-win32.whl", hash = "sha256:1d6508504c531ab86c4424b5a5ff07c1132d063863339cf92f6657ff7a580f76", size = 40835 }, + { url = "https://files.pythonhosted.org/packages/12/dd/7cbc30153b73f08eeac43804c1dbc770538a01979b4094edbe1a4b8eb551/mmh3-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:aa75981fcdf3f21759d94f2c81b6a6e04a49dfbcdad88b152ba49b8e20544776", size = 41509 }, + { url = "https://files.pythonhosted.org/packages/80/9d/627375bab4c90dd066093fc2c9a26b86f87e26d980dbf71667b44cbee3eb/mmh3-5.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4c1a76808dfea47f7407a0b07aaff9087447ef6280716fd0783409b3088bb3c", size = 38888 }, + { url = "https://files.pythonhosted.org/packages/05/06/a098a42870db16c0a54a82c56a5bdc873de3165218cd5b3ca59dbc0d31a7/mmh3-5.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a523899ca29cfb8a5239618474a435f3d892b22004b91779fcb83504c0d5b8c", size = 56165 }, + { url = "https://files.pythonhosted.org/packages/5a/65/eaada79a67fde1f43e1156d9630e2fb70655e1d3f4e8f33d7ffa31eeacfd/mmh3-5.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:17cef2c3a6ca2391ca7171a35ed574b5dab8398163129a3e3a4c05ab85a4ff40", size = 40569 }, + { url = "https://files.pythonhosted.org/packages/36/7e/2b6c43ed48be583acd68e34d16f19209a9f210e4669421b0321e326d8554/mmh3-5.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:52e12895b30110f3d89dae59a888683cc886ed0472dd2eca77497edef6161997", size = 40104 }, + { url = "https://files.pythonhosted.org/packages/11/2b/1f9e962fdde8e41b0f43d22c8ba719588de8952f9376df7d73a434827590/mmh3-5.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e0d6719045cda75c3f40397fc24ab67b18e0cb8f69d3429ab4c39763c4c608dd", size = 102497 }, + { url = "https://files.pythonhosted.org/packages/46/94/d6c5c3465387ba077cccdc028ab3eec0d86eed1eebe60dcf4d15294056be/mmh3-5.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d19fa07d303a91f8858982c37e6939834cb11893cb3ff20e6ee6fa2a7563826a", size = 108834 }, + { url = "https://files.pythonhosted.org/packages/34/1e/92c212bb81796b69dddfd50a8a8f4b26ab0d38fdaf1d3e8628a67850543b/mmh3-5.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31b47a620d622fbde8ca1ca0435c5d25de0ac57ab507209245e918128e38e676", size = 106936 }, + { url = "https://files.pythonhosted.org/packages/f4/41/f2f494bbff3aad5ffd2085506255049de76cde51ddac84058e32768acc79/mmh3-5.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00f810647c22c179b6821079f7aa306d51953ac893587ee09cf1afb35adf87cb", size = 93709 }, + { url = "https://files.pythonhosted.org/packages/9e/a9/a2cc4a756d73d9edf4fb85c76e16fd56b0300f8120fd760c76b28f457730/mmh3-5.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6128b610b577eed1e89ac7177ab0c33d06ade2aba93f5c89306032306b5f1c6", size = 101623 }, + { url = "https://files.pythonhosted.org/packages/5e/6f/b9d735533b6a56b2d56333ff89be6a55ac08ba7ff33465feb131992e33eb/mmh3-5.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1e550a45d2ff87a1c11b42015107f1778c93f4c6f8e731bf1b8fa770321b8cc4", size = 98521 }, + { url = "https://files.pythonhosted.org/packages/99/47/dff2b54fac0d421c1e6ecbd2d9c85b2d0e6f6ee0d10b115d9364116a511e/mmh3-5.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:785ae09276342f79fd8092633e2d52c0f7c44d56e8cfda8274ccc9b76612dba2", size = 96696 }, + { url = "https://files.pythonhosted.org/packages/be/43/9e205310f47c43ddf1575bb3a1769c36688f30f1ac105e0f0c878a29d2cd/mmh3-5.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0f4be3703a867ef976434afd3661a33884abe73ceb4ee436cac49d3b4c2aaa7b", size = 105234 }, + { url = "https://files.pythonhosted.org/packages/6b/44/90b11fd2b67dcb513f5bfe9b476eb6ca2d5a221c79b49884dc859100905e/mmh3-5.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e513983830c4ff1f205ab97152a0050cf7164f1b4783d702256d39c637b9d107", size = 98449 }, + { url = "https://files.pythonhosted.org/packages/f0/d0/25c4b0c7b8e49836541059b28e034a4cccd0936202800d43a1cc48495ecb/mmh3-5.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9135c300535c828c0bae311b659f33a31c941572eae278568d1a953c4a57b59", size = 97796 }, + { url = "https://files.pythonhosted.org/packages/23/fa/cbbb7fcd0e287a715f1cd28a10de94c0535bd94164e38b852abc18da28c6/mmh3-5.1.0-cp313-cp313-win32.whl", hash = "sha256:c65dbd12885a5598b70140d24de5839551af5a99b29f9804bb2484b29ef07692", size = 40828 }, + { url = "https://files.pythonhosted.org/packages/09/33/9fb90ef822f7b734955a63851907cf72f8a3f9d8eb3c5706bfa6772a2a77/mmh3-5.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:10db7765201fc65003fa998faa067417ef6283eb5f9bba8f323c48fd9c33e91f", size = 41504 }, + { url = "https://files.pythonhosted.org/packages/16/71/4ad9a42f2772793a03cb698f0fc42499f04e6e8d2560ba2f7da0fb059a8e/mmh3-5.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:b22fe2e54be81f6c07dcb36b96fa250fb72effe08aa52fbb83eade6e1e2d5fd7", size = 38890 }, ] [[package]] @@ -2382,14 +2546,14 @@ wheels = [ [[package]] name = "motor" -version = "3.6.1" +version = "3.7.0" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "pymongo", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c3/5d/be1f10b4ecc259503bcc9c5642a61b39715796343c771f3f61b84f79ee21/motor-3.6.1.tar.gz", hash = "sha256:ee2b18386292f9ceb3cc8279a4cd34e4c641c5ac8de3500c30374081c76a9d03", size = 279031 } +sdist = { url = "https://files.pythonhosted.org/packages/2b/c0/b94558a88fb8406b092bb180c6fa5fb3068f8ec2c7e84dd2b0625f4f4f6e/motor-3.7.0.tar.gz", hash = "sha256:0dfa1f12c812bd90819c519b78bed626b5a9dbb29bba079ccff2bfa8627e0fec", size = 279745 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/aa/a255c74c702477a8610fa24fb83af45ce1a2dd0bce7bca4e2230e2c9a23b/motor-3.6.1-py3-none-any.whl", hash = "sha256:7fe552353aded4fa9f05ae515a179df5b1d192b1da56726f422dbb2d8c3b5962", size = 74813 }, + { url = "https://files.pythonhosted.org/packages/ab/a6/e915e3225cc431c7ff07fd3e5ae138f6eb1c3ef4f8e8356cab1ea5dc1ed5/motor-3.7.0-py3-none-any.whl", hash = "sha256:61bdf1afded179f008d423f98066348157686f25a90776ea155db5f47f57d605", size = 74811 }, ] [[package]] @@ -2502,40 +2666,40 @@ wheels = [ [[package]] name = "mypy" -version = "1.14.1" +version = "1.15.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mypy-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b9/eb/2c92d8ea1e684440f54fa49ac5d9a5f19967b7b472a281f419e69a8d228e/mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6", size = 3216051 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/7a/87ae2adb31d68402da6da1e5f30c07ea6063e9f09b5e7cfc9dfa44075e74/mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb", size = 11211002 }, - { url = "https://files.pythonhosted.org/packages/e1/23/eada4c38608b444618a132be0d199b280049ded278b24cbb9d3fc59658e4/mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0", size = 10358400 }, - { url = "https://files.pythonhosted.org/packages/43/c9/d6785c6f66241c62fd2992b05057f404237deaad1566545e9f144ced07f5/mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d", size = 12095172 }, - { url = "https://files.pythonhosted.org/packages/c3/62/daa7e787770c83c52ce2aaf1a111eae5893de9e004743f51bfcad9e487ec/mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b", size = 12828732 }, - { url = "https://files.pythonhosted.org/packages/1b/a2/5fb18318a3637f29f16f4e41340b795da14f4751ef4f51c99ff39ab62e52/mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427", size = 13012197 }, - { url = 
"https://files.pythonhosted.org/packages/28/99/e153ce39105d164b5f02c06c35c7ba958aaff50a2babba7d080988b03fe7/mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f", size = 9780836 }, - { url = "https://files.pythonhosted.org/packages/da/11/a9422850fd506edbcdc7f6090682ecceaf1f87b9dd847f9df79942da8506/mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c", size = 11120432 }, - { url = "https://files.pythonhosted.org/packages/b6/9e/47e450fd39078d9c02d620545b2cb37993a8a8bdf7db3652ace2f80521ca/mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1", size = 10279515 }, - { url = "https://files.pythonhosted.org/packages/01/b5/6c8d33bd0f851a7692a8bfe4ee75eb82b6983a3cf39e5e32a5d2a723f0c1/mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8", size = 12025791 }, - { url = "https://files.pythonhosted.org/packages/f0/4c/e10e2c46ea37cab5c471d0ddaaa9a434dc1d28650078ac1b56c2d7b9b2e4/mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f", size = 12749203 }, - { url = "https://files.pythonhosted.org/packages/88/55/beacb0c69beab2153a0f57671ec07861d27d735a0faff135a494cd4f5020/mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1", size = 12885900 }, - { url = "https://files.pythonhosted.org/packages/a2/75/8c93ff7f315c4d086a2dfcde02f713004357d70a163eddb6c56a6a5eff40/mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae", size = 9777869 }, - { url = "https://files.pythonhosted.org/packages/43/1b/b38c079609bb4627905b74fc6a49849835acf68547ac33d8ceb707de5f52/mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14", size = 11266668 }, - { url = "https://files.pythonhosted.org/packages/6b/75/2ed0d2964c1ffc9971c729f7a544e9cd34b2cdabbe2d11afd148d7838aa2/mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9", size = 10254060 }, - { url = "https://files.pythonhosted.org/packages/a1/5f/7b8051552d4da3c51bbe8fcafffd76a6823779101a2b198d80886cd8f08e/mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11", size = 11933167 }, - { url = "https://files.pythonhosted.org/packages/04/90/f53971d3ac39d8b68bbaab9a4c6c58c8caa4d5fd3d587d16f5927eeeabe1/mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e", size = 12864341 }, - { url = "https://files.pythonhosted.org/packages/03/d2/8bc0aeaaf2e88c977db41583559319f1821c069e943ada2701e86d0430b7/mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89", size = 12972991 }, - { url = "https://files.pythonhosted.org/packages/6f/17/07815114b903b49b0f2cf7499f1c130e5aa459411596668267535fe9243c/mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b", size = 9879016 }, - { url = "https://files.pythonhosted.org/packages/9e/15/bb6a686901f59222275ab228453de741185f9d54fecbaacec041679496c6/mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255", size = 11252097 }, - { url = "https://files.pythonhosted.org/packages/f8/b3/8b0f74dfd072c802b7fa368829defdf3ee1566ba74c32a2cb2403f68024c/mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34", size = 10239728 }, - { url = "https://files.pythonhosted.org/packages/c5/9b/4fd95ab20c52bb5b8c03cc49169be5905d931de17edfe4d9d2986800b52e/mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a", size = 11924965 }, - { url = "https://files.pythonhosted.org/packages/56/9d/4a236b9c57f5d8f08ed346914b3f091a62dd7e19336b2b2a0d85485f82ff/mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9", size = 12867660 }, - { url = "https://files.pythonhosted.org/packages/40/88/a61a5497e2f68d9027de2bb139c7bb9abaeb1be1584649fa9d807f80a338/mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd", size = 12969198 }, - { url = "https://files.pythonhosted.org/packages/54/da/3d6fc5d92d324701b0c23fb413c853892bfe0e1dbe06c9138037d459756b/mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107", size = 9885276 }, - { url = "https://files.pythonhosted.org/packages/a0/b5/32dd67b69a16d088e533962e5044e51004176a9952419de0370cdaead0f8/mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1", size = 2752905 }, +sdist = { url = "https://files.pythonhosted.org/packages/ce/43/d5e49a86afa64bd3839ea0d5b9c7103487007d728e1293f52525d6d5486a/mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", size = 3239717 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/f8/65a7ce8d0e09b6329ad0c8d40330d100ea343bd4dd04c4f8ae26462d0a17/mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13", size = 10738433 }, + { url = "https://files.pythonhosted.org/packages/b4/95/9c0ecb8eacfe048583706249439ff52105b3f552ea9c4024166c03224270/mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559", size = 9861472 }, + { url = "https://files.pythonhosted.org/packages/84/09/9ec95e982e282e20c0d5407bc65031dfd0f0f8ecc66b69538296e06fcbee/mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b", size = 11611424 }, + { url = "https://files.pythonhosted.org/packages/78/13/f7d14e55865036a1e6a0a69580c240f43bc1f37407fe9235c0d4ef25ffb0/mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3", size = 12365450 }, + { url = 
"https://files.pythonhosted.org/packages/48/e1/301a73852d40c241e915ac6d7bcd7fedd47d519246db2d7b86b9d7e7a0cb/mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b", size = 12551765 }, + { url = "https://files.pythonhosted.org/packages/77/ba/c37bc323ae5fe7f3f15a28e06ab012cd0b7552886118943e90b15af31195/mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828", size = 9274701 }, + { url = "https://files.pythonhosted.org/packages/03/bc/f6339726c627bd7ca1ce0fa56c9ae2d0144604a319e0e339bdadafbbb599/mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f", size = 10662338 }, + { url = "https://files.pythonhosted.org/packages/e2/90/8dcf506ca1a09b0d17555cc00cd69aee402c203911410136cd716559efe7/mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5", size = 9787540 }, + { url = "https://files.pythonhosted.org/packages/05/05/a10f9479681e5da09ef2f9426f650d7b550d4bafbef683b69aad1ba87457/mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e", size = 11538051 }, + { url = "https://files.pythonhosted.org/packages/e9/9a/1f7d18b30edd57441a6411fcbc0c6869448d1a4bacbaee60656ac0fc29c8/mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c", size = 12286751 }, + { url = "https://files.pythonhosted.org/packages/72/af/19ff499b6f1dafcaf56f9881f7a965ac2f474f69f6f618b5175b044299f5/mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f", size = 12421783 }, + { url = "https://files.pythonhosted.org/packages/96/39/11b57431a1f686c1aed54bf794870efe0f6aeca11aca281a0bd87a5ad42c/mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f", size = 9265618 }, + { url = "https://files.pythonhosted.org/packages/98/3a/03c74331c5eb8bd025734e04c9840532226775c47a2c39b56a0c8d4f128d/mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", size = 10793981 }, + { url = "https://files.pythonhosted.org/packages/f0/1a/41759b18f2cfd568848a37c89030aeb03534411eef981df621d8fad08a1d/mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", size = 9749175 }, + { url = "https://files.pythonhosted.org/packages/12/7e/873481abf1ef112c582db832740f4c11b2bfa510e829d6da29b0ab8c3f9c/mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", size = 11455675 }, + { url = "https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", size = 12410020 }, + { url = "https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", size = 12498582 }, + { url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", size = 9366614 }, + { url = "https://files.pythonhosted.org/packages/6a/9b/fd2e05d6ffff24d912f150b87db9e364fa8282045c875654ce7e32fffa66/mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", size = 10788592 }, + { url = "https://files.pythonhosted.org/packages/74/37/b246d711c28a03ead1fd906bbc7106659aed7c089d55fe40dd58db812628/mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", size = 9753611 }, + { url = "https://files.pythonhosted.org/packages/a6/ac/395808a92e10cfdac8003c3de9a2ab6dc7cde6c0d2a4df3df1b815ffd067/mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", size = 11438443 }, + { url = "https://files.pythonhosted.org/packages/d2/8b/801aa06445d2de3895f59e476f38f3f8d610ef5d6908245f07d002676cbf/mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", size = 12402541 }, + { url = "https://files.pythonhosted.org/packages/c7/67/5a4268782eb77344cc613a4cf23540928e41f018a9a1ec4c6882baf20ab8/mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", size = 12494348 }, + { url = "https://files.pythonhosted.org/packages/83/3e/57bb447f7bbbfaabf1712d96f9df142624a386d98fb026a761532526057e/mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", size = 9373648 }, + { url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777 }, ] [[package]] @@ -2564,7 +2728,7 @@ wheels = [ [[package]] name = "nbconvert" -version = "7.16.5" +version = "7.16.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "beautifulsoup4", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -2582,9 +2746,9 @@ dependencies = [ { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/46/2c/d026c0367f2be2463d4c2f5b538e28add2bc67bc13730abb7f364ae4eb8b/nbconvert-7.16.5.tar.gz", hash = "sha256:c83467bb5777fdfaac5ebbb8e864f300b277f68692ecc04d6dab72f2d8442344", size = 856367 } +sdist = { url = "https://files.pythonhosted.org/packages/a3/59/f28e15fc47ffb73af68a8d9b47367a8630d76e97ae85ad18271b9db96fdf/nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582", size = 857715 } wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/9e/2dcc9fe00cf55d95a8deae69384e9cea61816126e345754f6c75494d32ec/nbconvert-7.16.5-py3-none-any.whl", hash = 
"sha256:e12eac052d6fd03040af4166c563d76e7aeead2e9aadf5356db552a1784bd547", size = 258061 }, + { url = "https://files.pythonhosted.org/packages/cc/9a/cd673b2f773a12c992f41309ef81b99da1690426bd2f96957a7ade0d3ed7/nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b", size = 258525 }, ] [[package]] @@ -2631,64 +2795,34 @@ wheels = [ [[package]] name = "numpy" -version = "2.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/fdbf6a7871703df6160b5cf3dd774074b086d278172285c52c2758b76305/numpy-2.2.1.tar.gz", hash = "sha256:45681fd7128c8ad1c379f0ca0776a8b0c6583d2f69889ddac01559dfe4390918", size = 20227662 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/c4/5588367dc9f91e1a813beb77de46ea8cab13f778e1b3a0e661ab031aba44/numpy-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5edb4e4caf751c1518e6a26a83501fda79bff41cc59dac48d70e6d65d4ec4440", size = 21213214 }, - { url = "https://files.pythonhosted.org/packages/d8/8b/32dd9f08419023a4cf856c5ad0b4eba9b830da85eafdef841a104c4fc05a/numpy-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa3017c40d513ccac9621a2364f939d39e550c542eb2a894b4c8da92b38896ab", size = 14352248 }, - { url = "https://files.pythonhosted.org/packages/84/2d/0e895d02940ba6e12389f0ab5cac5afcf8dc2dc0ade4e8cad33288a721bd/numpy-2.2.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:61048b4a49b1c93fe13426e04e04fdf5a03f456616f6e98c7576144677598675", size = 5391007 }, - { url = "https://files.pythonhosted.org/packages/11/b9/7f1e64a0d46d9c2af6d17966f641fb12d5b8ea3003f31b2308f3e3b9a6aa/numpy-2.2.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:7671dc19c7019103ca44e8d94917eba8534c76133523ca8406822efdd19c9308", size = 6926174 }, - { url = "https://files.pythonhosted.org/packages/2e/8c/043fa4418bc9364e364ab7aba8ff6ef5f6b9171ade22de8fbcf0e2fa4165/numpy-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4250888bcb96617e00bfa28ac24850a83c9f3a16db471eca2ee1f1714df0f957", size = 14330914 }, - { url = "https://files.pythonhosted.org/packages/f7/b6/d8110985501ca8912dfc1c3bbef99d66e62d487f72e46b2337494df77364/numpy-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7746f235c47abc72b102d3bce9977714c2444bdfaea7888d241b4c4bb6a78bf", size = 16379607 }, - { url = "https://files.pythonhosted.org/packages/e2/57/bdca9fb8bdaa810c3a4ff2eb3231379b77f618a7c0d24be9f7070db50775/numpy-2.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:059e6a747ae84fce488c3ee397cee7e5f905fd1bda5fb18c66bc41807ff119b2", size = 15541760 }, - { url = "https://files.pythonhosted.org/packages/97/55/3b9147b3cbc3b6b1abc2a411dec5337a46c873deca0dd0bf5bef9d0579cc/numpy-2.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f62aa6ee4eb43b024b0e5a01cf65a0bb078ef8c395e8713c6e8a12a697144528", size = 18168476 }, - { url = "https://files.pythonhosted.org/packages/00/e7/7c2cde16c9b87a8e14fdd262ca7849c4681cf48c8a774505f7e6f5e3b643/numpy-2.2.1-cp310-cp310-win32.whl", hash = "sha256:48fd472630715e1c1c89bf1feab55c29098cb403cc184b4859f9c86d4fcb6a95", size = 6570985 }, - { url = "https://files.pythonhosted.org/packages/a1/a8/554b0e99fc4ac11ec481254781a10da180d0559c2ebf2c324232317349ee/numpy-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:b541032178a718c165a49638d28272b771053f628382d5e9d1c93df23ff58dbf", size = 12913384 }, - { url = 
"https://files.pythonhosted.org/packages/59/14/645887347124e101d983e1daf95b48dc3e136bf8525cb4257bf9eab1b768/numpy-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40f9e544c1c56ba8f1cf7686a8c9b5bb249e665d40d626a23899ba6d5d9e1484", size = 21217379 }, - { url = "https://files.pythonhosted.org/packages/9f/fd/2279000cf29f58ccfd3778cbf4670dfe3f7ce772df5e198c5abe9e88b7d7/numpy-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9b57eaa3b0cd8db52049ed0330747b0364e899e8a606a624813452b8203d5f7", size = 14388520 }, - { url = "https://files.pythonhosted.org/packages/58/b0/034eb5d5ba12d66ab658ff3455a31f20add0b78df8203c6a7451bd1bee21/numpy-2.2.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:bc8a37ad5b22c08e2dbd27df2b3ef7e5c0864235805b1e718a235bcb200cf1cb", size = 5389286 }, - { url = "https://files.pythonhosted.org/packages/5d/69/6f3cccde92e82e7835fdb475c2bf439761cbf8a1daa7c07338e1e132dfec/numpy-2.2.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9036d6365d13b6cbe8f27a0eaf73ddcc070cae584e5ff94bb45e3e9d729feab5", size = 6930345 }, - { url = "https://files.pythonhosted.org/packages/d1/72/1cd38e91ab563e67f584293fcc6aca855c9ae46dba42e6b5ff4600022899/numpy-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51faf345324db860b515d3f364eaa93d0e0551a88d6218a7d61286554d190d73", size = 14335748 }, - { url = "https://files.pythonhosted.org/packages/f2/d4/f999444e86986f3533e7151c272bd8186c55dda554284def18557e013a2a/numpy-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38efc1e56b73cc9b182fe55e56e63b044dd26a72128fd2fbd502f75555d92591", size = 16391057 }, - { url = "https://files.pythonhosted.org/packages/99/7b/85cef6a3ae1b19542b7afd97d0b296526b6ef9e3c43ea0c4d9c4404fb2d0/numpy-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:31b89fa67a8042e96715c68e071a1200c4e172f93b0fbe01a14c0ff3ff820fc8", size = 15556943 }, - { url = "https://files.pythonhosted.org/packages/69/7e/b83cc884c3508e91af78760f6b17ab46ad649831b1fa35acb3eb26d9e6d2/numpy-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4c86e2a209199ead7ee0af65e1d9992d1dce7e1f63c4b9a616500f93820658d0", size = 18180785 }, - { url = "https://files.pythonhosted.org/packages/b2/9f/eb4a9a38867de059dcd4b6e18d47c3867fbd3795d4c9557bb49278f94087/numpy-2.2.1-cp311-cp311-win32.whl", hash = "sha256:b34d87e8a3090ea626003f87f9392b3929a7bbf4104a05b6667348b6bd4bf1cd", size = 6568983 }, - { url = "https://files.pythonhosted.org/packages/6d/1e/be3b9f3073da2f8c7fa361fcdc231b548266b0781029fdbaf75eeab997fd/numpy-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:360137f8fb1b753c5cde3ac388597ad680eccbbbb3865ab65efea062c4a1fd16", size = 12917260 }, - { url = "https://files.pythonhosted.org/packages/62/12/b928871c570d4a87ab13d2cc19f8817f17e340d5481621930e76b80ffb7d/numpy-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:694f9e921a0c8f252980e85bce61ebbd07ed2b7d4fa72d0e4246f2f8aa6642ab", size = 20909861 }, - { url = "https://files.pythonhosted.org/packages/3d/c3/59df91ae1d8ad7c5e03efd63fd785dec62d96b0fe56d1f9ab600b55009af/numpy-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3683a8d166f2692664262fd4900f207791d005fb088d7fdb973cc8d663626faa", size = 14095776 }, - { url = "https://files.pythonhosted.org/packages/af/4e/8ed5868efc8e601fb69419644a280e9c482b75691466b73bfaab7d86922c/numpy-2.2.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:780077d95eafc2ccc3ced969db22377b3864e5b9a0ea5eb347cc93b3ea900315", size = 5126239 }, - { url = 
"https://files.pythonhosted.org/packages/1a/74/dd0bbe650d7bc0014b051f092f2de65e34a8155aabb1287698919d124d7f/numpy-2.2.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:55ba24ebe208344aa7a00e4482f65742969a039c2acfcb910bc6fcd776eb4355", size = 6659296 }, - { url = "https://files.pythonhosted.org/packages/7f/11/4ebd7a3f4a655764dc98481f97bd0a662fb340d1001be6050606be13e162/numpy-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b1d07b53b78bf84a96898c1bc139ad7f10fda7423f5fd158fd0f47ec5e01ac7", size = 14047121 }, - { url = "https://files.pythonhosted.org/packages/7f/a7/c1f1d978166eb6b98ad009503e4d93a8c1962d0eb14a885c352ee0276a54/numpy-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5062dc1a4e32a10dc2b8b13cedd58988261416e811c1dc4dbdea4f57eea61b0d", size = 16096599 }, - { url = "https://files.pythonhosted.org/packages/3d/6d/0e22afd5fcbb4d8d0091f3f46bf4e8906399c458d4293da23292c0ba5022/numpy-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fce4f615f8ca31b2e61aa0eb5865a21e14f5629515c9151850aa936c02a1ee51", size = 15243932 }, - { url = "https://files.pythonhosted.org/packages/03/39/e4e5832820131ba424092b9610d996b37e5557180f8e2d6aebb05c31ae54/numpy-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:67d4cda6fa6ffa073b08c8372aa5fa767ceb10c9a0587c707505a6d426f4e046", size = 17861032 }, - { url = "https://files.pythonhosted.org/packages/5f/8a/3794313acbf5e70df2d5c7d2aba8718676f8d054a05abe59e48417fb2981/numpy-2.2.1-cp312-cp312-win32.whl", hash = "sha256:32cb94448be47c500d2c7a95f93e2f21a01f1fd05dd2beea1ccd049bb6001cd2", size = 6274018 }, - { url = "https://files.pythonhosted.org/packages/17/c1/c31d3637f2641e25c7a19adf2ae822fdaf4ddd198b05d79a92a9ce7cb63e/numpy-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:ba5511d8f31c033a5fcbda22dd5c813630af98c70b2661f2d2c654ae3cdfcfc8", size = 12613843 }, - { url = "https://files.pythonhosted.org/packages/20/d6/91a26e671c396e0c10e327b763485ee295f5a5a7a48c553f18417e5a0ed5/numpy-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f1d09e520217618e76396377c81fba6f290d5f926f50c35f3a5f72b01a0da780", size = 20896464 }, - { url = "https://files.pythonhosted.org/packages/8c/40/5792ccccd91d45e87d9e00033abc4f6ca8a828467b193f711139ff1f1cd9/numpy-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3ecc47cd7f6ea0336042be87d9e7da378e5c7e9b3c8ad0f7c966f714fc10d821", size = 14111350 }, - { url = "https://files.pythonhosted.org/packages/c0/2a/fb0a27f846cb857cef0c4c92bef89f133a3a1abb4e16bba1c4dace2e9b49/numpy-2.2.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f419290bc8968a46c4933158c91a0012b7a99bb2e465d5ef5293879742f8797e", size = 5111629 }, - { url = "https://files.pythonhosted.org/packages/eb/e5/8e81bb9d84db88b047baf4e8b681a3e48d6390bc4d4e4453eca428ecbb49/numpy-2.2.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5b6c390bfaef8c45a260554888966618328d30e72173697e5cabe6b285fb2348", size = 6645865 }, - { url = "https://files.pythonhosted.org/packages/7a/1a/a90ceb191dd2f9e2897c69dde93ccc2d57dd21ce2acbd7b0333e8eea4e8d/numpy-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:526fc406ab991a340744aad7e25251dd47a6720a685fa3331e5c59fef5282a59", size = 14043508 }, - { url = "https://files.pythonhosted.org/packages/f1/5a/e572284c86a59dec0871a49cd4e5351e20b9c751399d5f1d79628c0542cb/numpy-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f74e6fdeb9a265624ec3a3918430205dff1df7e95a230779746a6af78bc615af", size = 16094100 }, - 
{ url = "https://files.pythonhosted.org/packages/0c/2c/a79d24f364788386d85899dd280a94f30b0950be4b4a545f4fa4ed1d4ca7/numpy-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:53c09385ff0b72ba79d8715683c1168c12e0b6e84fb0372e97553d1ea91efe51", size = 15239691 }, - { url = "https://files.pythonhosted.org/packages/cf/79/1e20fd1c9ce5a932111f964b544facc5bb9bde7865f5b42f00b4a6a9192b/numpy-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f3eac17d9ec51be534685ba877b6ab5edc3ab7ec95c8f163e5d7b39859524716", size = 17856571 }, - { url = "https://files.pythonhosted.org/packages/be/5b/cc155e107f75d694f562bdc84a26cc930569f3dfdfbccb3420b626065777/numpy-2.2.1-cp313-cp313-win32.whl", hash = "sha256:9ad014faa93dbb52c80d8f4d3dcf855865c876c9660cb9bd7553843dd03a4b1e", size = 6270841 }, - { url = "https://files.pythonhosted.org/packages/44/be/0e5cd009d2162e4138d79a5afb3b5d2341f0fe4777ab6e675aa3d4a42e21/numpy-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:164a829b6aacf79ca47ba4814b130c4020b202522a93d7bff2202bfb33b61c60", size = 12606618 }, - { url = "https://files.pythonhosted.org/packages/a8/87/04ddf02dd86fb17c7485a5f87b605c4437966d53de1e3745d450343a6f56/numpy-2.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4dfda918a13cc4f81e9118dea249e192ab167a0bb1966272d5503e39234d694e", size = 20921004 }, - { url = "https://files.pythonhosted.org/packages/6e/3e/d0e9e32ab14005425d180ef950badf31b862f3839c5b927796648b11f88a/numpy-2.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:733585f9f4b62e9b3528dd1070ec4f52b8acf64215b60a845fa13ebd73cd0712", size = 14119910 }, - { url = "https://files.pythonhosted.org/packages/b5/5b/aa2d1905b04a8fb681e08742bb79a7bddfc160c7ce8e1ff6d5c821be0236/numpy-2.2.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:89b16a18e7bba224ce5114db863e7029803c179979e1af6ad6a6b11f70545008", size = 5153612 }, - { url = "https://files.pythonhosted.org/packages/ce/35/6831808028df0648d9b43c5df7e1051129aa0d562525bacb70019c5f5030/numpy-2.2.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:676f4eebf6b2d430300f1f4f4c2461685f8269f94c89698d832cdf9277f30b84", size = 6668401 }, - { url = "https://files.pythonhosted.org/packages/b1/38/10ef509ad63a5946cc042f98d838daebfe7eaf45b9daaf13df2086b15ff9/numpy-2.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f5cdf9f493b35f7e41e8368e7d7b4bbafaf9660cba53fb21d2cd174ec09631", size = 14014198 }, - { url = "https://files.pythonhosted.org/packages/df/f8/c80968ae01df23e249ee0a4487fae55a4c0fe2f838dfe9cc907aa8aea0fa/numpy-2.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1ad395cf254c4fbb5b2132fee391f361a6e8c1adbd28f2cd8e79308a615fe9d", size = 16076211 }, - { url = "https://files.pythonhosted.org/packages/09/69/05c169376016a0b614b432967ac46ff14269eaffab80040ec03ae1ae8e2c/numpy-2.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:08ef779aed40dbc52729d6ffe7dd51df85796a702afbf68a4f4e41fafdc8bda5", size = 15220266 }, - { url = "https://files.pythonhosted.org/packages/f1/ff/94a4ce67ea909f41cf7ea712aebbe832dc67decad22944a1020bb398a5ee/numpy-2.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:26c9c4382b19fcfbbed3238a14abf7ff223890ea1936b8890f058e7ba35e8d71", size = 17852844 }, - { url = "https://files.pythonhosted.org/packages/46/72/8a5dbce4020dfc595592333ef2fbb0a187d084ca243b67766d29d03e0096/numpy-2.2.1-cp313-cp313t-win32.whl", hash = "sha256:93cf4e045bae74c90ca833cba583c14b62cb4ba2cba0abd2b141ab52548247e2", size = 6326007 }, - { url = 
"https://files.pythonhosted.org/packages/7b/9c/4fce9cf39dde2562584e4cfd351a0140240f82c0e3569ce25a250f47037d/numpy-2.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:bff7d8ec20f5f42607599f9994770fa65d76edca264a87b5e4ea5629bce12268", size = 12693107 }, - { url = "https://files.pythonhosted.org/packages/f1/65/d36a76b811ffe0a4515e290cb05cb0e22171b1b0f0db6bee9141cf023545/numpy-2.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7ba9cc93a91d86365a5d270dee221fdc04fb68d7478e6bf6af650de78a8339e3", size = 21044672 }, - { url = "https://files.pythonhosted.org/packages/aa/3f/b644199f165063154df486d95198d814578f13dd4d8c1651e075bf1cb8af/numpy-2.2.1-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:3d03883435a19794e41f147612a77a8f56d4e52822337844fff3d4040a142964", size = 6789873 }, - { url = "https://files.pythonhosted.org/packages/d7/df/2adb0bb98a3cbe8a6c3c6d1019aede1f1d8b83927ced228a46cc56c7a206/numpy-2.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4511d9e6071452b944207c8ce46ad2f897307910b402ea5fa975da32e0102800", size = 16194933 }, - { url = "https://files.pythonhosted.org/packages/13/3e/1959d5219a9e6d200638d924cedda6a606392f7186a4ed56478252e70d55/numpy-2.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5c5cc0cbabe9452038ed984d05ac87910f89370b9242371bd9079cb4af61811e", size = 12820057 }, +version = "1.26.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/94/ace0fdea5241a27d13543ee117cbc65868e82213fb31a8eb7fe9ff23f313/numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0", size = 20631468 }, + { url = "https://files.pythonhosted.org/packages/20/f7/b24208eba89f9d1b58c1668bc6c8c4fd472b20c45573cb767f59d49fb0f6/numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a", size = 13966411 }, + { url = "https://files.pythonhosted.org/packages/fc/a5/4beee6488160798683eed5bdb7eead455892c3b4e1f78d79d8d3f3b084ac/numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4", size = 14219016 }, + { url = "https://files.pythonhosted.org/packages/4b/d7/ecf66c1cd12dc28b4040b15ab4d17b773b87fa9d29ca16125de01adb36cd/numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f", size = 18240889 }, + { url = "https://files.pythonhosted.org/packages/24/03/6f229fe3187546435c4f6f89f6d26c129d4f5bed40552899fcf1f0bf9e50/numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a", size = 13876746 }, + { url = "https://files.pythonhosted.org/packages/39/fe/39ada9b094f01f5a35486577c848fe274e374bbf8d8f472e1423a0bbd26d/numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2", size = 18078620 }, + { url = "https://files.pythonhosted.org/packages/d5/ef/6ad11d51197aad206a9ad2286dc1aac6a378059e06e8cf22cd08ed4f20dc/numpy-1.26.4-cp310-cp310-win32.whl", hash = 
"sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07", size = 5972659 }, + { url = "https://files.pythonhosted.org/packages/19/77/538f202862b9183f54108557bfda67e17603fc560c384559e769321c9d92/numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5", size = 15808905 }, + { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554 }, + { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127 }, + { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994 }, + { url = "https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005 }, + { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297 }, + { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567 }, + { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812 }, + { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913 }, + { url = "https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901 }, + { url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868 }, + { url = "https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109 }, + { url = 
"https://files.pythonhosted.org/packages/0f/50/de23fde84e45f5c4fda2488c759b69990fd4512387a8632860f3ac9cd225/numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed", size = 17950613 }, + { url = "https://files.pythonhosted.org/packages/4c/0c/9c603826b6465e82591e05ca230dfc13376da512b25ccd0894709b054ed0/numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a", size = 13572172 }, + { url = "https://files.pythonhosted.org/packages/76/8c/2ba3902e1a0fc1c74962ea9bb33a534bb05984ad7ff9515bf8d07527cadd/numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", size = 17786643 }, + { url = "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", size = 5677803 }, + { url = "https://files.pythonhosted.org/packages/16/2e/86f24451c2d530c88daf997cb8d6ac622c1d40d19f5a031ed68a4b73a374/numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", size = 15517754 }, ] [[package]] @@ -2777,6 +2911,14 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/db/f7/97a9ea26ed4bbbfc2d470994b8b4f338ef663be97b8f677519ac195e113d/nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ea4f11a2904e2a8dc4b1833cc1b5181cde564edd0d5cd33e3c168eff2d1863f1", size = 207454763 }, ] +[[package]] +name = "nvidia-cusparselt-cu12" +version = "0.6.2" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/a8/bcbb63b53a4b1234feeafb65544ee55495e1bb37ec31b999b963cbccfd1d/nvidia_cusparselt_cu12-0.6.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:df2c24502fd76ebafe7457dbc4716b2fec071aabaed4fb7691a201cde03704d9", size = 150057751 }, +] + [[package]] name = "nvidia-nccl-cu12" version = "2.21.5" @@ -2812,15 +2954,15 @@ wheels = [ [[package]] name = "ollama" -version = "0.4.6" +version = "0.4.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/75/d6/2bd7cffbabc81282576051ebf66ebfaa97e6b541975cd4e886bfd6c0f83d/ollama-0.4.6.tar.gz", hash = "sha256:b00717651c829f96094ed4231b9f0d87e33cc92dc235aca50aeb5a2a4e6e95b7", size = 12710 } +sdist = { url = "https://files.pythonhosted.org/packages/b0/6d/dc77539c735bbed5d0c873fb029fb86aa9f0163df169b34152914331c369/ollama-0.4.7.tar.gz", hash = "sha256:891dcbe54f55397d82d289c459de0ea897e103b86a3f1fad0fdb1895922a75ff", size = 12843 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/60/ac0e47c4c400fbd1a72a3c6e4a76cf5ef859d60677e7c4b9f0203c5657d3/ollama-0.4.6-py3-none-any.whl", hash = "sha256:cbb4ebe009e10dd12bdd82508ab415fd131945e185753d728a7747c9ebe762e9", size = 13086 }, + { url = "https://files.pythonhosted.org/packages/31/83/c3ffac86906c10184c88c2e916460806b072a2cfe34cdcaf3a0c0e836d39/ollama-0.4.7-py3-none-any.whl", hash = "sha256:85505663cca67a83707be5fb3aeff0ea72e67846cea5985529d8eca4366564a1", size = 13210 }, ] 
[[package]] @@ -2861,32 +3003,36 @@ wheels = [ [[package]] name = "onnxruntime-genai" -version = "0.5.2" +version = "0.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "onnxruntime", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "numpy", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, + { name = "onnxruntime", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/52/35/22a421f852eb14f47c33a4dd4c3ef58a2f3d5a96be8bb6d6cc271b2a0e83/onnxruntime_genai-0.5.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:cd322ead0027fbfa309e7be76c4512157ad369dc189ab3334a58a199b4f58a02", size = 769921 }, - { url = "https://files.pythonhosted.org/packages/7f/1b/5166ed4a73c5e9f92e6db4d7838923ffd595cea164661fae20d82e3a6966/onnxruntime_genai-0.5.2-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:79d721a33e80a9664aeeb87c0ceec75801fc81e48e8ff7940e3658d0b28f25cc", size = 869111 }, - { url = "https://files.pythonhosted.org/packages/12/5b/6f08f9435f0c3977046cb4292ab1e836c22cd7d56fc87ace4d2a90dfb828/onnxruntime_genai-0.5.2-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bd7954f9dc829e69dabd7f676443529ac18171ec8077438c16364d381733070e", size = 1380370 }, - { url = "https://files.pythonhosted.org/packages/57/d6/91e486424f924c2a99e8f1bd201180979101ecc09bee1ca7f53dae1c8a38/onnxruntime_genai-0.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:4d2968df6d8064664a5f095006c70520f4ca689204b695e88951f088477bc1e0", size = 776263 }, - { url = "https://files.pythonhosted.org/packages/3e/3d/e2d8f89c05c6cf35e2ade2b335b1b97725327591b8fb141d266ab98615f9/onnxruntime_genai-0.5.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:217c748f8ebd1a5082e1ad8ee8fc90fc1a4e9ce7839189f4c2c2545d1390af15", size = 769888 }, - { url = "https://files.pythonhosted.org/packages/33/13/66ffa143cc82f8352ec87ba0501bc21e05dd9e84fbbad530e74a705ac911/onnxruntime_genai-0.5.2-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:6194aabd589b3ffb571b325f504266ac47c33c434abfd87575c30d7a3e1179c9", size = 869092 }, - { url = "https://files.pythonhosted.org/packages/6a/17/a29c0cf89d90374234b8e510fcb970f2e043b42689b5ea23cbdab5a414b6/onnxruntime_genai-0.5.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:88edb36c9e2d670316f1e6e4ce27a86f212648a92053a94a31f88b1f4d6c0935", size = 1380461 }, - { url = "https://files.pythonhosted.org/packages/59/b1/acb1daf1a08c8098c828e7ea9e187b9728a8fc151a4df4911f988c08a874/onnxruntime_genai-0.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:09b697f955616156948f21366d13d02884a15521926f68a259722d9fa4437db4", size = 776308 }, - { url = "https://files.pythonhosted.org/packages/22/57/d249827c3e37abe528674bfa97de4c61b18afb452d2afced690a745e0866/onnxruntime_genai-0.5.2-cp311-cp311-win_arm64.whl", hash = "sha256:893be15d2113438e60b8a1c0095892e0fd4f2b01dd470d6197337db2a5778c88", size = 751552 }, - { url = 
"https://files.pythonhosted.org/packages/cf/72/259de19e93e72b14d0a3910f1025f71da006a8dfc76c97792646b335a8a3/onnxruntime_genai-0.5.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:6b438d7f4901081b8f3ff99db6c6ea15a3fcc107abce79859ff635e1278e26b0", size = 771097 }, - { url = "https://files.pythonhosted.org/packages/8c/72/73c95e357ada258025236437fb2b4d56fb7e8594db6361f4560ea97ca06c/onnxruntime_genai-0.5.2-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:d7bffb799d44656b2615fc43130a1a287d57e8893b80523e560924cf05770f1d", size = 871450 }, - { url = "https://files.pythonhosted.org/packages/79/3d/43211c8a66d7ce54dea137ad7bec30767e3f2dc5e1e22befdcca290ebbe0/onnxruntime_genai-0.5.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bb5b9650512e21a71d965e582d616b33df07978b0c3ecbd5bef0912a7b5f7832", size = 1380898 }, - { url = "https://files.pythonhosted.org/packages/9f/7b/53b217ed0db401877fafa2f63d2ce7de754899f2bdf4cb415931e2019f18/onnxruntime_genai-0.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:422e9af27f182247378e9423f5745becfaffcdf7a4f452da17fd5d9390770ca7", size = 776974 }, - { url = "https://files.pythonhosted.org/packages/08/c1/a69aeba29f40febd8d70d45044d4eb97905beb37fc8491b1628c8714ecc1/onnxruntime_genai-0.5.2-cp312-cp312-win_arm64.whl", hash = "sha256:315b23cb04749202c9cc3eb34f281bb4943de477a5aa46c99b940603b6a5d272", size = 751246 }, + { url = "https://files.pythonhosted.org/packages/5f/7f/3e1edde3318458aabdd6070c44bedc2caa913949530d90ec89c32c76a036/onnxruntime_genai-0.6.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:b820e20e438fc2679db24e432c5652e20a972709e4002210a46b4f6282fd57d4", size = 871347 }, + { url = "https://files.pythonhosted.org/packages/62/9e/695c96d4023c1a826f64a61fd3b7e11c6d1059df04baeea99cd9695afb0c/onnxruntime_genai-0.6.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:02cd58bd85f2cc3d9a017af095dee2b3d7cd2be3b5c1701ebc6fc5204e6dffef", size = 986678 }, + { url = "https://files.pythonhosted.org/packages/b8/15/a62e1096413d17c24fac161f15002b94406be4b6b17663c576742eb15b44/onnxruntime_genai-0.6.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22342e7262bcdc2337e0a3c1125ec7d42e8cf18b2eca5e5debd17d17f0e6154d", size = 1523539 }, + { url = "https://files.pythonhosted.org/packages/da/bd/f638c512712f776ec536878905dd4316ae94bd1b9f29b3511d9cc00ebf9b/onnxruntime_genai-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:9a7ceef0afdd605d2816cea2b54bab629cfd5f8005800c09db4d213f2c32be43", size = 866056 }, + { url = "https://files.pythonhosted.org/packages/28/b5/60c3128cba49884f64fa8f77bc75560e14b0092af15915a4fec5983d22e2/onnxruntime_genai-0.6.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:55e74c544573e78cd86b9bc440a12cb0e1442978a0446dfc438b3e4d6c40f498", size = 871291 }, + { url = "https://files.pythonhosted.org/packages/1d/7d/a26c1a6517b6d58131d8068b557702d1227699e0ddc77c5f8d3ce3e58afb/onnxruntime_genai-0.6.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:4a7afcc99657bcbf66ee8a33e9cf7b4540edef01480c7d7267b4c8f086bcf1ee", size = 986693 }, + { url = "https://files.pythonhosted.org/packages/9d/d3/f43608d2b64cc0122f5eea4e3c3efb1a4d82ae69cca6a8e17a7b10e77752/onnxruntime_genai-0.6.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f250e986af81d014305037a485c57388dbab680bb08624ddb938b5df1fbb011d", size = 1523215 }, + { url = "https://files.pythonhosted.org/packages/96/ca/941cf8ff6549097e0e0395e933160fdcb13445f0040122295951b70aa60c/onnxruntime_genai-0.6.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:9d32192ac9012864629066d0ce8a4ff6326557201ca5f4d3238bffa06580f313", size = 866007 }, + { url = "https://files.pythonhosted.org/packages/55/13/15cfbd1c5d1163f25894b5f959f698f72d54be5da31efcee0210973069f0/onnxruntime_genai-0.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:04a10f74a912809de859104f27555c1bfd88936314adf1dae1c107f7fdd4e557", size = 839485 }, + { url = "https://files.pythonhosted.org/packages/bf/20/c88f2ea675a37854fef5986201599c2d910195aff93644678e12fecc1991/onnxruntime_genai-0.6.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ac2caaafe79e664fcd105a61051d46755aa9242a08485c065772bcb8a7436819", size = 873021 }, + { url = "https://files.pythonhosted.org/packages/ac/ac/ee559e8250dc039a5b63ad294894f110d9db53646a3d8a5bf9e4733bd130/onnxruntime_genai-0.6.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:01ce0b0fc068d5b33166c45a7b554f4ffd3774357ace53e1fabf6476468eb45a", size = 989275 }, + { url = "https://files.pythonhosted.org/packages/89/4f/a98cb43eb90dbdb31abc9b97ec0f903f70b0164c696ed013bb867ab7a0f4/onnxruntime_genai-0.6.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecdc72e2f9ec6386c08390a55eb9d9d0fe4b0b5d5d042cbeac34c666e28d0a3f", size = 1524001 }, + { url = "https://files.pythonhosted.org/packages/fe/24/357a31a821b706e340a8ac1bac8c3d9a27fdc53ae6c41e1cdbfb16055aa7/onnxruntime_genai-0.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:5039138c7510ca84acb879f1da943709dc37ed2364b5cf280fec52eb94d8e137", size = 866519 }, + { url = "https://files.pythonhosted.org/packages/b7/77/bcb9f3c7e95945fc457238548c0b8fe656c54ead0153d67c0610b7d9fb57/onnxruntime_genai-0.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:12fc1c412e57011904f4942d176ae50dc8d52445dc83bc0bd25c097debfa1712", size = 839461 }, + { url = "https://files.pythonhosted.org/packages/08/77/d3722046f4dca6046f89e61c4ec6ddba08ee28371d7f923c9b061df6f419/onnxruntime_genai-0.6.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:f69785e5f8b1fcd678616dea51dccd27c9fa45c840f8681f08ab97fbc9502636", size = 873022 }, + { url = "https://files.pythonhosted.org/packages/ef/2e/84c15a067b2e82d11cb763b287f62f499130323ddb9e97b3ce627c0521e2/onnxruntime_genai-0.6.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:40131e948ccd3d282c33a86418308288f66f465ef679632a6483230b38da2566", size = 989229 }, + { url = "https://files.pythonhosted.org/packages/36/60/360866f47d523ee688dedbda98b4a09bf7e2008cc31148c2037e2456b643/onnxruntime_genai-0.6.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7f314dec25125cb11a32683a120a63f570d2209b20cab4a28a8e93867f576a9", size = 1523999 }, + { url = "https://files.pythonhosted.org/packages/b0/d7/2a802365c6c11cbd01dfa96b9d58f315d76c2c1ecdd4397cd082104fc765/onnxruntime_genai-0.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:4870cca3701603efd5842398e574146000e92325a99f0f28ddbb4c904b513071", size = 866464 }, ] [[package]] name = "openai" -version = "1.59.7" +version = "1.64.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -2898,9 +3044,9 @@ dependencies = [ { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/f9/d5/25cf04789c7929b476c4d9ef711f8979091db63d30bfc093828fe4bf5c72/openai-1.59.7.tar.gz", hash = "sha256:043603def78c00befb857df9f0a16ee76a3af5984ba40cb7ee5e2f40db4646bf", size = 345007 } +sdist = { url = "https://files.pythonhosted.org/packages/7b/1d/aae78d8ecc571d672c4a27794a8f248bc46437a22ddcb9c4eb6fd6616c03/openai-1.64.0.tar.gz", hash = "sha256:2861053538704d61340da56e2f176853d19f1dc5704bc306b7597155f850d57a", size = 357058 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6d/47/7b92f1731c227f4139ef0025b5996062e44f9a749c54315c8bdb34bad5ec/openai-1.59.7-py3-none-any.whl", hash = "sha256:cfa806556226fa96df7380ab2e29814181d56fea44738c2b0e581b462c268692", size = 454844 }, + { url = "https://files.pythonhosted.org/packages/9a/1a/e62718f311daa26d208800976d7944e5ee6d503e1ea474522b2a15a904bb/openai-1.64.0-py3-none-any.whl", hash = "sha256:20f85cde9e95e9fbb416e3cb5a6d3119c0b28308afd6e3cc47bf100623dac623", size = 472289 }, ] [[package]] @@ -2953,32 +3099,32 @@ wheels = [ [[package]] name = "opentelemetry-api" -version = "1.29.0" +version = "1.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "deprecated", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "importlib-metadata", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bc/8e/b886a5e9861afa188d1fe671fb96ff9a1d90a23d57799331e137cc95d573/opentelemetry_api-1.29.0.tar.gz", hash = "sha256:d04a6cf78aad09614f52964ecb38021e248f5714dc32c2e0d8fd99517b4d69cf", size = 62900 } +sdist = { url = "https://files.pythonhosted.org/packages/2b/6d/bbbf879826b7f3c89a45252010b5796fb1f1a0d45d9dc4709db0ef9a06c8/opentelemetry_api-1.30.0.tar.gz", hash = "sha256:375893400c1435bf623f7dfb3bcd44825fe6b56c34d0667c542ea8257b1a1240", size = 63703 } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/53/5249ea860d417a26a3a6f1bdedfc0748c4f081a3adaec3d398bc0f7c6a71/opentelemetry_api-1.29.0-py3-none-any.whl", hash = "sha256:5fcd94c4141cc49c736271f3e1efb777bebe9cc535759c54c936cca4f1b312b8", size = 64304 }, + { url = "https://files.pythonhosted.org/packages/36/0a/eea862fae6413d8181b23acf8e13489c90a45f17986ee9cf4eab8a0b9ad9/opentelemetry_api-1.30.0-py3-none-any.whl", hash = "sha256:d5f5284890d73fdf47f843dda3210edf37a38d66f44f2b5aedc1e89ed455dc09", size = 64955 }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-common" -version = "1.29.0" +version = "1.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-proto", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/58/f7fd7eaf592b2521999a4271ab3ce1c82fe37fe9b0dc25c348398d95d66a/opentelemetry_exporter_otlp_proto_common-1.29.0.tar.gz", hash = "sha256:e7c39b5dbd1b78fe199e40ddfe477e6983cb61aa74ba836df09c3869a3e3e163", size = 19133 } +sdist = { url = "https://files.pythonhosted.org/packages/a2/d7/44098bf1ef89fc5810cdbda05faa2ae9322a0dbda4921cdc965dc68a9856/opentelemetry_exporter_otlp_proto_common-1.30.0.tar.gz", hash = "sha256:ddbfbf797e518411857d0ca062c957080279320d6235a279f7b64ced73c13897", size = 19640 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/75/7609bda3d72bf307839570b226180513e854c01443ebe265ed732a4980fc/opentelemetry_exporter_otlp_proto_common-1.29.0-py3-none-any.whl", hash = 
"sha256:a9d7376c06b4da9cf350677bcddb9618ed4b8255c3f6476975f5e38274ecd3aa", size = 18459 }, + { url = "https://files.pythonhosted.org/packages/ee/54/f4b3de49f8d7d3a78fd6e6e1a6fd27dd342eb4d82c088b9078c6a32c3808/opentelemetry_exporter_otlp_proto_common-1.30.0-py3-none-any.whl", hash = "sha256:5468007c81aa9c44dc961ab2cf368a29d3475977df83b4e30aeed42aa7bc3b38", size = 18747 }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-grpc" -version = "1.29.0" +version = "1.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "deprecated", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -2989,14 +3135,14 @@ dependencies = [ { name = "opentelemetry-proto", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "opentelemetry-sdk", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/41/aa/b3f2190613141f35fe15145bf438334fdd1eac8aeeee4f7ecbc887999443/opentelemetry_exporter_otlp_proto_grpc-1.29.0.tar.gz", hash = "sha256:3d324d07d64574d72ed178698de3d717f62a059a93b6b7685ee3e303384e73ea", size = 26224 } +sdist = { url = "https://files.pythonhosted.org/packages/86/3e/c7246df92c25e6ce95c349ad21597b4471b01ec9471e95d5261f1629fe92/opentelemetry_exporter_otlp_proto_grpc-1.30.0.tar.gz", hash = "sha256:d0f10f0b9b9a383b7d04a144d01cb280e70362cccc613987e234183fd1f01177", size = 26256 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f2/de/4b4127a25d1594851d99032f3a9acb09cb512d11edec713410fb906607f4/opentelemetry_exporter_otlp_proto_grpc-1.29.0-py3-none-any.whl", hash = "sha256:5a2a3a741a2543ed162676cf3eefc2b4150e6f4f0a193187afb0d0e65039c69c", size = 18520 }, + { url = "https://files.pythonhosted.org/packages/5e/35/d9f63fd84c2ed8dbd407bcbb933db4ed6e1b08e7fbdaca080b9ac309b927/opentelemetry_exporter_otlp_proto_grpc-1.30.0-py3-none-any.whl", hash = "sha256:2906bcae3d80acc54fd1ffcb9e44d324e8631058b502ebe4643ca71d1ff30830", size = 18550 }, ] [[package]] name = "opentelemetry-instrumentation" -version = "0.50b0" +version = "0.51b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3004,14 +3150,14 @@ dependencies = [ { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "wrapt", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/79/2e/2e59a7cb636dc394bd7cf1758ada5e8ed87590458ca6bb2f9c26e0243847/opentelemetry_instrumentation-0.50b0.tar.gz", hash = "sha256:7d98af72de8dec5323e5202e46122e5f908592b22c6d24733aad619f07d82979", size = 26539 } +sdist = { url = "https://files.pythonhosted.org/packages/ec/5a/4c7f02235ac1269b48f3855f6be1afc641f31d4888d28b90b732fbce7141/opentelemetry_instrumentation-0.51b0.tar.gz", hash = "sha256:4ca266875e02f3988536982467f7ef8c32a38b8895490ddce9ad9604649424fa", size = 27760 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/b1/55a77152a83ec8998e520a3a575f44af1020cfe4bdc000b7538583293b85/opentelemetry_instrumentation-0.50b0-py3-none-any.whl", hash = "sha256:b8f9fc8812de36e1c6dffa5bfc6224df258841fb387b6dfe5df15099daa10630", size = 30728 }, + { url = 
"https://files.pythonhosted.org/packages/40/2c/48fa93f1acca9f79a06da0df7bfe916632ecc7fce1971067b3e46bcae55b/opentelemetry_instrumentation-0.51b0-py3-none-any.whl", hash = "sha256:c6de8bd26b75ec8b0e54dff59e198946e29de6a10ec65488c357d4b34aa5bdcf", size = 30923 }, ] [[package]] name = "opentelemetry-instrumentation-asgi" -version = "0.50b0" +version = "0.51b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "asgiref", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3020,14 +3166,14 @@ dependencies = [ { name = "opentelemetry-semantic-conventions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "opentelemetry-util-http", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/49/cc/a7b2fd243c6d2621803092eba62e450071b6752dfe4f64f530bbfd91a328/opentelemetry_instrumentation_asgi-0.50b0.tar.gz", hash = "sha256:3ca4cb5616ae6a3e8ce86e7d5c360a8d8cc8ed722cf3dc8a5e44300774e87d49", size = 24105 } +sdist = { url = "https://files.pythonhosted.org/packages/9e/67/8aa6e1129f641f0f3f8786e6c5d18c1f2bbe490bd4b0e91a6879e85154d2/opentelemetry_instrumentation_asgi-0.51b0.tar.gz", hash = "sha256:b3fe97c00f0bfa934371a69674981d76591c68d937b6422a5716ca21081b4148", size = 24201 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/81/0899c6b56b1023835f266d909250d439174afa0c34ed5944c5021d3da263/opentelemetry_instrumentation_asgi-0.50b0-py3-none-any.whl", hash = "sha256:2ba1297f746e55dec5a17fe825689da0613662fb25c004c3965a6c54b1d5be22", size = 16304 }, + { url = "https://files.pythonhosted.org/packages/54/7e/0a95ab37302729543631a789ba8e71dea75c520495739dbbbdfdc580b401/opentelemetry_instrumentation_asgi-0.51b0-py3-none-any.whl", hash = "sha256:e8072993db47303b633c6ec1bc74726ba4d32bd0c46c28dfadf99f79521a324c", size = 16340 }, ] [[package]] name = "opentelemetry-instrumentation-fastapi" -version = "0.50b0" +version = "0.51b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3036,113 +3182,117 @@ dependencies = [ { name = "opentelemetry-semantic-conventions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "opentelemetry-util-http", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8d/f8/1917b0b3e414e23c7d71c9a33f0ce020f94bc47d22a30f54ace704e07588/opentelemetry_instrumentation_fastapi-0.50b0.tar.gz", hash = "sha256:16b9181682136da210295def2bb304a32fb9bdee9a935cdc9da43567f7c1149e", size = 19214 } +sdist = { url = "https://files.pythonhosted.org/packages/2d/dc/8db4422b5084177d1ef6c7855c69bf2e9e689f595a4a9b59e60588e0d427/opentelemetry_instrumentation_fastapi-0.51b0.tar.gz", hash = "sha256:1624e70f2f4d12ceb792d8a0c331244cd6723190ccee01336273b4559bc13abc", size = 19249 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/d6/37784bb30b213e2dd6838b9f96c2940907022c1b75ef1ff18a99afe42433/opentelemetry_instrumentation_fastapi-0.50b0-py3-none-any.whl", hash = "sha256:8f03b738495e4705fbae51a2826389c7369629dace89d0f291c06ffefdff5e52", size = 12079 }, + { url = 
"https://files.pythonhosted.org/packages/55/1c/ec2d816b78edf2404d7b3df6d09eefb690b70bfd191b7da06f76634f1bdc/opentelemetry_instrumentation_fastapi-0.51b0-py3-none-any.whl", hash = "sha256:10513bbc11a1188adb9c1d2c520695f7a8f2b5f4de14e8162098035901cd6493", size = 12117 }, ] [[package]] name = "opentelemetry-proto" -version = "1.29.0" +version = "1.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/80/52/fd3b3d79e1b00ad2dcac92db6885e49bedbf7a6828647954e4952d653132/opentelemetry_proto-1.29.0.tar.gz", hash = "sha256:3c136aa293782e9b44978c738fff72877a4b78b5d21a64e879898db7b2d93e5d", size = 34320 } +sdist = { url = "https://files.pythonhosted.org/packages/31/6e/c1ff2e3b0cd3a189a6be03fd4d63441d73d7addd9117ab5454e667b9b6c7/opentelemetry_proto-1.30.0.tar.gz", hash = "sha256:afe5c9c15e8b68d7c469596e5b32e8fc085eb9febdd6fb4e20924a93a0389179", size = 34362 } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/66/a500e38ee322d89fce61c74bd7769c8ef3bebc6c2f43fda5f3fc3441286d/opentelemetry_proto-1.29.0-py3-none-any.whl", hash = "sha256:495069c6f5495cbf732501cdcd3b7f60fda2b9d3d4255706ca99b7ca8dec53ff", size = 55818 }, + { url = "https://files.pythonhosted.org/packages/56/d7/85de6501f7216995295f7ec11e470142e6a6e080baacec1753bbf272e007/opentelemetry_proto-1.30.0-py3-none-any.whl", hash = "sha256:c6290958ff3ddacc826ca5abbeb377a31c2334387352a259ba0df37c243adc11", size = 55854 }, ] [[package]] name = "opentelemetry-sdk" -version = "1.29.0" +version = "1.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "opentelemetry-semantic-conventions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0c/5a/1ed4c3cf6c09f80565fc085f7e8efa0c222712fd2a9412d07424705dcf72/opentelemetry_sdk-1.29.0.tar.gz", hash = "sha256:b0787ce6aade6ab84315302e72bd7a7f2f014b0fb1b7c3295b88afe014ed0643", size = 157229 } +sdist = { url = "https://files.pythonhosted.org/packages/93/ee/d710062e8a862433d1be0b85920d0c653abe318878fef2d14dfe2c62ff7b/opentelemetry_sdk-1.30.0.tar.gz", hash = "sha256:c9287a9e4a7614b9946e933a67168450b9ab35f08797eb9bc77d998fa480fa18", size = 158633 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/1d/512b86af21795fb463726665e2f61db77d384e8779fdcf4cb0ceec47866d/opentelemetry_sdk-1.29.0-py3-none-any.whl", hash = "sha256:173be3b5d3f8f7d671f20ea37056710217959e774e2749d984355d1f9391a30a", size = 118078 }, + { url = "https://files.pythonhosted.org/packages/97/28/64d781d6adc6bda2260067ce2902bd030cf45aec657e02e28c5b4480b976/opentelemetry_sdk-1.30.0-py3-none-any.whl", hash = "sha256:14fe7afc090caad881addb6926cec967129bd9260c4d33ae6a217359f6b61091", size = 118717 }, ] [[package]] name = "opentelemetry-semantic-conventions" -version = "0.50b0" +version = "0.51b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "deprecated", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or 
sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e7/4e/d7c7c91ff47cd96fe4095dd7231701aec7347426fd66872ff320d6cd1fcc/opentelemetry_semantic_conventions-0.50b0.tar.gz", hash = "sha256:02dc6dbcb62f082de9b877ff19a3f1ffaa3c306300fa53bfac761c4567c83d38", size = 100459 } +sdist = { url = "https://files.pythonhosted.org/packages/1e/c0/0f9ef4605fea7f2b83d55dd0b0d7aebe8feead247cd6facd232b30907b4f/opentelemetry_semantic_conventions-0.51b0.tar.gz", hash = "sha256:3fabf47f35d1fd9aebcdca7e6802d86bd5ebc3bc3408b7e3248dde6e87a18c47", size = 107191 } wheels = [ - { url = "https://files.pythonhosted.org/packages/da/fb/dc15fad105450a015e913cfa4f5c27b6a5f1bea8fb649f8cae11e699c8af/opentelemetry_semantic_conventions-0.50b0-py3-none-any.whl", hash = "sha256:e87efba8fdb67fb38113efea6a349531e75ed7ffc01562f65b802fcecb5e115e", size = 166602 }, + { url = "https://files.pythonhosted.org/packages/2e/75/d7bdbb6fd8630b4cafb883482b75c4fc276b6426619539d266e32ac53266/opentelemetry_semantic_conventions-0.51b0-py3-none-any.whl", hash = "sha256:fdc777359418e8d06c86012c3dc92c88a6453ba662e941593adb062e48c2eeae", size = 177416 }, ] [[package]] name = "opentelemetry-util-http" -version = "0.50b0" +version = "0.51b0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/69/10/ce3f0d1157cedbd819194f0b27a6bbb7c19a8bceb3941e4a4775014076cf/opentelemetry_util_http-0.50b0.tar.gz", hash = "sha256:dc4606027e1bc02aabb9533cc330dd43f874fca492e4175c31d7154f341754af", size = 7859 } +sdist = { url = "https://files.pythonhosted.org/packages/58/64/32510c0a803465eb6ef1f5bd514d0f5627f8abc9444ed94f7240faf6fcaa/opentelemetry_util_http-0.51b0.tar.gz", hash = "sha256:05edd19ca1cc3be3968b1e502fd94816901a365adbeaab6b6ddb974384d3a0b9", size = 8043 } wheels = [ - { url = "https://files.pythonhosted.org/packages/64/8a/9e1b54f50d1fddebbeac9a9b0632f8db6ece7add904fb593ee2e268ee4de/opentelemetry_util_http-0.50b0-py3-none-any.whl", hash = "sha256:21f8aedac861ffa3b850f8c0a6c373026189eb8630ac6e14a2bf8c55695cc090", size = 6942 }, + { url = "https://files.pythonhosted.org/packages/48/dd/c371eeb9cc78abbdad231a27ce1a196a37ef96328d876ccbb381dea4c8ee/opentelemetry_util_http-0.51b0-py3-none-any.whl", hash = "sha256:0561d7a6e9c422b9ef9ae6e77eafcfcd32a2ab689f5e801475cbb67f189efa20", size = 7304 }, ] [[package]] name = "orjson" -version = "3.10.14" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/92/f7/3219b56f47b4f5e864fb11cdf4ac0aaa3de608730ad2dc4c6e16382f35ec/orjson-3.10.14.tar.gz", hash = "sha256:cf31f6f071a6b8e7aa1ead1fa27b935b48d00fbfa6a28ce856cfff2d5dd68eed", size = 5282116 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/62/64348b8b29a14c7342f6aa45c8be0a87fdda2ce7716bc123717376537077/orjson-3.10.14-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:849ea7845a55f09965826e816cdc7689d6cf74fe9223d79d758c714af955bcb6", size = 249439 }, - { url = "https://files.pythonhosted.org/packages/9f/51/48f4dfbca7b4db630316b170db4a150a33cd405650258bd62a2d619b43b4/orjson-3.10.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5947b139dfa33f72eecc63f17e45230a97e741942955a6c9e650069305eb73d", size = 135811 }, - { url = "https://files.pythonhosted.org/packages/a1/1c/e18770843e6d045605c8e00a1be801da5668fa934b323b0492a49c9dee4f/orjson-3.10.14-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:cde6d76910d3179dae70f164466692f4ea36da124d6fb1a61399ca589e81d69a", size = 150154 }, - { url = "https://files.pythonhosted.org/packages/51/1e/3817dc79164f1fc17fc53102f74f62d31f5f4ec042abdd24d94c5e06e51c/orjson-3.10.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6dfbaeb7afa77ca608a50e2770a0461177b63a99520d4928e27591b142c74b1", size = 139740 }, - { url = "https://files.pythonhosted.org/packages/ff/fc/fbf9e25448f7a2d67c1a2b6dad78a9340666bf9fda3339ff59b1e93f0b6f/orjson-3.10.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa45e489ef80f28ff0e5ba0a72812b8cfc7c1ef8b46a694723807d1b07c89ebb", size = 154479 }, - { url = "https://files.pythonhosted.org/packages/d4/df/c8b7ea21ff658f6a9a26d562055631c01d445bda5eb613c02c7d0934607d/orjson-3.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5007abfdbb1d866e2aa8990bd1c465f0f6da71d19e695fc278282be12cffa5", size = 130414 }, - { url = "https://files.pythonhosted.org/packages/df/f7/e29c2d42bef8fbf696a5e54e6339b0b9ea5179326950fee6ae80acf59d09/orjson-3.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1b49e2af011c84c3f2d541bb5cd1e3c7c2df672223e7e3ea608f09cf295e5f8a", size = 138545 }, - { url = "https://files.pythonhosted.org/packages/8e/97/afdf2908fe8eaeecb29e97fa82dc934f275acf330e5271def0b8fbac5478/orjson-3.10.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:164ac155109226b3a2606ee6dda899ccfbe6e7e18b5bdc3fbc00f79cc074157d", size = 130952 }, - { url = "https://files.pythonhosted.org/packages/4a/dd/04e01c1305694f47e9794c60ec7cece02e55fa9d57c5d72081eaaa62ad1d/orjson-3.10.14-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:6b1225024cf0ef5d15934b5ffe9baf860fe8bc68a796513f5ea4f5056de30bca", size = 414673 }, - { url = "https://files.pythonhosted.org/packages/fa/12/28c4d5f6a395ac9693b250f0662366968c47fc99c8f3cd803a65b1f5ba46/orjson-3.10.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d6546e8073dc382e60fcae4a001a5a1bc46da5eab4a4878acc2d12072d6166d5", size = 141002 }, - { url = "https://files.pythonhosted.org/packages/21/f6/357cb167c2d2fd9542251cfd9f68681b67ed4dcdac82aa6ee2f4f3ab952e/orjson-3.10.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9f1d2942605c894162252d6259b0121bf1cb493071a1ea8cb35d79cb3e6ac5bc", size = 129626 }, - { url = "https://files.pythonhosted.org/packages/df/07/d9062353500df9db8bfa7c6a5982687c97d0b69a5b158c4166d407ac94e2/orjson-3.10.14-cp310-cp310-win32.whl", hash = "sha256:397083806abd51cf2b3bbbf6c347575374d160331a2d33c5823e22249ad3118b", size = 142429 }, - { url = "https://files.pythonhosted.org/packages/50/ba/6ba2bf69ac0526d143aebe78bc39e6e5fbb51d5336fbc5efb9aab6687cd9/orjson-3.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:fa18f949d3183a8d468367056be989666ac2bef3a72eece0bade9cdb733b3c28", size = 133512 }, - { url = "https://files.pythonhosted.org/packages/bf/18/26721760368e12b691fb6811692ed21ae5275ea918db409ba26866cacbe8/orjson-3.10.14-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f506fd666dd1ecd15a832bebc66c4df45c1902fd47526292836c339f7ba665a9", size = 249437 }, - { url = "https://files.pythonhosted.org/packages/d5/5b/2adfe7cc301edeb3bffc1942956659c19ec00d51a21c53c17c0767bebf47/orjson-3.10.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efe5fd254cfb0eeee13b8ef7ecb20f5d5a56ddda8a587f3852ab2cedfefdb5f6", size = 135812 }, - { url = 
"https://files.pythonhosted.org/packages/8a/68/07df7787fd9ff6dba815b2d793eec5e039d288fdf150431ed48a660bfcbb/orjson-3.10.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ddc8c866d7467f5ee2991397d2ea94bcf60d0048bdd8ca555740b56f9042725", size = 150153 }, - { url = "https://files.pythonhosted.org/packages/02/71/f68562734461b801b53bacd5365e079dcb3c78656a662f0639494880e522/orjson-3.10.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af8e42ae4363773658b8d578d56dedffb4f05ceeb4d1d4dd3fb504950b45526", size = 139742 }, - { url = "https://files.pythonhosted.org/packages/04/03/1355fb27652582f00d3c62e93a32b982fa42bc31d2e07f0a317867069096/orjson-3.10.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84dd83110503bc10e94322bf3ffab8bc49150176b49b4984dc1cce4c0a993bf9", size = 154479 }, - { url = "https://files.pythonhosted.org/packages/7c/47/1c2a840f27715e8bc2bbafffc851512ede6e53483593eded190919bdcaf4/orjson-3.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36f5bfc0399cd4811bf10ec7a759c7ab0cd18080956af8ee138097d5b5296a95", size = 130413 }, - { url = "https://files.pythonhosted.org/packages/dd/b2/5bb51006cbae85b052d1bbee7ff43ae26fa155bb3d31a71b0c07d384d5e3/orjson-3.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868943660fb2a1e6b6b965b74430c16a79320b665b28dd4511d15ad5038d37d5", size = 138545 }, - { url = "https://files.pythonhosted.org/packages/79/30/7841a5dd46bb46b8e868791d5469c9d4788d3e26b7e69d40256647997baf/orjson-3.10.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33449c67195969b1a677533dee9d76e006001213a24501333624623e13c7cc8e", size = 130953 }, - { url = "https://files.pythonhosted.org/packages/08/49/720e7c2040c0f1df630a36d83d449bd7e4d4471071d5ece47a4f7211d570/orjson-3.10.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e4c9f60f9fb0b5be66e416dcd8c9d94c3eabff3801d875bdb1f8ffc12cf86905", size = 414675 }, - { url = "https://files.pythonhosted.org/packages/50/b0/ca7619f34280e7dcbd50dbc9c5fe5200c12cd7269b8858652beb3887483f/orjson-3.10.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0de4d6315cfdbd9ec803b945c23b3a68207fd47cbe43626036d97e8e9561a436", size = 141004 }, - { url = "https://files.pythonhosted.org/packages/75/1b/7548e3a711543f438e87a4349e00439ab7f37807942e5659f29363f35765/orjson-3.10.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:83adda3db595cb1a7e2237029b3249c85afbe5c747d26b41b802e7482cb3933e", size = 129629 }, - { url = "https://files.pythonhosted.org/packages/b0/1e/4930a6ff46debd6be1ff18e869b7bc43a7ad762c865610b7e745038d6f68/orjson-3.10.14-cp311-cp311-win32.whl", hash = "sha256:998019ef74a4997a9d741b1473533cdb8faa31373afc9849b35129b4b8ec048d", size = 142430 }, - { url = "https://files.pythonhosted.org/packages/28/e0/6cc1cd1dfde36555e81ac869f7847e86bb11c27f97b72fde2f1509b12163/orjson-3.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:9d034abdd36f0f0f2240f91492684e5043d46f290525d1117712d5b8137784eb", size = 133516 }, - { url = "https://files.pythonhosted.org/packages/8c/dc/dc5a882be016ee8688bd867ad3b4e3b2ab039d91383099702301a1adb6ac/orjson-3.10.14-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:2ad4b7e367efba6dc3f119c9a0fcd41908b7ec0399a696f3cdea7ec477441b09", size = 249396 }, - { url = "https://files.pythonhosted.org/packages/f0/95/4c23ff5c0505cd687928608e0b7910ccb44ce59490079e1c17b7610aa0d0/orjson-3.10.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:f496286fc85e93ce0f71cc84fc1c42de2decf1bf494094e188e27a53694777a7", size = 135689 }, - { url = "https://files.pythonhosted.org/packages/ad/39/b4bdd19604dce9d6509c4d86e8e251a1373a24204b4c4169866dcecbe5f5/orjson-3.10.14-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c7f189bbfcded40e41a6969c1068ba305850ba016665be71a217918931416fbf", size = 150136 }, - { url = "https://files.pythonhosted.org/packages/1d/92/7b9bad96353abd3e89947960252dcf1022ce2df7f29056e434de05e18b6d/orjson-3.10.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cc8204f0b75606869c707da331058ddf085de29558b516fc43c73ee5ee2aadb", size = 139766 }, - { url = "https://files.pythonhosted.org/packages/a6/bd/abb13c86540b7a91b40d7d9f8549d03a026bc22d78fa93f71d68b8f4c36e/orjson-3.10.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:deaa2899dff7f03ab667e2ec25842d233e2a6a9e333efa484dfe666403f3501c", size = 154533 }, - { url = "https://files.pythonhosted.org/packages/c0/02/0bcb91ec9c7143012359983aca44f567f87df379957cd4af11336217b12f/orjson-3.10.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1c3ea52642c9714dc6e56de8a451a066f6d2707d273e07fe8a9cc1ba073813d", size = 130658 }, - { url = "https://files.pythonhosted.org/packages/b4/1e/b304596bb1f800d47d6e92305bd09f0eef693ed4f7b2095db63f9808b229/orjson-3.10.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d3f9ed72e7458ded9a1fb1b4d4ed4c4fdbaf82030ce3f9274b4dc1bff7ace2b", size = 138546 }, - { url = "https://files.pythonhosted.org/packages/56/c7/65d72b22080186ef618a46afeb9386e20056f3237664090f3a2f8da1cd6d/orjson-3.10.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:07520685d408a2aba514c17ccc16199ff2934f9f9e28501e676c557f454a37fe", size = 130774 }, - { url = "https://files.pythonhosted.org/packages/4d/85/1ab35a832f32b37ccd673721e845cf302f23453603112255af611c91d1d1/orjson-3.10.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:76344269b550ea01488d19a2a369ab572c1ac4449a72e9f6ac0d70eb1cbfb953", size = 414649 }, - { url = "https://files.pythonhosted.org/packages/d1/7d/1d6575f779bab8fe698fa6d52e8aa3aa0a9fca4885d0bf6197700455713a/orjson-3.10.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e2979d0f2959990620f7e62da6cd954e4620ee815539bc57a8ae46e2dacf90e3", size = 141060 }, - { url = "https://files.pythonhosted.org/packages/f8/26/68513e28b3bd1d7633318ed2818e86d1bfc8b782c87c520c7b363092837f/orjson-3.10.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:03f61ca3674555adcb1aa717b9fc87ae936aa7a63f6aba90a474a88701278780", size = 129798 }, - { url = "https://files.pythonhosted.org/packages/44/ca/020fb99c98ff7267ba18ce798ff0c8c3aa97cd949b611fc76cad3c87e534/orjson-3.10.14-cp312-cp312-win32.whl", hash = "sha256:d5075c54edf1d6ad81d4c6523ce54a748ba1208b542e54b97d8a882ecd810fd1", size = 142524 }, - { url = "https://files.pythonhosted.org/packages/70/7f/f2d346819a273653825e7c92dc26418c8da506003c9fc1dfe8157e733b2e/orjson-3.10.14-cp312-cp312-win_amd64.whl", hash = "sha256:175cafd322e458603e8ce73510a068d16b6e6f389c13f69bf16de0e843d7d406", size = 133663 }, - { url = "https://files.pythonhosted.org/packages/46/bb/f1b037d89f580c79eda0940772384cc226a697be1cb4eb94ae4e792aa34c/orjson-3.10.14-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:0905ca08a10f7e0e0c97d11359609300eb1437490a7f32bbaa349de757e2e0c7", size = 249333 }, - { url = 
"https://files.pythonhosted.org/packages/e4/72/12958a073cace3f8acef0f9a30739d95f46bbb1544126fecad11527d4508/orjson-3.10.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92d13292249f9f2a3e418cbc307a9fbbef043c65f4bd8ba1eb620bc2aaba3d15", size = 125038 }, - { url = "https://files.pythonhosted.org/packages/c0/ae/461f78b1c98de1bc034af88bc21c6a792cc63373261fbc10a6ee560814fa/orjson-3.10.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90937664e776ad316d64251e2fa2ad69265e4443067668e4727074fe39676414", size = 130604 }, - { url = "https://files.pythonhosted.org/packages/ae/d2/17f50513f56bff7898840fddf7fb88f501305b9b2605d2793ff224789665/orjson-3.10.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9ed3d26c4cb4f6babaf791aa46a029265850e80ec2a566581f5c2ee1a14df4f1", size = 130756 }, - { url = "https://files.pythonhosted.org/packages/fa/bc/673856e4af94c9890dfd8e2054c05dc2ddc16d1728c2aa0c5bd198943105/orjson-3.10.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:56ee546c2bbe9599aba78169f99d1dc33301853e897dbaf642d654248280dc6e", size = 414613 }, - { url = "https://files.pythonhosted.org/packages/09/01/08c5b69b0756dd1790fcffa569d6a28dedcd7b97f825e4b46537b788908c/orjson-3.10.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:901e826cb2f1bdc1fcef3ef59adf0c451e8f7c0b5deb26c1a933fb66fb505eae", size = 141010 }, - { url = "https://files.pythonhosted.org/packages/5b/98/72883bb6cf88fd364996e62d2026622ca79bfb8dbaf96ccdd2018ada25b1/orjson-3.10.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:26336c0d4b2d44636e1e1e6ed1002f03c6aae4a8a9329561c8883f135e9ff010", size = 129732 }, - { url = "https://files.pythonhosted.org/packages/e4/99/347418f7ef56dcb478ba131a6112b8ddd5b747942652b6e77a53155a7e21/orjson-3.10.14-cp313-cp313-win32.whl", hash = "sha256:e2bc525e335a8545c4e48f84dd0328bc46158c9aaeb8a1c2276546e94540ea3d", size = 142504 }, - { url = "https://files.pythonhosted.org/packages/59/ac/5e96cad01083015f7bfdb02ccafa489da8e6caa7f4c519e215f04d2bd856/orjson-3.10.14-cp313-cp313-win_amd64.whl", hash = "sha256:eca04dfd792cedad53dc9a917da1a522486255360cb4e77619343a20d9f35364", size = 133388 }, +version = "3.10.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/f9/5dea21763eeff8c1590076918a446ea3d6140743e0e36f58f369928ed0f4/orjson-3.10.15.tar.gz", hash = "sha256:05ca7fe452a2e9d8d9d706a2984c95b9c2ebc5db417ce0b7a49b91d50642a23e", size = 5282482 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/09/e5ff18ad009e6f97eb7edc5f67ef98b3ce0c189da9c3eaca1f9587cd4c61/orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04", size = 249532 }, + { url = "https://files.pythonhosted.org/packages/bd/b8/a75883301fe332bd433d9b0ded7d2bb706ccac679602c3516984f8814fb5/orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8", size = 125229 }, + { url = "https://files.pythonhosted.org/packages/83/4b/22f053e7a364cc9c685be203b1e40fc5f2b3f164a9b2284547504eec682e/orjson-3.10.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c2c79fa308e6edb0ffab0a31fd75a7841bf2a79a20ef08a3c6e3b26814c8ca8", size = 150148 }, + { url = 
"https://files.pythonhosted.org/packages/63/64/1b54fc75ca328b57dd810541a4035fe48c12a161d466e3cf5b11a8c25649/orjson-3.10.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cb85490aa6bf98abd20607ab5c8324c0acb48d6da7863a51be48505646c814", size = 139748 }, + { url = "https://files.pythonhosted.org/packages/5e/ff/ff0c5da781807bb0a5acd789d9a7fbcb57f7b0c6e1916595da1f5ce69f3c/orjson-3.10.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763dadac05e4e9d2bc14938a45a2d0560549561287d41c465d3c58aec818b164", size = 154559 }, + { url = "https://files.pythonhosted.org/packages/4e/9a/11e2974383384ace8495810d4a2ebef5f55aacfc97b333b65e789c9d362d/orjson-3.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a330b9b4734f09a623f74a7490db713695e13b67c959713b78369f26b3dee6bf", size = 130349 }, + { url = "https://files.pythonhosted.org/packages/2d/c4/dd9583aea6aefee1b64d3aed13f51d2aadb014028bc929fe52936ec5091f/orjson-3.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a61a4622b7ff861f019974f73d8165be1bd9a0855e1cad18ee167acacabeb061", size = 138514 }, + { url = "https://files.pythonhosted.org/packages/53/3e/dcf1729230654f5c5594fc752de1f43dcf67e055ac0d300c8cdb1309269a/orjson-3.10.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd271247691574416b3228db667b84775c497b245fa275c6ab90dc1ffbbd2b3", size = 130940 }, + { url = "https://files.pythonhosted.org/packages/e8/2b/b9759fe704789937705c8a56a03f6c03e50dff7df87d65cba9a20fec5282/orjson-3.10.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4759b109c37f635aa5c5cc93a1b26927bfde24b254bcc0e1149a9fada253d2d", size = 414713 }, + { url = "https://files.pythonhosted.org/packages/a7/6b/b9dfdbd4b6e20a59238319eb203ae07c3f6abf07eef909169b7a37ae3bba/orjson-3.10.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e992fd5cfb8b9f00bfad2fd7a05a4299db2bbe92e6440d9dd2fab27655b3182", size = 141028 }, + { url = "https://files.pythonhosted.org/packages/7c/b5/40f5bbea619c7caf75eb4d652a9821875a8ed04acc45fe3d3ef054ca69fb/orjson-3.10.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f95fb363d79366af56c3f26b71df40b9a583b07bbaaf5b317407c4d58497852e", size = 129715 }, + { url = "https://files.pythonhosted.org/packages/38/60/2272514061cbdf4d672edbca6e59c7e01cd1c706e881427d88f3c3e79761/orjson-3.10.15-cp310-cp310-win32.whl", hash = "sha256:f9875f5fea7492da8ec2444839dcc439b0ef298978f311103d0b7dfd775898ab", size = 142473 }, + { url = "https://files.pythonhosted.org/packages/11/5d/be1490ff7eafe7fef890eb4527cf5bcd8cfd6117f3efe42a3249ec847b60/orjson-3.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:17085a6aa91e1cd70ca8533989a18b5433e15d29c574582f76f821737c8d5806", size = 133564 }, + { url = "https://files.pythonhosted.org/packages/7a/a2/21b25ce4a2c71dbb90948ee81bd7a42b4fbfc63162e57faf83157d5540ae/orjson-3.10.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c4cc83960ab79a4031f3119cc4b1a1c627a3dc09df125b27c4201dff2af7eaa6", size = 249533 }, + { url = "https://files.pythonhosted.org/packages/b2/85/2076fc12d8225698a51278009726750c9c65c846eda741e77e1761cfef33/orjson-3.10.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ddbeef2481d895ab8be5185f2432c334d6dec1f5d1933a9c83014d188e102cef", size = 125230 }, + { url = "https://files.pythonhosted.org/packages/06/df/a85a7955f11274191eccf559e8481b2be74a7c6d43075d0a9506aa80284d/orjson-3.10.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", 
hash = "sha256:9e590a0477b23ecd5b0ac865b1b907b01b3c5535f5e8a8f6ab0e503efb896334", size = 150148 }, + { url = "https://files.pythonhosted.org/packages/37/b3/94c55625a29b8767c0eed194cb000b3787e3c23b4cdd13be17bae6ccbb4b/orjson-3.10.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6be38bd103d2fd9bdfa31c2720b23b5d47c6796bcb1d1b598e3924441b4298d", size = 139749 }, + { url = "https://files.pythonhosted.org/packages/53/ba/c608b1e719971e8ddac2379f290404c2e914cf8e976369bae3cad88768b1/orjson-3.10.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ff4f6edb1578960ed628a3b998fa54d78d9bb3e2eb2cfc5c2a09732431c678d0", size = 154558 }, + { url = "https://files.pythonhosted.org/packages/b2/c4/c1fb835bb23ad788a39aa9ebb8821d51b1c03588d9a9e4ca7de5b354fdd5/orjson-3.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0482b21d0462eddd67e7fce10b89e0b6ac56570424662b685a0d6fccf581e13", size = 130349 }, + { url = "https://files.pythonhosted.org/packages/78/14/bb2b48b26ab3c570b284eb2157d98c1ef331a8397f6c8bd983b270467f5c/orjson-3.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bb5cc3527036ae3d98b65e37b7986a918955f85332c1ee07f9d3f82f3a6899b5", size = 138513 }, + { url = "https://files.pythonhosted.org/packages/4a/97/d5b353a5fe532e92c46467aa37e637f81af8468aa894cd77d2ec8a12f99e/orjson-3.10.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d569c1c462912acdd119ccbf719cf7102ea2c67dd03b99edcb1a3048651ac96b", size = 130942 }, + { url = "https://files.pythonhosted.org/packages/b5/5d/a067bec55293cca48fea8b9928cfa84c623be0cce8141d47690e64a6ca12/orjson-3.10.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:1e6d33efab6b71d67f22bf2962895d3dc6f82a6273a965fab762e64fa90dc399", size = 414717 }, + { url = "https://files.pythonhosted.org/packages/6f/9a/1485b8b05c6b4c4db172c438cf5db5dcfd10e72a9bc23c151a1137e763e0/orjson-3.10.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c33be3795e299f565681d69852ac8c1bc5c84863c0b0030b2b3468843be90388", size = 141033 }, + { url = "https://files.pythonhosted.org/packages/f8/d2/fc67523656e43a0c7eaeae9007c8b02e86076b15d591e9be11554d3d3138/orjson-3.10.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eea80037b9fae5339b214f59308ef0589fc06dc870578b7cce6d71eb2096764c", size = 129720 }, + { url = "https://files.pythonhosted.org/packages/79/42/f58c7bd4e5b54da2ce2ef0331a39ccbbaa7699b7f70206fbf06737c9ed7d/orjson-3.10.15-cp311-cp311-win32.whl", hash = "sha256:d5ac11b659fd798228a7adba3e37c010e0152b78b1982897020a8e019a94882e", size = 142473 }, + { url = "https://files.pythonhosted.org/packages/00/f8/bb60a4644287a544ec81df1699d5b965776bc9848d9029d9f9b3402ac8bb/orjson-3.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:cf45e0214c593660339ef63e875f32ddd5aa3b4adc15e662cdb80dc49e194f8e", size = 133570 }, + { url = "https://files.pythonhosted.org/packages/66/85/22fe737188905a71afcc4bf7cc4c79cd7f5bbe9ed1fe0aac4ce4c33edc30/orjson-3.10.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9d11c0714fc85bfcf36ada1179400862da3288fc785c30e8297844c867d7505a", size = 249504 }, + { url = "https://files.pythonhosted.org/packages/48/b7/2622b29f3afebe938a0a9037e184660379797d5fd5234e5998345d7a5b43/orjson-3.10.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dba5a1e85d554e3897fa9fe6fbcff2ed32d55008973ec9a2b992bd9a65d2352d", size = 125080 }, + { url = 
"https://files.pythonhosted.org/packages/ce/8f/0b72a48f4403d0b88b2a41450c535b3e8989e8a2d7800659a967efc7c115/orjson-3.10.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7723ad949a0ea502df656948ddd8b392780a5beaa4c3b5f97e525191b102fff0", size = 150121 }, + { url = "https://files.pythonhosted.org/packages/06/ec/acb1a20cd49edb2000be5a0404cd43e3c8aad219f376ac8c60b870518c03/orjson-3.10.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6fd9bc64421e9fe9bd88039e7ce8e58d4fead67ca88e3a4014b143cec7684fd4", size = 139796 }, + { url = "https://files.pythonhosted.org/packages/33/e1/f7840a2ea852114b23a52a1c0b2bea0a1ea22236efbcdb876402d799c423/orjson-3.10.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dadba0e7b6594216c214ef7894c4bd5f08d7c0135f4dd0145600be4fbcc16767", size = 154636 }, + { url = "https://files.pythonhosted.org/packages/fa/da/31543337febd043b8fa80a3b67de627669b88c7b128d9ad4cc2ece005b7a/orjson-3.10.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48f59114fe318f33bbaee8ebeda696d8ccc94c9e90bc27dbe72153094e26f41", size = 130621 }, + { url = "https://files.pythonhosted.org/packages/ed/78/66115dc9afbc22496530d2139f2f4455698be444c7c2475cb48f657cefc9/orjson-3.10.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:035fb83585e0f15e076759b6fedaf0abb460d1765b6a36f48018a52858443514", size = 138516 }, + { url = "https://files.pythonhosted.org/packages/22/84/cd4f5fb5427ffcf823140957a47503076184cb1ce15bcc1165125c26c46c/orjson-3.10.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d13b7fe322d75bf84464b075eafd8e7dd9eae05649aa2a5354cfa32f43c59f17", size = 130762 }, + { url = "https://files.pythonhosted.org/packages/93/1f/67596b711ba9f56dd75d73b60089c5c92057f1130bb3a25a0f53fb9a583b/orjson-3.10.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7066b74f9f259849629e0d04db6609db4cf5b973248f455ba5d3bd58a4daaa5b", size = 414700 }, + { url = "https://files.pythonhosted.org/packages/7c/0c/6a3b3271b46443d90efb713c3e4fe83fa8cd71cda0d11a0f69a03f437c6e/orjson-3.10.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88dc3f65a026bd3175eb157fea994fca6ac7c4c8579fc5a86fc2114ad05705b7", size = 141077 }, + { url = "https://files.pythonhosted.org/packages/3b/9b/33c58e0bfc788995eccd0d525ecd6b84b40d7ed182dd0751cd4c1322ac62/orjson-3.10.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b342567e5465bd99faa559507fe45e33fc76b9fb868a63f1642c6bc0735ad02a", size = 129898 }, + { url = "https://files.pythonhosted.org/packages/01/c1/d577ecd2e9fa393366a1ea0a9267f6510d86e6c4bb1cdfb9877104cac44c/orjson-3.10.15-cp312-cp312-win32.whl", hash = "sha256:0a4f27ea5617828e6b58922fdbec67b0aa4bb844e2d363b9244c47fa2180e665", size = 142566 }, + { url = "https://files.pythonhosted.org/packages/ed/eb/a85317ee1732d1034b92d56f89f1de4d7bf7904f5c8fb9dcdd5b1c83917f/orjson-3.10.15-cp312-cp312-win_amd64.whl", hash = "sha256:ef5b87e7aa9545ddadd2309efe6824bd3dd64ac101c15dae0f2f597911d46eaa", size = 133732 }, + { url = "https://files.pythonhosted.org/packages/06/10/fe7d60b8da538e8d3d3721f08c1b7bff0491e8fa4dd3bf11a17e34f4730e/orjson-3.10.15-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bae0e6ec2b7ba6895198cd981b7cca95d1487d0147c8ed751e5632ad16f031a6", size = 249399 }, + { url = "https://files.pythonhosted.org/packages/6b/83/52c356fd3a61abd829ae7e4366a6fe8e8863c825a60d7ac5156067516edf/orjson-3.10.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:f93ce145b2db1252dd86af37d4165b6faa83072b46e3995ecc95d4b2301b725a", size = 125044 }, + { url = "https://files.pythonhosted.org/packages/55/b2/d06d5901408e7ded1a74c7c20d70e3a127057a6d21355f50c90c0f337913/orjson-3.10.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c203f6f969210128af3acae0ef9ea6aab9782939f45f6fe02d05958fe761ef9", size = 150066 }, + { url = "https://files.pythonhosted.org/packages/75/8c/60c3106e08dc593a861755781c7c675a566445cc39558677d505878d879f/orjson-3.10.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8918719572d662e18b8af66aef699d8c21072e54b6c82a3f8f6404c1f5ccd5e0", size = 139737 }, + { url = "https://files.pythonhosted.org/packages/6a/8c/ae00d7d0ab8a4490b1efeb01ad4ab2f1982e69cc82490bf8093407718ff5/orjson-3.10.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f71eae9651465dff70aa80db92586ad5b92df46a9373ee55252109bb6b703307", size = 154804 }, + { url = "https://files.pythonhosted.org/packages/22/86/65dc69bd88b6dd254535310e97bc518aa50a39ef9c5a2a5d518e7a223710/orjson-3.10.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e117eb299a35f2634e25ed120c37c641398826c2f5a3d3cc39f5993b96171b9e", size = 130583 }, + { url = "https://files.pythonhosted.org/packages/bb/00/6fe01ededb05d52be42fabb13d93a36e51f1fd9be173bd95707d11a8a860/orjson-3.10.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13242f12d295e83c2955756a574ddd6741c81e5b99f2bef8ed8d53e47a01e4b7", size = 138465 }, + { url = "https://files.pythonhosted.org/packages/db/2f/4cc151c4b471b0cdc8cb29d3eadbce5007eb0475d26fa26ed123dca93b33/orjson-3.10.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7946922ada8f3e0b7b958cc3eb22cfcf6c0df83d1fe5521b4a100103e3fa84c8", size = 130742 }, + { url = "https://files.pythonhosted.org/packages/9f/13/8a6109e4b477c518498ca37963d9c0eb1508b259725553fb53d53b20e2ea/orjson-3.10.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b7155eb1623347f0f22c38c9abdd738b287e39b9982e1da227503387b81b34ca", size = 414669 }, + { url = "https://files.pythonhosted.org/packages/22/7b/1d229d6d24644ed4d0a803de1b0e2df832032d5beda7346831c78191b5b2/orjson-3.10.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:208beedfa807c922da4e81061dafa9c8489c6328934ca2a562efa707e049e561", size = 141043 }, + { url = "https://files.pythonhosted.org/packages/cc/d3/6dc91156cf12ed86bed383bcb942d84d23304a1e57b7ab030bf60ea130d6/orjson-3.10.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eca81f83b1b8c07449e1d6ff7074e82e3fd6777e588f1a6632127f286a968825", size = 129826 }, + { url = "https://files.pythonhosted.org/packages/b3/38/c47c25b86f6996f1343be721b6ea4367bc1c8bc0fc3f6bbcd995d18cb19d/orjson-3.10.15-cp313-cp313-win32.whl", hash = "sha256:c03cd6eea1bd3b949d0d007c8d57049aa2b39bd49f58b4b2af571a5d3833d890", size = 142542 }, + { url = "https://files.pythonhosted.org/packages/27/f1/1d7ec15b20f8ce9300bc850de1e059132b88990e46cd0ccac29cbf11e4f9/orjson-3.10.15-cp313-cp313-win_amd64.whl", hash = "sha256:fd56a26a04f6ba5fb2045b0acc487a63162a958ed837648c5781e1fe3316cfbf", size = 133444 }, ] [[package]] @@ -3396,18 +3546,19 @@ wheels = [ [[package]] name = "posthog" -version = "3.8.3" +version = "3.16.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "backoff", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "distro", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 
'win32'" }, { name = "monotonic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/51/5a/057ebd6b279940e2cf2cbe8b10a4b34bc832f6f82b10649dcd12210219e9/posthog-3.8.3.tar.gz", hash = "sha256:263df03ea312d4b47a3d5ea393fdb22ff2ed78140d5ce9af9dd0618ae245a44b", size = 56864 } +sdist = { url = "https://files.pythonhosted.org/packages/b4/cd/d349468731e2cdbd61bc9655acae5dac961156f4b9c652f011b8433d906e/posthog-3.16.0.tar.gz", hash = "sha256:953176a443b30b1404c0f36010a95caad60a83c31ecb17b427f6d986f6f765c1", size = 65192 } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/3a/ff36f067367de4477d114ab04f42d5830849bad1b0949eb70c9858cdb7e2/posthog-3.8.3-py2.py3-none-any.whl", hash = "sha256:7215c4d7649b0c87905b42f460403311564996d776ab48d39852f46539a50f22", size = 64665 }, + { url = "https://files.pythonhosted.org/packages/75/89/5524d64b421e946f85a42d9e95348bfd1b43335eadb9f3ee4a0e368a1b47/posthog-3.16.0-py2.py3-none-any.whl", hash = "sha256:6d2140f58823e540855885a77474a32045f77c2276351791db4dca844f278b37", size = 75934 }, ] [[package]] @@ -3444,99 +3595,115 @@ wheels = [ [[package]] name = "prompt-toolkit" -version = "3.0.48" +version = "3.0.50" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "wcwidth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2d/4f/feb5e137aff82f7c7f3248267b97451da3644f6cdc218edfe549fb354127/prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90", size = 424684 } +sdist = { url = "https://files.pythonhosted.org/packages/a1/e1/bd15cb8ffdcfeeb2bdc215de3c3cffca11408d829e4b8416dcfe71ba8854/prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab", size = 429087 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/6a/fd08d94654f7e67c52ca30523a178b3f8ccc4237fce4be90d39c938a831a/prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e", size = 386595 }, + { url = "https://files.pythonhosted.org/packages/e4/ea/d836f008d33151c7a1f62caf3d8dd782e4d15f6a43897f64480c2b8de2ad/prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198", size = 387816 }, ] [[package]] name = "propcache" -version = "0.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/c8/2a13f78d82211490855b2fb303b6721348d0787fdd9a12ac46d99d3acde1/propcache-0.2.1.tar.gz", hash = "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64", size = 41735 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/a5/0ea64c9426959ef145a938e38c832fc551843481d356713ececa9a8a64e8/propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6", size = 79296 }, - { url = 
"https://files.pythonhosted.org/packages/76/5a/916db1aba735f55e5eca4733eea4d1973845cf77dfe67c2381a2ca3ce52d/propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2", size = 45622 }, - { url = "https://files.pythonhosted.org/packages/2d/62/685d3cf268b8401ec12b250b925b21d152b9d193b7bffa5fdc4815c392c2/propcache-0.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea", size = 45133 }, - { url = "https://files.pythonhosted.org/packages/4d/3d/31c9c29ee7192defc05aa4d01624fd85a41cf98e5922aaed206017329944/propcache-0.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212", size = 204809 }, - { url = "https://files.pythonhosted.org/packages/10/a1/e4050776f4797fc86140ac9a480d5dc069fbfa9d499fe5c5d2fa1ae71f07/propcache-0.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3", size = 219109 }, - { url = "https://files.pythonhosted.org/packages/c9/c0/e7ae0df76343d5e107d81e59acc085cea5fd36a48aa53ef09add7503e888/propcache-0.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d", size = 217368 }, - { url = "https://files.pythonhosted.org/packages/fc/e1/e0a2ed6394b5772508868a977d3238f4afb2eebaf9976f0b44a8d347ad63/propcache-0.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634", size = 205124 }, - { url = "https://files.pythonhosted.org/packages/50/c1/e388c232d15ca10f233c778bbdc1034ba53ede14c207a72008de45b2db2e/propcache-0.2.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2", size = 195463 }, - { url = "https://files.pythonhosted.org/packages/0a/fd/71b349b9def426cc73813dbd0f33e266de77305e337c8c12bfb0a2a82bfb/propcache-0.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958", size = 198358 }, - { url = "https://files.pythonhosted.org/packages/02/f2/d7c497cd148ebfc5b0ae32808e6c1af5922215fe38c7a06e4e722fe937c8/propcache-0.2.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c", size = 195560 }, - { url = "https://files.pythonhosted.org/packages/bb/57/f37041bbe5e0dfed80a3f6be2612a3a75b9cfe2652abf2c99bef3455bbad/propcache-0.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583", size = 196895 }, - { url = "https://files.pythonhosted.org/packages/83/36/ae3cc3e4f310bff2f064e3d2ed5558935cc7778d6f827dce74dcfa125304/propcache-0.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf", size = 207124 }, - { url = "https://files.pythonhosted.org/packages/8c/c4/811b9f311f10ce9d31a32ff14ce58500458443627e4df4ae9c264defba7f/propcache-0.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034", size = 210442 }, - { url = 
"https://files.pythonhosted.org/packages/18/dd/a1670d483a61ecac0d7fc4305d91caaac7a8fc1b200ea3965a01cf03bced/propcache-0.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b", size = 203219 }, - { url = "https://files.pythonhosted.org/packages/f9/2d/30ced5afde41b099b2dc0c6573b66b45d16d73090e85655f1a30c5a24e07/propcache-0.2.1-cp310-cp310-win32.whl", hash = "sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4", size = 40313 }, - { url = "https://files.pythonhosted.org/packages/23/84/bd9b207ac80da237af77aa6e153b08ffa83264b1c7882495984fcbfcf85c/propcache-0.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba", size = 44428 }, - { url = "https://files.pythonhosted.org/packages/bc/0f/2913b6791ebefb2b25b4efd4bb2299c985e09786b9f5b19184a88e5778dd/propcache-0.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16", size = 79297 }, - { url = "https://files.pythonhosted.org/packages/cf/73/af2053aeccd40b05d6e19058419ac77674daecdd32478088b79375b9ab54/propcache-0.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717", size = 45611 }, - { url = "https://files.pythonhosted.org/packages/3c/09/8386115ba7775ea3b9537730e8cf718d83bbf95bffe30757ccf37ec4e5da/propcache-0.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3", size = 45146 }, - { url = "https://files.pythonhosted.org/packages/03/7a/793aa12f0537b2e520bf09f4c6833706b63170a211ad042ca71cbf79d9cb/propcache-0.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9", size = 232136 }, - { url = "https://files.pythonhosted.org/packages/f1/38/b921b3168d72111769f648314100558c2ea1d52eb3d1ba7ea5c4aa6f9848/propcache-0.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787", size = 239706 }, - { url = "https://files.pythonhosted.org/packages/14/29/4636f500c69b5edea7786db3c34eb6166f3384b905665ce312a6e42c720c/propcache-0.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465", size = 238531 }, - { url = "https://files.pythonhosted.org/packages/85/14/01fe53580a8e1734ebb704a3482b7829a0ef4ea68d356141cf0994d9659b/propcache-0.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af", size = 231063 }, - { url = "https://files.pythonhosted.org/packages/33/5c/1d961299f3c3b8438301ccfbff0143b69afcc30c05fa28673cface692305/propcache-0.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7", size = 220134 }, - { url = "https://files.pythonhosted.org/packages/00/d0/ed735e76db279ba67a7d3b45ba4c654e7b02bc2f8050671ec365d8665e21/propcache-0.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f", size = 220009 }, - { url = "https://files.pythonhosted.org/packages/75/90/ee8fab7304ad6533872fee982cfff5a53b63d095d78140827d93de22e2d4/propcache-0.2.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = 
"sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54", size = 212199 }, - { url = "https://files.pythonhosted.org/packages/eb/ec/977ffaf1664f82e90737275873461695d4c9407d52abc2f3c3e24716da13/propcache-0.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505", size = 214827 }, - { url = "https://files.pythonhosted.org/packages/57/48/031fb87ab6081764054821a71b71942161619549396224cbb242922525e8/propcache-0.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82", size = 228009 }, - { url = "https://files.pythonhosted.org/packages/1a/06/ef1390f2524850838f2390421b23a8b298f6ce3396a7cc6d39dedd4047b0/propcache-0.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca", size = 231638 }, - { url = "https://files.pythonhosted.org/packages/38/2a/101e6386d5a93358395da1d41642b79c1ee0f3b12e31727932b069282b1d/propcache-0.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e", size = 222788 }, - { url = "https://files.pythonhosted.org/packages/db/81/786f687951d0979007e05ad9346cd357e50e3d0b0f1a1d6074df334b1bbb/propcache-0.2.1-cp311-cp311-win32.whl", hash = "sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034", size = 40170 }, - { url = "https://files.pythonhosted.org/packages/cf/59/7cc7037b295d5772eceb426358bb1b86e6cab4616d971bd74275395d100d/propcache-0.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3", size = 44404 }, - { url = "https://files.pythonhosted.org/packages/4c/28/1d205fe49be8b1b4df4c50024e62480a442b1a7b818e734308bb0d17e7fb/propcache-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a", size = 79588 }, - { url = "https://files.pythonhosted.org/packages/21/ee/fc4d893f8d81cd4971affef2a6cb542b36617cd1d8ce56b406112cb80bf7/propcache-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0", size = 45825 }, - { url = "https://files.pythonhosted.org/packages/4a/de/bbe712f94d088da1d237c35d735f675e494a816fd6f54e9db2f61ef4d03f/propcache-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d", size = 45357 }, - { url = "https://files.pythonhosted.org/packages/7f/14/7ae06a6cf2a2f1cb382586d5a99efe66b0b3d0c6f9ac2f759e6f7af9d7cf/propcache-0.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4", size = 241869 }, - { url = "https://files.pythonhosted.org/packages/cc/59/227a78be960b54a41124e639e2c39e8807ac0c751c735a900e21315f8c2b/propcache-0.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d", size = 247884 }, - { url = "https://files.pythonhosted.org/packages/84/58/f62b4ffaedf88dc1b17f04d57d8536601e4e030feb26617228ef930c3279/propcache-0.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5", size = 248486 }, - { url = 
"https://files.pythonhosted.org/packages/1c/07/ebe102777a830bca91bbb93e3479cd34c2ca5d0361b83be9dbd93104865e/propcache-0.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24", size = 243649 }, - { url = "https://files.pythonhosted.org/packages/ed/bc/4f7aba7f08f520376c4bb6a20b9a981a581b7f2e385fa0ec9f789bb2d362/propcache-0.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff", size = 229103 }, - { url = "https://files.pythonhosted.org/packages/fe/d5/04ac9cd4e51a57a96f78795e03c5a0ddb8f23ec098b86f92de028d7f2a6b/propcache-0.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f", size = 226607 }, - { url = "https://files.pythonhosted.org/packages/e3/f0/24060d959ea41d7a7cc7fdbf68b31852331aabda914a0c63bdb0e22e96d6/propcache-0.2.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec", size = 221153 }, - { url = "https://files.pythonhosted.org/packages/77/a7/3ac76045a077b3e4de4859a0753010765e45749bdf53bd02bc4d372da1a0/propcache-0.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348", size = 222151 }, - { url = "https://files.pythonhosted.org/packages/e7/af/5e29da6f80cebab3f5a4dcd2a3240e7f56f2c4abf51cbfcc99be34e17f0b/propcache-0.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6", size = 233812 }, - { url = "https://files.pythonhosted.org/packages/8c/89/ebe3ad52642cc5509eaa453e9f4b94b374d81bae3265c59d5c2d98efa1b4/propcache-0.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6", size = 238829 }, - { url = "https://files.pythonhosted.org/packages/e9/2f/6b32f273fa02e978b7577159eae7471b3cfb88b48563b1c2578b2d7ca0bb/propcache-0.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518", size = 230704 }, - { url = "https://files.pythonhosted.org/packages/5c/2e/f40ae6ff5624a5f77edd7b8359b208b5455ea113f68309e2b00a2e1426b6/propcache-0.2.1-cp312-cp312-win32.whl", hash = "sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246", size = 40050 }, - { url = "https://files.pythonhosted.org/packages/3b/77/a92c3ef994e47180862b9d7d11e37624fb1c00a16d61faf55115d970628b/propcache-0.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1", size = 44117 }, - { url = "https://files.pythonhosted.org/packages/0f/2a/329e0547cf2def8857157f9477669043e75524cc3e6251cef332b3ff256f/propcache-0.2.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc", size = 77002 }, - { url = "https://files.pythonhosted.org/packages/12/2d/c4df5415e2382f840dc2ecbca0eeb2293024bc28e57a80392f2012b4708c/propcache-0.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9", size = 44639 }, - { url = "https://files.pythonhosted.org/packages/d0/5a/21aaa4ea2f326edaa4e240959ac8b8386ea31dedfdaa636a3544d9e7a408/propcache-0.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439", 
size = 44049 }, - { url = "https://files.pythonhosted.org/packages/4e/3e/021b6cd86c0acc90d74784ccbb66808b0bd36067a1bf3e2deb0f3845f618/propcache-0.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536", size = 224819 }, - { url = "https://files.pythonhosted.org/packages/3c/57/c2fdeed1b3b8918b1770a133ba5c43ad3d78e18285b0c06364861ef5cc38/propcache-0.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629", size = 229625 }, - { url = "https://files.pythonhosted.org/packages/9d/81/70d4ff57bf2877b5780b466471bebf5892f851a7e2ca0ae7ffd728220281/propcache-0.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b", size = 232934 }, - { url = "https://files.pythonhosted.org/packages/3c/b9/bb51ea95d73b3fb4100cb95adbd4e1acaf2cbb1fd1083f5468eeb4a099a8/propcache-0.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052", size = 227361 }, - { url = "https://files.pythonhosted.org/packages/f1/20/3c6d696cd6fd70b29445960cc803b1851a1131e7a2e4ee261ee48e002bcd/propcache-0.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce", size = 213904 }, - { url = "https://files.pythonhosted.org/packages/a1/cb/1593bfc5ac6d40c010fa823f128056d6bc25b667f5393781e37d62f12005/propcache-0.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d", size = 212632 }, - { url = "https://files.pythonhosted.org/packages/6d/5c/e95617e222be14a34c709442a0ec179f3207f8a2b900273720501a70ec5e/propcache-0.2.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce", size = 207897 }, - { url = "https://files.pythonhosted.org/packages/8e/3b/56c5ab3dc00f6375fbcdeefdede5adf9bee94f1fab04adc8db118f0f9e25/propcache-0.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95", size = 208118 }, - { url = "https://files.pythonhosted.org/packages/86/25/d7ef738323fbc6ebcbce33eb2a19c5e07a89a3df2fded206065bd5e868a9/propcache-0.2.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf", size = 217851 }, - { url = "https://files.pythonhosted.org/packages/b3/77/763e6cef1852cf1ba740590364ec50309b89d1c818e3256d3929eb92fabf/propcache-0.2.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f", size = 222630 }, - { url = "https://files.pythonhosted.org/packages/4f/e9/0f86be33602089c701696fbed8d8c4c07b6ee9605c5b7536fd27ed540c5b/propcache-0.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30", size = 216269 }, - { url = "https://files.pythonhosted.org/packages/cc/02/5ac83217d522394b6a2e81a2e888167e7ca629ef6569a3f09852d6dcb01a/propcache-0.2.1-cp313-cp313-win32.whl", hash = "sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6", size = 39472 }, - { url = 
"https://files.pythonhosted.org/packages/f4/33/d6f5420252a36034bc8a3a01171bc55b4bff5df50d1c63d9caa50693662f/propcache-0.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1", size = 43363 }, - { url = "https://files.pythonhosted.org/packages/41/b6/c5319caea262f4821995dca2107483b94a3345d4607ad797c76cb9c36bcc/propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54", size = 11818 }, +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/92/76/f941e63d55c0293ff7829dd21e7cf1147e90a526756869a9070f287a68c9/propcache-0.3.0.tar.gz", hash = "sha256:a8fd93de4e1d278046345f49e2238cdb298589325849b2645d4a94c53faeffc5", size = 42722 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/f0/dc9ec44d2e63c13f816a16398c039329736712440ff82b682dd9a78d2258/propcache-0.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:efa44f64c37cc30c9f05932c740a8b40ce359f51882c70883cc95feac842da4d", size = 79574 }, + { url = "https://files.pythonhosted.org/packages/99/3a/33a207dfcb3ee1131ea23a2aeb726c3c4994f89546d7eadf8c50627c8b63/propcache-0.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2383a17385d9800b6eb5855c2f05ee550f803878f344f58b6e194de08b96352c", size = 45898 }, + { url = "https://files.pythonhosted.org/packages/af/68/0bde765c9f5dc02b4466d2838600af38c81b184c26c6d3cd44643ac668e3/propcache-0.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3e7420211f5a65a54675fd860ea04173cde60a7cc20ccfbafcccd155225f8bc", size = 45418 }, + { url = "https://files.pythonhosted.org/packages/06/a6/c682669bae41199358e16cc7b1c818f91c5f9e925cc863dabd98ce32716a/propcache-0.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3302c5287e504d23bb0e64d2a921d1eb4a03fb93a0a0aa3b53de059f5a5d737d", size = 205116 }, + { url = "https://files.pythonhosted.org/packages/fb/ae/82cfb50267d9a1baa0340728eb9e32245a68538fef929d7bb786d01c11a8/propcache-0.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e2e068a83552ddf7a39a99488bcba05ac13454fb205c847674da0352602082f", size = 219405 }, + { url = "https://files.pythonhosted.org/packages/ab/16/7b6b2bf8c207cfd0e5ca3d41aea397392de9899867ec024f88c94f9ae2ab/propcache-0.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d913d36bdaf368637b4f88d554fb9cb9d53d6920b9c5563846555938d5450bf", size = 217656 }, + { url = "https://files.pythonhosted.org/packages/f4/eb/41447de61eb5454891658d0fb9b1d7d35d49a4a5dd2e0c86f2c332e8b7e1/propcache-0.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ee1983728964d6070ab443399c476de93d5d741f71e8f6e7880a065f878e0b9", size = 205414 }, + { url = "https://files.pythonhosted.org/packages/03/b6/9719878f8b5b20d37ee663a40f8dcbf888559e4d3be2ba2fe5c790fc28d2/propcache-0.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:36ca5e9a21822cc1746023e88f5c0af6fce3af3b85d4520efb1ce4221bed75cc", size = 195746 }, + { url = "https://files.pythonhosted.org/packages/bb/ec/b79c3210ba459800d1a8f1afeb81d7b503893555a7b79c24082ff26d3314/propcache-0.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9ecde3671e62eeb99e977f5221abcf40c208f69b5eb986b061ccec317c82ebd0", size = 198651 }, + { url = 
"https://files.pythonhosted.org/packages/48/f6/2b0140bc47013e43575973068e72ad51ee9f22f2dad42e6d6e362d715125/propcache-0.3.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d383bf5e045d7f9d239b38e6acadd7b7fdf6c0087259a84ae3475d18e9a2ae8b", size = 195858 }, + { url = "https://files.pythonhosted.org/packages/97/3d/2fa19303d87aa21f9a42dcd870d6088a2a776ff5518e394d50412c3679a6/propcache-0.3.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8cb625bcb5add899cb8ba7bf716ec1d3e8f7cdea9b0713fa99eadf73b6d4986f", size = 197181 }, + { url = "https://files.pythonhosted.org/packages/09/f3/a2170ffc9fa774c1dfd52294113c0fa6cdc5b71dbfd7129bb9378fdd8b42/propcache-0.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5fa159dcee5dba00c1def3231c249cf261185189205073bde13797e57dd7540a", size = 207411 }, + { url = "https://files.pythonhosted.org/packages/d6/1e/cb8a6c82178efffa0b00dc463f36cd086f747345585140aeb95d5cb93666/propcache-0.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7080b0159ce05f179cfac592cda1a82898ca9cd097dacf8ea20ae33474fbb25", size = 210724 }, + { url = "https://files.pythonhosted.org/packages/2b/72/6e273543337a3e22cf462eb836f065a9830b4d41baeb1f58db2695c934f3/propcache-0.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ed7161bccab7696a473fe7ddb619c1d75963732b37da4618ba12e60899fefe4f", size = 203511 }, + { url = "https://files.pythonhosted.org/packages/f3/ea/7412c79bcec06597c967d49789f5a1f7fd76a8654908feeaefafb7447c9a/propcache-0.3.0-cp310-cp310-win32.whl", hash = "sha256:bf0d9a171908f32d54f651648c7290397b8792f4303821c42a74e7805bfb813c", size = 40600 }, + { url = "https://files.pythonhosted.org/packages/a3/42/488c90190491f3e61bd2c2fb0b3d91c1c78778270dde2f0b6633fc9ff723/propcache-0.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:42924dc0c9d73e49908e35bbdec87adedd651ea24c53c29cac103ede0ea1d340", size = 44714 }, + { url = "https://files.pythonhosted.org/packages/45/c9/cf09ff7e6d09f14149094f7cd50d2dec032b24e61af21fc4540da2b17bfb/propcache-0.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9ddd49258610499aab83b4f5b61b32e11fce873586282a0e972e5ab3bcadee51", size = 79568 }, + { url = "https://files.pythonhosted.org/packages/c8/32/2424d89da88cd81b7d148e0d2b3131461b570a02aa9d84a2e567509adb0d/propcache-0.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2578541776769b500bada3f8a4eeaf944530516b6e90c089aa368266ed70c49e", size = 45895 }, + { url = "https://files.pythonhosted.org/packages/f6/91/ee5b6aa7aa31754fefcf0c5180e09223cac380ef195c4ddc8c266eb641ea/propcache-0.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8074c5dd61c8a3e915fa8fc04754fa55cfa5978200d2daa1e2d4294c1f136aa", size = 45427 }, + { url = "https://files.pythonhosted.org/packages/bf/73/38f0128462b8b616181d8c53bd5d04eac41c50c449b07615c65d56ba0a9b/propcache-0.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b58229a844931bca61b3a20efd2be2a2acb4ad1622fc026504309a6883686fbf", size = 232427 }, + { url = "https://files.pythonhosted.org/packages/59/82/f3d4e84f4539dcfc9c3d338282b9e915f5b63c921986ecfdf7af2d12f87c/propcache-0.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e45377d5d6fefe1677da2a2c07b024a6dac782088e37c0b1efea4cfe2b1be19b", size = 239985 }, + { url = "https://files.pythonhosted.org/packages/42/e8/029f58cccbae83c9969a7ee7a06558d5b83a93dfc54e0f4f70234bbaea1b/propcache-0.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ec5060592d83454e8063e487696ac3783cc48c9a329498bafae0d972bc7816c9", size = 238827 
}, + { url = "https://files.pythonhosted.org/packages/8b/a2/c373561777c0cb9b9e7b9b9a10b9b3a7b6bde75a2535b962231cecc8fdb8/propcache-0.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15010f29fbed80e711db272909a074dc79858c6d28e2915704cfc487a8ac89c6", size = 231348 }, + { url = "https://files.pythonhosted.org/packages/d7/d2/4673f715beedf6038b485bcd976813149231d9df5bb6196cb69a09c185c9/propcache-0.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a254537b9b696ede293bfdbc0a65200e8e4507bc9f37831e2a0318a9b333c85c", size = 220426 }, + { url = "https://files.pythonhosted.org/packages/e0/f6/1da65f900927bafd4675a16e890618ec7643f2f922bf0e4d84bb38645618/propcache-0.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2b975528998de037dfbc10144b8aed9b8dd5a99ec547f14d1cb7c5665a43f075", size = 220294 }, + { url = "https://files.pythonhosted.org/packages/ff/86/620451bdc02e91b1712cd71890c17077ee97e2a28493836a87e47b8e70ff/propcache-0.3.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:19d36bb351ad5554ff20f2ae75f88ce205b0748c38b146c75628577020351e3c", size = 212492 }, + { url = "https://files.pythonhosted.org/packages/6e/1b/e8f86921ed4016da80faf3b8f515f7829decabdbff106736bfff353bceba/propcache-0.3.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6032231d4a5abd67c7f71168fd64a47b6b451fbcb91c8397c2f7610e67683810", size = 215113 }, + { url = "https://files.pythonhosted.org/packages/1a/95/a61d86cc49aa0945f6c06f3a4614fc543e311a50558c92861f5e9691a37c/propcache-0.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6985a593417cdbc94c7f9c3403747335e450c1599da1647a5af76539672464d3", size = 228330 }, + { url = "https://files.pythonhosted.org/packages/8f/7d/10dbae48ff2bb189e92c2b3487a48f3229146a25941ad0d485934d1104d4/propcache-0.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6a1948df1bb1d56b5e7b0553c0fa04fd0e320997ae99689488201f19fa90d2e7", size = 231942 }, + { url = "https://files.pythonhosted.org/packages/39/ce/82d16aec96c5513ae7db13ab901a65a1e54c915292fb5b2390e33275b61d/propcache-0.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8319293e85feadbbfe2150a5659dbc2ebc4afdeaf7d98936fb9a2f2ba0d4c35c", size = 223077 }, + { url = "https://files.pythonhosted.org/packages/c8/e0/cb077e8e7a583c733df7f53327fcbdb92e42be59b976ce60bf1d904a0efe/propcache-0.3.0-cp311-cp311-win32.whl", hash = "sha256:63f26258a163c34542c24808f03d734b338da66ba91f410a703e505c8485791d", size = 40455 }, + { url = "https://files.pythonhosted.org/packages/d8/35/57abeb6146fe3c19081eeaf3d9d4cfea256f87f1e5101acf80d3332c1820/propcache-0.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:cacea77ef7a2195f04f9279297684955e3d1ae4241092ff0cfcef532bb7a1c32", size = 44705 }, + { url = "https://files.pythonhosted.org/packages/8d/2c/921f15dc365796ec23975b322b0078eae72995c7b4d49eba554c6a308d70/propcache-0.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e53d19c2bf7d0d1e6998a7e693c7e87300dd971808e6618964621ccd0e01fe4e", size = 79867 }, + { url = "https://files.pythonhosted.org/packages/11/a5/4a6cc1a559d1f2fb57ea22edc4245158cdffae92f7f92afcee2913f84417/propcache-0.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a61a68d630e812b67b5bf097ab84e2cd79b48c792857dc10ba8a223f5b06a2af", size = 46109 }, + { url = "https://files.pythonhosted.org/packages/e1/6d/28bfd3af3a567ad7d667348e7f46a520bda958229c4d545ba138a044232f/propcache-0.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:fb91d20fa2d3b13deea98a690534697742029f4fb83673a3501ae6e3746508b5", size = 45635 }, + { url = "https://files.pythonhosted.org/packages/73/20/d75b42eaffe5075eac2f4e168f6393d21c664c91225288811d85451b2578/propcache-0.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67054e47c01b7b349b94ed0840ccae075449503cf1fdd0a1fdd98ab5ddc2667b", size = 242159 }, + { url = "https://files.pythonhosted.org/packages/a5/fb/4b537dd92f9fd4be68042ec51c9d23885ca5fafe51ec24c58d9401034e5f/propcache-0.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:997e7b8f173a391987df40f3b52c423e5850be6f6df0dcfb5376365440b56667", size = 248163 }, + { url = "https://files.pythonhosted.org/packages/e7/af/8a9db04ac596d531ca0ef7dde518feaadfcdabef7b17d6a5ec59ee3effc2/propcache-0.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d663fd71491dde7dfdfc899d13a067a94198e90695b4321084c6e450743b8c7", size = 248794 }, + { url = "https://files.pythonhosted.org/packages/9d/c4/ecfc988879c0fd9db03228725b662d76cf484b6b46f7e92fee94e4b52490/propcache-0.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8884ba1a0fe7210b775106b25850f5e5a9dc3c840d1ae9924ee6ea2eb3acbfe7", size = 243912 }, + { url = "https://files.pythonhosted.org/packages/04/a2/298dd27184faa8b7d91cc43488b578db218b3cc85b54d912ed27b8c5597a/propcache-0.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa806bbc13eac1ab6291ed21ecd2dd426063ca5417dd507e6be58de20e58dfcf", size = 229402 }, + { url = "https://files.pythonhosted.org/packages/be/0d/efe7fec316ca92dbf4bc4a9ba49ca889c43ca6d48ab1d6fa99fc94e5bb98/propcache-0.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6f4d7a7c0aff92e8354cceca6fe223973ddf08401047920df0fcb24be2bd5138", size = 226896 }, + { url = "https://files.pythonhosted.org/packages/60/63/72404380ae1d9c96d96e165aa02c66c2aae6072d067fc4713da5cde96762/propcache-0.3.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9be90eebc9842a93ef8335291f57b3b7488ac24f70df96a6034a13cb58e6ff86", size = 221447 }, + { url = "https://files.pythonhosted.org/packages/9d/18/b8392cab6e0964b67a30a8f4dadeaff64dc7022b5a34bb1d004ea99646f4/propcache-0.3.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bf15fc0b45914d9d1b706f7c9c4f66f2b7b053e9517e40123e137e8ca8958b3d", size = 222440 }, + { url = "https://files.pythonhosted.org/packages/6f/be/105d9ceda0f97eff8c06bac1673448b2db2a497444de3646464d3f5dc881/propcache-0.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5a16167118677d94bb48bfcd91e420088854eb0737b76ec374b91498fb77a70e", size = 234104 }, + { url = "https://files.pythonhosted.org/packages/cb/c9/f09a4ec394cfcce4053d8b2a04d622b5f22d21ba9bb70edd0cad061fa77b/propcache-0.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:41de3da5458edd5678b0f6ff66691507f9885f5fe6a0fb99a5d10d10c0fd2d64", size = 239086 }, + { url = "https://files.pythonhosted.org/packages/ea/aa/96f7f9ed6def82db67c972bdb7bd9f28b95d7d98f7e2abaf144c284bf609/propcache-0.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:728af36011bb5d344c4fe4af79cfe186729efb649d2f8b395d1572fb088a996c", size = 230991 }, + { url = "https://files.pythonhosted.org/packages/5a/11/bee5439de1307d06fad176f7143fec906e499c33d7aff863ea8428b8e98b/propcache-0.3.0-cp312-cp312-win32.whl", hash = "sha256:6b5b7fd6ee7b54e01759f2044f936dcf7dea6e7585f35490f7ca0420fe723c0d", size = 40337 }, + { url = 
"https://files.pythonhosted.org/packages/e4/17/e5789a54a0455a61cb9efc4ca6071829d992220c2998a27c59aeba749f6f/propcache-0.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:2d15bc27163cd4df433e75f546b9ac31c1ba7b0b128bfb1b90df19082466ff57", size = 44404 }, + { url = "https://files.pythonhosted.org/packages/3a/0f/a79dd23a0efd6ee01ab0dc9750d8479b343bfd0c73560d59d271eb6a99d4/propcache-0.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a2b9bf8c79b660d0ca1ad95e587818c30ccdb11f787657458d6f26a1ea18c568", size = 77287 }, + { url = "https://files.pythonhosted.org/packages/b8/51/76675703c90de38ac75adb8deceb3f3ad99b67ff02a0fa5d067757971ab8/propcache-0.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b0c1a133d42c6fc1f5fbcf5c91331657a1ff822e87989bf4a6e2e39b818d0ee9", size = 44923 }, + { url = "https://files.pythonhosted.org/packages/01/9b/fd5ddbee66cf7686e73c516227c2fd9bf471dbfed0f48329d095ea1228d3/propcache-0.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bb2f144c6d98bb5cbc94adeb0447cfd4c0f991341baa68eee3f3b0c9c0e83767", size = 44325 }, + { url = "https://files.pythonhosted.org/packages/13/1c/6961f11eb215a683b34b903b82bde486c606516c1466bf1fa67f26906d51/propcache-0.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1323cd04d6e92150bcc79d0174ce347ed4b349d748b9358fd2e497b121e03c8", size = 225116 }, + { url = "https://files.pythonhosted.org/packages/ef/ea/f8410c40abcb2e40dffe9adeed017898c930974650a63e5c79b886aa9f73/propcache-0.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b812b3cb6caacd072276ac0492d249f210006c57726b6484a1e1805b3cfeea0", size = 229905 }, + { url = "https://files.pythonhosted.org/packages/ef/5a/a9bf90894001468bf8e6ea293bb00626cc9ef10f8eb7996e9ec29345c7ed/propcache-0.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:742840d1d0438eb7ea4280f3347598f507a199a35a08294afdcc560c3739989d", size = 233221 }, + { url = "https://files.pythonhosted.org/packages/dd/ce/fffdddd9725b690b01d345c1156b4c2cc6dca09ab5c23a6d07b8f37d6e2f/propcache-0.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6e7e4f9167fddc438cd653d826f2222222564daed4116a02a184b464d3ef05", size = 227627 }, + { url = "https://files.pythonhosted.org/packages/58/ae/45c89a5994a334735a3032b48e8e4a98c05d9536ddee0719913dc27da548/propcache-0.3.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a94ffc66738da99232ddffcf7910e0f69e2bbe3a0802e54426dbf0714e1c2ffe", size = 214217 }, + { url = "https://files.pythonhosted.org/packages/01/84/bc60188c3290ff8f5f4a92b9ca2d93a62e449c8daf6fd11ad517ad136926/propcache-0.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3c6ec957025bf32b15cbc6b67afe233c65b30005e4c55fe5768e4bb518d712f1", size = 212921 }, + { url = "https://files.pythonhosted.org/packages/14/b3/39d60224048feef7a96edabb8217dc3f75415457e5ebbef6814f8b2a27b5/propcache-0.3.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:549722908de62aa0b47a78b90531c022fa6e139f9166be634f667ff45632cc92", size = 208200 }, + { url = "https://files.pythonhosted.org/packages/9d/b3/0a6720b86791251273fff8a01bc8e628bc70903513bd456f86cde1e1ef84/propcache-0.3.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5d62c4f6706bff5d8a52fd51fec6069bef69e7202ed481486c0bc3874912c787", size = 208400 }, + { url = "https://files.pythonhosted.org/packages/e9/4f/bb470f3e687790547e2e78105fb411f54e0cdde0d74106ccadd2521c6572/propcache-0.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", 
hash = "sha256:24c04f8fbf60094c531667b8207acbae54146661657a1b1be6d3ca7773b7a545", size = 218116 }, + { url = "https://files.pythonhosted.org/packages/34/71/277f7f9add469698ac9724c199bfe06f85b199542121a71f65a80423d62a/propcache-0.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7c5f5290799a3f6539cc5e6f474c3e5c5fbeba74a5e1e5be75587746a940d51e", size = 222911 }, + { url = "https://files.pythonhosted.org/packages/92/e3/a7b9782aef5a2fc765b1d97da9ec7aed2f25a4e985703608e73232205e3f/propcache-0.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4fa0e7c9c3cf7c276d4f6ab9af8adddc127d04e0fcabede315904d2ff76db626", size = 216563 }, + { url = "https://files.pythonhosted.org/packages/ab/76/0583ca2c551aa08ffcff87b2c6849c8f01c1f6fb815a5226f0c5c202173e/propcache-0.3.0-cp313-cp313-win32.whl", hash = "sha256:ee0bd3a7b2e184e88d25c9baa6a9dc609ba25b76daae942edfb14499ac7ec374", size = 39763 }, + { url = "https://files.pythonhosted.org/packages/80/ec/c6a84f9a36f608379b95f0e786c111d5465926f8c62f12be8cdadb02b15c/propcache-0.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:1c8f7d896a16da9455f882870a507567d4f58c53504dc2d4b1e1d386dfe4588a", size = 43650 }, + { url = "https://files.pythonhosted.org/packages/ee/95/7d32e3560f5bf83fc2f2a4c1b0c181d327d53d5f85ebd045ab89d4d97763/propcache-0.3.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e560fd75aaf3e5693b91bcaddd8b314f4d57e99aef8a6c6dc692f935cc1e6bbf", size = 82140 }, + { url = "https://files.pythonhosted.org/packages/86/89/752388f12e6027a5e63f5d075f15291ded48e2d8311314fff039da5a9b11/propcache-0.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:65a37714b8ad9aba5780325228598a5b16c47ba0f8aeb3dc0514701e4413d7c0", size = 47296 }, + { url = "https://files.pythonhosted.org/packages/1b/4c/b55c98d586c69180d3048984a57a5ea238bdeeccf82dbfcd598e935e10bb/propcache-0.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:07700939b2cbd67bfb3b76a12e1412405d71019df00ca5697ce75e5ef789d829", size = 46724 }, + { url = "https://files.pythonhosted.org/packages/0f/b6/67451a437aed90c4e951e320b5b3d7eb584ade1d5592f6e5e8f678030989/propcache-0.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c0fdbdf6983526e269e5a8d53b7ae3622dd6998468821d660d0daf72779aefa", size = 291499 }, + { url = "https://files.pythonhosted.org/packages/ee/ff/e4179facd21515b24737e1e26e02615dfb5ed29416eed4cf5bc6ac5ce5fb/propcache-0.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:794c3dd744fad478b6232289c866c25406ecdfc47e294618bdf1697e69bd64a6", size = 293911 }, + { url = "https://files.pythonhosted.org/packages/76/8d/94a8585992a064a23bd54f56c5e58c3b8bf0c0a06ae10e56f2353ae16c3d/propcache-0.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4544699674faf66fb6b4473a1518ae4999c1b614f0b8297b1cef96bac25381db", size = 293301 }, + { url = "https://files.pythonhosted.org/packages/b0/b8/2c860c92b4134f68c7716c6f30a0d723973f881c32a6d7a24c4ddca05fdf/propcache-0.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddb8870bdb83456a489ab67c6b3040a8d5a55069aa6f72f9d872235fbc52f54", size = 281947 }, + { url = "https://files.pythonhosted.org/packages/cd/72/b564be7411b525d11757b713c757c21cd4dc13b6569c3b2b8f6d3c96fd5e/propcache-0.3.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f857034dc68d5ceb30fb60afb6ff2103087aea10a01b613985610e007053a121", size = 268072 }, + { url = 
"https://files.pythonhosted.org/packages/37/68/d94649e399e8d7fc051e5a4f2334efc567993525af083db145a70690a121/propcache-0.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:02df07041e0820cacc8f739510078f2aadcfd3fc57eaeeb16d5ded85c872c89e", size = 275190 }, + { url = "https://files.pythonhosted.org/packages/d8/3c/446e125f5bbbc1922964dd67cb541c01cdb678d811297b79a4ff6accc843/propcache-0.3.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f47d52fd9b2ac418c4890aad2f6d21a6b96183c98021f0a48497a904199f006e", size = 254145 }, + { url = "https://files.pythonhosted.org/packages/f4/80/fd3f741483dc8e59f7ba7e05eaa0f4e11677d7db2077522b92ff80117a2a/propcache-0.3.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9ff4e9ecb6e4b363430edf2c6e50173a63e0820e549918adef70515f87ced19a", size = 257163 }, + { url = "https://files.pythonhosted.org/packages/dc/cf/6292b5ce6ed0017e6a89024a827292122cc41b6259b30ada0c6732288513/propcache-0.3.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ecc2920630283e0783c22e2ac94427f8cca29a04cfdf331467d4f661f4072dac", size = 280249 }, + { url = "https://files.pythonhosted.org/packages/e8/f0/fd9b8247b449fe02a4f96538b979997e229af516d7462b006392badc59a1/propcache-0.3.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:c441c841e82c5ba7a85ad25986014be8d7849c3cfbdb6004541873505929a74e", size = 288741 }, + { url = "https://files.pythonhosted.org/packages/64/71/cf831fdc2617f86cfd7f414cfc487d018e722dac8acc098366ce9bba0941/propcache-0.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6c929916cbdb540d3407c66f19f73387f43e7c12fa318a66f64ac99da601bcdf", size = 277061 }, + { url = "https://files.pythonhosted.org/packages/42/78/9432542a35d944abeca9e02927a0de38cd7a298466d8ffa171536e2381c3/propcache-0.3.0-cp313-cp313t-win32.whl", hash = "sha256:0c3e893c4464ebd751b44ae76c12c5f5c1e4f6cbd6fbf67e3783cd93ad221863", size = 42252 }, + { url = "https://files.pythonhosted.org/packages/6f/45/960365f4f8978f48ebb56b1127adf33a49f2e69ecd46ac1f46d6cf78a79d/propcache-0.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:75e872573220d1ee2305b35c9813626e620768248425f58798413e9c39741f46", size = 46425 }, + { url = "https://files.pythonhosted.org/packages/b5/35/6c4c6fc8774a9e3629cd750dc24a7a4fb090a25ccd5c3246d127b70f9e22/propcache-0.3.0-py3-none-any.whl", hash = "sha256:67dda3c7325691c2081510e92c561f465ba61b975f481735aefdfc845d2cd043", size = 12101 }, ] [[package]] name = "proto-plus" -version = "1.25.0" +version = "1.26.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7e/05/74417b2061e1bf1b82776037cad97094228fa1c1b6e82d08a78d3fb6ddb6/proto_plus-1.25.0.tar.gz", hash = "sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91", size = 56124 } +sdist = { url = "https://files.pythonhosted.org/packages/26/79/a5c6cbb42268cfd3ddc652dc526889044a8798c688a03ff58e5e92b743c8/proto_plus-1.26.0.tar.gz", hash = "sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22", size = 56136 } wheels = [ - { url = "https://files.pythonhosted.org/packages/dd/25/0b7cc838ae3d76d46539020ec39fc92bfc9acc29367e58fe912702c2a79e/proto_plus-1.25.0-py3-none-any.whl", hash = "sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961", size = 50126 }, + { url = 
"https://files.pythonhosted.org/packages/42/c3/59308ccc07b34980f9d532f7afc718a9f32b40e52cde7a740df8d55632fb/proto_plus-1.26.0-py3-none-any.whl", hash = "sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7", size = 50166 }, ] [[package]] @@ -3555,30 +3722,30 @@ wheels = [ [[package]] name = "psutil" -version = "6.1.1" +version = "7.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1f/5a/07871137bb752428aa4b659f910b399ba6f291156bdea939be3e96cae7cb/psutil-6.1.1.tar.gz", hash = "sha256:cf8496728c18f2d0b45198f06895be52f36611711746b7f30c464b422b50e2f5", size = 508502 } +sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003 } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/99/ca79d302be46f7bdd8321089762dd4476ee725fce16fc2b2e1dbba8cac17/psutil-6.1.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed7fe2231a444fc219b9c42d0376e0a9a1a72f16c5cfa0f68d19f1a0663e8", size = 247511 }, - { url = "https://files.pythonhosted.org/packages/0b/6b/73dbde0dd38f3782905d4587049b9be64d76671042fdcaf60e2430c6796d/psutil-6.1.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0bdd4eab935276290ad3cb718e9809412895ca6b5b334f5a9111ee6d9aff9377", size = 248985 }, - { url = "https://files.pythonhosted.org/packages/17/38/c319d31a1d3f88c5b79c68b3116c129e5133f1822157dd6da34043e32ed6/psutil-6.1.1-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6e06c20c05fe95a3d7302d74e7097756d4ba1247975ad6905441ae1b5b66003", size = 284488 }, - { url = "https://files.pythonhosted.org/packages/9c/39/0f88a830a1c8a3aba27fededc642da37613c57cbff143412e3536f89784f/psutil-6.1.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97f7cb9921fbec4904f522d972f0c0e1f4fabbdd4e0287813b21215074a0f160", size = 287477 }, - { url = "https://files.pythonhosted.org/packages/47/da/99f4345d4ddf2845cb5b5bd0d93d554e84542d116934fde07a0c50bd4e9f/psutil-6.1.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33431e84fee02bc84ea36d9e2c4a6d395d479c9dd9bba2376c1f6ee8f3a4e0b3", size = 289017 }, - { url = "https://files.pythonhosted.org/packages/38/53/bd755c2896f4461fd4f36fa6a6dcb66a88a9e4b9fd4e5b66a77cf9d4a584/psutil-6.1.1-cp37-abi3-win32.whl", hash = "sha256:eaa912e0b11848c4d9279a93d7e2783df352b082f40111e078388701fd479e53", size = 250602 }, - { url = "https://files.pythonhosted.org/packages/7b/d7/7831438e6c3ebbfa6e01a927127a6cb42ad3ab844247f3c5b96bea25d73d/psutil-6.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:f35cfccb065fff93529d2afb4a2e89e363fe63ca1e4a5da22b603a85833c2649", size = 254444 }, + { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051 }, + { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535 }, + { url = 
"https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004 }, + { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986 }, + { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544 }, + { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053 }, + { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885 }, ] [[package]] name = "psycopg" -version = "3.2.4" +version = "3.2.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, { name = "tzdata", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e0/f2/954b1467b3e2ca5945b83b5e320268be1f4df486c3e8ffc90f4e4b707979/psycopg-3.2.4.tar.gz", hash = "sha256:f26f1346d6bf1ef5f5ef1714dd405c67fb365cfd1c6cea07de1792747b167b92", size = 156109 } +sdist = { url = "https://files.pythonhosted.org/packages/0e/cf/dc1a4d45e3c6222fe272a245c5cea9a969a7157639da606ac7f2ab5de3a1/psycopg-3.2.5.tar.gz", hash = "sha256:f5f750611c67cb200e85b408882f29265c66d1de7f813add4f8125978bfd70e8", size = 156158 } wheels = [ - { url = "https://files.pythonhosted.org/packages/40/49/15114d5f7ee68983f4e1a24d47e75334568960352a07c6f0e796e912685d/psycopg-3.2.4-py3-none-any.whl", hash = "sha256:43665368ccd48180744cab26b74332f46b63b7e06e8ce0775547a3533883d381", size = 198716 }, + { url = "https://files.pythonhosted.org/packages/18/f3/14a1370b1449ca875d5e353ef02cb9db6b70bd46ec361c236176837c0be1/psycopg-3.2.5-py3-none-any.whl", hash = "sha256:b782130983e5b3de30b4c529623d3687033b4dafa05bb661fc6bf45837ca5879", size = 198749 }, ] [package.optional-dependencies] @@ -3591,65 +3758,65 @@ pool = [ [[package]] name = "psycopg-binary" -version = "3.2.4" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/7b/6d7a4626b49e227125f8edf6f114dd8e9a9b22fc4f0abc3b2b0068d5f2bd/psycopg_binary-3.2.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c716f75b5c0388fc5283b5124046292c727511dd8c6aa59ca2dc644b9a2ed0cd", size = 3862864 }, - { url = "https://files.pythonhosted.org/packages/2b/7b/bc0dbb8384997e1321ffb265f96e68ba8584c2af58229816c16809218bdf/psycopg_binary-3.2.4-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:e2e8050347018f596a63f5dccbb92fb68bca52b13912cb8fc40184b24c0e534f", size = 3934048 }, - { url = "https://files.pythonhosted.org/packages/42/c0/8a8034650e4618efc8c0be32c30469933a1ddac1656525c0c6b2b2151736/psycopg_binary-3.2.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04171f9af9ab567c0fd339bac06f2c75836db839cebac5bd07824778dafa7f0e", size = 4516741 }, - { url = "https://files.pythonhosted.org/packages/b8/6c/714572fc7c59295498287b9b4b965e3b1d6ff5758c310535a2f02d159688/psycopg_binary-3.2.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7ba7b2ff25a6405826f627fb7d0f1e06e5c08ae25ffabc74a5e9ec7b0a63b85", size = 4323332 }, - { url = "https://files.pythonhosted.org/packages/64/19/a807021e48719cf226a7b520fd0c9c741577ad8974ecd264efe03862d80c/psycopg_binary-3.2.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e58eeba520d405b2ad72dffaafd04d0b592bef870e718bf37c261e89a75450a", size = 4569646 }, - { url = "https://files.pythonhosted.org/packages/67/78/70c515175c623bbc505d015ef1ee55b1ee4d0878985a95d4d6317fdd6894/psycopg_binary-3.2.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb18cfbb1cfc8172786ceefd314f0faa05c40ea93b3db7194d0f6bbbbfedb42a", size = 4279629 }, - { url = "https://files.pythonhosted.org/packages/0f/02/8a0395ac8f69320ca26f4f7ec7fd16620671ba002072e01ed5fb13c29a38/psycopg_binary-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:769804b4f753ddec9403183a6d4577d5b696fc49c2451421013fb06d6fa2f288", size = 3868189 }, - { url = "https://files.pythonhosted.org/packages/b9/a8/fa254c48513580c9cae242b5fac4af4dd1227178061a27a2eb260ff61a27/psycopg_binary-3.2.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:7d4f0c9b01eb933ce35bb32a54205f48d7bc36bf455565afe269cabcb7973955", size = 3335018 }, - { url = "https://files.pythonhosted.org/packages/d6/c1/98c239f40851c67eb4813d6a7eb90b39f717de2fd48f23fe3121899eb70b/psycopg_binary-3.2.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:26aed7ff8691ba810de95718d3bc81a43fd48a4036c3641ef711eb5f71fc7106", size = 3432703 }, - { url = "https://files.pythonhosted.org/packages/91/08/5b6fa2247bf964ac14d10cff3f7163d901dd008b7b6300e13eace8394751/psycopg_binary-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8a4b65eaf44dfed0b47e6ebd392e88cd3cff62ea11652d92db6fefeb2608ed25", size = 3457676 }, - { url = "https://files.pythonhosted.org/packages/2f/55/79db2b10f87eb7a913b59bbcdd10f794c4c964141f2db31f8eb1f567c7d9/psycopg_binary-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9fa48a2dc54c4e906d7dd781031d227d1b13966deff7e5ece5b037588643190", size = 2787324 }, - { url = "https://files.pythonhosted.org/packages/f3/9a/8013aa4ad4d76dfcf9b822da549d51aab96abfc77afc44b200ef295685dc/psycopg_binary-3.2.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d092b0aa80b8c3ee0701a7252cbfb0bdb742e1f74aaf0c1a13ef22c05c9266ab", size = 3871518 }, - { url = "https://files.pythonhosted.org/packages/1e/65/2422036d0169e33e5f06d868a36235340f85e42afe153d59b0edf4b4210f/psycopg_binary-3.2.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3955381dacc6d15f3838d5f25445ee99f80882876a163f8de0c01ffc54aeef4a", size = 3938511 }, - { url = "https://files.pythonhosted.org/packages/bf/ab/4f6c815862d62d9d06353abfbf36fef69ad7e6ca0763eed1629f47579e83/psycopg_binary-3.2.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04144d1963aa3309247980f1a742b98e15f60d68ea9745143c433f99aaeb70d7", size = 4512971 }, - { url = 
"https://files.pythonhosted.org/packages/27/ef/0e5e9ea6122f61f9e0c4e70b7f7a28ef51404c98bbb32096ad99f79f85b5/psycopg_binary-3.2.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eac61931bc90c1c6fdc648452894d3a434a005ffefaf12819b4709548c894bf2", size = 4318297 }, - { url = "https://files.pythonhosted.org/packages/93/cd/05d71e4f2f7f69fd185d2ec44b66de13734ff70c426ead14523e206258bb/psycopg_binary-3.2.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c09b765960480c4586758a3c16f0ee0db6f7e2f31c88cccb5e7d7024215468cd", size = 4570696 }, - { url = "https://files.pythonhosted.org/packages/af/7c/f5099ad491f78ba491e56cd686b38b0737eb09a719e919661a9f8d08e754/psycopg_binary-3.2.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:220de8efcc276e42ba7cc7ed613145b1274b6b5de321a1396fb6b6ce1758d34c", size = 4275069 }, - { url = "https://files.pythonhosted.org/packages/2d/95/a1a2f861d90f3394f98d032329a1e44a67c8d1f5bded0ec343b664c65ba5/psycopg_binary-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b558d3de315d18819ce477908e27518cbdd3275717c6193b58dde36f0443e167", size = 3865827 }, - { url = "https://files.pythonhosted.org/packages/ab/72/0b395ad2db2adc6009d2a1cdc2707b1764a3e870d6895cf92dc87e251aa9/psycopg_binary-3.2.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3b4c9b9a112d43533f7dbdedbb1188107d4ddcd262e2a2af41b4de0caf7d053", size = 3329276 }, - { url = "https://files.pythonhosted.org/packages/ba/5d/8e9904664e5bae3852989a0f1b0517c781ff0a9cba64416ffa68952129ac/psycopg_binary-3.2.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:870df866f789bb641a350897c1751c293b9420f46be4eb366d190ff5f2f2ffd8", size = 3426059 }, - { url = "https://files.pythonhosted.org/packages/46/6a/9abc03e01c1cb97878e6e87d5ea9e3d925790b04fa03d72b2d6e3455f124/psycopg_binary-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89506e268fb95428fb0f8f7abe48032e66cf47390469e11a4fe989f7407a5d88", size = 3456766 }, - { url = "https://files.pythonhosted.org/packages/12/c5/1be474bfa7282aa9177c3e498eb641b1441724f0155953f3872c69deddf0/psycopg_binary-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:7ddf1494cc3bf60761c01265c44dfc7a7fd63f21308c403c14f5dd91702df84d", size = 2790400 }, - { url = "https://files.pythonhosted.org/packages/48/f8/f30cf36bc9bc672894413f10f0498d5e81b0813c87f1b963d85e7c5cc9f1/psycopg_binary-3.2.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3ac24b3d421127ebe8662eba2c1e149a12f0f5b6795e66c1811a3f59111456bb", size = 3852023 }, - { url = "https://files.pythonhosted.org/packages/2f/23/88a265ca4a35def6f53cb239e352bf52f01ea418f57f4272b3913ecd6fd2/psycopg_binary-3.2.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f702f36204127984dd212eb57bb328676abdfe8a56f179e408a806d5e520aa11", size = 3935919 }, - { url = "https://files.pythonhosted.org/packages/0f/2b/2ac3456208c255a6fad9fec4fea0e411e34a0b4b0ecd1e60c0ba36fb78c4/psycopg_binary-3.2.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:610cd2013ee0849154fcff34b0cca17f720c91c7430ca094a61f1e5ff1d38e15", size = 4493108 }, - { url = "https://files.pythonhosted.org/packages/55/f5/725b786b7cf1b91f1afbe03545f0b14857c0a5cc03b4f8a6735ec289ff89/psycopg_binary-3.2.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95da59edd95f6b6488799c9710fafc2d5750e3ec6328ec991f7a9be04efe6886", size = 4300141 }, - { url = 
"https://files.pythonhosted.org/packages/09/80/72b3a1ec912b8be51e6af858fcd2a016d25145aca400e75bba6ab91025c4/psycopg_binary-3.2.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b71e98e3186f08473962e1ea4bfbc4387ecc398644b794cb112ad0a4276e3789", size = 4540559 }, - { url = "https://files.pythonhosted.org/packages/0b/8e/6cd6643f04e033bcdab008d5175c9356ade1eecff53fa4558d383dd9866c/psycopg_binary-3.2.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ccf4f71c3a0d46bc74207bf7997f010a6586414161dd10f3dd026ec059942ef", size = 4253687 }, - { url = "https://files.pythonhosted.org/packages/85/47/50d93bef98d32eba1f7b95e3c4e671a7f59b1d0b9ed01fdb43e951d6012b/psycopg_binary-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:244e1dd33b694792b7bc7a3d412a535ba39116218b07d8936b4591567f4121e9", size = 3842084 }, - { url = "https://files.pythonhosted.org/packages/2e/a0/2cf0dda5634d14219a24c05bc85cb928a5b2ea29684d167aebc974df016c/psycopg_binary-3.2.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f8dc8f4de5130c6278dd5e34b18ad8324a74658a7adb72d4e67ca97f9aeaaf3c", size = 3315357 }, - { url = "https://files.pythonhosted.org/packages/14/65/13b3dd91dd62f6e4ee3cb00bd24ab60a251592c03a8fb090c28057f21e38/psycopg_binary-3.2.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c336e58a48061a9189d3ba8c19f00fe5d9570219e6f7f954b923ad5c33e5bc71", size = 3394512 }, - { url = "https://files.pythonhosted.org/packages/07/cc/90b5307ff833892c8985aefd73c1894b1a9d8b5df4965650e95636ba8161/psycopg_binary-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9633c5dc6796d11766d2475e62335b67e5f99f119f40ba1675c1d23208d7709d", size = 3431893 }, - { url = "https://files.pythonhosted.org/packages/40/dc/5ab8fec2fc2e0599fd7a60abe046c853477bbb7cd978b818f795c5423848/psycopg_binary-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:295c25e56b430d786a475c5c2cef266b0b27c0a6fcaadf9d83a4cdcfb76f971f", size = 2778464 }, - { url = "https://files.pythonhosted.org/packages/25/e2/f56675aada063762f08559b6969e47e1313f269fc1682c16457c13da8186/psycopg_binary-3.2.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:81ab801c0d35830c876bf0d1edc8e7dd2f73aa2b04fe24eb812159c0b054d149", size = 3846854 }, - { url = "https://files.pythonhosted.org/packages/7b/8b/8c4a66b2b3db494367df0299535b7d2df78f303334228c517b8d00c411d5/psycopg_binary-3.2.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c09e02ce1124eb6638b3381df050a8cf88aedfad4522f939945cda49050a990c", size = 3932292 }, - { url = "https://files.pythonhosted.org/packages/84/e8/618d45f77cebce73d75497c95685a0902aea3783386d9335ce486c69e13a/psycopg_binary-3.2.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a249cdc6a5c2b5088a8677acba66b291e5237524739ab3d27498e1ef189312f5", size = 4493785 }, - { url = "https://files.pythonhosted.org/packages/c4/87/fc30318e6b97e723e017e7dc88d0f721bbfb749de1a6e414e52d4ac54c9a/psycopg_binary-3.2.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2960ba8a5c0ad75e184f6d8bf76bdf023708999efe75fe4e13445136c1cd206", size = 4304874 }, - { url = "https://files.pythonhosted.org/packages/91/30/1d127e651c21cd77befaf361c7c3b9001bfff51ac38027e8fce598ba0701/psycopg_binary-3.2.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dae2e50b0d3425c167eebbedc3553f7c811dbc0dbfc737b6877f68a03be7daf", size = 4541296 }, - { url = 
"https://files.pythonhosted.org/packages/0d/5e/22c824cb38745c1c744cec85d227190727c564afb75960ce0057ca15fd84/psycopg_binary-3.2.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03bf7ee7e0002c2cce43ecb923ec510358056eb2e44a96afaeb0424518f35206", size = 4255756 }, - { url = "https://files.pythonhosted.org/packages/b3/83/ae8783dec3f7e39df8a4056e4d383926ffec531970c0b415d48d9fd4a2c2/psycopg_binary-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5f5c85eeb63b1a8a6b026eef57f5da36ff215ce9a6a3bb8e20a409670d6cfbda", size = 3845918 }, - { url = "https://files.pythonhosted.org/packages/be/f7/fb7bffb0c4c45a5a82fe324e4f7b176075a4c5372e546a038858dd13c7ab/psycopg_binary-3.2.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8c7b95899d4d6d23c5cc46cb3419e8e6ca68d867509432ee1487042564a1ea55", size = 3315429 }, - { url = "https://files.pythonhosted.org/packages/81/a3/29f4993a239d6a3fb18ef8681d9990c007f5f73bdd2e21f65f07ac55ad6f/psycopg_binary-3.2.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fa4acea9ca20a567c3872a5afab2084751530bb57b8fb6b52820d5c54e7c8c3b", size = 3399388 }, - { url = "https://files.pythonhosted.org/packages/25/5b/925171cbfa2e3d1ccb7f4c005d0d5db609ba796c1d08a23c42825b09c554/psycopg_binary-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5c487f35a1905bb15da927c1fc05f70f3d29f0e21fb4ba21d360a0da9c755f20", size = 3436702 }, - { url = "https://files.pythonhosted.org/packages/b6/47/25b2b85b8fcabf99bfa92b4b0d587894c01576bf0b2bf137c243d1eb1070/psycopg_binary-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:80297c3a9f7b5a6afdb0d8f220661ccd796e5c9128c44b32c41267f7daefd37f", size = 2779196 }, +version = "3.2.5" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d6/30/af3806081adc75b5a8addde839d4c6b171a8c5d0d07dd92de20ca4dd6717/psycopg_binary-3.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a82211a43372cba9b1555a110e84e679deec2dc9463ae4c736977dad99dca5ed", size = 3868990 }, + { url = "https://files.pythonhosted.org/packages/31/77/31968655db2efe83c519e6296ff3a85a0c9e50432e0c11c8ffae1b404870/psycopg_binary-3.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e7d215a43343d91ba08301865f059d9518818d66a222a85fb425e4156716f5a6", size = 3938253 }, + { url = "https://files.pythonhosted.org/packages/b5/d7/c898cd7d5c672d1c16b10dfde6ab220a6d295ff136711bf8ebcd1bebe91e/psycopg_binary-3.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f893c0ed3d5c7b83b76b1f8f7d3ca5a03e38bcd3cab5d65b5c25a0d1064aca4", size = 4523098 }, + { url = "https://files.pythonhosted.org/packages/98/d7/84517d0f62ddb10ca15254b6a63596f0e47ebd462b3ed30473b191a2a57f/psycopg_binary-3.2.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d10ce4c39eb9631381a0c3792727946a4391e843625a7ee9579ac6bb11495a5", size = 4329658 }, + { url = "https://files.pythonhosted.org/packages/3d/65/9c6addcf00ba80d2355ffa825d6537d60313c24d4b6db438f631f9ff0ac7/psycopg_binary-3.2.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a602d9fdb567cca090ca19ac3ebf10219065be2a4f8cf9eb8356cffb5a7ab1d", size = 4575351 }, + { url = "https://files.pythonhosted.org/packages/a5/90/9f2c41b3b42d8cd8b9866f0bbd27a5796a1ca8042a1a019b39a6645df523/psycopg_binary-3.2.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c37eb3be7a6be93f4925ccf52bbfa60244da6c63201770a709dd81a3d2d08534", size = 4287136 }, + { url = 
"https://files.pythonhosted.org/packages/20/e6/2476e30ff4b02588799dc6d0cff244cea448f9a2a80e37b48c39a64a81be/psycopg_binary-3.2.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7d5f1bfc848a94e0d63fe693adee4f88bd9e5c415ecb4c9c17d2d44eba6795a6", size = 3872875 }, + { url = "https://files.pythonhosted.org/packages/ba/bc/93272521e571df3a6ce85553e2eba424c7abb2ded006b8d6643c2a3cc0f2/psycopg_binary-3.2.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b5e0acbc991472188c9df40eb56d8a97ad3ad00d4de560b8b74bdc2d94041a8f", size = 3341000 }, + { url = "https://files.pythonhosted.org/packages/a2/d7/930a127d2b4817445a08153a1b203655d3da52e79e4c66843d8bd7e3643f/psycopg_binary-3.2.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d4e0c1b1aa5283f6d9a384ffc7a8400d25386bb98fdb9bddae446e4ef4da7366", size = 3439711 }, + { url = "https://files.pythonhosted.org/packages/aa/4a/73ea25870d0b4cac60ad768e6cdf4014e7a44036ec29d3820876c62efea0/psycopg_binary-3.2.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c3c5fa3d4fa0a651cefab391b783f89bc5e331afa0a4e93c9b16141993fa05c8", size = 3464993 }, + { url = "https://files.pythonhosted.org/packages/55/1d/790223b15283904759ef48279dd7201dc4a9d088c5196f7b529a52c5b40d/psycopg_binary-3.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:7efe6c732fd2d7e22d72dc4f7cf9b644020adacfff61b0a8a151343da8e661c0", size = 2791126 }, + { url = "https://files.pythonhosted.org/packages/27/ac/201a9bcfe4a2ae0cc1999c55dff9a2da8daf829e9baca103045ed1c41876/psycopg_binary-3.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:393ab353196d364858b47317d27804ecc58ab56dbde32217bd67f0f2f2980662", size = 3876607 }, + { url = "https://files.pythonhosted.org/packages/4a/ef/2d7722bee81c0a2619b8748070cea8ec299979f677479554e299a864d171/psycopg_binary-3.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:71d82dbc7c6c7f5746468e7992e5483aa45b12250d78d220a2431ab88795825c", size = 3942789 }, + { url = "https://files.pythonhosted.org/packages/f6/dc/a1fe4b61d0f614ab6283a9c5a35747b8fd2b72d7c21f201d6772394c0c09/psycopg_binary-3.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39e2cd10bf15442d95c3f48376b25dc33360418ea6c3c05884d8bf42407768c0", size = 4519457 }, + { url = "https://files.pythonhosted.org/packages/2c/5a/bbf5ec9fea9cc81c77d37957777d9b15492884437929fc634fc6dc16aade/psycopg_binary-3.2.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7623659d44a6aa032be4a066c658ba45009d768c2481526fbef7c609702af116", size = 4324376 }, + { url = "https://files.pythonhosted.org/packages/4b/17/c785b4a795860bf67f0dc1e03129cb8e9a3be45d21049ccbffeae9c576e9/psycopg_binary-3.2.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cd9ebf335262e864d740f9dad3f672f61162cc0d4825a5eb5cf50df334a688f", size = 4578729 }, + { url = "https://files.pythonhosted.org/packages/e8/bb/c7bcb17b60040777fb26efd2db5f61bc84453e380114be480ebbedc20829/psycopg_binary-3.2.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc8bc40d82d1ee8dec136e10707c7f3147a6322fd8014e174a0f3446fb793649", size = 4281876 }, + { url = "https://files.pythonhosted.org/packages/2c/a2/ea6d36644fbccd462f4e3bd79149e94b284d4f90f24671bd50ce5e9e9dc5/psycopg_binary-3.2.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:11e3ed8b94c750d54fc3e4502dd930fb0fd041629845b6a7ce089873ac9756b0", size = 3871313 }, + { url = 
"https://files.pythonhosted.org/packages/09/38/b32728e13d65bac03d556f730af02509310f451ee873f8662bfc40b3f6ef/psycopg_binary-3.2.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:48fcb12a0a72fdfe4102bdb1252a7366e8d73a2c89fe6ce5923be890de367c2f", size = 3334458 }, + { url = "https://files.pythonhosted.org/packages/ca/69/fcd3d845ff2a39fad7783249c8add4966cb12a50f40df3cbcd743fa24c10/psycopg_binary-3.2.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:51a96d9fe51f718912b4a0089784f1f32d800217499fd0f0095b888506aba4c5", size = 3432832 }, + { url = "https://files.pythonhosted.org/packages/f6/9c/90baa71833da03c08ff9d4e12a4bcebfb15c1b0259738f7d3970c2292ab9/psycopg_binary-3.2.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eb8293d66c6a4ddc72fceb7ad0e111cb196cc394954ae0f9b63c251d97f1b00e", size = 3463280 }, + { url = "https://files.pythonhosted.org/packages/4f/42/f40ca24a89de58a47e54f82d7124d7dcf996781c89a5ed7bfe722e96da55/psycopg_binary-3.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:5b81342e139ddccfa417832089cd213bd4beacd7a1462ca4019cafe71682d177", size = 2794275 }, + { url = "https://files.pythonhosted.org/packages/84/eb/175a81bfd26734eeaaa39b651bc44a3c5e3fce1190963ace21e428c4d2ee/psycopg_binary-3.2.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a4321ee8180982d70458d3e8378e31448901bf0ee40fe0d410a87413578f4098", size = 3857964 }, + { url = "https://files.pythonhosted.org/packages/ca/2e/0d57047372c3dd31becc1a48185862d7e6714ffbdc1401742a32f2294f79/psycopg_binary-3.2.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2cc86657c05e09c701e97f87132cd58e0d55381dd568520081ac1fe7580a9bbb", size = 3940056 }, + { url = "https://files.pythonhosted.org/packages/c5/2f/339a18b28787d33fe892d1ae1fbaa83739c6274327cbf9ada4158322ad9d/psycopg_binary-3.2.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244bebaa9734a236b7157fb57c065b6c0f2344281916187bd73f951df1899e0", size = 4499081 }, + { url = "https://files.pythonhosted.org/packages/42/21/32d7115b2cbd87d043ad494254fd7c4c8652ac3c32f49bb571fd8111caf3/psycopg_binary-3.2.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21b839f9bfd77ed074f7f71464a43f453400c57d038a0ba0716329a28e335897", size = 4307502 }, + { url = "https://files.pythonhosted.org/packages/00/67/e99b58f616dd02c5e52c179b3df047d9683a9f699993cb1795ee435db598/psycopg_binary-3.2.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7376b13504396da9678b646f5338462347da01286b2a688a0d8493ec764683a2", size = 4547821 }, + { url = "https://files.pythonhosted.org/packages/0d/64/9d13ee0fed78a47c506a93d1e67ee53cc7ffd75c1f5885b59d17810fe5cd/psycopg_binary-3.2.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:473f6827cf1faf3924eb77146d1e85126a1b5e48a88053b8d8b78dd29e971d78", size = 4259849 }, + { url = "https://files.pythonhosted.org/packages/ea/f2/172b6ebcd60a1a86f5ce1a539cfb93ffbe42fc9bc7ab2e1ed79e99a75d71/psycopg_binary-3.2.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:28bd5cb2324567e5e70f07fe1d646398d6b0e210e28b49be0e69593590a59980", size = 3847280 }, + { url = "https://files.pythonhosted.org/packages/0f/51/9cd26c6b862d499b4b25ea173ae6e21c9d460ddce6b09cbe9501dff66211/psycopg_binary-3.2.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:48f97936145cb7de18b95d85670b2d3e2c257277263272be05815b74fb0ef195", size = 3320262 }, + { url = 
"https://files.pythonhosted.org/packages/51/7d/2dac61ff16476e77c6ce0a49a30b130e2ba6ad08c83c4950591b4bc49cf2/psycopg_binary-3.2.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8e6f2bef5aed021fbdf46323d3cd8847bf960efb56394698644a8ee2306f8892", size = 3400254 }, + { url = "https://files.pythonhosted.org/packages/45/67/bd36932c24f96dc1bc21fb18b1bdebcda7b9791067f7151a1c5dc1193e6b/psycopg_binary-3.2.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3d2e57a1d06f3968e49e948ba374f21a7d8dcf44f37d582a4aeddeb7c85ce239", size = 3438916 }, + { url = "https://files.pythonhosted.org/packages/00/ab/882b861cfcf83d7faffe583e1e092117cd66eacc86fb4517d27973e52f35/psycopg_binary-3.2.5-cp312-cp312-win_amd64.whl", hash = "sha256:2cbb8649cfdacbd14e17f5ab78edc52d33350013888518c73e90c5d17d7bea55", size = 2782504 }, + { url = "https://files.pythonhosted.org/packages/81/3d/26483d75e1a5daa93cbb47ee7cde96fac07a9b026058b036b00a04f5c012/psycopg_binary-3.2.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2dbaf32c18c0d11c4480016b89c9c5cadb7b64c55de7f181d222b189bd13a558", size = 3852616 }, + { url = "https://files.pythonhosted.org/packages/90/cb/542bd0eab110ed2ddcc02cbe8f5df0afe3e86bd843c533fc6a795ffd7c0f/psycopg_binary-3.2.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ca5e36a3e7480a5c09aed99ecdb8e6554b21485c3b064297fe77f7b1b5806106", size = 3936563 }, + { url = "https://files.pythonhosted.org/packages/e1/43/2b347816983a5b0f1cc3e608eae4650422476187e047e574981081bcf9ec/psycopg_binary-3.2.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9abe093a303e25ac58774a11241150e2fe2947358d1ca12521ad03c90b131060", size = 4499166 }, + { url = "https://files.pythonhosted.org/packages/3f/0d/d7ac5289dfa1163b0fcce9aeb848a7f4499d7b3ef34f1de565d0ba9a51bd/psycopg_binary-3.2.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a91b0e096fdfeb52d86bb8f5ee25dc22483d6960af9b968e6b381a8ec5bfbf82", size = 4311647 }, + { url = "https://files.pythonhosted.org/packages/7b/a2/b238d91cbbc5953ff6910737b5a598cc4d5aad84453052005891cec329b3/psycopg_binary-3.2.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3eb71cfc35116e4a8e336b7e785f1fe06ca23b4516a48ea91facd577d1a1fdf6", size = 4547848 }, + { url = "https://files.pythonhosted.org/packages/d7/33/e78ae02d8f23753af2884303370b914a5d172f76fed13bfde380ec473f53/psycopg_binary-3.2.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98efaedf2bf79f4d563ca039a57a025b72847bd80568f54709cc39fc1404772c", size = 4261732 }, + { url = "https://files.pythonhosted.org/packages/44/9a/1745ff5c6e4c715aa71f3da3f393022ec0c7cc972fa0ee7296df8871d6d6/psycopg_binary-3.2.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba4a610882171bdaae0779f14e0ff45f3ee271fd2dbf16cdadfc81bd67323232", size = 3850803 }, + { url = "https://files.pythonhosted.org/packages/7b/1c/933fb04560e7bcf5f24c632f9381e8700dcf8462adcd32eabd6192480d66/psycopg_binary-3.2.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1494827c43265820d5dcdc6f8086521bc7dd04b9da8831310978a788cdcd2e62", size = 3320315 }, + { url = "https://files.pythonhosted.org/packages/5d/36/111e2db9c3ff5123da4ce814aa9462d242a7c393f132a4005ec427e09903/psycopg_binary-3.2.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7a94020821723a6a210206ddb458001f3ed27e1e6a0555b9422bebf7ead8ff37", size = 3403225 }, + { url = 
"https://files.pythonhosted.org/packages/90/04/246efe587463d13b015202ab344e12e8e30ea9ba90ca952def0469b95a9e/psycopg_binary-3.2.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:659f2c675d478b1bc01b95a8d3ded74fa939b370e71ffbecd496f617b215eb05", size = 3440446 }, + { url = "https://files.pythonhosted.org/packages/92/75/5e15e7a6ad4c6a00fe1a28fe704310dc7f7b26dbd5e6e14c817e7899451b/psycopg_binary-3.2.5-cp313-cp313-win_amd64.whl", hash = "sha256:6b581da13126b8715c0c0585cd37ce934c9864d44b2a4019f5487c0b943275e6", size = 2783095 }, ] [[package]] name = "psycopg-pool" -version = "3.2.4" +version = "3.2.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/49/71/01d4e589dc5fd1f21368b7d2df183ed0e5bbc160ce291d745142b229797b/psycopg_pool-3.2.4.tar.gz", hash = "sha256:61774b5bbf23e8d22bedc7504707135aaf744679f8ef9b3fe29942920746a6ed", size = 29749 } +sdist = { url = "https://files.pythonhosted.org/packages/cf/13/1e7850bb2c69a63267c3dbf37387d3f71a00fd0e2fa55c5db14d64ba1af4/psycopg_pool-3.2.6.tar.gz", hash = "sha256:0f92a7817719517212fbfe2fd58b8c35c1850cdd2a80d36b581ba2085d9148e5", size = 29770 } wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/28/2b56ac94c236ee033c7b291bcaa6a83089d0cc0fe7830c35f6521177c199/psycopg_pool-3.2.4-py3-none-any.whl", hash = "sha256:f6a22cff0f21f06d72fb2f5cb48c618946777c49385358e0c88d062c59cbd224", size = 38240 }, + { url = "https://files.pythonhosted.org/packages/47/fd/4feb52a55c1a4bd748f2acaed1903ab54a723c47f6d0242780f4d97104d4/psycopg_pool-3.2.6-py3-none-any.whl", hash = "sha256:5887318a9f6af906d041a0b1dc1c60f8f0dda8340c2572b74e10907b51ed5da7", size = 38252 }, ] [[package]] @@ -3672,44 +3839,44 @@ wheels = [ [[package]] name = "pyarrow" -version = "18.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7f/7b/640785a9062bb00314caa8a387abce547d2a420cf09bd6c715fe659ccffb/pyarrow-18.1.0.tar.gz", hash = "sha256:9386d3ca9c145b5539a1cfc75df07757dff870168c959b473a0bccbc3abc8c73", size = 1118671 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1a/bb/8d4a1573f66e0684f190dd2b55fd0b97a7214de8882d58a3867e777bf640/pyarrow-18.1.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e21488d5cfd3d8b500b3238a6c4b075efabc18f0f6d80b29239737ebd69caa6c", size = 29531620 }, - { url = "https://files.pythonhosted.org/packages/30/90/893acfad917533b624a97b9e498c0e8393908508a0a72d624fe935e632bf/pyarrow-18.1.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b516dad76f258a702f7ca0250885fc93d1fa5ac13ad51258e39d402bd9e2e1e4", size = 30836521 }, - { url = "https://files.pythonhosted.org/packages/a3/2a/526545a7464b5fb2fa6e2c4bad16ca90e59e1843025c534fd907b7f73e5a/pyarrow-18.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f443122c8e31f4c9199cb23dca29ab9427cef990f283f80fe15b8e124bcc49b", size = 39213905 }, - { url = "https://files.pythonhosted.org/packages/8a/77/4b3fab91a30e19e233e738d0c5eca5a8f6dd05758bc349a2ca262c65de79/pyarrow-18.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a03da7f2758645d17b7b4f83c8bffeae5bbb7f974523fe901f36288d2eab71", size = 40128881 }, - { url = "https://files.pythonhosted.org/packages/aa/e2/a88e16c5e45e562449c52305bd3bc2f9d704295322d3434656e7ccac1444/pyarrow-18.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = 
"sha256:ba17845efe3aa358ec266cf9cc2800fa73038211fb27968bfa88acd09261a470", size = 38627517 }, - { url = "https://files.pythonhosted.org/packages/6d/84/8037c20005ccc7b869726465be0957bd9c29cfc88612962030f08292ad06/pyarrow-18.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3c35813c11a059056a22a3bef520461310f2f7eea5c8a11ef9de7062a23f8d56", size = 40060187 }, - { url = "https://files.pythonhosted.org/packages/2a/38/d6435c723ff73df8ae74626ea778262fbcc2b9b0d1a4f3db915b61711b05/pyarrow-18.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9736ba3c85129d72aefa21b4f3bd715bc4190fe4426715abfff90481e7d00812", size = 25118314 }, - { url = "https://files.pythonhosted.org/packages/9e/4d/a4988e7d82f4fbc797715db4185939a658eeffb07a25bab7262bed1ea076/pyarrow-18.1.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:eaeabf638408de2772ce3d7793b2668d4bb93807deed1725413b70e3156a7854", size = 29554860 }, - { url = "https://files.pythonhosted.org/packages/59/03/3a42c5c1e4bd4c900ab62aa1ff6b472bdb159ba8f1c3e5deadab7222244f/pyarrow-18.1.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:3b2e2239339c538f3464308fd345113f886ad031ef8266c6f004d49769bb074c", size = 30867076 }, - { url = "https://files.pythonhosted.org/packages/75/7e/332055ac913373e89256dce9d14b7708f55f7bd5be631456c897f0237738/pyarrow-18.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f39a2e0ed32a0970e4e46c262753417a60c43a3246972cfc2d3eb85aedd01b21", size = 39212135 }, - { url = "https://files.pythonhosted.org/packages/8c/64/5099cdb325828722ef7ffeba9a4696f238eb0cdeae227f831c2d77fcf1bd/pyarrow-18.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31e9417ba9c42627574bdbfeada7217ad8a4cbbe45b9d6bdd4b62abbca4c6f6", size = 40125195 }, - { url = "https://files.pythonhosted.org/packages/83/88/1938d783727db1b178ff71bc6a6143d7939e406db83a9ec23cad3dad325c/pyarrow-18.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:01c034b576ce0eef554f7c3d8c341714954be9b3f5d5bc7117006b85fcf302fe", size = 38641884 }, - { url = "https://files.pythonhosted.org/packages/5e/b5/9e14e9f7590e0eaa435ecea84dabb137284a4dbba7b3c337b58b65b76d95/pyarrow-18.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f266a2c0fc31995a06ebd30bcfdb7f615d7278035ec5b1cd71c48d56daaf30b0", size = 40076877 }, - { url = "https://files.pythonhosted.org/packages/4d/a3/817ac7fe0891a2d66e247e223080f3a6a262d8aefd77e11e8c27e6acf4e1/pyarrow-18.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:d4f13eee18433f99adefaeb7e01d83b59f73360c231d4782d9ddfaf1c3fbde0a", size = 25119811 }, - { url = "https://files.pythonhosted.org/packages/6a/50/12829e7111b932581e51dda51d5cb39207a056c30fe31ef43f14c63c4d7e/pyarrow-18.1.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:9f3a76670b263dc41d0ae877f09124ab96ce10e4e48f3e3e4257273cee61ad0d", size = 29514620 }, - { url = "https://files.pythonhosted.org/packages/d1/41/468c944eab157702e96abab3d07b48b8424927d4933541ab43788bb6964d/pyarrow-18.1.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:da31fbca07c435be88a0c321402c4e31a2ba61593ec7473630769de8346b54ee", size = 30856494 }, - { url = "https://files.pythonhosted.org/packages/68/f9/29fb659b390312a7345aeb858a9d9c157552a8852522f2c8bad437c29c0a/pyarrow-18.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:543ad8459bc438efc46d29a759e1079436290bd583141384c6f7a1068ed6f992", size = 39203624 }, - { url = 
"https://files.pythonhosted.org/packages/6e/f6/19360dae44200e35753c5c2889dc478154cd78e61b1f738514c9f131734d/pyarrow-18.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0743e503c55be0fdb5c08e7d44853da27f19dc854531c0570f9f394ec9671d54", size = 40139341 }, - { url = "https://files.pythonhosted.org/packages/bb/e6/9b3afbbcf10cc724312e824af94a2e993d8ace22994d823f5c35324cebf5/pyarrow-18.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d4b3d2a34780645bed6414e22dda55a92e0fcd1b8a637fba86800ad737057e33", size = 38618629 }, - { url = "https://files.pythonhosted.org/packages/3a/2e/3b99f8a3d9e0ccae0e961978a0d0089b25fb46ebbcfb5ebae3cca179a5b3/pyarrow-18.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c52f81aa6f6575058d8e2c782bf79d4f9fdc89887f16825ec3a66607a5dd8e30", size = 40078661 }, - { url = "https://files.pythonhosted.org/packages/76/52/f8da04195000099d394012b8d42c503d7041b79f778d854f410e5f05049a/pyarrow-18.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ad4892617e1a6c7a551cfc827e072a633eaff758fa09f21c4ee548c30bcaf99", size = 25092330 }, - { url = "https://files.pythonhosted.org/packages/cb/87/aa4d249732edef6ad88899399047d7e49311a55749d3c373007d034ee471/pyarrow-18.1.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:84e314d22231357d473eabec709d0ba285fa706a72377f9cc8e1cb3c8013813b", size = 29497406 }, - { url = "https://files.pythonhosted.org/packages/3c/c7/ed6adb46d93a3177540e228b5ca30d99fc8ea3b13bdb88b6f8b6467e2cb7/pyarrow-18.1.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:f591704ac05dfd0477bb8f8e0bd4b5dc52c1cadf50503858dce3a15db6e46ff2", size = 30835095 }, - { url = "https://files.pythonhosted.org/packages/41/d7/ed85001edfb96200ff606943cff71d64f91926ab42828676c0fc0db98963/pyarrow-18.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acb7564204d3c40babf93a05624fc6a8ec1ab1def295c363afc40b0c9e66c191", size = 39194527 }, - { url = "https://files.pythonhosted.org/packages/59/16/35e28eab126342fa391593415d79477e89582de411bb95232f28b131a769/pyarrow-18.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74de649d1d2ccb778f7c3afff6085bd5092aed4c23df9feeb45dd6b16f3811aa", size = 40131443 }, - { url = "https://files.pythonhosted.org/packages/0c/95/e855880614c8da20f4cd74fa85d7268c725cf0013dc754048593a38896a0/pyarrow-18.1.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f96bd502cb11abb08efea6dab09c003305161cb6c9eafd432e35e76e7fa9b90c", size = 38608750 }, - { url = "https://files.pythonhosted.org/packages/54/9d/f253554b1457d4fdb3831b7bd5f8f00f1795585a606eabf6fec0a58a9c38/pyarrow-18.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:36ac22d7782554754a3b50201b607d553a8d71b78cdf03b33c1125be4b52397c", size = 40066690 }, - { url = "https://files.pythonhosted.org/packages/2f/58/8912a2563e6b8273e8aa7b605a345bba5a06204549826f6493065575ebc0/pyarrow-18.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:25dbacab8c5952df0ca6ca0af28f50d45bd31c1ff6fcf79e2d120b4a65ee7181", size = 25081054 }, - { url = "https://files.pythonhosted.org/packages/82/f9/d06ddc06cab1ada0c2f2fd205ac8c25c2701182de1b9c4bf7a0a44844431/pyarrow-18.1.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6a276190309aba7bc9d5bd2933230458b3521a4317acfefe69a354f2fe59f2bc", size = 29525542 }, - { url = "https://files.pythonhosted.org/packages/ab/94/8917e3b961810587ecbdaa417f8ebac0abb25105ae667b7aa11c05876976/pyarrow-18.1.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = 
"sha256:ad514dbfcffe30124ce655d72771ae070f30bf850b48bc4d9d3b25993ee0e386", size = 30829412 }, - { url = "https://files.pythonhosted.org/packages/5e/e3/3b16c3190f3d71d3b10f6758d2d5f7779ef008c4fd367cedab3ed178a9f7/pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aebc13a11ed3032d8dd6e7171eb6e86d40d67a5639d96c35142bd568b9299324", size = 39119106 }, - { url = "https://files.pythonhosted.org/packages/1d/d6/5d704b0d25c3c79532f8c0639f253ec2803b897100f64bcb3f53ced236e5/pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6cf5c05f3cee251d80e98726b5c7cc9f21bab9e9783673bac58e6dfab57ecc8", size = 40090940 }, - { url = "https://files.pythonhosted.org/packages/37/29/366bc7e588220d74ec00e497ac6710c2833c9176f0372fe0286929b2d64c/pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:11b676cd410cf162d3f6a70b43fb9e1e40affbc542a1e9ed3681895f2962d3d9", size = 38548177 }, - { url = "https://files.pythonhosted.org/packages/c8/11/fabf6ecabb1fe5b7d96889228ca2a9158c4c3bb732e3b8ee3f7f6d40b703/pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:b76130d835261b38f14fc41fdfb39ad8d672afb84c447126b84d5472244cfaba", size = 40043567 }, +version = "19.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7f/09/a9046344212690f0632b9c709f9bf18506522feb333c894d0de81d62341a/pyarrow-19.0.1.tar.gz", hash = "sha256:3bf266b485df66a400f282ac0b6d1b500b9d2ae73314a153dbe97d6d5cc8a99e", size = 1129437 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/01/b23b514d86b839956238d3f8ef206fd2728eee87ff1b8ce150a5678d9721/pyarrow-19.0.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:fc28912a2dc924dddc2087679cc8b7263accc71b9ff025a1362b004711661a69", size = 30688914 }, + { url = "https://files.pythonhosted.org/packages/c6/68/218ff7cf4a0652a933e5f2ed11274f724dd43b9813cb18dd72c0a35226a2/pyarrow-19.0.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:fca15aabbe9b8355800d923cc2e82c8ef514af321e18b437c3d782aa884eaeec", size = 32102866 }, + { url = "https://files.pythonhosted.org/packages/98/01/c295050d183014f4a2eb796d7d2bbfa04b6cccde7258bb68aacf6f18779b/pyarrow-19.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad76aef7f5f7e4a757fddcdcf010a8290958f09e3470ea458c80d26f4316ae89", size = 41147682 }, + { url = "https://files.pythonhosted.org/packages/40/17/a6c3db0b5f3678f33bbb552d2acbc16def67f89a72955b67b0109af23eb0/pyarrow-19.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d03c9d6f2a3dffbd62671ca070f13fc527bb1867b4ec2b98c7eeed381d4f389a", size = 42179192 }, + { url = "https://files.pythonhosted.org/packages/cf/75/c7c8e599300d8cebb6cb339014800e1c720c9db2a3fcb66aa64ec84bac72/pyarrow-19.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:65cf9feebab489b19cdfcfe4aa82f62147218558d8d3f0fc1e9dea0ab8e7905a", size = 40517272 }, + { url = "https://files.pythonhosted.org/packages/ef/c9/68ab123ee1528699c4d5055f645ecd1dd68ff93e4699527249d02f55afeb/pyarrow-19.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:41f9706fbe505e0abc10e84bf3a906a1338905cbbcf1177b71486b03e6ea6608", size = 42069036 }, + { url = "https://files.pythonhosted.org/packages/54/e3/d5cfd7654084e6c0d9c3ce949e5d9e0ccad569ae1e2d5a68a3ec03b2be89/pyarrow-19.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6cb2335a411b713fdf1e82a752162f72d4a7b5dbc588e32aa18383318b05866", size = 25277951 }, + { url = 
"https://files.pythonhosted.org/packages/a0/55/f1a8d838ec07fe3ca53edbe76f782df7b9aafd4417080eebf0b42aab0c52/pyarrow-19.0.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:cc55d71898ea30dc95900297d191377caba257612f384207fe9f8293b5850f90", size = 30713987 }, + { url = "https://files.pythonhosted.org/packages/13/12/428861540bb54c98a140ae858a11f71d041ef9e501e6b7eb965ca7909505/pyarrow-19.0.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:7a544ec12de66769612b2d6988c36adc96fb9767ecc8ee0a4d270b10b1c51e00", size = 32135613 }, + { url = "https://files.pythonhosted.org/packages/2f/8a/23d7cc5ae2066c6c736bce1db8ea7bc9ac3ef97ac7e1c1667706c764d2d9/pyarrow-19.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0148bb4fc158bfbc3d6dfe5001d93ebeed253793fff4435167f6ce1dc4bddeae", size = 41149147 }, + { url = "https://files.pythonhosted.org/packages/a2/7a/845d151bb81a892dfb368bf11db584cf8b216963ccce40a5cf50a2492a18/pyarrow-19.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f24faab6ed18f216a37870d8c5623f9c044566d75ec586ef884e13a02a9d62c5", size = 42178045 }, + { url = "https://files.pythonhosted.org/packages/a7/31/e7282d79a70816132cf6cae7e378adfccce9ae10352d21c2fecf9d9756dd/pyarrow-19.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:4982f8e2b7afd6dae8608d70ba5bd91699077323f812a0448d8b7abdff6cb5d3", size = 40532998 }, + { url = "https://files.pythonhosted.org/packages/b8/82/20f3c290d6e705e2ee9c1fa1d5a0869365ee477e1788073d8b548da8b64c/pyarrow-19.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:49a3aecb62c1be1d822f8bf629226d4a96418228a42f5b40835c1f10d42e4db6", size = 42084055 }, + { url = "https://files.pythonhosted.org/packages/ff/77/e62aebd343238863f2c9f080ad2ef6ace25c919c6ab383436b5b81cbeef7/pyarrow-19.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:008a4009efdb4ea3d2e18f05cd31f9d43c388aad29c636112c2966605ba33466", size = 25283133 }, + { url = "https://files.pythonhosted.org/packages/78/b4/94e828704b050e723f67d67c3535cf7076c7432cd4cf046e4bb3b96a9c9d/pyarrow-19.0.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:80b2ad2b193e7d19e81008a96e313fbd53157945c7be9ac65f44f8937a55427b", size = 30670749 }, + { url = "https://files.pythonhosted.org/packages/7e/3b/4692965e04bb1df55e2c314c4296f1eb12b4f3052d4cf43d29e076aedf66/pyarrow-19.0.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:ee8dec072569f43835932a3b10c55973593abc00936c202707a4ad06af7cb294", size = 32128007 }, + { url = "https://files.pythonhosted.org/packages/22/f7/2239af706252c6582a5635c35caa17cb4d401cd74a87821ef702e3888957/pyarrow-19.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d5d1ec7ec5324b98887bdc006f4d2ce534e10e60f7ad995e7875ffa0ff9cb14", size = 41144566 }, + { url = "https://files.pythonhosted.org/packages/fb/e3/c9661b2b2849cfefddd9fd65b64e093594b231b472de08ff658f76c732b2/pyarrow-19.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ad4c0eb4e2a9aeb990af6c09e6fa0b195c8c0e7b272ecc8d4d2b6574809d34", size = 42202991 }, + { url = "https://files.pythonhosted.org/packages/fe/4f/a2c0ed309167ef436674782dfee4a124570ba64299c551e38d3fdaf0a17b/pyarrow-19.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d383591f3dcbe545f6cc62daaef9c7cdfe0dff0fb9e1c8121101cabe9098cfa6", size = 40507986 }, + { url = "https://files.pythonhosted.org/packages/27/2e/29bb28a7102a6f71026a9d70d1d61df926887e36ec797f2e6acfd2dd3867/pyarrow-19.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = 
"sha256:b4c4156a625f1e35d6c0b2132635a237708944eb41df5fbe7d50f20d20c17832", size = 42087026 }, + { url = "https://files.pythonhosted.org/packages/16/33/2a67c0f783251106aeeee516f4806161e7b481f7d744d0d643d2f30230a5/pyarrow-19.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:5bd1618ae5e5476b7654c7b55a6364ae87686d4724538c24185bbb2952679960", size = 25250108 }, + { url = "https://files.pythonhosted.org/packages/2b/8d/275c58d4b00781bd36579501a259eacc5c6dfb369be4ddeb672ceb551d2d/pyarrow-19.0.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e45274b20e524ae5c39d7fc1ca2aa923aab494776d2d4b316b49ec7572ca324c", size = 30653552 }, + { url = "https://files.pythonhosted.org/packages/a0/9e/e6aca5cc4ef0c7aec5f8db93feb0bde08dbad8c56b9014216205d271101b/pyarrow-19.0.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:d9dedeaf19097a143ed6da37f04f4051aba353c95ef507764d344229b2b740ae", size = 32103413 }, + { url = "https://files.pythonhosted.org/packages/6a/fa/a7033f66e5d4f1308c7eb0dfcd2ccd70f881724eb6fd1776657fdf65458f/pyarrow-19.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ebfb5171bb5f4a52319344ebbbecc731af3f021e49318c74f33d520d31ae0c4", size = 41134869 }, + { url = "https://files.pythonhosted.org/packages/2d/92/34d2569be8e7abdc9d145c98dc410db0071ac579b92ebc30da35f500d630/pyarrow-19.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a21d39fbdb948857f67eacb5bbaaf36802de044ec36fbef7a1c8f0dd3a4ab2", size = 42192626 }, + { url = "https://files.pythonhosted.org/packages/0a/1f/80c617b1084fc833804dc3309aa9d8daacd46f9ec8d736df733f15aebe2c/pyarrow-19.0.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:99bc1bec6d234359743b01e70d4310d0ab240c3d6b0da7e2a93663b0158616f6", size = 40496708 }, + { url = "https://files.pythonhosted.org/packages/e6/90/83698fcecf939a611c8d9a78e38e7fed7792dcc4317e29e72cf8135526fb/pyarrow-19.0.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1b93ef2c93e77c442c979b0d596af45e4665d8b96da598db145b0fec014b9136", size = 42075728 }, + { url = "https://files.pythonhosted.org/packages/40/49/2325f5c9e7a1c125c01ba0c509d400b152c972a47958768e4e35e04d13d8/pyarrow-19.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:d9d46e06846a41ba906ab25302cf0fd522f81aa2a85a71021826f34639ad31ef", size = 25242568 }, + { url = "https://files.pythonhosted.org/packages/3f/72/135088d995a759d4d916ec4824cb19e066585b4909ebad4ab196177aa825/pyarrow-19.0.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:c0fe3dbbf054a00d1f162fda94ce236a899ca01123a798c561ba307ca38af5f0", size = 30702371 }, + { url = "https://files.pythonhosted.org/packages/2e/01/00beeebd33d6bac701f20816a29d2018eba463616bbc07397fdf99ac4ce3/pyarrow-19.0.1-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:96606c3ba57944d128e8a8399da4812f56c7f61de8c647e3470b417f795d0ef9", size = 32116046 }, + { url = "https://files.pythonhosted.org/packages/1f/c9/23b1ea718dfe967cbd986d16cf2a31fe59d015874258baae16d7ea0ccabc/pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f04d49a6b64cf24719c080b3c2029a3a5b16417fd5fd7c4041f94233af732f3", size = 41091183 }, + { url = "https://files.pythonhosted.org/packages/3a/d4/b4a3aa781a2c715520aa8ab4fe2e7fa49d33a1d4e71c8fc6ab7b5de7a3f8/pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9137cf7e1640dce4c190551ee69d478f7121b5c6f323553b319cac936395f6", size = 42171896 }, + { url = 
"https://files.pythonhosted.org/packages/23/1b/716d4cd5a3cbc387c6e6745d2704c4b46654ba2668260d25c402626c5ddb/pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:7c1bca1897c28013db5e4c83944a2ab53231f541b9e0c3f4791206d0c0de389a", size = 40464851 }, + { url = "https://files.pythonhosted.org/packages/ed/bd/54907846383dcc7ee28772d7e646f6c34276a17da740002a5cefe90f04f7/pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:58d9397b2e273ef76264b45531e9d552d8ec8a6688b7390b5be44c02a37aade8", size = 42085744 }, ] [[package]] @@ -3753,16 +3920,16 @@ wheels = [ [[package]] name = "pydantic" -version = "2.10.5" +version = "2.10.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pydantic-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6a/c7/ca334c2ef6f2e046b1144fe4bb2a5da8a4c574e7f2ebf7e16b34a6a2fa92/pydantic-2.10.5.tar.gz", hash = "sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff", size = 761287 } +sdist = { url = "https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681 } wheels = [ - { url = "https://files.pythonhosted.org/packages/58/26/82663c79010b28eddf29dcdd0ea723439535fa917fce5905885c0e9ba562/pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53", size = 431426 }, + { url = "https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696 }, ] [[package]] @@ -3842,15 +4009,27 @@ wheels = [ [[package]] name = "pydantic-settings" -version = "2.7.1" +version = "2.8.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "python-dotenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/73/7b/c58a586cd7d9ac66d2ee4ba60ca2d241fa837c02bca9bea80a9a8c3d22a9/pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93", size = 79920 } +sdist = { url = "https://files.pythonhosted.org/packages/ca/a2/ad2511ede77bb424f3939e5148a56d968cdc6b1462620d24b2a1f4ab65b4/pydantic_settings-2.8.0.tar.gz", hash = "sha256:88e2ca28f6e68ea102c99c3c401d6c9078e68a5df600e97b43891c34e089500a", size = 83347 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/46/93416fdae86d40879714f72956ac14df9c7b76f7d41a4d68aa9f71a0028b/pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd", size = 29718 }, + { url = "https://files.pythonhosted.org/packages/c1/a9/3b9642025174bbe67e900785fb99c9bfe91ea584b0b7126ff99945c24a0e/pydantic_settings-2.8.0-py3-none-any.whl", hash = "sha256:c782c7dc3fb40e97b238e713c25d26f64314aece2e91abcff592fcac15f71820", 
size = 30746 }, +] + +[[package]] +name = "pyee" +version = "12.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0a/37/8fb6e653597b2b67ef552ed49b438d5398ba3b85a9453f8ada0fd77d455c/pyee-12.1.1.tar.gz", hash = "sha256:bbc33c09e2ff827f74191e3e5bbc6be7da02f627b7ec30d86f5ce1a6fb2424a3", size = 30915 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/68/7e150cba9eeffdeb3c5cecdb6896d70c8edd46ce41c0491e12fb2b2256ff/pyee-12.1.1-py3-none-any.whl", hash = "sha256:18a19c650556bb6b32b406d7f017c8f513aceed1ef7ca618fb65de7bd2d347ef", size = 15527 }, ] [[package]] @@ -3876,6 +4055,24 @@ crypto = [ { name = "cryptography", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] +[[package]] +name = "pylibsrtp" +version = "0.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/49/1c5101ecfeda540699e0754dddfc91c401fbf736ebe99d66e59fe3dad2ba/pylibsrtp-0.11.0.tar.gz", hash = "sha256:5a8d19b1448baebde5ae3cedfa51f10e8ada3d9d99f43046ced0ecf1c105b8ec", size = 10786 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/95/65650bf56e1080beb5f7c963a0bb11a6ee7599bfd89b33ff4525d2b5824b/pylibsrtp-0.11.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:36c6b33347d47c889b7dd465c6ae1f44d7705d00436ca613fd2a8f5dd401b104", size = 1727506 }, + { url = "https://files.pythonhosted.org/packages/4e/b0/f12c489ea8716e74343559abc5d0dfb94d66bcfe1924d64d58424a50f496/pylibsrtp-0.11.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:cf18b80f9513484a70e55136ece6ec80e7d21c03cc69abbb428e4f2745ca3cee", size = 2058008 }, + { url = "https://files.pythonhosted.org/packages/e1/2e/6040cd6da6f82f3aa1763c8c45f7fcfdfe08db5560c73f5e1deb4c36c2bb/pylibsrtp-0.11.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81bbe0cd777979f7fc45c85f0c619c9cbe709faffbf91675d9dcce560734b353", size = 2566705 }, + { url = "https://files.pythonhosted.org/packages/2b/c9/fd313ac3a23e9c45493131d9fa3463770289e59bb8422c6c6877ab3add40/pylibsrtp-0.11.0-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78fcdfe63925ea9a5017884c31fe9687b9b8b9f7d9beb7e25e3be47aa6ece495", size = 2168163 }, + { url = "https://files.pythonhosted.org/packages/f9/b3/ae0bac50cc0cca4b8c14de8063ba410ed3edd82c71a2315f284c9be7d679/pylibsrtp-0.11.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1909f7e781a7675d5c92cbad9e7ed3642e626e2bea5834243e423976e5420ac3", size = 2224343 }, + { url = "https://files.pythonhosted.org/packages/51/c4/650c2cecd5810f84adc89f3a94a28ea02d7ac8eaf3ee718a629c6f8ebf09/pylibsrtp-0.11.0-cp39-abi3-win32.whl", hash = "sha256:15123cecd377248747c95de9305ac314f3bcccdae46022bb4b9d60a552a26a10", size = 1156330 }, + { url = "https://files.pythonhosted.org/packages/fe/78/724307095b95c937e54c48133be3e85779cebea770f7536be555217b31f2/pylibsrtp-0.11.0-cp39-abi3-win_amd64.whl", hash = "sha256:bea2fb98029d19de516538b13c4827b6474d6f85d9ea50fae349e9671b946f7a", size = 1486448 }, +] + [[package]] name = "pymeta3" version = "0.5.1" @@ -3884,7 +4081,7 @@ sdist = { url = "https://files.pythonhosted.org/packages/ce/af/409edba35fc597f1e [[package]] name = "pymilvus" 
-version = "2.5.3" +version = "2.5.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3895,56 +4092,78 @@ dependencies = [ { name = "setuptools", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "ujson", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a9/8a/a10d29f5d9c9c33ac71db4594e3e6230279d557d6bd5fde6f99d1edfc360/pymilvus-2.5.3.tar.gz", hash = "sha256:68bc3797b7a14c494caf116cee888894ffd6eba7b96a3ac841be85d60694cc5d", size = 1258217 } +sdist = { url = "https://files.pythonhosted.org/packages/f7/64/b00289d52e33a6ebc645cf0d60a7a0a3ce4db74648ceb1f55d776971e34d/pymilvus-2.5.4.tar.gz", hash = "sha256:611732428ff669d57ded3d1f823bdeb10febf233d0251cce8498b287e5a10ce8", size = 1250160 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/ef/2a5682e02ef69465f7a50aa48fd9ac3fe12a3f653f51cbdc211a28557efc/pymilvus-2.5.3-py3-none-any.whl", hash = "sha256:64ca63594284586937274800be27a402f3be2d078130bf81d94ab8d7798ac9c8", size = 229867 }, + { url = "https://files.pythonhosted.org/packages/26/e6/1ba3cae7c723ecf9ede7a30c78824953afc2fe4bab5fce8ec5d8e233f541/pymilvus-2.5.4-py3-none-any.whl", hash = "sha256:3f7ddaeae0c8f63554b8e316b73f265d022e05a457d47c366ce47293434a3aea", size = 222399 }, ] [[package]] name = "pymongo" -version = "4.9.2" +version = "4.11.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "dnspython", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fb/43/d5e8993bd43e6f9cbe985e8ae1398eb73309e88694ac2ea618eacbc9cea2/pymongo-4.9.2.tar.gz", hash = "sha256:3e63535946f5df7848307b9031aa921f82bb0cbe45f9b0c3296f2173f9283eb0", size = 1889366 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/af/1ce26b971e520de621239842f2be302749eb752a5cb29dd253f4c210eb0a/pymongo-4.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ab8d54529feb6e29035ba8f0570c99ad36424bc26486c238ad7ce28597bc43c8", size = 833709 }, - { url = "https://files.pythonhosted.org/packages/a6/bd/7bc8224ae96fd9ffe8b2a193469200b9c75787178c5b1955bd20e5d024c7/pymongo-4.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f928bdc152a995cbd0b563fab201b2df873846d11f7a41d1f8cc8a01b35591ab", size = 833974 }, - { url = "https://files.pythonhosted.org/packages/87/2e/3cc96aec7a1d6151677bb108af606ea220205a47255ed53255bfe1d8f31f/pymongo-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6e7251d59fa3dcbb1399a71a3aec63768cebc6b22180b671601c2195fe1f90a", size = 1405440 }, - { url = "https://files.pythonhosted.org/packages/e8/9c/2d5db2fcabc873daead275729c17ddeb2b437010858fe101e8d59a276209/pymongo-4.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e759ed0459e7264a11b6896016f616341a8e4c6ab7f71ae651bd21ffc7e9524", size = 1454720 }, - { url = "https://files.pythonhosted.org/packages/6f/84/b382e7f817fd39dcd02ae69e21afd538251acf5de1904606a9908d8895fe/pymongo-4.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f3fc60f242191840ccf02b898bc615b5141fbb70064f38f7e60fcaa35d3b5efd", size = 1431625 }, - { url = 
"https://files.pythonhosted.org/packages/87/f5/653f9af6a7625353138bded4548a5a48729352b963fc2a059e07241b37c2/pymongo-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c798351666ac97a0ddaa823689061c3af949c2d6acf7fb2d9ab0a7f465ced79", size = 1409027 }, - { url = "https://files.pythonhosted.org/packages/36/26/f4159209cf6229ce0a5ac37f093dab49495c51daad8ca835279f0058b060/pymongo-4.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aac78b5fdd49ed8cae49adf76befacb02293a23b412676775c4715148e166d85", size = 1378524 }, - { url = "https://files.pythonhosted.org/packages/57/3c/78c60e721a975b836922467410dd4b9616ac84f096eec00f7bde9e889b2b/pymongo-4.9.2-cp310-cp310-win32.whl", hash = "sha256:bf77bf175c315e299a91332c2bbebc097c4d4fcc8713e513a9861684aa39023a", size = 810564 }, - { url = "https://files.pythonhosted.org/packages/71/cf/790c8da7fdd55e5e824b08eaf63355732bbf278ebcb98615e723feb05702/pymongo-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:c42b5aad8971256365bfd0a545fb1c7a199c93db80decd298ea2f987419e2a6d", size = 825019 }, - { url = "https://files.pythonhosted.org/packages/a8/b4/7af80304a0798526fac959e3de651b0747472c049c8b89a6c15fed2026f6/pymongo-4.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:99e40f44877b32bf4b3c46ceed2228f08c222cf7dec8a4366dd192a1429143fa", size = 887499 }, - { url = "https://files.pythonhosted.org/packages/33/ee/5389229774f842bd92a123fd3ea4f2d72b474bde9315ff00e889fe104a0d/pymongo-4.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6f6834d575ed87edc7dfcab4501d961b6a423b3839edd29ecb1382eee7736777", size = 887755 }, - { url = "https://files.pythonhosted.org/packages/d4/fd/3f0ae0fd3a7049ec67ab8f952020bc9fad841791d52d8c51405bd91b3c9b/pymongo-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3010018f5672e5b7e8d096dea9f1ea6545b05345ff0eb1754f6ee63785550773", size = 1647336 }, - { url = "https://files.pythonhosted.org/packages/00/b7/0472d51778e9e22b2ffd5ae9a401888525c4872cb2073f1bff8d5ae9659b/pymongo-4.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69394ee9f0ce38ff71266bad01b7e045cd75e58500ebad5d72187cbabf2e652a", size = 1713193 }, - { url = "https://files.pythonhosted.org/packages/8c/ac/aa41cb291107bb16bae286d7b9f2c868e393765830bc173609ae4dc9a3ae/pymongo-4.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87b18094100f21615d9db99c255dcd9e93e476f10fb03c1d3632cf4b82d201d2", size = 1681720 }, - { url = "https://files.pythonhosted.org/packages/dc/70/ac12eb58bd46a7254daaa4d39e7c4109983ee2227dac44df6587954fe345/pymongo-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3039e093d28376d6a54bdaa963ca12230c8a53d7b19c8e6368e19bcfbd004176", size = 1652109 }, - { url = "https://files.pythonhosted.org/packages/d3/20/38f71e0f1c7878b287305b2965cebe327fc5626ecca83ea52a272968cbe2/pymongo-4.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ab42d9ee93fe6b90020c42cba5bfb43a2b4660951225d137835efc21940da48", size = 1611503 }, - { url = "https://files.pythonhosted.org/packages/9b/4c/d3b26e1040c9538b9c8aed005ec18af7515c6dd3091aabfbf6c30a3b3b1a/pymongo-4.9.2-cp311-cp311-win32.whl", hash = "sha256:a663ca60e187a248d370c58961e40f5463077d2b43831eb92120ea28a79ecf96", size = 855570 }, - { url = 
"https://files.pythonhosted.org/packages/40/3d/7de1a4cf51bf2b10bb9f43ffa208acad0d64c18994ca8d83f490edef6834/pymongo-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:24e7b6887bbfefd05afed26a99a2c69459e2daa351a43a410de0d6c0ee3cce4e", size = 874715 }, - { url = "https://files.pythonhosted.org/packages/a1/08/7d95aab0463dc5a2c460a0b4e50a45a743afbe20986f47f87a9a88f43c0c/pymongo-4.9.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8083bbe8cb10bb33dca4d93f8223dd8d848215250bb73867374650bac5fe69e1", size = 941617 }, - { url = "https://files.pythonhosted.org/packages/bb/28/40613d8d97fc33bf2b9187446a6746925623aa04a9a27c9b058e97076f7a/pymongo-4.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b8c636bf557c7166e3799bbf1120806ca39e3f06615b141c88d9c9ceae4d8c", size = 941394 }, - { url = "https://files.pythonhosted.org/packages/df/b2/7f1a0d75f538c0dcaa004ea69e28706fa3ca72d848e0a5a7dafd30939fff/pymongo-4.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8aac5dce28454f47576063fbad31ea9789bba67cab86c95788f97aafd810e65b", size = 1907396 }, - { url = "https://files.pythonhosted.org/packages/ba/70/9304bae47a361a4b12adb5be714bad41478c0e5bc3d6cf403b328d6398a0/pymongo-4.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1d5e7123af1fddf15b2b53e58f20bf5242884e671bcc3860f5e954fe13aeddd", size = 1986029 }, - { url = "https://files.pythonhosted.org/packages/ae/51/ac0378d001995c4a705da64a4a2b8e1732f95de5080b752d69f452930cc7/pymongo-4.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe97c847b56d61e533a7af0334193d6b28375b9189effce93129c7e4733794a9", size = 1949088 }, - { url = "https://files.pythonhosted.org/packages/1a/30/e93dc808039dc29fc47acee64f128aa650aacae3e4b57b68e01ff1001cda/pymongo-4.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ad54433a996e2d1985a9cd8fc82538ca8747c95caae2daf453600cc8c317f9", size = 1910516 }, - { url = "https://files.pythonhosted.org/packages/2b/34/895b9cad3bd5342d5ab51a853ed3a814840ce281d55c6928968e9f3f49f5/pymongo-4.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:98b9cade40f5b13e04492a42ae215c3721099be1014ddfe0fbd23f27e4f62c0c", size = 1860499 }, - { url = "https://files.pythonhosted.org/packages/24/7e/167818f324bf2122d45551680671a3c6406a345d3fcace4e737f57bda4e4/pymongo-4.9.2-cp312-cp312-win32.whl", hash = "sha256:dde6068ae7c62ea8ee2c5701f78c6a75618cada7e11f03893687df87709558de", size = 901282 }, - { url = "https://files.pythonhosted.org/packages/12/6b/b7ffa7114177fc1c60ae529512b82629ff7e25d19be88e97f2d0ddd16717/pymongo-4.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:e1ab6cd7cd2d38ffc7ccdc79fdc166c7a91a63f844a96e3e6b2079c054391c68", size = 924925 }, - { url = "https://files.pythonhosted.org/packages/5b/d6/b57ef5f376e2e171218a98b8c30dfd001aa5cac6338aa7f3ca76e6315667/pymongo-4.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1ad79d6a74f439a068caf9a1e2daeabc20bf895263435484bbd49e90fbea7809", size = 995233 }, - { url = "https://files.pythonhosted.org/packages/32/80/4ec79e36e99f86a063d297a334883fb5115ad70e9af46142b8dc33f636fa/pymongo-4.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:877699e21703717507cbbea23e75b419f81a513b50b65531e1698df08b2d7094", size = 995025 }, - { url = "https://files.pythonhosted.org/packages/c4/fd/8f5464321fdf165700f10aec93b07a75c3537be593291ac2f8c8f5f69bd0/pymongo-4.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:bc9322ce7cf116458a637ac10517b0c5926a8211202be6dbdc51dab4d4a9afc8", size = 2167429 }, - { url = "https://files.pythonhosted.org/packages/da/42/0f749d805d17f5b17f48f2ee1aaf2a74e67939607b87b245e5ec9b4c1452/pymongo-4.9.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cca029f46acf475504eedb33c7839f030c4bc4f946dcba12d9a954cc48850b79", size = 2258834 }, - { url = "https://files.pythonhosted.org/packages/b8/52/b0c1b8e9cbeae234dd1108a906f30b680755533b7229f9f645d7e7adad25/pymongo-4.9.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c8c861e77527eec5a4b7363c16030dd0374670b620b08a5300f97594bbf5a40", size = 2216412 }, - { url = "https://files.pythonhosted.org/packages/4d/20/53395473a1023bb6a670b68fbfa937664c75b354c2444463075ff43523e2/pymongo-4.9.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fc70326ae71b3c7b8d6af82f46bb71dafdba3c8f335b29382ae9cf263ef3a5c", size = 2168891 }, - { url = "https://files.pythonhosted.org/packages/01/b7/fa4030279d8a4a9c0a969a719b6b89da8a59795b5cdf129ef553fce6d1f2/pymongo-4.9.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba9d2f6df977fee24437f82f7412460b0628cd6b961c4235c9cff71577a5b61f", size = 2109380 }, - { url = "https://files.pythonhosted.org/packages/f3/55/f252972a039fc6bfca748625c5080d6f88801eb61f118fe79cde47342d6a/pymongo-4.9.2-cp313-cp313-win32.whl", hash = "sha256:b3254769e708bc4aa634745c262081d13c841a80038eff3afd15631540a1d227", size = 946962 }, - { url = "https://files.pythonhosted.org/packages/7b/36/88d8438699ba09b714dece00a4a7462330c1d316f5eaa28db450572236f6/pymongo-4.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:169b85728cc17800344ba17d736375f400ef47c9fbb4c42910c4b3e7c0247382", size = 975113 }, +sdist = { url = "https://files.pythonhosted.org/packages/c5/18/63fd06769a2f47842c374fc5d937445fe8dc2f31b3a859c8bf7df73daa14/pymongo-4.11.1.tar.gz", hash = "sha256:3757ce9257c3486eead45680a8895a0ed9ba27efaf1791fc0cf854367c21c638", size = 2054021 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/98/5030f36a22f602b8ed8fa0921b80c5d1f1e2cb271a5e70e9b4269e54e6c9/pymongo-4.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e596caec72db62a3f438559dfa46d22faefea1967279f553f936ddcb873903df", size = 786132 }, + { url = "https://files.pythonhosted.org/packages/c8/a1/971f4ce571d2e4622ff3360592ec9e674337c1feea2941ee88094b842015/pymongo-4.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15a88b25efcd61c5e539e9204932849b20f393efa330771676e860c4466fe8ad", size = 786420 }, + { url = "https://files.pythonhosted.org/packages/f6/d0/df9b520c1b702b6229a36fa58d7d2d5791bb1d5b9d585eed1ef3d0bad524/pymongo-4.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7073a740aad257f9d2c12cb95a08f17db1f273d422e7ddfed9895738571cac7", size = 1163863 }, + { url = "https://files.pythonhosted.org/packages/32/be/3b7890e9cca9b1218043a656f6d05d2569741ad3e144c877fb6a0c01e9fc/pymongo-4.11.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25b7cadae1d5287b2eed3d901a347f3fa9bc3f898532e1cb7f28a1c9237d824d", size = 1198081 }, + { url = "https://files.pythonhosted.org/packages/17/68/23e88bf9781c2eaa38d17f61c0b86c3191c73420a91deba5030930f2c27b/pymongo-4.11.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fe9589d9a83f6e2abe88f32daa410276eddd038eb8f8f75975cf8ce834cea1f", size = 1181002 }, + { url = 
"https://files.pythonhosted.org/packages/4d/9c/9d19ea4187eecce995ea261ca6ead9b85082246370da10b5d3e8cb0b09c1/pymongo-4.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cc6d48b74e9abe544dd71b000453ad06e65cbfcfd57c7342a9f012f65532eb2", size = 1167024 }, + { url = "https://files.pythonhosted.org/packages/69/5c/453d8815521b1a1c81e83a2083bd49255d96648e5b24fc0ceda131deb717/pymongo-4.11.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1518931a4a26d3cb31a97b9187087c6378cd0b0401d7a7cc160e92223a2a3059", size = 1146171 }, + { url = "https://files.pythonhosted.org/packages/81/ff/cf195d0c7786fd26f1ea654e728b189ae5622f462e4672db17073a688ebe/pymongo-4.11.1-cp310-cp310-win32.whl", hash = "sha256:163c887384cb9fd16e0463128600867138a5a9a5344fc0903db08494b39a2d6e", size = 772072 }, + { url = "https://files.pythonhosted.org/packages/78/c3/8ae8e05e72e3349c2ca935fd7aec22a6e4011dff3e03f97a89e36d90e734/pymongo-4.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:e147e08df329a7d23cbcb6213bc2fd360e51551626be828092fe2027f3473abc", size = 781414 }, + { url = "https://files.pythonhosted.org/packages/20/ee/8caede1100c5d59eee723980e39acfad04c5267d45b4f0827cc42f5de994/pymongo-4.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac125f2782d8fe3f3ff93a396af5482d694093b3be3e06052197096c83acadc", size = 840509 }, + { url = "https://files.pythonhosted.org/packages/33/2f/0df9ff0bb6a7b2812697dd9a3fb728fc0c7b4d035c85acf10eeb0b38579d/pymongo-4.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:681806d3ecaf29b11e16a45c1f4c28f99d9d8283238f7b6ea9eee93b5d7bc6d2", size = 840802 }, + { url = "https://files.pythonhosted.org/packages/58/fb/167e3fef60d2269a1e536cf6edeb871a4b53683f9d03681d2744983e0540/pymongo-4.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50210249a9bf67937e97205a312b96a4b1250b111cbaaff532d7a61bc2b1562d", size = 1409951 }, + { url = "https://files.pythonhosted.org/packages/31/ff/f02900dac6d0374a98319cbbf3d6de3b3cd8cf5d1508d62062efb2084bcc/pymongo-4.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdd0e404d5c3b1203ee61fcfee40a1f062f3780ce272febdc2378797b00401d1", size = 1460907 }, + { url = "https://files.pythonhosted.org/packages/a5/4b/2eed7b9b7f65278123f0e73b39d38df7d99f477cc1eef49b5aa62485b0a1/pymongo-4.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6e46bcd3c2f86f442b721551ed5e5812294e4a93fce42517e173bd41d4cd2d8", size = 1435332 }, + { url = "https://files.pythonhosted.org/packages/55/dc/1ddce3af1dd5156f1f1178857f768c8a88a44f8cc791c1490192ce7fd24c/pymongo-4.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f28d179e7d434869e23f4582c941cb400f75e996cfea472693ec756ee213c685", size = 1414459 }, + { url = "https://files.pythonhosted.org/packages/bb/b9/cfb32aea974c7656d81a47c1a52d7c94bf491b057ffb66ecec070c4f207b/pymongo-4.11.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b56dbb6883ce7adad8588464948e0723a3d881e5549f48c4767f1654e8e4cb7d", size = 1383103 }, + { url = "https://files.pythonhosted.org/packages/76/63/3768c99383e24ca16d59d860a1f799eccd02fc55a4e7588a72bf65740fe5/pymongo-4.11.1-cp311-cp311-win32.whl", hash = "sha256:27bc58e0b1bebb17d2426d0cc191c579f2eeaf9692be880f93fe4180cf850ca7", size = 817671 }, + { url = 
"https://files.pythonhosted.org/packages/1d/2d/044b8511853c8d439817dfee4b1d99060fb76cb08c980877fcb6a6bc1da1/pymongo-4.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:7751e6e99c79057b09441c6ab2a93fae10b4028478aac5b455db8b12f884a3c0", size = 831620 }, + { url = "https://files.pythonhosted.org/packages/cc/8a/81fdd61a0764c0ba1072cd70f67c7f4a83008ceaa61305e20add2ad580c6/pymongo-4.11.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f96683f1dec7d28f12fe43a4d5c0df35d6b80348a9fbf5aac47fa284332a1f92", size = 895365 }, + { url = "https://files.pythonhosted.org/packages/05/60/32910044b2329b7a580a1b4d4f895ecb9616cdffeb57c2d7622214659ac5/pymongo-4.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:157e6a722d051c4bab3e6bc34a1f80fc98101cf2d12139a94e51638d023198c5", size = 895061 }, + { url = "https://files.pythonhosted.org/packages/00/11/30d3351f24cf8e652a0d5fe76e56a50478ea7e81dabcfea7339b1338cccd/pymongo-4.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74503e853758e1eaa1cad2df9c08c8c35a3d26222cf6426d2cde4b2e8593b9b3", size = 1673794 }, + { url = "https://files.pythonhosted.org/packages/a7/90/5ff61e8bad861621361868addeb34c4d2539a4c973a5767d1a266878cb32/pymongo-4.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b630596089106c968ddd252bde3fe692c420e24f214dd39ca517d26343d81012", size = 1738027 }, + { url = "https://files.pythonhosted.org/packages/d0/91/1fdf2843a664f01b8ca83d22cd7accb48f3a5371e61813a5451bc33f93c3/pymongo-4.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7007669eef871079d39a9bbcda0fbcd4252f9b575592804343d0b5c05849d65b", size = 1707052 }, + { url = "https://files.pythonhosted.org/packages/c6/f7/1bd23ea674c957b24256f9ef87875892801cf77b3d2535e59dd78b04db2e/pymongo-4.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d1da6201e1350cfcd4deab599b32237ac2ac591180d44553a2c8e614f2c0e", size = 1677027 }, + { url = "https://files.pythonhosted.org/packages/62/42/077b138efd223ed3cd03f3b8622d2315096e7cd1d9476cd8f1cf219c420e/pymongo-4.11.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:908e65ab42cd4bf1ffeaafe8f11bb86b3f804d54227058794e33fff2963ccc86", size = 1636150 }, + { url = "https://files.pythonhosted.org/packages/c9/a5/f958fcdc944f97d02b6a46c94dbbcdde0d355639c8564974b31b4685e97a/pymongo-4.11.1-cp312-cp312-win32.whl", hash = "sha256:2d1d956c15dd05f1e41c61f0dbcaec59f274db4814cff2c3d9c2508f58004c39", size = 864029 }, + { url = "https://files.pythonhosted.org/packages/d2/e2/b1747eabad8bf172aa66fae50ed7290c4992b8adbeaddbe31944755dbed4/pymongo-4.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:c71655f4188c70032ba56ac7ead688449e4f86a4ccd8e57201ee283f2f591e1d", size = 882299 }, + { url = "https://files.pythonhosted.org/packages/71/b6/dc403a4dda2adaf0f0088d3fcfe6eb17c9e16098eca98f705f2a8e73e693/pymongo-4.11.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f845b46d77a5bcf0c9ee16f11c5bc84c63f4668d9ea4fc54cd923c8d48a1d521", size = 949622 }, + { url = "https://files.pythonhosted.org/packages/52/54/0572ffa3d1c43fec0bdd065c5008b57f7ce4da90e6c6ade0a3c32f34c21e/pymongo-4.11.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aadea45e01103f6ee4e80d76d4a27393a4e2bd93472ce4ebb894781f395e1053", size = 949301 }, + { url = "https://files.pythonhosted.org/packages/7f/d6/5bf309a20892f47898e7bc626cb3169a1120b16b2d7b7a60c3fab607907c/pymongo-4.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a63348c850df796199abef7e9afbd86c34449f56731c7ec70b3901df1f5c135b", size = 1937689 }, + { url = "https://files.pythonhosted.org/packages/16/03/1c792ab1e1e5a48fde005bbf739f04846ae48c8c8543a2f1e74ce42d465b/pymongo-4.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dd7656794bfbfbe10723813332ec33eed29bd9bb7fc122c63829fd445eb8425", size = 2015119 }, + { url = "https://files.pythonhosted.org/packages/ad/cc/8765bbec58392929f414b5b26f4c3fe333bfb75ad2f03e92fc48c81bd25f/pymongo-4.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7146ae04300ce6f83b75c639e97c3d0ce873f30edaac4b719ae173e886b9ff90", size = 1978788 }, + { url = "https://files.pythonhosted.org/packages/77/a6/b700ccb2695f3233a12943e78760f68adc19516cf120949ad7c67fdc81a9/pymongo-4.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698fb3d13126c0719077c98b40378cb9a6f4ab4a72b7691779aa01f1f6c66493", size = 1939607 }, + { url = "https://files.pythonhosted.org/packages/d1/10/c0e4c38c7a6d318a80a4dcd8cfc42bfb8a072145f152089e5bc8d60db902/pymongo-4.11.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f415d9569720f408cc4dcc171f60299d454b0414f120666e6fdd349d414bf010", size = 1889041 }, + { url = "https://files.pythonhosted.org/packages/1d/86/7145841c425e4f6b012116db38e3bf4652dce7b8537961b2391e3c52e051/pymongo-4.11.1-cp313-cp313-win32.whl", hash = "sha256:4aa2c40e391ca29a337bef2b46b495c3f24b5696a87a58f0a0676a8bf131f9f8", size = 910368 }, + { url = "https://files.pythonhosted.org/packages/a0/d4/97632e8f230e95a877220c785a69478cae97610e1ec48b5f9be59a926b29/pymongo-4.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:1f871efa14a1f368559edff39ec03799ca108bfa8e1ba330b7ffc05eb958661f", size = 932942 }, + { url = "https://files.pythonhosted.org/packages/db/61/f719841bc59d3d33c6002950e8b9978705b6f9f1dd5efb66e73fe6919a7d/pymongo-4.11.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d293cec18624825937bd7f1d8bacf16104c79ced45a8ada93f08ec8a7a2ad17a", size = 1006140 }, + { url = "https://files.pythonhosted.org/packages/cb/05/f43900c675e158cc024bc82a062dfcaaf12d4d7f574947b73f41d843d189/pymongo-4.11.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7b3ea3494f3e166a524529bb05a4fdda97afd77031fed3a63862fd815288c9df", size = 1006124 }, + { url = "https://files.pythonhosted.org/packages/31/2f/7bccadbcf272b5e8c617a6a329b07671ecfd1faea080d9ab311240b93737/pymongo-4.11.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d12f4c4579076b7351c63378e22f43d4ce4ed4f2c93208b653c4752f18f47309", size = 2266399 }, + { url = "https://files.pythonhosted.org/packages/78/ac/6bf48a7c99b574c9afcb0f68b7a8b9bf9617a1a54773d0f8b1568f8a079c/pymongo-4.11.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0a8aba4818350d2a463e084ae2426d395e725525fe86bd0219240b265dc1ca52", size = 2353616 }, + { url = "https://files.pythonhosted.org/packages/3c/f3/c4cd608ddda2dbc7fa668dd8356bb728313b8eec5b118eca3fa937d4fc8c/pymongo-4.11.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f97f62e6edde15d1d3d08abd7e43f1787ee9e672b1bb8e9d9f5fd6ded24f5599", size = 2312480 }, + { url = "https://files.pythonhosted.org/packages/c8/3e/2261ac8e0b6a150d92d35ba2db30b8387c78f9ecba725b0b6a363250f9c6/pymongo-4.11.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a4e82dce301c97bb132dec28a487c1a609dc67948e9db7cbd23485875367204", size = 2263792 }, + { url = 
"https://files.pythonhosted.org/packages/73/80/41568f1ff09cb73976f7e6f9d11dae63003e4c1156834366ad03f91f27df/pymongo-4.11.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:985a614ec24519f4a3d82aafb766c3f782a452fc46b32112d508a4e19b33fff3", size = 2202805 }, + { url = "https://files.pythonhosted.org/packages/0c/d3/d7ca22d5eb654a451e18f616442b7c6d472ffe76560d6623a2a4ddfd4854/pymongo-4.11.1-cp313-cp313t-win32.whl", hash = "sha256:889d20850d5aaa4f19814462c06488553e70ed4c62195dbaad5d5662884778af", size = 959247 }, + { url = "https://files.pythonhosted.org/packages/95/7b/8d0767251e687966cf19a4ad032d597ab135d26af5ecebbdb8895ea92cf0/pymongo-4.11.1-cp313-cp313t-win_amd64.whl", hash = "sha256:3854db4be39cb9e0c34add1fd7e515deab0b4ee30f3cc3978e057746d119ac12", size = 987871 }, +] + +[[package]] +name = "pyopenssl" +version = "25.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/26/e25b4a374b4639e0c235527bbe31c0524f26eda701d79456a7e1877f4cc5/pyopenssl-25.0.0.tar.gz", hash = "sha256:cd2cef799efa3936bb08e8ccb9433a575722b9dd986023f1cabc4ae64e9dac16", size = 179573 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/d7/eb76863d2060dcbe7c7e6cccfd95ac02ea0b9acc37745a0d99ff6457aefb/pyOpenSSL-25.0.0-py3-none-any.whl", hash = "sha256:424c247065e46e76a37411b9ab1782541c23bb658bf003772c3405fbaa128e90", size = 56453 }, ] [[package]] @@ -3999,14 +4218,14 @@ wheels = [ [[package]] name = "pytest-asyncio" -version = "0.25.2" +version = "0.25.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/df/adcc0d60f1053d74717d21d58c0048479e9cab51464ce0d2965b086bd0e2/pytest_asyncio-0.25.2.tar.gz", hash = "sha256:3f8ef9a98f45948ea91a0ed3dc4268b5326c0e7bce73892acc654df4262ad45f", size = 53950 } +sdist = { url = "https://files.pythonhosted.org/packages/f2/a8/ecbc8ede70921dd2f544ab1cadd3ff3bf842af27f87bbdea774c7baa1d38/pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a", size = 54239 } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/d8/defa05ae50dcd6019a95527200d3b3980043df5aa445d40cb0ef9f7f98ab/pytest_asyncio-0.25.2-py3-none-any.whl", hash = "sha256:0d0bb693f7b99da304a0634afc0a4b19e49d5e0de2d670f38dc4bfa5727c5075", size = 19400 }, + { url = "https://files.pythonhosted.org/packages/67/17/3493c5624e48fd97156ebaec380dcaafee9506d7e2c46218ceebbb57d7de/pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3", size = 19467 }, ] [[package]] @@ -4073,13 +4292,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, ] +[[package]] +name = "python-ulid" +version = "3.0.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/db/e5e67aeca9c2420cb91f94007f30693cc3628ae9783a565fd33ffb3fbfdd/python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f", size = 28822 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/63/4e/cc2ba2c0df2589f35a4db8473b8c2ba9bbfc4acdec4a94f1c78934d2350f/python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31", size = 11194 }, +] + [[package]] name = "pytz" -version = "2024.2" +version = "2025.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692 } +sdist = { url = "https://files.pythonhosted.org/packages/5f/57/df1c9157c8d5a05117e455d66fd7cf6dbc46974f832b1058ed4856785d8a/pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e", size = 319617 } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002 }, + { url = "https://files.pythonhosted.org/packages/eb/38/ac33370d784287baa1c3d538978b5e2ea064d4c1b93ffbd12826c190dd10/pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57", size = 507930 }, ] [[package]] @@ -4147,93 +4375,127 @@ wheels = [ [[package]] name = "pyzmq" -version = "26.2.0" +version = "26.2.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "(implementation_name == 'pypy' and sys_platform == 'darwin') or (implementation_name == 'pypy' and sys_platform == 'linux') or (implementation_name == 'pypy' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fd/05/bed626b9f7bb2322cdbbf7b4bd8f54b1b617b0d2ab2d3547d6e39428a48e/pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f", size = 271975 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1f/a8/9837c39aba390eb7d01924ace49d761c8dbe7bc2d6082346d00c8332e431/pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629", size = 1340058 }, - { url = "https://files.pythonhosted.org/packages/a2/1f/a006f2e8e4f7d41d464272012695da17fb95f33b54342612a6890da96ff6/pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b", size = 1008818 }, - { url = "https://files.pythonhosted.org/packages/b6/09/b51b6683fde5ca04593a57bbe81788b6b43114d8f8ee4e80afc991e14760/pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764", size = 673199 }, - { url = "https://files.pythonhosted.org/packages/c9/78/486f3e2e824f3a645238332bf5a4c4b4477c3063033a27c1e4052358dee2/pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c", size = 911762 }, - { url = 
"https://files.pythonhosted.org/packages/5e/3b/2eb1667c9b866f53e76ee8b0c301b0469745a23bd5a87b7ee3d5dd9eb6e5/pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a", size = 868773 }, - { url = "https://files.pythonhosted.org/packages/16/29/ca99b4598a9dc7e468b5417eda91f372b595be1e3eec9b7cbe8e5d3584e8/pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88", size = 868834 }, - { url = "https://files.pythonhosted.org/packages/ad/e5/9efaeb1d2f4f8c50da04144f639b042bc52869d3a206d6bf672ab3522163/pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f", size = 1202861 }, - { url = "https://files.pythonhosted.org/packages/c3/62/c721b5608a8ac0a69bb83cbb7d07a56f3ff00b3991a138e44198a16f94c7/pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282", size = 1515304 }, - { url = "https://files.pythonhosted.org/packages/87/84/e8bd321aa99b72f48d4606fc5a0a920154125bd0a4608c67eab742dab087/pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea", size = 1414712 }, - { url = "https://files.pythonhosted.org/packages/cd/cd/420e3fd1ac6977b008b72e7ad2dae6350cc84d4c5027fc390b024e61738f/pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2", size = 578113 }, - { url = "https://files.pythonhosted.org/packages/5c/57/73930d56ed45ae0cb4946f383f985c855c9b3d4063f26416998f07523c0e/pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971", size = 641631 }, - { url = "https://files.pythonhosted.org/packages/61/d2/ae6ac5c397f1ccad59031c64beaafce7a0d6182e0452cc48f1c9c87d2dd0/pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa", size = 543528 }, - { url = "https://files.pythonhosted.org/packages/12/20/de7442172f77f7c96299a0ac70e7d4fb78cd51eca67aa2cf552b66c14196/pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218", size = 1340639 }, - { url = "https://files.pythonhosted.org/packages/98/4d/5000468bd64c7910190ed0a6c76a1ca59a68189ec1f007c451dc181a22f4/pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4", size = 1008710 }, - { url = "https://files.pythonhosted.org/packages/e1/bf/c67fd638c2f9fbbab8090a3ee779370b97c82b84cc12d0c498b285d7b2c0/pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef", size = 673129 }, - { url = "https://files.pythonhosted.org/packages/86/94/99085a3f492aa538161cbf27246e8886ff850e113e0c294a5b8245f13b52/pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317", size = 910107 }, - { url = "https://files.pythonhosted.org/packages/31/1d/346809e8a9b999646d03f21096428453465b1bca5cd5c64ecd048d9ecb01/pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf", size = 867960 }, - { url = 
"https://files.pythonhosted.org/packages/ab/68/6fb6ae5551846ad5beca295b7bca32bf0a7ce19f135cb30e55fa2314e6b6/pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e", size = 869204 }, - { url = "https://files.pythonhosted.org/packages/0f/f9/18417771dee223ccf0f48e29adf8b4e25ba6d0e8285e33bcbce078070bc3/pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37", size = 1203351 }, - { url = "https://files.pythonhosted.org/packages/e0/46/f13e67fe0d4f8a2315782cbad50493de6203ea0d744610faf4d5f5b16e90/pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3", size = 1514204 }, - { url = "https://files.pythonhosted.org/packages/50/11/ddcf7343b7b7a226e0fc7b68cbf5a5bb56291fac07f5c3023bb4c319ebb4/pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6", size = 1414339 }, - { url = "https://files.pythonhosted.org/packages/01/14/1c18d7d5b7be2708f513f37c61bfadfa62161c10624f8733f1c8451b3509/pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4", size = 576928 }, - { url = "https://files.pythonhosted.org/packages/3b/1b/0a540edd75a41df14ec416a9a500b9fec66e554aac920d4c58fbd5756776/pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5", size = 642317 }, - { url = "https://files.pythonhosted.org/packages/98/77/1cbfec0358078a4c5add529d8a70892db1be900980cdb5dd0898b3d6ab9d/pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003", size = 543834 }, - { url = "https://files.pythonhosted.org/packages/28/2f/78a766c8913ad62b28581777ac4ede50c6d9f249d39c2963e279524a1bbe/pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9", size = 1343105 }, - { url = "https://files.pythonhosted.org/packages/b7/9c/4b1e2d3d4065be715e007fe063ec7885978fad285f87eae1436e6c3201f4/pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52", size = 1008365 }, - { url = "https://files.pythonhosted.org/packages/4f/ef/5a23ec689ff36d7625b38d121ef15abfc3631a9aecb417baf7a4245e4124/pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08", size = 665923 }, - { url = "https://files.pythonhosted.org/packages/ae/61/d436461a47437d63c6302c90724cf0981883ec57ceb6073873f32172d676/pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5", size = 903400 }, - { url = "https://files.pythonhosted.org/packages/47/42/fc6d35ecefe1739a819afaf6f8e686f7f02a4dd241c78972d316f403474c/pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae", size = 860034 }, - { url = "https://files.pythonhosted.org/packages/07/3b/44ea6266a6761e9eefaa37d98fabefa112328808ac41aa87b4bbb668af30/pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711", size = 860579 }, - { url = 
"https://files.pythonhosted.org/packages/38/6f/4df2014ab553a6052b0e551b37da55166991510f9e1002c89cab7ce3b3f2/pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6", size = 1196246 }, - { url = "https://files.pythonhosted.org/packages/38/9d/ee240fc0c9fe9817f0c9127a43238a3e28048795483c403cc10720ddef22/pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3", size = 1507441 }, - { url = "https://files.pythonhosted.org/packages/85/4f/01711edaa58d535eac4a26c294c617c9a01f09857c0ce191fd574d06f359/pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b", size = 1406498 }, - { url = "https://files.pythonhosted.org/packages/07/18/907134c85c7152f679ed744e73e645b365f3ad571f38bdb62e36f347699a/pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7", size = 575533 }, - { url = "https://files.pythonhosted.org/packages/ce/2c/a6f4a20202a4d3c582ad93f95ee78d79bbdc26803495aec2912b17dbbb6c/pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a", size = 637768 }, - { url = "https://files.pythonhosted.org/packages/5f/0e/eb16ff731632d30554bf5af4dbba3ffcd04518219d82028aea4ae1b02ca5/pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b", size = 540675 }, - { url = "https://files.pythonhosted.org/packages/04/a7/0f7e2f6c126fe6e62dbae0bc93b1bd3f1099cf7fea47a5468defebe3f39d/pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726", size = 1006564 }, - { url = "https://files.pythonhosted.org/packages/31/b6/a187165c852c5d49f826a690857684333a6a4a065af0a6015572d2284f6a/pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3", size = 1340447 }, - { url = "https://files.pythonhosted.org/packages/68/ba/f4280c58ff71f321602a6e24fd19879b7e79793fb8ab14027027c0fb58ef/pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50", size = 665485 }, - { url = "https://files.pythonhosted.org/packages/77/b5/c987a5c53c7d8704216f29fc3d810b32f156bcea488a940e330e1bcbb88d/pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb", size = 903484 }, - { url = "https://files.pythonhosted.org/packages/29/c9/07da157d2db18c72a7eccef8e684cefc155b712a88e3d479d930aa9eceba/pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187", size = 859981 }, - { url = "https://files.pythonhosted.org/packages/43/09/e12501bd0b8394b7d02c41efd35c537a1988da67fc9c745cae9c6c776d31/pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b", size = 860334 }, - { url = "https://files.pythonhosted.org/packages/eb/ff/f5ec1d455f8f7385cc0a8b2acd8c807d7fade875c14c44b85c1bddabae21/pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18", size = 1196179 }, - { url = 
"https://files.pythonhosted.org/packages/ec/8a/bb2ac43295b1950fe436a81fc5b298be0b96ac76fb029b514d3ed58f7b27/pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115", size = 1507668 }, - { url = "https://files.pythonhosted.org/packages/a9/49/dbc284ebcfd2dca23f6349227ff1616a7ee2c4a35fe0a5d6c3deff2b4fed/pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e", size = 1406539 }, - { url = "https://files.pythonhosted.org/packages/00/68/093cdce3fe31e30a341d8e52a1ad86392e13c57970d722c1f62a1d1a54b6/pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5", size = 575567 }, - { url = "https://files.pythonhosted.org/packages/92/ae/6cc4657148143412b5819b05e362ae7dd09fb9fe76e2a539dcff3d0386bc/pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad", size = 637551 }, - { url = "https://files.pythonhosted.org/packages/6c/67/fbff102e201688f97c8092e4c3445d1c1068c2f27bbd45a578df97ed5f94/pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797", size = 540378 }, - { url = "https://files.pythonhosted.org/packages/3f/fe/2d998380b6e0122c6c4bdf9b6caf490831e5f5e2d08a203b5adff060c226/pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a", size = 1007378 }, - { url = "https://files.pythonhosted.org/packages/4a/f4/30d6e7157f12b3a0390bde94d6a8567cdb88846ed068a6e17238a4ccf600/pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc", size = 1329532 }, - { url = "https://files.pythonhosted.org/packages/82/86/3fe917870e15ee1c3ad48229a2a64458e36036e64b4afa9659045d82bfa8/pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5", size = 653242 }, - { url = "https://files.pythonhosted.org/packages/50/2d/242e7e6ef6c8c19e6cb52d095834508cd581ffb925699fd3c640cdc758f1/pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672", size = 888404 }, - { url = "https://files.pythonhosted.org/packages/ac/11/7270566e1f31e4ea73c81ec821a4b1688fd551009a3d2bab11ec66cb1e8f/pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797", size = 845858 }, - { url = "https://files.pythonhosted.org/packages/91/d5/72b38fbc69867795c8711bdd735312f9fef1e3d9204e2f63ab57085434b9/pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386", size = 847375 }, - { url = "https://files.pythonhosted.org/packages/dd/9a/10ed3c7f72b4c24e719c59359fbadd1a27556a28b36cdf1cd9e4fb7845d5/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306", size = 1183489 }, - { url = "https://files.pythonhosted.org/packages/72/2d/8660892543fabf1fe41861efa222455811adac9f3c0818d6c3170a1153e3/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6", size = 1492932 }, - { url = 
"https://files.pythonhosted.org/packages/7b/d6/32fd69744afb53995619bc5effa2a405ae0d343cd3e747d0fbc43fe894ee/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0", size = 1392485 }, - { url = "https://files.pythonhosted.org/packages/53/fb/36b2b2548286e9444e52fcd198760af99fd89102b5be50f0660fcfe902df/pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072", size = 906955 }, - { url = "https://files.pythonhosted.org/packages/77/8f/6ce54f8979a01656e894946db6299e2273fcee21c8e5fa57c6295ef11f57/pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1", size = 565701 }, - { url = "https://files.pythonhosted.org/packages/ee/1c/bf8cd66730a866b16db8483286078892b7f6536f8c389fb46e4beba0a970/pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d", size = 794312 }, - { url = "https://files.pythonhosted.org/packages/71/43/91fa4ff25bbfdc914ab6bafa0f03241d69370ef31a761d16bb859f346582/pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca", size = 752775 }, - { url = "https://files.pythonhosted.org/packages/ec/d2/3b2ab40f455a256cb6672186bea95cd97b459ce4594050132d71e76f0d6f/pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c", size = 550762 }, +sdist = { url = "https://files.pythonhosted.org/packages/5a/e3/8d0382cb59feb111c252b54e8728257416a38ffcb2243c4e4775a3c990fe/pyzmq-26.2.1.tar.gz", hash = "sha256:17d72a74e5e9ff3829deb72897a175333d3ef5b5413948cae3cf7ebf0b02ecca", size = 278433 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/70/3d/c2d9d46c033d1b51692ea49a22439f7f66d91d5c938e8b5c56ed7a2151c2/pyzmq-26.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:f39d1227e8256d19899d953e6e19ed2ccb689102e6d85e024da5acf410f301eb", size = 1345451 }, + { url = "https://files.pythonhosted.org/packages/0e/df/4754a8abcdeef280651f9bb51446c47659910940b392a66acff7c37f5cef/pyzmq-26.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a23948554c692df95daed595fdd3b76b420a4939d7a8a28d6d7dea9711878641", size = 942766 }, + { url = "https://files.pythonhosted.org/packages/74/da/e6053a3b13c912eded6c2cdeee22ff3a4c33820d17f9eb24c7b6e957ffe7/pyzmq-26.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95f5728b367a042df146cec4340d75359ec6237beebf4a8f5cf74657c65b9257", size = 678488 }, + { url = "https://files.pythonhosted.org/packages/9e/50/614934145244142401ca174ca81071777ab93aa88173973ba0154f491e09/pyzmq-26.2.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95f7b01b3f275504011cf4cf21c6b885c8d627ce0867a7e83af1382ebab7b3ff", size = 917115 }, + { url = "https://files.pythonhosted.org/packages/80/2b/ebeb7bc4fc8e9e61650b2e09581597355a4341d413fa9b2947d7a6558119/pyzmq-26.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a00370a2ef2159c310e662c7c0f2d030f437f35f478bb8b2f70abd07e26b24", size = 874162 }, + { url = "https://files.pythonhosted.org/packages/79/48/93210621c331ad16313dc2849801411fbae10d91d878853933f2a85df8e7/pyzmq-26.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = 
"sha256:8531ed35dfd1dd2af95f5d02afd6545e8650eedbf8c3d244a554cf47d8924459", size = 874180 }, + { url = "https://files.pythonhosted.org/packages/f0/8b/40924b4d8e33bfdd54c1970fb50f327e39b90b902f897cf09b30b2e9ac48/pyzmq-26.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cdb69710e462a38e6039cf17259d328f86383a06c20482cc154327968712273c", size = 1208139 }, + { url = "https://files.pythonhosted.org/packages/c8/b2/82d6675fc89bd965eae13c45002c792d33f06824589844b03f8ea8fc6d86/pyzmq-26.2.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e7eeaef81530d0b74ad0d29eec9997f1c9230c2f27242b8d17e0ee67662c8f6e", size = 1520666 }, + { url = "https://files.pythonhosted.org/packages/9d/e2/5ff15f2d3f920dcc559d477bd9bb3faacd6d79fcf7c5448e585c78f84849/pyzmq-26.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:361edfa350e3be1f987e592e834594422338d7174364763b7d3de5b0995b16f3", size = 1420056 }, + { url = "https://files.pythonhosted.org/packages/40/a2/f9bbeccf7f75aa0d8963e224e5730abcefbf742e1f2ae9ea60fd9d6ff72b/pyzmq-26.2.1-cp310-cp310-win32.whl", hash = "sha256:637536c07d2fb6a354988b2dd1d00d02eb5dd443f4bbee021ba30881af1c28aa", size = 583874 }, + { url = "https://files.pythonhosted.org/packages/56/b1/44f513135843272f0e12f5aebf4af35839e2a88eb45411f2c8c010d8c856/pyzmq-26.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:45fad32448fd214fbe60030aa92f97e64a7140b624290834cc9b27b3a11f9473", size = 647367 }, + { url = "https://files.pythonhosted.org/packages/27/9c/1bef14a37b02d651a462811bbdb1390b61cd4a5b5e95cbd7cc2d60ef848c/pyzmq-26.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:d9da0289d8201c8a29fd158aaa0dfe2f2e14a181fd45e2dc1fbf969a62c1d594", size = 561784 }, + { url = "https://files.pythonhosted.org/packages/b9/03/5ecc46a6ed5971299f5c03e016ca637802d8660e44392bea774fb7797405/pyzmq-26.2.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:c059883840e634a21c5b31d9b9a0e2b48f991b94d60a811092bc37992715146a", size = 1346032 }, + { url = "https://files.pythonhosted.org/packages/40/51/48fec8f990ee644f461ff14c8fe5caa341b0b9b3a0ad7544f8ef17d6f528/pyzmq-26.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed038a921df836d2f538e509a59cb638df3e70ca0fcd70d0bf389dfcdf784d2a", size = 943324 }, + { url = "https://files.pythonhosted.org/packages/c1/f4/f322b389727c687845e38470b48d7a43c18a83f26d4d5084603c6c3f79ca/pyzmq-26.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9027a7fcf690f1a3635dc9e55e38a0d6602dbbc0548935d08d46d2e7ec91f454", size = 678418 }, + { url = "https://files.pythonhosted.org/packages/a8/df/2834e3202533bd05032d83e02db7ac09fa1be853bbef59974f2b2e3a8557/pyzmq-26.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d75fcb00a1537f8b0c0bb05322bc7e35966148ffc3e0362f0369e44a4a1de99", size = 915466 }, + { url = "https://files.pythonhosted.org/packages/b5/e2/45c0f6e122b562cb8c6c45c0dcac1160a4e2207385ef9b13463e74f93031/pyzmq-26.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0019cc804ac667fb8c8eaecdb66e6d4a68acf2e155d5c7d6381a5645bd93ae4", size = 873347 }, + { url = "https://files.pythonhosted.org/packages/de/b9/3e0fbddf8b87454e914501d368171466a12550c70355b3844115947d68ea/pyzmq-26.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f19dae58b616ac56b96f2e2290f2d18730a898a171f447f491cc059b073ca1fa", size = 874545 }, + { url = "https://files.pythonhosted.org/packages/1f/1c/1ee41d6e10b2127263b1994bc53b9e74ece015b0d2c0a30e0afaf69b78b2/pyzmq-26.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:f5eeeb82feec1fc5cbafa5ee9022e87ffdb3a8c48afa035b356fcd20fc7f533f", size = 1208630 }, + { url = "https://files.pythonhosted.org/packages/3d/a9/50228465c625851a06aeee97c74f253631f509213f979166e83796299c60/pyzmq-26.2.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:000760e374d6f9d1a3478a42ed0c98604de68c9e94507e5452951e598ebecfba", size = 1519568 }, + { url = "https://files.pythonhosted.org/packages/c6/f2/6360b619e69da78863c2108beb5196ae8b955fe1e161c0b886b95dc6b1ac/pyzmq-26.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:817fcd3344d2a0b28622722b98500ae9c8bfee0f825b8450932ff19c0b15bebd", size = 1419677 }, + { url = "https://files.pythonhosted.org/packages/da/d5/f179da989168f5dfd1be8103ef508ade1d38a8078dda4f10ebae3131a490/pyzmq-26.2.1-cp311-cp311-win32.whl", hash = "sha256:88812b3b257f80444a986b3596e5ea5c4d4ed4276d2b85c153a6fbc5ca457ae7", size = 582682 }, + { url = "https://files.pythonhosted.org/packages/60/50/e5b2e9de3ffab73ff92bee736216cf209381081fa6ab6ba96427777d98b1/pyzmq-26.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:ef29630fde6022471d287c15c0a2484aba188adbfb978702624ba7a54ddfa6c1", size = 648128 }, + { url = "https://files.pythonhosted.org/packages/d9/fe/7bb93476dd8405b0fc9cab1fd921a08bd22d5e3016aa6daea1a78d54129b/pyzmq-26.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:f32718ee37c07932cc336096dc7403525301fd626349b6eff8470fe0f996d8d7", size = 562465 }, + { url = "https://files.pythonhosted.org/packages/9c/b9/260a74786f162c7f521f5f891584a51d5a42fd15f5dcaa5c9226b2865fcc/pyzmq-26.2.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:a6549ecb0041dafa55b5932dcbb6c68293e0bd5980b5b99f5ebb05f9a3b8a8f3", size = 1348495 }, + { url = "https://files.pythonhosted.org/packages/bf/73/8a0757e4b68f5a8ccb90ddadbb76c6a5f880266cdb18be38c99bcdc17aaa/pyzmq-26.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0250c94561f388db51fd0213cdccbd0b9ef50fd3c57ce1ac937bf3034d92d72e", size = 945035 }, + { url = "https://files.pythonhosted.org/packages/cf/de/f02ec973cd33155bb772bae33ace774acc7cc71b87b25c4829068bec35de/pyzmq-26.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ee4297d9e4b34b5dc1dd7ab5d5ea2cbba8511517ef44104d2915a917a56dc8", size = 671213 }, + { url = "https://files.pythonhosted.org/packages/d1/80/8fc583085f85ac91682744efc916888dd9f11f9f75a31aef1b78a5486c6c/pyzmq-26.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2a9cb17fd83b7a3a3009901aca828feaf20aa2451a8a487b035455a86549c09", size = 908750 }, + { url = "https://files.pythonhosted.org/packages/c3/25/0b4824596f261a3cc512ab152448b383047ff5f143a6906a36876415981c/pyzmq-26.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:786dd8a81b969c2081b31b17b326d3a499ddd1856e06d6d79ad41011a25148da", size = 865416 }, + { url = "https://files.pythonhosted.org/packages/a1/d1/6fda77a034d02034367b040973fd3861d945a5347e607bd2e98c99f20599/pyzmq-26.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2d88ba221a07fc2c5581565f1d0fe8038c15711ae79b80d9462e080a1ac30435", size = 865922 }, + { url = "https://files.pythonhosted.org/packages/ad/81/48f7fd8a71c427412e739ce576fc1ee14f3dc34527ca9b0076e471676183/pyzmq-26.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c84c1297ff9f1cd2440da4d57237cb74be21fdfe7d01a10810acba04e79371a", size = 1201526 }, + { url = "https://files.pythonhosted.org/packages/c7/d8/818f15c6ef36b5450e435cbb0d3a51599fc884a5d2b27b46b9c00af68ef1/pyzmq-26.2.1-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:46d4ebafc27081a7f73a0f151d0c38d4291656aa134344ec1f3d0199ebfbb6d4", size = 1512808 }, + { url = "https://files.pythonhosted.org/packages/d9/c4/b3edb7d0ae82ad6fb1a8cdb191a4113c427a01e85139906f3b655b07f4f8/pyzmq-26.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:91e2bfb8e9a29f709d51b208dd5f441dc98eb412c8fe75c24ea464734ccdb48e", size = 1411836 }, + { url = "https://files.pythonhosted.org/packages/69/1c/151e3d42048f02cc5cd6dfc241d9d36b38375b4dee2e728acb5c353a6d52/pyzmq-26.2.1-cp312-cp312-win32.whl", hash = "sha256:4a98898fdce380c51cc3e38ebc9aa33ae1e078193f4dc641c047f88b8c690c9a", size = 581378 }, + { url = "https://files.pythonhosted.org/packages/b6/b9/d59a7462848aaab7277fddb253ae134a570520115d80afa85e952287e6bc/pyzmq-26.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:a0741edbd0adfe5f30bba6c5223b78c131b5aa4a00a223d631e5ef36e26e6d13", size = 643737 }, + { url = "https://files.pythonhosted.org/packages/55/09/f37e707937cce328944c1d57e5e50ab905011d35252a0745c4f7e5822a76/pyzmq-26.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:e5e33b1491555843ba98d5209439500556ef55b6ab635f3a01148545498355e5", size = 558303 }, + { url = "https://files.pythonhosted.org/packages/4f/2e/fa7a91ce349975971d6aa925b4c7e1a05abaae99b97ade5ace758160c43d/pyzmq-26.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:099b56ef464bc355b14381f13355542e452619abb4c1e57a534b15a106bf8e23", size = 942331 }, + { url = "https://files.pythonhosted.org/packages/64/2b/1f10b34b6dc7ff4b40f668ea25ba9b8093ce61d874c784b90229b367707b/pyzmq-26.2.1-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:651726f37fcbce9f8dd2a6dab0f024807929780621890a4dc0c75432636871be", size = 1345831 }, + { url = "https://files.pythonhosted.org/packages/4c/8d/34884cbd4a8ec050841b5fb58d37af136766a9f95b0b2634c2971deb09da/pyzmq-26.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57dd4d91b38fa4348e237a9388b4423b24ce9c1695bbd4ba5a3eada491e09399", size = 670773 }, + { url = "https://files.pythonhosted.org/packages/0f/f4/d4becfcf9e416ad2564f18a6653f7c6aa917da08df5c3760edb0baa1c863/pyzmq-26.2.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d51a7bfe01a48e1064131f3416a5439872c533d756396be2b39e3977b41430f9", size = 908836 }, + { url = "https://files.pythonhosted.org/packages/07/fa/ab105f1b86b85cb2e821239f1d0900fccd66192a91d97ee04661b5436b4d/pyzmq-26.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7154d228502e18f30f150b7ce94f0789d6b689f75261b623f0fdc1eec642aab", size = 865369 }, + { url = "https://files.pythonhosted.org/packages/c9/48/15d5f415504572dd4b92b52db5de7a5befc76bb75340ba9f36f71306a66d/pyzmq-26.2.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:f1f31661a80cc46aba381bed475a9135b213ba23ca7ff6797251af31510920ce", size = 865676 }, + { url = "https://files.pythonhosted.org/packages/7e/35/2d91bcc7ccbb56043dd4d2c1763f24a8de5f05e06a134f767a7fb38e149c/pyzmq-26.2.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:290c96f479504439b6129a94cefd67a174b68ace8a8e3f551b2239a64cfa131a", size = 1201457 }, + { url = "https://files.pythonhosted.org/packages/6d/bb/aa7c5119307a5762b8dca6c9db73e3ab4bccf32b15d7c4f376271ff72b2b/pyzmq-26.2.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f2c307fbe86e18ab3c885b7e01de942145f539165c3360e2af0f094dd440acd9", size = 1513035 }, + { url = "https://files.pythonhosted.org/packages/4f/4c/527e6650c2fccec7750b783301329c8a8716d59423818afb67282304ce5a/pyzmq-26.2.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:b314268e716487bfb86fcd6f84ebbe3e5bec5fac75fdf42bc7d90fdb33f618ad", size = 1411881 }, + { url = "https://files.pythonhosted.org/packages/89/9f/e4412ea1b3e220acc21777a5edba8885856403d29c6999aaf00a9459eb03/pyzmq-26.2.1-cp313-cp313-win32.whl", hash = "sha256:edb550616f567cd5603b53bb52a5f842c0171b78852e6fc7e392b02c2a1504bb", size = 581354 }, + { url = "https://files.pythonhosted.org/packages/55/cd/f89dd3e9fc2da0d1619a82c4afb600c86b52bc72d7584953d460bc8d5027/pyzmq-26.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:100a826a029c8ef3d77a1d4c97cbd6e867057b5806a7276f2bac1179f893d3bf", size = 643560 }, + { url = "https://files.pythonhosted.org/packages/a7/99/5de4f8912860013f1116f818a0047659bc20d71d1bc1d48f874bdc2d7b9c/pyzmq-26.2.1-cp313-cp313-win_arm64.whl", hash = "sha256:6991ee6c43e0480deb1b45d0c7c2bac124a6540cba7db4c36345e8e092da47ce", size = 558037 }, + { url = "https://files.pythonhosted.org/packages/06/0b/63b6d7a2f07a77dbc9768c6302ae2d7518bed0c6cee515669ca0d8ec743e/pyzmq-26.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:25e720dba5b3a3bb2ad0ad5d33440babd1b03438a7a5220511d0c8fa677e102e", size = 938580 }, + { url = "https://files.pythonhosted.org/packages/85/38/e5e2c3ffa23ea5f95f1c904014385a55902a11a67cd43c10edf61a653467/pyzmq-26.2.1-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:9ec6abfb701437142ce9544bd6a236addaf803a32628d2260eb3dbd9a60e2891", size = 1339670 }, + { url = "https://files.pythonhosted.org/packages/d2/87/da5519ed7f8b31e4beee8f57311ec02926822fe23a95120877354cd80144/pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e1eb9d2bfdf5b4e21165b553a81b2c3bd5be06eeddcc4e08e9692156d21f1f6", size = 660983 }, + { url = "https://files.pythonhosted.org/packages/f6/e8/1ca6a2d59562e04d326a026c9e3f791a6f1a276ebde29da478843a566fdb/pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90dc731d8e3e91bcd456aa7407d2eba7ac6f7860e89f3766baabb521f2c1de4a", size = 896509 }, + { url = "https://files.pythonhosted.org/packages/5c/e5/0b4688f7c74bea7e4f1e920da973fcd7d20175f4f1181cb9b692429c6bb9/pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6a93d684278ad865fc0b9e89fe33f6ea72d36da0e842143891278ff7fd89c3", size = 853196 }, + { url = "https://files.pythonhosted.org/packages/8f/35/c17241da01195001828319e98517683dad0ac4df6fcba68763d61b630390/pyzmq-26.2.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:c1bb37849e2294d519117dd99b613c5177934e5c04a5bb05dd573fa42026567e", size = 855133 }, + { url = "https://files.pythonhosted.org/packages/d2/14/268ee49bbecc3f72e225addeac7f0e2bd5808747b78c7bf7f87ed9f9d5a8/pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:632a09c6d8af17b678d84df442e9c3ad8e4949c109e48a72f805b22506c4afa7", size = 1191612 }, + { url = "https://files.pythonhosted.org/packages/5e/02/6394498620b1b4349b95c534f3ebc3aef95f39afbdced5ed7ee315c49c14/pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:fc409c18884eaf9ddde516d53af4f2db64a8bc7d81b1a0c274b8aa4e929958e8", size = 1500824 }, + { url = "https://files.pythonhosted.org/packages/17/fc/b79f0b72891cbb9917698add0fede71dfb64e83fa3481a02ed0e78c34be7/pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:17f88622b848805d3f6427ce1ad5a2aa3cf61f12a97e684dab2979802024d460", size = 1399943 }, + { url = "https://files.pythonhosted.org/packages/65/d1/e630a75cfb2534574a1258fda54d02f13cf80b576d4ce6d2aa478dc67829/pyzmq-26.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:380816d298aed32b1a97b4973a4865ef3be402a2e760204509b52b6de79d755d", size = 847743 }, + { url = "https://files.pythonhosted.org/packages/27/df/f94a711b4f6c4b41e227f9a938103f52acf4c2e949d91cbc682495a48155/pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97cbb368fd0debdbeb6ba5966aa28e9a1ae3396c7386d15569a6ca4be4572b99", size = 570991 }, + { url = "https://files.pythonhosted.org/packages/bf/08/0c6f97fb3c9dbfa23382f0efaf8f9aa1396a08a3358974eaae3ee659ed5c/pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf7b5942c6b0dafcc2823ddd9154f419147e24f8df5b41ca8ea40a6db90615c", size = 799664 }, + { url = "https://files.pythonhosted.org/packages/05/14/f4d4fd8bb8988c667845734dd756e9ee65b9a17a010d5f288dfca14a572d/pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fe6e28a8856aea808715f7a4fc11f682b9d29cac5d6262dd8fe4f98edc12d53", size = 758156 }, + { url = "https://files.pythonhosted.org/packages/e3/fe/72e7e166bda3885810bee7b23049133e142f7c80c295bae02c562caeea16/pyzmq-26.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bd8fdee945b877aa3bffc6a5a8816deb048dab0544f9df3731ecd0e54d8c84c9", size = 556563 }, ] [[package]] name = "qdrant-client" -version = "1.12.2" +version = "1.12.1" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.13' and sys_platform == 'darwin'", + "python_full_version >= '3.13' and sys_platform == 'linux'", + "python_full_version >= '3.13' and sys_platform == 'win32'", +] dependencies = [ - { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "grpcio-tools", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "httpx", extra = ["http2"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "portalocker", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "grpcio", marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform == 'win32')" }, + { name = "grpcio-tools", marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform == 'win32')" }, + { name = "httpx", extra = ["http2"], marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform == 'win32')" }, + { name = "numpy", marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform == 'win32')" }, + { name = "portalocker", marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and 
sys_platform == 'win32')" }, + { name = "pydantic", marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform == 'win32')" }, + { name = "urllib3", marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b9/0b/7b6ddc9ade365b644a023ca225300662766732e1e9db7f5962a6cf9530bd/qdrant_client-1.12.2.tar.gz", hash = "sha256:2777e09b3e89bb22bb490384d8b1fa8140f3915287884f18984f7031a346aba5", size = 237512 } +sdist = { url = "https://files.pythonhosted.org/packages/15/5e/ec560881e086f893947c8798949c72de5cfae9453fd05c2250f8dfeaa571/qdrant_client-1.12.1.tar.gz", hash = "sha256:35e8e646f75b7b883b3d2d0ee4c69c5301000bba41c82aa546e985db0f1aeb72", size = 237441 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/00/27c6eb6fc764e2b3d26ddeab4aedab855c050c906ec018bdd669b18f3157/qdrant_client-1.12.2-py3-none-any.whl", hash = "sha256:a0ae500a46a679ff3521ba3f1f1cf3d72b57090a768cec65fc317066bcbac1e6", size = 267173 }, + { url = "https://files.pythonhosted.org/packages/68/c0/eef4fe9dad6d41333f7dc6567fa8144ffc1837c8a0edfc2317d50715335f/qdrant_client-1.12.1-py3-none-any.whl", hash = "sha256:b2d17ce18e9e767471368380dd3bbc4a0e3a0e2061fedc9af3542084b48451e0", size = 267171 }, +] + +[[package]] +name = "qdrant-client" +version = "1.13.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and sys_platform == 'darwin'", + "python_full_version == '3.12.*' and sys_platform == 'darwin'", + "python_full_version < '3.11' and sys_platform == 'linux'", + "python_full_version == '3.11.*' and sys_platform == 'linux'", + "python_full_version == '3.12.*' and sys_platform == 'linux'", + "python_full_version < '3.11' and sys_platform == 'win32'", + "python_full_version == '3.11.*' and sys_platform == 'win32'", + "python_full_version == '3.12.*' and sys_platform == 'win32'", +] +dependencies = [ + { name = "grpcio", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, + { name = "grpcio-tools", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, + { name = "httpx", extra = ["http2"], marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, + { name = "numpy", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, + { name = "portalocker", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, + { name = "pydantic", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < 
'3.13' and sys_platform == 'win32')" }, + { name = "urllib3", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/07/3eaf3777d524d555ba14e56a30c3e393ad78ed93f6c87c6a3ddc70ec2e49/qdrant_client-1.13.2.tar.gz", hash = "sha256:c8cce87ce67b006f49430a050a35c85b78e3b896c0c756dafc13bdeca543ec13", size = 266257 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/26/89ebaee5fcbd99bf1c0a627a9447b440118b2d31dea423d074cb0481be5c/qdrant_client-1.13.2-py3-none-any.whl", hash = "sha256:db97e759bd3f8d483a383984ba4c2a158eef56f2188d83df7771591d43de2201", size = 306637 }, ] [[package]] @@ -4255,34 +4517,36 @@ hiredis = [ [[package]] name = "redisvl" -version = "0.3.8" +version = "0.4.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coloredlogs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "ml-dtypes", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "python-ulid", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "redis", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tabulate", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tenacity", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e8/e2/95d61ccf79dd6fed45a2cdabbc09fd78453bd744ac8ac8fe53a678959470/redisvl-0.3.8.tar.gz", hash = "sha256:b07fc3c36cdd2d6304ab8b3e759a733d3332df868a926ffa7691803c195eab42", size = 72757 } +sdist = { url = "https://files.pythonhosted.org/packages/21/33/ab14865a0b2a31b1d003c29e7e8ea3a7a2f2c8ecb24e58e58d606e1f031b/redisvl-0.4.1.tar.gz", hash = "sha256:fd6a36426ba94792c0efca20915c31232d4ee3cc58eb23794a62c142696401e6", size = 77688 } wheels = [ - { url = "https://files.pythonhosted.org/packages/09/c4/b0cb6d49794e6e2ea69bde5877cd566dc448e4067932de570a09c6fb09de/redisvl-0.3.8-py3-none-any.whl", hash = "sha256:2dffd4ed9a4de5a384b5cbcfb2d4bc5a12bd109811ac672b9d89225dc0a0fd63", size = 99287 }, + { url = "https://files.pythonhosted.org/packages/ad/34/2b4bb30fabb6e37c3c57d2cb760f19aec23567f9622372c8a7617071f204/redisvl-0.4.1-py3-none-any.whl", hash = "sha256:6db5d5bc95b1fe8032a1cdae74ce1c65bc7fe9054e5429b5d34d5a91d28bae5f", size = 108525 }, ] [[package]] name = "referencing" -version = "0.35.1" +version = "0.36.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "rpds-py", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < 
'3.13' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/99/5b/73ca1f8e72fff6fa52119dbd185f73a907b1989428917b24cff660129b6d/referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c", size = 62991 } +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/59/2056f61236782a2c86b33906c025d4f4a0b17be0161b63b70fd9e8775d36/referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de", size = 26684 }, + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775 }, ] [[package]] @@ -4410,87 +4674,87 @@ wheels = [ [[package]] name = "rpds-py" -version = "0.22.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/80/cce854d0921ff2f0a9fa831ba3ad3c65cee3a46711addf39a2af52df2cfd/rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d", size = 26771 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/42/2a/ead1d09e57449b99dcc190d8d2323e3a167421d8f8fdf0f217c6f6befe47/rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967", size = 359514 }, - { url = "https://files.pythonhosted.org/packages/8f/7e/1254f406b7793b586c68e217a6a24ec79040f85e030fff7e9049069284f4/rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37", size = 349031 }, - { url = "https://files.pythonhosted.org/packages/aa/da/17c6a2c73730d426df53675ff9cc6653ac7a60b6438d03c18e1c822a576a/rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24", size = 381485 }, - { url = "https://files.pythonhosted.org/packages/aa/13/2dbacd820466aa2a3c4b747afb18d71209523d353cf865bf8f4796c969ea/rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff", size = 386794 }, - { url = "https://files.pythonhosted.org/packages/6d/62/96905d0a35ad4e4bc3c098b2f34b2e7266e211d08635baa690643d2227be/rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c", size = 423523 }, - { url = "https://files.pythonhosted.org/packages/eb/1b/d12770f2b6a9fc2c3ec0d810d7d440f6d465ccd8b7f16ae5385952c28b89/rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e", size = 446695 }, - { url = "https://files.pythonhosted.org/packages/4d/cf/96f1fd75512a017f8e07408b6d5dbeb492d9ed46bfe0555544294f3681b3/rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec", size = 381959 }, - { url = 
"https://files.pythonhosted.org/packages/ab/f0/d1c5b501c8aea85aeb938b555bfdf7612110a2f8cdc21ae0482c93dd0c24/rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c", size = 410420 }, - { url = "https://files.pythonhosted.org/packages/33/3b/45b6c58fb6aad5a569ae40fb890fc494c6b02203505a5008ee6dc68e65f7/rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09", size = 557620 }, - { url = "https://files.pythonhosted.org/packages/83/62/3fdd2d3d47bf0bb9b931c4c73036b4ab3ec77b25e016ae26fab0f02be2af/rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00", size = 584202 }, - { url = "https://files.pythonhosted.org/packages/04/f2/5dced98b64874b84ca824292f9cee2e3f30f3bcf231d15a903126684f74d/rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf", size = 552787 }, - { url = "https://files.pythonhosted.org/packages/67/13/2273dea1204eda0aea0ef55145da96a9aa28b3f88bb5c70e994f69eda7c3/rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652", size = 220088 }, - { url = "https://files.pythonhosted.org/packages/4e/80/8c8176b67ad7f4a894967a7a4014ba039626d96f1d4874d53e409b58d69f/rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8", size = 231737 }, - { url = "https://files.pythonhosted.org/packages/15/ad/8d1ddf78f2805a71253fcd388017e7b4a0615c22c762b6d35301fef20106/rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f", size = 359773 }, - { url = "https://files.pythonhosted.org/packages/c8/75/68c15732293a8485d79fe4ebe9045525502a067865fa4278f178851b2d87/rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a", size = 349214 }, - { url = "https://files.pythonhosted.org/packages/3c/4c/7ce50f3070083c2e1b2bbd0fb7046f3da55f510d19e283222f8f33d7d5f4/rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5", size = 380477 }, - { url = "https://files.pythonhosted.org/packages/9a/e9/835196a69cb229d5c31c13b8ae603bd2da9a6695f35fe4270d398e1db44c/rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb", size = 386171 }, - { url = "https://files.pythonhosted.org/packages/f9/8e/33fc4eba6683db71e91e6d594a2cf3a8fbceb5316629f0477f7ece5e3f75/rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2", size = 422676 }, - { url = "https://files.pythonhosted.org/packages/37/47/2e82d58f8046a98bb9497a8319604c92b827b94d558df30877c4b3c6ccb3/rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0", size = 446152 }, - { url = "https://files.pythonhosted.org/packages/e1/78/79c128c3e71abbc8e9739ac27af11dc0f91840a86fce67ff83c65d1ba195/rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1", size = 381300 }, - { url = "https://files.pythonhosted.org/packages/c9/5b/2e193be0e8b228c1207f31fa3ea79de64dadb4f6a4833111af8145a6bc33/rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d", size = 409636 }, - { url = "https://files.pythonhosted.org/packages/c2/3f/687c7100b762d62186a1c1100ffdf99825f6fa5ea94556844bbbd2d0f3a9/rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648", size = 556708 }, - { url = "https://files.pythonhosted.org/packages/8c/a2/c00cbc4b857e8b3d5e7f7fc4c81e23afd8c138b930f4f3ccf9a41a23e9e4/rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74", size = 583554 }, - { url = "https://files.pythonhosted.org/packages/d0/08/696c9872cf56effdad9ed617ac072f6774a898d46b8b8964eab39ec562d2/rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a", size = 552105 }, - { url = "https://files.pythonhosted.org/packages/18/1f/4df560be1e994f5adf56cabd6c117e02de7c88ee238bb4ce03ed50da9d56/rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64", size = 220199 }, - { url = "https://files.pythonhosted.org/packages/b8/1b/c29b570bc5db8237553002788dc734d6bd71443a2ceac2a58202ec06ef12/rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c", size = 231775 }, - { url = "https://files.pythonhosted.org/packages/75/47/3383ee3bd787a2a5e65a9b9edc37ccf8505c0a00170e3a5e6ea5fbcd97f7/rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e", size = 352334 }, - { url = "https://files.pythonhosted.org/packages/40/14/aa6400fa8158b90a5a250a77f2077c0d0cd8a76fce31d9f2b289f04c6dec/rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56", size = 342111 }, - { url = "https://files.pythonhosted.org/packages/7d/06/395a13bfaa8a28b302fb433fb285a67ce0ea2004959a027aea8f9c52bad4/rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45", size = 384286 }, - { url = "https://files.pythonhosted.org/packages/43/52/d8eeaffab047e6b7b7ef7f00d5ead074a07973968ffa2d5820fa131d7852/rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e", size = 391739 }, - { url = "https://files.pythonhosted.org/packages/83/31/52dc4bde85c60b63719610ed6f6d61877effdb5113a72007679b786377b8/rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d", size = 427306 }, - { url = "https://files.pythonhosted.org/packages/70/d5/1bab8e389c2261dba1764e9e793ed6830a63f830fdbec581a242c7c46bda/rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38", size = 442717 }, - { url = 
"https://files.pythonhosted.org/packages/82/a1/a45f3e30835b553379b3a56ea6c4eb622cf11e72008229af840e4596a8ea/rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15", size = 385721 }, - { url = "https://files.pythonhosted.org/packages/a6/27/780c942de3120bdd4d0e69583f9c96e179dfff082f6ecbb46b8d6488841f/rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059", size = 415824 }, - { url = "https://files.pythonhosted.org/packages/94/0b/aa0542ca88ad20ea719b06520f925bae348ea5c1fdf201b7e7202d20871d/rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e", size = 561227 }, - { url = "https://files.pythonhosted.org/packages/0d/92/3ed77d215f82c8f844d7f98929d56cc321bb0bcfaf8f166559b8ec56e5f1/rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61", size = 587424 }, - { url = "https://files.pythonhosted.org/packages/09/42/cacaeb047a22cab6241f107644f230e2935d4efecf6488859a7dd82fc47d/rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7", size = 555953 }, - { url = "https://files.pythonhosted.org/packages/e6/52/c921dc6d5f5d45b212a456c1f5b17df1a471127e8037eb0972379e39dff4/rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627", size = 221339 }, - { url = "https://files.pythonhosted.org/packages/f2/c7/f82b5be1e8456600395366f86104d1bd8d0faed3802ad511ef6d60c30d98/rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4", size = 235786 }, - { url = "https://files.pythonhosted.org/packages/d0/bf/36d5cc1f2c609ae6e8bf0fc35949355ca9d8790eceb66e6385680c951e60/rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84", size = 351657 }, - { url = "https://files.pythonhosted.org/packages/24/2a/f1e0fa124e300c26ea9382e59b2d582cba71cedd340f32d1447f4f29fa4e/rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25", size = 341829 }, - { url = "https://files.pythonhosted.org/packages/cf/c2/0da1231dd16953845bed60d1a586fcd6b15ceaeb965f4d35cdc71f70f606/rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4", size = 384220 }, - { url = "https://files.pythonhosted.org/packages/c7/73/a4407f4e3a00a9d4b68c532bf2d873d6b562854a8eaff8faa6133b3588ec/rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5", size = 391009 }, - { url = "https://files.pythonhosted.org/packages/a9/c3/04b7353477ab360fe2563f5f0b176d2105982f97cd9ae80a9c5a18f1ae0f/rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc", size = 426989 }, - { url = "https://files.pythonhosted.org/packages/8d/e6/e4b85b722bcf11398e17d59c0f6049d19cd606d35363221951e6d625fcb0/rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b", size = 441544 }, - { url = "https://files.pythonhosted.org/packages/27/fc/403e65e56f65fff25f2973216974976d3f0a5c3f30e53758589b6dc9b79b/rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518", size = 385179 }, - { url = "https://files.pythonhosted.org/packages/57/9b/2be9ff9700d664d51fd96b33d6595791c496d2778cb0b2a634f048437a55/rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd", size = 415103 }, - { url = "https://files.pythonhosted.org/packages/bb/a5/03c2ad8ca10994fcf22dd2150dd1d653bc974fa82d9a590494c84c10c641/rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2", size = 560916 }, - { url = "https://files.pythonhosted.org/packages/ba/2e/be4fdfc8b5b576e588782b56978c5b702c5a2307024120d8aeec1ab818f0/rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16", size = 587062 }, - { url = "https://files.pythonhosted.org/packages/67/e0/2034c221937709bf9c542603d25ad43a68b4b0a9a0c0b06a742f2756eb66/rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f", size = 555734 }, - { url = "https://files.pythonhosted.org/packages/ea/ce/240bae07b5401a22482b58e18cfbabaa392409b2797da60223cca10d7367/rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de", size = 220663 }, - { url = "https://files.pythonhosted.org/packages/cb/f0/d330d08f51126330467edae2fa4efa5cec8923c87551a79299380fdea30d/rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9", size = 235503 }, - { url = "https://files.pythonhosted.org/packages/f7/c4/dbe1cc03df013bf2feb5ad00615038050e7859f381e96fb5b7b4572cd814/rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b", size = 347698 }, - { url = "https://files.pythonhosted.org/packages/a4/3a/684f66dd6b0f37499cad24cd1c0e523541fd768576fa5ce2d0a8799c3cba/rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b", size = 337330 }, - { url = "https://files.pythonhosted.org/packages/82/eb/e022c08c2ce2e8f7683baa313476492c0e2c1ca97227fe8a75d9f0181e95/rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1", size = 380022 }, - { url = "https://files.pythonhosted.org/packages/e4/21/5a80e653e4c86aeb28eb4fea4add1f72e1787a3299687a9187105c3ee966/rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83", size = 390754 }, - { url = "https://files.pythonhosted.org/packages/37/a4/d320a04ae90f72d080b3d74597074e62be0a8ecad7d7321312dfe2dc5a6a/rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd", size = 423840 }, - { url = 
"https://files.pythonhosted.org/packages/87/70/674dc47d93db30a6624279284e5631be4c3a12a0340e8e4f349153546728/rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1", size = 438970 }, - { url = "https://files.pythonhosted.org/packages/3f/64/9500f4d66601d55cadd21e90784cfd5d5f4560e129d72e4339823129171c/rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3", size = 383146 }, - { url = "https://files.pythonhosted.org/packages/4d/45/630327addb1d17173adcf4af01336fd0ee030c04798027dfcb50106001e0/rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130", size = 408294 }, - { url = "https://files.pythonhosted.org/packages/5f/ef/8efb3373cee54ea9d9980b772e5690a0c9e9214045a4e7fa35046e399fee/rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c", size = 556345 }, - { url = "https://files.pythonhosted.org/packages/54/01/151d3b9ef4925fc8f15bfb131086c12ec3c3d6dd4a4f7589c335bf8e85ba/rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b", size = 582292 }, - { url = "https://files.pythonhosted.org/packages/30/89/35fc7a6cdf3477d441c7aca5e9bbf5a14e0f25152aed7f63f4e0b141045d/rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333", size = 553855 }, - { url = "https://files.pythonhosted.org/packages/8f/e0/830c02b2457c4bd20a8c5bb394d31d81f57fbefce2dbdd2e31feff4f7003/rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730", size = 219100 }, - { url = "https://files.pythonhosted.org/packages/f8/30/7ac943f69855c2db77407ae363484b915d861702dbba1aa82d68d57f42be/rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf", size = 233794 }, - { url = "https://files.pythonhosted.org/packages/8b/63/e29f8ee14fcf383574f73b6bbdcbec0fbc2e5fc36b4de44d1ac389b1de62/rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d", size = 360786 }, - { url = "https://files.pythonhosted.org/packages/d3/e0/771ee28b02a24e81c8c0e645796a371350a2bb6672753144f36ae2d2afc9/rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd", size = 350589 }, - { url = "https://files.pythonhosted.org/packages/cf/49/abad4c4a1e6f3adf04785a99c247bfabe55ed868133e2d1881200aa5d381/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493", size = 381848 }, - { url = "https://files.pythonhosted.org/packages/3a/7d/f4bc6d6fbe6af7a0d2b5f2ee77079efef7c8528712745659ec0026888998/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96", size = 387879 }, - { url = "https://files.pythonhosted.org/packages/13/b0/575c797377fdcd26cedbb00a3324232e4cb2c5d121f6e4b0dbf8468b12ef/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123", size = 423916 }, - { url = "https://files.pythonhosted.org/packages/54/78/87157fa39d58f32a68d3326f8a81ad8fb99f49fe2aa7ad9a1b7d544f9478/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad", size = 448410 }, - { url = "https://files.pythonhosted.org/packages/59/69/860f89996065a88be1b6ff2d60e96a02b920a262d8aadab99e7903986597/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9", size = 382841 }, - { url = "https://files.pythonhosted.org/packages/bd/d7/bc144e10d27e3cb350f98df2492a319edd3caaf52ddfe1293f37a9afbfd7/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e", size = 409662 }, - { url = "https://files.pythonhosted.org/packages/14/2a/6bed0b05233c291a94c7e89bc76ffa1c619d4e1979fbfe5d96024020c1fb/rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338", size = 558221 }, - { url = "https://files.pythonhosted.org/packages/11/23/cd8f566de444a137bc1ee5795e47069a947e60810ba4152886fe5308e1b7/rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566", size = 583780 }, - { url = "https://files.pythonhosted.org/packages/8d/63/79c3602afd14d501f751e615a74a59040328da5ef29ed5754ae80d236b84/rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe", size = 553619 }, - { url = "https://files.pythonhosted.org/packages/9f/2e/c5c1689e80298d4e94c75b70faada4c25445739d91b94c211244a3ed7ed1/rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d", size = 233338 }, +version = "0.23.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/79/2ce611b18c4fd83d9e3aecb5cba93e1917c050f556db39842889fa69b79f/rpds_py-0.23.1.tar.gz", hash = "sha256:7f3240dcfa14d198dba24b8b9cb3b108c06b68d45b7babd9eefc1038fdf7e707", size = 26806 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/fe/e5326459863bd525122f4e9c80ac8d7c6cfa171b7518d04cc27c12c209b0/rpds_py-0.23.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2a54027554ce9b129fc3d633c92fa33b30de9f08bc61b32c053dc9b537266fed", size = 372123 }, + { url = "https://files.pythonhosted.org/packages/f9/db/f10a3795f7a89fb27594934012d21c61019bbeb516c5bdcfbbe9e9e617a7/rpds_py-0.23.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b5ef909a37e9738d146519657a1aab4584018746a18f71c692f2f22168ece40c", size = 356778 }, + { url = "https://files.pythonhosted.org/packages/21/27/0d3678ad7f432fa86f8fac5f5fc6496a4d2da85682a710d605219be20063/rpds_py-0.23.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ee9d6f0b38efb22ad94c3b68ffebe4c47865cdf4b17f6806d6c674e1feb4246", size = 385775 }, + { url = "https://files.pythonhosted.org/packages/99/a0/1786defa125b2ad228027f22dff26312ce7d1fee3c7c3c2682f403db2062/rpds_py-0.23.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7356a6da0562190558c4fcc14f0281db191cdf4cb96e7604c06acfcee96df15", size = 391181 }, + { url = 
"https://files.pythonhosted.org/packages/f1/5c/1240934050a7ffd020a915486d0cc4c7f6e7a2442a77aedf13664db55d36/rpds_py-0.23.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9441af1d25aed96901f97ad83d5c3e35e6cd21a25ca5e4916c82d7dd0490a4fa", size = 444607 }, + { url = "https://files.pythonhosted.org/packages/b7/1b/cee6905b47817fd0a377716dbe4df35295de46df46ee2ff704538cc371b0/rpds_py-0.23.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d8abf7896a91fb97e7977d1aadfcc2c80415d6dc2f1d0fca5b8d0df247248f3", size = 445550 }, + { url = "https://files.pythonhosted.org/packages/54/f7/f0821ca34032892d7a67fcd5042f50074ff2de64e771e10df01085c88d47/rpds_py-0.23.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b08027489ba8fedde72ddd233a5ea411b85a6ed78175f40285bd401bde7466d", size = 386148 }, + { url = "https://files.pythonhosted.org/packages/eb/ef/2afe53bc857c4bcba336acfd2629883a5746e7291023e017ac7fc98d85aa/rpds_py-0.23.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fee513135b5a58f3bb6d89e48326cd5aa308e4bcdf2f7d59f67c861ada482bf8", size = 416780 }, + { url = "https://files.pythonhosted.org/packages/ae/9a/38d2236cf669789b8a3e1a014c9b6a8d7b8925b952c92e7839ae2749f9ac/rpds_py-0.23.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:35d5631ce0af26318dba0ae0ac941c534453e42f569011585cb323b7774502a5", size = 558265 }, + { url = "https://files.pythonhosted.org/packages/e6/0a/f2705530c42578f20ed0b5b90135eecb30eef6e2ba73e7ba69087fad2dba/rpds_py-0.23.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a20cb698c4a59c534c6701b1c24a968ff2768b18ea2991f886bd8985ce17a89f", size = 585270 }, + { url = "https://files.pythonhosted.org/packages/29/4e/3b597dc84ed82c3d757ac9aa620de224a94e06d2e102069795ae7e81c015/rpds_py-0.23.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e9c206a1abc27e0588cf8b7c8246e51f1a16a103734f7750830a1ccb63f557a", size = 553850 }, + { url = "https://files.pythonhosted.org/packages/00/cc/6498b6f79e4375e6737247661e52a2d18f6accf4910e0c8da978674b4241/rpds_py-0.23.1-cp310-cp310-win32.whl", hash = "sha256:d9f75a06ecc68f159d5d7603b734e1ff6daa9497a929150f794013aa9f6e3f12", size = 220660 }, + { url = "https://files.pythonhosted.org/packages/17/2b/08db023d23e8c7032c99d8d2a70d32e450a868ab73d16e3ff5290308a665/rpds_py-0.23.1-cp310-cp310-win_amd64.whl", hash = "sha256:f35eff113ad430b5272bbfc18ba111c66ff525828f24898b4e146eb479a2cdda", size = 232551 }, + { url = "https://files.pythonhosted.org/packages/1c/67/6e5d4234bb9dee062ffca2a5f3c7cd38716317d6760ec235b175eed4de2c/rpds_py-0.23.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b79f5ced71efd70414a9a80bbbfaa7160da307723166f09b69773153bf17c590", size = 372264 }, + { url = "https://files.pythonhosted.org/packages/a7/0a/3dedb2daee8e783622427f5064e2d112751d8276ee73aa5409f000a132f4/rpds_py-0.23.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c9e799dac1ffbe7b10c1fd42fe4cd51371a549c6e108249bde9cd1200e8f59b4", size = 356883 }, + { url = "https://files.pythonhosted.org/packages/ed/fc/e1acef44f9c24b05fe5434b235f165a63a52959ac655e3f7a55726cee1a4/rpds_py-0.23.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:721f9c4011b443b6e84505fc00cc7aadc9d1743f1c988e4c89353e19c4a968ee", size = 385624 }, + { url = "https://files.pythonhosted.org/packages/97/0a/a05951f6465d01622720c03ef6ef31adfbe865653e05ed7c45837492f25e/rpds_py-0.23.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:f88626e3f5e57432e6191cd0c5d6d6b319b635e70b40be2ffba713053e5147dd", size = 391500 }, + { url = "https://files.pythonhosted.org/packages/ea/2e/cca0583ec0690ea441dceae23c0673b99755710ea22f40bccf1e78f41481/rpds_py-0.23.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:285019078537949cecd0190f3690a0b0125ff743d6a53dfeb7a4e6787af154f5", size = 444869 }, + { url = "https://files.pythonhosted.org/packages/cc/e6/95cda68b33a6d814d1e96b0e406d231ed16629101460d1740e92f03365e6/rpds_py-0.23.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b92f5654157de1379c509b15acec9d12ecf6e3bc1996571b6cb82a4302060447", size = 444930 }, + { url = "https://files.pythonhosted.org/packages/5f/a7/e94cdb73411ae9c11414d3c7c9a6ad75d22ad4a8d094fb45a345ba9e3018/rpds_py-0.23.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e768267cbe051dd8d1c5305ba690bb153204a09bf2e3de3ae530de955f5b5580", size = 386254 }, + { url = "https://files.pythonhosted.org/packages/dd/c5/a4a943d90a39e85efd1e04b1ad5129936786f9a9aa27bb7be8fc5d9d50c9/rpds_py-0.23.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c5334a71f7dc1160382d45997e29f2637c02f8a26af41073189d79b95d3321f1", size = 417090 }, + { url = "https://files.pythonhosted.org/packages/0c/a0/80d0013b12428d1fce0ab4e71829400b0a32caec12733c79e6109f843342/rpds_py-0.23.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d6adb81564af0cd428910f83fa7da46ce9ad47c56c0b22b50872bc4515d91966", size = 557639 }, + { url = "https://files.pythonhosted.org/packages/a6/92/ec2e6980afb964a2cd7a99cbdef1f6c01116abe94b42cbe336ac93dd11c2/rpds_py-0.23.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:cafa48f2133d4daa028473ede7d81cd1b9f9e6925e9e4003ebdf77010ee02f35", size = 584572 }, + { url = "https://files.pythonhosted.org/packages/3d/ce/75b6054db34a390789a82523790717b27c1bd735e453abb429a87c4f0f26/rpds_py-0.23.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fced9fd4a07a1ded1bac7e961ddd9753dd5d8b755ba8e05acba54a21f5f1522", size = 553028 }, + { url = "https://files.pythonhosted.org/packages/cc/24/f45abe0418c06a5cba0f846e967aa27bac765acd927aabd857c21319b8cc/rpds_py-0.23.1-cp311-cp311-win32.whl", hash = "sha256:243241c95174b5fb7204c04595852fe3943cc41f47aa14c3828bc18cd9d3b2d6", size = 220862 }, + { url = "https://files.pythonhosted.org/packages/2d/a6/3c0880e8bbfc36451ef30dc416266f6d2934705e468db5d21c8ba0ab6400/rpds_py-0.23.1-cp311-cp311-win_amd64.whl", hash = "sha256:11dd60b2ffddba85715d8a66bb39b95ddbe389ad2cfcf42c833f1bcde0878eaf", size = 232953 }, + { url = "https://files.pythonhosted.org/packages/f3/8c/d17efccb9f5b9137ddea706664aebae694384ae1d5997c0202093e37185a/rpds_py-0.23.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3902df19540e9af4cc0c3ae75974c65d2c156b9257e91f5101a51f99136d834c", size = 364369 }, + { url = "https://files.pythonhosted.org/packages/6e/c0/ab030f696b5c573107115a88d8d73d80f03309e60952b64c584c70c659af/rpds_py-0.23.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66f8d2a17e5838dd6fb9be6baaba8e75ae2f5fa6b6b755d597184bfcd3cb0eba", size = 349965 }, + { url = "https://files.pythonhosted.org/packages/b3/55/b40170f5a079c4fb0b6a82b299689e66e744edca3c3375a8b160fb797660/rpds_py-0.23.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:112b8774b0b4ee22368fec42749b94366bd9b536f8f74c3d4175d4395f5cbd31", size = 389064 }, + { url = 
"https://files.pythonhosted.org/packages/ab/1c/b03a912c59ec7c1e16b26e587b9dfa8ddff3b07851e781e8c46e908a365a/rpds_py-0.23.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0df046f2266e8586cf09d00588302a32923eb6386ced0ca5c9deade6af9a149", size = 397741 }, + { url = "https://files.pythonhosted.org/packages/52/6f/151b90792b62fb6f87099bcc9044c626881fdd54e31bf98541f830b15cea/rpds_py-0.23.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3288930b947cbebe767f84cf618d2cbe0b13be476e749da0e6a009f986248c", size = 448784 }, + { url = "https://files.pythonhosted.org/packages/71/2a/6de67c0c97ec7857e0e9e5cd7c52405af931b303eb1e5b9eff6c50fd9a2e/rpds_py-0.23.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce473a2351c018b06dd8d30d5da8ab5a0831056cc53b2006e2a8028172c37ce5", size = 440203 }, + { url = "https://files.pythonhosted.org/packages/db/5e/e759cd1c276d98a4b1f464b17a9bf66c65d29f8f85754e27e1467feaa7c3/rpds_py-0.23.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d550d7e9e7d8676b183b37d65b5cd8de13676a738973d330b59dc8312df9c5dc", size = 391611 }, + { url = "https://files.pythonhosted.org/packages/1c/1e/2900358efcc0d9408c7289769cba4c0974d9db314aa884028ed7f7364f61/rpds_py-0.23.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e14f86b871ea74c3fddc9a40e947d6a5d09def5adc2076ee61fb910a9014fb35", size = 423306 }, + { url = "https://files.pythonhosted.org/packages/23/07/6c177e6d059f5d39689352d6c69a926ee4805ffdb6f06203570234d3d8f7/rpds_py-0.23.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1bf5be5ba34e19be579ae873da515a2836a2166d8d7ee43be6ff909eda42b72b", size = 562323 }, + { url = "https://files.pythonhosted.org/packages/70/e4/f9097fd1c02b516fff9850792161eb9fc20a2fd54762f3c69eae0bdb67cb/rpds_py-0.23.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7031d493c4465dbc8d40bd6cafefef4bd472b17db0ab94c53e7909ee781b9ef", size = 588351 }, + { url = "https://files.pythonhosted.org/packages/87/39/5db3c6f326bfbe4576ae2af6435bd7555867d20ae690c786ff33659f293b/rpds_py-0.23.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:55ff4151cfd4bc635e51cfb1c59ac9f7196b256b12e3a57deb9e5742e65941ad", size = 557252 }, + { url = "https://files.pythonhosted.org/packages/fd/14/2d5ad292f144fa79bafb78d2eb5b8a3a91c358b6065443cb9c49b5d1fedf/rpds_py-0.23.1-cp312-cp312-win32.whl", hash = "sha256:a9d3b728f5a5873d84cba997b9d617c6090ca5721caaa691f3b1a78c60adc057", size = 222181 }, + { url = "https://files.pythonhosted.org/packages/a3/4f/0fce63e0f5cdd658e71e21abd17ac1bc9312741ebb8b3f74eeed2ebdf771/rpds_py-0.23.1-cp312-cp312-win_amd64.whl", hash = "sha256:b03a8d50b137ee758e4c73638b10747b7c39988eb8e6cd11abb7084266455165", size = 237426 }, + { url = "https://files.pythonhosted.org/packages/13/9d/b8b2c0edffb0bed15be17b6d5ab06216f2f47f9ee49259c7e96a3ad4ca42/rpds_py-0.23.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:4caafd1a22e5eaa3732acb7672a497123354bef79a9d7ceed43387d25025e935", size = 363672 }, + { url = "https://files.pythonhosted.org/packages/bd/c2/5056fa29e6894144d7ba4c938b9b0445f75836b87d2dd00ed4999dc45a8c/rpds_py-0.23.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:178f8a60fc24511c0eb756af741c476b87b610dba83270fce1e5a430204566a4", size = 349602 }, + { url = "https://files.pythonhosted.org/packages/b0/bc/33779a1bb0ee32d8d706b173825aab75c628521d23ce72a7c1e6a6852f86/rpds_py-0.23.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c632419c3870507ca20a37c8f8f5352317aca097639e524ad129f58c125c61c6", size = 388746 }, + { url = "https://files.pythonhosted.org/packages/62/0b/71db3e36b7780a619698ec82a9c87ab44ad7ca7f5480913e8a59ff76f050/rpds_py-0.23.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:698a79d295626ee292d1730bc2ef6e70a3ab135b1d79ada8fde3ed0047b65a10", size = 397076 }, + { url = "https://files.pythonhosted.org/packages/bb/2e/494398f613edf77ba10a916b1ddea2acce42ab0e3b62e2c70ffc0757ce00/rpds_py-0.23.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:271fa2184cf28bdded86bb6217c8e08d3a169fe0bbe9be5e8d96e8476b707122", size = 448399 }, + { url = "https://files.pythonhosted.org/packages/dd/53/4bd7f5779b1f463243ee5fdc83da04dd58a08f86e639dbffa7a35f969a84/rpds_py-0.23.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b91cceb5add79ee563bd1f70b30896bd63bc5f78a11c1f00a1e931729ca4f1f4", size = 439764 }, + { url = "https://files.pythonhosted.org/packages/f6/55/b3c18c04a460d951bf8e91f2abf46ce5b6426fb69784166a6a25827cb90a/rpds_py-0.23.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a6cb95074777f1ecda2ca4fa7717caa9ee6e534f42b7575a8f0d4cb0c24013", size = 390662 }, + { url = "https://files.pythonhosted.org/packages/2a/65/cc463044a3cbd616029b2aa87a651cdee8288d2fdd7780b2244845e934c1/rpds_py-0.23.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50fb62f8d8364978478b12d5f03bf028c6bc2af04082479299139dc26edf4c64", size = 422680 }, + { url = "https://files.pythonhosted.org/packages/fa/8e/1fa52990c7836d72e8d70cd7753f2362c72fbb0a49c1462e8c60e7176d0b/rpds_py-0.23.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c8f7e90b948dc9dcfff8003f1ea3af08b29c062f681c05fd798e36daa3f7e3e8", size = 561792 }, + { url = "https://files.pythonhosted.org/packages/57/b8/fe3b612979b1a29d0c77f8585903d8b3a292604b26d4b300e228b8ac6360/rpds_py-0.23.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5b98b6c953e5c2bda51ab4d5b4f172617d462eebc7f4bfdc7c7e6b423f6da957", size = 588127 }, + { url = "https://files.pythonhosted.org/packages/44/2d/fde474de516bbc4b9b230f43c98e7f8acc5da7fc50ceed8e7af27553d346/rpds_py-0.23.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2893d778d4671ee627bac4037a075168b2673c57186fb1a57e993465dbd79a93", size = 556981 }, + { url = "https://files.pythonhosted.org/packages/18/57/767deeb27b81370bbab8f74ef6e68d26c4ea99018f3c71a570e506fede85/rpds_py-0.23.1-cp313-cp313-win32.whl", hash = "sha256:2cfa07c346a7ad07019c33fb9a63cf3acb1f5363c33bc73014e20d9fe8b01cdd", size = 221936 }, + { url = "https://files.pythonhosted.org/packages/7d/6c/3474cfdd3cafe243f97ab8474ea8949236eb2a1a341ca55e75ce00cd03da/rpds_py-0.23.1-cp313-cp313-win_amd64.whl", hash = "sha256:3aaf141d39f45322e44fc2c742e4b8b4098ead5317e5f884770c8df0c332da70", size = 237145 }, + { url = "https://files.pythonhosted.org/packages/ec/77/e985064c624230f61efa0423759bb066da56ebe40c654f8b5ba225bd5d63/rpds_py-0.23.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:759462b2d0aa5a04be5b3e37fb8183615f47014ae6b116e17036b131985cb731", size = 359623 }, + { url = "https://files.pythonhosted.org/packages/62/d9/a33dcbf62b29e40559e012d525bae7d516757cf042cc9234bd34ca4b6aeb/rpds_py-0.23.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3e9212f52074fc9d72cf242a84063787ab8e21e0950d4d6709886fb62bcb91d5", size = 345900 }, + { url = 
"https://files.pythonhosted.org/packages/92/eb/f81a4be6397861adb2cb868bb6a28a33292c2dcac567d1dc575226055e55/rpds_py-0.23.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e9f3a3ac919406bc0414bbbd76c6af99253c507150191ea79fab42fdb35982a", size = 386426 }, + { url = "https://files.pythonhosted.org/packages/09/47/1f810c9b5e83be005341201b5389f1d240dfa440346ea7189f9b3fd6961d/rpds_py-0.23.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c04ca91dda8a61584165825907f5c967ca09e9c65fe8966ee753a3f2b019fe1e", size = 392314 }, + { url = "https://files.pythonhosted.org/packages/83/bd/bc95831432fd6c46ed8001f01af26de0763a059d6d7e6d69e3c5bf02917a/rpds_py-0.23.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ab923167cfd945abb9b51a407407cf19f5bee35001221f2911dc85ffd35ff4f", size = 447706 }, + { url = "https://files.pythonhosted.org/packages/19/3e/567c04c226b1802dc6dc82cad3d53e1fa0a773258571c74ac5d8fbde97ed/rpds_py-0.23.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed6f011bedca8585787e5082cce081bac3d30f54520097b2411351b3574e1219", size = 437060 }, + { url = "https://files.pythonhosted.org/packages/fe/77/a77d2c6afe27ae7d0d55fc32f6841502648070dc8d549fcc1e6d47ff8975/rpds_py-0.23.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6959bb9928c5c999aba4a3f5a6799d571ddc2c59ff49917ecf55be2bbb4e3722", size = 389347 }, + { url = "https://files.pythonhosted.org/packages/3f/47/6b256ff20a74cfebeac790ab05586e0ac91f88e331125d4740a6c86fc26f/rpds_py-0.23.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ed7de3c86721b4e83ac440751329ec6a1102229aa18163f84c75b06b525ad7e", size = 415554 }, + { url = "https://files.pythonhosted.org/packages/fc/29/d4572469a245bc9fc81e35166dca19fc5298d5c43e1a6dd64bf145045193/rpds_py-0.23.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5fb89edee2fa237584e532fbf78f0ddd1e49a47c7c8cfa153ab4849dc72a35e6", size = 557418 }, + { url = "https://files.pythonhosted.org/packages/9c/0a/68cf7228895b1a3f6f39f51b15830e62456795e61193d2c8b87fd48c60db/rpds_py-0.23.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7e5413d2e2d86025e73f05510ad23dad5950ab8417b7fc6beaad99be8077138b", size = 583033 }, + { url = "https://files.pythonhosted.org/packages/14/18/017ab41dcd6649ad5db7d00155b4c212b31ab05bd857d5ba73a1617984eb/rpds_py-0.23.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d31ed4987d72aabdf521eddfb6a72988703c091cfc0064330b9e5f8d6a042ff5", size = 554880 }, + { url = "https://files.pythonhosted.org/packages/2e/dd/17de89431268da8819d8d51ce67beac28d9b22fccf437bc5d6d2bcd1acdb/rpds_py-0.23.1-cp313-cp313t-win32.whl", hash = "sha256:f3429fb8e15b20961efca8c8b21432623d85db2228cc73fe22756c6637aa39e7", size = 219743 }, + { url = "https://files.pythonhosted.org/packages/68/15/6d22d07e063ce5e9bfbd96db9ec2fbb4693591b4503e3a76996639474d02/rpds_py-0.23.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d6f6512a90bd5cd9030a6237f5346f046c6f0e40af98657568fa45695d4de59d", size = 235415 }, + { url = "https://files.pythonhosted.org/packages/95/a9/6fafd35fc6bac05f59bcbc800b57cef877911ff1c015397c519fec888642/rpds_py-0.23.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c1f8afa346ccd59e4e5630d5abb67aba6a9812fddf764fd7eb11f382a345f8cc", size = 373463 }, + { url = "https://files.pythonhosted.org/packages/5b/ac/44f00029b8fbe0903a19e9a87a9b86063bf8700df2cc58868373d378418c/rpds_py-0.23.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:fad784a31869747df4ac968a351e070c06ca377549e4ace94775aaa3ab33ee06", size = 358400 }, + { url = "https://files.pythonhosted.org/packages/5e/9c/3da199346c68d785f10dccab123b74c8c5f73be3f742c9e33d1116e07931/rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5a96fcac2f18e5a0a23a75cd27ce2656c66c11c127b0318e508aab436b77428", size = 386815 }, + { url = "https://files.pythonhosted.org/packages/d3/45/8f6533c33c0d33da8c2c8b2fb8f2ee90b23c05c679b86b0ac6aee4653749/rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3e77febf227a1dc3220159355dba68faa13f8dca9335d97504abf428469fb18b", size = 392974 }, + { url = "https://files.pythonhosted.org/packages/ca/56/6a9ac1bf0455ba07385d8fe98c571c519b4f2000cff6581487bf9fab9272/rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26bb3e8de93443d55e2e748e9fd87deb5f8075ca7bc0502cfc8be8687d69a2ec", size = 446019 }, + { url = "https://files.pythonhosted.org/packages/f4/83/5d9a3f9731cdccf49088bcc4ce821a5cf50bd1737cdad83e9959a7b9054d/rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db7707dde9143a67b8812c7e66aeb2d843fe33cc8e374170f4d2c50bd8f2472d", size = 445811 }, + { url = "https://files.pythonhosted.org/packages/44/50/f2e0a98c62fc1fe68b176caca587714dc5c8bb2c3d1dd1eeb2bd4cc787ac/rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eedaaccc9bb66581d4ae7c50e15856e335e57ef2734dbc5fd8ba3e2a4ab3cb6", size = 388070 }, + { url = "https://files.pythonhosted.org/packages/f2/d0/4981878f8f157e6dbea01d95e0119bf3d6b4c2c884fe64a9e6987f941104/rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28358c54fffadf0ae893f6c1050e8f8853e45df22483b7fff2f6ab6152f5d8bf", size = 419173 }, + { url = "https://files.pythonhosted.org/packages/ce/13/fc971c470da96b270d2f64fedee987351bd935dc3016932a5cdcb1a88a2a/rpds_py-0.23.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:633462ef7e61d839171bf206551d5ab42b30b71cac8f10a64a662536e057fdef", size = 559048 }, + { url = "https://files.pythonhosted.org/packages/42/02/be91e1de139ec8b4f9fec4192fd779ba48af281cfc762c0ca4c15b945484/rpds_py-0.23.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a98f510d86f689fcb486dc59e6e363af04151e5260ad1bdddb5625c10f1e95f8", size = 584773 }, + { url = "https://files.pythonhosted.org/packages/27/28/3af8a1956df3edc41d884267d766dc096496dafc83f02f764a475eca0b4a/rpds_py-0.23.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e0397dd0b3955c61ef9b22838144aa4bef6f0796ba5cc8edfc64d468b93798b4", size = 555153 }, + { url = "https://files.pythonhosted.org/packages/5e/bb/e45f51c4e1327dea3c72b846c6de129eebacb7a6cb309af7af35d0578c80/rpds_py-0.23.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:75307599f0d25bf6937248e5ac4e3bde5ea72ae6618623b86146ccc7845ed00b", size = 233827 }, ] [[package]] @@ -4563,61 +4827,61 @@ wheels = [ [[package]] name = "ruff" -version = "0.9.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/80/63/77ecca9d21177600f551d1c58ab0e5a0b260940ea7312195bd2a4798f8a8/ruff-0.9.2.tar.gz", hash = "sha256:b5eceb334d55fae5f316f783437392642ae18e16dcf4f1858d55d3c2a0f8f5d0", size = 3553799 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/af/b9/0e168e4e7fb3af851f739e8f07889b91d1a33a30fca8c29fa3149d6b03ec/ruff-0.9.2-py3-none-linux_armv6l.whl", hash = 
"sha256:80605a039ba1454d002b32139e4970becf84b5fee3a3c3bf1c2af6f61a784347", size = 11652408 }, - { url = "https://files.pythonhosted.org/packages/2c/22/08ede5db17cf701372a461d1cb8fdde037da1d4fa622b69ac21960e6237e/ruff-0.9.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b9aab82bb20afd5f596527045c01e6ae25a718ff1784cb92947bff1f83068b00", size = 11587553 }, - { url = "https://files.pythonhosted.org/packages/42/05/dedfc70f0bf010230229e33dec6e7b2235b2a1b8cbb2a991c710743e343f/ruff-0.9.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fbd337bac1cfa96be615f6efcd4bc4d077edbc127ef30e2b8ba2a27e18c054d4", size = 11020755 }, - { url = "https://files.pythonhosted.org/packages/df/9b/65d87ad9b2e3def67342830bd1af98803af731243da1255537ddb8f22209/ruff-0.9.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b35259b0cbf8daa22a498018e300b9bb0174c2bbb7bcba593935158a78054d", size = 11826502 }, - { url = "https://files.pythonhosted.org/packages/93/02/f2239f56786479e1a89c3da9bc9391120057fc6f4a8266a5b091314e72ce/ruff-0.9.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b6a9701d1e371bf41dca22015c3f89769da7576884d2add7317ec1ec8cb9c3c", size = 11390562 }, - { url = "https://files.pythonhosted.org/packages/c9/37/d3a854dba9931f8cb1b2a19509bfe59e00875f48ade632e95aefcb7a0aee/ruff-0.9.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cc53e68b3c5ae41e8faf83a3b89f4a5d7b2cb666dff4b366bb86ed2a85b481f", size = 12548968 }, - { url = "https://files.pythonhosted.org/packages/fa/c3/c7b812bb256c7a1d5553433e95980934ffa85396d332401f6b391d3c4569/ruff-0.9.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8efd9da7a1ee314b910da155ca7e8953094a7c10d0c0a39bfde3fcfd2a015684", size = 13187155 }, - { url = "https://files.pythonhosted.org/packages/bd/5a/3c7f9696a7875522b66aa9bba9e326e4e5894b4366bd1dc32aa6791cb1ff/ruff-0.9.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3292c5a22ea9a5f9a185e2d131dc7f98f8534a32fb6d2ee7b9944569239c648d", size = 12704674 }, - { url = "https://files.pythonhosted.org/packages/be/d6/d908762257a96ce5912187ae9ae86792e677ca4f3dc973b71e7508ff6282/ruff-0.9.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a605fdcf6e8b2d39f9436d343d1f0ff70c365a1e681546de0104bef81ce88df", size = 14529328 }, - { url = "https://files.pythonhosted.org/packages/2d/c2/049f1e6755d12d9cd8823242fa105968f34ee4c669d04cac8cea51a50407/ruff-0.9.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c547f7f256aa366834829a08375c297fa63386cbe5f1459efaf174086b564247", size = 12385955 }, - { url = "https://files.pythonhosted.org/packages/91/5a/a9bdb50e39810bd9627074e42743b00e6dc4009d42ae9f9351bc3dbc28e7/ruff-0.9.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d18bba3d3353ed916e882521bc3e0af403949dbada344c20c16ea78f47af965e", size = 11810149 }, - { url = "https://files.pythonhosted.org/packages/e5/fd/57df1a0543182f79a1236e82a79c68ce210efb00e97c30657d5bdb12b478/ruff-0.9.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b338edc4610142355ccf6b87bd356729b62bf1bc152a2fad5b0c7dc04af77bfe", size = 11479141 }, - { url = "https://files.pythonhosted.org/packages/dc/16/bc3fd1d38974f6775fc152a0554f8c210ff80f2764b43777163c3c45d61b/ruff-0.9.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:492a5e44ad9b22a0ea98cf72e40305cbdaf27fac0d927f8bc9e1df316dcc96eb", size = 12014073 }, - { url = 
"https://files.pythonhosted.org/packages/47/6b/e4ca048a8f2047eb652e1e8c755f384d1b7944f69ed69066a37acd4118b0/ruff-0.9.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:af1e9e9fe7b1f767264d26b1075ac4ad831c7db976911fa362d09b2d0356426a", size = 12435758 }, - { url = "https://files.pythonhosted.org/packages/c2/40/4d3d6c979c67ba24cf183d29f706051a53c36d78358036a9cd21421582ab/ruff-0.9.2-py3-none-win32.whl", hash = "sha256:71cbe22e178c5da20e1514e1e01029c73dc09288a8028a5d3446e6bba87a5145", size = 9796916 }, - { url = "https://files.pythonhosted.org/packages/c3/ef/7f548752bdb6867e6939489c87fe4da489ab36191525fadc5cede2a6e8e2/ruff-0.9.2-py3-none-win_amd64.whl", hash = "sha256:c5e1d6abc798419cf46eed03f54f2e0c3adb1ad4b801119dedf23fcaf69b55b5", size = 10773080 }, - { url = "https://files.pythonhosted.org/packages/0e/4e/33df635528292bd2d18404e4daabcd74ca8a9853b2e1df85ed3d32d24362/ruff-0.9.2-py3-none-win_arm64.whl", hash = "sha256:a1b63fa24149918f8b37cef2ee6fff81f24f0d74b6f0bdc37bc3e1f2143e41c6", size = 10001738 }, +version = "0.9.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/39/8b/a86c300359861b186f18359adf4437ac8e4c52e42daa9eedc731ef9d5b53/ruff-0.9.7.tar.gz", hash = "sha256:643757633417907510157b206e490c3aa11cab0c087c912f60e07fbafa87a4c6", size = 3669813 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/f3/3a1d22973291226df4b4e2ff70196b926b6f910c488479adb0eeb42a0d7f/ruff-0.9.7-py3-none-linux_armv6l.whl", hash = "sha256:99d50def47305fe6f233eb8dabfd60047578ca87c9dcb235c9723ab1175180f4", size = 11774588 }, + { url = "https://files.pythonhosted.org/packages/8e/c9/b881f4157b9b884f2994fd08ee92ae3663fb24e34b0372ac3af999aa7fc6/ruff-0.9.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d59105ae9c44152c3d40a9c40d6331a7acd1cdf5ef404fbe31178a77b174ea66", size = 11746848 }, + { url = "https://files.pythonhosted.org/packages/14/89/2f546c133f73886ed50a3d449e6bf4af27d92d2f960a43a93d89353f0945/ruff-0.9.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f313b5800483770bd540cddac7c90fc46f895f427b7820f18fe1822697f1fec9", size = 11177525 }, + { url = "https://files.pythonhosted.org/packages/d7/93/6b98f2c12bf28ab9def59c50c9c49508519c5b5cfecca6de871cf01237f6/ruff-0.9.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042ae32b41343888f59c0a4148f103208bf6b21c90118d51dc93a68366f4e903", size = 11996580 }, + { url = "https://files.pythonhosted.org/packages/8e/3f/b3fcaf4f6d875e679ac2b71a72f6691a8128ea3cb7be07cbb249f477c061/ruff-0.9.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87862589373b33cc484b10831004e5e5ec47dc10d2b41ba770e837d4f429d721", size = 11525674 }, + { url = "https://files.pythonhosted.org/packages/f0/48/33fbf18defb74d624535d5d22adcb09a64c9bbabfa755bc666189a6b2210/ruff-0.9.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a17e1e01bee0926d351a1ee9bc15c445beae888f90069a6192a07a84af544b6b", size = 12739151 }, + { url = "https://files.pythonhosted.org/packages/63/b5/7e161080c5e19fa69495cbab7c00975ef8a90f3679caa6164921d7f52f4a/ruff-0.9.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7c1f880ac5b2cbebd58b8ebde57069a374865c73f3bf41f05fe7a179c1c8ef22", size = 13416128 }, + { url = "https://files.pythonhosted.org/packages/4e/c8/b5e7d61fb1c1b26f271ac301ff6d9de5e4d9a9a63f67d732fa8f200f0c88/ruff-0.9.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e63fc20143c291cab2841dbb8260e96bafbe1ba13fd3d60d28be2c71e312da49", size = 
12870858 }, + { url = "https://files.pythonhosted.org/packages/da/cb/2a1a8e4e291a54d28259f8fc6a674cd5b8833e93852c7ef5de436d6ed729/ruff-0.9.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91ff963baed3e9a6a4eba2a02f4ca8eaa6eba1cc0521aec0987da8d62f53cbef", size = 14786046 }, + { url = "https://files.pythonhosted.org/packages/ca/6c/c8f8a313be1943f333f376d79724260da5701426c0905762e3ddb389e3f4/ruff-0.9.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88362e3227c82f63eaebf0b2eff5b88990280fb1ecf7105523883ba8c3aaf6fb", size = 12550834 }, + { url = "https://files.pythonhosted.org/packages/9d/ad/f70cf5e8e7c52a25e166bdc84c082163c9c6f82a073f654c321b4dff9660/ruff-0.9.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0372c5a90349f00212270421fe91874b866fd3626eb3b397ede06cd385f6f7e0", size = 11961307 }, + { url = "https://files.pythonhosted.org/packages/52/d5/4f303ea94a5f4f454daf4d02671b1fbfe2a318b5fcd009f957466f936c50/ruff-0.9.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d76b8ab60e99e6424cd9d3d923274a1324aefce04f8ea537136b8398bbae0a62", size = 11612039 }, + { url = "https://files.pythonhosted.org/packages/eb/c8/bd12a23a75603c704ce86723be0648ba3d4ecc2af07eecd2e9fa112f7e19/ruff-0.9.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0c439bdfc8983e1336577f00e09a4e7a78944fe01e4ea7fe616d00c3ec69a3d0", size = 12168177 }, + { url = "https://files.pythonhosted.org/packages/cc/57/d648d4f73400fef047d62d464d1a14591f2e6b3d4a15e93e23a53c20705d/ruff-0.9.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:115d1f15e8fdd445a7b4dc9a30abae22de3f6bcabeb503964904471691ef7606", size = 12610122 }, + { url = "https://files.pythonhosted.org/packages/49/79/acbc1edd03ac0e2a04ae2593555dbc9990b34090a9729a0c4c0cf20fb595/ruff-0.9.7-py3-none-win32.whl", hash = "sha256:e9ece95b7de5923cbf38893f066ed2872be2f2f477ba94f826c8defdd6ec6b7d", size = 9988751 }, + { url = "https://files.pythonhosted.org/packages/6d/95/67153a838c6b6ba7a2401241fd8a00cd8c627a8e4a0491b8d853dedeffe0/ruff-0.9.7-py3-none-win_amd64.whl", hash = "sha256:3770fe52b9d691a15f0b87ada29c45324b2ace8f01200fb0c14845e499eb0c2c", size = 11002987 }, + { url = "https://files.pythonhosted.org/packages/63/6a/aca01554949f3a401991dc32fe22837baeaccb8a0d868256cbb26a029778/ruff-0.9.7-py3-none-win_arm64.whl", hash = "sha256:b075a700b2533feb7a01130ff656a4ec0d5f340bb540ad98759b8401c32c2037", size = 10177763 }, ] [[package]] name = "s3transfer" -version = "0.11.1" +version = "0.11.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1a/aa/fdd958c626b00e3f046d4004363e7f1a2aba4354f78d65ceb3b217fa5eb8/s3transfer-0.11.1.tar.gz", hash = "sha256:3f25c900a367c8b7f7d8f9c34edc87e300bde424f779dc9f0a8ae4f9df9264f6", size = 146952 } +sdist = { url = "https://files.pythonhosted.org/packages/39/24/1390172471d569e281fcfd29b92f2f73774e95972c965d14b6c802ff2352/s3transfer-0.11.3.tar.gz", hash = "sha256:edae4977e3a122445660c7c114bba949f9d191bae3b34a096f18a1c8c354527a", size = 148042 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/ce/22673f4a85ccc640735b4f8d12178a0f41b5d3c6eda7f33756d10ce56901/s3transfer-0.11.1-py3-none-any.whl", hash = "sha256:8fa0aa48177be1f3425176dfe1ab85dcd3d962df603c3dbfc585e6bf857ef0ff", size = 84111 }, + { url = 
"https://files.pythonhosted.org/packages/e4/81/48c41b554a54d75d4407740abb60e3a102ae416284df04d1dbdcbe3dbf24/s3transfer-0.11.3-py3-none-any.whl", hash = "sha256:ca855bdeb885174b5ffa95b9913622459d4ad8e331fc98eb01e6d5eb6a30655d", size = 84246 }, ] [[package]] name = "safetensors" -version = "0.5.2" +version = "0.5.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f4/4f/2ef9ef1766f8c194b01b67a63a444d2e557c8fe1d82faf3ebd85f370a917/safetensors-0.5.2.tar.gz", hash = "sha256:cb4a8d98ba12fa016f4241932b1fc5e702e5143f5374bba0bbcf7ddc1c4cf2b8", size = 66957 } +sdist = { url = "https://files.pythonhosted.org/packages/71/7e/2d5d6ee7b40c0682315367ec7475693d110f512922d582fef1bd4a63adc3/safetensors-0.5.3.tar.gz", hash = "sha256:b6b0d6ecacec39a4fdd99cc19f4576f5219ce858e6fd8dbe7609df0b8dc56965", size = 67210 } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/d1/017e31e75e274492a11a456a9e7c171f8f7911fe50735b4ec6ff37221220/safetensors-0.5.2-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:45b6092997ceb8aa3801693781a71a99909ab9cc776fbc3fa9322d29b1d3bef2", size = 427067 }, - { url = "https://files.pythonhosted.org/packages/24/84/e9d3ff57ae50dd0028f301c9ee064e5087fe8b00e55696677a0413c377a7/safetensors-0.5.2-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6d0d6a8ee2215a440e1296b843edf44fd377b055ba350eaba74655a2fe2c4bae", size = 408856 }, - { url = "https://files.pythonhosted.org/packages/f1/1d/fe95f5dd73db16757b11915e8a5106337663182d0381811c81993e0014a9/safetensors-0.5.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86016d40bcaa3bcc9a56cd74d97e654b5f4f4abe42b038c71e4f00a089c4526c", size = 450088 }, - { url = "https://files.pythonhosted.org/packages/cf/21/e527961b12d5ab528c6e47b92d5f57f33563c28a972750b238b871924e49/safetensors-0.5.2-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:990833f70a5f9c7d3fc82c94507f03179930ff7d00941c287f73b6fcbf67f19e", size = 458966 }, - { url = "https://files.pythonhosted.org/packages/a5/8b/1a037d7a57f86837c0b41905040369aea7d8ca1ec4b2a77592372b2ec380/safetensors-0.5.2-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dfa7c2f3fe55db34eba90c29df94bcdac4821043fc391cb5d082d9922013869", size = 509915 }, - { url = "https://files.pythonhosted.org/packages/61/3d/03dd5cfd33839df0ee3f4581a20bd09c40246d169c0e4518f20b21d5f077/safetensors-0.5.2-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:46ff2116150ae70a4e9c490d2ab6b6e1b1b93f25e520e540abe1b81b48560c3a", size = 527664 }, - { url = "https://files.pythonhosted.org/packages/c5/dc/8952caafa9a10a3c0f40fa86bacf3190ae7f55fa5eef87415b97b29cb97f/safetensors-0.5.2-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab696dfdc060caffb61dbe4066b86419107a24c804a4e373ba59be699ebd8d5", size = 461978 }, - { url = "https://files.pythonhosted.org/packages/60/da/82de1fcf1194e3dbefd4faa92dc98b33c06bed5d67890e0962dd98e18287/safetensors-0.5.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03c937100f38c9ff4c1507abea9928a6a9b02c9c1c9c3609ed4fb2bf413d4975", size = 491253 }, - { url = "https://files.pythonhosted.org/packages/5a/9a/d90e273c25f90c3ba1b0196a972003786f04c39e302fbd6649325b1272bb/safetensors-0.5.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a00e737948791b94dad83cf0eafc09a02c4d8c2171a239e8c8572fe04e25960e", size = 628644 }, - { url = 
"https://files.pythonhosted.org/packages/70/3c/acb23e05aa34b4f5edd2e7f393f8e6480fbccd10601ab42cd03a57d4ab5f/safetensors-0.5.2-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:d3a06fae62418ec8e5c635b61a8086032c9e281f16c63c3af46a6efbab33156f", size = 721648 }, - { url = "https://files.pythonhosted.org/packages/71/45/eaa3dba5253a7c6931230dc961641455710ab231f8a89cb3c4c2af70f8c8/safetensors-0.5.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:1506e4c2eda1431099cebe9abf6c76853e95d0b7a95addceaa74c6019c65d8cf", size = 659588 }, - { url = "https://files.pythonhosted.org/packages/b0/71/2f9851164f821064d43b481ddbea0149c2d676c4f4e077b178e7eeaa6660/safetensors-0.5.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5c5b5d9da594f638a259fca766046f44c97244cc7ab8bef161b3e80d04becc76", size = 632533 }, - { url = "https://files.pythonhosted.org/packages/00/f1/5680e2ef61d9c61454fad82c344f0e40b8741a9dbd1e31484f0d31a9b1c3/safetensors-0.5.2-cp38-abi3-win32.whl", hash = "sha256:fe55c039d97090d1f85277d402954dd6ad27f63034fa81985a9cc59655ac3ee2", size = 291167 }, - { url = "https://files.pythonhosted.org/packages/86/ca/aa489392ec6fb59223ffce825461e1f811a3affd417121a2088be7a5758b/safetensors-0.5.2-cp38-abi3-win_amd64.whl", hash = "sha256:78abdddd03a406646107f973c7843276e7b64e5e32623529dc17f3d94a20f589", size = 303756 }, + { url = "https://files.pythonhosted.org/packages/18/ae/88f6c49dbd0cc4da0e08610019a3c78a7d390879a919411a410a1876d03a/safetensors-0.5.3-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd20eb133db8ed15b40110b7c00c6df51655a2998132193de2f75f72d99c7073", size = 436917 }, + { url = "https://files.pythonhosted.org/packages/b8/3b/11f1b4a2f5d2ab7da34ecc062b0bc301f2be024d110a6466726bec8c055c/safetensors-0.5.3-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:21d01c14ff6c415c485616b8b0bf961c46b3b343ca59110d38d744e577f9cce7", size = 418419 }, + { url = "https://files.pythonhosted.org/packages/5d/9a/add3e6fef267658075c5a41573c26d42d80c935cdc992384dfae435feaef/safetensors-0.5.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11bce6164887cd491ca75c2326a113ba934be596e22b28b1742ce27b1d076467", size = 459493 }, + { url = "https://files.pythonhosted.org/packages/df/5c/bf2cae92222513cc23b3ff85c4a1bb2811a2c3583ac0f8e8d502751de934/safetensors-0.5.3-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4a243be3590bc3301c821da7a18d87224ef35cbd3e5f5727e4e0728b8172411e", size = 472400 }, + { url = "https://files.pythonhosted.org/packages/58/11/7456afb740bd45782d0f4c8e8e1bb9e572f1bf82899fb6ace58af47b4282/safetensors-0.5.3-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8bd84b12b1670a6f8e50f01e28156422a2bc07fb16fc4e98bded13039d688a0d", size = 522891 }, + { url = "https://files.pythonhosted.org/packages/57/3d/fe73a9d2ace487e7285f6e157afee2383bd1ddb911b7cb44a55cf812eae3/safetensors-0.5.3-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:391ac8cab7c829452175f871fcaf414aa1e292b5448bd02620f675a7f3e7abb9", size = 537694 }, + { url = "https://files.pythonhosted.org/packages/a6/f8/dae3421624fcc87a89d42e1898a798bc7ff72c61f38973a65d60df8f124c/safetensors-0.5.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cead1fa41fc54b1e61089fa57452e8834f798cb1dc7a09ba3524f1eb08e0317a", size = 471642 }, + { url = "https://files.pythonhosted.org/packages/ce/20/1fbe16f9b815f6c5a672f5b760951e20e17e43f67f231428f871909a37f6/safetensors-0.5.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:1077f3e94182d72618357b04b5ced540ceb71c8a813d3319f1aba448e68a770d", size = 502241 }, + { url = "https://files.pythonhosted.org/packages/5f/18/8e108846b506487aa4629fe4116b27db65c3dde922de2c8e0cc1133f3f29/safetensors-0.5.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:799021e78287bac619c7b3f3606730a22da4cda27759ddf55d37c8db7511c74b", size = 638001 }, + { url = "https://files.pythonhosted.org/packages/82/5a/c116111d8291af6c8c8a8b40628fe833b9db97d8141c2a82359d14d9e078/safetensors-0.5.3-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:df26da01aaac504334644e1b7642fa000bfec820e7cef83aeac4e355e03195ff", size = 734013 }, + { url = "https://files.pythonhosted.org/packages/7d/ff/41fcc4d3b7de837963622e8610d998710705bbde9a8a17221d85e5d0baad/safetensors-0.5.3-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:32c3ef2d7af8b9f52ff685ed0bc43913cdcde135089ae322ee576de93eae5135", size = 670687 }, + { url = "https://files.pythonhosted.org/packages/40/ad/2b113098e69c985a3d8fbda4b902778eae4a35b7d5188859b4a63d30c161/safetensors-0.5.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:37f1521be045e56fc2b54c606d4455573e717b2d887c579ee1dbba5f868ece04", size = 643147 }, + { url = "https://files.pythonhosted.org/packages/0a/0c/95aeb51d4246bd9a3242d3d8349c1112b4ee7611a4b40f0c5c93b05f001d/safetensors-0.5.3-cp38-abi3-win32.whl", hash = "sha256:cfc0ec0846dcf6763b0ed3d1846ff36008c6e7290683b61616c4b040f6a54ace", size = 296677 }, + { url = "https://files.pythonhosted.org/packages/69/e2/b011c38e5394c4c18fb5500778a55ec43ad6106126e74723ffaee246f56e/safetensors-0.5.3-cp38-abi3-win_amd64.whl", hash = "sha256:836cbbc320b47e80acd40e44c8682db0e8ad7123209f69b093def21ec7cafd11", size = 308878 }, ] [[package]] @@ -4660,52 +4924,58 @@ wheels = [ [[package]] name = "scipy" -version = "1.15.1" +version = "1.15.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/76/c6/8eb0654ba0c7d0bb1bf67bf8fbace101a8e4f250f7722371105e8b6f68fc/scipy-1.15.1.tar.gz", hash = "sha256:033a75ddad1463970c96a88063a1df87ccfddd526437136b6ee81ff0312ebdf6", size = 59407493 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/86/53/b204ce5a4433f1864001b9d16f103b9c25f5002a602ae83585d0ea5f9c4a/scipy-1.15.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:c64ded12dcab08afff9e805a67ff4480f5e69993310e093434b10e85dc9d43e1", size = 41414518 }, - { url = "https://files.pythonhosted.org/packages/c7/fc/54ffa7a8847f7f303197a6ba65a66104724beba2e38f328135a78f0dc480/scipy-1.15.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5b190b935e7db569960b48840e5bef71dc513314cc4e79a1b7d14664f57fd4ff", size = 32519265 }, - { url = "https://files.pythonhosted.org/packages/f1/77/a98b8ba03d6f371dc31a38719affd53426d4665729dcffbed4afe296784a/scipy-1.15.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:4b17d4220df99bacb63065c76b0d1126d82bbf00167d1730019d2a30d6ae01ea", size = 24792859 }, - { url = "https://files.pythonhosted.org/packages/a7/78/70bb9f0df7444b18b108580934bfef774822e28fd34a68e5c263c7d2828a/scipy-1.15.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:63b9b6cd0333d0eb1a49de6f834e8aeaefe438df8f6372352084535ad095219e", size = 27886506 }, - { url = "https://files.pythonhosted.org/packages/14/a7/f40f6033e06de4176ddd6cc8c3ae9f10a226c3bca5d6b4ab883bc9914a14/scipy-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9f151e9fb60fbf8e52426132f473221a49362091ce7a5e72f8aa41f8e0da4f25", size = 38375041 }, - { url = "https://files.pythonhosted.org/packages/17/03/390a1c5c61fd76b0fa4b3c5aa3bdd7e60f6c46f712924f1a9df5705ec046/scipy-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e10b1dd56ce92fba3e786007322542361984f8463c6d37f6f25935a5a6ef52", size = 40597556 }, - { url = "https://files.pythonhosted.org/packages/4e/70/fa95b3ae026b97eeca58204a90868802e5155ac71b9d7bdee92b68115dd3/scipy-1.15.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5dff14e75cdbcf07cdaa1c7707db6017d130f0af9ac41f6ce443a93318d6c6e0", size = 42938505 }, - { url = "https://files.pythonhosted.org/packages/d6/07/427859116bdd71847c898180f01802691f203c3e2455a1eb496130ff07c5/scipy-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:f82fcf4e5b377f819542fbc8541f7b5fbcf1c0017d0df0bc22c781bf60abc4d8", size = 43909663 }, - { url = "https://files.pythonhosted.org/packages/8e/2e/7b71312da9c2dabff53e7c9a9d08231bc34d9d8fdabe88a6f1155b44591c/scipy-1.15.1-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:5bd8d27d44e2c13d0c1124e6a556454f52cd3f704742985f6b09e75e163d20d2", size = 41424362 }, - { url = "https://files.pythonhosted.org/packages/81/8c/ab85f1aa1cc200c796532a385b6ebf6a81089747adc1da7482a062acc46c/scipy-1.15.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:be3deeb32844c27599347faa077b359584ba96664c5c79d71a354b80a0ad0ce0", size = 32535910 }, - { url = "https://files.pythonhosted.org/packages/3b/9c/6f4b787058daa8d8da21ddff881b4320e28de4704a65ec147adb50cb2230/scipy-1.15.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:5eb0ca35d4b08e95da99a9f9c400dc9f6c21c424298a0ba876fdc69c7afacedf", size = 24809398 }, - { url = "https://files.pythonhosted.org/packages/16/2b/949460a796df75fc7a1ee1becea202cf072edbe325ebe29f6d2029947aa7/scipy-1.15.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:74bb864ff7640dea310a1377d8567dc2cb7599c26a79ca852fc184cc851954ac", size = 27918045 }, - { url = "https://files.pythonhosted.org/packages/5f/36/67fe249dd7ccfcd2a38b25a640e3af7e59d9169c802478b6035ba91dfd6d/scipy-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:667f950bf8b7c3a23b4199db24cb9bf7512e27e86d0e3813f015b74ec2c6e3df", size = 38332074 }, - { url = "https://files.pythonhosted.org/packages/fc/da/452e1119e6f720df3feb588cce3c42c5e3d628d4bfd4aec097bd30b7de0c/scipy-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:395be70220d1189756068b3173853029a013d8c8dd5fd3d1361d505b2aa58fa7", size = 40588469 }, - { url = "https://files.pythonhosted.org/packages/7f/71/5f94aceeac99a4941478af94fe9f459c6752d497035b6b0761a700f5f9ff/scipy-1.15.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ce3a000cd28b4430426db2ca44d96636f701ed12e2b3ca1f2b1dd7abdd84b39a", size = 42965214 }, - { url = "https://files.pythonhosted.org/packages/af/25/caa430865749d504271757cafd24066d596217e83326155993980bc22f97/scipy-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:3fe1d95944f9cf6ba77aa28b82dd6bb2a5b52f2026beb39ecf05304b8392864b", size = 43896034 }, - { url = "https://files.pythonhosted.org/packages/d8/6e/a9c42d0d39e09ed7fd203d0ac17adfea759cba61ab457671fe66e523dbec/scipy-1.15.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c09aa9d90f3500ea4c9b393ee96f96b0ccb27f2f350d09a47f533293c78ea776", size = 41478318 }, - { url = "https://files.pythonhosted.org/packages/04/ee/e3e535c81828618878a7433992fecc92fa4df79393f31a8fea1d05615091/scipy-1.15.1-cp312-cp312-macosx_12_0_arm64.whl", hash 
= "sha256:0ac102ce99934b162914b1e4a6b94ca7da0f4058b6d6fd65b0cef330c0f3346f", size = 32596696 }, - { url = "https://files.pythonhosted.org/packages/c4/5e/b1b0124be8e76f87115f16b8915003eec4b7060298117715baf13f51942c/scipy-1.15.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:09c52320c42d7f5c7748b69e9f0389266fd4f82cf34c38485c14ee976cb8cb04", size = 24870366 }, - { url = "https://files.pythonhosted.org/packages/14/36/c00cb73eefda85946172c27913ab995c6ad4eee00fa4f007572e8c50cd51/scipy-1.15.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:cdde8414154054763b42b74fe8ce89d7f3d17a7ac5dd77204f0e142cdc9239e9", size = 28007461 }, - { url = "https://files.pythonhosted.org/packages/68/94/aff5c51b3799349a9d1e67a056772a0f8a47db371e83b498d43467806557/scipy-1.15.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c9d8fc81d6a3b6844235e6fd175ee1d4c060163905a2becce8e74cb0d7554ce", size = 38068174 }, - { url = "https://files.pythonhosted.org/packages/b0/3c/0de11ca154e24a57b579fb648151d901326d3102115bc4f9a7a86526ce54/scipy-1.15.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fb57b30f0017d4afa5fe5f5b150b8f807618819287c21cbe51130de7ccdaed2", size = 40249869 }, - { url = "https://files.pythonhosted.org/packages/15/09/472e8d0a6b33199d1bb95e49bedcabc0976c3724edd9b0ef7602ccacf41e/scipy-1.15.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:491d57fe89927fa1aafbe260f4cfa5ffa20ab9f1435025045a5315006a91b8f5", size = 42629068 }, - { url = "https://files.pythonhosted.org/packages/ff/ba/31c7a8131152822b3a2cdeba76398ffb404d81d640de98287d236da90c49/scipy-1.15.1-cp312-cp312-win_amd64.whl", hash = "sha256:900f3fa3db87257510f011c292a5779eb627043dd89731b9c461cd16ef76ab3d", size = 43621992 }, - { url = "https://files.pythonhosted.org/packages/2b/bf/dd68965a4c5138a630eeed0baec9ae96e5d598887835bdde96cdd2fe4780/scipy-1.15.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:100193bb72fbff37dbd0bf14322314fc7cbe08b7ff3137f11a34d06dc0ee6b85", size = 41441136 }, - { url = "https://files.pythonhosted.org/packages/ef/5e/4928581312922d7e4d416d74c416a660addec4dd5ea185401df2269ba5a0/scipy-1.15.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:2114a08daec64980e4b4cbdf5bee90935af66d750146b1d2feb0d3ac30613692", size = 32533699 }, - { url = "https://files.pythonhosted.org/packages/32/90/03f99c43041852837686898c66767787cd41c5843d7a1509c39ffef683e9/scipy-1.15.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:6b3e71893c6687fc5e29208d518900c24ea372a862854c9888368c0b267387ab", size = 24807289 }, - { url = "https://files.pythonhosted.org/packages/9d/52/bfe82b42ae112eaba1af2f3e556275b8727d55ac6e4932e7aef337a9d9d4/scipy-1.15.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:837299eec3d19b7e042923448d17d95a86e43941104d33f00da7e31a0f715d3c", size = 27929844 }, - { url = "https://files.pythonhosted.org/packages/f6/77/54ff610bad600462c313326acdb035783accc6a3d5f566d22757ad297564/scipy-1.15.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82add84e8a9fb12af5c2c1a3a3f1cb51849d27a580cb9e6bd66226195142be6e", size = 38031272 }, - { url = "https://files.pythonhosted.org/packages/f1/26/98585cbf04c7cf503d7eb0a1966df8a268154b5d923c5fe0c1ed13154c49/scipy-1.15.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:070d10654f0cb6abd295bc96c12656f948e623ec5f9a4eab0ddb1466c000716e", size = 40210217 }, - { url = 
"https://files.pythonhosted.org/packages/fd/3f/3d2285eb6fece8bc5dbb2f9f94d61157d61d155e854fd5fea825b8218f12/scipy-1.15.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:55cc79ce4085c702ac31e49b1e69b27ef41111f22beafb9b49fea67142b696c4", size = 42587785 }, - { url = "https://files.pythonhosted.org/packages/48/7d/5b5251984bf0160d6533695a74a5fddb1fa36edd6f26ffa8c871fbd4782a/scipy-1.15.1-cp313-cp313-win_amd64.whl", hash = "sha256:c352c1b6d7cac452534517e022f8f7b8d139cd9f27e6fbd9f3cbd0bfd39f5bef", size = 43640439 }, - { url = "https://files.pythonhosted.org/packages/e7/b8/0e092f592d280496de52e152582030f8a270b194f87f890e1a97c5599b81/scipy-1.15.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0458839c9f873062db69a03de9a9765ae2e694352c76a16be44f93ea45c28d2b", size = 41619862 }, - { url = "https://files.pythonhosted.org/packages/f6/19/0b6e1173aba4db9e0b7aa27fe45019857fb90d6904038b83927cbe0a6c1d/scipy-1.15.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:af0b61c1de46d0565b4b39c6417373304c1d4f5220004058bdad3061c9fa8a95", size = 32610387 }, - { url = "https://files.pythonhosted.org/packages/e7/02/754aae3bd1fa0f2479ade3cfdf1732ecd6b05853f63eee6066a32684563a/scipy-1.15.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:71ba9a76c2390eca6e359be81a3e879614af3a71dfdabb96d1d7ab33da6f2364", size = 24883814 }, - { url = "https://files.pythonhosted.org/packages/1f/ac/d7906201604a2ea3b143bb0de51b3966f66441ba50b7dc182c4505b3edf9/scipy-1.15.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:14eaa373c89eaf553be73c3affb11ec6c37493b7eaaf31cf9ac5dffae700c2e0", size = 27944865 }, - { url = "https://files.pythonhosted.org/packages/84/9d/8f539002b5e203723af6a6f513a45e0a7671e9dabeedb08f417ac17e4edc/scipy-1.15.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f735bc41bd1c792c96bc426dece66c8723283695f02df61dcc4d0a707a42fc54", size = 39883261 }, - { url = "https://files.pythonhosted.org/packages/97/c0/62fd3bab828bcccc9b864c5997645a3b86372a35941cdaf677565c25c98d/scipy-1.15.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2722a021a7929d21168830790202a75dbb20b468a8133c74a2c0230c72626b6c", size = 42093299 }, - { url = "https://files.pythonhosted.org/packages/e4/1f/5d46a8d94e9f6d2c913cbb109e57e7eed914de38ea99e2c4d69a9fc93140/scipy-1.15.1-cp313-cp313t-win_amd64.whl", hash = "sha256:bc7136626261ac1ed988dca56cfc4ab5180f75e0ee52e58f1e6aa74b5f3eacd5", size = 43181730 }, +sdist = { url = "https://files.pythonhosted.org/packages/b7/b9/31ba9cd990e626574baf93fbc1ac61cf9ed54faafd04c479117517661637/scipy-1.15.2.tar.gz", hash = "sha256:cd58a314d92838f7e6f755c8a2167ead4f27e1fd5c1251fd54289569ef3495ec", size = 59417316 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/df/ef233fff6838fe6f7840d69b5ef9f20d2b5c912a8727b21ebf876cb15d54/scipy-1.15.2-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a2ec871edaa863e8213ea5df811cd600734f6400b4af272e1c011e69401218e9", size = 38692502 }, + { url = "https://files.pythonhosted.org/packages/5c/20/acdd4efb8a68b842968f7bc5611b1aeb819794508771ad104de418701422/scipy-1.15.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:6f223753c6ea76983af380787611ae1291e3ceb23917393079dcc746ba60cfb5", size = 30085508 }, + { url = "https://files.pythonhosted.org/packages/42/55/39cf96ca7126f1e78ee72a6344ebdc6702fc47d037319ad93221063e6cf4/scipy-1.15.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:ecf797d2d798cf7c838c6d98321061eb3e72a74710e6c40540f0e8087e3b499e", size = 22359166 }, + { url = 
"https://files.pythonhosted.org/packages/51/48/708d26a4ab8a1441536bf2dfcad1df0ca14a69f010fba3ccbdfc02df7185/scipy-1.15.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:9b18aa747da280664642997e65aab1dd19d0c3d17068a04b3fe34e2559196cb9", size = 25112047 }, + { url = "https://files.pythonhosted.org/packages/dd/65/f9c5755b995ad892020381b8ae11f16d18616208e388621dfacc11df6de6/scipy-1.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87994da02e73549dfecaed9e09a4f9d58a045a053865679aeb8d6d43747d4df3", size = 35536214 }, + { url = "https://files.pythonhosted.org/packages/de/3c/c96d904b9892beec978562f64d8cc43f9cca0842e65bd3cd1b7f7389b0ba/scipy-1.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69ea6e56d00977f355c0f84eba69877b6df084516c602d93a33812aa04d90a3d", size = 37646981 }, + { url = "https://files.pythonhosted.org/packages/3d/74/c2d8a24d18acdeae69ed02e132b9bc1bb67b7bee90feee1afe05a68f9d67/scipy-1.15.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:888307125ea0c4466287191e5606a2c910963405ce9671448ff9c81c53f85f58", size = 37230048 }, + { url = "https://files.pythonhosted.org/packages/42/19/0aa4ce80eca82d487987eff0bc754f014dec10d20de2f66754fa4ea70204/scipy-1.15.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9412f5e408b397ff5641080ed1e798623dbe1ec0d78e72c9eca8992976fa65aa", size = 40010322 }, + { url = "https://files.pythonhosted.org/packages/d0/d2/f0683b7e992be44d1475cc144d1f1eeae63c73a14f862974b4db64af635e/scipy-1.15.2-cp310-cp310-win_amd64.whl", hash = "sha256:b5e025e903b4f166ea03b109bb241355b9c42c279ea694d8864d033727205e65", size = 41233385 }, + { url = "https://files.pythonhosted.org/packages/40/1f/bf0a5f338bda7c35c08b4ed0df797e7bafe8a78a97275e9f439aceb46193/scipy-1.15.2-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:92233b2df6938147be6fa8824b8136f29a18f016ecde986666be5f4d686a91a4", size = 38703651 }, + { url = "https://files.pythonhosted.org/packages/de/54/db126aad3874601048c2c20ae3d8a433dbfd7ba8381551e6f62606d9bd8e/scipy-1.15.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:62ca1ff3eb513e09ed17a5736929429189adf16d2d740f44e53270cc800ecff1", size = 30102038 }, + { url = "https://files.pythonhosted.org/packages/61/d8/84da3fffefb6c7d5a16968fe5b9f24c98606b165bb801bb0b8bc3985200f/scipy-1.15.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:4c6676490ad76d1c2894d77f976144b41bd1a4052107902238047fb6a473e971", size = 22375518 }, + { url = "https://files.pythonhosted.org/packages/44/78/25535a6e63d3b9c4c90147371aedb5d04c72f3aee3a34451f2dc27c0c07f/scipy-1.15.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a8bf5cb4a25046ac61d38f8d3c3426ec11ebc350246a4642f2f315fe95bda655", size = 25142523 }, + { url = "https://files.pythonhosted.org/packages/e0/22/4b4a26fe1cd9ed0bc2b2cb87b17d57e32ab72c346949eaf9288001f8aa8e/scipy-1.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a8e34cf4c188b6dd004654f88586d78f95639e48a25dfae9c5e34a6dc34547e", size = 35491547 }, + { url = "https://files.pythonhosted.org/packages/32/ea/564bacc26b676c06a00266a3f25fdfe91a9d9a2532ccea7ce6dd394541bc/scipy-1.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28a0d2c2075946346e4408b211240764759e0fabaeb08d871639b5f3b1aca8a0", size = 37634077 }, + { url = "https://files.pythonhosted.org/packages/43/c2/bfd4e60668897a303b0ffb7191e965a5da4056f0d98acfb6ba529678f0fb/scipy-1.15.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:42dabaaa798e987c425ed76062794e93a243be8f0f20fff6e7a89f4d61cb3d40", size = 37231657 }, + { url = "https://files.pythonhosted.org/packages/4a/75/5f13050bf4f84c931bcab4f4e83c212a36876c3c2244475db34e4b5fe1a6/scipy-1.15.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6f5e296ec63c5da6ba6fa0343ea73fd51b8b3e1a300b0a8cae3ed4b1122c7462", size = 40035857 }, + { url = "https://files.pythonhosted.org/packages/b9/8b/7ec1832b09dbc88f3db411f8cdd47db04505c4b72c99b11c920a8f0479c3/scipy-1.15.2-cp311-cp311-win_amd64.whl", hash = "sha256:597a0c7008b21c035831c39927406c6181bcf8f60a73f36219b69d010aa04737", size = 41217654 }, + { url = "https://files.pythonhosted.org/packages/4b/5d/3c78815cbab499610f26b5bae6aed33e227225a9fa5290008a733a64f6fc/scipy-1.15.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c4697a10da8f8765bb7c83e24a470da5797e37041edfd77fd95ba3811a47c4fd", size = 38756184 }, + { url = "https://files.pythonhosted.org/packages/37/20/3d04eb066b471b6e171827548b9ddb3c21c6bbea72a4d84fc5989933910b/scipy-1.15.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:869269b767d5ee7ea6991ed7e22b3ca1f22de73ab9a49c44bad338b725603301", size = 30163558 }, + { url = "https://files.pythonhosted.org/packages/a4/98/e5c964526c929ef1f795d4c343b2ff98634ad2051bd2bbadfef9e772e413/scipy-1.15.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:bad78d580270a4d32470563ea86c6590b465cb98f83d760ff5b0990cb5518a93", size = 22437211 }, + { url = "https://files.pythonhosted.org/packages/1d/cd/1dc7371e29195ecbf5222f9afeedb210e0a75057d8afbd942aa6cf8c8eca/scipy-1.15.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:b09ae80010f52efddb15551025f9016c910296cf70adbf03ce2a8704f3a5ad20", size = 25232260 }, + { url = "https://files.pythonhosted.org/packages/f0/24/1a181a9e5050090e0b5138c5f496fee33293c342b788d02586bc410c6477/scipy-1.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a6fd6eac1ce74a9f77a7fc724080d507c5812d61e72bd5e4c489b042455865e", size = 35198095 }, + { url = "https://files.pythonhosted.org/packages/c0/53/eaada1a414c026673eb983f8b4a55fe5eb172725d33d62c1b21f63ff6ca4/scipy-1.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b871df1fe1a3ba85d90e22742b93584f8d2b8e6124f8372ab15c71b73e428b8", size = 37297371 }, + { url = "https://files.pythonhosted.org/packages/e9/06/0449b744892ed22b7e7b9a1994a866e64895363572677a316a9042af1fe5/scipy-1.15.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:03205d57a28e18dfd39f0377d5002725bf1f19a46f444108c29bdb246b6c8a11", size = 36872390 }, + { url = "https://files.pythonhosted.org/packages/6a/6f/a8ac3cfd9505ec695c1bc35edc034d13afbd2fc1882a7c6b473e280397bb/scipy-1.15.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:601881dfb761311045b03114c5fe718a12634e5608c3b403737ae463c9885d53", size = 39700276 }, + { url = "https://files.pythonhosted.org/packages/f5/6f/e6e5aff77ea2a48dd96808bb51d7450875af154ee7cbe72188afb0b37929/scipy-1.15.2-cp312-cp312-win_amd64.whl", hash = "sha256:e7c68b6a43259ba0aab737237876e5c2c549a031ddb7abc28c7b47f22e202ded", size = 40942317 }, + { url = "https://files.pythonhosted.org/packages/53/40/09319f6e0f276ea2754196185f95cd191cb852288440ce035d5c3a931ea2/scipy-1.15.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01edfac9f0798ad6b46d9c4c9ca0e0ad23dbf0b1eb70e96adb9fa7f525eff0bf", size = 38717587 }, + { url = "https://files.pythonhosted.org/packages/fe/c3/2854f40ecd19585d65afaef601e5e1f8dbf6758b2f95b5ea93d38655a2c6/scipy-1.15.2-cp313-cp313-macosx_12_0_arm64.whl", hash = 
"sha256:08b57a9336b8e79b305a143c3655cc5bdbe6d5ece3378578888d2afbb51c4e37", size = 30100266 }, + { url = "https://files.pythonhosted.org/packages/dd/b1/f9fe6e3c828cb5930b5fe74cb479de5f3d66d682fa8adb77249acaf545b8/scipy-1.15.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:54c462098484e7466362a9f1672d20888f724911a74c22ae35b61f9c5919183d", size = 22373768 }, + { url = "https://files.pythonhosted.org/packages/15/9d/a60db8c795700414c3f681908a2b911e031e024d93214f2d23c6dae174ab/scipy-1.15.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:cf72ff559a53a6a6d77bd8eefd12a17995ffa44ad86c77a5df96f533d4e6c6bb", size = 25154719 }, + { url = "https://files.pythonhosted.org/packages/37/3b/9bda92a85cd93f19f9ed90ade84aa1e51657e29988317fabdd44544f1dd4/scipy-1.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9de9d1416b3d9e7df9923ab23cd2fe714244af10b763975bea9e4f2e81cebd27", size = 35163195 }, + { url = "https://files.pythonhosted.org/packages/03/5a/fc34bf1aa14dc7c0e701691fa8685f3faec80e57d816615e3625f28feb43/scipy-1.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb530e4794fc8ea76a4a21ccb67dea33e5e0e60f07fc38a49e821e1eae3b71a0", size = 37255404 }, + { url = "https://files.pythonhosted.org/packages/4a/71/472eac45440cee134c8a180dbe4c01b3ec247e0338b7c759e6cd71f199a7/scipy-1.15.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5ea7ed46d437fc52350b028b1d44e002646e28f3e8ddc714011aaf87330f2f32", size = 36860011 }, + { url = "https://files.pythonhosted.org/packages/01/b3/21f890f4f42daf20e4d3aaa18182dddb9192771cd47445aaae2e318f6738/scipy-1.15.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11e7ad32cf184b74380f43d3c0a706f49358b904fa7d5345f16ddf993609184d", size = 39657406 }, + { url = "https://files.pythonhosted.org/packages/0d/76/77cf2ac1f2a9cc00c073d49e1e16244e389dd88e2490c91d84e1e3e4d126/scipy-1.15.2-cp313-cp313-win_amd64.whl", hash = "sha256:a5080a79dfb9b78b768cebf3c9dcbc7b665c5875793569f48bf0e2b1d7f68f6f", size = 40961243 }, + { url = "https://files.pythonhosted.org/packages/4c/4b/a57f8ddcf48e129e6054fa9899a2a86d1fc6b07a0e15c7eebff7ca94533f/scipy-1.15.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:447ce30cee6a9d5d1379087c9e474628dab3db4a67484be1b7dc3196bfb2fac9", size = 38870286 }, + { url = "https://files.pythonhosted.org/packages/0c/43/c304d69a56c91ad5f188c0714f6a97b9c1fed93128c691148621274a3a68/scipy-1.15.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:c90ebe8aaa4397eaefa8455a8182b164a6cc1d59ad53f79943f266d99f68687f", size = 30141634 }, + { url = "https://files.pythonhosted.org/packages/44/1a/6c21b45d2548eb73be9b9bff421aaaa7e85e22c1f9b3bc44b23485dfce0a/scipy-1.15.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:def751dd08243934c884a3221156d63e15234a3155cf25978b0a668409d45eb6", size = 22415179 }, + { url = "https://files.pythonhosted.org/packages/74/4b/aefac4bba80ef815b64f55da06f62f92be5d03b467f2ce3668071799429a/scipy-1.15.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:302093e7dfb120e55515936cb55618ee0b895f8bcaf18ff81eca086c17bd80af", size = 25126412 }, + { url = "https://files.pythonhosted.org/packages/b1/53/1cbb148e6e8f1660aacd9f0a9dfa2b05e9ff1cb54b4386fe868477972ac2/scipy-1.15.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cd5b77413e1855351cdde594eca99c1f4a588c2d63711388b6a1f1c01f62274", size = 34952867 }, + { url = 
"https://files.pythonhosted.org/packages/2c/23/e0eb7f31a9c13cf2dca083828b97992dd22f8184c6ce4fec5deec0c81fcf/scipy-1.15.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d0194c37037707b2afa7a2f2a924cf7bac3dc292d51b6a925e5fcb89bc5c776", size = 36890009 }, + { url = "https://files.pythonhosted.org/packages/03/f3/e699e19cabe96bbac5189c04aaa970718f0105cff03d458dc5e2b6bd1e8c/scipy-1.15.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:bae43364d600fdc3ac327db99659dcb79e6e7ecd279a75fe1266669d9a652828", size = 36545159 }, + { url = "https://files.pythonhosted.org/packages/af/f5/ab3838e56fe5cc22383d6fcf2336e48c8fe33e944b9037fbf6cbdf5a11f8/scipy-1.15.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f031846580d9acccd0044efd1a90e6f4df3a6e12b4b6bd694a7bc03a89892b28", size = 39136566 }, + { url = "https://files.pythonhosted.org/packages/0a/c8/b3f566db71461cabd4b2d5b39bcc24a7e1c119535c8361f81426be39bb47/scipy-1.15.2-cp313-cp313t-win_amd64.whl", hash = "sha256:fe8a9eb875d430d81755472c5ba75e84acc980e4a8f6204d402849234d3017db", size = 40477705 }, ] [[package]] @@ -4727,17 +4997,22 @@ dependencies = [ { name = "pybars4", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pydantic-settings", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "scipy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] [package.optional-dependencies] anthropic = [ { name = "anthropic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] +autogen = [ + { name = "autogen-agentchat", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] aws = [ { name = "boto3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] azure = [ { name = "azure-ai-inference", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "azure-ai-projects", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "azure-core-tracing-opentelemetry", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "azure-cosmos", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "azure-identity", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -4778,7 +5053,7 @@ ollama = [ { name = "ollama", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] onnx = [ - { name = "onnxruntime-genai", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "onnxruntime-genai", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, ] pandas = [ { name = "pandas", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -4790,7 +5065,12 @@ postgres = [ { name = "psycopg", extra = ["binary", "pool"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] qdrant = [ - { name = "qdrant-client", marker = "sys_platform == 
'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "qdrant-client", version = "1.12.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform == 'win32')" }, + { name = "qdrant-client", version = "1.13.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, +] +realtime = [ + { name = "aiortc", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "websockets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] redis = [ { name = "redis", extra = ["hiredis"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -4824,33 +5104,36 @@ dev = [ [package.metadata] requires-dist = [ { name = "aiohttp", specifier = "~=3.8" }, + { name = "aiortc", marker = "extra == 'realtime'", specifier = ">=1.9.0" }, { name = "anthropic", marker = "extra == 'anthropic'", specifier = "~=0.32" }, + { name = "autogen-agentchat", marker = "extra == 'autogen'", specifier = ">=0.2,<0.4" }, { name = "azure-ai-inference", marker = "extra == 'azure'", specifier = ">=1.0.0b6" }, + { name = "azure-ai-projects", marker = "extra == 'azure'", specifier = ">=1.0.0b5" }, { name = "azure-core-tracing-opentelemetry", marker = "extra == 'azure'", specifier = ">=1.0.0b11" }, { name = "azure-cosmos", marker = "extra == 'azure'", specifier = "~=4.7" }, { name = "azure-identity", specifier = "~=1.13" }, { name = "azure-identity", marker = "extra == 'azure'", specifier = "~=1.13" }, { name = "azure-search-documents", marker = "extra == 'azure'", specifier = ">=11.6.0b4" }, - { name = "boto3", marker = "extra == 'aws'", specifier = ">=1.28.57" }, + { name = "boto3", marker = "extra == 'aws'", specifier = ">=1.36.4,<1.38.0" }, { name = "chromadb", marker = "extra == 'chroma'", specifier = ">=0.5,<0.7" }, { name = "cloudevents", specifier = "~=1.0" }, { name = "dapr", marker = "extra == 'dapr'", specifier = ">=1.14.0" }, { name = "dapr-ext-fastapi", marker = "extra == 'dapr'", specifier = ">=1.14.0" }, { name = "defusedxml", specifier = "~=0.7" }, { name = "flask-dapr", marker = "extra == 'dapr'", specifier = ">=1.14.0" }, - { name = "google-cloud-aiplatform", marker = "extra == 'google'", specifier = "~=1.60" }, - { name = "google-generativeai", marker = "extra == 'google'", specifier = "~=0.7" }, + { name = "google-cloud-aiplatform", marker = "extra == 'google'", specifier = "==1.80.0" }, + { name = "google-generativeai", marker = "extra == 'google'", specifier = "~=0.8" }, { name = "ipykernel", marker = "extra == 'notebooks'", specifier = "~=6.29" }, { name = "jinja2", specifier = "~=3.1" }, { name = "milvus", marker = "sys_platform != 'win32' and extra == 'milvus'", specifier = ">=2.3,<2.3.8" }, { name = "mistralai", marker = "extra == 'mistralai'", specifier = ">=1.2,<2.0" }, - { name = "motor", marker = "extra == 'mongo'", specifier = ">=3.3.2,<3.7.0" }, + { name = "motor", marker = "extra == 'mongo'", specifier = ">=3.3.2,<3.8.0" }, { name = "nest-asyncio", specifier = "~=1.6" }, { name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.25.0" }, { name = "numpy", marker = 
"python_full_version >= '3.12'", specifier = ">=1.26.0" }, { name = "ollama", marker = "extra == 'ollama'", specifier = "~=0.4" }, - { name = "onnxruntime-genai", marker = "extra == 'onnx'", specifier = "~=0.5" }, - { name = "openai", specifier = "~=1.0" }, + { name = "onnxruntime-genai", marker = "python_full_version < '3.13' and extra == 'onnx'", specifier = "~=0.5" }, + { name = "openai", specifier = "~=1.61" }, { name = "openapi-core", specifier = ">=0.18,<0.20" }, { name = "opentelemetry-api", specifier = "~=1.24" }, { name = "opentelemetry-sdk", specifier = "~=1.24" }, @@ -4863,16 +5146,18 @@ requires-dist = [ { name = "pydantic", specifier = ">=2.0,!=2.10.0,!=2.10.1,!=2.10.2,!=2.10.3,<2.11" }, { name = "pydantic-settings", specifier = "~=2.0" }, { name = "pymilvus", marker = "extra == 'milvus'", specifier = ">=2.3,<2.6" }, - { name = "pymongo", marker = "extra == 'mongo'", specifier = ">=4.8.0,<4.11" }, + { name = "pymongo", marker = "extra == 'mongo'", specifier = ">=4.8.0,<4.12" }, { name = "qdrant-client", marker = "extra == 'qdrant'", specifier = "~=1.9" }, { name = "redis", extras = ["hiredis"], marker = "extra == 'redis'", specifier = "~=5.0" }, { name = "redisvl", marker = "extra == 'redis'", specifier = ">=0.3.6" }, + { name = "scipy", specifier = ">=1.15.1" }, { name = "sentence-transformers", marker = "extra == 'hugging-face'", specifier = ">=2.2,<4.0" }, - { name = "torch", marker = "extra == 'hugging-face'", specifier = "==2.5.1" }, + { name = "torch", marker = "extra == 'hugging-face'", specifier = "==2.6.0" }, { name = "transformers", extras = ["torch"], marker = "extra == 'hugging-face'", specifier = "~=4.28" }, { name = "types-redis", marker = "extra == 'redis'", specifier = "~=4.6.0.20240425" }, - { name = "usearch", marker = "extra == 'usearch'", specifier = "~=2.9" }, - { name = "weaviate-client", marker = "extra == 'weaviate'", specifier = ">=4.7,<5.0" }, + { name = "usearch", marker = "extra == 'usearch'", specifier = "~=2.16" }, + { name = "weaviate-client", marker = "extra == 'weaviate'", specifier = ">=4.10,<5.0" }, + { name = "websockets", marker = "extra == 'realtime'", specifier = ">=13,<15" }, ] [package.metadata.requires-dev] @@ -4886,14 +5171,14 @@ dev = [ { name = "pytest-cov", specifier = ">=5.0" }, { name = "pytest-timeout", specifier = ">=2.3.1" }, { name = "pytest-xdist", extras = ["psutil"], specifier = "~=3.6" }, - { name = "ruff", specifier = "~=0.7" }, + { name = "ruff", specifier = "~=0.9" }, { name = "snoop", specifier = "~=0.4" }, { name = "types-pyyaml", specifier = "~=6.0.12.20240311" }, ] [[package]] name = "sentence-transformers" -version = "3.3.1" +version = "3.4.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -4904,53 +5189,53 @@ dependencies = [ { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "transformers", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/79/0a/c677efe908b20e7e8d4ed6cce3a3447eebc7dc5e348e458f5f9a44a72b00/sentence_transformers-3.3.1.tar.gz", hash = "sha256:9635dbfb11c6b01d036b9cfcee29f7716ab64cf2407ad9f403a2e607da2ac48b", size = 217914 } +sdist = { url = "https://files.pythonhosted.org/packages/16/74/aca6f8a2b8d62b4daf8c9a0c49d2aa573381caf47dc35cbb343389229376/sentence_transformers-3.4.1.tar.gz", hash = 
"sha256:68daa57504ff548340e54ff117bd86c1d2f784b21e0fb2689cf3272b8937b24b", size = 223898 } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/c8/990e22a465e4771338da434d799578865d6d7ef1fdb50bd844b7ecdcfa19/sentence_transformers-3.3.1-py3-none-any.whl", hash = "sha256:abffcc79dab37b7d18d21a26d5914223dd42239cfe18cb5e111c66c54b658ae7", size = 268797 }, + { url = "https://files.pythonhosted.org/packages/05/89/7eb147a37b7f31d3c815543df539d8b8d0425e93296c875cc87719d65232/sentence_transformers-3.4.1-py3-none-any.whl", hash = "sha256:e026dc6d56801fd83f74ad29a30263f401b4b522165c19386d8bc10dcca805da", size = 275896 }, ] [[package]] name = "setuptools" -version = "75.8.0" +version = "75.8.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/92/ec/089608b791d210aec4e7f97488e67ab0d33add3efccb83a056cbafe3a2a6/setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6", size = 1343222 } +sdist = { url = "https://files.pythonhosted.org/packages/d1/53/43d99d7687e8cdef5ab5f9ec5eaf2c0423c2b35133a2b7e7bc276fc32b21/setuptools-75.8.2.tar.gz", hash = "sha256:4880473a969e5f23f2a2be3646b2dfd84af9028716d398e46192f84bc36900d2", size = 1344083 } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/8a/b9dc7678803429e4a3bc9ba462fa3dd9066824d3c607490235c6a796be5a/setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3", size = 1228782 }, + { url = "https://files.pythonhosted.org/packages/a9/38/7d7362e031bd6dc121e5081d8cb6aa6f6fedf2b67bf889962134c6da4705/setuptools-75.8.2-py3-none-any.whl", hash = "sha256:558e47c15f1811c1fa7adbd0096669bf76c1d3f433f58324df69f3f5ecac4e8f", size = 1229385 }, ] [[package]] name = "shapely" -version = "2.0.6" +version = "2.0.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4a/89/0d20bac88016be35ff7d3c0c2ae64b477908f1b1dfa540c5d69ac7af07fe/shapely-2.0.6.tar.gz", hash = "sha256:997f6159b1484059ec239cacaa53467fd8b5564dabe186cd84ac2944663b0bf6", size = 282361 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/17/d4/f84bbbdb7771f5b9ade94db2398b256cf1471f1eb0ca8afbe0f6ca725d5a/shapely-2.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29a34e068da2d321e926b5073539fd2a1d4429a2c656bd63f0bd4c8f5b236d0b", size = 1449635 }, - { url = "https://files.pythonhosted.org/packages/03/10/bd6edb66ed0a845f0809f7ce653596f6fd9c6be675b3653872f47bf49f82/shapely-2.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c84c3f53144febf6af909d6b581bc05e8785d57e27f35ebaa5c1ab9baba13b", size = 1296756 }, - { url = "https://files.pythonhosted.org/packages/af/09/6374c11cb493a9970e8c04d7be25f578a37f6494a2fecfbed3a447b16b2c/shapely-2.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad2fae12dca8d2b727fa12b007e46fbc522148a584f5d6546c539f3464dccde", size = 2381960 }, - { url = "https://files.pythonhosted.org/packages/2b/a6/302e0d9c210ccf4d1ffadf7ab941797d3255dcd5f93daa73aaf116a4db39/shapely-2.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3304883bd82d44be1b27a9d17f1167fda8c7f5a02a897958d86c59ec69b705e", size = 2468133 }, - { url = 
"https://files.pythonhosted.org/packages/8c/be/e448681dc485f2931d4adee93d531fce93608a3ee59433303cc1a46e21a5/shapely-2.0.6-cp310-cp310-win32.whl", hash = "sha256:3ec3a0eab496b5e04633a39fa3d5eb5454628228201fb24903d38174ee34565e", size = 1294982 }, - { url = "https://files.pythonhosted.org/packages/cd/4c/6f4a6fc085e3be01c4c9de0117a2d373bf9fec5f0426cf4d5c94090a5a4d/shapely-2.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:28f87cdf5308a514763a5c38de295544cb27429cfa655d50ed8431a4796090c4", size = 1441141 }, - { url = "https://files.pythonhosted.org/packages/37/15/269d8e1f7f658a37e61f7028683c546f520e4e7cedba1e32c77ff9d3a3c7/shapely-2.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5aeb0f51a9db176da9a30cb2f4329b6fbd1e26d359012bb0ac3d3c7781667a9e", size = 1449578 }, - { url = "https://files.pythonhosted.org/packages/37/63/e182e43081fffa0a2d970c480f2ef91647a6ab94098f61748c23c2a485f2/shapely-2.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a7a78b0d51257a367ee115f4d41ca4d46edbd0dd280f697a8092dd3989867b2", size = 1296792 }, - { url = "https://files.pythonhosted.org/packages/6e/5a/d019f69449329dcd517355444fdb9ddd58bec5e080b8bdba007e8e4c546d/shapely-2.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f32c23d2f43d54029f986479f7c1f6e09c6b3a19353a3833c2ffb226fb63a855", size = 2443997 }, - { url = "https://files.pythonhosted.org/packages/25/aa/53f145e5a610a49af9ac49f2f1be1ec8659ebd5c393d66ac94e57c83b00e/shapely-2.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dc9fb0eb56498912025f5eb352b5126f04801ed0e8bdbd867d21bdbfd7cbd0", size = 2528334 }, - { url = "https://files.pythonhosted.org/packages/64/64/0c7b0a22b416d36f6296b92bb4219d82b53d0a7c47e16fd0a4c85f2f117c/shapely-2.0.6-cp311-cp311-win32.whl", hash = "sha256:d93b7e0e71c9f095e09454bf18dad5ea716fb6ced5df3cb044564a00723f339d", size = 1294669 }, - { url = "https://files.pythonhosted.org/packages/b1/5a/6a67d929c467a1973b6bb9f0b00159cc343b02bf9a8d26db1abd2f87aa23/shapely-2.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:c02eb6bf4cfb9fe6568502e85bb2647921ee49171bcd2d4116c7b3109724ef9b", size = 1442032 }, - { url = "https://files.pythonhosted.org/packages/46/77/efd9f9d4b6a762f976f8b082f54c9be16f63050389500fb52e4f6cc07c1a/shapely-2.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cec9193519940e9d1b86a3b4f5af9eb6910197d24af02f247afbfb47bcb3fab0", size = 1450326 }, - { url = "https://files.pythonhosted.org/packages/68/53/5efa6e7a4036a94fe6276cf7bbb298afded51ca3396b03981ad680c8cc7d/shapely-2.0.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83b94a44ab04a90e88be69e7ddcc6f332da7c0a0ebb1156e1c4f568bbec983c3", size = 1298480 }, - { url = "https://files.pythonhosted.org/packages/88/a2/1be1db4fc262e536465a52d4f19d85834724fedf2299a1b9836bc82fe8fa/shapely-2.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:537c4b2716d22c92036d00b34aac9d3775e3691f80c7aa517c2c290351f42cd8", size = 2439311 }, - { url = "https://files.pythonhosted.org/packages/d5/7d/9a57e187cbf2fbbbdfd4044a4f9ce141c8d221f9963750d3b001f0ec080d/shapely-2.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fea108334be345c283ce74bf064fa00cfdd718048a8af7343c59eb40f59726", size = 2524835 }, - { url = "https://files.pythonhosted.org/packages/6d/0a/f407509ab56825f39bf8cfce1fb410238da96cf096809c3e404e5bc71ea1/shapely-2.0.6-cp312-cp312-win32.whl", hash = "sha256:42fd4cd4834747e4990227e4cbafb02242c0cffe9ce7ef9971f53ac52d80d55f", size = 1295613 }, - { url = 
"https://files.pythonhosted.org/packages/7b/b3/857afd9dfbfc554f10d683ac412eac6fa260d1f4cd2967ecb655c57e831a/shapely-2.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:665990c84aece05efb68a21b3523a6b2057e84a1afbef426ad287f0796ef8a48", size = 1442539 }, - { url = "https://files.pythonhosted.org/packages/34/e8/d164ef5b0eab86088cde06dee8415519ffd5bb0dd1bd9d021e640e64237c/shapely-2.0.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:42805ef90783ce689a4dde2b6b2f261e2c52609226a0438d882e3ced40bb3013", size = 1445344 }, - { url = "https://files.pythonhosted.org/packages/ce/e2/9fba7ac142f7831757a10852bfa465683724eadbc93d2d46f74a16f9af04/shapely-2.0.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6d2cb146191a47bd0cee8ff5f90b47547b82b6345c0d02dd8b25b88b68af62d7", size = 1296182 }, - { url = "https://files.pythonhosted.org/packages/cf/dc/790d4bda27d196cd56ec66975eaae3351c65614cafd0e16ddde39ec9fb92/shapely-2.0.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3fdef0a1794a8fe70dc1f514440aa34426cc0ae98d9a1027fb299d45741c381", size = 2423426 }, - { url = "https://files.pythonhosted.org/packages/af/b0/f8169f77eac7392d41e231911e0095eb1148b4d40c50ea9e34d999c89a7e/shapely-2.0.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c665a0301c645615a107ff7f52adafa2153beab51daf34587170d85e8ba6805", size = 2513249 }, - { url = "https://files.pythonhosted.org/packages/f6/1d/a8c0e9ab49ff2f8e4dedd71b0122eafb22a18ad7e9d256025e1f10c84704/shapely-2.0.6-cp313-cp313-win32.whl", hash = "sha256:0334bd51828f68cd54b87d80b3e7cee93f249d82ae55a0faf3ea21c9be7b323a", size = 1294848 }, - { url = "https://files.pythonhosted.org/packages/23/38/2bc32dd1e7e67a471d4c60971e66df0bdace88656c47a9a728ace0091075/shapely-2.0.6-cp313-cp313-win_amd64.whl", hash = "sha256:d37d070da9e0e0f0a530a621e17c0b8c3c9d04105655132a87cfff8bd77cc4c2", size = 1441371 }, +sdist = { url = "https://files.pythonhosted.org/packages/21/c0/a911d1fd765d07a2b6769ce155219a281bfbe311584ebe97340d75c5bdb1/shapely-2.0.7.tar.gz", hash = "sha256:28fe2997aab9a9dc026dc6a355d04e85841546b2a5d232ed953e3321ab958ee5", size = 283413 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/2e/02c694d6ddacd4f13b625722d313d2838f23c5b988cbc680132983f73ce3/shapely-2.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:33fb10e50b16113714ae40adccf7670379e9ccf5b7a41d0002046ba2b8f0f691", size = 1478310 }, + { url = "https://files.pythonhosted.org/packages/87/69/b54a08bcd25e561bdd5183c008ace4424c25e80506e80674032504800efd/shapely-2.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f44eda8bd7a4bccb0f281264b34bf3518d8c4c9a8ffe69a1a05dabf6e8461147", size = 1336082 }, + { url = "https://files.pythonhosted.org/packages/b3/f9/40473fcb5b66ff849e563ca523d2a26dafd6957d52dd876ffd0eded39f1c/shapely-2.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf6c50cd879831955ac47af9c907ce0310245f9d162e298703f82e1785e38c98", size = 2371047 }, + { url = "https://files.pythonhosted.org/packages/d6/f3/c9cc07a7a03b5f5e83bd059f9adf3e21cf086b0e41d7f95e6464b151e798/shapely-2.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04a65d882456e13c8b417562c36324c0cd1e5915f3c18ad516bb32ee3f5fc895", size = 2469112 }, + { url = "https://files.pythonhosted.org/packages/5d/b9/fc63d6b0b25063a3ff806857a5dc88851d54d1c278288f18cef1b322b449/shapely-2.0.7-cp310-cp310-win32.whl", hash = "sha256:7e97104d28e60b69f9b6a957c4d3a2a893b27525bc1fc96b47b3ccef46726bf2", size = 1296057 }, + { url = 
"https://files.pythonhosted.org/packages/fe/d1/8df43f94cf4cda0edbab4545f7cdd67d3f1d02910eaff152f9f45c6d00d8/shapely-2.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:35524cc8d40ee4752520819f9894b9f28ba339a42d4922e92c99b148bed3be39", size = 1441787 }, + { url = "https://files.pythonhosted.org/packages/1d/ad/21798c2fec013e289f8ab91d42d4d3299c315b8c4460c08c75fef0901713/shapely-2.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5cf23400cb25deccf48c56a7cdda8197ae66c0e9097fcdd122ac2007e320bc34", size = 1473091 }, + { url = "https://files.pythonhosted.org/packages/15/63/eef4f180f1b5859c70e7f91d2f2570643e5c61e7d7c40743d15f8c6cbc42/shapely-2.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8f1da01c04527f7da59ee3755d8ee112cd8967c15fab9e43bba936b81e2a013", size = 1332921 }, + { url = "https://files.pythonhosted.org/packages/fe/67/77851dd17738bbe7762a0ef1acf7bc499d756f68600dd68a987d78229412/shapely-2.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f623b64bb219d62014781120f47499a7adc30cf7787e24b659e56651ceebcb0", size = 2427949 }, + { url = "https://files.pythonhosted.org/packages/0b/a5/2c8dbb0f383519771df19164e3bf3a8895d195d2edeab4b6040f176ee28e/shapely-2.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6d95703efaa64aaabf278ced641b888fc23d9c6dd71f8215091afd8a26a66e3", size = 2529282 }, + { url = "https://files.pythonhosted.org/packages/dc/4e/e1d608773c7fe4cde36d48903c0d6298e3233dc69412403783ac03fa5205/shapely-2.0.7-cp311-cp311-win32.whl", hash = "sha256:2f6e4759cf680a0f00a54234902415f2fa5fe02f6b05546c662654001f0793a2", size = 1295751 }, + { url = "https://files.pythonhosted.org/packages/27/57/8ec7c62012bed06731f7ee979da7f207bbc4b27feed5f36680b6a70df54f/shapely-2.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:b52f3ab845d32dfd20afba86675c91919a622f4627182daec64974db9b0b4608", size = 1442684 }, + { url = "https://files.pythonhosted.org/packages/4f/3e/ea100eec5811bafd0175eb21828a3be5b0960f65250f4474391868be7c0f/shapely-2.0.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4c2b9859424facbafa54f4a19b625a752ff958ab49e01bc695f254f7db1835fa", size = 1482451 }, + { url = "https://files.pythonhosted.org/packages/ce/53/c6a3487716fd32e1f813d2a9608ba7b72a8a52a6966e31c6443480a1d016/shapely-2.0.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5aed1c6764f51011d69a679fdf6b57e691371ae49ebe28c3edb5486537ffbd51", size = 1345765 }, + { url = "https://files.pythonhosted.org/packages/fd/dd/b35d7891d25cc11066a70fb8d8169a6a7fca0735dd9b4d563a84684969a3/shapely-2.0.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73c9ae8cf443187d784d57202199bf9fd2d4bb7d5521fe8926ba40db1bc33e8e", size = 2421540 }, + { url = "https://files.pythonhosted.org/packages/62/de/8dbd7df60eb23cb983bb698aac982944b3d602ef0ce877a940c269eae34e/shapely-2.0.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9469f49ff873ef566864cb3516091881f217b5d231c8164f7883990eec88b73", size = 2525741 }, + { url = "https://files.pythonhosted.org/packages/96/64/faf0413ebc7a84fe7a0790bf39ec0b02b40132b68e57aba985c0b6e4e7b6/shapely-2.0.7-cp312-cp312-win32.whl", hash = "sha256:6bca5095e86be9d4ef3cb52d56bdd66df63ff111d580855cb8546f06c3c907cd", size = 1296552 }, + { url = "https://files.pythonhosted.org/packages/63/05/8a1c279c226d6ad7604d9e237713dd21788eab96db97bf4ce0ea565e5596/shapely-2.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:f86e2c0259fe598c4532acfcf638c1f520fa77c1275912bbc958faecbf00b108", size = 1443464 }, + { url = 
"https://files.pythonhosted.org/packages/c6/21/abea43effbfe11f792e44409ee9ad7635aa93ef1c8ada0ef59b3c1c3abad/shapely-2.0.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a0c09e3e02f948631c7763b4fd3dd175bc45303a0ae04b000856dedebefe13cb", size = 1481618 }, + { url = "https://files.pythonhosted.org/packages/d9/71/af688798da36fe355a6e6ffe1d4628449cb5fa131d57fc169bcb614aeee7/shapely-2.0.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:06ff6020949b44baa8fc2e5e57e0f3d09486cd5c33b47d669f847c54136e7027", size = 1345159 }, + { url = "https://files.pythonhosted.org/packages/67/47/f934fe2b70d31bb9774ad4376e34f81666deed6b811306ff574faa3d115e/shapely-2.0.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6dbf096f961ca6bec5640e22e65ccdec11e676344e8157fe7d636e7904fd36", size = 2410267 }, + { url = "https://files.pythonhosted.org/packages/f5/8a/2545cc2a30afc63fc6176c1da3b76af28ef9c7358ed4f68f7c6a9d86cf5b/shapely-2.0.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adeddfb1e22c20548e840403e5e0b3d9dc3daf66f05fa59f1fcf5b5f664f0e98", size = 2514128 }, + { url = "https://files.pythonhosted.org/packages/87/54/2344ce7da39676adec94e84fbaba92a8f1664e4ae2d33bd404dafcbe607f/shapely-2.0.7-cp313-cp313-win32.whl", hash = "sha256:a7f04691ce1c7ed974c2f8b34a1fe4c3c5dfe33128eae886aa32d730f1ec1913", size = 1295783 }, + { url = "https://files.pythonhosted.org/packages/d7/1e/6461e5cfc8e73ae165b8cff6eb26a4d65274fad0e1435137c5ba34fe4e88/shapely-2.0.7-cp313-cp313-win_amd64.whl", hash = "sha256:aaaf5f7e6cc234c1793f2a2760da464b604584fb58c6b6d7d94144fd2692d67e", size = 1442300 }, ] [[package]] @@ -5096,14 +5381,14 @@ wheels = [ [[package]] name = "starlette" -version = "0.41.3" +version = "0.45.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1a/4c/9b5764bd22eec91c4039ef4c55334e9187085da2d8a2df7bd570869aae18/starlette-0.41.3.tar.gz", hash = "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835", size = 2574159 } +sdist = { url = "https://files.pythonhosted.org/packages/ff/fb/2984a686808b89a6781526129a4b51266f678b2d2b97ab2d325e56116df8/starlette-0.45.3.tar.gz", hash = "sha256:2cbcba2a75806f8a41c722141486f37c28e30a0921c5f6fe4346cb0dcee1302f", size = 2574076 } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/00/2b325970b3060c7cecebab6d295afe763365822b1306a12eeab198f74323/starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7", size = 73225 }, + { url = "https://files.pythonhosted.org/packages/d9/61/f2b52e107b1fc8944b33ef56bf6ac4ebbe16d91b94d2b87ce013bf63fb84/starlette-0.45.3-py3-none-any.whl", hash = "sha256:dfb6d332576f136ec740296c7e8bb8c8a7125044e7c6da30744718880cdd059d", size = 71507 }, ] [[package]] @@ -5136,6 +5421,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b6/cb/b86984bed139586d01532a587464b5805f12e397594f19f931c4c2fbfa61/tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539", size = 28169 }, ] +[[package]] +name = "termcolor" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/37/72/88311445fd44c455c7d553e61f95412cf89054308a1aa2434ab835075fc5/termcolor-2.5.0.tar.gz", hash = 
"sha256:998d8d27da6d48442e8e1f016119076b690d962507531df4890fcd2db2ef8a6f", size = 13057 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/be/df630c387a0a054815d60be6a97eb4e8f17385d5d6fe660e1c02750062b4/termcolor-2.5.0-py3-none-any.whl", hash = "sha256:37b17b5fc1e604945c2642c872a3764b5d547a48009871aea3edd3afa180afb8", size = 7755 }, +] + [[package]] name = "threadpoolctl" version = "3.5.0" @@ -5145,6 +5439,42 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4b/2c/ffbf7a134b9ab11a67b0cf0726453cedd9c5043a4fe7a35d1cefa9a1bcfb/threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467", size = 18414 }, ] +[[package]] +name = "tiktoken" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "regex", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ea/cf/756fedf6981e82897f2d570dd25fa597eb3f4459068ae0572d7e888cfd6f/tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d", size = 35991 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/f3/50ec5709fad61641e4411eb1b9ac55b99801d71f1993c29853f256c726c9/tiktoken-0.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:586c16358138b96ea804c034b8acf3f5d3f0258bd2bc3b0227af4af5d622e382", size = 1065770 }, + { url = "https://files.pythonhosted.org/packages/d6/f8/5a9560a422cf1755b6e0a9a436e14090eeb878d8ec0f80e0cd3d45b78bf4/tiktoken-0.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9c59ccc528c6c5dd51820b3474402f69d9a9e1d656226848ad68a8d5b2e5108", size = 1009314 }, + { url = "https://files.pythonhosted.org/packages/bc/20/3ed4cfff8f809cb902900ae686069e029db74567ee10d017cb254df1d598/tiktoken-0.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0968d5beeafbca2a72c595e8385a1a1f8af58feaebb02b227229b69ca5357fd", size = 1143140 }, + { url = "https://files.pythonhosted.org/packages/f1/95/cc2c6d79df8f113bdc6c99cdec985a878768120d87d839a34da4bd3ff90a/tiktoken-0.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a5fb085a6a3b7350b8fc838baf493317ca0e17bd95e8642f95fc69ecfed1de", size = 1197860 }, + { url = "https://files.pythonhosted.org/packages/c7/6c/9c1a4cc51573e8867c9381db1814223c09ebb4716779c7f845d48688b9c8/tiktoken-0.9.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15a2752dea63d93b0332fb0ddb05dd909371ededa145fe6a3242f46724fa7990", size = 1259661 }, + { url = "https://files.pythonhosted.org/packages/cd/4c/22eb8e9856a2b1808d0a002d171e534eac03f96dbe1161978d7389a59498/tiktoken-0.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:26113fec3bd7a352e4b33dbaf1bd8948de2507e30bd95a44e2b1156647bc01b4", size = 894026 }, + { url = "https://files.pythonhosted.org/packages/4d/ae/4613a59a2a48e761c5161237fc850eb470b4bb93696db89da51b79a871f1/tiktoken-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f32cc56168eac4851109e9b5d327637f15fd662aa30dd79f964b7c39fbadd26e", size = 1065987 }, + { url = "https://files.pythonhosted.org/packages/3f/86/55d9d1f5b5a7e1164d0f1538a85529b5fcba2b105f92db3622e5d7de6522/tiktoken-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:45556bc41241e5294063508caf901bf92ba52d8ef9222023f83d2483a3055348", size = 1009155 }, + { url = 
"https://files.pythonhosted.org/packages/03/58/01fb6240df083b7c1916d1dcb024e2b761213c95d576e9f780dfb5625a76/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03935988a91d6d3216e2ec7c645afbb3d870b37bcb67ada1943ec48678e7ee33", size = 1142898 }, + { url = "https://files.pythonhosted.org/packages/b1/73/41591c525680cd460a6becf56c9b17468d3711b1df242c53d2c7b2183d16/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3d80aad8d2c6b9238fc1a5524542087c52b860b10cbf952429ffb714bc1136", size = 1197535 }, + { url = "https://files.pythonhosted.org/packages/7d/7c/1069f25521c8f01a1a182f362e5c8e0337907fae91b368b7da9c3e39b810/tiktoken-0.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b2a21133be05dc116b1d0372af051cd2c6aa1d2188250c9b553f9fa49301b336", size = 1259548 }, + { url = "https://files.pythonhosted.org/packages/6f/07/c67ad1724b8e14e2b4c8cca04b15da158733ac60136879131db05dda7c30/tiktoken-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:11a20e67fdf58b0e2dea7b8654a288e481bb4fc0289d3ad21291f8d0849915fb", size = 893895 }, + { url = "https://files.pythonhosted.org/packages/cf/e5/21ff33ecfa2101c1bb0f9b6df750553bd873b7fb532ce2cb276ff40b197f/tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03", size = 1065073 }, + { url = "https://files.pythonhosted.org/packages/8e/03/a95e7b4863ee9ceec1c55983e4cc9558bcfd8f4f80e19c4f8a99642f697d/tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210", size = 1008075 }, + { url = "https://files.pythonhosted.org/packages/40/10/1305bb02a561595088235a513ec73e50b32e74364fef4de519da69bc8010/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794", size = 1140754 }, + { url = "https://files.pythonhosted.org/packages/1b/40/da42522018ca496432ffd02793c3a72a739ac04c3794a4914570c9bb2925/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22", size = 1196678 }, + { url = "https://files.pythonhosted.org/packages/5c/41/1e59dddaae270ba20187ceb8aa52c75b24ffc09f547233991d5fd822838b/tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2", size = 1259283 }, + { url = "https://files.pythonhosted.org/packages/5b/64/b16003419a1d7728d0d8c0d56a4c24325e7b10a21a9dd1fc0f7115c02f0a/tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16", size = 894897 }, + { url = "https://files.pythonhosted.org/packages/7a/11/09d936d37f49f4f494ffe660af44acd2d99eb2429d60a57c71318af214e0/tiktoken-0.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b0e8e05a26eda1249e824156d537015480af7ae222ccb798e5234ae0285dbdb", size = 1064919 }, + { url = "https://files.pythonhosted.org/packages/80/0e/f38ba35713edb8d4197ae602e80837d574244ced7fb1b6070b31c29816e0/tiktoken-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:27d457f096f87685195eea0165a1807fae87b97b2161fe8c9b1df5bd74ca6f63", size = 1007877 }, + { url = "https://files.pythonhosted.org/packages/fe/82/9197f77421e2a01373e27a79dd36efdd99e6b4115746ecc553318ecafbf0/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2cf8ded49cddf825390e36dd1ad35cd49589e8161fdcb52aa25f0583e90a3e01", size = 1140095 }, + { url = "https://files.pythonhosted.org/packages/f2/bb/4513da71cac187383541facd0291c4572b03ec23c561de5811781bbd988f/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc156cb314119a8bb9748257a2eaebd5cc0753b6cb491d26694ed42fc7cb3139", size = 1195649 }, + { url = "https://files.pythonhosted.org/packages/fa/5c/74e4c137530dd8504e97e3a41729b1103a4ac29036cbfd3250b11fd29451/tiktoken-0.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cd69372e8c9dd761f0ab873112aba55a0e3e506332dd9f7522ca466e817b1b7a", size = 1258465 }, + { url = "https://files.pythonhosted.org/packages/de/a8/8f499c179ec900783ffe133e9aab10044481679bb9aad78436d239eee716/tiktoken-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:5ea0edb6f83dc56d794723286215918c1cde03712cbbafa0348b33448faf5b95", size = 894669 }, +] + [[package]] name = "tinycss2" version = "1.4.0" @@ -5223,7 +5553,7 @@ wheels = [ [[package]] name = "torch" -version = "2.5.1" +version = "2.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -5239,28 +5569,32 @@ dependencies = [ { name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusparselt-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "setuptools", marker = "(python_full_version >= '3.12' and sys_platform == 'darwin') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'win32')" }, { name = "sympy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "triton", marker = "python_full_version < '3.13' and platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/ef/834af4a885b31a0b32fff2d80e1e40f771e1566ea8ded55347502440786a/torch-2.5.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:71328e1bbe39d213b8721678f9dcac30dfc452a46d586f1d514a6aa0a99d4744", size = 906446312 }, - { url = "https://files.pythonhosted.org/packages/69/f0/46e74e0d145f43fa506cb336eaefb2d240547e4ce1f496e442711093ab25/torch-2.5.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:34bfa1a852e5714cbfa17f27c49d8ce35e1b7af5608c4bc6e81392c352dbc601", size = 91919522 }, - { url = "https://files.pythonhosted.org/packages/a5/13/1eb674c8efbd04d71e4a157ceba991904f633e009a584dd65dccbafbb648/torch-2.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:32a037bd98a241df6c93e4c789b683335da76a2ac142c0973675b715102dc5fa", size = 203088048 }, - { url = 
"https://files.pythonhosted.org/packages/a9/9d/e0860474ee0ff8f6ef2c50ec8f71a250f38d78a9b9df9fd241ad3397a65b/torch-2.5.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:23d062bf70776a3d04dbe74db950db2a5245e1ba4f27208a87f0d743b0d06e86", size = 63877046 }, - { url = "https://files.pythonhosted.org/packages/d1/35/e8b2daf02ce933e4518e6f5682c72fd0ed66c15910ea1fb4168f442b71c4/torch-2.5.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:de5b7d6740c4b636ef4db92be922f0edc425b65ed78c5076c43c42d362a45457", size = 906474467 }, - { url = "https://files.pythonhosted.org/packages/40/04/bd91593a4ca178ece93ca55f27e2783aa524aaccbfda66831d59a054c31e/torch-2.5.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:340ce0432cad0d37f5a31be666896e16788f1adf8ad7be481196b503dad675b9", size = 91919450 }, - { url = "https://files.pythonhosted.org/packages/0d/4a/e51420d46cfc90562e85af2fee912237c662ab31140ab179e49bd69401d6/torch-2.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:603c52d2fe06433c18b747d25f5c333f9c1d58615620578c326d66f258686f9a", size = 203098237 }, - { url = "https://files.pythonhosted.org/packages/d0/db/5d9cbfbc7968d79c5c09a0bc0bc3735da079f2fd07cc10498a62b320a480/torch-2.5.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:31f8c39660962f9ae4eeec995e3049b5492eb7360dd4f07377658ef4d728fa4c", size = 63884466 }, - { url = "https://files.pythonhosted.org/packages/8b/5c/36c114d120bfe10f9323ed35061bc5878cc74f3f594003854b0ea298942f/torch-2.5.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:ed231a4b3a5952177fafb661213d690a72caaad97d5824dd4fc17ab9e15cec03", size = 906389343 }, - { url = "https://files.pythonhosted.org/packages/6d/69/d8ada8b6e0a4257556d5b4ddeb4345ea8eeaaef3c98b60d1cca197c7ad8e/torch-2.5.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:3f4b7f10a247e0dcd7ea97dc2d3bfbfc90302ed36d7f3952b0008d0df264e697", size = 91811673 }, - { url = "https://files.pythonhosted.org/packages/5f/ba/607d013b55b9fd805db2a5c2662ec7551f1910b4eef39653eeaba182c5b2/torch-2.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:73e58e78f7d220917c5dbfad1a40e09df9929d3b95d25e57d9f8558f84c9a11c", size = 203046841 }, - { url = "https://files.pythonhosted.org/packages/57/6c/bf52ff061da33deb9f94f4121fde7ff3058812cb7d2036c97bc167793bd1/torch-2.5.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:8c712df61101964eb11910a846514011f0b6f5920c55dbf567bff8a34163d5b1", size = 63858109 }, - { url = "https://files.pythonhosted.org/packages/69/72/20cb30f3b39a9face296491a86adb6ff8f1a47a897e4d14667e6cf89d5c3/torch-2.5.1-cp313-cp313-manylinux1_x86_64.whl", hash = "sha256:9b61edf3b4f6e3b0e0adda8b3960266b9009d02b37555971f4d1c8f7a05afed7", size = 906393265 }, + { url = "https://files.pythonhosted.org/packages/37/81/aa9ab58ec10264c1abe62c8b73f5086c3c558885d6beecebf699f0dbeaeb/torch-2.6.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:6860df13d9911ac158f4c44031609700e1eba07916fff62e21e6ffa0a9e01961", size = 766685561 }, + { url = "https://files.pythonhosted.org/packages/86/86/e661e229df2f5bfc6eab4c97deb1286d598bbeff31ab0cdb99b3c0d53c6f/torch-2.6.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c4f103a49830ce4c7561ef4434cc7926e5a5fe4e5eb100c19ab36ea1e2b634ab", size = 95751887 }, + { url = "https://files.pythonhosted.org/packages/20/e0/5cb2f8493571f0a5a7273cd7078f191ac252a402b5fb9cb6091f14879109/torch-2.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:56eeaf2ecac90da5d9e35f7f35eb286da82673ec3c582e310a8d1631a1c02341", size = 204165139 }, + { url = 
"https://files.pythonhosted.org/packages/e5/16/ea1b7842413a7b8a5aaa5e99e8eaf3da3183cc3ab345ad025a07ff636301/torch-2.6.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:09e06f9949e1a0518c5b09fe95295bc9661f219d9ecb6f9893e5123e10696628", size = 66520221 }, + { url = "https://files.pythonhosted.org/packages/78/a9/97cbbc97002fff0de394a2da2cdfa859481fdca36996d7bd845d50aa9d8d/torch-2.6.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:7979834102cd5b7a43cc64e87f2f3b14bd0e1458f06e9f88ffa386d07c7446e1", size = 766715424 }, + { url = "https://files.pythonhosted.org/packages/6d/fa/134ce8f8a7ea07f09588c9cc2cea0d69249efab977707cf67669431dcf5c/torch-2.6.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:ccbd0320411fe1a3b3fec7b4d3185aa7d0c52adac94480ab024b5c8f74a0bf1d", size = 95759416 }, + { url = "https://files.pythonhosted.org/packages/11/c5/2370d96b31eb1841c3a0883a492c15278a6718ccad61bb6a649c80d1d9eb/torch-2.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:46763dcb051180ce1ed23d1891d9b1598e07d051ce4c9d14307029809c4d64f7", size = 204164970 }, + { url = "https://files.pythonhosted.org/packages/0b/fa/f33a4148c6fb46ca2a3f8de39c24d473822d5774d652b66ed9b1214da5f7/torch-2.6.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:94fc63b3b4bedd327af588696559f68c264440e2503cc9e6954019473d74ae21", size = 66530713 }, + { url = "https://files.pythonhosted.org/packages/e5/35/0c52d708144c2deb595cd22819a609f78fdd699b95ff6f0ebcd456e3c7c1/torch-2.6.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:2bb8987f3bb1ef2675897034402373ddfc8f5ef0e156e2d8cfc47cacafdda4a9", size = 766624563 }, + { url = "https://files.pythonhosted.org/packages/01/d6/455ab3fbb2c61c71c8842753b566012e1ed111e7a4c82e0e1c20d0c76b62/torch-2.6.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:b789069020c5588c70d5c2158ac0aa23fd24a028f34a8b4fcb8fcb4d7efcf5fb", size = 95607867 }, + { url = "https://files.pythonhosted.org/packages/18/cf/ae99bd066571656185be0d88ee70abc58467b76f2f7c8bfeb48735a71fe6/torch-2.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:7e1448426d0ba3620408218b50aa6ada88aeae34f7a239ba5431f6c8774b1239", size = 204120469 }, + { url = "https://files.pythonhosted.org/packages/81/b4/605ae4173aa37fb5aa14605d100ff31f4f5d49f617928c9f486bb3aaec08/torch-2.6.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:9a610afe216a85a8b9bc9f8365ed561535c93e804c2a317ef7fabcc5deda0989", size = 66532538 }, + { url = "https://files.pythonhosted.org/packages/24/85/ead1349fc30fe5a32cadd947c91bda4a62fbfd7f8c34ee61f6398d38fb48/torch-2.6.0-cp313-cp313-manylinux1_x86_64.whl", hash = "sha256:4874a73507a300a5d089ceaff616a569e7bb7c613c56f37f63ec3ffac65259cf", size = 766626191 }, + { url = "https://files.pythonhosted.org/packages/dd/b0/26f06f9428b250d856f6d512413e9e800b78625f63801cbba13957432036/torch-2.6.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:a0d5e1b9874c1a6c25556840ab8920569a7a4137afa8a63a32cee0bc7d89bd4b", size = 95611439 }, + { url = "https://files.pythonhosted.org/packages/c2/9c/fc5224e9770c83faed3a087112d73147cd7c7bfb7557dcf9ad87e1dda163/torch-2.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:510c73251bee9ba02ae1cb6c9d4ee0907b3ce6020e62784e2d7598e0cfa4d6cc", size = 204126475 }, + { url = "https://files.pythonhosted.org/packages/88/8b/d60c0491ab63634763be1537ad488694d316ddc4a20eaadd639cedc53971/torch-2.6.0-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:ff96f4038f8af9f7ec4231710ed4549da1bdebad95923953a25045dcf6fd87e2", size = 66536783 }, ] [[package]] @@ -5304,7 +5638,7 @@ wheels = [ [[package]] name = "transformers" -version = "4.48.0" 
+version = "4.49.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -5318,9 +5652,9 @@ dependencies = [ { name = "tokenizers", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ea/71/93a6331682d6f15adf7d646956db0c43e5f1759bbbd05f2ef53029bae107/transformers-4.48.0.tar.gz", hash = "sha256:03fdfcbfb8b0367fb6c9fbe9d1c9aa54dfd847618be9b52400b2811d22799cb1", size = 8372101 } +sdist = { url = "https://files.pythonhosted.org/packages/79/50/46573150944f46df8ec968eda854023165a84470b42f69f67c7d475dabc5/transformers-4.49.0.tar.gz", hash = "sha256:7e40e640b5b8dc3f48743f5f5adbdce3660c82baafbd3afdfc04143cdbd2089e", size = 8610952 } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/d6/a69764e89fc5c2c957aa473881527c8c35521108d553df703e9ba703daeb/transformers-4.48.0-py3-none-any.whl", hash = "sha256:6d3de6d71cb5f2a10f9775ccc17abce9620195caaf32ec96542bd2a6937f25b0", size = 9673380 }, + { url = "https://files.pythonhosted.org/packages/20/37/1f29af63e9c30156a3ed6ebc2754077016577c094f31de7b2631e5d379eb/transformers-4.49.0-py3-none-any.whl", hash = "sha256:6b4fded1c5fee04d384b1014495b4235a2b53c87503d7d592423c06128cbbe03", size = 9970275 }, ] [package.optional-dependencies] @@ -5331,15 +5665,13 @@ torch = [ [[package]] name = "triton" -version = "3.1.0" +version = "3.2.0" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "filelock", marker = "python_full_version < '3.13' and sys_platform == 'linux'" }, -] wheels = [ - { url = "https://files.pythonhosted.org/packages/98/29/69aa56dc0b2eb2602b553881e34243475ea2afd9699be042316842788ff5/triton-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b0dd10a925263abbe9fa37dcde67a5e9b2383fc269fdf59f5657cac38c5d1d8", size = 209460013 }, - { url = "https://files.pythonhosted.org/packages/86/17/d9a5cf4fcf46291856d1e90762e36cbabd2a56c7265da0d1d9508c8e3943/triton-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f34f6e7885d1bf0eaaf7ba875a5f0ce6f3c13ba98f9503651c1e6dc6757ed5c", size = 209506424 }, - { url = "https://files.pythonhosted.org/packages/78/eb/65f5ba83c2a123f6498a3097746607e5b2f16add29e36765305e4ac7fdd8/triton-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8182f42fd8080a7d39d666814fa36c5e30cc00ea7eeeb1a2983dbb4c99a0fdc", size = 209551444 }, + { url = "https://files.pythonhosted.org/packages/01/65/3ffa90e158a2c82f0716eee8d26a725d241549b7d7aaf7e4f44ac03ebd89/triton-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3e54983cd51875855da7c68ec05c05cf8bb08df361b1d5b69e05e40b0c9bd62", size = 253090354 }, + { url = "https://files.pythonhosted.org/packages/a7/2e/757d2280d4fefe7d33af7615124e7e298ae7b8e3bc4446cdb8e88b0f9bab/triton-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8009a1fb093ee8546495e96731336a33fb8856a38e45bb4ab6affd6dbc3ba220", size = 253157636 }, + { url = "https://files.pythonhosted.org/packages/06/00/59500052cb1cf8cf5316be93598946bc451f14072c6ff256904428eaf03c/triton-3.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d9b215efc1c26fa7eefb9a157915c92d52e000d2bf83e5f69704047e63f125c", 
size = 253159365 }, + { url = "https://files.pythonhosted.org/packages/c7/30/37a3384d1e2e9320331baca41e835e90a3767303642c7a80d4510152cbcf/triton-3.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5dfa23ba84541d7c0a531dfce76d8bcd19159d50a4a8b14ad01e91734a5c1b0", size = 253154278 }, ] [[package]] @@ -5406,11 +5738,11 @@ wheels = [ [[package]] name = "types-setuptools" -version = "75.8.0.20250110" +version = "75.8.0.20250225" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f7/42/5713e90d4f9683f2301d900f33e4fc2405ad8ac224dda30f6cb7f4cd215b/types_setuptools-75.8.0.20250110.tar.gz", hash = "sha256:96f7ec8bbd6e0a54ea180d66ad68ad7a1d7954e7281a710ea2de75e355545271", size = 48185 } +sdist = { url = "https://files.pythonhosted.org/packages/1f/ad/0747cfa03acc6cbeee3ce15704ac65fb4c7444f3cd5596c34d581e7366a7/types_setuptools-75.8.0.20250225.tar.gz", hash = "sha256:6038f7e983d55792a5f90d8fdbf5d4c186026214a16bb65dd6ae83c624ae9636", size = 48448 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/a3/dbfd106751b11c728cec21cc62cbfe7ff7391b935c4b6e8f0bdc2e6fd541/types_setuptools-75.8.0.20250110-py3-none-any.whl", hash = "sha256:a9f12980bbf9bcdc23ecd80755789085bad6bfce4060c2275bc2b4ca9f2bc480", size = 71521 }, + { url = "https://files.pythonhosted.org/packages/0c/f2/6259d7d302d66a1df119baac81a06649c2cf5fa0a671278c408d43711cee/types_setuptools-75.8.0.20250225-py3-none-any.whl", hash = "sha256:94c86b439cc60bcc68c1cda3fd2c301f007f8f9502f4fbb54c66cb5ce9b875af", size = 71839 }, ] [[package]] @@ -5437,11 +5769,11 @@ wheels = [ [[package]] name = "tzdata" -version = "2024.2" +version = "2025.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e1/34/943888654477a574a86a98e9896bae89c7aa15078ec29f490fef2f1e5384/tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc", size = 193282 } +sdist = { url = "https://files.pythonhosted.org/packages/43/0f/fa4723f22942480be4ca9527bbde8d43f6c3f2fe8412f00e7f5f6746bc8b/tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694", size = 194950 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a6/ab/7e5f53c3b9d14972843a647d8d7a853969a58aecc7559cb3267302c94774/tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd", size = 346586 }, + { url = "https://files.pythonhosted.org/packages/0f/dd/84f10e23edd882c6f968c21c2434fe67bd4a528967067515feca9e611e5e/tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639", size = 346762 }, ] [[package]] @@ -5632,16 +5964,16 @@ wheels = [ [[package]] name = "virtualenv" -version = "20.29.0" +version = "20.29.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "platformdirs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5a/5d/8d625ebddf9d31c301f85125b78002d4e4401fe1c15c04dca58a54a3056a/virtualenv-20.29.0.tar.gz", hash = "sha256:6345e1ff19d4b1296954cee076baaf58ff2a12a84a338c62b02eda39f20aa982", size = 7658081 } 
+sdist = { url = "https://files.pythonhosted.org/packages/f1/88/dacc875dd54a8acadb4bcbfd4e3e86df8be75527116c91d8f9784f5e9cab/virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728", size = 4320272 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/d3/12687ab375bb0e077ea802a5128f7b45eb5de7a7c6cb576ccf9dd59ff80a/virtualenv-20.29.0-py3-none-any.whl", hash = "sha256:c12311863497992dc4b8644f8ea82d3b35bb7ef8ee82e6630d76d0197c39baf9", size = 4282443 }, + { url = "https://files.pythonhosted.org/packages/93/fa/849483d56773ae29740ae70043ad88e068f98a6401aa819b5d6bee604683/virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a", size = 4301478 }, ] [[package]] @@ -5720,7 +6052,7 @@ wheels = [ [[package]] name = "weaviate-client" -version = "4.10.4" +version = "4.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "authlib", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -5731,9 +6063,9 @@ dependencies = [ { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "validators", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/de/ce/e34426eeda39a77b45df86f9ab901a7232096a071ee379a046a8072e2a35/weaviate_client-4.10.4.tar.gz", hash = "sha256:a1e799fc41d9f43a56c95490f6c14f475861f27d2a62b9b6de28a1db5494751d", size = 594549 } +sdist = { url = "https://files.pythonhosted.org/packages/a9/0e/e12a41d1a272d30184f8deaaec3b27a4e98aaf13a4aa1f3fc40ee0ce294d/weaviate_client-4.11.0.tar.gz", hash = "sha256:27cb82326a1b69b1de764614973f7c1c8f1e3e86459b75c6db1be9fac004d68a", size = 609019 } wheels = [ - { url = "https://files.pythonhosted.org/packages/50/e9/5b6ffbdee0d0f1444d0ce142c70a70bf22ba43bf2d6b35913a8d7e674431/weaviate_client-4.10.4-py3-none-any.whl", hash = "sha256:d9808456ba109fcd99331bc833b61cf520bf6ad9db442db621e12f78c8480c4c", size = 330450 }, + { url = "https://files.pythonhosted.org/packages/a4/54/b5e80a1708e4973332c149565729010bc0d9674f9f5f301445d56b9c550c/weaviate_client-4.11.0-py3-none-any.whl", hash = "sha256:de97f34a953974f9f294a371a7057a1d6c908af92561b797d70e9c4eed2bba02", size = 350057 }, ] [[package]] @@ -5756,61 +6088,61 @@ wheels = [ [[package]] name = "websockets" -version = "14.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f4/1b/380b883ce05bb5f45a905b61790319a28958a9ab1e4b6b95ff5464b60ca1/websockets-14.1.tar.gz", hash = "sha256:398b10c77d471c0aab20a845e7a60076b6390bfdaac7a6d2edb0d2c59d75e8d8", size = 162840 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/af/91/b1b375dbd856fd5fff3f117de0e520542343ecaf4e8fc60f1ac1e9f5822c/websockets-14.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a0adf84bc2e7c86e8a202537b4fd50e6f7f0e4a6b6bf64d7ccb96c4cd3330b29", size = 161950 }, - { url = "https://files.pythonhosted.org/packages/61/8f/4d52f272d3ebcd35e1325c646e98936099a348374d4a6b83b524bded8116/websockets-14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90b5d9dfbb6d07a84ed3e696012610b6da074d97453bd01e0e30744b472c8179", size = 159601 }, - { url = "https://files.pythonhosted.org/packages/c4/b1/29e87b53eb1937992cdee094a0988aadc94f25cf0b37e90c75eed7123d75/websockets-14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:2177ee3901075167f01c5e335a6685e71b162a54a89a56001f1c3e9e3d2ad250", size = 159854 }, - { url = "https://files.pythonhosted.org/packages/3f/e6/752a2f5e8321ae2a613062676c08ff2fccfb37dc837a2ee919178a372e8a/websockets-14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f14a96a0034a27f9d47fd9788913924c89612225878f8078bb9d55f859272b0", size = 168835 }, - { url = "https://files.pythonhosted.org/packages/60/27/ca62de7877596926321b99071639275e94bb2401397130b7cf33dbf2106a/websockets-14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f874ba705deea77bcf64a9da42c1f5fc2466d8f14daf410bc7d4ceae0a9fcb0", size = 167844 }, - { url = "https://files.pythonhosted.org/packages/7e/db/f556a1d06635c680ef376be626c632e3f2bbdb1a0189d1d1bffb061c3b70/websockets-14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9607b9a442392e690a57909c362811184ea429585a71061cd5d3c2b98065c199", size = 168157 }, - { url = "https://files.pythonhosted.org/packages/b3/bc/99e5f511838c365ac6ecae19674eb5e94201aa4235bd1af3e6fa92c12905/websockets-14.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bea45f19b7ca000380fbd4e02552be86343080120d074b87f25593ce1700ad58", size = 168561 }, - { url = "https://files.pythonhosted.org/packages/c6/e7/251491585bad61c79e525ac60927d96e4e17b18447cc9c3cfab47b2eb1b8/websockets-14.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:219c8187b3ceeadbf2afcf0f25a4918d02da7b944d703b97d12fb01510869078", size = 167979 }, - { url = "https://files.pythonhosted.org/packages/ac/98/7ac2e4eeada19bdbc7a3a66a58e3ebdf33648b9e1c5b3f08c3224df168cf/websockets-14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ad2ab2547761d79926effe63de21479dfaf29834c50f98c4bf5b5480b5838434", size = 167925 }, - { url = "https://files.pythonhosted.org/packages/ab/3d/09e65c47ee2396b7482968068f6e9b516221e1032b12dcf843b9412a5dfb/websockets-14.1-cp310-cp310-win32.whl", hash = "sha256:1288369a6a84e81b90da5dbed48610cd7e5d60af62df9851ed1d1d23a9069f10", size = 162831 }, - { url = "https://files.pythonhosted.org/packages/8a/67/59828a3d09740e6a485acccfbb66600632f2178b6ed1b61388ee96f17d5a/websockets-14.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0744623852f1497d825a49a99bfbec9bea4f3f946df6eb9d8a2f0c37a2fec2e", size = 163266 }, - { url = "https://files.pythonhosted.org/packages/97/ed/c0d03cb607b7fe1f7ff45e2cd4bb5cd0f9e3299ced79c2c303a6fff44524/websockets-14.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:449d77d636f8d9c17952628cc7e3b8faf6e92a17ec581ec0c0256300717e1512", size = 161949 }, - { url = "https://files.pythonhosted.org/packages/06/91/bf0a44e238660d37a2dda1b4896235d20c29a2d0450f3a46cd688f43b239/websockets-14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a35f704be14768cea9790d921c2c1cc4fc52700410b1c10948511039be824aac", size = 159606 }, - { url = "https://files.pythonhosted.org/packages/ff/b8/7185212adad274c2b42b6a24e1ee6b916b7809ed611cbebc33b227e5c215/websockets-14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b1f3628a0510bd58968c0f60447e7a692933589b791a6b572fcef374053ca280", size = 159854 }, - { url = "https://files.pythonhosted.org/packages/5a/8a/0849968d83474be89c183d8ae8dcb7f7ada1a3c24f4d2a0d7333c231a2c3/websockets-14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c3deac3748ec73ef24fc7be0b68220d14d47d6647d2f85b2771cb35ea847aa1", size = 169402 }, - { url = 
"https://files.pythonhosted.org/packages/bd/4f/ef886e37245ff6b4a736a09b8468dae05d5d5c99de1357f840d54c6f297d/websockets-14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7048eb4415d46368ef29d32133134c513f507fff7d953c18c91104738a68c3b3", size = 168406 }, - { url = "https://files.pythonhosted.org/packages/11/43/e2dbd4401a63e409cebddedc1b63b9834de42f51b3c84db885469e9bdcef/websockets-14.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6cf0ad281c979306a6a34242b371e90e891bce504509fb6bb5246bbbf31e7b6", size = 168776 }, - { url = "https://files.pythonhosted.org/packages/6d/d6/7063e3f5c1b612e9f70faae20ebaeb2e684ffa36cb959eb0862ee2809b32/websockets-14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cc1fc87428c1d18b643479caa7b15db7d544652e5bf610513d4a3478dbe823d0", size = 169083 }, - { url = "https://files.pythonhosted.org/packages/49/69/e6f3d953f2fa0f8a723cf18cd011d52733bd7f6e045122b24e0e7f49f9b0/websockets-14.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f95ba34d71e2fa0c5d225bde3b3bdb152e957150100e75c86bc7f3964c450d89", size = 168529 }, - { url = "https://files.pythonhosted.org/packages/70/ff/f31fa14561fc1d7b8663b0ed719996cf1f581abee32c8fb2f295a472f268/websockets-14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9481a6de29105d73cf4515f2bef8eb71e17ac184c19d0b9918a3701c6c9c4f23", size = 168475 }, - { url = "https://files.pythonhosted.org/packages/f1/15/b72be0e4bf32ff373aa5baef46a4c7521b8ea93ad8b49ca8c6e8e764c083/websockets-14.1-cp311-cp311-win32.whl", hash = "sha256:368a05465f49c5949e27afd6fbe0a77ce53082185bbb2ac096a3a8afaf4de52e", size = 162833 }, - { url = "https://files.pythonhosted.org/packages/bc/ef/2d81679acbe7057ffe2308d422f744497b52009ea8bab34b6d74a2657d1d/websockets-14.1-cp311-cp311-win_amd64.whl", hash = "sha256:6d24fc337fc055c9e83414c94e1ee0dee902a486d19d2a7f0929e49d7d604b09", size = 163263 }, - { url = "https://files.pythonhosted.org/packages/55/64/55698544ce29e877c9188f1aee9093712411a8fc9732cca14985e49a8e9c/websockets-14.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ed907449fe5e021933e46a3e65d651f641975a768d0649fee59f10c2985529ed", size = 161957 }, - { url = "https://files.pythonhosted.org/packages/a2/b1/b088f67c2b365f2c86c7b48edb8848ac27e508caf910a9d9d831b2f343cb/websockets-14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:87e31011b5c14a33b29f17eb48932e63e1dcd3fa31d72209848652310d3d1f0d", size = 159620 }, - { url = "https://files.pythonhosted.org/packages/c1/89/2a09db1bbb40ba967a1b8225b07b7df89fea44f06de9365f17f684d0f7e6/websockets-14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bc6ccf7d54c02ae47a48ddf9414c54d48af9c01076a2e1023e3b486b6e72c707", size = 159852 }, - { url = "https://files.pythonhosted.org/packages/ca/c1/f983138cd56e7d3079f1966e81f77ce6643f230cd309f73aa156bb181749/websockets-14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9777564c0a72a1d457f0848977a1cbe15cfa75fa2f67ce267441e465717dcf1a", size = 169675 }, - { url = "https://files.pythonhosted.org/packages/c1/c8/84191455d8660e2a0bdb33878d4ee5dfa4a2cedbcdc88bbd097303b65bfa/websockets-14.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a655bde548ca98f55b43711b0ceefd2a88a71af6350b0c168aa77562104f3f45", size = 168619 }, - { url = 
"https://files.pythonhosted.org/packages/8d/a7/62e551fdcd7d44ea74a006dc193aba370505278ad76efd938664531ce9d6/websockets-14.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3dfff83ca578cada2d19e665e9c8368e1598d4e787422a460ec70e531dbdd58", size = 169042 }, - { url = "https://files.pythonhosted.org/packages/ad/ed/1532786f55922c1e9c4d329608e36a15fdab186def3ca9eb10d7465bc1cc/websockets-14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6a6c9bcf7cdc0fd41cc7b7944447982e8acfd9f0d560ea6d6845428ed0562058", size = 169345 }, - { url = "https://files.pythonhosted.org/packages/ea/fb/160f66960d495df3de63d9bcff78e1b42545b2a123cc611950ffe6468016/websockets-14.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4b6caec8576e760f2c7dd878ba817653144d5f369200b6ddf9771d64385b84d4", size = 168725 }, - { url = "https://files.pythonhosted.org/packages/cf/53/1bf0c06618b5ac35f1d7906444b9958f8485682ab0ea40dee7b17a32da1e/websockets-14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb6d38971c800ff02e4a6afd791bbe3b923a9a57ca9aeab7314c21c84bf9ff05", size = 168712 }, - { url = "https://files.pythonhosted.org/packages/e5/22/5ec2f39fff75f44aa626f86fa7f20594524a447d9c3be94d8482cd5572ef/websockets-14.1-cp312-cp312-win32.whl", hash = "sha256:1d045cbe1358d76b24d5e20e7b1878efe578d9897a25c24e6006eef788c0fdf0", size = 162838 }, - { url = "https://files.pythonhosted.org/packages/74/27/28f07df09f2983178db7bf6c9cccc847205d2b92ced986cd79565d68af4f/websockets-14.1-cp312-cp312-win_amd64.whl", hash = "sha256:90f4c7a069c733d95c308380aae314f2cb45bd8a904fb03eb36d1a4983a4993f", size = 163277 }, - { url = "https://files.pythonhosted.org/packages/34/77/812b3ba5110ed8726eddf9257ab55ce9e85d97d4aa016805fdbecc5e5d48/websockets-14.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3630b670d5057cd9e08b9c4dab6493670e8e762a24c2c94ef312783870736ab9", size = 161966 }, - { url = "https://files.pythonhosted.org/packages/8d/24/4fcb7aa6986ae7d9f6d083d9d53d580af1483c5ec24bdec0978307a0f6ac/websockets-14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:36ebd71db3b89e1f7b1a5deaa341a654852c3518ea7a8ddfdf69cc66acc2db1b", size = 159625 }, - { url = "https://files.pythonhosted.org/packages/f8/47/2a0a3a2fc4965ff5b9ce9324d63220156bd8bedf7f90824ab92a822e65fd/websockets-14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5b918d288958dc3fa1c5a0b9aa3256cb2b2b84c54407f4813c45d52267600cd3", size = 159857 }, - { url = "https://files.pythonhosted.org/packages/dd/c8/d7b425011a15e35e17757e4df75b25e1d0df64c0c315a44550454eaf88fc/websockets-14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00fe5da3f037041da1ee0cf8e308374e236883f9842c7c465aa65098b1c9af59", size = 169635 }, - { url = "https://files.pythonhosted.org/packages/93/39/6e3b5cffa11036c40bd2f13aba2e8e691ab2e01595532c46437b56575678/websockets-14.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8149a0f5a72ca36720981418eeffeb5c2729ea55fa179091c81a0910a114a5d2", size = 168578 }, - { url = "https://files.pythonhosted.org/packages/cf/03/8faa5c9576299b2adf34dcccf278fc6bbbcda8a3efcc4d817369026be421/websockets-14.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77569d19a13015e840b81550922056acabc25e3f52782625bc6843cfa034e1da", size = 169018 }, - { url = 
"https://files.pythonhosted.org/packages/8c/05/ea1fec05cc3a60defcdf0bb9f760c3c6bd2dd2710eff7ac7f891864a22ba/websockets-14.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cf5201a04550136ef870aa60ad3d29d2a59e452a7f96b94193bee6d73b8ad9a9", size = 169383 }, - { url = "https://files.pythonhosted.org/packages/21/1d/eac1d9ed787f80754e51228e78855f879ede1172c8b6185aca8cef494911/websockets-14.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:88cf9163ef674b5be5736a584c999e98daf3aabac6e536e43286eb74c126b9c7", size = 168773 }, - { url = "https://files.pythonhosted.org/packages/0e/1b/e808685530185915299740d82b3a4af3f2b44e56ccf4389397c7a5d95d39/websockets-14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:836bef7ae338a072e9d1863502026f01b14027250a4545672673057997d5c05a", size = 168757 }, - { url = "https://files.pythonhosted.org/packages/b6/19/6ab716d02a3b068fbbeb6face8a7423156e12c446975312f1c7c0f4badab/websockets-14.1-cp313-cp313-win32.whl", hash = "sha256:0d4290d559d68288da9f444089fd82490c8d2744309113fc26e2da6e48b65da6", size = 162834 }, - { url = "https://files.pythonhosted.org/packages/6c/fd/ab6b7676ba712f2fc89d1347a4b5bdc6aa130de10404071f2b2606450209/websockets-14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8621a07991add373c3c5c2cf89e1d277e49dc82ed72c75e3afc74bd0acc446f0", size = 163277 }, - { url = "https://files.pythonhosted.org/packages/fb/cd/382a05a1ba2a93bd9fb807716a660751295df72e77204fb130a102fcdd36/websockets-14.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5dc25a9dbd1a7f61eca4b7cb04e74ae4b963d658f9e4f9aad9cd00b688692c8", size = 159633 }, - { url = "https://files.pythonhosted.org/packages/b7/a0/fa7c62e2952ef028b422fbf420f9353d9dd4dfaa425de3deae36e98c0784/websockets-14.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:04a97aca96ca2acedf0d1f332c861c5a4486fdcba7bcef35873820f940c4231e", size = 159867 }, - { url = "https://files.pythonhosted.org/packages/c1/94/954b4924f868db31d5f0935893c7a8446515ee4b36bb8ad75a929469e453/websockets-14.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df174ece723b228d3e8734a6f2a6febbd413ddec39b3dc592f5a4aa0aff28098", size = 161121 }, - { url = "https://files.pythonhosted.org/packages/7a/2e/f12bbb41a8f2abb76428ba4fdcd9e67b5b364a3e7fa97c88f4d6950aa2d4/websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:034feb9f4286476f273b9a245fb15f02c34d9586a5bc936aff108c3ba1b21beb", size = 160731 }, - { url = "https://files.pythonhosted.org/packages/13/97/b76979401f2373af1fe3e08f960b265cecab112e7dac803446fb98351a52/websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c308dabd2b380807ab64b62985eaccf923a78ebc572bd485375b9ca2b7dc7", size = 160681 }, - { url = "https://files.pythonhosted.org/packages/39/9c/16916d9a436c109a1d7ba78817e8fee357b78968be3f6e6f517f43afa43d/websockets-14.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5a42d3ecbb2db5080fc578314439b1d79eef71d323dc661aa616fb492436af5d", size = 163316 }, - { url = "https://files.pythonhosted.org/packages/b0/0b/c7e5d11020242984d9d37990310520ed663b942333b83a033c2f20191113/websockets-14.1-py3-none-any.whl", hash = "sha256:4d4fc827a20abe6d544a119896f6b78ee13fe81cbfef416f3f2ddf09a03f0e2e", size = 156277 }, +version = "15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/2e/7a/8bc4d15af7ff30f7ba34f9a172063bfcee9f5001d7cef04bee800a658f33/websockets-15.0.tar.gz", hash = "sha256:ca36151289a15b39d8d683fd8b7abbe26fc50be311066c5f8dcf3cb8cee107ab", size = 175574 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/f1/b20cc4c1ff84911c791f36fa511a78203836bb4d603f56290de08c067437/websockets-15.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5e6ee18a53dd5743e6155b8ff7e8e477c25b29b440f87f65be8165275c87fef0", size = 174701 }, + { url = "https://files.pythonhosted.org/packages/f9/e8/4de59ee85ec86052ca574f4e5327ef948e4f77757d3c9c1503f5a0e9c039/websockets-15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ee06405ea2e67366a661ed313e14cf2a86e84142a3462852eb96348f7219cee3", size = 172358 }, + { url = "https://files.pythonhosted.org/packages/2f/ea/b0f95815cdc83d61b1a895858671c6af38a76c23f3ea5d91e2ba11bbedc7/websockets-15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8711682a629bbcaf492f5e0af72d378e976ea1d127a2d47584fa1c2c080b436b", size = 172610 }, + { url = "https://files.pythonhosted.org/packages/09/ed/c5d8f1f296f475c00611a40eff6a952248785efb125f91a0b29575f36ba6/websockets-15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94c4a9b01eede952442c088d415861b0cf2053cbd696b863f6d5022d4e4e2453", size = 181579 }, + { url = "https://files.pythonhosted.org/packages/b7/fc/2444b5ae792d92179f20cec53475bcc25d1d7f00a2be9947de9837ef230a/websockets-15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45535fead66e873f411c1d3cf0d3e175e66f4dd83c4f59d707d5b3e4c56541c4", size = 180588 }, + { url = "https://files.pythonhosted.org/packages/ff/b5/0945a31562d351cff26d76a2ae9a4ba4536e698aa059a4262afd793b2a1d/websockets-15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e389efe46ccb25a1f93d08c7a74e8123a2517f7b7458f043bd7529d1a63ffeb", size = 180902 }, + { url = "https://files.pythonhosted.org/packages/b6/7c/e9d844b87754bc83b294cc1c695cbc6c5d42e329b85d2bf2d7bb9554d09c/websockets-15.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:67a04754d121ea5ca39ddedc3f77071651fb5b0bc6b973c71c515415b44ed9c5", size = 181282 }, + { url = "https://files.pythonhosted.org/packages/9e/6c/6a5d3272f494fa2fb4806b896ecb312bd6c72bab632df4ace19946c079dc/websockets-15.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:bd66b4865c8b853b8cca7379afb692fc7f52cf898786537dfb5e5e2d64f0a47f", size = 180694 }, + { url = "https://files.pythonhosted.org/packages/b2/32/1fb4b62c2ec2c9844d4ddaa4021d993552c7c493a0acdcec95551679d501/websockets-15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a4cc73a6ae0a6751b76e69cece9d0311f054da9b22df6a12f2c53111735657c8", size = 180631 }, + { url = "https://files.pythonhosted.org/packages/e4/9b/5ef1ddb8857ce894217bdd9572ad98c1cef20d8f9f0f43823b782b7ded6b/websockets-15.0-cp310-cp310-win32.whl", hash = "sha256:89da58e4005e153b03fe8b8794330e3f6a9774ee9e1c3bd5bc52eb098c3b0c4f", size = 175664 }, + { url = "https://files.pythonhosted.org/packages/29/63/c320572ccf813ed2bc3058a0e0291ee95eb258dc5e6b3446ca45dc1af0fd/websockets-15.0-cp310-cp310-win_amd64.whl", hash = "sha256:4ff380aabd7a74a42a760ee76c68826a8f417ceb6ea415bd574a035a111fd133", size = 176109 }, + { url = "https://files.pythonhosted.org/packages/ee/16/81a7403c8c0a33383de647e89c07824ea6a654e3877d6ff402cbae298cb8/websockets-15.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:dd24c4d256558429aeeb8d6c24ebad4e982ac52c50bc3670ae8646c181263965", size = 174702 }, + { url = "https://files.pythonhosted.org/packages/ef/40/4629202386a3bf1195db9fe41baeb1d6dfd8d72e651d9592d81dae7fdc7c/websockets-15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f83eca8cbfd168e424dfa3b3b5c955d6c281e8fc09feb9d870886ff8d03683c7", size = 172359 }, + { url = "https://files.pythonhosted.org/packages/7b/33/dfb650e822bc7912d8c542c452497867af91dec81e7b5bf96aca5b419d58/websockets-15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4095a1f2093002c2208becf6f9a178b336b7572512ee0a1179731acb7788e8ad", size = 172604 }, + { url = "https://files.pythonhosted.org/packages/2e/52/666743114513fcffd43ee5df261a1eb5d41f8e9861b7a190b730732c19ba/websockets-15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb915101dfbf318486364ce85662bb7b020840f68138014972c08331458d41f3", size = 182145 }, + { url = "https://files.pythonhosted.org/packages/9c/63/5273f146b13aa4a057a95ab0855d9990f3a1ced63693f4365135d1abfacc/websockets-15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45d464622314973d78f364689d5dbb9144e559f93dca11b11af3f2480b5034e1", size = 181152 }, + { url = "https://files.pythonhosted.org/packages/0f/ae/075697f3f97de7c26b73ae96d952e13fa36393e0db3f028540b28954e0a9/websockets-15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace960769d60037ca9625b4c578a6f28a14301bd2a1ff13bb00e824ac9f73e55", size = 181523 }, + { url = "https://files.pythonhosted.org/packages/25/87/06d091bbcbe01903bed3dad3bb4a1a3c516f61e611ec31fffb28abe4974b/websockets-15.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c7cd4b1015d2f60dfe539ee6c95bc968d5d5fad92ab01bb5501a77393da4f596", size = 181791 }, + { url = "https://files.pythonhosted.org/packages/77/08/5063b6cc1b2aa1fba2ee3b578b777db22fde7145f121d07fd878811e983b/websockets-15.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4f7290295794b5dec470867c7baa4a14182b9732603fd0caf2a5bf1dc3ccabf3", size = 181231 }, + { url = "https://files.pythonhosted.org/packages/86/ff/af23084df0a7405bb2add12add8c17d6192a8de9480f1b90d12352ba2b7d/websockets-15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3abd670ca7ce230d5a624fd3d55e055215d8d9b723adee0a348352f5d8d12ff4", size = 181191 }, + { url = "https://files.pythonhosted.org/packages/21/ce/b2bdfcf49201dee0b899edc6a814755763ec03d74f2714923d38453a9e8d/websockets-15.0-cp311-cp311-win32.whl", hash = "sha256:110a847085246ab8d4d119632145224d6b49e406c64f1bbeed45c6f05097b680", size = 175666 }, + { url = "https://files.pythonhosted.org/packages/8d/7b/444edcd5365538c226b631897975a65bbf5ccf27c77102e17d8f12a306ea/websockets-15.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7bbbe2cd6ed80aceef2a14e9f1c1b61683194c216472ed5ff33b700e784e37", size = 176105 }, + { url = "https://files.pythonhosted.org/packages/22/1e/92c4547d7b2a93f848aedaf37e9054111bc00dc11bff4385ca3f80dbb412/websockets-15.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:cccc18077acd34c8072578394ec79563664b1c205f7a86a62e94fafc7b59001f", size = 174709 }, + { url = "https://files.pythonhosted.org/packages/9f/37/eae4830a28061ba552516d84478686b637cd9e57d6a90b45ad69e89cb0af/websockets-15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d4c22992e24f12de340ca5f824121a5b3e1a37ad4360b4e1aaf15e9d1c42582d", size = 172372 }, + { url = 
"https://files.pythonhosted.org/packages/46/2f/b409f8b8aa9328d5a47f7a301a43319d540d70cf036d1e6443675978a988/websockets-15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1206432cc6c644f6fc03374b264c5ff805d980311563202ed7fef91a38906276", size = 172607 }, + { url = "https://files.pythonhosted.org/packages/d6/81/d7e2e4542d4b4df849b0110df1b1f94f2647b71ab4b65d672090931ad2bb/websockets-15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d3cc75ef3e17490042c47e0523aee1bcc4eacd2482796107fd59dd1100a44bc", size = 182422 }, + { url = "https://files.pythonhosted.org/packages/b6/91/3b303160938d123eea97f58be363f7dbec76e8c59d587e07b5bc257dd584/websockets-15.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b89504227a5311610e4be16071465885a0a3d6b0e82e305ef46d9b064ce5fb72", size = 181362 }, + { url = "https://files.pythonhosted.org/packages/f2/8b/df6807f1ca339c567aba9a7ab03bfdb9a833f625e8d2b4fc7529e4c701de/websockets-15.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56e3efe356416bc67a8e093607315951d76910f03d2b3ad49c4ade9207bf710d", size = 181787 }, + { url = "https://files.pythonhosted.org/packages/21/37/e6d3d5ebb0ebcaf98ae84904205c9dcaf3e0fe93e65000b9f08631ed7309/websockets-15.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f2205cdb444a42a7919690238fb5979a05439b9dbb73dd47c863d39640d85ab", size = 182058 }, + { url = "https://files.pythonhosted.org/packages/c9/df/6aca296f2be4c638ad20908bb3d7c94ce7afc8d9b4b2b0780d1fc59b359c/websockets-15.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:aea01f40995fa0945c020228ab919b8dfc93fc8a9f2d3d705ab5b793f32d9e99", size = 181434 }, + { url = "https://files.pythonhosted.org/packages/88/f1/75717a982bab39bbe63c83f9df0e7753e5c98bab907eb4fb5d97fe5c8c11/websockets-15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9f8e33747b1332db11cf7fcf4a9512bef9748cb5eb4d3f7fbc8c30d75dc6ffc", size = 181431 }, + { url = "https://files.pythonhosted.org/packages/e7/15/cee9e63ed9ac5bfc1a3ae8fc6c02c41745023c21eed622eef142d8fdd749/websockets-15.0-cp312-cp312-win32.whl", hash = "sha256:32e02a2d83f4954aa8c17e03fe8ec6962432c39aca4be7e8ee346b05a3476904", size = 175678 }, + { url = "https://files.pythonhosted.org/packages/4e/00/993974c60f40faabb725d4dbae8b072ef73b4c4454bd261d3b1d34ace41f/websockets-15.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc02b159b65c05f2ed9ec176b715b66918a674bd4daed48a9a7a590dd4be1aa", size = 176119 }, + { url = "https://files.pythonhosted.org/packages/12/23/be28dc1023707ac51768f848d28a946443041a348ee3a54abdf9f6283372/websockets-15.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d2244d8ab24374bed366f9ff206e2619345f9cd7fe79aad5225f53faac28b6b1", size = 174714 }, + { url = "https://files.pythonhosted.org/packages/8f/ff/02b5e9fbb078e7666bf3d25c18c69b499747a12f3e7f2776063ef3fb7061/websockets-15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3a302241fbe825a3e4fe07666a2ab513edfdc6d43ce24b79691b45115273b5e7", size = 172374 }, + { url = "https://files.pythonhosted.org/packages/8e/61/901c8d4698e0477eff4c3c664d53f898b601fa83af4ce81946650ec2a4cb/websockets-15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:10552fed076757a70ba2c18edcbc601c7637b30cdfe8c24b65171e824c7d6081", size = 172605 }, + { url = 
"https://files.pythonhosted.org/packages/d2/4b/dc47601a80dff317aecf8da7b4ab278d11d3494b2c373b493e4887561f90/websockets-15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c53f97032b87a406044a1c33d1e9290cc38b117a8062e8a8b285175d7e2f99c9", size = 182380 }, + { url = "https://files.pythonhosted.org/packages/83/f7/b155d2b38f05ed47a0b8de1c9ea245fcd7fc625d89f35a37eccba34b42de/websockets-15.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1caf951110ca757b8ad9c4974f5cac7b8413004d2f29707e4d03a65d54cedf2b", size = 181325 }, + { url = "https://files.pythonhosted.org/packages/d3/ff/040a20c01c294695cac0e361caf86f33347acc38f164f6d2be1d3e007d9f/websockets-15.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bf1ab71f9f23b0a1d52ec1682a3907e0c208c12fef9c3e99d2b80166b17905f", size = 181763 }, + { url = "https://files.pythonhosted.org/packages/cb/6a/af23e93678fda8341ac8775e85123425e45c608389d3514863c702896ea5/websockets-15.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bfcd3acc1a81f106abac6afd42327d2cf1e77ec905ae11dc1d9142a006a496b6", size = 182097 }, + { url = "https://files.pythonhosted.org/packages/7e/3e/1069e159c30129dc03c01513b5830237e576f47cedb888777dd885cae583/websockets-15.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c8c5c8e1bac05ef3c23722e591ef4f688f528235e2480f157a9cfe0a19081375", size = 181485 }, + { url = "https://files.pythonhosted.org/packages/9a/a7/c91c47103f1cd941b576bbc452601e9e01f67d5c9be3e0a9abe726491ab5/websockets-15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:86bfb52a9cfbcc09aba2b71388b0a20ea5c52b6517c0b2e316222435a8cdab72", size = 181466 }, + { url = "https://files.pythonhosted.org/packages/16/32/a4ca6e3d56c24aac46b0cf5c03b841379f6409d07fc2044b244f90f54105/websockets-15.0-cp313-cp313-win32.whl", hash = "sha256:26ba70fed190708551c19a360f9d7eca8e8c0f615d19a574292b7229e0ae324c", size = 175673 }, + { url = "https://files.pythonhosted.org/packages/c0/31/25a417a23e985b61ffa5544f9facfe4a118cb64d664c886f1244a8baeca5/websockets-15.0-cp313-cp313-win_amd64.whl", hash = "sha256:ae721bcc8e69846af00b7a77a220614d9b2ec57d25017a6bbde3a99473e41ce8", size = 176115 }, + { url = "https://files.pythonhosted.org/packages/42/52/359467c7ca12721a04520da9ba9fc29da2cd176c30992f6f81fa881bb3e5/websockets-15.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b499caef4bca9cbd0bd23cd3386f5113ee7378094a3cb613a2fa543260fe9506", size = 172384 }, + { url = "https://files.pythonhosted.org/packages/7c/ff/36fd8a45fac404d8f109e03ca06328f49847d71c0c048414c76bb2db91c4/websockets-15.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:17f2854c6bd9ee008c4b270f7010fe2da6c16eac5724a175e75010aacd905b31", size = 172616 }, + { url = "https://files.pythonhosted.org/packages/b1/a8/65496a87984815e2837835d5ac3c9f81ea82031036877e8f80953c59dbd9/websockets-15.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89f72524033abbfde880ad338fd3c2c16e31ae232323ebdfbc745cbb1b3dcc03", size = 173871 }, + { url = "https://files.pythonhosted.org/packages/23/89/9441e1e0818d46fe22d78b3e5c8fe2316516211330e138231c90dce5559e/websockets-15.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1657a9eecb29d7838e3b415458cc494e6d1b194f7ac73a34aa55c6fb6c72d1f3", size = 173477 }, + { url = 
"https://files.pythonhosted.org/packages/2f/1b/80460b3ac9795ef7bbaa074c603d64e009dbb2ceb11008416efab0dcc811/websockets-15.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e413352a921f5ad5d66f9e2869b977e88d5103fc528b6deb8423028a2befd842", size = 173425 }, + { url = "https://files.pythonhosted.org/packages/56/d1/8da7e733ed266f342e8c544c3b8338449de9b860d85d9a0bfd4fe1857d6e/websockets-15.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8561c48b0090993e3b2a54db480cab1d23eb2c5735067213bb90f402806339f5", size = 176160 }, + { url = "https://files.pythonhosted.org/packages/e8/b2/31eec524b53f01cd8343f10a8e429730c52c1849941d1f530f8253b6d934/websockets-15.0-py3-none-any.whl", hash = "sha256:51ffd53c53c4442415b613497a34ba0aa7b99ac07f1e4a62db5dcd640ae6c3c3", size = 169023 }, ] [[package]] From fd0c55cec44366f60231dbc6655153ff7dd4efd2 Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Wed, 5 Mar 2025 18:28:48 +0000 Subject: [PATCH 07/63] Revert "Python: Preb1 merge from main 1 (#10805)" This reverts commit 865d67edaebb7af1f118af19bf9930610894b44f. --- .github/_typos.toml | 5 +- .github/workflows/dotnet-build-and-test.yml | 10 +- .github/workflows/label-needs-port.yml | 41 - .github/workflows/python-build.yml | 17 - .../workflows/python-integration-tests.yml | 122 +- .github/workflows/python-lint.yml | 1 - .github/workflows/python-manual-release.yml | 30 - .github/workflows/python-test-coverage.yml | 1 - .github/workflows/python-unit-tests.yml | 17 +- .vscode/launch.json | 13 - .vscode/tasks.json | 9 - docs/decisions/0054-processes.md | 5 +- .../0064-hybrid-model-orchestration.md | 276 -- docs/decisions/0065-realtime-api-clients.md | 1770 ----------- docs/decisions/0066-concepts-guidelines.md | 93 - dotnet/Directory.Packages.props | 95 +- dotnet/SK-dotnet.sln | 123 +- dotnet/nuget/nuget-package.props | 4 +- .../Agents/AzureAIAgent_FileManipulation.cs | 68 - .../Concepts/Agents/AzureAIAgent_Streaming.cs | 183 -- .../ChatCompletion_FunctionTermination.cs | 9 +- .../Agents/ChatCompletion_HistoryReducer.cs | 2 +- .../Agents/ChatCompletion_Serialization.cs | 3 +- .../Agents/ChatCompletion_ServiceSelection.cs | 17 +- .../Agents/ChatCompletion_Streaming.cs | 3 +- .../Agents/ChatCompletion_Templating.cs | 8 +- .../Agents/ComplexChat_NestedShopper.cs | 2 +- .../Concepts/Agents/DeclarativeAgents.cs | 31 +- .../Concepts/Agents/MixedChat_Agents.cs | 23 +- .../Concepts/Agents/MixedChat_Files.cs | 43 +- .../Concepts/Agents/MixedChat_Images.cs | 33 +- .../Concepts/Agents/MixedChat_Reset.cs | 24 +- .../Agents/MixedChat_Serialization.cs | 22 +- .../Concepts/Agents/MixedChat_Streaming.cs | 23 +- .../Agents/OpenAIAssistant_ChartMaker.cs | 36 +- .../OpenAIAssistant_FileManipulation.cs | 40 +- .../Agents/OpenAIAssistant_FunctionFilters.cs | 33 +- .../Agents/OpenAIAssistant_Streaming.cs | 87 +- .../Agents/OpenAIAssistant_Templating.cs | 92 +- .../AzureAIInference_ChatCompletion.cs | 6 +- ...zureAIInference_ChatCompletionStreaming.cs | 34 +- .../AzureOpenAI_ChatCompletion.cs | 79 +- .../AzureOpenAI_ChatCompletionStreaming.cs | 26 +- ...AzureOpenAI_ChatCompletionWithReasoning.cs | 102 - .../AzureOpenAI_CustomClient.cs | 42 +- .../ChatHistoryExtensions.cs | 14 +- .../ChatHistoryReducerTests.cs | 50 +- .../IChatHistoryReducer.cs | 20 + ...ucer.cs => MaxTokensChatHistoryReducer.cs} | 14 +- .../SummarizingChatHistoryReducer.cs | 140 + .../TruncatingChatHistoryReducer.cs | 80 + .../HuggingFace_ChatCompletion.cs | 97 - 
.../HuggingFace_ChatCompletionStreaming.cs | 95 - .../HybridCompletion_Fallback.cs | 279 -- .../ChatCompletion/LMStudio_ChatCompletion.cs | 92 - .../LMStudio_ChatCompletionStreaming.cs | 97 - .../MultipleProviders_ChatHistoryReducer.cs | 8 +- .../ChatCompletion/Ollama_ChatCompletion.cs | 51 +- .../Ollama_ChatCompletionStreaming.cs | 129 +- .../ChatCompletion/OpenAI_ChatCompletion.cs | 93 +- .../OpenAI_ChatCompletionStreaming.cs | 33 +- .../OpenAI_ChatCompletionWithReasoning.cs | 86 - .../ChatCompletion/OpenAI_CustomClient.cs | 41 +- dotnet/samples/Concepts/Concepts.csproj | 11 +- .../Filtering/AzureOpenAI_DeploymentSwitch.cs | 116 - .../HuggingFace_ChatCompletionWithTGI.cs | 89 + .../MultipleProviders_ChatCompletion.cs | 95 + .../Plugins/CopilotAgentBasedPlugins.cs | 73 +- .../samples/Concepts/Plugins/CrewAI_Plugin.cs | 108 - dotnet/samples/Concepts/README.md | 39 +- .../Resources/Agents/ParrotAgent.yaml | 9 + .../Concepts/Resources/Agents/ToolAgent.yaml | 7 + .../Concepts/Resources/Agents/travelinfo.txt | 217 ++ .../MessagesPlugin/apimanifest.json | 2 +- .../AstronomyPlugin/messages-openapi.yml | 45 +- .../CalendarPlugin/calendar-apiplugin.json | 10 +- .../CalendarPlugin/calendar-openapi.yml | 51 +- .../MessagesPlugin/messages-apiplugin.json | 13 +- .../MessagesPlugin/messages-openapi.yml | 45 +- .../Plugins/CopilotAgentPlugins/README.md | 4 +- .../Resources/Plugins/LegacyMenuPlugin.cs | 50 + ...enticationProviderWithCancellationToken.cs | 65 - .../CopilotAgentPluginsDemoSample.csproj | 55 - .../CopilotAgentPluginsDemoSample.sln | 24 - .../DemoCommand.cs | 528 ---- .../Logging/SemanticKernelLogger.cs | 125 - .../Logging/SemanticKernelLoggerProvider.cs | 27 - .../CopilotAgentPluginsDemoSample/Program.cs | 11 - .../appsettings.json | 19 - .../Demos/CopilotAgentPlugins/README.md | 168 -- .../CopilotAgentPlugins/TROUBLESHOOTING.md | 11 - .../images/AppRegistration_APIPermissions.png | Bin 127548 -> 0 bytes .../images/AppRegistration_AppSecret.png | Bin 81407 -> 0 bytes ...on_Authentication_localhostredirecturi.png | Bin 100857 -> 0 bytes .../ApplicationOverViewScreenClientIDetc.png | Bin 115660 -> 0 bytes .../images/CAPs_PublicRoadmap.png | Bin 138724 -> 0 bytes .../images/aad-portal-app-registrations.png | Bin 68909 -> 0 bytes .../McpDotNetExtensions.cs | 159 - .../ModelContextProtocol.csproj | 33 - .../Demos/ModelContextProtocol/Program.cs | 55 - .../Demos/ModelContextProtocol/README.md | 44 - .../SimpleToolsConsole.json | 17 - .../ProcessFramework.Aspire.AppHost.csproj | 34 - .../Program.cs | 17 - .../appsettings.json | 12 - .../Models/ProcessEvents.cs | 11 - ...ramework.Aspire.ProcessOrchestrator.csproj | 28 - ...sFramework.Aspire.ProcessOrchestrator.http | 5 - .../Program.cs | 90 - .../Steps/SummarizeStep.cs | 23 - .../Steps/TranslateStep.cs | 23 - .../SummaryAgentHttpClient.cs | 20 - .../TranslatorAgentHttpClient.cs | 20 - .../appsettings.json | 9 - .../Extensions.cs | 163 - ...essFramework.Aspire.ServiceDefaults.csproj | 23 - .../ProcessFramework.Aspire.Shared.csproj | 11 - .../SummarizeRequest.cs | 14 - .../TranslationRequest.cs | 14 - ...rocessFramework.Aspire.SummaryAgent.csproj | 25 - .../ProcessFramework.Aspire.SummaryAgent.http | 9 - .../Program.cs | 87 - .../appsettings.json | 9 - ...essFramework.Aspire.TranslatorAgent.csproj | 26 - ...ocessFramework.Aspire.TranslatorAgent.http | 9 - .../Program.cs | 87 - .../appsettings.json | 9 - .../ProcessFrameworkWithAspire/README.md | 45 - .../docs/architecture.png | Bin 45609 -> 0 bytes .../docs/aspire-dashboard.png | Bin 114295 -> 0 
bytes .../docs/aspire-metrics.png | Bin 185939 -> 0 bytes .../docs/aspire-traces.png | Bin 177506 -> 0 bytes .../AzureAIAgent/Step01_AzureAIAgent.cs | 68 - .../Step02_AzureAIAgent_Plugins.cs | 100 - .../Step04_AzureAIAgent_CodeInterpreter.cs | 54 - .../Step05_AzureAIAgent_FileSearch.cs | 71 - .../Step06_AzureAIAgent_OpenAPI.cs | 68 - .../Step07_AzureAIAgent_Functions.cs | 75 - .../BedrockAgent/README.md | 38 - .../BedrockAgent/Step01_BedrockAgent.cs | 73 - .../Step02_BedrockAgent_CodeInterpreter.cs | 90 - .../Step03_BedrockAgent_Functions.cs | 141 - .../BedrockAgent/Step04_BedrockAgent_Trace.cs | 176 -- .../Step05_BedrockAgent_FileSearch.cs | 75 - .../Step06_BedrockAgent_AgentChat.cs | 93 - .../GettingStartedWithAgents.csproj | 6 +- .../OpenAIAssistant/Step01_Assistant.cs | 66 - .../Step02_Assistant_Plugins.cs | 92 - .../Step05_AssistantTool_FileSearch.cs | 73 - .../Step06_AssistantTool_Function.cs | 77 - .../Plugins/MenuPlugin.cs | 79 - .../Plugins/WidgetFactory.cs | 63 - .../GettingStartedWithAgents/README.md | 64 +- .../Resources/AutoInvokeTools.yaml | 7 - .../Resources/countries.json | 46 - .../Resources/weather.json | 62 - .../GettingStartedWithAgents/Step01_Agent.cs | 13 +- .../Step02_Plugins.cs | 119 +- .../Step04_KernelFunctionStrategies.cs | 2 +- .../Step06_DependencyInjection.cs | 3 +- ...AzureAIAgent_Chat.cs => Step07_Logging.cs} | 77 +- .../Step07_Telemetry.cs | 236 -- .../Step08_Assistant.cs | 142 + ...t_Vision.cs => Step09_Assistant_Vision.cs} | 35 +- ...> Step10_AssistantTool_CodeInterpreter.cs} | 30 +- .../Step11_AssistantTool_FileSearch.cs | 84 + .../Step04/KernelExtensions.cs | 3 +- .../Step04/Step04_AgentOrchestration.cs | 2 +- dotnet/src/.editorconfig | 3 - dotnet/src/Agents/Abstractions/Agent.cs | 31 +- .../src/Agents/Abstractions/AgentChannel.cs | 23 +- dotnet/src/Agents/Abstractions/AgentChat.cs | 103 +- .../Abstractions/AgentChatSerializer.cs | 14 +- .../Abstractions/Agents.Abstractions.csproj | 7 +- .../Agents/Abstractions/AggregatorAgent.cs | 12 +- .../Agents/Abstractions/AggregatorChannel.cs | 2 - .../Extensions/ChatHistoryExtensions.cs | 10 +- .../Abstractions/Internal/BroadcastQueue.cs | 6 +- .../Abstractions/Internal/ChannelReference.cs | 3 - dotnet/src/Agents/Abstractions/KernelAgent.cs | 59 +- .../Logging/AgentChatLogMessages.cs | 46 +- .../Logging/AggregatorAgentLogMessages.cs | 1 - .../Properties/AssemblyInfo.cs | 0 .../Serialization/AgentParticipant.cs | 18 +- .../Serialization/ChatMessageReference.cs | 16 +- .../src/Agents/AzureAI/Agents.AzureAI.csproj | 48 - .../AzureAI/AzureAIAgent.ClientFactory.cs | 65 - dotnet/src/Agents/AzureAI/AzureAIAgent.cs | 285 -- dotnet/src/Agents/AzureAI/AzureAIChannel.cs | 61 - .../Agents/AzureAI/AzureAIClientProvider.cs | 116 - .../AzureAI/AzureAIInvocationOptions.cs | 109 - .../AzureAI/AzureAIThreadMessageFactory.cs | 23 - .../AzureAI/Extensions/AgentRunExtensions.cs | 120 - .../Extensions/KernelFunctionExtensions.cs | 29 - .../AzureAI/Internal/AgentMessageFactory.cs | 98 - .../AzureAI/Internal/AgentThreadActions.cs | 860 ------ .../Logging/AgentThreadActionsLogMessages.cs | 139 - .../Logging/AzureAIAgentLogMessages.cs | 69 - .../src/Agents/AzureAI/RunPollingOptions.cs | 73 - .../src/Agents/Bedrock/Agents.Bedrock.csproj | 50 - dotnet/src/Agents/Bedrock/BedrockAgent.cs | 263 -- .../src/Agents/Bedrock/BedrockAgentChannel.cs | 248 -- .../Extensions/BedrockAgentExtensions.cs | 214 -- .../BedrockAgentInvokeExtensions.cs | 225 -- .../BedrockFunctionSchemaExtensions.cs | 102 - dotnet/src/Agents/Bedrock/README.md | 27 - 
dotnet/src/Agents/Core/AgentGroupChat.cs | 80 +- dotnet/src/Agents/Core/Agents.Core.csproj | 6 +- .../Core/Chat/AgentGroupChatSettings.cs | 24 +- .../Chat/AggregatorTerminationStrategy.cs | 14 +- .../Chat/KernelFunctionSelectionStrategy.cs | 25 +- .../Chat/KernelFunctionTerminationStrategy.cs | 23 +- .../Core/Chat/RegExTerminationStrategy.cs | 2 - .../src/Agents/Core/Chat/SelectionStrategy.cs | 20 +- .../Core/Chat/SequentialSelectionStrategy.cs | 9 +- .../Agents/Core/Chat/TerminationStrategy.cs | 38 +- dotnet/src/Agents/Core/ChatCompletionAgent.cs | 167 +- dotnet/src/Agents/Core/ChatHistoryChannel.cs | 20 +- .../src/Agents/Core/ChatHistoryKernelAgent.cs | 37 +- .../History}/ChatHistoryReducerExtensions.cs | 107 +- .../ChatHistorySummarizationReducer.cs | 87 +- .../History}/ChatHistoryTruncationReducer.cs | 42 +- .../Core/History/IChatHistoryReducer.cs | 32 + .../Core/Internal/ChatMessageForPrompt.cs | 2 - .../Core/Logging/AgentGroupChatLogMessages.cs | 15 +- ...ggregatorTerminationStrategyLogMessages.cs | 1 - .../Logging/ChatCompletionAgentLogMessages.cs | 9 +- ...nelFunctionSelectionStrategyLogMessages.cs | 1 - ...lFunctionTerminationStrategyLogMessages.cs | 1 - .../RegExTerminationStrategyLogMessages.cs | 1 - .../SequentialSelectionStrategyLogMessages.cs | 6 +- .../Logging/TerminationStrategyLogMessages.cs | 14 +- .../Properties/AssemblyInfo.cs | 0 dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj | 9 +- .../OpenAI}/Extensions/AgentExtensions.cs | 15 +- .../Extensions/AssistantClientExtensions.cs | 172 -- .../ChatContentMessageExtensions.cs | 36 - .../OpenAI/Extensions/KernelExtensions.cs | 18 + .../Extensions/KernelFunctionExtensions.cs | 84 +- .../Extensions/OpenAIClientExtensions.cs | 110 - .../OpenAI/Internal/AddHeaderRequestPolicy.cs | 13 + .../AssistantCreationOptionsFactory.cs | 2 - .../Internal/AssistantRunOptionsFactory.cs | 70 +- .../OpenAI/Internal/AssistantThreadActions.cs | 112 +- .../Internal/AssistantToolResourcesFactory.cs | 2 +- .../OpenAIAssistantAgent.ClientFactory.cs | 122 - .../src/Agents/OpenAI/OpenAIAssistantAgent.cs | 380 ++- .../OpenAI/OpenAIAssistantCapabilities.cs | 42 +- .../Agents/OpenAI/OpenAIAssistantChannel.cs | 18 +- .../OpenAI/OpenAIAssistantDefinition.cs | 16 +- .../OpenAI/OpenAIAssistantExecutionOptions.cs | 20 +- .../OpenAIAssistantInvocationOptions.cs | 55 +- .../src/Agents/OpenAI/OpenAIClientProvider.cs | 49 +- .../OpenAI/OpenAIThreadCreationOptions.cs | 23 +- .../OpenAI/Properties}/AssemblyInfo.cs | 2 +- dotnet/src/Agents/OpenAI/RunPollingOptions.cs | 28 +- .../Agents/UnitTests/Agents.UnitTests.csproj | 18 +- .../AzureAIAssistantInvocationOptionsTests.cs | 113 - .../AzureAI/AzureAIClientProviderTests.cs | 72 - .../KernelFunctionExtensionsTests.cs | 64 - .../Internal/AgentMessageFactoryTests.cs | 112 - .../AzureAI/RunPollingOptionsTests.cs | 71 - .../Bedrock/BedrockAgentChannelTests.cs | 289 -- .../UnitTests/Bedrock/BedrockAgentTests.cs | 290 -- .../BedrockAgentExtensionsTests.cs | 320 -- .../BedrockFunctionSchemaExtensionsTests.cs | 111 - .../Core/ChatCompletionAgentTests.cs | 71 +- .../UnitTests/Core/ChatHistoryChannelTests.cs | 96 +- .../ChatHistoryReducerExtensionsTests.cs | 26 +- .../ChatHistorySummarizationReducerTests.cs | 35 +- .../ChatHistoryTruncationReducerTests.cs | 29 +- .../Core/History/MockHistoryGenerator.cs} | 12 +- .../src/Agents/UnitTests/KernelAgentTests.cs | 13 +- dotnet/src/Agents/UnitTests/MockAgent.cs | 2 +- .../{Test => OpenAI}/AssertCollection.cs | 2 +- .../Azure/AddHeaderRequestPolicyTests.cs | 37 + 
.../AssistantClientExtensionsTests.cs | 357 --- .../Extensions/KernelExtensionsTests.cs | 60 + .../Extensions/OpenAIClientExtensionsTests.cs | 139 - .../AssistantRunOptionsFactoryTests.cs | 91 +- .../OpenAI/OpenAIAssistantAgentTests.cs | 87 +- .../OpenAI/OpenAIAssistantDefinitionTests.cs | 1 - .../OpenAIAssistantInvocationOptionsTests.cs | 1 - .../OpenAI/OpenAIAssistantResponseContent.cs | 149 +- .../OpenAI/OpenAIClientProviderTests.cs | 4 +- .../OpenAIThreadCreationOptionsTests.cs | 1 - ...reAIInferenceChatCompletionServiceTests.cs | 2 +- ...AzureAIInferencePromptExecutionSettings.cs | 2 +- .../Core/AzureClientCoreTests.cs | 90 - .../AzureOpenAIChatCompletionServiceTests.cs | 257 +- .../AzureOpenAITextToAudioServiceTests.cs | 17 - ...AzureOpenAIPromptExecutionSettingsTests.cs | 6 +- .../Core/AzureClientCore.ChatCompletion.cs | 37 +- .../Core/AzureClientCore.cs | 3 - .../AzureOpenAIPromptExecutionSettings.cs | 22 - .../Core/Gemini/GeminiRequestTests.cs | 65 - ...oogleAIGeminiChatCompletionServiceTests.cs | 58 +- ...ertexAIGeminiChatCompletionServiceTests.cs | 57 +- .../Connectors.Google/Core/ClientBase.cs | 1 - .../Core/Gemini/Models/GeminiRequest.cs | 48 - .../GeminiPromptExecutionSettings.cs | 27 - .../Connectors.Google/VertexAIVersion.cs | 7 +- .../IPineconeMemoryStore.cs | 4 +- .../IPostgresVectorStoreDbClient.cs | 2 +- ...PostgresVectorStoreCollectionSqlBuilder.cs | 2 +- .../IQdrantVectorDbClient.cs | 20 +- .../OllamaKernelBuilderExtensionsTests.cs | 158 - .../OllamaServiceCollectionExtensionsTests.cs | 202 -- .../OllamaKernelBuilderExtensions.cs | 53 +- .../OllamaServiceCollectionExtensions.cs | 77 +- .../Services/OllamaTextGenerationService.cs | 12 - .../OnnxRuntimeGenAIChatCompletionService.cs | 5 +- .../Core/AutoFunctionInvocationFilterTests.cs | 59 - .../OpenAIChatCompletionServiceTests.cs | 101 - .../OpenAIPromptExecutionSettingsTests.cs | 19 +- .../Core/ClientCore.ChatCompletion.cs | 53 +- .../Settings/OpenAIPromptExecutionSettings.cs | 45 - .../AnyTagEqualToFilterClause.cs | 6 +- .../FilterClauses/EqualToFilterClause.cs | 6 +- .../FilterClauses/FilterClause.cs | 2 +- .../VectorStoreRecordDataAttribute.cs | 26 +- .../VectorStoreRecordKeyAttribute.cs | 8 +- .../VectorStoreRecordVectorAttribute.cs | 24 +- .../RecordDefinition/DistanceFunction.cs | 8 +- .../RecordDefinition/IndexKind.cs | 29 +- .../VectorStoreRecordDataProperty.cs | 16 +- .../VectorStoreRecordDefinition.cs | 4 +- .../VectorStoreRecordKeyProperty.cs | 4 +- .../VectorStoreRecordProperty.cs | 16 +- .../VectorStoreRecordVectorProperty.cs | 26 +- .../RecordOptions/GetRecordOptions.cs | 4 +- .../VectorData.Abstractions.csproj | 2 +- .../VectorSearch/IVectorizableTextSearch.cs | 2 +- .../VectorSearch/IVectorizedSearch.cs | 2 +- .../VectorSearch/VectorSearchFilter.cs | 36 +- .../VectorSearch/VectorSearchOptions.cs | 14 +- .../VectorSearch/VectorSearchResult.cs | 6 +- .../VectorSearch/VectorSearchResults.cs | 6 +- .../VectorStorage/IVectorStore.cs | 10 +- .../IVectorStoreRecordCollection.cs | 72 +- .../VectorStorage/IVectorStoreRecordMapper.cs | 6 +- .../StorageToDataModelMapperOptions.cs | 4 +- .../VectorStorage/VectorStoreException.cs | 6 +- .../VectorStoreOperationException.cs | 2 +- .../VectorStoreRecordMappingException.cs | 2 +- .../VectorStoreGenericDataModel.cs | 4 +- .../IKernelExternalProcessMessageChannel.cs | 32 - .../IKernelProcessMessageChannel.cs | 2 +- .../KernelProcessContext.cs | 6 - .../KernelProcessStepContext.cs | 27 +- .../Controllers/ProcessTestController.cs | 18 - .../Program.cs | 5 - 
.../DaprTestProcessContext.cs | 11 - .../ProcessTestFixture.cs | 4 +- .../ProcessTestFixture.cs | 5 +- .../CloudEvents/MockCloudEventClient.cs | 63 - .../CloudEvents/MockCloudEventData.cs | 19 - .../ProcessCloudEventsResources.cs | 40 - .../Process.IntegrationTests.Shared.props | 1 - .../ProcessCloudEventsTests.cs | 113 - .../ProcessTestFixture.cs | 3 +- .../LocalKernelProcessContext.cs | 15 +- .../LocalKernelProcessFactory.cs | 5 +- .../Process.LocalRuntime/LocalProcess.cs | 1 - .../Process.LocalRuntime/LocalStep.cs | 11 +- .../Actors/ExternalMessageBufferActor.cs | 39 - .../ExternalMessageBufferActorWrapper.cs | 43 - .../Process.Runtime.Dapr/Actors/StepActor.cs | 9 +- .../DaprKernelProcessContext.cs | 6 - .../Interfaces/IExternalMessageBuffer.cs | 25 - .../KernelProcessDaprExtensions.cs | 1 - .../HandlebarsPromptTemplateTests.cs | 35 - .../HandlebarsPromptTemplate.cs | 3 +- .../HandlebarsPromptTemplateOptions.cs | 5 - .../LiquidPromptTemplate.cs | 6 +- .../CopilotAgentPluginKernelExtensions.cs | 34 +- .../OpenApiFunctionExecutionParameters.cs | 1 + .../Extensions/OpenApiKernelExtensions.cs | 3 + .../Functions.OpenApi.csproj | 1 + .../HttpResponseContentReader.cs | 2 + .../HttpResponseContentReaderContext.cs | 2 + .../Model/RestApiOperation.cs | 6 +- .../OpenApi/OpenApiDocumentParser.cs | 1 + .../OpenApi/OpenApiDocumentParserOptions.cs | 2 + .../OperationSelectionPredicateContext.cs | 2 + .../OpenApiKernelPluginFactory.cs | 2 + .../RestApiOperationRunner.cs | 4 +- .../Serialization/OpenApiTypeConverter.cs | 35 +- .../PromptyTest.cs | 5 +- .../TestData/chat.prompty | 2 +- .../TestData/chatJsonObject.prompty | 1 - .../Functions.Prompty/Core/PromptyModel.cs | 20 + .../Core/PromptyModelConfig.cs | 31 + .../Core/PromptyModelParameters.cs | 50 + .../Core/PromptyResponseFormat.cs | 13 + .../Functions.Prompty/Core/PromptyTool.cs | 44 + .../Functions.Prompty/Core/PromptyYaml.cs | 42 + .../Functions.Prompty/Core/Types/ApiType.cs | 9 + .../Functions.Prompty/Core/Types/ModelType.cs | 9 + .../Core/Types/ParserType.cs | 11 + .../Functions.Prompty/Core/Types/RoleType.cs | 12 + .../Functions.Prompty.csproj | 2 +- .../KernelFunctionPrompty.cs | 144 +- ...CopilotAgentPluginKernelExtensionsTests.cs | 2 +- .../OpenApiTypeConverterTests.cs | 47 - .../TestPlugins/messages-apiplugin.json | 15 +- .../OpenApi/TestPlugins/messages-openapi.yml | 45 +- .../Agents/BedrockAgentTests.cs | 238 -- .../Agents/MixedAgentTests.cs | 17 +- .../Agents/OpenAIAssistantAgentTests.cs | 181 +- .../IntegrationTests/BaseIntegrationTest.cs | 4 +- ...AzureOpenAIChatCompletionStreamingTests.cs | 15 +- .../Gemini/GeminiChatCompletionTests.cs | 110 - .../Connectors/Google/TestsBase.cs | 16 +- ...ostgresVectorStoreRecordCollectionTests.cs | 30 +- .../IntegrationTests/IntegrationTests.csproj | 7 - .../Resources/gemini_cached_content.json | 22 - .../TestSettings/BedrockAgentConfiguration.cs | 13 - dotnet/src/IntegrationTests/testsettings.json | 4 - .../agents/AgentUtilities.props | 5 - .../KernelFunctionMetadataExtensions.cs | 36 - .../azure/AzureAIUtilities.props | 5 - .../Policies/GeneratedActionPipelinePolicy.cs | 32 - .../FunctionCalling/FunctionCallsProcessor.cs | 19 +- .../process/Abstractions/StepExtensions.cs | 9 +- .../samples/AgentUtilities/BaseAgentsTest.cs | 45 +- .../AgentUtilities/BaseAssistantTest.cs | 90 - .../AgentUtilities/BaseAzureAgentTest.cs | 173 -- .../samples/AgentUtilities/BaseAzureTest.cs | 66 - .../AgentUtilities/BaseBedrockAgentTest.cs | 41 - .../samples/InternalUtilities/BaseTest.cs | 27 - 
.../InternalUtilities/TestConfiguration.cs | 27 - .../src/Diagnostics/ActivityExtensions.cs | 53 +- .../src/Diagnostics/ModelDiagnostics.cs | 35 - .../CrewAI/CrewAIEnterpriseClientTests.cs | 151 - .../CrewAI/CrewAIEnterpriseTests.cs | 150 - .../CrewAI/MockHttpClientFactory.cs | 24 - .../Plugins.AI.UnitTests.csproj | 37 - .../CrewAI/Client/CrewAIEnterpriseClient.cs | 164 -- .../CrewAI/Client/CrewAIStateEnumConverter.cs | 44 - .../Plugins.AI/CrewAI/CrewAIEnterprise.cs | 282 -- .../Plugins.AI/CrewAI/CrewAIInputMetadata.cs | 15 - .../CrewAI/Models/CrewAIKickoffResponse.cs | 16 - .../CrewAI/Models/CrewAIKickoffState.cs | 44 - .../CrewAI/Models/CrewAIRequiredInputs.cs | 18 - .../CrewAI/Models/CrewAIStatusResponse.cs | 31 - .../src/Plugins/Plugins.AI/Plugins.AI.csproj | 34 - .../FileSystem/IFileSystemConnector.cs | 8 +- .../Plugins.Document/IDocumentConnector.cs | 6 +- .../AIFunctionKernelFunction.cs | 58 +- .../AI/ChatCompletion/AuthorRole.cs | 5 - .../ChatClientChatCompletionService.cs | 32 +- .../ChatCompletionServiceChatClient.cs | 11 +- .../ChatCompletionServiceExtensions.cs | 38 +- .../AI/ChatCompletion/ChatHistory.cs | 28 +- .../ChatCompletion/ChatHistoryExtensions.cs | 83 - .../AI/ChatCompletion/ChatPromptParser.cs | 11 +- .../AI/ChatCompletion/IChatHistoryReducer.cs | 23 - .../EmbeddingGenerationServiceExtensions.cs | 7 +- .../FunctionChoiceBehaviorOptions.cs | 2 + .../AI/TextToImage/ITextToImageService.cs | 2 +- .../AbstractionsJsonContext.cs | 2 - .../Contents/AnnotationContent.cs | 2 +- .../Contents/FileReferenceContent.cs | 9 - .../Contents/KernelContent.cs | 2 +- .../Contents/StreamingAnnotationContent.cs | 2 +- .../Contents/StreamingChatMessageContent.cs | 2 +- .../Data/TextSearch/ITextSearch.cs | 6 +- .../AutoFunctionInvocationContext.cs | 7 - .../Filters/Prompt/PromptRenderContext.cs | 7 - .../Functions/KernelFunction.cs | 54 +- .../Functions/KernelFunctionNoop.cs | 46 - .../Functions/KernelFunctionSchemaModel.cs | 22 - .../src/SemanticKernel.Abstractions/Kernel.cs | 4 +- .../Memory/ISemanticTextMemory.cs | 12 +- .../Services/AIServiceExtensions.cs | 30 - .../Contents/BinaryContentExtensions.cs | 38 - .../Data/KernelBuilderExtensions.cs | 79 + .../Data/ServiceCollectionExtensions.cs | 71 + .../Data/VolatileVectorStore.cs | 63 + ...atileVectorStoreCollectionSearchMapping.cs | 221 ++ .../Data/VolatileVectorStoreExtensions.cs | 105 + .../Data/VolatileVectorStoreKeyResolver.cs | 14 + .../VolatileVectorStoreRecordCollection.cs | 380 +++ ...atileVectorStoreRecordCollectionOptions.cs | 46 + .../Data/VolatileVectorStoreVectorResolver.cs | 14 + .../Functions/KernelFunctionFromPrompt.cs | 2 +- .../TemplateEngine/Blocks/ICodeRendering.cs | 2 +- .../TemplateEngine/Blocks/ITextRendering.cs | 2 +- .../AI/ChatCompletion/ChatHistoryTests.cs | 31 - .../AI/ServiceConversionExtensionsTests.cs | 75 +- .../Contents/AnnotationContentTests.cs | 2 +- .../Contents/ChatMessageContentTests.cs | 27 +- .../Contents/FileReferenceContentTests.cs | 16 - .../StreamingAnnotationContentTests.cs | 2 +- .../Data/KernelBuilderExtensionsTests.cs | 125 + .../Data/ServiceCollectionExtensionsTests.cs | 37 + ...olatileVectorStoreRecordCollectionTests.cs | 577 ++++ .../Data/VolatileVectorStoreTests.cs | 103 + .../ClientResultExceptionExtensionsTests.cs | 2 +- .../Filters/PromptRenderFilterTests.cs | 38 - .../Functions/KernelPluginTests.cs | 28 +- .../Prompt/ChatPromptParserTests.cs | 126 +- .../SemanticKernel.UnitTests.csproj | 1 + .../FunctionCallsProcessorTests.cs | 65 - .../Utilities/ActivityExtensionsTests.cs | 107 - 
python/.coveragerc | 2 +- python/.cspell.json | 16 +- python/.env.example | 4 +- python/.pre-commit-config.yaml | 4 +- python/.vscode/launch.json | 2 +- python/Makefile | 16 +- python/README.md | 4 +- python/pyproject.toml | 33 +- python/samples/README.md | 3 +- python/samples/SAMPLE_GUIDELINES.md | 81 - python/samples/concepts/README.md | 80 +- python/samples/concepts/agents/README.md | 19 +- .../agents/assistant_agent_chart_maker.py | 112 + .../assistant_agent_file_manipulation.py | 85 + ...stant_agent_file_manipulation_streaming.py | 88 + .../agents/assistant_agent_retrieval.py | 95 + .../agents/assistant_agent_streaming.py | 110 + .../autogen_conversable_agent/README.md | 20 - ...autogen_conversable_agent_code_executor.py | 61 - ...ogen_conversable_agent_convo_with_tools.py | 95 - .../autogen_conversable_agent_simple_convo.py | 61 - .../agents/azure_ai_agent/.env.example | 6 - .../concepts/agents/azure_ai_agent/README.md | 13 - .../azure_ai_agent_azure_ai_search.py | 147 - .../azure_ai_agent_file_manipulation.py | 92 - .../azure_ai_agent_streaming.py | 93 - .../agents/bedrock_agent/.env.example | 2 - .../concepts/agents/bedrock_agent/README.md | 74 - .../bedrock_agent_simple_chat.py | 52 - .../bedrock_agent_simple_chat_streaming.py | 54 - .../bedrock_agent_with_code_interpreter.py | 75 - ...k_agent_with_code_interpreter_streaming.py | 77 - .../bedrock_agent_with_kernel_function.py | 68 - ...drock_agent_with_kernel_function_simple.py | 59 - ...ck_agent_with_kernel_function_streaming.py | 69 - .../bedrock_mixed_chat_agents.py | 102 - .../bedrock_mixed_chat_agents_streaming.py | 107 - .../agents/chat_completion_agent/README.md | 45 - .../chat_completion_function_termination.py | 147 - .../chat_completion_prompt_templating.py | 99 - ...tion_summary_history_reducer_agent_chat.py | 80 - ...on_summary_history_reducer_single_agent.py | 70 - ...ion_truncate_history_reducer_agent_chat.py | 78 - ...n_truncate_history_reducer_single_agent.py | 63 - .../chat_completion_function_termination.py | 133 + .../agents/chat_completion_history_reducer.py | 298 ++ .../agents/mixed_chat/mixed_chat_files.py | 109 - .../agents/mixed_chat/mixed_chat_images.py | 106 - .../agents/mixed_chat/mixed_chat_reset.py | 103 - .../agents/mixed_chat/mixed_chat_streaming.py | 99 - .../{mixed_chat => }/mixed_chat_agents.py | 58 +- .../mixed_chat_agents_plugins.py | 67 +- .../concepts/agents/mixed_chat_files.py | 92 + .../concepts/agents/mixed_chat_reset.py | 83 + .../concepts/agents/mixed_chat_streaming.py | 95 + .../agents/openai_assistant/README.md | 101 - .../agents/openai_assistant/__init__.py | 0 .../openai_assistant_chart_maker.py | 81 - .../openai_assistant_chart_maker_streaming.py | 99 - .../openai_assistant_file_manipulation.py | 83 - ...i_assistant_file_manipulation_streaming.py | 105 - .../openai_assistant_retrieval.py | 55 - .../openai_assistant_sample_utils.py | 54 - .../openai_assistant_streaming.py | 80 - .../openai_assistant_structured_outputs.py | 90 - .../openai_assistant_templating_streaming.py | 116 - .../openai_assistant_vision_streaming.py | 93 - ...t_completion_with_auto_function_calling.py | 7 +- ...on_with_auto_function_calling_streaming.py | 1 - ...completion_with_manual_function_calling.py | 1 - .../function_calling_with_required_type.py | 3 +- .../concepts/caching/semantic_caching.py | 143 - .../chat_completion/simple_chatbot.py | 6 +- .../simple_chatbot_kernel_function.py | 6 +- .../simple_chatbot_streaming.py | 6 +- ...le_chatbot_with_summary_history_reducer.py | 15 +- 
...mmary_history_reducer_keep_func_content.py | 19 +- .../samples/concepts/chat_history/README.md | 17 - .../chat_history/serialize_chat_history.py | 178 +- .../store_chat_history_in_cosmosdb.py | 199 -- .../filtering/auto_function_invoke_filters.py | 4 + .../function_invocation_filters_stream.py | 27 +- .../filtering/retry_with_different_model.py | 98 - .../concepts/filtering/retry_with_filters.py | 7 +- .../azure_ai_search_hotel_samples/README.md | 72 - .../step_0_data_model.py | 1 - .../step_1_interact_with_the_collection.py | 9 +- .../step_2_use_as_a_plugin.py | 14 +- .../memory/azure_cognitive_search_memory.py | 66 + python/samples/concepts/memory/memory.py | 120 + .../{complex_memory.py => new_memory.py} | 207 +- ...memory_with_pandas.py => pandas_memory.py} | 45 +- .../samples/concepts/memory/simple_memory.py | 170 -- python/samples/concepts/memory/utils.py | 23 - ...penai_function_calling_stepwise_planner.py | 53 + ...penai_function_calling_stepwise_planner.py | 51 + .../concepts/planners/sequential_planner.py | 42 + .../concepts/plugins/crew_ai/README.md | 47 - .../plugins/crew_ai/crew_ai_plugin.py | 140 - python/samples/concepts/realtime/README.md | 50 - ...ltime_chat_with_function_calling_webrtc.py | 143 - ...me_chat_with_function_calling_websocket.py | 141 - .../realtime/simple_realtime_chat_webrtc.py | 84 - .../simple_realtime_chat_websocket.py | 90 - python/samples/concepts/realtime/utils.py | 489 --- .../concepts/reasoning/simple_reasoning.py | 41 +- .../simple_reasoning_function_calling.py | 116 +- python/samples/concepts/resources/cat.jpg | Bin 37831 -> 0 bytes python/samples/concepts/resources/utils.py | 5 - python/samples/concepts/setup/ALL_SETTINGS.md | 8 +- .../setup/chat_completion_services.py | 76 +- .../json_structured_outputs.py | 5 +- .../demos/call_automation/.env.example | 8 - .../demos/call_automation/call_automation.py | 290 -- .../samples/demos/call_automation/readme.md | 53 - .../document_generator/GENERATED_DOCUMENT.md | 58 - .../demos/document_generator/README.md | 105 - .../agents/code_validation_agent.py | 69 - .../agents/content_creation_agent.py | 64 - .../agents/custom_agent_base.py | 52 - .../document_generator/agents/user_agent.py | 67 - .../custom_selection_strategy.py | 100 - .../custom_termination_strategy.py | 91 - .../samples/demos/document_generator/main.py | 130 - .../plugins/code_execution_plugin.py | 26 - .../plugins/repo_file_plugin.py | 51 - .../document_generator/plugins/user_plugin.py | 16 - .../guided_conversation/plugins/agenda.py | 2 +- .../guided_conversation/utils/resources.py | 2 +- .../demos/process_with_dapr/fastapi_app.py | 13 +- .../process_with_dapr/process/process.py | 12 +- .../demos/process_with_dapr/process/steps.py | 43 +- .../05-using-the-planner.ipynb | 4 +- .../third_party/postgres-memory.ipynb | 427 +-- .../getting_started_with_agents/README.md | 47 +- .../azure_ai_agent/.env.example | 6 - .../azure_ai_agent/README.md | 121 - .../azure_ai_agent/step1_azure_ai_agent.py | 80 - .../step2_azure_ai_agent_plugin.py | 101 - .../step3_azure_ai_agent_group_chat.py | 111 - .../step4_azure_ai_agent_code_interpreter.py | 88 - .../step5_azure_ai_agent_file_search.py | 83 - .../step6_azure_ai_agent_openapi.py | 111 - .../chat_completion/README.md | 3 - .../step1_chat_completion_agent_simple.py | 62 - ...step2_chat_completion_agent_with_kernel.py | 69 - ...ep3_chat_completion_agent_plugin_simple.py | 81 - ...hat_completion_agent_plugin_with_kernel.py | 104 - ...step7_chat_completion_agent_json_result.py | 102 - 
.../step8_chat_completion_agent_logging.py | 112 - ...hat_completion_agent_structured_outputs.py | 112 - .../openai_assistant/README.md | 101 - .../openai_assistant/step1_assistant.py | 75 - .../step2_assistant_plugins.py | 99 - .../step3_assistant_vision.py | 87 - .../step4_assistant_tool_code_interpreter.py | 58 - .../step5_assistant_tool_file_search.py | 80 - .../resources/countries.json | 46 - .../resources/weather.json | 62 - .../step10_assistant_tool_file_search.py | 81 + .../step1_agent.py | 67 + .../step2_plugins.py | 98 + ...tion_agent_group_chat.py => step3_chat.py} | 73 +- ...py => step4_kernel_function_strategies.py} | 72 +- .../step5_json_result.py | 106 + .../step6_logging.py | 93 + .../step7_assistant.py | 88 + .../step8_assistant_vision.py | 115 + .../step9_assistant_tool_code_interpreter.py | 76 + .../agent_docs/agent_collaboration.py | 200 +- .../agent_docs/assistant_code.py | 122 +- .../agent_docs/assistant_search.py | 82 +- .../learn_resources/agent_docs/chat_agent.py | 24 +- .../plugins/GithubPlugin/github.py | 14 +- .../resources/WomensSuffrage.txt | 9 - python/semantic_kernel/__init__.py | 7 +- python/semantic_kernel/agents/agent.py | 159 +- .../semantic_kernel/agents/autogen/README.md | 20 - .../agents/autogen/__init__.py | 5 - .../autogen/autogen_conversable_agent.py | 204 -- .../agents/azure_ai/__init__.py | 6 - .../azure_ai/agent_content_generation.py | 435 --- .../agents/azure_ai/agent_thread_actions.py | 876 ------ .../agents/azure_ai/azure_ai_agent.py | 390 --- .../azure_ai/azure_ai_agent_settings.py | 32 - .../agents/azure_ai/azure_ai_agent_utils.py | 87 - .../agents/azure_ai/azure_ai_channel.py | 121 - .../semantic_kernel/agents/bedrock/README.md | 27 - .../agents/bedrock/__init__.py | 0 .../agents/bedrock/action_group_utils.py | 117 - .../agents/bedrock/bedrock_agent.py | 589 ---- .../agents/bedrock/bedrock_agent_base.py | 376 --- .../agents/bedrock/bedrock_agent_settings.py | 32 - .../agents/bedrock/models/__init__.py | 0 .../models/bedrock_action_group_model.py | 21 - .../models/bedrock_agent_event_type.py | 19 - .../bedrock/models/bedrock_agent_model.py | 24 - .../bedrock/models/bedrock_agent_status.py | 23 - .../agents/channels/agent_channel.py | 14 +- .../agents/channels/bedrock_agent_channel.py | 213 -- .../agents/channels/chat_history_channel.py | 82 +- .../channels/open_ai_assistant_channel.py | 35 +- .../chat_completion/chat_completion_agent.py | 397 +-- .../agents/group_chat/agent_chat.py | 17 +- .../agents/group_chat/agent_chat_utils.py | 4 +- .../agents/group_chat/agent_group_chat.py | 35 +- .../agents/group_chat/broadcast_queue.py | 8 +- .../open_ai/assistant_content_generation.py | 129 +- .../open_ai/assistant_thread_actions.py | 770 ----- .../agents/open_ai/azure_assistant_agent.py | 526 +++- .../agents/open_ai/function_action_result.py | 10 +- .../agents/open_ai/open_ai_assistant_agent.py | 879 +++--- .../agents/open_ai/open_ai_assistant_base.py | 1300 ++++++++ .../agents/open_ai/run_polling_options.py | 4 +- .../agents/strategies/__init__.py | 2 - .../kernel_function_selection_strategy.py | 4 +- .../selection/selection_strategy.py | 4 +- .../sequential_selection_strategy.py | 4 +- .../aggregator_termination_strategy.py | 6 +- .../default_termination_strategy.py | 8 +- .../kernel_function_termination_strategy.py | 4 +- .../termination/termination_strategy.py | 4 +- .../semantic_kernel/connectors/ai/README.md | 2 +- .../services/anthropic_chat_completion.py | 10 +- ..._ai_inference_prompt_execution_settings.py | 8 +- 
.../azure_ai_inference_settings.py | 4 +- .../services/azure_ai_inference_base.py | 4 +- .../azure_ai_inference_chat_completion.py | 15 +- .../azure_ai_inference_text_embedding.py | 6 +- .../ai/azure_ai_inference/services/utils.py | 6 +- .../connectors/ai/bedrock/README.md | 21 - .../connectors/ai/bedrock/bedrock_settings.py | 4 +- .../ai/bedrock/services/bedrock_base.py | 24 +- .../services/bedrock_chat_completion.py | 14 +- .../services/bedrock_text_completion.py | 7 +- .../services/bedrock_text_embedding.py | 7 +- .../model_provider/bedrock_model_provider.py | 6 +- .../bedrock/services/model_provider/utils.py | 7 + .../ai/chat_completion_client_base.py | 12 +- .../ai/embeddings/embedding_generator_base.py | 4 +- .../ai/function_call_choice_configuration.py | 4 +- .../connectors/ai/function_calling_utils.py | 41 +- .../connectors/ai/function_choice_behavior.py | 4 +- .../connectors/ai/function_choice_type.py | 4 +- .../services/google_ai_chat_completion.py | 18 +- .../services/google_ai_text_completion.py | 8 +- .../services/google_ai_text_embedding.py | 4 +- .../services/vertex_ai_chat_completion.py | 6 +- .../hf_prompt_execution_settings.py | 14 +- .../services/hf_text_completion.py | 1 - .../services/hf_text_embedding.py | 5 +- .../mistral_ai_prompt_execution_settings.py | 27 +- .../services/mistral_ai_chat_completion.py | 4 +- .../services/mistral_ai_text_embedding.py | 4 +- .../ollama/services/ollama_chat_completion.py | 8 +- .../ollama/services/ollama_text_embedding.py | 4 +- .../services/onnx_gen_ai_chat_completion.py | 4 +- .../services/onnx_gen_ai_completion_base.py | 11 +- .../services/onnx_gen_ai_text_completion.py | 4 +- .../connectors/ai/open_ai/__init__.py | 22 - .../azure_chat_prompt_execution_settings.py | 2 +- ...pen_ai_audio_to_text_execution_settings.py | 5 +- .../open_ai_prompt_execution_settings.py | 2 +- .../open_ai_realtime_execution_settings.py | 78 - ...pen_ai_text_to_image_execution_settings.py | 2 +- .../ai/open_ai/services/azure_config_base.py | 45 +- .../ai/open_ai/services/azure_realtime.py | 116 - .../open_ai/services/azure_text_embedding.py | 4 +- .../open_ai/services/open_ai_config_base.py | 5 +- .../open_ai/services/open_ai_model_types.py | 1 - .../ai/open_ai/services/open_ai_realtime.py | 1024 ------- .../services/open_ai_text_embedding.py | 4 +- .../services/open_ai_text_embedding_base.py | 4 +- .../settings/azure_open_ai_settings.py | 16 +- .../ai/open_ai/settings/open_ai_settings.py | 4 - .../connectors/ai/realtime_client_base.py | 145 - .../connectors/memory/astradb/astra_client.py | 4 +- .../memory/astradb/astradb_memory_store.py | 4 +- .../memory/astradb/astradb_settings.py | 4 +- .../azure_ai_search_collection.py | 4 +- .../azure_ai_search_settings.py | 4 +- .../azure_ai_search/azure_ai_search_store.py | 4 +- .../memory/azure_ai_search/utils.py | 4 +- .../azure_ai_search_settings.py | 4 +- .../azure_cognitive_search_memory_store.py | 4 +- .../memory/azure_cosmos_db/__init__.py | 10 - .../azure_cosmos_db_mongodb_collection.py | 253 -- .../azure_cosmos_db_mongodb_settings.py | 38 - .../azure_cosmos_db_mongodb_store.py | 116 - .../azure_cosmos_db_no_sql_base.py | 4 +- .../azure_cosmos_db_no_sql_collection.py | 37 +- .../azure_cosmos_db_no_sql_composite_key.py | 4 +- .../azure_cosmos_db_no_sql_settings.py | 4 +- .../azure_cosmos_db_no_sql_store.py | 4 +- .../memory/azure_cosmos_db/const.py | 12 - .../azure_cosmos_db_memory_store.py | 4 +- .../azure_cosmos_db_store_api.py | 4 +- .../azure_cosmosdb/azure_cosmosdb_settings.py | 6 +- 
.../azure_cosmosdb/mongo_vcore_store_api.py | 4 +- .../connectors/memory/azure_cosmosdb/utils.py | 6 +- .../azure_cosmosdb_no_sql_memory_store.py | 4 +- .../connectors/memory/chroma/__init__.py | 3 +- .../connectors/memory/chroma/chroma.py | 376 --- .../memory/chroma/chroma_memory_store.py | 4 +- .../connectors/memory/in_memory/const.py | 1 + .../memory/in_memory/in_memory_collection.py | 30 +- .../memory/in_memory/in_memory_store.py | 4 +- .../memory/milvus/milvus_memory_store.py | 10 +- .../memory/mongodb_atlas/__init__.py | 9 +- .../connectors/memory/mongodb_atlas/const.py | 16 - .../mongodb_atlas/mongodb_atlas_collection.py | 325 -- .../mongodb_atlas_memory_store.py | 4 +- .../mongodb_atlas/mongodb_atlas_settings.py | 10 +- .../mongodb_atlas/mongodb_atlas_store.py | 145 - .../connectors/memory/mongodb_atlas/utils.py | 51 +- .../memory/pinecone/pinecone_memory_store.py | 4 +- .../memory/pinecone/pinecone_settings.py | 4 +- .../connectors/memory/postgres/constants.py | 4 - .../memory/postgres/postgres_collection.py | 386 +-- .../memory/postgres/postgres_memory_store.py | 4 +- .../memory/postgres/postgres_settings.py | 60 +- .../memory/postgres/postgres_store.py | 19 +- .../connectors/memory/postgres/utils.py | 45 +- .../memory/qdrant/qdrant_collection.py | 4 +- .../memory/qdrant/qdrant_memory_store.py | 4 +- .../memory/qdrant/qdrant_settings.py | 4 +- .../connectors/memory/qdrant/qdrant_store.py | 4 +- .../memory/redis/redis_collection.py | 8 +- .../memory/redis/redis_memory_store.py | 4 +- .../connectors/memory/redis/redis_settings.py | 4 +- .../connectors/memory/redis/redis_store.py | 4 +- .../memory/usearch/usearch_memory_store.py | 4 +- .../connectors/memory/weaviate/README.md | 2 +- .../memory/weaviate/weaviate_collection.py | 4 +- .../memory/weaviate/weaviate_memory_store.py | 4 +- .../memory/weaviate/weaviate_settings.py | 4 +- .../memory/weaviate/weaviate_store.py | 4 +- .../connectors/openapi_plugin/const.py | 4 +- .../models/rest_api_expected_response.py | 5 +- .../models/rest_api_oauth_flow.py | 4 +- .../models/rest_api_oauth_flows.py | 4 +- .../models/rest_api_operation.py | 4 +- .../models/rest_api_parameter.py | 4 +- .../models/rest_api_parameter_location.py | 4 +- .../models/rest_api_parameter_style.py | 4 +- .../openapi_plugin/models/rest_api_payload.py | 4 +- .../models/rest_api_payload_property.py | 4 +- .../models/rest_api_run_options.py | 4 +- .../models/rest_api_security_requirement.py | 4 +- .../models/rest_api_security_scheme.py | 4 +- .../openapi_plugin/models/rest_api_uri.py | 4 +- .../openapi_function_execution_parameters.py | 4 +- .../openapi_plugin/openapi_manager.py | 6 +- .../openapi_plugin/openapi_runner.py | 4 +- .../connectors/search/bing/bing_search.py | 4 +- .../search/bing/bing_search_response.py | 16 +- .../connectors/search/bing/bing_web_page.py | 4 +- .../connectors/search/google/google_search.py | 4 +- .../search/google/google_search_response.py | 6 +- .../search/google/google_search_result.py | 18 +- .../search_engine/google_connector.py | 2 +- python/semantic_kernel/contents/__init__.py | 20 - .../contents/annotation_content.py | 4 +- .../semantic_kernel/contents/audio_content.py | 43 +- .../contents/binary_content.py | 110 +- .../semantic_kernel/contents/chat_history.py | 175 +- .../contents/chat_message_content.py | 22 +- .../contents/file_reference_content.py | 6 +- .../contents/function_call_content.py | 4 +- .../contents/function_result_content.py | 7 +- .../history_reducer/chat_history_reducer.py | 38 +- .../chat_history_reducer_utils.py | 
24 +- .../chat_history_summarization_reducer.py | 111 +- .../chat_history_truncation_reducer.py | 31 +- .../semantic_kernel/contents/image_content.py | 39 +- .../contents/realtime_events.py | 67 - .../contents/streaming_annotation_content.py | 4 +- .../streaming_chat_message_content.py | 30 +- .../streaming_file_reference_content.py | 6 +- .../contents/utils/data_uri.py | 137 +- .../semantic_kernel/contents/utils/hashing.py | 52 - .../core_plugins/crew_ai/__init__.py | 11 - .../crew_ai/crew_ai_enterprise.py | 261 -- .../crew_ai/crew_ai_enterprise_client.py | 106 - .../core_plugins/crew_ai/crew_ai_models.py | 38 - .../core_plugins/crew_ai/crew_ai_settings.py | 22 - .../sessions_python_settings.py | 2 +- python/semantic_kernel/data/__init__.py | 2 - python/semantic_kernel/data/const.py | 13 - .../any_tags_equal_to_filter_clause.py | 4 +- .../filter_clauses/equal_to_filter_clause.py | 4 +- .../data/filter_clauses/filter_clause_base.py | 4 +- .../data/kernel_search_results.py | 4 +- .../vector_store_model_decorator.py | 18 +- .../vector_store_model_definition.py | 8 +- .../vector_store_model_protocols.py | 14 +- .../vector_store_record_fields.py | 10 +- .../vector_store_record_utils.py | 4 +- python/semantic_kernel/data/search_filter.py | 4 +- python/semantic_kernel/data/search_options.py | 4 +- .../data/text_search/text_search.py | 4 +- .../data/text_search/text_search_filter.py | 4 +- .../data/text_search/text_search_options.py | 4 +- .../data/text_search/text_search_result.py | 4 +- .../data/vector_search/vector_search.py | 4 +- .../vector_search/vector_search_filter.py | 4 +- .../vector_search/vector_search_options.py | 4 +- .../vector_search/vector_search_result.py | 4 +- .../data/vector_search/vector_text_search.py | 4 +- .../vector_search/vectorizable_text_search.py | 4 +- .../data/vector_search/vectorized_search.py | 4 +- .../data/vector_storage/vector_store.py | 4 +- .../vector_store_record_collection.py | 13 +- .../auto_function_invocation_context.py | 7 +- .../filters/filter_context_base.py | 1 - .../functions/function_invocation_context.py | 5 +- .../filters/kernel_filters_extension.py | 7 +- .../filters/prompts/prompt_render_context.py | 7 +- .../functions/function_result.py | 8 +- .../functions/kernel_function.py | 8 +- .../functions/kernel_function_extension.py | 2 - .../functions/kernel_function_from_prompt.py | 24 +- .../functions/kernel_function_metadata.py | 2 +- .../functions/kernel_parameter_metadata.py | 4 +- .../functions/kernel_plugin.py | 11 +- python/semantic_kernel/kernel.py | 7 +- python/semantic_kernel/kernel_pydantic.py | 4 +- .../memory/memory_query_result.py | 4 +- .../semantic_kernel/memory/memory_record.py | 4 +- .../memory/memory_store_base.py | 4 +- python/semantic_kernel/memory/null_memory.py | 4 +- .../memory/semantic_text_memory.py | 4 +- .../memory/semantic_text_memory_base.py | 4 +- .../memory/volatile_memory_store.py | 4 +- .../dapr_runtime/actors/actor_state_key.py | 4 +- .../dapr_runtime/actors/event_buffer_actor.py | 4 +- .../actors/external_event_buffer_actor.py | 4 +- .../actors/message_buffer_actor.py | 4 +- .../dapr_runtime/actors/process_actor.py | 35 +- .../dapr_runtime/actors/step_actor.py | 56 +- .../dapr_runtime/dapr_actor_registration.py | 31 +- .../dapr_runtime/dapr_kernel_process.py | 4 +- .../dapr_kernel_process_context.py | 4 +- .../dapr_runtime/dapr_process_info.py | 11 +- .../processes/dapr_runtime/dapr_step_info.py | 6 +- .../interfaces/event_buffer_interface.py | 4 +- .../external_event_buffer_interface.py | 4 +- 
.../interfaces/message_buffer_interface.py | 4 +- .../interfaces/process_interface.py | 13 +- .../dapr_runtime/interfaces/step_interface.py | 4 +- .../kernel_process/kernel_process.py | 20 +- .../kernel_process/kernel_process_edge.py | 4 +- .../kernel_process/kernel_process_event.py | 8 +- .../kernel_process_function_target.py | 4 +- .../kernel_process_message_channel.py | 4 +- .../kernel_process/kernel_process_state.py | 4 +- .../kernel_process/kernel_process_step.py | 4 +- .../kernel_process_step_context.py | 15 +- .../kernel_process_step_info.py | 4 +- .../kernel_process_step_state.py | 4 +- .../processes/local_runtime/local_event.py | 4 +- .../local_runtime/local_kernel_process.py | 4 +- .../local_kernel_process_context.py | 7 +- .../processes/local_runtime/local_message.py | 8 +- .../local_runtime/local_message_factory.py | 4 +- .../processes/local_runtime/local_process.py | 32 +- .../processes/local_runtime/local_step.py | 26 +- .../processes/process_builder.py | 28 +- .../processes/process_edge_builder.py | 8 +- .../processes/process_end_step.py | 4 +- .../process_function_target_builder.py | 4 +- .../processes/process_step_builder.py | 19 +- .../processes/process_step_edge_builder.py | 4 +- .../semantic_kernel/prompt_template/const.py | 17 +- .../handlebars_prompt_template.py | 4 +- .../prompt_template/jinja2_prompt_template.py | 4 +- .../prompt_template/prompt_template_base.py | 5 +- .../prompt_template/prompt_template_config.py | 26 +- .../services/ai_service_client_base.py | 4 +- .../services/ai_service_selector.py | 12 +- .../services/kernel_services_extension.py | 16 +- .../protocols/code_renderer.py | 2 - .../protocols/text_renderer.py | 2 - python/semantic_kernel/utils/async_utils.py | 11 - .../utils/experimental_decorator.py | 32 + .../utils/feature_stage_decorator.py | 153 - python/semantic_kernel/utils/list_handler.py | 10 +- .../telemetry/agent_diagnostics/decorators.py | 50 +- .../agent_diagnostics/gen_ai_attributes.py | 12 - .../telemetry/model_diagnostics/decorators.py | 19 +- .../model_diagnostics_settings.py | 4 +- python/tests/conftest.py | 28 +- .../agents/bedrock_agent/conftest.py | 25 - .../test_bedrock_agent_integration.py | 143 - .../completions/chat_completion_test_base.py | 38 +- .../completions/test_chat_completions.py | 8 +- .../completions/test_text_completion.py | 40 +- .../cross_language/test_cross_language.py | 2 +- .../embeddings/test_embedding_service_base.py | 8 +- .../test_azure_cosmos_db_no_sql.py | 25 +- .../postgres/test_postgres_int.py | 39 +- .../memory/vector_stores/test_vector_store.py | 63 +- .../vector_stores/vector_store_test_base.py | 66 +- python/tests/samples/test_concepts.py | 113 +- python/tests/samples/test_learn_resources.py | 9 + .../test_autogen_conversable_agent.py | 123 - .../unit/agents/azure_ai_agent/conftest.py | 23 - .../test_agent_content_generation.py | 279 -- .../test_agent_thread_actions.py | 326 -- .../azure_ai_agent/test_azure_ai_agent.py | 145 - .../test_azure_ai_agent_settings.py | 34 - .../test_azure_ai_agent_utils.py | 51 - .../azure_ai_agent/test_azure_ai_channel.py | 112 - .../unit/agents/bedrock_agent/conftest.py | 180 -- .../bedrock_agent/test_action_group_utils.py | 93 - .../test_bedrock_action_group_model.py | 33 - .../bedrock_agent/test_bedrock_agent.py | 633 ---- .../test_bedrock_agent_channel.py | 63 - .../test_bedrock_agent_event_type.py | 27 - .../bedrock_agent/test_bedrock_agent_model.py | 67 - .../test_bedrock_agent_settings.py | 28 - .../test_bedrock_agent_status.py | 23 - 
.../unit/agents/chat_completion/conftest.py | 25 - .../test_chat_completion_agent.py | 326 -- .../unit/agents/openai_assistant/conftest.py | 105 - .../test_assistant_thread_actions.py | 770 ----- .../test_azure_assistant_agent.py | 387 --- .../test_open_ai_assistant_agent.py | 294 -- python/tests/unit/agents/test_agent.py | 88 +- .../test_agent_channel.py | 0 .../{test_group_chat => }/test_agent_chat.py | 0 .../test_agent_chat_utils.py | 0 .../test_agent_group_chat.py | 0 .../test_aggregator_termination_strategy.py | 22 +- .../unit/agents/test_azure_assistant_agent.py | 570 ++++ .../test_broadcast_queue.py | 0 .../unit/agents/test_chat_completion_agent.py | 217 ++ .../test_chat_history_channel.py | 72 +- .../test_default_termination_strategy.py | 0 ...test_kernel_function_selection_strategy.py | 22 +- ...st_kernel_function_termination_strategy.py | 22 +- .../agents/test_open_ai_assistant_agent.py | 601 ++++ .../agents/test_open_ai_assistant_base.py | 1776 +++++++++++ .../test_open_ai_assistant_channel.py | 112 +- .../test_sequential_strategy_selection.py | 25 +- .../test_termination_strategy.py | 23 +- ...test_azure_ai_inference_chat_completion.py | 16 - .../test_azure_ai_inference_text_embedding.py | 6 - .../test_bedrock_model_provider_utils.py | 22 - .../hugging_face/test_hf_text_completions.py | 6 +- .../services/test_onnx_chat_completion.py | 9 - .../services/test_onnx_text_completion.py | 9 - .../services/test_azure_chat_completion.py | 1 + .../test_openai_chat_completion_base.py | 2 + .../open_ai/services/test_openai_realtime.py | 656 ----- .../open_ai/test_openai_request_settings.py | 14 - .../connectors/memory/chroma/test_chroma.py | 134 - .../memory/mongodb_atlas/conftest.py | 37 - .../test_mongodb_atlas_collection.py | 96 - .../mongodb_atlas/test_mongodb_atlas_store.py | 31 - .../memory/postgres/test_postgres_store.py | 135 +- .../unit/contents/test_binary_content.py | 5 - ...test_chat_history_summarization_reducer.py | 34 +- .../contents/test_chat_message_content.py | 35 - python/tests/unit/contents/test_data_uri.py | 96 +- .../contents/test_function_result_content.py | 93 - .../tests/unit/contents/test_hashing_utils.py | 196 -- .../test_streaming_chat_message_content.py | 40 - .../core_plugins/test_crew_ai_enterprise.py | 95 - .../test_kernel_experimental_decorator.py | 29 + python/tests/unit/kernel/test_kernel.py | 80 +- .../dapr_runtime/test_process_actor.py | 2 +- .../processes/dapr_runtime/test_step_actor.py | 107 +- .../test_kernel_process_event.py | 2 +- .../test_local_kernel_process_context.py | 1 - .../local_runtime/test_local_process.py | 3 +- .../processes/test_process_edge_builder.py | 51 +- .../processes/test_process_step_builder.py | 26 - .../prompt_template/test_prompt_templates.py | 1 - python/tests/unit/test_serialization.py | 15 +- .../agent_diagnostics/test_agent_decorated.py | 6 +- .../test_trace_chat_completion_agent.py | 4 +- .../test_trace_open_ai_assistant_agent.py | 60 +- .../utils/test_feature_stage_decorator.py | 124 - python/uv.lock | 2624 +++++++---------- 1091 files changed, 17275 insertions(+), 50849 deletions(-) delete mode 100644 .github/workflows/label-needs-port.yml delete mode 100644 .github/workflows/python-manual-release.yml delete mode 100644 docs/decisions/0064-hybrid-model-orchestration.md delete mode 100644 docs/decisions/0065-realtime-api-clients.md delete mode 100644 docs/decisions/0066-concepts-guidelines.md delete mode 100644 dotnet/samples/Concepts/Agents/AzureAIAgent_FileManipulation.cs delete mode 100644 
dotnet/samples/Concepts/Agents/AzureAIAgent_Streaming.cs delete mode 100644 dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionWithReasoning.cs create mode 100644 dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/IChatHistoryReducer.cs rename dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/{ChatHistoryMaxTokensReducer.cs => MaxTokensChatHistoryReducer.cs} (81%) create mode 100644 dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/SummarizingChatHistoryReducer.cs create mode 100644 dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/TruncatingChatHistoryReducer.cs delete mode 100644 dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletion.cs delete mode 100644 dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletionStreaming.cs delete mode 100644 dotnet/samples/Concepts/ChatCompletion/HybridCompletion_Fallback.cs delete mode 100644 dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletion.cs delete mode 100644 dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletionStreaming.cs delete mode 100644 dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithReasoning.cs delete mode 100644 dotnet/samples/Concepts/Filtering/AzureOpenAI_DeploymentSwitch.cs create mode 100644 dotnet/samples/Concepts/LocalModels/HuggingFace_ChatCompletionWithTGI.cs create mode 100644 dotnet/samples/Concepts/LocalModels/MultipleProviders_ChatCompletion.cs delete mode 100644 dotnet/samples/Concepts/Plugins/CrewAI_Plugin.cs create mode 100644 dotnet/samples/Concepts/Resources/Agents/ParrotAgent.yaml create mode 100644 dotnet/samples/Concepts/Resources/Agents/ToolAgent.yaml create mode 100644 dotnet/samples/Concepts/Resources/Agents/travelinfo.txt create mode 100644 dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/BearerAuthenticationProviderWithCancellationToken.cs delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.csproj delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.sln delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/DemoCommand.cs delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLogger.cs delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLoggerProvider.cs delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Program.cs delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/appsettings.json delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/README.md delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/TROUBLESHOOTING.md delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/images/AppRegistration_APIPermissions.png delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/images/AppRegistration_AppSecret.png delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/images/AppRegistration_Authentication_localhostredirecturi.png delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/images/ApplicationOverViewScreenClientIDetc.png delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/images/CAPs_PublicRoadmap.png delete mode 100644 dotnet/samples/Demos/CopilotAgentPlugins/images/aad-portal-app-registrations.png delete mode 
100644 dotnet/samples/Demos/ModelContextProtocol/McpDotNetExtensions.cs delete mode 100644 dotnet/samples/Demos/ModelContextProtocol/ModelContextProtocol.csproj delete mode 100644 dotnet/samples/Demos/ModelContextProtocol/Program.cs delete mode 100644 dotnet/samples/Demos/ModelContextProtocol/README.md delete mode 100644 dotnet/samples/Demos/ModelContextProtocol/SimpleToolsConsole.json delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/ProcessFramework.Aspire.AppHost.csproj delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/Program.cs delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/appsettings.json delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Models/ProcessEvents.cs delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.csproj delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.http delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Program.cs delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/SummarizeStep.cs delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/TranslateStep.cs delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/SummaryAgentHttpClient.cs delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/TranslatorAgentHttpClient.cs delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/appsettings.json delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/Extensions.cs delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/ProcessFramework.Aspire.ServiceDefaults.csproj delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/ProcessFramework.Aspire.Shared.csproj delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/SummarizeRequest.cs delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/TranslationRequest.cs delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.csproj delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.http delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/Program.cs delete mode 
100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/appsettings.json delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.csproj delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.http delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/Program.cs delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/appsettings.json delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/README.md delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/architecture.png delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/aspire-dashboard.png delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/aspire-metrics.png delete mode 100644 dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/aspire-traces.png delete mode 100644 dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step01_AzureAIAgent.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Plugins.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_CodeInterpreter.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_FileSearch.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step06_AzureAIAgent_OpenAPI.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step07_AzureAIAgent_Functions.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/BedrockAgent/README.md delete mode 100644 dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step01_BedrockAgent.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step02_BedrockAgent_CodeInterpreter.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step03_BedrockAgent_Functions.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step04_BedrockAgent_Trace.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step05_BedrockAgent_FileSearch.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step06_BedrockAgent_AgentChat.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step01_Assistant.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step02_Assistant_Plugins.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step05_AssistantTool_FileSearch.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step06_AssistantTool_Function.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/Plugins/MenuPlugin.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/Plugins/WidgetFactory.cs delete mode 100644 dotnet/samples/GettingStartedWithAgents/Resources/AutoInvokeTools.yaml delete mode 100644 dotnet/samples/GettingStartedWithAgents/Resources/countries.json delete mode 100644 dotnet/samples/GettingStartedWithAgents/Resources/weather.json rename dotnet/samples/GettingStartedWithAgents/{AzureAIAgent/Step03_AzureAIAgent_Chat.cs => Step07_Logging.cs} (56%) delete mode 100644 
dotnet/samples/GettingStartedWithAgents/Step07_Telemetry.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs rename dotnet/samples/GettingStartedWithAgents/{OpenAIAssistant/Step03_Assistant_Vision.cs => Step09_Assistant_Vision.cs} (69%) rename dotnet/samples/GettingStartedWithAgents/{OpenAIAssistant/Step04_AssistantTool_CodeInterpreter.cs => Step10_AssistantTool_CodeInterpreter.cs} (61%) create mode 100644 dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs rename dotnet/src/Agents/{AzureAI => Abstractions}/Properties/AssemblyInfo.cs (100%) delete mode 100644 dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj delete mode 100644 dotnet/src/Agents/AzureAI/AzureAIAgent.ClientFactory.cs delete mode 100644 dotnet/src/Agents/AzureAI/AzureAIAgent.cs delete mode 100644 dotnet/src/Agents/AzureAI/AzureAIChannel.cs delete mode 100644 dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs delete mode 100644 dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs delete mode 100644 dotnet/src/Agents/AzureAI/AzureAIThreadMessageFactory.cs delete mode 100644 dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs delete mode 100644 dotnet/src/Agents/AzureAI/Extensions/KernelFunctionExtensions.cs delete mode 100644 dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs delete mode 100644 dotnet/src/Agents/AzureAI/Internal/AgentThreadActions.cs delete mode 100644 dotnet/src/Agents/AzureAI/Logging/AgentThreadActionsLogMessages.cs delete mode 100644 dotnet/src/Agents/AzureAI/Logging/AzureAIAgentLogMessages.cs delete mode 100644 dotnet/src/Agents/AzureAI/RunPollingOptions.cs delete mode 100644 dotnet/src/Agents/Bedrock/Agents.Bedrock.csproj delete mode 100644 dotnet/src/Agents/Bedrock/BedrockAgent.cs delete mode 100644 dotnet/src/Agents/Bedrock/BedrockAgentChannel.cs delete mode 100644 dotnet/src/Agents/Bedrock/Extensions/BedrockAgentExtensions.cs delete mode 100644 dotnet/src/Agents/Bedrock/Extensions/BedrockAgentInvokeExtensions.cs delete mode 100644 dotnet/src/Agents/Bedrock/Extensions/BedrockFunctionSchemaExtensions.cs delete mode 100644 dotnet/src/Agents/Bedrock/README.md rename dotnet/src/{SemanticKernel.Core/AI/ChatCompletion => Agents/Core/History}/ChatHistoryReducerExtensions.cs (51%) rename dotnet/src/{SemanticKernel.Core/AI/ChatCompletion => Agents/Core/History}/ChatHistorySummarizationReducer.cs (79%) rename dotnet/src/{SemanticKernel.Core/AI/ChatCompletion => Agents/Core/History}/ChatHistoryTruncationReducer.cs (82%) create mode 100644 dotnet/src/Agents/Core/History/IChatHistoryReducer.cs rename dotnet/src/Agents/{Bedrock => Core}/Properties/AssemblyInfo.cs (100%) rename dotnet/src/{InternalUtilities/agents => Agents/OpenAI}/Extensions/AgentExtensions.cs (51%) delete mode 100644 dotnet/src/Agents/OpenAI/Extensions/AssistantClientExtensions.cs delete mode 100644 dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs create mode 100644 dotnet/src/Agents/OpenAI/Extensions/KernelExtensions.cs delete mode 100644 dotnet/src/Agents/OpenAI/Extensions/OpenAIClientExtensions.cs create mode 100644 dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs delete mode 100644 dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.ClientFactory.cs rename dotnet/src/{Plugins/Plugins.AI => Agents/OpenAI/Properties}/AssemblyInfo.cs (78%) delete mode 100644 dotnet/src/Agents/UnitTests/AzureAI/AzureAIAssistantInvocationOptionsTests.cs delete mode 100644 dotnet/src/Agents/UnitTests/AzureAI/AzureAIClientProviderTests.cs delete mode 100644 
dotnet/src/Agents/UnitTests/AzureAI/Extensions/KernelFunctionExtensionsTests.cs delete mode 100644 dotnet/src/Agents/UnitTests/AzureAI/Internal/AgentMessageFactoryTests.cs delete mode 100644 dotnet/src/Agents/UnitTests/AzureAI/RunPollingOptionsTests.cs delete mode 100644 dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentChannelTests.cs delete mode 100644 dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentTests.cs delete mode 100644 dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockAgentExtensionsTests.cs delete mode 100644 dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockFunctionSchemaExtensionsTests.cs rename dotnet/src/{SemanticKernel.UnitTests/AI/ChatCompletion => Agents/UnitTests/Core/History}/ChatHistoryReducerExtensionsTests.cs (87%) rename dotnet/src/{SemanticKernel.UnitTests/AI/ChatCompletion => Agents/UnitTests/Core/History}/ChatHistorySummarizationReducerTests.cs (85%) rename dotnet/src/{SemanticKernel.UnitTests/AI/ChatCompletion => Agents/UnitTests/Core/History}/ChatHistoryTruncationReducerTests.cs (79%) rename dotnet/src/{SemanticKernel.UnitTests/AI/ChatCompletion/MockChatHistoryGenerator.cs => Agents/UnitTests/Core/History/MockHistoryGenerator.cs} (90%) rename dotnet/src/Agents/UnitTests/{Test => OpenAI}/AssertCollection.cs (95%) create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs delete mode 100644 dotnet/src/Agents/UnitTests/OpenAI/Extensions/AssistantClientExtensionsTests.cs create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs delete mode 100644 dotnet/src/Agents/UnitTests/OpenAI/Extensions/OpenAIClientExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureClientCoreTests.cs delete mode 100644 dotnet/src/Experimental/Process.Abstractions/IKernelExternalProcessMessageChannel.cs delete mode 100644 dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventClient.cs delete mode 100644 dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventData.cs delete mode 100644 dotnet/src/Experimental/Process.IntegrationTests.Resources/ProcessCloudEventsResources.cs delete mode 100644 dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessCloudEventsTests.cs delete mode 100644 dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActor.cs delete mode 100644 dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActorWrapper.cs delete mode 100644 dotnet/src/Experimental/Process.Runtime.Dapr/Interfaces/IExternalMessageBuffer.cs create mode 100644 dotnet/src/Functions/Functions.Prompty/Core/PromptyModel.cs create mode 100644 dotnet/src/Functions/Functions.Prompty/Core/PromptyModelConfig.cs create mode 100644 dotnet/src/Functions/Functions.Prompty/Core/PromptyModelParameters.cs create mode 100644 dotnet/src/Functions/Functions.Prompty/Core/PromptyResponseFormat.cs create mode 100644 dotnet/src/Functions/Functions.Prompty/Core/PromptyTool.cs create mode 100644 dotnet/src/Functions/Functions.Prompty/Core/PromptyYaml.cs create mode 100644 dotnet/src/Functions/Functions.Prompty/Core/Types/ApiType.cs create mode 100644 dotnet/src/Functions/Functions.Prompty/Core/Types/ModelType.cs create mode 100644 dotnet/src/Functions/Functions.Prompty/Core/Types/ParserType.cs create mode 100644 dotnet/src/Functions/Functions.Prompty/Core/Types/RoleType.cs delete mode 100644 dotnet/src/IntegrationTests/Agents/BedrockAgentTests.cs delete mode 100644 
dotnet/src/IntegrationTests/Resources/gemini_cached_content.json delete mode 100644 dotnet/src/IntegrationTests/TestSettings/BedrockAgentConfiguration.cs delete mode 100644 dotnet/src/InternalUtilities/agents/AgentUtilities.props delete mode 100644 dotnet/src/InternalUtilities/agents/Extensions/KernelFunctionMetadataExtensions.cs delete mode 100644 dotnet/src/InternalUtilities/azure/AzureAIUtilities.props delete mode 100644 dotnet/src/InternalUtilities/azure/Policies/GeneratedActionPipelinePolicy.cs delete mode 100644 dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAssistantTest.cs delete mode 100644 dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureAgentTest.cs delete mode 100644 dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureTest.cs delete mode 100644 dotnet/src/InternalUtilities/samples/AgentUtilities/BaseBedrockAgentTest.cs delete mode 100644 dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseClientTests.cs delete mode 100644 dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseTests.cs delete mode 100644 dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/MockHttpClientFactory.cs delete mode 100644 dotnet/src/Plugins/Plugins.AI.UnitTests/Plugins.AI.UnitTests.csproj delete mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIEnterpriseClient.cs delete mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIStateEnumConverter.cs delete mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIEnterprise.cs delete mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIInputMetadata.cs delete mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffResponse.cs delete mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffState.cs delete mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIRequiredInputs.cs delete mode 100644 dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIStatusResponse.cs delete mode 100644 dotnet/src/Plugins/Plugins.AI/Plugins.AI.csproj delete mode 100644 dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistoryExtensions.cs delete mode 100644 dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatHistoryReducer.cs delete mode 100644 dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionNoop.cs delete mode 100644 dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionSchemaModel.cs delete mode 100644 dotnet/src/SemanticKernel.Core/Contents/BinaryContentExtensions.cs create mode 100644 dotnet/src/SemanticKernel.Core/Data/KernelBuilderExtensions.cs create mode 100644 dotnet/src/SemanticKernel.Core/Data/ServiceCollectionExtensions.cs create mode 100644 dotnet/src/SemanticKernel.Core/Data/VolatileVectorStore.cs create mode 100644 dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreCollectionSearchMapping.cs create mode 100644 dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreExtensions.cs create mode 100644 dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreKeyResolver.cs create mode 100644 dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs create mode 100644 dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollectionOptions.cs create mode 100644 dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreVectorResolver.cs create mode 100644 dotnet/src/SemanticKernel.UnitTests/Data/KernelBuilderExtensionsTests.cs create mode 100644 dotnet/src/SemanticKernel.UnitTests/Data/ServiceCollectionExtensionsTests.cs create mode 100644 
dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs create mode 100644 dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreTests.cs delete mode 100644 dotnet/src/SemanticKernel.UnitTests/Utilities/ActivityExtensionsTests.cs delete mode 100644 python/samples/SAMPLE_GUIDELINES.md create mode 100644 python/samples/concepts/agents/assistant_agent_chart_maker.py create mode 100644 python/samples/concepts/agents/assistant_agent_file_manipulation.py create mode 100644 python/samples/concepts/agents/assistant_agent_file_manipulation_streaming.py create mode 100644 python/samples/concepts/agents/assistant_agent_retrieval.py create mode 100644 python/samples/concepts/agents/assistant_agent_streaming.py delete mode 100644 python/samples/concepts/agents/autogen_conversable_agent/README.md delete mode 100644 python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_code_executor.py delete mode 100644 python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_convo_with_tools.py delete mode 100644 python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_simple_convo.py delete mode 100644 python/samples/concepts/agents/azure_ai_agent/.env.example delete mode 100644 python/samples/concepts/agents/azure_ai_agent/README.md delete mode 100644 python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_azure_ai_search.py delete mode 100644 python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_file_manipulation.py delete mode 100644 python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_streaming.py delete mode 100644 python/samples/concepts/agents/bedrock_agent/.env.example delete mode 100644 python/samples/concepts/agents/bedrock_agent/README.md delete mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat.py delete mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat_streaming.py delete mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter.py delete mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter_streaming.py delete mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function.py delete mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_simple.py delete mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_streaming.py delete mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents.py delete mode 100644 python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents_streaming.py delete mode 100644 python/samples/concepts/agents/chat_completion_agent/README.md delete mode 100644 python/samples/concepts/agents/chat_completion_agent/chat_completion_function_termination.py delete mode 100644 python/samples/concepts/agents/chat_completion_agent/chat_completion_prompt_templating.py delete mode 100644 python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_agent_chat.py delete mode 100644 python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_single_agent.py delete mode 100644 python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_agent_chat.py delete mode 100644 python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_single_agent.py create mode 100644 
python/samples/concepts/agents/chat_completion_function_termination.py create mode 100644 python/samples/concepts/agents/chat_completion_history_reducer.py delete mode 100644 python/samples/concepts/agents/mixed_chat/mixed_chat_files.py delete mode 100644 python/samples/concepts/agents/mixed_chat/mixed_chat_images.py delete mode 100644 python/samples/concepts/agents/mixed_chat/mixed_chat_reset.py delete mode 100644 python/samples/concepts/agents/mixed_chat/mixed_chat_streaming.py rename python/samples/concepts/agents/{mixed_chat => }/mixed_chat_agents.py (66%) rename python/samples/concepts/agents/{mixed_chat => }/mixed_chat_agents_plugins.py (69%) create mode 100644 python/samples/concepts/agents/mixed_chat_files.py create mode 100644 python/samples/concepts/agents/mixed_chat_reset.py create mode 100644 python/samples/concepts/agents/mixed_chat_streaming.py delete mode 100644 python/samples/concepts/agents/openai_assistant/README.md delete mode 100644 python/samples/concepts/agents/openai_assistant/__init__.py delete mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker.py delete mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker_streaming.py delete mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation.py delete mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation_streaming.py delete mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_retrieval.py delete mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_sample_utils.py delete mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_streaming.py delete mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_structured_outputs.py delete mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_templating_streaming.py delete mode 100644 python/samples/concepts/agents/openai_assistant/openai_assistant_vision_streaming.py delete mode 100644 python/samples/concepts/caching/semantic_caching.py delete mode 100644 python/samples/concepts/chat_history/README.md delete mode 100644 python/samples/concepts/chat_history/store_chat_history_in_cosmosdb.py delete mode 100644 python/samples/concepts/filtering/retry_with_different_model.py delete mode 100644 python/samples/concepts/memory/azure_ai_search_hotel_samples/README.md create mode 100644 python/samples/concepts/memory/azure_cognitive_search_memory.py create mode 100644 python/samples/concepts/memory/memory.py rename python/samples/concepts/memory/{complex_memory.py => new_memory.py} (56%) rename python/samples/concepts/memory/{memory_with_pandas.py => pandas_memory.py} (57%) delete mode 100644 python/samples/concepts/memory/simple_memory.py delete mode 100644 python/samples/concepts/memory/utils.py create mode 100644 python/samples/concepts/planners/azure_openai_function_calling_stepwise_planner.py create mode 100644 python/samples/concepts/planners/openai_function_calling_stepwise_planner.py create mode 100644 python/samples/concepts/planners/sequential_planner.py delete mode 100644 python/samples/concepts/plugins/crew_ai/README.md delete mode 100644 python/samples/concepts/plugins/crew_ai/crew_ai_plugin.py delete mode 100644 python/samples/concepts/realtime/README.md delete mode 100644 python/samples/concepts/realtime/realtime_chat_with_function_calling_webrtc.py delete mode 100644 
python/samples/concepts/realtime/realtime_chat_with_function_calling_websocket.py delete mode 100644 python/samples/concepts/realtime/simple_realtime_chat_webrtc.py delete mode 100644 python/samples/concepts/realtime/simple_realtime_chat_websocket.py delete mode 100644 python/samples/concepts/realtime/utils.py delete mode 100644 python/samples/concepts/resources/cat.jpg delete mode 100644 python/samples/demos/call_automation/.env.example delete mode 100755 python/samples/demos/call_automation/call_automation.py delete mode 100644 python/samples/demos/call_automation/readme.md delete mode 100644 python/samples/demos/document_generator/GENERATED_DOCUMENT.md delete mode 100644 python/samples/demos/document_generator/README.md delete mode 100644 python/samples/demos/document_generator/agents/code_validation_agent.py delete mode 100644 python/samples/demos/document_generator/agents/content_creation_agent.py delete mode 100644 python/samples/demos/document_generator/agents/custom_agent_base.py delete mode 100644 python/samples/demos/document_generator/agents/user_agent.py delete mode 100644 python/samples/demos/document_generator/custom_selection_strategy.py delete mode 100644 python/samples/demos/document_generator/custom_termination_strategy.py delete mode 100644 python/samples/demos/document_generator/main.py delete mode 100644 python/samples/demos/document_generator/plugins/code_execution_plugin.py delete mode 100644 python/samples/demos/document_generator/plugins/repo_file_plugin.py delete mode 100644 python/samples/demos/document_generator/plugins/user_plugin.py delete mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/.env.example delete mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/README.md delete mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/step1_azure_ai_agent.py delete mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/step2_azure_ai_agent_plugin.py delete mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/step3_azure_ai_agent_group_chat.py delete mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/step4_azure_ai_agent_code_interpreter.py delete mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/step5_azure_ai_agent_file_search.py delete mode 100644 python/samples/getting_started_with_agents/azure_ai_agent/step6_azure_ai_agent_openapi.py delete mode 100644 python/samples/getting_started_with_agents/chat_completion/README.md delete mode 100644 python/samples/getting_started_with_agents/chat_completion/step1_chat_completion_agent_simple.py delete mode 100644 python/samples/getting_started_with_agents/chat_completion/step2_chat_completion_agent_with_kernel.py delete mode 100644 python/samples/getting_started_with_agents/chat_completion/step3_chat_completion_agent_plugin_simple.py delete mode 100644 python/samples/getting_started_with_agents/chat_completion/step4_chat_completion_agent_plugin_with_kernel.py delete mode 100644 python/samples/getting_started_with_agents/chat_completion/step7_chat_completion_agent_json_result.py delete mode 100644 python/samples/getting_started_with_agents/chat_completion/step8_chat_completion_agent_logging.py delete mode 100644 python/samples/getting_started_with_agents/chat_completion/step9_chat_completion_agent_structured_outputs.py delete mode 100644 python/samples/getting_started_with_agents/openai_assistant/README.md delete mode 100644 
python/samples/getting_started_with_agents/openai_assistant/step1_assistant.py delete mode 100644 python/samples/getting_started_with_agents/openai_assistant/step2_assistant_plugins.py delete mode 100644 python/samples/getting_started_with_agents/openai_assistant/step3_assistant_vision.py delete mode 100644 python/samples/getting_started_with_agents/openai_assistant/step4_assistant_tool_code_interpreter.py delete mode 100644 python/samples/getting_started_with_agents/openai_assistant/step5_assistant_tool_file_search.py delete mode 100644 python/samples/getting_started_with_agents/resources/countries.json delete mode 100644 python/samples/getting_started_with_agents/resources/weather.json create mode 100644 python/samples/getting_started_with_agents/step10_assistant_tool_file_search.py create mode 100644 python/samples/getting_started_with_agents/step1_agent.py create mode 100644 python/samples/getting_started_with_agents/step2_plugins.py rename python/samples/getting_started_with_agents/{chat_completion/step5_chat_completion_agent_group_chat.py => step3_chat.py} (53%) rename python/samples/getting_started_with_agents/{chat_completion/step6_kernel_function_strategies.py => step4_kernel_function_strategies.py} (66%) create mode 100644 python/samples/getting_started_with_agents/step5_json_result.py create mode 100644 python/samples/getting_started_with_agents/step6_logging.py create mode 100644 python/samples/getting_started_with_agents/step7_assistant.py create mode 100644 python/samples/getting_started_with_agents/step8_assistant_vision.py create mode 100644 python/samples/getting_started_with_agents/step9_assistant_tool_code_interpreter.py delete mode 100644 python/samples/learn_resources/resources/WomensSuffrage.txt delete mode 100644 python/semantic_kernel/agents/autogen/README.md delete mode 100644 python/semantic_kernel/agents/autogen/__init__.py delete mode 100644 python/semantic_kernel/agents/autogen/autogen_conversable_agent.py delete mode 100644 python/semantic_kernel/agents/azure_ai/__init__.py delete mode 100644 python/semantic_kernel/agents/azure_ai/agent_content_generation.py delete mode 100644 python/semantic_kernel/agents/azure_ai/agent_thread_actions.py delete mode 100644 python/semantic_kernel/agents/azure_ai/azure_ai_agent.py delete mode 100644 python/semantic_kernel/agents/azure_ai/azure_ai_agent_settings.py delete mode 100644 python/semantic_kernel/agents/azure_ai/azure_ai_agent_utils.py delete mode 100644 python/semantic_kernel/agents/azure_ai/azure_ai_channel.py delete mode 100644 python/semantic_kernel/agents/bedrock/README.md delete mode 100644 python/semantic_kernel/agents/bedrock/__init__.py delete mode 100644 python/semantic_kernel/agents/bedrock/action_group_utils.py delete mode 100644 python/semantic_kernel/agents/bedrock/bedrock_agent.py delete mode 100644 python/semantic_kernel/agents/bedrock/bedrock_agent_base.py delete mode 100644 python/semantic_kernel/agents/bedrock/bedrock_agent_settings.py delete mode 100644 python/semantic_kernel/agents/bedrock/models/__init__.py delete mode 100644 python/semantic_kernel/agents/bedrock/models/bedrock_action_group_model.py delete mode 100644 python/semantic_kernel/agents/bedrock/models/bedrock_agent_event_type.py delete mode 100644 python/semantic_kernel/agents/bedrock/models/bedrock_agent_model.py delete mode 100644 python/semantic_kernel/agents/bedrock/models/bedrock_agent_status.py delete mode 100644 python/semantic_kernel/agents/channels/bedrock_agent_channel.py delete mode 100644 
python/semantic_kernel/agents/open_ai/assistant_thread_actions.py create mode 100644 python/semantic_kernel/agents/open_ai/open_ai_assistant_base.py delete mode 100644 python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_realtime_execution_settings.py delete mode 100644 python/semantic_kernel/connectors/ai/open_ai/services/azure_realtime.py delete mode 100644 python/semantic_kernel/connectors/ai/open_ai/services/open_ai_realtime.py delete mode 100644 python/semantic_kernel/connectors/ai/realtime_client_base.py delete mode 100644 python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_collection.py delete mode 100644 python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_settings.py delete mode 100644 python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_store.py delete mode 100644 python/semantic_kernel/connectors/memory/chroma/chroma.py delete mode 100644 python/semantic_kernel/connectors/memory/mongodb_atlas/const.py delete mode 100644 python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_collection.py delete mode 100644 python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_store.py delete mode 100644 python/semantic_kernel/contents/realtime_events.py delete mode 100644 python/semantic_kernel/contents/utils/hashing.py delete mode 100644 python/semantic_kernel/core_plugins/crew_ai/__init__.py delete mode 100644 python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise.py delete mode 100644 python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise_client.py delete mode 100644 python/semantic_kernel/core_plugins/crew_ai/crew_ai_models.py delete mode 100644 python/semantic_kernel/core_plugins/crew_ai/crew_ai_settings.py delete mode 100644 python/semantic_kernel/utils/async_utils.py create mode 100644 python/semantic_kernel/utils/experimental_decorator.py delete mode 100644 python/semantic_kernel/utils/feature_stage_decorator.py delete mode 100644 python/semantic_kernel/utils/telemetry/agent_diagnostics/gen_ai_attributes.py delete mode 100644 python/tests/integration/agents/bedrock_agent/conftest.py delete mode 100644 python/tests/integration/agents/bedrock_agent/test_bedrock_agent_integration.py delete mode 100644 python/tests/unit/agents/autogen_conversable_agent/test_autogen_conversable_agent.py delete mode 100644 python/tests/unit/agents/azure_ai_agent/conftest.py delete mode 100644 python/tests/unit/agents/azure_ai_agent/test_agent_content_generation.py delete mode 100644 python/tests/unit/agents/azure_ai_agent/test_agent_thread_actions.py delete mode 100644 python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent.py delete mode 100644 python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_settings.py delete mode 100644 python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_utils.py delete mode 100644 python/tests/unit/agents/azure_ai_agent/test_azure_ai_channel.py delete mode 100644 python/tests/unit/agents/bedrock_agent/conftest.py delete mode 100644 python/tests/unit/agents/bedrock_agent/test_action_group_utils.py delete mode 100644 python/tests/unit/agents/bedrock_agent/test_bedrock_action_group_model.py delete mode 100644 python/tests/unit/agents/bedrock_agent/test_bedrock_agent.py delete mode 100644 python/tests/unit/agents/bedrock_agent/test_bedrock_agent_channel.py delete mode 100644 python/tests/unit/agents/bedrock_agent/test_bedrock_agent_event_type.py delete mode 100644 
python/tests/unit/agents/bedrock_agent/test_bedrock_agent_model.py delete mode 100644 python/tests/unit/agents/bedrock_agent/test_bedrock_agent_settings.py delete mode 100644 python/tests/unit/agents/bedrock_agent/test_bedrock_agent_status.py delete mode 100644 python/tests/unit/agents/chat_completion/conftest.py delete mode 100644 python/tests/unit/agents/chat_completion/test_chat_completion_agent.py delete mode 100644 python/tests/unit/agents/openai_assistant/conftest.py delete mode 100644 python/tests/unit/agents/openai_assistant/test_assistant_thread_actions.py delete mode 100644 python/tests/unit/agents/openai_assistant/test_azure_assistant_agent.py delete mode 100644 python/tests/unit/agents/openai_assistant/test_open_ai_assistant_agent.py rename python/tests/unit/agents/{test_group_chat => }/test_agent_channel.py (100%) rename python/tests/unit/agents/{test_group_chat => }/test_agent_chat.py (100%) rename python/tests/unit/agents/{test_group_chat => }/test_agent_chat_utils.py (100%) rename python/tests/unit/agents/{test_group_chat => }/test_agent_group_chat.py (100%) rename python/tests/unit/agents/{test_group_chat_strategies => }/test_aggregator_termination_strategy.py (84%) create mode 100644 python/tests/unit/agents/test_azure_assistant_agent.py rename python/tests/unit/agents/{test_group_chat => }/test_broadcast_queue.py (100%) create mode 100644 python/tests/unit/agents/test_chat_completion_agent.py rename python/tests/unit/agents/{chat_completion => }/test_chat_history_channel.py (79%) rename python/tests/unit/agents/{test_group_chat_strategies => }/test_default_termination_strategy.py (100%) rename python/tests/unit/agents/{test_group_chat_strategies => }/test_kernel_function_selection_strategy.py (85%) rename python/tests/unit/agents/{test_group_chat_strategies => }/test_kernel_function_termination_strategy.py (85%) create mode 100644 python/tests/unit/agents/test_open_ai_assistant_agent.py create mode 100644 python/tests/unit/agents/test_open_ai_assistant_base.py rename python/tests/unit/agents/{openai_assistant => }/test_open_ai_assistant_channel.py (82%) rename python/tests/unit/agents/{test_group_chat_strategies => }/test_sequential_strategy_selection.py (82%) rename python/tests/unit/agents/{test_group_chat_strategies => }/test_termination_strategy.py (77%) delete mode 100644 python/tests/unit/connectors/ai/open_ai/services/test_openai_realtime.py delete mode 100644 python/tests/unit/connectors/memory/chroma/test_chroma.py delete mode 100644 python/tests/unit/connectors/memory/mongodb_atlas/conftest.py delete mode 100644 python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_collection.py delete mode 100644 python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_store.py delete mode 100644 python/tests/unit/contents/test_hashing_utils.py delete mode 100644 python/tests/unit/core_plugins/test_crew_ai_enterprise.py create mode 100644 python/tests/unit/functions/test_kernel_experimental_decorator.py delete mode 100644 python/tests/unit/utils/test_feature_stage_decorator.py diff --git a/.github/_typos.toml b/.github/_typos.toml index 74279ff55fe6..d9a2dcb7a2e4 100644 --- a/.github/_typos.toml +++ b/.github/_typos.toml @@ -23,10 +23,7 @@ extend-exclude = [ "PopulationByCountry.csv", "PopulationByAdmin1.csv", "WomensSuffrage.txt", - "SK-dotnet.sln.DotSettings", - "**/azure_ai_search_hotel_samples/README.md", - "**/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Program.cs", - 
"**/Demos/ProcessFrameworkWithAspire/**/*.http" + "SK-dotnet.sln.DotSettings" ] [default.extend-words] diff --git a/.github/workflows/dotnet-build-and-test.yml b/.github/workflows/dotnet-build-and-test.yml index dde2fad80b39..f291c5a4d888 100644 --- a/.github/workflows/dotnet-build-and-test.yml +++ b/.github/workflows/dotnet-build-and-test.yml @@ -148,17 +148,11 @@ jobs: # Generate test reports and check coverage - name: Generate test reports - uses: danielpalme/ReportGenerator-GitHub-Action@5.4.4 + uses: danielpalme/ReportGenerator-GitHub-Action@5.4.3 with: reports: "./TestResults/Coverage/**/coverage.cobertura.xml" targetdir: "./TestResults/Reports" - reporttypes: "HtmlInline;JsonSummary" - - - name: Upload coverage report artifact - uses: actions/upload-artifact@v4 - with: - name: CoverageReport-${{ matrix.os }}-${{ matrix.dotnet }}-${{ matrix.configuration }} # Artifact name - path: ./TestResults/Reports # Directory containing files to upload + reporttypes: "JsonSummary" - name: Check coverage shell: pwsh diff --git a/.github/workflows/label-needs-port.yml b/.github/workflows/label-needs-port.yml deleted file mode 100644 index baec103be3d7..000000000000 --- a/.github/workflows/label-needs-port.yml +++ /dev/null @@ -1,41 +0,0 @@ -name: Create Issue when Needs Port label is added -on: - issues: - types: [labeled] - pull_request_target: - types: [labeled] - -jobs: - create_issue: - if: contains(github.event.pull_request.labels.*.name, 'needs_port_to_dotnet') || contains(github.event.pull_request.labels.*.name, 'needs_port_to_python') || contains(github.event.issue.labels.*.name, 'needs_port_to_dotnet') || contains(github.event.issue.labels.*.name, 'needs_port_to_python') - name: "Create Issue" - continue-on-error: true - runs-on: ubuntu-latest - permissions: - issues: write - pull-requests: read - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GH_REPO: ${{ github.repository }} - - steps: - - name: Create dotnet issue - if: contains(github.event.pull_request.labels.*.name, 'needs_port_to_dotnet') || contains(github.event.issue.labels.*.name, 'needs_port_to_dotnet') - run: | - new_issue_url=$(gh issue create \ - --title "Port python feature: ${{ github.event.issue.title || github.event.pull_request.title }}" \ - --label ".NET" \ - --body "# Original issue - ${{ github.event.issue.html_url || github.event.pull_request.html_url }} - ## Description - ${{ github.event.issue.body || github.event.pull_request.body }}") - - name: Create python issue - if: contains(github.event.pull_request.labels.*.name, 'needs_port_to_python') || contains(github.event.issue.labels.*.name, 'needs_port_to_python') - run: | - new_issue_url=$(gh issue create \ - --title "Port dotnet feature: ${{ github.event.issue.title || github.event.pull_request.title }}" \ - --label "python" \ - --body "# Original issue - ${{ github.event.issue.html_url || github.event.pull_request.html_url }} - ## Description - ${{ github.event.issue.body || github.event.pull_request.body }}") diff --git a/.github/workflows/python-build.yml b/.github/workflows/python-build.yml index 19029d60b6bc..2f7f5de183b1 100644 --- a/.github/workflows/python-build.yml +++ b/.github/workflows/python-build.yml @@ -4,16 +4,11 @@ on: release: types: [published] -permissions: - contents: read - id-token: "write" - jobs: python-build-assets: if: github.event_name == 'release' && startsWith(github.event.release.tag_name, 'python-') name: Python Build Assets and add to Release runs-on: ubuntu-latest - environment: "integration" permissions: contents: write env: @@ 
-26,7 +21,6 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} - cache-dependency-glob: "**/uv.lock" - name: Check version run: | echo "Building and uploading Python package version: ${{ github.event.release.tag_name }}" @@ -37,14 +31,3 @@ jobs: with: files: | python/dist/* - - name: Azure Login - uses: azure/login@v2 - with: - client-id: ${{ secrets.AZURE_CLIENT_ID }} - tenant-id: ${{ secrets.AZURE_TENANT_ID }} - subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} - - name: Start DevOps pipeline - uses: azure/cli@v2 - with: - inlineScript: | - az pipelines run --id ${{ vars.ADO_PYTHON_RELEASE_ID }} --org ${{ vars.ADO_ORG }} --project ${{ vars.ADO_PROJECT_NAME }} --parameters tag=${{ github.event.release.tag_name }} delay=0 diff --git a/.github/workflows/python-integration-tests.yml b/.github/workflows/python-integration-tests.yml index 39d7a3f38452..b4d854df72a6 100644 --- a/.github/workflows/python-integration-tests.yml +++ b/.github/workflows/python-integration-tests.yml @@ -55,10 +55,10 @@ env: MISTRALAI_EMBEDDING_MODEL_ID: ${{ vars.MISTRALAI_EMBEDDING_MODEL_ID }} ANTHROPIC_API_KEY: ${{secrets.ANTHROPIC_API_KEY}} ANTHROPIC_CHAT_MODEL_ID: ${{ vars.ANTHROPIC_CHAT_MODEL_ID }} - OLLAMA_CHAT_MODEL_ID: "${{ vars.OLLAMA_CHAT_MODEL_ID || '' }}" # llama3.2:1b - OLLAMA_CHAT_MODEL_ID_IMAGE: "${{ vars.OLLAMA_CHAT_MODEL_ID_IMAGE || '' }}" # moondream - OLLAMA_CHAT_MODEL_ID_TOOL_CALL: "${{ vars.OLLAMA_CHAT_MODEL_ID_TOOL_CALL || '' }}" # llama3.2:1b - OLLAMA_TEXT_MODEL_ID: "${{ vars.OLLAMA_TEXT_MODEL_ID || '' }}" # llama3.2:1b + OLLAMA_CHAT_MODEL_ID: "${{ vars.OLLAMA_CHAT_MODEL_ID || '' }}" # llava-phi3 + OLLAMA_CHAT_MODEL_ID_IMAGE: "${{ vars.OLLAMA_CHAT_MODEL_ID_IMAGE || '' }}" # llava-phi3 + OLLAMA_CHAT_MODEL_ID_TOOL_CALL: "${{ vars.OLLAMA_CHAT_MODEL_ID_TOOL_CALL || '' }}" # llama3.2 + OLLAMA_TEXT_MODEL_ID: "${{ vars.OLLAMA_TEXT_MODEL_ID || '' }}" # llava-phi3 OLLAMA_EMBEDDING_MODEL_ID: "${{ vars.OLLAMA_EMBEDDING_MODEL_ID || '' }}" # nomic-embed-text GOOGLE_AI_GEMINI_MODEL_ID: ${{ vars.GOOGLE_AI_GEMINI_MODEL_ID }} GOOGLE_AI_EMBEDDING_MODEL_ID: ${{ vars.GOOGLE_AI_EMBEDDING_MODEL_ID }} @@ -69,8 +69,6 @@ env: REDIS_CONNECTION_STRING: ${{ vars.REDIS_CONNECTION_STRING }} AZURE_COSMOS_DB_NO_SQL_URL: ${{ vars.AZURE_COSMOS_DB_NO_SQL_URL }} AZURE_COSMOS_DB_NO_SQL_KEY: ${{ secrets.AZURE_COSMOS_DB_NO_SQL_KEY }} - BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN: ${{ secrets.BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN }} - BEDROCK_AGENT_FOUNDATION_MODEL: ${{ vars.BEDROCK_AGENT_FOUNDATION_MODEL }} jobs: paths-filter: @@ -120,7 +118,6 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} - cache-dependency-glob: "**/uv.lock" - name: Install dependencies run: | uv sync --all-extras --dev @@ -150,92 +147,10 @@ jobs: run: | uv run pytest -v --log-cli-level=INFO --durations=20 -n logical --dist loadfile --dist worksteal -m "not ollama" ./tests/integration/completions ./tests/integration/embeddings ./tests/samples ./tests/integration/cross_language - python-merge-gate-multi-modality: - name: Python Pre-Merge Integration Tests - Multi-Modality - needs: paths-filter - if: github.event_name != 'pull_request' && github.event_name != 'schedule' && needs.paths-filter.outputs.pythonChanges == 'true' - strategy: - max-parallel: 1 - fail-fast: false - matrix: - python-version: ["3.11"] - os: [ubuntu-latest] - defaults: - run: - working-directory: python - runs-on: ${{ matrix.os }} - environment: "integration" - env: - UV_PYTHON: ${{ 
matrix.python-version }} - steps: - - uses: actions/checkout@v4 - - name: Set up uv - uses: astral-sh/setup-uv@v5 - with: - version: "0.5.x" - enable-cache: true - cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} - - name: Install dependencies - run: | - uv sync --all-extras --dev - - name: Azure CLI Login - if: github.event_name != 'pull_request' - uses: azure/login@v2 - with: - client-id: ${{ secrets.AZURE_CLIENT_ID }} - tenant-id: ${{ secrets.AZURE_TENANT_ID }} - subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} - - name: Run Integration Tests - id: run_tests_multi_modality - shell: bash - run: | - uv run pytest -v --log-cli-level=INFO --durations=20 -n logical --dist loadfile --dist worksteal ./tests/integration/audio_to_text ./tests/integration/text_to_audio ./tests/integration/text_to_image - - python-merge-gate-agents: - name: Python Pre-Merge Integration Tests - Agents - needs: paths-filter - if: github.event_name != 'pull_request' && github.event_name != 'schedule' && needs.paths-filter.outputs.pythonChanges == 'true' - strategy: - max-parallel: 1 - fail-fast: false - matrix: - python-version: ["3.11"] - os: [ubuntu-latest] - defaults: - run: - working-directory: python - runs-on: ${{ matrix.os }} - environment: "integration" - env: - UV_PYTHON: ${{ matrix.python-version }} - steps: - - uses: actions/checkout@v4 - - name: Set up uv - uses: astral-sh/setup-uv@v5 - with: - version: "0.5.x" - enable-cache: true - cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} - - name: Install dependencies - run: | - uv sync --all-extras --dev - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: ${{ vars.AWS_REGION }} - - name: Run Integration Tests - id: run_tests_agents - shell: bash - run: | - uv run pytest -v --log-cli-level=INFO --durations=20 -n logical --dist loadfile --dist worksteal ./tests/integration/agents - python-merge-gate-ollama: name: Python Pre-Merge Integration Tests - Ollama needs: paths-filter - # Ollama tests are very unstable at the moment. It often fails to pull models from the Ollama server. Thus, this job is disabled for now. 
- if: false && github.event_name != 'pull_request' && github.event_name != 'schedule' && needs.paths-filter.outputs.pythonChanges == 'true' + if: github.event_name != 'pull_request' && github.event_name != 'schedule' && needs.paths-filter.outputs.pythonChanges == 'true' strategy: max-parallel: 1 fail-fast: false @@ -258,7 +173,6 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} - cache-dependency-glob: "**/uv.lock" - name: Install dependencies run: | uv sync --all-extras --dev @@ -323,7 +237,6 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} - cache-dependency-glob: "**/uv.lock" - name: Install dependencies run: | uv sync --all-extras --dev @@ -355,6 +268,7 @@ jobs: run: working-directory: python runs-on: ${{ matrix.os }} + environment: "integration" env: UV_PYTHON: ${{ matrix.python-version }} MEMORY_CONCEPT_SAMPLE: "true" @@ -381,7 +295,6 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} - cache-dependency-glob: "**/uv.lock" - name: Install dependencies run: | uv sync --all-extras --dev @@ -392,8 +305,7 @@ jobs: ollama serve & sleep 5 - name: Pull model in Ollama - # Ollama tests are very unstable at the moment. It often fails to pull models from the Ollama server. Thus, Ollama is disabled for now. - if: false && matrix.os == 'ubuntu-latest' + if: matrix.os == 'ubuntu-latest' run: | ollama pull ${{ vars.OLLAMA_CHAT_MODEL_ID }} ollama pull ${{ vars.OLLAMA_CHAT_MODEL_ID_IMAGE }} @@ -431,16 +343,14 @@ jobs: id: run_tests_completions timeout-minutes: 10 shell: bash - # Ollama tests are very unstable at the moment. It often fails to pull models from the Ollama server. Thus, Ollama is disabled for now. run: | - uv run pytest -v -n logical --dist loadfile --dist worksteal -m "not ollama" ./tests/integration/completions + uv run pytest -v -n logical --dist loadfile --dist worksteal ./tests/integration/completions - name: Run Integration Tests - Embeddings id: run_tests_embeddings timeout-minutes: 5 shell: bash - # Ollama tests are very unstable at the moment. It often fails to pull models from the Ollama server. Thus, Ollama is disabled for now. run: | - uv run pytest -v -n logical --dist loadfile --dist worksteal -m "not ollama" ./tests/integration/embeddings + uv run pytest -v -n logical --dist loadfile --dist worksteal ./tests/integration/embeddings - name: Run Integration Tests - Memory id: run_tests_memory timeout-minutes: 5 @@ -465,18 +375,6 @@ jobs: shell: bash run: | uv run pytest -v -n logical --dist loadfile --dist worksteal ./tests/samples - - name: Run Integration Tests - Agents - id: run_tests_agents - timeout-minutes: 5 - shell: bash - run: | - uv run pytest -v -n logical --dist loadfile --dist worksteal ./tests/integration/agents - - name: Run Integration Tests - Multi-Modality - id: run_tests_multi_modality - timeout-minutes: 5 - shell: bash - run: | - uv run pytest -v -n logical --dist loadfile --dist worksteal ./tests/integration/audio_to_text ./tests/integration/text_to_audio ./tests/integration/text_to_image # This final job is required to satisfy the merge queue. 
It must only run (or succeed) if no tests failed python-integration-tests-check: @@ -490,8 +388,6 @@ jobs: python-merge-gate-ai-services, python-merge-gate-ollama, python-merge-gate-memory, - python-merge-gate-agents, - python-merge-gate-multi-modality, python-integration-tests, ] steps: diff --git a/.github/workflows/python-lint.yml b/.github/workflows/python-lint.yml index fe2c6882d8ac..65e04c0aba3c 100644 --- a/.github/workflows/python-lint.yml +++ b/.github/workflows/python-lint.yml @@ -30,7 +30,6 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} - cache-dependency-glob: "**/uv.lock" - name: Install the project run: uv sync --all-extras --dev - uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/python-manual-release.yml b/.github/workflows/python-manual-release.yml deleted file mode 100644 index 5742cf747311..000000000000 --- a/.github/workflows/python-manual-release.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: Python Start Release on ADO - -on: - workflow_dispatch: - inputs: - tag: - description: "Tag to release" - required: true - -permissions: - contents: read - id-token: "write" - -jobs: - python-build-assets: - name: Trigger ADO Pipeline for Python Release - runs-on: ubuntu-latest - environment: "integration" - steps: - - name: Azure Login - uses: azure/login@v2 - with: - client-id: ${{ secrets.AZURE_CLIENT_ID }} - tenant-id: ${{ secrets.AZURE_TENANT_ID }} - subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} - - name: Start DevOps pipeline - uses: azure/cli@v2 - with: - inlineScript: | - az pipelines run --id ${{ vars.ADO_PYTHON_RELEASE_ID }} --org ${{ vars.ADO_ORG }} --project ${{ vars.ADO_PROJECT_NAME }} --parameters tag=${{ inputs.tag }} delay=0 diff --git a/.github/workflows/python-test-coverage.yml b/.github/workflows/python-test-coverage.yml index 44e567580968..fc0ae4087c7b 100644 --- a/.github/workflows/python-test-coverage.yml +++ b/.github/workflows/python-test-coverage.yml @@ -32,7 +32,6 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ env.UV_PYTHON }} - cache-dependency-glob: "**/uv.lock" - name: Install the project run: uv sync --all-extras --dev - name: Test with pytest diff --git a/.github/workflows/python-unit-tests.yml b/.github/workflows/python-unit-tests.yml index e7a749ab589c..65a61710ceff 100644 --- a/.github/workflows/python-unit-tests.yml +++ b/.github/workflows/python-unit-tests.yml @@ -20,10 +20,18 @@ jobs: python-version: ["3.10", "3.11", "3.12"] os: [ubuntu-latest, windows-latest, macos-latest] experimental: [false] - include: - - python-version: "3.13" - os: "ubuntu-latest" - experimental: true + # include: + # - python-version: "3.13" + # os: "ubuntu-latest" + # experimental: true + # - python-version: "3.13t" + # os: "ubuntu-latest" + # experimental: true + # gil: 0 + # - python-version: "3.13t" + # os: "ubuntu-latest" + # experimental: true + # gil: 1 env: UV_PYTHON: ${{ matrix.python-version }} permissions: @@ -39,7 +47,6 @@ jobs: version: "0.5.x" enable-cache: true cache-suffix: ${{ runner.os }}-${{ matrix.python-version }} - cache-dependency-glob: "**/uv.lock" - name: Install the project run: uv sync --all-extras --dev -U --prerelease=if-necessary-or-explicit - name: Test with pytest diff --git a/.vscode/launch.json b/.vscode/launch.json index d643e4be4b96..ae7d191a00a1 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -1,19 +1,6 @@ { "version": "0.2.0", "configurations": [ - { - "name": "CAPs - Demo Sample", - "type": "coreclr", - 
"request": "launch", - "preLaunchTask": "build (CopilotAgentPluginsDemoSample)", - "program": "${workspaceFolder}/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/bin/Debug/net8.0/CopilotAgentPluginsDemoSample.exe", - "args": [ - "demo" - ], - "cwd": "${workspaceFolder}/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample", - "stopAtEntry": false, - "console": "integratedTerminal" - }, { // Use IntelliSense to find out which attributes exist for C# debugging // Use hover for the description of the existing attributes diff --git a/.vscode/tasks.json b/.vscode/tasks.json index afe3d20b3390..1cd9319c318b 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -222,15 +222,6 @@ "cwd": "${workspaceFolder}/dotnet/src/IntegrationTests/" } }, - { - "label": "build (CopilotAgentPluginsDemoSample)", - "command": "dotnet", - "type": "process", - "args": [ - "build", - "${workspaceFolder}/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.csproj" - ] - }, // **************** // Samples (dotnet) // **************** diff --git a/docs/decisions/0054-processes.md b/docs/decisions/0054-processes.md index ed2f0c7c17ef..d9ae06be4028 100644 --- a/docs/decisions/0054-processes.md +++ b/docs/decisions/0054-processes.md @@ -26,9 +26,6 @@ In technical terms, a process is something that can be represented as a graph wh - Customers should be able to leverage their existing investments in all supported languages of Semantic Kernel. - ``` - - ``` - - Customers should be able to leverage their existing investments in infrastructure. - Customers should be able to collaborate with their business process peers to build up composable processes. - Customers should be able to use AI to enhance and streamline the steps within their business processes. @@ -317,5 +314,5 @@ The following packages will be created for Processes: In validation of the proposed solution, two runtimes were created, one for the local/server scenario and one for the distributed actor scenario using Orleans. Both of these implementation were based on the [Pregel Algorithm](https://kowshik.github.io/JPregel/pregel_paper.pdf) for large-scale graph processing. This algorithm is well tested and well suited for single machine scenarios as well as distributed systems. More information on how the Pregel algorithm works can be found in the following links. - +- [Pregel - The Morning Paper](https://blog.acolyer.org/2015/05/26/pregel-a-system-for-large-scale-graph-processing/) diff --git a/docs/decisions/0064-hybrid-model-orchestration.md b/docs/decisions/0064-hybrid-model-orchestration.md deleted file mode 100644 index 4038d5ff00a5..000000000000 --- a/docs/decisions/0064-hybrid-model-orchestration.md +++ /dev/null @@ -1,276 +0,0 @@ ---- -status: accepted -contact: sergeymenshykh -date: 2025-02-05 -deciders: dmytrostruk, markwallace, rbarreto, sergeymenshykh, westey-m, ---- - -# Hybrid Model Orchestration - -## Context and Problem Statement -Taking into account the constantly emerging and improving local and cloud-based models, in addition to the growing demand for utilizing local AI models running on local devices' NPUs, -AI powered applications need to be able to effectively and seamlessly leverage both local and cloud models for inference to achieve the best AI user experience. - -## Decision Drivers - -1. The model orchestration layer should be simple and extensible. -2. 
The model orchestration layer client code should not be aware of or deal with the underlying complexities. -3. The model orchestration layer should allow for different strategies for selecting the best model(s) for the task at hand. - -## Considered Implementation Options - -The following options consider a few ways to implement the model orchestration layer. - -### Option 1: IChatClient implementation per orchestration strategy - -This option presents a simple and straightforward approach to implementing the model orchestration layer. Each strategy is implemented as a separate implementation of the IChatClient interface. - -For example, a fallback strategy that uses the first configured chat client for inference and falls back to the next one if the AI model is not available may be implemented as follows: -```csharp -public sealed class FallbackChatClient : IChatClient -{ - private readonly IChatClient[] _clients; - - public FallbackChatClient(params IChatClient[] clients) - { - this._clients = clients; - } - - public Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) - { - foreach (var client in this._clients) - { - try - { - return client.CompleteAsync(chatMessages, options, cancellationToken); - } - catch (HttpRequestException ex) - { - if (ex.StatusCode >= 500) - { - // Try the next client - continue; - } - - throw; - } - } - } - - public IAsyncEnumerable CompleteStreamingAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) - { - ... - } - - public void Dispose() { /*We can't dispose clients here because they can be used up the stack*/ } - - public ChatClientMetadata Metadata => new ChatClientMetadata(); - - public object? GetService(Type serviceType, object? serviceKey = null) => null; -} -``` - -Other orchestration strategies, such as latency-based or token-based strategies, can be implemented in a similar way: a class that implements the IChatClient interface and the corresponding chat client selection strategy. - -Pros: -- Does not require any new abstraction. -- Simple and straightforward implementation. -- Can be sufficient for most use cases. - -### Option 2: HybridChatClient class with chat completion handler(s) per orchestration strategy - -This option introduces a HybridChatClient class that implements the IChatClient interface and delegates the selection routine to a provided handler represented by the abstract ChatCompletionHandler class: -```csharp -public sealed class HybridChatClient : IChatClient -{ - private readonly IChatClient[] _chatClients; - private readonly ChatCompletionHandler _handler; - private readonly Kernel? _kernel; - - public HybridChatClient(IChatClient[] chatClients, ChatCompletionHandler handler, Kernel? kernel = null) - { - this._chatClients = chatClients; - this._handler = handler; - this._kernel = kernel; - } - - public Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) - { - return this._handler.CompleteAsync( - new ChatCompletionHandlerContext - { - ChatMessages = chatMessages, - Options = options, - ChatClients = this._chatClients.ToDictionary(c => c, c => (CompletionContext?)null), - Kernel = this._kernel, - }, cancellationToken); - } - - public IAsyncEnumerable CompleteStreamingAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) - { - ... - } - - ... 
-} - -public abstract class ChatCompletionHandler -{ - public abstract Task CompleteAsync(ChatCompletionHandlerContext context, CancellationToken cancellationToken = default); - - public abstract IAsyncEnumerable CompleteStreamingAsync(ChatCompletionHandlerContext context, CancellationToken cancellationToken = default); -} -``` - -The HybridChatClient class passes all the necessary information to the handler via the ChatCompletionHandlerContext class, which contains the list of chat clients, chat messages, options, and Kernel instance. -```csharp -public class ChatCompletionHandlerContext -{ - public IDictionary ChatClients { get; init; } - - public IList ChatMessages { get; init; } - - public ChatOptions? Options { get; init; } - - public Kernel? Kernel { get; init; } -} -``` - -The fallback strategy shown in the previous option can be implemented as the following handler: -```csharp -public class FallbackChatCompletionHandler : ChatCompletionHandler -{ - public override async Task CompleteAsync(ChatCompletionHandlerContext context, CancellationToken cancellationToken = default) - { - for (int i = 0; i < context.ChatClients.Count; i++) - { - var chatClient = context.ChatClients.ElementAt(i).Key; - - try - { - return await chatClient.CompleteAsync(context.ChatMessages, context.Options, cancellationToken).ConfigureAwait(false); - } - catch (HttpRequestException ex) - { - if (ex.StatusCode >= 500) - { - // Try the next client - continue; - } - - throw; - } - } - - throw new InvalidOperationException("No client provided for chat completion."); - } - - public override async IAsyncEnumerable CompleteStreamingAsync(ChatCompletionHandlerContext context, CancellationToken cancellationToken = default) - { - ... - } -} -``` - -and the caller code would look like this: -```csharp -IChatClient onnxChatClient = new OnnxChatClient(...); - -IChatClient openAIChatClient = new OpenAIChatClient(...); - -// Tries the first client and falls back to the next one if the first one fails -FallbackChatCompletionHandler handler = new FallbackChatCompletionHandler(...); - -IChatClient hybridChatClient = new HybridChatClient([onnxChatClient, openAIChatClient], handler); - -... - -var result = await hybridChatClient.CompleteAsync("Do I need an umbrella?", ...); -``` - -The handlers can be chained to create more complex scenarios, where a handler performs some preprocessing and then delegates the call to another handler with an augmented chat clients list. - -For example, the first handler identifies that a cloud model has requested access to sensitive data and delegates the call handling to local models to process it. - -```csharp -IChatClient onnxChatClient = new OnnxChatClient(...); - -IChatClient llamaChatClient = new LlamaChatClient(...); - -IChatClient openAIChatClient = new OpenAIChatClient(...); - -// Tries the first client and falls back to the next one if the first one fails -FallbackChatCompletionHandler fallbackHandler = new FallbackChatCompletionHandler(...); - -// Check if the request contains sensitive data, identifies the client(s) allowed to work with the sensitive data, and delegates the call handling to the next handler.
-SensitiveDataHandler sensitiveDataHandler = new SensitiveDataHandler(fallbackHandler); - -IChatClient hybridChatClient = new HybridChatClient(new[] { onnxChatClient, llamaChatClient, openAIChatClient }, sensitiveDataHandler); - -var result = await hybridChatClient.CompleteAsync("Do I need an umbrella?", ...); -``` - -Examples of complex orchestration scenarios: - -| First Handler | Second Handler | Scenario Description | -|---------------------------------------|--------------------------------|---------------------------------------------------------------------------| -| InputTokenThresholdEvaluationHandler | FastestChatCompletionHandler | Identifies models based on the prompt's input token size and each model's min/max token capacity, then returns the fastest model's response. | -| InputTokenThresholdEvaluationHandler | RelevancyChatCompletionHandler | Identifies models based on the prompt's input token size and each model's min/max token capacity, then returns the most relevant response. | -| InputTokenThresholdEvaluationHandler | FallbackChatCompletionHandler | Identifies models based on the prompt's input token size and each model's min/max token capacity, then returns the first available model's response. | -| SensitiveDataRoutingHandler | FastestChatCompletionHandler | Identifies models based on data sensitivity, then returns the fastest model's response. | -| SensitiveDataRoutingHandler | RelevancyChatCompletionHandler | Identifies models based on data sensitivity, then returns the most relevant response. | -| SensitiveDataRoutingHandler | FallbackChatCompletionHandler | Identifies models based on data sensitivity, then returns the first available model's response. | - -Pros: -- Allows reusing same handlers to create various composite orchestration strategies. - -Cons: -- Requires new abstractions and components than the previous option: context classes and code for handling the next handler. - -
- -POC demonstrating this option can be found [here](https://github.com/microsoft/semantic-kernel/pull/10412). - -### Option 3: Implementing existing IAIServiceSelector interface. - -The Semantic Kernel has a mechanism that allows for the dynamic selection of AI services: - -```csharp -public interface IAIServiceSelector -{ - bool TrySelectAIService( - Kernel kernel, - KernelFunction function, - KernelArguments arguments, - [NotNullWhen(true)] out T? service, - out PromptExecutionSettings? serviceSettings) where T : class, IAIService; -} -``` - -However, this mechanism requires specific context - the kernel, function, and arguments which may not always be available. -Additionally, it only works with implementations of the IAIService interface, which may not be compatible with all AI services, -such as those in Microsoft.Extensions.AI that implement the IChatClient interface. - -Furthermore, this mechanism cannot be used in orchestration scenarios where an AI service needs to be prompted first to determine its availability, latency, etc. -For example, to check if an AI service is available, the selector would need to send chat messages with options to the service. It should then return -the completion if the service is available, or fallback to another service if it is not. Given that the TrySelectAIService method does not accept a list of -chat messages or options, it is impossible to send chat messages using this method. Even if it were possible, the consumer code would have to resend the same -chat messages to the selected service to obtain a completion, as the selector does not return the completion itself. Additionally, the TrySelectAIService method -is synchronous, making it difficult to send chat messages without using synchronous code, which is generally discouraged. - -Looking at the above, it is clear that the IAIServiceSelector interface is not suitable for the hybrid orchestration of AI services since it was designed for a different purpose: -to synchronously select an instance of an AI service based on SK context and service metadata without taking the results of completion and streamed completion methods into account. - -Pros: -- Reuses the existing mechanism for AI service selection. - -Cons: -- Not suitable for all AI services. -- Requires context that may not be available in all scenarios. -- Consumer code must be aware of the IAIServiceSelector interface instead of simply using the IChatClient interface. -- Synchronous method. - -## Decision Outcome - -Chosen option: Option 1 because it does not require any new abstraction; its simplicity and straightforwardness are sufficient for most use cases. -Option 2 can be considered in the future if more complex orchestration scenarios are required. \ No newline at end of file diff --git a/docs/decisions/0065-realtime-api-clients.md b/docs/decisions/0065-realtime-api-clients.md deleted file mode 100644 index a27987aeaf00..000000000000 --- a/docs/decisions/0065-realtime-api-clients.md +++ /dev/null @@ -1,1770 +0,0 @@ ---- -# These are optional elements. Feel free to remove any of them. 
-status: proposed -contact: eavanvalkenburg -date: 2025-01-31 -deciders: eavanvalkenburg, markwallace, alliscode, sphenry -consulted: westey-m, rbarreto, alliscode, markwallace, sergeymenshykh, moonbox3 -informed: taochenosu, dmytrostruk ---- - -# Multi-modal Realtime API Clients - -## Context and Problem Statement - -Multiple model providers are starting to enable realtime voice-to-voice or even multi-modal, realtime, two-way communication with their models; this includes OpenAI with their [Realtime API][openai-realtime-api] and [Google Gemini][google-gemini]. These APIs promise some very interesting new ways of using LLMs for different scenarios, which we want to enable with Semantic Kernel. - -The key feature that Semantic Kernel brings into this system is the ability to (re)use Semantic Kernel functions as tools with these APIs. There are also options for Google to use video and images as input; this will likely not be implemented first, but the abstraction should be able to deal with it. - -> [!IMPORTANT] -> Both the OpenAI and Google realtime APIs are in preview/beta; this means there might be breaking changes in the way they work in the future, therefore the clients built to support these APIs are going to be experimental until the APIs stabilize. - -At this time, the protocols that these APIs use are WebSockets and WebRTC. - -In both cases there are events being sent to and from the service: some events contain content (text, audio, or video; so far only sending, not receiving), while some events are "control" events, like content created, function call requested, etc. Sent events include content (either voice, text or function call output) and control events, like committing the input audio and requesting a response. - -### WebSocket -WebSocket has been around for a while and is a well-known technology; it is a full-duplex communication protocol over a single, long-lived connection. It is used for sending and receiving messages between client and server in real time. Each event can contain a message, which might contain a content item, or a control event. Audio is sent as a base64-encoded string in an event. - -### WebRTC -WebRTC is an open-source project that provides web browsers and mobile applications with real-time communication via simple APIs. It allows audio and video communication to work inside web pages and other applications by allowing direct peer-to-peer communication, eliminating the need to install plugins or download native apps. It is used for sending and receiving audio and video streams, and can be used for sending (data-)messages as well. The big difference compared to WebSockets is that it explicitly creates a channel for audio and video, and a separate channel for "data"; the data channel carries all non-AV content: text, function calls, function results and control events, like errors or acknowledgements.
- - -### Event types (Websocket and partially WebRTC) - -#### Client side events: -| **Content/Control event** | **Event Description** | **OpenAI Event** | **Google Event** | -| ------------------------- | --------------------------------- | ---------------------------- | ---------------------------------- | -| Control | Configure session | `session.update` | `BidiGenerateContentSetup` | -| Content | Send voice input | `input_audio_buffer.append` | `BidiGenerateContentRealtimeInput` | -| Control | Commit input and request response | `input_audio_buffer.commit` | `-` | -| Control | Clean audio input buffer | `input_audio_buffer.clear` | `-` | -| Content | Send text input | `conversation.item.create` | `BidiGenerateContentClientContent` | -| Control | Interrupt audio | `conversation.item.truncate` | `-` | -| Control | Delete content | `conversation.item.delete` | `-` | -| Control | Respond to function call request | `conversation.item.create` | `BidiGenerateContentToolResponse` | -| Control | Ask for response | `response.create` | `-` | -| Control | Cancel response | `response.cancel` | `-` | - -#### Server side events: -| **Content/Control event** | **Event Description** | **OpenAI Event** | **Google Event** | -| ------------------------- | -------------------------------------- | ------------------------------------------------------- | ----------------------------------------- | -| Control | Error | `error` | `-` | -| Control | Session created | `session.created` | `BidiGenerateContentSetupComplete` | -| Control | Session updated | `session.updated` | `BidiGenerateContentSetupComplete` | -| Control | Conversation created | `conversation.created` | `-` | -| Control | Input audio buffer committed | `input_audio_buffer.committed` | `-` | -| Control | Input audio buffer cleared | `input_audio_buffer.cleared` | `-` | -| Control | Input audio buffer speech started | `input_audio_buffer.speech_started` | `-` | -| Control | Input audio buffer speech stopped | `input_audio_buffer.speech_stopped` | `-` | -| Content | Conversation item created | `conversation.item.created` | `-` | -| Content | Input audio transcription completed | `conversation.item.input_audio_transcription.completed` | | -| Content | Input audio transcription failed | `conversation.item.input_audio_transcription.failed` | | -| Control | Conversation item truncated | `conversation.item.truncated` | `-` | -| Control | Conversation item deleted | `conversation.item.deleted` | `-` | -| Control | Response created | `response.created` | `-` | -| Control | Response done | `response.done` | `-` | -| Content | Response output item added | `response.output_item.added` | `-` | -| Content | Response output item done | `response.output_item.done` | `-` | -| Content | Response content part added | `response.content_part.added` | `-` | -| Content | Response content part done | `response.content_part.done` | `-` | -| Content | Response text delta | `response.text.delta` | `BidiGenerateContentServerContent` | -| Content | Response text done | `response.text.done` | `-` | -| Content | Response audio transcript delta | `response.audio_transcript.delta` | `BidiGenerateContentServerContent` | -| Content | Response audio transcript done | `response.audio_transcript.done` | `-` | -| Content | Response audio delta | `response.audio.delta` | `BidiGenerateContentServerContent` | -| Content | Response audio done | `response.audio.done` | `-` | -| Content | Response function call arguments delta | `response.function_call_arguments.delta` | 
`BidiGenerateContentToolCall` | -| Content | Response function call arguments done | `response.function_call_arguments.done` | `-` | -| Control | Function call cancelled | `-` | `BidiGenerateContentToolCallCancellation` | -| Control | Rate limits updated | `rate_limits.updated` | `-` | - - -## Overall Decision Drivers -- Abstract away the underlying protocols, so that developers can build applications that implement whatever protocol they want to support, without having to change the client code when changing models or protocols. - - There are some limitations expected here as i.e. WebRTC requires different information at session create time than websockets. -- Simple programming model that is likely able to handle future realtime api's and the evolution of the existing ones. -- Whenever possible we transform incoming content into Semantic Kernel content, but surface everything, so it's extensible for developers and in the future. - -There are multiple areas where we need to make decisions, these are: -- Content and Events -- Programming model -- Audio speaker/microphone handling -- Interface design and naming - -# Content and Events - -## Considered Options - Content and Events -Both the sending and receiving side of these integrations need to decide how to deal with the events. - -1. Treat content separate from control -1. Treat everything as content items -1. Treat everything as events - -### 1. Treat content separate from control -This would mean there are two mechanisms in the clients, one deals with content, and one with control events. - -- Pro: - - strongly typed responses for known content - - easy to use as the main interactions are clear with familiar SK content types, the rest goes through a separate mechanism -- Con: - - new content support requires updates in the codebase and can be considered breaking (potentially sending additional types back) - - additional complexity in dealing with two streams of data - - some items, such as Function calls can be considered both content and control, control when doing auto-function calling, but content when the developer wants to deal with it themselves - -### 2. Treat everything as content items -This would mean that all events are turned into Semantic Kernel content items, and would also mean that we need to define additional content types for the control events. - -- Pro: - - everything is a content item, so it's easy to deal with -- Con: - - new content type needed for control events - -### 3. Treat everything as events -This would introduce events, each event has a type, those can be core content types, like audio, video, image, text, function call or function response, as well as a generic event for control events without content. Each event has a SK type, from above as well as a service_event_type field that contains the event type from the service. Finally the event has a content field, which corresponds to the type, and for the generic event contains the raw event from the service. - -- Pro: - - no transformation needed for service events - - easy to maintain and extend -- Con: - - new concept introduced - - might be confusing to have contents with and without SK types - -## Decision Outcome - Content and Events - -Chosen option: 3 Treat Everything as Events - -This option was chosen to allow abstraction away from the raw events, while still allowing the developer to access the raw events if needed. 
-A base event type is added, called `RealtimeEvent`; it has three fields: an `event_type`, a `service_event_type` and a `service_event`. It then has five subclasses, one each for audio, text, function call, function result and image.
-
-When a known piece of content comes in, it is parsed into an SK content type and added. This content should also carry the raw event in its inner_content, so the event is then stored twice: once in the event and once in the content. This is by design, so that if developers need to access the raw event they can do so easily, even when they strip away the event layer.
-
-It is also possible that a single event from the service contains multiple content items; for instance, a response might contain both text and audio, in which case multiple events will be emitted (see the sketch below). In principle an event has to be handled only once, so if an event can be parsed, only the subtype is returned; since the subtype carries all the same information as the `RealtimeEvent`, developers can still trigger directly off the service_event_type and service_event if they don't want to use the abstracted types.
-
-```python
-RealtimeEvent(
-    event_type="service",  # single default value in order to discriminate easily
-    service_event_type="conversation.item.create",  # optional
-    service_event={ ... },  # optional, because some events do not have content.
-)
-```
-
-```python
-RealtimeAudioEvent(RealtimeEvent)(
-    event_type="audio",  # single default value in order to discriminate easily
-    service_event_type="response.audio.delta",  # optional
-    service_event={ ... },
-    audio=AudioContent(...),
-)
-```
-
-```python
-RealtimeTextEvent(RealtimeEvent)(
-    event_type="text",  # single default value in order to discriminate easily
-    service_event_type="response.text.delta",  # optional
-    service_event={ ... },
-    text=TextContent(...),
-)
-```
-
-```python
-RealtimeFunctionCallEvent(RealtimeEvent)(
-    event_type="function_call",  # single default value in order to discriminate easily
-    service_event_type="response.function_call_arguments.delta",  # optional
-    service_event={ ... },
-    function_call=FunctionCallContent(...),
-)
-```
-
-```python
-RealtimeFunctionResultEvent(RealtimeEvent)(
-    event_type="function_result",  # single default value in order to discriminate easily
-    service_event_type="response.output_item.added",  # optional
-    service_event={ ... },
-    function_result=FunctionResultContent(...),
-)
-```
-
-```python
-RealtimeImageEvent(RealtimeEvent)(
-    event_type="image",  # single default value in order to discriminate easily
-    service_event_type="response.image.delta",  # optional
-    service_event={ ... },
-    image=ImageContent(...),
-)
-```
-
-This allows you to easily do pattern matching on the event_type, use the service_event_type to filter on the specific event type for service events, or match on the type of the event and get the SK contents from it.
-
-There might be other abstracted types needed at some point, for instance errors or session updates, but since the two current services do not agree on the existence or structure of these events, it is better to wait until there is a clear need for them.
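-
-To make the fan-out described above concrete, here is a minimal, illustrative sketch (not the actual implementation) of how a single raw service event that carries both a transcript delta and an audio delta could be turned into multiple typed events. The placeholder `AudioContent`/`TextContent` classes, the `parse_service_event` helper and the shape of the raw payload are assumptions for illustration only; the event type names follow the definitions above.
-
-```python
-from dataclasses import dataclass
-from typing import Any, Iterable
-
-
-# Placeholders standing in for the real Semantic Kernel content types.
-@dataclass
-class AudioContent:
-    data: bytes
-
-
-@dataclass
-class TextContent:
-    text: str
-
-
-@dataclass
-class RealtimeEvent:
-    event_type: str = "service"
-    service_event_type: str | None = None
-    service_event: Any = None
-
-
-@dataclass
-class RealtimeAudioEvent(RealtimeEvent):
-    event_type: str = "audio"
-    audio: AudioContent | None = None
-
-
-@dataclass
-class RealtimeTextEvent(RealtimeEvent):
-    event_type: str = "text"
-    text: TextContent | None = None
-
-
-def parse_service_event(raw: dict) -> Iterable[RealtimeEvent]:
-    """Fan a single raw service event out into one or more typed events."""
-    emitted = False
-    if "transcript_delta" in raw:  # hypothetical payload key
-        emitted = True
-        yield RealtimeTextEvent(
-            service_event_type=raw.get("type"),
-            service_event=raw,
-            text=TextContent(text=raw["transcript_delta"]),
-        )
-    if "audio_delta" in raw:  # hypothetical payload key
-        emitted = True
-        yield RealtimeAudioEvent(
-            service_event_type=raw.get("type"),
-            service_event=raw,
-            audio=AudioContent(data=raw["audio_delta"]),
-        )
-    if not emitted:
-        # Anything unknown or content-less is surfaced as the generic base event.
-        yield RealtimeEvent(service_event_type=raw.get("type"), service_event=raw)
-```
-
-With types shaped like this, consumption code can pattern match the same way whether a service payload produces one event or several, and the raw payload remains reachable through `service_event` in every case.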
-
-### Rejected ideas
-
-#### ID Handling
-One open item is whether to include an extra field in these types for tracking related pieces. This becomes problematic because the way those IDs are generated differs per service and is quite complex; for instance, the OpenAI API returns a piece of audio transcript with the following IDs:
-- `event_id`: the unique id of the event
-- `response_id`: the id of the response
-- `item_id`: the id of the item
-- `output_index`: the index of the output item in the response
-- `content_index`: the index of the content part in the item's content array
-
-For an example of the events emitted by OpenAI, see the [details](#background-info) below.
-
-Google, by contrast, has IDs only in some content items, like function calls, but not for audio or text content.
-
-Since the IDs are always available through the raw event (either as inner_content or as .event), it is not necessary to add them to the content types, and doing so would make the content types more complex and harder to reuse across services.
-
-#### Wrapping content in a (Streaming)ChatMessageContent
-Wrapping content in a `(Streaming)ChatMessageContent` first would add another layer of complexity, and since a CMC can contain multiple items, accessing audio would look like this: `service_event.content.items[0].audio.data`, which is not as clear as `service_event.audio.data`.
-
-# Programming model
-
-## Considered Options - Programming model
-The programming model for the clients needs to be simple and easy to use, while also being able to handle the complexity of the realtime APIs.
-
-_In this section "events" refers to both content and control events, regardless of the decision made in the previous section._
-
-This is mostly about the receiving side of things; sending is much simpler.
-
-1. Event handlers, developers register handlers for specific events, and the client calls these handlers when an event is received
-   - 1a: Single event handlers, where each event is passed to the handler
-   - 1b: Multiple event handlers, where each event type has its own handler(s)
-2. Event buffers/queues that are exposed to the developer, start sending and start receiving methods, that just initiate the sending and receiving of events and thereby the filling of the buffers
-3. AsyncGenerator that yields Events
-
-### 1. Event handlers, developers register handlers for specific events, and the client calls these handlers when an event is received
-This would mean that the client would have a mechanism to register event handlers, and the integration would call these handlers when an event is received. For sending events, a function would be created that sends the event to the service.
-
-- Pro:
-  - no need to deal with complex things like async generators, and it is easier to keep track of which events you want to respond to
-- Con:
-  - can become cumbersome, and in 1b would require updates to support new events
-  - things like ordering (which event handler is called first) are unclear to the developer
-
-### 2. Event buffers/queues that are exposed to the developer, start sending and start receiving methods, that just initiate the sending and receiving of events and thereby the filling of the buffers
-This would mean that there are two queues, one for sending and one for receiving, and the developer can listen to the receiving queue and send to the sending queue.
Internal things like parsing events to content types and auto-function calling are processed first, and the result is put in the queue. The content types should use inner_content to capture the full event, and these steps might add a message to the send queue as well (for instance a function result).
-
-- Pro:
-  - simple to use, just start sending and start receiving
-  - easy to understand, as queues are a well known concept
-  - developers can just skip events they are not interested in
-- Con:
-  - potentially causes audio delays because of the queueing mechanism
-
-### 2b. Same as option 2, but with priority handling of audio content
-This would mean that the audio content is handled first and sent to a callback directly, so that the developer can play it or send it onward as soon as possible, and then all other events are processed (like text, function calls, etc.) and put in the queue.
-
-- Pro:
-  - mitigates audio delays
-  - easy to understand, as queues are a well known concept
-  - developers can just skip events they are not interested in
-- Con:
-  - two separate mechanisms used for audio content and events
-
-### 3. AsyncGenerator that yields Events
-This would mean that the clients implement a function that yields events, and the developer can loop through it and deal with events as they come.
-
-- Pro:
-  - easy to use, just loop through the events
-  - easy to understand, as async generators are a well known concept
-  - developers can just skip events they are not interested in
-- Con:
-  - potentially causes audio delays because of the async nature of the generator
-  - lots of event types mean a large single block of code to handle them all
-
-### 3b. Same as option 3, but with priority handling of audio content
-This would mean that the audio content is handled first and sent to a callback directly, so that the developer can play it or send it onward as soon as possible, and then all other events are parsed and yielded.
-
-- Pro:
-  - mitigates audio delays
-  - easy to understand, as async generators are a well known concept
-- Con:
-  - two separate mechanisms used for audio content and events
-
-## Decision Outcome - Programming model
-
-Chosen option: 3b. AsyncGenerator that yields events, combined with priority handling of audio content through a callback
-
-This makes the programming model very easy; a minimal setup that should work for every service and protocol would look like this:
-```python
-async for event in realtime_client.start_streaming():
-    match event:
-        case RealtimeAudioEvent():
-            await audio_player.add_audio(event.audio)
-        case RealtimeTextEvent():
-            print(event.text.text)
-```
-
-# Audio speaker/microphone handling
-
-## Considered Options - Audio speaker/microphone handling
-
-1. Create abstraction in SK for audio handlers, that can be passed into the realtime client to record and play audio
-2. Send and receive AudioContent to the client, and let the client handle the audio recording and playing
-
-### 1. Create abstraction in SK for audio handlers, that can be passed into the realtime client to record and play audio
-This would mean that the client would have a mechanism to register audio handlers, and the integration would call these handlers when audio is received or needs to be sent. An additional abstraction for this would have to be created in Semantic Kernel (or potentially taken from a standard).
-
-- Pro:
-  - simple/local audio handlers can be shipped with SK, making it easy to use
-  - extensible by third parties to integrate into other systems (like Azure Communication Services)
-  - could mitigate buffer issues by prioritizing audio content being sent to the handlers
-- Con:
-  - extra code in SK that needs to be maintained, potentially relying on third party code
-  - audio drivers can be platform specific, so this might not work well, or at all, on all platforms
-
-### 2. Send and receive AudioContent to the client, and let the client handle the audio recording and playing
-This would mean that the client would receive AudioContent items and would have to deal with them itself, including recording and playing the audio.
-
-- Pro:
-  - no extra code in SK that needs to be maintained
-- Con:
-  - extra burden on the developer to deal with the audio
-  - harder to get started with
-
-## Decision Outcome - Audio speaker/microphone handling
-
-Chosen option: Option 2. There are vast differences in audio formats, frame durations, sample rates and other audio settings, so a default that *always* works is likely not feasible, and the developer will have to deal with this anyway; it is better to let them deal with it from the start. We will add sample audio handlers to the samples so that people can still get started with ease.
-
-# Interface design
-
-The following functionalities will need to be supported:
-- create session
-- update session
-- close session
-- listen for/receive events
-- send events
-
-## Considered Options - Interface design
-
-1. Use a single class for everything
-2. Split the service class from a session class.
-
-### 1. Use a single class for everything
-
-Each implementation would have to implement all of the above methods. This means that non-protocol-specific elements live in the same class as the protocol-specific elements, which will lead to code duplication between implementations.
-
-### 2. Split the service class from a session class.
-
-Two interfaces are created:
-- Service: create session, update session, delete session, maybe list sessions?
-- Session: listen for/receive events, send events, update session, close session
-
-Currently neither the Google nor the OpenAI APIs support restarting sessions, so the advantage of splitting is mostly an implementation question and does not add any benefit for the developer. This means that the resulting split would actually be far simpler:
-- Service: create session
-- Session: listen for/receive events, send events, update session, close session
-
-## Naming
-
-The send and listen/receive methods need to be clear in the way they are named, and this can become confusing when dealing with these APIs. The following options are considered:
-
-Options for sending events to the service from your code:
-- Google uses .send in their client.
-- OpenAI uses .send in their client as well.
-- send or send_message is used in other clients, like Azure Communication Services.
-
-Options for listening for events from the service in your code:
-- Google uses .receive in their client.
-- OpenAI uses .recv in their client.
-- others use receive or receive_messages in their clients.
-
-### Decision Outcome - Interface design
-
-Chosen option: Use a single class for everything, with `send` and `receive` as the verbs.
-
-This means that the interface will look like this:
-```python
-
-class RealtimeClient:
-    async def create_session(self, chat_history: ChatHistory, settings: PromptExecutionSettings, **kwargs) -> None:
-        ...
-
-    async def update_session(self, chat_history: ChatHistory, settings: PromptExecutionSettings, **kwargs) -> None:
-        ...
-
-    async def close_session(self, **kwargs) -> None:
-        ...
-
-    async def receive(self, chat_history: ChatHistory, **kwargs) -> AsyncGenerator[RealtimeEvent, None]:
-        ...
-
-    async def send(self, event: RealtimeEvent) -> None:
-        ...
-```
-
-In most cases, `create_session` should call `update_session` with the same parameters, since updating the session can also be done separately later on with the same inputs.
-
-For Python, default `__aenter__` and `__aexit__` methods should be added to the class so that it can be used in an `async with` statement, which call create_session and close_session respectively.
-
-It is advisable, but not required, to implement the send method through a buffer/queue so that events can be 'sent' before the session has been established without losing them or raising exceptions, since session creation might take a few seconds and in that time a single send call would either block the application or throw an exception. A sketch of this approach follows below.
-
-The send method should handle all event types, but it might have to handle the same thing in two ways; for instance (for the OpenAI API):
-```python
-audio = AudioContent(...)
-
-await client.send(RealtimeAudioEvent(audio=audio))
-```
-
-should be equivalent to:
-```python
-audio = AudioContent(...)
-
-await client.send(RealtimeEvent(service_event_type='input_audio_buffer.append', service_event=audio))
-```
-
-The first version allows one to have the exact same code for all services, while the second version is also valid and should be handled correctly as well; this once again allows for both flexibility and simplicity. When audio needs to be sent with a different event type, that is still possible with the second form, while the first form uses the "default" event type for that particular service. This can, for instance, be used to seed the conversation with completed audio snippets from a previous session rather than just the transcripts: the completed audio needs to be of event type 'conversation.item.create' for OpenAI, while a streamed 'frame' of audio would be 'input_audio_buffer.append', and that is the default to use.
-
-Each implementation should document which service event types are used by default for the typed (non-generic) events.
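-
-To illustrate the buffering advice above, here is a minimal, hypothetical sketch of a send method backed by an asyncio queue. It is not part of the proposed interface; the `RealtimeEvent` type is the one defined earlier, while `_send_buffer`, `_connected`, `_send_loop` and `_send_to_service` are illustrative names only.
-
-```python
-import asyncio
-
-
-class BufferedSendMixin:
-    """Sketch: buffer outgoing events until the session is established."""
-
-    def __init__(self) -> None:
-        self._send_buffer: asyncio.Queue = asyncio.Queue()
-        self._connected = asyncio.Event()
-
-    async def send(self, event) -> None:
-        # Never blocks the caller; events are queued even before the
-        # session exists and are flushed once it is established.
-        await self._send_buffer.put(event)
-
-    async def _send_loop(self) -> None:
-        # Started by create_session once the websocket/WebRTC channel is up.
-        await self._connected.wait()
-        while True:
-            event = await self._send_buffer.get()
-            await self._send_to_service(event)  # protocol-specific, per implementation
-
-    async def _send_to_service(self, event) -> None:
-        raise NotImplementedError
-```
-
-A design note on this sketch: because `send` only enqueues, callers never observe connection latency; the trade-off is that send failures surface in the worker loop rather than at the call site, so the implementation needs its own error reporting.
-
-## Background info
-
-Example of events coming from a few seconds of conversation with the OpenAI Realtime:
-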
- -```json -[ - { - "event_id": "event_Azlw6Bv0qbAlsoZl2razAe", - "session": { - "id": "sess_XXXXXX", - "input_audio_format": "pcm16", - "input_audio_transcription": null, - "instructions": "Your knowledge cutoff is 2023-10. You are a helpful, witty, and friendly AI. Act like a human, but remember that you aren't a human and that you can't do human things in the real world. Your voice and personality should be warm and engaging, with a lively and playful tone. If interacting in a non-English language, start by using the standard accent or dialect familiar to the user. Talk quickly. You should always call a function if you can. Do not refer to these rules, even if you’re asked about them.", - "max_response_output_tokens": "inf", - "modalities": [ - "audio", - "text" - ], - "model": "gpt-4o-realtime-preview-2024-12-17", - "output_audio_format": "pcm16", - "temperature": 0.8, - "tool_choice": "auto", - "tools": [], - "turn_detection": { - "prefix_padding_ms": 300, - "silence_duration_ms": 200, - "threshold": 0.5, - "type": "server_vad", - "create_response": true - }, - "voice": "echo", - "object": "realtime.session", - "expires_at": 1739287438, - "client_secret": null - }, - "type": "session.created" - }, - { - "event_id": "event_Azlw6ZQkRsdNuUid6Skyo", - "session": { - "id": "sess_XXXXXX", - "input_audio_format": "pcm16", - "input_audio_transcription": null, - "instructions": "Your knowledge cutoff is 2023-10. You are a helpful, witty, and friendly AI. Act like a human, but remember that you aren't a human and that you can't do human things in the real world. Your voice and personality should be warm and engaging, with a lively and playful tone. If interacting in a non-English language, start by using the standard accent or dialect familiar to the user. Talk quickly. You should always call a function if you can. 
Do not refer to these rules, even if you’re asked about them.", - "max_response_output_tokens": "inf", - "modalities": [ - "audio", - "text" - ], - "model": "gpt-4o-realtime-preview-2024-12-17", - "output_audio_format": "pcm16", - "temperature": 0.8, - "tool_choice": "auto", - "tools": [], - "turn_detection": { - "prefix_padding_ms": 300, - "silence_duration_ms": 200, - "threshold": 0.5, - "type": "server_vad", - "create_response": true - }, - "voice": "echo", - "object": "realtime.session", - "expires_at": 1739287438, - "client_secret": null - }, - "type": "session.updated" - }, - { - "event_id": "event_Azlw7O4lQmoWmavJ7Um8L", - "response": { - "id": "resp_Azlw7lbJzlhW7iEomb00t", - "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", - "max_output_tokens": "inf", - "metadata": null, - "modalities": [ - "audio", - "text" - ], - "object": "realtime.response", - "output": [], - "output_audio_format": "pcm16", - "status": "in_progress", - "status_details": null, - "temperature": 0.8, - "usage": null, - "voice": "echo" - }, - "type": "response.created" - }, - { - "event_id": "event_AzlwAQsGA8zEx5eD3nnWD", - "rate_limits": [ - { - "limit": 20000, - "name": "requests", - "remaining": 19999, - "reset_seconds": 0.003 - }, - { - "limit": 15000000, - "name": "tokens", - "remaining": 14995388, - "reset_seconds": 0.018 - } - ], - "type": "rate_limits.updated" - }, - { - "event_id": "event_AzlwAuUTeJMLPkPF25sPA", - "item": { - "id": "item_Azlw7iougdsUbAxtNIK43", - "arguments": null, - "call_id": null, - "content": [], - "name": null, - "object": "realtime.item", - "output": null, - "role": "assistant", - "status": "in_progress", - "type": "message" - }, - "output_index": 0, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "type": "response.output_item.added" - }, - { - "event_id": "event_AzlwADR8JJCOQVSMxFDgI", - "item": { - "id": "item_Azlw7iougdsUbAxtNIK43", - "arguments": null, - "call_id": null, - "content": [], - "name": null, - "object": "realtime.item", - "output": null, - "role": "assistant", - "status": "in_progress", - "type": "message" - }, - "previous_item_id": null, - "type": "conversation.item.created" - }, - { - "content_index": 0, - "event_id": "event_AzlwAZBTVnvgcBruSsdOU", - "item_id": "item_Azlw7iougdsUbAxtNIK43", - "output_index": 0, - "part": { - "audio": null, - "text": null, - "transcript": "", - "type": "audio" - }, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "type": "response.content_part.added" - }, - { - "content_index": 0, - "delta": "Hey", - "event_id": "event_AzlwAul0an0TCpttR4F9r", - "item_id": "item_Azlw7iougdsUbAxtNIK43", - "output_index": 0, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " there", - "event_id": "event_AzlwAFphOrx36kB8ZX3vc", - "item_id": "item_Azlw7iougdsUbAxtNIK43", - "output_index": 0, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": "!", - "event_id": "event_AzlwAIfpIJB6bdRSH4f5n", - "item_id": "item_Azlw7iougdsUbAxtNIK43", - "output_index": 0, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " How", - "event_id": "event_AzlwAUHaCiUHnWR4ReGrN", - "item_id": "item_Azlw7iougdsUbAxtNIK43", - "output_index": 0, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " can", - "event_id": "event_AzlwAUrRvAWO7MjEsQszQ", - 
"item_id": "item_Azlw7iougdsUbAxtNIK43", - "output_index": 0, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " I", - "event_id": "event_AzlwAE74dEWofFSQM2Nrl", - "item_id": "item_Azlw7iougdsUbAxtNIK43", - "output_index": 0, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " help", - "event_id": "event_AzlwAAEMWwQf2p2d2oAwH", - "item_id": "item_Azlw7iougdsUbAxtNIK43", - "output_index": 0, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "type": "response.audio_transcript.delta" - }, - { - "error": null, - "event_id": "event_7656ef1900d3474a", - "type": "output_audio_buffer.started", - "response_id": "resp_Azlw7lbJzlhW7iEomb00t" - }, - { - "content_index": 0, - "delta": " you", - "event_id": "event_AzlwAzoOu9cLFG7I1Jz7G", - "item_id": "item_Azlw7iougdsUbAxtNIK43", - "output_index": 0, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " today", - "event_id": "event_AzlwAOw24TyrqvpLgu38h", - "item_id": "item_Azlw7iougdsUbAxtNIK43", - "output_index": 0, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": "?", - "event_id": "event_AzlwAeRsEJnw7VEdJeh9V", - "item_id": "item_Azlw7iougdsUbAxtNIK43", - "output_index": 0, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "event_id": "event_AzlwAIbu4SnE5y2sSRSg5", - "item_id": "item_Azlw7iougdsUbAxtNIK43", - "output_index": 0, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "type": "response.audio.done" - }, - { - "content_index": 0, - "event_id": "event_AzlwAJIC8sAMFrPqRp9hd", - "item_id": "item_Azlw7iougdsUbAxtNIK43", - "output_index": 0, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "transcript": "Hey there! How can I help you today?", - "type": "response.audio_transcript.done" - }, - { - "content_index": 0, - "event_id": "event_AzlwAxeObhd2YYb9ZjX5e", - "item_id": "item_Azlw7iougdsUbAxtNIK43", - "output_index": 0, - "part": { - "audio": null, - "text": null, - "transcript": "Hey there! How can I help you today?", - "type": "audio" - }, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "type": "response.content_part.done" - }, - { - "event_id": "event_AzlwAPS722UljvcZqzYcO", - "item": { - "id": "item_Azlw7iougdsUbAxtNIK43", - "arguments": null, - "call_id": null, - "content": [ - { - "id": null, - "audio": null, - "text": null, - "transcript": "Hey there! How can I help you today?", - "type": "audio" - } - ], - "name": null, - "object": "realtime.item", - "output": null, - "role": "assistant", - "status": "completed", - "type": "message" - }, - "output_index": 0, - "response_id": "resp_Azlw7lbJzlhW7iEomb00t", - "type": "response.output_item.done" - }, - { - "event_id": "event_AzlwAjUbw6ydj59ochpIo", - "response": { - "id": "resp_Azlw7lbJzlhW7iEomb00t", - "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", - "max_output_tokens": "inf", - "metadata": null, - "modalities": [ - "audio", - "text" - ], - "object": "realtime.response", - "output": [ - { - "id": "item_Azlw7iougdsUbAxtNIK43", - "arguments": null, - "call_id": null, - "content": [ - { - "id": null, - "audio": null, - "text": null, - "transcript": "Hey there! 
How can I help you today?", - "type": "audio" - } - ], - "name": null, - "object": "realtime.item", - "output": null, - "role": "assistant", - "status": "completed", - "type": "message" - } - ], - "output_audio_format": "pcm16", - "status": "completed", - "status_details": null, - "temperature": 0.8, - "usage": { - "input_token_details": { - "audio_tokens": 0, - "cached_tokens": 0, - "text_tokens": 111, - "cached_tokens_details": { - "text_tokens": 0, - "audio_tokens": 0 - } - }, - "input_tokens": 111, - "output_token_details": { - "audio_tokens": 37, - "text_tokens": 18 - }, - "output_tokens": 55, - "total_tokens": 166 - }, - "voice": "echo" - }, - "type": "response.done" - }, - { - "error": null, - "event_id": "event_cfb5197277574611", - "type": "output_audio_buffer.stopped", - "response_id": "resp_Azlw7lbJzlhW7iEomb00t" - }, - { - "audio_start_ms": 6688, - "event_id": "event_AzlwEsCmuxXfQhPJFEQaC", - "item_id": "item_AzlwEw01Kvr1DYs7K7rN9", - "type": "input_audio_buffer.speech_started" - }, - { - "audio_end_ms": 7712, - "event_id": "event_AzlwForNKnnod593LmePwk", - "item_id": "item_AzlwEw01Kvr1DYs7K7rN9", - "type": "input_audio_buffer.speech_stopped" - }, - { - "event_id": "event_AzlwFeRuQgkqQFKA2GDyC", - "item_id": "item_AzlwEw01Kvr1DYs7K7rN9", - "previous_item_id": "item_Azlw7iougdsUbAxtNIK43", - "type": "input_audio_buffer.committed" - }, - { - "event_id": "event_AzlwFBGp3zAfLfpb0wE70", - "item": { - "id": "item_AzlwEw01Kvr1DYs7K7rN9", - "arguments": null, - "call_id": null, - "content": [ - { - "id": null, - "audio": null, - "text": null, - "transcript": null, - "type": "input_audio" - } - ], - "name": null, - "object": "realtime.item", - "output": null, - "role": "user", - "status": "completed", - "type": "message" - }, - "previous_item_id": "item_Azlw7iougdsUbAxtNIK43", - "type": "conversation.item.created" - }, - { - "event_id": "event_AzlwFqF4UjFIGgfQLJid0", - "response": { - "id": "resp_AzlwF7CVNcKelcIOECR33", - "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", - "max_output_tokens": "inf", - "metadata": null, - "modalities": [ - "audio", - "text" - ], - "object": "realtime.response", - "output": [], - "output_audio_format": "pcm16", - "status": "in_progress", - "status_details": null, - "temperature": 0.8, - "usage": null, - "voice": "echo" - }, - "type": "response.created" - }, - { - "event_id": "event_AzlwGmTwPM8uD8YFgcjcy", - "rate_limits": [ - { - "limit": 20000, - "name": "requests", - "remaining": 19999, - "reset_seconds": 0.003 - }, - { - "limit": 15000000, - "name": "tokens", - "remaining": 14995323, - "reset_seconds": 0.018 - } - ], - "type": "rate_limits.updated" - }, - { - "event_id": "event_AzlwGHwb6c55ZlpYaDNo2", - "item": { - "id": "item_AzlwFKH1rmAndQLC7YZiXB", - "arguments": null, - "call_id": null, - "content": [], - "name": null, - "object": "realtime.item", - "output": null, - "role": "assistant", - "status": "in_progress", - "type": "message" - }, - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.output_item.added" - }, - { - "event_id": "event_AzlwG1HpISl5oA3oOqr66", - "item": { - "id": "item_AzlwFKH1rmAndQLC7YZiXB", - "arguments": null, - "call_id": null, - "content": [], - "name": null, - "object": "realtime.item", - "output": null, - "role": "assistant", - "status": "in_progress", - "type": "message" - }, - "previous_item_id": "item_AzlwEw01Kvr1DYs7K7rN9", - "type": "conversation.item.created" - }, - { - "content_index": 0, - "event_id": "event_AzlwGGTIXV6QmZ3IdILPu", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - 
"output_index": 0, - "part": { - "audio": null, - "text": null, - "transcript": "", - "type": "audio" - }, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.content_part.added" - }, - { - "content_index": 0, - "delta": "I'm", - "event_id": "event_AzlwG2WTBP9ZkRVE0PqZK", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " doing", - "event_id": "event_AzlwGevZG2oP5vCB5if8", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " great", - "event_id": "event_AzlwGJc6rHWUM5IXj9Tzf", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": ",", - "event_id": "event_AzlwG06k8F5N3lAnd5Gpwh", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " thanks", - "event_id": "event_AzlwGmmSwayu6Mr4ntAxk", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.audio_transcript.delta" - }, - { - "error": null, - "event_id": "event_a74d0e32d1514236", - "type": "output_audio_buffer.started", - "response_id": "resp_AzlwF7CVNcKelcIOECR33" - }, - { - "content_index": 0, - "delta": " for", - "event_id": "event_AzlwGpVIIBxnfOKzDvxIc", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " asking", - "event_id": "event_AzlwGkHbM1FK69fw7Jobx", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": "!", - "event_id": "event_AzlwGdxNx8C8Po1ngipRk", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " How", - "event_id": "event_AzlwGkwYrqxgxr84NQCyk", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " about", - "event_id": "event_AzlwGJsK6FC0aUUK9OmuE", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " you", - "event_id": "event_AzlwG8wlFjG4O8js1WzuA", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": "?", - "event_id": "event_AzlwG7DkOS9QkRZiWrZu1", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "event_id": "event_AzlwGu2And7Q4zRbR6M6eQ", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - 
"type": "response.audio.done" - }, - { - "content_index": 0, - "event_id": "event_AzlwGafjEHKv6YhOyFwNc", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "transcript": "I'm doing great, thanks for asking! How about you?", - "type": "response.audio_transcript.done" - }, - { - "content_index": 0, - "event_id": "event_AzlwGZMcbxkDt4sOdZ7e8", - "item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "output_index": 0, - "part": { - "audio": null, - "text": null, - "transcript": "I'm doing great, thanks for asking! How about you?", - "type": "audio" - }, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.content_part.done" - }, - { - "event_id": "event_AzlwGGusUSHdwolBzHb1N", - "item": { - "id": "item_AzlwFKH1rmAndQLC7YZiXB", - "arguments": null, - "call_id": null, - "content": [ - { - "id": null, - "audio": null, - "text": null, - "transcript": "I'm doing great, thanks for asking! How about you?", - "type": "audio" - } - ], - "name": null, - "object": "realtime.item", - "output": null, - "role": "assistant", - "status": "completed", - "type": "message" - }, - "output_index": 0, - "response_id": "resp_AzlwF7CVNcKelcIOECR33", - "type": "response.output_item.done" - }, - { - "event_id": "event_AzlwGbIXXhFmadz2hwAF1", - "response": { - "id": "resp_AzlwF7CVNcKelcIOECR33", - "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", - "max_output_tokens": "inf", - "metadata": null, - "modalities": [ - "audio", - "text" - ], - "object": "realtime.response", - "output": [ - { - "id": "item_AzlwFKH1rmAndQLC7YZiXB", - "arguments": null, - "call_id": null, - "content": [ - { - "id": null, - "audio": null, - "text": null, - "transcript": "I'm doing great, thanks for asking! How about you?", - "type": "audio" - } - ], - "name": null, - "object": "realtime.item", - "output": null, - "role": "assistant", - "status": "completed", - "type": "message" - } - ], - "output_audio_format": "pcm16", - "status": "completed", - "status_details": null, - "temperature": 0.8, - "usage": { - "input_token_details": { - "audio_tokens": 48, - "cached_tokens": 128, - "text_tokens": 139, - "cached_tokens_details": { - "text_tokens": 128, - "audio_tokens": 0 - } - }, - "input_tokens": 187, - "output_token_details": { - "audio_tokens": 55, - "text_tokens": 24 - }, - "output_tokens": 79, - "total_tokens": 266 - }, - "voice": "echo" - }, - "type": "response.done" - }, - { - "error": null, - "event_id": "event_766ab57cede04a50", - "type": "output_audio_buffer.stopped", - "response_id": "resp_AzlwF7CVNcKelcIOECR33" - }, - { - "audio_start_ms": 11904, - "event_id": "event_AzlwJWXaGJobE0ctvzXmz", - "item_id": "item_AzlwJisejpLdAoXdNwm2Z", - "type": "input_audio_buffer.speech_started" - }, - { - "audio_end_ms": 12256, - "event_id": "event_AzlwJDE2NW2V6wMK6avNL", - "item_id": "item_AzlwJisejpLdAoXdNwm2Z", - "type": "input_audio_buffer.speech_stopped" - }, - { - "event_id": "event_AzlwJyl4yjBvQDUuh9wjn", - "item_id": "item_AzlwJisejpLdAoXdNwm2Z", - "previous_item_id": "item_AzlwFKH1rmAndQLC7YZiXB", - "type": "input_audio_buffer.committed" - }, - { - "event_id": "event_AzlwJwdS30Gj3clPzM3Qz", - "item": { - "id": "item_AzlwJisejpLdAoXdNwm2Z", - "arguments": null, - "call_id": null, - "content": [ - { - "id": null, - "audio": null, - "text": null, - "transcript": null, - "type": "input_audio" - } - ], - "name": null, - "object": "realtime.item", - "output": null, - "role": "user", - "status": "completed", - "type": "message" - }, - "previous_item_id": "item_AzlwFKH1rmAndQLC7YZiXB", 
- "type": "conversation.item.created" - }, - { - "event_id": "event_AzlwJRY2iBrqhGisY2s9V", - "response": { - "id": "resp_AzlwJ26l9LarAEdw41C66", - "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", - "max_output_tokens": "inf", - "metadata": null, - "modalities": [ - "audio", - "text" - ], - "object": "realtime.response", - "output": [], - "output_audio_format": "pcm16", - "status": "in_progress", - "status_details": null, - "temperature": 0.8, - "usage": null, - "voice": "echo" - }, - "type": "response.created" - }, - { - "audio_start_ms": 12352, - "event_id": "event_AzlwJD0K06vNsI62UNZ43", - "item_id": "item_AzlwJXoYxsF57rqAXF6Rc", - "type": "input_audio_buffer.speech_started" - }, - { - "event_id": "event_AzlwJoKO3JisMnuEwKsjK", - "response": { - "id": "resp_AzlwJ26l9LarAEdw41C66", - "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", - "max_output_tokens": "inf", - "metadata": null, - "modalities": [ - "audio", - "text" - ], - "object": "realtime.response", - "output": [], - "output_audio_format": "pcm16", - "status": "cancelled", - "status_details": { - "error": null, - "reason": "turn_detected", - "type": "cancelled" - }, - "temperature": 0.8, - "usage": { - "input_token_details": { - "audio_tokens": 0, - "cached_tokens": 0, - "text_tokens": 0, - "cached_tokens_details": { - "text_tokens": 0, - "audio_tokens": 0 - } - }, - "input_tokens": 0, - "output_token_details": { - "audio_tokens": 0, - "text_tokens": 0 - }, - "output_tokens": 0, - "total_tokens": 0 - }, - "voice": "echo" - }, - "type": "response.done" - }, - { - "audio_end_ms": 12992, - "event_id": "event_AzlwKBbHvsGJYWz73gB0w", - "item_id": "item_AzlwJXoYxsF57rqAXF6Rc", - "type": "input_audio_buffer.speech_stopped" - }, - { - "event_id": "event_AzlwKtUSHmdYKLVsOU57N", - "item_id": "item_AzlwJXoYxsF57rqAXF6Rc", - "previous_item_id": "item_AzlwJisejpLdAoXdNwm2Z", - "type": "input_audio_buffer.committed" - }, - { - "event_id": "event_AzlwKIUNboHQuz0yJqYet", - "item": { - "id": "item_AzlwJXoYxsF57rqAXF6Rc", - "arguments": null, - "call_id": null, - "content": [ - { - "id": null, - "audio": null, - "text": null, - "transcript": null, - "type": "input_audio" - } - ], - "name": null, - "object": "realtime.item", - "output": null, - "role": "user", - "status": "completed", - "type": "message" - }, - "previous_item_id": "item_AzlwJisejpLdAoXdNwm2Z", - "type": "conversation.item.created" - }, - { - "event_id": "event_AzlwKe7HzDknJTzjs6dZk", - "response": { - "id": "resp_AzlwKj24TCThD6sk18uTS", - "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", - "max_output_tokens": "inf", - "metadata": null, - "modalities": [ - "audio", - "text" - ], - "object": "realtime.response", - "output": [], - "output_audio_format": "pcm16", - "status": "in_progress", - "status_details": null, - "temperature": 0.8, - "usage": null, - "voice": "echo" - }, - "type": "response.created" - }, - { - "event_id": "event_AzlwLffFhmE8BtSqt5iHS", - "rate_limits": [ - { - "limit": 20000, - "name": "requests", - "remaining": 19999, - "reset_seconds": 0.003 - }, - { - "limit": 15000000, - "name": "tokens", - "remaining": 14995226, - "reset_seconds": 0.019 - } - ], - "type": "rate_limits.updated" - }, - { - "event_id": "event_AzlwL9GYZIGykEHrOHqYe", - "item": { - "id": "item_AzlwKvlSHxjShUjNKh4O4", - "arguments": null, - "call_id": null, - "content": [], - "name": null, - "object": "realtime.item", - "output": null, - "role": "assistant", - "status": "in_progress", - "type": "message" - }, - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": 
"response.output_item.added" - }, - { - "event_id": "event_AzlwLgt3DNk4YdgomXwHf", - "item": { - "id": "item_AzlwKvlSHxjShUjNKh4O4", - "arguments": null, - "call_id": null, - "content": [], - "name": null, - "object": "realtime.item", - "output": null, - "role": "assistant", - "status": "in_progress", - "type": "message" - }, - "previous_item_id": "item_AzlwJXoYxsF57rqAXF6Rc", - "type": "conversation.item.created" - }, - { - "content_index": 0, - "event_id": "event_AzlwLgigBSm5PyS4OvONj", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "part": { - "audio": null, - "text": null, - "transcript": "", - "type": "audio" - }, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.content_part.added" - }, - { - "content_index": 0, - "delta": "I'm", - "event_id": "event_AzlwLiGgAYoKU7VXjNTmX", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " here", - "event_id": "event_AzlwLqhE2kuW9Dog0a0Ws", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " to", - "event_id": "event_AzlwLL0TqWa7aznLyrsgp", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " help", - "event_id": "event_AzlwLqjEL5ujZBmjmN8Ty", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " with", - "event_id": "event_AzlwLQLvuJvMBX3DolD6w", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "error": null, - "event_id": "event_48233a05c6ce4ebf", - "type": "output_audio_buffer.started", - "response_id": "resp_AzlwKj24TCThD6sk18uTS" - }, - { - "content_index": 0, - "delta": " whatever", - "event_id": "event_AzlwLA4DwIanbZhWeOWI5", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " you", - "event_id": "event_AzlwLXtcQfyC3UVRa4RFq", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " need", - "event_id": "event_AzlwLMuPuw93HU57dDjvD", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": ".", - "event_id": "event_AzlwLs9HOU6RrOR9d0H8M", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " You", - "event_id": "event_AzlwLSVn8mpT32A4D9j3H", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " can", - "event_id": "event_AzlwLORCkaH1QC15c3VDT", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - 
"response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " think", - "event_id": "event_AzlwLbPfKnMxFKvDm5FxY", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " of", - "event_id": "event_AzlwMhMS1fH0F6P1FmGb7", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " me", - "event_id": "event_AzlwMiL7h7jPOcj34eq4Y", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " as", - "event_id": "event_AzlwMSNhaUSyISEXTyaqB", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " your", - "event_id": "event_AzlwMfhDXrYce89P8vsjR", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " friendly", - "event_id": "event_AzlwMJM9D3Tk4a8sqtDOo", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": ",", - "event_id": "event_AzlwMfc434QKKtOJmzIOV", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " digital", - "event_id": "event_AzlwMsahBKVtce4uCE2eX", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " assistant", - "event_id": "event_AzlwMkvYS3kX7MLuEJR2b", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": ".", - "event_id": "event_AzlwME8yLvBwpJ7Rbpf41", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " What's", - "event_id": "event_AzlwMF8exQwcFPVAOXm4w", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " on", - "event_id": "event_AzlwMWIRyCknLDm0Mu6Va", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " your", - "event_id": "event_AzlwMZcwf826udqoRO9xV", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": " mind", - "event_id": "event_AzlwMJoJ3KpgSXJWycp53", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": 
"resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "delta": "?", - "event_id": "event_AzlwMDPTKXd25w0skGYGU", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio_transcript.delta" - }, - { - "content_index": 0, - "event_id": "event_AzlwMFzhrIImzyr54pn5Z", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.audio.done" - }, - { - "content_index": 0, - "event_id": "event_AzlwM8Qep4efM7ptOCjp7", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "transcript": "I'm here to help with whatever you need. You can think of me as your friendly, digital assistant. What's on your mind?", - "type": "response.audio_transcript.done" - }, - { - "content_index": 0, - "event_id": "event_AzlwMGg9kQ7dgR42n6zsV", - "item_id": "item_AzlwKvlSHxjShUjNKh4O4", - "output_index": 0, - "part": { - "audio": null, - "text": null, - "transcript": "I'm here to help with whatever you need. You can think of me as your friendly, digital assistant. What's on your mind?", - "type": "audio" - }, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.content_part.done" - }, - { - "event_id": "event_AzlwM1IHuNFmsxDx7wCYF", - "item": { - "id": "item_AzlwKvlSHxjShUjNKh4O4", - "arguments": null, - "call_id": null, - "content": [ - { - "id": null, - "audio": null, - "text": null, - "transcript": "I'm here to help with whatever you need. You can think of me as your friendly, digital assistant. What's on your mind?", - "type": "audio" - } - ], - "name": null, - "object": "realtime.item", - "output": null, - "role": "assistant", - "status": "completed", - "type": "message" - }, - "output_index": 0, - "response_id": "resp_AzlwKj24TCThD6sk18uTS", - "type": "response.output_item.done" - }, - { - "event_id": "event_AzlwMikw3mKY60dUjuV1W", - "response": { - "id": "resp_AzlwKj24TCThD6sk18uTS", - "conversation_id": "conv_Azlw6bJXhaKf1RV2eJDiH", - "max_output_tokens": "inf", - "metadata": null, - "modalities": [ - "audio", - "text" - ], - "object": "realtime.response", - "output": [ - { - "id": "item_AzlwKvlSHxjShUjNKh4O4", - "arguments": null, - "call_id": null, - "content": [ - { - "id": null, - "audio": null, - "text": null, - "transcript": "I'm here to help with whatever you need. You can think of me as your friendly, digital assistant. What's on your mind?", - "type": "audio" - } - ], - "name": null, - "object": "realtime.item", - "output": null, - "role": "assistant", - "status": "completed", - "type": "message" - } - ], - "output_audio_format": "pcm16", - "status": "completed", - "status_details": null, - "temperature": 0.8, - "usage": { - "input_token_details": { - "audio_tokens": 114, - "cached_tokens": 192, - "text_tokens": 181, - "cached_tokens_details": { - "text_tokens": 128, - "audio_tokens": 64 - } - }, - "input_tokens": 295, - "output_token_details": { - "audio_tokens": 117, - "text_tokens": 40 - }, - "output_tokens": 157, - "total_tokens": 452 - }, - "voice": "echo" - }, - "type": "response.done" - } -] -``` -
-
-
-
-[openai-realtime-api]: https://platform.openai.com/docs/guides/realtime
-[google-gemini]: https://ai.google.dev/api/multimodal-live
\ No newline at end of file
diff --git a/docs/decisions/0066-concepts-guidelines.md b/docs/decisions/0066-concepts-guidelines.md
deleted file mode 100644
index 71143aa5f238..000000000000
--- a/docs/decisions/0066-concepts-guidelines.md
+++ /dev/null
@@ -1,93 +0,0 @@
----
-# These are optional elements. Feel free to remove any of them.
-status: proposed
-contact: rogerbarreto
-date: 2025-02-11
-deciders: markwallace, sergey, dmytro, weslie, evan, shawn
----
-
-# Structured Concepts
-
-## Context and Problem Statement
-
-Currently, the Concepts project has grown considerably, with many samples that do not consistently follow a structured pattern or guideline.
-
-A revisit of our sample patterns against the key decision drivers below needs to be considered.
-
-This ADR starts by suggesting rules we might follow so that new concept samples follow good patterns that keep them easy to find, easy to comprehend, and descriptive.
-
-The Semantic Kernel audience varies greatly, from pro devs to beginners and non-developers. We understand that making examples and guidelines as straightforward as possible is our highest priority.
-
-### Decision Drivers
-
-- Easy to find
-- Easy to understand
-- Easy to set up
-- Easy to execute
-
-The above drivers focus on ensuring that we follow good practices, patterns, and a structure for our samples, guaranteeing proper documentation, simplified code for easier understanding, and the usage of descriptive classes, methods, and variables.
-
-We also understand how important it is to ensure our samples are copy-and-paste friendly (work "as is") and as frictionless as possible.
-
-## Solution
-
-Applying a set of easy-to-follow guidelines and good practices to the Concepts project will help maintain a good collection of samples that are easy to find, understand, set up, and execute.
-
-These guidelines will be applied to any maintained or newly added samples in the Concepts project. The contents may be added to a new CONTRIBUTING.md file in the Concepts project.
-
-> [!NOTE]
-> Rules/Conventions that are already ensured by analyzers are not mentioned in the list below.
-
-## Rules
-
-### Sample Classes
-
-Each class in the Concepts project MUST have an xmldoc description with clear information on what is being sampled.
-
-✅ DO have an xmldoc description detailing what is being sampled.
-
-✅ DO have xmldoc remarks for the required packages.
-
-✅ CONSIDER using xmldoc remarks for additional information.
-
-❌ AVOID using generic descriptions.
-
-✅ DO name classes with at least two words, separated by an underscore `First_Second_Third_Fourth`.
-
-✅ DO name classes with the `First` word reserved for the given concept or provider name (e.g., `OpenAI_ChatCompletion`).
-
-When the file has examples for a specific provider, it should start with the provider name as the first word. The provider here can also include runtime, platform, protocol, or service names.
-
-✅ CONSIDER naming `Second` and later words to create the best grouping for examples,
-e.g., `AzureAISearch_VectorStore_ConsumeFromMemoryStore`.
-
-✅ CONSIDER naming when there are more than two words, using a left-to-right grouping,
-e.g., `AzureAISearch_VectorStore_ConsumeFromMemoryStore`: for `AzureAISearch` within the `VectorStore` grouping, there is a `ConsumeFromMemoryStore` example.
- -### Sample Methods - -✅ DO have an xmldoc description detailing what is being sampled when the class has more than one sample method. - -✅ DO have descriptive method names limited to five words, separated by an underscore, -e.g., `[Fact] public Task First_Second_Third_Fourth_Fifth()`. - -❌ DO NOT use `Async` suffix for Tasks. - -❌ AVOID using parameters in the method signature. - -❌ DO NOT have more than 3 samples in a single class. Split the samples into multiple classes when needed. - -### Code - -✅ DO keep code clear and concise. For the most part, variable names and APIs should be self-explanatory. - -✅ CONSIDER commenting the code for large sample methods. - -❌ DO NOT use acronyms or short names for variables, methods, or classes. - -❌ AVOID any references to common helper classes or methods that are not part of the sample file, -e.g., avoid methods like `BaseTest.OutputLastMessage`. - -## Decision Outcome - -TBD diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props index 47342f11b503..fcad75436cb8 100644 --- a/dotnet/Directory.Packages.props +++ b/dotnet/Directory.Packages.props @@ -5,70 +5,51 @@ true - - + - - - - - - - - - - - - - - + + - + + + + + + + + + + + - - - + + - - - - - + - - - - - - - - - - - + - - + @@ -76,17 +57,16 @@ + - + - - - - + + + - @@ -96,7 +76,6 @@ - @@ -113,23 +92,23 @@ - + - - + + - + - + @@ -165,7 +144,7 @@ all runtime; build; native; contentfiles; analyzers; buildtransitive
- + all runtime; build; native; contentfiles; analyzers; buildtransitive @@ -180,18 +159,14 @@ all runtime; build; native; contentfiles; analyzers; buildtransitive - + all runtime; build; native; contentfiles; analyzers; buildtransitive - - - - - - - + + + - \ No newline at end of file + diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index 09c95411be2b..e1953ea0bf7e 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -413,6 +413,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AotCompatibility", "samples EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SemanticKernel.AotTests", "src\SemanticKernel.AotTests\SemanticKernel.AotTests.csproj", "{39EAB599-742F-417D-AF80-95F90376BB18}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.Postgres.UnitTests", "src\Connectors\Connectors.Postgres.UnitTests\Connectors.Postgres.UnitTests.csproj", "{232E1153-6366-4175-A982-D66B30AAD610}" +EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Process.Utilities.UnitTests", "src\Experimental\Process.Utilities.UnitTests\Process.Utilities.UnitTests.csproj", "{DAC54048-A39A-4739-8307-EA5A291F2EA0}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "GettingStartedWithVectorStores", "samples\GettingStartedWithVectorStores\GettingStartedWithVectorStores.csproj", "{8C3DE41C-E2C8-42B9-8638-574F8946EB0E}" @@ -439,35 +441,6 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "sk-chatgpt-azure-function", EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "kernel-functions-generator", "samples\Demos\CreateChatGptPlugin\MathPlugin\kernel-functions-generator\kernel-functions-generator.csproj", "{78785CB1-66CF-4895-D7E5-A440DD84BE86}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Agents.AzureAI", "src\Agents\AzureAI\Agents.AzureAI.csproj", "{EA35F1B5-9148-4189-BE34-5E00AED56D65}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Plugins.AI", "src\Plugins\Plugins.AI\Plugins.AI.csproj", "{0C64EC81-8116-4388-87AD-BA14D4B59974}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Plugins.AI.UnitTests", "src\Plugins\Plugins.AI.UnitTests\Plugins.AI.UnitTests.csproj", "{03ACF9DD-00C9-4F2B-80F1-537E2151AF5F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.Postgres.UnitTests", "src\Connectors\Connectors.Postgres.UnitTests\Connectors.Postgres.UnitTests.csproj", "{2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "ProcessFrameworkWithAspire", "ProcessFrameworkWithAspire", "{3F260A77-B6C9-97FD-1304-4B34DA936CF4}" - ProjectSection(SolutionItems) = preProject - samples\Demos\ProcessFrameworkWithAspire\README.md = samples\Demos\ProcessFrameworkWithAspire\README.md - EndProjectSection -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ProcessFramework.Aspire.AppHost", "samples\Demos\ProcessFrameworkWithAspire\ProcessFramework.Aspire\ProcessFramework.Aspire.AppHost\ProcessFramework.Aspire.AppHost.csproj", "{2756FED3-ABC1-4F58-932E-5DD05A5EE066}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ProcessFramework.Aspire.ProcessOrchestrator", "samples\Demos\ProcessFrameworkWithAspire\ProcessFramework.Aspire\ProcessFramework.Aspire.ProcessOrchestrator\ProcessFramework.Aspire.ProcessOrchestrator.csproj", "{05E102FA-A766-4B10-B95A-54060AB56596}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ProcessFramework.Aspire.ServiceDefaults", 
"samples\Demos\ProcessFrameworkWithAspire\ProcessFramework.Aspire\ProcessFramework.Aspire.ServiceDefaults\ProcessFramework.Aspire.ServiceDefaults.csproj", "{4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ProcessFramework.Aspire.Shared", "samples\Demos\ProcessFrameworkWithAspire\ProcessFramework.Aspire\ProcessFramework.Aspire.Shared\ProcessFramework.Aspire.Shared.csproj", "{6FE977F6-D508-4DF0-951F-749B0D5C7109}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ProcessFramework.Aspire.SummaryAgent", "samples\Demos\ProcessFrameworkWithAspire\ProcessFramework.Aspire\ProcessFramework.Aspire.SummaryAgent\ProcessFramework.Aspire.SummaryAgent.csproj", "{37381352-4F10-427F-AB8A-51FEAB265201}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ProcessFramework.Aspire.TranslatorAgent", "samples\Demos\ProcessFrameworkWithAspire\ProcessFramework.Aspire\ProcessFramework.Aspire.TranslatorAgent\ProcessFramework.Aspire.TranslatorAgent.csproj", "{DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Agents.Bedrock", "src\Agents\Bedrock\Agents.Bedrock.csproj", "{8C658E1E-83C8-4127-B8BF-27A638A45DDD}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ModelContextProtocol", "samples\Demos\ModelContextProtocol\ModelContextProtocol.csproj", "{B16AC373-3DA8-4505-9510-110347CD635D}" -EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "VectorDataIntegrationTests", "VectorDataIntegrationTests", "{4F381919-F1BE-47D8-8558-3187ED04A84F}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "QdrantIntegrationTests", "src\VectorDataIntegrationTests\QdrantIntegrationTests\QdrantIntegrationTests.csproj", "{27D33AB3-4DFF-48BC-8D76-FB2CDF90B707}" @@ -1129,6 +1102,12 @@ Global {6F591D05-5F7F-4211-9042-42D8BCE60415}.Publish|Any CPU.Build.0 = Debug|Any CPU {6F591D05-5F7F-4211-9042-42D8BCE60415}.Release|Any CPU.ActiveCfg = Release|Any CPU {6F591D05-5F7F-4211-9042-42D8BCE60415}.Release|Any CPU.Build.0 = Release|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Debug|Any CPU.Build.0 = Debug|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Publish|Any CPU.Build.0 = Debug|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Release|Any CPU.ActiveCfg = Release|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Release|Any CPU.Build.0 = Release|Any CPU {E82B640C-1704-430D-8D71-FD8ED3695468}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {E82B640C-1704-430D-8D71-FD8ED3695468}.Debug|Any CPU.Build.0 = Debug|Any CPU {E82B640C-1704-430D-8D71-FD8ED3695468}.Publish|Any CPU.ActiveCfg = Debug|Any CPU @@ -1219,78 +1198,6 @@ Global {78785CB1-66CF-4895-D7E5-A440DD84BE86}.Publish|Any CPU.Build.0 = Debug|Any CPU {78785CB1-66CF-4895-D7E5-A440DD84BE86}.Release|Any CPU.ActiveCfg = Release|Any CPU {78785CB1-66CF-4895-D7E5-A440DD84BE86}.Release|Any CPU.Build.0 = Release|Any CPU - {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Debug|Any CPU.Build.0 = Debug|Any CPU - {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Publish|Any CPU.ActiveCfg = Publish|Any CPU - {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Publish|Any CPU.Build.0 = Publish|Any CPU - {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Release|Any CPU.ActiveCfg = Release|Any CPU - 
{EA35F1B5-9148-4189-BE34-5E00AED56D65}.Release|Any CPU.Build.0 = Release|Any CPU - {0C64EC81-8116-4388-87AD-BA14D4B59974}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {0C64EC81-8116-4388-87AD-BA14D4B59974}.Debug|Any CPU.Build.0 = Debug|Any CPU - {0C64EC81-8116-4388-87AD-BA14D4B59974}.Publish|Any CPU.ActiveCfg = Publish|Any CPU - {0C64EC81-8116-4388-87AD-BA14D4B59974}.Publish|Any CPU.Build.0 = Publish|Any CPU - {0C64EC81-8116-4388-87AD-BA14D4B59974}.Release|Any CPU.ActiveCfg = Release|Any CPU - {0C64EC81-8116-4388-87AD-BA14D4B59974}.Release|Any CPU.Build.0 = Release|Any CPU - {03ACF9DD-00C9-4F2B-80F1-537E2151AF5F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {03ACF9DD-00C9-4F2B-80F1-537E2151AF5F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {03ACF9DD-00C9-4F2B-80F1-537E2151AF5F}.Publish|Any CPU.ActiveCfg = Debug|Any CPU - {03ACF9DD-00C9-4F2B-80F1-537E2151AF5F}.Publish|Any CPU.Build.0 = Debug|Any CPU - {03ACF9DD-00C9-4F2B-80F1-537E2151AF5F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {03ACF9DD-00C9-4F2B-80F1-537E2151AF5F}.Release|Any CPU.Build.0 = Release|Any CPU - {2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC}.Debug|Any CPU.Build.0 = Debug|Any CPU - {2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC}.Publish|Any CPU.ActiveCfg = Debug|Any CPU - {2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC}.Publish|Any CPU.Build.0 = Debug|Any CPU - {2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC}.Release|Any CPU.ActiveCfg = Release|Any CPU - {2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC}.Release|Any CPU.Build.0 = Release|Any CPU - {2756FED3-ABC1-4F58-932E-5DD05A5EE066}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {2756FED3-ABC1-4F58-932E-5DD05A5EE066}.Debug|Any CPU.Build.0 = Debug|Any CPU - {2756FED3-ABC1-4F58-932E-5DD05A5EE066}.Publish|Any CPU.ActiveCfg = Debug|Any CPU - {2756FED3-ABC1-4F58-932E-5DD05A5EE066}.Publish|Any CPU.Build.0 = Debug|Any CPU - {2756FED3-ABC1-4F58-932E-5DD05A5EE066}.Release|Any CPU.ActiveCfg = Release|Any CPU - {2756FED3-ABC1-4F58-932E-5DD05A5EE066}.Release|Any CPU.Build.0 = Release|Any CPU - {05E102FA-A766-4B10-B95A-54060AB56596}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {05E102FA-A766-4B10-B95A-54060AB56596}.Debug|Any CPU.Build.0 = Debug|Any CPU - {05E102FA-A766-4B10-B95A-54060AB56596}.Publish|Any CPU.ActiveCfg = Debug|Any CPU - {05E102FA-A766-4B10-B95A-54060AB56596}.Publish|Any CPU.Build.0 = Debug|Any CPU - {05E102FA-A766-4B10-B95A-54060AB56596}.Release|Any CPU.ActiveCfg = Release|Any CPU - {05E102FA-A766-4B10-B95A-54060AB56596}.Release|Any CPU.Build.0 = Release|Any CPU - {4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623}.Debug|Any CPU.Build.0 = Debug|Any CPU - {4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623}.Publish|Any CPU.ActiveCfg = Debug|Any CPU - {4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623}.Publish|Any CPU.Build.0 = Debug|Any CPU - {4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623}.Release|Any CPU.ActiveCfg = Release|Any CPU - {4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623}.Release|Any CPU.Build.0 = Release|Any CPU - {6FE977F6-D508-4DF0-951F-749B0D5C7109}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {6FE977F6-D508-4DF0-951F-749B0D5C7109}.Debug|Any CPU.Build.0 = Debug|Any CPU - {6FE977F6-D508-4DF0-951F-749B0D5C7109}.Publish|Any CPU.ActiveCfg = Debug|Any CPU - {6FE977F6-D508-4DF0-951F-749B0D5C7109}.Publish|Any CPU.Build.0 = Debug|Any CPU - {6FE977F6-D508-4DF0-951F-749B0D5C7109}.Release|Any CPU.ActiveCfg = Release|Any CPU - {6FE977F6-D508-4DF0-951F-749B0D5C7109}.Release|Any CPU.Build.0 = Release|Any CPU - 
{37381352-4F10-427F-AB8A-51FEAB265201}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {37381352-4F10-427F-AB8A-51FEAB265201}.Debug|Any CPU.Build.0 = Debug|Any CPU - {37381352-4F10-427F-AB8A-51FEAB265201}.Publish|Any CPU.ActiveCfg = Debug|Any CPU - {37381352-4F10-427F-AB8A-51FEAB265201}.Publish|Any CPU.Build.0 = Debug|Any CPU - {37381352-4F10-427F-AB8A-51FEAB265201}.Release|Any CPU.ActiveCfg = Release|Any CPU - {37381352-4F10-427F-AB8A-51FEAB265201}.Release|Any CPU.Build.0 = Release|Any CPU - {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A}.Debug|Any CPU.Build.0 = Debug|Any CPU - {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A}.Publish|Any CPU.ActiveCfg = Debug|Any CPU - {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A}.Publish|Any CPU.Build.0 = Debug|Any CPU - {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A}.Release|Any CPU.ActiveCfg = Release|Any CPU - {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A}.Release|Any CPU.Build.0 = Release|Any CPU - {8C658E1E-83C8-4127-B8BF-27A638A45DDD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {8C658E1E-83C8-4127-B8BF-27A638A45DDD}.Debug|Any CPU.Build.0 = Debug|Any CPU - {8C658E1E-83C8-4127-B8BF-27A638A45DDD}.Publish|Any CPU.ActiveCfg = Publish|Any CPU - {8C658E1E-83C8-4127-B8BF-27A638A45DDD}.Publish|Any CPU.Build.0 = Publish|Any CPU - {8C658E1E-83C8-4127-B8BF-27A638A45DDD}.Release|Any CPU.ActiveCfg = Release|Any CPU - {8C658E1E-83C8-4127-B8BF-27A638A45DDD}.Release|Any CPU.Build.0 = Release|Any CPU - {B16AC373-3DA8-4505-9510-110347CD635D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B16AC373-3DA8-4505-9510-110347CD635D}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B16AC373-3DA8-4505-9510-110347CD635D}.Publish|Any CPU.ActiveCfg = Debug|Any CPU - {B16AC373-3DA8-4505-9510-110347CD635D}.Publish|Any CPU.Build.0 = Debug|Any CPU - {B16AC373-3DA8-4505-9510-110347CD635D}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B16AC373-3DA8-4505-9510-110347CD635D}.Release|Any CPU.Build.0 = Release|Any CPU {27D33AB3-4DFF-48BC-8D76-FB2CDF90B707}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {27D33AB3-4DFF-48BC-8D76-FB2CDF90B707}.Debug|Any CPU.Build.0 = Debug|Any CPU {27D33AB3-4DFF-48BC-8D76-FB2CDF90B707}.Publish|Any CPU.ActiveCfg = Debug|Any CPU @@ -1504,6 +1411,7 @@ Global {E82B640C-1704-430D-8D71-FD8ED3695468} = {5A7028A7-4DDF-4E4F-84A9-37CE8F8D7E89} {6ECFDF04-2237-4A85-B114-DAA34923E9E6} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} {39EAB599-742F-417D-AF80-95F90376BB18} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0} + {232E1153-6366-4175-A982-D66B30AAD610} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} {DAC54048-A39A-4739-8307-EA5A291F2EA0} = {0D8C6358-5DAA-4EA6-A924-C268A9A21BC9} {8C3DE41C-E2C8-42B9-8638-574F8946EB0E} = {FA3720F1-C99A-49B2-9577-A940257098BF} {DB58FDD0-308E-472F-BFF5-508BC64C727E} = {0D8C6358-5DAA-4EA6-A924-C268A9A21BC9} @@ -1517,19 +1425,6 @@ Global {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} {2EB6E4C2-606D-B638-2E08-49EA2061C428} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} {78785CB1-66CF-4895-D7E5-A440DD84BE86} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} - {EA35F1B5-9148-4189-BE34-5E00AED56D65} = {6823CD5E-2ABE-41EB-B865-F86EC13F0CF9} - {0C64EC81-8116-4388-87AD-BA14D4B59974} = {D6D598DF-C17C-46F4-B2B9-CDE82E2DE132} - {03ACF9DD-00C9-4F2B-80F1-537E2151AF5F} = {D6D598DF-C17C-46F4-B2B9-CDE82E2DE132} - {2A1EC0DA-AD01-4421-AADC-1DFF65C71CCC} = {5A7028A7-4DDF-4E4F-84A9-37CE8F8D7E89} - {3F260A77-B6C9-97FD-1304-4B34DA936CF4} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} - {2756FED3-ABC1-4F58-932E-5DD05A5EE066} = 
{3F260A77-B6C9-97FD-1304-4B34DA936CF4} - {05E102FA-A766-4B10-B95A-54060AB56596} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4} - {4FA81B79-85D1-4B5D-B0D3-1EDBEF05A623} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4} - {6FE977F6-D508-4DF0-951F-749B0D5C7109} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4} - {37381352-4F10-427F-AB8A-51FEAB265201} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4} - {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4} - {8C658E1E-83C8-4127-B8BF-27A638A45DDD} = {6823CD5E-2ABE-41EB-B865-F86EC13F0CF9} - {B16AC373-3DA8-4505-9510-110347CD635D} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} {4F381919-F1BE-47D8-8558-3187ED04A84F} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0} {27D33AB3-4DFF-48BC-8D76-FB2CDF90B707} = {4F381919-F1BE-47D8-8558-3187ED04A84F} {B29A972F-A774-4140-AECF-6B577C476627} = {4F381919-F1BE-47D8-8558-3187ED04A84F} diff --git a/dotnet/nuget/nuget-package.props b/dotnet/nuget/nuget-package.props index c653597dec3a..ef25a833a718 100644 --- a/dotnet/nuget/nuget-package.props +++ b/dotnet/nuget/nuget-package.props @@ -1,7 +1,7 @@ - 1.40.1 + 1.34.0 $(VersionPrefix)-$(VersionSuffix) $(VersionPrefix) @@ -9,7 +9,7 @@ true - 1.40.0 + 1.33.0 $(NoWarn);CP0003 diff --git a/dotnet/samples/Concepts/Agents/AzureAIAgent_FileManipulation.cs b/dotnet/samples/Concepts/Agents/AzureAIAgent_FileManipulation.cs deleted file mode 100644 index cf55801420df..000000000000 --- a/dotnet/samples/Concepts/Agents/AzureAIAgent_FileManipulation.cs +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. -using Azure.AI.Projects; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.Agents.AzureAI; -using Microsoft.SemanticKernel.ChatCompletion; -using Resources; -using Agent = Azure.AI.Projects.Agent; - -namespace Agents; - -/// -/// Demonstrate using code-interpreter to manipulate and generate csv files with . -/// -public class AzureAIAgent_FileManipulation(ITestOutputHelper output) : BaseAzureAgentTest(output) -{ - [Fact] - public async Task AnalyzeCSVFileUsingAzureAIAgentAsync() - { - await using Stream stream = EmbeddedResource.ReadStream("sales.csv")!; - AgentFile fileInfo = await this.AgentsClient.UploadFileAsync(stream, AgentFilePurpose.Agents, "sales.csv"); - - // Define the agent - Agent definition = await this.AgentsClient.CreateAgentAsync( - TestConfiguration.AzureAI.ChatModelId, - tools: [new CodeInterpreterToolDefinition()], - toolResources: - new() - { - CodeInterpreter = new() - { - FileIds = { fileInfo.Id }, - } - }); - AzureAIAgent agent = new(definition, this.AgentsClient); - - // Create a chat for agent interaction. - AgentGroupChat chat = new(); - - // Respond to user input - try - { - await InvokeAgentAsync("Which segment had the most sales?"); - await InvokeAgentAsync("List the top 5 countries that generated the most profit."); - await InvokeAgentAsync("Create a tab delimited file report of profit by each country per month."); - } - finally - { - await this.AgentsClient.DeleteAgentAsync(agent.Id); - await this.AgentsClient.DeleteFileAsync(fileInfo.Id); - await chat.ResetAsync(); - } - - // Local function to invoke agent and display the conversation messages. 
- async Task InvokeAgentAsync(string input) - { - ChatMessageContent message = new(AuthorRole.User, input); - chat.AddChatMessage(new(AuthorRole.User, input)); - this.WriteAgentChatMessage(message); - - await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) - { - this.WriteAgentChatMessage(response); - await this.DownloadContentAsync(response); - } - } - } -} diff --git a/dotnet/samples/Concepts/Agents/AzureAIAgent_Streaming.cs b/dotnet/samples/Concepts/Agents/AzureAIAgent_Streaming.cs deleted file mode 100644 index de2fc685a357..000000000000 --- a/dotnet/samples/Concepts/Agents/AzureAIAgent_Streaming.cs +++ /dev/null @@ -1,183 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. -using System.ComponentModel; -using Azure.AI.Projects; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents.AzureAI; -using Microsoft.SemanticKernel.ChatCompletion; -using Agent = Azure.AI.Projects.Agent; - -namespace Agents; - -/// -/// Demonstrate consuming "streaming" message for . -/// -public class AzureAIAgent_Streaming(ITestOutputHelper output) : BaseAzureAgentTest(output) -{ - [Fact] - public async Task UseStreamingAgentAsync() - { - const string AgentName = "Parrot"; - const string AgentInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound."; - - // Define the agent - Agent definition = await this.AgentsClient.CreateAgentAsync( - TestConfiguration.AzureAI.ChatModelId, - AgentName, - null, - AgentInstructions); - AzureAIAgent agent = new(definition, this.AgentsClient); - - // Create a thread for the agent conversation. - AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata); - - // Respond to user input - await InvokeAgentAsync(agent, thread.Id, "Fortune favors the bold."); - await InvokeAgentAsync(agent, thread.Id, "I came, I saw, I conquered."); - await InvokeAgentAsync(agent, thread.Id, "Practice makes perfect."); - - // Output the entire chat history - await DisplayChatHistoryAsync(agent, thread.Id); - } - - [Fact] - public async Task UseStreamingAssistantAgentWithPluginAsync() - { - const string AgentName = "Host"; - const string AgentInstructions = "Answer questions about the menu."; - - // Define the agent - Agent definition = await this.AgentsClient.CreateAgentAsync( - TestConfiguration.AzureAI.ChatModelId, - AgentName, - null, - AgentInstructions); - AzureAIAgent agent = new(definition, this.AgentsClient) - { - Kernel = new Kernel(), - }; - - // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). - KernelPlugin plugin = KernelPluginFactory.CreateFromType(); - agent.Kernel.Plugins.Add(plugin); - - // Create a thread for the agent conversation. 
- AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata); - - // Respond to user input - await InvokeAgentAsync(agent, thread.Id, "What is the special soup and its price?"); - await InvokeAgentAsync(agent, thread.Id, "What is the special drink and its price?"); - - // Output the entire chat history - await DisplayChatHistoryAsync(agent, thread.Id); - } - - [Fact] - public async Task UseStreamingAssistantWithCodeInterpreterAsync() - { - const string AgentName = "MathGuy"; - const string AgentInstructions = "Solve math problems with code."; - - // Define the agent - Agent definition = await this.AgentsClient.CreateAgentAsync( - TestConfiguration.AzureAI.ChatModelId, - AgentName, - null, - AgentInstructions, - [new CodeInterpreterToolDefinition()]); - AzureAIAgent agent = new(definition, this.AgentsClient) - { - Kernel = new Kernel(), - }; - - // Create a thread for the agent conversation. - AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata); - - // Respond to user input - await InvokeAgentAsync(agent, thread.Id, "Is 191 a prime number?"); - await InvokeAgentAsync(agent, thread.Id, "Determine the values in the Fibonacci sequence that that are less then the value of 101"); - - // Output the entire chat history - await DisplayChatHistoryAsync(agent, thread.Id); - } - - // Local function to invoke agent and display the conversation messages. - private async Task InvokeAgentAsync(AzureAIAgent agent, string threadId, string input) - { - ChatMessageContent message = new(AuthorRole.User, input); - await agent.AddChatMessageAsync(threadId, message); - this.WriteAgentChatMessage(message); - - ChatHistory history = []; - - bool isFirst = false; - bool isCode = false; - await foreach (StreamingChatMessageContent response in agent.InvokeStreamingAsync(threadId, messages: history)) - { - if (string.IsNullOrEmpty(response.Content)) - { - StreamingFunctionCallUpdateContent? functionCall = response.Items.OfType().SingleOrDefault(); - if (functionCall != null) - { - Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? "*"}: FUNCTION CALL - {functionCall.Name}"); - } - - continue; - } - - // Differentiate between assistant and tool messages - if (isCode != (response.Metadata?.ContainsKey(AzureAIAgent.CodeInterpreterMetadataKey) ?? false)) - { - isFirst = false; - isCode = !isCode; - } - - if (!isFirst) - { - Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? 
"*"}:"); - isFirst = true; - } - - Console.WriteLine($"\t > streamed: '{response.Content}'"); - } - - foreach (ChatMessageContent content in history) - { - this.WriteAgentChatMessage(content); - } - } - - private async Task DisplayChatHistoryAsync(AzureAIAgent agent, string threadId) - { - Console.WriteLine("================================"); - Console.WriteLine("CHAT HISTORY"); - Console.WriteLine("================================"); - - ChatMessageContent[] messages = await agent.GetThreadMessagesAsync(threadId).ToArrayAsync(); - for (int index = messages.Length - 1; index >= 0; --index) - { - this.WriteAgentChatMessage(messages[index]); - } - } - - public sealed class MenuPlugin - { - [KernelFunction, Description("Provides a list of specials from the menu.")] - [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] - public string GetSpecials() - { - return @" -Special Soup: Clam Chowder -Special Salad: Cobb Salad -Special Drink: Chai Tea -"; - } - - [KernelFunction, Description("Provides the price of the requested menu item.")] - public string GetItemPrice( - [Description("The name of the menu item.")] - string menuItem) - { - return "$9.99"; - } - } -} diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs index c72ecdb79be8..48fb10ba9cdc 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs @@ -4,6 +4,7 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; namespace Agents; @@ -22,7 +23,7 @@ public async Task UseAutoFunctionInvocationFilterWithAgentInvocationAsync() { Instructions = "Answer questions about the menu.", Kernel = CreateKernelWithFilter(), - Arguments = new KernelArguments(new PromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; KernelPlugin plugin = KernelPluginFactory.CreateFromType(); @@ -69,7 +70,7 @@ public async Task UseAutoFunctionInvocationFilterWithAgentChatAsync() { Instructions = "Answer questions about the menu.", Kernel = CreateKernelWithFilter(), - Arguments = new KernelArguments(new PromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; KernelPlugin plugin = KernelPluginFactory.CreateFromType(); @@ -110,7 +111,7 @@ public async Task UseAutoFunctionInvocationFilterWithStreamingAgentInvocationAsy { Instructions = "Answer questions about the menu.", Kernel = CreateKernelWithFilter(), - Arguments = new KernelArguments(new PromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; KernelPlugin plugin = KernelPluginFactory.CreateFromType(); @@ -173,7 +174,7 @@ public async Task UseAutoFunctionInvocationFilterWithStreamingAgentChatAsync() { Instructions = "Answer questions about the menu.", Kernel = CreateKernelWithFilter(), - Arguments = new KernelArguments(new PromptExecutionSettings() 
{ FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; KernelPlugin plugin = KernelPluginFactory.CreateFromType(); diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_HistoryReducer.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_HistoryReducer.cs index 540b54777cf9..6e0816bc8470 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_HistoryReducer.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_HistoryReducer.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. - using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.History; using Microsoft.SemanticKernel.ChatCompletion; namespace Agents; diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_Serialization.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_Serialization.cs index 1bc16f452d6c..a0494c67bd70 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_Serialization.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_Serialization.cs @@ -3,6 +3,7 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; namespace Agents; /// @@ -23,7 +24,7 @@ public async Task SerializeAndRestoreAgentGroupChatAsync() Instructions = HostInstructions, Name = HostName, Kernel = this.CreateKernelWithChatCompletion(), - Arguments = new KernelArguments(new PromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). 
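The hunks above and below swap the base `PromptExecutionSettings` for the connector-specific `OpenAIPromptExecutionSettings` when enabling automatic function calling on a `ChatCompletionAgent`. A minimal, self-contained sketch of that pattern follows; the model id, API key, and the trimmed-down `MenuPlugin` are illustrative placeholders rather than code taken from this patch.

```csharp
// Illustrative sketch only: the model id, API key, and this trimmed MenuPlugin are placeholders.
using System;
using System.ComponentModel;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;

Kernel kernel = Kernel.CreateBuilder()
    .AddOpenAIChatCompletion(modelId: "gpt-4o", apiKey: "<api-key>")
    .Build();
kernel.Plugins.Add(KernelPluginFactory.CreateFromType<MenuPlugin>());

// The connector-specific settings type keeps OpenAI-only options available while
// FunctionChoiceBehavior.Auto() turns on automatic function invocation.
ChatCompletionAgent agent = new()
{
    Name = "Host",
    Instructions = "Answer questions about the menu.",
    Kernel = kernel,
    Arguments = new KernelArguments(
        new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }),
};

ChatHistory chat = [new ChatMessageContent(AuthorRole.User, "What is the special soup and its price?")];
await foreach (ChatMessageContent response in agent.InvokeAsync(chat))
{
    Console.WriteLine($"{response.Role}: {response.Content}");
}

public sealed class MenuPlugin
{
    [KernelFunction, Description("Provides a list of specials from the menu.")]
    public string GetSpecials() => "Special Soup: Clam Chowder ($9.99)";
}
```

Using the connector-specific type also leaves OpenAI-only options such as `ResponseFormat` available alongside the function-choice behavior, which is why the samples prefer it over the base settings class.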
diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs index 46ea8dea2246..783524adf7f1 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs @@ -2,6 +2,7 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; namespace Agents; @@ -28,7 +29,7 @@ public async Task UseServiceSelectionWithChatCompletionAgentAsync() new() { Kernel = kernel, - Arguments = new KernelArguments(new PromptExecutionSettings() { ServiceId = ServiceKeyGood }), + Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { ServiceId = ServiceKeyGood }), }; // Define the agent targeting ServiceId = ServiceKeyBad @@ -36,7 +37,7 @@ public async Task UseServiceSelectionWithChatCompletionAgentAsync() new() { Kernel = kernel, - Arguments = new KernelArguments(new PromptExecutionSettings() { ServiceId = ServiceKeyBad }), + Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { ServiceId = ServiceKeyBad }), }; // Define the agent with no explicit ServiceId defined @@ -56,21 +57,21 @@ public async Task UseServiceSelectionWithChatCompletionAgentAsync() // Invoke agent with override arguments where ServiceId = ServiceKeyGood: Expect agent response Console.WriteLine("\n[Bad Agent: Good ServiceId Override]"); - await InvokeAgentAsync(agentBad, new(new PromptExecutionSettings() { ServiceId = ServiceKeyGood })); + await InvokeAgentAsync(agentBad, new(new OpenAIPromptExecutionSettings() { ServiceId = ServiceKeyGood })); // Invoke agent with override arguments where ServiceId = ServiceKeyBad: Expect failure due to invalid service key Console.WriteLine("\n[Good Agent: Bad ServiceId Override]"); - await InvokeAgentAsync(agentGood, new(new PromptExecutionSettings() { ServiceId = ServiceKeyBad })); + await InvokeAgentAsync(agentGood, new(new OpenAIPromptExecutionSettings() { ServiceId = ServiceKeyBad })); Console.WriteLine("\n[Default Agent: Bad ServiceId Override]"); - await InvokeAgentAsync(agentDefault, new(new PromptExecutionSettings() { ServiceId = ServiceKeyBad })); + await InvokeAgentAsync(agentDefault, new(new OpenAIPromptExecutionSettings() { ServiceId = ServiceKeyBad })); // Invoke agent with override arguments with no explicit ServiceId: Expect agent response Console.WriteLine("\n[Good Agent: No ServiceId Override]"); - await InvokeAgentAsync(agentGood, new(new PromptExecutionSettings())); + await InvokeAgentAsync(agentGood, new(new OpenAIPromptExecutionSettings())); Console.WriteLine("\n[Bad Agent: No ServiceId Override]"); - await InvokeAgentAsync(agentBad, new(new PromptExecutionSettings())); + await InvokeAgentAsync(agentBad, new(new OpenAIPromptExecutionSettings())); Console.WriteLine("\n[Default Agent: No ServiceId Override]"); - await InvokeAgentAsync(agentDefault, new(new PromptExecutionSettings())); + await InvokeAgentAsync(agentDefault, new(new OpenAIPromptExecutionSettings())); // Local function to invoke agent and display the conversation messages. async Task InvokeAgentAsync(ChatCompletionAgent agent, KernelArguments? 
arguments = null) diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs index ae9d965ff9a9..6d11dd80ff91 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs @@ -3,6 +3,7 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; namespace Agents; @@ -49,7 +50,7 @@ public async Task UseStreamingChatCompletionAgentWithPluginAsync() Name = "Host", Instructions = MenuInstructions, Kernel = this.CreateKernelWithChatCompletion(), - Arguments = new KernelArguments(new PromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_Templating.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_Templating.cs index 7372b7df19bc..1bcf2adbe758 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_Templating.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_Templating.cs @@ -50,9 +50,7 @@ await InvokeChatCompletionAgentWithTemplateAsync( """ Write a one verse poem on the requested topic in the style of {{$style}}. Always state the requested style of the poem. - """, - PromptTemplateConfig.SemanticKernelTemplateFormat, - new KernelPromptTemplateFactory()); + """); } [Fact] @@ -81,8 +79,8 @@ Always state the requested style of the poem. private async Task InvokeChatCompletionAgentWithTemplateAsync( string instructionTemplate, - string templateFormat, - IPromptTemplateFactory templateFactory) + string? templateFormat = null, + IPromptTemplateFactory? templateFactory = null) { // Define the agent PromptTemplateConfig templateConfig = diff --git a/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs b/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs index 6f07fb739190..dc9178156509 100644 --- a/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs +++ b/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs @@ -98,7 +98,7 @@ public async Task NestedChatWithAggregatorAgentAsync() Console.WriteLine($"! {Model}"); OpenAIPromptExecutionSettings jsonSettings = new() { ResponseFormat = ChatResponseFormat.CreateJsonObjectFormat() }; - PromptExecutionSettings autoInvokeSettings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; + OpenAIPromptExecutionSettings autoInvokeSettings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; ChatCompletionAgent internalLeaderAgent = CreateAgent(InternalLeaderName, InternalLeaderInstructions); ChatCompletionAgent internalGiftIdeaAgent = CreateAgent(InternalGiftIdeaAgentName, InternalGiftIdeaAgentInstructions); diff --git a/dotnet/samples/Concepts/Agents/DeclarativeAgents.cs b/dotnet/samples/Concepts/Agents/DeclarativeAgents.cs index a8e98f2e107e..c2a3fd377071 100644 --- a/dotnet/samples/Concepts/Agents/DeclarativeAgents.cs +++ b/dotnet/samples/Concepts/Agents/DeclarativeAgents.cs @@ -1,4 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. 
+using System.Text; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; @@ -8,13 +9,11 @@ namespace Agents; public class DeclarativeAgents(ITestOutputHelper output) : BaseAgentsTest(output) { - [InlineData( - "SchedulingAssistant.json", - "Read the body of my last five emails, if any contain a meeting request for today, check that it's already on my calendar, if not, call out which email it is.")] + [InlineData("SchedulingAssistant.json", "Read the body of my last five emails, if any contain a meeting request for today, check that it's already on my calendar, if not, call out which email it is.")] [Theory] public async Task LoadsAgentFromDeclarativeAgentManifestAsync(string agentFileName, string input) { - var kernel = this.CreateKernelWithChatCompletion(); + var kernel = CreateKernel(); kernel.AutoFunctionInvocationFilters.Add(new ExpectedSchemaFunctionFilter()); var manifestLookupDirectory = Path.Combine(Directory.GetCurrentDirectory(), "..", "..", "..", "Resources", "DeclarativeAgents"); var manifestFilePath = Path.Combine(manifestLookupDirectory, agentFileName); @@ -31,8 +30,9 @@ public async Task LoadsAgentFromDeclarativeAgentManifestAsync(string agentFileNa Assert.NotNull(agent.Instructions); Assert.NotEmpty(agent.Instructions); - ChatHistory chatHistory = [new ChatMessageContent(AuthorRole.User, input)]; - + ChatMessageContent message = new(AuthorRole.User, input); + ChatHistory chatHistory = [message]; + StringBuilder sb = new(); var kernelArguments = new KernelArguments(new PromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto( @@ -42,14 +42,23 @@ public async Task LoadsAgentFromDeclarativeAgentManifestAsync(string agentFileNa } ) }); - - var responses = await agent.InvokeAsync(chatHistory, kernelArguments).ToArrayAsync(); - Assert.NotEmpty(responses); + await foreach (ChatMessageContent response in agent.InvokeAsync(chatHistory, kernelArguments)) + { + chatHistory.Add(response); + sb.Append(response.Content); + } + Assert.NotEmpty(chatHistory.Skip(1)); } + private Kernel CreateKernel() + { + IKernelBuilder builder = Kernel.CreateBuilder(); + + base.AddChatCompletionToKernel(builder); + return builder.Build(); + } private sealed class ExpectedSchemaFunctionFilter : IAutoFunctionInvocationFilter - { - //TODO: this eventually needs to be added to all CAP or DA but we're still discussing where should those facilitators live + {//TODO: this eventually needs to be added to all CAP or DA but we're still discussing where should those facilitators live public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) { await next(context); diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs index 0895308f0215..159441147f77 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs @@ -4,14 +4,13 @@ using Microsoft.SemanticKernel.Agents.Chat; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Assistants; namespace Agents; /// /// Demonstrate that two different agent types are able to participate in the same conversation. /// In this case a and participate. 
/// -public class MixedChat_Agents(ITestOutputHelper output) : BaseAssistantTest(output) +public class MixedChat_Agents(ITestOutputHelper output) : BaseAgentsTest(output) { private const string ReviewerName = "ArtDirector"; private const string ReviewerInstructions = @@ -45,16 +44,16 @@ public async Task ChatWithOpenAIAssistantAgentAndChatCompletionAgentAsync() Kernel = this.CreateKernelWithChatCompletion(), }; - // Define the assistant - Assistant assistant = - await this.AssistantClient.CreateAssistantAsync( - this.Model, - name: CopyWriterName, - instructions: CopyWriterInstructions, - metadata: SampleMetadata); - - // Create the agent - OpenAIAssistantAgent agentWriter = new(assistant, this.AssistantClient); + OpenAIAssistantAgent agentWriter = + await OpenAIAssistantAgent.CreateAsync( + clientProvider: this.GetClientProvider(), + definition: new OpenAIAssistantDefinition(this.Model) + { + Instructions = CopyWriterInstructions, + Name = CopyWriterName, + Metadata = AssistantSampleMetadata, + }, + kernel: new Kernel()); // Create a chat for agent interaction. AgentGroupChat chat = diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Files.cs b/dotnet/samples/Concepts/Agents/MixedChat_Files.cs index 56ff0f331f0b..4f12657e0d7a 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Files.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Files.cs @@ -3,7 +3,7 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Assistants; +using OpenAI.Files; using Resources; namespace Agents; @@ -12,27 +12,36 @@ namespace Agents; /// Demonstrate agent interacts with /// when it produces file output. /// -public class MixedChat_Files(ITestOutputHelper output) : BaseAssistantTest(output) +public class MixedChat_Files(ITestOutputHelper output) : BaseAgentsTest(output) { private const string SummaryInstructions = "Summarize the entire conversation for the user in natural language."; [Fact] public async Task AnalyzeFileAndGenerateReportAsync() { - await using Stream stream = EmbeddedResource.ReadStream("30-user-context.txt")!; - string fileId = await this.Client.UploadAssistantFileAsync(stream, "30-user-context.txt"); + OpenAIClientProvider provider = this.GetClientProvider(); - // Define the agents - // Define the assistant - Assistant assistant = - await this.AssistantClient.CreateAssistantAsync( - this.Model, - enableCodeInterpreter: true, - codeInterpreterFileIds: [fileId], - metadata: SampleMetadata); + OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient(); + + OpenAIFile uploadFile = + await fileClient.UploadFileAsync( + new BinaryData(await EmbeddedResource.ReadAllAsync("30-user-context.txt")), + "30-user-context.txt", + FileUploadPurpose.Assistants); - // Create the agent - OpenAIAssistantAgent analystAgent = new(assistant, this.AssistantClient); + Console.WriteLine(this.ApiKey); + + // Define the agents + OpenAIAssistantAgent analystAgent = + await OpenAIAssistantAgent.CreateAsync( + provider, + definition: new OpenAIAssistantDefinition(this.Model) + { + EnableCodeInterpreter = true, + CodeInterpreterFileIds = [uploadFile.Id], // Associate uploaded file with assistant code-interpreter + Metadata = AssistantSampleMetadata, + }, + kernel: new Kernel()); ChatCompletionAgent summaryAgent = new() @@ -57,8 +66,8 @@ Create a tab delimited file report of the ordered (descending) frequency distrib } finally { - await this.AssistantClient.DeleteAssistantAsync(analystAgent.Id); - await 
this.Client.DeleteFileAsync(fileId); + await analystAgent.DeleteAsync(); + await fileClient.DeleteFileAsync(uploadFile.Id); } // Local function to invoke agent and display the conversation messages. @@ -74,7 +83,7 @@ async Task InvokeAgentAsync(Agent agent, string? input = null) await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { this.WriteAgentChatMessage(response); - await this.DownloadResponseContentAsync(response); + await this.DownloadResponseContentAsync(fileClient, response); } } } diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Images.cs b/dotnet/samples/Concepts/Agents/MixedChat_Images.cs index 158da60e418a..03f047c756bd 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Images.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Images.cs @@ -3,7 +3,7 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Assistants; +using OpenAI.Files; namespace Agents; @@ -11,7 +11,7 @@ namespace Agents; /// Demonstrate agent interacts with /// when it produces image output. ///
-public class MixedChat_Images(ITestOutputHelper output) : BaseAssistantTest(output) +public class MixedChat_Images(ITestOutputHelper output) : BaseAgentsTest(output) { private const string AnalystName = "Analyst"; private const string AnalystInstructions = "Create charts as requested without explanation."; @@ -22,17 +22,22 @@ public class MixedChat_Images(ITestOutputHelper output) : BaseAssistantTest(outp [Fact] public async Task AnalyzeDataAndGenerateChartAsync() { - // Define the assistant - Assistant assistant = - await this.AssistantClient.CreateAssistantAsync( - this.Model, - name: AnalystName, - instructions: AnalystInstructions, - enableCodeInterpreter: true, - metadata: SampleMetadata); + OpenAIClientProvider provider = this.GetClientProvider(); - // Create the agent - OpenAIAssistantAgent analystAgent = new(assistant, this.AssistantClient); + OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient(); + + // Define the agents + OpenAIAssistantAgent analystAgent = + await OpenAIAssistantAgent.CreateAsync( + provider, + definition: new OpenAIAssistantDefinition(this.Model) + { + Instructions = AnalystInstructions, + Name = AnalystName, + EnableCodeInterpreter = true, + Metadata = AssistantSampleMetadata, + }, + kernel: new Kernel()); ChatCompletionAgent summaryAgent = new() @@ -70,7 +75,7 @@ await InvokeAgentAsync( } finally { - await this.AssistantClient.DeleteAssistantAsync(analystAgent.Id); + await analystAgent.DeleteAsync(); } // Local function to invoke agent and display the conversation messages. @@ -86,7 +91,7 @@ async Task InvokeAgentAsync(Agent agent, string? input = null) await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { this.WriteAgentChatMessage(response); - await this.DownloadResponseImageAsync(response); + await this.DownloadResponseImageAsync(fileClient, response); } } } diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs b/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs index 431dcc982a5e..7c9a2490d3e0 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs @@ -3,14 +3,13 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Assistants; namespace Agents; /// /// Demonstrate the use of . 
/// -public class MixedChat_Reset(ITestOutputHelper output) : BaseAssistantTest(output) +public class MixedChat_Reset(ITestOutputHelper output) : BaseAgentsTest(output) { private const string AgentInstructions = """ @@ -21,15 +20,18 @@ The user may either provide information or query on information previously provi [Fact] public async Task ResetChatAsync() { - // Define the assistant - Assistant assistant = - await this.AssistantClient.CreateAssistantAsync( - this.Model, - instructions: AgentInstructions, - metadata: SampleMetadata); + OpenAIClientProvider provider = this.GetClientProvider(); - // Create the agent - OpenAIAssistantAgent assistantAgent = new(assistant, this.AssistantClient); + // Define the agents + OpenAIAssistantAgent assistantAgent = + await OpenAIAssistantAgent.CreateAsync( + provider, + definition: new OpenAIAssistantDefinition(this.Model) + { + Name = nameof(OpenAIAssistantAgent), + Instructions = AgentInstructions, + }, + kernel: new Kernel()); ChatCompletionAgent chatAgent = new() @@ -62,7 +64,7 @@ await this.AssistantClient.CreateAssistantAsync( finally { await chat.ResetAsync(); - await this.AssistantClient.DeleteAssistantAsync(assistantAgent.Id); + await assistantAgent.DeleteAsync(); } // Local function to invoke agent and display the conversation messages. diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Serialization.cs b/dotnet/samples/Concepts/Agents/MixedChat_Serialization.cs index 4979ceedacb1..27212e292366 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Serialization.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Serialization.cs @@ -4,14 +4,13 @@ using Microsoft.SemanticKernel.Agents.Chat; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Assistants; namespace Agents; /// /// Demonstrate the serialization of with a /// and an . /// -public class MixedChat_Serialization(ITestOutputHelper output) : BaseAssistantTest(output) +public class MixedChat_Serialization(ITestOutputHelper output) : BaseAgentsTest(output) { private const string TranslatorName = "Translator"; private const string TranslatorInstructions = @@ -40,16 +39,15 @@ public async Task SerializeAndRestoreAgentGroupChatAsync() Kernel = this.CreateKernelWithChatCompletion(), }; - // Define the assistant - Assistant assistant = - await this.AssistantClient.CreateAssistantAsync( - this.Model, - name: CounterName, - instructions: CounterInstructions, - metadata: SampleMetadata); - - // Create the agent - OpenAIAssistantAgent agentCounter = new(assistant, this.AssistantClient); + OpenAIAssistantAgent agentCounter = + await OpenAIAssistantAgent.CreateAsync( + kernel: new(), + clientProvider: this.GetClientProvider(), + definition: new(this.Model) + { + Instructions = CounterInstructions, + Name = CounterName, + }); AgentGroupChat chat = CreateGroupChat(); diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs b/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs index fc28c3c683dd..c9364bc2b2a9 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs @@ -4,7 +4,6 @@ using Microsoft.SemanticKernel.Agents.Chat; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Assistants; namespace Agents; @@ -12,7 +11,7 @@ namespace Agents; /// Demonstrate consuming "streaming" message for and /// both participating in an . ///
-public class MixedChat_Streaming(ITestOutputHelper output) : BaseAssistantTest(output) +public class MixedChat_Streaming(ITestOutputHelper output) : BaseAgentsTest(output) { private const string ReviewerName = "ArtDirector"; private const string ReviewerInstructions = @@ -46,16 +45,16 @@ public async Task UseStreamingAgentChatAsync() Kernel = this.CreateKernelWithChatCompletion(), }; - // Define the assistant - Assistant assistant = - await this.AssistantClient.CreateAssistantAsync( - this.Model, - name: CopyWriterName, - instructions: CopyWriterInstructions, - metadata: SampleMetadata); - - // Create the agent - OpenAIAssistantAgent agentWriter = new(assistant, this.AssistantClient); + OpenAIAssistantAgent agentWriter = + await OpenAIAssistantAgent.CreateAsync( + clientProvider: this.GetClientProvider(), + definition: new OpenAIAssistantDefinition(this.Model) + { + Instructions = CopyWriterInstructions, + Name = CopyWriterName, + Metadata = AssistantSampleMetadata, + }, + kernel: new Kernel()); // Create a chat for agent interaction. AgentGroupChat chat = diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs index f23e7ab952b7..83ea083ec674 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs @@ -3,7 +3,7 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Assistants; +using OpenAI.Files; namespace Agents; @@ -11,22 +11,30 @@ namespace Agents; /// Demonstrate using code-interpreter with to /// produce image content displays the requested charts. ///
-public class OpenAIAssistant_ChartMaker(ITestOutputHelper output) : BaseAssistantTest(output) +public class OpenAIAssistant_ChartMaker(ITestOutputHelper output) : BaseAgentsTest(output) { + private const string AgentName = "ChartMaker"; + private const string AgentInstructions = "Create charts as requested without explanation."; + [Fact] public async Task GenerateChartWithOpenAIAssistantAgentAsync() { - // Define the assistant - Assistant assistant = - await this.AssistantClient.CreateAssistantAsync( - this.Model, - "ChartMaker", - instructions: "Create charts as requested without explanation.", - enableCodeInterpreter: true, - metadata: SampleMetadata); + OpenAIClientProvider provider = this.GetClientProvider(); + + OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient(); - // Create the agent - OpenAIAssistantAgent agent = new(assistant, this.AssistantClient); + // Define the agent + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + provider, + definition: new OpenAIAssistantDefinition(this.Model) + { + Instructions = AgentInstructions, + Name = AgentName, + EnableCodeInterpreter = true, + Metadata = AssistantSampleMetadata, + }, + kernel: new()); // Create a chat for agent interaction. AgentGroupChat chat = new(); @@ -50,7 +58,7 @@ Sum 426 1622 856 2904 } finally { - await this.AssistantClient.DeleteAssistantAsync(agent.Id); + await agent.DeleteAsync(); } // Local function to invoke agent and display the conversation messages. @@ -63,7 +71,7 @@ async Task InvokeAgentAsync(string input) await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { this.WriteAgentChatMessage(response); - await this.DownloadResponseImageAsync(response); + await this.DownloadResponseImageAsync(fileClient, response); } } } diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs index 915861ab2a99..a0d48bf94eaa 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs @@ -3,7 +3,7 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Assistants; +using OpenAI.Files; using Resources; namespace Agents; @@ -11,24 +11,32 @@ namespace Agents; /// /// Demonstrate using code-interpreter to manipulate and generate csv files with . 
/// -public class OpenAIAssistant_FileManipulation(ITestOutputHelper output) : BaseAssistantTest(output) +public class OpenAIAssistant_FileManipulation(ITestOutputHelper output) : BaseAgentsTest(output) { [Fact] public async Task AnalyzeCSVFileUsingOpenAIAssistantAgentAsync() { - await using Stream stream = EmbeddedResource.ReadStream("sales.csv")!; - string fileId = await this.Client.UploadAssistantFileAsync(stream, "sales.csv"); + OpenAIClientProvider provider = this.GetClientProvider(); - // Define the assistant - Assistant assistant = - await this.AssistantClient.CreateAssistantAsync( - this.Model, - enableCodeInterpreter: true, - codeInterpreterFileIds: [fileId], - metadata: SampleMetadata); + OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient(); - // Create the agent - OpenAIAssistantAgent agent = new(assistant, this.AssistantClient); + OpenAIFile uploadFile = + await fileClient.UploadFileAsync( + new BinaryData(await EmbeddedResource.ReadAllAsync("sales.csv")!), + "sales.csv", + FileUploadPurpose.Assistants); + + // Define the agent + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + provider, + definition: new OpenAIAssistantDefinition(this.Model) + { + EnableCodeInterpreter = true, + CodeInterpreterFileIds = [uploadFile.Id], + Metadata = AssistantSampleMetadata, + }, + kernel: new Kernel()); // Create a chat for agent interaction. AgentGroupChat chat = new(); @@ -42,8 +50,8 @@ await this.AssistantClient.CreateAssistantAsync( } finally { - await this.AssistantClient.DeleteAssistantAsync(agent.Id); - await this.Client.DeleteFileAsync(fileId); + await agent.DeleteAsync(); + await fileClient.DeleteFileAsync(uploadFile.Id); } // Local function to invoke agent and display the conversation messages. @@ -56,7 +64,7 @@ async Task InvokeAgentAsync(string input) await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { this.WriteAgentChatMessage(response); - await this.DownloadResponseContentAsync(response); + await this.DownloadResponseContentAsync(fileClient, response); } } } diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs index a1493025b5a4..1381378a06c8 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FunctionFilters.cs @@ -5,7 +5,6 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Assistants; namespace Agents; @@ -14,8 +13,10 @@ namespace Agents; /// filters with /// via . 
/// -public class OpenAIAssistant_FunctionFilters(ITestOutputHelper output) : BaseAssistantTest(output) +public class OpenAIAssistant_FunctionFilters(ITestOutputHelper output) : BaseAgentsTest(output) { + protected override bool ForceOpenAI => true; // %%% REMOVE + [Fact] public async Task UseFunctionInvocationFilterAsync() { @@ -79,7 +80,7 @@ private async Task InvokeAssistantAsync(OpenAIAssistantAgent agent) finally { await chat.ResetAsync(); - await this.AssistantClient.DeleteAssistantAsync(agent.Id); + await agent.DeleteAsync(); } } @@ -102,7 +103,7 @@ private async Task InvokeAssistantStreamingAsync(OpenAIAssistantAgent agent) finally { await chat.ResetAsync(); - await this.AssistantClient.DeleteAssistantAsync(agent.Id); + await agent.DeleteAsync(); } } @@ -119,19 +120,19 @@ private void WriteChatHistory(IEnumerable history) private async Task CreateAssistantAsync(Kernel kernel) { - // Define the assistant - Assistant assistant = - await this.AssistantClient.CreateAssistantAsync( - this.Model, - instructions: "Answer questions about the menu.", - metadata: SampleMetadata); - - // Create the agent + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + this.GetClientProvider(), + new OpenAIAssistantDefinition(base.Model) + { + Instructions = "Answer questions about the menu.", + Metadata = AssistantSampleMetadata, + }, + kernel: kernel + ); + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); - OpenAIAssistantAgent agent = new(assistant, this.AssistantClient, [plugin]) - { - Kernel = kernel - }; + agent.Kernel.Plugins.Add(plugin); return agent; } diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs index 493b920f0d9d..39ff0f0fb97c 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs @@ -3,31 +3,35 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Assistants; namespace Agents; /// /// Demonstrate consuming "streaming" message for . /// -public class OpenAIAssistant_Streaming(ITestOutputHelper output) : BaseAssistantTest(output) +public class OpenAIAssistant_Streaming(ITestOutputHelper output) : BaseAgentsTest(output) { [Fact] public async Task UseStreamingAssistantAgentAsync() { - // Define the assistant - Assistant assistant = - await this.AssistantClient.CreateAssistantAsync( - this.Model, - name: "Parrot", - instructions: "Repeat the user message in the voice of a pirate and then end with a parrot sound.", - metadata: SampleMetadata); - - // Create the agent - OpenAIAssistantAgent agent = new(assistant, this.AssistantClient); + const string AgentName = "Parrot"; + const string AgentInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound."; + + // Define the agent + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + kernel: new(), + clientProvider: this.GetClientProvider(), + definition: new OpenAIAssistantDefinition(this.Model) + { + Instructions = AgentInstructions, + Name = AgentName, + EnableCodeInterpreter = true, + Metadata = AssistantSampleMetadata, + }); // Create a thread for the agent conversation. 
- string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata); + string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); // Respond to user input await InvokeAgentAsync(agent, threadId, "Fortune favors the bold."); @@ -41,20 +45,27 @@ await this.AssistantClient.CreateAssistantAsync( [Fact] public async Task UseStreamingAssistantAgentWithPluginAsync() { - // Define the assistant - Assistant assistant = - await this.AssistantClient.CreateAssistantAsync( - this.Model, - name: "Host", - instructions: "Answer questions about the menu.", - metadata: SampleMetadata); - - // Create the agent + const string AgentName = "Host"; + const string AgentInstructions = "Answer questions about the menu."; + + // Define the agent + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + kernel: new(), + clientProvider: this.GetClientProvider(), + definition: new OpenAIAssistantDefinition(this.Model) + { + Instructions = AgentInstructions, + Name = AgentName, + Metadata = AssistantSampleMetadata, + }); + + // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). KernelPlugin plugin = KernelPluginFactory.CreateFromType(); - OpenAIAssistantAgent agent = new(assistant, this.AssistantClient, [plugin]); + agent.Kernel.Plugins.Add(plugin); // Create a thread for the agent conversation. - string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata); + string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); // Respond to user input await InvokeAgentAsync(agent, threadId, "What is the special soup and its price?"); @@ -67,20 +78,24 @@ await this.AssistantClient.CreateAssistantAsync( [Fact] public async Task UseStreamingAssistantWithCodeInterpreterAsync() { - // Define the assistant - Assistant assistant = - await this.AssistantClient.CreateAssistantAsync( - this.Model, - name: "MathGuy", - instructions: "Solve math problems with code.", - enableCodeInterpreter: true, - metadata: SampleMetadata); - - // Create the agent - OpenAIAssistantAgent agent = new(assistant, this.AssistantClient); + const string AgentName = "MathGuy"; + const string AgentInstructions = "Solve math problems with code."; + + // Define the agent + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + kernel: new(), + clientProvider: this.GetClientProvider(), + definition: new OpenAIAssistantDefinition(this.Model) + { + Instructions = AgentInstructions, + Name = AgentName, + EnableCodeInterpreter = true, + Metadata = AssistantSampleMetadata, + }); // Create a thread for the agent conversation. 
- string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata); + string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); // Respond to user input await InvokeAgentAsync(agent, threadId, "Is 191 a prime number?"); diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs index 3bb5a1d04c46..3937635203a4 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs @@ -4,14 +4,13 @@ using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.PromptTemplates.Handlebars; using Microsoft.SemanticKernel.PromptTemplates.Liquid; -using OpenAI.Assistants; namespace Agents; /// /// Demonstrate parameterized template instruction for . /// -public class OpenAIAssistant_Templating(ITestOutputHelper output) : BaseAssistantTest(output) +public class OpenAIAssistant_Templating(ITestOutputHelper output) : BaseAgentsTest(output) { private readonly static (string Input, string? Style)[] s_inputs = [ @@ -24,25 +23,23 @@ private readonly static (string Input, string? Style)[] s_inputs = [Fact] public async Task InvokeAgentWithInstructionsAsync() { - // Define the assistant - Assistant assistant = - await this.AssistantClient.CreateAssistantAsync( - this.Model, - instructions: - """ - Write a one verse poem on the requested topic in the styles of {{$style}}. - Always state the requested style of the poem. - """, - metadata: SampleMetadata); - - // Create the agent - OpenAIAssistantAgent agent = new(assistant, this.AssistantClient) - { - Arguments = - { - {"style", "haiku"} - }, - }; + // Instruction based template always proceseed by KernelPromptTemplateFactory + OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateAsync( + clientProvider: this.GetClientProvider(), + definition: new OpenAIAssistantDefinition(this.Model) + { + Instructions = + """ + Write a one verse poem on the requested topic in the styles of {{$style}}. + Always state the requested style of the poem. + """, + Metadata = AssistantSampleMetadata + }, + kernel: new Kernel(), + defaultArguments: new KernelArguments() + { + {"style", "haiku"} + }); await InvokeAssistantAgentWithTemplateAsync(agent); } @@ -55,9 +52,7 @@ await InvokeAssistantAgentWithTemplateAsync( """ Write a one verse poem on the requested topic in the styles of {{$style}}. Always state the requested style of the poem. - """, - PromptTemplateConfig.SemanticKernelTemplateFormat, - new KernelPromptTemplateFactory()); + """); } [Fact] @@ -86,30 +81,27 @@ Always state the requested style of the poem. private async Task InvokeAssistantAgentWithTemplateAsync( string instructionTemplate, - string templateFormat, - IPromptTemplateFactory templateFactory) + string? templateFormat = null, + IPromptTemplateFactory? 
templateFactory = null) { - PromptTemplateConfig config = new() - { - Template = instructionTemplate, - TemplateFormat = templateFormat, - }; - - // Define the assistant - Assistant assistant = - await this.AssistantClient.CreateAssistantFromTemplateAsync( - this.Model, - config, - metadata: SampleMetadata); - - // Create the agent - OpenAIAssistantAgent agent = new(assistant, this.AssistantClient, plugins: null, templateFactory, templateFormat) - { - Arguments = - { - {"style", "haiku"} - }, - }; + // Define the agent + OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateFromTemplateAsync( + clientProvider: this.GetClientProvider(), + capabilities: new OpenAIAssistantCapabilities(this.Model) + { + Metadata = AssistantSampleMetadata + }, + kernel: new Kernel(), + defaultArguments: new KernelArguments() + { + {"style", "haiku"} + }, + templateConfig: new PromptTemplateConfig + { + Template = instructionTemplate, + TemplateFormat = templateFormat, + }, + templateFactory); await InvokeAssistantAgentWithTemplateAsync(agent); } @@ -117,7 +109,7 @@ await this.AssistantClient.CreateAssistantFromTemplateAsync( private async Task InvokeAssistantAgentWithTemplateAsync(OpenAIAssistantAgent agent) { // Create a thread for the agent conversation. - string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata); + string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); try { @@ -143,8 +135,8 @@ private async Task InvokeAssistantAgentWithTemplateAsync(OpenAIAssistantAgent ag } finally { - await this.AssistantClient.DeleteThreadAsync(threadId); - await this.AssistantClient.DeleteAssistantAsync(agent.Id); + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); } } } diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletion.cs index 2763bb6101b0..e42600419a88 100644 --- a/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletion.cs +++ b/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletion.cs @@ -8,11 +8,7 @@ namespace ChatCompletion; -/// -/// These examples demonstrate different ways of using chat completion with Azure Foundry or GitHub models. -/// Azure AI Foundry: https://ai.azure.com/explore/models -/// GitHub Models: https://github.com/marketplace?type=models -/// +// The following example shows how to use Semantic Kernel with Azure AI Inference / Azure AI Studio public class AzureAIInference_ChatCompletion(ITestOutputHelper output) : BaseTest(output) { [Fact] diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletionStreaming.cs index 8b164439f9e2..f7dbe9191167 100644 --- a/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletionStreaming.cs +++ b/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletionStreaming.cs @@ -9,9 +9,7 @@ namespace ChatCompletion; /// -/// These examples demonstrate different ways of using streaming chat completion with Azure Foundry or GitHub models. -/// Azure AI Foundry: https://ai.azure.com/explore/models -/// GitHub Models: https://github.com/marketplace?type=models +/// These examples demonstrate the ways different content types are streamed by OpenAI LLM via the chat completion service. 
/// public class AzureAIInference_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) { @@ -122,6 +120,36 @@ private async Task StartStreamingChatAsync(IChatCompletionService chatCompletion await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); } + /// + /// Streams the message output from the chat completion service. + /// + /// The chat completion service instance. + /// The chat history instance. + /// The author role. + private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) + { + bool roleWritten = false; + string fullMessage = string.Empty; + + await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + if (!roleWritten && chatUpdate.Role.HasValue) + { + Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); + roleWritten = true; + } + + if (chatUpdate.Content is { Length: > 0 }) + { + fullMessage += chatUpdate.Content; + Console.Write(chatUpdate.Content); + } + } + + Console.WriteLine("\n------------------------"); + chatHistory.AddMessage(authorRole, fullMessage); + } + /// /// Outputs the chat history by streaming the message output from the kernel. /// diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion.cs index f3a52b5c5428..c27625437779 100644 --- a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion.cs +++ b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion.cs @@ -8,46 +8,27 @@ namespace ChatCompletion; -/// -/// These examples demonstrate different ways of using chat completion with Azure OpenAI API. -/// +// The following example shows how to use Semantic Kernel with Azure OpenAI API public class AzureOpenAI_ChatCompletion(ITestOutputHelper output) : BaseTest(output) { - /// - /// Sample showing how to use with chat completion and chat prompt syntax. - /// [Fact] public async Task ChatPromptAsync() { - Console.WriteLine("======== Azure Open AI - Chat Completion ========"); - - Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName); - Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint); + Assert.NotNull(TestConfiguration.Ollama.ModelId); StringBuilder chatPrompt = new(""" You are a librarian, expert about books Hi, I'm looking for book suggestions """); - var kernelBuilder = Kernel.CreateBuilder(); - if (string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.ApiKey)) - { - kernelBuilder.AddAzureOpenAIChatCompletion( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - credentials: new DefaultAzureCredential(), - modelId: TestConfiguration.AzureOpenAI.ChatModelId); - } - else - { - kernelBuilder.AddAzureOpenAIChatCompletion( + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion( deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, endpoint: TestConfiguration.AzureOpenAI.Endpoint, apiKey: TestConfiguration.AzureOpenAI.ApiKey, - modelId: TestConfiguration.AzureOpenAI.ChatModelId); - } + modelId: TestConfiguration.AzureOpenAI.ChatModelId) + .Build(); - var kernel = kernelBuilder.Build(); var reply = await kernel.InvokePromptAsync(chatPrompt.ToString()); chatPrompt.AppendLine($""); @@ -58,30 +39,40 @@ public async Task ChatPromptAsync() Console.WriteLine(reply); } - /// - /// Sample showing how to use directly with a . 
- /// [Fact] public async Task ServicePromptAsync() { Console.WriteLine("======== Azure Open AI - Chat Completion ========"); - Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName); - Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint); + AzureOpenAIChatCompletionService chatCompletionService = new( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId); - AzureOpenAIChatCompletionService chatCompletionService = - string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.ApiKey) - ? new( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - credentials: new DefaultAzureCredential(), - modelId: TestConfiguration.AzureOpenAI.ChatModelId) - : new( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - apiKey: TestConfiguration.AzureOpenAI.ApiKey, - modelId: TestConfiguration.AzureOpenAI.ChatModelId); + await StartChatAsync(chatCompletionService); + } + + /// + /// Sample showing how to use Azure Open AI Chat Completion with Azure Default Credential. + /// If local auth is disabled in the Azure Open AI deployment, you can use Azure Default Credential to authenticate. + /// + [Fact] + public async Task DefaultAzureCredentialSampleAsync() + { + Console.WriteLine("======== Azure Open AI - Chat Completion with Azure Default Credential ========"); + + AzureOpenAIChatCompletionService chatCompletionService = new( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + credentials: new DefaultAzureCredential(), + modelId: TestConfiguration.AzureOpenAI.ChatModelId); + await StartChatAsync(chatCompletionService); + } + + private async Task StartChatAsync(IChatCompletionService chatGPT) + { Console.WriteLine("Chat content:"); Console.WriteLine("------------------------"); @@ -92,7 +83,7 @@ public async Task ServicePromptAsync() OutputLastMessage(chatHistory); // First assistant message - var reply = await chatCompletionService.GetChatMessageContentAsync(chatHistory); + var reply = await chatGPT.GetChatMessageContentAsync(chatHistory); chatHistory.Add(reply); OutputLastMessage(chatHistory); @@ -101,7 +92,7 @@ public async Task ServicePromptAsync() OutputLastMessage(chatHistory); // Second assistant message - reply = await chatCompletionService.GetChatMessageContentAsync(chatHistory); + reply = await chatGPT.GetChatMessageContentAsync(chatHistory); chatHistory.Add(reply); OutputLastMessage(chatHistory); } diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionStreaming.cs index 29dfe10d6bd1..1ef3647623aa 100644 --- a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionStreaming.cs +++ b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionStreaming.cs @@ -8,7 +8,7 @@ namespace ChatCompletion; /// -/// These examples demonstrate different ways of using streaming chat completion with Azure OpenAI API. +/// These examples demonstrate the ways different content types are streamed by Azure OpenAI via the chat completion service. 
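Note that the keyless path removed from ChatPromptAsync above still works on the kernel-builder route: the AddAzureOpenAIChatCompletion overload used in the deleted branch accepts a credential. A minimal sketch, reusing the exact arguments from the removed code:

// Sketch only: register Azure OpenAI chat completion with DefaultAzureCredential
// through the kernel builder, mirroring the branch removed from ChatPromptAsync.
var kernel = Kernel.CreateBuilder()
    .AddAzureOpenAIChatCompletion(
        deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
        endpoint: TestConfiguration.AzureOpenAI.Endpoint,
        credentials: new DefaultAzureCredential(),
        modelId: TestConfiguration.AzureOpenAI.ChatModelId)
    .Build();

var reply = await kernel.InvokePromptAsync("Hi, I'm looking for book suggestions");
Console.WriteLine(reply);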
/// public class AzureOpenAI_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) { @@ -128,4 +128,28 @@ private async Task StartStreamingChatAsync(IChatCompletionService chatCompletion // Second assistant message await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); } + + private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) + { + bool roleWritten = false; + string fullMessage = string.Empty; + + await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + if (!roleWritten && chatUpdate.Role.HasValue) + { + Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); + roleWritten = true; + } + + if (chatUpdate.Content is { Length: > 0 }) + { + fullMessage += chatUpdate.Content; + Console.Write(chatUpdate.Content); + } + } + + Console.WriteLine("\n------------------------"); + chatHistory.AddMessage(authorRole, fullMessage); + } } diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionWithReasoning.cs b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionWithReasoning.cs deleted file mode 100644 index cc9660c4cfa2..000000000000 --- a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionWithReasoning.cs +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Text; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.AzureOpenAI; -using OpenAI.Chat; - -namespace ChatCompletion; - -/// -/// These examples demonstrate different ways of using chat completion reasoning models with Azure OpenAI API. -/// -public class AzureOpenAI_ChatCompletionWithReasoning(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Sample showing how to use with chat completion and chat prompt syntax. - /// - [Fact] - public async Task ChatPromptWithReasoningAsync() - { - Console.WriteLine("======== Azure Open AI - Chat Completion with Reasoning ========"); - - Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName); - Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint); - Assert.NotNull(TestConfiguration.AzureOpenAI.ApiKey); - - var kernel = Kernel.CreateBuilder() - .AddAzureOpenAIChatCompletion( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - apiKey: TestConfiguration.AzureOpenAI.ApiKey, - modelId: TestConfiguration.AzureOpenAI.ChatModelId) - .Build(); - - // Create execution settings with high reasoning effort. - var executionSettings = new AzureOpenAIPromptExecutionSettings //OpenAIPromptExecutionSettings - { - // Flags Azure SDK to use the new token property. - SetNewMaxCompletionTokensEnabled = true, - MaxTokens = 2000, - // Note: reasoning effort is only available for reasoning models (at this moment o3-mini & o1 models) - ReasoningEffort = ChatReasoningEffortLevel.Low - }; - - // Create KernelArguments using the execution settings. - var kernelArgs = new KernelArguments(executionSettings); - - StringBuilder chatPrompt = new(""" - You are an expert software engineer, specialized in the Semantic Kernel SDK and NET framework - Hi, Please craft me an example code in .NET using Semantic Kernel that implements a chat loop . - """); - - // Invoke the prompt with high reasoning effort. 
- var reply = await kernel.InvokePromptAsync(chatPrompt.ToString(), kernelArgs); - - Console.WriteLine(reply); - } - - /// - /// Sample showing how to use directly with a . - /// - [Fact] - public async Task ServicePromptWithReasoningAsync() - { - Console.WriteLine("======== Azure Open AI - Chat Completion with Azure Default Credential with Reasoning ========"); - - Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName); - Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint); - Assert.NotNull(TestConfiguration.AzureOpenAI.ApiKey); - - IChatCompletionService chatCompletionService = new AzureOpenAIChatCompletionService( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - apiKey: TestConfiguration.AzureOpenAI.ApiKey, - modelId: TestConfiguration.AzureOpenAI.ChatModelId); - - // Create execution settings with high reasoning effort. - var executionSettings = new AzureOpenAIPromptExecutionSettings - { - // Flags Azure SDK to use the new token property. - SetNewMaxCompletionTokensEnabled = true, - MaxTokens = 2000, - // Note: reasoning effort is only available for reasoning models (at this moment o3-mini & o1 models) - ReasoningEffort = ChatReasoningEffortLevel.Low - }; - - // Create a ChatHistory and add messages. - var chatHistory = new ChatHistory(); - chatHistory.AddDeveloperMessage( - "You are an expert software engineer, specialized in the Semantic Kernel SDK and .NET framework."); - chatHistory.AddUserMessage( - "Hi, Please craft me an example code in .NET using Semantic Kernel that implements a chat loop."); - - // Instead of a prompt string, call GetChatMessageContentAsync with the chat history. - var reply = await chatCompletionService.GetChatMessageContentAsync( - chatHistory: chatHistory, - executionSettings: executionSettings); - - Console.WriteLine(reply); - } -} diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_CustomClient.cs b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_CustomClient.cs index a76a954c1bfa..eafae661111b 100644 --- a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_CustomClient.cs +++ b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_CustomClient.cs @@ -5,34 +5,27 @@ using Azure.AI.OpenAI; using Microsoft.SemanticKernel; -#pragma warning disable CA5399 // HttpClient is created without enabling CheckCertificateRevocationList - namespace ChatCompletion; -/// -/// This example shows a way of using a Custom HttpClient and HttpHandler with Azure OpenAI Connector to capture -/// the request Uri and Headers for each request. 
-/// public sealed class AzureOpenAI_CustomClient(ITestOutputHelper output) : BaseTest(output) { [Fact] - public async Task UsingCustomHttpClientWithAzureOpenAI() + public async Task RunAsync() { + Console.WriteLine("======== Using a custom AzureOpenAI client ========"); + Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint); Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName); Assert.NotNull(TestConfiguration.AzureOpenAI.ApiKey); - Console.WriteLine($"======== Azure Open AI - {nameof(UsingCustomHttpClientWithAzureOpenAI)} ========"); - // Create an HttpClient and include your custom header(s) - using var myCustomHttpHandler = new MyCustomClientHttpHandler(Output); - using var myCustomClient = new HttpClient(handler: myCustomHttpHandler); - myCustomClient.DefaultRequestHeaders.Add("My-Custom-Header", "My Custom Value"); + var httpClient = new HttpClient(); + httpClient.DefaultRequestHeaders.Add("My-Custom-Header", "My Custom Value"); // Configure AzureOpenAIClient to use the customized HttpClient var clientOptions = new AzureOpenAIClientOptions { - Transport = new HttpClientPipelineTransport(myCustomClient), + Transport = new HttpClientPipelineTransport(httpClient), NetworkTimeout = TimeSpan.FromSeconds(30), RetryPolicy = new ClientRetryPolicy() }; @@ -55,27 +48,6 @@ public async Task UsingCustomHttpClientWithAzureOpenAI() ); Console.WriteLine(result.GetValue()); - myCustomClient.Dispose(); - } - - /// - /// Normally you would use a custom HttpClientHandler to add custom logic to your custom http client - /// This uses the ITestOutputHelper to write the requested URI to the test output - /// - /// The to write the requested URI to the test output - private sealed class MyCustomClientHttpHandler(ITestOutputHelper output) : HttpClientHandler - { - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - output.WriteLine($"Requested URI: {request.RequestUri}"); - - request.Headers.Where(h => h.Key != "Authorization") - .ToList() - .ForEach(h => output.WriteLine($"{h.Key}: {string.Join(", ", h.Value)}")); - output.WriteLine("--------------------------------"); - - // Add custom logic here - return await base.SendAsync(request, cancellationToken); - } + httpClient.Dispose(); } } diff --git a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryExtensions.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryExtensions.cs index 7f41756970cc..a386c7631e4e 100644 --- a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryExtensions.cs +++ b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryExtensions.cs @@ -19,7 +19,7 @@ internal static class ChatHistoryExtensions /// /// For simplicity only a single system message is supported in these examples. /// - internal static ChatMessageContent? GetSystemMessage(this IReadOnlyList chatHistory) + internal static ChatMessageContent? GetSystemMessage(this ChatHistory chatHistory) { return chatHistory.FirstOrDefault(m => m.Role == AuthorRole.System); } @@ -34,9 +34,7 @@ internal static class ChatHistoryExtensions /// An optional summary messageContent to include /// An optional message filter public static IEnumerable Extract( - this IReadOnlyList chatHistory, - int startIndex, - int? endIndex = null, + this ChatHistory chatHistory, int startIndex, int? endIndex = null, ChatMessageContent? systemMessage = null, ChatMessageContent? summaryMessage = null, Func? 
messageFilter = null) @@ -73,11 +71,11 @@ public static IEnumerable Extract( /// /// Compute the index truncation where truncation should begin using the current truncation threshold. /// - /// The source history. - /// Truncated size. - /// Truncation threshold. + /// ChatHistory instance to be truncated + /// + /// /// Flag indicating whether or not the chat history contains a system messageContent - public static int ComputeTruncationIndex(this IReadOnlyList chatHistory, int truncatedSize, int truncationThreshold, bool hasSystemMessage) + public static int ComputeTruncationIndex(this ChatHistory chatHistory, int truncatedSize, int truncationThreshold, bool hasSystemMessage) { if (chatHistory.Count <= truncationThreshold) { diff --git a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryReducerTests.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryReducerTests.cs index 2c1c4258aadc..53b4a6079283 100644 --- a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryReducerTests.cs +++ b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryReducerTests.cs @@ -11,6 +11,31 @@ namespace ChatCompletion; /// public class ChatHistoryReducerTests(ITestOutputHelper output) : BaseTest(output) { + [Theory] + [InlineData(3, null, null, 5, 0)] + [InlineData(2, null, null, 1, 1)] + [InlineData(2, "SystemMessage", null, 2, 2)] + [InlineData(10, null, null, 3, 3)] + [InlineData(10, "SystemMessage", null, 3, 3)] + [InlineData(9, null, null, 5, 5)] + [InlineData(11, null, null, 5, 5)] + [InlineData(8, "SystemMessage", null, 5, 5)] + [InlineData(10, "SystemMessage", null, 5, 5)] + [InlineData(3, null, new int[] { 0 }, 3, 2)] + [InlineData(3, "SystemMessage", new int[] { 0 }, 4, 3)] + public async Task VerifyTruncatingChatHistoryReducerAsync(int messageCount, string? systemMessage, int[]? functionCallIndexes, int truncatedSize, int expectedSize) + { + // Arrange + var chatHistory = CreateHistoryWithUserInput(messageCount, systemMessage, functionCallIndexes); + var reducer = new TruncatingChatHistoryReducer(truncatedSize); + + // Act + var reducedHistory = await reducer.ReduceAsync(chatHistory); + + // Assert + VerifyReducedHistory(reducedHistory, ComputeExpectedMessages(chatHistory, expectedSize)); + } + [Theory] [InlineData(3, null, null, 100, 0)] [InlineData(3, "SystemMessage", null, 100, 0)] @@ -22,7 +47,30 @@ public async Task VerifyMaxTokensChatHistoryReducerAsync(int messageCount, strin { // Arrange var chatHistory = CreateHistoryWithUserInput(messageCount, systemMessage, functionCallIndexes, true); - var reducer = new ChatHistoryMaxTokensReducer(maxTokens); + var reducer = new MaxTokensChatHistoryReducer(maxTokens); + + // Act + var reducedHistory = await reducer.ReduceAsync(chatHistory); + + // Assert + VerifyReducedHistory(reducedHistory, ComputeExpectedMessages(chatHistory, expectedSize)); + } + + [Theory] + [InlineData(3, null, null, 5, 10, 0)] + [InlineData(10, null, null, 5, 10, 6)] + [InlineData(10, "SystemMessage", null, 5, 10, 6)] + [InlineData(10, null, new int[] { 1 }, 5, 10, 6)] + [InlineData(10, "SystemMessage", new int[] { 2 }, 5, 10, 6)] + public async Task VerifySummarizingChatHistoryReducerAsync(int messageCount, string? systemMessage, int[]? 
functionCallIndexes, int truncatedSize, int truncationThreshold, int expectedSize) + { + // Arrange + Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); + IChatCompletionService chatClient = new FakeChatCompletionService("The dialog consists of repetitive interaction where both the user and assistant exchange identical phrases in Latin."); + + var chatHistory = CreateHistoryWithUserInput(messageCount, systemMessage, functionCallIndexes, true); + var reducer = new SummarizingChatHistoryReducer(chatClient, truncatedSize, truncationThreshold); // Act var reducedHistory = await reducer.ReduceAsync(chatHistory); diff --git a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/IChatHistoryReducer.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/IChatHistoryReducer.cs new file mode 100644 index 000000000000..ff8b3ef0a56a --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/IChatHistoryReducer.cs @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace ChatCompletion; + +/// +/// Interface for reducing the chat history before sending it to the chat completion provider. +/// +public interface IChatHistoryReducer +{ + /// + /// Reduce the before sending it to the . + /// + /// Instance of to be reduced. + /// Cancellation token. + /// An optional which contains the reduced chat messages or null if chat history can be used as is. + Task?> ReduceAsync(ChatHistory chatHistory, CancellationToken cancellationToken); +} diff --git a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryMaxTokensReducer.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/MaxTokensChatHistoryReducer.cs similarity index 81% rename from dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryMaxTokensReducer.cs rename to dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/MaxTokensChatHistoryReducer.cs index 91dfa97e242c..b8a9dd27da36 100644 --- a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryMaxTokensReducer.cs +++ b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/MaxTokensChatHistoryReducer.cs @@ -11,15 +11,15 @@ namespace ChatCompletion; /// /// This reducer requires that the ChatMessageContent.MetaData contains a TokenCount property. /// -public sealed class ChatHistoryMaxTokensReducer : IChatHistoryReducer +public sealed class MaxTokensChatHistoryReducer : IChatHistoryReducer { private readonly int _maxTokenCount; /// - /// Creates a new instance of . + /// Creates a new instance of . /// /// Max token count to send to the model. - public ChatHistoryMaxTokensReducer(int maxTokenCount) + public MaxTokensChatHistoryReducer(int maxTokenCount) { if (maxTokenCount <= 0) { @@ -30,7 +30,7 @@ public ChatHistoryMaxTokensReducer(int maxTokenCount) } /// - public Task?> ReduceAsync(IReadOnlyList chatHistory, CancellationToken cancellationToken = default) + public Task?> ReduceAsync(ChatHistory chatHistory, CancellationToken cancellationToken = default) { var systemMessage = chatHistory.GetSystemMessage(); @@ -47,13 +47,12 @@ public ChatHistoryMaxTokensReducer(int maxTokenCount) } #region private - /// /// Compute the index truncation where truncation should begin using the current truncation threshold. /// - /// Chat history to be truncated. 
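To make the IChatHistoryReducer contract introduced above concrete: ReduceAsync returns null when the history can be sent as-is, otherwise it returns the reduced set of messages. The sketch below is illustration only and is not part of this patch; the element type IEnumerable<ChatMessageContent> is inferred from how the reducers in this change use the interface.

// (Same usings as the reducer files in this change: Microsoft.SemanticKernel, Microsoft.SemanticKernel.ChatCompletion.)

/// <summary>
/// Toy reducer that keeps only the last N messages. Unlike the reducers in this change it ignores
/// system messages and function-call content, so it is for illustration only.
/// </summary>
public sealed class LastMessagesChatHistoryReducer(int maxMessages) : IChatHistoryReducer
{
    public Task<IEnumerable<ChatMessageContent>?> ReduceAsync(ChatHistory chatHistory, CancellationToken cancellationToken)
    {
        // Returning null signals "no reduction needed"; the caller keeps using the original history.
        if (chatHistory.Count <= maxMessages)
        {
            return Task.FromResult<IEnumerable<ChatMessageContent>?>(null);
        }

        // Otherwise return only the most recent messages.
        return Task.FromResult<IEnumerable<ChatMessageContent>?>(
            chatHistory.Skip(chatHistory.Count - maxMessages).ToList());
    }
}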
+ /// ChatHistory instance to be truncated /// The system message - private int ComputeTruncationIndex(IReadOnlyList chatHistory, ChatMessageContent? systemMessage) + private int ComputeTruncationIndex(ChatHistory chatHistory, ChatMessageContent? systemMessage) { var truncationIndex = -1; @@ -84,6 +83,5 @@ private int ComputeTruncationIndex(IReadOnlyList chatHistory return truncationIndex; } - #endregion } diff --git a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/SummarizingChatHistoryReducer.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/SummarizingChatHistoryReducer.cs new file mode 100644 index 000000000000..153e2b50d182 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/SummarizingChatHistoryReducer.cs @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace ChatCompletion; + +/// +/// Implementation of which trim to the last N messages and summarizes the remainder. +/// +public sealed class SummarizingChatHistoryReducer : IChatHistoryReducer +{ + private readonly IChatCompletionService _chatClient; + private readonly int _truncatedSize; + private readonly int _summarizationThreshold; + private readonly string _summarizationPrompt; + private readonly Kernel _kernel; + + /// + /// The default summarization system instructions. + /// + private const string DefaultSummarizationPrompt = + """ + Provide a concise and complete summarization of the entire dialog that does not exceed 5 sentences + + This summary must always: + - Consider both user and assistant interactions + - Maintain continuity for the purpose of further dialog + - Include details from any existing summary + - Focus on the most significant aspects of the dialog + + This summary must never: + - Critique, correct, interpret, presume, or assume + - Identify faults, mistakes, misunderstanding, or correctness + - Analyze what has not occurred + - Exclude details from any existing summary + """; + + /// + /// Metadata key to indicate a summary message. + /// + private const string SummaryMetadataKey = "__summary__"; + + /// + /// Creates a new instance of . + /// + /// Instance of to use for summarization + /// The truncated size of the chat history after summarization is triggered + /// The threshold at which to trigger summarization + /// An optional prompt to use when summarizing the content + public SummarizingChatHistoryReducer(IChatCompletionService chatClient, int truncatedSize, int summarizationThreshold, string? summarizationPrompt = null) + { + if (chatClient is null) + { + throw new ArgumentException("Chat completion service must be specified.", nameof(chatClient)); + } + if (truncatedSize <= 0) + { + throw new ArgumentException("Truncated size must be greater than zero.", nameof(truncatedSize)); + } + if (summarizationThreshold < truncatedSize) + { + throw new ArgumentException($"Summarization threshold must be greater than truncatedSize: {truncatedSize}.", nameof(summarizationPrompt)); + } + + this._chatClient = chatClient; + this._truncatedSize = truncatedSize; + this._summarizationThreshold = summarizationThreshold; + this._summarizationPrompt = summarizationPrompt ?? 
DefaultSummarizationPrompt; + + var builder = Kernel.CreateBuilder(); + builder.Services.AddTransient((sp) => chatClient); + this._kernel = builder.Build(); + } + + /// + public async Task?> ReduceAsync(ChatHistory chatHistory, CancellationToken cancellationToken = default) + { + // index of the last summary message + int lastIndex = chatHistory + .Select((value, index) => new { value, index }) + .LastOrDefault(message => message.value.Metadata?.ContainsKey(SummaryMetadataKey) ?? false) + ?.index ?? -1; + + var systemMessage = chatHistory.GetSystemMessage(); + var hasSystemMessage = systemMessage is not null; + + // check are there messages to be summarized + var startIndex = -1; + var endIndex = chatHistory.Count - this._truncatedSize; + if (lastIndex == -1) + { + // have never summarized so use chat history size + if (chatHistory.Count < this._summarizationThreshold) + { + return null; + } + startIndex = 0 + (hasSystemMessage ? 1 : 0); + } + else + { + // have summarized so use chat history size minus position of last summary + if (chatHistory.Count - lastIndex < this._summarizationThreshold) + { + return null; + } + startIndex = lastIndex; + } + + var summaryMessage = await this.SummarizeAsync(chatHistory, startIndex, endIndex, cancellationToken); + + // insert summary into the original chat history + chatHistory.Insert(endIndex + 1, summaryMessage); + + IEnumerable? truncatedHistory = chatHistory.Extract(endIndex + 2, systemMessage: systemMessage, summaryMessage: summaryMessage); + return truncatedHistory; + } + + #region private + /// + /// Summarize messages starting at the truncation index. + /// + private async Task SummarizeAsync(ChatHistory chatHistory, int startIndex, int endIndex, CancellationToken cancellationToken) + { + // extract history for summarization + IEnumerable messagesToSummarize = + chatHistory.Extract(startIndex, endIndex: endIndex, + messageFilter: (m) => m.Items.Any(i => i is FunctionCallContent || i is FunctionResultContent)); + + // summarize the chat history + var summarizationRequest = new ChatHistory(this._summarizationPrompt); + summarizationRequest.AddRange(messagesToSummarize); + ChatMessageContent summaryContent = await this._chatClient.GetChatMessageContentAsync(summarizationRequest, cancellationToken: cancellationToken).ConfigureAwait(false); + summaryContent.Metadata = new Dictionary { { SummaryMetadataKey, true } }; + + return summaryContent; + } + #endregion +} diff --git a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/TruncatingChatHistoryReducer.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/TruncatingChatHistoryReducer.cs new file mode 100644 index 000000000000..48dce62da8c4 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/TruncatingChatHistoryReducer.cs @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace ChatCompletion; + +/// +/// Implementation of which truncates chat history to the provide truncated size. +/// +/// +/// The truncation process is triggered when the list length is great than the truncated size. +/// +public sealed class TruncatingChatHistoryReducer : IChatHistoryReducer +{ + private readonly int _truncatedSize; + + /// + /// Creates a new instance of . + /// + /// The size of the chat history after truncation. 
+ public TruncatingChatHistoryReducer(int truncatedSize) + { + if (truncatedSize <= 0) + { + throw new ArgumentException("Truncated size must be greater than zero.", nameof(truncatedSize)); + } + + this._truncatedSize = truncatedSize; + } + + /// + public Task?> ReduceAsync(ChatHistory chatHistory, CancellationToken cancellationToken = default) + { + var systemMessage = chatHistory.GetSystemMessage(); + var truncationIndex = ComputeTruncationIndex(chatHistory, this._truncatedSize, systemMessage is not null); + + IEnumerable? truncatedHistory = null; + + if (truncationIndex > 0) + { + truncatedHistory = chatHistory.Extract(truncationIndex, systemMessage: systemMessage); + } + + return Task.FromResult?>(truncatedHistory); + } + + #region private + + /// + /// Compute the index truncation where truncation should begin using the current truncation threshold. + /// + private static int ComputeTruncationIndex(ChatHistory chatHistory, int truncatedSize, bool hasSystemMessage) + { + truncatedSize -= hasSystemMessage ? 1 : 0; + if (chatHistory.Count <= truncatedSize) + { + return -1; + } + + // Compute the index of truncation target + var truncationIndex = chatHistory.Count - truncatedSize; + + // Skip function related content + while (truncationIndex < chatHistory.Count) + { + if (chatHistory[truncationIndex].Items.Any(i => i is FunctionCallContent or FunctionResultContent)) + { + truncationIndex++; + } + else + { + break; + } + } + + return truncationIndex; + } + #endregion +} diff --git a/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletion.cs deleted file mode 100644 index 4cb1c57f60e4..000000000000 --- a/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletion.cs +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.HuggingFace; - -namespace ChatCompletion; - -/// -/// This example shows a way of using Hugging Face connector with HuggingFace Text Generation Inference (TGI) API. -/// Follow steps in to setup HuggingFace local Text Generation Inference HTTP server. -/// -/// Install HuggingFace TGI via docker -/// docker run -d --gpus all --shm-size 1g -p 8080:80 -v "c:\temp\huggingface:/data" ghcr.io/huggingface/text-generation-inference:latest --model-id teknium/OpenHermes-2.5-Mistral-7B -/// Run the examples -/// -/// -public class HuggingFace_ChatCompletion(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// This example shows how to setup LMStudio to use with the InvokeAsync (Non-Streaming). - /// - [Fact] -#pragma warning restore CS0419 // Ambiguous reference in cref attribute - public async Task UsingKernelNonStreamingWithHuggingFace() - { - Console.WriteLine($"======== HuggingFace - Chat Completion - {nameof(UsingKernelNonStreamingWithHuggingFace)} ========"); - - var endpoint = new Uri("http://localhost:8080"); // Update the endpoint if you chose a different port. (defaults to 8080) - var modelId = "teknium/OpenHermes-2.5-Mistral-7B"; // Update the modelId if you chose a different model. - - var kernel = Kernel.CreateBuilder() - .AddHuggingFaceChatCompletion( - model: modelId, - apiKey: null, - endpoint: endpoint) - .Build(); - - var prompt = @"Rewrite the text between triple backticks into a business mail. Use a professional tone, be clear and concise. - Sign the mail as AI Assistant. 
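Putting the new reducers to use: before each model call the caller asks the reducer for a reduced view of the history and falls back to the full history when the reducer returns null. A minimal sketch (not part of this patch) using the TruncatingChatHistoryReducer defined above; SummarizingChatHistoryReducer plugs into the same spot.

// Sketch only: apply a chat history reducer before each chat completion call.
// 'kernel' is assumed to be a Kernel configured with a chat completion service.
IChatCompletionService chatService = kernel.GetRequiredService<IChatCompletionService>();
IChatHistoryReducer reducer = new TruncatingChatHistoryReducer(truncatedSize: 5);
// Or: new SummarizingChatHistoryReducer(chatService, truncatedSize: 5, summarizationThreshold: 10);

var chatHistory = new ChatHistory("You are a librarian, expert about books");
chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");

// ReduceAsync returns null when the history is already small enough to send as-is.
IEnumerable<ChatMessageContent>? reduced = await reducer.ReduceAsync(chatHistory, CancellationToken.None);
ChatHistory historyToSend = reduced is null ? chatHistory : new ChatHistory(reduced);

var reply = await chatService.GetChatMessageContentAsync(historyToSend);
chatHistory.Add(reply);
Console.WriteLine(reply);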
- - Text: ```{{$input}}```"; - - var mailFunction = kernel.CreateFunctionFromPrompt(prompt, new HuggingFacePromptExecutionSettings - { - TopP = 0.5f, - MaxTokens = 1000, - }); - - var response = await kernel.InvokeAsync(mailFunction, new() { ["input"] = "Tell David that I'm going to finish the business plan by the end of the week." }); - Console.WriteLine(response); - } - - /// - /// Sample showing how to use directly with a . - /// - [Fact] - public async Task UsingServiceNonStreamingWithHuggingFace() - { - Console.WriteLine($"======== HuggingFace - Chat Completion - {nameof(UsingServiceNonStreamingWithHuggingFace)} ========"); - - // HuggingFace local HTTP server endpoint - var endpoint = new Uri("http://localhost:8080"); // Update the endpoint if you chose a different port. (defaults to 8080) - var modelId = "teknium/OpenHermes-2.5-Mistral-7B"; // Update the modelId if you chose a different model. - - Kernel kernel = Kernel.CreateBuilder() - .AddHuggingFaceChatCompletion( - model: modelId, - endpoint: endpoint) - .Build(); - - var chatService = kernel.GetRequiredService(); - - Console.WriteLine("Chat content:"); - Console.WriteLine("------------------------"); - - var chatHistory = new ChatHistory("You are a librarian, expert about books"); - - // First user message - chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); - OutputLastMessage(chatHistory); - - // First assistant message - var reply = await chatService.GetChatMessageContentAsync(chatHistory); - chatHistory.Add(reply); - OutputLastMessage(chatHistory); - - // Second user message - chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); - OutputLastMessage(chatHistory); - - // Second assistant message - reply = await chatService.GetChatMessageContentAsync(chatHistory); - chatHistory.Add(reply); - OutputLastMessage(chatHistory); - } -} diff --git a/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletionStreaming.cs deleted file mode 100644 index d508cb64060d..000000000000 --- a/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletionStreaming.cs +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.HuggingFace; - -namespace ChatCompletion; - -/// -/// This example shows a way of using Hugging Face connector with HuggingFace Text Generation Inference (TGI) API. -/// Follow steps in to setup HuggingFace local Text Generation Inference HTTP server. -/// -/// Install HuggingFace TGI via docker -/// docker run -d --gpus all --shm-size 1g -p 8080:80 -v "c:\temp\huggingface:/data" ghcr.io/huggingface/text-generation-inference:latest --model-id teknium/OpenHermes-2.5-Mistral-7B -/// Run the examples -/// -/// -public class HuggingFace_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Sample showing how to use directly with a . - /// - [Fact] - public async Task UsingServiceStreamingWithHuggingFace() - { - Console.WriteLine($"======== HuggingFace - Chat Completion - {nameof(UsingServiceStreamingWithHuggingFace)} ========"); - - // HuggingFace local HTTP server endpoint - var endpoint = new Uri("http://localhost:8080"); // Update the endpoint if you chose a different port. 
(defaults to 8080) - var modelId = "teknium/OpenHermes-2.5-Mistral-7B"; // Update the modelId if you chose a different model. - - Kernel kernel = Kernel.CreateBuilder() - .AddHuggingFaceChatCompletion( - model: modelId, - endpoint: endpoint) - .Build(); - - var chatService = kernel.GetRequiredService(); - - Console.WriteLine("Chat content:"); - Console.WriteLine("------------------------"); - - var chatHistory = new ChatHistory("You are a librarian, expert about books"); - OutputLastMessage(chatHistory); - - // First user message - chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); - OutputLastMessage(chatHistory); - - // First assistant message - await StreamMessageOutputAsync(chatService, chatHistory, AuthorRole.Assistant); - - // Second user message - chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); - OutputLastMessage(chatHistory); - - // Second assistant message - await StreamMessageOutputAsync(chatService, chatHistory, AuthorRole.Assistant); - } - - /// - /// This example shows how to setup LMStudio to use with the InvokeAsync (Non-Streaming). - /// - [Fact] - public async Task UsingKernelStreamingWithHuggingFace() - { - Console.WriteLine($"======== HuggingFace - Chat Completion - {nameof(UsingKernelStreamingWithHuggingFace)} ========"); - - var endpoint = new Uri("http://localhost:8080"); // Update the endpoint if you chose a different port. (defaults to 8080) - var modelId = "teknium/OpenHermes-2.5-Mistral-7B"; // Update the modelId if you chose a different model. - - var kernel = Kernel.CreateBuilder() - .AddHuggingFaceChatCompletion( - model: modelId, - apiKey: null, - endpoint: endpoint) - .Build(); - - var prompt = @"Rewrite the text between triple backticks into a business mail. Use a professional tone, be clear and concise. - Sign the mail as AI Assistant. - - Text: ```{{$input}}```"; - - var mailFunction = kernel.CreateFunctionFromPrompt(prompt, new HuggingFacePromptExecutionSettings - { - TopP = 0.5f, - MaxTokens = 1000, - }); - - await foreach (var word in kernel.InvokeStreamingAsync(mailFunction, new() { ["input"] = "Tell David that I'm going to finish the business plan by the end of the week." })) - { - Console.WriteLine(word); - } - } -} diff --git a/dotnet/samples/Concepts/ChatCompletion/HybridCompletion_Fallback.cs b/dotnet/samples/Concepts/ChatCompletion/HybridCompletion_Fallback.cs deleted file mode 100644 index d1fa8baa257f..000000000000 --- a/dotnet/samples/Concepts/ChatCompletion/HybridCompletion_Fallback.cs +++ /dev/null @@ -1,279 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.ClientModel; -using System.ClientModel.Primitives; -using System.Net; -using System.Runtime.CompilerServices; -using Azure.AI.OpenAI; -using Azure.Identity; -using Microsoft.Extensions.AI; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.AzureOpenAI; - -namespace ChatCompletion; - -/// -/// This example demonstrates how an AI application can use code to attempt inference with the first available chat client in the list, falling back to the next client if the previous one fails. -/// The class handles all the fallback complexities, abstracting them away from the application code. -/// Since the class implements the interface, the chat client used for inference the application can be easily replaced with the . 
-/// -/// -/// The class is useful when an application utilizes multiple models and needs to switch between them based on the situation. -/// For example, the application may use a cloud-based model by default and seamlessly fall back to a local model when the cloud model is unavailable (e.g., in offline mode), and vice versa. -/// Additionally, the application can enhance resilience by employing several cloud models, falling back to the next one if the previous model fails. -/// -public class HybridCompletion_Fallback(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// This example demonstrates how to perform completion using the , which falls back to an available model when the primary model is unavailable. - /// - [Fact] - public async Task FallbackToAvailableModelAsync() - { - IKernelBuilder kernelBuilder = Kernel.CreateBuilder(); - - // Create and register an unavailable chat client that fails with 503 Service Unavailable HTTP status code - kernelBuilder.Services.AddSingleton(CreateUnavailableOpenAIChatClient()); - - // Create and register a cloud available chat client - kernelBuilder.Services.AddSingleton(CreateAzureOpenAIChatClient()); - - // Create and register fallback chat client that will fallback to the available chat client when unavailable chat client fails - kernelBuilder.Services.AddSingleton((sp) => - { - IEnumerable chatClients = sp.GetServices(); - - return new FallbackChatClient(chatClients.ToList()).AsChatCompletionService(); - }); - - Kernel kernel = kernelBuilder.Build(); - kernel.ImportPluginFromFunctions("Weather", [KernelFunctionFactory.CreateFromMethod(() => "It's sunny", "GetWeather")]); - - AzureOpenAIPromptExecutionSettings settings = new() - { - FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() - }; - - FunctionResult result = await kernel.InvokePromptAsync("Do I need an umbrella?", new(settings)); - - Output.WriteLine(result); - } - - /// - /// This example demonstrates how to perform streaming completion using the , which falls back to an available model when the primary model is unavailable. 
- /// - [Fact] - public async Task FallbackToAvailableModelStreamingAsync() - { - // Create an unavailable chat client that fails with 503 Service Unavailable HTTP status code - IChatClient unavailableChatClient = CreateUnavailableOpenAIChatClient(); - - // Create a cloud available chat client - IChatClient availableChatClient = CreateAzureOpenAIChatClient(); - - // Create a fallback chat client that will fallback to the available chat client when unavailable chat client fails - IChatCompletionService fallbackCompletionService = new FallbackChatClient([unavailableChatClient, availableChatClient]).AsChatCompletionService(); - - Kernel kernel = new(); - kernel.ImportPluginFromFunctions("Weather", [KernelFunctionFactory.CreateFromMethod(() => "It's sunny", "GetWeather")]); - - AzureOpenAIPromptExecutionSettings settings = new() - { - FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() - }; - - IAsyncEnumerable result = fallbackCompletionService.GetStreamingChatMessageContentsAsync("Do I need an umbrella?", settings, kernel); - - await foreach (var update in result) - { - Output.WriteLine(update); - } - } - - private static IChatClient CreateUnavailableOpenAIChatClient() - { - AzureOpenAIClientOptions options = new() - { - Transport = new HttpClientPipelineTransport( - new HttpClient - ( - new StubHandler(new HttpClientHandler(), async (response) => { response.StatusCode = System.Net.HttpStatusCode.ServiceUnavailable; }) - ) - ) - }; - - IChatClient openAiClient = new AzureOpenAIClient(new Uri(TestConfiguration.AzureOpenAI.Endpoint), new AzureCliCredential(), options).AsChatClient(TestConfiguration.AzureOpenAI.ChatDeploymentName); - - return new ChatClientBuilder(openAiClient) - .UseFunctionInvocation() - .Build(); - } - - private static IChatClient CreateAzureOpenAIChatClient() - { - IChatClient chatClient = new AzureOpenAIClient(new Uri(TestConfiguration.AzureOpenAI.Endpoint), new AzureCliCredential()).AsChatClient(TestConfiguration.AzureOpenAI.ChatDeploymentName); - - return new ChatClientBuilder(chatClient) - .UseFunctionInvocation() - .Build(); - } - - protected sealed class StubHandler(HttpMessageHandler innerHandler, Func handler) : DelegatingHandler(innerHandler) - { - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - var result = await base.SendAsync(request, cancellationToken); - - await handler(result); - - return result; - } - } -} - -/// -/// Represents a chat client that performs inference using the first available chat client in the list, falling back to the next one if the previous client fails. -/// -internal sealed class FallbackChatClient : IChatClient -{ - private readonly IList _chatClients; - private static readonly List s_defaultFallbackStatusCodes = new() - { - HttpStatusCode.InternalServerError, - HttpStatusCode.NotImplemented, - HttpStatusCode.BadGateway, - HttpStatusCode.ServiceUnavailable, - HttpStatusCode.GatewayTimeout - }; - - /// - /// Initializes a new instance of the class. - /// - /// The chat clients to fallback to. - public FallbackChatClient(IList chatClients) - { - this._chatClients = chatClients?.Any() == true ? chatClients : throw new ArgumentException("At least one chat client must be provided.", nameof(chatClients)); - } - - /// - /// Gets or sets the HTTP status codes that will trigger the fallback to the next chat client. - /// - public List? 
FallbackStatusCodes { get; set; } - - /// - public ChatClientMetadata Metadata => new(); - - /// - public async Task GetResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) - { - for (int i = 0; i < this._chatClients.Count; i++) - { - var chatClient = this._chatClients.ElementAt(i); - - try - { - return await chatClient.GetResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - if (this.ShouldFallbackToNextClient(ex, i, this._chatClients.Count)) - { - continue; - } - - throw; - } - } - - // If all clients fail, throw an exception or return a default value - throw new InvalidOperationException("Neither of the chat clients could complete the inference."); - } - - /// - public async IAsyncEnumerable GetStreamingResponseAsync(IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - for (int i = 0; i < this._chatClients.Count; i++) - { - var chatClient = this._chatClients.ElementAt(i); - - IAsyncEnumerable completionStream = chatClient.GetStreamingResponseAsync(chatMessages, options, cancellationToken); - - ConfiguredCancelableAsyncEnumerable.Enumerator enumerator = completionStream.ConfigureAwait(false).GetAsyncEnumerator(); - - try - { - try - { - // Move to the first update to reveal any exceptions. - if (!await enumerator.MoveNextAsync()) - { - yield break; - } - } - catch (Exception ex) - { - if (this.ShouldFallbackToNextClient(ex, i, this._chatClients.Count)) - { - continue; - } - - throw; - } - - // Yield the first update. - yield return enumerator.Current; - - // Yield the rest of the updates. - while (await enumerator.MoveNextAsync()) - { - yield return enumerator.Current; - } - - // The stream has ended so break the while loop. - break; - } - finally - { - await enumerator.DisposeAsync(); - } - } - } - - private bool ShouldFallbackToNextClient(Exception ex, int clientIndex, int numberOfClients) - { - // If the exception is thrown by the last client then don't fallback. - if (clientIndex == numberOfClients - 1) - { - return false; - } - - HttpStatusCode? statusCode = ex switch - { - HttpOperationException operationException => operationException.StatusCode, - HttpRequestException httpRequestException => httpRequestException.StatusCode, - ClientResultException clientResultException => (HttpStatusCode?)clientResultException.Status, - _ => throw new InvalidOperationException($"Unsupported exception type: {ex.GetType()}."), - }; - - if (statusCode is null) - { - throw new InvalidOperationException("The exception does not contain an HTTP status code."); - } - - return (this.FallbackStatusCodes ?? s_defaultFallbackStatusCodes).Contains(statusCode!.Value); - } - - /// - public void Dispose() - { - // We don't own the chat clients so we don't dispose them. - } - - /// - public object? GetService(Type serviceType, object? serviceKey = null) - { - return null; - } -} diff --git a/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletion.cs deleted file mode 100644 index 97562f75c847..000000000000 --- a/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletion.cs +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; - -namespace ChatCompletion; - -/// -/// This example shows a way of using OpenAI connector with other APIs that supports the same ChatCompletion API standard from OpenAI. -/// -/// Install LMStudio Platform in your environment (As of now: 0.3.10) -/// Open LM Studio -/// Search and Download Llama2 model or any other -/// Update the modelId parameter with the model llm name loaded (i.e: llama-2-7b-chat) -/// Start the Local Server on http://localhost:1234 -/// Run the examples -/// -/// -public class LMStudio_ChatCompletion(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// This example shows how to setup LMStudio to use with the InvokeAsync (Non-Streaming). - /// - [Fact] -#pragma warning restore CS0419 // Ambiguous reference in cref attribute - public async Task UsingKernelStreamingWithLMStudio() - { - Console.WriteLine($"======== LM Studio - Chat Completion - {nameof(UsingKernelStreamingWithLMStudio)} ========"); - - var modelId = "llama-2-7b-chat"; // Update the modelId if you chose a different model. - var endpoint = new Uri("http://localhost:1234/v1"); // Update the endpoint if you chose a different port. - - var kernel = Kernel.CreateBuilder() - .AddOpenAIChatCompletion( - modelId: modelId, - apiKey: null, - endpoint: endpoint) - .Build(); - - var prompt = @"Rewrite the text between triple backticks into a business mail. Use a professional tone, be clear and concise. - Sign the mail as AI Assistant. - - Text: ```{{$input}}```"; - - var mailFunction = kernel.CreateFunctionFromPrompt(prompt, new OpenAIPromptExecutionSettings - { - TopP = 0.5, - MaxTokens = 1000, - }); - - var response = await kernel.InvokeAsync(mailFunction, new() { ["input"] = "Tell David that I'm going to finish the business plan by the end of the week." }); - Console.WriteLine(response); - } - - /// - /// Sample showing how to use directly with a . - /// - [Fact] - public async Task UsingServiceNonStreamingWithLMStudio() - { - Console.WriteLine($"======== LM Studio - Chat Completion - {nameof(UsingServiceNonStreamingWithLMStudio)} ========"); - - var modelId = "llama-2-7b-chat"; // Update the modelId if you chose a different model. - var endpoint = new Uri("http://localhost:1234/v1"); // Update the endpoint if you chose a different port. 
- - OpenAIChatCompletionService chatService = new(modelId: modelId, apiKey: null, endpoint: endpoint); - - Console.WriteLine("Chat content:"); - Console.WriteLine("------------------------"); - - var chatHistory = new ChatHistory("You are a librarian, expert about books"); - - // First user message - chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); - OutputLastMessage(chatHistory); - - // First assistant message - var reply = await chatService.GetChatMessageContentAsync(chatHistory); - chatHistory.Add(reply); - OutputLastMessage(chatHistory); - - // Second user message - chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); - OutputLastMessage(chatHistory); - - // Second assistant message - reply = await chatService.GetChatMessageContentAsync(chatHistory); - chatHistory.Add(reply); - OutputLastMessage(chatHistory); - } -} diff --git a/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletionStreaming.cs deleted file mode 100644 index 8ac827d41120..000000000000 --- a/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletionStreaming.cs +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; - -namespace ChatCompletion; - -/// -/// This example shows a way of using OpenAI connector with other APIs that supports the same ChatCompletion API standard from OpenAI. -/// -/// Install LMStudio Platform in your environment (As of now: 0.3.10) -/// Open LM Studio -/// Search and Download Llama2 model or any other -/// Update the modelId parameter with the model llm name loaded (i.e: llama-2-7b-chat) -/// Start the Local Server on http://localhost:1234 -/// Run the examples -/// -/// -public class LMStudio_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Sample showing how to use streaming directly with a . - /// - [Fact] - public async Task UsingServiceStreamingWithLMStudio() - { - Console.WriteLine($"======== LM Studio - Chat Completion - {nameof(UsingServiceStreamingWithLMStudio)} ========"); - - var modelId = "llama-2-7b-chat"; // Update the modelId if you chose a different model. - var endpoint = new Uri("http://localhost:1234/v1"); // Update the endpoint if you chose a different port. 
- - var kernel = Kernel.CreateBuilder() - .AddOpenAIChatCompletion( - modelId: modelId, - apiKey: null, - endpoint: endpoint) - .Build(); - - OpenAIChatCompletionService chatCompletionService = new(modelId: modelId, apiKey: null, endpoint: endpoint); - - Console.WriteLine("Chat content:"); - Console.WriteLine("------------------------"); - - var chatHistory = new ChatHistory("You are a librarian, expert about books"); - OutputLastMessage(chatHistory); - - // First user message - chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); - OutputLastMessage(chatHistory); - - // First assistant message - await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); - - // Second user message - chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); - OutputLastMessage(chatHistory); - - // Second assistant message - await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); - } - - /// - /// This example shows how to setup LMStudio to use with the Kernel InvokeAsync (Streaming). - /// - [Fact] - public async Task UsingKernelStreamingWithLMStudio() - { - Console.WriteLine($"======== LM Studio - Chat Completion - {nameof(UsingKernelStreamingWithLMStudio)} ========"); - - var modelId = "llama-2-7b-chat"; // Update the modelId if you chose a different model. - var endpoint = new Uri("http://localhost:1234/v1"); // Update the endpoint if you chose a different port. - - var kernel = Kernel.CreateBuilder() - .AddOpenAIChatCompletion( - modelId: modelId, - apiKey: null, - endpoint: endpoint) - .Build(); - - var prompt = @"Rewrite the text between triple backticks into a business mail. Use a professional tone, be clear and concise. - Sign the mail as AI Assistant. - - Text: ```{{$input}}```"; - - var mailFunction = kernel.CreateFunctionFromPrompt(prompt, new OpenAIPromptExecutionSettings - { - TopP = 0.5, - MaxTokens = 1000, - }); - - await foreach (var word in kernel.InvokeStreamingAsync(mailFunction, new() { ["input"] = "Tell David that I'm going to finish the business plan by the end of the week." 
})) - { - Console.WriteLine(word); - } - } -} diff --git a/dotnet/samples/Concepts/ChatCompletion/MultipleProviders_ChatHistoryReducer.cs b/dotnet/samples/Concepts/ChatCompletion/MultipleProviders_ChatHistoryReducer.cs index e3ee157b35b1..720ee8bff60e 100644 --- a/dotnet/samples/Concepts/ChatCompletion/MultipleProviders_ChatHistoryReducer.cs +++ b/dotnet/samples/Concepts/ChatCompletion/MultipleProviders_ChatHistoryReducer.cs @@ -62,7 +62,7 @@ public async Task ShowHowToReduceChatHistoryToLastMessageAsync() apiKey: TestConfiguration.OpenAI.ApiKey); var truncatedSize = 2; // keep system message and last user message only - IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new ChatHistoryTruncationReducer(truncatedSize)); + IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new TruncatingChatHistoryReducer(truncatedSize)); var chatHistory = new ChatHistory("You are a librarian and expert on books about cities"); @@ -105,7 +105,7 @@ public async Task ShowHowToReduceChatHistoryToLastMessageStreamingAsync() apiKey: TestConfiguration.OpenAI.ApiKey); var truncatedSize = 2; // keep system message and last user message only - IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new ChatHistoryTruncationReducer(truncatedSize)); + IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new TruncatingChatHistoryReducer(truncatedSize)); var chatHistory = new ChatHistory("You are a librarian and expert on books about cities"); @@ -151,7 +151,7 @@ public async Task ShowHowToReduceChatHistoryToMaxTokensAsync() OpenAIChatCompletionService openAiChatService = new( modelId: TestConfiguration.OpenAI.ChatModelId, apiKey: TestConfiguration.OpenAI.ApiKey); - IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new ChatHistoryMaxTokensReducer(100)); + IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new MaxTokensChatHistoryReducer(100)); var chatHistory = new ChatHistory(); chatHistory.AddSystemMessageWithTokenCount("You are an expert on the best restaurants in the world. Keep responses short."); @@ -194,7 +194,7 @@ public async Task ShowHowToReduceChatHistoryWithSummarizationAsync() OpenAIChatCompletionService openAiChatService = new( modelId: TestConfiguration.OpenAI.ChatModelId, apiKey: TestConfiguration.OpenAI.ApiKey); - IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new ChatHistorySummarizationReducer(openAiChatService, 2, 4)); + IChatCompletionService chatService = openAiChatService.UsingChatHistoryReducer(new SummarizingChatHistoryReducer(openAiChatService, 2, 4)); var chatHistory = new ChatHistory("You are an expert on the best restaurants in every city. Answer for the city the user has asked about."); diff --git a/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs index 307edbe4b229..79b72003ee89 100644 --- a/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs +++ b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs @@ -8,9 +8,7 @@ namespace ChatCompletion; -/// -/// These examples demonstrate different ways of using chat completion with Ollama API. 
-/// +// The following example shows how to use Semantic Kernel with Ollama Chat Completion API public class Ollama_ChatCompletion(ITestOutputHelper output) : BaseTest(output) { /// @@ -87,7 +85,7 @@ public async Task ServicePromptWithInnerContentAsync() // Assistant message details // Ollama Sharp does not support non-streaming and always perform streaming calls, for this reason, the inner content is always a list of chunks. - var replyInnerContent = reply.InnerContent as ChatDoneResponseStream; + var replyInnerContent = reply.InnerContent as List; OutputInnerContent(replyInnerContent!); } @@ -148,35 +146,42 @@ public async Task ChatPromptWithInnerContentAsync() // Ollama Sharp does not support non-streaming and always perform streaming calls, for this reason, the inner content of a non-streaming result is a list of the generated chunks. var messageContent = functionResult.GetValue(); // Retrieves underlying chat message content from FunctionResult. - var replyInnerContent = messageContent!.InnerContent as ChatDoneResponseStream; // Retrieves inner content from ChatMessageContent. + var replyInnerContent = messageContent!.InnerContent as List; // Retrieves inner content from ChatMessageContent. OutputInnerContent(replyInnerContent!); } /// - /// Retrieve extra information from the final response. + /// Retrieve extra information from each streaming chunk response in a list of chunks. /// - /// The complete OllamaSharp response provided as inner content of a chat message + /// List of streaming chunks provided as inner content of a chat message /// /// This is a breaking glass scenario, any attempt on running with different versions of OllamaSharp library that introduces breaking changes /// may cause breaking changes in the code below. /// - private void OutputInnerContent(ChatDoneResponseStream innerContent) + private void OutputInnerContent(List innerContent) { - Console.WriteLine($$""" - Model: {{innerContent.Model}} - Message role: {{innerContent.Message.Role}} - Message content: {{innerContent.Message.Content}} - Created at: {{innerContent.CreatedAt}} - Done: {{innerContent.Done}} - Done Reason: {{innerContent.DoneReason}} - Eval count: {{innerContent.EvalCount}} - Eval duration: {{innerContent.EvalDuration}} - Load duration: {{innerContent.LoadDuration}} - Total duration: {{innerContent.TotalDuration}} - Prompt eval count: {{innerContent.PromptEvalCount}} - Prompt eval duration: {{innerContent.PromptEvalDuration}} - ------------------------ - """); + Console.WriteLine($"Model: {innerContent![0].Model}"); // Model doesn't change per chunk, so we can get it from the first chunk only + Console.WriteLine(" -- Chunk changing data -- "); + + innerContent.ForEach(streamChunk => + { + Console.WriteLine($"Message role: {streamChunk.Message.Role}"); + Console.WriteLine($"Message content: {streamChunk.Message.Content}"); + Console.WriteLine($"Created at: {streamChunk.CreatedAt}"); + Console.WriteLine($"Done: {streamChunk.Done}"); + /// The last message in the chunk is a type with additional metadata. 
+ if (streamChunk is ChatDoneResponseStream doneStreamChunk) + { + Console.WriteLine($"Done Reason: {doneStreamChunk.DoneReason}"); + Console.WriteLine($"Eval count: {doneStreamChunk.EvalCount}"); + Console.WriteLine($"Eval duration: {doneStreamChunk.EvalDuration}"); + Console.WriteLine($"Load duration: {doneStreamChunk.LoadDuration}"); + Console.WriteLine($"Total duration: {doneStreamChunk.TotalDuration}"); + Console.WriteLine($"Prompt eval count: {doneStreamChunk.PromptEvalCount}"); + Console.WriteLine($"Prompt eval duration: {doneStreamChunk.PromptEvalDuration}"); + } + Console.WriteLine("------------------------"); + }); } } diff --git a/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs index 1713d9a03052..9d6e8cf9e845 100644 --- a/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs +++ b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs @@ -9,7 +9,7 @@ namespace ChatCompletion; /// -/// These examples demonstrate different ways of using chat completion with Ollama API. +/// These examples demonstrate the ways different content types are streamed by Ollama via the chat completion service. /// public class Ollama_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) { @@ -17,11 +17,11 @@ public class Ollama_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest /// This example demonstrates chat completion streaming using Ollama. /// [Fact] - public async Task UsingServiceStreamingWithOllama() + public Task StreamChatAsync() { Assert.NotNull(TestConfiguration.Ollama.ModelId); - Console.WriteLine($"======== Ollama - Chat Completion - {nameof(UsingServiceStreamingWithOllama)} ========"); + Console.WriteLine("======== Ollama - Chat Completion Streaming ========"); using var ollamaClient = new OllamaApiClient( uriString: TestConfiguration.Ollama.Endpoint, @@ -29,39 +29,22 @@ public async Task UsingServiceStreamingWithOllama() var chatService = ollamaClient.AsChatCompletionService(); - Console.WriteLine("Chat content:"); - Console.WriteLine("------------------------"); - - var chatHistory = new ChatHistory("You are a librarian, expert about books"); - this.OutputLastMessage(chatHistory); - - // First user message - chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); - this.OutputLastMessage(chatHistory); - - // First assistant message - await StreamMessageOutputAsync(chatService, chatHistory, AuthorRole.Assistant); - - // Second user message - chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); - this.OutputLastMessage(chatHistory); - - // Second assistant message - await StreamMessageOutputAsync(chatService, chatHistory, AuthorRole.Assistant); + return this.StartStreamingChatAsync(chatService); } /// - /// This example demonstrates retrieving underlying library information through chat completion streaming inner contents. + /// This example demonstrates retrieving extra information chat completion streaming using Ollama. /// /// - /// This is a breaking glass scenario and is more susceptible to break on newer versions of OllamaSharp library. + /// This is a breaking glass scenario, any attempt on running with different versions of OllamaSharp library that introduces breaking changes + /// may cause breaking changes in the code below. 
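For the inner-content handling above, a compact sketch with the generic types written out explicitly may help: because the Ollama connector always streams under the hood, a non-streaming reply exposes a List of OllamaSharp chunk objects, the last of which can be a done chunk with extra metadata. The endpoint and model id below are placeholders; the chunk types and property names match those used in the sample.

```csharp
// Sketch with assumed endpoint/model; chunk types come from the OllamaSharp package.
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using OllamaSharp;
using OllamaSharp.Models.Chat;

using var ollamaClient = new OllamaApiClient(uriString: "http://localhost:11434", defaultModel: "llama3.2");
var chatService = ollamaClient.AsChatCompletionService();

var chatHistory = new ChatHistory("You are a librarian, expert about books");
chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");

var reply = await chatService.GetChatMessageContentAsync(chatHistory);

// The connector always streams under the hood, so the inner content is a list of chunks.
if (reply.InnerContent is List<ChatResponseStream> chunks)
{
    foreach (var chunk in chunks)
    {
        Console.WriteLine($"{chunk.Message.Role}: {chunk.Message.Content} (done: {chunk.Done})");

        // The final chunk carries extra metadata such as token counts and durations.
        if (chunk is ChatDoneResponseStream done)
        {
            Console.WriteLine($"Done reason: {done.DoneReason}, eval count: {done.EvalCount}");
        }
    }
}
```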
/// [Fact] - public async Task UsingServiceStreamingInnerContentsWithOllama() + public async Task StreamChatWithInnerContentAsync() { Assert.NotNull(TestConfiguration.Ollama.ModelId); - Console.WriteLine($"======== Ollama - Chat Completion - {nameof(UsingServiceStreamingInnerContentsWithOllama)} ========"); + Console.WriteLine("======== Ollama - Chat Completion Streaming ========"); using var ollamaClient = new OllamaApiClient( uriString: TestConfiguration.Ollama.Endpoint, @@ -87,15 +70,13 @@ public async Task UsingServiceStreamingInnerContentsWithOllama() } /// - /// Demonstrates how you can template a chat history call while using the for invocation. + /// Demonstrates how you can template a chat history call while using the kernel for invocation. /// [Fact] - public async Task UsingKernelChatPromptStreamingWithOllama() + public async Task StreamChatPromptAsync() { Assert.NotNull(TestConfiguration.Ollama.ModelId); - Console.WriteLine($"======== Ollama - Chat Completion - {nameof(UsingKernelChatPromptStreamingWithOllama)} ========"); - StringBuilder chatPrompt = new(""" You are a librarian, expert about books Hi, I'm looking for book suggestions @@ -118,18 +99,17 @@ public async Task UsingKernelChatPromptStreamingWithOllama() } /// - /// This example demonstrates retrieving underlying library information through chat completion streaming inner contents. + /// Demonstrates how you can template a chat history call and get extra information from the response while using the kernel for invocation. /// /// - /// This is a breaking glass scenario and is more susceptible to break on newer versions of OllamaSharp library. + /// This is a breaking glass scenario, any attempt on running with different versions of OllamaSharp library that introduces breaking changes + /// may cause breaking changes in the code below. /// [Fact] - public async Task UsingKernelChatPromptStreamingInnerContentsWithOllama() + public async Task StreamChatPromptWithInnerContentAsync() { Assert.NotNull(TestConfiguration.Ollama.ModelId); - Console.WriteLine($"======== Ollama - Chat Completion - {nameof(UsingKernelChatPromptStreamingInnerContentsWithOllama)} ========"); - StringBuilder chatPrompt = new(""" You are a librarian, expert about books Hi, I'm looking for book suggestions @@ -159,11 +139,11 @@ public async Task UsingKernelChatPromptStreamingInnerContentsWithOllama() /// and alternatively via the StreamingChatMessageContent.Items property. 
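The summary above mentions that streamed text can be read either from the Content property or from the StreamingChatMessageContent.Items collection. A minimal sketch of both access paths, assuming a chat completion service configured as in the surrounding samples:

```csharp
using System.Linq;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

internal static class StreamingTextExample
{
    // The chat service is assumed to be configured elsewhere,
    // e.g. ollamaClient.AsChatCompletionService() as in the samples above.
    public static async Task StreamTextTwoWaysAsync(IChatCompletionService chatService)
    {
        var chatHistory = new ChatHistory("You are a librarian, expert about books");
        chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");

        await foreach (var update in chatService.GetStreamingChatMessageContentsAsync(chatHistory))
        {
            // Option 1: the aggregated text of this update.
            Console.Write(update.Content);

            // Option 2: the same chunk exposed as strongly typed items.
            foreach (var textItem in update.Items.OfType<StreamingTextContent>())
            {
                // textItem.Text carries the same streamed text as update.Content.
            }
        }
    }
}
```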
/// [Fact] - public async Task UsingStreamingTextFromChatCompletionWithOllama() + public async Task StreamTextFromChatAsync() { Assert.NotNull(TestConfiguration.Ollama.ModelId); - Console.WriteLine($"======== Ollama - Chat Completion - {nameof(UsingStreamingTextFromChatCompletionWithOllama)} ========"); + Console.WriteLine("======== Stream Text from Chat Content ========"); using var ollamaClient = new OllamaApiClient( uriString: TestConfiguration.Ollama.Endpoint, @@ -188,6 +168,53 @@ public async Task UsingStreamingTextFromChatCompletionWithOllama() } } + private async Task StartStreamingChatAsync(IChatCompletionService chatCompletionService) + { + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + this.OutputLastMessage(chatHistory); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + this.OutputLastMessage(chatHistory); + + // First assistant message + await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); + this.OutputLastMessage(chatHistory); + + // Second assistant message + await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); + } + + private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) + { + bool roleWritten = false; + string fullMessage = string.Empty; + + await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + if (!roleWritten && chatUpdate.Role.HasValue) + { + Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); + roleWritten = true; + } + + if (chatUpdate.Content is { Length: > 0 }) + { + fullMessage += chatUpdate.Content; + Console.Write(chatUpdate.Content); + } + } + + Console.WriteLine("\n------------------------"); + chatHistory.AddMessage(authorRole, fullMessage); + } + private async Task StreamMessageOutputFromKernelAsync(Kernel kernel, string prompt) { bool roleWritten = false; @@ -222,26 +249,22 @@ private async Task StreamMessageOutputFromKernelAsync(Kernel kernel, str /// private void OutputInnerContent(ChatResponseStream streamChunk) { - Console.WriteLine($$""" - Model: {{streamChunk.Model}} - Message role: {{streamChunk.Message.Role}} - Message content: {{streamChunk.Message.Content}} - Created at: {{streamChunk.CreatedAt}} - Done: {{streamChunk.Done}} - """); + Console.WriteLine($"Model: {streamChunk.Model}"); + Console.WriteLine($"Message role: {streamChunk.Message.Role}"); + Console.WriteLine($"Message content: {streamChunk.Message.Content}"); + Console.WriteLine($"Created at: {streamChunk.CreatedAt}"); + Console.WriteLine($"Done: {streamChunk.Done}"); /// The last message in the chunk is a type with additional metadata. 
if (streamChunk is ChatDoneResponseStream doneStream) { - Console.WriteLine($$""" - Done Reason: {{doneStream.DoneReason}} - Eval count: {{doneStream.EvalCount}} - Eval duration: {{doneStream.EvalDuration}} - Load duration: {{doneStream.LoadDuration}} - Total duration: {{doneStream.TotalDuration}} - Prompt eval count: {{doneStream.PromptEvalCount}} - Prompt eval duration: {{doneStream.PromptEvalDuration}} - """); + Console.WriteLine($"Done Reason: {doneStream.DoneReason}"); + Console.WriteLine($"Eval count: {doneStream.EvalCount}"); + Console.WriteLine($"Eval duration: {doneStream.EvalDuration}"); + Console.WriteLine($"Load duration: {doneStream.LoadDuration}"); + Console.WriteLine($"Total duration: {doneStream.TotalDuration}"); + Console.WriteLine($"Prompt eval count: {doneStream.PromptEvalCount}"); + Console.WriteLine($"Prompt eval duration: {doneStream.PromptEvalDuration}"); } Console.WriteLine("------------------------"); } diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs index 47c047d5271c..22fb6dbd82f5 100644 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs @@ -7,14 +7,9 @@ namespace ChatCompletion; -/// -/// These examples demonstrate different ways of using chat completion with OpenAI API. -/// +// The following example shows how to use Semantic Kernel with OpenAI API public class OpenAI_ChatCompletion(ITestOutputHelper output) : BaseTest(output) { - /// - /// Sample showing how to use directly with a . - /// [Fact] public async Task ServicePromptAsync() { @@ -23,36 +18,11 @@ public async Task ServicePromptAsync() Console.WriteLine("======== Open AI - Chat Completion ========"); - OpenAIChatCompletionService chatService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); + OpenAIChatCompletionService chatCompletionService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); - Console.WriteLine("Chat content:"); - Console.WriteLine("------------------------"); - - var chatHistory = new ChatHistory("You are a librarian, expert about books"); - - // First user message - chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); - OutputLastMessage(chatHistory); - - // First assistant message - var reply = await chatService.GetChatMessageContentAsync(chatHistory); - chatHistory.Add(reply); - OutputLastMessage(chatHistory); - - // Second user message - chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); - OutputLastMessage(chatHistory); - - // Second assistant message - reply = await chatService.GetChatMessageContentAsync(chatHistory); - chatHistory.Add(reply); - OutputLastMessage(chatHistory); + await StartChatAsync(chatCompletionService); } - /// - /// Sample showing how to use directly with a also exploring the - /// breaking glass approach capturing the underlying instance via . - /// [Fact] public async Task ServicePromptWithInnerContentAsync() { @@ -81,9 +51,6 @@ public async Task ServicePromptWithInnerContentAsync() OutputInnerContent(replyInnerContent!); } - /// - /// Sample showing how to use with chat completion and chat prompt syntax. 
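For the chat prompt syntax referenced just above, a minimal sketch of what such a prompt looks like when invoked through the kernel. The model id, API key source, and prompt text are placeholders, not values taken from the sample.

```csharp
// Sketch only: model id and API key source are placeholders; the prompt text is illustrative.
using Microsoft.SemanticKernel;

var kernel = Kernel.CreateBuilder()
    .AddOpenAIChatCompletion(
        modelId: "gpt-4o-mini",
        apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")!)
    .Build();

// Roles are expressed as <message> tags inside a single prompt string; the prompt template
// engine renders them into a chat history before the request is sent.
const string ChatPrompt = """
    <message role="system">You are a librarian, expert about books</message>
    <message role="user">Hi, I'm looking for book suggestions</message>
    """;

var reply = await kernel.InvokePromptAsync(ChatPrompt);
Console.WriteLine(reply);
```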
- /// [Fact] public async Task ChatPromptAsync() { @@ -170,6 +137,32 @@ public async Task ChatPromptStoreWithMetadataAsync() OutputInnerContent(replyInnerContent!); } + private async Task StartChatAsync(IChatCompletionService chatGPT) + { + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + OutputLastMessage(chatHistory); + + // First assistant message + var reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + OutputLastMessage(chatHistory); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + OutputLastMessage(chatHistory); + + // Second assistant message + reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + OutputLastMessage(chatHistory); + } + /// /// Retrieve extra information from a inner content of type . /// @@ -180,21 +173,19 @@ public async Task ChatPromptStoreWithMetadataAsync() /// private void OutputInnerContent(OpenAI.Chat.ChatCompletion innerContent) { - Console.WriteLine($$""" - Message role: {{innerContent.Role}} // Available as a property of ChatMessageContent - Message content: {{innerContent.Content[0].Text}} // Available as a property of ChatMessageContent - - Model: {{innerContent.Model}} // Model doesn't change per chunk, so we can get it from the first chunk only - Created At: {{innerContent.CreatedAt}} - - Finish reason: {{innerContent.FinishReason}} - Input tokens usage: {{innerContent.Usage.InputTokenCount}} - Output tokens usage: {{innerContent.Usage.OutputTokenCount}} - Total tokens usage: {{innerContent.Usage.TotalTokenCount}} - Refusal: {{innerContent.Refusal}} - Id: {{innerContent.Id}} - System fingerprint: {{innerContent.SystemFingerprint}} - """); + Console.WriteLine($"Message role: {innerContent.Role}"); // Available as a property of ChatMessageContent + Console.WriteLine($"Message content: {innerContent.Content[0].Text}"); // Available as a property of ChatMessageContent + + Console.WriteLine($"Model: {innerContent.Model}"); // Model doesn't change per chunk, so we can get it from the first chunk only + Console.WriteLine($"Created At: {innerContent.CreatedAt}"); + + Console.WriteLine($"Finish reason: {innerContent.FinishReason}"); + Console.WriteLine($"Input tokens usage: {innerContent.Usage.InputTokenCount}"); + Console.WriteLine($"Output tokens usage: {innerContent.Usage.OutputTokenCount}"); + Console.WriteLine($"Total tokens usage: {innerContent.Usage.TotalTokenCount}"); + Console.WriteLine($"Refusal: {innerContent.Refusal} "); + Console.WriteLine($"Id: {innerContent.Id}"); + Console.WriteLine($"System fingerprint: {innerContent.SystemFingerprint}"); if (innerContent.ContentTokenLogProbabilities.Count > 0) { diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs index 7773fadbb76f..0e9fe0326290 100644 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs @@ -8,7 +8,7 @@ namespace ChatCompletion; /// -/// These examples demonstrate different ways of using streaming chat completion with OpenAI API. 
+/// These examples demonstrate the ways different content types are streamed by OpenAI LLM via the chat completion service. /// public class OpenAI_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) { @@ -214,6 +214,37 @@ public async Task StreamFunctionCallContentAsync() } } + private async Task StreamMessageOutputAsync(OpenAIChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) + { + bool roleWritten = false; + string fullMessage = string.Empty; + + await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + if (!roleWritten && chatUpdate.Role.HasValue) + { + Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); + roleWritten = true; + } + + if (chatUpdate.Content is { Length: > 0 }) + { + fullMessage += chatUpdate.Content; + Console.Write(chatUpdate.Content); + } + + // The last message in the chunk has the usage metadata. + // https://platform.openai.com/docs/api-reference/chat/create#chat-create-stream_options + if (chatUpdate.Metadata?["Usage"] is not null) + { + Console.WriteLine(chatUpdate.Metadata["Usage"]?.AsJson()); + } + } + + Console.WriteLine("\n------------------------"); + chatHistory.AddMessage(authorRole, fullMessage); + } + private async Task StreamMessageOutputFromKernelAsync(Kernel kernel, string prompt) { bool roleWritten = false; diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithReasoning.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithReasoning.cs deleted file mode 100644 index b28b45363204..000000000000 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithReasoning.cs +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Text; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using OpenAI.Chat; - -namespace ChatCompletion; - -// The following example shows how to use Semantic Kernel with OpenAI API -public class OpenAI_ChatCompletionWithReasoning(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Sample showing how to use with chat completion and chat prompt syntax. - /// - [Fact] - public async Task ChatPromptWithReasoningAsync() - { - Console.WriteLine("======== Open AI - Chat Completion with Reasoning ========"); - - Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); - Assert.NotNull(TestConfiguration.OpenAI.ApiKey); - - var kernel = Kernel.CreateBuilder() - .AddOpenAIChatCompletion( - modelId: TestConfiguration.OpenAI.ChatModelId, - apiKey: TestConfiguration.OpenAI.ApiKey) - .Build(); - - // Create execution settings with low reasoning effort. - var executionSettings = new OpenAIPromptExecutionSettings //OpenAIPromptExecutionSettings - { - MaxTokens = 2000, - ReasoningEffort = ChatReasoningEffortLevel.Low // Only available for reasoning models (i.e: o3-mini, o1, ...) - }; - - // Create KernelArguments using the execution settings. - var kernelArgs = new KernelArguments(executionSettings); - - StringBuilder chatPrompt = new(""" - You are an expert software engineer, specialized in the Semantic Kernel SDK and NET framework - Hi, Please craft me an example code in .NET using Semantic Kernel that implements a chat loop . - """); - - // Invoke the prompt with high reasoning effort. 
- var reply = await kernel.InvokePromptAsync(chatPrompt.ToString(), kernelArgs); - - Console.WriteLine(reply); - } - - /// - /// Sample showing how to use directly with a . - /// - [Fact] - public async Task ServicePromptWithReasoningAsync() - { - Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); - Assert.NotNull(TestConfiguration.OpenAI.ApiKey); - - Console.WriteLine("======== Open AI - Chat Completion with Reasoning ========"); - - OpenAIChatCompletionService chatCompletionService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); - - // Create execution settings with low reasoning effort. - var executionSettings = new OpenAIPromptExecutionSettings - { - MaxTokens = 2000, - ReasoningEffort = ChatReasoningEffortLevel.Low // Only available for reasoning models (i.e: o3-mini, o1, ...) - }; - - // Create a ChatHistory and add messages. - var chatHistory = new ChatHistory(); - chatHistory.AddDeveloperMessage( - "You are an expert software engineer, specialized in the Semantic Kernel SDK and .NET framework."); - chatHistory.AddUserMessage( - "Hi, Please craft me an example code in .NET using Semantic Kernel that implements a chat loop."); - - // Instead of a prompt string, call GetChatMessageContentAsync with the chat history. - var reply = await chatCompletionService.GetChatMessageContentAsync( - chatHistory: chatHistory, - executionSettings: executionSettings); - - Console.WriteLine(reply); - } -} diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs index fa014ede905f..c36b1d945c67 100644 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs @@ -5,33 +5,26 @@ using Microsoft.SemanticKernel; using OpenAI; -#pragma warning disable CA5399 // HttpClient is created without enabling CheckCertificateRevocationList - namespace ChatCompletion; -/// -/// This example shows a way of using a Custom HttpClient and HttpHandler with OpenAI Connector to capture -/// the request Uri and Headers for each request. 
-/// public sealed class OpenAI_CustomClient(ITestOutputHelper output) : BaseTest(output) { [Fact] - public async Task UsingCustomHttpClientWithOpenAI() + public async Task RunAsync() { Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); Assert.NotNull(TestConfiguration.OpenAI.ApiKey); - Console.WriteLine($"======== Open AI - {nameof(UsingCustomHttpClientWithOpenAI)} ========"); + Console.WriteLine("======== Using a custom OpenAI client ========"); // Create an HttpClient and include your custom header(s) - using var myCustomHttpHandler = new MyCustomClientHttpHandler(Output); - using var myCustomClient = new HttpClient(handler: myCustomHttpHandler); - myCustomClient.DefaultRequestHeaders.Add("My-Custom-Header", "My Custom Value"); + using var httpClient = new HttpClient(); + httpClient.DefaultRequestHeaders.Add("My-Custom-Header", "My Custom Value"); // Configure AzureOpenAIClient to use the customized HttpClient var clientOptions = new OpenAIClientOptions { - Transport = new HttpClientPipelineTransport(myCustomClient), + Transport = new HttpClientPipelineTransport(httpClient), NetworkTimeout = TimeSpan.FromSeconds(30), RetryPolicy = new ClientRetryPolicy() }; @@ -52,30 +45,8 @@ public async Task UsingCustomHttpClientWithOpenAI() kernel.Plugins["FunPlugin"]["Excuses"], new() { ["input"] = "I have no homework" } ); - Console.WriteLine(result.GetValue()); - myCustomClient.Dispose(); - } - - /// - /// Normally you would use a custom HttpClientHandler to add custom logic to your custom http client - /// This uses the ITestOutputHelper to write the requested URI to the test output - /// - /// The to write the requested URI to the test output - private sealed class MyCustomClientHttpHandler(ITestOutputHelper output) : HttpClientHandler - { - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - output.WriteLine($"Requested URI: {request.RequestUri}"); - - request.Headers.Where(h => h.Key != "Authorization") - .ToList() - .ForEach(h => output.WriteLine($"{h.Key}: {string.Join(", ", h.Value)}")); - output.WriteLine("--------------------------------"); - - // Add custom logic here - return await base.SendAsync(request, cancellationToken); - } + httpClient.Dispose(); } } diff --git a/dotnet/samples/Concepts/Concepts.csproj b/dotnet/samples/Concepts/Concepts.csproj index 728dce6b41fb..746d5fbb73cf 100644 --- a/dotnet/samples/Concepts/Concepts.csproj +++ b/dotnet/samples/Concepts/Concepts.csproj @@ -8,18 +8,15 @@ false true - $(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001,CA1724 + $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001,CA1724 Library 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 - - - @@ -55,10 +52,8 @@ - - @@ -86,7 +81,6 @@ - @@ -148,6 +142,9 @@ Always + + Always + diff --git a/dotnet/samples/Concepts/Filtering/AzureOpenAI_DeploymentSwitch.cs b/dotnet/samples/Concepts/Filtering/AzureOpenAI_DeploymentSwitch.cs deleted file mode 100644 index 012ce73d2845..000000000000 --- a/dotnet/samples/Concepts/Filtering/AzureOpenAI_DeploymentSwitch.cs +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Azure.Identity; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; - -namespace Filtering; - -/// -/// This sample shows how to switch between Azure OpenAI deployments based on the functions that are being called. -/// This can be useful if semantic caching is enabled and you want to switch to a different deployment based on the functions that are being called. -/// -public class AzureOpenAI_DeploymentSwitch(ITestOutputHelper output) : BaseTest(output) -{ - [Fact] - public async Task DeploymentSwitchAsync() - { - Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName); - Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint); - - // Create a logging handler to output HTTP requests and responses - using var httpHandler = new HttpClientHandler(); - using var loggingHandler = new LoggingHandler(httpHandler, this.Output); - using var httpClient = new HttpClient(loggingHandler); - - // Create KernelBuilder with an auto function invocation filter - var kernelBuilder = Kernel.CreateBuilder(); - kernelBuilder.Services.AddSingleton(new AutoFunctionInvocationFilter(this.Output)); - - // Define the endpoints for the two Azure OpenAI services - var endpoint1 = "https://contoso-eastus.openai.azure.com/"; - var endpoint2 = "https://contoso-swedencentral.openai.azure.com/"; - - // Add Azure OpenAI chat completion services - kernelBuilder.AddAzureOpenAIChatCompletion( - serviceId: "eastus", - deploymentName: "gpt-4o-mini", - endpoint: endpoint1, - credentials: new AzureCliCredential(), - httpClient: httpClient, - modelId: TestConfiguration.AzureOpenAI.ChatModelId); - kernelBuilder.AddAzureOpenAIChatCompletion( - serviceId: "swedencentral", - deploymentName: "gpt-4o", - endpoint: endpoint2, - credentials: new AzureCliCredential(), - httpClient: httpClient, - modelId: TestConfiguration.AzureOpenAI.ChatModelId); - - var kernel = kernelBuilder.Build(); - - kernel.ImportPluginFromFunctions("HelperFunctions", - [ - kernel.CreateFunctionFromMethod(() => "Brown", "GetEyeColor", "Retrieves eye color for the current user."), - kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("R"), "GetCurrentDateTimeInUtc", "Retrieves the current date time in UTC."), - ]); - - OpenAIPromptExecutionSettings settings = new() - { - ServiceId = "swedencentral", - FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() - }; - - var reply = await kernel.InvokePromptAsync("What time is it and what is my eye color and what time is it?", new(settings)); - - Console.WriteLine(reply); - } - - private sealed class AutoFunctionInvocationFilter(ITestOutputHelper output) : IAutoFunctionInvocationFilter - { - public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) - { - var kernel = context.Kernel; - var chatHistory = context.ChatHistory; - var executionSettings = context.ExecutionSettings; - var functionCalls = FunctionCallContent.GetFunctionCalls(context.ChatHistory.Last()); - - if (executionSettings is not null && "swedencentral".Equals(executionSettings.ServiceId, StringComparison.Ordinal)) - { - bool includesGetEyeColor = functionCalls.Any(fc => fc.FunctionName.Equals("GetEyeColor", StringComparison.Ordinal)); - - // For the "GetEyeColor" function, switch to a different deployment. - // If the function is not present in the collection of function calls, proceed with the request as usual. 
- if (!includesGetEyeColor) - { - await next(context); - } - else - { - output.WriteLine("Switching to use eastus deployment"); - - chatHistory.RemoveAt(chatHistory.Count - 1); - - IChatCompletionService chatCompletionService = kernel.Services.GetRequiredKeyedService("eastus"); - - OpenAIPromptExecutionSettings settings = new() - { - ServiceId = "eastus", - FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() - }; - - var chatContent = await chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, context.Kernel); - - context.Result = new FunctionResult(context.Result, chatContent); - context.Terminate = true; - } - } - else - { - await next(context); - } - } - } -} diff --git a/dotnet/samples/Concepts/LocalModels/HuggingFace_ChatCompletionWithTGI.cs b/dotnet/samples/Concepts/LocalModels/HuggingFace_ChatCompletionWithTGI.cs new file mode 100644 index 000000000000..c1b3372d071e --- /dev/null +++ b/dotnet/samples/Concepts/LocalModels/HuggingFace_ChatCompletionWithTGI.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; + +#pragma warning disable format // Format item can be simplified +#pragma warning disable CA1861 // Avoid constant arrays as arguments + +namespace LocalModels; + +// The following example shows how to use Semantic Kernel with HuggingFace API. +public class HuggingFace_ChatCompletionWithTGI(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// Follow steps in to setup HuggingFace local Text Generation Inference HTTP server. + /// + [Fact(Skip = "Requires TGI (text generation inference) deployment")] + public async Task RunTGI_ChatCompletionAsync() + { + Console.WriteLine("\n======== HuggingFace - TGI Chat Completion ========\n"); + + // This example was run against one of the chat completion (Message API) supported models from HuggingFace, listed in here: + // Starting a Local Docker i.e: + // docker run --gpus all --shm-size 1g -p 8080:80 -v "F:\temp\huggingface:/data" ghcr.io/huggingface/text-generation-inference:1.4 --model-id teknium/OpenHermes-2.5-Mistral-7B + + // HuggingFace local HTTP server endpoint + var endpoint = new Uri("http://localhost:8080"); + + const string Model = "teknium/OpenHermes-2.5-Mistral-7B"; + + Kernel kernel = Kernel.CreateBuilder() + .AddHuggingFaceChatCompletion( + model: Model, + endpoint: endpoint) + .Build(); + + var chatCompletion = kernel.GetRequiredService(); + var chatHistory = new ChatHistory("You are a helpful assistant.") + { + new ChatMessageContent(AuthorRole.User, "What is deep learning?") + }; + + var result = await chatCompletion.GetChatMessageContentAsync(chatHistory); + + Console.WriteLine(result.Role); + Console.WriteLine(result.Content); + } + + /// + /// Follow steps in to setup HuggingFace local Text Generation Inference HTTP server. 
+ /// + [Fact(Skip = "Requires TGI (text generation inference) deployment")] + public async Task RunTGI_StreamingChatCompletionAsync() + { + Console.WriteLine("\n======== HuggingFace - TGI Chat Completion Streaming ========\n"); + + // This example was run against one of the chat completion (Message API) supported models from HuggingFace, listed in here: + // Starting a Local Docker i.e: + // docker run --gpus all --shm-size 1g -p 8080:80 -v "F:\temp\huggingface:/data" ghcr.io/huggingface/text-generation-inference:1.4 --model-id teknium/OpenHermes-2.5-Mistral-7B + + // HuggingFace local HTTP server endpoint + var endpoint = new Uri("http://localhost:8080"); + + const string Model = "teknium/OpenHermes-2.5-Mistral-7B"; + + Kernel kernel = Kernel.CreateBuilder() + .AddHuggingFaceChatCompletion( + model: Model, + endpoint: endpoint) + .Build(); + + var chatCompletion = kernel.GetRequiredService(); + var chatHistory = new ChatHistory("You are a helpful assistant.") + { + new ChatMessageContent(AuthorRole.User, "What is deep learning?") + }; + + AuthorRole? role = null; + await foreach (var chatMessageChunk in chatCompletion.GetStreamingChatMessageContentsAsync(chatHistory)) + { + if (role is null) + { + role = chatMessageChunk.Role; + Console.Write(role); + } + Console.Write(chatMessageChunk.Content); + } + } +} diff --git a/dotnet/samples/Concepts/LocalModels/MultipleProviders_ChatCompletion.cs b/dotnet/samples/Concepts/LocalModels/MultipleProviders_ChatCompletion.cs new file mode 100644 index 000000000000..ec118d27e977 --- /dev/null +++ b/dotnet/samples/Concepts/LocalModels/MultipleProviders_ChatCompletion.cs @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; + +namespace LocalModels; + +/// +/// This example shows a way of using OpenAI connector with other APIs that supports the same ChatCompletion Message API standard from OpenAI. +/// +/// To proceed with this example will be necessary to follow those steps: +/// 1. Install LMStudio Platform in your environment +/// 2. Open LM Studio +/// 3. Search and Download both Phi2 and Llama2 models (preferably the ones that uses 8GB RAM or more) +/// 4. Start the Message API Server on http://localhost:1234 +/// 5. Run the examples. +/// +/// OR +/// +/// 1. Start the Ollama Message API Server on http://localhost:11434 using docker +/// 2. docker run -d --gpus=all -v "d:\temp\ollama:/root/.ollama" -p 11434:11434 --name ollama ollama/ollama +/// 3. Set Llama2 as the current ollama model: docker exec -it ollama ollama run llama2 +/// 4. Run the Ollama examples. +/// +/// OR +/// +/// 1. Start the LocalAI Message API Server on http://localhost:8080 +/// 2. docker run -ti -p 8080:8080 localai/localai:v2.12.3-ffmpeg-core phi-2 +/// 3. Run the LocalAI examples. 
+/// +public class MultipleProviders_ChatCompletion(ITestOutputHelper output) : BaseTest(output) +{ + [Theory(Skip = "Manual configuration needed")] + [InlineData("LMStudio", "http://localhost:1234", "llama2")] // Setup Llama2 as the model in LM Studio UI and start the Message API Server on http://localhost:1234 + [InlineData("Ollama", "http://localhost:11434", "llama2")] // Start the Ollama Message API Server on http://localhost:11434 using docker + [InlineData("LocalAI", "http://localhost:8080", "phi-2")] + public async Task LocalModel_ExampleAsync(string messageAPIPlatform, string url, string modelId) + { + Console.WriteLine($"Example using local {messageAPIPlatform}"); + // Setup Llama2 as the model in LM Studio UI. + + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: modelId, + apiKey: null, + endpoint: new Uri(url)) + .Build(); + + var prompt = @"Rewrite the text between triple backticks into a business mail. Use a professional tone, be clear and concise. + Sign the mail as AI Assistant. + + Text: ```{{$input}}```"; + + var mailFunction = kernel.CreateFunctionFromPrompt(prompt, new OpenAIPromptExecutionSettings + { + TopP = 0.5, + MaxTokens = 1000, + }); + + var response = await kernel.InvokeAsync(mailFunction, new() { ["input"] = "Tell David that I'm going to finish the business plan by the end of the week." }); + Console.WriteLine(response); + } + + [Theory(Skip = "Manual configuration needed")] + [InlineData("LMStudio", "http://localhost:1234", "llama2")] // Setup Llama2 as the model in LM Studio UI and start the Message API Server on http://localhost:1234 + [InlineData("Ollama", "http://localhost:11434", "llama2")] // Start the Ollama Message API Server on http://localhost:11434 using docker + [InlineData("LocalAI", "http://localhost:8080", "phi-2")] + public async Task LocalModel_StreamingExampleAsync(string messageAPIPlatform, string url, string modelId) + { + Console.WriteLine($"Example using local {messageAPIPlatform}"); + + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: modelId, + apiKey: null, + endpoint: new Uri(url)) + .Build(); + + var prompt = @"Rewrite the text between triple backticks into a business mail. Use a professional tone, be clear and concise. + Sign the mail as AI Assistant. + + Text: ```{{$input}}```"; + + var mailFunction = kernel.CreateFunctionFromPrompt(prompt, new OpenAIPromptExecutionSettings + { + TopP = 0.5, + MaxTokens = 1000, + }); + + await foreach (var word in kernel.InvokeStreamingAsync(mailFunction, new() { ["input"] = "Tell David that I'm going to finish the business plan by the end of the week." 
})) + { + Console.WriteLine(word); + } + } +} diff --git a/dotnet/samples/Concepts/Plugins/CopilotAgentBasedPlugins.cs b/dotnet/samples/Concepts/Plugins/CopilotAgentBasedPlugins.cs index ee862bdabc8e..a711747e84e0 100644 --- a/dotnet/samples/Concepts/Plugins/CopilotAgentBasedPlugins.cs +++ b/dotnet/samples/Concepts/Plugins/CopilotAgentBasedPlugins.cs @@ -100,57 +100,16 @@ private void WriteSampleHeadingToConsole(string pluginToTest, string functionToT } private static readonly HashSet s_fieldsToIgnore = new( [ - "@odata.type", - "attachments", - "allowNewTimeProposals", - "bccRecipients", "bodyPreview", - "calendar", "categories", - "ccRecipients", - "changeKey", "conversationId", - "coordinates", "conversationIndex", - "createdDateTime", - "discriminator", - "lastModifiedDateTime", - "locations", - "extensions", - "flag", - "from", - "hasAttachments", - "iCalUId", - "id", "inferenceClassification", "internetMessageHeaders", - "instances", - "isCancelled", "isDeliveryReceiptRequested", - "isDraft", - "isOrganizer", - "isRead", - "isReadReceiptRequested", "multiValueExtendedProperties", - "onlineMeeting", - "onlineMeetingProvider", - "onlineMeetingUrl", - "organizer", - "originalStart", - "parentFolderId", - "range", - "receivedDateTime", - "recurrence", - "replyTo", - "sender", - "sentDateTime", - "seriesMasterId", "singleValueExtendedProperties", - "transactionId", - "time", "uniqueBody", - "uniqueId", - "uniqueIdType", "webLink", ], StringComparer.OrdinalIgnoreCase @@ -176,42 +135,26 @@ private void WriteSampleHeadingToConsole(string pluginToTest, string functionToT { return schema; } - - TrimPropertiesFromJsonNode(jsonNode); - - return KernelJsonSchema.Parse(node.ToString()); - } - private static void TrimPropertiesFromJsonNode(JsonNode jsonNode) - { - if (jsonNode is not JsonObject jsonObject) - { - return; - } - if (jsonObject.TryGetPropertyValue(RequiredPropertyName, out var requiredRawValue) && requiredRawValue is JsonArray requiredArray) + if (jsonNode.TryGetPropertyValue(RequiredPropertyName, out var requiredRawValue) && requiredRawValue is JsonArray requiredArray) { jsonNode[RequiredPropertyName] = new JsonArray(requiredArray.Where(x => x is not null).Select(x => x!.GetValue()).Where(x => !s_fieldsToIgnore.Contains(x)).Select(x => JsonValue.Create(x)).ToArray()); } - if (jsonObject.TryGetPropertyValue(PropertiesPropertyName, out var propertiesRawValue) && propertiesRawValue is JsonObject propertiesObject) + + if (jsonNode.TryGetPropertyValue(PropertiesPropertyName, out var propertiesRawValue) && propertiesRawValue is JsonObject propertiesObject) { - var properties = propertiesObject.Where(x => s_fieldsToIgnore.Contains(x.Key)).Select(static x => x.Key).ToArray(); + var properties = propertiesObject.Where(x => s_fieldsToIgnore.Contains(x.Key)).Select(x => x.Key).ToArray(); foreach (var property in properties) { propertiesObject.Remove(property); } } - foreach (var subProperty in jsonObject) - { - if (subProperty.Value is not null) - { - TrimPropertiesFromJsonNode(subProperty.Value); - } - } + + return KernelJsonSchema.Parse(node.ToString()); } private static readonly RestApiParameterFilter s_restApiParameterFilter = (RestApiParameterFilterContext context) => { - if (("me_sendMail".Equals(context.Operation.Id, StringComparison.OrdinalIgnoreCase) || - ("me_calendar_CreateEvents".Equals(context.Operation.Id, StringComparison.OrdinalIgnoreCase)) && - "payload".Equals(context.Parameter.Name, StringComparison.OrdinalIgnoreCase))) + if ("me_CreateMessages".Equals(context.Operation.Id, 
StringComparison.OrdinalIgnoreCase) && + "payload".Equals(context.Parameter.Name, StringComparison.OrdinalIgnoreCase)) { context.Parameter.Schema = TrimPropertiesFromRequestBody(context.Parameter.Schema); return context.Parameter; diff --git a/dotnet/samples/Concepts/Plugins/CrewAI_Plugin.cs b/dotnet/samples/Concepts/Plugins/CrewAI_Plugin.cs deleted file mode 100644 index cf0de1188055..000000000000 --- a/dotnet/samples/Concepts/Plugins/CrewAI_Plugin.cs +++ /dev/null @@ -1,108 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Plugins.AI.CrewAI; - -namespace Plugins; - -/// -/// This example shows how to interact with an existing CrewAI Enterprise Crew directly or as a plugin. -/// These examples require a valid CrewAI Enterprise deployment with an endpoint, auth token, and known inputs. -/// -public class CrewAI_Plugin(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Shows how to kickoff an existing CrewAI Enterprise Crew and wait for it to complete. - /// - [Fact] - public async Task UsingCrewAIEnterpriseAsync() - { - string crewAIEndpoint = TestConfiguration.CrewAI.Endpoint; - string crewAIAuthToken = TestConfiguration.CrewAI.AuthToken; - - var crew = new CrewAIEnterprise( - endpoint: new Uri(crewAIEndpoint), - authTokenProvider: async () => crewAIAuthToken); - - // The required inputs for the Crew must be known in advance. This example is modeled after the - // Enterprise Content Marketing Crew Template and requires the following inputs: - var inputs = new - { - company = "CrewAI", - topic = "Agentic products for consumers", - }; - - // Invoke directly with our inputs - var kickoffId = await crew.KickoffAsync(inputs); - Console.WriteLine($"CrewAI Enterprise Crew kicked off with ID: {kickoffId}"); - - // Wait for completion - var result = await crew.WaitForCrewCompletionAsync(kickoffId); - Console.WriteLine("CrewAI Enterprise Crew completed with the following result:"); - Console.WriteLine(result); - } - - /// - /// Shows how to kickoff an existing CrewAI Enterprise Crew as a plugin. - /// - [Fact] - public async Task UsingCrewAIEnterpriseAsPluginAsync() - { - string crewAIEndpoint = TestConfiguration.CrewAI.Endpoint; - string crewAIAuthToken = TestConfiguration.CrewAI.AuthToken; - string openAIModelId = TestConfiguration.OpenAI.ChatModelId; - string openAIApiKey = TestConfiguration.OpenAI.ApiKey; - - if (openAIModelId is null || openAIApiKey is null) - { - Console.WriteLine("OpenAI credentials not found. Skipping example."); - return; - } - - // Setup the Kernel and AI Services - Kernel kernel = Kernel.CreateBuilder() - .AddOpenAIChatCompletion( - modelId: openAIModelId, - apiKey: openAIApiKey) - .Build(); - - var crew = new CrewAIEnterprise( - endpoint: new Uri(crewAIEndpoint), - authTokenProvider: async () => crewAIAuthToken); - - // The required inputs for the Crew must be known in advance. This example is modeled after the - // Enterprise Content Marketing Crew Template and requires string inputs for the company and topic. - // We need to describe the type and purpose of each input to allow the LLM to invoke the crew as expected. - var crewPluginDefinitions = new[] - { - new CrewAIInputMetadata(Name: "company", Description: "The name of the company that should be researched", Type: typeof(string)), - new CrewAIInputMetadata(Name: "topic", Description: "The topic that should be researched", Type: typeof(string)), - }; - - // Create the CrewAI Plugin. 
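Returning to the schema-trimming helper in CopilotAgentBasedPlugins above, a standalone sketch of the same idea on a concrete payload: ignored field names are dropped from both the "required" array and the "properties" object of a JSON schema. The schema literal and the ignored-field set here are illustrative assumptions, not the sample's actual data.

```csharp
// Standalone sketch (not the sample's exact code) of trimming ignored fields from a JSON schema.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json.Nodes;

var ignored = new HashSet<string>(new[] { "bodyPreview", "categories" }, StringComparer.OrdinalIgnoreCase);

var schema = JsonNode.Parse("""
    {
      "type": "object",
      "required": [ "subject", "bodyPreview" ],
      "properties": {
        "subject": { "type": "string" },
        "bodyPreview": { "type": "string" },
        "categories": { "type": "array" }
      }
    }
    """)!.AsObject();

// Rebuild "required" without the ignored names.
if (schema.TryGetPropertyValue("required", out var required) && required is JsonArray requiredArray)
{
    schema["required"] = new JsonArray(requiredArray
        .Select(x => x!.GetValue<string>())
        .Where(x => !ignored.Contains(x))
        .Select(x => (JsonNode)JsonValue.Create(x))
        .ToArray());
}

// Remove the ignored entries from "properties".
if (schema.TryGetPropertyValue("properties", out var props) && props is JsonObject propsObject)
{
    foreach (var name in propsObject.Where(p => ignored.Contains(p.Key)).Select(p => p.Key).ToArray())
    {
        propsObject.Remove(name);
    }
}

Console.WriteLine(schema.ToJsonString()); // "required" and "properties" now contain only "subject"
```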
This builds a plugin that can be added to the Kernel and invoked like any other plugin. - // The plugin will contain the following functions: - // - Kickoff: Starts the Crew with the specified inputs and returns the Id of the scheduled kickoff. - // - KickoffAndWait: Starts the Crew with the specified inputs and waits for the Crew to complete before returning the result. - // - WaitForCrewCompletion: Waits for the specified Crew kickoff to complete and returns the result. - // - GetCrewKickoffStatus: Gets the status of the specified Crew kickoff. - var crewPlugin = crew.CreateKernelPlugin( - name: "EnterpriseContentMarketingCrew", - description: "Conducts thorough research on the specified company and topic to identify emerging trends, analyze competitor strategies, and gather data-driven insights.", - inputMetadata: crewPluginDefinitions); - - // Add the plugin to the Kernel - kernel.Plugins.Add(crewPlugin); - - // Invoke the CrewAI Plugin directly as shown below, or use automaic function calling with an LLM. - var kickoffAndWaitFunction = crewPlugin["KickoffAndWait"]; - var result = await kernel.InvokeAsync( - function: kickoffAndWaitFunction, - arguments: new() - { - ["company"] = "CrewAI", - ["topic"] = "Consumer AI Products" - }); - - Console.WriteLine(result); - } -} diff --git a/dotnet/samples/Concepts/README.md b/dotnet/samples/Concepts/README.md index 2c213d423790..1fb0d0ffe9d6 100644 --- a/dotnet/samples/Concepts/README.md +++ b/dotnet/samples/Concepts/README.md @@ -13,7 +13,7 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=NameSpa Example for `ChatCompletion/OpenAI_ChatCompletion.cs` file, targeting the `ChatPromptSync` test: ```powershell -dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCompletion.OpenAI_ChatCompletion.ChatPromptAsync" +dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCompletion.OpenAI_ChatCompletion.ChatPromptSync" ``` ## Table of Contents @@ -55,7 +55,6 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom - [AzureAIInference_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletion.cs) - [AzureAIInference_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletionStreaming.cs) - [AzureOpenAI_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion.cs) -- [AzureOpenAI_ChatCompletionWithReasoning](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionWithReasoning.cs) - [AzureOpenAI_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionStreaming.cs) - [AzureOpenAI_CustomClient](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_CustomClient.cs) - [AzureOpenAIWithData_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs) @@ -69,31 +68,22 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom - [Google_GeminiChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletionStreaming.cs) - 
[Google_GeminiGetModelResult](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Google_GeminiGetModelResult.cs) - [Google_GeminiVision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Google_GeminiVision.cs) -- [HuggingFace_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletion.cs) -- [HuggingFace_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/HuggingFace_ChatCompletionStreaming.cs) -- [HybridCompletion_Fallback](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/HybridCompletion_Fallback.cs) -- [LMStudio_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletion.cs) -- [LMStudio_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/LMStudio_ChatCompletionStreaming.cs) -- [MistralAI_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_ChatCompletion.cs) -- [MistralAI_ChatPrompt](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_ChatPrompt.cs) -- [MistralAI_FunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_FunctionCalling.cs) -- [MistralAI_StreamingFunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_StreamingFunctionCalling.cs) -- [MultipleProviders_ChatHistoryReducer](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MuiltipleProviders_ChatHistoryReducer.cs) -- [Ollama_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs) -- [Ollama_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs) -- [Onnx_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletion.cs) -- [Onnx_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletionStreaming.cs) - [OpenAI_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs) - [OpenAI_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs) -- [OpenAI_ChatCompletionWithReasoning](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithReasoning.cs) - [OpenAI_ChatCompletionWithVision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithVision.cs) - [OpenAI_CustomClient](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs) +- [OpenAI_UsingLogitBias](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_UsingLogitBias.cs) - 
[OpenAI_FunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCalling.cs) -- [OpenAI_FunctionCallingWithMemoryPlugin](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCallingWithMemoryPlugin.cs) - [OpenAI_ReasonedFunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ReasonedFunctionCalling.cs) -- [OpenAI_RepeatedFunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_RepeatedFunctionCalling.cs) -- [OpenAI_StructuredOutputs](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_StructuredOutputs.cs) -- [OpenAI_UsingLogitBias](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_UsingLogitBias.cs) +- [MultipleProviders_ChatHistoryReducer](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MuiltipleProviders_ChatHistoryReducer.cs) +- [MistralAI_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_ChatCompletion.cs) +- [MistralAI_ChatPrompt](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_ChatPrompt.cs) +- [MistralAI_FunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_FunctionCalling.cs) +- [MistralAI_StreamingFunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_StreamingFunctionCalling.cs) +- [Onnx_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletion.cs) +- [Onnx_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletionStreaming.cs) +- [Ollama_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs) +- [Ollama_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs) ### DependencyInjection - Examples on using `DI Container` @@ -112,7 +102,6 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom - [PromptRenderFiltering](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/PromptRenderFiltering.cs) - [RetryWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/RetryWithFilters.cs) - [TelemetryWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/TelemetryWithFilters.cs) -- [AzureOpenAI_DeploymentSwitch](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/AzureOpenAI_DeploymentSwitch.cs) ### Functions - Invoking [`Method`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs) or [`Prompt`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs) functions with [`Kernel`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/Kernel.cs) @@ -130,6 +119,11 @@ 
dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom - [HuggingFace_ImageToText](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ImageToText/HuggingFace_ImageToText.cs) +### LocalModels - Running models locally + +- [HuggingFace_ChatCompletionWithTGI](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/LocalModels/HuggingFace_ChatCompletionWithTGI.cs) +- [MultipleProviders_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/LocalModels/MultipleProviders_ChatCompletion.cs) + ### Memory - Using AI [`Memory`](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/SemanticKernel.Abstractions/Memory) concepts - [OpenAI_EmbeddingGeneration](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/OpenAI_EmbeddingGeneration.cs) @@ -172,7 +166,6 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom - [CreatePluginFromOpenApiSpec_Jira](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Jira.cs) - [CreatePluginFromOpenApiSpec_Klarna](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Klarna.cs) - [CreatePluginFromOpenApiSpec_RepairService](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_RepairService.cs) -- [CrewAI_Plugin](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CrewAI_Plugin.cs) - [OpenApiPlugin_PayloadHandling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/OpenApiPlugin_PayloadHandling.cs) - [OpenApiPlugin_CustomHttpContentReader](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/OpenApiPlugin_CustomHttpContentReader.cs) - [OpenApiPlugin_Customization](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/OpenApiPlugin_Customization.cs) diff --git a/dotnet/samples/Concepts/Resources/Agents/ParrotAgent.yaml b/dotnet/samples/Concepts/Resources/Agents/ParrotAgent.yaml new file mode 100644 index 000000000000..26a07cf04cf3 --- /dev/null +++ b/dotnet/samples/Concepts/Resources/Agents/ParrotAgent.yaml @@ -0,0 +1,9 @@ +name: Parrot +template_format: semantic-kernel +template: | + Repeat the user message in the voice of a pirate and then end with {{$count}} parrot sounds. +description: A fun chat bot that repeats the user message in the voice of a pirate. +input_variables: + - name: count + description: The number of parrot sounds. + is_required: true diff --git a/dotnet/samples/Concepts/Resources/Agents/ToolAgent.yaml b/dotnet/samples/Concepts/Resources/Agents/ToolAgent.yaml new file mode 100644 index 000000000000..474fd86a46ad --- /dev/null +++ b/dotnet/samples/Concepts/Resources/Agents/ToolAgent.yaml @@ -0,0 +1,7 @@ +name: ToolRunner +template_format: semantic-kernel +template: | + Respond to the user using the single best tool. + If no tool is appropriate, let the user know you only provide responses using tools. + When reporting a tool result, start with, "The tool I used informed me that" +description: Determines if a tool can be utilized to accomplish a result. 
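Note: the two YAML resources added above (ParrotAgent.yaml and ToolAgent.yaml) use the `semantic-kernel` prompt template format and declare their input variables inline. As a minimal sketch only, assuming the Microsoft.SemanticKernel.Agents.Core and Microsoft.SemanticKernel.Yaml packages and their `KernelFunctionYaml.ToPromptTemplateConfig`, `ChatCompletionAgent`, and `KernelPromptTemplateFactory` APIs (the samples elsewhere in this change may consume these resources differently), such a template could be turned into a templated agent roughly like this:

```csharp
// Minimal sketch: load a YAML prompt template (e.g. ParrotAgent.yaml) into a templated agent.
// API names and the model id below are assumptions for illustration, not part of this change.
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;

// A Kernel configured with any chat completion service; OpenAI is used here as a placeholder.
Kernel kernel = Kernel.CreateBuilder()
    .AddOpenAIChatCompletion(
        modelId: "gpt-4o-mini",
        apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")!)
    .Build();

// Read the template and parse it, including its declared input variables (here: "count").
string yaml = File.ReadAllText("Resources/Agents/ParrotAgent.yaml");
PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(yaml);

// Build an agent whose instructions come from the template; "count" is bound via arguments.
ChatCompletionAgent agent = new(templateConfig, new KernelPromptTemplateFactory())
{
    Kernel = kernel,
    Arguments = new KernelArguments { ["count"] = 3 },
};

// The agent can then be invoked with a ChatHistory via agent.InvokeAsync(...).
```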
diff --git a/dotnet/samples/Concepts/Resources/Agents/travelinfo.txt b/dotnet/samples/Concepts/Resources/Agents/travelinfo.txt new file mode 100644 index 000000000000..21665c82198e --- /dev/null +++ b/dotnet/samples/Concepts/Resources/Agents/travelinfo.txt @@ -0,0 +1,217 @@ +Invoice Booking Reference LMNOPQ Trip ID - 11110011111 +Passenger Name(s) +MARKS/SAM ALBERT Agent W2 + + +MICROSOFT CORPORATION 14820 NE 36TH STREET REDMOND WA US 98052 + +American Express Global Business Travel Microsoft Travel +14711 NE 29th Place, Suite 215 +Bellevue, WA 98007 +Phone: +1 (669) 210-8041 + + + + +BILLING CODE : 1010-10010110 +Invoice Information + + + + + + +Invoice Details +Ticket Number + + + + + + + +0277993883295 + + + + + + +Charges +Ticket Base Fare + + + + + + + +306.29 + +Airline Name + +ALASKA AIRLINES + +Ticket Tax Fare 62.01 + +Passenger Name Flight Details + +MARKS/SAM ALBERT +11 Sep 2023 ALASKA AIRLINES +0572 H Class +SEATTLE-TACOMA,WA/RALEIGH DURHAM,NC +13 Sep 2023 ALASKA AIRLINES +0491 M Class +RALEIGH DURHAM,NC/SEATTLE- TACOMA,WA + +Total (USD) Ticket Amount + +368.30 + +Credit Card Information +Charged to Card + + + +AX XXXXXXXXXXX4321 + + + +368.30 + + + + +Payment Details + + + +Charged by Airline +Total Invoice Charge + + + +USD + + + +368.30 +368.30 + +Monday 11 September 2023 + +10:05 AM + +Seattle (SEA) to Durham (RDU) +Airline Booking Ref: ABCXYZ + +Carrier: ALASKA AIRLINES + +Flight: AS 572 + +Status: Confirmed + +Operated By: ALASKA AIRLINES +Origin: Seattle, WA, Seattle-Tacoma International Apt (SEA) + +Departing: Monday 11 September 2023 at 10:05 AM Destination: Durham, Raleigh, Raleigh (RDU) Arriving: Monday 11 September 2023 at 06:15 PM +Additional Information + +Departure Terminal: Not Applicable + +Arrival Terminal: TERMINAL 2 + + +Class: ECONOMY +Aircraft Type: Boeing 737-900 +Meal Service: Not Applicable +Frequent Flyer Number: Not Applicable +Number of Stops: 0 +Greenhouse Gas Emissions: 560 kg CO2e / person + + +Distance: 2354 Miles Estimated Time: 05 hours 10 minutes +Seat: 24A + + +THE WESTIN RALEIGH DURHAM AP +Address: 3931 Macaw Street, Raleigh, NC, 27617, US +Phone: (1) 919-224-1400 Fax: (1) 919-224-1401 +Check In Date: Monday 11 September 2023 Check Out Date: Wednesday 13 September 2023 Number Of Nights: 2 +Rate: USD 280.00 per night may be subject to local taxes and service charges +Guaranteed to: AX XXXXXXXXXXX4321 + +Reference Number: 987654 +Additional Information +Membership ID: 123456789 +CANCEL PERMITTED UP TO 1 DAYS BEFORE CHECKIN + +Status: Confirmed + + +Corporate Id: Not Applicable + +Number Of Rooms: 1 + +Wednesday 13 September 2023 + +07:15 PM + +Durham (RDU) to Seattle (SEA) +Airline Booking Ref: ABCXYZ + +Carrier: ALASKA AIRLINES + +Flight: AS 491 + +Status: Confirmed + +Operated By: ALASKA AIRLINES +Origin: Durham, Raleigh, Raleigh (RDU) +Departing: Wednesday 13 September 2023 at 07:15 PM + + + +Departure Terminal: TERMINAL 2 + +Destination: Seattle, WA, Seattle-Tacoma International Apt (SEA) +Arriving: Wednesday 13 September 2023 at 09:59 PM Arrival Terminal: Not Applicable +Additional Information + + +Class: ECONOMY +Aircraft Type: Boeing 737-900 +Meal Service: Not Applicable +Frequent Flyer Number: Not Applicable +Number of Stops: 0 +Greenhouse Gas Emissions: 560 kg CO2e / person + + +Distance: 2354 Miles Estimated Time: 05 hours 44 minutes +Seat: 16A + + + +Greenhouse Gas Emissions +Total Greenhouse Gas Emissions for this trip is: 1120 kg CO2e / person +Air Fare Information + +Routing : ONLINE RESERVATION +Total Fare : USD 368.30 
+Additional Messages +FOR 24X7 Travel Reservations Please Call 1-669-210-8041 Unable To Use Requested As Frequent Flyer Program Invalid Use Of Frequent Flyer Number 0123XYZ Please Contact Corresponding Frequent Travel Program Support Desk For Assistance +Trip Name-Trip From Seattle To Raleigh/Durham +This Ticket Is Nonrefundable. Changes Or Cancellations Must Be Made Prior To Scheduled Flight Departure +All Changes Must Be Made On Same Carrier And Will Be Subject To Service Fee And Difference In Airfare +******************************************************* +Please Be Advised That Certain Mandatory Hotel-Imposed Charges Including But Not Limited To Daily Resort Or Facility Fees May Be Applicable To Your Stay And Payable To The Hotel Operator At Check-Out From The Property. You May Wish To Inquire With The Hotel Before Your Trip Regarding The Existence And Amount Of Such Charges. +******************************************************* +Hotel Cancel Policies Vary Depending On The Property And Date. If You Have Questions Regarding Cancellation Fees Please Call The Travel Office. +Important Information +COVID-19 Updates: Click here to access Travel Vitals https://travelvitals.amexgbt.com for the latest information and advisories compiled by American Express Global Business Travel. + +Carbon Emissions: The total emissions value for this itinerary includes air travel only. Emissions for each individual flight are displayed in the flight details section. For more information on carbon emissions please refer to https://www.amexglobalbusinesstravel.com/sustainable-products-and-platforms. + +For important information regarding your booking in relation to the conditions applying to your booking, managing your booking and travel advisory, please refer to www.amexglobalbusinesstravel.com/booking-info. + +GBT Travel Services UK Limited (GBT UK) and its authorized sublicensees (including Ovation Travel Group and Egencia) use certain trademarks and service marks of American Express Company or its subsidiaries (American Express) in the American Express Global Business Travel and American Express Meetings & Events brands and in connection with its business for permitted uses only under a limited license from American Express (Licensed Marks). The Licensed Marks are trademarks or service marks of, and the property of, American Express. GBT UK is a subsidiary of Global Business Travel Group, Inc. (NYSE: GBTG). American Express holds a minority interest in GBTG, which operates as a separate company from American Express. 
diff --git a/dotnet/samples/Concepts/Resources/Plugins/ApiManifestPlugins/MessagesPlugin/apimanifest.json b/dotnet/samples/Concepts/Resources/Plugins/ApiManifestPlugins/MessagesPlugin/apimanifest.json index ab560e5906da..c8b442e152fa 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/ApiManifestPlugins/MessagesPlugin/apimanifest.json +++ b/dotnet/samples/Concepts/Resources/Plugins/ApiManifestPlugins/MessagesPlugin/apimanifest.json @@ -14,7 +14,7 @@ }, { "method": "Post", - "uriTemplate": "/me/sendMail" + "uriTemplate": "/me/messages" } ] } diff --git a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/AstronomyPlugin/messages-openapi.yml b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/AstronomyPlugin/messages-openapi.yml index 322b38a9e5a9..7903450f0c53 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/AstronomyPlugin/messages-openapi.yml +++ b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/AstronomyPlugin/messages-openapi.yml @@ -10,8 +10,8 @@ paths: get: tags: - me.message - summary: Get the messages in the signed-in user\u0026apos;s mailbox - description: Get the messages in the signed-in user\u0026apos;s mailbox (including the Deleted Items and Clutter folders). Depending on the page size and mailbox data, getting messages from a mailbox can incur multiple requests. The default page size is 10 messages. Use $top to customize the page size, within the range of 1 and 1000. To improve the operation response time, use $select to specify the exact properties you need; see example 1 below. Fine-tune the values for $select and $top, especially when you must use a larger page size, as returning a page with hundreds of messages each with a full response payload may trigger the gateway timeout (HTTP 504). To get the next page of messages, simply apply the entire URL returned in @odata.nextLink to the next get-messages request. This URL includes any query parameters you may have specified in the initial request. Do not try to extract the $skip value from the @odata.nextLink URL to manipulate responses. This API uses the $skip value to keep count of all the items it has gone through in the user\u0026apos;s mailbox to return a page of message-type items. It\u0026apos;s therefore possible that even in the initial response, the $skip value is larger than the page size. For more information, see Paging Microsoft Graph data in your app. Currently, this operation returns message bodies in only HTML format. There are two scenarios where an app can get messages in another user\u0026apos;s mail folder + summary: List messages + description: 'Get the messages in the signed-in user''s mailbox (including the Deleted Items and Clutter folders). Depending on the page size and mailbox data, getting messages from a mailbox can incur multiple requests. The default page size is 10 messages. Use $top to customize the page size, within the range of 1 and 1000. To improve the operation response time, use $select to specify the exact properties you need; see example 1 below. Fine-tune the values for $select and $top, especially when you must use a larger page size, as returning a page with hundreds of messages each with a full response payload may trigger the gateway timeout (HTTP 504). To get the next page of messages, simply apply the entire URL returned in @odata.nextLink to the next get-messages request. This URL includes any query parameters you may have specified in the initial request. 
Do not try to extract the $skip value from the @odata.nextLink URL to manipulate responses. This API uses the $skip value to keep count of all the items it has gone through in the user''s mailbox to return a page of message-type items. It''s therefore possible that even in the initial response, the $skip value is larger than the page size. For more information, see Paging Microsoft Graph data in your app. Currently, this operation returns message bodies in only HTML format. There are two scenarios where an app can get messages in another user''s mail folder:' operationId: me_ListMessages parameters: - name: includeHiddenMessages @@ -63,18 +63,26 @@ paths: nextLinkName: '@odata.nextLink' operationName: listMore itemName: value - /me/sendMail: post: tags: - - me.user.Actions - summary: Invoke action sendMail - description: 'Send the message specified in the request body using either JSON or MIME format. When using JSON format, you can include a file attachment in the same sendMail action call. When using MIME format: This method saves the message in the Sent Items folder. Alternatively, create a draft message to send later. To learn more about the steps involved in the backend before a mail is delivered to recipients, see here.' - operationId: me_sendMail + - me.message + summary: Create message + description: "Create a draft of a new message in either JSON or MIME format. When using JSON format, you can:\n- Include an attachment to the message.\n- Update the draft later to add content to the body or change other message properties. When using MIME format:\n- Provide the applicable Internet message headers and the MIME content, all encoded in base64 format in the request body.\n- /* Add any attachments and S/MIME properties to the MIME content. By default, this operation saves the draft in the Drafts folder. Send the draft message in a subsequent operation. Alternatively, send a new message in a single operation, or create a draft to forward, reply and reply-all to an existing message." + operationId: me_CreateMessages requestBody: - $ref: '#/components/requestBodies/sendMailRequestBody' + description: New navigation property + content: + application/json: + schema: + $ref: '#/components/schemas/microsoft.graph.message' + required: true responses: - '204': - description: Success + 2XX: + description: Created navigation property. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/microsoft.graph.message' components: schemas: microsoft.graph.message: @@ -495,19 +503,4 @@ components: style: form explode: false schema: - type: boolean - requestBodies: - sendMailRequestBody: - description: Action parameters - content: - application/json: - schema: - type: object - properties: - Message: - $ref: '#/components/schemas/microsoft.graph.message' - SaveToSentItems: - type: boolean - default: false - nullable: true - required: true + type: boolean \ No newline at end of file diff --git a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-apiplugin.json b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-apiplugin.json index 6ea4e7b739f5..8264a87f44ca 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-apiplugin.json +++ b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-apiplugin.json @@ -10,17 +10,10 @@ "conversation_starters": [ { "text": "List events" - }, - { - "text": "Create new navigation property to events for me" } ] }, "functions": [ - { - "name": "me_calendar_CreateEvents", - "description": "Create new navigation property to events for me" - }, { "name": "me_calendar_ListEvents", "description": "Retrieve a list of events in a calendar. The calendar can be one for a user, or the default calendar of a Microsoft 365 group. The list of events contains single instance meetings and series masters. To get expanded event instances, you can get the calendar view, or\nget the instances of an event." @@ -36,8 +29,7 @@ "url": "calendar-openapi.yml" }, "run_for_functions": [ - "me_calendar_ListEvents", - "me_calendar_CreateEvents" + "me_calendar_ListEvents" ] } ] diff --git a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-openapi.yml b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-openapi.yml index 458690e3ec4b..7232189dd51a 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-openapi.yml +++ b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/CalendarPlugin/calendar-openapi.yml @@ -1,4 +1,4 @@ -openapi: 3.0.4 +openapi: 3.0.1 info: title: OData Service for namespace microsoft.graph - Subset description: This OData service is located at https://graph.microsoft.com/v1.0 @@ -56,27 +56,23 @@ paths: nextLinkName: '@odata.nextLink' operationName: listMore itemName: value - post: - tags: - - me.calendar - summary: Create new navigation property to events for me - operationId: me_calendar_CreateEvents - requestBody: - description: New navigation property - content: - application/json: - schema: - $ref: '#/components/schemas/microsoft.graph.event' - required: true - responses: - 2XX: - description: Created navigation property. - content: - application/json: - schema: - $ref: '#/components/schemas/microsoft.graph.event' components: schemas: + microsoft.graph.eventCollectionResponse: + title: Base collection pagination and count responses + type: object + properties: + '@odata.count': + type: integer + format: int64 + nullable: true + '@odata.nextLink': + type: string + nullable: true + value: + type: array + items: + $ref: '#/components/schemas/microsoft.graph.event' microsoft.graph.event: title: event required: @@ -551,21 +547,6 @@ components: type: string description: A property value. 
nullable: true - microsoft.graph.eventCollectionResponse: - title: Base collection pagination and count responses - type: object - properties: - '@odata.count': - type: integer - format: int64 - nullable: true - '@odata.nextLink': - type: string - nullable: true - value: - type: array - items: - $ref: '#/components/schemas/microsoft.graph.event' microsoft.graph.emailAddress: title: emailAddress required: diff --git a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-apiplugin.json b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-apiplugin.json index ba3827350891..a3fac7f88fb2 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-apiplugin.json +++ b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-apiplugin.json @@ -12,14 +12,14 @@ "text": "List messages" }, { - "text": "Send an email from the current user's mailbox" + "text": "Create message" } ] }, "functions": [ { - "name": "me_sendMail", - "description": "Send the message specified in the request body using either JSON or MIME format. When using JSON format, you can include a file attachment in the same sendMail action call. When using MIME format: This method saves the message in the Sent Items folder. Alternatively, create a draft message to send later. To learn more about the steps involved in the backend before a mail is delivered to recipients, see here." + "name": "me_CreateMessages", + "description": "Create a draft of a new message in either JSON or MIME format. When using JSON format, you can:\n- Include an attachment to the message.\n- Update the draft later to add content to the body or change other message properties. When using MIME format:\n- Provide the applicable Internet message headers and the MIME content, all encoded in base64 format in the request body.\n- /* Add any attachments and S/MIME properties to the MIME content. By default, this operation saves the draft in the Drafts folder. Send the draft message in a subsequent operation. Alternatively, send a new message in a single operation, or create a draft to forward, reply and reply-all to an existing message." }, { "name": "me_ListMessages", @@ -35,7 +35,10 @@ "spec": { "url": "messages-openapi.yml" }, - "run_for_functions": ["me_ListMessages", "me_sendMail"] + "run_for_functions": [ + "me_ListMessages", + "me_CreateMessages" + ] } ] -} +} \ No newline at end of file diff --git a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-openapi.yml b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-openapi.yml index 322b38a9e5a9..7903450f0c53 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-openapi.yml +++ b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/MessagesPlugin/messages-openapi.yml @@ -10,8 +10,8 @@ paths: get: tags: - me.message - summary: Get the messages in the signed-in user\u0026apos;s mailbox - description: Get the messages in the signed-in user\u0026apos;s mailbox (including the Deleted Items and Clutter folders). Depending on the page size and mailbox data, getting messages from a mailbox can incur multiple requests. The default page size is 10 messages. Use $top to customize the page size, within the range of 1 and 1000. To improve the operation response time, use $select to specify the exact properties you need; see example 1 below. 
Fine-tune the values for $select and $top, especially when you must use a larger page size, as returning a page with hundreds of messages each with a full response payload may trigger the gateway timeout (HTTP 504). To get the next page of messages, simply apply the entire URL returned in @odata.nextLink to the next get-messages request. This URL includes any query parameters you may have specified in the initial request. Do not try to extract the $skip value from the @odata.nextLink URL to manipulate responses. This API uses the $skip value to keep count of all the items it has gone through in the user\u0026apos;s mailbox to return a page of message-type items. It\u0026apos;s therefore possible that even in the initial response, the $skip value is larger than the page size. For more information, see Paging Microsoft Graph data in your app. Currently, this operation returns message bodies in only HTML format. There are two scenarios where an app can get messages in another user\u0026apos;s mail folder + summary: List messages + description: 'Get the messages in the signed-in user''s mailbox (including the Deleted Items and Clutter folders). Depending on the page size and mailbox data, getting messages from a mailbox can incur multiple requests. The default page size is 10 messages. Use $top to customize the page size, within the range of 1 and 1000. To improve the operation response time, use $select to specify the exact properties you need; see example 1 below. Fine-tune the values for $select and $top, especially when you must use a larger page size, as returning a page with hundreds of messages each with a full response payload may trigger the gateway timeout (HTTP 504). To get the next page of messages, simply apply the entire URL returned in @odata.nextLink to the next get-messages request. This URL includes any query parameters you may have specified in the initial request. Do not try to extract the $skip value from the @odata.nextLink URL to manipulate responses. This API uses the $skip value to keep count of all the items it has gone through in the user''s mailbox to return a page of message-type items. It''s therefore possible that even in the initial response, the $skip value is larger than the page size. For more information, see Paging Microsoft Graph data in your app. Currently, this operation returns message bodies in only HTML format. There are two scenarios where an app can get messages in another user''s mail folder:' operationId: me_ListMessages parameters: - name: includeHiddenMessages @@ -63,18 +63,26 @@ paths: nextLinkName: '@odata.nextLink' operationName: listMore itemName: value - /me/sendMail: post: tags: - - me.user.Actions - summary: Invoke action sendMail - description: 'Send the message specified in the request body using either JSON or MIME format. When using JSON format, you can include a file attachment in the same sendMail action call. When using MIME format: This method saves the message in the Sent Items folder. Alternatively, create a draft message to send later. To learn more about the steps involved in the backend before a mail is delivered to recipients, see here.' - operationId: me_sendMail + - me.message + summary: Create message + description: "Create a draft of a new message in either JSON or MIME format. When using JSON format, you can:\n- Include an attachment to the message.\n- Update the draft later to add content to the body or change other message properties. 
When using MIME format:\n- Provide the applicable Internet message headers and the MIME content, all encoded in base64 format in the request body.\n- /* Add any attachments and S/MIME properties to the MIME content. By default, this operation saves the draft in the Drafts folder. Send the draft message in a subsequent operation. Alternatively, send a new message in a single operation, or create a draft to forward, reply and reply-all to an existing message." + operationId: me_CreateMessages requestBody: - $ref: '#/components/requestBodies/sendMailRequestBody' + description: New navigation property + content: + application/json: + schema: + $ref: '#/components/schemas/microsoft.graph.message' + required: true responses: - '204': - description: Success + 2XX: + description: Created navigation property. + content: + application/json: + schema: + $ref: '#/components/schemas/microsoft.graph.message' components: schemas: microsoft.graph.message: @@ -495,19 +503,4 @@ components: style: form explode: false schema: - type: boolean - requestBodies: - sendMailRequestBody: - description: Action parameters - content: - application/json: - schema: - type: object - properties: - Message: - $ref: '#/components/schemas/microsoft.graph.message' - SaveToSentItems: - type: boolean - default: false - nullable: true - required: true + type: boolean \ No newline at end of file diff --git a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/README.md b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/README.md index 9174f1471718..b5642696db64 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/README.md +++ b/dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins/README.md @@ -5,7 +5,7 @@ These plugins have been generated thanks to [kiota](https://aka.ms/kiota) and can be regenerated if needed. ```shell -cd dotnet/samples/Concepts/Resources/Plugins +cd dotnet/samples/Concepts/Resources/Plugins/CopilotAgentPlugins ``` ### Calendar plugin @@ -37,7 +37,7 @@ kiota plugin add -t APIPlugin -d https://aka.ms/graph/v1.0/openapi.yaml -i /driv Microsoft Graph list message and create a draft message for the current user. ```shell -kiota plugin add -t APIPlugin -d https://aka.ms/graph/v1.0/openapi.yaml -i /me/messages#GET -i /me/sendMail#POST -o CopilotAgentPlugins/MessagesPlugin --pn Messages +kiota plugin add -t APIPlugin -d https://aka.ms/graph/v1.0/openapi.yaml -i /me/messages#GET -i /me/messages#POST -o CopilotAgentPlugins/MessagesPlugin --pn Messages ``` ### Astronomy plugin diff --git a/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs b/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs new file mode 100644 index 000000000000..c383ea9025f1 --- /dev/null +++ b/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.ComponentModel; +using Microsoft.SemanticKernel; + +namespace Plugins; + +public sealed class LegacyMenuPlugin +{ + /// + /// Returns a mock item menu. + /// + [KernelFunction, Description("Provides a list of specials from the menu.")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] + public string[] GetSpecials(KernelArguments? arguments) + { + return + [ + "Special Soup: Clam Chowder", + "Special Salad: Cobb Salad", + "Special Drink: Chai Tea", + ]; + } + + /// + /// Returns a mock item price. 
+ /// + [KernelFunction, Description("Provides the price of the requested menu item.")] + public string GetItemPrice( + [Description("The name of the menu item.")] + string menuItem, + KernelArguments? arguments) + { + return "$9.99"; + } + + /// + /// An item is 86'd when the kitchen cannot serve due to running out of ingredients. + /// + [KernelFunction, Description("Returns true if the kitchen has ran out of the item.")] + public bool IsItem86d( + [Description("The name of the menu item.")] + string menuItem, + [Description("The number of items requested.")] + int count, + KernelArguments? arguments) + { + return count < 3; + } +} diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/BearerAuthenticationProviderWithCancellationToken.cs b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/BearerAuthenticationProviderWithCancellationToken.cs deleted file mode 100644 index 5f0444aa22e2..000000000000 --- a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/BearerAuthenticationProviderWithCancellationToken.cs +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Net.Http.Headers; -using Microsoft.Extensions.Configuration; -using Microsoft.Identity.Client; - -/// -/// Retrieves a token via the provided delegate and applies it to HTTP requests using the -/// "bearer" authentication scheme. -/// -public class BearerAuthenticationProviderWithCancellationToken -{ - private readonly IPublicClientApplication _client; - - /// - /// Creates an instance of the class. - /// - /// The configuration instance to read settings from. - public BearerAuthenticationProviderWithCancellationToken(IConfiguration configuration) - { - ArgumentNullException.ThrowIfNull(configuration); - var clientId = configuration["MSGraph:ClientId"]; - var tenantId = configuration["MSGraph:TenantId"]; - - if (string.IsNullOrEmpty(clientId) || string.IsNullOrEmpty(tenantId)) - { - throw new InvalidOperationException("Please provide valid MSGraph configuration in appsettings.Development.json file."); - } - - this._client = PublicClientApplicationBuilder - .Create(clientId) - .WithAuthority($"https://login.microsoftonline.com/{tenantId}") - .WithDefaultRedirectUri() - .Build(); - } - - /// - /// Applies the token to the provided HTTP request message. - /// - /// The HTTP request message. 
- /// - public async Task AuthenticateRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken = default) - { - var token = await this.GetAccessTokenAsync(cancellationToken).ConfigureAwait(false); - request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token); - } - private async Task GetAccessTokenAsync(CancellationToken cancellationToken) - { - var scopes = new string[] { "https://graph.microsoft.com/.default" }; - try - { - var authResult = await this._client.AcquireTokenSilent(scopes, (await this._client.GetAccountsAsync().ConfigureAwait(false)).FirstOrDefault()).ExecuteAsync(cancellationToken).ConfigureAwait(false); - return authResult.AccessToken; - } - catch - { - var authResult = await this._client.AcquireTokenWithDeviceCode(scopes, deviceCodeResult => - { - Console.WriteLine(deviceCodeResult.Message); - return Task.CompletedTask; - }).ExecuteAsync(cancellationToken).ConfigureAwait(false); - return authResult.AccessToken; - } - } -} diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.csproj b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.csproj deleted file mode 100644 index a81a79967bcb..000000000000 --- a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.csproj +++ /dev/null @@ -1,55 +0,0 @@ - - - - Exe - net8.0 - enable - enable - SKEXP0040,SKEXP0042,SKEXP0043,SKEXP0050,SKEXP0053,SKEXP0060,SKEXP0061,1591,CA1050,CA1308,CA2234 - - - - - PreserveNewest - - - PreserveNewest - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.sln b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.sln deleted file mode 100644 index d16eae1498aa..000000000000 --- a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/CopilotAgentPluginsDemoSample.sln +++ /dev/null @@ -1,24 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 17 -VisualStudioVersion = 17.5.2.0 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CopilotAgentPluginsDemoSample", "CopilotAgentPluginsDemoSample.csproj", "{7F2FF65C-BC07-E142-D909-97CCFC4B0B50}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Release|Any CPU = Release|Any CPU - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {7F2FF65C-BC07-E142-D909-97CCFC4B0B50}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7F2FF65C-BC07-E142-D909-97CCFC4B0B50}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7F2FF65C-BC07-E142-D909-97CCFC4B0B50}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7F2FF65C-BC07-E142-D909-97CCFC4B0B50}.Release|Any CPU.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - SolutionGuid = {820AD9F3-FFBD-4690-9EAB-89D967E00ABE} - EndGlobalSection -EndGlobal diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/DemoCommand.cs b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/DemoCommand.cs deleted file mode 100644 index 336b1832e455..000000000000 --- 
a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/DemoCommand.cs +++ /dev/null @@ -1,528 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Text.Json; -using System.Text.Json.Nodes; -using System.Web; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AzureOpenAI; -using Microsoft.SemanticKernel.Connectors.Ollama; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Plugins.OpenApi; -using Microsoft.SemanticKernel.Plugins.OpenApi.Extensions; -using Spectre.Console; -using Spectre.Console.Cli; -using Spectre.Console.Json; - -public class DemoCommand : AsyncCommand -{ - public class Settings : CommandSettings - { - [CommandOption("--debug")] - public bool? EnableLogging { get; set; } - } - - private static readonly Lazy s_configurationRoot = new(() => - new ConfigurationBuilder() - .AddJsonFile("appsettings.Development.json", optional: true, reloadOnChange: true) - .Build()); - - private static IConfigurationRoot configuration => s_configurationRoot.Value; - - private const string CopilotAgentPluginsDirectory = "CopilotAgentPlugins"; - public override async Task ExecuteAsync(CommandContext context, Settings settings) - { - var availableCopilotPlugins = Directory.GetDirectories($"../../../Concepts/Resources/Plugins/{CopilotAgentPluginsDirectory}"); - - var selectedKernelName = AnsiConsole.Prompt( - new SelectionPrompt() - .Title("[green]SELECT KERNEL TO USE:[/]") - .AddChoices([ - "azureopenai", - "openai", - "ollama" - ])); - - var enableLogging = settings.EnableLogging == true; - - var (kernel, promptSettings) = selectedKernelName switch - { - "azureopenai" => InitializeAzureOpenAiKernel(configuration, enableLogging: enableLogging), - "openai" => InitializeOpenAiKernel(configuration, enableLogging: enableLogging), - "ollama" => InitializeKernelForOllama(configuration, enableLogging: enableLogging), - _ => throw new InvalidOperationException($"Invalid kernel selection. 
{selectedKernelName} is not a valid kernel.") - }; - kernel.AutoFunctionInvocationFilters.Add(new ExpectedSchemaFunctionFilter()); - - while (true) - { - const string LOAD_COPILOT_AGENT_PLUGIN = "Load Copilot Agent plugin(s)"; - const string LOAD_ALL_COPILOT_AGENT_PLUGINS = "Load all available Copilot Agent plugins"; - const string UNLOAD_ALL_PLUGINS = "Unload all plugins"; - const string SHOW_COPILOT_AGENT_MANIFEST = "Show Copilot Agent manifest"; - const string EXECUTE_GOAL = "Execute a goal"; - const string LIST_LOADED_PLUGINS = "List loaded plugins"; - const string LIST_LOADED_PLUGINS_WITH_FUNCTIONS = "List loaded plugins with functions"; - const string LIST_LOADED_PLUGINS_WITH_FUNCTIONS_AND_PARAMETERS = "List loaded plugins with functions and parameters"; - const string EXIT = "Exit"; - AnsiConsole.WriteLine(); - var selection = AnsiConsole.Prompt( - new SelectionPrompt() - .Title("SELECT AN OPTION:") - .PageSize(10) - .AddChoices([LOAD_COPILOT_AGENT_PLUGIN, LOAD_ALL_COPILOT_AGENT_PLUGINS, UNLOAD_ALL_PLUGINS, SHOW_COPILOT_AGENT_MANIFEST, EXECUTE_GOAL, LIST_LOADED_PLUGINS, LIST_LOADED_PLUGINS_WITH_FUNCTIONS, LIST_LOADED_PLUGINS_WITH_FUNCTIONS_AND_PARAMETERS, EXIT])); - - switch (selection) - { - case LOAD_COPILOT_AGENT_PLUGIN: - await this.LoadCopilotAgentPluginAsync(kernel, configuration, availableCopilotPlugins).ConfigureAwait(false); - break; - case LOAD_ALL_COPILOT_AGENT_PLUGINS: - await this.LoadCopilotAgentPluginAsync(kernel, configuration, availableCopilotPlugins, loadAllPlugins: true).ConfigureAwait(false); - break; - case UNLOAD_ALL_PLUGINS: - kernel.Plugins.Clear(); - AnsiConsole.MarkupLine("[bold green]All plugins unloaded successfully.[/]"); - break; - case SHOW_COPILOT_AGENT_MANIFEST: - await this.ShowCopilotAgentManifestAsync(availableCopilotPlugins).ConfigureAwait(false); - break; - case EXECUTE_GOAL: - await this.ExecuteGoalAsync(kernel, promptSettings).ConfigureAwait(false); - break; - case LIST_LOADED_PLUGINS: - this.ListLoadedPlugins(kernel); - break; - case LIST_LOADED_PLUGINS_WITH_FUNCTIONS: - this.ListLoadedPlugins(kernel, withFunctions: true); - break; - case LIST_LOADED_PLUGINS_WITH_FUNCTIONS_AND_PARAMETERS: - this.ListLoadedPlugins(kernel, withFunctions: true, withParameters: true); - break; - case EXIT: - return 0; - default: - AnsiConsole.MarkupLine("[red]Invalid selection.[/]"); - break; - } - } - } - private async Task LoadCopilotAgentPluginAsync(Kernel kernel, IConfigurationRoot configuration, string[] availableCopilotPlugins, bool loadAllPlugins = false) - { - await this.LoadPluginAsync(kernel, configuration, availableCopilotPlugins, this.AddCopilotAgentPluginAsync, loadAllPlugins).ConfigureAwait(false); - } - - private async Task ShowCopilotAgentManifestAsync(string[] availableCopilotPlugins) - { - await this.ShowManifestAsync(availableCopilotPlugins, GetCopilotAgentManifestPath).ConfigureAwait(false); - } - private static string GetCopilotAgentManifestPath(string name) => Path.Combine(Directory.GetCurrentDirectory(), "../../../Concepts/Resources/Plugins", CopilotAgentPluginsDirectory, name, $"{name[..^6].ToLowerInvariant()}-apiplugin.json"); - - private async Task ShowManifestAsync(string[] availableApiManifestPlugins, Func nameLookup) - { - var selectedPluginName = AnsiConsole.Prompt( - new SelectionPrompt() - .Title("[green]SELECT PLUGIN TO SHOW API MANIFEST:[/]") - .PageSize(10) - .AddChoices(availableApiManifestPlugins.Select(p => p.Split(Path.DirectorySeparatorChar).Last()))); - - var apiManifest = await 
File.ReadAllTextAsync(nameLookup(selectedPluginName)).ConfigureAwait(false); - var jsonText = new JsonText(apiManifest); - AnsiConsole.Write( - new Panel(jsonText) - .Header(selectedPluginName) - .Collapse() - .RoundedBorder() - .BorderColor(Color.Yellow)); - } - private void ListLoadedPlugins(Kernel kernel, bool withFunctions = false, bool withParameters = false) - { - var root = new Tree("[bold]LOADED PLUGINS[/]"); - foreach (var plugin in kernel.Plugins) - { - var pluginNode = root.AddNode($"[bold green]{plugin.Name}[/]"); - if (!withFunctions) - { - continue; - } - - foreach (var function in plugin.GetFunctionsMetadata()) - { - var functionNode = pluginNode.AddNode($"[italic green]{function.Name}[/]{Environment.NewLine} {function.Description}"); - - if (!withParameters) - { - continue; - } - - if (function.Parameters.Count == 0) - { - functionNode.AddNode("[red]No parameters[/]"); - continue; - } - - foreach (var param in function.Parameters) - { - functionNode.AddNode($"[italic green]{param.Name}[/]{Environment.NewLine} {param.Description}"); - } - } - } - - if (kernel.Plugins.Count == 0) - { - root.AddNode("[red]No plugin loaded.[/]"); - } - - AnsiConsole.Write(root); - } - - private async Task LoadPluginAsync(Kernel kernel, IConfigurationRoot configuration, IEnumerable availableManifestPlugins, Func loader, bool loadAllPlugins = false) - { - // get unloaded plugins - var pluginNames = availableManifestPlugins.Select(p => p.Split(Path.DirectorySeparatorChar).Last()) - .Where(p => !kernel.Plugins.Any(loadedPlugin => p == loadedPlugin.Name)) - .ToList(); - - if (pluginNames.Count == 0) - { - AnsiConsole.MarkupLine("[red]No additional plugin available to load.[/]"); - return; - } - - var selectedPluginNames = loadAllPlugins ? - pluginNames : - AnsiConsole.Prompt( - new MultiSelectionPrompt() - .Title("[green]SELECT PLUGINS TO LOAD:[/]") - .PageSize(10) - .AddChoices(pluginNames)); - - foreach (var selectedPluginName in selectedPluginNames) - { - await AnsiConsole.Status() - .Spinner(Spinner.Known.Dots) - .SpinnerStyle(Style.Parse("yellow")) - .StartAsync($"loading {selectedPluginName}...", async ctx => - { - await loader(kernel, configuration, selectedPluginName).ConfigureAwait(false); - }).ConfigureAwait(false); - } - } - - private async Task ExecuteGoalAsync(Kernel kernel, PromptExecutionSettings promptExecutionSettings) - { - var goal = AnsiConsole.Ask("Enter your goal:"); - var result = await kernel.InvokePromptAsync(goal, new KernelArguments(promptExecutionSettings)).ConfigureAwait(false); - var panel = new Panel($"[bold]Result[/]{Environment.NewLine}{Environment.NewLine}[green italic]{Markup.Escape(result.ToString())}[/]"); - AnsiConsole.Write(panel); - } - - private static (Kernel, PromptExecutionSettings) InitializeKernelForOllama(IConfiguration configuration, bool enableLogging) - { - var engineConfig = configuration.GetSection("Ollama"); - var chatModelId = engineConfig["ChatModelId"]; - var endpoint = engineConfig["Endpoint"]; - if (string.IsNullOrEmpty(chatModelId) || string.IsNullOrEmpty(endpoint)) - { - throw new InvalidOperationException("Please provide valid Ollama configuration in appsettings.Development.json file."); - } - - var builder = Kernel.CreateBuilder(); - if (enableLogging) - { - builder.Services.AddLogging(loggingBuilder => - { - loggingBuilder.AddFilter(level => true); - loggingBuilder.AddProvider(new SemanticKernelLoggerProvider()); - }); - } -#pragma warning disable SKEXP0070 // Type is for evaluation purposes only and is subject to change or removal in 
future updates. Suppress this diagnostic to proceed. -#pragma warning disable SKEXP0001 - return (builder.AddOllamaChatCompletion( - chatModelId, - new Uri(endpoint)).Build(), - new OllamaPromptExecutionSettings - { - FunctionChoiceBehavior = FunctionChoiceBehavior.Auto( - options: new FunctionChoiceBehaviorOptions - { - AllowStrictSchemaAdherence = true - } - ) - }); -#pragma warning restore SKEXP0001 -#pragma warning restore SKEXP0070 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. - } - - private static (Kernel, PromptExecutionSettings) InitializeAzureOpenAiKernel(IConfiguration configuration, bool enableLogging) - { - var azureOpenAIConfig = configuration.GetSection("AzureOpenAI"); - var apiKey = azureOpenAIConfig["ApiKey"]; - var chatDeploymentName = azureOpenAIConfig["ChatDeploymentName"]; - var chatModelId = azureOpenAIConfig["ChatModelId"]; - var endpoint = azureOpenAIConfig["Endpoint"]; - - if (string.IsNullOrEmpty(apiKey) || string.IsNullOrEmpty(chatDeploymentName) || string.IsNullOrEmpty(chatModelId) || string.IsNullOrEmpty(endpoint)) - { - throw new InvalidOperationException("Please provide valid AzureOpenAI configuration in appsettings.Development.json file."); - } - - var builder = Kernel.CreateBuilder(); - if (enableLogging) - { - builder.Services.AddLogging(loggingBuilder => - { - loggingBuilder.AddFilter(level => true); - loggingBuilder.AddProvider(new SemanticKernelLoggerProvider()); - }); - } - return (builder.AddAzureOpenAIChatCompletion( - deploymentName: chatDeploymentName, - endpoint: endpoint, - serviceId: "AzureOpenAIChat", - apiKey: apiKey, - modelId: chatModelId).Build(), -#pragma warning disable SKEXP0001 - new AzureOpenAIPromptExecutionSettings - { - FunctionChoiceBehavior = FunctionChoiceBehavior.Auto( - options: new FunctionChoiceBehaviorOptions - { - AllowStrictSchemaAdherence = true - } - ) - }); -#pragma warning restore SKEXP0001 - } - - public static (Kernel, PromptExecutionSettings) InitializeOpenAiKernel(IConfiguration configuration, bool enableLogging) - { - // Extract configuration settings specific to OpenAI - var openAIConfig = configuration.GetSection("OpenAI"); - var apiKey = openAIConfig["ApiKey"]; - var modelId = openAIConfig["ModelId"]; - - if (string.IsNullOrEmpty(apiKey) || string.IsNullOrEmpty(modelId)) - { - throw new InvalidOperationException("Please provide valid OpenAI configuration in appsettings.Development.json file."); - } - - var builder = Kernel.CreateBuilder(); - if (enableLogging) - { - builder.Services.AddLogging(loggingBuilder => - { - loggingBuilder.AddFilter(level => true); - loggingBuilder.AddProvider(new SemanticKernelLoggerProvider()); - }); - } - - return (builder.AddOpenAIChatCompletion( - apiKey: apiKey, - modelId: modelId).Build(), -#pragma warning disable SKEXP0001 - new OpenAIPromptExecutionSettings - { - FunctionChoiceBehavior = FunctionChoiceBehavior.Auto( - options: new FunctionChoiceBehaviorOptions - { - AllowStrictSchemaAdherence = true - }) - }); -#pragma warning restore SKEXP0001 - - } - private static AuthenticateRequestAsyncCallback? GetApiKeyAuthProvider(string apiKey, string parameterName, bool inHeader) - { - return async (request, cancellationToken) => - { - if (inHeader) - { - request.Headers.Add(parameterName, apiKey); - } - else - { - var uriBuilder = new UriBuilder(request.RequestUri ?? 
throw new InvalidOperationException("The request URI is null.")); - var query = HttpUtility.ParseQueryString(uriBuilder.Query); - query[parameterName] = apiKey; - uriBuilder.Query = query.ToString(); - request.RequestUri = uriBuilder.Uri; - } - - await Task.CompletedTask.ConfigureAwait(false); - }; - } - - private readonly BearerAuthenticationProviderWithCancellationToken _bearerAuthenticationProviderWithCancellationToken = new(configuration); - - private async Task AddCopilotAgentPluginAsync(Kernel kernel, IConfigurationRoot configuration, string pluginName) - { - var copilotAgentPluginParameters = new CopilotAgentPluginParameters - { - FunctionExecutionParameters = new() - { - { "https://graph.microsoft.com/v1.0", new OpenApiFunctionExecutionParameters(authCallback: this._bearerAuthenticationProviderWithCancellationToken.AuthenticateRequestAsync, enableDynamicOperationPayload: false, enablePayloadNamespacing: true) { ParameterFilter = s_restApiParameterFilter} }, - { "https://api.nasa.gov/planetary", new OpenApiFunctionExecutionParameters(authCallback: GetApiKeyAuthProvider("DEMO_KEY", "api_key", false), enableDynamicOperationPayload: false, enablePayloadNamespacing: true)} - }, - }; - - try - { - KernelPlugin plugin = - await kernel.ImportPluginFromCopilotAgentPluginAsync( - pluginName, - GetCopilotAgentManifestPath(pluginName), - copilotAgentPluginParameters) - .ConfigureAwait(false); - AnsiConsole.MarkupLine($"[bold green] {pluginName} loaded successfully.[/]"); - } - catch (Exception ex) - { - AnsiConsole.MarkupLine($"[red]Failed to load {pluginName}.[/]"); - kernel.LoggerFactory.CreateLogger("Plugin Creation").LogError(ex, "Plugin creation failed. Message: {0}", ex.Message); - throw new AggregateException($"Plugin creation failed for {pluginName}", ex); - } - } - #region MagicDoNotLookUnderTheHood - private static readonly HashSet s_fieldsToIgnore = new( - [ - "@odata.type", - "attachments", - "allowNewTimeProposals", - "bccRecipients", - "bodyPreview", - "calendar", - "categories", - "ccRecipients", - "changeKey", - "conversationId", - "coordinates", - "conversationIndex", - "createdDateTime", - "discriminator", - "lastModifiedDateTime", - "locations", - "extensions", - "flag", - "from", - "hasAttachments", - "iCalUId", - "id", - "inferenceClassification", - "internetMessageHeaders", - "instances", - "isCancelled", - "isDeliveryReceiptRequested", - "isDraft", - "isOrganizer", - "isRead", - "isReadReceiptRequested", - "multiValueExtendedProperties", - "onlineMeeting", - "onlineMeetingProvider", - "onlineMeetingUrl", - "organizer", - "originalStart", - "parentFolderId", - "range", - "receivedDateTime", - "recurrence", - "replyTo", - "sender", - "sentDateTime", - "seriesMasterId", - "singleValueExtendedProperties", - "transactionId", - "time", - "uniqueBody", - "uniqueId", - "uniqueIdType", - "webLink", - ], - StringComparer.OrdinalIgnoreCase - ); - private const string RequiredPropertyName = "required"; - private const string PropertiesPropertyName = "properties"; - /// - /// Trims the properties from the request body schema. - /// Most models in strict mode enforce a limit on the properties. - /// - /// Source schema - /// the trimmed schema for the request body - private static KernelJsonSchema? TrimPropertiesFromRequestBody(KernelJsonSchema? 
schema) - { - if (schema is null) - { - return null; - } - - var originalSchema = JsonSerializer.Serialize(schema.RootElement); - var node = JsonNode.Parse(originalSchema); - if (node is not JsonObject jsonNode) - { - return schema; - } - - TrimPropertiesFromJsonNode(jsonNode); - - return KernelJsonSchema.Parse(node.ToString()); - } - private static void TrimPropertiesFromJsonNode(JsonNode jsonNode) - { - if (jsonNode is not JsonObject jsonObject) - { - return; - } - if (jsonObject.TryGetPropertyValue(RequiredPropertyName, out var requiredRawValue) && requiredRawValue is JsonArray requiredArray) - { - jsonNode[RequiredPropertyName] = new JsonArray(requiredArray.Where(x => x is not null).Select(x => x!.GetValue()).Where(x => !s_fieldsToIgnore.Contains(x)).Select(x => JsonValue.Create(x)).ToArray()); - } - if (jsonObject.TryGetPropertyValue(PropertiesPropertyName, out var propertiesRawValue) && propertiesRawValue is JsonObject propertiesObject) - { - var properties = propertiesObject.Where(x => s_fieldsToIgnore.Contains(x.Key)).Select(static x => x.Key).ToArray(); - foreach (var property in properties) - { - propertiesObject.Remove(property); - } - } - foreach (var subProperty in jsonObject) - { - if (subProperty.Value is not null) - { - TrimPropertiesFromJsonNode(subProperty.Value); - } - } - } -#pragma warning disable SKEXP0040 - private static readonly RestApiParameterFilter s_restApiParameterFilter = (RestApiParameterFilterContext context) => - { -#pragma warning restore SKEXP0040 - if (("me_sendMail".Equals(context.Operation.Id, StringComparison.OrdinalIgnoreCase) || - ("me_calendar_CreateEvents".Equals(context.Operation.Id, StringComparison.OrdinalIgnoreCase)) && - "payload".Equals(context.Parameter.Name, StringComparison.OrdinalIgnoreCase))) - { - context.Parameter.Schema = TrimPropertiesFromRequestBody(context.Parameter.Schema); - return context.Parameter; - } - return context.Parameter; - }; - private sealed class ExpectedSchemaFunctionFilter : IAutoFunctionInvocationFilter - {//TODO: this eventually needs to be added to all CAP or DA but we're still discussing where should those facilitators live - public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) - { - await next(context).ConfigureAwait(false); - - if (context.Result.ValueType == typeof(RestApiOperationResponse)) - { - var openApiResponse = context.Result.GetValue(); - if (openApiResponse?.ExpectedSchema is not null) - { - openApiResponse.ExpectedSchema = null; - } - } - } - } - #endregion -} diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLogger.cs b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLogger.cs deleted file mode 100644 index 666fc5a4e1c7..000000000000 --- a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLogger.cs +++ /dev/null @@ -1,125 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Text.Json; -using Microsoft.Extensions.Logging; -using Spectre.Console; -using Spectre.Console.Json; - -public class SemanticKernelLogger : ILogger -{ - public IDisposable? BeginScope(TState state) where TState : notnull - { - return null; - } - - public bool IsEnabled(LogLevel logLevel) - { - return true; - } - - public void Log(LogLevel logLevel, EventId eventId, TState state, Exception? 
exception, Func formatter) - { - if (!this.IsEnabled(logLevel)) - { - return; - } - - // You can reformat the message here - var message = formatter(state, exception); - if (!this.PrintMessageBetweenTags(message, "Rendered prompt", "[FUNCTIONS]", "[END FUNCTIONS]") - && !this.PrintMessageWithALabelAndJson("Function result:", message) - && !this.PrintMessageWithALabelAndJson("Function arguments:", message) - && !this.PrintMessageWithALabelAndJson("Plan result:", message)) - { - AnsiConsole.MarkupLine($"[green]{logLevel}[/] {Markup.Escape(message)}"); - } - } - - private bool PrintMessageWithALabelAndJson(string label, string message) - { - if (message.StartsWith(label, System.StringComparison.Ordinal)) - { - var json = message.Substring(label.Length).Trim(); - - try - { - var jsonText = new JsonText(json); - AnsiConsole.Write( - new Panel(jsonText) - .Header(label) - .Collapse() - .RoundedBorder() - .BorderColor(Color.Yellow)); - } - catch - { - AnsiConsole.MarkupLine(Markup.Escape(message)); - } - - string[] nestedJsonObjectLabels = ["available_functions", "Content"]; - foreach (var nestedJsonObjectLabel in nestedJsonObjectLabels) - { - try - { - var jsonDoc = JsonDocument.Parse(json); - var content = jsonDoc.RootElement.GetProperty(nestedJsonObjectLabel).GetString(); - if (content != null) - { - var jsonText = new JsonText(content); - AnsiConsole.Write( - new Panel(jsonText) - .Header(nestedJsonObjectLabel) - .Collapse() - .RoundedBorder() - .BorderColor(Color.Yellow)); - } - } - catch - { - // ignored - } - } - - return true; - } - - return false; - } - - private bool PrintMessageBetweenTags(string message, string label, string startTag, string endTag) - { - if (message.StartsWith(label, System.StringComparison.Ordinal)) - { - var split = message.Split(startTag); - AnsiConsole.MarkupLine($"[green]{this.EscapeMarkup(split[0])}[/]"); - if (split.Length > 1) - { - var split2 = split[1].Split(endTag); - try - { - var jsonText = new JsonText(this.EscapeMarkup(split2[0])); - AnsiConsole.Write( - new Panel(jsonText) - .Header("Functions") - .Collapse() - .RoundedBorder() - .BorderColor(Color.Yellow)); - } - catch - { - AnsiConsole.MarkupLine(this.EscapeMarkup(split2[0])); - } - - AnsiConsole.MarkupLine(this.EscapeMarkup(split2[1])); - return true; - } - } - - return false; - } - - private string EscapeMarkup(string text) - { - return text.Replace("[", "[[").Replace("]", "]]"); - } -} diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLoggerProvider.cs b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLoggerProvider.cs deleted file mode 100644 index 2e4aa284549a..000000000000 --- a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Logging/SemanticKernelLoggerProvider.cs +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.Extensions.Logging; - -public class SemanticKernelLoggerProvider : ILoggerProvider, IDisposable -{ - public ILogger CreateLogger(string categoryName) - { - return new SemanticKernelLogger(); - } - - protected virtual void Dispose(bool disposing) - { - if (disposing) - { - // Dispose managed resources here. - } - - // Dispose unmanaged resources here. 
- } - - public void Dispose() - { - this.Dispose(true); - GC.SuppressFinalize(this); - } -} diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Program.cs b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Program.cs deleted file mode 100644 index b584b8b843f2..000000000000 --- a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/Program.cs +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Spectre.Console.Cli; - -var app = new CommandApp(); -app.Configure(config => -{ - config.AddCommand("demo"); -}); - -return app.Run(args); diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/appsettings.json b/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/appsettings.json deleted file mode 100644 index 4dd3abcb1ff2..000000000000 --- a/dotnet/samples/Demos/CopilotAgentPlugins/CopilotAgentPluginsDemoSample/appsettings.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "AzureOpenAI": { - "ChatModelId": "", - "ServiceId": "", - "ChatDeploymentName": "", - "Endpoint": "", - "ApiKey": "" - }, - "OpenAI": { - "ApiKey": "", - "ModelId": "gpt-4o", - "Organization": "" - }, - "MsGraph": { - "ClientId": "", - "TenantId": "9188040d-6c67-4c5b-b112-36a304b66dad", // MSA/Consumer/Personal tenant, https://learn.microsoft.com/azure/active-directory/develop/accounts-overview - "RedirectUri": "http://localhost" - } - } \ No newline at end of file diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/README.md b/dotnet/samples/Demos/CopilotAgentPlugins/README.md deleted file mode 100644 index de3bf6ec8fc6..000000000000 --- a/dotnet/samples/Demos/CopilotAgentPlugins/README.md +++ /dev/null @@ -1,168 +0,0 @@ ---- -page_type: sample -languages: -- dotnet -products: -- copilot -- ms-graph -- semantic-kernel -- microsoft-365 -description: The CopilotAgentPluginDemoSample create hand rolled plugins for use in a Semantic Kernel project. The plugins allow for CRUD operations using Microsoft Graph APIs, so that developers can send prompts that will AutoInvokeFunctions to Microsoft365 data, services, and resources. -extensions: - contentType: samples - technologies: - - Kiota - - Semantic Kernel - - Microsoft Graph - services: - - Azure AD - - Microsoft 365 - createdDate: 2/12/2025 4:50:18 AM ---- -# Copilot Agent Plugins Sample for Semantic Kernel - -Sample created and managed by [Fabian G. Williams](https://github.com/fabianwilliams), Principal Product Manager, Microsoft. We believe that Copilot Agent Plugins (CAPs) empowers developers to effortlessly build AI-driven solutions by transforming natural language into seamless CRUD actions using Microsoft Graph and Semantic Kernel, thus revolutionizing the way we **developers** interact with Microsoft 365 data and innovate. - -## Watch the Videos - -### Why use Copilot Agent Plugins? -[![Watch the video](https://img.youtube.com/vi/la1UDNn3eP4/0.jpg)](https://aka.ms/m365caps-videointro) - -### Live Demo of CAPs in Action -[![Watch the video](https://img.youtube.com/vi/-D3KdiPySxw/0.jpg)](https://aka.ms/m365caps-videodemo) - -## CAPS Public Roadmap - -Our timelines may be subject to changes, at this time our current GA release cycles are - -![A screenshot of the CAPs Public Roadmap ](images/CAPs_PublicRoadmap.png) - -What to get going? Start your journey below! 
- -## Use the CopilotAgentPluginDemoSample application to use and create Plugins for Gen AI experiences in Microsoft 365 - -### Prerequisites - -- A Entra ID/ AAD administrator account capable of registering an Application. You can get a development tenant for free by joining the [Microsoft 365 Developer Program](https://developer.microsoft.com/microsoft-365/dev-program). -- [Visual Studio Code](https://code.visualstudio.com/) -- [Semantic Kernel](https://github.com/microsoft/semantic-kernel). - -### How the sample application works - -The sample has the following features: - -- This is a Console Application. The user will open a terminal and issue a command "dotnet run demo" or "dotnet run demo --debug" for debug mode. -- The user will then be presented with options to leverage platforms of "AzureOpenAI", "OpenAI", or locally with "Ollama" where the LLM is hosted. -- The user will then determine which Plugins they would like to load for this sample. As of this writing there are 4 available, Contacts, Messages, Calendar, and DriveItems. -- Once loaded the user will then have options to inspect the Manifest, Plugins, or run a prompt using the "Execute a Goal" option. -- The user will enter a prompt that satisfies one or more of the plugins they loaded. -- If a Auth token is not present, the user will be prompted to sign in with their Microsoft 365 account. This demonstrates how to use delegated authentication to run on a user's behalf. -- The users prompt is reasoned over and a result is returned with a description of the actions taken or data retrieved. This demonstrates how to use app can reason over Microsoft 365 data and synthesize a response or take an action on the users behalf. -- The user then has the option to issue another prompt load additional plugins, or exit the application. - -## Setting up the sample - -1. Register a Microsoft Identity platform application, and give it the right permissions. -1. Create an applications.Development.json file that fits with the pattern in the sample applications.json file that is included in the sample - -### Register a Microsoft Identity platform application - -#### Choose the tenant where you want to create your app - -1. Sign in to the [Azure Active Directory admin center](https://aad.portal.azure.com) using either a work or school account. -1. If your account is present in more than one Azure AD tenant: - 1. Select your profile from the menu on the top right corner of the page, and then **Switch directory**. - 1. Change your session to the Azure AD tenant where you want to create your application. - -#### Register the app - -This sample for demonstration purposes uses a [Device Code Authentication flow](https://learn.microsoft.com/en-us/entra/identity-platform/msal-authentication-flows#device-code), however you may choose an Authentication Flow that suits your specific scenario. You will need to adjust the Authentication class "BearerAuthenticationProviderWithCancellationToken.cs" if you do so, in order for the sample to work as-is. - -1. Select **Azure Active Directory** in the left-hand navigation, then select [App registrations](https://go.microsoft.com/fwlink/?linkid=2083908) under **Manage**. - - ![A screenshot of the App registrations ](images/aad-portal-app-registrations.png) - -1. In creating a **New Application**.Ensure the below values are set appropriately according to your Authentication Flow. The below is for device code. 
- - - Provide an appropriate name for your sample and copy down the **Application(client)ID** as well as the **Directory(tenant)ID** and save them for later. - - ![A screenshot of the Register an application page](images/ApplicationOverViewScreenClientIDetc.png) - - - Set **Supported account types** to **Accounts in this organizational directory only**. This ensures that your App only will authenticate users from this tenant only. - - Under **Redirect URI**, ensure the value is set to `http://localhost`. - - ![A screenshot of the RedirectURI an application page](images/AppRegistration_Authentication_localhostredirecturi.png) - -1. In **Certificates & secrets** under **Manage**. Select the **New client secret** button. Enter a value in **Description** and select one of the options for **Expires** and select **Add**. - -1. Copy the **Value** of the new secret **before** you leave this page. It will never be displayed again. Save the value for later. - - ![A screenshot of a new secret in the Client secrets list](images/AppRegistration_AppSecret.png) - -1. Under **API permissions** under **Manage**. - -1. In the list of pages for the app, select **API permissions**, then select **Add a permission**. - -1. In this sample we selected the delegated permissions you see below. In order for the hand rolled plugins to work, at a minimum you will need to ensure that the Mail, Calendar, Files, and Contacts are selected as shown, with at least Read Permissions. - -1. Make sure that the **Microsoft APIs** tab is selected, then select **Microsoft Graph**. - -1. Select **Application permissions**, then find and enable your desired permissions. - - > **Note:** To create subscriptions for other resources you need to select different permissions as documented [here](https://docs.microsoft.com/graph/api/subscription-post-subscriptions#permissions) - -1. Select **Grant admin consent for `name of your organization`** and **Yes**. This grants consent to the permissions of the application registration you just created to the current organization. - - ![A screenshot of a new secret in the Client secrets list](images/AppRegistration_APIPermissions.png) - - -### Update appsettings Development File - -1. Rename the [appsettings.json](CopilotAgentPluginsDemoSample/appsettings.json) file to `appsettings.Development.json`. Open the file in Visual Studio code or any text editor. - -1. Update the following values. - - - `TenantId`: set to the tenant ID from your app registration - - `ClientId`: set to the client ID from your app registration - - `ClientSecret`: set to the client secret from your app registration - - `RedirectUri`: set to the http://localhost - - `OpenAI`: if you are using OpenAI as your LLM provider ensure that the - - `ApiKey` : is filled out - - `ModelId` : is filled out - - `AzureOpenAI` : if you are using AzureOpenAI as your LLM provider ensure that the - - `ChatModelId` : is filled out - - `ChatDeploymentName` : is filled out - - `Endpoint` : is filled out - - `ApiKey` : is filled out - -### Start the application - -Open the repository with Visual Studio Code. Open a **New Terminal** and type. - -To run without Debug Mode type: - -```shell -dotnet run demo -``` - -To run with Debug Mode type: - -```shell -dotnet run demo --debug -``` - -Then follow the instructions provided. - -## Troubleshooting - -See the dedicated [troubleshooting page](./TROUBLESHOOTING.md). - -## Questions and comments - -We'd love to get your feedback about the Copilot Agent Plugins sample for Semantic Kernel. 
You can send your questions and suggestions to us in the [Issues](https://github.com/microsoft/semantic-kernel/issues) section of this repository.
-
-Questions about Microsoft Graph in general should be posted to [Microsoft Q&A](https://docs.microsoft.com/answers/products/graph). Make sure that your questions or comments are tagged with the relevant Microsoft Graph tag.
-
-## Additional resources
-
-- [Microsoft Graph documentation](https://docs.microsoft.com/graph)
\ No newline at end of file
diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/TROUBLESHOOTING.md b/dotnet/samples/Demos/CopilotAgentPlugins/TROUBLESHOOTING.md
deleted file mode 100644
index 7dc291928350..000000000000
--- a/dotnet/samples/Demos/CopilotAgentPlugins/TROUBLESHOOTING.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# Troubleshooting
-
-This document covers some of the common issues you may encounter when running this sample.
-
-## You get a 403 Forbidden response when you attempt to create a subscription
-
-Make sure that your app registration includes the required permission for Microsoft Graph (as described in the [Register the app](README.md#register-the-app) section).
-
-## You get a build error when you issue dotnet run demo command
-
-Ensure that you have copied the appsettings.json file into a new or renamed appsettings.Development.json file as directed in the [Update appsettings.Development.json](README.md#update-appsettings-development-file)
diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/images/AppRegistration_APIPermissions.png b/dotnet/samples/Demos/CopilotAgentPlugins/images/AppRegistration_APIPermissions.png
deleted file mode 100644
index 34b793d26a39a45df28d457b846c460138abcca0..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 127548
zP1J#KNLRvSB;+|ZvKE(tMwwwV zQf{zUKNBkT5&3$KXieRoynl)#{1$njz;PBm>A*jOSyOB2>XN%z?%nRd$;L0$RX@kWh?W!|91R|{*p zEGG^$j&jccH_8J-_q<-93$kr?g&`wDXSo?ubds18%R~1Q(ut2tE z@E$T$e6~dW5qj=@3vYN<5vA8#!Vi91 z+c7>ogsHuS?-LM%H80?|06tu7`hGf`nCI8=J+|`D3Zl+kPd|rGZNEW=9n_bFHE=#j>?> zci$iffslZE5m08kKBc)u{&Ex8Oy9WRWYeyJ;;+%1(tRK`r# zFP6Xh_P{Rv0~nf1P$QL>?;hA zID>8_I0mMLdb<166*a^LFno6$?}_Bx?|B3Y9F~(Rc4fKMY&`MTo-qU72b@PU*1ACv zg_|>pGO;bURYtv3w47#6m70s{{c&a)(Xr7MU5y2bJc79mUvF`q(W1{C*sj+OCHJgY z=ZDtm4v_7mxfG!9sCxed>lpr6cozBD9dpP2O~TaG7LP7Bko;LxeZ<8_N`z1VL|c1W zb7a(mk|}xTXOXCpv;{~xfE&6xs}hL_W7v=;!@a>Tr3kgFSuhOL1zWEet4NG@ zIvxt1No^r5Nrd{JTTa~L$F-FbLMw`;*stm$1YZ6|iVagDUl*OFdU>(==8JFl)R5X{ z0KW3@6!sUnPBPS)_-te&M3m0&-pf*}=^po8&OjXTtk=?^9hPDnBwp%u6=^BPB1#e+ zR8(I!=e%QY{7HpN32~7*=wdfhu)m;Dd(oAZ3(0uK!-Vb@2+@Se(>x(fV97Mo>cO88Iw^_tC5KlqRM zV0fY|WIuHD`!1474!%Gqargb6Bu091ifz}RKR10LTjuY=Lj7(KhQ8;Dzk${NyaTe_ zAID{1cGik_f!L0iuq-5omttd$ri}8E+H%xD$v;wTdrX0f-JiKNNhtgK$)dEI4>rf= zS?UPAy&{GFEnF0sie{AB;+N=(` z4EjDBdz$vue7c7iV2}Mw@1}Zpie`J6`}1j5K7QLrWzQu)Gd^E0dm+D9Pp&r}KkTP- z@P1x(>Rjta%1#t6UiOGfF&<%HphF{83>OU7`uu#zz@m#*T~9AV!t%EnVcs}R^Nvsh zYtc#xD&u3;==$Tbe4iTXG_Q%!LJcek)A?3!&a1~(nvIfN=xlmxk>J#lzvwrA6%A2( zZV@)LNU02ryNA-L;{XMSl!gz>ksd3#{8>;ZAJgYpqjg2m)hzBKI!-axTXudiHJ@8p zWwo7^Ko{hax`@kw=eNAFRdg54Wyz`rKYl6|9`>_bnjkNP1HgspkdP+M%!m}J-` zZgyP$L0_p8qWe2NOc%0k)(jMBz5(^mld4EiR@zlN{S3mS>Ws2!dxp0eFkZanVeYMS zu?96Dfwpm(9Hhl(^ER5}?2YUr)b@SJbN6(4cV$aos_cY0{kKAv1*)vre)Px8Uux^4 zbkzAh4RPOny~&$@tug~NM#n(sU6yR$a7An&vMR5V^^)4N&naicD8jODoKu`XvQJ+f zDHL$d%Cb^PRJ{_*&L{{NUO6wgg_f~cs%4&d&g=<6^xEUZbQ-P1iHh{G*W$Cy!u%PB z=v!S_PBV|1d<~z!Rs1@%-1fqKkP~4?p9Q9R@%ld)h&|z8s_nAQKUyo;87IIoCigBf zKhYNqtU%pq4eB~wypER1n`FL~Bq?Zm+AW!ZHyRSe#Tmd7t%L4Dv?oEDEl2|=7e=Z0y1H~h? zCdIbrr+xgntLpSyghY6;!{SX5tWu|*gfMRI-B z=Cq)=WUZ;AqhVm0|bfr0cQjh?IR}GDNB~{A71?O3MNmzI)5dKCetceFkKUQt%tk~+vDQY3yw?p*c5kW>h^9svOq#{A>rAy71$l}Z)IC$vN zrA8(?o~-q^2moA!-|{a(&pch;X>?`4#>vJgXH4!`@; zdy~Mzgkg<(P3N~3g%J3Dp!PnKa^4m~qhr1d@ksSeU|WIN17&I4^ky}uLv~|9L~_)* z{kTRNp`NPtz7AV?*GN*a*XQner?^A8G>Ep6IR0&)?y2+JO7P)jF>RFKb`JLqE_4%D z{j7F`@h5@QVSMHJuAFd8e+-qcCN6;Sq&w~;kS#*yW?D06gq|g}u61o1D2pwc`-!5F zPqeNM^kYc1mQOUzAM3B-bLhvcgW1y12Q$c$kOMIy1Dn4lwECZH5=>u+TJ=fc!eGg( z%UUe*FV}K9oE9d#67v@MvSt@fsG%?yiiP{|^3eKi8ii615< zUY@JJxZfZ%lZ~QUJIkEnvsaRRiZkFQPt5scHsl5ewh3l=jf~wJl-G&|fU23vh|I4R zuots$ah5J9=JiHW-CsYO?RM9_ig@u_hK44p#*y{QZ}YJ8;xP(&C6>eaONPZeo;eNG z8oL>v9OsVgc3QlS0-PhYy!NDlGAH{~QvNTyx3Y7d`wCyyv$m%Pa^C{|M1IrCe$UX6 zbc2^L9H)Td3k~AeRDsCH`h7XGRJd6_QIWAwU5^E{9GS)bFUmT&zb%?S;)ij|x*cw{ zhQUGkVZF*f#)q-vvIo2TCM5P9X+tv~DQHhF4Q_hj5DmIhA$Mm|$@Oq^DT7`t2(#pu z*<^PtJZDnX!^mt6KA!kC)B!#L6;QT}LSubiB-#6Q$}gd=s-8*Kf)UepH3?7WvCwEI#FImoe;YC=OoXNB$oJ29DnH;b($Hjv(Tay542*_nDICU# z4AGvRSzM3#icv#PgpbLLX(p;u32TnNUSmq+)!P*dwj~n<=m+h6Hse|NyD#ow{$E`? 
zHi_v@knSy%0eQ6lH|h2o7ewsr9>0;0U@UUdkE3fgZ}$H zPveiu{=fU8C8r$)JNDYU?LPh*^WVLG=ije$1%79XIovV1vPH^TGEz>Ky{_FIe{`zk_)pc}#QklGa0&c(u>R~# z{!iNWP)K%M2ghbhCA_WaQgs8-b`s{ZaOImrKju1oi}TpfwbIp1o|`>0kkTTI6Yg&Q z*B=n8Qr@HF6qqEGgL+A|p04AvJwnpreX{&RIlGj~_Z=2!t`vP|JiAaanGG3=p4lE< zGiO3q_mXqLJ+$3tEl`oi`0A&2@B3wH+lT%#q#f+;(iE` z_cN4c1s~rt?2KpSR#c2RuiN)t?dW+y_?y=~427lM{8y>uI85%TvyF>(U0)FU9B-Ea zU@I|Tq3zmMH7V;v5nd=tT7GSpfdYP2xfYX}A26R#GYHJI_xb4_`m<+=fjk}3B2AC% zwdW0e=lXOX^4U^GL~$6b`>B>d@jn)TYlwK=27I|($Ze%Tr&i#>awp%{g%y~LWfI@+WD?2V&aF%T&UE35JL|tUIw3^PaMpKsks;_GTx&hX zH#2z9QEp1-TDEOAe7Kb`2i6ae4(u1wAI>XR_?gw&AW{3Fryc22W=7~WAG7D_i9aE zc9wsAL9qoMDZP-pi=AR&_oSBEC=7E4$HZjCXI@-GR|>ED#J<0np|S2RiCZK&nQtP3`V}!Q=3Pzv{21KqFu6L?T+aVy^)xd)OD~flnZD-|c%5!Al@J3F8x}aO1W; zhBH)AiI(VD2MsC__5%72KyN$A$~4t~+vE(t??bNC{rqdr7NMWDFoO6iT}I3KkT2@E z4haHU{&QH_e#7J*fB>xfbe2;4agEC>uk6CY6;OKtaq0z^>%IiqNTZ|;I)ERMp+i@V zj^caxBe`MOv_84GJkJ2GUIbo)${ArluMi9O%D%Gx6cQyp!&9A@(^6ksbPc4pHa~d24$1D z!d>J%`Hq_ICnH}Mp-w%#A$*#@u3~A)p7=BlUz}75HX!znnN{Mw;cIxLl_lo8pfX$% z*Kz=@!XT%Iw4(<*zk*)|I~JV*7Coiwv*B%HSFKuN3;zsz4;k`;^@t0X>r0ZcOhMF6 z9b^AKu=mx^V=|9oDA_Lf+xQuQ!v4L;VS!JMTSi2|mKvWhe$lC&#w+giuo|lxM}N$P ziTD(oaE}`^qcxJ3b9qg^3$lMcx8q*7S##a8LO6|Z%|GR)Mqa+LR>jl(o(1lek7Yqt znjA@aa)}Fl$Iu*~!6}`_*@`8Y@RVDIa`OBQu+u)h%#6#b7jJ8yhb!Lkz<&ohK6=2a z_N?65KLQSe@&nVmqe$PHIB>)GAmTvZoez}=UlanT@m6Z+e(%m#*aDuDR zif1II`OxvpEK%H%@w7*t@IQ?;2ytjFPY4fxX=cudO@Xs!1Omh-s;XvCos9{?=gOy-idmnnjSB?siwJiRDHs}(i;H;# zQ?$3ZKN46sZ_QIt+(U`TUhehr4uDqkA9tshsf{NbQe$-)8>!wiuJ>8hR$GnQ1>Aud z(-I$;7Mo%v?5Wv7w;fkAh>r16ZjJkJl2+tJigzjTcGH(#!>Zd<_$x8DAfPvDt(7l_ z%+`b&7a5YFUL3(^TghkF@TH+QMfCKJX!bIsFQ{~ChINCvku}jjy=|t17NSB~zEYKy z_N1)0v(yPsl3CA&S_fuHsbP$Zoxd1~In5SvBbV(WDkJyCAb^R}=5(h1BK5}D+5Sd6 zZAR)_6@-`K?Q@bFY)vVM%xOLNMqtN!CCh(NF~!x7ZA|QD9R1AJ*GBSL%r_aI`TGo= zzeBcp3bAMV7orQJAB#?mJRVdL*>FlEv0LcM?Gm_D!0V8B1$KOs0$5=|YIp!}cLW}3 zA+fMmI#pIJC>v&4`V1z?+nc)l78=Wj|zOut`2D-R4v{uQA4_J=h4`+#<2FC=n1L=L>AFP)_v zrc-mZF6%6Y6-SuwPMi$yuER83`o1kMRbNzF%5Zt-9;hK^4v1|QYcti&cq$6u6l@Hq zG1=0ImuFA@4XMgoeVYFHt*o7ot%>UO*X*@#(`8FIy>Zi5%n2_cdvX};ztKxe zONs*nlOjty1L%ShTv0j2-Gaic@41Nu>Y$2ppfOP?*VD^VdBCPeDc*a;OHF{eC1YCe z)7!h(mDJy_n11&o!9Ke}d`hZhZs_Q565K1=sgjyXsu6$nIlDS}E0t4}5KCQlPL?m} z{#|9U)j0cYkSeuoV1nEaeBkE2XgBBSG_=hZV}OgV#oR27JoLoO3%$3aYBh)A$QNm{fvCo4uPWRGTlFC0Q{ zviP$apLmKqFND_%-OR&05iQrf9!`@BEs+8*`V z#HBVD)715xWMs5u!-5q^ULmlryxg&o;QVUCx4|UmfqRye(sABLiL%j4|L@R?RWuGM zzQv7IO!nS_?5gDgyvJKv6?{n~bjdDAV!mBF^PKTmF7uXU0um~o$-1gKolMfCWRF&w z>TbWcwpy{_?z2uY)k(6r3W1v_S5TR4zc-)!Wuz&xAxz!I+>r1YH7rH%x5<|~L{BV z*82Z%>mZ+bUKk0-HV}AyXnTVF&vo+1BVY5Dv`aP>A>f->l}8zbiSxHCMhs8iB<$af&4wK=6P=W-I4Aw2PLPBL_^n_s5J6(5X4?8}HRVN-lVVZkFEya1S%9jWy}4 zB3sz%{GvN1QY$2IJchK+nX(Yff3|)P^9zV={I@imJsTl|_0p!2VA%%B?G_`QDwj)7 zKj=L3-H&@k_<~nc;!q__7t(Y3*!0mtF^;azvI{EZ0NFv;~lD*TTwA~AD7ow z!e?iMvqH89COdra@{R8+F!z^|LH5+2 z3(m^RZAMsEHqk4`&_IkNSpIpX-+f|i(EZ%3M8yd}bO8(U!SlSXb+m^Q!asKdl$O4f z*B$;P7kYAI?63exd*Lejk^CUK&}^vamKtjbVLqZ`o&p-*FSc7e6K@wJ-jCL3aljeJ zr&x9WLX!6I2D}rMdo#vJ@_725fgvE?n=f$8zeDB6n&1vq@t9O4xy48%53NhaR6Fx*ImXZS_ng|J6QhzP6YBiU;ExVfI~Qtx>$m6r(g zwQ$<+%D*e%!L8NBJD~ta#qGOB>+_pHg0rpE_>3Kq0kOyL>t6vDx*el4fg&r*rr`z+ zLQ;K@Hnqy8N7#vm_1$Jf?cy&e-tz6~vx*afu4$d9t*DY1InFEkJ`d#!;bmLPrJctw zk3`^;MfAqA?c{2uwqYNS)o+M__v}Zyny2R@_NogWY+2-MLOOH;r3vuyi6n&3b@LX} z%Ryl5tVK4pZFl@LEFSkKI+jK7l zjqeUG8G=_PP!60Ye9pmuMSgR#uNCKwBm7gM4a#I4g7Mk?Bxjgu=h-Q&k9}hu@Cc}( zt8#lPNay`Zb&p9;$8E=k@(uIf7mS;rZUYSUu;L@ch7@cZs8C694F77OMCM zJ-@!$X9rk7O|?%t;)=c-SKY+hJ~SS2KyKP{EallvWaT;A@v_Y}viSr8lU%*UN%Nh7 zcA&_iBmT2we!eO?*AEv$$K+DZkdE=Jf*Jei(Fi_bE%mYNWc4()_s`!MgXS8~uC|RG 
zw-XCTFRLuiXx$lRR3BLFX^fITz5P92d{>hS3lH^p68%7zqZDf-j$qKz8V%h*8|$dE z2K0MUmPrkOMvO7l^rIex^?ott3AA6>Y1tra9NYqbwhZpLX;mj2t#WzMegTGQGIgY5>|OzWumU+f7h9c1k5c7UZd8jcF~-?0NmY91lJRT-0fNXq1T@9#gU`N z=Ncb(Nn~u4oG6uIoLG0EM-ZQ*Ra*i7jEU)IepCEu&RE2KO}aiOjU7KPZ*7Z&M`hC= ziR?9|%~^k!qkip&e-EK-4OuJnK7DWb$>9wl!g7dA`kgH0z7~ckt&O)(bJqz>C)7b1NE{UitH5r0m|o377*PsAL<`(2fNQKf=oE@15HQ>o9RvU6QHXdD%JZse2lG7$^jb7zw&7(9YXVD;9b3T& zD0-@D#$&a!hD>`Nj1V?{&HN6#-E|3L|AaruyQxYc1>bzH8&Bi&@9d&KHL#bt@TR;q zHC-754p|{XIUzV3bA4Rk1^?hExd`mMJyG5X!wSAQJx3*0!D6sz>qP0fx#-P2i+o&u z1#i7oaB`>%FQs4JfOz-*nfCf0cTQGO!MA(5fe5H&-}RtpQmbtQuF$~)T} zex-g)@@vAse%dv(?%V4u|4k|q7}VA23VTQOsrbRlvTOaXCi{%)8}p$7#-^L4YfL#L zG{t-qdj*MK6S!VcYK0GA#R$9yHujx+IOqj)2)k-S=gKr_P`ai&iZ!e&9flf zXn#00`=Oj?WAQ^r0o(3BVi+$XuOMCKrXPAlMxwe{BBWx!2qEyK5?V`Z)2VT@2)LM} z;*7_+F}o{gv7fdzdNaDUmyU9wllFhGqy}UW09D!waYwb<6mnA&vA)X z)9na9P=>TR_LKluniTac(!SOse9XsunB!-y>t|x@+$_o>SjkEJ%Wt-)>+NUT?F)f( zL-CB&gif1S9f47S3CYm9Kv+!Rsw(MB;F76PjZiO{@n?gT+N-sjP+)jt15^(O+5~I{ z06)?ELKff5KN5en2!NzFL3Ls-KESrR{tt0)85PI(g=-QafdC2a65QQ`B@iIEySux4 zAh^3rfMCJh-Q8UpZyXwjMyB}v<=#7Yt(m*#%Y5n8QhloGoT_v7e)hX}bvJ_(p2z!TDouRUmRAvnj*m5j*Q)af$M7e5>71|$z=KF;dYBL zd?&lL&^2G+S7+hK(8Ft+Ng;$%%A3_$8X`owPXn9jO6)Cd=$fJ`A3 zbkhs|J@8>y6AV#t+k|DMJ?8GY?}3#ZzVMtr^ctCF89b{G14iB~x|u&=;$2h*R@+Z# zdcf=<=L-~3fOoPsAx!O!jVN01WoPNA;FGzQ!9*RwdesZUF5oc8neKCeJeuriqa zkT^RL{a&7Z*t4A!iod7Dp%t+|tVzo;JR;?g5|KLI4jJPOTcrS#FGci+ib~|VM1|cJ zX*KOw@#}m2(7 zWGW17+E4&0gK=gxDzUTN&{!p?heKgXRH>a$x&e0#n02%(D;PTLMmn2v(yIWWmKi3h zJ@IJj00;ETO6$^vt_V3odRA%O2yC4mFkN4P5<(Er}cZ!wyE*Bdwpn+;D=BGo60iuS-_>6L4}?W{s8k zFA%F8uP!8gj%cNSS}gw*V>A#rbncAmf7{)xl-xMxg*XYTWcjhCom7+w%d!7JGjo%5 z>vs)?hB4XP;Lb_5xACFhI|r?|<_5#zCW8~a;PV}zx-duRFa*&?6=f_IEU`LkoIfC0 zx5y_}j(M}%7jEN88p1;VQxi<&T;?AS#n!bprUulB_x9!4F^zq9f2*Q-%VWb!$DHDB zJW%;Zw+F`ay7-Q!X!mQ$C%qWzQxL=)M*EdA1H7u*H%Rm7QWb;UELu0npEA)va1|rA z+NR5Z)Ct_lJFZD{ovsOP(q|dacN0_SCs@~|?`bk-qm8E?$OG}<9P!;U$7>tDkL*B;5G6V!G11PA9AW+BX+OY`IYfVW7~aMwp^19^Xivt!o+vtuqPCH4utY- z-=fBrI}m|@_Qu3H%Sz^OVJOBQm*{bm`@R-w>ZA|VT83Z3Npp{7)DW*f`Y>ouKWH9{g z+H?i}Fy3f}_tBG)PJy926aj%j23c6+IkqzA{c>vXfP%kvvUr0HjE27Nq<32S2l=pu zttzc>*Pb9s6!!X@<;6tU)W!@b`q1?%4tg6-oINKlI^BPKswq4Z#;1>+AK~zW@SuH3 z_B%s9ykR*N*}eCyiDiz->AwXEmE9{_gVh40bjMul#}*wp5BbJgE}aXtC0^(Ym3n!Z&`g2Te$N>MK( zh9_@xQe}K1vm`yplE{Q;aHbnypiZgV8_~{zu`t{nat1BUjnS=Sv{m|GsDD0RGTF9# zpqn;c=&8C}WSX1^MpIe5S0vr9^~7FIA6AxTP5nZ%yv{&97D{)Kcn=E4g7!`JkTpdP ziAxHR9xz(%I_O_3y1J@lvR9S=e4SqWu1lV}A}Mb8HHcgN`}QG9y)5T-L%3AaegS`! 
z&hs)FuNwNiZZK&F{hNY|(&%Ig_w)|GOYvQfLlKU(dDM@gB}Sv4@ZpGX18SdmoyK}J zVv0j6|5OZqS2YzRarJuQId`c1h*WT=)5~U$kB8s|%}Cr6Gw$7yt?N26v7UsJj8V2{ z%xkvL`TYz;$xm0lWct4GVx+OgVGY#i2lcUgc@?*Gb$5Hk8Yb~PI+2lNjIHmhplR98 zn6TUm;r(@W2AN$4YHvIk)TY+*9N6G|cQmEP8Ew7LdnWwO(`J}qfxXpSkdC2Jd*tNU zKSLC*7YfGRCri%an~ow+@0AAl_0-u(+^+&r-CiJTa;BZu0^xvi@Zr?P*3{*w*?h%{ z^K?$OpF=KIdZtC3#a;oPJU%$v$UCsb4PnOk^{&|(meEJLVXdS@V61YPxE&ZHa|BHi z^ffNMn7J9}SS2!)el#KlK6BB~c6u>uhR&*mC_U$-#2P&XeVJ?ieB)xRUp=&#_;HRT zNDg*))rJ+ja)zjBCLR1|2Wu&Uz~#3CMn(DJA&`I4*Ob>-?~N5xhazrn4VaV;b`fJA zkUsSvu+OB>AT;8QCMzA!?jt_**~;78MT}S7cjLXfhyA2CM?cP9JU+2_xM0j+nB!4( z>sqqAO@PEyuHFCih4m2R#G_{*L1yd_SQ zeSbmKYTT0yw~35ZPVyUr=K)>GQ+zT227V5Xe}|#d`EZn?em!=HNY3{1YqH37oO5Vd z%2;;HmfsIM2Ay=xjTIRc`M$g7$n4JVP~^{zuM~E*x|dQgN`^x_+mki6$6Y3Bth~h6t)gEYI+%IyM@Vz-&T0+q5_5n|5 zxFah*%+_nBq>wRUhkP?*87Dcwg4^VaTdr?74Wo5*#xYET^HCDk;P^D?y7n5YeL91q=j|>0dGb`&m9`T|x;2yIHA!S{x<3}IROLJ-G3UZoZ z+&kY*ubD?{wv&FtPnnrZ>EuP|95;X1%N^on0R7;&lI=UDXL`&6i?9ocsCj_hz^7N>-sv{W9#56)P;ZU#hL$B5kfMy^-RX zdnU(28}JRL?nIXJuQN*L4l!ShQbGzw9dax922X5a< zEdXjm{rR;7nItNFXm`0gmqa3NeaF%Qx=Cejk^8;`#IZ7@)3uG6>Bm`Gqc+0@s~-G%Ww6t@v98!}UB;k-r2~_xXW` zz~vM450|Nd|Njf97IiCowz^d$R{3OXEP%nmO&cGqiR{Veuayb^^Mv}nCI9*VXaT+> z60j)LwA2Kya9S)}N0l$Q+_IIoKw)p%-P1v*+_O1AKD`)_nZPHIl7~gTdA&6tLt-8X z8#F#8kLgBKPfd`f0^Rl$?f!51qA$rGRUlXOqy>xe&L5f_H$-zi?6uA?Y@c|E9u5&1 zpKAk`gWlPB+14B1n20mx>pJfVhd1zXQ&j~T0N%iPtYXzx%Y2HO**G!gZ?cR+9L%~> z8LO=SDnJOu2ZG@wJD&f+X(|3mh45tlA5EWImJcYYU-7U~4|!^-Stw=gwE6W1up-`Nb?O zsqSI?i{k+o4|tv!Rp+_+f($6YJzejjl1T0qxAMm(7TFC$R65fry@&|JtTi4JXR@ev zMNZ`9o~UEZ9a(!6W_)Ee9^yio9D70FKc+3+=c`Glq&uAchIS4}05nfSXnfIPEGB9X zu8hcslPU8mJ1`qL8?1`&a)AeF{>cf<7#^;o;96$1y-_e82T}!*;^ghPPo9R9{j0a{ z+Q@Q#Mg+VI$JgY`2KO%Xl*Me}1Ym%UAqWZIcyWQM(9uOXj^<7q?C)9$VKs1%u-JlQpn(b(%&uXQ5r7ceH>ZAsIUm=K&HY2SJu z?DaWrrG^B*8jVR=uM_`p6aYSY-(OJiCyO4?x9d$weX3FLCrPRY&@;wVGi|LC_+^xYpjX>N zYn?}b??cNE#vs>0I_Gmi1%;(N<0?*I&nJ+%-p2PDvF+?XWsC)MSW}*XUsCy$bg%7) zjxztU{EE4(Vzey-rG^v*c+^={0wR>mjo6Elb=H+h+IKGU(*FvxsCVg174rj}b4Vjd z+swN_DG?|wvMS2yvJwXic>6)`P~kTArtG;{NZ|D>nNkQ*VQ>j_rQQ zr1-Y{=4c4!2RU-`3N zHVst?F3ts=6|_?B^g)d;4^ketINUKbGW(oCDllBO=NgSh2QK=w0s*A^-eauQ`T3C3 zxdNydK~GFERU1W(hjRUQqL4!iWj0SMsEx#}pV_O8Yzco*d(V>(H2Msh)WG#+sFyYu z=lOY*5tN~@*2mpusl>meb!bLst2;g~h}rEFf`@O>No}>ASR2h!>z8|?Un8eQuZnnQ znJ4@y>&VFCLBtG!&4X$vn%~RsUjHCtLbZ(VbdOWUx+jTnH~GF-wdR|cc?+6;8PAMo zs#}_YkDfDU6g&=t)9;WhrCiIzz;-wLw%RA zQxt#C4g9k}`j0WwNEt!pv4U$^_0QRqdOH0aBs+rXqE@-U3IDng)u_lU!dMNRA(7)E$0o%7rfRhbmVG;)E|Vg) zjVYs2`>!Wj%SU~N6buK?S2aL&p0Owy?*YbVN8-mp=Jmv$ZmF#HI7R{2et)-A^JVy% zeCt{T0;ow_M%{0Mug#rQ3nCg5al{4s&hLaEcQs|+(CFnuDozN=#jq`_yT*kyBc~GA(8${ z877@;{FR6v^1vHxirYS3WgE#=JLd6rrm#bYzuU`ZJg?b55>2AQV_*perzOar3R=a2 z{L)s5#@jHTT`G6XUfZ;HHG{RmnZlpWGvozMGd(hh@T>c$zpwFhHoear$=vwLNrU0q zk0Xq6X`0(v10CGF^)kpR#sz(4cK$iWz=cW@UCxfres_FywCDSGRrSlI2zxu)d#Sg6 z6q|a{mXESFbYFoU*;SbNUS~!i`!XCglN!RN5%NZ5+OMqjuOANZl(K3l&5W5h=Gq(N6GKeiCGvbTQ0LT4W7b^AEx9- zZQWbkc2AMgK0YMJ|EQp-7`_;roV-6VuE>v#o3j~0soKKD+3 zHIpi%ze0}tVx;?G)JDNujp^Lj(nO_3BPPHN!8NRq#=4zi%sd-4x@G9dMSr?bSQ6eZ zA?2Y7O0G{fY53aA-*qVI=r6^N!0pDXJf^3;?fmy*UDhF4>;3!9{x37U9mHejGrPne zzYFxC5>Y2+ukBVn$fGyy&3O=@@x?3(ow5`#UyCSjS+M&%kYA-Av?c*vIJEj!$a)Bo zubO*@piw)4*;f+0;gTQj%n{g&Gn}Dks3lLJxP|wFfS7#|MWJtUaj;yc&s{3jfgEKh ziMbTAVgODv)6gv8sD@2{m48g6HC1Lo2b!J4G}8+a}Kna>(|IF*Z`-Ey3~<&S&g7PL(g4#KFRN@!@Va z1jfY7=~-hvVA8EO!0$?hhO)j_CU=aBGW}oOn9O&opFaz zzX+4Hqr-9CgKlh*d^wU21 zWH&>p-Y=tV)CKu?2WJ&&;U8i9tTm<$mYCn73I=Q9fANlyvPd%aJb^SNgr8^(?zI{5 zUM$I~zZuQmby-hy$Gs_K>%onQ)C#syN%9$~mw`d7KJ*}}Udk@eL3n@Zscc!Z*RK{b 
z6cobLrOMB^jbcgYx6sD8ZI)Zkf}8)y?rMCj&&B++s}YiEAO|iYQcwu;BUKwsYY+!E39_YcQ@mSF?7_M@7=w9A zM6W;61ag%B=~%>omL5{oU%*6lD##tn3liU*|BU%pU-JjWGzTf#btEC~2P<`47iO=r zu0QtpN=|)D$(?4D%aEi*v(Edw-F2G68?^g#TtVFw+E9SXxBqA7o@sQrbU`fUS?UT94po&fx+No>gi7 z2pSVUqqu7C+0x#FR$;L)#@S9YLQinlQTxO7OpS#T`22BNKxW#i**FcGLSKb#QDuLW0tku?0=(&-2yG zS@%m>QATrLgOwsLd;X}g9-GX%j&x!W>L*|0!jSL8`wkcyvetOQ4!R5z&8KZ?=jC1z z&-ZzZ7mcEXAvQjR715NLg`1m!VV>Ss2e;S#ZMVWFjW^o6AevD=W_dKhVDA3Zf*@G_ zRNMpC!~)GEvz7YT{>iS}DHTbrpG}OQ3G8=>#%UM=X?o7bx6!jF2}k48T}bU2)Qh=y z;V08VZ`FQqIn5;T_s^&N(xZDne(squ2U9y($zoJijvtlec*L`?%{QK1`!!QpW{%gC z%RA9zO|>JxNi5So-#u;kq1nt3L)4dv12^(d6mKX+0H(3{^ze1lg0MEtaUZ`b(bTRn zK47X1C?{LG{UTpx;FP?+U`kN_nVV)x?Fx->NTE7v^+m#%oHHM~CwS`@7$Yl&wH{a# z12^X(mTH)UPE5G^oi|-+XO@S%PJ5wi(~@BmJ}% zDl^g$kvqfM8Lax2%KGtIO^h~-MWPLm>&(xgmzlrh94RlA!akX5?sPqo_{l$l`bvbw zf>>9mDBbmsVi9WghI$HoCOW(_oqqX6ZBq9=H!IvqzTl9w}UEkZa2N^JONjfc+>BmDK77aBn#m z(_dIvbC^i`RL9DS68EH6>u+h5SbNZ#DRr^Ff4^{Llf-4#q@+WBMfW~3@1*hPY(7ve zy{XG!&8j4lVs~hYBIJF;z~G)|$^Cc8ad|{!+@OA@j~-5hCdB|#v-6*F{zm;=*aYIt zFWKw|1f~)hpXe?i%E`NEo>p9$*AtMiK(E9qvsE}GD^hhh@(TUqyGGP-WC7}V!-@IWdGfs$>2bx>bB4$i*j0M7rw_TSS~$37aWhPSpHZTF>LPV@y|E;p|Mli_epO?halyL#X@h=sfA7 zbX78nlAG_J3Eb~gOo!z#CgRC;6ogb zj~weQF7$Sm%~%7AlUz41411^g39%Yk{omjU4XqM78y|2DsDCt2&4%|7ts5)Z23KVT zQ8JT%qXT*JSV5Wq$3H`&gmj22!B^X-=!(P|mt|7|j`=;k(KCNd$krA8T&hv}{eyl< z`)le^~qozU{FUxWCbloS)@w|z+90RBM$q--iMeKVxT7(cD+Zgi-j5n z2cqum-D#?R#GPLyoVHQ(^cx+!Ovly;2f9c2FmW96bXyR@QBl6kAI)Z~et&pN-H<(n z(s~FdC|jSCu`Xh#jd*UPhGh&E@#ngjSZ6f?dtr@>e^X8x6kgHQ;W1RXn-;1^jhcI| z{i@8O*GMICK`?LOoD%t^;OiZH<9X0pIwu}*TXvU6RZ=i=Fz0STnQ;@rHs%H+j5lIg z`l}W4(Mm!66FqnM47K}QK)oiYK=#`p*QcTFYbmeM8Wf2o8fA#~nX8tGJ5n7AhyRut z-_;m>MjxU3I}6;>Z#tB`IAXWu=LIFR0TmFm^buoetLCPagGI5+8k~O~ zlxehK<>IiuUGMtr>0Z#4x`X#J`nMwlEGLS=`?kZW>=u2IL<0PF;>7;Qf|@G8%!SWp z1P6~M2uIvwX?r9iKzzOi&kzno&^PP}wXbK>yYK>#6}E;FXdym6H_aee!=5B$K{#fi z%c<>GJHc%3@IXV=vw#z3xcWv$+J)I4`Q6kRIncOd-1h9-+OzT+_^ANkAHZfRQn+ov z55Z>of2}BGc~>N@t zkaRlh862)xs94>TFKOGB9wZ7?0?{IE9QDav?j2weDj+DDKA~v3cH?JDd z?*I?UnO7XLKm$yTe5WrGpZ~`>){le+mLsT$+})mI@V9ve$S_cHdaj~AkI#lqm8Qyl zSYf*>Cnx&P=>eS*Hxeq_Z$hZiGjS)^H?h>f@6}(xWg*7)Q!QzANN0(EeP}_L-)s3z z9Pnf=yxQM>bx68AE5l>4o7D8p|2<3pWWs9wp84mSQ>?ptqIZg-_K(zRH_dN`1Xf+g9ak-uhDZAt-oD) zP|<`K?e-O4BEzNDh?Lk3OhBmFDDU}cs6Tx!GOS~OE@I!7@G zhnzId?{lvT{<>F-e9#+r#8L=On=3KBUAi5}f^h0I{c69PBJdov$CJS(6tKPFlf0VK`y)f5d}{9w|HUhzptAS zxMm%%*sV9mTltX3UGAr=ktm08#}3hm+6gA9F-63mK$oUk^0!)!yPo3A7Ca5|dyV_+ zE8I1>vkC~T=Ta;?a3?7Qt4*CHd?`>lf^_;k`!8cX^OXBI#wvwzoZU4ScC_u66FO6 zVod|H$?6{GgBi%he#?v1{c7aN4zD$oUdEvLloke`>#lT#<%*{=A*(CKvHT$A%=;qn z{8U8A9q#RNxjjuLKvk`9e}^l)gJ%vf7VcKD)Gx1G5a_RY7Jw!ZT}%-@CEkp;54Kba zbldkQ;oR=s+=5ny54+J;AbI}Wedt!Qs1fP`#(p0k1>0H>c7ALQ_o>@{2=b26$yc^z zOZsFH5|ldzr%{@65uVfufUD=k__@bIJ(kC-Lh-nfEAsEk-A7Aysevs<06QppR&(XI zX&O#gr3liNz7dpmcd%eW(qg~-)^EoGSdnI1&!a&?6cvEW0kBeJDW%px@5EgHXVSQ=?2-lXwy;P6 z-hIwQt%sgtK?qN8*P>MVkn6~Sd`Ny8chr**QnQ~C1qiu}nw=2L2c*<^rP#cfzZkSC z-ch&`^k}m1P}H}RKSKDzb|KB5u34Oo!Yi4vd9idY<8QZA)+{&nyF{zOYMaEeEMwQN zh$=$wKYrq2J=Up+sQYd5U=LVohdqzA5cs602I(T9hF6Qsu z=j7qmn&}&Z)x5QC!Ef$!r6LoKe9*a=JO1vKmRLhoJ*-zzMGk`t_$s(1bC(6LAWVsM za#mm`3G7mhzeEl{-PXeHuLIcHAk0W|x))~0*kqaYb97sZeO0gA!rb~UKX!g|<(0u} z#Ng2`DG5rI$;Bt72P!wYS*pKJE~|R#xjjFwA@IcwiZTt+`pX>>BHtT`8zLAo6OOEJ zt#F`KD0Qx3TMgY>oZozAsNZ%Hc}&-nTY|1MsRt`Nc1TkmiN~4@%XWj<`iu z{5_Gp*QZ>+@cjs_*pi*I)bZQfj~tY?@TlfaaCn-(>V%h$EB%NAjg3VT7A{5P3E3!K z@YqfKq2I+s_(iuUvar!Ng`ZRUUBcin$lc%-aCGchITTe9b?RQkT-_wrA-y2FpP`{` zy6L_g3*EAqOV#Q9m@~%RKylFjji`dMy$GS2SL?H50+=jnmzMUeBNXko!kr$q#q7v_1^!Kh5xQXbPkEPo@Zp)G?f1 
z%EQUTF`kbk9$;yrS#VB~f-18#+2BOjcn*|x<{g3R{iX5){I5_9XgSn3a|aapXSyig zzgOFVW#?!rVTc0jsEXv8y*g+EE{sb$8Jx`Vr9D_Cu92`6X(vKkeV71`t1Ws1r!?ic z{zK)ydcKRLXyqrgH&ixsWGy!2xzH1%E4*9<&0-QUZ|YFuZQe;VwPf}dW(QmY8$P1IGb^Y zTtvDSOL$|>w_ZeC1bBI7SC5`$bQk%CbU*|g54=1h;)D}fo~4pi99^1mpTiCzyrBbU zxl6VyWcfHsJb3mUqF@QG=wk9jzUGXy)H_aT>+em!7#v^%%qsWja-ZwdyD@ixm-jX$ z(<10Nc&#uDd;1Ovw`(#jK_z2Wfs?u+pv=+?V3G53$Ri~rR;>9pnCt@DSST!A$T3Ih zYCjPnul$Plzra=0kF{t2(E|JfTH9kjL@sHj23BW?BJe~Q!Do=f>V*2GRh6{=YAwuo zg*$8zqipS0d5(Qx!~uC8X5@0;h7&NAri(r6d@oef`k>(Ly=QWH)rJc4z6A%}rY!Y# zMnb?D%_$?HC^`~YqXzpqZemfdlo?CO&a)-;N&N>V{{_T2u-O=+`keHv95HlD)%@xd z`}F@3;gMX(2oD8U*4k3IzcN4BfrRhBdT~n4;t78F>kJqO2co}kDX8U@LfA6@$iWeG*F2TS`@JWT5gc!9T0}2s&NKDDjk{YC zPGFi$g72}<%ytk1Yt~fx3>nuMIKs{6hIwSe35eL8Ud*4G$46<)j0vU?`|ld9{uz94 z!?6|sLeY%@X$r4X-ZG`nD@BKEgrDm_7%NOV-qE5G6|BKwh=gheSmp(Mt>DU3Ng2k^ zO8Z`VsBgp6BkjIueYNRPhuzH6qg^vk<-CyzdAMLDVx0(A_I9aMW{65@?YvH59}h$3 ztOOjgze>ymbr+2NiIhqxS9=Y(?k@Z~;#b zx~3i6$6_E7==Nkyl*K}+>4d8HL|=QvFoc#5g~W0AZ{(OBmJRuSjTb(M~aeQwSIPD1k0G!Zz39mLC4GJ9jI*q=^PtS0Tj3 ztF(*dLB!rX{d`2_0GnU`T+ERbh?pIzj=crIjF#Sdnco3A`f$DBJV1Pgak{~VJnBtQ zYPb8FUpj|^xH9=WYZ7DE^OSks=N@v_K0^jmXSZuuH6Irx z(@~{awbwp)@Ko*a)s~DAE}2VRHhzFOi&oC3e)bI2{gF3fZmJZTA!JFW(9!}^&|#L` zUS)`f6p+GjUd@^^`u1?HdNsWPCQ^iCDY!^ls7w42mWJ)p6z{HCmX_d1@&F|gRr;?& ztLXBQaMs#c9$P;{MVK5-{qI0u$=0EiF@f~`9(91>J)56Y1Q@%*0?%;Ak@en}=nk-_&?Gftn$DeOFVU*nm_MY$U^ zShwLw3`pGk)H)455Hy-?SWFeC{q4!=RZGY>Ga077KEP`voN8m5++)gaq&T(?~KnAW~El^3>PS-iPS1l`1H z1h09d_#Bp4pQOlFlgt2Rrzi}1`*4`H;_IjOn%&W@uNp(4 zvA2!({3R=NOa0g^6r- zci2M%w1ei$dEPinH<{VN-LipIzusyd6b{wJSjDohL0T4<=IXtZx8_6YQ>KO=^qD1i z-}({9cNY;8^9}AK>wZf$CG)H8us_}zG)%w9x=mL99dN9m^vU=1sKC!SYDJ=azRmDMy@!P5kz*;GAs>4fk=n9G*j2tDkq&< zi3Bou48$>x=xr$@6?RC)nR(JiLzp_dC9w~Fv=Nl+q12~i`3&#y(tMK|o#$fzO5jdq z#4P^J(%u7z%wW<%n8uD2qn>X1q@yb!Af>^6rPZa7IM+?)3ZO z`X#2E@ZDkqZ=IDdM+jeq{#Gm7iEEf!T2SmTMSdnIq*PkNYVfVT2>rNs_7` z6ziI3HjzyFGQl<6T~*l653K$YMzJ;p^rptm?2`x#{>joW@R0o@;~q%#A4e$!6`>aQ zY~xtb_D_Iv*rN5_>L&x83op-h03qZBa=xb1Xo)SD_hbZnR?~-ygovg&dapP-FRcth zk49qK1~Q1ZL0qeX9NY=r{;i}M{Db2W9)u_HuwcN_gPq31j2q8O&Vaze$e*Ke5|{z; z$`QGjP#`r_ffKxAXRZ^t9kuH)ge>2nZ@@o(`0U+cO`hv?|0@DhgfsuNjimefkCmCx zBraIrje_sSlt0&9O&L2%>e_0_w6Yh%5>}Lu-2I`3n-q0}R``Sv_wSrRHE!0;UT$Zf zOztR>K{C;B(N}>UyyzhWv$rJ);`%)*-qarssjHi6x#=k~Zm#kIo~`ttfyh>)1r@*| zP#OK>*p2>D_FGYSMiN1Jbt+478;M+ASYVfAZb#L;R84Ivi_BLK*%zh{e+JYWX1w>L z*g`6sQYjXX=Tk1AcG3cV>hIxO62?QXo5RS#Y~osqd3<0ctp4*l&l;gl8gQra-wxOJ zbgL)qMgs_qSz+4(ZPn&jW@I|~VKsNumnv(qvFIV&M9p=16>uHt*95Zot4v4FBY0bP zJv?_K8&RAodn_IPi}O}@Uc7N@_kVDnnZz#yfDY)-abFi?A-kQv#S#frYqI4A!a?TJ z8xexRMRq$w3AgR-(ACq|FVL3QKbhFw?EI(wcidKhDg-Z@11PWR#Fr9?dkVt6c^NP` z;(RfAQ$lq>e0jn1Z}wnM@eil#VH^BnSNbm+pf5g9_5YpS{fis^o1pEIsm;p4b=vqq zm-iX`{tX*~&CUWCJ@uNxY_(o6p#GT8T zFckfJ_b=^ASJTW?UOb|Bv+{N=>=j;fa`f2*igMYVum~U^U8pc3osA6J*ely$r%n8f zN`4)-4vBZj~ zjcrJVlRoQnJr#cto`eION1L9*iwkd#Un&=BZnW}05NMAjJJ5=k-bMj-t7ppOu*zlo zaCoGq@nXWHYc5s?(OPS=khPv4F>D^$BTn3+WQBkvV8S1Jdu{!U!tMT1VQRs?{J8XN zq3!`u!qBhA)Gt0q>1~K=TdlBp*{*cSfxT2Nr*zgmRnW3*t@tnA|LDRPOXY}LcSne4 zrm8(&`@nei9~^Yf`Fk_l0}Qr%kWPA^_;^_@umfc)?Dzo(TZ(KLNj|oI>WbTB2dwq#@iF#Y0qGcu_-_X1fh;>xARpYAu{l}8-=%@!A)26xo3Up+lsHi zRHxv17V1g6i1eJ9bUnO*ibQYS}iW9LkYA06yK*Q>lG0@ zH;BlDwg9>M&&J{H1m(khhC{eEuHNMo+*5s!8#HeCp}qW5Z@m|V51{}re~%5Q1$bP2 zOcdq7tbeQaw^E6x-y#~t)ECKiGpQ3Iw%=@Juq>(Gh01+svSkyN6{j@4t^zKe{n1B> z&md`y3fR1A3FUZn{d^pJ!`aimniaQ`nMz<_kqWV0aoXe z$+-iym299m)o+hQLJu3H+rq|tgPSV8L!>F!ad=|;7eS;~`(VV)=-wW6+51C~9kTEQpor{){F~mzVTBE=q3fU>Aq8$uzk;`GeS!DCCO_fe zHI_VTwT+yDpvhTJX2F#)C?cXF*m#X2rCCjqXDgI(!wo$`>px)4-8KXBB}>-}Hzax$ 
z)xR~w-ICn`fu^@bza4Wvxd2KuW7dek7FH0B4QHl5vFew59W4hiRY{&a~s3pMXu=O2S$nl3|K_k zZ-A1~jC6Pl&?Nk(q#q`688{|9@-eWoCgOF)U}fxM)JK5uBJ*+x z3N2HWXmsuMNtD=oNk+S|HPQ2CCAAP8CNinlqXQbb(TaBK{mYE#WbXf>w82q+P6KA~ z9n2TUWx;0aAH`wbD^WP+e(EcR8HzjlhQ&w7kp;vDOCys4@J3raPMrSHlTlw?0QdJ@ zXJyZ$@pda(iO2fj5zg)z5AHu&MUMgf*R@O6{3`5izWPTn8Ctev-#_bEaG9SkFD_2L zQEIMq+>0>xru>)+_g1Q>`?EwHigQCHTaYR3;%0H`ig@uuaa%(I3H-@PuY(6?rnTeeXq+sc?m4K#u=s zq9$r*k`53xi{n*K*D7|QMwxgA*tWvzfy2dQk2KyCoQ}f z4DMJ15P|6n$0Ix0dGFCk-QEb7+jVc2!<309(&m0_ zRKniu82Q^t*T}(Bc7t&jDU0*4jAQQOE3t4~?B^BvLk62 z-Ek>oDT{3&9OdDR!p=p^>gV*n-QO1x2q zbT(EkYjkiMjSE_MLcc6yb??Lka{K)(k&7m+QMrJMS#^@7RK+wEL_PX zYI6^3y=(skk(TZz=X&{fdzxt}L0mgR?Qaq=%H*Unqn^p9S6jWcppau*;TFQ&(pfHb z3A5sWKZK@$-lL_Ag6UhP#;1B@IQN=0YYV=Yu-19)y$x`yspZg{=dQlnzhaV2-x6-H zi+5#QE^Xc~zLMQ`k>6#m=3L0!GwUx|EeyZGr_Oy>m7bP#*)*yO5?t{I6n=%u#~z&f z4o5Eftl>W=zc8e$8ZYrmZKl#|UBbMml>uTUZL>`tIx5qT*ZzN(`E}dmV#=Wx&U!OW z0T-xSB_Vtzs}6L{I@RzlVOKUs4_>z;=*u8)*{b2V&@(*{L{h8&ll2DXgNK5E&6{f0 z76?%4aHEXp^!55v3v_E4Jl-B5gAY&h&egVRMozu3iEl`vhwsO69GKJ>5F5n9M^%KvWNhSMLwP+iGsF9w%X+ts^VPkuG<}^J_%R{rz+x5dN=zI5W z#z{PHYWm0S;kGZ|B70|eTH}@OxKgdQ?KG2h2f|}NE#as6#wYhcQV=W)rT7mbHktr= zCNbmc8|P!r5m3O;U%P7d1m&-dYdF8=xN|VMD3~+GxxkY+hnmwDrEJ?8o5T}{>@iei zUS%(Etfh3BD)kKkn+wQ`b4Le?`+Oy#v+EGWiwb&@)gzYSmFw+A!n%7nn1viic|pQf z98OzD+MSfyZdQ>0t(xPEdSmtLVLJCdxGY_3+t86e?Hnf1%*P5nrAEkb8ze z+NNtdqO_5#x0PPssOj_38Ks`6lxM@Im_r2jNVkDF`NBbsK|&6_Wb|5c0xE9y_)Sqj z+}o;4e9G;36OR8#*icEY?UxlSE)QIg;rc`%W-hQXPcX@>r>tuZDTe&@n>9_fCWWVq zm|sYG;;9!!gr?o=pc?tjWcs-A0+*UC8wfRh2{6Qiy*64LC(a#0m6h&3Ya`trSDYGSk~I{f7;0$41PuG3~F1t*#a z)A(K2fE|^_t6B7hjtJ~3YwXUiSZ4M_Jxl2-k3ayGOI#%&LUX$7SLMhuUfZ1>S!Nzc zNx#+{5||DDZOK2{GJ0xj-{>hEiU{~G090>rmQ1cdfBS!N_7+f4_FvyGA)rX8NSA~l z-CYKqD&0MFcPovQbc0BD4_(qQbl1>5fOHITF1)?}=Xu}rKF>PmuojD$tFK?|{n_9B z+olT~I$086&aO9c^g3&)T5=UM-&xzgV-`W}arX}>C23S+E}TAo{yMhPm6^O&^lD2* z-fQWOIr&=kQ*v2Z?t)UJ#>XfM63GFH{i$J#AH-a-sXu#3SMzZu4)VF_`>H&A-KsF^ zr@=U;^66EKOoP-XvJWV!t(XVv{HL>gx!=2NkuNoGeH00Xnoa$F%9WD}h!^od6S<)d z1vLCThKmuka=|H)Pa3Di>iURnVpx*~^PK|S;13$JIL4(Y18z<4i;kITPZ2ZCT=6pF zP}SqpkpYO|_4~Kp>6e{^DCW-gyr>ld+Gs0eriAV&TPbOhV6H^;VY_{wcLP zMooH`bBCA;Hm34kg?jj~M>OiJkqm%_a0cf8N6pvSj{rvNt|+I2LO}LQP*yjYDLcd%i7qn;-NSE9>t(`v zq6r=_Uw`Amlh6O5vM1XkKRt* zbqa>}Wzbp~WkU|Rc;ZA(8K2rTrXbU{cEPvC1&|&(#kfUFkIBLJLurcK4knAKVnPA$ zo6AnR2Pu`EwwPv--3aH3pyGo^mzWfByF>esEfWx>AJ42c1dMaZimqAD49vCa?O)#= z9(rFiehK>ndO*QC$$~#fqQoCmR|a4WAOiegvF3s&C3Js3`184&D zM^68!uo7ti%_&mNxHkGfkYJNpy;Als&jGfBNd0ew=O424mxwq-b9(*@a{vg%C-vuC zfAJ%ehHT4xYDrK6l9E zTCOnz2|cGGt`3)vd~V7u8bWFQ9t+qW^j&@WH<#m`oVa-Rs!xK1I3Xe7+TaRK%n-wz zM);sc;-$lxeA75Uf#}Ici{8Lvj|ChU4qqOSTa~1{K6uLj5K`4;{|`?Iregyx@8D8N z$1{6+W57_W<>&2ve_YYP@Ggwv8OHO+=OZJn0sD6aOov(z3c6)xGLcUYzEIH5;wL{Z z3k#1hWzV-&8#DfWJQUU{Ri!Hy8;jqB=VXX?oOYtm?bq0rV(GbmMVXNgJP7sv zKH}WAVvEb`lP-%oFX$~}k?8CF93?H4b3#i+c3g1BF<##w7yO zTE5WD7(B}^GZi|={cBRbMBYYhkvqhU= zGE${u^RfWVM&R7DIxw1lh8V+4|GNbJ$AR&!lHG^olgC{jUgOq~Eg?JvIoBv+-P5JM z%@Pg>%sj+SqFj2z6KIegk}gK-Xs3Fs(z|`}yfSD$PC{svR!7JEA9JQSKy^#@-aiR^ zOk(%El%X>!6Qf`i{z&s9f3q2;=gyhlFX-rp51HYCRGF?bMk5Z=13lWQIo4wrdk(mj zaq*Y+WXcV1I3ObNz2m-RbU*j;F`jRi8}@5C{CzKWlR83G8TW8}mGRO`%lw~P{Z{)xppxBqGZ&TV(b-r%!D*aJU=DE33wY9s~j@3!tG*E>DzUHW^AQfFRJg`5J~woVCcx*(1q{s#!P=R!?0$&$LwtM8ks)t?pc2zAepYEny$ zw&wn2$`3O3B+o_J;$Bk9XT@*h0D1eLZq)g5{vV)6&J%vfGkjEL;+^V|3kaGp6x(Ks z6-T4UyHvSmJ>6&1kI3k`msg4KUWLj@t@V@WcLIZiK_V=g5t6D55REWU~jHxfJj5-%`_mBzL4#Hx7Hk}I1T z0q|jB6`?&-7yNdqeIGBQq99rOB4V6)q_~nmmUh!3{F+LG)Jf{lSwdLmSLOy|V@xbrc^t+D^ z{SF5Guf;Rv1rH5(y+i8BO%Pf+-~k(4l0_MG0^LTcC;LF&Z!WN;;L9W8C$#$|lSjuo zk<%94&1nA56h&KC6yPl}#D^)PMJDHw^V`anZbb85x`@kdeKs5M1%QWe^D^Rx-S+^K 
zRF5>X<)!e#Q=_CgTKrF*a@VKt10~5QN`ruyJ(s(2m`p-uf1;_`FPS9r*c|8O1`Jr~3y?+dQ(1;K#wC!#&mjV$EU-oUR)bOKUo9hao{4+8{M)*nJ- z!jTGQ&;e6V!{9u5S+U$Czzcs~Wab}eiD|eRSl+$G5H<)`PTWg4ecjwLvnrG5|r^Ph|LH*Ly{&m)tY9^N#X?6c21S}l>l$w({a44(M z%TR#aU$>k%kWhzRc>Fw^g&?KB?VYpvg5*3*UNu|d&xH`u0R;%t1U-$7p2 z%DW)i+c~>_2j0f#haP9>mvVNUuYh z=GhMnJuTlj#8*-$RPbJL^=k0!Y{gd`NO@^jHHoj%IL7BC>-#K@o$jXu@gnGHNL~W( z!)PJC4-ofw1mS0O{N6UOa*V&AM0m({n5#?VKF97_K_qosRNrp*dbNy+`xklGZP z5W_fAXu!?#ofXz|+dX5u%Qt7fA4--@%Y}|+pMu{i`@4IwRSByKxz?OS2FT(GrD(BK z^|5%5b_m$C26P?c7kj}%5j;;vB>=J2FQA}_+j`XS$wz$y(ZT6TJf+ATugG-e zHJD<3A2?gHCYWMC1+0CVMMARq@#GmVMpjoPnNi~T^m0;7Xw9?UHJ46_BrdF$j?#B( z6{9bfa*s3d4ET71$GFLrms&zXld44Q(qkf-Kw?mX^cAFiRpNBIT%8Buog@Dr;axS; zzrwo@yau0#3!ol+|AY|Nb0H%fDT>gjJ!L|1AjI`i`fOk%`${UTxmEo=RrnTccgQ`g z7_I;ax_YPNz4VEi69iB8;ylA#vFj>43#b|^o4B9J@7PU8et$_>I4Sq}YD_#LC(xUS zTp_nfgTOQwdI8&l7KJlPx<#6uO{W~)L>sxY5bMNLUs!kBn+W&FQa$P>=6cl`Tltq! znvrcavus8HjB*d+fhW{EEdsyrG-=Lw_?>6UjOtrhxDEooI1SG?k)yPDSM1j;Sn-z; z&^a?T+v2h3k&bk8-}&b94s9(bR4gmNk5_bEvQ~cza}O#=q2P3(i#0a3eWZDq)J1;! z_|W3X6YGLcOh-3IBjR2=ev<@ZQP%yH-aQvTol#e#K7s*utLBDr#-4I{zkyF8WbaST zNsA|Sx$H+Vr*lUqB67-c?XDYQBXV={;YN0URj=prWN1+q)D>Ko6Dr53&3OCfeXta3 zSm~INsTL8*X4kfuZ;Ar(d1LRI1^y4$j$&s`SK)EVV#5y*0q#eTtWOFKo69J^J;GD9 z7ThH$#~4{Xr&cCE@qcVWU1v6ceMY{oBwvjjJx`ylBlxo01AWIQ3^}HobK$f6PPJpFl5i zTN5JwpZo?LKbqFGwAX!EiGP@9R&`D+i^<($;50vY@PUQi2xXII#ey|@r{Gu`+aZOg z&$d~GO-Q()1I~Na*XBZrecs+u>@5kQAz+oNI6S28s7-)!o)YyRCoBDMhroa(4Hgl& zbF6Syuj%Y-e`wSkE@mI1w&J85H48qp^$_FS!Ex%+*)cKf-$1a#?ZHeF*ZDbfmo1oeK+cl&E&%_PyL`^7FLQeyC}&i0a@8Q{1ysYUwFB$ath^T<`+NIUe~41I|t z<{@^30KBsm@Av=4J9(PyMt_MREO>hud>Q_qw0VGUGc6bYv3u0(kG9>-$W_NiNx*?M zY-3qHeVTa2U?u#?f7E#yc>z4$0@L=GSYx)9vC9?XA;dm0{sKdcAjYdtzixjxi(N?>^yGspNwPtpu2J)+WSrMms$6KroN2?xG67wa`uB}Nrg}H~2*dx@M-OSOHz~op&0KIqbKVY`y&Y7?QPj~v(d^EP0 zq>pg!d_TwzykU2GkcmSA^he4J(I4*sv;4}LBrYJ|8(S>KYzB`5N_CMi?$ChpCBf$+ z62bG<;r^}7b>%osWA@9ewq~T1z!PZ%C@WCqzTO0oghuzrMnsY)?T3t=)jF;PCc!-IY32D`WwS1 zM=4sokj_Ptuh)TuQp6cH(eL_mE=|Zw)xVo4x&V>{$i@WE?yFob+N~B`(zlkTy3hHe zX`CrVj$U;m^>8A*UOVfmTuUP%)De~1vtp;@iz%Gcs1>i{`fQ55Q~2e*mXkb9N`v$} ze_A9CP-k=2N+{>tysV^~q$pJgWF*_9+fYnL*;pJw9Q_qqJG=MCV&2mj#?S_l21L%c zU$9@$32&bieh%3V(!Fmunz%f_fbXqY%pBIGt8J46flI-dKYY@0cJ^jfP(kYJk9hUc z*4z!6>t9V}8g7-awG18io?py>%SbLNE;4tpXe_^0f6&ewY{4VNmMtw@5ppa~&wBeS zeZvT#6ikd@AbUrHoz^V?^7dUZmvuyysj&jCYN1tr;-Zc=8^;NS%c_jm#d`Z%ke4g} zv;!v|#JE@=#T@jr^V*@PSJ~jp+f>sw#;%#2W8HUp#{Sv|3vpH)0(FbW6N7rkALCb=2Xb(&fNpaU}gW{DO$UPM5?dL06=AB(I zm;1CV^LcWaoU#{WdZ$g#sKz6%(%3i@VG=^33(U@Q^Me>H2#HJBn{pFj{iK%m*oj+W zXpJHD9E3YlM@7bc*M1&{O#EIQ$k;E_4jl2kQ*`I!I|+cLYEKL?;m8bp&-v3xv0+hb zpf7!%H$=n2t^=_yzMgd(c#mJZHWa6dj-!O_CpjsS7+@~-$$R+WSd>IAzz0H#y%NwZ zW|0@vMhdN9tZ*<{T@-T9(9q8F?*pu`j`8Shq4ln>NhlLX5T&)paT>WqS;Fsog=thx zX#8Vd;ax0)DOGpc5%<=undj0KCabTbVW*L5>XMmKhI835Qu&S=h*{4mRm7vSz*xQ> z#ddA1exY_WbTxMieOj-Xoho$(4`!b2gV~@Ura0=aG3X529PX|(!{Jw25~wBKIjA8o zNysG}$VZv|dRLy4$kVdHk5ZplKw^I#eT&ZfS;xR2ZS7*7qcgB+;b_u+$*8P}LCMtO z>#cB-8tbtW?qqn1y*|ajEoTJf{T+Bi&HrIG(Z6E{;=`2p%l{e^!`OpKz{2JZ1B&Sr z#Z#g-vKKMwa-b-l1N+BGD))t;sK>kVVE@d|mp{xD3neR9=ZWWTgeT`%JE-tZ!s zY=g9=j((}}h)QWf^Rcz+?}b=MXr*+zbHm`5gijfb@j5kKdM2Tvl2Kumg*`X^+?u!c zgak<`JV2>NB3QUG@+3(Oxx%-wHFiHQo`zKzbIl?l-K_?*r*2|dq@H(c$=b&oKD#k9 z27IoDRVCcm@5$q$rM~tKKj3M3_G_G*t!RTVt!A=>yuk~>?tWtH&9)Gm-%}njey7A! 
z)*GXkOIv_8n9SIPQfAPJnA@_>5jIm1Z~!bl*~`VUI4((?Ce}axXL&dyetZ@k$lY!8 zp%?bYf-GPIO1AeV&%!iNn*V&h`AwP>sO^L&d6#w!0klEyj{j78T0f@y4J?6K`IcnR zOEd+)0sl{7je&$e8~<})_0P-ysSXA3DU;Q~C<hjIi$;krMzx969Vey^I=j4m09$NPv8L6YKdn(?(#MU|; zm^@`k;6+CTDze(hQ3H-3ubi#Hw)`bAMFk)^5q*5!S^Ja6KUYXe$u1y6xwf}0z;gl( z2pwm>Sm}~!I$4pJ?YY-SwQAIsqU`vnY?zTuvyjxeZ$H#fMyULiS9xkFYxq(A_P23O zjtzRT>?b_c@AI*3u8dv&JYL`lt|wPhv$sIq?~z*FSniivHS89%TgPo$hf@x{6K{E_ z;^beX>5rY86}+R2b0RpY;Q04)?3Ecq+Eg=Hls&)Q|DLS`mUYUj+nS2gO046Achm2& z66mqUrEp%$e)+~EZ*T;OeePVZ?ON>P_BsqxU)+PJ#4X&v#9927*%>qC{ty-<8TvHR zr!$ogHUQo*>wUBcn>VFSmBz82k(3aW?g7m={Ci$@Dw&@X6^X5KcQo(|k=8q`jHaFI z0m}j;nFVe!3_27gAlE%c%$y=`-7X8WvwZV4u-q?*fTfq zS{-jcE+>Lpk$oZ*{W=ewP0zb2m*7p>J7HZ3Rg zAMzT_B0b}&?vEkwT@=@oYp9WRZJJIfYRj6~;Fs|!!rdXdtzIZhD&xB{FLnv_Sy!`5 z_(WiF2WCd&1>8R?ct?Rbqm*Ihk|CC)zw2vuSqz8juBa8- z^c(*Y@knhII7#5J;l$1D<6em9**82zhnqnKt-$+X8-oHo1taaUx%fd4<48nK#%=cc^`9<_Mz!nKO{+Wd!Xh7RL zk}4YZ6p*K_SWb^7=LTA~gx52|2~Cyt%JA?CZx7$R9-3kP38!RSZ4;R?55cZ)v^~7Y zscynt1>qdSv}zT%j^|5MJ!MUeCg}eeAok-ER^{3wL4&SgDuY z5F*m^m08bdZjVY)M&2ElU0)*ThfV@YqQ(V%w=sVKeH)tRqDEv)eQncwhv{1!wPi@l z%};R6W>7LEvi-cDe}?HlU!?piUE@V*m`$Hunyf<9qQ zzR~Y_dHzq_oLn zNG}^BVrgt-Y@Hup@226OcCg?U=5-6(n@zXcBu`ujQcBE?e_XNLtwoc3i)ualR2USf zUYBP#7SQp@r<6rbXlE53rR{v&)pO$=Sw7J0Cn?yL3uE6|xHGvn6k*0(F-s5X+Q<_j zU2P?u8LmjUXA{_WX@!$Uef9P}UAh;!d!3$9jjMw`FQ<+mm1_!Ni9 zLeDZxBBBIzKQ%dSlV!=J>(unl5`&DHcx#7NH%kc9mMY3?Loh5!AEegK?J`c9T0J)? zr`rw=(i>~@t#pm^*bzdKv(g{q(h+iUBbgu~qwM3H*zOJPJqA@@hdAywE^AzbR&(0R4YncI$!P(d0)_KSffVp2Q|Zlc^_eV~rO2#1mB&f&47#LZcgFiV z#@8J_aK$?leoT^;uV^CJZ8kn05m54bTm+l@bD)wLJBK6I{UC++3pV6f~RNyTMVf@jBao?BlR*!RFH ziAkF91Dap+Vlq^DoZLfI3!;XEB)jg&cbqX#&e`Tl?E8Z>i_PE%tjKlrE-e#}AQj85;fT2ZrXuZ&y2E}l<`NAVY~olQtOYSHF4 zJXOUtIy1VK3onZV*&5R$KY6rT%@4DjL25X5b=!*HKCkDeZ>m;CHDyqq(t$Z>#iwC2 zCvGLzmNtZnT*GQFH@o$%wVm*FJ$fN;D$$w|psgR(qF8GdE@Th5*3X6XWr};fF7{!O zFJ#nW_U0>J2PwUI}xr#h6w{xQf8($j=Fw686#(}!# z<#-lj)0}%qfi)DuH)pnL7ri~4ubFB+k9VD|pQybc)XmWKIC<{^3_CxZG)3CI6e1iz zd*)N5K9M-s&|my8T4EERHX0w6S|nHqAy#hb;OmH)18I-+izOyQQWLk`oF`aKj>+ec z_gK8h#l*yh&G*^{2*p1PF?gNRt>iXT_%_Um$UXgV9LifOSphqPQnQ#L>cIedQM#Yy zj5?qsSQQn~Z~YUksAPvYvvNqQfJTxNkMo}Fe8mm)$mUgy6>267cW}VeBcjWf_~;R$ zU^RI~_qAlepm+D`=A@cMScT~iqQmw&H(Z|aC7@6J;;f-I6M4ZiO@6+zLwk{){MAt* z*xx2N|Mro`Owg9C_PJWF2a(xcK~61RFj{a8C_+2)u>iIUPB_XS!}Nj5Tx4fRd`eHH z!_2@z@(skYYM)b?-tJ{RNC~AOq;7C{v59=VLs$@uL2&XbAo5!mw|tCN1y_;qV9H@r z|HHTBF<)U?B*;%!#I;Z6^bG$vLATy{7j+;_sS{1o0Pj)a636DnhLPDXOo{~wz!%_Y z4dOSM^~qvLf69oM{Yw-y+Z~s(@68Kw2+=1eyNKw6BYbIx{<_dKH;dz$6S_IXEG#Ti z*Gz~1s9y;rHoY76`4vQ--m|8jk#@JdB{E!6yV!k3uK8jmy~sey2&e|CmCj|U31?Oy zCEwc7+3sb~B>_glrOlQvNl|g8$yYs@eBINej=z}|jL~vhqk3B!s|by7RNlg#-%B_e zRrZr54|v4{Y-RrhW#A{7N0qAbCUf9+ z=-UGDmI^4WtodwoVNm@AZB>$4%sE90ioSqONX+sc&0xyq6SvK)DmIpJ`ZU@1 z_Yn)c$?b0{6sK_3E44?{saj#AwmNE;Hmcz78hEyC<{<4^TE=1Okv zlF{<4#zz-=J`v5X59LrddVM&JM|PW5Ei^j|m|1s5BnxvNc4Shtpq^cKPE!9o=mitn z=(pNX9xGisg*sV8T9DvZflUmGu&+J-bm-92QaY#6WZUB}A>mCHaMtrARnm z+sy`bTdBT}<~Ll0h?vU*rqr1`A_jL|WOn4)gwcxg>Y8xk!FRB)PMS7ZYkkvzExUkg zsfhxMz0}^jJhC4L??fHs+(cNte4s@F}upaZy)_)w?QyFm{H< z^2ZAl1P2kwx*f&ii~KRX8Hmk@W!IeeLuzK`(qsy#pV+q#jCMSKb1fLsd%W33d^|pZ zM|ncHY|9@~eaXt6qf)?{>Fo#pnKM}#The^R;mUJ^vXUW`o~FXM?bNf|=;qjQVX|P| zmy$a9Xz<-up4O0z%`2a@#n`sw0V>(30$4zE=0a9y?@)6GQ4pp z!_`>DgYd>wp_C8Z6F9k|N_SuULPwA$eWi7xH zozLU9ityOnw{TsZl%v+#&?WXhwEIcWd?odIcRA8Yzd^?SQ1gds9?Q-(@G$mjb&0My zA;*V)ZsGXhFN)tWU#Y8VE}o&zDi2t&&Ggc^v^8l6?18ruvg$EAn*FpjS5KYOAlhs+ zF>b~~7pEy+T)P}_W!suMs6u0rnu^MfVGQ7GDUQ%b+FtRwk03tcGq=IF5R9pp3<6A1 zl|zHPc}66*-L0}zB~iTk|C`15oF}U16D?V|&3O(R7B{Vc0M`T6rkRItzzk^Of(o*US 
z4`F;k<%rszuRQVxq!^XvPXINTjlQnOZR*D_<6=d#{Bb)I6uzByWruH{`rc}dHv!NAOp55b}9GC?qg3EGwV5E0|%;5k^A0MPLW-g3X8g#RDd^l zg$*@y4QShJir)^v`0)NYRuoDFKx?w@!bQU<9^75ERBlvt4E?CFcRA|$Pc-0MqKyL| z;Ljb%v%c=~?W{$!!Umq6Dx<8Patq*4Y? z4Vf6;jrROOl=p?n&2Je0ZcRz4n3T{qopIBL)$J{$!hXp$vNX(azlKXhL~JsW?5Bau z&y7!4#?bq+QbhiOb8LdcfFTM}VgoV{FK19xnRx~_{=Si*ygA1sbMs3m+;t&b@T@L0 z?e>`&omT6}s~UM4&8aG&!W z*2943RQ=Rh9=DE;5)b2DFC(eou1w`P9(8CnhMrnZj0Xd9^;tF>ve1=roX4y_u?_uu zfBByk!YwEGcIQ)mqqwy0w%+U`2JJB*a^y*sFOSbPZodI;>1Rd)HPIul8p87xKCOhs zt*q`Z^^o6W{Pc}g<1W)innos|D`r2*ysOl(Yi(GzKv|mH6dMa)8I5H1z?W-l;0#S0 zG48sKni%kzmyV!FiC{!ZRomjffXnDAsg|wf8%itcHJmUQ146$YXB-~jG(n&@j^U?k zt!u*lebv;uW1GAe^;=l!D*%`&M_y5|OC~>JyYaVNliAorB&Z&AgcVrEk6}?BA*7&J zR)c=+dr;2zII<^AoAftVPf7$s^uj7u;mI-bG$mmS93JM1tkN(gB>JhfgQZVKKivG9 zlh}IAcUf0yrJ`x<`ngHrY#HXz>SmTormErdwKd=Z67N3#^Xws^_`@)up3n|W)BP>Z zP0%gF<@nb0(S1`>jPC7u27ZGD<#e?h=>nAO=DJD*j)xqDc(>0v?zH$u2d03)Gt9k3Z>KSWc%hww^?uK^ntL2%UwDkHEKhdWW?a2M3se))^+yfTc8G$4pEcbNEvR zIT%L*7_u$D>m7>qu?(Wa30{214)vG;gOzN+lQUuhuI1v>nR63=NuRrht_!Z#eKlK! zJ{UvQ#6+(}UhiimCGq2aDTf}LQJ*aI_2i}2R^(1JZ?Rd;*Zshf+NA-sRA zcB`w*mXYf$eExr-S#*athRiLsWB$aTdHI5ACy?Ma!)57^l2jbOi)4UV@{wBr)aUbD z6Ix3fy7&H!U6O^sZ)-lYIP~NkKS%W>MV0y`c?+HFA2Wm6ZqZ=bE1m3!EbwOmzpt>M zUipR{E9b;{{uwzjTf-=~7-cIq5Nn+mSL2oeMtSlNK6&*`UQ4nzUW0hnWMjXi&s~W5 z!@4z>E%KwzG>rqo%U)02a=vvbp#++W7&9bQlb#f8eo{0!^T8ue=6ZLof?8P?%Dt3-Xn&?gQP7S zLHh-mrj7@=CGjEt_%aBcn1I;O3v_+-axa6t^;ljiy{UMH%1M-)WjS&afI4E%1_=Vf znBMo?`hoUV95AU+v#rk?m-D(by z;ka#om|zPY7?I#=8>2x6W0U^StwxrRE~^}C_m)?N#qfzi|9 zz@7EFr;5!dSd4mMxt`DP>Mk|RaRUn+>mZRO8*~8yp}M7;6WI_kaLwZ29h?nW{_bVa zdECiwlG9mjurJPzUELDw1PJ6(V6y3vw#@{k+Iof^zARM(OcXDN5yYeU65>>!*Kl z92MF1scNn%n_L}-6g;YB^=qjKaR=MFXcJyy0 zNFFSX^7!_^@J&OPM_B_+Zt4wrY>nO^B4CpgQ&Z_8Z6ZY{S8A_LE7v4{@@}>X0zl z*v7cBt-IUR2#_%w7hM&uAyu$_u}k-6k9iV5(9b^+H-J@TVz7}*>)PK>uX)iX_xrdG z*Or*}yoh+mw4n`=;VBGKak0`qS!|e^mmzy}`p_Q~exJ^$lkMtaQ3_g)U=Dpx;CFg<7jo2jb`@@CrKyeT0$ z-!OKJydHGR^2Tp;huoU)N^MDRveVw2V{HC}>;9NINnHd*ZB4d#f_<)fsH>LTjPs}L z=((9JP|M0bs~-g9_|^Ax!Buu=UXm>}ndIk1<)%kjdH{g6X)VgCv6D^Paigd5RN2*x z!MW!&j1Ia~>3OpX8<=e^Vj$a?Qg%VK{a=wHzMj+_DB1q&#H;7K`Dv7)^mu|H`o_|H z$?!^beUB#_IwrTs&nk0OQMXB)mxXu%>q*ZBf^LfmH_hTmso6g`Ws^>})%@~}nPJ@W zX-rTo-No3vu^Qo$%G-jje(1NZLGzz57g_(uX)){_g-G=+fSm<+RivU|HLo4}u~9sl zH1u)2f~xM(h^j^)o-_nYC1jcC(<03zY?m7ik5l8(&F%jlefADdo<(Mt8)X#{@VXe=Y? znA|`yp#+;%0qpg7^#||4%Y~!j(3-MIyPDHp+Hg&9K#nuMJ+qW&Sz!=NgG8RSh@LI9 zU?4Z|BEkI3MKIu~D31Dkaj>&xzV1*J2jl^?NP6-l)#>hTH|PDHtA;gqEE10SZ{h8# znh6cJQx-3;Ri@HOi^Wk+V2YRcG#wuT6LSS_`@963C!2VZT3;9cN0u-T` z`l1@Nw{>(ikbEC1(zwN`Q~BT-d8R5JsS5eibi#_t!J9gu<-tnpA>b`FMgh^c4F1J9 z^T0^P{n+>dagTZif-n>(9%%pWXFT=31Uu~g%;Pi_uBoQ8W(X5C=J!hP#T43nqN>_7_ z@=a(s@m+G7qsi78w?MIBEL{J&(013mWLv-eb>S&YCSh^H%yIDWX55LvxTSPc3+$HH z9HZQfdn2{O!?NsyCQbXxp4{!DnoeH|3*C}Tx^F^DuOkY2J@e5W6`!Zx)Z3tTSRGD3 zOgzJbGrpB|p0@3RwGB>_+Bc&ntzA4kSPl=(cJ71zm!=>1ZE$>$3v+ym? 
z5O)`3J7sqMuo7<_m)@7Cy;_Xg`4@aDdXk4f+ld^j;Ny_}_(d&O1iV(Mq~)^c;lTu^|{6LJXXv=TD}lI3W1$!T3@YC?k~S6=aeR8`B3q>J*?1@E{#$B!6cKR zm1cQrxHzR?yA-pQI@^%!)U&kK=~r8h1@G%tC!6(-K%UC7!bUI_Bciu`7KW``j@z(6Ao58%oR<4jESt3X% zi2`6&W-szyjUDzs%_ew~KPXIJBTFRjW{N%A-=&GkkF_;5kh6_kdX`JE()ZvP8KXw|nJJmgSX~oyBHm0nOBG zg?Ol3igdClbg!4+`2bNId6GjQp-?Td(FaW~?nC|Csh2&dxxy_6FpF%Y+5ta!#qNT$ ztsMIN<-Y)WfyAHH4;DMRh`L~6M@3V*m{>Y~b(?iM3FgUJRW z8Gyf>t6#Na%jAm`Ob&Y^LImO(E> z4r2OI&XdRvD=QGHD%#<(5}+Uvr%Zwrdbwy4s+zcU-ajWzDy}T(#;X#%X$uER$;8hP zEU1FO9^{APE;k?K1y7G=BVTX!8pW&MYfM4FD0CoKY*M3c+|%{SSeQDK+czda%QS5K ze1pTMu1A9-3CVjt@;V7Dq$(ib0NV81XKp`RbORj0)8uz$Lua$c{>@XE>SKL`QwH&B zKJ{4kW$FN)B6Nj)_# zT?=d(i0euWiNC`ih#uO?FF~Ha@M)kG%^u?m9X_Kw&+>WIs=fFA<@Z6bJoVGaShq~t z>aczwDc@lerI~$!OhIRap<~Cf66_h010JLTlq*K+9 zD?^Y1<3GHRsF72MSI-!8NNoP3k6-;FQY}e1Q))wG$LK{RZ3mU;_25nb?I6Y>oyM-3 z`E7xc72B~qY+NYw1?>@VEGf$2)K2X^)*gES+@DOVorzX~;vu)gs=IF78Lhv@Bu}iz z0&+f&r!FKs2+2|qn+tMrH-7wNZzYfMc2rA^BLiHtFIp}i;Ra;<<=G841w)}i< zR{by|(=gV4WmNE}dba5BHMAy>I9S;|*CI)yL(6-*ygsct?(2dx(XZ?RkV^uGDvfKR znG7l2#f$#)j;>rFdTHX6*A2WJ+ z4e3$wHHjBZJ`uVF>4wX8t2`j^C^_qds|TcMVUL%XHF2$2@TBshoIz7#>->t1SKV>6 zkV)Rzxiqha1Va9J#LnFQN8Y9LE~|#~@(#}-zq053{&cI1$Z$Xzp~K=UiT$45Kd2q* zx7fJ=p3WZ_9`%#wUlgcB6wYFh$EAWLbC$F@ma!ycWTDaCC(>Ot$uP5RjQthIT$qd&e5Fovj9d*Mh5G4fnnHQ@1?<2Z6ya zGXWGD+>0c2!C6U{NZdBL{f(vmZ`Ef;5Wu~GlYNg~bNNG$dQ(RLZQEZ^`adY({#(u5 z|0ii2`|mhe8i3!b0xsP{6>J304F=u5JEniJXXQGQZl6&!`R%rZXTedVo$$h=Hm0&x zX%`osizOh_jK^AQnd~oe4DK~3Uq`L=0gg6vnYRyXvZeJo-bK6VD*|2 zzdv4V)0iUX7w2sM>v5rQG4@#TnK-Ks`Ye+3l?s#OS+bP<`6;vBGpjG1M_ zeX51=;GV@^#mL5)S?s8I-?RPsZMJ#93q%H(R?gfYX^O2vmX(`Fy=nWJzAm_A;hfgS za6-gMcbAniF10Y^*O&AM}JVAXcPa zqkfb1#ulqE<6ZB6nh7H+z>tPbkAoguR!>{~)Q9+Yvx_z8L@2WAd~=x5@j|pzU50t> zsa}s2kL_NM-e6CupG-o&##!0r`JB%nuoi{qtcz3egIBQ-BQoN(N?qalMJ@@J9sf90 z`n?sYD5?~2(|{;$(hT*N4WgBTRmLa*b$|m|8$?NwWgVF^R>Kh**LmA zqSE-TU$34s(;Vz;?GAWpm&?!V!SZyp95OQ8fAv0El>QxtNTy{95C#?;2dBu>Wco;O zS6TcQMyzif_K%p95fe};9dBjH37%ZcU*+jyAKwKm*@eh7T@4jsq#fi69Ro!`1~{HB z%0*dBMGstFPq8emmHC`d$XQtbFdGa*1rNqiLI_;hacj_Twoxs34#SAJ$zDSn_PU_dG=MxxI$poTZR! zIm071<>wRv9;a2(C>8fC>RK*)0CKm^biyx8lW)1iHxR7Sq{8OL)Ri9ufE?;-xIhFG z+lT`qjxBKD$>e%bGJM|Djik=O&;(adUKzihc{W$0KIM zheaF&HFwTH>~zpL1(3pbmhb`Z_!oqq_>Ur<%7z?J{Y4XgcbUE88rlwgvjeLO3KdK&XD+#YYn74@! 
z^(p^CV~Mxdeeaz+FB`|f?`FF@z|39UZ2QBOKbJpm zju8*aJVcItABMf@t{_{W`#VUT<{uZ}DG&*0BZ#)NMvtC3Ugob6@paH+z3KdV2B78W zGQKx?vo{$a*S!{2#)nghF%Iti&Z?WYR!I(#>ZzuBaD&45e;Jyz4=E1j0BJ7gz?yQY z>~Eq_-krYavt#gdjNbE+Nnn6N^St97ap5;y?U2}XB;oV&HQ3@`_SJg4_|vGeO`oa` z=&`gO1gXvT4GD`K8`80x02&KnS4h{v48cHICpxMfnt%Nk?_K3W;bz<}?ZiGrWyn^e zO818N`^0`N{MeB4VJHmYO;4j^KE|!mhQ|sm5wo<5e>( zFYC`QR|zb3Z8uv5pe+2#{4)Q))V&FE97Yx${R^>oyQzm5?@O zu`pnxNX0&C{MaHpA3gYX=z}j7cGT3jw4CWO-U~~9ct~P4}^31)ZzRyy*~>6 zK=+}fvuTo}f`RkHzAekYVajZDgXKd67;~0Acr;XU^?Il{Qrv!tB9H1qmCq`u^%5GX zCe_i4BqQzp{TIiEbH9&VII8zaphf3lCYjATk{$-myS+XgWAN2G!js(YC}9M|C%-H-SVTdtODBm3gCAOAB}&TMQhpi2rblOfhNT0$DU=hMCD2TA2g%zpi$1x{;mvU%;#wqUn!!LNeI@+why%@HpRT#M`(%k5vy%nzSt zaw6MbYq9lVIP@Lj)tacy&C)s}CutxWd4*b((!I5Di#`og+LJrf3bp^fVVx^dvITNu;U}Q`!mPtR zn}hG=;=RqIBtxaM!qt!82E|ctm>%{S2Oj)nmsNNI1r+bZL2(BYx}v@sIb@#3uY&&6 z$v?Z@Z)D|ii$ft-PcJA&nc|Kl`jY3vVQyVtIrD0Xk5X7%3ns9XJpatmZv=F1Dx)A3 zID1c^3MN}Y%oAc+aXY$^lT9znjhRQU)U9Z7v%m(NC{70T_XIVyEW0)Xhu-JE-Gq}K z4IzE6U%EjdKOHD=J>T$SyLaL3N?jYD#8Kqamo~0*3~>n^pssMw5qvFW=(B|?&v!Uq zW7q@RI=V*u{$wSv**l@wJ8;aK&Fg0ESsAhYOBHoz4{?gm*J%ws;*HWWUYLAI`3K`R zg&sSPjQ;j0y1C6LT^B7V|6~tT2|OaLRJAbVi1pOxA}+iP<=Xrqx*Bsh_?_D==^-`u z?1MuKkUkZ_W7?4atuKNTJt-w%Eltm|yCf-LRL9nWK~}5Wm;AZ3IkQiyn}uApHi@{d zZ%A=3=b~~pUF3OWi-0l=wpLFhzwu>34kGBzhl&kV_-S=m+nP zfPVZ!ot$}D7gH+f9pbg3?kL`#eZb&17I%H!V%F?yHx`wghrjVJ%v>bmnHh?P0G{aj zF3SPzOUIv$kgig}^r|^-tVFSr38Su*kE;>eUZwNq3^PJ(&MJNA+W>H;lQtz8M z3zG@23M3xuC5&3j^7+7TX%C9}h!*yqTI|CgYbdxy9EJYgf^emGmEzi9OOIn0qm1t*}5?> z7)o7k&3|hfW{|+0IN7=?ud@?%Ub5*cTRh2sV9_Q#^S9sF?+V75%{VtQ`C<{%Ige}c z90>+X5@beReA<&z*8U*RqA~~Xjv8<7ZfztzE%Cs>`)Hlz`{iqQ;L9Hy9NkS_1T15O zObX%#>v<_ITEN{`^;5(X*e0bKVqPML%eu=`8CokTui^{>eV+3?_77zoJV$>6%@;Cp z?3h|Mc|wI`2UPN}!z8BHYa=2)&!J|kP^gk26#?oZjB7e>yM=p7UW`;YgI1yDbISpC z^a_beqr*+5{ZU#9F82b;k4W!FE70RM>SH)Szkp(6O7*?T;q~Y^%CiH88r5}w`M%9y zEC63Kj4qXO9<9Uk?VxqheVIf2!%#ejaZX1JOY`P}g*o6<7Mkj7U`3K{=Mj#|3obV zAc={KoKhYYpS&=$y9?Sfrl(?0FX%3 zBQf50r<9}eYwfQWO*8)yFvZHalc7@7Ehv?LWYCYK6@Q#_sHtL6!`%M$d$Gcmvim1x z+*_uf`!gv6OmwUr40T{=Yhw+gQl}KE*%7pAOnN7}nnl?-)3#QgBTjJ{_ytizp>bZJ zCiuoSW2@G!ljiPc`|yU*pDjk30E)eMq4%f^Y@3W#WOhLz^NKub$crJ`_`MRhaw86) zfD+^n@{hc1MllXX!AWFC+bk&YUnZI>2a%G+4QhC($yE@~oV_esxNZFOzUbL_A&I8r zRoV>9nK9C4wDz_=YPp{ixQDiw+RWgzc3p;gJVtuLuk&CQ=vzNI0eJ?8S)PrfNkb} zOoiW^++#=mIgZ}Z*qakF%q3R1c5dv3941L26@?@XA!6WkelC4nk}M%lRdLetiM}#{ z3?M{5`oZlVr~#9I;?ZeAvan>Qy0gI#>^?`&HsQ21zOtV1!)1!^Z~F=$?JX}HV}r;3 z3)h)-WWCx5LND({BNsP`e$7WKx#_%OSNFQ;Ty~1NCaB@QRL_A2FqIk(SOcV#2M!rb zN@b7VkHvE<9a6*|1c%8*b3+Dqhg>9EmaM89BA24=2s(5lPbm4Q6V@)y-T-54{!~1dE;CWB z%D!rMR8Hh>?~+L~QTEj;Nq``Qz3EuJukMalEeekPCsQg!nmHoa%?;Mn08Z7`CCyJ zAcjdz#bD{BT!r}N|D{V5hVDPQL^EzoT#RvirOf@#CuQqW6BY#@xtnC@bb(274Wk7; zaRgk_=>KYY`Z)QT+KX#HwtlSQ!5Y#nJFfrDw$_M(&;E(qS+?#CU^8l%0!`?%+aEuO z0NgCpZ6`s14CsGp_6EQJ?bPYBeeHiLj-EvOpPEm9J=3(mnor4Jx;@h~6;#h#+UK4b zI!6xCF7dxC$FG2+8Dm92n@!vL)~4bQ(yCPdyBSp)vh-Ico`dkS!uYpW`2V2+`9EuO z{kM?sc3Rj)@GnScSm=Jz?34>_&%7|JjDjfq!~l`!36(ZOx7M*e-Im+WcAA_B+>zlg^B(2s60m#FK41`ym(e zIC^R#iD2(vK;9_*<@_Zva%#O`>1XLm5(7&|DQ>H`C{`WdrRZn4sO>n2V#NXSTn6*D ziuTDnpo0zJhcCubBDfPsBH%4ze(|#6qQtU=nOfytC#x3J(sc*VX^x8qk>{SoA=>lD zn9*lT+h{BLAYViA_$YdfMZ@9Q3b8ZAn&Uq6SY@td@l0_Tf%FpsOB0yVSwHe9Nf@4; z3tVmuK*P&R_Yh`~&MF+J*BdxYd^-3x&zq9}ZQBr-$6l<^GqQOn5vply^46Dy4)G)n z0tky@3vB&FrtAJrw&qL#3h1gf)4AP<%(HQv4mLTHnq}T1dbU%ezBlZif5Q2bR=GE} zZa@C%C!~{S{lGWXg3*)x?8)-nT|G+&O`*Kieh{CVtH--78~3@{ek)m6ReLhlrXNXk zLcp~)@>%m{b@|)mag^llb7z@J^=^~kGO+IlAGLvP26Pi4`8;O<4vS#$S(@5S zyUgM~bI+2jixbOVp3&<+nh~F#vqq8mL>2W$zM#MFFOV18$=NPcnG4xgT?P(tItyBQa-4zr6V^i~v 
zz+(~d$WbF%iQ@#kG=(UDpDNpO;Yo#FWh=CKVY{Yn&MEQyE1BxLYQ1fT^M;r9J`2 z$tLI-7uI5o^LY_Z^ zQ?py&@>DHhpp!Yu_aYa)T)zcibK-Mb;Eeu7Zl9EdR-z+~tQO!zPM$Vde4ioQ`vtgZ z=2j--OUVmQ+tC*R%nv;T3H~=jefrLa^IVTB#E=pIyjG7@Gl}y zJ#K&$DFH9(kmLj6o%@2qoB|;TC5X`!z!;psTH>Ctu$wZhu zdH&b{jX<*SOVUc`z9G;X`(m}&&s6ySAJe|ZR_>z1w)QZOPk_BrBF6N$G6lTH| z^%s)@N6BN}_mkIviU;U@^$m}Jwk=RSltug(iytwTuwE@k9a?GJuK>`|=%liY4Luqv zn#BM?lO^qB+@)*iD9Twd48Yra)4GlzKW9)9+mrdi*1Qow>CoBd2y zIVDtbDh_^w-(p%STC;EwfN|J)RF0Ls%04b@NTuCQ?uu1jN8B*F@TDV$T}nn1_-F~R z8EDZyz)c1s5o3mxe3C1j1~TU+5IHygj-3;O5>@SG#LepzfaQ48SN4gg=n|-#SiThR z@#=3^HB)coEMXl4BA^yKY+MLhotycGIGpGMdn|7NbjvX4$wLQu>=sIJt!ueNoHbRbmn zd+%5D9tc(TG#>!dq;r*pmoOwn9k$Nx4Yys8W1ILD^*B z-dm^Xi30^Oli>d2X7haZRcF2R1rADmKHn*(>xc zkWbupLpc^tU8;Zm5%lUEZMl%#HE!djc2{0)1g8=dNOgx*!hg&54al-6mNnscztDoy zZ8!CD*cxUKW;#mc6)3;#vx!#j*DJhg{!^ks21)V{b~7slvZVYdlAZFM*wRd|w3qv6 z_jH_B%YLYytd>iF`!q~s}H zpPJ_BX?GZJfVe!ji~D-vAm0(3hfbkX+}}fiumj$HFi;|zLEwcZ$@33@I~cq70D*+8 zIKbZvV_k^YK6p+Sh~-bgDG1dt@q2vZdi4XUom$UDhE05oeC^H2qAKf0uyKue z*i1#T3|`h~(NEu2bPAjjD>;hSXm^gjtI{021M-p>P~*oQ478{~z3#-}0pyc$zuN@z z5xDDo`eJ)e5wON+l-uULpyptrZWc_=?gtCPCy5L&PvH4dnTuwQD9nl536R)*p`5nq6$peL%q*ZBGc|Sot9s752_?e&HviiD@gZXw zmDp8q!7F1dw9LB&POVnEo!DwN$J%>+M8a-M+hye!KF*=k+D8d8O?ZYC#HFom#i_@y zb=F7IAr{_9MsjVFaliPks*`1uX zd8f9XsP)S2LbYfN;}u(_U~v&WWUK z!(W2TB5QxNvABTIV3xy4zH8-?U-6qb@W1|%c#e!X- z>)&!`^TJPxpj&R*u*t8qpCn0)E8t5Ur`zq%BPKPi{m*apL-){6ZG9q(+Cq;fy<%d}) z3jLuCIhd51yn?xn)nN4-d!rQN8d8JC-1xNt&IR!IOid7}~SrUIf1{vp{82-&L$c$>!-*&0?X-K?UyM*ck8BF!8H@TpeLJUOoxl3jYx z{5r`*A(u2fdp1qPhXYF-mVK#<2%x=5%P!@VHnk7wway!@-%HW!FuzV)FpDYN%wwk7 zBBq6?Hk@vRyD?=lSld_-cVyDE)n>$_DHZ(m?FxDyxMD9eK8+Z$>> zS?2Q*|! zn?RdhYvuKM*^Ahb&kbY0EjqU+yHCITU?uT}Le9!m)JYLG#i~K)kGnW+#PZW6nibe< z#*QypfKi+dJhWm$zLaa9C@3i-RS1pMO~@zuf|fFNVNbqoCFcJPIl=rgv(JqEWq+d; z@)$}Qe>hPU6vF3^8H}-Jgs+P5aPr^01TM|Se5TVM7=;-`90bUQuF)(%&dyQ`J8r&< z#2fyKy?TI^YRmTI;wbmDMR3<|$FKgCxN8nCEsB^})Df(`nf+ronZ$YD&q7^C;MdrG zm$lF<*U*8T?zHe4wcB;4uu;^D+Dp>#EKHP+klI15SDW20gPaLVUMA;}YJR2J2t0IA zu-Ie8JK{UN_<7ONtGwnp^mzi?x^tvz=~tUqD&gmuX0I3Nv-f=Sb}}nhOexkaV*ZUF z_`TCy2ZLc7k0GeAbGFC+8V0>jYNOIZ8|Ceu{WQdD7F>HPQ@yByI{`gJiYCP}CDsoE z;rFI98WrHP&7`q>g@i@|AP@KHn1910I@ga10|RV9J3+qX!QkzpfWHENQ+Q3$;Q3QTFL+n)$NV=+K#!wc?~`0{ zZFJlSGrXK}E!396UL2Z;=1@U5&Udu~5*Ej-37~g#7*K91O9}d*9p&EOk8bK4o{X$OR2ka;_*nftk35 z?4hXxG>uafa2DuEb7ar#J1DBjVp9$5D4XpUJoL_{Vo0ksT2yVhLjQchUTU`C?{}jb z*VxVDg0!>n@xxPPh-DYpeP9{y8>h`jX!ixA?5209L^4JKXVJn1xltCGQI1n0o)Jng zCEMxh%V}>#Olc=X=Aq{qUp7$fj>+Dp%Ip!Df15w6oDD%;>>C*xB<{gI|L(*KH?
o`p)5a6TEp)L-&WW!!KJEk zeCohZNRGYgfcuzVmm!sP8`pQnIL=Jv+Fiq#PL)E!#*O`$IVDX^95euYevDeO*}^;f zR(htZMO}Ykf7-rbB_33)_MEbSDea)HYC?T&J+lO*hAk`fp(Z?UWd8&9zA2fdQU-*v zrrxdT(NsA~om!6prz?Z&FnZGQ*(Vs#vaW#q%J`hXV2lAU62UggYr-~CHk;xjWNPi0 zGCL0E`d9Rz&3cfQD1WC2c%y_+V=1N}+hHx#CvKZ1Sm7yA6>l+K^8?T;8i#+Vot^LOZ_+=kFLcj0#<}6!CF8e4} zxi{M;A^t=9n&3u&g$KIg441ttIRwPHh%Mn54C79Vw zDJG5Ts^u>8@h~3f$H|zQ&l@zjawrhP{VNkIj^#3zC3QvL!cMVwprj&4gX6cun~0<; zTR|5FGpFxw{D>i~IIp(-K~C27GOA=*fmlJT+7W}m*)J`>e=fAUVbX4YwV69L@ifJq zF1)@lb-cIK8pxyO4YyR70ZiWSu)J)g?>@>ld&6rTdd_oquH(FAB~3JVqryrO3C}}v zgcy$K~aPP7NnjqJ9B!qnhWmwInd*Lg;3qC}3X+e9;mjYV{#s2$(m| zlY7^qPWG668hgKZMzeskpCYWKA4%{{K4^H7G;;B~FF~yNaUl8$4@ffVed)f4@*1(k zRCOsGPGvzigQO>u=)JTCFX{LTsL1hvk?knw1F<{bcv$Ypo~gB*G7N;?3k8LwV(_nN zhN&3u`)<4Dt3xqPg0{$DV(=T#F|cTlO(JqzP^7>k4l3`lME%gt$FZmNBkyp0n`U&~ zr4gm?WQ^1VCt{*~9Vh%&ALF^9cV(H~Q+8jcOWF)2$>-~%q}O@|ZNmu?@x?FZv(xld z69gUD@wy8w)~>9i_)am>k~cGu-ww)co82Kx#3_E>Qt*c6t%S#qjW!=@dEMPhwdQ)* zK>>&xQVcG~FSGoyoydPOn31mn>qMKX((IAy!x~7JWlv(5@v(LDMznHNVrD`uoRf(R z6^^i}ko7^xPXwxc$<_^TAE) zxM1eC2Me(2+!>qxCIZ{TQm-Sw_6^cpq|}VD6OsCRp0;szB$M#h@4=Ba#pTT`W4M#W z6^V^8*Du!9e=jknT%f@6=k<&wf5f9FJO_gbfT95k9{mlIgK ze!o9f*S1kT5HZ>q8(Keda4CI!`M8hz_;P86+xPhp^sW#iQp`bhgZe?0F<|}Ht9Lya zv)SD-yS>`N>&g~(q`tCYCR<*pc!n~0lMhVBXWoaHP>#u00lWI-j{MqVTh_aFimm;A zp^+I90cvEPndL*v$LOtL?1(Wq&-?YA=s|N!QyZge`pxUO3$zsp|BcxF_c?kScmB%( zMZi{WciB$@X4FHb_^wF5^0Q%~b`jownP) z3`nu>f$^}f;}Q~4@9aumN~jZ@uloe*wnJ2ah1w}^_B8Lt)86fKwO+C*?`cC<<|$p5 zOjzkKY7R*m(S#y6a!0+EG0J$vAD(dO$szy^z9ns~dU?``Z+CF4J$*jN#?|(P%@r?$ zJbBRlfqvSe30*>=4=lzAQV>XG7{Y80X|g>iw+@&SiL2jiKop+i{xqkX3HzJ>mwYb6 z)GUASZ?mW~_8kY|+4%HLj#rAite)3wGAuiDU(&yO8M!l(NX@3~LeGx3^YggrAkbzH zc%Y^7hl_70syg&GsRL;zZK8Zj@->RA(8tN6a_b^Zf)7*gcYth#76Z&>ED5ybu^Z)S z_Pk|XSGY%VYWoT}^&ft49X-G#9%qq$PM9JN{e{+rusQYRwR6!1kjcL73v7AG_Ogea z{rEa_lUu9t;gBK{l8xkbE-$z(DBo;dFor=WXwPW1&(z9Po%8=MG)9k%8z|GDC zMu`vN=5NlA%|_WZ4w9=Luv8;-UFh+94+3Ryp|a&-+FB&lD#`H<$jWTuDj7Y!`M)t%@m!rYIR7Q|nVu;r_|fO&x*TI`47aZ-Fm~Y!0sK zeG0t5wmU~A$>DF$3#Ro6X3*POKC&fB0G<{xJrljyF1Ou-84I(%tg-ttOCf!%)~W%T zWa#dpm;2Xcy;)vPYvhN;ui3VN6-ZZS#M@m=LPJV9dbvXgsYAdN4#| zL;G!0^Nt3|6SaoRCP0WyZCPN%F38z!VtG^)nCn6+T0qdL4w$5*{Zd z+$+?NjVor(!pR8VjL~5ns*j24^U>F~ZlOQ|Hxn*UAn(iIC(M;=8H8@#UTN>A#uO#R~Dsr@1k7UCIg%5WY zYHumOosdw@!SsIYCdr|RWX7yQGKivl0C+*vVz+>C>!cpmujqj>;0;k*A^=G9`wreQJS2-0F zA*Q(X0aP9t*;~irmcf;m*s&vO01CYAaS)v2mOU6=%+%GQVu zb&ilYgnBji`kDzEEWNKOL-HB*sC?v5`);f|Y~DMY5hPhD7qoLFCxrTM3$ckq zb*%_b_3TF@QFneMtzy-4bjfL3H~LCUnVrTY>PW8Gud#u`X{=Vn71gGW^+;keq6$-D zns`a_x+L(E%U;=(p9?=~ZPlE&KDS?L=ggP4Q(iA^@Xs1G*15yZWWW+xuv?_`-^r1hVJR&TIbK+cU5P%C9md9g`E}Qt3*QRjy2b=^q_T}7f!NEIe3x|Z zB!QJq4@7Xpib&BIUAvjo5lKoF3SoO-sX*9WoA>0oyFX#^VDmlFs!>;ltCi;>?OTYp z@qLR%LWw}8SZDH1qPh4&HbqJhNB&d3g7;jzBgm3|7)=*9dHFgpQ4-EGplK4-17ZL7 zWJ}nQ7vwaSfwUwh8^HK}3SHjmZPsZ%9W)~d#IUkW zy( z;h)k@w`8`sB4xT`i)X|1(>H@Q!xs?VKqVq{_-4*)M0UPrXXk?tCvcY1o&hFd$9*w< zvjNzp5ZFdPV16W_()C`BE=46-Gi2bk-z{5%+gP?Bzzsj4#ZrxoPlt#_IWmB;V|bZG zci)$4$KV|&%T`rc7j-$p3dbqU02ALTYR3=ajobNF1C_;93*H0liaPb0QVDnnn@PLx zjzsWe%78mBU?YCzCL|AE1d&Y6*&pVY*HW+Zj?m~DFr8e$}c87H6$rs3&v?{6U*YUw_3kL1#zaJ@lK*T z8Y*m5%$j6Q#Tb`fg?Vf#(!Ubz9?8{0RYY@VntrigtGPrUTwI%EkQ}J@H^=(*7Pw61 zywc{2A%%4)4walLyQo`0z1lp&uT6q$nJk$J$!GU2TlUzXhRlBCs~O8@_`D4o9YtC3 z@s@mbtJ=s^fVw&bH~!g{+w=AaudT;X@PXCXTd=N_6d9bHARCO51ei^Kw{7udW!J+oU&acg$A#X-}jty^mhB z%Mz8VTQGk}HGr(jI`i5c)Mu{bA&yCtt06DYuC&GX9_K2|jq zMq7ic_Acr+Q_2yiV#W63&QX_;bA03ii`$Qf>Fv2LwSRz%f1*Fpq@tK3?F+P%s0&pG z>ZbK3)6XIx-Kd2+C*23s{i2CzW|!qYk{6V{DxAz3Lvm779jewwOcd5o*VX#ub_vGT z&=%Ba64z3n_23*7@)7@@~DNLi1$ASBD zF>m)qIHPv0oZI0+B|F4QsD!~Na$Rqn2A&TYu?$X=q9v{#>Zm`ZO!c(S> 
[GIT binary patch data omitted: base85-encoded image bytes, not human-readable]

diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/images/AppRegistration_Authentication_localhostredirecturi.png b/dotnet/samples/Demos/CopilotAgentPlugins/images/AppRegistration_Authentication_localhostredirecturi.png
deleted file mode 100644
index 4abfc72a83a7d2649344d146c6e888b448406250..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 100857
[GIT binary patch data omitted: base85-encoded image bytes of the deleted PNG]
zo1AaRMr@y4X~Y_5)>3a#YDW_|z;>}m;IVP74xS7=aXVUTJNzeSPu-{YuGp->MkNFvo93lhF~mp}^Iputv$A2b+~{D1|0Zt< z)YU28Mt^$egnM=?)n#!PyjH7UXnn{%4Ps>SD;1=xvX{MC?Oj!T)RdW+9633d&w7`y zA&{|5Av!JSsHTn8Ui6^+`WRBT(-x3k-#eM`sj>pV*kMvGQK+n;+7&{5h!GG+_;K~N zU@b$~v1k9Gzwo`A*XNKy^JGRQpwc?Ie-*n2rQnL6b@M<5Gh_3*K02kW%x;(u-7woq z)ohhh_kcZF!J9iMtjGVQCHyra05b*4H@&(j%0yADbY+xBAeD5tGf4fFVyv7(|CyuN z#047fya-~mgx(an=`?PxJ|-C%xpSYI+FD-Y818o(DuA$-C|aIRrm~}yHcMg>RduZv zw`E)8M4VJ@SB})RGGYg(d}08>5=kr8mkC*B1#oZW!O;RNr+ZaW>*dg%N2Qi{IJR$P zdidr;WxOHB_sgpNnQItUf;Dt0Q$Vaol@Sbc1~`kSy|?Z+yCnA|WT3#OC}R2Q!Y)Yi zhRgXrf4f{i9&2kglV7!AqE181dX#16Z9d`;5ih-U5<$J+$uOVzb{H3q<&;q6`NFdn zJN)SS&RIGnDrL0vjHAJ22wO`M8ByakzdKdXvQkTkT0QQ}VL`vb5Yf}&t4PFwy=u2) z;L-MZ))^64gSUH+G#5VFPttY82c3Yj~mBrSqOjYHs*4NLE1$jP}LJ(3O z3>l1`z1~T8!t*$)Gz@-IYz@`@*E@HdE;l>0AW0lfHgOB9^X-NPd0-!iY6@yXMsB|Y zx$s#}sj};8SPBLffXBjc(_nsmj57TsiwbU{f+PhmmEg_R7FPkSCG(`Az#d{QH2&TG zhgUt}oOn}aAcw?W)={=|P5iz0a3g45i5KSG8bbj5cBe6Njo<8W{dV_lVP^UGqQ=8{ z!{rC?BUd=iuA(}!zHRsQZ!Tv^c2WO;EA7SHjozI@MX~5Oj)k;*^~TvwMdOIe)9~u| z1>1!c!&7Ix4D_4BTWrmrvwrD&ji}c{8Q`C()5IYoQ?P;5J;Cl9qxvWm{@zU2Q*l85 zt?7zMxk!?(`v~B)qp8s|XHL|e^-(ukGz`K1K&CMUJBeWVV|+JD{pRLbDT;rZCkeru zncOpx5XbmYq5%*euJ*#Ix=@y?m0z-HP%nx?5ZlFDTsmgE z)%W&c{x^OYT*a_Z7lghaVU8yUrDiVv!UEj=y-(tKDQFXPkR>udaLrQ2LYjlD%(;^P zA){hEEk!Yox?uaFC9iJ;xy&{#b!`_V_@+6QHL_Q!smW3Ed<{WqHOtnHL2_3j8CwUp zW7G`8amT!Kgo7oCelDT-gK-3l&FUlQ1tSDO3CToG#8H;V8Eaxv1mmBgCR#(e`#2Wu zsfpM_U|n=(%hcjzMP_Mq{ZEhvH=`s}cSc4WU#d&O0b&rak530=iFQhg~x`^hXsYy|pgBi!Dpb7hF~`bNp>oG}%cnUN@DWhQ&Ip|!Qw|H1i7K*HOY_IQNdH&)D6#RRU%g3NNi z10Mb=U4F*#Vdh96=4uoxc;JMTmRe?xBBhCpt^olq7xZw=sV1Y zS8Ld*0VOX7WheleuO-7R+ay++8p{PEWj3ess#q)O&C_e%tsTt*j!nN2U0)j{Tmj1N z3M7kTHs8pK!&%r>3&fbjhlY@@6$y}kwU^OY-dVq@Zf`7GHmj~uzqKTeL74k^Sxyi0 z%PuZzIYi;Rs#5*g1qTqO&!HpwObDX+?wULd%PE3J&D8M|Snw)0_^gyTl8?j+Sn8@+ z^$!oaA9!~%mS&c5BCW+KT+Shsw;UGrmNzgs^|JkE#!}s@#mJ@v%bOFWrUp8S%9feF zJA1ug11In=cOSWBFD|jnqhpP|Tj}v~e_H8Wu)B`Zz-`hsYQ3`t2w8I>+=;BHw)#zn z#oPDumvPBwrVpY7^zhTS?>5)75E8SS3jC-gl)?w?xe+WSI(@+h3FWg4l3a z5OG+hL)RjMZY>(BUb#INS`G+PI`u_LUFi=@sJ@YcY;Gv=x6Fl0nG(xc<|x)@D}V@Y zXp4`SKbqZW;kKPXYeIe-JV!!a!b)x9V*Suuf4=OjYk0=sZu2I+XwB&Q_H4Hi>t*O= zN=yiTa6LYKM?}K**ymx-b}>r7`mn2gHz`VZb-dF4f$+d}kM_lA!N z6bI`Tv2@pawnkW zi-TK4JG{2h)y@^ntDOY?u6VO^jUe+u+|1+8g#%ioF64VC4Fc|S&92c4q9!8l z;eXDY_gFRR*RsoAMVnOXN>pu$`dLujPK*#!GRAtUdJK&W1@tmKXE0-UnSUvjGW<~1 z-d28Fq(Vh3bE>(qvrV*sahp-K09O7e)uf?FLT@p=uz8u*0N5l3m5bosL?7Ny8GA=8MYNckh&cavqu4ykM(%I9r?tMSFw*o6^-LeAs4E+gzEp{J zb3f0a_2h(W-=ANbo8y0=tZZv$MNk`1c)1J4p2tDG>g9xLDor_2R@93o+w3Ap;URDF zsv}Nv@clcqIlxbMa<+;kaia!r5^p~-sjjYMl>y&(LL%jH&3KNj9J*0@REOV_!0Wv& zAkvxj(p5}8?u<3qrEr>1~^e?gFkj!k0b@-L)49WnG; z+wjLgia_abyjN*J!!bB!v$yI}#OmCdmG*OG$6*1Iwcwl>lrOS+TVQT9^`k?JhP~@A zu$31KT-$YJs;R9Ot$~uZYtj=wE}N$n}GVbgPi!^5^hj^o z79UpBWWKSa#}GH4BDEK-j7b8f{rGy8NDBTT9g* z*K@v|mRW2$XDZfiH}JuZ*iG`z2^;E961O)i?uCbU1-4@Ge>F7B&1__G*IVYTr-N!0 z+8UY8avt}3V6}UnMCNv0ep&Gqqrc$Fg_nnkT~s^Y8`?t|#a*e^Ku{lP=epFTgFXM0 zUlysjT65BjdCLvDGX$lN68{x9^TD^o3x%J}l3bQd8fiUgwhB&6|_BVDk7ug@0;6D;%8_d8@;H> zYdC$Jm+Gy70??`-KN~mp@9yh z4b7Ny!IiC5g<@55B@OohBX*IA{4b*ZoW*jeLa|gYvM-j zBFFrX7ofU*aLw^-!I>)C^#*(FDdC~jqphwYz_Yl)3chECpP=4YjGAF`-FoMAI;jj__VQRU`(`yKQz%X*@K1o_fq2QqeRl7tjRQ zqD|q@{pcqB%Lfe}qqkA|87zZ-yCo4Fx>gYM(Uoho(G68)AyfPHnJvsl4m#_Z8B95e zEawZlCkVME_rZmR-6G8N)<90sDZ;rYfGI%wwGGr&!{EY)8dn#|$5bz!v0*!

m2< zFr5aAk%i#NI5OXTN~X(v_Z8nPmg~bkD-=j%*utf-SZ9vS`(~ykg#?hy{)J5I<(I7r z#LfkeRph9Z)loQ^mF*t{KhnsOekpxm)>GUp>n0kfkStg{3#B9i2^+PIIjxf2z3+p) zGlwfuUJ8Aqtu*PXE%TVN6NmT4~J(D%kH4?l1FGT6oZh*PVWnzxSqTLdA6&yCte4)Y2 z=$08D>s~2gur?Jc2@W&o;f30M9yf-qCJ++RegBt0+bdxqgxDj=pWRxx7FltUqlaBaKAh@KMyEP|)Qpt4&8(J+Xu-np{7h@}8*IoBZlDx)$4odUAvT!b zPDtifH~U8}T+pS@XuSbB(A_8Be~7Cb?clH;{#kDw)kANqmz5l%&hRU1A`hSij&3wk5tZLL>|+5!a#bq6kr>i}IO-;xk94!1o095x zIFx{dkMIxK$@outkqM=!4}0S>prF!E8nU`hQUmIgdypdpAdLnkzaIc9Dm~yWnDTP2 z)w3qNG1{$qc-K8`_d-ATF3l32N}stH9LAe9jVbGUVRpHO=dFbD`uCDDD?nR#tALQ! zOSBitS|k&%lt&S6p9uzid_lWz3uRsiUo!j#8o@FbwZs%VG6Gt+;GSTDbhYu4UJ=jN z#nJTip)i*W`tL8JrIVl{-f3$do4D(^p*@9Mm_%Tz}mNuqG_m|DS zyQNf7D6#BncYlOGv0JCnXI2`&$G0E6$D~9nPlbi+r7n?Y%?yhh#l&lmg`6#8M=EzX z;Kge%0%ZVD4$x|a^Yj}3onV5AtDAZ4`V*cbE>#w2`Ni_^z<2h~vVTdg&AbNZI zZ2|0&#ktG58cRYBloV|jFgzZQT$e_1Q^#|pGKyFL^7~ArW@@FlB)`_Z$&>yemt^~! z;qAAj1@*&q-``7t#m6?m07Ld99wL85q43woGOC&wORmP67%wX*Xy5X`Ss|$=FkWCQ z60_RBPxJwXyirzcG?HDFHGc@x3%i9J;St7<4yqhAYUfDPnbkko?aB2Kxkr!GvfKP%y%i?&t*<{)OUz!z8!)F_64xq5lFr>$ZgaUAt z$)1>icq>?+KlcO1a>}>1Upj~b)qMTD`M#~`uJXxEX7;uJ+IzYiiId@?B^^Io46F2t z5WE*Q)}mAcNCOQofSNX)13fM@`msLXtE~PRN`5__BaT~`JuxKY?$GlW{<``; zdh*;69U!X}ZrgL6+x8X~e8W6k;`3lX7Ak|_x0Bj?JG2|=3WHgaWe4%61v$Y(^i0{y zC%UKFh$z?1qeu&@PEGC&wTw_dRJWKsc(mP#1D@xGqCVqk)#HXL1`(S|vp%>ON!V^V zsq6Kqo;VCs?Uz!f!n%tyvp*O+@;;b7h)2C4t+wUTNKrOe+toP5?^vXvi%gVSYGpIK z)H`Mtw`$e$CTGwmWz0h$;q}9XC^$7I){|TNG3M%7v+Mj4u{2s$mMh=M0UJ1 zVUi-kUCOnEi3$J5;k>aZHU2vw78f*=joJXksG_I=FFj-C45pNugIj$NQwJg$g=E#w*emJdL}5qUshGro6-7rMkPOYOH)J0aQc_c@ zmye6>z{z?72i7DhsdtH*Z4B?b;$x&e8vcc76@bG4QI1EpB21+u*lR@ncZ1ms<;6j9*cIoonF8-D%U7tmtCv)Bi0 zQGc|i%PQ%PV2YX$%U-u%u}_u!m06p_9Mzi5Br#F_i|D6MlUWH>F1lKjQ2)pYxVC3n zXQuTR_>QgKXw{(w*xcTnJMUDspppAQiO{ADXo|#C;Ym?_=#l!`)kdtuBh-U|(xkgx z>)uUiZhV$7yB_~o$>yQDKl(VTala7!&iAH!2GdK33(t}312>Z|e&t7cyu9veGy46i z#c6YzMWr+=MOh)XuZl=TZ-iDkQc&dua6`QP`pgO6r zd1d8|-u6h^&01RL>z?LByOH`CPG;ol?)3;F2wY6*b$QNZT9eIUnCI(BFOG&~CuKHg z8wS9SFwbkAp(Mmi-C|NWg+!@sW@)CS#CJi<2}rPOsFuuq=cori56^(!9Jc&oc7qIz z^&H~Dw>3=hl3=Z?ZnKAVUP&|r|0!qutJ+o!Us&uCX<4F&M8Ir@=}J12}z z3n-hRK?U5@=-f)^r^^_-60EmRgW*Jhp}rh4;HMb2(8VIaY2^gm@TZXPyh^rwvAH`tleS4i)(n^@3EI0`D4{MQBnKS&;_ z%Hw%4&At@;yWyp*2j$0-D&7M&p!x~w^5TYVX%b?4SS-m8$5oTwHDII@G}-(DEBsD- zXer+UM?HIsC((H*8}sG@$ga89(J1kXwN~bGfl4$epj&K2?X;3D8P~iY{$UafRrjX7 z(-s1E%W@t{gBWuK2;g0T)k8ZXm*cMx`nVq(f!1Lgf6Nd&lbvCP#P+>d->*qG!kgj7 zo9^-ph!{SSTbJwV8=t&l8QoVvk)p*mt$`k0dqy1!t@|>rw>>!Vi6w2PqwJviXob6p z5VYf(f5M>lO`BSEW3<0^lVh|rQc+#h`1$VC(URr(0AQs#>`q4zIGJ)n=jsaks+ebk zLL5c@16LgkWCX|SdI2nii_iGK+xvV*)D6tbIEFFnn2j1l>nx_;E5crgE6BJ%@#Spl1%1H5Zo z%h#|-pQgTS{o5vEm7s+7RNLDz`U}b7V9iHJ-P`vN z{6Gsu9canAp|*NltGjU()Hu-_!tag@3A;hV`D3T7BnFj2`H&d{Bm%hCU>j1}#=4=N zzX|%W3Ku!c@h91GPGgNvMxZ{M+(W~-`ab!t>+tkAng8mm46tiF%Fx|H7%G@I!7x)9 zp`j^ys^2uU|66luWouRka=nmgM;dnsC3ESi6Gra?e=N2C&7FK?2;A1CKvEd;ekPfS-A zgv@&%3}rQ9N>ahSU%v$Nc6FMoJd8Fs$Oy5UE>y4IMvQuO#-J4cBcX(huUZU5NHN#) zJPRE)E*M#Xm>(fjk;S0ZDwi*eX}Ji-JAh-`RsYNZ)^Pb!A}RT|pB|V{{ght)^nl_& zWpicu`APm7YHu^g&GPcx=3@Vp0wpu>z+4d~IjrvNy3g%{p0)nq3X?@v(Er-pv291U zB7J^hPe@Wr>d835c%l@*GF57w)sgS!EiP)#@k_;KdY9PUXAz-V-pl@8smC@6DZ%V5 zGJ!8Kk&_IzUELhZclFan=$_5mMEat+76RC4k&__jN};qte6NvQD@f+Sw6|=;#*}iS z3g=3fLJ(n9XS?B1gw*Th=!rX(cOZ4$v?a3>%zzD6u=BE6)K)b#v{AG^Hf0(kgQ}Xl zZm?Hb=08>67&h+u@2P!0w8gY=^TNl}sh2FRwww9+`M9NH5jiTC;1*pi0&wQ8Kk5;# zTp4|ETMuBxL!jGv8s^DR|J!NnhZOhAUFHYBhQTT5*5EYnT>r`j&aWex0Q-ws$xji< z_$5`PwlaFk0|~gUs?&=R{DaP1H=mWo|74 z&`le1-QG5yqEd*uC zqOwLvPdjHxL^o_8O;|G2gKRGVIvj$b+V3djpT@?(o~`$Uj>h(LNL9OSe0@^D6jalq z1USh(?J`&YuI0#X*4V$~!Jc#E!Y>UeaSKXOvVPdJZm`ew3HAIJ4cqZ*46N^yT6K0J 
zF@_ZlTMB4Y{0!`97Hh1J8|h@NjmULg$Xv`YadXQI?WGiQAEqqH$jmo9xzt+({c1t7x1&%^b(o_bkd4_7G8e|!sZu+LRwF9b zm)vSg3;|-tFM8t|2MX1#8G`4q-fc5+)yL;@`4X0@9s7|;ArQCyuC!MmIdT4K@Za4s%Laq~Wk{--ra8h+s z+>!hoBHpsst|(uNRQQMX?$Y$UeL@hcGxEe;z{D)?B&ex=ugni0MSFSY2=CraO9tOE zFzT@x;U^tXW)WZh?r|#i7E;tFd0hbG$t?%F6gpi~knm{`Mg5zvplD26M`_GDA9B zXyNi!I&0se9HE{{eO2qGKaTVk8-eix`T>SwCBdR#^{P=a)~L5FsEYb@L}- zqW1hFqWK}GY|xZ7^p}#KDx_`&a53&1Ja0Y62R--X#7$|P*%t*3tsmkB27iBmU8rQQ zDfZS@gaY-sc=r}J|DyJdqGPgQqH_8e-t!&;vLkEZ1(!~{ z;t|iS+3+Af-ANUq7{?23V_$z{s0pXu*_}1%`;V0DPXsvK(gz*JP6>>Pu6hslh`w*w z#<8s!3H~>)_@8-!r0$}dL%9Hidmm^h<$~_$mHubREw3$XHDM6(@FHdU+5D1#HdT5- z!f^By)yK6-2r%Cylpd>ESrk)Vk_npjjzcSvIm0?)I}N>H_w;;`+p6zB_5N!pGMOgl&6pQ@d;Q8ObrO}ZUVPH}I?;*b(?5qF#@c<%({{QoN|NrZNxi4vg zVis4Of34QCL^>-%?I_?;kapE&h2ZMIZJ-%M8liB}Qx&SL^T=}r{-)}C}EWe*qX!2WWw^x*4GjUC4&c1sCxJJ=z#JW3 zfrvilvTMMELox0g42P8;lKkzf$rmk6>c7j^*Ps(5y%xArLXrW7dz>1x(& zYmR6oeK?Y~=j^*FEd?63@j|IwE$Q51e@DiTIU#^Cm(O*HLwjH-sf-m9Idb$`TfD_Z zF+j7vx@;GMcsz9Xe{SCdo?@+}{E!4^Sq+@m8ZbT9Tr`@O6yJbH9O4oECo#|#YN32 z)za9|&{5o5MR~RN$%{Ovf&JIJJf|zbxLGsL8J6&t&bP-LUpt*ga-NOK%(F(uduD2z z@5+CFX+IAvG)(O$y!tlm zp`QB#m?(Q3;hEuj16udM=t-#Sc@cIYPo!`wIHq7c5F?|S8pGvC47cno<6h3%kq}%w z`_$z+XPUlJJG|9DsoSQAnn6*NwEnnPc%dLH2Ifc4WX zm7yw~tPFk`AJ-enHo{HM=;`(8ExHI+Dm23TFMW4Kf{W)fD7=q2&4_5n##f;*EVy@> zgLQ!u8g*)QV=e5R+>g2D`jyH^?+j0w7MNQDD;zmKyj}Yi-p0bY!K8I%F|(gnyCBqN z%L<(peTNx|F~FKI*Z_ors|4jDF!NI2IwVnhGb-e9#{sNN^Yd<9(G@qZ175IWrMbCe zeYLjvVd9k*(oj-|wD3OL-9*YF))SUqo3mD(4lmkwiLJy;rQ@BihaQvtvO9~ zvoZb8u#bYaajO#>sX&Y_JiF6S|x&)?=6wJGQEzCX08KU#R@4ix|h zWpi^xvrj`4juHLajTVD^*HS>7fAz<}iMzd?}5NOLX9eOl!M+r7iOq>WG)dyL42&u{?@ zZDgX2i~wu#ao~iRZ@t@zUg!muPo=$5&U)mLI~eexcOO$^-7J8C4otD3^8UHVq9MarnW^Y`W#S~p%{2X^ ze+6V4z}%fgB|GgL)Mg(mD6K-CxPg?Ot$?s}f4~fnW|!t#|23))d?-E7#+zwNmz1}- z*=N~I%yM@*EmjuI4-4!bmxq(V7k*_g^Zx~vXLbTxQe10qF|S4aVZPXpS!vB(m1`+k zy2q|K$+tB30)y+NcVX_xIUuli^Ot}?WOUW}J$q0d*ye^?>CS%#IBGQLnEZkJI5jh5 zzZ?<(Xgq@FBeYKgra(|hz?-mN?Uc0F3fv2gt^u7@1=;@=6o&YWE6;p~@g@(s{oTg_ z3&Knt1OU%^iLG#nj^cqGI64N>L(}LC9o=&LMs+ox_>c5C884R(V{wvHfgRY(o!XjP zC*zDb{=Xwu92-F_+CW5hYAN=$%QN1&RRR`(^1Z1Wh|3XG4|1Diyz&2@M8U~WF1Ivs z&)CjhGQ zVImjcLAjloILGl)_$}>!@y?^3grvGW-C%0gokK=%^tCZ@PH0ZkT0OZmINr9nc<7gV zPB?Hkgah!k%H14kN)@&NukQ{Npk!c|21w1ZnMN0rO<@d>CH<=!IAi4K7w(g|XKUt6 zK$}Qu9T(Y@@W#LXZcSRE>i8jE3@kWpmA31aow}=JLuO~KGW?Bqbp!K1H88>|BFI=J zWd;$YJKulIM386nAb;i3h4Wt4ym#i*cpC3M>0XnSrM>5%^R2Q^!?O=1-3)1x`&->* z)veznhn{CjI#Z!Pa-ctJ#fb56sC(`;w@^(y4U-x5ntCMAq^%IBa4 zUq9P5RVglTGN|`dW>h3$Ox^-lq$r^bGaW{`h2#%c=DTvhZ;@=zwcf8249|@QGVyrJ1R)Jxs*P@Xo*7#9^gbGh_V(#8o8$j@ z0lts*lrU=PTS7%z>eiL#E(nz&v=7?+`&;q2Q!qBw>m#^7Hc0YOoL)dTBU)D_eTMyz zOQ;G|RbVF0xur9`ofO^F8n?cWx~`9xiMFfaPveR}tG0+;Qz!F(I6rp#t;%@SVBpiY z`tm$>>OA)H^pe7?aESJXqO!8xGcW4>gyu%aV=hP-st4&Y17IePKU<;Cb|lf|53n3S zvq02Qjm%9aDPw^o<0k^G_1L_CraIgOz#vjQ!hv0q(gY!B7sQ~8u9W4NSUT%3%i2yU zJ^{i6wvr_UU?TaKWhsvrkZpIXqQXq@(c^xkj!qe2g3EKs?>t=8>hK`pDC;Pp;~J5| zy5t#wS#rByZkW-7tF0|vTO10;Kbsv#I<+){rk~Tvz*Xzpfs@_7qK~dxP_E`X4Iy9}bcae=d#95r;mPmY_S zDV!RPUx1UJn{Aukj(anGs$p-3dGL`R`jMAA7fh(ux9Qg)>Mv{rBo;bWst*U>T1x*<_+s4xprNI9NqZ`< zwOZ;$$wCZa{fPV5>CM%&jd@_5p_SbK)*%IG0z5}k+7OBXI z(O|Xs@g_VU$^s1K*gy8tAi#Qv!qd+3m$vW&%?vN6IY7A@Ez(y8T3_%RoP$DyzeOdg zVlu53&N{3tT1=&BuoLvu6s_;leL0A zJx`7q0_g6vGCkAuI#<181!_$G(&|){$PTn84o+@`MkKrpFy^FPNNzD=vLmuV$)w2} zjo+M}J`xNT{Qr^nl~HvxO_&!5?hxGFCAfQ#5CS9ww+jS!cM0z9?hxeS?h**@F2S9P zyX?)I@BMav?%6-P=WPE#W~Qg7tGlb7da8QHYR-&!IJnqk;Nux+r*sp1at@Skhn{Fh{oaBCDI8ISv_>c=&8pk*SWsb1SA>O)W#DIuFof zirZ(TKfv~I7&20A-rT1yt(<)jbuyLszbJ9kw_m@`Lf+-EFR&D5l8J*`Z(Dw^w)@kw z$--i%pDHv7v>$nd)RIsvXPTXrHawOS_-a;GPde&&El5l3g|BufyaW>b*CMo_E0YE2 
z=2zG(P{BtVIuDLo3T={~_c4QDMZZSL)DS;=Vb_aqp6q+WuVHgU^Eyj0d-p zDHX&c$u6WKvGp&wz=$HF{`mRdSAG0xh7TCC!1Zr?e=gey5dXvZ{kO*d+Xt$2O<-@M ziMvZ;shSSQY1*wu5I-U!S0aq{mcXlCHM|?Zr0Z$A>2tZ0!XKa#zm1wMMvu^+86>+r zzyvk7B18`0Ajjz8{inMK4xZsaZY{SXUKh^2w$Se2{8%b{ZhPi)xxeg$nRFEUh{ry+F{R0PM>^h%&iBSf zD=wwR8-m@Dl2yjUC5-vusR@ijIsXD{QOu5-SRT4Xj^gkr+LeJGLUN#vNV;F(U{a zc;yK_mQ(U$e&Pt(tszKf-l7n>pSP-A+%9Ga*fYB@C)xP^-JX6vV#gTdws4jXIdT7i z)Xff&2#v6xj?D?)x%TE&1gNN(=J=Z>%j+8q4LncP*+HZGE9fc_e6iHoOD4be8e-LM zqAgZfn&tRCVgrj?bex)P`+axlD&u#y)Lm95b!3HV8iexGM8F;*J$j-_& zK0e&Q2(_MhiRd(kMZbV)bng~PMu_9rYK?bBjmF$GeBAB!3!eT_wG!xYtmbir+X3R{@~ z*G|`hn(q!zg&Fbl%QpZ?@bCJ>AiHT@QQ`bXmLq%*T)Tm9jkUIF zN{+H54v4pqef?wy^W74)rZv6JT>z0vo=Gru19yOGE1pOG-s% z@UJLGn3sw++dWRpGiW2bo6OFgiozvgww($+(Jq=^;&!VYyPt4t4<3kA<1N``#$9pq zcqHQEf^0;(&FTE{WTLb^yz*;4Dd}t-p|G)eK2!WcZJvT!$)a6+7Q950O@6R0<5Bm5 zsjr#5cjn8|FRtHId5It-@E;Lh(L&x%^jr4Jz0Yg3;2NT^aYVEK{EH*{76%G!%@~Ex z2>(c1WiiX}-Vf38%^%iQ`QLFaUk5#w{ditU^nS^JZ2#-~%EVQKX9G7r>-WK0B{>us#}-D)%<-?h4$0SLxM724yBp^5s!x(U@E1?!8Wdg< z+0G9V+bshyEd(?)G_3TbWtjevNUx_RVod*t-g7t}Yt+7+DlTVmR`$z|P)Z*$l7scumsx@>(%=^aYP|=r8<)U59yqoVj1DqD?JF6|q zEAJfMaz2g*kaZ5rz=KRaby6w2!RkF*AQG9g@?#}B33`a=^u3yZU$)X(z#ZqE&dX61 ztzC--Y~c*as1-ET?e&L!tw$2g<}tHfjQiJBGplL*flKmL^LN!_n&&dD=Z0#qV9kle zePJJA)k!o7HturSC8sHQrnN6Ug*2Kd4e522RnB;|p7$?ypW@=TGh$-iRJCPA!$-r* zjYW^&XE=!P?`4kfd1)SN?n~ETteWUsTfVpXDT`}hlquqbbiM1IkYGuCkcWYZi5VFJ zJRoqspBB-6sY54<7OZ*{1#L*l1lSv2XG=NV%Wb@>!~Ae@gYA)>(1>1|2IK^?%7qtz z5qW#tJ1b;{OYnK{fm^$jzuf5NcDm{@Sx^GCZf$sSrBQB7k#6qbdVaQ8ps6W-7@Y~g zWEoNi3b&!Rp9_pc!smV7m5f$Bei>C`R-{^0IUKzP_{PPdQfXsRQc@B!InT`eKAbJ{ zF6Q?-d>ch9BE-BCs?I(_@bqEPoAkErPUO_>+$({&6?&^1U1*Jy^a+qGiu&c{?GeQ> z-X-*?`tz54gD1t)+)nrk__jE?akn>%{j}8uHWfq%)857U}Y_T!hx~i1U zbGU0(cP{8;%ONM_PaUG=2snR}D3g=2&b*WMD2+)GfL6S8sFc4!v{!%)bBcJeu2ZS- z+t@hh;o{sS_sEA`eZEj6P0)*;uOriQtO-e*M_FX_V?Lk(;zn zLaO0u?52;~i`I>~o*lPt+u&cxUbe(6*U#SG%2T9Qw_VN)9%-omgBZDS_CZVSqU zldR)i7zFyQt{z0ze3ovh<3!x*TC3mwgmqZC*7ZcV!Td<%a((z!yWJh9Om&C+w2}|5 zFC-+yR2_e6TKRL0_&ajvhZ%;MLOLwbA2Nx&zpa%8em zzrUcRBnd%pmEkUc1kuAt>qxczN3v%PWuGZY9%IrQq)Udq0sE3*4bC~X5%G#`xY-U9 zM8`41-C_K`PpT9TxEr}pENi?qlDobKGGoDCt|;7VkS3Ke#*+OlVt!odL;LJ~sNPmT zg@~!nZ4i&l8jq&^V>MF zBV{>Dfr#+_n`#HLh!$9S0b8uqFy1}Mm35y|8Ref?3A`VoHL&eDdGdGi5Er9C-7mFc zHx20RCbtwm=a*GO!e;M?Ps&#VjuSqXuRCDfuo++o^5ahK`#tMmU+jqnQ5N!uNO>tZ zRb7=;iGeN63}V~#StpwHj&lVau}#<{<_v_nCILATQ=9JxHQNw0O3gic4TnT~KR@x+ zxHwAI7a)v_T8T})sa9B8IKDSp&5B=$<43>uSjI>+k^&yMZ;wpp754``xuteAcL09p zA!=MA>9;EcuQ>Iqizo(mdekefH?iSpgyT^#kOB;H9`R3%>-n!1Kjvg6hwtg>MrUxbbWB?ROe%DUsV|ds7Fn+R zxQ|pGa=$k1+R=sJS_lod=tm*KVr{b$WUtcTdx58}T5e&AO$7y~IODK4SS%}u4x zHOf5XyC?xC3)Kz_L)dfr3p%F?ss9RjJ+dWwwbQ9CI zUlnL`cQ##~dUhwZ*TZ8?oDFa6_H5Bn_m+p@Dee!O-u```FX46YVR_QnLJFAIC#lhe3f?${ZOF3gU{w7j zT=gnaUd(ya!_)&zLTuxXZUwgXr@$3!BpA1`xXuIt$woLEe!G@cp_j7}^|gInU3ev; zP%756!?$X1RuQ~xr^elCYO1JbdJ;~{L2t)nQGeYgO!0sshYR|402~kh=0O9$v)PNl zRp#8g#5kwciz{?h=_V^zX$7y))No9$IaZ&4vZ65~!gSjD62GwiTj(?$$cRTujM=VF zq&e0BZ(&HKsX5-$B8bPC_gy`=pfU4N(VoiFgu~vP&~|r4qb}r#U2xd3QsqT10#!>H zC$Y@7((#kiedR+BUxUO22GWRc!@fMFIwgWk!yFJLa#+Z{*?f)$=_XxL8PP5t<0Rn?aC5apSRaa(D27q)BYwJp z7?!(|jR?Wi6nA)|{$-H?J}pca3O=t@_pX9g)%}6dxO2s7fc+y<3Xw}bGnm0>?`zl9 zvK6}&%qmEJ`AI}wZ>0Cd!rju^6TN;p3^?M7bdS>ttY8X)v!eG|ga!*6c{h{FJ+1}0 zX}yb3Bvh`e5a+vjRLqkmMk6d6?uRs@p8*A6#LvPRZZV5p&xow(R1dnC_L>k{_KhEK z&Oa`e#?R6YA5Cw_b#9z|W=}(uqhEO>)zn=N8g*0buFAttltCk>Bi$FN!khC3Oi)@( z<63(2=%m;LDKq!tZr?pZjW*ykvvK7=`e`3G^7N=w<1U&>v=cXyN{zwzR#3txitJ?( zB<@l}PUc(`OB;dcrFUX`2NmB;e2(<$y+YV4Qv(^uD4-r59cuekY1EpuinKmVew~@0 z-RMOZCC-9|Ai&8K8axc5P%(UrU48YTvOOG+?+VSrNHmw=pBLbdeffwNTX>o6eLXyu 
z92muV?=`~DKpj^Ao?;qub7FuL#Y4yY5>IZmCezbKeINf3aML85rQ2difjY$$296-! zeQQGq@2^!;PYZh9u8PwYlS@Wh{dm&D<$z@6cZh>O7W^_?H|lrvy_7mx8)r`Ote^7Z zW1oBa)Y+*vGMv0F<5~o?}q`ViZTEraG?%QnM?6$#Moar#LCGY-6B1zAcDID z2QN=oGw_hGAz#O8cr6 zLj~t)#dAyYHQSX_)O$(_s0kbmQP2!O4i2s{;Ag~doF~`Y6H*IsDp3}h(PDpF>Gt*uy^kE*%zG8)D6abMx2C9XN@JA=%l=EXa zBR9HRHIv>H=&q>qbR9FZwJ$GEcut|hP#LF2PWq|iCr+q;x>&x@Q**C!PQVWsxK~|roU^Tm}^J>&S^?Sp{h+EHj~c!5;c z5*nWfsf7DYB_|(M)Z=9FftNqsR-LTEmt5mu7{1vs9%iKDU)s-fKr@U}`+U58VSjbb zE7|C^;OBPV!tU=KT>h?GcBB=X++~a`ID}xJNqk(c;!4Dv^Fcb@B;~G_-LWS032WfR zmG`cok?oxJ$eFs#TdWyI0NU?Ocw0bELN6bM{^ZYYFwInA0mNbF|Ez$B_{@ewnquB=9kOx8WdxO6bjR>#g8WEj`!|dm44~zuW0VRcAx!`$oKq0FyEq9p&4p8z*5;l zPDzh1_l!;_#tO6W!2oVi~M4**wP3m`d^Q!DG9D@7sAb4?@*HDi70xJhzGQ4oy3z@*iyuADFgHwj zsGnmJ%=Y}LyId^_*gCH?DT}1P9vHlGP6+GOFIe()!||5nSPab1Ih4(v@2Sn~IQ~MG z07}7h1=ZPYVG*E=Nk~z+Ur2q-;D*^+x4P!sW*1AVFA?S=4YZI4 zJ}>ws38zoim~iHudLVrhoRb8v8op=LMLpr`nnUL+XR#G3OI$IRrV!v6`U?L}54}F8 zBk!jh_)`7j)4&;st}PEE{p#(|;S`YGTN@wCtMYAPlBG(~`#?J%nT?~$-)Sy8@*?4H zOeP2V$(=u=Bxn}o`2X~7zT})^{v;LIqmm^mA~ooR2=yHDy9!ySPX(P#BF^B4AAtye z2dy-H&c=Qne4FQE7R}(pd!`4B!2Ms0rY0lrr+NvQ!CXLj*?BdRs(Kh%V$h1WK=dA4 zyJ$#%A7<3GtdHA!)G9_gcmO3X0*o)j;Cg7!jHL-L;~+RB$vvb&9L%3t)5V{1*R-SE z9bi6u;K?a4qJm)Opn4P_auAfS*{^oK$w_}1Yy>`njts zu54e38>;plEvR&t&m#e_`@?+u&|Ww=5G4;p<~x2RypIVZJNa%4MhEMKv@(|sY1r=C zJ(M)MOX9s()eP(s*#WgV%j%%})FopFNRO3pzmkmqkPPhS_BBTF>hVX+vWwo8G*6KL zEA9g-D&H5#Agz;1T%%!Yir@xyvUC<=)(=a711DOZ^#Z$O>qyhlRmq2E4a*4Lr)7}) z11|;?uN=){Hnu;?qgQ&RV;E33++SA(EYld)A{vflUlT2g)o&sc6_P=~0e}EGH z%zd#=5|t!rYPJ%-6pcD~gSU-?c;fxWy)I1UwE%7+=$DyAwcGjN?Sirb?$Gq@d$Qkb z+NR54D|&UCN0$vbYuqEc`s>wX2n_f|5pF9EZH=fs9%R+*T&l$bIjsEqq^38s|rRT>eQq44!kcqTdIvG$;zqkkz^{ld?If zWjS=B%+7L7Y-|T1)%u?d!?;8xvN$_c9#xrNQ{Rga)CHLTl-vZh8>d7Mm4jF8xI&tx zcgN!7E|=ZmL?%hl+})`&itx1Hd-D=lPo~`!0JA3L?iB zfjGu?1sGEe8xa+&iTdl;F4m1qoyr!y@801pN`1ujKv<}t^SX?eI~-tvB77Iy+1{Xg zZWhmr2T!c42vhg0Oxvw=ETyJcu)F?9J8)_x02e;qifO9_9hb!%R9Y=AJBQL`+z>!v zyU~@3drmK_L5&yzxLerpxvwzsjE4PnzU5IVMR5Zq98ri`WzC7OXLd`W4kyPQZ{(>- zJhmM9Y%$+O+)!*PE#W(ms4{WA-)}B4`aUq`_*&GXo|obnY>QN#`Bf1om_b)4;K2dN zmRXMLAncwiaT8=!Nqw^BvomZ0Fb!<}wOA8owbzf9`V_Lt=OEC-Eg}bt*e2z&^)T)90Q?T&6Ur z5@Yk;7qKMU5>Q!h>2$G@jkTDU7s(&akt$Fo4I2WA-^Xd!c={-&Mif-R^*+gcQa|M- zzcH1FP*kjn6c=^L2w6<>W&7%zaRSfF2X}JDa zXQs06*Y_*DlPecpqCh)8k_7il4*IlcHf@A^AxXE7uuW&q(tB1DtZpW{>+Mmzt-#zP zD3-bTo~unb-gQBIzFScZQ0&n6)A3wV(^G^uM^?=(!Uh<`wyUtcdbpM0u*|QrJ*FM% zW7_1P5+Gehq$58=SI>7JXwJAm`{I@o7`X0T88atam+5>KB~_6f&tiI7)`-mbE?IdY z*w|fD4R{pvGA2$7AFXnh-klpIVYYhjbU?xP0LVdKRY+HElHC1y#UqSj*3;Pzj~wHX zGwLMd)M(j%u_+tWqp93_RABK6I3sT0G|L-5E26Nwq4R1a#oc@7+>VoI!Sg7BAwG1Z ze#Ylby%)Asf`|+5o2oofqPV#cAHuVda@#=Jzy~+vn66Q=zZhhfq3(CMV(sd{(hK4a z{(La6&m!g;6iLa6W-0|7DJ#FasThy=2tU{-arQ}q{aD6gQX&y({tJ7B);Wr(HP7!&O z^XnjgQYfLI{`ygKMKwXcx_9o|)@9*j?A9iv+2VjF$yJ%`;EnxDU5lx_34B+t!6feY z=rbC=5axpevbz*8@s=yk`8_^1);I^{t)CaYD7+(?7inqbNk||C6~Rr5F^C{9hpp20 zMFol(GCiz^`Xu9|X41abv#VI53ix^Ji%819N1IavC}XRnxN4zSI$a|OGyXJFzO&@J zz$%S-VRKOSKBbi1+G4T!TQYq!J|`UBGSheKw>>q< z8vx7?D;z)JwjQmCfkdgC>wPt{W2eGF5L4nkGZLq_roSgD#?%X1vC{IQT)0yG2{mpx zPk!t2_iN-L!}fXVwC@;1)S)876e*8>`w247w<|c=jEhqSKkb$^O}Ttoi}BNfKC2^j z!1qpC8VP&SRknK|z^|zq-XpP?dy>jxA)kE2nXt#w@~(r6A{&vK4+K$xu1YBijMcjD zh7D3UKLB+(;pyU0*ge6@B_g7sNsZCH_LcA&+-ux;#G~cZ`XJaxNa(`Gj3aOT1;xPy z^p!8#AolB5|0Gy`c08f)1*`4_-Zrd`*r0Cr795k7c;)tGgxM!c>O|4#B!%eivr5cG zbeIMg7WPK-m_~_kd$2(Gtbw%9#8eHz@H}4(cQ2IIjAjDM*OF?Dqw3b*C@46ygLA2| zT#D$Kp8`qCU{NFKHtslZHI{#PW_R)T6!@{znTT07IoSV+WkJ5~^5A7|q^(V?>wd7; zWMAl@PFjPVlm!I6bcaBY6!wH(tyiOk2$U+8gSH~M2S0z{;9Nxbvc+Sh7>x;o zsvMGK-E|4SE|t16%>cvAiW`*d6THQ?Cjr*v@h!nGA>+xrSO6=vFYw|e@22{fXmj+o 
zk4_?{2-%|4vuqy09!%U1=kqX;9BeAd$@JRgHumtbT%hy1GxM7Fpy2EBg#;x#;7KS50>WGMKOWhC}(Rs8LkyjY$3{N!P?`ini)4O_I)OWm7&4r4KH3M8+25?jaX z95w-5PL43;=UpiL-UNCEXFHsmPJLhLl_%Yj9JW7CGxco3`EWz20%u$KF(Wpt;L!jx zlGM?Ke`1v=aQ|`F;66Nf^6ayH7dljkjuO<72Wkl`yZ%q3U++Tbr4&DjO>G*ebiAN_ zvgUomUMCm+Bs^6EGim-6VKF_(R+}GIJvz*hE$3#(z(_a)kXXhPqm1qbCzH;2! z32Z-;d;*%9A=-%)KdCVjWg;)CVI8< z)tt(F9XD#ULzUB6?XCWB)D|D-r{JD62x2YR2c_4ps_KY_E`@!b(Iuir*8IpZL`bCE zY{^RNtfBmPXuNInfFYog7+mNHLyVk%KPT!Ka&(_>pXKM7msizhUZdU zMMN=RJq)>r8Z{=1?{e_`-1~ub$2#ben?Cs`C5Suy>B`d!K@0c&CEk}V3&caWUI8Cd z*u~G2)gV?OZ=->o(_zBQz|in~LXtB$m#b?Y4pJm^2Ge*?&+#wtnGZ0AK+oIgq}e)} zywh~>i-Qhbb+)8K%TPu$ITc)P1;||X4=j)jMbwcHF;F>((t{fXxuGWt=siR;sbF$d zUpu!cwLKR&K^5}{1BXZlR-wdyRArM{&i?d9sNGTXL{#JuDc~1@mmhoFp-7t|@dciL zurWIZPs$vOM}OafL+sUW1iYYsDuc|Mr^OxZ0+DFMh5=V!SKz|CCXArlax5I#S+XFm>+6jQ#y{ zR~tv#aXQE#|0#LtU)BAAW4?0<%lTDLf-VM<=K3&u5%8jCzwlH!UQd1Z{=JeCNowIc zFj@4ZSD38>e7**$FadEI``2_J$NbNK!;1*_6s?d3${!gA<*Wad733!?rb3Pnj)4rGO9@M)ngv_(Pn}rvJF-cl}RAG`G?<|Gl0|I?yR)J2Z+EAKlv*La3pNR7XHa zw(h!ZU7@tDy%E~j&EhC7>15-o`TA3y5PRG^>gAw_ZcJOsZR+1~wkZa@OB zO;XJGyW7MZZh86eqa@johgb&ysOkFH0n-28u$OVOoBgj$AX1s!^oekU`nP^a5VAk; z-wU~n+428d%MkAWs}n0&$0*?Xj6U$X8CFOBoKIPW$+sE8!J=Y;u|~W5ZD(3BvkMWmFP~^tnpn+*}NL4Y8j; z)t@m!JAA@X8fO$I_#u}5<_FdkY7DJN4#*^ z2kngI+RdMz$|U5ekr%Gvvb1jDM8dWOLagnhe{=>8&)fVCc)si!+YomGg5%MFb02T@ zZn7$?SR69BQD{~!3)G8Ts;qa%&mJf6Lf1jbY|G&J;9AnZ(lBMmL=GZAcLFYs*!0$! za-^w#>JAz>4S;k@&cvjd%TQr}koE#+4U%I>H%!svM#8Y0{4 z!&ywm(S7@#7ygc$6*n%u2Vx>07nZY}hCE2KMNkdJbF(5hgQBWO_vJrR~l2f`IXN*<^!e$2$SiuRg=q zYYB*re*_A&27k?}F2i1(j*r>)6uw6kdPj#vLv7DlP}~arhYOeGEudm$Wz9RXw6;!q z_NkLwbR$q;DsK9|G;Him^ixt2+?QZu2h! z6IotOXnsVs`R%DzzD=eR6$Lo9B%Kql{iZK`M+c!jF{1GwaXzEx08x!cJ+VAMvc~6kABm@2cjgD3QF{|m98}!N!1|fn&$M6z4If(0 z1m)av?|CYWAn6b5;GQ))J-^*Xw)+b4zmpJ2M+$3Suj9of@Yd#c{+hR_11*J(iO`z* zEwpL}qEM4P-Ni0Ig1ih#y&YSS4n_?5hpKAsw@;*TxG%(lct{RD5E-bD+#|>#gP{RW z=Wnab_CBq^Qt-`jcwjq+WkUg408hRi}3 zGUrXyyd#e(bDo|#Q&5^9Ihwjoaxc%LxBB3C3tNWui0NTkNrZ?BsEbUdJM?XhhK;L7 zw$N-GeXuB5*HkN=;ej8gI+7TIG2}?1Ox4w?OVug7&F1-fR`*NmYQG~~N6U>+sE}pv z!R_ku`QvrZ#3UZg^j|}$Y$QdE->^|FXa`w0N#hr7{A-sldit^vq(-N$XMN#<=WjHX z>g{QMRAIY02u$AZSwB@9(GY4+_fvKtcWvXUR6OCl?0b5+(*Y=cU;jqW!g#e1A6|q% z@9SKHC39gF%fcsbwfKZ((C8Botu3EZZ3GF3q8!8$QEGgyfr8}z05F0K*PP~c@_8_J z>pbV)9J{@;sqSzrF^kg{?`<*N_?197}xDuG#2(ITDR|ce%a_UImFK zPd)n1yOVVvK1sc8p?whfp$*j}Qzh_<^R{D_2F1ql<^y)24z%ro0 z)|yEhw7X8zRrl@r-L4q#Uq}`r@n>Orti2+qv3W&(k z8bmiHt*VQ5TzxcvPzV`##NQ;l0wOj~m~$GzSYCpjc={czQx~s*=+g)pPt)}GdcZNU zg@Z4}zKgX~SQuLMjPQcaGy1bXhzm@N^dZ8l)NLm`e=^FI?bze_BcQmSqJ~2OT6Xsj zsFtplc}JH;iC>_R1E2t>e!DNPUFmblXRESw&3aX%jqs`0CKq76dv0jib@1>5=ZO1P z|CU?59wUVIg|#;mP6t@0o%4#PAavGT=lQ)L z{?AyXn`(f+x@})Plnkc|GLxRWykOe6?qMB0AdqdauF$M~ZD_ld8BEIvpytZ?z=KB+ z=>*Z!5Pp6jk?j}~qQP`agLsP>Oytgxtd{bjUw+LU-ZLW2_FLe}=CK{WY{lNfyi!w5 zeprYt+LYw5My14n1H@O?9k1J(Rj7@zC41y_cjnth3)-!NU7iGEZpQs?kzjm1xSBdQ zaMqpZ+kGIfdbfKLY|zTIY4blxJc&bmfw6J-gcEuhkDr8e))p*mu(;NpZqPXU>-C9G zNrSvK@>a+T;ACb)x%Er0w>?qfwAx8^{UG!o8%6)rv&8w45CLYTIS`u1nn*q~^YrY+*mTB2XVlwXzQc5aN3l4}@Lc;e7RH>R&VB+Ldz% z?C$^F5!qdMvVk!J30F_|J_e)jm$d`h!I-g#SRTyNh8q zDrM3Q>~Gn~0Q--2+B|80m68?vm2JgN}(!C-WEQndJgAzt1)|NjZP2|4jm z{sYq}|4&a4l4jBxnPKH7zTof%*U4JgQPw!~QxA^^lyahcBY_^M{CIz2#^~#ui)g?A zWV!hPlg%NhubC2(R|KTz`llO{YBO3k?Sbtcx0@A>YZv#&NlJJfki_NY7rmFvP*MS_qft7kvYC3mZ`9iVicFb)EK@MJ)?^$SP^km?j25^J8(rwk_vkXlANbRsjJ|_4 z)rS|7-v$Nv*eo0d5mcP4w5}g+DV_izfCAq+{&U4(aBdzJNv1C=iXYEEBbZlx9M0C! 
z6^czKv?ge``DVtP+q(6H2axZbV#FCH6oie_MR@SlIW2~p_=tj%AZxG90-<2AnrV8K zl$k5H*F^U=MiRJ?hgcMtC5b7VwBb-G*mtmcWb2p~Q~1k|z=}RQd;i_Go$DP4`|pgg zw=gyBiX`F{T5Gl2f+Hj()CNhO68s5gY##ZfVeGgI@7$D-Co`ku_S`tv6^7V^^7{$& zaH+_twqe#cAh4u|tOzI!f&fU-G~qU`*_WgAQ`_AIcw;V*p(;q5wp(#l-NoO||}S>*QKw1zz5{*r`kw&@CXX(K>! zg+W$XaWa2}4$9B_)R*tI=oyeLTGfID3IC{p!R&ZCMYt=s!S1TnSGUn?7l<1jJx`|V z(wRg%g)TJ!%Yx#bn%)s$HlbbByKx=BlZzM1xBY9Y)M*T;)U#KllR9C}QGK1PtLS~i zMKR1z{h?*02zs32i)~k((!~{THeG+@%JgxFv?zo*J~6H3&ci4iOh&nPSU3^$VE9ZU z3H^m>sZUG^-tt7#6iY(G1@eclTfO7H6WT~se*PEjR~09tnq)T?wJ#GPJX(TF4Mh67 zCqC*8m31VugRynue-#+@-kJv(b^kI35?YXUg16ms{SC@Sf5V+!p@ZtbQkCn|0CMe%6C0)Vn#Us#1NiW|CDpvXz9Nb4nGTGgYBQ;ufX6VP(= zLVmu*iAd~cA9q3bS1FwplkSjUV}R!hwWjPN-|*mlC^D^RK3T04uDS~4r z-_u-R$!I8-!Bnckg#duBI^X*+!qt@%{0Uv9V+v(p#+|x=1kLm@1VO2yL?YLX^mF*E zDU*WqVzq(MsA9kzcOY({!IZm>nc$Cb?ARZet-8iDIRd>ALpgIU__KJ_!x+g_b$Gw% zYiDz0InkGpewhFQp1vi@W@d+6^ce(ZX>eT14}q8ozAd9pRE{OcO@JhzKhW%O=Pdoy z{f~vry<6()60_aHYsWi_7j1gsWogEPC~k76CD)bn=tgpr2kNW!JFH}pXXt}8LA#6m z&duRh8$jyq>btw|f(nCadSL-4C#Td83a}HE4IV4nn&F&4IN!S79!H278wCY-?-kWR zjEg(IB~DzH7Fr6fb)eM`k~EXPgUnv$X#8BPY*jVLhczAB%}ujPec!D?P4@_;sL>KKKwj(x?-8aesPWm8H(r5dRvojb4wcLHrNnRWS}hR;PBxn+7E{x&I<1Y6|$??T=5p6@!@~h*sI2e!Y02m}{*ks{1PM zV%16hu1^T%Dn^Ng;xM&lM^xuhMTh_I;Yc(HS@}OU+Us{(1(waHA#k&Vpt%;)Xsi*$I% z;xtz)k#50%i9GzBp@zY-7NV&I>nN#{p*C?@6`j>YRkrP~;b662A{`2#07cmXcx z{*F{0Vn{vBQHMUWe^_Gwqt5qBkZOoCX;pOTf7+$Hc0^2(IrsEGd|<$8Xw0|bBH6_s@~E56Z>qp|_n;@lS;j3QJEH^^fO_Gve?H%SeXfq7`~yY_zc`w~EwDO9IKG1r z$0?AFY07sa^v(AUVe`Gd?&{K8s1TeusfPcDd1PF0`4+a?P}uncQBpQzwYm3PKKiT7e7u^AH3ZI_*LDvbU6%pB_zhGkSX7^B*mj!B4WDouNiVUi z)sx>>7k$n1n5F`I>DAB{cro zuQY-I$hNXi?xk4#G;f!+hTwu(j0w zDb-Nh_|a=_J2!PDa;HtP z>AJA(Q0!VpbGz^A}!z0vi~Z~k6{Q` z)FSPx-d!YR1nHfx5H?s{o<8q*ved2@w0U5;wT-Q-4ZN><4y(R1&4y$!94>0hie zKw&lSdRy|#(7?@_j2)Enw$b;e99(#a!)aHAfxsV*KdD23l=+&pnn*?WGo){J`-r< ziZfJpJy|JYf&mFrtm+E zZrbjrbUQqv6I;?|=|*zJ=WDpp_wWqKmaQzEqQad-ZOgRlV64S0_0n?t<#Hu-zXtBL z9VSnUs+L%6eJu)?r@<44ey^S4iZh2O^fgu2u_I*{C@(zs>@o@dow17Uu~hvi?%Vq# zpJ%992dT!<)~Bg??KCPJBSVBIfh(epI5qaW8JFW(K%GA~P zi3VBZi6SIv$j*EdQ0bR=JPMR`X~Zc&nE@s5Yb=K#61IsRPn8!uqeM+qt$v|?XYoQs z_z_&b-@g6j(%xBwB+l@@XbUlDUY`0pvxNkG99(~_?>$`C98}J9R8+(j#g4q~>(s7p zaqnmJx8>KGx|}0UaN8c+M)o;d-|v{xXfZV-&xVUe`Qub-Y%px3yn_gK0ZX5nIZ#yx z_W97?s&wPCb$o*0Zz(%B=hlTwae5Q;_v!2_+(1GuC;o1l*d*b#fCq*P-44;Uo%b9U zNCm<|C?(FjTe5P&xz9q!N)$kIqM z2VZ>8ITyg?u5|w*ZZi?K7suQD-~f3%RgM`2Si>xFyLS5lVAc0ZlSTW-E#H2W#SM?{ z#C5+Y&1L1yx~`o2w{jks4}0ao;zK)2ji3D+<+HXs z<&C1ZqAn{*RY;d9)bJ$D#^K}RPS>I{aZ_T^yxPDuUG-k&POSDiDA z>yPW|NYi00n1puuBnE8&L98T6D>(tiC4QW6DJSJZV;eOweO?UPRt; zM2`PK+*@|V@qAIELkN=K1lIt;-Q5!0U4y$@a1HK|;7)=E8QfiiyACcv2Munw`RDgM z_kM%RTJvJoG*#WzbxxhL_dZoH>ESsx&Jf{zINBK}RhU}!X~*T;+}vpNp?v z%azHD{>|&=mJpGJx{ehQXo#XC)Peiuw^usZby|qQo&xH0|G@_5v%bct7`&7s36>H& zh8^hCazTX^Q|zR@)&;GGba)ptG~=4>6Z1*+TOMCOKjDypqo7G%?-}t$*pO8ZzZsZ? 
ziGts@wgUFlgL*-~`(v^+_&p>koHNYbf%(S7L(sST}j&luQpT0%vt4^3E+x~7Y>+QrZp0yF5Y(d*{eUOYt(JB z0NFd1_Y=ym=~&c82evZF(V+1J7L?-O7d%(L=-!i{1xZV!m1BfdN7RJ*MI&n{cD?oR zd^4T7f}bu7@R&v-Og}R<%MiHmz%nB29(N=+~|@Q45_B ziE*52mmxI$#L3p>^6GnqFbn~>egFv7*6+x=gsn1IwN~isH_og-&|J0Rw;p4jqi!|+ z0Hk;s*CG&~v^vi^P1Kf^Hn1L_q0Rh9igsw_Ob^b)RG2=~$zw@29aMe=*(K`+eB>VS z;B<6P43(~cUO}?aHXhVp=J?F(sE>pMgZ0LhP_$SdVEVcl{?ZXh`EYMV)9b5CIT)m& zPKMiySl6a}gNp3&_z6AP?p;ONXQ-@*9eeN9KwG)tM!}YKf4RR`%oF#ZvQft*AG3@!t%Of1=9zV&F8?XprV&%g&&ZOy4Dh|myat~w ze!{>@GnZ|_rAf_x@Jj4!i3VyqY3S|5Ho840nwyF7l%Tf4?`A7^s6Rqb5j)0+aaQAi znA%fnHu3Fn{7i#N8hrVd2L)Y$*%*z4(N8@;?@Y6>eBUsbSuQffU)-P);HI;cgX%3_ z76102IA_QFR*I2q_B;MBvaEENkt=GgYxfnk)Sok^1Y<`(8+)XD7N#vwFqLA-gpJ#l0dg$mq8FLZu;67hp93$rELsgxTh6Ux>sc=0(c zb$`6dU9Qn=AB_j!3)}-v>=j2uH#(`FfDfb+bf5rOS}eu8l5{UD68^Ga4)uWuH@ToTr*&yx z#{cjO7^0;eU3MFR4v$Wtq?MZ_C>u7n`kU0|*im-<5KZ5sUjEGEY}J9JEhK5&qRCAG zPI%r7v)aydL4p#_Pw!Cl?7r&NtmF%%!t|39u8(E?A-CXk5qGRvsd4PGoonNf`}U&l zEAXd;s{*WPfUoS`eazfX$ZgxoNWB@=)tk0FxuJ3{RP63`SUqr06Hk}3j2(3C`-0H? z>TReifK5LOoVO96@78*{Nu!1XNljegJ+f=1SNXu`3&4?k&JulgY?L*&YCCj+5!3qr z!W?u1>TgY~qiKz2k34VUGaO=08Z{9&nyJO!c^h$Ig$`3F*+@Gt zJOz;uqhJDXmpOXD$eb5{(0J8{qNy}O8~@?%jVeUR%BgdY__Ot|FkT8`GZ7J!U?}cV zeEgqX%&kBGfJDM2JCm4Mir#!@wzM54(D+?VLj>*&Hv}~F0`B~{z{3d?2`$9rEin|p zlnTt*U|SlxP~Rf@ZXr?^12Z7JZ|yD; z?|=zz2_;O)O9ly1_K@2f+>txdPA1 zvX^r7gIAV%!*1`ss}*`vmHgGA29o2C0JbkF(TpyZtwxWUH|o$%Oh5}H?`AG+7%|9X z3wzAE1h-R3^?BugOaJ!{$O-g5qK29&wDlD2rX)-yP#_HB!iyXg6i9sQPuo^XRNlIS z)7qgn%8&Z!EJz(^T1+@gv$memIL(e>06N5C{Yv^Df-|{?nf&_hAMUer5h~s1ccEc< z+=w!`sY(^rJ=-?BOG_+q?L;= zvAul5+Q%i66y8-jm#~c zqTu{j?l2oy3;xr(-LaNuI-%Zz_qb#${ti!T7vI)W{O{0pa-3xjp(X;9D}}pDXT!pS z^4vtPKIa^9-Ma5<@wZ?3?@Uu;#7F>u&E(!U<)9Z3-xgY&trPjc`feFm@S%?JioN&N zD<3>haUX~dMo$n|dxr^!go=JQzZow9LOwGR^li3-*!v&OoyHnHEOl7w6!_t<4Sfr+ z-asIef559@c|;V%iNx5oGy+tg*=vdEB?b<@A$?bkA9K4wOSe4k>>MvLTHl?XaNISh zdLL{!xAp8zu=FR6^!$P86C$eT9Xvs=9f6iJJ+N2bP&N4m`$P3df_$V^!HajSrybNG zU@0+-en99IBh$agPz9rMG-H3oslITJ4r)MtkJn?;;6T)Qgju-Q7lrgH#C<3Zbj z0-PDMo5I$6S3!=teKGSjJzsX!S#d5{UUZ*QR8lk)A7a>k$h+IjDyp2^bJ<% zLUpyy$%*s1cR@Tt3khVF+{iyfu`b_Qq!uTtz& zwW)UX2i-z3Q17h?Q!0!(=u{zMZTw=L>}#6mntKJv@22g9W3IMm*I}SO80xUl#x?3( zv#39K-(&6A3YuzHFXUa*Pim!_I>iWeSH5d}?+B zA*Pr0e9n%GSrOe@iEku6TtV#GYvounpxR9wblZA zU0GAom-xqV=ptGOix483wQ?36WbUs|HoP6K5T~rv8k_?3JGBL-;M10}sUTJ-lFT)S zJpMHackw6v;}DpRLK1o6rD!_6l#lWniYhDScJs}_16k=6J`riU={v3~sH02qsgDH$ z!XzSdE}AfFx53AO{rWt9*!;(8)iHnVq8)^xLWsGVEe~-?xB1ZLn*d?`lf{HetK=wm zV!2|A?GLsHLSIiFsA)JM4%1E#Z?myAHV>uWjm(XEc=_BH3e~Tw#}vfQq z{NhPp_7l4qVR{3tQ2~daX4wIp)+nOSiA${(( z`I3~3C@3YA!b%%jCD?k6nqUQVchH{oB&6t#YPI`qSZyp%ZCB>ua}?Nn>MXv3n0C&{ zd^gYhKA-Q8N^RHQ)){b2;aKohrVrnop^~D+4psJj^KJf?h&wbPPQK>xn_9gyL+P^x zi$mo3<-HW2z5F;u&!es1>W5=+^qjJ!Kk7m9d|A%^?T?i(s(X$zA67vy<^i`}O7A%v zqwv->auC$et2J%1mV1^{Q#B`-M0Wqmz1utCkIuwhRPUYnoe%?Q(a!^LcjNH^JQg<6#v$wQ-=^z0K0!hqzUl zRiTIr;6Qd>oJ6}~fiH&@Y0R^3(%+3qt>VgqeX#_Nrcc;=&W)u9j7}>RnbvHD_u@){ zTS`7VlSg_YwrvU*BcJwTVF%_Is^>^01*)!uo)N=rU4VtsSma(B!W~ck+x*Dg4)^2rXYah!ECQt0i93+jc8>pr3pvbEDfDQ2SVR(Txj2$AcpxK6Co< z>ne9PUG#E}t1+G7fbXqISF~Rj&P+2yF^(PL>rYmYas$6OK6nKhyu_NC{GEEltp}cNWVOldTO;JWw?~2z%-rFwXV@w!;i&HUAWX`qC5ezjU$FJ-0 zAvX6HQaDVVuKiKFT&Nw!hPdRSIEp?AuBlk<^6NAu#LGU>iP#mQn>+#u1P$rW+9v$c z34c<4J$)26!^WF3GNiaHiL;QQnmHSK&wJ_lZVioHOY(Awsf1;p&dSZtGLATLVNzxH z>i~_7j7xjI0C=;YNFP_;6F)_G`Z~IHAR~0L%Ag+q{AL&ugKtH4?v zzF(T@mo$}rH^#v~e2xN=n2~h@_uh98C zE7;M7cO1dic9%1 zll2SAXKns1k@9bP`ultVl#5!vM`AP+d8z04ZX}RcA-%p7YPz}+i^O?cKUwU1&5K4b^aW_ z1iA#@eBOG@>q#iEFjGuZ*~IohI+R{-1lf4Ze-Zw`+@5!*v7BwCCl>YIy^2soY&fW( z7OTAn29*5wg3C3xt{wmQY-k2HIt3We)qpN1{vlL^NiTm&7V1ypx`x8rVS2}Io27?= 
z4a6WZA`U!`#iJlsn$$w@WFV`(J73@yjaT~Ldz*YaY9+ofJIm`H2$QSBl9^r4w@+8d zvGKQDHP9qef$#v=ixIArZ*W!7L|}NnKy*DE zsoh95g-sx>bloGU?Gd`KW5*`C7&}d6XWb4cH(`&x$*dGPc-mntX|TQjNE*|BPw@^L zKTB3(2hHTfKG(M2P6IE8dr)(GzkKVr5!%G=*K+2K%PSdQJ*wRSIs9x*X}LMdAg9vk zAvD*P!r6W+cK$2bqrJ1mo#Ws7;aPr1r824-{r3<&4dG&`x zXzy)UaJvS$w%nvvmb0}h=kQzF{?s_~tqp8haBp^@1YjxAO}jG-cwDT!)=P@aI~N+-ZUK`a=3$(~B=1UD_9v-g2&Dy%R0nctM{*om_m-lvM+C88JJT($Ev@bdXAJ*m zonJOEyI(yjZTCP|MZX;M->k;`d9XCy@d=6-|I)an?%vUV)Od zu*)B*k$-O{T#`h4btxzB?wuqW@HAFLp;!-V%*jAGi~ghSeuafEa}~cqPI>Zp0E73i z$5xm!VT^V>0b!liL79DXXsB)Bd3&V)V6vVeV^473-N;I59z z*{j5R`QIlpsHGYtFwWK?61mB`zu{aa9fWYttVBPWYn!TjPw>^V2eFb|?*9IAJmh)b zFK`pSe#ZMSg`9sRf`HH-G+(Kj3cBP#aZCt1bfA`=>n#U)k@mgWaGiIsc@;Hs< zH0>2{stf%1p2}S9qQ1rHiiyZH?p(foPyc5<_!*=D@R%Xv{Cb!5&RCTA%~W%#;|Ftj zJFO@xpTLuYC0u!fzx|&P+oJ|ek_UI>Pf(Kw?YuVzcIErw5~^$3J&t^*&!+6bRpf4p zKd9&xzI&14-`{10c(}0$Ni4U(;-1;U+nA1-3Zv_Vr2>^FNO`@n*<;EJ)@2LK9UpjN ze-0eUE=<0k`#)L$=~gtny1;qj%f5Wp+Hy zQg55pm)GDmM>Q5QL1pA(Jxnw`3-m{79gb01erlj%o*B5D_spfq8RXQ3y{IX9Fp$J2 zZC5+Xx-s?oRJd94%JTlmM^`-_6?>tamHZW2e+;nmW_Ud`Ad_*ux2s3P_T3T;C@>F& zT%rJ9=E$<7#yI7@9dDJwzO~t0s5SDOgL=o&o7lWPS{Nl2B&TbIg*)VGCYfJk#`N`X zpeo)fM>bi(JL&S8KFyF}!>48@kDX|=mQs`$PK{el8;o*PLUXUU86|hvdfF}y8^Tzk zY7on!qJ)*uLDQz1fefGKwr!kRQKk{U%4=2&U!g+2KjxiNGq02vXe|gL&<|&5n_Y$= z*_C$FStmO1^ZZrQIb4K8w*L8O$8_AF`0SEh%&AOfnOmsdY4JcMN;xV9-mtAY<)2;o z!b3}`WAY~Es%R*=n=3MTzVzeJ(L;O%tyM$O{v~sbYntb6T8D+2EpC-f12%~#zlC#a z8p-`ojO>i7l3ICw+;{f9r7<8xrp-hk9798Kd|8 z#U%d2Wq+*qYP+*(z#7j-IvZD7Te4xktS>ffjNIua=_vDS+t-J#v%=)r!R54`p&q1MU z{-gy65*z4qWEv(Kxr@!K>7pj#$;o9}3^=++66llN{JwV_Ty{&f3!}hBv0$J-fTpx=cVfUa$UMVa9yn&t9Z))gp!sUr~C^KhTPKslxcv98SDQeB{uv2XpD8Z za0KwK?DsT1Zkjp__N*iJB;2k(*rLIme(Xoe>}o^1J-b=-v`zR*+U|kB=5&Gq9vncMM5c&!C}7V}%gq0MPOkbd0?oz80rG#Z&x zww`V&3%Kl>1E1x3TRsIJyyU0ssPzHo_ecBp=8Yf!*X9?!ruOzAJ>leS(A}i$b7)+c zLziat)kb|%5eT$anWfmV9uHezzu{@Waui*wJ-I-vb7U^nzha3+gr|MvhW@PtGPer@ z@_X1c^Vurxi&`f5r=Qxf(r53X@_EL{fkz|n0MY2Y$;`YVYQH?VW?jA;{fmUgYsmi@ z$-#SPd4wAmr@Eqv>sZ1BX`RnB%q&zK=xAf{c%c0kuN^+)_mC&`%@r8oV+jnvhrSb_ zFZ*;vNO;yAC+XCnn9gKd)u~$puY%iu;ei8+huzH{eqqZIh%2IfV=;5-9|Bup zm~8juNXXZG=PZj)2QFnVrUhE_-FtiNTZU|a5U>T17VhfVWC&w|D)8wuf9fo~E?U)| zo7EAATwD3qCHJd}mkJlSxn6-{Z>dhVaG6WB4c}729(e=8yBIKzNt9BsgP?12+CN?1 z@N4#-KLc`XJnss8e!MCyE z>pr&Z*`low<0Ha=DU|aw<}%S9Lfo@E93*2-n@uD>kY%%UJHMl{CeB#4nnltptwVHg z+q}X*oF4k~^5=3sQ)i&+Y}jmn+Zrr9es9crSQx~(BvH^hzZ=Fv4Sp3$%4s8e@CgY* zcGCrNyEg;cxUhLr^5o6xkBghWHOjC|MIXF5xNgw;obI`KGwnYZ(lYc%jDfmZ#~hF&|}gllCv|gMnFCW)(KR z7HcQ-xa2wG{1sRw2wK9nATmrYFP7^YVB!A2`Ews|GburmDeLzjkE+zNr;lnas>nfZp*4T%r-scj&C)Ag98e($A`8O z%SC%mcGoZ2Q$Sc7vWFF$_Pf%p8KT5XA`5m6z5YQSjZc4~A^+(CT}%|%dZ?bym{~7Z znPEUix>(`a5~5^EN{FA4pCsi%?Zri}c+MT3PuRh|Yp5w6So%3|fcvubcE-DMqi)Tb z_@am0rMHIM_;8vGvVK3b`+BL~*xwVkH7xPA{?$&M^%)78WAAQC3;IV zJ0iAxge#el(0n6d4Cf;{!o!~Op(=HqZ*So`3%CPH z@RU;7W0xavl}+zYemD4d|H0OVtbY#@PCn2dvUE&uc%pO)xT^Q zB{ISypB4(wP*8Bn+i*}l7_C1oC$8Oudv7y^QV1SQgg{|9AgW69eSy}Kp(62~h8**A zA~$kav0vE>U3?x1GccB4ed%zrBG|F-ZR=KFel)poe)lil#O*{HLb6co-k|SoOIvnz z?kPVNsz>Gb(cBy+yaA(dm+DdcXLe4be0a&~wxUlt&x+ggP4R7Pq;TS$2t6u;8`AsA zYFBIKxQH1z7zn<89(V2uV_MO z5XK}>y+L>*MWt@+l`Z(k)&;uTKt>b5V+a#ZRVs%=bn%gfIRV0g7)Lr0x?OzV1IAj~R zGOPL{wPMcB&*r%ghL;@aTVpwPR%u;3)C4Y4wHN=`#R?k)S&%?zP*(rb?#?Iiz6llA ze_(Rl5g2Q|cyq3?ZTxuR`{vzbgA)1^tZ9sIz(TXCi*n4%v}*nQ#hGZJVCYuK zTZS#{OV;#+*g1QL@UiBNR7L`EF0{Uz!a+6GODyrI{?hX$_&_PjdSD5ZG3tBO@UJ*5 zytdl7O3RiDPHe!S*K|DRmFHrko7r3O!?*Fi!=#1>J#iQV!AgTl~J)sF{i&!J_Dx(pF8s%X{gf(w-JwBlu|EK!vErtVXTgz(Kt&EKB$>_ z7Cw&eJzQ_MvZ)zN2)#@EV^jI>`=#foJ%_mW0_!3gRc3>5-G5i`BEtWS*&SY-j)K|N z#sAkztr}oMIQahfs`KQ(>wMsv3~u0^?XE!}XRc@7!jByv^<&uO-ynUOuS1_c>6iK! 
zU5Sa!cWAr6M2GKGzx=aB9MqS)N&Av8k{#yuU!fTW2BY-<-9G?hvbqakfBySpB>eyN zhbT;B?~|#bC}qa~dqXZR1jvWaG_TH}if1zj38B+#Eq`eVo;M86RtqAU%98(krQtLb zz=fr4w7Bl&BNFqA(yh*v=^T%1#1Fhr#ioM5XXUeotk`@NnxFYVNd;Z7FkjVz{J%Wt@h5sB1KNQg1KRPj2{HuRK=tbd{`@qdV~M== z&fi3}&vz`*Z7x;qTlO4tEg0!hRHI+N)dxSOZ*VlT%Ds-=IHwox6^4pP&S|R!u zTI;|&@BWO}WC}YQ+F?1?Ns4NBQ87IFJL_StwN>-{c7QZxD^6xk5xfqhkr4VFY9NTv zn%v`w%%(z8GM0>UVA4?(i`qFFKRS5Eg$HVsTb|%OxmW5vQ2sUllW;ZDG*nPWCD_0q zRb=;OAoXn+D>mwu!MNi3dWoX)b3)2#P6u0%h}L&t<#hcq;%SI)7KE|V$b>8u`CF12 zIBfoPlZ`A0RD@dH@&R}Wlg{VmMPr+jpKyWsE>=4-BF$o}DvJ7ai^RR<-2nc$GI|>> z{*#9tLt=L!?56C}&1YwB5xyHM3H?yu6j^j_MRC5M8s^x^>?iR6zN7fWb7nmb3FrpLe|=bT>nG<+fbH%aJ^IOJmly__}ndz1NJOC5Q^rn=fAo z5o6bz*HeL@Oy`dTUryR}^tv7-*GrqYOZm#%DXMVGkKSdtZg4F3J7>(gx*Cc>+~l>J zq2R^-TApswtj>@wNf3zJAKI~A-@zY|IZKFfyVqOWENdM%eba>bDwOD;swm;hGIKs~ zt1h#3x_o21KM9?RA!G)mQ;8*uZUDsvcib%y-Bz>W@Ma*`o*3t{@pr)DS+vnn*4!n- zirb8$W<^H7tHJo6;|Fb?p?lL!PO+BpKj#D{_W6uw>>+L+|a*oy~W~A6YH_ z;)^{uF>H9?Ij8z%57!D=5o2CO>^GIM0;4jy%Vj1wWB2R!DDg|S%s}H)-OR54ABf;a zO`Rxz9mMTyyoOhRxqoHB#*Hz1-}`qa%01iTW#);CCtr5E1FRp1Grq9T+4(Y#oLFLa zCQ5Ec$$JF1M#^Y%tOSqru-WRLh|Y6N4lHf24Jl zLj*T`AKogC%U{F9POmqnTGPDTu0JInV%~pt)_bPK&^;l%F`iC`hQ8^wRP>+U6*H4airzDfV1 z`D=y5kRcrXBVzR8gjWaVO=agQ!$C485piXT)buZL!dG4B09!OO50eW7m$#p!7tIlU z`7b*%;UqRL8cZ_cwfukHFnd5U^>6xL9bi?ddcG?Q*4hnW1_cL-jvfXSl)xFQqyZjc zvzn$VPkgXgRa=R%U)VN=2z3ZT2g+HCdgL4ei@rp;FhM{e@2FcfN1e#gp{DR@_~=mB zC=mFPqguaGlD@EB(g9+Db^IBJjzkELem%fI6x%3YTcvq{L)j2D(A*fmj-XMVa&mHp zl$6k3`a{=G&!HHC5? z%nL(j>~Ih4JGqK*vse1HlbfRhxWbtDyH{4;gWV&c5Mqa|RWdXFqu>D`p@Y{(l)y)n z2BYvALZDyAo3QJ>xb>w!BaAv3Er<3#Zgj4C*@asF8kti%cluU2UOS9WiUdNJa`C~R z$(t!40Y6I1;v;qGfo(5(kM??xY<8N2WW_UFiymK@r79by-e!1=ydUK)752MKBN)7w z%5wkxwcp-->FDUDDpRYd>xqql{fvr-io0*p%yj@#Oahk1Um4sDo#gDlxnU79|HD)& zd9f%nu&t$jIB#A`HxS^7^Zk~WA_bRYxNQDcuvXE7F!Hz-;pd8_?Qx0#pC{{@8q5l# zts%bP^@^8B49#u7zM#unmAne>rzGj`x2`Q9Ny69r_^(X<*t{b+lGJtqcUg2`4HrWM zM_HB3;`}9;XvH@r&nMPt#CB>LHg*+V$E*w#N5-8{5)OmUJXxc>*c$5(RuvCc5UmcT z+Zyo9?yc4=YUdN{je=HD`v;bcbA}^G1iz3$w;bpq&wy2FtguQ<>{p7u*PE4<6_3pb z>WP(=7~rckx9?b>5jFGYu8Lj#{;Gy?az0)-Cp)3zj&i(7oB7k#KKh%nQnH7G9z_ey z!A8Q0c9p~=OM}E!kk6>!r6$dO&_|Gv|7L5A8nEJyqv57sIxx7V8xXzob4^4x0j~$< znr65Xgw}ltG+AA&qlHbjz71LhnyJ!JD}~5JTPWW?oZfkHc6Et=cOxt!H-que6OsLe zmUAIRxSgJ!-hD`#)wkB}_S%5tj`o=E3i+uAoHL-P=yf(&R~jyn*KD{LgM&AXnvMpV zEKP||N&tJ?@huve`-~~`Zk^KT416BcgW*_A80-kuG|u6wIhWo^Ju~(X`(9A|T)?4T zXqt&Y^5uhBl}7FTtbsuivq-bS<23P!@1ygIUZ*|1&Nd%vGX3H=l@%GBg8BInHj5fh z?l3Zh`ax~c2*|+H4B>dM>_|0{d!Beg2<^eSc9QE!NZW_LE8@}^3#>1V7~cn?u*&Ut zzI!kd0M|?_P_k=i`-4wtf>9A9er2AzMWPc)ZU~wV=#|WMRiaEiA@an^fLpjI)>Hl{ zG~`GuairD5W^Aa$BDv=5Unfjff%1jhqYvM7txUzT75GAoGL15OU)__LW!SJ6GgNY? 
z^s{U9BW%z*d~FkZn_U7iYF>PQexLgNmT48eBAr2XgO&hnv zbMcs@We)IhT^>q?#iTF;B)(j~0X!l7@H=qCUgp`B`n+GBOV15~$rqpC=8O%D9TS~F zIaWS@HpmG4N4&ov<2t`6!a18E58v96tFPTtGhgSbvPwG2Y=jL^FDc$@^r)zyC})U* zX=ugM&VyE!gazXLdJ_XJm&R3a7SM>rOV#h>3Nj^2)-pbPWzE%@F)x%(OCOz0uhz3e z#CxK_=*}&|GR9T{=&#*V4&e(tJx%%GX1%HUsa71KX#`T)VLK|>@qx?Uqqdbjo^*M^ zG=1Hz6Kcs`KihZP+34xlIE*#d_#!ULwVDTB8AQQI$4)^ec7s{NA_a9c(5<=4rbav8 z)o5KAvHY9Galg=)Vw)6Z*~P*XW`q7==8ou`$0Lga0T)bUe$S1!ZbBC}$oJ#MbVNaB zt{6`Pg3pBQz5|RmS0~HSd%c{|1~;$mI(%?D?$#61)Y$-(Q=Wf_?;UeJkhY)MR!tZR z=V!>tY(@16O9ihV|6aEdSu_{9rgXw1R(K=?mIT~YgjYD02S0>$%`zre4G z^vOPl=Y&3-`uV`D#xbwwjJ-Z>7>L`f&r{61<^FWjfToCV9;bJbD0)$M&b{Dhbc(UP z<{Wl>iG~?yK22F{z`oE$5Y)-0HXRB;z@sXUm!OEB%}f@>_jP_M#C9`3}fb%tMmvx=FKhpYm{Q5iLNZG z`Pp#^ju1EuPTw6mOun9#YXGM!E1^^ol_aTs5^Je|T>duVg=H}EHXkcS0cc07B?A?d zNdg1Oi$@M*#T=?>jh2c7A=w25_<>jP$2V*x7Cw9{%%6l{E#nBSR@r0ofIJ?daeg6WC-`F2d6i{M!PgrtvsR-S z+?&EHRV3dOsU6RkDS1NI###+W5ezZ}Vl~g1)l|48c0TD3Yn5c;!>iT0|B&;gu!QeL zZifftCezx#4ynQwZc#)azDC^n%*oOhg!%i!R+}x```L2sv1+_dl>)1rm2gxJwGwu} zoQJk7DE)uSs=cu6?JDxQazlvW4%{^r!U0M`bY?nlG-#uGO&Ka8uky~p)%_QddvX3< zqMGLjrE>L7GD{pcSt2STu1_D}yZtUE5pH5n4A3Lw@yDK}KSUp&Df>Xzyv{;?DIwqK$D~F!n>$Z}?+-KV;zCaDB*TB0kE)cagoA?j}rWx>PuF z&d!U&#+Kz}o`-4dDGWcbwMboqePAC@`e6Cd&7DnjF!lF)6LE{xu>z}O@OSHQlSe%1 zq4zq7@B?8)au{fbEj$In{!_2XcjS&KQ1Q&2yYg4l()xvf&~m^X>nHV7kcE%l&Lxq1 zPr(}^dlxTP;Rjl}`jl>Vp6ITs(c+3POM1x(C3(*aUS7D<@!tN{h{>G^%dbq#N#^;hXe}L znvs^n!_Pv0GCFe}){0L5`Vv-()1-?7V0kCRp(vJ@c6#>%-jzXnJHF}$t&iX6TY<}Y z|5T2)9i$w|;S*n+$%s-O&tuGwqz3W^#coPIc~LJ)iSsaDyJknl7FA!p`PsqwDC^f` ziL$h&$%U)=6~YG>!OgR1%gytgFiF9;F@BU$ML_icc%{TIpxAQWKX+beo@sj8bU-K% z%QS(@sv*K2VpXr}bKC1&2Vv|>`E@RMR>7F=i8(L*XlTae&|*%^L0a}@2hV;>=8>z! zIh_G)kP||}p$(vY!FSG z!?awj4kPBEy{G5rwps#m>fHuqAU8p-g)zBP4EkaCphM;wr0l_|{pzkv8H!8QgDlCu zCHlH7Lzyn6g!sKS)I3Ppl&_b(docwb!E7DsI_Ad%sTXndl+N9-mKhv2e7MLj4;5(4^+61F6g_;?Xk~Mn>8e zDcl>5{qIF14m@texh)meAEtyXOU_VpE2vbG4QPM~+5=E`yV zb*@}fZR0exJ)OZUd#tEjOmj^PD|UW&C!IY{Z{WMbEyrl|D5m8VpRRaeliY1P*MkgH zpSjQdlO04Dh$N5$%v?7@6F2Ban$9?ExgTmCz4e>___LrZ5ck7=-nx;+g*W(%lmBFeb{^h zRdy(!im{|@6E-B+4fK;Q{6Z=NyJef%?L`}A-dgb%gNMN^p6>8744%JIh4bdRQGvYk zYHZ!dj;hXhXIjdDg`rA?0BKH@LS&JSzJO_92q&?~AS;#LzODOQDpw6XG zji8aN$a{d*Nx?o4+ZqA3v5Vy)sFYW_Ly6!?J^d70K_r>cTp@8!V+a3*YoZ$Pr2#=?3pDSuYYX;{+LNJ z{wiGD1n61>%s0{O{0>#GjXkMLXkr%O7LSIt`D$>Byy@?Kp>a#NURYQaSf(b{`F$U? zMWDJZb6*83B~1|}oJhkPjThNk#^l{4g`@xGl{&nfX145ZrrB>8MZNO&Y`_y+_OE*& ztvke>aEJ5kmC9F4uQ&e!M0MR;wbp^cT#r@MIs9B?s}*mh?tkRfa^E5h?tfO48$094 zyLPMT9I-t}i?b=cy_Yjy8AVO;$*L4Bt0@^ijI9&rOR>27G}Cj*J?Y_R)pWY4i(Sml?)e7#3$CP3z$z+0{f9uEVMKLjN=OCyIhp%+cYJ&MN)F+d8# zIgaXT_EOs8S3GAD4}U^kK%FOWW2Vp%xOgTpx_fFu22P_1;o!`Wz@3^W7bk`&kN~>V1T|cD@^F3LVbFI@UebYrd3P)r zsjY3wl#YiA>a?czHH%dr8!TR4-l!F1@}G(Jdm}7A`+mMoN_@YNM6Y>zHbA=7$4>3^ z9Ogh4dCLps!mj8w&@Q_v9Y>3MaPK&r1wv>3I<7m?oHZ@H$^SS z5<84Jl5TGGI#-{+4F99unkQM=ZT&_*u-huYPf(M)EP)a@E#RKWy`TEUd3-P5UbS7& zJsIv5EnG1RI(FPs38mj)Ohn?~ZyPI4S z3J(Aqh+7lZLZ|9A2N99NdTzq5{WH}5Bw4<`;`T>9>2tAmWUTrR_5WFL6iUJ#ZOLji z038|}6t%Z!I={G3R#E~%X4Ng*u=*nQ)E}zfM{`HkoDB+~lsi%qbj5uXdZ5|vIjQ&@ zMVv8IwRQ2TnzO5{tc-!vn}G;*D~O)dWIgaq@aveoLhf-LF3ZEo6LOAj9X!aKBlJH$ zXVTcv9B)!!B6d0{D=Q{5lW2Qqhm4sS4Gs?OU~BH^)r{U)m%D<*8L2{u(IZdfk*8es z9W*J*<8MPp|DPW@ollUI2^VYRoKWgn2C&qJKu4bFRd$%q8IkG9d9QmFe4V97xnNVt z{^P%8N!feC|3rMpPR`0IhwsuT)A}m);U|0y@TA{TQo;rYrI!IF0%@0`@rzL8yao&y z%VMkN;dZb#vbXSbYJhaTU>?4b35&q-BJ)N@2XEx_ z0PE$H(oj+gczE#n+~S5ZU!m8tz5U_5 z+vs6136YLT+>zYjEg08g%fpF_s%DqE^NWxVwy?)|6lv>jh+Ut~!`YDTX+}g8KI8r2 z@b2l$9C_NWXqj~#9U4-Z|Mrh=vMe!TiMGt9>B>Divfcjkha4yBM{%)K3ZsUWR(N;! 
z$Cr*Z!TNnb$1fWb0^RrS?eRRsny>{(qbN@Y3WrE`GWD zj_-_x%X%t=H!96?)}#pu2?u#Bm|dat7`7c)UpwAM&PUd4=qAPdCLhSm7TS??QXnCz ze>(X+I;uXO^xuHc{%=4i-SE`{jXXU)fjR~n1_m)6B8?V_(4LKa!&IOPl<1HiqX$18 zWrUqWx?EsCc1(-GcIj z2PBm}qKrN4Tya*l(}GIiqQ!k7&zKrJe?Bd%!GnGe4o;Q}|6?g9jb9%AD|jm^zZ((K!S7A^*z^QKI%zoD7G%=w|(hL-~I!acz9^ zartT}QRN#H$2daZr1iW>TY^bf8hUzJ6dYRom%INxJp4&tpXBY^w@PYi!PxW~hC4$^ zT6Nagkx@}q=A#5mIv)E%AqJ36sdc$Qi5z3aL!LvoH@BSw-att|nc`}@+5`3rs=xZdqefS;_B0!Pg z$$0s05BhaBBccGBMj1o7TVYn77pn%03kw_D=;e0Wyj)e3Y*(6HwL1I+=fDPWu7`6* zz}Lq8!i-8cs16n`uF1=Fs!HMh{+zgLe1aiA_a4f}n+D$;@d$;h=6p65`Omy?Rx;sw zNIbWV1ZHQGoU@i$K8%iz&aQY? zS5~rY*;ZxPH@|rq4qycG;Co*jU+0pK+{lF<_1f)U%%_LTo<y*y2!?wpRRS3+~IdqFJk|Bryl8i36+xa`O;`NjFG$Gjul$FfzozP!hw?NW)Nf z5BOcbbJjX{-SfwtD^52kyZR zK8dC0#2d#9eNp{lgxu~_F%2rxZxm+IQ6Q7I0gmUdsVaZibb3s|!#Y&Lk=i~e5b%*t zG!TgHnEkgPz9bHFwFh4}I`i^cKHP&b0tY@b=J|j3#tX$I74|(tL2_l1qzBHCKzM?~ z^P1_G@4&In@Md+$!?TP#eX4+-XovfM6Bq;m^gH}*BJ9?hgm?@SvY<{>v= zm5`9gW66qmRAtn8@05FsJY94AWfZzS@<`d9N>cEt6fe2yv#Y6-jp_(6j1vF&`tZ}< z?{A%U#Cp<>=(?A5&kjaZ^*$|z2RLB%EJgQ6E(C%evjJpw+rc7d-@_!BoRpTK;dAfv6>t!I>B61gm#^vi`sC?oCi}0e6_t0o+!Wy>M_& z|8_!2w-s3(3zj{&(XE`%s2=X_!CufP6yy`nZzhjpQ*ge1P-7&$(PwRV4F|7%`1S|z z>O9~x-$6Nopa2zj>ib>pyNk%&>WtqH`un~Bp+fbmf!OylFU-S{)Rk^H*dkC4kI~fJ zoCdpJOOw+{2ndh>An_B}>iRZedlHHI_H=Qf8emtsA@jBrt$W3 zVjEXssN6-#SG9lB1S2ydqyvL$=@m|g$monIlQzX1%LDY ze*a_BrCWo)$IApF7jhaIy>2j|Tm7m+A5e}|;S@{uA_TGWjdaxJ947I>g+RJVAlE~w zynY={n=v4wqS2YEo#0W%agKK&*|>87|8TTz35dy%tn353SialUPj257OC!AJ?pged z35j`8N5yz82Ut0t z>5VnV`#(>%b6T>LIk~t#TO*2}li&1Z#nGy(2brO)zUU|5#0?p2B(pm?I{r*e4Qpr+ zU0hsTu6x70q4n5rnP8vh&v*v(lw7DVSXHn=v*=R&l71tMs?IkO-In|C2j_g^y9;iL zVD8QjaZatB!Nv`i18^i-L)s4T`#_201 zJu4r44&Z}{oLS)W|GLTcP)ww3mY}0vfMuF`L7t9v*qw;CS%>E)#`n0=Qu{*EdL z>rmiw0`}^un_i~nPXLc<$;EOM8<%{5-fa*8+RFDVllls{l-ct_{ znrylWE$?$Dzxj@bPfa`aT${A}^9#md8Xi(Vq`B}ip8FuyHGy@eQ~O?9Z8uA#pW3-= zO-IIG46BL=ioz9;!I$0F+nKhHc47QhO$kh!`Clat3i?b{*I+7#SIXg`|#F?B4?%re4OEW>K;+ada_>i3Q@7t)(N^xE|e({-dPM)-io{ z=Q>%)eR@IRk(Nm3_7LyjN)f&V{Of6#X?RwB_YHrm{)mz6; z9~|I@hAzH3aEUkZCB^wkidRw9N=GndpI7|t&)pYaxo>p6c`8asVAF{(B)}%wt_l>y z8(ls7C@SEj1L${tQYFVJ`{AweB3^TtiOTj9FCnI7YlZbBY%W7e{Uz1GrW+6PG- z7+#JClr85w+Mg6-xd3Hnx()cXrd7{1oo=(I+d_t)M^Hn?HG9sz0{f(H8NdszV$)CP zu3%uNDTgcWcMET(SM+obqZ1Q@fCWiNNO+0J0HcmYU3}JK9{>=xYCMp@$H>ku<>0`D zwe|mjF#xw|6zk@RV}S{P<$eL4iWhc9|N8av@Lksfp1wPYu$OT#p6@?TeR3yn0vwR2 znI@oqm*M&7&3ze_qa=N=PwLh_%J91II&)PJ#_}h3?>l{FbGrxa!?Bu1%$K=<1#IHJ zKAx0Vp~_L+vxEJ$Rk|xtnsGmX;^~1=12!0-ULydM0q__eLYH0dm)R;Q61L-$-<)?Y z;33WiEe@W%p^!Q!7@$geYYMlHe!Fh`o{#NGZXAjg2 z40$hCkM^^BB8F?YIFF2`_cY4V<7Ws@v#as%AR}KpWeX_@D~wQQb@h`QK{547UOH z0Q=z%Cz5~qKJmMqjGRD^Ch%U}JfJHhVcB3qU`L_MY8(eaUWQz{q3M-?GT|D|-;ZYl01mjBBuXOn?cLW$vtKcX zD<}Ma0yy+FE)1-cQpH0Q69F2C$4(I7CeAJb8B%0KE^rA%TLAI|^@6KFo=gPKS6Cx% z=+q20(AQjy4I&l$^ukAiyn=?W5>xtG0QzhJKpj(6;siQhU+iWD8paiWy)8BIGh`U_`tpR@n|lLQb%z3- z#`lI-$20PuPBZI6@2z&=K|hsvdbj1gyV{iZg2Ru5^HEA$y)tiR633Nb7^92bmPc*0 z6WXpUQWZ#UKNQk3sV`*S83yb~1p+*A#y>-j(qdY%pb`6?9 zo;zVHw)MWC1AB!6ckA=p`q*3zhYua-H=Q=*@z>;4IT}{Xp;JF`>wn)YDN)f5BNlkA z?y5^NmkG=g9UR2-yhfspbNAWv{~lJ$V|@Q0;`x|zOid|Do=hw?CJlGoVCFjb zE)G_Z;#*&TjX&+$<~BfI0(jMLG5OsTg8^iEc`y$wm&kM7c@6B#mkVa&|7kx;``S@ZT%QKPF7-EBd(F+ zfmWKtooy76R?Sw7u1y~<)kIX-iCyp84dhc#_qY5_4KFAl)=rtG+|L9xbaE`yFVJ+#4qOR?jigkKWw*%r&rv)#fEsy;@i6Vk-Em$q-F>@78m{ zgWLsktpx24nr(obg)-3rn2&F<>DCz!#dONtEC4r^pEnUNlS5E=kyWsEy~&$Tlc+?% zK`a6tQcam#)BNBs-ixpQ(oMAVE`@kPzKs0l+$r%|WTp=bsn+49X;+5l(Ids#2udEj z@7KQ1Rk&;67=2oFx;gGX&%9YrN08I5x~iMFQ}Gu?H1TPFn8`hUrSz9}W$P2XYk}7? 
z0cEFSOYikOmQ?o?l4my^eakJYUa|--y`4C~>%!a%ZUgBT+-2AL;08%6Gh^rzJ_{23 z-t^t?mH-Xs`jXH9_s1TBlbVrHI0(_X{@*(V6T-8S$@uxc9hSTux_D2joZRR?45C@S zUYkwF`Q&{TPl^T^8|q5SJilP^@&vkE+D5y6qk3ov7Hp&sm0_Fz{&gyJds8 zpz4Tv!Y}nSE=R2II}bj^%H!N|sSn2eXqxzZ&$8b3I*?@Ee!InK+*6b-)%=}-t6ov7 z-cvTL;-u~!{a`_n@nWy*%R>K~DhC>#XXY;y$u3QvvzFC=Ykj!PnWsHF+gIks8Tti| zF9;QD&`3Ho;EGZGGa3{~6P&f=WcO`=>5K!L6#Yvl4bV-dmp-Wy$9edCEQ_NWn>X=_ zY0_UK;7+h&h9R&vMQu9y)Aa0gDBc9q!$3yT|MDiJJ;fTtTlAWV_^&+_dD6Xb{p-&A z$~ziI*oYKOKyXC?Gcqh)o&EMLKr0ZSIL;)NxAq0Jscj_zWK2Jtt%5tc`W=8IJFpqh z&s{Nmdau~%O{3B!$MbI8>Qrie^)T$;cFx!cq_MVtt|A_%`7FOIA@E!*+(VB3Lft~& zXP-cT-9{2U3`|F|DJJeGm+7mxVu}?kmX}Wd;!6rWpZ_s@R|Ur0^uC=){lA;QQs9v_ zHE`elzDRz%RpqSlvy9%uF!p)U5C1(>ffzapyn^YPdk_i`U_yz;XkSQ()0Z%vD2|rP_ex2)103(10?@JMCN_EgxCIK0Cq8I;bXt z$5Yfloz*(O61v(C>V~k#-QQovOF8~^XSRgt`b@)jH+T0(0kStuj%F` zsH-422GSx*8lH6@D!mQh)R#!hMns=iSSG3spDiUB&c;w#V}6NyE)n740H1yu+IE~I z3Nz(G@(PBr01qAQ-2b!y7aiPcJE*%W!$L#)MhFhoN%bX|su#cQoKSl)+y{L2^?>uu zV{943z|ptY1sTrQVi^{}UMj~r_*yagzr;#p_&8GK8m9?UJL}X0boar>`r&r6B757~ zUg-PwmP6;U`=iZG{Ovo}u!W%gI;F!Jgb`dgasFA*;`p_PW`E)iL^&YSwcN;ddAQHU zA&bui6tjwy&pHWP-iClyiDf-DzbEL&B25mS$!}{FvY0m0=jyQCExtfX6w~YFmzngI zt;OaQapZD{yA?G#ovkBCu~q#{UM9QsXI-94>nz}6QDtJwM?UPr=Av~ZwX+sg;xsx&Jd(~fC)>$BWPF&FK(a391v&jr>}9WJ(BM5mrtuJ4CZ_?b z#~B3l+KKXpZ~uS>tQ0GNg~oEEMweqR4%?qmt%#~@}=xkF>cRDYm105sbE^mma=Sw9p+C~ zU;WPO=_t3}r_#4Bzu|lQVlC3}v+jFD@w0c(+9G;BV%9MQg#<2?x03Jz1>y`rE*{1)%J2K+Y}RQ z|AQ})-z*fl!SiwNLk<!rmn=}2l!jwhM`=5>~Z(aOgZlVfB zXQpiVsLg^;wG<^NiAmN14f3|?!}@lECS(dPXAMrjfmSkJJ=50XGo#_#`$3A-BZ}^^ z%ll6_tcIHo*K}Y4%v*h(R$6CGGOyP^{CU&%t}p7mLiu(~)QbIud3?;FM0V?wc$>XC z@BIoAXHTQ9QP~B(vx8bmVX8pXi|ErW@3ZApXr(jKw6oL0vTXA_%-dE+V^00&bl^;8giI8$?t{Xq` zMP1-WfBrOTd+pj{HVr!)FY-1UC$exOq|RYzNZsc9MnZO*_4j*R628lAef3&gapCU=+JYui}Gy^uO+jL!sdoGu13yy+OeAF$z$UhJ_oyX zYX$^l4mrUDmwo(m`V-PyicoIIwHKR>`%5l_QPp!eGIhGha88F=P4V_ZL#g5SXJ^x9 zV(A(&apxe()Z&wz?)}9lXO0&WP)zK_zG6t+r06b%apKnO@gt|ir)SY8@2AfLBF3s# zdl@#z7>o~m)KZ!+eo%QrG%xnfyw7rn^CB0vk3l33OOMq>&+kxnIGAa18L>hckc zt@&Hi>h01^yKdJw5245}-WQyA?o*r{EZ2m}Yl{oHG;mO^5BFs8A&;|OvxR3;oYyyf z9IkfRYE6~@mZ0VNulaAr=-baNR&w?T=wP(0*ydv{Zb=kNXk(aco62+|UbNHfo*6QwHH;!(-xwaU zcC6H;!K|oqEH4ITkU#x2R=%xXcc8s;v?e`_mlqY|Qz-)t+Zt;21ixWm$cmIs9hIC_ zJ{;*Qa2|D(?G9c28`S|xs5-*Yk`;41AA zd^1#W86U$Pk6NPSPplf zODB4?Y-6`>c}Ow;$R<92lYA_os&Z{hJiOfc(g5PLxEl|xl{7w1SbL@-MEXfueU^)A zBhC$F1et2!<>nY~ZNYB+yybQ<63}vXgZvV$(jQ&zy7dGtCUjP;K70oS9WJ?p9b-Y` zQ+!uf1tqBfgzapT*ajw1zsGZh$JY#Js%gSp$cjEr~=vSAWV%@^qNm~q${+X!C za$&LkE9sZFJ;j<&#^*SnQgKKx1UG56W;2IZRWpDYQ=T}`BZ5@4sj&*R}UF!=jF z1F5Se@mc)pGN+D=|CK%X(E5z*Ukx|?Os;y1LyQ+;cy*&D=l|TO+2B0E=R|pB2XQ_s z{pR^U7~vqaFb<9rw$lk~mxdlkFb62&{s{00(84z^OC1yf@XzH>|L59+kE8LxXAE#v zKDd*coPDz(V(6+hA5!8adP`?1qt2a@_s*5&NK!PGV1+~vBuZer(6+a5|8b?U?3 zcv!hNXJ$FU0FmL4ferFLCyuH|(g#d?*4~0KHhcgnDv(LCXG|fBr>)8mJ(PkO^Dy0( zBIHNCg6VsM4uV(ZInkX&8g}E$(Wt{ab3zbl65;v{dTBw*EY6sl-@KdbJWw~M@*%Ok zh(ECi^Csr5?AX$KHR{%~%&6j78hdNm#fpPRvG_-)$i^lO9WSt2tU%*KI$2yr^Q26k z8z{T=Xs71)h~#fYtE6$?jLhG@+gxRpA-;SvzBg%rs->G*^hhIknJB$L9w-6$*34-7 zY;(a|l7LDJ?q`%|vR$1I#K!Z#P-L$Kv}D-?fA<1c=3fS^-uA?A^p&M z!ImRcD#uLXRE}i5FYF6UXx+&d*=I8xWt*xbt@e%~>Jis(=G#Ug>(`=Km;}v;e}ZzR zPfU+(-m!(lZ;zIbkrvlXBJbOtQMn^1ByQDPjf@kiZDaV|-DE<&qL=>4(-1>WPy;;B zaskU85R>$yn6S79$rJp|5T=nY=Dq*<1XA=dwqcqs z#?^_!W#>4W<*olm4;^{lMdrxv2@ZQVJLfQChYh~pCL^5)vpnFCb+<$L(FZE^Tv5E^ z>KQ}X-tGwW6(am&({pYTN6Y3UdrwyST&QlnjX(xU{WE4N%3a9)nECIam4b><9bN9w zquycT$nNqHA?9fLa)D`~w{1+gxQ#n%d{)+NANzr^5_of{#g{-^2c&oDyN@9&*~M_fDyR#%L=DJ$vRDf1IH zjit}*R(@%(Z{&rpDI(>;!y_X9xh9yZN{WE%ZP37;^8Ml9tCgMZ?%0p3qhZ$8k8x3Klsa6ca%>MH9;sstzKFnyZN&C-n~ZT!5&nnoh#qjMOoKNEc9Z6bk2l-)4F4_ 
z*-0*&_n~e{8OAA_plQ;e)F_>hWJpy~sW@|HH>&Ob6q$$#ac|H*8P(OJ4-ji8AEIDM zL#yi7CNU3_gA0;GPOW-On%!NHjSD}YhM$p}3TRAJ4%yMRCaEjQCq+BjA)iTyO-Sa0 zV?7VJg^>L)TXxFjK~R9{_bjIXi8QpP16=r6UG%p}hmlh!ei@$f47FDE8x>jI^`R8~ zZ3jBrFgQP@Mg9yBOaHc`3hVcbg!cwq z9+FX!fz*{gf;a-<8@Rkm$dJ2~ok)nYXpJ41OEuhcOmU?7m;fPeh^tU?r zd8k=~_yW!=Bh9G{dew+DmK|EiVcp4WA^8NP~1UB^o@+l#+#`os(JE zqs8myU+B3^y!iGyjR^|XYwlhzE0tH04~v^#gP9q`Rj@I-b;l(fQZ#y4X~WdLzBqz( z{QXd)menPSo#+oR!Q1_5-|b?^PzT!sK?ze}tW+8cPA=w|ryX`p6Oxh?>IV0>2v*LUYvcrcEQW+a@daVW zI;aB9$6Y$|rnE}@@qwUJ1AT+$1ZjTSy*GO{tI<5%3WEi7Of05mJx@~amnUY4p=n*rM|5#Z}u)s?aQUD za*LqF5+^^^0SI|uw9IWwz&1K2z=5EP)s1jY-a0#PL{e$z7tc0>f`|tIq=bm1UE!!Q zBMAL#J~l%dZlFJ47##ysIq;UP^U4=giMBp1Zy4Abt%3kP%=1{6)G>VqMMt+$(q`J* z0313fFKZI;;rF<~XUSNM{;@6Zy6#O`-1?O}K(`mXN4l-mm&wrglm~o+j)L39DX%r6 zlpKi;Z_JZ)ai62B&A0y8hh7Pir4@2}1HLo(DQ>Fe3`rl0WNUmHgwR>ysn8OK7N{CW z^9mCOH_Vbxu!B%a0iHBN3~;l@#;Y*D)!k1ho9|r_^-b@A?RVu6pJF+>n^>oNYCpsg z10E&$Nw^%@y``1`d7$mqoA9K6FD^+wMR9I7@0C^RmY6+uy(NVK&nf$zOpa0tydy6# zPow&Y|h<@ASI! z!4uvmIjv<}d7M67k;gibW!BtcTRhy%sj~@x%X$rGpM`bt3J>fGx>Mdasdh(EMDqe> zd{PdT!#>AK8MzWT>+|XQ8Lpz-vcn~}Ht`Htv}x~TB6H|@g~C0*XU-`ENQd^3%;KV4 zV+mKZxUvscxsS=y+v~m<8;84xwKdyFgHr?{JV{TPi=udXIkH-tW@se?;GN-3Q(rwg z_n4X9OGeUU5C?8OB21=ciZsCZkyDm+_Dl(3EX!+xew6`Wor)cY4g*KZHBX$}4uK(QcEmdVuw&M0nF*MgX<@n+X6_uE= z9E9-j&nvDEa7_YMJo%f!cRxKoZ63HqypN?udb_Ba?p#z!(R0`G<1UR}E%5~^U8ZP_ zzLhY=OgtA;IFzxeI*O~%;b1X;N=RNTC1QkYeDs%XzM6Z=ajg+rgut(!6X2P1ajA3Lgrk~y-aiBD zZhcF;UDT|wR8UrLprov2(qm-j-Q;O|2s^s#*h!@1&fh7)$FBEuI10p`qdjUEFHuM% zs>{k@aC6Q*&}2_-myShJYJO1P!d3O|Uhjf(^@1)Dp@$JYA%QCds;b^gi`><-Ol%O@ z32~oa=NDSw4=lSC)f_t6t%lvurehM2 z#ZG0V#ZFCTmhi4nyQ4ayQC=Jz|I~IVJzsC97#95kHzHW5Tn#`!-C4NPw!**FGWZTG z1enmF&dS{tj3JXVOYqN|b$Ih}W{pU_d_C*P@OvNgYkLi2)YAR4U5m7=hxug>m~+;d z9rcZ?Qz>y!OJLg5gay zt>18d_sC>emhBD(8R7Yi3?Zvq#cN32RoYWTb~DasnzoFn8k5mGM$07yl_4>vci}uh z+bR+{tvZPTP_XkrReqQXfM**8K}`fT>II%cMUvb(qV%$TKO?FPb}$OY61_+zvt&e0 z=uPZo9)!G2d$&9l%#-xF8@GF(ARvrs&s9d*iys8&-!X|7!dXG=_3sSyMbji2F-B(_ zyY?&1F?!jDjR!GQ#|zLU;l)D!#qwTNu8>ShUsfaWPRK_@rN1c_Q?(r1IC2ZaPXFWE zm)1AQi0^&l72-baGetHX}y#*OC%4$2df-6_u?B#=CdwCJ))aS8^ z6yUb4r&cq~0q*s_?(L+mJ-=RKI~%8`C8ls0GQq)-F{N*(t-x6Rqmo-}Ui{zk7XQ}- z$N%?@_lE#a*Hvd8%Et(J{za}VaV8Z&^C5b@;S{VE({8Y`9KUCxS6fbbmB93X=qY+px65!Wa1Z!c@}2giAxZzpi&qQ&!Fx$BKE4<{vy$Z#OYwJ1Le)f<6xbd{d$_GZxVSyKXniFSjx6(kFHp zZfch=Y!cZZym2sKGYqse#XM={F^6$X8m!JreGicLX)f=7Mx(|%rW3hJZC;|Wy8Yx=4ekNSV$mr(cd`GT4O=`>#uT@5?sv|AZp`F_}Kp8K;Q`lq}5V6(4{=N&1n|o?j zYZt2+`nUAy+QS4B$yc`%ShvYsg4Y@TXxJWAIHNN2odAp5^E-L@-SZ5++oKVgA%&#lKsa- zOvbhap641xWlznmlk$*Loa7gyCHPxWnetZNN2Ed%=Lnk5v_^#e$8Rqtx1Q?X_KRZGiR6SSv|NG%NlsdhNE z43ix^i$a$|rV~eE3*(J8+k5*ex_CQXV$(zB7pUa6r&ID3BPb)8wrFI-tk=(2N1K9; z1>^P_WSP}PI!{m9fYYel^m#XH)}A+P6zV;}DfRuf6qrgaPMm*?NDUCLU1La7-o(f* zQ{VD_;;|CqW~Mt6L;wm{U!%I)NLsPion|)a@*KRlKl=NZ=~+U6fdcEYuE$*XJ3vFH z&~Wt8rDtYmp@R7FQay=eSYZLa*v=Qzq5EN9t!xl)2i?|$c*h;}a*w5f4NuxY5v>2V zB%V4TAxG4{DD?th42p%-o;2~(oN$#4{1x}v8|~Sr2xMv!Hxql2+S!@3m*71YzT9N+ ziB_!wxYO9~iIzm9W5no$wyUnFAKTc;K$zqJe+9k-#;VP$C*^?Z=VDyKEuG?^+N~_B z`c`6jodA72vGUEj4yR{L!BP9bK`lAL0rP!7LI7d%fJN%Z>5!VDGHrystDVvoDZ`jy z-$Iva3aBTxpInLt})0HY~$RFJAF;^plQ{Be_^7(TnXtlL*GY-cc8xun&j*Bo&Cp3{pz7KxzX0WHRW z`oy`3!IXrBQH|!3{!hk@7%SATZV0$kH#;hofGr477P zXjZf%hQLzcRY+)@80Zqg7`*e!81mqaA_q=w=<3&h-c@IfY}g42?(&ZSFp>uV|G#)O zGFw=vebzcGd(^9ylBgxryDI9zxD#s-Fm1M{QdU{uBz=Tr53lIUmHcSaU#{DyH_K?A zZx(u*C0P7mUXY^ADI(LV{+EW9s0c%zY z4Bm3*%^u1W(`9Y+$Of~{XEsU1neErJHNgJv$6!vZv=1>+tmyZByC2lV--abRC{V5h zDY>y{Hf(ZRW|+)wNF|Q59p*3EMl8+ZgmY$#7q1*dpEk(CCs}Ppl4~mu^>JQyEp^_& z2rTNR)r+46VEO|%#>6iK_Bz#lGJ`k@0o*9*;kn3HbTQ7GbAY$ktE!ndO%!1~`>QG@ 
z`>6YqWmZz62OmAtMX>d?B8=zz%=N1rz<@qZe-MNu2 zFFz~Bs3a5}GpJtp)|Ybd4r*PABv^9bBvOGjaWR$|tlI3Vd>rHmjEIV{!MmF@$BxAu z;Rlccj-MQ}GFjw1+#C#PgG3P#BYF`k54@jJY@506l37G#7-!)hA){EF;JbMI@-wb} zRui;DCY4@Eg!}Str?4h~Xq_~j-9n6s#y`oUNc{?HELt6{YLj!PyBXcfm6EcD$_jPh z4+;6C{_YV;cQ7oKhl@E?!eEg1$!iANYNtnA@ku$on_*1Qr1`-5C6m8^+w(LrLJvs& zhSApJ!ww)qqhhQ8N6bePvV)N$jHeZKV5=>FE?WPE>g4ylSuh`9^{J+lJNM$NE0ji=$x~YQ1z)&b?-)_vZCIoTcc(4Kmk@Cq88EVi+jL zia^O!amhjPNCxbz5t(|_z+V5?9i{7Cc5b}>{k+MCcBk)6X$j~i5uH>y%iFCl3UvK+ z4+Th|ARpoWGCKiA-&;Q3t(=nrV%$%9yMktIZHun=4r>re+F4JW zC#fsNf{hJ1?*YJ{5`q_mhDS5Qcs$60U3q&{HRH1xOs=Tb512EGM%GPba3eYfBu8@y zr7@oEBs$*Vt+F1`DxR{8qG75Uyvbs^Wtlq0hxOKS+J(2&!EnIGG&WW-Y?@fGewUJ> zAg4U)DrKMNy5^NZQ-?Tx9f10za}ZtZtx9=O>EKbxWkv;Yb5-El?J-(z&kC2f)b19`*Cu62e}q+^bt|EhPg3hXwZ_yc z;&YxytO8m*a$Q)##g@1*3Q2CX1d@02rt1*z6tl7EU;(GayAog>IE}VkPw#;BC9BpZ z{cqI4oOqJx@?kLATVP>Oul(uA|hsO%AZ)8&!JSXSS z(^(vq)rEu=H(PC0h-$0c1lxXCbGP|w|5hW8Zz(Vp4mLY~2Kxlt|1|%l*eToeCfJ~c9HO33e!rolNN@1W+kLmc*}3i( z%9brqG-76w`P;?n%Kt=U;(gfggJsV(AR;HIeUw#?=`eXcm#$O6giebWI1p8${Hj)@ zYouY;{#ahK+;praq`BB5>1@Z9;^Gq*J#*Ok=yXM%fMF`iYlGZ&HP($7Eb~Y7VM?%e z1Xty!{N_D?2W@)3Lm-b1H3?JW2(S3)hrGYV>)pzC(Yje6b3w8=KjzH`P;Q0<91d3I z<#QcbSFWO~j>&s?GX@Y4Kxa}!LL4JTgp`?Q9ArWv{I)!?IU8(h z$n4}38*O8IK7?Cup5)yERIz>3aMP<$s*Ll&{E@fXSvK`!dOPFLMTaEic3yuzt3>?Lqi_YbS6@oPAJQZUx@!LG`T$BjVCLA zEWUG&xX%xkBxFr5eHgaj<2)!A*j&M!)aUfrJc&cK!)nZa4G(r090uxh5<1XUbZ4V& zl!(b> zPIxzkx;?qbSWU|WiI*`Wy*g4m|Su5r^q3ElMLjk}oXNkr<5ATghE(53sFK?kk zu1g;ot-t>F@2k!GgPC0AD5At{cf~C)R#xTnx|MoHR?6^kxw44)UBHRsr0ADD zZ1-uH$3R;thxg3^36bY>G=VyKQeC81MI#%pN93RU0 zgNYaG>(>7y6XKf4l~-WOo~X7Xf1Mjqi5F+=x`wy#}XdYEN|)Y zzu3@(%go?EdH|>Y*8eJINKTvD2a~?IYXQsxdZ?-v`m?VNq+)+W9qXsIGyp-Kmg|oI zM@wq+C5G=Z`ho@?CiWr@&ZQXQ{`;wit(QLkoSUE3T_57Bn2aX`ufuN3bz)J5lu9U< zxLy$-@nPpos_(u%NmRXX9l@F_V9~Yff^Dtpzuetgs1E5cx1-S4pWF|g8Z|{O9a>Ey zYsPOSEPf|r2fi*M)`P9^kLO!%Jk9SrJL-QVGPz{ozeH*9dxyHpEIWYQ;AOV?;v7_J{IzYsM0>YxQw7E|W(sOH0u*K)w3JWUQ zQ{pQCdChq|*y~dr&8=&gu=$8><;yC7IAN6mxLo%|vY))a3*MV&BymR=1FW5?)9TPyd0kGDbKHr9&FG?HwR2A$J@A<&nHbl!#F2RI6TA3Z9L-qQ-XFW?oT;!rn zZsh(WqM^f#oo`@|30uc;fu0PmkXv6?cVz~uyBQW#dev)rv`f;wfQ=X12H)Msy__FD zGZ%mCtFV6u1ZcJN(;d-qqPVa2-FHw5G5vkNmo<0LJb+tZ!oeEn-YP2u8AgZg;p_rI zeRg{_ai6Ap*+Ug9ZeAJpM7{1=y-Nu{qNT=`{KxY(Fb27DEU2d|>fJqZ(P-IPR5ba9 z#m}5lIO&0Asfv|#X@4?bZYV$2pGa@<3@WK9W+(k%5@(GI5H@QhQ-;Sc0kZ;3J<9P)@~0+fA)SiH}~`IljX!8EIW`QMXP>`S!@S=kFEX%WpOyszziG* z#hc9gV6VKa0s>yarwv{w2bF8~GY#=6hB(h|qBcLwUkd0n?*PIcP3)vC#YVD}&j}T< z-)eC;-PQ$IZKwQ7MSX)wUFpkJBq$ANh_RP(eE%1nQLsCo!cKRyJ7{KG)nntxQpk}Q za4Veyi}hcNU+F1v;^PJ4GecViM$Z=nT>c=omSY2yS4ChASsXrxxm0Jzlf%pmxD8^kC@ARwy?B960lv#DQ9M@ELn5&e6zcR8itX;KuU9Vv| zirpLbRb2H`ppugR9<-eSEj+URXgM;cR7Hmc3y#wfI)KXq1W`rY!Y`j2PJeBoB2Zvg z<~jWAhlkI8%uKRiQC2Kl!~*iZ%h^tl^P-#mGGw+f4QTLZjV0aO;+&b^%_lQsPc*4+Q2CqXTPu7K1aaDP!l;Y+tU8fsPjKirOTR+9n2{FRT(u`O{w)h@%0D zC=&f@JuB=6w)S#3j=%1fDQ#8XqQe1njYoVk@4-@oj^C;U6rto)TMg;ZZGNs2il~-H zjzq=fGTR14p{K)3YzgqSjqtmNO3WbnG+NvPQ`Hq}5ps)#J3W!kVoc+s?8@p{Tqai} z@`ukoNTK%g4T;$(+R^Ew+dCo6S+NFSkieork;Iy(EcHbTT|}n2z9)lOqP^%bc={la z1s;=ZGgw-8AU9eJ=>Eh z^}PI%v5yKwO14u1xkXuYOOM0&kz!^O032g~L|mK`kP2^QNd4W>(V-gv#7un2G#fsZ z*=c)K*s)?Rf3+(IC|YCK+{5sx6Gv1(KgrjEys>X`3irp|T2AgA7CxGMa*rwVt#;Jk z3o+Ogg2Y&aQbCAcT(H6kzVL!zz5Z%wQgs1Vv$Ua?XNHLgo-xLsxZm%!zpuo@CBUd0#v?4sBkEbfJndcf z4xp`m-T{C>Tz7H9j-1CaZVR4?Ev!_;-aFO*e9iW3@!OM#%%O~xRMH8xa^8$`FI!W` z%6NMl_A_I{f-0iT2mxjI20NsrC>(r21&FFhs8lkh&1kb(CGW4*x%HY?N2vnSV851| z{?F_)C(d+0W5XPy_O!Waz?4jiNtVva!U{p|#F*9C#{`ywaCJTi$73n}Xk{#&`eki5 zN0t0?HMO`~{@Zm`j|VwISANAm40;q%nDOFbGF3Nil-gsfa8m^E_oZBD`#EkU5mZ5D 
za?B7hSGqoPU!UP?n#=TT88@o!XH#6yfWuzR?O~;*9Nv3!UfUUXC~QOx*oxz!zOXVd0tk-&l4`93XmhYlN!)oXspnBkFMPTnK zec0f7T3uIN!h7}u$2T|}FYz@HNWcrfF&KSh0xe`P=z<&uEYv~j80(f-sF%xuC9nDJ-UBi>?zhhw zKIo=v@=pzw4}CsZpwGUQbX!$G z!dr~4ppdH|xn$;gI~amW@g0jlA@lyF4+c#YoM@artCL_GE;=MVA=?TKahPB$TPx1r zQfm(hT#8}JajC9Vay}vLoe!NgqiFgFz? zZGl|V5ygMd`+tY4$x!0cX&CCw(IdpzN)tGfO(fI*W1Fq|`pLH4O3wl~3oJZpNuRRm z{Xz4s+ag&z_0~$|9_9zNu23PhXi7%<;Bv(FW+VB|ic5sN7i!1roKyBxbP0bvC zovPdnuecI&3>Uv8NEl_ z-RH*1JyUuD+L31Mt2O4DZyj7_!H8+{^G`=Lq<3@KrrpDh4?WSPc5CZu?Q@MtN99Ig zyIN3kvXIo%m^wT+iR>f_wh-b2=YUm_tVv;!5Ka88(HO9B)bPYJvHyp)w+@S{Yx{;V zK_nHBZjf$}1_9~r4(S+r=m9V34iS(JDd`#-6r_=cff+(NhVJIu=zZPq^F7D$et&)Y zAf|S$z4lt?FV1tE>&XWil`>42BYx(RDo`ud7uVGE3O_Y)KYJvw)k4GG)tHb6Hfh$0 zP;hvN;w;)&->&JIj4AcvJuUG4eA_fcPbrHJI+w@XuuIx*7dJYPkJP_9>5BiOqoWrx zg1_ljzpW($Pte(fI*+Y)Q%1ST<=nvjl@Vr}yEV1jqh*iCQC zauO2r%)@fMf|h-bL40rhq_;-7wmBajR*;V#olP(kPjao2CE+Qfi-?r2SC|xrq|mB~ zKbZUWRAH|rcgvv8v!}AVBwA|rn@>%K_6mbciKaz)S`D>K#NaR9(!~RVlg9H*8?K>f zt&_8N2)8c5vgS79PL)_@C%-R_b9lG3GRRxM&*Z*vo0@ehF&|-S`&x?0yEGJ%< zs6H%(vYe1ib*vu)YRWNRotFj_gPz>BjBk)xoiYgtsbgmG&WXnwsnDwWC_ou=5?PhF#sB&GwH^f%jGsgnAO+OY0Rsq zx!z_k!go@SUF7qOIcwBnYO0cb2Qx4yhs{%jtm6acJ#=J6 z(hU=m5fG5!xvp)T0|rK!gjm3hbl35W^Zwp;1pM~mu5b7@<~t4CJt3_fkLYwJJ-<;^ zOT+par5PT71q0vxEXQvZ*6ZPykl6Er& z!9zN8#x>_N@U3@bAXf+&XpB5%Lg)3jG&CV4r}O5Hp1+vNn&{zJ?v?+(#^=tDyz{N^JjKW}-hqIum(pX7dlKTq3;-LyM-i#G2)}Xtwx=Kx zIw|ZXI!R{?YGt!0+E}05*|g05x0~ zXO6@#15O$pcJEjTb)YD?{VV&K7t(%zqXSbry)+EO&A`jUR^gmD30Fhk95&rr+1upk zMZ?7v*vq>;iuN^4Tv*+VkMF#4`-7~7OJX*+5rsRQ;uYD1|J6c#lH#teo6uDM&DjKj*c-o4jw9uM9SYpTYev zs*K9kMLosHnMwKBu8wHo>ln}%?HPbp2QZH;z|x)`+c8|f1?UyK`j95Ol}ZHgGx7eE zlskk6Ku{oVG5ZKdYs;iX54|ZOn+QN*2fQ4z ze6iQlm=uCJg~oTV{Nfp-TNE@EhO-!4aN<=<**c&HMHJI*Z9LSius&a`MRm3Tw@vy!RA6{TSeU^=29b``x(u z5U*f8Ufm_sIP!P%2E4u8rF1~A0hSlPA}yw=yOePW(3NOQxp4)*=$YN;3TSjE zj*$iIlbL4s99I9E)RK=96;L|4%)b}Y^s?Gxs1gh_dBH6+T}`()siv~GmQ=)Lz(67QS&6`+>d~!8yoG^OvB~@t0AE3H8@ElA z*3Q2tV#Q+b8`3q5OW@yr*68WJxjMXA?{qD}oUFN_a+@t(_*#P4XEg^yo-DDjhls4& zrp-g!dxAVOZ(yOMZ9m=kK|S0zeJxJD4{(|e&fss`#crA<$peC^a8gdvfn};ak;r#) zMb2$?zy*iYsbQ)5&*{Lcq1L)yxs|HtJ#hzj2(hE2+r0`Tog9U~Q#|^7v%X5~UD`t@ z;fj{C`D|>X+k*+qR7u+UZdSJcg#W7v4i|^K@Rfaq#d-a%gpUc^!J!j2^?k49?s zk#@6d!59C$RVUKonArDbr_+sy>+goD4gUj50br^!=qq=*QOy)h=4(~n@ppU>CumxV zU_S09aZ@VPljJ%lb~7$Exiwi*bM~(XOuPUn7gYw7 zhN8W3=!(GCZ}q98_4zliEwlO*jn6~0Iw@5S2%Ov z2|;$ssUk;F5>dT@>aJay{!=R)6@b!8w70++zjvUek)G0HW~hkeS~VTpel%dRyl}42%cuEp2@G!|xsLSB&dlYQ&<{z;5+X zXV1Xc3>jckoxEC8nnwAc`6sI-ee=cF`_FMiuDu(sWRJ}l8PZYyw(eQzJ{j+Uw5ryrxYLn=l(4 z^vBme`|#=ZD^QpFPn{|_?46%RFSN-)810=n?ONO4A@Rn^lPs%1#0YxM6Zxr-wQAFd z9sA~P^`0+_RnY?FoS;PhTJAQ{@J@G)2~6OezJE*rJi(cHg~v5-R2bbaYY)(a?;mb{ zS$^p=cvs+f)BfP*S3B8Md_6rkGY8<=gBuH+0m9{%iV#WtPr;p)AKc2V1BLOs!BsW= zvg$nFIvbP$2RM+wvnzN2kom>}ov^VOKafG%Vja&2Z4bSzwHE?#_%Fj40Vz=^`b1wL zfdGFSNSsm0#|j$wwlUeA+3*=)@4tW%`b>-?ud`|vTCk7*WC0jf_tr4#*Jo%MgY0=; zh;^0uli9e+YBlGs(s9-ftG8S1Slc=+ABEUdbL2p@`^VRm#*2(W$T&DF7;Zx!2xu^e z-j^)lC2zfbNbTB0;02jVnCiaAUQvz_3dY^_W>J8v-SSWhK2gyfukEcKocf~*(__*` z$-uuB)2~@`MB9qK+Er`Uy|;!v{hBzjP)1$F>1%Y4bc?HYB4HIT^qZ+nEva>C()~^3 zeA|C%F@4@A$B37poE768-O5|sem)fGa}9Iqzd2e!GK={!299;K<8=Kp0OZt1O6Jen z(H=clo-CJ5$ZP0e>?;P+j-yn8N!U&?k5i-_u!TB(K^6nr148)l>kjy3Vsj_WI@SQx z8~jZnUQ=F2JFAD;I?!_W8;}B48e#BjO+B#gxsLjf6jfNieTZeGe4%7IKXK?O}&#%bV$b_aMI(Onqnw}Q&a9PoQm*3N@ z(VxT>DkqmJuUNK=$8)Ynv>SqsXVPpS%sr*2Cl{ zM(~tAtOB+s@^TYX#XK!GUPK2-7~rWiloeX?%zwxVba4y9?r*Mi>}pY>)2acskY_r%k z2vUw4*4EZ#Ui$sXK>Wr|`Q*w+OI5AVwJ_@Oym*DM)@Lhf*86;iJ2Brgidkuo>1Vgh zzTKHF^runQOGLx(ZAltdvwy5qE2p>4H8G9N;xlNES%jFORCi8A@Txh@gETfOAU6#FlYR)?G$Tsie8CYg;^iiDzIhszC 
z8XGSjUWyp-l`m5%%I^7`@Z8i+n_SU%#X(D?gN*Vh*$kJ^=jN7!5gGx81^&2X!alE+ z?Wtrt7>dejLMH{u?Tb08=b{-Q1FSMeb;YTj8&zQ13FU&d&5VfXc*Umv?kU0Thoe-o zk=~^OQK_zDQx_5eT|fU}vB9?7 zb4vfdl*0%YkH3|N$=+YJac|#!oA1acso%e%fS}Fno|&H(pDmx=-r|34JK}27_I;Ek zcFKPn8*uT~Q2&n6*k?7y)70o~P)mUT81>-Ua;PRIFJ2<$3yrtSsJ28*#2OJ_S4ufE zWGO&ur@<45q-=6B|$V?CE2hm7eVJ(_}_IOT2C zbxlfBCCl^P{ko#We$-#;EK-*KV>1E9c^a>Cnb-odq+qN|a5A?@5oH>v?$+fEErpQP zdVss)3AN+M`f^#6fAHuYafFAzL)OadRR*H{XxfFtXLu%JXppKhl@VX$x^z($3oC+? zg2zBn50McP(&Tzi;`09Kc9B@uF`$|MQy6I@v!+6*^Y*O$hI6Rxn5niojp23~b97Rf~543E6nkE4t*)sarPgzr93UD&LgFj_?TOCWoZLFlsW%4j760Egu;+fj8`}qu1mNl6u_KdO$GY{s2YFR{Al~?pODVk3XorX)VsvH%jPN7Wn zd=`cbv{keo65Q51&7)*LARaloJu~GxN270tJd#1NNevNlvh_N@Y~KI1*J?{% z2x;pP`u+Lq0Zm1fs{3-d3#f};E`m@JhPHS`0tWuTiTyN0Av4O{yR00!ct%YJUsu`{UW6L>p2~rkN z)B#Z;*1yH1PHTWF=Fr$iC)A}X*4ye+;#wik2OAJ@z}X?$b+YFpc!toW5V7+@GjCD- zye<3hTa1WgH$oAD?#)ZzMZBcRZtPBRceK?(!56kS;5}2Emn0&+(90hS_&vdmV5y+W z{1yC~@lrExek1>KgXV0{iYFYa>IO&xq?ltYzo1K_5s|A;LN|XuS{9i3 zw^N#KSy)WB?O`}M9cc3X|*zvaRzW;HU* z>R?VJcjGK~VGpreJz-0d{}vp(8W5Qz`cG|iDXEqGPeYZSJL1BBD}w=lPw6~D{we*I?{$1;-dh)VDhfLRZv-jH&l%= zDlG9Q9NN=gp*6U@c|ihnr6@Ls6ZDyK*ZIcFYD;E0DfJQqG*jy_EB5xf#QEI|*6}$I z$@PnhScq}`H-V^K*l>y+R-S>K^{I+boq=IRDC_#_d=cxeW7ZSI;&bP$Mv72(|z%;+}#zoL7{js;R&a%aDFQAa+_WzfB3JM_r11PoZ4= z3U?14k_}~SwYMHe`}J8>lY3(>#O6YD&wtZ>I-X+BfWca!(a{_-@aH#1I(XFm&Qra9^0=&`sIdjIH=wc5mqYg_)=ufPj%Y~w6ZuY%_s~>}(Y*yL^4VTN3c`mDB2(prF)Q`utm>EotwHytlwbs}<5dWO%KT{Ytz{{U2HxGmrSQ zo(!Ro=G?eW{KV*(>biXNmF3rzH0@Gk z)7YWKyNe&|m4;W_#xju`nbpv*;^f=h#(IU1d3J}kG#*fQA2+58ob>vmPM_+hCv0-< zm|EqB%=W6Kv*Ej3>Q0+$hJczr!ZxBm(^7vzujFS_wBPq^5smI|II>qJ!Yjli)-VsM zt!s&7U6cGq>3~x(Lr7!nx^zKyG)!e2vkCgD7nQ}&`jc_zW#k7g+ZB}({_u6KQ=<|U zUZTDn`YDpw!|HS$#X<#gSYcCL=I)yM+Ue-K6WAmvp#J0}jS*I+*UX~za543jDQ~W( z8iWyfPcT>yXp-g_>n*%GgsYcBZS|6TD1X9?3H%G%1txiioqyK$#Fgj{SX;egxZ*n; zJR){urJ!KxX<7+If>+dI$q=a4F{=pbik2g_s5E}{{6^A@p*g=_JEn6FRb8eRSXEf| z>h0Ez3M&LwKY>hg$VLjs%LPr=lTL-!MYsCOTA}v+l2idk;}*^H1V3@39J3H*hi`4f z5Es`jP2__u?zh68pc>G6gH1)UBM6LeafCv@QsPZCw*%n=rCaSUT-y>=z7U)pbV{a2 z@^ASK?r>JRT4d5ja>?AUx80oF!o!M`;1Bi#Os+sXeh14P3+%S*krc4ocAZyef_74< zu%p+!jMs@5Z*Ke`o#378aT@rA{RvE`ZgXD_q}5!W5pc=Kdb$l>yt%Z$;_N`KtJsYV zY8}T2J#%H%Q+);H@`U zW?!>4H%aDi&VbRxmR;3pI0Zp=EScM&pyu1{FjZq8k+I=Dt-_!7Uo*+67D&fgeonC1 z|I+q9y?ozp9DMw2y?(;}wIlc!P5R`Q!qjs10Y0kMXg}M}*6$%MjosUpu#a@?H>5~s zGJz>`*lA8Hf8+XR7@x0kI5@X4(>nv<^J_SXP-aVNBAJys$8+6a4!zY#h^zFzsWv!v|w?i3GMwJBEwv`+I$1Y zD%o^w$>M!`I?z#JVAT0FaJf}FHU(?rWAS6RUzMvCQiS~`A^PoU%)z+v-8tDGUF$IL z87+SCdR%gRj;a-YSEkM>t-G;^X%TVhKB$WQwd-JrfbPv+(w1kY)b4=Y?1cwxWt}JVAa63+A-)2`jXboE&tMXxbWw){pX}H;wetGi+A9B=Y znz$%N2640mgH$7dRTs=vO!J8k3qB(*8MbkQhtDaQWK_#1Mw*oSWE;v9EPpa1AWsS} zkxo3)1}T<1UB(q7p~YW01m7}50@Hhfo@ZL)6fex#j=>3SH4tS?pjH7QSe9b^~mjX*~cN2nlH?6j|YqUtjb)eJxLC#)C z_t*B`iF2UUb@fv8AX{3nv>cp_;7&06UeIxN-0LZi2}UK}#5dF_qPy;`jSxrQmDCH! 
zG_+!Rs(Feo8TsSJp&SH-3HTiyec3diQ2v7a7@l_+be-{d$S8v512H-}r3})N5R}R^ zlYq4Tgs{%?OjD4Ox}B?s;qEU(_WWN|%-)cHph4jw6>xo=T43f@Q?vhd1>bzs5JGr1 zC@*(TC6W7m`W>FZ&IEqXlS%tfVpkdsap>GTZy}KwI6h%o*KseN>!c`6g=KmTJA>gb zv)aym5?Fc>_N$48gWwi4@1b>Y!{1QL?&*V#akfOPS7qWriyAc_c?E8@;5TH_Ez1a9a1 z$N}?ezeJN^M={&RfiZlJLBO~~tG>w~ME@bYIQWdV=k!P4ZlaTAUf4+X&DQ)?)9N6r z@q?f2pK;y@#%scar`mgpHR$lJa!fRy#g3m?19R-9d#hX?<^k@2e*RK(>&a!yQ4gz< zWl=t*1&nWa(G9VK@w{7!q7Hk_lNEIcT&yt!I)#sUWJ<0ZJ_T(>(4-C0s1;F(KIQLj z*Z-i-BmqQ+wg=b2#7q7|3#ZmE5r)*cJMNUyAvZOlmgbr%qSV``pqm91Va~R$xUS|a zsZc$XgJMeM*RDlGp7&Lr42;xzak8RnBW4swg+(Wf9HIz|2(lBSSbtDX3=+ZO1OKUt zWEi?Iv4Cn9qs+a{qp`;C&t2eoD311`ROOeUP}U5y}eG6LUu-l>1FTNrUhRlFGb&c!%xJO>OHTjgRbNauuYhk zpaO9Q8Vx>tWJ@13h3c*Az--t1OMjWg7#8if$um5=GovAku%*yXtZ*}~o40}qF&I>+ zP^^2@MuP{mO1!{bE^jl=bUlkP>hS)tIL10}4e=ePV|fii3VH7%{Oci@&vsPu$iNDp zU?*#bjXSy3Y7XwZ&$3`1G2mjFT@bXwt#(wqUgxP#%T`{M-E+BtI>VX$HnP%KyXN^B>jR>~WU?*N6IWPuxA^=2O-UzVLZD_Hy_cwYzm~W~CiIgivo(r-` zfQCcY-f5n2xLx`eYbCeXww`}$V6f9j0kLGthupqQktPm*Lg{Zyo-?RsK>ieGxImS! zP+*1X**L9lY~hDX&Tw$hmWJ#lx301_hnY!`9!s7aVdw416Ps3o5$q^B$}|ni!|%=| z?wCYZK;ZB+b&T(yivpqEe>n3b6(guZ`Pl}a1aH1BBe3@z#h1|@49zD8sP3fm<`t7< z5##RIX9_+$E@2Y$vXb%j{B&L9h34M9$v4rmHt+7tCN7(HluUjcOPe!A!8%rV!O~f$ zTEm?`rE%0DpGGebKL2eQ1fa06b_N~#j^M$9i0DH!} zV^DF0tztr2*JjfI;na+Ml4HOE-)y{6xF3R1PcyNyqdO)mRj@mFd5yR)PFc>&kt#=!FjDC}+5U{deBir;Oq znKB2bR|jQpKHOcS@9(_14rV5jOdZC2Qvj|r#L-_E06$O_i>&rx)rR+V{CwY%{!0I( zvC>5chsrE|U7TYhHAMS)!DSyRO2xeuNr$1v4u}Do_G9?%itw}KJOG$99+;pt*#gK_0GDIed{LhJ`SGp zC)$rQW{#o{BE!P2YuMH~?vV~m@+CC+Bz!}N19TFH9%XcVffL1dY^}Po&WbzQ26YnZ zZ1dlFaeIpzV9t_Wad)#{N1POC@-st-xYl#mbb0Z?^;vRzbUUQ+BX__RwYFp1)S4c# zXW%s}+MLg*RsB1?8#ZmG?q3VkUou!~n~vam=~E{2k8!rNSv+_37ryLtGM=CpARvRh z7XW3`aPpdOlx-wyQ2Y4eD`)P`NZQ(`)SJV8=Cx)2kx-2#>P^IG-zdNz@O3_mGID4m z0%mS0vPP#0n%`>~N|^Od_9HyMdvV3bd1hOT0uTFNMZ?6;nm_Eyh{aqZ(!5p<47_$^ zDrzB_-*q2W^Td4l@gnIHCx+~ancO8!;*J=p@3;(A_BbN|NG^x#_-hW97Y>=ZP6f$^E zY#gZApI=euo;g4MK-hPQ%xXK6^V?e#^}9N~IUnrAHr)u^n@j6`@gHVlv>f@B z+hL)c=yZDc>Asj}&pxvKT8Qv=b>T2?ed8& zGaEnx6oEP+Zm6rFLNH`<94_G?Q@(oRoKb6y&%I-e%`6w%@+xzhWriGU+@pDBv99jr zj@!+pbFot*r-&k8#)Bsp0W(#1@S_BWrX~Dxyp8Y`A*ach-4^MokTW@t3eM z!|(6LDTu*-Aphe=#!&~)wc+F8+g%=p3Q7E|?pb`9bQOBq>J6pjKU9OS-&YFUQM0Zg zj$BEF^vjo$+bsN7h+8ARl^A0D``AGZr<;hcyzSuO`!}#yQZQSHDTjD z=>@dYdzz6IU5uDeTZNbdK23n7OA67KHy3{)^DL2M^F=IoVVSoQUA1xrN1^1j490{> z1ugCu%@18J_OpwLI;Z?-E_}O+>*~_8K|+&Bhuq^9X}ffZ<3Fjh6{pT>l-O$&3+;aD zWL;Kmr8{}VtS*se72Ie9uW=o-ytHq%%gkKL#7=6#?nR+Ed7{I5S~5TQJ{NHval&tQGGn4l8P++bWgu@e2%m zC!bM~XKH)~_>=*)$4$9$@iJ!5GmMlpw0^las0Ywap;qR`(e*0y31>A=9(5gA3PZa`0Pk#byyJ4G* zgAQWzcCJsw9~U>?i5QnmzJy!F769wZllQnj8a4$=^|+bAT|W6Q%xC=z^Cz!ii==B; zz>`X^QlchS#R^a~>>l91A2zPvy}j-$@Orb8XPfG=(?Z~T+2Ua|t-`fju-I3Te;(ijYp)W}ECpcLNM$iZUR6Snl5}r~8P`(?>zbK#g zF=MG|$D(qPu#SmN0rsIYuAR59Iu6>?9IF_8nR-p8ZZaPc|3_b_ColtECnnT%3yMCF zT1D&soGSXcx8aB+zT*<@Lv%#XoH3^gT;-Fw@n_frZpTY#F>N+plx~nl%3%rsL{dHq z(v_-N%x}VE9@H|$>gQAcd@lY6&qu`X#avin=f$BWymml7|C~b|Tw3*%0OgomWpjLd z+jdv%pH1f*jyubx!JC$_sluK~Rn?EOicg}C6La39slmXRE#+T)L}$j^eMym8cd9i0 zlMfSjA1@h|jX$tNkDnoDF^P~Iep}B+^#|@4l$A|Zi=MtMht+=;?st92HKq2C+8fj> z{fTFsB2ar7O;&N-etw)fmCtx%{Lj+4$M>YeJX=Mhc_~KqnyI3&ILC>_H66c#K*b+7 zm!+GGj`%;E=L4aMFozz6Wi`5`nnzTL4p~wVvaWcjc12SMpcDx68Vr%426H7j+1c^8 ze82_FD|3C;v#@S#zM`^R(=TvGhERq5xleD2b_%o*d+n?vo}SmGXeZ*XWH}5!yH*8V zZm+A7btnoFaI=a%=)s}-+f^B$!nS5}vcoUHe6cbN@g;je4Sx=3JlT^lA#1K70oHWC z;-CgjOG&cKg^MRGSKJKWh#>kM7LUi1*{RXR<&8OC5iJmij;R}jnVHi3lM6?^3Ei4F z%?M!DPVi~zCa$2V5z*D=MibA9$k~?QVIp}K^z`B9* zx!fHKYK^wFUcB;YQK5<*pOa*O56@UN=iE`k3TytteifMf2I3JwYO0B|qFcNPiQD-& zEWF!UEI<^Wua%CjrHXY%xgquiky~F}F147n&Z}_rTk}ktQv+RM9vO<~rQa2m80_8{ 
zq;cY^|4bY6ltD`#oAAECe zz;3=f)Cp$hc}gSCyX<@d1!ANuJ2@=zDAE<}AlP1@dJ?b4LFyaP?YHQz0mh0ceT1?`eBj?m@=$2I{ zn~#1_Cd!%pl;#cH&c$eqX8+w9Sj~%p0`LlBvS?u29S7pLymKjcG@JpKRSac=!&>l{ z1~)OM?}SEqnZF(dLQpO#J=fz?%4&O^D(tQ9wnQ=|ycwTa=7}~{f(2>430|#zc;*Jl zB>+9Vbe3=ZXdBAg;0NourRpW|9&|jWgy(x+CR!I}yOSIomcHzeC4kxY5X!kX9^f_R zeye8&n21Hje{NoLw8r;D@yjdxhCA}>8mQx-Kh0j~4HM_WD#1TgMG65b@5Ye%sRMZq zW9#xLZ>&zjuROa4d6LsUkn>TT^WcZy2Rro!z)pJ+ zr=7YoWxn$=q2tQ68RXV4k0Mp-XsNddWkPVt)LS>b8OGG4xKREE(OZaeQ=b(E>Vzsc z{@_)G%h<;AeD9()mlM$2LHXsiut(nILB?LQbG0Kvluu)VUEqt3;JpL9>Yk{hV9VZa z_eMYveRRU!0BDcwPf#o>)1%D9(bIZ_<^^^lB)ZiwWNm>y*D!Cr8?oFmD@fhb!4GO# zuAniK-e##F;WKEGU#Qr44t^ z=;Mo*vETs9CAQx##(8;kH8E06ne1;B9ga8h98gO0-!o7+k(Vuje{Dau_pVE=Ue9Gl z_ol&oM|iTvOm;(rw|-pgd1&~okQ5QbfKDUxOIeZ8`^hT@ZvsNMidbz(TyszS4sZz9 z(^{y}UX4@FfKoG?8YB-E;kZN^v^A&a??Ndp4|Nr?oFjFg1CtQCGP7igD`EZ$+!1Zi z!_mu_;#r8OB3+G-?=x}YB&u*Ttf!ouz!<(&VNZUR^?_%s_f-Yod`&cu>>N^7)~wZB zxs*G}!;wHX{q05qgjNx9;6YRBV^jruXO5?o2X<{pibno0jzxl`N|4>GWX7P7J*eTz z<;+Mr{SEhEBPO`~%yN(iLS7-*Vr_lLayoi?W%qn(_d+!Q6KV^7KD^qy*57 zU{!)l+E=Xq2z8d)}=BD9xgm%3>aLjv`+;IgaCO@7Nc>CKvFMEVB&{} z`Y$v0tf_o@RJw9dj~hOB3@8%O<_NCLaE8}?GDE_GbP)B3MTf~-J5&U_kOIl-zFwV? zEuzHF=fL$@{6NLdZTMMLg}*IXmbY1jIUrtwub|x9GGe!!^L|H|CIR)1a~<{-tsw^W zjyvGmaIB<7Z@kHbTTjLqkT-#D9ZJ4|G=p3BVW%l87%#%FOW5S$lf}@|D7O=@!3Qsu z#augaDY;kYa#voXzNHWKnU1`!tz3fUSe~0+Oz_IWeY|ur| zp-A!>gg@U{s>S}5a$Uk~V+@@^;~@j-0a45hFqas*MWaUDMH&S)Ik z>o3m_VrK*903(@3oVjMZU}1?sqTWt)ydzW(%++YL6`Q=~GS9*Mqp|=rS6edttE+s< z7jAFUMVq)cyvoSI9VHf4L5thNw?%HYBnu$fOW z672yJPgBAAMi$HddDR-DfHC5yu(N=*62Ed(jp(IpSS&xWG{ze(XT|X4wi8zXW&5yT z)+Kg-u=G)DR7>tUZ(2d>Gev!Q0!eF~Wyvc9%mL^Lq9M3p15gC>Xh-qA(K%$y< zCb0wyMpu16u;-Sf-d09;+%mj#->tXyL~7QpQ{XUxge&tN-w=?Qu204?+L57EQ!;0(S#}8 z2rHQ(WCA8l7Ft4jzZj^Rv;}9&7>0%Q20~RJTVeqQB$p}+mDkZmmxzM9v)jJAlADr? zcD_6PMnHfiNE0ajBl5m_b1(N=4cV_}i58bRN?z+&XO0zQ?XC18;TMH6wMQ^0)e9-( zOtE6Q#X{l$ZXMp%Tao;(Opqa23HKkhxe}aap2DpY{puct?)ZGm8h5?XJG)nTaY8yp zVMF$u2X`kBvT+UAR+DR(y+%v$js$7Bn9%zftJc%GA?D1zZ+th$vUy71))(uJraa_( z5%-es(54>Sshcrya6hYS*Nc~-r-%aNFi>(*ci9gBha_fy5UWJPaMr}v$^mcT{vVkp zK8+KdqsWtcbj!P1a#QnziuDf|R*5BCo$Y-Et3K;@Viz5l1*fh0xLb$Uvz8CR@^~GU zyzR^?-bBORX9|XtG;S{wsUR)}_m58p;G&afHT9s5y7wh+r^va_W!pdZ9wot)Km|BG z0Q%j^GidYroZQJvHAS??*EwbE6=WWp?3RIlgy<0;6wY=SSh#rM_rfhYVq!yF&%I#26B+lM4qzxw#j+ z6|2h4<3N3SGNCIYz5q{_aACdG&W6)`4(bGdkrGQ^KzAtxOnxupa#S$Ow3vLzlVHDu?r<4P zV<~5fQoQNAd-SED4QZfBempc=jOVKk3b98}6p6Wa6nR&~J8Uek8e2 zH%3jUi9Y#qsbUJZ400@St^73T{3xs+H<7t+>Oyc;81e>(jO$RB;wB{peQPlX179-C zqmIG1;u=~|&6$uN&E+{u07!)V^S4EKROSC{;Lvho?YbbP05LFCz>a8BqeL?*UVR8$ z1QtF~E0TX3`=w4P0q_yoWECVt-^{}Kd^4T{f>d*XjnJV?SRRMvEVqf_z`!r2#COTZ zO?9u>Hd&p*yzN$o-_JYHkXCr>P|2EgL9Hx^>~4hO?OSF~T~yYU3yMdzQFQwxiOB8T zLOw7&WUM{mPqsEEoR;MgeFX_<;~Cpnx3_{^YEWG>p$Oj^`;lT(YKZNIUqy?| z|EwQD!$$ExnU?&%&}tWQvAH3$Uv=5W?nLcvH7Afs5It)0o|x6;#V~l8ZNCqDd1d*e zHS;?Kp3 zo+AIKW=#~B+>@NqKin?wVoY^Mv7_emR3GBwO`-twGDgbc1_0&KB?Y?$!9e6D7ZQ=z7b*Is1Q^e{^C!Hki9OR^23R6_4@ViyM<;;Qhnj7;O7Dm_JJ z4FCa4izK_0M>BQ-T#m#Tp6ibsP`E(0=ds0e-k*re#T8mWQ7@}l0LZ+T3MDC9PgiUx za(mx<P=?yQU=#$?)2Ejw0q@38L`g!%h68b^knauJcC?1>5a?e*V z-d|EGgDGbk`({m{W3NitC1&{}@f$(@h`9Br;jb|cZnG$PZo+vgbEy+YZ0O?E6?3ox zK*Y$2e`sg^_Nb;+<8w4M&T-8;)=F%&d7GwCgFm1>cZw+W!FNw{A%7+Qg1zF-BZh(p z^~0$4g)N<;K$Jr^*0|iXBI3KK;YgcQ=x8Zw^!DHs01M zI4ShkXs9+tt3A#kmyOhef%S-yRgKANmU(E!2tsXDfXq5-p^b{EE9|i+`3uSs8~Tt1 z^nNsIi)(Rx1N9MkO2Nlrkx~`uKWvkm7TZaMYkPHKQ>d99ZOsUCOrvIFyLys`&qP#B z8Aev04URW)O{kmG%9h22GC7%ba&J7uYywixW?*#n&b%2IQWN9~^YgjeenBQkZZ*Ys?}2;=LaG*N^xHMQO9_b803 z#WIA~we@@?#e5Scir31ItNk4c2g`G3fX>sSCD5i~eoM>t1cj%&$?1$teO+h{f^bca zwz+zY)6RpfB^V*w?Pc#ZPuRL4(B732lUUV?K=t+_oq6bN=1hiL%z3Aj$qXuMJInfh 
zysef_uwXzc^Yc1qX5NPT231!zYe^?ynDk5B4_B^`2i&)pC-CPy;GnaDia#`kdWqNZ z+g)-QD1uPS;@VjQsnOK5)WA_6(_2;fMY@VCK3+zO*y-oE8LiS^mZBPCAg*gO6cFj9 z2;&oYMRpNe;|;+L(*qUEF+ZINUna~OQDQf%FuaWIi|`go?*Ob_4ifFY0vi{ec#qVH z0q|eW>JjO8@*)f?CtdH2jNLJS(W^!SOXIOKE%!bz(BHV(eh*9IVUc_R_4(Ym4844~ zV`Z2wQ*?~gpOS(h`K+)q9jwqQGwa~(L;+dT#5M**{LrRtYGgDFI{$-` zp8g-VZ@T4Nm3Yz?)wqrWd_`h`ZVF}?vOYNqeRvk7LfadeV!#+`Q9RGr7Z=*KA4;hb zLsL!o5y48=($MO~9GbE2Kp z6K$tXY1>v4+kvIW@}qXMy(a=RTyI5_wrLNc4X;*c!wyk7i1{D7gC&Hd0zRUy#D00WH5|&kVqbEMbfy|w3mmSlW-kY zx4eB2OG#6<42$CN*Z2AUqQCG!FlJ)hDHHjK!T(znjrCM~>E2l}xjB60<$0b;TSRu1 zQUbY6ZELJp6z2qvpOw*YkI7?P+`7fE0$Vbcrn;ls9TPuPV20r_x$9UKdyaD6oF>*~ z;Q40sY!v$;N+#kwezl?fcDwx$qaKPQ`LN~^)&C}3cr7_cYf#p^*Y2roaQsdi^>%Wo z-v#y3NTH|&ay;-`H0-mcx!sC>KcAB8i%hXWl7o-)2i>*_Qnec2pdN#Jd~oih;Y203P5u7CIeRgDWMtC6x)P1CIKX3cjuBQAKvV? zJj!mQDFqq1(1y^&AO$8072kd3pa%Rmh6L`X`C@^q7F~vvEw|H72e?a*`r|lH)Z5Mb zo*V-Ni=djMqqZS`*6F9Z{D|0hi`Y=m7dCl-R}tC*ZtD`?quqk#D{z@KK&_DZkP>

7E!VW9v|vATs|}xco>OI53++cP=i61&@%MGi1e*kxc4BK;b)wyHzoWmo@c0 zP4j2D>i>T^@mDjA{|`-LQ}Vyf_C@*sliax9KrI-s+6$IC8cWi;VY2Un|Ml>D;QlXf z{-~Mz5izy3<6FoD!N*~q0&)Kn1tkAr)WJHJo#H}aRK-ISeN==qO73+{!;sk!(i`wFQ^Ef+Md18DC-z==@bxT||EIdIj*9Ag z-yQvkL4$%IAR>)`NH-`c-5t{9(A@|k(%m6Qr*wmYbV;CimowZmlXXebDv(Mi9efIM{&$}T_uJr+P3D&~M659TDy(S&}V>c}Q?sLegzQ-p4 zYzWtGl3DNR`tv{tME~*tzBj$?u4?JNtm0o~!6X(b z@Ebt%{Lk9&?56(MJ1RHASsY#8=)B7^H}xL>9TzzvZx=v(db6VYGG5v6zEjq1;pPIH zi^IHEr>c;z_fJEsKHAJccE5W?jwRsLpHG(N{52p*evdK`T|(d>Izu3WrY(DrbfjnV zXwiI;?cS+`!g-Tm+h#W#hv%8Hz-guabY|UO*#Xa7H_OwTyEi1s-7#8COSjOPTz{*2 zYMv2Se%@FdW?^XErFplaWr(wo@E8OVY;iZQO%0;SyvJV?^y_XkDG*)IfZ3o$_KevsB_$kfMF<82L!o66WEh{{;lCS-jpV`W>{&`Vg|Tsv z)@$7l{~d(n(WhJN&S*7ha)k_eUEZ5}zki$;hT{Fepy)vPbehA#5dL#0Hga-tO$i2$ zQvIvF_~T6jL#!`2fuib12#|%d5X%;z;yX+wAA6;K`lS#oum2Ui%>-b&h*k5kEkZ zP4tNbNg8Fdk=>V~fIqh$6jWn4#oI23oUrq}x`S}p!tHy`ypV!8qY*cJL7s_*8|OFL zMVh-ZA^zZF83-ex%*E)V&q%s9C>uaFC?lFX`o4IwW}Y!;8$9pM3ZY?;?slt%u`o%3?J9{vw@1{Vxy z=%$J)&lj%F4ezZ6-T;AVAt#NvAlDRj#GQ)_m7|Q~lbVfN`{^@_q-8sQcOP8s0XU&PgrQ`dX0hJ1_-K z)Z-IL3+wf#q)F`S+-~l?x?Yz1KI&D7dZngOR#=V&F{U~%H8BY{ee^*|%IGU+avu&@ zX@?q>D3yOFM$P2}bb>HO2RSPdlrG(L?Q`*MR6GT{M%yYm8hE^TGn46sUdb_+=953^UdidW_ zoi(`CJxaAfW+?A{+Jnv4X4#$dJ?ag}&n~tCtlsu3tD1U4lt-25hU#@5kW~VoUOJ3E z1{i~UNyQ)(l3(Ox&+bb6s|BEZ`*up(?+tEmHVaclvWT-{;55#yM#@+2YMG2$5l(VN z9B(-sI-O$U1!Yxh0v5kKd^Hnn!eB``)Li3~>!Ogzs)EW0W2UJOG#iKl-u0+U>Sd4CpiE+o{Bp% zkEDg;JMlEflxc9Zall1KrssY+;ul0Svh$#fb+9CdoM4yF9umkzkcx@2&f@M*gGI|9 zV3wapzr-dq$n+khf^R^SSkZ_#yu<$voToP-q{Hm8QyY?R6Rz_l3-8(=>4VHd>`S|s zuDrW|xTW#k#1`oH{d@n%wb!P=iE=RS^T}phy1fU!y+6ki319oKxoa-CMBWdLQCauj zY4W@-FZcC-6le3pJ?uSXZpl>t&a1=T-~Uh9lmDwZjp8hA5nn4O5cH>{bUpHM&A- zDuti**ci3sze{SMC-vqoz*_cl;FC+=je4i1tt!C%KYuz~J+(SR;W12MQF))t7PaQf zMNSI+h$koeP#JNQD_zGz2(DUB7ibYgMLi;b(eH!OpIpOYm(q>~X^A)Q-+tXwnR6ThjeO7409g`a`zSi&-8^?o`q=myk?$1 zZ^R?EL3PB6>tmz!=*X5%$%hmzvZEWxdD9=26JbgQA3Uofe%ca$GayFG%Nq?vnF5Vc z4tSC!HtND>@%q?RZGV32gi=LEmC0Gyq>reISTE7ZB?NPO$M zsA7*;Zgk70P?ALbXZnY(EafxLQzZ@Z8#Rg+<=9Hs+_BXvkR;uZ^75)eub`C3 zl#8=1Z1oO;yBVhzl`)M8-Kwt&NB0G$9}b5xr);eYAU_Bs3^n`~Vs}8JzP~shvk5_Yvh4c0tJRjFA=T1aTg&9a zo>I;@oi7B=?#k!939ZsrdV+JNjkL6aYZ%2U_MIO@r33fVh&UPtIXr-WnIp}EfDLGV zI3pg4yFsNr<~=X8{mol1zQ8vCO=l<@8L&|nMZQ%{9=;5PXVkC42GmefvYY{U0U@@|D3&dzwb^-AWiGs6t-uBtAxSL64DTtU;-gPU+(df-mzorBw1AKlq^%RjmQ9~@-y(q)bxmO2S<2t@=L!0r^DHv%b6AuRS(*eqh1q~-f~de~f|Dpfsr%&w z(W@#92=LxNOT>l!Dqu1WQ#85}&=3-4TZl$Sz3Ol~OXO=OF&)t>n+yJrjJ@^`ZVWj< zzHQ%)&FM^*?9GxsHPpPO^Xlr*aVR`29w!!;8D;0~98Mg=Xwh%^!_HKJub56`+vR6{ zc0^+3&T5`FU%JQU(_n{+Nd209>?2`(@#SJB`Xfr+kxAh2d8wvTWU1!&Ea4CWG#UqV zj7wpxh?_!Aty(XMUj~GR3wZEejA^>e9*y^Dt<#Osm$#SyG52u7(B1Nxh7g6#yP5vS zpIsCJS*Oe+K#Dntp1K-lUr4G9Q!^v0YfT zigEJB+`QapcKL&kU0-(ca2O#e$|5{TxItk7pfnggcF#*FUyQi2#kG7ID%n$$VKw9Q z(fJ}1)07mc)64gVN^2?OpHBP*Q@MRqmAjgm=*0GJEgSP~-S>7TfggOrv?zv4Izd+b z0c*ISu#`ypvu{qUxfxc)eUxdBooZYZI=>qeNdeSWtbMC#qo^p33dAJFzyUcbpt+gI zR&;?#wzKwc)-HF&5-WU_@ECq)_1K+sG~-)+4$JNAs&XH6R@lIR(g`oVlEzN|{iz_i z{oXcbFRU6nIxxcy-*EY>^q0s{`o`7zTQ;`Ognz%A65@+EA3(5Q$%|ICmH;Q8i1qp| z#bg#RYDAXG4GV-*;6-tHp)23aJLY}r7%`lu-g^|~cMaRdM*K9`Hd7GiG~t(n z>@?e+L>XRdOrFO%wJc62?WZL%^qz|(h_BDenilvNz3`S+IKp5`<>4?OW#7*gamjZf^ADTsegYg}?!MD=Li&xosV4lPz9(Mm9!cvw*sTJH za?lhS@5j?qTNM!XcAc#Bm^PX)5d^aSD(4&&v{P7keSLWV5f#N)MBgzo{<$%D4|xg2 z!ttUC$~a%5-9n=S>W1No(S^UzH0akHFK(IvxpxGvY{_LFv5&b`ayE0osS0l1{V<6n zZ6JG(YYUPzaFAL?X%h(SJd6rWpcJs5rXzXcYX(HhEL#yw5A`-m%wQ~(zk1y@22@9cXD^G%iRn?-w)*8b&Gkgk8@Y?dQ;1pbM^H; z0JFg-!C(ue8=MRvY$~AVX9m8*7d2$acL~4Fx=z9#VchapV3Qyd53P?`W&KqG;7l)f zMpw-~Z)7R~jAx@=1z!PuMNzV9kK5Ueq0|MEg;}lgea+}dQF~EA;bpy6VDN(q(?qXC 
zDRHep?IM(U!Qs}=>wGX~fcq1sa1zSCe>;lvtRE5a@g4Z=S9QIXu;H#*fAEI^NxZsD zIUR5Hz-pI2ezVYXhYzzJN4babwo(x{ZHoZEji!#ix_>DBgTj~jGa^AJ|Mb4SQ9Z}o zyMK?}iRaoZt0Da!F9}5CeX;L+q06(8c5+b1uQz^33^)-b$2J*~`Q3s1YDeR+=AW;; z>lXg`+u!RkH`_vj{8;H>#Az=MBEZ?@evyzHiIBU;ebk@2zBdNKV-dT>{y^4qS;EGN z)EfmCNHmSPLb+ZEL=vVE!f`<5f&VS%guaxI{8cpTc(3nhSPPZka&w@XoQyV{P}j69 zrmb=2#OhY8j2W=g&cD+wbI(q<XMbWvUOrCnK9t?`{ZYFqo7m#jb@)`Y z0MhX2a%!RgOL9NjyP>+Gc5*e@QUBfnEy~We)7t#8^>uev#~aSaN2>o;Dn$%-pF`qz=(#$hWPbzT?u{Z;@4?0gUM{wollz% z24|3kgcZDA(rPAma$)|IS8f%c+9Z`hvN6cPCSW`2tKBC+F*{EcjFT^(pbLFZ8#qR`@A#bHohu%SB(8b9y<_;d!c3LJ@ezB^q&#TK z+8vEc_Qj#g%cBhE7HgtmgE0#UBFOZ_*sjNWa%+cwYr{CtwxrX%7Uf&*X4}xl7D0Lr zaen{H?*Y@eOw2SKC>($3ftpTdBKNTqtH-Q68$qFv!Su2kTsHE%3y#~}@x2xRo->(2|4s!l%1Tk(I^%y4);&_LF?gqHj( zcbn7S6jpKSjBz=UPNThkJQ%<`74`gC>&0+*xVCnou%P!%pk#Rq&(HMuF{y+$+J!vk zeN?5}kX3we{Rof`K7?xT`TQ*Xc2c-rC_De<`F*n8<0r1 z@z(L%%hR=ewxZ3L_sY|`KE6|KV{}`H6`XPtx9`r`TrJ3u=JuxgQ0=Z6x}@N*-9lAP z6I;JnC1%%~tx33WdE@|(B;g4j&3P(EZpGYJtwMwBwX(H%ni+1E(3q!2AP4}loo)zit%Q65Z_<8k#wIp2_+z30kC}V z8$CGTX4pS~&yl;i3ewP0IvgK{Z$+v~yK{@LioU?=H+{#Qr}rRtsD!tP_SeuS?mMFK zfxB0=E<(@4ihn=u5|Yhm38|k-szkT9z8M{SFU zn34BY2s0Ji6b6Gi(-^m_lTalaLBNu)F$Y^nWqoCxzw1NQIF;oOA6qADHZE@RbY**c zZWP!Llc(<}$9XE*x8L_6dstt$;w)FqhbSy6XDbqe78%m+cUTArZW;CtGg&P)hA&NU`-Pl~>RX*BS}7rn~;xF9o6lisuJqmVrz#;Mn_-m zVO(s(ThTq8k1t%#M)iHX6>w=dRBEW$mT%wit0{NTypzicoqze+lg`BF?rqI#>51_s znx-T!>3By~b&(gucG4kimrh9yUd{>0ZsKNldj~} z)~G72IEpzBV2PnGM(8UDa6VmpL&5D)XeO`)j~}z=9KIs0mZoo<$B2&B8ay-Q?r_k0 zY^+=Ex-mUG9}v3Y71k4kjo!=r2zh58RGC>%!TE1f->HYolStQ+SxRwQdL zEMl=WsY*~y`c{8aZOEyaM}K`oF-8|!9pnAD#JTHCRb@p(X=OLF7p{l#C4Ku6RSEZ$ z@oqbW-OcZ(d{KJ@M%&WBJ}*`vdlyf^?r<73Mm-nml|OuvTbxr|ac}ro8HCsktRvX4_BJ40$oLWptd1PEd)83ohi##rlzHr4uE_)C;l@B+; z1Og2;RCkK#NH&1n^uAhc-8j$l5&Xh2vRETYIQ1$lGq&GV4^Z}2$f=4jJ8JYn^%YZ9 zD6EVqu~w*-U)dtmjk7bJ$0~|cA^R8twDH|C1bae)s>=)Zt$T}tJ&ot2a-%mmg9L zI9P2-+FOM&qwl(#?Dm}V7yK0lIB!nT+S+=%83!5-*`+2zzI>*R!Ps`GYMvk4^Q=Y& z7q!Ed=STCHTd&qG-}@+8|^#tS(oyn*`M zi=(ak>H)j2Ew`-RMxm;>?ur==uz-KT(Ii#;?@oHtek`a8_29^SN%VI!uOCk^{}f#p zHpl&2die=E=*QOw?jta|u8+G|ZGXQnpp)ylF2pyX-1!pcqUN0IVq@ia76~p=Tqw6? 
z+;Tr*NP+nJl%dsut*xz#D{9QSabzt*?eT*Ex7lW|&sWG7djSO3HKJ{WxVX4ZWL6hGbrc(F$~}+Lo#sspV{}@_LKp zB;=UHu66 zWJJ9Wjz@jh^wb>~su2Qk4TyY)s50Xg3mIw-91I^@E>O&W!?4_ZpxS3mCvbR^RtTAY zG!HgaDTH-Cc8|=4Z8n<~2(h;2NeNDP_@jpKJ8|k6va@C-zYP)cZuSZCBE>+R_ zt;!dmZG7PoESSRjW#?>$|5B`nm(P;UJC4u%xF_SxhlW8>f>K6v3(<3{vK zGBjxxlfAZN#7Y*UinIYqVux_^D($Oa&&qFdKc71F!GTTkd&AwwEMwRfq?UST?l7>> zY`0uqoF6rmc;)C!xt+szrnU{uVDOrCl7Fta&uTY@b0=osxkg!qeVdkRhui;DurSrx z7!QWgHqJ<2c?H#mPbf+tPm!HjvtC4CI;t6Pe+M6%-Phc4-r8CuH7_8|PM_m;tg&(S zS$S^1HKelfn9JETZRdO_P<>QEDi>SzGTC$y)eUe&iEax=A7%{u?3~p<47iZL@DY-y zFhW!1l(lfzCh(A7qo8H~(m!e%vK&u1+2#*A1jdNzxW#xQvjsCH!OYgPlo3O==`U5T z?91NPy_37JCWgZJqru^vxG&E19^8d32W_5Pp#$-qdPh15?6oR3)e$YNWA4?6#0Tu| z@v4Tv(;e=@Pi-~w&{OrG0->G;-6^b2R#zk$OvGO0FaX22sPN|$D^769m!4YPW$wJm zk(mv74rkNqd>GN@ZPQV)*jb@*scC4+mU)Sc{4*I3|l_k&3J86Qb2`>-WM%Y(w3J4nSnI zbmFpqMlQA2P+X}tBi;{px8U1Kz&r6QpAX*D8p=eq-Yb6=OkXPc-{h@7cDyf&zM7%> zjL^l=>MB;-*=-q)iQa-=7#QM0OP*}K>kNaTeR{_wIjW7Zx&u~iDP)z#uhpGgJe0Nd zAZ(n#rB%?7Ey>Q%$Z~m#)^7xY)MQ(O-q9E{_0=O|BO!C@*{cU(?z$_v&wYudp0Ppe zGlQ~S<@)@KpAp)GSvhi3P$K<3`V&K4Pqugm$2E0XWZZql9cWbQEgCr3=($C5Ka{^?a8_MWjNt5fiIr1rrLH<=|D~ixnWYh{0ig;M|qGIZq!JE=zA3H_pwc zG9W|$)Ph}KwIey8O)r?O~RXbhSn*g{JLtxb6Z(p-oTPI0 ze?CY*5>--;ZpGT&G&z4rAn{%hF+XBDN1;M-+edhkn?qv@l5@D!+#qnU?-YpFJ; z3Q1uN{jpaz^aUJ9m1hG^ZI23dsg3z%k5S>H$r91zcmvYH*m`w$6Y)waWqn9O(2ZrmNZ{>m0!DnPIT7cqaj)j@Ltc}lpH*hP9|1w>s>PzDw!FO zruJNAtH_&7+tJlEGE=+%KKMYIKQHE@CeoU&P~+%_MM3OnjasUsRd5PxGqKF9;Yl== zvUbgV4aYLt5<_RT`+D*Vj&jfTZkUFI5jZX&IHuqeg9kmf<~c#niacGVO$c3 zAFD2u9)&ZpJ{Yt=lCA?i-xq6*A-m=S*!A&FPdG=a==l>(tGqn7mEVcI3rlU}J1 zze7c2VSH$`uz52GzgG=zr{90L*dX`~ILgG7td83e(Lv9n;A-ZUeV0;Zt*CzA>WBx< z`I_L5iny@Bt4u`v)$u3c%|9TbL@lP(Gq4wY0s7~{`|BG||M>?LIRbFP=s>4glAROQn<92(t$O#yF{DI>^~@XEAjUn66g~t5X|Fx8y^8^3?tX8|K{t@!tx^ zhc(V!=NUBB@jeBAw)1fH?#KM6ZF5fHp+KvnsaEySO#q4+UMMR$T#jq1-J>kRaDgeb zUECY|VL31f-r}sa1G@$B%e5t{t~FGa)~7kdnWjS3t{Ss>jC3|sdzeFAQiWUxnb}gn zg<*{8k-XW>4$(Q^20H>CPp>eNoHDbNUK}^?Tv#-Au6UbiQctUCS{1;Z4kt_DZ}}0J z6##XFYQ~jLmD0M*+68-`6kNHYfTXZbfPt{2*a}w_er?rHJWhdt9I-`(T)4GcY7F`! 
z8O29kB6V?+ftvmj3+a$c;eO~$v5CBPNlJ=hiM;b|a_3Asf;1@X@>z*4n0f4aY)@v= zA|&l^#^1A&qHUS9lx|tnO_hE8 zQ?6j3NimLB+i#|ws61~9SC9--c9I25x@z|=t=nE%o8CSP9 zgwBMaIH3+~~d{`oZAbia7Ug07@mI8B*V zHEK(a!D{n?30PF^wT8Yao=acVA$boreiJ}&h{g``IXQz`lZn_=ctrl)o-{1m5qL>M z-Nla!H$9x0!x@Pu$hC(2$DB)Emq3$Sl0IJSC|?vl#gfoQaCKd8uDbn!1@mrw_jx*> zcK5xG$D9XX0@h|B_$Z#Up0nOE6gsal;^+GgLIkclmuIS|GVa{CtwI(5X zT6%|ur$w7HX@G~gmqhP#bdQ*<)>|}iy-?T67fBW7-QB~j;c9G-S~)=&KjC~ONJ1!6 zC#xE+eP&+4@Y571ODSM#gdg#r55xd=hY$x}i>K7mMbci2a(V-wN+WY0*mE!FX)uW@n7WJ~~(*PoERWzUyT_sA>yS4?&uJ zMnSo+b9dW)BP}FpC;VwFWn&stsWS#a-j^?FnN#Ba4%l#NVAl%?OJkuOm}e=cjSv1?>{ z356EQx0?MXIVFUajAfYV?B<$O?Og=3hCNlagKJB_H4eI+Ty>!rzLolztP1v^d%|0UQ1^+s zj#|}jNk6wiM|uDKt_SJZ2B z7~&~ZAeTB=t(f0EJ>7UEjeY$x8(1Gg_L$t6mc;jc^DYDZWx$XF>l`hzo>azKLML}wqj}y zFfgb+f8Vh2w5SBYj|h(9Qo;xauaFT5F=obpU%MH~Yr5FfyX>T*Ow_EiKIa6@RCrn$ z9S&Alz<-&F^qbhnJ!1!Jww5cXfwQrRGdn44Y?fyRDDQZ5)N`+eMci=Bvwf|C*?O6S z6&G1PH7S_V0W+hv^K0I2F4qV2?{7&1{`s+aBzD0byH8IxQWHn4q#<6;A2MPri0AO^ zdLveWG|zu;{O#b>3-RB(e}8M|S9HDfgug&RlGXFFu2o0_6xG9wHsTz7Qtm z$M`>o(3m=+^~|oM|Ic-EN;sNZUjO?(nzeNr;z-ig{EPe*aMSRDr^?sS3XjW8chgtu z9-nh*P+cEaR-{OKsK2zehde*Gr6sG1Hh1`a@<5V`KsmnqszCfBk9o&YLOw_?dAwo8 ziiW3I>ZFGciL{+GA4O(oSI(?w!6+LAxTT!jl-D#BSksk`Eq6&v`^m@a@n)Qwte@ah zANLxy{AlRbl!mn=B{}5hCRbKCi@-eW>HL~Y1SSOD4Oa+?r=Xf&51Gm?dSk%wZIx54 zUsvBXH70hgw}WgKK8{)!Ozd^<*lv{FRXU8Zg<9ZWd=NF$_r5ij$*b;97PZpoVk8x6 zp=)Z2g3|*Yy~oZJwNUeAvD1L>dF1S7CncMjvQo4l(umul>6QVZxrKl-B6%&$wB7la@{~Rz*c6kK$qowye^TGuGQ2UPc=h;*zW!3}o|ccUh%6gX#13 zGNw#+T&{{$k&lH>MGd(P1Pd8C-qeU}@R(0XwR=61F)5TQ)IO@_!PgGfzDu1o^=US=+)2Y8JHTK$s1PBi9}Gjh`d24 z&VKVJ&X~QC{Dtyr_C8%i@Py^o>#}W`Z!jZW&FACTf;Qi~)GxGy@jeHb;=JnuMlNK_ zq4kBn%{&B7uV12EOBI-WV33!pPvZO|8uw0x9tNE=)Rl;xz$E1-QaTMaCCyarZ>%|o zBV0yhr%i%bULdpnGx01}3(xCjOgbL=5^SrSf^PZ}w{|zeND*LI`FojR?a%|5AHEo~ zmDIBasX4d;zn+cK7&eEE2|sX2Gd1K|)g&(|kwIkcSuHdm&l2};J?#@vgN3>K$vc(~ z%3t&S!qjC;h+9xtZNQn3}!I zDQ0^`KT=zW2uIpG``!9`^mc-~`zMZp`G)XiG~%9F&80&(!_j{kMf)2=)9tD_3!Gd3 zD-R)*2*N2!*s(3aq8s?+n=uSjx2Ns=pl*f&tqp{|yY(2CC`TukUh%`(32W5Zd%WkP zipZvj%nGWlJ=&qj76!;zLlFnE@I*<+cLpk-;w&n4qqz6sMTk_zx7EeGtj}N5$wRR3 z;bz9$O|1%JD6}@dBvDZns^1$c$oaXW;~a(+XQf2Sg5!1e^__tf!^_>*{ED2nJJJK!kd{>9V5Qraq&5FJ*m?Y;YKArbjsNMvf6pP>W{@c z%Q_XKzU4r;c=xl1^3%GF^_#S5c2_D|UNNgSoQ?|L?M9ACOZxk#aa#KKb?z+m7G7O3 z)E!}N#P7QQ?T|2;oNSkr?A^?y<+W~;`o(cSgC#6vBHivSv%iTVdH*`E6jbt%Lh6~8 zCC<|oT*@z0H_gq$?YHO;3W662r=7keq`0R0Brs|S-j??=L!JzhemFaLd9j`R#XmmmEgwFyr@kIDK@ zCWzV*60_5WS3R-yX4~0b+-7Ko6y>a4KMnuNV3|ovlj}DOhc$*h|1NjzS%rEQz$J*$P^>j4r)2@oJ5e zmzvR$+X>X12vg~ z1k{7cL==Ol^Bb(a!ZF?l6>ae8r?AjnuVN{&1{!1^Wt$7&W#7SM>w=9DNp8mJ3bPg! 
zF_k9D3yRgds%8y;fxgRcv=mYq=6#ylFUqga4$`&9=dETot>+Fw%G-SFLyP|Hr%Yab zH8Ol)GvrON#nA=Z*U52=9uo`F(R^J6%w?S~y-c<|J8E0>dvT0E{t@0nY_i5^HSSl4 z`8^H=Nw)n29!F?+5li+Jrh3MVZ-499sJOUX5p4N$LlxAQF2=Xh*07wv+vMyNaJzZR zvqj<>h$v{X@ukgVs_R4_RFG_6h%ba6#EqMQfNA+cf;sB|-4vv2Buxy1P`+XWrcHTm zAye|Xv10gD;iDrZlJx)uKW}7UYKhwo3mP!Lms9DQ)BHbYTy9M^cFlL&Nq7_0O;T#A zj|=xaJ>`{gZu-pcD?0Rdz@9Ll#~AcC|9rwAM2y?T!P}V0#WBIxn*rwDdr@)ZT*tfj zgF>~NuIC#HZ40FZdXuIiYvim)L+%dllsVYlaq^(9_qy^!{sae z!0RTD$f6=FU%>258&um}s(z!#w2OnnNi_CA+WhVU-Xqj)?Am^`qXKef zSYt0%@HbM6I_I&QTdw;p3S5rQe!RV&=MTg@brC#_ApL?Th8%P0!&btoYxBvxGyknbry?avcs@1C|;j zuQ!9r`|e$-E;Oq$gB6MoM1(5=J>G`{yXE%#=nHohcs(36G5wqLhwSwts>7SWRNiH_ z#gVPJ>RiZta_vxUd6@_|amnp$jrkeh)`KVlK1YX5&V{FFa#XUU1dLTi<&kI}bGZ%~ z!7V#3^zp_a>U~DpqQ+Rc{n6c0urNyBqdp0d>H4L>;19wiA?ad&eVLlyo3xruzvpQ2 z2)Z-`2}Y8W9$bY<`fw#;Ewi)4NFc*9 zVwMuoqj0BVRJU>$)-Ku2e>i7t(2WWs=cR`?X+_cza2GN&2lSYF=?1NBa`-;0Zf z=l;&dZjmD8T!kN5H#6;L)!>4bF)h%?$^BTwKt3^=|AIfCZLr&} z)7LeUk$W}|uRIDinf&u9MO`t&wIkewuduT9x!}m55#-4AoHf+SiuKD?!_`v%^HC^% z(zClA`^_RvuhCYo&5rHLIVMIrTQ0b^ZFcl<4F2J#CI9BuBMws0jg#e11=2|4=W3K6 z91bvommaXP8$~A18U&5d=65F$Mr~kKcFbCD1R~h{Nl*I*tg&EP^KJjfIWX9wj|4;s zYpS;ttZ{3q_hJ@g-4N*{;7p^J9;{+Vq{SPLPKK?`c}SB08bh-BO^RB(ih!}hIqtX zM|N}}u12?f%ru%EYykMeTztz7W2^byLvAPm=+N@7GhH%uF&!|znj^za^#-B3q{UhD ztjX&7cF}L@#$K?F*KqSMN zOt+lxF?l1x-d2DGlK5Oy6$4ek4}Z99t;u%z6v!8uZ=}%ByJt+_j|o1Z6eF?;*J-lw zI$8{?n!INXjmTPj75ZIRfgDOZ%xRy8CzBiXi-TK`dHGXQ+A)u!fF!!R$zF7FdgtoN zlB_R>U*j>&g*1YogS&hY$HLh|=9_6C31?vF`E7JZJMmk z@!}Yo@k`9A+Cl5%HC=4%rrUTW3$EA);ga1c@Ns5M$b2#ntW6XxSu&+ zAcT@es(Gu_sC-=Gx(oI8#(ZrGgjDMHkIFW`!iWa)$|)Nk#qplKm^3Qs17CBxnZo|TVZxYPU#kqatxxVWh3;(g`P z{o!z1A-&g|o8yQ>v~hWoPwc!IjS%w`{92|gHY|)|nxeX;c^j@h>Ie4CT^|XBu%VEQ zFXgyt;Fgvnm_{!l8ql(|goVa{*mPF~C++Yea$>V!^>ENier3&a|CL1@V@1@GxOwYI z;joAdQQ8mrvbndUsdTo`gA?%L)&~^2Z$DZXV0*5zfH!?r2Do| z0$axciQy5%${mLK`utBsBBQx%hLP%TH;x5vf_jCMGLvDIVBBf;)a1Ev6CebaF6E#Y z+>hqf;LUch!wk>aY?V1_Xb?r_qZPn$xaNgst~zrHQ^fbV1@GQ%`Vug}?#q$@p0CDi z&MP-|=+P56MP9+DD)TApazcK;qOBm&8}Gx$jfDEB*(gZhYE}m7$?w7lyG+~btIBtvsm@nXcl@t)y9Qw8lpFvL9Gg-6Bxx!q zPJw6k^Q1U{ZeHPsxUdvBw1NmKfyeg|!Dkh($Ok)GypgV_NDnlKa4mrvtjk962kSxb zVU!b3v5fD}Z9Meto5{)|aYE&-!X+b0n;+NSp_|Z%unTSy}le~_H>QR z^j$qV7BEHO{Q)u=L8vKtn)Va8J8f_weYIl7W!C|)HafixKLUID?3KaK0NhQ<&tmFO zF+_J(7a^4SF}Mq)yh(sN2Ti8&Xw2)XIp(;8b{FENaxnfRRkM>5q%e2@pw9AFQ6XGx z)DAyL&$;=&oe-S?oIJz;^jtcAlrXq65y@VGT|{DHHbazcgQK%qkM@v4t3Yt_Wkqu>G@p0MZ)poJ{^KDbG$LLY#*q6ail){paF7eRD zn-^5MHNx#CT6g;ipHHz{mbGw7h!QET`zz$0ccou*9~}XF)9Mhkc>_NnHM5w+%M^B& zxy7cu$|z~KfV)H)a|wZYS=JuT^BoL7i5qBgd*`XXY^v5Ks4|7AJG1IuP8h4N`FcDS zf^_*Ixj_VJi@1p@j>w45oq}5|wB`axJ_jYLuX2%?57IY3W)*wdSH^HBCe*4i)3!g; znCmC9sJI?pOtaRNpt#t-l38_P{p%-QTwhqoO~?1Fn(Z!4bNzOAyvxb?DgEXAFI739 z*x!)eDtk3gy(O&obQ&nEb~Ew6t&oDWXTcM+grc%Avd0D1*leVuEYbI;p3rcC)8`^h zrnADr$0clzfM4Cbr>yOnrv}6%YtsZtZjFf{P3I?3uDXz(T$ zW{rT;Ko>P?nR%!1a}6Fog_5ZYz)vhNQ)f`tyA`(h7Mu23hcD~zvQ#+A+H3gHc`++nMqU(=e4lNtY$O9! 
zKY4%KS!I`uc!~-pMsJ5C_a}u(YLv;fut7p&)pwGnk{a7fKxThaXC@-Y}Li_46*M>e^nu8+P@Sudl*4aq)0>;`Dxy{&L8&xVA#Ze}*-8 z|M1LdZ7c)M;mnc}&6Dw}@VNCy^zyc{0qDpnLRso^v$Y%Tx`vDc^13xq5iuiVJ7krR zi1~BLY@>I#0*!77evBOcD)Ks_0ryt>Y=2oA4Mf|HZs3rD(%&Dr%(a2X_zry zLTVue7r%pEK0!SD$+{2v4;(jbd$KnvJ9q!QKl}Ivwz_hHn>`pDj_hu9`-z3?)A`p?&Q)1 z74JzfUOsfw_Je=TChi|8cH|eM`@aL6yqo!g2MZ`_YQX*c^|Oq{ zF0B{pX+B_akzwmiyI3=X~lMF8eBfA^zOlMwYc4a>wVV`c`fJN zvZ6mEicR)nh`ez`%lv%)VF;$3i%Tu$P5QQoTaeG^T}o}P_DY;1^fWGMsiOtG5s#<2 zG@8zV-2HhD*4nSUA+;%Yce7V!b_R-THk`vF2g!D&)TMhTT6N2;pVK8}WQ;G(!~y>@ z(0xXnEo#cKA7PSjzmR2^a8{gKS)5E8oJV+*f;wn_v4j%Ob9wsylz7R1A+FlgRN3u% zO%yjXb6~o2kjKMKru<>p*Pzj0^Jr58C(=-5Lho~U^(cL#^{`6XjIW+{IR^q@puL3{ zugdg~*JEgQ!QM;r4Agw6GFIg8?4%`kCdMDN?^Ei$wVLmb1sdb(e&XH{7l$D7eJjyS zbsW`&3A#BOSrx=$3jT#Mc-A{Ujy@h6ia%>b{go#5TM6VZY{@+PQm0pF!idL~%D(@m@Pn z6&1p+$JQX=Edk)D8u@?~qPaAF=oM4Z6!!s_g)S4o9I(K4?eh7qF*I_+f4R zwqd@SUk?D4Se%w^$=|}$VL8G4^}>=j8l}*W0US=(m2=Eor?lj74pgvd5`$!bIqJMPyg z9=Dt3l3tE^VKA{0?dGHu?k`F$+zxEcE~a0;?ACfsbhzByJ_9IFdC6nr;_Pak0f*^j z!GnYR8tpA53&?cEDtAgI^bgtOqfCk0*)Z3x)@ocya6Y<;itQp6kQJ<8jkMOZB&>NW zIxS=Aaq(_Xl$1f5(lM3THMHk;3M{Y$E4*giGD&aNsj+!~g{6^{*1TqFdm3mzm~pJ| z+I=UrTM-zV?gok!VTgsHj~?>)e0wf;lyygpW6xI2fYdwM^*ES{}YNjZN@u;M^KwKAf8-zxca=>Qo6tA6h9@AM+**tueKT(wA6 zZ~GdxW8&=~l~bzOY8vV-9Wbb=D8a*l!&rZI9sRn4Xa}XFQ~bl9sqfqPm}4gki>b(Y zn-F%Clsv2K$U)F;jB99NNk#Y8)yAD*S=-MiENxGE-5O3=KUko4({233ea{rAr&Sj! zVp434Tv^l@6ZYnl-LRVV=>85_uaUvi^PA7<34~WkHkmO)oK5eJOU!oxaITJV1{7l6 z6`$`$s;@4~4Fj~rU$VzY&?k76*HST)zN3yI5k+LtYACxVy`( zuD8l;S5Ljc^9mE-GrnT|5v?b@UwEQtmsYL^Lxy3=Ss0)FH5wV}ajZG($hoc^kVHrnTH%IaKj|5V9vGG8^wLExC`>SCZ} zk+Jb8r~v(}@?trKIYUFEvnlXh+5&AxjeRX*Gg=sajTcYMrc)*Jm<2e_W7V zgy%B!6Y1@BHYmpHWjvy_MvuFJ_d&&AVDrMf(>ubL zdx`_nZ0wROKxh&Et@L`&phnZrp3z^w^xR~wkPx$j1Mh4Y)dz4$_zsPi@S zrIh~9R)m)DX%VlxhYqDX>)0UA+9hmMW+H!4zDj8BRPe@uAlc1WEF?|0!YhAd&}qrE z;XR*Ox8yLFlWP_>m_h_z8`5(FSynQBIH*!thOk*a(Vd71A|2aAM?P0u@P&@X7^fdZ+1zT)2Eo`^eCZa1(-M+ks z{H8AOQ=}!8{o6vmZI(wqgSn98wag|d$@$;>k?vA;nv@0;udw{GKQiGSjffN~#oBVD z_w;hwEu?*FRapo;IS3*Y=vIRajP=ZQYTLCXJoNY?G z@huxxu~)b$CuG#4A~F3W4uv|BvcHSvkcuIV>3NNoifnMwUAI#zbi;azpTiL zgB+qWwbOb}Nz(fO?iP2xTfDVfq^cp1H+v_{0whq3|C-|jabd8ZZDa`m<7sQ`?-K>} zZhPne>~eK&#QW2Qs&Cm0Mg>`Vw*bdv(?Tw}rn*&75Wj?=`$}Z{Hz)8v4@gbs@7{xG zZ=y3#h2%_YFf}~e{zByrzU86Zh94UmI6;&>>25b1?M`w~$^m*HW9`g1PqOAs8t?vv zqNAEfZ)15owQs=|RCF@0A|a@7P5AnB8;*GrL#_3&=C-4V4TvWfohBaV)50qHe##S9 zK+<0cSQ`+sRIgVH`r3hIj+y8Nz~;&T5})5Rg}i^m;bIP&j8u43ZgIUYT%vz`=Becs z?s!)Qk_f6{Z$S(Fhx#LTlf23CX)usuYeaIYLs=^D#{Nb=ER~o=kF}A%QJIoEG~A-D zW}mU;yOPn#%2;~rz~R^}Zgy5dd5nxRCR3@m%_!dFn0@95KzFU<+-V9LNeNCLaVA@D zL3uh{(E5;v_keVRrFfZ(%zo2z{8hDxPMBR;0Q#Ec(_}CUjhbF#P0~iBKe7wqCw>qv zOolhTfOtv&ZaF32pkQ%{pQ8!hX_&!S_B`#Bl=K+!qsPeJy+$%Q9aAUQNRX6V+JZ!G|Z8dY}Z5#KM^hu&VlN-TpwCFm|UodGEsqd~aFScJ*rpB@H8H zT;!)BG&$rTLBL-0i_J)mR5->b_CAJjMcV3OvLee@Vl>7p})Y;AwGw! 
zwlF=SmyS=)BnIU6NJcu-6CYoLV|=-%K7|I=gP@Am8S%1w^F@TGlVERTFO1q-Z>@Js zf%n=y#nyIQj~;Q~wmLS(bT3iCAd#(McV>)-atG}=)=Jx&cWGprl$yip9Fl-f?i0FG zGGv^h?zjD$3-=e^+-^Gpua`MUfEnW?K^)>i!3_v706zYHrdh(Pt~}Ob>>ru1K4jo4 zw#|)5I(e^AuBcynK6X5iLch7*zxT%coS7h0fr-F9-RSbFw(9*NqCxx&>n?8s2daaL2)qOvFp zO`ddrbKbw9>2K$h&bV7*oM zjvS%ZeN^=+6R1_SK;?j%0fb)1?rO&%97G-swwQgzB|0+810m>uHOs z(>4y59JC(sU-mFDMY_RrZ(6a!4}NfbQI#c8(-wSm2dI0lPKb zP&gw_@L>AdfzzPl#Mj|z1#dLt)Yl$^rBf9o@(P@C*IZ>)Xhv0GJ)PBBXwHAt^Uypg zadOwB-Afs8Z!9TBAi5etA*=Hat3~fbLiOegYi7qQK?DOCneR6bav)UGs@yl0^TBB_7Ym4<~ot|x}1s$|mgO%-;O_B?MaQyRIfmhHpWlx7y z?UFqMz$rK0Sh`F%k!1c_IY27m<)a=>D*3_x0@UeMfwC_Wy9!XDzl^i~avJBWjc2SV zE&QVu>t;d;oBc;-If2I2A66P!+}EY0 zbIYre+kVd+GnDmM2j~CtYGpmQ9*Tu%tUAVde3Ux%oxVx_X1t<3%!x(3G8>u0T2gTZ z2ig6(=G~qXj?*!d+R}HOl~4W0o1WH4PCyDgxWP|nzTVtHtUCkt6xsV+c_Q{9ticU$ zhC2f^<7My))TZTQw01NcFDM`W&c&dr2Y5BZlL<^iL zB18e=mi!pAwD=EW{n?z|pHYA8#=`#e)Sm3=c@XAt)XO)L_x5@5>>!Dgk=$)#IGWcv z*{gdMWkj`ksR6Y=I2e31*P@7e=5Ec&Z2$JBo}90q$a)hHfdP4Zjv zWtwx~93oF%Bb^9wq<#K>6bqZV`4@}8=r|)iM0YrrM+xG z6*o$EWd4kHmy@)oi!AS;gN8S|yw_uckA39OOZWn|@^LV^Q^6`xO^*c7s&OITk`rP_?J5AQt&_bp% zw(MdW5>S1riOwSq6qOKGSZ{mWiw8R3A}$@eH@No2cqXW<7S1o6fH$cLZB?v-jvbBn z3C9^HA$o>9UIwu}Pe&X3%F7<9TjtCl`J4ABk~cdsR6||es-nI;1|d`Pk7(w}Mu&;^ z2obyV;rQj0hnRgt#4aexOIbw~MeCVezBnSD(1g8%eA+$28Qhf}Ey0=dA2Hx`emKM`@%S;l6 zxR^nnKZ~>7-XP(28mgU^n)qjVaABGi;b9F$iRrH6ZbUxA2GxFM9CKHClubF%(^42u z2?f?TBXx9)(pXOcvzXvD_@y>Qr>w-V+xJaI+GN~c4(2RT49I*AyWSXAaC|RgxRsyu ze(4%cZrg0~lw}T78fjHVf1Zn=eM9LWt^G2q&8aH z-=)p5&$*8AyVm^bU26SH-5N(6bavufCGCe~%wjm#07VI60_)m~{QsJVqUU4+COau=Vv#5(9@ zu{`)r?kWZ=Uio4$GRL`0lP2K)jl|JA1DpI~8Sl?e#g50k_?4`jvLI>24x;K{z>E3b z!dflwlhUQWn1bhC_Np{A_(2k69qA4T;y(c@gzshQtk)B{TzN@STw3?J)nn<{!Uej~ z7=C){C%paXydlDwKuHD?ny9rx;sfYq@b^aG%1Y_%E%$RwEkxb?8!7UvJqC@g+p8cT3nrN6Rh9-tv*&Dk7wG`+9F zXp{2RCyc7YALi)8^jpy9si%H_k}93kK*U=s2P-U00eKRB;}UbG0joG={;QABJQRC( z;k$+s=#~i{lhx4|+|8)OY94YhVQDj}FT1E4ThgHKFkiJ~1*eF>XYy+P)u+#&qf>tQ ztYGK0R)>Ftt(MpN%AHnPCsPZj2Wj<>oi?u|(~vJtYw|NB%jfMj&BV3@Z1 zd&->^dAUiwlYzeTak>n;!;Oj|?ko{k3-j?Hb-y#ONyYwRFDy{zj7-TaDLLqd^&KDY z*d6zKZP#b}r#w%^8(=|y=9qUdRiw6)nW|iif{gFwIz(@Zn@(>5r5Au=-oRpc($jRP zs|;?IvC=`cbvD!4@;khuqtL$MLk|S^ylUEG7ifoEADRiRiQ|KEOcmaP8Y_Iy(oa5r z`P$&<@XZL|`9}xCD0w@jKfY}l0zG=jq}yq?qikmhUo;Jz*)1REQup5Ui5xM^@r?qnBanWjDQ#KnE|r*m{?NBKSm);;w09qR4}t zQWmEx1Potpe1;1Vk%CIP9(~UZ+PLUU%TZtRNL3+uIQpk_Y*$})Y)FWak&>dO(!)~B z3mvozj;oGjg)0)8BSpaO$HWT*Wyv;*oyYvSC&kY@uV*Es%Me+4H%&MQKM*hR^CJCI z#oW%e35nW2{o=XhP}wT}*y11AP|dr2qt7stJ`Df#fs^t# zcJN3Le--7;?VCeOjSE({kZw^22O{U-55iR&NL+L_@|ietBxHOh)yh>KaxXoF*>gG- ztf7Jbkg;iesY+By{XGqZf2W9GVSPgNn_3TIi}z)JI^_<<1y;y1p9N;$MnE^ zMvzRSxB87Ig&UFY1xe_266(883Uv=tom*Eax9URb>E;Bz`I8nKT7yIO-gjt_mvnN; zgx0}=mi zUyes%44{mh_%Z_fppgfRi5*YmUB~ZXmzj*Lj5PP{#dmi4@w4@|CpxInSJ&2QY=L~% zlr??%4rgw>0}|B)g45SeyWWQzEhbGsF@X86v)Nz6s=m-AN7G8`bOU|R&*+}Zx#TBN zj^#3;e5os)Ha!!yA^D81h?h_{B8-dL@iEJsmgtRtK)a$r3nvwlkSE^K_kMV?$f-re zZfH_H-X2;?`w!n=tXW}chJ28leXDFNe1o=N-Qz9tJ64DVp#kVD#%dp0J4<3QZjyp? 
zqB>demeq@U&O2%3(Z9{s*iaYgTS}YnCyi)`I@%0Ya1o!q&tXd2r&`7Ff@|`v`J6-J zZ>iirbn38mJ47qK{nCs59)jv#mKBsL`Wx?q#^0uioTtC-21G_Xm$^+AX77FsZxoYK z&u{bhpI|7~eLODzO|i0qMcQ7~w?|oBkdr*|Qlhk9+R?(l1)-?c^Y*Wb8qQYrvH|K( z3$B&Ea-0{26i##r_r=x8J3tA=S(RoEMdo*-2+}vq+Mk!Kiz&h8`&Ah3r6^}e!elW@ zQgZQ1#RqQT7*w@-k?yHgNrE}!>wV))i_PJX!^F_vEE1*xIp(b-@G~3*&lzCSUri9H zwa5#T`_{#}%66CBP)VhTy@a3lQrRkv=5!EExjB|Yf&S%&)&Su)!sT&VJQK|~ct25k zmNtuQ3Mq9Pw(LKx?{oTi7;6rUAu6?(DgkmDE9strVn(ZwywCtWV01K^SB57~R!>iZ z{w|myF8Kl_RiF`KaC5m8ZuA&ytj~XA-9n`)YWNL>!sZP@H@*^<_L{t)dTV-h|C_k= zTD+yzMZ6Y!1(MGXf9{bDSjATW{qtWEI6pIasK$aOyy=f$;bN->8+>XGFDi{XrZL#X zC?#oC7mj-)S`;k_tQYO3O%JkWZqs@@Huzh;D}P`kV7I+6U+L2gXOH64m94JvX@%QN z|HlozxXy7lNu3d!$?5)q`fFbBRdjN&kbXG_(qX(=_6)_%NdQ!rYljIZ4CL4t7=_xM zb>$^8_Cz~qa~}&E5iIUZg`B|?*I3$a$)zZ0tc3-Qcf_{7t*sUdk#J>Wjp)iY9aa!f6Buumia>SjDxspOQUQHM5Rd6kiJaDGo2aOsT zKUn zCE1_t$*obH&R&~|_gucr;paiTt$C@-D%LM$Vhupm|1Z7&_mGO;x>SX7)3Pvc`W|%! z+L*ySU*G4@x~5_{oDe}3ioeLNS%&?!i;02^Ss_BA z+P0>cqA(P>lZ>{N4ix#LfUKm8ERt1+;D{z=cko(j#^Jq^5hd+^H22@%UesZ}4Jv8F zR1%&E#tHh|pdz>t%V!2Eq@oS|T}@NGNYj_5m)#F>6HH|~!+ua*(^d2o1{(!oWNfct zn=j+e;!VUhb$nAaoy*pX%?EX7C*fn2!VAvzHFxEiISmdVHsj8iP$K<*E_iKZIKpFv0e?#Z(7Kb=s(s5oN%&w)VClOUi`{`X)3HWU3XIr=}QEsgmT@`wMP ztN{7{;-QsnJaxr{JCsXJ>pP%nh#%A4!?rHD$+Iy-aO?Az)AU_HvI*?bsxb=6;RUuM z7NDFF*W4sL8%&qMrvKitJ;y0yDUc=&1ww%n9L^SLduqV-{LXGV*hBas>Mc zFCh>8$_xZ&M1O*7AaR>qc1?OJ2&OEnRHu{C2dcbmOh<)t?tKKk{e;iL#4V=o#9{e+ zL<$=HTdM6O|BT3gJ&JxBM!cTOvKx*!WQ&#v_2cBG)pkB7SCNX=7SLNnSUT~&A>F9m z`ZU7F;Kd8`-vifkaa5i6x2NGA=dNlk7oLSZ;s;Byy>YADkK*CJj=SlqwXHoMAoK9o z6;yS&>40A58m3{+w|`j@rzXwpHnFg$B?Ye5%&c`KI0Nx#GJ!B$ z&6PL-?ew1`B>V9#lPBJ%K`EztXmiJ%5m91#h9wU%&@nWA+K+{VYP;8T0M@n(NXh^B z;qrsqUcDDkB>)4{(E61UID@X1od0i`81Nk+#v9~rxY=SqqD^kgp*wWnhaTOXHK^R} zl4w}CZsqe}TV7TIrL&v^H@UejorY_J+fedST5}lP=YpBijskwPHisVj(}yYh#^W(4 z{oTRy1>O&>M*iKZ?rN<*(AT??%q26;BN0(v zV$k>e*BQotWtRt23ME5LPlW6&i3vi1VkWoYCcBdut8HxzO-0gMkffzK9R$^mj_jF_ zho*B+xvx=p_^1hdk|iY@p9mY?=ewo8dNi;=K_DTrgww5gR?~-5sqmCHN^=1&x~Aq> zIm0O3;SH&Sj3qh&@QrnG=B)1F+4mM?J8nSq1>cR=kpz%)hqH&$!z!0L^f$|t5tVoV z(~Ds7D3Y0IeaHNCqrcizrZdgU)&N~ibV|6J(#8n+7l;l(U)c&kj=P5Vg+B?B4$0IL z=(vp?GkuT5(lPVxeA;3OHz3B%>;$Cw;cle>u^??-9Jj1U3BU&t-Zi@(6Y$o}x59Kl z+%YUIW85Eei}J#PfKJ3}_~Efc$)AM8qacO1~;{ zA-DTxs$%TdRBXg8G>?-*aq88d=}u}Q)UIpiw)T&LHCn(4Vz6xta?Pw8rd_#TSn3j) zMCV>_vCLyPkHYY`f1Kht1CBn}a#oLIpE^?kCbD8q=@Pg|?h3)En31U3_0wxtFLN14 zW7&p!=&(rUSCf{r5-)30C|{k(S>l8G4^H}I#l#U@b@TRq;Ck0i+|;*#RAQ>P6aR&2 zq~DomE$ZJU9KgXwna4~wV`Q+^_L|Oj1n@Bj6~QWpM{&EgKf*IocL7rZ1Z!N5Uu+b7 zs=TRcEnCbC)mx&xuXH4vZkvy|9Hsvh-cJTE{_btaAm9DXCGNB!7Fl!AC*>CIZAUWq za`I8KeW_puvEE4-C2w@WY{9v|>4508U}3XN`wKVkLgOi5nBdK_Rb^;&>aOVx=m1v9 zL+ho%KVc8B%c3jzC0?|p;kg|~ptZ}mg34lUTu@8&G)~R1Ltc|@a`Ob!<7xYMAL`LJ zq%~*4ix@7G0;41|52B;H+<&ASe?KCn5=ZdzRmS!OI3HJTq7A{A^fY%8?9&)}>`kbxjiJ-C4F@Ho!zbXxkG4Pro_Ydc6CiBPA+HO^E zCK8Rb+UXJ0<_DTM{?<$kPQQ$Q&jif44>@nKnX<0^Bde6JGSmi}mT(5DIEA?_D-~w< zf;v+xJB6vG6u()GH`m-IsK@P|2_}Qi#N0vWd*}?_7P=*wX+-Wp<^ByW(EH0JmE#D3 zOxB;6ze7#O%+f;-M6q`#bu5b|1uR-VGihS?J+a_X@77NLIh^s&Q)uwy+{2!hEGQe? 
z>t@ECpV6}5`Ci%{8QIhHP^^I`R}TIWc@p;rNV4cwPI>d6d#iQhg6*C#ax%{%$UI_|!69=*@W)a~o8V`PxBm&Y%`%CKK|A^8Gn!jE%O;A{ zPKNub$$XyDK<}y#n+oT2nan?dyhT-hK>%_G!UEu~ zM9Q*?Y!GlKri|32E=gD*wg;JE;TwUF=oSOHBc#hrh9g(o?;_I{>}fGW-VFR;EWudu zh=#bt-LDYP)WFwaUVs#=p6~`a_1*>~0#QU?GBL-|fZA!^sA9R!AnxhWOC* z^si+$UXKa)g+?2Tw#Zl!0YNmQp2oOv?1bp@e!AFx%cHq%Zr`=zf5Pc8rjpXk>1hCm zGvqLw2nG8_FZHTmrw18hN!{&22m-(XpcQc|A{F9TC?Wfwsoqo-gM4h3D$P#*k9DG_ z7Y%ENdSNx|~izGdC1AxH8__r~m8uMECYjQ5}m9 z*<@T3|E&ceqI)ASOLxGVr?-Q?E=%)kPt1oEljj+l~w$C%MQa-t65!P-(Y>B1F7;cePQ zI}^1%vqz}g-Y_2z1-m`>B!<${x;}IcrroW; zW4>u_5rpZbd)|44h>E3fe&|5QH@@=p5LBq$?JyU`z?aB3zn17N3#SBLv<0>3(G?EP!PavVUf zX}eoXL54Ig3?<_ki!6BDC#dBem)}PTl0>F9PL7u$oWqH;^()|?3EY8riH|=QN-Xeh z%83kv5O zefVEa_~|Oz!iAA+Y;IDdlN%xzw!X_0H(c#C@f&U!leCXqxteZZCM-`l-S}4RFzw`r zsh(f4PDeJUgxR+=al;g>jpXoqvqMt)oK!~C5wO1Oo@$U7Snbb!8wx3DghrmR?4((4 z!WMUIUZr!ZL3b?@4*yimt&aE@F~}u(y;g}}Ya6dj+_};*92}}c_B{VU+>&>+I`j@_ zzSHJ3S=1NpV;s_%TN<>2Ka39N+lQT0!TvAm;;?~gtk8%_1 znikyETe(eX8=ur#p9UoJMu5TKiao}rKd4>~QN#g8%sYuMo9rqgt#RNV`DtFn3gt~J z)eEc;*aRFN^cCY?b?ny0wDfPvCO0i5$DUCQf-}KxE^S?s@4ItZ7&_ATzMKVlrFXH} zEcR#&uj!AbHT@-S{;@`+->sGE#rj6M9Xv$U^(U)bOlFe#F=lg~V@tBVM&9bG>c;i( zz13^H9ch?%l}9ExHoU#U_0ZeD$krcfsbkp`D!K3f&`OBt5%hdNv;Xl}QAOJld5W0p z|B&~UVQno@yA=vW3dP;6#oeVXUaZAkixt-pT#7psw^E9`ySrQQ;7)+xPI5zg&iTIk z@BY8T^CTgA@7c5bU2A4f%nj;XB+Qq!pm8^Tzkjc87~7Ul zz22G3(VfiPK=1la?um23ebr*L-KtJ34o#|Q1YdgTmysZllDDJu0b853?oHQ50I#!?oGqeQ&vLHeve zc*c9#4^^6OxJ_5=b0#j!F^%i(Zlg5IYv(w|#ulUk*{pWIWUSyWlv^d#!p13unim$f zN4;BeM=*Fczvi9?G{051jQ?C?TVbSZDSV<-e=cLH!_m9jr+UTwqkZx6u5hU#Y3Hau zwu1WH(v7xGw8_tCh&IB!OS}E2ROhVYL44gwYRwvOB^1ULVJIe#-2zGJ%So?J$@D5U z6!Vt*jiXfL4ch0X`_$39vfQV-A^G(#oTw|jfieGOw;qt~5=eC^+}!EJEXCmhANHxq z`Iy`RB>9fX(U`PuAMSw7uiOJf^o-Fwj*B)_Q0(xbc+Y~XH-X1uY2`?mFJ;lm(J7Vh^ z@RgO|a$i-Q{dCgtSg$Hz*jVB>&0dZY}U3@j#+td3Z?Q&{$P3fi!6)|G~>Yl(tN~|Qp3wl_F-2lBaXcV6qJOKjiZ9N zq?X2b#cu-WvT{#9tQAc)e1c_I$k1}~M&Sh2nz#3Mm2fYwjh3(8 z-LB(^C^?)-+h;vAH`|jZ*4DDU*A``rN@`7Pu}t#3`ErE8?6vlBjJiynp>^Ly?m+K^ z-BQfgZm>q3v$jaxKfF=#lXkt|jx986ON2D>Qm=33i{8lL3i^YW&@oY+zAG(88)jZ`V>~f#U_AK@qctL1`BGf8diiEh*1HaoXt4JKPp0i+Ph`6G zT>Zh@+%M03Q4&B`oNC>-(U+H~OuHv9QAI$Vk?Bp>_gw+QDDoflD-O(>>$m2XWqV*% zfSnWWWgx6izc+UBCxZ&js}L3itvXs8`#K`0U(+<9Rt(xECPSGsX zo!@E`8nk6)(0=->i_B@3ntR>I3C5L#sR_6(S<=C!WTuMHD*pQa3 zJK*eNpTBx<*-Am)`JveG#}hIPPNf`r&(FJjs5}a_TZU%`f?dlAWr=Jl;#e3>{64}{ zgeN->IKQo>#6c{RE{6tZGrGSXz|T8{eecDW?+v`y9%2(><}46Xue3A#zc;K&E*E+v zz@lhB?QlA9TkH`s?b;qpc~PZC7&D6ydOgL*pyxE%Wu(zVF#m9U>=mDZ&-jo%(F_M(0Arl+wEY=7sFU2jx5mg$K1^|Fz);jnJhoJm&ZJnWceN z5Oxc)3NqtR*AlrMTglm?H14yA_Rmi?W^yVa`r1shqZu^0qM9FiOEXy){31k3bTT#F z6$SI!e35&Gj~3$i_XvMkrMPrX*jJ%=xp2h3#6XX6(SXW=O~IHsHs<1F*HVop@}rQdh#P z6oLrK&itD+L?+)b!cf($ow4JKcJB^q9)<~QlDXPp$$T3%70(^_S+aF+Xj-{5AEw#l z?z=;+JF3|EaX z;93-W==-us$m{rCFEvloZ)&&Q1 zNk;Dj%J5FK1}H7EPqZ4oE@i#Q00;_+Ml7(qs<`5gbZf~hD1PbK%t+X2E=9_X!4782 zI8FnB1`D#QcPjXtQ(p`5BcGYUANb-PJ*n|fGRmOKhdCu?#cg@E2AwQ7Q6@O9t;zH+ z!sLTD1i;{C8*G7=YpKP?^t&bTGlpvs?Q(nep|htDY14av;+p*^TCm$| z!#!4UrH{P z6u>U*`fPcza;eo1F>A<|{0^rKWU@ieMlSKP@|yLp~DQ%uF;KWx9K>)4b~D>}(d=b)wP}9s*TF6hs2=FNX~*JUx?E zpw($p*>_jiXqVo$U62@zefw?2455c35tDz?WY@SM17fgaSQsIP05ZE5t#^+Bv*__b za*IPrtLCi`&KyWgK}^UiuV*BNkf4qnNeDbZHf zN_TgGxDdHCm|fXOMub)h*KRU~QHi!Oi53Mvw(t7jnUXzxn*mBSWzNFcBJ-!pne}<& zl^KUAI1Q!poEUC3mg_FM5;sT)?TuU%$vB+fu2}5xgY@2zaoImqzIcb7qLKZ2I^KAG zO*UVO`bs#Fh?o5tU^aB*$L6r1pKlnW)a&xf;KN#v_eNfP{t=($XO<;v1!OxKF(QOd zEF82kwQGKGJyZ@Vl?kqqH=*H{e9Un@VhCyiPy0tFkq);x@?ooyt*v*6FB50>yE_Xb zlfft<5eF)_tNKg1u5NCP6gz8rWS?hOQk5MFWl8A#>};k_W3<;R2c_Z7a16Vb3c&HXsV*1v6s~-8 zG1Rks@NJs=W;h6&2SLi|ODj6&4a!P3rey1qWD 
z$EIpMcC=Kis55rkO{4v;n8W)uO9Tp1*I?nHE*Xh1SfudxWof7<_Q@VzIEKaMlaLBj z0&F2f7>7p(Lp>tS0`wVv_!FfKI(b=btTr{ZqhzYViVHUj(D%uJP!wrBbt_SHBtgyi zw6WE!(X41c%cBh5yBqsH*i8;iB*R9yR~-sqY-tR?h$mNTHZHm@Ztm(9b6DT2!!H0i zNl`1XEh4`zK(&ieq~8wK$Ey8|IzlX|l_RhNIx~CdLYE|&$WE{gy|n&FBpdVuRbKyvTv@C-B{< ztYM~RS=0D+HdGDclMLher0>P(W6Nnn?0X$!GLm$VeZxpDlOu|LT)hrze`#0w&CuuW z3bz%Hq%^a$3>QO)9EH-s1?w&OH*LMV8~tdV4{q5%`tifpns1oAFL1FKt?@<*KMjHh zKPQi6hkZv_N5?N3JYBPA~_-Lz}T&!Ogx#aFrCBhzZ z0iLa3V~K30%1y+31OyH7^|A^2Af`CgJ-i{pnQzC*_6hSSzW2^$uYa2%II7VYi=-)f zd5Q2N)x|X%!4td`o8)O$wpez_ath%3r(7!`BeWC7)%7)2f~(GFL--q1YdoE|^i3TU> zXcRX2t%OtEJ+J%I?CiGkniV&+0f=~&P}6wimZPiS+j-Tgl|eG5r7^#G{b=PUpWZ=d z_L^JA<=_1H{Bli_r7(;!o3M&%#6@^3b%))^NHwtYvg+%J!VZa{Iw^Fe@CEH~kA}bu z^cPG^vGRgQuqpGVOpaBE(vLdPvSaVo-nC@YN{(WlTPo?0&*Qkc=-cIuVGGDKStB+KS&29fL8c)e zu1Jf!_ux?8X5Tge52uJAh*~ut4c);F38i)H_L~F&|DZ##`x>qqzn=aJ{C*H$I&!9} zr@1XWkU#nzc*Ll-SxTu7I4Ra|i}ZT9O>C}uG_QX7;(ASL(&t9g@k6x>K{AbBCttJZ zlny_0=h!(TARw~d!*p<mX1+3Ud}>K$-AhMGt&WL+@ynvJ~i z>e4ryq+d^RvulV+>h=WPPlVTx&AAI1gQao#$FUceUPE};uL>h;q8Q`nY%MrLyYl|Q zL*ItS(xRrTVB(S{tX}j_W@6M?ACf);$F40X zm!@eJwWAhzlI=AtyV3k-5G4xeW`|7I!p_xqxPrR7_fmoJdgoiBQVTD(v{RUX*)0 zMbP1n?R@7w{)gS)!(E1pE(ot+J|o)e1Uf449WSM^YCYbG_t8oI=twHD+d|x~Pff3^ z-gmw;8G!bg6}H^Cz~a~1j52P#Nx3%e16-7@m+Dp-#4_WaqlZ^} z1f9DLka;sYCiY(s@XIzwprd_cCmmYb=gd^3o<(-s zW!eK^&xMIOzS)R}CQhRxyo9Ly!F!9tmq#nenWkhoX(N>MSI=HV2=D#&rb6W@rl^d? zq6LcJ4xQF@vJ(b*W0HUPsUCZ711}=aF7bc6$#E^I-aY4ge+8UOLfzcij`p7L@4a$1 z>mHt_X<4lL9rwgFuqV2bG33R>n$xLEJ>toa3V@(rMoRzv7@+)Y!|n?E1s}2jNU5(@ z%-B0qX&daAM=~3)`gY7X#&5-F8We)&RZl8{4go}H-)NM^%fr(}kXL%b9b6;gk3|;i z0cZoMA_m=Eha^YHWL%d=;{B-P$IIg%(S9~f6l+#ddB4E^69J}Qk#OaATMG#b?|~X* zxvLTPb=tfh+yYQs4;McS6?01b3C4|vz9Tm|qho3HyCBlyRTOcj^Yt1sYsSg6ud!mC z?IO*IPtT*MtzE`aY(9-NBn*EG(xZ%F*o}8)9MpN|Iw!YCM z0MDX(py$J>*u9Ef`r`h~R-&bZS?1-}gZKDlUx_`ab7ES;k;xxEUZ43NijjGb_B9)E zl#Owl%2~mlQ9lb~{DmizEVbF@k;L|sJXbB@MB@d1WHbkW30qAo_9@R1xkc*k`^TIL z-U<~1{0*hNokYV{Dym*6fYrPJDR}}*Bl$KZnvGWo;8wxCYR?VC3)h{ByS<~u@88o< z)2?qAd;u$Z6WcO5*YVU~Q_f!NN^2%;v}W><+W^ASEd#wqpAK1F>@?#0AWXpqGg_J| z{6{9j*;rXvi*T!nkCIfu8PpS`z+Rb(Q55K6_+?etmHW~F{$@pVlKJ~8zi+?GjPKzr ze*bI2_dqkY7}js~G9Q*hj(6+V=?U^9X0)ev0Tp@-y#&8Wj_Xw z;9%*9eE4lxMS#3!sHF>JNOxR@#}6YG6Xha7V|sKCeM348rnVm!bL$GslKx&19IR4^ zf)U452B+@{8t?}{3~;-X@-^_^qra)9dE}J-vP@k3SFUQkna0c}OF%+R%aDJzC>~q0 zCF){jQcD9ok@)Yd4-6z|mhOpn{3kkzqQ8G)?wPST-Z~5noPdA(^2igbR9pPj0Qjpy zA+y4X{b`UCTYB2agJy@K@VED!Mo!Qza-u|Su^~lRqrtifZ>fABE-Xa0Z)S| ztmLybV;lP)0vu$O;i>@AYP=$my-drou0r(5*_jg5Pj^|^PA^k%gpV2C^A9zIu43Qv zZSMW<>#Vovo93 z@xxjd&PF&3TdiMOz`v?)s1l)m24QL)|IYi;nK2{hTqNJ8&xc-c%P2Xtj zoD7$Bc>g+2{Qij5qc5&|=Gc+)k*yAlMU@5`aH*g{9P&zLyCkqw8PNc+z|}Z4n4) zn6lXY0(A2iW@DKk`z-){i`fR2yKVLyMk_UV9_9V98^N_q&iB`(`%cl+qb?k3idG4O z%@#SNq{j3N;WB!ZTd<-PhS+!fg%Pto${BPoaNP=ib0si4=|L!r>? zTF*Ndx61>~T^MgAt^4TYR2io1RYuiCf5Gv1zk7zsP2y|zHDw3?BD!Ez%~}(^pK$wm zl@Z!UAXy6juQMj%nU8q0ZZK-bOn&@FZRht_K4tc5R0hHB$EWI{q=VjXUfg z$mib8*t?29rG9jfFCL^V7C;Z2VH37Uen$fnO{}lPN;HjAt5x{Gs`8 zhV&0ujQ3JxYE3?Wu1Ja~86lU%Y%*nz`}s5c6a>MDwq`WOvUD7B?{=vAy41+M_HfJY zyubLE{xuL zMx*~&AW{jOtlo%pE=vNQz*HcuUqN((zi3EAV+|-sLr~-KX&hK}|oJ(XrO(pw93#c$Ut(A*O4v|<+k!h~!9rj5qVve?&>OEZHj2!nK~&q6+kcS0=e)Q)J1lQ5sCM2g9pB7tRQVg02j@BEebQ3SKNhAf9gonp47HYp+bzR) zMl(1G-|B32I+yj(k_gA140lUPV9E*^H<`qFGGsni&X%sU-U+c~S+%C)`HDI7{my9^ zk($&iXhjoyBb7bR#h`u2W^%I*&A1qo;%p!9+fE;FbN*xW!WXy%;l9Ks_!l=mVf(E! 
z`1tfxiDI}Hqs`r6;Fi})Zhu%Zh}*Nu?qwiu%PR$JOuQ=H7p-%G=u3-AAg9VDWO4j?CN!Rf{U_#8 zd#)HCy7Uai2AhOB5i4(7#djrT?ZS(hZUb>L0VVmDK6OLlIEU=<#&c#X_{BR}`Lm(C zSy!fPwXqJxf6JQEg~et`v?1rT)N8f01x~E6MPyrVskgGoMpoaG^L0(BeOsH7X-B#x zOL_obg^XJ&hQ%9(WqSD>)0$Qtl(AU@fyo`gl@?QqLI!3`*0%e6t9Q7 zDtILin_l6~XYWC&;ht^*ilX3os6!PAIW9Z(s(oT#Jf9;)dvLqy^~lXKC4ox-+^?Zn zt1pWm=ftPRtDZ_uccz-D9c&O9?`{F&*^qs>ZRaLic6LdZKfaCR+*S8uNZfBU}d7DNHyHdNMr!{>% zVOz^&bMR;5H86MHxRSf)Y6qG`GM_Q!J%%kR-qMd_gtg3PSa4{R0*6J92+Vp2JJ2=p zUKU!ak*A{vY?uKVwqDl?wQJ4l7{U3m7La(d2WkdCu>m4siepvSIL63W^TIuTIFax| z0_!7>()jFuPJ%6#8f%y-T-LjhB_qh>*KaRWm1gCsw_ZKdA2GU5Gdt-eZgb~MYAUY> zDd)`RGA+huIJP4vS12N`Yc!8(Z0#uZ$Jb2&(L;GFDrzCh4yAM~x zFJy(^<^+RB!_l|GKWay8*LZo=%C*T5VY~0luHD?ErwekC!K;}T+FeHOd z@^Qu6c6Gx$b=Na$t6}KYl8gB06>~+N>gPlLTL>Ci{p>Cx@IKS(x^%o$vPw=Uy1Jtl z#ybFeQ<#gT#|Yp4fH&p@{+lU(fuKwL0_QOMYs3UrZj-^Psl&G(^ir6(wn8?9z17JH ztp?9cMHF4W($&+NT7s&knMnnmzwKi2dqTG@oXe$qyJtJv})CnuB`K3jLq<> zQ}@7A2KcR*q;CLw9OO9teOEu->v_J|ZULK0*=A^-Jl)eJDYevQEhPK4e1}55bQhZ= zH7y*4IjAqqY8d>b;*juyUgS3Y^XzlQ$*ZUCf^Js<&^GWes2pRC^FdP&1w1^*l0c!c zQcfRMJosR_Pg@xnxcICDBV3ltVdSr~xc{4LYPz`EUNY%r@)e9b%)U#KmVdK<`~7r} z)cZR;5$Hjzk*yjj>5i@2N9k$;4Zt&j$_ouQIQq1LCnRwHqFK4age=xvL}#vC3`ZlC zzT>HEeagU~gam=rH-C)JD8D+x5Dw$szNT|PgJ?^{Q&fk=^P&Vg97s;7_NPkHd*VChKlr^@^70$I{QZ*++e={#oc;Ocw8F&Va z3|GH8BTj+8DII8^!SnI!zb%GXHQC?rVt;z${^m?|0xt%I-y14)v0qU2Ppu&sr(`(B zU&>8ra0QQf6P_ZfPLRboz%xCvsB_oq+|)ln|Ivk%3(S`3x9Lqqy#BZE0`x(`fAd4C znm|9pKjgw+lq}!;mpH-?0co8`f^vKS^Z2{N6&1rO9&M za@0*;;g7deN~%YvR+>}t|J;_d&GI(`$O1pbjA{9@^nEO_qtPy2s7@VP9PnBu>& zCPQz=ydB`~>k%&*M+t9Nmv(lP<07%Z-BGoeab924HP2@1!H@#@&piI4g%=KX_#?^4 z%fDDX-|pzkT^ypYd(OpLYe%hG z>qVaXhX;$_`IvUuJ7|LOok8Ai39T)Ddne}Pw~@(xmmvM3oPF}P5HrKUsA;9de<@Va zQBuNEFV(Tva@iWhjg5`9iedzD9$>C4hI)7?PPx_gwaSANOIDR-!XF;2BB47~%@1`Q zLhYYun7R6qo?mXY=p1f za)B(%&z9;klA$@GIo$A+=)c)j-KIz~gyfM|&9a7?czY%AI6ply+FkVS+W7qhTFDpC zG396R$=3zH?5Xg|a;Iof@&8nTqS>wt>uM9e6Pm#9++#&`{knf$_IDJC4gw#5oq`_&_DPUG^ATJ zg1P~0#4~F3JUNrY^co>S=o}#Mn3#6>0A7ZM1;Ha9 z{|2Cs-7>=;rXbJi$cC3sD_t zz}XAY#ToaN`QYu5)UK^F+63b3%<@}uZ9blB&7iDUpcSm|3N-N`afGxBLn6~^^gZnJ z;BW2;dw-(q)Ai06(erzECbX+z*tV{5X3{6)Iih`EX6()7Jd*S%imB^LlaEm1At({) zJsX5#@dcDiYaRPO{l%U z-Cg5Oj-o^xG!Tg`8ebTmk*&WDEq2wrfw;Dg+{ch}wwbJXx3$B!nR30EXGw_Ax?Dej zU1!*e>I|8-z5lj?RJ|_;fP@u8SCw1a(C%-k-SMI6wqb?&#%xi%TvfMSUs@s9l4F4^ z;4sFrp{{eGhJc2l)7|BsFvMJRP-bK7w0qOBzAg^RzxC0EiN}fs1Iq=CzG*uBTpPds zj?U&_4aerIGw5^FgpnV!JvDMZ{PVWrq0BB3Ux}Q5Zf( z9a6L4R669Qb=rGki-lL>KaU+z=(v53VKQU|F&d4ms)dQJFGkx|vMRS33Pmik9bFcGDmY2{*czYNK|~5@n167WUi}E-OhInwaW~W`sF46^Y5s$ z26M3cF<5kJs~7_`s!-~$fBlHG^IU~-(BeCScQ!UGf-VP7aQ-V}$7r_$ zRy@@UFW`|)y5@Ru1uE&~#ZtCMEwO)n*lWRL5ImEyJP|(2{9&G zSGNS~Pkf2_sZ(#4r2>X8*N9=@h5zDa0*mgTENe^mKpeen-IS-X2J`}5sdf4N;H)jC zN{u{?sEGh3hGN}|B@S48du+fWpHk1oUjQL}p#-m$7XGfc-{1qD(s;wku$V54KeP|o z=joZ6n!2|1lftb#OYFLv^S*38tgukQx~pydn8#46{#QbU>cS&(^i--na0tw1F-c=O zlF|e3EJ@T`q(rNRK^d5w+15FC6+6)U5<$W6%&hBrdW0hr^-d;Wimlu zTGk<(qSaUugc{hQ%u&JU_{MxXZZB-(<$^$PpYfy*S8oy)GK|mvIO34->_MET4E|$( z-h!NZUg@d@_;1>uXNo9mu-U$ZMn*K1$MI+PR(8(O8QC+~is;DD|DzWS5pHmH$J@R7 z_O$xzbf`=mU3_VfMz0lF-8NckJ*fk>>qCjB6O1Ch`W$Uigg7v&$z-$Y>wz7XV0Q(l zu)k*}2Bq18Io>bgEixX*EXYR}s3kJyt6ENz0T&O+Qd38#Cy$fLXxry57P8djh>9zn zqwtPun?rXJc;S=kz_{;8m`n|xBm+pC(z5Cle|^iQWDXqBs5FYgZpH$h8LE{&t)SwE zj|W7m{e+z|nDBrum_)CRj`()@V(C{lxM6vnnfe+co$vKLygUpnEZu#PE0j;dy83? 
zhLiu^QmF-Q2r(bkQ*;a-^~0;la%W&{j2FtH;bsJMH6l7^ILi)NwVxbrjk8IZenmBR z^=t^%>~o&!>*q1-h;3CRj+Z0BXuC?4H-@JNShEtTKJV_!_PPIcVa56r4Fi@T-byO= zrSF;2B{{?kdi5I99(??aicHr)=;i8#B4QjCH>u;(@p@9qDWi}tgfZzl z3BWr49CEvp_yIKZG$b~<`I)Dvq3!e>T{ z$p%CtF}uva;5BY2&js~K$Rt9y8wb57cT6iTKT*>u;wX;Ye-WnQcTurl?*r!$-tI?1 zM?#;DD_$ff;Z8MA@7~(dJjDmBA=zYevb$#l^Y*;(ux8 z&u@jh6!cryYx~s8Cm4$*x^9^;nlgR1Uk$LDjFnTbAiXAAS9XF$f&&gMnYheKc#hp~|CR$z?lZZGU(sTn@9xTnk+qq@y~Y0B_$7A&w%Ba;O`k(^ zM!)s=zhrx5G&odx_g_eoFZ!Ng_!b@5L6Vc+AotEu_77ic$&B+nQ^tQ9p0spLa<*^U z1eA#$@~PbRn$WXdOK3!Ayf5!sm)lrL>+vTLBeB>MHnik2K7!B6M~IbGEfX^R5DxL= zOqyD(eB&UG7?aQx%!vF;Rm^Yi_b?@% zf0au7t4_`+-Cm(Yw%hQWW~-HE2dn(IB$3gl1@JJ{oHPJt)pT>XSs{P7F6lm6+U;yJ zxJRt89)^C?MN&=Nlrpjg27+kw636Pk|DC1mk0^k{EySgj z=1*g+jFypX>a<&^&%^b!pmJEKq6O?a!|4Oz* z+K`@*=dJ&hcR3XHl6X*CWgl~2aZLjjsL0@AQ^{lUX(uiY*T!%r7cJR1r9;s=2W5gl-fWqgq=ycobxepdux#bmr0{08*r{ z;K5Hj3TUuQc8UYxpUucI?6j;zD3JhiW&lMWg_R1gjy|4su|Fe$WY63hN5&ivWTC8( zGZ=NzPdlNDZw7vTTO|`3%y&j(wav2$(S;;3057IQ! zcOH^R@Bqhaw~q3MLn8cAcr1O&-HsZxN>QucKPZ!{!W)&MPM{ckLf%wc#!$9U380}| z7^7@@?tVtdb$3{q?-Kx&WMgN|w zAf{Pqte*14?2lr3Ak)z*k(1@1<=FJWMU3+)N-jEJVR5`7;+dh6eS@WDDt<3l9z{I< zbHx2TngSBdJ617P#SO2;@3S9ljDcU74+y6#NN3H2H3r_l!)zeThgWM*dUWBbs`fzOfkt6|MJ2n$e5lWNIwQAU^6I8Hfi!&Cs?>VF z{p>_B#?0DXwvUzRD6+s|Y)hU|(6?j50GcB00V+{!>2{bpWMZs*GQ>JNVHO^gyRG9r zItjMyq7TxTso*ab>T>Ye21FAsA&M$+SWVIfHXzI~U1)l}s?C=jE;r%LA8CN872{dZXC{t7f<;J`sfXB^^uoI=hl4A5yH-U2#qdc_9N3E-enwK0Fvdx*7ilIv z)I|Tz)A=io4E(c~LB>NpY1DjLX$&T#z*hP<81u7|FZ47K0xWvT9>nIuN@OMr6QMMZ<^f=`0F zNBWOC+~E=?ZU2Z#Q>ws@vCSFuUb$e_B_xe%FmQdN&uAo~vNjbQv`Eq0t9`|ZO?<(> z?Q0BVaAK}#IwfEmM`bfsIBfSxMDZ;0+)qlGP|Fxm?RDft%oGdw-mFG+B7h)vLWG(c%9Yg z#qN4gO0Mlt)@UtB`)yA)>1>H_7TlrEJH^h1uO_sRWefN61E;dm19VLx$+u`#FMgA> zCK2ervpTX--sJ~H@4I@YeVO~^*bEb6)MH#bZ?G*IYN&5|WlIDa_*D~lpox9P>@ewOn3VxP*6v_NbtTxey7NA zu23cC0OZl!7;nG2UaIlaU)vnd*6Vq%aZ8jE>WwP+Qak=rK*5dT$;O23QZsMzrxqOM&AKC9T_W57i zZr@zH_8P$_b(t+4&DagLx$H>jlAC6hf7uv^)FGb0P3h0{Jn3Ru-vaB|%TywLK507I zh2xO37W_B2o_W}I+w_-#LgpawsIfzH-hH$Q4{;HKp7i zAbkXoRNUQ))S5k#JGb~#5+VC!!!s7nNyBS;jidP?jCZ*w$?cwki-M$4!79vfIPvS& z6}T3#mCf~Jxz#lnzV?GiKQTj})OGptJx@Pn`<70cBP)xdha+Ww7E;PA3n!8Q;u;Ii z7NNk*^``@gPwI-p@B-n(^QH@4Qvoe)OWU}_I(#Rz%0tMOiEyq}rQ(*O0o;)!wUzyE1Z>>?GHl zs*_{)ez0F+mz62A)TkJ%Jh8>^<0I>^D^9WY>Y+ro7pR6a94RaBO_6=y&TepBoK}+N ztzZMNDh^{8t>s&qt=^}-Cq32hI2Cd#46@iF%=?jj=c(%r1nl8XH)mLz7d$F$981Mh z#YQesv7{J`l4GE`bmI1oH0gANk^s7DH2g z(kC+D;De3zIp~7u301K(Po4fyZWGw3aZx~Jz#+{AeHOVSX#$)^`+9MpRRYd{XswR6l>>&Eso944JzKEJ0>1jW5=2?8$l(GlGD; z-&%HUG)$2;kmxCRQ}30LTp0(wX0vlxf$w-O1+1@5u`ZwPzBW13#1sG2Cz0gnYvivn zDqKlARC(Mv#F#0eY1RON7tLGiZlRrfFQxN4^znOiQx)_6tUH0Q z#SVT>_GeCRS59F%skN@ixy``$&vtgrECS2wsTyOioe+4%A7aprDW zXEQj;LnJ>-gimc$0CkiF>VdQd(+rsc0~cxTPgOqGOmq7Bs?f9AvYxhadCk4;^0Jvi z;k*3FPy^R?Af`%6hRUCCme{0sq!l`XwQoaC;>EAxms}YE)ib+1C3~!n3z`jX!Mkk^m(=Y4|FQ7^iA~=lQu; z=1QOU*y;zfwlZUw|MdC!g`UmT;2FO}N!z16>c0%*-;VwP6@qv>gfkIM24uc;Omgw; z%FS%2v~Mt8*PnGRz1NyHyZIQjx8%(y>2zz2#)z|9u~`@jym((aRtYn?^xKuEXKv}P z-4c|;&a{P1H^RgD-i~Wqq3GQ5;rkuVL{018aW$|?<_<6OzD11;E745N)KNly#lc=k z$fb^VWA#8hYt@yUZeukAh|I?+2>B&bmxwK3KP!=^h#zw=5#`nMSDvN(>2+IdOf@7( zpS~N7>V{a?FpI9AljOk+2HX?in6~J^w*K&csCvif$fB(cH|&mW z+qP}9lXO^dx?`uqj%_>XsAJm|+qP}H?ocdbW79v1}vHAD>J_jcLTuKr@&n$QOy)GHb_hDvtx39 z1In{ruvGE=k_nHdW&mbQB<=ZFq8eVyMd6P`-G0%k4B_F%s8e|+A+=|S?x5Gyt*JEb zx&KN+NmG2F&?8A&1{=XIfQ}FEJ)}WI=h%kq!xOB^rMAPiZyg5Jv?!l-alN{ZG*BIzJ|#OLGgg zG!Y)H8tTsiA4QS;tfiRZ4xW6*zfVgW$9X#N!Luj z#_zBH%F@s-w^SgW%*@sZ$cY{fV;qiON0#gx8aFe@7Ze+87XTxL9ksO>u?*Q6a7g1(qa=RPd#uvtp3B5<+ z?(!1re1ePTg$*l60|b*9jnj^HNI+gTNDl2kQ(t?AIf1iVMmt*lKCGVilW`P$GMaFW 
zg4&))>KK;~aXqQ#-n3zp*h_}~u5-G6s$=oyL#^*sDX&Q;#q)d3W7p0p!%fG^{=3hz zlTcsZ9uI90%f6F1cF68z$X(bBMYlBM6W zPChgJpKIB>2QM4;q`iVune@ETl;mhP`)oR2yTmhI2tv2x_-&yQG?Ktj1b+G zx!^Ezs(G*H+t4ua(A=KBx*gqSn8*l}_SU<}8IC%7DL-iZOu28pm~KQhj9JhULZ$ug zvdL@*HO;-u3c9w;{6ELRmUKWNmJyhGV5?oUHg&V%p4)4t@%dHyaNBz!&)J82#92j{ zfZS+9`C*N2fz(T`(FeWyFn$`F4E3+|FGJM0l}b>ho%w#MPVFxKhR2KYyiUdcoR>9Y z+ao}HM7(iY&0t^6XvOhzKHFjIe=1}CNj$V!n;I_Aixa-1{pXDC1rr1-8@6fU&nOYTjmGEp{oFb)(xmyfhmH& zOZH0tbCme?oALM46NL1ZIZOfm08Mi5(&CHwC#hOG!Qid@1&ja0SxngVLIvx|zzR$n zk^j49L9nI{%UQ9YvWmSmco!G$K$5HM+Z($3;wYtuUrT8z#{V=!+HW9Aq|!S4Af>B* z``_OimR)7`hC&wK);7AR_oVdmemb+(sr$@UXIFBE{U0s$ksVQ})D?6iJP9$b=}?5!VL?>9NB~Mpx){vXiOfy9+Ob zO*KjIyb)hn-LG9=Ti8YvK+YEHDap0uE8G86whYBWimA{0Tj8tE=u4!#{8^6yRic!z; zdti$tlj!v@&Ejs~2F3482Yqrz0ekT~qD1M!Snz^jU-ulV|&?RLG?@ke4BOgKJZ zgA47)V8^*4`8}uOaDb!UHi`hOn+N^}i(Oc#OuqZ;E;@HwT~?kE4)>m~3J3o>Q0CcfBrGP(cfN6IrSppGou zZI&FzH95hO0blZ00m07^bD<1az&Xt0^)0frg-{`k0>NtVAoJO_%?A{-))gWCDoZAj z=p8w2rfQR-@p}&{LUB_rKLN(f5P{*b`Nm*p2h8Rb5!(OvR(^%YvyZ);2=6|6Pj(M^ z1^GUKHtg_5hVXr&26@5|kMAF-iC(WjV-0n%pD2|_Qm?F4Z*}0)6*mvC&J?Zpl94e0 zj7odgk3W99i9Z&>ZN}qiT#&-JNLv41rBMwsA&|GbBH96QonuN~7eO667{02|HEsrJ zpzQ0P;(%u{(D4fDmcJ6ITHFo?95c{Oi6k)o|NHDb6w>elYkltzyScSk?{m5aI(+Ss zlix>)cYj@c_CFSVLJb6sR;MUo{lOE&Zz5?u)zQc>pWV}<@5vztQyjuCwJ`-Mhf+RBl;+e!y?NzM1=} zbKzEFtAl0TY-6g}_`D>u3mCdj`>{UK`?^ESw3KesX;LU;`C3W8*a!e74=fY9wIJ_s z15&>z00XT~up4vk)j5W*>LdQs&^tW>J*|sHiobst0Z1>|)G{Jq$L9-* zO}S+x%01#deD zTV(>uIf{DRgK$MuZJnz|wLKc=sPP-mMs_w`j8jqSosDr$b5JbSl79@!pthKzj=`Y- z^+J;9?vDFdR#CgHSN*J(SQKV)$@ydW8DI*9HY{}SHXC(Xyu1p2AEZn@G_OLe`RJhp z*u3bO{?N6?srw=LpRm`LHQ3%?efG53{lXOCDffx+DDZ}G@z)CuOr`chRvVpmO?Q<` z9G3elWj#tAvI)h%mUuSONv^BJOxtsb;a9-wwZ>;b70YKQf#Sx8>)e4SE^kppQ&@=9 zAA97s7@=xgGZH5!LAKyunadHDpl|8X0FMinHi6ZLR2Z)QbHk?~Hk7jahw&l>Rsz;W z_G-|rC%sC`V`lw6fdQ;mt{TG)LaW8@h3j!9=!dJ3Wu0#fDU>HTm))){ww+K(Hag!x zCx0{SIr5$xUwfaiE8}*Wth3cFGt@4Z*Jh9JFZC|Y|6;V5UO2R#q?Km^ov^AdT&MGq zHcCo8pOp|j%FQ+Aad2Ly-R|AMVPf&uWuKwgrd(fVuXd){v~I}U$!nM_#c7p3`reRQ zzU%5Iz2P6wC~eyqX-Kt-JJlHIcU{D6B66b$7cfy9;Wj2s5EJ{*&AQw`lkn|rnZ&rw zMvwIBLN%SbFSTs@B%-)DJ#pw$+@DD)vBvGmNU;@Zu`%4)VNgExwu6y^*L6j}Xi;ox zM#O%VQdxGh8Sw77M1&^N!ckwEW(zm4c>khj=-&)UBM0FP3bMorv;|_CF!w&88sh9^ zk_>#c{Jc#!FX*B7(y?oVX@6BtbAm4+bhpY14)9ldV_(_+f;xTfBdf~H+Kq_4 zMzY@I3{H7X{qyv;0e8mR2uq!g=RmezLe%sXbP{Pm(CzT%JjIccHo==M17}P!EpJaM z|Cpc@T}XsXBZfIO=20ej@26>T@-JFl0mjLiHi>cODYNVKzaD^ zI$5~|W>`@l=~Q#ofuZwkKHlH^d+>D(kVSwVw!*Nl7}8pPb#z0bR54XgL{I!vih5vx zhF!BzJ%FDLKFx1%d@P2GBBxiI1k|&}KO^s}YdIok_Sj@q^W+>M@dw_M4nw)8^H9~J z9Wi4_$3z^4v@BtqDK+rsPHhoctndcx^6Aab( z92?yHFpYg-2z&`{d1E*0lsFKW$P4~Pu#_pPrAp|??H6S;HvbxEzPaMKdDe(-p#*u2 zT}=e05}L8Nk}Pn1ovf$tg)y6!F>uop z@>nwrf0gDnT(|Wglv%?(V3=`RrljIezTuk|xTh0K-SMv_7}Qq!kz#MTPvDlO8_dUE z*iWEDhB5Q7h`JXGuUGKSOMhrpb>f(ir7JX;eUz{pIoo#V=&`cn1SL1YyuCLnTPZZJ zQxqPf)LcT{gyR$>i~iN}CKR;^oD)?;pU90GxeHwzQ`FSs3$A0pI$82o;S=A|wb|2k zg)JP|7M>CkgiV89v(7cjO7G^*lcGxqV;~m_ltUef1r8{PgHa(!;`zap+Sdz1&s9Hm zf&vAFO3aXj^t2Oh?55Y@Ds=)MhIlDe*a6-Z_o!eq#N$)#1lHMUM{mr5*&Q{CJQ4(s z1j9lQXmrE4$nBE^RBAn5{{q*z+(W9xK&!zZW8JcJj7`EpKa^1@iWS36FJz4!Eeof_ z9K<>O$^N~?J(&*!sQOXeV0{Z-&lUK#Thos8)u2pTW;W#o&n44Byo3sZKnWc2zD~&P zoBg3i+hYH|V9^7~445CyhGLV0D81h_X@N4K#6B$;C_M$`GTL7v^>GH$sSCa@l#4(0 z;!supw&MxW>YdET2P3e+b&Bm^?L}Vx1qJ-dZDedcW_US-G643M_;AV_z?_ae!2?%H zuCQ0v6RczFMwWdr45Yi*s{Qf!!QKqWdS?y*Dy&vfOmg`-+j@;usPq zK@W8F9crTQh`ER73$0IO^6AstNyk!6?>d@+s=uMKK4fW%%%Fqxs-}#Y5v4fPLDtX}fyz1F0 z?!*1_$#-(|n?v@+_iL_3O?~P0VqP#xOV2zA=AMeSb~6~)(^fwR^3YXuG|)7s5qc-i zr^Wz~;(~@hPpFGUrFOBKP z)}s0B0W#w3@hU)y^>(T=VwAtqNF=mj>Wo=tmNsC1q6FugVWzcdL8Wh{@M(C;B_n%0 
zm4RdvjdP?80{WFzj?Ts}*NF$Z|QhJI`eqo%Ro2N+^?Bq+u@ zzXHP)EOtxQJefWN-Uhf3iamTrK!JW?poU7&sdMB)zxWyy;;Y1wJQ*ZS4q>#ncA@z2 z(x+KF!J{%7N!DEP4v}rUf3Ym;;O)zKpbxIF;weH!coFbgPjS!YmEpGR!CIjY-?bzj z`~|vb>eS*SGi$I-?qK%`7#ooKr^2kVBcvvS<4`dG{jJjDEzJYQX^yiXrtK~K>pO;T z0)klFyMWAm!=-UH)oECg_=&7nUTy{WyGG^PF}trv(X&P-`AvwJx# zV5vWN3X0|XY{p5V2aI{(X#7>C1j*-GkOlLyWIC~aH&>AsbonCrd?ApbLUrsVTJ~NX ze$@mTFc9`DQ6#A0B7XJr2$D4ro&3Q01@;IXbD4@EbcP79s7>mnw&SF~6nz?VYlbnp zFRZdhpzQ9(Q0YL!?rKIVcb~!>`@RUa1qk+DWO-xpD>EdIR$Ho&565=ov4_6imj?`X z%>ExMXvM+Q7WSEWj$ zxFJylqe7`VtGx%5l%if%k%wv!OO_IcUU83)a&hw`Pv-^y(&$gs{+4W!ll+14rR-LQfL!WXgpnNI@J%1k)96IxNETSzdvZQzhtSJ!nV` z*OaZ>h%GoZajNhKTBrz73<_BcgZPh|FT?tI86Zc&Z~#3A}?*5KBv; z4^LbIM3rD*fxDil6@C;Uidzb9ybsQ;6#`vN5R+LD!XN0FmUKy(0zN%rLlg?mXxTK2 z=6m7HDt$PQUd2#{JgdoTat1bGnney?nBm_WGl2@(j58JE35y7}zIr{%Sm_$mVYq$G zBnNlOvQ3yv*u_#-RMPsynZE=;>>dVW+Nv-wVoE0B7>o014^PW_ZtHPD@UyzlFj7M| z^jpYoG~_;FoBr0s!7x+(88&m0{{=2CiF#00RoJT;%)RhmF=de_uX*J_7|!VHQ7sl4 zC_1oiwnRnq*k^>2RZVMNxjh7VQq)tiAGHSLrAc|#&#DC84e(}8V{uY`9SrH0fdG0U z33N6d57?!LewR1P*#I!?!I>1lq7};KC&@1|hC$Pg^McG?S>8y`4?OKyd4s4)-*ygTe1ck;E4K^3GhNub;i>o&8$xh>4U>mP zSJ?zUPr?b~IR;kDHz4mEC)-WIla%wjZ{fES`=AWo*5B`7FaDMmhYs_^E-r#^{Z>Y7 z5P8Mi0`py@10<^sLqz%@sWOD+q0ugck65`e}Z(RWf7m5l26ie_oM`j8+80ZxP|-3%pxl zq=C+b{biaN^@ptniMV)G1&+!RR;RrFm?Ai%V0Xc`=X9t2n*GS24Vtd$<`!47xm8Iz%Lee6W6>&Rr63)A&)MaR zPZ$+v&c`VNAFXvK^My>dsSPY>RgoZ(+RI&E*McAEa4AjD^N zEvm;Uf>lpX>_$AL0Hvd5nu6)v@G0!MT^0qgG86ZjVN8^waABjQ66X6KbmGf5Ov#Ye z1Gi+B(S<0d3V9ocA4qv04HPHTZBu9w+i*d$8)C^ux|&~H4E4mHW;&Ks+t6@udWPlI zpWbdjHM0>MD^oyu!-?9;{-^7is@UYf(z`+eAB}Tbw|;RPTeC{4KW4_kQ}P#`t#>5y zp)o(*>lh8A=O9+?6PkHAqprgHJz~;qUc;R#V0I?_(2mfdRP|oeamqKsOxb7|`Bgcm z_7&26_UkbE{>T*7!3WH%!I))x8;EhD=5psL^$VqmejyEf3UDv=Rq?Ep!yTCl2uzd^ znqP(#K%;#l#wuH5kt`1<*`Z|yod%poe{`IAnE05m+l&S`{z5GFSDck#yri-jf581~ zBe(Mt+9ZFuPRP|?SLZQK@w?@w>o1K)>bNScR`62~ToaIBR8w(BQg^UgrU?nhIiO@1 zWgRLqN_O~ZGVe99GTzDEY!S!6XM^$Q&X|?axq<-)jek%bjxlR<>^7WebTerMd<_sa zT0vGtO@eAR4RM+Uv5RGA+>^2`2*D~?>2Q+X(YBFL_y5xy(tl)S@u7S^Y-??3S2Hl!4W=u?t+5Z`n1&#Nnj}WKNYKYQ=76 z&pfv-P3JQ{{e%Y&L2)dux~M-&q)WJeR}SZTe#wyO&qAOKs5HcM|I`&KNs|ala=V+d zzEJ#VG~+`DZCJ%zIxi5`I;KkDZ=hXKD*e-uy7bF|hi|*?_TMiDqbDKQkSUd0JQsuJ zU>RkX{WXR@L55m_q4Qwhs@k!Sm8){K+-~Mic|5|ZK#P^@JF+tvj7rcb!bG{upgVe+ zZ@{e1`5yTARYE2=^T-Fz_k(uo?h37dd0$WTct(rIb>47cZ6&t~nZ7fGSoXd1soy30 zG9r%X4G#PHrN6W4%IE1rL~HTiUF4Ww9TfUOf>~D@1U?Gm%*S*!wRuCm<Jm-1f$y{wA6g?zFMMw8H_Y)PWF-~7!jg5P|O{#Ci z&N_$kgy8|A%^MVTfqfi}(jT@rhS%DAnBWXK@H0`4HwNVaVoB#Rp6_yv%#_F2Px1F( zRn#1jwUrpM4>nTR6?jtnaBbbQPGM#apBz;jP19Hu;2Zlh)eS5#n6(k>0!}Ax`0@$d zP{HEnvJ0Pgl(=X8%+gpCx!UqGFYem0HJTj zRBOxB;IoOnDuU0=Yv=Zku{Bje6lITmbz3mT;p_^oq0bBZ@|dwEia*MPz1T%iW|=67 zdge7M3!6U*!q=H3uJ#U?&iL1YAgc&PvJSFQmpjmD4?F7+Tr$caoHSc4FMI`3QXV%b z(C(#)z#pI~rWQ9R{ly4%Xe=1gL`n~2t=iI0a^)J*(77`(k@JslpoBL23|}-hV~D-4 zFn!)w+^{vPdY0y3Ms$NS)w6wm5Jp!)Ro@wSeTQgvWWqv-M+r*AvD8Z2O9g5{j)|Afh#ylq#0d8_cA0%>s>{sP{_!4~x6b3e?NS^&{ofq!UBI zPt2v@uhW6Ba=FTDaX1eOvzQ>I5Cip+Aw1}!jj-9}{E*$?hxrBt7SgA^$B3-^C1%eW z)Z-2AkI*cJKv>jK>R|AAmOf}&BCcTx3S}lb(iOc=@LW5+jsT%hiA3HL4 z>YGXWT3P>8d@8Hn*#^H(+2L2g`!Jth=%b^?31oq?*xmDa0?&9RF+_TxeLqV;%VsLQ;%@1VO~UJG{c_!STcsY1cn4DX*B`{0wpP*i zl@T}l%dCo*IKNhQoeHePSzVa;eM_hp1q5x#z#~a6xDbX3o!M)232e5M58z zEIMM@4p)Ph7uV`xyo)35V{>nuUR0=|3vzmkFu$G@^0L zqn%`vGf`(-fqSuPS_lcyY#wY7#d&Hw zENuR$ee1&&7+eP7%(Ra9MVSp^qnU>P?FV>dxP<@6wi(@vhM(A+rc>YgQJ(r(esjB} z8o;3EM=Eo~2WFGhNTLEc@>88M8G^6h^dVZTTMVj54t{oFIO3$jE8^z7k)=M=P>G=2 zULd0Z2~e43Fihw>y*phRxKHjU$-j+Qc=of%cx)Y{ASgcPvS3$Wk|)GGdq;REU(ph=19k}H>qC44Dc55c0_Ya4xsmaDn81f zYYnrR3b%a|Ikr9rpNLNDW3g+4sQOK1fx~aWwBcIDboA29#4EFtcT#x^AHHGYY_mu~ 
zLk^O*VEmi^v``u^Z5qB3ez*@4uZ5Swg1+5J-UIw5zFZU=prF@Dhr2vF4a7<_#*^T>hYrjaIr&4|%(nKg{j8c( zHEO%R5D{XEu0HblDv`>V>GB_EDYPvsKO9i}$r?eP;u}-(`9}0+{p{WO?(2K*=AQQm z-+pHyOz|H}p}C9&F}rQFx-eGiYVtV@F!D=e=E+NJDf1LA*NAnDQH4I(11Ua`Kv&qc z4zo8GA>>2GCYrPBY5y#vhmopD6szxz5nc+(ZZ3EVizEBLt?kAvi3x0cyY;+k+B9|8 zo4LH+eU-l&dZ!%uNO${0-}yWVd77;)1gHGhBYjy>;E)DgaofXP`~1{7^kW$b;l53z z$IOlAbx^dg7C-8kw7!8uj4Hg}5HhbE*i~K*>HfV0?TFTHvO76QX+DstOl(fCRBK`_ z)cA2qHew$``QNs8Vzm%5HILSNnb2vn;mUg11eGmDFF$-3l7CqI1RdT~!B{>8TH*)O zC+;WJ&3u4FTjW89PMGs9L6GniKD`P0!tSVBf;C+OYU1)HZjt{r3a<3214EAH0$rVo%KuLY=ob_dpMdYzizZhri`$*P4N<1% z7B73qPe-JxK>BLI4_GCRBnY!8o;W6{)BlX8fj=-<;DBE%vF*47{5P%@nE;&1dAUU9 zs!5fqCcVWD&bj~@TYdz{VClf=1}#^{zluy_VN^|wGFxdC7rTt|4iC#G4%vzQr0TXYmWFxnj}NjfO3ktmB>(j|7z=2Om%5mBBYkiC?WR9<-Bt|^$G6u# z{W?Eyo%$BF#tdL?Yd}7E%vRFXD$nLz@ck`A)Ia0OmWp@OKfSC}B_x$VsrVrDCWwI# z_I{oK;5bpOb9j4`DD0H)2}{e{&RB)5XCb0gE&|~gX4QFG(A-U$wi}sqbFZ=hNr&~R zxw1VRaG*F=Ri>3=N*qmcj5%3vL;c?l51^Gx-9zrw^8ei3zTE9T`C5Jd`q(pZz~bVK z2dY{YEivZ&ILmv61Nb9`+MY{2>XdBz$jR(;IY&n9Qcq2j*a%}u@BiE0uEu7`=Pa|$ z5Q&fg+>2AkMKBHXUs`G;$XoI>DeW1!xejPI5iUE!{a3QaY09i)f>X5 zBm!9=NAB*VafR)2qH9=}eXM(z59$Z8i)inKX#&;x2inJ#40m~$oAl}Aaeb{KK|N@T z^^GZHjUdIE%N{EO4h%fV87oq6TrBQ6UFJf9di_EBr5 z_)jOg34wwv7**)I>9cX)$B{w#`(RnM&!^`e7{VJJ1mcj>eLhEfxvuU?y$iq8 zA6pY7wQXhcuYroD8;Ig;cj4V}iyfpwO)ZrFc%EXF5B?L~o|{8IPN9X|-n4V}eB9jX zSb3*4!vEP`1Z!MA1cFh6`zBc*FIrgb_@j%IPAHIf?#OgA;WKABPosQyt5JhH}-bmeB=n*msdsd`Af z&jM9Py9|wVCwmN90ym*N&!^)rFR5A>=sVFU+YC`xg_)Pk^oN*v1wJDX+TUhFa%+;R z6mwZ9?c6H=_+~mK`y#-DijcaHtL&|Y^U16L{|T0B&=@XU!`HrVgxQ}lIoqGD?$F4I z72S|!T02c#Q#YRq+rIB9qpG*>{gNexXxp3{Ht0u|?g(~~S5=s&EWbH%I#j|^R)elo z|9K!APeX$A1j@NJk>{%z6%vn2P=2J{%Sd4OQv`N{+3e_+YxPLm>XXK=-?N(#z(7_B z(ec3Uc7Scqze^#~Xs)<}VoZ(;bVbDSaSxl=8nsi`+h zXA|fa!ip%cH zLhTJh?V-pN5-IXSR4HFQ)tqjBm>c1+AUr!SijaT)GpE3p;~?4MRYyKE&r9ShBxg;UfkiD=I{lqf+KgHr?p(yGHlVChyRqCktt(ft5;+9HYq zcPMP-Y!-Pq9cLiUyG`urJ+lG$AUTW2RhaioD|;_qoGd>*xp{V&DiO2cE61W#WP)A_ z{f>tZx)U1E)Isg-+fuO&2pWKFwCQC*O#QMX#F7_T{(o}=5rb0U=3mQM#60e<4N+>( za>;!z8^75pzjH6W1BH7pK2KhU^goeyyWa6UKQAL>^gn*tf?ChW^IWu?_YBht1l~BT z8-tueCvKEc=7V5zrCjnnL=Fv|IwZ@V$&S`p;`yMLf#bVGp&~V_7f<*_%!Gr*w>Dww zl$i(C$b5pI%}dT7f6`WKKEh~P(H$qsT)k64oAVa#?;jk#Z>UHj9$&Ekcx|+uhOsQvu4~kV&ATI7eUEFqeB=N6`-p13LvsiR zTTF*ssUU{r*GWCsSpq0Il#0(XCv^es0WqXhH+g()Ws%{7I;d>IH;Sbi&6LTSGEPR_ zcpEF^5Lq(%oIPIa$QiNP39|96tYDF>Fi5k=3TE6GnC1VD&VKpSwj8+*r27S=PP$Bg z(O1n31{e5oz1YwUk=G7H3)mZYlBt(EzE?1VhuCl-Ly`YSOIca0foYu0 z2V(~>A_0_}{lj7~J+Esp;Qr1zT%otGUE$F62VE23_giTM=a?Bb67|?86jP0J0!+#) zlmhVb@dO2GtM%;MXh3!=$V=VvGHiSX`;ISmHfoCf9SL$A5o6r! zV387%CsR;d=xyS&3fbpQC3%2}EV6@yRBfQs-nP%cB~lB{u2|FLM>qiKR1Bkzgd+JE zyfy9AaqWv~`@yKSbp4S*XX-B8X+`$IAlU>y*{qj^;cthnWzjA|ox1^@yi18_<*EZ) z&o0j{64h*=)wPiPDxz7VV(~4JRw_Q$-wefBH^qY6dRs0~c@9(Hyi5IZ@Ok9l8G}g| zm0s=J=H9+bG8j&U_^2GS?=0hJiA!7xt6!`il;$|VSGi_2zc|6qen`HTBD5N; zhh&dl;A&2!jtpYvAvY0%zz#=DcaU15^_AGA#tzB_2GHy}T>dbg1CnKJAO=h2BGh~? zJ;iwp#$Q%mH&Axy4-7#(4M5>P2Iq9^mkf{1U9G}(dE##EH`4Nj%)cE-kbhXdT!g%j z$u)lTG`-aB?ZWIZ^;Ab(?KxdsGHusz3-xqqKY&GZeKajy0WmvGHeNC!+)wS8AizFe zihaXpd>bu0=O@xZz+ClLSj`f$1E?sj>JVB8&Fnr!ek-fcB;EigxtuFe+KichVz7r~2)AnJ-;7}cw&FtsL_mj?Q^t14W zq>$v`6oneA^s^Kr0ClT$n^3eA3{$DRw$sQ|+hl%RVkRYi{1SgPB^^_*qJ=jvqSl+i zUpPW-Be`Eb*vVou?-V-fXL&>ok4P?}6XIPQK_Sat6}IS>?C=;E5M=8Dqau$0j4UN3 zc5^5ux(j7(CIXDE^D{mprJ$yAMC6726m(E8()fBTkqY-(7ANpQH`B=NLKFNajc@_TzlWAhc~GE_j5KPh{m-sD1v4EQ^Diq?iIgj` zh<*{genL~ZSxdzDK_3ZU51|yj^q? 
z6PQ86t^GRC-pRszgBDxsyUj_9!d=+0VuLOv0B0ZqTpS~$=& zW=mq^^Esjg+p?e;_=9vq*p3Y(5S=01Zvj6%uE0;**LcOlnh$u zE3TccBTk=G@A1xHd?R)tn_4-WvO&r>j;Ax;b?47prk_stWChIn%wlCQI}Zhxuy@3W zhgxx;AbDfU_ieVJzqFFXd7JBlMc$tz{GS)EA0hu)#>E~H;@~0*`ZL- z2;vWR@O3auSh6@;Qmj4J%ewY}c^P8Wdhm$;nOz53kZTMFC&__)_!6f^9?92}ZLV zg7K2Joh3tnrLq86QNQ|O9wF2jc?ROHW{}JM#XeyOjRk(ZRZ@Vkz*R1VS;du$;HR)( z%{&w<#=3Ge`IcpNR8qiTK!)G2u%v~~qsL&+=>p09FrrA3Yul~d3kFFC`&f?dXwKVa zAUGBmTq=vAR=2+u-#C{MJ-ggVAd8!WF2kVK3kSWS3?;sJ_f^L;#-hfX?cR$e+i;Ks z++PJK6d%2@5?-N!YgdR z0#$V$9}iZaPGeY=B++&3#jE&TI^*qxOGq%RymQt2_APIwv3`m*ya)nrbSDc&XrV2$ zz?3gM@s-IBEh0a8%9X3m?@huD@)G%$p-AJ z9u++51*b?L{kX~$DV0K8N6Et(UiX7>J1Vu36l|S}hjOJ#n7O`pzmf{JMd1b)x}^tW zRlj;L(OX+t3f|^WHgyM4Ic^hq*Ce3Tlhor1IgXMY7<`zu)F2gAqRSa?l;x{zV!n9k zE`h;c!6?g37$r-eqW=;hJpKzW)cNjA4qwm}6}nB(bIUXIoBr{Gi#Q)U%#5E1{*7FU z@g@Ib0q6&wBydS>VOU2F@?4Xl;r&zsQ@2QW;S{1@2a^e;qRC}kC5DSvFUFGeXjlax z(z=Hw&06l+;$rSKU1fQnf!3VNDFQ^Xr+}HbnPO|CVe6Gd zhlm-iN%0&!d^K2VQKJC7y%?q0Hv3T00s5NtU#9V;>zy?Nye|k#Z;xbAc{adW8=Ouq zh=O270=6vuA%U4OiSl$9dk`2AK>q`gN_{C!As_e5C+#SbhdJ`@6@o5(Ze9KN%VK+b za0sa0wwMTS!|g>)QmPUfh*8e=Wmn8D5Uu2Tj>hBBPXrz7mNRm)0=LwVUx_+*uMae5 zYV7#z;Xbm`57|xajZUrt8Er{QG`YZT%)XS($&WkD&6r~0r!O6TNSR@71xMvE$-_yeJ0 zCnkIJbc#XGpzK7|P%tzR?IBi_3KD_JB(&c+wErRR@CGwd86xciqxyL`pK9r_Li|X{ zN8YQ9_Ge$1*sbezY~Njw>W4Q3AtGi8f)-22QV=UqymnYh4z^tfH%Y4t&avv&+s0t5 zt
q-QU_CY60r8-eFuyCMGS)x}DHyTI7Vg+UwkcEdi*gP8m|g4R}29Y_xFa~G2< zy`a+ORs@by_dEG{V9#zvWO|NpWcKGQj_y(yi0io z5cRFs*Gs9A9vh|iAke+<_es>To0vQ@s0UMRGn4dwQYolUBj0=K65klueUrqb4>2xri z3`syFj^Cu3Qi(4}^-uS|`1=mK`OeLpu-koLK75YZlaQfPLJhXxNn9Fcq(jAiI0q%l zyl&0|@A8@;L*9&6Im`Ve({=@NF-(#O*bq@_cEPf*A=7vGrt}J9Vh{@_8V_N1`9B}zQxLuhH;#K+&wPS z+1wL^u6y%zLZ9yaH;8~se<wRPE6hk?jJ}ZDGrQvs+b;2|MS$W5fz+g7fm-TwkI1 z5IqKRk3{B(KCUndEuOvvkmDQx56#YUTL810#^J*Wj z379ra7TO7w7-`lN2M{*Lw5n$xmyk4!rpFl7385!#Zkj_u+ukBQut)5G0e!58VxE~m zGFZKVKDRkP9JY(3NaC`FtS3wU3)rfQYZ^=1pV4Z-Qt)mtdqD!?u{sRzh7i9^JQS-A zM#A87b&xpR+HdL=6aGqW)D>E+*;=FPYAG)D;^R!9`P80G{rYw=2YeA(sce4g8pu&^ ztH$N)AB*OCVo01|?!%=S!X7^kAdPBcFSv0*>w;u8RkJL07N`MBRUmbFhO303Mz=`A zpwnR(pyqs>`+zd;q67Xmt;7GNGxz;a?u&jGp2QLLc=ov&jqXOjqu=8-l&DmJ^b4L*D6qp;lbLBscMqD!sLP_o5PoMYW0z`dPj8gVV?d{^4AuhUm7)= zwm!a?uCCPO5P4`Dl;b~cO|Nj~cCw+D4qiO)8WloVs6&EV~cTDcSvGi$Td2m(hIB9Wpz96&?}i44u|{-xFK zL`{?~is1Z3qSHrKfQWO#!u_}Z049x-aTJpWhn2VIUbjeI(tt#grqDY2!I%3eez?KCbh!boo+a2#j>;X#HYvp1>iNT*4Kh&dEfYg6C={n` z#BU>zLsbcm_(ATX(Stmj()Gs-=~E6aTNdmciGQdPL74}dOWGfqF>cgPu>+7W?Kug3 z@ek_Qng;QVr)&E`U;RMHh+BVvW$)sX(_yRf_N3%RmDj5&lXr;Wu0bH^pj7^=iqBLp z1h0U{mWW~f7%|fuJk*lEGO29Sf7|&5 z&IE#!SUaV%64yGLE`R2p^t7}zbKG@u2eyuuzK#L*1!3fOAr%0FUu)Zx%wY{yoSApJ z-WrgfX{$FWVKW7Oq1xMG5Y1cF=S{Kbdl3jyei3tWpnmm?SdU zu0#>vvtE`3f$g|Dh=g;y?$Z$EwtZ_s|Lr0_u&G+9`oX4cw?}uomLF{1ZuMzYvlLAd7mK**LOD zOJk8bTooovoW6s28_m1N`jEMgh|U-{7$Z>=By++}QakE|t@{tyM^<|mffB(~yLhkp zRqPlue+e%GK(SY@2q)Y()5wz>p)c6@S;$ohqK~WzSck{IVeUQy4+!FULjUCJPYKA& z9OdBpXG4pXe9B@P4@0SODo->xG;vaY{auvv3n~f3jaVH+MI$rxT<7=%jWmTAd4W|^ zJX5ZrHbOB&UnGqFg0a~qoohe<2g^ocPSWfPSm-yqo#T zg5@Te|1y_Uqrs4a^YPq_u=K2sa!&WJG5X z+OPWPmG}6lZkbp~71wjU3`*%mI*F(v7I~UhL-@K6U1kv6$c|oe+7`UMlYS1f)7V`d zknR7^#DdOVrhwg10$5us)M=(XttlHZvB-|GAkLrJh%0_vzyQbqtun@!cHw_oSVYHE z&`w54QygKkSCKWrZmHtj=tdv!dR|X3`%GCCLVt(nLdK+$pYny{9ApiNM9N+v#3v_6 z5RW{CJtaC$3m^3-%GdBiu(<51Gst-G%V_ri~0y z6urIS19EqXK;VN|s4R*Um-+SHexpD%=_H&jgn)EUlZnJt7Du8Zk|D1C zLkAh9k38KazAN)&9b?#v=>eOIGb~0U>bB5K?`){2JU!iBrwGEP5QyE6WuEyAwIyvSva;xyAoxZonmq)_g!*H$1DkNqq}hGx^%?c zZ;XVOb`Ey4RfEEkwW_8#?!idhYE50>EQ`_!?b;COdGAEMWS&edh&r2hEKTaI%*D2D z1f?JolZ()=zmC(ZKiV8%0SAxy0SWqnf%&+Os2PVt0dmJzd42~lvy?qO`cgvd;I^0S zM_ef*Y_uDv=K;DW^#zd|Pp{wPAuZ3`6veEUM92z@P42G`K1RjheizN6kLNRfR<#3E zu3fd@v%(k{A~1mDCr~|E8Z%)utFy^8G*)S_LEB5i-KlpVOWl>I}*C2O;vvShrarwV}5?h z)_xuOS^&-6PJd)OwvnFye9A_{9wd$sx|h*=bDlzWFRFUJs?N&KzR?L&qXm5|RulW3 zS1qz2#3~?8$;e0;?tRZ%sdzwP`TX8Vb?n90^wz)*|Q&U2bv5 zdSJ>YOQZcGzL6mQYnsL4s{6@J5|3MjFHU8z1MWbU!qI{-HMp1Kx_v-I#89a4II(m{ zGT<|d{;esZxCWdmn&gC&O>sR<7ykGKD3CP+qd)>na4 ze>D+S$UpwjRdyI9bhNf9dO0tHLAQMOVFHMeX*02!t0F#}WGqY{SyBL`_TWuQ)dhc! 
zxeckCX*2ERj*`E4knXY8ait2>(nHl0gI{;i+ua}Ef|n6~_8OzCU(7e- z``{q8I6VH*Vh|r0Vin=Q5^KJ2OoNzT7p#icfX<+y^j8=w8 zi&`+J@4ep-dCW8sADMvg4=+0@CR4HtQzm=&?4~={tgsm;qdS;#!-TAv+0~lLvkwsO zBB#%PSbw|kaMM=$jMePuJu{#m8{Q)z17U#vv|R$@1I_RUOz&CzFf_=7)Yn^_*!xV`B}{)(6BaE;g42T+co7P?SP z8V+3L=~Mish(!<9@`I0EVhJQD_@i2bDLv@{RZh~~%nm1a+ORaYGKS)h9)UdeFdoh1rfw~mxD4MoasTpZ6qx6~%_`9o9+G$t0hEhU!@3rmvxB*+; z5%+wUlxg3bU#NjyF5k+7_9FLlE16lxpFP3&I%PdZWeAorGFR0lqzV|Liuy!-FMOt>ut{OAE_j^zH#0z(X3PQ1utiYos(mG^ezT3zo}E zcs5T@CiK)wDx8Ase$A7HCDj9tY1$wh(JBE2FNdK@Wvt(Y zYG%!B3~J8WL;4=J-CrLWfRjcws*wAu9!SamM>jEud*ws)00p|AEx+kx3|+w%4W&Y1 zCi#B2{B&%hb#%9Vo$et6N5R37j)}oFt`*9kWWh8?A4>IX;KF;s#GH>yu>Q`Kq12l1 zdK9Q4F|UeJ_7N_g6ZrbO6F0!@1+CkLql9y<)@LdPWJBBqWpWdRVTmQHW8<^ zT=}f{1#=#iSQq%62+9G)VTcj)~-a|1I= z@I}rHdS^WxXUQb6-6Ozl!&znp3L${D)f^3wGS)Hvs7+F~yE8CD2IcJ7J~2Wq*3-P) zBYE8h$nsSH{3>&ElEEFxkbbV=%t%*~uvf6sh0K$eEXiyZK#Or3~)vIVZ{27VKdf80b@5Gp+8(cZmpqfUcM(sxDUNBxv@$z5C76 zRhG=`>;<D8)6&yHxo z*{v8zMQT0tqo6;FI-mUU)) zRx513vP0uMS6LYq{H>DZX$+7wY;+$MA<7fQ`pYay^fTD+x`zx|NufyVBLXn9l7T*x zf~sGr3zPr=lD<@H6;@$gQtfA7ar`?u6)0fUdH4SFQ79Dq;LI#hyZ__ms|u1QcGVfl z)c2#v#Cz5}@46kZuh!Nb--~zimNasNp6_xLzI?QIQW{b@%>P(dvv1x038Hp_1l&tp`zQ*mrpqm{14jaHJ-7oeG!P_ zk)+&hbV0Rk9lxz=YkR%VjBmAs3oN%_42GD-D*zBsa{Pol2~#w_v;B-N%qMW#jF4V~H49 z9hFQkmKRm3MLxqEx(O7PP~?4;t3kGBa*zk#_A{dv6KX?1RUTe_?G=C-y_ly=&n1jp z7(3p~Mo~Xvk7Mc?5^|i_Kl(<Ow7XV5U#6ih`ha9NtYNJgZ>eN2L&@Nf7Z&}kTM2yrM+&UioX&bU ztCbcrF@Q&m@Wt0ZddT`j5FLqOrF?-JZ!qT)_ckMdAmGBYSFE*cA=P1RXdLhX_GE;~ zW#Tj|>yPGnOf#T+>q&cCLGlx{NPI)JV^Cs3>P?^QBP&S|Lt*&N>urcf7ZbqGiin z$dQ|4{{9*N(-0ubS=e$Qim_gRV*YLSw{FpTv7vYW*st@AdizBY?&NJTXWLwB!e7_a zCbdPB(bqq=J;PyuRy6R}zQ26*#ri-8~D|^7=L-@eR`t5;awZ6sq}@1+J6tske0jSAA&YhCh=Cb27UpdL{9#=*?jEmv~crJV<#kFs+WaHA*X zIef36K2#T)R$cz)bX6FtB%a`O50l~9ob z^^qW6Z0Im)l!_QHP|7(M)&Fmz zs6t@;%sS-75s>r|Jkzk+N0~*11v0;IY}vSb=KB^6fsnu9n62%cQ+_Ttpyu_lK4~2e zJ;H&`^sIH0kq~-QM(Ei5>UTEZXZwR}AD#`YzCHIhU9TB+_tB5G#?tldXzC@$FOZyE%vV^= zWbZbOvP$7ZzluQpItCr4`uevur&>e9QRB39K*0)F!<}!aFhV-wDsI9s7 zT58L937#xW9}VipBYg4vh6Cyut+V*hqcT-C==AzB^gZcO`x*a$gO}3K=+(zYe_pHA zqk=n8d9J>G+oQPKt6p-KD&lX(OK`{K-mX(87u6J-3(eP^ZnAGB)_TQi@nHmHV8Zf| zys%<-Ygaz~tt()9Tfw+&XsKt^} z_R}g}2tJdSrdM1oU6jSditT$r2huO?V-0mR)KWsPjz6-@4pcn>^$6nKzMIGY!bEKb z!iGm7$7OEnZ^;Axo^sh9V=@w7v6oPNvQ-U;abi%2J*CQYfX*6=i_iO#>$hKnjb(fH zb3Lvb;*S2;6raJAAHhh)kzp&BAz%s>?suY_MbTiq3cp=2Ci3DG|Y@WS|K@T8NdXFdq;uu;c12GhU}kc%sat z2V!YnZLTK>2k3-Ba^itLN*sxBKq}-LV+^8@Wo_shXFl z*ac84d($^SZH<}X5(AEN*^d+lnnE^*Z?N)QFD*Lc5bC24Ic>tytKC9umVt^lk=IhI zSuZ;h28Zc7Pot5R++BZs;=9LkqF?ikqcCI%kkCm#^D(PvED4R`W{9{Y za@)=ljUjHbxv!==C0^f%1JvFikiZBI+GzW_osRHuhtkHtl`>APA8Bj*A<2J6R!sAB zY3lAm0*(Sba3A!5L1mxFpy|?7G=hq@@(c&1`lEqiK3|x~E~5$Od7&w>50>vs=xTg` z$}3R_P+90@M8o(M_dmq^=sCsvRE%K;?75%?Vla>EyS%cs zR8Ysl{1uoiWxU8=^yqq=i`&^taS*|^bLj5l?XvkGD*2fduzAN6gW`*Rltr>2PUe#7 z6`kxGxC;F6^>uH?>-KLX`4|or*_@Sk4nKJi3Js;iyjm1WEboCy4IveAilF~6OZGEv zKzG)dv)C{vjZmpk(y?FC`0KZffX>jmN#(HjK1g}`g>w$>R7a+%;tdmO+Wy9;_$7IS zmy^tyci%oId$=*v-KYsoFYBIxWT0Bt@gzqaT9G>p=uZBv+)_habsOhY@&aIhyeqg& zccE`;H%{tyxEi2RVL(kV6ccnn!2{uqKQ)6sa0x(JYUdtlJf7%(_qruuc-b1t<{!lc znc!r)doGc&0^gc$-(J_qm{en5Qpr!M2XU()et`};_y6ym^-dF61bPP$vrPOGpHT0L}( zi0&AC*uuz{uOFWd8a))1x8qLk+ruc#&Nf@_8?y<1*5D&CWQK2p-L+Mp0D(pU=jMpYWT|3+OPk zbER=k4+DY7^KQt}@X|2c^4_HeqDOM$vB&bX$MOAs?Uj?(eJuy9ipV%e?BFs|w^nN> zdmF9@`c!rLr&1IJ`u?tnlH*nwIi*o)xcK}!VGn!MVYcXz(`^56l*84Ju$@cd;_(ld z1x{dH^r?O&mYJ&l4}J54mKrPx8<`_4T}6d!ad-OhcL*;r?LKMaev9tNgH#lJK-hyZI%F2uWs^}npK8p5Q|rLq-d%|$~$Rgu`SYrJJM>X zyD0;?7u_$(ZI!*Lg=+&XYVp`aV#`a6v!c=(5EQ?@LKChv8g}@s)aRQ~JeD=F0skIr 
zhUp@{jq6RLIr*sX)2I0lOgRD*fN;yg8p0mvbiPZ2AX9JrDkm_lpH?ZK@=}CNwJ;1l_GgSPBXm3Ph+XlD^74Gr4czbXQOBsIf-h~dlDXpX zwAXo9a^_IirN$5sMYdb;+Ci{fZ_)K4~kH(_Pd zDQyXwJ3e6GA+e%+UCnIpM)1j)-1$&wj+f`M*-9um`?WWkfK@Tp*<9-GZqYl`!HuSJ ze`p;Mh%BBNo9tV*n=fpmquZ{BN)Gr@?THD;SQ@k;6+0YrD3#1Ni)`J$%UOx)WWacY zKMIO2Mz5DV1}2t{T-yYQcTm|JN`ntCk!U{5I9r59XN)rDj8&Zs&Y|WZ*4yc z^}!rSl(G-Md_^gBc{m_lLffVov^1c49h0AEnk^zz+(6ZN{uvfcA{%a9gxW)ZX8B@8 zZSD8w_gmyH-r3MQ7VPPU#5f+Oz&p+;YK3f^<=>Kefp_3O%yk(FUZ+0AO|L_nKFr7S zNNROYg^-K8+FZ%e^NKBIk$XO?_|vBfGsy&w5VP^@Jg@Os4yAjA=6Ogk-)}1WJg3sB zcAET48$5;I(&dp^DGA1z4h-X_sD+`Q_zPKJAkvO2%Q{1R*_Bsr)%;&VP`S zFHFVjB<(6jZRHFCd2t|LdkMD{FC){|htf`qd9E;Q_DAK557305f;Ly)UYL4_)iuPSjl&&2{2nSjq zHLU<`@tZ9qVxj@F3+pT`%98#dW5luWip*_OLS{_mhR=5oeoYu(G2DpPx7O`s z&IPWc;Voa{&20LB>=M}I0y||i5fJ7e+;3!2zlvAqu7zE%Q;+HLEzBMU4gBA#eY@Wp zUT+$qPp?yGFoqdvUg)a5HT&+ohrh)i8`&Xmn-d>u*ltcYn_s7Jb=%WVGtF60CsI7$ zuVj2J8>iW!FmomZCN8UI6Z4Q0M;K-!BqJ~d^ZtmujHe@XcFAD~7do@o-&MaG`Gm5; zOMSW^Gp%BJNi^s?efD%Dm7jI&niKNux}BOht0qxpHWS>4N)(@*LN*h5Dl*bJW22=+ z%MHnUXLZ3`AhgmjvKA({~<4q0wv<-oY(-%MaJ zji^Drbm(>Y+#;Xiw^(O*Og4~)lQSBH>GAt*=3~#x4sZQrKCPVkr{!x^j&rv1saKs1 zwqVxb{LQabC~&o1-`9+DYSo4elGWtwz1TeJ5vbi!D`NUHeYv(DV*8AqbC&MQmrCEb@?~bKGa+Hru>(W!PA(h6G+XFU3 z7?&ib%-y8UdXDQa*khYSewc;gV&Rm4RJD-6SmEJffcO#Nf zE{m0YzzTaLjt)d+9is1WTLGCZCd_f2}lX2<4O&{Fn2=3S~wZ@1%+cB@G0+Xq-0lPb$U2CxYZ%O86C05m3<$C&ZdF000m-BRiR4$t_ zk6&W@nCNv5U2|6M@2C?xW2{9uMda|zpDeCN^j9g1!o9`k(_n9o5HXss;0=T$d&|tp zkgOWMOq}C0`c>hrVp(&{vPbi~b4pn>^N;GkZj<^!1tg7{N7-dlr)2}i681=aGojKq zC!Q=!afaxmTeLcHMP3}V`01LFdRvZDGtyJdz{C(chBHmmhaJJP(}meCF0K$+TSv{) z|H4mypa`a%RxbE#Fpygceq+NNM>bCc*Vr1a8dSkCa$ZThUSZ%DoaX(Ma9-M9FxgJ+ z-W<_p)kDRVhy%eoIEoM&pNq=L<29SW%vVZ_5LT)QsdbMXG(!Hw*MBP`&-%-b!k5{| zbbn#Acc+gxBW1fruj8=p)({&;FEbz^^O>a%hCbE;FY6;1pSnB9e~*)fq2jbu)}OO; z@Qh~>cMwCmVNDu;9iU^R;+^ZQM%PKVNt05@;9G^GrAO6~c!ng46hAM-OJthpmxeYj zz)PE#7zOTPHzto)^OTr}!6f)d;#ZC@{V_e$evi{xf~Vdn?UuD=g8Iyp{0|{#P1qqN z|I9H{P>*NJw7*8O;m4N8H7@O9si#z@cdKw!L-3PG@8s!M;@%8S5FW6sH4?L^avKA^ zLm+Q|L(gXvdt09&Jdn6_Bp#6V8cEhTu8FJO6)y#u+aG_k5&#!?47n!;-RAgP$$8;D`fGWS*w>O-DU>=`Z- zU4~>}p6s`c!n3$nNRFo!nWa?IlFV(v#U%Ys@((pM9hzenFzT*q!`wO+DigJ|W0t~o zMn;eoQXSXtipt#T2y>Xf%cb8RL>rrb}$mE zR@h!)5Z@+Rjxw%f+#VdR%P=kGOs_MkC(n=NDi9o=81Z-WS zhRuU`H*^Dcjg;NKV|?gW!-`>XL}V*32y{|1?28u6iqxvIJ!8expRyR}6H)+%lb&we zs;cj7B40?DF?O$=EDyKZ#q3+X^oKv?)>oZBM(REPwoE7=XT8Jy^L+w(L`IpD^r*9U z^}T8Q+e#`BKA5JL(1xZf;tInqvY22f`i;tNe0=536oY|t*T3X^B+m4^4M|15J=e+7 z_L!3+pQ-Jq#`6$?S;hMB@68wNx>YFD|FrD+BPcL{ef?#f|NOU||K}(EbKUc$RYHF+ z{r6jKRzA;h{O{*NXy51k#NG)$joqi%?)u1(Hd))9`4|xYDO~XHjcsX}%%b!_ypGX2 zm5vH0k7{~ZPhrVnHy1(gE}9bGu_bxgF!a;TT^=WEB>@|QH)1}vZ4!O++!Zkt$*><0|@?LMF{v+r_Vm_=)Ux zA5Z9d!yEI~vW7*qqhq2&5ynJG4RJb(_&sL@5U;33ckkYZCzm7xY@+RPIh6dR8RLo&Pln8 zHr=7xHI9FA!-qbb)Yf||(*v>u(|z#Q^2LvbPu_tcVwP?#(T7nkpJ#8WEjTGkcfYxS z_0N^$lg2{eRF+7LwkaKLDUmfy&Szi({{5+|X(NKgY@9iAep?^%Q}Wn;AY0fShBOtZFc>Hc_wmlCv6Sn!2Z8IyeYYopgqtQ3dH ztVT$RcatPv&%OrbP$8tSOXg;1ueM?sPUM#JwHK#QU~NF{?8e_PL4%=<;QZ<-j_;*; zrgFC|8l=#EM~F`8T*hDe-8&9ykT2n&vzjv|azoSI!_z%K{Ii(ylX${pP^2S0Wy4*q ziHs?3d~t8*v;pSka)QK?HYl~`mB5$uI1}hI2v_Igu!aq?Gv@;`#{9(4A^Yfj zvq(OlZ9QvjhcECEKl?r0nLvXjFkei`X(`pwDHaqjAgc;ZtX>>4WIF4Y<-c^6?VcP( zei?J$F%&nuU|Sp$k(0doXXw|M#Z^S>p&315y`8<*byTE#pY@X1*932nxnVDN8|RQ9 zPkmRG^3~V$RO+tjm^7N9J@DS4`5Z4zWR0$pKwu2!h(cZP;g87*qCVMTHP|2Ap)3={ z#Re{-l=G%rvm4^>pn=rX`Tny&7-tuJG6MD{TzJ<+7OvIJTu%N2>qH47Wp45>tlT9W z(b3iWagU#UB^qf@*__7&6b9=n)WYAP!F0-A{DYy~GVpY!Z5>N*X_i4%j zY@Ma|f}k_qY|~}l^{@~XDpp1H!|6*$3(9T>!TEgOhHPp{U}67ilE(aG#5nKs`@2He zp~p@g`(-c4H`K4&i0>};2d9^=XAtumfFXDU##@4}2|MUsIOv(np>anZ#GR-i%umRn 
z7~gESWUvq{{C32%N9Xmk$}J}(uZ?32VU@iy80^cA2{PQUDashVGEeG5FpF)D1)Ds9 zjH7Xt^?{wR{Y@ISD)<&Whs6iaZoVjKWp4FuT>y!2Zs2^cQ4bPv#E`{-ZDt~{wNksw z_v=-d9p$Xa^5*63FLhgw5Xx}S#%1}*jQi$in{_2m(6P4sut#1tIEIqrDUOm2yYo4i zo3j(+?Kt_DY{pOb&l?~uiw}?u$`Y>VE4mrE)$aSWc#$(CKhKlYcrQwL%HsJ@uY*`f zp-u{AJ@gLOa8tR9!s!yFczn|KY5W=6#=}n!b*vz%q6`$4?)g5=ayLNe-c29NS`GiQtTZ}2ODf1hn}e$jqgp*&xmojoF+x2lrVx}KY&^$ zP!y6Db^jTmB$5=xES9FP{rq}vyVxLm`3kx9cdxuID|07w9>blM?+~DM7IaU7NMHqs zUJ`H7%1u0;_Lr1R8TjQYe;&rNXx-+Ij9o;OaTQb3DfYT_gl3I$tgjTjk+450D9(Tb zEou=u6OBb$lwx9jnSpSGx;mTL^|74rX@B(Ky!qnx`AT^gA!VkL+?8hUCF9ZgFBSHf z3E44E3T$!l?@7S&~QpEX8QH zRhq~AkZ#}+W9a%j`m2#&X|=(?G@lGf0QYwnL%Kak$19es-Ywq zzbK{1t26aUT`io=PGYi4Lm)UC$eiwDF>kz&zx{aH}`I zFarbo=m5_3_zCX=)OmAP{G-=&C^Gwzqee%~Fa*i-_zI*yroY z340(FS^Ed_Zts6h$5GG{)iU(lpPpi=v@efq9Z`;0JDNV8i?o*3e1Cs{U{O}g5xJTg1GjIb1m((m?~4TxnRdw-t4a+y7Z zdrlgTm$PSOg)f;+jYOnay|v!_{)KfCMKH?5=vWbyf6YHA~l_Rvfs5p>TwzW zu&HuU>^9BWO4{JwWwBkT5G-T+$}UGq%$K?lhjW!VeJ9`OWrA@hAg^9)6M2vnm%eEv zMuvQO=TqnIXR+TPyzUEUmR~mM=yB0_{Q}v@w~A-jaO)8mP_^yi+O4r_E#J@(4y%W> z4MMEpuU1O5$iXYxQ)kq0#i_J23z+7>w2g_iyzS~Km95_hlVa~zZ^lGzK-gdRZb&OD zrv7u1UC2S8%g*F5tOOBmV@T3RD*>5_E!5d=Y)4JIh+8e~uz-n2K{x$&>y^s7e1UlPM;nfa75iR6CAfTJ#J6l-JW{7Cb1sV zm_$fj26@@?5KunyF()?tyO!bjn~bc1j8nc3WE#XP&D-E_7CQ3R-HbPjlsYtCPwR-V z0`)^J+nlcF?ZYcwAtSfyC*z981z*puFTNJpnrA}>Wx+-(;D+SB#(4d~u;M&{owv?o zM5nSkMjb0>$Ault{<98Zp5<>+cBgIn8T)sk_LiMhFr6ki)y_Tnx?2yKqGx|x@)`Xw zFqX1l$k>e8TU8VRd9r=|t9#%jilgE>xO|8^G@c1wEThJja9i46`8XJK{Yy*v zgH*_=N<(T`({)?D+jH({Zjf)hdQ~>5s^}2|BEBQZ*P2?{6gv818s3<-o)%RQY@Kn) zQjtVR_C<-yS?k=jtZTtoL+$ow-i;=zlCLjj6HoY;M}}gT*r@k`_X|zs$0+ZMzsy8( z6WFZRH#l7((v&R}rt%bNZu6pscC76Xa)7s4E?j;#-tx&c6NWYnU2r&u+in8v!;q1w z1>M^{L@0ctrZe2;6y}f(Ia&`#@By3y?%W-__Og32m6jb>RA7k1<}!pU>rY3ofEAEsQ#9d7=-Y`K8{%i( zmO5ALg0D7*!V#P)X})bLghx=3baQLZ06+MFxbBx|bkio5Q((o^?V6q6gEg^C%zfIR zuNIZqadw9v8f^kY@p7y`#65A$EoXd~U!-!+9#5i`#NqL<^>XmcWUcjes?AEYKeeux zJ$mc{o3B{UE=>^m%!<*VHdXx24yq|{p=a~Lrv$ari{$iJxgmnBlUZ;k{cC3i;8TYN zWBDc|LLSRfTYJ%8S*t&_>>@tRJviVCWcKp)ta1*`GQwhJ0flc_CdsW@`GzJH)e7p< zu`^pWl<=D@he%={NaoBV0q|C8ez&~>Is2D!nK~;i<2(v`i_3Vxr&1`^xvs7cw6p5x zWamS$a(=Lx}c8m&_kvwiff%Ltmh}CtMY>D(2@ijrj|8N9~h=Y-7$g`;@LcaXxOr z9T^oCN_LX&U!Fr^l#V@*@LlCbkU zESAer#HR>vg~p6(G=1>C>QG;Qz2k0d7*8Ky_L5;d&eRIJo!ow3?8yKxy~dC$`|@U< zYQzV^`?Ka3G|F_WkwO^E@b)i+x&9>Y3%DsF;wk0juMaV?!@S$8j*_vNbT{ve1ynxk zh1@+tEVW(5P0%ozM>27ul<40*IV zZs7urR&3ymTh8$yn5|t-0eejB-r1@Q)U%Ig{;p3a%n1;dF?^zp?V#c|pJkMyukoOS zWEWns8F%g5B3PGdd(?!66no?Hl$?k>9!9VqUV9(jhUcI;dFZr3(eB~Ghoc6eH~25d zK+#T|OG)O0tVf1}!7D8DMt_!zPtW)%NG<`ymZj7eFkBbIo&ZuDUhIu88R%Qs-27&v z@XI~fQmKf&*Cmg#;M-#aQ1BfBr5h8%Yc|8f+fp}p=IxE^GQJ))^9OwZQozX%JUqNW zwvnkC+3O?9hog0iA@LI8rPC;;-anpJ5-av>Kx##ic54_y9)}PMnnv*1m*rFnPUG~>e~<`k zd5S`ogDOMg7jQh9Nf@FZQ)xF&O6pw^ylpQy5YGSA0q z*xA{MHT~m2`LmDwZb!<^4Ne!qVNOTFfYJN7fPj(q@096ndrxfrzn=X6z+Qly@&_CH z_o8a#=NtVOJY)UtBY-Xc*ZVJU{Quvc$twNT>zIf|uk_k`8O^&LJSg1qW4$TPW8Kq( zVwn=d|GM4lw!yYc6+f-J9ae-Zoejh56uNG&%cq9n0-L4QWv^pT&pJM5?@SDs)pEv_ zaz;JV^jH7W{H|cCl~6O=?u#GMh8vRRoV~y!Ng-LRb)=GVo*oj_u=N+CCXwt{7*Aty z0q0l8gXBh0s838r7SvK}vl#6ST$lhi8QxeS8SM^3@;d$Xe_CF1i7R~j=t9es8>NR= z-UNoDoEpx$u2po;Cz=Aq7jTaEXS5+hpgFzL0?vgiY=a$^pp<-Gvj!(B-CjU}rWYefR=~g`yXH zyyJ0paV!flPL?X_xNb*6IQgaWGKwIn!@Ee^Mg@ROOaRTVAj=&7dR? 
zhXz%i31eq}GzjSXP3k%>F6>*T}sDbAT!7EdZxEfCd`&b~kgzY-Yi`dGOHC z{KY}!@|2RObTvvi{%5=ap$Z?@&{qP|Nq_7U+jdCNEPDX$C%6b!PX-P|VK_~Ik~3(+ zi67BXWf~U;s>pGL7#-TmC4#!MXeR4k42_Sf4WDh;zt3G0&K4y|^MRBdAHKvmI0MzY^B!mHt>a z5^Dx^Ta(i@T@dNPm}tt2lS7;DWk?p;5GmApV1vc^c6(*3O`y?U=KHmV6g+?o@f)?2!Lh6MmpW_7QYKod* z@A@TQ7HnK`x4|1SI2l1G4>)5iTU{0JHyulFRs?W>ts&FPB z{zwDz?YJaNut5L50~eld<9{bWTMGgq)`>?Ky|gWVdw=ZFpMDa5DJ3E| zb+4KCzrU(U_0%at5H@{ONN6HP0T(gHBKe^l^55sPqLIFDP7OHId59b%dXfYwLxNJy zdfszmP)&9)=hFXgp#ImXu48Z!8uSNr4$x&i2Hk9jt%52rec0}wZ(DHAcs$UE4ngA1 zV?gWYjD7)+TaIH9M{73CUZ`f}$BCxs|017g*6M7&Gx1OS3BV%{s+7Ked%g)1yy;Cl z<-?@R((1N9Xv3v(y5{N@jngmnxSjE4dl~XWwD11*w7Cg1s{fzCzA!Oym)FZkwRTv& zPoW+OFY-(^NKzqt-F$CeWbcUrdBC^$TuWw6oFkhjyMN81)|niC952nse7fuV_=Ec2 zS39|ros`>D#!Ll}S!P>ShP}Ad*vFzT{>3Vrz-tcX9vR^QY(I!GdS5#VB0+!4hRwYZAS_?kgWwh9%O) zQfUQi-iTfy;uTX>*bUSrx6}ff9d~{x@Vd>yBQx>e8Qn5L{M$o0n$6=8@&S$v6pKKtX zwJh3HYn6D{2reuy_TlN{Gqif|ZwFHLU^H!#E(<}`3-kSIw=&VSYOqvJ<+bI>e)dqw z@+n@lq=Ej-pEm_2V_DR*=60sr_d`O+)Y3H0(_p08S=^O%Mi^K#R9feCalvRe;rju? z4+qYFxFkuO`Y@eI`Zl(v8~XqS?BdZWj}Q>#_EQUcvt=C~%kmVxNJ)8wf=Oia+{29A zFPG7vjF@TF@*R;vNcW6w+Gni_;4EwQ7-$n7Zl~<{o4&PQv_|Ujy7lT)@3`3nG8k4K z1vqr*g8q?s|fuI_dK{yObHSuLnQX2#osK;h91W%6cG`D^wr9nBg!u97?fji z)Sbs}c9^A;xO?Jjeqb42m|5jfGMD=A*%69DckdIUYtLfLhLy<0X<$f9)=I@)mxa=e zdm%|fYU|@Nkg3VEa}xOi&n^(84;hJ#Vbp?+>SZ?gtritTr<^maH19o}E1OV(*Tv(Q zi(`3JER(`V7-nd#n z2HUto)&spjr@rB(43!jpdTD+lpA&1Q#zpM-EPdR7j8C;|yKCLL$uidbWgcIlhW)I{ zzLRU7WmOl#_KzC>&0RqWf`Vy|6WjvYZ(ZwfROOQxU)QBKd46i^QemvU2~Yu{;DcnQ zc%0s2sR+tB^NVWe$~B!rlTru>Jx&bBiC?&571H{?mx^{~)yPMl*qS?1E4N#aGVUKA zH13mhnmn^!e3R`YRB~@qE(?FVF=Ci&vLMkprkA2kRNnb*)_X`Iu9vhe7qTU=VW_rh zERqpbzn%VTDcmcYFl zxT4ml({KeJohxi)t+FD2NoLS#su&r+{K%tfR!=kj{IzlE=)fpqmgQoy0j?;Ph1{i^ zMG7Z>2#SIyt*11hSgNTPyc)986x)_()8^N9_XDME)%?k?dLrn%iP-(|kCTS%it5|e z{WB~_O1cSiblRw{YB`smC-l8P%DAg<-<$cIT>&H6VKFvX-o$ z=f0<@q_&#e7gj-0zO_YrJ|SOA31>{`l}pfvom$~bN}OuKu5KAO9x9IDzC};GP-dRG zq%m!-{wXy3E(>dOtmTwvZJKCex?7h{J?6voX-gA^J)=PaZ+l#~mJwo!^ZD3boj<%w z!K^JV;%cy%Q3IPo^qxSmZ!$nx0l^EOt$re-+lZhlqSfBPYnLCDJgZ}WUzSz$UB#nm$rp4t z{;+{I0b2-y2|xnC4OzMA*uJyipk>b&(Pfee_*L-OX8UJ%gwb`|pTbRObJBIf2y*** z%X*BX++yd=q)ODPLx%VJ%0 zrjdCAv2?18F)GBI-ww5DI5~xx*Q1j9UnQ^$jRxJgI-}=?o3!{1=|g-Wgl>bsCB@m- z-K-1_wl{t`*Nn77?fLPuLaR4eW{X!Nh%Fe+({+)f6+g0+3&tMRu$Sa+@?dDxGPj?;dv`qs z!5p{bk99Kq-PF76C{$jOOL{6?&#=GM2ctE$Il-&k%PfxdGC6h&H6}-)sDYT>3f%05|*r5rY-K&YCi=V3Y!3ric zsuRcWo2f-pP}vZJTewMW3E};;x3O6hyDgi;21V^1EhsEJabK*LIrXzl#?yZ{GLcAP zL0M<%O%IKDy!w){QP19?aQX9We%B`5P8ucX02E;P_%IO!E;__1pD1Wih(ziiZ%;gb z*`o`t8n?5uBrrrCx?Y-_f>Vo7X{9b4K42uK*iq%|(gdsh*!54%W3w}LDGXEJKNhaj z!g5F&IQJdrlIzxJsX>^kwsPXnD4_giz+W=Fhh$I`6zs;adp*j>QE;x)b89xtIAhx? zYg);1S)!LD)jA?CdfZ+v<>at?E!|-h9%WjQKh=zFc1dm<1Q zLA0sL&HmJehl+RJylLI>1l6TzZj$CRM?M@xbpz*V0c!Cd(%Vl*&J9*-dV4`RRSD*m zlWXL|6|kxWYf@iE!@~(JJj6NF=R8(i339$LnzRwXMY-|Q8BEv_I2+4li&n!Z92rsk znB$pP{$S%Ax*uNyKjdZi8y*5AZ_^AJSRz0P`+ z+?R3Q%<&LBi9G+!pVo)Zj3ar#(tHH-ygo3esnttQ#7s=V>7VQo3JLS95D?+b3f1he(%hhAG6lf4;FB`&#JTQ+Iv@B`>Ip8t-jVRJKt-KqH3C395Fv^VmC7P zO;Cph7XM8n!P1Ejnce4B+jBxpR&yFsHp;a%JZW+7o9Ps|Zv-oT{IY5#M1W<~uDasL zL8i~rok_%-616MQ@r5P#c9R%%H}{2apOYv5U>Atr&lI@Vz>Ygq#}Q23pefabGM~Tw zHdPsEa+Zta-p(%OTQOC)IWt-U59yhr=;K{8Y^+8)Iq{9PxRNN(lE(dzjGdQ-Mp&6! 
zn)A(P-(nb%(cx<{o|7|vtzC+-qMWXJ7|~+88UrB1nAMxUw`Oj<J_!uZ|F;^KmDw$U=*4y!t`0ttXBuuayNC!Kfi zA`rs;I(8{mf)PU%M{^P!B)1_(y*@f`C#*oZZzZu1a*rT>eXz{YN3DN$(%EKzS z7Tkgh))|AdZNXR{T{SrNAaug1)e_4wZ-{62hAIrVr_xGuED*n|dt0j&=37JCTopHiLt5)6Hf2}Wm{Nhl zQx@(oO;au42O7mSp;GW_!)F{dvGb;4=NNZyi@F{52bbM#!BYCMU_~memR;sAwhVd^ zJG3TtLnN{hHveshFFf;END?2^I|TiH{SEfne?nKMztTY^wKcm8_qpePJr;JQLEk}HT7f>)mYQw!G7QM`KHy)g5>BE39W=?dUqsSYhC)vA#6=9M@qcnDI)&Lh zzATzk3>u;>-Wwv+lF>`z7kcA3WR^L>3pwCz10ardpMAHP5ah94Pts_^6?XLTuIw@` z5iID;4=oB=>X&}*HDz(+Vqy%Kr(Z_*w+Z5045(ve4zox*>@H8DPK`h8_P>`hy74}l zQ+r4=GiKECo#EehD&;=vsf}-FN~`UKqBYg zs|cw~;rvpo%B7>TFiAOmWX&ETP1*su{N9;AIxRc#;p?TJo$NuTeT#O5VzR8Eq1RjA z>V*5gfdP?6?Kp@L%!r|XmiT`9E@(#q5fE4-L%HCwj&Br~*9_cRfW+2~@6td>iHj|T zOWj+7i5d>>MJv;yhI|$}U#=WsILcv(2bex;b1O-bBlW(A(C-(i>IjR&PfwUe5Av6_ zbU)DohbIUvACniP2>dmRX>V3+1(p3KMUt<$CSd_}Lt8~Lr|I+V-EJaU|6E9@d^Y5; zW@2kicQe+M8OMa`Pm^h3cwYpH9a66gPczvtRSa)QbXOU7J_hHpeKj{5CRUb^n$XYP zQ|P<)_It1IfEk|NWR>NWVc{S+MQxuFS(fzc?_X47fFB{zMlBDLKf9;5o=i6GbNKuZN|{I+Y7%X9ZL62j7wd<{j)REE2Hx(h)V6uv-B!L{JYK8%sno z`~yudLMVYCb#zw{myB@b*mtZ-$j8wqySQ;s`qz;5iuY&g(Vi9IA4|2Y>v58P%QUeL zn8Z5eMAeYuaNnz=O`O2(-o$kNO255=?=6}k2FT0?qD%gfRTJWy`hCr6FtYjC*m7A( za>IEMt$thAg*@s{UvX+Xn~C}IPll0p(H#$Vq(X769(Jn`&OBx>4xh9=ufAdSc~a>rZx|j zwky6@g%9u7bwN#+sfPJS#oq^jy%+YbmYvL={{h(MU!vgUaZwPSFgnb>bLVBaiOaa- zL6!O}tmBik0r(SQ=xZFo1yNm#4;lb~hMJNj?6;pHstx>^3E~J4=*O5O&4C~;^crsC zLHzimGNX0m=`ZAqSi9#r2!d3tsmgn=qz);LQp;kMJ}@mXvF5%FI+oJu?~&RL$u$wR zhjQB)3Eu<;_lRNpbO0pI~F7N z!6v7NP04w)BWHtYEwA?{bTs21(+>cIuvKV#{3Es`J$rrNLG)SEn})=#k|%{Y9yr^@ zKMUvbm%G-n)>kf+u-wU`>5ic>uPLBnrQR;nX>&iPa8kJ9QbVNuTKgPT${qde6P=_d zc`RDRhmZv`(~&Q7^8uaquP?k5vd#fxnAM4HdGuc7pPY-264Zwf z26$T|=JRE@D*t$MyU*G6uRzN;?MK3=2|2b6BJ|L<2aIQ&Ed|0^`K>H?g*F21TUumh zi07x%HPxi*7otKb^BEZ}a-ivd6E-Uyy;>jrh_C6#Qg&&cU%b<(HxgGIxP(Ba<98$fh zl9Oqr#whvJJCk;oxrFTWJ|O#czOeG!|I=#c?E0))|UaZwaGLD=3 z_cARL9nV%%M=LYKGPlB*kFGBF)t>fl7=COjn;W#5FUR&$El&u!^|(2#V1df;WX1n3 z3}Hg5&bFdE17BRQhuskbj!btuE6Mgfg4Y-oLy&sPyH>qkh@X!iZfqM)*q$9u9Ryws zLy~P|jK%wC0JcDEPu_^%-nVDv zQe>1GPjn!3FUhK<>mAK=Gg_7NkwuRmrg08uSCKRiF_Tbo`OweRsnlYmNg@Y0P+mC%GE1oKS*^|0+v(;$ zh)6k8o$FvFkznicPY@6Cd7bs3iQd=#MKIx~T;DxiJfrNCnvi~NyvqlgPXiBp9Zh3A zF;UB~sR~X3mDm#E05A%VCuf_$zhia7BnOg;jRaS(YeI;q0q3u0Q_|`-`bka3Ymyz| zswGnTP7Z{7=N0X#n?e7zWu_&cq(61}gj{gCc^T$5)e0F2Dh9fjrWuXy@=9A@ZLJoEKkO`q!aoZxLNp9-@qGdU-imt#iSx(~zYg z3(_|`3#;={1;)+mDDoe#rdc*wTA2T%(ZsO9iTTy4p{WDNQYmkxp9N^f_9-c(h|&^Kf~h)n~T422Rue$BZ_;-Jvms6E80nG4_iu8 z^w4>T-+dmb!Vk3LDDbevzY#I}i?dEo65>aRCXX*)efsWQObIDEK6vYT@jFtFPuu5Eq9XzQh z%v%zkzwi2?SmIwTK!w}XqUg4+;NL=J5+ZSBLHi~tJloT)Dz<38Gl-+?W=+Y;?1!(5b6pe8r&{Evl71H+bD^p;G{oR`_T@sZg9yi;}67pv{mE zDY_x7E#PWYa|`+>>?P)` z#^ZKB*$x!ToT50ua!?j%dA!6sw=jS3+Oj!=Cmyia&ezb~wZBSRpx#n$7Q!#bG497| zUIVRKS=cOI7597n$z*N#>r3Sm==mnF$zsD~)A$ncmnbL2<3)k?l^qX@iZQvCo-&8I zn<3y+u3!eoYU#89LY*Tl<5-s!mfGW^sGV$Oy}8eNlB~+|(vsOuD?`v(Bxw||wDzbc z4Cm|KKb{~4x@SE2-AUFM?ya$NzBwq2H&5p(qwpIvtf#2rdz#I}k!$Q=xunC+rXM&WbKgWq-&c&F1kWjPJ$IX+VJF0PAg5giu8dIJp)Doa@(RgC=d^V=OOD;)QiN8k5!w|)1<~H-#wc}Sg%Fx~ z8S21+HI)5UnJ9d7UA}(Ev;8B_y1uhcY94E0Lg~mF?oPwCh^AVSk?}Uc>+;ICT@}=f zmW}qe=|t9hIXJyYN$ENqy}0DgyqsKyzdGQ6Q1n?8=W6k!9WMV4`3RxjFY;;Y749}t z7iFk4E6gojT;H(fWk)I5qELzrFDeW^?wF^bp1%10Gy9eyU&^x%&eBMdNex@0)U%F# znx(br#}6n;Ej0EK7-NlH_|ZqVF+f^UPN{6B#r?E2(V5Z`F6&qFB4l+kSF;(u($q@i z4G}g3)skHpkMpRl2Bjf$Z)+;W$f`*p&*odpm$6!HFXY)46a2_%;?1cPIk}8ts*aju zucvBy@2ShT)qe?3o|!^b*mOXLX1nX35Bz0HQ)eW+m7`MisKs!@K`6!uCZ;8WQuMG7 z_?LxV)Y>>@kd47UTP`>TJd)@WzcSB(ozN>vWTh5$l&m~T49Y!phG~E^EzIqtY8yge z>>$L4TDTlhv+-lM90%xk;eGc)hf>mokR(Q$V_+Wb3zyX1auG;q!EO83MCP^$B9B6> 
z$c$Vtby0;=abe7tD_6f~_If92%y!CsuT_C0T||(&eg=7)nzh-@&{EVMcKX8JXV?m* z@Y-gJWQKa~UJuE#$J?$(`l0nogOz5|*oQp0pMQkYZRvoxMX%{5Q0UMI?D&Pz*A;ZJ zsw6>v?--r?4`Vu8IW3iqV3|h6G|_!X%J?>rX6W@RA^(vZ!$t?Mh=KDe z>3!ly2B%AbR7n|aDvc?-i7AW>s6lp4NN$Q|8rA6#o3OWSP;J5i{AYWhvkG6Gv+ zz2JTNz<@8iaUye z;kxYmN)NR<4NkK`0%O1`J}s@WB|8ES>w_?dPCHGgU)f}oM~>|^E|vBI(!2ICG`O4Q zu{z>_CHWaeMb=pUROb06o_K?78gR$G*XBdxGv@Jghljhw$8E*-(i*XPrTtyhDn|m^ z=S<>QEYrm(SiZ5R9Mnu{ zPODj*`M+eUh)rdUEoGwBcP@?*LeQx~URtkdRmIyD_s>+F0j^i(_kYVoY@oDT*m7=0 z>q0_yaatiyaC%`X8}wUe*XEa^*Ow+{4oNpn=V~pMz}ao~7!Z9EY;l+q4H&ZI?da32 z_yqfl-5T{~P_3$=a$-9I#*l9W5+mHFndQYhrC#&F*0P7hj8~;@)PApELKJj0UN|M6fEy)J!fzt>_z7Nw>+{zD$tQeg@&3;zhy@{PH+Yrw-e4Ou$Ce3wU{mAc6t+509NWFEwRC z9QpQwua#y})JS=&BKCg(3W3()A~$z3W7cJe96Erzy^d>-AwBc*=swX@GMojnb+0g4 zjuua!C7WD$ANLwhBRMdE@1=#SgX6E7DYyu%U7y=^Q(5VfGv-E>JFN=3#+Pl4dx1Mb zsw^EcaSwWW7|RrEik*#ZIv88r+T}E7Uv8`^mb;Oq74<1kt?Y>yw(+S@8s@rqukMOG zv9hBI1a5E<|Jgw4mlWTaHh&Fnrs&iNJqgC2xnyHItBQziPa(98m4?uL_Kms%m}_WZ zCOYZ3yHQJEInbJ)E5E1E6UGCustXjx#yzFjvIINbt?Q+z+tb%1V}<4uR`C;kDk?{P zxi?@{NRz$-4{$qCsovQ&P=DKi73MP(bubmSGGxAfPqM9^bpvd=R?0k`e0u}Ai5A8r zV9-5hOtjsXK?qxns6DDvUj9rDJ5EN0K3gmW={t!{P`HCedKQwbY#nhWxxfYKpG~r? zP#Bxw!ZAMN_8|$UDw$WVfkuA8a^TyJU0?AtgOic^+9~w+&zqslB?CF4BbKashqjF& zcIAc*J~2~Xjew0X$3fZ@h-)doa`vOb;;un(nv;5G^nQ?Y3qkB3xKIjCmY;$R)h+$T z#GIQ|=qU$So>6#2>Gy_~amp{|1y)m1XjvgQB)MWu2O*(C8Uwi_f9(>&&yYfK?Dy-b zntA^iB$Zym0h??U;KbC|#%+_2)~4_$9I?lK&D}FQ2*=O;U6i%J*vzWIeZLN*N?GF5X%f>u5#7cN~av&LN5=-q!%$>o zJm2Uf0KhKQp)k6d?$44|^{WultoQUomw%cd`{(w1hJdrPek?g?lLqkn&0BWM*I*f+ z?1qUtVF#NBM&-j)7#W}4dX~J9{kKG5Ji{9^ZNeEEN`>nWQ()*YF;DaKcubehs?7I_ zht0gaSsG(!#H)}m_ZX6Oa<=wq6af5L6x{%q+}TQJnB~ar&#Jy_!$qH)eTGeB2}V>yN&xyc zm4dr`1UL*%>_9cG{!8Bv;B5zrhscnda_c+p!@!;ey<|K)Kwy{CF&pMFAC5h2cFMk_ zooJxBNzbn;XeN(XbN{lY^lG?YGKA5d6~X{4F-OL?*mE#%5S z2|MJeRpru#wtY9Lk)-DlKWb4bnYPEO<#&IjLH(B>kbD|XRJWI@&STNjQz~g86{%{F zPj|>1DV7}CA%n5l`*FWH7Dqrodtg-e7ga1y9>c_)Kdoe;f%DUew`}2LMJT?L8x_Yp~wk1?0X+zzA zSSMY(?A5Z|QTAFIkuDl7Nu=?~!L*HWqMcQ1WYSCGoQFy#lJq8`4T&qfoMm?4coJ%z zmb7ng(iqL80#TFfj6t=1(LHpy(~+e!9iM$XgXRC+QN&(aSI9Qlr6-j03po_~D46RU zM=neCs1~>_p<70w9_VU7T+)?w`rDkM-q{fLZ;==eh$$1nmDPybv|*Yi&_eQ00QKW7 zx|F01-Xt6PdP1PbbiOwli{&@QiUq;4vLo>(EyLg^Ax=VBS;M#$1mTZ;m}Rty9)_9& zWJfPi2R~D?{{v&4dwK2ynXYtb4UVQ=nwsc|AWn0|qV+-TV!BbE8xc~?OiP`KD+mX& zA>r{`T2^T%>K#;Mi`#$agJ&9>lR3>=o|+EW&{9`a{ii9o-0G{2NdNhqQ)yK z?Su9T`%TSY_oh)Vs-6{jgymc$E-Xb+Si@rYI_HQ3>myjL01Q{U?t|~4Mkf5;@8YZw zi$Rzarn+jbJI;Pvm;Oq7v7=Ag`=meAZoLVA?4e-z{!M+3E+6SqYCy9(ivXpjVtm(DKtP>hi5xNEo@)KU(%4hT!Xc*S;@n&MU@9ob)(|B~B5KG}vzq%6{@l zytn#Ie5AU$Q}cIA!UPNc|B{ggRIjU6p|&=^*+uz3ie=cQk$3;^8z8JD5nu|V$}2h+ zCM7sMK@2J8xj?PdX`7Mwe5z`NCrBkn=k@?hx^`_Ob!sf#!Lag!J=agVL-kHJ`2hBu zg42MFS)fGFgf5D*!~E=t>FU1zw$`Fo4x~LS{)Y!vyr4qUI?YK$oBF>rrS%)@idXlr zt{IFEXOi<+EmyX2@(5rOL_cU4xhX5bL`$!(rmxGj!t_n43)B6;*RbLQ^7rFBj?SKS z73h~e?2jDVHI+VXD~yRE8-cuHCFon6&m-*M*l}yJFbLyAX^FWt_vt52QbV?!m5c1N>XI6ycuT_cA2#L(j&gO*RH`dXyC>LdPF+$-m)GDZ3lux^5KX`osjKSpYTK+h z1r<=-(xfvfHU2buH(FTAq4q8e-}tk{S?Yy)3Dd1YCg?}Lm)7D`){3p#Rf_VHtqFSf zwbOAroP>?!JwZH%SY8w-ri^YXZm}g^yGE;VJ%*{hn0&ganCma?1V}AJ<+im5%R-Hv zb{Y$4A2D2^dJQ~zFK~hGZc@BIasdrz`<;(Nj)(lYd^(K(4p#a1!24YFH9!yJRNeXK z)eRoLeI?@D-2v;h;74>mF*c|!M%6{LVNmR^!z#l}ojv_{2LM1%Z{#L(akNo_$MyW` zRdk=p^0abopRle+qj~h@h(jMLk?L@*c&keVJK`&TK5Z@i%Oe8umK}VrPm7fqos32= z7A|Q<6BYVsn=TRCV7-?R44)@*VvLIILgB910Y;s#1+?@WP|@h>QXPuCZxIz}GyvKwQ9VQ*WaPQNiPb@)6; z6uH@h96d+`IN~L;mX=f6u`yw7PwgWrR!q1875Rsn$RKa<;O}oghqOkX0S-nfM*67K zs8#D!YL9Iyh?eut4#KW@hR(0=v`PTUK6K%VT+~{`kYDbu;n7|Xk$uV;Jlb@ zf2$zUnMdOx*CtuhB;y6wPiWq#*?jaF<6*^uIg!`({4Cu}Z%a1vvfR8gXgfCRGCA-1 
zI%@gwN`PeK_r2)}=Cg6{szoL{Yn69hQhWEe5DeHP%{Ja~VS;Wvt8aMBq_%D+n zd&ueB{rGwJ@*^P5b&DkOJduxh>>1_3@73&S@3K)OweyAF&AVtm{kj8R-fQC9@Eswn zzh;sYb_fdnCKw~SGE(c-p9U$KW?oE|ryu`gV`U6YTJ|*ZO#@`o?R2qiA1=OW1hHVB zt1{=bM-*`Sm30pE(HL2mfyZ#pWpHn(Dn|4e^8trtc|XBpN%$1l(O(ne&mWa;d@!d=ga zh}PV>Sg#Z$87203erzP?&%GPd8zZYMO#4g*YZHhxY%*UNMF!d8sg~6GpQkSjQ*?=t zF}A}|)nsgb>%cuM3=7V5b$G>C)sr%HZdtmNib?=$#@MIMrx(71DbT=SDP9g7t2)cy zo|718!)1RvRh73lU_dBIjEbnbgt;sml4}p?hh41!t3V7yUiw8$6t*4Ud0b__OCbc# z<_?dCwaq5x)7yZQ9Xr4-P!>^?Dc1%6s4J4oJG1>j&k8Gf@e^Oq>M|bs&-QjBE{{pG z2cFu9Z66~7L;SLJ#S-VEDIYS#2Px+ni;9_^6ho^!Uisxi8<&4vigM#@WG{8rpN{6?X{c8 z0Z_Sz{@Jo6gs=AoZ`MDDVSJjm%S~T&!$HAPil@vO1sq%D?W$Tf`3(D(93necpkhQv zq_$9bN{j{Q?*u#ysT)~YPJIuy+k|_CI0eIaPQZ1;B$_10v>jx2KLT-`zEp5Ty`Mqv z!Fs8tt(E$07jAW%t!wviJP+^HysW^OzeyaPjTJ`4dT1d|EFzL{%h<)5{PB*wo>sSM zgq8~x#Nr>s##BWp(0ttA6A$^e>U3!=-$y&`C=jUtnmr=#Lt1@9xeFLzE&o_(L%<_#kLSw`$lF0bDX5h(P8GleuRn1 zL&6v|T&2tJ`<%7<6cBN8Y_L=kv|BSS-yzxWa&rp2Fn^W#2x7xuT?Y3gtsE2s!~Wg| z*1nO9m--w0)4{(#mLsYXI7rmC1kC`?nU%M|9i*InNl0JX%V_j>!ygZG4!qB6gF-5y?oFR>N9&p>pH7| zhZsVRLP$hJMEsS86z2w=ukFuLg5XZS!;9HR6p9cJ6>^jYcp74K1)250h2u-NwFSJD z<5NS6Oz{Q7zhbLPWDrU3Q3t1;G7N6T1(zp(zN z8M8p~|C*L*h3$_#3Mj`>5h7vPy`G_t7>o1yZ(OlRz%OVDfg89S{MiL8E$6%1#YC zC|H5BnKsNi?slO6gf9D~3v*7?s0YWtep=yPp~usNs6Kbo zeeqDO%a=`45B);w$o%Tv4C{_8Ydx3yrtiF~SzgZan##{0uY!g*i65w=MyAIHuZT*? z9qPB&l6SzXKl2Fw;|rKE7ujlK_&BVrV9l{QTvWhKdex|z8EQ|SdiIcN3;$;<6w>z3 zK9LWe#CKk1f!_&3_Sn+-9dLfg3)}^mb_yiV)czd5$Fyb8fgMvMa#p}` z3yP~&!wZMKx=qdSHGzyt1Nd@qj^AX;swBIh?k8Uwhi15ulNFvxFcoE)4MQ$a*^c07 zcgwqZh-Rqq12N&AzHsq!5pA$rKI8d$IX$A)B;lOWRP2)weuZbQtSVyEE~D0Or}q^` z3k04R$uqR8qkemF)e>Lo6BTP|y>(gswOi&-OyoeuVQ#R)so|sqpZ6|J4%TPghp@nz z8P$&F7t|yjJK7sGtMwB>u8@<*n}I_Td-&P57Z$;nRyy~V!S>X~;V@GvYCmT(b8h#V zLM_t&;173bI_r23r`Hk`DO+A|*cXNl8OsvvyupaG0-5Jb<5BVGcfCp-(-mQZ3auGp(im-zc?eTkZLBa0RA#G4CR6`qHo)&cI-s7Z@?7gWQz> z$GC12o&LiY7xH;eV1<>=fm4EbUZV<>yK@w4&1FWKZlS2I`A=-wGE5Y{eOT?wR>O$< z_}^#8l-&OHiHi1LhndKLO$Prpi?`_i4zU0KE*R;{a|wODZ%S1D~7-~R;o7;jQzm;oOCPE;9{NM-){Zk|sQcgG7Y z9@W2ne+u&ZpX4xML-YUTf_N5EP&-UtE!OFofi!k#+`|TN+t_Y&h2A!=PO!n!iC0US z{?ddy&O+H-u3)~a>~?sGABe8yXX?_&l!f>xeU;DGHTM@Y6#ts6BTxPA6IhzGIWhnus?Ub_`L!e&mr!C)^-NxF(t=onAY<=?=0s|rr%d|gY--jJBYf8N)+-J2Q46NYZzfw{f*@qy1r#5mBUUci9-*+$0=K+iy#AyW~a zK5e6`wmze*SQ4K5jfWZEO~Jnb{2SARrN054&%@Yw7S!d%X2tX5HE{(o_`=8i?X1no zPMGvd+Ce2juMI((oAE=(t~@Wy?Tuvzkq}!s)m$v*4ANRkkZDUw2Kd{L{z$#D(Xg5` zF-|sdC31272GsC!z1?)=S}EzVo!_(e+xeUpV-O|YOe&;fZpCd1fNTW2Ajlro^`m87 zCTeJ?CmE;NiAtc@aXIDG{s#(Qzq+eeq6KAG+$ED!qYpf#TikKqdrBYB_Ui~HW5{Rb z-1YA5$`9;osKkWAUcQbVWlqco3X%$fFV%J}z;@)OAJf=zYVlea48{J|t4Xa=b9n}*l|K0k&Ox3}g?z+(@yYjcNE??mA3DlLq=IdhL? 
z`_y9@rPWREjrpRE#68}?@|ra<``A*$BC2Y$8BJtaZOq2HxnuQz)8ZvTG=DYw-nprF z!*vg5UJ~BiB-*!ptBJ(jlM_P0KG^6O;6JK0+-7avcjq==hlRE^oj6H#a7V;4X)YAg z3hQuWn#0qx5d-Dzm`yGDAyi^J)91_sJ4i1UeTMiVla@Kw1C(9*6-P+nFuta!@YtotSD@7^l4KDYX5zxZ%ht)gY?86hP2WA zH8d@U)zpbfr_;3jvqi*HmTcf*T`nq!&kjcUl06vX4h(P+r=B|bPKtVJebu!yU!CLU z!2h1>`vkkMciLgeXw3j?eMHtVGL1%m+mNF8GjtGlW=S3qJAH(2b1^bQ7de3vJE80x16Q}@rTIN@8#!0?4` zwkjDOyT4D0BM~wUMd`pPnvg6DQ@>|Mh(U@E0Hvs)p}5`TmBt*+fcb*_iSqW^Z&|vv zBpu@Hf{%Rl?K>-}{|zCr1>c0qvQIO;iQf6^^2x9&4&pMzdyer6z9I7+fM z1)my2m`#E;XjbYFIzg=0$VTIJ9Jf*n7=iGm$b+S)fh6jXqWP(MZJrp%N0UwMQO~(v zD2kfHo+-Ox#y{ry=Hgml!Elo>{Oi82{D~JY5tgz<_?JUE{?2;iRIGMMizw`2ds^zW zN&(F;*yvA7ue<8waO{qfaGk1x{$pXsUOx8oQ^3i&%=AJNPwxch?*6fjHf~)*gSd}g zwV_z>Hgqg&l`BXx?Ay!PN`LeCs$h}UPznaezkN`n7{yHF4p#Tc)lUw&Bp`Y0%&&v> zv}+92G@FI*Tp?kT@Qooro!gnud9%>$$Y726xHT!+Ixz;w?@}3!&UMWgH8suAU4LW9 zHI35Mef5!HWqi8@ke}ALr$cyf5g4=g;UFpbzASEFZOuK5Y=EVrQ)Xbct<77~@xl4R zJ0;O&;4IvQ5jq{{jCZr#&%)D`87NNjc9yZ*tIDI%QDFbD?O2d%rSGO-u9ArUu-B5N z>3U_(GFPHcZ_z#LI?71)rMR=L^Y!epj(WsB z`O~&e8-msqV;?1+CXKN>kuFIJ#&diCS#wa>YMT1i9!-M)a8(6!!{^ zqx}r8r;R8CD=uBxJ%t|;97U}YIp6d*vj0g9zw1L(~f=UyyT8mL7%jeF*D(hwt3@n?f4C* zs9+-}iP{wBD%-L(09WKjk2IN_jEaO_s5%*P4!mtQ^g-+JCLnAqGnLYs@>O zpH4vD;!B;Cg^oM1*${&+!JNsTicQHbh~JFaEFg%FKt;EhdSSk>q1&1VYS7YN{T^hK z3ENI4xulnB<>OLZOyWUQ=vZ0JN z9_>+r0-+#z=XRN&H5rnlB0TXPqW99-2YHAZUYjmb+lb^`bk8bRiC_-OnG#|*kRLXiE)4~qYwGTavH|O7)O^_ zf5jf1yvhvo>zKpR-;ranG$}kmCD7q{oe{of^e@1I3IsD>3M8U zz6io#qr1j~lxnxXnWzesINaK)I^*9R(6vU8?C3>1(}xZEhGnTU`_fQ|EBg8MJZ? zaXX;67n93^{VWTTBgYgeUt{cHm)+vRRbrUP^BHTj*S9kgc;O=n71qaItZ~rOW=;@XN}$@w*X|pc+AR3?$YUO(`=2$Nl!w?SYxZL zGSiRR-TT<&Zr`@R@Rs8;gY$J$)PBS8Jv;|SWdHg?;0YpDjyIAUlWY#qrK~a|RBsBs zVxSooBuI#?*!b}~U*-kpu#d=&*Td1qq`mD=>Cm<&d>gimD9BouAhu}X?XR%sRaFiYegKHI6goMf{t#Z6JQSShC z2jm7{E0bE75~(!G|6(@L+Naq@nInr1HmNlMvMFN)OFIwY0jWAnSDkTlPD~8a?ifS_ zM}T>ld1N*6^3+H@%Rp1{cBYoYKBAHMB>@W9sV&&LmG@dpEsQm~a4Jh;Mfcvyz;w*? 
zR+gp8%9n%pCAus@QV3q(BftIr6&D@_Tf9ze4#(}l)xq2|;&oCV!@+mwY4FmczMSotnhxO`K=woUf_0!02x9 zfGuZSCrdi~wEo-lHjec*g;OrAGn+fP%gV$ZuWGT5WVF_pvhf&aG|HP%%z~#|yF)lD zIf1D5*8~_H5d*j36<&w8w$8I@4~s`6kg|MwSGtKP5VI3|!>};`w9puJj~HV7;TBo| zsIx;v8K~%1&3HzXw~35t=XB<~uAbEBxOs(!_*%SHUyfq&2%V4%wiOQ`06y+w;T=JH zomeU3n=rY_D#E9AUDMXJd+-MLxp9S4U}>OX(wV-5~Q*H!P#|@9k zC!I*o>a)IYTm?7G)_>c9z3D zTKlF37!=QM=D%mbwcK}PXtEc9-y_>l6QsM@Nx-oI#e>e#a!#Ge)97|{!;tte@mpNe zmf5#h#@`cX^7MW;HP!-3(6+Ds{(b-1>dyS}v^(ZzNv!Yn$=yh@)DpEcCpOQ>?Pdc{ z5H*mq)|xt?cCTN-4CyA$XL&4QxGGEW-r2aF{tGXGy?a`QojB*pvl?voo*@SMc07v6 z-vU{5m`tjT^Zd`WyX!9!!Z?}=M&UhtMBvs(I8RSUq#cZg1C6UZjOH;mp$RK;Ice6(bwlkw4| zE-p3TkO{qGc@%lwjVsL8_C+<81C{qSe!=&fKAmU!kA@We9hqwX1KKa|)|Q^a)!zfR zLYfG$*!su_D;nm-peBBY25fUx9^$u#mhbXt(G zq~Rho0&(@JO(<*AOOI}ET5CDEr8uyJrHie09su7B5kTG5#RKX|eIQWjTvbK-V=_5w zp!Ig;7Ig@P0s3(3{7E#xR!O;=M_ZXO!WfFP*O56qyVY30%oO>mba2UA5F*ZLwq`=hRcoAu&hh5P*_M=Mqq$LRKw zxt2U{r7sHUAd7VQ!O5Z|e0%&1NuOS4{KW5?PhN+eg`ZY6tZUhhgPq!3A8=_XKV=05 zNM6Z01W|GX1mi|_cT7pI4?CE55zkCx7y!)&C;_bJ*DP&kOT=A>Y$t{kYluKk8^xYB z;0RxeYNSyz&xScC!qMr_d*I6C6{plm0j!?0$Fd`NF@k_bGFIOlJ#J6`?B6%|RB3w{ zc^NES&_MFCg1N4b7F_IAjJ3J6zy?db*Vg%*v1p2&Q~TtrfRdV;dm-;V~0@o;NQ;Lk<{AUxO@Ob=O;-%8XEZ%taa>$-2x*YjxL$ zt9vVZle;-~ zB(IL-MElHh%bNMIP~LHFxBIH8p;s{J$zSKY9KR5_+t#bXIjELGk7+$rhOVs+0fWoX zfJVcr?@GzF$js&4OJFPOv$i?4e@^Z8;=$}FQPw*LodY}2;6TizogU&1Ntbi&8P{pk z?1}EAw!{BL++PO8(S6aQFoY1?g1ZKHcajh!2^!qpoxyEzf`<^?f&|weh8+TDG_xx?f#oIeZX?l#L@I)-T1`!%Iw z+M(kERor~@vU@A@yiv7Zt!hT9UwKx=ywV4LHh)zXu|M+rqFSsMo8uX;8zl9spZ5@W zAm)c?-MF7vPkA4P`_h;Zf%)m-51>3XgXp=aI187$jLfz{@m1ut(UG2yq7Ta@#ZH;a ztE@XFj0|4~8y^6N7Ogz7<97YxLeFV#*Nt`ft_ z;zRG--|e=cnzbOX+;0BpyYT6~=itk^`J1c5uKHtpVEN=j#X&&IGI!{82(UKFBSFS3 zaLAc&-~C-E1AjFH-y}J2vpULqUNn6?$vD~conUUUixnYv7VE5SgD;i}*!A)#(mPp8 zX^51E0W#&bU#IfcqYa)`R`9vbn$$E55c|ibh#2PM-2c=nH-Bg}bsv%&(O|&L-fIvp zmY$h+$koMrO5w!lr$h}4me06WPDf6QNbNVzMSwBhM?uiKZyoEO82l0Hk10k@caVx; z3&0~H0oBzo!bUWCY}cRa_TVW>&~uMHoZI`529g$v>0Er>#D9_EcVFKUHx#wl-<*1o;&&`2Hz)UnN&J> zre`iK8TdxRB(B*@+&-ytQixRXv)UY^ZNYdBd?L!%@)uv?6%zr%mr3c9-DcYhN}Xt^`_g=hQm5>E3yKxq%-M8l!tO%AwltCliIb-?dba5hUAdP zfx>59gr5NidA{Um)>`I6^em@73;XW8S`O@;;`~17bD1)dOh@1CIrZBR*Q*pCxLPTL z4X(x>BGL!Ug&<~RDk|t_B(RKDKF;|@FyC!d3NE zj{9~@Se^KD#eUsu>d4zzCiIi2`@3_oQOFluowUB|_1HsE0FUrku;) z0&NL?p!UumbI~0}vJMYRw^Gb}8_={0g5%-9^*y@&4vXxRD8fagt8W@m&-g4}&LXlc`784H%7(%G?8D6dUv} z3E;71u1#Z$VfqMDwDUW|*SgYQ;(Wiq*5s%P3NdL53iDxhcRv)a02sV`+&(mayzI!j zaN+6x{gxEP6pz*HzHoo`*D*PqzSluk%4v`NKtq!L>l7Ha=F|KAx3m=}zD4sJed77; zzsk}7p<48Y<1#|*eU?|%8lryGHzrQMl4-h~0F=~F0rn9lxMjKp6QDV3Bv;D^L*?^)pEG6HIJF4+;gpYItn!rHG zI}IEi@H_KDL>X{z^?wj@@Mz%f|8r9ILh%IR%hR-ilNLbem4tBQ!)?HV(^s~j4eBKg9SJ@M#cU_=`s{t&J$5;koHO0ze?4}`-JEjr|MYbcMj^eim6N}vp zwUgDO&C9i)n)6Sk)a=F$-`!T06ls1Nl7$OZ^0=F7hPVOlN;dE+kNDTG<_`uv?!@lj zqs17RD|7`P5x232J20rX{m!5FEp;U7a;N<&CeA)HlzT*n6awr`SE&q>%19p83 z++B}8*StEu3{b-*Gk8Sr?$#H$tg#L1tzXYw%zmh=&hB=m&{>EN zob=*$iek1<)8iVTPssu(?N(5celN$na}nT6i}21*fjNrCM?xIb&@dzJGo5RDhk}!6 zT+4eaI!=q3U6B#CKlRf%RWDcbnI@2q)+03L^aSYJzFSQ_oGv!-g$0R|aUDH~-D-6p zSG;F;8oLQA<7hYkk^)vbm&UKOjX)TgcKf+RkH3O^kPYelCAaZR_`?)SoLr@t=ugZK z_LR@sZ^?Vf^xkm%*h-RZ;qWT>{V?LGm1W`}0T}oieY}feXzKNf<4mi!<$*c&Ku+&( z5skf6pA_BuN|Ob8;1|0Gy-8MKvw#(y&=0_R{cJGh)y;3NB?;#!4qZNfF0jFeDI1VS zA2858_E+XaZJ=66{_58Y)*AdZ`v`Q((*NpwZ;?|IL0ENBq!Gk=6b61qdS62)6H-ID zLg%rIILgWGLSLs|{-{kYWfEF71aDcZcuU>Jmk<+1$vGQfEH_Dr219D!tCAIBau}e# z63SO|Hq78k=Q>N^;!TWsuUu#K_Wtfl$yu@384ea4&z~<{ua55o-|^rR{i#ClDidcb zF-}fI%WgqZX+3-x>itgO5;yd^E`pFwKr2?Em=x`>U!PGg2~GDuw0Tkf_E-80Y_a29 zzJ;nXZC{{~eR4>C1(Q?Eo8EEk!Me97?6_r#x2HnOayOuXo*7!DmyezCf{e 
z{WC@#tT^cuJvSWs^##FPvD3Ry+R!?ZvNjLFU?9N6`~B;t>t_hhO|V(6_e2yxmH|K)OpZS#;$8Zn@+VDz zm0k2J>9T*ys$-agKkSWw;29soMpl5^CQH6hygGBg7!l|prvb?&6WpWYNqoJl-)Vms z`HL>9n{=a|@@EmJW!D8|9Rhv2ZV5Q;&pw9ZhLw>xc*6VjJErXYhRIu!suK<0v-6jP z2~B{Ms*92x!(ZIPQ)c?G4CLC(RPnvJbOf+;29$5YuV=fWzj1y!XG9G)0!S(-Y#kHnu_#l{3~S`A2^>A>O$?+?dY3UZ;od0usk#H;Bx$mdW_^V z?wlB3R>lOC5P(gh-b^iGp{d(~kG`Lxor_<&VFKDn&9k38ahW@J>U|lWi`?HHI;!rIJG`|@O_-J-Qu<=eR|C8pZ4oE zRzHpSa#TVu$Q_EJ?|VlFhe7G$1NuFlZZlClZ@@+D9Rg|B?#0dR*AlnJXkBebFsnaB zb8?uMWj7@Kg;X#^f+?30Nl?CX)U{X2=TN<{`tq49Ui zYr41{tjsiJn*AM`X&?OO(9fS;NMXvqzaM1VvSB5|zuP$}>i_E) zcUeya8Xv+7rz&0SKci2VfWCfbuq;qnKuSg9yU_k-mGX}h#)&^6g1_7UVe$X7=l{M# zx+3HCGSnVzh%y6l1)!n-7WtESt)9RaY7OS{KcY$4QHT35%g1O^Tm#mHU{gQET>mKa zXUDa0RV!eJWnpsPW=3d#rfvEE1_Hi?KVwgIKuaOrYI7r{I^9}~^hy^QIb@B}T^+Wr z!j_{B_Fq43V38S5?ZOo8-+Jj`mIcj|4^SwICznG2pP;7i~rQ80F2< z;{6X||GUGa^Esoh%VfY;G79Oh-U%ML9&RF74o4vN*n%-O1}?FzFf!KW2J8=XH~*|o zMyNA&)77@06zXdKtt;=m3Z4vkUm{~IWjS+W+0Xmya;s-3^mc;b-WG)7SG)dPghb(N znZd6g*eVi@alF{$5bIZ`;)-Nb(wF)bAs*Gyw!D|rGpA2}>`5i^_JE(oEiEBLdP~zo z)|?! z+s~U^yWi{I?Q`VBB7pq5TGu%Wv~#It7!Ye6bT9%!Fxi zrtl_zCayAby;bOnbV?00s~uOv!@`2vGdcBIB&28!BnvRz+rWf}puLEl%a{ASAI2)N zL; zxV;kzhyHNBY)F}&q~5YrMlo~H(JEx*>8ZeKI7NE^Tl2U`Y$H?g+BNh?-lio8O>t7asf~#*zfl zS^ZWNx^Y5{&!#4&x6ZAd*XIs6LsbKpRY)_cSu{5z1~@ZjFOtyhnc zK}|_TTQ+mZn)P!9&s^x~A#1WYaTD9m?l(W6)FkB{|0D0=ib~w*!awkOz$5x17!=Ny%mp zb%15d_%s@Z`mg9x=<4`urud{Ecx9FU-fEtJrJY^$W9$D<3uqqtu-`r43C;aYW|VaD zC}VN6kdSn8u(UkkMgI<2@H#s-AIsMidzj>IKU`95*7Scyu<2#TY5O}NiF2d>;~E7* z6e)LfZ!U1B$ozj1mvY)B?EOFO^yivw-O%@!b4{?noTI=LxVGv9(-$t1w}cKU_}*H@ zA}4>onU@a>(=HV%Z>*DsJGoW~p4@bvEh>vY^5QW@-M05G$eRcXR6!C9vNUUU8ecugfTN|?`uEh;)95xkU z?=-NFiTwaMr=%=6ncpY!0pr?wY+;rQpS~cqRt(AVzQS$o6Dh6UVqJ@VSLxPTvSrsNtM#zb6eLX_U0Y6}wgD%#J ziDxT5EM&h4^yoh_x}R|*n5w^bsLtlnjfIKPe7%?kww5S{%Wp?a_q&P&z^j98uNqtL z6hTpmWG3M8x4qD4{JWVxL8vcQ0sHIaS}vFzaK8ULOru-Ptf@Amu%(H8b*^cyrL4wD zro?k8px&!F@_Wu%W*?mci(WK_J2WyhE7Ht6gr{*&ex5(Ggrz6bAchhK*{Vu898#F> zh6EE~VRp;*1+Zsh=QcdWnCVLFtFO8SN!#&%c0ca$B&J-<%fx82?LCD7uB%i$DQ)+& z?wVN-Ry4>xL~)X}K48ybIcUJasCOjG+7$>CtwQYnzFN9%6%xwdeU=TaM6Ag%!S3G=uK#WmyRL zYbo@oBAMsGDNtp4T|-jxJ`OM>xJ8m7>cL6ZbR3 zD0x%WoNXi^ev|nUqZp>&TNRE0Ecd#byjXz#J)=*Et#)QJj!L^&M6-4m9-1aKP9kWG zAT7k6v}dXc2s*oNK3fO%pWUv+nkfOb1VkeVZ+edo4Dx*9%P1OpZ(G}H89rS{^WGXSt8!M$82NXN+R>B~tFH%!Ix#tVD7M|}N zl=jT$%I*WfPigq)P8Fvsns%so7#zJqL&)Fwrq;62HCzPEFng>p%3M}>`?Z|WDqo3X z9%SPnt2dm;0X~T?Xxe>($6ObJ)7dTF0l;K&l@)Ot4O&91k;4e)uuMWxn^_;}&i+8g zEN|&)s>JUxtW^I%NWxb`Fur7QT^LQvzoH8M_cPYP>l+rln%3TRsOkqcY@Z1kQ^7Uo ziQ?MCbAB#M5B6BwM{;@N=3t6j>IH{H|4esa8EEDqYryg0ln;}yP4NWTkWUOa zHKjW>>_*}%Zh67A?I7wjP_0dBc`j*Q9&K4_ze4qL-HlKIrp$M<#e$ouGV75@6Y5ux zG{K$;0;=RTOe!}I z38eqRSA*q$Ze!9e*lQ8Ay$r5y+EDJr^nU|JH^PSLHl-2=Uio9cp!LQW69EmCf z5plOB_a^|U? 
zsp7Um91e_-IlUBu+yHW7Gpx#HJ6it`JIR(X-K9%BHC{pj*X6aaxm;$n4S=o5tws8f zqhywxiMtgJq}Eh)FJbRSZk^xI0PoNv>;`|r^uhq+z%N>F2sHORIS{Jc-^q;Cv}Buq zGvuXPQes8%rAt$U^V+(+6Lh5aCML`ODlMp;>V+O=L8@O*sD_B8SLbcw!ZTYC7;+6Q z`%%@9vLz`#EwN55OR&y>9>BKAx-j?pEF0{|=Gkbmg$|2h;&pL|<;JYNU*3-BPKatf z>EolRb6{}tdB2l1Lreuo4?p;Co5I!zs`?Pxa`*Rm)&|5yZY+)e+7;qd%c*`*7&L80 zm(~4^bTlYWO7f$4t-;&IZCDdv%um0*I;KoMG>e9$0eZZ%>$={KO(D_NBjsMitbC8t zL(^pl2dlR?wg4PwaI?c;_i1L@8t@h=Q&yJgcJibIqnl<6iiJF!*unBl~#S(lze z-9AG+NGy>&_bQ$kiXo6vok?|-s!gEKn$d}GNV)u`33nO`lf(Ql)%b-2M-N1?E?&%) zATyTD2a~92MT;T}{nuH+SFx6ts1`LM2Nie0$Bjf8L3UF_*7u~j04G>4(kGQUB9{0z zBny|HH)M%7;4f-ivzkEuM>(`FU7{w&K21XrxD05eMLJ)`Vf{#@!%@??8MN%t7wHqm z?(u`{0(DLlkIGd-T|X%q5hfPthUS*t|18HIe<>I2|6z^N>FMd6V;1$&oyz;Byy!u}s=?$Hl+U*zqlw54yI>|)pHbNO3=&r{6*|8(gLIUlb0GB>a&!;Ud4Z-OQ~ zV$e5WuSUu84uL_H(*cMKg$@kPsYd-6VEeCW6?SQHOQm3}_=WopVn5(LdUmx%mqa#p zn}lDegwJGWV!Km*x!qb?oAA3cP#L+#3u&rS5>PFicpHSr0m~CcOTu2A`VW8Rg*~OO z?h18p8Sx&utnTX*97;ugGqA00f)ukhLx{VC3)w!;Ybi|c!S!^>NB##WKUXu$x`m&1 z{W}M{dW$&GWe>{>$J!r=IaQU|!7Gi^)nl)T+Lx{re<*y zL*2SDg(rUNNiU4|(Z~eEh|!IUH2dwCDhgzS7QG}1LmmFnwqyH2Mh1<0pIY69%nk&- zBDN+hP(r?24_aK`btFOB4uWntmbu_sFRs8Yx>_o<7}H#)^xo_?=(WDlx<{20qc9SP z04D!8L{ua_*ufP{l5G}SCOhI@%mYiuH_$P64`3^?v%Cx`zEPw3Rt!P_i z2&VOp<-8mZxM%kWq}&7s7NBv-jkK&Mvb_;g1mAFmlDGs4lTWJ)%xgG9esFwQ87bqHp^3 zk=0fm3y>c>*8Si-rbH$CX9{lvpE?o+pbLX4NaA#p;}OR*R^q_jSCax+8>65C2-!_C zu7rg7Bz}XsXCeHK6jb zvOM*Lj6Eoz#LC7P_v{Yg`s<4(Ry+sP6+i$?H?FUqWuNpfk(4DopEH~}j`8WV0p5JB zE*NpVJSW~;5*hZ+sySH*pdB;IBKJcXxj#Y%@@_GEH9*!et$!(rjs#fP!!qTWaac17 z33t zb^pf${YHH1yLwAb*mTm`{J@5pAxPd5X-C`jA&+GG+mHxln@n5S6 z7{nGf3mqeXD<|?Gr>$V!Sta;i3TQ9^<0dFGgyashXMJ3@99?G9vdZa4PUWy_Kl~e? z+f;Ebxzhx9n0huo%+d;lSi*7Vn$lz6(p6F5e<}Ly7jYbP5$82*i{aoL2{wyi=PkL< zk|5~lZX=AFnz5UnQPlDUvG6r8r5@3HVA>t4yZ+eyRYwZOQpa~|R)A&1I!sFdhm^mz z{u^-&c40|aL?lDVHdZKO<0Ox5zb+Q`suS!%+gK}S4i+7)v#h22OPBg`tglER3EMRW zmQMsMzG(p~cDGmyw!4Z&onQI>zdNM^!_FrF>@_kC3E}Bf$ijp|f9#vPSDB+VV2WTw zi)Eg@`nO~ezRp4pU38pUKHL3RzTG&`)}6vb%}(hkmRph0yYfb|NfYoz8%xlcZ8gmG zM?UqpBA&0!W>rRsQfhIpwZ+ip^}zSdR$?Z|`2xZ|6V3M6S1u|U(M)c%W=$#;$86O< zE7_;>LtMA0^QwelLo-%OUye&IPAm|`TSXZNt1_5-wu4UZPlQ*67gwFlE|_lf552;D z9Rx7+>{JRG6ur8zHU1jrF20v9=fNRu+jf!0nGk@!dl9H3wR;FOzsI;byX}w{y|5M+ zi6vi;#?2_1FyNX+{x*mJmW~L+#mpQG`2eI64R+Rn&%CcaGxQuN^~iso)5S`j=~h9mk2uolMIg{TyPYY-QvievlfAwlw;CqPnxm$h; zQKt8Ia5bOX_lEV^o*Z$QQ=8r6_dSxLGt zrfgkmFEPQIu-xX*hE5ii7hp2yp1Eo0ay`i%aO))B^ukhc=|d6c*a8q2fy86 z@8^<{O?2MbCn`w5-9rx-fi7fIz5l4)(0 z<&j5&;v6+Jp{Yz-lrOtgIiBAW8kiv-_e-ow&Z*QVj&OX^{YO36j>wRFkn! zk-bZcf{XIlAC0YXBHqCfNBTY{&n~9SrYz1<{-oP~5S9TgWn_rBA1yRg1Fita$is)l zLcQn%(p9m05AdvPxq-T5Xy9PRv>uQ)QN%bC`6In~`Jw_S9( z57C8U;JEbqkic9cLTd?#?)apQddx3HH8-&TT6FC z7w)ZRN_s8UQWUwG8uot6j{M+zf)I;BmS(MV7-$A;wno8D+_mG+g4iy(HElS*j{MIPr$TULCaYCvDV?+3eKBn9;nZ<* z!gmX7QWW^ACtHtY04q$lQk{65F?2y26RMT-Q zDO@EDL1YiR`_`b~?d@`x2&889bY$W2s&Z)q4h|j28-~xKv35I)1!>{X1vLEp3#8t? 
zL$kEEXWwk0eAtn>-c z1yc8qyAMN3RT>fx3sscs%63SG1Zd9;nZRl9u~G?gd`7YYLBCKgMJn3ef-O-@EQoM$ z^z>6Xa_8FSkBd9D!mf;|1ogfQ-DJ(fFK@8=YrP9g-+4$1r2F_$8!v2;;<*LfpuEX_ zlj5r59zxjG2$E40)R05?Xc=`R&ui?ou!6o{LY{aiR!c4e(s6dJJ}EBbCd8kN-}bSf zWpETn;&CIx3amxp%9deut!EH$os6_Dk=9$e$2Y(CKCzyhsPlZZiq$%B>oH;eyO~A z?cP4^rRirR*%N4P9@ltah8ME^NOGPYXD}@=az}thbN=i_;Lr}XOX5(p(aMj``|plD zB)+J2k}6gbYrutw+N2G_X^tUaCP9$TaGTjwYl@XX&WY$;^vb7*Oj>PeB4X)x?;_Pl zThI`8kJY$;IshbDnVu;<|M)PmQ%Oi?QXmNU2(D<*~B3^R_zaEW#r%V(Q zX;zsxz@No$O+CCA@~vtSnU?5NbF&SDf?O~*awzAhaSA93CyqD@ot7pC@s*&=O9NC3 zTJeMX$gov;^fJK^NEV#@l&9^AG>&&nNx9^NwAd8<41qT z5{wR>PYjypJ1EcR?UazhhB^f{5}*zNKjLUVg(A$&o+GM=QD31`Yv?z>a7?F zX>J|v;%Q0&a9&`RQ`h~j6W{MQzf)QYmCXq*uM2ahX}x0KIl^!B{o8#oLXUTxz~!8QxX|%56w`Y1S|2D^b){)g3yIl!bJi z_#+^c2T~dX(v!t8W7515kI(3r)aVE~w(azL(%{caHMXfud^6H|yRqB~8SYyz6xxez zRR3uKW(0&*AvOa#5-llUv7H$Y3u8Tq5f(jnOyR_2MOaI*T~XZxa;!6EC63f$$!z3z z<(o8CS}~pqrpux=1zR_A>F(|#=FQv}V*NYO+0%o#wl;ePo4sW_7o>P=O}K)2>7qK~ z`eFJ5PDCa>hlNp4;CLji1@C~wN!NZn)S>Kk&#&7)Q+rbRbcER&O08cOyR~gNL(pUr z908GC$>XlQ_x%oSHngd2zC>?&8ig%yGo>GfxcWWGHirX(p|Bw(~#D4KC6~jPCo)6-Mp*kv5$+5acu7 z^T8j#SlK_82IZ5;lg|am`0kw!E!cOTflQQOW&HU$I-K^$*ZbU6mY=veOA0QHc604j&%|mo$9hpA| zSJf%2xjqn{{OUMx`TU-e-*&b*na^w(X}SIu_f3|7XOO{9VtIF2Xom1!@WVh_<=qdJ z&&?&*%%VIY;o0^zWl)ov7Ch+=Dbbm^wT|v4bK!>^!Fw=P?zSf-{Zi@&pQ6P6{;*aV z>&gZRd5Z186H(2e7(xb4S_k{fMHG*$x!~w>uJyEJT`-T0*RvL^rSQ3Fdk=$H`dWB6 zo^1xlof)m&p4h9Ln<9jhN5QDVJ)#h<$8mPsa`(&5N9Z@SV0;{Hl2Kqj+M6}^r4=X+WH*z$1 zIXSuk+Uxl6Zm5RZGC~Ve`Kgo9WOy_d^QDy=39h6FmAIgyqSsUrkS9)M$L0am?RIK= zXmAH}S5#dglZ~=s5q8!2F56)ND=tiA?%f7bPCH?^#hcZS)Yls(Rz><*I-Y4_NHyql zu?O?jR@anJlc2*euE%(^;t_>|`C!+5O6ld#USCgV-Mff@en_dU%{WaU!oG}$bD~Cm z`|e>ve9sJ&uI;7Z%k7EDnFdbLr%{rb#C`mDcg<;SGhQPAdt4j;V#c^Rh1a#dOSFEB za|8U)k}&I+bWz1T?$#nfar%601vHvW~^SuVQTPP0BSm1}pPhOfw+?%(HmdA$6zqMch}<_yuftLhof3R zzouuVg4~>0x~}Gq@Abis!%f__Msc|Z;5mip+k4@FihvXr?-}kZywks*`pU zChFYnIK&m7K!%n`&@!H>e^?N^(Xk-eDJQ z-CNcQEiHQPKkh0Khwic@!3g_GUV6>*KaqP#AD`*ihXc)@wQ8U{3u>U>tp|nMVTc;5 zu%>sOa0QWe!2*WSrDol2e(|~ZIc7`51SF<)TeotVk)vb@#lfA2tWLXS9uoTk%rxq-EpvQQ?=f@q12aRI zPkFsU8V7Y)49q9H?ffZ>RP$*rmTbQIOu0wctgx&q+wzrv&QztUeMe?n;nyVj?^o{) z*5Wm-B=-RlUr&W0%r7V^KDZ2;u=^uqs#MmX(_o{o2CHakyd(P(>-oKj8Ipo<5fQ7Y zCx|l$I6VWmMvC=+F4UP{@szryUOit?n2E%gMx*FH>W=4NgT2s2gt;6+NfljY`_fcT z@G{U|W_}PaYTtIJR8{fJDtE9n?cx_F8e%=YX6rQ4h(O)<)5NJydaxr7Ue7}n^v@V# zvbymGYe>~*aM7u;4}yNad%peoKIIK>5?-+$l4Ax!Q%t{vY|ABE9)Gn;a-)+-GcNUW z`WSj(CA@zFlN`QK<^J{lt=-OZldhfqo~P76+eHrJE|~*Rz*SzarS+>Xda0NDM!J;f z667_Sx*3ne($*;CiOa=mU{FhFc2o5($|bMvpa)2c=AnRB_Cqc1($ANbrG*_IatJXGLU zcMmp)y;WVESdT!iR?`_>Z^im?s;eX=pl$?OW_D0z;4CMd%B>IAZy>>+Z^aBVwxUbV zo-EfZ*c(i}4i^o-t2bai)?v=8fu}k@PQHd~*C}uk7T+lRiMo%zRnebeo>gnFzKlE! zQlYVF4HtvNlzqJ$y=RCZy}zC~-?~2`|JbT$Wq@P`m4rn1b9w*5*~M}B^BMb&m#j5gBG$Ld_BQ;@Xu(5jUSZ8{fNY1YNb=IRhcGIXe? 
zCszU|L{qr6G!g4I&^dC-i}E*%{ddfGP)0U?M1$Q>xoqA5h@2>$R zDEMy)BvlHy9uJX5FMR|buwUicEfyV7!Sz!i!*Qca4!O1r<{+~;(3OC@;*Q_}ab1=+ zv+%%BYWAB*n&MKtgt}bgA6v9_ekk~|!^bbhfbg=rs739E{b@Tk8fHT14MYhBweaiC z3^V)ia;xrKj}f}DEPIs=zd1(U^TdQxQ7L4}FKnP1=p*H}CoNcAzMtaaln9PMRcm-p zu9Ir?A@$7T9@1pw-TbA2G0-FzfhbP6+&t!%vvnN{Ta2WwAw~vlIzN=SDfB&V+;3wp z^8q-t(|qB`{r2#lN%U$G_M?i!$~RxNtus`piLnw5eMcAf6)^!=SY}k>bVqLHFOmLx z1o8XI*>nm%2Fr)_E!TRziCqCWs%|)IR?%fiF%u1p*cgKDq@b*t-~w0AyBy zn=(Yum}I|2AN<|>QwfnYl`zv<)`ZKAGEJ(_I}2J-(6sY$yiIt!M8(fe51oki0w=BemA z)*RZ!nP#UXQC^J%;qvKl(e#I*z^J;BgSiqybA>^~gu7pyB3k;m!<5qAADd}QiSP~P z!x1>5+oAm>snkhljN5K4cxLoDQ;{{~cJI3X3yY>(yg}Ic7A|e*p$rK)SSs}=ZUMEx zLuN)GPge5iARjlOG-nuc9t|kblwny%9R2*}WxRexBUF55-9R<6LgV2`=4 z&AKf8{S1T~LL?oUhO8l;}gX0v4 z^s(u+HhVWcqOe|*Z<`y3d|o)M!?e~;9MovaJEAvyDg-?T^ZBw3zHJ2u7il#2@&DRN zJMjUa9n@(n={A#GNB7}U?Yo)a{MvsvfG5)R`LiYDnhJe6zR6K`HqIC~kE1FzZD6I^ zdo?W;ku!2#sTM`4a_MzM*3R*G;QbHxQW zbEoj;1m?ESj+Y^l4+OM0GDNes99erD1wV28YWvZ101K$J2bUI2CJnCh%-oY_!u!4_%dp~K3msjjr^utzCN z;*G?;!*6IsEp6SlY${vsvs0LWU{Q2}Q6H&^Mwt6LL;XOMpjXaE#=HPW3_#SR$FXri zC*i%G*nLvDl5(w7?P}?~Axy;=322W>pXV`4RL`AUiV~AqDDL}hvHkrh9g!1K=J4HJ z*S^A{fO82-gvpp0YEe`r$;II5=I8dTHEUA?Hw1OlR?cMS&B83kwn(uz(JGI_0_K&fi7I5>ZFky*!!VoZH)L&o9TUc%L;?m8E}`` z@_Vn0W!n09cwzI;^)&wzIT<_$QyNfwsOxIr?ubF73km=Lu(Y;K2I=D9C}$%s@>MoO zClciD?w{UMpAKz$_z3oVBP2Iams4$h`Iw#$3WqReCJFJkY#A>xwn$fyY(R^$%Pb&o$;oZz?4zw6iluP?7_$ z;$T3S?s4|nfqz8fncY?)?q#mR&6IViHsYXZ6lNOiFp>vfmR77G(rEh~fF4KnT7@5O z+Lc3Rwuk@w4Nl=btP%;;Wu($aR(1H(=nrr$(RJAx`pwo;*6o&aK0;q#`FZ0n)Zx?} z2c73N(hl|BSTDp3-F>}D6|O~Ct0HgA!l#(Ul<8n7RxGX&@ux28;$W0uc2?UIZiw#) zE#8~jxdU0PlyP+*50PK8+B*YSa~Jw>NFcRly^%3Zd9_jtSvGmmh*u9O2x1BiCNAtz z0W$o?ukW}@&~yOr#gAgi3Uw|5Z;^sO;HY`Z4V&MRaVBXi9BQ54s2|)XAm~Ax2!04u zNRPpgh6LHCcYQNd-{4;(&IyB8N7qlkHo)E1E@3P#;77tbYnrGXz{`cD;qd3vAGaH< z%LiET)WBU;<49QxkBT?$*}E}%DH=6E^0HS;-SJ2T_luV0M+!H|^%DE6Kl-=t2P*BG zy9Z&wCT^K;)!J-hF^W*n;4^wIj;L!E`6V!);Ru{F?bV`;c-+{eBPC=4zsP@?a@+XK;^ zPSYpXYQWTsopB}jZQp{0wZW}BYT6oceB1?KGZV6HZ_ep2-k9j!Af08F;*Ar&MU+L) z=4AnM_iN#hUG0N6eM;x9**qxHmrVi^_NgAjy|`UK53`D|%M~UzgbdZ$4`+X%{YR8o zA9WuF;@9dp1YIX%K^}!VW`oRcvZ;Hnjt6vyrSNxrmN2{%&wA11bWWl5z5LFp-Z_HYf2weZc#L+?9s&+qxwJH#N=D@G4qRIET8EpjaGy|0f4pG z(Ys%+jL}#fT56&FZ983KZLS-TydN~k`28Km5Qf$t%q?wLt?Rx!r4|00jiyX^dK#hq zckXMWEE1jN)U>C32ZAjmRbT^MOky9L9iQlb|U4S$C6@UADr{+ZBH) z99wd6JNkfAelp>oq-$lTexzzbVJ$HI`cHpc=!Nevt@B z$Ktq9(M*z>Kw|c(A0g(s{je`4i&jq-4$INij<*{Qn8PNy`)p8o#k zV$HeBl@cN)SMx6O909rR5GYKOgJgb7vz zMpJe)Zsa%csSfX}jZ;mG8-@a=9Wab7QvE#6U`zO5xy_@tI%Xdj+g6sgY@ngLKJOuo?#&Ad+T+mt)VgxigHQCFf@zcLLSn z;P_+T;0xfFJG|b)@*qK8bO|cg58*N2P~>pi43W!DXaXHYzYgxbtq^5jTq_K6WY3{+ zE|O<>2_LGB8)x+E2rh8Dv+`Ra%})6nu;o$H8`vz!j#;bP&S4yn9b{R<>SHcaE5E*c zCI~1Y5JoV60N)WhbvY2Owk;)XAvPVQmN?#O9B;-MfisANkJ?`=&!^sQR_L5 z8M<`}}6^<~L`~ne*(u*IIk+ z<9^$z?A)~9dwouI>nu_I&!Gtl>zYq_Pa7RAD`utNdwYfEZ6~{Zna(*-Cibso$Pk;< z;ZA=0yEH``L4jO}B0y#SpLO(yU;6uLL%~1l%3oP}5c95n|GTVz{9#T1eg_xTZ$M4_ zyRt0(QqBI!*fsVQ|1EOIQyZ%QH5+OTJK^l)BBRpSGtU=gyGN z+Qxnah;oy9C8~Lh@{efl4H_Taw~!T!ccm(~7^5C!dyXr&>w?NYSfycEu{rRWHv}Q{ zX6a3osCId2(l?B(wF1*rv!!!64sT?!38hOgEokA!y3qV?1iO~I1^uD>&d-|aiBC94Nmp3k)0#>xe&iKctPIe)Nr7T zJX3HF-;jb5B{q-RGEQhQPK6_Mr4n6M__nJwatqI!UFHh6ats8w{T-6Y$Zb`B$6P#& zCGz%jkiDcLk+Bj#mVm_sW1F^So0STFD<%?u-b4nTdBjb|j|PQsdkkCxA+RD}bRj~U z1c3pBG+uo7(fL?ILrh@YZj}%FRJ;^%nipZ=+sBEwv~4<aw@haWqXO^ypV)DkT0cyE|)_U;hU_)Q~9?!U`Ei$#1ucWOoJ z-T67WLpPDxR*s#mHWN9?j6nJeb9%|W?_^o1Vb%f}T^(P_x}7QZcI{VX;d&K8Y}a^& zUg_=1nYLj~oY7}oS}OV&kwdeljFZ@+z&M@B@&-g>Bn&#CRYz}skHgweXv1C*E0eLP z-`%lczW0W9b2r`PTDj9sGR|I6KR%u`Q>_6JLx!KV;8iYX(&KmrRc~V{f69KK7Tj4N 
z@PnR3_>5Wf<-eXR`IVbr+_Z?GS58b)9)D5KT6~ri`%J$jP^mfr;AKHuvqc_#Z_xR_Cx48@i&jJDX+LbNOfLr5+d&-d|}u z*PE8YNtMOW^dDhg466D*&NcYOGu1)ST`xW=Hy7P}J&P`jneKD&#RTXfg=~>+Fcrux zik%_(7q^FHFw>&eHG|lels3b*-bJ(&x9Bw6kuF7UQg{_lFt46$=iAQVK*|$;v`vHZ zv!BGLpM;_O_MmIcmQApTVQ)Ft2vFWmxiYeuBsa;C$Z+wGHF@5u@y)BclB?Phm)}(2 z%Emga@WG=r`gzGUTJ-SjCOJIY21Codx6XmUqs`E^dKqt+{#mC}1(KbEJ75qER^o4J zKO@#PR*cP7d;Kc!xVGwFFMtfMmZ1>#0tn(_o*up}HQQ&unMT%hnO-KrjOZ0!S!BB~ z(zLzUXx*#Sk~^F>o9mefKFYhXR$@vvYs`II%>R7~SJlvCPS7YF9v}7OHpz*PTsKoZ z9cPeb+&$~FXpK;oFuo4b;o#e9XhJmDGxJ!4uU9KhFK~aQF=cfC`bxsje<=O_HZ0lN zh%f%*VqL96`LNDC_d3_Sxz!p*6XM>dSmxIN?nRX%b%9+8a9{;kKe%a7zM~OKCI~YQ>+k@32bxPjCqEg?! z#7?#3*s?oI2(O27U!2^N&xLo&eZT#S6W1iK`*lMs1hSQrXwwEcx?QivWuFUmG)cx^ z+o`#HvafDyVB>_amr<$c86S(6xjId*^gAmW?47UDA9$~_{=?eH!!q3i2Gj-G_3oAW zDz@n*c8_+)l1Bvl9Q+W~avNW(hBWK*Q@M-hM4@8Y{qnH6P=|16sb;NqfAiMb+R#k9 z=XAmBWaZOtJKf}bZ9Mi%#`5f$$R|EYg8Y@~fol}nuO9O6 zkDH!${p=O7cvIba`GJdK*Y8zD>K^W%tEMGmMGky?KCV+~>P?mw`Uu;mr8R!2rh(d_ z<;7)UlH7`Os6|}a^Kq{cf!FJgr{jUc7e&@ftcj!*Ui0hcm1HMwS1;EII7IQPjzeb~ zyw~A0`4hb$n($dB2P3O@oV+y@d6Csp$)+hIC)32bUPF-KJP}e6rTzj$Q4 z(jY`7+1(GQZ+%Lrm=jKUAUTd3jHj=GiMnm(-*5SF@J~Z!H@963%?VxzLGeepQ?j{F z&rfQt^5VAg?Ha$mmcTX-rA{4zXwt%TX@ka>0>lCa>s#?_J*`qauX)qJkv2eCZmue~ zVfYwIFf5x>uy9^p>`Nk;Ndg8D2wp)=lI-9+VQA!?60YoBwti8Oc%4Q|nPTaxychh& z5IYKtVloHZq#d1Ew$QiShH%%tfh_&LSTtEx5 zmJ$9eA*u#VsWK-{<&vks(wo6xXP_rnmzeV~bpw;{x3zvJJo6Y^dLpX1ZL`zN8`{?N z)Zirx0Fms@6pO~$o!9(F0OR8K-*$PMUf0psTTzMLUhojK@vs&A^n1i}Dz0j_%pu48K7V%hVyK|hHYVZDB>I~FQS~N42^QLiyN=P%tPR0tGZ-T3 zr(Rfn5zfca5*PuLGxMXI$_|sisX#Rj)g5-wW&h=7Gu;_y&_-5lsjZNjFqLL9+kK$w z!&kz!P`(e!B&nmf8R_ykExNcYnn{nX$>m){cgG4y8Te>Qg0hOo^REN;mk_aK+FOdE z9lE!60S*eY`Mfb}?|*(Fsg)K&dGW4^ZlxqqAjDen(R;eE5HBt+YBnN7hY7a-ni|)F znO!uNzms-p;4(GwvftlCW^PUb#+0gF~W zAF8mL*Qm++-h-n77;ZZPr6VZug&dEAKV6Z0ZV~f;Q9ofe8Smtf5ZHn-CxfLULkWjR zt8N3B+k^fwzM1-iUr5o1iJn`ON$`(xo!Yx6BVn*JNqOcWaxB3?YDU!HOBw4RsZ@1j zq}IL4R+2?cp?4)zmQ>P7Ye2hnF2U(G#B0gY4eP@VPZ(udlapt*2X`$DLCoaXW0`G& z2#7QCG<%2lE#k!}7cT1Dn-dDU1W}rZFZNdQsTI3MVMJ1{=E8={K3xLBZxSr$8V53s zd1lg8rk$M~6prXl2V{He`N{Nb<(J-e^G~ z&aw9eOx(e*LMl41$gH@7rn?R;)WiZ|?bx+~eA0(;LCQ}{p>yRV`~_}mWQa+!_N?II zlrGUq2mxUo>|5kYP!0qhhRs!8{^aZzH_f(_Mr(X)&OONB^Mxw6n_RCd)}!>dM%xyW zKT$fSYPa@CLah5gXO(+Yd%zlq$0q8y%nACLD(|qPXCU&iDyTl5--2!?A)J)sUF`cN z>lu}?7I%*KFP;nGL^+aMQiY4&k8RV6GD{Z|@a1%R`l(qn@ua@uQ3b2; z=c5{~ExB<_sl|W-NBxV+p^|>A4mR0D*shvfwQ}1gYnHf5Ld56CBnK*_LM~fclZ{dX1457n z_{p^AE?XpI*J`~^K=JlV?owU5w`IY2K#W%+)vkJzMYLEwK3{w*<}9SCV#KV#VR1h2n=xW3K`4DdBTL0Cr~OXp zCMrSHi*c6{D*5`zOl50Th)T$%Oa*!cl^jL>Ua|~^?X=NY%+p8A#{4Ck5XUAHPvyr1 zntq0vVuUs^^YNmF^^IL_g)6LSe3`Bqe!=4NwJd1F!zl825#mEX`!2b*4&iuAZ<$7 zH&iKPftQZ@>5$VTxLxapX@X*4w;3ZuQa}O>)vhJ4XB&X*(BV(8Qd3xS$pIW zqt71mtDblh_b`%#M;ho!{-%hH61;sQulxEbl^>$Vo;9SFxE+tfp8kcvfYjGr{tf>h zS1be7e_H9Ay~xxaSE6LA@ypiPdAkiSKjyZPE3bOKZM<2N*Mn^Uk6clh_NL_I+l(EP zd?PnERnvmc{%AlW{~_xouMp=Tq_<;J)TVARuO{RT1Jk(4QY~XR&d85Pe7(}Q7jdc^ zCQWt@7Z=J9OZ~rGUx+jS8MDwYAyn*bgV_6loRlvNVJqt8p>3_4ofH6u&4*0u6w16K zHz)1X4~fswWNrF6ynShL;T%M&RI~ITy)*Tu_wyiw@nwIVfF!46)|xHQPrm~#E9iTs zJV=L$6aV9@j6HFJs5>4Z14EMI(YuJ%=EVH9As zO*lnf7Vnl+%o{Ho@-TTrU@p(T5}v!_RYy4`HlzHqipOY8D^pKQF_H@ED9iE!cehMNAvCtV&o!0Dr zukGEFal@u$M5LPlOPy@ZpBS2`QW2@M$!3q(PN^$dLGl!YH9<|vOK=9bExh5W(>BA3 zmIJd#TCwl*5C)TVmSd!LOX}iNg_rxs@zN#nqFYoHHhE&xI!L7&@lXW6s%K1tob3SJS`x`fzLZf$grH6<0-&9@6OJ;k z(@dMeF4K>91$n31Yc9>+!BoK0!ST%ixi_|Ua98r5j?L&NMKvSsWl9cz2mu(1Q*f%5 zg@sANhj+@xA($E_^EL4162Mv+`Y)A=kJq~30FZ&gJ&t zJKl}k2){}>-87Oxu(wW}ApJOlY4q+T~ zHGhZc+!}M|$ozIUsq0_U02ZcZp_|#C` zBW)Tn%ci2~BrpEwWwTWZH0p)VR|j*B!lsF!sd^J1IY+b|+?nC3FGp0TG$&nF_)nD* 
zmws*3T4s4WH0)Err?{2n%1mDB`-E=4@C+URdvDJ7AZH+ zrzh^mI8bmNm&3k>rlTtBZF7FeX47!)%f$G-J)w%Mt#W(Vrmxjr{sbyYs& zuen(*Z>+2xdGj(`@W^Wb(KHt(4qR4u+=*wexRdSF1_Y~0{AMBFVe&lg#*Bv62~Uj& zE~26VS`=ac^6LEHEcTmS`E#4fR1_@F^-rZMKeF&~oYiXzISaf;~s* zfryCci;4~w1wtV$!%&mg_3j^?2^Tg|S5E~>o}LUGGsnfx5xcYbnZ`*HNjwxjV?TNo z;KKUzRF*ITM0|;(ZzxLF<4hRR*XFwu8wWgPYAuC56s8XV8ps((8IJE{Se?q4Ziq=} z?Irih3s_mgwrFr>b#Nb2U^W4=RwnT_w8l)z`RkW+9|Lz3@tsMcBP!W&wqrA&zI$$7 zaKs6s5TI)4hpt{MSBGp8>!#KAR^1_hXQr=4RL8~3%rI67U;l$lY2gW0&wWURK~x>l zCp?hGl>8?6g!8JGX7wLgKvVv$^)`u8(pa=vaI!(--QO3pp`HRx9e3yjVB4dy%tW7| z0_sr?j+Qf(wfi4#+*9{bRG8T!+W;15W%S(&Oh95pTe5h@)2~h#09TtZwyz@jSG~;y zLS55^O4Lsh*)}NMx(QR=3$fq*MkUTFi5ZhRJeFLzcr!lbXhmWKP_wF1W|Q!6m{6Ann=hpD-jKXB)(IN*R-nOepg2D4&)MP2w{7NrGbkQz z+9&Mw9~)w*89&?;ZqlPTAAu+YD9v^PkFheEs;n<6Vn7e@yBvyO@<=0iUwN7p%gP5{ zM|Tx0N!ksY;i3gB3>ku`Tq>o;QI!&1vc24KKUM*R!*CXN~Rq`Wk%)cidHwLVRn zc(@kyhDCgn`VoDRXnwAlNLP#H50qt*MbcVaodIA9;LHN7jyos`fyFl+JGirX%7TsT+zo8ME*zYE#tv&!%UC3YLOGV3V4PEYvegl~>8e>r~$>_e| zWK{d_$~;=@+s6zs=A@2AabJK;op~e~qLgmuipbIfzbqh&Y5Ni{OqYcTAl|NP`rkbI zlKCbz+DAA5l+t(b5*s76!fP7o$a zeGB0F8k0#VL49q9;GNGDFL+OHCJ^O?zcm9d&ZOL>mgTEGO2nRo+Z~!mF_p>J#u!FD zH2Jz}GJ0o_n#j*pt7j-qOg{>VS?`hJvj-&YrA`i3}3ugoj;wn~BK1UZ`ZfLdHL zh37|Fevt=nFbJvwq@%^3fPSI5gU5DchMFOcpR+rQsyh)1pc;?qK}&NI`X_~z zHIpa9=|D;1sp$e*kkdEB*-nuX%M&kKxgkK1*P!o4PgKFVC5e!Vo##NHi%pjeGL3uN zTKY6aIE%JF2`<4QX;;TJ)y$omPC8VnCN~B6zZW!$ef5_@!6w8*9`x?)K%g6cW;7)S z6<@emhXYYgcf)}6)C{~fz8EV^mlhM)f!DR9JWfg%Wbq)U5Z%Do`6SB)TDJ6Hy}bSb zGfTN0K-rQ26+_)<;xooaAwO%U!7{>+d!WS@5nqRgjYT!o# zE)I)LCsa>t-f(xD&_AdF7+ci3!ntqJ#vkQLoEGSkA#}F92C6w&Utg~RTPfnigo7B) z^L;K@Vas*g?T7EkZZ_K`gF2UAn3&MY$;pw5dIDv6*b)$bW2+d$ty?4T80~GJ$~3KT z?pUN>YHZ#U05;=M7ZOy{%o);GkpT91U~;9zXz0U}?6!X8xiWcvvjokqt(}DU ze<9`tC-FAR`fOtT(IG7@Y~-*s>vUw^6VeWR{Xj9G1S7Ym8K)r_%ZgThJ@a`J2mOQC zu=l58+Csgl;8btLdV$WIwsLh@BQ&m`<@ja z9wffo*@A1eN+r&nGP8BCDM@V&_hSJxP169?Pc+3lci5xoRT^ymZA}|bBc~aTaR@(U zZe+bCtNlQ)1P54oi(|@0VV!G%7YZhE?;>x)V0{jF;T}ZHAQSXza8VZ>l1}61HWT2s z+fmZk)PhPv27+lfa@`wLB-G(a)7iry!~Wg;x1IY+>FWOxX|rIjYDX!at>&qheoH7b z_&B|o?l(bHhsM*T;j@rWa2@3{!lj9f-lx9CJB48aq9zojrTr6sf5p5RG;2Qa9TT>W z5y2@gy52W2?3v>119#NSm#Hwooo)sgAHDse6=VeaXRr^z&}CHFqBscpU`7G+7eyHe=FaRsat6R zUxh7y*O>YvozW9O1}(?g*4q9`h8hyKUq$`Ve?C+E&uJymDg!YD#n9($yy50a#(5FR z3r@^SE4`Ke+xG$zXN@D7d3h}P{JRTYY|i_tAU9N$4l6<&Xtp3tcg@hllrWS?(Qi94 z#G2>LxtrG9aiYBhavB>Cn1IL+SQteKEqc1H`}?VO@f&~b<{FUxDbm(Oilx629b7!J z%VZ1-QSL^(L9$x_DGl(8M#($Wc7$Pd#F$|$OFJ?Q0rLSID8?h?7`|!PF`$)9QNK8N zMb2jQ(5yf=lJib+H#7(1TXdg|a6_Jw25f*6d#2mYTvnp)8XSxsMVqNNAEJ7n56kkC z_)w~L%c6W}HSrEEzxpLUu~b9qAY>e4F#zOcMAno*Ao@W4&ZSPwj&c^k7~axrcHcFq zbs1*!B`i2TO;&$C?V{kqRtQZ>2)NDd=j=D1O9Zb!c5ZwINh<;1o_1v6gJHr{R%~4} zx|wi*ezcc14H5r%_n_7z(kL2rAl^Svs1?FYktdYG=fjkVwUbM zf`X%y*TcsQ1h6;cGaC0Yu+5RrL=m!dqT$))zX8%JSl%wKG~gM3N-*e{_P9u5M?5}7 z{rjKf{wUSHX>ec)ZO=s(+*>gcQG?{kzpIDA4qbX7=nPGj>O4L(tzE0Jcf>nWHXLBg z1Aj5+=3?109uRYK`C9lP9(VpoRA27sRzK+PrA0?sY{vS1+s%B{fBJHxfh3GS@um(7gu-Ui^X;a( zqO?9s{s#2CsDGtEX2y$@dI5fkz|FN5t0!$?E-aJ-k_**7TUFD$mEe0X_(Zo$t%-8f zo>hW%N*T?`1v7O$G?(&|994V_Y{A{v^xYMT0+*PC4s#_3hQ`A0b3_D^R1H|}DZ#P%h@%rj0O{DTfbCi$_1f^ikO zov2J@^1udK>$&xSlNK|vj>C~BZG3;}04esDAquVlyX4sT(9Bin=VK}E*-0e(Ysl=J zv3MK+p=J)!xj3^n@;@7!(WdNkRx;}oE?#E9`}QEw(O3x71Y(T*Ibu^ z_}Dg%PoL@a&Bej5ykMH({nrOFu-iDBEAwp&T|GSSRZNs`+J0?4>E7-FdxTEZwLl9K zB(aOMve*0Kt^MwdQKeDuVrZ#2Ma11({DfA!a+~&+_l>7JZyjQ-|I6;(`?i&k>h?6~ zVd_FNp!9`6TT`y90uTUhuI#Z|dbMc}+&N>Gcldlw>;pnf`REjo2>}5AMKC&nHsXOX z>l&G358h(28ed~XOr_dRQkT^dtd5>VpUV9Hl0w-u(utGwPSHDD^s5wt!fAI5iHIHz z;vB`4hTMiPBTWjI#BRgBj2?&SGc8z|-%>lDVOZ=P0|_2cJOebVggofIF#@x+sFm;? 
zrt`3kYrmDRTpZgN={gv?4X0Mkq0jfZA>*OH@ma96GEZhbIi?gL)%nFiT6`0K9$1PUY75}K8TeM z3W)xhf$^Qn&s6~=KWcwf$A3@8sB@NIb7}VeX`ki>wFJmW2{l-lPTtW#N%`Z2s0igE zPz)<*sxMb#1vVFuG{y8APBUQJwEXJy{r-zpeuJC-d|00Cy12UkOu~o68}Db>Kmi9z z@(6kZDoRnUID_q|f4ALBldsgps!0HWV%>GoNha@Q91kT|b zw_?$>^T(cUCxPcmLgr`Uw-FIU`^#h4>=6r26F){qd~h(9+C$qsSQ~MdWl#p13$M+$ zS3#f|vNDs2dQ#vTJP5l8B}qkMrN4^ptPn^w;ThTyvT{ln`g6`z&nTp3nn-i=URGov zwFld+QBcr26+u!u3Rpux`h1{=@lc#ov!8CaZ|!@OQ?U8`j#F z_rriW^LDH=eFA;Qd1P+RZ>+CCc)i_m%yvVTi?rVZ+H$?aJkYK;I2`3RIqDUBs0MQ- z-{VTJP+rvxKe?WKcy813!EwoZT-r*A8}W9Pl|9d>ZWM@aqUs@c+nXclN+T6-sMAGh z&>{5xN_e|ij&V7FF(raS?_)E^NsZ#Hd&f;5oXC3hKCz(w5MZA>4x)HkB zb2y!ol)U9R_$uoJnRag`m@sgan0F(2cl0v!Dy26v=7qAXZO}BC;}x+lYyQEA0n@D; zFeNwg&f4fi3jN!l#wAyXKSv0Jp8Q<&tWoSa0+e8aoz-+&nS{~ME(>Qwkm>yo0BJF2 z7(=vaNBc9z&02#zFUOtWa}wA)>rH)D9;F_esx<8kaZhNA)pGd8GR;G@9m${}AClYO zlQW#ZswyAPA6&8{e>B^a^4n{Tn+44c5Qxxf;T}$;Hk*+8_%Z=oQmdLFp4{>8JeVBr z@n;Hncc!|4XF@^=r=oXbG=P;A-l}nD7#pbBs}jn4nyJ));N1YSADhts`G7aiDUTYa z8tzyYu@X*MW@^(GeXva Qb5Ve$D60l4lQ#AJFEDuUivR!s diff --git a/dotnet/samples/Demos/CopilotAgentPlugins/images/CAPs_PublicRoadmap.png b/dotnet/samples/Demos/CopilotAgentPlugins/images/CAPs_PublicRoadmap.png deleted file mode 100644 index 1e1d64f904a7fc0c89d21b524d735ea753045fa8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 138724 zcmYg&2RzjA|G#7 zzkR>I|Ks=bc;s?EclZ9h$LsZcJzvkKC|zweN^(YW0s;ccXHQl12?$6D2?&Tt$Vk95 zq6Q&(;NjZKr)J&+1XMpS|6NJqrD6h4lKMQ;dP2HENI^qQr@8I;mw<~HNt?o7Y7-nPLVG@EhU5^YA58y_*Tgng<1r_VZDL*-@A}L}Wtqnkp|(kDq3)>b z%zS^-yXZ0fnTe)*ON7y7zbYnhM1kS`?^kZ&sWRf`|NeaWzaeEdn*aMuF7Z7Y zvj6+Ua5xPa(f@tINr{XIOw9j&)w2;1Uisf22?)%G6((h_V4r-^&1DTejjyt7eypNm z6ME)5=y)2UActQ`theu_g`MtoKcpdpW@&!qPU#={`Sa(S3(x7=cc&qtcMZTN4x5s{ zBh=E?9z_J=-oJO6C^H(LpSM~wR1)d87xuyJSf!_>ec5fl$R>aHc+hdO%1Yxpy+Fax zkhVD-`UoG?CS5@H-$CVu+ zIe`M2Od%m5#j~?Bv0bjjXCGAaliPJvK5G~2J)2QE+MZP-rDm}g$jQ!Dea&i4E_hL^ zPD5l~^MV+E7D7%$SRZl*Q)4u?7iKU=z#7oCD)*HFaXSqNN?Rr-ru-C}kTmr|{jQT5 z(~l&iq?pejA_2LU-{6WGP2kip-waU2oYPZ~Y#bkAeAVL{ zUrJhzn2tQm+jY65!%!j7lI!}VbiO`5!(%!MTHprj z>4-S)^CYSJ9IWfXcAD2p^7G%2kdTy^@=0f!e`QbX72@p(3=9;rC{o5q!w$Ip@nAyb z)UPRG!HYy6JyKGOV=2s%@lW9XP#=W%0>k}ADqi}cri6irBLS-=7|#4V8!Bt0(` zrk2>oU!H4%;XWENAKeFuUt(jaHn+DMm{09`*RbP@i#j@Thjf(G)G{p|U$q%Flb%$2 zk(zf;O{GFOo6@sBXuQVrWekEL3hsnyZGycpce*!TYOugb!kpA=FS_f#3&+N?V+3zU z*LyC?(>~E_s)@P9&dy#BJ9HtB?DTbo`=0)LHILNP)`oqe#XXE+&Ivi*XxZ`R`WGaB z9Cz_AE**Dre2jKy)4W!@Y)$!}u|mOEd)k6eoDMf9Vo3?glhiq9n!UN+buI;*e7ZLL zUjAfDWFEPK4Pazq!m^1GR9?d=zW%j;ezJ>#)04krW|))(`@JuTw^`Xk#sN%Rct`FU zqS>pN*`dyC9(h+jXzI;4wA}h+?wO3=7MmU&?*cDa_&-0Rqzg+P{r=xk)R&s_jqb1Z z%as=BW~8R(7H7lCI3p&VA>hER2-ws+#j8CGgMnQd5qJ`eUriAxCdob2($vfiuZfa9 z7*?KJO*JW#D%5{C*xa4DPMNA+xV}-GR8 z>9s*-igEG?R%xFS&bSn_ZzfkSr$jV1w8Eb--K@s6;H9Kn31>AMS^e`)67xC;rLUjg z=%21=u$o3e4-$Ka=Ue?ZMhi7Qs~7r2GVmp-*Fy#wTcCUQi3zV*v=diM*E(Fks}N#| zWhNteLl>ip`Ba9k0ZZ5NYFTygq(7NI|0*ff_`N5SsSJci>m&IG3xT^6mKOwk&6crJ z^v?I=ou*E0=$>q@uL~W5-M(|D>K_<89aW@rP!Kc9;LUX+Lb~WK(MUf!qOhZ_X@f|^ zFe~#b3S6+OMfwlH&J#PBwiSBHB<_}5RV8p430?O)!C6o^|Hp3C?DY-=aJ!O$s?6dy zZ{DD9dM2bTY3pQ4>NVBsanlK(-K}JM6zv7isF{OC`PK!!m<=*ft-dB-=E`}d;q z{^f4bMt2ni$=mGOhY|PMk0!}RH7snJ@ZC-(nWM5+AHUHIeICL}HGQntnR zA+LO6$$hpgNM4XL@tHDGB<7_~-3MPmx3{M;O}pD$yBizM&Q4A}FBv3R;$;4n=id0+ zXY}7A1OnWXi+#b1yxB7FyRnIKlZT8k=9`tnV`Iw8%gdsoqAKBRLJB54N!-j-8P*Dt zoom>U-@n<;&UQn&o+YdgWy#e0?^++|rg1^BCo1=E4B~XMq*q^jAV|0I_m{XU`SNw! 
[GIT binary patch data (base85-encoded payload) omitted — not human-readable]
zfymlQ%zPD==k{WCZNA}tD_HDBMIO+U5ZDd;p#6#_Yu5%W@~Oh7!R6*&f}365@annl z&&iyo+OJUXG!!RHnelzCXBWdAR``(oT?}x{oaVdh#&JR+A zuKf-?Dw!FLt)}o8#Ax)S(bb4qh~PlsU<;Hmmdwi2tiSKo+gRpd6+&nK!^Y9+;&dwj9?@aG)bXU{V zJz7|gS+#{c#ds>xNU${2FY5)1YD-6sWJn24(2)gzt;s|=7lwq`!Do}K9siVeTz2R7 z^36_uKXvSFewX~!=V#x)e!1Y0>h@bCY0%;?)d^b6h+cLe0Cv-TeK|!yt8xmukIC0; z5~+L>X=8~AA$jHsyQD^nyF4Stv?mL7A)})~@O`ERKhfJaB@R$`+OuUym60dYT};X` zr?+?<#Kd&36mj$Q`M>}-5s^bAMX+xNQrC-yT(mLVRWfm!K0w5{0;Bli*IZe?SJQ}s zDeaT~XPWl!6=Or=D9v+M1i1k}`A$J<+5ocJy##PCtXhFKdT5_QF;}dIfqx)JQ=5! zto3r=QusrPCH29aLd^N~f#)B7^8+ksJNIN)oDs1!&9+OK3N%!ur8h*jU!I$Gtl@gDZA0GP~+d9*b%#ME%ASdnT z&Vxrz*8>S^hjV?0B4=cE$HNfM<00dEv)p{sy$PJDH(G@7O>r|u=GUjaJM8Vw6cM)u zN?GE;pH0M9bP~jyxPC8BfDol-Z|FUEz}Tq8np0rl-tBsq8htUKYE;KF zLtozLas#Zyugj^t5li_>K_$giF;oH0-Ix-3b*eY#t4oI;tRx=2aOlA^j1e3l-nD49)YyWu{j<5zC?Hi5yJ3 zr>4z?;%6^EIv}>Sw26tsQ=Vgu!B{G-Stx0)K2t+Iin7mM+$c{XNW76gQE8zPPCF&s8)Z-jI~909*D#gdV0_kw%PzmdPXb`9Ll+vD%9Zx<#o zfIZYn&hdE7y2e^M^SQ@*sp%2b=z=TNw?ulAtkt<~srd`OoT5Ael8?{ehew*bom#Jn zQ+o3F8TNkT)!Mykv>YAocOT;rj`36XiHC78aI})&D>?p=tf#ILT9y2Ldn{%Co^VXk z6!;vO$wJ2Fq@OLHdXt)j%DfLtWLrU$A}y)_7PUuflRb96KdsNlaj}ZoG@s(y4g0z5 z>{ZBc`ngv-BGCQ_L_*FdH*?0$GY#kP#NQ^0=pgDv={x^{AN)E8*;f?5Ae1;W`NeD6 zd;b(NmK65_P{wUgf42Doau}Mxt^hiIuj|Ac0=!jUgfGgB9@gIc%cUHhpn;#2irx|D z^^0A+ZtPjS)(1+N(Pc*B>}Bd$dp|qdZOE5p@(DQDJJ7nUCyl?`M&y|*v3%QWId%Xf zkO_4a!pm*8$RM;xt} z)Ok`A-P7$kxH*vA9EG|A=Q-aZjQu>EPDPAWDik>+vMt>9niHe`0rjDwx#7JUyL0}; zcBhBnIb+3Zi&zWSN($Nwk8vA%zaQ4}#{Mea`*jw}Q7IbX_NrV5zM^&FvqTS+DO{(F z{@6kG|4jmFo;qdD+GNz>eOtUjij8iByxDkDX*tRe&Ep2cQDRC*OBu z2j2$|nG2gP;DZCcy`|=9y#87ejp(HOwikBRou9c_!|=D^yf*y+?en zkWZ%c5b`?GpY1Ok`kfHACG%kUT`*<&dXkOWfiO#e-FddBaXW$_;9r%b0t__&VJe9d zE#k!*j2s;v+=95Jlv)B(nQ5*{bGNRWKNaCkw@v-O)PNri7!iZCdvI@*8;~ip$61via?f3*|Ev5IE&Llu z-y7o0=r_+_D)q0ioqnk|RY{-o)ZBP;0P4Cs-}9qYZ&mqIFS*f}FX;(-QUBANr5)ec z%0Di5Dar5#$85|t85Xl|o*|x{s6#jJM9&+SM&5{xh>nt^8{^q4Qql1A5Y`4FqH=j3R z{14md>okp{Y7&c0L+@E+Ftz{%M%5SI2&k+`Ev`GIA^MZmQRw)){ z^N)pvQ{x}l-goD#3X;&qr(ry}+#$eRYTDzoNf#p2ul=-bHHO8mEBkI$8iX+sMkL`3AYw6qir++pZ~n_Dmg(B~%rFE@Sr_U&_5a;Hy)3Ixxt1 z*@;o*z9r!>#nEElu4H^lv669vQwIp3mKH?%l}M2HJ!oC8UU_7AN_iPOTGVbvyHe8SZRXivin+_RaBKW zSq4;s%*;^m%381ovHfy7a8V*1`^uK!gW<)Ha(U~-4A$m2iTmUfIJz%XKG&j5( zb+#H61`6YAKLjco(e*iS6LF?Q;i`x0iDV==XMQ(arJ52ulO}TV%$IrXtGja;3_{G$ z+N@HNz)u5E9p@cTC;TsPNFW`*YuP68sKc6;(UBy)&j_?@&@&+cqb2;(K0d7@U+4M1 zc5+6)9l18xO|Osp2&ECoJAO>hyTU{KPPj$XmeW3lZFFIL{T~y+dQISg$0b3hT@&&P z%%?lMna~P+E^PLTHX-u>^O{$;R~|-wSdbmaVgWw@GX#5T=Kqfi5Y}|ayek=EJhUtI zZb1mQ13uy}A%N>fIf>J>Ova7Um&AQupal7n6lVRK+9O{?!edn$Fp415z8X(K@m6af z43Jp4cYC^Bf`U>DuB7{O_81-QruP;DQThkFf7k6fis#fcMjYG!QM@X7HZAHc?3uaw z`eev&P_>~dUrh=)4!SBWHr@O0lrEEwtNH{|ni&7MUBR^#WpI-&GRMoofASTVov~f|1 z9nQ9*PT}o$jw0d21qIN{pw+>~?^zQ}!2MMkSd}TWQJ4S7h43b$m3&DQPn54)3Hg#4%8nXX_qI&7`L)CAEla{HN|04y<{_m>$W%fO|0+$=e# z{2Hutd-CK-lp~uUIx5#sd2Y)g=7S$pr2xuuy}1tzim~z?;i66rCLmM_+fsDCDJi@h zqs8*8L$dY}v2f(h9}7S3r3}BCgJM1;d3^yUhGuJVuELgs+%{KId2!}0Hi;1n;-WF4 zA)AMNom<_%+eGX37qEPeu((G=J|5?%z8@wm4JOAWnr+YQxO7F+=ZM5X z#LCIh(J?+Tksrih@UJ z2*||Hogp$%+@HSzOY|~AY4b0WEf4&UuqH;tLvOMpD)Af06r7$}aAERMp8Eif00ne+ zW_q@sWjg=^69$d(Kink1Hib+Jwp5Ex@n!czzrZgIXclZGl1 zbmWUtoa#&DfS!mV-R6WBiREOzm;XjxkSjzzK+9l?bRZkp7(zq9{SR-UannsMOn#Gn z4gP05gT|jb5{5iBhkFpGQ^sW4uhkD*kL?F@+rjHCu7ivkyJSF^kN>0F;99%V@0E9X)xEA16iZe(_n zKsHU))1PeGyKefceP%n;gb<6K-{qobZN8NmQvt>PErB{}0mqj+Rrrl)Hkus(Y_IC( zXNiwP4GGvfC7Q1Z-M+t5#D`1v3@yi?IX{K(uW=H_(u(M*lRk2-@el!Vs?EyYBs_xQ zk`&(glB~nbGsOzh;$i?DgF4y++*(>@C^@F7)fA1w zD<|%JKGC;$@=S2HO4&f$8thuXF12_Di6N+}Yrs5C6&{VE@fb7J z6;9>p{zo8V?(CfKrd7NyKFR=4i0(o00TEPA*={BI-_f2}NuZ=A>?An7-|Gv#PI;+5 
zVh~;>w!bjH_Yh3gfsG58d(+6Bj}{Y+dP_^Y9i|W4&RBu)9HJ?LfeB-l>WAlf;tXcf zQ)w9~J+|c6@CGO8e4f>iOE^5#1HasnTJRIzM|ghtWN!3gI!4D(8Qjlq zA|8Xw64GS+15>|?NBJgn@y)5*c>`SDXWe81l|=2F!4oy{>1J5Cs#!lN0I8GL8uN_?5_)$x z!Cg6ed-E94!^t)9v&wz%6L{5qnwGxW@oAlHc+;^m>r`p*v^JQp7I8pQtl7|S2ZM#O z^7NHXO!R_3~6(w$rBe-f4J~Z_;pf8_N(b8%Vjd2gg+}EgCg*j zsQ4bpwEG#C=*UC%dbwRNqq%diz<5I58#WX`_d&UZFEY4nrfZMve6amPfPzC}iWQ7i zri~mPex8%#7)==pxYl7eaN%k1la4RHzAR0ac^n^ekNtkcT_sl_{qH?S#{lEd;3DYg zpNWD&@Ua+~Kxif4q3VTSiqi>UZx+TTX;OyteYC$30zij?3-07(S(Q}Z(j7@6R~Yl* z!@!dSiKj&Dp$$E5vYo5ZKwZ*-9DBSY*8Z505s7`)3QA|R?Elc}jqbF_T!_%d=hk{+>IJ%DA{hywd=rJcr6 zYS2W$b78WJicY}sq_f?Y5;?WZf>Jfr?|)Q!m`~ng*f=YRY`1=1zM+L`Y!gyi-irzR zE#1jpeNaRM)|C$d_RGgo2OY>=z%@852$|j<;sejRia1dZH1fai#+GF}hne_Q{o)Eb z>ht6Eu2~-u@HVcH`wh&>-N0SwYlqc0B&q6BNV~YGs|5V38_z$Vd5Of4qxb$%6OfMT z3X8axh$BKX_=QNKrzfb@ko~D1@crGUWV{i;WdpgifZlF{KA2Y~V-Uoe2RQp~CoUQ$ z*yB1uX}?4znsPo<6W#3* zVN&NK3pGNESNA5@7>hH!GwvWGU1sAA7>l-kq2cvc@VT9-dacrxnZ`TrwFdhk>M4g!(l54yf4pj%l5DaCF3-$WF*x4t+Tf%Ss+ zOas_AtKxa*O}~AAfXa2_2iCT1e$LV!7RG1bCWI_^%qNCI?N~m4{*ZInoanTaz&GO_35vo;u)CgqhTvp(Lzn zg79>u6wKivH`@#AkwOL43B^myMg1 z-<5j?hsw6l&~_mlJ&uPjkh<2*(cJdvjcZNZcEe1By%QH`kE&c?EIw}z1XJ-#ncs$h z+&h*w9-KiYXb;D2q~pEz)_gTf>lbt<#~s3kJ#HoKtiN)mayny{5PxBkdLKmUAU7EE z`CdK1MMH+9jeD>oEru|Ga&-3+@mdk>KT!vb=Dv1&&H#D*kUf@=JVXR~4Wi7}WT_w=_aGfLexeqyiE;sAG{a(UHmY@PJ!aHjGQTjM z2WFdhS?7M1SC{$JT}k>mb$-+JD+XJ%0`+de1G2rq8L8Q7~+m%iV2l3GK1ps}|)GZDj8 zd@~d&2(w7j)KX7m{IB61eKbqTx8ehD-mIySlasTG)ce6r7V;7xB-&+QR!^IVh^wRo zDxuoWEypl?h)z_z5 zUtj+Zkkc=y;tvA@+#&rVBPmG!19_@_6-%GMg{iGPHiOad*%3e_v}DFTvSsoY~RlwND<>+IRzj z2<-J*L|Ser?Id}Q3DJzX~C~+{n?vcp?o3mU+L3l zN52r1jK5Myu66c`oBYL8#lkS$932^C$Jh|(po?gpcyIWM9q)NZHp&+eojVIT;?K!#6`P@~`GA|0F&GZ$R9j_S*(q)Kf-3@j<%0 zybXnN2~NG}Wo~ag4|H{Q^wSBj-w^gdMGkqRdDNqV z>-2l~J`MbeZV7x^OWPSa-@~%<)63jH513t=>pNF}%;dB^H7+o`waE28x#Xy4!?gKE zotJuk*75$!?+5?v7EWWuM#=l(t9Pwb2CL5{msAgR9_V<221e5N>Ssiilq@0^iQO48K_6=BTZaaqB5z>s;3j8@G(6tjmbLKiJA>!_9s!KZY4|-Q zUa9H3*@ypvqy2&dcX%WIoZpCHwZbIrVaA+;qvK1Wc39yF%ROlFsy9AJ7? z%uFU=po`93OcJi@^|80L9<~}w;NUpCYZmf>K6IdDFsg+nmAeg#O|aQKvDC8N(q29B z@8F!%@N1cG&x^`1rqs5dqTjjWHLK3AI3HcA>wDdl@Y6uMgjbLpx*36$7*DWRMXVr= zKCHmmlaTzw_A9)~c7P#X&$mx>qvJbaZf`#FXRX*04sk&fsJOT)MgF+n;N-`hcMlix z|3`~h?zE`3)rA2a-Bxy~dZ-FlXT9%t9!9ivuL&*Jhl31#)&k`o#9}jA`$Etb+)TSH zh{iX*Fq~L!M<^`f>||LTR=rAd{Bux`zQWSpyLmLj+^4>|vGR?-t~68Jl5%uQrdI4g z%8k);`Ke`Kp}5Mluih(CR1JF|vHIk#z1f)bZd;Bp6N?VY)M|~p*O6-&w>7%G(tp+G zoC)zoTp@Q zsuwYJ{OejTrX-qpg1KCB&DFpXO9C>5^L{wDrD@F8MQQhygvRT&E8xMeKk0A4t}7R- z@GF*&@K$BRl_Cc19p4~RVz-MiNF1b_r#9y9B>|k{|D2N4T3YTS)s*-xIJXc>@{U$R zB_PW9>8Ye?8Ol8t&-`h0jZ#w@xwM%{yjY6^a`-PvL0d(+myMR(Ux>on%!uMH6-^d? 
zW9=5KHS{V*6=B8r(7_zPR|rt{+_+Dm+r zTqU0;1JQhv+2=ZG0v0$(T^G2_Gb1G%Ca`9=+YT5c{j&M;=oBoK{Z#a)835{Ux6vYarBmML1@fK5_3qMM= zY)72K#mWDX;-*&hQU%ZZI%9CCF5H645)X?_& ze>~vI12p+KFm30&qgn22LhLv!wfm8?8)WNY;aS^xi3e?09Lx79J64g$x@8r++1VhI zhdvUZX){}okBCg3Zg%Z+JM4*+ZBF5bx4fLjrFGtwOn=k!COGO$1Z)4T4;|;7GAtm> z8RuoAl*}Av3sVkQvV`Njlln6YhB#lBRX1mTLw;Vy`E;lbBxMmX^i z>-%9o5ua90YyUS8mR!R4%TK8;KL2(0vCheW-qor|oYRo>%NzD`m_yfPKX?^Tt^m`> zeP3ThMO?{aM^obJ%mf)%>p^+*es^~VVp0!{gtND!0|kwQqkFEIf4|(I@qGv9xzj#t zU4ow;>;S|#8AI4p=*+vAuj(m7#0s#SUvd89mIF?kubP^@zBio*!9?5U7Ghr%l;c&C z>n$#gK%n743a9r1Koq-x*K^X=`fh?aB_%|uZZ<}i#J5t2wBL!W&Om4ETRT3%?a@&D zp`GB(*EBc@V)X;K{Q+mIu=-&GZ4&`PmbFwXIeL=mjxC#i2FMHIp<%~iy+Q=Rk+=AykO* z%0|?i;P~L}t|SLCq~je)C?P3E0MczT4`APhA?v4li7M|Z9ofJ~TH7WG2;NH+UBiT` z+S;i%56OIQ@4ry`rmn?rlEsqA)R-yN3lyVf_^PaU6So~wAKt>~dqSA$D!1z?m8j=i zt7zyz+4i}9m;x!|!b!f>5^H$wFtdMWwr*NvzUjs&L*oGM3#7X>=haW>kLUW>?y2F3 zSp=;3_=3``)xun*f)`EXEzmytWAAg%;80v;%6sE3D7TImlCF!JSr_%vO=zj|CKGMU z)JA_DmwrBAN(wSO=&Clu$bw#@uO5#$;!(@*=ERt`FQF7eeMv{ejUeA=Do?jQ9%o13 z`Qm(V$isks$uq89X*oow8-mB$CSqR0pfb(pzVyJQz;b-HWPt-*Bds1vuDE^ zQ|CAu5{HZym6CNl2ai%3b)R1{xXbPS2rlEu_Vqzm)_V}X|G<>mG1hpa8#cvobeV{n ze>8xs9<$>eIt(pQ@=bRR4V8vwX2ws~#ZL7R|9^7p3qt+xjhc;fC=9CLVfZ)Kag``&C8{Tdu?`O!=AS6Wjb+^csL7Ix@2 z1^o);sm#w^YFfeGl?)}CtxEqXevRym@QVy0K_hwErC*OXM%%&bLN=@rN~kMF0rMB@ zKndcv?*-dB02K;z(Stz^Y!Lzi!?auYMEEecXD_7H8_P#1@dd&!_iTS?DP8is!C14* zb?9ftFtw=vy%z(8mm`(JgsaU9L*{!GLK}@9EUcsHzYF2QgjLb!y8d>hWsGsz>K+aq zpbF2hE4oNj*ng@<1J(a2Oc7CFOW@uvI_?Xd_c) zkl#;;LA7VPGk=eP1J7FmC7P|GyZ!BxX&Y@SJoc(8;}j3^%dn&$aaH%JWE-l#97I#A zz6nM~1JEzDkJb!qEesc>H9ZZqpWdwSRp?$RMj@L20jdj-upH-(BzV95RdZr1%6Q}q zstNA__ty^(D)*M8BZtV0?0>mE+c{lHieaFR{ZEDi z68;Bo*5g15HvHx3{2crcJzdZ<|H5yH{cqTgk^yrFcw^nV>z(@7f*YM51vs0j`j|de z<>#4&Ks+*hh@kBVrFVUJ>>sd*+>6*QxodbQCaxlOd4RI5P(2G$+gd%}tZOHx_t7X4mgkcZ zg7>;M?d1k`fFy+h8I(kP!0%T~K4svPqh0uRrFBn4=GuqZ#e=CKE_z3(;A!#j2n`h% zFA)k&+x^Yt{XF~In7)t}kiM+smx~J*@Aqm~+(ikLwl`NK$KAiLDD`2$aQf!?c!y`c z>p9oiPZs^JAJIu^LK3{vQ6e_G^%=Z}Q)%Eqrv@PPaiA4S3K(PEmD&y`uo%w+<2BM@ zB#BO>AP8B=JycAsH-usiNIRa?4u<~JYWTO}RI9x&AH5I|r-j({@!Qi#*H~zKpL7Fc z8GJw6$b*yx3}$~em>ipX<2toJY)9X^mDbLa;i$=ehYnYfa&*a()^X7_c8b&Y*uOU9 zgR1{y%}mIM$IkRhAo@a1)$UU$)(4QJfy~X!5XxLDqyp#svNbc3k0bjOU6(%mr_fP#SJNQ-pM(9IyC zbl1=!-ObQ*_kiE?JLh@M{r5h1o>7#U_kH(Xd&TFo_S$;~|Ie0Q4hX<#ubUQV+g0ih z%FE>st_U3NHV$RoajV&nW~&o6Bk2cwC)%w;aEo)cV=cdHu;%yk{Lsal5I+UnPodgz z{;&I%EDMeMrHQ|P1Uz;d@`v+xmjkw!a`H>9WXtn?^BX^ayrzDN^oPa|yYGqf!Ra1g zhWX0nF^hqRkjz4w-91&R%_YpXjfFQ*4*I{Ij>>XEvSrD&Y+f`{;!-~2Z8Lpblp z2;b2D|Cr9a5Wzc#UtoS*Plv)Lhb-vL>q`A{MCI*z)0CwstxO+`0n2(-XrOFFX7z~+ z3)t-&3y!|R&hcyOm43{>>dVVOKqr=%J#i8exH+BW@%L&qSYEp>j(rDiXu*=6ak~g? zH?#(FlhSUzkE=OR+K#B#>ZHjD!dKb!Wtcc`$X7X0(MjXq9IXD_#<{gXq2Z}D08m^; zc{X0=;)(}=`U*?D731@Yfr?XM&pNjxu#A?f1)XmH)dHM&0omwI!0dji8zx*ACtTei zDb9k|>a3UC*nw@O3Q;X{d)E_(;pGC=bnd_fo@1uv{n8??RCro9|BshC~D%Ec_?_nG{()?;}@x0=ScZxAS+3}{N0{KTh^g|Ct= zC6SNs>-`T(36LdZh}AoLEt>d6$VadkX##l&^LJ6_F|}5~+^csAso* z(Q)NiW#Gh^0TX1^{taYxLN&=Fzz;=pmzg8RrKo--d&qVNxM8!xLWkb8n3?Pv7kFTO{bzCted>8DlZ`y=tI>kdX}Sv=fR=gSsa!pD+ch;_=XNo(euO0?gN0{qaT zrW52-j2}BO^6~|fH+r@8j8;S*$@#{9El_J^+hTRNFJhK446+_%IwODDX*z<{tObC|DBG!%FlFk<$Tx|*qK7CXf4e3t>tVscM*Id=3 zI-nLy80K$HwQL>F@YesIP)mCon1MbUgp&=b_wa>Nak`x z#bYjnQP%_fR`~Q-{snoakGHP8F4qSWGX*@B{@n=b0mTG6u&%_I z%XYt}p!hvUM(8WmD5)&K#i3W&jS@D2^d0lihmsJNXe;wBfkq>MzCYRjmTKr!SWq;T z#H3ib+03SZKwjB#bPRC#vHQL-@Vv6YuYx`X1prdH0m6^lY+yw&_GYm0UTIlH$~+RU zJZri0(X+{B1R4y^OZ0CDNI_-pRt3ztN|t#pBi^x5?OFi-PF|4pkG5-R7r_$3KNhH{)MY_>DSssZC)PO z;c5G82r9P>Q;Q!hm5w|yG|Gw!4BSW7fJrNZZa8}$m4ULa` z=qyv5A-7&UNn6>SxJ}*iY-? 
z+q0xZB1a2o4;lKEZNKUnt(mRybXYzcMh%zcttWqn^^XQzKi$%hQx&y*?;_yNs`>`n zX5PW8v$lzbI%^_6UQ-uS8&Y4+Wdq_#%@F5qva3ewv9)|1pIG$ZxU)L0JY#*@=~Ji3 z5)C+E0G0PLt|xb85touOm2?GJKoPEmA(=78DhS|tiG+vZvSnZ;8=@9O3HPW9+|Zui z>_YUckDSY<$J&CRU{A8!zBdGVbPebdx*}~g%juaR?d4(`pW#Nb|79Qt$xjt}dG1e3 zfh|{4Q>nZYqv8{;^?qEj{^n@`pg(D@pkDAU7-&-(lOH58$U@DzrN?MuAkd=-?k|;H`1twzT&ZBr1zu*0XV1kHJZbZgeG!&2FW~E zy-j+&<~m!0fiyo!ZIFsdOWUf&BvymN#yfAom?m0VNk_|{#qy}U8>^bN%7niuW1S0^ zS3f$MXl3Js8x62A8?$T-nQe(-mHev34#0WhC^S^^#v>TJcnf<6fS19HCdi$7cj<_>sw%E}fs{>q{ef>(doI@0$0VHS{1y zHgwsz>0}q#%4;2qAIFzNwt`qD>lXXOQNi(NxVd#lv_~M6eqS=(UzZw_q zFJ4$^()qyIQ$3Z3gkL#tKHh={>ASD&H@}MBQ(x%UBJ)_~t>_R|DiGg)_udney~kgx z0!w)HNv802C7SMLEfcb|=m6`U0>N(Tp z+pfn(PJLFJzHiG^Zl+XzbLU6{vkYaO`a?c@*l@4~+X+*La6Av!QKKtLmnvV#W)|j|2F_;Ty8e8xIl?4~N9E}@GSX;CJuCGES?B31*cj@?GNJqI z?%*FGUy1db%NuVyx>$K^_sPEer`&kN%9r+@`cH69_bQRPtvY9+Ac&ViGxvnl)6uN5 zT-JO9L`1me`qwiP9CO{iJp+~#&L(ne?cJhNKS{z;=!A8GDzUBFgLM6(&ImBv2Q;1v z{+*1nM6$pa;a9md^V;YyyZ=38o;yM_; z_V!yGdg-Y-$mpZoBGUcIiv;}E;b83F`2yqI65aY_*dZie`thS?Q3Xf2guVYvti3-e zEwUk0Om!Mzz$G0LY2C+^V0`azHXpf|a#wM?%ys79>h4v@ zIb~>56aG_^q(EVy9&Zv2D*r2Ht#@&&0ut*wsL#-#g8V#crg%x9A-naE- zV?}qk5@Hy}G@vl(!!wMA@lMuZ{X06jJX3q5gdgDu9 z&-1E?RWULt3E9@xh7cgDd~e4y(*M!h0#J(9E>iekm)wlXX2Mw7J$L?~9K; z(OMH+2g+HLt=eo`&8+8AYJoj6vv8Yv<3~wJA1b|dU>t!{F|yGLwd~x;MY|^FDZjv@ zgU;Ee)(CoD)Jds@O^wE>b5hgOCHk1%U9Nb=A;Rmb$IoHWM!z?r1)o18$jP1o=?eGd zCF3{&6)6XY44ZCpt1d=p4RV$2zWOY?Ruoc_@II@0w{p*)8+RlkA_tuqDpezA6jkJ` zD!0NL7cDP#d>!$bq5O{5MgRX2)iCSW#?XcOPd}NA_r18pjuc9VbqUIB;oiTmRB)Cn zC-i*de9^Q*vfj{3G#umb( znC;eU!s#))Xjap45dv>9caUnU8}m&4KsZauBlC1V3Hj3*oi&vlU>qHpJyq06>N=lL zc!QQhFrn}|B|Sz%D{F0D9DeIsOCp>xC0)w&r5{x@bKuFz`fMV<^xmmCJKgWbRzb&f zpqR~|PQB%EGoRh}$nZycV2R}urXqwQE_9DnY1Li0_$vj^nva;~-anMMipW88hFDbUbu&Kb^ofH8qDVT7NbAy=uE{q#)b3YVaRW7@|{f^%&ue|ypbs}4*Fh>2zl3M8+mrEz5_!L3)cKi|Ij zUt_f6ew(h01>pIR1EdTvTSYREmJcY>nTJ*#I=?-fPQP#B_LtOgHi`?Vz329;11~Z29(szC zBfX8{|C)(+ND}xH(sJdpiPd=ST$tzu#dv6H&MD;+Nn zGy~kg<>+W~jl!^>?qXN#{`>2PGdY-&8p99S)w9`K8Vj$D{8%XEazie^$os$BD2=5v z&v_@QE`lC5Jb6WtmPRDH`H@l!#eUSJ|L5e;WvI7j#`D007j;?*iiFc&YeZS4NHfp< z4I&#YmaXfjZ7Jq$`Dz>uZ^e##TIiRsh_KjAN%phrXHI^9RtQ@)jtOs*kU+tq(eq2| zP21Zqw`xy9K-I1v<$V(YDgYOFN4?8drW=~WV%%RS7NH<@HJ-C0CBtV(xhAlNu4J_VHuPemZ^bunkBe@|LzsG#1=!@=g9#*YXTqmzi|KH=` z_46^K&ex&Fqb@o);SxJ|Q760}iTTVpBjjCq8+)krM}H%DgQV^|RYkpt_*pz@2#4R_W+}}@Oc#L!y(z~pBjHME^8HY`()^IY_xmSn?zs#71nyUzZQ$4y!KpZm51Wu zW$lxFy!c74XGlmBOdpe1tKnC21V(C-i4;xS<-Kg%Pka#CO1`7}A6~J30?2RG-xpxP zW{Z_co&c5T^YFaX7w<9>!(pqsLq&z8fFTs_E;CL5mm(t@W_jpz5M7)0GqwchFGlAR z7_kF+Li@qCO#6SxL7WC=e@zxQXfxNsZ;@wAHqy!??&0F&6Ij5E9n)OpGA_MM=;|7_12=-$%b$Saz>^y*uy9hMbJY@-ZJ)O2Av0)>? zB~qQ67f*fPcgB0Y>sswlnrL=@t5INgfW_XbPv|Sg!s+W*aY5yJ9A9hFQ;E`|P7);s zBD{o2VvmxYZ9?V?@0f~G3?mZ9PiqzK5S_K#?tStVk-Bd#Rr+OP>E%f$3jSjpSi&5# z!7^xaiySkbCLj)EN852G4UMdQ%A7~CjLm}P;qhweGT~J2?tv$DC0x%RQe3@Cv^sQ$ zgL%K1F)&6PirAiK29?t8H4g3Z%$OBN6IziM;l}cE;YWq? 
z{riW@T$Yw5UbzC?KaE^0_ zU?4a=utHJX08!MbnOUehu94HX;L>`JKg1dD0#yWi+Vak?4(Ys*Zn}=nIHV;bRzG}a z^PQ`6W47O2#wE(Kj&dcnLNnQL4lmR9C2#B`J>JZxw8+!s6qp0HLU67=Kdy+pAvpFK z+FRAipOnXeFE25Gw~4&=lc^Y!a`&s&)H8p`bxhThTD?`yL@4^dY1(izmY0q4JoV}y z^NO(2(Fq^p;Jz2BW02?nCy30mmE8fh%+-(e$oX=wtSyiz#y?rv{Bt$3I(v|Ho^`Es zBvnEJhYQ&`G~X^LQ+)ocDSj8PM%~#c3Nlnw+=vtwvMw~eQLFBrH66Jm3m>7mbC8>F zT5Ca>vvZ0<@5=dfnmxxQ?`Gy=95a2{AzgrpV)800qiYQau-ZX~pL{O~mCRK2d)rdAyU z<3E>*cJQN(OJqcTGoki5n-%kY%Gyl4JTl>wH101>+v8@x328{_$7h4()N-<67RxjD z#PmBTxJf+^W*w3|5y!SD=Jg0A05{}iBE6T?-LZT8707DU4#$qD?HWw~{@w_eag$xu zEMN&0a~)wrUEhK6RWmSfIXa};PaA6=3E=HG&{At7*iDu_Ry-E%V=C>34TBY^C+5Va z@EN7))EZg&O>)=c@5%w>+dx6Y;Vrj>RsuaV zE@Lfs%#eioWGQPzN%Hb?F$__W9B3hP3$7*~BaUGFBankzzzDQ7bp1Ybaok6?6jjubj$@*D$T zYEMJ7$EuH)j8K|Nhm$|`V!!#s_J|JsmwZ_0#l`n@`Xfv&V-zWCzWoRF2>R`x7XZv= zRX1P=q$Y{^a9NfFMTuA0FbANMK}nu8!rAR=d5Tv=;-H;U zTuMpLG?(*idD4yy-%h-B+Oh@&nyaoPW!wkxjFl6GcCnU;N^P#uzv_$`TU)5UN&zOf z%HY-_bGbKQ9$EptUm=2Pmref6XXcw?pF%S-K*l0H&ty3&@%gRuT;Xd5f6s(TuuWfu zO{HuHGKq}i*viGv!jdc-2{$kDgK5!59{b3KhsD#^Pph05xH7BjvY-X$Wfb9H=?l?a za(VxUjlLH^-iPYBf74UbFYkedR9?S76C4VUiIKf}<-5E!JGNLF!7XI%5OjN^KS61E zZdR*I*u+TIAPlH2aqoMn_#xxevb5AU-K@hSJC`rLrd$%&t0$mg=8+JkR^(_ZxCze#oA3Q9|Ju$5c*}!(*H~_nGh%Xe9FpAA z6=YAxsx)AoqJb4Nw=2v#BH4|6DNm2h{PtGDva^k4h*kz4)|;xWxSl>Qy;Z^0(=&6A zS1#LD`FpN9YRkdAk4Xv-e}PY!g)h5BZ+sOUlgM55@@f4Q@#^qe^@*5#WgAb0)CJ4- zh2mzpU2r$0&6ownfv>3|CZ*hIZ?|_ZY`5~e>dd`Wn`K9b1O}77J8$US9~YJwj(uwN zIpFWp(k%A3lV&}qu%t~B%-rJ8kT2INGL*FHPcW>-W=h7!N!v5c;csL(;PKoYUl%c~ z`QBBOmKG*6v$D?vbUl>teoP>;*+m@^h@~K8Lq5*)y#_Ah2h#}j?+a@E;(Ts?q{&$`V4&EunJ0+ zpR=}7O^`k-)3nx~ewMjEA{ktxSF^Sg??^Yox>x<(2eIF@i;JaNTW}hC1t29f8Ol{3 zW54|6=#g54Sm9}C*fKAyR#ww11cEy5^7!b-d$gOPRcrryS32X>z^oB0^WEn2M-zI` zuACpSC%UNlKbq@VRV#w~6{aMyXe0E+GcilZlfJ8Y!9=S1+$62KY&`oXA#KxLaz;<= z=@usxaaeo&60DdvzFWf_xcy%(z}*%*A(O)s2LQbgSsAk73ILVJ#Cc0Cs!3O0mjl?FN4v}!p41Zu%|7unxPI37h03ZZ!E=0$+i!OTH-)U?gpl47CiOKiDANR6Wj zfk&<2CM-x!8%(1AX_XTzl5=1#T&cL|Z`Y?T?#lt4Q>Bdy0gMwpZ{F?E-hpKUw8CkP zjf=MNQBLW7y!D=I5Y1IFV#E}Y_cqn`>c)YxnD=Q{nHm6N!#Ua5Ue-}5x?kCA5FK2w0 z>y~IoTe4=3_nOR)fj}>;$*wGG z(0wZjsD!3*Bs%p%2JLRed}x)JDt;;N-}SUrr6p{_kwWS7#}EUe|YDjI)S_&QZ>1GB9f$Gya7OumcwO$?>sT=CaWlJ(`DfD zl=GqD4B*^JIsF>lt6&1pq@B|JE_?WPB(udee|4xDo#RhD<^dyTbv$qVO2(0;D`V-Y zh>dR{af#f*Tv3aK)Y!MK6Y1vE48Z^N4v((?Syd`M7zz~SEIt@onSx7|CQa<2U5X<6 z$X>Q|8ufQOeQ)QbO?DNDO_z&GRVy?t-On}8tF2$pagQ0Q-j7HHzM9@IV|+!S4y}Rd z4ofe&Db9ZQee5LG%*J}D7ZH7nKYs*yK6<{TPkG;Rk`np#-Md5QYWO2ECQI2U<=Di; zV#gz`_%!$8mC27`q!B$SbnkB)sa77x12eS*t>ochRz_%1#2j`;-bBd z!;n#k#m-E3r2ncr11t;0{U&*I@7{IUmf-3Mue{p4v?LmP{WtACI6tr5)1(<{PH8?E zhNukP0~$dU^VezMt9vZ3F(e~Ie%si^j7?Vc2W!RubWi2dp6Y3X)M8#)>c>k20oTrP%!2?)<5X2hxDsfwXDOo!3A$Z$Q2Z_L)n*J7rK7Vch;vjtWC zioji|ic$cC^Fl6e8W*N{7(6v+oPij~)YnyNY88c9OEX*a5HTO-O9$t*ML+~k77KWa);`48cnlzi65vXL%q5}C(BX^Cq2pDa(Eg4<_JrXw0L$9VL)ene z^{1UQy%ye;1N24iScZ&u>y-{ zL5sX`ZB!zdZynHeBq$(#!4wJ5X2Nji>L=^%A`ovvB7H-1vC~)Wp|3!UmItxuEB&h` z0S)8~XE5mp&uo`s`?fe7qna&kd?8n#ZZ3(@oW)2ibyz|c+~e&T`-*5|-TsDXp3lQC z5L;=2Da;qhFy?7b;}=~)BS~MZl$sOM76pq!5A=%ypp-+Wtr@PRy7+RdaKlw|FfYp|3UeUM{x9ujxV0fhFo;pM6@ zlez>(?~QSvR<*EfS*M2#I-xpq%m_#6fPU`uX2IU>Zf|eTl^vc2_uiG33ZM=OhUR^q z(f8_@7#YK4sA28&>8M7E00qk4)Q}|#O|@Ef2cao%T|s*=%aKIk-oTuo;(e!q0)kci z_MpG%_@()#u&TyfT=W!fzeCfm4P5b4d?aHDzu)qfD5q4Zt;j0EMH-rz#9v!4e&67Y z!cy~?&%gA1aVRhT1-vl}ui-4pSOHZil@&N$08#vQ*3}uUqr?Ib5g?mDwYn$-9g8z8 z0=(W`wq}Fmj7Ao02}}cwO%~Hp=q|hScaM?I@{*6)FG-vR-VZ{{g%|#H;; zJ)V8sv?;wDw(Dx{w#Ag@!NtSnTzzW4>4^(j?15R}ZbE|FPJgN*bQKtf`ynH{@hH(b zA=#elQC6NuCSS%UFdj48ARb%%^34iQ4gO=v`*6BXv$F_LW~uZLEdab|VlkX~!&v`p z@2K!!4AZhFjG38${)!h(@5-I3dV_`m1r58+U+d)vDCR5X*rjG=a2b78e(A|pzHsCK 
zIWvg$AknZUynZ0F2#@2^#N<}IRNX)AxF>R!ashmSa1ND<8!Ru^wCIV6xru|dCp`A4 z4LexfGRQf!$D>Zc>1a-_&j()i<#-xqeEFroofDJ!r2z_zIcR-YPexX?1SgCcuAT0++>Ay;{4hT+X1XFh62blk}T;CGkIJjntVA# zgLtQje|+IEZXvZksHsC?Q%AX|>`!Fot6smoR6?=S5}I#E;pZ`>wqXJ(@GBE3i4TcD^(@I;q5{Ng4dJZ+*MM8&9jp6f4G9Vuj zom-Rl2lJbJC;Pic=dGjh>SGI05GJIL%XSEASO8qdbpo1u45_lDqd|h)DX!pS%*E%t z?@oztV$+An7+X3!wMxr4og91)(PDxULtha*QWGsi>Ndy_UhHE*$6}_6G&!A zOw|2vGZF)A^Y7O{*&Lok=)E|XK#t~`hNXLWmE8;#=$(ZH==oEMLm%D5>oS_VUMp#K z9o2;ULfwD$U(Q$0zhkL<2_Mhn3s9ni6a@NH>QmgTf#1Jst5)mK0-^KFJ6H+aYr~7ml-!a(_Oa|>F-LZ+n%`2Q|1o5aE-Z~$Z z?t*|^0ZHiGwHnE7e`uYI#90G;?~uNHfcNt@Fd>fSqeM_)v1B{4rtxM~3|z2~dQGVm z#lI&N{7U%G64QB$-xA_3P&MjR?B-PRRK{gIZxF*RUHkdK6>saieuu?IJBOs3B;~wt z4eYl+zo_d%pah=p?EhOm%d?!63*L!i`IhR~W+6fEO2y7Z_RylF{;+>n%i`Sh9sky~ z0i686pK#8>?@A~S=m;wx4k)ld{YdyUGk+xiUiv1{C-?c4)(_T~vTLMiztdSc-Q!Vw zlh-O%3pAzAt^McI??9IEUq|<}aZ|dW8l&_-DdX+P@#XyU3AktDs`Jj<0REOB{Q<7~ z@Acy*ki&nkn|@5dAOCy#GQIehvcvu2#Y52b|6UJ>RKR%u_fq);6y(3xC&~X0A4PKB zNlh+EX9x9~N1m$|6sLyZJ%rD|MR`M;`uYNyf>xSo{iKCcJR6W zA55#xUM5Hd%BVK6YIOtQsPdY!DL09ZF1^5g`DXuFYi>q;QBE-2gRpvz3FV~V34AG9 zu^*})>3{NL<$ML4Ce(*-@^nv?HPZ=MTNHPuTXah|dTASt+5_r9SgDKQ3G1#PJdGsr$HZrvFPaA@I2kJ>k#FaC)6tOwrYEAJ{de^Hq#>J z6IGi$1zYns>JwZVvI-1%7JQ*)f1bO-) zZL_t0c5rI+V!e2?nfypCXOx)dj2LKaq3zp~(SnMpq&2qk+h$ZPkg}}RlLqA1@J^vE zZ!?R1v%g0@?+8;3#LZ7ArzZr|M|sV*KcXK~*+T`SEgSh`cETc`>^5P>E~b6SNln%1 z(F!f|_KJLRoTe#_8))Cme?kp4e^w<6I`VhvdCESEmcR5}K3OqVef(jO9@$V?UQ6AO zYi$s_%u73DALZF#L&=?LoL>)Lt)PxSbEm4v->4bK1Pov-AwT=A!&@L?k=%UzB63|Z zlqk4=f^^8rGC%Y2wc}aURsYf?mA&h#i;j7rY*vYNP>v`O@3+>^&0x$YTXxYGCWH^? zyIMEqDR6D8BsmKT9-?$z(gVJ6=B3yJgkM6OPhrs(&2_kM^5KB=F}3!iG;cl16NT~x z%7WC{os<~#N7zwo+V$Shc(LM~!gQ$p%v&+D%062!0j*33i%0J6t)S;Vo|(RumcOBO zV?8rUH=^F<&DvNl^p&xj%(UI@G$Hm-;=i?DFoOSN|1U^9oE{HZDPYn92G z_3(SugS>*>gUcsD$M@XDgjc(m>X;gm&Wf-XgwCA?lHB2XR9?Z2NW!YSrC9faRs+|( zw&h>7&v#eBjJWJ6EOgQ#-9O`wHOqt{y3AD91WNZ<#XE*`m6H!AH%f=BWneC~4m0x~D&pBPgelBW*k z5xGmKh?q60!E(4UjRnRQksmDmwXsR1ofu<0) z{n}7_izZlLzm#sBPI+u=w_tSs)6oqwuh~|TvH609Hle_d15}w zu+il-{XM60_nw&f1W9PF)vN6+bv4s6Sl+if)xYM_a3uWdp75)Om@EH9cN`+clTg3X zpmND2tH-QV;d(CPyg(SEtp^VWRp7v@0w z_P~An)`ti&3#>llNzH0^*xJfKQ0KS0o{g%JPOX~9)zon#F8<%@rsmDJi<1iA#0{Or z|Ez*+;NNrUi?xv~RiOJerw8_O$JVl!)& zXK5S7$;^(wOx|oSzF^4u`-)kZ-wPWdZtZT>Yac$t`qoo@GBNH(p2@c-8nNLMTY`;q za*J9Ykx&%daa)RV7q`m0eVzZps^mGeyBXN5_TWH|^qoQ0&d|nsI@c3EEz0AZ0n9F0 z@u`nmMHGl5oszq%-9`b3-UJen2f^i>)uQ{Q&tHCbO)7rl;Z~gB`MTVy6_vc}_;D$1 zxKkHZw#+!?3{Ivm^Rdc=44-nUwpS?XA6GaNA6kBg#M?vRy|m{mu?b4%f?_yN~}8b`!pTd~mD^vDkLIS%AjkcRQ5V`YL*Y8lNA)EsE^AA7d5 zG+zTO>Jx|m^SXTsBKo#y@m;x?y%#xma6v#K>D;%V8t>AUQF&kK4AQxu zgLj=zFPV&hUOHX>(p%hxUGwKKbp5LPEE`O&<3p=4Qimbt0uXFXbhn$tp6Q>8q7~U@ zJEnW$&);Nj}tk}Ij2U4+%R7vOr{fq6bDqr3NK**q0cxg%sU zAjKnd)d790LGUIGui7ul!Tt8WVy$x{DTChVtnR9p z!HdMk!*_B+aB&Au1(!7fKfvd7fU`P8i{cnpIO$f4MlleSI?!@bhZ|)=ASVTg6z^Hf zUNF$9XMC$c1E`I&7LsW;Pkp|17v3*!&v5o5fcxsrl79fOGWJ$BylN62>HG6)8(=G= zgg)SM+yB!Pg*yH+C=e3u;CJ-Yw&+y<2Fa_ldz_PA4V*)#9~_f?h8xC}MkP2MjOF@QVjQcv%{YK9v+3YXa)aGAII1wcZ}p)JQKQmd~n z5I`M9L@7Lbz6b&SD7o8{t%zX6lmx4e?r-e*bEkgD5fV$!qS}Sa)9#6v=1yzLG@Hp@ z7fyRMFEj4l=#nVbpy7qAPTb_8&N|nA-^^d1BtRoxTsQl(?a;Tg8TOL7s6H)*TchQh zt#d3wrHc6&gw8^8r?qc`w*pFgQT>~z?VWm6$=gFDaIb+{WD;u{WbhRA$*!-O&TMx3 zzqnI>ep`aLbz;FEc`_St`F`5sTBX^RXe8@+Bv(;A9exnp1zMV@zWq7!32$|RSG3dj z54^L5)kMfUa2QU;{ddvq@S#RLQCrd`PnvwSZ zS90r%ofto9l=7Z%r-6CT!&fP$X}{FDti1m2pU&{`(zA=CJwb9ddSijPn%8+o;uQ^w z8Li3V`>ofChb@anGWGIwHCgCgdv>E`JWpkl?mD*Pi{nuKbVs=1 z^#@qnm-zrQ6sD1Jn&JcONchwPgiNc3vKPB3q=QI zm7x8~yp_I~)v3EI=2q71*P$CQ!@240WC+I_{JN!f?j$v@G0Vnzh!JFdJSNoqD0avc 
z(TNqKnF)n{h0X9hdjN(CPvBGcckpB5qr{4t4cu2%)k*t@fSmnQUdt#6(bc|+TaT4H(eD!chAwX^q@+K@NC$$vW*5eWk;(6tQM;-$ zpSJPEcjT;enfQ;kGGU(3gR$-EtE&!I7aOo)QV>>=+C%$05*o#FaWJ+Mi|OCn>!Rj~ z@Km>qKF{h6{rqRyuoqha^I9WKe!ohzR^k2~qdHaVDV8_lxkm1D#!RYm(MfoQyb{T9 ztEsE&Lrku-a$@8a2o4NeD5q@RX8ti;9Pad_APASl4KvOPE7GslhlzVn(XJ@DUX71jbrklMK z6;^QO)|ZBa?TOs9?V;O-mAE52>9^0%H81w&r&*tNvz?8HtAaM^|fC-e%pg&5}VSQ8#TdVUyRa?&eEBFzr>F zT~6DpR1r_$*Or2Z&DAT*Ocz46{CYntJc!Urpz81RNGlMKzvI`7F{fL@g#11wOk2hW zI1L?eiKk5plGbRa4L)K&IgvGMU#3JqI#8U^GP+Bbd zcbXSvq^^8Eyxn^f$Y`}kA3@x}b42kJ*849F>7G8#w$JEdDo>lY|0Jt!OYO*4iFQBH}Bg!nlphL z<OCI+MO~S7su4W<((MIunnYw$yuXjj)*xpVUR|^+sD8MYXL5!8sZ^nYzol>SZ_Bu9QD*ARcSjZB$cMtcL0n9%BnO+D z=1{~^c`DG|HE{Ot8Vu@Vx~_Ac8U67|VngCh>C+OMcsK;&`in#wYK?d}yCMRkA(HRH zqe(B&4!cM6H~a=jI*@MVylsg?3>>QdCSEmoLYFUA{HS`&MSqT?7OYm-jd=~z;)z)E z>PvEj%W?#}+?pm*Cj|2Osq%?KfKhzMbyR!&oNmcpH4y_zxSu`sUMhmuCB9U&&TD|+ zUvIbYxQV0|Ff&)kZt5EG0KYXt$&WIod>V=)3Nk$XL$~%iUEKDC?gzt0XrQLjpeQxFPf?nf`CnY922bw2~ZXoG8*c(JJ$0 z7aXRhN*!Yv7ia$LEOqRWK^du`YxCYW@s@$2H)E9IcxT`Byu!By#34w#`iv9rbG(oI zWQ1e}2k=a1GnHtvF|O%@VNCW+YAiC9ees>05!G;T_szBl+GSC@c2#w>{)~&^v^`j5 zmE{pH=Mj<`HM0jd)Y^Kz2LQGnrBnhS?$mHS#^lK0Fq7aEFzom6!(#1)#g zctv1+4Re1MpJa?Px*+6BmO9U`eO&xfR*v{sZASv|lG+!Lv|Q1`xSepjpt%(){A?Y; z?D9#}|4$H*S(mc7PlFUWO1OMYa}Fnq{j@hu8kxD=_aswIp5GXqI8j63;~I{ zmvk@R6$ZSe7a4kGd@@uRK@B>L*~N`^)hOui(!Qm zZRuo+YvIfRH5B63)3MWW^?l7N;y^s2L5g5vq6+nD2m@X{n;|9Rnrg=2J%0bi0sxS~ z%g1UMfw9kFu=#Sl@-l~!nC!@9*^!SFMI^?DMD%Wb5C<;&Rc z=~w}|?ahyoa8;ZcyV?v6YT{L*S&QojvV{c1pV*g0xi34yfKS_IhM`KGA%OY$c2c3C zYb`v~voh3|(kh1@Gh5JA$7p?xzOiezWv4l=YAS)uIXr*Wb(Y2X6S9YpUWmB@K+0eA z1__Yz^w9IFiE{a@b8Fke$lH+l^rq=M`AL%*T}O09X1&@Ag5fX6z4`yeKjq_Qg~^?@+#QibvgRO=ZsIW+$p{0}aIlSkbWz1ajFu zheb>k$po9*JW=9(J;(KtrUustm@TXxqn1wn=)QsYveoQCXSjL|JhW=@)xnKXeVdm$7aSe8 zMyqR2))=RHSG(&e)JR@Z4G$CL#TA!Xdo(3KHddbv`&I;$@ENlbKaB|B;|Ad%@>`Yk znI_!qB)%KQ)Ghn_=f{)A17FE==z`9mnrU~imo249E0f+nwluIK{{0t5ry@yz8M$bz zN3}b$ch*zz>HN7~dzDX*zD8pXh&8TOGLlprmv8R=^3{iVJsSEwtW_@uBXRaelF#!x z9m$pZp*!NL8UuS#4x2!yZPxu*hq@lKztfy!*PfyPhzh3-_&*oQy>`IrU7F{UWme~x z30tkOTUpo=Eg@`ZWlihglU)M?S*0{d- zUY>XHa1A_Xz!H#bX6n@HmWcnco74Bhwg9%hxu1XnHAAIFDyle}OhG3b&L#VwcqSaj zI~jF46rok`%!nmDtRi!wSba4`6KOk`-mCG< zEarIOkvU9ScNW}tmM@P_p~(Ga{<}|B)59a$q~U2XrF(bcXf1R^Wt}~zs-*QDu3w>& zW@6KJHFUf)803cdlC~on3pOxwG(gl#hehBb^&6`66pDG`sehrXPs7cWU^7#faAHg=LzJVaAvam1zr9D7za3pc*dgC zEPOWGEVy`@^zZLoq3~Mn_4=}2J|n4tZ|N+VejK(tZv4E9`b23u@3Y zF}<-)tBjW`(IW^%k;}2b!sok=Op=EUZ<~LuyvGUw-L6rSJrx*r-%qXI(=Rz`ldt8X z1~k!Htg#M?y7mqmYrryI1ZX^CrUww3vE&=a_5AbY-Zmj@URHL&Cf~|;if7&Y@vh^s zvsaGKWe@1wk~}v79~||3Y-%Kg?OF0`vC?tfdd#L|AY<)#Y?QEKl_Zpc&XuUQ1TI!nwT;Y{(`|K<6(M6 zhNKE<2cdkx{#2TC(dt65KTes&BuJNU^6cgu8qN-!X^eni88*$!7IpC+N2dBRnkW0p zRqit6(uaBbDCz`ZrK0!@#(j@<{W;jY;>PNKkiLgSF_Fj_w<%-d<=%|KoyPxC`*%Yq zH4|2rt5c{iUum$71PiVrJ+nd4Hdu~I+VavsV=67vrH6mF;WezCsZ%_eUZuA4p*&jV zJOf{Mx_6f+&-8saLW{SQjREr<4J@_Cr$$9qy&E%GFmyy~0UtBJ=M!Q6tK%0iSikPg z+5q`UPVMW~8k|DmE$c>^n22&$MX&-1>`MH!*+ckB8dNE(8tTOp0!NXZujROac?`XB zwj_O&QFSoDAwgM)*1&z_Btq#zcfzmxN^_0Q2oitkfyR}%UM7C0u3wF8tKu)U9xc|hq; zP+5f^Oaq9kzqGZ#@oei6#+e_lZ01(ReQX#YiuQC!0+jQ-Wy3}UN}UKM$n^Q4bQs3f zbt6~)7fKMQe*WJuzLN0r5e26q4?7SjxTxL}w>$hv{@$N`bSCmR4K7uK zoMq(2o&E;|QaIRS))#ZUxO5P3pax?A!@k-Fm5>0HP-=gA@%r4(mhl)T3dao*L&xp5 z31u}YlhhyW_(koyBmHNgGM^)2{D#_*UE)Gb00{+&D-P zumzI)yUh~5A3Aa*uFpWvS3pu4Z4X1V`Px0|@TGouMSgxFHnsNelQckUwRWsCj8U$) zl)oe|Pc2ukGq;P5WN4J$hxE@?G)@5eyPW`WNxvDBq)zutIRw?)p4RnAS?MBP8(E-KM!Nx(sQgJ~OwCT%VWv=A^#B(WzW@9 zD>Zu9)6?S;A9PPAhX3kmR*+h%1_uYLVRSInboWfBX(NnQvv+gT*3i&k4OMoD52m%& zjsZ_!seC%4)};8~z#KDoXmgsMcc|)Wyucc}_Ja%_GjT9@oW7auar?UzDZw 
z2)OX-uJXl;7abfNE-;yesc|!@WnU{PEYyJd{r&(%dExCLv7G-%3&v{ZoO#_Mpz25{ zIG=2st;WZsX4CVz(s9_q&h7z~O3f=M(7SgpR76DN?DXb;-MW-T&^ln*!25M>Ao*0H zVB@{lCz*Z@5eRu$tO4j4-Zst6&8>q#V3a>Zi+06KE*tWfd;}Tj-y;?SG!H1-D3-P? zNSIi|Wbp%m3ADMd4Ui-aG)p}fCx;-K`gdcwtd#NeOQ?o!3%5~HxM}a&s9z&?z-stIA;t7rT>;d)>HJ`+LNx!5q z*Rj)dI16fNB=qrKzI^@y_s!F&wV`KF4^3{ylqrhnuXHmK_@Ftz}h$q_N@2mS3 z=x2mH`p&u5KQCUX9-OXe8FVJF6Y=kvbFI$Fw_s?zJ;Lcfw{-O9^vnMHp`Iq2H~imm z9ArG)2>-by$%wPOAKQJ0_vW}w+TV|bIYqMj!EGxCHax;kox!^Q-tXLeSPwEfB5xs8 zECx0}FP3}Ahfe2`B3>&<%zd}QBjX?He^dopG(-iYq@ZJD-|M1uu#3-^bv zCj6OFdb;r2s?JGV`;YHNFTd;6khVkiCwK;#XMEwXZqwiu<#*(dfGoJU4Q(^=E>ACNC zx%2M*m)y%4E02b7S$TAP?hC>CAm@dGbQOeanNl zeue#vhqw=Ol`;C)8A<7>e7l~Wd1=&g9aJ4lXaogt`SO_Ee~ONGXYSzaSmdHT26jL)t*sNSSB z5x=QoO#A5C>#ur2*!RH+Z0s|eg2E}rd8 z5bF`k%$A3ylLQKSZq$JOf{l#dTnIJABf%zr4(EUQ7ODP}Nj4rM4-5$J{#2h5E~LAj zV)=+R>}EdjAis=cul+&8CMv}-WXb@AHMk+5U{?RJ^-(Q#O~TUb-L-FeKS6}3UbqE>6LAh6S~8}QUVX6A3BMKK-!6cuF_z~*po)zvI+H$@ zA(-1M0)%zzf>79jMGd?_C(MM|Y@f?~GrQZ#(WW5}!Fn5}aIiip4|1N&B!*?)Cqe9$ zL#nE-SbLeS`87qJ3EmVEioOS~&D1~w35P3nLm4v7SmfdC47wpk`k3vwf6wra?O@SZ z(9d~u%dxWM`TIV5Mi|_*ypJ(5Z;t;=EAB!OMN1I!R;v{7))2y5Oj&R_LtBMa6S{f%C}Gz`|2)9Xo6v50V_7s(m`;q-?ih5};NLsMe+Zg18} zW|Ye0$9`ZypOAB@mP!#OeZYCtkqOor-R+H30g&0SxJ>&u+xQy4cl|e^oQH>puoB{G z!qw#`>3-~KN(Ju#F!Z$l?(3aQLNn=LRAD-p%Yk6Krkjwp{htumd!tPy_5 zteCkH5A4~*+HSQ07x(Z&w&(>?JqW$I>hV(myKl7Utff#`l9g=dX)Y|XVe@>0Eh#H4 z>sX}0*vNtUp(Zm@a)me-7Y++=YWb?Ppcx-r;iKcVln^f=zW#!!Z65`zZRmj^jcew93iQ3nWHBf3I+!eUCMF@5$fV`Btoz4M;~k zKPiovDjWgZINgQ*ELrdAX%B?rfi22m__8sb%}R&8?7pucr}GPEud5}BgIlJYCsPbW zx~pZzh3m)cMsZbQZeHk@A)|t;Yb!}CR(csF#A`aB@zh+FP+N+g<-zaC+y=AAoN8A? z2r^Y5$3X;As6WE3TVxR1i8~&KZK`OF5#9T5TrwU(!7OwWt&gJ=?=*#v3Z#|Z>j?@I zQG+j6hMs(m7_{0d)S}r1y>5OX=+|^Y+0C`&;z5+JaAqt1Z|dNQ-qBz4rVTM`2#T6A zcbjFqZG{@*ZkvitkBs3%%9j$%NVMGX6@UU9qQ8C_PWH8i&>B;9@nX}%Q>rs<<2Z81 zCeX`eSp0v_NE!5Tcj*?ztM2z$H6w`_W(X6L-Pd4Vh8DGTtP0X!kCJU3g`$L_y955i zN0MT5y_(C8rl5>ATwpRi8^z))-rlg8JhyR%zhygsWcK8=&1*g`c`HTyr*sDo{% z3UvCN6F~v%kJ(JUD6MyjZLTkI5bTH~n+V!%{K>IA9andp2=&+y|A7q-E1kAGymyrZ z!_ND-H%xXUsXTZ2b4hgf${A-u{T!HhRjC@_c67egd1YP#f}Rb!x1<*I!5#5;py=*H z#@F>m{Jn#5lb`?pqF)dH;}wVWc)tHU41>pw{wL`$m_u&#|4aGgYL4PFI|U#&J6k!P zg>Tbh{<08<>t`s(xQ?ZL-FV$?nbYq&zZkZ6VZHO?zsZ02$F19)_A*c02)v3)0t3&e zcRM6Feo4b-yZ^9Ay|twNqhNjL|GH=#>ZnRMZ}yideaQ@<1lRK4kPuDZTag9%t7h%p z%VBx$X@2;tIfXT#V%IeHH#9|J`bUn&TL@#;onwIMrs+%vy-UL4sX8ZF5mVlUlS445# z5>IiLlagSFB*wP(tP8l|X~Y3~QH;KVj1u@>?_3AFqUr}k8$)Zr%37#p!kBnEQfuFp zWn9v>+47pFzwp@q8SaDdawIXWFE9xClfzu+d!t?D(|%nwyRD%{pF?sWbT*JsW5IW> zf;`iCTp4Lq8qoPdgM<-9`Q9T(?2D^X9|?G#7Y>Zg>E{NQY*RRvtuj|X4(IFOblmPW03ijSnvnz{wdia=Tpus1 z+8-k4yfRv9bGA~s6}rUpYkBDNBz*^iO6{ZCm+orlJVTY5Zjq-`dqya$stEXi+84Ww z{79hH-LmB>nnVl+w3T9f(oQV3&?d8I0UXFbT(!kXoH-oaq<51oB|U$*Y4q}JoHdZj zbaUf<7$G?@7#rK)sIw%rb14Kw66~j+XY0keZ0@@BKC#~~yv{IcdWJ0dI8W1Lq(;Ce zwF@o><)s!u`C86>OtO*e6~i4=)(ppQVmf_20r=_I>G`maQ=+4 zeZa881i5lfqbnG9M~+(f!TmRATJP#yOCdnHc*w_b5x@8bsvAymO3m?Mzh5@6?WCl6 z*sza-(RR`RsJZzJTJ2fx?{plyH?YJicM7S1wOsGk5?^e}wc5$M&nBK2y0bbY_wHpF z=|cx-OO*V0JfvNq!lK}ilf$(Uj|=AxUcB%5qo^%e+Vk>jgR*z;7`_^+2PuV?8-uiG z-h@q!61TZN)K&EBK6xcmDm%=G28UB7J!y-z5?n*>2al2ixcjFdA`_?zHB@3upbXfU zcPccbGeq=S_O5jhk9@HYTduIJw>sz}y#9Ahu_z~@r?fR^fD?>KZ4anFdYt8?!;cs> zY!iVW-&!t2gGhE~obX{HtE+qd?=z{V-n+YUqI`~fEyMzPRJp*sryX zgT7qM$4D=qM+=5G0=Bxdq*iCOIX{?Uy%;7+@1{OZ1W;&1j+vJWFoaklB>(4E^AxPR ziL2#w0YXfDI?r;TP@p;2uSOB})x_P=@8aFMI2r6`#*^RTz6~ zJkSrC(yy}qtZ=XGLE?^*107cj$T7*XoDTiAAX3(EBPA4npKB=Vi~fB8WMSK{Tx9ja zr&vzmqgt+p^8&Itv5N4cyjyQ&3EQ}6wjKWCR|S^`^*^nM*9v42){>83q~z}q9|R{J zJOWgkQE8_x4eeF;bha|H3H+mOD7LRXwxb=`1f&+-yo#mR0T$qxyGxHl5UjGr)Ez^j 
zX@zxzNvFkyyJcn$9WUbVB<*iC(ny7XgJSwdshlo!{R!l!xKyIMm5XEJ6sNL$$tQY_ zK?-XobqzV$8UkG#$1SKMTwjK{9>ThP_>@W9GfvR8y~+-9^uwPSD)^{o@D&6t8TEDIfYYf)VOTlU{QRT;nls1W|_G zZiLk<Pd3`U@mk#va~FM1Bg}IcV7Swwn`p69g=rG>y6NuGu1UXqjxInl5JQ~4+e~LrGR_pX%e#kp%9Yh1 z0B7P63>%`*V>5~``TB86kRD)&7MKMFH{X68T`ShdfW&9CeRQ}jy&izxu=BrTaq0TK+o|Afn!*$0n<3mR$Qu z$PNKSxWWq1LTaY?DAzKF5gwC{ms>Lqvqnw_dA1Dm6itRkKLKg$MG5#^pr&!PM9N&U z$-K}`QaUFqU3C;zYUp&HB_9u`U3%HS1v5VO$RQ=KAQ^5qVgcQdo^{T{5+ar%m)nj$ zZ_~E7X>Vxu^?b+DmoM9mOslEn(;b}Pg%;uS*t$#*TK*KVT-tR@?wcxba(@*GhHyO2 zKGPdLEbqksQtdTn?xZU_&!lRTol}T@UyyL21yOgdM+N6)a!SF~#bA2otnbTY85_LQ zO6-nth+PPJNlhj3Ly;kH+5E&Rk>+M-6|nbsEVwbs73eYcChTexV4xon+V>+ArSxE= zF$tqG80-4RA+t}`(5A3YWM#t|p6n{TGL)k3(tRTs{%lulLF8vR(uzWB3o2Y$z*(Iu zf6DX35N*M-k9K-t)gy-13vIvq#;g1Oolb4_=s5rm-t{E7Mk-kub_g;G72yr`Z3~W3 z^gn2NE!x>n7~S)t8mJukW-M4%m<}#1DAlbMA3=PAmB+r$B`JhSezpxpl5()tRags` z_rGK}Mwme8(nu~G3>YYkm&~_Q>rCb?@Nz_1c6?v}bVo#$MFx6k*{UckOEBDDoFJ;W z%kj*QlwJdp9z5PiZ}*-x!qT~pX&4`13+Ch!~ltDR%#wzJV!kRIa1zN z$9G{4-s5Z@>cpkBto@Q|C!P98arFt5M@xtHaqRls!RE7+XgNuRvS0pA%6Iw48c55~cR z!&25c+85c>cFq+cYVd2bA$$X@X1JoJ1D_0dg^O@j*iC?(Yw4nlmhb+BZEMC_r_8DU z6Cu3ee#QFpXO5R(eAjwS8>c_zi9EixVc9D9Dhn`&80Haz8+by}2%6)-RS&lIyvBUN z?3JYrb2iQFy1$D)I#bejFPupwtC^<0s)EeNO2AtJQ3R{!?)tlK@Rt29JDXJjwQlF? zmeX~h;JNyOL7os10>)=!3K9dVOA&FIiVVt_$lFi`p-O=LBEFDa#~ySd=5&NpG6?<+ z!*Rnge%0IP?Q&5F@{_@X4b1xdjShoNvu^O-UZJWKl^OJMQBKgc&aRPN?%NT&(L)fR zI2lnEnN+p1AF8$=LU`ZV`3a5(KQx>$$)GrADpZI4CLw=O*Lq)xavp?~D(2>&d)_}n zEYA6%9sp-1xUqajZ1k?D-$eJIzmh;1-s@MFkUdA;!_;mUxPBiuhO$EpYsQ%ysB+3D zYBrf{6pLsj@D0@k>XrgoO8+ETz?0jZr)4ou360^l#BhJ`_+jW1!e!)I_>OgJ3KfQS zv+=egdw53IPATNlTiQq1%dEda4km1?JAJNTUDvw|rbYHtzEW~IVW_EUFH+M5$W(IQ zNJ&649@ZHXH^zDM!ey;XpG8g_VVNUBj6X1-r?QaM=|PjcAX?%5zCVx5MCO>2kaGlf zqGZ|1AXTY)GZp|N4nMNBw;-{F)fa>^Yi}63-8gU#Fzy=@mCjfCarZL_wzjIo;Rk>F z{IL9i$~Iw4O*8%sS+7qaEP8j&_7+S*7?)AqpaT77Q*zZ_r#VFvN)(2lv5B)I5Fw&N z3!!%wMlkDv#HufgJ+G{b9Yfc^5PPuWL^M@ER?|GLYKPV%ot`Q5_*$5ZtYfLi2%O(1?JrEFd(t#gCzLQP_-P&v6IIIdmDcebXo?V!%Xc%9A z9P;c-#}IiJ9-pNUo;^vzrcH@;<@iqtoV2J+j90SPF@Cw`zlgvL&CFOX#qQ({i$m2u zRl7pB9%A`lM~^`b%Bo0_oqv+7P_G<@POTULA{6msVp&br-hRtd)pq4$xuZ97J-9Oo z9{UMCb+byF{g5mqirm_rRp(O^;#=5{XQR>@rbRENnJaBBaj8N_5)$kJL$pAQ?X zqs^q_^$ExLM^j1f9`)lsoV&JH+3Z8x&Usjl91)I|_p#2b=8xzHUvbcZ7M`S#>f314 z#EC4<>jk1bxk_oNa*6%mfNLDH^80n6ScCr1a2?sV|=X!(GXCa!uk+!bDMFIXvVmdAS*9lSBy zSWX#^q)+W%mN+^ec| z$16E&jNWgo65-yCtNdK&h;>w6{3_P`;o*DC{q1;rs84*x?t?;pf3Gj_-2L^RuBiJg z^@;v}f{&=1+H`gyY8%wba|RTux={2|5zi)ajH3cmDo8;%ZdhnwazDrV;yT6%bJ%>V;}=^n(+B&CXHA5k1Q$d_M(1_qez>nA zga03q12NPkbU6Q5(_W3ai?F=JboJ8+jJUeo`VybN$|!ueU@h-J&Xwpz@WZF29XxN$ z2KOrTavyGt@vXDe%qi#4x1b$}4E5jZ4zs1)1LW=5Aj{SQY3q)E)|-LPBikRcaJ<_h zRM=BHYaa-*q-gb|LFD_SG?e{udY@4_7jdrtU-w;9%<`rxx@8*x1ka4K)FFw<8lLx% zD;aQnC?yy{Z8C6(rH{2KE7E>zDX7^L-}mAmv?<2ZyvM-oVU7YP0PXP1HCJbV%5rJo zUUC!gwhi0Eb6W)Ko1J8BD;FNln9-49t%IQRrVQX*I32Sy_x?nv6*9|buwLht1(&BZ zo0D$^q+3ihNkECuKxNPiLUf6Dznb+!O3hKVG1+X+P|0P;{|*w`>_^gel}=_u4ElqMu6XM77&WRrRgu1G{S7>AdkLXsLkx#<@eU$L!-F;lD#RXJsGv%ttHs z@F(Mx&su|!1QEE)l1z=w6Ewhp z3#W;cvHGf-PxU3XeXk`Kuhy}yFyU3|B*(AxJKYrQX&h|aRVL(T2e2gsinqnZXS0@} zw#?)r)|0sR&DGs6ky34aRxW z4sEnfd5(46l7(lA;KKUrT<>Ka=UIy9llvXD+?&XEWH3E0u_Ogli_6utbh-Ao*v}N# zO1+I4l{i}Iv&fvc4GFd-r!?g*<(6DLM%@wqjh+_4Shr zP>Os=Nvs{$UqJ@aCM0AvQI<{H;WWQlr-m2**7_~;X;L=0qtV8Y%WSuG!v7ZjvHds7 zFOq^wRIj72;;$)*)-d`e4qf2Ykxn>NXl!bHI4{J9dvSHM*`>pA_lvdWn|5G;+y`x2IGF zEy`X*79K44{vM~iyj~^9UrQ8N;I1mCzq{8*n!`E2*6X`=WR=d+QX*Zx1lH6Q2996oPL6jl{>b>k$j|DoLt)-GZgQN;RHip3P2PB%5)E| ze{^ocy1m@H?+bq`+St8A%Ct{hovfv6p#69F#Nt~n5dJB=1ZH=70fi%`p2C}{*FP=r z$ev^1{)l6wX&3$Fgvw?8VNZj&Z6ky3bR6ZmP5 
zzO?D}_orw(L3ZrLe$CP&=_u*fXebKvL3^tno+yGtznwe?;cma)4i7PIjyA?xM@EN& zTn@3%ZdCH$pv-Jz=`-Qe;e|~UO4)O{<^2+cYTH`kdh?x9@N|lx9toG^4nrM!^=sh` zSBGh}GC~~+;(*f4v$`=&O^|^>-pEFwS@5!2{Y>z*go4#^O(DM|X@#5S^3eIm3A}1O zYkFzPx9n~|7n5r7@0^^QtajCc@)gtBkF(u|EBOmsi@avE3cKzh$^^u(o&6#JxX5du zin2qhKVGaaM4=ZZ+A`|ZogBV=I&4lZ){9QRxgZs$8Xv1zAS7Zpv%i3?3+!cd{}>&v z?v1wV%8{X-u;j%Z*b*lzVi^vbJXSm?0H{`Y@?ig%|$_8|A&r@4G2e7 z=&-x=ho6s>Ft;kcM6}e&#C?L__1OyU?|h2?Gd|BBX+$qNmV~;MaS*O26nwk$vBp+!h#~o%hI#XlmmfO1H$H$>3_|`qx#1$^GHZqr_2PJskqlr)pYU){_$u%>j(d89t#;U zRg-FeHH$py)x#YRB~9zyfQY<;%R21d=Aaja9pYLj_3$Ul5fYv+5jHuNb6Ny7uBGsU zl>7e`W^VlvWSdwcezU~}Qm1uMZua)+>nz?S9OhjJG|L2Hz0MNbs4 zD-B_P0LCK;FU_vX*MJzKGKO}1%s5qKrnv0iu;pR2_e-V$Uz&l3hg$h~*xj~h{May8 zWL{(uMX~MScC%0}_G}R0Y)Ip;?BJHIz=Z&Z9xa&&4v((;Z?Mb8o3 zFeKZynBT=8YpVtvcjUlME*A4kaifjYhzNaFwZWswJfqsI(4_{EskENyvPZ@Q*Ry8g zyOxP8TPC?DqT~(#&hGJE1Gx@%77(Dd-$Yc+T84*7ae*uC54M(elY7F*XQ8g(-Ctla z^$?6d2X^K#maGgQxY2i^av{IxFg?rO@00)Pdi_X+hl5BgH5h?T*X5uWVYc`1M+cIw1-}S0FiULbH zTO)C>vdEsGH7;f@vPu(3frlY*{`e+S@0S?O<9y~4ACh$wc$ogjO+$-Y)6b9a02)be ztIUirXCsHrOxc|^$^8MTP^CZ0y{MhpgX<4M_Xkn&s30maOJC>mXYBIQnFL_^Satud z5jAG)_6VxO-)B^es5ALMaL?^Xo1VrZS2R)ORY`%WmUZm2c*Fm!lADl;2LELSLfh1N z6{jX;CKI^0CQ(RK3jF zYsKsCqVR*}J|768sP*@@F|~3%22E z4bOh{n2>l|-QQYRY2v+>e&v0g_UMvI(j)~-JE%udR-w=th;C<(hE{FGz9!hel!;hBIs@^)i)(Gfv{Qe3lW%+f!<~ZM~N=6fikSL%QLcsO!!tWoC zTQMyiIli(xRqOd{q~Kew|JF>yGZ8)+SHkyi&2KokB%c;(Jmo$P853mF8VItc7WC^T z?7^ZF!X{Zo1Z0GgR~vFJR)1vX9siuJ4|Rn;imAU?E=A)6r<>Le&1*ZxB$eu(eW)+Jv|&y8koIxQb9*IINZ?!T1Gv{^ z)+JE0z42rdR7A}e|1fA~&vG1AO(qZM{A(>)x4~>f0QJ)KZ!W#5TeFS14#!;erC^*T^g-|5MN~?uG714)xQ1opXN4zAtWN{;Ak3 zqnKCYWxr}Z7Q4;)`!wVcN81Iy*<3A-y|<9xM!JRNmoS&Eo#N5bUH&BJ8BjU*#MF|b zC~oM$5)mnx8s!QJ^SF{!`kEm&g&W~)N5eEHMK z64prHxn%)xQD19)vt7gd&kNM(TAwf?LSZg^J)GekWN>rmDEwb|pJ7ivn|;iMSlVj| zla%L>j=F~_wJ_uwI-ix=t{8daxYhmRj;s$)Iu`kcsI>;aMlPJwDy@E{p;q}VV%B@E ziC5%J7%`4fN8JDHDs+$YTJoS4#Kkt`k)%dwey1q0YpWSTesZQ}mf8Qr9~0Dk{1mxh z6u{+e`qMEGxpB!xTWF%jJ*i9sYA4Rd#@ZK~L4J^rRZ54QnBu2~Jb&>1>X~LWrnr?h zF{{y-F0l^PeN%+~Yro$9aB44*wz~{i?tMv)GhBwvR?xt+cA*tE{i3$(P)`egQ`q+; z<$8hH9ME!4yG=@Po_*c4R$aOJdTkm!c)pz1Cjd5CJy!J0Osmd&Z|>VrD>js{6SIX5 z>1E6JpV@K9O1+}a4^*6YGx4% z+GbF93_n(%%D9#ke45$l*w=RdmL8(Fd_s+n~nq@S6YHNJQIi|l~K%Fdp+Y0)d& zhuUUo$8g7EI+DK!t`~g&%{<`Ydp(EiT=syUdgBjm_Ln7G%&(Z0xZlY>(LB0yd1OCD z*Zo%O<5Sch6)9gNvWAR{rS7oExO?&^ADg|0`mCDW>9t-P!k-}b&sTVEz^7?PJw3Z7 z)h-+f)NOuW>?9%Gtxt(bjm=-3$tC0#_Ug&s1@u{_6gmn(t+dU*E@dG;R7V=|Cy&$f z!gF~uJ@wr!oQ$W}l7Pi3l}-!rv3KSY?>B2d8HUU5*W%T|LAW0VwtQoxy?%p{?{~9i z3I_JnD-!XTQplzlTMxiyg*M$;U(x8?_o|UbfW~Zn%MYN<1i~RD+Y8Vlsu1T%7_i3e z0%gA^McflpZf3iqVIXoRLNJ_CX^^R0qQF$^F*elO17e~T6rs9vt zjRA%y1aTyGGXPa3VC7Zq{yL^}hFw6FXTau~;y7jG>{0Bn&z|qw@f?Gbl1grG-j5z* zi@NlV`+E=WxRt2#!|zU)J|wZG-x8qGe2yInp1$>?;{Li0NUx=h8EJV>q7BYK<7h(o z@h7jU4zUj_PF`9&<1)io*x!WJzt%kcqGeNXO6`Ts+uOhWtt4%adv_EJI7&@9_P6Y~ zm6X6a>RvbI0*Tqi18mMERcDdrP6qb|2^@D>{FDBHowbZ|eFuM^f|I;!c{fXn@5MXE z%T6UEkQMC2FBAqy?*vd*N$WHp`y|&i4sZ1;{joJeHEVf%WNp^KNXgR{1F2L7jy&2d z6h!+iCz$zTYrg@JS4YU3R=c#Vs1d%DTX%vU6Eker48aXkS{29|=e=wVrd)MRAUArP+M&EpLbwWa7ZF+&e8$<|)((t_dZ3PQdhSRM&^e6|Mr+Cp{W-3^#* zx)ZlkxhymvLeKQsX3H!>2w}^w`fJlzruf-J$FH0!Ne)wz2_6M^+HI4*BnViEpS;x} zT$Qr%fE384oUh3h={t+1y=2s`cJz8wyxacIs)z&&4lrnQJrJ{3TKmPKA9S@u&;96U zy0A;fT~Rgxf53k=|=wJui&NoB~@L;CA3>vFgmyF95biChN&NASdbhV&yy+hf~%^@yX=6_8N2tC{eh8?-&M2e0C7p$ zT2BOn{Eeo3vG`{4)*qa~o&W8(&_UfBF`PJH099|Al$3GaTQ4``T+IYVz+6C8Z$CU; z*r|=M{&mN(?X6R1qQkvcAEa;3`O65t!f2z9Enic-B3-dn!2q=9f~>B>yP*uCc{K-H z>BLen+8E>mitDnuKQ(b7404aSgd8q36wDAgIP6!zuAoa8;WuBLbFMUzUupMOjG#3^ zOMicDtn!TfBQ%4+a^1FkFJdULXA^S?r6!%+*U3vZZ?X*T=8~|!8TTOPLDVA0_sTN| 
zo(9i;x5c^+d~UJxy>*B0*>%qM%ZZ4W__+kc*O&7@B9cT5n|*&Vb8+x14t~)%n|#zf zgAJ#vu?u&!B#t9&mWlhdiT9|a{h`>xcy8;UFQ+miI@$~?u#sGDCkXm)4IaF`^1~_!PpQ~Fz+9>8ah2=vr?6BUP_sc?=MUT{! z5l8moYRt-kR=%{IdX%kyzg#$VM6k2AqULvh+IalRx97eiY>~jJy=UueK6Q2qc}r4)%nd{RpJASvOk+L|R{i{jlWH2tqkI)c zLsy3#y-)PZ(EBRR94OJey21H=Ev%oZErIObDrLqATR zpQ%96`1Ze$Tcg=p+qT7da_^@}vy^>Q_^ukXmo3Q5ra?Pe&T2pOQ;(E60-wUBT^Z@X zKT-46<0#b&g)-Sa1@%nu3+gKVbEpFq2NnWqu#U4bR~`ZMWQu`(MY(Z5x%NSUt6H z{F;gUXSgptOiAK=>y<9M<4O2#!k)NcIyF&Dff`w=+%ywZxE)5E4SIpTI3RgPaHT*t z{pTkD{5HI*xmi+S<#^LwbD^$f3T=;|@m+_U&hOvN32FO9^ z)f$>ysT#;NFTRnT#vm|0Lv3SzxB+lG6Y0No$nZ|(ugox5#~vAnNVMr_u5%)(fVuSm z-#jLBYI_ePV6_V)L9XXEChESa|in;yAL#MP!-bXIY;Jr;y$=EBW6O zsR=vW{xvZeT6tph+!gtAHou7K!qDz)iKM$CH_E-Hn!>hEU}bBbH9* z@90eV?`(Mq%zF#623!m04g~0>yRXC0OepIL6=}6gy3^FI!aTwmT!f(A^LczY6?NOH zib4y*7jTe$p1%8EY~yC><|(h8uBZ~+(-=}}-IQ`O zkyx#{Nm`ku>f*eEOVKl2awz^mhlOupHm-?twf3ZD!-7qqGJbwpumx8iro6OiKK3lI z2vB3tpcA!G@!1gBAIDds@FAjkCXUFfDqv6+J{`R$;W@@AS%(JidQA)(6SN0X+-nz{ z2QpPKl}=itWI5SYjH7D`!Vbw-D42dH3>!PHd%r8gQuFM)OQKalcssANfvJK@lf{#o z8Mg&gUY0Squ*Y7i1Wew0Do>lUS}(E|N4Zf2dc}FC;dTNWgs#PUuZ00{@0Qg;yUypt zF*t-$WX{8wQx51&P!#2)W5aP7clyowG^Uo1*zV!+ zXQzlir39_+PWv4Pn@R}2E_U#%X2rIE9j&dOSwU9m;YY=1)U zNfCq7ek)mD3fV^R8KQk`T>LPn;zH!5Q8IqiP6j%Reh6p5yy!f=C{ z|K1#kll}Im=Afa`{pt=qH)GJMCn995*z&vmM@Cc>*Mr{v6L9{3Fr+2eEwRgJm^_4b z-DORc0?k#NH&w!#50)Qqkx}fT@&Md zNqegYo71Ii{LWg|8)eiU+e2VDRqnIhmK2Ap@DG_ORnYGI>chAtt{c3MUV7A3vZ8-} zkJWe|);k&@M`z^5Uq_@lcavOAw)NsOyIQR)ZKs=q>X-u7G97i&dEPD2G8i$p0+y`Q zQpKiEg=XKdS98&SFP$$j^20Ix#`y;`$M5RC>8k;~0_q&9L8AjR)z!bX;r#B^c}>cM z?5*^}6l|=evN=niFnK4?74EQ-F2@6K3Z@Un{mOjreLcO{^|r<70zQ9FS^cK}b{dLu z6!d4SmS-68Wt=NEykVnF7;WHT84@sS{LnKwtlFb{D6lu)b9?5kay!{VY_LAGImkm* ztrnC({Dk>5Q`XUW6(+e(U3*=Q&@vMxS;Z)OvNTirr)}#K)*C%LwvkzSQ?>TP%Sq8; zQ=9oN$WLi~Nx6()A&k;KYBS>EqLt>ikbMsdYa~lAF#GrNaw`Ym5?Vms%W`!%pSU$E z4wQfGzsD(J@S|NHE2F_$c#KDIO$kEUV>BC`v^bkkkc)sf6OW!tTiA9hONyM6eb)D* z`N?*-NS3|M6@}+-`i@y%#nm}?Uxbv_%vE$rKFddI-|Na?lAdDJehV4pR@J9e+Jz5O zu3<;6t6YZNovuPWW$&!~1PG_)X(w%4331<9CJ4wTnx?^F`|3ZebSDJ7V2muqxBCUyT~e56L1Aufc- z&9SG;GYaATa+rf+V1NhA*u*+Hb80v0np_-X_uhQAhoPajjg#H7vS45-pxz&;Gz$wI zT+g3GPT%Gqbl>@rn8mSn*nTCG>!rJLlTKxKt**qTaro>jd02GNB#rhJk4vP`M!JFe zAn@k<)p8rS;<`2MuLga7k*v{ zC#@DTN&pYg?&ab?!Q#qcp5X0JbVql6+5)GD9b~>{(#!s3VOK4lLv@>}i3?E9bF7*~ zTCea^j^Lv%#l~?-?9NJ^yS)E`fZTiY`ONv;%TFS2Y|nx5t9T9R{Bf^d>y6VUKaaH} zGbN%`@J%;LHRE|A3L_2T8^FXZ>spVTkIouY?erSM4#$=SkL94b8$}j z-_H$-(!-B8{-_?mG2dVxq{#9^tnY-YVsJra9cZ3-I$&kc0ox2&X$g}?=j*2ZX5$Uj zThES?Wxm#?_QQmzLdVsgG|zypp2S7-_S`hH2w}2Pkdi%q_v_n%;4oC<*%rSaqHOA; zi?1yI1nGQ`#I;`p{CWkQiVx?=^f9@H9j(iEAP`hA=|c^g_#Q*^@I^Sc01}1uKdEUU zz7v+qdRAG%`c$OgWYAbT24ld}^vtVr=PcgRn z!sxVmI=NSn{zRT0iLVt`D0Z1Vn)Q_8pYSm5-9_qL?xCT=gTyW75haEk;=tg&v+W}0 z;tJJ9{UE6^a*=oL$xJ?}5JdY&b#H*11qztK+$zR(m>sy}LbVQ6@$NckxSsvPhH`R4q`4-uVbJ(MaV;6La0 zxvN7N3(sS9z;4&bJHtCuL zSxT|kp1P_Y%9F{!l67Gi;wr3$Xkt7V~jDHDZ9F=j^b<~te3^^x(@Jv{{1Dm zGw~_|qVD@mR=K3zwz}SES?PE(gpwN7y3f!!vgn7M8$npbf+c^>=~Vi5ZHVsN+MWD; zf{9&;Vz%C|t^J;G`LKD{idws=;?~nZ@-_djU&!xbb#FRbyD&Q8RUH>L*AS3O`}!yGi(@H;t_?>Mu_Zx*cr4n6U&?& z+utkvJ)w#NKsgVqc|J>Gq|r=<`1FU<2W_dk59cv555s!4YsXW%r{-r~j3l7Qvv?W- zz5d-yYS*rjRaV*D=I*zpKK8MK`U>?@?G!&Lb(LzL7(?MEnEiR#cjV%>kpLd$>oP z3f-KlWfZLM^z#HG?TZ*i;yFQe{=#F7j^Ue57SW{k_fLuSC@O8rvv(2VtFykFG;NkX zUAon@Ra+QTqJ-2>040crkAK@OM662+>~y5zp;nFCxhIpbnL~x^@my@`9@F+l^l?HX zhF-KD7snz8l&&IJIDPm2Bwu7R5(3&nAIx8^p9@={m!oJ|9)ffKQPtjFiFk|70stix zE!EimAGY2)E~>739|jRYX;6?FP+CCJ0qF*%1SOK1x(Fq(LC0CymQOW>-T*&yi1$n;K+(G!(_?aebTR=qT{JnZU zN%O>Ej%%ikYIe3x$LOf`u6CncGb%Y1y1TjcE69-+Z}-UdQDd^K=sUWvrly+_F?djY 
z!Geix4MXkNP3U=r(c-nGz?0l#SILCk23n{0E4{Q660Ny#iVMbBh#%Mi`k`2r&%D<{ zEMQ^40_fYf`yFnBG+ln^6^pW;LwmsKk*LRd)!pU}uu?S#g91Zbx>Y6ss{tt_z9uEH z(8<1J;6eAME7EshR#jQu#(3*>K&-^-thIJARdHjPV6FVe^jOX>_PUk<`BbT`+oP$K zZ8a$PMp@tPv8{c!R(^A!M8WQ>TNP9!Hf?TlsY438jwdGq@-B;9>5?naZ%B~t$G>dd z82-}RV9Nj?AD~C4kqiO44faoK6*pjDE_VTvcDz!Y-S3Y!LtD!(lF*C%v_tELo7smD zRo}nT_2}lK{>~^eCJ=vV7OQ1`v;6)$KJaWIe#%I{Y?=vJ`0>yBD+I?vXI1*KPk)uE z*cm!2L?Q%4Co`w&&wfb2b%;0HeJncMU*kEYiFlp%NMv`+eXo_~rXtzg`5Xn|Hs;Ds z`jr*A*>=1~d*(U|7bl7~Mj zPBWok&_WIfx1}$c?NE|Ot)xO9=Wet>>Wy~QitkYzRb~o4KmFN-4B&M^uaA%(@vxE@iM>63Bh*q?y5%>D zv;Wl{eSUTji9BCIo@<-qM*O2DPa$Zh6VS?s75;UAI8+m)HLg?j6z2)9} zZdX?OEOD3F)-B{9O})=iRCxZe%G%UB(tDqj3F`BeOuQ1(^OGonA4TH`1@b-{!^@vi^Lk*C%>9mho8J+7W)KyLuKS*x2d?B{0 z8#n9JCa)C<+RR`m|FDFY*Z%$MMJjx0Fi~7_FOp@3AwX&Adt?IMDe4wd>=W+@AT^&2 zr2Gm#-W#;r! zoQL`wVX|pdH>FQRv<4&uF8TcY6kH&C`oz%lvncH1DiloQBs~lMT4&26`Jmqkll|Gx z&#xQccw|Qw#l@2X&Y%#SL<}o)C`0QVysQz5k)J3P;cUDki^fAN0MjYZGBQq+bYD%9 zgpu6Yc@@d>aD$qa)q6P?k0I=ful*Sa;+pUzD(9yzBq!`%8I)s3NpQ+MIs4k=8el(9<}(Ej(>XZ4@m`-!; zL;k!za`)IITTkhV__7Yc$Q1!Xi31P^82Rr`$VPtfzYVYmf57e33In`w2~eZ9@?JR* zeQw<P_wt{=iBfMa9r=$+IWU-#m@&4>c$|kb$db+OV~@ zHf73g?2o5b=Q)(Gq}Am@<$CbS;=<_szJCY@fuXR=&nP|OL9l_H{hX&Q$!IUxM1BTY z%GzWF?TWUI>E?XffogdAK%Pp@<6^ZMC18LS)^ao4J>RgI=XbzJ0lYZ_3ATG{FSp^;cH>m@(Rt?FXmowev^#=?#dlL~C~!5fOh~Td-5c}-pVecM z=CRGRn|P09NEAAIs@N^@X=)~-)JbxUpoAZQ-=C~D?=$by^!P;)1?VB^IpL{-=N z4Bz?1Qwu1F0GwOQZTV4&fJn>@PS$f3dK%4$Ge7~2tV_4q;}55(xeBf%<`xLhRpmnD zI#FJ?(`uWpWv0D)Y5&V`b<1FQ43Ir&#vH6P#VhgKFP}>d$1hF4)#4P~xkG1bvo=Qt zyp)Yx`J5af#2T0@9*_am2EwMQ*I$_CC8rBKv;X?OS}OPi|4qAjAF(#}k81BtJM?NB z9Z_7Sn7+;Ljmn1XCj-Pp3hVT|o21(AzUNqwLOf}l)6rp*ElDEi0RFR?ds^-3Ehxph z>eY7b*#fLn7T-V~bF+%NeL9gb|0C?3oLI=XQXxe9$Wrh+66piG7b(T1S62d7uu8pt zy&J$fEp97$l!SYmNRkI1QPvES0j)-DKBEeJFDWaN>MZqZr4TnM9fB^|9B<%@1MUSe zt=%Xa(xg?ctAR4Peas&zqwn!{|6TDN_2Jbds0w{3&N{9oN^dS2J9x*WqHcEs6q#prMM=K5`3O%Y1@6KbpM9m z6bgS*ZP!Ak#%t>bKn}Boon^R>dFQ3MZ|2lu@W3u=Mar#`wUMVC0se%-5-&q$^+<#6Jsp3HGc7njou zpW{o=qJcQOZi zcH20gTH{V_ei@}IEs*|HdDg#q36ZAp@hQQe6N^1kQo;A44li4h?&M%u>OO(7ghF=A z&sscw$+QIkz^?W-a9(*Kul6mHK8T+Dkb+r_3S^3UB3iv&b3<@R!A!r~ofdP0KsOKZ zOvR^A6SVhf-iWzY_YX5n1_UZ_pPvXI)ZOOl&!@u=*VM(woOrGcM|0&~RMmzA)Ku7Fw>-aA9_I&a&_L+Ldhv3hcvH5aYH}n{{ccxI8YPnR87)vetTX`f2 zgZ98@l{DU55DtQcSEbyF(gK(GUb(%rM?@niMI^En1@$SXd3u?je=q9Ptiml1$T|3! 
zQG%CUrP8^t6ZK?2K;r6W?yYKh)@{1g18@8wRo3R#51P#q63D{-%pj~%t(lRwE$kVeRGYAe!J{nX6|4LVT zP#x~L)TBFhCu-UP(2+G!TI^EPo!_?CRUR2vo5{~`Os-;r8FGT8CfcYTyH@+N5rcT8 zlHHfIKR`s{AaU64y?ZzAie%<-)01S<-8 zUjP_6uFCwhrqid;bs&YC_t}a;yI+%=8TX6A*h|Zs=>k^63fTSMhIOx>CKI_=&xgOc zuf(SCS`ARp-929kh6S$k*4$^M(M@_N#xY0-(|Kn=O~iqRjrtY)i(Br}p@c?-+FH||-a1o#NNYD74efUT^P~^ zd_JXyL$>W(Daw2*N4@*)-tOcV(8!;mbRYlG5QhK(_gC6^p$ok-Iq1i?Fb_I*VScal z9P@ntelOoag2$}sdUt`7*I%)&a6c;huJ-jm!)$f>qu>P}Dqjzz`TOCY#D)CL_^e+R zm43nPNk&2n=2j<(5n+J401ZQ>K~o+kf+Hn=0lw0nXk7H?D_c#_K!A~}OB@!MeS87b zH2x`EPoIxI>IgUmIP2Sqbb7Hh z`e*PA;CPRIH6n+-!EJ{mj@MyM?THzJqoC2(OG&_I)vvSOHF*DfRGG)r2jEz9fXQ*o z67_Thmi+t(C|?$yD2jk7Ody0eDeF8W5%z+|h`LkSi&RcFT`loGgGkaHuISXShay7; zP0+uqfS_7G6U5RJ)D?qS;-Vg*0wSO&Ibbl2@A(WM_GL`9bn2XI;XHbE_>)E;?+0jM z$=yhx$0-ExS{gJieid^(@bgz4+IEx3hFg+B-&CB=vo6H-=lOmrfg(#xyYm&>;#du4 z-d=?u{GO?*RKm}JO6SI%C!bQdXHjsNR;na;AYRiCVQkrc%thu(0Tm7jM2x*GWQbX_$`ojOc&|Tu*Gveun&+6R&_s#G0$3~Iht@Emc`k$Z zq_iF@ql;6<;vK5sKz&lIMz8WYBbhE3><5Rr=6cP-hb#}1rrUfye~}Uji2BV(J;q=X(Fc^Kw*d7)Z-Gr zz3&2_f8tjg*j0pP?`6Dst!yWV|BIcg+A_afqa|ICX(1Y}u5PN)K-2MKClIzSQ@sAf zsbpYByAY4A2U2m#VeehlAr9VNJmlU($`-c`pc`z8GDwGJKcyHuMt!_@s~iZ4Gz=xA zY5+@mO%=dRjB*racfN=?&5Lo69{X*5q*6}diiU>Wh#@@!$|8t_pBKP_L3r>g1fW&$ zY)J+fmwp2wBwm`A2HN$BJ*eKCqC8W;idgKPdX|_jZq9BR+mY-SX3gi0LDDtP;H&#t zx0PIhfcv*w9E0TDb39pCb%}rhwWOJDQgLYie4DSiS=xQdsdt7kS}tK_!X{zQ6}s_t zIiV!UebU_{CB_{RGyvV}LMNNY2)y|aPOy-6egIUEAe1M47DMD5aGQ;sN#SK@MC&>5 zEK%R=&{*#X*?RB1ubHmhkdu94^pcJ?pc-w)2%Abh{Mg`5d#W1)y^^FhL#6#$`^WNR z+)kmV*j|>ej1x;OKY^QQ2DCC+T&Y}|bO1$$e_HyuxIj^20sA_`sV3Iz6tzSzF2}F$ zHSYd4ZU?0!Q2~ZFMw?Ta0ewBfY2-e}%8t>r+Gx^GVbkiq^2>G}8Kg{GyfF?EJ-#aGlBLWosU znf$tmgxogg4XG|u6U7w47Z^=SJdyr7m+9ki@Q8a5;RIBxu2}cKlmI=5=oeQDx-@?r@ecSd#Zs1~sEtR}-gD^^ys z)tic>96D>S5^0ov1Kj^fYOuEh#OgY?UfrT6*&#U5L^wLW;4T9Ym|ajRu1;=HUsuF=2Rv$*wLmK7cFlF(q!b5uXXK z#is<(H~Onv2SCs=uy~jtz}NM(3)DGH@uYjsdoQie)Y=;IyLi*Wj@z6dZ;8IAaF~+a zE1}~v3$rgI$q>uDLPW#0`wZ|qM~K$VL(QC#pu?91u}nEvR29xRu&C*ZrG$|{GImAb z_5JQ`XP?m5bp9mb;LQDDht>GH(2fY~rDSd!yr?iiu7G8rjCP`i& zEhO+GYd;>~=_oP6aklQz>Deu`G|q1Du^6fG?wCTs?LJTM=E=vrpyIhZ8zJ!e2_w>m z$uqC)E*^5TE>R#GRCsJoSL4J2!Q?kOoYQT$oxE`Gl)PrTOR-n{9OgGaUjgw0odUad z`7}5owo&jHCZ@p2uVp%wS1DKzvJNq6n*|q4aiK%iqibO#g2>9QL&tzxDyLp(ji#<+ z+I!^5^Klby#51cLXm6bw;b1*j*tK+%?P@T?Kzs|PG~_tM{DyiDex?2b-{ve#rG-fZcwy@gS*O05Qtgr7sE)pjLf2oOQ+&T)WnK~5n|7g?Wo zE}tVaty`q)7>hT1jUEJ^CHkf;5Kq7J!t39+2m(Bx?! zLuR2p+Q8b~bDIy$a_4!7p^R0OZ>zGF}R-8KTUdp3k7LO{=A5OS+uz|jk&!Wz_BS1G9oM@jmin9Y=RLFc` zc9%+WmIyM%&XTy&2O0U2E!pAjpnv!0Qh)NmNsB&RC&%NW9OEqFqS()XInZw)vOsrk zT6c>199b~=?yUpn@0Gb_9INLz)~Oa$n5er-=<9PFIb&N42%-|tX%D$nQMBM^{?fX! 
z{@NbbC8EzMkjKLpPd<_Wub)(Y#KRQTpRah?@J@AdC7j76BbesNwy498*Sf|qDxlIE zZ*($FW9cEgt6OJ1V%i22tgoA6^CxS>Fr&n?YQBul~;^ zzJXfdeO3^rR~mP;gN1HCZ`IJlw^Avm=ePI-(&Pyj_D3^-s6Hb8HQOlZ-j5y<9db>$ zgvpkeSuq!i-mUj$-vw@-sb)1HjLsD(u1lxsp-=0q6YC!PU%9l~*I*lr$*$5bVR9eL z*xC8N(u5G4_x=Du0;180XR2c)G@xlaHh}9x)(DO&*so3YA^dgR_ZoXI`{=SC;4%1p zBQ$1U%C`qqF^FSJ3V@xl4^w@^9YY1bA^$|Uc!I5u5p(Oo*1Q8ww)(Q@Mb%X|aX-0^ zs}tE3Dn{H}k^;jer2>s?zrw&4iEQ{_cB}l6enmyqG$`g+&@iAD*JZi#67oc^?Ac)2 zcl+Y1Ub$-bOe15MP^UnVojEi=d(D@N;eK+WXjKzplJbNXj!Ca_feY)#M`Z6*7BihGKoVcAe-WVb-sKqDJF%+Di0?Ajc_1g|TnkL~d(jZR@Q|`W$j6i8HQM{f@hdMdL$>&hM z#0*$H!sV^l@drO%*2#w?V9YWG1ei?(_Fij>4Wnj@%i_b+Honu=P3cB^h?Ga6<~|m< zG`@vy(~Ed%3r@qAeISO2j?0(wxTeB06^f=#{^F zIz6Zkabo`9fi_9E9B1o4sjuLS-}h$j>=lWV&or^_>V2Jpj&C%3dB2B&D`RHuGMg&Y zxYa*!+OIlg&OM&rGY~W^R_eCB!zgFz{!eQRv8 z1*!1^-ceh0Q%sRTnz}OMmLmPLp{xo~8}wBOzyg%gF#I61DY!XX+mRZ`%O&P*gElnw zKOHk2_@fiN4w}VQzkGvA`iz%=!i$Cb%bZ8;;?uu7gHMGYNDWozD^|15_J78K@Re`$ z%x9YasBhX?Zadxxi8ch_K>{y^EmivPiWN3=d}J`~ry2jbSuwpu5_B?mqvj`)#GJM- zpNiD?tVP6W+3cFIfAYzXAgaFgb9ObUfiOz3+Iz=Z+PKODo90rWeQ@jWyQMAr{6SN-3y#%rrAMa(b=R~f&HS_`O^!ZQju`+8pt zZ^Ul=(_`|VuUhMGA=sgw!H{tP*YB)7B=fwNf&==n{=!@dcurFpKx`+3u~a~_8@+7G z*hUY01cpN(Jt&r8yQX8@d!2fA4pFA^NcX}9@3p*^N+fJVwEif4IbsTR^(rte;L*5j zY7X<@BI0BIUWr#6+~b=S=NUjC5h*)h=`@Ga*1&8e*F%!&uleL4fdGXPisa=%BFDWNW)tPE6`pNMGUxkvvbOuUU4n)HedcX@4 z-V0T?IAuZAze;`DH$9MH$Ua8NBL|2iO6no;wwc&rL#+o#Kw1{QX;gQ}C^hpD;ruhz zv5j$GBKzeo)K<+Ny4Gsx7V7 zRZg_@4No2sCs$JFE@Fp~&^R2Wa7=pLK5D1oYXwN43~4nbr1UQ1(w7Q?N9De9EY3cR z;sJ$^N%%p{9$JldXF$)*toSBn=-QTQ*FDc>zeATf%4or<)vKR+dgJJ0)YhlQRom9; zK+C+Z51jUnKLSztVVXeddCYCR9=(_bwA_Ti$452zKOY=AItCLJLMmy932cAr=3)iL0kLC<0u@Og>$?_b-Ojw5BJUy4kD|4=tJ>uXAzK@;57OhzRO3nUmWg>C} z|1;giq3pj210vPqHDcgN7(hfEFL_p?9oE$XN&{=U?JC>$0DMw9FIeS-Z09rY{e+ih zT(w%!IIs8qIq=W`PjpSA=zIcnLj{x}ArBc_7&W@;p^^u$AL}6(W=h>-}{i0~2+xua=D5xyl>0Cuy>h(NwZa9B(bUCVu=%R8Vl)d2glu@ymR9 zig+JaM)hm)lRa+f_qqkixnX_w^3S(aCImba&h3KL#s_M(Iq817^JSMJ;g(cV40LuH z@mM=+8=js(s=}v8`rB~ z_payUO2=}*b?ut@8$L8N_t8-%=Xs*i+A|3JJZ*{X)JUwQ>sTWogf&(}kn8&V8I)lA z>5ePP(y%T|V`qf~d{-H?TJ>~|^*#xne{i%L-Ia~ypZY-*87buA^}8QHm!RIyLOK_NJr{kcb%(! zNg;#iHhp#h@f6}jk~W~SLnB})K2?74Rvd8rDPO)xWd+scnjLL;ynh1fB0iQp%9$;y z%QbGH@##w%s5eO3&CBHKHi#qp0C9!{pg?CYX=-k|>O5WH3sqgG66@mD2Dzt$=Ud$uxF>X-A>TPD%i^bh(hLSjEi@3kl zskJr|5iMaZ0f`V*v#&kBst;jzwkz3218nBo;SHK5U(Xy0Y<9JYp#D1Ku zJ59zPT8}RgaR>gs<1kt@vEpEvyqp2SOB*G$dCgDZCgMwckO{RG4Tn@|_u{26b6uMAUsb$g7RM9&k3yxi!WUSVQ;mv^c^k&h%%B29|Hb|uN#Pe_Ol zc{R9wtXE7<+5^wxL~4_C4(E}Lxa!U)RG7uXi#0dehXZS!J7OmNQ0I-sM#-T_8-H}v z$S#V|#QQ=K$d6a)l8x4&xo|wEA$N#c?DI(xol2@mmF|2X# zrZpAwjrWr%^~{djjRR|8mD_SbmOl(WoM@iRkNw93^d zMlBMz(n&DMOTmIay8TDM*~uYD%S*#~gSKA?=HE)~#eQ zww0^#cYrJmRCQL`!f?z;2@CP2c9%y2g*lg<%lW*ERj-5DCJ2?hKe0MzF8Fa=+TV?& zTDbOhkJg0~qu+#OujNqEB?qomGy;C(T}ro#p^K9?O%MJKmij|bN_+p-A3QP~m~{I=DhLM_x1#ny{V6ERyZehbc)dUK&XL{i+)vNVR<6HR==eIx53G2^@^K9| zrMozED)kqsAzwO13FP$?I3El=@oGH` zBvE>DAy<4LI?{NEU1{+PaUxOn+Eg=9*cb+MQN9XKyk9y0eHQQy2ei}7(RXfbw(f%4 zD=r{_cMin_y3D`iW>CtKnWs}uj#9%_+PI&WHS+How{ua5`0QANJY}TGZt5-<=#xR? zAO>Uz*5=%{1@?REGc`979zHDtLF7k9pOa)xos2T9$)gRLg-?0&Os|ci&p{U1a}nkz z)PX(_m706GM_LSWMu9AAKeifU;Q7JwD~iGO(Kj?~OPVG^52H;7@(!9EkAGc>Yn!XTejz(3fh_C{_?{!B~Lh(;y~*B-N=W_k7! 
diff --git a/dotnet/samples/Demos/ModelContextProtocol/McpDotNetExtensions.cs b/dotnet/samples/Demos/ModelContextProtocol/McpDotNetExtensions.cs deleted file mode 100644 index d8814bdcd695..000000000000 --- a/dotnet/samples/Demos/ModelContextProtocol/McpDotNetExtensions.cs +++ /dev/null @@ -1,159 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using McpDotNet.Client; -using McpDotNet.Configuration; -using McpDotNet.Protocol.Types; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel; - -namespace ModelContextProtocol; - -/// -/// Extension methods for McpDotNet -/// -internal static class McpDotNetExtensions -{ - /// - /// Retrieve an instance configured to connect to a GitHub server running on stdio. - /// - internal static async Task GetGitHubToolsAsync() - { - McpClientOptions options = new() - { - ClientInfo = new() { Name = "GitHub", Version = "1.0.0" } - }; - - var config = new McpServerConfig - { - Id = "github", - Name = "GitHub", - TransportType = "stdio", - TransportOptions = new Dictionary - { - ["command"] = "npx", - ["arguments"] = "-y @modelcontextprotocol/server-github", - } - }; - - var factory = new McpClientFactory( - [config], - options, - NullLoggerFactory.Instance - ); - - return await factory.GetClientAsync(config.Id).ConfigureAwait(false); - } - - /// - /// Map the tools exposed on this to a collection of instances for use with the Semantic Kernel.
- /// - internal static async Task> MapToFunctionsAsync(this IMcpClient mcpClient) - { - var tools = await mcpClient.ListToolsAsync().ConfigureAwait(false); - return tools.Tools.Select(t => t.ToKernelFunction(mcpClient)).ToList(); - } - - #region private - private static KernelFunction ToKernelFunction(this Tool tool, IMcpClient mcpClient) - { - async Task InvokeToolAsync(Kernel kernel, KernelFunction function, KernelArguments arguments, CancellationToken cancellationToken) - { - try - { - // Convert arguments to dictionary format expected by mcpdotnet - Dictionary mcpArguments = []; - foreach (var arg in arguments) - { - if (arg.Value is not null) - { - mcpArguments[arg.Key] = function.ToArgumentValue(arg.Key, arg.Value); - } - } - - // Call the tool through mcpdotnet - var result = await mcpClient.CallToolAsync( - tool.Name, - mcpArguments, - cancellationToken: cancellationToken - ).ConfigureAwait(false); - - // Extract the text content from the result - return string.Join("\n", result.Content - .Where(c => c.Type == "text") - .Select(c => c.Text)); - } - catch (Exception ex) - { - Console.Error.WriteLine($"Error invoking tool '{tool.Name}': {ex.Message}"); - - // Rethrowing to allow the kernel to handle the exception - throw; - } - } - - return KernelFunctionFactory.CreateFromMethod( - method: InvokeToolAsync, - functionName: tool.Name, - description: tool.Description, - parameters: tool.ToParameters(), - returnParameter: ToReturnParameter() - ); - } - - private static object ToArgumentValue(this KernelFunction function, string name, object value) - { - var parameter = function.Metadata.Parameters.FirstOrDefault(p => p.Name == name); - return parameter?.ParameterType switch - { - Type t when Nullable.GetUnderlyingType(t) == typeof(int) => Convert.ToInt32(value), - Type t when Nullable.GetUnderlyingType(t) == typeof(double) => Convert.ToDouble(value), - Type t when Nullable.GetUnderlyingType(t) == typeof(bool) => Convert.ToBoolean(value), - Type t when t == typeof(List) => (value as IEnumerable)?.ToList(), - Type t when t == typeof(Dictionary) => (value as Dictionary)?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value), - _ => value, - } ?? value; - } - - private static List? ToParameters(this Tool tool) - { - var inputSchema = tool.InputSchema; - var properties = inputSchema?.Properties; - if (properties == null) - { - return null; - } - - HashSet requiredProperties = new(inputSchema!.Required ?? []); - return properties.Select(kvp => - new KernelParameterMetadata(kvp.Key) - { - Description = kvp.Value.Description, - ParameterType = ConvertParameterDataType(kvp.Value, requiredProperties.Contains(kvp.Key)), - IsRequired = requiredProperties.Contains(kvp.Key) - }).ToList(); - } - - private static KernelReturnParameterMetadata? ToReturnParameter() - { - return new KernelReturnParameterMetadata() - { - ParameterType = typeof(string), - }; - } - private static Type ConvertParameterDataType(JsonSchemaProperty property, bool required) - { - var type = property.Type switch - { - "string" => typeof(string), - "integer" => typeof(int), - "number" => typeof(double), - "boolean" => typeof(bool), - "array" => typeof(List), - "object" => typeof(Dictionary), - _ => typeof(object) - }; - - return !required && type.IsValueType ? 
typeof(Nullable<>).MakeGenericType(type) : type; - } - #endregion -} diff --git a/dotnet/samples/Demos/ModelContextProtocol/ModelContextProtocol.csproj b/dotnet/samples/Demos/ModelContextProtocol/ModelContextProtocol.csproj deleted file mode 100644 index d509495b6882..000000000000 --- a/dotnet/samples/Demos/ModelContextProtocol/ModelContextProtocol.csproj +++ /dev/null @@ -1,33 +0,0 @@ - - - - Exe - net8.0 - enable - enable - 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 - $(NoWarn);CA2249;CS0612 - - - - - - - - - - - - - - Always - - - - - - - - - - diff --git a/dotnet/samples/Demos/ModelContextProtocol/Program.cs b/dotnet/samples/Demos/ModelContextProtocol/Program.cs deleted file mode 100644 index f8c7c205c4e7..000000000000 --- a/dotnet/samples/Demos/ModelContextProtocol/Program.cs +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using ModelContextProtocol; - -var config = new ConfigurationBuilder() - .AddUserSecrets() - .AddEnvironmentVariables() - .Build(); - -// Prepare and build kernel -var builder = Kernel.CreateBuilder(); -builder.Services.AddLogging(c => c.AddDebug().SetMinimumLevel(Microsoft.Extensions.Logging.LogLevel.Trace)); - -if (config["OpenAI:ApiKey"] is not null) -{ - builder.Services.AddOpenAIChatCompletion( - serviceId: "openai", - modelId: config["OpenAI:ChatModelId"] ?? "gpt-4o", - apiKey: config["OpenAI:ApiKey"]!); -} -else -{ - Console.Error.WriteLine("Please provide a valid OpenAI:ApiKey to run this sample. See the associated README.md for more details."); - return; -} - -Kernel kernel = builder.Build(); - -// Add the MCP simple tools as Kernel functions -var mcpClient = await McpDotNetExtensions.GetGitHubToolsAsync().ConfigureAwait(false); -var functions = await mcpClient.MapToFunctionsAsync().ConfigureAwait(false); - -foreach (var function in functions) -{ - Console.WriteLine($"{function.Name}: {function.Description}"); -} - -kernel.Plugins.AddFromFunctions("GitHub", functions); - -// Enable automatic function calling -var executionSettings = new OpenAIPromptExecutionSettings -{ - Temperature = 0, - FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() -}; - -// Test using GitHub tools -var prompt = "Summarize the last four commits to the microsoft/semantic-kernel repository?"; -var result = await kernel.InvokePromptAsync(prompt, new(executionSettings)).ConfigureAwait(false); -Console.WriteLine($"\n\n{prompt}\n{result}"); diff --git a/dotnet/samples/Demos/ModelContextProtocol/README.md b/dotnet/samples/Demos/ModelContextProtocol/README.md deleted file mode 100644 index efbb914eaa9a..000000000000 --- a/dotnet/samples/Demos/ModelContextProtocol/README.md +++ /dev/null @@ -1,44 +0,0 @@ -# Model Context Protocol Sample - -This example demonstrates how to use Model Context Protocol tools with Semantic Kernel. - -MCP is an open protocol that standardizes how applications provide context to LLMs. - -For more information on Model Context Protocol (MCP), please refer to the [documentation](https://modelcontextprotocol.io/introduction). - -This sample uses [mcpdotnet](https://www.nuget.org/packages/mcpdotnet) and was heavily influenced by the [samples](https://github.com/PederHP/mcpdotnet/tree/main/samples) from that repository. - -The sample shows: - -1.
How to connect to an MCP Server using [mcpdotnet](https://www.nuget.org/packages/mcpdotnet) -2. Retrieve the list of tools the MCP Server makes available -3. Convert the MCP tools to Semantic Kernel functions so they can be added to a Kernel instance -4. Invoke the tools from Semantic Kernel using function calling - -## Configuring Secrets - -The example requires credentials to access OpenAI. - -If you have set up those credentials as secrets within Secret Manager or through environment variables for other samples from the solution in which this project is found, they will be re-used. - -### To set your secrets with Secret Manager: - -```text -cd dotnet/samples/Demos/ModelContextProtocol - -dotnet user-secrets init - -dotnet user-secrets set "OpenAI:ChatModelId" "..." -dotnet user-secrets set "OpenAI:ApiKey" "..." - "..." -``` - -### To set your secrets with environment variables - -Use these names: - -```text -# OpenAI -OpenAI__ChatModelId -OpenAI__ApiKey -``` diff --git a/dotnet/samples/Demos/ModelContextProtocol/SimpleToolsConsole.json b/dotnet/samples/Demos/ModelContextProtocol/SimpleToolsConsole.json deleted file mode 100644 index d6491818e538..000000000000 --- a/dotnet/samples/Demos/ModelContextProtocol/SimpleToolsConsole.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "Options": { - "ClientInfo": { - "Name": "SimpleToolsConsole", - "Version": "1.0.0" - } - }, - "Config": { - "Id": "everything", - "Name": "Everything", - "TransportType": "stdio", - "TransportOptions": { - "command": "npx", - "arguments": "-y @modelcontextprotocol/server-everything" - } - } -} \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/ProcessFramework.Aspire.AppHost.csproj b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/ProcessFramework.Aspire.AppHost.csproj deleted file mode 100644 index 4c8cfe4b3363..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/ProcessFramework.Aspire.AppHost.csproj +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - Exe - net8.0 - LatestMajor - enable - enable - true - 61efcc24-41eb-4a92-8ebe-64de14ed54dd - $(NoWarn);CS1591 - - - - - - - - - - false - - - - - - - \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/Program.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/Program.cs deleted file mode 100644 index d286b93ccf92..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/Program.cs +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved.
- -var builder = DistributedApplication.CreateBuilder(args); - -var openai = builder.AddConnectionString("openAiConnectionName"); - -var translateAgent = builder.AddProject("translatoragent") - .WithReference(openai); - -var summaryAgent = builder.AddProject("summaryagent") - .WithReference(openai); - -var processOrchestrator = builder.AddProject("processorchestrator") - .WithReference(translateAgent) - .WithReference(summaryAgent); - -builder.Build().Run(); diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/appsettings.json b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/appsettings.json deleted file mode 100644 index ef4d177c24ec..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost/appsettings.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning", - "Aspire.Hosting.Dcp": "Warning" - } - }, - "ConnectionStrings": { - "openAiConnectionName": "https://{account_name}.openai.azure.com/" - } -} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Models/ProcessEvents.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Models/ProcessEvents.cs deleted file mode 100644 index 37fb0b772391..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Models/ProcessEvents.cs +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace ProcessFramework.Aspire.ProcessOrchestrator.Models; - -public static class ProcessEvents -{ - public static readonly string TranslateDocument = nameof(TranslateDocument); - public static readonly string DocumentTranslated = nameof(DocumentTranslated); - public static readonly string SummarizeDocument = nameof(SummarizeDocument); - public static readonly string DocumentSummarized = nameof(DocumentSummarized); -} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.csproj b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.csproj deleted file mode 100644 index 846843bdca9e..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.csproj +++ /dev/null @@ -1,28 +0,0 @@ - - - - net8.0 - LatestMajor - enable - enable - - $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0101,SKEXP0110,OPENAI001 - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.http b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.http deleted file mode 100644 index a29192f4d381..000000000000 --- 
a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.http +++ /dev/null @@ -1,5 +0,0 @@ -GET https://localhost:7207/api/processdoc -Accept: application/json - -### - \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Program.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Program.cs deleted file mode 100644 index 0dac1b69d041..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Program.cs +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using OpenTelemetry; -using OpenTelemetry.Exporter; -using OpenTelemetry.Logs; -using OpenTelemetry.Metrics; -using OpenTelemetry.Trace; -using ProcessFramework.Aspire.ProcessOrchestrator; -using ProcessFramework.Aspire.ProcessOrchestrator.Models; -using ProcessFramework.Aspire.ProcessOrchestrator.Steps; - -var builder = WebApplication.CreateBuilder(args); - -string otelExporterEndpoint = builder.GetConfiguration("OTEL_EXPORTER_OTLP_ENDPOINT"); -string otelExporterHeaders = builder.GetConfiguration("OTEL_EXPORTER_OTLP_HEADERS"); - -AppContext.SetSwitch("Microsoft.SemanticKernel.Experimental.GenAI.EnableOTelDiagnosticsSensitive", true); - -var loggerFactory = LoggerFactory.Create(builder => -{ - // Add OpenTelemetry as a logging provider - builder.AddOpenTelemetry(options => - { - options.AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }); - // Format log messages. This defaults to false. 
- options.IncludeFormattedMessage = true; - }); - - builder.AddTraceSource("Microsoft.SemanticKernel"); - builder.SetMinimumLevel(LogLevel.Information); -}); - -using var traceProvider = Sdk.CreateTracerProviderBuilder() - .AddSource("Microsoft.SemanticKernel*") - .AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }) - .Build(); - -using var meterProvider = Sdk.CreateMeterProviderBuilder() - .AddMeter("Microsoft.SemanticKernel*") - .AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }) - .Build(); - -builder.AddServiceDefaults(); -builder.Services.AddHttpClient(client => { client.BaseAddress = new("https+http://translatoragent"); }); -builder.Services.AddHttpClient(client => { client.BaseAddress = new("https+http://summaryagent"); }); -builder.Services.AddSingleton(builder => -{ - var kernelBuilder = Kernel.CreateBuilder(); - - kernelBuilder.Services.AddSingleton(builder.GetRequiredService()); - kernelBuilder.Services.AddSingleton(builder.GetRequiredService()); - - return kernelBuilder.Build(); -}); - -var app = builder.Build(); - -app.UseHttpsRedirection(); - -app.MapGet("/api/processdoc", async (Kernel kernel) => -{ - var processBuilder = new ProcessBuilder("ProcessDocument"); - var translateDocumentStep = processBuilder.AddStepFromType(); - var summarizeDocumentStep = processBuilder.AddStepFromType(); - - processBuilder - .OnInputEvent(ProcessEvents.TranslateDocument) - .SendEventTo(new(translateDocumentStep, TranslateStep.Functions.Translate, parameterName: "textToTranslate")); - - translateDocumentStep - .OnEvent(ProcessEvents.DocumentTranslated) - .SendEventTo(new(summarizeDocumentStep, SummarizeStep.Functions.Summarize, parameterName: "textToSummarize")); - - summarizeDocumentStep - .OnEvent(ProcessEvents.DocumentSummarized) - .StopProcess(); - - var process = processBuilder.Build(); - using var runningProcess = await process.StartAsync( - kernel, - new KernelProcessEvent { Id = ProcessEvents.TranslateDocument, Data = "COME I FORNITORI INFLUENZANO I TUOI COSTI Quando scegli un piano di assicurazione sanitaria, uno dei fattori più importanti da considerare è la rete di fornitori in convenzione disponibili con il piano. Northwind Standard offre un'ampia varietà di fornitori in convenzione, tra cui medici di base, specialisti, ospedali e farmacie. Questo ti permette di scegliere un fornitore comodo per te e la tua famiglia, contribuendo al contempo a mantenere bassi i tuoi costi. Se scegli un fornitore in convenzione con il tuo piano, pagherai generalmente copay e franchigie più basse rispetto a un fornitore fuori rete. Inoltre, molti servizi, come l'assistenza preventiva, possono essere coperti senza alcun costo aggiuntivo se ricevuti da un fornitore in convenzione. È importante notare, tuttavia, che Northwind Standard non copre i servizi di emergenza, l'assistenza per la salute mentale e l'abuso di sostanze, né i servizi fuori rete. Questo significa che potresti dover pagare di tasca tua per questi servizi se ricevuti da un fornitore fuori rete. Quando scegli un fornitore in convenzione, ci sono alcuni suggerimenti da tenere a mente. Verifica che il fornitore sia in convenzione con il tuo piano. Puoi confermarlo chiamando l'ufficio del fornitore e chiedendo se è in rete con Northwind Standard. 
Puoi anche utilizzare lo strumento di ricerca fornitori sul sito web di Northwind Health per verificare la copertura. Assicurati che il fornitore stia accettando nuovi pazienti. Alcuni fornitori potrebbero essere in convenzione ma non accettare nuovi pazienti. Considera la posizione del fornitore. Se il fornitore è troppo lontano, potrebbe essere difficile raggiungere gli appuntamenti. Valuta gli orari dell'ufficio del fornitore. Se lavori durante il giorno, potresti aver bisogno di trovare un fornitore con orari serali o nel fine settimana. Scegliere un fornitore in convenzione può aiutarti a risparmiare sui costi sanitari. Seguendo i suggerimenti sopra e facendo ricerche sulle opzioni disponibili, puoi trovare un fornitore conveniente, accessibile e in rete con il tuo piano Northwind Standard." } - ); - - return Results.Ok("Process completed successfully"); -}); - -app.MapDefaultEndpoints(); - -app.Run(); diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/SummarizeStep.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/SummarizeStep.cs deleted file mode 100644 index 0f85f0ff0fd9..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/SummarizeStep.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using ProcessFramework.Aspire.ProcessOrchestrator.Models; - -namespace ProcessFramework.Aspire.ProcessOrchestrator.Steps; - -public class SummarizeStep : KernelProcessStep -{ - public static class Functions - { - public const string Summarize = nameof(Summarize); - } - - [KernelFunction(Functions.Summarize)] - public async ValueTask SummarizeAsync(KernelProcessStepContext context, Kernel kernel, string textToSummarize) - { - var summaryAgentHttpClient = kernel.GetRequiredService(); - var summarizedText = await summaryAgentHttpClient.SummarizeAsync(textToSummarize); - Console.WriteLine($"Summarized text: {summarizedText}"); - await context.EmitEventAsync(new() { Id = ProcessEvents.DocumentSummarized, Data = summarizedText }); - } -} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/TranslateStep.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/TranslateStep.cs deleted file mode 100644 index 8c1b46c22746..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/Steps/TranslateStep.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.SemanticKernel; -using ProcessFramework.Aspire.ProcessOrchestrator.Models; - -namespace ProcessFramework.Aspire.ProcessOrchestrator.Steps; - -public class TranslateStep : KernelProcessStep -{ - public static class Functions - { - public const string Translate = nameof(Translate); - } - - [KernelFunction(Functions.Translate)] - public async ValueTask TranslateAsync(KernelProcessStepContext context, Kernel kernel, string textToTranslate) - { - var translatorAgentHttpClient = kernel.GetRequiredService(); - var translatedText = await translatorAgentHttpClient.TranslateAsync(textToTranslate); - Console.WriteLine($"Translated text: {translatedText}"); - await context.EmitEventAsync(new() { Id = ProcessEvents.DocumentTranslated, Data = translatedText }); - } -} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/SummaryAgentHttpClient.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/SummaryAgentHttpClient.cs deleted file mode 100644 index 54f6fe7dd757..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/SummaryAgentHttpClient.cs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Text; -using System.Text.Json; -using ProcessFramework.Aspire.Shared; - -namespace ProcessFramework.Aspire.ProcessOrchestrator; - -public class SummaryAgentHttpClient(HttpClient httpClient) -{ - public async Task SummarizeAsync(string textToSummarize) - { - var payload = new SummarizeRequest { TextToSummarize = textToSummarize }; -#pragma warning disable CA2234 // We cannot pass uri here since we are using a customer http client with a base address - var response = await httpClient.PostAsync("/api/summary", new StringContent(JsonSerializer.Serialize(payload), Encoding.UTF8, "application/json")).ConfigureAwait(false); - response.EnsureSuccessStatusCode(); - var responseContent = await response.Content.ReadAsStringAsync(); - return responseContent; - } -} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/TranslatorAgentHttpClient.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/TranslatorAgentHttpClient.cs deleted file mode 100644 index b01cb1c0bb81..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/TranslatorAgentHttpClient.cs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System.Text; -using System.Text.Json; -using ProcessFramework.Aspire.Shared; - -namespace ProcessFramework.Aspire.ProcessOrchestrator; - -public class TranslatorAgentHttpClient(HttpClient httpClient) -{ - public async Task TranslateAsync(string textToTranslate) - { - var payload = new TranslationRequest { TextToTranslate = textToTranslate }; -#pragma warning disable CA2234 // We cannot pass uri here since we are using a customer http client with a base address - var response = await httpClient.PostAsync("/api/translator", new StringContent(JsonSerializer.Serialize(payload), Encoding.UTF8, "application/json")).ConfigureAwait(false); - response.EnsureSuccessStatusCode(); - var responseContent = await response.Content.ReadAsStringAsync(); - return responseContent; - } -} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/appsettings.json b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/appsettings.json deleted file mode 100644 index 10f68b8c8b4f..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/appsettings.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - }, - "AllowedHosts": "*" -} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/Extensions.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/Extensions.cs deleted file mode 100644 index b95812023687..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/Extensions.cs +++ /dev/null @@ -1,163 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.AspNetCore.Builder; -using Microsoft.AspNetCore.Diagnostics.HealthChecks; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Diagnostics.HealthChecks; -using Microsoft.Extensions.Logging; -using OpenTelemetry; -using OpenTelemetry.Metrics; -using OpenTelemetry.Trace; - -namespace Microsoft.Extensions.Hosting; - -/// -/// Adds common .NET Aspire services: service discovery, resilience, health checks, and OpenTelemetry. -/// This project should be referenced by each service project in your solution. -/// To learn more about using this project, see https://aka.ms/dotnet/aspire/service-defaults -/// -public static class ServiceExtensions -{ - /// - /// Gets a configuration setting from the WebApplicationBuilder. - /// - /// The WebApplicationBuilder instance. - /// The name of the configuration setting. - /// The value of the configuration setting. - /// Thrown when the configuration setting is missing. - public static string GetConfiguration(this WebApplicationBuilder builder, string settingName) - { - return builder.Configuration[settingName] ?? throw new InvalidOperationException($"Missing configuration setting: {settingName}"); - } - - /// - /// Adds default services to the application builder. - /// - /// The type of the application builder. - /// The application builder instance. - /// The application builder instance with default services added. 
- public static TBuilder AddServiceDefaults(this TBuilder builder) where TBuilder : IHostApplicationBuilder - { - builder.ConfigureOpenTelemetry(); - - builder.AddDefaultHealthChecks(); - - builder.Services.AddServiceDiscovery(); - - builder.Services.ConfigureHttpClientDefaults(http => - { - // Turn on resilience by default - http.AddStandardResilienceHandler(); - - // Turn on service discovery by default - http.AddServiceDiscovery(); - }); - - // Uncomment the following to restrict the allowed schemes for service discovery. - // builder.Services.Configure(options => - // { - // options.AllowedSchemes = ["https"]; - // }); - - return builder; - } - - /// - /// Configures OpenTelemetry for the application builder. - /// - /// The type of the application builder. - /// The application builder instance. - /// The application builder instance with OpenTelemetry configured. - public static TBuilder ConfigureOpenTelemetry(this TBuilder builder) where TBuilder : IHostApplicationBuilder - { - builder.Logging.AddOpenTelemetry(logging => - { - logging.IncludeFormattedMessage = true; - logging.IncludeScopes = true; - }); - - builder.Services.AddOpenTelemetry() - .WithMetrics(metrics => - { - metrics.AddAspNetCoreInstrumentation() - .AddHttpClientInstrumentation() - .AddRuntimeInstrumentation(); - }) - .WithTracing(tracing => - { - tracing.AddSource(builder.Environment.ApplicationName) - .AddAspNetCoreInstrumentation() - // Uncomment the following line to enable gRPC instrumentation (requires the OpenTelemetry.Instrumentation.GrpcNetClient package) - //.AddGrpcClientInstrumentation() - .AddHttpClientInstrumentation(); - }); - - builder.AddOpenTelemetryExporters(); - - return builder; - } - - /// - /// Adds OpenTelemetry exporters to the application builder. - /// - /// The type of the application builder. - /// The application builder instance. - /// The application builder instance with OpenTelemetry exporters added. - private static TBuilder AddOpenTelemetryExporters(this TBuilder builder) where TBuilder : IHostApplicationBuilder - { - var useOtlpExporter = !string.IsNullOrWhiteSpace(builder.Configuration["OTEL_EXPORTER_OTLP_ENDPOINT"]); - - if (useOtlpExporter) - { - builder.Services.AddOpenTelemetry().UseOtlpExporter(); - } - - // Uncomment the following lines to enable the Azure Monitor exporter (requires the Azure.Monitor.OpenTelemetry.AspNetCore package) - //if (!string.IsNullOrEmpty(builder.Configuration["APPLICATIONINSIGHTS_CONNECTION_STRING"])) - //{ - // builder.Services.AddOpenTelemetry() - // .UseAzureMonitor(); - //} - - return builder; - } - - /// - /// Adds default health checks to the application builder. - /// - /// The type of the application builder. - /// The application builder instance. - /// The application builder instance with default health checks added. - public static TBuilder AddDefaultHealthChecks(this TBuilder builder) where TBuilder : IHostApplicationBuilder - { - builder.Services.AddHealthChecks() - // Add a default liveness check to ensure app is responsive - .AddCheck("self", () => HealthCheckResult.Healthy(), ["live"]); - - return builder; - } - - /// - /// Maps default endpoints for the application. - /// - /// The WebApplication instance. - /// The WebApplication instance with default endpoints mapped. - public static WebApplication MapDefaultEndpoints(this WebApplication app) - { - // Adding health checks endpoints to applications in non-development environments has security implications. 
- // See https://aka.ms/dotnet/aspire/healthchecks for details before enabling these endpoints in non-development environments. - if (app.Environment.IsDevelopment()) - { - // All health checks must pass for app to be considered ready to accept traffic after starting - app.MapHealthChecks("/health"); - - // Only health checks tagged with the "live" tag must pass for app to be considered alive - app.MapHealthChecks("/alive", new HealthCheckOptions - { - Predicate = r => r.Tags.Contains("live") - }); - } - - return app; - } -} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/ProcessFramework.Aspire.ServiceDefaults.csproj b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/ProcessFramework.Aspire.ServiceDefaults.csproj deleted file mode 100644 index 85ccce7e8426..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ServiceDefaults/ProcessFramework.Aspire.ServiceDefaults.csproj +++ /dev/null @@ -1,23 +0,0 @@ - - - - net8.0 - LatestMajor - enable - enable - true - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/ProcessFramework.Aspire.Shared.csproj b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/ProcessFramework.Aspire.Shared.csproj deleted file mode 100644 index ba15679e7f9d..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/ProcessFramework.Aspire.Shared.csproj +++ /dev/null @@ -1,11 +0,0 @@ - - - - net8.0 - LatestMajor - enable - enable - $(NoWarn);CA1716 - - - \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/SummarizeRequest.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/SummarizeRequest.cs deleted file mode 100644 index 0ad107bfa245..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/SummarizeRequest.cs +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace ProcessFramework.Aspire.Shared; - -/// -/// Represents a request to summarize a given text. -/// -public class SummarizeRequest -{ - /// - /// Gets or sets the text to be summarized. - /// - public string TextToSummarize { get; set; } = string.Empty; -} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/TranslationRequest.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/TranslationRequest.cs deleted file mode 100644 index e94118c74d66..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.Shared/TranslationRequest.cs +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace ProcessFramework.Aspire.Shared; - -/// -/// Represents a request to translate a given text. -/// -public class TranslationRequest -{ - /// - /// Gets or sets the text to be translated. 
- /// - public string TextToTranslate { get; set; } = string.Empty; -} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.csproj b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.csproj deleted file mode 100644 index 187beb78372b..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.csproj +++ /dev/null @@ -1,25 +0,0 @@ - - - - net8.0 - LatestMajor - enable - enable - SKEXP0001,SKEXP0050,SKEXP0110 - - - - - - - - - - - - - - \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.http b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.http deleted file mode 100644 index d1b8e9f5ea86..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/ProcessFramework.Aspire.SummaryAgent.http +++ /dev/null @@ -1,9 +0,0 @@ -POST https://localhost:7261/api/summary -Accept: application/json -Content-Type: application/json - -{ - "TextToSummarize": "HOW PROVIDERS AFFECT YOUR COSTS When selecting a health insurance plan, one of the most important factors to consider is the network of in-network providers that are available with the plan. Northwind Standard offers a wide variety of in-network providers, ranging from primary care physicians, specialists, hospitals, and pharmacies. This allows you to choose a provider that is convenient for you and your family, while also helping you to keep your costs low. When you choose a provider that is in-network with your plan, you will typically pay lower copays and deductibles than you would with an out-of-network provider. In addition, many services, such as preventive care, may be covered at no cost when you receive care from an in-network provider. It is important to note, however, that Northwind Standard does not offer coverage for emergency services, mental health and substance abuse coverage, or out-of-network services. This means that you may have to pay out of pocket for these services if you receive them from an out-of-network provider. When choosing an in-network provider, there are a few tips to keep in mind. First, make sure that the provider you choose is in-network with your plan. You can confirm this by calling the provider's office and asking them if they are in-network with Northwind Standard. You can also use the provider search tool on the Northwind Health website to make sure your provider is in-network. Second, make sure that the provider you choose is accepting new patients. Some providers may be in-network but not be taking new patients. Third, consider the location of the provider. If the provider is too far away, it may be difficult for you to get to your appointments. Finally, consider the provider's office hours. If you work during the day, you may need to find a provider that has evening or weekend hours. Choosing an in-network provider can help you save money on your health care costs. 
By following the tips above and researching your options, you can find a provider that is convenient, affordable, and in-network with your Northwind Standard plan." -} - -### diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/Program.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/Program.cs deleted file mode 100644 index 5173182ee154..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/Program.cs +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Azure.AI.OpenAI; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.ChatCompletion; -using OpenTelemetry; -using OpenTelemetry.Exporter; -using OpenTelemetry.Logs; -using OpenTelemetry.Metrics; -using OpenTelemetry.Trace; -using ProcessFramework.Aspire.Shared; - -var builder = WebApplication.CreateBuilder(args); - -string otelExporterEndpoint = builder.GetConfiguration("OTEL_EXPORTER_OTLP_ENDPOINT"); -string otelExporterHeaders = builder.GetConfiguration("OTEL_EXPORTER_OTLP_HEADERS"); - -AppContext.SetSwitch("Microsoft.SemanticKernel.Experimental.GenAI.EnableOTelDiagnosticsSensitive", true); - -var loggerFactory = LoggerFactory.Create(builder => -{ - // Add OpenTelemetry as a logging provider - builder.AddOpenTelemetry(options => - { - options.AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }); - // Format log messages. This defaults to false. - options.IncludeFormattedMessage = true; - }); - - builder.AddTraceSource("Microsoft.SemanticKernel"); - builder.SetMinimumLevel(LogLevel.Information); -}); - -using var traceProvider = Sdk.CreateTracerProviderBuilder() - .AddSource("Microsoft.SemanticKernel*") - .AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }) - .Build(); - -using var meterProvider = Sdk.CreateMeterProviderBuilder() - .AddMeter("Microsoft.SemanticKernel*") - .AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }) - .Build(); - -builder.AddServiceDefaults(); -builder.AddAzureOpenAIClient("openAiConnectionName"); -builder.Services.AddSingleton(builder => -{ - var kernelBuilder = Kernel.CreateBuilder(); - - kernelBuilder.AddAzureOpenAIChatCompletion("gpt-4o", builder.GetService()); - - return kernelBuilder.Build(); -}); - -var app = builder.Build(); - -app.UseHttpsRedirection(); - -app.MapPost("/api/summary", async (Kernel kernel, SummarizeRequest summarizeRequest) => -{ - ChatCompletionAgent summaryAgent = - new() - { - Name = "SummarizationAgent", - Instructions = "Summarize user input", - Kernel = kernel - }; - // Create a ChatHistory object to maintain the conversation state. 
- ChatHistory chat = []; - - // Add a user message to the conversation - chat.Add(new ChatMessageContent(AuthorRole.User, summarizeRequest.TextToSummarize)); - - // Generate the agent response(s) - await foreach (var response in summaryAgent.InvokeAsync(chat).ConfigureAwait(false)) - { - chat.AddAssistantMessage(response.ToString()); - return response.Items.Last().ToString(); - } - - return null; -}); - -app.MapDefaultEndpoints(); - -app.Run(); diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/appsettings.json b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/appsettings.json deleted file mode 100644 index 10f68b8c8b4f..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.SummaryAgent/appsettings.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - }, - "AllowedHosts": "*" -} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.csproj b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.csproj deleted file mode 100644 index 59be1e8a4d6a..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.csproj +++ /dev/null @@ -1,26 +0,0 @@ - - - - net8.0 - LatestMajor - enable - enable - SKEXP0001,SKEXP0050,SKEXP0110 - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.http b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.http deleted file mode 100644 index f08fca693f69..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/ProcessFramework.Aspire.TranslatorAgent.http +++ /dev/null @@ -1,9 +0,0 @@ -POST https://localhost:7228/api/translator -Accept: application/json -Content-Type: application/json - -{ - "TextToTranslate": "COME I FORNITORI INFLUENZANO I TUOI COSTI Quando scegli un piano di assicurazione sanitaria, uno dei fattori più importanti da considerare è la rete di fornitori in convenzione disponibili con il piano. Northwind Standard offre un'ampia varietà di fornitori in convenzione, tra cui medici di base, specialisti, ospedali e farmacie. Questo ti permette di scegliere un fornitore comodo per te e la tua famiglia, contribuendo al contempo a mantenere bassi i tuoi costi. Se scegli un fornitore in convenzione con il tuo piano, pagherai generalmente copay e franchigie più basse rispetto a un fornitore fuori rete. Inoltre, molti servizi, come l'assistenza preventiva, possono essere coperti senza alcun costo aggiuntivo se ricevuti da un fornitore in convenzione. È importante notare, tuttavia, che Northwind Standard non copre i servizi di emergenza, l'assistenza per la salute mentale e l'abuso di sostanze, né i servizi fuori rete. Questo significa che potresti dover pagare di tasca tua per questi servizi se ricevuti da un fornitore fuori rete. 
Quando scegli un fornitore in convenzione, ci sono alcuni suggerimenti da tenere a mente. Verifica che il fornitore sia in convenzione con il tuo piano. Puoi confermarlo chiamando l'ufficio del fornitore e chiedendo se è in rete con Northwind Standard. Puoi anche utilizzare lo strumento di ricerca fornitori sul sito web di Northwind Health per verificare la copertura. Assicurati che il fornitore stia accettando nuovi pazienti. Alcuni fornitori potrebbero essere in convenzione ma non accettare nuovi pazienti. Considera la posizione del fornitore. Se il fornitore è troppo lontano, potrebbe essere difficile raggiungere gli appuntamenti. Valuta gli orari dell'ufficio del fornitore. Se lavori durante il giorno, potresti aver bisogno di trovare un fornitore con orari serali o nel fine settimana. Scegliere un fornitore in convenzione può aiutarti a risparmiare sui costi sanitari. Seguendo i suggerimenti sopra e facendo ricerche sulle opzioni disponibili, puoi trovare un fornitore conveniente, accessibile e in rete con il tuo piano Northwind Standard." -} - -### diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/Program.cs b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/Program.cs deleted file mode 100644 index ce4e12610699..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/Program.cs +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Azure.AI.OpenAI; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.ChatCompletion; -using OpenTelemetry; -using OpenTelemetry.Exporter; -using OpenTelemetry.Logs; -using OpenTelemetry.Metrics; -using OpenTelemetry.Trace; -using ProcessFramework.Aspire.Shared; - -var builder = WebApplication.CreateBuilder(args); - -string otelExporterEndpoint = builder.GetConfiguration("OTEL_EXPORTER_OTLP_ENDPOINT"); -string otelExporterHeaders = builder.GetConfiguration("OTEL_EXPORTER_OTLP_HEADERS"); - -AppContext.SetSwitch("Microsoft.SemanticKernel.Experimental.GenAI.EnableOTelDiagnosticsSensitive", true); - -var loggerFactory = LoggerFactory.Create(builder => -{ - // Add OpenTelemetry as a logging provider - builder.AddOpenTelemetry(options => - { - options.AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }); - // Format log messages. This defaults to false. 
- options.IncludeFormattedMessage = true; - }); - - builder.AddTraceSource("Microsoft.SemanticKernel"); - builder.SetMinimumLevel(LogLevel.Information); -}); - -using var traceProvider = Sdk.CreateTracerProviderBuilder() - .AddSource("Microsoft.SemanticKernel*") - .AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }) - .Build(); - -using var meterProvider = Sdk.CreateMeterProviderBuilder() - .AddMeter("Microsoft.SemanticKernel*") - .AddOtlpExporter(exporter => { exporter.Endpoint = new Uri(otelExporterEndpoint); exporter.Headers = otelExporterHeaders; exporter.Protocol = OtlpExportProtocol.Grpc; }) - .Build(); - -builder.AddServiceDefaults(); -builder.AddAzureOpenAIClient("openAiConnectionName"); -builder.Services.AddSingleton(builder => -{ - var kernelBuilder = Kernel.CreateBuilder(); - - kernelBuilder.AddAzureOpenAIChatCompletion("gpt-4o", builder.GetService()); - - return kernelBuilder.Build(); -}); - -var app = builder.Build(); - -app.UseHttpsRedirection(); - -app.MapPost("/api/translator", async (Kernel kernel, TranslationRequest translationRequest) => -{ - ChatCompletionAgent summaryAgent = - new() - { - Name = "TranslatorAgent", - Instructions = "Translate user input in english", - Kernel = kernel - }; - // Create a ChatHistory object to maintain the conversation state. - ChatHistory chat = []; - - // Add a user message to the conversation - chat.Add(new ChatMessageContent(AuthorRole.User, translationRequest.TextToTranslate)); - - // Generate the agent response(s) - await foreach (var response in summaryAgent.InvokeAsync(chat).ConfigureAwait(false)) - { - chat.AddAssistantMessage(response.ToString()); - return response.Items.Last().ToString(); - } - - return null; -}); - -app.MapDefaultEndpoints(); - -app.Run(); diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/appsettings.json b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/appsettings.json deleted file mode 100644 index 10f68b8c8b4f..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.TranslatorAgent/appsettings.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - }, - "AllowedHosts": "*" -} diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/README.md b/dotnet/samples/Demos/ProcessFrameworkWithAspire/README.md deleted file mode 100644 index 0ad079035b03..000000000000 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/README.md +++ /dev/null @@ -1,45 +0,0 @@ -# Process Framework with .NET Aspire - -This demo illustrates how the [Semantic Kernel Process Framework](https://learn.microsoft.com/semantic-kernel/overview) can be integrated with [.NET Aspire](https://learn.microsoft.com/dotnet/aspire/get-started/aspire-overview). The Process Framework enables the creation of business processes based on events, where each process step may invoke an agent or execute native code. - -In the demo, agents are defined as **external services**. Each process step issues an HTTP request to call these agents, allowing .NET Aspire to trace the process using **OpenTelemetry**. Furthermore, because each agent is a standalone service, they can be restarted independently via the .NET Aspire developer dashboard. 
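As a concrete illustration of that call pattern, the sketch below shows how a caller might post text to the summary agent's `/api/summary` endpoint using the shared `SummarizeRequest` DTO from this sample. This is not code from this PR: the `SummaryAgentClient` name is hypothetical, and it assumes an `HttpClient` whose base address points at the summary agent (for example via Aspire service discovery) plus the sample's default net8.0 implicit usings.

```csharp
// Hypothetical helper (illustration only): calls the summary agent service over HTTP.
// Assumes the injected HttpClient's BaseAddress already points at the SummaryAgent service.
using System.Net.Http.Json;
using ProcessFramework.Aspire.Shared;

public class SummaryAgentClient(HttpClient httpClient)
{
    public async Task<string?> SummarizeAsync(string text, CancellationToken cancellationToken = default)
    {
        // POST the shared DTO to the agent's minimal API endpoint.
        var response = await httpClient.PostAsJsonAsync(
            "/api/summary",
            new SummarizeRequest { TextToSummarize = text },
            cancellationToken);

        response.EnsureSuccessStatusCode();

        // The endpoint returns the agent's reply as a plain string body.
        return await response.Content.ReadAsStringAsync(cancellationToken);
    }
}
```

A process step could take such a client as a dependency and simply forward the step's input text to it, which is what lets Aspire trace each step as an outgoing HTTP call.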
-
-## Architecture
-
-The business logic of this sample is straightforward: it defines a process that translates text into English and subsequently summarizes it.
-
-![Architecture Diagram](./docs/architecture.png)
-
-## What is .NET Aspire?
-
-.NET Aspire is a set of tools, templates, and packages for building observable, production-ready apps. .NET Aspire is delivered through a collection of NuGet packages that bootstrap or address common challenges in modern app development.
-Key features include:
-
-- Dev-Time Orchestration: provides features for running and connecting multi-project applications, container resources, and other dependencies for local development environments.
-- Integrations: offers standardized NuGet packages for frequently used services such as Redis and Postgres, with standardized interfaces ensuring consistent and seamless connectivity.
-- Tooling: includes project templates and tools for Visual Studio, Visual Studio Code, and the .NET CLI to help create and interact with .NET Aspire projects.
-
-.NET Aspire orchestration assists with the following concerns:
-
-- App composition: specify the .NET projects, containers, executables, and cloud resources that make up the application.
-- Service Discovery and Connection String Management: automatically injects the right connection strings, network configurations, and service discovery information to simplify the developer experience.
-
-### Running with .NET Aspire
-
-To run this sample with .NET Aspire, clone the repository and execute the following commands:
-
-```bash
-cd src/ProcessFramework.Aspire/ProcessFramework.Aspire.AppHost
-dotnet run
-```
-
-A dashboard will then be displayed in the browser, similar to this:
-![Aspire Dashboard](./docs/aspire-dashboard.png)
-
-By invoking the `ProcessOrchestrator` service, the process can be started. A predefined request is available in [`ProcessFramework.Aspire.ProcessOrchestrator.http`](./ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.http).
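For orientation, the following is a rough sketch of how an Aspire AppHost could compose these services. It is not the AppHost from this PR (that project is not shown in this hunk): the `Projects.*` type names are assumed placeholders for the usual source-generated project references, and only the `openAiConnectionName` connection string name is taken from the agents' `Program.cs` files above.

```csharp
// Illustrative AppHost sketch (assumed project/type names): composes the agents and the
// orchestrator so that Aspire handles service discovery and OpenTelemetry wiring.
var builder = DistributedApplication.CreateBuilder(args);

// Azure OpenAI connection shared by both agents ("openAiConnectionName" matches their Program.cs).
var openAi = builder.AddConnectionString("openAiConnectionName");

var summaryAgent = builder.AddProject<Projects.ProcessFramework_Aspire_SummaryAgent>("summaryagent")
    .WithReference(openAi);

var translatorAgent = builder.AddProject<Projects.ProcessFramework_Aspire_TranslatorAgent>("translatoragent")
    .WithReference(openAi);

// The orchestrator calls both agents over HTTP, so it references them for discovery.
builder.AddProject<Projects.ProcessFramework_Aspire_ProcessOrchestrator>("processorchestrator")
    .WithReference(summaryAgent)
    .WithReference(translatorAgent);

builder.Build().Run();
```

With the services composed along these lines, invoking the `ProcessOrchestrator` service with the predefined request starts the process.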
-
-This will generate a trace in the Aspire dashboard that looks like this:
-![Aspire Trace](./docs/aspire-traces.png)
-
-Additionally, the metrics for each agent can be monitored in the Metrics tab:
-![Aspire Metrics](./docs/aspire-metrics.png)
\ No newline at end of file
diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/architecture.png b/dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/architecture.png
deleted file mode 100644
index 747e2ba19331fb59bd7896b1c0a1a9a40c8ccbc5..0000000000000000000000000000000000000000
GIT binary patch
diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/aspire-dashboard.png b/dotnet/samples/Demos/ProcessFrameworkWithAspire/docs/aspire-dashboard.png
deleted file mode 100644
index 2a1178319648a602898a2aae1034ea45e0a49448..0000000000000000000000000000000000000000
GIT binary patch
z4_;mQ?>I;&G3^$1ZAP|6+?AfAN161Leh@R0ubO(42gMQJ!i~h-I&i(&Y{WXLC2NG@o z(#9!`0s4mlM8~z78>qmI?Ps&*dgB7TfG>C&{U5Qaw8Q2e?#={=>S zyj%!W^#{Cr_n4N~yZnSdKh9gvu zQoC+!fbHQW;VxF7=g$)igt1`|qdnaOkAH7Og;nozFO^8^My=^OpHABG+WHdJFSi2a z`L7rvc_;9Sk$ddM4HaI2jQU5`f&vDw2 zn_gX#h_6RN>`%CcuB`Mh1eKlPtsbk1N!nMK?0`W#n# zD5DaOG#9$x5N zeZm)+{ZUyBW0zDoQf0kJGp)ya0R!qmyZU-O%1@s{h+npcK#c>r0>tZgt;_>#$MuUg z6&up-quNGoJlngt{}>gPDzx=B620X*E0!g+2_BcC^XO3^=_x1-3dD78W!!EE~v1w^Vk&%Dv7MU2z%F3Pri;J!I_a_7O!YJpZ zF2Ebr^z`%$4)Wsuj$F+PryY50a*apn@VPGA#(0t+F*ob{son0XjAeoU z{@iBFl7N+syb?^htD{51z#!qxThm$QQ)Dk2FH%zKf&T>roQqlA?rf{$VxFIOGwfw+ z+B;q)XV~EX28{o&0~-h)$AZ(c16!hBvOC<`#wIx^q_8k^^zwAdbBBkn}k~GMUwjb z$Kgv-I21lHW~rhzhZ2HRXiMDk4@~045r6JX$As3$y_ZZ3-s-7N|NoY`dshNn~ z|BC9qX|oRJwg*^yOexD_Kc_M-2|*j*2OqUv>BndWd#+m_3g3Gu)ARCZbCO9-JCt%s zCc$NY%L0J*9VUENfLEvm3Z}}PW~K2OuR-l^&inTlK+UHt$&Cj^dznDw@j|~l`SB~T zIVcbUkmLkg2p&6tQl{0vlamY5dX^PNwA9qMbr<^-nJyQ1YE6z&RN!2@ylj2!0!c;Z z7>9&Hk?An}WSjLOIa7uucm{#+yEXg_=k9iPMUH`g;p zx}99froGs97d{SjmQmAi1ruXeA1DkWRr;h#dJO&h7SuNPuKj0MxH)vnxC5s7Tf3P} z-T-604Ss?*P46&Dy8;kiOyWVp%dRzq)^j=0#s%Q$Nlwco7x@5>P450Z%+HC?q)RqI z(~hG6yD4{^7FSIaP#|$7d=FRI!TKd4PmyVKDQ~uvfNqxxkjH=SdJLOx?fK9=jX|#l zO!AYn-@U5=V!09~5V;-gpw-s^tHL8>6r7SJ{LcH6#nhMW(|&B39{`1)f) zl>QfWZy8Y4+IEdDQ4A2+C?TMrbR*rMf+F1@-5@P3Z2<}>(hY)iH!M;Ckw!^rknZj} z*K~jTdG`DLIOqI*`J>`uG1r{;eZ{!O7D3*laqwXtqC zkFpG^a;%T}#j(@Wf~iM)*<@jZMo@Qnpevov6xWq36xq|A9O}3{ML+w)vyAd{$Q+>7 z@=O$EmHfYUP~Nys!Yvpkz9;48h?H(j67xeCLGWa{gi^q#B1<}wb+$=vJI-9d2rwv5IsBo40}^H>h7I!!iOjnpE3+ymBShHWMmZot_vBd z8^aqi3no8zeODdOi^amFC`Ge~#jnW?gmf`k_PtWrdd8~G1@y13zeG0EQ|cvRd2Rba zh@2Z3cy;R|uuk+lj1>YNwsWMxY}-6-q94qUIVeR zIhge^EgmcZ7a>nTo(^{g^hzPHfDhK%^IXkRAOTRpzjYCZ1yC=s1*ASb5ft7t0f^}k zJ`yko)Nb+HHwu#f_17l>r^R^?#t+n;?yw=mks{4pUr!ZB=PI4L)5GjBk1g3;y*gcB z2EbJSOLQPqUL;d0oH5!qL>l-*5zHFNuqWn@Zj3n(TZ6lXY|`$6qS{<6>ko8*(7yO1>9tMe0xjZg1ewIw^PlH7(G_)c-f{UOTJw;#`mvjh zTc--4}3Or@59sGFuz36)Gwcl<&vECbmgR|G9ej$UfKU{3qB6!5gE@Q6g0?x}B z<>Ovjh#2tBCxha_ZFs|Z__b{@9-EP;&uu!u`2s9!K7mkJ z3M<$iS?Yv%N)HBhD5cKRAD=LpVWpoagpW30Na__M>}19(oEG z0OBG*NJf@zmf+fS_n!&%JzdgrN5)TqXTp~EJ`oIVE;|Veko}&T3Xh4&LVS~^n%*-3 zadA%+OrEBKAi^-2n3%wW)a8&rhx(+Q1v`jmlv5@3(dokcp}I--#!hrdN*b^6Vo~$) z>xbMHf{tQ)^_ct$9ssxt*!_NkW|jmpR029I09G435`+-52+xlbZ*Bou2{JN)5v)2E zjcKX-~ePeCq0C=-CF z1prdm@d)Q3F_7}xr+|oGHD&Am>Rlyp^+Q`JsLc&SSla$_9*`}0F=}$`;kUZqb#Pte@bqY{)Okg-FH>0p?j;xAT$^Fx)9w^WEF2tFL&IdS z$1zG!y0e^cmX$zu7^izS`N>ZIgbU}EfQp!?rftr76k~}!g-FrGnd3Vl@zD+yNvbyw z_p-nD9`;{-$knD+L(3oOJ7ApICcIENr!lZ;PQLenFOO`hyKiEVx#9S&>7f5=aMGpk zH3n1O4jmVl3Ie2{t)MVO*719A+fd(2s&8&?{sBmK z9tcY+785yydMe)k?P^kzc{=e)vQ~{uqs-Z7;@tH1||87?>BAwGPpxoZU&vir&T*<#h2P10fHu)CHZuk=-Ij;k zmILf|91;$31YE@giL9EEQuD6LXONBhGZaX|iNn&yo-~67ZV@s!RZMWgQJO z+QK@+#~2zWsw)u#nPskgY$QivVRu#Y&&ID|NTU>Jq5NUr~Rg zXJuF&ai2mY3sA{^Yie3VqB8_jps{rHfV~u}Y@*1h?PUw_M$xSGK%;Y@L#sJl%+{=O z&P9?LlI+WdZF-a$w(6OiR;R}Yhmh^4U4GDl!$;lmW`OhRNG81A z{Oc=B3*c@}@_-K;)bbtW$_hZi+<}X?0Dc`ve*e6=+m40xV9`T(b2aevY3F69&)%|z zoa%a-?E?W#8!6(!rK%6Y7P!7Zz|lpnqv<_acvMtItW9N$?fidyRrcx8v$zLPB*w#> zi2wxhCi-Iq*rfHGWtGel^MkqTfz`?YCX(LCMpy#>Ht<`X$#PKLfW}MX*xUpvd0($nj`Zwi z9FaPTZ{;9CLd=<3J3DVpBh=u&8h00m!6o3YAx;vocO~r(f+PJ=CWL2R5d@cl>>aC_Shp%SEEn5`yAY zo{tu3^lbk=l0yW)gSH)JtzDieaS4HWM8^Q2*$^i{docEk$~cwBF}MdS*mKgZ7g z8+&ONzDNPcMz5{mXjWJ zCL&gV@l}V3WH-#-k#4~OtzEU%DY&&fYg`5$U)|T+l28{I*Ul*!#7WPQ{gcdmoP<<| z&>QV+bRVMjJiO5Rh66)Jt|wDd$xmQQQZGP^26)o_5^-%7+^oeupG$BjpU>jP7l=#H zEUaIv5pm4O=Gj)IA-_BN#ZIY8z5QpSdV)h2N!FaMhfWVs-oX7>^E~R@#Mzj=pKg*Q 
zLxs<>9*NFXjxLE3czB>*yi$!?52og>xuc91B$(yZ5R~nZ=m;o5RwJ9vs-*f^h^=m3w=ls2)#jUERljAa;6B8#<3SmFiF@#ov#~Iw=qVc#c{&RK?67C?jVrIe!Tkv64kLMPBu$)YdwhCMPCK zEjKGGd~}=;^9dm&-fXqfn0gG)*&$~pyWiPvbOxon@gLoFGs0XIv?9II2EKcz<+|=h z6qt|KyKp%z;Kn<8V@}zyVxsuO2pK)5?OdmnfO$$MFRdgAHuXB#=vs!+_}n) zCNf0-V1Bs0Ft+XD|YP1gG$jgh@i_&>|=-hoM}W?z}L7MRU?!u8EgJ~#qOAGIX! z*z?b>^Kr^mQ$~nA<0GN&`^1mCF>h`EdW)Yo^!9siQiPeQZ-|J8gf#9il~N5`VeVPN z_oVC(s2&A&GeL>k^(d*kaOcqKt6IZp4ab1x*P^T3b)35Sfw%Hu|1J&Mly(%OP~Q~p zG)QDrz)krKpKoz7J3r!^Av@kt_`o$opOpupiO>S4rlpZufQ_UAfX_*^lF(|xU5fz^ z-+aW25+d#_!vS<=Px2<-Q9~?F{V_>aTuuHO#*Ylb!d*{X(AV(s6*E%XsTsiH6V@+x zB!uu>N|z6<(i(}jNG>ndSp=>c2#m-OtHYr>?>g5T|I|6x>?A!G3J2mAhxXdpg5;kq zCbxCeO&)K~e-4rgK}sa>po{UtF7Z5amIdh=RB2WDn+Dk;TdaPjl^!KyLg;Dh=yXg$ z8t(bb^wj_*>v8V^+uYoFukuWT7h#3FW;7=Ifv(FVcTScVM%_JcX&gk{L{Ss67AxMU z&P|r-h~rlV=iMQ&F>s}qclBWfi>17aIHUxJ9H6q;Vx^V@331KQr6V*VI^JLr>D)YC8 z5Huq&aKxLXKU-a%Soe7nTASI#vsMSzjT_<@$E}wxpV$w9uop<(kac81I3nCu_|ImP z(r#2mzZU<7?zLRzTC4P+Z)m8T^d;*B!6Ub%_ks}?ASEeahsvY=@jpe@bkegk+{#9o7%oY~+H?f% zfRKRT5y8B@2uk>!g&qk+TLD*j@#+oz1p5eXwZx6QnO1upa{6dO#!Y91v*W4CvK>y* z_()&ez8^`SOZD@q2dC$l_%r3ixTlBhQ1pb=HOY;b7cVY?&)Wb1Rp9I{||*xoojPGn3jqm=8s6vHuXIy)^CfDh)693)p@Yo`0hIREG@jk zW~Fz2ku6l9nG0b)4DRDBZ-H4E07=5opd*yGV^%Q$+5i6P%xhztu%{S3-X1Kbr#pG`hM zdNc$*@Ws8A1th*q2(O49k=wZWFQ?^SiV$=WH(*t`nKcm858=ZiR0*JeNFr{e<6pWD zmM|WQazP-R_Ffom_Yg$gGpF=;qci}P)kt_LdSbs<41cn5yYE^#3}%A{_U}?hvKt_j zJlLS|8>yf4!P)x#cHP;Dd|M>j05}>GKv|h!h_uyz8tx3lz|^a1Xc@rHHG3zpi7(`Zv}yUx(O?>hAZDVYwf<_dkRrq_$YmdEA^ko@04_U4NRG?D z*th=r=P~lNWvOH)1B5GIY}Tb#ZZk@7<3VWLLMXn+yL|k zvY!N->WxRpeSqi1dyZOFwUji(QAO}nXq*BYMelu?B1r~um-B!3jvcZV*p2=`3>ZyX zMsaAlxO$;`yCw7-)@Yx=l(d1E0D@v7(5LmUg8NXRt0E1La`mgwGZm=Lm+(u8JR7I4kz%Me}Cx-$5BEmXGU{(ZUvmLLUlSeVE zWPFIS#8TaAa-x4HEptV0(F$|4}NE3G7jcW5LGHAHyAZhRC z*nWpN;2Saw5|a&q_G24J&{1u!t=Yp$8W~{EK8_jyI9_VmgWh&d3r>vy!({;ko{JIj ztQ@DgO0xhGAGY9C`F|ew^BfJUpIa^$<}+WD2U}Iw|L+^{^L}VU> zkB>jgpbM36jF4;5pXdCmU~**bg-T6~g_P1PuJz7VsZ8H4dnJ!bcOkW`JFlWv2cFML zhI7uoc#jv~Q&RdTO^5Q^zYCJwHl}tqy|8D0r8i#bGM{(IXR=G|n)ZC>c7jfFvw4ub zi~kA(^==)%RSDSufT_l`g;Xj305}0@K8%f1(A1KL5VIVAH(qf-w_mt;krB-4|0EX4 z5a+eA-0f}0HD_pVdP~fe8*T<65xHw!RaY*KFA4HhG`Q(PGH{TDBLpfT+PA~{>q{>v zvmQ2O4Ua}5Sfq|~G@2RYCZMBgUT}=@^UtYHKq5Xe?l?OcKRdvwMv8`Kwf`i&XK7{m zp&VQLd(F`#;4z`6{OJM=eg9C@5i4+HRYANFajC<*T%$I?6Os}t6>}V?dsA5RvxKb5 z?{j<4?-}j5rW9Lfr3MO1b6IaI2=iK$JPIPu{rNee=f|9GU+)`P0j0ouKl*WE!zMp+ z@UZo_Hmv^s`d-uf3+*88;v7wEwF92bll<2_EzT2WEMvb}&fmjSvA^?sWL*DzYNKHx zJ=`O8UDs6C$AC4%R)Qikp^n@h`r9wIPfeju(?r{+qP1rsmZrnfi*7?d$?X@4>_nD7 zv@sT}bCv+}3{|@pqjSJ*Bnv!65C8^V3W>rTBBu}5D)f0AT_2xlWGH_21?86!t8@qv z^OH|dQS@lD7HxXfBJv}oD*q2ZV9$AYaO^Y8R(M#PWrIyw8^!t_{?b~kNs|2lR z=TDSUM)IoblXxZ(Ptp190kK}8%!`#ZVK4U+~ElN%7Z3zYM1tI|K zt@AoP6O=p#hD3PjOE0gLUTKvI3S5-cwu9rQe*5YP& z$!TNU1%+aK8*=olBqb%~#_q0E-Lr97VP2}fg7=#%kW*b*$(+)V85m+_IW96@`-&9_ zO5iEgZc>LF67ikfz1{ZL4B{{iL^t~EzQtU~Zztf-+y^YXs}3}oUMGi0@2gsFqXPuO zI1x`xs8X^tdbZRbe%)Oj{9IAN2P>>u!4`rM0`eocgVvuT3I=ps4s>Rj0RsdS)h40M z;y?g+;}Dh7{{VKq0F?+sz=y<=P9S=;%!xxGGNKKTU?~#Bph@o%GQJBn0dg_ik*Pwf z`T5XNJU!mum@Pt*oCLXRqjqNxpjQa!!haKOwDi#R>XuT(Tx^GF51hYn;v2td58DmO z9nYZ>208;aNgxUSglU9o#_gN`M3T8!;MpMDb?rO_I7?=ynJVb%#_w+kVh*v@Dggs^ zFA3(3GGI``9*Io@?#+-k?%c_H^Yn_7h}6$^5k-IhO$U-_;TaIv?YYbt3mM;MC+{T>yJZN zE9WTzyCktdys;54FtH9Q1_s2e9WeX!cCm|e@(I0=5+`_{JATC^hON?R3qKaaYYS`= zg}-|;c;%MPp*2h>5Re}>3gU$VnwFp15hFp9Fe*3}p)iG^2#BH;^R_-Qca^ud%Lqe3 z03cP+9~{&Ra%hKbWDZn``<^!cgpM!hJM{FA0Uh~y%Vi`9DymLEg}!Kn9u4ns{shx- zwfugIHIop-T9TX$5zcVKgn+GxK8rr&7dofg@!dqwG9-VC)d1(T68h__+(A%5WJ2i$ za8xs|P$O4<0-~OwEsky{B<+Bd8fXK8w7q$E!!h`d=}S 
zh64JjU3c=ZnS+Sn;3Qz4GV$=J!E~&h_e}}RC6R>=s38vmiC6y0RQYH1DX>G#HLrhj z1r-qVXFsY! z?zt+o0e#Si>40aml&owYB%U;c*FEv|wePTd3B*2_A$1jd&Orf!oH{~yV>S~KHkAHW zJX$YDJ&ji2N_;2@Mov7dcyjOEXjd{0PTR- z38>zfn3-i@n@3V30!*mgeeJyGUbfDANQ0A}7@lwnBH7bIwoh+nC@g{;xB%p32quX5 zOT&Oz53Z2FdGJ3Br%v^``XV3-g{9b6ENhBTk}^GL8H!ofYOj$Qerr-YwW^WWND}$1 zt?xd=p4VpFl-U9A!08m1LwNE4w+&HzbRz=X@yxjMMYp&L>$ms#gc1mizE-s%EYMHtg%B?4rV z!X3}Hn`&A>h8)P@6VPPLk_aIK6Dae-*C-0$>HGlfNMnNI50V0Xp#$cNc0)r$;Cd8b zyp{;*{Vah8=f*_0U4+tc{mA`ua`JZK*)g5EM0#-&U?zyO4LYt3b_^nnTp6zuRbQtU zHCyu7a;!jRnmeGOUl=q?>;nKd!fc%$fKG@b)c-!>ijPT3*dwfn>wb&B!qjDX8qj~a zSr|^I#LW!SHqzU@W!j#K=jM$n-8BWe z75|{1Ju+i{*Ugl%=TkVa9>^Avdji9O9BEc`qXHYQe0od>GBcWo(XKN1-L{LNK^iQH ztT@)XV|zI7r}6vuC8%4Pd8wze0k9?IG?fYVwuKyzxcX!uDL<*W9)^-%UYvvOOzGvF zyx)v#b6QWIW+1J3RMgg%@V`ho&vwIY_MZ-3-tl14z?ZLH(6MJRpVp`02^^>n&U+k0 zoq6(9c9E6F`{;9(rD6z?7u7={_8+&7gHtLO1-pw=wERs)gcVz6Pcrfjy?7+qrF!*6 z<3C_Axk{walI$|2dTgf9uD<m8idJe4_&Ystwffa0F{d>yS|68}L==~l01Z8Fr zHD5H{U22yBD)j-1@fRPau^1oX45sa)i~IJJ_D+p!Ylh*7peoN~O1hj~vLQ)QlGLxm zLgh|tO!gsab65o5GUv4H&*WT#*b1v$ZN`q)Tj!#*>*%@(wKWl`-ZjE0GhInU%l46Bvj3y>Dn~iA7mEZkmRtlJM4sucsTr@JlWyUuv)kMxMgDz7F z0BZ#CA|N6%{q-Mgq*-gIg{!aPlP{FMUtEzfSC&$$KeVmp=H~F+#~zWutsGj)T+Lp`yKkBei1TA`KF@5FvXsJ3G^t+&Y4 zU0M5moz_D1k4CndjxyyI1RzE!yTl426G1J?r8y8Y`70c2Z+UKN3$>N@sg8H4S?(-K z`Ui*cQA;T*QmGt2{QXZ7M*e(J{JFPYB7{KLKEyCA=N(1GXYR;H^h(`jIzD*#eKjs+ zV5XBRIhs0Mp5TZX{eLox1a|7EU}&oEd)2P;3Kk8Yvs_vq{#A?*4CPPXBl_LBCZ-4P zNQFZ7|A(-GF+vH4Lh>i+zWAH%4*mB}W`|b$b)>v3?SlnR#9iJ63r!|S+p1WJF>H)R zdtp43)ZBkhV7K&s)nsUqky5GqL++4u4BFkyAoPI`->OFt1I6laydvK}42?e@iVs_g z--EiZW>5Cp(Hr~Zvz68Dp6>+R8a(x88l}|l@4lyFkqZNRN_}^91Y){_4+f~|Nk$5= z_egRYeK*{;#^PwYh3XuVyNawC5*y9edB-|1(3N~MBC9FE2{b*6S!@g*7CCM~rQ#dCp z!(BpaYj-zpzLtQfIiR2HR!j4(D?Yb2vbCztTE4xyT){~-6#ORwod3^)m_s;hz`UoYr*8(H)Rz!F)UGS6lAPzmIBEI0s@_%nNZ(NG(yU-RyPkq@-k0%}@SmX#W4-H6^G2lGgqCS%d$rH|u|^vMd1M z7n`$2{2_tD#33vu20YKWVax@t7~O~i898G)E`>-AbP3V_WoN^~`a%L!-~Y3HRPgI1 zyiax_p*u=PfI`1k85k2HB=J8GipuK$e5`-OL9IJQmI!elkn}DKzzl#P^7&}BN*=z) z0YU3uUvs(kqX+YdYYOz)G?Kx=!3g^jwz74Cnguum|W}Kk5h*VtwPa$08kQBJ)*z_k92K@Gto#zaAhV^K zw30Pq6v~xhrR!7i&fuT_LQSWZh`#RZpR+U{XG1+yj>&K7ex3-yxOJt-QqXyMT{p-J$npM#2yVN@sUmSuOf z&RF>HI*1k0YX-!`#BhvEa$$BpwgmkmSD@+zfH9D4c@Ya#LX0D^q;rA_8|%8~Rjv|l zlrrk(-}6z%AGds-cn%e2Mz~R|>QRe*zuYs= ze4PK%-!TUj$k@76o_v!Ig(6(w%%;4>gqk>3Qxqv?Y_-E!&rh!QWdVNS%;w{PT&;9~ z1;s;p)8#3EJlH6d+Ab4y_kN=1ZZx$yH15c06z$5b?;HH^vSCs+%b~z}liyza&%pR9 zipGDB^poL4Db^=uxYwwAEx(HiQ7=Ni&oRB|RwG^G{JZM?V%RHY@!bEU?Ld z?2a@rkfL#Rl99xVG;Do28a+bB_vWZR8HYVezJ_Xlo_~(-6=My`kuYWvt#c@P zS6D~%)~OpJMzCYyd6~dK11@Vy7U%!-2XHA=f0+#Yk9!iQLs+);6EXP)az0|IZ1veJTlWI%?Rv0MLsyFV}dzn(Gi-*w)-cda8eN&#vs3TYieNQv@^ za>CXPbph@u$w5Za&`7 zC|w9dWEg7x`)0C6tPl_fmskzG4vA;|1~L za@)Umy?M`^zI^gCM1A-CGa#|IwhIYnHo^`>KkXP!L^a26_w97cMmvxf1oL7 z&+!x0nYMlFaO1?RFW-`kd*|eccayYqVMso|eLSgnt4_bO_O*9jk7QR7^sMmD$AA?^Fn;E(BNC}Kb~fzMwspkf%S$ytUN2PKsuWMdnZ|jan?Rk@=5nq5>Ao_%)oSChSO?_ru^9=HSl_~b^>(Xsi zC8_HvAHK!YFVp(_;qUUim3?7N%JxDn{l~S;Yf0hasSJi+)Gjc+&*d|*{N%$$B+2+! 
z<=2h3)Hd`j__tDNQ*UIPH9oUgI3*+5QWG3 zIbiM@4wqRk1Ca4u1i~B#GAD)&jE{m@@ZII4w{fnFEwTCKp5iHFz2W*4W;CO^k0n)E zn#S~nB`h+vx1@DX_&l*lmf@y&ejZzh%hXC5?JgO4l$ntd{?KYS$`)_CmCi@rQMp!eNR|X zjiSxrm7R>8d&qpRH89m!51(JQQwHepSCEivXV0b{WK{IN{v=r0n#uB+aTFEZMXN3v zJLl-Nv!0v1!aq~{9PrH_Cdya}$=Td=1c{@roGoy6q)xwsub+G6`VJq-V-y_N0P$k;6odg`^)B(R_s_$)Xg0m zW^_GK32)yvM`z{F=Hkz67?>Le1)ToHF)2!M`#YF*q2jQw720l}h;9uEsL61#{>0ny zuMB?sc&*!PYv37OTQiz5(*NzdE7Mla9CZPgh_dwJKUiz9pd_*5RoeX%@&g_}c1hs` zkoUHTNW#nWo?S(5PF)g20hoRbUja6MU+mTk!je|d$P6r<52{QIU;Z9Z#PvJ$dv8AG zEZyB5iKYWAp^(!Sep}rqA78wEJl89t{+E<;do;m{-K2IoWmsaKc0kWe{lPu~vl)pR zQ82kmTXBHcONm+YhrQJc#w)D~gD&3$=vX5|Q%TI+M(>LYEE|)^`(l!75UWgNF?`2Y z>vFDE5gScVvVRmysRl=s1@qiEsxh_x^-^B`BKfJw_C`Eq?Xlskj```C zpvL~=hZzIs@q^{~9Qk^=t-5RQK6>46v=Y63MT6fMms5^7IKkm9aB|P&fBT8be7{M7 zzOe5m)|wDXvbghDJVjWIPyRB6tvVVDE#G%XNcjG&mM3HziOrtQue1d9kOD>CDmA&F;*7Ejm6`*usGS|ESvlM^7$~vxqHIxiV90w{wt-^X zk-LOeRuv@r>oURd!;7MKL9KoL6WuCUm%XG)LIk*pzjxl_=MQmsIXQOLD>+$yV)8=EKXa|w z)4DgoFqBU9|!#1YWSMH#%9DQXzke{sw$F54kCorbsbQ$eo}oyDk74sJqJz~t)A zU;^1t!sID#n#)VBZ(7nDaVlrRQ4%JN-4nv|jfJ;g_f9KlhCP*~@pL0MnVh*XwQ1m# zF->NsP?1VLI!H;`EgVxW@4g%Pk(WEHPE(n)*^-8Bvs$_PlxOAXjEAi-p|q(!_Na^0(|rXdMpw$%{YvMkX+u18F79&oJ#W}- z`ts2dRoZ5iKXonU=8h-93x7E6L+L4-cv&Pw#HyEo@{+n*!r;Td7WXeYT=-b z>-Nu7d}wyZ+!zO@^~n^4JoV)KdwlMc}2F&H?}c1 zdv-ihAUx0&*SMueV%tMO3cNWinEqFp}+>m~`}nK%&Jn{pewIE?g(d zgL^J*I>8aE?BF#mnVb*zkXwd!$Oora@%8UnNARoW)5@~ms9!BNUpXZ4U@GwVmAT%l z`a8;UO3NkOv6`6*R@);A+pd|z7T2(4Y!hAG@_-ODPmy6*kL7@I@bOCZy!BAIz_U{B zv%_A=vFBv`mW&OrBC31U`J1EfCRDE|a_stQN-bxej`Nm@RoYoIH;xT%ZV;F96)8m* zGBQVHeHF8Q5?xD0xp}&7{d}aNEV5v4s=yrL*$}$bo8oGWWMbThE7uoDl6cmNx{Kyt z3Y~MJ`Sl%hD5_z|0o8?1+&R8B^SQuWC&{@czL%Y3IyW4eSc_j**eqGEmJ4{ z{-Vx9{2ga`9Za8cU3r`0<3X_*L#^KG!KJ)Z`x7Pu;X47_!dN09HW7{Sdl#B^-uSD( zUlYKPC@W&Pf#Xg$D2B6jPK3c=5>NUWooUFKIR3+IfBFhRAN{D1syA_o0#uxNzM3~V z%@me{Ol2#r8D{d;6jfDU8s&A=*$98zW3JESrAZOKwvzMYdP4cNB%YegA1`jN!DA!uSA&Le#&%HfH!6a=S%p*oUa*dL#7aPQWM535? zxp0mQ{Y2TeN5{Y3N!leWdZu&Rq#{J))Y|ysbzAgL4co zllq0i3C3b%7IS^V-dw7DL&EVjQjfR;Ycw~o@X}g(^6|#iD>7%l2~ZI#f|wud<5X2u zYBW2$bAF&~@=UN9Y?6q>4(BL7|N52pGvP=e!Z}UQ8d>RACEHKTo#(yyo;>Khz_fsU zlW!?GmCIo9<_Nnmfr=H$RGqgPVWFWx+L;Inmx$BI)OZa;XR%LDNx>v;Ub$x9%{1qV z@N`Lv?{4AzJC7n|maA8_#)^d_bNm=R#dNi-rui5%va;$Ax|NEqpR_HvPmOVpI&(or zT`x~ZyE~Jdf9+~{|1Jf_GZyy{4zZenrFJSy$%)kAbq`m}gPst5y*q7fRBG=gZ_pg@ zhk3+^&(;&x3ON+Uhbc_)(TH;OSDly`rPnpyq$pd>!dN!KU~{}oMOfr zzhYgb5q*kQNEn~3{26RWbaB(`TC;9-gg>J<+Dla=`rTKlmr#UU#0d{OZb}@FGUeE! 
z9se?!7F*PEmAN?gjqFdsZC@&y_6qi(dl7`sy@;AAux5t}4nt3w);8taE;|_;w5-pu z!IG`aF1^S~p(or!Vx=w>6Vc`WWz9#}dw02=YAJ~*;oNE0YhxA%pMG!3OIq|)DOMZB z;&1lVSiYZD*R5K}QJIC;o#bb|Dh*gkc~WoLK^>m@RV2tv=e} zqrK#7`|^>gXWS;^aZ^TMo> z1E8ChuYh6W`%riEje+N4EDxg>n8)$-O=o>cv6}YPA;&I?Jdb%@NoSSRp%_$rY?Oy) zpxM;C_^bTpa4+Tfd6YEnjOx~jF!Mo`U7W%V=9FJ*{S(cXRZAN3%}}&?pJoSTUP+7R z*cyF{#pSqkFsDkWAFmrod{^FKedv1G4>#X~v;|8$7TmyIrf$KVj*_MUS=YhptM=tL zEDSazjYZ2|9cT8>E<6&~jehsp26K|(gFLrMDsgB+_>24@;@=GAp+a5bge#4N6$u?z zwP+t55uvC zsLH1#$IHo0d|)rI(_m#0ouW~{>u_do6?)=Co-Pf{CGOo+ zDdDerV!T9()SCO^;+49I9a;@r1yO+qdd5eN#T=%u17@ys(r*SOBpg`XV!{w>S7_VZ zSorGtbJL`2S356JXBMxBJ{cYCVofd|?cgFiODonj`@?CG#Y#CP}B z0=i4S96`q`>nCNxy059))~i~_Q^=eo1RQ|D2V#2?z8}PV=?Ot8M>wsK*^IEVf5x3depNrAHNKOWp8~39KhpyqHYN zeSPjY4i7c)6U94wUSgsBey8!9As=&os7fE-a5uV z$Bxd<$~Z8ZXYpEh2|d-n*b<5@$R{L)6|IO7glbpUA+vZQH2oodRXnv?ty5o&lVM2X zdVlN(J)JAA<8ri~k`pYW&QhI$*PdU{r!}#fCl_=~@u_lB5t-={#}-=jQAxu$59W6j zNwpPdaJ#;3Cm2v5e1PLh^_PETakh7lTbj5@LA!jLv00rYy{&IP7v_ zDM+4QP_ezdQx;ZIkTLUx>V>-WF;4x*kl69%B!&(1U$jSOYq=?!H8JnrJ}xgT+)wxA zFs%-ct#{LXcEX)QT>Cn|sDt7;0h1n&e7l)f0r7N1s!$ZX>6c2*Sz zZ*)`W^b6r(hL{S=$?5Om@w;*t)UD`8Z#@@>>vF9RQ$P)zmJf+~w7bq>4&N18();vV zxxr&wUrjt^Z(T`^botS%<@RRcnr@Eb*XC|JzD2V&Yn8l;zww0X!&+MDH1UKMOA6d* zd09=P+1D?RU#1W0x?&u%!{@?1>_8!n8$`DC+(AIO{#EceR(2&GmtQMRXX&?I(khk3 zRU_gBQ>b6agLiLL)fM(ix6Bqab-W&?->8cYzH9ktnU~-?-A5v8KAzilD)fpjx2{s3 zI#th%vyK^>21-%0|6IQkAT`707i0FEGFP(Q;M+}auF~|Vk?>PqpP}C=WRb;JzlR8I znwoud(_w7+{J>VJ^yx^Swso~SvsgX(;}pA&`M2nM7!9?iaTgWjb)J=HGgPp))?C~5 zC3af-`O^6^zWJps!a?RyH9t!&#V5S;wdE|P+KBLPt!QPs-z}{p_LhWX#O$ zZypoZ)Q{z)+RXhuCSyVx+#h|dU`8OrUbWC!@mGO)g`G&4T+?DfNz!hveywE;-yH&d zD}5|HkumiFmV@G%1dU%YnD_gO%Nc(Z#rS$!EM=;m=!rSyEPl&WTwPjE7`Jd;v;F$* zp}4pqS;V7V>p?{k^Q1e$@2};59b&dUzvVhdLU&NYpjYa=PtoyqT~?WxY??_)t&KrrHx{c?!4cZguBj?GBT3JiYt+ea{e;>&30}J1BV)exqxW z2{8c;8Uf|9Y~KEF9|v8*BPTUA^DPX0$3FP$yvDE7-xHjZpLm(B2yyd_I-7;L84qCR zo29*%4>;^*eZfsb@OZ{e@DaWBBCi-#)P?99H>s#fMvjF!&KJrZMTFXv8pi5AZV_== zP8tAyy3P*m1-ZadZM=L+G^-+znhP0=Ow{o^zeCWDs za<=@NH}r;gx~2;}^rnaTJGh=dW0j=a$Wq~0Mib;QBpFv>@8R<%l&K|L!uwEj5@kr} zT>t8lc#1kZOif64`NF1OaGa!M<{#%NXiS1!wxpaaqP<`_ zwWnbN{`-xwk1H7xyW_dTI&p9=O$_n{&EN}rm33Ct-EhwepKjr7)=&{|Ipknm8mSny zuF+z7QrCZ~RB!$nD#qbx=9JOWSN^>5%ry)-em*9cVK+BUM61tnPcji-Iva0aZ=`?! 
zS!bzL(U>d!mm9MW&di9D-OmKBj{ZW$et)=jVX=~08-Yk@q z=>hHzl6RlK6?>S%>ux~(I$6I+Cy44tsF%JJ@m!=ezHV2^6U}DohVvdq?`-84z6_9U z_^aRim47*BF15Dx{#dU{PNwrs?pyBRiYes^>%rR}saLDNpWUV9+ty@>)!nQ!iWc&b zWOw7Skli1~B`UTRy3a~RltoY5nVy{Yp)x|G$u9lj5aU?uu`2)RQhGx2rGSi@+jnMn zrZuBqdcLpI4~>#My^h$t~1&Ll85&UrAB*VMnY}?@F_64F+Mrre^Tt4O1oi zfkMh(xkY}ton_;>&s&?LwO_0`mwvQ&Irnf0DZ9Ux*5S_8myP7k*?IMTiQ|0P%4P3+ zm*!*5B5%?irHmBAa<+Ty2G8g177C46xSs6ZLv2hrBy)eko%}uFMKQPE(v1FjRr+eG zXLQ$%wW--e1yW{c9o=xyd^vcjzGbyCKh8}K*M14icM__$8nI6pwmk1CE7|Um=3mLn z^o+T^k#wFWIVmE9|4edqm#9?$+Fh|l2F?rDOPi{^lnY*CT|F=f*!jgupxY&Ms)6n5 z_=L?@XAy(!z}s;$i_vUh)SUW4R+kN(Px;78sz-Fvl%6kN1*Lp{QnX5a%R?;RI@|Xo zmVPhD>Er3>a=|2F@~<-@Cq;K3JLLn>mnq{9TEkCs8|m&c7ksU}rE)lafpCH7^IY+7 zR&Si=XZEX}hb&Xqy!xQ*r+8)-(=yb-R3Kk$aLe?oA&---n7GWOSfq-=U?(}Lc#2t6 zttrk-=xWdby}h&5DodEv-kB)+#U{={do{y*>Hy5GPjWmj81o6=)r)t}Bq`+%kIq&R z?zPeIot(*QsK_lea7xO{)*G@L`3GICh9s%&kD|*5~A=Jb@~h%GRn2~y#xnKKJWO>yfHYnF@5PDAAevCXljB9 zhaY)b|EWBU;?bgchUtLZRO3?%SEffzt7B-YsC+G@+66Z3H<9n-tw!;)SM}NwT`rv_ zKGJ-0kUVfB?2UClR;#793iIU)n(p-Ol_^@j9|-Y-oqy>)u=|uQEKQ3Ejg9{y!);Cl zjU6K8O^c`3Cs)Q+H(4s&KAkXCCoGI;<2yLM7~&OsscLSN7Z^1_C_}_Fh?oEEz!c|U z4~AkHD^?;-$zT3|5P$?u`q1be&&k-ao!Dnqtb3||$#6!yDwmW~HiQ>CI&G0gm}%XS zwmjDJ_4H$SS@@}t!@yoYwvX96IeA4)^PS^8sZ&zZ!?ug1GbZe$he3AC3jQQkvqEAn zEeR|%VGrdN&(U_%UTDhQQL4^)JM@Hx1%H7jylYxUpOb>5RY<>F+ZGBndTv0_{3o^-{@Ko zmRdfJu-aqkE0$3=u3=$K-198d>7{5)MxDwyo#R}r@f6W}7J!=}hw_ht_j=I7F;riLS% zl?53XcvDmou++|)qTEXT-fdqUXOcvL&0$JTI9Fp>jNE#suKMGjr#I*zXQq^s#Ljx- z6K8118H6RcSqw1O|7^-BwQ|KTZ4xL?e^vkeJl>=0`*M$N^U-m1W_Dzp|+9 zf6R6%G^PcIrS{f{k7g#BfsovK%{;r2X~tf67u6i3$FA(LiOUyX(T=#WaoS4J2``$_ z?v0VUt)%9ZFcYjBWqcQ@D2k!iP~#Ny^~HF*xO6^RV>pUd=E}^^VWPsjQslDnuSZm0 zewL={>3*V;dHilyt4RP&KD$Gd(N4%4~dy^m*{winB? zLs|xJWT2v361Zt{%I|T`UGiheH9dIU#xSgOf6)Jd6fVYGlvtZhm&suWmbTvr92TLE z77h);`->WEAAFq-^2syv@s;TPQEvB;e}$^jFFML)}{j z#kF<&yGRI0uuZVw2^!pjTM~i=2*HB8yN95S1h*i;CBfZY8h2^j>Bbv(XuR*rd(J*} z-gnFhO5h}vneOC3wYHlf^{Sui zgc8rWE)9fCGwNcmJ6utpH=JyJE^Rvfja`*Ms8X5hALbfpqMIoZSS|%K%uA(uDgM{W zM&V0@5DO_5Mf^3Q(rDSqDL+4!L5G#9vBGu7iev#Ia9sk8`nyQV<5B_B- zKRX-17bsqa1nw@6h|%lvi0u55H^VsN#51jqlf~O|?*ZanA8)EWhS~TTg7)=4-3SN| z@e37}I4M`MN1t;E#$%B6F^aYCEB)?KAayr%w$KijPDfZ=(wtF17cjwy8W-5>{H_*UKxcw+b6x(BVGp2qtv2C-| zN}|W%GlM%`@I@X;jI{da7P{~$a;$(&)V)r$ssLNVbd@%S@Tr|ZLSfyh+p~s+BBN6( zherWr%2ICL9C9?Gk{!|$)yn1u)Q?ws_Aq4H#YB(isxY#*RBFb;7BxSVc|Yda7bN`N zdY|qvtM$d~g_V^=wY%B!@Mp6x>Qbv{!ldgevI+|3y-O6&z6d%tUZi|?iQ#fD8MZ3y zd5LiwjfO$yV9#XIyP>Mg2>E4jADknap=j29DoQ67=ueUuOFZZ5#v3Im2=UDp@%-Vb;n^INz}X3??g`aWt}VS%<2-)`Cd_L{1$4q0jx+$i&y zR@bbQU-dNHQNBo_;O@;gN==c7aejH(=R||%7HI=}id7_{@xEh;!io6jvEIr7)@H5K zFbC=Chtl>BV8^2;cY_X-;TsTVr<-ky77P!2UF_t{Yy*T2l@p?{pmSjFl#EYS zR?KyUU!8~vm6g>Xm3;ZefXmb?tV&ws#(%3?=Us62C1)csmU{T?7he*bXk~7_x2Yn& z4@v74sS%rmlo$I_AD4yk)l9xVuDXD3ieTb6xkQ)B!(lJ(uVgsilyV*GY$EIiMD1m| zokju*7v=I=www95{N&TBcYZ`C2Cy-tZ~?y-Bs@5WP>T`tMK2u7{|OUCy2)|%_ytuM zlFC2D@{P1o<>Lt$QdO=JoH|N(UkeNIV3lTeWIdh5hi@r2^o9=2ZNFg`K1U|ne~SnO`}0nB=v^ysC7dp5>P|Ai*E@7Kf3;`$jb!%aDsnsl zkC{`|$D2-ivQlW|3^85FcxXI#gA&Y?dRCf{!R^T**8 zAj##meuKf8PrS0EGCI*bDO2@byoA?~)T)QC-nSMqJf{78LQ;N?xD?yE#GZZi(5Wb+ zGLBEgMKpzAasYNrW?k)3m08oPG{3EgkF;q19yibPS1De5k)^`a6xS;G}~_`{*Y?i@{q@Sib>LBvYHMkg_OI8x)o!!loq1Y1&<=g}KYnU5oM z@DnGiuR{HH38mWo4{A&plO?||)<)@XOM2e2wUy0WfLc~A?b}(`k+?RGH>#V zX?k=*eCBg4Gl5(URY=!F)=CkuxSxFX+S?YR&fv&0-QbhqLpXa{u~S84lnB6GW~pnG z6yapIG4gso4{7t8j8WXOkXW#;`w^d_@KEGL9F3RC4!G*%-He8?&yy|t`}ULUnLnqt z#4r@H_u~&=(|EdANC3SbmIUg;vV>?cJlSegI{gfsI2nsl0LE(S<5csrS%t+lnZoaNa{f4=z$67EuZ~XTYd&J(Q$66y0u^F4B@gwIC!ZJ_L%$B9NNuh!QCv&q 
zpNsxaK1x-HOro+D?F9d}9#KrjuNo=98?$DzKM)+n2%Y0cql;+1Gp(ZFYx@a_x!2sr_WFIR{q!1c z&4vOEW<%XGHn(ISaS=#18aF%kKyvGQm6DCN2DH@qieLZH%#d>?Q9_`dY#d!7w?1Pn zec3O*5=EHqqu(aibJ_#xqqlgzbh-%{-~;`o)b0R|d9*LSa^%{NxNkQzKi3Pv6`WVa zigQBikXbF;%uDU6pH8#S%s0ToOF(OQU|?|cRVeS)Qu**7M~?k&@(Qi*YgCDG%B^-E zgQ<|$@7}go;76ccBH6f;^4-fnCUVhu$?&opdyLW*=nhO(8mHm~&V2NpI_}2*e9Mj* z$@8)X;%6)Eb!cAf==gUB|9*|}`2}=K$}0e02`$KD=_0xc$nMl@vj4#O`E_#x7Gh^B ze-2P`PbtdI{c!kSbN9dA-oJx9qQsU_)m}lH$+)(iyC00_n7`^jmOqSseyE~e?Mf}ey{w)4CC(rq1|-UwY9aI zVFBNX<~O{&_#`CeX-maB9wa-S@iQUkINm3El89oN%t?|-vRodZ3#k`9Ekv6@A0~z_ zdhXQRi)CPwvlC>0PLz_8@`Cd%$jY(}75(vfqkl-5TMStGPs_(hj?`S*!p;S~xk{CU zfNYUgP5ohQr3YrH zYr^t>k`XceJ;eUgmg2`mDIdrD z`!xnWxrF#f|A!F)MoEIu3rIfwkApJn|6xG>Id;LXuVnslR-q3a(*JQvU`$>;`@d-_ zE)byjf7{f5T^f_L*WR;$C-H}`77+(r#(zJA*x1>lI6&7!*c1X|2;C0p`=P(xj;jCs z9hrOpBmM^|=SM?B192OIzSo=I$37C^Tq3~T4M)94`KgW|;J$jk4km9yVEkjU z#C#1856`Zw92aPWS=iXvIKK=Tm4u8p9(wNx_OcNPqaB@diu66G z^#9ScgaI-TK%b)W4FH=(RR8-l`#>kJ4{TBJXbI2EY#|u)Z#5JVM~ED-OUW~_Q_t(o z#A^U?I|?9>EG;ZJM~VNQQvdzh1^{~2b9?~EtF5gM$f+s%!UgH0xGUFJv_jh^e!ruL z0sxR=i;9^4x9joW-{tyzjKAX4=+LOsaRk(az(4!PEs)d5BX(6#SQrX`hNa~_0Van# zfOetD`{&J@!y|OH-GwKgwv+t0{7|63}wB77mzOQuBX&;mKcaSI4V7>`hzL z$SX{M*uT~Y&sYcauD$d44+pS^NT94oFaXS+5hzywpM;wK?7U#z$XE2mn<54kq+E2Xy-YD~<}F_8YI4d;zpLVF2M%MO%9a$not17;ll`HC8~)Td&(i zm>Hly>D6ov`-fG_;a5IZLXN=!uZ%*V9q_s5n%&ABzoh=5T5bTWDVH7PhvQ)J7v}YU zs*tx6sp}Y~Em{B5j(N~xP+#DAGym5m5{6ylBJbehk<|;Ed-_Nr0Fb=`u*0?z+wSNA zh$tDr{pVqy$%>(scA8Y+=K?Aqu9xTm!KQz+gX!w`RupCYD-M)tLZs$t%Z%6*Q{ASa z2KU4{5l6Mrfsz~8Su5EOzZEg4H-VM{O5kI>bUx!=>>zAXBb^e3GVslA$Pq6uFCe!G z(rx}KDbN3ho!D@Hx%e-8SnWZN@$U~%|=5r2$yz*{*}X+7n#3Qia0j5=eB zT)Oyy_T$6Gxe_o)2H0X`20Fj$0pAyiAHnKI7W*ITzcD+v^c1nS25aRxtBqO^X2j+Sb3b7slSG|A~?wKxN6JdIY9)= zxm6{iRa~FN&)7N@RitHcu%tX#;b!ACd4|Yf^`2P%WrKaJrHn}2p=dO<_Yn)-mC)Oe z!z(4eyZzUS<=1H*Fkd>2Sj^LGwM3^qBKQaP=GuhS$$PBYj(Att21#6m$|Bqx7Tx77 z5S3S#ks-!xhoZr5z1G)u_%k|m6I&I*AW~i7d83eAT@uq>9HHWw4-s*+(ZKIpRO=Pg zT_03!IPhZtdmZkyBBsv(J7Vu4b~BmzV3I~S4LlG498;vD6AyqS2PVEBP}?M16BrHv zpf=y=%y^49;vbFG8`RB^uri`4#1lLrGY<=?>ABPaK#a0FD{n^e1#QX5$Y0-s$QzFE zoEsiUU`A^keU7(SnoAGwi3vc0QOL7!)kCBNE;)>KGrX50qF9cxz~&WTMJD9Z3tj7J zO*e8FVv|yW&vG2|LR`WLGOH|iJmtZg-n)@!XbV;H_EX2NI7+M@$$KH9=KQ2r4DYM; z3{a{M8@mNmKA|H<1Ma?sg-arh(H(d*ozl(}wdsA&d`J z=6|7Bgh{tU<;|zwcrl~h2*^Pjwo$Jb28ilf~~=@c^=NpFlXk49f&S zi0H?F1sW8%9VWI|3O%jN%i~%9ZN$8szkZUII5bd(`aw{qMX0A<}lNTo3OX-Q7Wh$ zYWz;hA~ic!HnTeIqduc8*SA79q5kxgFy5r7xunV*{~O$+5E5^nKcKcdq=CW(KhZ=dhbA)TNu$ zM-QjJLDz2Jfer+_p&j?}IMZRq?YlD}(=H{S4DSftzSTkZ#EPQ|&~g{d;C7;#RGv09 zkJ_jU)-_YG4F?2+OkTN>e@Gp$8|kc zMT9YjE4g)Ef|rFnukD7qcG$Pdoc^_K7TZ8|(pS%a(ltSSj4%$4tTwv2t?b&k^C9Lq zxD9apf;ej`EOq*UI54T2nIeNa7b?J4NupgK(5Vl?-geH2Q2jcZ4b5+RU(6CUj_fz6^n;mhhhtaH9sjJLeG`J?ctM0WnU-jGXx{cV z?2I&&2_p5|PZT4ZC56g?nc!LvD7jOSSJB9oKJf=G0KM$kEs{Su0D#0B0M;;VUgkIu zMlLDNX1{V&s-Ay0D!{XnX9RWA07Dv-5mhFHN^0uT)5dlEXby@V&EGpOeopo68mWHB zQ4w34Huu7d6z3n^MYT^P{ zRPFSZvaHDIncXR`#21chs-fGC{@T_oU@k-u^*@Tgf_3Kr6zrUvK*26Wx+d^+Jq5`X`q9PkYhCp@9kVkrk${ zhk)2GkWv{?V$B$7%s1=O2o7=dikZTr0G{b z;GoGT=#bSirXo^=J{8~H2Hgd=!Qh0PEcdX&RrFAU5GP&gc&-^k)FX?P?C4JOw2fR6 zFH}3Xk?Ut8>ik@*WqwI02>a->I>)c7k^8zH5tMHy6oYG)j1#+ml_`Gkh~v0(y{X2O z`UEQGjvBE}!KAX)Cg}?=ULZNdpZmH}4@l{5%X;;CH=Qcoj-a?8D=RAj*WGx>Y7{zp zdM<#D&E6(*gHj0;z$EyC)^@CVl*}1j#RXFuyi-EC5a9blP6C8cV!pJvnox_+$J(bY z+5H*uWjx*9f#qJ4N^a-rqXE2lbiM+Lm-kAd)<**`?G@+`AImf?zS}`FuC@_A(3b3w z;=mqy#X&oUp*-~bP^)8tm4#ShvCVzhiz?F?wI$xxT1>-0dNw@wvXRjd?WwbmoYI%~ zuVpx(5w=Q|9HPgB)mCyEZ{vVDX}*g}&ca7zfCVE}L7P{+-6K$wY#GDze<9fvKOi4# zD`u^qcI5ferT?9*oM?Hd4Cc4OIUUrbq&Md5mJGjoe~T2R%S(8pLL^DSUeHD#xud5m 
z&wu;fD1bIEEA6@NXkg~&-H2~sW~Ydr8#Q*=T_u$jD)fYNwC(OVT&Lv0dNJWduy@zd zaPkwS@m?3ma4lp^EF!cDZ?y$<@0+OzO5{omJaF69zd8x;>rA+bpcHoj&PoCURp$O` zt3#M=JRES<0X>AQFBp{b+K%_1)ZsM%@zd zL*P<+B#HVb>~7-gmw4uvsEF%)nC_ak>EwBZamJY+NW!)iw^OxIvx=a>=r8eB#xtb%eJjhYJ`r<(#?o|~kI7F7}y>*|(iN~-<{F*}`Lpm`>j`fJ937gOnAdA@SEK1*KiL*F z&!;ZRK*}YJZFSBXTsPjYuB9N8I-p zXy)#LZIu)SWjW7Z(xwhuRDL;;orBN$didx=ol)ayyg#nFRpg_6D=hg^s+}W8$+Q}1 z>)Z3`GKtV^_~T~-@8D$F%$NhLPh~|m#?izy%cXvl&JUdpo?`0|#>WPYE+`Bu-y#zw z`KdPm_b`xHl#yyE7rg@esX~>`u6Cc3rHFZ7y(4M1qL|6}#f29#8FIa?1FH%EcDUC~ z$44-eE{F4jh{y`Pw{+R5J>%81$pysb+U`3X?ph)9$Kr;!!(4hdwzOmYGv?xvl;^Ws z(1@s07~~Dg$#7FKWtTQ@n#mp>%=G+9QM0ugpWn}!8|6qEXaC#U>-C_vpx@urX}L#8J-(f#TpH_N)K)y5NHxSl)@BS8N^72W!l$V4?xS{8Y3&m!%olh; zrqaPuOcs5$&vkOw`@FrZ*^H%p`m;#H^7$&De|hbd-eT+PjG_cR8kJmGc&3Awh}-U@ zv~r?PULNogk37Mr*U>=v@Uz$bOl_AmWe~Z1gth_v(qiV(x>W3E)VS2((ij31K+u96V_|{gBFNuhz`pBLdd6wDFV|-et z$b5`fHu2s}{upUzL=!qdx>5GE7s_C`x11ElTLJweafT~ZLF%)d{XjSS+%O^XgWoWj zVftPw*>X4?|DFAnC2tQF_xst+|5ihD-e9P<38i;;f0$68l?X@;3+DEp{S5uo)tPw{ zE&o!JXfpQ&z^kvCo#qCsLA3 z!$)%So^R!Ia575$992Nk2=x6#6iHozw-`AmkmQ}S{>Nb1s<+_Hn zTwM5jYul)Nhwy5zJqUZaGRf)VJmZ3Z$k-e^bSDB%zNd^|n(qvD!TIrzl6Nx9I}Qu+0)&+Z+NOYTPTf z)$X;pZ^^c~+cn^$uBn2w?Ss2$=Nj5c(kl_Gc%2^dXktRcw0O5@zCJZPH84+Huei1r zpt=fD`kQK>yL_uL3a78OGe^{Xwo2Nx(CrW!uFjEuNSUx8g*G5J)~+}DO(>ad8V3)=CwB{@IiM^8HKe1c1LT#gv! z7yNe2Ju@t}*tSG3vJZo*AU9LFH zeI4yWZaG^X`UIf!I7q866$rr#(tS_SsG?Fw3Sp}>PqXdn4rw5~APyjTO#lEJ<->5g z@ShS-r2LeB>Sv-f065l&>KvjA(1rNthV}@u@Zh0vr3G7TmDyOknYdGY<3$@+ttz@M{90UJfQ-S5KtP!VrA<$Gy9*fPv2t+m&H0$0Rf6sJtJgV@)0v-~ z>p`r~_+~mV9@?w}f)|)E1qpiN>O~hY2#>i&1A*9pM%PSXMZ{n=Tl5qc_?3>;^6$OL zyFl{UOByEz4P*W!z6PdL^39COBtSE-eEL2YIr5?ncL^)wz&$5wAj`m;zSeLN0pt#X z+W4JrsTkeYV+XW!o;P}z1E+X?=|cgo0DmZTvp`d&HG|AHjkueV~Du$!CQL_qaO2h(~x4|{x*5RC~yDkz+7O-iF45XUw{#&?f| zfyM7{V=m9a@;}S#z2r)0g3Y-9X!z_H&=Jz;M8~)N*o;Xlg%w(nK z!3*#zHd82+tBo)vYYS*^*BxW|p`gr$){fha{jQ$^TRE|nTCJ^Vq)YK7uY1vRAj$Zf z9ZQ+>s=JSuRPPWobr`E}rcQ53nEf7s!sOZh?XfcpGS9w~T!#OVqayMEC(7yLXy_JruPw%TDe^ai+(Y3FXbD5&P=R z3|M>LDZ=WS1;hj@yuJEPtvt!`w8l&~t+S&Y#001omlPT>uV<}~SG1@#AOX%D7ba}% z*>=C|7a2emDx=?7^?%6{9ta-EB4hk8j_siM*u*%FXM=p8&l8vGS&vp4=9&rGdvOX4 z4MV;|+3f$80iQf}FrwJ1}p_2#F#zEf| zZf?}CdwBFW7vyj}1%T#0^w(D*Y9R}{FndJpMDqH>o_UKC#fnWH)JEfI2`k9~uAVhT zqCJtY_p*H`k5N$>Pe&9;%64roAQOBz)o-*lrNyS|FQK z$UTD6cPFDmekZz>3e^i&`T>D8GjgA1YVV?sC%Ig!i@)=cF`ECR-fgX5akdwddOU!S zK~mQ<)E2s~j)i$VQU3UNqZQrZzTEMt{^yDH>cw}CoEvw@9-E7)EHXJ3fE%(9Q!-ad)n24S!l#3@ELUEut7Y;`xvG%_fKwl&i&S z?o;mB)1vc@QHv5yV&HH1Ts$1b&NDacLY*M0jRlJ;A0KL0*OOf~^a{3iuEF<1UU@-H zDHR88nttn}-4&+4ZwlSx8#+WvLsIIk-!Hup@jSF5*cgpJJw0rP(KxX3x=z{jcZ3BG20wfK)RSQ8UfJ*HN*a<+vwx&i4U?Rf z!&v&Ozba^?`^19?D9W61osANk)b{E}uj*15ZcRcpYtEAH2T6rFK~QT@lc*&4 zmBS^G@@F!O?T@rDq%aGQmkM`cuut>uY6an;%+t$(KqW{$$MWzida9+|uZJ20dmJ8V z3W|hl1I$@#3NTo;U3F`EB&+vi8~BPu7k(zSB|{k z1zT9134Em7@I*=Arw`^X=fFv#emE{1nmRjoJ5bFd37lAGCMg>^^G3zZc>V>WTsK?& z+i}8sd8{EvjEVddA*+mm?ua9*ud1Z8mI~6B`BZ)K!t{181%oFcrVnQ71*3W@!No;s{wL#IltNj1gR_rQ zv6AXNpGS%M8JBj{3TD2Evu|C}K57SZ`K96TcC0%$GAD_XwM(v6y>7*?MYKd`w9^r+ zK&5iIl6Fk!i8{EIF-BAR=l2&ozHo3lKHD*b$o()C3n#zJ`;D2g5h)A~owVdi`Dsj>?z!{t6ZfwRiVTBC0&=^|iW%vHVAy4rc@hlxNghLAd`jIs}fyY^P^<%Al}_~{F=~2()v+pZs*Sqd&;j9tUU_ndbwCs@0|iF8YPCyatJ+e?QLTrT=l?Z8U)jIV+~|e`=M< z_NyDjnu{yU6!e>S*m!@#JpE~O7lu_~TT95PBp(gCHW1n@_ECIz-LBp1%oX&ZaCwU+ z`y1P(sxgO#U0C9che!oGtLZ^W{AUO^N|H=JbJktun|6r-#V?Xy;msf-j9j)Q9+Dgb zkbp2@&3KIG>WZG;1lc8QdRvWEp z4yFRM;bZ|mHhS-dR|*X;1Fy+HChypn=xaesWGS^o+Vln!uE{b+CMiauNeQ0vv(YnZ z!C5mPjlV4@&$=L%Ey?Gm5YEX*pWkgBR%|+|=zRVl`gDVkAE}+xH*DUs^ypVC<7^O= z-lutR_S$=5gp`i-a)jH1A$eeRav?jghPXXJai?pCE;$k|l8$+J!8@jY7`i~O8qOU4 
ztRTW2><$0zvMfLBYdtWrg{p%hPl^bHoX!3*4z3zw(FNPjnjN zPx~fmCyXZxD155>5dzsB#+qlCxhUUO+CDO+oAwYUh$Y+Wbnr|n_$IDJ9#C$Spx>%& z8eEIDi^NUfoZ)O@5y-lg=DNHSrjD1T-pDN}2=REinIUZ4qHEQA`HjfcRgE?M#b(qq z?Af>^nKiY6xxB5)p4b56KJt=Qwj1K#w0pHJ)bR9KFRhT|v&i+a=%%~3-0 z>f;H9oWNaU-6=3Za~VlspX_0d4zq|sy2_wEoURrev1ZS!fVbf5hlv++3)&EEFH8u{jbF%BG`q$SEBrBuXdWjYjSe z9-ig$vp8qN!rAU4h1J?@>^%nnxRR&}UYFBVX(bLPRDRH(XagI)C}Shg`(bB!UXNy( zf$3n$Y&Hdg0ursnkq~zsL@8@zzM(=nzTj}gY4=3ZXoMr10)qjM7U#}usW~F~RGn|? zTMA}Io$R6~Pmwp2M1B$qb%1&{m!y9YwzTGrOkRGg^R#J71S=i1(t#eiSC_zsV_iv#5X^ZKY4PJMiB@da3Ctu%{ zXj-Fz;9oBoT<%^M+h1c5oQc7&`kgh-;By&(zG&v(0=8%RqdXiZz z?7jMNLdj9gQ-}V6V#9Kzy8Ur$Xd0)RBo;zmCipOL=&mdG_*rUQ?d3~OzYj7f?Aa!vxLs&>@Y54WUq_W*ZaP^B&sY(vl+@JmKGjDc@pWBo zKN<&+rNx(=p)n2Xk9A&C8N-9KL!P%2JwGEmfQ#L#Pg$@Y-na=bZVAYeepTo`$AUD5 z!0kN!r2~mx3Pz{w@`~{8YbGsOrU(CYtDC)JNKz{ zCG^Y1cET6nUhxljD}~IRJ}SiS+PlW$5!>4yy1+P$4m9n)Q+jm_P2VX#%AHCpB;!5B~>BsuK492#0!1wKwxt91u(5D0nzNx$Q_vU%z zi*h+2p4zFQ>A(}E-9$L&YVy$hb*i6^;9pC^;_H0`cuvN>&GRLho`q+ebS$j^fn}nA~KQQnbHtA>hb3zv={B0T$9!u+Q^mW3C3Y?&cSR@x5L)O z;HwFeR$bD*P9}5>PQgp_gY~9N;kBTnH>Vf&d?%aEsaIxN1nPzB2KD?Qu3&+xgi{F5yR6mf%p_4YwD z!yJ!)RYJCB?houi#z3mv8{Twx64~423hY{g^rWtd#3F}Qfz0?M*XIP5OyizdMTR({tc%M;And|n{d2_bK`Q$w?xP-&Z=$guadUuY;W-)uIYA1J=qvM# zH{khBa-sw}B>7K}AYmBm-~n;%NG_SaA_sT;4lwC&PqxZ*}?Hi}LQ(Y{k{Myr}$ z`}c#JIXueuLf<&L7aQDu6CHo%VgFt z6!qGDRbkd?>;7Q)A|cL4jhQDQe#NQ2c>Z_fO)-!N8-+xgOt&)P1t0pQ9{FYsJwCQIc%WWVyx#5 zLAm$hs}EW;3>47F@!+e1yvcW*{&m%mpv5n2peptWS@Fcx{>$h1pD39^1F30}jFG08+q=dIa;vuRAepJh@OSu#CB9rd1ZX74ZibMAoo-U;>e zKT20#bR4QBJC3dKWI~&Q-!A?GiTP>9yC=aIG_45({-y;)i%Yrj>W!~QY?&HpzWg0! z^UR765lL;aG;8lU9RM~nNR+D}%QpdY$5j!lDqJjDRLdWsIFuav`lgW?N0@`pLuU;l zzk)GwqQoc^Fd@S0`B!M-^=Umn;MZ2YusHzBvodt;aMWFHi00GIK5H7{lAZ0wL! zRb~wf)ttqeUFqcF3D5EdQ#n1Hf5Y-)W^+gczXev6F#Ra*Ih|33f3l66PO3XdHJC2h+bR@W~BjbRClf@OqYpQ+M zb2qu++@d>{&C=Rl%1_U1VVXS34{pM;vvP|JTV_!r_vrkSa4=x^r41ti`Jy`|op#Rc zDwRUyM&WY5y2^Xx*S4iR|L&C|o`J)zgHJ24#Ztt+wz`ixHrAilg4cMM(RJ|TiEVI^ zammAz7pJBSecQPlODCnZI{b$M%>t=C!fhSBB3J?1R+G5r8`EnQZkZ=gVHABD%kPdE zFlp*aAC~rcd4=e7=a1Su$@wz+4jp3*&!r{Sw)2uQ%XNR>HqyqH`KM*_o><5dz`S!p zJ9_mNQ701(ZvqtV&fOH2kntQcOeuTd)^#AOYW)ySB|B zCMl_39c;8sE+@p1^?Q^*!~44WY=COHC3QpQDP7G{@4$+)78I=u-JZBMdf@p(f?GuN zNq$>Gdry$8li4%sto&rGpc;)Bg{zM&>f5oDYOccO*J) zKamDrx|FEr#oc4dX=cXw)PrrbK5#XLBHe^KKkC@yQUB#8{*EdzUV(;opcsw#y|Ze^BasSb%N z{e}+Q(-R8*^;aK27P$0!g58}mH!;2Rm;?Zh8G}|vR<>EOEiGZUm4KtrFYweC`2d5z zF4TkwnrC)Z*O#rrg7d3xW{?c4{^h4_M{OP69HW*1IK7km63=kZnlQ*J<=V-#LmU7Ls_! 
zRmNPT$I%y)8d(RDa=@nVW=%RDaS+PyP&U|Wh357%prx=-L#6ul{seSM zYoNKg+KUp_@pkw!1Mv;yfKMb~eK&b2P>8~9Uv=%T#gK#gQ&sgEVl&MmfI9TOreMd_ ze9=v-bPqM21A%iO`MetL!g~B<7r-XsF)bv7^L|_jb=3>B2iJdBP{U`bKTE+OfK6Oi zI_SyG>d2Ss6uYb}Sg0{XRiJ=VW{V3n>Is|K3ODQG>+b%wOz3LlM&7KdoZ0 zx?x8-wg$Q%y{uOd54_!jA2%c21l@GUKceArZcvm_p&bi?aO2q`tUgtu!#;b?cF*N_ z?umPMohXoX=iol>l9l2 zX3OFG)r%^I36lp`=4@|DeLD`)Pd7wV_V1KQrMGPA#x6v35h9 zzzaIt@+aJfs45}MFrosJ>3Sp%deN%v`hV|-IIbGstv5KPyjwvvk&ZhJN`TlUHtn34 z!K>!`xOl1zq495Tj5ix})Z)eM7<v!5?Kh&J)0s1u4t#I7+*T?O>j_nLn}qQ(mx^2m&+Ivex0aeU>FHA&)kZ7}Du zf#LFMP*+Gecr@b%xfz@bix!;HYRAwA`-VTRohX1=n%NA^a86?Hv(VXlSI57@){<}b zPQ!QAakbc zsfXImSH==h{edp$&sNix%CepmUyQ?~@}Mqzk)BqVcLc`Lw7}MajQ;A;L#?o@+jUd+ zf;Utl$9&F9B2~Z}Uh1L!RL|!B;_a-1;_B8t4Ix1A;0_@KcXvoaa0~7<65NB+XprCp zcXto&ZoxIUH_*64<2{}4oH=vvckZ3KRZ}(lkM3Q&R_|K9e7);^pWn)l1Kl2DIXXm# zjsw!Ds{3oyt{%7Pyzj4FlpdnG>)tXFX!+|TDvkF{smR+fKgiKz-f*svu=P(Q@l^G~ ztc|weo|sRFU41aPA=pKwwJLI^IPQ9cU}Q!4%QN$EGrJer4E2OOq&`Wy%4s8f*H<-< zm$|m&79rQH7bC02vmXqn2fVzic$tZ5cs53O{uBd;L1y18eQ0|!wwDpD-HuG;#JX;q zLwBoHIeO2D;s@c&s=061^4Fl?6$V+4G~Fa#ihmdz3TVDA>nfShx+vrQwDXiVl|-RC z5fy6&JNdqkS5V)`F-c}%COc&GsbEOv2??PBhh|3#7YHcxG$a``fef7t`ajft&UWpLcL?wmb@MB z6}&ENG>%_E)BAV8F976*$v(UgQ{wp(Vue5MY?{Oy(JK}a2`u< zUy7^TwVQ#kpsD;Pm}%KSQNdE=Z5EYQF#K$dwVSniCp)1o5z^~(*!L5PwrNdE$*F|L&xK3jCDcK4Vgkk2CD3@YrbB|?gxMF6RWErOD6N7XOK zPaB3Of5NZQ95qJhpEafoedA&JgIUB=;6k{@V_~wapDJx%*_lw@MJyC3c4sC%WOv$L zE++S8V*X**!gS{pzHer+P+~3Fc*wSN+K)Y04`?0d7ucJ!72Cvuo0w8b?b?RH8mw zu&k^P2x&TnGpCV{CH#sfgE^qF(Q6fC`UN*}YH{1XICnQie)RoQX%Fx9?i}%|M#o99 zV!E5Em7t{`P>SZdX`y-%4YxO#iqr~JsN7{n_uD_GLOw3p@Kxb)=1qo{{JXI`+nb-8MFT z)iRoRJ@twXl8$ZJQ1ol@)yc)U^ydxUwvYdC??81pz+1L97ACeC;e^)}GyS^uoeXP2 zPKn&4SA_D`^ZX9xI6jU^3O%pr74E!ruQe1r6@nUqfN%g~>7TWriG_1U$P^q11|HmVYe(15(2;3_s6x!&7Jw!mF$;!3>*hO7+F8PnY!RAVFN(txG!SGKP=1xY+*N=hWoDwc^j5vGSL*e6P|*R>+P^S&7|BDA(XOH&$^PI{5!(9kDnp+rqFS#6 zhT&AWnyK8U(xfM(k+VG|sKzzerZLyhIFcoI*!d8;K6{y`IE8qmc>hK+k+I zQ-vvuAuaH$&^nPF=`a%&9c!Eg+eekegUX$?>kxh9wYqUhnp zL`#FoHU2_4KH+|PA+c#y*{7{yf0qJT=@fZ%QQorG{lYxeQnUVXX(n&kr)RY@V;l?c z_#q8OXD&2w33pU!{ox^J6)j>3|p6N{NYl#X&F-M^u=#Cm4xz;jK_<{se92<^J6Z> zH}DS?wqv~_{Dk8<3uq`#yRnPG4W`(Z(Ta(GnT5_dzlPim08=tvZ4BUoQgk)_5M^q?p=x{&EU zM(DR)^6oTIk8x?z2nh~DQFiX`V)tlN!*hj{Cn&-QE+O8=uApu%(c0C-an$c=iO?q$ z*HBN;)?;poHlvx>QwP=;ZRHPpv~1(p;n%>L{f}vN>R_0WXI=neT)2il`KvUPbs)l( zV>irA`)S^l6`>1mw~`+@(}n9kw$W=O(v{<}#?$BVD>b@kT2=x&c=+tioH+E$R6L|v z*yfHyv#oyzq^L3iM=K)4T~3#0(ss2hnH__{B#QZ)31%Of3dQnx=8I%MBRdJhN1tZI z#ntc%%34I~I-k(yeinf_YpT`68aBp=Zeeb&w%nBUkb=gD_~!#dh-L+8sW%MAV#+^owZk?+Lb zi@Fh*kC9TXM4}h;X^PzmXsnx}z3VKKGdQrJ=js*I5$v;(MG6vpg-f=@wutA^E^$c+ z{p@(^o=ni-t1Q+yCv_7cbjz8k!}l63@@V+MZv+9nUE`LL3=tSa&KjGTd&k|1`E5;b zhxBV?ZFLm)xDD%Mc%}WXh0I3tyG|`*EGV}MRHXetwqbUQ!*%l95>dAi z{vA8>!obNubH_IwP!9|R%BuM186vlC^(8dFH3ER?K>H^V< zQr?-dU9_Jc=_K|?{Qm78VJ~MplvgZg5Gb>`CeaaW1#UyXXqe?YlwE*@>NU4d&ifsK z8QBUiq-$wPR%?dl;n-I`p6&3w0tU>4&xqlD1Y!^$Jdn;l-zUXjTwqX?W;F$@92?D` zf3s&lEsmJJwT20OXtgvigq|xMEY_x%@Atd2$2EJ2GE|cgj2o5!S`&R+>vsC`cO7^z zB9-$rQi-1ud`;wgI!Wo=sL>YDyzK*lvfyfNnDYV5!v4|$M6X4Wg5+V%(D3jA^S(83 z`J^A|r>}sv7;`8yzH_O*F5w3kPqb|A%Ts6PcC82h&{4AWCxccSTEtDI8xmoD!iju`PPWc05k?!H z?2fwEQVBwwRl?eG{Q<-ae4em#IsD5O27bA1^!a8^ci_mp$Y)8PqKGX1=mUMR0ztV{oZ?@2;9aOc|KAM()2d>U`%y4ow zm=<&&3W0<5#3yC09!FaX`XDvY{fe$-F5`a~&V*L#PichT#P48dTag>@++^X$Lt2`v z$ln}>>b&t!M~pd!%b2OQ?yVJxp6&ajLTN+ws(8eu(r0n)24`S})GPEY-|z*udx;C{ zUfpF;Z)e-N%f}0S(k&}8ON<`TXWm+WKbaWNhe>=2E3jS_^oS)lR#q>w{TDM z@M!Pixfaqx;{!x;Wf0k6C;!8vCuHaGKJjb`^nV;`Z;B{wPse^eEt}XN;_PM))@NAO zhIabt0@~6uNJr&mD7rR~pgz!c?2%u!Dc4t;3!22%x0UekRy_oOu7GW!6aO5LnY6HK 
zqK^B!SmtM`g)fc!j0o@NV_RW_n6Pxp`aml|`ven)8_8hT@N4=fKX)RxMTlYL+ zzl$XOwUs3GQ{Xgrw>MIsBR!H0eS5gO%Mt2`h4MI+tB)MBwgyt7XN9U9ii~|PVCc4H zp94Ewt`#CF$!=Y*5^`9tz{wXqj!gCNP!2rpc6xQIYI=pvnM5Cytn4Fkrf{zDE4|o) z4NB5gT-V47=rVpSd`5$(X~@_vSGBO{yV-UWu>Q-&G5bu>aOa>uN`sK4pL5kF-c<`e zJ4}F0SU9~1t!nLC^~MLV7*s|>gWxae#a6ddUe@U{G7-=C7;0TMSC^FI=6B<)0e@*& z=&~gv{RSo5l)67%bj(`^Uj)V@ux6^3-L8QQqu8yU` zwz$wij{p;DhxA-I7~OQ9%H=4GeuHe1e+g5_OVity!+r8CQ*AJv6($hWtSCq`q&==y znOigSf-pOG3LI#pPg(9RORa5Hx-?!&n1BcNZl|>Yx$Xm=k{J6va}B(S2&IH{ao26g zl4C`=%yiCB3g;gjIg@;C!RPm+cINxrnnS=s-eHl2U~8dd1LCS2UOnCFC?>Pi$&{ya zk*>RmMgL8{kNeZ-)OKxJL+iXrjdr7!9$5wR7S=v5H&_}?GQfdTcl>}bZ>+nQmHKa* z{YnYV?@1ziFLw7lFtlpIZf8vPlY1}6qbdol#U8C-DqbCZV)D{&>|fc`CUB}ah@tN= z4u&HxytvE#MNIDs6KS*kb-AZUoc_n@wKan5S6S~3wKJL7wB~bYZp2SDCXGx=Rnm*c zsKhFhM6Smd`wfom*@@3<$6}`+Hcg-D_eC)jze?R*BHEO@&tSI~wJ6q|)-l*ex;&Fu zIesZnZNg$V-*UG_iBw zKBhf$X>p;$_<#p-`B|>J<&c$NZol1BbR%MW%fUGbn7RPfWxo8 z(vAECB!tgs@md7*b0IlA;(}%?9sc+w_3vbHb3$5=Bhbgfz+eG{+lvQstwzKEwm=DM z{J=Nu0T3=N=Vd=uO1q z?%YN_FMBCu8^e{}KHTv+2wn`4*>=O6+hGb0C)8OcGV;C8?gBi^seJPFj8lsHX^LSd ziqu2s293V0gUE3(4rfR2zOMu2P`Zwvlahek)1gPy?VOM(e`sB6`0Wn)fD6ofF(>3` z%FN>IHnIK8c$|xw##_mFPRC&Rhtj=+*L>7+_>eJ;ld|PtI_kj2C@0jm8gNr3_JzFUDOc|lnrY|<|f1$ zGJml*rF{3MG_b+*c!HgJHZ)FCIv0b)^LzAM4TELHi=W@`TAxEbE6L z-SK58b!1j+a9Tm|FVCM)*eN7TNrZK{&x#`mc0|UUXS*$IKykjM(Abcq$D4Wd1#llL zLgmGVO9Ukcoa)Mmr4yqIzmQJ;aeVm1Adq&R6g2jO%iuo+3BgopJN7c#tAVER>@ZV= zC)SJryDdy5tv+6?en$Je!}g}2i~8}wd#BOxqbn|-;%Vwc;1O*l$-K~u=zQqGZxPDo zRl7okwBUK`qpAk*zvPFxtxQs3$ z4mjGKw|)g>^PG_CW%Fe6a>U-xj^_Y>zRhy`aV}31^~ChEp6f@>gQqA-*HzKDabLA$ zBC{3XA=6VdmMav(#s7s!xgjr@=I&c+J_NRVdlJt&xcg;Xz2U%5Bv|(ykjK$ zBYt^$Z^V2mt+>S4ij5}{m283W`_{Q#u6)m(=c=0eR&Ksd6@c{z*g7xEUqs*YpQg2X ziJKH?DVMecN3-`{=l>C;V@2%!1Kqr$gSCTgdAxQoHTFKa&&?D6%6E{( z0hs(nKL*^0slwy!eETy;b;yQI3a^!{d>RC zuZvPzS9zK!{V7C2IT%07{4ES=b!B$wLonE(@5t?Cmp;?kmxYm66|^sQ0#?>|JI%y! z3(E`=<@JHl{HLXHnM|Gpm zIm*DfC@eH4PmsQFdA^9?!9INHyAeZ^flrF({tqbPv{?FJdB*zohB6{W=#}K0efMJF zvl!XpVj+L|x(XBz_Er}deeS8xLr?gM|Ag9Zg#s6)#5K${-dKs7mF^FE)W%Tc7|q`!W3_>+PQSPc!=u*R)5~pYmIY0;0K(QvZ%;{GSsmkEds6VZmDV z^{(X=W2Z`rkm}5n^-~$|eJ%X|oLN{#j&c5AME_@LVcGugNSN2Ct?plrv}(e!NX`G2 z@he$JN5?+^Jvs;}jKFG3aW4zb1@GxWMD_8!oReJ|qfL(%ekr-Lk8N(*46JoT%@7z| zE>TC2KfG#pzsx@dqknx0b`!lbdR$8)f~?-Q^9=03$fbYr7PIUIo;Z9? z;eWe?u3@*pk)Y<3w$kKe9H`?YYjE)v; z=Eyni9R7ja{{^a@~i20C|bsvtX4kC6N>KgGM+T}lP+W~X|(X1wY^cy<~)d&{B3PoQac9|{2fAG-H z9&wJiaBv;EGM_%E87)$@i+bfKY%2njJe0xpL2TLJ=ckG zCnLO*r8zxikirY*thdXA@=@&QoI#)eaCjNHk5=O1V7%ZrzlKgH1#1TXV8DA5m-&Mf zW=_nlEg1e|-~Frqisr<1OF7tftIBV|F%=fw0 zJ>8}>NJc>UMQ&v$O3;613c}jEG++j?TS_!(j2Q)!Zx`+3aFhsMn({OskzM+nMwP7* z6Aa@$_R+JYKjc$yCWT;^&qfzpA*U;>-A^DG1(cAxCm}>^kW%+({+$kqz2Dlxd`rJ~ zFSXddEQYrny|j#=B9-x+ohhSqa)Suqd5CBCPIk2Xx1#K*It+-0Yf7YC;d^1VzLV{! 
z(MqBWN~a*tmN3_9t2)<;H{WO5aD}wV$Ml~@{ zCZ=$4DTgNhvwI??v<>eSzyf8|-T(T&$7s(ML5h?mX%GI_VMf;kVE%Z-=bY>0L6CQ4mVz&ng zuVuK3>Dv2*ts?qQ(t7#Jn4FqhFW|vo^{nG&2W(zbQq(8F(Abn`fESFaE&94)cTDMA zWlOg3`P)y>nXRwyttWQwqiCGF6(p}4jydE)SLV`v-;MfKJ$%`&HBTy<^Xs-W=*z!V zlcJF0*30wra~S6Lbqg&_`63RM_Un`cAu-**(Zii}^z_I8&lgGfUbBD_*wW^2*FNh& zW;7BJ;p@LWhY9DP{SBx2r|hKve=?8%oo)MnnUwww*!ur1tNC(&O?cPD|HWkR?<8N$ z(C~2Ub0ENY#SH|RBTC}?=gi~NLJS%FCq5ep_;+-$Wg`rnOF~K-8TSx5FfhQ2{B7^Q z1N%JPfW3=4 z?uAaQJq=1JuZBZ?Y4VxBk*evvr&3vktNDh3=ZV|R#&m_#7yLfbAN7`rkeNY;S_|WC zAs#|;h&sBu9>YsKW9D~i?^eDY;tw@^j*a%0c+)+AkJunR^rD6+_LQ8eslrWF+pPE4y@_O_90Oh z#B;lyPzQ=<5W4y0@){m2X|9KKqxErwb1UtC{2A;mpL+4VaRvs;9mZQqQ7-Gq@}I7z zqwMAO^{iL{Fow<~_+JLS&HD*fWl3nX85c8MoZ$2y^r@o}D=lXk= zUhZnynfG0)e}amJ9oM#4#YyCOjeu4aW2NXcG9t|4d7IVII!!!VB=I3w_8maY*EX1Co67CU;b4xAwS|-Yi5d;MQJ;BrfKgSeI1fTS8t|HOF&ES zrX=|jnS2i?k%X9{EeOt5Wq3!0`u5ViXRb-l5Tn_k1cM%9b@1w2zaC=0OD*pg-B)}F z=zvA39oNC9i;V?K9bKi|Rqz%JMJ0Ck%L33o9A95#LRmPXCD?V(hk00>){9L==kHX$mxh$ndk3NQ-XKZa5Cxrh6qGGx+4^Y z{9%7=3Q1ill*a70yNYf^jASJ z)r(+&BD|63)*rcthZ zis#LEU1S-_7|qog9rqjT(x$PN)K^xI>)xzI4j6wF*&JQ6+$w>eUG)5XNa(ei7O%-l z1s?xu;JMG7LBUnM0bYU$)t{0XZ`F8ao@JHvXv2SUHCC75&J*_Nu@&ms1CDRPg}fS^ zBrh)S@#x_KKoraGf!uj-)kjpy_=n^m-Xj+x!UydaA-z@}P4J|sJC{@~IUl_Gn8@R3!nx>cn3 z(4f#vy0DMjRtD&etqX?A3=9ms^J*p(t~fVOtS_lrSDU_?clhyvux+~0>I zMw9XQ`NTHRVoXu(Y-z;qhO!FeuTMWy_wl3+RU->|l{&`dB?Ub4^VqOR*JPQ8g&(i7 z9wyD9p3Q=GzwbdPjY_y@J|$3A-UVIKfUHdTBiZ#uBo9nvo$e-m5F2|u-z}Hld4=oA z-#?!6q`RDGqtAmP^V)WU({zVHLs9KT^xH8{UJ72vJ--9DPINN&?MnXssGaY`^M`1X z!doc9Rv&)zPznCdgfeAyoE}rJ3TeEW^0aZ6KoLGz!Z?u+>XzPU=<5bdut^j>M z>PpqpmXPXWfoM~n>Mis-DIbp}-_?ZGhcadWP4+B{fs??(-&o!5?Pkw#WM#3^?t*C6 zp}3V0W^wR2?cRTNr*+DScYD2L<4jblIcHm(xkQ)s!=OZTgC`wgO%&6k$jo z>*QGHI(LwIQl6!;Y;$GtTi5O=kl9@&vCx;kO!?sX_CBUFHkF`FtSv8!9nV$T2-|G+ z_fH~eUJY@=snV793p=*U`}~u^vfkjr10@*KTr6vE$s>1 zeRe6`(3w~Fb5f;8IN%<7eA((&{yMjw|0iiruaT<;PiVBpZ`8L|M?Y>Y@Rd506||Bs zNPpu1>2=;)Xu41x%=og-)o>15Aqn;CGUrE4A}FF}HbocEET!lqQcCS&Iim zqDJQv31nu^y(^O~O8XXXJ%-F|ZaBl*N2-&Q+omjF+c({3%<6Wq+aLau3?S*v!YQ$!+77czG) zitP^?Avdh|#AWOC4WkC~{xk{vA{)$aY^PO@SRRqwTk$sx2Kbmks0V#=FE^g)(`@*N zM$K~ldmxPsnbs;3=U*0w>1*r1vCiLoIa^nvX9<#28qdvMI01;>+Hh&tjuPc-T2Sc} zeb(g?bV~nys`jC1TsWDY26Qvh9}J$@dXwHK!G}{J6M65!7X0#1{z^FbdapvB)X{1yERJGr@9D@euBq66+!(whg5tdYi3rBgntPu33i@6^;k zu$k~=Tz7yjM=cl5eTbIlep8T4i|8i(vhtMDRCU2ths1v5ru@^3KYj1$Oj%6SGlL*I z7%pn%XGs59|6DJ)l?Q^wQp?C!4JT}6z~La*<2`~ruH@u~NAF$FQv}{$-hLL*WaZxP z{IAr5_?f8gjnJB%>VDT?sMVxLMB43T$a;QjM9FWF5NV2#kfB9S9oBrBTx3qMgj?CR zFI&|%Gy$U(UL9-xe4)&>PQ%17OqfrYGbk*kD?r&)qx~Gim_3vtzKe=%Xr^s|WOhBg zmn-eFs+#%Jw7QV|B?szMxwB{PSgrO$x6O|KfmFff&_%EW)%8VZNW)z5#B1K27 z(tIhyqjp?rypzLK{18|r-3w1=#>R)Ka}@IU$A+;>9l=VqE^Abh>qolZbKvJbjCpVC z8l+wziUUjLgl=AUK5W!kl{y`{k1A%_xl%l3R~CK}w;%g;cVgM_Lba~e z8xI+x?MD%5clf~0G8Ltj_c=TGz6uK`r|IR2mlK3fNM7B&OzD{?qa8br6Sv=;;@!22mbvo@Pdhd0Jp{aHH(ni+LpK02HuAMYWxI-I{? 
zvw3KDbw&#}{_+hcWV7*{_;W7CH#cx@YYE~;V-zk){~Q=%JU=ED4)XBkc$bx0hU%f&PPRDyP*xepAwvqBj!T zIpUQgNyTXt7gP~By0WP&v@F4rnz~E-$PDHT&*|9DFov!kUwQk@r&qDY%bErzj<_TB zv40cI<~GeLLtw1Xt;~`hrN6Fi1F8eR0_z#VlrZeEu&}e=l zLSSr=HS1@c%?I+;JH{KZ8vA*X)WTG;0V>H4O)E3YM^{Z77E_CL&?vykU**(mRe4v} z4YS}wjzbF&62xckaoHQWNz~#&2@OidvT9oRxkrZIwNRJ&$ujQHX$w7z9BD7)oYv(d zJ|&?ZTn{m19pjf+l1zQ6=^IC5jwPUP@E z)|o-^0b?drYFti^>J^)z^Hu<NId9I~wL2MZD4!iv#rUb3$E=HT>+)jSs&rjRAj@c7D|@)U5Tr6 z_*VfDFuyGXb%$UpQZfR>{2?H&9UQWBr{hGq>H7Js{GjQWQoJYKChSGW1wE-BKfiWw4;yPiZNw5y?$Q1?yYjZm!ZjV+ZXwHs~Hlb3DPf1e$TZYU+WsC z*MqM2wIT+zRQTor`bRs?j58#jrI~(#Z*ILl%;lB;_{e8SCXrtBQElY4TN2LadMCCu z_P0{aU|;N|413GX!VCFlSRn$)_b7w%w+>g^pu!=j} zUT8~v`^HM;E$tgmy%)-UKyQj57Yf20uy*HhZzL#BzEcpksd{1gA-dNP5}LECE>Y}} zr%0X<-LvX{sldM067&8}Vsmw1$pwK$b1-Mow_M|_l)|rz_psFVA3MFPB7sK&=HVaA6 zO6t`3O!*^fQxQ{~{VXk#YX2H|*yC!IG34l18OF1~!O6y$r$N`v;MV4;jXaO1eqN+Q`1kjDGTr)c+7$1xMYX60Uz zOXVRk6@Tn{u{3(gk3#1-HWu%@pK{vues}~}Z50d-l!NK5Cqf1Rj3zjmw>~BlJrtSy zA4iaR!k}Y^LjGwjXS40yBbNT^)ldb(R=8DaPszI{TSe7-f%h*PS5<;$bjCq>>MYu; zHek)m151np92FhJizmRt;>A9;a1wcU{6lYO3uec&InO$u#d8AvH=y-*PnVam?^D;=uefhpvR~VVkI3GK5XBN~--1aSaUX8T zZk~08#Ah-VGRJ=@mu!nd3b=TY-n(gMMY@uSb}v`mvVqkv0#*@`IvPB3juGl)N< z5|S>qf{ChGfRmPwIX?M&Tco`at_qR?K%f6bTqtsP_$+)}{Nt*FHjD-Y*lfr|sF#T} zFw1K~)v8^?*h}WE4!YOmH>El59L=v}LpZ%FpAPaiL6UJp5(Rh!`K<7~H<=qWrSZ;B ziSNH#$T&&8lp4$n(lQ%}{i&h@Z06)2(k^t?#1V6{@@tfX${1AQI;fn1D{>Gw)e%+&sVC9PP5zGVBDfoZMt&dT0It zhl6^$8i|n__(CYw4x%?0RdO1TvCwE9(+J>t;$0VU8?)bE<+mZ2_I0FFowq)DViGyz zV?wi*L5x}3$&2q5o0B(aVMLiJR;y`PE(y6}OWO8Q8N~1q|E-=h_3I*J>*ZXLlyPRl z_u6pt4q3vR>$N*QE%1uW^f#|zL57hzk8c?&QZ&C&$Iq;O`9wc^yg=$|HS5s za%W5wfhtd5`<1EP9h+CwL&Ted&P`N)=>eoICB7(V8-QIIO@mvfzh&A>TsmoXwP8JQ z%B)7J7TJvsM}2wKrD-SnEXyNaTb3KoYO~^i+o)gRsfIfhM77UX%P2rryj74;+6ryA zbA?B53hRS^j``Sfvg&?j;9?y1$syt#)f&kVhI>L zZsqITYewkX2z;gQqHO|v=>focIc$HgTSX#YPug?^UsQ0ydY}pPyDKUUz2(rA1Fu<- zk%wqHBoPU7Wz+0B}Wk;CU+EgG>kL*6P0 zlMZxksYpRI^yTrN)iWQv>u$vggAm3E&AhDqI1OeRsX8{v?BK&)$(r%E3bg)&BIT0`m&pu+%2N zA!}le)DL<_29vqvSdHHE|CE!h0!<=%;F20;<1nEj~XDIN! z`kfdJ#}FKmv|`5VvyPSlZNjKnvgyJ8+NpOS(SjT?+rp&b>l3q3M3EFBRbZI)15=yN>o-y7yYJ1vqh1({ z_a($jbc2W6T^AGaCd8TO1fuD8Ozz*s)r-48ZE95;=#plMVAk*9IynF49a%#u<$^}9 zV_))GR;9wKmUOk6_`nna{&rKZQ+xb>80}b5v&P;?xrjR04S$}qpR_?$+ktEaxZ0#~ zHpxAu&!&hE^t|orJ7RGKq_j7ox7u zOT#1g0^RCw*B_OspW z9*Bvdo?Q~%GiLH8)1-4>RdDO1$&ukO6UEL2Gj<6KAtz}Q;nnh?DhGsw!t&GYDf|P> zJqJ&=>M1=5Y26nAOQx=U+#CV*S&Qm}{s;W7J8`8Iad<4|HkuSo5<)ml$`gAXaO(UW zQ?$HQ%9bLbmLOD~P?F(#3pVtvKUj zMZ9{djs>xqFQDyFG$-zR2aFY&Ps`J=|2pTRIYacrrPG2^`9%e>t-jB{S@n6VOre8;T|j(C-8YtwLZ{zSNA0;#~l84?Po#<1JZ-5bdBF{S9>0 zSL~RnynX#B)e`fOI)ZVGI&lUXY(MNZqhJ2Awc@5`snyU;eEhh_AWKUsEC zj=T0uZ47Xf;_uB}l;3r>`MG!1do}|$B=>cRx7{0P=3{#d73N9qa~MVytQLfjb6FgJ zx+SSty3VhjcMzC=Zb1p0UE10vtm%+YCMqf+e(yA8~ z%q^NQO~vdQUPI{6{=IU-Pr#fdS;q)H93y5<%|#%?!v`2HWz|TZ0c3c&A;9`S8Jo}S-RTkHL@6A#=6foVz)e(v~d9h0Qk0}RZrP?I(tplefJB@ z%q;L%Grv~Zb)9^3sPDV$x5q6v0yw9TnujKad*IQ1w*BAOCmnFGRV!fZ(cqR(9ovc?PyLaKjF8C67`pE-f^Knn(zV{qXDdYhc%Mz(H zkd%R%q$2Ej)l0~vMBIJy1y0^_1D`apBzNOpQ&~VdX^+TGALH{SrLB$=D!8Z-*#H5~ z?hacFk^d2ar{Go>T{w*lQ|)3pgpDZGJrJY6a>>7l3Z`mRP|n6z6y+Bx`PSqsO0~)KR}{hPFtmbdwM2cOMgbM zD^0)Ss?w#uPtNxg(hAePLJ$!0X9;x`LnnRl4w}hahSi&~-8N8rKEXLP3F9b_xjn1! zq>39*ymcRxQd{#3n?ArV**Xi2W3t!K5TGeU0F8XE~0$o)X0w4axS+;zej@HQ&{%1a$O5PpibUz<&ZG;O-ZVQxBHN zU9-kMhAA_;!WlSUEUN?7OHp}dNHwcb@vhUAcNlPz&ce)5T&GP-m&9}4lzc0tX>BfW=A_D8iy z^-WmYU^}aEnPW)eqq7Zp+At}~r7$IBS`Y;0Q9yOCTz`)(%w*1tWTU{$H|M<^>?{q! 
z^J-f6=pt=^YDmP*;R?F{x{c?#DVpJm2`dj%iY1 zCy}}thd8tD=$#r9B4K`XIFu!H|H)_>eBW%cA>CHVC00rKMY05l6!x~WKuZ3>O5MGD%qSRyydM{dJ-i4d4;r(jD>JCE# zr|FYC$@0eV=YbjXA7geQRLEzOgxbE(bG+#alM*mzwxX1yMosP2eK)FKFUIeTKL}kD!^*4IP$McjqAg3pzKrLg&d$%rJvd`R|47axY|JC$TR#}E z&g^Lg6L%Bo$ah2Q)i9qq^`EYbT2GW*a$6>;PoFQ?d0pwigr3f|3@(Edptuk~=pR*y zO*cRBJw1@_sK=<*culx-T77=+kdF|8h(vC|^KJRmF3NX=(!Fz4*NO%2tT%FpwO z&F}jno52TZ8D+@?sH!n>bKH@amx*f~0Y*iSnls(r8KPUAP9UYL<#NSY54RbmVz;ck zoRC0$0^pbiHX%G#r1icnKbUDuE9shlq%6E)C!yRp4J~Wk9L8`bK~eB4@o6l!JUeVh zHpAt#T^pbBeRd@E*r(9-xi?m;(sq{$`BV1kdD@B?IhdzItwc)#xEf{iRWuq#L!JVE zp`dBfOk;9!+gW}zMif|WL?z0TyWC?)Ne%)Q3B}!Ty^(As$Ttb&FzVG1-wla--Pfyr zlj9}&4B25WQ~ZvMG{CkpF=ZM1la_fQyPcIV8;il~FBkjP9OP-)X)w6@&SD7G3u>6& zS{uVlZIfAqMu-SLKS|tCon2HnTsGg;#&xsCUzyIP@X{nL5xk`%YQ%n+9YjL{sk>h> z6=R}uua92%_SaLz+;$@vVi6LZquTCR+Z{Kcfddh8E0AUn^`x40HRuDX44SwN;ihE-}ld?DH)kffpt)AgDT> zy!2fEDvZAK>11KYXwWknTeLSkPCzr6S;vTh1_ zL^>isMQvu7`g`R`S-yt2qhw99Smz%732e(72KyBbCvW0}M8&-)45-d;s2=sI4FZK{ zbd?1AMY2X6O`qcwzolr+T8Ez08~*rlytFVeNNi4Pnv`0Q*a*;5v3D(TTsNAXuTAj0 z4F@mB-DEv-W`buAB!tZP35g;`NeWdoFZL_ix6zS<%o?2lzp-43DT87CL{-B|X=afG zv3;}UaPE6DqA()m+qa!*eyLcWd!m{XbHA|J=Vr7^-_lj0&Q1S_wU5I>DeOX!l#{=8 z`rMRow``Q!#*Em`Hj+OtI9i}E2kVj)&4$TG9iM?^`h;#cf6JPdtM8Fx=_IgdWA?>e z?8HT7X-|OBPBJ<*R z($qE5l^@;Js6A>*qmQ(0Wdot5c^;XpHFXY0%D+tz<^B}M6k=LrdNpc3LGJUFcLC1~ zu5|E_JIqAC$3s}JQ)cA^IytJ#a#hA9`+ebgzEzT7y8J^FoMGs&;hV%?`)V3ueAn~v zH?U5xhr6FLUgcrs|8lreyRqs{I;h>W%Zp0{!nU3J1?vgUNPxf1RTsW|imQf$7MpXkUTw%t?9MoG*#F$+z`WJ4z$S zxd{-*X|frYK@0uyCy>MT_&uKjA&4{fgq>aiYPH%>J67#s?tN1dtCtvj`~FBkW^)~j z&TO0#L+1XX`n}$H4j+lf)P3Vsxy<>Nwm?TrR61l|UW!yzPi09-Y+h$r>g!%xvM`y^#_J%DeU!uIjzAPp_Om_CqNPMQeHcsdgr`F) z-w>1y-+iP?Soi9uM<7@!Z%*6AKBoiEQU1z$w_5MuLDkSUrk9^0xM2{_VPQ|j4QAhv zvMtNxb)HSh?~y~ctWp*~zR(VVu4!>125$d5;Yy*zomgINcOrgcd77kap6xRpG^f6= z75QnorNb?PJ?>&`R}1Xz@5GoFozWigma<h^+iqx{ zFXq?X9|KdJ5lEqd#i5F1-m6UvyZb8};6Nz(K>52lsmHboIq~5vRLs+a);189Io2V2 z@8r=Ww;v-tz<8E&3ktG@;viC%YqR2&c_mH5C_u1pU%R+S%m~Xot+gCf%lI06Wo*TG zBXG{N)(~YbdVOwOs!XxI?lww>2W+avgp97?2bTbs<8l-=1EES~KzWk?sk5$Wn}b|W z2`;X!;wN%0r)qB@-Q-u^m7d$0{ir0~vquvjAnl12qcl#5@<31{9>bE-MVSVE2ZZS1 zG0yZta&-Asme&sSp+F&Q5GP&Cq`qlaZK|=ocIUgI7&|LQeJ(_x4_jLddckn2ML_Mf z--T;>Vos8hy)Xql=TugscN8vjR0OhHmyN}feCCcs^@Pmm2k0=9q46PjQvbk5)U4P% zYx9%_XdWQWO8Yv&$ra6=vpbb2#q{&xhftz@P4z6~vaAm2x0e#4Zh^6aS`rzqnI5%)i~ydHfFMw zMcigbaH#_MCoh^XTu6%UIOJSSdclbj%dW#K4wbfH79PXFQsPgRpPFXBJ5|Gi-JHit z34g%DxSGO?vx`E`{3{ZUGoDBD*q1uq!urD z?d51V5LTqLpeu>*^mR4udb|DScP~Us&0R!uNqg3WpH>YOXS*SA^61a2^j|!Tx)ycY zF!DIsnoK!{KI#*zk~rO6`1L@uh-gb!^4{&wDhL_R7Rhm}``(Y~U2a7(UGApaF1+dR zxIX+$&+zkSd2)4=R{9O_pMO4Nwci>@Gv~Ah4HRelXo7XLd2#m+OXKd^#=$OrZB$Cx zW+s*3u1ylJlp}MIR*6E$jCyR?!VajnprS_U#vMAzbO@USx{!$NQyYEaqY)^)n)vj@ zJj%BTigucc=~t9dyx%m|h zdUK=$9OA(J(a5DoDVCc+;{=iqYA|oJ(STE!gY2RfRis#+2SIz7C;F*aC2w@YKy63u zj*X0V@EdtAIz4*N>k#eAS+L0^-^$7f-VOJ2uTg6Z!imMi5vaVXP%9aObcH>f6uk9W zb{3X8wV)cE!eR3@ehmjf^$*@D*}Tf-_`pFVg45rpJ&hA`vtY&{&o&&!NA2`4p$LFAz~X(zE zuYgGB#aM$5PqSZ9f~2N$;B9;K9UR++N%`krw&KeUc1F953b&sT9QgyGk7%WND);MQ zdV0Js{6)hwi+m)F4DsL{WrgWvhN%ro>1jAq0(riY%|)XO$=w`2f_6p_c77>x-#uXX zMSHxNGjBoI`zEo^lT{%kCyLm@wQEp$wAQTSsGrJ?I|j{#;P^g?fivQy3!evh&1}mD zFL@yu|H#&xCdmAZ5# z^z_SB2mge!Hhi*?X^VK*^E64PMv8`Kw`JJ>x{&dAv}g4NJjbvY>jBNiBM;s)T@aVh zi%MZej2sCpvCQXVJzvgG^r92hQp+SkHo`bD>&S|Oz5xU5JPPbhsn79N>jh4&oTuHe z-6(s%4X;1oLy%0@LbO`)fvF&RdJu#*qJ>#-U1Td?<3sNky$Rhm){#*5nE}>Z>-A;P ze6rwSV)Tdgi?>N<`}>5vKhG>SitOgOsh=(iKw!({Gu#iIUH7ns|25YxOj5dPB2s0_ z0@2llBZxS6YDsPMb~IaOu_N9`zbJP6ZN|Px@9p8>ODnPF30r!br4z#ROMbt`OcC zFP{1}H~Z_@=5PuBLVI1k0fn7AL;VI&0yti}J;xvY%Kc2&W;mPBmI4L*991xDOmZ0< 
z;%=jrfAI>I`vFOtc~#fdSf$`xk5^8vv}~kydwB2Uo_*s)n}|?isIZIO-40eBIqz-s z^MV8L<5;d2Ak7%K@BB*Yb?G*epRheijGZ8RSsS#dXZK}2c;x7>rau`3*$>BrT1qb7 z^*8O^kG*)3K`nQ?T%_Nc+HK}3vuGsq*(^vbG{uJXj{DdH9-;GJrX%PJoqfGMs}@i# zj4ETv$?~*Ui!ce73Z9jcYJ!M2bgQ5+mr^sC=t~##nN{beo1etI99I)wF|oTu%t73K zaXL8gtx0W>E(VBBZ?^)PbMiiGe46BwC(jyCau#yH6k$(j_Bt-Gvt8XW8a61@t|H1i zyeadv-68eR%B|L_YAY$Sz0#?l^rJIpY~_m_O)Ehp{>m_k^mzW7B;@1h_^N)s+o0Q( zpOL%Cr^;3i@%$fe8A_2LI`~8i#2YJ0VX%CBO)UluWkhvZCa0(9R<+7Oft{RO`$MeK z*s{y4*L=PEVOjS!a`YLgxAN4qOA{s-!AA*6e9kHh3x^?7wo7qk#OTWoBgjeBZ(>c4 ztOU-jFx$~!23QI((O+OKMApm_i1PUNVwAhk6q`+iuv%_{>T&&H*+6*$cuAzHnz6p? ztVE8!upZYZa-xAegFDj#0qIba*`}WMu{m*yMHe8C2>XNKQbj$Psce(}x9jgK?^fo5 z-ABuw$!oO_7S;a#zmi*hny)h~4uO2;GQ&l__1{oKllw}x zcCx~-HeY+6HifB?hZE{NvT3rj{}lBQKibe@lZ1Mc40$`q|~F zuBPW%n47Ej`od(e1@6EmzBOI)fZ32-VNp#lxtOQpJcdSaGB{=c4R}N`pNC@hOwO)6 zjVKY9SvzIlCEbS?|AH42a`j&QXrEhGL|=(FS!k5(%iLMIa(AC1qwP5qEIlwg;A>C)3S&by}S7zdUYgtA9+ z4OWD@L*qd>UUK>Js&*6apV^#CjsAr_uKFq~t#Lg|Fh4g>F9~ zzDLJEKh0xS4B<)YPdfp9x1M*`E{4iGfEZc7+4w1*6u5efNnob@;ulU6n2=e@`Af zPp1E(OoKPJt-?(7B{k~2Wfnedo;}xb$O_RElZs!?QwBB%3Y7j$Gya4Z( zJ*s(lO)hwTg-Of(_o^^DI!T3&0es%4R2++PGRBIjajIohB{owQ-Rp7jSJT8!1GEFM ziVYvIEo&f4KPW%gCct>a4VOl*e+*w)B(gJ&Z+$-^&;ZI&ikhdJWdbtyhDC64X?=E4 zsADe@kWcD)7@ktrnfqUD4B&33YB`##x))G$7G`JD{st(4%}=-yPS9Ll;=Z{3izpiX>6$e9%Uv3iV0Y}#UxV>7p3VbFnSl`e{ z5AG+6#V^zjGqlhM#>=}m-up}+*)_16Ami|B9^3B% zJx+{bZ*BQUeINBPj3Ck}dlr*)?{7g9H68UCEeI+2Gg#J`~;E17e;z|RLy+4>V- zI^nwZ42sP^z!TH(uruu*rY5X@lUnUfb&bI6OBj7txm2 zsSZWMv}<6GigaGl;#wG#zH3uO;*F}P&gG+clRJ+Z|NG!=tO6% z3W-vx>sy2dMZPz|{5ToNN|y&&TwB*hUpVd>!e|qzd~$1p3SB>Ju?Kzlx@$q&;I>X# zj|+z;jAT{ zmt=$UPOz0@9^BoWcL}{ex5BU?`%qk$j`IT36pgh!XYjBE8S`l!u`+Fe$|t{#%%Mqq zD}kRZf}eTTWbhT8zez>>m>fHaGI729Jw1GI_X=H+UrV}o7OB9>$=~PA zSe9At#s`WjyS5;wT{LA~f=^Q8J%@7{*z&lwPRdxn7=J2wCI0fU?UQ6P>YB-m^|r@S zQOCPah6nuFJ_+(_6Ab72K5TkOyETH}g&%-3n^&MtS?Og|Sf75N-%R*VezuVYI3c8Q z*DQ*&J^lV(b=H%h%8$e=61s$bn4-wHg3B$JpJXgneAEe;W4 znoLW}`HrxoA*42g#a8n$uhMG2T@-iaK!+iI2w*o6OiDQzZCjd(u`SuLAPp~L?xE_% z7;Kc`^&a2_e;s=rkQQ5ys`BHi0T3@ci+WIT_nVU^68;U-h)|uGpnLtVfN=s>1 zIx;E(eYl>v9aLI<#dDx+pb&rEzG1nkUQN_LD5C-rrKLdRKQFD~9*9`hh>ocdr`>q5 zlGl4WJo~1hCvD5@fb-v7yVW#BQYGvs!nBsU+KJ)~`%3SMUhi>z#vq&|VQa4i3k%U) z&jZj{ovDuPS13S%g^x;bJ!Qu7kjv)8>XRE-*ZH899{Us>u`e~jrE~8Q)5B6?(p8}z zH!N*Fk?#=sQ>n~AqwGTt86VfaR1^|z-Aw)3x~jqc^7n)4bzv!kn$z~aaGxA9PZw8} zruKN-_5;$7Ta>U<^BJmFvLHL`vx#X;5~Ne?A5J<(eBj+vv!X>@vw5sH?W?PA-MvgC z^V(J7X)|caJ;Z47yxiEdX4>aZLu`e~IIDnQ!gt|d)An}H7c>7$|L!MGe%c5hq1Ob& z2a?Y6rK5qpA^Fg0r(qrb_>p;TjzCkL_jhmvhEC(l7=xz3!+lk`)IvoJ=F9AH!;-X^ z75QyV`0m`)Xx1C17%_Vt*z|jY5v766hDg{L-3Jt#$jaRSqO~;q$)wK)R$7bv{lLE+ z984KxE7Dvxe=2y9Dmj5y@9S7OzSCXtHZ) zQ4O61rXy_`jqR*cRQC4(Xgh0J>RnC+-53r7;xsXrhG6lD+RCT+)CIDYqq>HNP3vU=!_~QSLQ9MVN)^Y@B7Xa(;bxAHGU2cMv^|! 
[GIT binary patch data (base85-encoded) omitted]
zmQXThkNR9cy{ZHj)D{c<3R7uFqnYe#8Ru2D7KtY@>a)8R3nKOSUg0?o->+t;?`7H= zX7|mMJ~33qt&AbTT*IexKUfUrEzUY4r%@%M)j5@wkM>3z(xFD?M;~^?-n<(n33mEz z3=_?QN#$JoS|NEAyTBs(?W*?0S9^y--+^AO?9;#N>o$}EsuGvaVquZA%SAqI+S+7B z-orP{vksLA8@qb%C2l&45hWh@=_W4*lm?AlI;_snuzM_mqMT(a&;N<1-${Y=!F4MJG-RQRMh!ezY zn=&Gs`T3NRf*!Z6XKJs;$Ee-nXxx>GOt$4Og2l1h&+J|imGLC>)X!#5U}m{f(kmpz zq0ls`j5OgpC7@1&SKJSSV3exgP0)h!&UK^Vc3CF|Cb3vRqo-=QS0L!j9<;>0IO=OOQAz=yf|^oa zgh#B?Db`LF$9oU@q~(NtCC+@Xiq>~pBL4liDRdo-Q3kw3RPIlGG#Io`YX8aAv^ zk+Fb2qK$@{P=0F(J<;J??Gvej0mMs(3A6RW+MY61EPh00D#VIPS)b zx2IA)^IgRtrTzM%c7BvvNZA0B-LJ-zfH8tf$Tg(P(4M>fW4WzvEli@X0}3iA27vWb znuC;-D9j-l6%}l{JWmnG%_-X;&;Kq_C@7|jm~ROGTmdJq#UBOzc@35Ew&j1k;eF8d z))vz8lgB@ac$NnK&q@9d&!paO@q#7VXdT{age;8K zA2#KO@Lzq`U?(5ogJ`$`etBcun+f_w_?4(6`~?qYmKw-VEuVo{+^((mN=izt_A=rU z5|~x6Uawp2SzOV6SE#qu7<|yq?(W&eY6zNo3Dv^1adM%$WvwuANS6jX%|qP>as1KF zJQWsmw8-{T{fI{buxUleHOR$)AljoqY=B(A4)wdn?4a5H{aDA09@gL zeUA%2kFTTKW3`(}@k7#KP4~Bl5@8H=TV(Sd_qQ86mdXVxsj%><@4}Ltcc{UY{8w8q z*f}^%Pd54!4MX_OV~Xl_@;=$8qp$)sA_AD8{mzE8TRMl1Ent7U72vWDxN=SFmcZ+3k!Gw+RcG0RGrLJw0LMw|9`{Nx+n`U+T{cdoo)n!Uf#{(b{eA z5)u;=WuyBPWcV_(lbppN3ogaAI~i`byV{30+*Jj?`v;JKbscUbbUMI8s5mh`8KLP0 zqM~Y!X3IpkEi-`fH#l>xZ zEWH6c{COkPt*!XuAbfoM3IOQ}{=ms&zb7jst zuPWef*!<82_%7GN#k6Jk&LR$)@7;lKrso<*fgUhBO^n+}W?HCCF?d5Zo{?af$!OuJ zppes@U*^N0!~`8`=*DszWWWHWRDIg{g~sEle3Wf zi;%>#ZY|4d^>_{o@hOtuKLgI%Vi0gCMn^`{9t?~(Rx+y0r}Y?qw|C$UrYKkuR6ijT z6BFR;rn|G^_&`rm^aa)QkxTBf*?r*8rjJH`J&Hqkg<$`r$D+E|(jnI-guF7(CTe)W zvbwjisL;wHZDyi&wTH}fRz5dv`a{=JpThQ)k&wjJE2&MANb9+MQueuUO3@1K>AHZe!OY`q-hq6{Y zq-_i0<2(Hm!P^{QDapNyx#I(gg6$7|O~d017^l51GNE$=HAkD)_D#k|dR_X>jRRCu zd)qtuj7pQ^pNT-Os%by2`IQ)KDK%XDSY?v(eAiaB=DWlkvq2O`$t2|ginZgm9>3=S zTyUp(%T~k5z-aUBY_kdAE>yQ)H7nml?6ihHZ#SvAanSNSFvm_a$;PrkcAjJ~!lF!S z!j`(MSZ<+%0lUC#-_*JDk)dQ&H^=l#iHh%ftW_4_7Cl_`Ar^Jq8J*acwKy*07tdC4 z&6TM!5OXYry=*K`^VkhlDCKs`9|QF&cUI;3MKa({D<2D$qX(ikG^Y{Dr-jXRqIhne zK7(apRZi7VFC9r7Jx7c0g5X9N=5e{0Z=jIET5;SRjkI1WCZDL~OkZl3JtT2#5l&uh zNnK(WDXO;K$SmsQ;4)q#lQ`Ljftqum8=?WG9g8{EAv4uEmF>j4kWPG_b$J1auY&MT$fs-3|Nes7xYnBd!z!Tg(PVkJvc#obUb%~eK> z9oNB|56=c{v?lGs$TQXJi-hNH7t_y6@DJ>1Hq{Luo{iTL`1+q!Q$H(@~ zcB}3DZ8YOgZ*R74kk!`|7s{yx-~aAmou8Nqn8GrGf{@)dc@~EO6a1uOcC!gOFsIIs z+v|B6$Fa1qO?51D6Zz42DO0Cn;c=v(vDZt>U5XBDGaTtE3!L#|ryEFWar9!UYF7=z z+1vduot_z;7Y28W#z*&_SmEVe*rlA{#;JWYb!vJ$IzmyYxbvmy(<4ECCEh8z?$k_1 zc&zt3GoX}QEo@M7{1m@NcGxb>_*_zME4%w+nBj9N=*LyX5wkRvh1wZPJec)%GnLZV z_jl}(R_5~0aTS?x6~`-4`|F3fy6R@?GRoIKX`ScFg&FFW9#eLTx8>E37WUv!m#3)R zy-qoadko`Vm|zikKK@=#3A{3(_oX4Dix?%}TFM*7B}m_-rtrE_Ixo8kwo!zKR?%7} zy|t|S_MKkyl5}x^04TfX;>U~*9#EG~b8C6LS4jl}tm4NYyWssS7x1cmoCes?t7id0 zf}>j5GUQqlTED^AvM9nKq;~ST_$wp*NX#`A*;EuOIIJMeYR-5p1h)6vV_TKM1Rk{z zk=cSCPQ+tn2hBjv-tp`4uW_`+@?sY6$~N+DE~9D|XVY|AiX*`m@-F2x#f;$s=iCN3 zx5%_1*EjDjkgsz=!EVS@zXwIGv!HEHITk-Ol8HEWvr})9o9ElRKs92gWZo2+7>el}`lLI;r}KA#9+hu8w51YG+F$ACawu>1lI2$R zTnvx=)w1>({L9n8b4Fx=493F#P03z$#ZU@r!I)g z4u|(qtA>jJmA@f2UNNUvT|9puM{eOJj52RI%`wimNY|a#dQ*cSGD<_g_;P!OW?W!J z*{JoKLEevHY#Ry0V(n4vi-79FH=&}tl{S`9bfwdFS9yL-u=OIO;{^qBD)I+HXL$_o z_Oeej^Bi-r?~25LgjtBV*4CREIUFy(rz+`L4lJQ`9I#Twgn+Q+tkstwt`4KR&g}A{JKGltm{tbCR|L zArE}iVFKx`W+tQ7?f^@D^Q8(LHSB9W?qqn}7a10zSp(IFkmE*pRgzrAuv+K5GhSIA?YjUO;OEpo0$ zxy~ze7dmy0$Zoi&h(~NT`xHN;4C`TUR{R3fPN!PL=U|pd_~n!!g*tb?FiB?HZjxj!~6F|thF_= zhDZfrFlu&*LIXaL$NQ)hiEi)UKMs3}CGt+&Pw$Iq4Y24N#eWyQ`aY)}*1@gW{fx9c z_Xo**ScdPF@2ADZE1Sj{a;fVBdJ!F=u(2#1puQbk!d6RW4Xb&SCdkFm-6HmM5^C!z znpvN9WbE(^nN@6RrY}a0vhz*5r|+q1f^=k&PaUsWEu{s2@U3py7)x`4I!bA2XEit+ zw{+n2QYN?Nz`@g&acL5n6xebp zwb8=WFmEbjdx2I;#I47`i{i z_W8(7YqLhR&zwZzhuaBkje@(cP){8d-o3Jv6ii)!bBHzp)li6~8c*bsPCIpkK*;x^ 
z@mZ%TIoN4bA+q~m>g)5J^pK#;r8|Zf+mgaw{Y_hRf_xXF+GCJ>Qto3**{qE3o#IXk zzIlSjd%_={qp(P(}IVx}v(rrBU;I?_>K?-*|N{xsZH@ zG(|@~#^UPvXPDhU#UESAn8v69;g=}U=yyk;or4*U7f*rM(V#8go;6UW3_~B&LGcxP z-U4Q`@hX3VlLgu;!$fs1snc`1$i0p{suFP^Y}FOnc!O0O1@*|w#TDL2cCk3MqJWQB ztG`7pEhZ0Q2l8!S0?Vx%r_1AYXF%G2He*(7GFhZKuf+@qJ9d?$_w4pmCfV>d>NM-^ zx5q)+N`T~phKA^u<7DNz8nt&PkfiJ3rRc7~Tb0T5wZ=PgZNJQdWQ!7alnj1Xl% z?kjPq>g~4wZs*_)$Y&v`+Q35Yg1C8HqLNhKK78j{y^+-AAV{fnh+4EtXn;28mMamJ zyGT7#S#@3O7IVF8@FQEgU?oHvkR7kKJ?`;8+$`x`?o*h~QYI33Q6wW^xPfgFM}EKY zN_cI8K35Q9dsOi#-$qIzy46ZC@A-imklZwp^!oh1!tN7r*TSuRpbV*4>I}Q{QA%yq zGeD{Fg`#<=hc@`fyR%^Kvdy>GYg6kN%03IX$y!R-iSbjp3Q;)s8M4Y^%Y}wE+qvcWX>c|Tw?v#D$`9W^$j~>Vz%sUXDX_)elVrJ#s(_D_y`8wj`)$@v zmxB3mniyS>^eJRrV@Gx&z`?G6bZ}>Y(auh62=Duo{AJ5%!Y@T;ajQ;pcxaKdi*wn< zSqo|PMG+aE%9@)@^L`d};Qf(|Z(L#{V@`FX_z1L#p_qJGW&C|v2BEvDYWfME2$YWK z^%ESLCQ+k%sy<}|23DqarWe?Pe+w0%a66#Wg#&T62^KFzHqTE61&R%OqqYH2gL^fo z&kGj5Zk)n}h|4;qJf!yEK>+a(n@bgtcO966_bVs!N=iz=1h@=Q4Wf$mbA^(DSWkSg zUY+_g@E-QLSBH`(#^)c?yDKfO4Q0F_eUR5jDXB__Ph_G1eCEg!Fd~t)6Hq{+JQ+dXT50U;f0c$Sy5TRuo(`YADwY(Pow+zLYz5tev&5t zWXx^1a*4_bWxSaSXNp-wQl{YHZk|VmENZ+kqUg0N11O2zz410jB1_B*Lq&u3?5db1 zLSX!5U|RFZ`>f-`;_=-D!RdaYmD6iScqYq;zZC*`TeU>PPS4)LDm>FHY$bfbrhKQV zzg;hFA7UDBs#MxS3ZzaFbe^aibe3o4(m(?U`Rhy%2RVyO+PNlVnj(q=Hnk^xKPDB4 zTba|Ga+tK~Im}H35`hYNr>V1)!pIFxwok@s5=M*m#?bVAyKp* zc~TRDX_NO$AEx(ek9p;bqYANm*k7XlmUw$w{;e<|luB}3d2$N~GRFX2LBEpFK>~O{ zPEIbAmpG)>RPmMTMF|6-bOkd@gDrOFZ20E#3fA4(=O*r#kfkje?r}|4yFz59it_Tr zrw+sbp;4>7`SeKA55C_T1LP(-5@1hk%~kL~1wiy`+sGq5sU38aei-BJCRB}M^F`=k zMBe#&c5eqmNL~d0OIFGQ$mO>P{@bq!j2V7UC%~B}?oTb+@6Y7P{MnA{;uONdM4UcozYx zjP>DPH$ZtZ9{~D_MkQti*5*bz_?0Nd`opvL>$?y;b>+hHfwe-!(POj4g&f!;l)h`0 zN!sZ$OP`sV#kuzefYRpuT#dp?;bW+T6~Gl;Gza^iRB>0OQ?Ys^Xf%J2s9|Wn+V=w1 z0QB^n)PX|F^s~A zn^?$pp-TGX;b`CE{K#9Co5S9NnQkgQ_bIalKyUQCZ;^7t;-7~?=?~^&GMUcj?{yIm zabh4gr^4HlM`75nC>@ZE?&$AFvud>CzW;VO_qULp%J&4ibrH7 z+SbJxpwsjLH}XZ0xqZesl9B0# zWTQ|Yu~If$y;GT1ue17QN+GRm`&T_z6iwU&RIl`)z1J6JvyCW~m1v^gznylShd66s z5)E})EoQ3}SJNP_x<_YBR&QZT4Tl|OCJ2~Jl}e&O>5%`p{hje3P2hdsU5RuAs)bd8 z@}KpM^~JIR+W}KI!xP{mL(`a2*(Rb8vjHo2EDKHrfXq8-YS-_68XQ36eB`Ha+M`+O zoZc@ndHdY*vhre)U4iv*CY>VQO)X}4TBjY*P{sxVHUu`V_rEi!51Iky?HC)KAg+G~ zb10}6MgXbg$t#wEJZ67xG zJN~MK%|;%jZ`gS}zWb0Zor}9nTHrHpncISi_S=gVJMYb)j<&D7KAs$T)vI63s9`>)IlL0;!-(Lr-t-1p2cla)iOqKz&Qk()T}AgK%RZA^?L|QpL$q)f3w~mU zU`0EwGzrSFbWp*GmjVLwy$0tk^rCjjpjDYf^v-s$wE98#G2u*M0Ub5JpPM)VW-&vf4;w_r+hD#kBBblS~uuhEmA; zd>O{zi8+ZHk<5J9jgjRKf2$XV{kp>72^2YINjGm;l&p}bu*6+qNPwmElyYIf*&#(l z2*t8D^f=#M=GTt50%5c)EYx2L|9TGUmAnMZVBT9~FVPfq_U`Kav0S`0c}oDTD3LPPmW zeErp8(SG*%4*2Qo)4GB98`fXOT%HP8&@Y0xaRP;B4gpcZ)l~{3`Qp7P)3cW)>q`05 zxQe4f3@G1D3Kc-_qvIeF@=b`F>iv~rcWILo%Dv4z8LJz4?*2KJZoy<_-RnJJV^oX1 z=}fu?r9Cvc78d@>`y3!tZ-{H&+*nT4i;aM{%UOuEMO4g2-%6#zH$$D-8!;7`rp^%e z%)dG~H4%pQHOfd`%1|b0Tg$vg(F)1Rlt8>(yWEsxqn${dJZ;BshDpV8kg5}+Q1?bW z$W{7adr1Q2yNbSjna5_~NG{bS;2)XT-p9FYlQ^UbOcnI^bP_73Q|IEp$Cu-p?3&HML@a) z0@8a)0zrB&f`ar;s6l!M4ZXh$_i^hv&+ofF{*g<}F1e8tD0OT?E+^)5H%Wold^7`W0a0=_$9DR+e7OJPWpE z#hZ@rNrJ_fi%KSbCr=bPMG-mds{`9soXm}9$C3#&dQX)^PTdn)l;3xk#OrOV_vU7x z2{e`?j~rwxSUTbhn-U4Y9_s1(>bAwV=J#l&39*?uY1Uki&+u%SPOmHUAm7a*KbZUq z!*j*n=4$#-;+G)*g%reYAX?F?4<${cXw8@zt;)2xdXN6^A+g-6pz=*p>#?v$o;>vH zd)CcL&K*(i2XJeIsq9#ZoO?c%?VUUU&&_;|XK>foT4zUh_N>l~CY*NeS22Jd21Ld~ z{|bwV6pvZ)i^{oX2O3!A%(fcevhv!8&6aLW4Gcz^N@QRS;l7#)Ig;11dK6=0kJsMv zWQ<1sb^QqJN0PJDq4@;QyBo!^W7AuN%KT(c{*)JKKtzBxlYk8zsgJrL=OTy(x9{n5oNRzo7#$}YQA`9G6 z$7je8VO)@B3#oo!$UfdMoE42ux8E0W`&dgYDRqydAEzkfa z-CZ+ZXT{vFVhyz&__@v^D%*88U}WJn4rX`988=;lFuR@S9!Pj3t=L{DYz_1u1@BmkUEJif25uca_^~jE_b5$=e~-A$RxH*cQC&as-Po}KqT8K?sRpgwz4i!E 
zV6Qpy^q%sXS0^g0`;i8d^4k7dHh%NJlk=Kb-^ta(l6MM;D=GG|F-R)>eRwgSy>Cv| zJ8#!XjCRKE#{_H~($Xzr`OtI7o3B27*(bOZIsIvWb@9EkFVfnrC53PA!DMN7s5U1gf-TUvG(X_InGi7|g65 zDKPHRa#i=&?Cz4|)=QNtM+WE~SaS0Y|GF){;s(}BSMv)B6kT0itAU1mJEupRiRc;t zmQ(TctcqpV%VA3O?$;O1}%QXewa7ASO6TtSTt=4n#+*a(C=vT?5U8+KAZD!?0TzVA>+ zR!=?*=vi602QUB?8P9~YV%um_K_6Ypf{UahdjPbcgW_JyoujG}c`cuo(d0?5iq_l& zQFw7E_=7F6na_Bp75otuz$D)$4!O6BUTeQklK&m)Xo%e@c_Beyvt@;S=p2+sL<#1` zX2UJ0b{2#CpeZz}mhQk!$KYiXy^F;TMo7@ONi1)gv!n6t9MG#vBJf4ZVBq8BN z-9HS@Gx&9v(YM+fjyJ)@%)LSj;=xqMKk*fft1FbW*j*H1j2Bg2EwU{N zS3nGEwE6fNN7t|1eAV*C6F{XYZjc(hE`@8#~Lv?noWMsYfqv@UALM4}B@qLNC zL%mJjoSOEpD+YT@9ZW7#1{0j*7o*Y)aWMGhFL6V8qr{|2EB@v~Y#pnV_eO=BIP=$p zOmWfL?_4w|e$34wU6RGkp8SHs&I2^$AD0JmR{<<)NQTxY>%3X9*WRRim!dEbELciC zT~3|bI&k^%g$$ja-#KCGpxBAE1%`ul?4F_eWr@15~~1o1D7^~R=$%(07iFC zT~>3RqmVz=(6a=jXiq6$v`_2U!H7KgbWvR;O-J0&db`6AAO%AEa*@nw-$V%UIO9kt zE9l`t{i-H#vk$!it`mJfVqsAr=L9fkc;eP8W*9cXv8N4ul%4I^m8Bp2bvl@dFk{at z8vysmOGnw+L0q}!hHX&Ge0$KIQEfe*IW39cG9w32VKE{*}~Xt04HqaW%!7UPQA-z!9V zn?zsylmIWjtzE$cZm5v?wnk`$_M$B&rUGlm^g7!8la%y_tnEdnm>t(ln~5jOr~|e53Q68m zi=h7gti6ORx-$mFrxlX!Psbi&cR<5RRIi?$+<@$Duho=#SR0?bDicJ{P)61k8T~Mf zlwlp4WB;m(n~^(6&?S+RrK&*v80D!dWLX#L+>gI=n@w9a_nCf;gJE+~YFWG9H;ul8 zz)CLl7q*H92z^LDf8xDEyHu@%64CSZ>oA>kZwc>GZI#3o?Dlbr^cyh}hx8UwS7mxa;nGuR86|@m!Yd}xr7c?QiEb`}=qkMq;>@KExTCElu}i~E+hUHQ z;FxbJxtjc!@!Rr+GF6Tx7bql9Iy#vj>09x-ww1OC`PW-5RL!54tc!B7>U)KT)lxbbi7I#?zl8OW z@sL;O{0DNaB)g%~{%}0UM_KD3)J~Tpf;mfB{R?LZP(I=DF~2pw^zQxqz7yzpR@mK> z{?)$HeI4F@plmT;jsZxW(hNA)%&vHm4A%5He9bE9GQmXN8eU4@c}e$v3r&dc5FjUm3dt zMV3~N&zfH@8aHd`Wba9k4par zT0x|RD=IAv)Q84yL9t->C3!>YB!{TMoEE|AdaypW>YD9DMS-6zKl0-JRtBB>?S=-~ zgq(s%Boai3yF1?^X5Q?|a$vP$_WTlhQ^hP)alo!XWhcQJEUt?=uIwqpH5C<}I=^Od zAD`3J&tBmlLg}up5J1WWEj_`s^?OD<&m*pojD0tMz!648wtEwcq@Q;sTq+XjC_SUC zwI*;0=)6?)pilxo>FL+sU>|)0y`eia z@%njs=JFwfR*D78Z=6b%A`7gd=Z2{T_k0AXOH}HV3E_OO0Gj%8Kd6!!2}yet+em4l z5|u8!P6*f+x)LyMb7e}s9(a~nEz?%{_NeAQJWFe*e^^7AyJjB>C63}NDAmVk1f_~8 zrig^iT6)m!GrDXRi#xtz6@fL^p7C9$0#Hg`dH zPF1sbPl~k4jwMD4Xi?pQQPEO1kFj!n4OYu+k4>-$17Qk${S#o6wC(-*PXo@D1J?ELeZvW(&PUb3I z{dLP3uM;I-+>EMtuHtS3 zWT1Ok{P-JAn6PBSu2h+|XRZObVxCoSoH4O0hYhP{o5#Ftb8qxev%RNoShT-C&QN{5 z?u%28Ye!vUCaQoP)g4Oh*E2ohuy0@L0*$WCJ^=p4+AxSdJPMCL8cf|3b*ZY<21k;z zx))pB)H$cjeuz?7jUkRdjM?0AEyoHGYai`m zi&&v`WF4sOk?#E(^HEy!J_v7)h7ei*Utx({d()` z>xV%1v$O)eij@w-Q~dY9PH*BN z9tnu@qm??>1CYSH(x#_0vXfJ0K)A-AndU8Fz*#k=NA6 zkg!UxlQeM8qCZ6Fc9_`H!f2+NBhT@5x6x!X>zJ&coLIxP17#W7rE8b0$Fwo}Xkw3Y zSwFu>@iqn7%oE%aWVq96a+3lC2QT_4!5Z{f_BNVFeoh2mFiggnoTovFFM>}z!$CP3NNLbngrgk)|{HmgKQ?U{u&Hi^q0e4MLTw|S1d7!9|VgX%-x&xS}oA6bVxVw*#W=?Gpm6d z#pe4QA?@{2DfdA~=pAjF>zTXeqN+n|Rtd2<+)m+^vh-P;cZY_#J%mwF?#}IZ%L&;` z2E(>H0I;jS(WW+cA`JGLaMEAVTs~YN<~>M4Z!yMqC>Rqp1*+RHGJy!??@#(Ox&_Hq zQ0_!2aw7Y@?c5?)+=o%#<%%JfHTFKrY;>$SaiUeXuH}?FvbE8CZ%OjA!@{|J`1`U$ zh60M^@SHjs)i_@1t7ss;D%!x)E#STa3cTMpjT=d_z7p zo9yNvMaN~zetMKRkV)9UbI~$gQC?$ulZKlotRH6b_s&_#O>(I5^kiPojBmujxIPI^ zW8}ceugpGG8Y3mu){NkmktEn(o%8vqU{{1o#zXqK zYGIJyc)-b7HE%-}*_X6IOS_p1xj_|N0kWN)fw$fE<5pK?05%edp*+F?fj~&FJs!6U?uXI<_ zcewau5k-FJP}nV}Bx+*(z#6Eu+Ay?@|IV(ICC#q~*Du)0Z7V4Uj+k-|l2VxOf^>Bb zAiyxILMHA$@o_V1;BJnX*H4CGR$qSsV}<}U^H|r{(Ekw5-F_Cw$X_=Ks4_mb$_z0I z%vXm!R><=Wy}0m9Q~bVA?3`}vIBi(HyF~)@umvFg;7Rr~=vt_G`0J0b?M}%mcoUvo z2f2`iHF3dFNupYNie*DadtVjNt5f)4%Z7RfeFC5Ql&Js_v(W>VL~LA-apHYLxjQec zi8{{G^maRv+P7j*c%4-5@rwl1H?#k~%G4KRHt*FBb#NgbspeLM#zo3|oXnhJ&X~74 zmG7<&bI|5$rSO`Y5>l&FRF`$MF4wg@Oe*4^$ZNM3n)@)V**8M~w#TAA70GcX*~iA^kb1w@DYjZb|KEYcWDtK@3_!2K;8GBz~e|7AA~H^~mF zN{BTE(}hkl^kfne)YcAabJ(mpi5Br{!qRO3s8DSdvr&}kVcQ@W`|YEGa#87!5!8M# 
z(A5^s#)2c)8)G>@zbtZmL6~SDcQ+*@;R6HK7HsX2`&>R(jzG}zU;sozi3V@(zPApk z-O^mdyNY)84kUvyw1t}W;E=(%mD2(wopiDZ%c$gfbWHprw}j9U+I6o)G|7^n$OJmk z*8yjP;rQQZcU#GE^F`M*=G+7k)Mpz6&ZT`M(Ey$vvmCOvC#77E?yQofI$pOIMsUIk!mLcVL{%}ltR2r z9m=l&q{#l)6@P>IM3HS7nEr#()ajAF1^fEPqq%PMwK@7@{FnOT?gzW>BB>L+i&QOy z&@jWJ!CQ)l`q%E zTm6uB)~+~0AImw5|61PN1VXFy1UGKT$W+S0W3u%~=P8L`8>g6BcDU2iYBN+7^j=-L3GV=<%7RcABDfgS!AsN6f3PiQUvd{%OsE+@MTOr7(QM|y>NyT>O0E~+ zF+1{bCyT{h@g#Fst1l>T9TL?^@u|Xspf19jx3Kqh=%t1WmyevR&HWDL@gIlgB*1LE z=G+8N_0!;oSp{k<;~qq9dtpUs%62&Z6Fmcq-@N7<4u1wwc?MSC!DO&}*UmFjemUv5 zE{Ykpy_M}KgHO|*y@8wCRIL>+Uw&U@^G>0r>B&?t@9N)fq5c7kSU=AlU=Hb9N^qkH&I9)Si;t-tt_R8~bM9ie^W21S2BRwa# zS|d0%GH$VZ(f`<~BZ2&txq0$pa%B-`JUGs=f^+YqqXv?IKyXJvq)xX?M$N6t;5!F` zb(|4;Ona-YzVNRrA`Q{iH~6SNxs8_=?o7i3NE2)U&erQ>;nxlHyo&Y-`v}2?N|98^ z+O0fA()l>z-Rai~w5C&-kSm&?zknbD{if0Y-Pg&lwyRHFHk(tW0YVrj_-QmdD8 z%z__TquEt)qT%rSEQWX7IUY86>*nM@!~#XXp&ai(1?4cD-q3{a8kTk4(PO(%ckw7tupI zvQ%VHBM~{LljV@ig3bOrva3>(LP1 z2xv~6XD&GzF#LnIT*pvD6K&kj*69T=RNWJp|1~?=w_?WAkzKuAr}*U>ew!gqY}vA$CLCX zq%E#JZ_k4Z;zxHWN*=q%asxbStup&eLXbOR!RhRy=`*i@p(PWy~5axBQn46Gf_il6)H_F`{Y zL#bR6Q$t0pmJ=d)szw9TZ#8-DNmhfH{m9BRhVzt)SmTw`Ad9*miY|L1f~n(~M~-nFz$R9=R(`9OzGLbY}~(AR}ef z%Ma&=EliKgid{JgbL~qV34_d3`-4XQ!N6mq-y$)3HKqbRvbS_mX@v>;Z zHu*p=`Ni~a?}RVM>Yg*Hi3~~Y&89?_Y69o0(V;Z93UZ>q}Dk)+EpH)lBr!r$pT|hNxFjwX( zFa4rg5MHwu4NdXYboXtWxI>OSKr7nrM(iFL zy|Gc>0&ncBuI~g~x)CZ{JSUnlHg9L9n5T8vJC41t{JXChw2xfuuQib@dlGO^&BMQ?JwHaBJfp<%?hw0NypH zvjJ{aR%mGD!xf}{!EF8)$jW4Q_;7Vt_e!}|$80B@zYe-a8y}NBk=Q;zX~3#{d|3n3 zqS~Oqp>dF*V6vOFGnNkgoYHM962}R70CZTf45j_;eOJbk#n(H@g1{5Nx;=Vj>_(^PEAGyWe5oC4 z-9(Q_8Gxq{JeV7wQ-`QSXV{?SRtcvSj^jQ&iYb081Uv!d8mJ}V+$1as(s&>haU@$6 z79a|XJ$t8hU{TiY?iUyGS)Gbalq0bE+5W1;bbavskz8u)sEsh4ty}#RKT1K z#6SzfZd++-b8zsjIP_72Hh>^uBI0A){fJ6 z3a%o5Rya0S*PY!is>3pcd(iV`!RrFtFdKLrjX@(6sls&14~_m`icF1-f;edNllnl9 z+`#tPZq{-v{uab-{@Z2HDfZIrwp}KlU*B~|yai@GNLKds;bN)XmL|VI1q%A*MW{{m zdbP`jH%WI#928Zc=P}sO$y z=~7$ugz~3xISmIHE{i5iF6R0pJb10;l+|!Q&L7`yPzyU7&y08$ax3*pH8iU< z>}=n|G_!SUAlMOp!FJB6<)O)WaV^Q-50D;X=dX2^0h(+{j8J%^_L@g=?9tHe#428c zEJZ?b_5qW!@IDhBi7z?>7}aT81{NnXH>&1e>*&w7xhN$8iO3~@NLm`&06TRTYJOGA zAA1&%6Thnd!02uLyDMQFHN}$A*OqxJ0pRV#tYI5Lp*Czrop7+%|JnJ7_7O#a7t!D0 zgsJzVeGg9@50^gjEj#y{X~|kB^Wa){m9y9D`9S^rf-aWFZ$H=j*+rjjo`D9kdO+P( zkj<0S@htZ#xdDsy{_HbI^-in8N zOyl1^KTJoTl${znFMm5vdqVi9^u1PuAs40LE)PEBBp|GWw^J7qqN9DPG-_*)I{)R& z(nJ7F%in*|8KwPAKJ9|dNswaQiKE)=U$4?BZYhk~dR|g1ZwH>IdI{nHmw6!U!_k{p z&Urn*)8}4=%!<^KFMd2YXk2}E#ipB?Af4h#q}R1(+fMf;(mQMUTOS3 zhPMNdH5#sRHi1OO5OKT4GJhDWEFF3&iov#wq?f(ZFZu{v)zUGUl;|F5SFT{M(R2ZvFq4 zWH;e3a$$R_6m;eixO?VXTSf3|XU_ix)E#)h9ug6uzO`Gj-Gl-i#_5R{4uy*4=AJHl zi|$1`@B9d|f@ejFD-86I`YD|qeypMl&vppS`!UyZYx=9w?oItir}r z2q`jpjad69F&{-Kk9TGrz2--Qz0Pbyt5Auw7@>UU?$vkQM%$jwW%O2K`O18&IVQzP zK27?r?Lz+pzq@%D-|+nl`Xy`5;x-b*3bpknBtCmS6Ya*ZS~JRya9;Z`i)R zPk2l6d!#W*y(Clc?x!%*)~$Gp+~p8^iq?NeESGN9dHoP?u$qp`a|yG9K*{SsvE`&7 z8w%EM&9PDPpDWngN}IE65h`wv+2|w7DP5)2^3GFVj$a1cIIe9Bak6g#6t_96Vl(mo zHeB61qWP6M1(YTpf$r96zWduAKsG7Cl7mLTZ!8g$Oqjc&X0;GhcU~NDwdvT$7^oKb z^~1FrHukD7g56?ymPlszJ1$>jT>tk5fmbDIN(+1%@OPgo{ty~CSwuHWeB2N$Uz5br zuK=xy|37(sUKZfY-+w9@bSvp42hE)Vb~;*@l|sPE1@QMx=VGNa5yzR!59xmG!v&)- zDnQ?}H$@|wo^sM4FGv>dMNLipbnVK8o1wtl09xWh=&#WT$i5x|-k$ALKk|H)$UDUp z?k$bY0)DtS%vZx}INE==3HFd>a(xb?P2Kk~0D}=9C!+yJ z)!X%F58i)FzB%Rp@8>msqeAeK`y5YGEM)xUc{E=l?04D}YgM&N7$)qDEsI$JUU&`4cC{@#TPO#Yj{56Nha5)djC zK-}uZo;)c|PwO~aG@L(e%~a#d+kUxp(`UJVwwh~;*LgtIpP~O8;Fiz&eb#d8RfYWf zUI3MUeIJXHTqUp*E(-ANHJ?Rfxi$RMr}@Q6#+o<3{id!P;lIZO{6fBZUb)H!UCffB zn=zKDVN4oLf0ii)jH?uty_K#1o_HM&&oxYKhSq{)gsdH-EjQ)P5T^0vz|1-j)treh 
z{=GNAmi_HC7|n{EA`(LbJF|LHdlmMW4ufa|5qth0>)oVYSXoI!WI}>tl)}~s?Ec)N z#*Y-hhj4mVLGCeS%kJIG;8d`}y3-!3_!%A-)#IlgC3u;=I0$VzJeo0C$oa2#T3P)p z^Xz98<0d0y(QC0_sotYYwiQiJ*6SI(5vka3@9P*bwi^~V&zAFt7X#P^U&1Bnsjo-q z7G?5kov`yyp|oFCIRPXJtImr{do_$pXNNr+U6&SD&Q4lAoSpyt*l(qL)|aF$l!epd zH_{Wo0A;`@8u0C!Dcw%+;R)vY!lVE9#c-D|$0pU^GI~0{T;iGTymhi3H!fl!CA@dh zq#>#yCtg>swP~{B&-c9==*zoB9$w+H6j&4S)oHjfPOb(R3qE_Fh|1Q;f_5TV#RkPi zowc1CU>eJ_*Xou3eDgOKZqy}UMRGdjopUVp(%CW9^msCtrT&ag@l>rwOh{PC@6@FF z{3g?|lC~#yPWT=^Z^YoPVtjigm_3CU7RhOMevGrMACo; z?1z)1drAM?>1R&43XQVx)8SKO{TV0i%BmLNfmmVOVojBU@1EhbN!fAN{KO<_{XUoj zU*YZNM&Ww@DG>mZ+l|YaRg9Y%E(rcETL7jdq3rW@*>2Xo!v$N~)nO7^D}3Bo*HCh| z?B;v=4(}B+fTQH{7A>> zJ1F=KNRIs7_}J+eyB_G!o=2<-Jz?=&AVZp`Rw=>nC%`0QffF2Fg0R9!M@j&E4Z92S z&Gn$)N1P;~H2O-lldOIMbLM!$?MZX2ayBQLuLs;Ou7ox_J)r{zyR^S#;c_q_z>#R} z;wLBh7ed$Ad~f1-UI=L2AX%H&fI}C=6e1OAeatbxzml)|R%3|BzfjpP_nv=7p)g&s z-^oCCu_(>dijvaLy!8c&M%~e9?;zGOt0pSlAI>l6xr`XI-g!zyc?Hh3Vl&%`zOOyKES!J zJ`Iu)u4r(DX)i)XNXQf1x|kBYvWseU?R{R?zq@>b$Enj-3_Prvw5z?ZG|zsXA6yt# zU4O}6$f@49mh@Sy0(MUIzD%sej+#}}%ug~g_vZv&3!Uo@-YpWDO9E)wW>_P=-b5Nf9MXC64Trca)4iM%IX8l#92l zh*(wh0E?NDknwu4^PGV;(Z_RaET-MQ`}FMzOAU|A6;^NSiQLa(%hrBDOzb-sMm}1 zNmo=Qx6GBE@&wrD;Qy$?G@1>&!*&sXd2JqY?ySmr0MY;;VNA-r>ioj1CJTy&0if-2 z0boI~hh%3cIyP*r-OR+92a{?G_jCyeN#5&TvStYw(g#0~ahXbQ(9K-W|HU7F5ws@) zO4DcNcm>S&3Ob;05K7nlx$Il>lq9>7&L*~Q{f`8<8emPU+5tELrw(ITZi#a2Vl z=1d^8>0jb@p`us|;8%rnU)1kqE{E+ZSHF4S@1%6%0O-sYv(OWzL?D%2w%K=~ zb)X#cgZPkbH$_=RFItO1Yp(4`#5uD5F!b2nkR3Y{N1R}(F9}4MMl>E_!wt&HgXRt0 z5Coco%pT@zl;p1y;5t`8&GuvE`rQX+C>*J*hdr?!c($bIq=nKw-1Vna%+(fa1@DW{n&*5LoO@MlAPPCG(gkB2n~LwZA)M8 zVDE=*)oHryn`JILKp}%amJTLyn`wdWpk369yF_AV^YYN$^#Qy=s5O~#G(Eq#2aq~? zOaC9m7~b$2vCZ;|UR@&#h~Rh-ExpD55r)8%9uWT{!0Pr@WPe`v)bLSIlXY#bM7_WC zR%d`g=IKFpJ3MPG+v}lMlA^sAIH|8}wcNs9^HYL3AlJ~J)iP3E6E1yB{vBQY4UTDn z|3|77LM^{oCNNf*1dSd`1bh0#A7AHv_C*K}r-AY1H}Mk^tX&5*F{tTsg-nkB*vM!^ zQ$|h`FA~@oGFO1D^%`W+ZVB#VB~xPHbgc#h%+Dv@=U4o}PExB^=aUZU{#T6Vjs390 z5oQotPyW?oY;mZo;)oc|XX|J?F=DUE)teT=I<9eGnT%AYtWc>?+WNjwI>%u)KssZ~$up0uo4MQeaI4$YDcU%|cG zysf4vRW=rbgeRQCj1o7whbiR2Wlz-AGHJ$voa*XOpKyUU*Mm0HaqhVp%zQCS7)~#r zGZ7i%a@Eo0qcl+gk-SshvDRdyRICCC28or+&HAWJqMZAo%}me#Q6%6xo?z3NDXhtA zN#%$$ghO5TW{dRo^HA3&;7uq`VbE=c?0@;ui!eOEC7~e=HWM7TkU!dX5K~%Ym z?#xJqDRM-QicpO3*_;H4xWPj$eXdscy6VuR^gc)L-QLop)V}`xRT}`6#2e4n`P#-+ zhZ}2ES#P?sc369DcDt`)M2hJ_;Uf1IMpvSTb6MwZ$OBRn-9niGg_n<3Z$`G?fWNJ4 zYIRlIY5DB@u3^DiWj3O6_k{n&AG7zj!Dm1?hgrvv*Bn2(g{ju8YLA(GdAz+6do`e| zq;|Wp>}w{QI#-KP)M}0dkO;pB&x<0S zl%by(41+mz=mQd_zv`$Q6O{8zj)*}c@y{nLHS6dhrxgAx7xfX92L1Xb? 
zq-QT%hibSC)UMG+hul~swxOAHD_}mR11(pzC)*x>@aVdxe0=aLE(R=45`m>|^8@^I zG-bu0;T)+4VU!@B9||Y3oHGmuOlvfvb4-oN?D^{1Y|rLZ0ebZ zR4DY03I13MXGpmz*=m?ao&Zgbe)9B5Gv)*s7 z5>`$zL*VW3NV~;UhGga-QBQ~b9_ba~ZPUj+f6PmHKxmen#}<#%rr$ieGpf&kub;?@ zeWvos=-GK;#dNT7wl|yKDWB)C0Q0_*e{IJW0>M#chf(>&&wvuKw41ngZI`O#yLLCf zxeW!IGp#D1xfE@v6-}~G7ULz`Ui(_~uIh2$sgX-d9&Z=FkBMPE;ezw(E{kC7ASJz| zuTrKW?DK~m7KCp^@I&Xp!v0zw{`)b-cDl$ztS<4$J-{IungD3cP9~XSJy+2I9-WX= zL*o?7ak?6F$rWJ_+7ScSELsw8@P>{0mJ6_q_9cY@ey@Q;zS@^;h~Y%-qZaC#PH^yU zE%a@E(tr!H3d2M;67vxBsiKtC6my@_-&N_gcxim-O_|80cU3m@$+rA0>fb8}wm;rC z`j%WD9-K~!Ity5N1SjaZLJlA870bk+Q{#STB(n7T|t?44DpY`=dkyBvMX7e z2zB>59jH}$#I94dMw#_+X@LmPm|+h?ue$^cLUF-%U5V_VF5R6x@nx9c-K(>IESQqU z0)>oRo>4W3o_h~8UUvQEZ07!t*}{6ELJ&n%P$Q(G{^VPIBPg^BuihX%1e>v74aqAg ze<4_?C{Uvyd#XriO_*pdXbmh$=dXg)7Npu}@bY7ts9F+VKt9|K;1!cPMx9wPsu}o$ zlaS$UqlAshDeaq`4fM)pdSlh>%f#0Y%i4i_^VhqH*$gw--N^+x&}aqP7$CHtA+Tg^ zH3P&YPsUJzHRzh%#5!$~E0A?4PLLXurC9&PlLZRrE7K>ra#D9yfFzc*g9T~3-o>Je zP4Wn5kOESK8_eLY`pPDtJgl&yBL&hcDf>7e`e|;bePx(RVuPAHma$&&rC)$u-kKE- zdBcns_u$~9`CcBJ2VD7;0c7n{`ivpRB;t?W5wn-;SiY8b_||C^(Nm;Z=zlqpcaYHgqnys@tld3FkxQslpphs8!HzD(BIUUQ!J1TwZt--F~$`=>>Pv5)gw`;XeZu%LFc z&LBjX)W*{adZ2dQV6l?j!_88XeYke0?}w?dd9LKGTp9l8cJL7=_CpN_K30A-IKhF; zR>LtmuYW7)gs?>_s#Pj6@UFE7M_iF{iy#m%Z99@(K~p6r%Zk4;wi2&IoJQb0ks51T_-nF>?5wr7!vI=-ad@hofMW*hnq3K zQh%cufMc}PUei49cp*VR{WN3C+(Vc!NT-BBGUu%}$Yb+$+Uv@6Pk|(Quib`YG0>@^KPz^T^<0l7535}D z;B@#}=8?VPj)~#sRD2qWKg-sfFX1&){k*i*7Eqi_jincr4_KXwozk;g6S-SQ8bm)n z=nb}Ei{tiF!|x9F7-KgoZFcoNX^>hS!vym=A1>r_7TXnojbLR@yV-OGGFyGPvgo3V zV_@o4wd2tFU?L_1mofrpdLZw8;C?Tn++JxViS0jXt+;0Ms0{38H0L~e<^Wrkp0@bO zs15d_tYfmh^hN9XKcGZHA|VeT2CZd0qXfEs40oH{@-fyrJt?zB?mT7{Yb0TE)=UTN z0`1IktyUQ7i|0H=1CW(t46-3*_U^5e-n~MC@+p(UYymQoBYfnb}9>){G z^xz?#hiBbh1-1NII>Sq+CMN5{)}oHi#ZqKJy=_UH|6ajY+z)^DWhe^&9iSKUyjJ>S ze5*`XzLXwTrnw2&kj`AwId}v^%Q}34j&U-Fs>N>SlcoEYE{7NrtT|wY;?QQ_I+k*# z8Bf24@d7ML$5Fe--=)AvjV-@&t^WI>^|OCL^wskY#5;y(MF1I+g3lD{1CsWH5tG(B zB?#D}h|Uyb|Bd&zd>Cfjbmag84Ew?ERZk89>?IQ!AOfUMRMC!4*1fWkYL01n%l7+9roH2FUu)Png+5d>2XxK+~-iZv-``b+9J% zwDTy;4EvAiissCm@9)xP%e_Xhqljmf#+JW)#!1b!km}m(BlB)P38lW1wUHvpNJFv$ z-xIMe{g1k%k*-5^`^`23fVPQ=DPw`U0G_q%C)l~H`0w?&@T4nTyHS>cqpJgMfW$rC zlCZKrF)rIRXYV7Lx%8a-ND#Gc0pnjR+NXHK(chHHasJg~Z@VZDTC?pMo+Uw=NO9YP zk7Wx#_h=IDgd3Y271$Qa=9tt0wy2k>b$?%IDV>CFj|hMg7%Mu=F;e1hkB3-la`Tlr zr;ZkIzjs4PhK`J@UrJY-N5bt87eLG*>xigj0abb?=b0?h*5mJaR^=U+RlIpA4+b>l0u<)yWIL9DX6~OzcSZ zwW1l|B6$dNi%8OXGGE)o95ne5Yn(bpKf0i~9KdkPS$SK1!IqHVs=tpH7y{k+or(G8 z!dLuP<_Mc~gAO&}!nPP-FkyxU8%z{Qiooj*Kd$MVDfDUOeCU|*ktA9hNvBSXvy@}A z++N@4sQ>GNmxEc=3N3RK6s4#c01!!POGdJH(25aI`BFTV-C8%AQ?I(}cQ)>VQC>LU zZt4d+^UJ_?`q9^1(#*>Y`tGwcD^YXu#L^!Tt@CQ#3}80wBVHf!TjSTFk2-fbiImP3 z38}4GBZhBtH#vNbS)2!IX0Li4axs%}S?kz15wjuozA%=p1t&`7W`lN=Ile`_shhCpt(uLmZk}{P?w3g8xzmbipdbpl_xAs)rI|Sj=>PE!#3; z#kW(|Wk|m(s#NgFx2gY1>4#BfJ}{#HkL(PlmurdRM_O>g!{Xn>daI8el^j%W?(BzL z8rxq#+Sb%>6T~FbSw-${M6#~Mg_eHL{rSaQ(E7Z2@Jc|FJUtU|US)RFNY&L#6%20;Eco zacmj$^S>>EMtuK=R`Ab5zW{g#`C44gGqk`1^MCue$beQsjO^@PKtj3k z{p`{2<}2hi|C2)kdIJ4FDy;D5zjbO{OMqido96fTfqeF-3M>8vlCAndda-wN`diyD zyZ=n#kM8hXyl^ubS8v}ENt^l+^1o?Yg*9MK1_j-s@RKxjcs?^Vd?E1K-h+QnL|A@$ z{Wa|I(dN$DDy-{E^cH#PzCZg8Xb*KEiBbEJ?ai0Ld>0xWR@aXL8TuWB zDOZ?({IwnS=e=TBWy-%pploSq@9YtXGLBJEY?ICTp3zsx@jn|m#5$X{5=DGDrdCq7ra_T{|;mOm9By2ZGM6; z`l2iRgBU;-5wy8_n0R z#7^f1y4QdF9I>xnO8=&kjEF_tj^EBOy;lO-xdJf5t7(87`?Y&U#G$EmvT(+vPj{_= z|06fJ{#RxRlk1nMS-*d&h{mt`t3P_~mba#UYM=#%ZR2;j{TnXBinOb1AZlTlT9EnV zwxjoNzoT?xpDe6t_bspXqw^u!%=*yCW%TTwC1~d*J5nsa<5Y&e{`d4m*La_cQu*f| zHHQiD{ToP^F3MbB=xKV8TiX-)M`_$d0C;=IclSLgnXf9I^9C+J1f?mn+aEiiarO@A>= 
z`{RA*2jg}~!4?JV$N!u=5w}`aqnOy1f6krn@s)oMi!0 zeedr+&%Jm5!SgVCX7BI*)?RCUKI^kobelilsBE%a04pE%7lJu#*fRQnw26FKl4ctG zpOQOtDszUGzo*Od6uk~WL|EX(W#C6XpnW<#D5a+F+J+z11K&$^YRm*E71Z*LHbJjG z)_x|`wv?9u8xKpFO6R++81ax`+->I)Dbndhv4H)VqT+Xx%nQHbKZl1Wc)^;8ML-f z;~8?6Zs%xlgJ&*pR4CBpwqGAj1A{RMM6G(#ficwiV^^7`J!9D@36fV+{2=VC1KKdB zL8zy&?O-`U323-yYuFu`6e!9S*68)5k9ad9JuhT*1GeSxFmBWg{ONvEQS38VE% zZ5g7)payYun5#+ovep+)I!zLmXzUR0AQVbE=KuAx2KayU)dI~$-`uF==w7=0xs;vY z?Lb$<1{AGPMv0YKyZ@DSqXgDC=4Iovs;P3dgpM!|)9;I}y3tSmrONpzOV36sLq_K= z%4i5g1B6cO81Mti=ki`w9ytWZqq2BgG-8^g0B>DKUIwQ|pH(@j|4rRUy*7Mbk!Q}d zF!JzOL*?j0M`CW}BPkJw4ItrMhDkXpL+n(G)aOr%07aAxzErs>kj(MCeg(3WC00Sg zs!WN&t=Q=x>gWC%RB3`z`^|Fl!nj|j^&t^!pJ^ak4_ixApNzM<;{@G9-H*2Rp zx3ewk<-ALa6gr~y8an)_R4Ea+71g8Rn?!=?q@~v*->+j^j>^{2k49P; zw+-@t22_oN9L*rp?IvkFXhI1ea!p;viyH3QsLw^OQFhu^hc zb1T`w-cnA&2m>_~kAzV}aV%$-J5HY4mbY>oW7en3zMujX;8%tv9z1NNhn^=8Qcg6g z=a{hjFLs3jLx#oQ$0$C#B!y6fzc3k~n<=p@7fk`!&}Co%uRzzj+!3EH#Wr9u|9H8i zV=vP{-;`>i_fx=Esyg=#AVW>d;4A5IB;~TX4D8RS<2i=-LX`MZRE0S-osYmYzt4b+ zky~C4bvxPSRL%t6C8>S!+#60!=ZQ6@yBT$hw0&$}a5pXleLo%N59+B?88|~ZE}hp2 zLMkJ>fg)Rty;g;5xDzYUPO3=>ypHJwfFL4JUy9Ci+G7f!Y%|r(FzyI}fH{85=sPG> zCmr;sTg2fl*6t`aHNJII1QZrYczk4SNXC)3oJ3yJ*RgMI=fCk-A$~4z=u~z|-|46; zRx_jYs1GCF=!c(Xh-&1Z>c{m(Ka%7I=oKK;2;_fn0q$T}hLU3c`ec!tX1s|n5E7g@ z=!3I4yiEcMS}ykuMBs)M6;9zvjgTrGoU!5c^|NXU7sZ9Nb8|E>hjDQVJomgLw(VzD z?CZC7&t4oNQLD`oiG{0?F-CG5TYgh6Iib87*Q9g(mfifokMXi6?Wk&u06kt%6SkST zPFx|QRWL~bVk-DqBK5MYRPiS)neJKblD&OHQMMJg&Ie+(>57^d+n&pk!@Tz9xM#s-E{{$`QDR||czynL*S4stV#yc8PlaB7 zG1XEgH#+p7`*}K`s1rh@NBak<1L4^I)wUl0F~P7y2;ZU_kkxhG)ovgxZ00)5dl0F3 zDt8T*vXeDDydD1(;b4*-HBuRP`$5PW@Da;hAi1@9a&i)?tem}3)TFi3blq4}8xN?= zMMg)PO_UnA?svWHLweI4Eb}i$;ZD?xHF_^kG6fMen1(NERjQ~icMD zuxSpJ^KMB4bWboko0h2>i~#MUe-L$$&#~MSucs)1`uaQ- z3*>K}ub}xxoC6g>hl3$7yx%{Ry zyjo8w6l0U#yBG^h#?i^FQg>_}ryIG^BWn zhIwaB>}_M@i?-*In}->E`kd67nB2U+`uKfmiiY-gM0Bq$^|2FZj>exEtinK&v($T>8NrguaFMtoNJdBwe08YhCc`^3?c&2 zM~>Z`a!dv|4)%$K&H8Cc6gKXOE$2;UzgcfIsto_-xs>f8*VxggBA0GWoKx}lj}5l~ zxt2jxjY{UmpKm;OA9$wV2w!}g*8eg-9&3yLM($@c?2sDy!}c1X&sH_{hUPvfvN#@ z5^#^BIsU`r>oUzcJsYYge^Nb}*$-4E-Tp?h;Vs#fRonLhi!{JUm_4gS&qlP;)gYQ< z*W7!O{519My|W3ibnVOF*Hrg|gi2I2{BXWteq=Q?Df)JvD-1viLrAt z#lUnlo_1O(y<1yi@nhVE_sZ6f-Z)$_<$6G7WrvOdE}p)Qqtz@c<$9h?p+_*phMsmiXr!f358#>wuQ9eVgFpBIkVbj z#x+0LSWzTdzy3S5WsmjR7^rbp0nhkgL>A9=udjOYSktr;#5b86r3pcgzPVSqBu3&;@7LISH>NE>sxXETZ(pyxQTL#i1WS8+ST0#E50Gfw(`(=JA#>>5IFN*Y za(qx0+a#@fj@v7N*fW#iijjre+sC^DqZt%(`je9{=G*)Rqb+U0YB&kxK1BAsY+0j` z0Gu*W)L6baNvveNwfUCy)_q{%&t}VdBz8M7(VI1Cm^vpb%#MtJ1tFO|Y-(yc^nlD= z$1=^T@l-5C_^SGNdnRx0tY8kz@;qY3lAAl;u401=p#-*F2@t(U^69EpTmjUt)U<9G z4|c4U(;R50nt)z|n`*ZU;yUpuK&47ubrqSNtrstR7K9q`ck`H-F+*Qu&K7EAQq^$z zmY)x*O<)`kAP}EOkA#(9(iY4oTMVGn6APy~4Z-cBx7FWuv)NWal6(bg{w4wWH{=Az zP^vfFJ+EDC7%XQV8#7(lNcDRaTX;pVBv@me{uo`mPXd92&0g^8jfIeLcvwz4cM7$R}cbG3Nml$~V% zggN&*JM~rd+HDIYt|IN2v#DxEuDI)#v@PTGM(H>$ca)>m!F)Nv(f*$Xjg37u8fRzr z>+B{X+eNQ+%gEsZ%+ifX`P^KH^MMBV&^j!4r@nM|e0y!Q&U2V4M0030)JyIs)b4=y zZjkO&`RV%%w_0QqG7sUEm;+uo80e&@z1%8gF5kN&R;^<;nos+Xjn8(mQ)Fg!HHXW5JOx#& z6KQohYPA-`J^SrPTL9*nHV3d841F@BlPi6tU(mF|&eo1ID2qtl7B9-yVlvOXF{Fn4 zjmix^_1x!;CtRY#AGfcUt9wAD8jlo`sr<{W)f3u42_qilCo{rqS_zecA66c3CEH$R z{kd=ltD<0QU%KKvO6`=4{g~oHYed6M!C)2xAqU@9O>d|2ad6a5uZMFy|3)9~#feyC zx&Owh2``B+G~?lbYg0m7e?b1ez%)%>Mz7o84+7)fsH!!e`|%k$lU&Pe*fK9Cn$+(N z_u=q5oCqp`XtCVwg_3!k+E4mwYc)OW*z*5$?^u5 zK4P~vZFxDj!`41uz+@4SxM0+6{8i)DJATj0A^`sRpkI>h#?roH{mh@2l~kWYKp=m4 z*|6dI>YR{-1W9(iM`m_?b%_#pVrgm-g;VCOlUXT5W_)OJ&HVdsN<3H5cc4mPcj(b+ ztf}E@daGEo%uJ#~I9rMI0HSu1_HO(qA&%ptV|Du;``35yeLYkdL3@6^9Hq{OY?OJfF 
zPzLBuAP~A%a)LrXF)8y|8xclB9{Soi1a}n91PZ#?b>o1Q_dU+74sUW39s%81 z7ssV>eA5AA)1!Q~BCP_H zSeI-z#O-)9c4a;-f1PuGOyaCsT$n_A*3AADvm*|$O}0B!ER&Y89Z=07pbmvhV;bzd zIa#lmq~wHF(k}_!8#HIBz{_K>^+UI%+KIf;=m%Myi*^Ntb?q94Y@{q~$?fb_5*WhU zam3K?*0~N%3ds{?ata3}ycg@j#Ncy`x(L5Ks{rj;GTz0Rk-QHBG8PAm%qE;&M`O#3 znhr^Y$2ki=ViT02q@EhEm0(u3@y$}^*sF>|^<~*%RXSzITJZ<{+$VB2D;m06?R(N{ zZHllJ4Pngu@ZgH^4PrM|AM=z!H&%zDVuBCmp8NKL2Dk;sU30u^ismmH?OI0;GR}lf zqa-C8Qmd$=s(War$)ccC*QR=FO(2HKt~m=2SoYBL#)Zddg7?AT7_N#zIIi-_y#fq5 zyUW|2*4f4~l9tL)y^Yl!grHIM=4NG43!Y5db?dXC>HvA{dXpIsBnh4!S2bB!^2u$Q zUdi>9N@83mw?9#gt#U+gGn%w58dMn;?9MsaE%Y!4B3}s>=j+!1zFT+3;I6s z-@(a&MM4Nkz;oa3#RMd`YaAW0VYcl5pFEQhT zYx?{HKUTywGhmp>Tw~-XfeZ=j)&<03R1nvUZGCrC*>{(6$@LO<7DSGxH4O@Rg@dss zH5Z*RwYS>ACS?XCc~?gKBj%h@3e!%V)6Kb9MR84b`+1j+c1?CRrhQ~cNlDsb?syma z3b~ehaMKGlh=pac!_FvZPH+v+m??>1-D_fT;tBYi4YVdyF^xoYo=WT9c6F~GWciU; zfO9!2$avbJNwYlNiX)s~SjdD@8$(&ffKqgy%yM)c=qeL_|DNm8_|H;xMN^a9WP|$= z2}DuPb6uJ&*gHpiH@Ag zkoL?=niY1iZ~vO$k2lqX30XQIdd*9Avg)TfOuUNQe!_T$)9xNKRDu)qiovNMD<-au zvC=G;lIlupB?fNSS_%k1bI)SK*cyM%{d8c>Xu9?dD0_Mw8HDIDoFxp6aAnWQ$(TGN zlk0cq0A389P&;7?!{#3mP24H2@sITvt+0(dFAhxhG8%4u6Ck9_9HwN#dKm%%x?YF$ zFP)D8X(c5k99{vG6ndR!e885Q^pgVpep&3BsMx8#gr1WW1f_x27WD6Tg^8I4Cu=R4 zVoFKuF0J9>E-S>{gFZxtF-td%>9v(o$F93sRl~ggQc&pa971CCDGKvV^JLE(0f@ z;>V4(o2d`Hf zme((jnkgVt4@!P8b#BD}V)=7l`gW6va)Id3pe*N4Z=cs<%G&a>w6_yn`W^Va0`E*uqt?Xb$E|86b(6@}{cMu?gANob^u=Al5 zuePkkPf)t~#NVM=(F=4JCwgLC@&WF0Vkq-cdbT8k9HWslqud2OZ=m(8%>#rO8p^EE z%`39douIgek@#mwlL(W2tMnxtiH|bR)B4uF+W7&zR z(v`!k|3}XFb&kTyBNK!cg=Y{Y7R~P1!W~sw(c_Pt`lO!+%fDECq5>(-^$>k_f1tMH zMxADV6=MX)xB_`sl7s{1@z%+J9KHI-)j0wdynU=I|lljS_X9DvKVHMcr3&*kR-2M5xD_>gvr$ zXnW9bGL{AFp~Rb>hp0;rl{z}$V#dTqX$BPlX1e6Ivxp4;1B?Oh3~9Q;`S5~u!6aoB zds5I@w$^EM9dg%PelKL$MGD(6LI_`J3Zdj0sfn(fEoDl({ zB{hX?^em``7M^RlHjFEOt7NEBiTPRb{aX({fXY%3iE!Glk_cO1JE!nal&yGrU`3wp!1 zj+uNao%MTmT(xXf;c$L;+nS&O%kfC8-k*yk=nsMzK#87_nugmV@g(e)T^o?-Q5z}i?_7j*=0>et4E1;*r2}1p zh?znrkF%-N6Kk(b(>3tznhp%YPX_7~g|`mN^7fk0M~tKG1#+V54nk7Jf=#3AuabVm zy0n*R)259saf&6!ReouH8lZV3?56yoqJ1wL`@x7~lX{`4FLCf*v=pb)*+^-o+#d4* zzxt936Zagy#{hsXXp&YmM8Yl2N4K?_63Pg&mq;)K73l#dI_gvX!F<%zQfSW>WGd-J z(4x@?Caw_5ncYh|!2I7d4d3Y}tl1B-G6?Yc6XZCsXlI(MD9#0uJ8O{TWN?!11%`{q zaPH}Ati-+zc8C)fVuCL7_?Q+!cd9Ixc-e8ai;YgW35R1??sh6B3RYFsC_=VE&3v>4 zAz6RNtL^K^-%E$z=g0dTYnJ_eccUVD-5@+-c4LeJg5GoM#Ouf=bT6VmSM64$zI%h& zT|Z>`g4bHJEw>a2t?~%+_lfF_Ys+nqo>Qj$R(NLMsLD>8DVq_&M>`RX%v(S@%8>A} zaIUo09bL7&pH!~VuQCW65C|LUJd#BgY=N)8f99Lc3QcZvS}&<^u$?TBG@8nmm6UZ# z=hGsCdght!4{25MIc$%-?+&l(%Y7IJugV*!T6F`}E! 
z01VtaX%uTr(*WwRBg;AZ{0_;Mr+y%qu72R*6SehW7=bLv0Cm^ zTG(lPfI~q|Y#a3e;pf= zCjy065;nM8D%&}Jy87<1Q@dG0UV?{oqXmd+M?9;g+6=$iBens1hCk0F`kGkkaooiA znv!wZW3=aUKO#f^Mj<_bJrzLh=~s>sB@WnD&39?f7sKV+dwr@oV8;_iwQE`Ny=`dy zw7JcaA$%dcNDmf<>mWeXrc4p2cv(C%Gox87m^eB*>b2!u-w6(&z91E$<#E~1pfuV0 z(Y8#7cJSWpFBKIH@8&N*2>-wzZL7V%q3x`k?0!{2*=3nGf>1xWJ|SUgYIK{i>Qp`# zL~#YG?}q+n2cV&)h%6bgAQ+gLbAZ;NEo0vK4=hb(tbb_!`h8g27d;Z-9K)k+k=xl1 z{3H1-H_Xa@Iljbi%YVPr%N6)Yr|~bHfAk3Cy<~$HOP_(jR5uq?`TGx)^Lu+lq!h^} z(~cLGM?5n}-Sof*jRH+R^1_NbLgM36)1bz0`U&R3Nauu?#ch$YH!uFf3`qF{%LpS{ zqAcP-1@&D_Z+?Ye*G7B)L?4?)_XcBUz`a?Uw`U~3(b~O#yeA6$_dkt+81>_uD;J9p z0;ljFI;H)-5*k%!y@;BRPX`LnR(U?{_Z#d+`~3jL#bP%t!#nNAG`}b{(ohhSRa=w>#%tyn*Z~x*0WDcB9}x^QZ72*|I)E1prfZB2IAZ` z&(YrVa&vLD?;GDf=CiIbiW;gRx?9E$=4Z6@o^QbS@EL!M+(lz*1j_C{9=RnBSckbg zob`S^$M_Y-c zr}M$6t3=c?eZ+6NmS;LhE33O31CkjBh*ZR29qc?sF?LYLa=W!*xlTMvBLS^A^dU>_ zqsYj}wap2QNhlf`Q&gGJz!Ux3IcNR&k&*30z}FkPgNhDt$xrR=?WkgpG0y;zWI`e$ z_Fu&w=kLN2s|4a6vH;`Re0Ra}AFf5pSKtYyTO9w@uKx^q41~gmAds#{wFcELOkbd* z9`%d>Gv;oSC+ZWRt)tdiEX|!?Zd>pQXaO*&Kgk}U{Hfn;m>NzOzw&)A3*hTILXZEo zTyz_SATcdDUbj#B{-$v^S4E>?ZH*oDvbdSbs4n=YNi?k4Vi@et71f+GfK~L~r*-BZ zV$UnT)Ae#*@5dqbz%`1ktBL-}N1^P;7#m4mZ zyeuPce@{!x+XQ68A9=m8ekYOmUI6h7t+@E+8LHu#6sDIXA!?4IoDu!>vLblSU$Xfx zUSr{5u}RXGt~o7E{3Xv3HfJL-s0pMy=6A_ zw@#F>!V;5PKHm;@^KE~;!6KP1%AlMn6AO~+t$;VY3le}wocdss_T@=zj$MUNb)|D+ z9oPHkQw6Eng-{wC21rb6+;(vPJ05n#cWKMM{ccf#e$>A6^tC6x?+r;EVYV zwDEO0-;fn&CrEh2P_uYVTR+>LwuuCsxT@R1LF@caCFE5l1Rg>7Gt>aJ*3i&mQj5e< z`=W|-!p-bzBLN(#W`hi?#JAWTKYIY=k-DNmAjN)LSkMrL80na(ic0$~3NM6jxh`nZ z!Ccc0ho4RaGA=KM2_Wfl*Wv%N2pP7!gA4+~UdC zE^5Oiufq5GkGv5V{sN9OyCe%U-%7rUtaW*U2(2`*WhRD`uvqwxQ*8xrHq7Qz9?pH4 zmdecfd>s_|CC17i*?qq&rsl8ctO22OY3>>O@S5kCYp?Q%H_kE??@%S3#F!PSbR}xJ zR6gpe;Z563hX}?o3FcnmpOWH8;XF9(w@acebDYQEAXFZZOs*s;QtY@2jkDt5uC`T# zLUGNVoSdGS%Vk$oROs-NhCDNW*1FT;PLK%!U79-ilFpGTs=)!|U|#B|6a*(Xb@+*V znR}J_v37^`gjw4qbyZevW~Nz!v*YS@9EJ1#@KwlJ}!(sKO$o!3FaS{XJc^QQH3il zK}hm#`*Na}soATn+t8_dxK@3qF%)Zu_pcH4+`Th1hb;yQs-gzND3aUWbK!+$?OuOU zzBLUn10RV+KW2L#newDxC&C~7a5(v>`^;E`O3OdF#?z_dLnd$(#6k`C8%IeU*oE4cpv{d6!ARs}4=$PN&Wv8mU4U4s_3@ z)&ss|))$$Rp(ATvA{L{UF}!ES(pAV)pSOrAr^p@k2a}2hXL0Bj!YKo!7|)+d5>r$^ zr0NE5W{+Z(GIP!bCG^M8mo;UN4Tw%#ksdcb{5q!81}nIPbkm{a)=68A@4aYQ;4v z2|qe#nxLZHqi^#)n3U_J5n`bPm5yC~Gyb*8r*gx-Toah0=%~O8pXKI_z+|__?aE0B zl2SJIp-|r57c&0in%G&iug4#txjL5e& zHSSy;?=xk9QEX@gFX?EdgHX-EdA>1N-IHyKJZ;px#zbM)EQ(9QqUW874$Q*CaX^^(xanS# zAUXF7Eg`!(gRG^r>Wzg1t7T)!=h)(Td)At8>~dMc?A75ykU{vPldc`>&hN(ZMW=VJ zM9KI;u^HrmuUS_>?WDm}<-So#t2;}!blkPkPpYnUzPp!uZ0c@l(6MoF2Yu8{5h6G{ zm!4(RDv4+4$Q9Gh3FAqL!mEKf%*{-*@S(>*wih{S$pRH8uqW zkRDpi)!i}I_n514X{`i;>TdW7zG!}w$JLaqp?2V$NNF-3`v7@7 z8VbMzMo->V2o9y*0U>6AqMBa8sIf+y7jTCyo3}U`@cw6#;=dtJ5hq<$(14rSsian* zhqy)Z;>&waWmjq{P#HwS{D7*xifid8EMgHxO=rjRZ(EeRE`r26XfcZTTkjX$5ao5Y^Fh?T-Swa$BR(|yot~B*y3S{CHLZ&8k=MU zAN-IE^6YpLA3+MXR}K)sLFa%nH2)AuR8!PCV=Qd}>k4a&uT-vgQ@O}(lPR#dVrq|9A0tFxKwE;x-CdvxBsl^ zr6It~**e*lA5PLej~{*I#~!;6IBN_)u9(qE|77IlPdbn&Wk#L(^FvB-0*6yGl?6w^ z&kv*y`N!Q20?&w7Yx}Y*Ls|Jvxj!CySzSDs;i)4`UIQn}4DF?;De0dp)h*8a1qGv` zNe$JN-6L|P-1a#ej`0~8UPSe_2KVMP=+$rqm;mZUVe%SGeNqn|j*} zEZY(vP94+a7;)?gTSVi(UR#S2`)hU+@nxgRU&_nTH@c&K1AnD%-$3QGh z+yOCJxU&~JGfWw=q&nwolQ?O`Tc~6ozZILo6iUzV0Om?+jIJEP1Puo9L~d}?dFrHg z5N(nKO_)T^d6M~z`LP36_ZC8PDt%W+!>?uUk_7t0I%M4f9_)x{trr=v7O@bvROF$W zu(VQMwI=6g+WUB4+iEOvj7Vl9sXNLlj~m0Cd9i&_zRo{TvcL5Fa2^a=hwK*UPQbut zZA1znCC=-2FeVifq4w?S$DLd(bc&pZG*9G=S==x&eVs@ zEQj6?X?xlJZdXMktUzmm3QJ%rrBb3qFEXR*=S3*lRLM98Pd3ouQw^qh_6_mUcdq>lp zPgP)~sZU>e?H6q_xRM6ejHjsuL|~jAb>hB;mJy77xIU})qV1|gh$;lyOCzm&@#(Tx 
z1(&Ye-)L5x&&k)r$AljkCVmzZZzl2&co}ln^Lkj>oCpB2y{J>ElJQ&tGz~sUR*O- zi8FG!RBg(aDT1?Vrx-ljRi7p8Uf*avdfPvap$a$8EyEMrdA z*OnFB)zZS4z2>%DWEz?fepDNCC#cHI5oqieIJvb}~jdF>oe*!>&SJ@kV_`Z@8 z<$rBr-D#-Im7w+;{Ona^NQullods0@w1z1?{SlX)&(v?I6NDom@Zd|0%MQb5!EBl9 zhMY|7&$OBw`%4gH)q&i5BC}5v?oBqZ)8eJ&(!<1rV!aKGo0YNRnkh9R^O99$kr7Pi1%*oi0 z5~2aqrQ;mnYX*hJ|g~D8Wt${t5&?s$)%SF33&Jp3*>scocdmcIvqOQymtW?O$?NPYp zQ0w4V!=}yj_iCpwzB;wV^1{0698+}0;VeG#qBDCm@i7)&U+}LU10Ng#;UgemK)M9< zna+0`WsF|mD~ojjI*@FEy0ECV!kxTLsm&GlB`*r|eSzyn@+qRFQpsGER~9FAOSarO z2L+4h=!A+eqmr6V{j767OurO0OS{}<+2o6OK0H9QWA3|Wq3l5p}%m!1rgnQU&MsB+yP!hrkHy{+T0mHQKY2E#SWadz(7On+tRtC)wkn-6X9 zM=ooXP8o5qJzM53;$Lyt$w}g~8_DPSf1qFV1>LM0GOp3yfa6`veVW$avYyE6?zY zt5sQC0UY~}3g$XlWb^EMa4F&xXCvUjmd0jDIO~1({iW!Y=VcjB-nQ4}b@w@7F3uK+ zw)Y)kaLDz4JqoIsCqHYtjvfMt5nn@Diz9~(LgR8b%$DXJLwJVXqRe9oLm}JGwfJ352I&t+CT&Yjih}omYR(7vjcnbGamW zjy+_P6h|Jv%KNfx;9po6KE?=AII4I5uIg(}Pdd!!OPLuVWKhVN0WJ%)j$MEycCVF} zc*=Hw?IJVE&)&e6->jJyZL7Zk-J3jiUrdPAqdtw6?e-ropSKNY|G+vw^4` zalMzoWLa-BY9{+Zv?Rb-u~X(+`exr>2BSNSUJs{qYS@nJz-#wcbFA_(=R9_CyFN`e zh~9av2tvGrDM! z&DMrcJ-wRS2&Fs*YIq}Mk{Dl9Fo#EMLwYm|{BcEy$C2Zc=K3U)d!OSRv1u&2sl!2S zcy30?DK z1j97Wv4l-}D?E8E_gQG^Q9G$LI zAxkD1l|9s}qzWUaI4I-mnJ+|+lvgSSdVZwcD?sG3o{b2oJ_p;#mQ9S&UP_+7p#{;R zyLkwOz3z0~xqrGf@jibE8>ufmr>L3!X+{>kl5j$W+UDocWL_Tv9`UH6bD{glp-#iL zXnTZz`4M)vf>u1s#Ulf>PO$&Y7aKgn-t| z#uU%aKL~oE%I>cmJN(Hp3K!4vaHxWoQZXv4l;ICxNB7aO>7FB!*WFQT!&J-60(3XkVGTv+(DvoFY0dkH=hWfl*hku}>U3Ii=C z$OU@1S(a=1?D!x}v<#RJ_!+aq%n!4^f_{_sbG}S<2W{_}&h24aPbU80Or@=+M|m6X zqBoUjVZa@_hHdlLob3D=N{3vn_u|q1C2}8Z8>->Mxgd5kpL42uF16 z#-bUV0fXiNtXjJUZ3pbS{IyM|L4}uOKj*Qw*W~Bhr2^9i?_mUKui8^ zH&8h#|>!WqN%Xp;o+fA0Wd52)Ub(#1><;d$3TMsNz(%b4A~?B zT98`(c)1YmdzTi@hYSpbuXWzpVBN(uD=cj3ygth1x;mQwdc1Y*SfVl4kd=Z}EdSpy z>tDwxoA2h3Czq8`BMX?Flrrh|d53c1H~VChBy} zhuhM(FIV6n$*(AIlOY6^@eF?bA2EVgew{chr7F&WD7a#%Iq?_o3#b|4$HNK7WI<5g z;Kx*6{3zbt&GG(;^;s`BVZS?jiKc459;$O9?r%#ov+L>Uq0|AG_&ly}zo}6Tp_p5Z zFRN)PgWjOr@1L|Jt{G(I=l7x{6GZ<3_%OK>x)?*KL3jZ_<56K-Nr0e#dkNq#)!&{j z93VvT?sJqEocZD3Ez#-Mfr4%Ur$3ysqEkyt*@A+WMvd>I&Fc~n5$WBSiolcc+I9n| zG;fFva0Pk7YW9XlK|w(#gXFS|X!AeR%Z(quF1Wp)oxbzegqF|O<3RlR{m?7iZiDWN z;<0)xv}W@d&*T^YX+<3@Za^YCgFXsXeC0Ra4}eCuN6H@wguLp-nq8*A&(UZePsiRs zw8L5dH;C47CxX^n0sV$qmm=~TYn~|cxdym0?*qPXPL%yYaC@`fu>S^rFnw;}=I1zW zchC;f^7Exp9j|-ag5ECx!vLd_zWY>N82IGBJ5tB_Yx%!>0oBNxgXRtjHBZOrAy@p* zR9G{(Q7*vzFo(Jla6H?FQUmCDTD>$w=&yM+2n=SRCm>(HO!gblWtz?ntA8Z6bkTsi ze5N-FrMf-dKVeA#eqUN&pMqN9DiZ%@H@q*BU3u_)@8QiDd)NJEwxM&exhiG%FGfj} zsW^YUnWil>|4h^7yF{b}$k@TMr&Ezpzwp-=|AL5%n`I3EfSWS+79*B_Q_cU+Q>*CQ zfDV$ki}SzFWQ$1yh;Pa5>U{98*@(HBH*5rSFoVDsV0d1w&2RrR`Z^zEx^+ZuXX8Vs z?B9=#hS^K316A$6-5p5&_0SB(-+PT)9DOqDZ?L!j>JWxv&>gX*|Km-5Gc_Wx|FtdP zkB?Ez;hI0y0ldNYV1S!v%}Qb%+4?09u=~3;pr30}NM{0obu~bVx8y(? 
zy~27*Dm<07(AMvY^VK!mzud9t2OWWy8XNR>5@_|EvU%U4V?b| z0IqP@R}6-O-HvHZ3xDl|?=h{{rJ20L48nltrMTG{u(xGD&_n6)<)=(T_?Rg7x|fy# z80=;IqD$6zii%>@5PcoxdFx4TzlJ89jc|RA`ub94Ju$nN=U4X`8u5hp=s-y= zANuPI+I#fl8wdK=CfQ0t_xnuq-5zc70^P?OC~bq3a-abWp@;Kar^MI*KEt_5$fhin z+=1s_2_lUiZ2xYZvwWq5P7X5YwJ2ljXq~L=f9d3rj3;BdA!n#AG7`slvcoBPHo7#) z$0bTFL|-#Z@7wxfr$2)u4*V08tTEweu_MF5-m9x^M4dh|m||nj>2j{Vpg3Iijw<^c zK3M)ERhF~BT5hu`y;f`%X@Nn?$tT8T&@*;K06RXjoBM8z{F*~-oX8(zVvRBlgg>XJS?I&T0{ZR) zUu_JEE^|95DsqBp-Nt2MZ7LFW$lI zs425WH}Vu?a;k|kSkEw_KRVuL7#*loE1HNI1aFHX3a4_!BZ{kJU;vf-3SDPog z{pr2Gk7lMG$v%UXe8+4xcrI`tK$1%DPQ~;xWZpB*O+N^mcK5wh5|Hmnd`-8IaSbs_1}mj+4r1?gpi9vhJH+I$iB=X z=%LNPDPMOY!=Yycg0N?qwc7+uG^I!9Q}!z}9uC_<^p`{nbCC*czy`51qwu=|#>&n} zr)r{5IADPMeQSWAhPg3u71DDrQOtTUtN2X_{{3eFP5UN|wE(KRi_iGhI zRG|`hgXIy$|NoJc8r}awQr2{XXNC;X0fd47@DrDk6dXdNJ&?H4yiR9_6MpB?Tu+{M z0@|dzP5_Q3;TLg)Y;MG95pQuaC#*HLVrK+JAs zl5+}VwC~d2lY_?w_GmjNS_U3$3HjS_-IhG(D9;T@*QUlq5T?7Fxx-Nr1t^=QtjsRy$z*dO3!|pYcRXKz=*l zRjEYrw;yV!Rlc0h<&3S}XO~IT-KBiJAXrjKz?LakS~&2KX>zZyR;EQ1fOi@JSjgPL zbb#Ngqy+^%g_${ix4vEKp&UT|pYRG@_w72*1n7?cpV5!A)&E65@)&92{ZcP&PFr#BuAg5{caQXt(GIL%QZc*_ezJm;^>NRnH^|iqRR6%Q{QV>Oz5Q9R4#c` za3|1zTXyHESPlu?!)hW(a!=?uFL#rNS16p+*tH#SIhNU6e4YhK4QbX|`OtPQnq~{& zxY@T*M3W9`3Kp3z?AH%ejW5oWzU@;2VbGu9xNLFOxrB!Q#yvX9oIs@otE0h4B@n^U zHlxNe)8ue!4&@ZtCtTBlK7&g*rCB7QCw}*RYnfvO20D=K6`2q(rsEjhd8r|CY%w~h zhvss|v7w|v0Z{PIM>3w}We4#_xvwOGVxt7&-4d|a_&soZeC@bj6C(NW@T2RXQ|6_{+BqW{2j0fcfsGMzm ztB0#VolUXj2=`wp&B_N2;igm|rK*n3oP*iNZ5^12|DYGkJj9u`0EW=(^Q^eYE`R+2 z(}IjZ*fVF=6jTlqVx$zwY~-QSDXMcao2>?#yvFpAQFZ!sC0_K2Qc-YHzg%H0a3-R0 zg@tW-)naSPGpuxI{L=N1)UGp*QYvv>>A6+-%YKS2S*1Of(1F5b#kPA}mZw|h!H5uH z#cd*rbvI6mTqUA0yq%W;kj4jPb7ntsxb7x>x|+KrjBBAHtiT@CW9tL=Ym2(XWR}w>zui z85w$-PR={ajJF>2w|=TkuIDSKPwgqLl0=PKK<{v;$UYCBxoCHgjk%N7SjTfIwKKgH z$`f;*aKV0&^v(TV8{L-`d&1ctZG_`~Tm|Vh^SunEMaEFHk=6tka$&q^DbH$f>L^%S zuKgUDLhPci{le%;#+YWD*@=g=Z_nl- ztS++_6z#5=&WRI4I&L=k_JQJ8YB*T9NX^E4pAwHq%IJwWWYy|4>zl-+S4 zzxEjPamuZ+qI>)fEMFKis=l4D%(IkINqmdHG@8>{y6s`DF|9ivMlZ3y5e^Y5Q?PMk zGXuRcIk9q-Pi_pu|A0(Gzm#=g-I}$pE7XJqk%#v9xu9N$Z46ray5Jn3a| zj$c$4rcnV5hWWDcg6$)AC)dK|jMB9w!^;JT>EHR<;D$T1!%rF$$+RCt&Vc?7&u3QKrV{uJWkw(+SA}~z%@|FY@k24E~ z@|1f6JqcaxY%Ej9hA_>^XLkeJ6y4Rfa|~$Fjt1i0txm zX2TawWVVdxvx%4vb&_>ttgg$LLR8MamR%ZrETIs|%6UyDcEz=yg&9$GhS}@^!rhe{ zbO9sNX+=O&wX0qD*>K{fvBuBe#K2tXnXe~oY3=Z;)tpA>gAK+oBISDy>1{ssneSpB zqW$jW4@{ezs?SwEri+w)s~RdD*XJ7pVYGx*uBF`$@W$yXAS{jsr`2SKYAyb#1SU zG?KI1gd-HSy--C4vZ$=urBvDUUTB@H&u6_AGrzr#lvXY{u}a0^k)UBtDLZ_0vvst8 z8fYP{r=m3e1}wcxw5Hq5a+!?(U58<{J|a^2tLB4j6X8l~0+f9l9v#CJ>6$5A*bE%uCb4EHBP zc$O#blb&}@1;OY}AaedFDOiJG!o;r$Gdj&~hV=f|GqkM=RAk~!C~ z`+UnTI#ViA_Wa{jMefZ}eYQ&tlyVkhds5vlcXBM>LbqZD0xX4GkJd%5s^#4=p#4Wu zNo64Ch^w8}LEP%mvD#d9V`ak%%eK+lx_{=zG=Z$iuoQpVV2ZQcsdzk6pkZp`i$|_L z98pl9@wr{%LAkT-T|{@u2pxpdrRp(zqTyF%vnbW4)H{W9%hK9(s zvfU}+DYur(zN!k@P0l(!<6ORQqb;!=^w*`(;EE(O&ClyQ=TvP%fGbM{HiwLh(PFE= ztL}w+aJ{>^E7De&o;l><>MsLOua6)NUmC}iJo<=jhi&#SPdeTI&jQNS$-#R3s9L>w zvCf{=Fh{AHb=N1$^{SuOd-MUu2gjkhAj8<)++UPep^fX20Urp_>znB_F)*hZw6`;I zU(j3~zJnII^{H?uF?wmX9AVM@WiYe4*QIOLP~gyIxyKj^k$$f=^=f**OqZin-D!c^ z?p!7g`i1``qYUnyH_MRTBI%L#&9JeN)XK$Xj9cZ+*e|8M2>@|=5)Otx5bS}ggp5c;zby2>s#e&*I~ zZJNb0fz!kb4(GZHn}$p%0Rc8jO~SnpTAaw$&~W@SaOpJ`8VyDY9=QL7+lifY;huB? 
zt~2S9*j#hAi8ZWzB7M;*qxwmM@kO(ibF#ah#o2cn8cO+6cE8f>4a;NLC~zQMaA!o{ zL+Fle`RViG>$4`GN=ucNkve&UFe?>(J-R*-_6336Di=4&qNLM3x3=m6B})O>3OG2o z%tdm23u^~?o{>K0VZ;IDlB8pN_(6c|6S!d}@WcPM;L^`f4wa67#_X_OK!p?}bt%`5 zTf8X75Z^kS*f8$lkewaU6}~J~Wno|p#Y9{_o_g#M(F31e%vxV30ibvTfv!KPK5-(>i zSD>R*B?GUL1u8z~5Nf}!%ROS$odyk@v!tgC`1uQv9#OhQHhJ#yDVx*09L{{3OzR#& z3+Y6{F`3iL`_nPH<~qg^yh0}RJFCO&aZx6aJm>Bs{$)g=;X8jKZVF%pK-o36NR4Pe zsk+W=mJ4Rcy58~ln4;GPpQW^d{&Qx$%?!B~hB4la+nD2f>U@5?C=gGE4Y7-@epUUC zJb1AkWzXLjwHICcDFicw|Dt1DEIr46yp=$E(w7cTS(h-{Cn@&SpZ2@J{bYV`#Q}X+ ze|ZNWaRg&Tk8qiXxM#$Ra1%NM=p)~CiL+R$DPii<*E#Md=jg`P$ws@bdTNcdICK*7v78Y#wm~r4+Ontb;A(x<4SO#&*ow`!%rmb z_J***$b6#)a2!COIE=pJs15TXpws!tF-6iuz(Muzm+g6C;$wd}^@ZLnw-qsp4kNf6 zisMI1U{?X7g$3Lc%>hUnHuvN>;nG+QR%`P63-2rYWMNyY|8x_hP6tn6=+jNzk6w+A zAz*le4NIeR{968w1`|ZyHmOg^k*bR7M`@_yg_qFNzeiXbGFvj_rwMOD!#Cz|!9$<& z--3tSpDEb;QD1)1d8y!n@YNIyMF_ctm2ZL=CO!u}l;-!HK097Fg&Q(6;-Va>KEMBJ zy~oV%938&Bo&SyFGkD`>)nuTW`#}mTRWXSSrQ-WO@Ph5rxIgo|t9V)A?C){#M=hr$ z$$sfjE&vH@@ec@y@&ifsbGt`iqyRQ9p>K3E1j%^LDuEPe5>d>olb#^t`%iEmL}NGP zzB>NWE&lZYRW!_(024XefoqLOoHB8H#r|_~3Li6X{xy}yVql7zno$5X!#=VtrMbEH zahxakZV3Sh%75JPYf2%y=!wm;e*`Xlj{5;0Uz&v9!$P2q*mNXGoGrv(vW}MuoeATJ zdN`-@9xXnO@ABbEsch8k?f;ZqX1bqoyi@Y_#u@c0;!!sIIcnV5lSzSI(Z@~LB)mF| z5LsDV-U#Pg1YQ`flYbnrjfqbCvk7B|mygUQB=B-p0zU#5jAcr(TiZ8~38Ej{X2g}pHj#@fyX#H} z{Ab|bi+99agXy7gd=$+0_vgcwC8Sx89zsS=GYGzz@ZeRtOVslyN`mSqV}Bu&Ex-zf z!k;Y#r&O8If`U|0Q~NipvV}M(5-=+p{N!LS_?q?W=;YO3K#vdmd-niGv@MlBJ7ovV zf*y9eRO>`BhN$iWxWU4|=(3MEFKcu+g)gh_u9JL-Kimee2okMN#iRn%v9rRHp&{)4 z0bvw%#-HU6ymU4p58?`Au+YGatvB+ut<%QyAGAE{lf!kDh*LLwL1uj1&-u%qisBOQ z{5yRu_oI7uTYWFqYr&ZuKwCJ89|>qYM~;|G5KRbcUg_glDTclo8mlb6zFC27UzZnfRb*ga z*_4Au0L%~C=32g4*9FY~=6E>G6A~sr>0z#h(f5cRy)RpFXfnTVo^?5=Om9&!o4!_X zb7K4#Z*YeMSB=M%zs3S3qT0yNbe0aBWpfcrLLZF-^DS=G?jK)|q@_DWEV@g+>#$;O z^)}E!qV(s+Ix2j>dn})5z=gHC4BC+mTAdlZJI|CQ(H}nB1%vtwHk8IUx-_6+P7`9E zoky9tDC-#a{Q~6U5%yQuj9+cxyr^5Gc%BBXdN$ z`fQkPY@f$lXd!wzls@&GsA1Ft-2T(B%IssetKfw+DNa6%c(eINjaOLc4rYODZmT&; zH_y^Mo@MLRc6O@i>);fmVfo1G2E-DOAkv31X*IQudD&vb-uewkkAc=1TUJ6m;`Nl zmVGPc)uDR&eJWSCa@dRdHSxF^{p`$#njm0LCRDj@Ndf;i>@0f6T|MxB5ogAB#K(F& zCsM$WRbB>eT{6Qm?Ah$pD(3}XA71njuNqjz=95&DdD*@O{UmGWzcOQ?Yi-u$=1H6H z88CtA+|hZnvzWlX!O+D&a96bM?%)FQB)n6zaB?ye1HRtpA*MWefcp_b{poU#Wb=MR zp3!y8pINYD?Q(iE(>r>?ilz@y`e1H{8i)xff*T%H$v8r!52e?}E4Ht5`;uIOz?TZe z9t_|*7_Zam?QY2`IM}-x-)PO#R$nNIyj*~lQ*)VLph}Y%oLr`DA5INNJy0J{`k2v! zRohsMCsXOFhD+2a^=7ob){g&!n9XQ0leyIHMm;~uUC|3y&t1|JF^x$1p!bIn581TP z)o#}F^o@wyt~xVXxU0Nl1}Ig)hRa#1hezD##&*@Ie`0$9X=NRo%IAh&6tCUU7w*?c zgoJg3U|a$yq_D5IN|gcaZsCL4G6rU6tP*tNRfp*vBX~<{Vmgq zSffJdBUaphFhdaTrV@@2Rs$5KXID02!4e*Lz9+B7$Z!Pd*jRoyBZYo&S=Z&&@)U)s z&75N2G9R#O;(tAjaBKjjSePX7brNpru&|HWg<+pg(Au%#az2=5gK-_=4JTwZ*+Q~n z`rF);DyBnzHZSLm%n?jSazjU`WD8UiMf@Dt#d;rqx zwyYDBp~EJrg>tN@f%iGIq3h@1Tiyf5!n6Aw)lzZJhOhDy} z?qB0Sj^D01zvLLOM;Q#~IyWMuUZw{2eV^e(tdWEo)x{ z)d9U1dla`oxyQFH*?Pzy!3K85%Z(--uxtH0{rnYC@JSv4F z31Kxu5BcAZRJh-=xh*Q{OiDbt!7C&mPr~#?boH;}fgJ31eQ0GJYZRRf^ks#aqoJE< zRN8`QbZ>RDm3h-Gg~{F1GvTivCKE?vp+MFT zEtAI%Ga8SGt>XtACBi~=w*is2td<(Io!PY-yc3nt(thmY$lKuHh=0qRl|){gpq<9ZN`MXo_#Hr#(KZ~VBQJv ze|Pv&?47q=QMZ-ca1MAq$L%d5Gj(p%qkjUYDup5DAhe3RR99X=e2xv=cLjL&pzaaY zE0|Q-t5UO?cmI@)pU7-^if&R!!tr!G)+_fd=D%8*?*>a9v%CAoH;mSYPMmisSfx`r zII@Cq^4Hlir4CWG6xoyxLWLVR6z*O9WaIo!kmMMN(z0x^GQu7txcqJcbBztX(Sw4n z^k^SJvg1v}$I^I#IO+)9TGzJiiuk_%P<3;&BDX`|`~kbZ@lKm4QG24Omv53rjW}_z zA)doyE{ne|8kDi+By5fe`Gn#RR7y6REGtWv3x2M0VE@#rxG!ToDj})fZniTT!t8O! 
z)cg|e7J1EIG9fyd)k*!85rb>fHN zQXx_mHgfetgNO<~=Dl?nE1}PQjAW|y`Dfrd1G~mLYZ^uE22a*B(1I%!e1l))mwo)p zZn#N8j*2V?f!lK&qWR!Q7M)PfnvNBsrlld-F|Ac?b>o+Lg(;v~j?Ts8 z9tsn-k6O_f;0@MwKBJ|{jaol$_o8UwpUQc|Bd8zaf1b;qHU63W*t@qJ_+D8loQFob zG6G)({>+_{l<4pp4=fV5#Zqb%vrn|&()o(+p|yc3!hBAN-g+Ok98ku5vU>`Q})a*XWNkw3X*18((RvSV|hdN(q5m<0higQ z=R#g}p!>5v(6?VaS|}?)DPG(e8zW zMm+Zm#5nXI>Cymi|PvE<2!RML4w;3%^~`HQZuu=>!0U( zx`g1Jo#a>_=k5IpK|i^|n6jQcRRF2GOq=TIE=f3ir^iH-xwyZ4E~W|X`H}qNp30rU zEJG~E7Wg;E#`q0^a4qlVe#H%&Z7UQ=84l{5;Wh~(4Ab9;vEWQp7n# z-|oaZM7O8bvZSZq&%w8L1hX%|5d1klZjkCn@LzP=?XdsQX{8+Cn%RM$T=zx?e)X=K;C9$Jj_E)u>nR9mIgwZ*Z%^^#p*{2|F zX_BQxr(f0mZ95#=%yu0!i1bex=&e`PE{LDnX^0&i+gi4@mY$X+uelAFH~t&ac|%Fs z*8C&)6sER5FFVK2_v?K42sCd%n7Q@A&{%=H#`pMLY3G>pt|(bKVyU>BV)}$9Gvs7 zhopu*^sN}O0&&uo8~ZORMgs!OwSf_9Ur`t~`pokB0WYJbL)MD6s{PkA%*vuH^}&zb zxIvo^c9pa3&*0w#90;1TTKd+eAyho;JD051WRFox$Z?~|thCPb{7SeqRTzEwf;BTa z3%s!V`PoF>4{NfT)K6Uc@n~A}}b5yhPc(>wLz`I+A8RAe@sPQR=ez6#Jf5a`W zclQHn$x$yeumvx>H^|E$e#3UeM3C>s3%A?<&Xl!gdY;hP>E&fvpt=g9g^NWN2dCko zBZEoNxA(fsc#;y6Fv0i-%9cYvRul|lo{#@CD07<|O>0XAR{3RHDrawyeQB!t?$M^s z@u2!%`^GA3UZoNT68KnSEE^*E8o_{kcV<>cG9^4>V6$mdq5k7(!H z+i1;69p!c^Q|mK2W6V*uI;8kuu{E^Kc?I@{EA?ggtb5u^ySpA6Wp>AGuT=GIH8k(~ z=pXrro9jQ1;C{7bwbpz<=Ti~m!uzwd6wK7HTbdm%3PTpo4P~47?W8q&l7HTdNV#jJ zjY)V^GgSRO_i;H%Eosblk2PD@gRi)1emjVDdxK*yVlcJa>+~(*b;tJF(|RhAJymTr zbDPhV)25qd+pz8b{>h8u!^WKIg+}MpH0pPD{-gdaoB36T>r{`eAq@Uga%G4io#IEv zuDBa8adNCL9n&leNU>@)`H08nX{ekqPbpdKdIgrKh%Q><Zhy6pc9Su!ic=Ahvb z$gHdctZNn%GnnOU)UeF%LjD-mS&U`-fo%QE6aUqhEC~HrF7=sP`l9U!?FhNc&&mzd zg{GYgw8u$m21;a=1ud=d{f3v>rCAVT4ZW_5jmC9iRos6QFk5AabafeXv;nT>?7Uzb zyHyhwy2Z z#Zu%!G`ji>z?pU{$aHYLrn2?0OkIw|QpI?lG?-atHPj@NUZQ(1{+;v;s$}IpQ$A>|TBz)nWDbKFXt33~%|}vF zb&pdhmZ3fighGOr=YlaD=&iVRYcXOq>VtAxCS0^aju<4ih0ARX z@*y1M!=K48=P_m_7C7srk$HqHN#M$xfQMHYUi>uLyPg-kX0jT}{eQ-VMNF-WWUxR4 ze;}eMlNDM+yx*|SJ^%(DojC=Zp+Ic^GCNC%anuPqNiDN3hWH|96DbAq4i-_b8n#@i z_kVhlWR%YjRM70f9&>=DT-HnKc5;FJsmDZNA4G ztGw_%qRe_t^|7Ez%vzQFyxMS|0smV|trgA|!7y1r=DyL_@{PFVD z%99GIw6k)AS#cbd2mI6sed7oy#+?eKeAIQf@|J10{VVo-p(>U=fDvb9hw#|A&!5+K%a}>h;(gqREQY4zdHMiKeI(2UPhKS zX9?UM$=)G3=PzK=8t{?4T=itrx^Prf?w~?I5LT(;U{T)f`7r77jt=41mH(Ui+&Tjs%-sZ2zW$K3!x9-_lN|>sxdHxhvHd8(T7fsDb^e>YiMsH+ zHyCuroZw>0f5c38OC#WuJ9nhNHij@VK>Thf+&Cs0doljIzQ`M5@iU|yt#)*ARo=3^ z$~D0Hq3uL3ycnf|qSu|ZLlD0bl3sjP>?7(*$45ASrSr?-;=KRCP@e=63qEy5ci^$kc2 zL}FS-DE#1)zzYroLzTD7Ag+x9-QXj$$-rM3mw@D_AvTe`2#}segTH!2{B?jZneaY9 zGrW{ie`~=0`m7~6r&T5eCjok0tEXoD>-}nBN}Bte!F7LkG5Bs5d_2>U3BVn)X(PO9 z1^<^H?JsNsH$p6TrlyERwEjiKoawLyWKhGCZCdb&&gl}1%KwxXZI7$kw6XS`huF54 z$^Gn2U98ii&jkX5Q zD;(LdFtRk&|L_2n#WG>R-j^i79!k#I{yKk5zDa8>RwVt+wiBEd&Q}wkVk$%6lpT=> zBt@F-(*!dT{|KwtGmEUr^mqRF;5ZHlWnP_NMKA1|*sWTa0!J;Devra-e(poRG*$3f zOUQGTbIHJn^zWklcVTf)fLA#Z+;QCgI8F@}Vr@G$kQ88PkH7R}TaPM1_cSa(ISK{) zRNTiE_r}ZuGKcR5nM2hw_AEenl~+F=IB2rytENd`{Jr!xW)4QdQhFH~8&>!x$TpLr zZR)f?3%g&zhn64UrzK}`RN+i(cJYWg2g`!nApUvLF$Zp`*H>AaaI61Rr{ctZE}N>O zWgCm>rm&fb)2!;K?qc+j)h`jY+UTy>uLWFfGsoJ13>GpYSGy1UkJ{0zzrPDN^1t1m`InLA0Xt&!heokX&;(tr|Q|W?;9rX1H_0 z9uC_afblVa*(IDcaX6MD0#~m14e{O=L}S*z7Z5T`dcHi3)+LX%hEFc{UmT` zWHp1YkowpQ!5=IDaH>!HP&(rqJ~WSI(ws~>b$DeUgld9Qwy((^TBFLJaTMDLN!9c0 z%sRSk9J%LTS3-Wf>yr$__*E336{O#^e$W^13uwU2N34Dozy?-xs<2%W7G0#)! 
zQ@58?x^;e4OXk=ne~=<|%>dhS%A9pVRgEn zPvWAp`M9sTx-_RWWhzYvET-*NlM@7Yo`xNE>OB8);6f@xPwLu6hb+B@- z<|Eetc$GtGQJ`G#$9CzzI6>xsG*XXFM)e}yLD%c8m-74jHekYca==ab_D?8&d|1Fk zXZGiwmrJ}Qy!c*UAMgr9Ta49gb1LlH=4h?vG`b%T%PqKBASTV#n!d!B5An;nZJx6qZcgoBc$Zo--ofZe9@WPIJITn6n~q;S17k@ZJG6L6iFZ$#M+ zUC{1Et9N$CSRc(SCh-p0C;6GX`iG zVU(lk41CNYDRU@tXBr|?)96Quznl8?KWuNJCj*3=llY_B8dPWQWoJN?y|mHEw6Vmj zW~l;ttJzAF@Ho9xsu#N|N68fC2hip?9a?t}m9)fj6Gk{-uiuCKO5w1d>gK&Z%jI66 zKut27*h_m#ESPb4T?D7*6_nUsRuBkRd3M{hXH}F&O^dGwy(!}`{E{^zzRNi#5b;tX zfHH1gMK~;&T}U!4{wbNpYPgNb(9p|3IlF*1Ta;!_b3i$LBe$qJQI0si&R|pKhX6B= z2RmghIOSBk2?0j$ykNjslH7A|8um*Z%4G`dtU|n8TB&u*S8FH1QbV<*ezjy`{R}yB z(p43DgO_KwAcQ?@OJ4L5d{q?c!r{w2k&42-Ie|hst$bN32X~WK2;-HM(d>iUAoD(b zVO{jWVuQTH;V?S61T`zip@$R>U0~ehoU;li&2=#|N9e%>DOr?tdBl2^T8> zeCy#*vP(=#a_qtdxv;$%E0MKF=~*!cWGAM!6T-j%_x}d`qOkb?0l#j-IZ)UX9SH?P!^ab`Z-o0 ztyOG0hS~-}M$S;m$Rylci&XIQ;0}I4&#<99xmqcwKv_@&ZBacUVOIIsBTPRlxn8w_ zg)gB~jQBJV0%L=RfDEL?%|bXBhAK~QU>7tE(k7FaL2ccwEE*ADrhB%@$;V3DxbXzL zrXx#6@koEhir9{{G;$}kYKU9@_EWO3l!b$7B*)=X3P z83T(Otl3x*$~1)egKeOkQ-EH-4t`Qqt#b=IWdAj0cv9j@I3wfoXfXO{K}Kf#y_epg zs)ga_Mt(}ApFRF5F1({cnQCxD2UmYK+Ewa~?=>=Qx0^Cg#9Vi7(|{$=j!ww98#RU# z%UU&9QdYRX$q9wSnd})^N90o(yayB5%xrm|N5oEWoI%5*B?&uxZ#|mk-A2Fd^X}Yxv~OsF*Q4N+9af?Wyxo!ngZ) zBkV-QW-i+FlV9_vOtMIpqeU@(p1B=ck-ndNcT7wME zEi3ex_xf_VF*NzJ(RlH)$@^eOMp`@T*M8F*jGq@|^AM)c39sCk8&d1d6+2Z)7MqA; z%%>E~u#xzZcv;73cb+`GGlPe=SEL8gd>K~s>Ajr+EEOFZOlE382 z*)|#@a>6XiZjN7;R(mt=wVPYspP9|xsiyI!H;S-Sb+jB)yX30(cuJUx@ttn6_|5&l z?uW+0rLFITH=~qQCKfe)RDO^@mdxIm(5Mr^QY5DbD3@%zW4MkSx!>-Z<4+ z#~%1IROeRSR7w6*8k?9G$G4By*-MRnj0IO+5`Qx$0Ngr1yr}6v~-&UvQrT}ls{J~c@VJucY*m5bHG#VuLqfe zu7JzIc85sz&-dogIA9*VGRpPLR*5Pqz0ylBgTFp2c6fp5nldJXq-2tV*$<%4@%06xdgURNi_FnF$-PZ?~;+ zu3N7-DnJE%ca(cCqn8}=3L7^7+nq$z%EedB^RmHUWa-8HXSuooZ5Bdl(%2`351P{u zQEy{uU@(|cDHYS>)~N3MnljzP*oY%X`CWVZOuSlSWdjSL))Zz+d1X8Co=e%KTYcv-Vc9uPyuKIR^Yeo?Mvd9FC3!*!26eCLe{KK7N2Xvs zQIgP5{VN^uI?YDo>Q3|MK-Nq$F^jh4($)5=Qh8n^n{tL;tv+XnQwGIa-EKvtp;}g) zOyJ>eRF)lnxKHp0#Toxj{G=Fn;8Xgf^d4$Q-+01WDZ0>b zBR}gRk!lrMGdn0JR6_Y6P`W2@f8*T!dxL_xEqoC}L3~Y4UsNwczKUxXY((bv>=Hc? 
z&fNc$0;S%mRw!Zn-(>J^=7D33NKYl zvSW`WlKS19Y%jgtSCvoTYP}B-ETf4nixYO|b-BHy;Ib4=!)od4gz(dauu*7Y+SA99 zFI`6T7@8g6-D#t{99N%iZpVx>_nqrW;~(@lx_bNQ`3JLsbNu78*fzD*>AS}r1S-1gp}!i-TPxInrd2*Arr#euH(^rw>dAMZ1f)F6k)l4;qy1u zOv!9@ueE*Bb(fQehBRjOY6n6YA$Q#$4$K883EsH0>NOm0+Ln{Bu(%N59Z?^hoYMvz zLmgLW=zSUbGjzphzE(%GH4WRMm*Dn%8FqF1SB28YG$DBwBk?yE=?NF1g9dD})rJw} zsQXlg_U1uEe&*fA{s$CY8@&iB_1A)YtCKW^442*#Avz{0=r6FU6cLU)$>kK@WMzQJ ziBV*&{-~SMIcVRJs`S8L?suMZxHd5S?>6wyyoR0#BL@PI?X|bnNd=E+brxerxYR1NaXd2bK3IR7m;<6bn7qe=vIvS{VJH2Gm_WTaq8+}b zLOE^GpPa^1cj{8+a_x=odk}sthss39sr^8kn7G#!_m71I2a^_(t~tN-2;fXXtZLjyS%`QK|yL*R)$&(Thmd;PQiL~2p|HM?GNGBXJx#|*z_Mv!5Cu9M zZPl#(PgDJ>$#NS3v2_v7d-qDSg9-8_I+kHA?aIe`<-$Q(4;)J^!z*q#hS(6BbUFWE z@S&TbdR10vVqCg#T(2H5Ob=N_D<{@yQ|E1MmG&^Fe@h%%wt_RL51u?vonYj-MVzlT zvKkIm{`n|3&$?-0RCF-OZ8_IfwXmsw_8J?9)rpb*C#6)_2JvIbWC0rmbkcyDclyJ_ zS1o^Hz z!eF11m7IuLLJCY@PvZ_OU`W24E$!23lhZ_>|CmdTa-1Gg;b?DmfYlq1lx9AfMpETz zC0U)h-5UDREZ1D`xX^sP2ID)?!r7xh!};kOE%Qyqu@$o_NJR-c}csnMrb<$S2-;9+yMFE%=Y^yT@F$PTo7 z6en)J8;iWkZG=3W4M3FD-$iPUFE2b~97GWH9K9&l>7x+xC-l<^s-RuHTnw`$XC|fa zv*XKlE0NE3u*RJK<8(PQT8H(9hEeI2dAoZFLB@hp1FCeZITi)5@Z5SPc9y<&617Z& zH_(|lgQ4vsSd7J#CLG)u&ng{E+Oto=cSAZpv=3s6k#|^OhsXCA)1Kt9Wp}dHwJ!*z z)l*It=`OG}hnAx>c^6@ZL8at5O^(Q!Y00oz9g8I{w+IIm23m%eB)rI`JoVabvz~%- zkN?*sWZ&Mg{`}3C?d?IU!F_2X_F_Xnr@R>HEn+a z8;>8&Kz2plBUZuX|3+3_;F6{VA*ST6DiAcavZ|Z-Q%!WdZ#%V4ncy9G0J}OAPZBN} zxB==a1W;FzI@n91T%bHWaL;zxTi#)SFvEL?x6`>(({9F`Qh$IIM>VH0z8q zp-08CZhXZm0@zKG&e6BvfeI1sjY#F+0-HBYVuwYJR=eb61F++oOyCBB{|R{Us+YpI z(IB*Gk^;OX%Rcn=m#9KO;Gd6DeDLlnjz5SrFM0QO%oFVH{wc}%w9pbk@S8E|-{ayB z_fyKHbcgW+s7ya0jeic*-Q%uwgJXM}Gac`<#r*#FPf3MDqUocQqj0`ND!;D@H`#F0 zAkzO}4*| zzYZ;MN4S}p4;B6(Fql~T`#ZS(6sV>>13#+n#s}tcN2z|ezdN*7GF_lb(+=J+Fdx5+ zCvmVZsEAt(p7LQw$Fl5aZ!#o?hLUb=Z&QM#{F`=_m6fts!$7cc{fSzM1E+wKwzz7q z7=-@m;_Ax#@|+Jj0oSo{OXOokd~CS-ilF%WOOk2bB}U+9pkPU*;oPE2yd0n^P}YrJ zpXA2vkKo+GB?|7FQLlx;*@C+%)K72p8C6$uiJ;o_rl*2`GyYpQLAe=5e??493H&4GTjKZ6Ga;or`;zD8+W8vrtw_Mg{rwo-l#0EJZ*t8iIrtj6 zCc+t~{omCaX!%if5LT#6zSWb!7<+u^$2sQ zlG#ytw#=!9u+>zvkJT(?IrPYOGYP=wN=8f2WH~EEHDB+PDVmLY{%ZSXJs17PY`5L-z!a*k1lTsA9pMZwMo!_vJ zDOa=lQaSs-kh_?&JLdeP-so7uCg%8+j z<;A?pq8UrbGTL#+#t!M`fj$I(HV=@`El_N5Fn;4ZY$q2-dzqiKW$!1X+`6c0*KlLS zsWZKfny@L9R9HF9x6CUsp0vOC@yeOUkSG@5!8+RbQ_$VVT-7!?#UP}K2GvuIN2+8w z|N|wYM;-{e6>mzeGhKBc_j0h= zr{LxeeUKOaJnQQP$6;31^+>q zQ^s87)=SVR_%}Ft=wjpfn#W?#S6P4RO=UAb$1zr=9ETiNg!}aOkE5PRg$TSd0B8|g zaEl~Zcy1fluNmy`Jv97G8$c=vcKE9jJY&%d>$l9qP&JiRDu&s#Dt-OtV4iK)Z0&2+ zm$>aog0^c3nBny`>Dh|Eh_(=#U*%lhZQuTR%ZL3Qow6{J_nLwIx}1b7zO0YjUPRKe z$KpiJ$hXS!@uI^PY~Q;#zn;HsI#S_`t=RYEw6vmgO}pJh<8w_3JkFE-!39H#7MZFA z4xN0_oo=RmET*he9;UEc=x!zpy|ii!f&=u~>kTIv7C-V%U5;pQ^sGz?Ti-3V_@ow* znw5ISmb+A0nhu^QTWU9 zZ7>LZik*g*bON}l>54g1_~c-~xaAYZ;$|awilehyiK??Q%&iak-r?gtp^F*YqUlZ1 z3pUhCx;i&aX%#)ub>xaF#Bwt74MiA-dl*JRxDw#1fF zu5jGtp4!(e$asi4(8)ksRgm;ty@1ZwO3EC}i6@)yC+kcQX_pk#Gr0=E12SEjCK-&@ z0wTN&*4Vx_k_gvGK64cLSmV`_cX9(spP5}7@lY|ND}5BqtPo2vHC7Cbhm4ms85&P+ z?Fqgd*tyu)*Le?;XY9$0=Rv*97D6^|OoVsI3P*!pvAX-4E*nUIfNxM|Kx62m?CduJ zI$%;O{NKKc`8XlBWeX}460D0W;D*QVS2V7uAV>iX3u=!I(w4{b)hbtT6-7m>To@V4a(GPRtWdyu#easDC>E@3m8dfu0$!^zo&z0U;N~R^ zAbTFp07Yz&Hdu43ZWd(r#^&VFy>n<$>e+CG8t|ymqXPpkgQRiE_qjyXXUF~;Ut$M+ zy#2hp*|*VY;VLE!kiL`{`_Xt$Q4~$2^uG=TMMVd#Lc+2r=qlDA_9jc!;sE+dxBQO);7H^$nNwIW5i+5GgY1l%wWITodlSi1@)0lC!6 zO`7Vd@AYNdaZbiyAXm2Ec`Oaakl(Qj#1g0wo|1et|V7gQ^rVNzO_i}GBBk{Enw~B+D(2`O#1!8 zUgf&5YQ~Uu@*5Yv7`Y<0bl56u$gJY6SwW6QO9v(=_flaoE4BP~PPEwiIxlh!10!?0 z|J+n5v=;Stbp`s;qQGV`v67|TsR7PZk`XVleF$3?>88{*u}I$bD1H>U=>KbA5GVNZ zw11~8Zop_@4#-1D=5&2I+2zEWKo25;Zp7sl1ZfJe!i>TUD`z>TZz9tR`9yLH^&Ue| 
zQd-j%)@mCa#y#21{@p&^y^8ER!`=ftoh=++$)P!p4bv!9E4!G}uR6%?61MG59W^ex zh!x$0r##A+GTJFsy?UGZ#`uqQn*jz0#v(j(^;tkrLWTA^6vNV(ryM7;@?BFgE#JUT z*gBo6c?dDomr}&@J~Qp@sY2d9#JW_DEebcrAdppx9qu4V1~U}wn9jg_LY^E_Z^Rhn#({{uCtm=V%axPaOZ;oWE2RhnwOnIq=r%P|lp|FAx!V0x@N?N+Px)@e(F(qu=!fLOy2kv;4TpuRhwT_)YbSG@S`VqG*>eVXLn zkw?!_&+hvhh0A5?U^oKKofqBDp`N5-D7evw_*vuzbvXlXM1^S!n_F&4wA-QetJHER zX> zO-5PYQXL2n>idPmqs-)&dP!m!8dDMRu2YU$lVYQm!NZy<2r0ihfionMtl;U~8+3zH zv3Pov!=hN2^?o4XJx1n4(oHPtleYo}szw;f8ASi=uxt#aL+iNSp8d7RbEI!s87Gsy z=zfHkS>~^a%0~h^`q-U3T^4Q`^DWaI$jfY{XIt${O3#eR*qb|S?P#8Vt~}9uXlr&H z&+3!KuOk1Y^ojFMA1jlOZ@j0xbxdxp{SEkzYb}CAT1ydCcVvx$V5eGO`xGiD-Z9^S zcQ#G~D=2cAzj^eCfq`Lvc-6xsi&E9bCg(EM!-SI!&*Pw6^QA}~SVTWVVRL@7>wH3D zh|uOYarjIL^tIj27)221o{jSxi!c^h^EU*i!%XbrK5G`)YRAZ3Hi;V=th^sT$k9#| zV_Br*EDsUv%E%L<9^c+*?k3&&2yfe0Jve$YEl6i&$%mnmpB~oFu2YHV&(CKc*icM_ z1q`mMHs6`Ns$AZe3@?vuioU%ZQkbP^)Ly`J^W>zuW341v*$STGQhHF-u134c90arC z!&q`a<3d4XeB|oU(2(x(dy)swDp?LkL(oJfuPHL)65JfZSSsj@hw|1{^O}*{PtRO& zauYZA%$0R^f5|T<<4nR^w|NmU9R)Y7Y`r@>Y!va3<}RUpH)8*I*X~<*7_w%y-oyIR zw>@&<8Kt$nthwBZWW_Kae-orSL$J%v8(YZH(fvg~0jEKy>C4VkZ*4%`snQXk{{Pr} z>#!)hKws3tz*az-PZ^K~Nof^{Aq0V8Xi&P5mKGG0W?<-$hM@$cOGO$)x znbGg_+xzVEoc-KC?tY$o-am+mH`ZG3de{2Z>g;GzSII1Nt9ke>>3pr^_E@RpQlT;W zW=9sc+1v94*QvA1ELJXLt|bZa-#KgvdQ2W@dN8@E)zP{5`F6h>62y^l-%b4-t-f^s z8X`Pv@_0Go(5BEMa6vG6MUycUKCyDK!)Ii!MnDtHLsCvPENY7dv*POd#H9V3WRRE5 z7|)1Z(UwE}y6)?zW>!{kH@9k)eC^!Qara?W;Rm3`TUuL-U#=S-c8eZ{6*$Wpj8#4@ zM?5;`khv95z>At!3m&3FTaH003v33wOa~L#8>uW|)T@lE@5KAmRMd>rhzNJ4N7uZ@ zI2}S$i?o3gNwwHY((8otw|WB0Ggpz#%8dN%}Iu?k&Z=Io{1V*KtXWT=*^8yl0bu&=3?A#%HTsYR(TMw+L_ z1GcjhoBBPA{w42y{DH5p5A7#dRoJzb5xzz@}b2vl*km` ztyFI_<6BE}Q%0x+-$>r_ae!sAu`_%Qo)>rhpH}Y9#BPGl08Zn+lXQe(KUTD=*-bBi z){c#7A0Cb$SDTud*-r)XfmoHc0ey3=BE`kymOEME2_9tAC-*@!b>)Rb`zwERjrx4+ z!beg2e2n8qsTSqWSMpu&YV^8(RBM;iKySZf6xJuO;9H_bn!3XK?gm_Rtp(%gv# zYi3f=EzwGlj!r`!n!|U}voZ*+wEg{sSadl%mm$Rs4ukB;T^oML{Nq5(Eq?S^!u=Gv zs`m>t=U3zn-2M5>P6lDk;c_)mfFE%LuvI zv~$SgOf|Hq|CWVYHjF3x%PYYekB!W%INR)V$=MNRa#Qrw|F_BE&NfSl*xFG)p(;^PmroO02rO8JY( z_YgOjBP-V#FHOD3MQXU~1F`idFm9Q`obcHPz9_U;nnd|!OhF|QPLu8 zLzH>Cf=yjNVWn^D96&j0R1ynmjtZo2)d%Db=1sDSPfQ)PEMmmXzG$LR`<8h>Nu~y7 z76d8k9^RL0yfIUT@ah`v=xaW#kE)^fw<#6QyHk|0KD|9&n5jI>XCQ6bb#?p7{jRzmHjmWB3{hdJ4}BqQQ-XJ%DD3$Q^yWG?QFf0P znpaZScCav}3ZFY0O=1&LfQRB9ROPL&DWyi^=9Zs`#67RG4G$HYO?d@qdmL^aRU{-( zTMT`KitZ0~Z0vS;Xp8PP%ZG=CUhnr9X=D06WPo_IIrOaUTDMw0&9ZT1cy{16Jz*qi z!UM5faypYi6$=Ve0fy5q&t6f!I~Dt+*UTy#VjJ4R9!k&p1NJ-jINfNFxR-h z7>tvK2$U*|T8-kg8R4xw12eIO=%6!SxT!A~X-9Ju*}&|Z^7{^n)yXJ~H_)1}hW0^{ zzEV-<{>PjjNVURfBb5*_8_vmUhvozo%%8(yeX+JgSKis0Sh#!rC?nuwmP9)Z>Ypua^*Tck3Uc6B%}ol`(|Ais0jMpcQ=-xlVPx5tR&^_`#n7 zv@4w2)!Xiqhm_{#co6#9?3C^7R7AI2)(h+i{gX0S9j4(iptX#QGW`7fHWqb`)rEzH z_bO}m1*rt=q-tDSw(6eReq!Bri6MK{3ug!ad8)>_U>F(IO`z@M7W%bt++)c$IC>V%zD+_iF{tix@) zWs0xlt^ICe%g-wQXY^_JX{l;!%vSW%G*6e@#i-xB63{jHUV;w;Z@v+B+f12l7PR*u zOEO4Ei>tl9=h@+ohmF7bw*ry*8q(4@30zd4?9(%(&)RadDpkh!$F}X3NBDC!N}p_x zIwrcW`Rg=UmYR*%xZR}GB)aF3JEDdd%kI3opT50d()n$P6`-Ha9+N+*Xn8P<(Jc&yx=e228nE}UZy>|>|}cr$k6cp-4v<1jj*MGr^f$c;2Vmn9}4SvV)U z^XgGjND!4^db#baDJY1&HX{)|bAg9BOnduxqUm@xS^o2IT-o|3bcFRbn-uz8LZ@PzwNAL2@0u*}HRTI_OZw!Uk9dobIK8sSxep zN|s_Bb1o)+qt&Aj)($i^t3L+Llm!-lCCm4zyVGAHfMgFL%7L%JWx{poD@ozs3;c=? 
zLQf8J{6I633wU?^RcO_Ixt9OHi=3TbM64`e1;6CV3E6;?Cnf%Q{sok~ESF2dV6X~w zgod()M)qP}<;bolTUe38GCx9Hc*u?O{ZIx4odK%M34Lu))W_j4#~KycKs%Au)$n!G zRv(#4o4>dzL*TrzvpI7C+VXTuuAzmasGV**B@jc!f)rC+{Z6tZ)CYILNgdv&_n-5j zsy97eT0}tB+v}ntOdqKtcQYm&Esl@0Q**35hBmVM*{Vfv(tTe|T@XX4yzJTWKE*$y zsvi{JWh!*4TY^=sUV2x`KJvP-TEpxxANo)G)u;=-w35k&AgYh!U#UJynUWRQ^$ah94+oK92G>s?srL?o~MJO+*o%k>HrC6Q=R0 zo9x<}ZADrj?2;9Cm(|5WI+2b!5a`9_?|f|&W>%7Q=u4lj$qq6d(oCFa{Bg2$+3ll& z{zHkWRXnPE!gH;2km?xjV}b{O`~U#d?ZxAf;@74)_ga35)6C$-!~3V3&@mphOuueD zg#m%6RLIbxA97nE4C32T+VE56Q})oU zVIuB^nK+A{S}H`UZ2P-+_V;FyoAO5Jn(Rkoo=H=K04{3oPk67dwn>X*^UJ4J*GZ2F zsnSj?P+TwxGb)xV+1*_`scxk6w4Tt{hR<>EZelo(4-9c?H(sv1~-$Jp;W= z_9B(;RykTi&3o*}Pug}B(i@c4X2lr3Lprw2cyc<6%3+QlgyQ%YrJ^yVUyT;ZyMkb< zCA=b(S6ngfg7OCJC0C=oSf;m8Lt6Z8W&#W=-cJV2pCGA%L~leGXl(vG>kDb#cB(0i zKsHTLIW75oE6JK4lqR+uxXHB~x%RsnE^I<&JwqAoeY)g?u%e?+JzwKl($6Uc5%x=- zdOA~9{9+#cB&e?ud&k3A3#2~N!}6LD?GxeaDj9L5hRs`OP2&E;z5x>{LpXB5J%rZZ z%XBE_ZlkN@G#3CWRzem(OAv%-B^xD%RkFVdhS=|-2g9>jeC5=xYE0s7iBTso@cY8{@8mX9w+(tM&bhu-)wIuUQ61oR{Ar3pWfF28qKgW(fsV z+;X;s*yL+qij}5(<~FB=FVtu!lo&eqhxX;W=XWNVAf_wDs0l2v ztrG1W>Tc9+p!Go!O`Y$4DEsOsy0(&(oO+^E2yE5iu@%}Zrp16#pq03f6(cX!z@QU#5rfHCSdIql!L6H_6Zjl zRy$<}0fD>i1QyI1|zbo2L4K6o>$#0YTA@4Qs0KZ z&Yw-tJuc3;aos|X45NuZP}@{s{&OaCK`|b%RfYIs}_^F8z}Lzgy?mC?Z86Q zZjFr+l01Rz{&LD*5N-%?;?wJe0H|<0ueT&tBoI~czb6N4&M2{xuL{0pekdvF3t^T{m>xACV$$D*j2gwX68F_RlupXaCXInPJkD$Rn4XXXS7xz*H zV?HymBJm_(#h(HKJ(@&+Lw2AFyh)N%;W?;vpfPW>og$M73?k5iumJQBdaN@R{jzh6J0*LGiAU7bk@yIh*?0{#QI zH+2K-$0?HL%lEjSt2>DzyL~Aiaoq6HkIV}@dFC- zmi=F}{(_|gDhDCm!^37Qr%vdO{8p=6vjTcm`d?BNanGj+C=_~+aC;R6_k2G)#dCrVKBo>_r{Ly$Udo)ezUpH={c47LZ#{ho|nic{F< zp=XxQi4;f};)~F?6Q1uHe}AHad*7PvMIP&{01(- z(G@p+(?w0?{dtOSlF0(Polm3-0T(!=V?U?hTBh4b@89PU5Gr56 z{`DLXL=oYNJGc^0UxV*?-s|dhdw9zwrF9-xaerN~hW~Sh5+YZy^tO!LJDj#G9Up@I z>lCGMWel1(Xg8oI0H^t>r*lfFoio;aeZ6=#yugN%iY>I=FwCN;UzJPlV{6eD`dC(Y z!aGVw{2p`(7YOkQ0@EOr$TMrIlpWQ_o|Pf)aBShXUeh( ztO+h;s-vxV>EV3z>}DzO%$xI69o ze-yIj!?Awm;Ql7Cgr21y5)w_xBr+X&tRL@Z$*pn4`yf9yovY_& zXz};0M0IkIN?}~{}TAAH;sor6kq;b>bh0&7M%&cgveL} z+^8t-rHyEBa!5fg1OWZn*~$}CK}TC9IYP*v7?kis-z}|jEFC`D%eHx@w=i3kO)@pH zE^Sham2k^odI=CT`)@k;cGX%&e>`nftMlEu0EWET5&M=Ig)@?+qGoDIOgiTBWrr_@ zQ$mJxSd8g-PJ4&q zvYP=)IMmpmUH(I{`peMGY0+N8oCrXfQ3%e2-dlC@z2`1`l1C)Gzm~>8^2SH9Wm1aQ zPhO%2<8Ik%QrV2cvxZ99N3Bx#`lh%Oj)$uWNhxFqYqYXk_HEDsWck+n4X1ESbFNlOR>1UH&VL#0jV{;0>6}Fi2FcYmd@Bhu_@Tj%8 zd{0SNA}mcbK=RKi9rBiHwleo{tF}D~mQMihdThW29JQQ;35&B|Xw+B^Pr^QA$R)UD z_$8Wl(c~4i2(;b-b>on!xU0h&R@%Mu3bB1V14Eq?y8ZY;%UknWal*Y=tl{M-@61V_ zLxJhh10x#?6jFz)ZvQr;Q?fR3wv|#eA3fwYkCyG;7LTHSi9{b?D~ca38LQnD?M%%6 zE+A3#uT3`zQw59mVhs0H(zVv9?YS}FQ-<1@n<$S8xMcP;XZcO4{*a+qVW!G; zUgG7-o)=Vk@6}6YTd%9h|F|hL?WF!?_BiO)hKl)m!h3QWJEp~NoG)XBiRLKskGAS< z-1v2bgbIM^t6mcysD0T(b(07!r&w|=}ZTC8&Ic$%4 zp2hP>&S%}D;isSC+u`S%T)~SrJxy}BdbV#RJ~PQ;{Dl>Q-i^ajeM8kG_Y?rsr*bi+ z046amiivf@)OIp-sK+L`DQsSyMis(Z+VVsDXD^nD4zVP5WTOm7=6k3u2F9Mx-Re71 zeMh~YLda9oIaHgVA|J|0M{rfycr?ZpcI1+W(%#D21bYlFY4OOB01gPRVwBUJhkrem zC>aqX?_F4z+~dwJ@6GRPgty#jF3o*Z{O)F5rrWGQc`yrHu1LOSrsmLCdHxiX5Vch> zWPCQ3UmQbe0dx_GnRYc0pzanetVwCdj6{;{cfPZChHVAj;5iFvMcDLGt#9X-4$cBX z!)DYyzM$LSzEb{VFLL5Xa(f_3gN8;kRkzv?u8c~%t3)7Jh1pGR9N)G<+OR6_b5SU} zvD9eP_B&=xrm<%o3>xkpxC14Eg%0_+{$st7*~i5g-X131=BK+yQR{`^eo|p?dry1Lf-2TpeCMsb3{p^TTEf~*Ww{*TKpa`Y^=cz%;o}t~({1Cc_T2?2yVm;>=sFmlU z>31>KB(E=`4iRAqucIw*`6;7*2l{W;;>0Tj21yM^qGsyuj!Be4ubUxs$<$;?rBk#w z^G8F}*?u&t3ZjANasRn_@+46nt38vBWh9-021G5wP4a46WGX#R^>v;8yF>>Fl)HM` z$k9(`M4JQgvC^ADx~yWA&6Rq)GFc0GTjB;I#fpQ&cr zm3T+)jnET|PZ-u+>8R?-n5QXiVs73{ISXU$2*~CeSAD;9EDTpva7oCNzhA99d^-`d 
z^%FTv*1n+6jk$N_Z)(E@txe>itxke8-A|W=%I!^P>u8R?wd>1X*cXA4S(va*TU3-N%zX}wSeaKlf{kYh8CC>rqciq3TRyM1O`gpOT z*nkj|L2X^w+0u6F(^T&iY^mrrQ0aVheUJ-j{%OZ3i3qC7dz~b-?yvmeld3UC*F6zf zesyl-k@>y_TQnWeuToyK=GA=Vqs}AURhT?TDr zGI(mL9`9uBG5Xudda2t=@?$H_JI<`yaWK&7Y|Sb(gc8ozUA;U?H)!Se_<%GjKY`4B z>Oc`9^+|W8c<*BYiwZ;L)=ll+6_oOz7#)-5OgL97{q{+{I;XArL2v!Bg1I5lzW4k0 zm@>5CN>YfyG{G4P=0*kZXi9eNjSFGP7TVjT&3grKOS>VO3*@LsmzuTv658K5d7z4O zVszwK=k4vHF9A*g!(+;^kocVN)-qwYe4dF1rJ)3I^)8F5VGzdIt+D1@1E<8{5_`3?of(poMA9j>qcZ4Iyb6Cj=D;5nha2cwQ& zHW7MrCDIrz7D(8X=Osx#Ej$x8z7knm43q>E$|VB>Q-t4d5*h`Nv1gn1lnvg_lIvy0 zprPSs=_4${w&?G>ujOM_l}&D%?9yH+sawSm(S0-Iy3r}xLmm-a;d9@4Ww%bv+YVV^ zhzW3Dk-X1acW-Qpuki1-n9W7MSp)=G;IttZmd#&XcwSOI?f&s;jdz-j6e0Cca(v^S zX?~=A5Ttk>p09Nwc;U43MB^qQR9&q*6Xoj5neX2bWFsyY9R94A5{jwK0XD&w0{sZL zI$KOmp_DKyN>eLy2-rjqhu4{_-RUU5iS&1b!Vp(@vg%d&eDV^BzkTa=f(TI!DC`aK zRc}=kFpV|y)hTuG7ukmuSSAPBut*AklkS{yod5fzS$k}s0yX|monK;EsF$FIu2=Nz zEijGXi8H-3T+X@u7L(O1>GjH>xVXZ0pjP2u}er)=|@ zd$n32S>nCH&cxh(!HmG(b;L=jPh_q*xNw6ffrq=CUb6fN)KsyZOGFF}QMdN3c@{x5 zltOiRicdPI%8E%zac;0BsYvK@rQ3sI(Y#0qdfJVLa)c{df%vDy=CDUcfi~lm(qohJTgGH=XX$W zKDb;ng|am?DDXMw^EXw_yN}Jqzd8kvpa1*$UL^lb(i>>J+P~^yXQGgEpoy+^p(Bly z@(L+=4MrD!lODC zw-O63U-6CGhAj~5eeXka45IVQeqQCY88R=ICPecT!eX0l-ZBga;Cg}3t5*UP0_a{`DI+LHQS|*WuQJDR1`?LY?m4E)!W5C zPN4wBDQE7J=g>Ps#UC8Ei`h7r5xBk+xKWA6S9Hl2vG4=l6$ben&&r|w4!T`MWkg@PBAm)B61TImJ-JF3cgi8MKi zpHF4C34dO7h$}Kd*V*r(c_Z`qN@!?jsw9<5>EO{bO-GJ+`=xnANWI$am4dS{@rHb> z5z!bE__qr3r7FjO#&0<9FhQSGNN0cRq*uVt#f52?atX@}EfOULAs@G4TlsP!_Wow! ziD(_*8}G`&sTo|%JIriBC?t8?Iu z++-bV6z%_>Ar6MO;9I6|cz1^UHu=SSJZ`I>&&BXrB@~dJ@Pvj%L>RJ=^CVi$W!>DR z`igv?k|Krx8jxo>r#Mdj%iCygXE)b>e3V3Oxw;*BxLb9}&19>s%;A%=LtSyAqV|^g zFVjc-uarvw37wt|P)co=`UR*kagx$q2j(xai%w=6b%~`E{x@6LK*ih&Ogkarbx9&B zXeax{A1(~6{c&M#?)zsx!3|N5HJU?%R?EGdqp!|#nu5Aav{(?L8vx5tu!f7S@lvG? zdZ^EkGXS7WO(oJ~DQX z0PKxY;q2#YS=qk2C~o}=K=urs6Y#ig$5!vz$^KRDG-+ckSgrB z|C92+VCOktF2aLp*vG)HuK+CSd#@X?x!|;+xCip@cSuhs#sveYIPF`=KhM2?z_JHM znHk4!>ZX2%sb66L;X0v2@+Xx1LsK4)rTwL5?f4J4brxWd0G#7nJC7A2H+xt9aU!eD zyJJV&cw+RgcYl1#I@+0S3&qvbT^kn>a|4bAD`gFUwJ~~ZY)(fKJQbe)#+bXc;M)Z8 zwb#Nv8=u9UjTANZbKyisz62*Y*8cohJ@7vM4c-3%R$4~`A;-1SIK#>7*p2{k&ao8y z!f%mTfWi)Mp~ZEvSC=ir6r`p7oLy4Zf#;D!8`vkKaWn%!QoHU7H!ybX@L_Ro$kkH; z2nC?8HEjFw{nUwu@c=Te^QL%ZR7ejQKJ*B2%dr&Nvh$(;KcdIoy?1A)xJc91d_RGG zN!=M4{ky(3)8#8JlLbWK+^L}Xr9hB~8^@w-4dC%9QMcprT(bcJ;3=2Rl$QPVX+ZZC z4SvZ7&{cK5tV<9i)6g&cQXvH_Jg~^L;Eii1P zi=de~t_~mA;5x-vr!!bgbNch>ZyS@23xr zIVhkS?%7a|#?+{1s(YBb;3@zSd+!rqVNqj}Y10LTyZt{CQWhoD$5nhrl%1>`` zI4J9mpDn+71_&>7B~x7Q(k==qr+87&5yUEQS?8X`Tk8Jeck8-&Yz4uix6!z!%298S zm#?&5gl4kg4}Te7UOQrLpJ^=V%~S}%%HgUCMpMHoVcjgyL6lRTba{!CPMF#j8n)M5 zHwa=Xas_*D^cVb}nHxhsiaf)qC?NNiu?g#l7#P1!k?(lIYPN8C`Vv}f0-a_C<6us| z^ChNMg$+u07^^LEU1>rzRcr-{kTF}YBSTDLb#-Y#F;V-{?U%0#q4<-?B)x|yt^LQ} zEg$M*0+JHLUX3R*Yj9ku1wL+E!nFvM&#H1G`Wt?wd;6k0Vpo4-A( z8PAl%sc6FUZcD z?^;VC!sFd+Wq0=pt8TqClP1-M%rmLwU97!$reia*#)H1AQ~wEN6nIevQq(#4K3N#o zCrN21#?=B_)@V369NY2B+T$t*Qm~aFt-PK9L6fI*Eh_BS`i}F@NUW3B7cJ~{ycsW! 
z-`I|HI52|HBI-!Ot}`a|lB@MM^ARs$*^LtqFbe^(cLE#P!QWDBj2ZHYhEKvEYyTTx z;}n5Lw*3lNgGs-$<*s|yk__v;V&JqKK}CMVT&OK!56tqL7&Q;u_XL46yEOKf)pM9S4#P2pMikg(L=^2(UGc&;DsW1_GKnYa3u=42S_%?-=4_@9Q_o90Sb@> z#QJa`VN25FtVdQ%RZ^WZW%>A}bH|ipxkFqH>Wn~AsSV}y5OZXCo2B|ebNr(;0$#az zC))3Md{*yr9YVj4PxFc%o&tk5ViN{{78hHC7ahyMFD3N{fw7iK6I@H!K$BqJOm612 zRO8bus@W^|>9WL$woG znDn6WnoRZD)C3Y>MYPQMbtazB&R(kcJFNB+eqB- zP4fMivIU_XT#AzmU>BbHxR?O&7uY@-yangTio_!>G=K2SY+^=j0L%tOB>+D67a5RU z1eE?Vj1>VZ>lb!kF8SpQbt29ZWa=-wS6%1$SOhWyp`~4XckVlgJpmri^2(vKg!k$W z#I5MJ8@6T2RoUX7YmQ6*FU9wtD89~NW_yQ5+0clV6kJu)fc1O+FU|KqmgXz)IM4SV zX};7V5P*{eZdqBph-Vty^8c6S`yWp8&Cww0qnipSmk0F6W-mmpup_k)Gm#5--TEh< z2K&uC7PXaau=;Lmnffakj#ubZC85lp1*efjm<}U$a}Xa}|DrQ!&KLC1|xt^f$`rdYFg zd-Rz-Yas*HveZ#?r`a67CJJNFK&YF+jQ2TCMg4U{sf zM%Ao@VTb8wmuuUiTijAr^J>ssE*suPUO4ym4~OQsEHA3dC>!lNq)-0wSrtcvV8pF5 zzc$1p7iAhjG(6w}7D{Zl4(Qyu?ELK?{4i#o8{miS_?1-p4IVuFsbc6fg>uQ{}p5h~|=XrY&uTIFTUd&fKW*6izX)lnvEax=QccK$+X z;6(XG#(WORsNpm_?cx|>k`X~Nm#5b72wG(MGzf+`Fs*H4;eX0HRGF;>5_@P~v|4iZ z1t;nHqx`Y+-9FdggyA^hRi{lS*uJ?`mozaejRG^2Q&6iV6nF`kpD}bC+#zP+AR!O1 z3y=pcs>L+Xzimh7&ct1YzfV#LM0_;QE@73cIxZNZ*aA*dAo?snC+DLF`#hc#4k@n1 z_}xgb?*secB$4JE&54bCJzkPXrk_u5jOQl|r}zeH0#<*6tQ84 z|Mf2ru6f0rp*0H2r^*HoJ2P z8D{*MYR^Ya(Ky(FJ7AoFA5)Vh3r#X&_Wg$8{<(Fs_oz%85gxYPYi-@3PmWf-Vx@C# zZDnbJBjWNtSEKvE4UgY!uxNbJj$QBXzzf`Z_PmZ`xkx5MS**zfCR|mw@t3^ zJ5U6!qH1`G%P4g*skh3ble-Ho${5+oq~}79xtN64oKn0BPRbi;C~GO-v6Af^nAfn9 z(p~}7Wt)0=xlo)O+Ee4@RbG^^<%k#|w~MkGpNgJQsF_B5%dOYmxqh|kcdRr6-oXgy z#0#ZNJr1;qmrdW%h(VJ)<%_Q(jXSb-Fl)%%z^lHe2dl=vDGmBBolXzGxLb{KfL~eJ z+4|UAG(b2Q>@#sIn3$1+GloGk+3=j}D_K8V^%H-`!7Uk z=2x&PIoNQ0l*m*=4syHF07)dIkdqTzm^@s-pyTqB8kWnsMRw5;J%g486lcfc`0+u8 z&vjCL+r@Gt|8T~v>O9rFfftUrpz(ZC?-DlOW;RDHJ&%ykeO_RNN+#@eGz zdB(5vpNq2*Ft@qCu3VBYH+u7+QMS*8I`Zx~Qx+%yO`Eo0%3)CcF;-!E(HcB!or>Oq zhkm9S#0hN6jX*y0=JW?t#Oop9}9k)-!seSgQXgY5&MMK zIJeX-UJ%bex#)#NIhAO>5Wcj;dh5|}0HOS6fzpv%^ zX$Hn1nv>avI=)i7FIz7rV0I^P=6BCSK2IL<8J9130+RUumew;L>rd9Hq6&euONAco zm53ru&OVfodcvo<&xkhY;03{}o+h4uAT22%xw8NSEZU#Nw{ENV7KrgS?!zi z6}93cxf`yfn&!n$E^{*A3|RmN)f#;2k7!mbCbVeWL9oe=S2vQd6oUd!*#r`LB8E|r#vGA4+V{oTSJ^54R ztO(z(|C?fb~ za2UqjWxHSO9UzUc9t>~}>5}*kw_636NJebbnU0^}y4=U0DF7En@1-N@Qztl1{*jFu zRRX*nz|T{74~Nq^f;<=c6PR#c;p(R0@yod9v&Ed7}UhXqycr(r`xa$pPV?g@)rl0fPer5 zN=8Q3z;xoK=kI)8Y~=7*OpH#}(l%&-B8QA&7!v(C?80okrx6X8``x8q4i;grRF@ni z3M0Vfz~J!6M@iSf-Q#)Be&P$kn@6|p?d|alG2lNj4W~RNa?wUYQ4&J0RKB4byzL;SR7;U9J_x6@#t- zVSMfTyk%hZdC?1Ad7y|*$Bh&9NN59Ralw9VQaoOi=Fki)^}!YzP`D*Kvq^9XW~AX; zaXv=kA*}7q#ZJ2%a2-VM9=N@Ivb%{3^TlEH z4{l6ZVn2%8bYBB|3R!&j?^gV0(C&Hr1TNW7;9tWvR_&1`P#pLz0N7mshy1Z89G8px zSM;CuuScgdqyU5qJ_*kFpL_560+zcU{EC--KL2xX-y3JYH3D-E{uZ15^AsZDj@g zF9oiHzTCqN!*tjvNDuAusM71I6?us)z_DJ~FOXuFoB-L(ib}r>+mL&J!`9TX?$mAl zy+hq;fl8OX@r<-@*CxcVdo}#?Cj1VRTl?c&!Z)f8ojj_*{!5BgGN@#lVOAnH#0QS=}YFpqj7%q*o z`#|_W!OBt{B`zNC$EPMt4SDQ*EcosJp?{3UR1o0_EmPQ+AbiVC0OzjQT=ow(nW|m{ zZkR$hEswPfgzUv)!+CQ(){wZ0vEI+|6vZ271nZ=A8T|J-1yu&@gq%1*E zm3f`9`?{VQRMne=bHvoVw>sP?3pzF|U2evDVivq@nukH^0+Y|$tgWxPL>15y} za61M+B;Iq`fkvf2`2xojQT$of-+QTObi+lw8?ls92O@JzGsM^rr}m5wS!F&h2v*u3T=luS)IFNysO=<6v7&{dD}7w* z(LA~cJ6!L+P%3Ka(v#aQn*Ti}tqNML>Tv$GTHseeXQa7(#uLHohNiNXLLWWKz*-HP~aNY z9y8#kPde4#ofKJ;H$NWimwv{=N3h}S=#u2e{YtavL5t6GcT|e6#8uhVhAwDyC{FFx zuD=xs2&5yBr&ic3RWUJXb9yse8@_rF38_{jvD_b}MWpJsS|Z1)Aw$RCHc=vR#uj0M z5n_RI<0Y(BMd~ZkT~^t}$t!Wx)8wS>JC|qU!X(yy%`8K{3g8n<@qQ&r$Er{F32PLS z(n|ATz3cf4S_TYLMPw~aQgFvcK>!=zb@CGFD3W;FZ7kNV&Ar!03no9wHz=HE}&?Wi28cF2<6u0aLowJJ0i zu`0?dtWf9m4GA*arMB+8>I%)3DV~|QYd@WPWu7Z^sZ{Focr|J?7M?jbk8{k}O;m-T6x@ z;-77DJ&qI`?aRCbYHV{!B>TG8-qG0zu}-WXzw_u7ENFM}a-N?^Q&&_-8qKfH4ysvO 
zu+l*h^P3g#I~K$Te48K5=r?ZGfiI?ADdH4~z3mTOu=Idc))pz7ZEX>t=s1DDqOG8O z&*#XNo~8OYrfKKrWoh}q-KKkFwV#0s`}Z6c!moEWRJg|eGJXHL4s=imLI@jQ+ z;xBg?ogN_N>CV62X?v9Gw&596-|1cWI~w$ znWS2Y&V>b2_n7622Opb>W`FNz>6-W04RZMS)p2pV!4l?No7r{u>SFO!qy53o3)m<% z-d2;__;E(?T+g#zKk39Ee)eF>{?LrtG$Io>D~QoSzfxY%1`u6_0EffwnZ85%)X&%akyEYwKTNo1D!St3=)GMc(S3+>8w-pt*p5E=cm}$DwzSlzO2>EmMt%U>=CeMxv;}nZMr>w&$4GP+XyQ z*JwR{8QI4cWN6PxzDw$8JYI6wUcG2D*JCWLcn~1*bo(b^qoNvJ*+Wk$B6W=QE3Pc~ z9`6J}{8bL}U+w2_4DJo}S(!k`F)Q0ydpcf-n)hA8r%Pq9h17Y7QgwZ&dml zsxByMXhH8^qp~J88pf98l6Zx;(cx{(f8n}$^(_R<7lN^Z!;q1(DT0R9*_nllh*X^g zovT+xuB#-{vj0>nO(o8*!+LJ^nAHna5!$_LVGYvIR`{Eu>ty<)2gudffMWIH0aa!L z*OvvHkol_*-+%?+VsgtT;=2WFDfEZ3hT-$IZhfi@^F?avakrr>3mXQ8&F=*A4&@Ug zxHZQ2#?;$+$QBzFR?DdOhXVyj!#S6SvhLET-Uq-uW#_9QODOY3qz)FDm)5CztP`4= zKIJ^D$_QOw?$jC4XW=HDCoQGvIpSkG9JD9>}hUadV>Pu`@{IWOIl>%4|JhVl7_58CT9N)JGnmuIuGx-RI7GSFBRhXxlNwA>rTP) z`zN*bkIyCMsnqvz=GMAoMX;%523`jFT{B}<)*)?zByM{Gmn&#ey4*GM9b}|y<&NzW z3T!TIR;7%3RI13=(5Yb@7BVy|l*kt=UDRp52yqwFZ=vZ7pd6 z)6d+}O+meQ2wMNHNFy>YIy9_`Hp(fV-V{$~s7&$A@$)7B?YKy41q*5XYrJK=tg`|- zEt$>Y4(ZXmS5eMWOsq#mFsA*;DZ(olYY|~uF|tIeNuHRdGu^NT#O>;R*Q|&cmQFtx zK9u>sC6&|Bd+ntVyUknN+h?)PoWP>G!Kj8-|KO}WR%lFit;5wDnnZWqeiX>8O55F^ zZ6j|KzNdE9yzUb`IhQ;-ARu(9t}WYF)yPnXKAyRWq0uH)kBc%=&^=OMR$~440Hv*> zA_e608KyF@{_@mI@q!`hR2!mNrPnuTu{d#A*wF{#@prU{U zL_oj-N(bp36e*$i61wyjI!Fnih=nSlNk@7TLhn^XdhZ>SP7sg|3FSsQ)}lm>zQ&7o>}VTqky@pO(tM*zA#KQ>Z>fG7y*2LiI4LfnY49~k zR>29fpn@7Xce1|@`13<3jks_3zP)?yXnSXt-nu#PABYR^TaLF@;i9<61d`kH`IMhc zR(I;N_Bt8i`t;BtV?@CeU60ugI_fOl^Rd{gCz(I!8Eo?HQw2sq#B@WiDZ_%yR>hL( zKqfuI#=!K$0%|RrFM32wz1A8ntx$jVQm~Eb5Wy1Mu{%J2T`B)GxV{`W(B=DkwWwYCcF0g)%q=#@v|_XGwb*4wkg=ZMv zcoD+0F_G!R20Pu1!i;233q!7hS2y~Aok%VqTcTlUk>br57)*&H4<&hpX>u0Zudool zb*|l+L>bdjflQREwMA8_d$I=#)$Hi}Ksoa*b^5fx0&(w}8K1`;cK8@-An{wHc9v)C zs+>V6=$*Dht?V68PU>;&hy+yB?>eKqXKltM{m#+?F+jk>K7?WaDOvtVe1~${sbI=$UzaGrz6uLt9QwfakZowLMgZM@Xbo>XS-wg8O6uq8-2Qjv zBMQ>5h_AOsslzMq$gpwNGk5-g7JVq0Q1dFLBSk#@gr*_Q%`8>I5IL=MS{D2M z?eiLi`t0%5$dj}C3~M>vEtXgH?!>MBV8v?Pq-v;lKC@mjfMykfhY8I@J+iC7*j|w` zmPfbD-8`iYKJ`z1oAzF}8V1+A%4f^o_K?Bm|LvMC&c!DD7v zEU>jR?t@`jOV476*`InSEVu$2^v<6@DhJY?_*4CWviDRt*M9U2{PwQS7?o^MV%6L? 
zhvVfwG81RwnV}LV!6T6Af=3eUgF0+Q$&V+`XO9dw)Zq*yh9eo}Cf$?qNsB5q*oHf0 zULfJ$NswU#h;FjngOeSmHfEeVO&b0;bO1B8EKaGTt;WRz(SxzrN^k?-W2f=cm59?e z-OSQbUbFt(BVTc|euAB)Nad?h1$ZgY({IdIFN9tN*qHw%NV$7<^j^hndEa=SQv_0I z-R@t6Kq3`jZhONzt))b;PQ2UG?6?(hKbgqi&-e%>A&?X|h|=t+0D`(y*8@ zID0tSgc)oN^H(FtTKS&npF#Uxy!6iU-ur`p+^n+7$R!^Dz{mAtyL**|D}i5wr0M_R zV8h(!5|}yptsz#pHkvUw>bK)!i0$rdH+c(_#5uj>DNXmJH~h~^1~DxS{HID=Vg6!y zXD7YN`c@a@<%ReCZO=IORaUqB=_@!$fNJ4tHbkPweV3hMSsUkQ3Z|zD4VJ6N`Lntx z05uiSDIckU?(VZ6-_pr0Qr>~y_)K#-<4o$zboPu#7&_nJk?T?gUaxFxrW1>v-8q*( zFU@%bSE=&@D6a>fGA!$TiP67j1Ayq9Qs?fzXu7}4S;-}sH8)uQu z88D*?y~D;d`0^bt37h2AA9?@dHV%HL2z<$DkQQ_e@3o!#JLepE{Chcl1Lbqwmi(o^ zT$vhf=q}xW{)g{maV38I(N)pLW>DD&24Etu)&u-oVWL>NE_3cqP1AH*(l-Yp^P>PC zYm%q$+4=JXsB=BES?L<@w>ftStFj30uycppVIO!ySc@VEs46jOyvPnNZ3E-AjH+(p z5+_0D)s2f=96Eo?!E(_LcZUo7hR=Vv>7d6qTDwzXgo|$`wm{;2$8I54hO;YNPay+{-u>)g4m$~oB#^kjb;F$gZ8PqCs5rqp!VY}(#b0U=bI!o{T{S|}n~Da88V z?_yx;C$`h~1`Zj|Ewb`eGB*j3a4e*=svGCk9#Gxym!%KWz2@wUS%^|`Ta3{GhcJR3 zJ$f|t4-r99$*Nx}8?JePq0V{;%&Yumvr+xi*kY8YLYF#Pee#R=kq$VxBpecTP_E3V ziTbKPZ3|A(Po6U5?47>^LxHCI0}!%yFDE=QRFdiChZ7&6PhUT>5~AHyKXJc6(w3bK zRU_11)~2kv{X|&~7R#=0iMq&j`LD6T>GVYS^kahYD<3yZP0wsUy|w`hh?<%j#=x#u zY+g#o#NXgQuA`-Y;%@hpfPm@3+Yw6UwQl1zAb2Z}>4|ue^87dWtHRzkaqRyLNioBSZ3o@X=EqZP4f6cuT6BD!gYrJISCD4r1<0V#CxvQ8g_=@O+KDKJV?=-?y)Hk*-hs|7g*@y* z=M9^XyXR6l#y5bm@J|1<1Gp9N;$rTwP_7Ad^Qsx6M3*RgFAX3#Vl6g?h$>_&N5uO1 zik6sEvHJsQ)e6RV;=`1)dCM8jD@|F^tFjD?PDX7GxX~$^sK*zmyZpdaMn5}ISAXO; zaQx3ciUupu5ZbVa*p$PZw_D%WH5(ae;|Ve{Hf`)d=cRB!q^w-t%dz6KSuv*n%7_qu zxGoua_@CKaAgDCC!wxoQ2NJ09+Oku>-)zh)1Sk3JwPlq{LAMJ^M>=^UG!G06CLN1L zXui=TZ*RuAS*dR2)Dm{K2#q~lmJ)-Z$wM7=URU+Bf4u!94+qu&V~I}h!I|f}n6IxJ zrW_f&YNycYx^y9Ud|SaDGf%^UY6b?we{O({&F^Lt5jZ{ged7AGS~Knr^{oA^I3w;R zLcHC#kz~073h>+i)e?N?stZ(dr z5u}cmztLjII##@ zWCMg9V0SU%i35zQ#6sG4dwY_U^;vnuhd*X+b(x-ZSg%ra@?0a$(sF8#iYMf(u=}rN za&Y5&G$`bbOCoMPp7ol?80gzD^QbjrAqG&p30R#5~X~My& zsi!Y@y^bzIihz%4Zch8akEtTGrwja-BQq}$ZrEuVi$=9t3!=vCP}SJ23m_P>xd5F+ zI9+`v%LmJR-MXp0X7|*B=X=gP)V7W@?E(lfb^i>w0N=p5Cn3y%{pl`$ijRiw_RBsR zd2Wy-ezb7g&}%UU*FQNvONZRi3jqY&Vc?J!=Ecp%?;NSM`k|)n#ML~xhZ-|e#joVT zJj)OgG5Sds4fRsfjYL%)>=aE;gO2xXp(TB9_|S*VbbNv+issGlA8*4lEjnZMkOW;9 zF!!BI`|<6bYg@>@- zHbgjWBCx)hg!9Cc*Te2`vUS37V@NO+u;96Z3n{n*h;CJb4@U+6s~g@XsfBs+&QK?x zWVk)G>KAnp{;tED;lojlSMc^1&2gS_6jwNaOTWZT-+;7@g%cmfPX6*=YlNu7<6TOj z!4OK0u6P_fF_`Lq`!bHlcuA2H+{S)B@R0W4RDJm4u``QU5>MT$2U56Dp=oY^gVGFuHqwvJiLq08Dw_h9!(s;A9R z_!sci{9R8{cRYvj|GZ~4&6hqj?tv1yV}%Sr#3iF}CU4&SSE4%~ND-W4Yz0#4#VYKh z!z_0z@)zgkpdF?D@v^U?I&*7z1oJsg5yliFG56=q>WT)hD2kY&SWGuPD_F@vD}z(l z`#yi!8A;iEKU7~~{INHWf~s?Ei0UIN-RdV#c-gH#k1u2a3Td$ZpQe2~=WL#M`m4w< zi$UwyB!Y;{E!Q+gs@!Ezl)uJchJ?6&JSTi@9hPp}fYkb_k?n;qhJc_@FouriAu3N1 zY*5dYgWD7_0oeT)IRrtMWmX9JIAVDwn;msGaUWmnEg>bVodxVjKf%|FsH@6oa9)}g1zI9u(p zN2{>RW)UNPN88B^g<#?76q?@7x6@e(jCR+4B!}kDNm5RI)5?%_8|}k(z-K4mVRL^r zod>4<@SbKTtya8)Yscz7V(a6l^e3nteyS}@uoia9?sK#YveSZkC zSN8U??`35f3h#8a8G9PxWJGm_bYr1K6=0A_z$)0Nj6Npw^MoUveoF;wWPJZN9qp#Z z+QD@EL5|i*WWdxow4*yV=@BMcRyjvwW_|g_m`7prtLZRPkUH8)wcAc{-PJW?f{X#B z5GEv)mfuRfh@_rWsmRejUB1li5C!#5+TIknv+8ASKl5u%e#8M}W1>?c8gEwbSJChB zGpxm}v$3ez#lo0#u-sB;O@(y}jI=G8G}OzQXvIX|g-U5qk4RmkxXRL@i}0eGPiQYeC%?ty$dPJ9QNW{?P2W162VbBfph>-Oq{_?_mA$ap??ht)JeNMS!;sXL(wA}jm7Drqf zHS}B!Boi;xj^N?uPA@O#cbKYFlQ>ybK@Bf6>@QtlWNaTy(~r})yXHDIl12@ZKR`-A zP<)y55BO#@3%;IKx>ZSqFOrVG(+HM+V_SbDQ%2WCzZE6ihoEuXM?(EI;RV#4LceSn zA3iJdJ0s%TNWNdWV6MwPyNB>{T;ikUw-L-*ISoKNehzvt{Z0ezvn%6uvW^yl!Jt(| z(0$I^X#aYxG66mo%`~xX;nqEmcB{pBEVlw{`I`8!Bh|HhP;ya^D~GuDVL#2H;aMtJX6n#F6(-RSg%ZG4HQhSCdH&00P5ENREvI$W@0-b$^zZ*gya zC}1&dOWBgzS$>R}X;3(R2nv|va$EWZtc0#noyW__0#d(sjq!~d 
zWYC`-wqkY%RrXr_k6XPr>kjftLaC{!`Wwy;&WygO$^rT6HSO$>?y;`7By9g#tIThZ zA+=k4aZd+1@c{Jnq{$IjJ&_Nq_>CdR&Ky)Dtu-Sn9tPJ^sPR&3w}RhiWUOpRNa}rzCYY%*|Dopz zZ05B0Lfz?mhl$%CS?GWr*(^taLG=l#*-X6-cKf~l3nMiqc6dhtmK7Dl^V^tS>qLXU z?8L!sSCGcnUp3j2y8m(i*q3i^(D63O2BXpQMGa zvZSXi-NQWbP2{iIL7U!YmszU5l#29Vs|drL`b_VkS}5BxxI2Pp#~#T;Y@14_zp3va zb7PT#Xu?s|)l zyRm#B=JQ;S^mpF~aM&NFDTP9VGW|}>Rd@k^l{Okdx(C@?tj$Ih>q$)7Uti@fY-o+9 zvPcN`wAtx-TbiyXL59(SR>AY9^3h?F!?n-V7oK?k`~c7YqL=I=H zshQE$DKzv>+r0w>0(v10pF27sAm^BnlU-pC7i@v`5bAuy`@#r+u)Vt9sC^J*U z=vV9+nc}1xB+ju@IRiRii1r%fJuC9um63-*2G;G^0=vf!H_@?*p&!-pfUFxN1tB;k4r#UwS$^N_CB$4Hx@yp(qzzRmF=Ij*f z`X+S7YdN{U`sYtmlyQTRKeoaj>F7I8{S86|AoV2(c(mQbUT zY>1@El;AG!6&}eD3xu&G4i(zwjdi&P>hOcwUmt8+%52T}_{{vc-O9%&bL4nS*c6qN z{$i7kM9zhezb#qd%J-~9$0Crt?T-^*{kXNM(Pj0fWEuYSdz}SZO4&DSkKQN2)>xW}{!SMZ$hOM%vX!wbShWZ1ut(hF0arD!{_)bFRv=Ccd*5 z85=2UPgOGE3zs7CxBW+eapu({f2XL7_J6w_uX5^w0>TB;3O8 z=;(xmg!8;)O-)Uh9f>nnTmryZ%t%F#nNvT7OJ18OZ4V^_Z)Ei8kw0}WhaQacM}~~f z+3Pa<(56Lh?u_H5rXre+q66}%%#8JI;bfO3pD8o*qW<=1<-`#yr`3r1WJi4lLzj4t z;X}!ft8JuDC@1G=8w}%RPaqkzQSdE&EBB2x*Un^t+y{w<9a(nMM-xri3LHhTAPiH= zRcdr(gUfNbV6w817z#aFRhps12So2X9@Eru{O9G{gL>F)1DN=3Fot0q4Mnum^l@Jr zHBP*Gx0u>4HOYhX03E3W0Wt518ZaIl=#yV>HOK4>cX`>m4co8lh|hY^H1cVW`~9rd z-pCq}2Ptr>)k`f-MNCs0CO-KAg4ru@7ksXWP*K8|E1nd%+f16FY@xZ@DQluGvr`b? z`mS_)ks;q@YCElbd#Az3i;YU7#&+!ZEg=)Nl(8@TC#S4T4ki1-J~3eV^4F}se&#!v z^=~*&>j^})8M#CznubfqxHoDX*SR<_Q|HsGfy?ZzS@i9S+hQaPK#xKK zQ^>ok@4c&!gTFO_VkQ@LH$ES3`GlYpx@mn<){8{7*@`%LUymE5w!htc_N?5wAayg6 z)6K?(0?`)5WHkivJARu-($$Gmn??_@`ZU2<7L4QCA+E10>3$9WsOLY5RahnS9@;o) zhsLO|*IbSkj+4xot$FF24X@IKB#vXf=pv@6Kzqm&ETl>YJ5(v2)>HQ&h?M$)7=`~Q zk%d|N4tjH&V(!Ef+_M5(;N7I+W2uO9GC(wSPry?oH4V)k;9O%#R$lx4vk5S<`m8#i z1GJ~#vS(gN4+70c(xP+_2;^d(0_*E;4}Xemw+zw|uq?T+)arQF*!I|^%FwuI1T~Fl z(~tZ}-#== zeA~Ze%t9Wixj1@4_-1?w1}!sAc5J2)yP-Lm%wZ##ozlAI-bChO=_ywb^7UBQ<2Lck zVD690HbS-C#P!tuCb;b;@XI&%#C%T0dBkZF7({oPUOo01s8qpo>;uvT+$`;1@q)nL zuiy|3rl-IvePr5avkn)-u58o@smheu$=NU^-1$p?`aZBCrS2=w`6jQowgUH5Vc~!59e#EMX8T zXn8zH++(MFIVY7m)$q(#dPY&Y|Ky9}oiA692U`a_o-gLt=~KW4ILleBL%+w$s((q{ zlY`mXIwt5mbHUj;0fDP2lS8-3_jL;3|L8kC0cyOzD(rO*9zXi<028gU!G|KI1+iFJ z|Iz?S*Gy{pXQ)hu%N1iwPprCKViK2%-B+q&;#l}fJ7ML*RqIlEld&Go) ze=*k|&8*HEIm*QCeUg;D=#pRVqg*4ev+jZwnb)3q6{|Fe-Um^DO{qAXc$>Pv95+7G zP}`CQQMi6(p%Hr)s9xLud8yi z)2wQoRZ^&=xzt_U^8i-nv%|UhWNho7l$lEFwNW=g$ce{S+VCPRCZl=4Ki`S>!_i)l zVPeU@>_g!-VKrqQ@qWnrz3hfO;JoOXpQIZWryi12hYc3|SW8CeLkvUok|)ZIPzQ7e z?S^`cz3y~gq7WgSQRL2tOkn9!(%qJ1zEtU%^Jb3}bTw$ir!}^&Cj%j$Gi7&-zOM4* zq6xpy(C@)C>U*&8pdsH9{T>#xNzqZQcD>cu-dXr>(3(q7*FMIw5gkzNZ@|u+b;`bn zK)6MeH?*!tPii1X>YgT3#WjyTHb$wyqQ~@DD_V84Pmiv?5q@lzgRVK<+E$S<`AZ1O1hW|;#MfF z6@O5UF0vt-4kaqe96HF=fp{*JA(Ie5U}FlEa(GX5mzSq$`1I9D*WnY4C$MnQoP@YW z&;@G6K)mRcR^E#Ukl*c58$e9D9}rVr&_ME}3TdG@GG~8D_qa}orgSFZjrE7!k_UezjqZDVV@0{As-&EcFN$Q0P>87aBp>-rh0GZ0B0Ji{lIlI?$m%MF+mt6r- zYEsKcH_#BT*Iq-U$=o|Oo!gCogtlkB9M15i8Gb@C_fE(5p+|sU4TYXr&~3S62Y;J`&QjfpVjO$Tknkvj{Wj!ecR-w3-autq+1 zz`73i1XF&dp`05_E)=7+y&_X;Rj0z?vvgPVG@e6$U1#|Vq6d@oxbaLrk?DJKz4HX_V*kMJ~wt60fdSH)EUcOqJ81>rN8I|g8qIOZ-bg|Tm7pd`A1MgXV3=y?f38A zF@O|e+1i;e2e)q;?`sWf`%mLQZ+Qa1G5Nz+XgDLEauDWQ-H`I*;kn@u{CLR+5CGzh zNIFMIDnUp68GP{23IPFSh(;_y27uSNW&`s~z)KvwpLBUOBiVN5 zR+MIjq-$6iHNMcV(%-2~O07-xdT`k-Reu4nPJrHRZMzs*x(NM^$#Q0v8g$Ae{N3Wn z5&i}O1I2gshc3HEbK8VqP33dlr2I9&$gJ*7`OW8SthS{h{?7qaI>Dvwic&Wf$<0%gvGvKyiYE zw1A(N1mRO`o!7+VBA(;GxiE6RI3%-b{8K{$g4=B*Tn~eAl1_Obe93>O36rSAks1I$ zW}u_9stnjw2Mjdn;X-nrACecOtm=m-jZ8zocR0PAn(VJ-aXQ+wXsz z@8CHKz|E>(;k!-%rz28ZRjF zyuew6{LKq8($?nYFRlS%`K=AqM z+QCP6yg{XnT~EfrC^xSHa1(**n{${D_zyUkOxj?L)=%+%_cwyqIMgjHEN&|U0cdZJ 
zY~IEJ^uQrmBosr}X`)%5rWn!*GR28b2c!bjZnChl4gyd*4j=2PF0tzUH|tpN1e^!Q zbcu=ksPF*yBS5+(iZ8(9L4e?@OD_N#$De1$6)rs=ZS#S*7zdN3Y5ql*QT_Uf`KxvR za|jZ+D2>aD{ApZOxKl7;-XFt67&E?-U`_#%j_}ltVD{}R` z1!|5zw(l03bKy`BE8Kf$qWH^97aDKimd^pN|I%-YzGIWhWXCg382=Vkiz;WiUIKci zi#`rI0>~()`0|1KMFDpcVD59+kA1vvlJt{P(qvD>$mr8*ZEbTdRC({bci@6d2yh{)KT8 z{=Rp9XO|LavpBqFc>t$K?RwIr=AQ2iATtF3n%42qrBE6GJ9@%Q?fy{2xE!x13yENo ze)a>8s@fq)LlkhdKyReM#9{MF(ub5G565uB9ZP+Npx%u&JXu#GJ9%|i2Zt}wSe4N7J zq+D^zwO_0}-%=>06ph^Nn6?$ap>u3qk)RD`eOeW<*V3aL6v3dC7Q`8PAZ0r{S@7NJ zeCPuz)1@P|YD$uP=^($hHr&H3j>u(JJOo~$u`lQmY)C z-ac}xohz~wCi`4w_UYdHj}juG;$nq0T48owlPhZLQqM?|gx`B*T|J-&8W(8Z&)1<( z`9%{LVgQu?e1$I$B*grhB*$7_p(ZeOTea6QQz2Gl!D%d0Q}Lu%pwq_;`Rz6#Z_soLEM@CteK7o9nHD0;g{3Jj8-Y6V+vL9*~ZMR}FbJw|wf2HWCwu zy|@s)9PMmoaq3%sK{IXBg@1uVgP6hg+;5z98B?hEuB$-;P?c_5Yv374J~uAFq10gF@87)xJD$Ri%T=NzM%K5&w!{Im!|%Nezkl@XxTXA?i&-5r-)( zocUFpNC`VAtKL?)XYPbp%%}jd#tyao`4ZJ;zhiyPDq?Yw6s7wzL167)yw9Mcd>-fy zCl`iixkT2@YwUl$iereentbS~e@(a&lp+EKURT0kg{*|g##I(el5+W=VQ***nw|`P z_PND~HCc1lL~b+b^UvA@-!uF`65I}V+X#vS<~`tZ+FM{+>k3*?*=;kUM~5#z43>F> zgOM0+Y3-`M_az0je@ZyxHsBKJojemt8$Byj@_DG%Jn!5*Ke^aFL{d7NwzQFHUZ6Hv z|E#%W&EHOS%UYM#j&|U8z{F<9Z7ye)yb*o|n<&m{PAa=rSelhz< z127Ym;iV=D+p(K;4Zt3)2y5yfVh?^yrT*jN1rWsH6`yeVX=7!2q!A&cc8GT;AZ7f9 zy1K{k`aMlwC;O8vPb_)utWAO^@yvMESN}S&1E&mm7;aTqc5-J;ktgoe{)MAi17uIz zn4fe8bfQomrcNw&Tb-N7vAH^}h!e}-#t4d`*&WayKVZqtrVv7=sLsXT5!-3_XXs$# zhc)&(QKIGp<}!)2%NuYVtRoaHNIq<`uPZnO7p}?FlzYAWazu*-#Qvmoy7NTeWEiO&3c=O-kYZOl_*IbH*-;XzOG`yjR}p?Tv37A(npsT zKTCd}Ue((86T+uM)f&^loZEk_!Vs-kx zz|T16VgJgQ^Dz#uD;*5q*zysX_COptThf#dQxTS_L_jnaQ2fK6N0LXMzemuoXwIg= zX}dO;BRvS&CWU^d`emxB7tfB{*kV7V160s!m_=T~5+y4mC5wN}q)io|3*W>qF{00{ zB;#t8=RS**0;J^@0HwHZ*hML*d!Nm&#B6O{hrL9HjG&jFHQ36O%$ncZ9EhsXfKAzA zo9T{ptOhg9$5@66GjwQEj$bntEng(gVnEqyc=i?8x^hJ_U~L;F(5)AFFlY0RgBZP1 zH?&$Of}OF&Ck4JRb+mWB6`L-4+Rsd|qC-SeKfdOK|GmCa*{~U5>r6ilGpHI=QBd@& zQ$4Qsz}tf4cD&p#NzF1tpM?X(2O3@1Gc|OO^NXZMU7I1@Sxc!zn(Uy!2YbHOV3XL) zK|XI^*(ihTxH6whXkDwQ=)SK%sQ6AwHAq;Mcnmf)^h5ebJ~nMG5E0XE4lE^KwUI9I zgNTuKtRrSB4ZB$xr2WzDIz&vg9#G|`8>EH@&} z3p9Sm05!ng2gT{wHPu7-EZ*1&dtVD$RCK8*}^*J}AJ3{12fT($6=KlOrcqv&-tI$~i0A7acx%@_u#@-cfe= zx8*+a1@_Mo;@91)z@p>xHPQyUQlc(%R9@69&XY%9s7-AUM&gp_cxuJqePAsKb1+Oi zCXB)g4*kxN*^mBC zo&?erPk_PFEbVYu;y~_lV(jdehUJa{4=Pl4a1o>I{<=%f!*HJu z;1dt_zxynfkKCqLDrbzlt3pic7-#T!C%{c_e4eSiS>#l#TNLd{v#HB%L8{a-uWPU= z2g{HdUAk7^k$WgpZxa1>P7UOeyOiMjMZ!!b&@TLQpj~!TmITJ*)YGvvxvO={zGAR6 zGX}$j_=>q6ktpC#7|1$ zi{T)8Gkx>81Gq73ZN@W5Vl=>E1Kf@fmAVOIh0trfE@ETIMySI!d)xs1ZtnDE6qY3w z>_MiOVA_nBJIJoEk~V|QqwZr55Bf2>Mc=DcFBsP-2Hm=$w4rVo`!R-viH6tnL6@b7 z(qlUH9e0SqI>RL{j!8s!FWGEbyKn3TNia7i=gY(?_6!>@s_ z?XI5%?B1NH;*AvX9wDxtcdxyEG*w1YeK$P|F0+*vrzw87s_T0eY;5W-HRpZynTUT}DMYMI&ZWnmSwO=3{^;w)07ydlK1Pq75 zv%#$1_jAid3^M|2Xj$|;g^eRB|>%E62mzwNSaX7BvSi>@K+7jz$hKzk}KpBPUY1(zH~lV(kScUhJt zXWt*k3ZOv>05`Y(3C)6$X&h>bx9ROOFqMMgws@r>bQ4F=0Kvr_yz{^At8i}`Y~npA zUxPeYk`vXTH}N+74cOl;ee3543fH|x0>4y4t|(tTy1Jw_GV!s>Zc{rbb2nt2xCK?F z2MY2Vc?3eh>)NlJe#*=epRSjI5tXjP3)Qn3x53~?sj@K4aFvhm$d}dP0tHb=n82RH864w9)d++1b7lcd6F@Max z@`JCRxBZ7w!fTn$*C?0HWi?s2Bv8r>L*qla`59G#GFhaKgHW;qCiZr$j)dkH)sJsj zMp;!IQ9?VdeEL-9p)fonRengEIyI*qG!&A@zJv~ zPb;2d!J)nZb934WuT*MJ5~djJWx6b2|3_!1Hu_ZK=VdW;FoVTWNyLCG*-A_$gV3UG zEVJS}3UBu8Xm@*56cn=vFuVJL`*lO2zeRX2xOoqGJ7M}sDf}1sxM$x^?pb?2v=S`P z6ilm`?A3^kwSSxogcc$GC%L<7ID+X%WM&~4;;1$Iw3CMG56DyXKnEU(+7?XS7PTc+ z1t#|4&rL4LG( z!;X!EZnC~uEFm+>>2T`OPnWL3WCjobGrqw-grey^E0-i<3v?m(eifY^=V6o+Ea{bP ze!#IqE`6H(Eb!km;>~%<)`D38$%z3(VchGHzS<>H_TOM6W!0iC$7xh38Ttv}PzR{D 
ztOE1ahP_5BeSOYX2Z#_-blYG#vUOc^SS&eM9&y-6CuW!^@#E!7ey`MD+O2yY%)*I0B!~s<2praVGKein4=IS#HZ*LNbD$3iu?GwYiCKmq2uRE zG}@PkPUdX^$lnpJqm?DH18zC36f3ixrWvH#q4zq)t`X<$v^k`V&c|ZQ9fI3AW(WP@ z)miag@8^i~58xfOv^dnI8bION>*_VuSW8&_^b6dcgk4uk=2Q`4?aJ-{kr$1j+v`Yn zs9?!K9CO+=M>%l2RCX2jcd1=THcclP{lHH#4I z^!AYm1wTEYzxQL$%*-@DAg!~fw(K>+mcH2w-*Q-U?p)tOuEF@-&-Rp-wBj|}X3J!i zSQ%;8V6$cucOvwAhWF6-fX$0%u1|>0yid38oy_k&;3~0};=NwVyTp!Q%MoQ*rdGw; z1_k#US8W}O@Ujub*u3M`Vuy9_BU+h4tsQic_|N=2^%J_dH~KrYUven?zv$Jo>;Q>K z-L+y8dTK0$k1n87}EKr?%>x;3J}?w;Cjk#&Dk~6%NK2 z(G*R9MQf&&d=X1TF=G(*MD=CxIltd7D9{0+zxe5-vkMcE8rh`x=xoG@gzidi_Sb7; zVi$tQ4iNLNsjGgeqbrM!t>P**cO-LC>IxoO`Hfzh1{iqJv-7Yz(PS@F*YQC)^Q&Ba zznu@tI()h7$`$wai@c&z4Idt`ccVpj?c7Haidooe)FN=_0RUo zD=lb}kHFJLn_O?`5;pJBU>rJdNKH(&z-aq=A;@NaPuagj`8Bj*AjLb!{k7gpjcH?G zx>X9Y?w9bNtuC%% zhBGKfrrM?g>KtgPLsU*OVsZ-A2x&05Z4p&I%H(9CTBm=4a27W2gmktyyW}A>^ttkk zHz9ZFx#CQb(})cvajmgTH*WaC$HD5k;sRZ(#CWRGsKzW3tnhGQ2M0+`sQ6BqMYSf2 zFI$`ra=l>}K5tZUuUv6f)EV3(atZARt*MPm98Gl(g7}UNKw6^)I;i>VH%3ejh^k61 zKnD{9%z$0Nd7CmlKHCmr4pbAW(7-k7M29$LCF7>u0}ka>-|dh2wcGM`PdPFJ*+KW-MYNOM}?lgWG}@9o#);%*o+tdx|>F@bPIapKM}0u=!Apg z%6xCA=}-Ry^+ZU)X|;Roy+0};j@^c zGq&^k0)Myw$NC6-_xH+fI^faW1I|do@v$GzulQ=d(iI0VRuhd%)4B|BUeV}#=Mv5H zZwVCNDUOFQ*!=gve-}(8yaNbY(}VCk6dNFXpkOLw9H4vTA8Hu=>tH1?Nx>YuL5MP% z@t1v^5B(1#`MNMf1vpi&RVAM<(_5&X;(rtv``Qm53{?JKPQ!7S1*efNUcD7f{dZMc zEVx*fJd)7cJ0r+}bdl?Z@K`07H1I)g3cwU|&4%Gr)spWKA+m)t_n6&?qHKNL3<{EpR2C&S9 zuRhnK&MmTXB}te^6jNK9q|b~i#Htw&JIRd`6MVjO?GD@h+_Hb5zQb zU-4ZsPc(!bQ0L?ZlN1{H2*SI6APv=VS$gPq!^VeO?N3ryI#>%z?(8doXi<#=KP$tw@rBc-wFsX&K6vwHrd^GJj13y!A-GqodJ!;_hf4$*fb#QQ-UUEZ9R;1eVWOF|1SCV-{jFyRMYZB} z(_izLS$W>c&`>GcF+e-OB+;1wBGznnW`frg5@bAtk#PNi)N*1>FA@aA-v!9(ZwRju z6H(m+O1xF$yAAmWkV(k>#tVqouEStHG>cw&#t3_j!{Or{T#KC_Kjuu{&SvB)dL9Dk zfu(IRAd^?tVKX)rrIKfLKy@ANnh(4>r=4j7J^Rls>G&|E+0FY(75JrpE!Es$FzI&& zP&gVH=7o1_0v9{=<6))25?j=Uhn%`Dr<)ZwMLaFpYEU4|d=O<+@vpuLxR{daZ)i?F zO#k34R|dxW`*7M8%AUPjs1_A|0N<+*AYuT>Q^!cxJ9v%^aOYR)T%2}wF5Kk*tz8x3 zy{FM;TreV_F+v?!CqvsXmO+$*Ly`X+lz?y)sJjW~Z*?9$Yefw(F~9dVeo~2xs(OB; z>3#wtCE%3)T(fw+KmL3r5JBO;i*TnTpZx`t;Z8996=DGQnSk&w7!&t7?fzdQA?^e@ zKAte{-}58jZ%7t*M)k&Da5C-$@n2vb?(_f8N|=aKg(FOxnR#?w9f1LXc?!wwp8w?#0Y zAY+UU(pf2}>`s(C>qrft(4$_~AED+MGAH|DR@Y+QRj{rGbwEyT8H<)7yDQ zsHLj(yD!fL)30j}%G)tMJ}8Vmbss3~m!7rumGgKfz7F9z%su|{>|KDsQpz{ZPOE&CTS6gpM`_^h{)#-GTHCtSe z1(ot1h*1wE?m;^tcAq6(ZtlB02$D9whB)~G_7dHIa|$^8IsJYxSh~7&T{dhx9-B4@ zNAQK&PK%`t6}o?kPa~TXWqK#BD<_&e(IpHf1+!Yc5eFkZIvVQwFiLfABrGFZt;3JMJV)M!Ibr^R-omPeH;H z9FFBESEHuw`ox6|9r;T&39U1iDs;MQ6e)?`N1=HdWMcqE@V%k0Zz(ATDKF z%DwLU|NAeex#t8SZH`k^M?km%ILj<<=I+Rx_rJ~e7!Xr#THMU zS=7drUZ{{gwNb@&AkJ)q$1!X7iMu4MP?#%=r_S`|jjbhm!^QAl{H8Rf=Tt}!jTJsr zbUP{lPV|Kjb_P*t=mm#;yFZwdqw~yf;Ipl6>rJTjj?XO5=kdtYE0eqWUc*#_EF#%l zbQ=FCYL$8&^XKaqVaqu`CfsbzUmI~%?>$=C#Dnv0o$d8rz17So-IrI_#h5BhaSkav z9*byf-R~%1T|~a2XV9vs)~`c8V(+9;DHz}Cq2g~3%Aj2gQQpj^$tWx_%#Wi?)vPp3 zYF^SR;`n&%M5985xLBAKv;Pc+$W))+X1?=~ABD;z1Bq+e?b)5wi10m($aC*yS-hhHBFtNpbfisrlb?My+~kD1b3e%= zcnGy0taww>9EJ~UFhdLtQ^#bHi(#DXvgUgMO|&!5W~Spt`(=^X9e_&i`LY+}&gOAJ z>XiFNh$J9G&+0-xLb%JDU-OwW9%3wGj1nK=?q2k!+}`Ff*Yy*>rnQ^u5m^C=5KCH5 zTf=xcxPGouAsDDi$Z`t6ze)uyT@s6uFpY>5?@O-L7nwQhc>CWAlXsY6-PHfvl}VVH zzR|X5`6C0r3h&Sq({9_?qxOar`kR~b(iEHK4rzW?-OB`HKomj@buzy?WGTj^m{3p7 zcG)VbV{$))7A{lv{Lc%KoT#7dqH7lKzkrJ&A)U*%Lxqg`s7s6Eml)`;T$#A0c8h&< zDN(!D#+MX%ws^Qb-Yb5l#*+Mg+c=Ekotn|5)l(l9t5f{ zd1eQ%%R)Y~oYydhCPrR8lKy@vAffnF+ppP+nNU+D{~$T9aVZl-IRLKV<8nSp~+ zRd;#uq0~Tfe@6|=0x|~LI6}qy%7j{YZ_uR5)BD0a5_J3+MX;W`+}OVi-5d!lpDug- zsTXi@(pTO*q=OFJNR(0+NSUkojlQFcxb1ggo&*<~hzxd!7SRyt@kN57B&tfr4kQV> zbI95E8nW64(dTqp;m*2_1X`wub95d2Wi 
ziz)Cv{Tha+_eqFNX$_}P{Y}o41$9V9^!o8x}~8NB@g@$9dfiROl@mE^}_7HS+raM3D*ub4D$Ibm)on) zJIkxjo~_!_-map?T1j_dIU_yOq`MFAoFY|8h>pRKbsSo93M;hpFMJjUkM$D8FGxdT zpjR(=V7vIZ=+wW8Y1~tg*;>$rcyP*woi-_ZeHPgB?$ob-0fRt{xvzeyJThFfs#*`Z zW>GBYqm(0`afDHlNpwO{A7NBikSJ~845FIQj;QE(_8j+g>81!;-Sq$B5J$H|PB27* z)=y|M=wY%+^F}7)OjNImM0xYtiYpE?*NnCs(YClU0E3n*zbhHzs3~Ix2r2z&GDEt`sg_b5}FzAk{fjo#CUm4eW&_I9}m%X7KeC4RG57Ui$X z>u<4nT(!pzx=E|H{lG{Z%?TxJyz*pBVzmMElnzik zb^Wm@0`X!@0?ja0H+=t3zO(c>wAzv**)4DFtT)TGoXV`kK)qJNn=S+O9cL4gr9@n$ zM}p>Vih-g zANV-m$?I~KQ0Bwr+)KZNRdX5MJ+?Efx{n*yky{*A-Hy3r6J1ecq!+QQkV$}>lC7dN zGqUyhM+etk?@gwr(Zw%5YQEeAA6iUc#Fv<5hCA30E!Cz?!A#HCUZa}WeufaAc~j!b zNMe+>kWKxVWV5ZiT>rViuDE-90!ORESpl!s>tOPzX;nr|lViz_j56GvKsM?90f+Nv zglUT&TRv{wJ#~Fo4OLlQ15@S5|LMA9)Nj$~_&Ap(oGtLsgK*B{cJ8D4M24j$_<-Tv zUl|(HcK1KBX-SS)>{I!u^^bx+HK*;gUH63vyC=doz3|R+^6S}?YZ%gftI@R>@)m>{ zanU8S^x&E8FalU*wfb79!ToxmsW$I_L@6RClXczHFmlKro%P;r(}xXXctk0nUctaP z)8(Qe&|4vU8fCjQHghLtygBI=9O4_!Iq6yN+UtJL$DN%`E5j?(Fh_E!o1aO0zVUO4 zIAr#KpRaXpgW=kf$>$cfOisgtGEE7|cQSY8+2FD@t3n0M^KMnfU6leF@0G0|oAJg; zmk7;o&kjA12RLPj6zZEoj6i$Ghq1Fa8Q(QR@U2fTI_?gB%^y+-wXqJ49XZL4aCbAy z280T>{=_mXA31Uukt$=NG_%jsqF*+5wCQ83aGvSeUncYw6tVVJ5hC3%1)y1+^=l~o zy>};V<4!hu{aPb(-9V#>Pd4g& zWPZuBb|tX{ciPS6bUn|FsTqD@RS}L@KTM`IK90pB?!NChd~l@0lSZ;Y0W-n5n!Rx< z4TfEx=`c$=e8oH1YgPz+|Jm}M_g?YC5V)8921EVjE{Y#B5OCG3eY*ZM{#;yZa}tjr z8u6)l^T?t>BF&RqItC5=*J-mU}rMs9;=t&wgPabIWj@9Oef6NSy2H)nBp$0 z!Gn!p+SYH)`^8>4=w8YKj58v=ytXD}xqR6M6*-iie4gLusm?6?gRLSTf5H{6bCT8twSN%0JILN{D7!!9h$y2ilj?QhR z-xo~#riX^!URNcwmTk%6yU42{#Swd=2`la&f<}Crq(1=+bDNAwgjt>$X}zq5Kv6aS8e5<}$r4C}+H5z}5(K51u@FR^7&uCF<|YGcMc zdS!o=+iSRJ>FE7=>SadFHDr`(+DtS0Z84v|{<@S6n!DOc^6hcS#*K){^a80fwj$@X zMCTgdW>HQOG-n4xx7U(QUNp6u&N(~J0wW;i`HYqY^%;wZ3jA# ztm#!W=9^#TSiDN{BNlcz5t6*1ot81b5_rTjgyq_^ZDB|=*)SS!5&a~&#G(>LvxyFO zsoDk-2h%UIdgi6kw_~}7+%rTNv}S+=Oh+PR#{cC~)utUwqUmt;SZ-Izw;<+Uu`4Rk zAXC>pO)8%XgqGFcGtwSBDk346$iL?BxR!k}E|^|aw0SZ5uObUcCkO$TRot(c1#Fy; zrkU%~Ht10xcwf2B_#lTIz}Lt@-50wKZN1s;eu}8P-ITi0(N}mX-R^$62EnJ@->Zj;aMDpnpRXb@b#~UitlX_R<%D9a{mXm1AQs|J z>?FW$$tzsAwKXmbb+Mc~dI7rn^2?n%&BweRMLgUkV+JcxHP6_(J9?==I_mE5ckPYq zC!@Ge-UrG+^RthANltBD3Q}qcKd$j>)_uP;@6)tyo?xy2o>y}MZJbZfo{mp>(K7O)?wRlW#1+CX?=+d)Lo) z+XBrBcg|y@&$BOlJ7?%jDwrP~{N7rWuj=FbEIQP_KM@LreROcGne5m}m~jflQFfp; z!zlEOQPx*RM14JLU5xhwZMGZ}#n;@puARf~mFrS=l9xMtITB=ni(es>Q8|J(?hY}D zwhLIT&1Hi*e)|4|hfew@)Oc)p(NUCDn##jBeY)o( z$zueXr;O+`;IkQ`TtmtS3>^$c6lAWJB_>9HW&QqMJDvg#WE}y?`qbC+uQI4pxpT!t zn&$R*_8*FbRBYRW%}dQl&NJcW715s>5vyexf_kF8;Z6C|W80dKx%w}8{bX&SFgYVI zl{5EBsmLpELrk+f%`$C=_H|FxUr!jea9;2EEuY@*OZ31;024i1-Z4PhuOn>Ko_b6W5ZVv9V$)CpI==eVK)}N+^F-Dl`)Y}aX zjRpU8Y0uyrQ-^}Yk%oI|3fE(|Li-|l1rICPH6g=w$9p8z#uU2FvEz@55XJ=Ucx45! z!540~=pBH`Ds`Dv_=^eUAqu~smf<5G0RlEQYEm)Ij*ozj0U69EYrm$gm8nuzVilvt zGD~xiOT5~*k|x2Ok~9{9vgI9t_6BMyL4u+mJc!AvYo5zE+TDLSmeQYC958y&r@;o5erPGSKMlG%n};vQ+y)p(iz3mxt4%akGPdT5T$c3l2q>eSBVq zu+1A-CZJTp4nmnpa5xAUDl`R0gdz*P`oC{n=Tg1Ks5Fo`O+TElG`p6=qQ*{>^(V+?`J@(m!I6)cFznLqLYI%+VrN{CFKoY46hhWcD3F zJWSF)9lOfk_eBSo)+OY`x29^vR;s#ZPJ34XiTKw8d$9>L$?zZ8Z(ke7%ED&5mi?3Q zh2d^PTlIjHhaQEM;3aa}?GsM|LGS4ltK-2_Zsby3b%i6nMJ=s`uF9x<8lgMmk~=KmCjb%#Tpm1?`#=@sNH-rsr2O%0xRIq?55?MW3R2M>-?Q_UJk+! 
z@l43yl+qxyYjjgbKp5iQ1IEFitZ+qpR`WgCEZeg{*=@+oyxwvy<1g{Gmd|g}eNjSl zgjlKb@blQZ8w-!+Iz?hb`!y=9MGIjP&n?CxS_BczcX@ab0ddPoq;zDiyHM0^4GJzN%WQgq9M7_3ANI z|KUk`g++bQs{oV-a+FUEhLAss-q|#abX#n5VC>$<^IqKAt@9B$dn>CK;`b~&E^Z}u zRsL0)5|8*Z(?}OhLugd%x}J{tE9I48pOamOuz1`oX}G23fJ&+H`680Q0#=c35Q?@= znol@_1qggZt`8?@%gxP78Nkw%xHn2g!@ze1br%Bq7@U|&tKCAjHkbQS8J0IDFf6*1 z&>mT}D02Lp*!h2UJ+UcGsdXIyT}LR&vxjlMSHjIa)=CG0oB)yGnRAgbpPZu^+{wCA z&tgxoZ?Zt)@jv)x9V8V6mTR5DUG?5nJ&@gel;Nw06t}-k9%bhnly?V%^m$qkoi3RB9!>s1d&57N$@OWSx31{t4Mf;Z&R9gZAQTJ5X^(W{i<04XsXv)+ znUb3TBllvsG9r>Od9`GLb*renYSQQoLI}^lecJ2)DG}Ny3W=86Vh@(=i?T7{%4lIR zL;#aw7O|86JEb04Bx{{@bUm}udU7nYjx(g=;gjJUyKpH~vTxODi@b{o2`PkX-xXK5(MSY+4BCt>ERPZ_mf7*wy9vR#g~1uYLL| z+dZ>fWKYv5qOUat#i_Cs)Gk9#cC$C{Q8^neyu74Y&s`xDC$Qf!9A$QVO1L-QHrm7H zA@YFYM1#d!XuIjDt~(dyfd~Y0;#&tn7-TFd`G~7kJ3Sk|t!qC9r1raK%OWi@gxU$O zc@Gle!#Bi8#<9V1(ogn42c!pb$~h!zS$S-;*7Z(zSC6A+?lOdLY)o=EhL0LIlL_&* z+_bns3agmEO{%#fL_q_E4Vs>|Pd6FVomdG^$upSm>Zc70-1>4~zF!{ zC=_*dWSOI_@OZA8xoG=`SR3?j8ExVn-}eWM z>rX)*8{bg@6V0DL4mU=bX7)k^uTi`K?dMpRkc8mx%cq$3f3N~~x^dn19WjadyLe)j zM*J85Z^f3qtTw&D(#ezpwK40^$EF4NB%^ZT8qi0%x}K*`O2*9I&LSHe(&SextpWCocGT znq)V&rnsYP$ z);cuWc2-?{GQAerNqt@8SXh!tZ4lZN9t4SXw>~(62nc5z`Uu0awnakx8^7WmqCGh3 zR7-d1q*$0ZLA}us>Wo=Y!?O|lsGMHtZz1kts!4baq77H;YX7#OJC=_Sb}k~B_S?(y z`GaQ<>MB-;mIlM-yX|)tu?_||0w+ZAN-SZE?=5r(KAa13B#UNEO4j|cb^oavJ%S~! zWu$lx(fQkt+yf&JF#LK-hImqsS6~7MH7Df1&AQCxdP;b3NoO$Z%if&?6;o>ZNQ4NZ z)i#Yw##9;kQ>b2(VS|b^1y+AJ^#Ky14b~-Z8dJfzHcGSe%~v*uGfB$C@HZUW3~q|1Vs#aLl?5jS88VMov(9BwVGGTiMtCHb7JA%-k?Y?ZkSGMY)_}h5;t~i`rYaJ z*{XIw6@N*Zd&k@gC%qGk>i`wWuy~H~WgP5vnQ)Pbumc<&jc{Zwq<{911&AtYpZ>s= zpB}!V`n*E^Rj~kLMHJ%WP;G3JS@9~yH2{N|*1_dOvIvHgEVY4Bu@41xO;Ph7 z0gb7lvfWW!m9S=9zy@kMVp%P#9qX@0jbo)& zC2&_H2bvpAl_n%tk%a4DIzEL6hrOn;G1#u7hODk?5jbipGnvweYSW?~UWbdjxck!7 zXXur$ftGcZ%Se|W;iA~Xv&pOv&L(@k21i3EFnw2wgMh5ymC)bf8vp^?lA>`aY$DSr7{IeqZc{`Qw!;JjTrG!PKk0P2`<{cnST zlUu#F@n6t=B(rLQqY8kS=)_#V(B9PaukXP0kAKylRTe0nPFyd+VGc#V_!AxKUQ(X< z7ln*;=&eXrv#L>>cyP!xM;4xQ=mezx`_UNqPhS1|B_M6lOlfOtieTmGq@x#j%-F#28n?X^~FpZV-?L0RicfPU-HJ?l^P3d+%@i zd;R`6-=AlUvjz^=TCDZ(%sKCQ$8}%V3|4p}fr3Pc1c5+Mq$I_ZAP_`S2n1gL$z$+N zmY*I8_z%WGNkRltI!L??Ucj3Q%Lzju6_HPG^&f%Ph_;gI4iE^YBlI84f(fZB1oDt4 zB_^!msM`-62cvphrm z5=W=o$|&%J;o{AOD)eUKgl3r^HA+5;=ioMAVf^c%$eQN^t6-#CQ&I6zB+Cch!Fjb*9OXN{N{+5G#ciz6vW6g6~94{;x-Nf{dD4d~RYC84*4;NnXBap^_9Y`oD&TG14cKGx8iEn7|Ui~b`#{BnqTPFDv-hmC1`(uKv1aP;uM;XD*s8 zDJwe;7Mb*$W)yDK$Tsho7?h$hu=xj7;&wt@U#!|n|g$$C|&l()3BbhtuI zYR*CEXYC!bdc$sSFpW|$SpYAXwqrJx_SekxA1YCjKATO5fa4nlo5W`?l_K~fR&Tcc z80P5nf0<7dA{wb+M~P;w!WUVB@d8D-^Ye3q1UwN94Z?K(558+_M!8?UWXmS8(Y`Gu zCVuuzvEI?Vg0Im}=W;D4EWf261I;Nf0zw-W{n^U;<1`UNYnPh6^1nyI?{$lBI+Xs} z$cWOw(9mvo;sv;<^z};~o-B=O3)h?d+L7$nPro-bv=0ux8O{G56dBole|KZI)B;0E zO&v1e#%VF`laPQ{@4O%KR@YP2iplx@{>DbD&i+xY&0-h0`@^MHIF(W@f?q{%C8{lc z4QC3&alD^<_hVY-x4*pL%ny!7c=D#}@EkMzl>7Ep^-~R`FI$SZ*^mD(zl&l8hVq&lsK5RboxIOAFcw3e}rA6r<7-+cK75?4zD0?b4Up$;p|L12i zv&E*yDHX=s(=qvX^^S%3Tx{l}l$Y1nA zF8&JXM&jam_OF}R%5zw5d$c!I0e5r0kCnox2B!S->Z)~U2u<~?3Pwt(U-QE~Q7G<< zwnguU7hLMaQZdwtEczl9Md%y`9ZymPJvESt%tvz(CrUJF0(32yt&XF_;TV%H@h^TC zoJolXNJ-lFmHqN=M{RGWH??K{m+LxZo31o|3JV9BN8hb>t7Sb9|?%^IsHMhchJE)-4WF zOd{kJ1{RZomsbOmS+|K_Jdy;3nVC73Nn2x-nue1T2LYA1B?O1zaIFVRN)yZ`i}$@p z>|98f# z97S_(P7VuBj$!;LJADGCmQ9-f_~b;?)|SQXe9v5M1p^XGr~FvJ z{ag&ZC3t%%Dm9zoeIE?A%IZjtM6|2^OZ&AaA|fIkED*_a(HLquN^9Qt@84hdDJm*P zbESR$j1H`r)ZjAjxQ&wZ?!1qJU(OvVj~-6i>9NXJ*@AKDf?xCns>ypsl;MPg;g2oC z9}_yAM_Uo19A0J-j(8h|xJZeKo!UzUx3Mkm!88B1A&(-j`7;Y>e8u&C?oz908u=M7 z_QU;68NL@2h&)}9q>+Ap@Sgn>6S!3-18p6_SS6)OYg=1Eh06H`6UFKv&j0Gh!o+0p 
zx^*p3p_h8t+&3yM6TEZi{cz#^(&Zqv^@T1Ib4R6RB&mtsbU&?%^Vgtg z=ZK#QU!LFZpcE((wmhcN%PEJ$c8)flI_)bne@a4!-SXo_oMM}f{UjwuusO*p26ezA zV_iEDxyggW?M=806V6s`jj;JSq-L}PGM%u^R?8l`{`U;x!Rb*({BC52<5sRAp6P+# zSY+RGqXbsJwe+flYo$o@f77Y#p@4Q-GMQ#O)9zvoPu)o8_@}#cgV~#Jz+*QZjO|F~ zw0sQtZnygQV4;!0yejsfse@j0y)U5~{6e8jM?mUFrd)>Lvzv?Gle#Xpwk<22p-?v} z)of~aye1`^`VwP%d;9v`#lig1*`%(_wE$WqsZdupsdpIoN~oMTuk&6In5|I#M)R>R zWQ>euvWQD@Z*^LM(U-hgqkpgS-FOo9pgxfBjZGG1>PJSJ*zxh#?QYh~EAau0s=Srs zNeV@knQF#w1xE`wXOJZ8cO(c4R0#JLS_sju$kyLuu0&XOv_%OF&$#o~q;GmrW#dVV zra!yxpnym|L42Ls>h$Np>1UXG>X^vPhX7qV?B&p%`>9zuWT(qk&ul-sO<z9sR;L{ zs}MlYi=H|;Tu;#5!HT4RTgu3_`@z+9EgAf^D$}6}nwXxE`G@%i7Yuat=UmO$A)d!0 zaWXE4w5&JbDo?CYL);4H4>44BkPA4a%`$z(OJFiTiZ5zrHLnMMB5-vrJCydq;Re>> z8&gi8$|D`pQAyU6=n#e|*_qKea${{#WW|9Vq=PN>1PyCqMQ$bp-Mr1MVtq~sa<^lm z>wImCAzrofXIyUdao(FE3xGhq>~$a5!HIk>OkdTL~YC30Hga@j0|8FokL z52XuS9yI)B(Qku4-J8Z5Nak#hc*e)#c53$HdZ!5DL$CT(@<4fWG*5;)Lsa-(lSkd! z^-}AnkqSczDcMsHL6%R3gaQKt5w6d65zx@kWD{9n9_}tOrc|?m8{e8N%b=`r+*ay2 z^F6=$-MpJH#sHjY&HMQf;CToo>3-^vVl+yDX5A1n~?+`n|663paNb~J+D84wD z=?e2PYR^$=;VWQR<$A5KcV{4-(FYqYS};2(=*i{H4cJ^QEw5bNHUzmRawO0wDJi$g zyurt}rmOOE<%PG$^3h7Q>R=)7Dvc0lYOEQYdt4+UNjmX#Jz&fd=AI%UVK)4*)k!p- z9F$m;*fDfB@1gJ)ao5KC)}A7#8z>zE6UY0MFr1ffTQWxZNHPJ~GxuuTB@Gt?%`4n0 z=G`zagS7qcW=Z=#YfuJNz=&pgSV$n0@BqjEWYByLco3Nfn>zs<)Zaexv>4b%1YcDX=xk_b}oTa4x9<>#mSBIpr>Me`;|zj1D+IBphGt3Iz4sTXXN zaQBOY?Xt--UD9W~j-O94p7Fb)U;JJ~fIxFYd;vF(nQ!Kw2-r>ZfVYD9fDe=L^J{(2 ziv@|RmX_A(Bo-Ey-S#MzREF>2n7q(7Ga3p1yHOKq4-bB@^#aj^ZqY%+ZUezC5F~V2 zzzp#DU2W542DU*kRq_7N7ylx<22Z;}PLTJ?9ci3is)u6#Vb`F(SA06f^3xZ|0ZAW? zv`66@kIXoR%@R!DAmQRLD3xz5-IKoxyGptZYRCN2TD&w8c{xT|Gs zDWl(CuA=og2?+`HIPuuo*iEieg4*Sb=ZzZH5vJgkGf6=9hFqDxY+~b z^inCQOG|nnrCmN=?=!&4lv}#q%85K|ez=pjVmqF-Y2Ieuj7vSsri<^9aKR*9-l=1I zj1zghFrKuo@NgEb@Rcn~i6Lu(wmL4NlHvKf6`>i2#oj_<*eQpakei8JJ~0Xdewd)v2!{pGR359ggG3x^}dq-l#G&zRqyiT$ze*#_%n&G(q1ZqVc^h0D6* zkp1N1BCw&M0e|8KILJ&aG4~$9X;`DE)(EE{qu-A5OU|2pSh>kFqr*5cizY zSmZjL)S6K0mlc^yk4xklMO)afzF3E*rW1z;Go-7ZM9>p*U%WL4?uQqXi>>z#9N2-=DuA#^EY;1i6U=AfV}EmM`tA34m>aMyRdi1W+XIF ziwON5w{YE5g16Ziq`$Y4YG^ioz;AJ!s-1YEMeWzR*4I10NK-utwwk3z|War2-S_JoHr3|l>&D0&*cAfrzXAD z(`7o1Zoyd|tNZ&e0#1<`P}Xa`@2@1huRy*fygyS-wQpbVxa|v!Pj9xy+5ihwZu!k~ zxl&kCDa59xrl7J__r3!mHTv9_#jp!GjF?Bs`wnE$m*=xKRGyBYL_1AzKilzl+MUP( zh4$g;sTjx$D)ieQ_k2&}vQECQ%mQIZDy}e!LLzzZCY#>5N5VU!@zYIkjlx&;AY8ou|DnB3D%$iQM!mvpcpVs6L7PCY78|KRwt_ zU~FlAkm%0TTI`)8e>5clMUrGaQQ*%rhvCHr&f6v)D-&NiBm?fqK$l$-d`@o)a%M^? znzlGP$$DJ>&+DtKJPEl4xraQ5mXmO+34zadG@>bwK(0yL79Sa?%V#4i_*qja=)CUR zNlYWD5jEMi2qo8GMmSt83Xx;_+4&=y^(o>@kCz0RIav1T${`%9&)09?$ex_NvRpiR zckV#Ba)2}>M1^SA&1Fz&V|n_Hm@0+ENP6^R&*8y1se@O9$d6VO<W{KzMW(QAzWwG#(9BtlGGK{NI_M&^I-+&G=@-r=kY7^o@-H%Vzr)l`3XCAT82kLr z?_jX*qQFqGj(U-@W8)jQ&8*hLOCF>h*FOEhL>7tO3UmB=N*bO%r=O4EQS9PB$Zg2X zREcI7t&R^N@bC=!SZ*E^6gH-of1t_k_n4>KtTphsm^eJq4fwp}O13OsD}-lo4l|a) zj_K0QQs-`YDc5s9S7~G=z;Lq2w{NPHf1g+-h-v47f}ltp4IN4G{o#9VY=)r}yH|ha zpz2G&2o0V<0QQOid>w15&CahPnt)STfqYXcrcINc-6FM;Wc;?T=};ui%A+g=y6*DIrk97Vqmw6Z9l}TWZ8?xpHt2L8tsha>nV4 zY?3ecf-BrGwL=;RDx+RUE3d7lD<44s?&Sjjoz)n5!Kl*@1lUDuIr;Kqb{=K0>gwA+ z`(;JN)!5kBl%ta3LKuw+*{afk38%e_&GWE0EOS=QYu8c6vIy+^sg!3#LspnBQs{*g zM=#*6Z2Bs1@Qtj}S9T2i&reX^>Q-r(I8tVxPR=cB$&6Z2nqP>8EgBAXeCLufS4$N3 z{Ul()b>ce|C^)UD`8Kw?ukQ8jLC@iR$MY<)2sRS345#kl^{B^E%A+@D78{Ipx;#fI zH|_39l7oiRlVT@KGn>Eicv4Sm<=d7ay0U!KLOmqSA97FYVx9c+56(Qd{5PD^>^+%= z+-_hUxZ$yA{P6b-Kd?44icM9g?w-GSJfvGm`q6JchlRClM^NJ;Lhh4Ii#{bmQ>}J# z&KG^>-YaALYdK1`DBqtvO^MF5H%)1$hOJno#wgb`{(85GC0`gC*Sf#E=7
PktV zJ1NgF+AZU^p*RRdyk3jOj`1saqTNYi`t4{~^>~CXb9LZzk|8-WUH3+di%6m%#z3L_ zb6@d>*29uYEm&0e-RLBXZ1>I7PD&WaS>s4uz`4E26I9a-hxLOQwJ_pJU!VdK=cd^KP9o7hG@&uYpU^XQwf&&juxx4hdtv{^S)c_iHSbnoh;jBcG(&h z9;LPbu-*M}I1x9cb%4y3=l#{D>uyOce@pLZ(es>J^Cm$v?W8Yh+DNd05{NaqxS5}{`E1_*2wMU8Cyk1|= zY&_yIN8W$`ohP+pl)6{8Bk)c8mU`e>%)D{KV}l`N`6MI1z2Kvxn^W_rhs3q$4maNk zXtrqCF}xkAYQ9{NXSxrb_}1)5yLcXps&hP$Kl#|GrElX(+Kb&$N)Vhy@D(>Muq}nuIYDb9JK`B)q~y_dKIL z>#oKXimw$s#wt4z&U_ZH;ae6+&XbaU`U!YUitCN07|27+_3;3w`ei0m{*-!m;{<7Z*9M+q#lD9U@<# ztKst1o@J>h*K@P){hP=>{kbnY=1Rgg!u>DQE16T5XI_%n;q_);eP4e^d6E?O;e-V@ zA}tKvvn}~(3bx}WZn~Szv)}D^SeK&QC6hYBL(QAfyVxY3&p!8wSfv7AERkE|btT$W zO&0&`U8%2t92;82<3IiuOb?tfsNn$kz5*aJoOT~B9vV=iAPZGZO}+c4%DB6`SgfTr zm)r!-ervv=_Wt^t`PgAA0;%Nnnh8K#rkcH*@2^3v;sYuI?a}^Uc}8=joeg1wpZ~`76rVb4Jw9jdm0~S1^`BM(*;0CB#wpItFFmUj9ZF zf7QJQ@T&5%S>b3`zZ6Z@>E{@2mOUiJgwJE1z(AbOZ4a6tdEWxk5xwHUcO^U)(SrzCp9OIKvrIK zp#GZNEVT4kwk9{IeWrt~r758~xPuH*9;j@%+L2}2*cipoH{9M1NAkg%-7!=-2{RzA z*`&>B4i(;eo{Ucs1|rRu^GP@x7Lr+mhP(Il^^#;#2Zhn^Zq%5KHkoEz^_TbcjHj&8 z!LqLUttMngPo^(6uR4(~Cnq0aR$@giAw7gY^@9#H@Z_^`Vyw0?+AC_2Hi!6|(Y|iG zksqpWCQ7#R#xc=9aPWQXZE_;|4hLbwATc+{s@V!krRtpFN@zlOVQdBNWMcm`qRZ`1lM2^oVyE_VJoDxkTH=-th=Y$?}UhbFyUEaJ4j7oL`rOPuWU( z#r_FDtCKS@$RG=FuV9)5sL;pZ_u>H{FhR@vbSrADzOKf4_DeIr{hDMbX5{me?-3CZ z(Yueffu&yefyff~?N_0)g!gngQgo{2q<#Eufp$Yp?traSBQ#SVJ!hli>XC>%n95nU ze6K)j1zA(!`l?h%J>%km@ODLBBql0|s#gL`OBO|Oc} zVSxhe5hU-eNCc#4pb%f_er>HHQ}30{oeODS9vB;L_KZyJ#eryb4~Pq~he+^5YWjGM z8ZJnV$H&ct5czmc)+J7G{XX@rv^F(VWS-}6L2`Z^ipX|!X{Vkj7}f671{>x@@%sjc zYF|B&Wj^g&T3An=U~T0gu>j%w8B0V73Mv_A<6Qh(n94q#&{0fiz5#%-r;mT;*-C!! z!t_Bc3sMv|mtRw_~Xx!EZ(xVl$Ih&6m72YHhI`$$LE_pH`IQ>V| z?ftA|3?Ly0U=%HO){Wh-hq%KY(;PkNrS&_QX#6mEXl_G}^HmM+awle(yn>>3onlN` z&(%#`iCX=qfi;bx{SCj4ow~eiN}O^Oo&L!iPu~@G*40LKVu?CZdB|Ssh)rtQ{Ra}k zs47>QKa$y~YI?O|+JN+wT*@&AiAQHY04PdX_kQ2rbaOBb4id|#j+x3s?;Pbg^@osyO zVY@rZn9tm_EpnG_lF`5*obOtqw7TfMIm3(5v6FC^kEypOR(zb%z`46Y`O)ZL;bS{4 zb++K{e)x3zHctMJTKf;p6zQKl2Z~G#sZA47kDP?4L`O~f37%WPxHf+TtSaeZ|6Q2BS-|8ZaHR1I{Um;BI2u=Lt|Axiq)Ubr2>&TiU zw(WPWQy?7Gb|m^ojv1RU7wwUqlLH`Z6j10G7#MVX)_Fg)s}A~s zxKuJ5*oCf#E%hD3TvayeQ+eCSEY{K+4G~SZn}=5ZrF(uUQxPadNgiP?J8j;t<5YugCBtcmpUNx z&g_c%npL}T*&N|}<7SS);~d+U8u*af zZ1h5++q9W21}2zCE}6?JE1YECtY9o0b_W;>+Scq`#>E7;2Rf=|LAcA#c@S0E zQhBe}DEqah3rhs4+6S?`NO#fR_{o{0HQR`wo1V2!!wj6MCLHU%#Vft$=0Q!cr{ z3A;Ej;XlnE3!h!1oLK+j$P+uJJt*5HP=}=ZhZ%|C(yD&N{`$6zDd0>*R1{!_Jtc$@ z--+LOu@#R31UyYE!JFOqc&%q^rkVj%z2sc$+n~!8-767Jw8?pNy$8BiA2Tzb-<^Rb z$`Zgq<4#AC1U>8Tk4v>14!83&3?@spgTlf(z)Kd#Ed@{2T+nzSfBw8>X@X~CC<6e0 zslV)feSKLRHe{tT@U(#))jQp>W-`&Ku=Glyj%o+AJswDIpkQZcxQh*I_I za|;t+C<&VmV4?6&HKv928O0-x>osbB!DXqB2(|lm%f*V*pA*#7D^>;=-q>32e)3v1 zG5j_7Bz1;|s41RNdt)zE;tUyblyE9@k*O%~X@_H@0M69&Sd3clv&8wL5Dy~p0n?X< zrL`Rse)|Kej_+)-dlPVdr-(>Lmy;|>6N4x*T-WV#36fc153*SPq<+!0cn&KN`)Q0dalhC6n*t@dFoCD+3w-T%j`{>{_~9;f*8FX!F6eg-Fb0zC5Q6PcLgU(BUylgW zb9H@z$B9!yjC(w6HVz04MWh63ZP=(zpl%{^bi8G>l-pUf$|D1K@LkX*S zeEnTHnd4JfK}Sy4lB6KM8VrZ)>OhIfo+&r3guN8gMo)buYBH-3?wN~}PbDLWpGlmi zWmqNORfxt+faI-#_u+XgeeNfy!a1{*wgt;>1K;+H>I8AJ8BruXlXq;1T6%$uqQ2z|lw3OwGcev@F&kACT z6EgxJuw{G2x+Up%LfpZ6;^6SA>l{x_8`%Y7OFPUzTO}At@tWE4N=lf}HfTUb4`}dJ z7<3}BGAvNJorU1dnL33ZKL~S)e*DI<6w2df0dxY2JfYE(AmPEj93A`$8_HN5rHQnl^JcZnt$oINK@Ol)W?FU!gY8=jMYPnEM zwQ?_zqzV&C+#?EIL>hU23&NKWaWT7)BkF2V2wf3K8B?*d!K%*xtw;EHly zTc#`?1#Urb@|R=PW5deD1)OJsrynEzHBY{-JcB7%Ldq!AM7MK9fbIU!`l~!~e7^l1 zSFwN_V}x%Hb~acUvS96vQ1d`X0^l2V*%6DpP+z-t(53R?d>r=7n!?~9f>D-e@BXFu z-3cSkW;L5Sx%L-|-nNp(wNZA>!d9kk*D%SztVPjtX~yD0`=jfy_|Vp)i!A`!G!Iw4ZU*nncoO*5@Y5&d zB)|5)!0g7~B4X>itx10F{_3v_#2s>u(k?boOc9PE{$vj^PJWRiie=W-LEZ&jy(d`I 
zvMm5bqT*7w`n}KyZM(Cv%MIZHN*NNo|JV(@C4T?;k-^PPV|1^M`81QFGE>NXWo)d(VGlG&(j3&%Vh+q6gsD!@m4^h3IcQk z!&cu%_}uS_ahbG&Ku64Kz8*3k^aSqc4>1Agf~;G{G1bJ2_E zO8dUs`acXRSS2onZk(m*9jf7=J>p=}THG%c!abEKq@%$2T0%bc!`eYAFFQ4FCfb%# zqd{TT-|)#~`fkwOM_;~$W0?V_r{(&oMfYxd(OLM_vz4aiqj3ti&N41k4WseXDXkIw z9$V!K)iMET0;!r8?R#>|FP$6_a6Qb*TjmGS@Xua9IsUyRE0!vl-%Eh}al*?6Ci0jq{JzpBq8raW8`_;5<-)hDm@F{RAHNEKvkxd=|+5Z0(R!5+@*Ibe~Frt$t28H~+LW zTUAlTC?=AGl6OmumwhVtS0pl)ul@a_FdZDk!XG+i8BAq^i69fNKY_J$Q)<2T5+1(It@T(<`jG4BBH& zNB0!tn@Ah!5~Xg)@rkItUE|k-a}JxJdY<=uCnYPr@e!!67FI8{wUZ^S{ck8U%jTu!fJzUpNa+o{ajOKES6AJ#$N znfN5=&BTQ|d7M*E{AU3eSMTzhsczxBone0(ACyd%&;e)~q0j~eXuq|aW_Sh!J-ORU z@KY-6092WgGF_o>Hj4{;y;1-h4oFByz#nvJbUSNfDEpD!>;=k%V$Iq&qupA`sgxmF zCF)h;N@oRt3HrZj9XpH7&Hr%HY=18ZbaZ!9u&|(u1|SQA76_|e3&d)w9EOf=5XIf?)@ML2tTF2`M~FUtaTs1X1m%tFF`0DQTePU zz*g{%^6k-21*w}+v(tL^%Ge}@!f^#6N)jo2EoSv>Sl}9jbT;53M+Ztxj@;dt?gK1W2i#&-1m&nK7e-0(5|Artiax*MCx5R9Xl*%zQq= zA?%a^RA=)E3d-y5h-a);Q!G-t5J?%CPYn%x8ny3VaKS+K_V$W342pD{y$lLoP0h@# zR1FEnbWD_Lhf+vHNE+BLxUQl=nW5?F>07h4%nPof+}nm4uC6a9%5<57J~Ha7zS!)*ybrIPaU-C(`TEq>7>6LBE}J_!16k(Hy>ZhRhl zO0^OV|EG9N&jH%t{cy+ab-hgkD3J(vHy0B2_UAk5fPUo8BxU?sE=mSkEJ}JRsB8R^ zY-TE^XKVp!TBIMdTbe*sk23T|RKM$|0g~Mn& zO~cYnU0rdAZFts-CwmX0)wfw9?&j<7a%O#c#P*xIH9KGL_Y;XvMZjgWsZ?4S->qLd zu-5&Ez!qcD$}Bloc3j{5&NBYP31L6al~@QT*l;EnbrUt6r%#K(_ROlRRg=1}X*T;LUu%F;_%cbTA+)C})(DpmtcpgNd@p54BuJxca!P%di z?aA}y%1q=@k@aNfo1k^`qYh4W-hYT-*uGK1x)bazyi_YsU7F{k`M7&{JFVu~^k??A zo&W>)IP*;!@AWQ70ysgX&gyY#I}M*(@8qESHP7PLS6gQ;?>EJumW$3?KuWHy<>qx9 z9(ZALbG=)3w3*>8ZD23>aO(j7`0)xLjo_t(rSgBk1edC&oB(W9$DMI0Om~uv!L+9| z`FrYPP#Wfd+uKqtz&-Gy51kfL^DgubPtk~0-S6dTr2;D+9evwl`(7aVJ8C{r*UYM+O@ z-e8uiQF5=MvI%GcJ=r+b43f;+Uidg-_L~oZ3{O@d5S#ekE6M8vTSZ6@@ER;Ub#?2M z?P84W9s5UJWn}Cyfu?8Om!5RAnrgZxU(9;D?Z`%oIUVz#C_ZQg{-lK12FSZ#sgVB| z5nmKCaS1wabOErUL!pey^KMO;a)wY;7^0=ccX&U&N-Gg1dnx?}4g&zGk>4YMOnQ_G zD-dlhw`$tux6g)i!2X7vSDC{gep59<79KJoDRIlz8?VGFnm1lTu#$*q9cE=?nlazC;kSdM<7BQhcF< z_MiW#?@rqa6tZ9g&`6nr!G=eMumD?vrg$K?YcJPtKiscfOpc??m2vEm0VUi~4^2kU zGAy^{1S7}$;tbjLv3z+}P`>#E1V~@2?|MJnklxH?EdRH8#cFj+YsGD0&dmX5=fB ztZmBl`oM`&ev)8qrcjHrCll)ndY!b`GfFyy%U<6e@+WBRRyuSGw%(yp?Gfu0{cH*s z?3Rk&gMax;w!39$;0B%Uo}6{SkiQKA(RsqpNrcNJZbYeHMe4>lIRU>-7^KYJ|3ku< zzXtWBz49bL*nQk;0%kz>MUp4yHYRFXkZ6doZ{Y4{lqZDyU&_`D)@Ia@_p6XA0;I^! 
z0X8v0h*E8H()j&}m^qePYWE|X11=~| zG{ULKjB>d*wxXFKnZ*qJe4Ju95Q0enO7W8M*=1D-!7e^+b=|j&^OR@wy!GA6kY5#{ z$I{2rFq~oiq!3Ovg~3L6{NWed-M(i9nSs(lu<0YaA4ye1VjFYf1d$~fm-Q&4vz#-g z|7jl2NQ*{K>Z9d%4UEiGhnzcK*lz1luAK6S8825Gp_vc~dIw!B`;V-yRM^*X&CV>n z6V`yuZp-y3Zr88u=)s-k-V!#ctmG-+%s(6@>D zM~0@MpAk9SDOqF7Hc=CArnnEdGZ0vqzT9f&CR8R$^(F@ zMSlmuky$jORfyff<_JoDQP5=c72ds zP((!6&nOBQ2-r3;al6ij_-hwi^aKQIf^2Uf(cchd^5PuJ{WbDN)dWYYQ@+bT0$Kpk`eQB*i?~)y zo0%|beRJ+`@lSy+GWh(20^H7nTeV!oA3c$k8QG%Hp9OrLC!3yS{Ur8;&-9WJxf548 z7@gPYMql(N>?OY1Kp)`QvF>{wpbF;-Q<1sX_1shru$teX%34@|j{PDl{A1Z+_L=!- zJ=mAk?;}#Pz1aLVpMS7+@+{YDkX`u<8h>{VoA4Q(MN67}&towICHGIz#;8admm{B9 z-mEwM4hiwa$19gOsTmb||3;fPSrX_-+#R>b<*-u+9t%tYMp8NWf>fb4g`YHS^1 z2GIlj{j=Vd=}a}c*HWgZ^!Ll|%~U@n^*oJIT#0IEaE}7R^ntcyxQ?FqtTjf4{~E%S zz6Gu8>puHniD9Oki%Kez;1I3jD8j<&V1b#AjJ-9|_7^lTwC+;h_L`=NDj$BCIrwuB z^2_!4YUFTj#W*bTTqG!8N#+fTsC^+MG(+yCXnn3ImHb1c)IR>cZp^{tEp9rN^U`?h zi+3!82m`yk!j+ftjaaEEAM}4tYNERVcw{C;J!>ZpS~qb&79SomDL`Uz* zw7S6gGa3WuxBDkWvuH)sTbPwA7{{SC0AJ7d68u18vfftMkRkj!lf{pCl=|2BPT-3y zP4vTH8ZF(Pvb%SpBh>j(OPAjZooBm~RMy<@=UPB54znL^X`~UQ>Ac@n zee=^*3u~CR63YkqD>1=~<}V}h45LQ*NZZ#tOuCKEsI_IYnX#fgv5bmH&NZmz6+Jr6 z1Le(GM723~7vcoqZCt9o-j=Q}@K%m8)dc)CZ#KOOZEe)#2r0?Q=_MzaJpV&7TnR)_ zpFmTHLO?*^5QYq2E0c`mW06^yo{o13Oois!-INJ&W*DCHr2RV$(A8j~r0M@0(4 zc$81ipLH@==OE;mNPB6xKSKQfeyaZOk3ucP2!|Sang4u5sU~{-%Y48~HASH5wE&$- ze40N{xc~K(En9(B;j0QAknfit|L4V^uhawob(jCsKdt}YUDW^ozv#bL>3_1E|ARUG zKb%TC`mTe;X6ArDf%t!(AHS}%Usq`I_y7z5sBB+5KHPh9m<_)|{*&eNk6j2<0Zevk zdKxe>KK7OVWhzll|E=l$5AOBZ`7yWDc)&KiF^?%m?$YA+*^Mkm!cmVz&9$@^L z&3&)HXTk+G5&0N}Fi8L3ej(d5J~|qfkB<+S1$OgV4#36qu8!A19e|s1tPOhK%>a^Y)7@?>Axp`M5gGSiXajiT6eBg3?%8RL{Co-G$Mo2 z2|iD$FcM5+=7*DwEr3xAemPl<6j}keB7h<%y8+~R2!fI!(CpJHe+d(8a^BY{R?4Q2dID@ih;NIz_?zY7B)5OlPhUxB;T|BHLku=K(9(VhTbt$ zn(%G~a7sbJAoNd{yaRMvJDD({i5&Q4Cy0#ydLV{49ghIC74+e**6~t#our#RPX@TJ zYZ4RjbM6;CPQr5^n4516#xwr%O0oeqchfgb;jKxe4GAHI0&KSVzNc+f96 zyZ;f4|Cb%p^Z&iF5`o>#xpf86AEQ*ydPhSF%tY4v1IQ);AIZ<_W;u}b z)cS?SHwc8oWl_6%%W#6%TNJFaV4hA#rRC(H`8ibE|9-wc3+U;@u{ux)IehZ- zX~0Pb!Rl2eWE>pWAU*dN&%yW|u~i4uQJ7v}-B`oGi6q6k%_PvnIsSALPiX;K%V|-q z5r#(x)!u=!00shVi|6`$pTQg?eXq2%QjJzJ!4`%x{D;9F0+4@Dcz7oeltYg=^6VcR zLN9Dws7`m)I!h!o;s z==MwACH;@s31a0QfWsy6^|2oPi zdoC|Gw*?FqO0{k{j(P)O9pBvTKYI7D0c4q=;?5Si_Khvi~QcP5;zXvrTaM}P{gsA^y34jdx-WH zkK64fE7YW+2W0@QtpPg@95~*BjEwAOZ*O$R{QUe8IN(IefH@SD09l}bF~H~I;sR`Q zH0gLoIxh8fusef-gQGg6V`*VXNl9t3k)%@p%l3n|UMuazL_%@@Fj65BumRd;sx8kn z_4J;DVCDzvOPT93;O>F(i0J8M=01E?En;~;_Y&BS)F^N^w9T|}(!@OW@Zd}tQSc8q zDLkmx1?Q<4OyX&PgLt4Pqv(SZZp_9DFx-I3530Ner}khsj5;{3_u_z4X2>}>RIIWM z)&R@!5=5g8AphTj0iJ~QRtb1=WN0smOf+Dth`8~5#&QZA@3P?jTcDyr6`ZBzyg&0O zT%01f7Em0*2|1%X{_ntobT~SBugrjby!5*LjvxO732F50s}O*`1)vahFku4mHWZ=3 z2PbN|nLv*b0$PD`n?*q|o{FoyBM{;Z0Lc8(c@H0C`%|+1OTa!r>zoBBV(GaE>>M20 z0Tl*HkLke$j3m%|Zyg(BUyQFH?*m8dtPZB}MZW{J(nyo1yVRbsGB^bX9v=QV6B8=5 zyf-s5YhtSN01=Q_z)b}?2fSx%YYR5w|9vpW8i0d$0FU=>-Ny$4pahL(6d<`?#3dj= zgan0zjCO;DE8{jPI(nwf>aiu%GgV4TO140o!5tl96;fqNdD4m8HYDK8B4D-1-rnA> zA3jKdvk&U^dt>RQetcjDG1U)<8VrDcyNY(La6K{r4WH5Mec=DhrpmMK1wq~D2bdi> zUiPzztcH}4WhWacHY%fn-zkdQjHm4$`B<}cs>Jd&SM$}a=`&MGTU zgowbtBdx_jE*IjIQ>~M*v58*Xr=wBs_5BR(?nlxwpnMIXM9TwPPMMN3O7fbKu(u*S#xj}PFAefpFY^Mq1V zD=17%Odx(+V`gTiTFJ(Yb?A+!=dCl|_oY89poH~03{!~DND()7@b9TdlCS>!Gk%=p zIHpq%s*?_N41WT6T_({S1H}t%WGei(V(A+=mCmO~f5CyKCH(`mA#%^Zk2bzd>>__Cd(d z8!U$%piJrb45==>GhwqgK?Y7WX{3fu<(RQCD*|g#L=PKYgo22U`;uGD)vHh&JXBDr9BIo_b7;26zFGN6l+Z5tLea#!Ez)g7|yP z$tmj$jVS@ z7CNTTs}?Y8*_K(3<}``O@QUz7>*>RnFXu)tg!7wvLxHFZ8YJC*EaBW-#eE1bNNUqB z-CQ=;0{gBk*azW@i$M>^*ixzce##iOR6pxqMReL^;O1zgT|58IWCxOdQN)E#`@3m3 
z(37cUM%5bJmME>%)ywBTZw_-%MPjmJm^@6N4~la^j}k6z5%&CN}; zWFh6AfAjg-DOP}b7+2ItXk&4}{VVQhaS*-|K0{KK|VOwB9;K zxto=}#6>4s@Z%r7U}_X8a{W`MP~`2~s|m!x({6(V zfJRL9SWN5BU#`vR7ScU6;2=wo&xRo^^Ae~~^Kj<8|KPzbOjdQyv{J(n;^Rct-M@mt z)AZAS5h+|}Cn09R(BpCnl7@&Zfr8)=y}AU2<7Rs6X*iDF-neHNisH5xZ*r(3e5rQu zuEfe;Me(O!85_kV40n;cTb=nc18o?{B=&la6`^C3WZrh1=PrgIbPDfmp>Ug?wj&NP z*zLLk2VhTgKHWUFv1-bPD<==u5JlX7f|z!YYSRNLRb((CX+5#3;?T-EPt^|#$${2f z24Vu`zIs!kVIT>RKwUeLEW{CMiVRa0U^8^6=Ag)7cu%a;0AIOhG^Zm96 z0W(jb5+~#;ZkQ*)IpkB^^WXklB+vnMcq=e}0n3(YJ9r#XtdKc-eB9ckC2I%ZgQSDQ z`v#MMwuVX{>G%(D2jg*`wT|ynw(R)vg970lD?e|bkGiKS<|NdA1Qp6=p`~>`)AkSv zM(~-_og8SCE>n1XDkvZ0M8n0q%w*aDz-lp=raB++ClxAh5&{o87>InJrHzT$`tM?HkR$FrReU*Tu zeKrHq!*t(^Hzctp`MBH-VpEo;R#H?fu98@~3!ryle8*4BN#kPTp;cjSRZ_0{da?Vi z{rM50CnVYEF3!&wE*(0)Y+ekoyXa?4yv6*qg={k{C0p^~Ib5yQct$`tC;4Zdf}=g3 zVfna*j?EbIC`r)e#H=q#c2ba4CZYJ{vyLZ;OiWC%X018Wu9=8lw;@)M(~Y|+I=LaN z>4{X=?pop-2KTB$AQ7e!W`*!eSc2x()(;A8yk~O~$Hyu*S3P}O_C%U~UcLF%@ zC<0v3-)^39`wwAG*w)q-Fo3lsh!6?4fV8}I*22^X#p^PPYJkqHneGT(S}z#U=%UKi z4XxY;BN{B+h$Cl~Q(#eJ3+&psTW~MGI>)|!%MhLdqgc!2V9`RV7x?)^Oh{$KLX@3{ z4i?aPQsWn6Dz>0g1g~knw@$K)TiJAKhdpBM4QNA<-}NU$h6(-46616eMfVP-+)|WS z=aY0Pv$C={W|P0~CJ=TtgOeMaxHJx*(e*;{q61=|l;<^_rdqH%<$*CBaz=#Mq*c7? zizF1~C`-P-bmp3c#-wd<(3eRr_VfY=)lHnu;&Br^2_Iy z{t>XXC}pM(0w*2gJgM$4_d2#Y3NY!XSx9`mUbO60son;zN-LSG-?QwMC!1d(&M|NG zt2`4rmmglmwI^C>SIW54EQ%*0yF53?%|+?#5*QtQ4`rhc!lt3b6p_+4B(FGDU&w4@U9s0HxfC0N`~X4d@5Vi z@TSNRd0+)NTy|4{gkdAKlEmV)q)C7seaEYZgRr9X_xINoR6|Bjy%P5k5C=>`!73l> z@=a{y*4^IJEbdgV9M*1Nde z=&7^k?Qs&u-dwSI)+b_ofo zgrIk{mR0vxKRXle`N-E2mRLWK^|)t~(@-W51545)^764RI|xrwnutnER2~D(8Ol^W zY=~UIX*>=so&cbyQKYA6m7p#p+bu=wHf|tfUZgJO-vLfw9R`S^`;+x8N5k{3)WlKZ zdc43gUYZ_mW7ckcZ__`F?*m^k^aX#L#hkDD_;CZ6HT0bhlEELR>9*TX)l(80KFVCV z!cN1bbriengxTItfSUcIuiv-G{YYoQvw~J`tEv6ER zzlxPJ7W$$h8Y_>OHmNL8>R4NkTg^$e;X#k*w!Ff<*K%F3Nm_q-0oJm!+O@*M!n!S4 z_Jp}Yo7fz=99-tQnwlCq374#zkdMNsgo_ZsixSm|W^`ec=Y5F;@HZ)T2#bQGX>MZ^ z)ZC5NBySZJ28L_Muwo&VEV!42e)7gxBl3#*nrNvXRDD@GQE|m8=aDNa7UGOsSX9&o z;uVa5pklPBS9-c2ajFF;xy5Y11==<8(W2^;6EN&uqobokA}iTVsGD^#_XcukCFDMP zZEs(AB@KRrG=ca@WrOhbK*4SVhO(dqq2*~C8rlT2*72cM2GEej$h0?p{xl}}_hJii zEU?|Nf7h;EWU>ht4!>PCqAq!*A%Mz}68NwftWly>k0Km}eFTDJ0DHifDlFzNeNoev zf^2`laWkAGiIZ^F+K1m+2tJ0-9n5@TgFxs6*M;nKUv<7h7W&M@b->i^4jY-IZ&h4259M?n=7$6Kc+;q5U^rv04o+9vxhuj zLRDyG{T?~_)tmurAu&5O?5jw=T{-!_N7O-lLz znnPjnW0EuqNt0S_gtqe&PeXzB&}HYjaJ=gK_dRCW6TytZAvXmlKb~pmxnC~~BJoU6 z3^miKypodDF#n|%P}3P&e^or!PCQDYr!FZeiFs=-Nl%dA?qg+@SrS}aTm&z59LX1D z*)Vvmu-#iJEnM_i-@pu5f_)^%#q`ru{a?`SA;=YS7ZfJL!{)@5X7X#nxfiNr&z{M& zHBDMeF%195OTgc{=ZD^Y)ha+F7=8IXzQ#BaKAbPlCmlCgPH}gaz(%B$vLib>U`?5N?ntEg~o={w!k*rf&H9Zh+|62OeY}>g!ca>g$XTBOQ{Sl#{cwZ z4q2-px}U<*sbOWO#WxT#Xf-HMXWEVIcbNL+i)O2A7Gh}w$hg;f$5^muQ3LTdwUVJi zVpi(4EDtGic%~<&M<@f$3KHxf-MgdUHmUXz&UQ;IFpU;-7|#PWZna=Aj-5l&ALz2P zCd=gqxT2bi-9BNLf{Csc)iy%r0>|iyCY>^_)(H+I@H`DJTPq%geuN&@8y&w%=$Yf^ zwMuf(Ge8n&o1;UvfxpT5EW4zJl@y_eFH=k3rQ3&3^`jTdhwI2)X1mx|x&j8}+u|+a z_4?i=h#ReQ$U2tEaq`bHM>Y$00SughdTM;#>K8E^+rJ3LAz84r+@kZ_4Ujxuu0 z=~afaB0-DTm`8R>ddmB{T3vgg*SFwdTkC{8Z2bNG+lEI^@Wc@O5w$9q2amw~559r{ zd+JI{a!--ci>JHkhimap}k0S54z)o=MV`O6twvwsAVD zq5cHfMmIe#RJfR9M|@QQzS}T}|6PX`j-X(IllE!qz%fxjXXQDSCyAf-z1pDxWJhj2 zQMa6GAIcir2udo*sMh$*cUs|Y7nd%6`n<8rfoVY^63vM^E3iZkdM$H9;R5U*+%JA> za)gjCb2FOvdjt~hQATBbG6+PG08z4Rdp2vDUTR_$C5k0C48D~zD+(bop(#u1g0ioh z1{c4-y-IuK^HcwRtG`9;c@8lmbx{MCz^!fxvOb>K`v9x94-tDcP@##lT*p2Z!4-o9Jt%<@6Y zjh5Q^D~=-kjzzLAa|0T@gep8)TiVFLU>_sniAf)0u!ty{!y{Q|V44frbV&b+wW)H? 
z!CV&`$E~Em;GP$jv=mjm%CoZ=|9f5LMQuEE9kpVz9K`}EAn7l>;C~H>Pcc|XUiBR^ zu(38y%~e)b9s!1w)vZ_{)2qY*NE*t19=6c3$dM!}ry zgLLR1|1##ukPJAe2K4Lu#v&#JdY!}rD+V~%FCh}`D;cVBypQpzp%Sh+WhJLsw+&BB z1np6#Fo&CHqaG#2Jq8I*k=V{Q_$GCf?DE|uxt(3bEf@8pcouFGz%*0RO?tDcK!gE2 zSwq|QEvbQ5Gp`RxqgPSs@Crvi5NV}J@@L!`{eveu(*P|??`@mW~v1;5+oE0)^I%UEGTZg*OS_ z6GQ$xw#f(tXPvXNvEOrZa&qj&)&6Q8#>KsEU}RX92Q=UT%~pP6qq+(EW^|-Ul||;K zlf3lr?mWU?l{{bVQughI8dny2D7srIen4!0e}P(n3k@(n`H-ywntgS4e3&0D8R9fE6qH zHsFhQn{F>E0wA&N1*~V{Rsy%Oh^%FP0~erWG*%;y ztOChn$m`EZ3ckH*<8hx*_gC&b@E2=;f-68b$Z6%^t#T06#K)OrPCT)SDbxAs5u%GA zM%t)ny_Ih06EP!dj2t@tXO>;%!=t0r1r%--AzT$MUQrQ1eRwlA`I<^ZD`6W}DfBLg@kj7{BUBadOBXcGI* zU~A-PEpn8~8!b($iuaaX>+t4Gp0TG{e+&(ojfV8iJjOC1{v&k3U!b?#A20Wk5eAL@>zze10gGD_iakCmS|eYYtRR&cv+TXbPgRJ9|^88pJz#D z;!dn}8ZBd?$#y{`#=My%STZ{pD$#ukVsiCc=Pn(818Jr}g{n-!DFhUcxVU(-Ysi?K z3&Wg;^Tx~1p!Og5mZn4%k6ej~Ch7tjwgG3IvYwD&YHgD_BIC@Q17K!X$Sx3NM1^rJc)qwy%>lTOp zL!@(o%~v(`%5`axh>E7~KkxyCe_@=G@3#PA(q|4Kq?~zwX96?^(Dr!J1>1mWAlh+V zIuygIL78vh`!p~r4Kx7&6;cR?$zkMXSzTc~4Et``gPo(ZSH8a9pr)qAV{#UBkpauW z_V#whuL!2>B084f9IygOGsv$&Ke(A$Kn=}T)TX-*sq4H3+77*V9DRU5zT4-tH;%ko{fRNtM*B#o2xu7L(7fH8pH4^TG{8IPB-~%-VtD zb>T1n`6ys&k$E)ucvH*#qw~iX!F;DX%_K9fI!LW(sn<7wd#yA{+ZBwiV|b?nUag?I zhQL@5uEaD2$D=g0U)cr9@FcpGh1@t%3Y^|8L;zhpOzB+dPZjuPj9x6lxIb@&Eecea zq5(G#KLj_O>3wPh;2W5KZKAb<-ou?3L1v~VBRDA5E|=$Nq+gcCA;j=0?2X4% zNXje`u*oSKyF7;G6Do6mg}xPT;56fKMZh$iq-oYIy8ZKv?=xrp-VBb>>2La{P!EC9 zqC5BI<~xUmF_}fzXU!7sF|>+Bp=|}2f~1>g{U}w*VDwc7JXB~-)o6#$UmMU8hck_O zFs-E&MoAE`u)ld{u>3TR{5fwsa}z94#UgLx?gZxbg?)?DsqaIof4e1U!NCj?}>fuMC!ZzCDnHJfb?-MOn?-fo3kcD5Ly0X>%uc0Omi$T97xLPu8Ws98OE ztV=H2C<&i|CHt`jG>1aY6Rw+yiQMt0SV4fI!TFdlox+pr=XTdp({gE{FlS-p!3IaK zG(0LbFe1N&HrUA}Ot`gkCe@m6%IQ4-JsntaMnhKQ4pLd7#`uAj0Gey%;u1oXHZFQs z7*KTzd|l!!f>%yL!RS_}|oapY&EPJt; zeu+-eEcIA~;ai>WPyW^zifxh?k8{#tA8EHw($6pX{6-%H(H#IKZ=Dq4hEBDNcWVYt z841>GdI;V`RpMRg=l+8-pdf%<#Z62smt%D*8r7x8Vew2dcw_ z*Rt<38w4%+DF0m-CPHe?9`=-1po0N+W8V~sWyyi2&z?O?-pZGNne6Z=(Et$pB5Pls zX024b#Qxk;?j<`b<&l=?Cq}ZXGM(r6#pee24qvUO`dtR$PB)I6Yp~q43vgzuq#-`^ zZrNi`oO89`)6$@z&oyujSjkwCGrDfwqGR#}_0;hyttFuF<+(pFta49;Xw18N z@PI|JUIS_-sjL)3Z9MQJgwp^$W`V2t&OCki$*l0!?kF(I>W710f)iPupyi0h_t>Ew z!%`2@&G>VsZCO4wv%5EaOb+xaZ}ux%>{8~@Na#CV`>bPHjq-Em2TzBOMmzX&?@cp%^!G-a&XIazef ze4W<%H=L$cI%+WfS77@&qc^(6Kls$+fT&VC`gLPTW6EFvHt*c&jgEEGORscv zr0>%4EC*!&G%lHw0L`? 
zgK-J{s|?kj&%yemSiOatx*y~ddi8|QLid!>_K-?O_Nww42qGH2Op%;XZ1TuU$v8L& zyDUto@!W-cOFdPeMk`To$sWzT5-?eeNr1$05Mu^3RJ`V{dQ85!52Q7OP(flYP zj?A@>ecSwJ%+^$Q*=r(b*_r5r(k{5vjJ zKU@ah?iN6g9&}bj<&`M|{!teMtaRXJ;_w+|=Nde=%=xNuG~BM@&4}s)CT3>cwtSD|vMZP;VhKKr5ddz52*MPvBQD*Bz?~wXR z@>wiaGrcjD&vTmJeJE!5kx_U5-{(6rGn^5}jL{ZJ`}bBo_e{!WK^w;6e=r^ny>O46 zho`1*tq*BQ?K?W>V^>gil+?bDjmc?)j~_IMQ;w1N-Iu0y_hy@rZX2aH`a zzT?EAftKuDIQOXpbe_E8T|)x{9RN!_X!Nkpz0umE?L4ua)X)$?KRgpJtxKDYkdFu` zql>^>E?Gy>obCA4_~M0>u(=%3e!GL^ z8|g(^MO4XvG9)+;Q86(4eaqKHy0Yx*3~5cY%|G)@rJT7LEu==-Y6}q9`IEBR>=dS5rW-+p9{!3_T&vvTB#X|75D5PN!Rx>937vb$nQ>3H)bJ(eA zIx9cmBY-tnRMPt+!O`|tbJ*2(Ys3a33H8zmjGmZJz&v9>zc5rHhAiy82)IvGn0cl3 zw@qdcmYMOZO|-#0?OG!Noxbu*kr~RXHfX#%7Ey0nooc(b`A&=1Op0y0=P->}cJkH~ z7OoGUQqs?ldindR4F^{oW&7;HH<$A*)%M1(C>ihRjoK?;N>000zY~xe3`nyuETNYFX1A(6 z^ZxWV6Q`L$#mJ1AZHm6hLqoonO6E=zDwRtLi&g~zU;iFMBS<6Q{{6k4qt=V__KnHo z(8wGIomXU@^9;TP1Gn;l;fXwjC-Mn&UKi8Nk5R2?(x%yEX=xd-5GM*XNC=lUO@Znk zua?(w*#Y!u0}bg9h+uGd;pT{CgN0gXPN0X0`%fM{`hmh(w}AsQk6R_(Y~jnnNGlhFfDhgpmb z-p}!+err=9v8Nc7?yx@r}#jq8f3U zok@QCwn)P)+k?SndKsFYhwUpFMD42E%SGy=TM=tZnOik;UH1IJMx9brd^erDqR=9C zO!O1wbCqZN=OjH*>}+qczmk*q;s$?=i~IHpBi(_AH|PY7_ev|M-(5Qty!;nK;f?d| zIlV@S6PfPmXUaFM=y{n}Y0}l5yi@vYFoJn~^=jS84^$ zi$2?$+`ja&_k~{X{|HKp7#I3Ia>UODtc*&0TY;NxhTqLdj%VnW4_NYnJtK0M0ur}2 z5+5&~yTC{va7+*Ni}OiwWnRyrfINUY8e)>Gs+kd=9D%n(E@fLnu&=daTw$DY>P5Y z&IO|<)`tGimIC`(NCXUCgd8zxsMfbhYJZJjNO*ZfCekbh6BDFh{4jqjJ^i%@<78+6 z2vm!@Lhg*nq|dBSC+bUsz_)1bGetNJb3nJnT{za84~l6R9O@6eb%+8rsjk2D>f*TU^>8>XB&0l49>;Yo*+S>f zqM8h>2n3?v%Tts5X0xQEqy_jL9;)ltugA-Vp^PEWHX!s9E`AH4s}8AxStDI%9rPCG z(Jr82sqajKT%rRF<*fxXacBf2Z&+PO%KdSuG_`lzIahvfWYq11Eqo#GPF0#+pvLePL4uw<8DX`=-s zSwW0yG#1uS;QZ7Zpue~ESKX(T{TE!hxc#kO?$a1aI5g}yw;cL(w)-XQeRKJc_4)e6 z_LQNiONMAL*Vm1uft*av;{6?%Z2-deAi0Ut{5QobqI?$y%?b!iRF{ z+txo+k;^)?^lj^BzMUoYS$`iktvkA<%Lkk;0C?$3E_<+TNlr7Cja;D+KAR=)S8(ZT z$<7?O1tXq?+W@vLFTXCMKz=F4O#~tT5K&WFTV=&jw6RFfj)w{kfEY>Bc-!f@4^Nv| zKjDs(5vFGj8-ejV+|Y+7teq z5X9+c0OH)&e_c#rx&Q13a1CUb($ojhmB4wv`XOS;(mH$Qq-J)Hf}bNbUIFno#^3-B zP&-&YcYI^jlQ9UK4nMyVti5h@9ui$%Vp)JdL{6fWu1F2D#95{DqZM4bf=XBx4-sV0 z!!J{ffnIDMG~-ckVgjow>JEMg6}v{2z@>3JDtEyvbR|<{-x0|OX5H;GvQ7T++I237~g#< zAMb1vsawyA5jCQZv5CXcctmD<=k5cBt;Do5N~Y8dr*<;G7&7_x?obt03OS}4aW_YPOWFvNKTvCkv)M_ln{gs^ zHvlCf2AO}w(Rm97DQNUHp3?xUN-88D9prc!7lSCY-w^^j(0WVu60?K>>7lt7GpGx9 z9uhlCMn1-CP6mW3;y`ZVE1a5oI7xOtI%cGP<oTo2ez8uPD1e+gC@QBO*5boy>@LXmCWri=>^Io(QNtZ}u z!H0FvT;I}^#g^ifNkO~ehc$xark6c_VyPTj zlEWr7v748fx%g*f<)1?)=Uv#J|FN>6M1(yb<}`|MBFd%7o^zNnG%k?#fx)8vuqBZR|agP?0Azk8dVomy4=lsV)$K6#}1Q2nx~gh z(2JER9)Es%U02Z@Z(06-R{POKoHzev>g4ba-%iG?XiHgpe6s0UIKJa!&$*qUB zV!B1aw3-jbG`JwN(6OxTgN#8E3!upS2GP0W^h0^YN)GLu-DtRy3Bel1qZnpGjlB%g zE)?*mnke$1LjO(8Aw~KqxY^d4;q1((8fmYg-~bL{CkMU@S#_6ks>iCZTMRVtkaY{` z2OZ(;Y>%P%6EtWkn zl?uI@uZYYBQS*U_i$D7MuB2M$VqZM*LbbhVvG(t(H z%tWG(?A*}a(SZYo-M|}mpuHO~nO~o5AQ!mR4_NR7!2D)vYTa@A9j7oh1tx@y5#g}e zJpJtw58fN9Pd>qeE!QeMZBB>cGK+>w*0;1}wrMMk2~C z$di&C;x;!e>{TmUPLZ=PsZn<8`KiapQP|W_GZPnLay}(b6i6&+M>a#JAGokvL0+Cr z@e#B6cBC&|K0i)E?j$#W@YMjK}P=P0OLd- z&l0xjt7jq z4sa-1`(@cfeM##h4x$CGQ288v+t;|n%w)i_udMXc_Ey*R|G}!uD2QoV_g)?&Cdd&R<)9sH+{!ALEqOxsxe1(3WR=q+73>=eG)+8op~ zm`mIT>Mj^fIhUXDf^O~LsT!wonaoYiw|8x4&oILU5de?uIq zMjGIFJv0LLin`E#?jXX48L795@X#S^e9j<^K%E6Hhc0^8+DEdSwiJO6^q^Dh`)sM`<_|5a}?2TADB4~uMdTx<`@el^()xVJRBEKL~JG|9B2x1 z1@p8tGyRu2IJ`c!Ff!Q#06mZFDw)csk zRrlQn{S#9c&@sfs2#dXvyjV{2+}(@CF}pLSwl!(@4mG`%n+)l%R?OOjL88uEds0g- zW9k>2=0=Tg1M0qrL}W5tdaIUY46S9=tG&g$EE_|FVM-v4Z0;W(?u|%LDz2?OrBgPy zj6z~zd@GvQ%$@*vFK(l&5Ahxi;%ngU0yNr=gNioqEZs6Rl5vy57(9DnA?PADab?Bl2unLWANP 
zR&*aPyk=E2)A(%>_DFiDaeT|vFjdmnK*64QtnA29YOx!IWBQSN-HIgYr4@(OEs=lc
zdgByu<|9C`n{_lRZ*ON%T#8eC;P&;rrVPI5ch=?H!$1J?C0I-0dyG_tQ>=EAg({#&
zYR)0E5_VH!IJY3|$tYV200g&x*lm%dxzYp63Fu3F-tzL3{LShHbB<=UjQN%xyyiv$
z@UiUPsB$XMXY$@e0B|J*=+z?1*)m15zlv)L-RCtm6!Y13H1W-hD^vS3zi{(sP;W+2
z`7~#KVcoNZ6>0e!0HTNi3$A^DLduOuL>7^wl~9o$3;|>te4MdA+;8V@t}Kw5W#W*M
z8dRk!>*@`Pq*r(wc_#DO*;nKrP6escTd)}jFz(N}8RksUB;D{*c6!PQ9bUmX*-8p&
zVD4%|L78gms-26BH%^6?Ij3dnqjB@OESY@xJ8-ga%vGGoYQ;G?_Ca^Ig9pB1&q<02qkowc8S2yqJtH<$sEJC0!
zrF=f$)8YS?`ghnw({b^hp5q2f$lu&Wa73PVbSN+M6$74&m#{zCbaRBOP2!o4c)S?`
zmqdDWXHkZqr4rU)Zyv+hmC$uzR^TD1pmvHu*HUxf{`BVV55LdK%C2}a+k+~j{zt_t
zZR%Pg4*-qP?SeM1lbl~KK2g$%_AZa*{UEFtwx9n&~2&aT%y;`k(fvnKo
zWqX?9^PQg<*Re$`K`t#wmGZ23zc
zM*NnkkTe!BbvyPeDHhpGUa&Ay9<5sq%(nfCI{M^^RL$=4@rLuGcUO2j
zxa`B~Uv&}xE=}qm5RSf7Ape&q6(%RdE_bl-c6bp0Vt_z}QhxA)zNhaxP()w@s~HJZ
zmqzx0SUFpar?VFJix)~v6F!5wC=h8GF>VIS$sSZP!;rjrdAj-wIY&xwZi?>Gf
z1`gWdGR=#AO8p87O?BnL;+>n2`qaW>@bPdi7j*J{$_(4o`tqNoD}A?gCT`Qnk+nXl
z1Q)6xZ;=RiJPNbQ_h*(RSd*`LuST+Kx#5`^&zF&K-r8YBcKmqp$O8(F_Vw<%U=8!Z
z4d#ZN7^K_xiQNrB)2OHkBQxFB&x3(I_buKA=!fLWz3!DN3oUp^%f|dI5Mn_!TIr$%
z@9xPu^hX&GEGX31@=MRgoa2~vdKXPcjHP`c^gcxp76sGZeZw(B!%d}i48PW$&)VFS
zP?RE*r}Vj#azJwYom>!Ckrr`lMbMZ$D<JlNr=lfN0Fb8v(I>^<))X6xHr|N1rqT
zeoxjPds`{;j&A2aea-J`xAe-v0X3(ovJIC^QkAVYl9*dhB+T%Ch
z^$NuA*KAvcf$0#w4BA91pJ8^fAPa3Y5=LN!HEf8Ks+{cOxfWk$a~99jUIC9M&GXRj
zI`2tEocaE$9VNMC^H-fPc8J%^ML#A9jM)kxzI(gK29I#!e$gX
z$atSjSwd2a5sefHPznvNI69~Ne7SGc^kkwuE*r^b(WU-Q?
z9R-^_SAGj2>;=E4DmOC_Vf5e`iw{E;XrC;%tpgT(nJ$iySwY1fDItR$D_f}F=de_X
zyb*+_GxrwIh{A2yXFG;7TM?dU8(C)+v?RV-^6LRbxR*&w$$~;YHDMx
z{p}eN(@GSH-BU(jxkqD-2za~gQW0@|y^(Z=ZD!A`D}G9zIXE5MAiG`C$}7Hd+r9xbmR&9hE{<
zI9L_8ea?tklYm@YcC^Qvn+dRN3W5*>gBpz>JIazD4=`Y`mPDT$r87DrYulqBW??x}
zpRxT4RH(j7i{#}?AmWs~#a?Dmz->i0E1J?dSqU2OOfP7CR*H$}qbYC8lorem@5bJ%
z$k%~?{Vfo2xJ6NKU6P~Y^M3lIlk$OphoO|%dINr#zP?_BY4q+poZr6jy?G46+yWFTJw^YKHq_vnZcuO<+2ZaG=`N|QkAq1<;-SDh(+_XXa*
zmuLE=Lk8|11b%*Zn=NsJMR(cZ%JhYTBm4Z@9qO<=>Vng;nZ&|;?Z1OW&cTRn|H>ye
zaU6y?g~kte!ax;v{?(=>ZI0E23*h)^(5(?B1?4!&{S;cIt;VbKxd3)#Cb>{xGBJ3D
zjN$TF;a$v7Uw~rdg(kP3pWnN(QIeZdH1THYI)OwV34x!*3ZFQ_G#GwzrT_*>00FKb
zmF+-T5NtO06s(Zn4a}%Ye7)rChi(}_&lpT#Mx{KT)nm`iV0q!LD6MGFRi15-x-?=2
zAOWNiLkGPGwebk5oZm11c*1@gH2Sbw{|Eq1Enf9{kQ8lJOV@qf!QcL9{);0v!iN26Ux;B_X
zY8y(JNxDfFd64ScLACPLs)%euHwrEu1gu*-tI63LJ4kZP!G~f0@4@7YU~h*>nj^Nd
zl@UV7ns?RkXOjH}hWMvxlZ$%w`%tAImq0Wsje%
zv(8_NgYR4__5zC}Utka&T2a^qep&A{+CAFYT8VJ=p1%j=Q$d&MmWR)Pncckc`MuR=
z`w7v9KL_igxR{$E!s$3NhQoMdVrDMWd*nmn6Zg99W#NBui9Jsb)OgJ|?@>77tB+?w
zlxH?W&jg#!?$H5QZFMlqJI(HBE(BA%)AJ>msgX}19yGAnba`e!U)YY6zTkY|j(j=B
zLBfStAsS^xWlvLVrMWyYFrfl1PerBG7Xj&hxmh0UQSYY^70y;`qJ~EM7I6p=rQBwM
z$;TceTr*pJr~c6jNc-+$E4R>5`w(F#bMZ2^?^uaVhFU+5yIjimBsIghcEE%
z&-Vye%4%M_0&aiC3#JY&oC$zQi)fPv{LH6wB(dJCUL+ud`+2{XSzytLd6tIB`orty
zMYX(G^4Uc3#Q*lv>(Ye!G1d)I&YujW#u75*B(2T2(AMdAnD9$;6tIBU^TlRe|HwsK
zLbx4DNqeZ&-R)E4&3~)7LV54@GkAhSDay$MZDrJqHBXON{V9gB!2@-D#Q&N9!x=ty
zxZHsOkcgA~KL3TB=WX`CMkTQwE67Y4{V9;V#pALf7nLGH^$YuALM%#Yt!=f_^kYcr^en)OjRPvCMI)
zF1To$i)li#vhU@zEN_4QMu~DzGu@sDx0~bz#8<4vTlSsu=|YJ19_zG;z(l#C!Dnks
zjVj>2Nu0Dwv1wq9l~<}=@PXi>2<}gQzn)NosFynSNf?vdW;#oj@o*?WLr;_&a_Ea7
z8e?_wZ9kVnR~jx3_%C{^4Go3G8+EjwJm|0#OnEOi#fd*wKi+WLCLZ+SB03bufu>?I
zX$cZ^O^}HFTR>=T{B`?f!zE&S1RU&csFBn!_Q$^cr8vP;8KH#+YtYzM66zM1Ur28Z
zNOQQl&I6omuDs=Y$_u_NZ>W5{?kmT1zoOA6^`(RgYyA{7xqk%|j~m=F_Um)KE6SQo
zB*LpcPI{cF&K7wUoIXMf9I16xDS5f!JQ7tMA7x`i?w;Y9pXJb7lg7FPtNOTy
zZcmIagM6!)sxdu`Wsh9b;^RRI3IFjx-m0(1NieCN2sD@=T)QNUD)5SNZYUsvecgUBqAs-DmeBKA>0?Q$z
zc>QP`ba9UK(Ze&V5Emp~^0w_CQ*ce-Et>hU|Fq7g28Gq_W;=rMGPaV{Kih;7ECaE^sh_h2Y#%bZ8
zjqwnDBanb5*F<&s-IYfOu!gx3Ba5>6`TINwgS8v(p}kdwr>0h#=AIWvb?K*`dTKG*1m=pqPbIVnZb
z%__-CG>Q7@V*nJ-5yrJ92FResW?m%EV5{OXU&1#*mE5I|pE@o2v##CMK^)F%DkBF}
z0Mo%~5?29x{Jwzjo|PF3B7SGAX$@fv*4-t?roM#U*OOPeNKNUI_<)9>kRCU@@KrkSY$qLXCmc5>
zM4P?>=IShGA`-;Vs|U?|qcJHh`^XvNDNfp*{u1r9V6Z>mfzcB?TI%DbuvDzQ?b4b|
z$u4W2qz~7eSsAIoYdc0TyC-Jl1zwl&e)NBPuoT+Q|5f&Bl2O}=X=c2;Ncv?#q?f|T
z2B_=fYlX3O-WFcdiB>!YvQ}z}$TBkT4~>%~jbxoQq3w1t4}a0m{<~|>(cr%|u(U4Z|GpqtZL4!>a2=|xi0
zwF*$#Meon8sVj}W@!w=1h|_THnp&YhByVmq11%4C-*#ud#7IufcyIZJYbe;^z`RH`
zv=fPvN!rCQ9RRmK=Xy)*)ECm%1*yJc40OyD^RgKh~^`U5omli3bgA;I6HeBov6hjGm)Q2=(R^?x2
z7Qy+H?26YE;}tf%>roKsZQfSDv^OsPjr_U8?%pm$=Vh7`pd|d6>ai)(5!8S9GBrj^i*eA{VR=p3Gg
z^b&$zErQ)x1^YzAjOQ@KZtOtTOTmhtG77laY^hcR0i(~tfq}0biv6hDDgExKkw6j)
zh_I}|2Q`&}Pr;LrE&jK+#_J4+;ZqrGK)Qnk+82?D0^LS(Mk!u%1xbEbqXv~wQWoP*
z?8N6j8tUTDTU{0?cCFC`k}pYic}WG`y0HM#*Xn7ev{8Ua5qp}T!qA+2w_94LksBnt
z2TdI;E}3VbhF5{k^Z(O>e5mOPHa*-OG7osLRlXWW6_b!Us^(`~?c0D#!<^B4pQ
zSf8mv1nEk)eL8e0i&C;tqw8-6qC&A;QTLWQDG6&h6a?gBA&}(-d!ab&QEvGKU|E%+
zGs+mh{eH1XC=o}F;i8V#6v+#pPXE&4F}Y1PfG|`C_-#V=NI=LWZ(kQ3I36b14lFlv
zuGcOtWzifn(IRq~|0=THJ3j9Q1FI1d75FS~s(o7!2AYZir5Xvj5bHnGYftC@*LrQ{
zj=$=)|FOBKNe1W3-!yMPGlqw$A@hS^e*{%-=H_aD#v=_@9=%
z=P-W|)%^33fq{XBe{~8R
z{fF~_EH=nbV677n+01)@zYsp8=>o6I9~b)j&l+UlG|kylQIU~91K(AsXc2%EzQ6s*
z|Bji#AK+15`+PjW4B0;4?ZBi=K|m_C;+mOgU$vdQN&$3+ar31Hv~NJ4HyEJ0-aPR?
zkre+#d;WHoIr0Cz&;JW_#DCaD)NkVn;66buOa&h;H>X^T^6<=-pkvnEA8E@$7Q=Xu
z%>wggg2lg$5JXKn|Mkhp_x5&wxZ5bx<({ohFyDe9=dmOI-KuGVFp8ZEADFv{VZX=2rPkBNEZj%)
z6%QZKGZfBz93yzHwd84~nr|Oneu8_ctdAT;!h=VQQKk9!Q!Z2y%t){uguzy+-|+O&
z05)V7qH=ts`{`B@l{Gr(qaj-if*~e-|0Y;FJlY(oih-0?kJh-%`RZ{Ne8Y~}N^uVt
zNT6}r@8@!vd;7ThInQ!&sP};f)TfWQ(2slhg>8bxq81{0{uOWnIizkK8R)bhjQHub
zUNyGBdk!0dI_yv)IQ$@tBiU?jicE4`8u6{v!{Pv9RkFnqppffa)l2lZ@*O?NK18=@
z0=PjD?-AcRz3dFpJyI5diIBzro*?~?%IPn{jj7!zBC^iObw`4Uj!RhAvyfzew*zB_
zN{}X6FRP2W1A4VBrnsV_{W(gJX|%UIN!9=(s2xh_;#upv!v(-xL=_EmyVAY}FrO_p
zVUV+@_N|KAI-e)lCOt@BZLHl5d1}K$HV8o{{PD?Q-+HuGvaSH#2C~GJ`pJ0!`;f#E
z)Ypg+kd=j2QS%BWROGTk#VQ1>6@cYhEZ=-Z5VdnTb=?OrK`DdC*jRxcIYFz6L$vUvgHjyg`+!i
z$T;Fqf@~_{IOx}^6nl%8yv%p5-g?DAC%u!s!bLlUTLxoztQ0}bkC(30dc{n&PayLK
zH+?ig1V^y&t6My3BF{Z)peI48gZS{-@bXv}92n5Iz0fgL7S!m9=N*%cc;{h1*@IV#
z2$CnAzXVVKx3@)ETc*1_OMtM_iYAx|4)n4U`9erRk*1OY4x7kSU}4rHH?G%V}g||1!Ik>5sv95HFf6BKfpN^T-MF+;XJaJ$aHZPsb5g
z)4tYgvH-oj0zX$JL!1if2m;Tm5Xun4)y?>o0BpJsu%dNu5@^24K
z@IR`aHEnHEmM|&rmAOti+VjvU{}SLlN5Aimq$pRR={6FNy6`yM#>*MCp>K@M^
zozc24gdg8|7s+WrA;l3
zJZ*NKNuEP_=p2v-&yc427j|Evj6fj9$*QY^bLB@>GsJYwpBa=ixPIJ7@b6)nO_=o<
zdOk72OperbXEs7XBK;;KdIl!nW+CkMH>JONG
zg(6x0;*l?|(jA;J<=OfMusOYf5NW!@Qe0dpUw%+NONZw&M0I<%-wQ^MdPX<(&@8bZ0Hvkgd*;ZuGngcRn#LQB4-Lu_yRmgOo#mhNQEA
zcD>vojpuCo{;4RV=J5jO`No_Q+cUcVd|m?oiy~m*ID_pO`dk%Qc`<_=<3SejRHsz&
zGxP84>CyW`63bu8=w$~36%QvH$+S~pYlG^Q3306&LxcEn($3_;iN^^SF`n2G8Rd{$
zYEI^7)sN4egacFr`v+W@AKl6o^N-}Yx(K7-_r|0=Wn_L&HAmWye^j@
z0Vss3ug+HvsDk1*aCgtA*jS4qtR*$`4NRZN>~4Iu#_*XnBR4}ofpTmHGFo(~ABjJBcn*Pe>hKCudEH!`B?^z+Ja8F}ura0^hM!#A
z4QS_IvVKzwj5O1PQwC-FR%8$b-DawYj*dhHc@QR
z@fo>F&AaD?VlrxX-RGO8dEI%pV=L#3ILMsvYx7vN4auU5v+Cq5L3ZfHJtMD{S+5!h
zcmmb=mXD9$J(gp0WE{A*!oLa&3v+UE`Uo^KR^Udde@rE%;|g+Z87!naX=Qa8Dt{!4IF-FFo^aJ!YK;GiUtzA&2Q;dS
zFB~S?cmwygV;sVCrZ!GVk7h}tXPD?{qi2-N5pw
z+2K*2)L~<~^Bi|fmw)dwJai6D$NlDJV5|FpQAH22?$ZI|P@bWdJ@@CwmA~NQTy4y%%5fe{zmubu{C-IPp
zO+NKU7)oD8B|+;=b@_y6B~a0d>MLpQ%!grof6}|FbfKs0ilPvy?j$;
zc!{G)fwL*o9$WXPu==PzV;ymvUwp$}xFruaT$8Njd2VofDHoM)?B1)VB82&ZD2C@<
z4%#pWcq?ARTUP^&@@6U$*U#@&!RFNtU+mlI##$sdi0n$pz?Nm;8ohuN^
zuVqHnnIH^G)z$5?hT~u^`H=s3(841RK(4?SGzQXR7JAf`IO(8AjL5os5cQ)usT2=7
zKbVBP*=YXw)`=!rKyCFg0(NMGPWJ@vfl60VSQ4@1d0ycmk5lgAo|3g135OsmYLRVNoUX_{67uN?g_p^7L0NjdvcfZGx0B;9$vR8L7DO~q(*d8I%_m!&-
zK+*}(B5a
zeH&jJ6??|JCudf=yIXO#=6Ze3IAIym(<}3)wk1*Gv5AIy6o41E-oylmW=ib=;d$Tn5p9;e5yF|)6keGcURZVkfbILpZ$KU
zcCv-v@u=)tSITqGPAiaGeevrnggZP}cD72iiBw>$+o|YH%Y~Ip#*U=IJ5{~A(M4jy
zM^`iF7le!f?-lXx#$loM4WvpNF)SvdFdy2m*zX7X
zR4(vi!_qp7$l@zcTLe(>l?A6QJ5KY0sycLC(0zVNOLKY&9w=4r0{`b(9XfQ$Dt@f(
zYKnWXY({7!6Td;Y!xvwwB{;;?_KSsree=ir2x|e_KK%*C@TY#J~7+3Pc
zM~;)6vc#W=(9yTIq@b?on#x%Gqwl31^2i_98@UXVSmhQVR_fz^G&UAVOg)~d)Ot+%
z8gDaHO|yr0TE<5u)kGF5G09wyQz^IP-3a~pnr2I{d*IEh5H$Gg+%DAHqm0Q9>+EnG
zxqQt$lzRBu5s|3Xty8rkthaM4y!ptHD1vHm!un)UUS;+DO`==ClcUqlEtu{)(HS&l
zNt`4uu*?O%0L%Ue0a||G(~|dned$dlQ_cV||`MN^B6!a*7*NkzTkj&OgJ}~`cc!J@T
zLwzKU>Ux_mc>`v+_zC%I*1m{_(nAg=O8)(t+ta%btXE0fv?Rh
zXNC^AHpo%#@`jAnK2Jz-eea7fTU?<4dIL^r1yTFn;S;0Lu~)Yqy>~aQog!L7;BE$W
ztka58TNzaRgD{OH`&V|9h_dE~2M4!!cahPCZB)TnpL<4Z15>>>B5zi=UU#$NXOG-O
zYY;~9h
zuTT`&Mm6#KxZ*vWc_o7PKrze{s@H*DnA34rg3JroiW9
z>KZPil*V6PK@E;+n&G;sC9ZU|g!E_)Ms!IXnpa{t=vWV2NSLAdXY1=7=?g_fj&Se2
z5l@nlJH#;`7;;meZ>N1_ywRvyJP);v@ccobXb6ah0!iU=6}5F)S;FZZ*g5!3Gtiu#
zPO)LEypzLSf8k6t&UniHa-X;Tp0~({@D^v&?*HNKt)r@Hzr|l#q!9$kO(W9X8%2-?
z1*Ch^-JK#J(x7xocS*yhOG>)CBsbl07y7>EeBTq}o;%L{{nj6AxQB~X&wS>b&zzq*
zSBx-eb$7y=kj_XQv97i%FvHmRHF(D6UJo#4qv;Z8qqHTKv>RMo_>ejnY
zvcB0X{W;Pn0#3a}cyZS7?n;7JK8@?_$IIM%yFvHmULsw(TU-mt9sAhM(cn=~V)3p<
zoY;wz@wd@xeg1W>ebH`#3!Q0+GIYHUb(0i0n+~4MCM%@WER?0>-6P|f77<>bez>Q7
z+kAq0_+e&5E4DiT?+LlW#%y&68s({*R>84k0zuna5=2V6kLG+_$l2?Ffh`%wA!4dO
zpe}ES^Y&U+_qmI%EH7<^c_#Hot|IcjSVxs+qeiVMk<2&YTs(e)X9chKRiAkbwnn^z
zcJZ{>$sal5F4<-0zepR$&Z;=tB6V&&27^Eroi;1@R6b*~V&*N}@Fxk>I|Jni_6y%z
zFZc*RCEZe^qU{~5j>oY4`n`DFw92}6h-_Qt|x&B}JJ
zQ=DCB6H0pY3su_SSuCYXm)Elg`5i+%t>vCPmMql@dkYhya_U$^P<6!B)!K7#xpzf4AfB)xL_+9dt+DU0aa0b!Z!6SIk!>@WMU>nOoV
zoR>soZxo#w#zBP94VDCcc-cSO4oPIZ&14IIADu&2!1LWVBQte|-~
zL&)rlLwzjaCg&3MURPD}I`P(;eb$9vM{cTU=9jRm2<%}IRPK;XB&)c=1S*1VmfQm*
zq=m}Zc^{T;_q@dI*Xv*QS&h!y^}A?U`T@P({Emz=s|p?U7-cp|b81pX()y*K=9|Gl
z1WTRP4&g7IIlFR(#k!ppi!}DI@8C9L)$m{Ja
zqR3Y*u20WGR`T}IKR-+vVAAx8%6Eq`=VM1htsgnP3NBJ>{AS0pQBf9B
z0aI+3fS|+I5M5n?aGO&d=_?^<+{;airtYB4bLZNn03TC?tmNP9nw_BYw
zi>d~Psd0?{!E9SO1j<#fdet)y&qF>N!c(`q*h_g@>o;)U$#Q1K$-;%d+Zw~sHfv%v
zDu|QFHbUUpDWQbq&+Fl;C)2W*p2ZupQgn58YB#f=p%+kpu&#L+5bAA?OUpmw(F3;}
zRCk;c%SIP&wtk>nUE#hZa8w9%<0-iutc~cPu;Hr@a*|P$^5IA-9xA2wvLhba$JX1%
zw^^kc+x7Z9KKsIynEYklYE?VJ?UF5cCzYvXgNwxd((QbauGa&9Y|#Ttl1nN!zC5-5$XLMlYc;)1XWyycIlqI_CR*k=eb=fI4a{e
zd~&%?f-n9}8)UO;p09VCTi%~aKcFOt1vAPwimp(mS6NKWt_Z$g+8pcrDxv9S%*(pDVFb3FD!#$U_##y9
z*2Pp%9X9rAsY!^hGOmG#J~07x>}HSRvlB?RBAgG)`bI$#ww~e%!3yjUxdV`iWs*&X;!~+$Y)W@_CR`sY+Xx7zb~O
zLDXs$;T=y*(U*Kz(q0oMXZDox+!haY0-LtV@(;xk+0pf$WV81Mnb2llzXvtj6Ups0
zs4AO07JmMIPxiCv<`vOi7v+tfxu8C84X1&jV2;IX9d-WZGZ*aIJ;FUCPi)W9K(lha
ztGLuv!zINF$u;|veFFm!>$g_7&y~g`ZXqkvwy4k2MBV3*#UXI4#>_ImU6r8l%v6Y&1yix#h>1=EyIU3h4)z;uTKKT~Z*1_s=ox+;_j%h9B*HBwu
zDuRY*3KpF2#YrSJOt6)v#}!Y^Dc@sum1E));3MeO=TLXpQ$qaGuX{h8!xtGYa7S4A
za57C=Qp4`lV&f1pwhTMYS1)q-$rgjq%bh%ejm>2odgrFyKC*lec^tAiTlm!IKG}^`
zquxiwcSD2GErQ87$-yY4dCx%8%qk-PI1#(4on#!&_~tg*L*RuDx03Q$bL1$X49?TH
zFXiwB;xKHiTB1)K2%2}sjJb>(h2c~*B&8x8hdaGeo1H;>OZk(`Q;F^i+Eez9*rhkw
zdp7&W(-mWh+>DnbVE)tr
zEaAMD-R3-3_hk+g2rE|dJJ0g8LU2WTt>H0Y6G858f~Ix9zDn9EqqD0+dfg
z+R4@c2mc@>zoiucEyB6#QNLE}=uTV|-@r$STm3UDO}UwNg>qPO>+|0pFpJ;hs~<6Y
zMS~Wbgp4zXO*4E5jg9*I?R!Nt3xod6l|f&$5}YLjWq5r$A*?=~y8*gH4Mx67fZSBz
zN&ZNrnTqaMOUG%;4Zs6iaco3Bk8K`>1LGFmwkwKW;bp}~cKAR!KzMqy(PFNj5_(75
z_DXRLjF0A%%D-RXlpE8N*c@&_izFzdW_673kd`FL88}7R6r7nz$VDYDgg6!*Fhpz?;^Q|OkKu~>qjIxy1Vt4_LH-P%60he^bTDR?6P^b
zX=qbS+$fQ1L;TS3WbrN6R~wVct5#*jxU8r%pr3QB^!BPC-%U2Fl24^?emmJ}kOJ);
z7T)*i9v`YOry(1oomk5WW(DeNS1xuXjGg9CH~CjRkrQhpS
zlvVN%YroET+}Fqdh5JLU2V*ha?w5T4UJb`b-vy^IGI3H&~$I|k$
z&%_Wfke(d&4d6H3b38U)eX==3FXaCX+qv>A4j1XlW7zgv>84D7TPRMQU^*1nGk~1)Dc*`ZmS>d
zv^%#LQpbA8D-rqKn|B>%hy(b8U#+_*g9cYMVHQ63JhzRrxb@Hdn&*O@V8+pHh=+%!
z`$8zEP(QJ`82SB$6dL{^=d*UC5tE=L*eG{(^+H^vrs7oNxff6OS<%_r(}vWPzHjVq
zuNW;{d>&h1Gf>MP?-Ov)S<-+X&RLTF3SGcw5&gR_lT
zjCtnOFcT3aPtCG@eB*@nneMO}@CHshGwV{Xl+GmZyk(Plf@l*g)Y+yRW~tz7a?6=0
zF=AZsg0S(Gv{a<}&R5jVY}jSl=EhysNyvYE2o>)@x@rwZ`+QF5PzLs$jVm#3#cY~*
zF5vV2@4RbjK;_CXGMo@EZY%*r_9$ydefh@G)qvnT~jksycG9G
za3pNEZ4fkqTHTGo-_r}$g!OPYrd2H9^7l85InkHK0wX->)^qA
zU>`AfyIAs>*WUDVZA3lfmr};BfKqtSm3L9xS&%1rvwjgX;xW78c@A;J_M_T{T4z`)z2FDI<84Hq_7qR8N)S}D^lrLFCfK$6
zRd(m!tg$(nv0~H&AhY~#$bwu_a=3tN8;-gv1ZFdAeZq|
zT^G1x$IDef==55W)eq9@CiTZanzLH;i);lh*hZow^KWD>d9XoV9j!?9=01&O<@l
z6+QOOYev3d_NMErk|kW<#jNLTT^z>
zW)k$g?`8TAi9Z3v-+Ox<#R0sFyHAW^0}sFLqo5)y(uxCU&w5*cxWb=V@_T=>Esc$>
zB8R33C-o5LdU6H)jXF2OCq`kRP;Dy2@COeY6Hf|xTv^rP2lwAE4Y#f_zT{-1ZigNh
zxQQ;7-w*MUeHGg0Fsabym|asYWBAhk&z$jSDAz_Qi=kJr->aGgs@}xJ3~@$phJbkx
z^Y?79=g%`2YEP3NJ6g^kCAAWeQPNDZ`+a+Sht4i0jpw!{AmQk=@KgGO0^Baj`CtD`
zPX$DvPy<0zKs3p3aez0M=y*qnOXTjj|F8QvzZQPaEVD`Auzzk@x(~wrAGRp(cNdcX
zJP!DECB6)d%zxOn2!CJup9lY+I4O$X*_ktBG@bj)*{S_MNAclZ^-wHrY#3V-e-9ji
z>g@k-o+;{8+rU8B!^Mu%hZhz~7$#Xb0qgmDK>mG)@nmmr4}*lqb(Nw$5R+tVxxml#
zzaz5sLQ;IMOi`O;v{dDY?ajKP!UU!?otpB$34
zb#;g1s=7o;kHSi!SJLoVeW8G~{)f$9%){e49cvRf7u>-L|CI6V>e;#y1}zmd{=lB1*ph(9R`&|m2T|fe-zQf{ERw25w~Ex_s&by
zy+5VasU+Kkg_-Ouijjhf(~Ip1()Bd#`$CoUji$Of05t8ki8JP
z{rdxo2c_%$d%f%SobQg|+{YDXmQb#XB??~VK_S#xj>T?Ncv|BVExs|!!J}ORb428I
zH-`}t{-rAFZoGzPe*Ewcy68?|1=K3A*~8!2ZJ?`=3uU26a+ubViFh|QH8ssou^>+M
z#?aTD%~*sDaKOnfo~M@Q2YHS97+5~xR}g37p)U_nUsJK6`7z~1^jw-V=0u?1JtHx!
z=9x#cF^r`vxe?uXQ!&MfpF9OR-dGj=)n@Ej(%}g=t`Kvg2Q2<`T5kDdMs{j@woO#c
zCS3K5yT%Nh&>^{S?9#o{BfFZ9xNLK^Z{x10I~z}D$gxO^GwzC-21a?|MltP};_(90
z1l8=Lds#SHwlrE&3e}uEe9P4bUmc48gDLlKLt3-7i2c5B<-yAI(Bl6wQqxmKfMY@N
z?~cX$OW;i#vK==5AY2^Q#vYu?a}^9a334c_&0CmO{yMZBG@@2x1q!lAIyl=?n@E~)
zCyC{)P_N@l*#nJ}5F#g*-B5;o=hmE7eJ>GGbG$QQ>uLiwsyd4z58OK<+Y6)jZOF}-
zkBeGp%Fg<0NY`c+$6epFK4|bLnWCkOTxISD2MB|&z>^C#|7f@+`-jBQsp5Jj7&H2Z
zgp!huUaMhlI1pCUy(d#>g7|}&^^~@iJ$^kh<^P`YRRNY+GLW<+NLZm~%my8-ORFX@
z$Ff|Wj{A{kacpe6|
z#w5#4bK4bZ5czMDY75OHQ&MqoZQZa^ah&{`V_l40RK1OvwCw)djPLV=t?D8+PZrBP
zOoX?9qW}*$o$>5RnZ3dYT(i$T4akRM2I2>d`!{gfRtYNi{83gvIKx<~BV-s6h|3z|
zMO`DkrZ1SQ_aRAE_)WF(Q)V$_J3J7qMA?(M^`_EAspBXr`%#0(usI%1%H?tzE2g%0
z%AIap)!(!Ty7`+_B=L_^d)nQ9@J(b?!S}_k_a|*N%JP->)~UH8g(a0YHTh-L+KOUa
zSM=SHr3VEzuSfCoCLZ9do)H3HQ4e>nja;3b@pUqxJ2pO>%W3?)paaKeq_3&L1l$Dx
zpo6O)FMaviNWX!*{+%^ul^{mbFtesJF#^AWlD~TPP9c>)V!c7|Eb~)m_*vHE9&ymX}>Y&FzZZuRcxDtf#A8Do82Z`Vqr*+mm&G^GNty;RWqCN({?S#MK`dv
z9yTvc47W(Yj-e0@u9+9pmyN=%T--)vGS<9E5oM~hXz}&F{SvEeb-cfm!~(Io0TI~b
zD0EMF7jcBz4(n^q34S97fbbFh(#T_ErSOm=&PR`tl9#9BKYgVFu1l#H(g^jx{wbopcA5pQ58r#V9g5gcSZX+QTqxzyayaulkC?NRXSbAn4ERfbxsXUF!ZIj`
zfGdI||25K16Q6+<=xKxse8~4|b#++Qs*{CdKUMZd=6x4!GMCcn%22Sbs$?;>r!a-1oE!ivcmJ0UNNFtzVkac!-Z#Q&T
z4U}k6j4ynSlE#N^W0iieoD?km!%$Ux(&F;_^m8{e{_@eyDAm12xSwx3j(U1KL{cSs
zaY=X1y9?_Gb{gC~e|e_l0s-3w(NFb0oZ?SUFPw`&*U()~@|+12G!T}_5m{d^bMZHV
zZm5g358AuQ&KQfi9P2AEG=nqqjR6
zkvVPlFlmQo%B8r_{5E~GSuk~uXMjt@v6k}xih^D7y1pp%`?!f#PF|2xU!T~fo3!gi
zevn*AVQE1yv{}rnR2H_==yE87_$Z7}Pijrns#FE)nkK
zVtGF^dy_FPx}n|2u>DTIH=}gbBj?k)h^DMQbIP?PvlAG)H77JvE9&bdIFpY0sE!9SpBw$;O9G-|!dZw4C
zGU!Fxvno1aRgSzwyb+EVp%n9WOljd&HT^R`?TVxsytvxTvavu=$*wfIs5@zf3G-{a
zt;C!?G*M%dgCp<4QFhi&b7n)cJcurjhJfnR!#qfTp!!+hRBnTghmU}EsgBG->9@T!
zKLGAou*j}&+~HI9DU12idv+_Im?kr%@O|EIlXL1k(@}?|N>N)|?v$4USqI$&3U|*{x)Ks{5Em+Z)c#v8jSrit2dSgtKpxzhnjXAY&
zA&g9qflkIqu`}&87KTcF7@XUCvnTolCPQ-w1(F#zE;IrPo!-IRRa@Q756;BotHnw~
zwzn6OhCPMZpRcDA1k)mMYADO)a=B9OCJ|(Gk4B)PJtBKqwnlEZ%#wiUi;Krg&
z;(QM8X}nqC;l)Mw8lEJLB(~!C>*WR91pP!sAHT1IF`-3T83MT9UH>;1Rr+$(h|1kd
zTGe#5gNMAYy7dD8j*~Z-ZS!Xs0EU^7ND*Ow0e4P(4#y$^K}}fS0UgtSl@tiVk?SD6
zp~c~b7H-8TUazV7)pS3Z@`4;-rwlP|h6vJEmmA9<;)q`PG|7%&k{LOXHps?L<>S>2P
zK^3(*FRHR=AKf>&IH!(7=WQ0E2qdZ!h?tfh`l_l=(;{ScM}v_&aY@E*2hNYM^}UOP
z=O#e^hOq_y5QD?psARVQ<3ccnb^NlJDjtO<%82Z8la3XR$Qgb?09W>SL70c}Fm^y>
zT_cmu9E;uLOBxNc?}am8pcR+jm{>o-y%Vm!*>TuqJ|jhf;>o77zT_T%-!i2~SBH^T
z$0qRX*fhN$F+A~oj(W%^5^Oo6=`7(JeB=ptmaMYIKZ}nZUv8dg4?Efl^+dV5&1WMh7YtxdL)gz&XaZ+Dm2zLz0o6eP5i
z<+ma{+5_Z5?2|^kSUTmD!KV0_@MBl_flk`m@NS?jzg3_f=1U7)w^A-deI~h(3e_>Y
z^*|Z|uZ5O{BU^
zjW^#g*`g|-HRq+gcQo^aGFWwuZ)+R+M8WEZNQf;{}rW$=fzU;~Ew2OISx#
zwxqbKx7`qq3aXlL%FBUxPc=mH!=kVm%upJ?#p4?83dN!3oDj@y
z)%NWxBc@2?uQnMHO1%<}D4e$JB8ATBtmj>F8lWltR$a(3
zB7T6@yg>eK>-BN|3+aW0u0V;{N+JH(1GD9i{XM(1VlDs_um>`wDmmwMc7`PWx_GIo
z+<^MU4HwtBgt?i|D2a>^FD6FHd+C*m4NH1OZ(P~6O4*V`xFFR~wN5>nI6!`a*_81gY%a(Juy9+s8OoN*LRYM;Tjiee}Y^5*jB}FPucNd
zA6D<#&YbgwQLYyqS8aR(Vs-A=4|WOPmM?~lh}7F%WtFR<2n0kJ_7FL|MtFQ)>n#?_
z8BR_dI_l=c=}zPIh8dyjWZvgAZ=sN!-%%OgzH_m8sOo%eHR65>3R+Dmtcj1Ax^0yo
zF}ZqO`Bi%-`V(|e)^Uc1DL1(b{z9z$@Ro{a5$}Vl1v4!xGd+*c$Abn=2BDR!`)^Ry
zRB8e4)==t#m6^J5*tzTl(JYilM>ssYP-2q&8t3aNSbPY8ACarF>zE?Y>1l*g`)jNS
zpNeuIpOjBx(i$${iuPa*_cw_650q|1otFsD7+v}4UqR7%q91T1|L~UyGIX+D&k)~B<0eT9lFhZ8?zr|j(1o-h=P2Ic
zHTw8Cw{Jp=-KjG4l!-Ae%UOj}+Q(kdD2;#7d`wi`8(hsJ0tA|a-(%B6yXsP*`y}<|
zoXW%xZw`F;tplBHZd=0^SxMg9!GaEzx7mt}x^U0lj$G+tubvKAua%H?oncycQNmez
zPlYjC%uQW}k|Z>fc1^Q}QX5hLp^w1iNRNpM=dc}#Htf*M0Eooidz;2L93xXM;JZx+
z;!rD;@%LvMT&lHbud0Yhu6TBO6{XM5Z+lWK@`N}ALbG-i3q)9GT&9^2gwwZAF;;SO
z%lcdiIuF`9+nN+`)33JzKI*O_eLNZM=r1y+(bFXI`Bzv9-gj+L9(!1-#QtL9
zVT3egP5Kvm3!v&iMfyBWvDWO4pONl#k1H+B;!U}a^7P^;V<_2TXjunS#RSl0s)y>q
z%h=Ctz2HY9cOvSmweZFJO3D?u>+Vnxy{+UsEmq3;*!g25xklgTS%mcR(0kXW5x&3T
z2!^(N?^>icx7&T5w_nH78Wz{b>8ZNxJL1Vsg)O{97PJq^cYvZy7(j+L$Qyhi$e^$Q3%jAifBesA};JprJJ9y@IPC4DZL3(n!
z6Nc}t)id%oJtNYH^`rvK>La=`I<2lXxB;lTuz{VUJSRsNONCzGll9eV*g;q`?b^%w
zH$SH)y($k9
z{~Jdt*@&FtkN4eB<>;*%iXYF7%u8EF=CW}M*KUGNx&4Dja=XB3Vw(mJOyr{ol&mH%
zo=rxd2Wi8Jb;!d!+_^K4g`sh6pHRQG!%I&!uCEk6z3i8zoyK@8^O|)X-@@V=>*8el
z#LR;>&xZ)caRKoX9(re1fX$ue6xdVmic3a<>1za`6K^6rP(iOtDu4q$F=z{IihDXb
zWB?SJ)Vowno9}s8e~-<)yyTMvs^1XGp#qWjH!*Umvq7l5_vNOr2$VeYu$p%^%66k>i6(c=
zuN-Ws2$?AQ502+tq!CoY_mh!y0>;^eDWDl~oGK-5mTfp3!hfxk+9UroLxLX|m
zJN`&)X}+@qtRwciP)V~gtWo^%>;=cFl4L|fx+%s|XOYp?M>xz
z3h#X$;sgDO!0eCw3JnBbWV$33?#k!O5L;>+zx?$sjZgTe^-U9gki)$y?PSLZf
znk*l`0`}EC%98n^^2^9$c(bl*&VV$Nb2}k{SZ3y|j?NH|P8ND$eA6lMkrX0)B9WiD
zOqR8RsaBXvoY!Wt3qPB#D^|&RA8J051U{!}zdB2^93#o)w=C>F%e9OxWoQWuuFXDw
zGMAuSsy}qB$pF@p2nkt#;f((l{vrSc;`)#feArN!7t{HLTqng$nN|my$A}s{j!q!k
z!hJoS8_2&4Hl|JlRcs)*-*F@o~S?!Lfsi|r3J~q-5a66)z)bS`TS4Z*Px?bSq
zIW4ofQUDk|Luy{S&;76cvWn^w<|Eb&+JVAwXp6eJ0^bPX1CUf2C-uwwE8>E7D8Cya
zz-S6E{`fhaox$ZlRX0nzp4w_sB+?EqKQ7QF2^K4w?(I8Aa@NG!{Tg=^y`rwOJ4L|i
zb0e|(-K(;vctLWrkQcxFW+JO3ztQvN5VR~r{%I)N-K+ZvS$B&YQ*v=(Rgi_K($wt_
zMN!f{aWms+9hU{H+}nd-<5dtZr$td_%JHSh4%tahXFgkzsaB?QWCK%O(uU+o1-j?H&uKqCPiEP2s9D
z#5X6S9#feh5kRE;g-dzOe-Pf4u9jgQ}Xh%@>$FWVXm?EIJ%@amH8sddm%?I~Dw7_HM?
z!mEo-se6nh5VVzi2hD~t)0lm5Sj>KRJRz+Deb+wrg-~rtS$tP(TV!T1knrr?C_=5r
z%Fe8lh>?<4ld#&F8I`dev7O)GxyHoXGIQF?Zg-Z+5j6!RbXWQK_RKzO+LpH(ql3Ro
zEGIk4&5bYuKPQi1ON38}#N*LpcyOS1aj;wfWFpyKP^GjrB}t*4X5Wbw
z3qM=Mlk8ySRXTx&EDa+cJO*l|{pX8rjgb8Abhwfq;3noO)eAfh!IZfsSOXS0yem?4
z*}B(f`_Y7_+YzdjkZA!RHX+`{xFyCHaUN`7WydcFN)rFnOvuF-5kX~iyFZ8%GXKn4
zCtmF1G^jMZoZeht6zOrqUY4hc>H+Ff(BawMf!PkRCX7UM>^6zP9SC%P+dOk&KBv9g
zK#iV&<=l`Fwq~OL+`G8?>E^lPwFg_wNz57E&{R`Rj%1d;1=x2q
zYq3X1x5G$RFCB^@WV|%xZZP@i4_ApoY(T}O6&>Z+BMp+>Yv(G^yv{uRhftRv(@hHp
zb^K1qcNvwEL(D6;%w0>v>jUQ!=M~#qvIGl6fxVOdIZCHvX$84Z=wqAvU@R*y0Uzl4_Kpy|;^12KBXi;xPaU~vg?7gx*xl=;@q4Q*f
zCt%m2O8ucGy?OxD;UD<|gsHXZa30BiI^(13Lkrlw%Z*@BgY2jO;{BGx8GF>Q_
z!`x_m75*pO9(hUik%R;v`OlM;j&wya$j&@cUbeAQ20A7m*xRjn*bgviBdY
z)>w7rTBQz>t3Sz|2h1cX?CNW)Fi?xqPcgElf!q9E^CW7tu$n(+^d=82$71hBQ@a%d
zR!a&u2=ur&^SH@(o7c*u7Jlrb>PJ0INFJkAVf4507wvjon2!d@bB|Wr$GnWF_J^N#
zXhF2x7QJ;(6@Zj6?;p-ayndD|+z0bUr-tX@Xu|PrKhibUd+^yokp+7(>mvI|5__Y`oQdaLz|-QYB4i8o$fcF$@y#)x``=M=o{641W#R
z>9Wwj4>P$K4b}h)IVEPwnV6YdaAG|?BhgXg{5%m%IC)1RVl5YP`?%5kqn#x7)dDNx
zD+q*|oSYm31lC&4Y?XWnfB^I9pOo7l*6q*Uh{6^NWH$5f9Mr@4AGZH%5Rlmajx36L
zmHt2(aG7Fedo|7d7gP-@0Ll<0p%iC)FO3uY-+)bqk+u#FnGY8eGW}jtWAUTvzr%9`
z0eonIucu7E9mT2EoPxM9jpxrx{kMG-WzVxgb}&dXbk^FFWfA;8ye}d#fXBJ=1V=ur
znZMA2di`fo2IGmfwYBHzxN6tNtJT6%s3P1JN9I37`$1THC;)T_oY&3|91J`PNz%Gz
z)_*z(m}~!2lm-LgwV6Q0fAfY>;CXp@|9i^o|HEbeOMvPB2Mp-HfmOu3+;UQz3GYE*
zku#S>5)oc!7tXnJ!O+La{pU7~h%Q5nq4NOuLL_Lvh6;yAC(8y9H9ZtLf8&k6TU`8U
zbn%bg6Mx{otk;?{oJRKvQlnb}5Y@CdNgq&?ZUOj2OpFEcPiw;J{|d7APM-9NG1oHa
zV&!(71_o2_&yYjQA{ZNj7&K$5S!o6PW?N@Ho~f#eDwv}#d7I_9!AvgpmPJ0SJN81
zg9LdFI`b960Q&Mlar#en&T)^;%o$ofA*z-C5_#Pt^o^XAl%+lUATcvru2_2U21Ma^By}5{7yheS||9g~ZcH!?(8oC6j4{dewbxa~N@sE|8|X
z?t*!T=YwQD9|%bOVD3E(F%ew4XL)@W#_oCFHQNw|M^yH&>zFBzD5fQRC{}<8w!1WN
zuh|={B==pG5D&S?lPQgOwSq;?Xc-+Hm6PW@KUH{VWD4jP9G&~*
zu`}iI3pSqZt>(!vj&uZuw*U
zZ`$cqOmJ#Lvi8}%>I_*g?F^8zi3u6VpH2Y4pz9+u@h)g-vRY6nA6Ta3sb1}C+-ph7>W%PLOtosJ
z{cYYL9P*mvHtG~ajKuun9>{BZrsb5rsTz|7yulr(5i5_sA*;FzlQi0eT}M*);YPt;
zXm~)wFZW&(h!op*z#
zNkzxzWRLDPPEwsXieO&?<^qPb`vS$-VZoy2m@uPviJ2m9aZSrk-Rykb)!HvoAyk)>
z4WfPmpJf`r%cmYSQ~naGt7=u|uOa{nee;)|L2L`0wx7O3BG%J34-H!GaJA|#QUe>48J?8l+Nrt8`l7tMocej`CZfjt;>
zL@$$QKY75@syopr>}g{Bn0L*qr2H-druLgvdi?+%?G|#d*d8WguRj5iD;W2xU+=iR
zI*tEnwHFoHS;yQQHB>t@C
z?SL<}Bplfh3n
zFO>UC2+&e>l&mMdj4jGnxFm*WG3}-Z)Zg-{0Oq
zzrSm=;_kC&l84X2jVCQ#y~34lv2F39+9=)1xnc$1_x~}PC|83th$(&YpPbVr7!)z`Q|E)!K7Q1pwq4S@W7z(T%8rD9=BN2VQDHaICQ>PcK!Oe_#
zv;9^0URu^L*3ujy4+8MZ_Cbu)9cEdr$kx|Bw?uUy$6X2ji4TrcjlQ2{2^Mc1qIGzq
zjx_iu6t68bivyT2k;|g%2J3!A`ZtK^H_yrH*L~d%-<#K!KYf&xuny6+=^E5nRdhbJ
zIZ#$Xd_45jKv-GM$%#9lnFo&y57U?V9#gB!S^g%fQL)7Wd~D~_p>)p|mwC<7HEip&T+bgcLWsD`S?3!D*
zuK|d1OSdta!H!&~W*5rVx52zzWGy3y8z|ZmK$id3=+UKwCI-5vlI5<~Ih&`6!07Ab
zS!PQ&uB4kIyiim}t)66qw&T12GKCW3pu(5rwmCvUMhY8{V~mZi2YL+6HzsKM+*6>C
zmG9T8`>X(Kzkv1kqK(05XrZ*Qw96Hy3x5J`h{
zm@LQ7hhcX0=R4!bVSbFO;>cFdCqH-bEA}K?!Zg
zKCJg$8j2+O_L_Nk8s}6xa`<9~bUrJyGbL}oSu}D?s_tcc9vp@Zv_505d2%)dQgeST
zVMf-%82QEMcyIP-6MO3&;`0|NDG_h*enIjYI0X`E^ZnrdbWOs75sV~PB~{fRudEM`
z?>fzTc0>0!FbQA1_zj-q=nhf0nbmn)$(XT_ynsu)-1W3#(4S|Kb;EUC?Aw*CWSv!F
zp5@?yw2S?yj+3$nTl)@S%1pa9KYqTPALl{JpALU_F7JFNkTrXE-9IOJrp20`U{&ff
zACs^BiH^yKDTf?2*%T!`ry!b+f{$j3uU2&<*zYCY*Z1K#g9rIc7X!+TvzGWmcQ}kS
zU*3(X*KLy6pTHT>>#s0#vN`P>=*@jcHUy8_9dbr>0KG7FGPYcmx>JQ?tuyz2OkEjZ
zj)FWA`xjZhvmUp+;1MPc>Ls(ZvDRN?Smp4tt+V1m(`5uu5%RClRCR%uB?aD{ssV-%d9DK1)x)0}I5$?+_{Ms(3nKT>#C9+%IyKSSK+dpLFob
zx{bJNmi|RvUtYsO+XnKT3Dk)u*+AOI;O6;oe~LR57fQvxaF=T>T3VwzZU!DOgXn^Q
zPDVx=MdR~1*gG5{mo=wTN(Z0x7E0xO!^j%KGE!a~>69R}N!Hf@9jg?YtYn6FW=&BL
zlwweK>}2;4Ab7u2Fml_Yv>UaskOt^mJMEBB8yV18(n2scYEXAUdvBx7EvMP-Ji*5o;YJ^pZQa8kcNxxM{#k-Y*?gg=ZiEv$
z5Sn~{U1?&9;Mu%tvg;QG4Mn&p7@rz_Epw@fPwwxVP=m)a_jCI1u1DMPioywF-^UH(
z>aX6j{x%V>=j*(?KJ|3EyRaJrJ&&mO>5%#Op<3I%H0JAcfW$%j0)oiB2iWiB9_Z+oie}CVrZXur-D2-hmw=07ZKT!j
z{J6N78px$1jJE-7{r=J3XdBrNnjaVE8LM_R!gC$@DDH3CsNzRtOnY~vyK!CW=6ILX
zz>H*stI;t^ZU@6omdwTN4x$)+cY7)br0p7mJVXS$B8%zIVvB|hI&UNqga9Js%&4QmQ#@?BS;3FX!U)Mf(=%$#Vw)qNTm}R(mERQK;`DMyrvsCeS;87LvAYBtj2wt(uuuIH5r?hoon3FhSra>fQYaxmbx!LD&_1MKqqk{altnO=uvWGx*GQDTFiAg$Eu
zmhLp+!SY|7Z=fV_T!(I%$8BB~{19v=H;)F%`nqd_dWNX?BtQ!r{ZqB-tja6q)%9+f
zT&OGSQ>z6r`LxgFEx2+)y}B{d+Yp7Q7}b{RrxK>O;Ry`90nR98?r1HgHKQ5V0bW?{
zi#)niN(3C^gqAQN@fpPYB2Oq>pNTZ(U-(Z=UUqPOd*@nFZ`TTt20ykD?#8q_+nQQ6
zNdkn4goki8*TBXiBZn!-WUASYv&2LUIbs0S^S2+%1sMietEOedcC4n^`8ZYU~S@y>iGrx
zxQ5MyCFKo=%U0i4saGC|Pe0yFP-+?3JsVe8sIxcf%RWwXNL7u3+b~QrHGSHP(}}zx
zzX(g3eK}RU%OTTCWe$T6`wi|
zzZXZ?na!PRWfxF3GgGBY=loq@?0V%4eEf?xHqo$AxWdnoz$asOD^tocFN5*g+e`En
zgd2=Q&_AZn6(dyjgv^T%`6=f4E${E_l!3%*->(e(UXm0y!inVO+{#Xs8NB~zt3>`w
z)xxcSN{*1%g9f7~g8bLY-hq3U>f2Ev2OpeoM;E!4Qol9Xsg!;nICGULrA#rBUKJ%O
zN~QCkq<}UZ6Q6f(%1u+wIq9D*@|sToQURUy#F*#{9gw&`UTXMYb21t=br5hdW7?}E
z*sFWu>hu>mCoxEM%~6lkeO&ryhL@k#QoG~EN9DH;*^Oeqt@VC!Rb&0|si6151p0U9
zC9l#3O%_3Cqq76>RVfM@J7l+z@&`P8oOJ0@fXUCbp9lB{{(W&Am|aor@$_c&84ra+
zI(N5at5L~v_Ztr-f}W{a8~4(D^*ID)vVAVcm)N|s>I%2G|C)S=I}Or&J+cYI#s4Zf
zgxP?ZcaEp))Z~0hn>ynoIdJRtBDvp=?!M`n(ptnkGp@b1;ndDWE*nekc_qEA{`ie_
zB-6-wq<0#c_T`E*Vy04#W#%65hIvc+Da`Zv;jvco+k;5l=y5a5lmFc4V1($k(Dv7x
zd^_dD+(rY>L3td@mjB^TwFL%0upYDQQvqd;Ym)#jD&b85$t6$n*>3}cf-Lx0H>hQP
zygWBBe34LlKGyJLqWGhs6pW5RAVA?J1+*qfBKQ}zI>S7D?F4$ku-a)Rc9dS^hUXJl0y59T*)Vp5IfM(~q
z-UQD@GHDk7{4Hh2IW#okKM*B^mowO|ccC;RtiqmSzI=G?yR_rBOqR>sU-Y!P{l;w8dQtnxi$Zq
zW@sCJ?-FjT^&)X)A!b1grcY$9YUH!}e==#>!j*VLG*oKyyi>o2WY8{XDaiG$>tyFK
z<7QUa)=e-Rvx=N(gA+0uf|q*TD|)WE2((YDxQM>V-zxy5NAdtal67CzK?3iep=@+l
z0xamDE;X)QBE
zk`})M0Z29t2Y69!+5%Rt@O$!vXGjNACgB30l0^gDgU)#Z+gW722
zyuPXPVzkBMt`@-+HfUweaBr8sMQ@D1@x1G|S||KdqROa6L+`#f;E1j`6L%ds{3P^M&FvPMQKQ3ti>Np8MYon
zI$(RZr~js-5c>T$RGHoX_S0z`gsm=re-xJVS?178`1+#{PuLNkrP^vZ1tw`VK6+oc
z$-FjB{z(7684GYHp<|272w^`jB#(>R3Ft;Tax+7&qt*rpay#I{C$Qc5vmxlMH0czw
zZ7P+uGjiDY&DkSQG>A;VE{0$)l~r1{Gr6(KdS5#Y*Ny4v1aA$sg9Y8Ew3}?{CIO>_
z?ujgcaE6z(0Mj92dvPpza`GqXB;l(YWmsbT&uUB77e_9WE)!K6${;Q!8yw*5+4DI(DC~s7N9Kw@~|pI
zltU;bZAAU?T?>R6LzUMen0?4w>>^;Fpme2AKEV^HHh4L?*;97lSAifzOMmJmQc2=c
z(;VaRMk|NdI7@yg%=0Ge<8Y~H6r(AJDda|+KflA12fM2C0jB^NRnTBu^>NR|czz#u
zGQoE3hn&Boqu2;72;s=S`<5v(t1i5X_xHN3@l2LKVUG8B5Okl*cDC2j(H>H9{nYeP
zN=o7-?UfAr!Cre#X(#;tWKmD`0ZMy*BqrGCsAWM$4?)
zPr1sl;%J^p?q*FVG8d2t|J87*p>Z9>kxJ`Q+u@h^@TC+Pu>9`YyU%-P6~?FL9FzK=
zj2p#i_yx*s5}&h=1&F$~~9r`w^$>jJzsMRr}*RUSGWm
znq;dA&YXF~uP*miHw38`*!WE<8>r&)EMMfQEk{7c5ddlUHi|p
ze*Tn|jmkRCog!$M91sI>EE92V4CBcLjT%P_ep`Qu?f`;6c!e6fORx9s(iTrTJ3vNv
zkRiXw7rY
zT%OrOo<5emfzKE$1%mY{t}Y74l3k*$U1@q&_hneZkZfbXZCP!YMa9J=_GZJUX0g!7o*n+@pMEG-^96i0fp1wj8)1UK_Ri$vd1|M20?!A%7_GF%Dbd`M3E8#D4rYSs(mf+<0(k@|mm9-&$sDdif_sX;yj;
z-fiD5*9YgbB=ES+{>DX54pZL{yDqN#c+R}(3>><@x+3{!-amB-UHWSe6X!I{Ll^TO
z0u8K-EE{!=a^nJv9?Wipye%=cbvWN@t%HR&=dht{3S==4-+d}j0su(Czw7Fdm;Q%1kXTuteoJdhT6TAEOf%>!bj&~l
ze8kH`eAVi6Xr=2Iej=8)wg-r-8j{bL=oCm_4e!ldrk>582D)l)EGEpeE_%{dOjxV%
zHJfn}aOq*>(p2ez!A@3uFq;g#ZaobPvYP{t6I#xi^(JI@tXx%T4jL
z(&1ZMVp}ur(N401{|2OiVk)Rs5Vs~-HfT44d&(HUr{p$Ih2f6tNL59nIx~f!5Y2);
z>f7~Cd&cZoUCs-89y4o^y<^?g`||u^$n7@#t=?JYwt$v)zA>nTUEJUNKQCZ8C1$hI
zu`l224Hi#gb6rFFimlt|Yba{XUz=}U(#w#3^_YfFlQ^-Dh84#`_>3{<)x`Dcm;IPc
z_f7|@Io&q`4$AM=ZSUEl1zpa2K$0^lE>WzDxSY42pLFhFd2#z1~72q~KHm=joN*1jW;^mZujBrew8Y?&CVJ0MCe9L-H?FgFT6R!NtNN
z&J0!tZ!QMS!^A?}Zvr;JblH|nYJVG4_ZgW_XFMh~u1l>}B>meE;kpZH_D$MVF=s?O
z|ANh5qCfLyABX9C)^M}E621HOSjR+73=#h062nF(lI`|s
z2Q_dDsm$UKG`GCXV_WC7G{V?)@nq!GHfZ8k|z41f1FHW&XD>S9JfO3GqntaGi&dG%NGBR1;(~hy$8WcF57c
z=@@hYd++i-gIdWI{LZXKY|rVc+~!1#Bj9uwf-hcUsDSzXk@sUzAL=J#ZFfef{A8=@
z^=vnBH4tZg4B=3tCBA)p+8&sS6sE>
z=8PZ3aAwxG>FhNREdXB`o8Wspy9H}FQ)eKmcN7d06WgqOf3pF80`MzB&h2KM&utBE
zB5!%AU81lO3wMa45SYnVN<)r(6B{~O6?Kl#Mcdd5yB|!Fytz7%Wf8OG*E|5Bk;CUp
z|AOct=rU@>_qfO@9WA6?$Ns=hAa=vO1PA4By7iICbyllAKnJ@P&%?q6s8Rd@a{vVh
zKL9@Y4+W?zfQ)Wp>aqYSm1_uSsirLq`wggTF1>~>27+Tl7fyjDpMTM|0{nK(w;CvF
z&GIB
zM{Pzv_+J4s+J6#O0${j*;l)n7}Wzhg!J2dI;O*W&sAd(tas0Rf|?w8vHt)>h_D
zcO?I{gg&nC?CkvSQK|op%X|C8#W9>-Z2mDoL>X%28bbbE2Z@e-WOS5H)Oxe&F(9WT
zn$e(ju6y#N_@7|IKSSh|$4&M@TmLV@NdM)1wcMSapC4G*`S`nht!ZIHLu{**B+#BRJX`nhvxpQ><{Jv_cT>=R8B^PyYct9)*NJYeMghsG0K{(mN&
zcF|k24p(o_W}FSX!u{taJZP4Q9cc^S@&^!>tABs2&@*YLW9a0mHMpxk(>kMhGFJzn
z%jW>;7{KIlq!=FB?|9#nI3^!>YM1>frq?X8WirtNz?az{r*{v4H~E^wgAsS$PZW8)S`=X3z`)&npGQTFB=%*IXDrE(nD_~va~
zsqM_So>jR2#M;1W%9lon5Cke*p-{O$WE9W@EV)m9Q0Xz8D)(qMulPO<{R62qPY&o^
zM#T*Fvj`&>eJgZSoxte{fn#^l2c^weVef9T2Ay0&hCW#*r!s6%yBQ`?mNg+QQAx03
z5Z0y*JtfE*pkSZgzy(m<*Z(?WNqJqy1oPCSOih
zB=L>PFFqEl`FeNSalZ4)#s~s>Y0ANWc)mLLc@C8h3<9#7pUs8GFt$$v3OWJJ*hq~x
z+wl|BMaWhM|5m=jZfiy>k37pJJl@ebA!uT@ywRFxqqc^rJ;vNr{eu02@C*!m)nxRq
zs(yO$6TpSpnH5R$MV_K2Kwz|CVVF~1)N9!IKF29mQk&hb5Y$-yNQ^I?o6ALVEq;bS
zQbTlsQvXk7r~jj}-vEItJ{>RK?Nq&4hM#P)tOVCl3u{J)>@U2Z+4LWLe}8|y^|yIV
z^KduufZ+bPDDaol)>v`&m(K&-jx!C4u4o^u0%1mO0Ei;UaN`U=SZe2zG%7YmC~JM>2ab-Gt}38u*-jb+8Dfj&N6%a&Epye-Y$Uo(I1q4@%}Wg
z30k@wV1cw$n%)84$Tfjc-F=#D*(AaQ
z0VI%IAND(hemImYWyx5n&84}3hqo1wsW1{XM6;sBOJVQ0m(*-G^iY5G!fUU0Fh!!AE?jzN`SF&O<)3V5e0fcJm2m-a3iQ$C
z7NzF2JF8lBKwK>Vp{VoBRqP0!p~^GgY_FPWJvJA%+SlWn0KZ%U!`u)^HC5pd&2fYxOtxP{Hrg7i^ghuyk(z3t1Tp2#J
zMdo|#U>96YLw3Jv+v4IcMC6Mk4ZGo!aU&p?gUSNxgLC|~P?
z`tqa%n_dA$?%Kf4R6`(#_^QR-jbZ8cOeQK&=-LXx1`S7fM-?WUu2BA#RQ
z%Ql4klJgH+3&y)%r%P0e#qFvz4$Zto!~jQ2@ZFKSLQ83Ac=v;I?dbR$g{?H<9ECJ;F5jp~DG{8_MnWZ?
zR|{l!R40p7}E_kI~61=LQtzq9GZuIx5V}}ZY
zF^L26hg|=Q-R|a-=6);biBZv8TxZ+=Eh61Pt#whmNUdn7EW_^
z4dvU~L_dK-&j-tip&q42?|f~0yJjFz|2g$oX`X}-qM(Vt7!bq(FM#V?vv>^A=`7{`
zg}{Vh?#p<5akJ*zsqsp_%?rkG5{Az!WAbE#5>mqp>Pj2)=P7oEV?E{63)KbLFr5+AshC{;~DfF>4>$iEXt4z88FDviM4Fhwi@=x~5`WU8mvq^-z}q
z$+8|C?WZARbe`zORYVr6bkODMR!HM2T~>7F%Uhe;sTj#1!uqPNVRL)Z>_qhI^G{`J
z09XcD8MqT0Qs30y{pj4N(NI5B&TIa&Vs4UilTXO*ce(@dIoa?IOa&i4&4iL=5
zP^vM!sImx0TI*E>_%hKhFzY-Xu(J~}@XGo=DaUh3Kla(Y9qlQ12sdSk=W1R4s0mj4@nd9BZhhA>&A!hz2sz^&-Da8NUmr^ik!1J8CuV5tYLt
zG6jhHfy5w%t73SEQ%wedVS4Lm_ha>Nq5#`i{87Yx2koyrS6rvd8^25yXf~UDNZrCY6OSg`qA&#T%vcLPAK*{{b_?ceatq+5uym}R1wP3MHozICp1ULTE9N0hs
zy|^&;L7*&Xe|3Cuy}b|hY?_jhxNxX>*Une8el*TDI?k=X?bbNQuNzSzd%N80HS0YS
z77zaX>4F-t!l8>l%Q>9Y!PsWL&#Ju@n)lfHQ_Z)~pL=pa`J|t6Uw!ON1P;I~ALZ)N
z7culwOpwOWtUFFK4l;uZBcw%A*7q6wzQ+?
z4w^Y{P;FuuZqmP86Rt~W=C8L}_kbi!Xt8fT6ymy+^vzq<_q(;-!Y~hOr_rNdL%Gi8
zvO*iPswQk0)(qW_y3T$hFKw5b*U11K91-WUV`23Pxkj+no2!1+^l{(ax(3k*uq=W8
zryYdeKP@2{fbcd{ph%j?Kq
z2=r(=YXGeuwst;w!2=3nuI6CR_W<^I&Ex^S9H>xhR)&*q@w+bt6l8fkOwr1}
zIHw$aHXa<+it%(^uNU0@HCV6=;4-;%s8D!b#i8b{`*wCF4Q%bYiHu4I#r97irahzQ
z?VYbbH88h^-VyZo876x+yXKr*+NV6V{-oaD=Z?ai_C;lY!u7QtEsviIX1^}Q7rzRPRSueHw!Lr3~Qf&BqtF_$|rEjdDjjEAPzj=jG&aok~elxJmuUf5J>
z78TXjCboM0{;1$WMvrf~myMjdiWbAcxNyQ-`OOUP{or4Jf+f|>S1GC1E9_>@37!j}
zg7jFJEWCJbL{a$+JzO*R*uO;*qJ`on3jGu8Wo3Bi?)lh?@
zp6w{LS7?^lHs}j?VL!Nn-lcMkVCu1WMpmKy0d&UVbiuIH_1}>&9RC#wLucLeEW}>z
z(@HayNkB5WAj^-!@$|)t^G~XifI$5@fLHfv4nHhiv!Y_%Yin>am>G+&Tgj=^rxK7Cpkm7@7jffzJ*vfFP
zDi*{7%Q8(=}xwv9-Ky1~L=g?E)(VikThtGIRU;Pf^r%^nqbdW{r3C$$T
ziRGi@#Os4I#bsT_i}Z8cUly(=lAUJoIugZTiITY^aM^_(GU5M_+h7dd#w
zXBxEPK3{1?=pjz@n6GJl4g1TSzim>rxcuuMJRom6S0BsYaX|X!Qwr&Y-+#)iH2|8q
z|&B+qZ_2qNaFebt0;QEcWKnW)@qQ53n3eUvv?S_<*>3vjQC93LT(9Y#Gu4y#2^_0h2qlz3j+50}_#UK=>8UuC
zME0Dd-n5OKD0j(U$7-uQ%401m#5<+^Q?nueS
zR|ZJx1(p;?^uR=gNba96!~;?}@glur$)XCfA(iVPafzDPea{QLaz5rzv0Y?-cP*|s3FMy
z)U7A)MN9ln0kFs~UGcmgWTQ~ztOWM*{974oA<^Lk^uiCj!b$^=koM2=k6uYiIY!^O
zH5(iZbwv&M=TP2y@vHh#{VxXJ60bnRJYBB`H)aAOzwyr6W}iK)=*btI`ugJmZ1m3H
znIECGc)|ND``7PDnap|B#Av_}!~pUPL@>3X$Hj}xa!r93VgXXoYPHIxw8waP6LNHX
zL7B-t_e+X|FH{xc~8
z+*I$Et#>$Y`MulQV!WI(=`oeEcFfQi;q;<=uYts~?6UcvCS<}J!;l;nMXxuj5;POB
zZU!J4#`Ezb4W6snDYa!Rd#9)B@t!DO$2J@Eo=AA1PEhuefu}%;uKuYy4Am{plRrR7
z9N=I8r5yYv%;K%VCAUMpru9AZrIN+s`*qDKD9^K|rjLM33!xZF0*#ybvaPJRY69Fy
z2$wVSJ1;X}-$o{)xm3QsL+a~#=p~8HCCDVnZiARyoV!xJB}e}GqWUZUt>@mO19Y^o
zzT1UktXn?LE7rFyI8(q+^zXP+Mt(=w`Ii13>B3u-B45Q7;eL$YDww$0ejC<5Y}q4Nz9O*uxILg*`+%
zKU-CWPV0z!QF$C$_fQc-7<&%sAdq)uol2w3J08<%_#|J<%aYV~2YdVvygHCw%@wk9Guh|8>xE4AAFhzP&JxNVwfESd
zC?%CGz89TiX~d7Lt5iyjmO_|jhR@dKL4H=|<`v8EDM9AFDcxgEmR<$Kgjv$E>lX2r
z;Z7dusYjw&5+ib<^iJZkY+2!^mbFKLdj!UPe4xJ&botYVDlt2f4kSpqr&d@feLTJ&
z>ts1PoH#GV-#HtHDB1{66+mMmhBc%h^?CE;WVK*9hxR7Qxu
zL~QPRNn?{Xq)%!*GMt=44Y9wmgWK~Rb(PZF7ML(fbToO=5X=-cyCi+ISc}b{Q-IA9
z)YbukY~&T??NL70+B-MgY_)7_9G^pQ)yJvG-vA&=Zyl$fG~!H`mvyJq!4c*0O3*bJ&|w
zjM=Pcv;=D_K6~QeaxF&0)S90#ZBv1m_m!C01I%_@xUc69+F*Z;vyJlk8M0lP61$fW
zq9GRwY|sXVr$xNWySBTFYC6FopnU{^FuGnC<1?D5cOkZrD6ua>YxmfY12f6Vtm4+JXizjCmeFWVf28>s9TL(f2)tL>2YljEPH_R1y>W`crZd
z#`7NTa3hZjvexh7nXI&iA?$p*_6`g9WzO_1`}6TZQexW2d%AQ{QOE5QhM~HH6S2==
zylp@7CSod3M;^6R>t>K!l3uT87(nmeZtqePMWH0yceil`56X<>c%UTghGP@b*Ht*2
zJ$+s!opZQWy3EMa3i$n@82|bU{WYC^ive3D=WKWJ;B|@sreC$FqgpFytD<)yy#JSh
zjJvJv@zD?7N|V>OiMTP3W#~j_xRkcaS
zdeLKX&6Ry;$jB!9EpG#SLJ9k1C`tl6cTt>!bi
z9(@;>!$(3Moh9?qetB?dF-|outYQPLwAY0%wzXYzv1W>@gmrX$@?-A&^tIpjj23dU
zk;SaloHI4IlG}3Q&dx6ePP1_sBBF|8@aEAzq#xN+4uZj}Sxe-@@dE?Jw&SrT6CfE%
zX1%bb9I??g^r5zHR=5QT8g9dxp_0)W7|Re8v0Jjcs8Y;>G6IME-=7};h%06Ao9FFIaO
z18R9;zOWsOef;}frP%bb0b$9K&RyLs5nLzP&f+S(!b7?jZRlw;-oLg*7f~eZX2UNF
zcPVtWiIUrPDZRS*;)se~Ml}8E%Zq`sVxA6Tjjl|bK3)D%!ch@ZNplL`Fx>(L7bARQld_UbUTY~9A9KI$hj@DVrpau
z`s=690H*SRMrOFCTI(}&CmV4gT9sT?^@PuLN9{qnZTRA9=*!a|Yaf_9qtB6%)o1k%
zSPzG>%Y#x@2`uhWaF)c^oK)PRQ8ry$K(Z2rX7d+eC7o(FbwmsIC(Hqi+63IT5bFdZ@2!ML8z-;r7)~CxU*Uz
z%5?dQu^(FwSCLhkxnetfhDKIR7<*fKo#d~|0vEY5M}9G~JExnnO$QcQ-w7*St0
zjKYMk1hus$?=#h$gz7^ZnXI6{o((P>!I(w{OA*^S47~#*>b&R37z@AmCvOi>8|d>^
zt*2`bmD=cY^Y93{Y`zs@Ib&~HOxnbZ4JXcivP5os%D6@p%3(R}Ma$9dC^Pew
z%rT-^5#BK9BwYK$`QRWq;K`=RekMnkNKoTEoFImWhnTruTC;BL-1dH0j!KX$4=oC2
z0h~#~wr|mxf-H&m^qu(xlZl_za~%9-rkDxZYY2BUl3e>@+1{g1n)e`zV3%Den@L$_237&3fB}#Kf79{@IQlRm|p@xFURL(ETx%6D`p4TC^FFR-k}j&TQzy3AIh#*FMe_)XiB`JNasZRKI?`>ldX{Wg{v5g1TuC!CYUX($=1aF-XLmoF
zzhUJZaJEMW8NoENXMs1sa*ymJd`IpPX=aSNizL4P=U#R%8a61L`xbaweSZqM{K07l
z`BNVF1NZP(d3Ae=?eCwHcz-GU{qrMTpgsRpyb3R|oYu^rVs>5j?B74P-SasA*XJ(s
zh?l9xXza3D-d?}zKdzNTosp68AnGWaxT^Ha#d(GgYPzHfUbMVsy)RpTZ5Lz8=N$zJu@vu7_;)eAaU
zLjTuQ2!G_M7nChmGoUT?PbW(H@#BZia?w$*6}%*CsMu0Nkv`kb6+dhaZR1M6+1@1}
zvm2?~*n_|jW^Ed8JjyF_m}z2f{ZC`{pLTQF`%vR=M@wzc+1VYFKfOEu>5&!w@yKNV
z-(L5e>lBQxe2x}<-0zJ(4o;=pZErmNcl!DC|LDq7C2p-@X%+rz8Sod4X)19*<)C%a
zY2E+3=dr&laLga6MqJzCC!n9k8v#RNmTnRA2uH-awF;_yhX=QXX03tURL>c=Dp9^
z?^Hfr7RcdlmYoM4$xFTj=*-){yx<2PCEvhcXu%Jhy=70U-9`776aw{FjlbQ?eDGG>)scGh!PMQLm5(z2@JT9(iiB^tZ^bFa_?=h1#*FlZXs!*YE!23h1$&B)qk|vj*T6UNkS2sp1Nr@T{T>S?mQmXOfy52dUKlaqK|p
ze4}yi_Xm;c!`pUfmx7!j_0n(n9JKWzDJ8P{V?X^H;yH)3zrh{nYMv&sJxY)`UTmk;3YsJP2-llFv
zR*w)pnQk5ihQWd>@J~S3>Z<9E2JzP<=vQwLqF9#fssalZ)FT@KYN_Qr-#cq*1cgM%ASgC6Wv5Mub
zsLRg6hqKnA{e?*KRCGd>?Sgejm~2}mz1r@O*w|QMkK|PG>;_D!umBQB(XleN9~;{j
zfQ%WQuvp$sTr1hwajG2zE*ezf;=FF4F*FevXqmC((z!EuzxAj?&JJrOU0>^D@@O`TG!BDhQS0_^yd=hTFgWz^PX1!%Irz|&~7?sx4QCp7KAiUBD7o;_a
zm4Go$!XBaf|c2x-5%Atjw(^nH=W~CJ6NoX>F&1HAQ2(k%5
zI$E-`dYMH;qK1{(k1)ZGBpehBk=?C!ti}_iDOD_nea5L@k9!iIE{oC9|`;nb0CI*I8oN3#$WEhfIFQ@H2u5m?rU{<9!tsS1yuC4SQ)CwY9R7~Dj
zD>n~a??=D4!t^ktoBxtpFnhKir&4%dMVs`D!^6Wt)Kq7|9a)@X80)$r!wHS8WZe3g
z*V-b3#pz~du8$2~@25K)sWAj&zF+j_cHVd~=P7c(3T8gY&}6pnAkxTBpvzzDDhEqNW?>IW`U7c!^`P0d#VlDmbQg@;sH3Mrh&W&-vo@!+@YWvBf#q
zZ+7d4V>ii9CxE=ZW^cs&#Ga)!R0n|||IBhGvlWEk=R3&x;73ZL(IDo;f?!zDU)`@Wo)fI7c@<`#+
zD_nJRCD{~Xg-A`QI3J&|l>6Y5z8pGfm~QNTbr32~amMfYeZWA+!r8+oq~!Q$$3oIj
zc}o`HB37&0s%>5DHcInzSva3ku!b1d&bkqD-pL&gGkYT{Wlj}R-(JlDkFcKbcvRK^bjE*i6SW|(5
z4(+r#(W(u!aXn*Wb23CrleAh}%Xv=|_`5!48{MO?pF6Ej$=#w8Ztg&h`n*7J_(K4sCPk6WB{CmZcI1
zM=fob{jEvO>djH(B{Qw#?;??PZV!iRgsN1J{Z|nc|9
zU>rwoH5jzMp>>7~7|8Q(00TLrD3O;m5XHVQ|AX@L`lz#}4M(--lV)BSKMleb67cSU
zrpjwlGrN1C?II@O?44GzLii$5OxAdhnP=vEp`8&llhE|>D7NQBCJJ91OftBQD95KF
zq3ROCG_vmUX>abkzIS?4CoV#1mk{<^!0E+jqlaSJfr?>K*+zy}>;u~R;%r;{j+QmG
zoO85O@g~}4>S4usE!fJ0KudU(h*}59GCt9v-%vDWM44dGY-Q+7Dm?Mll!_}nSjV$d
zAw?@HmEQcqxxvbilzi1!
zkYWCq2e-qQ#e$U-`XEnOKlCmIh?yIN70aO({B06ARMuk}Em$J$FmcVWN2i+^kK6f-HWoXjaZVV
zrg>G=_r9Z%ANp!-WR@+#iF&b(R$-><{447*9IaiS4TKAHR=bL4F~yw7-CElqt1w0p
zewyWQ(`N{LyRk_aOu6+L4)=n=0U~+z@N=;>zi2J!Rux#;d4oGDoV_Yof=g<79L1Cl
z&u?zV#MUWayt=&Tc??LO3)=C@s%-Z-JKt=_jX)WjF^O>$IOZ1iNE4wNp%=y0hu@5?
zf}bahZBeN1d_FN(KDCpAzC8ApvOUnex?JvOpPxu1mThn1Usss69WZKGd(g?)K^01~
zkKWiGuiiq6m^_Dc#DwZ<8$~m0!0(a67JxGx0hN$Z{nmj!}fSQ2@S{RVw7Ap&9uu;aqbKS}5c7!`+zZ
z3fvB%G#gyO3mXSXh=6Y_eU5tdvUGeo>T(>926a2_4{Bb
zv0n~;4`2;FS2lsn8VI;rE#5m_=$Oefa;J(=IB;gKxN}VxPUuIv6B;as9iFJlxqCFB
z;Lo3%DkU21m|ivOb|LesH66s5o?yf0l)_a=y4JX?y}?$tiM$p8&#>L%!ZAwE7Xz~D
zp^+qm&;=AVW|?ZsudEWVhh8A`8$u7sL)b5tVJ6-38idwPnH$Uw5UHu
zHY6=DDpHRWPTO&&t*e;yY<@=~_xl@lkJr+yhayc)akcFsYv!8IneJMF-Z?gd9C_B=
z#RVFJ^)@=E$-@KU6O~r>*GS*+b3QXt=8@cLT24-x3e_Dmk48y#8N!<)jXg>irAb*=
z-V>cKo=lT=4>CNqQQ?M66O@cYM=lqo%f257GjU&JwAUv{aWpfF%S87jj>8|?HB5I%
zSMi&S%L?}9C8C<_1TZ})B1h|FU;pkk`;}HJE5~j^y6H<;Fr^i!e24|8>cwKVQIspF
zowT*1d!XaEOpuG;3Fg5s{me~=V%PQ;mIhI%_6g_%&-S_wQIuZPM(#*ryeyv>&fbQf
zv{|f7*yP+s3cw|HCOn^45)dq8k32WMJ+oT|bD2WiYenyZV@2+EW_^>9e4+?G9`f?A
zd;E+(k0HbkH+B*|L~EcCDpmU`^gbBfavXf6h0^l@K1u9!ArSDn*39j;bRlaewJ>De
z{e5`CO3^M9FyuI+)~T9y<@qLJ;+5)#
zWr5D!P{lX=7&#cf%zztdI{O6(pWKV5{9gS7K0GY1S=IZ|yfU-1dBX@98O(SfSO&GM
z()L!L3o%|wiZGIrmYO&SGM!Ai;&N1Q#bth>kzasIi}}6s!r`&4Rl%E0)Jl=8VkION
zF%dCLvtT+rpMpOTo2~WK51)5pD13*D&Gs-gTd#sxM`OspyFGy*raq5#;bvxNj9$Mu
zZRJNl`paOvW}+rd*#=u;V)Iql_*hF+4VQKz>`8$YwX^9Olh`~FOpQGx?SutLei3M9
z+7&IcZjEAZP?hE)j8oA?OeHJSbns4-k--NAm~mn+b`QRtct7j(m8~<-3dL37x#kY}
zTGOc932P%r;k+?Y#bOOG$On04#!?^=y@kRPo?6Rc#WMCbwgk%p?Ve*(YJZ^^x{mN)(r<@Q|>F(6-l=GD90HD
zctunQG&A-J`uDYA<_0VK6X*L^{2SSu)7?zNva!LQJyrKCQd#o^qKw`wWsss0IBVGG
z9zHmSA}U@amR32f;w2AkRwaURbhnDmu{1}K4s^p(mOJe*i5rhf$6@YKrJSy6CQ|QzF&QK<~$G-O&eLnixO!hkysS7-FNVh#Yc_`
zE&oshwga$kHbp#Ar90$j!fCFX#_fl*t4REP`yDuOu|Z-f&FWGd>lebc-VKO-Y&7YwUBKu(OtzX)w6B~_0-yQ|vPW{T_4`%O
z?WNQ!5|t6X&5_&lJ1-mg4~vRk(h>0LL6T_^uhS#8*Ov(UbUMiDoxGP4Q-Q7dgZItV
zl%Jn05l{X!)2n_1X}rX#qvBiIb8fb}#KVv5bdjNkAnN3C(eEbDqtXs5Ac{(rSFNmK
zqm}0K*43BRg0L;i?7Fde2$K`pyb}1#bVq#UU6~Hjht228t;LjK*}74q>xjF2=(av;
zwdI%`3l_6=r8ysu)$P}cBSpL0)bsPG^*9ep(p*BtZjIQ27@^}wvG(3<7RbpWEdBPZ
zcP~THQNz_HQLeto6Fug@IgEt8rJyw;>>24U$L(!v~_9J}Qqk-D&SsJ0p3UvV<;-U9+M#u7>KDDPlq
zrx~(QM$mlW*oXGLisJ-xQ*eGBQk1gxy=)tF~&roh}bsHW96J6$2m}67;+H;X1m6aGlllLIVkqq6T
zZw0b1)qUelw52wz*gept5C{7*Ik8lx+Z634U@bCbtS$hGxN8xk~!dYE!Us9^8XfLV+5qXs4
z!@`J!)j>eNidA8r>}eB9d7&}onLB*x34FB&jXLYb%V`A|^l4#*EO=yr>NYnSTq`Hk
zpj8uj3A#(X*Y3T&1SZ;w{3_>5L3O$nbnP+v$0wd&UkMnYcn;3`h;dT
zE&z~oko^*|h#qSvz+>%Kb5MxPgwl`IS=)vBJIh5JSA_P87TP0fwy0aun-V%QmyO45
zMGU2hIyN(y}D;ZMVtmfA48h@8qfbSv7qq}Gwm>D86L1HMjNA$LxaI=NrUdb1lu
z?&B;psKM{Eho{@`xSBxVv6JaaAWagt;Ci&bP{ba-4SbJLUyPpf(P>M{SV{?sviKb#hR`OZxNCxN*+f55t=h+g?uT4;bk+EqERCarTp6wij$@e?@pSP!C23|!|NVm|Ie
z@=mWF)xim}^Ih&(&CK_S#>L?4;kG4aE|GpeBV;|%1Xd1H#0Jc~KH`*)W~j@PUIF|%RXYp(-RMk*e_fr<
zU?V9>(%d<*DW%;ecm!=da)C1RHCB*`27Jw!fQY^8wm=qno`ASD+XlR!(;UFm(W6&D
zg1xsOYH(>!0%0`ZW1um^YbuL>pOOo8cCz$u)<9aOcCxzT9EGIHJY)tE78jit)lxn2
zkD+aqC9n_7Q7Bz{3iu%O6Oyb2qsc6dDI1^I^UjM}VOxb$k^}qJft>ky3gvMkED~>M
z^G&opY-c#x&mZ#5Sm}}YShVkES^cKs$f+(#2+uM2S>QO`#m&uUZMw+^`iJ~rU)ccJ-vfqjn$Kpna|D`Zt%d)>9F2xKLi*K$|P
zz~?PYYrQtF+9`-xSafH+K2aMXZb07%YLG7K<7_lThg+?-PKwO50E%&_pPjGf%SN3b
zX}?Q1cNeNJmaVXIs`3GroNk(=NB=Rlc;p-zrE5`SI)~9U65Uv+85#>;SJG3(1p>W&
zpgON`U|NzqP0b4*E_VppNm#kUB8Ep)0fz(_=2^ikP~kxi^Xpp0pBAHaea^R%hWQMkZ=2#J>W%wV9Za
z-H*CxO&&zlk{Y)vc4p3Wf|JXcdYlpI$WG0SEaN}QV$9S!hw?GAK(7H_?}tUY#ciy|
z%(ZFw!Hs9S{C}`~lhJolT<*#x%Zm{eUsTSwD5FzIrt6u%W$=bdv0F=0Q)<~?%)>G>
z?`!18g}&1(6hub7ZeqR#CPd)WmtPG|5}l{s6nHU^G|7UhK2!OMXlpt*$%+Zx3=h|8
zDlAC*VY}v{x{V#7{>6nZ78s(6UX({{Ky&3B_hTj}>3kX+&<8vRc@m7a_|rECE-D?X
zBPqQ!7&#pk{2__=6vVrH)@cO8QFbX2a~B`QUYsJ5OTX*}CY`c*8(5RMVUsz9o^f6P
z-Vi%qYm~m?3@l_b+bN72*y+@Am!=r28lvOkhxu7yy(KQ6ggYFgNm@iDus4n`O?)wWhEH3b>
z2h1oR;v*Uw8m&&0A&SRR1^G9?->W~N;t6cJoi?1-(D}>0#UB(q=_x9Ud6pgVM-+GO
zyePL=WHtuATS!4+|EtTn&U*|Cz18S9Sn2lL5We0fHt2&&0vxRP>0X*Y&-8c>#J1wf
zO5+E|)XX;yZ1mOn1Ktd1Qvl=sG=4AB|G(_`TSNoz7dY@Q1
-/// This example demonstrates similarity between using <see cref="AzureAIAgent"/>
-/// and other agent types.
-/// </summary>
-public class Step01_AzureAIAgent(ITestOutputHelper output) : BaseAzureAgentTest(output)
-{
-    [Fact]
-    public async Task UseTemplateForAzureAgentAsync()
-    {
-        // Define the agent
-        string generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml");
-        PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml);
-        // Instructions, Name and Description properties defined via the PromptTemplateConfig.
-        Agent definition = await this.AgentsClient.CreateAgentAsync("gpt-4o", templateConfig.Name, templateConfig.Description, templateConfig.Template);
-        AzureAIAgent agent = new(
-            definition,
-            this.AgentsClient,
-            templateFactory: new KernelPromptTemplateFactory(),
-            templateFormat: PromptTemplateConfig.SemanticKernelTemplateFormat)
-        {
-            Arguments =
-            {
-                { "topic", "Dog" },
-                { "length", "3" }
-            }
-        };
-
-        // Create a thread for the agent conversation.
-        AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
-
-        try
-        {
-            // Invoke the agent with the default arguments.
-            await InvokeAgentAsync();
-
-            // Invoke the agent with the override arguments.
-            await InvokeAgentAsync(
-                new()
-                {
-                    { "topic", "Cat" },
-                    { "length", "3" },
-                });
-        }
-        finally
-        {
-            await this.AgentsClient.DeleteThreadAsync(thread.Id);
-            await this.AgentsClient.DeleteAgentAsync(agent.Id);
-        }
-
-        // Local function to invoke agent and display the response.
-        async Task InvokeAgentAsync(KernelArguments? arguments = null)
-        {
-            await foreach (ChatMessageContent response in agent.InvokeAsync(thread.Id, arguments))
-            {
-                WriteAgentChatMessage(response);
-            }
-        }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Plugins.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Plugins.cs
deleted file mode 100644
index 4754acb92ff4..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Plugins.cs
+++ /dev/null
@@ -1,100 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using Azure.AI.Projects;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.AzureAI;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Plugins;
-using Agent = Azure.AI.Projects.Agent;
-
-namespace GettingStarted.AzureAgents;
-
-/// <summary>
-/// Demonstrate creation of <see cref="AzureAIAgent"/> with a <see cref="KernelPlugin"/>,
-/// and then eliciting its response to explicit user messages.
-/// </summary>
-public class Step02_AzureAIAgent_Plugins(ITestOutputHelper output) : BaseAzureAgentTest(output)
-{
-    [Fact]
-    public async Task UseAzureAgentWithPluginAsync()
-    {
-        // Define the agent
-        AzureAIAgent agent = await CreateAzureAgentAsync(
-                plugin: KernelPluginFactory.CreateFromType<MenuPlugin>(),
-                instructions: "Answer questions about the menu.",
-                name: "Host");
-
-        // Create a thread for the agent conversation.
-        AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
-
-        // Respond to user input
-        try
-        {
-            await InvokeAgentAsync(agent, thread.Id, "Hello");
-            await InvokeAgentAsync(agent, thread.Id, "What is the special soup and its price?");
-            await InvokeAgentAsync(agent, thread.Id, "What is the special drink and its price?");
-            await InvokeAgentAsync(agent, thread.Id, "Thank you");
-        }
-        finally
-        {
-            await this.AgentsClient.DeleteThreadAsync(thread.Id);
-            await this.AgentsClient.DeleteAgentAsync(agent.Id);
-        }
-    }
-
-    [Fact]
-    public async Task UseAzureAgentWithPluginEnumParameterAsync()
-    {
-        // Define the agent
-        AzureAIAgent agent = await CreateAzureAgentAsync(plugin: KernelPluginFactory.CreateFromType<WidgetFactory>());
-
-        // Create a thread for the agent conversation.
-        AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
-
-        // Respond to user input
-        try
-        {
-            await InvokeAgentAsync(agent, thread.Id, "Create a beautiful red colored widget for me.");
-        }
-        finally
-        {
-            await this.AgentsClient.DeleteThreadAsync(thread.Id);
-            await this.AgentsClient.DeleteAgentAsync(agent.Id);
-        }
-    }
-
-    private async Task<AzureAIAgent> CreateAzureAgentAsync(KernelPlugin plugin, string? instructions = null, string? name = null)
-    {
-        // Define the agent
-        Agent definition = await this.AgentsClient.CreateAgentAsync(
-            TestConfiguration.AzureAI.ChatModelId,
-            name,
-            null,
-            instructions);
-
-        AzureAIAgent agent = new(definition, this.AgentsClient)
-        {
-            Kernel = new Kernel(),
-        };
-
-        // Add to the agent's Kernel
-        if (plugin != null)
-        {
-            agent.Kernel.Plugins.Add(plugin);
-        }
-
-        return agent;
-    }
-
-    // Local function to invoke agent and display the conversation messages.
-    private async Task InvokeAgentAsync(AzureAIAgent agent, string threadId, string input)
-    {
-        ChatMessageContent message = new(AuthorRole.User, input);
-        await agent.AddChatMessageAsync(threadId, message);
-        this.WriteAgentChatMessage(message);
-
-        await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
-        {
-            this.WriteAgentChatMessage(response);
-        }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_CodeInterpreter.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_CodeInterpreter.cs
deleted file mode 100644
index 551951a81a49..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_CodeInterpreter.cs
+++ /dev/null
@@ -1,54 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using Azure.AI.Projects;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.AzureAI;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Agent = Azure.AI.Projects.Agent;
-
-namespace GettingStarted.AzureAgents;
-
-/// <summary>
-/// Demonstrate using code-interpreter on <see cref="AzureAIAgent"/>.
-/// </summary>
-public class Step04_AzureAIAgent_CodeInterpreter(ITestOutputHelper output) : BaseAzureAgentTest(output)
-{
-    [Fact]
-    public async Task UseCodeInterpreterToolWithAgentAsync()
-    {
-        // Define the agent
-        Agent definition = await this.AgentsClient.CreateAgentAsync(
-            TestConfiguration.AzureAI.ChatModelId,
-            tools: [new CodeInterpreterToolDefinition()]);
-        AzureAIAgent agent = new(definition, this.AgentsClient)
-        {
-            Kernel = new Kernel(),
-        };
-
-        // Create a thread for the agent conversation.
-        AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
-
-        // Respond to user input
-        try
-        {
-            await InvokeAgentAsync("Use code to determine the values in the Fibonacci sequence that are less than the value of 101?");
-        }
-        finally
-        {
-            await this.AgentsClient.DeleteThreadAsync(thread.Id);
-            await this.AgentsClient.DeleteAgentAsync(agent.Id);
-        }
-
-        // Local function to invoke agent and display the conversation messages.
-        async Task InvokeAgentAsync(string input)
-        {
-            ChatMessageContent message = new(AuthorRole.User, input);
-            await agent.AddChatMessageAsync(thread.Id, message);
-            this.WriteAgentChatMessage(message);
-
-            await foreach (ChatMessageContent response in agent.InvokeAsync(thread.Id))
-            {
-                this.WriteAgentChatMessage(response);
-            }
-        }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_FileSearch.cs
deleted file mode 100644
index 361025c44832..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_FileSearch.cs
+++ /dev/null
@@ -1,71 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using Azure.AI.Projects;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.AzureAI;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Resources;
-using Agent = Azure.AI.Projects.Agent;
-
-namespace GettingStarted.AzureAgents;
-
-/// <summary>
-/// Demonstrate using <see cref="AzureAIAgent"/> with file search.
-/// </summary>
-public class Step05_AzureAIAgent_FileSearch(ITestOutputHelper output) : BaseAzureAgentTest(output)
-{
-    [Fact]
-    public async Task UseFileSearchToolWithAgentAsync()
-    {
-        // Define the agent
-        await using Stream stream = EmbeddedResource.ReadStream("employees.pdf")!;
-
-        AgentFile fileInfo = await this.AgentsClient.UploadFileAsync(stream, AgentFilePurpose.Agents, "employees.pdf");
-        VectorStore fileStore =
-            await this.AgentsClient.CreateVectorStoreAsync(
-                [fileInfo.Id],
-                metadata: new Dictionary<string, string>() { { SampleMetadataKey, bool.TrueString } });
-        Agent agentModel = await this.AgentsClient.CreateAgentAsync(
-            TestConfiguration.AzureAI.ChatModelId,
-            tools: [new FileSearchToolDefinition()],
-            toolResources: new()
-            {
-                FileSearch = new()
-                {
-                    VectorStoreIds = { fileStore.Id },
-                }
-            },
-            metadata: new Dictionary<string, string>() { { SampleMetadataKey, bool.TrueString } });
-        AzureAIAgent agent = new(agentModel, this.AgentsClient);
-
-        // Create a thread for the agent conversation.
-        AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
-
-        // Respond to user input
-        try
-        {
-            await InvokeAgentAsync("Who is the youngest employee?");
-            await InvokeAgentAsync("Who works in sales?");
-            await InvokeAgentAsync("I have a customer request, who can help me?");
-        }
-        finally
-        {
-            await this.AgentsClient.DeleteThreadAsync(thread.Id);
-            await this.AgentsClient.DeleteAgentAsync(agent.Id);
-            await this.AgentsClient.DeleteVectorStoreAsync(fileStore.Id);
-            await this.AgentsClient.DeleteFileAsync(fileInfo.Id);
-        }
-
-        // Local function to invoke agent and display the conversation messages.
-        async Task InvokeAgentAsync(string input)
-        {
-            ChatMessageContent message = new(AuthorRole.User, input);
-            await agent.AddChatMessageAsync(thread.Id, message);
-            this.WriteAgentChatMessage(message);
-
-            await foreach (ChatMessageContent response in agent.InvokeAsync(thread.Id))
-            {
-                this.WriteAgentChatMessage(response);
-            }
-        }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step06_AzureAIAgent_OpenAPI.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step06_AzureAIAgent_OpenAPI.cs
deleted file mode 100644
index 54019df77be4..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step06_AzureAIAgent_OpenAPI.cs
+++ /dev/null
@@ -1,68 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using Azure.AI.Projects;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.AzureAI;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Resources;
-using Agent = Azure.AI.Projects.Agent;
-
-namespace GettingStarted.AzureAgents;
-
-/// <summary>
-/// This example demonstrates invoking Open API functions using <see cref="AzureAIAgent"/>.
-/// </summary>
-/// <remarks>
-/// Note: Open API invocation does not involve kernel function calling or kernel filters.
-/// Azure Function invocation is managed entirely by the Azure AI Agent service.
-/// </remarks>
-public class Step06_AzureAIAgent_OpenAPI(ITestOutputHelper output) : BaseAzureAgentTest(output)
-{
-    [Fact]
-    public async Task UseOpenAPIToolWithAgentAsync()
-    {
-        // Retrieve Open API specifications
-        string apiCountries = EmbeddedResource.Read("countries.json");
-        string apiWeather = EmbeddedResource.Read("weather.json");
-
-        // Define the agent
-        Agent definition = await this.AgentsClient.CreateAgentAsync(
-            TestConfiguration.AzureAI.ChatModelId,
-            tools:
-            [
-                new OpenApiToolDefinition("RestCountries", "Retrieve country information", BinaryData.FromString(apiCountries), new OpenApiAnonymousAuthDetails()),
-                new OpenApiToolDefinition("Weather", "Retrieve weather by location", BinaryData.FromString(apiWeather), new OpenApiAnonymousAuthDetails())
-            ]);
-        AzureAIAgent agent = new(definition, this.AgentsClient)
-        {
-            Kernel = new Kernel(),
-        };
-
-        // Create a thread for the agent conversation.
-        AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
-
-        // Respond to user input
-        try
-        {
-            await InvokeAgentAsync("What is the name and population of the country that uses currency with abbreviation THB");
-            await InvokeAgentAsync("What is the weather in the capital city of that country?");
-        }
-        finally
-        {
-            await this.AgentsClient.DeleteThreadAsync(thread.Id);
-            await this.AgentsClient.DeleteAgentAsync(agent.Id);
-        }
-
-        // Local function to invoke agent and display the conversation messages.
-        async Task InvokeAgentAsync(string input)
-        {
-            ChatMessageContent message = new(AuthorRole.User, input);
-            await agent.AddChatMessageAsync(thread.Id, message);
-            this.WriteAgentChatMessage(message);
-
-            await foreach (ChatMessageContent response in agent.InvokeAsync(thread.Id))
-            {
-                this.WriteAgentChatMessage(response);
-            }
-        }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step07_AzureAIAgent_Functions.cs b/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step07_AzureAIAgent_Functions.cs
deleted file mode 100644
index f4ca77e75c5e..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step07_AzureAIAgent_Functions.cs
+++ /dev/null
@@ -1,75 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using Azure.AI.Projects;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.AzureAI;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Plugins;
-using Agent = Azure.AI.Projects.Agent;
-
-namespace GettingStarted.AzureAgents;
-
-/// <summary>
-/// This example demonstrates how to define function tools for an <see cref="AzureAIAgent"/>
-/// when the agent is created. This is useful if you want to retrieve the agent later and
-/// then dynamically check what function tools it requires.
-/// </summary>
-public class Step07_AzureAIAgent_Functions(ITestOutputHelper output) : BaseAzureAgentTest(output)
-{
-    private const string HostName = "Host";
-    private const string HostInstructions = "Answer questions about the menu.";
-
-    [Fact]
-    public async Task UseSingleAgentWithFunctionToolsAsync()
-    {
-        // Define the agent
-        // In this sample the function tools are added to the agent. This is
-        // important if you want to retrieve the agent later and then dynamically check
-        // what function tools it requires.
-        KernelPlugin plugin = KernelPluginFactory.CreateFromType<MenuPlugin>();
-        var tools = plugin.Select(f => f.ToToolDefinition(plugin.Name));
-
-        Agent definition = await this.AgentsClient.CreateAgentAsync(
-            model: TestConfiguration.AzureAI.ChatModelId,
-            name: HostName,
-            description: null,
-            instructions: HostInstructions,
-            tools: tools);
-        AzureAIAgent agent = new(definition, this.AgentsClient)
-        {
-            Kernel = new Kernel(),
-        };
-
-        // Add plugin to the agent's Kernel (same as direct Kernel usage).
-        agent.Kernel.Plugins.Add(plugin);
-
-        // Create a thread for the agent conversation.
-        AgentThread thread = await this.AgentsClient.CreateThreadAsync(metadata: SampleMetadata);
-
-        // Respond to user input
-        try
-        {
-            await InvokeAgentAsync("Hello");
-            await InvokeAgentAsync("What is the special soup and its price?");
-            await InvokeAgentAsync("What is the special drink and its price?");
-            await InvokeAgentAsync("Thank you");
-        }
-        finally
-        {
-            await this.AgentsClient.DeleteThreadAsync(thread.Id);
-            await this.AgentsClient.DeleteAgentAsync(agent.Id);
-        }
-
-        // Local function to invoke agent and display the conversation messages.
-        async Task InvokeAgentAsync(string input)
-        {
-            ChatMessageContent message = new(AuthorRole.User, input);
-            await agent.AddChatMessageAsync(thread.Id, message);
-            this.WriteAgentChatMessage(message);
-
-            await foreach (ChatMessageContent response in agent.InvokeAsync(thread.Id))
-            {
-                this.WriteAgentChatMessage(response);
-            }
-        }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/README.md b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/README.md
deleted file mode 100644
index 083a1c71a156..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/README.md
+++ /dev/null
@@ -1,38 +0,0 @@
-# Concept samples on how to use AWS Bedrock agents
-
-## Pre-requisites
-
-1. You need to have an AWS account and [access to the foundation models](https://docs.aws.amazon.com/bedrock/latest/userguide/model-access-permissions.html)
-2. [AWS CLI installed](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html) and [configured](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html#configuration)
-
-## Before running the samples
-
-You need to set up some user secrets to run the samples.
-
-### `BedrockAgent:AgentResourceRoleArn`
-
-On your AWS console, go to the IAM service and open **Roles**. Find the role you want to use and click on it. You will find the ARN in the summary section.
-
-```
-dotnet user-secrets set "BedrockAgent:AgentResourceRoleArn" "arn:aws:iam::...:role/..."
-```
-
-### `BedrockAgent:FoundationModel`
-
-You need to make sure you have permission to access the foundation model. You can find the model ID in the [AWS documentation](https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html). To see which models you have access to, find the policy attached to your role; the models you can use are listed under its `Resource` section.
-
-```
-dotnet user-secrets set "BedrockAgent:FoundationModel" "..."
-```
-
-### How to add the `bedrock:InvokeModelWithResponseStream` action to an IAM policy
-
-1. Open the [IAM console](https://console.aws.amazon.com/iam/).
-2. On the left navigation pane, choose `Roles` under `Access management`.
-3. Find the role you want to edit and click on it.
-4. Under the `Permissions policies` tab, click on the policy you want to edit.
-5. Under the `Permissions defined in this policy` section, click on the service. You should see **Bedrock** if you already have access to the Bedrock agent service.
-6. Click on the service, and then click `Edit`.
-7. On the right, you will be able to add an action. Find the service and search for `InvokeModelWithResponseStream`.
-8. Check the box next to the action and then scroll all the way down and click `Next`.
-9. Follow the prompts to save the changes.
diff --git a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step01_BedrockAgent.cs b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step01_BedrockAgent.cs
deleted file mode 100644
index 2c4aa4355097..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step01_BedrockAgent.cs
+++ /dev/null
@@ -1,73 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using Microsoft.SemanticKernel.Agents.Bedrock;
-using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
-
-namespace GettingStarted.BedrockAgents;
-
-/// <summary>
-/// This example demonstrates how to interact with a <see cref="BedrockAgent"/> in the most basic way.
-/// </summary>
-public class Step01_BedrockAgent(ITestOutputHelper output) : BaseBedrockAgentTest(output)
-{
-    private const string UserQuery = "Why is the sky blue in one sentence?";
-
-    /// <summary>
-    /// Demonstrates how to create a new <see cref="BedrockAgent"/> and interact with it.
-    /// The agent will respond to the user query.
-    /// </summary>
-    [Fact]
-    public async Task UseNewAgentAsync()
-    {
-        // Create the agent
-        var bedrockAgent = await this.CreateAgentAsync("Step01_BedrockAgent");
-
-        // Respond to user input
-        try
-        {
-            var responses = bedrockAgent.InvokeAsync(BedrockAgent.CreateSessionId(), UserQuery, null);
-            await foreach (var response in responses)
-            {
-                this.Output.WriteLine(response.Content);
-            }
-        }
-        finally
-        {
-            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
-        }
-    }
-
-    /// <summary>
-    /// Demonstrates how to create a new <see cref="BedrockAgent"/> and interact with it using streaming.
-    /// The agent will respond to the user query.
-    /// </summary>
-    [Fact]
-    public async Task UseNewAgentStreamingAsync()
-    {
-        // Create the agent
-        var bedrockAgent = await this.CreateAgentAsync("Step01_BedrockAgent_Streaming");
-
-        // Respond to user input
-        try
-        {
-            var streamingResponses = bedrockAgent.InvokeStreamingAsync(BedrockAgent.CreateSessionId(), UserQuery, null);
-            await foreach (var response in streamingResponses)
-            {
-                this.Output.WriteLine(response.Content);
-            }
-        }
-        finally
-        {
-            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
-        }
-    }
-
-    protected override async Task<BedrockAgent> CreateAgentAsync(string agentName)
-    {
-        // Create a new agent on the Bedrock Agent service and prepare it for use
-        var agentModel = await this.Client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest(agentName));
-        // Create a new BedrockAgent instance with the agent model and the client
-        // so that we can interact with the agent using Semantic Kernel contents.
-        return new BedrockAgent(agentModel, this.Client);
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step02_BedrockAgent_CodeInterpreter.cs b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step02_BedrockAgent_CodeInterpreter.cs
deleted file mode 100644
index 70bde61a9aab..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step02_BedrockAgent_CodeInterpreter.cs
+++ /dev/null
@@ -1,90 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.Reflection;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.Bedrock;
-using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
-
-namespace GettingStarted.BedrockAgents;
-
-/// <summary>
-/// This example demonstrates how to interact with a <see cref="BedrockAgent"/> with code interpreter enabled.
-/// </summary>
-public class Step02_BedrockAgent_CodeInterpreter(ITestOutputHelper output) : BaseBedrockAgentTest(output)
-{
-    private const string UserQuery = @"Create a bar chart for the following data:
-Panda   5
-Tiger   8
-Lion    3
-Monkey  6
-Dolphin  2";
-
-    /// <summary>
-    /// Demonstrates how to create a new <see cref="BedrockAgent"/> with code interpreter enabled and interact with it.
-    /// The agent will respond to the user query by creating Python code that will be executed by the code interpreter.
-    /// The output of the code interpreter will be a file containing the bar chart, which will be returned to the user.
-    /// </summary>
-    [Fact]
-    public async Task UseAgentWithCodeInterpreterAsync()
-    {
-        // Create the agent
-        var bedrockAgent = await this.CreateAgentAsync("Step02_BedrockAgent_CodeInterpreter");
-
-        // Respond to user input
-        try
-        {
-            BinaryContent? binaryContent = null;
-            var responses = bedrockAgent.InvokeAsync(BedrockAgent.CreateSessionId(), UserQuery, null);
-            await foreach (var response in responses)
-            {
-                if (response.Content != null)
-                {
-                    this.Output.WriteLine(response.Content);
-                }
-                if (binaryContent == null && response.Items.Count > 0)
-                {
-                    binaryContent = response.Items.OfType<BinaryContent>().FirstOrDefault();
-                }
-            }
-
-            if (binaryContent == null)
-            {
-                throw new InvalidOperationException("No file found in the response.");
-            }
-
-            // Save the file to the same directory as the test assembly
-            var filePath = Path.Combine(
-                Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)!,
-                binaryContent.Metadata!["Name"]!.ToString()!);
-            this.Output.WriteLine($"Saving file to {filePath}");
-            binaryContent.WriteToFile(filePath, overwrite: true);
-
-            // Expected output:
-            // Here is the bar chart for the given data:
-            // [A bar chart showing the following data:
-            // Panda   5
-            // Tiger   8
-            // Lion    3
-            // Monkey  6
-            // Dolphin 2]
-            // Saving file to ...
-        }
-        finally
-        {
-            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
-        }
-    }
-
-    protected override async Task<BedrockAgent> CreateAgentAsync(string agentName)
-    {
-        // Create a new agent on the Bedrock Agent service and prepare it for use
-        var agentModel = await this.Client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest(agentName));
-        // Create a new BedrockAgent instance with the agent model and the client
-        // so that we can interact with the agent using Semantic Kernel contents.
-        var bedrockAgent = new BedrockAgent(agentModel, this.Client);
-        // Create the code interpreter action group and prepare the agent for interaction
-        await bedrockAgent.CreateCodeInterpreterActionGroupAsync();
-
-        return bedrockAgent;
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step03_BedrockAgent_Functions.cs b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step03_BedrockAgent_Functions.cs
deleted file mode 100644
index ab23b4be0128..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step03_BedrockAgent_Functions.cs
+++ /dev/null
@@ -1,141 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.ComponentModel;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.Bedrock;
-using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
-
-namespace GettingStarted.BedrockAgents;
-
-/// <summary>
-/// This example demonstrates how to interact with a <see cref="BedrockAgent"/> with kernel functions.
-/// </summary>
-public class Step03_BedrockAgent_Functions(ITestOutputHelper output) : BaseBedrockAgentTest(output)
-{
-    /// <summary>
-    /// Demonstrates how to create a new <see cref="BedrockAgent"/> with kernel functions enabled and interact with it.
-    /// The agent will respond to the user query by calling kernel functions to provide weather information.
-    /// </summary>
-    [Fact]
-    public async Task UseAgentWithFunctionsAsync()
-    {
-        // Create the agent
-        var bedrockAgent = await this.CreateAgentAsync("Step03_BedrockAgent_Functions");
-
-        // Respond to user input
-        try
-        {
-            var responses = bedrockAgent.InvokeAsync(
-                BedrockAgent.CreateSessionId(),
-                "What is the weather in Seattle?",
-                null);
-            await foreach (var response in responses)
-            {
-                if (response.Content != null)
-                {
-                    this.Output.WriteLine(response.Content);
-                }
-            }
-        }
-        finally
-        {
-            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
-        }
-    }
-
-    /// <summary>
-    /// Demonstrates how to create a new <see cref="BedrockAgent"/> with kernel functions enabled and interact with it using streaming.
-    /// The agent will respond to the user query by calling kernel functions to provide weather information.
-    /// </summary>
-    [Fact]
-    public async Task UseAgentStreamingWithFunctionsAsync()
-    {
-        // Create the agent
-        var bedrockAgent = await this.CreateAgentAsync("Step03_BedrockAgent_Functions_Streaming");
-
-        // Respond to user input
-        try
-        {
-            var streamingResponses = bedrockAgent.InvokeStreamingAsync(
-                BedrockAgent.CreateSessionId(),
-                "What is the weather forecast in Seattle?",
-                null);
-            await foreach (var response in streamingResponses)
-            {
-                if (response.Content != null)
-                {
-                    this.Output.WriteLine(response.Content);
-                }
-            }
-        }
-        finally
-        {
-            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
-        }
-    }
-
-    /// <summary>
-    /// Demonstrates how to create a new <see cref="BedrockAgent"/> with kernel functions enabled and interact with it.
-    /// The agent will respond to the user query by calling multiple kernel functions in parallel to provide weather information.
-    /// </summary>
-    [Fact]
-    public async Task UseAgentWithParallelFunctionsAsync()
-    {
-        // Create the agent
-        var bedrockAgent = await this.CreateAgentAsync("Step03_BedrockAgent_Functions_Parallel");
-
-        // Respond to user input
-        try
-        {
-            var responses = bedrockAgent.InvokeAsync(
-                BedrockAgent.CreateSessionId(),
-                "What is the current weather in Seattle and what is the weather forecast in Seattle?",
-                null);
-            await foreach (var response in responses)
-            {
-                if (response.Content != null)
-                {
-                    this.Output.WriteLine(response.Content);
-                }
-            }
-        }
-        finally
-        {
-            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
-        }
-    }
-
-    protected override async Task<BedrockAgent> CreateAgentAsync(string agentName)
-    {
-        // Create a new agent on the Bedrock Agent service and prepare it for use
-        var agentModel = await this.Client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest(agentName));
-        // Create a new kernel with plugins
-        Kernel kernel = new();
-        kernel.Plugins.Add(KernelPluginFactory.CreateFromType<WeatherPlugin>());
-        // Create a new BedrockAgent instance with the agent model and the client
-        // so that we can interact with the agent using Semantic Kernel contents.
-        var bedrockAgent = new BedrockAgent(agentModel, this.Client)
-        {
-            Kernel = kernel,
-        };
-        // Create the kernel function action group and prepare the agent for interaction
-        await bedrockAgent.CreateKernelFunctionActionGroupAsync();
-
-        return bedrockAgent;
-    }
-
-    private sealed class WeatherPlugin
-    {
-        [KernelFunction, Description("Provides realtime weather information.")]
-        public string Current([Description("The location to get the weather for.")] string location)
-        {
-            return $"The current weather in {location} is 72 degrees.";
-        }
-
-        [KernelFunction, Description("Forecast weather information.")]
-        public string Forecast([Description("The location to get the weather for.")] string location)
-        {
-            return $"The forecast for {location} is 75 degrees tomorrow.";
-        }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step04_BedrockAgent_Trace.cs b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step04_BedrockAgent_Trace.cs
deleted file mode 100644
index 3e1400a5115d..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step04_BedrockAgent_Trace.cs
+++ /dev/null
@@ -1,176 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.ComponentModel;
-using Amazon.BedrockAgentRuntime.Model;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.Bedrock;
-using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
-
-namespace GettingStarted.BedrockAgents;
-
-/// <summary>
-/// This example demonstrates how to interact with a <see cref="BedrockAgent"/> and inspect the agent's thought process.
-/// To learn more about different traces available, see:
-/// https://docs.aws.amazon.com/bedrock/latest/userguide/trace-events.html
-/// </summary>
-public class Step04_BedrockAgent_Trace(ITestOutputHelper output) : BaseBedrockAgentTest(output)
-{
-    /// <summary>
-    /// Demonstrates how to inspect the thought process of a <see cref="BedrockAgent"/> by enabling trace.
-    /// </summary>
-    [Fact]
-    public async Task UseAgentWithTraceAsync()
-    {
-        // Create the agent
-        var bedrockAgent = await this.CreateAgentAsync("Step04_BedrockAgent_Trace");
-
-        // Respond to user input
-        var userQuery = "What is the current weather in Seattle and what is the weather forecast in Seattle?";
-        try
-        {
-            // Customize the request for advanced scenarios
-            InvokeAgentRequest invokeAgentRequest = new()
-            {
-                AgentAliasId = BedrockAgent.WorkingDraftAgentAlias,
-                AgentId = bedrockAgent.Id,
-                SessionId = BedrockAgent.CreateSessionId(),
-                InputText = userQuery,
-                // Enable trace to inspect the agent's thought process
-                EnableTrace = true,
-            };
-
-            var responses = bedrockAgent.InvokeAsync(invokeAgentRequest, null);
-            await foreach (var response in responses)
-            {
-                if (response.Content != null)
-                {
-                    this.Output.WriteLine(response.Content);
-                }
-                if (response.InnerContent is List<object?> innerContents)
-                {
-                    // There could be multiple traces and they are stored in the InnerContent property
-                    var traceParts = innerContents.OfType<TracePart>().ToList();
-                    if (traceParts is not null)
-                    {
-                        foreach (var tracePart in traceParts)
-                        {
-                            this.OutputTrace(tracePart.Trace);
-                        }
-                    }
-                }
-            }
-        }
-        finally
-        {
-            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
-        }
-    }
-
-    /// <summary>
-    /// Outputs the trace information to the console.
-    /// This only outputs the orchestration trace for demonstration purposes.
-    /// To learn more about different traces available, see:
-    /// https://docs.aws.amazon.com/bedrock/latest/userguide/trace-events.html
-    /// </summary>
-    private void OutputTrace(Trace trace)
-    {
-        if (trace.OrchestrationTrace is not null)
-        {
-            if (trace.OrchestrationTrace.ModelInvocationInput is not null)
-            {
-                this.Output.WriteLine("========== Orchestration trace ==========");
-                this.Output.WriteLine("Orchestration input:");
-                this.Output.WriteLine(trace.OrchestrationTrace.ModelInvocationInput.Text);
-            }
-            if (trace.OrchestrationTrace.ModelInvocationOutput is not null)
-            {
-                this.Output.WriteLine("========== Orchestration trace ==========");
-                this.Output.WriteLine("Orchestration output:");
-                this.Output.WriteLine(trace.OrchestrationTrace.ModelInvocationOutput.RawResponse.Content);
-                this.Output.WriteLine("Usage:");
-                this.Output.WriteLine($"Input token: {trace.OrchestrationTrace.ModelInvocationOutput.Metadata.Usage.InputTokens}");
-                this.Output.WriteLine($"Output token: {trace.OrchestrationTrace.ModelInvocationOutput.Metadata.Usage.OutputTokens}");
-            }
-        }
-        // Example output:
-        // ========== Orchestration trace ==========
-        // Orchestration input:
-        // {"system":"You're a helpful assistant who helps users find information.You have been provided with a set of functions to answer ...
-        // ========== Orchestration trace ==========
-        // Orchestration output:
-        // 
-        // To answer this question, I will need to call the following functions:
-        // 1. Step04_BedrockAgent_Trace_KernelFunctions::Current to get the current weather in Seattle
-        // 2. Step04_BedrockAgent_Trace_KernelFunctions::Forecast to get the weather forecast in Seattle
-        // 
-        //
-        // 
-        // 
-        //     Step04_BedrockAgent_Trace_KernelFunctions::Current
-        //     
-        //     Seattle
-        //     
-        // Usage:
-        // Input token: 617
-        // Output token: 144
-        // ========== Orchestration trace ==========
-        // Orchestration input:
-        // {"system":"You're a helpful assistant who helps users find information.You have been provided with a set of functions to answer ...
-        // ========== Orchestration trace ==========
-        // Orchestration output:
-        // Now that I have the current weather in Seattle, I will call the forecast function to get the weather forecast.
-        //
-        // 
-        // 
-        // Step04_BedrockAgent_Trace_KernelFunctions::Forecast
-        // 
-        // Seattle
-        // 
-        // Usage:
-        // Input token: 834
-        // Output token: 87
-        // ========== Orchestration trace ==========
-        // Orchestration input:
-        // {"system":"You're a helpful assistant who helps users find information.You have been provided with a set of functions to answer ...
-        // ========== Orchestration trace ==========
-        // Orchestration output:
-        // 
-        // The current weather in Seattle is 72 degrees. The weather forecast for Seattle is 75 degrees tomorrow.
-        // Usage:
-        // Input token: 1003
-        // Output token: 31
-    }
-    protected override async Task<BedrockAgent> CreateAgentAsync(string agentName)
-    {
-        // Create a new agent on the Bedrock Agent service and prepare it for use
-        var agentModel = await this.Client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest(agentName));
-        // Create a new kernel with plugins
-        Kernel kernel = new();
-        kernel.Plugins.Add(KernelPluginFactory.CreateFromType<WeatherPlugin>());
-        // Create a new BedrockAgent instance with the agent model and the client
-        // so that we can interact with the agent using Semantic Kernel contents.
-        var bedrockAgent = new BedrockAgent(agentModel, this.Client)
-        {
-            Kernel = kernel,
-        };
-        // Create the kernel function action group and prepare the agent for interaction
-        await bedrockAgent.CreateKernelFunctionActionGroupAsync();
-
-        return bedrockAgent;
-    }
-
-    private sealed class WeatherPlugin
-    {
-        [KernelFunction, Description("Provides realtime weather information.")]
-        public string Current([Description("The location to get the weather for.")] string location)
-        {
-            return $"The current weather in {location} is 72 degrees.";
-        }
-
-        [KernelFunction, Description("Forecast weather information.")]
-        public string Forecast([Description("The location to get the weather for.")] string location)
-        {
-            return $"The forecast for {location} is 75 degrees tomorrow.";
-        }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step05_BedrockAgent_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step05_BedrockAgent_FileSearch.cs
deleted file mode 100644
index 9b7b4330af33..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step05_BedrockAgent_FileSearch.cs
+++ /dev/null
@@ -1,75 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using Amazon.BedrockAgentRuntime.Model;
-using Microsoft.SemanticKernel.Agents.Bedrock;
-using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
-
-namespace GettingStarted.BedrockAgents;
-
-/// <summary>
-/// This example demonstrates how to interact with a <see cref="BedrockAgent"/> that is associated with a knowledge base.
-/// A Bedrock Knowledge Base is a collection of documents that the agent uses to answer user queries.
-/// To learn more about Bedrock Knowledge Base, see:
-/// https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base.html
-/// </summary>
-public class Step05_BedrockAgent_FileSearch(ITestOutputHelper output) : BaseBedrockAgentTest(output)
-{
-    // Replace the KnowledgeBaseId with a valid KnowledgeBaseId
-    // To learn how to create a Knowledge Base, see:
-    // https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base-create.html
-    private const string KnowledgeBaseId = "[KnowledgeBaseId]";
-
-    protected override async Task<BedrockAgent> CreateAgentAsync(string agentName)
-    {
-        // Create a new agent on the Bedrock Agent service and prepare it for use
-        var agentModel = await this.Client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest(agentName));
-        // Create a new BedrockAgent instance with the agent model and the client
-        // so that we can interact with the agent using Semantic Kernel contents.
-        var bedrockAgent = new BedrockAgent(agentModel, this.Client);
-        // Associate the agent with a knowledge base and prepare the agent
-        await bedrockAgent.AssociateAgentKnowledgeBaseAsync(
-            KnowledgeBaseId,
-            "You will find information here.");
-
-        return bedrockAgent;
-    }
-
-    /// <summary>
-    /// Demonstrates how to use a <see cref="BedrockAgent"/> with file search.
-    /// </summary>
-    [Fact(Skip = "This test is skipped because it requires a valid KnowledgeBaseId.")]
-    public async Task UseAgentWithFileSearchAsync()
-    {
-        // Create the agent
-        var bedrockAgent = await this.CreateAgentAsync("Step05_BedrockAgent_FileSearch");
-
-        // Respond to user input
-        // Assuming the knowledge base contains information about Semantic Kernel.
-        // Feel free to modify the user query according to the information in your knowledge base.
-        var userQuery = "What is Semantic Kernel?";
-        try
-        {
-            // Customize the request for advanced scenarios
-            InvokeAgentRequest invokeAgentRequest = new()
-            {
-                AgentAliasId = BedrockAgent.WorkingDraftAgentAlias,
-                AgentId = bedrockAgent.Id,
-                SessionId = BedrockAgent.CreateSessionId(),
-                InputText = userQuery,
-            };
-
-            var responses = bedrockAgent.InvokeAsync(invokeAgentRequest, null, CancellationToken.None);
-            await foreach (var response in responses)
-            {
-                if (response.Content != null)
-                {
-                    this.Output.WriteLine(response.Content);
-                }
-            }
-        }
-        finally
-        {
-            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
-        }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step06_BedrockAgent_AgentChat.cs b/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step06_BedrockAgent_AgentChat.cs
deleted file mode 100644
index b7aee9d06c7e..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/BedrockAgent/Step06_BedrockAgent_AgentChat.cs
+++ /dev/null
@@ -1,93 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents;
-using Microsoft.SemanticKernel.Agents.Bedrock;
-using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
-using Microsoft.SemanticKernel.Agents.Chat;
-using Microsoft.SemanticKernel.ChatCompletion;
-
-namespace GettingStarted.BedrockAgents;
-
-/// <summary>
-/// This example demonstrates how two agents (one of which is a Bedrock agent) can chat with each other.
-/// </summary>
-public class Step06_BedrockAgent_AgentChat(ITestOutputHelper output) : BaseBedrockAgentTest(output)
-{
-    protected override async Task<BedrockAgent> CreateAgentAsync(string agentName)
-    {
-        // Create a new agent on the Bedrock Agent service and prepare it for use
-        var agentModel = await this.Client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest(agentName));
-        // Create a new BedrockAgent instance with the agent model and the client
-        // so that we can interact with the agent using Semantic Kernel contents.
-        return new BedrockAgent(agentModel, this.Client);
-    }
-
-    /// <summary>
-    /// Demonstrates how to put two <see cref="Agent"/> instances in a chat.
-    /// </summary>
-    [Fact]
-    public async Task UseAgentWithAgentChatAsync()
-    {
-        // Create the agent
-        var bedrockAgent = await this.CreateAgentAsync("Step06_BedrockAgent_AgentChat");
-        var chatCompletionAgent = new ChatCompletionAgent()
-        {
-            Instructions = "You're a translator who helps users understand the content in Spanish.",
-            Name = "Translator",
-            Kernel = this.CreateKernelWithChatCompletion(),
-        };
-
-        // Create a chat for agent interaction
-        var chat = new AgentGroupChat(bedrockAgent, chatCompletionAgent)
-        {
-            ExecutionSettings = new()
-            {
-                // Terminate after two turns: one from the bedrock agent and one from the chat completion agent.
-                // Note: each invoke will terminate after two turns, and we are invoking the group chat for each user query.
-                TerminationStrategy = new MultiTurnTerminationStrategy(2),
-            }
-        };
-
-        // Respond to user input
-        string[] userQueries = [
-            "Why is the sky blue in one sentence?",
-            "Why do we have seasons in one sentence?"
-        ];
-        try
-        {
-            foreach (var userQuery in userQueries)
-            {
-                chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, userQuery));
-                await foreach (var response in chat.InvokeAsync())
-                {
-                    if (response.Content != null)
-                    {
-                        this.Output.WriteLine($"[{response.AuthorName}]: {response.Content}");
-                    }
-                }
-            }
-        }
-        finally
-        {
-            await this.Client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id });
-        }
-    }
-
-    internal sealed class MultiTurnTerminationStrategy : TerminationStrategy
-    {
-        public MultiTurnTerminationStrategy(int turns)
-        {
-            this.MaximumIterations = turns;
-        }
-
-        /// <inheritdoc/>
-        protected override Task<bool> ShouldAgentTerminateAsync(
-            Agent agent,
-            IReadOnlyList<ChatMessageContent> history,
-            CancellationToken cancellationToken = default)
-        {
-            return Task.FromResult(false);
-        }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj
index ffc4734e10d6..3a061b4fb4a0 100644
--- a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj
+++ b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj
@@ -16,7 +16,6 @@
 
   
     
-    
     
     
     
@@ -28,7 +27,6 @@
     
     
     
-    
     
     
     
@@ -42,10 +40,8 @@
   
 
   
-    
     
     
-    
     
     
     
@@ -67,4 +63,4 @@
     
   
 
-</Project>
\ No newline at end of file
+</Project>
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step01_Assistant.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step01_Assistant.cs
deleted file mode 100644
index 312edc9e7c6f..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step01_Assistant.cs
+++ /dev/null
@@ -1,66 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.OpenAI;
-using OpenAI.Assistants;
-using Resources;
-
-namespace GettingStarted.OpenAIAssistants;
-
-/// <summary>
-/// This example demonstrates using <see cref="OpenAIAssistantAgent"/> with templatized instructions.
-/// </summary>
-public class Step01_Assistant(ITestOutputHelper output) : BaseAssistantTest(output)
-{
-    [Fact]
-    public async Task UseTemplateForAssistantAgentAsync()
-    {
-        // Define the agent
-        string generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml");
-        PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml);
-        // Instructions, Name and Description properties defined via the PromptTemplateConfig.
-        Assistant definition = await this.AssistantClient.CreateAssistantFromTemplateAsync(this.Model, templateConfig, metadata: SampleMetadata);
-        OpenAIAssistantAgent agent = new(
-            definition,
-            this.AssistantClient,
-            templateFactory: new KernelPromptTemplateFactory(),
-            templateFormat: PromptTemplateConfig.SemanticKernelTemplateFormat)
-        {
-            Arguments =
-            {
-                { "topic", "Dog" },
-                { "length", "3" }
-            }
-        };
-
-        // Create a thread for the agent conversation.
-        string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
-
-        try
-        {
-            // Invoke the agent with the default arguments.
-            await InvokeAgentAsync();
-
-            // Invoke the agent with the override arguments.
-            await InvokeAgentAsync(
-                    new()
-                    {
-                        { "topic", "Cat" },
-                        { "length", "3" },
-                    });
-        }
-        finally
-        {
-            await this.AssistantClient.DeleteThreadAsync(threadId);
-            await this.AssistantClient.DeleteAssistantAsync(agent.Id);
-        }
-
-        // Local function to invoke agent and display the response.
-        async Task InvokeAgentAsync(KernelArguments? arguments = null)
-        {
-            await foreach (ChatMessageContent response in agent.InvokeAsync(threadId, arguments))
-            {
-                WriteAgentChatMessage(response);
-            }
-        }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step02_Assistant_Plugins.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step02_Assistant_Plugins.cs
deleted file mode 100644
index 3eb893a8871e..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step02_Assistant_Plugins.cs
+++ /dev/null
@@ -1,92 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.OpenAI;
-using Microsoft.SemanticKernel.ChatCompletion;
-using OpenAI.Assistants;
-using Plugins;
-
-namespace GettingStarted.OpenAIAssistants;
-
-/// <summary>
-/// Demonstrate creation of <see cref="OpenAIAssistantAgent"/> with a <see cref="KernelPlugin"/>,
-/// and then eliciting its response to explicit user messages.
-/// </summary>
-public class Step02_Assistant_Plugins(ITestOutputHelper output) : BaseAssistantTest(output)
-{
-    [Fact]
-    public async Task UseAssistantWithPluginAsync()
-    {
-        // Define the agent
-        OpenAIAssistantAgent agent = await CreateAssistantAgentAsync(
-                plugin: KernelPluginFactory.CreateFromType<MenuPlugin>(),
-                instructions: "Answer questions about the menu.",
-                name: "Host");
-
-        // Create a thread for the agent conversation.
-        string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
-
-        // Respond to user input
-        try
-        {
-            await InvokeAgentAsync(agent, threadId, "Hello");
-            await InvokeAgentAsync(agent, threadId, "What is the special soup and its price?");
-            await InvokeAgentAsync(agent, threadId, "What is the special drink and its price?");
-            await InvokeAgentAsync(agent, threadId, "Thank you");
-        }
-        finally
-        {
-            await this.AssistantClient.DeleteThreadAsync(threadId);
-            await this.AssistantClient.DeleteAssistantAsync(agent.Id);
-        }
-    }
-
-    [Fact]
-    public async Task UseAssistantWithPluginEnumParameterAsync()
-    {
-        // Define the agent
-        OpenAIAssistantAgent agent = await CreateAssistantAgentAsync(plugin: KernelPluginFactory.CreateFromType<WidgetFactory>());
-
-        // Create a thread for the agent conversation.
-        string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
-
-        // Respond to user input
-        try
-        {
-            await InvokeAgentAsync(agent, threadId, "Create a beautiful red colored widget for me.");
-        }
-        finally
-        {
-            await this.AssistantClient.DeleteThreadAsync(threadId);
-            await this.AssistantClient.DeleteAssistantAsync(agent.Id);
-        }
-    }
-
-    private async Task<OpenAIAssistantAgent> CreateAssistantAgentAsync(KernelPlugin plugin, string? instructions = null, string? name = null)
-    {
-        // Define the assistant
-        Assistant assistant =
-            await this.AssistantClient.CreateAssistantAsync(
-                this.Model,
-                name,
-                instructions: instructions,
-                metadata: SampleMetadata);
-
-        // Create the agent
-        OpenAIAssistantAgent agent = new(assistant, this.AssistantClient, [plugin]);
-
-        return agent;
-    }
-
-    // Local function to invoke agent and display the conversation messages.
-    private async Task InvokeAgentAsync(OpenAIAssistantAgent agent, string threadId, string input)
-    {
-        ChatMessageContent message = new(AuthorRole.User, input);
-        await agent.AddChatMessageAsync(threadId, message);
-        this.WriteAgentChatMessage(message);
-
-        await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
-        {
-            this.WriteAgentChatMessage(response);
-        }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step05_AssistantTool_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step05_AssistantTool_FileSearch.cs
deleted file mode 100644
index 72248118577b..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step05_AssistantTool_FileSearch.cs
+++ /dev/null
@@ -1,73 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.OpenAI;
-using Microsoft.SemanticKernel.ChatCompletion;
-using OpenAI.Assistants;
-using Resources;
-
-namespace GettingStarted.OpenAIAssistants;
-
-/// <summary>
-/// Demonstrate using <see cref="OpenAIAssistantAgent"/> with file search.
-/// </summary>
-public class Step05_AssistantTool_FileSearch(ITestOutputHelper output) : BaseAssistantTest(output)
-{
-    [Fact]
-    public async Task UseFileSearchToolWithAssistantAgentAsync()
-    {
-        // Define the assistant
-        Assistant assistant =
-            await this.AssistantClient.CreateAssistantAsync(
-                this.Model,
-                enableFileSearch: true,
-                metadata: SampleMetadata);
-
-        // Create the agent
-        OpenAIAssistantAgent agent = new(assistant, this.AssistantClient);
-
-        // Upload file - Using a table of fictional employees.
-        await using Stream stream = EmbeddedResource.ReadStream("employees.pdf")!;
-        string fileId = await this.Client.UploadAssistantFileAsync(stream, "employees.pdf");
-
-        // Create a vector-store
-        string vectorStoreId =
-            await this.Client.CreateVectorStoreAsync(
-                [fileId],
-                waitUntilCompleted: true,
-                metadata: SampleMetadata);
-
-        // Create a thread associated with a vector-store for the agent conversation.
-        string threadId = await this.AssistantClient.CreateThreadAsync(
-                            vectorStoreId: vectorStoreId,
-                            metadata: SampleMetadata);
-
-        // Respond to user input
-        try
-        {
-            await InvokeAgentAsync("Who is the youngest employee?");
-            await InvokeAgentAsync("Who works in sales?");
-            await InvokeAgentAsync("I have a customer request, who can help me?");
-        }
-        finally
-        {
-            await this.AssistantClient.DeleteThreadAsync(threadId);
-            await this.AssistantClient.DeleteAssistantAsync(agent.Id);
-            await this.Client.DeleteVectorStoreAsync(vectorStoreId);
-            await this.Client.DeleteFileAsync(fileId);
-        }
-
-        // Local function to invoke agent and display the conversation messages.
-        async Task InvokeAgentAsync(string input)
-        {
-            ChatMessageContent message = new(AuthorRole.User, input);
-            await agent.AddChatMessageAsync(threadId, message);
-            this.WriteAgentChatMessage(message);
-
-            await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
-            {
-                this.WriteAgentChatMessage(response);
-            }
-        }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step06_AssistantTool_Function.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step06_AssistantTool_Function.cs
deleted file mode 100644
index 024f8ab167ae..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step06_AssistantTool_Function.cs
+++ /dev/null
@@ -1,77 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.OpenAI;
-using Microsoft.SemanticKernel.ChatCompletion;
-using OpenAI.Assistants;
-using Plugins;
-
-namespace GettingStarted.OpenAIAssistants;
-
-/// <summary>
-/// This example demonstrates how to define function tools for an <see cref="OpenAIAssistantAgent"/>
-/// when the assistant is created. This is useful if you want to retrieve the assistant later and
-/// then dynamically check what function tools it requires.
-/// </summary>
-public class Step06_AssistantTool_Function(ITestOutputHelper output) : BaseAssistantTest(output)
-{
-    private const string HostName = "Host";
-    private const string HostInstructions = "Answer questions about the menu.";
-
-    [Fact]
-    public async Task UseSingleAssistantWithFunctionToolsAsync()
-    {
-        // Define the agent
-        AssistantCreationOptions creationOptions =
-            new()
-            {
-                Name = HostName,
-                Instructions = HostInstructions,
-                Metadata =
-                {
-                    { SampleMetadataKey, bool.TrueString }
-                },
-            };
-
-        // In this sample the function tools are added to the assistant; this is
-        // important if you want to retrieve the assistant later and then dynamically check
-        // what function tools it requires.
-        KernelPlugin plugin = KernelPluginFactory.CreateFromType<MenuPlugin>();
-        plugin.Select(f => f.ToToolDefinition(plugin.Name)).ToList().ForEach(td => creationOptions.Tools.Add(td));
-
-        Assistant definition = await this.AssistantClient.CreateAssistantAsync(this.Model, creationOptions);
-        OpenAIAssistantAgent agent = new(definition, this.AssistantClient);
-
-        // Add plugin to the agent's Kernel (same as direct Kernel usage).
-        agent.Kernel.Plugins.Add(plugin);
-
-        // Create a thread for the agent conversation.
-        string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
-
-        // Respond to user input
-        try
-        {
-            await InvokeAgentAsync("Hello");
-            await InvokeAgentAsync("What is the special soup and its price?");
-            await InvokeAgentAsync("What is the special drink and its price?");
-            await InvokeAgentAsync("Thank you");
-        }
-        finally
-        {
-            await this.AssistantClient.DeleteThreadAsync(threadId);
-            await this.AssistantClient.DeleteAssistantAsync(agent.Id);
-        }
-
-        // Local function to invoke agent and display the conversation messages.
-        async Task InvokeAgentAsync(string input)
-        {
-            ChatMessageContent message = new(AuthorRole.User, input);
-            await agent.AddChatMessageAsync(threadId, message);
-            this.WriteAgentChatMessage(message);
-
-            await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
-            {
-                this.WriteAgentChatMessage(response);
-            }
-        }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/Plugins/MenuPlugin.cs b/dotnet/samples/GettingStartedWithAgents/Plugins/MenuPlugin.cs
deleted file mode 100644
index fb37ed0309e9..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/Plugins/MenuPlugin.cs
+++ /dev/null
@@ -1,79 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.ComponentModel;
-using Microsoft.SemanticKernel;
-
-namespace Plugins;
-
-public sealed class MenuPlugin
-{
-    [KernelFunction, Description("Provides a list of specials from the menu.")]
-    public MenuItem[] GetMenu()
-    {
-        return s_menuItems;
-    }
-
-    [KernelFunction, Description("Provides a list of specials from the menu.")]
-    public MenuItem[] GetSpecials()
-    {
-        return s_menuItems.Where(i => i.IsSpecial).ToArray();
-    }
-
-    [KernelFunction, Description("Provides the price of the requested menu item.")]
-    public float? GetItemPrice(
-        [Description("The name of the menu item.")]
-        string menuItem)
-    {
-        return s_menuItems.FirstOrDefault(i => i.Name.Equals(menuItem, StringComparison.OrdinalIgnoreCase))?.Price;
-    }
-
-    private static readonly MenuItem[] s_menuItems =
-        [
-            new()
-            {
-                Category = "Soup",
-                Name = "Clam Chowder",
-                Price = 4.95f,
-                IsSpecial = true,
-            },
-            new()
-            {
-                Category = "Soup",
-                Name = "Tomato Soup",
-                Price = 4.95f,
-                IsSpecial = false,
-            },
-            new()
-            {
-                Category = "Salad",
-                Name = "Cobb Salad",
-                Price = 9.99f,
-            },
-            new()
-            {
-                Category = "Salad",
-                Name = "House Salad",
-                Price = 4.95f,
-            },
-            new()
-            {
-                Category = "Drink",
-                Name = "Chai Tea",
-                Price = 2.95f,
-                IsSpecial = true,
-            },
-            new()
-            {
-                Category = "Drink",
-                Name = "Soda",
-                Price = 1.95f,
-            },
-        ];
-
-    public sealed class MenuItem
-    {
-        public string Category { get; init; }
-        public string Name { get; init; }
-        public float Price { get; init; }
-        public bool IsSpecial { get; init; }
-    }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/Plugins/WidgetFactory.cs b/dotnet/samples/GettingStartedWithAgents/Plugins/WidgetFactory.cs
deleted file mode 100644
index 8a889ee17249..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/Plugins/WidgetFactory.cs
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.ComponentModel;
-using System.Text.Json.Serialization;
-using Microsoft.SemanticKernel;
-
-namespace Plugins;
-
-/// <summary>
-/// A plugin that creates widgets.
-/// </summary>
-public sealed class WidgetFactory
-{
-    [KernelFunction]
-    [Description("Creates a new widget of the specified type and colors")]
-    public WidgetDetails CreateWidget(
-        [Description("The type of widget to be created")] WidgetType widgetType,
-        [Description("The colors of the widget to be created")] WidgetColor[] widgetColors)
-    {
-        return new()
-        {
-            SerialNumber = $"{widgetType}-{string.Join("-", widgetColors)}-{Guid.NewGuid()}",
-            Type = widgetType,
-            Colors = widgetColors,
-        };
-    }
-}
-
-/// <summary>
-/// A <see cref="JsonConverter"/> is required to correctly convert enum values.
-/// </summary>
-[JsonConverter(typeof(JsonStringEnumConverter))]
-public enum WidgetType
-{
-    [Description("A widget that is useful.")]
-    Useful,
-
-    [Description("A widget that is decorative.")]
-    Decorative
-}
-
-/// <summary>
-/// A <see cref="JsonConverter"/> is required to correctly convert enum values.
-/// </summary>
-[JsonConverter(typeof(JsonStringEnumConverter))]
-public enum WidgetColor
-{
-    [Description("Use when creating a red item.")]
-    Red,
-
-    [Description("Use when creating a green item.")]
-    Green,
-
-    [Description("Use when creating a blue item.")]
-    Blue
-}
-
-public sealed class WidgetDetails
-{
-    public string SerialNumber { get; init; }
-    public WidgetType Type { get; init; }
-    public WidgetColor[] Colors { get; init; }
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/README.md b/dotnet/samples/GettingStartedWithAgents/README.md
index 6c54a26c0d90..ed0e68802994 100644
--- a/dotnet/samples/GettingStartedWithAgents/README.md
+++ b/dotnet/samples/GettingStartedWithAgents/README.md
@@ -2,14 +2,13 @@
 
 This project contains a step by step guide to get started with  _Semantic Kernel Agents_.
 
-## NuGet
 
+#### NuGet:
 - [Microsoft.SemanticKernel.Agents.Abstractions](https://www.nuget.org/packages/Microsoft.SemanticKernel.Agents.Abstractions)
 - [Microsoft.SemanticKernel.Agents.Core](https://www.nuget.org/packages/Microsoft.SemanticKernel.Agents.Core)
 - [Microsoft.SemanticKernel.Agents.OpenAI](https://www.nuget.org/packages/Microsoft.SemanticKernel.Agents.OpenAI)
 
-## Source
-
+#### Source
 - [Semantic Kernel Agent Framework](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/Agents)
 
 The examples can be run as integration tests but their code can also be copied to stand-alone programs.
@@ -18,8 +17,6 @@ The examples can be run as integration tests but their code can also be copied t
 
 The getting started with agents examples include:
 
-### ChatCompletion
-
 Example|Description
 ---|---
 [Step01_Agent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs)|How to create and use an agent.
@@ -28,39 +25,11 @@ Example|Description
 [Step04_KernelFunctionStrategies](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs)|How to utilize a `KernelFunction` as a _chat strategy_.
 [Step05_JsonResult](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step05_JsonResult.cs)|How to have an agent produce JSON.
 [Step06_DependencyInjection](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs)|How to define dependency injection patterns for agents.
-[Step07_Telemetry](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step07_Telemetry.cs)|How to enable logging for agents.
-
-### Open AI Assistant
-
-Example|Description
----|---
-[Step01_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step01_Assistant.cs)|How to create an Open AI Assistant agent.
-[Step02_Assistant_Plugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step02_Assistant_Plugins.cs)|How to create an Open AI Assistant agent.
-[Step03_Assistant_Vision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step03_Assistant_Vision.cs)|How to provide an image as input to an Open AI Assistant agent.
-[Step04_AssistantTool_CodeInterpreter_](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step04_AssistantTool_CodeInterpreter_.cs)|How to use the code-interpreter tool for an Open AI Assistant agent.
-[Step05_AssistantTool_FileSearch](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step05_AssistantTool_FileSearch.cs)|How to use the file-search tool for an Open AI Assistant agent.
-
-### Azure AI Agent
-
-Example|Description
----|---
-[Step01_AzureAIAgent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step01_AzureAIAgent.cs)|How to create an Azure AI agent.
-[Step02_AzureAIAgent_Plugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Plugins.cs)|How to create an Azure AI agent.
-[Step03_AzureAIAgent_Chat](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Chat.cs)|How to create a conversation with Azure AI agents.
-[Step04_AzureAIAgent_CodeInterpreter](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step03_AzureAIAgent_CodeInterpreter.cs)|How to use the code-interpreter tool for an Azure AI agent.
-[Step05_AzureAIAgent_FileSearch](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_FileSearch.cs)|How to use the file-search tool for an Azure AI agent.
-[Step06_AzureAIAgent_OpenAPI](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_OpenAPI.cs)|How to use the Open API tool for an Azure AI agent.
-
-### Bedrock Agent
-
-Example|Description
----|---
-[Step01_BedrockAgent](./BedrockAgent/Step01_BedrockAgent.cs)|How to create a Bedrock agent and interact with it in the most basic way.
-[Step02_BedrockAgent_CodeInterpreter](./BedrockAgent/Step02_BedrockAgent_CodeInterpreter.cs)|How to use the code-interpreter tool with a Bedrock agent.
-[Step03_BedrockAgent_Functions](./BedrockAgent/Step03_BedrockAgent_Functions.cs)|How to use kernel functions with a Bedrock agent.
-[Step04_BedrockAgent_Trace](./BedrockAgent/Step04_BedrockAgent_Trace.cs)|How to enable tracing for a Bedrock agent to inspect the chain of thoughts.
-[Step05_BedrockAgent_FileSearch](./BedrockAgent/Step05_BedrockAgent_FileSearch.cs)|How to use file search with a Bedrock agent (i.e. Bedrock knowledge base).
-[Step06_BedrockAgent_AgentChat](./BedrockAgent/Step06_BedrockAgent_AgentChat.cs)|How to create a conversation between two agents, one of which is a Bedrock agent.
+[Step07_Logging](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs)|How to enable logging for agents.
+[Step08_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs)|How to create an Open AI Assistant agent.
+[Step09_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs)|How to provide an image as input to an Open AI Assistant agent.
+[Step10_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter_.cs)|How to use the code-interpreter tool for an Open AI Assistant agent.
+[Step11_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs)|How to use the file-search tool for an Open AI Assistant agent.
 
 ## Legacy Agents
 
@@ -69,8 +38,8 @@ Support for the OpenAI Assistant API was originally published in `Microsoft.Sema
 
 This package has been superseded by _Semantic Kernel Agents_, which includes support for Open AI Assistant agents.
 
-## Running Examples with Filters
 
+## Running Examples with Filters
 Examples may be explored and run within _Visual Studio_ using _Test Explorer_.
 
 You can also run specific examples via the command-line by using test filters (`dotnet test --filter`). Type `dotnet test --help` at the command line for more details.
@@ -117,25 +86,12 @@ To set your secrets with .NET Secret Manager:
 5. Or Azure Open AI:
 
     ```
-    dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "gpt-4o"
+    dotnet user-secrets set "AzureOpenAI:DeploymentName" "..."
+    dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "..."
     dotnet user-secrets set "AzureOpenAI:Endpoint" "https://... .openai.azure.com/"
     dotnet user-secrets set "AzureOpenAI:ApiKey" "..."
     ```
 
-6. Or Azure AI:
-
-    ```
-    dotnet user-secrets set "AzureAI:ConnectionString" "..."
-    dotnet user-secrets set "AzureAI:ChatModelId" "gpt-4o"
-    ```
-
-7. Or Bedrock:
-
-    ```
-    dotnet user-secrets set "BedrockAgent:AgentResourceRoleArn" "arn:aws:iam::...:role/..."
-    dotnet user-secrets set "BedrockAgent:FoundationModel" "..."
-    ```
-
 > NOTE: Azure secrets will take precedence, if both Open AI and Azure Open AI secrets are defined, unless `ForceOpenAI` is set:
 
 ```
diff --git a/dotnet/samples/GettingStartedWithAgents/Resources/AutoInvokeTools.yaml b/dotnet/samples/GettingStartedWithAgents/Resources/AutoInvokeTools.yaml
deleted file mode 100644
index 36d66167b555..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/Resources/AutoInvokeTools.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-name: ToolAgent
-template_format: semantic-kernel
-description: An agent that is configured to auto-invoke plugins.
-execution_settings:
-  default:
-    function_choice_behavior:
-      type: auto
diff --git a/dotnet/samples/GettingStartedWithAgents/Resources/countries.json b/dotnet/samples/GettingStartedWithAgents/Resources/countries.json
deleted file mode 100644
index b88d5040750a..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/Resources/countries.json
+++ /dev/null
@@ -1,46 +0,0 @@
-{
-  "openapi": "3.1.0",
-  "info": {
-    "title": "RestCountries.NET API",
-    "description": "Web API version 3.1 for managing country items, based on previous implementations from restcountries.eu and restcountries.com.",
-    "version": "v3.1"
-  },
-  "servers": [
-    { "url": "https://restcountries.net" }
-  ],
-  "auth": [],
-  "paths": {
-    "/v3.1/currency": {
-      "get": {
-        "description": "Search by currency.",
-        "operationId": "LookupCountryByCurrency",
-        "parameters": [
-          {
-            "name": "currency",
-            "in": "query",
-            "description": "The currency to search for.",
-            "required": true,
-            "schema": {
-              "type": "string"
-            }
-          }
-        ],
-        "responses": {
-          "200": {
-            "description": "Success",
-            "content": {
-              "text/plain": {
-                "schema": {
-                  "type": "string"
-                }
-              }
-            }
-          }
-        }
-      }
-    }
-  },
-  "components": {
-    "schemes": {}
-  }
-}
\ No newline at end of file
diff --git a/dotnet/samples/GettingStartedWithAgents/Resources/weather.json b/dotnet/samples/GettingStartedWithAgents/Resources/weather.json
deleted file mode 100644
index c3009f417de4..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/Resources/weather.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
-  "openapi": "3.1.0",
-  "info": {
-    "title": "get weather data",
-    "description": "Retrieves current weather data for a location based on wttr.in.",
-    "version": "v1.0.0"
-  },
-  "servers": [
-    {
-      "url": "https://wttr.in"
-    }
-  ],
-  "auth": [],
-  "paths": {
-    "/{location}": {
-      "get": {
-        "description": "Get weather information for a specific location",
-        "operationId": "GetCurrentWeather",
-        "parameters": [
-          {
-            "name": "location",
-            "in": "path",
-            "description": "City or location to retrieve the weather for",
-            "required": true,
-            "schema": {
-              "type": "string"
-            }
-          },
-          {
-            "name": "format",
-            "in": "query",
-            "description": "Always use j1 value for this parameter",
-            "required": true,
-            "schema": {
-              "type": "string",
-              "default": "j1"
-            }
-          }
-        ],
-        "responses": {
-          "200": {
-            "description": "Successful response",
-            "content": {
-              "text/plain": {
-                "schema": {
-                  "type": "string"
-                }
-              }
-            }
-          },
-          "404": {
-            "description": "Location not found"
-          }
-        },
-        "deprecated": false
-      }
-    }
-  },
-  "components": {
-    "schemes": {}
-  }
-}
\ No newline at end of file
diff --git a/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs b/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs
index 3807c1ebef74..dfd6aeb22fb3 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs
@@ -59,18 +59,17 @@ public async Task UseTemplateForChatCompletionAgentAsync()
         // Define the agent
         string generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml");
         PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml);
-        KernelPromptTemplateFactory templateFactory = new();
 
         // Instructions, Name and Description properties defined via the config.
         ChatCompletionAgent agent =
-            new(templateConfig, templateFactory)
+            new(templateConfig, new KernelPromptTemplateFactory())
             {
                 Kernel = this.CreateKernelWithChatCompletion(),
-                Arguments =
-                    {
-                        { "topic", "Dog" },
-                        { "length", "3" },
-                    }
+                Arguments = new KernelArguments()
+                {
+                    { "topic", "Dog" },
+                    { "length", "3" },
+                }
             };
 
         /// Create the chat history to capture the agent interaction.
diff --git a/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs b/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs
index ced4148a7287..047020a90b67 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs
@@ -1,9 +1,9 @@
 // Copyright (c) Microsoft. All rights reserved.
+using System.ComponentModel;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
 using Microsoft.SemanticKernel.ChatCompletion;
-using Plugins;
-using Resources;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
 
 namespace GettingStarted;
 
@@ -13,97 +13,66 @@ namespace GettingStarted;
 /// 
 public class Step02_Plugins(ITestOutputHelper output) : BaseAgentsTest(output)
 {
-    [Fact]
-    public async Task UseChatCompletionWithPluginAsync()
-    {
-        // Define the agent
-        ChatCompletionAgent agent = CreateAgentWithPlugin(
-                plugin: KernelPluginFactory.CreateFromType(),
-                instructions: "Answer questions about the menu.",
-                name: "Host");
-
-        /// Create the chat history to capture the agent interaction.
-        ChatHistory chat = [];
-
-        // Respond to user input, invoking functions where appropriate.
-        await InvokeAgentAsync(agent, chat, "Hello");
-        await InvokeAgentAsync(agent, chat, "What is the special soup and its price?");
-        await InvokeAgentAsync(agent, chat, "What is the special drink and its price?");
-        await InvokeAgentAsync(agent, chat, "Thank you");
-    }
+    private const string HostName = "Host";
+    private const string HostInstructions = "Answer questions about the menu.";
 
     [Fact]
-    public async Task UseChatCompletionWithPluginEnumParameterAsync()
+    public async Task UseChatCompletionWithPluginAgentAsync()
     {
         // Define the agent
-        ChatCompletionAgent agent = CreateAgentWithPlugin(
-                KernelPluginFactory.CreateFromType());
-
-        /// Create the chat history to capture the agent interaction.
-        ChatHistory chat = [];
-
-        // Respond to user input, invoking functions where appropriate.
-        await InvokeAgentAsync(agent, chat, "Create a beautiful red colored widget for me.");
-    }
-
-    [Fact]
-    public async Task UseChatCompletionWithTemplateExecutionSettingsAsync()
-    {
-        // Read the template resource
-        string autoInvokeYaml = EmbeddedResource.Read("AutoInvokeTools.yaml");
-        PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(autoInvokeYaml);
-        KernelPromptTemplateFactory templateFactory = new();
-
-        // Define the agent:
-        // Execution-settings with auto-invocation of plugins defined via the config.
         ChatCompletionAgent agent =
-            new(templateConfig, templateFactory)
+            new()
             {
-                Kernel = this.CreateKernelWithChatCompletion()
+                Instructions = HostInstructions,
+                Name = HostName,
+                Kernel = this.CreateKernelWithChatCompletion(),
+                Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }),
             };
 
-        agent.Kernel.Plugins.AddFromType();
+        // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage).
+        KernelPlugin plugin = KernelPluginFactory.CreateFromType();
+        agent.Kernel.Plugins.Add(plugin);
 
         /// Create the chat history to capture the agent interaction.
         ChatHistory chat = [];
 
         // Respond to user input, invoking functions where appropriate.
-        await InvokeAgentAsync(agent, chat, "Create a beautiful red colored widget for me.");
-    }
+        await InvokeAgentAsync("Hello");
+        await InvokeAgentAsync("What is the special soup?");
+        await InvokeAgentAsync("What is the special drink?");
+        await InvokeAgentAsync("Thank you");
 
-    private ChatCompletionAgent CreateAgentWithPlugin(
-        KernelPlugin plugin,
-        string? instructions = null,
-        string? name = null)
-    {
-        ChatCompletionAgent agent =
-                new()
-                {
-                    Instructions = instructions,
-                    Name = name,
-                    Kernel = this.CreateKernelWithChatCompletion(),
-                    Arguments = new KernelArguments(new PromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }),
-                };
+        // Local function to invoke agent and display the conversation messages.
+        async Task InvokeAgentAsync(string input)
+        {
+            ChatMessageContent message = new(AuthorRole.User, input);
+            chat.Add(message);
+            this.WriteAgentChatMessage(message);
 
-        // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage).
-        agent.Kernel.Plugins.Add(plugin);
+            await foreach (ChatMessageContent response in agent.InvokeAsync(chat))
+            {
+                chat.Add(response);
 
-        return agent;
+                this.WriteAgentChatMessage(response);
+            }
+        }
     }
 
-    // Local function to invoke agent and display the conversation messages.
-    private async Task InvokeAgentAsync(ChatCompletionAgent agent, ChatHistory chat, string input)
+    private sealed class MenuPlugin
     {
-        ChatMessageContent message = new(AuthorRole.User, input);
-        chat.Add(message);
-
-        this.WriteAgentChatMessage(message);
-
-        await foreach (ChatMessageContent response in agent.InvokeAsync(chat))
-        {
-            chat.Add(response);
-
-            this.WriteAgentChatMessage(response);
-        }
+        [KernelFunction, Description("Provides a list of specials from the menu.")]
+        [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")]
+        public string GetSpecials() =>
+            """
+            Special Soup: Clam Chowder
+            Special Salad: Cobb Salad
+            Special Drink: Chai Tea
+            """;
+
+        [KernelFunction, Description("Provides the price of the requested menu item.")]
+        public string GetItemPrice(
+            [Description("The name of the menu item.")]
+            string menuItem) =>
+            "$9.99";
     }
 }
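For orientation, the MenuPlugin pattern from Step02_Plugins above can also be exercised against a plain Kernel, without an agent. The sketch below is illustrative only and not part of this patch; the model id and API key are placeholder assumptions, and MenuPlugin refers to the class defined in the sample above.

```
// Illustrative sketch (not part of this patch).
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.OpenAI;

var builder = Kernel.CreateBuilder();
// Placeholder model id and API key (assumptions).
builder.AddOpenAIChatCompletion(modelId: "gpt-4o", apiKey: "<your-api-key>");
Kernel kernel = builder.Build();

// Register the plugin the same way the agent sample adds it to agent.Kernel.Plugins.
kernel.Plugins.Add(KernelPluginFactory.CreateFromType<MenuPlugin>());

// Let the model auto-invoke kernel functions, mirroring FunctionChoiceBehavior.Auto() in the sample.
OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() };

var result = await kernel.InvokePromptAsync(
    "What is the special soup and its price?",
    new KernelArguments(settings));

Console.WriteLine(result);
```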
diff --git a/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs b/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs
index 963b670f1f82..f924793951aa 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs
@@ -1,8 +1,8 @@
 // Copyright (c) Microsoft. All rights reserved.
-
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
 using Microsoft.SemanticKernel.Agents.Chat;
+using Microsoft.SemanticKernel.Agents.History;
 using Microsoft.SemanticKernel.ChatCompletion;
 
 namespace GettingStarted;
diff --git a/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
index 276f2f6fb198..5beb969bf090 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
@@ -1,5 +1,4 @@
 // Copyright (c) Microsoft. All rights reserved.
-using Azure.Identity;
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.Extensions.Logging;
 using Microsoft.SemanticKernel;
@@ -44,7 +43,7 @@ public async Task UseDependencyInjectionToCreateAgentAsync()
             serviceContainer.AddAzureOpenAIChatCompletion(
                 TestConfiguration.AzureOpenAI.ChatDeploymentName,
                 TestConfiguration.AzureOpenAI.Endpoint,
-                new AzureCliCredential());
+                TestConfiguration.AzureOpenAI.ApiKey);
         }
 
         // Transient Kernel as each agent may customize its Kernel instance with plug-ins.
diff --git a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step03_AzureAIAgent_Chat.cs b/dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs
similarity index 56%
rename from dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step03_AzureAIAgent_Chat.cs
rename to dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs
index c71b7124b463..3a48d407dea9 100644
--- a/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step03_AzureAIAgent_Chat.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs
@@ -1,27 +1,28 @@
 // Copyright (c) Microsoft. All rights reserved.
+using Microsoft.Extensions.Logging;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
-using Microsoft.SemanticKernel.Agents.AzureAI;
 using Microsoft.SemanticKernel.Agents.Chat;
 using Microsoft.SemanticKernel.ChatCompletion;
-using Agent = Azure.AI.Projects.Agent;
 
-namespace GettingStarted.AzureAgents;
+namespace GettingStarted;
 
 /// 
-/// Demonstrate creation of  with 
-/// that inform how chat proceeds with regards to: Agent selection, chat continuation, and maximum
-/// number of agent interactions.
+/// A repeat of  with logging enabled via assignment
+/// of a  to .
 /// 
-public class Step03_AzureAIAgent_Chat(ITestOutputHelper output) : BaseAzureAgentTest(output)
+/// 
+/// Samples become super noisy with logging always enabled.
+/// 
+public class Step07_Logging(ITestOutputHelper output) : BaseAgentsTest(output)
 {
     private const string ReviewerName = "ArtDirector";
     private const string ReviewerInstructions =
         """
         You are an art director who has opinions about copywriting born of a love for David Ogilvy.
         The goal is to determine if the given copy is acceptable to print.
-        If so, state that it is approved.  Do not use the word "approve" unless you are giving approval.
-        If not, provide insight on how to refine suggested copy without example.
+        If so, state that it is approved.
+        If not, provide insight on how to refine suggested copy without examples.
         """;
 
     private const string CopyWriterName = "CopyWriter";
@@ -36,26 +37,33 @@ Consider suggestions when refining an idea.
         """;
 
     [Fact]
-    public async Task UseGroupChatWithTwoAgentsAsync()
+    public async Task UseLoggerFactoryWithAgentGroupChatAsync()
     {
         // Define the agents
-        Agent reviewerModel = await this.AgentsClient.CreateAgentAsync(
-            TestConfiguration.AzureAI.ChatModelId,
-            ReviewerName,
-            null,
-            ReviewerInstructions);
-        AzureAIAgent agentReviewer = new(reviewerModel, this.AgentsClient);
-        Agent writerModel = await this.AgentsClient.CreateAgentAsync(
-            TestConfiguration.AzureAI.ChatModelId,
-            CopyWriterName,
-            null,
-            CopyWriterInstructions);
-        AzureAIAgent agentWriter = new(writerModel, this.AgentsClient);
+        ChatCompletionAgent agentReviewer =
+            new()
+            {
+                Instructions = ReviewerInstructions,
+                Name = ReviewerName,
+                Kernel = this.CreateKernelWithChatCompletion(),
+                LoggerFactory = this.LoggerFactory,
+            };
+
+        ChatCompletionAgent agentWriter =
+            new()
+            {
+                Instructions = CopyWriterInstructions,
+                Name = CopyWriterName,
+                Kernel = this.CreateKernelWithChatCompletion(),
+                LoggerFactory = this.LoggerFactory,
+            };
 
         // Create a chat for agent interaction.
         AgentGroupChat chat =
             new(agentWriter, agentReviewer)
             {
+                // This is all that is required to enable logging across the agent framework.
+                LoggerFactory = this.LoggerFactory,
                 ExecutionSettings =
                     new()
                     {
@@ -72,30 +80,23 @@ public async Task UseGroupChatWithTwoAgentsAsync()
                     }
             };
 
-        try
-        {
-            // Invoke chat and display messages.
-            ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons.");
-            chat.AddChatMessage(input);
-            this.WriteAgentChatMessage(input);
-
-            await foreach (ChatMessageContent response in chat.InvokeAsync())
-            {
-                this.WriteAgentChatMessage(response);
-            }
+        // Invoke chat and display messages.
+        ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons.");
+        chat.AddChatMessage(input);
+        this.WriteAgentChatMessage(input);
 
-            Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]");
-        }
-        finally
+        await foreach (ChatMessageContent response in chat.InvokeAsync())
         {
-            await chat.ResetAsync();
+            this.WriteAgentChatMessage(response);
         }
+
+        Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]");
     }
 
     private sealed class ApprovalTerminationStrategy : TerminationStrategy
     {
         // Terminate when the final message contains the term "approve"
-        protected override Task ShouldAgentTerminateAsync(Microsoft.SemanticKernel.Agents.Agent agent, IReadOnlyList history, CancellationToken cancellationToken)
+        protected override Task ShouldAgentTerminateAsync(Agent agent, IReadOnlyList history, CancellationToken cancellationToken)
             => Task.FromResult(history[history.Count - 1].Content?.Contains("approve", StringComparison.OrdinalIgnoreCase) ?? false);
     }
 }
diff --git a/dotnet/samples/GettingStartedWithAgents/Step07_Telemetry.cs b/dotnet/samples/GettingStartedWithAgents/Step07_Telemetry.cs
deleted file mode 100644
index 832ce0b1db02..000000000000
--- a/dotnet/samples/GettingStartedWithAgents/Step07_Telemetry.cs
+++ /dev/null
@@ -1,236 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.Diagnostics;
-using Azure.Monitor.OpenTelemetry.Exporter;
-using Microsoft.Extensions.Logging;
-using Microsoft.Extensions.Logging.Abstractions;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents;
-using Microsoft.SemanticKernel.Agents.Chat;
-using Microsoft.SemanticKernel.Agents.OpenAI;
-using Microsoft.SemanticKernel.ChatCompletion;
-using OpenAI.Assistants;
-using OpenTelemetry;
-using OpenTelemetry.Resources;
-using OpenTelemetry.Trace;
-
-namespace GettingStarted;
-
-/// 
-/// A repeat of  with telemetry enabled.
-/// 
-public class Step07_Telemetry(ITestOutputHelper output) : BaseAssistantTest(output)
-{
-    /// 
-    /// Instance of  for the example's main activity.
-    /// 
-    private static readonly ActivitySource s_activitySource = new("AgentsTelemetry.Example");
-
-    /// 
-    /// Demonstrates logging in ,  and .
-    /// Logging is enabled through the  and  properties.
-    /// This example uses  to output logs to the test console, but any compatible logging provider can be used.
-    /// 
-    [Fact]
-    public async Task LoggingAsync()
-    {
-        await RunExampleAsync(loggerFactory: this.LoggerFactory);
-
-        // Output:
-        // [AddChatMessages] Adding Messages: 1.
-        // [AddChatMessages] Added Messages: 1.
-        // [InvokeAsync] Invoking chat: Microsoft.SemanticKernel.Agents.ChatCompletionAgent:63c505e8-cf5b-4aa3-a6a5-067a52377f82/CopyWriter, Microsoft.SemanticKernel.Agents.ChatCompletionAgent:85f6777b-54ef-4392-9608-67bc85c42c5b/ArtDirector
-        // [InvokeAsync] Selecting agent: Microsoft.SemanticKernel.Agents.Chat.SequentialSelectionStrategy.
-        // [NextAsync] Selected agent (0 / 2): 63c505e8-cf5b-4aa3-a6a5-067a52377f82/CopyWriter
-        // and more...
-    }
-
-    /// 
-    /// Demonstrates tracing in  and .
-    /// Tracing is enabled through the .
-    /// For output this example uses Console as well as Application Insights.
-    /// 
-    [Theory]
-    [InlineData(true, false)]
-    [InlineData(false, false)]
-    [InlineData(true, true)]
-    [InlineData(false, true)]
-    public async Task TracingAsync(bool useApplicationInsights, bool useStreaming)
-    {
-        using var tracerProvider = GetTracerProvider(useApplicationInsights);
-
-        using var activity = s_activitySource.StartActivity("MainActivity");
-        Console.WriteLine($"Operation/Trace ID: {Activity.Current?.TraceId}");
-
-        await RunExampleAsync(useStreaming: useStreaming);
-
-        // Output:
-        // Operation/Trace ID: 132d831ef39c13226cdaa79873f375b8
-        // Activity.TraceId:            132d831ef39c13226cdaa79873f375b8
-        // Activity.SpanId:             891e8f2f32a61123
-        // Activity.TraceFlags:         Recorded
-        // Activity.ParentSpanId:       5dae937c9438def9
-        // Activity.ActivitySourceName: Microsoft.SemanticKernel.Diagnostics
-        // Activity.DisplayName:        chat.completions gpt-4
-        // Activity.Kind:               Client
-        // Activity.StartTime:          2025-02-03T23:32:57.1363560Z
-        // Activity.Duration:           00:00:02.1339320
-        // and more...
-    }
-
-    #region private
-
-    private async Task RunExampleAsync(
-        bool useStreaming = false,
-        ILoggerFactory? loggerFactory = null)
-    {
-        // Define the agents
-        ChatCompletionAgent agentReviewer =
-            new()
-            {
-                Name = "ArtDirector",
-                Instructions =
-                    """
-                    You are an art director who has opinions about copywriting born of a love for David Ogilvy.
-                    The goal is to determine if the given copy is acceptable to print.
-                    If so, state that it is approved.
-                    If not, provide insight on how to refine suggested copy without examples.
-                    """,
-                Description = "An art director who has opinions about copywriting born of a love for David Ogilvy",
-                Kernel = this.CreateKernelWithChatCompletion(),
-                LoggerFactory = GetLoggerFactoryOrDefault(loggerFactory),
-            };
-
-        // Define the assistant
-        Assistant assistant =
-            await this.AssistantClient.CreateAssistantAsync(
-                this.Model,
-                name: "CopyWriter",
-                instructions:
-                    """
-                    You are a copywriter with ten years of experience and are known for brevity and a dry humor.
-                    The goal is to refine and decide on the single best copy as an expert in the field.
-                    Only provide a single proposal per response.
-                    You're laser focused on the goal at hand.
-                    Don't waste time with chit chat.
-                    Consider suggestions when refining an idea.
-                    """,
-                metadata: SampleMetadata);
-
-        // Create the agent
-        OpenAIAssistantAgent agentWriter = new(assistant, this.AssistantClient)
-        {
-            LoggerFactory = GetLoggerFactoryOrDefault(loggerFactory)
-        };
-
-        // Create a chat for agent interaction.
-        AgentGroupChat chat =
-            new(agentWriter, agentReviewer)
-            {
-                // This is all that is required to enable logging across the Agent Framework.
-                LoggerFactory = GetLoggerFactoryOrDefault(loggerFactory),
-                ExecutionSettings =
-                    new()
-                    {
-                        // Here a TerminationStrategy subclass is used that will terminate when
-                        // an assistant message contains the term "approve".
-                        TerminationStrategy =
-                            new ApprovalTerminationStrategy()
-                            {
-                                // Only the art-director may approve.
-                                Agents = [agentReviewer],
-                                // Limit total number of turns
-                                MaximumIterations = 10,
-                            }
-                    }
-            };
-
-        // Invoke chat and display messages.
-        ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons.");
-        chat.AddChatMessage(input);
-        this.WriteAgentChatMessage(input);
-
-        if (useStreaming)
-        {
-            string lastAgent = string.Empty;
-            await foreach (StreamingChatMessageContent response in chat.InvokeStreamingAsync())
-            {
-                if (string.IsNullOrEmpty(response.Content))
-                {
-                    continue;
-                }
-
-                if (!lastAgent.Equals(response.AuthorName, StringComparison.Ordinal))
-                {
-                    Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? "*"}:");
-                    lastAgent = response.AuthorName ?? string.Empty;
-                }
-
-                Console.WriteLine($"\t > streamed: '{response.Content}'");
-            }
-
-            // Display the chat history.
-            Console.WriteLine("================================");
-            Console.WriteLine("CHAT HISTORY");
-            Console.WriteLine("================================");
-
-            ChatMessageContent[] history = await chat.GetChatMessagesAsync().Reverse().ToArrayAsync();
-
-            for (int index = 0; index < history.Length; index++)
-            {
-                this.WriteAgentChatMessage(history[index]);
-            }
-        }
-        else
-        {
-            await foreach (ChatMessageContent response in chat.InvokeAsync())
-            {
-                this.WriteAgentChatMessage(response);
-            }
-        }
-
-        Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]");
-    }
-
-    private TracerProvider? GetTracerProvider(bool useApplicationInsights)
-    {
-        // Enable diagnostics.
-        AppContext.SetSwitch("Microsoft.SemanticKernel.Experimental.GenAI.EnableOTelDiagnostics", true);
-
-        var tracerProviderBuilder = Sdk.CreateTracerProviderBuilder()
-            .SetResourceBuilder(ResourceBuilder.CreateDefault().AddService("Semantic Kernel Agents Tracing Example"))
-            .AddSource("Microsoft.SemanticKernel*")
-            .AddSource(s_activitySource.Name);
-
-        if (useApplicationInsights)
-        {
-            var connectionString = TestConfiguration.ApplicationInsights.ConnectionString;
-
-            if (string.IsNullOrWhiteSpace(connectionString))
-            {
-                throw new ConfigurationNotFoundException(
-                    nameof(TestConfiguration.ApplicationInsights),
-                    nameof(TestConfiguration.ApplicationInsights.ConnectionString));
-            }
-
-            tracerProviderBuilder.AddAzureMonitorTraceExporter(o => o.ConnectionString = connectionString);
-        }
-        else
-        {
-            tracerProviderBuilder.AddConsoleExporter();
-        }
-
-        return tracerProviderBuilder.Build();
-    }
-
-    private ILoggerFactory GetLoggerFactoryOrDefault(ILoggerFactory? loggerFactory = null) => loggerFactory ?? NullLoggerFactory.Instance;
-
-    private sealed class ApprovalTerminationStrategy : TerminationStrategy
-    {
-        // Terminate when the final message contains the term "approve"
-        protected override Task ShouldAgentTerminateAsync(Agent agent, IReadOnlyList history, CancellationToken cancellationToken)
-            => Task.FromResult(history[history.Count - 1].Content?.Contains("approve", StringComparison.OrdinalIgnoreCase) ?? false);
-    }
-
-    #endregion
-}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs b/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs
new file mode 100644
index 000000000000..1e952810e51e
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs
@@ -0,0 +1,142 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.ComponentModel;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Resources;
+
+namespace GettingStarted;
+
+/// 
+/// This example demonstrates similarity between using 
+/// and  (see: Step 2).
+/// 
+public class Step08_Assistant(ITestOutputHelper output) : BaseAgentsTest(output)
+{
+    private const string HostName = "Host";
+    private const string HostInstructions = "Answer questions about the menu.";
+
+    [Fact]
+    public async Task UseSingleAssistantAgentAsync()
+    {
+        // Define the agent
+        OpenAIAssistantAgent agent =
+            await OpenAIAssistantAgent.CreateAsync(
+                clientProvider: this.GetClientProvider(),
+                definition: new OpenAIAssistantDefinition(this.Model)
+                {
+                    Instructions = HostInstructions,
+                    Name = HostName,
+                    Metadata = AssistantSampleMetadata,
+                },
+                kernel: new Kernel());
+
+        // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage).
+        KernelPlugin plugin = KernelPluginFactory.CreateFromType();
+        agent.Kernel.Plugins.Add(plugin);
+
+        // Create a thread for the agent conversation.
+        string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+
+        // Respond to user input
+        try
+        {
+            await InvokeAgentAsync("Hello");
+            await InvokeAgentAsync("What is the special soup and its price?");
+            await InvokeAgentAsync("What is the special drink and its price?");
+            await InvokeAgentAsync("Thank you");
+        }
+        finally
+        {
+            await agent.DeleteThreadAsync(threadId);
+            await agent.DeleteAsync();
+        }
+
+        // Local function to invoke agent and display the conversation messages.
+        async Task InvokeAgentAsync(string input)
+        {
+            ChatMessageContent message = new(AuthorRole.User, input);
+            await agent.AddChatMessageAsync(threadId, message);
+            this.WriteAgentChatMessage(message);
+
+            await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
+            {
+                this.WriteAgentChatMessage(response);
+            }
+        }
+    }
+
+    [Fact]
+    public async Task UseTemplateForAssistantAgentAsync()
+    {
+        // Define the agent
+        string generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml");
+        PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml);
+
+        // Instructions, Name and Description properties defined via the config.
+        OpenAIAssistantAgent agent =
+            await OpenAIAssistantAgent.CreateFromTemplateAsync(
+                clientProvider: this.GetClientProvider(),
+                capabilities: new OpenAIAssistantCapabilities(this.Model)
+                {
+                    Metadata = AssistantSampleMetadata,
+                },
+                kernel: new Kernel(),
+                defaultArguments: new KernelArguments()
+                {
+                    { "topic", "Dog" },
+                    { "length", "3" },
+                },
+                templateConfig);
+
+        // Create a thread for the agent conversation.
+        string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+
+        try
+        {
+            // Invoke the agent with the default arguments.
+            await InvokeAgentAsync();
+
+            // Invoke the agent with the override arguments.
+            await InvokeAgentAsync(
+                new()
+                {
+                { "topic", "Cat" },
+                { "length", "3" },
+                });
+        }
+        finally
+        {
+            await agent.DeleteThreadAsync(threadId);
+            await agent.DeleteAsync();
+        }
+
+        // Local function to invoke agent and display the response.
+        async Task InvokeAgentAsync(KernelArguments? arguments = null)
+        {
+            await foreach (ChatMessageContent response in agent.InvokeAsync(threadId, arguments))
+            {
+                WriteAgentChatMessage(response);
+            }
+        }
+    }
+
+    private sealed class MenuPlugin
+    {
+        [KernelFunction, Description("Provides a list of specials from the menu.")]
+        [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")]
+        public string GetSpecials() =>
+            """
+            Special Soup: Clam Chowder
+            Special Salad: Cobb Salad
+            Special Drink: Chai Tea
+            """;
+
+        [KernelFunction, Description("Provides the price of the requested menu item.")]
+        public string GetItemPrice(
+            [Description("The name of the menu item.")]
+            string menuItem) =>
+            "$9.99";
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step03_Assistant_Vision.cs b/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs
similarity index 69%
rename from dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step03_Assistant_Vision.cs
rename to dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs
index a9d8f5ead9e0..09b02d4ceebf 100644
--- a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step03_Assistant_Vision.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs
@@ -2,15 +2,14 @@
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents.OpenAI;
 using Microsoft.SemanticKernel.ChatCompletion;
-using OpenAI.Assistants;
 using Resources;
 
-namespace GettingStarted.OpenAIAssistants;
+namespace GettingStarted;
 
 /// 
 /// Demonstrate providing image input to  .
 /// 
-public class Step03_Assistant_Vision(ITestOutputHelper output) : BaseAssistantTest(output)
+public class Step09_Assistant_Vision(ITestOutputHelper output) : BaseAgentsTest(output)
 {
     /// 
     /// Azure currently only supports message of type=text.
@@ -18,23 +17,25 @@ public class Step03_Assistant_Vision(ITestOutputHelper output) : BaseAssistantTe
     protected override bool ForceOpenAI => true;
 
     [Fact]
-    public async Task UseImageContentWithAssistantAsync()
+    public async Task UseSingleAssistantAgentAsync()
     {
-        // Define the assistant
-        Assistant assistant =
-            await this.AssistantClient.CreateAssistantAsync(
-                this.Model,
-                metadata: SampleMetadata);
-
-        // Create the agent
-        OpenAIAssistantAgent agent = new(assistant, this.AssistantClient);
+        // Define the agent
+        OpenAIClientProvider provider = this.GetClientProvider();
+        OpenAIAssistantAgent agent =
+            await OpenAIAssistantAgent.CreateAsync(
+                provider,
+                definition: new OpenAIAssistantDefinition(this.Model)
+                {
+                    Metadata = AssistantSampleMetadata,
+                },
+                kernel: new Kernel());
 
         // Upload an image
         await using Stream imageStream = EmbeddedResource.ReadStream("cat.jpg")!;
-        string fileId = await this.Client.UploadAssistantFileAsync(imageStream, "cat.jpg");
+        string fileId = await agent.UploadFileAsync(imageStream, "cat.jpg");
 
         // Create a thread for the agent conversation.
-        string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
+        string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
 
         // Respond to user input
         try
@@ -47,9 +48,9 @@ await this.AssistantClient.CreateAssistantAsync(
         }
         finally
         {
-            await this.AssistantClient.DeleteThreadAsync(threadId);
-            await this.AssistantClient.DeleteAssistantAsync(agent.Id);
-            await this.Client.DeleteFileAsync(fileId);
+            await agent.DeleteThreadAsync(threadId);
+            await agent.DeleteAsync();
+            await provider.Client.GetOpenAIFileClient().DeleteFileAsync(fileId);
         }
 
         // Local function to invoke agent and display the conversation messages.
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step04_AssistantTool_CodeInterpreter.cs b/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs
similarity index 61%
rename from dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step04_AssistantTool_CodeInterpreter.cs
rename to dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs
index 3de017d422a3..203009ffb561 100644
--- a/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step04_AssistantTool_CodeInterpreter.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs
@@ -2,30 +2,30 @@
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents.OpenAI;
 using Microsoft.SemanticKernel.ChatCompletion;
-using OpenAI.Assistants;
 
-namespace GettingStarted.OpenAIAssistants;
+namespace GettingStarted;
 
 /// 
 /// Demonstrate using code-interpreter on  .
 /// 
-public class Step04_AssistantTool_CodeInterpreter(ITestOutputHelper output) : BaseAssistantTest(output)
+public class Step10_AssistantTool_CodeInterpreter(ITestOutputHelper output) : BaseAgentsTest(output)
 {
     [Fact]
     public async Task UseCodeInterpreterToolWithAssistantAgentAsync()
     {
-        // Define the assistant
-        Assistant assistant =
-            await this.AssistantClient.CreateAssistantAsync(
-                this.Model,
-                enableCodeInterpreter: true,
-                metadata: SampleMetadata);
-
-        // Create the agent
-        OpenAIAssistantAgent agent = new(assistant, this.AssistantClient);
+        // Define the agent
+        OpenAIAssistantAgent agent =
+            await OpenAIAssistantAgent.CreateAsync(
+                clientProvider: this.GetClientProvider(),
+                definition: new(this.Model)
+                {
+                    EnableCodeInterpreter = true,
+                    Metadata = AssistantSampleMetadata,
+                },
+                kernel: new Kernel());
 
         // Create a thread for the agent conversation.
-        string threadId = await this.AssistantClient.CreateThreadAsync(metadata: SampleMetadata);
+        string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
 
         // Respond to user input
         try
@@ -34,8 +34,8 @@ await this.AssistantClient.CreateAssistantAsync(
         }
         finally
         {
-            await this.AssistantClient.DeleteThreadAsync(threadId);
-            await this.AssistantClient.DeleteAssistantAsync(agent.Id);
+            await agent.DeleteThreadAsync(threadId);
+            await agent.DeleteAsync();
         }
 
         // Local function to invoke agent and display the conversation messages.
diff --git a/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs
new file mode 100644
index 000000000000..77f4e5dbdff1
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs
@@ -0,0 +1,84 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Files;
+using OpenAI.VectorStores;
+using Resources;
+
+namespace GettingStarted;
+
+/// 
+/// Demonstrate using file-search on  .
+/// 
+public class Step11_AssistantTool_FileSearch(ITestOutputHelper output) : BaseAgentsTest(output)
+{
+    [Fact]
+    public async Task UseFileSearchToolWithAssistantAgentAsync()
+    {
+        // Define the agent
+        OpenAIClientProvider provider = this.GetClientProvider();
+        OpenAIAssistantAgent agent =
+            await OpenAIAssistantAgent.CreateAsync(
+                clientProvider: this.GetClientProvider(),
+                definition: new OpenAIAssistantDefinition(this.Model)
+                {
+                    EnableFileSearch = true,
+                    Metadata = AssistantSampleMetadata,
+                },
+                kernel: new Kernel());
+
+        // Upload file - Using a table of fictional employees.
+        OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient();
+        await using Stream stream = EmbeddedResource.ReadStream("employees.pdf")!;
+        OpenAIFile fileInfo = await fileClient.UploadFileAsync(stream, "employees.pdf", FileUploadPurpose.Assistants);
+
+        // Create a vector-store
+        VectorStoreClient vectorStoreClient = provider.Client.GetVectorStoreClient();
+        CreateVectorStoreOperation result =
+            await vectorStoreClient.CreateVectorStoreAsync(waitUntilCompleted: false,
+                new VectorStoreCreationOptions()
+                {
+                    FileIds = { fileInfo.Id },
+                    Metadata = { { AssistantSampleMetadataKey, bool.TrueString } }
+                });
+
+        // Create a thread associated with a vector-store for the agent conversation.
+        string threadId =
+            await agent.CreateThreadAsync(
+                new OpenAIThreadCreationOptions
+                {
+                    VectorStoreId = result.VectorStoreId,
+                    Metadata = AssistantSampleMetadata,
+                });
+
+        // Respond to user input
+        try
+        {
+            await InvokeAgentAsync("Who is the youngest employee?");
+            await InvokeAgentAsync("Who works in sales?");
+            await InvokeAgentAsync("I have a customer request, who can help me?");
+        }
+        finally
+        {
+            await agent.DeleteThreadAsync(threadId);
+            await agent.DeleteAsync();
+            await vectorStoreClient.DeleteVectorStoreAsync(result.VectorStoreId);
+            await fileClient.DeleteFileAsync(fileInfo.Id);
+        }
+
+        // Local function to invoke agent and display the conversation messages.
+        async Task InvokeAgentAsync(string input)
+        {
+            ChatMessageContent message = new(AuthorRole.User, input);
+            await agent.AddChatMessageAsync(threadId, message);
+            this.WriteAgentChatMessage(message);
+
+            await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
+            {
+                this.WriteAgentChatMessage(response);
+            }
+        }
+    }
+}
diff --git a/dotnet/samples/GettingStartedWithProcesses/Step04/KernelExtensions.cs b/dotnet/samples/GettingStartedWithProcesses/Step04/KernelExtensions.cs
index 87ce86446994..bae1cc92f31c 100644
--- a/dotnet/samples/GettingStartedWithProcesses/Step04/KernelExtensions.cs
+++ b/dotnet/samples/GettingStartedWithProcesses/Step04/KernelExtensions.cs
@@ -1,9 +1,8 @@
 // Copyright (c) Microsoft. All rights reserved.
-
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
-using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Agents.History;
 
 namespace Step04;
 
diff --git a/dotnet/samples/GettingStartedWithProcesses/Step04/Step04_AgentOrchestration.cs b/dotnet/samples/GettingStartedWithProcesses/Step04/Step04_AgentOrchestration.cs
index 8ee3fb3adad6..e349404c5137 100644
--- a/dotnet/samples/GettingStartedWithProcesses/Step04/Step04_AgentOrchestration.cs
+++ b/dotnet/samples/GettingStartedWithProcesses/Step04/Step04_AgentOrchestration.cs
@@ -1,11 +1,11 @@
 // Copyright (c) Microsoft. All rights reserved.
-
 using Events;
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.Extensions.Logging.Abstractions;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
 using Microsoft.SemanticKernel.Agents.Chat;
+using Microsoft.SemanticKernel.Agents.History;
 using Microsoft.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel.Connectors.OpenAI;
 using SharedSteps;
diff --git a/dotnet/src/.editorconfig b/dotnet/src/.editorconfig
index 7867acb76728..b2afb3dc53c6 100644
--- a/dotnet/src/.editorconfig
+++ b/dotnet/src/.editorconfig
@@ -4,9 +4,6 @@ dotnet_diagnostic.CA2007.severity = error # Do not directly await a Task
 dotnet_diagnostic.VSTHRD111.severity = error # Use .ConfigureAwait(bool)
 dotnet_diagnostic.IDE1006.severity = error # Naming rule violations
 
-# Testing
-dotnet_diagnostic.Moq1400.severity = none # Explicitly choose a mocking behavior instead of relying on the default (Loose) behavior
-
 # Resharper disabled rules: https://www.jetbrains.com/help/resharper/Reference__Code_Inspections_CSHARP.html#CodeSmell
 resharper_not_resolved_in_text_highlighting = none # Disable Resharper's "Not resolved in text" highlighting
 resharper_check_namespace_highlighting = none # Disable Resharper's "Check namespace" highlighting
diff --git a/dotnet/src/Agents/Abstractions/Agent.cs b/dotnet/src/Agents/Abstractions/Agent.cs
index 383b5df27385..06af107a0a5d 100644
--- a/dotnet/src/Agents/Abstractions/Agent.cs
+++ b/dotnet/src/Agents/Abstractions/Agent.cs
@@ -1,7 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Extensions.Logging;
@@ -21,37 +20,32 @@ namespace Microsoft.SemanticKernel.Agents;
 public abstract class Agent
 {
     /// 
-    /// Gets the description of the agent (optional).
+    /// The description of the agent (optional)
     /// 
     public string? Description { get; init; }
 
     /// 
-    /// Gets the identifier of the agent (optional).
+    /// The identifier of the agent (optional).
     /// 
-    /// 
-    /// The identifier of the agent. The default is a random GUID value, but that can be overridden.
-    /// 
+    /// 
+    /// Default to a random guid value, but may be overridden.
+    /// 
     public string Id { get; init; } = Guid.NewGuid().ToString();
 
     /// 
-    /// Gets the name of the agent (optional).
+    /// The name of the agent (optional)
     /// 
     public string? Name { get; init; }
 
     /// 
     /// A  for this .
     /// 
-    public ILoggerFactory? LoggerFactory { get; init; }
+    public ILoggerFactory LoggerFactory { get; init; } = NullLoggerFactory.Instance;
 
     /// 
     /// The  associated with this  .
     /// 
-    protected ILogger Logger => this._logger ??= this.ActiveLoggerFactory.CreateLogger(this.GetType());
-
-    /// 
-    /// Get the active logger factory, if defined; otherwise, provide the default.
-    /// 
-    protected virtual ILoggerFactory ActiveLoggerFactory => this.LoggerFactory ?? NullLoggerFactory.Instance;
+    protected ILogger Logger => this._logger ??= this.LoggerFactory.CreateLogger(this.GetType());
 
     /// 
     /// Set of keys to establish channel affinity.  Minimum expected key-set:
@@ -65,13 +59,10 @@ public abstract class Agent
     /// For example, two OpenAI Assistant agents each targeting a different Azure OpenAI endpoint
     /// would require their own channel. In this case, the endpoint could be expressed as an additional key.
     /// 
-    [Experimental("SKEXP0110")]
-#pragma warning disable CA1024 // Use properties where appropriate
     protected internal abstract IEnumerable GetChannelKeys();
-#pragma warning restore CA1024 // Use properties where appropriate
 
     /// 
-    /// Produce an  appropriate for the agent type.
+    /// Produce an  appropriate for the agent type.
     /// 
     /// The  to monitor for cancellation requests. The default is .
     /// An  appropriate for the agent type.
@@ -79,11 +70,10 @@ public abstract class Agent
     /// Every agent conversation, or , will establish one or more 
     /// objects according to the specific  type.
     /// 
-    [Experimental("SKEXP0110")]
     protected internal abstract Task CreateChannelAsync(CancellationToken cancellationToken);
 
     /// 
-    /// Produce an  appropriate for the agent type based on the provided state.
+    /// Produce an  appropriate for the agent type based on the provided state.
     /// 
     /// The channel state, as serialized
     /// The  to monitor for cancellation requests. The default is .
@@ -92,7 +82,6 @@ public abstract class Agent
     /// Every agent conversation, or , will establish one or more 
     /// objects according to the specific  type.
     /// 
-    [Experimental("SKEXP0110")]
     protected internal abstract Task RestoreChannelAsync(string channelState, CancellationToken cancellationToken);
 
     private ILogger? _logger;
diff --git a/dotnet/src/Agents/Abstractions/AgentChannel.cs b/dotnet/src/Agents/Abstractions/AgentChannel.cs
index 56c631b0c1b9..046348443a39 100644
--- a/dotnet/src/Agents/Abstractions/AgentChannel.cs
+++ b/dotnet/src/Agents/Abstractions/AgentChannel.cs
@@ -1,6 +1,5 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Extensions.Logging;
@@ -10,15 +9,12 @@ namespace Microsoft.SemanticKernel.Agents;
 
 /// 
 /// Defines the communication protocol for a particular  type.
-/// 
-/// 
 /// An agent provides its own  via .
-/// 
-[Experimental("SKEXP0110")]
+/// 
 public abstract class AgentChannel
 {
     /// 
-    /// Gets or sets the  associated with the .
+    /// The  associated with the .
     /// 
     public ILogger Logger { get; set; } = NullLogger.Instance;
 
@@ -28,7 +24,7 @@ public abstract class AgentChannel
     protected internal abstract string Serialize();
 
     /// 
-    /// Receive the conversation messages.  Used when joining a conversation and also during each agent interaction.
+    /// Receive the conversation messages.  Used when joining a conversation and also during each agent interaction.
     /// 
     /// The chat history at the point the channel is created.
     /// The  to monitor for cancellation requests. The default is .
@@ -39,7 +35,7 @@ public abstract class AgentChannel
     /// 
     /// The  to monitor for cancellation requests. The default is .
     /// 
-    /// The channel won't be reused; rather, it will be discarded and a new one created.
+    /// The channel won't be reused; rather, it will be discarded and a new one created.
     /// 
     protected internal abstract Task ResetAsync(CancellationToken cancellationToken = default);
 
@@ -79,17 +75,16 @@ protected internal abstract IAsyncEnumerable Invoke
 
 /// 
 /// Defines the communication protocol for a particular  type.
+/// An agent provides its own  via .
 /// 
-/// The agent type for this channel.
+/// The agent type for this channel
 /// 
-/// An agent provides it own  via .
-/// This class is a convenience upcast to an agent for .
+/// Convenience upcast to agent for .
 /// 
-[Experimental("SKEXP0110")]
 public abstract class AgentChannel : AgentChannel where TAgent : Agent
 {
     /// 
-    /// Process a discrete incremental interaction between a single  and a .
+    /// Process a discrete incremental interaction between a single  and a .
     /// 
     /// The agent actively interacting with the chat.
     /// The  to monitor for cancellation requests. The default is .
@@ -115,7 +110,7 @@ public abstract class AgentChannel : AgentChannel where TAgent : Agent
         return this.InvokeAsync((TAgent)agent, cancellationToken);
     }
     /// 
-    /// Process a discrete incremental interaction between a single  and a .
+    /// Process a discrete incremental interaction between a single  and a .
     /// 
     /// The agent actively interacting with the chat.
     /// The receiver for the completed messages generated
diff --git a/dotnet/src/Agents/Abstractions/AgentChat.cs b/dotnet/src/Agents/Abstractions/AgentChat.cs
index 22b4077527e1..f458739e3bb4 100644
--- a/dotnet/src/Agents/Abstractions/AgentChat.cs
+++ b/dotnet/src/Agents/Abstractions/AgentChat.cs
@@ -1,6 +1,5 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Runtime.CompilerServices;
 using System.Text.Json;
@@ -16,13 +15,12 @@
 namespace Microsoft.SemanticKernel.Agents;
 
 /// 
-/// Provides a point of interaction for one or more agents.
+/// Point of interaction for one or more agents.
 /// 
 /// 
-///  instances don't support concurrent invocation and
-/// will throw an exception if concurrent activity is attempted for any public method.
+/// Any  instance does not support concurrent invocation and
+/// will throw exception if concurrent activity is attempted for any public method.
 /// 
-[Experimental("SKEXP0110")]
 public abstract class AgentChat
 {
     private readonly BroadcastQueue _broadcastQueue;
@@ -33,62 +31,62 @@ public abstract class AgentChat
     private ILogger? _logger;
 
     /// 
-    /// Gets the agents participating in the chat.
+    /// The agents participating in the chat.
     /// 
     public abstract IReadOnlyList Agents { get; }
 
     /// 
-    /// Gets a value that indicates whether a chat operation is active. Activity is defined as
-    /// any execution of a public method.
+    /// Indicates if a chat operation is active.  Activity is defined as
+    /// the execution of any public method.
     /// 
     public bool IsActive => Interlocked.CompareExchange(ref this._isActive, 1, 1) > 0;
 
     /// 
-    /// Gets the  associated with the .
+    /// The  associated with the .
     /// 
     public ILoggerFactory LoggerFactory { get; init; } = NullLoggerFactory.Instance;
 
     /// 
-    /// Gets the  associated with this chat.
+    /// The  associated with this chat.
     /// 
     protected ILogger Logger => this._logger ??= this.LoggerFactory.CreateLogger(this.GetType());
 
     /// 
-    /// Gets the internal history to expose it to subclasses.
+    /// Exposes the internal history to subclasses.
     /// 
     protected ChatHistory History { get; }
 
     /// 
-    /// Processes a series of interactions between the agents participating in this chat.
+    /// Process a series of interactions between the agents participating in this chat.
     /// 
     /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of messages.
+    /// Asynchronous enumeration of messages.
     public abstract IAsyncEnumerable InvokeAsync(CancellationToken cancellationToken = default);
 
     /// 
-    /// Processes a series of interactions between the agents participating in this chat.
+    /// Process a series of interactions between the agents participating in this chat.
     /// 
     /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of messages.
+    /// Asynchronous enumeration of messages.
     public abstract IAsyncEnumerable InvokeStreamingAsync(CancellationToken cancellationToken = default);
 
     /// 
-    /// Retrieves the chat history.
+    /// Retrieve the chat history.
     /// 
     /// The  to monitor for cancellation requests. The default is .
-    /// The message history.
+    /// The message history.
     public IAsyncEnumerable GetChatMessagesAsync(CancellationToken cancellationToken = default) =>
         this.GetChatMessagesAsync(agent: null, cancellationToken);
 
     /// 
-    /// Retrieves the message history, either the primary history or
-    /// an agent-specific version.
+    /// Retrieve the message history, either the primary history or
+    /// an agent-specific version.
     /// 
     /// An optional agent, if requesting an agent history.
     /// The  to monitor for cancellation requests. The default is .
-    /// The message history.
+    /// The message history.
     /// 
-    ///  instances don't support concurrent invocation and
+    /// Any  instance does not support concurrent invocation and
     /// will throw an exception if concurrent activity is attempted.
     /// 
     public async IAsyncEnumerable GetChatMessagesAsync(
@@ -134,38 +132,39 @@ public async IAsyncEnumerable GetChatMessagesAsync(
     }
 
     /// 
-    /// Appends a message to the conversation. Adding a message while an agent
+    /// Append a message to the conversation.  Adding a message while an agent
     /// is active is not allowed.
     /// 
-    /// A non-system message to append to the conversation.
+    /// A non-system message to append to the conversation.
     /// 
-    /// Adding a message to the conversation requires that any active  remains
+    /// Adding a message to the conversation requires that any active  remains
     /// synchronized, so the message is broadcast to all channels.
-    ///
-    ///  instances don't support concurrent invocation and
+    /// 
+    /// A KernelException is thrown if a system message is present; no other action is taken.
+    /// 
+    /// Any  instance does not support concurrent invocation and
     /// will throw an exception if concurrent activity is attempted.
     /// 
-    /// A system message is present, and no other action is taken.
     public void AddChatMessage(ChatMessageContent message)
     {
         this.AddChatMessages([message]);
     }
 
     /// 
-    /// Appends messages to the conversation. Adding messages while an agent
+    /// Append messages to the conversation.  Adding messages while an agent
     /// is active is not allowed.
     /// 
-    /// A set of non-system messages to append to the conversation.
+    /// The set of non-system messages to append to the conversation.
     /// 
-    /// Adding messages to the conversation requires that any active  remains
+    /// Adding messages to the conversation requires that any active  remains
     /// synchronized, so the messages are broadcast to all channels.
-    ///
-    ///  instances don't support concurrent invocation and
+    /// 
+    /// A KernelException is thrown if a system message is present; no other action is taken.
+    /// A KernelException is thrown if the chat has current activity.
+    /// 
+    /// Any  instance does not support concurrent invocation and
     /// will throw an exception if concurrent activity is attempted.
     /// 
-    /// A system message is present, and no other action is taken.
-    /// -or-
-    /// The chat has current activity.
     public void AddChatMessages(IReadOnlyList messages)
     {
         this.SetActivityOrThrow(); // Disallow concurrent access to chat history
@@ -199,13 +198,13 @@ public void AddChatMessages(IReadOnlyList messages)
     }
 
     /// 
-    /// Processes a discrete incremental interaction between a single  and a .
+    /// Process a discrete incremental interaction between a single  and a .
     /// 
     /// The agent actively interacting with the chat.
     /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of messages.
+    /// Asynchronous enumeration of messages.
     /// 
-    ///  instances don't support concurrent invocation and
+    /// Any  instance does not support concurrent invocation and
     /// will throw an exception if concurrent activity is attempted.
     /// 
     protected async IAsyncEnumerable InvokeAgentAsync(
@@ -214,7 +213,7 @@ protected async IAsyncEnumerable InvokeAgentAsync(
     {
         this.SetActivityOrThrow(); // Disallow concurrent access to chat history
 
-        this.Logger.LogAgentChatInvokingAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
+        this.Logger.LogAgentChatInvokingAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id);
 
         try
         {
@@ -227,7 +226,7 @@ protected async IAsyncEnumerable InvokeAgentAsync(
 
             await foreach ((bool isVisible, ChatMessageContent message) in channel.InvokeAsync(agent, cancellationToken).ConfigureAwait(false))
             {
-                this.Logger.LogAgentChatInvokedAgentMessage(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName(), message);
+                this.Logger.LogAgentChatInvokedAgentMessage(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, message);
 
                 messages.Add(message);
 
@@ -249,7 +248,7 @@ protected async IAsyncEnumerable InvokeAgentAsync(
                     .Select(kvp => new ChannelReference(kvp.Value, kvp.Key));
             this._broadcastQueue.Enqueue(channelRefs, messages);
 
-            this.Logger.LogAgentChatInvokedAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
+            this.Logger.LogAgentChatInvokedAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id);
         }
         finally
         {
@@ -258,13 +257,13 @@ protected async IAsyncEnumerable InvokeAgentAsync(
     }
 
     /// 
-    /// Processes a discrete incremental interaction between a single  and a .
+    /// Process a discrete incremental interaction between a single  and a .
     /// 
     /// The agent actively interacting with the chat.
     /// The  to monitor for cancellation requests. The default is .
     /// Asynchronous enumeration of messages.
     /// 
-    ///  instances don't support concurrent invocation and
+    /// Any  instance does not support concurrent invocation and
     /// will throw an exception if concurrent activity is attempted.
     /// 
     protected async IAsyncEnumerable InvokeStreamingAgentAsync(
@@ -273,7 +272,7 @@ protected async IAsyncEnumerable InvokeStreamingAge
     {
         this.SetActivityOrThrow(); // Disallow concurrent access to chat history
 
-        this.Logger.LogAgentChatInvokingAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
+        this.Logger.LogAgentChatInvokingAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id);
 
         try
         {
@@ -291,7 +290,7 @@ protected async IAsyncEnumerable InvokeStreamingAge
 
             this.History.AddRange(messages);
 
-            this.Logger.LogAgentChatInvokedStreamingAgentMessages(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName(), messages);
+            this.Logger.LogAgentChatInvokedStreamingAgentMessages(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, messages);
 
             // Broadcast message to other channels (in parallel)
             // Note: Able to queue messages without synchronizing channels.
@@ -301,7 +300,7 @@ protected async IAsyncEnumerable InvokeStreamingAge
                     .Select(kvp => new ChannelReference(kvp.Value, kvp.Key));
             this._broadcastQueue.Enqueue(channelRefs, messages);
 
-            this.Logger.LogAgentChatInvokedAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
+            this.Logger.LogAgentChatInvokedAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id);
         }
         finally
         {
@@ -310,7 +309,7 @@ protected async IAsyncEnumerable InvokeStreamingAge
     }
 
     /// 
-    /// Resets the chat, clearing all history and persisted state.
+    /// Reset the chat, clearing all history and persisted state.
     /// All agents will remain present.
     /// 
     /// The  to monitor for cancellation requests. The default is .
@@ -396,12 +395,12 @@ private void ClearActivitySignal()
     }
 
     /// 
-    /// Checks to ensure the chat is not concurrently active and throws an exception if it is.
+    /// Test to ensure the chat is not concurrently active and throw an exception if it is.
     /// If not, activity is signaled.
     /// 
     /// 
-    /// Rather than allowing concurrent invocation to result in undefined behavior or failure,
-    /// it's preferred to fail fast to avoid side effects or state mutation.
+    /// Rather than allowing concurrent invocation to result in undefined behavior / failure,
+    /// it is preferred to fail-fast in order to avoid side-effects / state mutation.
     /// The activity signal is used to manage ability and visibility for taking actions based
     /// on conversation history.
     /// 
@@ -434,7 +433,7 @@ private async Task GetOrCreateChannelAsync(Agent agent, Cancellati
         AgentChannel? channel = await this.SynchronizeChannelAsync(channelKey, cancellationToken).ConfigureAwait(false);
         if (channel is null)
         {
-            this.Logger.LogAgentChatCreatingChannel(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
+            this.Logger.LogAgentChatCreatingChannel(nameof(InvokeAgentAsync), agent.GetType(), agent.Id);
 
             channel = await agent.CreateChannelAsync(cancellationToken).ConfigureAwait(false);
 
@@ -446,7 +445,7 @@ private async Task GetOrCreateChannelAsync(Agent agent, Cancellati
                 await channel.ReceiveAsync(this.History, cancellationToken).ConfigureAwait(false);
             }
 
-            this.Logger.LogAgentChatCreatedChannel(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
+            this.Logger.LogAgentChatCreatedChannel(nameof(InvokeAgentAsync), agent.GetType(), agent.Id);
         }
 
         return channel;
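
The hunks above lean on an activity signal to reject concurrent use rather than risk undefined behavior. A minimal sketch of that fail-fast pattern follows; the member names echo the ones in this file, but the bodies are illustrative assumptions rather than copies of the implementation.

// Illustrative fail-fast guard, assuming the same Interlocked-based signal as above.
private int _isActive;

public bool IsActive => Interlocked.CompareExchange(ref this._isActive, 1, 1) > 0;

private void SetActivityOrThrow()
{
    // Atomically flip 0 -> 1; a prior value of 1 means another public method is running.
    if (Interlocked.CompareExchange(ref this._isActive, 1, 0) > 0)
    {
        throw new KernelException("Unable to proceed while another agent operation is active.");
    }
}

private void ClearActivitySignal()
{
    // Release the signal so the next public call may proceed.
    Interlocked.Exchange(ref this._isActive, 0);
}
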
diff --git a/dotnet/src/Agents/Abstractions/AgentChatSerializer.cs b/dotnet/src/Agents/Abstractions/AgentChatSerializer.cs
index b6174284d959..146e00d7965f 100644
--- a/dotnet/src/Agents/Abstractions/AgentChatSerializer.cs
+++ b/dotnet/src/Agents/Abstractions/AgentChatSerializer.cs
@@ -1,6 +1,5 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.IO;
 using System.Text.Json;
 using System.Text.Json.Serialization;
@@ -10,9 +9,8 @@
 namespace Microsoft.SemanticKernel.Agents;
 
 /// 
-/// Serializes and deserializes an .
+/// Able to serialize and deserialize an .
 /// 
-[Experimental("SKEXP0110")]
 public sealed class AgentChatSerializer
 {
     private readonly AgentChatState _state;
@@ -25,7 +23,7 @@ public sealed class AgentChatSerializer
         };
 
     /// 
-    /// Serializes the provided  to the target stream.
+    /// Serialize the provided  to the target stream.
     /// 
     public static async Task SerializeAsync(TChat chat, Stream stream, JsonSerializerOptions? serializerOptions = null) where TChat : AgentChat
     {
@@ -34,7 +32,7 @@ public static async Task SerializeAsync(TChat chat, Stream stream, JsonSe
     }
 
     /// 
-    /// Provides a  that's able to restore an .
+    /// Provides a  that is able to restore an .
     /// 
     public static async Task DeserializeAsync(Stream stream, JsonSerializerOptions? serializerOptions = null)
     {
@@ -46,13 +44,13 @@ await JsonSerializer.DeserializeAsync(stream, serializerOptions
     }
 
     /// 
-    /// Gets the participants of the original  so that
-    /// the caller can include them in the restored .
+    /// Enumerates the participants of the original  so that
+    /// the caller may include them in the restored .
     /// 
     public IEnumerable Participants => this._state.Participants;
 
     /// 
-    /// Restores the  to the previously captured state.
+    /// Restore the  to the previously captured state.
     /// 
     public Task DeserializeAsync(TChat chat) where TChat : AgentChat => chat.DeserializeAsync(this._state);
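
The serializer surface shown in this hunk supports a capture/restore round trip. A short usage sketch follows; the MyGroupChat type, the stream handling, and the agent set are assumptions for illustration.

// Hypothetical round trip over the AgentChatSerializer members shown above.
static async Task SaveAndRestoreAsync(MyGroupChat originalChat, MyGroupChat restoredChat)
{
    using MemoryStream stream = new();
    await AgentChatSerializer.SerializeAsync(originalChat, stream);

    stream.Position = 0;
    AgentChatSerializer serializer = await AgentChatSerializer.DeserializeAsync(stream);

    // Participants lists the original agents so the caller can include them
    // in the restored chat before applying the captured state.
    foreach (AgentParticipant participant in serializer.Participants)
    {
        Console.WriteLine($"{participant.Type}: {participant.Id}");
    }

    await serializer.DeserializeAsync(restoredChat);
}
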
 
diff --git a/dotnet/src/Agents/Abstractions/Agents.Abstractions.csproj b/dotnet/src/Agents/Abstractions/Agents.Abstractions.csproj
index 2cc0d9799bc1..86d2f37c2b66 100644
--- a/dotnet/src/Agents/Abstractions/Agents.Abstractions.csproj
+++ b/dotnet/src/Agents/Abstractions/Agents.Abstractions.csproj
@@ -1,4 +1,4 @@
-
+
 
   
     
@@ -6,7 +6,7 @@
     Microsoft.SemanticKernel.Agents
     net8.0;netstandard2.0
     false
-    preview
+    alpha
   
 
   
@@ -20,7 +20,6 @@
   
     
     
-    
   
 
   
@@ -30,7 +29,7 @@
   
 
   
-    
+    
   
 
   
diff --git a/dotnet/src/Agents/Abstractions/AggregatorAgent.cs b/dotnet/src/Agents/Abstractions/AggregatorAgent.cs
index 8cde6b5a9001..eb1f7d0fac98 100644
--- a/dotnet/src/Agents/Abstractions/AggregatorAgent.cs
+++ b/dotnet/src/Agents/Abstractions/AggregatorAgent.cs
@@ -1,7 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Text.Json;
 using System.Threading;
 using System.Threading.Tasks;
@@ -13,7 +12,6 @@ namespace Microsoft.SemanticKernel.Agents;
 /// Defines the relationship between the internal aggregated chat and the chat
 /// with which  is participating.
 /// 
-[Experimental("SKEXP0110")]
 public enum AggregatorMode
 {
     /// 
@@ -31,22 +29,18 @@ public enum AggregatorMode
 /// Allows an  to participate in another  as an .
 /// 
 /// A factory method that produces a new  instance.
-[Experimental("SKEXP0110")]
 public sealed class AggregatorAgent(Func chatProvider) : Agent
 {
     /// 
-    /// Gets the relationship between the internal aggregated chat and the chat
+    /// Defines the relationship between the internal aggregated chat and the chat
     /// with which  is participating.
+    /// Default: .
     /// 
-    /// 
-    /// The relationship between the internal aggregated chat and the chat
-    /// with which  is participating. The default value is .
-    /// 
     public AggregatorMode Mode { get; init; } = AggregatorMode.Flat;
 
     /// 
     /// 
-    /// Different  instances will never share the same channel.
+    /// Different  will never share the same channel.
     /// 
     protected internal override IEnumerable GetChannelKeys()
     {
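
For the Mode default documented above, a small construction sketch; the inner chat type and agent name are assumptions.

// Hypothetical construction: the delegate produces the aggregated chat on demand,
// and Mode keeps its documented default of AggregatorMode.Flat (set explicitly here).
AggregatorAgent aggregator = new(() => new MyInnerGroupChat())
{
    Name = "aggregator",
    Mode = AggregatorMode.Flat,
};
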
diff --git a/dotnet/src/Agents/Abstractions/AggregatorChannel.cs b/dotnet/src/Agents/Abstractions/AggregatorChannel.cs
index a002e41351af..f0dcf5736192 100644
--- a/dotnet/src/Agents/Abstractions/AggregatorChannel.cs
+++ b/dotnet/src/Agents/Abstractions/AggregatorChannel.cs
@@ -1,6 +1,5 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Runtime.CompilerServices;
 using System.Text.Json;
@@ -12,7 +11,6 @@ namespace Microsoft.SemanticKernel.Agents;
 /// 
 /// Adapt channel contract to underlying .
 /// 
-[Experimental("SKEXP0110")]
 internal sealed class AggregatorChannel(AgentChat chat) : AgentChannel
 {
     private readonly AgentChat _chat = chat;
diff --git a/dotnet/src/Agents/Abstractions/Extensions/ChatHistoryExtensions.cs b/dotnet/src/Agents/Abstractions/Extensions/ChatHistoryExtensions.cs
index 0e93e3a3e2fd..d8ef44a416a1 100644
--- a/dotnet/src/Agents/Abstractions/Extensions/ChatHistoryExtensions.cs
+++ b/dotnet/src/Agents/Abstractions/Extensions/ChatHistoryExtensions.cs
@@ -6,14 +6,14 @@
 namespace Microsoft.SemanticKernel.Agents.Extensions;
 
 /// 
-/// Provides extension methods for .
+/// Extension methods for .
 /// 
 public static class ChatHistoryExtensions
 {
     /// 
-    /// Enumerates a chat history in descending order.
+    /// Enumeration of chat-history in descending order.
     /// 
-    /// The chat history to sort.
+    /// The chat-history.
     public static IEnumerable ToDescending(this ChatHistory history)
     {
         for (int index = history.Count; index > 0; --index)
@@ -23,9 +23,9 @@ public static IEnumerable ToDescending(this ChatHistory hist
     }
 
     /// 
-    /// Enumerates a history in descending order asynchronously.
+    /// Asynchronous enumeration of chat-history in descending order.
     /// 
-    /// The chat history to sort.
+    /// The chat-history.
     public static IAsyncEnumerable ToDescendingAsync(this ChatHistory history)
     {
         return history.ToDescending().ToAsyncEnumerable();
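
A brief usage illustration of the descending enumeration defined above; the message contents are assumptions.

// Iterate a chat history newest-to-oldest using the extension shown above.
ChatHistory history = new();
history.AddUserMessage("first");
history.AddAssistantMessage("second");

foreach (ChatMessageContent message in history.ToDescending())
{
    // Prints "second" before "first".
    Console.WriteLine($"{message.Role}: {message.Content}");
}
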
diff --git a/dotnet/src/Agents/Abstractions/Internal/BroadcastQueue.cs b/dotnet/src/Agents/Abstractions/Internal/BroadcastQueue.cs
index 5c3d6fcf7bb3..b4007eec2c49 100644
--- a/dotnet/src/Agents/Abstractions/Internal/BroadcastQueue.cs
+++ b/dotnet/src/Agents/Abstractions/Internal/BroadcastQueue.cs
@@ -1,7 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Threading;
 using System.Threading.Tasks;
 using ChannelQueue = System.Collections.Generic.Queue>;
@@ -22,14 +21,13 @@ namespace Microsoft.SemanticKernel.Agents.Internal;
 ///  is never invoked concurrently, which eliminates
 /// race conditions over the queue dictionary.
 /// 
-[Experimental("SKEXP0110")]
 internal sealed class BroadcastQueue
 {
     private readonly Dictionary _queues = [];
 
     /// 
-    /// Defines the yield duration when waiting on a channel-queue to synchronize
-    /// and drain.
+    /// Defines the yield duration when waiting on a channel-queue to synchronize
+    /// and drain.
     /// 
     public TimeSpan BlockDuration { get; set; } = TimeSpan.FromSeconds(0.1);
 
diff --git a/dotnet/src/Agents/Abstractions/Internal/ChannelReference.cs b/dotnet/src/Agents/Abstractions/Internal/ChannelReference.cs
index 236e25415879..f49835355157 100644
--- a/dotnet/src/Agents/Abstractions/Internal/ChannelReference.cs
+++ b/dotnet/src/Agents/Abstractions/Internal/ChannelReference.cs
@@ -1,12 +1,9 @@
 // Copyright (c) Microsoft. All rights reserved.
-using System.Diagnostics.CodeAnalysis;
-
 namespace Microsoft.SemanticKernel.Agents.Internal;
 
 /// 
 /// Tracks channel along with its hashed key.
 /// 
-[Experimental("SKEXP0110")]
 internal readonly struct ChannelReference(AgentChannel channel, string hash)
 {
     /// 
diff --git a/dotnet/src/Agents/Abstractions/KernelAgent.cs b/dotnet/src/Agents/Abstractions/KernelAgent.cs
index aac75e2fd62f..719936b868f1 100644
--- a/dotnet/src/Agents/Abstractions/KernelAgent.cs
+++ b/dotnet/src/Agents/Abstractions/KernelAgent.cs
@@ -3,74 +3,85 @@
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
-using Microsoft.Extensions.Logging;
 
 namespace Microsoft.SemanticKernel.Agents;
 
 /// 
-/// Provides a base class for agents utilizing  plugins or services.
+/// Base class for agents utilizing  plugins or services.
 /// 
 public abstract class KernelAgent : Agent
 {
     /// 
-    /// Gets the arguments for the agent instruction parameters (optional).
+    /// Arguments for the agent instruction parameters (optional).
     /// 
     /// 
     /// Also includes .
     /// 
-    public KernelArguments Arguments { get; init; } = [];
+    public KernelArguments? Arguments { get; init; }
 
     /// 
-    /// Gets the instructions for the agent (optional).
+    /// The instructions for the agent (optional).
     /// 
+    /// 
+    /// Instructions may be formatted in "semantic-kernel" template format.
+    /// ()
+    /// 
     public string? Instructions { get; init; }
 
     /// 
-    /// Gets the  containing services, plugins, and filters for use throughout the agent lifetime.
+    /// The  containing services, plugins, and filters for use throughout the agent lifetime.
     /// 
-    /// 
-    /// The  containing services, plugins, and filters for use throughout the agent lifetime. The default value is an empty Kernel, but that can be overridden.
-    /// 
+    /// 
+    /// Defaults to an empty Kernel, but may be overridden.
+    /// 
     public Kernel Kernel { get; init; } = new();
 
     /// 
-    /// Gets or sets a prompt template based on the agent instructions.
+    /// A prompt-template based on the agent instructions.
     /// 
-    protected IPromptTemplate? Template { get; set; }
-
-    /// 
-    protected override ILoggerFactory ActiveLoggerFactory => this.LoggerFactory ?? this.Kernel.LoggerFactory;
+    public IPromptTemplate? Template { get; protected set; }
 
     /// 
-    /// Formats the system instructions for the agent.
+    /// Format the system instructions for the agent.
     /// 
     /// The  containing services, plugins, and other state for use by the agent.
     /// Optional arguments to pass to the agents's invocation, including any .
     /// The  to monitor for cancellation requests. The default is .
-    /// The formatted system instructions for the agent.
+    /// The formatted system instructions for the agent.
     protected async Task FormatInstructionsAsync(Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken)
     {
-        if (this.Template is null)
+        // If  is not set, the default instructions may be treated as a "semantic-kernel" template.
+        if (this.Template == null)
         {
-            // Use the instructions as-is
-            return this.Instructions;
+            if (string.IsNullOrWhiteSpace(this.Instructions))
+            {
+                return null;
+            }
+
+            KernelPromptTemplateFactory templateFactory = new(this.LoggerFactory);
+            this.Template = templateFactory.Create(new PromptTemplateConfig(this.Instructions!));
         }
 
-        // Use the provided template as the instructions
         return await this.Template.RenderAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
     }
 
     /// 
-    /// Provides a merged instance of  with precedence for override arguments.
+    /// Provide a merged instance of  with precedence for override arguments.
     /// 
-    /// The override arguments.
+    /// The override arguments.
     /// 
     /// This merge preserves original  and  parameters.
-    /// It allows for incremental addition or replacement of specific parameters while also preserving the ability
+    /// It allows for incremental addition or replacement of specific parameters while also preserving the ability
     /// to override the execution settings.
     /// 
-    protected KernelArguments MergeArguments(KernelArguments? arguments)
+    protected KernelArguments? MergeArguments(KernelArguments? arguments)
     {
+        // Avoid merge when default arguments are not set.
+        if (this.Arguments == null)
+        {
+            return arguments;
+        }
+
         // Avoid merge when override arguments are not set.
         if (arguments == null)
         {
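
The FormatInstructionsAsync change above falls back to the default "semantic-kernel" template engine when no Template was supplied. A minimal sketch of that fallback in isolation follows; the instruction text and argument values are assumptions.

// Sketch of the fallback path: plain instructions parsed with the default
// "semantic-kernel" template format and rendered against the supplied arguments.
static async Task<string> RenderInstructionsAsync(Kernel kernel)
{
    KernelPromptTemplateFactory factory = new();
    IPromptTemplate template = factory.Create(new PromptTemplateConfig("Explain {{$topic}} to a beginner."));

    KernelArguments arguments = new() { ["topic"] = "vector stores" };
    return await template.RenderAsync(kernel, arguments);
}
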
diff --git a/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs b/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs
index 22c2bda0e5da..ebd9e83b42ce 100644
--- a/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs
+++ b/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs
@@ -3,7 +3,6 @@
 using System.Collections.Generic;
 using System.Diagnostics.CodeAnalysis;
 using Microsoft.Extensions.Logging;
-using Microsoft.SemanticKernel.Agents.Extensions;
 
 namespace Microsoft.SemanticKernel.Agents;
 
@@ -22,12 +21,11 @@ internal static partial class AgentChatLogMessages
     /// 
     /// Logs retrieval of  messages.
     /// 
-    private static readonly Action s_logAgentChatGetChatMessages =
-        LoggerMessage.Define(
+    private static readonly Action s_logAgentChatGetChatMessages =
+        LoggerMessage.Define(
             logLevel: LogLevel.Debug,
             eventId: 0,
-            "[{MethodName}] Source: {MessageSourceType}/{MessageSourceId}/{MessageSourceName}.");
-
+            "[{MethodName}] Source: {MessageSourceType}/{MessageSourceId}.");
     public static void LogAgentChatGetChatMessages(
         this ILogger logger,
         string methodName,
@@ -35,13 +33,13 @@ public static void LogAgentChatGetChatMessages(
     {
         if (logger.IsEnabled(LogLevel.Debug))
         {
-            if (agent is null)
+            if (null == agent)
             {
-                s_logAgentChatGetChatMessages(logger, methodName, "primary", "primary", null, null);
+                s_logAgentChatGetChatMessages(logger, methodName, "primary", "primary", null);
             }
             else
             {
-                s_logAgentChatGetChatMessages(logger, methodName, agent.GetType().Name, agent.Id, agent.GetDisplayName(), null);
+                s_logAgentChatGetChatMessages(logger, methodName, agent.GetType().Name, agent.Id, null);
             }
         }
     }
@@ -76,13 +74,12 @@ public static partial void LogAgentChatAddedMessages(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Debug,
-        Message = "[{MethodName}] Invoking agent {AgentType}/{AgentId}/{AgentName}.")]
+        Message = "[{MethodName}] Invoking agent {AgentType}/{AgentId}.")]
     public static partial void LogAgentChatInvokingAgent(
         this ILogger logger,
         string methodName,
         Type agentType,
-        string agentId,
-        string agentName);
+        string agentId);
 
     /// 
     /// Logs  invoked agent message
@@ -90,37 +87,35 @@ public static partial void LogAgentChatInvokingAgent(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Trace,
-        Message = "[{MethodName}] Agent message {AgentType}/{AgentId}/{AgentName}: {Message}.")]
+        Message = "[{MethodName}] Agent message {AgentType}/{AgentId}: {Message}.")]
     public static partial void LogAgentChatInvokedAgentMessage(
         this ILogger logger,
         string methodName,
         Type agentType,
         string agentId,
-        string agentName,
         ChatMessageContent message);
 
     /// 
     /// Logs retrieval of streamed  messages.
     /// 
-    private static readonly Action s_logAgentChatInvokedStreamingAgentMessages =
-        LoggerMessage.Define(
+    private static readonly Action s_logAgentChatInvokedStreamingAgentMessages =
+        LoggerMessage.Define(
             logLevel: LogLevel.Debug,
             eventId: 0,
-            "[{MethodName}] Agent message {AgentType}/{AgentId}/{AgentName}: {Message}.");
+            "[{MethodName}] Agent message {AgentType}/{AgentId}: {Message}.");
 
     public static void LogAgentChatInvokedStreamingAgentMessages(
         this ILogger logger,
         string methodName,
         Type agentType,
         string agentId,
-        string agentName,
         IList messages)
     {
         if (logger.IsEnabled(LogLevel.Debug))
         {
             foreach (ChatMessageContent message in messages)
             {
-                s_logAgentChatInvokedStreamingAgentMessages(logger, methodName, agentType, agentId, agentName, message, null);
+                s_logAgentChatInvokedStreamingAgentMessages(logger, methodName, agentType, agentId, message, null);
             }
         }
     }
@@ -131,13 +126,12 @@ public static void LogAgentChatInvokedStreamingAgentMessages(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Information,
-        Message = "[{MethodName}] Invoked agent {AgentType}/{AgentId}/{AgentName}.")]
+        Message = "[{MethodName}] Invoked agent {AgentType}/{AgentId}.")]
     public static partial void LogAgentChatInvokedAgent(
         this ILogger logger,
         string methodName,
         Type agentType,
-        string agentId,
-        string agentName);
+        string agentId);
 
     /// 
     /// Logs  creating agent channel (started).
@@ -145,13 +139,12 @@ public static partial void LogAgentChatInvokedAgent(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Debug,
-        Message = "[{MethodName}] Creating channel for {AgentType}: {AgentId}/{AgentName}")]
+        Message = "[{MethodName}] Creating channel for {AgentType}: {AgentId}")]
     public static partial void LogAgentChatCreatingChannel(
         this ILogger logger,
         string methodName,
         Type agentType,
-        string agentId,
-        string agentName);
+        string agentId);
 
     /// 
     /// Logs  created agent channel (complete).
@@ -159,11 +152,10 @@ public static partial void LogAgentChatCreatingChannel(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Information,
-        Message = "[{MethodName}] Created channel for {AgentType}: {AgentId}/{AgentName}")]
+        Message = "[{MethodName}] Created channel for {AgentType}: {AgentId}")]
     public static partial void LogAgentChatCreatedChannel(
         this ILogger logger,
         string methodName,
         Type agentType,
-        string agentId,
-        string agentName);
+        string agentId);
 }
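
This file mixes two high-performance logging shapes, and the hunks above trim the agent display name from both. A compact sketch of the two shapes with the reduced {AgentType}/{AgentId} parameter list follows; the class and method names are hypothetical.

using System;
using Microsoft.Extensions.Logging;

internal static partial class ExampleLogMessages
{
    // Cached-delegate form: useful when the call site decides at runtime whether to log.
    private static readonly Action<ILogger, string, Type, string, Exception?> s_invoking =
        LoggerMessage.Define<string, Type, string>(
            LogLevel.Debug,
            new EventId(0),
            "[{MethodName}] Invoking agent {AgentType}/{AgentId}.");

    public static void LogInvoking(this ILogger logger, string methodName, Type agentType, string agentId)
    {
        if (logger.IsEnabled(LogLevel.Debug))
        {
            s_invoking(logger, methodName, agentType, agentId, null);
        }
    }

    // Source-generated form: the generator emits equivalent code at compile time.
    [LoggerMessage(EventId = 1, Level = LogLevel.Information, Message = "[{MethodName}] Invoked agent {AgentType}/{AgentId}.")]
    public static partial void LogInvoked(this ILogger logger, string methodName, Type agentType, string agentId);
}
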
diff --git a/dotnet/src/Agents/Abstractions/Logging/AggregatorAgentLogMessages.cs b/dotnet/src/Agents/Abstractions/Logging/AggregatorAgentLogMessages.cs
index 08eb87c8613a..441c9da117f5 100644
--- a/dotnet/src/Agents/Abstractions/Logging/AggregatorAgentLogMessages.cs
+++ b/dotnet/src/Agents/Abstractions/Logging/AggregatorAgentLogMessages.cs
@@ -15,7 +15,6 @@ namespace Microsoft.SemanticKernel.Agents;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
-[Experimental("SKEXP0110")]
 internal static partial class AggregatorAgentLogMessages
 {
     /// 
diff --git a/dotnet/src/Agents/AzureAI/Properties/AssemblyInfo.cs b/dotnet/src/Agents/Abstractions/Properties/AssemblyInfo.cs
similarity index 100%
rename from dotnet/src/Agents/AzureAI/Properties/AssemblyInfo.cs
rename to dotnet/src/Agents/Abstractions/Properties/AssemblyInfo.cs
diff --git a/dotnet/src/Agents/Abstractions/Serialization/AgentParticipant.cs b/dotnet/src/Agents/Abstractions/Serialization/AgentParticipant.cs
index 278660096562..564f68b72ab6 100644
--- a/dotnet/src/Agents/Abstractions/Serialization/AgentParticipant.cs
+++ b/dotnet/src/Agents/Abstractions/Serialization/AgentParticipant.cs
@@ -9,37 +9,31 @@ namespace Microsoft.SemanticKernel.Agents.Serialization;
 public sealed class AgentParticipant
 {
     /// 
-    /// Gets the captured .
+    /// The captured .
     /// 
     public string Id { get; init; } = string.Empty;
 
     /// 
-    /// Gets the captured .
+    /// The captured .
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? Name { get; init; }
 
     /// 
-    /// Gets the fully qualified  type name.
+    /// The fully qualified  type name.
     /// 
     public string Type { get; init; } = string.Empty;
 
     /// 
-    /// Creates a new instance of .
+    /// Parameterless constructor for deserialization.
     /// 
-    /// 
-    /// This parameterless constructor is for deserialization.
-    /// 
     [JsonConstructor]
     public AgentParticipant() { }
 
     /// 
-    /// Creates a new instance of  with the specified agent.
+    /// Convenience constructor for serialization.
     /// 
-    /// 
-    /// This is a convenience constructor for serialization.
-    /// 
-    /// The referenced .
+    /// The referenced .
     internal AgentParticipant(Agent agent)
     {
         this.Id = agent.Id;
diff --git a/dotnet/src/Agents/Abstractions/Serialization/ChatMessageReference.cs b/dotnet/src/Agents/Abstractions/Serialization/ChatMessageReference.cs
index d69011639d86..f71f86c18b9d 100644
--- a/dotnet/src/Agents/Abstractions/Serialization/ChatMessageReference.cs
+++ b/dotnet/src/Agents/Abstractions/Serialization/ChatMessageReference.cs
@@ -1,6 +1,5 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Text.Json.Serialization;
 using Microsoft.SemanticKernel.ChatCompletion;
@@ -8,41 +7,40 @@
 namespace Microsoft.SemanticKernel.Agents.Serialization;
 
 /// 
-/// Represents a  for serialization without metadata.
+/// Present a  for serialization without metadata.
 /// 
 /// The referenced message
-[Experimental("SKEXP0110")]
 public sealed class ChatMessageReference(ChatMessageContent message)
 {
     /// 
-    /// Gets the referenced  property.
+    /// The referenced  property.
     /// 
     public string? AuthorName => message.AuthorName;
 
     /// 
-    /// Gets the referenced  property.
+    /// The referenced  property.
     /// 
     public AuthorRole Role => message.Role;
 
     /// 
-    /// Gets the referenced  collection.
+    /// The referenced  collection.
     /// 
     public IEnumerable Items => message.Items;
 
     /// 
-    /// Gets the referenced  property.
+    /// The referenced  property.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? ModelId => message.ModelId;
 
     /// 
-    /// Gets the referenced  property.
+    /// The referenced  property.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? MimeType => message.MimeType;
 
     /// 
-    /// Converts a set of messages to  instances.
+    /// Convenience method to reference a set of messages.
     /// 
     public static IEnumerable Prepare(IEnumerable messages) =>
         messages.Select(m => new ChatMessageReference(m));
diff --git a/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj b/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj
deleted file mode 100644
index 5d26a6a16798..000000000000
--- a/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj
+++ /dev/null
@@ -1,48 +0,0 @@
-
-
-  
-    
-    Microsoft.SemanticKernel.Agents.AzureAI
-    Microsoft.SemanticKernel.Agents.AzureAI
-    net8.0;netstandard2.0
-    $(NoWarn);SKEXP0110
-    false
-    preview
-  
-
-  
-
-  
-    
-    Semantic Kernel Agents - AzureAI
-    Defines a concrete Agent based on the Azure AI Agent API.
-  
-
-  
-    
-    
-    
-    
-    
-    
-    
-    
-    
-  
-
-  
-
-  
-    
-  
-
-  
-    
-  
-
-  
-    
-    
-  
-
-
\ No newline at end of file
diff --git a/dotnet/src/Agents/AzureAI/AzureAIAgent.ClientFactory.cs b/dotnet/src/Agents/AzureAI/AzureAIAgent.ClientFactory.cs
deleted file mode 100644
index f17a977ccd24..000000000000
--- a/dotnet/src/Agents/AzureAI/AzureAIAgent.ClientFactory.cs
+++ /dev/null
@@ -1,65 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.Net.Http;
-using Azure.AI.Projects;
-using Azure.Core;
-using Azure.Core.Pipeline;
-using Microsoft.SemanticKernel.Http;
-
-namespace Microsoft.SemanticKernel.Agents.AzureAI;
-
-/// 
-/// Provides an  for use by .
-/// 
-public sealed partial class AzureAIAgent : KernelAgent
-{
-    /// 
-    /// Produces a .
-    /// 
-    /// The Azure AI Foundry project connection string, in the form `endpoint;subscription_id;resource_group_name;project_name`.
-    ///  A credential used to authenticate to an Azure Service.
-    /// A custom  for HTTP requests.
-    public static AIProjectClient CreateAzureAIClient(
-        string connectionString,
-        TokenCredential credential,
-        HttpClient? httpClient = null)
-    {
-        Verify.NotNullOrWhiteSpace(connectionString, nameof(connectionString));
-        Verify.NotNull(credential, nameof(credential));
-
-        AIProjectClientOptions clientOptions = CreateAzureClientOptions(httpClient);
-
-        return new AIProjectClient(connectionString, credential, clientOptions);
-    }
-
-    private static AIProjectClientOptions CreateAzureClientOptions(HttpClient? httpClient)
-    {
-        AIProjectClientOptions options =
-            new()
-            {
-                Diagnostics = {
-                    ApplicationId = HttpHeaderConstant.Values.UserAgent,
-                }
-            };
-
-        options.AddPolicy(new SemanticKernelHeadersPolicy(), HttpPipelinePosition.PerCall);
-
-        if (httpClient is not null)
-        {
-            options.Transport = new HttpClientTransport(httpClient);
-            // Disable retry policy if and only if a custom HttpClient is provided.
-            options.RetryPolicy = new RetryPolicy(maxRetries: 0);
-        }
-
-        return options;
-    }
-
-    private class SemanticKernelHeadersPolicy : HttpPipelineSynchronousPolicy
-    {
-        public override void OnSendingRequest(HttpMessage message)
-        {
-            message.Request.Headers.Add(
-                HttpHeaderConstant.Names.SemanticKernelVersion,
-                HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AzureAIAgent)));
-        }
-    }
-}
diff --git a/dotnet/src/Agents/AzureAI/AzureAIAgent.cs b/dotnet/src/Agents/AzureAI/AzureAIAgent.cs
deleted file mode 100644
index 912bd83778fe..000000000000
--- a/dotnet/src/Agents/AzureAI/AzureAIAgent.cs
+++ /dev/null
@@ -1,285 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.Collections.Generic;
-using System.Threading;
-using System.Threading.Tasks;
-using Azure.AI.Projects;
-using Microsoft.Extensions.Logging;
-using Microsoft.SemanticKernel.Agents.AzureAI.Internal;
-using Microsoft.SemanticKernel.Agents.Extensions;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Diagnostics;
-
-namespace Microsoft.SemanticKernel.Agents.AzureAI;
-
-/// 
-/// Provides a specialized  based on an Azure AI agent.
-/// 
-public sealed partial class AzureAIAgent : KernelAgent
-{
-    /// 
-    /// Provides tool definitions used when associating a file attachment to an input message:
-    /// .
-    /// 
-    public static class Tools
-    {
-        /// 
-        /// The code-interpreter tool.
-        /// 
-        public static readonly string CodeInterpreter = "code_interpreter";
-
-        /// 
-        /// The file-search tool.
-        /// 
-        public const string FileSearch = "file_search";
-    }
-
-    /// 
-    /// The metadata key that identifies code-interpreter content.
-    /// 
-    public const string CodeInterpreterMetadataKey = "code";
-
-    /// 
-    /// Gets the assistant definition.
-    /// 
-    public Azure.AI.Projects.Agent Definition { get; private init; }
-
-    /// 
-    /// Gets the polling behavior for run processing.
-    /// 
-    public RunPollingOptions PollingOptions { get; } = new();
-
-    /// 
-    /// Initializes a new instance of the  class.
-    /// 
-    /// The agent model definition.
-    /// An  instance.
-    /// Optional collection of plugins to add to the kernel.
-    /// An optional factory to produce the  for the agent.
-    /// The format of the prompt template used when "templateFactory" parameter is supplied.
-    public AzureAIAgent(
-        Azure.AI.Projects.Agent model,
-        AgentsClient client,
-        IEnumerable? plugins = null,
-        IPromptTemplateFactory? templateFactory = null,
-        string? templateFormat = null)
-    {
-        this.Client = client;
-        this.Definition = model;
-        this.Description = this.Definition.Description;
-        this.Id = this.Definition.Id;
-        this.Name = this.Definition.Name;
-        this.Instructions = this.Definition.Instructions;
-
-        if (templateFactory != null)
-        {
-            Verify.NotNullOrWhiteSpace(templateFormat);
-
-            PromptTemplateConfig templateConfig = new(this.Instructions)
-            {
-                TemplateFormat = templateFormat
-            };
-
-            this.Template = templateFactory.Create(templateConfig);
-        }
-
-        if (plugins != null)
-        {
-            this.Kernel.Plugins.AddRange(plugins);
-        }
-    }
-
-    /// 
-    /// %%%
-    /// 
-    public AgentsClient Client { get; }
-
-    /// 
-    /// Adds a message to the specified thread.
-    /// 
-    /// The thread identifier.
-    /// A non-system message to append to the conversation.
-    /// The  to monitor for cancellation requests. The default is .
-    /// 
-    /// Only supports messages with role = User or agent.
-    /// 
-    public Task AddChatMessageAsync(string threadId, ChatMessageContent message, CancellationToken cancellationToken = default)
-    {
-        return AgentThreadActions.CreateMessageAsync(this.Client, threadId, message, cancellationToken);
-    }
-
-    /// 
-    /// Gets messages for a specified thread.
-    /// 
-    /// The thread identifier.
-    /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of messages.
-    public IAsyncEnumerable GetThreadMessagesAsync(string threadId, CancellationToken cancellationToken = default)
-    {
-        return AgentThreadActions.GetMessagesAsync(this.Client, threadId, cancellationToken);
-    }
-
-    /// 
-    /// Invokes the assistant on the specified thread.
-    /// 
-    /// The thread identifier.
-    /// Optional arguments to pass to the agents's invocation, including any .
-    /// The  containing services, plugins, and other state for use by the agent.
-    /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of response messages.
-    /// 
-    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
-    /// 
-    public IAsyncEnumerable InvokeAsync(
-        string threadId,
-        KernelArguments? arguments = null,
-        Kernel? kernel = null,
-        CancellationToken cancellationToken = default)
-    {
-        return this.InvokeAsync(threadId, options: null, arguments, kernel, cancellationToken);
-    }
-
-    /// 
-    /// Invokes the assistant on the specified thread.
-    /// 
-    /// The thread identifier.
-    /// Optional invocation options.
-    /// Optional arguments to pass to the agents's invocation, including any .
-    /// The  containing services, plugins, and other state for use by the agent.
-    /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of response messages.
-    /// 
-    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
-    /// 
-    public IAsyncEnumerable InvokeAsync(
-        string threadId,
-        AzureAIInvocationOptions? options,
-        KernelArguments? arguments = null,
-        Kernel? kernel = null,
-        CancellationToken cancellationToken = default)
-    {
-        return ActivityExtensions.RunWithActivityAsync(
-            () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
-            () => InternalInvokeAsync(),
-            cancellationToken);
-
-        async IAsyncEnumerable InternalInvokeAsync()
-        {
-            kernel ??= this.Kernel;
-            arguments = this.MergeArguments(arguments);
-
-            await foreach ((bool isVisible, ChatMessageContent message) in AgentThreadActions.InvokeAsync(this, this.Client, threadId, options, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false))
-            {
-                if (isVisible)
-                {
-                    yield return message;
-                }
-            }
-        }
-    }
-
-    /// 
-    /// Invokes the assistant on the specified thread with streaming response.
-    /// 
-    /// The thread identifier.
-    /// Optional arguments to pass to the agents's invocation, including any .
-    /// The  containing services, plugins, and other state for use by the agent.
-    /// Optional receiver of the completed messages that are generated.
-    /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of messages.
-    /// 
-    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
-    /// 
-    public IAsyncEnumerable InvokeStreamingAsync(
-        string threadId,
-        KernelArguments? arguments = null,
-        Kernel? kernel = null,
-        ChatHistory? messages = null,
-        CancellationToken cancellationToken = default)
-    {
-        return this.InvokeStreamingAsync(threadId, options: null, arguments, kernel, messages, cancellationToken);
-    }
-
-    /// 
-    /// Invokes the assistant on the specified thread with streaming response.
-    /// 
-    /// The thread identifier.
-    /// Optional invocation options.
-    /// Optional arguments to pass to the agents's invocation, including any .
-    /// The  containing services, plugins, and other state for use by the agent.
-    /// Optional receiver of the completed messages that are generated.
-    /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of messages.
-    /// 
-    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
-    /// 
-    public IAsyncEnumerable InvokeStreamingAsync(
-        string threadId,
-        AzureAIInvocationOptions? options,
-        KernelArguments? arguments = null,
-        Kernel? kernel = null,
-        ChatHistory? messages = null,
-        CancellationToken cancellationToken = default)
-    {
-        return ActivityExtensions.RunWithActivityAsync(
-            () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
-            () => InternalInvokeStreamingAsync(),
-            cancellationToken);
-
-        IAsyncEnumerable InternalInvokeStreamingAsync()
-        {
-            kernel ??= this.Kernel;
-            arguments = this.MergeArguments(arguments);
-
-            return AgentThreadActions.InvokeStreamingAsync(this, this.Client, threadId, messages, options, this.Logger, kernel, arguments, cancellationToken);
-        }
-    }
-
-    /// 
-    protected override IEnumerable GetChannelKeys()
-    {
-        // Distinguish from other channel types.
-        yield return typeof(AzureAIChannel).FullName!;
-        // Distinguish based on client instance.
-        yield return this.Client.GetHashCode().ToString();
-    }
-
-    /// 
-    protected override async Task CreateChannelAsync(CancellationToken cancellationToken)
-    {
-        this.Logger.LogAzureAIAgentCreatingChannel(nameof(CreateChannelAsync), nameof(AzureAIChannel));
-
-        string threadId = await AgentThreadActions.CreateThreadAsync(this.Client, cancellationToken).ConfigureAwait(false);
-
-        this.Logger.LogInformation("[{MethodName}] Created assistant thread: {ThreadId}", nameof(CreateChannelAsync), threadId);
-
-        AzureAIChannel channel =
-            new(this.Client, threadId)
-            {
-                Logger = this.ActiveLoggerFactory.CreateLogger()
-            };
-
-        this.Logger.LogAzureAIAgentCreatedChannel(nameof(CreateChannelAsync), nameof(AzureAIChannel), threadId);
-
-        return channel;
-    }
-
-    internal Task GetInstructionsAsync(Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken)
-    {
-        return this.FormatInstructionsAsync(kernel, arguments, cancellationToken);
-    }
-
-    /// 
-    protected override async Task RestoreChannelAsync(string channelState, CancellationToken cancellationToken)
-    {
-        string threadId = channelState;
-
-        this.Logger.LogAzureAIAgentRestoringChannel(nameof(RestoreChannelAsync), nameof(AzureAIChannel), threadId);
-
-        AgentThread thread = await this.Client.GetThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
-
-        this.Logger.LogAzureAIAgentRestoredChannel(nameof(RestoreChannelAsync), nameof(AzureAIChannel), threadId);
-
-        return new AzureAIChannel(this.Client, thread.Id);
-    }
-}
diff --git a/dotnet/src/Agents/AzureAI/AzureAIChannel.cs b/dotnet/src/Agents/AzureAI/AzureAIChannel.cs
deleted file mode 100644
index c3979e10bcb3..000000000000
--- a/dotnet/src/Agents/AzureAI/AzureAIChannel.cs
+++ /dev/null
@@ -1,61 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
-using System.Threading;
-using System.Threading.Tasks;
-using Azure.AI.Projects;
-using Microsoft.SemanticKernel.Agents.AzureAI.Internal;
-using Microsoft.SemanticKernel.Agents.Extensions;
-using Microsoft.SemanticKernel.Diagnostics;
-
-namespace Microsoft.SemanticKernel.Agents.AzureAI;
-
-/// 
-/// A  specialization for use with .
-/// 
-internal sealed class AzureAIChannel(AgentsClient client, string threadId)
-    : AgentChannel
-{
-    /// 
-    protected override async Task ReceiveAsync(IEnumerable history, CancellationToken cancellationToken)
-    {
-        foreach (ChatMessageContent message in history)
-        {
-            await AgentThreadActions.CreateMessageAsync(client, threadId, message, cancellationToken).ConfigureAwait(false);
-        }
-    }
-
-    /// 
-    protected override IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(
-        AzureAIAgent agent,
-        CancellationToken cancellationToken)
-    {
-        return ActivityExtensions.RunWithActivityAsync(
-            () => ModelDiagnostics.StartAgentInvocationActivity(agent.Id, agent.GetDisplayName(), agent.Description),
-            () => AgentThreadActions.InvokeAsync(agent, client, threadId, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken),
-            cancellationToken);
-    }
-
-    /// 
-    protected override IAsyncEnumerable InvokeStreamingAsync(AzureAIAgent agent, IList messages, CancellationToken cancellationToken = default)
-    {
-        return ActivityExtensions.RunWithActivityAsync(
-            () => ModelDiagnostics.StartAgentInvocationActivity(agent.Id, agent.GetDisplayName(), agent.Description),
-            () => AgentThreadActions.InvokeStreamingAsync(agent, client, threadId, messages, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken),
-            cancellationToken);
-    }
-
-    /// 
-    protected override IAsyncEnumerable GetHistoryAsync(CancellationToken cancellationToken)
-    {
-        return AgentThreadActions.GetMessagesAsync(client, threadId, cancellationToken);
-    }
-
-    /// 
-    protected override Task ResetAsync(CancellationToken cancellationToken = default)
-    {
-        return client.DeleteThreadAsync(threadId, cancellationToken);
-    }
-
-    /// 
-    protected override string Serialize() { return threadId; }
-}
diff --git a/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs b/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs
deleted file mode 100644
index 9082225ef698..000000000000
--- a/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs
+++ /dev/null
@@ -1,116 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
-using System.Linq;
-using System.Net.Http;
-using Azure.AI.Projects;
-using Azure.Core;
-using Azure.Core.Pipeline;
-using Microsoft.SemanticKernel.Http;
-
-namespace Microsoft.SemanticKernel.Agents.AzureAI;
-
-/// 
-/// Provides an  for use by .
-/// 
-public sealed class AzureAIClientProvider
-{
-    private AgentsClient? _agentsClient;
-
-    /// 
-    /// Gets an active client instance.
-    /// 
-    public AIProjectClient Client { get; }
-
-    /// 
-    /// Gets an active assistant client instance.
-    /// 
-    public AgentsClient AgentsClient => this._agentsClient ??= this.Client.GetAgentsClient();
-
-    /// 
-    /// Configuration keys required for  management.
-    /// 
-    internal IReadOnlyList ConfigurationKeys { get; }
-
-    private AzureAIClientProvider(AIProjectClient client, IEnumerable keys)
-    {
-        this.Client = client;
-        this.ConfigurationKeys = keys.ToArray();
-    }
-
-    /// 
-    /// Produces a .
-    /// 
-    /// The Azure AI Foundry project connection string, in the form `endpoint;subscription_id;resource_group_name;project_name`.
-    ///  A credential used to authenticate to an Azure Service.
-    /// A custom  for HTTP requests.
-    public static AzureAIClientProvider FromConnectionString(
-        string connectionString,
-        TokenCredential credential,
-        HttpClient? httpClient = null)
-    {
-        Verify.NotNullOrWhiteSpace(connectionString, nameof(connectionString));
-        Verify.NotNull(credential, nameof(credential));
-
-        AIProjectClientOptions clientOptions = CreateAzureClientOptions(httpClient);
-
-        return new(new AIProjectClient(connectionString, credential, clientOptions), CreateConfigurationKeys(connectionString, httpClient));
-    }
-
-    /// 
-    /// Provides a client instance directly.
-    /// 
-    public static AzureAIClientProvider FromClient(AIProjectClient client)
-    {
-        return new(client, [client.GetType().FullName!, client.GetHashCode().ToString()]);
-    }
-
-    private static AIProjectClientOptions CreateAzureClientOptions(HttpClient? httpClient)
-    {
-        AIProjectClientOptions options =
-            new()
-            {
-                Diagnostics = {
-                    ApplicationId = HttpHeaderConstant.Values.UserAgent,
-                }
-            };
-
-        options.AddPolicy(new SemanticKernelHeadersPolicy(), HttpPipelinePosition.PerCall);
-
-        if (httpClient is not null)
-        {
-            options.Transport = new HttpClientTransport(httpClient);
-            // Disable retry policy if and only if a custom HttpClient is provided.
-            options.RetryPolicy = new RetryPolicy(maxRetries: 0);
-        }
-
-        return options;
-    }
-
-    private static IEnumerable CreateConfigurationKeys(string connectionString, HttpClient? httpClient)
-    {
-        yield return connectionString;
-
-        if (httpClient is not null)
-        {
-            if (httpClient.BaseAddress is not null)
-            {
-                yield return httpClient.BaseAddress.AbsoluteUri;
-            }
-
-            foreach (string header in httpClient.DefaultRequestHeaders.SelectMany(h => h.Value))
-            {
-                yield return header;
-            }
-        }
-    }
-
-    private class SemanticKernelHeadersPolicy : HttpPipelineSynchronousPolicy
-    {
-        public override void OnSendingRequest(HttpMessage message)
-        {
-            message.Request.Headers.Add(
-                HttpHeaderConstant.Names.SemanticKernelVersion,
-                HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AzureAIAgent)));
-        }
-    }
-}
diff --git a/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs b/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs
deleted file mode 100644
index a1153523b03e..000000000000
--- a/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs
+++ /dev/null
@@ -1,109 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
-using System.Text.Json.Serialization;
-
-namespace Microsoft.SemanticKernel.Agents.AzureAI;
-
-/// 
-/// Defines per-invocation execution options that override the assistant definition.
-/// 
-/// 
-/// This class is not applicable to  usage.
-/// 
-public sealed class AzureAIInvocationOptions
-{
-    /// 
-    /// Gets the AI model targeted by the agent.
-    /// 
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public string? ModelName { get; init; }
-
-    /// 
-    /// Gets the additional instructions.
-    /// 
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public string? AdditionalInstructions { get; init; }
-
-    /// 
-    /// Gets the additional messages to add to the thread.
-    /// 
-    /// 
-    /// Only supports messages with role = User or Assistant.
-    /// 
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public IReadOnlyList? AdditionalMessages { get; init; }
-
-    /// 
-    /// Gets a value that indicates whether the code_interpreter tool is enabled.
-    /// 
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
-    public bool EnableCodeInterpreter { get; init; }
-
-    /// 
-    /// Gets a value that indicates whether the file_search tool is enabled.
-    /// 
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
-    public bool EnableFileSearch { get; init; }
-
-    /// 
-    /// Gets a value that indicates whether the JSON response format is enabled.
-    /// 
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public bool? EnableJsonResponse { get; init; }
-
-    /// 
-    /// Gets the maximum number of completion tokens that can be used over the course of the run.
-    /// 
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public int? MaxCompletionTokens { get; init; }
-
-    /// 
-    /// Gets the maximum number of prompt tokens that can be used over the course of the run.
-    /// 
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public int? MaxPromptTokens { get; init; }
-
-    /// 
-    /// Gets a value that indicates whether the parallel function calling is enabled during tool use.
-    /// 
-    /// 
-    /// true if parallel function calling is enabled during tool use; otherwise, false. The default is true.
-    /// 
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public bool? ParallelToolCallsEnabled { get; init; }
-
-    /// 
-    /// Gets the number of recent messages that the thread will be truncated to.
-    /// 
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public int? TruncationMessageCount { get; init; }
-
-    /// 
-    /// Gets the sampling temperature to use, between 0 and 2.
-    /// 
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public float? Temperature { get; init; }
-
-    /// 
-    /// Gets the probability mass of tokens whose results are considered in nucleus sampling.
-    /// 
-    /// 
-    /// It's recommended to set this property or Temperature, but not both.
-    ///
-    /// Nucleus sampling is an alternative to sampling with temperature where the model
-    /// considers the results of the tokens with TopP probability mass.
-    /// For example, 0.1 means only the tokens comprising the top 10% probability mass are considered.
-    /// 
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public float? TopP { get; init; }
-
-    /// 
-    /// Gets a set of up to 16 key/value pairs that can be attached to an agent, used for
-    /// storing additional information about that object in a structured format.
-    /// 
-    /// 
-    /// Keys can be up to 64 characters in length, and values can be up to 512 characters in length.
-    /// 
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public IReadOnlyDictionary<string, string>? Metadata { get; init; }
-}
diff --git a/dotnet/src/Agents/AzureAI/AzureAIThreadMessageFactory.cs b/dotnet/src/Agents/AzureAI/AzureAIThreadMessageFactory.cs
deleted file mode 100644
index d37242c522ed..000000000000
--- a/dotnet/src/Agents/AzureAI/AzureAIThreadMessageFactory.cs
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
-using Azure.AI.Projects;
-using Microsoft.SemanticKernel.Agents.AzureAI.Internal;
-
-namespace Microsoft.SemanticKernel.Agents.AzureAI;
-
-/// 
-/// Exposes patterns for creating and managing agent threads.
-/// 
-/// 
-/// This class supports translation of  from native models.
-/// 
-public static class AzureAIThreadMessageFactory
-{
-    /// 
-    /// Translates  to  for thread creation.
-    /// 
-    public static IEnumerable<ThreadMessageOptions> Translate(IEnumerable<ChatMessageContent> messages)
-    {
-        return AgentMessageFactory.GetThreadMessages(messages);
-    }
-}
diff --git a/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs b/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs
deleted file mode 100644
index 7d4cf718b1e0..000000000000
--- a/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs
+++ /dev/null
@@ -1,120 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Runtime.CompilerServices;
-using System.Threading;
-using System.Threading.Tasks;
-using Azure.AI.Projects;
-using Microsoft.SemanticKernel.Agents.AzureAI.Internal;
-
-namespace Microsoft.SemanticKernel.Agents.AzureAI.Extensions;
-
-/// 
-/// Extensions associated with an Agent run processing.
-/// 
-/// 
-/// Improves testability.
-/// 
-internal static class AgentRunExtensions
-{
-    public static async IAsyncEnumerable<RunStep> GetStepsAsync(
-        this AgentsClient client,
-        ThreadRun run,
-        [EnumeratorCancellation] CancellationToken cancellationToken)
-    {
-        PageableList<RunStep>? steps = null;
-        do
-        {
-            steps = await client.GetRunStepsAsync(run, after: steps?.LastId, cancellationToken: cancellationToken).ConfigureAwait(false);
-            foreach (RunStep step in steps)
-            {
-                yield return step;
-            }
-        }
-        while (steps?.HasMore ?? false);
-    }
-
-    public static async Task<ThreadRun> CreateAsync(
-        this AgentsClient client,
-        string threadId,
-        AzureAIAgent agent,
-        string? instructions,
-        ToolDefinition[] tools,
-        AzureAIInvocationOptions? invocationOptions,
-        CancellationToken cancellationToken)
-    {
-        TruncationObject? truncationStrategy = GetTruncationStrategy(invocationOptions);
-        BinaryData? responseFormat = GetResponseFormat(invocationOptions);
-        return
-            await client.CreateRunAsync(
-                threadId,
-                agent.Definition.Id,
-                overrideModelName: invocationOptions?.ModelName,
-                instructions,
-                additionalInstructions: invocationOptions?.AdditionalInstructions,
-                additionalMessages: AgentMessageFactory.GetThreadMessages(invocationOptions?.AdditionalMessages).ToArray(),
-                overrideTools: tools,
-                stream: false,
-                temperature: invocationOptions?.Temperature,
-                topP: invocationOptions?.TopP,
-                maxPromptTokens: invocationOptions?.MaxPromptTokens,
-                maxCompletionTokens: invocationOptions?.MaxCompletionTokens,
-                truncationStrategy,
-                toolChoice: null,
-                responseFormat,
-                parallelToolCalls: invocationOptions?.ParallelToolCallsEnabled,
-                metadata: invocationOptions?.Metadata,
-                include: null,
-                cancellationToken).ConfigureAwait(false);
-    }
-
-    private static BinaryData? GetResponseFormat(AzureAIInvocationOptions? invocationOptions)
-    {
-        return invocationOptions?.EnableJsonResponse == true ?
-            BinaryData.FromString(ResponseFormat.JsonObject.ToString()) :
-            null;
-    }
-
-    private static TruncationObject? GetTruncationStrategy(AzureAIInvocationOptions? invocationOptions)
-    {
-        return invocationOptions?.TruncationMessageCount == null ?
-            null :
-            new(TruncationStrategy.LastMessages)
-            {
-                LastMessages = invocationOptions.TruncationMessageCount
-            };
-    }
-
-    public static IAsyncEnumerable<StreamingUpdate> CreateStreamingAsync(
-        this AgentsClient client,
-        string threadId,
-        AzureAIAgent agent,
-        string? instructions,
-        ToolDefinition[] tools,
-        AzureAIInvocationOptions? invocationOptions,
-        CancellationToken cancellationToken)
-    {
-        TruncationObject? truncationStrategy = GetTruncationStrategy(invocationOptions);
-        BinaryData? responseFormat = GetResponseFormat(invocationOptions);
-        return
-            client.CreateRunStreamingAsync(
-                threadId,
-                agent.Definition.Id,
-                overrideModelName: invocationOptions?.ModelName,
-                instructions,
-                additionalInstructions: invocationOptions?.AdditionalInstructions,
-                additionalMessages: AgentMessageFactory.GetThreadMessages(invocationOptions?.AdditionalMessages).ToArray(),
-                overrideTools: tools,
-                temperature: invocationOptions?.Temperature,
-                topP: invocationOptions?.TopP,
-                maxPromptTokens: invocationOptions?.MaxPromptTokens,
-                maxCompletionTokens: invocationOptions?.MaxCompletionTokens,
-                truncationStrategy,
-                toolChoice: null,
-                responseFormat,
-                parallelToolCalls: invocationOptions?.ParallelToolCallsEnabled,
-                metadata: invocationOptions?.Metadata,
-                cancellationToken);
-    }
-}
diff --git a/dotnet/src/Agents/AzureAI/Extensions/KernelFunctionExtensions.cs b/dotnet/src/Agents/AzureAI/Extensions/KernelFunctionExtensions.cs
deleted file mode 100644
index e6b9c722eabb..000000000000
--- a/dotnet/src/Agents/AzureAI/Extensions/KernelFunctionExtensions.cs
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System;
-using Azure.AI.Projects;
-
-namespace Microsoft.SemanticKernel.Agents.AzureAI;
-
-/// 
-/// Extensions for  to support Azure AI specific operations.
-/// 
-public static class KernelFunctionExtensions
-{
-    /// 
-    /// Convert  to an OpenAI tool model.
-    /// 
-    /// The source function
-    /// The plugin name
-    /// An OpenAI tool definition
-    public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string pluginName)
-    {
-        if (function.Metadata.Parameters.Count > 0)
-        {
-            BinaryData parameterData = function.Metadata.CreateParameterSpec();
-
-            return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName), function.Description, parameterData);
-        }
-
-        return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName), function.Description);
-    }
-}
diff --git a/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs b/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs
deleted file mode 100644
index 621e364acf6a..000000000000
--- a/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs
+++ /dev/null
@@ -1,98 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
-using System.Linq;
-using Azure.AI.Projects;
-using Microsoft.SemanticKernel.ChatCompletion;
-
-namespace Microsoft.SemanticKernel.Agents.AzureAI.Internal;
-
-/// 
-/// Factory for creating  based on .
-/// 
-/// 
-/// Improves testability.
-/// 
-internal static class AgentMessageFactory
-{
-    /// 
-    /// Translate metadata from a  to be used for a  or
-    /// .
-    /// 
-    /// The message content.
-    public static Dictionary<string, string> GetMetadata(ChatMessageContent message)
-    {
-        return message.Metadata?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value?.ToString() ?? string.Empty) ?? [];
-    }
-
-    /// 
-    /// Translate attachments from a  to be used for a  or
-    /// 
-    /// The message content.
-    public static IEnumerable<MessageAttachment> GetAttachments(ChatMessageContent message)
-    {
-        return
-            message.Items
-                .OfType<FileReferenceContent>()
-                .Select(
-                    fileContent =>
-                        new MessageAttachment(fileContent.FileId, GetToolDefinition(fileContent.Tools).ToList()));
-    }
-
-    /// 
-    /// Translates a set of ChatMessageContent to a set of ThreadMessageOptions.
-    /// 
-    /// A list of ChatMessageContent objects.
-    public static IEnumerable<ThreadMessageOptions> GetThreadMessages(IEnumerable<ChatMessageContent>? messages)
-    {
-        if (messages is not null)
-        {
-            foreach (ChatMessageContent message in messages)
-            {
-                string? content = message.Content;
-                if (string.IsNullOrWhiteSpace(content))
-                {
-                    continue;
-                }
-
-                ThreadMessageOptions threadMessage = new(
-                    role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Agent,
-                    content: message.Content)
-                {
-                    Attachments = GetAttachments(message).ToArray(),
-                };
-
-                if (message.Metadata != null)
-                {
-                    foreach (string key in message.Metadata.Keys)
-                    {
-                        threadMessage.Metadata = GetMetadata(message);
-                    }
-                }
-
-                yield return threadMessage;
-            }
-        }
-    }
-
-    private static readonly Dictionary<string, ToolDefinition> s_toolMetadata = new()
-    {
-        { AzureAIAgent.Tools.CodeInterpreter, new CodeInterpreterToolDefinition() },
-        { AzureAIAgent.Tools.FileSearch, new FileSearchToolDefinition() },
-    };
-
-    private static IEnumerable<ToolDefinition> GetToolDefinition(IEnumerable<string>? tools)
-    {
-        if (tools is null)
-        {
-            yield break;
-        }
-
-        foreach (string tool in tools)
-        {
-            if (s_toolMetadata.TryGetValue(tool, out ToolDefinition? toolDefinition))
-            {
-                yield return toolDefinition;
-            }
-        }
-    }
-}
diff --git a/dotnet/src/Agents/AzureAI/Internal/AgentThreadActions.cs b/dotnet/src/Agents/AzureAI/Internal/AgentThreadActions.cs
deleted file mode 100644
index 167349b63d11..000000000000
--- a/dotnet/src/Agents/AzureAI/Internal/AgentThreadActions.cs
+++ /dev/null
@@ -1,860 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System;
-using System.ClientModel;
-using System.Collections.Generic;
-using System.Linq;
-using System.Net;
-using System.Runtime.CompilerServices;
-using System.Text.Json;
-using System.Threading;
-using System.Threading.Tasks;
-using Azure;
-using Azure.AI.Projects;
-using Microsoft.Extensions.Logging;
-using Microsoft.SemanticKernel.Agents.AzureAI.Extensions;
-using Microsoft.SemanticKernel.Agents.Extensions;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.FunctionCalling;
-
-namespace Microsoft.SemanticKernel.Agents.AzureAI.Internal;
-
-/// 
-/// Actions associated with an Azure AI Agent thread.
-/// 
-internal static class AgentThreadActions
-{
-    private static readonly HashSet<RunStatus> s_pollingStatuses =
-    [
-        RunStatus.Queued,
-        RunStatus.InProgress,
-        RunStatus.Cancelling,
-    ];
-
-    private static readonly HashSet<RunStatus> s_failureStatuses =
-    [
-        RunStatus.Expired,
-        RunStatus.Failed,
-        RunStatus.Cancelled,
-    ];
-
-    /// 
-    /// Create a new assistant thread.
-    /// 
-    /// The assistant client
-    /// The  to monitor for cancellation requests. The default is .
-    /// The thread identifier
-    public static async Task<string> CreateThreadAsync(AgentsClient client, CancellationToken cancellationToken = default)
-    {
-        AgentThread thread = await client.CreateThreadAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
-
-        return thread.Id;
-    }
-
-    /// 
-    /// Create a message in the specified thread.
-    /// 
-    /// The assistant client
-    /// The thread identifier
-    /// The message to add
-    /// The  to monitor for cancellation requests. The default is .
-    ///  if a system message is present, without taking any other action
-    public static async Task CreateMessageAsync(AgentsClient client, string threadId, ChatMessageContent message, CancellationToken cancellationToken)
-    {
-        if (message.Items.Any(i => i is FunctionCallContent))
-        {
-            return;
-        }
-
-        string? content = message.Content;
-        if (string.IsNullOrWhiteSpace(content))
-        {
-            return;
-        }
-
-        await client.CreateMessageAsync(
-            threadId,
-            role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Agent,
-            content,
-            attachments: AgentMessageFactory.GetAttachments(message).ToArray(),
-            metadata: AgentMessageFactory.GetMetadata(message),
-            cancellationToken).ConfigureAwait(false);
-    }
-
-    /// 
-    /// Retrieves the thread messages.
-    /// 
-    /// The assistant client
-    /// The thread identifier
-    /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of messages.
-    public static async IAsyncEnumerable<ChatMessageContent> GetMessagesAsync(AgentsClient client, string threadId, [EnumeratorCancellation] CancellationToken cancellationToken)
-    {
-        Dictionary<string, string> agentNames = []; // Cache agent names by their identifier
-
-        string? lastId = null;
-        PageableList<ThreadMessage>? messages = null;
-        do
-        {
-            messages = await client.GetMessagesAsync(threadId, runId: null, limit: null, ListSortOrder.Descending, after: lastId, before: null, cancellationToken).ConfigureAwait(false);
-            foreach (ThreadMessage message in messages)
-            {
-                lastId = message.Id;
-                string? assistantName = null;
-                if (!string.IsNullOrWhiteSpace(message.AssistantId) &&
-                    !agentNames.TryGetValue(message.AssistantId, out assistantName))
-                {
-                    Azure.AI.Projects.Agent assistant = await client.GetAgentAsync(message.AssistantId, cancellationToken).ConfigureAwait(false);
-                    if (!string.IsNullOrWhiteSpace(assistant.Name))
-                    {
-                        agentNames.Add(assistant.Id, assistant.Name);
-                    }
-                }
-
-                assistantName ??= message.AssistantId;
-
-                ChatMessageContent content = GenerateMessageContent(assistantName, message);
-
-                if (content.Items.Count > 0)
-                {
-                    yield return content;
-                }
-            }
-        } while (messages?.HasMore ?? false);
-    }
-
-    /// 
-    /// Invoke the assistant on the specified thread.
-    /// In the enumeration returned by this method, a message is considered visible if it is intended to be displayed to the user.
-    /// An example of a non-visible message is function-call content for functions that are executed automatically.
-    /// 
-    /// The assistant agent to interact with the thread.
-    /// The assistant client
-    /// The thread identifier
-    /// Options to utilize for the invocation
-    /// The logger to utilize (might be agent or channel scoped)
-    /// The  plugins and other state.
-    /// Optional arguments to pass to the agent's invocation, including any .
-    /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of messages.
-    public static async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(
-        AzureAIAgent agent,
-        AgentsClient client,
-        string threadId,
-        AzureAIInvocationOptions? invocationOptions,
-        ILogger logger,
-        Kernel kernel,
-        KernelArguments? arguments,
-        [EnumeratorCancellation] CancellationToken cancellationToken)
-    {
-        logger.LogAzureAIAgentCreatingRun(nameof(InvokeAsync), threadId);
-
-        List<ToolDefinition> tools = new(agent.Definition.Tools);
-
-        // Add unique functions from the Kernel which are not already present in the agent's tools
-        var functionToolNames = new HashSet<string>(tools.OfType<FunctionToolDefinition>().Select(t => t.Name));
-        var functionTools = kernel.Plugins
-            .SelectMany(kp => kp.Select(kf => kf.ToToolDefinition(kp.Name)))
-            .Where(tool => !functionToolNames.Contains(tool.Name));
-        tools.AddRange(functionTools);
-
-        string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
-
-        ThreadRun run = await client.CreateAsync(threadId, agent, instructions, [.. tools], invocationOptions, cancellationToken).ConfigureAwait(false);
-
-        logger.LogAzureAIAgentCreatedRun(nameof(InvokeAsync), run.Id, threadId);
-
-        FunctionCallsProcessor functionProcessor = new(logger);
-        // This matches current behavior.  Will be configurable upon integrating with `FunctionChoice` (#6795/#5200)
-        FunctionChoiceBehaviorOptions functionOptions = new() { AllowConcurrentInvocation = true, AllowParallelCalls = true };
-
-        // Evaluate status and process steps and messages, as encountered.
-        HashSet<string> processedStepIds = [];
-        Dictionary<string, FunctionResultContent> functionSteps = [];
-        do
-        {
-            // Check for cancellation
-            cancellationToken.ThrowIfCancellationRequested();
-
-            // Poll run and steps until actionable
-            await PollRunStatusAsync().ConfigureAwait(false);
-
-            // Is in terminal state?
-            if (s_failureStatuses.Contains(run.Status))
-            {
-                throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}");
-            }
-
-            RunStep[] steps = await client.GetStepsAsync(run, cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false);
-
-            // Is tool action required?
-            if (run.Status == RunStatus.RequiresAction)
-            {
-                logger.LogAzureAIAgentProcessingRunSteps(nameof(InvokeAsync), run.Id, threadId);
-
-                // Execute functions in parallel and post results at once.
-                FunctionCallContent[] functionCalls = steps.SelectMany(step => ParseFunctionStep(agent, step)).ToArray();
-                if (functionCalls.Length > 0)
-                {
-                    // Emit function-call content
-                    ChatMessageContent functionCallMessage = GenerateFunctionCallContent(agent.GetName(), functionCalls);
-                    yield return (IsVisible: false, Message: functionCallMessage);
-
-                    // Invoke functions for each tool-step
-                    FunctionResultContent[] functionResults =
-                        await functionProcessor.InvokeFunctionCallsAsync(
-                            functionCallMessage,
-                            (_) => true,
-                            functionOptions,
-                            kernel,
-                            isStreaming: false,
-                            cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false);
-
-                    // Capture function-call for message processing
-                    foreach (FunctionResultContent functionCall in functionResults)
-                    {
-                        functionSteps.Add(functionCall.CallId!, functionCall);
-                    }
-
-                    // Process tool output
-                    ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults);
-
-                    await client.SubmitToolOutputsToRunAsync(run, toolOutputs, cancellationToken).ConfigureAwait(false);
-                }
-
-                logger.LogAzureAIAgentProcessedRunSteps(nameof(InvokeAsync), functionCalls.Length, run.Id, threadId);
-            }
-
-            // Enumerate completed messages
-            logger.LogAzureAIAgentProcessingRunMessages(nameof(InvokeAsync), run.Id, threadId);
-
-            IEnumerable<RunStep> completedStepsToProcess =
-                steps
-                    .Where(s => s.CompletedAt.HasValue && !processedStepIds.Contains(s.Id))
-                    .OrderBy(s => s.CreatedAt);
-
-            int messageCount = 0;
-            foreach (RunStep completedStep in completedStepsToProcess)
-            {
-                if (completedStep.Type == RunStepType.ToolCalls)
-                {
-                    RunStepToolCallDetails toolDetails = (RunStepToolCallDetails)completedStep.StepDetails;
-                    foreach (RunStepToolCall toolCall in toolDetails.ToolCalls)
-                    {
-                        bool isVisible = false;
-                        ChatMessageContent? content = null;
-
-                        // Process code-interpreter content
-                        if (toolCall is RunStepCodeInterpreterToolCall codeTool)
-                        {
-                            content = GenerateCodeInterpreterContent(agent.GetName(), codeTool.Input, completedStep);
-                            isVisible = true;
-                        }
-                        // Process function result content
-                        else if (toolCall is RunStepFunctionToolCall functionTool)
-                        {
-                            FunctionResultContent functionStep = functionSteps[functionTool.Id]; // Function step always captured on invocation
-                            content = GenerateFunctionResultContent(agent.GetName(), [functionStep], completedStep);
-                        }
-
-                        if (content is not null)
-                        {
-                            ++messageCount;
-
-                            yield return (isVisible, Message: content);
-                        }
-                    }
-                }
-                else if (completedStep.Type == RunStepType.MessageCreation)
-                {
-                    // Retrieve the message
-                    RunStepMessageCreationDetails messageDetails = (RunStepMessageCreationDetails)completedStep.StepDetails;
-                    ThreadMessage? message = await RetrieveMessageAsync(client, threadId, messageDetails.MessageCreation.MessageId, agent.PollingOptions.MessageSynchronizationDelay, cancellationToken).ConfigureAwait(false);
-
-                    if (message is not null)
-                    {
-                        ChatMessageContent content = GenerateMessageContent(agent.GetName(), message, completedStep);
-
-                        if (content.Items.Count > 0)
-                        {
-                            ++messageCount;
-
-                            yield return (IsVisible: true, Message: content);
-                        }
-                    }
-                }
-
-                processedStepIds.Add(completedStep.Id);
-            }
-
-            logger.LogAzureAIAgentProcessedRunMessages(nameof(InvokeAsync), messageCount, run.Id, threadId);
-        }
-        while (RunStatus.Completed != run.Status);
-
-        logger.LogAzureAIAgentCompletedRun(nameof(InvokeAsync), run.Id, threadId);
-
-        // Local function to assist in run polling (participates in method closure).
-        async Task PollRunStatusAsync()
-        {
-            logger.LogAzureAIAgentPollingRunStatus(nameof(PollRunStatusAsync), run.Id, threadId);
-
-            int count = 0;
-
-            do
-            {
-                cancellationToken.ThrowIfCancellationRequested();
-
-                if (count > 0)
-                {
-                    // Reduce polling frequency after a couple attempts
-                    await Task.Delay(agent.PollingOptions.GetPollingInterval(count), cancellationToken).ConfigureAwait(false);
-                }
-
-                ++count;
-
-                try
-                {
-                    run = await client.GetRunAsync(threadId, run.Id, cancellationToken).ConfigureAwait(false);
-                }
-                // The presence of a `Status` code means the server responded with error...always fail in that case
-                catch (ClientResultException clientException) when (clientException.Status <= 0)
-                {
-                    // Check maximum retry count
-                    if (count >= agent.PollingOptions.MaximumRetryCount)
-                    {
-                        throw;
-                    }
-
-                    // Retry for potential transient failure
-                    continue;
-                }
-                catch (AggregateException aggregateException) when (aggregateException.InnerException is ClientResultException innerClientException)
-                {
-                    // The presence of a `Status` code means the server responded with error
-                    if (innerClientException.Status > 0)
-                    {
-                        throw;
-                    }
-
-                    // Check maximum retry count
-                    if (count >= agent.PollingOptions.MaximumRetryCount)
-                    {
-                        throw;
-                    }
-
-                    // Retry for potential transient failure
-                    continue;
-                }
-            }
-            while (s_pollingStatuses.Contains(run.Status));
-
-            logger.LogAzureAIAgentPolledRunStatus(nameof(PollRunStatusAsync), run.Status, run.Id, threadId);
-        }
-    }
-
-    /// 
-    /// Invoke the assistant on the specified thread using streaming.
-    /// 
-    /// The assistant agent to interact with the thread.
-    /// The assistant client
-    /// The thread identifier
-    /// The receiver for the completed messages generated
-    /// Options to utilize for the invocation
-    /// The logger to utilize (might be agent or channel scoped)
-    /// The  plugins and other state.
-    /// Optional arguments to pass to the agent's invocation, including any .
-    /// The  to monitor for cancellation requests. The default is .
-    /// Asynchronous enumeration of messages.
-    /// 
-    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
-    /// 
-    public static async IAsyncEnumerable<StreamingChatMessageContent> InvokeStreamingAsync(
-        AzureAIAgent agent,
-        AgentsClient client,
-        string threadId,
-        IList<ChatMessageContent>? messages,
-        AzureAIInvocationOptions? invocationOptions,
-        ILogger logger,
-        Kernel kernel,
-        KernelArguments? arguments,
-        [EnumeratorCancellation] CancellationToken cancellationToken)
-    {
-        logger.LogAzureAIAgentCreatingRun(nameof(InvokeAsync), threadId);
-
-        ToolDefinition[]? tools = [.. agent.Definition.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))];
-
-        string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
-
-        // Evaluate status and process steps and messages, as encountered.
-        HashSet<string> processedStepIds = [];
-        Dictionary<string, FunctionResultContent[]> stepFunctionResults = [];
-        List<RunStep> stepsToProcess = [];
-
-        FunctionCallsProcessor functionProcessor = new(logger);
-        // This matches current behavior.  Will be configurable upon integrating with `FunctionChoice` (#6795/#5200)
-        FunctionChoiceBehaviorOptions functionOptions = new() { AllowConcurrentInvocation = true, AllowParallelCalls = true };
-
-        ThreadRun? run = null;
-        IAsyncEnumerable<StreamingUpdate> asyncUpdates = client.CreateStreamingAsync(threadId, agent, instructions, tools, invocationOptions, cancellationToken);
-        do
-        {
-            // Check for cancellation
-            cancellationToken.ThrowIfCancellationRequested();
-
-            stepsToProcess.Clear();
-
-            await foreach (StreamingUpdate update in asyncUpdates.ConfigureAwait(false))
-            {
-                if (update is RunUpdate runUpdate)
-                {
-                    run = runUpdate.Value;
-                }
-                else if (update is MessageContentUpdate contentUpdate)
-                {
-                    switch (contentUpdate.UpdateKind)
-                    {
-                        case StreamingUpdateReason.MessageUpdated:
-                            yield return GenerateStreamingMessageContent(agent.GetName(), contentUpdate);
-                            break;
-                    }
-                }
-                else if (update is RunStepDetailsUpdate detailsUpdate)
-                {
-                    StreamingChatMessageContent? toolContent = GenerateStreamingCodeInterpreterContent(agent.GetName(), detailsUpdate);
-                    if (toolContent != null)
-                    {
-                        yield return toolContent;
-                    }
-                    else if (detailsUpdate.FunctionOutput != null)
-                    {
-                        yield return
-                            new StreamingChatMessageContent(AuthorRole.Assistant, null)
-                            {
-                                AuthorName = agent.Name,
-                                Items = [new StreamingFunctionCallUpdateContent(detailsUpdate.ToolCallId, detailsUpdate.FunctionName, detailsUpdate.FunctionArguments)]
-                            };
-                    }
-                }
-                else if (update is RunStepUpdate stepUpdate)
-                {
-                    switch (stepUpdate.UpdateKind)
-                    {
-                        case StreamingUpdateReason.RunStepCompleted:
-                            stepsToProcess.Add(stepUpdate.Value);
-                            break;
-                        default:
-                            break;
-                    }
-                }
-            }
-
-            if (run == null)
-            {
-                throw new KernelException($"Agent Failure - Run not created for thread: ${threadId}");
-            }
-
-            // Is in terminal state?
-            if (s_failureStatuses.Contains(run.Status))
-            {
-                throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}");
-            }
-
-            if (run.Status == RunStatus.RequiresAction)
-            {
-                RunStep[] activeSteps =
-                    await client.GetStepsAsync(run, cancellationToken)
-                    .Where(step => step.Status == RunStepStatus.InProgress)
-                    .ToArrayAsync(cancellationToken).ConfigureAwait(false);
-
-                // Capture map between the tool call and its associated step
-                Dictionary<string, string> toolMap = [];
-                foreach (RunStep step in activeSteps)
-                {
-                    RunStepToolCallDetails toolCallDetails = (RunStepToolCallDetails)step.StepDetails;
-                    foreach (RunStepToolCall stepDetails in toolCallDetails.ToolCalls)
-                    {
-                        toolMap[stepDetails.Id] = step.Id;
-                    }
-                }
-
-                // Execute functions in parallel and post results at once.
-                FunctionCallContent[] functionCalls = activeSteps.SelectMany(step => ParseFunctionStep(agent, step)).ToArray();
-                if (functionCalls.Length > 0)
-                {
-                    // Emit function-call content
-                    ChatMessageContent functionCallMessage = GenerateFunctionCallContent(agent.GetName(), functionCalls);
-                    messages?.Add(functionCallMessage);
-
-                    FunctionResultContent[] functionResults =
-                        await functionProcessor.InvokeFunctionCallsAsync(
-                            functionCallMessage,
-                            (_) => true,
-                            functionOptions,
-                            kernel,
-                            isStreaming: true,
-                            cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false);
-
-                    // Process tool output
-                    ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults);
-                    asyncUpdates = client.SubmitToolOutputsToStreamAsync(run, toolOutputs, cancellationToken);
-
-                    foreach (RunStep step in activeSteps)
-                    {
-                        stepFunctionResults.Add(step.Id, functionResults.Where(result => step.Id == toolMap[result.CallId!]).ToArray());
-                    }
-                }
-            }
-
-            if (stepsToProcess.Count > 0)
-            {
-                logger.LogAzureAIAgentProcessingRunMessages(nameof(InvokeAsync), run!.Id, threadId);
-
-                foreach (RunStep step in stepsToProcess)
-                {
-                    if (step.StepDetails is RunStepMessageCreationDetails messageDetails)
-                    {
-                        ThreadMessage? message =
-                            await RetrieveMessageAsync(
-                                client,
-                                threadId,
-                                messageDetails.MessageCreation.MessageId,
-                                agent.PollingOptions.MessageSynchronizationDelay,
-                                cancellationToken).ConfigureAwait(false);
-
-                        if (message != null)
-                        {
-                            ChatMessageContent content = GenerateMessageContent(agent.GetName(), message, step);
-                            messages?.Add(content);
-                        }
-                    }
-                    else if (step.StepDetails is RunStepToolCallDetails toolDetails)
-                    {
-                        foreach (RunStepToolCall toolCall in toolDetails.ToolCalls)
-                        {
-                            if (toolCall is RunStepFunctionToolCall functionCall)
-                            {
-                                messages?.Add(GenerateFunctionResultContent(agent.GetName(), stepFunctionResults[step.Id], step));
-                                stepFunctionResults.Remove(step.Id);
-                                break;
-                            }
-
-                            if (toolCall is RunStepCodeInterpreterToolCall codeCall)
-                            {
-                                messages?.Add(GenerateCodeInterpreterContent(agent.GetName(), codeCall.Input, step));
-                            }
-                        }
-                    }
-                }
-
-                logger.LogAzureAIAgentProcessedRunMessages(nameof(InvokeAsync), stepsToProcess.Count, run!.Id, threadId);
-            }
-        }
-        while (run?.Status != RunStatus.Completed);
-
-        logger.LogAzureAIAgentCompletedRun(nameof(InvokeAsync), run?.Id ?? "Failed", threadId);
-    }
-
-    private static ChatMessageContent GenerateMessageContent(string? assistantName, ThreadMessage message, RunStep? completedStep = null)
-    {
-        AuthorRole role = new(message.Role.ToString());
-
-        Dictionary<string, object?>? metadata =
-            new()
-            {
-                { nameof(ThreadMessage.CreatedAt), message.CreatedAt },
-                { nameof(ThreadMessage.AssistantId), message.AssistantId },
-                { nameof(ThreadMessage.ThreadId), message.ThreadId },
-                { nameof(ThreadMessage.RunId), message.RunId },
-                { nameof(MessageContentUpdate.MessageId), message.Id },
-            };
-
-        if (completedStep != null)
-        {
-            metadata[nameof(RunStepDetailsUpdate.StepId)] = completedStep.Id;
-            metadata[nameof(RunStep.Usage)] = completedStep.Usage;
-        }
-
-        ChatMessageContent content =
-            new(role, content: null)
-            {
-                AuthorName = assistantName,
-                Metadata = metadata,
-            };
-
-        foreach (MessageContent itemContent in message.ContentItems)
-        {
-            // Process text content
-            if (itemContent is MessageTextContent textContent)
-            {
-                content.Items.Add(new TextContent(textContent.Text));
-
-                foreach (MessageTextAnnotation annotation in textContent.Annotations)
-                {
-                    content.Items.Add(GenerateAnnotationContent(annotation));
-                }
-            }
-            // Process image content
-            else if (itemContent is MessageImageFileContent imageContent)
-            {
-                content.Items.Add(new FileReferenceContent(imageContent.FileId));
-            }
-        }
-
-        return content;
-    }
-
-    private static StreamingChatMessageContent GenerateStreamingMessageContent(string? assistantName, MessageContentUpdate update)
-    {
-        StreamingChatMessageContent content =
-            new(AuthorRole.Assistant, content: null)
-            {
-                AuthorName = assistantName,
-            };
-
-        // Process text content
-        if (!string.IsNullOrEmpty(update.Text))
-        {
-            content.Items.Add(new StreamingTextContent(update.Text));
-        }
-        // Process image content
-        else if (update.ImageFileId != null)
-        {
-            content.Items.Add(new StreamingFileReferenceContent(update.ImageFileId));
-        }
-        // Process annotations
-        else if (update.TextAnnotation != null)
-        {
-            content.Items.Add(GenerateStreamingAnnotationContent(update.TextAnnotation));
-        }
-
-        if (update.Role.HasValue && update.Role.Value != MessageRole.User)
-        {
-            content.Role = new(update.Role.Value.ToString() ?? MessageRole.Agent.ToString());
-        }
-
-        return content;
-    }
-
-    private static StreamingChatMessageContent? GenerateStreamingCodeInterpreterContent(string? assistantName, RunStepDetailsUpdate update)
-    {
-        StreamingChatMessageContent content =
-            new(AuthorRole.Assistant, content: null)
-            {
-                AuthorName = assistantName,
-            };
-
-        // Process text content
-        if (update.CodeInterpreterInput != null)
-        {
-            content.Items.Add(new StreamingTextContent(update.CodeInterpreterInput));
-            content.Metadata = new Dictionary<string, object?> { { AzureAIAgent.CodeInterpreterMetadataKey, true } };
-        }
-
-        if ((update.CodeInterpreterOutputs?.Count ?? 0) > 0)
-        {
-            foreach (RunStepDeltaCodeInterpreterOutput output in update.CodeInterpreterOutputs!)
-            {
-                if (output is RunStepDeltaCodeInterpreterImageOutput imageOutput)
-                {
-                    content.Items.Add(new StreamingFileReferenceContent(imageOutput.Image.FileId));
-                }
-            }
-        }
-
-        return content.Items.Count > 0 ? content : null;
-    }
-
-    private static AnnotationContent GenerateAnnotationContent(MessageTextAnnotation annotation)
-    {
-        string? fileId = null;
-
-        if (annotation is MessageTextFileCitationAnnotation fileCitationAnnotation)
-        {
-            fileId = fileCitationAnnotation.FileId;
-        }
-        else if (annotation is MessageTextFilePathAnnotation filePathAnnotation)
-        {
-            fileId = filePathAnnotation.FileId;
-        }
-
-        return
-            new(annotation.Text)
-            {
-                Quote = annotation.Text,
-                FileId = fileId,
-            };
-    }
-
-    private static StreamingAnnotationContent GenerateStreamingAnnotationContent(TextAnnotationUpdate annotation)
-    {
-        string? fileId = null;
-
-        if (!string.IsNullOrEmpty(annotation.OutputFileId))
-        {
-            fileId = annotation.OutputFileId;
-        }
-        else if (!string.IsNullOrEmpty(annotation.InputFileId))
-        {
-            fileId = annotation.InputFileId;
-        }
-
-        return
-            new(annotation.TextToReplace)
-            {
-                StartIndex = annotation.StartIndex ?? 0,
-                EndIndex = annotation.EndIndex ?? 0,
-                FileId = fileId,
-            };
-    }
-
-    private static ChatMessageContent GenerateCodeInterpreterContent(string agentName, string pythonCode, RunStep completedStep)
-    {
-        Dictionary<string, object?> metadata = GenerateToolCallMetadata(completedStep);
-        metadata[AzureAIAgent.CodeInterpreterMetadataKey] = true;
-
-        return
-            new ChatMessageContent(
-                AuthorRole.Assistant,
-                [
-                    new TextContent(pythonCode)
-                ])
-            {
-                AuthorName = agentName,
-                Metadata = metadata,
-            };
-    }
-
-    private static IEnumerable<FunctionCallContent> ParseFunctionStep(AzureAIAgent agent, RunStep step)
-    {
-        if (step.Status == RunStepStatus.InProgress && step.Type == RunStepType.ToolCalls)
-        {
-            RunStepToolCallDetails toolCallDetails = (RunStepToolCallDetails)step.StepDetails;
-            foreach (RunStepToolCall toolCall in toolCallDetails.ToolCalls)
-            {
-                if (toolCall is RunStepFunctionToolCall functionCall)
-                {
-                    (FunctionName nameParts, KernelArguments functionArguments) = ParseFunctionCall(functionCall.Name, functionCall.Arguments);
-
-                    FunctionCallContent content = new(nameParts.Name, nameParts.PluginName, toolCall.Id, functionArguments);
-
-                    yield return content;
-                }
-            }
-        }
-    }
-
-    private static (FunctionName functionName, KernelArguments arguments) ParseFunctionCall(string functionName, string? functionArguments)
-    {
-        FunctionName nameParts = FunctionName.Parse(functionName);
-
-        KernelArguments arguments = [];
-
-        if (!string.IsNullOrWhiteSpace(functionArguments))
-        {
-            foreach (var argumentKvp in JsonSerializer.Deserialize<Dictionary<string, object>>(functionArguments!)!)
-            {
-                arguments[argumentKvp.Key] = argumentKvp.Value.ToString();
-            }
-        }
-
-        return (nameParts, arguments);
-    }
-
-    private static ChatMessageContent GenerateFunctionCallContent(string agentName, IList<FunctionCallContent> functionCalls)
-    {
-        ChatMessageContent functionCallContent = new(AuthorRole.Assistant, content: null)
-        {
-            AuthorName = agentName
-        };
-
-        functionCallContent.Items.AddRange(functionCalls);
-
-        return functionCallContent;
-    }
-
-    private static ChatMessageContent GenerateFunctionResultContent(string agentName, IEnumerable<FunctionResultContent> functionResults, RunStep completedStep)
-    {
-        ChatMessageContent functionResultContent = new(AuthorRole.Tool, content: null)
-        {
-            AuthorName = agentName,
-            Metadata = GenerateToolCallMetadata(completedStep),
-        };
-
-        foreach (FunctionResultContent functionResult in functionResults)
-        {
-            functionResultContent.Items.Add(
-                new FunctionResultContent(
-                    functionResult.FunctionName,
-                    functionResult.PluginName,
-                    functionResult.CallId,
-                    functionResult.Result));
-        }
-
-        return functionResultContent;
-    }
-
-    private static Dictionary<string, object?> GenerateToolCallMetadata(RunStep completedStep)
-    {
-        return new()
-            {
-                { nameof(RunStep.CreatedAt), completedStep.CreatedAt },
-                { nameof(RunStep.AssistantId), completedStep.AssistantId },
-                { nameof(RunStep.ThreadId), completedStep.ThreadId },
-                { nameof(RunStep.RunId), completedStep.RunId },
-                { nameof(RunStepDetailsUpdate.StepId), completedStep.Id },
-                { nameof(RunStep.Usage), completedStep.Usage },
-            };
-    }
-
-    private static ToolOutput[] GenerateToolOutputs(FunctionResultContent[] functionResults)
-    {
-        ToolOutput[] toolOutputs = new ToolOutput[functionResults.Length];
-
-        for (int index = 0; index < functionResults.Length; ++index)
-        {
-            FunctionResultContent functionResult = functionResults[index];
-
-            object resultValue = functionResult.Result ?? string.Empty;
-
-            if (resultValue is not string textResult)
-            {
-                textResult = JsonSerializer.Serialize(resultValue);
-            }
-
-            toolOutputs[index] = new ToolOutput(functionResult.CallId, textResult!);
-        }
-
-        return toolOutputs;
-    }
-
-    private static async Task<ThreadMessage?> RetrieveMessageAsync(AgentsClient client, string threadId, string messageId, TimeSpan syncDelay, CancellationToken cancellationToken)
-    {
-        ThreadMessage? message = null;
-
-        bool retry = false;
-        int count = 0;
-        do
-        {
-            try
-            {
-                message = await client.GetMessageAsync(threadId, messageId, cancellationToken).ConfigureAwait(false);
-            }
-            catch (RequestFailedException exception)
-            {
-                // The step has provided the message-id. Retry only if a NotFound/404 occurs.
-                // Extremely rarely, there might be a synchronization issue between the
-                // assistant response and the message service.
-                retry = exception.Status == (int)HttpStatusCode.NotFound && count < 3;
-            }
-
-            if (retry)
-            {
-                await Task.Delay(syncDelay, cancellationToken).ConfigureAwait(false);
-            }
-
-            ++count;
-        }
-        while (retry);
-
-        return message;
-    }
-}
diff --git a/dotnet/src/Agents/AzureAI/Logging/AgentThreadActionsLogMessages.cs b/dotnet/src/Agents/AzureAI/Logging/AgentThreadActionsLogMessages.cs
deleted file mode 100644
index 974af70205eb..000000000000
--- a/dotnet/src/Agents/AzureAI/Logging/AgentThreadActionsLogMessages.cs
+++ /dev/null
@@ -1,139 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.Diagnostics.CodeAnalysis;
-using Azure.AI.Projects;
-using Microsoft.Extensions.Logging;
-using Microsoft.SemanticKernel.Agents.AzureAI.Internal;
-
-namespace Microsoft.SemanticKernel.Agents.AzureAI;
-
-#pragma warning disable SYSLIB1006 // Multiple logging methods cannot use the same event id within a class
-
-/// 
-/// Extensions for logging .
-/// 
-/// 
-/// This extension uses the  to
-/// generate logging code at compile time to achieve optimized code.
-/// 
-[ExcludeFromCodeCoverage]
-internal static partial class AgentThreadActionsLogMessages
-{
-    /// 
-    /// Logs  creating run (started).
-    /// 
-    [LoggerMessage(
-        EventId = 0,
-        Level = LogLevel.Debug,
-        Message = "[{MethodName}] Creating run for thread: {ThreadId}.")]
-    public static partial void LogAzureAIAgentCreatingRun(
-        this ILogger logger,
-        string methodName,
-        string threadId);
-
-    /// 
-    /// Logs  created run (complete).
-    /// 
-    [LoggerMessage(
-        EventId = 0,
-        Level = LogLevel.Information,
-        Message = "[{MethodName}] Created run for thread: {RunId}/{ThreadId}.")]
-    public static partial void LogAzureAIAgentCreatedRun(
-        this ILogger logger,
-        string methodName,
-        string runId,
-        string threadId);
-
-    /// 
-    /// Logs  completed run (complete).
-    /// 
-    [LoggerMessage(
-        EventId = 0,
-        Level = LogLevel.Information,
-        Message = "[{MethodName}] Completed run for thread: {RunId}/{ThreadId}.")]
-    public static partial void LogAzureAIAgentCompletedRun(
-        this ILogger logger,
-        string methodName,
-        string runId,
-        string threadId);
-
-    /// 
-    /// Logs  processing run steps (started).
-    /// 
-    [LoggerMessage(
-        EventId = 0,
-        Level = LogLevel.Debug,
-        Message = "[{MethodName}] Processing run steps for thread: {RunId}/{ThreadId}.")]
-    public static partial void LogAzureAIAgentProcessingRunSteps(
-        this ILogger logger,
-        string methodName,
-        string runId,
-        string threadId);
-
-    /// 
-    /// Logs  processed run steps (complete).
-    /// 
-    [LoggerMessage(
-        EventId = 0,
-        Level = LogLevel.Information,
-        Message = "[{MethodName}] Processed #{stepCount} run steps: {RunId}/{ThreadId}.")]
-    public static partial void LogAzureAIAgentProcessedRunSteps(
-        this ILogger logger,
-        string methodName,
-        int stepCount,
-        string runId,
-        string threadId);
-
-    /// 
-    /// Logs  processing run messages (started).
-    /// 
-    [LoggerMessage(
-        EventId = 0,
-        Level = LogLevel.Debug,
-        Message = "[{MethodName}] Processing run messages for thread: {RunId}/{ThreadId}.")]
-    public static partial void LogAzureAIAgentProcessingRunMessages(
-        this ILogger logger,
-        string methodName,
-        string runId,
-        string threadId);
-
-    /// 
-    /// Logs  processed run messages (complete).
-    /// 
-    [LoggerMessage(
-        EventId = 0,
-        Level = LogLevel.Information,
-        Message = "[{MethodName}] Processed #{MessageCount} run steps: {RunId}/{ThreadId}.")]
-    public static partial void LogAzureAIAgentProcessedRunMessages(
-        this ILogger logger,
-        string methodName,
-        int messageCount,
-        string runId,
-        string threadId);
-
-    /// 
-    /// Logs  polling run status (started).
-    /// 
-    [LoggerMessage(
-        EventId = 0,
-        Level = LogLevel.Debug,
-        Message = "[{MethodName}] Polling run status for thread: {RunId}/{ThreadId}.")]
-    public static partial void LogAzureAIAgentPollingRunStatus(
-        this ILogger logger,
-        string methodName,
-        string runId,
-        string threadId);
-
-    /// 
-    /// Logs  polled run status (complete).
-    /// 
-    [LoggerMessage(
-        EventId = 0,
-        Level = LogLevel.Information,
-        Message = "[{MethodName}] Run status is {RunStatus}: {RunId}/{ThreadId}.")]
-    public static partial void LogAzureAIAgentPolledRunStatus(
-        this ILogger logger,
-        string methodName,
-        RunStatus runStatus,
-        string runId,
-        string threadId);
-}
diff --git a/dotnet/src/Agents/AzureAI/Logging/AzureAIAgentLogMessages.cs b/dotnet/src/Agents/AzureAI/Logging/AzureAIAgentLogMessages.cs
deleted file mode 100644
index 7056ddc746c0..000000000000
--- a/dotnet/src/Agents/AzureAI/Logging/AzureAIAgentLogMessages.cs
+++ /dev/null
@@ -1,69 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.Diagnostics.CodeAnalysis;
-using Microsoft.Extensions.Logging;
-
-namespace Microsoft.SemanticKernel.Agents.AzureAI;
-
-#pragma warning disable SYSLIB1006 // Multiple logging methods cannot use the same event id within a class
-
-/// 
-/// Extensions for logging  invocations.
-/// 
-/// 
-/// This extension uses the  to
-/// generate logging code at compile time to achieve optimized code.
-/// 
-[ExcludeFromCodeCoverage]
-internal static partial class AzureAIAgentLogMessages
-{
-    /// 
-    /// Logs  creating channel (started).
-    /// 
-    [LoggerMessage(
-        EventId = 0,
-        Level = LogLevel.Debug,
-        Message = "[{MethodName}] Creating assistant thread for {ChannelType}.")]
-    public static partial void LogAzureAIAgentCreatingChannel(
-        this ILogger logger,
-        string methodName,
-        string channelType);
-
-    /// 
-    /// Logs  created channel (complete).
-    /// 
-    [LoggerMessage(
-        EventId = 0,
-        Level = LogLevel.Information,
-        Message = "[{MethodName}] Created assistant thread for {ChannelType}: #{ThreadId}.")]
-    public static partial void LogAzureAIAgentCreatedChannel(
-        this ILogger logger,
-        string methodName,
-        string channelType,
-        string threadId);
-
-    /// 
-    /// Logs  restoring serialized channel (started).
-    /// 
-    [LoggerMessage(
-        EventId = 0,
-        Level = LogLevel.Debug,
-        Message = "[{MethodName}] Restoring assistant channel for {ChannelType}: #{ThreadId}.")]
-    public static partial void LogAzureAIAgentRestoringChannel(
-        this ILogger logger,
-        string methodName,
-        string channelType,
-        string threadId);
-
-    /// 
-    /// Logs  restored serialized channel (complete).
-    /// 
-    [LoggerMessage(
-        EventId = 0,
-        Level = LogLevel.Information,
-        Message = "[{MethodName}] Restored assistant channel for {ChannelType}: #{ThreadId}.")]
-    public static partial void LogAzureAIAgentRestoredChannel(
-        this ILogger logger,
-        string methodName,
-        string channelType,
-        string threadId);
-}
diff --git a/dotnet/src/Agents/AzureAI/RunPollingOptions.cs b/dotnet/src/Agents/AzureAI/RunPollingOptions.cs
deleted file mode 100644
index f1bbd1db4853..000000000000
--- a/dotnet/src/Agents/AzureAI/RunPollingOptions.cs
+++ /dev/null
@@ -1,73 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System;
-
-namespace Microsoft.SemanticKernel.Agents.AzureAI;
-
-/// 
-/// Configuration and defaults associated with polling behavior for Assistant API run processing.
-/// 
-public sealed class RunPollingOptions
-{
-    /// 
-    /// Gets the default maximum number of retries when monitoring thread-run status.
-    /// 
-    public static int DefaultMaximumRetryCount { get; } = 3;
-
-    /// 
-    /// Gets the default polling interval when monitoring thread-run status.
-    /// 
-    public static TimeSpan DefaultPollingInterval { get; } = TimeSpan.FromMilliseconds(500);
-
-    /// 
-    /// Gets the default back-off interval when monitoring thread-run status.
-    /// 
-    public static TimeSpan DefaultPollingBackoff { get; } = TimeSpan.FromSeconds(1);
-
-    /// 
-    /// Gets the default number of polling iterations before using .
-    /// 
-    public static int DefaultPollingBackoffThreshold { get; } = 2;
-
-    /// 
-    /// Gets the default polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
-    /// 
-    public static TimeSpan DefaultMessageSynchronizationDelay { get; } = TimeSpan.FromMilliseconds(500);
-
-    /// 
-    /// Gets or sets the maximum retry count when polling thread-run status.
-    /// 
-    /// 
-    /// This value only affects failures that have the potential to be transient.
-    /// Explicit server error responses will result in immediate failure.
-    /// 
-    public int MaximumRetryCount { get; set; } = DefaultMaximumRetryCount;
-
-    /// 
-    /// Gets or sets the polling interval when monitoring thread-run status.
-    /// 
-    public TimeSpan RunPollingInterval { get; set; } = DefaultPollingInterval;
-
-    /// 
-    /// Gets or sets the back-off interval when monitoring thread-run status.
-    /// 
-    public TimeSpan RunPollingBackoff { get; set; } = DefaultPollingBackoff;
-
-    /// 
-    /// Gets or sets the number of polling iterations before using .
-    /// 
-    public int RunPollingBackoffThreshold { get; set; } = DefaultPollingBackoffThreshold;
-
-    /// 
-    /// Gets or sets the polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
-    /// 
-    public TimeSpan MessageSynchronizationDelay { get; set; } = DefaultMessageSynchronizationDelay;
-
-    /// 
-    /// Gets the polling interval for the specified iteration count.
-    /// 
-    /// The number of polling iterations already attempted.
-    public TimeSpan GetPollingInterval(int iterationCount)
-    {
-        return iterationCount > this.RunPollingBackoffThreshold ? this.RunPollingBackoff : this.RunPollingInterval;
-    }
-}
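
For reference, here is a minimal sketch (not part of the patch, and assuming the defaults shown above) of the stepped back-off schedule the removed `RunPollingOptions.GetPollingInterval` produces:

```csharp
using System;

// Defaults shown above: 500 ms interval, 1 s back-off, back-off threshold of 2.
var options = new RunPollingOptions();

for (int iteration = 1; iteration <= 4; iteration++)
{
    TimeSpan delay = options.GetPollingInterval(iteration);

    // Iterations 1 and 2 use RunPollingInterval (500 ms); once the iteration count
    // exceeds RunPollingBackoffThreshold (2), RunPollingBackoff (1 s) is used instead.
    Console.WriteLine($"Iteration {iteration}: wait {delay}");
}
```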
diff --git a/dotnet/src/Agents/Bedrock/Agents.Bedrock.csproj b/dotnet/src/Agents/Bedrock/Agents.Bedrock.csproj
deleted file mode 100644
index e17d43f63fcc..000000000000
--- a/dotnet/src/Agents/Bedrock/Agents.Bedrock.csproj
+++ /dev/null
@@ -1,50 +0,0 @@
-
-
-  
-    
-    Microsoft.SemanticKernel.Agents.Bedrock
-    Microsoft.SemanticKernel.Agents.Bedrock
-    net8.0;netstandard2.0
-    $(NoWarn);SKEXP0110;CA1724
-    false
-    alpha
-  
-
-  
-
-  
-    
-    Semantic Kernel Agents - Bedrock
-    Defines a concrete Agent based on the Bedrock Agent Service.
-  
-
-  
-    
-    
-    
-    
-    
-    
-    
-    
-  
-
-  
-
-  
-    
-  
-
-  
-    
-    
-    
-  
-
-  
-    
-    
-  
-
-
-
\ No newline at end of file
diff --git a/dotnet/src/Agents/Bedrock/BedrockAgent.cs b/dotnet/src/Agents/Bedrock/BedrockAgent.cs
deleted file mode 100644
index f01e46843ace..000000000000
--- a/dotnet/src/Agents/Bedrock/BedrockAgent.cs
+++ /dev/null
@@ -1,263 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System;
-using System.Collections.Generic;
-using System.Threading;
-using System.Threading.Tasks;
-using Amazon.BedrockAgent;
-using Amazon.BedrockAgentRuntime;
-using Amazon.BedrockAgentRuntime.Model;
-using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
-using Microsoft.SemanticKernel.Agents.Extensions;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Diagnostics;
-
-namespace Microsoft.SemanticKernel.Agents.Bedrock;
-
-/// 
-/// Provides a specialized  for the Bedrock Agent service.
-/// 
-public class BedrockAgent : KernelAgent
-{
-    internal readonly AmazonBedrockAgentClient Client;
-
-    internal readonly AmazonBedrockAgentRuntimeClient RuntimeClient;
-
-    internal readonly Amazon.BedrockAgent.Model.Agent AgentModel;
-
-    /// 
-    /// There is a default alias created by Bedrock for the working draft version of the agent.
-    /// https://docs.aws.amazon.com/bedrock/latest/userguide/agents-deploy.html
-    /// 
-    public static readonly string WorkingDraftAgentAlias = "TSTALIASID";
-
-    /// 
-    /// Initializes a new instance of the  class.
-    /// Unlike other types of agents in Semantic Kernel, prompt templates are not supported for Bedrock agents,
-    /// since Bedrock agents don't support using an alternative instruction at runtime.
-    /// 
-    /// The agent model of an agent that exists on the Bedrock Agent service.
-    /// A client used to interact with the Bedrock Agent service.
-    /// A client used to interact with the Bedrock Agent runtime service.
-    public BedrockAgent(
-        Amazon.BedrockAgent.Model.Agent agentModel,
-        AmazonBedrockAgentClient? client = null,
-        AmazonBedrockAgentRuntimeClient? runtimeClient = null)
-    {
-        this.AgentModel = agentModel;
-        this.Client = client ?? new AmazonBedrockAgentClient();
-        this.RuntimeClient = runtimeClient ?? new AmazonBedrockAgentRuntimeClient();
-
-        this.Id = agentModel.AgentId;
-        this.Name = agentModel.AgentName;
-        this.Description = agentModel.Description;
-        this.Instructions = agentModel.Instruction;
-    }
-
-    #region static methods
-
-    /// 
-    /// Convenience method to create a unique session id.
-    /// 
-    public static string CreateSessionId()
-    {
-        return Guid.NewGuid().ToString();
-    }
-
-    #endregion
-
-    #region public methods
-
-    /// 
-    /// Invoke the Bedrock agent with the given message.
-    /// 
-    /// The session id.
-    /// The message to send to the agent.
-    /// The arguments to use when invoking the agent.
-    /// The alias id of the agent to use. The default is the working draft alias id.
-    /// The  to monitor for cancellation requests. The default is .
-    /// An  of .
-    public IAsyncEnumerable InvokeAsync(
-        string sessionId,
-        string message,
-        KernelArguments? arguments,
-        string? agentAliasId = null,
-        CancellationToken cancellationToken = default)
-    {
-        var invokeAgentRequest = new InvokeAgentRequest
-        {
-            AgentAliasId = agentAliasId ?? WorkingDraftAgentAlias,
-            AgentId = this.Id,
-            SessionId = sessionId,
-            InputText = message,
-        };
-
-        return this.InvokeAsync(invokeAgentRequest, arguments, cancellationToken);
-    }
-
-    /// 
-    /// Invoke the Bedrock agent with the given request. Use this method when you want to customize the request.
-    /// 
-    /// The request to send to the agent.
-    /// The arguments to use when invoking the agent.
-    /// The  to monitor for cancellation requests. The default is .
-    public IAsyncEnumerable InvokeAsync(
-        InvokeAgentRequest invokeAgentRequest,
-        KernelArguments? arguments,
-        CancellationToken cancellationToken = default)
-    {
-        return invokeAgentRequest.StreamingConfigurations != null && (invokeAgentRequest.StreamingConfigurations.StreamFinalResponse ?? false)
-            ? throw new ArgumentException("The streaming configuration must be null for non-streaming responses.")
-            : ActivityExtensions.RunWithActivityAsync(
-                () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
-                InvokeInternal,
-                cancellationToken);
-
-        // Collect all responses from the agent and return them as a single chat message content since this
-        // is a non-streaming API.
-        // The Bedrock Agent API streams back different types of responses, i.e. text, files, metadata, etc.
-        // The Bedrock Agent API also won't stream back any content when it needs to call a function. It will
-        // only start streaming back content after the function has been called and the response is ready.
-        async IAsyncEnumerable InvokeInternal()
-        {
-            ChatMessageContentItemCollection items = [];
-            string content = "";
-            Dictionary metadata = [];
-            List innerContents = [];
-
-            await foreach (var message in this.InternalInvokeAsync(invokeAgentRequest, arguments, cancellationToken).ConfigureAwait(false))
-            {
-                items.AddRange(message.Items);
-                content += message.Content ?? "";
-                if (message.Metadata != null)
-                {
-                    foreach (var key in message.Metadata.Keys)
-                    {
-                        metadata[key] = message.Metadata[key];
-                    }
-                }
-                innerContents.Add(message.InnerContent);
-            }
-
-            yield return content.Length == 0
-                ? throw new KernelException("No content was returned from the agent.")
-                : new ChatMessageContent(AuthorRole.Assistant, content)
-                {
-                    AuthorName = this.GetDisplayName(),
-                    Items = items,
-                    ModelId = this.AgentModel.FoundationModel,
-                    Metadata = metadata,
-                    InnerContent = innerContents,
-                };
-        }
-    }
-
-    /// 
-    /// Invoke the Bedrock agent with the given request and streaming response.
-    /// 
-    /// The session id.
-    /// The message to send to the agent.
-    /// The arguments to use when invoking the agent.
-    /// The alias id of the agent to use. The default is the working draft alias id.
-    /// The  to monitor for cancellation requests. The default is .
-    /// An  of .
-    public IAsyncEnumerable InvokeStreamingAsync(
-        string sessionId,
-        string message,
-        KernelArguments? arguments,
-        string? agentAliasId = null,
-        CancellationToken cancellationToken = default)
-    {
-        var invokeAgentRequest = new InvokeAgentRequest
-        {
-            AgentAliasId = agentAliasId ?? WorkingDraftAgentAlias,
-            AgentId = this.Id,
-            SessionId = sessionId,
-            InputText = message,
-            StreamingConfigurations = new()
-            {
-                StreamFinalResponse = true,
-            },
-        };
-
-        return this.InvokeStreamingAsync(invokeAgentRequest, arguments, cancellationToken);
-    }
-
-    /// 
-    /// Invoke the Bedrock agent with the given request and streaming response. Use this method when you want to customize the request.
-    /// 
-    /// The request to send to the agent.
-    /// The arguments to use when invoking the agent.
-    /// The  to monitor for cancellation requests. The default is .
-    /// An  of .
-    public IAsyncEnumerable InvokeStreamingAsync(
-        InvokeAgentRequest invokeAgentRequest,
-        KernelArguments? arguments,
-        CancellationToken cancellationToken = default)
-    {
-        if (invokeAgentRequest.StreamingConfigurations == null)
-        {
-            invokeAgentRequest.StreamingConfigurations = new()
-            {
-                StreamFinalResponse = true,
-            };
-        }
-        else if (!(invokeAgentRequest.StreamingConfigurations.StreamFinalResponse ?? false))
-        {
-            throw new ArgumentException("The streaming configuration must have StreamFinalResponse set to true.");
-        }
-
-        return ActivityExtensions.RunWithActivityAsync(
-            () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
-            InvokeInternal,
-            cancellationToken);
-
-        async IAsyncEnumerable InvokeInternal()
-        {
-            // The Bedrock agent service has the same API for both streaming and non-streaming responses.
-            // We are invoking the same method as the non-streaming response with the streaming configuration set,
-            // and converting the chat message content to streaming chat message content.
-            await foreach (var chatMessageContent in this.InternalInvokeAsync(invokeAgentRequest, arguments, cancellationToken).ConfigureAwait(false))
-            {
-                yield return new StreamingChatMessageContent(chatMessageContent.Role, chatMessageContent.Content)
-                {
-                    AuthorName = chatMessageContent.AuthorName,
-                    ModelId = chatMessageContent.ModelId,
-                    InnerContent = chatMessageContent.InnerContent,
-                    Metadata = chatMessageContent.Metadata,
-                };
-            }
-        }
-    }
-
-    #endregion
-
-    /// 
-    protected override IEnumerable GetChannelKeys()
-    {
-        // Return the channel keys for the BedrockAgent
-        yield return typeof(BedrockAgentChannel).FullName!;
-    }
-
-    /// 
-    protected override Task CreateChannelAsync(CancellationToken cancellationToken)
-    {
-        // Create and return a new BedrockAgentChannel
-        return Task.FromResult(new BedrockAgentChannel());
-    }
-
-    /// 
-    protected override Task RestoreChannelAsync(string channelState, CancellationToken cancellationToken)
-    {
-        // Restore and return a BedrockAgentChannel from the given state
-        return Task.FromResult(new BedrockAgentChannel());
-    }
-
-    #region internal methods
-
-    internal string CodeInterpreterActionGroupSignature { get => $"{this.GetDisplayName()}_CodeInterpreter"; }
-    internal string KernelFunctionActionGroupSignature { get => $"{this.GetDisplayName()}_KernelFunctions"; }
-    internal string UseInputActionGroupSignature { get => $"{this.GetDisplayName()}_UserInput"; }
-
-    #endregion
-}
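
The removed `BedrockAgent` was typically constructed from an agent that already exists on the Bedrock Agent service and then invoked per session. A hedged sketch (the agent id and message are placeholders; this is not part of the patch):

```csharp
using System;
using Amazon.BedrockAgent;
using Amazon.BedrockAgentRuntime;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.Bedrock;

var client = new AmazonBedrockAgentClient();
var runtimeClient = new AmazonBedrockAgentRuntimeClient();

// Fetch an existing agent definition from the service (placeholder agent id).
var getAgentResponse = await client.GetAgentAsync(new() { AgentId = "AGENT_ID" });
var agent = new BedrockAgent(getAgentResponse.Agent, client, runtimeClient);

// Each conversation is tracked by a session id; the non-streaming InvokeAsync
// aggregates the streamed events into ChatMessageContent items.
string sessionId = BedrockAgent.CreateSessionId();
await foreach (ChatMessageContent message in agent.InvokeAsync(sessionId, "Hello!", arguments: null))
{
    Console.WriteLine(message.Content);
}
```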
diff --git a/dotnet/src/Agents/Bedrock/BedrockAgentChannel.cs b/dotnet/src/Agents/Bedrock/BedrockAgentChannel.cs
deleted file mode 100644
index 1e0d40d91188..000000000000
--- a/dotnet/src/Agents/Bedrock/BedrockAgentChannel.cs
+++ /dev/null
@@ -1,248 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Runtime.CompilerServices;
-using System.Text.Json;
-using System.Threading;
-using System.Threading.Tasks;
-using Amazon.BedrockAgentRuntime.Model;
-using Microsoft.Extensions.Logging;
-using Microsoft.SemanticKernel.Agents.Extensions;
-using Microsoft.SemanticKernel.Agents.Serialization;
-using Microsoft.SemanticKernel.ChatCompletion;
-
-namespace Microsoft.SemanticKernel.Agents.Bedrock;
-
-/// 
-/// A  specialization for use with .
-/// 
-public class BedrockAgentChannel : AgentChannel
-{
-    private readonly ChatHistory _history = [];
-
-    private const string MessagePlaceholder = "[SILENCE]";
-
-    /// 
-    /// Receive messages from a group chat.
-    /// Bedrock requires the chat history to alternate between user and agent messages.
-    /// Thus, when receiving messages, the message sequence will be mutated by inserting
-    /// placeholder agent or user messages as needed.
-    /// 
-    /// The history of messages to receive.
-    /// A token to monitor for cancellation requests.
-    protected override Task ReceiveAsync(IEnumerable history, CancellationToken cancellationToken)
-    {
-        foreach (var incomingMessage in history)
-        {
-            if (string.IsNullOrEmpty(incomingMessage.Content))
-            {
-                this.Logger.LogWarning("Received a message with no content. Skipping.");
-                continue;
-            }
-
-            if (this._history.Count == 0 || this._history.Last().Role != incomingMessage.Role)
-            {
-                this._history.Add(incomingMessage);
-            }
-            else
-            {
-                this._history.Add
-                (
-                    new ChatMessageContent
-                    (
-                        incomingMessage.Role == AuthorRole.Assistant ? AuthorRole.User : AuthorRole.Assistant,
-                        MessagePlaceholder
-                    )
-                );
-                this._history.Add(incomingMessage);
-            }
-        }
-
-        return Task.CompletedTask;
-    }
-
-    /// 
-    protected override async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(
-        BedrockAgent agent,
-        [EnumeratorCancellation] CancellationToken cancellationToken)
-    {
-        if (!this.PrepareAndValidateHistory())
-        {
-            yield break;
-        }
-
-        InvokeAgentRequest invokeAgentRequest = new()
-        {
-            AgentAliasId = BedrockAgent.WorkingDraftAgentAlias,
-            AgentId = agent.Id,
-            SessionId = BedrockAgent.CreateSessionId(),
-            InputText = this._history.Last().Content,
-            SessionState = this.ParseHistoryToSessionState(),
-        };
-        await foreach (var message in agent.InvokeAsync(invokeAgentRequest, null, cancellationToken).ConfigureAwait(false))
-        {
-            if (message.Content is not null)
-            {
-                this._history.Add(message);
-                // All messages from Bedrock agents are user facing, i.e., function calls are not returned as messages
-                yield return (true, message);
-            }
-        }
-    }
-
-    /// 
-    protected override async IAsyncEnumerable InvokeStreamingAsync(
-        BedrockAgent agent,
-        IList messages,
-        [EnumeratorCancellation] CancellationToken cancellationToken)
-    {
-        if (!this.PrepareAndValidateHistory())
-        {
-            yield break;
-        }
-
-        InvokeAgentRequest invokeAgentRequest = new()
-        {
-            AgentAliasId = BedrockAgent.WorkingDraftAgentAlias,
-            AgentId = agent.Id,
-            SessionId = BedrockAgent.CreateSessionId(),
-            InputText = this._history.Last().Content,
-            SessionState = this.ParseHistoryToSessionState(),
-        };
-        await foreach (var message in agent.InvokeStreamingAsync(invokeAgentRequest, null, cancellationToken).ConfigureAwait(false))
-        {
-            if (message.Content is not null)
-            {
-                this._history.Add(new()
-                {
-                    Role = AuthorRole.Assistant,
-                    Content = message.Content,
-                    AuthorName = message.AuthorName,
-                    InnerContent = message.InnerContent,
-                    ModelId = message.ModelId,
-                });
-                // All messages from Bedrock agents are user facing, i.e., function calls are not returned as messages
-                yield return message;
-            }
-        }
-    }
-
-    /// 
-    protected override IAsyncEnumerable GetHistoryAsync(CancellationToken cancellationToken)
-    {
-        return this._history.ToDescendingAsync();
-    }
-
-    /// 
-    protected override Task ResetAsync(CancellationToken cancellationToken)
-    {
-        this._history.Clear();
-
-        return Task.CompletedTask;
-    }
-
-    /// 
-    protected override string Serialize()
-        => JsonSerializer.Serialize(ChatMessageReference.Prepare(this._history));
-
-    #region private methods
-
-    private bool PrepareAndValidateHistory()
-    {
-        if (this._history.Count == 0)
-        {
-            this.Logger.LogWarning("No messages to send. Bedrock requires at least one message to start a conversation.");
-            return false;
-        }
-
-        this.EnsureHistoryAlternates();
-        this.EnsureLastMessageIsUser();
-        if (string.IsNullOrEmpty(this._history.Last().Content))
-        {
-            this.Logger.LogWarning("Last message has no content. Bedrock doesn't support empty messages.");
-            return false;
-        }
-
-        return true;
-    }
-
-    private void EnsureHistoryAlternates()
-    {
-        if (this._history.Count <= 1)
-        {
-            return;
-        }
-
-        int currentIndex = 1;
-        while (currentIndex < this._history.Count)
-        {
-            if (this._history[currentIndex].Role == this._history[currentIndex - 1].Role)
-            {
-                this._history.Insert(
-                    currentIndex,
-                    new ChatMessageContent(
-                        this._history[currentIndex].Role == AuthorRole.Assistant ? AuthorRole.User : AuthorRole.Assistant,
-                        MessagePlaceholder
-                    )
-                );
-                currentIndex += 2;
-            }
-            else
-            {
-                currentIndex++;
-            }
-        }
-    }
-
-    private void EnsureLastMessageIsUser()
-    {
-        if (this._history.Count > 0 && this._history.Last().Role != AuthorRole.User)
-        {
-            this._history.Add(new ChatMessageContent(AuthorRole.User, MessagePlaceholder));
-        }
-    }
-
-    private SessionState ParseHistoryToSessionState()
-    {
-        SessionState sessionState = new();
-
-        // We don't take the last message as it needs to be sent separately in another parameter.
-        if (this._history.Count > 1)
-        {
-            sessionState.ConversationHistory = new()
-            {
-                Messages = []
-            };
-
-            foreach (var message in this._history.Take(this._history.Count - 1))
-            {
-                if (message.Content is null)
-                {
-                    throw new InvalidOperationException("Message content cannot be null.");
-                }
-                if (message.Role != AuthorRole.Assistant && message.Role != AuthorRole.User)
-                {
-                    throw new InvalidOperationException("Message role must be either Assistant or User.");
-                }
-
-                sessionState.ConversationHistory.Messages.Add(new()
-                {
-                    Role = message.Role == AuthorRole.Assistant
-                        ? Amazon.BedrockAgentRuntime.ConversationRole.Assistant
-                        : Amazon.BedrockAgentRuntime.ConversationRole.User,
-                    Content = [
-                        new Amazon.BedrockAgentRuntime.Model.ContentBlock()
-                        {
-                            Text = message.Content,
-                        },
-                    ],
-                });
-            }
-        }
-
-        return sessionState;
-    }
-    #endregion
-}
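
To make the alternation rules above concrete, here is a standalone sketch using a hypothetical history; it mirrors the private `EnsureHistoryAlternates`/`EnsureLastMessageIsUser` helpers rather than calling them, and is not part of the patch:

```csharp
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

const string MessagePlaceholder = "[SILENCE]";

// Hypothetical history: two consecutive user messages and no trailing user turn.
ChatHistory history =
[
    new ChatMessageContent(AuthorRole.User, "First question"),
    new ChatMessageContent(AuthorRole.User, "Follow-up before any reply"),
    new ChatMessageContent(AuthorRole.Assistant, "An answer"),
];

// Insert an opposite-role placeholder wherever two adjacent messages share a role.
for (int i = 1; i < history.Count; i++)
{
    if (history[i].Role == history[i - 1].Role)
    {
        var fillerRole = history[i].Role == AuthorRole.Assistant ? AuthorRole.User : AuthorRole.Assistant;
        history.Insert(i, new ChatMessageContent(fillerRole, MessagePlaceholder));
        i++; // skip past the inserted placeholder
    }
}

// Bedrock expects the conversation to end with a user turn.
if (history[history.Count - 1].Role != AuthorRole.User)
{
    history.Add(new ChatMessageContent(AuthorRole.User, MessagePlaceholder));
}

// Result: User, Assistant "[SILENCE]", User, Assistant, User "[SILENCE]".
```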
diff --git a/dotnet/src/Agents/Bedrock/Extensions/BedrockAgentExtensions.cs b/dotnet/src/Agents/Bedrock/Extensions/BedrockAgentExtensions.cs
deleted file mode 100644
index c2e6bdd358bb..000000000000
--- a/dotnet/src/Agents/Bedrock/Extensions/BedrockAgentExtensions.cs
+++ /dev/null
@@ -1,214 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System;
-using System.Threading;
-using System.Threading.Tasks;
-using Amazon.BedrockAgent;
-using Amazon.BedrockAgent.Model;
-
-namespace Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
-
-/// 
-/// Extensions associated with 
-/// 
-public static class BedrockAgentExtensions
-{
-    /// 
-    /// Creates an agent.
-    /// 
-    /// The  instance.
-    /// The  instance.
-    /// The  instance.
-    public static async Task CreateAndPrepareAgentAsync(
-        this AmazonBedrockAgentClient client,
-        CreateAgentRequest request,
-        CancellationToken cancellationToken = default)
-    {
-        var createAgentResponse = await client.CreateAgentAsync(request, cancellationToken).ConfigureAwait(false);
-        // The agent will first enter the CREATING status.
-        // When the operation finishes, it will enter the NOT_PREPARED status.
-        // We need to wait for the agent to reach the NOT_PREPARED status before we can prepare it.
-        await client.WaitForAgentStatusAsync(createAgentResponse.Agent, AgentStatus.NOT_PREPARED, cancellationToken: cancellationToken).ConfigureAwait(false);
-        return await client.PrepareAgentAndWaitUntilPreparedAsync(createAgentResponse.Agent, cancellationToken).ConfigureAwait(false);
-    }
-
-    /// 
-    /// Associates an agent with a knowledge base.
-    /// 
-    /// The  instance.
-    /// The knowledge base ID.
-    /// The description of the association.
-    /// The  instance.
-    public static async Task AssociateAgentKnowledgeBaseAsync(
-        this BedrockAgent agent,
-        string knowledgeBaseId,
-        string description,
-        CancellationToken cancellationToken = default)
-    {
-        await agent.Client.AssociateAgentKnowledgeBaseAsync(new()
-        {
-            AgentId = agent.Id,
-            AgentVersion = agent.AgentModel.AgentVersion ?? "DRAFT",
-            KnowledgeBaseId = knowledgeBaseId,
-            Description = description,
-        }, cancellationToken).ConfigureAwait(false);
-
-        await agent.Client.PrepareAgentAndWaitUntilPreparedAsync(agent.AgentModel, cancellationToken).ConfigureAwait(false);
-    }
-
-    /// 
-    /// Disassociate the agent from a knowledge base.
-    /// 
-    /// The  instance.
-    /// The id of the knowledge base to disassociate from the agent.
-    /// The  to monitor for cancellation requests. The default is .
-    public static async Task DisassociateAgentKnowledgeBaseAsync(
-        this BedrockAgent agent,
-        string knowledgeBaseId,
-        CancellationToken cancellationToken = default)
-    {
-        await agent.Client.DisassociateAgentKnowledgeBaseAsync(new()
-        {
-            AgentId = agent.Id,
-            AgentVersion = agent.AgentModel.AgentVersion ?? "DRAFT",
-            KnowledgeBaseId = knowledgeBaseId,
-        }, cancellationToken).ConfigureAwait(false);
-
-        await agent.Client.PrepareAgentAndWaitUntilPreparedAsync(agent.AgentModel, cancellationToken).ConfigureAwait(false);
-    }
-
-    /// 
-    /// List the knowledge bases associated with the agent.
-    /// 
-    /// The  instance.
-    /// The  to monitor for cancellation requests. The default is .
-    /// A  containing the knowledge bases associated with the agent.
-    public static async Task ListAssociatedKnowledgeBasesAsync(
-        this BedrockAgent agent,
-        CancellationToken cancellationToken = default)
-    {
-        return await agent.Client.ListAgentKnowledgeBasesAsync(new()
-        {
-            AgentId = agent.Id,
-            AgentVersion = agent.AgentModel.AgentVersion ?? "DRAFT",
-        }, cancellationToken).ConfigureAwait(false);
-    }
-
-    /// 
-    /// Create a code interpreter action group for the agent and prepare the agent.
-    /// 
-    /// The  instance.
-    /// The  to monitor for cancellation requests. The default is .
-    public static async Task CreateCodeInterpreterActionGroupAsync(
-        this BedrockAgent agent,
-        CancellationToken cancellationToken = default)
-    {
-        var createAgentActionGroupRequest = new CreateAgentActionGroupRequest
-        {
-            AgentId = agent.Id,
-            AgentVersion = agent.AgentModel.AgentVersion ?? "DRAFT",
-            ActionGroupName = agent.CodeInterpreterActionGroupSignature,
-            ActionGroupState = ActionGroupState.ENABLED,
-            ParentActionGroupSignature = new(Amazon.BedrockAgent.ActionGroupSignature.AMAZONCodeInterpreter),
-        };
-
-        await agent.Client.CreateAgentActionGroupAsync(createAgentActionGroupRequest, cancellationToken).ConfigureAwait(false);
-        await agent.Client.PrepareAgentAndWaitUntilPreparedAsync(agent.AgentModel, cancellationToken).ConfigureAwait(false);
-    }
-
-    /// 
-    /// Create a kernel function action group for the agent and prepare the agent.
-    /// 
-    /// The  instance.
-    /// The  to monitor for cancellation requests. The default is .
-    public static async Task CreateKernelFunctionActionGroupAsync(
-        this BedrockAgent agent,
-        CancellationToken cancellationToken = default)
-    {
-        var createAgentActionGroupRequest = new CreateAgentActionGroupRequest
-        {
-            AgentId = agent.Id,
-            AgentVersion = agent.AgentModel.AgentVersion ?? "DRAFT",
-            ActionGroupName = agent.KernelFunctionActionGroupSignature,
-            ActionGroupState = ActionGroupState.ENABLED,
-            ActionGroupExecutor = new()
-            {
-                CustomControl = Amazon.BedrockAgent.CustomControlMethod.RETURN_CONTROL,
-            },
-            FunctionSchema = agent.Kernel.ToFunctionSchema(),
-        };
-
-        await agent.Client.CreateAgentActionGroupAsync(createAgentActionGroupRequest, cancellationToken).ConfigureAwait(false);
-        await agent.Client.PrepareAgentAndWaitUntilPreparedAsync(agent.AgentModel, cancellationToken).ConfigureAwait(false);
-    }
-
-    /// 
-    /// Enable user input for the agent and prepare the agent.
-    /// 
-    /// The  instance.
-    /// The  to monitor for cancellation requests. The default is .
-    public static async Task EnableUserInputActionGroupAsync(
-        this BedrockAgent agent,
-        CancellationToken cancellationToken = default)
-    {
-        var createAgentActionGroupRequest = new CreateAgentActionGroupRequest
-        {
-            AgentId = agent.Id,
-            AgentVersion = agent.AgentModel.AgentVersion ?? "DRAFT",
-            ActionGroupName = agent.UseInputActionGroupSignature,
-            ActionGroupState = ActionGroupState.ENABLED,
-            ParentActionGroupSignature = new(Amazon.BedrockAgent.ActionGroupSignature.AMAZONUserInput),
-        };
-
-        await agent.Client.CreateAgentActionGroupAsync(createAgentActionGroupRequest, cancellationToken).ConfigureAwait(false);
-        await agent.Client.PrepareAgentAndWaitUntilPreparedAsync(agent.AgentModel, cancellationToken).ConfigureAwait(false);
-    }
-
-    private static async Task PrepareAgentAndWaitUntilPreparedAsync(
-        this AmazonBedrockAgentClient client,
-        Amazon.BedrockAgent.Model.Agent agent,
-        CancellationToken cancellationToken = default)
-    {
-        var prepareAgentResponse = await client.PrepareAgentAsync(new() { AgentId = agent.AgentId }, cancellationToken).ConfigureAwait(false);
-
-        // The agent will take some time to enter the PREPARING status after the prepare operation is called.
-        // We need to wait for the agent to reach the PREPARING status before we can proceed, otherwise we
-        // will return immediately if the agent is already in PREPARED status.
-        await client.WaitForAgentStatusAsync(agent, AgentStatus.PREPARING, cancellationToken: cancellationToken).ConfigureAwait(false);
-        // When the agent is prepared, it will enter the PREPARED status.
-        return await client.WaitForAgentStatusAsync(agent, AgentStatus.PREPARED, cancellationToken: cancellationToken).ConfigureAwait(false);
-    }
-
-    /// 
-    /// Wait for the agent to reach the specified status.
-    /// 
-    /// The  instance.
-    /// The  to monitor.
-    /// The status to wait for.
-    /// The interval in seconds to wait between attempts. The default is 2 seconds.
-    /// The maximum number of attempts to make. The default is 5 attempts.
-    /// The  to monitor for cancellation requests.
-    /// The  instance.
-    private static async Task WaitForAgentStatusAsync(
-        this AmazonBedrockAgentClient client,
-        Amazon.BedrockAgent.Model.Agent agent,
-        AgentStatus status,
-        int interval = 2,
-        int maxAttempts = 5,
-        CancellationToken cancellationToken = default)
-    {
-        for (var i = 0; i < maxAttempts; i++)
-        {
-            var getAgentResponse = await client.GetAgentAsync(new() { AgentId = agent.AgentId }, cancellationToken).ConfigureAwait(false);
-
-            if (getAgentResponse.Agent.AgentStatus == status)
-            {
-                return getAgentResponse.Agent;
-            }
-
-            await Task.Delay(interval * 1000, cancellationToken).ConfigureAwait(false);
-        }
-
-        throw new TimeoutException($"Agent did not reach status {status} within the specified time.");
-    }
-}
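
A hedged sketch of how these removed helpers were composed end to end; the agent name, role ARN, and foundation model below are placeholder values, and the `CreateAgentRequest` properties are standard AWS SDK fields rather than anything defined in this patch:

```csharp
using Amazon.BedrockAgent;
using Amazon.BedrockAgent.Model;
using Microsoft.SemanticKernel.Agents.Bedrock;
using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;

var client = new AmazonBedrockAgentClient();

// Creates the agent, waits for NOT_PREPARED, prepares it, and waits for PREPARED.
var agentModel = await client.CreateAndPrepareAgentAsync(new CreateAgentRequest
{
    AgentName = "sk-sample-agent",                                            // placeholder
    FoundationModel = "anthropic.claude-3-haiku-20240307-v1:0",               // placeholder
    AgentResourceRoleArn = "arn:aws:iam::123456789012:role/BedrockAgentRole", // placeholder
    Instruction = "You are a friendly assistant.",
});

var agent = new BedrockAgent(agentModel, client);

// Each call creates an action group and re-prepares the agent as shown above.
await agent.CreateCodeInterpreterActionGroupAsync();
await agent.EnableUserInputActionGroupAsync();
```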
diff --git a/dotnet/src/Agents/Bedrock/Extensions/BedrockAgentInvokeExtensions.cs b/dotnet/src/Agents/Bedrock/Extensions/BedrockAgentInvokeExtensions.cs
deleted file mode 100644
index 5e67aacaf04a..000000000000
--- a/dotnet/src/Agents/Bedrock/Extensions/BedrockAgentInvokeExtensions.cs
+++ /dev/null
@@ -1,225 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.Collections.Generic;
-using System.Linq;
-using System.Runtime.CompilerServices;
-using System.Text;
-using System.Threading;
-using System.Threading.Tasks;
-using Amazon.BedrockAgentRuntime;
-using Amazon.BedrockAgentRuntime.Model;
-using Amazon.Runtime.EventStreams.Internal;
-using Microsoft.SemanticKernel.Agents.Extensions;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.FunctionCalling;
-
-namespace Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
-
-/// 
-/// Extensions associated with the status of a .
-/// 
-internal static class BedrockAgentInvokeExtensions
-{
-    public static async IAsyncEnumerable InternalInvokeAsync(
-        this BedrockAgent agent,
-        InvokeAgentRequest invokeAgentRequest,
-        KernelArguments? arguments,
-        [EnumeratorCancellation] CancellationToken cancellationToken)
-    {
-        // This session state is used to store the results of function calls to be passed back to the agent.
-        // https://docs.aws.amazon.com/sdkfornet/v3/apidocs/items/BedrockAgentRuntime/TSessionState.html
-        SessionState? sessionState = null;
-        for (var requestIndex = 0; ; requestIndex++)
-        {
-            if (sessionState != null)
-            {
-                invokeAgentRequest.SessionState = sessionState;
-                sessionState = null;
-            }
-            var invokeAgentResponse = await agent.RuntimeClient.InvokeAgentAsync(invokeAgentRequest, cancellationToken).ConfigureAwait(false);
-
-            if (invokeAgentResponse.HttpStatusCode != System.Net.HttpStatusCode.OK)
-            {
-                throw new HttpOperationException($"Failed to invoke agent. Status code: {invokeAgentResponse.HttpStatusCode}");
-            }
-
-            List functionCallContents = [];
-            await foreach (var responseEvent in invokeAgentResponse.Completion.ToAsyncEnumerable().ConfigureAwait(false))
-            {
-                if (responseEvent is BedrockAgentRuntimeEventStreamException bedrockAgentRuntimeEventStreamException)
-                {
-                    throw new KernelException("Failed to handle Bedrock Agent stream event.", bedrockAgentRuntimeEventStreamException);
-                }
-
-                var chatMessageContent =
-                    HandleChunkEvent(agent, responseEvent) ??
-                    HandleFilesEvent(agent, responseEvent) ??
-                    HandleReturnControlEvent(agent, responseEvent, arguments) ??
-                    HandleTraceEvent(agent, responseEvent) ??
-                    throw new KernelException($"Failed to handle Bedrock Agent stream event: {responseEvent}");
-                if (chatMessageContent.Items.Count > 0 && chatMessageContent.Items[0] is FunctionCallContent functionCallContent)
-                {
-                    functionCallContents.AddRange(chatMessageContent.Items.Where(item => item is FunctionCallContent).Cast());
-                }
-                else
-                {
-                    yield return chatMessageContent;
-                }
-            }
-
-            // This is used to cap the auto function invocation loop to prevent infinite loops.
-            // It doesn't use the `FunctionCallsProcessor` to process the functions because we do not need
-            // many of the features it offers and we want to keep the code simple.
-            var functionChoiceBehaviorConfiguration = new FunctionCallsProcessor().GetConfiguration(
-                FunctionChoiceBehavior.Auto(), [], requestIndex, agent.Kernel);
-
-            if (functionCallContents.Count > 0 && functionChoiceBehaviorConfiguration!.AutoInvoke)
-            {
-                var functionResults = await InvokeFunctionCallsAsync(agent, functionCallContents, cancellationToken).ConfigureAwait(false);
-                sessionState = CreateSessionStateWithFunctionResults(functionResults, agent);
-            }
-            else
-            {
-                break;
-            }
-        }
-    }
-
-    private static ChatMessageContent? HandleChunkEvent(
-        BedrockAgent agent,
-        IEventStreamEvent responseEvent)
-    {
-        return responseEvent is not PayloadPart payload
-            ? null
-            : new ChatMessageContent()
-            {
-                Role = AuthorRole.Assistant,
-                AuthorName = agent.GetDisplayName(),
-                Content = Encoding.UTF8.GetString(payload.Bytes.ToArray()),
-                ModelId = agent.AgentModel.FoundationModel,
-                InnerContent = payload,
-            };
-    }
-
-    private static ChatMessageContent? HandleFilesEvent(
-        BedrockAgent agent,
-        IEventStreamEvent responseEvent)
-    {
-        if (responseEvent is not FilePart files)
-        {
-            return null;
-        }
-
-        ChatMessageContentItemCollection binaryContents = [];
-        foreach (var file in files.Files)
-        {
-            binaryContents.Add(new BinaryContent(file.Bytes.ToArray(), file.Type)
-            {
-                Metadata = new Dictionary()
-                {
-                    { "Name", file.Name },
-                },
-            });
-        }
-
-        return new ChatMessageContent()
-        {
-            Role = AuthorRole.Assistant,
-            AuthorName = agent.GetDisplayName(),
-            Items = binaryContents,
-            ModelId = agent.AgentModel.FoundationModel,
-            InnerContent = files,
-        };
-    }
-
-    private static ChatMessageContent? HandleReturnControlEvent(
-        BedrockAgent agent,
-        IEventStreamEvent responseEvent,
-        KernelArguments? arguments)
-    {
-        if (responseEvent is not ReturnControlPayload returnControlPayload)
-        {
-            return null;
-        }
-
-        ChatMessageContentItemCollection functionCallContents = [];
-        foreach (var invocationInput in returnControlPayload.InvocationInputs)
-        {
-            var functionInvocationInput = invocationInput.FunctionInvocationInput;
-            functionCallContents.Add(new FunctionCallContent(
-                functionInvocationInput.Function,
-                id: returnControlPayload.InvocationId,
-                arguments: functionInvocationInput.Parameters.FromFunctionParameters(arguments))
-            {
-                Metadata = new Dictionary()
-                {
-                    { "ActionGroup", functionInvocationInput.ActionGroup },
-                    { "ActionInvocationType", functionInvocationInput.ActionInvocationType },
-                },
-            });
-        }
-
-        return new ChatMessageContent()
-        {
-            Role = AuthorRole.Assistant,
-            AuthorName = agent.GetDisplayName(),
-            Items = functionCallContents,
-            ModelId = agent.AgentModel.FoundationModel,
-            InnerContent = returnControlPayload,
-        };
-    }
-
-    private static ChatMessageContent? HandleTraceEvent(
-        BedrockAgent agent,
-        IEventStreamEvent responseEvent)
-    {
-        return responseEvent is not TracePart trace
-            ? null
-            : new ChatMessageContent()
-            {
-                Role = AuthorRole.Assistant,
-                AuthorName = agent.GetDisplayName(),
-                ModelId = agent.AgentModel.FoundationModel,
-                InnerContent = trace,
-            };
-    }
-
-    private static async Task> InvokeFunctionCallsAsync(
-        BedrockAgent agent,
-        List functionCallContents,
-        CancellationToken cancellationToken)
-    {
-        var functionResults = await Task.WhenAll(functionCallContents.Select(async functionCallContent =>
-        {
-            return await functionCallContent.InvokeAsync(agent.Kernel, cancellationToken).ConfigureAwait(false);
-        })).ConfigureAwait(false);
-
-        return [.. functionResults];
-    }
-
-    private static SessionState CreateSessionStateWithFunctionResults(List functionResults, BedrockAgent agent)
-    {
-        return functionResults.Count == 0
-            ? throw new KernelException("No function results were returned.")
-            : new()
-            {
-                InvocationId = functionResults[0].CallId,
-                ReturnControlInvocationResults = [.. functionResults.Select(functionResult =>
-                    {
-                        return new InvocationResultMember()
-                        {
-                            FunctionResult = new Amazon.BedrockAgentRuntime.Model.FunctionResult
-                            {
-                                ActionGroup = agent.KernelFunctionActionGroupSignature,
-                                Function = functionResult.FunctionName,
-                                ResponseBody = new Dictionary
-                                {
-                                    { "TEXT", new ContentBody() { Body = functionResult.Result as string } }
-                                }
-                            }
-                        };
-                    }
-                )],
-            };
-    }
-}
diff --git a/dotnet/src/Agents/Bedrock/Extensions/BedrockFunctionSchemaExtensions.cs b/dotnet/src/Agents/Bedrock/Extensions/BedrockFunctionSchemaExtensions.cs
deleted file mode 100644
index c890638484a2..000000000000
--- a/dotnet/src/Agents/Bedrock/Extensions/BedrockFunctionSchemaExtensions.cs
+++ /dev/null
@@ -1,102 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System;
-using System.Collections.Generic;
-using Amazon.BedrockAgent.Model;
-using Amazon.BedrockAgentRuntime.Model;
-
-namespace Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
-
-/// 
-/// Extensions associated with the status of a .
-/// 
-internal static class BedrockFunctionSchemaExtensions
-{
-    public static KernelArguments FromFunctionParameters(this List parameters, KernelArguments? arguments)
-    {
-        KernelArguments kernelArguments = arguments ?? [];
-        foreach (var parameter in parameters)
-        {
-            kernelArguments.Add(parameter.Name, parameter.Value);
-        }
-
-        return kernelArguments;
-    }
-
-    public static Amazon.BedrockAgent.Model.FunctionSchema ToFunctionSchema(this Kernel kernel)
-    {
-        var plugins = kernel.Plugins;
-        List functions = [];
-        foreach (var plugin in plugins)
-        {
-            foreach (KernelFunction function in plugin)
-            {
-                functions.Add(new Function
-                {
-                    Name = function.Name,
-                    Description = function.Description,
-                    Parameters = function.Metadata.Parameters.CreateParameterSpec(),
-                    // This field controls whether user confirmation is required to invoke the function.
-                    // If this is set to "ENABLED", the user will be prompted to confirm the function invocation.
-                    // Only after the user confirms, the function call request will be issued by the agent.
-                    // If the user denies the confirmation, the agent will act as if the function does not exist.
-                    // Currently, we do not support this feature, so we set it to "DISABLED".
-                    RequireConfirmation = Amazon.BedrockAgent.RequireConfirmation.DISABLED,
-                });
-            }
-        }
-
-        return new Amazon.BedrockAgent.Model.FunctionSchema
-        {
-            Functions = functions,
-        };
-    }
-
-    private static Dictionary CreateParameterSpec(
-        this IReadOnlyList parameters)
-    {
-        Dictionary parameterSpec = [];
-        foreach (var parameter in parameters)
-        {
-            parameterSpec.Add(parameter.Name, new Amazon.BedrockAgent.Model.ParameterDetail
-            {
-                Description = parameter.Description,
-                Required = parameter.IsRequired,
-                Type = parameter.ParameterType.ToAmazonType(),
-            });
-        }
-
-        return parameterSpec;
-    }
-
-    private static Amazon.BedrockAgent.Type ToAmazonType(this System.Type? parameterType)
-    {
-        var typeString = parameterType?.GetFriendlyTypeName();
-        return typeString switch
-        {
-            "String" => Amazon.BedrockAgent.Type.String,
-            "Boolean" => Amazon.BedrockAgent.Type.Boolean,
-            "Int16" => Amazon.BedrockAgent.Type.Integer,
-            "UInt16" => Amazon.BedrockAgent.Type.Integer,
-            "Int32" => Amazon.BedrockAgent.Type.Integer,
-            "UInt32" => Amazon.BedrockAgent.Type.Integer,
-            "Int64" => Amazon.BedrockAgent.Type.Integer,
-            "UInt64" => Amazon.BedrockAgent.Type.Integer,
-            "Single" => Amazon.BedrockAgent.Type.Number,
-            "Double" => Amazon.BedrockAgent.Type.Number,
-            "Decimal" => Amazon.BedrockAgent.Type.Number,
-            "String[]" => Amazon.BedrockAgent.Type.Array,
-            "Boolean[]" => Amazon.BedrockAgent.Type.Array,
-            "Int16[]" => Amazon.BedrockAgent.Type.Array,
-            "UInt16[]" => Amazon.BedrockAgent.Type.Array,
-            "Int32[]" => Amazon.BedrockAgent.Type.Array,
-            "UInt32[]" => Amazon.BedrockAgent.Type.Array,
-            "Int64[]" => Amazon.BedrockAgent.Type.Array,
-            "UInt64[]" => Amazon.BedrockAgent.Type.Array,
-            "Single[]" => Amazon.BedrockAgent.Type.Array,
-            "Double[]" => Amazon.BedrockAgent.Type.Array,
-            "Decimal[]" => Amazon.BedrockAgent.Type.Array,
-            _ => throw new ArgumentException($"Unsupported parameter type: {typeString}"),
-        };
-    }
-}
diff --git a/dotnet/src/Agents/Bedrock/README.md b/dotnet/src/Agents/Bedrock/README.md
deleted file mode 100644
index d480985fc667..000000000000
--- a/dotnet/src/Agents/Bedrock/README.md
+++ /dev/null
@@ -1,27 +0,0 @@
-# Amazon Bedrock AI Agents in Semantic Kernel
-
-## Overview
-
-AWS Bedrock Agents is a managed service that allows users to stand up and run AI agents in the AWS cloud quickly.
-
-## Tools/Functions
-
-Bedrock Agents allow the use of tools via [action groups](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-action-create.html).
-
-The integration of Bedrock Agents with Semantic Kernel allows users to register kernel functions as tools in Bedrock Agents.
-
-## Enable code interpretation
-
-Bedrock Agents can write and execute code via a feature known as [code interpretation](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-code-interpretation.html), similar to what OpenAI offers.
-
-## Enable user input
-
-Bedrock Agents can [request user input](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-user-input.html) in case of missing information to invoke a tool. When this is enabled, the agent will prompt the user for the missing information. When this is disabled, the agent will guess the missing information.
-
-## Knowledge base
-
-Bedrock Agents can leverage data saved on AWS to perform RAG tasks; this is referred to as the [knowledge base](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-kb-add.html) in AWS.
-
-## Multi-agent
-
-Bedrock Agents support [multi-agent workflows](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-multi-agent-collaboration.html) for more complex tasks. However, they employ a different pattern than the one used in Semantic Kernel, so this is not supported in the current integration.
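
To connect the README's tools section to the extension code above, here is a minimal sketch of exposing a kernel function as a Bedrock action group; the plugin, function, and agent id are illustrative placeholders and not part of the patch:

```csharp
using Amazon.BedrockAgent;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.Bedrock;
using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;

// Register an illustrative kernel function as a plugin.
Kernel kernel = new();
kernel.Plugins.AddFromFunctions("Weather",
[
    KernelFunctionFactory.CreateFromMethod(
        (string city) => $"The weather in {city} is sunny.",
        functionName: "GetWeather",
        description: "Gets the current weather for a city."),
]);

// Attach the kernel to an existing Bedrock agent (placeholder agent id).
var client = new AmazonBedrockAgentClient();
var getAgentResponse = await client.GetAgentAsync(new() { AgentId = "AGENT_ID" });
var agent = new BedrockAgent(getAgentResponse.Agent, client) { Kernel = kernel };

// Converts the kernel functions into a Bedrock function schema and registers a
// RETURN_CONTROL action group, so tool calls flow back through Semantic Kernel.
await agent.CreateKernelFunctionActionGroupAsync();
```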
diff --git a/dotnet/src/Agents/Core/AgentGroupChat.cs b/dotnet/src/Agents/Core/AgentGroupChat.cs
index 1cdb3c638d4b..5d80f969eb4e 100644
--- a/dotnet/src/Agents/Core/AgentGroupChat.cs
+++ b/dotnet/src/Agents/Core/AgentGroupChat.cs
@@ -1,7 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Runtime.CompilerServices;
 using System.Threading;
@@ -9,40 +8,35 @@
 using Microsoft.Extensions.Logging;
 using Microsoft.Extensions.Logging.Abstractions;
 using Microsoft.SemanticKernel.Agents.Chat;
-using Microsoft.SemanticKernel.Agents.Extensions;
 
 namespace Microsoft.SemanticKernel.Agents;
 
 /// 
-/// Represents an  that supports multi-turn interactions.
+/// An  that supports multi-turn interactions.
 /// 
-[Experimental("SKEXP0110")]
 public sealed class AgentGroupChat : AgentChat
 {
     private readonly HashSet _agentIds; // Efficient existence test O(1) vs O(n) for list.
     private readonly List _agents; // Maintain order the agents joined the chat
 
     /// 
-    /// Gets or sets a value that indicates if the completion criteria have been met.
+    /// Indicates whether the completion criteria have been met.  If set, no further
+    /// agent interactions will occur.  Clear to enable more agent interactions.
     /// 
-    /// 
-    ///  if the completion criteria have been met; otherwise .
-    /// The default is . Set to  to enable more agent interactions.
-    /// 
     public bool IsComplete { get; set; }
 
     /// 
-    /// Gets or sets the settings for defining chat behavior.
+    /// Settings for defining chat behavior.
     /// 
     public AgentGroupChatSettings ExecutionSettings { get; set; } = new AgentGroupChatSettings();
 
     /// 
-    /// Gets the agents participating in the chat.
+    /// The agents participating in the chat.
     /// 
     public override IReadOnlyList Agents => this._agents.AsReadOnly();
 
     /// 
-    /// Add an  to the chat.
+    /// Add a  to the chat.
     /// 
     /// The  to add.
     public void AddAgent(Agent agent)
@@ -54,16 +48,14 @@ public void AddAgent(Agent agent)
     }
 
     /// 
-    /// Processes a series of interactions between the  that have joined this .
+    /// Process a series of interactions between the  that have joined this .
+    /// The interactions will proceed according to the  and the 
+    /// defined via .
+    /// In the absence of an , this method will not invoke any agents.
+    /// Any agent may be explicitly selected by calling .
     /// 
-    /// 
-    /// The interactions will proceed according to the  and the
-    ///  defined via .
-    /// In the absence of an , this method does not invoke any agents.
-    /// Any agent can be explicitly selected by calling .
-    /// 
     /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of messages.
+    /// Asynchronous enumeration of messages.
     public override async IAsyncEnumerable InvokeAsync([EnumeratorCancellation] CancellationToken cancellationToken = default)
     {
         this.EnsureStrategyLoggerAssignment();
@@ -92,16 +84,14 @@ public override async IAsyncEnumerable InvokeAsync([Enumerat
     }
 
     /// 
-    /// Processes a series of interactions between the  that have joined this .
+    /// Process a series of interactions between the  that have joined this .
+    /// The interactions will proceed according to the  and the 
+    /// defined via .
+    /// In the absence of an , this method will not invoke any agents.
+    /// Any agent may be explicitly selected by calling .
     /// 
-    /// 
-    /// The interactions will proceed according to the  and the
-    ///  defined via .
-    /// In the absence of an , this method does not invoke any agents.
-    /// Any agent can be explicitly selected by calling .
-    /// 
     /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of streaming messages.
+    /// Asynchronous enumeration of streaming messages.
     public override async IAsyncEnumerable InvokeStreamingAsync([EnumeratorCancellation] CancellationToken cancellationToken = default)
     {
         this.EnsureStrategyLoggerAssignment();
@@ -130,21 +120,21 @@ public override async IAsyncEnumerable InvokeStream
     }
 
     /// 
-    /// Processes a single interaction between a given  and an .
+    /// Process a single interaction between a given  and a .
     /// 
     /// The agent actively interacting with the chat.
     /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of messages.
-    /// 
-    /// The specified agent joins the chat.
-    /// 
+    /// Asynchronous enumeration of messages.
+    /// 
+    /// Specified agent joins the chat.
+    /// 
     public async IAsyncEnumerable InvokeAsync(
         Agent agent,
         [EnumeratorCancellation] CancellationToken cancellationToken = default)
     {
         this.EnsureStrategyLoggerAssignment();
 
-        this.Logger.LogAgentGroupChatInvokingAgent(nameof(InvokeAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
+        this.Logger.LogAgentGroupChatInvokingAgent(nameof(InvokeAsync), agent.GetType(), agent.Id);
 
         this.AddAgent(agent);
 
@@ -159,21 +149,21 @@ public async IAsyncEnumerable InvokeAsync(
     }
 
     /// 
-    /// Processes a single interaction between a given  and an .
+    /// Process a single interaction between a given  and a .
     /// 
     /// The agent actively interacting with the chat.
     /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of messages.
-    /// 
-    /// The specified agent joins the chat.
-    /// 
+    /// Asynchronous enumeration of messages.
+    /// 
+    /// Specified agent joins the chat.
+    /// 
     public async IAsyncEnumerable InvokeStreamingAsync(
         Agent agent,
         [EnumeratorCancellation] CancellationToken cancellationToken = default)
     {
         this.EnsureStrategyLoggerAssignment();
 
-        this.Logger.LogAgentGroupChatInvokingAgent(nameof(InvokeAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
+        this.Logger.LogAgentGroupChatInvokingAgent(nameof(InvokeAsync), agent.GetType(), agent.Id);
 
         this.AddAgent(agent);
 
@@ -188,17 +178,17 @@ public async IAsyncEnumerable InvokeStreamingAsync(
     }
 
     /// 
-    /// Creates a  for a given strategy without HTML-encoding the specified parameters.
+    /// Convenience method to create a  for a given strategy without HTML encoding the specified parameters.
     /// 
     /// The prompt template string that defines the prompt.
     /// 
-    /// An optional  to use when interpreting the .
-    /// The default factory is used when none is provided.
+    /// An optional  to use when interpreting the .
+    /// The default factory will be used when none is provided.
     /// 
     /// The parameter names to exclude from being HTML encoded.
     /// A  created via  using the specified template.
     /// 
-    /// This method is particularly targeted to easily avoid encoding the history used by 
+    /// This is particularly targeted to easily avoid encoding the history used by 
     /// or .
     /// 
     public static KernelFunction CreatePromptFunctionForStrategy(string template, IPromptTemplateFactory? templateFactory = null, params string[] safeParameterNames)
@@ -265,7 +255,7 @@ private async Task SelectAgentAsync(CancellationToken cancellationToken)
             throw;
         }
 
-        this.Logger.LogAgentGroupChatSelectedAgent(nameof(InvokeAsync), agent.GetType(), agent.Id, agent.GetDisplayName(), this.ExecutionSettings.SelectionStrategy.GetType());
+        this.Logger.LogAgentGroupChatSelectedAgent(nameof(InvokeAsync), agent.GetType(), agent.Id, this.ExecutionSettings.SelectionStrategy.GetType());
 
         return agent;
     }
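
For reference, a minimal sketch of the `CreatePromptFunctionForStrategy` helper described above, paired with a `KernelFunctionSelectionStrategy`; the agents, instructions, and template text are illustrative placeholders and not part of the patch:

```csharp
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.Chat;

Kernel kernel = new(); // assumed to be configured with a chat completion service

// Placeholder agents participating in the group chat.
ChatCompletionAgent writerAgent = new() { Name = "Writer", Instructions = "Write the draft.", Kernel = kernel };
ChatCompletionAgent reviewerAgent = new() { Name = "Reviewer", Instructions = "Review the draft.", Kernel = kernel };

// "history" is excluded from HTML encoding so chat content reaches the prompt unmodified.
KernelFunction selectionFunction = AgentGroupChat.CreatePromptFunctionForStrategy(
    """
    Determine which participant should respond next based on the conversation so far.
    Respond with only the participant's name.

    {{$history}}
    """,
    null, // default template factory
    "history");

AgentGroupChat chat = new(writerAgent, reviewerAgent)
{
    ExecutionSettings = new AgentGroupChatSettings
    {
        SelectionStrategy = new KernelFunctionSelectionStrategy(selectionFunction, kernel),
    },
};
```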
diff --git a/dotnet/src/Agents/Core/Agents.Core.csproj b/dotnet/src/Agents/Core/Agents.Core.csproj
index 4311785f61c9..da87688ac22f 100644
--- a/dotnet/src/Agents/Core/Agents.Core.csproj
+++ b/dotnet/src/Agents/Core/Agents.Core.csproj
@@ -5,9 +5,9 @@
     Microsoft.SemanticKernel.Agents.Core
     Microsoft.SemanticKernel.Agents
     net8.0;netstandard2.0
-    $(NoWarn);SKEXP0110;SKEXP0001
+    $(NoWarn);SKEXP0110
     false
-    preview
+    alpha
   
 
   
@@ -22,12 +22,10 @@
     
     
     
-    
   
 
   
     
-    
   
 
   
diff --git a/dotnet/src/Agents/Core/Chat/AgentGroupChatSettings.cs b/dotnet/src/Agents/Core/Chat/AgentGroupChatSettings.cs
index e5399ab46133..f7b2d87fb7e8 100644
--- a/dotnet/src/Agents/Core/Chat/AgentGroupChatSettings.cs
+++ b/dotnet/src/Agents/Core/Chat/AgentGroupChatSettings.cs
@@ -1,36 +1,32 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Threading;
 using System.Threading.Tasks;
 
 namespace Microsoft.SemanticKernel.Agents.Chat;
 
 /// 
-/// Provides settings that affect the behavior of  instances.
+/// Settings that affect behavior of .
 /// 
 /// 
-/// The default behavior results in no agent selection.
+/// Default behavior results in no agent selection.
 /// 
-[Experimental("SKEXP0110")]
 public class AgentGroupChatSettings
 {
     /// 
-    /// Gets the strategy for terminating the agent.
+    /// Strategy for terminating the agent.  Default strategy limited to a single iteration and no termination criteria.
     /// 
-    /// 
-    /// The strategy for terminating the agent. The default strategy a single iteration and no termination criteria.
-    /// 
-    /// 
+    /// 
+    /// See .
+    /// 
     public TerminationStrategy TerminationStrategy { get; init; } = new DefaultTerminationStrategy();
 
     /// 
-    /// Gets the strategy for selecting the next agent.
+    /// Strategy for selecting the next agent.  Defaults to .
     /// 
-    /// 
-    /// The strategy for selecting the next agent. The default is .
-    /// 
-    /// 
+    /// 
+    /// See .
+    /// 
     public SelectionStrategy SelectionStrategy { get; init; } = new SequentialSelectionStrategy();
 
     /// 
diff --git a/dotnet/src/Agents/Core/Chat/AggregatorTerminationStrategy.cs b/dotnet/src/Agents/Core/Chat/AggregatorTerminationStrategy.cs
index 3a0e8d7fac7b..ca83ce407cbb 100644
--- a/dotnet/src/Agents/Core/Chat/AggregatorTerminationStrategy.cs
+++ b/dotnet/src/Agents/Core/Chat/AggregatorTerminationStrategy.cs
@@ -1,6 +1,5 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
@@ -8,9 +7,8 @@
 namespace Microsoft.SemanticKernel.Agents.Chat;
 
 /// 
-/// Defines aggregation behavior for .
+/// Defines aggregation behavior for 
 /// 
-[Experimental("SKEXP0110")]
 public enum AggregateTerminationCondition
 {
     /// 
@@ -25,20 +23,16 @@ public enum AggregateTerminationCondition
 }
 
 /// 
-/// Provides methods to aggregate a set of  objects.
+/// Aggregate a set of  objects.
 /// 
-/// The set of strategies upon which to aggregate.
-[Experimental("SKEXP0110")]
+/// Set of strategies upon which to aggregate.
 public sealed class AggregatorTerminationStrategy(params TerminationStrategy[] strategies) : TerminationStrategy
 {
     private readonly TerminationStrategy[] _strategies = strategies;
 
     /// 
-    /// Gets the logical operation for aggregation.
+    /// Logical operation for aggregation: All or Any (and/or). Default: All.
     /// 
-    /// 
-    /// The logical operation for aggregation, which can be  or . The default is .
-    /// 
     public AggregateTerminationCondition Condition { get; init; } = AggregateTerminationCondition.All;
 
     /// 
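As a rough usage sketch (illustrative; `first` and `second` stand in for already-constructed TerminationStrategy instances), the aggregator combines strategies under the chosen condition:

    TerminationStrategy termination =
        new AggregatorTerminationStrategy(first, second)
        {
            // Terminate only when every aggregated strategy agrees; Any accepts the first match.
            Condition = AggregateTerminationCondition.All,
            MaximumIterations = 10,
        };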
diff --git a/dotnet/src/Agents/Core/Chat/KernelFunctionSelectionStrategy.cs b/dotnet/src/Agents/Core/Chat/KernelFunctionSelectionStrategy.cs
index 4fa3c001e2c8..fcfea6e1fa93 100644
--- a/dotnet/src/Agents/Core/Chat/KernelFunctionSelectionStrategy.cs
+++ b/dotnet/src/Agents/Core/Chat/KernelFunctionSelectionStrategy.cs
@@ -1,22 +1,19 @@
 // Copyright (c) Microsoft. All rights reserved.
-
 using System;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
+using Microsoft.SemanticKernel.Agents.History;
 using Microsoft.SemanticKernel.Agents.Internal;
-using Microsoft.SemanticKernel.ChatCompletion;
 
 namespace Microsoft.SemanticKernel.Agents.Chat;
 
 /// 
 /// Determines agent selection based on the evaluation of a .
 /// 
-/// A  used for selection criteria.
+/// A  used for selection criteria
 /// A kernel instance with services for function execution.
-[Experimental("SKEXP0110")]
 public class KernelFunctionSelectionStrategy(KernelFunction function, Kernel kernel) : SelectionStrategy
 {
     /// 
@@ -30,49 +27,49 @@ public class KernelFunctionSelectionStrategy(KernelFunction function, Kernel ker
     public const string DefaultHistoryVariableName = "_history_";
 
     /// 
-    /// Gets the  key associated with the list of agent names when
+    /// The  key associated with the list of agent names when
     /// invoking .
     /// 
     public string AgentsVariableName { get; init; } = DefaultAgentsVariableName;
 
     /// 
-    /// Gets the  key associated with the chat history when
+    /// The  key associated with the chat history when
     /// invoking .
     /// 
     public string HistoryVariableName { get; init; } = DefaultHistoryVariableName;
 
     /// 
-    /// Gets the optional arguments used when invoking .
+    /// Optional arguments used when invoking .
     /// 
     public KernelArguments? Arguments { get; init; }
 
     /// 
-    /// Gets the  used when invoking .
+    /// The  used when invoking .
     /// 
     public Kernel Kernel => kernel;
 
     /// 
-    /// Gets the  invoked as selection criteria.
+    /// The  invoked as selection criteria.
     /// 
     public KernelFunction Function { get; } = function;
 
     /// 
-    /// Gets a value that indicates whether only the agent name is included in the history when invoking .
+    /// Only include agent name in history when invoking .
     /// 
     public bool EvaluateNameOnly { get; init; }
 
     /// 
-    /// Gets an optional  to reduce the history.
+    /// Optionally specify a  to reduce the history.
     /// 
     public IChatHistoryReducer? HistoryReducer { get; init; }
 
     /// 
-    /// Gets a value that indicates whether  is used in the event of a failure to select an agent.
+    /// When set, will use  in the event of a failure to select an agent.
     /// 
     public bool UseInitialAgentAsFallback { get; init; }
 
     /// 
-    /// Gets a callback responsible for translating the 
+    /// A callback responsible for translating the 
     /// to the termination criteria.
     /// 
     public Func ResultParser { get; init; } = (result) => result.GetValue() ?? string.Empty;
diff --git a/dotnet/src/Agents/Core/Chat/KernelFunctionTerminationStrategy.cs b/dotnet/src/Agents/Core/Chat/KernelFunctionTerminationStrategy.cs
index 707aa46af466..26ad20e747dc 100644
--- a/dotnet/src/Agents/Core/Chat/KernelFunctionTerminationStrategy.cs
+++ b/dotnet/src/Agents/Core/Chat/KernelFunctionTerminationStrategy.cs
@@ -1,22 +1,19 @@
 // Copyright (c) Microsoft. All rights reserved.
-
 using System;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
+using Microsoft.SemanticKernel.Agents.History;
 using Microsoft.SemanticKernel.Agents.Internal;
-using Microsoft.SemanticKernel.ChatCompletion;
 
 namespace Microsoft.SemanticKernel.Agents.Chat;
 
 /// 
 /// Signals termination based on the evaluation of a .
 /// 
-/// A  used for termination criteria.
+/// A  used for termination criteria
 /// A kernel instance with services for function execution.
-[Experimental("SKEXP0110")]
 public class KernelFunctionTerminationStrategy(KernelFunction function, Kernel kernel) : TerminationStrategy
 {
     /// 
@@ -30,45 +27,45 @@ public class KernelFunctionTerminationStrategy(KernelFunction function, Kernel k
     public const string DefaultHistoryVariableName = "_history_";
 
     /// 
-    /// Gets the  key associated with the agent name when
+    /// The  key associated with the agent name when
     /// invoking .
     /// 
     public string AgentVariableName { get; init; } = DefaultAgentVariableName;
 
     /// 
-    /// Gets the  key associated with the chat history when
+    /// The  key associated with the chat history when
     /// invoking .
     /// 
     public string HistoryVariableName { get; init; } = DefaultHistoryVariableName;
 
     /// 
-    /// Gets optional arguments used when invoking .
+    /// Optional arguments used when invoking .
     /// 
     public KernelArguments? Arguments { get; init; }
 
     /// 
-    /// Gets the  used when invoking .
+    /// The  used when invoking .
     /// 
     public Kernel Kernel => kernel;
 
     /// 
-    /// Gets the  invoked as termination criteria.
+    /// The  invoked as termination criteria.
     /// 
     public KernelFunction Function { get; } = function;
 
     /// 
-    /// Gets a value that indicates whether only the agent name is included in the history when invoking .
+    /// Only include agent name in history when invoking .
     /// 
     public bool EvaluateNameOnly { get; init; }
 
     /// 
-    /// Gets a callback responsible for translating the 
+    /// A callback responsible for translating the 
     /// to the termination criteria.
     /// 
     public Func ResultParser { get; init; } = (_) => true;
 
     /// 
-    /// Gets an optional  to reduce the history.
+    /// Optionally specify a  to reduce the history.
     /// 
     public IChatHistoryReducer? HistoryReducer { get; init; }
 
diff --git a/dotnet/src/Agents/Core/Chat/RegExTerminationStrategy.cs b/dotnet/src/Agents/Core/Chat/RegExTerminationStrategy.cs
index 0b84c09b8c79..2745a325ee88 100644
--- a/dotnet/src/Agents/Core/Chat/RegExTerminationStrategy.cs
+++ b/dotnet/src/Agents/Core/Chat/RegExTerminationStrategy.cs
@@ -1,6 +1,5 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Text.RegularExpressions;
 using System.Threading;
@@ -12,7 +11,6 @@ namespace Microsoft.SemanticKernel.Agents.Chat;
 /// Signals termination when the most recent message matches against the defined regular expressions
 /// for the specified agent (if provided).
 /// 
-[Experimental("SKEXP0110")]
 public sealed class RegexTerminationStrategy : TerminationStrategy
 {
     private readonly Regex[] _expressions;
diff --git a/dotnet/src/Agents/Core/Chat/SelectionStrategy.cs b/dotnet/src/Agents/Core/Chat/SelectionStrategy.cs
index e9bca243ec9c..1ba5fb502649 100644
--- a/dotnet/src/Agents/Core/Chat/SelectionStrategy.cs
+++ b/dotnet/src/Agents/Core/Chat/SelectionStrategy.cs
@@ -1,6 +1,5 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Extensions.Logging;
@@ -9,36 +8,35 @@
 namespace Microsoft.SemanticKernel.Agents.Chat;
 
 /// 
-/// Provides a base strategy class for selecting the next agent for an .
+/// Base strategy class for selecting the next agent for a .
 /// 
-[Experimental("SKEXP0110")]
 public abstract class SelectionStrategy
 {
     /// 
-    /// Gets a value that indicates if an agent has been selected (first time).
+    /// Flag indicating if an agent has been selected (first time).
     /// 
     protected bool HasSelected { get; private set; }
 
     /// 
-    /// Gets or sets an optional agent for initial selection.
+    /// An optional agent for initial selection.
     /// 
     /// 
-    /// Setting this property is useful to avoid latency in initial agent selection.
+    /// Useful to avoid latency in initial agent selection.
     /// 
     public Agent? InitialAgent { get; set; }
 
     /// 
-    /// Gets the  associated with the .
+    /// The  associated with the .
     /// 
     protected internal ILogger Logger { get; internal set; } = NullLogger.Instance;
 
     /// 
-    /// Determines which agent goes next.
+    /// Determine which agent goes next.
     /// 
     /// The agents participating in chat.
     /// The chat history.
     /// The  to monitor for cancellation requests. The default is .
-    /// The agent that will take the next turn.
+    /// The agent who shall take the next turn.
     public async Task NextAsync(IReadOnlyList agents, IReadOnlyList history, CancellationToken cancellationToken = default)
     {
         if (agents.Count == 0 && this.InitialAgent == null)
@@ -57,11 +55,11 @@ public async Task NextAsync(IReadOnlyList agents, IReadOnlyList
-    /// Determines which agent goes next.
+    /// Determine which agent goes next.
     /// 
     /// The agents participating in chat.
     /// The chat history.
     /// The  to monitor for cancellation requests. The default is .
-    /// The agent that will take the next turn.
+    /// The agent who shall take the next turn.
     protected abstract Task SelectAgentAsync(IReadOnlyList agents, IReadOnlyList history, CancellationToken cancellationToken = default);
 }
diff --git a/dotnet/src/Agents/Core/Chat/SequentialSelectionStrategy.cs b/dotnet/src/Agents/Core/Chat/SequentialSelectionStrategy.cs
index 9f71372f38d4..4983d0752414 100644
--- a/dotnet/src/Agents/Core/Chat/SequentialSelectionStrategy.cs
+++ b/dotnet/src/Agents/Core/Chat/SequentialSelectionStrategy.cs
@@ -1,23 +1,20 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Threading;
 using System.Threading.Tasks;
-using Microsoft.SemanticKernel.Agents.Extensions;
 
 namespace Microsoft.SemanticKernel.Agents.Chat;
 
 /// 
-/// Represents a round-robin turn-taking strategy. Agent order is based on the order
+/// Round-robin turn-taking strategy.  Agent order is based on the order
 /// in which they joined .
 /// 
-[Experimental("SKEXP0110")]
 public sealed class SequentialSelectionStrategy : SelectionStrategy
 {
     private int _index = -1;
 
     /// 
-    /// Resets the selection to the initial (first) agent. Agent order is based on the order
+    /// Reset selection to initial/first agent. Agent order is based on the order
     /// in which they joined .
     /// 
     public void Reset() => this._index = -1;
@@ -45,7 +42,7 @@ protected override Task SelectAgentAsync(IReadOnlyList agents, IRe
 
         Agent agent = agents[this._index];
 
-        this.Logger.LogSequentialSelectionStrategySelectedAgent(nameof(NextAsync), this._index, agents.Count, agent.Id, agent.GetDisplayName());
+        this.Logger.LogSequentialSelectionStrategySelectedAgent(nameof(NextAsync), this._index, agents.Count, agent.Id);
 
         return Task.FromResult(agent);
 
diff --git a/dotnet/src/Agents/Core/Chat/TerminationStrategy.cs b/dotnet/src/Agents/Core/Chat/TerminationStrategy.cs
index 4a579a44fdf9..b50f6bd96d11 100644
--- a/dotnet/src/Agents/Core/Chat/TerminationStrategy.cs
+++ b/dotnet/src/Agents/Core/Chat/TerminationStrategy.cs
@@ -1,81 +1,73 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Extensions.Logging;
 using Microsoft.Extensions.Logging.Abstractions;
-using Microsoft.SemanticKernel.Agents.Extensions;
 
 namespace Microsoft.SemanticKernel.Agents.Chat;
 
 /// 
-/// Provides a base strategy class for defining termination criteria for an .
+/// Base strategy class for defining termination criteria for a .
 /// 
-[Experimental("SKEXP0110")]
 public abstract class TerminationStrategy
 {
     /// 
-    /// Specifies a reasonable limit on the number of turns.
+    /// Restrict number of turns to a reasonable number (99).
     /// 
     public const int DefaultMaximumIterations = 99;
 
     /// 
-    /// Gets or sets the maximum number of agent interactions for a given chat invocation.
+    /// The maximum number of agent interactions for a given chat invocation.
+    /// Defaults to: .
     /// 
-    /// 
-    /// The default is .
-    /// 
     public int MaximumIterations { get; set; } = DefaultMaximumIterations;
 
     /// 
-    /// Gets or sets a value that indicates whether 
-    /// is automatically cleared if the caller
+    /// Set to have automatically clear  if caller
     /// proceeds with invocation subsequent to achieving termination criteria.
     /// 
     public bool AutomaticReset { get; set; }
 
     /// 
-    /// Gets or sets the set of agents for which this strategy is applicable.
+    /// Set of agents for which this strategy is applicable.  If not set,
+    /// any agent is evaluated.
     /// 
-    /// 
-    /// The default value is that any agent is evaluated.
-    /// 
     public IReadOnlyList? Agents { get; set; }
 
     /// 
-    /// Gets the  associated with the .
+    /// The  associated with the .
     /// 
     protected internal ILogger Logger { get; internal set; } = NullLogger.Instance;
 
     /// 
-    /// Evaluates termination once  is evaluated.
+    /// Called to evaluate termination once  is evaluated.
     /// 
     protected abstract Task ShouldAgentTerminateAsync(Agent agent, IReadOnlyList history, CancellationToken cancellationToken);
 
     /// 
-    /// Evaluates the input message and determines if the chat has met its completion criteria.
+    /// Evaluate the input message and determine if the chat has met its completion criteria.
     /// 
     /// The agent actively interacting with the chat.
-    /// The most recent message.
+    /// The most recent message
     /// The  to monitor for cancellation requests. The default is .
-    ///  if the chat loop should be terminated.
+    /// True to terminate chat loop.
     public async Task ShouldTerminateAsync(Agent agent, IReadOnlyList history, CancellationToken cancellationToken = default)
     {
-        this.Logger.LogTerminationStrategyEvaluatingCriteria(nameof(ShouldTerminateAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
+        this.Logger.LogTerminationStrategyEvaluatingCriteria(nameof(ShouldTerminateAsync), agent.GetType(), agent.Id);
 
         // `Agents` must contain `agent`, if `Agents` not empty.
         if ((this.Agents?.Count ?? 0) > 0 && !this.Agents!.Any(a => a.Id == agent.Id))
         {
-            this.Logger.LogTerminationStrategyAgentOutOfScope(nameof(ShouldTerminateAsync), agent.GetType(), agent.Id, agent.GetDisplayName());
+            this.Logger.LogTerminationStrategyAgentOutOfScope(nameof(ShouldTerminateAsync), agent.GetType(), agent.Id);
 
             return false;
         }
 
         bool shouldTerminate = await this.ShouldAgentTerminateAsync(agent, history, cancellationToken).ConfigureAwait(false);
 
-        this.Logger.LogTerminationStrategyEvaluatedCriteria(nameof(ShouldTerminateAsync), agent.GetType(), agent.Id, agent.GetDisplayName(), shouldTerminate);
+        this.Logger.LogTerminationStrategyEvaluatedCriteria(nameof(ShouldTerminateAsync), agent.GetType(), agent.Id, shouldTerminate);
 
         return shouldTerminate;
     }
diff --git a/dotnet/src/Agents/Core/ChatCompletionAgent.cs b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
index ed3f1ce3d2c6..770153bbfb1e 100644
--- a/dotnet/src/Agents/Core/ChatCompletionAgent.cs
+++ b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
@@ -1,26 +1,21 @@
 // Copyright (c) Microsoft. All rights reserved.
-
-using System;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Runtime.CompilerServices;
 using System.Text;
 using System.Text.Json;
 using System.Threading;
 using System.Threading.Tasks;
-using Microsoft.SemanticKernel.Agents.Extensions;
 using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Diagnostics;
 using Microsoft.SemanticKernel.Services;
 
 namespace Microsoft.SemanticKernel.Agents;
 
 /// 
-/// Represents a  specialization based on .
+/// A  specialization based on .
 /// 
 /// 
-/// NOTE: Enable  for agent plugins
-/// ().
+/// NOTE: Enable  for agent plugins.
+/// ()
 /// 
 public sealed class ChatCompletionAgent : ChatHistoryKernelAgent
 {
@@ -33,105 +28,23 @@ public ChatCompletionAgent() { }
     /// Initializes a new instance of the  class from
     /// a .
     /// 
-    /// The prompt template configuration.
-    /// The prompt template factory used to produce the  for the agent.
+    /// Prompt template configuration
+    /// An optional factory to produce the  for the agent
+    /// 
+    /// When the 'templateFactory' parameter is not provided, the default  is used.
+    /// 
     public ChatCompletionAgent(
         PromptTemplateConfig templateConfig,
-        IPromptTemplateFactory templateFactory)
+        IPromptTemplateFactory? templateFactory = null)
     {
         this.Name = templateConfig.Name;
         this.Description = templateConfig.Description;
         this.Instructions = templateConfig.Template;
-        this.Arguments = new(templateConfig.ExecutionSettings.Values);
-        this.Template = templateFactory.Create(templateConfig);
-    }
-
-    /// 
-    /// Gets the role used for agent instructions.  Defaults to "system".
-    /// 
-    /// 
-    /// Certain versions of "O*" series (deep reasoning) models require the instructions
-    /// to be provided as "developer" role.  Other versions support neither role and
-    /// an agent targeting such a model cannot provide instructions.  Agent functionality
-    /// will be dictated entirely by the provided plugins.
-    /// 
-    public AuthorRole InstructionsRole { get; init; } = AuthorRole.System;
-
-    /// 
-    public override IAsyncEnumerable InvokeAsync(
-        ChatHistory history,
-        KernelArguments? arguments = null,
-        Kernel? kernel = null,
-        CancellationToken cancellationToken = default)
-    {
-        string agentName = this.GetDisplayName();
-
-        return ActivityExtensions.RunWithActivityAsync(
-            () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, agentName, this.Description),
-            () => this.InternalInvokeAsync(agentName, history, arguments, kernel, cancellationToken),
-            cancellationToken);
+        this.Template = templateFactory?.Create(templateConfig);
     }
 
     /// 
-    public override IAsyncEnumerable InvokeStreamingAsync(
-        ChatHistory history,
-        KernelArguments? arguments = null,
-        Kernel? kernel = null,
-        CancellationToken cancellationToken = default)
-    {
-        string agentName = this.GetDisplayName();
-
-        return ActivityExtensions.RunWithActivityAsync(
-            () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, agentName, this.Description),
-            () => this.InternalInvokeStreamingAsync(agentName, history, arguments, kernel, cancellationToken),
-            cancellationToken);
-    }
-
-    /// 
-    [Experimental("SKEXP0110")]
-    protected override Task RestoreChannelAsync(string channelState, CancellationToken cancellationToken)
-    {
-        ChatHistory history =
-            JsonSerializer.Deserialize(channelState) ??
-            throw new KernelException("Unable to restore channel: invalid state.");
-        return Task.FromResult(new ChatHistoryChannel(history));
-    }
-
-    internal static (IChatCompletionService service, PromptExecutionSettings? executionSettings) GetChatCompletionService(Kernel kernel, KernelArguments? arguments)
-    {
-        (IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) =
-            kernel.ServiceSelector.SelectAIService(
-                kernel,
-                arguments?.ExecutionSettings,
-                arguments ?? []);
-
-        return (chatCompletionService, executionSettings);
-    }
-
-    #region private
-
-    private async Task SetupAgentChatHistoryAsync(
-        IReadOnlyList history,
-        KernelArguments? arguments,
-        Kernel kernel,
-        CancellationToken cancellationToken)
-    {
-        ChatHistory chat = [];
-
-        string? instructions = await this.FormatInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
-
-        if (!string.IsNullOrWhiteSpace(instructions))
-        {
-            chat.Add(new ChatMessageContent(this.InstructionsRole, instructions) { AuthorName = this.Name });
-        }
-
-        chat.AddRange(history);
-
-        return chat;
-    }
-
-    private async IAsyncEnumerable InternalInvokeAsync(
-        string agentName,
+    public override async IAsyncEnumerable InvokeAsync(
         ChatHistory history,
         KernelArguments? arguments = null,
         Kernel? kernel = null,
@@ -146,9 +59,7 @@ private async IAsyncEnumerable InternalInvokeAsync(
 
         int messageCount = chat.Count;
 
-        Type serviceType = chatCompletionService.GetType();
-
-        this.Logger.LogAgentChatServiceInvokingAgent(nameof(InvokeAsync), this.Id, agentName, serviceType);
+        this.Logger.LogAgentChatServiceInvokingAgent(nameof(InvokeAsync), this.Id, chatCompletionService.GetType());
 
         IReadOnlyList messages =
             await chatCompletionService.GetChatMessageContentsAsync(
@@ -157,7 +68,7 @@ await chatCompletionService.GetChatMessageContentsAsync(
                 kernel,
                 cancellationToken).ConfigureAwait(false);
 
-        this.Logger.LogAgentChatServiceInvokedAgent(nameof(InvokeAsync), this.Id, agentName, serviceType, messages.Count);
+        this.Logger.LogAgentChatServiceInvokedAgent(nameof(InvokeAsync), this.Id, chatCompletionService.GetType(), messages.Count);
 
         // Capture mutated messages related function calling / tools
         for (int messageIndex = messageCount; messageIndex < chat.Count; messageIndex++)
@@ -177,8 +88,8 @@ await chatCompletionService.GetChatMessageContentsAsync(
         }
     }
 
-    private async IAsyncEnumerable InternalInvokeStreamingAsync(
-        string agentName,
+    /// 
+    public override async IAsyncEnumerable InvokeStreamingAsync(
         ChatHistory history,
         KernelArguments? arguments = null,
         Kernel? kernel = null,
@@ -193,9 +104,7 @@ private async IAsyncEnumerable InternalInvokeStream
 
         int messageCount = chat.Count;
 
-        Type serviceType = chatCompletionService.GetType();
-
-        this.Logger.LogAgentChatServiceInvokingAgent(nameof(InvokeAsync), this.Id, agentName, serviceType);
+        this.Logger.LogAgentChatServiceInvokingAgent(nameof(InvokeAsync), this.Id, chatCompletionService.GetType());
 
         IAsyncEnumerable messages =
             chatCompletionService.GetStreamingChatMessageContentsAsync(
@@ -204,7 +113,7 @@ private async IAsyncEnumerable InternalInvokeStream
                 kernel,
                 cancellationToken);
 
-        this.Logger.LogAgentChatServiceInvokedStreamingAgent(nameof(InvokeAsync), this.Id, agentName, serviceType);
+        this.Logger.LogAgentChatServiceInvokedStreamingAgent(nameof(InvokeAsync), this.Id, chatCompletionService.GetType());
 
         AuthorRole? role = null;
         StringBuilder builder = new();
@@ -236,5 +145,45 @@ private async IAsyncEnumerable InternalInvokeStream
         }
     }
 
-    #endregion
+    /// 
+    protected override Task RestoreChannelAsync(string channelState, CancellationToken cancellationToken)
+    {
+        ChatHistory history =
+            JsonSerializer.Deserialize(channelState) ??
+            throw new KernelException("Unable to restore channel: invalid state.");
+        return Task.FromResult(new ChatHistoryChannel(history));
+    }
+
+    internal static (IChatCompletionService service, PromptExecutionSettings? executionSettings) GetChatCompletionService(Kernel kernel, KernelArguments? arguments)
+    {
+        // Need to provide a KernelFunction to the service selector as a container for the execution-settings.
+        KernelFunction nullPrompt = KernelFunctionFactory.CreateFromPrompt("placeholder", arguments?.ExecutionSettings?.Values);
+        (IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) =
+            kernel.ServiceSelector.SelectAIService(
+                kernel,
+                nullPrompt,
+                arguments ?? []);
+
+        return (chatCompletionService, executionSettings);
+    }
+
+    private async Task SetupAgentChatHistoryAsync(
+        IReadOnlyList history,
+        KernelArguments? arguments,
+        Kernel kernel,
+        CancellationToken cancellationToken)
+    {
+        ChatHistory chat = [];
+
+        string? instructions = await this.FormatInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
+
+        if (!string.IsNullOrWhiteSpace(instructions))
+        {
+            chat.Add(new ChatMessageContent(AuthorRole.System, instructions) { AuthorName = this.Name });
+        }
+
+        chat.AddRange(history);
+
+        return chat;
+    }
 }
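A brief sketch (illustrative only; the `kernel` instance and template text are assumptions) of the revised constructor, which now accepts an optional template factory and leaves Template unset when none is supplied:

    PromptTemplateConfig templateConfig = new("You are a helpful assistant.")
    {
        Name = "Helper",
        Description = "Sample agent",
    };

    // With no factory, Instructions carries the raw template and Template stays null.
    ChatCompletionAgent agent = new(templateConfig)
    {
        Kernel = kernel,
    };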
diff --git a/dotnet/src/Agents/Core/ChatHistoryChannel.cs b/dotnet/src/Agents/Core/ChatHistoryChannel.cs
index 4b44a5cd9fec..78345f084b3f 100644
--- a/dotnet/src/Agents/Core/ChatHistoryChannel.cs
+++ b/dotnet/src/Agents/Core/ChatHistoryChannel.cs
@@ -1,7 +1,5 @@
 // Copyright (c) Microsoft. All rights reserved.
-using System;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Runtime.CompilerServices;
 using System.Text.Json;
@@ -14,20 +12,10 @@
 namespace Microsoft.SemanticKernel.Agents;
 
 /// 
-/// Represents an  specialization that acts upon a .
+/// A  specialization that acts upon a .
 /// 
-[Experimental("SKEXP0110")]
 internal sealed class ChatHistoryChannel : AgentChannel
 {
-    // Supported content types for  when
-    //  is empty.
-    private static readonly HashSet s_contentMap =
-        [
-            typeof(FunctionCallContent),
-            typeof(FunctionResultContent),
-            typeof(ImageContent),
-        ];
-
     private readonly ChatHistory _history;
 
     /// 
@@ -117,11 +105,7 @@ protected override async IAsyncEnumerable InvokeStr
     /// 
     protected override Task ReceiveAsync(IEnumerable history, CancellationToken cancellationToken)
     {
-        // Only add messages with valid content or supported content-items.
-        this._history.AddRange(
-            history.Where(
-                m => !string.IsNullOrEmpty(m.Content) ||
-                m.Items.Where(i => s_contentMap.Contains(i.GetType())).Any()));
+        this._history.AddRange(history);
 
         return Task.CompletedTask;
     }
diff --git a/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs b/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs
index f3572a75d3c2..0eee62920027 100644
--- a/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs
+++ b/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs
@@ -1,42 +1,40 @@
 // Copyright (c) Microsoft. All rights reserved.
-
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Globalization;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Agents.History;
 using Microsoft.SemanticKernel.ChatCompletion;
 
 namespace Microsoft.SemanticKernel.Agents;
 
 /// 
-/// Represents a  specialization bound to a .
+/// A  specialization bound to a .
 /// 
 /// 
-/// NOTE: Enable  for agent plugins
-/// ().
+/// NOTE: Enable  for agent plugins.
+/// ()
 /// 
 public abstract class ChatHistoryKernelAgent : KernelAgent
 {
     /// 
-    /// Gets an optional  to reduce the history.
+    /// Optionally specify a  to reduce the history.
     /// 
     /// 
-    /// The reducer is automatically applied to the history before invoking the agent, only when using
-    /// an . It must be explicitly applied via .
+    /// This is automatically applied to the history before invoking the agent, only when using
+    /// an .  It must be explicitly applied via .
     /// 
-    [Experimental("SKEXP0110")]
     public IChatHistoryReducer? HistoryReducer { get; init; }
 
     /// 
-    /// Invokes the assistant to respond to the provided history.
+    /// Invoke the assistant to respond to the provided history.
     /// 
     /// The conversation history.
     /// Optional arguments to pass to the agents's invocation, including any .
     /// The  containing services, plugins, and other state for use by the agent.
     /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of response messages.
+    /// Asynchronous enumeration of response messages.
     public abstract IAsyncEnumerable InvokeAsync(
         ChatHistory history,
         KernelArguments? arguments = null,
@@ -44,13 +42,13 @@ public abstract IAsyncEnumerable InvokeAsync(
         CancellationToken cancellationToken = default);
 
     /// 
-    /// Invokes the assistant to respond to the provided history with streaming response.
+    /// Invoke the assistant to respond to the provided history with streaming response.
     /// 
     /// The conversation history.
     /// Optional arguments to pass to the agents's invocation, including any .
     /// The  containing services, plugins, and other state for use by the agent.
     /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of response messages.
+    /// Asynchronous enumeration of response messages.
     public abstract IAsyncEnumerable InvokeStreamingAsync(
         ChatHistory history,
         KernelArguments? arguments = null,
@@ -58,17 +56,15 @@ public abstract IAsyncEnumerable InvokeStreamingAsy
         CancellationToken cancellationToken = default);
 
     /// 
-    /// Reduces the provided history.
+    /// Reduce the provided history
     /// 
-    /// The source history.
+    /// The source history
     /// The  to monitor for cancellation requests. The default is .
-    ///  if reduction occurred.
-    [Experimental("SKEXP0110")]
+    /// True if reduction has occurred.
     public Task ReduceAsync(ChatHistory history, CancellationToken cancellationToken = default) =>
-        history.ReduceInPlaceAsync(this.HistoryReducer, cancellationToken);
+        history.ReduceAsync(this.HistoryReducer, cancellationToken);
 
     /// 
-    [Experimental("SKEXP0110")]
     protected sealed override IEnumerable GetChannelKeys()
     {
         yield return typeof(ChatHistoryChannel).FullName!;
@@ -86,13 +82,12 @@ protected sealed override IEnumerable GetChannelKeys()
     }
 
     /// 
-    [Experimental("SKEXP0110")]
     protected sealed override Task CreateChannelAsync(CancellationToken cancellationToken)
     {
         ChatHistoryChannel channel =
             new()
             {
-                Logger = this.ActiveLoggerFactory.CreateLogger()
+                Logger = this.LoggerFactory.CreateLogger()
             };
 
         return Task.FromResult(channel);
diff --git a/dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistoryReducerExtensions.cs b/dotnet/src/Agents/Core/History/ChatHistoryReducerExtensions.cs
similarity index 51%
rename from dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistoryReducerExtensions.cs
rename to dotnet/src/Agents/Core/History/ChatHistoryReducerExtensions.cs
index 0b2de8042f8a..a9fb8ed895ff 100644
--- a/dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistoryReducerExtensions.cs
+++ b/dotnet/src/Agents/Core/History/ChatHistoryReducerExtensions.cs
@@ -1,10 +1,12 @@
 // Copyright (c) Microsoft. All rights reserved.
-
 using System;
 using System.Collections.Generic;
 using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel.ChatCompletion;
 
-namespace Microsoft.SemanticKernel.ChatCompletion;
+namespace Microsoft.SemanticKernel.Agents.History;
 
 /// 
 /// Discrete operations used when reducing chat history.
@@ -17,41 +19,30 @@ internal static class ChatHistoryReducerExtensions
     /// 
     /// Extract a range of messages from the source history.
     /// 
-    /// The source history
+    /// The source history
     /// The index of the first message to extract
     /// The index of the last message to extract
-    /// An optional system message content to include
     /// The optional filter to apply to each message
-    public static IEnumerable Extract(
-        this IReadOnlyList chatHistory,
-        int startIndex,
-        int? finalIndex = null,
-        ChatMessageContent? systemMessage = null,
-        Func? filter = null)
+    public static IEnumerable Extract(this IReadOnlyList history, int startIndex, int? finalIndex = null, Func? filter = null)
     {
-        int maxIndex = chatHistory.Count - 1;
+        int maxIndex = history.Count - 1;
         if (startIndex > maxIndex)
         {
             yield break;
         }
 
-        if (systemMessage is not null)
-        {
-            yield return systemMessage;
-        }
-
         finalIndex ??= maxIndex;
 
         finalIndex = Math.Min(finalIndex.Value, maxIndex);
 
         for (int index = startIndex; index <= finalIndex; ++index)
         {
-            if (filter?.Invoke(chatHistory[index]) ?? false)
+            if (filter?.Invoke(history[index]) ?? false)
             {
                 continue;
             }
 
-            yield return chatHistory[index];
+            yield return history[index];
         }
     }
 
@@ -59,13 +50,13 @@ public static IEnumerable Extract(
     /// Identify the index of the first message that is not a summary message, as indicated by
     /// the presence of the specified metadata key.
     /// 
-    /// The source history
+    /// The source history
     /// The metadata key that identifies a summary message.
-    public static int LocateSummarizationBoundary(this IReadOnlyList chatHistory, string summaryKey)
+    public static int LocateSummarizationBoundary(this IReadOnlyList history, string summaryKey)
     {
-        for (int index = 0; index < chatHistory.Count; ++index)
+        for (int index = 0; index < history.Count; ++index)
         {
-            ChatMessageContent message = chatHistory[index];
+            ChatMessageContent message = history[index];
 
             if (!message.Metadata?.ContainsKey(summaryKey) ?? true)
             {
@@ -73,7 +64,7 @@ public static int LocateSummarizationBoundary(this IReadOnlyList
@@ -84,7 +75,7 @@ public static int LocateSummarizationBoundary(this IReadOnlyList
-    /// The source history
+    /// The source history
     /// The desired message count, should reduction occur.
     /// 
     /// The threshold, beyond targetCount, required to trigger reduction.
@@ -95,19 +86,11 @@ public static int LocateSummarizationBoundary(this IReadOnlyList
-    /// Indicates whether chat history contains system message.
     /// An index that identifies the starting point for a reduced history that does not orphan sensitive content.
-    public static int LocateSafeReductionIndex(
-        this IReadOnlyList chatHistory,
-        int targetCount,
-        int? thresholdCount = null,
-        int offsetCount = 0,
-        bool hasSystemMessage = false)
+    public static int LocateSafeReductionIndex(this IReadOnlyList history, int targetCount, int? thresholdCount = null, int offsetCount = 0)
     {
-        targetCount -= hasSystemMessage ? 1 : 0;
-
         // Compute the index of the truncation threshold
-        int thresholdIndex = chatHistory.Count - (thresholdCount ?? 0) - targetCount;
+        int thresholdIndex = history.Count - (thresholdCount ?? 0) - targetCount;
 
         if (thresholdIndex <= offsetCount)
         {
@@ -116,12 +99,12 @@ public static int LocateSafeReductionIndex(
         }
 
         // Compute the index of truncation target
-        int messageIndex = chatHistory.Count - targetCount;
+        int messageIndex = history.Count - targetCount;
 
         // Skip function related content
         while (messageIndex >= 0)
         {
-            if (!chatHistory[messageIndex].Items.Any(i => i is FunctionCallContent || i is FunctionResultContent))
+            if (!history[messageIndex].Items.Any(i => i is FunctionCallContent || i is FunctionResultContent))
             {
                 break;
             }
@@ -136,7 +119,7 @@ public static int LocateSafeReductionIndex(
         while (messageIndex >= thresholdIndex)
         {
             // A user message provides a superb truncation point
-            if (chatHistory[messageIndex].Role == AuthorRole.User)
+            if (history[messageIndex].Role == AuthorRole.User)
             {
                 return messageIndex;
             }
@@ -147,4 +130,54 @@ public static int LocateSafeReductionIndex(
         // No user message found, fallback to the earliest non-function related message
         return targetIndex;
     }
+
+    /// 
+    /// Process history reduction and mutate the provided history.
+    /// 
+    /// The source history
+    /// The target reducer
+    /// The  to monitor for cancellation requests. The default is .
+    /// True if reduction has occurred.
+    /// 
+    /// Using the existing  for a reduction in collection size eliminates the need
+    /// for re-allocation (of memory).
+    /// 
+    public static async Task ReduceAsync(this ChatHistory history, IChatHistoryReducer? reducer, CancellationToken cancellationToken)
+    {
+        if (reducer == null)
+        {
+            return false;
+        }
+
+        IEnumerable? reducedHistory = await reducer.ReduceAsync(history, cancellationToken).ConfigureAwait(false);
+
+        if (reducedHistory == null)
+        {
+            return false;
+        }
+
+        // Mutate the history in place
+        ChatMessageContent[] reduced = reducedHistory.ToArray();
+        history.Clear();
+        history.AddRange(reduced);
+
+        return true;
+    }
+
+    /// 
+    /// Reduce the history using the provided reducer without mutating the source history.
+    /// 
+    /// The source history
+    /// The target reducer
+    /// The  to monitor for cancellation requests. The default is .
+    public static async Task> ReduceAsync(this IReadOnlyList history, IChatHistoryReducer? reducer, CancellationToken cancellationToken)
+    {
+        if (reducer != null)
+        {
+            IEnumerable? reducedHistory = await reducer.ReduceAsync(history, cancellationToken).ConfigureAwait(false);
+            history = reducedHistory?.ToArray() ?? history;
+        }
+
+        return history;
+    }
 }
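To illustrate (a sketch, assuming a configured reducer; the extension class is internal, so this applies within the Agents assembly), the mutating overload rewrites the history in place and reports whether any reduction happened:

    ChatHistory history = [];
    IChatHistoryReducer reducer = new ChatHistoryTruncationReducer(targetCount: 10, thresholdCount: 5);

    // Returns false when the reducer is null or no reduction was necessary.
    bool reduced = await history.ReduceAsync(reducer, CancellationToken.None);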
diff --git a/dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistorySummarizationReducer.cs b/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs
similarity index 79%
rename from dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistorySummarizationReducer.cs
rename to dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs
index 23d7e4286015..a65cefbea98b 100644
--- a/dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistorySummarizationReducer.cs
+++ b/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs
@@ -1,13 +1,12 @@
 // Copyright (c) Microsoft. All rights reserved.
-
 using System;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
+using Microsoft.SemanticKernel.ChatCompletion;
 
-namespace Microsoft.SemanticKernel.ChatCompletion;
+namespace Microsoft.SemanticKernel.Agents.History;
 
 /// 
 /// Reduce the chat history by summarizing messages past the target message count.
@@ -18,7 +17,6 @@ namespace Microsoft.SemanticKernel.ChatCompletion;
 /// is provided (recommended), reduction will scan within the threshold window in an attempt to
 /// avoid orphaning a user message from an assistant response.
 /// 
-[Experimental("SKEXP0001")]
 public class ChatHistorySummarizationReducer : IChatHistoryReducer
 {
     /// 
@@ -65,41 +63,14 @@ Provide a concise and complete summarization of the entire dialog that does not
     /// 
     public bool UseSingleSummary { get; init; } = true;
 
-    /// 
-    /// Initializes a new instance of the  class.
-    /// 
-    /// A  instance to be used for summarization.
-    /// The desired number of target messages after reduction.
-    /// An optional number of messages beyond the 'targetCount' that must be present in order to trigger reduction/
-    /// 
-    /// While the 'thresholdCount' is optional, it is recommended to provided so that reduction is not triggered
-    /// for every incremental addition to the chat history beyond the 'targetCount'.
-    /// >
-    public ChatHistorySummarizationReducer(IChatCompletionService service, int targetCount, int? thresholdCount = null)
-    {
-        Verify.NotNull(service, nameof(service));
-        Verify.True(targetCount > 0, "Target message count must be greater than zero.");
-        Verify.True(!thresholdCount.HasValue || thresholdCount > 0, "The reduction threshold length must be greater than zero.");
-
-        this._service = service;
-        this._targetCount = targetCount;
-        this._thresholdCount = thresholdCount ?? 0;
-    }
-
     /// 
-    public async Task?> ReduceAsync(IReadOnlyList chatHistory, CancellationToken cancellationToken = default)
+    public async Task?> ReduceAsync(IReadOnlyList history, CancellationToken cancellationToken = default)
     {
-        var systemMessage = chatHistory.FirstOrDefault(l => l.Role == AuthorRole.System);
-
         // Identify where summary messages end and regular history begins
-        int insertionPoint = chatHistory.LocateSummarizationBoundary(SummaryMetadataKey);
+        int insertionPoint = history.LocateSummarizationBoundary(SummaryMetadataKey);
 
         // First pass to determine the truncation index
-        int truncationIndex = chatHistory.LocateSafeReductionIndex(
-            this._targetCount,
-            this._thresholdCount,
-            insertionPoint,
-            hasSystemMessage: systemMessage is not null);
+        int truncationIndex = history.LocateSafeReductionIndex(this._targetCount, this._thresholdCount, insertionPoint);
 
         IEnumerable? truncatedHistory = null;
 
@@ -107,20 +78,20 @@ public ChatHistorySummarizationReducer(IChatCompletionService service, int targe
         {
             // Second pass to extract history for summarization
             IEnumerable summarizedHistory =
-                chatHistory.Extract(
+                history.Extract(
                     this.UseSingleSummary ? 0 : insertionPoint,
                     truncationIndex,
-                    filter: (m) => m.Items.Any(i => i is FunctionCallContent || i is FunctionResultContent));
+                    (m) => m.Items.Any(i => i is FunctionCallContent || i is FunctionResultContent));
 
             try
             {
                 // Summarize
                 ChatHistory summarizationRequest = [.. summarizedHistory, new ChatMessageContent(AuthorRole.System, this.SummarizationInstructions)];
-                ChatMessageContent summaryMessage = await this._service.GetChatMessageContentAsync(summarizationRequest, cancellationToken: cancellationToken).ConfigureAwait(false);
-                summaryMessage.Metadata = new Dictionary { { SummaryMetadataKey, true } };
+                ChatMessageContent summary = await this._service.GetChatMessageContentAsync(summarizationRequest, cancellationToken: cancellationToken).ConfigureAwait(false);
+                summary.Metadata = new Dictionary { { SummaryMetadataKey, true } };
 
                 // Assembly the summarized history
-                truncatedHistory = AssemblySummarizedHistory(summaryMessage, systemMessage);
+                truncatedHistory = AssemblySummarizedHistory(summary);
             }
             catch
             {
@@ -134,33 +105,49 @@ public ChatHistorySummarizationReducer(IChatCompletionService service, int targe
         return truncatedHistory;
 
         // Inner function to assemble the summarized history
-        IEnumerable AssemblySummarizedHistory(ChatMessageContent? summaryMessage, ChatMessageContent? systemMessage)
+        IEnumerable AssemblySummarizedHistory(ChatMessageContent? summary)
         {
-            if (systemMessage is not null)
-            {
-                yield return systemMessage;
-            }
-
             if (insertionPoint > 0 && !this.UseSingleSummary)
             {
                 for (int index = 0; index <= insertionPoint - 1; ++index)
                 {
-                    yield return chatHistory[index];
+                    yield return history[index];
                 }
             }
 
-            if (summaryMessage is not null)
+            if (summary != null)
             {
-                yield return summaryMessage;
+                yield return summary;
             }
 
-            for (int index = truncationIndex; index < chatHistory.Count; ++index)
+            for (int index = truncationIndex; index < history.Count; ++index)
             {
-                yield return chatHistory[index];
+                yield return history[index];
             }
         }
     }
 
+    /// 
+    /// Initializes a new instance of the  class.
+    /// 
+    /// A  instance to be used for summarization.
+    /// The desired number of target messages after reduction.
+    /// An optional number of messages beyond the 'targetCount' that must be present in order to trigger reduction.
+    /// 
+    /// While the 'thresholdCount' is optional, it is recommended to provide one so that reduction is not triggered
+    /// for every incremental addition to the chat history beyond the 'targetCount'.
+    /// 
+    public ChatHistorySummarizationReducer(IChatCompletionService service, int targetCount, int? thresholdCount = null)
+    {
+        Verify.NotNull(service, nameof(service));
+        Verify.True(targetCount > 0, "Target message count must be greater than zero.");
+        Verify.True(!thresholdCount.HasValue || thresholdCount > 0, "The reduction threshold length must be greater than zero.");
+
+        this._service = service;
+        this._targetCount = targetCount;
+        this._thresholdCount = thresholdCount ?? 0;
+    }
+
     /// 
     public override bool Equals(object? obj)
     {
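For orientation (illustrative; assumes `kernel` is configured with a chat-completion service), the summarization reducer is constructed with the service used to produce the summary message:

    IChatCompletionService service = kernel.GetRequiredService<IChatCompletionService>();
    IChatHistoryReducer reducer = new ChatHistorySummarizationReducer(service, targetCount: 10, thresholdCount: 10);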
diff --git a/dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistoryTruncationReducer.cs b/dotnet/src/Agents/Core/History/ChatHistoryTruncationReducer.cs
similarity index 82%
rename from dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistoryTruncationReducer.cs
rename to dotnet/src/Agents/Core/History/ChatHistoryTruncationReducer.cs
index de9ea8037b32..be9ca7868f87 100644
--- a/dotnet/src/SemanticKernel.Core/AI/ChatCompletion/ChatHistoryTruncationReducer.cs
+++ b/dotnet/src/Agents/Core/History/ChatHistoryTruncationReducer.cs
@@ -1,13 +1,10 @@
 // Copyright (c) Microsoft. All rights reserved.
-
 using System;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
-using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
 
-namespace Microsoft.SemanticKernel.ChatCompletion;
+namespace Microsoft.SemanticKernel.Agents.History;
 
 /// 
 /// Truncate the chat history to the target message count.
@@ -18,9 +15,25 @@ namespace Microsoft.SemanticKernel.ChatCompletion;
 /// is provided (recommended), reduction will scan within the threshold window in an attempt to
 /// avoid orphaning a user message from an assistant response.
 /// 
-[Experimental("SKEXP0001")]
 public class ChatHistoryTruncationReducer : IChatHistoryReducer
 {
+    /// 
+    public Task?> ReduceAsync(IReadOnlyList history, CancellationToken cancellationToken = default)
+    {
+        // First pass to determine the truncation index
+        int truncationIndex = history.LocateSafeReductionIndex(this._targetCount, this._thresholdCount);
+
+        IEnumerable? truncatedHistory = null;
+
+        if (truncationIndex > 0)
+        {
+            // Second pass to truncate the history
+            truncatedHistory = history.Extract(truncationIndex);
+        }
+
+        return Task.FromResult(truncatedHistory);
+    }
+
     /// 
     /// Initializes a new instance of the  class.
     /// 
@@ -40,25 +53,6 @@ public ChatHistoryTruncationReducer(int targetCount, int? thresholdCount = null)
         this._thresholdCount = thresholdCount ?? 0;
     }
 
-    /// 
-    public Task?> ReduceAsync(IReadOnlyList chatHistory, CancellationToken cancellationToken = default)
-    {
-        var systemMessage = chatHistory.FirstOrDefault(l => l.Role == AuthorRole.System);
-
-        // First pass to determine the truncation index
-        int truncationIndex = chatHistory.LocateSafeReductionIndex(this._targetCount, this._thresholdCount, hasSystemMessage: systemMessage is not null);
-
-        IEnumerable? truncatedHistory = null;
-
-        if (truncationIndex > 0)
-        {
-            // Second pass to truncate the history
-            truncatedHistory = chatHistory.Extract(truncationIndex, systemMessage: systemMessage);
-        }
-
-        return Task.FromResult(truncatedHistory);
-    }
-
     /// 
     public override bool Equals(object? obj)
     {
diff --git a/dotnet/src/Agents/Core/History/IChatHistoryReducer.cs b/dotnet/src/Agents/Core/History/IChatHistoryReducer.cs
new file mode 100644
index 000000000000..884fbcf42bc1
--- /dev/null
+++ b/dotnet/src/Agents/Core/History/IChatHistoryReducer.cs
@@ -0,0 +1,32 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace Microsoft.SemanticKernel.Agents.History;
+
+/// 
+/// Defines a contract for reducing chat history.
+/// 
+public interface IChatHistoryReducer
+{
+    /// 
+    /// Each reducer shall override equality evaluation so that different reducers
+    /// of the same configuration can be evaluated for equivalency.
+    /// 
+    bool Equals(object? obj);
+
+    /// 
+    /// Each reducer shall implement custom hash-code generation so that different reducers
+    /// of the same configuration can be evaluated for equivalency.
+    /// 
+    int GetHashCode();
+
+    /// 
+    /// Optionally reduces the chat history.
+    /// 
+    /// The source history (which may have been previously reduced)
+    /// The  to monitor for cancellation requests. The default is .
+    /// The reduced history, or 'null' if no reduction has occurred
+    Task?> ReduceAsync(IReadOnlyList history, CancellationToken cancellationToken = default);
+}
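To make the contract concrete, a minimal sketch of a custom reducer (hypothetical type, not part of the patch) that keeps the last N messages and supplies the equality members the interface calls out:

    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.SemanticKernel;
    using Microsoft.SemanticKernel.Agents.History;

    internal sealed class TailChatHistoryReducer : IChatHistoryReducer
    {
        private readonly int _keepLast;

        public TailChatHistoryReducer(int keepLast) => this._keepLast = keepLast;

        public Task<IEnumerable<ChatMessageContent>?> ReduceAsync(IReadOnlyList<ChatMessageContent> history, CancellationToken cancellationToken = default)
        {
            // Return null when no reduction occurred, per the interface contract.
            IEnumerable<ChatMessageContent>? reduced =
                history.Count <= this._keepLast
                    ? null
                    : history.Skip(history.Count - this._keepLast).ToArray();

            return Task.FromResult(reduced);
        }

        public override bool Equals(object? obj) =>
            obj is TailChatHistoryReducer other && other._keepLast == this._keepLast;

        public override int GetHashCode() => HashCode.Combine(nameof(TailChatHistoryReducer), this._keepLast);
    }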
diff --git a/dotnet/src/Agents/Core/Internal/ChatMessageForPrompt.cs b/dotnet/src/Agents/Core/Internal/ChatMessageForPrompt.cs
index ecedad3c04af..8d970988466b 100644
--- a/dotnet/src/Agents/Core/Internal/ChatMessageForPrompt.cs
+++ b/dotnet/src/Agents/Core/Internal/ChatMessageForPrompt.cs
@@ -24,9 +24,7 @@ internal sealed class ChatMessageForPrompt(ChatMessageContent message)
     /// The referenced  property.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-#pragma warning disable SKEXP0001
     public string? Name => message.AuthorName;
-#pragma warning restore SKEXP0001
 
     /// 
     /// The referenced  property.
diff --git a/dotnet/src/Agents/Core/Logging/AgentGroupChatLogMessages.cs b/dotnet/src/Agents/Core/Logging/AgentGroupChatLogMessages.cs
index 59835f576c4f..03b9d27f1c8d 100644
--- a/dotnet/src/Agents/Core/Logging/AgentGroupChatLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/AgentGroupChatLogMessages.cs
@@ -4,7 +4,6 @@
 using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using Microsoft.Extensions.Logging;
-using Microsoft.SemanticKernel.Agents.Extensions;
 
 namespace Microsoft.SemanticKernel.Agents;
 
@@ -18,7 +17,6 @@ namespace Microsoft.SemanticKernel.Agents;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
-[Experimental("SKEXP0110")]
 internal static partial class AgentGroupChatLogMessages
 {
     /// 
@@ -27,13 +25,12 @@ internal static partial class AgentGroupChatLogMessages
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Debug,
-        Message = "[{MethodName}] Invoking chat: {AgentType}: {AgentId}/{AgentName}")]
+        Message = "[{MethodName}] Invoking chat: {AgentType}: {AgentId}")]
     public static partial void LogAgentGroupChatInvokingAgent(
         this ILogger logger,
         string methodName,
         Type agentType,
-        string agentId,
-        string agentName);
+        string agentId);
 
     /// 
     /// Logs  invoking agents (started).
@@ -43,7 +40,6 @@ public static partial void LogAgentGroupChatInvokingAgent(
             logLevel: LogLevel.Debug,
             eventId: 0,
             "[{MethodName}] Invoking chat: {Agents}");
-
     public static void LogAgentGroupChatInvokingAgents(
         this ILogger logger,
         string methodName,
@@ -51,9 +47,7 @@ public static void LogAgentGroupChatInvokingAgents(
     {
         if (logger.IsEnabled(LogLevel.Debug))
         {
-            var agentsMessage = string.Join(", ", agents.Select(a => $"{a.GetType()}:{a.Id}/{a.GetDisplayName()}"));
-
-            s_logAgentGroupChatInvokingAgents(logger, methodName, agentsMessage, null);
+            s_logAgentGroupChatInvokingAgents(logger, methodName, string.Join(", ", agents.Select(a => $"{a.GetType()}:{a.Id}")), null);
         }
     }
 
@@ -87,13 +81,12 @@ public static partial void LogAgentGroupChatNoAgentSelected(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Information,
-        Message = "[{MethodName}] Agent selected {AgentType}: {AgentId}/{AgentName} by {StrategyType}")]
+        Message = "[{MethodName}] Agent selected {AgentType}: {AgentId} by {StrategyType}")]
     public static partial void LogAgentGroupChatSelectedAgent(
         this ILogger logger,
         string methodName,
         Type agentType,
         string agentId,
-        string agentName,
         Type strategyType);
 
     /// 
diff --git a/dotnet/src/Agents/Core/Logging/AggregatorTerminationStrategyLogMessages.cs b/dotnet/src/Agents/Core/Logging/AggregatorTerminationStrategyLogMessages.cs
index de2e18d63d8c..777ec8806ec7 100644
--- a/dotnet/src/Agents/Core/Logging/AggregatorTerminationStrategyLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/AggregatorTerminationStrategyLogMessages.cs
@@ -14,7 +14,6 @@ namespace Microsoft.SemanticKernel.Agents.Chat;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
-[Experimental("SKEXP0110")]
 internal static partial class AggregatorTerminationStrategyLogMessages
 {
     /// 
diff --git a/dotnet/src/Agents/Core/Logging/ChatCompletionAgentLogMessages.cs b/dotnet/src/Agents/Core/Logging/ChatCompletionAgentLogMessages.cs
index e34a6d102b8f..038c19359cc8 100644
--- a/dotnet/src/Agents/Core/Logging/ChatCompletionAgentLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/ChatCompletionAgentLogMessages.cs
@@ -23,12 +23,11 @@ internal static partial class ChatCompletionAgentLogMessages
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Debug,
-        Message = "[{MethodName}] Agent {AgentId}/{AgentName} Invoking service {ServiceType}.")]
+        Message = "[{MethodName}] Agent #{AgentId} Invoking service {ServiceType}.")]
     public static partial void LogAgentChatServiceInvokingAgent(
         this ILogger logger,
         string methodName,
         string agentId,
-        string agentName,
         Type serviceType);
 
     /// 
@@ -37,12 +36,11 @@ public static partial void LogAgentChatServiceInvokingAgent(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Information,
-        Message = "[{MethodName}] Agent {AgentId}/{AgentName} Invoked service {ServiceType} with message count: {MessageCount}.")]
+        Message = "[{MethodName}] Agent #{AgentId} Invoked service {ServiceType} with message count: {MessageCount}.")]
     public static partial void LogAgentChatServiceInvokedAgent(
         this ILogger logger,
         string methodName,
         string agentId,
-        string agentName,
         Type serviceType,
         int messageCount);
 
@@ -52,11 +50,10 @@ public static partial void LogAgentChatServiceInvokedAgent(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Information,
-        Message = "[{MethodName}] Agent {AgentId}/{AgentName} Invoked service {ServiceType}.")]
+        Message = "[{MethodName}] Agent #{AgentId} Invoked service {ServiceType}.")]
     public static partial void LogAgentChatServiceInvokedStreamingAgent(
         this ILogger logger,
         string methodName,
         string agentId,
-        string agentName,
         Type serviceType);
 }
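
The [LoggerMessage]-attributed partial methods above are filled in by the logging source generator at compile time, and after this change they carry only the agent id (the {AgentName} placeholder is gone). A minimal call-site sketch from inside the Agents.Core assembly; the method name, id, and service type below are placeholders, not values taken from this patch:

    // Sketch only: LogAgentChatServiceInvokingAgent is internal to Microsoft.SemanticKernel.Agents.Core.
    ILogger logger = this.Logger;                    // hypothetical logger held by the calling agent
    logger.LogAgentChatServiceInvokingAgent(
        nameof(InvokeAsync),                         // {MethodName}
        this.Id,                                     // {AgentId}; no separate agent name is logged anymore
        typeof(IChatCompletionService));             // {ServiceType}
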
diff --git a/dotnet/src/Agents/Core/Logging/KernelFunctionSelectionStrategyLogMessages.cs b/dotnet/src/Agents/Core/Logging/KernelFunctionSelectionStrategyLogMessages.cs
index 0da707a0c096..c846f5e2534e 100644
--- a/dotnet/src/Agents/Core/Logging/KernelFunctionSelectionStrategyLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/KernelFunctionSelectionStrategyLogMessages.cs
@@ -15,7 +15,6 @@ namespace Microsoft.SemanticKernel.Agents.Chat;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
-[Experimental("SKEXP0110")]
 internal static partial class KernelFunctionStrategyLogMessages
 {
     /// 
diff --git a/dotnet/src/Agents/Core/Logging/KernelFunctionTerminationStrategyLogMessages.cs b/dotnet/src/Agents/Core/Logging/KernelFunctionTerminationStrategyLogMessages.cs
index bd110c54fc8c..61a4dea167b5 100644
--- a/dotnet/src/Agents/Core/Logging/KernelFunctionTerminationStrategyLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/KernelFunctionTerminationStrategyLogMessages.cs
@@ -15,7 +15,6 @@ namespace Microsoft.SemanticKernel.Agents.Chat;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
-[Experimental("SKEXP0110")]
 internal static partial class KernelFunctionTerminationStrategyLogMessages
 {
     /// 
diff --git a/dotnet/src/Agents/Core/Logging/RegExTerminationStrategyLogMessages.cs b/dotnet/src/Agents/Core/Logging/RegExTerminationStrategyLogMessages.cs
index 0f85053bb570..a748158252b7 100644
--- a/dotnet/src/Agents/Core/Logging/RegExTerminationStrategyLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/RegExTerminationStrategyLogMessages.cs
@@ -15,7 +15,6 @@ namespace Microsoft.SemanticKernel.Agents.Chat;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
-[Experimental("SKEXP0110")]
 internal static partial class RegExTerminationStrategyLogMessages
 {
     /// 
diff --git a/dotnet/src/Agents/Core/Logging/SequentialSelectionStrategyLogMessages.cs b/dotnet/src/Agents/Core/Logging/SequentialSelectionStrategyLogMessages.cs
index 6b32b574dd69..e201dddcd9c0 100644
--- a/dotnet/src/Agents/Core/Logging/SequentialSelectionStrategyLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/SequentialSelectionStrategyLogMessages.cs
@@ -14,7 +14,6 @@ namespace Microsoft.SemanticKernel.Agents.Chat;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
-[Experimental("SKEXP0110")]
 internal static partial class SequentialSelectionStrategyLogMessages
 {
     /// 
@@ -23,12 +22,11 @@ internal static partial class SequentialSelectionStrategyLogMessages
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Information,
-        Message = "[{MethodName}] Selected agent ({AgentIndex} / {AgentCount}): {AgentId}/{AgentName}")]
+        Message = "[{MethodName}] Selected agent ({AgentIndex} / {AgentCount}): {AgentId}")]
     public static partial void LogSequentialSelectionStrategySelectedAgent(
         this ILogger logger,
         string methodName,
         int agentIndex,
         int agentCount,
-        string agentId,
-        string agentName);
+        string agentId);
 }
diff --git a/dotnet/src/Agents/Core/Logging/TerminationStrategyLogMessages.cs b/dotnet/src/Agents/Core/Logging/TerminationStrategyLogMessages.cs
index 365c262c819f..adbf5ad7b689 100644
--- a/dotnet/src/Agents/Core/Logging/TerminationStrategyLogMessages.cs
+++ b/dotnet/src/Agents/Core/Logging/TerminationStrategyLogMessages.cs
@@ -15,7 +15,6 @@ namespace Microsoft.SemanticKernel.Agents.Chat;
 /// generate logging code at compile time to achieve optimized code.
 /// 
 [ExcludeFromCodeCoverage]
-[Experimental("SKEXP0110")]
 internal static partial class TerminationStrategyLogMessages
 {
     /// 
@@ -24,13 +23,12 @@ internal static partial class TerminationStrategyLogMessages
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Debug,
-        Message = "[{MethodName}] Evaluating termination for agent {AgentType}: {AgentId}/{AgentName}.")]
+        Message = "[{MethodName}] Evaluating termination for agent {AgentType}: {AgentId}.")]
     public static partial void LogTerminationStrategyEvaluatingCriteria(
         this ILogger logger,
         string methodName,
         Type agentType,
-        string agentId,
-        string agentName);
+        string agentId);
 
     /// 
     /// Logs  agent out of scope.
@@ -38,13 +36,12 @@ public static partial void LogTerminationStrategyEvaluatingCriteria(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Debug,
-        Message = "[{MethodName}] {AgentType} agent out of scope for termination: {AgentId}/{AgentName}.")]
+        Message = "[{MethodName}] {AgentType} agent out of scope for termination: {AgentId}.")]
     public static partial void LogTerminationStrategyAgentOutOfScope(
         this ILogger logger,
         string methodName,
         Type agentType,
-        string agentId,
-        string agentName);
+        string agentId);
 
     /// 
     /// Logs  evaluated criteria (complete).
@@ -52,12 +49,11 @@ public static partial void LogTerminationStrategyAgentOutOfScope(
     [LoggerMessage(
         EventId = 0,
         Level = LogLevel.Information,
-        Message = "[{MethodName}] Evaluated termination for agent {AgentType}: {AgentId}/{AgentName} - {TerminationResult}")]
+        Message = "[{MethodName}] Evaluated termination for agent {AgentType}: {AgentId} - {TerminationResult}")]
     public static partial void LogTerminationStrategyEvaluatedCriteria(
         this ILogger logger,
         string methodName,
         Type agentType,
         string agentId,
-        string agentName,
         bool terminationResult);
 }
diff --git a/dotnet/src/Agents/Bedrock/Properties/AssemblyInfo.cs b/dotnet/src/Agents/Core/Properties/AssemblyInfo.cs
similarity index 100%
rename from dotnet/src/Agents/Bedrock/Properties/AssemblyInfo.cs
rename to dotnet/src/Agents/Core/Properties/AssemblyInfo.cs
diff --git a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
index 315389afd386..71747e21ffad 100644
--- a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
+++ b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
@@ -5,9 +5,9 @@
     Microsoft.SemanticKernel.Agents.OpenAI
     Microsoft.SemanticKernel.Agents.OpenAI
     net8.0;netstandard2.0
-    $(NoWarn);SKEXP0110;SKEXP0001;OPENAI001;NU5104
+    $(NoWarn);SKEXP0110
     false
-    preview
+    alpha
   
 
   
@@ -15,12 +15,11 @@
   
     
     Semantic Kernel Agents - OpenAI
-    Defines a concrete Agent based on the OpenAI Assistant API.
+    Defines a concrete Agent based on the OpenAI Assistant API.
   
 
   
     
-    
     
     
     
@@ -30,8 +29,6 @@
     
   
 
-  
-
   
     
   
diff --git a/dotnet/src/InternalUtilities/agents/Extensions/AgentExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/AgentExtensions.cs
similarity index 51%
rename from dotnet/src/InternalUtilities/agents/Extensions/AgentExtensions.cs
rename to dotnet/src/Agents/OpenAI/Extensions/AgentExtensions.cs
index bf8c993b210e..1844c82ac73f 100644
--- a/dotnet/src/InternalUtilities/agents/Extensions/AgentExtensions.cs
+++ b/dotnet/src/Agents/OpenAI/Extensions/AgentExtensions.cs
@@ -1,13 +1,9 @@
 // Copyright (c) Microsoft. All rights reserved.
-
-using System.Diagnostics.CodeAnalysis;
-
-namespace Microsoft.SemanticKernel.Agents.Extensions;
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
 
 /// 
 /// Extension methods for .
 /// 
-[ExcludeFromCodeCoverage]
 internal static class AgentExtensions
 {
     /// 
@@ -17,13 +13,4 @@ internal static class AgentExtensions
     /// The target agent
     /// The agent name as a non-empty string
     public static string GetName(this Agent agent) => agent.Name ?? agent.Id;
-
-    /// 
-    /// Provides the display name of the agent.
-    /// 
-    /// The target agent
-    /// 
-    /// Currently, it's intended for telemetry purposes only.
-    /// 
-    public static string GetDisplayName(this Agent agent) => !string.IsNullOrWhiteSpace(agent.Name) ? agent.Name! : "UnnamedAgent";
 }
diff --git a/dotnet/src/Agents/OpenAI/Extensions/AssistantClientExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/AssistantClientExtensions.cs
deleted file mode 100644
index 706186df7e68..000000000000
--- a/dotnet/src/Agents/OpenAI/Extensions/AssistantClientExtensions.cs
+++ /dev/null
@@ -1,172 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
-using System.Threading;
-using System.Threading.Tasks;
-using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
-using OpenAI.Assistants;
-
-namespace Microsoft.SemanticKernel.Agents.OpenAI;
-
-/// 
-/// Convenience extensions for .
-/// 
-public static class AssistantClientExtensions
-{
-    /// 
-    /// Creates an assistant asynchronously with the specified options.
-    /// 
-    /// The assistant client.
-    /// The model identifier.
-    /// The name of the assistant.
-    /// The description of the assistant.
-    /// The instructions for the assistant.
-    /// Whether to enable the code interpreter tool.
-    /// The file IDs for the code interpreter tool.
-    /// Whether to enable the file search tool.
-    /// The vector store identifier.
-    /// The temperature setting for the assistant.
-    /// The nucleus sampling factor for the assistant.
-    /// The response format for the assistant.
-    /// The metadata for the assistant.
-    /// The cancellation token.
-    /// A task that represents the asynchronous operation. The task result contains the created assistant.
-    public static async Task CreateAssistantAsync(
-        this AssistantClient client,
-        string modelId,
-        string? name = null,
-        string? description = null,
-        string? instructions = null,
-        bool enableCodeInterpreter = false,
-        IReadOnlyList? codeInterpreterFileIds = null,
-        bool enableFileSearch = false,
-        string? vectorStoreId = null,
-        float? temperature = null,
-        float? topP = null,
-        AssistantResponseFormat? responseFormat = null,
-        IReadOnlyDictionary? metadata = null,
-        CancellationToken cancellationToken = default)
-    {
-        AssistantCreationOptions options =
-            new()
-            {
-                Name = name,
-                Description = description,
-                Instructions = instructions,
-                Temperature = temperature,
-                NucleusSamplingFactor = topP,
-                ResponseFormat = responseFormat,
-            };
-
-        if (metadata != null)
-        {
-            foreach (KeyValuePair item in metadata)
-            {
-                options.Metadata[item.Key] = item.Value;
-            }
-        }
-
-        if (enableCodeInterpreter || (codeInterpreterFileIds?.Count ?? 0) > 0)
-        {
-            options.Tools.Add(ToolDefinition.CreateCodeInterpreter());
-        }
-
-        if (enableFileSearch || !string.IsNullOrEmpty(vectorStoreId))
-        {
-            options.Tools.Add(ToolDefinition.CreateFileSearch());
-        }
-
-        options.ToolResources = AssistantToolResourcesFactory.GenerateToolResources(vectorStoreId, codeInterpreterFileIds);
-
-        Assistant assistant = await client.CreateAssistantAsync(modelId, options, cancellationToken).ConfigureAwait(false);
-
-        return assistant;
-    }
-
-    /// 
-    /// Creates an assistant from a template asynchronously with the specified options.
-    /// 
-    /// The assistant client.
-    /// The model identifier.
-    /// The prompt template configuration.
-    /// Whether to enable the code interpreter tool.
-    /// The file IDs for the code interpreter tool.
-    /// Whether to enable the file search tool.
-    /// The vector store identifier.
-    /// The temperature setting for the assistant.
-    /// The nucleus sampling factor for the assistant.
-    /// The response format for the assistant.
-    /// The metadata for the assistant.
-    /// The cancellation token.
-    /// A task that represents the asynchronous operation. The task result contains the created assistant.
-    public static Task CreateAssistantFromTemplateAsync(
-        this AssistantClient client,
-        string modelId,
-        PromptTemplateConfig config,
-        bool enableCodeInterpreter = false,
-        IReadOnlyList? codeInterpreterFileIds = null,
-        bool enableFileSearch = false,
-        string? vectorStoreId = null,
-        float? temperature = null,
-        float? topP = null,
-        AssistantResponseFormat? responseFormat = null,
-        IReadOnlyDictionary? metadata = null,
-        CancellationToken cancellationToken = default)
-    {
-        return
-            client.CreateAssistantAsync(
-                modelId,
-                config.Name,
-                config.Description,
-                config.Template,
-                enableCodeInterpreter,
-                codeInterpreterFileIds,
-                enableFileSearch,
-                vectorStoreId,
-                temperature,
-                topP,
-                responseFormat,
-                metadata,
-                cancellationToken);
-    }
-
-    /// 
-    /// Creates a thread asynchronously with the specified options.
-    /// 
-    /// The assistant client.
-    /// The initial messages for the thread.
-    /// The file IDs for the code interpreter tool.
-    /// The vector store identifier.
-    /// The metadata for the thread.
-    /// The cancellation token.
-    /// A task that represents the asynchronous operation. The task result contains the thread ID.
-    public static async Task CreateThreadAsync(
-        this AssistantClient client,
-        IEnumerable? messages = null,
-        IReadOnlyList? codeInterpreterFileIds = null,
-        string? vectorStoreId = null,
-        IReadOnlyDictionary? metadata = null,
-        CancellationToken cancellationToken = default)
-    {
-        ThreadCreationOptions options = new()
-        {
-            ToolResources = AssistantToolResourcesFactory.GenerateToolResources(vectorStoreId, codeInterpreterFileIds)
-        };
-
-        if (messages != null)
-        {
-            options.InitialMessages.AddRange(messages.ToThreadInitializationMessages());
-        }
-
-        if (metadata != null)
-        {
-            foreach (KeyValuePair item in metadata)
-            {
-                options.Metadata[item.Key] = item.Value;
-            }
-        }
-
-        AssistantThread thread = await client.CreateThreadAsync(options, cancellationToken).ConfigureAwait(false);
-
-        return thread.Id;
-    }
-}
diff --git a/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs
deleted file mode 100644
index 5cd0055d8456..000000000000
--- a/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
-using System.Linq;
-using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
-using OpenAI.Assistants;
-
-namespace Microsoft.SemanticKernel.Agents.OpenAI;
-
-/// 
-/// Convenience extensions for converting .
-/// 
-public static class ChatContentMessageExtensions
-{
-    /// 
-    /// Converts a  instance to a .
-    /// 
-    /// The chat message content to convert.
-    /// A  instance.
-    public static ThreadInitializationMessage ToThreadInitializationMessage(this ChatMessageContent message)
-    {
-        return
-            new ThreadInitializationMessage(
-                role: message.Role.ToMessageRole(),
-                content: AssistantMessageFactory.GetMessageContents(message));
-    }
-
-    /// 
-    /// Converts a collection of  instances to a collection of  instances.
-    /// 
-    /// The collection of chat message contents to convert.
-    /// A collection of  instances.
-    public static IEnumerable ToThreadInitializationMessages(this IEnumerable messages)
-    {
-        return messages.Select(message => message.ToThreadInitializationMessage());
-    }
-}
diff --git a/dotnet/src/Agents/OpenAI/Extensions/KernelExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/KernelExtensions.cs
new file mode 100644
index 000000000000..d1e7e0059494
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/Extensions/KernelExtensions.cs
@@ -0,0 +1,18 @@
+// Copyright (c) Microsoft. All rights reserved.
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+internal static class KernelExtensions
+{
+    /// 
+    /// Retrieve a kernel function based on the tool name.
+    /// 
+    public static KernelFunction GetKernelFunction(this Kernel kernel, string functionName, char delimiter)
+    {
+        string[] nameParts = functionName.Split(delimiter);
+        return nameParts.Length switch
+        {
+            2 => kernel.Plugins.GetFunction(nameParts[0], nameParts[1]),
+            _ => throw new KernelException($"Agent Failure - Unknown tool: {functionName}"),
+        };
+    }
+}
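
The new internal GetKernelFunction helper maps an OpenAI tool name back to a kernel function by splitting on the fully-qualified-name delimiter and throwing for anything that does not yield exactly a plugin part and a function part. A rough usage sketch from inside the Agents.OpenAI assembly; the plugin and function names are hypothetical, and '-' is assumed to be the separator produced by FunctionName.ToFullyQualifiedName:

    // Sketch: resolve "MenuPlugin-GetSpecials" back to the registered kernel function.
    Kernel kernel = Kernel.CreateBuilder().Build();
    kernel.Plugins.AddFromType<MenuPlugin>();        // MenuPlugin is a placeholder plugin type

    KernelFunction function = kernel.GetKernelFunction("MenuPlugin-GetSpecials", '-');
    // Any name that does not split into exactly two parts raises a KernelException.
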
diff --git a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
index d15dec19d6e0..1a4b6fc2fbf6 100644
--- a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
+++ b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
@@ -1,13 +1,12 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System;
+using System.Collections.Generic;
+using System.Linq;
 using OpenAI.Assistants;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
 
-/// 
-/// Extensions for  to support OpenAI specific operations.
-/// 
-public static class KernelFunctionExtensions
+internal static class KernelFunctionExtensions
 {
     /// 
     /// Convert  to an OpenAI tool model.
@@ -15,22 +14,87 @@ public static class KernelFunctionExtensions
     /// The source function
     /// The plugin name
     /// An OpenAI tool definition
-    public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string? pluginName = null)
+    public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string pluginName)
     {
-        if (function.Metadata.Parameters.Count > 0)
+        var metadata = function.Metadata;
+        if (metadata.Parameters.Count > 0)
         {
-            BinaryData parameterData = function.Metadata.CreateParameterSpec();
+            var required = new List(metadata.Parameters.Count);
+            var parameters =
+                metadata.Parameters.ToDictionary(
+                    p => p.Name,
+                    p =>
+                    {
+                        if (p.IsRequired)
+                        {
+                            required.Add(p.Name);
+                        }
 
-            return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName ?? function.PluginName))
+                        return
+                            new
+                            {
+                                type = ConvertType(p.ParameterType),
+                                description = p.Description,
+                            };
+                    });
+
+            var spec =
+                new
+                {
+                    type = "object",
+                    properties = parameters,
+                    required,
+                };
+
+            return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName))
             {
                 Description = function.Description,
-                Parameters = parameterData,
+                Parameters = BinaryData.FromObjectAsJson(spec)
             };
         }
 
-        return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName ?? function.PluginName))
+        return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName))
         {
             Description = function.Description
         };
     }
+
+    private static string ConvertType(Type? type)
+    {
+        if (type is null || type == typeof(string))
+        {
+            return "string";
+        }
+
+        if (type == typeof(bool))
+        {
+            return "boolean";
+        }
+
+        if (type.IsEnum)
+        {
+            return "enum";
+        }
+
+        if (type.IsArray)
+        {
+            return "array";
+        }
+
+        if (type == typeof(DateTime) || type == typeof(DateTimeOffset))
+        {
+            return "date-time";
+        }
+
+        return Type.GetTypeCode(type) switch
+        {
+            TypeCode.SByte or TypeCode.Byte or
+            TypeCode.Int16 or TypeCode.UInt16 or
+            TypeCode.Int32 or TypeCode.UInt32 or
+            TypeCode.Int64 or TypeCode.UInt64 or
+            TypeCode.Single or TypeCode.Double or TypeCode.Decimal => "number",
+
+            _ => "object",
+        };
+    }
 }
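
With the public ToToolDefinition overload replaced by this internal version, the parameter spec is assembled from anonymous objects and ConvertType rather than KernelFunctionMetadata.CreateParameterSpec. For a hypothetical function with a required string "city" and an optional numeric "days" parameter, the BinaryData assigned to FunctionToolDefinition.Parameters serializes to roughly the shape below; names and descriptions are illustrative only:

    // Illustrative output shape; property casing follows the anonymous-type members above.
    // {
    //   "type": "object",
    //   "properties": {
    //     "city": { "type": "string", "description": "Target city" },
    //     "days": { "type": "number", "description": "Forecast window in days" }
    //   },
    //   "required": [ "city" ]
    // }
    FunctionToolDefinition tool = myFunction.ToToolDefinition("MyPlugin");   // both names are placeholders

Note that ConvertType emits "enum" and "date-time" rather than strict JSON Schema types, so the payload mirrors what this factory produces, not a validated schema.
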
diff --git a/dotnet/src/Agents/OpenAI/Extensions/OpenAIClientExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/OpenAIClientExtensions.cs
deleted file mode 100644
index 7e63a32673f2..000000000000
--- a/dotnet/src/Agents/OpenAI/Extensions/OpenAIClientExtensions.cs
+++ /dev/null
@@ -1,110 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
-using System.IO;
-using System.Threading;
-using System.Threading.Tasks;
-using OpenAI;
-using OpenAI.Assistants;
-using OpenAI.Files;
-using OpenAI.VectorStores;
-
-namespace Microsoft.SemanticKernel.Agents.OpenAI;
-
-/// 
-/// Convenience extensions for .
-/// 
-public static class OpenAIClientExtensions
-{
-    /// 
-    /// Creates a vector store asynchronously.
-    /// 
-    /// The OpenAI client instance.
-    /// The collection of file identifiers to include in the vector store.
-    /// Indicates whether to wait until the operation is completed.
-    /// The name of the vector store.
-    /// The expiration policy for the vector store.
-    /// The chunking strategy for the vector store.
-    /// The metadata associated with the vector store.
-    /// The cancellation token to monitor for cancellation requests.
-    /// The identifier of the created vector store.
-    public static async Task CreateVectorStoreAsync(
-        this OpenAIClient client,
-        IEnumerable fileIds,
-        bool waitUntilCompleted = true,
-        string? storeName = null,
-        VectorStoreExpirationPolicy? expirationPolicy = null,
-        FileChunkingStrategy? chunkingStrategy = null,
-        IReadOnlyDictionary? metadata = null,
-        CancellationToken cancellationToken = default)
-    {
-        VectorStoreCreationOptions options = new()
-        {
-            Name = storeName,
-            ChunkingStrategy = chunkingStrategy,
-            ExpirationPolicy = expirationPolicy,
-        };
-
-        options.FileIds.AddRange(fileIds);
-
-        if (metadata != null)
-        {
-            foreach (KeyValuePair item in metadata)
-            {
-                options.Metadata[item.Key] = item.Value;
-            }
-        }
-
-        VectorStoreClient vectorStoreClient = client.GetVectorStoreClient();
-        CreateVectorStoreOperation result = await vectorStoreClient.CreateVectorStoreAsync(waitUntilCompleted, options, cancellationToken).ConfigureAwait(false);
-
-        return result.VectorStoreId;
-    }
-
-    /// 
-    /// Deletes a vector store asynchronously.
-    /// 
-    /// The OpenAI client instance.
-    /// The identifier of the vector store to delete.
-    /// The cancellation token to monitor for cancellation requests.
-    /// A boolean indicating whether the vector store was successfully deleted.
-    public static async Task DeleteVectorStoreAsync(this OpenAIClient client, string vectorStoreId, CancellationToken cancellationToken = default)
-    {
-        VectorStoreClient vectorStoreClient = client.GetVectorStoreClient();
-        VectorStoreDeletionResult result = await vectorStoreClient.DeleteVectorStoreAsync(vectorStoreId, cancellationToken).ConfigureAwait(false);
-        return result.Deleted;
-    }
-
-    /// 
-    /// Uploads a file to use with the assistant.
-    /// 
-    /// The OpenAI client instance.
-    /// The content to upload.
-    /// The name of the file.
-    /// The  to monitor for cancellation requests. The default is .
-    /// The file identifier.
-    /// 
-    /// Use the  directly for more advanced file operations.
-    /// 
-    public static async Task UploadAssistantFileAsync(this OpenAIClient client, Stream stream, string name, CancellationToken cancellationToken = default)
-    {
-        OpenAIFileClient fileClient = client.GetOpenAIFileClient();
-
-        OpenAIFile fileInfo = await fileClient.UploadFileAsync(stream, name, FileUploadPurpose.Assistants, cancellationToken).ConfigureAwait(false);
-
-        return fileInfo.Id;
-    }
-
-    /// 
-    /// Deletes a file asynchronously.
-    /// 
-    /// The OpenAI client instance.
-    /// The identifier of the file to delete.
-    /// The cancellation token to monitor for cancellation requests.
-    /// A boolean indicating whether the file was successfully deleted.
-    public static async Task DeleteFileAsync(this OpenAIClient client, string fileId, CancellationToken cancellationToken = default)
-    {
-        OpenAIFileClient fileClient = client.GetOpenAIFileClient();
-        FileDeletionResult result = await fileClient.DeleteFileAsync(fileId, cancellationToken).ConfigureAwait(false);
-        return result.Deleted;
-    }
-}
diff --git a/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs b/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs
new file mode 100644
index 000000000000..d017fb403f23
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs
@@ -0,0 +1,13 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Azure.Core;
+using Azure.Core.Pipeline;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+
+/// 
+/// Helper class to inject headers into Azure SDK HTTP pipeline
+/// 
+internal sealed class AddHeaderRequestPolicy(string headerName, string headerValue) : HttpPipelineSynchronousPolicy
+{
+    public override void OnSendingRequest(HttpMessage message) => message.Request.Headers.Add(headerName, headerValue);
+}
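
AddHeaderRequestPolicy is an Azure.Core HttpPipelineSynchronousPolicy, so it attaches to any Azure SDK client through ClientOptions.AddPolicy. A wiring sketch from inside the Agents.OpenAI assembly; the header name and value are placeholders, and the options instance stands in for whichever Azure client options the caller configures:

    // Sketch: add the header on every call made through the client's pipeline.
    using Azure.Core;
    using Microsoft.SemanticKernel.Agents.OpenAI.Internal;

    static void AddVersionHeader(ClientOptions options)
    {
        options.AddPolicy(
            new AddHeaderRequestPolicy("Semantic-Kernel-Version", "1.2.3"),   // placeholder values
            HttpPipelinePosition.PerCall);
    }
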
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs
index cdcfdadf93ef..532a8433c37c 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs
@@ -5,8 +5,6 @@
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
 
-#pragma warning disable CS0618 // Type or member is obsolete
-
 /// 
 /// Produce the  for an assistant according to the requested configuration.
 /// 
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
index 772c30630fe5..cda0399f5e28 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
@@ -1,6 +1,6 @@
 // Copyright (c) Microsoft. All rights reserved.
-
 using System.Collections.Generic;
+using Microsoft.SemanticKernel.ChatCompletion;
 using OpenAI.Assistants;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
@@ -8,46 +8,62 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
 /// 
 /// Factory for creating  definition.
 /// 
+/// 
+/// Improves testability.
+/// 
 internal static class AssistantRunOptionsFactory
 {
-    public static RunCreationOptions GenerateOptions(RunCreationOptions? defaultOptions, string? overrideInstructions, RunCreationOptions? invocationOptions)
+    /// 
+    /// Produce  by reconciling  and .
+    /// 
+    /// The assistant definition
+    /// Instructions to use for the run
+    /// The run specific options
+    public static RunCreationOptions GenerateOptions(OpenAIAssistantDefinition definition, string? overrideInstructions, OpenAIAssistantInvocationOptions? invocationOptions)
     {
-        RunCreationOptions runOptions =
+        int? truncationMessageCount = ResolveExecutionSetting(invocationOptions?.TruncationMessageCount, definition.ExecutionOptions?.TruncationMessageCount);
+
+        RunCreationOptions options =
             new()
             {
-                AdditionalInstructions = invocationOptions?.AdditionalInstructions ?? defaultOptions?.AdditionalInstructions,
+                AdditionalInstructions = invocationOptions?.AdditionalInstructions ?? definition.ExecutionOptions?.AdditionalInstructions,
                 InstructionsOverride = overrideInstructions,
-                MaxOutputTokenCount = invocationOptions?.MaxOutputTokenCount ?? defaultOptions?.MaxOutputTokenCount,
-                MaxInputTokenCount = invocationOptions?.MaxInputTokenCount ?? defaultOptions?.MaxInputTokenCount,
-                ModelOverride = invocationOptions?.ModelOverride ?? defaultOptions?.ModelOverride,
-                NucleusSamplingFactor = invocationOptions?.NucleusSamplingFactor ?? defaultOptions?.NucleusSamplingFactor,
-                AllowParallelToolCalls = invocationOptions?.AllowParallelToolCalls ?? defaultOptions?.AllowParallelToolCalls,
-                ResponseFormat = invocationOptions?.ResponseFormat ?? defaultOptions?.ResponseFormat,
-                Temperature = invocationOptions?.Temperature ?? defaultOptions?.Temperature,
-                ToolConstraint = invocationOptions?.ToolConstraint ?? defaultOptions?.ToolConstraint,
-                TruncationStrategy = invocationOptions?.TruncationStrategy ?? defaultOptions?.TruncationStrategy,
+                MaxOutputTokenCount = ResolveExecutionSetting(invocationOptions?.MaxCompletionTokens, definition.ExecutionOptions?.MaxCompletionTokens),
+                MaxInputTokenCount = ResolveExecutionSetting(invocationOptions?.MaxPromptTokens, definition.ExecutionOptions?.MaxPromptTokens),
+                ModelOverride = invocationOptions?.ModelName,
+                NucleusSamplingFactor = ResolveExecutionSetting(invocationOptions?.TopP, definition.TopP),
+                AllowParallelToolCalls = ResolveExecutionSetting(invocationOptions?.ParallelToolCallsEnabled, definition.ExecutionOptions?.ParallelToolCallsEnabled),
+                ResponseFormat = ResolveExecutionSetting(invocationOptions?.EnableJsonResponse, definition.EnableJsonResponse) ?? false ? AssistantResponseFormat.JsonObject : null,
+                Temperature = ResolveExecutionSetting(invocationOptions?.Temperature, definition.Temperature),
+                TruncationStrategy = truncationMessageCount.HasValue ? RunTruncationStrategy.CreateLastMessagesStrategy(truncationMessageCount.Value) : null,
             };
 
-        IList? additionalMessages = invocationOptions?.AdditionalMessages ?? defaultOptions?.AdditionalMessages;
-        if (additionalMessages != null)
+        if (invocationOptions?.Metadata != null)
         {
-            runOptions.AdditionalMessages.AddRange(additionalMessages);
+            foreach (var metadata in invocationOptions.Metadata)
+            {
+                options.Metadata.Add(metadata.Key, metadata.Value ?? string.Empty);
+            }
         }
 
-        PopulateMetadata(defaultOptions, runOptions);
-        PopulateMetadata(invocationOptions, runOptions);
-
-        return runOptions;
-    }
-
-    private static void PopulateMetadata(RunCreationOptions? sourceOptions, RunCreationOptions targetOptions)
-    {
-        if (sourceOptions?.Metadata != null)
+        if (invocationOptions?.AdditionalMessages != null)
         {
-            foreach (KeyValuePair item in sourceOptions.Metadata)
+            foreach (ChatMessageContent message in invocationOptions.AdditionalMessages)
             {
-                targetOptions.Metadata[item.Key] = item.Value ?? string.Empty;
+                ThreadInitializationMessage threadMessage = new(
+                    role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Assistant,
+                    content: AssistantMessageFactory.GetMessageContents(message));
+
+                options.AdditionalMessages.Add(threadMessage);
             }
         }
+
+        return options;
     }
+
+    private static TValue? ResolveExecutionSetting(TValue? setting, TValue? agentSetting) where TValue : struct
+        =>
+            setting.HasValue && (!agentSetting.HasValue || !EqualityComparer.Default.Equals(setting.Value, agentSetting.Value)) ?
+                setting.Value :
+                agentSetting;
 }
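
ResolveExecutionSetting gives run-level (invocation) settings precedence over the assistant definition: the invocation value wins when it is set and differs from the definition value; otherwise the definition value flows through. A standalone restatement with example outcomes; the numbers are arbitrary:

    // Roughly equivalent precedence logic, shown outside the factory for clarity.
    static T? Resolve<T>(T? invocationSetting, T? definitionSetting) where T : struct =>
        invocationSetting.HasValue && !invocationSetting.Equals(definitionSetting)
            ? invocationSetting
            : definitionSetting;

    // Resolve<float>(0.9f, 0.5f) -> 0.9f   (invocation override wins)
    // Resolve<float>(null, 0.5f) -> 0.5f   (falls back to the definition)
    // Resolve<float>(0.5f, 0.5f) -> 0.5f   (equal values resolve to the definition value)
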
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
index 64749cedff69..2e066b91869f 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
@@ -2,7 +2,6 @@
 using System;
 using System.ClientModel;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Net;
 using System.Runtime.CompilerServices;
@@ -11,7 +10,6 @@
 using System.Threading.Tasks;
 using Azure;
 using Microsoft.Extensions.Logging;
-using Microsoft.SemanticKernel.Agents.Extensions;
 using Microsoft.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel.Connectors.FunctionCalling;
 using OpenAI.Assistants;
@@ -30,6 +28,46 @@ internal static class AssistantThreadActions
         RunStatus.Cancelling,
     ];
 
+    /// 
+    /// Create a new assistant thread.
+    /// 
+    /// The assistant client
+    /// The options for creating the thread
+    /// The  to monitor for cancellation requests. The default is .
+    /// The thread identifier
+    public static async Task CreateThreadAsync(AssistantClient client, OpenAIThreadCreationOptions? options, CancellationToken cancellationToken = default)
+    {
+        ThreadCreationOptions createOptions =
+            new()
+            {
+                ToolResources = AssistantToolResourcesFactory.GenerateToolResources(options?.VectorStoreId, options?.CodeInterpreterFileIds),
+            };
+
+        if (options?.Messages is not null)
+        {
+            foreach (ChatMessageContent message in options.Messages)
+            {
+                ThreadInitializationMessage threadMessage = new(
+                    role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Assistant,
+                    content: AssistantMessageFactory.GetMessageContents(message));
+
+                createOptions.InitialMessages.Add(threadMessage);
+            }
+        }
+
+        if (options?.Metadata != null)
+        {
+            foreach (KeyValuePair item in options.Metadata)
+            {
+                createOptions.Metadata[item.Key] = item.Value;
+            }
+        }
+
+        AssistantThread thread = await client.CreateThreadAsync(createOptions, cancellationToken).ConfigureAwait(false);
+
+        return thread.Id;
+    }
+
     /// 
     /// Create a message in the specified thread.
     /// 
@@ -113,26 +151,24 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
         OpenAIAssistantAgent agent,
         AssistantClient client,
         string threadId,
-        RunCreationOptions? invocationOptions,
+        OpenAIAssistantInvocationOptions? invocationOptions,
         ILogger logger,
         Kernel kernel,
         KernelArguments? arguments,
         [EnumeratorCancellation] CancellationToken cancellationToken)
     {
-        logger.LogOpenAIAssistantCreatingRun(nameof(InvokeAsync), threadId);
+        if (agent.IsDeleted)
+        {
+            throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {agent.Id}.");
+        }
 
-        List tools = new(agent.Definition.Tools);
+        logger.LogOpenAIAssistantCreatingRun(nameof(InvokeAsync), threadId);
 
-        // Add unique functions from the Kernel which are not already present in the agent's tools
-        var functionToolNames = new HashSet(tools.OfType().Select(t => t.FunctionName));
-        var functionTools = kernel.Plugins
-            .SelectMany(kp => kp.Select(kf => kf.ToToolDefinition(kp.Name)))
-            .Where(tool => !functionToolNames.Contains(tool.FunctionName));
-        tools.AddRange(functionTools);
+        ToolDefinition[]? tools = [.. agent.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))];
 
         string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
 
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.RunOptions, instructions, invocationOptions);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, instructions, invocationOptions);
 
         options.ToolsOverride.AddRange(tools);
 
@@ -212,7 +248,7 @@ await functionProcessor.InvokeFunctionCallsAsync(
             int messageCount = 0;
             foreach (RunStep completedStep in completedStepsToProcess)
             {
-                if (completedStep.Kind == RunStepKind.ToolCall)
+                if (completedStep.Type == RunStepType.ToolCalls)
                 {
                     foreach (RunStepToolCall toolCall in completedStep.Details.ToolCalls)
                     {
@@ -220,15 +256,15 @@ await functionProcessor.InvokeFunctionCallsAsync(
                         ChatMessageContent? content = null;
 
                         // Process code-interpreter content
-                        if (toolCall.Kind == RunStepToolCallKind.CodeInterpreter)
+                        if (toolCall.ToolKind == RunStepToolCallKind.CodeInterpreter)
                         {
                             content = GenerateCodeInterpreterContent(agent.GetName(), toolCall.CodeInterpreterInput, completedStep);
                             isVisible = true;
                         }
                         // Process function result content
-                        else if (toolCall.Kind == RunStepToolCallKind.Function)
+                        else if (toolCall.ToolKind == RunStepToolCallKind.Function)
                         {
-                            FunctionResultContent functionStep = functionSteps[toolCall.Id]; // Function step always captured on invocation
+                            FunctionResultContent functionStep = functionSteps[toolCall.ToolCallId]; // Function step always captured on invocation
                             content = GenerateFunctionResultContent(agent.GetName(), [functionStep], completedStep);
                         }
 
@@ -240,7 +276,7 @@ await functionProcessor.InvokeFunctionCallsAsync(
                         }
                     }
                 }
-                else if (completedStep.Kind == RunStepKind.CreatedMessage)
+                else if (completedStep.Type == RunStepType.MessageCreation)
                 {
                     // Retrieve the message
                     ThreadMessage? message = await RetrieveMessageAsync(client, threadId, completedStep.Details.CreatedMessageId, agent.PollingOptions.MessageSynchronizationDelay, cancellationToken).ConfigureAwait(false);
@@ -342,25 +378,29 @@ async Task PollRunStatusAsync()
     /// 
     /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
     /// 
-    [ExcludeFromCodeCoverage]
     public static async IAsyncEnumerable InvokeStreamingAsync(
         OpenAIAssistantAgent agent,
         AssistantClient client,
         string threadId,
         IList? messages,
-        RunCreationOptions? invocationOptions,
+        OpenAIAssistantInvocationOptions? invocationOptions,
         ILogger logger,
         Kernel kernel,
         KernelArguments? arguments,
         [EnumeratorCancellation] CancellationToken cancellationToken)
     {
+        if (agent.IsDeleted)
+        {
+            throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {agent.Id}.");
+        }
+
         logger.LogOpenAIAssistantCreatingRun(nameof(InvokeAsync), threadId);
 
-        ToolDefinition[]? tools = [.. agent.Definition.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))];
+        ToolDefinition[]? tools = [.. agent.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))];
 
         string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
 
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.RunOptions, instructions, invocationOptions);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, instructions, invocationOptions);
 
         options.ToolsOverride.AddRange(tools);
 
@@ -458,7 +498,7 @@ await client.GetRunStepsAsync(run.ThreadId, run.Id, cancellationToken: cancellat
                 {
                     foreach (RunStepToolCall stepDetails in step.Details.ToolCalls)
                     {
-                        toolMap[stepDetails.Id] = step.Id;
+                        toolMap[stepDetails.ToolCallId] = step.Id;
                     }
                 }
 
@@ -516,14 +556,14 @@ await RetrieveMessageAsync(
                     {
                         foreach (RunStepToolCall toolCall in step.Details.ToolCalls)
                         {
-                            if (toolCall.Kind == RunStepToolCallKind.Function)
+                            if (toolCall.ToolKind == RunStepToolCallKind.Function)
                             {
                                 messages?.Add(GenerateFunctionResultContent(agent.GetName(), stepFunctionResults[step.Id], step));
                                 stepFunctionResults.Remove(step.Id);
                                 break;
                             }
 
-                            if (toolCall.Kind == RunStepToolCallKind.CodeInterpreter)
+                            if (toolCall.ToolKind == RunStepToolCallKind.CodeInterpreter)
                             {
                                 messages?.Add(GenerateCodeInterpreterContent(agent.GetName(), toolCall.CodeInterpreterInput, step));
                             }
@@ -588,7 +628,6 @@ private static ChatMessageContent GenerateMessageContent(string? assistantName,
         return content;
     }
 
-    [ExcludeFromCodeCoverage]
     private static StreamingChatMessageContent GenerateStreamingMessageContent(string? assistantName, MessageContentUpdate update)
     {
         StreamingChatMessageContent content =
@@ -621,7 +660,6 @@ private static StreamingChatMessageContent GenerateStreamingMessageContent(strin
         return content;
     }
 
-    [ExcludeFromCodeCoverage]
     private static StreamingChatMessageContent? GenerateStreamingCodeInterpreterContent(string? assistantName, RunStepDetailsUpdate update)
     {
         StreamingChatMessageContent content =
@@ -674,7 +712,6 @@ private static AnnotationContent GenerateAnnotationContent(TextAnnotation annota
             };
     }
 
-    [ExcludeFromCodeCoverage]
     private static StreamingAnnotationContent GenerateStreamingAnnotationContent(TextAnnotationUpdate annotation)
     {
         string? fileId = null;
@@ -716,13 +753,13 @@ private static ChatMessageContent GenerateCodeInterpreterContent(string agentNam
 
     private static IEnumerable ParseFunctionStep(OpenAIAssistantAgent agent, RunStep step)
     {
-        if (step.Status == RunStepStatus.InProgress && step.Kind == RunStepKind.ToolCall)
+        if (step.Status == RunStepStatus.InProgress && step.Type == RunStepType.ToolCalls)
         {
             foreach (RunStepToolCall toolCall in step.Details.ToolCalls)
             {
                 (FunctionName nameParts, KernelArguments functionArguments) = ParseFunctionCall(toolCall.FunctionName, toolCall.FunctionArguments);
 
-                FunctionCallContent content = new(nameParts.Name, nameParts.PluginName, toolCall.Id, functionArguments);
+                FunctionCallContent content = new(nameParts.Name, nameParts.PluginName, toolCall.ToolCallId, functionArguments);
 
                 yield return content;
             }
@@ -792,6 +829,23 @@ private static ChatMessageContent GenerateFunctionResultContent(string agentName
             };
     }
 
+    private static Task[] ExecuteFunctionSteps(OpenAIAssistantAgent agent, FunctionCallContent[] functionCalls, CancellationToken cancellationToken)
+    {
+        Task[] functionTasks = new Task[functionCalls.Length];
+
+        for (int index = 0; index < functionCalls.Length; ++index)
+        {
+            functionTasks[index] = ExecuteFunctionStep(agent, functionCalls[index], cancellationToken);
+        }
+
+        return functionTasks;
+    }
+
+    private static Task ExecuteFunctionStep(OpenAIAssistantAgent agent, FunctionCallContent functionCall, CancellationToken cancellationToken)
+    {
+        return functionCall.InvokeAsync(agent.Kernel, cancellationToken);
+    }
+
     private static ToolOutput[] GenerateToolOutputs(FunctionResultContent[] functionResults)
     {
         ToolOutput[] toolOutputs = new ToolOutput[functionResults.Length];
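
The restored ExecuteFunctionSteps/ExecuteFunctionStep helpers fan the run's function calls out to the kernel in parallel, and GenerateToolOutputs folds the results back into the ToolOutput array the run expects. A rough sketch of how they chain together, with submission shown via the OpenAI SDK's SubmitToolOutputsToRunAsync; the surrounding variables (agent, client, run, functionCalls, cancellationToken) are assumed to exist at the call site:

    // Sketch: fan out the tool calls, await them all, then hand the outputs back to the run.
    Task<FunctionResultContent>[] functionTasks = ExecuteFunctionSteps(agent, functionCalls, cancellationToken);
    FunctionResultContent[] functionResults = await Task.WhenAll(functionTasks).ConfigureAwait(false);

    ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults);
    await client.SubmitToolOutputsToRunAsync(run.ThreadId, run.Id, toolOutputs, cancellationToken).ConfigureAwait(false);
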
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs
index b947ccc2a78a..7c4000dcebb0 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs
@@ -16,7 +16,7 @@ internal static class AssistantToolResourcesFactory
     /// Produces a  definition based on the provided parameters.
     /// 
     /// An optional vector-store-id for the 'file_search' tool
-    /// An optional list of file-identifiers for the 'code_interpreter' tool.
+    /// An optional list of file-identifiers for the 'code_interpreter' tool.
     public static ToolResources? GenerateToolResources(string? vectorStoreId, IReadOnlyList? codeInterpreterFileIds)
     {
         bool hasVectorStore = !string.IsNullOrWhiteSpace(vectorStoreId);
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.ClientFactory.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.ClientFactory.cs
deleted file mode 100644
index 86e90fbf4adc..000000000000
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.ClientFactory.cs
+++ /dev/null
@@ -1,122 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System;
-using System.ClientModel;
-using System.ClientModel.Primitives;
-using System.Net.Http;
-using System.Threading;
-using Azure.AI.OpenAI;
-using Azure.Core;
-using Microsoft.SemanticKernel.Http;
-using OpenAI;
-
-namespace Microsoft.SemanticKernel.Agents.OpenAI;
-
-public sealed partial class OpenAIAssistantAgent : KernelAgent
-{
-    /// 
-    /// Specifies a key that avoids an exception from OpenAI Client when a custom endpoint is provided without an API key.
-    /// 
-    private const string SingleSpaceKey = " ";
-
-    /// 
-    /// Produces an .
-    /// 
-    /// The API key.
-    /// The service endpoint.
-    /// A custom  for HTTP requests.
-    public static AzureOpenAIClient CreateAzureOpenAIClient(ApiKeyCredential apiKey, Uri endpoint, HttpClient? httpClient = null)
-    {
-        Verify.NotNull(apiKey, nameof(apiKey));
-        Verify.NotNull(endpoint, nameof(endpoint));
-
-        AzureOpenAIClientOptions clientOptions = CreateAzureClientOptions(httpClient);
-
-        return new AzureOpenAIClient(endpoint, apiKey!, clientOptions);
-    }
-
-    /// 
-    /// Produces an .
-    /// 
-    /// The credentials.
-    /// The service endpoint.
-    /// A custom  for HTTP requests.
-    public static AzureOpenAIClient CreateAzureOpenAIClient(TokenCredential credential, Uri endpoint, HttpClient? httpClient = null)
-    {
-        Verify.NotNull(credential, nameof(credential));
-        Verify.NotNull(endpoint, nameof(endpoint));
-
-        AzureOpenAIClientOptions clientOptions = CreateAzureClientOptions(httpClient);
-
-        return new AzureOpenAIClient(endpoint, credential, clientOptions);
-    }
-
-    /// 
-    /// Produces an .
-    /// 
-    /// An optional endpoint.
-    /// A custom  for HTTP requests.
-    public static OpenAIClient CreateOpenAIClient(Uri? endpoint = null, HttpClient? httpClient = null)
-    {
-        OpenAIClientOptions clientOptions = CreateOpenAIClientOptions(endpoint, httpClient);
-        return new OpenAIClient(new ApiKeyCredential(SingleSpaceKey), clientOptions);
-    }
-
-    /// 
-    /// Produces an .
-    /// 
-    /// The API key.
-    /// An optional endpoint.
-    /// A custom  for HTTP requests.
-    public static OpenAIClient CreateOpenAIClient(ApiKeyCredential apiKey, Uri? endpoint = null, HttpClient? httpClient = null)
-    {
-        OpenAIClientOptions clientOptions = CreateOpenAIClientOptions(endpoint, httpClient);
-        return new OpenAIClient(apiKey, clientOptions);
-    }
-
-    private static AzureOpenAIClientOptions CreateAzureClientOptions(HttpClient? httpClient)
-    {
-        AzureOpenAIClientOptions options = new()
-        {
-            UserAgentApplicationId = HttpHeaderConstant.Values.UserAgent
-        };
-
-        ConfigureClientOptions(httpClient, options);
-
-        return options;
-    }
-
-    private static OpenAIClientOptions CreateOpenAIClientOptions(Uri? endpoint, HttpClient? httpClient)
-    {
-        OpenAIClientOptions options = new()
-        {
-            UserAgentApplicationId = HttpHeaderConstant.Values.UserAgent,
-            Endpoint = endpoint ?? httpClient?.BaseAddress,
-        };
-
-        ConfigureClientOptions(httpClient, options);
-
-        return options;
-    }
-
-    private static void ConfigureClientOptions(HttpClient? httpClient, ClientPipelineOptions options)
-    {
-        options.AddPolicy(CreateRequestHeaderPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(OpenAIAssistantAgent))), PipelinePosition.PerCall);
-
-        if (httpClient is not null)
-        {
-            options.Transport = new HttpClientPipelineTransport(httpClient);
-            options.RetryPolicy = new ClientRetryPolicy(maxRetries: 0); // Disable retry policy if and only if a custom HttpClient is provided.
-            options.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable default timeout
-        }
-    }
-
-    private static GenericActionPipelinePolicy CreateRequestHeaderPolicy(string headerName, string headerValue)
-        =>
-            new((message) =>
-            {
-                if (message?.Request?.Headers?.TryGetValue(headerName, out string? _) == false)
-                {
-                    message.Request.Headers.Set(headerName, headerValue);
-                }
-            });
-}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
index c8d300874c60..f79b8ce3239c 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
@@ -1,129 +1,76 @@
 // Copyright (c) Microsoft. All rights reserved.
-
-using System;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
+using System.IO;
 using System.Linq;
 using System.Runtime.CompilerServices;
 using System.Text.Json;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Extensions.Logging;
-using Microsoft.SemanticKernel.Agents.Extensions;
 using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
 using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Diagnostics;
 using OpenAI.Assistants;
+using OpenAI.Files;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
 
 /// 
-/// Represents a  specialization based on Open AI Assistant / GPT.
+/// A  specialization based on Open AI Assistant / GPT.
 /// 
-public sealed partial class OpenAIAssistantAgent : KernelAgent
+public sealed class OpenAIAssistantAgent : KernelAgent
 {
     /// 
-    /// The metadata key that identifies code-interpreter content.
+    /// Metadata key that identifies code-interpreter content.
     /// 
     public const string CodeInterpreterMetadataKey = "code";
 
     internal const string OptionsMetadataKey = "__run_options";
     internal const string TemplateMetadataKey = "__template_format";
 
-    /// 
-    /// Initializes a new instance of the  class.
-    /// 
-    /// The assistant definition.
-    /// The OpenAI provider for accessing the Assistant API service.
-    /// Optional collection of plugins to add to the kernel.
-    /// An optional factory to produce the  for the agent.
-    /// The format of the prompt template used when "templateFactory" parameter is supplied.
-    public OpenAIAssistantAgent(
-        Assistant definition,
-        AssistantClient client,
-        IEnumerable? plugins = null,
-        IPromptTemplateFactory? templateFactory = null,
-        string? templateFormat = null)
-    {
-        this.Client = client;
-
-        this.Definition = definition;
-
-        this.Description = this.Definition.Description;
-        this.Id = this.Definition.Id;
-        this.Name = this.Definition.Name;
-        this.Instructions = this.Definition.Instructions;
-
-        if (templateFactory != null)
-        {
-            Verify.NotNullOrWhiteSpace(templateFormat);
-
-            PromptTemplateConfig templateConfig = new(this.Instructions)
-            {
-                TemplateFormat = templateFormat
-            };
-
-            this.Template = templateFactory.Create(templateConfig);
-        }
-
-        if (plugins != null)
-        {
-            this.Kernel.Plugins.AddRange(plugins);
-        }
-    }
-
-    /// 
-    /// Expose client for additional use.
-    /// 
-    public AssistantClient Client { get; }
+    private readonly OpenAIClientProvider _provider;
+    private readonly Assistant _assistant;
+    private readonly AssistantClient _client;
+    private readonly string[] _channelKeys;
 
     /// 
-    /// Gets the assistant definition.
+    /// The assistant definition.
     /// 
-    public Assistant Definition { get; }
+    public OpenAIAssistantDefinition Definition { get; private init; }
 
     /// 
-    /// Gets a value that indicates whether the assistant has been deleted via .
-    /// 
-    /// 
+    /// Set when the assistant has been deleted via .
     /// An assistant removed by other means will result in an exception when invoked.
-    /// 
-    [Experimental("SKEXP0110")]
-    [Obsolete("Use the OpenAI.Assistants.AssistantClient to manage the Assistant definition lifecycle.")]
+    /// 
     public bool IsDeleted { get; private set; }
 
     /// 
-    /// Gets the polling behavior for run processing.
+    /// Defines polling behavior for run processing
     /// 
     public RunPollingOptions PollingOptions { get; } = new();
 
     /// 
-    /// Gets or sets the run creation options for the assistant.
+    /// Expose predefined tools for run-processing.
     /// 
-    public RunCreationOptions? RunOptions { get; init; }
+    internal IReadOnlyList Tools => this._assistant.Tools;
 
     /// 
-    /// Create a new .
+    /// Define a new .
     /// 
-    /// The OpenAI client provider for accessing the API service.
-    /// The assistant's capabilities.
+    /// OpenAI client provider for accessing the API service.
+    /// Defines the assistant's capabilities.
     /// The  containing services, plugins, and other state for use throughout the operation.
     /// Required arguments that provide default template parameters, including any .
-    /// The prompt template configuration.
-    /// An prompt template factory to produce the  for the agent.
+    /// Prompt template configuration
+    /// An optional factory to produce the  for the agent
     /// The  to monitor for cancellation requests. The default is .
-    /// An  instance.
-    [Experimental("SKEXP0110")]
-    [Obsolete("Use the OpenAI.Assistants.AssistantClient to create an assistant (CreateAssistantFromTemplateAsync).")]
-    public static async Task CreateFromTemplateAsync(
-#pragma warning disable SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+    /// An  instance
+    public async static Task CreateFromTemplateAsync(
         OpenAIClientProvider clientProvider,
-#pragma warning restore SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
         OpenAIAssistantCapabilities capabilities,
         Kernel kernel,
         KernelArguments defaultArguments,
         PromptTemplateConfig templateConfig,
-        IPromptTemplateFactory templateFactory,
+        IPromptTemplateFactory? templateFactory = null,
         CancellationToken cancellationToken = default)
     {
         // Validate input
@@ -132,13 +79,12 @@ public static async Task CreateFromTemplateAsync(
         Verify.NotNull(clientProvider, nameof(clientProvider));
         Verify.NotNull(capabilities, nameof(capabilities));
         Verify.NotNull(templateConfig, nameof(templateConfig));
-        Verify.NotNull(templateFactory, nameof(templateFactory));
 
         // Ensure template is valid (avoid failure after posting assistant creation)
-        IPromptTemplate template = templateFactory.Create(templateConfig);
+        IPromptTemplate? template = templateFactory?.Create(templateConfig);
 
         // Create the client
-        AssistantClient client = clientProvider.Client.GetAssistantClient();
+        AssistantClient client = CreateClient(clientProvider);
 
         // Create the assistant
         AssistantCreationOptions assistantCreationOptions = templateConfig.CreateAssistantOptions(capabilities);
@@ -146,7 +92,7 @@ public static async Task CreateFromTemplateAsync(
 
         // Instantiate the agent
         return
-            new OpenAIAssistantAgent(model, clientProvider.AssistantClient)
+            new OpenAIAssistantAgent(model, clientProvider, client)
             {
                 Kernel = kernel,
                 Arguments = defaultArguments,
@@ -155,20 +101,16 @@ public static async Task CreateFromTemplateAsync(
     }
 
     /// 
-    /// Create a new .
+    /// Define a new .
     /// 
-    /// The OpenAI client provider for accessing the API service.
+    /// OpenAI client provider for accessing the API service.
     /// The assistant definition.
     /// The  containing services, plugins, and other state for use throughout the operation.
     /// Optional default arguments, including any .
     /// The  to monitor for cancellation requests. The default is .
-    /// An  instance.
-    [Experimental("SKEXP0110")]
-    [Obsolete("Use the OpenAI.Assistants.AssistantClient to create an assistant (CreateAssistantAsync).")]
+    /// An  instance
     public static async Task CreateAsync(
-#pragma warning disable SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
         OpenAIClientProvider clientProvider,
-#pragma warning restore SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
         OpenAIAssistantDefinition definition,
         Kernel kernel,
         KernelArguments? defaultArguments = null,
@@ -180,7 +122,7 @@ public static async Task CreateAsync(
         Verify.NotNull(definition, nameof(definition));
 
         // Create the client
-        AssistantClient client = clientProvider.Client.GetAssistantClient();
+        AssistantClient client = CreateClient(clientProvider);
 
         // Create the assistant
         AssistantCreationOptions assistantCreationOptions = definition.CreateAssistantOptions();
@@ -188,29 +130,25 @@ public static async Task CreateAsync(
 
         // Instantiate the agent
         return
-            new OpenAIAssistantAgent(model, clientProvider.AssistantClient)
+            new OpenAIAssistantAgent(model, clientProvider, client)
             {
                 Kernel = kernel,
-                Arguments = defaultArguments ?? [],
+                Arguments = defaultArguments
             };
     }
 
     /// 
-    /// Retrieves a list of assistant definitions.
+    /// Retrieve a list of assistant definitions: .
     /// 
-    /// The configuration for accessing the API service.
+    /// Configuration for accessing the API service.
     /// The  to monitor for cancellation requests. The default is .
-    /// A list of  objects.
-    [Experimental("SKEXP0110")]
-    [Obsolete("Use the OpenAI.Assistants.AssistantClient to query for assistant definitions (GetAssistantsAsync).")]
+    /// A list of  objects.
     public static async IAsyncEnumerable ListDefinitionsAsync(
-#pragma warning disable SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
-        OpenAIClientProvider clientProvider,
-#pragma warning restore SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+        OpenAIClientProvider provider,
         [EnumeratorCancellation] CancellationToken cancellationToken = default)
     {
         // Create the client
-        AssistantClient client = clientProvider.Client.GetAssistantClient();
+        AssistantClient client = CreateClient(provider);
 
         // Query and enumerate assistant definitions
         await foreach (Assistant model in client.GetAssistantsAsync(new AssistantCollectionOptions() { Order = AssistantCollectionOrder.Descending }, cancellationToken).ConfigureAwait(false))
@@ -220,21 +158,17 @@ public static async IAsyncEnumerable ListDefinitionsA
     }
 
     /// 
-    /// Retrieves an  by identifier.
+    /// Retrieve a  by identifier.
     /// 
-    /// The configuration for accessing the API service.
-    /// The agent identifier.
+    /// Configuration for accessing the API service.
+    /// The agent identifier
     /// The  containing services, plugins, and other state for use throughout the operation.
     /// Optional default arguments, including any .
-    /// An optional factory to produce the  for the agent.
+    /// An optional factory to produce the  for the agent
     /// The  to monitor for cancellation requests. The default is .
-    /// An  instance.
-    [Experimental("SKEXP0110")]
-    [Obsolete("Use the OpenAI.Assistants.AssistantClient to retrieve an assistant definition (GetAssistantsAsync).")]
+    /// An  instance
     public static async Task RetrieveAsync(
-#pragma warning disable SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
         OpenAIClientProvider clientProvider,
-#pragma warning restore SKEXP0110 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
         string id,
         Kernel kernel,
         KernelArguments? defaultArguments = null,
@@ -247,7 +181,7 @@ public static async Task RetrieveAsync(
         Verify.NotNullOrWhiteSpace(id, nameof(id));
 
         // Create the client
-        AssistantClient client = clientProvider.Client.GetAssistantClient();
+        AssistantClient client = CreateClient(clientProvider);
 
         // Retrieve the assistant
         Assistant model = await client.GetAssistantAsync(id, cancellationToken).ConfigureAwait(false);
@@ -260,48 +194,37 @@ public static async Task RetrieveAsync(
 
         // Instantiate the agent
         return
-            new OpenAIAssistantAgent(model, clientProvider.AssistantClient)
+            new OpenAIAssistantAgent(model, clientProvider, client)
             {
                 Kernel = kernel,
-                Arguments = defaultArguments ?? [],
+                Arguments = defaultArguments,
                 Template = template,
             };
     }
 
     /// 
-    /// Creates a new assistant thread.
+    /// Create a new assistant thread.
     /// 
     /// The  to monitor for cancellation requests. The default is .
-    /// The thread identifier.
-    [Experimental("SKEXP0110")]
-    [Obsolete("Use the OpenAI.Assistants.AssistantClient to create a thread.")]
+    /// The thread identifier
     public Task CreateThreadAsync(CancellationToken cancellationToken = default)
-        => this.CreateThreadAsync(options: null, cancellationToken);
+        => AssistantThreadActions.CreateThreadAsync(this._client, options: null, cancellationToken);
 
     /// 
-    /// Creates a new assistant thread.
+    /// Create a new assistant thread.
     /// 
-    /// The options for creating the thread.
+    /// The options for creating the thread
     /// The  to monitor for cancellation requests. The default is .
-    /// The thread identifier.
-    [Experimental("SKEXP0110")]
-    [Obsolete("Use the OpenAI.Assistants.AssistantClient to create a thread.")]
+    /// The thread identifier
     public Task CreateThreadAsync(OpenAIThreadCreationOptions? options, CancellationToken cancellationToken = default)
-        => this.Client.CreateThreadAsync(
-            options?.Messages,
-            options?.CodeInterpreterFileIds,
-            options?.VectorStoreId,
-            options?.Metadata,
-            cancellationToken);
+        => AssistantThreadActions.CreateThreadAsync(this._client, options, cancellationToken);
 
     /// 
-    /// Deletes an assistant thread.
+    /// Delete an assistant thread.
     /// 
-    /// The thread identifier.
+    /// The thread identifier
     /// The  to monitor for cancellation requests. The default is .
-    /// The thread identifier.
-    [Experimental("SKEXP0110")]
-    [Obsolete("Use the OpenAI.Assistants.AssistantClient to delete an existing thread.")]
+    /// The thread identifier
     public async Task DeleteThreadAsync(
         string threadId,
         CancellationToken cancellationToken = default)
@@ -309,51 +232,73 @@ public async Task DeleteThreadAsync(
         // Validate input
         Verify.NotNullOrWhiteSpace(threadId, nameof(threadId));
 
-        ThreadDeletionResult result = await this.Client.DeleteThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
+        ThreadDeletionResult result = await this._client.DeleteThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
 
         return result.Deleted;
     }
 
+    /// 
+    /// Uploads a file for use with the assistant.
+    /// 
+    /// The content to upload
+    /// The name of the file
+    /// The  to monitor for cancellation requests. The default is .
+    /// The file identifier
+    /// 
+    /// Use the  directly for more advanced file operations.
+    /// 
+    public async Task UploadFileAsync(Stream stream, string name, CancellationToken cancellationToken = default)
+    {
+        OpenAIFileClient client = this._provider.Client.GetOpenAIFileClient();
+
+        OpenAIFile fileInfo = await client.UploadFileAsync(stream, name, FileUploadPurpose.Assistants, cancellationToken).ConfigureAwait(false);
+
+        return fileInfo.Id;
+    }
+
     /// 
     /// Adds a message to the specified thread.
     /// 
-    /// The thread identifier.
-    /// A non-system message to append to the conversation.
+    /// The thread identifier
+    /// A non-system message to append to the conversation.
     /// The  to monitor for cancellation requests. The default is .
     /// 
-    /// This method only supports messages with role = User or Assistant.
+    /// Only supports messages with role = User or Assistant:
+    /// https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages
     /// 
     public Task AddChatMessageAsync(string threadId, ChatMessageContent message, CancellationToken cancellationToken = default)
     {
-        return AssistantThreadActions.CreateMessageAsync(this.Client, threadId, message, cancellationToken);
+        this.ThrowIfDeleted();
+
+        return AssistantThreadActions.CreateMessageAsync(this._client, threadId, message, cancellationToken);
     }
 
     /// 
     /// Gets messages for a specified thread.
     /// 
-    /// The thread identifier.
+    /// The thread identifier
     /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of messages.
+    /// Asynchronous enumeration of messages.
     public IAsyncEnumerable GetThreadMessagesAsync(string threadId, CancellationToken cancellationToken = default)
     {
-        return AssistantThreadActions.GetMessagesAsync(this.Client, threadId, cancellationToken);
+        this.ThrowIfDeleted();
+
+        return AssistantThreadActions.GetMessagesAsync(this._client, threadId, cancellationToken);
     }
 
     /// 
-    /// Deletes the assistant definition.
+    /// Delete the assistant definition.
     /// 
     /// The  to monitor for cancellation requests. The default is .
-    ///  if the assistant definition was deleted.
+    /// True if the assistant definition has been deleted.
     /// 
-    /// An assistant-based agent is not usable after deletion.
+    /// An assistant-based agent will not be usable after deletion.
     /// 
-    [Experimental("SKEXP0110")]
-    [Obsolete("Use the OpenAI.Assistants.AssistantClient to remove or otherwise modify the Assistant definition.")]
     public async Task DeleteAsync(CancellationToken cancellationToken = default)
     {
         if (!this.IsDeleted)
         {
-            AssistantDeletionResult result = await this.Client.DeleteAssistantAsync(this.Id, cancellationToken).ConfigureAwait(false);
+            AssistantDeletionResult result = await this._client.DeleteAssistantAsync(this.Id, cancellationToken).ConfigureAwait(false);
             this.IsDeleted = result.Deleted;
         }
 
@@ -361,15 +306,15 @@ public async Task DeleteAsync(CancellationToken cancellationToken = defaul
     }
 
     /// 
-    /// Invokes the assistant on the specified thread.
+    /// Invoke the assistant on the specified thread.
     /// 
-    /// The thread identifier.
+    /// The thread identifier
     /// Optional arguments to pass to the agents's invocation, including any .
     /// The  containing services, plugins, and other state for use by the agent.
     /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of response messages.
+    /// Asynchronous enumeration of response messages.
     /// 
-    /// The "arguments" parameter is not currently used by the agent, but is provided for future extensibility.
+    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
     /// 
     public IAsyncEnumerable InvokeAsync(
         string threadId,
@@ -379,55 +324,49 @@ public IAsyncEnumerable InvokeAsync(
         => this.InvokeAsync(threadId, options: null, arguments, kernel, cancellationToken);
 
     /// 
-    /// Invokes the assistant on the specified thread.
+    /// Invoke the assistant on the specified thread.
     /// 
-    /// The thread identifier.
-    /// Optional invocation options.
+    /// The thread identifier
+    /// Optional invocation options
     /// Optional arguments to pass to the agents's invocation, including any .
     /// The  containing services, plugins, and other state for use by the agent.
     /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of response messages.
+    /// Asynchronous enumeration of response messages.
     /// 
-    /// The "arguments" parameter is not currently used by the agent, but is provided for future extensibility.
+    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
     /// 
-    public IAsyncEnumerable InvokeAsync(
+    public async IAsyncEnumerable InvokeAsync(
         string threadId,
-        RunCreationOptions? options,
+        OpenAIAssistantInvocationOptions? options,
         KernelArguments? arguments = null,
         Kernel? kernel = null,
-        CancellationToken cancellationToken = default)
+        [EnumeratorCancellation] CancellationToken cancellationToken = default)
     {
-        return ActivityExtensions.RunWithActivityAsync(
-            () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
-            () => InternalInvokeAsync(),
-            cancellationToken);
+        this.ThrowIfDeleted();
 
-        async IAsyncEnumerable InternalInvokeAsync()
-        {
-            kernel ??= this.Kernel;
-            arguments = this.MergeArguments(arguments);
+        kernel ??= this.Kernel;
+        arguments = this.MergeArguments(arguments);
 
-            await foreach ((bool isVisible, ChatMessageContent message) in AssistantThreadActions.InvokeAsync(this, this.Client, threadId, options, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false))
+        await foreach ((bool isVisible, ChatMessageContent message) in AssistantThreadActions.InvokeAsync(this, this._client, threadId, options, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false))
+        {
+            if (isVisible)
             {
-                if (isVisible)
-                {
-                    yield return message;
-                }
+                yield return message;
             }
         }
     }
 
     /// 
-    /// Invokes the assistant on the specified thread with streaming response.
+    /// Invoke the assistant on the specified thread with streaming response.
     /// 
-    /// The thread identifier.
+    /// The thread identifier
     /// Optional arguments to pass to the agents's invocation, including any .
     /// The  containing services, plugins, and other state for use by the agent.
-    /// Optional receiver of the completed messages that are generated.
+    /// Optional receiver of the completed messages generated
     /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of messages.
+    /// Asynchronous enumeration of messages.
     /// 
-    /// The "arguments" parameter is not currently used by the agent, but is provided for future extensibility.
+    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
     /// 
     public IAsyncEnumerable InvokeStreamingAsync(
         string threadId,
@@ -438,66 +377,59 @@ public IAsyncEnumerable InvokeStreamingAsync(
         => this.InvokeStreamingAsync(threadId, options: null, arguments, kernel, messages, cancellationToken);
 
     /// 
-    /// Invokes the assistant on the specified thread with streaming response.
+    /// Invoke the assistant on the specified thread with streaming response.
     /// 
-    /// The thread identifier.
-    /// Optional invocation options.
+    /// The thread identifier
+    /// Optional invocation options
     /// Optional arguments to pass to the agents's invocation, including any .
     /// The  containing services, plugins, and other state for use by the agent.
-    /// Optional receiver of the completed messages that are generated.
+    /// Optional receiver of the completed messages generated
     /// The  to monitor for cancellation requests. The default is .
-    /// An asynchronous enumeration of messages.
+    /// Asynchronous enumeration of messages.
     /// 
-    /// The "arguments" parameter is not currently used by the agent, but is provided for future extensibility.
+    /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
     /// 
     public IAsyncEnumerable InvokeStreamingAsync(
         string threadId,
-        RunCreationOptions? options,
+        OpenAIAssistantInvocationOptions? options,
         KernelArguments? arguments = null,
         Kernel? kernel = null,
         ChatHistory? messages = null,
         CancellationToken cancellationToken = default)
     {
-#pragma warning disable SKEXP0001 // ModelDiagnostics is marked experimental.
-        return ActivityExtensions.RunWithActivityAsync(
-            () => ModelDiagnostics.StartAgentInvocationActivity(this.Id, this.GetDisplayName(), this.Description),
-            () => InternalInvokeStreamingAsync(),
-            cancellationToken);
-#pragma warning restore SKEXP0001 // ModelDiagnostics is marked experimental.
-
-        IAsyncEnumerable InternalInvokeStreamingAsync()
-        {
-            kernel ??= this.Kernel;
-            arguments = this.MergeArguments(arguments);
+        this.ThrowIfDeleted();
 
-            return AssistantThreadActions.InvokeStreamingAsync(this, this.Client, threadId, messages, options, this.Logger, kernel, arguments, cancellationToken);
-        }
+        kernel ??= this.Kernel;
+        arguments = this.MergeArguments(arguments);
+
+        return AssistantThreadActions.InvokeStreamingAsync(this, this._client, threadId, messages, options, this.Logger, kernel, arguments, cancellationToken);
     }
 
     /// 
-    [Experimental("SKEXP0110")]
     protected override IEnumerable GetChannelKeys()
     {
         // Distinguish from other channel types.
         yield return typeof(OpenAIAssistantChannel).FullName!;
-        // Distinguish based on client instance.
-        yield return this.Client.GetHashCode().ToString();
+
+        foreach (string key in this._channelKeys)
+        {
+            yield return key;
+        }
     }
 
     /// 
-    [Experimental("SKEXP0110")]
     protected override async Task CreateChannelAsync(CancellationToken cancellationToken)
     {
         this.Logger.LogOpenAIAssistantAgentCreatingChannel(nameof(CreateChannelAsync), nameof(OpenAIAssistantChannel));
 
-        AssistantThread thread = await this.Client.CreateThreadAsync(options: null, cancellationToken).ConfigureAwait(false);
+        AssistantThread thread = await this._client.CreateThreadAsync(options: null, cancellationToken).ConfigureAwait(false);
 
         this.Logger.LogInformation("[{MethodName}] Created assistant thread: {ThreadId}", nameof(CreateChannelAsync), thread.Id);
 
         OpenAIAssistantChannel channel =
-            new(this.Client, thread.Id)
+            new(this._client, thread.Id)
             {
-                Logger = this.ActiveLoggerFactory.CreateLogger()
+                Logger = this.LoggerFactory.CreateLogger()
             };
 
         this.Logger.LogOpenAIAssistantAgentCreatedChannel(nameof(CreateChannelAsync), nameof(OpenAIAssistantChannel), thread.Id);
@@ -505,25 +437,52 @@ protected override async Task CreateChannelAsync(CancellationToken
         return channel;
     }
 
+    internal void ThrowIfDeleted()
+    {
+        if (this.IsDeleted)
+        {
+            throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {this.Id}.");
+        }
+    }
+
     internal Task GetInstructionsAsync(Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken) =>
         this.FormatInstructionsAsync(kernel, arguments, cancellationToken);
 
     /// 
-    [Experimental("SKEXP0110")]
     protected override async Task RestoreChannelAsync(string channelState, CancellationToken cancellationToken)
     {
         string threadId = channelState;
 
         this.Logger.LogOpenAIAssistantAgentRestoringChannel(nameof(RestoreChannelAsync), nameof(OpenAIAssistantChannel), threadId);
 
-        AssistantThread thread = await this.Client.GetThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
+        AssistantThread thread = await this._client.GetThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
 
         this.Logger.LogOpenAIAssistantAgentRestoredChannel(nameof(RestoreChannelAsync), nameof(OpenAIAssistantChannel), threadId);
 
-        return new OpenAIAssistantChannel(this.Client, thread.Id);
+        return new OpenAIAssistantChannel(this._client, thread.Id);
+    }
+
+    /// 
+    /// Initializes a new instance of the  class.
+    /// 
+    private OpenAIAssistantAgent(
+        Assistant model,
+        OpenAIClientProvider provider,
+        AssistantClient client)
+    {
+        this._provider = provider;
+        this._assistant = model;
+        this._client = provider.Client.GetAssistantClient();
+        this._channelKeys = provider.ConfigurationKeys.ToArray();
+
+        this.Definition = CreateAssistantDefinition(model);
+
+        this.Description = this._assistant.Description;
+        this.Id = this._assistant.Id;
+        this.Name = this._assistant.Name;
+        this.Instructions = this._assistant.Instructions;
     }
 
-    [Obsolete]
     private static OpenAIAssistantDefinition CreateAssistantDefinition(Assistant model)
     {
         OpenAIAssistantExecutionOptions? options = null;
@@ -554,4 +513,9 @@ private static OpenAIAssistantDefinition CreateAssistantDefinition(Assistant mod
             ExecutionOptions = options,
         };
     }
+
+    private static AssistantClient CreateClient(OpenAIClientProvider config)
+    {
+        return config.Client.GetAssistantClient();
+    }
 }
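
The rewritten agent above routes thread management (CreateThreadAsync, AddChatMessageAsync, InvokeAsync, DeleteThreadAsync) and file upload through the agent itself. Below is a minimal usage sketch of that surface, assuming the generic return types elided in this patch (Task<OpenAIAssistantAgent> from CreateAsync, Task<string> from CreateThreadAsync); the API key, model id, and prompt text are placeholders.

    using System;
    using System.ClientModel;
    using Microsoft.SemanticKernel;
    using Microsoft.SemanticKernel.Agents.OpenAI;
    using Microsoft.SemanticKernel.ChatCompletion;

    // Placeholders: the key and model id are illustrative only.
    OpenAIClientProvider provider = OpenAIClientProvider.ForOpenAI(new ApiKeyCredential("sk-..."));

    OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateAsync(
        provider,
        new OpenAIAssistantDefinition("gpt-4o") { Name = "Helper", Instructions = "Answer briefly." },
        new Kernel());

    // Thread lifecycle is driven through the agent in this version of the API.
    string threadId = await agent.CreateThreadAsync();
    await agent.AddChatMessageAsync(threadId, new ChatMessageContent(AuthorRole.User, "Hello"));

    await foreach (ChatMessageContent reply in agent.InvokeAsync(threadId))
    {
        Console.WriteLine(reply.Content);
    }

    await agent.DeleteThreadAsync(threadId);
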
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs
index 5642017c89dd..c2247ec11e88 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs
@@ -1,7 +1,5 @@
 // Copyright (c) Microsoft. All rights reserved.
-using System;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Text.Json.Serialization;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -9,81 +7,75 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
 /// 
 /// Defines the capabilities of an assistant.
 /// 
-[Experimental("SKEXP0110")]
-[Obsolete("Use the OpenAI.Assistants.AssistantClient.CreateAssistantAsync() to create an assistant definition.")]
 public class OpenAIAssistantCapabilities
 {
     /// 
-    /// Gets the AI model targeted by the agent.
+    /// Identifies the AI model targeted by the agent.
     /// 
     public string ModelId { get; }
 
     /// 
-    /// Gets the assistant's unique ID. (Ignored on create.)
+    /// The assistant's unique ID. (Ignored on create.)
     /// 
     public string Id { get; init; } = string.Empty;
 
     /// 
-    /// Gets optional file IDs made available to the code-interpreter tool, if enabled.
+    /// Optional file-ids made available to the code_interpreter tool, if enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IReadOnlyList? CodeInterpreterFileIds { get; init; }
 
     /// 
-    /// Gets a value that indicates whether the code-interpreter tool is enabled.
+    /// Set if code-interpreter is enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
     public bool EnableCodeInterpreter { get; init; }
 
     /// 
-    /// Gets a value that indicates whether the file_search tool is enabled.
+    /// Set if file-search is enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
     public bool EnableFileSearch { get; init; }
 
     /// 
-    /// Gets a value that indicates whether the JSON response format is enabled.
+    /// Set if json response-format is enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
     public bool EnableJsonResponse { get; init; }
 
     /// 
-    /// Gets a set of up to 16 key/value pairs that can be attached to an agent, used for
-    /// storing additional information about that object in a structured format.
+    /// A set of up to 16 key/value pairs that can be attached to an agent, used for
+    /// storing additional information about that object in a structured format. Keys
+    /// may be up to 64 characters in length and values may be up to 512 characters in length.
     /// 
-    /// 
-    /// Keys can be up to 64 characters in length, and values can be up to 512 characters in length.
-    /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IReadOnlyDictionary? Metadata { get; init; }
 
     /// 
-    /// Gets the sampling temperature to use, between 0 and 2.
+    /// The sampling temperature to use, between 0 and 2.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public float? Temperature { get; init; }
 
     /// 
-    /// Gets the probability mass of tokens whose results are considered in nucleus sampling.
+    /// An alternative to sampling with temperature, called nucleus sampling, where the model
+    /// considers the results of the tokens with top_p probability mass.
+    /// So 0.1 means only the tokens comprising the top 10% probability mass are considered.
     /// 
     /// 
-    /// It's recommended to set this property or , but not both.
-    ///
-    /// Nucleus sampling is an alternative to sampling with temperature where the model
-    /// considers the results of the tokens with  probability mass.
-    /// For example, 0.1 means only the tokens comprising the top 10% probability mass are considered.
+    /// Recommended to set this or temperature but not both.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public float? TopP { get; init; }
 
     /// 
-    /// Gets the vector store ID. Requires file-search if specified.
+    /// The vector store id; requires file-search if specified.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? VectorStoreId { get; init; }
 
     /// 
-    /// Gets the default execution options for each agent invocation.
+    /// Default execution options for each agent invocation.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public OpenAIAssistantExecutionOptions? ExecutionOptions { get; init; }
@@ -91,7 +83,7 @@ public class OpenAIAssistantCapabilities
     /// 
     /// Initializes a new instance of the  class.
     /// 
-    /// The targeted model.
+    /// The targeted model
     [JsonConstructor]
     public OpenAIAssistantCapabilities(string modelId)
     {
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
index 4b91bac74178..506f0a837ebf 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
@@ -1,11 +1,8 @@
 // Copyright (c) Microsoft. All rights reserved.
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Threading;
 using System.Threading.Tasks;
-using Microsoft.SemanticKernel.Agents.Extensions;
 using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
-using Microsoft.SemanticKernel.Diagnostics;
 using OpenAI.Assistants;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -13,7 +10,6 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
 /// 
 /// A  specialization for use with .
 /// 
-[Experimental("SKEXP0110")]
 internal sealed class OpenAIAssistantChannel(AssistantClient client, string threadId)
     : AgentChannel
 {
@@ -34,19 +30,17 @@ protected override async Task ReceiveAsync(IEnumerable histo
         OpenAIAssistantAgent agent,
         CancellationToken cancellationToken)
     {
-        return ActivityExtensions.RunWithActivityAsync(
-            () => ModelDiagnostics.StartAgentInvocationActivity(agent.Id, agent.GetDisplayName(), agent.Description),
-            () => AssistantThreadActions.InvokeAsync(agent, this._client, this._threadId, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken),
-            cancellationToken);
+        agent.ThrowIfDeleted();
+
+        return AssistantThreadActions.InvokeAsync(agent, this._client, this._threadId, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken);
     }
 
     /// 
     protected override IAsyncEnumerable InvokeStreamingAsync(OpenAIAssistantAgent agent, IList messages, CancellationToken cancellationToken = default)
     {
-        return ActivityExtensions.RunWithActivityAsync(
-            () => ModelDiagnostics.StartAgentInvocationActivity(agent.Id, agent.GetDisplayName(), agent.Description),
-            () => AssistantThreadActions.InvokeStreamingAsync(agent, this._client, this._threadId, messages, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken),
-            cancellationToken);
+        agent.ThrowIfDeleted();
+
+        return AssistantThreadActions.InvokeStreamingAsync(agent, this._client, this._threadId, messages, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken);
     }
 
     /// 
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs
index 9560857b101e..79ad3f98f03e 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs
@@ -1,6 +1,4 @@
 // Copyright (c) Microsoft. All rights reserved.
-using System;
-using System.Diagnostics.CodeAnalysis;
 using System.Text.Json.Serialization;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -8,31 +6,29 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
 /// 
 /// Defines an assistant.
 /// 
-[Experimental("SKEXP0110")]
-[Obsolete("Use the OpenAI.Assistants.AssistantClient.CreateAssistantAsync() to create an assistant definition.")]
 public sealed class OpenAIAssistantDefinition : OpenAIAssistantCapabilities
 {
     /// 
-    /// Gets the description of the assistant.
+    /// The description of the assistant.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? Description { get; init; }
 
     /// 
-    /// Gets the system instructions for the assistant to use.
+    /// The system instructions for the assistant to use.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? Instructions { get; init; }
 
     /// 
-    /// Gets the name of the assistant.
+    /// The name of the assistant.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? Name { get; init; }
 
     /// 
-    /// Gets the captured template format for the assistant if needed for agent retrieval
-    /// ().
+    /// Provide the captured template format for the assistant if needed for agent retrieval.
+    /// ()
     /// 
     [JsonIgnore]
     public string? TemplateFactoryFormat
@@ -53,7 +49,7 @@ public string? TemplateFactoryFormat
     /// 
     /// Initializes a new instance of the  class.
     /// 
-    /// The targeted model.
+    /// The targeted model
     [JsonConstructor]
     public OpenAIAssistantDefinition(string modelId)
         : base(modelId) { }
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs
index ecfd4e52fa58..845cecb0956c 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs
@@ -1,6 +1,4 @@
 // Copyright (c) Microsoft. All rights reserved.
-using System;
-using System.Diagnostics.CodeAnalysis;
 using System.Text.Json.Serialization;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -9,41 +7,37 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
 /// Defines assistant execution options for each invocation.
 /// 
 /// 
-/// These options are persisted as a single entry of the assistant's metadata with key: "__run_options".
+/// These options are persisted as a single entry of the assistant's metadata with key: "__run_options"
 /// 
-[Experimental("SKEXP0110")]
-[Obsolete("Use RunCreationOptions to specify assistant invocation behavior.")]
 public sealed class OpenAIAssistantExecutionOptions
 {
     /// 
-    /// Gets the additional instructions.
+    /// Appends additional instructions.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? AdditionalInstructions { get; init; }
 
     /// 
-    /// Gets the maximum number of completion tokens that can be used over the course of the run.
+    /// The maximum number of completion tokens that may be used over the course of the run.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? MaxCompletionTokens { get; init; }
 
     /// 
-    /// Gets the maximum number of prompt tokens that can be used over the course of the run.
+    /// The maximum number of prompt tokens that may be used over the course of the run.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? MaxPromptTokens { get; init; }
 
     /// 
-    /// Gets a value that indicates whether parallel function calling is enabled during tool use.
+    /// Enables parallel function calling during tool use. Enabled by default.
+    /// Use this property to disable.
     /// 
-    /// 
-    ///  if parallel function calling is enabled during tool use; otherwise, . The default is .
-    /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public bool? ParallelToolCallsEnabled { get; init; }
 
     /// 
-    /// Gets the number of recent messages that the thread will be truncated to.
+    /// When set, the thread will be truncated to the N most recent messages.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? TruncationMessageCount { get; init; }
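
These execution options hang off the ExecutionOptions property on OpenAIAssistantCapabilities shown earlier and become the defaults for every run. A small sketch under that assumption; the model id and limit values are illustrative only.

    OpenAIAssistantDefinition definition = new("gpt-4o")   // placeholder model id
    {
        Name = "Helper",
        ExecutionOptions = new OpenAIAssistantExecutionOptions
        {
            MaxCompletionTokens = 512,
            ParallelToolCallsEnabled = false,
            TruncationMessageCount = 20,
        },
    };
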
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs
index 7aec34ee15ed..9b02cb9faf23 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs
@@ -1,113 +1,104 @@
 // Copyright (c) Microsoft. All rights reserved.
-using System;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Text.Json.Serialization;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
 
 /// 
-/// Defines per-invocation execution options that override the assistant definition.
+/// Defines per invocation execution options that override the assistant definition.
 /// 
 /// 
-/// This class is not applicable to  usage.
+/// Not applicable to  usage.
 /// 
-[Experimental("SKEXP0110")]
-[Obsolete("Use RunCreationOptions to specify assistant invocation behavior.")]
 public sealed class OpenAIAssistantInvocationOptions
 {
     /// 
-    /// Gets the AI model targeted by the agent.
+    /// Override the AI model targeted by the agent.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? ModelName { get; init; }
 
     /// 
-    /// Gets the additional instructions.
+    /// Appends additional instructions.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? AdditionalInstructions { get; init; }
 
     /// 
-    /// Gets additional messages to add to the thread.
+    /// Additional messages to add to the thread.
     /// 
     /// 
-    /// This property only supports messages with role = User or Assistant.
+    /// Only supports messages with role = User or Assistant:
+    /// https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IReadOnlyList? AdditionalMessages { get; init; }
 
     /// 
-    /// Gets a value that indicates if the code_interpreter tool is enabled.
+    /// Set if code_interpreter tool is enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
     public bool EnableCodeInterpreter { get; init; }
 
     /// 
-    /// Gets a value that indicates if the file_search tool is enabled.
+    /// Set if file_search tool is enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
     public bool EnableFileSearch { get; init; }
 
     /// 
-    /// Gets a value that indicates if the JSON response format is enabled.
+    /// Set if json response-format is enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public bool? EnableJsonResponse { get; init; }
 
     /// 
-    /// Gets the maximum number of completion tokens that can be used over the course of the run.
+    /// The maximum number of completion tokens that may be used over the course of the run.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? MaxCompletionTokens { get; init; }
 
     /// 
-    /// Gets the maximum number of prompt tokens that can be used over the course of the run.
+    /// The maximum number of prompt tokens that may be used over the course of the run.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? MaxPromptTokens { get; init; }
 
     /// 
-    /// Gets a value that indicates whether parallel function calling is enabled during tool use.
+    /// Enables parallel function calling during tool use. Enabled by default.
+    /// Use this property to disable.
     /// 
-    /// 
-    ///  if parallel function calling is enabled during tool use; otherwise, . The default is .
-    /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public bool? ParallelToolCallsEnabled { get; init; }
 
     /// 
-    /// Gets the number of recent messages that the thread will be truncated to.
+    /// When set, the thread will be truncated to the N most recent messages.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? TruncationMessageCount { get; init; }
 
     /// 
-    /// Gets the sampling temperature to use, between 0 and 2.
+    /// The sampling temperature to use, between 0 and 2.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public float? Temperature { get; init; }
 
     /// 
-    /// Gets the probability mass of tokens whose results are considered in nucleus sampling.
+    /// An alternative to sampling with temperature, called nucleus sampling, where the model
+    /// considers the results of the tokens with top_p probability mass.
+    /// So 0.1 means only the tokens comprising the top 10% probability mass are considered.
     /// 
     /// 
-    /// It's recommended to set this property or , but not both.
-    ///
-    /// Nucleus sampling is an alternative to sampling with temperature where the model
-    /// considers the results of the tokens with  probability mass.
-    /// For example, 0.1 means only the tokens comprising the top 10% probability mass are considered.
+    /// Recommended to set this or temperature but not both.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public float? TopP { get; init; }
 
     /// 
-    /// Gets a set of up to 16 key/value pairs that can be attached to an agent, used for
-    /// storing additional information about that object in a structured format.
+    /// A set of up to 16 key/value pairs that can be attached to an agent, used for
+    /// storing additional information about that object in a structured format. Keys
+    /// may be up to 64 characters in length and values may be up to 512 characters in length.
     /// 
-    /// 
-    /// Keys can be up to 64 characters in length, and values can be up to 512 characters in length.
-    /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IReadOnlyDictionary? Metadata { get; init; }
 }
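
These per-invocation options feed the InvokeAsync and InvokeStreamingAsync overloads shown in the agent diff above and override the definition for a single run. A hedged sketch, reusing the agent and threadId names from the earlier sketch; the property values are illustrative, not recommendations.

    using System.Collections.Generic;

    OpenAIAssistantInvocationOptions runOptions = new()
    {
        ModelName = "gpt-4o-mini",                 // placeholder model override
        AdditionalInstructions = "Answer in one sentence.",
        EnableCodeInterpreter = true,
        MaxCompletionTokens = 256,
        Temperature = 0.2f,
        Metadata = new Dictionary<string, string> { ["purpose"] = "demo" },
    };

    await foreach (ChatMessageContent message in agent.InvokeAsync(threadId, runOptions))
    {
        Console.WriteLine(message.Content);
    }
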
diff --git a/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
index eccb9509ffd1..4eb09eed7889 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
@@ -3,7 +3,6 @@
 using System.ClientModel;
 using System.ClientModel.Primitives;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 using System.Net.Http;
 using System.Threading;
@@ -11,49 +10,41 @@
 using Azure.Core;
 using Microsoft.SemanticKernel.Http;
 using OpenAI;
-using OpenAI.Assistants;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
 
 /// 
 /// Provides an  for use by .
 /// 
-[Experimental("SKEXP0110")]
 public sealed class OpenAIClientProvider
 {
     /// 
-    /// Specifies a key that avoids an exception from OpenAI Client when a custom endpoint is provided without an API key.
+    /// Avoids an exception from OpenAI Client when a custom endpoint is provided without an API key.
     /// 
     private const string SingleSpaceKey = " ";
-    private AssistantClient? _assistantClient;
 
     /// 
-    /// Gets an active client instance.
+    /// An active client instance.
     /// 
     public OpenAIClient Client { get; }
 
     /// 
-    /// Gets an active assistant client instance.
-    /// 
-    public AssistantClient AssistantClient => this._assistantClient ??= this.Client.GetAssistantClient();
-
-    /// 
-    /// Gets configuration keys required for  management.
+    /// Configuration keys required for  management.
     /// 
     internal IReadOnlyList ConfigurationKeys { get; }
 
     private OpenAIClientProvider(OpenAIClient client, IEnumerable keys)
     {
         this.Client = client;
-        this.ConfigurationKeys = [.. keys];
+        this.ConfigurationKeys = keys.ToArray();
     }
 
     /// 
-    /// Produces an  based on .
+    /// Produce a  based on .
     /// 
-    /// The API key.
-    /// The service endpoint.
-    /// A custom  for HTTP requests.
+    /// The API key
+    /// The service endpoint
+    /// Custom  for HTTP requests.
     public static OpenAIClientProvider ForAzureOpenAI(ApiKeyCredential apiKey, Uri endpoint, HttpClient? httpClient = null)
     {
         Verify.NotNull(apiKey, nameof(apiKey));
@@ -65,11 +56,11 @@ public static OpenAIClientProvider ForAzureOpenAI(ApiKeyCredential apiKey, Uri e
     }
 
     /// 
-    /// Produces an  based on .
+    /// Produce a  based on .
     /// 
-    /// The credentials.
-    /// The service endpoint.
-    /// A custom  for HTTP requests.
+    /// The credentials
+    /// The service endpoint
+    /// Custom  for HTTP requests.
     public static OpenAIClientProvider ForAzureOpenAI(TokenCredential credential, Uri endpoint, HttpClient? httpClient = null)
     {
         Verify.NotNull(credential, nameof(credential));
@@ -81,10 +72,10 @@ public static OpenAIClientProvider ForAzureOpenAI(TokenCredential credential, Ur
     }
 
     /// 
-    /// Produces an  based on .
+    /// Produce a  based on .
     /// 
-    /// An optional endpoint.
-    /// A custom  for HTTP requests.
+    /// An optional endpoint
+    /// Custom  for HTTP requests.
     public static OpenAIClientProvider ForOpenAI(Uri? endpoint = null, HttpClient? httpClient = null)
     {
         OpenAIClientOptions clientOptions = CreateOpenAIClientOptions(endpoint, httpClient);
@@ -92,11 +83,11 @@ public static OpenAIClientProvider ForOpenAI(Uri? endpoint = null, HttpClient? h
     }
 
     /// 
-    /// Produces an  based on .
+    /// Produce a  based on .
     /// 
-    /// The API key.
-    /// An optional endpoint.
-    /// A custom  for HTTP requests.
+    /// The API key
+    /// An optional endpoint
+    /// Custom  for HTTP requests.
     public static OpenAIClientProvider ForOpenAI(ApiKeyCredential apiKey, Uri? endpoint = null, HttpClient? httpClient = null)
     {
         OpenAIClientOptions clientOptions = CreateOpenAIClientOptions(endpoint, httpClient);
@@ -104,7 +95,7 @@ public static OpenAIClientProvider ForOpenAI(ApiKeyCredential apiKey, Uri? endpo
     }
 
     /// 
-    /// Provides a client instance directly.
+    /// Directly provide a client instance.
     /// 
     public static OpenAIClientProvider FromClient(OpenAIClient client)
     {
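
The provider factories above cover Azure OpenAI (API key or TokenCredential), the public OpenAI endpoint, and custom endpoints with an optional HttpClient. A brief sketch of two common shapes; the endpoint URLs are placeholders, and Azure.Identity's DefaultAzureCredential is only one possible TokenCredential.

    using System.Net.Http;
    using Azure.Identity;   // assumed; any TokenCredential works

    OpenAIClientProvider azureProvider = OpenAIClientProvider.ForAzureOpenAI(
        new DefaultAzureCredential(),
        new Uri("https://my-resource.openai.azure.com"));

    // Custom endpoint with a caller-owned HttpClient (both values are placeholders).
    using HttpClient httpClient = new();
    OpenAIClientProvider proxiedProvider = OpenAIClientProvider.ForOpenAI(
        new ApiKeyCredential("sk-..."),
        new Uri("https://my-proxy.example.com/v1"),
        httpClient);
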
diff --git a/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs
index 5be75f860eb8..18bce010a328 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs
@@ -1,46 +1,41 @@
 // Copyright (c) Microsoft. All rights reserved.
-using System;
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Text.Json.Serialization;
 
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
 
 /// 
-/// Specifies thread creation options.
+/// Thread creation options.
 /// 
-[Experimental("SKEXP0110")]
-[Obsolete("Use the OpenAI.Assistants.AssistantClient.CreateThreadAsync() to create a thread.")]
 public sealed class OpenAIThreadCreationOptions
 {
     /// 
-    /// Gets the optional file IDs made available to the code_interpreter tool, if enabled.
+    /// Optional file-ids made available to the code_interpreter tool, if enabled.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IReadOnlyList? CodeInterpreterFileIds { get; init; }
 
     /// 
-    /// Gets the optional messages to initialize the thread with.
+    /// Optional messages to initialize the thread with.
     /// 
     /// 
-    /// This property only supports messages with role = User or Assistant.
+    /// Only supports messages with role = User or Assistant:
+    /// https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IReadOnlyList? Messages { get; init; }
 
     /// 
-    /// Gets the vector store ID that enables file-search.
+    /// The vector store id; enables file-search when specified.
     /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? VectorStoreId { get; init; }
 
     /// 
-    /// Gets a set of up to 16 key/value pairs that can be attached to an agent, used for
-    /// storing additional information about that object in a structured format.
+    /// A set of up to 16 key/value pairs that can be attached to an agent, used for
+    /// storing additional information about that object in a structured format. Keys
+    /// may be up to 64 characters in length and values may be up to 512 characters in length.
     /// 
-    /// 
-    /// Keys can be up to 64 characters in length, and values can be up to 512 characters in length.
-    /// 
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IReadOnlyDictionary? Metadata { get; init; }
 }
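
These options pair with the agent's CreateThreadAsync(OpenAIThreadCreationOptions?, CancellationToken) overload shown earlier. A small sketch that seeds a thread with a message and a code-interpreter file, reusing the agent from the first sketch; the file path and metadata value are placeholders (System.IO is assumed for File.OpenRead).

    using System.IO;

    // Upload a file first, then seed the new thread with it.
    string fileId = await agent.UploadFileAsync(File.OpenRead("data.csv"), "data.csv");

    OpenAIThreadCreationOptions threadOptions = new()
    {
        Messages = new[] { new ChatMessageContent(AuthorRole.User, "Summarize the attached data.") },
        CodeInterpreterFileIds = new[] { fileId },
        Metadata = new Dictionary<string, string> { ["session"] = "demo" },
    };

    string seededThreadId = await agent.CreateThreadAsync(threadOptions);
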
diff --git a/dotnet/src/Plugins/Plugins.AI/AssemblyInfo.cs b/dotnet/src/Agents/OpenAI/Properties/AssemblyInfo.cs
similarity index 78%
rename from dotnet/src/Plugins/Plugins.AI/AssemblyInfo.cs
rename to dotnet/src/Agents/OpenAI/Properties/AssemblyInfo.cs
index 0aef47e394f8..bd1c0f58314e 100644
--- a/dotnet/src/Plugins/Plugins.AI/AssemblyInfo.cs
+++ b/dotnet/src/Agents/OpenAI/Properties/AssemblyInfo.cs
@@ -3,4 +3,4 @@
 using System.Diagnostics.CodeAnalysis;
 
 // This assembly is currently experimental.
-[assembly: Experimental("SKEXP0050")]
+[assembly: Experimental("SKEXP0110")]
diff --git a/dotnet/src/Agents/OpenAI/RunPollingOptions.cs b/dotnet/src/Agents/OpenAI/RunPollingOptions.cs
index 24c514686664..b108048f32d3 100644
--- a/dotnet/src/Agents/OpenAI/RunPollingOptions.cs
+++ b/dotnet/src/Agents/OpenAI/RunPollingOptions.cs
@@ -4,68 +4,68 @@
 namespace Microsoft.SemanticKernel.Agents.OpenAI;
 
 /// 
-/// Provides configuration and defaults associated with polling behavior for Assistant API run processing.
+/// Configuration and defaults associated with polling behavior for Assistant API run processing.
 /// 
 public sealed class RunPollingOptions
 {
     /// 
-    /// Gets the default maximum number or retries when monitoring thread-run status.
+    /// The default maximum number of retries when monitoring thread-run status.
     /// 
     public static int DefaultMaximumRetryCount { get; } = 3;
 
     /// 
-    /// Gets the default polling interval when monitoring thread-run status.
+    /// The default polling interval when monitoring thread-run status.
     /// 
     public static TimeSpan DefaultPollingInterval { get; } = TimeSpan.FromMilliseconds(500);
 
     /// 
-    /// Gets the default back-off interval when monitoring thread-run status.
+    /// The default back-off interval when monitoring thread-run status.
     /// 
     public static TimeSpan DefaultPollingBackoff { get; } = TimeSpan.FromSeconds(1);
 
     /// 
-    /// Gets the default number of polling iterations before using .
+    /// The default number of polling iterations before using .
     /// 
     public static int DefaultPollingBackoffThreshold { get; } = 2;
 
     /// 
-    /// Gets the default polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
+    /// The default polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
     /// 
     public static TimeSpan DefaultMessageSynchronizationDelay { get; } = TimeSpan.FromMilliseconds(500);
 
     /// 
-    /// Gets or sets the maximum retry count when polling thread-run status.
+    /// The maximum retry count when polling thread-run status.
     /// 
     /// 
-    /// This value only affects failures that have the potential to be transient.
-    /// Explicit server error responses will result in immediate failure.
+    /// Only affects failures that have the potential to be transient. Explicit server error responses
+    /// will result in immediate failure.
     /// 
     public int MaximumRetryCount { get; set; } = DefaultMaximumRetryCount;
 
     /// 
-    /// Gets or sets the polling interval when monitoring thread-run status.
+    /// The polling interval when monitoring thread-run status.
     /// 
     public TimeSpan RunPollingInterval { get; set; } = DefaultPollingInterval;
 
     /// 
-    /// Gets or sets the back-off interval when monitoring thread-run status.
+    /// The back-off interval when monitoring thread-run status.
     /// 
     public TimeSpan RunPollingBackoff { get; set; } = DefaultPollingBackoff;
 
     /// 
-    /// Gets or sets the number of polling iterations before using .
+    /// The number of polling iterations before using .
     /// 
     public int RunPollingBackoffThreshold { get; set; } = DefaultPollingBackoffThreshold;
 
     /// 
-    /// Gets or sets the polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
+    /// The polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
     /// 
     public TimeSpan MessageSynchronizationDelay { get; set; } = DefaultMessageSynchronizationDelay;
 
     /// 
     /// Gets the polling interval for the specified iteration count.
     /// 
-    /// The number of polling iterations already attempted.
+    /// The number of polling iterations already attempted.
     public TimeSpan GetPollingInterval(int iterationCount) =>
         iterationCount > this.RunPollingBackoffThreshold ? this.RunPollingBackoff : this.RunPollingInterval;
 }
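A minimal usage sketch of the back-off behavior documented above, assuming the defaults shown in this file (500 ms interval, 1 s back-off, threshold of 2); illustrative only, not part of the patch:

    using System;
    using Microsoft.SemanticKernel.Agents.OpenAI;

    RunPollingOptions options = new();  // uses DefaultPollingInterval, DefaultPollingBackoff, DefaultPollingBackoffThreshold

    // GetPollingInterval returns the base interval until the iteration count exceeds the threshold,
    // then switches to the back-off interval.
    Console.WriteLine(options.GetPollingInterval(2));  // 00:00:00.5000000 (at the threshold, no back-off yet)
    Console.WriteLine(options.GetPollingInterval(3));  // 00:00:01 (past the threshold, backed off)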
diff --git a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj
index 752bd3c1ebcb..b773878eb397 100644
--- a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj
+++ b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj
@@ -8,20 +8,11 @@
     true
     false
     12
-    $(NoWarn);CA2007,CA1812,CA1861,CA1063,CS0618,VSTHRD111,SKEXP0001,SKEXP0050,SKEXP0110;OPENAI001
+    $(NoWarn);CA2007,CA1812,CA1861,CA1063,VSTHRD111,SKEXP0001,SKEXP0050,SKEXP0110;OPENAI001
   
 
   
-    
-    
-    
-  
-
-  
-    
     
-    
-    
     
     
     
@@ -32,16 +23,15 @@
       runtime; build; native; contentfiles; analyzers; buildtransitive
       all
     
+    
+    
   
 
   
-    
     
     
     
     
-    
-    
   
 
   
@@ -49,4 +39,4 @@
     
   
 
-
\ No newline at end of file
+
diff --git a/dotnet/src/Agents/UnitTests/AzureAI/AzureAIAssistantInvocationOptionsTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/AzureAIAssistantInvocationOptionsTests.cs
deleted file mode 100644
index 5eb1ad98e687..000000000000
--- a/dotnet/src/Agents/UnitTests/AzureAI/AzureAIAssistantInvocationOptionsTests.cs
+++ /dev/null
@@ -1,113 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
-using System.Text.Json;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.AzureAI;
-using Microsoft.SemanticKernel.ChatCompletion;
-using SemanticKernel.Agents.UnitTests.Test;
-using Xunit;
-
-namespace SemanticKernel.Agents.UnitTests.AzureAI;
-
-/// 
-/// Unit testing of .
-/// 
-public class AzureAIAssistantInvocationOptionsTests
-{
-    /// 
-    /// Verify initial state.
-    /// 
-    [Fact]
-    public void OpenAIAssistantInvocationOptionsInitialState()
-    {
-        // Arrange
-        AzureAIInvocationOptions options = new();
-
-        // Assert
-        Assert.Null(options.ModelName);
-        Assert.Null(options.AdditionalInstructions);
-        Assert.Null(options.AdditionalMessages);
-        Assert.Null(options.Metadata);
-        Assert.Null(options.Temperature);
-        Assert.Null(options.TopP);
-        Assert.Null(options.ParallelToolCallsEnabled);
-        Assert.Null(options.MaxCompletionTokens);
-        Assert.Null(options.MaxPromptTokens);
-        Assert.Null(options.TruncationMessageCount);
-        Assert.Null(options.EnableJsonResponse);
-        Assert.False(options.EnableCodeInterpreter);
-        Assert.False(options.EnableFileSearch);
-
-        // Act and Assert
-        ValidateSerialization(options);
-    }
-
-    /// 
-    /// Verify initialization.
-    /// 
-    [Fact]
-    public void OpenAIAssistantInvocationOptionsAssignment()
-    {
-        // Arrange
-        AzureAIInvocationOptions options =
-            new()
-            {
-                ModelName = "testmodel",
-                AdditionalInstructions = "test instructions",
-                AdditionalMessages = [
-                    new ChatMessageContent(AuthorRole.User, "test message")
-                ],
-                Metadata = new Dictionary() { { "a", "1" } },
-                MaxCompletionTokens = 1000,
-                MaxPromptTokens = 1000,
-                ParallelToolCallsEnabled = false,
-                TruncationMessageCount = 12,
-                Temperature = 2,
-                TopP = 0,
-                EnableCodeInterpreter = true,
-                EnableJsonResponse = true,
-                EnableFileSearch = true,
-            };
-
-        // Assert
-        Assert.Equal("testmodel", options.ModelName);
-        Assert.Equal("test instructions", options.AdditionalInstructions);
-        Assert.Single(options.AdditionalMessages);
-        Assert.Equal(2, options.Temperature);
-        Assert.Equal(0, options.TopP);
-        Assert.Equal(1000, options.MaxCompletionTokens);
-        Assert.Equal(1000, options.MaxPromptTokens);
-        Assert.Equal(12, options.TruncationMessageCount);
-        Assert.False(options.ParallelToolCallsEnabled);
-        Assert.Single(options.Metadata);
-        Assert.True(options.EnableCodeInterpreter);
-        Assert.True(options.EnableJsonResponse);
-        Assert.True(options.EnableFileSearch);
-
-        // Act and Assert
-        ValidateSerialization(options);
-    }
-
-    private static void ValidateSerialization(AzureAIInvocationOptions source)
-    {
-        // Act
-        string json = JsonSerializer.Serialize(source);
-
-        AzureAIInvocationOptions? target = JsonSerializer.Deserialize(json);
-
-        // Assert
-        Assert.NotNull(target);
-        Assert.Equal(source.AdditionalInstructions, target.AdditionalInstructions);
-        Assert.Equivalent(source.AdditionalMessages, target.AdditionalMessages);
-        Assert.Equal(source.ModelName, target.ModelName);
-        Assert.Equal(source.Temperature, target.Temperature);
-        Assert.Equal(source.TopP, target.TopP);
-        Assert.Equal(source.MaxCompletionTokens, target.MaxCompletionTokens);
-        Assert.Equal(source.MaxPromptTokens, target.MaxPromptTokens);
-        Assert.Equal(source.TruncationMessageCount, target.TruncationMessageCount);
-        Assert.Equal(source.EnableCodeInterpreter, target.EnableCodeInterpreter);
-        Assert.Equal(source.EnableJsonResponse, target.EnableJsonResponse);
-        Assert.Equal(source.EnableFileSearch, target.EnableFileSearch);
-        AssertCollection.Equal(source.Metadata, target.Metadata);
-    }
-}
diff --git a/dotnet/src/Agents/UnitTests/AzureAI/AzureAIClientProviderTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/AzureAIClientProviderTests.cs
deleted file mode 100644
index 96ddfb046896..000000000000
--- a/dotnet/src/Agents/UnitTests/AzureAI/AzureAIClientProviderTests.cs
+++ /dev/null
@@ -1,72 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System;
-using System.Net.Http;
-using Azure.AI.Projects;
-using Azure.Identity;
-using Microsoft.SemanticKernel.Agents.AzureAI;
-using Moq;
-using Xunit;
-
-namespace SemanticKernel.Agents.UnitTests.AzureAI;
-
-/// 
-/// Unit testing of .
-/// 
-public class AzureAIClientProviderTests
-{
-    /// 
-    /// Verify that provisioning of client for Azure OpenAI.
-    /// 
-    [Fact]
-    public void VerifyAzureAIClientProviderDefault()
-    {
-        // Act
-        AzureAIClientProvider provider = AzureAIClientProvider.FromConnectionString("test;test;test;test", new AzureCliCredential());
-
-        // Assert
-        Assert.NotNull(provider.Client);
-    }
-
-    /// 
-    /// Verify that the factory can create a client with http proxy.
-    /// 
-    [Fact]
-    public void VerifyAzureAIClientProviderWithHttpClient()
-    {
-        // Arrange
-        using HttpClient httpClient = new() { BaseAddress = new Uri("http://myproxy:9819") };
-
-        // Act
-        AzureAIClientProvider provider = AzureAIClientProvider.FromConnectionString("test;test;test;test", new AzureCliCredential(), httpClient);
-
-        // Assert
-        Assert.NotNull(provider.Client);
-
-        // Arrange
-        using HttpClient httpClientWithHeaders = new() { BaseAddress = new Uri("http://myproxy:9819") };
-        httpClientWithHeaders.DefaultRequestHeaders.Add("X-Test", "Test");
-
-        // Act
-        AzureAIClientProvider providerWithHeaders = AzureAIClientProvider.FromConnectionString("test;test;test;test", new AzureCliCredential(), httpClientWithHeaders);
-
-        // Assert
-        Assert.NotNull(providerWithHeaders.Client);
-
-        Assert.NotEqual(provider.ConfigurationKeys.Count, providerWithHeaders.ConfigurationKeys.Count);
-    }
-
-    /// 
-    /// Verify that the factory can accept an client that already exists.
-    /// 
-    [Fact]
-    public void VerifyAzureAIClientProviderFromClient()
-    {
-        // Arrange
-        Mock mockClient = new();
-        AzureAIClientProvider provider = AzureAIClientProvider.FromClient(mockClient.Object);
-
-        // Assert
-        Assert.NotNull(provider.Client);
-        Assert.Equal(mockClient.Object, provider.Client);
-    }
-}
diff --git a/dotnet/src/Agents/UnitTests/AzureAI/Extensions/KernelFunctionExtensionsTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/Extensions/KernelFunctionExtensionsTests.cs
deleted file mode 100644
index cb8fe8415b97..000000000000
--- a/dotnet/src/Agents/UnitTests/AzureAI/Extensions/KernelFunctionExtensionsTests.cs
+++ /dev/null
@@ -1,64 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System;
-using System.ComponentModel;
-using Azure.AI.Projects;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.AzureAI;
-using Xunit;
-
-namespace SemanticKernel.Agents.UnitTests.OpeAzureAInAI.Extensions;
-
-/// 
-/// Unit testing of .
-/// 
-public class KernelFunctionExtensionsTests
-{
-    /// 
-    /// Verify conversion from  to .
-    /// 
-    [Fact]
-    public void VerifyKernelFunctionToFunctionTool()
-    {
-        // Arrange
-        KernelPlugin plugin = KernelPluginFactory.CreateFromType();
-
-        // Assert
-        Assert.Equal(2, plugin.FunctionCount);
-
-        // Arrange
-        KernelFunction f1 = plugin[nameof(TestPlugin.TestFunction1)];
-        KernelFunction f2 = plugin[nameof(TestPlugin.TestFunction2)];
-
-        // Act
-        FunctionToolDefinition definition1 = f1.ToToolDefinition("testplugin");
-
-        // Assert
-        Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction1)}", definition1.Name, StringComparison.Ordinal);
-        Assert.Equal("test description", definition1.Description);
-
-        // Act
-        FunctionToolDefinition definition2 = f2.ToToolDefinition("testplugin");
-
-        // Assert
-        Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction2)}", definition2.Name, StringComparison.Ordinal);
-        Assert.Equal("test description", definition2.Description);
-    }
-
-    /// 
-    /// Exists only for parsing.
-    /// 
-#pragma warning disable CA1812 // Avoid uninstantiated internal classes
-    private sealed class TestPlugin()
-#pragma warning restore CA1812 // Avoid uninstantiated internal classes
-    {
-        [KernelFunction]
-        [Description("test description")]
-        public void TestFunction1() { }
-
-        [KernelFunction]
-        [Description("test description")]
-#pragma warning disable IDE0060 // Unused parameter for mock kernel function
-        public void TestFunction2(string p1, bool p2, int p3, string[] p4, ConsoleColor p5, DateTime p6) { }
-#pragma warning restore IDE0060 // Unused parameter
-    }
-}
diff --git a/dotnet/src/Agents/UnitTests/AzureAI/Internal/AgentMessageFactoryTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/Internal/AgentMessageFactoryTests.cs
deleted file mode 100644
index c42de6fc38a2..000000000000
--- a/dotnet/src/Agents/UnitTests/AzureAI/Internal/AgentMessageFactoryTests.cs
+++ /dev/null
@@ -1,112 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System;
-using System.Linq;
-using Azure.AI.Projects;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.AzureAI.Internal;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Xunit;
-
-namespace SemanticKernel.Agents.UnitTests.AzureAI.Internal;
-
-/// 
-/// Unit testing of .
-/// 
-public class AgentMessageFactoryTests
-{
-    /// 
-    /// Verify options creation.
-    /// 
-    [Fact]
-    public void VerifyAssistantMessageAdapterGetMessageContentsWithText()
-    {
-        // Arrange
-        ChatMessageContent message = new(AuthorRole.User, items: [new TextContent("test")]);
-
-        // Act
-        ThreadMessageOptions[] contents = AgentMessageFactory.GetThreadMessages([message]).ToArray();
-
-        // Assert
-        Assert.NotNull(contents);
-        Assert.Single(contents);
-        Assert.NotNull(contents[0].Content);
-    }
-
-    /// 
-    /// Verify options creation.
-    /// 
-    [Fact]
-    public void VerifyAssistantMessageAdapterGetMessageWithImageUrl()
-    {
-        // Arrange
-        ChatMessageContent message = new(AuthorRole.User, items: [new ImageContent(new Uri("https://localhost/myimage.png"))]);
-
-        // Act
-        ThreadMessageOptions[] contents = AgentMessageFactory.GetThreadMessages([message]).ToArray();
-
-        // Assert
-        Assert.NotNull(contents);
-        Assert.Empty(contents);
-    }
-
-    /// 
-    /// Verify options creation.
-    /// 
-    [Fact]
-    public void VerifyAssistantMessageAdapterGetMessageWithImageData()
-    {
-        // Arrange
-        ChatMessageContent message = new(AuthorRole.User, items: [new ImageContent(new byte[] { 1, 2, 3 }, "image/png") { DataUri = "data:image/png;base64,MTIz" }]);
-
-        // Act
-        ThreadMessageOptions[] contents = AgentMessageFactory.GetThreadMessages([message]).ToArray();
-
-        // Assert
-        Assert.NotNull(contents);
-        Assert.Empty(contents);
-    }
-
-    /// 
-    /// Verify options creation.
-    /// 
-    [Fact]
-    public void VerifyAssistantMessageAdapterGetMessageWithImageFile()
-    {
-        // Arrange
-        ChatMessageContent message = new(AuthorRole.User, items: [new FileReferenceContent("file-id")]);
-
-        // Act
-        ThreadMessageOptions[] contents = AgentMessageFactory.GetThreadMessages([message]).ToArray();
-
-        // Assert
-        Assert.NotNull(contents);
-        Assert.Empty(contents);
-    }
-
-    /// 
-    /// Verify options creation.
-    /// 
-    [Fact]
-    public void VerifyAssistantMessageAdapterGetMessageWithAll()
-    {
-        // Arrange
-        ChatMessageContent message =
-            new(
-                AuthorRole.User,
-                items:
-                [
-                    new TextContent("test"),
-                    new ImageContent(new Uri("https://localhost/myimage.png")),
-                    new FileReferenceContent("file-id")
-                ]);
-
-        // Act
-        ThreadMessageOptions[] contents = AgentMessageFactory.GetThreadMessages([message]).ToArray();
-
-        // Assert
-        Assert.NotNull(contents);
-        Assert.Single(contents);
-        Assert.NotNull(contents[0].Content);
-        Assert.Single(contents[0].Attachments);
-    }
-}
diff --git a/dotnet/src/Agents/UnitTests/AzureAI/RunPollingOptionsTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/RunPollingOptionsTests.cs
deleted file mode 100644
index 9d1054ac9bb6..000000000000
--- a/dotnet/src/Agents/UnitTests/AzureAI/RunPollingOptionsTests.cs
+++ /dev/null
@@ -1,71 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System;
-using Microsoft.SemanticKernel.Agents.AzureAI;
-using Xunit;
-
-namespace SemanticKernel.Agents.UnitTests.AzureAI;
-
-/// 
-/// Unit testing of .
-/// 
-public class RunPollingOptionsTests
-{
-    /// 
-    /// Verify initial state.
-    /// 
-    [Fact]
-    public void RunPollingOptionsInitialStateTest()
-    {
-        // Arrange
-        RunPollingOptions options = new();
-
-        // Assert
-        Assert.Equal(RunPollingOptions.DefaultPollingInterval, options.RunPollingInterval);
-        Assert.Equal(RunPollingOptions.DefaultPollingBackoff, options.RunPollingBackoff);
-        Assert.Equal(RunPollingOptions.DefaultMessageSynchronizationDelay, options.MessageSynchronizationDelay);
-        Assert.Equal(RunPollingOptions.DefaultPollingBackoffThreshold, options.RunPollingBackoffThreshold);
-    }
-
-    /// s
-    /// Verify initialization.
-    /// 
-    [Fact]
-    public void RunPollingOptionsAssignmentTest()
-    {
-        // Arrange
-        RunPollingOptions options =
-            new()
-            {
-                RunPollingInterval = TimeSpan.FromSeconds(3),
-                RunPollingBackoff = TimeSpan.FromSeconds(4),
-                RunPollingBackoffThreshold = 8,
-                MessageSynchronizationDelay = TimeSpan.FromSeconds(5),
-            };
-
-        // Assert
-        Assert.Equal(3, options.RunPollingInterval.TotalSeconds);
-        Assert.Equal(4, options.RunPollingBackoff.TotalSeconds);
-        Assert.Equal(5, options.MessageSynchronizationDelay.TotalSeconds);
-        Assert.Equal(8, options.RunPollingBackoffThreshold);
-    }
-
-    /// s
-    /// Verify initialization.
-    /// 
-    [Fact]
-    public void RunPollingOptionsGetIntervalTest()
-    {
-        // Arrange
-        RunPollingOptions options =
-            new()
-            {
-                RunPollingInterval = TimeSpan.FromSeconds(3),
-                RunPollingBackoff = TimeSpan.FromSeconds(4),
-                RunPollingBackoffThreshold = 8,
-            };
-
-        // Assert
-        Assert.Equal(options.RunPollingInterval, options.GetPollingInterval(8));
-        Assert.Equal(options.RunPollingBackoff, options.GetPollingInterval(9));
-    }
-}
diff --git a/dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentChannelTests.cs b/dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentChannelTests.cs
deleted file mode 100644
index 03f1cfbbae1b..000000000000
--- a/dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentChannelTests.cs
+++ /dev/null
@@ -1,289 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.Collections.Generic;
-using System.Linq;
-using System.Threading;
-using System.Threading.Tasks;
-using Amazon.BedrockAgent;
-using Amazon.BedrockAgentRuntime;
-using Amazon.BedrockAgentRuntime.Model;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.Bedrock;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Moq;
-using Xunit;
-
-namespace SemanticKernel.Agents.UnitTests.Bedrock;
-
-/// 
-/// Unit testing of .
-/// 
-public class BedrockAgentChannelTests
-{
-    private readonly Amazon.BedrockAgent.Model.Agent _agentModel = new()
-    {
-        AgentId = "1234567890",
-        AgentName = "testName",
-        Description = "test description",
-        Instruction = "Instruction must have at least 40 characters",
-    };
-
-    /// 
-    /// Verify the simple scenario of receiving messages in a .
-    /// 
-    [Fact]
-    public async Task VerifyReceiveAsync()
-    {
-        // Arrange
-        BedrockAgentChannel channel = new();
-        List history = this.CreateNormalHistory();
-
-        // Act
-        await channel.ReceiveAsync(history);
-
-        // Assert
-        Assert.Equal(2, await channel.GetHistoryAsync().CountAsync());
-    }
-
-    /// 
-    /// Verify the  skips messages with empty content.
-    /// 
-    [Fact]
-    public async Task VerifyReceiveWithEmptyContentAsync()
-    {
-        // Arrange
-        BedrockAgentChannel channel = new();
-        List history = [
-            new ChatMessageContent()
-            {
-                Role = AuthorRole.User,
-            },
-        ];
-
-        // Act
-        await channel.ReceiveAsync(history);
-
-        // Assert
-        Assert.Empty(await channel.GetHistoryAsync().ToArrayAsync());
-    }
-
-    /// 
-    /// Verify the channel inserts placeholders when the message sequence is incorrect.
-    /// 
-    [Fact]
-    public async Task VerifyReceiveWithIncorrectSequenceAsync()
-    {
-        // Arrange
-        BedrockAgentChannel channel = new();
-        List history = this.CreateIncorrectSequenceHistory();
-
-        // Act
-        await channel.ReceiveAsync(history);
-
-        // Assert that a user message is inserted between the two agent messages.
-        // Note that `GetHistoryAsync` returns the history in a reversed order.
-        Assert.Equal(6, await channel.GetHistoryAsync().CountAsync());
-        Assert.Equal(AuthorRole.User, (await channel.GetHistoryAsync().ToArrayAsync())[3].Role);
-    }
-
-    /// 
-    /// Verify the channel empties the history when reset.
-    /// 
-    [Fact]
-    public async Task VerifyResetAsync()
-    {
-        // Arrange
-        BedrockAgentChannel channel = new();
-        List history = this.CreateNormalHistory();
-
-        // Act
-        await channel.ReceiveAsync(history);
-
-        // Assert
-        Assert.NotEmpty(await channel.GetHistoryAsync().ToArrayAsync());
-
-        // Act
-        await channel.ResetAsync();
-
-        // Assert
-        Assert.Empty(await channel.GetHistoryAsync().ToArrayAsync());
-    }
-
-    /// 
-    /// Verify the channel correctly prepares the history for invocation.
-    /// 
-    [Fact]
-    public async Task VerifyInvokeAsync()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-        BedrockAgent agent = new(this._agentModel, mockClient.Object, mockRuntimeClient.Object);
-
-        BedrockAgentChannel channel = new();
-        List history = this.CreateIncorrectSequenceHistory();
-
-        // Act
-        async Task InvokeAgent()
-        {
-            await channel.ReceiveAsync(history);
-            await foreach (var _ in channel.InvokeAsync(agent))
-            {
-                continue;
-            }
-        }
-
-        // Assert
-        await Assert.ThrowsAsync(() => InvokeAgent());
-        mockRuntimeClient.Verify(x => x.InvokeAgentAsync(
-            It.Is(r =>
-                r.AgentAliasId == BedrockAgent.WorkingDraftAgentAlias
-                && r.AgentId == this._agentModel.AgentId
-                && r.InputText == "[SILENCE]"   // Inserted by `EnsureLastMessageIsUser`.
-                && r.SessionState.ConversationHistory.Messages.Count == 6   // There is also a user message inserted between the two agent messages.
-            ),
-            It.IsAny()
-        ), Times.Once);
-    }
-
-    /// 
-    /// Verify the channel returns an empty stream when invoking with an empty history.
-    /// 
-    [Fact]
-    public async Task VerifyInvokeWithEmptyHistoryAsync()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-        BedrockAgent agent = new(this._agentModel, mockClient.Object, mockRuntimeClient.Object);
-
-        BedrockAgentChannel channel = new();
-
-        // Act
-        List history = [];
-        await foreach ((bool _, ChatMessageContent Message) in channel.InvokeAsync(agent))
-        {
-            history.Add(Message);
-        }
-
-        // Assert
-        Assert.Empty(history);
-    }
-
-    /// 
-    /// Verify the channel correctly prepares the history for streaming invocation.
-    /// 
-    [Fact]
-    public async Task VerifyInvokeStreamAsync()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-        BedrockAgent agent = new(this._agentModel, mockClient.Object, mockRuntimeClient.Object);
-
-        BedrockAgentChannel channel = new();
-        List history = this.CreateIncorrectSequenceHistory();
-
-        // Act
-        async Task InvokeAgent()
-        {
-            await channel.ReceiveAsync(history);
-            await foreach (var _ in channel.InvokeStreamingAsync(agent, []))
-            {
-                continue;
-            }
-        }
-
-        // Assert
-        await Assert.ThrowsAsync(() => InvokeAgent());
-        mockRuntimeClient.Verify(x => x.InvokeAgentAsync(
-            It.Is(r =>
-                r.AgentAliasId == BedrockAgent.WorkingDraftAgentAlias
-                && r.AgentId == this._agentModel.AgentId
-                && r.InputText == "[SILENCE]"   // Inserted by `EnsureLastMessageIsUser`.
-                && r.SessionState.ConversationHistory.Messages.Count == 6   // There is also a user message inserted between the two agent messages.
-            ),
-            It.IsAny()
-        ), Times.Once);
-    }
-
-    /// 
-    /// Verify the channel returns an empty stream when invoking with an empty history.
-    /// 
-    [Fact]
-    public async Task VerifyInvokeStreamingWithEmptyHistoryAsync()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-        BedrockAgent agent = new(this._agentModel, mockClient.Object, mockRuntimeClient.Object);
-
-        BedrockAgentChannel channel = new();
-
-        // Act
-        List history = [];
-        await foreach (var message in channel.InvokeStreamingAsync(agent, []))
-        {
-            history.Add(message);
-        }
-
-        // Assert
-        Assert.Empty(history);
-    }
-
-    private List CreateNormalHistory()
-    {
-        return
-        [
-            new ChatMessageContent(AuthorRole.User, "Hi!"),
-            new ChatMessageContent(AuthorRole.Assistant, "Hi, how can I help you?"),
-        ];
-    }
-
-    private List CreateIncorrectSequenceHistory()
-    {
-        return
-        [
-            new ChatMessageContent(AuthorRole.User, "What is a word that starts with 'x'?"),
-            new ChatMessageContent(AuthorRole.Assistant, "Xylophone.")
-            {
-                AuthorName = "Agent 1"
-            },
-            new ChatMessageContent(AuthorRole.Assistant, "Xenon.")
-            {
-                AuthorName = "Agent 2"
-            },
-            new ChatMessageContent(AuthorRole.User, "Thanks!"),
-            new ChatMessageContent(AuthorRole.Assistant, "Is there anything else you need?")
-            {
-                AuthorName = "Agent 1"
-            },
-        ];
-    }
-
-    private (Mock, Mock) CreateMockClients()
-    {
-#pragma warning disable Moq1410 // Moq: Set MockBehavior to Strict
-        Mock mockClientConfig = new();
-        Mock mockRuntimeClientConfig = new();
-        mockClientConfig.Setup(x => x.Validate()).Verifiable();
-        mockRuntimeClientConfig.Setup(x => x.Validate()).Verifiable();
-        Mock mockClient = new(
-            "fakeAccessId",
-            "fakeSecretKey",
-            mockClientConfig.Object);
-        Mock mockRuntimeClient = new(
-            "fakeAccessId",
-            "fakeSecretKey",
-            mockRuntimeClientConfig.Object);
-#pragma warning restore Moq1410 // Moq: Set MockBehavior to Strict
-        mockRuntimeClient.Setup(x => x.InvokeAgentAsync(
-            It.IsAny(),
-            It.IsAny())
-        ).ReturnsAsync(new InvokeAgentResponse()
-        {
-            // It's not important what the response is for this test.
-            // And it's difficult to mock the response stream.
-            // Tests should expect an exception to be thrown.
-            HttpStatusCode = System.Net.HttpStatusCode.NotFound,
-        });
-
-        return (mockClient, mockRuntimeClient);
-    }
-}
diff --git a/dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentTests.cs b/dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentTests.cs
deleted file mode 100644
index ffc86b79662d..000000000000
--- a/dotnet/src/Agents/UnitTests/Bedrock/BedrockAgentTests.cs
+++ /dev/null
@@ -1,290 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.ComponentModel;
-using System.Threading.Tasks;
-using Amazon.BedrockAgent;
-using Amazon.BedrockAgent.Model;
-using Amazon.BedrockAgentRuntime;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.Bedrock;
-using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
-using Moq;
-using Xunit;
-
-namespace SemanticKernel.Agents.UnitTests.Bedrock;
-
-/// 
-/// Unit testing of .
-/// 
-public class BedrockAgentTests
-{
-    private readonly Amazon.BedrockAgent.Model.Agent _agentModel = new()
-    {
-        AgentId = "1234567890",
-        AgentName = "testName",
-        Description = "test description",
-        Instruction = "Instruction must have at least 40 characters",
-    };
-
-    private readonly CreateAgentRequest _createAgentRequest = new()
-    {
-        AgentName = "testName",
-        Description = "test description",
-        Instruction = "Instruction must have at least 40 characters",
-    };
-
-    /// 
-    /// Verify the initialization of .
-    /// 
-    [Fact]
-    public void VerifyBedrockAgentDefinition()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-        BedrockAgent agent = new(this._agentModel, mockClient.Object, mockRuntimeClient.Object);
-
-        // Assert
-        this.VerifyAgent(agent);
-    }
-
-    /// 
-    /// Verify the creation of  without specialized settings.
-    /// 
-    [Fact]
-    public async Task VerifyBedrockAgentCreateAsync()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
-
-        // Act
-        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
-
-        // Assert
-        this.VerifyAgent(bedrockAgent);
-    }
-
-    /// 
-    /// Verify the creation of  with action groups.
-    /// 
-    [Fact]
-    public async Task VerifyBedrockAgentCreateWithActionGroupsAsync()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-        // Mock the creation of an agent action group.
-        mockClient.Setup(x => x.CreateAgentActionGroupAsync(
-            It.IsAny(),
-            default)
-        ).ReturnsAsync(new CreateAgentActionGroupResponse());
-        // Override the sequence of calls to GetAgentAsync to return the agent status
-        // because creating an agent action group will require the agent to be prepared again.
-        mockClient.SetupSequence(x => x.GetAgentAsync(
-            It.IsAny(),
-            default)
-        ).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.NOT_PREPARED,
-            }
-        }).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.PREPARING,
-            }
-        }).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.PREPARED,
-            }
-        }).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.PREPARING,
-            }
-        }).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.PREPARED,
-            }
-        });
-        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
-
-        // Act
-        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
-        await bedrockAgent.CreateCodeInterpreterActionGroupAsync();
-
-        // Assert
-        this.VerifyAgent(bedrockAgent);
-        mockClient.Verify(x => x.CreateAgentActionGroupAsync(
-            It.IsAny(),
-            default), Times.Exactly(1));
-    }
-
-    /// 
-    /// Verify the creation of  with a kernel.
-    /// 
-    [Fact]
-    public async Task VerifyBedrockAgentCreateWithKernelAsync()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
-
-        // Act
-        Kernel kernel = new();
-        kernel.Plugins.Add(KernelPluginFactory.CreateFromType());
-        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object)
-        {
-            Kernel = kernel,
-        };
-
-        // Assert
-        this.VerifyAgent(bedrockAgent);
-        Assert.Single(bedrockAgent.Kernel.Plugins);
-    }
-
-    /// 
-    /// Verify the creation of  with kernel arguments.
-    /// 
-    [Fact]
-    public async Task VerifyBedrockAgentCreateWithKernelArgumentsAsync()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
-
-        // Act
-        KernelArguments arguments = new() { { "key", "value" } };
-        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object)
-        {
-            Arguments = arguments,
-        };
-
-        // Assert
-        this.VerifyAgent(bedrockAgent);
-        Assert.Single(bedrockAgent.Arguments);
-    }
-
-    /// 
-    /// Verify the bedrock agent returns the expected channel key.
-    /// 
-    [Fact]
-    public async Task VerifyBedrockAgentChannelKeyAsync()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
-
-        // Act
-        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
-
-        // Assert
-        Assert.Single(bedrockAgent.GetChannelKeys());
-    }
-
-    private (Mock, Mock) CreateMockClients()
-    {
-#pragma warning disable Moq1410 // Moq: Set MockBehavior to Strict
-        Mock mockClientConfig = new();
-        Mock mockRuntimeClientConfig = new();
-        mockClientConfig.Setup(x => x.Validate()).Verifiable();
-        mockRuntimeClientConfig.Setup(x => x.Validate()).Verifiable();
-        Mock mockClient = new(
-            "fakeAccessId",
-            "fakeSecretKey",
-            mockClientConfig.Object);
-        Mock mockRuntimeClient = new(
-            "fakeAccessId",
-            "fakeSecretKey",
-            mockRuntimeClientConfig.Object);
-#pragma warning restore Moq1410 // Moq: Set MockBehavior to Strict
-
-        mockClient.Setup(x => x.CreateAgentAsync(
-            It.IsAny(),
-            default)
-        ).ReturnsAsync(new CreateAgentResponse { Agent = this._agentModel });
-
-        // After a new agent is created, its status will first be CREATING then NOT_PREPARED.
-        // Internally, we will prepare the agent for use. During preparation, the agent status
-        // will be PREPARING, then finally PREPARED.
-        mockClient.SetupSequence(x => x.GetAgentAsync(
-            It.IsAny(),
-            default)
-        ).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.NOT_PREPARED,
-            }
-        }).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.PREPARING,
-            }
-        }).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.PREPARED,
-            }
-        });
-
-        return (mockClient, mockRuntimeClient);
-    }
-
-    private void VerifyAgent(BedrockAgent bedrockAgent)
-    {
-        Assert.Equal(bedrockAgent.Id, this._agentModel.AgentId);
-        Assert.Equal(bedrockAgent.Name, this._agentModel.AgentName);
-        Assert.Equal(bedrockAgent.Description, this._agentModel.Description);
-        Assert.Equal(bedrockAgent.Instructions, this._agentModel.Instruction);
-    }
-
-    private sealed class WeatherPlugin
-    {
-        [KernelFunction, Description("Provides realtime weather information.")]
-        public string Current([Description("The location to get the weather for.")] string location)
-        {
-            return $"The current weather in {location} is 72 degrees.";
-        }
-    }
-}
diff --git a/dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockAgentExtensionsTests.cs b/dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockAgentExtensionsTests.cs
deleted file mode 100644
index 78f8c8bd67c4..000000000000
--- a/dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockAgentExtensionsTests.cs
+++ /dev/null
@@ -1,320 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.Threading.Tasks;
-using Amazon.BedrockAgent;
-using Amazon.BedrockAgent.Model;
-using Amazon.BedrockAgentRuntime;
-using Microsoft.SemanticKernel.Agents.Bedrock;
-using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
-using Moq;
-using Xunit;
-
-namespace SemanticKernel.Agents.UnitTests.Bedrock.Extensions;
-
-/// 
-/// Unit testing of .
-/// 
-public class BedrockAgentExtensionsTests
-{
-    private readonly Amazon.BedrockAgent.Model.Agent _agentModel = new()
-    {
-        AgentId = "1234567890",
-        AgentName = "testName",
-        Description = "test description",
-        Instruction = "Instruction must have at least 40 characters",
-    };
-
-    private readonly CreateAgentRequest _createAgentRequest = new()
-    {
-        AgentName = "testName",
-        Description = "test description",
-        Instruction = "Instruction must have at least 40 characters",
-    };
-
-    /// 
-    /// Verify the creation of the agent and the preparation of the agent.
-    /// The status of the agent should be checked 3 times based on the setup.
-    /// 1: Waiting for the agent to go from CREATING to NOT_PREPARED.
-    /// 2: Waiting for the agent to go from NOT_PREPARED to PREPARING.
-    /// 3: Waiting for the agent to go from PREPARING to PREPARED.
-    /// 
-    [Fact]
-    public async Task VerifyCreateAndPrepareAgentAsync()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-
-        // Act
-        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
-
-        // Assert
-        mockClient.Verify(x => x.GetAgentAsync(
-            It.IsAny(),
-            default), Times.Exactly(3));
-    }
-
-    /// 
-    /// Verify the modification and preparation of the agent is correctly performed.
-    /// The status of the agent should be go through the following states:
-    /// PREPARED -> PREPARING -> PREPARED.
-    /// 
-    [Fact]
-    public async Task VerifyAssociateAgentKnowledgeBaseAsync()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-        this.ModifyMockClientGetAgentResponseSequence(mockClient);
-
-        mockClient.Setup(x => x.AssociateAgentKnowledgeBaseAsync(
-            It.IsAny(),
-            default)
-        ).ReturnsAsync(new AssociateAgentKnowledgeBaseResponse());
-
-        // Act
-        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
-        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
-        await bedrockAgent.AssociateAgentKnowledgeBaseAsync("testKnowledgeBaseId", "testKnowledgeBaseDescription");
-
-        // Assert
-        mockClient.Verify(x => x.GetAgentAsync(
-            It.IsAny(),
-            default), Times.Exactly(5));
-    }
-
-    /// 
-    /// Verify the modification and preparation of the agent is correctly performed.
-    /// The status of the agent should be go through the following states:
-    /// PREPARED -> PREPARING -> PREPARED.
-    /// 
-    [Fact]
-    public async Task VerifyDisassociateAgentKnowledgeBaseAsync()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-        this.ModifyMockClientGetAgentResponseSequence(mockClient);
-
-        mockClient.Setup(x => x.DisassociateAgentKnowledgeBaseAsync(
-            It.IsAny(),
-            default)
-        ).ReturnsAsync(new DisassociateAgentKnowledgeBaseResponse());
-
-        // Act
-        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
-        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
-        await bedrockAgent.DisassociateAgentKnowledgeBaseAsync("testKnowledgeBaseId");
-
-        // Assert
-        mockClient.Verify(x => x.GetAgentAsync(
-            It.IsAny(),
-            default), Times.Exactly(5));
-    }
-
-    /// 
-    /// Verify the modification and preparation of the agent is correctly performed.
-    /// The status of the agent should be go through the following states:
-    /// PREPARED -> PREPARING -> PREPARED.
-    /// 
-    [Fact]
-    public async Task VerifyCreateCodeInterpreterActionGroupAsync()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-        this.ModifyMockClientGetAgentResponseSequence(mockClient);
-
-        mockClient.Setup(x => x.CreateAgentActionGroupAsync(
-            It.IsAny(),
-            default)
-        ).ReturnsAsync(new CreateAgentActionGroupResponse());
-
-        // Act
-        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
-        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
-        await bedrockAgent.CreateCodeInterpreterActionGroupAsync();
-
-        // Assert
-        mockClient.Verify(x => x.GetAgentAsync(
-            It.IsAny(),
-            default), Times.Exactly(5));
-    }
-
-    /// 
-    /// Verify the modification and preparation of the agent is correctly performed.
-    /// The status of the agent should be go through the following states:
-    /// PREPARED -> PREPARING -> PREPARED.
-    /// 
-    [Fact]
-    public async Task VerifyCreateKernelFunctionActionGroupAsync()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-        this.ModifyMockClientGetAgentResponseSequence(mockClient);
-
-        mockClient.Setup(x => x.CreateAgentActionGroupAsync(
-            It.IsAny(),
-            default)
-        ).ReturnsAsync(new CreateAgentActionGroupResponse());
-
-        // Act
-        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
-        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
-        await bedrockAgent.CreateKernelFunctionActionGroupAsync();
-
-        // Assert
-        mockClient.Verify(x => x.GetAgentAsync(
-            It.IsAny(),
-            default), Times.Exactly(5));
-    }
-
-    /// 
-    /// Verify the modification and preparation of the agent is correctly performed.
-    /// The status of the agent should be go through the following states:
-    /// PREPARED -> PREPARING -> PREPARED.
-    /// 
-    [Fact]
-    public async Task VerifyEnableUserInputActionGroupAsync()
-    {
-        // Arrange
-        var (mockClient, mockRuntimeClient) = this.CreateMockClients();
-        this.ModifyMockClientGetAgentResponseSequence(mockClient);
-
-        mockClient.Setup(x => x.CreateAgentActionGroupAsync(
-            It.IsAny(),
-            default)
-        ).ReturnsAsync(new CreateAgentActionGroupResponse());
-
-        // Act
-        var agentModel = await mockClient.Object.CreateAndPrepareAgentAsync(this._createAgentRequest);
-        var bedrockAgent = new BedrockAgent(agentModel, mockClient.Object, mockRuntimeClient.Object);
-        await bedrockAgent.EnableUserInputActionGroupAsync();
-
-        // Assert
-        mockClient.Verify(x => x.GetAgentAsync(
-            It.IsAny(),
-            default), Times.Exactly(5));
-    }
-
-    private (Mock, Mock) CreateMockClients()
-    {
-#pragma warning disable Moq1410 // Moq: Set MockBehavior to Strict
-        Mock mockClientConfig = new();
-        Mock mockRuntimeClientConfig = new();
-        mockClientConfig.Setup(x => x.Validate()).Verifiable();
-        mockRuntimeClientConfig.Setup(x => x.Validate()).Verifiable();
-        Mock mockClient = new(
-            "fakeAccessId",
-            "fakeSecretKey",
-            mockClientConfig.Object);
-        Mock mockRuntimeClient = new(
-            "fakeAccessId",
-            "fakeSecretKey",
-            mockRuntimeClientConfig.Object);
-#pragma warning restore Moq1410 // Moq: Set MockBehavior to Strict
-
-        mockClient.Setup(x => x.CreateAgentAsync(
-            It.IsAny(),
-            default)
-        ).ReturnsAsync(new CreateAgentResponse { Agent = this._agentModel });
-
-        // After a new agent is created, its status will first be CREATING then NOT_PREPARED.
-        // Internally, we will prepare the agent for use. During preparation, the agent status
-        // will be PREPARING, then finally PREPARED.
-        mockClient.SetupSequence(x => x.GetAgentAsync(
-            It.IsAny(),
-            default)
-        ).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.NOT_PREPARED,
-            }
-        }).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.PREPARING,
-            }
-        }).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.PREPARED,
-            }
-        });
-
-        return (mockClient, mockRuntimeClient);
-    }
-
-    /// 
-    /// Modify the mock client to return a new sequence of responses for the GetAgentAsync method
-    /// that reflect the correct sequence of status change when modifying the agent.
-    /// 
-    private void ModifyMockClientGetAgentResponseSequence(Mock mockClient)
-    {
-        mockClient.SetupSequence(x => x.GetAgentAsync(
-            It.IsAny(),
-            default)
-        ).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.NOT_PREPARED,
-            }
-        }).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.PREPARING,
-            }
-        }).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.PREPARED,
-            }
-        }).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.PREPARING,
-            }
-        }).ReturnsAsync(new GetAgentResponse
-        {
-            Agent = new Amazon.BedrockAgent.Model.Agent()
-            {
-                AgentId = this._agentModel.AgentId,
-                AgentName = this._agentModel.AgentName,
-                Description = this._agentModel.Description,
-                Instruction = this._agentModel.Instruction,
-                AgentStatus = AgentStatus.PREPARED,
-            }
-        });
-    }
-}
diff --git a/dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockFunctionSchemaExtensionsTests.cs b/dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockFunctionSchemaExtensionsTests.cs
deleted file mode 100644
index a679fe30f83f..000000000000
--- a/dotnet/src/Agents/UnitTests/Bedrock/Extensions.cs/BedrockFunctionSchemaExtensionsTests.cs
+++ /dev/null
@@ -1,111 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.Collections.Generic;
-using System.ComponentModel;
-using Amazon.BedrockAgentRuntime.Model;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.Bedrock.Extensions;
-using Xunit;
-
-namespace SemanticKernel.Agents.UnitTests.Bedrock.Extensions;
-
-/// 
-/// Unit testing of .
-/// 
-public class BedrockFunctionSchemaExtensionsTests
-{
-    /// 
-    /// Verify the conversion of a  to a .
-    /// 
-    [Fact]
-    public void VerifyFromFunctionParameters()
-    {
-        // Arrange
-        List parameters =
-        [
-            new FunctionParameter()
-            {
-                Name = "TestParameter",
-                Type = Amazon.BedrockAgent.Type.String,
-            },
-        ];
-
-        // Act
-        KernelArguments arguments = parameters.FromFunctionParameters(null);
-
-        // Assert
-        Assert.Single(arguments);
-        Assert.True(arguments.ContainsName("TestParameter"));
-    }
-
-    /// 
-    /// Verify the conversion of a  to a  with existing arguments.
-    /// 
-    [Fact]
-    public void VerifyFromFunctionParametersWithArguments()
-    {
-        // Arrange
-        List parameters =
-        [
-            new FunctionParameter()
-            {
-                Name = "TestParameter",
-                Type = Amazon.BedrockAgent.Type.String,
-            },
-        ];
-
-        KernelArguments arguments = new()
-        {
-            { "ExistingParameter", "ExistingValue" }
-        };
-
-        // Act
-        KernelArguments updatedArguments = parameters.FromFunctionParameters(arguments);
-
-        // Assert
-        Assert.Equal(2, updatedArguments.Count);
-        Assert.True(updatedArguments.ContainsName("TestParameter"));
-        Assert.True(updatedArguments.ContainsName("ExistingParameter"));
-    }
-
-    /// 
-    /// Verify the conversion of a  plugin to a .
-    /// 
-    [Fact]
-    public void VerifyToFunctionSchema()
-    {
-        // Arrange
-        (Kernel kernel, KernelFunction function, KernelParameterMetadata parameter) = this.CreateKernelPlugin();
-
-        // Act
-        Amazon.BedrockAgent.Model.FunctionSchema schema = kernel.ToFunctionSchema();
-
-        // Assert
-        Assert.Single(schema.Functions);
-        Assert.Equal(function.Name, schema.Functions[0].Name);
-        Assert.Equal(function.Description, schema.Functions[0].Description);
-        Assert.True(schema.Functions[0].Parameters.ContainsKey(parameter.Name));
-        Assert.Equal(parameter.Description, schema.Functions[0].Parameters[parameter.Name].Description);
-        Assert.True(schema.Functions[0].Parameters[parameter.Name].Required);
-        Assert.Equal(Amazon.BedrockAgent.Type.String, schema.Functions[0].Parameters[parameter.Name].Type);
-        Assert.Equal(Amazon.BedrockAgent.RequireConfirmation.DISABLED, schema.Functions[0].RequireConfirmation);
-    }
-
-    private (Kernel, KernelFunction, KernelParameterMetadata) CreateKernelPlugin()
-    {
-        Kernel kernel = new();
-        kernel.Plugins.Add(KernelPluginFactory.CreateFromType());
-        var function = kernel.Plugins["WeatherPlugin"]["Current"];
-        var parameter = function.Metadata.Parameters[0];
-        return (kernel, function, parameter);
-    }
-
-    private sealed class WeatherPlugin
-    {
-        [KernelFunction, Description("Provides realtime weather information.")]
-        public string Current([Description("The location to get the weather for.")] string location)
-        {
-            return $"The current weather in {location} is 72 degrees.";
-        }
-    }
-}
diff --git a/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs b/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs
index 1ce8039b250d..01debd8ded5f 100644
--- a/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs
@@ -1,11 +1,11 @@
 // Copyright (c) Microsoft. All rights reserved.
-
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.Agents.History;
 using Microsoft.SemanticKernel.ChatCompletion;
 using Moq;
 using Xunit;
@@ -37,74 +37,7 @@ public void VerifyChatCompletionAgentDefinition()
         Assert.Equal("test instructions", agent.Instructions);
         Assert.Equal("test description", agent.Description);
         Assert.Equal("test name", agent.Name);
-        Assert.NotNull(agent.Arguments);
-    }
-
-    /// 
-    /// Verify the invocation and response of .
-    /// 
-    [Fact]
-    public void VerifyChatCompletionAgentTemplate()
-    {
-        PromptTemplateConfig promptConfig =
-            new()
-            {
-                Name = "TestName",
-                Description = "TestDescription",
-                Template = "TestInstructions",
-                ExecutionSettings =
-                {
-                    {
-                        PromptExecutionSettings.DefaultServiceId,
-                        new PromptExecutionSettings()
-                        {
-                            FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(),
-                            ModelId = "gpt-new",
-                        }
-                    },
-                    {
-                        "manual",
-                        new PromptExecutionSettings()
-                        {
-                            ServiceId = "manual",
-                            FunctionChoiceBehavior = FunctionChoiceBehavior.Required(),
-                            ModelId = "gpt-old",
-                        }
-                    },
-                }
-            };
-        KernelPromptTemplateFactory templateFactory = new();
-
-        // Arrange
-        ChatCompletionAgent agent = new(promptConfig, templateFactory);
-
-        // Assert
-        Assert.NotNull(agent.Id);
-        Assert.Equal(promptConfig.Template, agent.Instructions);
-        Assert.Equal(promptConfig.Description, agent.Description);
-        Assert.Equal(promptConfig.Name, agent.Name);
-        Assert.Equal(promptConfig.ExecutionSettings, agent.Arguments.ExecutionSettings);
-    }
-
-    /// 
-    /// Verify throws  when invalid  is provided.
-    /// 
-    [Fact]
-    public void VerifyThrowsForInvalidTemplateFactory()
-    {
-        // Arrange
-        PromptTemplateConfig promptConfig =
-            new()
-            {
-                Name = "TestName",
-                Description = "TestDescription",
-                Template = "TestInstructions",
-                TemplateFormat = "handlebars",
-            };
-        KernelPromptTemplateFactory templateFactory = new();
-
-        // Act and Assert
-        Assert.Throws<KernelException>(() => new ChatCompletionAgent(promptConfig, templateFactory));
+        Assert.Null(agent.Arguments);
     }
 
     /// 
diff --git a/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs b/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs
index f127b35eaaff..dc82bcef59b6 100644
--- a/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs
@@ -1,10 +1,8 @@
 // Copyright (c) Microsoft. All rights reserved.
-using System;
 using System.Linq;
 using System.Threading.Tasks;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
-using Microsoft.SemanticKernel.ChatCompletion;
 using Moq;
 using Xunit;
 
@@ -20,103 +18,13 @@ public class ChatHistoryChannelTests
     /// does not implement .
     /// 
     [Fact]
-    public async Task VerifyAgentIsChatHistoryKernelAgentAsync()
+    public async Task VerifyAgentWithoutIChatHistoryHandlerAsync()
     {
         // Arrange
         Mock agent = new(); // Not a ChatHistoryKernelAgent
-        ChatHistoryChannel channel = new();
+        ChatHistoryChannel channel = new(); // Requires IChatHistoryHandler
 
         // Act & Assert
         await Assert.ThrowsAsync<KernelException>(() => channel.InvokeAsync(agent.Object).ToArrayAsync().AsTask());
     }
-
-    /// 
-    /// Verify a  filters empty content on receive.
-    /// 
-    [Fact]
-    public async Task VerifyReceiveFiltersEmptyContentAsync()
-    {
-        // Arrange
-        ChatHistoryChannel channel = new();
-
-        // Act
-        await channel.ReceiveAsync([new ChatMessageContent(AuthorRole.Assistant, string.Empty)]);
-
-        // Assert
-        Assert.Empty(await channel.GetHistoryAsync().ToArrayAsync());
-    }
-
-    /// 
-    /// Verify a  filters file content on receive.
-    /// 
-    /// 
-    /// As long as content is not empty, extraneous file content is ok.
-    /// 
-    [Fact]
-    public async Task VerifyReceiveFiltersFileContentAsync()
-    {
-        // Arrange
-        ChatHistoryChannel channel = new();
-
-        // Act
-        await channel.ReceiveAsync([new ChatMessageContent(AuthorRole.Assistant, [new FileReferenceContent("fileId")])]);
-
-        // Assert
-        Assert.Empty(await channel.GetHistoryAsync().ToArrayAsync());
-
-        // Act
-        await channel.ReceiveAsync(
-            [new ChatMessageContent(
-                AuthorRole.Assistant,
-                [
-                    new TextContent("test"),
-                    new FileReferenceContent("fileId")
-                ])]);
-
-        // Assert
-        var history = await channel.GetHistoryAsync().ToArrayAsync();
-        Assert.Single(history);
-        Assert.Equal(2, history[0].Items.Count);
-    }
-
-    /// 
-    /// Verify a  accepts function content on receive.
-    /// 
-    [Fact]
-    public async Task VerifyReceiveAcceptsFunctionContentAsync()
-    {
-        // Arrange
-        ChatHistoryChannel channel = new();
-
-        // Act
-        await channel.ReceiveAsync([new ChatMessageContent(AuthorRole.Assistant, [new FunctionCallContent("test-func")])]);
-
-        // Assert
-        Assert.Single(await channel.GetHistoryAsync().ToArrayAsync());
-
-        // Arrange
-        channel = new();
-
-        // Act
-        await channel.ReceiveAsync([new ChatMessageContent(AuthorRole.Assistant, [new FunctionResultContent("test-func")])]);
-
-        // Assert
-        Assert.Single(await channel.GetHistoryAsync().ToArrayAsync());
-    }
-
-    /// 
-    /// Verify a  accepts image content on receive.
-    /// 
-    [Fact]
-    public async Task VerifyReceiveAcceptsImageContentAsync()
-    {
-        // Arrange
-        ChatHistoryChannel channel = new();
-
-        // Act
-        await channel.ReceiveAsync([new ChatMessageContent(AuthorRole.Assistant, [new ImageContent(new Uri("http://test.ms/test.jpg"))])]);
-
-        // Assert
-        Assert.Single(await channel.GetHistoryAsync().ToArrayAsync());
-    }
 }
diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryReducerExtensionsTests.cs b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs
similarity index 87%
rename from dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryReducerExtensionsTests.cs
rename to dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs
index 9bdf469b69bc..45dcc47e5cab 100644
--- a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryReducerExtensionsTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs
@@ -1,15 +1,15 @@
 // Copyright (c) Microsoft. All rights reserved.
-
 using System;
 using System.Collections.Generic;
 using System.Linq;
 using System.Threading.Tasks;
 using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.History;
 using Microsoft.SemanticKernel.ChatCompletion;
 using Moq;
 using Xunit;
 
-namespace SemanticKernel.UnitTests.AI.ChatCompletion;
+namespace SemanticKernel.Agents.UnitTests.Core.History;
 
 /// 
 /// Unit testing of .
@@ -31,7 +31,7 @@ public class ChatHistoryReducerExtensionsTests
     public void VerifyChatHistoryExtraction(int messageCount, int startIndex, int? endIndex = null, int? expectedCount = null)
     {
         // Arrange
-        ChatHistory history = [.. MockChatHistoryGenerator.CreateSimpleHistory(messageCount)];
+        ChatHistory history = [.. MockHistoryGenerator.CreateSimpleHistory(messageCount)];
 
         // Act
         ChatMessageContent[] extractedHistory = history.Extract(startIndex, endIndex).ToArray();
@@ -62,14 +62,14 @@ public void VerifyChatHistoryExtraction(int messageCount, int startIndex, int? e
     public void VerifyGetFinalSummaryIndex(int summaryCount, int regularCount)
     {
         // Arrange
-        ChatHistory summaries = [.. MockChatHistoryGenerator.CreateSimpleHistory(summaryCount)];
+        ChatHistory summaries = [.. MockHistoryGenerator.CreateSimpleHistory(summaryCount)];
         foreach (ChatMessageContent summary in summaries)
         {
             summary.Metadata = new Dictionary<string, object?>() { { "summary", true } };
         }
 
         // Act
-        ChatHistory history = [.. summaries, .. MockChatHistoryGenerator.CreateSimpleHistory(regularCount)];
+        ChatHistory history = [.. summaries, .. MockHistoryGenerator.CreateSimpleHistory(regularCount)];
 
         int finalSummaryIndex = history.LocateSummarizationBoundary("summary");
 
@@ -89,14 +89,14 @@ public async Task VerifyChatHistoryNotReducedAsync()
         mockReducer.Setup(r => r.ReduceAsync(It.IsAny<IReadOnlyList<ChatMessageContent>>(), default)).ReturnsAsync((IEnumerable<ChatMessageContent>?)null);
 
         // Act
-        bool isReduced = await history.ReduceInPlaceAsync(null, default);
+        bool isReduced = await history.ReduceAsync(null, default);
 
         // Assert
         Assert.False(isReduced);
         Assert.Empty(history);
 
         // Act
-        isReduced = await history.ReduceInPlaceAsync(mockReducer.Object, default);
+        isReduced = await history.ReduceAsync(mockReducer.Object, default);
 
         // Assert
         Assert.False(isReduced);
@@ -113,10 +113,10 @@ public async Task VerifyChatHistoryReducedAsync()
         Mock<IChatHistoryReducer> mockReducer = new();
         mockReducer.Setup(r => r.ReduceAsync(It.IsAny<IReadOnlyList<ChatMessageContent>>(), default)).ReturnsAsync((IEnumerable<ChatMessageContent>?)[]);
 
-        ChatHistory history = [.. MockChatHistoryGenerator.CreateSimpleHistory(10)];
+        ChatHistory history = [.. MockHistoryGenerator.CreateSimpleHistory(10)];
 
         // Act
-        bool isReduced = await history.ReduceInPlaceAsync(mockReducer.Object, default);
+        bool isReduced = await history.ReduceAsync(mockReducer.Object, default);
 
         // Assert
         Assert.True(isReduced);
@@ -139,7 +139,7 @@ public async Task VerifyChatHistoryReducedAsync()
     public void VerifyLocateSafeReductionIndexNone(int messageCount, int targetCount, int? thresholdCount = null)
     {
         // Arrange: Shape of history doesn't matter since reduction is not expected
-        ChatHistory sourceHistory = [.. MockChatHistoryGenerator.CreateHistoryWithUserInput(messageCount)];
+        ChatHistory sourceHistory = [.. MockHistoryGenerator.CreateHistoryWithUserInput(messageCount)];
 
         // Act
         int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount);
@@ -163,7 +163,7 @@ public void VerifyLocateSafeReductionIndexNone(int messageCount, int targetCount
     public void VerifyLocateSafeReductionIndexFound(int messageCount, int targetCount, int? thresholdCount = null)
     {
         // Arrange: Generate history with only assistant messages
-        ChatHistory sourceHistory = [.. MockChatHistoryGenerator.CreateSimpleHistory(messageCount)];
+        ChatHistory sourceHistory = [.. MockHistoryGenerator.CreateSimpleHistory(messageCount)];
 
         // Act
         int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount);
@@ -189,7 +189,7 @@ public void VerifyLocateSafeReductionIndexFound(int messageCount, int targetCoun
     public void VerifyLocateSafeReductionIndexFoundWithUser(int messageCount, int targetCount, int? thresholdCount = null)
     {
         // Arrange: Generate history with alternating user and assistant messages
-        ChatHistory sourceHistory = [.. MockChatHistoryGenerator.CreateHistoryWithUserInput(messageCount)];
+        ChatHistory sourceHistory = [.. MockHistoryGenerator.CreateHistoryWithUserInput(messageCount)];
 
         // Act
         int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount);
@@ -224,7 +224,7 @@ public void VerifyLocateSafeReductionIndexWithFunctionContent(int targetCount, i
     {
         // Arrange: Generate a history with function call on index 5 and 9 and
         // function result on index 6 and 10 (total length: 14)
-        ChatHistory sourceHistory = [.. MockChatHistoryGenerator.CreateHistoryWithFunctionContent()];
+        ChatHistory sourceHistory = [.. MockHistoryGenerator.CreateHistoryWithFunctionContent()];
 
         ChatHistoryTruncationReducer reducer = new(targetCount, thresholdCount);
 
diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistorySummarizationReducerTests.cs b/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs
similarity index 85%
rename from dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistorySummarizationReducerTests.cs
rename to dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs
index 0588055efc19..53e93d0026c3 100644
--- a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistorySummarizationReducerTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs
@@ -1,15 +1,15 @@
 // Copyright (c) Microsoft. All rights reserved.
-
 using System;
 using System.Collections.Generic;
 using System.Linq;
 using System.Threading.Tasks;
 using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.History;
 using Microsoft.SemanticKernel.ChatCompletion;
 using Moq;
 using Xunit;
 
-namespace SemanticKernel.UnitTests.AI.ChatCompletion;
+namespace SemanticKernel.Agents.UnitTests.Core.History;
 
 /// 
 /// Unit testing of .
@@ -133,7 +133,7 @@ public async Task VerifyChatHistoryReductionSilentFailureAsync()
     {
         // Arrange
         Mock<IChatCompletionService> mockCompletionService = this.CreateMockCompletionService(throwException: true);
-        IReadOnlyList<ChatMessageContent> sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20).ToArray();
+        IReadOnlyList<ChatMessageContent> sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray();
         ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10) { FailOnError = false };
 
         // Act
@@ -151,7 +151,7 @@ public async Task VerifyChatHistoryReductionThrowsOnFailureAsync()
     {
         // Arrange
         Mock<IChatCompletionService> mockCompletionService = this.CreateMockCompletionService(throwException: true);
-        IReadOnlyList<ChatMessageContent> sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20).ToArray();
+        IReadOnlyList<ChatMessageContent> sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray();
         ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10);
 
         // Act and Assert
@@ -166,7 +166,7 @@ public async Task VerifyChatHistoryNotReducedAsync()
     {
         // Arrange
         Mock<IChatCompletionService> mockCompletionService = this.CreateMockCompletionService();
-        IReadOnlyList<ChatMessageContent> sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20).ToArray();
+        IReadOnlyList<ChatMessageContent> sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray();
         ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 20);
 
         // Act
@@ -184,7 +184,7 @@ public async Task VerifyChatHistoryReducedAsync()
     {
         // Arrange
         Mock<IChatCompletionService> mockCompletionService = this.CreateMockCompletionService();
-        IReadOnlyList<ChatMessageContent> sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20).ToArray();
+        IReadOnlyList<ChatMessageContent> sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray();
         ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10);
 
         // Act
@@ -203,7 +203,7 @@ public async Task VerifyChatHistoryRereducedAsync()
     {
         // Arrange
         Mock<IChatCompletionService> mockCompletionService = this.CreateMockCompletionService();
-        IReadOnlyList<ChatMessageContent> sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20).ToArray();
+        IReadOnlyList<ChatMessageContent> sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray();
         ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10);
 
         // Act
@@ -224,27 +224,6 @@ public async Task VerifyChatHistoryRereducedAsync()
         VerifySummarization(messages[1]);
     }
 
-    /// 
-    /// Validate history reduced and system message preserved when source history exceeds target threshold.
-    /// 
-    [Fact]
-    public async Task VerifySystemMessageIsNotReducedAsync()
-    {
-        // Arrange
-        Mock<IChatCompletionService> mockCompletionService = this.CreateMockCompletionService();
-        IReadOnlyList<ChatMessageContent> sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20, includeSystemMessage: true).ToArray();
-        ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10);
-
-        // Act
-        IEnumerable<ChatMessageContent>? reducedHistory = await reducer.ReduceAsync(sourceHistory);
-
-        // Assert
-        ChatMessageContent[] messages = VerifyReducedHistory(reducedHistory, 11);
-        VerifySummarization(messages[1]);
-
-        Assert.Contains(messages, m => m.Role == AuthorRole.System);
-    }
-
     private static ChatMessageContent[] VerifyReducedHistory(IEnumerable<ChatMessageContent>? reducedHistory, int expectedCount)
     {
         Assert.NotNull(reducedHistory);
diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTruncationReducerTests.cs b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs
similarity index 79%
rename from dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTruncationReducerTests.cs
rename to dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs
index 650648dadc17..9d8b2e721fdf 100644
--- a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTruncationReducerTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs
@@ -1,14 +1,13 @@
 // Copyright (c) Microsoft. All rights reserved.
-
 using System;
 using System.Collections.Generic;
 using System.Linq;
 using System.Threading.Tasks;
 using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Agents.History;
 using Xunit;
 
-namespace SemanticKernel.UnitTests.AI.ChatCompletion;
+namespace SemanticKernel.Agents.UnitTests.Core.History;
 
 /// 
 /// Unit testing of .
@@ -92,7 +91,7 @@ int GenerateHashCode(int targetCount, int thresholdCount)
     public async Task VerifyChatHistoryNotReducedAsync()
     {
         // Arrange
-        IReadOnlyList<ChatMessageContent> sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(10).ToArray();
+        IReadOnlyList<ChatMessageContent> sourceHistory = MockHistoryGenerator.CreateSimpleHistory(10).ToArray();
         ChatHistoryTruncationReducer reducer = new(20);
 
         // Act
@@ -109,7 +108,7 @@ public async Task VerifyChatHistoryNotReducedAsync()
     public async Task VerifyChatHistoryReducedAsync()
     {
         // Arrange
-        IReadOnlyList<ChatMessageContent> sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20).ToArray();
+        IReadOnlyList<ChatMessageContent> sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray();
         ChatHistoryTruncationReducer reducer = new(10);
 
         // Act
@@ -126,7 +125,7 @@ public async Task VerifyChatHistoryReducedAsync()
     public async Task VerifyChatHistoryRereducedAsync()
     {
         // Arrange
-        IReadOnlyList<ChatMessageContent> sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20).ToArray();
+        IReadOnlyList<ChatMessageContent> sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray();
         ChatHistoryTruncationReducer reducer = new(10);
 
         // Act
@@ -137,24 +136,6 @@ public async Task VerifyChatHistoryRereducedAsync()
         VerifyReducedHistory(reducedHistory, 10);
     }
 
-    /// 
-    /// Validate history reduced and system message preserved when source history exceeds target threshold.
-    /// 
-    [Fact]
-    public async Task VerifySystemMessageIsNotReducedAsync()
-    {
-        // Arrange
-        IReadOnlyList<ChatMessageContent> sourceHistory = MockChatHistoryGenerator.CreateSimpleHistory(20, includeSystemMessage: true).ToArray();
-        ChatHistoryTruncationReducer reducer = new(10);
-
-        // Act
-        IEnumerable<ChatMessageContent>? reducedHistory = await reducer.ReduceAsync(sourceHistory);
-
-        // Assert
-        VerifyReducedHistory(reducedHistory, 10);
-        Assert.Contains(reducedHistory!, m => m.Role == AuthorRole.System);
-    }
-
     private static void VerifyReducedHistory(IEnumerable<ChatMessageContent>? reducedHistory, int expectedCount)
     {
         Assert.NotNull(reducedHistory);
diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/MockChatHistoryGenerator.cs b/dotnet/src/Agents/UnitTests/Core/History/MockHistoryGenerator.cs
similarity index 90%
rename from dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/MockChatHistoryGenerator.cs
rename to dotnet/src/Agents/UnitTests/Core/History/MockHistoryGenerator.cs
index cc7dd3f0377e..3475776a1935 100644
--- a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/MockChatHistoryGenerator.cs
+++ b/dotnet/src/Agents/UnitTests/Core/History/MockHistoryGenerator.cs
@@ -1,26 +1,20 @@
 // Copyright (c) Microsoft. All rights reserved.
-
 using System.Collections.Generic;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.ChatCompletion;
 
-namespace SemanticKernel.UnitTests.AI.ChatCompletion;
+namespace SemanticKernel.Agents.UnitTests.Core.History;
 
 /// 
 /// Factory for generating chat history for various test scenarios.
 /// 
-internal static class MockChatHistoryGenerator
+internal static class MockHistoryGenerator
 {
     /// 
     /// Create a homogeneous list of assistant messages.
     /// 
-    public static IEnumerable<ChatMessageContent> CreateSimpleHistory(int messageCount, bool includeSystemMessage = false)
+    public static IEnumerable<ChatMessageContent> CreateSimpleHistory(int messageCount)
     {
-        if (includeSystemMessage)
-        {
-            yield return new ChatMessageContent(AuthorRole.System, "system message");
-        }
-
         for (int index = 0; index < messageCount; ++index)
         {
             yield return new ChatMessageContent(AuthorRole.Assistant, $"message #{index}");
diff --git a/dotnet/src/Agents/UnitTests/KernelAgentTests.cs b/dotnet/src/Agents/UnitTests/KernelAgentTests.cs
index 0309cd2967d8..4e4f4e531f4e 100644
--- a/dotnet/src/Agents/UnitTests/KernelAgentTests.cs
+++ b/dotnet/src/Agents/UnitTests/KernelAgentTests.cs
@@ -18,18 +18,19 @@ public class KernelAgentTests
     public void VerifyNullArgumentMerge()
     {
         // Arrange
-        MockAgent agentWithNoArguments = new();
+        MockAgent agentWithNullArguments = new();
         // Act
-        KernelArguments arguments = agentWithNoArguments.MergeArguments(null);
+        KernelArguments? arguments = agentWithNullArguments.MergeArguments(null);
         // Assert
-        Assert.Empty(arguments);
+        Assert.Null(arguments);
 
         // Arrange
-        KernelArguments overrideArguments = new() { { "test", 1 } };
+        KernelArguments overrideArguments = [];
         // Act
-        arguments = agentWithNoArguments.MergeArguments(overrideArguments);
+        arguments = agentWithNullArguments.MergeArguments(overrideArguments);
         // Assert
-        Assert.StrictEqual(1, arguments.Count);
+        Assert.NotNull(arguments);
+        Assert.StrictEqual(overrideArguments, arguments);
 
         // Arrange
         MockAgent agentWithEmptyArguments = new() { Arguments = new() };
diff --git a/dotnet/src/Agents/UnitTests/MockAgent.cs b/dotnet/src/Agents/UnitTests/MockAgent.cs
index 7f242ff510a5..409a232b1044 100644
--- a/dotnet/src/Agents/UnitTests/MockAgent.cs
+++ b/dotnet/src/Agents/UnitTests/MockAgent.cs
@@ -41,7 +41,7 @@ public override IAsyncEnumerable InvokeStreamingAsy
     }
 
     // Expose protected method for testing
-    public new KernelArguments MergeArguments(KernelArguments? arguments)
+    public new KernelArguments? MergeArguments(KernelArguments? arguments)
     {
         return base.MergeArguments(arguments);
     }
diff --git a/dotnet/src/Agents/UnitTests/Test/AssertCollection.cs b/dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs
similarity index 95%
rename from dotnet/src/Agents/UnitTests/Test/AssertCollection.cs
rename to dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs
index 8a89cba994a2..cd51c736ac18 100644
--- a/dotnet/src/Agents/UnitTests/Test/AssertCollection.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs
@@ -3,7 +3,7 @@
 using System.Collections.Generic;
 using Xunit;
 
-namespace SemanticKernel.Agents.UnitTests.Test;
+namespace SemanticKernel.Agents.UnitTests.OpenAI;
 
 internal static class AssertCollection
 {
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs
new file mode 100644
index 000000000000..6288c6a5aed8
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs
@@ -0,0 +1,37 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Linq;
+using Azure.Core;
+using Azure.Core.Pipeline;
+using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI.Azure;
+
+/// <summary>
+/// Unit testing of <see cref="AddHeaderRequestPolicy"/>.
+/// </summary>
+public class AddHeaderRequestPolicyTests
+{
+    /// <summary>
+    /// Verify behavior of <see cref="AddHeaderRequestPolicy"/>.
+    /// </summary>
+    [Fact]
+    public void VerifyAddHeaderRequestPolicyExecution()
+    {
+        // Arrange
+        using HttpClientTransport clientTransport = new();
+        HttpPipeline pipeline = new(clientTransport);
+
+        HttpMessage message = pipeline.CreateMessage();
+        AddHeaderRequestPolicy policy = new(headerName: "testname", headerValue: "testvalue");
+
+        // Act
+        policy.OnSendingRequest(message);
+
+        // Assert
+        Assert.Single(message.Request.Headers);
+        HttpHeader header = message.Request.Headers.Single();
+        Assert.Equal("testname", header.Name);
+        Assert.Equal("testvalue", header.Value);
+    }
+}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AssistantClientExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AssistantClientExtensionsTests.cs
deleted file mode 100644
index f1cdd1e429cd..000000000000
--- a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AssistantClientExtensionsTests.cs
+++ /dev/null
@@ -1,357 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System;
-using System.ClientModel;
-using System.Collections.Generic;
-using System.Net;
-using System.Net.Http;
-using System.Threading.Tasks;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents.OpenAI;
-using Microsoft.SemanticKernel.ChatCompletion;
-using OpenAI.Assistants;
-using Xunit;
-
-namespace SemanticKernel.Agents.UnitTests.OpenAI.Extensions;
-
-/// 
-/// Unit testing of .
-/// 
-public sealed class AssistantClientExtensionsTests : IDisposable
-{
-    private const string ModelValue = "testmodel";
-
-    private readonly HttpMessageHandlerStub _messageHandlerStub;
-    private readonly HttpClient _httpClient;
-    private readonly OpenAIClientProvider _clientProvider;
-
-    /// 
-    /// Verify the assistant creation with default values.
-    /// 
-    [Fact]
-    public async Task VerifyCreateAssistantAsync()
-    {
-        // Arrange
-        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.AssistantDefinition(ModelValue));
-
-        // Act
-        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(modelId: ModelValue);
-
-        // Assert
-        Assert.NotNull(definition);
-        Assert.Equal(ModelValue, definition.Model);
-    }
-
-    /// 
-    /// Verify the assistant creation with name, instructions, and description.
-    /// 
-    [Fact]
-    public async Task VerifyCreateAssistantWithIdentityAsync()
-    {
-        // Arrange
-        const string NameValue = "test name";
-        const string DescriptionValue = "test instructions";
-        const string InstructionsValue = "test description";
-
-        this.SetupResponse(
-            HttpStatusCode.OK,
-            OpenAIAssistantResponseContent.AssistantDefinition(
-                ModelValue,
-                name: NameValue,
-                instructions: InstructionsValue,
-                description: DescriptionValue));
-
-        // Act
-        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
-            modelId: ModelValue,
-            name: NameValue,
-            instructions: InstructionsValue,
-            description: DescriptionValue);
-
-        // Assert
-        Assert.NotNull(definition);
-        Assert.Equal(NameValue, definition.Name);
-        Assert.Equal(DescriptionValue, definition.Description);
-        Assert.Equal(InstructionsValue, definition.Instructions);
-    }
-
-    /// 
-    /// Verify the assistant creation with name, instructions, and description.
-    /// 
-    [Fact]
-    public async Task VerifyCreateAssistantWithTemplateAsync()
-    {
-        // Arrange
-        const string NameValue = "test name";
-        const string DescriptionValue = "test instructions";
-        const string InstructionsValue = "test description";
-        PromptTemplateConfig templateConfig =
-            new(InstructionsValue)
-            {
-                Name = NameValue,
-                Description = InstructionsValue,
-            };
-        this.SetupResponse(
-            HttpStatusCode.OK,
-            OpenAIAssistantResponseContent.AssistantDefinition(
-                ModelValue,
-                name: NameValue,
-                instructions: InstructionsValue,
-                description: DescriptionValue));
-
-        // Act
-        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantFromTemplateAsync(modelId: ModelValue, templateConfig);
-
-        // Assert
-        Assert.NotNull(definition);
-        Assert.Equal(NameValue, definition.Name);
-        Assert.Equal(DescriptionValue, definition.Description);
-        Assert.Equal(InstructionsValue, definition.Instructions);
-    }
-
-    /// 
-    /// Verify the assistant creation with code-interpreter enabled.
-    /// 
-    [Fact]
-    public async Task VerifyCreateAssistantWithCodeInterpreterAsync()
-    {
-        // Arrange
-        this.SetupResponse(
-            HttpStatusCode.OK,
-            OpenAIAssistantResponseContent.AssistantDefinition(ModelValue, enableCodeInterpreter: true));
-
-        // Act
-        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
-            modelId: ModelValue,
-            enableCodeInterpreter: true);
-
-        // Assert
-        Assert.NotNull(definition);
-        Assert.Single(definition.Tools);
-        Assert.IsType<CodeInterpreterToolDefinition>(definition.Tools[0]);
-    }
-
-    /// 
-    /// Verify the assistant creation with code-interpreter files specified.
-    /// 
-    [Fact]
-    public async Task VerifyCreateAssistantWithCodeInterpreterFilesAsync()
-    {
-        // Arrange
-        string[] fileIds = ["file1", "file2"];
-        this.SetupResponse(
-            HttpStatusCode.OK,
-            OpenAIAssistantResponseContent.AssistantDefinition(ModelValue, codeInterpreterFileIds: fileIds));
-
-        // Act
-        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
-            modelId: ModelValue,
-            codeInterpreterFileIds: fileIds);
-
-        // Assert
-        Assert.NotNull(definition);
-        Assert.Single(definition.Tools);
-        Assert.IsType<CodeInterpreterToolDefinition>(definition.Tools[0]);
-        Assert.NotNull(definition.ToolResources.CodeInterpreter);
-        Assert.Equal(2, definition.ToolResources.CodeInterpreter.FileIds.Count);
-    }
-
-    /// 
-    /// Verify the assistant creation with file-search enabled.
-    /// 
-    [Fact]
-    public async Task VerifyCreateAssistantWithFileSearchAsync()
-    {
-        // Arrange
-        this.SetupResponse(
-            HttpStatusCode.OK,
-            OpenAIAssistantResponseContent.AssistantDefinition(ModelValue, enableFileSearch: true));
-
-        // Act
-        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
-            modelId: ModelValue,
-            enableFileSearch: true);
-
-        // Assert
-        Assert.NotNull(definition);
-        Assert.Single(definition.Tools);
-        Assert.IsType<FileSearchToolDefinition>(definition.Tools[0]);
-    }
-
-    /// 
-    /// Verify the assistant creation with vector-store specified.
-    /// 
-    [Fact]
-    public async Task VerifyCreateAssistantWithVectorStoreAsync()
-    {
-        // Arrange
-        const string VectorStoreValue = "test store";
-        this.SetupResponse(
-            HttpStatusCode.OK,
-            OpenAIAssistantResponseContent.AssistantDefinition(ModelValue, vectorStoreId: VectorStoreValue));
-
-        // Act
-        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
-            modelId: ModelValue,
-            vectorStoreId: VectorStoreValue);
-
-        // Assert
-        Assert.NotNull(definition);
-        Assert.Single(definition.Tools);
-        Assert.IsType<FileSearchToolDefinition>(definition.Tools[0]);
-        Assert.NotNull(definition.ToolResources.FileSearch);
-        Assert.Single(definition.ToolResources.FileSearch.VectorStoreIds);
-    }
-
-    /// 
-    /// Verify the invocation and response of 
-    /// for an agent with temperature defined.
-    /// 
-    [Fact]
-    public async Task VerifyCreateAssistantWithTemperatureAsync()
-    {
-        // Arrange
-        const float TemperatureValue = 0.5F;
-        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.AssistantDefinition("testmodel", temperature: TemperatureValue));
-
-        // Act
-        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
-            modelId: "testmodel",
-            temperature: TemperatureValue);
-
-        // Assert
-        Assert.NotNull(definition);
-        Assert.Equal(TemperatureValue, definition.Temperature);
-    }
-
-    /// 
-    /// Verify the invocation and response of 
-    /// for an agent with topP defined.
-    /// 
-    [Fact]
-    public async Task VerifyCreateAssistantWithTopPAsync()
-    {
-        // Arrange
-        const float TopPValue = 2.0F;
-        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.AssistantDefinition("testmodel", topP: TopPValue));
-
-        // Act
-        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
-            modelId: "testmodel",
-            topP: TopPValue);
-
-        // Assert
-        Assert.NotNull(definition);
-        Assert.Equal(TopPValue, definition.NucleusSamplingFactor);
-    }
-
-    /// 
-    /// Verify the invocation and response of 
-    /// for an agent with execution settings and meta-data.
-    /// 
-    [Fact]
-    public async Task VerifyCreateAssistantWithMetadataAsync()
-    {
-        // Arrange
-        Dictionary<string, string> metadata =
-            new()
-            {
-                { "a", "1" },
-                { "b", "2" },
-            };
-        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.AssistantDefinition("testmodel", metadata: metadata));
-
-        // Act
-        Assistant definition = await this._clientProvider.AssistantClient.CreateAssistantAsync(
-            modelId: "testmodel",
-            metadata: metadata);
-
-        // Assert
-        Assert.NotNull(definition);
-        Assert.NotEmpty(definition.Metadata);
-    }
-
-    /// 
-    /// Verify the deletion of assistant.
-    /// 
-    [Fact]
-    public async Task VerifyDeleteAssistantAsync()
-    {
-        // Arrange
-        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteAgent);
-
-        // Act
-        AssistantDeletionResult result = await this._clientProvider.AssistantClient.DeleteAssistantAsync("testid");
-
-        // Assert
-        Assert.True(result.Deleted);
-    }
-
-    /// 
-    /// Verify the creating a thread.
-    /// 
-    [Fact]
-    public async Task VerifyCreateThreadAsync()
-    {
-        // Arrange
-        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateThread);
-
-        // Act
-        string threadId = await this._clientProvider.AssistantClient.CreateThreadAsync(messages: null);
-
-        // Assert
-        Assert.NotNull(threadId);
-    }
-
-    /// 
-    /// Verify the creating a thread with messages.
-    /// 
-    [Fact]
-    public async Task VerifyCreateThreadWithMessagesAsync()
-    {
-        // Arrange
-        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateThread);
-
-        // Act
-        string threadId = await this._clientProvider.AssistantClient.CreateThreadAsync(messages: [new ChatMessageContent(AuthorRole.User, "test")]);
-
-        // Assert
-        Assert.NotNull(threadId);
-    }
-
-    /// 
-    /// Verify the creating a thread with metadata.
-    /// 
-    [Fact]
-    public async Task VerifyCreateThreadWithMetadataAsync()
-    {
-        // Arrange
-        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateThread);
-        Dictionary metadata = new() { { "a", "1" }, { "b", "2" } };
-
-        // Act
-        string threadId = await this._clientProvider.AssistantClient.CreateThreadAsync(metadata: metadata);
-
-        // Assert
-        Assert.NotNull(threadId);
-    }
-
-    /// 
-    public void Dispose()
-    {
-        this._messageHandlerStub.Dispose();
-        this._httpClient.Dispose();
-    }
-
-    /// 
-    /// Initializes a new instance of the  class.
-    /// 
-    public AssistantClientExtensionsTests()
-    {
-        this._messageHandlerStub = new HttpMessageHandlerStub();
-        this._httpClient = new HttpClient(this._messageHandlerStub, disposeHandler: false);
-        this._clientProvider = OpenAIClientProvider.ForOpenAI(apiKey: new ApiKeyCredential("fakekey"), endpoint: null, this._httpClient);
-    }
-
-    private void SetupResponse(HttpStatusCode statusCode, string content) =>
-        this._messageHandlerStub.SetupResponses(statusCode, content);
-}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs
new file mode 100644
index 000000000000..70c27ccb2152
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs
@@ -0,0 +1,60 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Xunit;
+using KernelExtensions = Microsoft.SemanticKernel.Agents.OpenAI;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI.Extensions;
+
+/// 
+/// Unit testing of .
+/// 
+public class KernelExtensionsTests
+{
+    /// <summary>
+    /// Verify function lookup using KernelExtensions.
+    /// </summary>
+    [Fact]
+    public void VerifyGetKernelFunctionLookup()
+    {
+        // Arrange
+        Kernel kernel = new();
+        KernelPlugin plugin = KernelPluginFactory.CreateFromType<TestPlugin>();
+        kernel.Plugins.Add(plugin);
+
+        // Act
+        KernelFunction function = kernel.GetKernelFunction($"{nameof(TestPlugin)}-{nameof(TestPlugin.TestFunction)}", '-');
+
+        // Assert
+        Assert.NotNull(function);
+        Assert.Equal(nameof(TestPlugin.TestFunction), function.Name);
+    }
+
+    /// <summary>
+    /// Verify error case for function lookup using KernelExtensions.
+    /// </summary>
+    [Fact]
+    public void VerifyGetKernelFunctionInvalid()
+    {
+        // Arrange
+        Kernel kernel = new();
+        KernelPlugin plugin = KernelPluginFactory.CreateFromType<TestPlugin>();
+        kernel.Plugins.Add(plugin);
+
+        // Act and Assert
+        Assert.Throws(() => kernel.GetKernelFunction("a", '-'));
+        Assert.Throws(() => kernel.GetKernelFunction("a-b", ':'));
+        Assert.Throws(() => kernel.GetKernelFunction("a-b-c", '-'));
+    }
+
+    /// <summary>
+    /// Exists only for parsing.
+    /// </summary>
+#pragma warning disable CA1812 // Avoid uninstantiated internal classes
+    private sealed class TestPlugin()
+#pragma warning restore CA1812 // Avoid uninstantiated internal classes
+    {
+        [KernelFunction]
+        public void TestFunction() { }
+    }
+}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/OpenAIClientExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/OpenAIClientExtensionsTests.cs
deleted file mode 100644
index ce03e8f5843e..000000000000
--- a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/OpenAIClientExtensionsTests.cs
+++ /dev/null
@@ -1,139 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System;
-using System.ClientModel;
-using System.Collections.Generic;
-using System.IO;
-using System.Net;
-using System.Net.Http;
-using System.Text;
-using System.Threading.Tasks;
-using Microsoft.SemanticKernel.Agents.OpenAI;
-using OpenAI.VectorStores;
-using Xunit;
-
-namespace SemanticKernel.Agents.UnitTests.OpenAI.Extensions;
-
-/// 
-/// Unit testing of .
-/// 
-public sealed class OpenAIClientExtensionsTests : IDisposable
-{
-    private readonly HttpMessageHandlerStub _messageHandlerStub;
-    private readonly HttpClient _httpClient;
-    private readonly OpenAIClientProvider _clientProvider;
-
-    /// 
-    /// Verify the default creation of vector-store.
-    /// 
-    [Fact]
-    public async Task VerifyCreateDefaultVectorStoreAsync()
-    {
-        // Arrange
-        string[] fileIds = ["file-1", "file-2"];
-        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateVectorStore);
-
-        // Act
-        string storeId = await this._clientProvider.Client.CreateVectorStoreAsync(fileIds, waitUntilCompleted: false);
-
-        // Assert
-        Assert.NotNull(storeId);
-    }
-
-    /// 
-    /// Verify the custom creation of vector-store.
-    /// 
-    [Fact]
-    public async Task VerifyCreateVectorStoreAsync()
-    {
-        // Arrange
-        string[] fileIds = ["file-1", "file-2"];
-        Dictionary<string, string> metadata =
-            new()
-            {
-                { "a", "1" },
-                { "b", "2" },
-            };
-        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateVectorStore);
-
-        // Act
-        string storeId = await this._clientProvider.Client.CreateVectorStoreAsync(
-            fileIds,
-            waitUntilCompleted: false,
-            storeName: "test-store",
-            expirationPolicy: new VectorStoreExpirationPolicy(VectorStoreExpirationAnchor.LastActiveAt, 30),
-            chunkingStrategy: FileChunkingStrategy.Auto,
-            metadata: metadata);
-
-        // Assert
-        Assert.NotNull(storeId);
-    }
-
-    /// 
-    /// Verify the uploading an assistant file.
-    /// 
-    [Fact]
-    public async Task VerifyUploadFileAsync()
-    {
-        // Arrange
-        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.UploadFile);
-
-        // Act
-        await using MemoryStream stream = new(Encoding.UTF8.GetBytes("test"));
-        string fileId = await this._clientProvider.Client.UploadAssistantFileAsync(stream, "text.txt");
-
-        // Assert
-        Assert.NotNull(fileId);
-    }
-
-    /// 
-    /// Verify the deleting a file.
-    /// 
-    [Fact]
-    public async Task VerifyDeleteFileAsync()
-    {
-        // Arrange
-        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteFile);
-
-        // Act
-        bool isDeleted = await this._clientProvider.Client.DeleteFileAsync("file-id");
-
-        // Assert
-        Assert.True(isDeleted);
-    }
-
-    /// 
-    /// Verify the deleting a vector-store.
-    /// 
-    [Fact]
-    public async Task VerifyDeleteVectorStoreAsync()
-    {
-        // Arrange
-        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteVectorStore);
-
-        // Act
-        bool isDeleted = await this._clientProvider.Client.DeleteVectorStoreAsync("store-id");
-
-        // Assert
-        Assert.True(isDeleted);
-    }
-
-    /// 
-    public void Dispose()
-    {
-        this._messageHandlerStub.Dispose();
-        this._httpClient.Dispose();
-    }
-
-    /// 
-    /// Initializes a new instance of the  class.
-    /// 
-    public OpenAIClientExtensionsTests()
-    {
-        this._messageHandlerStub = new HttpMessageHandlerStub();
-        this._httpClient = new HttpClient(this._messageHandlerStub, disposeHandler: false);
-        this._clientProvider = OpenAIClientProvider.ForOpenAI(apiKey: new ApiKeyCredential("fakekey"), endpoint: null, this._httpClient);
-    }
-
-    private void SetupResponse(HttpStatusCode statusCode, string content) =>
-        this._messageHandlerStub.SetupResponses(statusCode, content);
-}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
index dfca85afc0f2..15fd0d6aa5ae 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
@@ -1,4 +1,5 @@
 // Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents.OpenAI;
 using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
@@ -20,16 +21,19 @@ public class AssistantRunOptionsFactoryTests
     public void AssistantRunOptionsFactoryExecutionOptionsNullTest()
     {
         // Arrange
-        RunCreationOptions defaultOptions =
-            new()
+        OpenAIAssistantDefinition definition =
+            new("gpt-anything")
             {
-                ModelOverride = "gpt-anything",
                 Temperature = 0.5F,
-                AdditionalInstructions = "test",
+                ExecutionOptions =
+                    new()
+                    {
+                        AdditionalInstructions = "test",
+                    },
             };
 
         // Act
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, null, null);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, null);
 
         // Assert
         Assert.NotNull(options);
@@ -48,21 +52,20 @@ public void AssistantRunOptionsFactoryExecutionOptionsNullTest()
     public void AssistantRunOptionsFactoryExecutionOptionsEquivalentTest()
     {
         // Arrange
-        RunCreationOptions defaultOptions =
-            new()
+        OpenAIAssistantDefinition definition =
+            new("gpt-anything")
             {
-                ModelOverride = "gpt-anything",
                 Temperature = 0.5F,
             };
 
-        RunCreationOptions invocationOptions =
+        OpenAIAssistantInvocationOptions invocationOptions =
             new()
             {
                 Temperature = 0.5F,
             };
 
         // Act
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, "test", invocationOptions);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, "test", invocationOptions);
 
         // Assert
         Assert.NotNull(options);
@@ -78,26 +81,29 @@ public void AssistantRunOptionsFactoryExecutionOptionsEquivalentTest()
     public void AssistantRunOptionsFactoryExecutionOptionsOverrideTest()
     {
         // Arrange
-        RunCreationOptions defaultOptions =
-            new()
+        OpenAIAssistantDefinition definition =
+            new("gpt-anything")
             {
-                ModelOverride = "gpt-anything",
                 Temperature = 0.5F,
-                TruncationStrategy = RunTruncationStrategy.CreateLastMessagesStrategy(5),
+                ExecutionOptions =
+                    new()
+                    {
+                        AdditionalInstructions = "test1",
+                        TruncationMessageCount = 5,
+                    },
             };
 
-        RunCreationOptions invocationOptions =
+        OpenAIAssistantInvocationOptions invocationOptions =
             new()
             {
-                ModelOverride = "gpt-anything",
                 AdditionalInstructions = "test2",
                 Temperature = 0.9F,
-                TruncationStrategy = RunTruncationStrategy.CreateLastMessagesStrategy(8),
-                ResponseFormat = AssistantResponseFormat.JsonObject,
+                TruncationMessageCount = 8,
+                EnableJsonResponse = true,
             };
 
         // Act
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, null, invocationOptions);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, invocationOptions);
 
         // Assert
         Assert.NotNull(options);
@@ -115,18 +121,21 @@ public void AssistantRunOptionsFactoryExecutionOptionsOverrideTest()
     public void AssistantRunOptionsFactoryExecutionOptionsMetadataTest()
     {
         // Arrange
-        RunCreationOptions defaultOptions =
-            new()
+        OpenAIAssistantDefinition definition =
+            new("gpt-anything")
             {
-                ModelOverride = "gpt-anything",
                 Temperature = 0.5F,
-                TruncationStrategy = RunTruncationStrategy.CreateLastMessagesStrategy(5),
+                ExecutionOptions =
+                    new()
+                    {
+                        TruncationMessageCount = 5,
+                    },
             };
 
-        RunCreationOptions invocationOptions =
+        OpenAIAssistantInvocationOptions invocationOptions =
             new()
             {
-                Metadata =
+                Metadata = new Dictionary<string, string>
                 {
                     { "key1", "value" },
                     { "key2", null! },
@@ -134,7 +143,7 @@ public void AssistantRunOptionsFactoryExecutionOptionsMetadataTest()
             };
 
         // Act
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, null, invocationOptions);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, invocationOptions);
 
         // Assert
         Assert.Equal(2, options.Metadata.Count);
@@ -149,21 +158,18 @@ public void AssistantRunOptionsFactoryExecutionOptionsMetadataTest()
     public void AssistantRunOptionsFactoryExecutionOptionsMessagesTest()
     {
         // Arrange
-        RunCreationOptions defaultOptions =
-            new()
-            {
-                ModelOverride = "gpt-anything",
-            };
+        OpenAIAssistantDefinition definition = new("gpt-anything");
 
-        ChatMessageContent message = new(AuthorRole.User, "test message");
-        RunCreationOptions invocationOptions =
+        OpenAIAssistantInvocationOptions invocationOptions =
             new()
             {
-                AdditionalMessages = { message.ToThreadInitializationMessage() },
+                AdditionalMessages = [
+                    new ChatMessageContent(AuthorRole.User, "test message")
+                ]
             };
 
         // Act
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, null, invocationOptions);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, invocationOptions);
 
         // Assert
         Assert.Single(options.AdditionalMessages);
@@ -176,17 +182,20 @@ public void AssistantRunOptionsFactoryExecutionOptionsMessagesTest()
     public void AssistantRunOptionsFactoryExecutionOptionsMaxTokensTest()
     {
         // Arrange
-        RunCreationOptions defaultOptions =
-            new()
+        OpenAIAssistantDefinition definition =
+            new("gpt-anything")
             {
-                ModelOverride = "gpt-anything",
                 Temperature = 0.5F,
-                MaxOutputTokenCount = 4096,
-                MaxInputTokenCount = 1024,
+                ExecutionOptions =
+                    new()
+                    {
+                        MaxCompletionTokens = 4096,
+                        MaxPromptTokens = 1024,
+                    },
             };
 
         // Act
-        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(defaultOptions, null, null);
+        RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, null);
 
         // Assert
         Assert.Equal(1024, options.MaxInputTokenCount);
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs
index 3860855b986d..2730cbbc821a 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs
@@ -2,15 +2,16 @@
 using System;
 using System.ClientModel;
 using System.Collections.Generic;
+using System.IO;
 using System.Linq;
 using System.Net;
 using System.Net.Http;
+using System.Text;
 using System.Threading.Tasks;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents;
 using Microsoft.SemanticKernel.Agents.OpenAI;
 using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.PromptTemplates.Handlebars;
 using OpenAI.Assistants;
 using Xunit;
 
@@ -19,7 +20,6 @@ namespace SemanticKernel.Agents.UnitTests.OpenAI;
 /// 
 /// Unit testing of .
 /// 
-#pragma warning disable CS0419 // Ambiguous reference in cref attribute
 public sealed class OpenAIAssistantAgentTests : IDisposable
 {
     private readonly HttpMessageHandlerStub _messageHandlerStub;
@@ -77,9 +77,11 @@ public async Task VerifyOpenAIAssistantAgentCreationDefaultTemplateAsync()
 
         OpenAIAssistantCapabilities capabilities = new("testmodel");
 
+        // Act and Assert
+        await this.VerifyAgentTemplateAsync(capabilities, templateConfig);
+
         // Act and Assert
         await this.VerifyAgentTemplateAsync(capabilities, templateConfig, new KernelPromptTemplateFactory());
-        await Assert.ThrowsAsync<KernelException>(async () => await this.VerifyAgentTemplateAsync(capabilities, templateConfig, new HandlebarsPromptTemplateFactory()));
     }
 
     /// 
@@ -309,7 +311,7 @@ public async Task VerifyOpenAIAssistantAgentRetrievalAsync()
 
         OpenAIAssistantAgent agent =
             await OpenAIAssistantAgent.RetrieveAsync(
-                this.CreateTestProvider(),
+                this.CreateTestConfiguration(),
                 "#id",
                 this._emptyKernel);
 
@@ -330,10 +332,10 @@ public async Task VerifyOpenAIAssistantAgentRetrievalWithFactoryAsync()
 
         OpenAIAssistantAgent agent =
             await OpenAIAssistantAgent.RetrieveAsync(
-                this.CreateTestProvider(),
+                this.CreateTestConfiguration(),
                 "#id",
                 this._emptyKernel,
-                [],
+                new KernelArguments(),
                 new KernelPromptTemplateFactory());
 
         // Act and Assert
@@ -348,13 +350,26 @@ public async Task VerifyOpenAIAssistantAgentDeleteAsync()
     {
         // Arrange
         OpenAIAssistantAgent agent = await this.CreateAgentAsync();
+        // Assert
+        Assert.False(agent.IsDeleted);
+
+        // Arrange
         this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteAgent);
 
         // Act
-        bool isDeleted = await agent.DeleteAsync();
+        await agent.DeleteAsync();
+        // Assert
+        Assert.True(agent.IsDeleted);
 
+        // Act
+        await agent.DeleteAsync(); // Doesn't throw
         // Assert
-        Assert.True(isDeleted);
+        Assert.True(agent.IsDeleted);
+        await Assert.ThrowsAsync<KernelException>(() => agent.AddChatMessageAsync("threadid", new(AuthorRole.User, "test")));
+        await Assert.ThrowsAsync<KernelException>(() => agent.GetThreadMessagesAsync("threadid").ToArrayAsync().AsTask());
+        await Assert.ThrowsAsync<KernelException>(() => agent.InvokeAsync("threadid").ToArrayAsync().AsTask());
+        await Assert.ThrowsAsync<KernelException>(() => agent.InvokeStreamingAsync("threadid").ToArrayAsync().AsTask());
+        await Assert.ThrowsAsync<KernelException>(() => agent.InvokeStreamingAsync("threadid", new OpenAIAssistantInvocationOptions()).ToArrayAsync().AsTask());
     }
 
     /// 
@@ -398,6 +413,25 @@ public async Task VerifyOpenAIAssistantAgentDeleteThreadAsync()
         Assert.True(isDeleted);
     }
 
+    /// 
+    /// Verify the deleting a thread via .
+    /// 
+    [Fact]
+    public async Task VerifyOpenAIAssistantAgentUploadFileAsync()
+    {
+        // Arrange
+        OpenAIAssistantAgent agent = await this.CreateAgentAsync();
+
+        this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.UploadFile);
+
+        // Act
+        using MemoryStream stream = new(Encoding.UTF8.GetBytes("test"));
+        string fileId = await agent.UploadFileAsync(stream, "text.txt");
+
+        // Assert
+        Assert.NotNull(fileId);
+    }
+
     /// 
     /// Verify invocation via .
     /// 
@@ -648,7 +682,7 @@ public async Task VerifyOpenAIAssistantAgentListDefinitionAsync()
         // Act
         var messages =
             await OpenAIAssistantAgent.ListDefinitionsAsync(
-                this.CreateTestProvider()).ToArrayAsync();
+                this.CreateTestConfiguration()).ToArrayAsync();
         // Assert
         Assert.Equal(7, messages.Length);
 
@@ -661,7 +695,7 @@ await OpenAIAssistantAgent.ListDefinitionsAsync(
         // Act
         messages =
             await OpenAIAssistantAgent.ListDefinitionsAsync(
-                this.CreateTestProvider()).ToArrayAsync();
+                this.CreateTestConfiguration()).ToArrayAsync();
         // Assert
         Assert.Equal(4, messages.Length);
     }
@@ -723,7 +757,7 @@ private async Task VerifyAgentCreationAsync(OpenAIAssistantDefinition definition
 
         OpenAIAssistantAgent agent =
             await OpenAIAssistantAgent.CreateAsync(
-                this.CreateTestProvider(),
+                this.CreateTestConfiguration(),
                 definition,
                 this._emptyKernel);
 
@@ -733,16 +767,16 @@ await OpenAIAssistantAgent.CreateAsync(
     private async Task VerifyAgentTemplateAsync(
         OpenAIAssistantCapabilities capabilities,
         PromptTemplateConfig templateConfig,
-        IPromptTemplateFactory templateFactory)
+        IPromptTemplateFactory? templateFactory = null)
     {
         this.SetupResponse(HttpStatusCode.OK, capabilities, templateConfig);
 
         OpenAIAssistantAgent agent =
             await OpenAIAssistantAgent.CreateFromTemplateAsync(
-                this.CreateTestProvider(),
+                this.CreateTestConfiguration(),
                 capabilities,
                 this._emptyKernel,
-                [],
+                new KernelArguments(),
                 templateConfig,
                 templateFactory);
 
@@ -769,8 +803,9 @@ private static void ValidateAgent(
         // Verify fundamental state
         Assert.NotNull(agent);
         Assert.NotNull(agent.Id);
+        Assert.False(agent.IsDeleted);
         Assert.NotNull(agent.Definition);
-        Assert.Equal(expectedConfig.ModelId, agent.Definition.Model);
+        Assert.Equal(expectedConfig.ModelId, agent.Definition.ModelId);
 
         // Verify core properties
         Assert.Equal(expectedInstructions ?? string.Empty, agent.Instructions);
@@ -779,7 +814,11 @@ private static void ValidateAgent(
 
         // Verify options
         Assert.Equal(expectedConfig.Temperature, agent.Definition.Temperature);
-        Assert.Equal(expectedConfig.TopP, agent.Definition.NucleusSamplingFactor);
+        Assert.Equal(expectedConfig.TopP, agent.Definition.TopP);
+        Assert.Equal(expectedConfig.ExecutionOptions?.MaxCompletionTokens, agent.Definition.ExecutionOptions?.MaxCompletionTokens);
+        Assert.Equal(expectedConfig.ExecutionOptions?.MaxPromptTokens, agent.Definition.ExecutionOptions?.MaxPromptTokens);
+        Assert.Equal(expectedConfig.ExecutionOptions?.ParallelToolCallsEnabled, agent.Definition.ExecutionOptions?.ParallelToolCallsEnabled);
+        Assert.Equal(expectedConfig.ExecutionOptions?.TruncationMessageCount, agent.Definition.ExecutionOptions?.TruncationMessageCount);
 
         // Verify tool definitions
         int expectedToolCount = 0;
@@ -791,7 +830,7 @@ private static void ValidateAgent(
             ++expectedToolCount;
         }
 
-        Assert.Equal(hasCodeInterpreter, agent.Definition.Tools.OfType().Any());
+        Assert.Equal(hasCodeInterpreter, agent.Tools.OfType().Any());
 
         bool hasFileSearch = false;
         if (expectedConfig.EnableFileSearch)
@@ -800,9 +839,9 @@ private static void ValidateAgent(
             ++expectedToolCount;
         }
 
-        Assert.Equal(hasFileSearch, agent.Definition.Tools.OfType().Any());
+        Assert.Equal(hasFileSearch, agent.Tools.OfType().Any());
 
-        Assert.Equal(expectedToolCount, agent.Definition.Tools.Count);
+        Assert.Equal(expectedToolCount, agent.Tools.Count);
 
         // Verify metadata
         Assert.NotNull(agent.Definition.Metadata);
@@ -826,8 +865,8 @@ private static void ValidateAgent(
         }
 
         // Verify detail definition
-        Assert.Equal(expectedConfig.VectorStoreId, agent.Definition.ToolResources.FileSearch?.VectorStoreIds.SingleOrDefault());
-        Assert.Equal(expectedConfig.CodeInterpreterFileIds, agent.Definition.ToolResources.CodeInterpreter?.FileIds);
+        Assert.Equal(expectedConfig.VectorStoreId, agent.Definition.VectorStoreId);
+        Assert.Equal(expectedConfig.CodeInterpreterFileIds, agent.Definition.CodeInterpreterFileIds);
     }
 
     private Task CreateAgentAsync()
@@ -838,12 +877,12 @@ private Task CreateAgentAsync()
 
         return
             OpenAIAssistantAgent.CreateAsync(
-                this.CreateTestProvider(),
+                this.CreateTestConfiguration(),
                 definition,
                 this._emptyKernel);
     }
 
-    private OpenAIClientProvider CreateTestProvider(bool targetAzure = false)
+    private OpenAIClientProvider CreateTestConfiguration(bool targetAzure = false)
         => targetAzure ?
             OpenAIClientProvider.ForAzureOpenAI(apiKey: new ApiKeyCredential("fakekey"), endpoint: new Uri("https://localhost"), this._httpClient) :
             OpenAIClientProvider.ForOpenAI(apiKey: new ApiKeyCredential("fakekey"), endpoint: null, this._httpClient);
@@ -867,5 +906,3 @@ public void MyFunction(int index)
         { }
     }
 }
-#pragma warning restore CS0419 // Ambiguous reference in cref attribute
-
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs
index f8778a4f2900..b0131ac9be6b 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs
@@ -2,7 +2,6 @@
 using System.Collections.Generic;
 using System.Text.Json;
 using Microsoft.SemanticKernel.Agents.OpenAI;
-using SemanticKernel.Agents.UnitTests.Test;
 using Xunit;
 
 namespace SemanticKernel.Agents.UnitTests.OpenAI;
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs
index 0a71201e7626..4962a9c04797 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs
@@ -4,7 +4,6 @@
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents.OpenAI;
 using Microsoft.SemanticKernel.ChatCompletion;
-using SemanticKernel.Agents.UnitTests.Test;
 using Xunit;
 
 namespace SemanticKernel.Agents.UnitTests.OpenAI;
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs
index 3ecf07fada5e..7ae3cbaeacbe 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs
@@ -1,5 +1,4 @@
 // Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
 using System.Linq;
 using System.Net;
 using System.Net.Http;
@@ -7,7 +6,6 @@
 using System.Text.Json;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents.OpenAI;
-using OpenAI.Assistants;
 using Xunit;
 
 namespace SemanticKernel.Agents.UnitTests.OpenAI;
@@ -123,7 +121,7 @@ public static string AssistantDefinition(
                 builder.AppendLine(@$"  ""code_interpreter"": {{ ""file_ids"": [{fileIds}] }}{(hasFileSearch ? "," : string.Empty)}");
             }
 
-            if (hasFileSearch && capabilities.VectorStoreId != null)
+            if (hasFileSearch)
             {
                 builder.AppendLine(@$"  ""file_search"": {{ ""vector_store_ids"": [""{capabilities.VectorStoreId}""] }}");
             }
@@ -175,115 +173,6 @@ public static string AssistantDefinition(
         return builder.ToString();
     }
 
-    /// 
-    /// The response for creating or querying an assistant definition.
-    /// 
-    public static string AssistantDefinition(
-        string modelId,
-        string? name = null,
-        string? description = null,
-        string? instructions = null,
-        bool enableCodeInterpreter = false,
-        IReadOnlyList? codeInterpreterFileIds = null,
-        bool enableFileSearch = false,
-        string? vectorStoreId = null,
-        float? temperature = null,
-        float? topP = null,
-        AssistantResponseFormat? responseFormat = null,
-        IReadOnlyDictionary? metadata = null)
-    {
-        StringBuilder builder = new();
-        builder.AppendLine("{");
-        builder.AppendLine(@$"  ""id"": ""{AssistantId}"",");
-        builder.AppendLine(@"  ""object"": ""assistant"",");
-        builder.AppendLine(@"  ""created_at"": 1698984975,");
-        builder.AppendLine(@$"  ""name"": ""{name}"",");
-        builder.AppendLine(@$"  ""description"": ""{description}"",");
-        builder.AppendLine(@$"  ""instructions"": ""{instructions}"",");
-        builder.AppendLine(@$"  ""model"": ""{modelId}"",");
-
-        bool hasCodeInterpreterFiles = (codeInterpreterFileIds?.Count ?? 0) > 0;
-        bool hasCodeInterpreter = enableCodeInterpreter || hasCodeInterpreterFiles;
-        bool hasFileSearch = enableFileSearch || vectorStoreId != null;
-        if (!hasCodeInterpreter && !hasFileSearch)
-        {
-            builder.AppendLine(@"  ""tools"": [],");
-        }
-        else
-        {
-            builder.AppendLine(@"  ""tools"": [");
-
-            if (hasCodeInterpreter)
-            {
-                builder.Append(@$"  {{ ""type"": ""code_interpreter"" }}{(hasFileSearch ? "," : string.Empty)}");
-            }
-
-            if (hasFileSearch)
-            {
-                builder.AppendLine(@"  { ""type"": ""file_search"" }");
-            }
-
-            builder.AppendLine("    ],");
-        }
-
-        if (!hasCodeInterpreterFiles && !hasFileSearch)
-        {
-            builder.AppendLine(@"  ""tool_resources"": {},");
-        }
-        else
-        {
-            builder.AppendLine(@"  ""tool_resources"": {");
-
-            if (hasCodeInterpreterFiles)
-            {
-                string fileIds = string.Join(",", codeInterpreterFileIds!.Select(fileId => "\"" + fileId + "\""));
-                builder.AppendLine(@$"  ""code_interpreter"": {{ ""file_ids"": [{fileIds}] }}{(hasFileSearch ? "," : string.Empty)}");
-            }
-
-            if (hasFileSearch && vectorStoreId != null)
-            {
-                builder.AppendLine(@$"  ""file_search"": {{ ""vector_store_ids"": [""{vectorStoreId}""] }}");
-            }
-
-            builder.AppendLine("    },");
-        }
-
-        if (temperature.HasValue)
-        {
-            builder.AppendLine(@$"  ""temperature"": {temperature},");
-        }
-
-        if (topP.HasValue)
-        {
-            builder.AppendLine(@$"  ""top_p"": {topP},");
-        }
-        int metadataCount = (metadata?.Count ?? 0);
-        if (metadataCount == 0)
-        {
-            builder.AppendLine(@"  ""metadata"": {}");
-        }
-        else
-        {
-            int index = 0;
-            builder.AppendLine(@"  ""metadata"": {");
-
-            if (metadataCount > 0)
-            {
-                foreach (var (key, value) in metadata!)
-                {
-                    builder.AppendLine(@$"    ""{key}"": ""{value}""{(index < metadataCount - 1 ? "," : string.Empty)}");
-                    ++index;
-                }
-            }
-
-            builder.AppendLine("  }");
-        }
-
-        builder.AppendLine("}");
-
-        return builder.ToString();
-    }
-
     public const string DeleteAgent =
         $$$"""
         {
@@ -556,42 +445,6 @@ public static string GetTextMessage(string text = "test") =>
         }
         """;
 
-    public static string DeleteFile =
-        """
-        {
-          "id": "file-abc123",
-          "object": "file",
-          "deleted": true
-        }
-        """;
-
-    public static string CreateVectorStore =
-        """
-        {
-          "id": "vs_abc123",
-          "object": "vector_store",
-          "created_at": 1699061776,
-          "name": "test store",
-          "bytes": 139920,
-          "file_counts": {
-            "in_progress": 0,
-            "completed": 3,
-            "failed": 0,
-            "cancelled": 0,
-            "total": 3
-          }
-        }      
-        """;
-
-    public static string DeleteVectorStore =
-        """
-        {
-          "id": "vs-abc123",
-          "object": "vector_store.deleted",
-          "deleted": true
-        }
-        """;
-
     #endregion
 
     /// 
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs
index 410b93b3f03b..6217e1f38395 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs
@@ -91,10 +91,10 @@ public void VerifyOpenAIClientProviderWithHttpClient()
 
         // Arrange
         using HttpClient httpClientWithHeaders = new() { BaseAddress = new Uri("http://myproxy:9819") };
-        httpClientWithHeaders.DefaultRequestHeaders.Add("X-Test", "Test");
+        httpClient.DefaultRequestHeaders.Add("X-Test", "Test");
 
         // Act
-        OpenAIClientProvider providerWithHeaders = OpenAIClientProvider.ForOpenAI(httpClient: httpClientWithHeaders);
+        OpenAIClientProvider providerWithHeaders = OpenAIClientProvider.ForOpenAI(httpClient: httpClient);
 
         // Assert
         Assert.NotNull(providerWithHeaders.Client);
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs
index c4b8abca4baf..1689bec1f828 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs
@@ -4,7 +4,6 @@
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Agents.OpenAI;
 using Microsoft.SemanticKernel.ChatCompletion;
-using SemanticKernel.Agents.UnitTests.Test;
 using Xunit;
 
 namespace SemanticKernel.Agents.UnitTests.OpenAI;
diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Services/AzureAIInferenceChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Services/AzureAIInferenceChatCompletionServiceTests.cs
index 6faef5ab9a11..a8447d4838a3 100644
--- a/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Services/AzureAIInferenceChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Services/AzureAIInferenceChatCompletionServiceTests.cs
@@ -249,7 +249,7 @@ public async Task GetChatMessageInResponseFormatsAsync(string formatType, string
                 format = JsonSerializer.Deserialize(formatValue);
                 break;
             case "ChatResponseFormat":
-                format = formatValue == "text" ? new ChatCompletionsResponseFormatText() : new ChatCompletionsResponseFormatJsonObject();
+                format = formatValue == "text" ? new ChatCompletionsResponseFormatText() : new ChatCompletionsResponseFormatJSON();
                 break;
         }
 
diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference/Settings/AzureAIInferencePromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.AzureAIInference/Settings/AzureAIInferencePromptExecutionSettings.cs
index c6e9dd5d503e..3146cb94fb78 100644
--- a/dotnet/src/Connectors/Connectors.AzureAIInference/Settings/AzureAIInferencePromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.AzureAIInference/Settings/AzureAIInferencePromptExecutionSettings.cs
@@ -136,7 +136,7 @@ public int? MaxTokens
     /// Note that to enable JSON mode, some AI models may also require you to instruct the model to produce JSON
     /// via a system or user message.
     /// Please note  is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes.
-    /// The available derived classes include  and .
+    /// The available derived classes include  and .
     /// 
     [JsonPropertyName("response_format")]
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureClientCoreTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureClientCoreTests.cs
deleted file mode 100644
index c9c47f07ee86..000000000000
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Core/AzureClientCoreTests.cs
+++ /dev/null
@@ -1,90 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System;
-using System.Net.Http;
-using Azure.AI.OpenAI;
-using Azure.Core;
-using Microsoft.Extensions.Logging;
-using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
-using Moq;
-
-namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Core;
-
-public sealed class AzureClientCoreTests : IDisposable
-{
-    private readonly HttpClient _httpClient;
-    private readonly Mock _mockLogger;
-
-    public AzureClientCoreTests()
-    {
-        this._httpClient = new HttpClient();
-        this._mockLogger = new Mock();
-    }
-
-    public void Dispose()
-    {
-        this._httpClient.Dispose();
-    }
-
-    [Fact]
-    public void ConstructorWithValidParametersShouldInitializeCorrectly()
-    {
-        // Arrange
-        var deploymentName = "test-deployment";
-        var endpoint = "https://test-endpoint.openai.azure.com/";
-        var apiKey = "test-api-key";
-
-        // Act
-        var azureClientCore = new AzureClientCore(deploymentName, endpoint, apiKey, this._httpClient, this._mockLogger.Object);
-
-        // Assert
-        Assert.NotNull(azureClientCore.Client);
-        Assert.Equal(deploymentName, azureClientCore.DeploymentName);
-        Assert.Equal(new Uri(endpoint), azureClientCore.Endpoint);
-    }
-
-    [Fact]
-    public void ConstructorWithInvalidEndpointShouldThrowArgumentException()
-    {
-        // Arrange
-        var deploymentName = "test-deployment";
-        var invalidEndpoint = "http://invalid-endpoint";
-        var apiKey = "test-api-key";
-
-        // Act & Assert
-        Assert.Throws(() =>
-            new AzureClientCore(deploymentName, invalidEndpoint, apiKey, this._httpClient, this._mockLogger.Object));
-    }
-
-    [Fact]
-    public void ConstructorWithTokenCredentialShouldInitializeCorrectly()
-    {
-        // Arrange
-        var deploymentName = "test-deployment";
-        var endpoint = "https://test-endpoint.openai.azure.com/";
-        var tokenCredential = new Mock().Object;
-
-        // Act
-        var azureClientCore = new AzureClientCore(deploymentName, endpoint, tokenCredential, this._httpClient, this._mockLogger.Object);
-
-        // Assert
-        Assert.NotNull(azureClientCore.Client);
-        Assert.Equal(deploymentName, azureClientCore.DeploymentName);
-        Assert.Equal(new Uri(endpoint), azureClientCore.Endpoint);
-    }
-
-    [Fact]
-    public void ConstructorWithOpenAIClientShouldInitializeCorrectly()
-    {
-        // Arrange
-        var deploymentName = "test-deployment";
-        var openAIClient = new Mock(MockBehavior.Strict, new Uri("https://test-endpoint.openai.azure.com/"), new Mock().Object).Object;
-
-        // Act
-        var azureClientCore = new AzureClientCore(deploymentName, openAIClient, this._mockLogger.Object);
-
-        // Assert
-        Assert.NotNull(azureClientCore.Client);
-        Assert.Equal(deploymentName, azureClientCore.DeploymentName);
-    }
-}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs
index bcfa9aef4ecd..336d12036db9 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs
@@ -79,17 +79,6 @@ public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFacto
         Assert.Equal("model-id", service.Attributes["ModelId"]);
     }
 
-    [Theory]
-    [InlineData("invalid")]
-    public void ConstructorThrowsOnInvalidApiVersion(string? apiVersion)
-    {
-        // Act & Assert
-        Assert.Throws(() =>
-        {
-            _ = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", httpClient: this._httpClient, apiVersion: apiVersion);
-        });
-    }
-
     [Theory]
     [InlineData(true)]
     [InlineData(false)]
@@ -133,10 +122,8 @@ public async Task GetTextContentsWorksCorrectlyAsync()
         Assert.Equal(155, usage.TotalTokenCount);
     }
 
-    [Theory]
-    [InlineData("system")]
-    [InlineData("developer")]
-    public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync(string historyRole)
+    [Fact]
+    public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync()
     {
         // Arrange
         var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
@@ -165,14 +152,7 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync(string his
         var chatHistory = new ChatHistory();
         chatHistory.AddUserMessage("User Message");
         chatHistory.AddUserMessage([new ImageContent(new Uri("https://image")), new TextContent("User Message")]);
-        if (historyRole == "system")
-        {
-            chatHistory.AddSystemMessage("System Message");
-        }
-        else
-        {
-            chatHistory.AddDeveloperMessage("Developer Message");
-        }
+        chatHistory.AddSystemMessage("System Message");
         chatHistory.AddAssistantMessage("Assistant Message");
 
         using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK)
@@ -209,16 +189,8 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync(string his
         Assert.Equal("User Message", contentItems[1].GetProperty("text").GetString());
         Assert.Equal("text", contentItems[1].GetProperty("type").GetString());
 
-        if (historyRole == "system")
-        {
-            Assert.Equal("system", systemMessage.GetProperty("role").GetString());
-            Assert.Equal("System Message", systemMessage.GetProperty("content").GetString());
-        }
-        else
-        {
-            Assert.Equal("developer", systemMessage.GetProperty("role").GetString());
-            Assert.Equal("Developer Message", systemMessage.GetProperty("content").GetString());
-        }
+        Assert.Equal("system", systemMessage.GetProperty("role").GetString());
+        Assert.Equal("System Message", systemMessage.GetProperty("content").GetString());
 
         Assert.Equal("assistant", assistantMessage.GetProperty("role").GetString());
         Assert.Equal("Assistant Message", assistantMessage.GetProperty("content").GetString());
@@ -273,166 +245,6 @@ public async Task GetChatMessageContentsHandlesResponseFormatCorrectlyAsync(obje
         Assert.Equal(expectedResponseType, content.GetProperty("response_format").GetProperty("type").GetString());
     }
 
-    [Theory]
-    [InlineData(true, "max_completion_tokens")]
-    [InlineData(false, "max_tokens")]
-    public async Task GetChatMessageContentsHandlesMaxTokensCorrectlyAsync(bool useNewMaxTokens, string expectedPropertyName)
-    {
-        // Arrange
-        var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
-        var settings = new AzureOpenAIPromptExecutionSettings
-        {
-            SetNewMaxCompletionTokensEnabled = useNewMaxTokens,
-            MaxTokens = 123
-        };
-
-        using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK)
-        {
-            Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
-        };
-        this._messageHandlerStub.ResponsesToReturn.Add(responseMessage);
-
-        // Act
-        var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings);
-
-        // Assert
-        var requestContent = this._messageHandlerStub.RequestContents[0];
-
-        Assert.NotNull(requestContent);
-
-        var content = JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContent));
-
-        Assert.True(content.TryGetProperty(expectedPropertyName, out var propertyValue));
-        Assert.Equal(123, propertyValue.GetInt32());
-    }
-
-    [Theory]
-    [InlineData("stream", "true")]
-    [InlineData("stream_options", "{\"include_usage\":true}")]
-    [InlineData("model", "\"deployment\"")]
-
-    public async Task GetStreamingChatMessageContentsRequestHandlesInternalFieldsCorrectlyAsync(string expectedPropertyName, string expectedRawJsonText)
-    {
-        // Arrange
-        var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
-        var settings = new AzureOpenAIPromptExecutionSettings();
-
-        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")));
-
-        using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK)
-        {
-            Content = new StreamContent(stream)
-        };
-        this._messageHandlerStub.ResponsesToReturn.Add(responseMessage);
-
-        // Act
-        await foreach (var update in service.GetStreamingChatMessageContentsAsync(new ChatHistory("System message"), settings))
-        {
-            var openAIUpdate = Assert.IsType(update.InnerContent);
-        }
-
-        // Assert
-        var requestContent = this._messageHandlerStub.RequestContents[0];
-
-        Assert.NotNull(requestContent);
-
-        var content = JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContent));
-
-        Assert.True(content.TryGetProperty(expectedPropertyName, out var propertyValue));
-        Assert.Equal(expectedRawJsonText, propertyValue.GetRawText());
-    }
-
-    [Theory]
-    [InlineData("model", "\"deployment\"")]
-
-    public async Task GetChatMessageContentsRequestHandlesInternalFieldsCorrectlyAsync(string expectedPropertyName, string expectedRawJsonText)
-    {
-        // Arrange
-        var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
-        var settings = new AzureOpenAIPromptExecutionSettings();
-
-        using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK)
-        {
-            Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
-        };
-        this._messageHandlerStub.ResponsesToReturn.Add(responseMessage);
-
-        // Act
-        var results = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings);
-        var result = Assert.Single(results);
-        Assert.IsType(result.InnerContent);
-
-        // Assert
-        var requestContent = this._messageHandlerStub.RequestContents[0];
-
-        Assert.NotNull(requestContent);
-
-        var content = JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContent));
-
-        Assert.True(content.TryGetProperty(expectedPropertyName, out var propertyValue));
-        Assert.Equal(expectedRawJsonText, propertyValue.GetRawText());
-    }
-
-    [Theory]
-    [InlineData(null, null)]
-    [InlineData("string", "low")]
-    [InlineData("string", "medium")]
-    [InlineData("string", "high")]
-    [InlineData("ChatReasonEffortLevel.Low", "low")]
-    [InlineData("ChatReasonEffortLevel.Medium", "medium")]
-    [InlineData("ChatReasonEffortLevel.High", "high")]
-    public async Task GetChatMessageInReasoningEffortAsync(string? effortType, string? expectedEffortLevel)
-    {
-        // Assert
-        object? reasoningEffortObject = null;
-        switch (effortType)
-        {
-            case "string":
-                reasoningEffortObject = expectedEffortLevel;
-                break;
-            case "ChatReasonEffortLevel.Low":
-                reasoningEffortObject = ChatReasoningEffortLevel.Low;
-                break;
-            case "ChatReasonEffortLevel.Medium":
-                reasoningEffortObject = ChatReasoningEffortLevel.Medium;
-                break;
-            case "ChatReasonEffortLevel.High":
-                reasoningEffortObject = ChatReasoningEffortLevel.High;
-                break;
-        }
-
-        var modelId = "o1";
-        var sut = new OpenAIChatCompletionService(modelId, "apiKey", httpClient: this._httpClient);
-        OpenAIPromptExecutionSettings executionSettings = new() { ReasoningEffort = reasoningEffortObject };
-        using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK)
-        {
-            Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json"))
-        };
-
-        this._messageHandlerStub.ResponsesToReturn.Add(responseMessage);
-
-        // Act
-        var result = await sut.GetChatMessageContentAsync(new ChatHistory("System message"), executionSettings);
-
-        // Assert
-        Assert.NotNull(result);
-
-        var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[0]!);
-        Assert.NotNull(actualRequestContent);
-
-        var optionsJson = JsonSerializer.Deserialize(actualRequestContent);
-
-        if (expectedEffortLevel is null)
-        {
-            Assert.False(optionsJson.TryGetProperty("reasoning_effort", out _));
-            return;
-        }
-
-        var requestedReasoningEffort = optionsJson.GetProperty("reasoning_effort").GetString();
-
-        Assert.Equal(expectedEffortLevel, requestedReasoningEffort);
-    }
-
     [Theory]
     [MemberData(nameof(ToolCallBehaviors))]
     public async Task GetChatMessageContentsWorksCorrectlyAsync(ToolCallBehavior behavior)
@@ -994,49 +806,6 @@ public async Task GetChatMessageContentsUsesPromptAndSettingsCorrectlyAsync()
         Assert.Equal("user", messages[1].GetProperty("role").GetString());
     }
 
-    [Fact]
-    public async Task GetChatMessageContentsUsesDeveloperPromptAndSettingsCorrectlyAsync()
-    {
-        // Arrange
-        const string Prompt = "This is test prompt";
-        const string DeveloperMessage = "This is test system message";
-
-        var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
-        var settings = new AzureOpenAIPromptExecutionSettings() { ChatDeveloperPrompt = DeveloperMessage };
-
-        using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK)
-        {
-            Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
-        };
-        this._messageHandlerStub.ResponsesToReturn.Add(responseMessage);
-
-        IKernelBuilder builder = Kernel.CreateBuilder();
-        builder.Services.AddTransient((sp) => service);
-        Kernel kernel = builder.Build();
-
-        // Act
-        var result = await kernel.InvokePromptAsync(Prompt, new(settings));
-
-        // Assert
-        Assert.Equal("Test chat response", result.ToString());
-
-        var requestContentByteArray = this._messageHandlerStub.RequestContents[0];
-
-        Assert.NotNull(requestContentByteArray);
-
-        var requestContent = JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContentByteArray));
-
-        var messages = requestContent.GetProperty("messages");
-
-        Assert.Equal(2, messages.GetArrayLength());
-
-        Assert.Equal(DeveloperMessage, messages[0].GetProperty("content").GetString());
-        Assert.Equal("developer", messages[0].GetProperty("role").GetString());
-
-        Assert.Equal(Prompt, messages[1].GetProperty("content").GetString());
-        Assert.Equal("user", messages[1].GetProperty("role").GetString());
-    }
-
     [Fact]
     public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndSettingsCorrectlyAsync()
     {
@@ -1768,14 +1537,6 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallAndEmptyArgumen
 
     public static TheoryData Versions => new()
     {
-        { "V2025_01_01_preview", "2025-01-01-preview" },
-        { "V2025_01_01_PREVIEW", "2025-01-01-preview" },
-        { "2025_01_01_Preview", "2025-01-01-preview" },
-        { "2025-01-01-preview", "2025-01-01-preview" },
-        { "V2024_12_01_preview", "2024-12-01-preview" },
-        { "V2024_12_01_PREVIEW", "2024-12-01-preview" },
-        { "2024_12_01_Preview", "2024-12-01-preview" },
-        { "2024-12-01-preview", "2024-12-01-preview" },
         { "V2024_10_01_preview", "2024-10-01-preview" },
         { "V2024_10_01_PREVIEW", "2024-10-01-preview" },
         { "2024_10_01_Preview", "2024-10-01-preview" },
@@ -1791,16 +1552,10 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallAndEmptyArgumen
         { "V2024_06_01", "2024-06-01" },
         { "2024_06_01", "2024-06-01" },
         { "2024-06-01", "2024-06-01" },
-        { "V2024_10_21", "2024-10-21" },
-        { "2024_10_21", "2024-10-21" },
-        { "2024-10-21", "2024-10-21" },
-        { AzureOpenAIClientOptions.ServiceVersion.V2025_01_01_Preview.ToString(), null },
-        { AzureOpenAIClientOptions.ServiceVersion.V2024_12_01_Preview.ToString(), null },
         { AzureOpenAIClientOptions.ServiceVersion.V2024_10_01_Preview.ToString(), null },
         { AzureOpenAIClientOptions.ServiceVersion.V2024_09_01_Preview.ToString(), null },
         { AzureOpenAIClientOptions.ServiceVersion.V2024_08_01_Preview.ToString(), null },
-        { AzureOpenAIClientOptions.ServiceVersion.V2024_06_01.ToString(), null },
-        { AzureOpenAIClientOptions.ServiceVersion.V2024_10_21.ToString(), null }
+        { AzureOpenAIClientOptions.ServiceVersion.V2024_06_01.ToString(), null }
     };
 
     public void Dispose()
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToAudioServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToAudioServiceTests.cs
index 27e2b3ebc14d..3ad42a32eac6 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToAudioServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToAudioServiceTests.cs
@@ -47,23 +47,6 @@ public void ConstructorsAddRequiredMetadata(bool includeLoggerFactory)
         Assert.Equal("deployment-name", service.Attributes["DeploymentName"]);
     }
 
-    [Theory]
-    [InlineData(true)]
-    [InlineData(false)]
-    public void ConstructorTokenCredentialAddRequiredMetadata(bool includeLoggerFactory)
-    {
-        // Arrange & Act
-        var service = includeLoggerFactory ?
-            new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", Azure.Core.DelegatedTokenCredential.Create((context, ct)
-                => new Azure.Core.AccessToken("abc", DateTimeOffset.Now.AddMinutes(30))), "model-id", loggerFactory: this._mockLoggerFactory.Object) :
-            new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", Azure.Core.DelegatedTokenCredential.Create((context, ct)
-                => new Azure.Core.AccessToken("abc", DateTimeOffset.Now.AddMinutes(30))), "model-id");
-
-        // Assert
-        Assert.Equal("model-id", service.Attributes["ModelId"]);
-        Assert.Equal("deployment-name", service.Attributes["DeploymentName"]);
-    }
-
     [Fact]
     public void ItThrowsIfModelIdIsNotProvided()
     {
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs
index 8f3b9a245634..6b4b16c574af 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs
@@ -35,7 +35,6 @@ public void ItCreatesOpenAIExecutionSettingsWithCorrectDefaults()
         Assert.Null(executionSettings.TopLogprobs);
         Assert.Null(executionSettings.Logprobs);
         Assert.Null(executionSettings.AzureChatDataSource);
-        Assert.False(executionSettings.SetNewMaxCompletionTokensEnabled);
         Assert.Equal(maxTokensSettings, executionSettings.MaxTokens);
         Assert.Null(executionSettings.Store);
         Assert.Null(executionSettings.Metadata);
@@ -59,8 +58,7 @@ public void ItUsesExistingOpenAIExecutionSettings()
             TokenSelectionBiases = new Dictionary() { { 1, 2 }, { 3, 4 } },
             Seed = 123456,
             Store = true,
-            Metadata = new Dictionary() { { "foo", "bar" } },
-            SetNewMaxCompletionTokensEnabled = true,
+            Metadata = new Dictionary() { { "foo", "bar" } }
         };
 
         // Act
@@ -76,7 +74,6 @@ public void ItUsesExistingOpenAIExecutionSettings()
         Assert.Equal(actualSettings.Seed, executionSettings.Seed);
         Assert.Equal(actualSettings.Store, executionSettings.Store);
         Assert.Equal(actualSettings.Metadata, executionSettings.Metadata);
-        Assert.Equal(actualSettings.SetNewMaxCompletionTokensEnabled, executionSettings.SetNewMaxCompletionTokensEnabled);
     }
 
     [Fact]
@@ -262,7 +259,6 @@ public void PromptExecutionSettingsFreezeWorksAsExpected()
         Assert.Throws(() => executionSettings.TokenSelectionBiases?.Add(5, 6));
         Assert.Throws(() => executionSettings.Store = false);
         Assert.Throws(() => executionSettings.Metadata?.Add("bar", "foo"));
-        Assert.Throws(() => executionSettings.SetNewMaxCompletionTokensEnabled = true);
 
         executionSettings!.Freeze(); // idempotent
         Assert.True(executionSettings.IsFrozen);
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs
index 23fc85541a0b..bf7859815f1d 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs
@@ -1,7 +1,5 @@
 // Copyright (c) Microsoft. All rights reserved.
 
-using System;
-using System.ClientModel.Primitives;
 using System.Diagnostics;
 using Azure.AI.OpenAI.Chat;
 using Microsoft.SemanticKernel.ChatCompletion;
@@ -37,29 +35,22 @@ protected override ChatCompletionOptions CreateChatCompletionOptions(
         {
             return base.CreateChatCompletionOptions(executionSettings, chatHistory, toolCallingConfig, kernel);
         }
-        ChatCompletionOptions options = ModelReaderWriter.Read(BinaryData.FromString("{\"stream_options\":{\"include_usage\":true}}")!)!;
-        options.MaxOutputTokenCount = executionSettings.MaxTokens;
-        options.Temperature = (float?)executionSettings.Temperature;
-        options.TopP = (float?)executionSettings.TopP;
-        options.FrequencyPenalty = (float?)executionSettings.FrequencyPenalty;
-        options.PresencePenalty = (float?)executionSettings.PresencePenalty;
-#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
 
-        options.Seed = executionSettings.Seed;
-#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
-        options.EndUserId = executionSettings.User;
-        options.TopLogProbabilityCount = executionSettings.TopLogprobs;
-        options.IncludeLogProbabilities = executionSettings.Logprobs;
-        options.StoredOutputEnabled = executionSettings.Store;
-        options.ReasoningEffortLevel = GetEffortLevel(executionSettings);
-        options.ResponseModalities = ChatResponseModalities.Default;
-
-        if (azureSettings.SetNewMaxCompletionTokensEnabled)
+        var options = new ChatCompletionOptions
         {
-#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
-            options.SetNewMaxCompletionTokensPropertyEnabled(true);
-#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
-        }
+            MaxOutputTokenCount = executionSettings.MaxTokens,
+            Temperature = (float?)executionSettings.Temperature,
+            TopP = (float?)executionSettings.TopP,
+            FrequencyPenalty = (float?)executionSettings.FrequencyPenalty,
+            PresencePenalty = (float?)executionSettings.PresencePenalty,
+#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+            Seed = executionSettings.Seed,
+#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+            EndUserId = executionSettings.User,
+            TopLogProbabilityCount = executionSettings.TopLogprobs,
+            IncludeLogProbabilities = executionSettings.Logprobs,
+            StoredOutputEnabled = executionSettings.Store,
+        };
 
         var responseFormat = GetResponseFormat(executionSettings);
         if (responseFormat is not null)
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs
index a3dbbe730057..5ad45701a921 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs
@@ -135,12 +135,9 @@ internal static AzureOpenAIClientOptions GetAzureOpenAIClientOptions(HttpClient?
             sdkVersion = serviceVersion.ToUpperInvariant() switch // Azure SDK versioning
             {
                 "2024-06-01" or "V2024_06_01" or "2024_06_01" => AzureOpenAIClientOptions.ServiceVersion.V2024_06_01,
-                "2024-10-21" or "V2024_10_21" or "2024_10_21" => AzureOpenAIClientOptions.ServiceVersion.V2024_10_21,
                 "2024-08-01-PREVIEW" or "V2024_08_01_PREVIEW" or "2024_08_01_PREVIEW" => AzureOpenAIClientOptions.ServiceVersion.V2024_08_01_Preview,
                 "2024-09-01-PREVIEW" or "V2024_09_01_PREVIEW" or "2024_09_01_PREVIEW" => AzureOpenAIClientOptions.ServiceVersion.V2024_09_01_Preview,
                 "2024-10-01-PREVIEW" or "V2024_10_01_PREVIEW" or "2024_10_01_PREVIEW" => AzureOpenAIClientOptions.ServiceVersion.V2024_10_01_Preview,
-                "2024-12-01-PREVIEW" or "V2024_12_01_PREVIEW" or "2024_12_01_PREVIEW" => AzureOpenAIClientOptions.ServiceVersion.V2024_12_01_Preview,
-                "2025-01-01-PREVIEW" or "V2025_01_01_PREVIEW" or "2025_01_01_PREVIEW" => AzureOpenAIClientOptions.ServiceVersion.V2025_01_01_Preview,
 
                 _ => throw new NotSupportedException($"The service version '{serviceVersion}' is not supported.")
             };
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs
index 8852e5fd35df..1d00ba3207f5 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs
@@ -16,26 +16,6 @@ namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
 [JsonNumberHandling(JsonNumberHandling.AllowReadingFromString)]
 public sealed class AzureOpenAIPromptExecutionSettings : OpenAIPromptExecutionSettings
 {
-    /// 
-    /// Enabling this property will enforce the new max_completion_tokens parameter to be send the Azure OpenAI API.
-    /// 
-    /// 
-    /// This setting is temporary and flags the underlying Azure SDK to use the new max_completion_tokens parameter using the
-    /// 
-    /// SetNewMaxCompletionTokensPropertyEnabled extension.
-    /// 
-    [Experimental("SKEXP0010")]
-    [JsonIgnore]
-    public bool SetNewMaxCompletionTokensEnabled
-    {
-        get => this._setNewMaxCompletionTokensEnabled;
-        set
-        {
-            this.ThrowIfFrozen();
-            this._setNewMaxCompletionTokensEnabled = value;
-        }
-    }
-
     /// 
     /// An abstraction of additional settings for chat completion, see https://learn.microsoft.com/en-us/dotnet/api/azure.ai.openai.azurechatextensionsoptions.
     /// This property is compatible only with Azure OpenAI.
@@ -58,7 +38,6 @@ public override PromptExecutionSettings Clone()
     {
         var settings = base.Clone();
         settings.AzureChatDataSource = this.AzureChatDataSource;
-        settings.SetNewMaxCompletionTokensEnabled = this.SetNewMaxCompletionTokensEnabled;
         return settings;
     }
 
@@ -124,7 +103,6 @@ public static AzureOpenAIPromptExecutionSettings FromExecutionSettingsWithData(P
     #region private ================================================================================
     [Experimental("SKEXP0010")]
     private AzureSearchChatDataSource? _azureChatDataSource;
-    private bool _setNewMaxCompletionTokensEnabled;
 
     #endregion
 }
diff --git a/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/GeminiRequestTests.cs b/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/GeminiRequestTests.cs
index 3cbf9973ccbb..55283d191a84 100644
--- a/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/GeminiRequestTests.cs
+++ b/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/GeminiRequestTests.cs
@@ -338,33 +338,6 @@ public void FromChatHistoryImageAsImageContentItReturnsWithChatHistory()
                 .SequenceEqual(Convert.FromBase64String(c.Parts![0].InlineData!.InlineData))));
     }
 
-    [Fact]
-    public void FromChatHistoryAudioAsAudioContentItReturnsWithChatHistory()
-    {
-        // Arrange
-        ReadOnlyMemory audioAsBytes = new byte[] { 0x00, 0x01, 0x02, 0x03 };
-        ChatHistory chatHistory = [];
-        chatHistory.AddUserMessage("user-message");
-        chatHistory.AddAssistantMessage("assist-message");
-        chatHistory.AddUserMessage(contentItems:
-            [new AudioContent(new Uri("https://example-audio.com/file.wav")) { MimeType = "audio/wav" }]);
-        chatHistory.AddUserMessage(contentItems:
-            [new AudioContent(audioAsBytes, "audio/mp3")]);
-        var executionSettings = new GeminiPromptExecutionSettings();
-
-        // Act
-        var request = GeminiRequest.FromChatHistoryAndExecutionSettings(chatHistory, executionSettings);
-
-        // Assert
-        Assert.Collection(request.Contents,
-            c => Assert.Equal(chatHistory[0].Content, c.Parts![0].Text),
-            c => Assert.Equal(chatHistory[1].Content, c.Parts![0].Text),
-            c => Assert.Equal(chatHistory[2].Items.Cast().Single().Uri,
-                c.Parts![0].FileData!.FileUri),
-            c => Assert.True(audioAsBytes.ToArray()
-                .SequenceEqual(Convert.FromBase64String(c.Parts![0].InlineData!.InlineData))));
-    }
-
     [Fact]
     public void FromChatHistoryUnsupportedContentItThrowsNotSupportedException()
     {
@@ -497,44 +470,6 @@ public void AddChatMessageToRequest()
             c => Equals(message.Role, c.Role));
     }
 
-    [Fact]
-    public void CachedContentFromPromptReturnsAsExpected()
-    {
-        // Arrange
-        var prompt = "prompt-example";
-        var executionSettings = new GeminiPromptExecutionSettings
-        {
-            CachedContent = "xyz/abc"
-        };
-
-        // Act
-        var request = GeminiRequest.FromPromptAndExecutionSettings(prompt, executionSettings);
-
-        // Assert
-        Assert.NotNull(request.Configuration);
-        Assert.Equal(executionSettings.CachedContent, request.CachedContent);
-    }
-
-    [Fact]
-    public void CachedContentFromChatHistoryReturnsAsExpected()
-    {
-        // Arrange
-        ChatHistory chatHistory = [];
-        chatHistory.AddUserMessage("user-message");
-        chatHistory.AddAssistantMessage("assist-message");
-        chatHistory.AddUserMessage("user-message2");
-        var executionSettings = new GeminiPromptExecutionSettings
-        {
-            CachedContent = "xyz/abc"
-        };
-
-        // Act
-        var request = GeminiRequest.FromChatHistoryAndExecutionSettings(chatHistory, executionSettings);
-
-        // Assert
-        Assert.Equal(executionSettings.CachedContent, request.CachedContent);
-    }
-
     private sealed class DummyContent(object? innerContent, string? modelId = null, IReadOnlyDictionary? metadata = null) :
         KernelContent(innerContent, modelId, metadata);
 }
diff --git a/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/GoogleAIGeminiChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/GoogleAIGeminiChatCompletionServiceTests.cs
index 0d986d21ca5a..1d9bb5d6377d 100644
--- a/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/GoogleAIGeminiChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/GoogleAIGeminiChatCompletionServiceTests.cs
@@ -1,34 +1,13 @@
 // Copyright (c) Microsoft. All rights reserved.
 
-using System;
-using System.IO;
-using System.Net.Http;
-using System.Text;
-using System.Threading.Tasks;
-using Microsoft.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel.Connectors.Google;
 using Microsoft.SemanticKernel.Services;
 using Xunit;
 
 namespace SemanticKernel.Connectors.Google.UnitTests.Services;
 
-public sealed class GoogleAIGeminiChatCompletionServiceTests : IDisposable
+public sealed class GoogleAIGeminiChatCompletionServiceTests
 {
-    private readonly HttpMessageHandlerStub _messageHandlerStub;
-    private readonly HttpClient _httpClient;
-
-    public GoogleAIGeminiChatCompletionServiceTests()
-    {
-        this._messageHandlerStub = new()
-        {
-            ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
-            {
-                Content = new StringContent(File.ReadAllText("./TestData/completion_one_response.json"))
-            }
-        };
-        this._httpClient = new HttpClient(this._messageHandlerStub, false);
-    }
-
     [Fact]
     public void AttributesShouldContainModelId()
     {
@@ -39,39 +18,4 @@ public void AttributesShouldContainModelId()
         // Assert
         Assert.Equal(model, service.Attributes[AIServiceExtensions.ModelIdKey]);
     }
-
-    [Theory]
-    [InlineData(null)]
-    [InlineData("content")]
-    [InlineData("")]
-    public async Task RequestCachedContentWorksCorrectlyAsync(string? cachedContent)
-    {
-        // Arrange
-        string model = "fake-model";
-        var sut = new GoogleAIGeminiChatCompletionService(model, "key", httpClient: this._httpClient);
-
-        // Act
-        var result = await sut.GetChatMessageContentAsync("my prompt", new GeminiPromptExecutionSettings { CachedContent = cachedContent });
-
-        // Assert
-        Assert.NotNull(result);
-        Assert.NotNull(this._messageHandlerStub.RequestContent);
-
-        var requestBody = UTF8Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent);
-        if (cachedContent is not null)
-        {
-            Assert.Contains($"\"cachedContent\":\"{cachedContent}\"", requestBody);
-        }
-        else
-        {
-            // Then no quality is provided, it should not be included in the request body
-            Assert.DoesNotContain("cachedContent", requestBody);
-        }
-    }
-
-    public void Dispose()
-    {
-        this._httpClient.Dispose();
-        this._messageHandlerStub.Dispose();
-    }
 }
diff --git a/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/VertexAIGeminiChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/VertexAIGeminiChatCompletionServiceTests.cs
index 0376924c0e91..89e65fbaa534 100644
--- a/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/VertexAIGeminiChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.Google.UnitTests/Services/VertexAIGeminiChatCompletionServiceTests.cs
@@ -1,34 +1,14 @@
 // Copyright (c) Microsoft. All rights reserved.
 
-using System;
-using System.IO;
-using System.Net.Http;
-using System.Text;
 using System.Threading.Tasks;
-using Microsoft.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel.Connectors.Google;
 using Microsoft.SemanticKernel.Services;
 using Xunit;
 
 namespace SemanticKernel.Connectors.Google.UnitTests.Services;
 
-public sealed class VertexAIGeminiChatCompletionServiceTests : IDisposable
+public sealed class VertexAIGeminiChatCompletionServiceTests
 {
-    private readonly HttpMessageHandlerStub _messageHandlerStub;
-    private readonly HttpClient _httpClient;
-
-    public VertexAIGeminiChatCompletionServiceTests()
-    {
-        this._messageHandlerStub = new()
-        {
-            ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
-            {
-                Content = new StringContent(File.ReadAllText("./TestData/completion_one_response.json"))
-            }
-        };
-        this._httpClient = new HttpClient(this._messageHandlerStub, false);
-    }
-
     [Fact]
     public void AttributesShouldContainModelIdBearerAsString()
     {
@@ -50,39 +30,4 @@ public void AttributesShouldContainModelIdBearerAsFunc()
         // Assert
         Assert.Equal(model, service.Attributes[AIServiceExtensions.ModelIdKey]);
     }
-
-    [Theory]
-    [InlineData(null)]
-    [InlineData("content")]
-    [InlineData("")]
-    public async Task RequestCachedContentWorksCorrectlyAsync(string? cachedContent)
-    {
-        // Arrange
-        string model = "fake-model";
-        var sut = new VertexAIGeminiChatCompletionService(model, () => new ValueTask("key"), "location", "project", httpClient: this._httpClient);
-
-        // Act
-        var result = await sut.GetChatMessageContentAsync("my prompt", new GeminiPromptExecutionSettings { CachedContent = cachedContent });
-
-        // Assert
-        Assert.NotNull(result);
-        Assert.NotNull(this._messageHandlerStub.RequestContent);
-
-        var requestBody = UTF8Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent);
-        if (cachedContent is not null)
-        {
-            Assert.Contains($"\"cachedContent\":\"{cachedContent}\"", requestBody);
-        }
-        else
-        {
-            // Then no quality is provided, it should not be included in the request body
-            Assert.DoesNotContain("cachedContent", requestBody);
-        }
-    }
-
-    public void Dispose()
-    {
-        this._httpClient.Dispose();
-        this._messageHandlerStub.Dispose();
-    }
 }
diff --git a/dotnet/src/Connectors/Connectors.Google/Core/ClientBase.cs b/dotnet/src/Connectors/Connectors.Google/Core/ClientBase.cs
index b94ca9eeebc6..5d465f5d590f 100644
--- a/dotnet/src/Connectors/Connectors.Google/Core/ClientBase.cs
+++ b/dotnet/src/Connectors/Connectors.Google/Core/ClientBase.cs
@@ -112,7 +112,6 @@ protected static string GetApiVersionSubLink(VertexAIVersion apiVersion)
         => apiVersion switch
         {
             VertexAIVersion.V1 => "v1",
-            VertexAIVersion.V1_Beta => "v1beta1",
             _ => throw new NotSupportedException($"Vertex API version {apiVersion} is not supported.")
         };
 }
diff --git a/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Models/GeminiRequest.cs b/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Models/GeminiRequest.cs
index aada46854846..2ebda2c2a0de 100644
--- a/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Models/GeminiRequest.cs
+++ b/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Models/GeminiRequest.cs
@@ -42,10 +42,6 @@ internal sealed class GeminiRequest
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public GeminiContent? SystemInstruction { get; set; }
 
-    [JsonPropertyName("cachedContent")]
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public string? CachedContent { get; set; }
-
     public void AddFunction(GeminiFunction function)
     {
         // NOTE: Currently Gemini only supports one tool i.e. function calling.
@@ -71,7 +67,6 @@ public static GeminiRequest FromPromptAndExecutionSettings(
         GeminiRequest obj = CreateGeminiRequest(prompt);
         AddSafetySettings(executionSettings, obj);
         AddConfiguration(executionSettings, obj);
-        AddAdditionalBodyFields(executionSettings, obj);
         return obj;
     }
 
@@ -88,7 +83,6 @@ public static GeminiRequest FromChatHistoryAndExecutionSettings(
         GeminiRequest obj = CreateGeminiRequest(chatHistory);
         AddSafetySettings(executionSettings, obj);
         AddConfiguration(executionSettings, obj);
-        AddAdditionalBodyFields(executionSettings, obj);
         return obj;
     }
 
@@ -217,7 +211,6 @@ private static List CreateGeminiParts(ChatMessageContent content)
     {
         TextContent textContent => new GeminiPart { Text = textContent.Text },
         ImageContent imageContent => CreateGeminiPartFromImage(imageContent),
-        AudioContent audioContent => CreateGeminiPartFromAudio(audioContent),
         _ => throw new NotSupportedException($"Unsupported content type. {item.GetType().Name} is not supported by Gemini.")
     };
 
@@ -257,42 +250,6 @@ private static string GetMimeTypeFromImageContent(ImageContent imageContent)
                ?? throw new InvalidOperationException("Image content MimeType is empty.");
     }
 
-    private static GeminiPart CreateGeminiPartFromAudio(AudioContent audioContent)
-    {
-        // Binary data takes precedence over URI.
-        if (audioContent.Data is { IsEmpty: false })
-        {
-            return new GeminiPart
-            {
-                InlineData = new GeminiPart.InlineDataPart
-                {
-                    MimeType = GetMimeTypeFromAudioContent(audioContent),
-                    InlineData = Convert.ToBase64String(audioContent.Data.Value.ToArray())
-                }
-            };
-        }
-
-        if (audioContent.Uri is not null)
-        {
-            return new GeminiPart
-            {
-                FileData = new GeminiPart.FileDataPart
-                {
-                    MimeType = GetMimeTypeFromAudioContent(audioContent),
-                    FileUri = audioContent.Uri ?? throw new InvalidOperationException("Audio content URI is empty.")
-                }
-            };
-        }
-
-        throw new InvalidOperationException("Audio content does not contain any data or uri.");
-    }
-
-    private static string GetMimeTypeFromAudioContent(AudioContent audioContent)
-    {
-        return audioContent.MimeType
-               ?? throw new InvalidOperationException("Audio content MimeType is empty.");
-    }
-
     private static void AddConfiguration(GeminiPromptExecutionSettings executionSettings, GeminiRequest request)
     {
         request.Configuration = new ConfigurationElement
@@ -361,11 +318,6 @@ private static void AddSafetySettings(GeminiPromptExecutionSettings executionSet
             => new GeminiSafetySetting(s.Category, s.Threshold)).ToList();
     }
 
-    private static void AddAdditionalBodyFields(GeminiPromptExecutionSettings executionSettings, GeminiRequest request)
-    {
-        request.CachedContent = executionSettings.CachedContent;
-    }
-
     internal sealed class ConfigurationElement
     {
         [JsonPropertyName("temperature")]
diff --git a/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs
index daa8ea629a5e..fab00f01e11d 100644
--- a/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs
@@ -27,7 +27,6 @@ public sealed class GeminiPromptExecutionSettings : PromptExecutionSettings
     private bool? _audioTimestamp;
     private string? _responseMimeType;
     private object? _responseSchema;
-    private string? _cachedContent;
     private IList<GeminiSafetySetting>? _safetySettings;
     private GeminiToolCallBehavior? _toolCallBehavior;
 
@@ -42,7 +41,6 @@ public sealed class GeminiPromptExecutionSettings : PromptExecutionSettings
     /// Range is 0.0 to 1.0.
     /// 
     [JsonPropertyName("temperature")]
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public double? Temperature
     {
         get => this._temperature;
@@ -58,7 +56,6 @@ public double? Temperature
     /// The higher the TopP, the more diverse the completion.
     /// 
     [JsonPropertyName("top_p")]
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public double? TopP
     {
         get => this._topP;
@@ -74,7 +71,6 @@ public double? TopP
     /// The TopK property represents the maximum value of a collection or dataset.
     /// 
     [JsonPropertyName("top_k")]
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? TopK
     {
         get => this._topK;
@@ -89,7 +85,6 @@ public int? TopK
     /// The maximum number of tokens to generate in the completion.
     /// 
     [JsonPropertyName("max_tokens")]
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? MaxTokens
     {
         get => this._maxTokens;
@@ -104,7 +99,6 @@ public int? MaxTokens
     /// The count of candidates. Possible values range from 1 to 8.
     /// 
     [JsonPropertyName("candidate_count")]
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public int? CandidateCount
     {
         get => this._candidateCount;
@@ -120,7 +114,6 @@ public int? CandidateCount
     /// Maximum number of stop sequences is 5.
     /// 
     [JsonPropertyName("stop_sequences")]
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IList<string>? StopSequences
     {
         get => this._stopSequences;
@@ -135,7 +128,6 @@ public IList? StopSequences
     /// Represents a list of safety settings.
     /// 
     [JsonPropertyName("safety_settings")]
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public IList<GeminiSafetySetting>? SafetySettings
     {
         get => this._safetySettings;
@@ -188,7 +180,6 @@ public GeminiToolCallBehavior? ToolCallBehavior
     /// if enabled, audio timestamp will be included in the request to the model.
     /// 
     [JsonPropertyName("audio_timestamp")]
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public bool? AudioTimestamp
     {
         get => this._audioTimestamp;
@@ -207,7 +198,6 @@ public bool? AudioTimestamp
     /// 3. text/x.enum: For classification tasks, output an enum value as defined in the response schema.
     /// 
     [JsonPropertyName("response_mimetype")]
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? ResponseMimeType
     {
         get => this._responseMimeType;
@@ -244,23 +234,6 @@ public object? ResponseSchema
         }
     }
 
-    /// 
-    /// Optional. The name of the cached content used as context to serve the prediction.
-    /// Note: only used in explicit caching, where users can have control over caching (e.g. what content to cache) and enjoy guaranteed cost savings.
-    /// Format: projects/{project}/locations/{location}/cachedContents/{cachedContent}
-    /// 
-    [JsonPropertyName("cached_content")]
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public string? CachedContent
-    {
-        get => this._cachedContent;
-        set
-        {
-            this.ThrowIfFrozen();
-            this._cachedContent = value;
-        }
-    }
-
     /// 
     public override void Freeze()
     {
diff --git a/dotnet/src/Connectors/Connectors.Google/VertexAIVersion.cs b/dotnet/src/Connectors/Connectors.Google/VertexAIVersion.cs
index 998910d8db42..8e0a894e9f90 100644
--- a/dotnet/src/Connectors/Connectors.Google/VertexAIVersion.cs
+++ b/dotnet/src/Connectors/Connectors.Google/VertexAIVersion.cs
@@ -12,10 +12,5 @@ public enum VertexAIVersion
     /// 
     /// Represents the V1 version of the Vertex AI API.
     /// 
-    V1,
-
-    /// 
-    /// Represents the V1-beta version of the Vertex AI API.
-    /// 
-    V1_Beta
+    V1
 }
diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs
index c23c52b68760..6984951fdc90 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs
@@ -126,7 +126,7 @@ IAsyncEnumerable GetBatchFromNamespaceAsync(
     ///  if true, the embedding will be returned in the memory record.
     /// 
     ///  the memory records that match the filter.
-    IAsyncEnumerable<MemoryRecord?> GetBatchWithFilterAsync(
+    public IAsyncEnumerable<MemoryRecord?> GetBatchWithFilterAsync(
         string indexName,
         Dictionary<string, object> filter,
         int limit = 10,
@@ -182,7 +182,7 @@ Task RemoveWithDocumentIdAsync(
     ///  the namespace to remove from.
     /// 
     /// 
-    Task RemoveWithDocumentIdBatchAsync(
+    public Task RemoveWithDocumentIdBatchAsync(
         string indexName,
         IEnumerable<string> documentIds,
         string indexNamespace = "",
diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs
index 020aa46dbda6..3fb62b667a92 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs
@@ -19,7 +19,7 @@ internal interface IPostgresVectorStoreDbClient
     /// 
     /// The  used to connect to the database.
     /// 
-    NpgsqlDataSource DataSource { get; }
+    public NpgsqlDataSource DataSource { get; }
 
     /// 
     /// Check if a table exists.
diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs
index 521dc5633cb0..18aa5bf54901 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs
@@ -149,7 +149,7 @@ public PostgresSqlCommandInfo BuildCreateVectorIndexCommand(string schema, strin
 
         return new PostgresSqlCommandInfo(
             commandText: $@"
-                CREATE INDEX ""{indexName}"" ON {schema}.""{tableName}"" USING {indexTypeName} (""{vectorColumnName}"" {indexOps});"
+                CREATE INDEX {indexName} ON {schema}.""{tableName}"" USING {indexTypeName} (""{vectorColumnName}"" {indexOps});"
         );
     }
 
diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs
index aa9ad3f72190..3078e79c2113 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs
@@ -22,7 +22,7 @@ public interface IQdrantVectorDbClient
     /// Whether to include the vector data in the returned results.
     /// The  to monitor for cancellation requests. The default is .
     /// An asynchronous list of Qdrant vectors records associated with the given IDs
-    IAsyncEnumerable<QdrantVectorRecord> GetVectorsByIdAsync(string collectionName, IEnumerable<string> pointIds, bool withVectors = false,
+    public IAsyncEnumerable<QdrantVectorRecord> GetVectorsByIdAsync(string collectionName, IEnumerable<string> pointIds, bool withVectors = false,
         CancellationToken cancellationToken = default);
 
     /// 
@@ -33,7 +33,7 @@ IAsyncEnumerable GetVectorsByIdAsync(string collectionName,
     /// Whether to include the vector data in the returned result.
     /// The  to monitor for cancellation requests. The default is .
     /// The Qdrant vector record associated with the given ID if found, null if not.
-    Task<QdrantVectorRecord?> GetVectorByPayloadIdAsync(string collectionName, string metadataId, bool withVector = false, CancellationToken cancellationToken = default);
+    public Task<QdrantVectorRecord?> GetVectorByPayloadIdAsync(string collectionName, string metadataId, bool withVector = false, CancellationToken cancellationToken = default);
 
     /// 
     /// Delete vectors by their unique Qdrant IDs.
@@ -41,7 +41,7 @@ IAsyncEnumerable GetVectorsByIdAsync(string collectionName,
     /// The name assigned to a collection of vectors.
     /// The unique IDs used to index Qdrant vector entries.
     /// The  to monitor for cancellation requests. The default is .
-    Task DeleteVectorsByIdAsync(string collectionName, IEnumerable<string> pointIds, CancellationToken cancellationToken = default);
+    public Task DeleteVectorsByIdAsync(string collectionName, IEnumerable<string> pointIds, CancellationToken cancellationToken = default);
 
     /// 
     /// Delete a vector by its unique identifier in the metadata (Qdrant payload).
@@ -49,7 +49,7 @@ IAsyncEnumerable GetVectorsByIdAsync(string collectionName,
     /// The name assigned to a collection of vectors.
     /// The unique ID stored in a Qdrant vector entry's metadata.
     /// The  to monitor for cancellation requests. The default is .
-    Task DeleteVectorByPayloadIdAsync(string collectionName, string metadataId, CancellationToken cancellationToken = default);
+    public Task DeleteVectorByPayloadIdAsync(string collectionName, string metadataId, CancellationToken cancellationToken = default);
 
     /// 
     /// Upsert a group of vectors into a collection.
@@ -57,7 +57,7 @@ IAsyncEnumerable GetVectorsByIdAsync(string collectionName,
     /// The name assigned to a collection of vectors.
     /// The Qdrant vector records to upsert.
     /// The  to monitor for cancellation requests. The default is .
-    Task UpsertVectorsAsync(string collectionName, IEnumerable<QdrantVectorRecord> vectorData, CancellationToken cancellationToken = default);
+    public Task UpsertVectorsAsync(string collectionName, IEnumerable<QdrantVectorRecord> vectorData, CancellationToken cancellationToken = default);
 
     /// 
     /// Find the nearest vectors in a collection using vector similarity search.
@@ -69,7 +69,7 @@ IAsyncEnumerable GetVectorsByIdAsync(string collectionName,
     /// Whether to include the vector data in the returned results.
     /// Qdrant tags used to filter the results.
     /// The  to monitor for cancellation requests. The default is .
-    IAsyncEnumerable<(QdrantVectorRecord, double)> FindNearestInCollectionAsync(
+    public IAsyncEnumerable<(QdrantVectorRecord, double)> FindNearestInCollectionAsync(
         string collectionName,
         ReadOnlyMemory<float> target,
         double threshold,
@@ -83,25 +83,25 @@ IAsyncEnumerable GetVectorsByIdAsync(string collectionName,
     /// 
     /// The name assigned to a collection of vectors.
     /// The  to monitor for cancellation requests. The default is .
-    Task CreateCollectionAsync(string collectionName, CancellationToken cancellationToken = default);
+    public Task CreateCollectionAsync(string collectionName, CancellationToken cancellationToken = default);
 
     /// 
     /// Delete a Qdrant vector collection.
     /// 
     /// The name assigned to a collection of vectors.
     /// The  to monitor for cancellation requests. The default is .
-    Task DeleteCollectionAsync(string collectionName, CancellationToken cancellationToken = default);
+    public Task DeleteCollectionAsync(string collectionName, CancellationToken cancellationToken = default);
 
     /// 
     /// Check if a vector collection exists.
     /// 
     /// The name assigned to a collection of vectors.
     /// The  to monitor for cancellation requests. The default is .
-    Task<bool> DoesCollectionExistAsync(string collectionName, CancellationToken cancellationToken = default);
+    public Task<bool> DoesCollectionExistAsync(string collectionName, CancellationToken cancellationToken = default);
 
     /// 
     /// List all vector collections.
     /// 
     /// The  to monitor for cancellation requests. The default is .
-    IAsyncEnumerable<string> ListCollectionsAsync(CancellationToken cancellationToken = default);
+    public IAsyncEnumerable<string> ListCollectionsAsync(CancellationToken cancellationToken = default);
 }
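
For context, the only change to this interface is the explicit public modifier on its members. Since C# 8, interface members are public by default, so the two declarations in this hypothetical sketch (names not from the patch) are equivalent in accessibility:

    using System.Threading;
    using System.Threading.Tasks;

    public interface IExampleVectorClient
    {
        // Implicitly public.
        Task DeleteAsync(string id, CancellationToken cancellationToken = default);

        // Explicitly public; same accessibility as the member above.
        public Task<bool> ExistsAsync(string id, CancellationToken cancellationToken = default);
    }
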
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaKernelBuilderExtensionsTests.cs
index ad6f1315402e..901247b95641 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaKernelBuilderExtensionsTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaKernelBuilderExtensionsTests.cs
@@ -1,18 +1,11 @@
 // Copyright (c) Microsoft. All rights reserved.
 
 using System;
-using System.IO;
-using System.Net;
-using System.Net.Http;
-using System.Threading;
-using System.Threading.Tasks;
-using Microsoft.Extensions.DependencyInjection;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel.Connectors.Ollama;
 using Microsoft.SemanticKernel.Embeddings;
 using Microsoft.SemanticKernel.TextGeneration;
-using OllamaSharp;
 using Xunit;
 
 namespace SemanticKernel.Connectors.Ollama.UnitTests.Extensions;
@@ -61,155 +54,4 @@ public void AddOllamaTextEmbeddingGenerationCreatesService()
         Assert.NotNull(kernel);
         Assert.NotNull(service);
     }
-
-    [Theory]
-    [MemberData(nameof(AddOllamaApiClientScenarios))]
-    public async Task AddOllamaApiClientEmbeddingsFromServiceCollectionAsync(ServiceCollectionRegistration registration)
-    {
-        using var myHttpClientHandler = new FakeHttpMessageHandler(File.ReadAllText("TestData/embeddings_test_response.json"));
-        using var httpClient = new HttpClient(myHttpClientHandler) { BaseAddress = new Uri("http://localhost:11434"), };
-        using var client = new OllamaApiClient(httpClient);
-        var builder = Kernel.CreateBuilder();
-        var services = builder.Services;
-
-        string? serviceId = null;
-        switch (registration)
-        {
-            case ServiceCollectionRegistration.KeyedOllamaApiClient:
-                services.AddKeyedSingleton(serviceId = "model", client);
-                break;
-            case ServiceCollectionRegistration.KeyedIOllamaApiClient:
-                services.AddKeyedSingleton(serviceId = "model", client);
-                break;
-            case ServiceCollectionRegistration.OllamaApiClient:
-                services.AddSingleton(client);
-                break;
-            case ServiceCollectionRegistration.Endpoint:
-                services.AddSingleton(client);
-                break;
-        }
-
-        services.AddOllamaTextEmbeddingGeneration(serviceId: serviceId);
-        var serviceProvider = services.BuildServiceProvider();
-
-        var kernel = builder.Build();
-
-        ITextEmbeddingGenerationService service = kernel.GetRequiredService(serviceId);
-
-        Assert.NotNull(service);
-
-        await service.GenerateEmbeddingsAsync(["text"]);
-
-        Assert.Equal(1, myHttpClientHandler.InvokedCount);
-    }
-
-    [Theory]
-    [MemberData(nameof(AddOllamaApiClientScenarios))]
-    public async Task AddOllamaApiClientChatCompletionFromServiceCollectionAsync(ServiceCollectionRegistration registration)
-    {
-        using var myHttpClientHandler = new FakeHttpMessageHandler(File.ReadAllText("TestData/chat_completion_test_response.txt"));
-        using var httpClient = new HttpClient(myHttpClientHandler) { BaseAddress = new Uri("http://localhost:11434"), };
-        using var client = new OllamaApiClient(httpClient);
-        var builder = Kernel.CreateBuilder();
-        var services = builder.Services;
-
-        string? serviceId = null;
-        switch (registration)
-        {
-            case ServiceCollectionRegistration.KeyedOllamaApiClient:
-                services.AddKeyedSingleton(serviceId = "model", client);
-                break;
-            case ServiceCollectionRegistration.KeyedIOllamaApiClient:
-                services.AddKeyedSingleton(serviceId = "model", client);
-                break;
-            case ServiceCollectionRegistration.OllamaApiClient:
-                services.AddSingleton(client);
-                break;
-            case ServiceCollectionRegistration.Endpoint:
-                services.AddSingleton(client);
-                break;
-        }
-
-        builder.AddOllamaChatCompletion(serviceId: serviceId);
-        var kernel = builder.Build();
-
-        IChatCompletionService service = kernel.GetRequiredService(serviceId);
-
-        Assert.NotNull(service);
-
-        await service.GetChatMessageContentsAsync(new());
-
-        Assert.Equal(1, myHttpClientHandler.InvokedCount);
-    }
-
-    [Theory]
-    [MemberData(nameof(AddOllamaApiClientScenarios))]
-    public async Task AddOllamaApiClientTextGenerationFromServiceCollectionAsync(ServiceCollectionRegistration registration)
-    {
-        using var myHttpClientHandler = new FakeHttpMessageHandler(File.ReadAllText("TestData/chat_completion_test_response.txt"));
-        using var httpClient = new HttpClient(myHttpClientHandler) { BaseAddress = new Uri("http://localhost:11434"), };
-        using var client = new OllamaApiClient(httpClient, "model");
-        var builder = Kernel.CreateBuilder();
-        var services = builder.Services;
-
-        string? serviceId = null;
-        switch (registration)
-        {
-            case ServiceCollectionRegistration.KeyedOllamaApiClient:
-                services.AddKeyedSingleton(serviceId = "model", client);
-                break;
-            case ServiceCollectionRegistration.KeyedIOllamaApiClient:
-                services.AddKeyedSingleton(serviceId = "model", client);
-                break;
-            case ServiceCollectionRegistration.OllamaApiClient:
-                services.AddSingleton(client);
-                break;
-            case ServiceCollectionRegistration.Endpoint:
-                services.AddSingleton(client);
-                break;
-        }
-
-        builder.AddOllamaTextGeneration(serviceId: serviceId);
-        var kernel = builder.Build();
-
-        ITextGenerationService service = kernel.GetRequiredService(serviceId);
-
-        Assert.NotNull(service);
-
-        await service.GetStreamingTextContentsAsync("test prompt").GetAsyncEnumerator().MoveNextAsync();
-
-        Assert.Equal(1, myHttpClientHandler.InvokedCount);
-    }
-
-    public enum ServiceCollectionRegistration
-    {
-        KeyedOllamaApiClient,
-        KeyedIOllamaApiClient,
-        OllamaApiClient,
-        Endpoint,
-    }
-
-    public static TheoryData AddOllamaApiClientScenarios => new()
-    {
-        { ServiceCollectionRegistration.KeyedOllamaApiClient },
-        { ServiceCollectionRegistration.KeyedIOllamaApiClient },
-        { ServiceCollectionRegistration.OllamaApiClient },
-        { ServiceCollectionRegistration.Endpoint },
-    };
-
-    private sealed class FakeHttpMessageHandler(string responseContent) : HttpMessageHandler
-    {
-        public int InvokedCount { get; private set; }
-
-        protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
-        {
-            this.InvokedCount++;
-
-            return Task.FromResult(
-                new HttpResponseMessage(HttpStatusCode.OK)
-                {
-                    Content = new StringContent(responseContent)
-                });
-        }
-    }
 }
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaServiceCollectionExtensionsTests.cs
index c22d1869954f..d68ae6548e32 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaServiceCollectionExtensionsTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaServiceCollectionExtensionsTests.cs
@@ -1,18 +1,12 @@
 // Copyright (c) Microsoft. All rights reserved.
 
 using System;
-using System.IO;
-using System.Net;
-using System.Net.Http;
-using System.Threading;
-using System.Threading.Tasks;
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel.Connectors.Ollama;
 using Microsoft.SemanticKernel.Embeddings;
 using Microsoft.SemanticKernel.TextGeneration;
-using OllamaSharp;
 using Xunit;
 
 namespace SemanticKernel.Connectors.Ollama.UnitTests.Extensions;
@@ -47,32 +41,6 @@ public void AddOllamaChatCompletionToServiceCollection()
         Assert.NotNull(service);
     }
 
-    [Fact]
-    public void AddOllamaChatCompletionFromServiceCollection()
-    {
-        var services = new ServiceCollection();
-        using var ollamaClient = new OllamaApiClient(new Uri("http://localhost:11434"), "model");
-
-        services.AddSingleton(ollamaClient);
-        services.AddOllamaChatCompletion();
-        var serviceProvider = services.BuildServiceProvider();
-        var service = serviceProvider.GetRequiredService();
-        Assert.NotNull(service);
-    }
-
-    [Fact]
-    public void AddOllamaTextEmbeddingGenerationFromServiceCollection()
-    {
-        var services = new ServiceCollection();
-        using var ollamaClient = new OllamaApiClient(new Uri("http://localhost:11434"), "model");
-
-        services.AddSingleton(ollamaClient);
-        services.AddOllamaTextEmbeddingGeneration();
-        var serviceProvider = services.BuildServiceProvider();
-        var service = serviceProvider.GetRequiredService();
-        Assert.NotNull(service);
-    }
-
     [Fact]
     public void AddOllamaTextEmbeddingsGenerationToServiceCollection()
     {
@@ -84,174 +52,4 @@ public void AddOllamaTextEmbeddingsGenerationToServiceCollection()
 
         Assert.NotNull(service);
     }
-
-    [Theory]
-    [MemberData(nameof(AddOllamaApiClientScenarios))]
-    public async Task AddOllamaApiClientEmbeddingsFromServiceCollectionAsync(ServiceCollectionRegistration registration)
-    {
-        using var myHttpClientHandler = new FakeHttpMessageHandler(File.ReadAllText("TestData/embeddings_test_response.json"));
-        using var httpClient = new HttpClient(myHttpClientHandler) { BaseAddress = new Uri("http://localhost:11434"), };
-        using var client = new OllamaApiClient(httpClient);
-        var services = new ServiceCollection();
-        string? serviceId = null;
-        switch (registration)
-        {
-            case ServiceCollectionRegistration.KeyedOllamaApiClient:
-                services.AddKeyedSingleton(serviceId = "model", client);
-                break;
-            case ServiceCollectionRegistration.KeyedIOllamaApiClient:
-                services.AddKeyedSingleton(serviceId = "model", client);
-                break;
-            case ServiceCollectionRegistration.OllamaApiClient:
-                services.AddSingleton(client);
-                break;
-            case ServiceCollectionRegistration.Endpoint:
-                services.AddSingleton(client);
-                break;
-        }
-
-        services.AddOllamaTextEmbeddingGeneration(serviceId: serviceId);
-        var serviceProvider = services.BuildServiceProvider();
-
-        ITextEmbeddingGenerationService service;
-        if (registration is ServiceCollectionRegistration.KeyedOllamaApiClient
-                         or ServiceCollectionRegistration.KeyedIOllamaApiClient)
-        {
-            service = serviceProvider.GetRequiredKeyedService(serviceId);
-        }
-        else
-        {
-            service = serviceProvider.GetRequiredService();
-        }
-
-        Assert.NotNull(service);
-
-        await service.GenerateEmbeddingsAsync(["text"]);
-
-        Assert.Equal(1, myHttpClientHandler.InvokedCount);
-    }
-
-    [Theory]
-    [MemberData(nameof(AddOllamaApiClientScenarios))]
-    public async Task AddOllamaApiClientChatCompletionFromServiceCollectionAsync(ServiceCollectionRegistration registration)
-    {
-        using var myHttpClientHandler = new FakeHttpMessageHandler(File.ReadAllText("TestData/chat_completion_test_response.txt"));
-        using var httpClient = new HttpClient(myHttpClientHandler) { BaseAddress = new Uri("http://localhost:11434"), };
-        using var client = new OllamaApiClient(httpClient);
-        var services = new ServiceCollection();
-        string? serviceId = null;
-        switch (registration)
-        {
-            case ServiceCollectionRegistration.KeyedOllamaApiClient:
-                services.AddKeyedSingleton(serviceId = "model", client);
-                break;
-            case ServiceCollectionRegistration.KeyedIOllamaApiClient:
-                services.AddKeyedSingleton(serviceId = "model", client);
-                break;
-            case ServiceCollectionRegistration.OllamaApiClient:
-                services.AddSingleton(client);
-                break;
-            case ServiceCollectionRegistration.Endpoint:
-                services.AddSingleton(client);
-                break;
-        }
-
-        services.AddOllamaChatCompletion(serviceId: serviceId);
-        var serviceProvider = services.BuildServiceProvider();
-
-        IChatCompletionService service;
-        if (registration is ServiceCollectionRegistration.KeyedOllamaApiClient
-                         or ServiceCollectionRegistration.KeyedIOllamaApiClient)
-        {
-            service = serviceProvider.GetRequiredKeyedService(serviceId);
-        }
-        else
-        {
-            service = serviceProvider.GetRequiredService();
-        }
-
-        Assert.NotNull(service);
-
-        await service.GetChatMessageContentsAsync(new());
-
-        Assert.Equal(1, myHttpClientHandler.InvokedCount);
-    }
-
-    [Theory]
-    [MemberData(nameof(AddOllamaApiClientScenarios))]
-    public async Task AddOllamaApiClientTextGenerationFromServiceCollectionAsync(ServiceCollectionRegistration registration)
-    {
-        using var myHttpClientHandler = new FakeHttpMessageHandler(File.ReadAllText("TestData/text_generation_test_response_stream.txt"));
-        using var httpClient = new HttpClient(myHttpClientHandler) { BaseAddress = new Uri("http://localhost:11434"), };
-        using var client = new OllamaApiClient(httpClient, "model");
-        var services = new ServiceCollection();
-        string? serviceId = null;
-        switch (registration)
-        {
-            case ServiceCollectionRegistration.KeyedOllamaApiClient:
-                services.AddKeyedSingleton(serviceId = "model", client);
-                break;
-            case ServiceCollectionRegistration.KeyedIOllamaApiClient:
-                services.AddKeyedSingleton(serviceId = "model", client);
-                break;
-            case ServiceCollectionRegistration.OllamaApiClient:
-                services.AddSingleton(client);
-                break;
-            case ServiceCollectionRegistration.Endpoint:
-                services.AddSingleton(client);
-                break;
-        }
-
-        services.AddOllamaTextGeneration(serviceId: serviceId);
-        var serviceProvider = services.BuildServiceProvider();
-
-        ITextGenerationService service;
-        if (registration is ServiceCollectionRegistration.KeyedOllamaApiClient
-                         or ServiceCollectionRegistration.KeyedIOllamaApiClient)
-        {
-            service = serviceProvider.GetRequiredKeyedService(serviceId);
-        }
-        else
-        {
-            service = serviceProvider.GetRequiredService();
-        }
-
-        Assert.NotNull(service);
-
-        await service.GetStreamingTextContentsAsync("test prompt").GetAsyncEnumerator().MoveNextAsync();
-
-        Assert.Equal(1, myHttpClientHandler.InvokedCount);
-    }
-
-    public enum ServiceCollectionRegistration
-    {
-        KeyedOllamaApiClient,
-        KeyedIOllamaApiClient,
-        OllamaApiClient,
-        Endpoint,
-    }
-
-    public static TheoryData AddOllamaApiClientScenarios => new()
-    {
-        { ServiceCollectionRegistration.KeyedOllamaApiClient },
-        { ServiceCollectionRegistration.KeyedIOllamaApiClient },
-        { ServiceCollectionRegistration.OllamaApiClient },
-        { ServiceCollectionRegistration.Endpoint },
-    };
-
-    private sealed class FakeHttpMessageHandler(string responseContent) : HttpMessageHandler
-    {
-        public int InvokedCount { get; private set; }
-
-        protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
-        {
-            this.InvokedCount++;
-
-            return Task.FromResult(
-                new HttpResponseMessage(HttpStatusCode.OK)
-                {
-                    Content = new StringContent(responseContent)
-                });
-        }
-    }
 }
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs
index 18df66d5cea2..dab3a80976cf 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs
@@ -2,6 +2,11 @@
 
 using System;
 using System.Net.Http;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Connectors.Ollama;
+using Microsoft.SemanticKernel.Http;
+using Microsoft.SemanticKernel.TextGeneration;
 using OllamaSharp;
 
 namespace Microsoft.SemanticKernel;
@@ -20,17 +25,22 @@ public static class OllamaKernelBuilderExtensions
     /// The model for text generation.
     /// The endpoint to Ollama hosted service.
     /// The optional service ID.
+    /// The optional custom HttpClient.
     /// The updated kernel builder.
     public static IKernelBuilder AddOllamaTextGeneration(
         this IKernelBuilder builder,
         string modelId,
         Uri endpoint,
-        string? serviceId = null)
+        string? serviceId = null,
+        HttpClient? httpClient = null)
     {
         Verify.NotNull(builder);
 
-        builder.Services.AddOllamaTextGeneration(modelId, endpoint, serviceId);
-
+        builder.Services.AddKeyedSingleton<ITextGenerationService>(serviceId, (serviceProvider, _) =>
+            new OllamaTextGenerationService(
+                modelId: modelId,
+                endpoint: endpoint,
+                loggerFactory: serviceProvider.GetService<ILoggerFactory>()));
         return builder;
     }
 
@@ -50,8 +60,11 @@ public static IKernelBuilder AddOllamaTextGeneration(
     {
         Verify.NotNull(builder);
 
-        builder.Services.AddOllamaTextGeneration(modelId, httpClient, serviceId);
-
+        builder.Services.AddKeyedSingleton<ITextGenerationService>(serviceId, (serviceProvider, _) =>
+            new OllamaTextGenerationService(
+                modelId: modelId,
+                httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider),
+                loggerFactory: serviceProvider.GetService<ILoggerFactory>()));
         return builder;
     }
 
@@ -71,27 +84,11 @@ public static IKernelBuilder AddOllamaTextGeneration(
     {
         Verify.NotNull(builder);
 
-        builder.Services.AddOllamaTextGeneration(modelId, ollamaClient, serviceId);
-
-        return builder;
-    }
-
-    /// 
-    /// Add Ollama Text Generation service to the kernel builder.
-    /// 
-    /// The kernel builder.
-    /// The Ollama Sharp library client.
-    /// The optional service ID.
-    /// The updated kernel builder.
-    public static IKernelBuilder AddOllamaTextGeneration(
-        this IKernelBuilder builder,
-        OllamaApiClient? ollamaClient = null,
-        string? serviceId = null)
-    {
-        Verify.NotNull(builder);
-
-        builder.Services.AddOllamaTextGeneration(ollamaClient, serviceId);
-
+        builder.Services.AddKeyedSingleton<ITextGenerationService>(serviceId, (serviceProvider, _) =>
+            new OllamaTextGenerationService(
+                modelId: modelId,
+                ollamaClient: ollamaClient,
+                loggerFactory: serviceProvider.GetService<ILoggerFactory>()));
         return builder;
     }
 
@@ -151,7 +148,7 @@ public static IKernelBuilder AddOllamaChatCompletion(
     /// The updated kernel builder.
     public static IKernelBuilder AddOllamaChatCompletion(
         this IKernelBuilder builder,
-        OllamaApiClient? ollamaClient = null,
+        OllamaApiClient ollamaClient,
         string? serviceId = null)
     {
         Verify.NotNull(builder);
@@ -216,7 +213,7 @@ public static IKernelBuilder AddOllamaTextEmbeddingGeneration(
     /// The updated kernel builder.
     public static IKernelBuilder AddOllamaTextEmbeddingGeneration(
         this IKernelBuilder builder,
-        OllamaApiClient? ollamaClient = null,
+        OllamaApiClient ollamaClient,
         string? serviceId = null)
     {
         Verify.NotNull(builder);
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs
index 220737be2749..960466bd9f5d 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs
@@ -38,12 +38,10 @@ public static IServiceCollection AddOllamaTextGeneration(
         Verify.NotNull(services);
 
         return services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
-        {
-            return new OllamaTextGenerationService(
+            new OllamaTextGenerationService(
                 modelId: modelId,
                 endpoint: endpoint,
-                loggerFactory: serviceProvider.GetService());
-        });
+                loggerFactory: serviceProvider.GetService<ILoggerFactory>()));
     }
 
     /// 
@@ -63,12 +61,10 @@ public static IServiceCollection AddOllamaTextGeneration(
         Verify.NotNull(services);
 
         return services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
-        {
-            return new OllamaTextGenerationService(
+            new OllamaTextGenerationService(
                 modelId: modelId,
-                httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider),
-                loggerFactory: serviceProvider.GetService());
-        });
+                httpClient: HttpClientProvider.GetHttpClient(serviceProvider),
+                loggerFactory: serviceProvider.GetService<ILoggerFactory>()));
     }
 
     /// 
@@ -88,47 +84,10 @@ public static IServiceCollection AddOllamaTextGeneration(
         Verify.NotNull(services);
 
         return services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
-        {
-            var loggerFactory = serviceProvider.GetService();
-
-            return new OllamaTextGenerationService(
+            new OllamaTextGenerationService(
                 modelId: modelId,
                 ollamaClient: ollamaClient,
-                loggerFactory: loggerFactory);
-        });
-    }
-
-    /// 
-    /// Add Ollama Text Generation service to the kernel builder.
-    /// 
-    /// The target service collection.
-    /// The Ollama Sharp library client.
-    /// The optional service ID.
-    /// The updated kernel builder.
-    public static IServiceCollection AddOllamaTextGeneration(
-        this IServiceCollection services,
-        OllamaApiClient? ollamaClient = null,
-        string? serviceId = null)
-    {
-        Verify.NotNull(services);
-
-        return services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
-        {
-            var loggerFactory = serviceProvider.GetService();
-            ollamaClient ??= serviceProvider.GetKeyedService(serviceId);
-            ollamaClient ??= serviceProvider.GetKeyedService(serviceId) as OllamaApiClient;
-            ollamaClient ??= serviceProvider.GetService();
-            ollamaClient ??= serviceProvider.GetRequiredService() as OllamaApiClient;
-
-            if (ollamaClient is null)
-            {
-                throw new InvalidOperationException($"No {nameof(IOllamaApiClient)} implementations found in the service collection.");
-            }
-
-            return new OllamaTextGenerationService(
-                ollamaClient: ollamaClient,
-                loggerFactory: loggerFactory);
-        });
+                loggerFactory: serviceProvider.GetService<ILoggerFactory>()));
     }
 
     #endregion
@@ -212,7 +171,7 @@ public static IServiceCollection AddOllamaChatCompletion(
     /// The updated kernel builder.
     public static IServiceCollection AddOllamaChatCompletion(
         this IServiceCollection services,
-        OllamaApiClient? ollamaClient = null,
+        OllamaApiClient ollamaClient,
         string? serviceId = null)
     {
         Verify.NotNull(services);
@@ -220,15 +179,6 @@ public static IServiceCollection AddOllamaChatCompletion(
         return services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
         {
             var loggerFactory = serviceProvider.GetService<ILoggerFactory>();
-            ollamaClient ??= serviceProvider.GetKeyedService(serviceId);
-            ollamaClient ??= serviceProvider.GetKeyedService(serviceId) as OllamaApiClient;
-            ollamaClient ??= serviceProvider.GetService();
-            ollamaClient ??= serviceProvider.GetRequiredService() as OllamaApiClient;
-
-            if (ollamaClient is null)
-            {
-                throw new InvalidOperationException($"No {nameof(IOllamaApiClient)} implementations found in the service collection.");
-            }
 
             var builder = ((IChatClient)ollamaClient)
                 .AsBuilder()
@@ -324,7 +274,7 @@ public static IServiceCollection AddOllamaTextEmbeddingGeneration(
     /// The updated kernel builder.
     public static IServiceCollection AddOllamaTextEmbeddingGeneration(
         this IServiceCollection services,
-        OllamaApiClient? ollamaClient = null,
+        OllamaApiClient ollamaClient,
         string? serviceId = null)
     {
         Verify.NotNull(services);
@@ -332,15 +282,6 @@ public static IServiceCollection AddOllamaTextEmbeddingGeneration(
         return services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
         {
             var loggerFactory = serviceProvider.GetService<ILoggerFactory>();
-            ollamaClient ??= serviceProvider.GetKeyedService(serviceId);
-            ollamaClient ??= serviceProvider.GetKeyedService(serviceId) as OllamaApiClient;
-            ollamaClient ??= serviceProvider.GetService();
-            ollamaClient ??= serviceProvider.GetRequiredService() as OllamaApiClient;
-
-            if (ollamaClient is null)
-            {
-                throw new InvalidOperationException($"No {nameof(IOllamaApiClient)} implementations found in the service collection.");
-            }
 
             var builder = ((IEmbeddingGenerator<string, Embedding<float>>)ollamaClient)
                 .AsBuilder();
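
For context, the extension methods above register Semantic Kernel services as keyed singletons. A minimal sketch of the same registration and resolution pattern with hypothetical types, assuming Microsoft.Extensions.DependencyInjection 8.0 or later:

    using Microsoft.Extensions.DependencyInjection;

    var services = new ServiceCollection();

    // Register a keyed singleton via a factory, mirroring AddKeyedSingleton(serviceId, ...) above.
    services.AddKeyedSingleton<IGreeter>("primary", (serviceProvider, key) => new Greeter());

    var provider = services.BuildServiceProvider();

    // Resolve it back out of the container with the same key.
    var greeter = provider.GetRequiredKeyedService<IGreeter>("primary");

    public interface IGreeter { }
    public sealed class Greeter : IGreeter { }
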
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs
index bf05ca58797e..4dda6cd9a351 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs
@@ -65,18 +65,6 @@ public OllamaTextGenerationService(
     {
     }
 
-    /// 
-    /// Initializes a new instance of the  class.
-    /// 
-    /// The Ollama API client.
-    /// Optional logger factory to be used for logging.
-    public OllamaTextGenerationService(
-        OllamaApiClient ollamaClient,
-        ILoggerFactory? loggerFactory = null)
-        : base(ollamaClient.SelectedModel, ollamaClient, loggerFactory)
-    {
-    }
-
     /// 
     public IReadOnlyDictionary<string, object?> Attributes => this.AttributesInternal;
 
diff --git a/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIChatCompletionService.cs b/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIChatCompletionService.cs
index b26ecbeb6b0d..8bf04a9c13a6 100644
--- a/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIChatCompletionService.cs
+++ b/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIChatCompletionService.cs
@@ -89,13 +89,13 @@ private async IAsyncEnumerable RunInferenceAsync(ChatHistory chatHistory
         OnnxRuntimeGenAIPromptExecutionSettings onnxPromptExecutionSettings = this.GetOnnxPromptExecutionSettingsSettings(executionSettings);
 
         var prompt = this.GetPrompt(chatHistory, onnxPromptExecutionSettings);
-        using var tokens = this.GetTokenizer().Encode(prompt);
+        var tokens = this.GetTokenizer().Encode(prompt);
 
         using var generatorParams = new GeneratorParams(this.GetModel());
         this.UpdateGeneratorParamsFromPromptExecutionSettings(generatorParams, onnxPromptExecutionSettings);
+        generatorParams.SetInputSequences(tokens);
 
         using var generator = new Generator(this.GetModel(), generatorParams);
-        generator.AppendTokenSequences(tokens);
 
         bool removeNextTokenStartingWithSpace = true;
         while (!generator.IsDone())
@@ -104,6 +104,7 @@ private async IAsyncEnumerable RunInferenceAsync(ChatHistory chatHistory
 
             yield return await Task.Run(() =>
             {
+                generator.ComputeLogits();
                 generator.GenerateNextToken();
 
                 var outputTokens = generator.GetSequence(0);
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs
index 19992be01667..3fa17f593a4b 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs
@@ -661,65 +661,6 @@ public async Task FilterContextHasValidStreamingFlagAsync(bool isStreaming)
         Assert.Equal(isStreaming, actualStreamingFlag);
     }
 
-    [Fact]
-    public async Task PromptExecutionSettingsArePropagatedFromInvokePromptToFilterContextAsync()
-    {
-        // Arrange
-        this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
-
-        var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => { }, "Function1")]);
-
-        AutoFunctionInvocationContext? actualContext = null;
-
-        var kernel = this.GetKernelWithFilter(plugin, (context, next) =>
-        {
-            actualContext = context;
-            return Task.CompletedTask;
-        });
-
-        var expectedExecutionSettings = new OpenAIPromptExecutionSettings
-        {
-            ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions
-        };
-
-        // Act
-        var result = await kernel.InvokePromptAsync("Test prompt", new(expectedExecutionSettings));
-
-        // Assert
-        Assert.NotNull(actualContext);
-        Assert.Same(expectedExecutionSettings, actualContext!.ExecutionSettings);
-    }
-
-    [Fact]
-    public async Task PromptExecutionSettingsArePropagatedFromInvokePromptStreamingToFilterContextAsync()
-    {
-        // Arrange
-        this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
-
-        var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => { }, "Function1")]);
-
-        AutoFunctionInvocationContext? actualContext = null;
-
-        var kernel = this.GetKernelWithFilter(plugin, (context, next) =>
-        {
-            actualContext = context;
-            return Task.CompletedTask;
-        });
-
-        var expectedExecutionSettings = new OpenAIPromptExecutionSettings
-        {
-            ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions
-        };
-
-        // Act
-        await foreach (var item in kernel.InvokePromptStreamingAsync("Test prompt", new(expectedExecutionSettings)))
-        { }
-
-        // Assert
-        Assert.NotNull(actualContext);
-        Assert.Same(expectedExecutionSettings, actualContext!.ExecutionSettings);
-    }
-
     public void Dispose()
     {
         this._httpClient.Dispose();
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs
index d6b83f21a391..74360e542358 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs
@@ -589,48 +589,6 @@ public async Task GetChatMessageContentsUsesPromptAndSettingsCorrectlyAsync()
         Assert.Equal("user", messages[1].GetProperty("role").GetString());
     }
 
-    [Fact]
-    public async Task GetChatMessageContentsUsesDeveloperPromptAndSettingsCorrectlyAsync()
-    {
-        // Arrange
-        const string Prompt = "This is test prompt";
-        const string DeveloperMessage = "This is test system message";
-
-        var service = new OpenAIChatCompletionService("model-id", "api-key", httpClient: this._httpClient);
-        var settings = new OpenAIPromptExecutionSettings() { ChatDeveloperPrompt = DeveloperMessage };
-
-        this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK)
-        {
-            Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json"))
-        };
-
-        IKernelBuilder builder = Kernel.CreateBuilder();
-        builder.Services.AddTransient<IChatCompletionService>((sp) => service);
-        Kernel kernel = builder.Build();
-
-        // Act
-        var result = await kernel.InvokePromptAsync(Prompt, new(settings));
-
-        // Assert
-        Assert.Equal("Test chat response", result.ToString());
-
-        var requestContentByteArray = this._messageHandlerStub.RequestContent;
-
-        Assert.NotNull(requestContentByteArray);
-
-        var requestContent = JsonSerializer.Deserialize<JsonElement>(Encoding.UTF8.GetString(requestContentByteArray));
-
-        var messages = requestContent.GetProperty("messages");
-
-        Assert.Equal(2, messages.GetArrayLength());
-
-        Assert.Equal(DeveloperMessage, messages[0].GetProperty("content").GetString());
-        Assert.Equal("developer", messages[0].GetProperty("role").GetString());
-
-        Assert.Equal(Prompt, messages[1].GetProperty("content").GetString());
-        Assert.Equal("user", messages[1].GetProperty("role").GetString());
-    }
-
     [Fact]
     public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndSettingsCorrectlyAsync()
     {
@@ -1004,65 +962,6 @@ public async Task GetChatMessageInResponseFormatsAsync(string formatType, string
         Assert.NotNull(result);
     }
 
-    [Theory]
-    [InlineData(null, null)]
-    [InlineData("string", "low")]
-    [InlineData("string", "medium")]
-    [InlineData("string", "high")]
-    [InlineData("ChatReasonEffortLevel.Low", "low")]
-    [InlineData("ChatReasonEffortLevel.Medium", "medium")]
-    [InlineData("ChatReasonEffortLevel.High", "high")]
-    public async Task GetChatMessageInReasoningEffortAsync(string? effortType, string? expectedEffortLevel)
-    {
-        // Assert
-        object? reasoningEffortObject = null;
-        switch (effortType)
-        {
-            case "string":
-                reasoningEffortObject = expectedEffortLevel;
-                break;
-            case "ChatReasonEffortLevel.Low":
-                reasoningEffortObject = ChatReasoningEffortLevel.Low;
-                break;
-            case "ChatReasonEffortLevel.Medium":
-                reasoningEffortObject = ChatReasoningEffortLevel.Medium;
-                break;
-            case "ChatReasonEffortLevel.High":
-                reasoningEffortObject = ChatReasoningEffortLevel.High;
-                break;
-        }
-
-        var modelId = "o1";
-        var sut = new OpenAIChatCompletionService(modelId, "apiKey", httpClient: this._httpClient);
-        OpenAIPromptExecutionSettings executionSettings = new() { ReasoningEffort = reasoningEffortObject };
-
-        this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK)
-        {
-            Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json"))
-        };
-
-        // Act
-        var result = await sut.GetChatMessageContentAsync(this._chatHistoryForTest, executionSettings);
-
-        // Assert
-        Assert.NotNull(result);
-
-        var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!);
-        Assert.NotNull(actualRequestContent);
-
-        var optionsJson = JsonSerializer.Deserialize<JsonElement>(actualRequestContent);
-
-        if (expectedEffortLevel is null)
-        {
-            Assert.False(optionsJson.TryGetProperty("reasoning_effort", out _));
-            return;
-        }
-
-        var requestedReasoningEffort = optionsJson.GetProperty("reasoning_effort").GetString();
-
-        Assert.Equal(expectedEffortLevel, requestedReasoningEffort);
-    }
-
     [Fact(Skip = "Not working running in the console")]
     public async Task GetInvalidResponseThrowsExceptionAndIsCapturedByDiagnosticsAsync()
     {
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs
index dda1af38a596..90272b94717c 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs
@@ -34,10 +34,6 @@ public void ItCreatesOpenAIExecutionSettingsWithCorrectDefaults()
         Assert.Equal(128, executionSettings.MaxTokens);
         Assert.Null(executionSettings.Store);
         Assert.Null(executionSettings.Metadata);
-        Assert.Null(executionSettings.Seed);
-        Assert.Null(executionSettings.ReasoningEffort);
-        Assert.Null(executionSettings.ChatSystemPrompt);
-        Assert.Null(executionSettings.ChatDeveloperPrompt);
     }
 
     [Fact]
@@ -52,15 +48,13 @@ public void ItUsesExistingOpenAIExecutionSettings()
             PresencePenalty = 0.7,
             StopSequences = ["foo", "bar"],
             ChatSystemPrompt = "chat system prompt",
-            ChatDeveloperPrompt = "chat developer prompt",
             MaxTokens = 128,
             Logprobs = true,
             TopLogprobs = 5,
             TokenSelectionBiases = new Dictionary() { { 1, 2 }, { 3, 4 } },
             Seed = 123456,
             Store = true,
-            Metadata = new Dictionary() { { "foo", "bar" } },
-            ReasoningEffort = "high"
+            Metadata = new Dictionary() { { "foo", "bar" } }
         };
 
         // Act
@@ -76,9 +70,6 @@ public void ItUsesExistingOpenAIExecutionSettings()
         Assert.Equal(actualSettings.Seed, executionSettings.Seed);
         Assert.Equal(actualSettings.Store, executionSettings.Store);
         Assert.Equal(actualSettings.Metadata, executionSettings.Metadata);
-        Assert.Equal(actualSettings.ReasoningEffort, executionSettings.ReasoningEffort);
-        Assert.Equal(actualSettings.ChatSystemPrompt, executionSettings.ChatSystemPrompt);
-        Assert.Equal(actualSettings.ChatDeveloperPrompt, executionSettings.ChatDeveloperPrompt);
     }
 
     [Fact]
@@ -121,8 +112,6 @@ public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesSnakeCase()
                 { "results_per_prompt", 2 },
                 { "stop_sequences", new [] { "foo", "bar" } },
                 { "chat_system_prompt", "chat system prompt" },
-                { "chat_developer_prompt", "chat developer prompt" },
-                { "reasoning_effort", "high" },
                 { "max_tokens", 128 },
                 { "token_selection_biases", new Dictionary() { { 1, 2 }, { 3, 4 } } },
                 { "seed", 123456 },
@@ -155,8 +144,6 @@ public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesAsStrings()
                 { "results_per_prompt", "2" },
                 { "stop_sequences", new [] { "foo", "bar" } },
                 { "chat_system_prompt", "chat system prompt" },
-                { "chat_developer_prompt", "chat developer prompt" },
-                { "reasoning_effort", "high" },
                 { "max_tokens", "128" },
                 { "token_selection_biases", new Dictionary() { { "1", "2" }, { "3", "4" } } },
                 { "seed", 123456 },
@@ -187,8 +174,6 @@ public void ItCreatesOpenAIExecutionSettingsFromJsonSnakeCase()
               "results_per_prompt": 2,
               "stop_sequences": [ "foo", "bar" ],
               "chat_system_prompt": "chat system prompt",
-              "chat_developer_prompt": "chat developer prompt",
-              "reasoning_effort": "high",
               "token_selection_biases": { "1": 2, "3": 4 },
               "max_tokens": 128,
               "seed": 123456,
@@ -326,8 +311,6 @@ private static void AssertExecutionSettings(OpenAIPromptExecutionSettings execut
         Assert.Equal(0.7, executionSettings.PresencePenalty);
         Assert.Equal(new string[] { "foo", "bar" }, executionSettings.StopSequences);
         Assert.Equal("chat system prompt", executionSettings.ChatSystemPrompt);
-        Assert.Equal("chat developer prompt", executionSettings.ChatDeveloperPrompt);
-        Assert.Equal("high", executionSettings.ReasoningEffort!.ToString());
         Assert.Equal(new Dictionary() { { 1, 2 }, { 3, 4 } }, executionSettings.TokenSelectionBiases);
         Assert.Equal(128, executionSettings.MaxTokens);
         Assert.Equal(123456, executionSettings.Seed);
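
For orientation, the settings exercised by the test above can still be configured as in the minimal sketch below once the removed properties are gone. Values are illustrative only; the dictionary types for TokenSelectionBiases and Metadata are assumed from the test data.

using System.Collections.Generic;
using Microsoft.SemanticKernel.Connectors.OpenAI;

// Illustrative values mirroring the test data above.
var settings = new OpenAIPromptExecutionSettings
{
    Temperature = 0.7,
    TopP = 0.7,
    ChatSystemPrompt = "chat system prompt",
    TokenSelectionBiases = new Dictionary<int, int> { { 1, 2 }, { 3, 4 } },
    MaxTokens = 128,
    Seed = 123456,
    Store = true,
    Metadata = new Dictionary<string, string> { { "foo", "bar" } }
};
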
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs
index c09fbc87f6f3..129e7913b788 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs
@@ -200,7 +200,6 @@ internal async Task> GetChatMessageContentsAsy
             // In such cases, we'll return the last message in the chat history.
             var lastMessage = await this.FunctionCallsProcessor.ProcessFunctionCallsAsync(
                 chatMessageContent,
-                chatExecutionSettings,
                 chatHistory,
                 requestIndex,
                 (FunctionCallContent content) => IsRequestableTool(chatOptions.Tools, content),
@@ -385,7 +384,6 @@ internal async IAsyncEnumerable GetStreamingC
             // In such cases, we'll return the last message in the chat history.
             var lastMessage = await this.FunctionCallsProcessor.ProcessFunctionCallsAsync(
                 chatMessageContent,
-                chatExecutionSettings,
                 chatHistory,
                 requestIndex,
                 (FunctionCallContent content) => IsRequestableTool(chatOptions.Tools, content),
@@ -471,7 +469,6 @@ protected virtual ChatCompletionOptions CreateChatCompletionOptions(
             TopLogProbabilityCount = executionSettings.TopLogprobs,
             IncludeLogProbabilities = executionSettings.Logprobs,
             StoredOutputEnabled = executionSettings.Store,
-            ReasoningEffortLevel = GetEffortLevel(executionSettings),
         };
 
         var responseFormat = GetResponseFormat(executionSettings);
@@ -522,33 +519,6 @@ protected virtual ChatCompletionOptions CreateChatCompletionOptions(
         return options;
     }
 
-    protected static ChatReasoningEffortLevel? GetEffortLevel(OpenAIPromptExecutionSettings executionSettings)
-    {
-        var effortLevelObject = executionSettings.ReasoningEffort;
-        if (effortLevelObject is null)
-        {
-            return null;
-        }
-
-        if (effortLevelObject is ChatReasoningEffortLevel effort)
-        {
-            return effort;
-        }
-
-        if (effortLevelObject is string textEffortLevel)
-        {
-            return textEffortLevel.ToUpperInvariant() switch
-            {
-                "LOW" => ChatReasoningEffortLevel.Low,
-                "MEDIUM" => ChatReasoningEffortLevel.Medium,
-                "HIGH" => ChatReasoningEffortLevel.High,
-                _ => throw new NotSupportedException($"The provided reasoning effort '{textEffortLevel}' is not supported.")
-            };
-        }
-
-        throw new NotSupportedException($"The provided reasoning effort '{effortLevelObject.GetType()}' is not supported.");
-    }
-
     /// 
     /// Retrieves the response format based on the provided settings.
     /// 
@@ -619,14 +589,13 @@ private static bool IsRequestableTool(IList tools, FunctionCallContent
     /// 
     /// Optional chat instructions for the AI service
     /// Execution settings
-    /// Indicates what will be the role of the text. Defaults to system role prompt
     /// Chat object
-    private static ChatHistory CreateNewChat(string? text = null, OpenAIPromptExecutionSettings? executionSettings = null, AuthorRole? textRole = null)
+    private static ChatHistory CreateNewChat(string? text = null, OpenAIPromptExecutionSettings? executionSettings = null)
     {
         var chat = new ChatHistory();
 
         // If settings is not provided, create a new chat with the text as the system prompt
-        textRole ??= AuthorRole.System;
+        AuthorRole textRole = AuthorRole.System;
 
         if (!string.IsNullOrWhiteSpace(executionSettings?.ChatSystemPrompt))
         {
@@ -634,15 +603,9 @@ private static ChatHistory CreateNewChat(string? text = null, OpenAIPromptExecut
             textRole = AuthorRole.User;
         }
 
-        if (!string.IsNullOrWhiteSpace(executionSettings?.ChatDeveloperPrompt))
-        {
-            chat.AddDeveloperMessage(executionSettings!.ChatDeveloperPrompt!);
-            textRole = AuthorRole.User;
-        }
-
         if (!string.IsNullOrWhiteSpace(text))
         {
-            chat.AddMessage(textRole.Value, text!);
+            chat.AddMessage(textRole, text!);
         }
 
         return chat;
@@ -652,11 +615,6 @@ private static List CreateChatCompletionMessages(OpenAIPromptExecut
     {
         List messages = [];
 
-        if (!string.IsNullOrWhiteSpace(executionSettings.ChatDeveloperPrompt) && !chatHistory.Any(m => m.Role == AuthorRole.Developer))
-        {
-            messages.Add(new DeveloperChatMessage(executionSettings.ChatDeveloperPrompt));
-        }
-
         if (!string.IsNullOrWhiteSpace(executionSettings.ChatSystemPrompt) && !chatHistory.Any(m => m.Role == AuthorRole.System))
         {
             messages.Add(new SystemChatMessage(executionSettings.ChatSystemPrompt));
@@ -672,11 +630,6 @@ private static List CreateChatCompletionMessages(OpenAIPromptExecut
 
     private static List CreateRequestMessages(ChatMessageContent message)
     {
-        if (message.Role == AuthorRole.Developer)
-        {
-            return [new DeveloperChatMessage(message.Content) { ParticipantName = message.AuthorName }];
-        }
-
         if (message.Role == AuthorRole.System)
         {
             return [new SystemChatMessage(message.Content) { ParticipantName = message.AuthorName }];
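
With the developer-prompt branch removed, the text-to-chat mapping above reduces to: the incoming text becomes a system message unless a ChatSystemPrompt is configured, in which case the prompt is added first and the text becomes a user message. A hypothetical standalone sketch of that remaining logic (not the connector's actual private method):

using Microsoft.SemanticKernel.ChatCompletion;

// Hypothetical helper approximating the remaining CreateNewChat behavior.
static ChatHistory CreateChat(string? text, string? chatSystemPrompt)
{
    var chat = new ChatHistory();
    var textRole = AuthorRole.System;

    if (!string.IsNullOrWhiteSpace(chatSystemPrompt))
    {
        chat.AddSystemMessage(chatSystemPrompt!);
        textRole = AuthorRole.User; // caller-supplied text becomes a user message
    }

    if (!string.IsNullOrWhiteSpace(text))
    {
        chat.AddMessage(textRole, text!);
    }

    return chat;
}
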
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs
index bd3187b936d6..add62d564046 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs
@@ -18,29 +18,6 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI;
 [JsonNumberHandling(JsonNumberHandling.AllowReadingFromString)]
 public class OpenAIPromptExecutionSettings : PromptExecutionSettings
 {
-    /// 
-    /// Gets or sets an object specifying the effort level for the model to use when generating the completion.
-    /// 
-    /// 
-    /// Constrains effort on reasoning for reasoning models.
-    /// Reducing reasoning effort can result in faster responses and fewer tokens used on reasoning in a response.
-    /// Possible values are:
-    /// -  values: "low", "medium", "high";
-    /// -  object;
-    /// 
-    [Experimental("SKEXP0010")]
-    [JsonPropertyName("reasoning_effort")]
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public object? ReasoningEffort
-    {
-        get => this._reasoningEffort;
-        set
-        {
-            this.ThrowIfFrozen();
-            this._reasoningEffort = value;
-        }
-    }
-
     /// 
     /// Temperature controls the randomness of the completion.
     /// The higher the temperature, the more random the completion.
@@ -206,24 +183,6 @@ public string? ChatSystemPrompt
         }
     }
 
-    /// 
-    /// The system prompt to use when generating text using a chat model.
-    /// Defaults to "Assistant is a large language model."
-    /// 
-    [Experimental("SKEXP0010")]
-    [JsonPropertyName("chat_developer_prompt")]
-    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public string? ChatDeveloperPrompt
-    {
-        get => this._chatDeveloperPrompt;
-
-        set
-        {
-            this.ThrowIfFrozen();
-            this._chatDeveloperPrompt = value;
-        }
-    }
-
     /// 
     /// Modify the likelihood of specified tokens appearing in the completion.
     /// 
@@ -451,18 +410,15 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutio
             FunctionChoiceBehavior = this.FunctionChoiceBehavior,
             User = this.User,
             ChatSystemPrompt = this.ChatSystemPrompt,
-            ChatDeveloperPrompt = this.ChatDeveloperPrompt,
             Logprobs = this.Logprobs,
             TopLogprobs = this.TopLogprobs,
             Store = this.Store,
             Metadata = this.Metadata is not null ? new Dictionary(this.Metadata) : null,
-            ReasoningEffort = this.ReasoningEffort
         };
     }
 
     #region private ================================================================================
 
-    private object? _reasoningEffort;
     private double? _temperature;
     private double? _topP;
     private double? _presencePenalty;
@@ -475,7 +431,6 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutio
     private ToolCallBehavior? _toolCallBehavior;
     private string? _user;
     private string? _chatSystemPrompt;
-    private string? _chatDeveloperPrompt;
     private bool? _logprobs;
     private int? _topLogprobs;
     private bool? _store;
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/AnyTagEqualToFilterClause.cs b/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/AnyTagEqualToFilterClause.cs
index f40e63faa940..49ffce328e5e 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/AnyTagEqualToFilterClause.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/AnyTagEqualToFilterClause.cs
@@ -3,7 +3,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Represents a filter clause that filters by checking if a field consisting of a list of values contains a specific value.
+///  which filters by checking if a field consisting of a list of values contains a specific value.
 /// 
 public sealed class AnyTagEqualToFilterClause : FilterClause
 {
@@ -19,12 +19,12 @@ public AnyTagEqualToFilterClause(string fieldName, string value)
     }
 
     /// 
-    /// Gets the name of the field with the list of values.
+    /// The name of the field with the list of values.
     /// 
     public string FieldName { get; private set; }
 
     /// 
-    /// Gets the value that the list should contain.
+    /// The value that the list should contain.
     /// 
     public string Value { get; private set; }
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/EqualToFilterClause.cs b/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/EqualToFilterClause.cs
index 89865732bd75..a0eb45c0fbe3 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/EqualToFilterClause.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/EqualToFilterClause.cs
@@ -3,7 +3,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Represents a filter clause that filters using equality of a field value.
+///  which filters using equality of a field value.
 /// 
 public sealed class EqualToFilterClause : FilterClause
 {
@@ -19,12 +19,12 @@ public EqualToFilterClause(string fieldName, object value)
     }
 
     /// 
-    /// Gets the field name to match.
+    /// Field name to match.
     /// 
     public string FieldName { get; private set; }
 
     /// 
-    /// Gets the field value to match.
+    /// Field value to match.
     /// 
     public object Value { get; private set; }
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/FilterClause.cs b/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/FilterClause.cs
index af0c1dac51b3..4392893f16e3 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/FilterClause.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/FilterClauses/FilterClause.cs
@@ -3,7 +3,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines a base class for filter clauses.
+/// Base class for filter clauses.
 /// 
 /// 
 /// A  is used to request that the underlying search service should
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs
index 38302c7fecc8..7105b83c8737 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs
@@ -5,12 +5,12 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines an attribute to mark a property on a record class as 'data'.
+/// Attribute to mark a property on a record class as 'data'.
 /// 
 /// 
-/// Marking a property as 'data' means that the property is not a key and not a vector. But optionally,
-/// this property can have an associated vector field containing an embedding for this data.
-/// The characteristics defined here influence how the property is treated by the vector store.
+/// Marking a property as 'data' means that the property is not a key, and not a vector, but optionally
+/// this property may have an associated vector field containing an embedding for this data.
+/// The characteristics defined here will influence how the property is treated by the vector store.
 /// 
 [AttributeUsage(AttributeTargets.Property, AllowMultiple = false)]
 public sealed class VectorStoreRecordDataAttribute : Attribute
@@ -18,24 +18,22 @@ public sealed class VectorStoreRecordDataAttribute : Attribute
     /// 
     /// Gets or sets a value indicating whether this data property is filterable.
     /// 
-    /// 
-    /// The default is .
-    /// 
+    /// 
+    /// Default is .
+    /// 
     public bool IsFilterable { get; init; }
 
     /// 
-    /// Gets or sets a value indicating whether this data property is full-text searchable.
+    /// Gets or sets a value indicating whether this data property is full text searchable.
     /// 
-    /// 
-    /// The default is .
-    /// 
+    /// 
+    /// Default is .
+    /// 
     public bool IsFullTextSearchable { get; init; }
 
     /// 
     /// Gets or sets an optional name to use for the property in storage, if different from the property name.
+    /// E.g. the property name might be "MyProperty" but the storage name might be "my_property".
     /// 
-    /// 
-    /// For example, the property name might be "MyProperty" and the storage name might be "my_property".
-    /// 
     public string? StoragePropertyName { get; set; }
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordKeyAttribute.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordKeyAttribute.cs
index 318521355f1b..871794872adc 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordKeyAttribute.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordKeyAttribute.cs
@@ -5,19 +5,17 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines an attribute to mark a property on a record class as the key under which the record is stored in a vector store.
+/// Attribute to mark a property on a record class as the key under which the record is stored in a vector store.
 /// 
 /// 
-/// The characteristics defined here influence how the property is treated by the vector store.
+/// The characteristics defined here will influence how the property is treated by the vector store.
 /// 
 [AttributeUsage(AttributeTargets.Property, AllowMultiple = false)]
 public sealed class VectorStoreRecordKeyAttribute : Attribute
 {
     /// 
     /// Gets or sets an optional name to use for the property in storage, if different from the property name.
+    /// E.g. the property name might be "MyProperty" but the storage name might be "my_property".
     /// 
-    /// 
-    /// For example, the property name might be "MyProperty" and the storage name might be "my_property".
-    /// 
     public string? StoragePropertyName { get; set; }
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordVectorAttribute.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordVectorAttribute.cs
index a69e50bd7029..e86a0883574c 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordVectorAttribute.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordVectorAttribute.cs
@@ -5,10 +5,10 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines an attribute to mark a property on a record class as a vector.
+/// Attribute to mark a property on a record class as a vector.
 /// 
 /// 
-/// The characteristics defined here influence how the property is treated by the vector store.
+/// The characteristics defined here will influence how the property is treated by the vector store.
 /// 
 [AttributeUsage(AttributeTargets.Property, AllowMultiple = false)]
 public sealed class VectorStoreRecordVectorAttribute : Attribute
@@ -54,10 +54,10 @@ public VectorStoreRecordVectorAttribute(int Dimensions, string? DistanceFunction
     }
 
     /// 
-    /// Gets the number of dimensions that the vector has.
+    /// Gets or sets the number of dimensions that the vector has.
     /// 
     /// 
-    /// This property is required when creating collections, but can be omitted if not using that functionality.
+    /// This property is required when creating collections, but may be omitted if not using that functionality.
     /// If not provided when trying to create a collection, create will fail.
     /// 
     public int? Dimensions { get; private set; }
@@ -65,26 +65,24 @@ public VectorStoreRecordVectorAttribute(int Dimensions, string? DistanceFunction
     /// 
     /// Gets the kind of index to use.
     /// 
-    /// 
-    /// The default value varies by database type. See the documentation of your chosen database connector for more information.
-    /// 
     /// 
+    /// 
+    /// Default varies by database type. See the documentation of your chosen database connector for more information.
+    /// 
     public string? IndexKind { get; private set; }
 
     /// 
     /// Gets the distance function to use when comparing vectors.
     /// 
-    /// 
-    /// The default value varies by database type. See the documentation of your chosen database connector for more information.
-    /// 
     /// 
+    /// 
+    /// Default varies by database type. See the documentation of your chosen database connector for more information.
+    /// 
     public string? DistanceFunction { get; private set; }
 
     /// 
     /// Gets or sets an optional name to use for the property in storage, if different from the property name.
+    /// E.g. the property name might be "MyProperty" but the storage name might be "my_property".
     /// 
-    /// 
-    /// For example, the property name might be "MyProperty" and the storage name might be "my_property".
-    /// 
     public string? StoragePropertyName { get; set; }
 }
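
Taken together, the key, data, and vector attributes above describe a record model. A minimal sketch of an annotated record follows; the Hotel type, its ulong key, the 1536 dimensions, and the storage name are illustrative assumptions, not part of this patch.

using System;
using Microsoft.Extensions.VectorData;

// Hypothetical record model; names and dimensions are illustrative.
public sealed class Hotel
{
    [VectorStoreRecordKey(StoragePropertyName = "hotel_id")]
    public ulong HotelId { get; set; }

    [VectorStoreRecordData(IsFilterable = true, IsFullTextSearchable = true)]
    public string Description { get; set; } = string.Empty;

    [VectorStoreRecordVector(1536, DistanceFunction.CosineSimilarity, IndexKind.Hnsw)]
    public ReadOnlyMemory<float>? DescriptionEmbedding { get; set; }
}
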
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/DistanceFunction.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/DistanceFunction.cs
index 8c54411fab58..11e8b0173d48 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/DistanceFunction.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/DistanceFunction.cs
@@ -3,11 +3,11 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines a list of well-known distance functions that can be used to compare vectors.
+/// Defines a list of well known distance functions that can be used to compare vectors.
 /// 
 /// 
-/// Not all Vector Store connectors support all distance functions, and some connectors might
-/// support additional distance functions that aren't defined here. See the documentation
+/// Not all Vector Store connectors support all distance functions and some connectors may
+/// support additional distance functions that are not defined here. See the documentation
 /// for each connector for more information on what is supported.
 /// 
 public static class DistanceFunction
@@ -69,7 +69,7 @@ public static class DistanceFunction
     public const string EuclideanSquaredDistance = nameof(EuclideanSquaredDistance);
 
     /// 
-    /// The number of differences between vectors at each dimensions.
+    /// Number of differences between vectors at each dimension.
     /// 
     public const string Hamming = nameof(Hamming);
 
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/IndexKind.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/IndexKind.cs
index 088b31c87262..512b51e54c20 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/IndexKind.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/IndexKind.cs
@@ -3,11 +3,11 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines a list of well-known index types that can be used to index vectors.
+/// Defines a list of well known index types that can be used to index vectors.
 /// 
 /// 
-/// Not all Vector Store connectors support all index types, and some connectors might
-/// support additional index types that aren't defined here. See the documentation
+/// Not all Vector Store connectors support all index types and some connectors may
+/// support additional index types that are not defined here. See the documentation
 /// for each connector for more information on what is supported.
 /// 
 public static class IndexKind
@@ -16,36 +16,31 @@ public static class IndexKind
     /// Hierarchical Navigable Small World, which performs an approximate nearest neighbour (ANN) search.
     /// 
     /// 
-    /// This search has lower accuracy than exhaustive k nearest neighbor, but is faster and more efficient.
+    /// Lower accuracy than exhaustive k nearest neighbor, but faster and more efficient.
     /// 
     public const string Hnsw = nameof(Hnsw);
 
     /// 
-    /// Brute force search to find the nearest neighbors.
+    /// Does a brute force search to find the nearest neighbors.
+    /// Calculates the distances between all pairs of data points, so it has a linear time complexity that grows in direct proportion to the number of points.
+    /// Also referred to as exhaustive k nearest neighbor in some databases.
     /// 
     /// 
-    /// This search calculates the distances between all pairs of data points, so it has a linear time complexity that grows directly proportional to the number of points.
-    /// It's also referred to as exhaustive k nearest neighbor in some databases.
-    /// This search has high recall accuracy, but is slower and more expensive than HNSW.
-    /// It works better with smaller datasets.
+    /// High recall accuracy, but slower and more expensive than HNSW.
+    /// Better with smaller datasets.
     /// 
     public const string Flat = nameof(Flat);
 
     /// 
-    /// Inverted File with Flat Compression.
-    /// 
-    /// 
-    /// This search is designed to enhance search efficiency by narrowing the search area through the use of neighbor partitions or clusters.
+    /// Inverted File with Flat Compression. Designed to enhance search efficiency by narrowing the search area through the use of neighbor partitions or clusters.
     /// Also referred to as approximate nearest neighbor (ANN) search.
-    /// 
+    /// 
     public const string IvfFlat = nameof(IvfFlat);
 
     /// 
     /// Disk-based Approximate Nearest Neighbor algorithm designed for efficiently searching for approximate nearest neighbors (ANN) in high-dimensional spaces.
+    /// The primary focus of DiskANN is to handle large-scale datasets that cannot fit entirely into memory, leveraging disk storage to store the data while maintaining fast search times.
     /// 
-    /// 
-    /// The primary focus of DiskANN is to handle large-scale datasets that can't fit entirely into memory, leveraging disk storage to store the data while maintaining fast search times.
-    /// 
     public const string DiskAnn = nameof(DiskAnn);
 
     /// 
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDataProperty.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDataProperty.cs
index e3e5c22296b5..29ba283411b9 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDataProperty.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDataProperty.cs
@@ -8,7 +8,7 @@ namespace Microsoft.Extensions.VectorData;
 /// Defines a data property on a vector store record.
 /// 
 /// 
-/// The characteristics defined here influence how the property is treated by the vector store.
+/// The characteristics defined here will influence how the property is treated by the vector store.
 /// 
 public sealed class VectorStoreRecordDataProperty : VectorStoreRecordProperty
 {
@@ -25,7 +25,7 @@ public VectorStoreRecordDataProperty(string propertyName, Type propertyType)
     /// 
     /// Initializes a new instance of the  class by cloning the given source.
     /// 
-    /// The source to clone.
+    /// The source to clone
     public VectorStoreRecordDataProperty(VectorStoreRecordDataProperty source)
         : base(source)
     {
@@ -36,16 +36,16 @@ public VectorStoreRecordDataProperty(VectorStoreRecordDataProperty source)
     /// 
     /// Gets or sets a value indicating whether this data property is filterable.
     /// 
-    /// 
-    /// The default is .
-    /// 
+    /// 
+    /// Default is .
+    /// 
     public bool IsFilterable { get; init; }
 
     /// 
     /// Gets or sets a value indicating whether this data property is full text searchable.
     /// 
-    /// 
-    /// The default is .
-    /// 
+    /// 
+    /// Default is .
+    /// 
     public bool IsFullTextSearchable { get; init; }
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDefinition.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDefinition.cs
index d33d0fd4a145..f159b77d195d 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDefinition.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDefinition.cs
@@ -5,7 +5,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Describes the properties of a record stored in a vector store.
+/// A description of the properties of a record stored in a vector store.
 /// 
 /// 
 /// Each property contains additional information about how the property will be treated by the vector store.
@@ -16,7 +16,7 @@ public sealed class VectorStoreRecordDefinition
     private static readonly List s_emptyFields = new();
 
     /// 
-    /// Gets or sets the list of properties that are stored in the record.
+    /// The list of properties that are stored in the record.
     /// 
     public IReadOnlyList Properties { get; init; } = s_emptyFields;
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordKeyProperty.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordKeyProperty.cs
index 4973d6e637cb..92b8260b19d8 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordKeyProperty.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordKeyProperty.cs
@@ -8,7 +8,7 @@ namespace Microsoft.Extensions.VectorData;
 /// Defines a key property on a vector store record.
 /// 
 /// 
-/// The characteristics defined here influence how the property is treated by the vector store.
+/// The characteristics defined here will influence how the property is treated by the vector store.
 /// 
 public sealed class VectorStoreRecordKeyProperty : VectorStoreRecordProperty
 {
@@ -25,7 +25,7 @@ public VectorStoreRecordKeyProperty(string propertyName, Type propertyType)
     /// 
     /// Initializes a new instance of the  class by cloning the given source.
     /// 
-    /// The source to clone.
+    /// The source to clone
     public VectorStoreRecordKeyProperty(VectorStoreRecordKeyProperty source)
         : base(source)
     {
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordProperty.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordProperty.cs
index 723261f23e95..c468817684e9 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordProperty.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordProperty.cs
@@ -8,7 +8,7 @@ namespace Microsoft.Extensions.VectorData;
 /// Defines a base property class for properties on a vector store record.
 /// 
 /// 
-/// The characteristics defined here influence how the property is treated by the vector store.
+/// The characteristics defined here will influence how the property is treated by the vector store.
 /// 
 public abstract class VectorStoreRecordProperty
 {
@@ -41,23 +41,21 @@ private protected VectorStoreRecordProperty(VectorStoreRecordProperty source)
     }
 
     /// 
-    /// Gets the name of the property on the data model.
+    /// Gets or sets the name of the property on the data model.
     /// 
     public string DataModelPropertyName { get; private set; }
 
     /// 
     /// Gets or sets an optional name to use for the property in storage, if different from the property name.
-    /// 
-    /// 
-    /// For example, the property name might be "MyProperty" and the storage name might be "my_property".
-    /// This property is only respected by implementations that do not support a well-known
-    /// serialization mechanism like JSON, in which case the attributes used by that serialization system will
+    /// E.g. the property name might be "MyProperty" but the storage name might be "my_property".
+    /// This property will only be respected by implementations that do not support a well known
+    /// serialization mechanism like JSON, in which case the attributes used by that serialization system will
     /// be used.
-    /// 
+    /// 
     public string? StoragePropertyName { get; init; }
 
     /// 
-    /// Gets the type of the property.
+    /// Gets or sets the type of the property.
     /// 
     public Type PropertyType { get; private set; }
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs
index 1d1791ed555f..8e8e97153ef8 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs
@@ -8,7 +8,7 @@ namespace Microsoft.Extensions.VectorData;
 /// Defines a vector property on a vector store record.
 /// 
 /// 
-/// The characteristics defined here influence how the property is treated by the vector store.
+/// The characteristics defined here will influence how the property is treated by the vector store.
 /// 
 public sealed class VectorStoreRecordVectorProperty : VectorStoreRecordProperty
 {
@@ -25,7 +25,7 @@ public VectorStoreRecordVectorProperty(string propertyName, Type propertyType)
     /// 
     /// Initializes a new instance of the  class by cloning the given source.
     /// 
-    /// The source to clone.
+    /// The source to clone
     public VectorStoreRecordVectorProperty(VectorStoreRecordVectorProperty source)
         : base(source)
     {
@@ -38,26 +38,26 @@ public VectorStoreRecordVectorProperty(VectorStoreRecordVectorProperty source)
     /// Gets or sets the number of dimensions that the vector has.
     /// 
     /// 
-    /// This property is required when creating collections, but can be omitted if not using that functionality.
+    /// This property is required when creating collections, but may be omitted if not using that functionality.
     /// If not provided when trying to create a collection, create will fail.
     /// 
     public int? Dimensions { get; init; }
 
     /// 
-    /// Gets or sets the kind of index to use.
+    /// Gets the kind of index to use.
     /// 
-    /// 
-    /// The default varies by database type. See the documentation of your chosen database connector for more information.
-    /// 
-    /// 
+    /// 
+    /// 
+    /// Default varies by database type. See the documentation of your chosen database connector for more information.
+    /// 
     public string? IndexKind { get; init; }
 
     /// 
-    /// Gets or sets the distance function to use when comparing vectors.
+    /// Gets the distance function to use when comparing vectors.
     /// 
-    /// 
-    /// The default varies by database type. See the documentation of your chosen database connector for more information.
-    /// 
-    /// 
+    /// 
+    /// 
+    /// Default varies by database type. See the documentation of your chosen database connector for more information.
+    /// 
     public string? DistanceFunction { get; init; }
 }
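
The same schema can also be declared without attributes by composing the property types above into a record definition. A minimal sketch, mirroring the hypothetical Hotel record from earlier:

using System;
using System.Collections.Generic;
using Microsoft.Extensions.VectorData;

// Hypothetical definition mirroring the attribute-based Hotel example.
var hotelDefinition = new VectorStoreRecordDefinition
{
    Properties = new List<VectorStoreRecordProperty>
    {
        new VectorStoreRecordKeyProperty("HotelId", typeof(ulong)),
        new VectorStoreRecordDataProperty("Description", typeof(string)) { IsFilterable = true, IsFullTextSearchable = true },
        new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory<float>?))
        {
            Dimensions = 1536,
            IndexKind = IndexKind.Hnsw,
            DistanceFunction = DistanceFunction.CosineSimilarity
        }
    }
};
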
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetRecordOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetRecordOptions.cs
index e623cb676247..a849f42794d4 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetRecordOptions.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetRecordOptions.cs
@@ -3,7 +3,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines options for calling .
+/// Options when calling .
 /// 
 public class GetRecordOptions
 {
@@ -17,7 +17,7 @@ public GetRecordOptions()
     /// 
     /// Initializes a new instance of the  class by cloning the given options.
     /// 
-    /// The options to clone.
+    /// The options to clone
     public GetRecordOptions(GetRecordOptions source)
     {
         this.IncludeVectors = source.IncludeVectors;
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj b/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj
index b07403941339..91827eb081e7 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj
@@ -10,7 +10,7 @@
   
 
   
-    9.0.0-preview.1.25078.1
+    9.0.0-preview.1.24523.1
     9.0.0.0
     
     9.0.0-preview.1.24518.1
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs
index 5368c5301828..5e39a541ef86 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs
@@ -12,7 +12,7 @@ namespace Microsoft.Extensions.VectorData;
 public interface IVectorizableTextSearch
 {
     /// 
-    /// Searches the vector store for records that match the given text and filter. The text string will be vectorized downstream and used for the vector search.
+    /// Search the vector store for records that match the given text and filter. The text string will be vectorized downstream and used for the vector search.
     /// 
     /// The text to search the store with.
     /// The options that control the behavior of the search.
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs
index b2a5a54194a6..3286fafc15fc 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs
@@ -12,7 +12,7 @@ namespace Microsoft.Extensions.VectorData;
 public interface IVectorizedSearch
 {
     /// 
-    /// Searches the vector store for records that match the given embedding and filter.
+    /// Search the vector store for records that match the given embedding and filter.
     /// 
     /// The type of the vector.
     /// The vector to search the store with.
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs
index 1430a69b3740..731031ae6706 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchFilter.cs
@@ -6,39 +6,39 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Provides filtering when doing vector searches.
+/// Used to provide filtering when doing vector searches.
 /// Contains configuration for doing basic vector search filtering.
 /// 
 /// 
-/// A filter has a collection of  instances that can be used
+/// A filter has a collection of s that can be used
 /// to request that the underlying service filter the search results.
-/// All clauses are combined with 'and'.
+/// All clauses are combined with and.
 /// 
 [Obsolete("Use VectorSearchOptions.Filter instead of VectorSearchOptions.OldFilter")]
 public sealed class VectorSearchFilter
 {
-    /// The filter clauses to 'and' together.
+    /// The filter clauses to and together.
     private readonly List _filterClauses = [];
 
     /// Gets the default search filter.
     public static VectorSearchFilter Default { get; } = new VectorSearchFilter();
 
     /// 
-    /// The filter clauses to 'and' together.
+    /// The filter clauses to and together.
     /// 
     public IEnumerable FilterClauses => this._filterClauses;
 
     /// 
-    /// Creates a new instance of 
+    /// Create an instance of 
     /// 
     public VectorSearchFilter()
     {
     }
 
     /// 
-    /// Creates a new instance of  with the provided  instances.
+    /// Create an instance of  with the provided s.
+    /// The  instances to use
     /// 
-    /// The  instances to use.
     public VectorSearchFilter(IEnumerable filterClauses)
     {
         if (filterClauses == null)
@@ -50,13 +50,13 @@ public VectorSearchFilter(IEnumerable filterClauses)
     }
 
     /// 
-    /// Adds an 'equal to' clause to the filter options.
+    /// Add an equal to clause to the filter options.
     /// 
-    /// The name of the property to check against. Use the name of the property from your data model or as provided in the record definition.
-    /// The value that the property should match.
-    /// A  instance to allow fluent configuration.
+    /// Name of the property to check against. Use the name of the property from your data model or as provided in the record definition.
+    /// Value that the property should match.
+    ///  instance to allow fluent configuration.
     /// 
-    /// This clause checks if a property is equal to a specific value.
+    /// This clause will check if a property is equal to a specific value.
     /// 
     public VectorSearchFilter EqualTo(string propertyName, object value)
     {
@@ -65,13 +65,13 @@ public VectorSearchFilter EqualTo(string propertyName, object value)
     }
 
     /// 
-    /// Adds an 'any tag equal to' clause to the filter options.
+    /// Add an any tag equal to clause to the filter options.
     /// 
-    /// The name of the property consisting of a list of values to check against. Use the name of the property from your data model or as provided in the record definition.
-    /// The value that the list should contain.
-    /// A  instance to allow fluent configuration.
+    /// Name of the property consisting of a list of values to check against. Use the name of the property from your data model or as provided in the record definition.
+    /// Value that the list should contain.
+    ///  instance to allow fluent configuration.
     /// 
-    /// This clause checks if a property consisting of a list of values contains a specific value.
+    /// This clause will check if a property consisting of a list of values contains a specific value.
     /// 
     public VectorSearchFilter AnyTagEqualTo(string propertyName, string value)
     {
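
For reference, fluent usage of this filter looks as follows. The property names are illustrative, and since the type is marked [Obsolete] in favor of VectorSearchOptions.Filter, the compiler warning is suppressed in this sketch.

using Microsoft.Extensions.VectorData;

#pragma warning disable CS0618 // VectorSearchFilter is obsolete; shown only for illustration
var oldFilter = new VectorSearchFilter()
    .EqualTo("Category", "Hotels")
    .AnyTagEqualTo("Tags", "pool");
#pragma warning restore CS0618
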
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs
index 72b54d263a39..6ac552651379 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs
@@ -6,7 +6,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines options for vector search.
+/// Options for vector search.
 /// 
 public class VectorSearchOptions
 {
@@ -24,10 +24,8 @@ public class VectorSearchOptions
     /// 
     /// Gets or sets the name of the vector property to search on.
     /// Use the name of the vector property from your data model or as provided in the record definition.
+    /// If not provided, this will default to the first vector property in the schema.
     /// 
-    /// 
-    /// The default value is the first vector property in the schema.
-    /// 
     public string? VectorPropertyName { get; init; }
 
     /// 
@@ -36,7 +34,7 @@ public class VectorSearchOptions
     public int Top { get; init; } = 3;
 
     /// 
-    /// Gets or sets the number of results to skip before returning results, that is, the index of the first result to return.
+    /// Gets or sets the number of results to skip before returning results, i.e. the index of the first result to return.
     /// 
     public int Skip { get; init; } = 0;
 
@@ -48,11 +46,9 @@ public class VectorSearchOptions
     /// 
     /// Gets or sets a value indicating whether the total count should be included in the results.
     /// 
-    /// 
-    /// The default value is false.
-    /// 
     /// 
-    /// Not all vector search implementations support this option, in which case the total
+    /// Default value is false.
+    /// Not all vector search implementations will support this option, in which case the total
     /// count will be null even if requested via this option.
     /// 
     public bool IncludeTotalCount { get; init; } = false;
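
A minimal sketch of these options as they stand after the change; the values and the referenced vector property name are illustrative and reuse the hypothetical Hotel record from earlier.

using Microsoft.Extensions.VectorData;

// Illustrative values; VectorPropertyName falls back to the first vector property when omitted.
var searchOptions = new VectorSearchOptions
{
    VectorPropertyName = nameof(Hotel.DescriptionEmbedding),
    Top = 5,
    Skip = 0,
    IncludeTotalCount = true
};
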
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResult.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResult.cs
index f5793844d674..14a813a4a797 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResult.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResult.cs
@@ -3,7 +3,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Represents a single search result from a vector search.
+/// A single search result from a vector search.
 /// 
 /// The record data model to use for retrieving data from the store.
 public sealed class VectorSearchResult
@@ -20,12 +20,12 @@ public VectorSearchResult(TRecord record, double? score)
     }
 
     /// 
-    /// Gets the record that was found by the search.
+    /// The record that was found by the search.
     /// 
     public TRecord Record { get; }
 
     /// 
-    /// Gets the score of this result in relation to the search query.
+    /// The score of this result in relation to the search query.
     /// 
     public double? Score { get; }
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResults.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResults.cs
index 293315ee554a..41202c513e2b 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResults.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResults.cs
@@ -12,7 +12,7 @@ namespace Microsoft.Extensions.VectorData;
 public class VectorSearchResults(IAsyncEnumerable> results)
 {
     /// 
-    /// Gets or sets the total count of results found by the search operation, or null
+    /// The total count of results found by the search operation, or null
     /// if the count was not requested or cannot be computed.
     /// 
     /// 
@@ -21,12 +21,12 @@ public class VectorSearchResults(IAsyncEnumerable
-    /// Gets or sets the metadata associated with the content.
+    /// The metadata associated with the content.
     /// 
     public IReadOnlyDictionary? Metadata { get; init; }
 
     /// 
-    /// Gets the search results.
+    /// The search results.
     /// 
     public IAsyncEnumerable> Results { get; } = results;
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs
index 007dcf79da03..d6b1bae8dfd2 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs
@@ -6,7 +6,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines an interface for accessing the list of collections in a vector store.
+/// Interface for accessing the list of collections in a vector store.
 /// 
 /// 
 /// This interface can be used with collections of any schema type, but requires you to provide schema information when getting a collection.
@@ -14,12 +14,12 @@ namespace Microsoft.Extensions.VectorData;
 public interface IVectorStore
 {
     /// 
-    /// Gets a collection from the vector store.
+    /// Get a collection from the vector store.
     /// 
     /// The data type of the record key.
-    /// The record data model to use for adding, updating, and retrieving data from the collection.
+    /// The record data model to use for adding, updating and retrieving data from the collection.
     /// The name of the collection.
-    /// The schema of the record type.
+    /// Defines the schema of the record type.
     /// A new  instance for managing the records in the collection.
     /// 
     /// To successfully request a collection, either  must be annotated with attributes that define the schema of
@@ -32,7 +32,7 @@ IVectorStoreRecordCollection GetCollection(string
         where TKey : notnull;
 
     /// 
-    /// Retrieves the names of all the collections in the vector store.
+    /// Retrieve the names of all the collections in the vector store.
     /// 
     /// The  to monitor for cancellation requests. The default is .
     /// The list of names of all the collections in the vector store.
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs
index b8e410d4afd5..6415ed35fe59 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs
@@ -7,10 +7,10 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines a schema-aware interface for managing a named collection of records in a vector store and for creating or deleting the collection itself.
+/// A schema-aware interface for managing a named collection of records in a vector store and for creating or deleting the collection itself.
 /// 
 /// The data type of the record key.
-/// The record data model to use for adding, updating, and retrieving data from the store.
+/// The record data model to use for adding, updating and retrieving data from the store.
 #pragma warning disable CA1711 // Identifiers should not have incorrect suffix
 public interface IVectorStoreRecordCollection : IVectorizedSearch
 #pragma warning restore CA1711 // Identifiers should not have incorrect suffix
@@ -19,31 +19,31 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch
     /// 
     /// Gets the name of the collection.
     /// 
-    string CollectionName { get; }
+    public string CollectionName { get; }
 
     /// 
-    /// Checks if the collection exists in the vector store.
+    /// Check if the collection exists in the vector store.
     /// 
     /// The  to monitor for cancellation requests. The default is .
     ///  if the collection exists,  otherwise.
     Task CollectionExistsAsync(CancellationToken cancellationToken = default);
 
     /// 
-    /// Creates this collection in the vector store.
+    /// Create this collection in the vector store.
     /// 
     /// The  to monitor for cancellation requests. The default is .
     /// A  that completes when the collection has been created.
     Task CreateCollectionAsync(CancellationToken cancellationToken = default);
 
     /// 
-    /// Creates this collection in the vector store if it doesn't already exist.
+    /// Create this collection in the vector store if it does not already exist.
     /// 
     /// The  to monitor for cancellation requests. The default is .
     /// A  that completes when the collection has been created.
     Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default);
 
     /// 
-    /// Deletes the collection from the vector store.
+    /// Delete the collection from the vector store.
     /// 
     /// The  to monitor for cancellation requests. The default is .
     /// A  that completes when the collection has been deleted.
@@ -53,77 +53,71 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch
     /// Gets a record from the vector store. Does not guarantee that the collection exists.
     /// Returns null if the record is not found.
     /// 
-    /// The unique ID associated with the record to get.
+    /// The unique id associated with the record to get.
     /// Optional options for retrieving the record.
     /// The  to monitor for cancellation requests. The default is .
     /// The record if found, otherwise null.
-    /// The command fails to execute for any reason.
-    /// The mapping between the storage model and record data model fails.
+    /// Thrown when the command fails to execute for any reason.
+    /// Thrown when mapping between the storage model and record data model fails.
     Task GetAsync(TKey key, GetRecordOptions? options = default, CancellationToken cancellationToken = default);
 
     /// 
     /// Gets a batch of records from the vector store. Does not guarantee that the collection exists.
+    /// Gets will be made in a single request or in a single parallel batch depending on the available store functionality.
+    /// Only found records will be returned, so the resultset may be smaller than the requested keys.
+    /// Throws for any issues other than records not being found.
     /// 
-    /// The unique IDs associated with the record to get.
+    /// The unique ids associated with the record to get.
     /// Optional options for retrieving the records.
     /// The  to monitor for cancellation requests. The default is .
-    /// The records associated with the specified unique keys.
-    /// 
-    /// Gets are made in a single request or in a single parallel batch depending on the available store functionality.
-    /// Only found records are returned, so the result set might be smaller than the requested keys.
-    /// This method throws for any issues other than records not being found.
-    /// 
-    /// The command fails to execute for any reason.
-    /// The mapping between the storage model and record data model fails.
+    /// The records associated with the unique keys provided.
+    /// Thrown when the command fails to execute for any reason.
+    /// Thrown when mapping between the storage model and record data model fails.
     IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default);
 
     /// 
     /// Deletes a record from the vector store. Does not guarantee that the collection exists.
     /// 
-    /// The unique ID associated with the record to remove.
+    /// The unique id associated with the record to remove.
     /// The  to monitor for cancellation requests. The default is .
     /// The unique identifier for the record.
-    /// The command fails to execute for any reason other than that the record does not exist.
+    /// Thrown when the command fails to execute for any reason other than that the record does not exist.
     Task DeleteAsync(TKey key, CancellationToken cancellationToken = default);
 
     /// 
     /// Deletes a batch of records from the vector store. Does not guarantee that the collection exists.
+    /// Deletes will be made in a single request or in a single parallel batch depending on the available store functionality.
+    /// If a record is not found, it will be ignored and the batch will succeed.
+    /// If any record cannot be deleted for any other reason, the operation will throw. Some records may have already been deleted, while others may not, so the entire operation should be retried.
     /// 
-    /// The unique IDs associated with the records to remove.
+    /// The unique ids associated with the records to remove.
     /// The  to monitor for cancellation requests. The default is .
     /// A  that completes when the records have been deleted.
-    /// 
-    /// Deletes are made in a single request or in a single parallel batch, depending on the available store functionality.
-    /// If a record isn't found, it is ignored and the batch succeeds.
-    /// If any record can't be deleted for any other reason, the operation throws. Some records might have already been deleted while others might not have, so the entire operation should be retried.
-    /// 
-    /// The command fails to execute for any reason other than that a record does not exist.
+    /// Thrown when the command fails to execute for any reason other than that a record does not exist.
     Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default);
 
     /// 
     /// Upserts a record into the vector store. Does not guarantee that the collection exists.
-    ///     If the record already exists, it is updated.
-    ///     If the record does not exist, it is created.
+    ///     If the record already exists, it will be updated.
+    ///     If the record does not exist, it will be created.
     /// 
     /// The record to upsert.
     /// The  to monitor for cancellation requests. The default is .
     /// The unique identifier for the record.
-    /// The command fails to execute for any reason.
-    /// The mapping between the storage model and record data model fails.
+    /// Thrown when the command fails to execute for any reason.
+    /// Thrown when mapping between the storage model and record data model fails.
     Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default);
 
     /// 
     /// Upserts a group of records into the vector store. Does not guarantee that the collection exists.
-    ///     If the record already exists, it is updated.
-    ///     If the record does not exist, it is created.
+    ///     If the record already exists, it will be updated.
+    ///     If the record does not exist, it will be created.
+    /// Upserts will be made in a single request or in a single parallel batch depending on the available store functionality.
     /// 
     /// The records to upsert.
     /// The  to monitor for cancellation requests. The default is .
     /// The unique identifiers for the records.
-    /// 
-    /// Upserts are made in a single request or in a single parallel batch depending on the available store functionality.
-    /// 
-    /// The command fails to execute for any reason.
-    /// The mapping between the storage model and record data model fails.
+    /// Thrown when the command fails to execute for any reason.
+    /// Thrown when mapping between the storage model and record data model fails.
     IAsyncEnumerable UpsertBatchAsync(IEnumerable records, CancellationToken cancellationToken = default);
 }
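
As a usage sketch of the interface above: the collection instance, the hypothetical Hotel record from earlier, its ulong key type, and the precomputed query embedding are assumptions, and error handling is omitted.

using System;
using System.Threading.Tasks;
using Microsoft.Extensions.VectorData;

// Assumes an existing collection instance and a precomputed query embedding.
static async Task RunAsync(
    IVectorStoreRecordCollection<ulong, Hotel> collection,
    Hotel hotel,
    ReadOnlyMemory<float> queryEmbedding)
{
    await collection.CreateCollectionIfNotExistsAsync();

    // Upsert, get, and delete use the signatures shown in the interface above.
    ulong key = await collection.UpsertAsync(hotel);
    Hotel? fetched = await collection.GetAsync(key, new GetRecordOptions { IncludeVectors = true });

    VectorSearchResults<Hotel> results = await collection.VectorizedSearchAsync(queryEmbedding);
    await foreach (VectorSearchResult<Hotel> match in results.Results)
    {
        Console.WriteLine($"{match.Record.Description} (score: {match.Score})");
    }

    await collection.DeleteAsync(key);
}
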
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs
index 3bac47a89121..ddfc807c3e00 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs
@@ -3,21 +3,21 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines an interface for mapping between a storage model and the consumer record data model.
+/// Interface for mapping between a storage model and the consumer record data model.
 /// 
 /// The consumer record data model to map to or from.
 /// The storage model to map to or from.
 public interface IVectorStoreRecordMapper
 {
     /// 
-    /// Maps from the consumer record data model to the storage model.
+    /// Map from the consumer record data model to the storage model.
     /// 
     /// The consumer record data model record to map.
     /// The mapped result.
     TStorageModel MapFromDataToStorageModel(TRecordDataModel dataModel);
 
     /// 
-    /// Maps from the storage model to the consumer record data model.
+    /// Map from the storage model to the consumer record data model.
     /// 
     /// The storage data model record to map.
     /// Options to control the mapping behavior.
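As a hedged illustration of the mapper contract documented above, a sketch of a custom mapper between a hypothetical Hotel record and a JsonObject storage model (neither type is part of this patch):

// Requires using System.Text.Json.Nodes; for JsonObject.
internal sealed class HotelJsonMapper : IVectorStoreRecordMapper<Hotel, JsonObject>
{
    // Consumer record -> storage representation.
    public JsonObject MapFromDataToStorageModel(Hotel dataModel) =>
        new() { ["id"] = dataModel.HotelId, ["description"] = dataModel.Description };

    // Storage representation -> consumer record; options control behavior such as vector inclusion.
    public Hotel MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) =>
        new() { HotelId = (string)storageModel["id"]!, Description = (string)storageModel["description"]! };
}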
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/StorageToDataModelMapperOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/StorageToDataModelMapperOptions.cs
index 7652a0e4ef71..66f2cb0d2019 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/StorageToDataModelMapperOptions.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/StorageToDataModelMapperOptions.cs
@@ -3,12 +3,12 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines options to use with the  method.
+/// Options to use with the  method.
 /// 
 public class StorageToDataModelMapperOptions
 {
     /// 
-    /// Gets or sets a value indicating whether to include vectors in the retrieval result.
+    /// Gets or sets a value indicating whether to include vectors in the retrieval result.
     /// 
     public bool IncludeVectors { get; init; } = false;
 }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreException.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreException.cs
index dc0f5bd1d1b5..0f98f11ccd43 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreException.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreException.cs
@@ -5,7 +5,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines a base exception type for any type of failure when using vector stores.
+/// Base exception type thrown for any type of failure when using vector stores.
 /// 
 public abstract class VectorStoreException : Exception
 {
@@ -25,10 +25,10 @@ protected VectorStoreException(string? message) : base(message)
     }
 
     /// 
-    /// Initializes a new instance of the  class with a specified error message and a reference to the inner exception that's the cause of this exception.
+    /// Initializes a new instance of the  class with a specified error message and a reference to the inner exception that is the cause of this exception.
     /// 
     /// The error message that explains the reason for the exception.
-    /// The exception that's the cause of the current exception, or a null reference if no inner exception is specified.
+    /// The exception that is the cause of the current exception, or a null reference if no inner exception is specified.
     protected VectorStoreException(string? message, Exception? innerException) : base(message, innerException)
     {
     }
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreOperationException.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreOperationException.cs
index 6e50942940bd..59b624e88976 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreOperationException.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreOperationException.cs
@@ -5,7 +5,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines an exception that's thrown when a vector store command fails, such as upserting a record or deleting a collection.
+/// Exception thrown when a vector store command fails, such as upserting a record or deleting a collection.
 /// 
 public class VectorStoreOperationException : VectorStoreException
 {
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreRecordMappingException.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreRecordMappingException.cs
index 9aa4e9c41737..f9876a7f618d 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreRecordMappingException.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreRecordMappingException.cs
@@ -5,7 +5,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Defines an exception that's thrown when a failure occurs while trying to convert models for storage or retrieval.
+/// Exception thrown when a failure occurs while trying to convert models for storage or retrieval.
 /// 
 public class VectorStoreRecordMappingException : VectorStoreException
 {
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs
index 6ab9ee119e55..0b704785b1d0 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs
@@ -5,7 +5,7 @@
 namespace Microsoft.Extensions.VectorData;
 
 /// 
-/// Represents a generic data model that can be used to store and retrieve any data from a vector store.
+/// A generic data model that can be used to store and retrieve any data from a vector store.
 /// 
 /// The data type of the record key.
 /// The key of the record.
@@ -20,7 +20,7 @@ public sealed class VectorStoreGenericDataModel(TKey key)
     /// Gets or sets a dictionary of data items stored in the record.
     /// 
     /// 
-    /// This dictionary contains all fields that aren't vectors.
+    /// This dictionary contains all fields that are not vectors.
     /// 
     public Dictionary Data { get; init; } = new();
 
diff --git a/dotnet/src/Experimental/Process.Abstractions/IKernelExternalProcessMessageChannel.cs b/dotnet/src/Experimental/Process.Abstractions/IKernelExternalProcessMessageChannel.cs
deleted file mode 100644
index 10cfbadd951e..000000000000
--- a/dotnet/src/Experimental/Process.Abstractions/IKernelExternalProcessMessageChannel.cs
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.Threading.Tasks;
-
-namespace Microsoft.SemanticKernel;
-
-/// 
-/// An interface that provides a channel for emitting external messages from a step.
-/// In addition provide common methods like initialization and Uninitialization
-/// 
-public interface IExternalKernelProcessMessageChannel
-{
-    /// 
-    /// Initialization of the external messaging channel used
-    /// 
-    /// A 
-    abstract ValueTask Initialize();
-
-    /// 
-    /// Uninitialization of the external messaging channel used
-    /// 
-    /// A 
-    abstract ValueTask Uninitialize();
-
-    /// 
-    /// Emits the specified event from the step outside the SK process
-    /// 
-    /// name of the topic to be used externally as the event name
-    /// data to be transmitted externally
-    /// 
-    abstract Task EmitExternalEventAsync(string externalTopicEvent, object? eventData);
-}
diff --git a/dotnet/src/Experimental/Process.Abstractions/IKernelProcessMessageChannel.cs b/dotnet/src/Experimental/Process.Abstractions/IKernelProcessMessageChannel.cs
index 53df244aa8a9..d98dc211aaf8 100644
--- a/dotnet/src/Experimental/Process.Abstractions/IKernelProcessMessageChannel.cs
+++ b/dotnet/src/Experimental/Process.Abstractions/IKernelProcessMessageChannel.cs
@@ -14,5 +14,5 @@ public interface IKernelProcessMessageChannel
     /// 
     /// The event to emit.
     /// A 
-    abstract ValueTask EmitEventAsync(KernelProcessEvent processEvent);
+    public abstract ValueTask EmitEventAsync(KernelProcessEvent processEvent);
 }
diff --git a/dotnet/src/Experimental/Process.Abstractions/KernelProcessContext.cs b/dotnet/src/Experimental/Process.Abstractions/KernelProcessContext.cs
index 59f420642f0a..6495eecbfdec 100644
--- a/dotnet/src/Experimental/Process.Abstractions/KernelProcessContext.cs
+++ b/dotnet/src/Experimental/Process.Abstractions/KernelProcessContext.cs
@@ -27,10 +27,4 @@ public abstract class KernelProcessContext
     /// 
     /// A  where T is 
     public abstract Task GetStateAsync();
-
-    /// 
-    /// Gets the instance of  used for external messages
-    /// 
-    /// 
-    public abstract Task GetExternalMessageChannelAsync();
 }
diff --git a/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepContext.cs b/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepContext.cs
index 9beadf7b9896..6dfac0412d29 100644
--- a/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepContext.cs
+++ b/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepContext.cs
@@ -10,21 +10,18 @@ namespace Microsoft.SemanticKernel;
 public sealed class KernelProcessStepContext
 {
     private readonly IKernelProcessMessageChannel _stepMessageChannel;
-    private readonly IExternalKernelProcessMessageChannel? _externalMessageChannel;
 
     /// 
     /// Initializes a new instance of the  class.
     /// 
     /// An instance of .
-    /// An instance of 
-    public KernelProcessStepContext(IKernelProcessMessageChannel channel, IExternalKernelProcessMessageChannel? externalMessageChannel = null)
+    public KernelProcessStepContext(IKernelProcessMessageChannel channel)
     {
         this._stepMessageChannel = channel;
-        this._externalMessageChannel = externalMessageChannel;
     }
 
     /// 
-    /// Emit an SK process event from the current step.
+    /// Emit an event from the current step.
     /// 
     /// An instance of  to be emitted from the 
     /// A 
@@ -34,7 +31,7 @@ public ValueTask EmitEventAsync(KernelProcessEvent processEvent)
     }
 
     /// 
-    /// Emit an SK process event from the current step with a simplified method signature.
+    /// Emit an event from the current step with a simplified method signature.
     /// 
     /// 
     /// 
@@ -55,22 +52,4 @@ public ValueTask EmitEventAsync(
                 Visibility = visibility
             });
     }
-
-    /// 
-    /// Emit an external event to through a 
-    /// component if connected from within the SK process
-    /// 
-    /// 
-    /// 
-    /// 
-    /// 
-    public async Task EmitExternalEventAsync(string topicName, object? processEventData = null)
-    {
-        if (this._externalMessageChannel == null)
-        {
-            throw new KernelException($"External message channel not configured for step with topic {topicName}");
-        }
-
-        await this._externalMessageChannel.EmitExternalEventAsync(topicName, processEventData).ConfigureAwait(false);
-    }
 }
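For context, a hedged sketch of the in-process emission path that remains after the external channel is removed here; the step, function, and event names are made up:

public sealed class GreetStep : KernelProcessStep
{
    [KernelFunction]
    public async Task GreetAsync(KernelProcessStepContext context, string name)
    {
        // Only the internal process channel is available to the step now;
        // external cloud-event emission goes away with this change.
        await context.EmitEventAsync(new KernelProcessEvent { Id = "Greeted", Data = $"Hello {name}" });
    }
}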
diff --git a/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Controllers/ProcessTestController.cs b/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Controllers/ProcessTestController.cs
index 4df90bdd20a1..187500e15dee 100644
--- a/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Controllers/ProcessTestController.cs
+++ b/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Controllers/ProcessTestController.cs
@@ -5,7 +5,6 @@
 using Microsoft.AspNetCore.Mvc;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Process.Serialization;
-using SemanticKernel.Process.IntegrationTests.CloudEvents;
 
 namespace SemanticKernel.Process.IntegrationTests.Controllers;
 
@@ -73,23 +72,6 @@ public async Task GetProcessAsync(string processId)
         return this.Ok(daprProcess);
     }
 
-    /// 
-    /// Retrieves current state of the MockCloudEventClient used in the running process
-    /// 
-    /// The Id of the process.
-    /// Mock Cloud client ingested via dependency injection
-    /// 
-    [HttpGet("processes/{processId}/mockCloudClient")]
-    public Task GetMockCloudClient(string processId, MockCloudEventClient cloudClient)
-    {
-        if (!s_processes.TryGetValue(processId, out DaprKernelProcessContext? context))
-        {
-            return Task.FromResult(this.NotFound());
-        }
-
-        return Task.FromResult(this.Ok(cloudClient));
-    }
-
     /// 
     /// Checks the health of the Dapr runtime by attempting to send a message to a health actor.
     /// 
diff --git a/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Program.cs b/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Program.cs
index 6d3789bb2047..d1d66f317d50 100644
--- a/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Program.cs
+++ b/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Program.cs
@@ -2,7 +2,6 @@
 
 using Microsoft.SemanticKernel;
 using SemanticKernel.Process.IntegrationTests;
-using SemanticKernel.Process.IntegrationTests.CloudEvents;
 
 var builder = WebApplication.CreateBuilder(args);
 
@@ -16,10 +15,6 @@
 // Configure the Kernel with DI. This is required for dependency injection to work with processes.
 builder.Services.AddKernel();
 
-// Configure IExternalKernelProcessMessageChannel used for testing purposes
-builder.Services.AddSingleton(MockCloudEventClient.Instance);
-builder.Services.AddSingleton(MockCloudEventClient.Instance);
-
 // Configure Dapr
 builder.Services.AddActors(static options =>
 {
diff --git a/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/DaprTestProcessContext.cs b/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/DaprTestProcessContext.cs
index e7ca6292b16f..59401b1c2979 100644
--- a/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/DaprTestProcessContext.cs
+++ b/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/DaprTestProcessContext.cs
@@ -5,7 +5,6 @@
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Process;
 using Microsoft.SemanticKernel.Process.Serialization;
-using SemanticKernel.Process.IntegrationTests.CloudEvents;
 
 namespace SemanticKernel.Process.IntegrationTests;
 internal sealed class DaprTestProcessContext : KernelProcessContext
@@ -69,14 +68,4 @@ public override Task StopAsync()
     {
         throw new NotImplementedException();
     }
-
-    public override async Task GetExternalMessageChannelAsync()
-    {
-        var response = await this._httpClient.GetFromJsonAsync($"http://localhost:5200/processes/{this._processId}/mockCloudClient", options: this._serializerOptions);
-        return response switch
-        {
-            null => throw new InvalidOperationException("Process not found"),
-            _ => response
-        };
-    }
 }
diff --git a/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/ProcessTestFixture.cs b/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/ProcessTestFixture.cs
index c6f55eb95f69..fa35cf1fe0fa 100644
--- a/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/ProcessTestFixture.cs
+++ b/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/ProcessTestFixture.cs
@@ -121,11 +121,9 @@ private async Task WaitForHostStartupAsync()
     /// The process to start.
     /// An instance of 
     /// An optional initial event.
-    /// channel used for external messages
     /// A 
-    public async Task StartProcessAsync(KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent, IExternalKernelProcessMessageChannel? externalMessageChannel = null)
+    public async Task StartProcessAsync(KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent)
     {
-        // Actual Kernel injection of Kernel and ExternalKernelProcessMessageChannel is in dotnet\src\Experimental\Process.IntegrationTestHost.Dapr\Program.cs
         var context = new DaprTestProcessContext(process, this._httpClient!);
         await context.StartWithEventAsync(initialEvent);
         return context;
diff --git a/dotnet/src/Experimental/Process.IntegrationTestRunner.Local/ProcessTestFixture.cs b/dotnet/src/Experimental/Process.IntegrationTestRunner.Local/ProcessTestFixture.cs
index cbe202fdd7e0..7fb4f7d72393 100644
--- a/dotnet/src/Experimental/Process.IntegrationTestRunner.Local/ProcessTestFixture.cs
+++ b/dotnet/src/Experimental/Process.IntegrationTestRunner.Local/ProcessTestFixture.cs
@@ -17,10 +17,9 @@ public class ProcessTestFixture
     /// The process to start.
     /// An instance of 
     /// An optional initial event.
-    /// channel used for external messages
     /// A 
-    public async Task StartProcessAsync(KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent, IExternalKernelProcessMessageChannel? externalMessageChannel = null)
+    public async Task StartProcessAsync(KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent)
     {
-        return await process.StartAsync(kernel, initialEvent, externalMessageChannel);
+        return await process.StartAsync(kernel, initialEvent);
     }
 }
diff --git a/dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventClient.cs b/dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventClient.cs
deleted file mode 100644
index 317a2fe545d2..000000000000
--- a/dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventClient.cs
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.Collections.Generic;
-using System.Threading.Tasks;
-using Microsoft.SemanticKernel;
-
-namespace SemanticKernel.Process.IntegrationTests.CloudEvents;
-/// 
-/// Class used for testing purposes to mock emitting external cloud events
-/// 
-public class MockCloudEventClient : IExternalKernelProcessMessageChannel
-{
-    /// 
-    /// Initialization counter for testing
-    /// 
-    public int InitializationCounter { get; set; } = 0;
-    /// 
-    /// Uninitialization counter for testing
-    /// 
-    public int UninitializationCounter { get; set; } = 0;
-    /// 
-    /// Captures cloud events emitted for testing
-    /// 
-    public List CloudEvents { get; set; } = [];
-
-    private static MockCloudEventClient? s_instance = null;
-
-    /// 
-    /// Instance of  when used as singleton
-    /// 
-    public static MockCloudEventClient Instance
-    {
-        get
-        {
-            return s_instance ??= new MockCloudEventClient();
-        }
-    }
-
-    /// 
-    public Task EmitExternalEventAsync(string externalTopicEvent, object? eventData)
-    {
-        if (eventData != null)
-        {
-            this.CloudEvents.Add(new() { TopicName = externalTopicEvent, Data = (string)eventData });
-        }
-
-        return Task.CompletedTask;
-    }
-
-    /// 
-    public ValueTask Initialize()
-    {
-        this.InitializationCounter++;
-        return ValueTask.CompletedTask;
-    }
-
-    /// 
-    public ValueTask Uninitialize()
-    {
-        this.UninitializationCounter++;
-        return ValueTask.CompletedTask;
-    }
-}
diff --git a/dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventData.cs b/dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventData.cs
deleted file mode 100644
index 97dd18e9de2d..000000000000
--- a/dotnet/src/Experimental/Process.IntegrationTests.Resources/CloudEvents/MockCloudEventData.cs
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-namespace SemanticKernel.Process.IntegrationTests.CloudEvents;
-
-/// 
-/// Mock cloud event data used for testing purposes only
-/// 
-public class MockCloudEventData
-{
-    /// 
-    /// Name of the mock topic
-    /// 
-    public required string TopicName { get; set; }
-
-    /// 
-    /// Data emitted in the mock cloud event
-    /// 
-    public string? Data { get; set; }
-}
diff --git a/dotnet/src/Experimental/Process.IntegrationTests.Resources/ProcessCloudEventsResources.cs b/dotnet/src/Experimental/Process.IntegrationTests.Resources/ProcessCloudEventsResources.cs
deleted file mode 100644
index e54388269e1e..000000000000
--- a/dotnet/src/Experimental/Process.IntegrationTests.Resources/ProcessCloudEventsResources.cs
+++ /dev/null
@@ -1,40 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.Threading.Tasks;
-using Microsoft.SemanticKernel;
-
-namespace SemanticKernel.Process.IntegrationTests;
-
-#pragma warning disable CS1591 // Missing XML comment for publicly visible type or member
-
-/// 
-/// A step that emits messages externally
-/// 
-public sealed class MockProxyStep : KernelProcessStep
-{
-    public static class FunctionNames
-    {
-        public const string OnRepeatMessage = nameof(OnRepeatMessage);
-        public const string OnEchoMessage = nameof(OnEchoMessage);
-    }
-
-    public static class TopicNames
-    {
-        public const string RepeatExternalTopic = nameof(RepeatExternalTopic);
-        public const string EchoExternalTopic = nameof(EchoExternalTopic);
-    }
-
-    [KernelFunction(FunctionNames.OnRepeatMessage)]
-    public async Task OnRepeatMessageAsync(KernelProcessStepContext context, string message)
-    {
-        await context.EmitExternalEventAsync(TopicNames.RepeatExternalTopic, message);
-    }
-
-    [KernelFunction(FunctionNames.OnEchoMessage)]
-    public async Task OnEchoMessageAsync(KernelProcessStepContext context, string message)
-    {
-        await context.EmitExternalEventAsync(TopicNames.EchoExternalTopic, message);
-    }
-}
-
-#pragma warning restore CS1591 // Missing XML comment for publicly visible type or member
diff --git a/dotnet/src/Experimental/Process.IntegrationTests.Shared/Process.IntegrationTests.Shared.props b/dotnet/src/Experimental/Process.IntegrationTests.Shared/Process.IntegrationTests.Shared.props
index b0be78e43a06..9c4c35980463 100644
--- a/dotnet/src/Experimental/Process.IntegrationTests.Shared/Process.IntegrationTests.Shared.props
+++ b/dotnet/src/Experimental/Process.IntegrationTests.Shared/Process.IntegrationTests.Shared.props
@@ -2,6 +2,5 @@
   
     
     
-    
   
 
\ No newline at end of file
diff --git a/dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessCloudEventsTests.cs b/dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessCloudEventsTests.cs
deleted file mode 100644
index ee262b50f7e9..000000000000
--- a/dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessCloudEventsTests.cs
+++ /dev/null
@@ -1,113 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-#pragma warning disable IDE0005 // Using directive is unnecessary.
-using System;
-using System.Linq;
-using System.Runtime.Serialization;
-using System.Threading.Tasks;
-using Microsoft.Extensions.Configuration;
-using Microsoft.SemanticKernel;
-using SemanticKernel.IntegrationTests.TestSettings;
-using SemanticKernel.Process.IntegrationTests.CloudEvents;
-using Xunit;
-#pragma warning restore IDE0005 // Using directive is unnecessary.
-
-namespace SemanticKernel.Process.IntegrationTests;
-
-/// 
-/// Integration tests for processes.
-/// 
-[Collection(nameof(ProcessTestGroup))]
-public sealed class ProcessCloudEventsTests : IClassFixture
-{
-    private readonly ProcessTestFixture _fixture;
-    private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder();
-    private readonly IConfigurationRoot _configuration = new ConfigurationBuilder()
-            .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true)
-            .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true)
-            .AddEnvironmentVariables()
-            .AddUserSecrets()
-            .Build();
-
-    private readonly IExternalKernelProcessMessageChannel _externalMessageChannel = MockCloudEventClient.Instance;
-
-    /// 
-    /// Initializes a new instance of the  class. This is called by the test framework.
-    /// 
-    /// 
-    public ProcessCloudEventsTests(ProcessTestFixture fixture)
-    {
-        this._fixture = fixture;
-    }
-
-    /// 
-    /// Tests a simple linear process with two steps and no sub processes.
-    /// 
-    /// A 
-    [Fact]
-    public async Task LinearProcessWithCloudEventSubscribersAsync()
-    {
-        // Arrange
-        OpenAIConfiguration configuration = this._configuration.GetSection("OpenAI").Get()!;
-        this._kernelBuilder.AddOpenAIChatCompletion(
-            modelId: configuration.ModelId!,
-            apiKey: configuration.ApiKey);
-
-        Kernel kernel = this._kernelBuilder.Build();
-        var process = this.CreateLinearProcess("SimpleWithCloudEvents").Build();
-
-        // Act
-        string testInput = "Test";
-        var processHandle = await this._fixture.StartProcessAsync(process, kernel, new() { Id = ProcessTestsEvents.StartProcess, Data = testInput }, this._externalMessageChannel);
-        var externalMessageChannel = await processHandle.GetExternalMessageChannelAsync();
-
-        // Assert
-        Assert.NotNull(externalMessageChannel);
-        var mockClient = (MockCloudEventClient)externalMessageChannel;
-        Assert.NotNull(mockClient);
-        Assert.True(mockClient.InitializationCounter > 0);
-        Assert.Equal(2, mockClient.CloudEvents.Count);
-        Assert.Equal(testInput, mockClient.CloudEvents[0].Data);
-        Assert.Equal(MockProxyStep.TopicNames.EchoExternalTopic, mockClient.CloudEvents[0].TopicName);
-        Assert.Equal($"{testInput} {testInput}", mockClient.CloudEvents[1].Data);
-        Assert.Equal(MockProxyStep.TopicNames.RepeatExternalTopic, mockClient.CloudEvents[1].TopicName);
-    }
-
-    /// 
-    /// Creates a simple linear process with two steps and a proxy step to emit events externally
- /// Input Event:
- /// Output Events: [, ]
-    ///
-    /// ┌────────┐    ┌────────┐
-    /// │  echo  ├─┬─►│ repeat ├───┐
-    /// └────────┘ │  └────────┘   │
-    ///            │               │
-    ///            │   ┌───────┐   │
-    ///            └─►│ proxy │◄───┘
-    ///               └───────┘
-    ///
-    ///
- private ProcessBuilder CreateLinearProcess(string name) - { - var processBuilder = new ProcessBuilder(name); - var echoStep = processBuilder.AddStepFromType(); - var repeatStep = processBuilder.AddStepFromType(); - var proxyStep = processBuilder.AddStepFromType(); - - processBuilder.OnInputEvent(ProcessTestsEvents.StartProcess) - .SendEventTo(new ProcessFunctionTargetBuilder(echoStep)); - - echoStep.OnFunctionResult(nameof(EchoStep.Echo)) - .SendEventTo(new ProcessFunctionTargetBuilder(repeatStep, parameterName: "message")); - - echoStep - .OnFunctionResult() - .SendEventTo(new ProcessFunctionTargetBuilder(proxyStep, functionName: MockProxyStep.FunctionNames.OnEchoMessage)); - - repeatStep - .OnEvent(ProcessTestsEvents.OutputReadyInternal) - .SendEventTo(new ProcessFunctionTargetBuilder(proxyStep, functionName: MockProxyStep.FunctionNames.OnRepeatMessage)); - - return processBuilder; - } -} diff --git a/dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessTestFixture.cs b/dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessTestFixture.cs index 90dabb3c4bcd..1fd11bef274b 100644 --- a/dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessTestFixture.cs +++ b/dotnet/src/Experimental/Process.IntegrationTests.Shared/ProcessTestFixture.cs @@ -17,7 +17,6 @@ public abstract class ProcessTestFixture /// The process to start. /// An instance of /// An optional initial event. - /// channel used for external messages /// A - public abstract Task StartProcessAsync(KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent, IExternalKernelProcessMessageChannel? externalMessageChannel = null); + public abstract Task StartProcessAsync(KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent); } diff --git a/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessContext.cs b/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessContext.cs index 9ddf0c4074cb..b59dd70211f4 100644 --- a/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessContext.cs +++ b/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessContext.cs @@ -13,17 +13,18 @@ public sealed class LocalKernelProcessContext : KernelProcessContext, IDisposabl private readonly LocalProcess _localProcess; private readonly Kernel _kernel; - internal LocalKernelProcessContext(KernelProcess process, Kernel kernel, ProcessEventProxy? eventProxy = null, IExternalKernelProcessMessageChannel? externalMessageChannel = null) + internal LocalKernelProcessContext(KernelProcess process, Kernel kernel, ProcessEventProxy? eventProxy = null) { Verify.NotNull(process, nameof(process)); Verify.NotNull(kernel, nameof(kernel)); Verify.NotNullOrWhiteSpace(process.State?.Name); this._kernel = kernel; - this._localProcess = new LocalProcess(process, kernel) + this._localProcess = new LocalProcess( + process, + kernel) { - EventProxy = eventProxy, - ExternalMessageChannel = externalMessageChannel, + EventProxy = eventProxy }; } @@ -54,10 +55,4 @@ public override Task SendEventAsync(KernelProcessEvent processEvent) => /// Disposes of the resources used by the process. ///
public void Dispose() => this._localProcess.Dispose(); - - /// - public override Task GetExternalMessageChannelAsync() - { - return Task.FromResult(this._localProcess.ExternalMessageChannel); - } } diff --git a/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessFactory.cs b/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessFactory.cs index eac8826b37a5..4904366c9d39 100644 --- a/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessFactory.cs +++ b/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessFactory.cs @@ -15,13 +15,12 @@ public static class LocalKernelProcessFactory /// Required: The to start running. /// Required: An instance of /// Required: The initial event to start the process. - /// Optional: an instance of . /// An instance of that can be used to interrogate or stop the running process. - public static async Task StartAsync(this KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent, IExternalKernelProcessMessageChannel? externalMessageChannel = null) + public static async Task StartAsync(this KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent) { Verify.NotNull(initialEvent, nameof(initialEvent)); - LocalKernelProcessContext processContext = new(process, kernel, null, externalMessageChannel); + LocalKernelProcessContext processContext = new(process, kernel); await processContext.StartWithEventAsync(initialEvent).ConfigureAwait(false); return processContext; } diff --git a/dotnet/src/Experimental/Process.LocalRuntime/LocalProcess.cs b/dotnet/src/Experimental/Process.LocalRuntime/LocalProcess.cs index aea736ceced0..b7a6695996f4 100644 --- a/dotnet/src/Experimental/Process.LocalRuntime/LocalProcess.cs +++ b/dotnet/src/Experimental/Process.LocalRuntime/LocalProcess.cs @@ -211,7 +211,6 @@ private ValueTask InitializeProcessAsync() { ParentProcessId = this.Id, EventProxy = this.EventProxy, - ExternalMessageChannel = this.ExternalMessageChannel, }; } diff --git a/dotnet/src/Experimental/Process.LocalRuntime/LocalStep.cs b/dotnet/src/Experimental/Process.LocalRuntime/LocalStep.cs index c95ba287d0db..2fe9287bafda 100644 --- a/dotnet/src/Experimental/Process.LocalRuntime/LocalStep.cs +++ b/dotnet/src/Experimental/Process.LocalRuntime/LocalStep.cs @@ -80,8 +80,6 @@ public LocalStep(KernelProcessStepInfo stepInfo, Kernel kernel, string? parentPr ///
internal ProcessEventProxy? EventProxy { get; init; } - internal IExternalKernelProcessMessageChannel? ExternalMessageChannel { get; init; } - /// /// Retrieves all events that have been emitted by this step in the previous superstep. /// @@ -233,13 +231,6 @@ internal virtual async Task HandleMessageAsync(ProcessMessage message) /// protected virtual async ValueTask InitializeStepAsync() { - if (this.ExternalMessageChannel != null) - { - // initialize external message channel - // TODO: in LocalRuntime need to ensure initialization only happens once - await this.ExternalMessageChannel.Initialize().ConfigureAwait(false); - } - // Instantiate an instance of the inner step object KernelProcessStep stepInstance = (KernelProcessStep)ActivatorUtilities.CreateInstance(this._kernel.Services, this._stepInfo.InnerStepType); var kernelPlugin = KernelPluginFactory.CreateFromObject(stepInstance, pluginName: this._stepInfo.State.Name); @@ -251,7 +242,7 @@ protected virtual async ValueTask InitializeStepAsync() } // Initialize the input channels - this._initialInputs = this.FindInputChannels(this._functions, this._logger, this.ExternalMessageChannel); + this._initialInputs = this.FindInputChannels(this._functions, this._logger); this._inputs = this._initialInputs.ToDictionary(kvp => kvp.Key, kvp => kvp.Value?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value)); // Activate the step with user-defined state if needed diff --git a/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActor.cs b/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActor.cs deleted file mode 100644 index 52b16051e070..000000000000 --- a/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActor.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading.Tasks; -using Dapr.Actors.Runtime; - -namespace Microsoft.SemanticKernel; - -/// -/// An actor that represents en external event messaging buffer. -/// -internal sealed class ExternalMessageBufferActor : Actor, IExternalMessageBuffer -{ - private readonly IExternalKernelProcessMessageChannel _externalMessageChannel; - - /// - /// Required constructor for Dapr Actor. - /// - /// The actor host. - /// Instance of - public ExternalMessageBufferActor(ActorHost host, IExternalKernelProcessMessageChannel externalMessageChannel) : base(host) - { - this._externalMessageChannel = externalMessageChannel; - } - - public async Task EmitExternalEventAsync(string externalTopicEvent, object? eventData) - { - await this._externalMessageChannel.EmitExternalEventAsync(externalTopicEvent, eventData).ConfigureAwait(false); - } - - protected override async Task OnDeactivateAsync() - { - await this._externalMessageChannel.Uninitialize().ConfigureAwait(false); - } - - protected override async Task OnActivateAsync() - { - await this._externalMessageChannel.Initialize().ConfigureAwait(false); - } -} diff --git a/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActorWrapper.cs b/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActorWrapper.cs deleted file mode 100644 index 5de54a277d20..000000000000 --- a/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/ExternalMessageBufferActorWrapper.cs +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel; - -/// -/// Class used to allow using as -/// in SK Process shared abstractions -/// -public class ExternalMessageBufferActorWrapper : IExternalKernelProcessMessageChannel -{ - private readonly IExternalMessageBuffer _actor; - - /// - /// Constructor to wrap as - /// - /// The actor host. - public ExternalMessageBufferActorWrapper(IExternalMessageBuffer actor) - { - this._actor = actor; - } - - /// - public async Task EmitExternalEventAsync(string externalTopicEvent, object? eventData) - { - await this._actor.EmitExternalEventAsync(externalTopicEvent, eventData).ConfigureAwait(false); - } - - /// - public ValueTask Initialize() - { - // When using Dapr initialization is already taken care of by Dapr Actors - throw new System.NotImplementedException(); - } - - /// - public ValueTask Uninitialize() - { - // When using Dapr uninitialization is already taken care of by Dapr Actors - throw new System.NotImplementedException(); - } -} diff --git a/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/StepActor.cs b/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/StepActor.cs index 479687f97077..f5445bdf0afc 100644 --- a/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/StepActor.cs +++ b/dotnet/src/Experimental/Process.Runtime.Dapr/Actors/StepActor.cs @@ -338,15 +338,8 @@ protected virtual async ValueTask ActivateStepAsync() this._functions.Add(f.Name, f); } - // Creating external process channel actor to be used for external messaging by some steps - IExternalKernelProcessMessageChannel? externalMessageChannelActor = null; - var scopedExternalMessageBufferId = this.ScopedActorId(new ActorId(this.Id.GetId())); - var actor = this.ProxyFactory.CreateActorProxy(scopedExternalMessageBufferId, nameof(ExternalMessageBufferActor)); - externalMessageChannelActor = new ExternalMessageBufferActorWrapper(actor); - // Initialize the input channels - // TODO: Issue #10328 Cloud Events - new Step type dedicated to work as Proxy Step abstraction https://github.com/microsoft/semantic-kernel/issues/10328 - this._initialInputs = this.FindInputChannels(this._functions, this._logger, externalMessageChannelActor); + this._initialInputs = this.FindInputChannels(this._functions, this._logger); this._inputs = this._initialInputs.ToDictionary(kvp => kvp.Key, kvp => kvp.Value?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value)); // Activate the step with user-defined state if needed diff --git a/dotnet/src/Experimental/Process.Runtime.Dapr/DaprKernelProcessContext.cs b/dotnet/src/Experimental/Process.Runtime.Dapr/DaprKernelProcessContext.cs index b7425516863a..f09fa4f39222 100644 --- a/dotnet/src/Experimental/Process.Runtime.Dapr/DaprKernelProcessContext.cs +++ b/dotnet/src/Experimental/Process.Runtime.Dapr/DaprKernelProcessContext.cs @@ -67,10 +67,4 @@ public override async Task GetStateAsync() var daprProcessInfo = await this._daprProcess.GetProcessInfoAsync().ConfigureAwait(false); return daprProcessInfo.ToKernelProcess(); } - - /// - public override Task GetExternalMessageChannelAsync() - { - throw new NotImplementedException(); - } } diff --git a/dotnet/src/Experimental/Process.Runtime.Dapr/Interfaces/IExternalMessageBuffer.cs b/dotnet/src/Experimental/Process.Runtime.Dapr/Interfaces/IExternalMessageBuffer.cs deleted file mode 100644 index 5db64dbd6f68..000000000000 --- a/dotnet/src/Experimental/Process.Runtime.Dapr/Interfaces/IExternalMessageBuffer.cs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System.Threading.Tasks; -using Dapr.Actors; - -namespace Microsoft.SemanticKernel; - -// estenori-note: -// for some reason dapr doesn't like if instead public interface IExternalMessageBuffer : IActor, IExternalKernelProcessMessageChannelBase -// instead defining the interface component is necessary. To make it compatible with shared components a "casting" to IExternalKernelProcessMessageChannelEmitter -// is added in StepActor logic to make use of FindInputChannels - -/// -/// An interface for -/// -public interface IExternalMessageBuffer : IActor -{ - /// - /// Emits external events outside of the SK process - /// - /// - /// - /// - abstract Task EmitExternalEventAsync(string externalTopicEvent, object? eventData); -} diff --git a/dotnet/src/Experimental/Process.Runtime.Dapr/KernelProcessDaprExtensions.cs b/dotnet/src/Experimental/Process.Runtime.Dapr/KernelProcessDaprExtensions.cs index ad65b7f89c4f..52f86899d608 100644 --- a/dotnet/src/Experimental/Process.Runtime.Dapr/KernelProcessDaprExtensions.cs +++ b/dotnet/src/Experimental/Process.Runtime.Dapr/KernelProcessDaprExtensions.cs @@ -22,6 +22,5 @@ public static void AddProcessActors(this ActorRuntimeOptions actorOptions) actorOptions.Actors.RegisterActor(); actorOptions.Actors.RegisterActor(); actorOptions.Actors.RegisterActor(); - actorOptions.Actors.RegisterActor(); } } diff --git a/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateTests.cs b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateTests.cs index 8cbd6221be59..1bda62be5645 100644 --- a/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateTests.cs +++ b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateTests.cs @@ -511,41 +511,6 @@ public async Task ItTrustsAllTemplatesAsync() Assert.Equal(expected, result); } - [Fact] - public async Task ItRendersContentWithHtmlEntitiesAsync() - { - // Arrange - var template = - """ - Can you help me tell & the time in Seattle right now? - Sure! The time in Seattle is currently 3:00 PM. - What about New York? - """; - - var factory = new HandlebarsPromptTemplateFactory(options: new() { EnableHtmlDecoder = false }); - - var target = factory.Create(new PromptTemplateConfig(template) - { - TemplateFormat = HandlebarsPromptTemplateFactory.HandlebarsTemplateFormat, - }); - - // Act - var prompt = await target.RenderAsync(this._kernel); - bool result = ChatPromptParser.TryParse(prompt, out var chatHistory); - - // Assert - Assert.True(result); - Assert.NotNull(chatHistory); - Assert.Collection(chatHistory, - c => Assert.Equal(AuthorRole.User, c.Role), - c => Assert.Equal(AuthorRole.Assistant, c.Role), - c => Assert.Equal(AuthorRole.User, c.Role)); - Assert.Collection(chatHistory, - c => Assert.Equal("Can you help me tell & the time in Seattle right now?", c.Content), - c => Assert.Equal("Sure! 
The time in Seattle is currently 3:00 PM.", c.Content), - c => Assert.Equal("What about New York?", c.Content)); - } - #region private private HandlebarsPromptTemplateFactory _factory; diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplate.cs b/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplate.cs index e23dd3ddc628..d73bd85a15b9 100644 --- a/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplate.cs +++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplate.cs @@ -51,8 +51,7 @@ public async Task RenderAsync(Kernel kernel, KernelArguments? arguments this.RegisterHelpers(handlebarsInstance, kernel, arguments, cancellationToken); var template = handlebarsInstance.Compile(this._promptModel.Template); - var text = template(arguments).Trim(); - return this._options.EnableHtmlDecoder ? System.Net.WebUtility.HtmlDecode(text) : text; + return System.Net.WebUtility.HtmlDecode(template(arguments).Trim()); } #region private diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplateOptions.cs b/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplateOptions.cs index 852517a49510..78be0f2480eb 100644 --- a/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplateOptions.cs +++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplateOptions.cs @@ -48,11 +48,6 @@ public sealed class HandlebarsPromptTemplateOptions : HandlebarsHelpersOptions /// public Action? RegisterCustomHelpers { get; set; } - /// - /// Flag indicating whether to enable HTML decoding of the rendered template. - /// - public bool EnableHtmlDecoder { get; set; } = true; - /// /// Initializes a new instance of the class. /// diff --git a/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplate.cs b/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplate.cs index 2c5722f140a6..474cd99cbd56 100644 --- a/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplate.cs +++ b/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplate.cs @@ -32,11 +32,11 @@ internal sealed partial class LiquidPromptTemplate : IPromptTemplate private readonly Dictionary _inputVariables; #if NET - [GeneratedRegex(@"(?system|assistant|user|function|developer):\s+")] + [GeneratedRegex(@"(?system|assistant|user|function):\s+")] private static partial Regex RoleRegex(); #else private static Regex RoleRegex() => s_roleRegex; - private static readonly Regex s_roleRegex = new(@"(?system|assistant|user|function|developer):\s+", RegexOptions.Compiled); + private static readonly Regex s_roleRegex = new(@"(?system|assistant|user|function):\s+", RegexOptions.Compiled); #endif /// Initializes the . @@ -106,7 +106,7 @@ public async Task RenderAsync(Kernel kernel, KernelArguments? 
arguments // xxxx // // turn it into - // + // // xxxx // var splits = RoleRegex().Split(renderedResult); diff --git a/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs b/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs index fcea1ef3a387..91e229a1c246 100644 --- a/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs +++ b/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs @@ -5,7 +5,6 @@ using System.IO; using System.Linq; using System.Net.Http; -using System.Runtime.InteropServices; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; @@ -160,44 +159,13 @@ await DocumentLoader.LoadDocumentFromUriAsStreamAsync(parsedDescriptionUrl, #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var operationRunnerHttpClient = HttpClientProvider.GetHttpClient(openApiFunctionExecutionParameters?.HttpClient ?? kernel.Services.GetService()); #pragma warning restore CA2000 - static IDictionary? CopilotAgentPluginHeadersFactory(RestApiOperation operation, IDictionary arguments, RestApiOperationRunOptions? options) - { - var graphAllowedHosts = new[] - { - "graph.microsoft.com", - "graph.microsoft.us", - "dod-graph.microsoft.us", - "graph.microsoft.de", - "microsoftgraph.chinacloudapi.cn", - "canary.graph.microsoft.com", - "graph.microsoft-ppe.com" - }; - if (options?.ApiHostUrl?.Host is not { } hostString || !graphAllowedHosts.Contains(hostString)) - { - return null; - } - string frameworkDescription = RuntimeInformation.FrameworkDescription; - string osDescription = RuntimeInformation.OSDescription; - string copilotAgentPluginVersion = HttpHeaderConstant.Values.GetAssemblyVersion(typeof(CopilotAgentPluginKernelExtensions)); - var defaultHeaders = new Dictionary - { - // TODO: version and format updates - ["SdkVersion"] = $"copilot-agent-plugins/{copilotAgentPluginVersion}, (runtimeEnvironment={frameworkDescription}; hostOS={osDescription})", - ["client-request-id"] = Guid.NewGuid().ToString() - }; - - var currentHeaders = operation.BuildHeaders(arguments); - var finalHeaders = defaultHeaders.Concat(currentHeaders).ToDictionary(k => k.Key, v => v.Value); - return finalHeaders; - } var runner = new RestApiOperationRunner( operationRunnerHttpClient, openApiFunctionExecutionParameters?.AuthCallback, openApiFunctionExecutionParameters?.UserAgent, openApiFunctionExecutionParameters?.EnableDynamicPayload ?? false, - openApiFunctionExecutionParameters?.EnablePayloadNamespacing ?? true, - headersFactory: CopilotAgentPluginHeadersFactory); + openApiFunctionExecutionParameters?.EnablePayloadNamespacing ?? 
true); var info = OpenApiDocumentParser.ExtractRestApiInfo(filteredOpenApiDocument); var security = OpenApiDocumentParser.CreateRestApiOperationSecurityRequirements(filteredOpenApiDocument.SecurityRequirements); diff --git a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiFunctionExecutionParameters.cs b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiFunctionExecutionParameters.cs index 9fdd0d9389a3..899f19e64ce5 100644 --- a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiFunctionExecutionParameters.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiFunctionExecutionParameters.cs @@ -76,6 +76,7 @@ public class OpenApiFunctionExecutionParameters /// as a stream rather than as a string. /// If the custom reader is not provided, or the reader returns null, the internal reader is used. ///
+ [Experimental("SKEXP0040")] public HttpResponseContentReader? HttpResponseContentReader { get; set; } /// diff --git a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs index 2e7fb3d2214f..48f72a697a4a 100644 --- a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Diagnostics.CodeAnalysis; using System.IO; using System.Net.Http; using System.Threading; @@ -89,6 +90,7 @@ public static async Task ImportPluginFromOpenApiAsync( /// The specification model. /// The OpenAPI specification parsing and function execution parameters. /// A instance that contains functions corresponding to the operations defined in the OpenAPI specification. + [Experimental("SKEXP0040")] public static KernelPlugin ImportPluginFromOpenApi( this Kernel kernel, string pluginName, @@ -225,6 +227,7 @@ public static async Task CreatePluginFromOpenApiAsync( /// The OpenAPI specification parsing and function execution parameters. /// The cancellation token. /// A instance that contains functions corresponding to the operations defined in the OpenAPI specification. + [Experimental("SKEXP0040")] public static KernelPlugin CreatePluginFromOpenApi( this Kernel kernel, string pluginName, diff --git a/dotnet/src/Functions/Functions.OpenApi/Functions.OpenApi.csproj b/dotnet/src/Functions/Functions.OpenApi/Functions.OpenApi.csproj index a30e82b03e1d..1d72c971fcba 100644 --- a/dotnet/src/Functions/Functions.OpenApi/Functions.OpenApi.csproj +++ b/dotnet/src/Functions/Functions.OpenApi/Functions.OpenApi.csproj @@ -5,6 +5,7 @@ $(AssemblyName) net8.0;netstandard2.0 $(NoWarn);SKEXP0040 + preview diff --git a/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReader.cs b/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReader.cs index f92b58375c8c..bd268d5984c2 100644 --- a/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReader.cs +++ b/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReader.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; @@ -11,4 +12,5 @@ namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// The context containing HTTP operation details. /// The cancellation token. /// The HTTP response content. +[Experimental("SKEXP0040")] public delegate Task HttpResponseContentReader(HttpResponseContentReaderContext context, CancellationToken cancellationToken = default); diff --git a/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReaderContext.cs b/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReaderContext.cs index 077591c4d4be..221ba157fa0a 100644 --- a/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReaderContext.cs +++ b/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentReaderContext.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Diagnostics.CodeAnalysis; using System.Net.Http; namespace Microsoft.SemanticKernel.Plugins.OpenApi; @@ -7,6 +8,7 @@ namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// Represents the context for HTTP response content reader. 
/// +[Experimental("SKEXP0040")] public sealed class HttpResponseContentReaderContext { /// diff --git a/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs index 89bcf205cbc1..41cd8a3290e5 100644 --- a/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs @@ -170,7 +170,7 @@ internal IDictionary BuildHeaders(IDictionary a throw new KernelException($"The headers parameter '{parameterStyle}' serialization style is not supported."); } - var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument, parameter.Schema); + var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument); //Serializing the parameter and adding it to the headers. headers.Add(parameter.Name, serializer.Invoke(parameter, node)); @@ -206,7 +206,7 @@ internal string BuildQueryString(IDictionary arguments) throw new KernelException($"The query string parameter '{parameterStyle}' serialization style is not supported."); } - var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument, parameter.Schema); + var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument); // Serializing the parameter and adding it to the query string if there's an argument for it. segments.Add(serializer.Invoke(parameter, node)); @@ -274,7 +274,7 @@ private string BuildPath(string pathTemplate, IDictionary argum throw new KernelException($"The path parameter '{parameterStyle}' serialization style is not supported."); } - var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument, parameter.Schema); + var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument); // Serializing the parameter and adding it to the path. pathTemplate = pathTemplate.Replace($"{{{parameter.Name}}}", HttpUtility.UrlEncode(serializer.Invoke(parameter, node))); diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParser.cs b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParser.cs index 7912250715b4..4803d28e1e1b 100644 --- a/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParser.cs +++ b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParser.cs @@ -27,6 +27,7 @@ namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// Parser for OpenAPI documents. /// +[Experimental("SKEXP0040")] public sealed class OpenApiDocumentParser(ILoggerFactory? loggerFactory = null) { /// diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParserOptions.cs b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParserOptions.cs index a59b61257e4b..f012da455b4b 100644 --- a/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParserOptions.cs +++ b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParserOptions.cs @@ -1,12 +1,14 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Diagnostics.CodeAnalysis; namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// Options for OpenAPI document parser. 
/// +[Experimental("SKEXP0040")] public sealed class OpenApiDocumentParserOptions { /// diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenApi/OperationSelectionPredicateContext.cs b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OperationSelectionPredicateContext.cs index 0632a5186de1..ea3a0dab566d 100644 --- a/dotnet/src/Functions/Functions.OpenApi/OpenApi/OperationSelectionPredicateContext.cs +++ b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OperationSelectionPredicateContext.cs @@ -1,12 +1,14 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Diagnostics.CodeAnalysis; namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// Represents the context for an operation selection predicate. /// +[Experimental("SKEXP0040")] public readonly struct OperationSelectionPredicateContext : IEquatable { /// diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs b/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs index 0e69ffefcc16..63e0a2ce39ff 100644 --- a/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs +++ b/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Collections.ObjectModel; +using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.IO; using System.Linq; @@ -138,6 +139,7 @@ public static async Task CreateFromOpenApiAsync( /// The specification model. /// The OpenAPI specification parsing and function execution parameters. /// A instance that contains functions corresponding to the operations defined in the OpenAPI specification. + [Experimental("SKEXP0040")] public static KernelPlugin CreateFromOpenApi( string pluginName, RestApiSpecification specification, diff --git a/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs b/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs index deb242dbb1ca..0251abbb53e4 100644 --- a/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs +++ b/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs @@ -427,13 +427,13 @@ private JsonObject BuildJsonObject(IList properties, IDi // Use property argument name to look up the property value if (!string.IsNullOrEmpty(propertyMetadata.ArgumentName) && arguments.TryGetValue(propertyMetadata.ArgumentName!, out object? 
argument) && argument is not null) { - result.Add(propertyMetadata.Name, OpenApiTypeConverter.Convert(propertyMetadata.Name, propertyMetadata.Type, argument, propertyMetadata.Schema)); + result.Add(propertyMetadata.Name, OpenApiTypeConverter.Convert(propertyMetadata.Name, propertyMetadata.Type, argument)); continue; } // Use property name to look up the property value else if (arguments.TryGetValue(argumentName, out argument) && argument is not null) { - result.Add(propertyMetadata.Name, OpenApiTypeConverter.Convert(propertyMetadata.Name, propertyMetadata.Type, argument, propertyMetadata.Schema)); + result.Add(propertyMetadata.Name, OpenApiTypeConverter.Convert(propertyMetadata.Name, propertyMetadata.Type, argument)); continue; } diff --git a/dotnet/src/Functions/Functions.OpenApi/Serialization/OpenApiTypeConverter.cs b/dotnet/src/Functions/Functions.OpenApi/Serialization/OpenApiTypeConverter.cs index cadf0252cbea..5b7422950d15 100644 --- a/dotnet/src/Functions/Functions.OpenApi/Serialization/OpenApiTypeConverter.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Serialization/OpenApiTypeConverter.cs @@ -4,7 +4,6 @@ using System.Globalization; using System.Text.Json; using System.Text.Json.Nodes; -using Json.Schema; namespace Microsoft.SemanticKernel.Plugins.OpenApi; @@ -14,20 +13,19 @@ namespace Microsoft.SemanticKernel.Plugins.OpenApi; internal static class OpenApiTypeConverter { /// - /// Converts the given parameter argument to a JsonNode based on the specified type or schema. + /// Converts the given parameter argument to a JsonNode based on the specified type. /// /// The parameter name. /// The parameter type. /// The argument to be converted. - /// The parameter schema. /// A JsonNode representing the converted value. - public static JsonNode Convert(string name, string type, object argument, KernelJsonSchema? schema = null) + public static JsonNode Convert(string name, string type, object argument) { Verify.NotNull(argument); try { - JsonNode? node = type switch + JsonNode? converter = type switch { "string" => JsonValue.Create(argument), "array" => argument switch @@ -54,12 +52,10 @@ string stringArgument when double.TryParse(stringArgument, out var doubleValue) byte or sbyte or short or ushort or int or uint or long or ulong or float or double or decimal => JsonValue.Create(argument), _ => null }, - _ => schema is null - ? JsonSerializer.SerializeToNode(argument) - : ValidateSchemaAndConvert(name, schema, argument) + _ => throw new NotSupportedException($"Unexpected type '{type}' of parameter '{name}' with argument '{argument}'."), }; - return node ?? throw new ArgumentOutOfRangeException(name, argument, $"Argument type '{argument.GetType()}' is not convertible to parameter type '{type}'."); + return converter ?? throw new ArgumentOutOfRangeException(name, argument, $"Argument type '{argument.GetType()}' is not convertible to parameter type '{type}'."); } catch (ArgumentException ex) { @@ -70,25 +66,4 @@ string stringArgument when double.TryParse(stringArgument, out var doubleValue) throw new ArgumentOutOfRangeException(name, argument, ex.Message); } } - - /// - /// Validates the argument against the parameter schema and converts it to a JsonNode if valid. - /// - /// The parameter name. - /// The parameter schema. - /// The argument to be validated and converted. - /// A JsonNode representing the converted value. - private static JsonNode? 
ValidateSchemaAndConvert(string parameterName, KernelJsonSchema parameterSchema, object argument) - { - var jsonSchema = JsonSchema.FromText(JsonSerializer.Serialize(parameterSchema)); - - var node = JsonSerializer.SerializeToNode(argument); - - if (jsonSchema.Evaluate(node).IsValid) - { - return node; - } - - throw new ArgumentOutOfRangeException(parameterName, argument, $"Argument type '{argument.GetType()}' does not match the schema."); - } } diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs index a019f6bbfba9..f86a93fbce70 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs +++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs @@ -97,7 +97,7 @@ public void ChatPromptyShouldSupportCreatingOpenAIExecutionSettingsWithJsonObjec Assert.Equal(0, executionSettings.Temperature); Assert.Equal(1.0, executionSettings.TopP); Assert.Null(executionSettings.StopSequences); - Assert.Equal("{\"type\":\"json_object\"}", executionSettings.ResponseFormat?.ToString()); + Assert.Equal("json_object", executionSettings.ResponseFormat?.ToString()); Assert.Null(executionSettings.TokenSelectionBiases); Assert.Equal(3000, executionSettings.MaxTokens); Assert.Null(executionSettings.Seed); @@ -342,8 +342,7 @@ public void ItCreatesInputVariablesOnlyWhenNoneAreExplicitlySet() --- name: MyPrompt inputs: - - name: question - description: What is the color of the sky? + question: What is the color of the sky? --- {{a}} {{b}} {{c}} """; diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chat.prompty b/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chat.prompty index 82884c1ec198..e63680443db2 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chat.prompty +++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chat.prompty @@ -7,9 +7,9 @@ model: api: chat configuration: type: azure_openai + azure_deployment: gpt-35-turbo api_version: 2023-07-01-preview parameters: - model_id: gpt-35-turbo tools_choice: auto tools: - type: function diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty b/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty index f8eb9130ae28..ba095afeebfc 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty +++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty @@ -9,7 +9,6 @@ model: type: azure_openai azure_deployment: gpt-4o parameters: - model_id: gpt-4o temperature: 0.0 max_tokens: 3000 top_p: 1.0 diff --git a/dotnet/src/Functions/Functions.Prompty/Core/PromptyModel.cs b/dotnet/src/Functions/Functions.Prompty/Core/PromptyModel.cs new file mode 100644 index 000000000000..ece2eaabc219 --- /dev/null +++ b/dotnet/src/Functions/Functions.Prompty/Core/PromptyModel.cs @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft. All rights reserved. + +using YamlDotNet.Serialization; + +namespace Microsoft.SemanticKernel.Prompty.Core; + +internal sealed class PromptyModel +{ + [YamlMember(Alias = "api")] + public ApiType Api { get; set; } = ApiType.Chat; + + [YamlMember(Alias = "configuration")] + public PromptyModelConfig? ModelConfiguration { get; set; } + + [YamlMember(Alias = "parameters")] + public PromptyModelParameters? Parameters { get; set; } + + [YamlMember(Alias = "response")] + public string? 
Response { get; set; } +} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelConfig.cs b/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelConfig.cs new file mode 100644 index 000000000000..cb02862f71d1 --- /dev/null +++ b/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelConfig.cs @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft. All rights reserved. + +using YamlDotNet.Serialization; + +namespace Microsoft.SemanticKernel.Prompty.Core; + +internal sealed class PromptyModelConfig +{ + // azure open ai + [YamlMember(Alias = "type")] + public ModelType ModelType { get; set; } + + [YamlMember(Alias = "api_version")] + public string ApiVersion { get; set; } = "2023-12-01-preview"; + + [YamlMember(Alias = "azure_endpoint")] + public string? AzureEndpoint { get; set; } + + [YamlMember(Alias = "azure_deployment")] + public string? AzureDeployment { get; set; } + + [YamlMember(Alias = "api_key")] + public string? ApiKey { get; set; } + + //open ai props + [YamlMember(Alias = "name")] + public string? Name { get; set; } + + [YamlMember(Alias = "organization")] + public string? Organization { get; set; } +} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelParameters.cs b/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelParameters.cs new file mode 100644 index 000000000000..7699037d7466 --- /dev/null +++ b/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelParameters.cs @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using YamlDotNet.Serialization; + +namespace Microsoft.SemanticKernel.Prompty.Core; + +/// Parameters to be sent to the model. +internal sealed class PromptyModelParameters +{ + /// Specify the format for model output (e.g., JSON mode). + [YamlMember(Alias = "response_format")] + public PromptyResponseFormat? ResponseFormat { get; set; } + + /// Seed for deterministic sampling (Beta feature). + [YamlMember(Alias = "seed")] + public int? Seed { get; set; } + + /// Maximum number of tokens in chat completion. + [YamlMember(Alias = "max_tokens")] + public int? MaxTokens { get; set; } + + /// Sampling temperature (0 means deterministic). + [YamlMember(Alias = "temperature")] + public double? Temperature { get; set; } + + /// Controls which function the model calls (e.g., "none" or "auto"). + [YamlMember(Alias = "tools_choice")] + public string? ToolsChoice { get; set; } + + /// Array of tools (if applicable). + [YamlMember(Alias = "tools")] + public List? Tools { get; set; } + + /// Frequency penalty for sampling. + [YamlMember(Alias = "frequency_penalty")] + public double? FrequencyPenalty { get; set; } + + /// Presence penalty for sampling. + [YamlMember(Alias = "presence_penalty")] + public double? PresencePenalty { get; set; } + + /// Sequences where model stops generating tokens. + [YamlMember(Alias = "stop")] + public List? Stop { get; set; } + + /// Nucleus sampling probability (0 means no tokens generated). + [YamlMember(Alias = "top_p")] + public double? TopP { get; set; } +} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/PromptyResponseFormat.cs b/dotnet/src/Functions/Functions.Prompty/Core/PromptyResponseFormat.cs new file mode 100644 index 000000000000..c3c991903bb1 --- /dev/null +++ b/dotnet/src/Functions/Functions.Prompty/Core/PromptyResponseFormat.cs @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft. All rights reserved. + +using YamlDotNet.Serialization; + +namespace Microsoft.SemanticKernel.Prompty.Core; + +/// The response format of prompty. 
+internal sealed class PromptyResponseFormat +{ + /// The response format type (e.g: json_object). + [YamlMember(Alias = "type")] + public string? Type { get; set; } +} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/PromptyTool.cs b/dotnet/src/Functions/Functions.Prompty/Core/PromptyTool.cs new file mode 100644 index 000000000000..1bc0fefcb48d --- /dev/null +++ b/dotnet/src/Functions/Functions.Prompty/Core/PromptyTool.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. + +using YamlDotNet.Serialization; + +namespace Microsoft.SemanticKernel.Prompty.Core; + +internal sealed class PromptyTool +{ + [YamlMember(Alias = "id")] + public string? id { get; set; } + + [YamlMember(Alias = "type")] + public string? Type { get; set; } + + [YamlMember(Alias = "function")] + public PromptyFunction? Function { get; set; } +} + +internal sealed class PromptyFunction +{ + [YamlMember(Alias = "arguments")] + public string? Arguments { get; set; } + + [YamlMember(Alias = "name")] + public string? Name { get; set; } + + [YamlMember(Alias = "parameters")] + public PromptyParameters? Parameters { get; set; } + + [YamlMember(Alias = "description")] + public string? Description { get; set; } +} + +internal sealed class PromptyParameters +{ + [YamlMember(Alias = "description")] + public string? Description { get; set; } + + [YamlMember(Alias = "type")] + public string? Type { get; set; } + + [YamlMember(Alias = "properties")] + public object? Properties { get; set; } +} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/PromptyYaml.cs b/dotnet/src/Functions/Functions.Prompty/Core/PromptyYaml.cs new file mode 100644 index 000000000000..4af70817e742 --- /dev/null +++ b/dotnet/src/Functions/Functions.Prompty/Core/PromptyYaml.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using YamlDotNet.Serialization; + +namespace Microsoft.SemanticKernel.Prompty.Core; + +/// +/// Schema: https://github.com/Azure/azureml_run_specification/blob/master/schemas/Prompty.yaml +/// +internal sealed class PromptyYaml +{ + [YamlMember(Alias = "name")] + public string? Name { get; set; } + + [YamlMember(Alias = "description")] + public string? Description { get; set; } + + [YamlMember(Alias = "version")] + public string? Version { get; set; } + + [YamlMember(Alias = "tags")] + public List? Tags { get; set; } + + [YamlMember(Alias = "authors")] + public List? Authors { get; set; } + + [YamlMember(Alias = "inputs")] + public Dictionary? Inputs { get; set; } + + [YamlMember(Alias = "outputs")] + public Dictionary? Outputs { get; set; } + + [YamlMember(Alias = "sample")] + public object? Sample { get; set; } + + [YamlMember(Alias = "model")] + public PromptyModel? Model { get; set; } + + [YamlMember(Alias = "template")] + public string? Template { get; set; } = "liquid"; +} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/Types/ApiType.cs b/dotnet/src/Functions/Functions.Prompty/Core/Types/ApiType.cs new file mode 100644 index 000000000000..0076bf6b9983 --- /dev/null +++ b/dotnet/src/Functions/Functions.Prompty/Core/Types/ApiType.cs @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft. All rights reserved. 
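Editor's note: the new Prompty.Core classes above deserialize the .prompty YAML front matter directly with YamlDotNet rather than depending on the external Prompty.Core package. A self-contained sketch of that mapping follows; the PromptySketch and ModelSketch types only mirror the shapes added in this patch (the real classes are internal to Functions.Prompty), and the sample front matter is borrowed from the test data earlier in the diff.

```csharp
// Self-contained sketch of the YAML front-matter mapping; the types below are
// stand-ins that mirror the internal Prompty.Core model, not the real classes.
using System.Collections.Generic;
using YamlDotNet.Serialization;

const string header = """
name: MyPrompt
model:
  configuration:
    type: azure_openai
    azure_deployment: gpt-35-turbo
  parameters:
    temperature: 0.0
    max_tokens: 3000
inputs:
  question: What is the color of the sky?
""";

var prompty = new DeserializerBuilder().Build().Deserialize<PromptySketch>(header);
// prompty.Model.Configuration["azure_deployment"] -> "gpt-35-turbo"
// prompty.Inputs["question"] -> "What is the color of the sky?"

internal sealed class PromptySketch
{
    [YamlMember(Alias = "name")] public string? Name { get; set; }
    [YamlMember(Alias = "model")] public ModelSketch? Model { get; set; }
    [YamlMember(Alias = "inputs")] public Dictionary<string, object>? Inputs { get; set; }
}

internal sealed class ModelSketch
{
    [YamlMember(Alias = "configuration")] public Dictionary<string, object>? Configuration { get; set; }
    [YamlMember(Alias = "parameters")] public Dictionary<string, object>? Parameters { get; set; }
}
```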
+ +namespace Microsoft.SemanticKernel.Prompty.Core; + +internal enum ApiType +{ + Chat, + Completion, +} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/Types/ModelType.cs b/dotnet/src/Functions/Functions.Prompty/Core/Types/ModelType.cs new file mode 100644 index 000000000000..27c7383868ef --- /dev/null +++ b/dotnet/src/Functions/Functions.Prompty/Core/Types/ModelType.cs @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Prompty.Core; + +internal enum ModelType +{ + azure_openai, + openai, +} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/Types/ParserType.cs b/dotnet/src/Functions/Functions.Prompty/Core/Types/ParserType.cs new file mode 100644 index 000000000000..94d569f0ba89 --- /dev/null +++ b/dotnet/src/Functions/Functions.Prompty/Core/Types/ParserType.cs @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Prompty.Core; + +internal enum ParserType +{ + Chat, + Embedding, + Completion, + Image, +} diff --git a/dotnet/src/Functions/Functions.Prompty/Core/Types/RoleType.cs b/dotnet/src/Functions/Functions.Prompty/Core/Types/RoleType.cs new file mode 100644 index 000000000000..45cbb91eb1f0 --- /dev/null +++ b/dotnet/src/Functions/Functions.Prompty/Core/Types/RoleType.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Prompty.Core; + +internal enum RoleType +{ + assistant, + function, + system, + tool, + user, +} diff --git a/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj b/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj index 44ffa76868dc..7a63018ef572 100644 --- a/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj +++ b/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj @@ -18,7 +18,7 @@ - + \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.Prompty/KernelFunctionPrompty.cs b/dotnet/src/Functions/Functions.Prompty/KernelFunctionPrompty.cs index 55a74985f4be..003811934181 100644 --- a/dotnet/src/Functions/Functions.Prompty/KernelFunctionPrompty.cs +++ b/dotnet/src/Functions/Functions.Prompty/KernelFunctionPrompty.cs @@ -1,21 +1,41 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.RegularExpressions; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.PromptTemplates.Handlebars; using Microsoft.SemanticKernel.PromptTemplates.Liquid; -using PromptyCore = Prompty.Core; +using Microsoft.SemanticKernel.Prompty.Core; +using YamlDotNet.Serialization; namespace Microsoft.SemanticKernel.Prompty; /// /// Factory methods for creating instances. /// -public static class KernelFunctionPrompty +public static partial class KernelFunctionPrompty { /// Default template factory to use when none is provided. internal static readonly AggregatorPromptTemplateFactory s_defaultTemplateFactory = - new(new LiquidPromptTemplateFactory(), new HandlebarsPromptTemplateFactory(), new KernelPromptTemplateFactory()); + new(new LiquidPromptTemplateFactory(), new HandlebarsPromptTemplateFactory()); + + private const string PromptyPattern = /* lang=regex */ """ + ^---\s*$\n # Start of YAML front matter, a line beginning with "---" followed by optional whitespace + (?
.*?) # Capture the YAML front matter, everything up to the next "---" line + ^---\s*$\n # End of YAML front matter, a line beginning with "---" followed by optional whitespace + (?.*) # Capture the content after the YAML front matter + """; + + /// Regex for parsing the YAML frontmatter and content from the prompty template. +#if NET + [GeneratedRegex(PromptyPattern, RegexOptions.Multiline | RegexOptions.Singleline | RegexOptions.IgnorePatternWhitespace)] + private static partial Regex PromptyRegex(); +#else + private static Regex PromptyRegex() => s_promptyRegex; + private static readonly Regex s_promptyRegex = new(PromptyPattern, RegexOptions.Multiline | RegexOptions.Singleline | RegexOptions.IgnorePatternWhitespace | RegexOptions.Compiled); +#endif /// /// Creates a instance for a prompt function using the specified markdown text. @@ -51,56 +71,146 @@ public static PromptTemplateConfig ToPromptTemplateConfig(string promptyTemplate { Verify.NotNullOrWhiteSpace(promptyTemplate); - PromptyCore.Prompty prompty = PromptyCore.Prompty.Load(promptyTemplate, []); + // Step 1: + // Create PromptTemplateConfig from text. + // Retrieve the header, which is in yaml format and put between --- + // e.g + // file: chat.prompty + // --- + // name: Contoso Chat Prompt + // description: A retail assistant for Contoso Outdoors products retailer. + // authors: + // - XXXX + // model: + // api: chat + // configuration: + // type: azure_openai + // azure_deployment: gpt-35-turbo + // api_version: 2023-07-01-preview + // parameters: + // tools_choice: auto + // tools: + // -type: function + // function: + // name: test + // description: test function + // parameters: + // properties: + // location: + // description: The city and state or city and country, e.g.San Francisco, CA + // or Tokyo, Japan + // --- + // ... (rest of the prompty content) + + // Parse the YAML frontmatter and content from the prompty template + Match m = PromptyRegex().Match(promptyTemplate); + if (!m.Success) + { + throw new ArgumentException("Invalid prompty template. Header and content could not be parsed."); + } + var header = m.Groups["header"].Value; + var content = m.Groups["content"].Value; + + var prompty = new DeserializerBuilder().Build().Deserialize(header) ?? + throw new ArgumentException("Invalid prompty template. Header could not be parsed."); + + // Step 2: + // Create a prompt template config from the prompty data. var promptTemplateConfig = new PromptTemplateConfig { - Name = prompty.Name, + Name = prompty.Name, // TODO: sanitize name Description = prompty.Description, - Template = prompty.Content.ToString() ?? string.Empty, + Template = content, }; PromptExecutionSettings? defaultExecutionSetting = null; - if (prompty.Model?.Parameters?.Items is not null && prompty.Model.Parameters.Items.Count > 0) + if (prompty.Model?.ModelConfiguration?.ModelType is ModelType.azure_openai or ModelType.openai) { - defaultExecutionSetting = new PromptExecutionSettings() + defaultExecutionSetting = new PromptExecutionSettings { - ServiceId = prompty.Model.Parameters.Items.TryGetValue("service_id", out var serviceId) && serviceId is string serviceIdStr ? serviceIdStr : null, - ModelId = prompty.Model.Parameters.Items.TryGetValue("model_id", out var modelId) && modelId is string modelIdStr ? modelIdStr : null, - ExtensionData = prompty.Model.Parameters.Items + ModelId = prompty.Model.ModelConfiguration.ModelType is ModelType.azure_openai ? 
+ prompty.Model.ModelConfiguration.AzureDeployment : + prompty.Model.ModelConfiguration.Name }; + + var extensionData = new Dictionary(); + + if (prompty.Model?.Parameters?.Temperature is double temperature) + { + extensionData.Add("temperature", temperature); + } + + if (prompty.Model?.Parameters?.TopP is double topP) + { + extensionData.Add("top_p", topP); + } + + if (prompty.Model?.Parameters?.MaxTokens is int maxTokens) + { + extensionData.Add("max_tokens", maxTokens); + } + + if (prompty.Model?.Parameters?.Seed is int seed) + { + extensionData.Add("seed", seed); + } + + if (prompty.Model?.Parameters?.FrequencyPenalty is double frequencyPenalty) + { + extensionData.Add("frequency_penalty", frequencyPenalty); + } + + if (prompty.Model?.Parameters?.PresencePenalty is double presencePenalty) + { + extensionData.Add("presence_penalty", presencePenalty); + } + + if (prompty.Model?.Parameters?.Stop is List stop) + { + extensionData.Add("stop_sequences", stop); + } + + if (prompty.Model?.Parameters?.ResponseFormat?.Type == "json_object") + { + extensionData.Add("response_format", "json_object"); + } + + defaultExecutionSetting.ExtensionData = extensionData; promptTemplateConfig.AddExecutionSettings(defaultExecutionSetting); } + // Step 3: // Add input and output variables. if (prompty.Inputs is not null) { foreach (var input in prompty.Inputs) { - if (input.Items.TryGetValue("name", out var value) && value is string name) + if (input.Value is string description) { - string description = input.Items.TryGetValue("description", out var desc) && desc is string descStr ? descStr : string.Empty; promptTemplateConfig.InputVariables.Add(new() { - Name = name, + Name = input.Key, Description = description, }); } } } + if (prompty.Outputs is not null) { // PromptTemplateConfig supports only a single output variable. If the prompty template // contains one and only one, use it. Otherwise, ignore any outputs. - if (prompty.Outputs.Length == 1 && - prompty.Outputs[0].Items.TryGetValue("description", out var value) && value is string description) + if (prompty.Outputs.Count == 1 && + prompty.Outputs.First().Value is string description) { promptTemplateConfig.OutputVariable = new() { Description = description }; } } + // Step 4: // Update template format. If not provided, use Liquid as default. - promptTemplateConfig.TemplateFormat = prompty.Template?.Type ?? LiquidPromptTemplateFactory.LiquidTemplateFormat; + promptTemplateConfig.TemplateFormat = prompty.Template ?? 
LiquidPromptTemplateFactory.LiquidTemplateFormat; return promptTemplateConfig; } diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/CopilotAgentPluginKernelExtensionsTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/CopilotAgentPluginKernelExtensionsTests.cs index 34abf6f5631e..e55e478c25a2 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/CopilotAgentPluginKernelExtensionsTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/CopilotAgentPluginKernelExtensionsTests.cs @@ -23,7 +23,7 @@ public async Task ItCanImportPluginFromCopilotAgentPluginAsync() // Assert Assert.NotNull(plugin); Assert.Equal(2, plugin.FunctionCount); - Assert.Equal(411, plugin["me_sendMail"].Description.Length); + Assert.Equal(683, plugin["me_CreateMessages"].Description.Length); Assert.Equal(1000, plugin["me_ListMessages"].Description.Length); } } diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/OpenApiTypeConverterTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/OpenApiTypeConverterTests.cs index 1982dd1a5a59..9331bb0b55a2 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/OpenApiTypeConverterTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/OpenApiTypeConverterTests.cs @@ -3,7 +3,6 @@ using System; using System.Collections.Generic; using System.Globalization; -using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Plugins.OpenApi; using Microsoft.VisualBasic; using Xunit; @@ -113,50 +112,4 @@ public void ItShouldConvertCollections() Assert.Equal("[1,2,3]", OpenApiTypeConverter.Convert("id", "array", "[1, 2, 3]").ToJsonString()); } - - [Fact] - public void ItShouldConvertWithNoTypeAndNoSchema() - { - // Act - var result = OpenApiTypeConverter.Convert("lat", null!, 51.8985136); - - // Assert - Assert.Equal(51.8985136, result.GetValue()); - } - - [Fact] - public void ItShouldConvertWithNoTypeAndValidSchema() - { - // Arrange - var schema = KernelJsonSchema.Parse( - """ - { - "type": "number", - "format": "double", - "nullable": false - } - """); - - // Act - var result = OpenApiTypeConverter.Convert("lat", null!, 51.8985136, schema); - - // Assert - Assert.Equal(51.8985136, result.GetValue()); - } - - [Fact] - public void ItShouldThrowExceptionWhenNoTypeAndInvalidSchema() - { - // Arrange - var schema = KernelJsonSchema.Parse( - """ - { - "type": "boolean", - "nullable": false - } - """); - - // Act & Assert - Assert.Throws(() => OpenApiTypeConverter.Convert("lat", null!, 51.8985136, schema)); - } } diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-apiplugin.json b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-apiplugin.json index ba3827350891..7994cc32ce9e 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-apiplugin.json +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-apiplugin.json @@ -1,4 +1,4 @@ -{ +{ "$schema": "https://developer.microsoft.com/json-schemas/copilot/plugin/v2.1/schema.json", "schema_version": "v2.1", "name_for_human": "OData Service for namespace microsoft.graph", @@ -12,14 +12,14 @@ "text": "List messages" }, { - "text": "Send an email from the current user's mailbox" + "text": "Create message" } ] }, "functions": [ { - "name": "me_sendMail", - "description": "Send the message specified in the request body using either JSON or MIME format. 
When using JSON format, you can include a file attachment in the same sendMail action call. When using MIME format: This method saves the message in the Sent Items folder. Alternatively, create a draft message to send later. To learn more about the steps involved in the backend before a mail is delivered to recipients, see here." + "name": "me_CreateMessages", + "description": "Create a draft of a new message in either JSON or MIME format. When using JSON format, you can:\n- Include an attachment to the message.\n- Update the draft later to add content to the body or change other message properties. When using MIME format:\n- Provide the applicable Internet message headers and the MIME content, all encoded in base64 format in the request body.\n- /* Add any attachments and S/MIME properties to the MIME content. By default, this operation saves the draft in the Drafts folder. Send the draft message in a subsequent operation. Alternatively, send a new message in a single operation, or create a draft to forward, reply and reply-all to an existing message." }, { "name": "me_ListMessages", @@ -35,7 +35,10 @@ "spec": { "url": "messages-openapi.yml" }, - "run_for_functions": ["me_ListMessages", "me_sendMail"] + "run_for_functions": [ + "me_ListMessages", + "me_CreateMessages" + ] } ] -} +} \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-openapi.yml b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-openapi.yml index 322b38a9e5a9..127ceff0eaa2 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-openapi.yml +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/messages-openapi.yml @@ -1,4 +1,4 @@ -openapi: 3.0.1 +openapi: 3.0.1 info: title: OData Service for namespace microsoft.graph - Subset description: This OData service is located at https://graph.microsoft.com/v1.0 @@ -10,8 +10,8 @@ paths: get: tags: - me.message - summary: Get the messages in the signed-in user\u0026apos;s mailbox - description: Get the messages in the signed-in user\u0026apos;s mailbox (including the Deleted Items and Clutter folders). Depending on the page size and mailbox data, getting messages from a mailbox can incur multiple requests. The default page size is 10 messages. Use $top to customize the page size, within the range of 1 and 1000. To improve the operation response time, use $select to specify the exact properties you need; see example 1 below. Fine-tune the values for $select and $top, especially when you must use a larger page size, as returning a page with hundreds of messages each with a full response payload may trigger the gateway timeout (HTTP 504). To get the next page of messages, simply apply the entire URL returned in @odata.nextLink to the next get-messages request. This URL includes any query parameters you may have specified in the initial request. Do not try to extract the $skip value from the @odata.nextLink URL to manipulate responses. This API uses the $skip value to keep count of all the items it has gone through in the user\u0026apos;s mailbox to return a page of message-type items. It\u0026apos;s therefore possible that even in the initial response, the $skip value is larger than the page size. For more information, see Paging Microsoft Graph data in your app. Currently, this operation returns message bodies in only HTML format. 
There are two scenarios where an app can get messages in another user\u0026apos;s mail folder + summary: List messages + description: 'Get the messages in the signed-in user''s mailbox (including the Deleted Items and Clutter folders). Depending on the page size and mailbox data, getting messages from a mailbox can incur multiple requests. The default page size is 10 messages. Use $top to customize the page size, within the range of 1 and 1000. To improve the operation response time, use $select to specify the exact properties you need; see example 1 below. Fine-tune the values for $select and $top, especially when you must use a larger page size, as returning a page with hundreds of messages each with a full response payload may trigger the gateway timeout (HTTP 504). To get the next page of messages, simply apply the entire URL returned in @odata.nextLink to the next get-messages request. This URL includes any query parameters you may have specified in the initial request. Do not try to extract the $skip value from the @odata.nextLink URL to manipulate responses. This API uses the $skip value to keep count of all the items it has gone through in the user''s mailbox to return a page of message-type items. It''s therefore possible that even in the initial response, the $skip value is larger than the page size. For more information, see Paging Microsoft Graph data in your app. Currently, this operation returns message bodies in only HTML format. There are two scenarios where an app can get messages in another user''s mail folder:' operationId: me_ListMessages parameters: - name: includeHiddenMessages @@ -63,18 +63,26 @@ paths: nextLinkName: '@odata.nextLink' operationName: listMore itemName: value - /me/sendMail: post: tags: - - me.user.Actions - summary: Invoke action sendMail - description: 'Send the message specified in the request body using either JSON or MIME format. When using JSON format, you can include a file attachment in the same sendMail action call. When using MIME format: This method saves the message in the Sent Items folder. Alternatively, create a draft message to send later. To learn more about the steps involved in the backend before a mail is delivered to recipients, see here.' - operationId: me_sendMail + - me.message + summary: Create message + description: "Create a draft of a new message in either JSON or MIME format. When using JSON format, you can:\n- Include an attachment to the message.\n- Update the draft later to add content to the body or change other message properties. When using MIME format:\n- Provide the applicable Internet message headers and the MIME content, all encoded in base64 format in the request body.\n- /* Add any attachments and S/MIME properties to the MIME content. By default, this operation saves the draft in the Drafts folder. Send the draft message in a subsequent operation. Alternatively, send a new message in a single operation, or create a draft to forward, reply and reply-all to an existing message." + operationId: me_CreateMessages requestBody: - $ref: '#/components/requestBodies/sendMailRequestBody' + description: New navigation property + content: + application/json: + schema: + $ref: '#/components/schemas/microsoft.graph.message' + required: true responses: - '204': - description: Success + 2XX: + description: Created navigation property. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/microsoft.graph.message' components: schemas: microsoft.graph.message: @@ -496,18 +504,3 @@ components: explode: false schema: type: boolean - requestBodies: - sendMailRequestBody: - description: Action parameters - content: - application/json: - schema: - type: object - properties: - Message: - $ref: '#/components/schemas/microsoft.graph.message' - SaveToSentItems: - type: boolean - default: false - nullable: true - required: true diff --git a/dotnet/src/IntegrationTests/Agents/BedrockAgentTests.cs b/dotnet/src/IntegrationTests/Agents/BedrockAgentTests.cs deleted file mode 100644 index 1e4363f21ce8..000000000000 --- a/dotnet/src/IntegrationTests/Agents/BedrockAgentTests.cs +++ /dev/null @@ -1,238 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Linq; -using System.Threading.Tasks; -using Amazon.BedrockAgent; -using Amazon.BedrockAgent.Model; -using Microsoft.Extensions.Configuration; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents.Bedrock; -using Microsoft.SemanticKernel.Agents.Bedrock.Extensions; -using SemanticKernel.IntegrationTests.TestSettings; -using Xunit; - -namespace SemanticKernel.IntegrationTests.Agents; - -#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. - -public sealed class BedrockAgentTests : IDisposable -{ - private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) - .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) - .AddEnvironmentVariables() - .AddUserSecrets() - .Build(); - - private readonly AmazonBedrockAgentClient _client = new(); - - /// - /// Integration test for invoking a . - /// - [Theory(Skip = "This test is for manual verification.")] - [InlineData("Why is the sky blue in one sentence?")] - public async Task InvokeTestAsync(string input) - { - var agentModel = await this._client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest()); - var bedrockAgent = new BedrockAgent(agentModel, this._client); - - try - { - await this.ExecuteAgentAsync(bedrockAgent, input); - } - finally - { - await this._client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id }); - } - } - - /// - /// Integration test for invoking a with streaming. - /// - [Theory(Skip = "This test is for manual verification.")] - [InlineData("Why is the sky blue in one sentence?")] - public async Task InvokeStreamingTestAsync(string input) - { - var agentModel = await this._client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest()); - var bedrockAgent = new BedrockAgent(agentModel, this._client); - - try - { - await this.ExecuteAgentStreamingAsync(bedrockAgent, input); - } - finally - { - await this._client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id }); - } - } - - /// - /// Integration test for invoking a with code interpreter. 
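Editor's note: the reshaped operation above replaces the old sendMail action with a plain create-message POST under the messages path; the request body is a microsoft.graph.message and a 2XX response returns the created draft. A hedged illustration of such a body follows; the property names are assumed from the Microsoft Graph message schema, which is only partially included in this patch, not something this change defines.

```csharp
// Illustrative request body for the me_CreateMessages operation shown above.
// Property names (subject, body.contentType, body.content) are assumptions
// based on the Microsoft Graph message schema.
using System;
using System.Text.Json.Nodes;

var message = new JsonObject
{
    ["subject"] = "Quarterly numbers",
    ["body"] = new JsonObject
    {
        ["contentType"] = "Text",
        ["content"] = "Draft created via the me_CreateMessages operation."
    }
};

// Sent as the JSON request body; a 2XX response echoes the created message.
Console.WriteLine(message.ToJsonString());
```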
- /// - [Theory(Skip = "This test is for manual verification.")] - [InlineData(@"Create a bar chart for the following data: -Panda 5 -Tiger 8 -Lion 3 -Monkey 6 -Dolphin 2")] - public async Task InvokeWithCodeInterpreterTestAsync(string input) - { - var agentModel = await this._client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest()); - var bedrockAgent = new BedrockAgent(agentModel, this._client); - await bedrockAgent.CreateCodeInterpreterActionGroupAsync(); - - try - { - var responses = await this.ExecuteAgentAsync(bedrockAgent, input); - BinaryContent? binaryContent = null; - foreach (var response in responses) - { - if (binaryContent == null && response.Items.Count > 0) - { - binaryContent = response.Items.OfType().FirstOrDefault(); - } - } - Assert.NotNull(binaryContent); - } - finally - { - await this._client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id }); - } - } - - /// - /// Integration test for invoking a with Kernel functions. - /// - [Theory(Skip = "This test is for manual verification.")] - [InlineData("What is the current weather in Seattle and what is the weather forecast in Seattle?", "weather")] - public async Task InvokeWithKernelFunctionTestAsync(string input, string expected) - { - Kernel kernel = new(); - kernel.Plugins.Add(KernelPluginFactory.CreateFromType()); - - var agentModel = await this._client.CreateAndPrepareAgentAsync(this.GetCreateAgentRequest()); - var bedrockAgent = new BedrockAgent(agentModel, this._client) - { - Kernel = kernel, - }; - await bedrockAgent.CreateKernelFunctionActionGroupAsync(); - - try - { - await this.ExecuteAgentAsync(bedrockAgent, input, expected); - } - finally - { - await this._client.DeleteAgentAsync(new() { AgentId = bedrockAgent.Id }); - } - } - - /// - /// Executes a with the specified input and expected output. - /// The output of the agent will be verified against the expected output. - /// If the expected output is not provided, the verification will pass as long as the output is not null or empty. - /// - /// The agent to execute. - /// The input to provide to the agent. - /// The expected output from the agent. - /// The chat messages returned by the agent for additional verification. - private async Task> ExecuteAgentAsync(BedrockAgent agent, string input, string? expected = null) - { - var responses = agent.InvokeAsync(BedrockAgent.CreateSessionId(), input, null, default); - string responseContent = string.Empty; - List chatMessages = new(); - await foreach (var response in responses) - { - // Non-streaming invoke will only return one response. - responseContent = response.Content ?? string.Empty; - chatMessages.Add(response); - } - - if (expected != null) - { - Assert.Contains(expected, responseContent); - } - else - { - Assert.False(string.IsNullOrEmpty(responseContent)); - } - - return chatMessages; - } - - /// - /// Executes a with the specified input and expected output using streaming. - /// The output of the agent will be verified against the expected output. - /// If the expected output is not provided, the verification will pass as long as the output is not null or empty. - /// - /// The agent to execute. - /// The input to provide to the agent. - /// The expected output from the agent. - /// The chat messages returned by the agent for additional verification. - private async Task> ExecuteAgentStreamingAsync(BedrockAgent agent, string input, string? 
expected = null) - { - var responses = agent.InvokeStreamingAsync(BedrockAgent.CreateSessionId(), input, null, default); - string responseContent = string.Empty; - List chatMessages = new(); - await foreach (var response in responses) - { - responseContent = response.Content ?? string.Empty; - chatMessages.Add(response); - } - - if (expected != null) - { - Assert.Contains(expected, responseContent); - } - else - { - Assert.False(string.IsNullOrEmpty(responseContent)); - } - - return chatMessages; - } - - private const string AgentName = "SKIntegrationTestAgent"; - private const string AgentDescription = "A helpful assistant who helps users find information."; - private const string AgentInstruction = "You're a helpful assistant who helps users find information."; - private CreateAgentRequest GetCreateAgentRequest() - { - BedrockAgentConfiguration bedrockAgentSettings = this._configuration.GetSection("BedrockAgent").Get()!; - Assert.NotNull(bedrockAgentSettings); - - return new() - { - AgentName = AgentName, - Description = AgentDescription, - Instruction = AgentInstruction, - AgentResourceRoleArn = bedrockAgentSettings.AgentResourceRoleArn, - FoundationModel = bedrockAgentSettings.FoundationModel, - }; - } - - public void Dispose() - { - this._client.Dispose(); - } - -#pragma warning disable CA1812 // Avoid uninstantiated internal classes - private sealed class WeatherPlugin - { - [KernelFunction, Description("Provides realtime weather information.")] - public string Current([Description("The location to get the weather for.")] string location) - { - return $"The current weather in {location} is 72 degrees."; - } - - [KernelFunction, Description("Forecast weather information.")] - public string Forecast([Description("The location to get the weather for.")] string location) - { - return $"The forecast for {location} is 75 degrees tomorrow."; - } - } -#pragma warning restore CA1812 // Avoid uninstantiated internal classes -} diff --git a/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs b/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs index dd39b660966d..9be5610f2abd 100644 --- a/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs +++ b/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs @@ -12,7 +12,6 @@ using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.OpenAI; -using OpenAI.Assistants; using SemanticKernel.IntegrationTests.TestSettings; using xRetry; using Xunit; @@ -72,7 +71,7 @@ await this.VerifyAgentExecutionAsync( private async Task VerifyAgentExecutionAsync( Kernel chatCompletionKernel, - OpenAIClientProvider clientProvider, + OpenAIClientProvider config, string modelName, bool useNewFunctionCallingModel) { @@ -95,8 +94,16 @@ private async Task VerifyAgentExecutionAsync( chatAgent.Kernel.Plugins.Add(plugin); // Configure assistant agent with the plugin. - Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(modelName, instructions: "Answer questions about the menu."); - OpenAIAssistantAgent assistantAgent = new(definition, clientProvider.AssistantClient, [plugin]); + OpenAIAssistantAgent assistantAgent = + await OpenAIAssistantAgent.CreateAsync( + config, + new(modelName) + { + Name = "Assistant", + Instructions = "Answer questions about the menu." 
+ }, + new Kernel()); + assistantAgent.Kernel.Plugins.Add(plugin); // Act & Assert try @@ -107,7 +114,7 @@ private async Task VerifyAgentExecutionAsync( } finally { - await clientProvider.AssistantClient.DeleteAssistantAsync(assistantAgent.Id); + await assistantAgent.DeleteAsync(); } } diff --git a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs index ad63eab6b795..aa5fcbeef785 100644 --- a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs +++ b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs @@ -12,7 +12,6 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Assistants; using OpenAI.Files; using OpenAI.VectorStores; using SemanticKernel.IntegrationTests.TestSettings; @@ -94,10 +93,11 @@ await this.ExecuteStreamingAgentAsync( [InlineData("What is the special soup?", "Clam Chowder")] public async Task AzureOpenAIAssistantAgentStreamingAsync(string input, string expectedAnswerContains) { - AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration(); + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); await this.ExecuteStreamingAgentAsync( - CreateClientProvider(azureOpenAIConfiguration), + OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)), azureOpenAIConfiguration.ChatDeploymentName!, input, expectedAnswerContains); @@ -110,23 +110,27 @@ await this.ExecuteStreamingAgentAsync( [RetryFact(typeof(HttpOperationException))] public async Task AzureOpenAIAssistantAgentFunctionCallResultAsync() { - AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration(); - OpenAIClientProvider clientProvider = CreateClientProvider(azureOpenAIConfiguration); - Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(azureOpenAIConfiguration.ChatDeploymentName!); - OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient); + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)), + new(azureOpenAIConfiguration.ChatDeploymentName!), + new Kernel()); - AssistantThread thread = await clientProvider.AssistantClient.CreateThreadAsync(); + string threadId = await agent.CreateThreadAsync(); ChatMessageContent functionResultMessage = new(AuthorRole.Assistant, [new FunctionResultContent("mock-function", result: "A result value")]); try { - await agent.AddChatMessageAsync(thread.Id, functionResultMessage); - var messages = await agent.GetThreadMessagesAsync(thread.Id).ToArrayAsync(); + await agent.AddChatMessageAsync(threadId, functionResultMessage); + var messages = await agent.GetThreadMessagesAsync(threadId).ToArrayAsync(); Assert.Single(messages); } finally { - await clientProvider.AssistantClient.DeleteThreadAsync(thread.Id); - await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id); + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); } } @@ -137,28 +141,33 @@ public async Task AzureOpenAIAssistantAgentFunctionCallResultAsync() [RetryFact(typeof(HttpOperationException))] public async Task 
AzureOpenAIAssistantAgentTokensAsync() { - AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration(); - OpenAIClientProvider clientProvider = CreateClientProvider(azureOpenAIConfiguration); - Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(azureOpenAIConfiguration.ChatDeploymentName!, instructions: "Repeat the user all of the user messages"); - OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient) - { - RunOptions = new() - { - MaxOutputTokenCount = 16, - } - }; + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); - AssistantThread thread = await clientProvider.AssistantClient.CreateThreadAsync(); + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)), + new(azureOpenAIConfiguration.ChatDeploymentName!) + { + Instructions = "Repeat the user all of the user messages", + ExecutionOptions = new() + { + MaxCompletionTokens = 16, + } + }, + new Kernel()); + + string threadId = await agent.CreateThreadAsync(); ChatMessageContent functionResultMessage = new(AuthorRole.User, "A long time ago there lived a king who was famed for his wisdom through all the land. Nothing was hidden from him, and it seemed as if news of the most secret things was brought to him through the air. But he had a strange custom; every day after dinner, when the table was cleared, and no one else was present, a trusty servant had to bring him one more dish. It was covered, however, and even the servant did not know what was in it, neither did anyone know, for the king never took off the cover to eat of it until he was quite alone."); try { - await agent.AddChatMessageAsync(thread.Id, functionResultMessage); - await Assert.ThrowsAsync(() => agent.InvokeAsync(thread.Id).ToArrayAsync().AsTask()); + await agent.AddChatMessageAsync(threadId, functionResultMessage); + await Assert.ThrowsAsync(() => agent.InvokeAsync(threadId).ToArrayAsync().AsTask()); } finally { - await clientProvider.AssistantClient.DeleteThreadAsync(thread.Id); - await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id); + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); } } @@ -169,45 +178,48 @@ public async Task AzureOpenAIAssistantAgentTokensAsync() [RetryFact(typeof(HttpOperationException))] public async Task AzureOpenAIAssistantAgentAdditionalMessagesAsync() { - AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration(); - OpenAIClientProvider clientProvider = CreateClientProvider(azureOpenAIConfiguration); - Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(azureOpenAIConfiguration.ChatDeploymentName!); - OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient); + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)), + new(azureOpenAIConfiguration.ChatDeploymentName!), + new Kernel()); - ThreadCreationOptions threadOptions = new() + OpenAIThreadCreationOptions threadOptions = new() { - InitialMessages = - { - new ChatMessageContent(AuthorRole.User, "Hello").ToThreadInitializationMessage(), - new 
ChatMessageContent(AuthorRole.User, "How may I help you?").ToThreadInitializationMessage(), - } + Messages = [ + new ChatMessageContent(AuthorRole.User, "Hello"), + new ChatMessageContent(AuthorRole.Assistant, "How may I help you?"), + ] }; - AssistantThread thread = await clientProvider.AssistantClient.CreateThreadAsync(threadOptions); + string threadId = await agent.CreateThreadAsync(threadOptions); try { - var messages = await agent.GetThreadMessagesAsync(thread.Id).ToArrayAsync(); + var messages = await agent.GetThreadMessagesAsync(threadId).ToArrayAsync(); Assert.Equal(2, messages.Length); - RunCreationOptions invocationOptions = new() + OpenAIAssistantInvocationOptions invocationOptions = new() { - AdditionalMessages = { - new ChatMessageContent(AuthorRole.User, "This is my real question...in three parts:").ToThreadInitializationMessage(), - new ChatMessageContent(AuthorRole.User, "Part 1").ToThreadInitializationMessage(), - new ChatMessageContent(AuthorRole.User, "Part 2").ToThreadInitializationMessage(), - new ChatMessageContent(AuthorRole.User, "Part 3").ToThreadInitializationMessage(), - } + AdditionalMessages = [ + new ChatMessageContent(AuthorRole.User, "This is my real question...in three parts:"), + new ChatMessageContent(AuthorRole.User, "Part 1"), + new ChatMessageContent(AuthorRole.User, "Part 2"), + new ChatMessageContent(AuthorRole.User, "Part 3"), + ] }; - messages = await agent.InvokeAsync(thread.Id, invocationOptions).ToArrayAsync(); + messages = await agent.InvokeAsync(threadId, invocationOptions).ToArrayAsync(); Assert.Single(messages); - messages = await agent.GetThreadMessagesAsync(thread.Id).ToArrayAsync(); + messages = await agent.GetThreadMessagesAsync(threadId).ToArrayAsync(); Assert.Equal(7, messages.Length); } finally { - await clientProvider.AssistantClient.DeleteThreadAsync(thread.Id); - await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id); + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); } } @@ -218,18 +230,23 @@ public async Task AzureOpenAIAssistantAgentAdditionalMessagesAsync() [Fact] public async Task AzureOpenAIAssistantAgentStreamingFileSearchAsync() { - AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration(); - OpenAIClientProvider clientProvider = CreateClientProvider(azureOpenAIConfiguration); - Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(azureOpenAIConfiguration.ChatDeploymentName!); - OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient); + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + + OpenAIClientProvider provider = OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)); + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + provider, + new(azureOpenAIConfiguration.ChatDeploymentName!), + new Kernel()); // Upload file - Using a table of fictional employees. 
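Editor's note: the updated assistant-agent tests in this hunk all follow the same create/use/clean-up shape. A condensed sketch of that pattern, pieced together from the added lines above, is shown below; the endpoint and deployment name are placeholders that the tests read from configuration.

```csharp
// Condensed from the updated test bodies above; endpoint and deployment are placeholders.
using System;
using Azure.Identity;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;

OpenAIClientProvider provider = OpenAIClientProvider.ForAzureOpenAI(
    new AzureCliCredential(), new Uri("https://<resource>.openai.azure.com"));

OpenAIAssistantAgent agent =
    await OpenAIAssistantAgent.CreateAsync(
        provider,
        new("<chat-deployment>") { Instructions = "Answer questions about the menu." },
        new Kernel());

string threadId = await agent.CreateThreadAsync();
try
{
    await agent.AddChatMessageAsync(threadId, new ChatMessageContent(AuthorRole.User, "What is the special soup?"));
    await foreach (ChatMessageContent message in agent.InvokeAsync(threadId))
    {
        Console.WriteLine(message.Content);
    }
}
finally
{
    // Mirror the tests' cleanup: remove the thread, then the assistant definition.
    await agent.DeleteThreadAsync(threadId);
    await agent.DeleteAsync();
}
```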
- OpenAIFileClient fileClient = clientProvider.Client.GetOpenAIFileClient(); + OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient(); await using Stream stream = File.OpenRead("TestData/employees.pdf")!; OpenAIFile fileInfo = await fileClient.UploadFileAsync(stream, "employees.pdf", FileUploadPurpose.Assistants); // Create a vector-store - VectorStoreClient vectorStoreClient = clientProvider.Client.GetVectorStoreClient(); + VectorStoreClient vectorStoreClient = provider.Client.GetVectorStoreClient(); CreateVectorStoreOperation result = await vectorStoreClient.CreateVectorStoreAsync(waitUntilCompleted: false, new VectorStoreCreationOptions() @@ -237,26 +254,26 @@ await vectorStoreClient.CreateVectorStoreAsync(waitUntilCompleted: false, FileIds = { fileInfo.Id } }); - AssistantThread thread = await clientProvider.AssistantClient.CreateThreadAsync(); + string threadId = await agent.CreateThreadAsync(); try { - await agent.AddChatMessageAsync(thread.Id, new(AuthorRole.User, "Who works in sales?")); + await agent.AddChatMessageAsync(threadId, new(AuthorRole.User, "Who works in sales?")); ChatHistory messages = []; - var chunks = await agent.InvokeStreamingAsync(thread.Id, messages: messages).ToArrayAsync(); + var chunks = await agent.InvokeStreamingAsync(threadId, messages: messages).ToArrayAsync(); Assert.NotEmpty(chunks); Assert.Single(messages); } finally { - await clientProvider.AssistantClient.DeleteThreadAsync(thread.Id); - await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id); + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); await vectorStoreClient.DeleteVectorStoreAsync(result.VectorStoreId); await fileClient.DeleteFileAsync(fileInfo.Id); } } private async Task ExecuteAgentAsync( - OpenAIClientProvider clientProvider, + OpenAIClientProvider config, string modelName, string input, string expected) @@ -265,8 +282,16 @@ private async Task ExecuteAgentAsync( Kernel kernel = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromType(); - Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(modelName, instructions: "Answer questions about the menu."); - OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient, [plugin]); + kernel.Plugins.Add(plugin); + + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + config, + new(modelName) + { + Instructions = "Answer questions about the menu.", + }, + kernel); try { @@ -289,12 +314,12 @@ private async Task ExecuteAgentAsync( } finally { - await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id); + await agent.DeleteAsync(); } } private async Task ExecuteStreamingAgentAsync( - OpenAIClientProvider clientProvider, + OpenAIClientProvider config, string modelName, string input, string expected) @@ -303,8 +328,16 @@ private async Task ExecuteStreamingAgentAsync( Kernel kernel = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromType(); - Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(modelName, instructions: "Answer questions about the menu."); - OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient, [plugin]); + kernel.Plugins.Add(plugin); + + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + config, + new(modelName) + { + Instructions = "Answer questions about the menu.", + }, + kernel); AgentGroupChat chat = new(); chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); @@ -339,18 +372,6 @@ private static void 
AssertMessageValid(ChatMessageContent message) Assert.Equal(string.IsNullOrEmpty(message.AuthorName) ? AuthorRole.User : AuthorRole.Assistant, message.Role); } - private AzureOpenAIConfiguration ReadAzureConfiguration() - { - AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(azureOpenAIConfiguration); - return azureOpenAIConfiguration; - } - - private static OpenAIClientProvider CreateClientProvider(AzureOpenAIConfiguration azureOpenAIConfiguration) - { - return OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)); - } - public sealed class MenuPlugin { [KernelFunction, Description("Provides a list of specials from the menu.")] diff --git a/dotnet/src/IntegrationTests/BaseIntegrationTest.cs b/dotnet/src/IntegrationTests/BaseIntegrationTest.cs index d97b6787a50d..c4fda5081e39 100644 --- a/dotnet/src/IntegrationTests/BaseIntegrationTest.cs +++ b/dotnet/src/IntegrationTests/BaseIntegrationTest.cs @@ -22,11 +22,11 @@ protected IKernelBuilder CreateKernelBuilder() o.Retry.ShouldHandle = args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.TooManyRequests); o.CircuitBreaker = new HttpCircuitBreakerStrategyOptions { - SamplingDuration = TimeSpan.FromSeconds(60.0), // The duration should be least double of an attempt timeout + SamplingDuration = TimeSpan.FromSeconds(40.0), // The duration should be least double of an attempt timeout }; o.AttemptTimeout = new HttpTimeoutStrategyOptions { - Timeout = TimeSpan.FromSeconds(30.0) + Timeout = TimeSpan.FromSeconds(20.0) // Doubling the default 10s timeout }; }); }); diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionStreamingTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionStreamingTests.cs index bc706f5661ff..1540ff288197 100644 --- a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionStreamingTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionStreamingTests.cs @@ -10,7 +10,6 @@ using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.TextGeneration; -using OpenAI.Chat; using SemanticKernel.IntegrationTests.TestSettings; using Xunit; @@ -55,22 +54,12 @@ public async Task ChatCompletionShouldUseChatHistoryAndReturnMetadataAsync() var stringBuilder = new StringBuilder(); var metadata = new Dictionary(); - var hasUsage = false; - // Act & Assert + // Act await foreach (var update in chatCompletion.GetStreamingChatMessageContentsAsync(chatHistory, null, kernel)) { stringBuilder.Append(update.Content); - var openAIUpdate = Assert.IsType(update.InnerContent); - Assert.NotNull(openAIUpdate); - - if (openAIUpdate.Usage is not null) - { - Assert.True(openAIUpdate.Usage.TotalTokenCount > 0); - hasUsage = true; - } - foreach (var key in update.Metadata!.Keys) { if (!metadata.TryGetValue(key, out object? 
value) || value is null) @@ -80,7 +69,7 @@ public async Task ChatCompletionShouldUseChatHistoryAndReturnMetadataAsync() } } - Assert.True(hasUsage); + // Assert Assert.Contains("I don't know", stringBuilder.ToString()); Assert.NotNull(metadata); diff --git a/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs index 37a3439bb75b..5732a3e4719a 100644 --- a/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs @@ -3,14 +3,11 @@ using System; using System.IO; using System.Linq; -using System.Net.Http; -using System.Text; using System.Text.Json; using System.Threading.Tasks; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.Google; -using Newtonsoft.Json.Linq; using xRetry; using Xunit; using Xunit.Abstractions; @@ -138,61 +135,6 @@ public async Task ChatGenerationWithSystemMessagesAsync(ServiceType serviceType) Assert.Contains("Roger", response.Content, StringComparison.OrdinalIgnoreCase); } - [RetryTheory] - [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] - public async Task ChatGenerationWithCachedContentAsync(ServiceType serviceType) - { - // Arrange - var chatHistory = new ChatHistory(); - chatHistory.AddUserMessage("Finish this sentence: He knew the sea’s..."); - - // Setup initial cached content - var cachedContentJson = File.ReadAllText(Path.Combine("Resources", "gemini_cached_content.json")) - .Replace("{{project}}", this.VertexAIGetProjectId()) - .Replace("{{location}}", this.VertexAIGetLocation()) - .Replace("{{model}}", this.VertexAIGetGeminiModel()); - - var cachedContentName = string.Empty; - - using (var httpClient = new HttpClient() - { - DefaultRequestHeaders = { Authorization = new("Bearer", this.VertexAIGetBearerKey()) } - }) - { - using (var content = new StringContent(cachedContentJson, Encoding.UTF8, "application/json")) - { - using (var httpResponse = await httpClient.PostAsync( - new Uri($"https://{this.VertexAIGetLocation()}-aiplatform.googleapis.com/v1beta1/projects/{this.VertexAIGetProjectId()}/locations/{this.VertexAIGetLocation()}/cachedContents"), - content)) - { - httpResponse.EnsureSuccessStatusCode(); - - var responseString = await httpResponse.Content.ReadAsStringAsync(); - var responseJson = JObject.Parse(responseString); - - cachedContentName = responseJson?["name"]?.ToString(); - - Assert.NotNull(cachedContentName); - } - } - } - - var sut = this.GetChatService(serviceType, isBeta: true); - - // Act - var response = await sut.GetChatMessageContentAsync( - chatHistory, - new GeminiPromptExecutionSettings - { - CachedContent = cachedContentName - }); - - // Assert - Assert.NotNull(response.Content); - this.Output.WriteLine(response.Content); - Assert.Contains("capriciousness", response.Content, StringComparison.OrdinalIgnoreCase); - } - [RetryTheory] [InlineData(ServiceType.GoogleAI, Skip = "This test is for manual verification.")] [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] @@ -328,58 +270,6 @@ public async Task ChatStreamingVisionUriAsync(ServiceType serviceType) Assert.Contains("green", message, StringComparison.OrdinalIgnoreCase); } - [RetryTheory] - [InlineData(ServiceType.GoogleAI, Skip = "This test is for manual verification.")] - [InlineData(ServiceType.VertexAI, Skip = "This test is 
for manual verification.")] - public async Task ChatGenerationAudioBinaryDataAsync(ServiceType serviceType) - { - // Arrange - Memory audio = await File.ReadAllBytesAsync(Path.Combine("TestData", "test_audio.wav")); - var chatHistory = new ChatHistory(); - var messageContent = new ChatMessageContent(AuthorRole.User, items: - [ - new TextContent("Transcribe this audio"), - new AudioContent(audio, "audio/wav") - ]); - chatHistory.Add(messageContent); - - var sut = this.GetChatServiceWithVision(serviceType); - - // Act - var response = await sut.GetChatMessageContentAsync(chatHistory); - - // Assert - Assert.NotNull(response.Content); - this.Output.WriteLine(response.Content); - Assert.Contains("the sun rises", response.Content, StringComparison.OrdinalIgnoreCase); - } - - [RetryTheory] - [InlineData(ServiceType.GoogleAI, Skip = "This test is for manual verification.")] - [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] - public async Task ChatGenerationAudioUriAsync(ServiceType serviceType) - { - // Arrange - Uri audioUri = new("gs://cloud-samples-data/speech/brooklyn_bridge.flac"); // needs setup - var chatHistory = new ChatHistory(); - var messageContent = new ChatMessageContent(AuthorRole.User, items: - [ - new TextContent("Transcribe this audio"), - new AudioContent(audioUri) { MimeType = "audio/flac" } - ]); - chatHistory.Add(messageContent); - - var sut = this.GetChatServiceWithVision(serviceType); - - // Act - var response = await sut.GetChatMessageContentAsync(chatHistory); - - // Assert - Assert.NotNull(response.Content); - this.Output.WriteLine(response.Content); - Assert.Contains("brooklyn bridge", response.Content, StringComparison.OrdinalIgnoreCase); - } - [RetryTheory] [InlineData(ServiceType.GoogleAI, Skip = "Currently GoogleAI always returns zero tokens.")] [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] diff --git a/dotnet/src/IntegrationTests/Connectors/Google/TestsBase.cs b/dotnet/src/IntegrationTests/Connectors/Google/TestsBase.cs index b6b2e2a6c02a..6b932727f4a6 100644 --- a/dotnet/src/IntegrationTests/Connectors/Google/TestsBase.cs +++ b/dotnet/src/IntegrationTests/Connectors/Google/TestsBase.cs @@ -20,18 +20,16 @@ public abstract class TestsBase(ITestOutputHelper output) protected ITestOutputHelper Output { get; } = output; - protected IChatCompletionService GetChatService(ServiceType serviceType, bool isBeta = false) => serviceType switch + protected IChatCompletionService GetChatService(ServiceType serviceType) => serviceType switch { ServiceType.GoogleAI => new GoogleAIGeminiChatCompletionService( this.GoogleAIGetGeminiModel(), - this.GoogleAIGetApiKey(), - isBeta ? GoogleAIVersion.V1_Beta : GoogleAIVersion.V1), + this.GoogleAIGetApiKey()), ServiceType.VertexAI => new VertexAIGeminiChatCompletionService( modelId: this.VertexAIGetGeminiModel(), bearerKey: this.VertexAIGetBearerKey(), location: this.VertexAIGetLocation(), - projectId: this.VertexAIGetProjectId(), - isBeta ? 
VertexAIVersion.V1_Beta : VertexAIVersion.V1), + projectId: this.VertexAIGetProjectId()), _ => throw new ArgumentOutOfRangeException(nameof(serviceType), serviceType, null) }; @@ -71,10 +69,10 @@ public enum ServiceType private string GoogleAIGetGeminiVisionModel() => this._configuration.GetSection("GoogleAI:Gemini:VisionModelId").Get()!; private string GoogleAIGetEmbeddingModel() => this._configuration.GetSection("GoogleAI:EmbeddingModelId").Get()!; private string GoogleAIGetApiKey() => this._configuration.GetSection("GoogleAI:ApiKey").Get()!; - internal string VertexAIGetGeminiModel() => this._configuration.GetSection("VertexAI:Gemini:ModelId").Get()!; + private string VertexAIGetGeminiModel() => this._configuration.GetSection("VertexAI:Gemini:ModelId").Get()!; private string VertexAIGetGeminiVisionModel() => this._configuration.GetSection("VertexAI:Gemini:VisionModelId").Get()!; private string VertexAIGetEmbeddingModel() => this._configuration.GetSection("VertexAI:EmbeddingModelId").Get()!; - internal string VertexAIGetBearerKey() => this._configuration.GetSection("VertexAI:BearerKey").Get()!; - internal string VertexAIGetLocation() => this._configuration.GetSection("VertexAI:Location").Get()!; - internal string VertexAIGetProjectId() => this._configuration.GetSection("VertexAI:ProjectId").Get()!; + private string VertexAIGetBearerKey() => this._configuration.GetSection("VertexAI:BearerKey").Get()!; + private string VertexAIGetLocation() => this._configuration.GetSection("VertexAI:Location").Get()!; + private string VertexAIGetProjectId() => this._configuration.GetSection("VertexAI:ProjectId").Get()!; } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs index a80519c85a57..58f3492074a6 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs @@ -46,24 +46,6 @@ public async Task CollectionExistsReturnsCollectionStateAsync(bool createCollect } } - [Fact] - public async Task CanCreateCollectionWithSpecialCharactersInNameAsync() - { - // Arrange - var sut = fixture.GetCollection>("Special-Char"); - - try - { - // Act - await sut.CreateCollectionAsync(); - } - finally - { - // Cleanup - await sut.DeleteCollectionAsync(); - } - } - [Fact] public async Task CollectionCanUpsertAndGetAsync() { @@ -102,10 +84,8 @@ public async Task CollectionCanUpsertAndGetAsync() Assert.Equal("tag1", fetchedHotel1!.Tags![0]); Assert.Equal("tag2", fetchedHotel1!.Tags![1]); Assert.Null(fetchedHotel1!.ListInts); - - // Since these values are updated in the database, they will not match existly, but should be very close to each other. 
- Assert.True(TruncateMilliseconds(fetchedHotel1.CreatedAt) >= TruncateMilliseconds(writtenHotel1.CreatedAt) && TruncateMilliseconds(fetchedHotel1.CreatedAt) <= TruncateMilliseconds(writtenHotel1.CreatedAt).AddSeconds(1)); - Assert.True(TruncateMilliseconds(fetchedHotel1.UpdatedAt) >= TruncateMilliseconds(writtenHotel1.UpdatedAt) && TruncateMilliseconds(fetchedHotel1.UpdatedAt) <= TruncateMilliseconds(writtenHotel1.UpdatedAt).AddSeconds(1)); + Assert.Equal(TruncateMilliseconds(fetchedHotel1.CreatedAt), TruncateMilliseconds(writtenHotel1.CreatedAt)); + Assert.Equal(TruncateMilliseconds(fetchedHotel1.UpdatedAt), TruncateMilliseconds(writtenHotel1.UpdatedAt)); Assert.NotNull(fetchedHotel2); Assert.Equal(2, fetchedHotel2!.HotelId); @@ -119,10 +99,8 @@ public async Task CollectionCanUpsertAndGetAsync() Assert.Equal(2, fetchedHotel2!.ListInts!.Count); Assert.Equal(1, fetchedHotel2!.ListInts![0]); Assert.Equal(2, fetchedHotel2!.ListInts![1]); - - // Since these values are updated in the database, they will not match existly, but should be very close to each other. - Assert.True(TruncateMilliseconds(fetchedHotel2.CreatedAt) >= TruncateMilliseconds(writtenHotel2.CreatedAt) && TruncateMilliseconds(fetchedHotel2.CreatedAt) <= TruncateMilliseconds(writtenHotel2.CreatedAt).AddSeconds(1)); - Assert.True(TruncateMilliseconds(fetchedHotel2.UpdatedAt) >= TruncateMilliseconds(writtenHotel2.UpdatedAt) && TruncateMilliseconds(fetchedHotel2.UpdatedAt) <= TruncateMilliseconds(writtenHotel2.UpdatedAt).AddSeconds(1)); + Assert.Equal(TruncateMilliseconds(fetchedHotel2.CreatedAt), TruncateMilliseconds(writtenHotel2.CreatedAt)); + Assert.Equal(TruncateMilliseconds(fetchedHotel2.UpdatedAt), TruncateMilliseconds(writtenHotel2.UpdatedAt)); } finally { diff --git a/dotnet/src/IntegrationTests/IntegrationTests.csproj b/dotnet/src/IntegrationTests/IntegrationTests.csproj index 06b2e839116b..e24215b583d6 100644 --- a/dotnet/src/IntegrationTests/IntegrationTests.csproj +++ b/dotnet/src/IntegrationTests/IntegrationTests.csproj @@ -74,7 +74,6 @@ - @@ -196,10 +195,4 @@ Always - - - - Always - - \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/Resources/gemini_cached_content.json b/dotnet/src/IntegrationTests/Resources/gemini_cached_content.json deleted file mode 100644 index fa5e4f688efc..000000000000 --- a/dotnet/src/IntegrationTests/Resources/gemini_cached_content.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "model": "projects/{{project}}/locations/{{location}}/publishers/google/models/{{model}}", - "displayName": "CACHE_DISPLAY_NAME", - "contents": [ - { - "role": "assistant", - "parts": [ - { - "text": "This is sample text to demonstrate explicit caching." - } - ] - }, - { - "role": "user", - "parts": [ - { - "text": "The old lighthouse keeper, Silas, squinted at the churning grey sea, his weathered face mirroring the granite rocks below. He’d seen countless storms, each one a furious dance of wind and wave, but tonight felt different, a simmering unease prickling his skin. The lantern, his steadfast companion, pulsed its rhythmic beam, a fragile defiance against the encroaching darkness. A small boat, barely visible through the swirling mist, was bucking against the tide, its lone mast a broken finger pointing towards the sky. Silas grabbed his oilskins, his movements stiff with age, and descended the winding stairs, his heart thumping a frantic rhythm against his ribs. He knew the sea’s capriciousness, its ability to lull and then lash out with brutal force." 
- } - ] - } - ] -} \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/TestSettings/BedrockAgentConfiguration.cs b/dotnet/src/IntegrationTests/TestSettings/BedrockAgentConfiguration.cs deleted file mode 100644 index 19476f4d72b4..000000000000 --- a/dotnet/src/IntegrationTests/TestSettings/BedrockAgentConfiguration.cs +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; - -namespace SemanticKernel.IntegrationTests.TestSettings; - -[SuppressMessage("Performance", "CA1812:Internal class that is apparently never instantiated", - Justification = "Configuration classes are instantiated through IConfiguration.")] -internal sealed class BedrockAgentConfiguration(string agentResourceRoleArn, string foundationModel) -{ - public string AgentResourceRoleArn { get; set; } = agentResourceRoleArn; - public string FoundationModel { get; set; } = foundationModel; -} diff --git a/dotnet/src/IntegrationTests/testsettings.json b/dotnet/src/IntegrationTests/testsettings.json index 5dead0d1a7c5..22c91e9affcc 100644 --- a/dotnet/src/IntegrationTests/testsettings.json +++ b/dotnet/src/IntegrationTests/testsettings.json @@ -116,9 +116,5 @@ "ModelId": "gpt-4", "ApiKey": "" } - }, - "BedrockAgent": { - "AgentResourceRoleArn": "", - "FoundationModel": "anthropic.claude-3-haiku-20240307-v1:0" } } \ No newline at end of file diff --git a/dotnet/src/InternalUtilities/agents/AgentUtilities.props b/dotnet/src/InternalUtilities/agents/AgentUtilities.props deleted file mode 100644 index 225ce5a2b745..000000000000 --- a/dotnet/src/InternalUtilities/agents/AgentUtilities.props +++ /dev/null @@ -1,5 +0,0 @@ - - - - - \ No newline at end of file diff --git a/dotnet/src/InternalUtilities/agents/Extensions/KernelFunctionMetadataExtensions.cs b/dotnet/src/InternalUtilities/agents/Extensions/KernelFunctionMetadataExtensions.cs deleted file mode 100644 index 43a879d3dc10..000000000000 --- a/dotnet/src/InternalUtilities/agents/Extensions/KernelFunctionMetadataExtensions.cs +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. -using System; -using System.Collections.Generic; - -namespace Microsoft.SemanticKernel.Agents; - -internal static class KernelFunctionMetadataExtensions -{ - /// - /// Transform the function parameter metadata into a binary parameter spec. - /// - /// The function meta-data - /// The parameter spec as - internal static BinaryData CreateParameterSpec(this KernelFunctionMetadata metadata) - { - JsonSchemaFunctionParameters parameterSpec = new(); - List required = new(metadata.Parameters.Count); - - foreach (var parameter in metadata.Parameters) - { - if (parameter.IsRequired) - { - parameterSpec.Required.Add(parameter.Name); - } - - if (parameter.Schema is null) - { - throw new KernelException($"Unsupported function parameter: {metadata.PluginName ?? 
"*"}.{metadata.Name}.{parameter.Name}"); - } - - parameterSpec.Properties.Add(parameter.Name, parameter.Schema); - } - - return BinaryData.FromObjectAsJson(parameterSpec); - } -} diff --git a/dotnet/src/InternalUtilities/azure/AzureAIUtilities.props b/dotnet/src/InternalUtilities/azure/AzureAIUtilities.props deleted file mode 100644 index 323196e5564b..000000000000 --- a/dotnet/src/InternalUtilities/azure/AzureAIUtilities.props +++ /dev/null @@ -1,5 +0,0 @@ - - - - - \ No newline at end of file diff --git a/dotnet/src/InternalUtilities/azure/Policies/GeneratedActionPipelinePolicy.cs b/dotnet/src/InternalUtilities/azure/Policies/GeneratedActionPipelinePolicy.cs deleted file mode 100644 index 8f412aa9e930..000000000000 --- a/dotnet/src/InternalUtilities/azure/Policies/GeneratedActionPipelinePolicy.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics.CodeAnalysis; -using System.Threading.Tasks; -using Azure.Core; -using Azure.Core.Pipeline; - -/// -/// Generic action pipeline policy for processing messages. -/// -[ExcludeFromCodeCoverage] -internal sealed class GenericActionPipelinePolicy : HttpPipelinePolicy -{ - private readonly Action _processMessageAction; - - internal GenericActionPipelinePolicy(Action processMessageAction) - { - this._processMessageAction = processMessageAction; - } - - public override void Process(HttpMessage message, ReadOnlyMemory pipeline) - { - this._processMessageAction(message); - } - - public override ValueTask ProcessAsync(HttpMessage message, ReadOnlyMemory pipeline) - { - this._processMessageAction(message); - return new ValueTask(Task.CompletedTask); // .NET STD 2.0 compatibility - } -} diff --git a/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs b/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs index a9f3a79874ef..5fe03ebb925d 100644 --- a/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs +++ b/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs @@ -5,7 +5,6 @@ using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Runtime.CompilerServices; -using System.Text.Encodings.Web; using System.Text.Json; using System.Threading; using System.Threading.Tasks; @@ -119,7 +118,6 @@ public FunctionCallsProcessor(ILogger? logger = null) /// Processes AI function calls by iterating over the function calls, invoking them and adding the results to the chat history. /// /// The chat message content representing AI model response and containing function calls. - /// The prompt execution settings. /// The chat history to add function invocation results to. /// AI model function(s) call request sequence index. /// Callback to check if a function was advertised to AI model or not. @@ -130,7 +128,6 @@ public FunctionCallsProcessor(ILogger? logger = null) /// Last chat history message if function invocation filter requested processing termination, otherwise null. public async Task ProcessFunctionCallsAsync( ChatMessageContent chatMessageContent, - PromptExecutionSettings? executionSettings, ChatHistory chatHistory, int requestIndex, Func checkIfFunctionAdvertised, @@ -179,8 +176,7 @@ public FunctionCallsProcessor(ILogger? 
logger = null) FunctionCount = functionCalls.Length, CancellationToken = cancellationToken, IsStreaming = isStreaming, - ToolCallId = functionCall.Id, - ExecutionSettings = executionSettings + ToolCallId = functionCall.Id }; s_inflightAutoInvokes.Value++; @@ -494,17 +490,6 @@ public static string ProcessFunctionResult(object functionResult) return chatMessageContent.ToString(); } - return JsonSerializer.Serialize(functionResult, s_functionResultSerializerOptions); + return JsonSerializer.Serialize(functionResult); } - - /// - /// The which will be used in . - /// - /// - /// is very likely to escape characters and generates LLM unfriendly results by default. - /// - private static readonly JsonSerializerOptions s_functionResultSerializerOptions = new() - { - Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping, - }; } diff --git a/dotnet/src/InternalUtilities/process/Abstractions/StepExtensions.cs b/dotnet/src/InternalUtilities/process/Abstractions/StepExtensions.cs index fead79cde844..ab74689a33db 100644 --- a/dotnet/src/InternalUtilities/process/Abstractions/StepExtensions.cs +++ b/dotnet/src/InternalUtilities/process/Abstractions/StepExtensions.cs @@ -101,14 +101,9 @@ public static void InitializeUserState(this KernelProcessStepState stateObject, /// The source channel to evaluate /// A dictionary of KernelFunction instances. /// An instance of . - /// An instance of /// /// - public static Dictionary?> FindInputChannels( - this IKernelProcessMessageChannel channel, - Dictionary functions, - ILogger? logger, - IExternalKernelProcessMessageChannel? externalMessageChannel = null) + public static Dictionary?> FindInputChannels(this IKernelProcessMessageChannel channel, Dictionary functions, ILogger? logger) { if (functions is null) { @@ -131,7 +126,7 @@ public static void InitializeUserState(this KernelProcessStepState stateObject, // and are instantiated here. if (param.ParameterType == typeof(KernelProcessStepContext)) { - inputs[kvp.Key]![param.Name] = new KernelProcessStepContext(channel, externalMessageChannel); + inputs[kvp.Key]![param.Name] = new KernelProcessStepContext(channel); } else { diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs index 7c9ee6a3c654..989005333946 100644 --- a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs +++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs @@ -1,10 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. +using System.ClientModel; using System.Collections.ObjectModel; using System.Diagnostics; -using Azure.AI.Projects; +using Azure.Identity; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; using OpenAI.Assistants; @@ -13,41 +13,40 @@ using ChatTokenUsage = OpenAI.Chat.ChatTokenUsage; /// -/// Base class for samples that demonstrate the usage of host agents -/// based on API's such as Open AI Assistants or Azure AI Agents. +/// Base class for samples that demonstrate the usage of agents. /// -public abstract class BaseAgentsTest(ITestOutputHelper output) : BaseAgentsTest(output) +public abstract class BaseAgentsTest(ITestOutputHelper output) : BaseTest(output, redirectSystemConsoleOutput: true) { /// /// Metadata key to indicate the assistant as created for a sample. 
/// - protected const string SampleMetadataKey = "sksample"; + protected const string AssistantSampleMetadataKey = "sksample"; /// - /// Metadata to indicate the object was created for a sample. + /// Metadata to indicate the assistant as created for a sample. /// /// - /// While the samples do attempt delete the objects it creates, it is possible - /// that some may remain. This metadata can be used to identify and sample - /// objects for manual clean-up. + /// While the samples do attempt delete the assistants it creates, it is possible + /// that some assistants may remain. This metadata can be used to identify and sample + /// agents for clean-up. /// - protected static readonly ReadOnlyDictionary SampleMetadata = + protected static readonly ReadOnlyDictionary AssistantSampleMetadata = new(new Dictionary { - { SampleMetadataKey, bool.TrueString } + { AssistantSampleMetadataKey, bool.TrueString } }); /// - /// Gets the root client for the service. + /// Provide a according to the configuration settings. /// - protected abstract TClient Client { get; } -} + protected OpenAIClientProvider GetClientProvider() + => + this.UseOpenAIConfig ? + OpenAIClientProvider.ForOpenAI(new ApiKeyCredential(this.ApiKey ?? throw new ConfigurationNotFoundException("OpenAI:ApiKey"))) : + !string.IsNullOrWhiteSpace(this.ApiKey) ? + OpenAIClientProvider.ForAzureOpenAI(new ApiKeyCredential(this.ApiKey), new Uri(this.Endpoint!)) : + OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(this.Endpoint!)); -/// -/// Base class for samples that demonstrate the usage of agents. -/// -public abstract class BaseAgentsTest(ITestOutputHelper output) : BaseTest(output, redirectSystemConsoleOutput: true) -{ /// /// Common method to write formatted agent chat content to the console. /// @@ -92,17 +91,13 @@ protected void WriteAgentChatMessage(ChatMessageContent message) { WriteUsage(assistantUsage.TotalTokenCount, assistantUsage.InputTokenCount, assistantUsage.OutputTokenCount); } - else if (usage is RunStepCompletionUsage agentUsage) - { - WriteUsage(agentUsage.TotalTokens, agentUsage.PromptTokens, agentUsage.CompletionTokens); - } else if (usage is ChatTokenUsage chatUsage) { WriteUsage(chatUsage.TotalTokenCount, chatUsage.InputTokenCount, chatUsage.OutputTokenCount); } } - void WriteUsage(long totalTokens, long inputTokens, long outputTokens) + void WriteUsage(int totalTokens, int inputTokens, int outputTokens) { Console.WriteLine($" [Usage] Tokens: {totalTokens}, Input: {inputTokens}, Output: {outputTokens}"); } diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAssistantTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAssistantTest.cs deleted file mode 100644 index 504194becde9..000000000000 --- a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAssistantTest.cs +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.ClientModel; -using System.Diagnostics; -using Azure.Identity; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.Agents.OpenAI; -using OpenAI; -using OpenAI.Assistants; -using OpenAI.Files; - -/// -/// Base class for samples that demonstrate the usage of . -/// -public abstract class BaseAssistantTest : BaseAgentsTest -{ - protected BaseAssistantTest(ITestOutputHelper output) : base(output) - { - this.Client = - this.UseOpenAIConfig ? - OpenAIAssistantAgent.CreateOpenAIClient(new ApiKeyCredential(this.ApiKey ?? 
throw new ConfigurationNotFoundException("OpenAI:ApiKey"))) : - !string.IsNullOrWhiteSpace(this.ApiKey) ? - OpenAIAssistantAgent.CreateAzureOpenAIClient(new ApiKeyCredential(this.ApiKey), new Uri(this.Endpoint!)) : - OpenAIAssistantAgent.CreateAzureOpenAIClient(new AzureCliCredential(), new Uri(this.Endpoint!)); - - this.AssistantClient = this.Client.GetAssistantClient(); - } - - /// - protected override OpenAIClient Client { get; } - - /// - /// Gets the the . - /// - protected AssistantClient AssistantClient { get; } - - protected async Task DownloadResponseContentAsync(ChatMessageContent message) - { - OpenAIFileClient fileClient = this.Client.GetOpenAIFileClient(); - - foreach (KernelContent item in message.Items) - { - if (item is AnnotationContent annotation) - { - await this.DownloadFileContentAsync(fileClient, annotation.FileId!); - } - } - } - - protected async Task DownloadResponseImageAsync(ChatMessageContent message) - { - OpenAIFileClient fileClient = this.Client.GetOpenAIFileClient(); - - foreach (KernelContent item in message.Items) - { - if (item is FileReferenceContent fileReference) - { - await this.DownloadFileContentAsync(fileClient, fileReference.FileId, launchViewer: true); - } - } - } - - private async Task DownloadFileContentAsync(OpenAIFileClient fileClient, string fileId, bool launchViewer = false) - { - OpenAIFile fileInfo = fileClient.GetFile(fileId); - if (fileInfo.Purpose == FilePurpose.AssistantsOutput) - { - string filePath = Path.Combine(Path.GetTempPath(), Path.GetFileName(fileInfo.Filename)); - if (launchViewer) - { - filePath = Path.ChangeExtension(filePath, ".png"); - } - - BinaryData content = await fileClient.DownloadFileAsync(fileId); - File.WriteAllBytes(filePath, content.ToArray()); - Console.WriteLine($" File #{fileId} saved to: {filePath}"); - - if (launchViewer) - { - Process.Start( - new ProcessStartInfo - { - FileName = "cmd.exe", - Arguments = $"/C start {filePath}" - }); - } - } - } -} diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureAgentTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureAgentTest.cs deleted file mode 100644 index a36932db1f38..000000000000 --- a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureAgentTest.cs +++ /dev/null @@ -1,173 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.ClientModel; -using System.Collections.ObjectModel; -using System.Diagnostics; -using Azure.AI.Projects; -using Azure.Identity; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.Agents.AzureAI; -using Microsoft.SemanticKernel.Agents.OpenAI; -using Microsoft.SemanticKernel.ChatCompletion; -using OpenAI.Assistants; -using OpenAI.Files; - -using ChatTokenUsage = OpenAI.Chat.ChatTokenUsage; - -/// -/// Base class for samples that demonstrate the usage of agents. -/// -public abstract class BaseAzureTest(ITestOutputHelper output) : BaseTest(output, redirectSystemConsoleOutput: true) -{ - /// - /// Metadata key to indicate the assistant as created for a sample. - /// - protected const string AssistantSampleMetadataKey = "sksample"; - - protected override bool ForceOpenAI => false; - - /// - /// Metadata to indicate the object was created for a sample. - /// - /// - /// While the samples do attempt delete the objects it creates, it is possible - /// that some may remain. This metadata can be used to identify and sample - /// objects for manual clean-up. 
- /// - protected static readonly ReadOnlyDictionary SampleMetadata = - new(new Dictionary - { - { AssistantSampleMetadataKey, bool.TrueString } - }); - - /// - /// Provide a according to the configuration settings. - /// - protected AzureAIClientProvider GetAzureProvider() - { - return AzureAIClientProvider.FromConnectionString(TestConfiguration.AzureAI.ConnectionString, new AzureCliCredential()); - } - - /// - /// Provide a according to the configuration settings. - /// - protected OpenAIClientProvider GetClientProvider() - { - return - this.UseOpenAIConfig ? - OpenAIClientProvider.ForOpenAI(new ApiKeyCredential(this.ApiKey ?? throw new ConfigurationNotFoundException("OpenAI:ApiKey"))) : - !string.IsNullOrWhiteSpace(this.ApiKey) ? - OpenAIClientProvider.ForAzureOpenAI(new ApiKeyCredential(this.ApiKey), new Uri(this.Endpoint!)) : - OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(this.Endpoint!)); - } - - /// - /// Common method to write formatted agent chat content to the console. - /// - protected void WriteAgentChatMessage(ChatMessageContent message) - { - // Include ChatMessageContent.AuthorName in output, if present. - string authorExpression = message.Role == AuthorRole.User ? string.Empty : $" - {message.AuthorName ?? "*"}"; - // Include TextContent (via ChatMessageContent.Content), if present. - string contentExpression = string.IsNullOrWhiteSpace(message.Content) ? string.Empty : message.Content; - bool isCode = message.Metadata?.ContainsKey(OpenAIAssistantAgent.CodeInterpreterMetadataKey) ?? false; - string codeMarker = isCode ? "\n [CODE]\n" : " "; - Console.WriteLine($"\n# {message.Role}{authorExpression}:{codeMarker}{contentExpression}"); - - // Provide visibility for inner content (that isn't TextContent). - foreach (KernelContent item in message.Items) - { - if (item is AnnotationContent annotation) - { - Console.WriteLine($" [{item.GetType().Name}] {annotation.Quote}: File #{annotation.FileId}"); - } - else if (item is FileReferenceContent fileReference) - { - Console.WriteLine($" [{item.GetType().Name}] File #{fileReference.FileId}"); - } - else if (item is ImageContent image) - { - Console.WriteLine($" [{item.GetType().Name}] {image.Uri?.ToString() ?? image.DataUri ?? $"{image.Data?.Length} bytes"}"); - } - else if (item is FunctionCallContent functionCall) - { - Console.WriteLine($" [{item.GetType().Name}] {functionCall.Id}"); - } - else if (item is FunctionResultContent functionResult) - { - Console.WriteLine($" [{item.GetType().Name}] {functionResult.CallId} - {functionResult.Result?.AsJson() ?? "*"}"); - } - } - - if (message.Metadata?.TryGetValue("Usage", out object? usage) ?? 
false) - { - if (usage is RunStepTokenUsage assistantUsage) - { - WriteUsage(assistantUsage.TotalTokenCount, assistantUsage.InputTokenCount, assistantUsage.OutputTokenCount); - } - else if (usage is RunStepCompletionUsage agentUsage) - { - WriteUsage(agentUsage.TotalTokens, agentUsage.PromptTokens, agentUsage.CompletionTokens); - } - else if (usage is ChatTokenUsage chatUsage) - { - WriteUsage(chatUsage.TotalTokenCount, chatUsage.InputTokenCount, chatUsage.OutputTokenCount); - } - } - - void WriteUsage(long totalTokens, long inputTokens, long outputTokens) - { - Console.WriteLine($" [Usage] Tokens: {totalTokens}, Input: {inputTokens}, Output: {outputTokens}"); - } - } - - protected async Task DownloadResponseContentAsync(OpenAIFileClient client, ChatMessageContent message) - { - foreach (KernelContent item in message.Items) - { - if (item is AnnotationContent annotation) - { - await this.DownloadFileContentAsync(client, annotation.FileId!); - } - } - } - - protected async Task DownloadResponseImageAsync(OpenAIFileClient client, ChatMessageContent message) - { - foreach (KernelContent item in message.Items) - { - if (item is FileReferenceContent fileReference) - { - await this.DownloadFileContentAsync(client, fileReference.FileId, launchViewer: true); - } - } - } - - private async Task DownloadFileContentAsync(OpenAIFileClient client, string fileId, bool launchViewer = false) - { - OpenAIFile fileInfo = client.GetFile(fileId); - if (fileInfo.Purpose == FilePurpose.AssistantsOutput) - { - string filePath = Path.Combine(Path.GetTempPath(), Path.GetFileName(fileInfo.Filename)); - if (launchViewer) - { - filePath = Path.ChangeExtension(filePath, ".png"); - } - - BinaryData content = await client.DownloadFileAsync(fileId); - File.WriteAllBytes(filePath, content.ToArray()); - Console.WriteLine($" File #{fileId} saved to: {filePath}"); - - if (launchViewer) - { - Process.Start( - new ProcessStartInfo - { - FileName = "cmd.exe", - Arguments = $"/C start {filePath}" - }); - } - } - } -} diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureTest.cs deleted file mode 100644 index e0c937870e54..000000000000 --- a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAzureTest.cs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics; -using Azure.AI.Projects; -using Azure.Identity; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.Agents.AzureAI; - -/// -/// Base class for samples that demonstrate the usage of . -/// -public abstract class BaseAzureAgentTest : BaseAgentsTest -{ - protected BaseAzureAgentTest(ITestOutputHelper output) : base(output) - { - this.Client = AzureAIAgent.CreateAzureAIClient(TestConfiguration.AzureAI.ConnectionString, new AzureCliCredential()); - this.AgentsClient = this.Client.GetAgentsClient(); - } - - /// - protected override AIProjectClient Client { get; } - - /// - /// Gets the . 
- /// - protected AgentsClient AgentsClient { get; } - - protected async Task DownloadContentAsync(ChatMessageContent message) - { - foreach (KernelContent item in message.Items) - { - if (item is AnnotationContent annotation) - { - await this.DownloadFileAsync(annotation.FileId!); - } - } - } - - protected async Task DownloadFileAsync(string fileId, bool launchViewer = false) - { - AgentFile fileInfo = this.AgentsClient.GetFile(fileId); - if (fileInfo.Purpose == AgentFilePurpose.AgentsOutput) - { - string filePath = Path.Combine(Path.GetTempPath(), Path.GetFileName(fileInfo.Filename)); - if (launchViewer) - { - filePath = Path.ChangeExtension(filePath, ".png"); - } - - BinaryData content = await this.AgentsClient.GetFileContentAsync(fileId); - File.WriteAllBytes(filePath, content.ToArray()); - Console.WriteLine($" File #{fileId} saved to: {filePath}"); - - if (launchViewer) - { - Process.Start( - new ProcessStartInfo - { - FileName = "cmd.exe", - Arguments = $"/C start {filePath}" - }); - } - } - } -} diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseBedrockAgentTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseBedrockAgentTest.cs deleted file mode 100644 index 0a41c9c5778c..000000000000 --- a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseBedrockAgentTest.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Amazon.BedrockAgent; -using Amazon.BedrockAgent.Model; -using Microsoft.SemanticKernel.Agents.Bedrock; - -/// -/// Base class for samples that demonstrate the usage of AWS Bedrock agents. -/// -public abstract class BaseBedrockAgentTest : BaseTest -{ - protected const string AgentDescription = "A helpful assistant who helps users find information."; - protected const string AgentInstruction = "You're a helpful assistant who helps users find information."; - protected readonly AmazonBedrockAgentClient Client; - - protected BaseBedrockAgentTest(ITestOutputHelper output) : base(output, redirectSystemConsoleOutput: true) - { - Client = new AmazonBedrockAgentClient(); - } - - protected CreateAgentRequest GetCreateAgentRequest(string agentName) => new() - { - AgentName = agentName, - Description = AgentDescription, - Instruction = AgentInstruction, - AgentResourceRoleArn = TestConfiguration.BedrockAgent.AgentResourceRoleArn, - FoundationModel = TestConfiguration.BedrockAgent.FoundationModel, - }; - - protected override void Dispose(bool disposing) - { - Client?.Dispose(); - base.Dispose(disposing); - } - - /// - /// Override this method to create an agent with desired settings. - /// - /// The name of the agent to create. Must be unique. - protected abstract Task CreateAgentAsync(string agentName); -} diff --git a/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs b/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs index 78816c97e2e2..03c09e63551b 100644 --- a/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs +++ b/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs @@ -141,33 +141,6 @@ protected void OutputLastMessage(ChatHistory chatHistory) Console.WriteLine("------------------------"); } - /// - /// Outputs out the stream of generated message tokens. 
- /// - protected async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) - { - bool roleWritten = false; - string fullMessage = string.Empty; - - await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) - { - if (!roleWritten && chatUpdate.Role.HasValue) - { - Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); - roleWritten = true; - } - - if (chatUpdate.Content is { Length: > 0 }) - { - fullMessage += chatUpdate.Content; - Console.Write(chatUpdate.Content); - } - } - - Console.WriteLine("\n------------------------"); - chatHistory.AddMessage(authorRole, fullMessage); - } - /// /// Utility method to write a horizontal rule to the console. /// diff --git a/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs b/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs index e45f52216a14..5e9e0c925660 100644 --- a/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs +++ b/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs @@ -24,7 +24,6 @@ public static void Initialize(IConfigurationRoot configRoot) public static OnnxConfig Onnx => LoadSection(); public static AzureOpenAIConfig AzureOpenAI => LoadSection(); public static AzureAIInferenceConfig AzureAIInference => LoadSection(); - public static AzureAIConfig AzureAI => LoadSection(); public static AzureOpenAIConfig AzureOpenAIImages => LoadSection(); public static AzureOpenAIEmbeddingsConfig AzureOpenAIEmbeddings => LoadSection(); public static AzureAISearchConfig AzureAISearch => LoadSection(); @@ -48,9 +47,6 @@ public static void Initialize(IConfigurationRoot configRoot) public static GoogleAIConfig GoogleAI => LoadSection(); public static VertexAIConfig VertexAI => LoadSection(); public static AzureCosmosDbMongoDbConfig AzureCosmosDbMongoDb => LoadSection(); - public static ApplicationInsightsConfig ApplicationInsights => LoadSection(); - public static CrewAIConfig CrewAI => LoadSection(); - public static BedrockAgentConfig BedrockAgent => LoadSection(); private static T LoadSection([CallerMemberName] string? caller = null) { @@ -95,12 +91,6 @@ public class OnnxConfig public string EmbeddingVocabPath { get; set; } } - public class AzureAIConfig - { - public string ConnectionString { get; set; } - public string ChatModelId { get; set; } - } - public class AzureOpenAIConfig { public string ServiceId { get; set; } @@ -266,11 +256,6 @@ public class AzureCosmosDbMongoDbConfig public string DatabaseName { get; set; } } - public class ApplicationInsightsConfig - { - public string ConnectionString { get; set; } - } - /// /// Graph API connector configuration model. 
/// @@ -317,16 +302,4 @@ public MsGraphConfiguration( this.RedirectUri = redirectUri; } } - - public class CrewAIConfig - { - public string Endpoint { get; set; } - public string AuthToken { get; set; } - } - - public class BedrockAgentConfig - { - public string AgentResourceRoleArn { get; set; } - public string FoundationModel { get; set; } - } } diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/ActivityExtensions.cs b/dotnet/src/InternalUtilities/src/Diagnostics/ActivityExtensions.cs index 14d29749e36e..d5b36387b305 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/ActivityExtensions.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/ActivityExtensions.cs @@ -4,9 +4,6 @@ using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; namespace Microsoft.SemanticKernel.Diagnostics; @@ -27,8 +24,7 @@ public static Activity SetTags(this Activity activity, ReadOnlySpan RunWithActivityAsync( - Func getActivity, - Func> operation, - [EnumeratorCancellation] CancellationToken cancellationToken) - { - using var activity = getActivity(); - - ConfiguredCancelableAsyncEnumerable result; - - try - { - result = operation().WithCancellation(cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (activity is not null) - { - activity.SetError(ex); - throw; - } - - var resultEnumerator = result.ConfigureAwait(false).GetAsyncEnumerator(); - - try - { - while (true) - { - try - { - if (!await resultEnumerator.MoveNextAsync()) - { - break; - } - } - catch (Exception ex) when (activity is not null) - { - activity.SetError(ex); - throw; - } - - yield return resultEnumerator.Current; - } - } - finally - { - await resultEnumerator.DisposeAsync(); - } - } } diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs b/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs index af2f4611759e..76e9d130ac3a 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs @@ -61,38 +61,6 @@ internal static class ModelDiagnostics ) where TPromptExecutionSettings : PromptExecutionSettings => StartCompletionActivity(endpoint, modelName, modelProvider, chatHistory, executionSettings, ToOpenAIFormat); - /// - /// Start an agent invocation activity and return the activity. - /// - internal static Activity? StartAgentInvocationActivity( - string agentId, - string agentName, - string? agentDescription) - { - if (!IsModelDiagnosticsEnabled()) - { - return null; - } - - const string OperationName = "invoke_agent"; - - var activity = s_activitySource.StartActivityWithTags( - $"{OperationName} {agentName}", - [ - new(ModelDiagnosticsTags.Operation, OperationName), - new(ModelDiagnosticsTags.AgentId, agentId), - new(ModelDiagnosticsTags.AgentName, agentName) - ], - ActivityKind.Internal); - - if (!string.IsNullOrWhiteSpace(agentDescription)) - { - activity?.SetTag(ModelDiagnosticsTags.AgentDescription, agentDescription); - } - - return activity; - } - /// /// Set the text completion response for a given activity. /// The activity will be enriched with the response attributes specified by the semantic conventions. 
@@ -472,9 +440,6 @@ private static class ModelDiagnosticsTags public const string Completion = "gen_ai.content.completion"; public const string Address = "server.address"; public const string Port = "server.port"; - public const string AgentId = "gen_ai.agent.id"; - public const string AgentName = "gen_ai.agent.name"; - public const string AgentDescription = "gen_ai.agent.description"; // Activity events public const string PromptEvent = "gen_ai.content.prompt"; diff --git a/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseClientTests.cs b/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseClientTests.cs deleted file mode 100644 index f49fa4ddce0d..000000000000 --- a/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseClientTests.cs +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Plugins.AI.CrewAI; -using Moq; -using Moq.Protected; -using Xunit; - -namespace SemanticKernel.Plugins.AI.UnitTests.CrewAI; - -/// -/// Tests for the class. -/// -public sealed partial class CrewAIEnterpriseClientTests -{ - private readonly Mock _httpMessageHandlerMock; - private readonly CrewAIEnterpriseClient _client; - - /// - /// Initializes a new instance of the class. - /// - public CrewAIEnterpriseClientTests() - { - this._httpMessageHandlerMock = new Mock(); - using var httpClientFactory = new MockHttpClientFactory(this._httpMessageHandlerMock); - this._client = new CrewAIEnterpriseClient( - endpoint: new Uri("http://example.com"), - authTokenProvider: () => Task.FromResult("token"), - httpClientFactory); - } - - /// - /// Tests that returns the required inputs from the CrewAI API. - /// - /// - [Fact] - public async Task GetInputsAsyncReturnsCrewAIRequiredInputsAsync() - { - // Arrange - var responseContent = "{\"inputs\": [\"input1\", \"input2\"]}"; - using var responseMessage = new HttpResponseMessage - { - StatusCode = HttpStatusCode.OK, - Content = new StringContent(responseContent) - }; - - this._httpMessageHandlerMock.Protected() - .Setup>( - "SendAsync", - ItExpr.IsAny(), - ItExpr.IsAny()) - .ReturnsAsync(responseMessage); - - // Act - var result = await this._client.GetInputsAsync(); - - // Assert - Assert.NotNull(result); - Assert.Equal(2, result.Inputs.Count); - Assert.Contains("input1", result.Inputs); - Assert.Contains("input2", result.Inputs); - } - - /// - /// Tests that returns the kickoff id from the CrewAI API. - /// - /// - [Fact] - public async Task KickoffAsyncReturnsCrewAIKickoffResponseAsync() - { - // Arrange - var responseContent = "{\"kickoff_id\": \"12345\"}"; - using var responseMessage = new HttpResponseMessage - { - StatusCode = HttpStatusCode.OK, - Content = new StringContent(responseContent) - }; - - this._httpMessageHandlerMock.Protected() - .Setup>( - "SendAsync", - ItExpr.IsAny(), - ItExpr.IsAny()) - .ReturnsAsync(responseMessage); - - // Act - var result = await this._client.KickoffAsync(new { key = "value" }); - - // Assert - Assert.NotNull(result); - Assert.Equal("12345", result.KickoffId); - } - - /// - /// Tests that returns the status of the CrewAI Crew. 
- /// - /// - /// - /// - [Theory] - [InlineData(CrewAIKickoffState.Pending)] - [InlineData(CrewAIKickoffState.Started)] - [InlineData(CrewAIKickoffState.Running)] - [InlineData(CrewAIKickoffState.Success)] - [InlineData(CrewAIKickoffState.Failed)] - [InlineData(CrewAIKickoffState.Failure)] - [InlineData(CrewAIKickoffState.NotFound)] - public async Task GetStatusAsyncReturnsCrewAIStatusResponseAsync(CrewAIKickoffState state) - { - var crewAIStatusState = state switch - { - CrewAIKickoffState.Pending => "PENDING", - CrewAIKickoffState.Started => "STARTED", - CrewAIKickoffState.Running => "RUNNING", - CrewAIKickoffState.Success => "SUCCESS", - CrewAIKickoffState.Failed => "FAILED", - CrewAIKickoffState.Failure => "FAILURE", - CrewAIKickoffState.NotFound => "NOT FOUND", - _ => throw new ArgumentOutOfRangeException(nameof(state), state, null) - }; - - // Arrange - var responseContent = $"{{\"state\": \"{crewAIStatusState}\", \"result\": \"The Result\", \"last_step\": {{\"step1\": \"value1\"}}}}"; - using var responseMessage = new HttpResponseMessage - { - StatusCode = HttpStatusCode.OK, - Content = new StringContent(responseContent) - }; - - this._httpMessageHandlerMock.Protected() - .Setup>( - "SendAsync", - ItExpr.IsAny(), - ItExpr.IsAny()) - .ReturnsAsync(responseMessage); - - // Act - var result = await this._client.GetStatusAsync("12345"); - - // Assert - Assert.NotNull(result); - Assert.Equal(state, result.State); - Assert.Equal("The Result", result.Result); - Assert.NotNull(result.LastStep); - Assert.Equal("value1", result.LastStep["step1"].ToString()); - } -} diff --git a/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseTests.cs b/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseTests.cs deleted file mode 100644 index 635e8f63700a..000000000000 --- a/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/CrewAIEnterpriseTests.cs +++ /dev/null @@ -1,150 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Plugins.AI.CrewAI; -using Moq; -using Xunit; - -namespace SemanticKernel.Plugins.UnitTests.AI.CrewAI; - -/// -/// Unit tests for the class. -/// -public sealed class CrewAIEnterpriseTests -{ - private readonly Mock _mockClient; - private readonly CrewAIEnterprise _crewAIEnterprise; - - /// - /// Initializes a new instance of the class. - /// - public CrewAIEnterpriseTests() - { - this._mockClient = new Mock(MockBehavior.Strict); - this._crewAIEnterprise = new CrewAIEnterprise(this._mockClient.Object, NullLoggerFactory.Instance); - } - - /// - /// Tests the successful kickoff of a CrewAI task. - /// - [Fact] - public async Task KickoffAsyncSuccessAsync() - { - // Arrange - var response = new CrewAIKickoffResponse { KickoffId = "12345" }; - this._mockClient.Setup(client => client.KickoffAsync(It.IsAny(), null, null, null, It.IsAny())) - .ReturnsAsync(response); - - // Act - var result = await this._crewAIEnterprise.KickoffAsync(new { }); - - // Assert - Assert.Equal("12345", result); - } - - /// - /// Tests the failure of a CrewAI task kickoff. 
- /// - [Fact] - public async Task KickoffAsyncFailureAsync() - { - // Arrange - this._mockClient.Setup(client => client.KickoffAsync(It.IsAny(), null, null, null, It.IsAny())) - .ThrowsAsync(new InvalidOperationException("Kickoff failed")); - - // Act & Assert - await Assert.ThrowsAsync(() => this._crewAIEnterprise.KickoffAsync(new { })); - } - - /// - /// Tests the successful retrieval of CrewAI task status. - /// - [Fact] - public async Task GetCrewStatusAsyncSuccessAsync() - { - // Arrange - var response = new CrewAIStatusResponse { State = CrewAIKickoffState.Running }; - this._mockClient.Setup(client => client.GetStatusAsync("12345", It.IsAny())) - .ReturnsAsync(response); - - // Act - var result = await this._crewAIEnterprise.GetCrewKickoffStatusAsync("12345"); - - // Assert - Assert.Equal(CrewAIKickoffState.Running, result.State); - } - - /// - /// Tests the failure of CrewAI task status retrieval. - /// - [Fact] - public async Task GetCrewStatusAsyncFailureAsync() - { - // Arrange - this._mockClient.Setup(client => client.GetStatusAsync("12345", It.IsAny())) - .ThrowsAsync(new InvalidOperationException("Status retrieval failed")); - - // Act & Assert - await Assert.ThrowsAsync(() => this._crewAIEnterprise.GetCrewKickoffStatusAsync("12345")); - } - - /// - /// Tests the successful completion of a CrewAI task. - /// - [Fact] - public async Task WaitForCrewCompletionAsyncSuccessAsync() - { - // Arrange - var response = new CrewAIStatusResponse { State = CrewAIKickoffState.Success, Result = "Completed" }; - this._mockClient.SetupSequence(client => client.GetStatusAsync("12345", It.IsAny())) - .ReturnsAsync(new CrewAIStatusResponse { State = CrewAIKickoffState.Running }) - .ReturnsAsync(response); - - // Act - var result = await this._crewAIEnterprise.WaitForCrewCompletionAsync("12345"); - - // Assert - Assert.Equal("Completed", result); - } - - /// - /// Tests the failure of a CrewAI task completion. - /// - [Fact] - public async Task WaitForCrewCompletionAsyncFailureAsync() - { - // Arrange - var response = new CrewAIStatusResponse { State = CrewAIKickoffState.Failed, Result = "Error" }; - this._mockClient.SetupSequence(client => client.GetStatusAsync("12345", It.IsAny())) - .ReturnsAsync(new CrewAIStatusResponse { State = CrewAIKickoffState.Running }) - .ReturnsAsync(response); - - // Act & Assert - var exception = await Assert.ThrowsAsync(() => this._crewAIEnterprise.WaitForCrewCompletionAsync("12345")); - } - - /// - /// Tests the successful creation of a Kernel plugin. - /// - [Fact] - public void CreateKernelPluginSuccess() - { - // Arrange - var inputDefinitions = new List - { - new("input1", "description1", typeof(string)) - }; - - // Act - var plugin = this._crewAIEnterprise.CreateKernelPlugin("TestPlugin", "Test Description", inputDefinitions); - - // Assert - Assert.NotNull(plugin); - Assert.Equal("TestPlugin", plugin.Name); - } -} diff --git a/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/MockHttpClientFactory.cs b/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/MockHttpClientFactory.cs deleted file mode 100644 index fb37715e604f..000000000000 --- a/dotnet/src/Plugins/Plugins.AI.UnitTests/CrewAI/MockHttpClientFactory.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using Moq; - -namespace SemanticKernel.Plugins.AI.UnitTests.CrewAI; - -/// -/// Implementation of which uses the . 
-/// -internal sealed class MockHttpClientFactory(Mock mockHandler) : IHttpClientFactory, IDisposable -{ - public HttpClient CreateClient(string name) - { - return new(mockHandler.Object); - } - - public void Dispose() - { - mockHandler.Object.Dispose(); - GC.SuppressFinalize(this); - } -} diff --git a/dotnet/src/Plugins/Plugins.AI.UnitTests/Plugins.AI.UnitTests.csproj b/dotnet/src/Plugins/Plugins.AI.UnitTests/Plugins.AI.UnitTests.csproj deleted file mode 100644 index 00d08ca13f1a..000000000000 --- a/dotnet/src/Plugins/Plugins.AI.UnitTests/Plugins.AI.UnitTests.csproj +++ /dev/null @@ -1,37 +0,0 @@ - - - - SemanticKernel.Plugins.AI.UnitTests - SemanticKernel.Plugins.AI.UnitTests - net8.0 - true - enable - disable - false - $(NoWarn);CA2007,VSTHRD111,SKEXP0001,SKEXP0050 - - - - - - - - - - runtime; build; native; contentfiles; analyzers; buildtransitive - all - - - runtime; build; native; contentfiles; analyzers; buildtransitive - all - - - - - - - - - - - diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIEnterpriseClient.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIEnterpriseClient.cs deleted file mode 100644 index be2822d3e85e..000000000000 --- a/dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIEnterpriseClient.cs +++ /dev/null @@ -1,164 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Http; - -namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI; - -/// -/// Internal interface used for mocking and testing. -/// -internal interface ICrewAIEnterpriseClient -{ - Task GetInputsAsync(CancellationToken cancellationToken = default); - Task KickoffAsync( - object? inputs, - string? taskWebhookUrl = null, - string? stepWebhookUrl = null, - string? crewWebhookUrl = null, - CancellationToken cancellationToken = default); - Task GetStatusAsync(string taskId, CancellationToken cancellationToken = default); -} - -/// -/// A client for interacting with the CrewAI Enterprise API. -/// -internal class CrewAIEnterpriseClient : ICrewAIEnterpriseClient -{ - private readonly Uri _endpoint; - private readonly Func> _authTokenProvider; - private readonly IHttpClientFactory? _httpClientFactory; - - public CrewAIEnterpriseClient(Uri endpoint, Func> authTokenProvider, IHttpClientFactory? clientFactory = null) - { - Verify.NotNull(endpoint, nameof(endpoint)); - Verify.NotNull(authTokenProvider, nameof(authTokenProvider)); - - this._endpoint = endpoint; - this._authTokenProvider = authTokenProvider; - this._httpClientFactory = clientFactory; - } - - /// - /// Get the inputs required for the Crew to kickoff. - /// - /// A - /// Aninstance of describing the required inputs. - /// - public async Task GetInputsAsync(CancellationToken cancellationToken = default) - { - try - { - using var client = await this.CreateHttpClientAsync().ConfigureAwait(false); - using var requestMessage = HttpRequest.CreateGetRequest("/inputs"); - using var response = await client.SendWithSuccessCheckAsync(requestMessage, cancellationToken) - .ConfigureAwait(false); - - var body = await response.Content.ReadAsStringWithExceptionMappingAsync(cancellationToken) - .ConfigureAwait(false); - - var requirements = JsonSerializer.Deserialize(body); - - return requirements ?? throw new KernelException(message: $"Failed to deserialize requirements from CrewAI. 
Response: {body}"); - } - catch (Exception ex) when (ex is not KernelException) - { - throw new KernelException(message: "Failed to get required inputs for CrewAI Crew.", innerException: ex); - } - } - - /// - /// Kickoff the Crew. - /// - /// An object containing key value pairs matching the required inputs of the Crew. - /// The task level webhook Uri. - /// The step level webhook Uri. - /// The crew level webhook Uri. - /// A - /// A string containing the Id of the started Crew Task. - public async Task KickoffAsync( - object? inputs, - string? taskWebhookUrl = null, - string? stepWebhookUrl = null, - string? crewWebhookUrl = null, - CancellationToken cancellationToken = default) - { - try - { - var content = new - { - inputs, - taskWebhookUrl, - stepWebhookUrl, - crewWebhookUrl - }; - - using var client = await this.CreateHttpClientAsync().ConfigureAwait(false); - using var requestMessage = HttpRequest.CreatePostRequest("/kickoff", content); - using var response = await client.SendWithSuccessCheckAsync(requestMessage, cancellationToken) - .ConfigureAwait(false); - - var body = await response.Content.ReadAsStringWithExceptionMappingAsync(cancellationToken) - .ConfigureAwait(false); - - var kickoffResponse = JsonSerializer.Deserialize(body); - return kickoffResponse ?? throw new KernelException(message: $"Failed to deserialize kickoff response from CrewAI. Response: {body}"); - } - catch (Exception ex) when (ex is not KernelException) - { - throw new KernelException(message: "Failed to kickoff CrewAI Crew.", innerException: ex); - } - } - - /// - /// Get the status of the Crew Task. - /// - /// The Id of the task. - /// A - /// A string containing the status or final result of the Crew task. - /// - public async Task GetStatusAsync(string taskId, CancellationToken cancellationToken = default) - { - try - { - using var client = await this.CreateHttpClientAsync().ConfigureAwait(false); - using var requestMessage = HttpRequest.CreateGetRequest($"/status/{taskId}"); - using var response = await client.SendWithSuccessCheckAsync(requestMessage, cancellationToken) - .ConfigureAwait(false); - - var body = await response.Content.ReadAsStringWithExceptionMappingAsync(cancellationToken) - .ConfigureAwait(false); - - var statusResponse = JsonSerializer.Deserialize(body); - - return statusResponse ?? throw new KernelException(message: $"Failed to deserialize status response from CrewAI. Response: {body}"); - } - catch (Exception ex) when (ex is not KernelException) - { - throw new KernelException(message: "Failed to status of CrewAI Crew.", innerException: ex); - } - } - - #region Private Methods - - private async Task CreateHttpClientAsync() - { - var authToken = await this._authTokenProvider().ConfigureAwait(false); - - if (string.IsNullOrWhiteSpace(authToken)) - { - throw new KernelException(message: "Failed to get auth token for CrewAI."); - } - - var client = this._httpClientFactory?.CreateClient() ?? new(); - client.DefaultRequestHeaders.Add("Authorization", $"Bearer {authToken}"); - client.BaseAddress = this._endpoint; - return client; - } - - #endregion -} diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIStateEnumConverter.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIStateEnumConverter.cs deleted file mode 100644 index 93e65b166d21..000000000000 --- a/dotnet/src/Plugins/Plugins.AI/CrewAI/Client/CrewAIStateEnumConverter.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI; - -#pragma warning disable CA1812 // Avoid uninstantiated internal classes -internal sealed class CrewAIStateEnumConverter : JsonConverter -{ - public override CrewAIKickoffState Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) - { - string? stringValue = reader.GetString(); - return stringValue?.ToUpperInvariant() switch - { - "PENDING" => CrewAIKickoffState.Pending, - "STARTED" => CrewAIKickoffState.Started, - "RUNNING" => CrewAIKickoffState.Running, - "SUCCESS" => CrewAIKickoffState.Success, - "FAILED" => CrewAIKickoffState.Failed, - "FAILURE" => CrewAIKickoffState.Failure, - "NOT FOUND" => CrewAIKickoffState.NotFound, - _ => throw new KernelException("Failed to parse Crew AI kickoff state.") - }; - } - - public override void Write(Utf8JsonWriter writer, CrewAIKickoffState value, JsonSerializerOptions options) - { - string stringValue = value switch - { - CrewAIKickoffState.Pending => "PENDING", - CrewAIKickoffState.Started => "STARTED", - CrewAIKickoffState.Running => "RUNNING", - CrewAIKickoffState.Success => "SUCCESS", - CrewAIKickoffState.Failed => "FAILED", - CrewAIKickoffState.Failure => "FAILURE", - CrewAIKickoffState.NotFound => "NOT FOUND", - _ => throw new KernelException("Failed to parse Crew AI kickoff state.") - }; - writer.WriteStringValue(stringValue); - } -} -#pragma warning restore CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIEnterprise.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIEnterprise.cs deleted file mode 100644 index 615f6a14c832..000000000000 --- a/dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIEnterprise.cs +++ /dev/null @@ -1,282 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Linq; -using System.Net.Http; -using System.Text.Json; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; - -namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI; - -/// -/// A plugin for interacting with the a CrewAI Crew via the Enterprise APIs. -/// -public class CrewAIEnterprise -{ - private readonly ICrewAIEnterpriseClient _crewClient; - private readonly ILogger _logger; - private readonly TimeSpan _pollingInterval; - - /// - /// The name of the kickoff function. - /// - public const string KickoffFunctionName = "KickoffCrew"; - - /// - /// The name of the kickoff and wait function. - /// - public const string KickoffAndWaitFunctionName = "KickoffAndWait"; - - /// - /// Initializes a new instance of the class. - /// - /// The base URI of the CrewAI Crew - /// Optional provider for auth token generation. - /// The HTTP client factory. - /// The logger factory. - /// Defines the delay time between status calls when pollin for a kickoff to complete. - public CrewAIEnterprise(Uri endpoint, Func> authTokenProvider, IHttpClientFactory? httpClientFactory = null, ILoggerFactory? loggerFactory = null, TimeSpan? pollingInterval = default) - { - Verify.NotNull(endpoint, nameof(endpoint)); - Verify.NotNull(authTokenProvider, nameof(authTokenProvider)); - - this._crewClient = new CrewAIEnterpriseClient(endpoint, authTokenProvider, httpClientFactory); - this._logger = loggerFactory?.CreateLogger(typeof(CrewAIEnterprise)) ?? NullLogger.Instance; - this._pollingInterval = pollingInterval ?? 
TimeSpan.FromSeconds(1); - } - - /// - /// Internal constructor used for testing purposes. - /// - internal CrewAIEnterprise(ICrewAIEnterpriseClient crewClient, ILoggerFactory? loggerFactory = null) - { - Verify.NotNull(crewClient, nameof(crewClient)); - this._crewClient = crewClient; - this._logger = loggerFactory?.CreateLogger(typeof(CrewAIEnterprise)) ?? NullLogger.Instance; - } - - /// - /// Kicks off (starts) a CrewAI Crew with the given inputs and callbacks. - /// - /// An object containing key value pairs matching the required inputs of the Crew. - /// The task level webhook Uri. - /// The step level webhook Uri. - /// The crew level webhook Uri. - /// The Id of the scheduled kickoff. - /// - public async Task KickoffAsync( - object? inputs, - Uri? taskWebhookUrl = null, - Uri? stepWebhookUrl = null, - Uri? crewWebhookUrl = null) - { - try - { - CrewAIKickoffResponse kickoffTask = await this._crewClient.KickoffAsync( - inputs: inputs, - taskWebhookUrl: taskWebhookUrl?.AbsoluteUri, - stepWebhookUrl: stepWebhookUrl?.AbsoluteUri, - crewWebhookUrl: crewWebhookUrl?.AbsoluteUri) - .ConfigureAwait(false); - - this._logger.LogInformation("CrewAI Crew kicked off with Id: {KickoffId}", kickoffTask.KickoffId); - return kickoffTask.KickoffId; - } - catch (Exception ex) - { - throw new KernelException(message: "Failed to kickoff CrewAI Crew.", innerException: ex); - } - } - - /// - /// Gets the current status of the CrewAI Crew kickoff. - /// - /// The Id of the Crew kickoff. - /// A - /// " - [KernelFunction] - [Description("Gets the current status of the CrewAI Crew kickoff.")] - public async Task GetCrewKickoffStatusAsync([Description("The Id of the kickoff")] string kickoffId) - { - Verify.NotNullOrWhiteSpace(kickoffId, nameof(kickoffId)); - - try - { - CrewAIStatusResponse statusResponse = await this._crewClient.GetStatusAsync(kickoffId).ConfigureAwait(false); - - this._logger.LogInformation("CrewAI Crew status for kickoff Id: {KickoffId} is {Status}", kickoffId, statusResponse.State); - return statusResponse; - } - catch (Exception ex) - { - throw new KernelException(message: $"Failed to get status of CrewAI Crew with kickoff Id: {kickoffId}.", innerException: ex); - } - } - - /// - /// Waits for the Crew kickoff to complete and returns the result. - /// - /// The Id of the crew kickoff. - /// The result of the Crew kickoff. - /// - [KernelFunction] - [Description("Waits for the Crew kickoff to complete and returns the result.")] - public async Task WaitForCrewCompletionAsync([Description("The Id of the kickoff")] string kickoffId) - { - Verify.NotNullOrWhiteSpace(kickoffId, nameof(kickoffId)); - - try - { - CrewAIStatusResponse? statusResponse = null; - var status = CrewAIKickoffState.Pending; - do - { - this._logger.LogInformation("Waiting for CrewAI Crew with kickoff Id: {KickoffId} to complete. Current state: {Status}", kickoffId, status); - await Task.Delay(TimeSpan.FromSeconds(1)).ConfigureAwait(false); - statusResponse = await this._crewClient.GetStatusAsync(kickoffId).ConfigureAwait(false); - status = statusResponse.State; - } - while (!this.IsTerminalState(status)); - - this._logger.LogInformation("CrewAI Crew with kickoff Id: {KickoffId} completed with status: {Status}", kickoffId, statusResponse.State); - - return status switch - { - CrewAIKickoffState.Failed => throw new KernelException(message: $"CrewAI Crew failed with error: {statusResponse.Result}"), - CrewAIKickoffState.Success => statusResponse.Result ?? 
string.Empty, - _ => throw new KernelException(message: "Failed to parse unexpected response from CrewAI status response."), - }; - } - catch (Exception ex) - { - throw new KernelException(message: $"Failed to wait for completion of CrewAI Crew with kickoff Id: {kickoffId}.", innerException: ex); - } - } - - /// - /// Creates a that can be used to invoke the CrewAI Crew. - /// - /// The name of the - /// The description of the - /// The definitions of the Crew's required inputs. - /// The task level webhook Uri - /// The step level webhook Uri - /// The crew level webhook Uri - /// A that can invoke the Crew. - /// - public KernelPlugin CreateKernelPlugin( - string name, - string description, - IEnumerable? inputMetadata, - Uri? taskWebhookUrl = null, - Uri? stepWebhookUrl = null, - Uri? crewWebhookUrl = null) - { - var options = new KernelFunctionFromMethodOptions() - { - Parameters = inputMetadata?.Select(i => new KernelParameterMetadata(i.Name) { Description = i.Description, IsRequired = true, ParameterType = i.Type }) ?? [], - ReturnParameter = new() { ParameterType = typeof(string) }, - }; - - // Define the kernel function implementation for kickoff - [KernelFunction(KickoffFunctionName)] - [Description("kicks off the CrewAI Crew and returns the Id of the scheduled kickoff.")] - async Task KickoffAsync(KernelArguments arguments) - { - Dictionary args = BuildArguments(inputMetadata, arguments); - - return await this.KickoffAsync( - inputs: args, - taskWebhookUrl: taskWebhookUrl, - stepWebhookUrl: stepWebhookUrl, - crewWebhookUrl: crewWebhookUrl) - .ConfigureAwait(false); - } - - // Define the kernel function implementation for kickoff and wait for result - [KernelFunction(KickoffAndWaitFunctionName)] - [Description("kicks off the CrewAI Crew, waits for it to complete, and returns the result.")] - async Task KickoffAndWaitAsync(KernelArguments arguments) - { - Dictionary args = BuildArguments(inputMetadata, arguments); - - var kickoffId = await this.KickoffAsync( - inputs: args, - taskWebhookUrl: taskWebhookUrl, - stepWebhookUrl: stepWebhookUrl, - crewWebhookUrl: crewWebhookUrl) - .ConfigureAwait(false); - - return await this.WaitForCrewCompletionAsync(kickoffId).ConfigureAwait(false); - } - - return KernelPluginFactory.CreateFromFunctions( - name, - description, - [ - KernelFunctionFactory.CreateFromMethod(KickoffAsync, new(), options), - KernelFunctionFactory.CreateFromMethod(KickoffAndWaitAsync, new(), options), - KernelFunctionFactory.CreateFromMethod(this.GetCrewKickoffStatusAsync), - KernelFunctionFactory.CreateFromMethod(this.WaitForCrewCompletionAsync) - ]); - } - - #region Private Methods - - /// - /// Determines if the Crew kikoff state is terminal. - /// - /// The state of the crew kickoff - /// A indicating if the state is a terminal state. - private bool IsTerminalState(CrewAIKickoffState state) - { - return state == CrewAIKickoffState.Failed || state == CrewAIKickoffState.Failure || state == CrewAIKickoffState.Success || state == CrewAIKickoffState.NotFound; - } - - private static Dictionary BuildArguments(IEnumerable? inputMetadata, KernelArguments arguments) - { - // Extract the required arguments from the KernelArguments by name - Dictionary args = []; - if (inputMetadata is not null) - { - foreach (var input in inputMetadata) - { - // If a required argument is missing, throw an exception - if (!arguments.TryGetValue(input.Name, out object? 
value) || value is null || value is not string strValue) - { - throw new KernelException(message: $"Missing required input '{input.Name}' for CrewAI."); - } - - // Since this KernelFunction does not have explicit parameters all the relevant inputs are passed as strings. - // We need to convert the inputs to the expected types. - if (input.Type == typeof(string)) - { - args.Add(input.Name, value); - } - else - { - // Try to get a converter for the input type - var converter = TypeConverterFactory.GetTypeConverter(input.Type); - if (converter is not null) - { - args.Add(input.Name, converter.ConvertFrom(value)); - } - else - { - // Try to deserialize the input as a JSON object - var objValue = JsonSerializer.Deserialize(strValue, input.Type); - args.Add(input.Name, objValue); - } - } - } - } - - return args; - } - - #endregion -} diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIInputMetadata.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIInputMetadata.cs deleted file mode 100644 index dab170ceabf5..000000000000 --- a/dotnet/src/Plugins/Plugins.AI/CrewAI/CrewAIInputMetadata.cs +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; - -namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI; - -/// -/// The metadata associated with an input required by the CrewAI Crew. This metadata provides the information required to effectively describe the inputs to an LLM. -/// -/// The name of the input -/// The description of the input. This is used to help the LLM understand the correct usage of the input. -/// The of the input. -public record CrewAIInputMetadata(string Name, string Description, Type Type) -{ -} diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffResponse.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffResponse.cs deleted file mode 100644 index 949aea64a800..000000000000 --- a/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffResponse.cs +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI; - -/// -/// Models the response object of a call to kickoff a CrewAI Crew. -/// -#pragma warning disable CA1812 // Avoid uninstantiated internal classes -internal sealed class CrewAIKickoffResponse -{ - [JsonPropertyName("kickoff_id")] - public string KickoffId { get; set; } = string.Empty; -} -#pragma warning restore CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffState.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffState.cs deleted file mode 100644 index 7ef9b9688928..000000000000 --- a/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIKickoffState.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI; - -/// -/// Represents the state of a CrewAI Crew kickoff. -/// -public enum CrewAIKickoffState -{ - /// - /// The kickoff is pending and has not started yet. - /// - Pending, - - /// - /// The kickoff has started. - /// - Started, - - /// - /// The kickoff is currently running. - /// - Running, - - /// - /// The kickoff completed successfully. - /// - Success, - - /// - /// The kickoff failed. - /// - Failed, - - /// - /// The kickoff has failed. - /// - Failure, - - /// - /// The kickoff was not found. 
- /// - NotFound -} diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIRequiredInputs.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIRequiredInputs.cs deleted file mode 100644 index b9154e8b334c..000000000000 --- a/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIRequiredInputs.cs +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI; - -/// -/// Represents the requirements for kicking off a CrewAI Crew. -/// -public class CrewAIRequiredInputs -{ - /// - /// The inputs required for the Crew. - /// - [JsonPropertyName("inputs")] - public IList Inputs { get; set; } = []; -} diff --git a/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIStatusResponse.cs b/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIStatusResponse.cs deleted file mode 100644 index 5d31a2740f09..000000000000 --- a/dotnet/src/Plugins/Plugins.AI/CrewAI/Models/CrewAIStatusResponse.cs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Plugins.AI.CrewAI; - -/// -/// Models the response object of a call to get the state of a CrewAI Crew kickoff. -/// -public class CrewAIStatusResponse -{ - /// - /// The current state of the CrewAI Crew kickoff. - /// - [JsonPropertyName("state")] - [JsonConverter(typeof(CrewAIStateEnumConverter))] - public CrewAIKickoffState State { get; set; } - - /// - /// The result of the CrewAI Crew kickoff. - /// - [JsonPropertyName("result")] - public string? Result { get; set; } - - /// - /// The last step of the CrewAI Crew kickoff. - /// - [JsonPropertyName("last_step")] - public Dictionary? LastStep { get; set; } -} diff --git a/dotnet/src/Plugins/Plugins.AI/Plugins.AI.csproj b/dotnet/src/Plugins/Plugins.AI/Plugins.AI.csproj deleted file mode 100644 index 472d0d6b3c2f..000000000000 --- a/dotnet/src/Plugins/Plugins.AI/Plugins.AI.csproj +++ /dev/null @@ -1,34 +0,0 @@ - - - - - Microsoft.SemanticKernel.Plugins.AI - $(AssemblyName) - net8.0;netstandard2.0 - alpha - - - - - - - - Semantic Kernel - AI Plugins - Semantic Kernel AI plugins. - - - - - - - - - - - - - - - - - diff --git a/dotnet/src/Plugins/Plugins.Document/FileSystem/IFileSystemConnector.cs b/dotnet/src/Plugins/Plugins.Document/FileSystem/IFileSystemConnector.cs index ce4a0d88856b..bcb274a23808 100644 --- a/dotnet/src/Plugins/Plugins.Document/FileSystem/IFileSystemConnector.cs +++ b/dotnet/src/Plugins/Plugins.Document/FileSystem/IFileSystemConnector.cs @@ -16,21 +16,21 @@ public interface IFileSystemConnector /// /// Path to the file. /// The to monitor for cancellation requests. The default is . - Task GetFileContentStreamAsync(string filePath, CancellationToken cancellationToken = default); + public Task GetFileContentStreamAsync(string filePath, CancellationToken cancellationToken = default); /// /// Get a writeable stream to an existing file. /// /// Path to file. /// The to monitor for cancellation requests. The default is . - Task GetWriteableFileStreamAsync(string filePath, CancellationToken cancellationToken = default); + public Task GetWriteableFileStreamAsync(string filePath, CancellationToken cancellationToken = default); /// /// Create a new file and get a writeable stream to it. /// /// Path to file. /// The to monitor for cancellation requests. The default is . 
- Task CreateFileAsync(string filePath, CancellationToken cancellationToken = default); + public Task CreateFileAsync(string filePath, CancellationToken cancellationToken = default); /// /// Determine whether a file exists at the specified path. @@ -38,5 +38,5 @@ public interface IFileSystemConnector /// Path to file. /// The to monitor for cancellation requests. The default is . /// True if file exists, false otherwise. - Task FileExistsAsync(string filePath, CancellationToken cancellationToken = default); + public Task FileExistsAsync(string filePath, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/Plugins/Plugins.Document/IDocumentConnector.cs b/dotnet/src/Plugins/Plugins.Document/IDocumentConnector.cs index 586129e4b84e..82934b86cecf 100644 --- a/dotnet/src/Plugins/Plugins.Document/IDocumentConnector.cs +++ b/dotnet/src/Plugins/Plugins.Document/IDocumentConnector.cs @@ -14,18 +14,18 @@ public interface IDocumentConnector /// /// Document stream /// String containing all text from the document. - string ReadText(Stream stream); + public string ReadText(Stream stream); /// /// Initialize a document from the given stream. /// /// IO stream - void Initialize(Stream stream); + public void Initialize(Stream stream); /// /// Append the specified text to the document. /// /// Document stream /// String of text to write to the document. - void AppendText(Stream stream, string text); + public void AppendText(Stream stream, string text); } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AIFunctionKernelFunction.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AIFunctionKernelFunction.cs index 7ab32b31b869..2a175afb348d 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AIFunctionKernelFunction.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AIFunctionKernelFunction.cs @@ -2,9 +2,7 @@ using System; using System.Collections.Generic; -using System.ComponentModel; using System.Linq; -using System.Reflection; using System.Text.Json; using System.Threading; using System.Threading.Tasks; @@ -14,7 +12,8 @@ namespace Microsoft.SemanticKernel.ChatCompletion; /// Provides a that wraps an . /// -/// The implementation of only manufactures these to pass along to the underlying +/// The implementation should largely be unused, other than for its . The implementation of +/// only manufactures these to pass along to the underlying /// with autoInvoke:false, which means the /// implementation shouldn't be invoking these functions at all. As such, the and /// methods both unconditionally throw, even though they could be implemented. @@ -24,15 +23,28 @@ internal sealed class AIFunctionKernelFunction : KernelFunction private readonly AIFunction _aiFunction; public AIFunctionKernelFunction(AIFunction aiFunction) : - base(aiFunction.Name, - aiFunction.Description, - MapParameterMetadata(aiFunction), - aiFunction.JsonSerializerOptions, + base(aiFunction.Metadata.Name, + aiFunction.Metadata.Description, + aiFunction.Metadata.Parameters.Select(p => new KernelParameterMetadata(p.Name, AbstractionsJsonContext.Default.Options) + { + Description = p.Description, + DefaultValue = p.DefaultValue, + IsRequired = p.IsRequired, + ParameterType = p.ParameterType, + Schema = + p.Schema is JsonElement je ? new KernelJsonSchema(je) : + p.Schema is string s ? 
new KernelJsonSchema(JsonSerializer.Deserialize(s, AbstractionsJsonContext.Default.JsonElement)) : + null, + }).ToList(), + AbstractionsJsonContext.Default.Options, new KernelReturnParameterMetadata(AbstractionsJsonContext.Default.Options) { - Description = aiFunction.UnderlyingMethod?.ReturnParameter.GetCustomAttribute()?.Description, - ParameterType = aiFunction.UnderlyingMethod?.ReturnParameter.ParameterType, - Schema = new KernelJsonSchema(AIJsonUtilities.CreateJsonSchema(aiFunction.UnderlyingMethod?.ReturnParameter.ParameterType)), + Description = aiFunction.Metadata.ReturnParameter.Description, + ParameterType = aiFunction.Metadata.ReturnParameter.ParameterType, + Schema = + aiFunction.Metadata.ReturnParameter.Schema is JsonElement je ? new KernelJsonSchema(je) : + aiFunction.Metadata.ReturnParameter.Schema is string s ? new KernelJsonSchema(JsonSerializer.Deserialize(s, AbstractionsJsonContext.Default.JsonElement)) : + null, }) { this._aiFunction = aiFunction; @@ -61,30 +73,4 @@ protected override IAsyncEnumerable InvokeStreamingCoreAsync(K // This should never be invoked, as instances are always passed with autoInvoke:false. throw new NotSupportedException(); } - - private static IReadOnlyList MapParameterMetadata(AIFunction aiFunction) - { - if (!aiFunction.JsonSchema.TryGetProperty("properties", out JsonElement properties)) - { - return Array.Empty(); - } - - List kernelParams = []; - var parameterInfos = aiFunction.UnderlyingMethod?.GetParameters().ToDictionary(p => p.Name!, StringComparer.Ordinal); - foreach (var param in properties.EnumerateObject()) - { - ParameterInfo? paramInfo = null; - parameterInfos?.TryGetValue(param.Name, out paramInfo); - kernelParams.Add(new(param.Name, aiFunction.JsonSerializerOptions) - { - Description = param.Value.TryGetProperty("description", out JsonElement description) ? description.GetString() : null, - DefaultValue = param.Value.TryGetProperty("default", out JsonElement defaultValue) ? defaultValue : null, - IsRequired = param.Value.TryGetProperty("required", out JsonElement required) && required.GetBoolean(), - ParameterType = paramInfo?.ParameterType, - Schema = param.Value.TryGetProperty("schema", out JsonElement schema) ? new KernelJsonSchema(schema) : null, - }); - } - - return kernelParams; - } } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs index d4dd082dd98b..05f473b1b792 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs @@ -11,11 +11,6 @@ namespace Microsoft.SemanticKernel.ChatCompletion; /// public readonly struct AuthorRole : IEquatable { - /// - /// The role that instructs or sets the behavior of the assistant. - /// - public static AuthorRole Developer { get; } = new("developer"); - /// /// The role that instructs or sets the behavior of the assistant. /// diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatClientChatCompletionService.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatClientChatCompletionService.cs index 419dca381015..7447b230ec63 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatClientChatCompletionService.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatClientChatCompletionService.cs @@ -34,12 +34,12 @@ public ChatClientChatCompletionService(IChatClient chatClient, IServiceProvider? 
var attrs = new Dictionary(); this.Attributes = new ReadOnlyDictionary(attrs); - var metadata = chatClient.GetService(); - if (metadata?.ProviderUri is not null) + var metadata = chatClient.Metadata; + if (metadata.ProviderUri is not null) { attrs[AIServiceExtensions.EndpointKey] = metadata.ProviderUri.ToString(); } - if (metadata?.ModelId is not null) + if (metadata.ModelId is not null) { attrs[AIServiceExtensions.ModelIdKey] = metadata.ModelId; } @@ -57,7 +57,7 @@ public async Task> GetChatMessageContentsAsync var messageList = ChatCompletionServiceExtensions.ToChatMessageList(chatHistory); var currentSize = messageList.Count; - var completion = await this._chatClient.GetResponseAsync( + var completion = await this._chatClient.CompleteAsync( messageList, ToChatOptions(executionSettings, kernel), cancellationToken).ConfigureAwait(false); @@ -76,7 +76,7 @@ public async IAsyncEnumerable GetStreamingChatMessa { Verify.NotNull(chatHistory); - await foreach (var update in this._chatClient.GetStreamingResponseAsync( + await foreach (var update in this._chatClient.CompleteStreamingAsync( ChatCompletionServiceExtensions.ToChatMessageList(chatHistory), ToChatOptions(executionSettings, kernel), cancellationToken).ConfigureAwait(false)) @@ -158,19 +158,13 @@ public async IAsyncEnumerable GetStreamingChatMessa else if (entry.Key.Equals("response_format", StringComparison.OrdinalIgnoreCase) && entry.Value is { } responseFormat) { - if (TryConvert(responseFormat, out string? responseFormatString)) + options.ResponseFormat = responseFormat switch { - options.ResponseFormat = responseFormatString switch - { - "text" => ChatResponseFormat.Text, - "json_object" => ChatResponseFormat.Json, - _ => null, - }; - } - else - { - options.ResponseFormat = responseFormat is JsonElement e ? ChatResponseFormat.ForJsonSchema(e) : null; - } + "text" => ChatResponseFormat.Text, + "json_object" => ChatResponseFormat.Json, + JsonElement e => ChatResponseFormat.ForJsonSchema(e), + _ => null, + }; } else { @@ -274,9 +268,9 @@ static bool TryConvert(object? value, [NotNullWhen(true)] out T? result) } } - /// Converts a to a . + /// Converts a to a . /// This conversion should not be necessary once SK eventually adopts the shared content types. - private static StreamingChatMessageContent ToStreamingChatMessageContent(ChatResponseUpdate update) + private static StreamingChatMessageContent ToStreamingChatMessageContent(StreamingChatCompletionUpdate update) { StreamingChatMessageContent content = new( update.Role is not null ? new AuthorRole(update.Role.Value.Value) : null, diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceChatClient.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceChatClient.cs index 862239ccd505..308dbc64e183 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceChatClient.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceChatClient.cs @@ -35,7 +35,7 @@ public ChatCompletionServiceChatClient(IChatCompletionService chatCompletionServ public ChatClientMetadata Metadata { get; } /// - public async Task GetResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public async Task CompleteAsync(IList chatMessages, ChatOptions? 
options = null, CancellationToken cancellationToken = default) { Verify.NotNull(chatMessages); @@ -53,7 +53,7 @@ public ChatCompletionServiceChatClient(IChatCompletionService chatCompletionServ } /// - public async IAsyncEnumerable GetStreamingResponseAsync(IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable CompleteStreamingAsync(IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(chatMessages); @@ -82,7 +82,6 @@ public void Dispose() serviceKey is not null ? null : serviceType.IsInstanceOfType(this) ? this : serviceType.IsInstanceOfType(this._chatCompletionService) ? this._chatCompletionService : - serviceType.IsInstanceOfType(this.Metadata) ? this.Metadata : null; } @@ -192,11 +191,11 @@ public void Dispose() return settings; } - /// Converts a to a . + /// Converts a to a . /// This conversion should not be necessary once SK eventually adopts the shared content types. - private static ChatResponseUpdate ToStreamingChatCompletionUpdate(StreamingChatMessageContent content) + private static StreamingChatCompletionUpdate ToStreamingChatCompletionUpdate(StreamingChatMessageContent content) { - ChatResponseUpdate update = new() + StreamingChatCompletionUpdate update = new() { AdditionalProperties = content.Metadata is not null ? new AdditionalPropertiesDictionary(content.Metadata) : null, AuthorName = content.AuthorName, diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs index cf5834725700..ef8b0b56c7f9 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs @@ -169,15 +169,15 @@ internal static ChatMessage ToChatMessage(ChatMessageContent content) case Microsoft.SemanticKernel.ImageContent ic: aiContent = - ic.DataUri is not null ? new Microsoft.Extensions.AI.DataContent(ic.DataUri, ic.MimeType ?? "image/*") : - ic.Uri is not null ? new Microsoft.Extensions.AI.DataContent(ic.Uri, ic.MimeType ?? "image/*") : + ic.DataUri is not null ? new Microsoft.Extensions.AI.ImageContent(ic.DataUri, ic.MimeType) : + ic.Uri is not null ? new Microsoft.Extensions.AI.ImageContent(ic.Uri, ic.MimeType) : null; break; case Microsoft.SemanticKernel.AudioContent ac: aiContent = - ac.DataUri is not null ? new Microsoft.Extensions.AI.DataContent(ac.DataUri, ac.MimeType ?? "audio/*") : - ac.Uri is not null ? new Microsoft.Extensions.AI.DataContent(ac.Uri, ac.MimeType ?? "audio/*") : + ac.DataUri is not null ? new Microsoft.Extensions.AI.AudioContent(ac.DataUri, ac.MimeType) : + ac.Uri is not null ? new Microsoft.Extensions.AI.AudioContent(ac.Uri, ac.MimeType) : null; break; @@ -193,7 +193,7 @@ internal static ChatMessage ToChatMessage(ChatMessageContent content) break; case Microsoft.SemanticKernel.FunctionResultContent frc: - aiContent = new Microsoft.Extensions.AI.FunctionResultContent(frc.CallId ?? string.Empty, frc.Result); + aiContent = new Microsoft.Extensions.AI.FunctionResultContent(frc.CallId ?? string.Empty, frc.FunctionName ?? string.Empty, frc.Result); break; } @@ -211,13 +211,13 @@ internal static ChatMessage ToChatMessage(ChatMessageContent content) /// Converts a to a . 
/// This conversion should not be necessary once SK eventually adopts the shared content types. - internal static ChatMessageContent ToChatMessageContent(ChatMessage message, Microsoft.Extensions.AI.ChatResponse? response = null) + internal static ChatMessageContent ToChatMessageContent(ChatMessage message, Microsoft.Extensions.AI.ChatCompletion? completion = null) { ChatMessageContent result = new() { - ModelId = response?.ModelId, + ModelId = completion?.ModelId, AuthorName = message.AuthorName, - InnerContent = response?.RawRepresentation ?? message.RawRepresentation, + InnerContent = completion?.RawRepresentation ?? message.RawRepresentation, Metadata = message.AdditionalProperties, Role = new AuthorRole(message.Role.Value), }; @@ -231,20 +231,20 @@ internal static ChatMessageContent ToChatMessageContent(ChatMessage message, Mic resultContent = new Microsoft.SemanticKernel.TextContent(tc.Text); break; - case Microsoft.Extensions.AI.DataContent dc when dc.MediaTypeStartsWith("image/"): - resultContent = dc.Data is not null ? - new Microsoft.SemanticKernel.ImageContent(dc.Uri) : - new Microsoft.SemanticKernel.ImageContent(new Uri(dc.Uri)); + case Microsoft.Extensions.AI.ImageContent ic: + resultContent = ic.ContainsData ? + new Microsoft.SemanticKernel.ImageContent(ic.Uri) : + new Microsoft.SemanticKernel.ImageContent(new Uri(ic.Uri)); break; - case Microsoft.Extensions.AI.DataContent dc when dc.MediaTypeStartsWith("audio/"): - resultContent = dc.Data is not null ? - new Microsoft.SemanticKernel.AudioContent(dc.Uri) : - new Microsoft.SemanticKernel.AudioContent(new Uri(dc.Uri)); + case Microsoft.Extensions.AI.AudioContent ac: + resultContent = ac.ContainsData ? + new Microsoft.SemanticKernel.AudioContent(ac.Uri) : + new Microsoft.SemanticKernel.AudioContent(new Uri(ac.Uri)); break; case Microsoft.Extensions.AI.DataContent dc: - resultContent = dc.Data is not null ? + resultContent = dc.ContainsData ? new Microsoft.SemanticKernel.BinaryContent(dc.Uri) : new Microsoft.SemanticKernel.BinaryContent(new Uri(dc.Uri)); break; @@ -254,7 +254,7 @@ internal static ChatMessageContent ToChatMessageContent(ChatMessage message, Mic break; case Microsoft.Extensions.AI.FunctionResultContent frc: - resultContent = new Microsoft.SemanticKernel.FunctionResultContent(callId: frc.CallId, result: frc.Result); + resultContent = new Microsoft.SemanticKernel.FunctionResultContent(frc.Name, null, frc.CallId, frc.Result); break; } @@ -262,7 +262,7 @@ internal static ChatMessageContent ToChatMessageContent(ChatMessage message, Mic { resultContent.Metadata = content.AdditionalProperties; resultContent.InnerContent = content.RawRepresentation; - resultContent.ModelId = response?.ModelId; + resultContent.ModelId = completion?.ModelId; result.Items.Add(resultContent); } } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs index 22968c47ea38..fda7be0d0c8c 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs @@ -28,26 +28,15 @@ public ChatHistory() } /// - /// Creates a new instance of the with a first message in the provided . - /// If not role is provided then the first message will default to role. - /// - /// The text message to add to the first message in chat history. - /// The role to add as the first message. 
- public ChatHistory(string message, AuthorRole role) - { - Verify.NotNullOrWhiteSpace(message); - - this._messages = []; - this.Add(new ChatMessageContent(role, message)); - } - - /// - /// Creates a new instance of the class with a system message. + /// Creates a new instance of the class with a system message /// /// The system message to add to the history. public ChatHistory(string systemMessage) - : this(systemMessage, AuthorRole.System) { + Verify.NotNullOrWhiteSpace(systemMessage); + + this._messages = []; + this.AddSystemMessage(systemMessage); } /// Initializes the history will all of the specified messages. @@ -108,13 +97,6 @@ public void AddAssistantMessage(string content) => public void AddSystemMessage(string content) => this.AddMessage(AuthorRole.System, content); - /// - /// Add a developer message to the chat history - /// - /// Message content - public void AddDeveloperMessage(string content) => - this.AddMessage(AuthorRole.Developer, content); - /// Adds a message to the history. /// The message to add. /// is null. diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistoryExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistoryExtensions.cs deleted file mode 100644 index faf11b2fe450..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistoryExtensions.cs +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel.ChatCompletion; - -/// -/// Extension methods for chat history. -/// -[Experimental("SKEXP0001")] -public static class ChatHistoryExtensions -{ - /// - /// Process history reduction and mutate the provided history in place. - /// - /// The source history - /// The target reducer - /// The to monitor for cancellation requests. The default is . - /// True if reduction has occurred. - /// - /// Using the existing for a reduction in collection size eliminates the need - /// for re-allocation (of memory). - /// - public static async Task ReduceInPlaceAsync(this ChatHistory chatHistory, IChatHistoryReducer? reducer, CancellationToken cancellationToken) - { - if (reducer is null) - { - return false; - } - - IEnumerable? reducedHistory = await reducer.ReduceAsync(chatHistory, cancellationToken).ConfigureAwait(false); - - if (reducedHistory is null) - { - return false; - } - - // Mutate the history in place - ChatMessageContent[] reduced = reducedHistory.ToArray(); - chatHistory.Clear(); - chatHistory.AddRange(reduced); - - return true; - } - - /// - /// Returns the reduced history using the provided reducer without mutating the source history. - /// - /// The source history - /// The target reducer - /// The to monitor for cancellation requests. The default is . - public static async Task> ReduceAsync(this IReadOnlyList chatHistory, IChatHistoryReducer? reducer, CancellationToken cancellationToken) - { - if (reducer is not null) - { - IEnumerable? reducedHistory = await reducer.ReduceAsync(chatHistory, cancellationToken).ConfigureAwait(false); - chatHistory = reducedHistory?.ToArray() ?? chatHistory; - } - - return chatHistory; - } - - /// - /// Returns the reduced history using the provided reducer without mutating the source history. - /// - /// The source history - /// The target reducer - /// The to monitor for cancellation requests. The default is . 
- public static async Task ReduceAsync(this ChatHistory chatHistory, IChatHistoryReducer? reducer, CancellationToken cancellationToken) - { - if (reducer is not null) - { - IEnumerable? reduced = await reducer.ReduceAsync(chatHistory, cancellationToken).ConfigureAwait(false); - return new ChatHistory(reduced ?? chatHistory); - } - - return chatHistory; - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatPromptParser.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatPromptParser.cs index 0c2a7f18bb7a..12d63de28d3c 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatPromptParser.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatPromptParser.cs @@ -112,12 +112,19 @@ private static ChatMessageContent ParseChatNode(PromptNode node) /// TagName = "message"
/// Attributes = { "role" : "..." }
/// optional one or more child nodes ...
- /// optional one or more child nodes ... + /// content not null or single child node ... ///
private static bool IsValidChatMessage(PromptNode node) { return node.TagName.Equals(MessageTagName, StringComparison.OrdinalIgnoreCase) && - node.Attributes.ContainsKey(RoleAttributeName); + node.Attributes.ContainsKey(RoleAttributeName) && + IsValidChildNodes(node); + } + + private static bool IsValidChildNodes(PromptNode node) + { + var textTagsCount = node.ChildNodes.Count(n => n.TagName.Equals(TextTagName, StringComparison.OrdinalIgnoreCase)); + return textTagsCount == 1 || (textTagsCount == 0 && node.Content is not null); } } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatHistoryReducer.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatHistoryReducer.cs deleted file mode 100644 index 7efc5a9ab515..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatHistoryReducer.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Threading; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel.ChatCompletion; - -/// -/// Interface for reducing the chat history. -/// -[Experimental("SKEXP0001")] -public interface IChatHistoryReducer -{ - /// - /// Reduces the chat history. - /// - /// Chat history to be reduced. - /// The to monitor for cancellation requests. The default is . - /// The reduced history or if no reduction has occurred. - Task?> ReduceAsync(IReadOnlyList chatHistory, CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/EmbeddingGenerationServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/EmbeddingGenerationServiceExtensions.cs index 96f1dd0252dd..c060c3f0d523 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/EmbeddingGenerationServiceExtensions.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/EmbeddingGenerationServiceExtensions.cs @@ -132,7 +132,6 @@ public async Task>> GenerateAsync(IEnu serviceKey is not null ? null : serviceType.IsInstanceOfType(this) ? this : serviceType.IsInstanceOfType(this._service) ? this._service : - serviceType.IsInstanceOfType(this.Metadata) ? this.Metadata : null; } } @@ -155,12 +154,12 @@ public EmbeddingGeneratorEmbeddingGenerationService( var attrs = new Dictionary(); this.Attributes = new ReadOnlyDictionary(attrs); - var metadata = (EmbeddingGeneratorMetadata?)generator.GetService(typeof(EmbeddingGeneratorMetadata)); - if (metadata?.ProviderUri is not null) + var metadata = generator.Metadata; + if (metadata.ProviderUri is not null) { attrs[AIServiceExtensions.EndpointKey] = metadata.ProviderUri.ToString(); } - if (metadata?.ModelId is not null) + if (metadata.ModelId is not null) { attrs[AIServiceExtensions.ModelIdKey] = metadata.ModelId; } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorOptions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorOptions.cs index 989206bc7aa2..a9cb63787177 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorOptions.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorOptions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
+using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel; @@ -34,5 +35,6 @@ public sealed class FunctionChoiceBehaviorOptions /// The default value is set to false. If set to true, the AI model will strictly adhere to the function schema. /// [JsonPropertyName("allow_strict_schema_adherence")] + [Experimental("SKEXP0001")] public bool AllowStrictSchemaAdherence { get; set; } = false; } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs index 95d1d442ab2f..3eb2d890aa54 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs @@ -23,7 +23,7 @@ public interface ITextToImageService : IAIService /// The to monitor for cancellation requests. The default is . /// Generated image contents [Experimental("SKEXP0001")] - Task> GetImageContentsAsync( + public Task> GetImageContentsAsync( TextContent input, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, diff --git a/dotnet/src/SemanticKernel.Abstractions/AbstractionsJsonContext.cs b/dotnet/src/SemanticKernel.Abstractions/AbstractionsJsonContext.cs index 736710ab146c..29caab93da9a 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AbstractionsJsonContext.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AbstractionsJsonContext.cs @@ -6,7 +6,6 @@ using System.Text.Json; using System.Text.Json.Serialization; using System.Text.Json.Serialization.Metadata; -using Microsoft.SemanticKernel.Functions; namespace Microsoft.SemanticKernel; @@ -16,7 +15,6 @@ namespace Microsoft.SemanticKernel; WriteIndented = true)] [JsonSerializable(typeof(IDictionary))] [JsonSerializable(typeof(JsonElement))] -[JsonSerializable(typeof(KernelFunctionSchemaModel))] [JsonSerializable(typeof(PromptExecutionSettings))] // types commonly used as values in settings dictionaries [JsonSerializable(typeof(string))] diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs index f0e71963fc80..f751ea6fc448 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs @@ -3,7 +3,7 @@ using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Agents; +namespace Microsoft.SemanticKernel.Agents.OpenAI; /// /// Content type to support message annotations. diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs index 641e375b2839..925d74d0c731 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs @@ -16,15 +16,6 @@ public class FileReferenceContent : KernelContent /// public string FileId { get; init; } = string.Empty; - /// - /// An optional tool association. - /// - /// - /// Tool definition depends upon the context within which the content is consumed. - /// - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public IReadOnlyList? Tools { get; init; } - /// /// Initializes a new instance of the class. 
/// diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs index 525472d90047..8dbcc00eb25d 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs @@ -2,7 +2,7 @@ using System.Collections.Generic; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.OpenAI; namespace Microsoft.SemanticKernel; diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs index 5c5aa5780303..609f94a87180 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs @@ -4,7 +4,7 @@ using System.Text; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Agents; +namespace Microsoft.SemanticKernel.Agents.OpenAI; /// /// Content type to support message annotations. diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs index ac8380506d43..9e7325b771c2 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs @@ -61,8 +61,8 @@ public StreamingKernelContentItemCollection Items /// /// Name of the author of the message /// - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] [Experimental("SKEXP0001")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public string? AuthorName { get => this._authorName; diff --git a/dotnet/src/SemanticKernel.Abstractions/Data/TextSearch/ITextSearch.cs b/dotnet/src/SemanticKernel.Abstractions/Data/TextSearch/ITextSearch.cs index 095c6d9a78d5..3b1ef667255b 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Data/TextSearch/ITextSearch.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Data/TextSearch/ITextSearch.cs @@ -18,7 +18,7 @@ public interface ITextSearch /// What to search for. /// Options used when executing a text search. /// The to monitor for cancellation requests. The default is . - Task> SearchAsync( + public Task> SearchAsync( string query, TextSearchOptions? searchOptions = null, CancellationToken cancellationToken = default); @@ -29,7 +29,7 @@ Task> SearchAsync( /// What to search for. /// Options used when executing a text search. /// The to monitor for cancellation requests. The default is . - Task> GetTextSearchResultsAsync( + public Task> GetTextSearchResultsAsync( string query, TextSearchOptions? searchOptions = null, CancellationToken cancellationToken = default); @@ -40,7 +40,7 @@ Task> GetTextSearchResultsAsync( /// What to search for. /// Options used when executing a text search. /// The to monitor for cancellation requests. The default is . - Task> GetSearchResultsAsync( + public Task> GetSearchResultsAsync( string query, TextSearchOptions? 
searchOptions = null, CancellationToken cancellationToken = default); diff --git a/dotnet/src/SemanticKernel.Abstractions/Filters/AutoFunctionInvocation/AutoFunctionInvocationContext.cs b/dotnet/src/SemanticKernel.Abstractions/Filters/AutoFunctionInvocation/AutoFunctionInvocationContext.cs index d13d5519b652..d943cff4fe89 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Filters/AutoFunctionInvocation/AutoFunctionInvocationContext.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Filters/AutoFunctionInvocation/AutoFunctionInvocationContext.cs @@ -1,6 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; using System.Threading; using Microsoft.SemanticKernel.ChatCompletion; @@ -80,12 +79,6 @@ public AutoFunctionInvocationContext( /// public ChatMessageContent ChatMessageContent { get; } - /// - /// The execution settings associated with the operation. - /// - [Experimental("SKEXP0001")] - public PromptExecutionSettings? ExecutionSettings { get; init; } - /// /// Gets the associated with automatic function invocation. /// diff --git a/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderContext.cs b/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderContext.cs index 3ac7507101f2..da16264a9fc7 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderContext.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderContext.cs @@ -1,6 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; using System.Threading; namespace Microsoft.SemanticKernel; @@ -55,12 +54,6 @@ internal PromptRenderContext(Kernel kernel, KernelFunction function, KernelArgum /// public KernelArguments Arguments { get; } - /// - /// The execution settings associated with the operation. - /// - [Experimental("SKEXP0001")] - public PromptExecutionSettings? ExecutionSettings { get; init; } - /// /// Gets or sets the rendered prompt. /// diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs index fddac8f48282..cc2d260b48a7 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs @@ -16,7 +16,6 @@ using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions; namespace Microsoft.SemanticKernel; @@ -518,7 +517,6 @@ public AIFunction AsAIFunction(Kernel? kernel = null) /// An wrapper around a . private sealed class KernelAIFunction : AIFunction { - private static readonly JsonElement s_defaultSchema = JsonDocument.Parse("{}").RootElement; private readonly KernelFunction _kernelFunction; private readonly Kernel? _kernel; @@ -526,17 +524,37 @@ public KernelAIFunction(KernelFunction kernelFunction, Kernel? kernel) { this._kernelFunction = kernelFunction; this._kernel = kernel; - this.Name = string.IsNullOrWhiteSpace(kernelFunction.PluginName) ? + + string name = string.IsNullOrWhiteSpace(kernelFunction.PluginName) ? 
kernelFunction.Name : $"{kernelFunction.PluginName}-{kernelFunction.Name}"; - this.JsonSchema = BuildFunctionSchema(kernelFunction); + this.Metadata = new AIFunctionMetadata(name) + { + Description = kernelFunction.Description, + + JsonSerializerOptions = kernelFunction.JsonSerializerOptions, + + Parameters = kernelFunction.Metadata.Parameters.Select(p => new AIFunctionParameterMetadata(p.Name) + { + Description = p.Description, + ParameterType = p.ParameterType, + IsRequired = p.IsRequired, + HasDefaultValue = p.DefaultValue is not null, + DefaultValue = p.DefaultValue, + Schema = p.Schema?.RootElement, + }).ToList(), + + ReturnParameter = new AIFunctionReturnParameterMetadata() + { + Description = kernelFunction.Metadata.ReturnParameter.Description, + ParameterType = kernelFunction.Metadata.ReturnParameter.ParameterType, + Schema = kernelFunction.Metadata.ReturnParameter.Schema?.RootElement, + }, + }; } - public override string Name { get; } - public override JsonElement JsonSchema { get; } - public override string Description => this._kernelFunction.Description; - public override JsonSerializerOptions JsonSerializerOptions => this._kernelFunction.JsonSerializerOptions ?? base.JsonSerializerOptions; + public override AIFunctionMetadata Metadata { get; } protected override async Task InvokeCoreAsync( IEnumerable> arguments, CancellationToken cancellationToken) @@ -558,25 +576,5 @@ public KernelAIFunction(KernelFunction kernelFunction, Kernel? kernel) JsonSerializer.SerializeToElement(value, AbstractionsJsonContext.GetTypeInfo(value.GetType(), this._kernelFunction.JsonSerializerOptions)) : null; } - - private static JsonElement BuildFunctionSchema(KernelFunction function) - { - KernelFunctionSchemaModel schemaModel = new() - { - Type = "object", - Description = function.Description, - }; - - foreach (var parameter in function.Metadata.Parameters) - { - schemaModel.Properties[parameter.Name] = parameter.Schema?.RootElement ?? s_defaultSchema; - if (parameter.IsRequired) - { - (schemaModel.Required ??= []).Add(parameter.Name); - } - } - - return JsonSerializer.SerializeToElement(schemaModel, AbstractionsJsonContext.Default.KernelFunctionSchemaModel); - } } } diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionNoop.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionNoop.cs deleted file mode 100644 index ce6ebc7eaf39..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionNoop.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel; - -/// -/// Represents a kernel function that performs no operation. -/// -[RequiresUnreferencedCode("Uses reflection to handle various aspects of the function creation and invocation, making it incompatible with AOT scenarios.")] -[RequiresDynamicCode("Uses reflection to handle various aspects of the function creation and invocation, making it incompatible with AOT scenarios.")] -internal sealed class KernelFunctionNoop : KernelFunction -{ - /// - /// Creates a new instance of the class. - /// - /// Option: Prompt execution settings. - internal KernelFunctionNoop(IReadOnlyDictionary? 
executionSettings) : - base($"Function_{Guid.NewGuid():N}", string.Empty, [], null, executionSettings?.ToDictionary(static kv => kv.Key, static kv => kv.Value)) - { - } - - /// - public override KernelFunction Clone(string pluginName) - { - Dictionary? executionSettings = this.ExecutionSettings?.ToDictionary(kv => kv.Key, kv => kv.Value); - return new KernelFunctionNoop(executionSettings); - } - - /// - protected override ValueTask InvokeCoreAsync(Kernel kernel, KernelArguments arguments, CancellationToken cancellationToken) - { - return new(new FunctionResult(this)); - } - - /// - protected override IAsyncEnumerable InvokeStreamingCoreAsync(Kernel kernel, KernelArguments arguments, CancellationToken cancellationToken) - { - return AsyncEnumerable.Empty(); - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionSchemaModel.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionSchemaModel.cs deleted file mode 100644 index e7460f9773af..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionSchemaModel.cs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Functions; - -internal sealed class KernelFunctionSchemaModel -{ - [JsonPropertyName("type")] - public string Type { get; set; } = "object"; - - [JsonPropertyName("condition"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Description { get; set; } - - [JsonPropertyName("properties")] - public Dictionary Properties { get; set; } = []; - - [JsonPropertyName("required"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public List? Required { get; set; } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Kernel.cs b/dotnet/src/SemanticKernel.Abstractions/Kernel.cs index 99a335e15656..9f53ddc93a7f 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Kernel.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Kernel.cs @@ -349,15 +349,13 @@ internal async Task OnPromptRenderAsync( KernelFunction function, KernelArguments arguments, bool isStreaming, - PromptExecutionSettings? executionSettings, Func renderCallback, CancellationToken cancellationToken) { PromptRenderContext context = new(this, function, arguments) { CancellationToken = cancellationToken, - IsStreaming = isStreaming, - ExecutionSettings = executionSettings + IsStreaming = isStreaming }; await InvokeFilterOrPromptRenderAsync(this._promptRenderFilters, renderCallback, context).ConfigureAwait(false); diff --git a/dotnet/src/SemanticKernel.Abstractions/Memory/ISemanticTextMemory.cs b/dotnet/src/SemanticKernel.Abstractions/Memory/ISemanticTextMemory.cs index 7218f0ad4033..d587fc56778b 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Memory/ISemanticTextMemory.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Memory/ISemanticTextMemory.cs @@ -24,7 +24,7 @@ public interface ISemanticTextMemory /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . /// Unique identifier of the saved memory record. - Task SaveInformationAsync( + public Task SaveInformationAsync( string collection, string text, string id, @@ -45,7 +45,7 @@ Task SaveInformationAsync( /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . 
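// Illustrative usage sketch (not part of the patch above): the ISemanticTextMemory changes here only add
// explicit `public` modifiers to the interface members, so existing call sites keep working. A minimal
// round trip, assuming an ISemanticTextMemory implementation is available; the collection name "docs",
// the id "doc-1", and the sample text are placeholders, and optional parameters keep their defaults.
#pragma warning disable SKEXP0001 // the legacy memory abstractions may be marked experimental
using System;
using System.Threading.Tasks;
using Microsoft.SemanticKernel.Memory;

internal static class SemanticTextMemoryUsageSketch
{
    public static async Task RunAsync(ISemanticTextMemory memory)
    {
        // Save a piece of text and keep the returned record identifier.
        string id = await memory.SaveInformationAsync(collection: "docs", text: "Semantic Kernel supports vector stores.", id: "doc-1");

        // Retrieve the record by key (embedding is omitted by default).
        var record = await memory.GetAsync("docs", id);
        Console.WriteLine(record);

        // Search the collection and enumerate the closest matches.
        await foreach (var result in memory.SearchAsync("docs", "vector stores", limit: 3))
        {
            Console.WriteLine(result);
        }

        // Remove the record when done.
        await memory.RemoveAsync("docs", id);
    }
}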
/// Unique identifier of the saved memory record. - Task SaveReferenceAsync( + public Task SaveReferenceAsync( string collection, string text, string externalId, @@ -66,7 +66,7 @@ Task SaveReferenceAsync( /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . /// Memory record, or null when nothing is found - Task GetAsync(string collection, string key, bool withEmbedding = false, Kernel? kernel = null, CancellationToken cancellationToken = default); + public Task GetAsync(string collection, string key, bool withEmbedding = false, Kernel? kernel = null, CancellationToken cancellationToken = default); /// /// Remove a memory by key. @@ -77,7 +77,7 @@ Task SaveReferenceAsync( /// Unique memory record identifier. /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . - Task RemoveAsync(string collection, string key, Kernel? kernel = null, CancellationToken cancellationToken = default); + public Task RemoveAsync(string collection, string key, Kernel? kernel = null, CancellationToken cancellationToken = default); /// /// Find some information in memory @@ -90,7 +90,7 @@ Task SaveReferenceAsync( /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . /// Memories found - IAsyncEnumerable SearchAsync( + public IAsyncEnumerable SearchAsync( string collection, string query, int limit = 1, @@ -105,5 +105,5 @@ IAsyncEnumerable SearchAsync( /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . /// A group of collection names. - Task> GetCollectionsAsync(Kernel? kernel = null, CancellationToken cancellationToken = default); + public Task> GetCollectionsAsync(Kernel? kernel = null, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/SemanticKernel.Abstractions/Services/AIServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/Services/AIServiceExtensions.cs index 24bc16a0f8e7..30a3ee7794e5 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Services/AIServiceExtensions.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Services/AIServiceExtensions.cs @@ -2,8 +2,6 @@ #pragma warning disable CA1716 // Identifiers should not match keywords -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Text; using Microsoft.Extensions.DependencyInjection; @@ -111,32 +109,4 @@ public static (T, PromptExecutionSettings?) SelectAIService( throw new KernelException(message.ToString()); } - - /// - /// Resolves an and associated from the specified - /// based on a and associated . - /// - /// - /// Specifies the type of the required. This must be the same type - /// with which the service was registered in the orvia - /// the . - /// - /// The to use to select a service from the . - /// The containing services, plugins, and other state for use throughout the operation. - /// The dictionary of to use to select a service from the . - /// The function arguments. - /// A tuple of the selected service and the settings associated with the service (the settings may be null). - /// An appropriate service could not be found. 
- [RequiresUnreferencedCode("Uses reflection to handle various aspects of the function creation and invocation, making it incompatible with AOT scenarios.")] - [RequiresDynamicCode("Uses reflection to handle various aspects of the function creation and invocation, making it incompatible with AOT scenarios.")] - public static (T, PromptExecutionSettings?) SelectAIService( - this IAIServiceSelector selector, - Kernel kernel, - IReadOnlyDictionary? executionSettings, - KernelArguments arguments) where T : class, IAIService - { - // Need to provide a KernelFunction to the service selector as a container for the execution-settings. - KernelFunction nullPrompt = new KernelFunctionNoop(executionSettings); - return selector.SelectAIService(kernel, nullPrompt, arguments); - } } diff --git a/dotnet/src/SemanticKernel.Core/Contents/BinaryContentExtensions.cs b/dotnet/src/SemanticKernel.Core/Contents/BinaryContentExtensions.cs deleted file mode 100644 index f0d8b29ae280..000000000000 --- a/dotnet/src/SemanticKernel.Core/Contents/BinaryContentExtensions.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.IO; - -namespace Microsoft.SemanticKernel; - -/// -/// Provides extension methods for interacting with . -/// -public static class BinaryContentExtensions -{ - /// - /// Writes the content to a file. - /// - /// The content to write. - /// The path to the file to write to. - /// Whether to overwrite the file if it already exists. - public static void WriteToFile(this BinaryContent content, string filePath, bool overwrite = false) - { - if (string.IsNullOrWhiteSpace(filePath)) - { - throw new ArgumentException("File path cannot be null or empty", nameof(filePath)); - } - - if (!overwrite && File.Exists(filePath)) - { - throw new InvalidOperationException("File already exists."); - } - - if (!content.CanRead) - { - throw new InvalidOperationException("No content to write to file."); - } - - File.WriteAllBytes(filePath, content.Data!.Value.ToArray()); - } -} diff --git a/dotnet/src/SemanticKernel.Core/Data/KernelBuilderExtensions.cs b/dotnet/src/SemanticKernel.Core/Data/KernelBuilderExtensions.cs new file mode 100644 index 000000000000..39b00dec9149 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Data/KernelBuilderExtensions.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Data; + +namespace Microsoft.SemanticKernel; + +/// +/// Extension methods to register Data services on the . +/// +[Experimental("SKEXP0001")] +public static class KernelBuilderExtensions +{ + /// + /// Register a Volatile with the specified service ID. + /// + /// The builder to register the on. + /// An optional service id to use as the service key. + /// The kernel builder. + [Obsolete("This has been replaced by the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] + public static IKernelBuilder AddVolatileVectorStore(this IKernelBuilder builder, string? serviceId = default) + { + builder.Services.AddVolatileVectorStore(serviceId); + return builder; + } + + /// + /// Register a instance with the specified service ID. + /// + /// The to register the on. + /// The name of the collection. + /// instance that can map a TRecord to a + /// instance that can map a TRecord to a + /// Options used to construct an instance of + /// An optional service id to use as the service key. 
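// Illustrative registration sketch (not part of the patch above) for the obsolete volatile-store
// extensions declared here. Kernel.CreateBuilder() is assumed as the entry point; the extension class is
// experimental and the methods are [Obsolete], so both diagnostics are suppressed at the call site.
using Microsoft.SemanticKernel;

internal static class VolatileVectorStoreRegistrationSketch
{
    public static Kernel Build()
    {
        var builder = Kernel.CreateBuilder();

#pragma warning disable SKEXP0001, CS0618 // experimental extension class; volatile store replaced by the InMemory connector
        builder.AddVolatileVectorStore();
        // AddVolatileVectorStoreTextSearch<TKey, TRecord>("collection") can be registered the same way,
        // but the resulting VectorStoreTextSearch also expects an embedding generation service (and
        // optionally string/result mappers) to be available in the container.
#pragma warning restore SKEXP0001, CS0618

        // The store is registered with the service collection as an IVectorStore and can be resolved
        // from the built kernel's services.
        return builder.Build();
    }
}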
+ [Obsolete("This has been replaced by the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] + public static IKernelBuilder AddVolatileVectorStoreTextSearch( + this IKernelBuilder builder, + string collectionName, + ITextSearchStringMapper? stringMapper = null, + ITextSearchResultMapper? resultMapper = null, + VectorStoreTextSearchOptions? options = null, + string? serviceId = default) + where TKey : notnull + { + builder.Services.AddVolatileVectorStoreTextSearch(collectionName, stringMapper, resultMapper, options, serviceId); + return builder; + } + + /// + /// Register a instance with the specified service ID. + /// + /// The to register the on. + /// The name of the collection. + /// delegate that can map a TRecord to a + /// delegate that can map a TRecord to a + /// Options used to construct an instance of + /// An optional service id to use as the service key. + [Obsolete("This has been replaced by the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] + public static IKernelBuilder AddVolatileVectorStoreTextSearch( + this IKernelBuilder builder, + string collectionName, + MapFromResultToString? stringMapper = null, + MapFromResultToTextSearchResult? resultMapper = null, + VectorStoreTextSearchOptions? options = null, + string? serviceId = default) + where TKey : notnull + { + builder.AddVolatileVectorStoreTextSearch( + collectionName, + stringMapper is not null ? new TextSearchStringMapper(stringMapper) : null, + resultMapper is not null ? new TextSearchResultMapper(resultMapper) : null, + options, + serviceId); + return builder; + } +} diff --git a/dotnet/src/SemanticKernel.Core/Data/ServiceCollectionExtensions.cs b/dotnet/src/SemanticKernel.Core/Data/ServiceCollectionExtensions.cs new file mode 100644 index 000000000000..d9d465141d5a --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Data/ServiceCollectionExtensions.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Data; +using Microsoft.SemanticKernel.Embeddings; + +namespace Microsoft.SemanticKernel; + +/// +/// Extension methods to register Data services on an . +/// +[Experimental("SKEXP0001")] +public static class ServiceCollectionExtensions +{ + /// + /// Register a Volatile with the specified service ID. + /// + /// The to register the on. + /// An optional service id to use as the service key. + /// The service collection. + [Obsolete("This has been replaced by the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] + public static IServiceCollection AddVolatileVectorStore(this IServiceCollection services, string? serviceId = default) + { + services.AddKeyedSingleton(serviceId); + services.AddKeyedSingleton(serviceId, (sp, obj) => sp.GetRequiredKeyedService(serviceId)); + return services; + } + + /// + /// Register a instance with the specified service ID. + /// + /// The to register the on. + /// The name of the collection. + /// instance that can map a TRecord to a + /// instance that can map a TRecord to a + /// Options used to construct an instance of + /// An optional service id to use as the service key. + [Obsolete("This has been replaced by the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] + [UnconditionalSuppressMessage("Trimming", "IL2091:Target generic argument does not satisfy 'DynamicallyAccessedMembersAttribute' in target method or type. 
The generic parameter of the source method or type does not have matching annotations.", Justification = "This method is obsolete")] + [UnconditionalSuppressMessage("Trimming", "IL2095:'DynamicallyAccessedMemberTypes' on the generic parameter of method or type don't match overridden generic parameter method or type. All overridden members must have the same 'DynamicallyAccessedMembersAttribute' usage.", Justification = "This method is obsolete")] + + public static IServiceCollection AddVolatileVectorStoreTextSearch( + this IServiceCollection services, + string collectionName, + ITextSearchStringMapper? stringMapper = null, + ITextSearchResultMapper? resultMapper = null, + VectorStoreTextSearchOptions? options = null, + string? serviceId = default) + where TKey : notnull + { + // If we are not constructing the dependent services, add the VectorStoreTextSearch as transient, since we + // cannot make assumptions about how dependent services are being managed. + services.AddKeyedTransient>( + serviceId, + (sp, obj) => + { + var vectorStore = sp.GetRequiredService(); + var vectorSearch = vectorStore.GetCollection(collectionName); + var generationService = sp.GetRequiredService(); + stringMapper ??= sp.GetRequiredService(); + resultMapper ??= sp.GetRequiredService(); + options ??= sp.GetService(); + return new VectorStoreTextSearch(vectorSearch, generationService, stringMapper, resultMapper, options); + }); + + return services; + } +} diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStore.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStore.cs new file mode 100644 index 000000000000..81b5fb4ef6b5 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStore.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using Microsoft.Extensions.VectorData; + +namespace Microsoft.SemanticKernel.Data; + +/// +/// Service for storing and retrieving vector records, and managing vector record collections, that uses an in memory dictionary as the underlying storage. +/// +[Obsolete("This has been replaced by InMemoryVectorStore in the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] +public sealed class VolatileVectorStore : IVectorStore +{ + /// Internal storage for the record collection. + private readonly ConcurrentDictionary> _internalCollection; + + /// The data type of each collection, to enforce a single type per collection. + private readonly ConcurrentDictionary _internalCollectionTypes = new(); + + /// + /// Initializes a new instance of the class. + /// + public VolatileVectorStore() + { + this._internalCollection = new(); + } + + /// + /// Initializes a new instance of the class. + /// + /// Allows passing in the dictionary used for storage, for testing purposes. + internal VolatileVectorStore(ConcurrentDictionary> internalCollection) + { + this._internalCollection = internalCollection; + } + + /// + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) + where TKey : notnull + { + if (this._internalCollectionTypes.TryGetValue(name, out var existingCollectionDataType) && existingCollectionDataType != typeof(TRecord)) + { + throw new InvalidOperationException($"Collection '{name}' already exists and with data type '{existingCollectionDataType.Name}' so cannot be re-created with data type '{typeof(TRecord).Name}'."); + } + + var collection = new VolatileVectorStoreRecordCollection( + this._internalCollection, + this._internalCollectionTypes, + name, + new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; + return collection!; + } + + /// + public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) + { + return this._internalCollection.Keys.ToAsyncEnumerable(); + } +} diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreCollectionSearchMapping.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreCollectionSearchMapping.cs new file mode 100644 index 000000000000..c80914949f7d --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreCollectionSearchMapping.cs @@ -0,0 +1,221 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Numerics.Tensors; +using System.Reflection; +using Microsoft.Extensions.VectorData; + +namespace Microsoft.SemanticKernel.Data; + +/// +/// Contains mapping helpers to use when searching for documents using the Volatile store. +/// +[Obsolete("This has been replaced by InMemoryVectorStoreCollectionSearchMapping in the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] +internal static class VolatileVectorStoreCollectionSearchMapping +{ + /// + /// Compare the two vectors using the specified distance function. + /// + /// The first vector to compare. + /// The second vector to compare. + /// The distance function to use for comparison. + /// The score of the comparison. + /// Thrown when the distance function is not supported. + public static float CompareVectors(ReadOnlySpan x, ReadOnlySpan y, string? distanceFunction) + { + switch (distanceFunction) + { + case null: + case DistanceFunction.CosineSimilarity: + case DistanceFunction.CosineDistance: + return TensorPrimitives.CosineSimilarity(x, y); + case DistanceFunction.DotProductSimilarity: + return TensorPrimitives.Dot(x, y); + case DistanceFunction.EuclideanDistance: + return TensorPrimitives.Distance(x, y); + default: + throw new NotSupportedException($"The distance function '{distanceFunction}' is not supported by the Volatile connector."); + } + } + + /// + /// Indicates whether result ordering should be descending or ascending, to get most similar results at the top, based on the distance function. + /// + /// The distance function to use for comparison. + /// Whether to order descending or ascending. + /// Thrown when the distance function is not supported. + public static bool ShouldSortDescending(string? 
distanceFunction) + { + switch (distanceFunction) + { + case null: + case DistanceFunction.CosineSimilarity: + case DistanceFunction.DotProductSimilarity: + return true; + case DistanceFunction.CosineDistance: + case DistanceFunction.EuclideanDistance: + return false; + default: + throw new NotSupportedException($"The distance function '{distanceFunction}' is not supported by the Volatile connector."); + } + } + + /// + /// Converts the provided score into the correct result depending on the distance function. + /// The main purpose here is to convert from cosine similarity to cosine distance if cosine distance is requested, + /// since the two are inversely related and the only supports cosine similarity so + /// we are using cosine similarity for both similarity and distance. + /// + /// The score to convert. + /// The distance function to use for comparison. + /// Whether to order descending or ascending. + /// Thrown when the distance function is not supported. + public static float ConvertScore(float score, string? distanceFunction) + { + switch (distanceFunction) + { + case DistanceFunction.CosineDistance: + return 1 - score; + case null: + case DistanceFunction.CosineSimilarity: + case DistanceFunction.DotProductSimilarity: + case DistanceFunction.EuclideanDistance: + return score; + default: + throw new NotSupportedException($"The distance function '{distanceFunction}' is not supported by the Volatile connector."); + } + } + + /// + /// Filter the provided records using the provided filter definition. + /// + /// The filter definition to filter the with. + /// The records to filter. + /// The filtered records. + /// Thrown when an unsupported filter clause is encountered. + public static IEnumerable FilterRecords(VectorSearchFilter? filter, IEnumerable records) + { + if (filter == null) + { + return records; + } + + return records.Where(record => + { + var result = true; + + // Run each filter clause against the record, and AND the results together. + // Break if any clause returns false, since we are doing an AND and no need + // to check any further clauses. + foreach (var clause in filter.FilterClauses) + { + if (clause is EqualToFilterClause equalToFilter) + { + result = result && CheckEqualTo(record, equalToFilter); + + if (result == false) + { + break; + } + } + else if (clause is AnyTagEqualToFilterClause anyTagEqualToFilter) + { + result = result && CheckAnyTagEqualTo(record, anyTagEqualToFilter); + + if (result == false) + { + break; + } + } + else + { + throw new InvalidOperationException($"Unsupported filter clause type {clause.GetType().Name}"); + } + } + + return result; + }); + } + + /// + /// Check if the required property on the record is equal to the required value form the filter. + /// + /// The record to check against the filter. + /// The filter containing the property and value to check. + /// if the property equals the required value, otherwise. + private static bool CheckEqualTo(object record, EqualToFilterClause equalToFilter) + { + var propertyInfo = GetPropertyInfo(record, equalToFilter.FieldName); + var propertyValue = propertyInfo.GetValue(record); + if (propertyValue == null) + { + return propertyValue == equalToFilter.Value; + } + + return propertyValue.Equals(equalToFilter.Value); + } + + /// + /// Check if the required tag list on the record is equal to the required value form the filter. + /// + /// The record to check against the filter. + /// The filter containing the property and value to check. 
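// Illustrative scoring sketch (not part of the patch above): VolatileVectorStoreCollectionSearchMapping
// is internal, so this snippet reproduces the rules it encodes rather than calling it directly. Cosine
// similarity is computed with TensorPrimitives, cosine *distance* is reported as 1 - similarity, and
// distance-style functions sort ascending while similarity-style functions sort descending.
using System;
using System.Numerics.Tensors;

internal static class VolatileScoringSketch
{
    public static void Run()
    {
        ReadOnlySpan<float> query = stackalloc float[] { 1f, 0f };
        ReadOnlySpan<float> record = stackalloc float[] { 0.6f, 0.8f };

        // CompareVectors returns this value for both CosineSimilarity and CosineDistance.
        float similarity = TensorPrimitives.CosineSimilarity(query, record);

        // ConvertScore(score, DistanceFunction.CosineDistance) flips similarity into a distance.
        float cosineDistance = 1f - similarity;

        // ShouldSortDescending: true for CosineSimilarity/DotProductSimilarity (bigger is better),
        // false for CosineDistance/EuclideanDistance (smaller is better).
        Console.WriteLine($"similarity={similarity}, distance={cosineDistance}");
    }
}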
+ /// if the tag list contains the required value, otherwise. + /// + private static bool CheckAnyTagEqualTo(object record, AnyTagEqualToFilterClause anyTagEqualToFilter) + { + var propertyInfo = GetPropertyInfo(record, anyTagEqualToFilter.FieldName); + + // Check that the property is actually a list of values. + if (!typeof(IEnumerable).IsAssignableFrom(propertyInfo.PropertyType)) + { + throw new InvalidOperationException($"Property {anyTagEqualToFilter.FieldName} is not a list property on record type {record.GetType().Name}"); + } + + // Check that the tag list contains any values. If not, return false, since the required value cannot be in an empty list. + var propertyValue = propertyInfo.GetValue(record) as IEnumerable; + if (propertyValue == null) + { + return false; + } + + // Check each value in the tag list against the required value. + foreach (var value in propertyValue) + { + if (value == null && anyTagEqualToFilter.Value == null) + { + return true; + } + + if (value != null && value.Equals(anyTagEqualToFilter.Value)) + { + return true; + } + } + + return false; + } + + /// + /// Get the property info for the provided property name on the record. + /// + /// The record to find the property on. + /// The name of the property to find. + /// The property info for the required property. + /// Thrown if the required property does not exist on the record. + [UnconditionalSuppressMessage("Analysis", "IL2075:Suppress IL2075 warning", Justification = "This class is obsolete")] + private static PropertyInfo GetPropertyInfo(object record, string propertyName) + { + var propertyInfo = record.GetType().GetProperty(propertyName); + if (propertyInfo == null) + { + throw new InvalidOperationException($"Property {propertyName} not found on record type {record.GetType().Name}"); + } + + return propertyInfo; + } +} diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreExtensions.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreExtensions.cs new file mode 100644 index 000000000000..2b1e1f9c9d0f --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreExtensions.cs @@ -0,0 +1,105 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.IO; +using System.Linq; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.Extensions.VectorData; + +namespace Microsoft.SemanticKernel.Data; + +/// +/// Extension methods for which allow: +/// 1. Serializing an instance of to a stream. +/// 2. Deserializing an instance of from a stream. +/// +[Obsolete("This has been replaced by InMemoryVectorStoreExtensions in the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] +public static class VolatileVectorStoreExtensions +{ + /// + /// Serialize a to a stream as JSON. + /// + /// Type of the record key. + /// Type of the record. + /// Instance of used to retrieve the collection. + /// The collection name. + /// The stream to write the serialized JSON to. + /// The JSON serializer options to use. + [RequiresUnreferencedCode("Uses reflection for collection serialization, making it incompatible with AOT scenarios.")] + [RequiresDynamicCode("Uses reflection for collection serialization, making it incompatible with AOT scenarios.")] + public static async Task SerializeCollectionAsJsonAsync( + this VolatileVectorStore vectorStore, + string collectionName, + Stream stream, + JsonSerializerOptions? 
jsonSerializerOptions = null) + where TKey : notnull + { + // Get collection and verify that it exists. + var collection = vectorStore.GetCollection(collectionName); + var exists = await collection.CollectionExistsAsync().ConfigureAwait(false); + if (!exists) + { + throw new InvalidOperationException($"Collection '{collectionName}' does not exist."); + } + + var volatileCollection = collection as VolatileVectorStoreRecordCollection; + var records = volatileCollection!.GetCollectionDictionary(); + VolatileRecordCollection recordCollection = new(collectionName, records); + + await JsonSerializer.SerializeAsync(stream, recordCollection, jsonSerializerOptions).ConfigureAwait(false); + } + + /// + /// Deserialize a to a stream as JSON. + /// + /// Type of the record key. + /// Type of the record. + /// Instance of used to retrieve the collection. + /// The stream to read the serialized JSON from. + [RequiresUnreferencedCode("Uses reflection for collection deserialization, making it incompatible with AOT scenarios.")] + [RequiresDynamicCode("Uses reflection for collection deserialization, making it incompatible with AOT scenarios.")] + public static async Task?> DeserializeCollectionFromJsonAsync( + this VolatileVectorStore vectorStore, + Stream stream) + where TKey : notnull + { + IVectorStoreRecordCollection? collection = null; + + using (StreamReader streamReader = new(stream)) + { + string result = streamReader.ReadToEnd(); + var recordCollection = JsonSerializer.Deserialize>(result); + if (recordCollection is null) + { + throw new InvalidOperationException("Stream does not contain valid record collection JSON."); + } + + // Get and create collection if it doesn't exist. + collection = vectorStore.GetCollection(recordCollection.Name); + await collection.CreateCollectionIfNotExistsAsync().ConfigureAwait(false); + + // Upsert records. + var tasks = recordCollection.Records.Values.Select(record => Task.Run(async () => + { + await collection.UpsertAsync(record).ConfigureAwait(false); + })); + await Task.WhenAll(tasks).ConfigureAwait(false); + } + + return collection; + } + + #region private + /// Model class used when storing a . + private sealed class VolatileRecordCollection(string name, IDictionary records) + where TKey : notnull + { + public string Name { get; init; } = name; + public IDictionary Records { get; init; } = records; + } + #endregion + +} diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreKeyResolver.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreKeyResolver.cs new file mode 100644 index 000000000000..407909491d38 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreKeyResolver.cs @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.SemanticKernel.Data; + +/// +/// Delegate that describes a function that given a record, finds the record key and returns it. +/// +/// The record to look up the key for. +/// The record key. +[Obsolete("This has been replaced by InMemoryVectorStoreKeyResolver in the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] +public delegate TKey? 
VolatileVectorStoreKeyResolver(TRecord record) + where TKey : notnull; diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs new file mode 100644 index 000000000000..b6e5454dc4d6 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs @@ -0,0 +1,380 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.VectorData; + +namespace Microsoft.SemanticKernel.Data; + +/// +/// Service for storing and retrieving vector records, that uses an in memory dictionary as the underlying storage. +/// +/// The data type of the record key. +/// The data model to use for adding, updating and retrieving data from storage. +[Obsolete("This has been replaced by InMemoryVectorStoreRecordCollection in the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] +#pragma warning disable CA1711 // Identifiers should not have incorrect suffix +public sealed class VolatileVectorStoreRecordCollection : IVectorStoreRecordCollection +#pragma warning restore CA1711 // Identifiers should not have incorrect suffix + where TKey : notnull +{ + /// A set of types that vectors on the provided model may have. + private static readonly HashSet s_supportedVectorTypes = + [ + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?), + ]; + + /// The default options for vector search. + private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + + /// Internal storage for all of the record collections. + private readonly ConcurrentDictionary> _internalCollections; + + /// The data type of each collection, to enforce a single type per collection. + private readonly ConcurrentDictionary _internalCollectionTypes; + + /// Optional configuration options for this class. + private readonly VolatileVectorStoreRecordCollectionOptions _options; + + /// The name of the collection that this will access. + private readonly string _collectionName; + + /// A helper to access property information for the current data model and record definition. + private readonly VectorStoreRecordPropertyReader _propertyReader; + + /// A dictionary of vector properties on the provided model, keyed by the property name. + private readonly Dictionary _vectorProperties; + + /// An function to look up vectors from the records. + private readonly VolatileVectorStoreVectorResolver _vectorResolver; + + /// An function to look up keys from the records. + private readonly VolatileVectorStoreKeyResolver _keyResolver; + + /// + /// Initializes a new instance of the class. + /// + /// The name of the collection that this will access. + /// Optional configuration options for this class. + [UnconditionalSuppressMessage("Trimming", "IL2087:Target parameter argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The generic parameter of the source method or type does not have matching annotations.", Justification = "This class is obsolete")] + public VolatileVectorStoreRecordCollection(string collectionName, VolatileVectorStoreRecordCollectionOptions? options = default) + { + // Verify. 
+ Verify.NotNullOrWhiteSpace(collectionName); + VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); + + // Assign. + this._collectionName = collectionName; + this._internalCollections = new(); + this._internalCollectionTypes = new(); + this._options = options ?? new VolatileVectorStoreRecordCollectionOptions(); + this._propertyReader = new VectorStoreRecordPropertyReader(typeof(TRecord), this._options.VectorStoreRecordDefinition, new() { RequiresAtLeastOneVector = false, SupportsMultipleKeys = false, SupportsMultipleVectors = true }); + + // Validate property types. + this._propertyReader.VerifyVectorProperties(s_supportedVectorTypes); + this._vectorProperties = this._propertyReader.VectorProperties.ToDictionary(x => x.DataModelPropertyName); + + // Assign resolvers. + this._vectorResolver = CreateVectorResolver(this._options.VectorResolver, this._vectorProperties); + this._keyResolver = CreateKeyResolver(this._options.KeyResolver, this._propertyReader.KeyProperty); + } + + /// + /// Initializes a new instance of the class. + /// + /// Internal storage for the record collection. + /// The data type of each collection, to enforce a single type per collection. + /// The name of the collection that this will access. + /// Optional configuration options for this class. + internal VolatileVectorStoreRecordCollection( + ConcurrentDictionary> internalCollection, + ConcurrentDictionary internalCollectionTypes, + string collectionName, + VolatileVectorStoreRecordCollectionOptions? options = default) + : this(collectionName, options) + { + this._internalCollections = internalCollection; + this._internalCollectionTypes = internalCollectionTypes; + } + + /// + public string CollectionName => this._collectionName; + + /// + public Task CollectionExistsAsync(CancellationToken cancellationToken = default) + { + return this._internalCollections.ContainsKey(this._collectionName) ? Task.FromResult(true) : Task.FromResult(false); + } + + /// + public Task CreateCollectionAsync(CancellationToken cancellationToken = default) + { + if (!this._internalCollections.ContainsKey(this._collectionName)) + { + this._internalCollections.TryAdd(this._collectionName, new ConcurrentDictionary()); + this._internalCollectionTypes.TryAdd(this._collectionName, typeof(TRecord)); + } + + return Task.CompletedTask; + } + + /// + public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + { + if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) + { + await this.CreateCollectionAsync(cancellationToken).ConfigureAwait(false); + } + } + + /// + public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + { + this._internalCollections.TryRemove(this._collectionName, out _); + return Task.CompletedTask; + } + + /// + public Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + { + var collectionDictionary = this.GetCollectionDictionary(); + + if (collectionDictionary.TryGetValue(key, out var record)) + { + return Task.FromResult((TRecord?)record); + } + + return Task.FromResult(default); + } + + /// + public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + foreach (var key in keys) + { + var record = await this.GetAsync(key, options, cancellationToken).ConfigureAwait(false); + + if (record is not null) + { + yield return record; + } + } + } + + /// + public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) + { + var collectionDictionary = this.GetCollectionDictionary(); + + collectionDictionary.TryRemove(key, out _); + return Task.CompletedTask; + } + + /// + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + { + var collectionDictionary = this.GetCollectionDictionary(); + + foreach (var key in keys) + { + collectionDictionary.TryRemove(key, out _); + } + + return Task.CompletedTask; + } + + /// + public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + { + Verify.NotNull(record); + + var collectionDictionary = this.GetCollectionDictionary(); + + var key = (TKey)this._keyResolver(record)!; + collectionDictionary.AddOrUpdate(key!, record, (key, currentValue) => record); + + return Task.FromResult(key!); + } + + /// + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + foreach (var record in records) + { + yield return await this.UpsertAsync(record, cancellationToken).ConfigureAwait(false); + } + } + + /// +#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously - Need to satisfy the interface which returns IAsyncEnumerable + public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) +#pragma warning restore CS1998 + { + Verify.NotNull(vector); + + if (this._propertyReader.FirstVectorPropertyName is null) + { + throw new InvalidOperationException("The collection does not have any vector fields, so vector search is not possible."); + } + + if (vector is not ReadOnlyMemory floatVector) + { + throw new NotSupportedException($"The provided vector type {vector.GetType().FullName} is not supported by the Volatile Vector Store."); + } + + // Resolve options and get requested vector property or first as default. + var internalOptions = options ?? s_defaultVectorSearchOptions; + + var vectorPropertyName = string.IsNullOrWhiteSpace(internalOptions.VectorPropertyName) ? this._propertyReader.FirstVectorPropertyName : internalOptions.VectorPropertyName; + if (!this._vectorProperties.TryGetValue(vectorPropertyName!, out var vectorProperty)) + { + throw new InvalidOperationException($"The collection does not have a vector field named '{internalOptions.VectorPropertyName}', so vector search is not possible."); + } + + // Filter records using the provided filter before doing the vector comparison. + if (internalOptions.Filter is not null) + { + throw new NotSupportedException("LINQ-based filtering is not supported with VolatileVectorStore, use Microsoft.SemanticKernel.Connectors.InMemory instead"); + } + + var filteredRecords = VolatileVectorStoreCollectionSearchMapping.FilterRecords(internalOptions.OldFilter, this.GetCollectionDictionary().Values); + + // Compare each vector in the filtered results with the provided vector. 
+ var results = filteredRecords.Select((record) => + { + var vectorObject = this._vectorResolver(vectorPropertyName!, (TRecord)record); + if (vectorObject is not ReadOnlyMemory dbVector) + { + return null; + } + + var score = VolatileVectorStoreCollectionSearchMapping.CompareVectors(floatVector.Span, dbVector.Span, vectorProperty.DistanceFunction); + var convertedscore = VolatileVectorStoreCollectionSearchMapping.ConvertScore(score, vectorProperty.DistanceFunction); + return (record, convertedscore); + }); + + // Get the non-null results since any record with a null vector results in a null result. + var nonNullResults = results.Where(x => x.HasValue).Select(x => x!.Value); + + // Calculate the total results count if requested. + long? count = null; + if (internalOptions.IncludeTotalCount) + { + count = nonNullResults.Count(); + } + + // Sort the results appropriately for the selected distance function and get the right page of results . + var sortedScoredResults = VolatileVectorStoreCollectionSearchMapping.ShouldSortDescending(vectorProperty.DistanceFunction) ? + nonNullResults.OrderByDescending(x => x.score) : + nonNullResults.OrderBy(x => x.score); + var resultsPage = sortedScoredResults.Skip(internalOptions.Skip).Take(internalOptions.Top); + + // Build the response. + var vectorSearchResultList = resultsPage.Select(x => new VectorSearchResult((TRecord)x.record, x.score)).ToAsyncEnumerable(); + return new VectorSearchResults(vectorSearchResultList) { TotalCount = count }; + } + + /// + /// Get the collection dictionary from the internal storage, throws if it does not exist. + /// + /// The retrieved collection dictionary. + internal ConcurrentDictionary GetCollectionDictionary() + { + if (!this._internalCollections.TryGetValue(this._collectionName, out var collectionDictionary)) + { + throw new VectorStoreOperationException($"Call to vector store failed. Collection '{this._collectionName}' does not exist."); + } + + return collectionDictionary; + } + + /// + /// Pick / create a vector resolver that will read a vector from a record in the store based on the vector name. + /// 1. If an override resolver is provided, use that. + /// 2. If the record type is create a resolver that looks up the vector in its dictionary. + /// 3. Otherwise, create a resolver that assumes the vector is a property directly on the record and use the record definition to determine the name. + /// + /// The override vector resolver if one was provided. + /// A dictionary of vector properties from the record definition. + /// The . + [UnconditionalSuppressMessage("Trimming", "IL2090:'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The generic parameter of the source method or type does not have matching annotations.", Justification = "This class is obsolete")] + private static VolatileVectorStoreVectorResolver CreateVectorResolver(VolatileVectorStoreVectorResolver? overrideVectorResolver, Dictionary vectorProperties) + { + // Custom resolver. + if (overrideVectorResolver is not null) + { + return overrideVectorResolver; + } + + // Generic data model resolver. 
+ if (typeof(TRecord).IsGenericType && typeof(TRecord).GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) + { + return (vectorName, record) => + { + var genericDataModelRecord = record as VectorStoreGenericDataModel; + var vectorsDictionary = genericDataModelRecord!.Vectors; + if (vectorsDictionary != null && vectorsDictionary.TryGetValue(vectorName, out var vector)) + { + return vector; + } + + throw new InvalidOperationException($"The collection does not have a vector field named '{vectorName}', so vector search is not possible."); + }; + } + + // Default resolver. + var vectorPropertiesInfo = vectorProperties.Values + .Select(x => x.DataModelPropertyName) + .Select(x => typeof(TRecord).GetProperty(x) ?? throw new ArgumentException($"Vector property '{x}' was not found on {typeof(TRecord).Name}")) + .ToDictionary(x => x.Name); + + return (vectorName, record) => + { + if (vectorPropertiesInfo.TryGetValue(vectorName, out var vectorPropertyInfo)) + { + return vectorPropertyInfo.GetValue(record); + } + + throw new InvalidOperationException($"The collection does not have a vector field named '{vectorName}', so vector search is not possible."); + }; + } + + /// + /// Pick / create a key resolver that will read a key from a record in the store. + /// 1. If an override resolver is provided, use that. + /// 2. If the record type is create a resolver that reads the Key property from it. + /// 3. Otherwise, create a resolver that assumes the key is a property directly on the record and use the record definition to determine the name. + /// + /// The override key resolver if one was provided. + /// They key property from the record definition. + /// The . + [UnconditionalSuppressMessage("Trimming", "IL2090:'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The generic parameter of the source method or type does not have matching annotations.", Justification = "This class is obsolete")] + private static VolatileVectorStoreKeyResolver CreateKeyResolver(VolatileVectorStoreKeyResolver? overrideKeyResolver, VectorStoreRecordKeyProperty keyProperty) + { + // Custom resolver. + if (overrideKeyResolver is not null) + { + return overrideKeyResolver; + } + + // Generic data model resolver. + if (typeof(TRecord).IsGenericType && typeof(TRecord).GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) + { + return (record) => + { + var genericDataModelRecord = record as VectorStoreGenericDataModel; + return genericDataModelRecord!.Key; + }; + } + + // Default resolver. + var keyPropertyInfo = typeof(TRecord).GetProperty(keyProperty.DataModelPropertyName) ?? throw new ArgumentException($"Key property {keyProperty.DataModelPropertyName} not found on {typeof(TRecord).Name}"); + return (record) => (TKey)keyPropertyInfo.GetValue(record)!; + } +} diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollectionOptions.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollectionOptions.cs new file mode 100644 index 000000000000..69f4f8c5fe2e --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollectionOptions.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.VectorData; + +namespace Microsoft.SemanticKernel.Data; + +/// +/// Options when creating a . +/// +/// The data type of the record key of the collection that this options will be used with. 
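// Illustrative end-to-end sketch (not part of the patch above) for the obsolete volatile collection
// defined here. The record attributes are assumed to be the Microsoft.Extensions.VectorData ones
// (VectorStoreRecordKey/Data/Vector), `MyDoc` and the sample vectors are placeholders, only
// ReadOnlyMemory<float> vectors are supported, and the matches are assumed to be exposed through
// VectorSearchResults.Results.
using System;
using System.Threading.Tasks;
using Microsoft.Extensions.VectorData;
using Microsoft.SemanticKernel.Data;

internal static class VolatileCollectionUsageSketch
{
    private sealed class MyDoc
    {
        [VectorStoreRecordKey]
        public string Id { get; set; } = string.Empty;

        [VectorStoreRecordData]
        public string Text { get; set; } = string.Empty;

        [VectorStoreRecordVector]
        public ReadOnlyMemory<float> Embedding { get; set; }
    }

    public static async Task RunAsync()
    {
#pragma warning disable CS0618 // VolatileVectorStore is obsolete; the InMemory connector is the replacement.
        var store = new VolatileVectorStore();
        var collection = store.GetCollection<string, MyDoc>("docs");
#pragma warning restore CS0618

        await collection.CreateCollectionIfNotExistsAsync();
        await collection.UpsertAsync(new MyDoc { Id = "1", Text = "hello", Embedding = new float[] { 1f, 0f } });

        // Search with a query vector; results are ordered according to the vector property's distance function.
        var results = await collection.VectorizedSearchAsync(new ReadOnlyMemory<float>(new float[] { 0.9f, 0.1f }));
        await foreach (var match in results.Results)
        {
            Console.WriteLine($"{match.Record.Id}: {match.Score}");
        }
    }
}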
+/// The data model to use for adding, updating and retrieving data on the collection that this options will be used with. +[Obsolete("This has been replaced by InMemoryVectorStoreRecordCollectionOptions in the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] +public sealed class VolatileVectorStoreRecordCollectionOptions + where TKey : notnull +{ + /// + /// Gets or sets an optional record definition that defines the schema of the record type. + /// + /// + /// If not provided, the schema will be inferred from the record model class using reflection. + /// In this case, the record model properties must be annotated with the appropriate attributes to indicate their usage. + /// See , and . + /// + public VectorStoreRecordDefinition? VectorStoreRecordDefinition { get; init; } = null; + + /// + /// An optional function that can be used to look up vectors from a record. + /// + /// + /// If not provided, the default behavior is to look for direct properties of the record + /// using reflection. This delegate can be used to provide a custom implementation if + /// the vector properties are located somewhere else on the record. + /// + public VolatileVectorStoreVectorResolver? VectorResolver { get; init; } = null; + + /// + /// An optional function that can be used to look up record keys. + /// + /// + /// If not provided, the default behavior is to look for a direct property of the record + /// using reflection. This delegate can be used to provide a custom implementation if + /// the key property is located somewhere else on the record. + /// + public VolatileVectorStoreKeyResolver? KeyResolver { get; init; } = null; +} diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreVectorResolver.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreVectorResolver.cs new file mode 100644 index 000000000000..b360b3aa6017 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreVectorResolver.cs @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.SemanticKernel.Data; + +/// +/// Delegate that describes a function that given a vector name and a record, finds the vector in the record and returns it. +/// +/// The name of the vector to find. +/// The record that contains the vector to look up. +/// The named vector from the record. +[Obsolete("This has been replaced by InMemoryVectorStoreVectorResolver in the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] +public delegate object? 
VolatileVectorStoreVectorResolver(string vectorName, TRecord record); diff --git a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs index babf4dedb8fc..367e5e7a2553 100644 --- a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs +++ b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs @@ -500,7 +500,7 @@ private async Task RenderPromptAsync( Verify.NotNull(aiService); - var renderingContext = await kernel.OnPromptRenderAsync(this, arguments, isStreaming, executionSettings, async (context) => + var renderingContext = await kernel.OnPromptRenderAsync(this, arguments, isStreaming, async (context) => { renderedPrompt = await this._promptTemplate.RenderAsync(kernel, context.Arguments, cancellationToken).ConfigureAwait(false); diff --git a/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ICodeRendering.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ICodeRendering.cs index 2a0581290eae..d1dcea92bf50 100644 --- a/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ICodeRendering.cs +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ICodeRendering.cs @@ -17,5 +17,5 @@ internal interface ICodeRendering /// The arguments /// The to monitor for cancellation requests. The default is . /// Rendered content - ValueTask RenderCodeAsync(Kernel kernel, KernelArguments? arguments = null, CancellationToken cancellationToken = default); + public ValueTask RenderCodeAsync(Kernel kernel, KernelArguments? arguments = null, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ITextRendering.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ITextRendering.cs index 48d63bed9971..87044226e4d1 100644 --- a/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ITextRendering.cs +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ITextRendering.cs @@ -11,5 +11,5 @@ internal interface ITextRendering /// /// Optional arguments the block rendering /// Rendered content - object? Render(KernelArguments? arguments); + public object? Render(KernelArguments? arguments); } diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTests.cs index 20cc5b1269fd..723349450e99 100644 --- a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTests.cs @@ -43,35 +43,4 @@ public void ItCanBeSerializedAndDeserialized() chatHistoryDeserialized[i].Items.OfType().Single().Text); } } - - [Theory] - [InlineData("system")] - [InlineData("developer")] - public void CtorWorksForSystemAndDeveloper(string providedRole) - { - // Arrange - var targetRole = providedRole == "system" ? 
AuthorRole.System : AuthorRole.Developer; - var options = new JsonSerializerOptions(); - var chatHistory = new ChatHistory("First message", targetRole); - - var chatHistoryJson = JsonSerializer.Serialize(chatHistory, options); - - // Act - var chatHistoryDeserialized = JsonSerializer.Deserialize(chatHistoryJson, options); - - // Assert - Assert.NotNull(chatHistoryDeserialized); - Assert.Equal(chatHistory.Count, chatHistoryDeserialized.Count); - Assert.Equal(providedRole, chatHistoryDeserialized[0].Role.Label); - for (var i = 0; i < chatHistory.Count; i++) - { - Assert.Equal(chatHistory[i].Role.Label, chatHistoryDeserialized[i].Role.Label); - Assert.Equal(chatHistory[i].Content, chatHistoryDeserialized[i].Content); - Assert.Equal(chatHistory[i].AuthorName, chatHistoryDeserialized[i].AuthorName); - Assert.Equal(chatHistory[i].Items.Count, chatHistoryDeserialized[i].Items.Count); - Assert.Equal( - chatHistory[i].Items.OfType().Single().Text, - chatHistoryDeserialized[i].Items.OfType().Single().Text); - } - } } diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/ServiceConversionExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/ServiceConversionExtensionsTests.cs index 556799ecc85e..cce73a65510f 100644 --- a/dotnet/src/SemanticKernel.UnitTests/AI/ServiceConversionExtensionsTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/AI/ServiceConversionExtensionsTests.cs @@ -41,10 +41,10 @@ public void AsEmbeddingGeneratorMetadataReturnsExpectedData() }.AsEmbeddingGenerator(); Assert.NotNull(generator); - var metadata = Assert.IsType(generator.GetService(typeof(EmbeddingGeneratorMetadata))); - Assert.Equal(nameof(TestEmbeddingGenerationService), metadata.ProviderName); - Assert.Equal("examplemodel", metadata.ModelId); - Assert.Equal("https://example.com/", metadata.ProviderUri?.ToString()); + Assert.NotNull(generator.Metadata); + Assert.Equal(nameof(TestEmbeddingGenerationService), generator.Metadata.ProviderName); + Assert.Equal("examplemodel", generator.Metadata.ModelId); + Assert.Equal("https://example.com/", generator.Metadata.ProviderUri?.ToString()); } [Fact] @@ -75,10 +75,10 @@ public void AsChatClientMetadataReturnsExpectedData() }.AsChatClient(); Assert.NotNull(client); - var metadata = Assert.IsType(client.GetService(typeof(ChatClientMetadata))); - Assert.Equal(nameof(TestChatCompletionService), metadata.ProviderName); - Assert.Equal("examplemodel", metadata.ModelId); - Assert.Equal("https://example.com/", metadata.ProviderUri?.ToString()); + Assert.NotNull(client.Metadata); + Assert.Equal(nameof(TestChatCompletionService), client.Metadata.ProviderName); + Assert.Equal("examplemodel", client.Metadata.ModelId); + Assert.Equal("https://example.com/", client.Metadata.ProviderUri?.ToString()); } [Fact] @@ -151,15 +151,15 @@ public async Task AsChatClientNonStreamingContentConvertedAsExpected() }, }.AsChatClient(); - Microsoft.Extensions.AI.ChatResponse result = await client.GetResponseAsync([ + Microsoft.Extensions.AI.ChatCompletion result = await client.CompleteAsync([ new(ChatRole.System, [ new Microsoft.Extensions.AI.TextContent("some text"), - new Microsoft.Extensions.AI.DataContent("http://imageurl", mediaType: "image/jpeg"), + new Microsoft.Extensions.AI.ImageContent("http://imageurl"), ]), new(ChatRole.User, [ - new Microsoft.Extensions.AI.DataContent("http://audiourl", mediaType: "audio/mpeg"), + new Microsoft.Extensions.AI.AudioContent("http://audiourl"), new Microsoft.Extensions.AI.TextContent("some other text"), ]), new(ChatRole.Assistant, @@ -168,7 +168,7 @@ public async Task 
AsChatClientNonStreamingContentConvertedAsExpected() ]), new(ChatRole.Tool, [ - new Microsoft.Extensions.AI.FunctionResultContent("call123", 42), + new Microsoft.Extensions.AI.FunctionResultContent("call123", "FunctionName", 42), ]), ], new ChatOptions() { @@ -211,7 +211,7 @@ public async Task AsChatClientNonStreamingContentConvertedAsExpected() var frc = Assert.IsType(actualChatHistory[3].Items[0]); Assert.Equal("call123", frc.CallId); - Assert.Null(frc.FunctionName); + Assert.Equal("FunctionName", frc.FunctionName); Assert.Equal(42, frc.Result); Assert.NotNull(actualSettings); @@ -244,19 +244,19 @@ public async Task AsChatClientNonStreamingResponseFormatHandled() List messages = [new(ChatRole.User, "hi")]; - await client.GetResponseAsync(messages); + await client.CompleteAsync(messages); oaiSettings = JsonSerializer.Deserialize(JsonSerializer.Serialize(actualSettings)); Assert.Null(oaiSettings); - await client.GetResponseAsync(messages, new() { ResponseFormat = ChatResponseFormat.Text }); + await client.CompleteAsync(messages, new() { ResponseFormat = ChatResponseFormat.Text }); oaiSettings = JsonSerializer.Deserialize(JsonSerializer.Serialize(actualSettings)); Assert.Equal("text", oaiSettings?.ResponseFormat?.ToString()); - await client.GetResponseAsync(messages, new() { ResponseFormat = ChatResponseFormat.Json }); + await client.CompleteAsync(messages, new() { ResponseFormat = ChatResponseFormat.Json }); oaiSettings = JsonSerializer.Deserialize(JsonSerializer.Serialize(actualSettings)); Assert.Equal("json_object", oaiSettings?.ResponseFormat?.ToString()); - await client.GetResponseAsync(messages, new() { ResponseFormat = ChatResponseFormat.ForJsonSchema(JsonSerializer.Deserialize(""" + await client.CompleteAsync(messages, new() { ResponseFormat = ChatResponseFormat.ForJsonSchema(JsonSerializer.Deserialize(""" {"type": "string"} """)) }); oaiSettings = JsonSerializer.Deserialize(JsonSerializer.Serialize(actualSettings)); @@ -289,7 +289,7 @@ public async Task AsChatClientNonStreamingToolsPropagated(ChatToolMode mode) List messages = [new(ChatRole.User, "hi")]; - await client.GetResponseAsync(messages, new() + await client.CompleteAsync(messages, new() { Tools = [new NopAIFunction("AIFunc1"), new NopAIFunction("AIFunc2")], ToolMode = mode, @@ -335,7 +335,8 @@ public async Task AsChatClientNonStreamingToolsPropagated(ChatToolMode mode) private sealed class NopAIFunction(string name) : AIFunction { - public override string Name => name; + public override AIFunctionMetadata Metadata => new(name); + protected override Task InvokeCoreAsync(IEnumerable> arguments, CancellationToken cancellationToken) { throw new FormatException(); @@ -361,15 +362,15 @@ public async Task AsChatClientStreamingContentConvertedAsExpected() }, }.AsChatClient(); - List result = await client.GetStreamingResponseAsync([ + List result = await client.CompleteStreamingAsync([ new(ChatRole.System, [ new Microsoft.Extensions.AI.TextContent("some text"), - new Microsoft.Extensions.AI.DataContent("http://imageurl", "image/jpeg"), + new Microsoft.Extensions.AI.ImageContent("http://imageurl"), ]), new(ChatRole.User, [ - new Microsoft.Extensions.AI.DataContent("http://audiourl", "audio/mpeg"), + new Microsoft.Extensions.AI.AudioContent("http://audiourl"), new Microsoft.Extensions.AI.TextContent("some other text"), ]), new(ChatRole.Assistant, @@ -378,7 +379,7 @@ public async Task AsChatClientStreamingContentConvertedAsExpected() ]), new(ChatRole.Tool, [ - new Microsoft.Extensions.AI.FunctionResultContent("call123", 42), + new 
Microsoft.Extensions.AI.FunctionResultContent("call123", "FunctionName", 42), ]), ], new ChatOptions() { @@ -422,7 +423,7 @@ public async Task AsChatClientStreamingContentConvertedAsExpected() var frc = Assert.IsType(actualChatHistory[3].Items[0]); Assert.Equal("call123", frc.CallId); - Assert.Null(frc.FunctionName); + Assert.Equal("FunctionName", frc.FunctionName); Assert.Equal(42, frc.Result); Assert.NotNull(actualSettings); @@ -450,7 +451,7 @@ public async Task AsChatCompletionServiceNonStreamingContentConvertedAsExpected( await Task.Yield(); actualChatHistory = messages; actualOptions = options; - return new Microsoft.Extensions.AI.ChatResponse(new ChatMessage() { Text = "the result" }); + return new Microsoft.Extensions.AI.ChatCompletion(new ChatMessage() { Text = "the result" }); }, }; @@ -504,8 +505,8 @@ public async Task AsChatCompletionServiceNonStreamingContentConvertedAsExpected( Assert.Single(actualChatHistory[3].Contents); Assert.Equal("some text", Assert.IsType(actualChatHistory[0].Contents[0]).Text); - Assert.Equal("http://imageurl/", Assert.IsType(actualChatHistory[0].Contents[1]).Uri?.ToString()); - Assert.Equal("http://audiourl/", Assert.IsType(actualChatHistory[1].Contents[0]).Uri?.ToString()); + Assert.Equal("http://imageurl/", Assert.IsType(actualChatHistory[0].Contents[1]).Uri?.ToString()); + Assert.Equal("http://audiourl/", Assert.IsType(actualChatHistory[1].Contents[0]).Uri?.ToString()); Assert.Equal("some other text", Assert.IsType(actualChatHistory[1].Contents[1]).Text); var fcc = Assert.IsType(actualChatHistory[2].Contents[0]); @@ -515,6 +516,7 @@ public async Task AsChatCompletionServiceNonStreamingContentConvertedAsExpected( var frc = Assert.IsType(actualChatHistory[3].Contents[0]); Assert.Equal("call123", frc.CallId); + Assert.Equal("FunctionName", frc.Name); Assert.Equal(42, frc.Result); Assert.NotNull(actualOptions); @@ -540,7 +542,7 @@ public async Task AsChatCompletionServiceStreamingContentConvertedAsExpected() { actualChatHistory = messages; actualOptions = options; - return new List() + return new List() { new() { Role = ChatRole.Assistant, Text = "the result" } }.ToAsyncEnumerable(); @@ -598,8 +600,8 @@ public async Task AsChatCompletionServiceStreamingContentConvertedAsExpected() Assert.Single(actualChatHistory[3].Contents); Assert.Equal("some text", Assert.IsType(actualChatHistory[0].Contents[0]).Text); - Assert.Equal("http://imageurl/", Assert.IsType(actualChatHistory[0].Contents[1]).Uri?.ToString()); - Assert.Equal("http://audiourl/", Assert.IsType(actualChatHistory[1].Contents[0]).Uri?.ToString()); + Assert.Equal("http://imageurl/", Assert.IsType(actualChatHistory[0].Contents[1]).Uri?.ToString()); + Assert.Equal("http://audiourl/", Assert.IsType(actualChatHistory[1].Contents[0]).Uri?.ToString()); Assert.Equal("some other text", Assert.IsType(actualChatHistory[1].Contents[1]).Text); var fcc = Assert.IsType(actualChatHistory[2].Contents[0]); @@ -609,6 +611,7 @@ public async Task AsChatCompletionServiceStreamingContentConvertedAsExpected() var frc = Assert.IsType(actualChatHistory[3].Contents[0]); Assert.Equal("call123", frc.CallId); + Assert.Equal("FunctionName", frc.Name); Assert.Equal(42, frc.Result); Assert.NotNull(actualOptions); @@ -649,18 +652,18 @@ private sealed class TestChatClient : IChatClient { public ChatClientMetadata Metadata { get; set; } = new(); - public Func, ChatOptions?, CancellationToken, Task>? CompleteAsyncDelegate { get; set; } + public Func, ChatOptions?, CancellationToken, Task>? 
CompleteAsyncDelegate { get; set; } - public Func, ChatOptions?, CancellationToken, IAsyncEnumerable>? CompleteStreamingAsyncDelegate { get; set; } + public Func, ChatOptions?, CancellationToken, IAsyncEnumerable>? CompleteStreamingAsyncDelegate { get; set; } - public Task GetResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { return this.CompleteAsyncDelegate != null ? this.CompleteAsyncDelegate(chatMessages, options, cancellationToken) : throw new NotImplementedException(); } - public IAsyncEnumerable GetStreamingResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable CompleteStreamingAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { return this.CompleteStreamingAsyncDelegate != null ? this.CompleteStreamingAsyncDelegate(chatMessages, options, cancellationToken) @@ -671,7 +674,7 @@ public void Dispose() { } public object? GetService(Type serviceType, object? serviceKey = null) { - return serviceType == typeof(ChatClientMetadata) ? this.Metadata : null; + return null; } } @@ -706,7 +709,7 @@ public Task>> GenerateAsync(IEnumerable() { ["metadata-key-7"] = "metadata-value-7" } }, - new FileReferenceContent(fileId: "file-id-2") { Tools = ["a", "b", "c"] }, new AnnotationContent("quote-8") { ModelId = "model-8", FileId = "file-id-2", StartIndex = 2, EndIndex = 24, Metadata = new Dictionary() { ["metadata-key-8"] = "metadata-value-8" } }, ]; @@ -302,21 +301,15 @@ public void ItCanBeSerializeAndDeserialized() Assert.Equal("function-id", functionResultContent.CallId); Assert.Equal("plugin-name", functionResultContent.PluginName); - var fileReferenceContent1 = deserializedMessage.Items[8] as FileReferenceContent; - Assert.NotNull(fileReferenceContent1); - Assert.Equal("file-id-1", fileReferenceContent1.FileId); - Assert.Equal("model-7", fileReferenceContent1.ModelId); - Assert.NotNull(fileReferenceContent1.Metadata); - Assert.Single(fileReferenceContent1.Metadata); - Assert.Equal("metadata-value-7", fileReferenceContent1.Metadata["metadata-key-7"]?.ToString()); - - var fileReferenceContent2 = deserializedMessage.Items[9] as FileReferenceContent; - Assert.NotNull(fileReferenceContent2); - Assert.Equal("file-id-2", fileReferenceContent2.FileId); - Assert.NotNull(fileReferenceContent2.Tools); - Assert.Equal(3, fileReferenceContent2.Tools.Count); - - var annotationContent = deserializedMessage.Items[10] as AnnotationContent; + var fileReferenceContent = deserializedMessage.Items[8] as FileReferenceContent; + Assert.NotNull(fileReferenceContent); + Assert.Equal("file-id-1", fileReferenceContent.FileId); + Assert.Equal("model-7", fileReferenceContent.ModelId); + Assert.NotNull(fileReferenceContent.Metadata); + Assert.Single(fileReferenceContent.Metadata); + Assert.Equal("metadata-value-7", fileReferenceContent.Metadata["metadata-key-7"]?.ToString()); + + var annotationContent = deserializedMessage.Items[9] as AnnotationContent; Assert.NotNull(annotationContent); Assert.Equal("file-id-2", annotationContent.FileId); Assert.Equal("quote-8", annotationContent.Quote); diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/FileReferenceContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/FileReferenceContentTests.cs index b698fa528bff..6b55818c9473 100644 --- 
a/dotnet/src/SemanticKernel.UnitTests/Contents/FileReferenceContentTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/FileReferenceContentTests.cs @@ -21,7 +21,6 @@ public void VerifyFileReferenceContentInitialState() Assert.Empty(definition.FileId); } - /// /// Verify usage. /// @@ -31,20 +30,5 @@ public void VerifyFileReferenceContentUsage() FileReferenceContent definition = new(fileId: "testfile"); Assert.Equal("testfile", definition.FileId); - Assert.Null(definition.Tools); - } - - /// - /// Verify usage. - /// - [Fact] - public void VerifyFileReferenceToolUsage() - { - FileReferenceContent definition = new(fileId: "testfile") { Tools = new[] { "a", "b", "c" } }; - - Assert.Equal("testfile", definition.FileId); - Assert.NotNull(definition.Tools); - Assert.Equal(3, definition.Tools.Count); - Assert.Equivalent(new[] { "a", "b", "c" }, definition.Tools); } } diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs index 46da513e4a7c..eb954752ce4b 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System.Text; -using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.OpenAI; using Xunit; namespace SemanticKernel.UnitTests.Contents; diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/KernelBuilderExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/KernelBuilderExtensionsTests.cs new file mode 100644 index 000000000000..3e155bea18fe --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Data/KernelBuilderExtensionsTests.cs @@ -0,0 +1,125 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Data; +using Xunit; + +namespace SemanticKernel.UnitTests.Data; + +/// +/// Contains tests for KernelBuilderExtensions". +/// +public class KernelBuilderExtensionsTests +{ + private readonly IKernelBuilder _kernelBuilder; + + public KernelBuilderExtensionsTests() + { + this._kernelBuilder = Kernel.CreateBuilder(); + } + + [Obsolete("The VolatileVectorStore is obsolete so this test is as well.")] + [Fact] + public void AddVectorStoreRegistersClass() + { + // Act. + this._kernelBuilder.AddVolatileVectorStore(); + + // Assert. + var kernel = this._kernelBuilder.Build(); + var vectorStore = kernel.Services.GetRequiredService(); + Assert.NotNull(vectorStore); + Assert.IsType(vectorStore); + } + + [Obsolete("The VolatileVectorStore is obsolete so this test is as well.")] + [Fact] + public void AddVolatileVectorStoreTextSearchRegistersClass() + { + // Arrange. + this._kernelBuilder.AddVolatileVectorStore(); + this._kernelBuilder.AddOpenAITextEmbeddingGeneration("modelId", "apiKey"); + + // Act. + this._kernelBuilder.AddVolatileVectorStoreTextSearch( + "records", + new DataModelTextSearchStringMapper(), + new DataModelTextSearchResultMapper()); + + // Assert. 
+ var kernel = this._kernelBuilder.Build(); + var vectorStoreTextSearch = kernel.Services.GetRequiredService>(); + Assert.NotNull(vectorStoreTextSearch); + Assert.IsType>(vectorStoreTextSearch); + } + + [Obsolete("The VolatileVectorStore is obsolete so this test is as well.")] + [Fact] + public void AddVolatileVectorStoreTextSearchWithDelegatesRegistersClass() + { + // Arrange. + this._kernelBuilder.AddVolatileVectorStore(); + this._kernelBuilder.AddOpenAITextEmbeddingGeneration("modelId", "apiKey"); + + // Act. + this._kernelBuilder.AddVolatileVectorStoreTextSearch( + "records", + obj => ((DataModel)obj).Text, + obj => new TextSearchResult(value: ((DataModel)obj).Text) { Name = ((DataModel)obj).Key.ToString() }); + + // Assert. + var kernel = this._kernelBuilder.Build(); + var vectorStoreTextSearch = kernel.Services.GetRequiredService>(); + Assert.NotNull(vectorStoreTextSearch); + Assert.IsType>(vectorStoreTextSearch); + } + + /// + /// String mapper which converts a DataModel to a string. + /// + private sealed class DataModelTextSearchStringMapper : ITextSearchStringMapper + { + /// + public string MapFromResultToString(object result) + { + if (result is DataModel dataModel) + { + return dataModel.Text; + } + throw new ArgumentException("Invalid result type."); + } + } + + /// + /// Result mapper which converts a DataModel to a TextSearchResult. + /// + private sealed class DataModelTextSearchResultMapper : ITextSearchResultMapper + { + /// + public TextSearchResult MapFromResultToTextSearchResult(object result) + { + if (result is DataModel dataModel) + { + return new TextSearchResult(value: dataModel.Text) { Name = dataModel.Key.ToString() }; + } + throw new ArgumentException("Invalid result type."); + } + } + +#pragma warning disable CA1812 // Avoid uninstantiated internal classes + private sealed class DataModel +#pragma warning restore CA1812 // Avoid uninstantiated internal classes + { + [VectorStoreRecordKey] + public Guid Key { get; init; } + + [VectorStoreRecordData] + public required string Text { get; init; } + + [VectorStoreRecordVector(1536)] + public ReadOnlyMemory Embedding { get; init; } + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/ServiceCollectionExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/ServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..79691fabe8b6 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Data/ServiceCollectionExtensionsTests.cs @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Data; +using Xunit; + +namespace SemanticKernel.UnitTests.Data; + +/// +/// Contains tests for the ServiceCollectionExtensions class. +/// +public class ServiceCollectionExtensionsTests +{ + private readonly IServiceCollection _serviceCollection; + + public ServiceCollectionExtensionsTests() + { + this._serviceCollection = new ServiceCollection(); + } + + [Obsolete("The VolatileVectorStore is obsolete so this test is as well.")] + [Fact] + public void AddVectorStoreRegistersClass() + { + // Act. + this._serviceCollection.AddVolatileVectorStore(); + + // Assert. 
+ var serviceProvider = this._serviceCollection.BuildServiceProvider(); + var vectorStore = serviceProvider.GetRequiredService(); + Assert.NotNull(vectorStore); + Assert.IsType(vectorStore); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs new file mode 100644 index 000000000000..b93c00952705 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs @@ -0,0 +1,577 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Data; +using Xunit; + +namespace SemanticKernel.UnitTests.Data; + +/// +/// Contains tests for the class. +/// +[Obsolete("The VolatileVectorStoreRecordCollection is obsolete so these tests are as well.")] +public class VolatileVectorStoreRecordCollectionTests +{ + private const string TestCollectionName = "testcollection"; + private const string TestRecordKey1 = "testid1"; + private const string TestRecordKey2 = "testid2"; + private const int TestRecordIntKey1 = 1; + private const int TestRecordIntKey2 = 2; + + private readonly CancellationToken _testCancellationToken = new(false); + + private readonly ConcurrentDictionary> _collectionStore; + private readonly ConcurrentDictionary _collectionStoreTypes; + + public VolatileVectorStoreRecordCollectionTests() + { + this._collectionStore = new(); + this._collectionStoreTypes = new(); + } + + [Theory] + [InlineData(TestCollectionName, true)] + [InlineData("nonexistentcollection", false)] + public async Task CollectionExistsReturnsCollectionStateAsync(string collectionName, bool expectedExists) + { + // Arrange + var collection = new ConcurrentDictionary(); + this._collectionStore.TryAdd(TestCollectionName, collection); + + var sut = new VolatileVectorStoreRecordCollection>( + this._collectionStore, + this._collectionStoreTypes, + collectionName); + + // Act + var actual = await sut.CollectionExistsAsync(this._testCancellationToken); + + // Assert + Assert.Equal(expectedExists, actual); + } + + [Fact] + public async Task CanCreateCollectionAsync() + { + // Arrange + var sut = this.CreateRecordCollection(false); + + // Act + await sut.CreateCollectionAsync(this._testCancellationToken); + + // Assert + Assert.True(this._collectionStore.ContainsKey(TestCollectionName)); + } + + [Fact] + public async Task DeleteCollectionRemovesCollectionFromDictionaryAsync() + { + // Arrange + var collection = new ConcurrentDictionary(); + this._collectionStore.TryAdd(TestCollectionName, collection); + + var sut = this.CreateRecordCollection(false); + + // Act + await sut.DeleteCollectionAsync(this._testCancellationToken); + + // Assert + Assert.Empty(this._collectionStore); + } + + [Theory] + [InlineData(true, TestRecordKey1)] + [InlineData(true, TestRecordIntKey1)] + [InlineData(false, TestRecordKey1)] + [InlineData(false, TestRecordIntKey1)] + public async Task CanGetRecordWithVectorsAsync(bool useDefinition, TKey testKey) + where TKey : notnull + { + // Arrange + var record = CreateModel(testKey, withVectors: true); + var collection = new ConcurrentDictionary(); + collection.TryAdd(testKey!, record); + this._collectionStore.TryAdd(TestCollectionName, collection); + + var sut = this.CreateRecordCollection(useDefinition); + + // 
Act + var actual = await sut.GetAsync( + testKey, + new() + { + IncludeVectors = true + }, + this._testCancellationToken); + + // Assert + var expectedArgs = new object[] { TestRecordKey1 }; + + Assert.NotNull(actual); + Assert.Equal(testKey, actual.Key); + Assert.Equal($"data {testKey}", actual.Data); + Assert.Equal(new float[] { 1, 2, 3, 4 }, actual.Vector!.Value.ToArray()); + } + + [Theory] + [InlineData(true, TestRecordKey1, TestRecordKey2)] + [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] + [InlineData(false, TestRecordKey1, TestRecordKey2)] + [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] + public async Task CanGetManyRecordsWithVectorsAsync(bool useDefinition, TKey testKey1, TKey testKey2) + where TKey : notnull + { + // Arrange + var record1 = CreateModel(testKey1, withVectors: true); + var record2 = CreateModel(testKey2, withVectors: true); + var collection = new ConcurrentDictionary(); + collection.TryAdd(testKey1!, record1); + collection.TryAdd(testKey2!, record2); + this._collectionStore.TryAdd(TestCollectionName, collection); + + var sut = this.CreateRecordCollection(useDefinition); + + // Act + var actual = await sut.GetBatchAsync( + [testKey1, testKey2], + new() + { + IncludeVectors = true + }, + this._testCancellationToken).ToListAsync(); + + // Assert + Assert.NotNull(actual); + Assert.Equal(2, actual.Count); + Assert.Equal(testKey1, actual[0].Key); + Assert.Equal($"data {testKey1}", actual[0].Data); + Assert.Equal(testKey2, actual[1].Key); + Assert.Equal($"data {testKey2}", actual[1].Data); + } + + [Theory] + [InlineData(true, TestRecordKey1, TestRecordKey2)] + [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] + [InlineData(false, TestRecordKey1, TestRecordKey2)] + [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] + public async Task CanDeleteRecordAsync(bool useDefinition, TKey testKey1, TKey testKey2) + where TKey : notnull + { + // Arrange + var record1 = CreateModel(testKey1, withVectors: true); + var record2 = CreateModel(testKey2, withVectors: true); + var collection = new ConcurrentDictionary(); + collection.TryAdd(testKey1, record1); + collection.TryAdd(testKey2, record2); + this._collectionStore.TryAdd(TestCollectionName, collection); + + var sut = this.CreateRecordCollection(useDefinition); + + // Act + await sut.DeleteAsync( + testKey1, + cancellationToken: this._testCancellationToken); + + // Assert + Assert.False(collection.ContainsKey(testKey1)); + Assert.True(collection.ContainsKey(testKey2)); + } + + [Theory] + [InlineData(true, TestRecordKey1, TestRecordKey2)] + [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] + [InlineData(false, TestRecordKey1, TestRecordKey2)] + [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] + public async Task CanDeleteManyRecordsWithVectorsAsync(bool useDefinition, TKey testKey1, TKey testKey2) + where TKey : notnull + { + // Arrange + var record1 = CreateModel(testKey1, withVectors: true); + var record2 = CreateModel(testKey2, withVectors: true); + var collection = new ConcurrentDictionary(); + collection.TryAdd(testKey1, record1); + collection.TryAdd(testKey2, record2); + this._collectionStore.TryAdd(TestCollectionName, collection); + + var sut = this.CreateRecordCollection(useDefinition); + + // Act + await sut.DeleteBatchAsync( + [testKey1, testKey2], + cancellationToken: this._testCancellationToken); + + // Assert + Assert.False(collection.ContainsKey(testKey1)); + Assert.False(collection.ContainsKey(testKey2)); + } + + [Theory] + [InlineData(true, TestRecordKey1)] + 
[InlineData(true, TestRecordIntKey1)] + [InlineData(false, TestRecordKey1)] + [InlineData(false, TestRecordIntKey1)] + public async Task CanUpsertRecordAsync(bool useDefinition, TKey testKey1) + where TKey : notnull + { + // Arrange + var record1 = CreateModel(testKey1, withVectors: true); + var collection = new ConcurrentDictionary(); + this._collectionStore.TryAdd(TestCollectionName, collection); + + var sut = this.CreateRecordCollection(useDefinition); + + // Act + var upsertResult = await sut.UpsertAsync( + record1, + cancellationToken: this._testCancellationToken); + + // Assert + Assert.Equal(testKey1, upsertResult); + Assert.True(collection.ContainsKey(testKey1)); + Assert.IsType>(collection[testKey1]); + Assert.Equal($"data {testKey1}", (collection[testKey1] as SinglePropsModel)!.Data); + } + + [Theory] + [InlineData(true, TestRecordKey1, TestRecordKey2)] + [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] + [InlineData(false, TestRecordKey1, TestRecordKey2)] + [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] + public async Task CanUpsertManyRecordsAsync(bool useDefinition, TKey testKey1, TKey testKey2) + where TKey : notnull + { + // Arrange + var record1 = CreateModel(testKey1, withVectors: true); + var record2 = CreateModel(testKey2, withVectors: true); + + var collection = new ConcurrentDictionary(); + this._collectionStore.TryAdd(TestCollectionName, collection); + + var sut = this.CreateRecordCollection(useDefinition); + + // Act + var actual = await sut.UpsertBatchAsync( + [record1, record2], + cancellationToken: this._testCancellationToken).ToListAsync(); + + // Assert + Assert.NotNull(actual); + Assert.Equal(2, actual.Count); + Assert.Equal(testKey1, actual[0]); + Assert.Equal(testKey2, actual[1]); + + Assert.True(collection.ContainsKey(testKey1)); + Assert.IsType>(collection[testKey1]); + Assert.Equal($"data {testKey1}", (collection[testKey1] as SinglePropsModel)!.Data); + } + + [Theory] + [InlineData(true, TestRecordKey1, TestRecordKey2)] + [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] + [InlineData(false, TestRecordKey1, TestRecordKey2)] + [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] + public async Task CanSearchWithVectorAsync(bool useDefinition, TKey testKey1, TKey testKey2) + where TKey : notnull + { + // Arrange + var record1 = CreateModel(testKey1, withVectors: true, new float[] { 1, 1, 1, 1 }); + var record2 = CreateModel(testKey2, withVectors: true, new float[] { -1, -1, -1, -1 }); + + var collection = new ConcurrentDictionary(); + collection.TryAdd(testKey1, record1); + collection.TryAdd(testKey2, record2); + + this._collectionStore.TryAdd(TestCollectionName, collection); + + var sut = this.CreateRecordCollection(useDefinition); + + // Act + var actual = await sut.VectorizedSearchAsync( + new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), + new() { IncludeVectors = true }, + this._testCancellationToken); + + // Assert + Assert.NotNull(actual); + Assert.Null(actual.TotalCount); + var actualResults = await actual.Results.ToListAsync(); + Assert.Equal(2, actualResults.Count); + Assert.Equal(testKey1, actualResults[0].Record.Key); + Assert.Equal($"data {testKey1}", actualResults[0].Record.Data); + Assert.Equal(1, actualResults[0].Score); + Assert.Equal(testKey2, actualResults[1].Record.Key); + Assert.Equal($"data {testKey2}", actualResults[1].Record.Data); + Assert.Equal(-1, actualResults[1].Score); + } + + [Theory] + [InlineData(true, TestRecordKey1, TestRecordKey2, "Equality")] + [InlineData(true, TestRecordIntKey1, 
TestRecordIntKey2, "Equality")] + [InlineData(false, TestRecordKey1, TestRecordKey2, "Equality")] + [InlineData(false, TestRecordIntKey1, TestRecordIntKey2, "Equality")] + [InlineData(true, TestRecordKey1, TestRecordKey2, "TagListContains")] + [InlineData(true, TestRecordIntKey1, TestRecordIntKey2, "TagListContains")] + [InlineData(false, TestRecordKey1, TestRecordKey2, "TagListContains")] + [InlineData(false, TestRecordIntKey1, TestRecordIntKey2, "TagListContains")] + public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, TKey testKey1, TKey testKey2, string filterType) + where TKey : notnull + { + // Arrange + var record1 = CreateModel(testKey1, withVectors: true, new float[] { 1, 1, 1, 1 }); + var record2 = CreateModel(testKey2, withVectors: true, new float[] { -1, -1, -1, -1 }); + + var collection = new ConcurrentDictionary(); + collection.TryAdd(testKey1, record1); + collection.TryAdd(testKey2, record2); + + this._collectionStore.TryAdd(TestCollectionName, collection); + + var sut = this.CreateRecordCollection(useDefinition); + + // Act + var filter = filterType == "Equality" ? new VectorSearchFilter().EqualTo("Data", $"data {testKey2}") : new VectorSearchFilter().AnyTagEqualTo("Tags", $"tag {testKey2}"); + var actual = await sut.VectorizedSearchAsync( + new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), + new() { IncludeVectors = true, OldFilter = filter, IncludeTotalCount = true }, + this._testCancellationToken); + + // Assert + Assert.NotNull(actual); + Assert.Equal(1, actual.TotalCount); + var actualResults = await actual.Results.ToListAsync(); + Assert.Single(actualResults); + Assert.Equal(testKey2, actualResults[0].Record.Key); + Assert.Equal($"data {testKey2}", actualResults[0].Record.Data); + Assert.Equal(-1, actualResults[0].Score); + } + + [Theory] + [InlineData(DistanceFunction.CosineSimilarity, 1, -1)] + [InlineData(DistanceFunction.CosineDistance, 0, 2)] + [InlineData(DistanceFunction.DotProductSimilarity, 4, -4)] + [InlineData(DistanceFunction.EuclideanDistance, 0, 4)] + public async Task CanSearchWithDifferentDistanceFunctionsAsync(string distanceFunction, double expectedScoreResult1, double expectedScoreResult2) + { + // Arrange + var record1 = CreateModel(TestRecordKey1, withVectors: true, new float[] { 1, 1, 1, 1 }); + var record2 = CreateModel(TestRecordKey2, withVectors: true, new float[] { -1, -1, -1, -1 }); + + var collection = new ConcurrentDictionary(); + collection.TryAdd(TestRecordKey1, record1); + collection.TryAdd(TestRecordKey2, record2); + + this._collectionStore.TryAdd(TestCollectionName, collection); + + VectorStoreRecordDefinition singlePropsDefinition = new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("Data", typeof(string)), + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { DistanceFunction = distanceFunction } + ] + }; + + var sut = new VolatileVectorStoreRecordCollection>( + this._collectionStore, + this._collectionStoreTypes, + TestCollectionName, + new() + { + VectorStoreRecordDefinition = singlePropsDefinition + }); + + // Act + var actual = await sut.VectorizedSearchAsync( + new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), + new() { IncludeVectors = true }, + this._testCancellationToken); + + // Assert + Assert.NotNull(actual); + var actualResults = await actual.Results.ToListAsync(); + Assert.Equal(2, actualResults.Count); + Assert.Equal(TestRecordKey1, actualResults[0].Record.Key); + Assert.Equal($"data {TestRecordKey1}", 
actualResults[0].Record.Data); + Assert.Equal(expectedScoreResult1, actualResults[0].Score); + Assert.Equal(TestRecordKey2, actualResults[1].Record.Key); + Assert.Equal($"data {TestRecordKey2}", actualResults[1].Record.Data); + Assert.Equal(expectedScoreResult2, actualResults[1].Score); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public async Task CanSearchManyRecordsAsync(bool useDefinition) + { + // Arrange + var collection = new ConcurrentDictionary(); + for (int i = 0; i < 1000; i++) + { + if (i <= 14) + { + collection.TryAdd(i, CreateModel(i, withVectors: true, new float[] { 1, 1, 1, 1 })); + } + else + { + collection.TryAdd(i, CreateModel(i, withVectors: true, new float[] { -1, -1, -1, -1 })); + } + } + + this._collectionStore.TryAdd(TestCollectionName, collection); + + var sut = this.CreateRecordCollection(useDefinition); + + // Act + var actual = await sut.VectorizedSearchAsync( + new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), + new() { IncludeVectors = true, Top = 10, Skip = 10, IncludeTotalCount = true }, + this._testCancellationToken); + + // Assert + Assert.NotNull(actual); + Assert.Equal(1000, actual.TotalCount); + + // Assert that top was respected + var actualResults = await actual.Results.ToListAsync(); + Assert.Equal(10, actualResults.Count); + var actualIds = actualResults.Select(r => r.Record.Key).ToList(); + for (int i = 0; i < 10; i++) + { + // Assert that skip was respected + Assert.Contains(i + 10, actualIds); + if (i <= 4) + { + Assert.Equal(1, actualResults[i].Score); + } + else + { + Assert.Equal(-1, actualResults[i].Score); + } + } + } + + [Theory] + [InlineData(TestRecordKey1, TestRecordKey2)] + [InlineData(TestRecordIntKey1, TestRecordIntKey2)] + public async Task ItCanSearchUsingTheGenericDataModelAsync(TKey testKey1, TKey testKey2) + where TKey : notnull + { + // Arrange + var record1 = new VectorStoreGenericDataModel(testKey1) + { + Data = new Dictionary + { + ["Data"] = $"data {testKey1}", + ["Tags"] = new List { "default tag", "tag " + testKey1 } + }, + Vectors = new Dictionary + { + ["Vector"] = new ReadOnlyMemory([1, 1, 1, 1]) + } + }; + var record2 = new VectorStoreGenericDataModel(testKey2) + { + Data = new Dictionary + { + ["Data"] = $"data {testKey2}", + ["Tags"] = new List { "default tag", "tag " + testKey2 } + }, + Vectors = new Dictionary + { + ["Vector"] = new ReadOnlyMemory([-1, -1, -1, -1]) + } + }; + + var collection = new ConcurrentDictionary(); + collection.TryAdd(testKey1, record1); + collection.TryAdd(testKey2, record2); + + this._collectionStore.TryAdd(TestCollectionName, collection); + + var sut = new VolatileVectorStoreRecordCollection>( + this._collectionStore, + this._collectionStoreTypes, + TestCollectionName, + new() + { + VectorStoreRecordDefinition = this._singlePropsDefinition + }); + + // Act + var actual = await sut.VectorizedSearchAsync( + new ReadOnlyMemory([1, 1, 1, 1]), + new() { IncludeVectors = true, VectorPropertyName = "Vector" }, + this._testCancellationToken); + + // Assert + Assert.NotNull(actual); + var actualResults = await actual.Results.ToListAsync(); + Assert.Equal(2, actualResults.Count); + Assert.Equal(testKey1, actualResults[0].Record.Key); + Assert.Equal($"data {testKey1}", actualResults[0].Record.Data["Data"]); + Assert.Equal(1, actualResults[0].Score); + Assert.Equal(testKey2, actualResults[1].Record.Key); + Assert.Equal($"data {testKey2}", actualResults[1].Record.Data["Data"]); + Assert.Equal(-1, actualResults[1].Score); + } + + private static SinglePropsModel CreateModel(TKey key, bool 
withVectors, float[]? vector = null) + { + return new SinglePropsModel + { + Key = key, + Data = "data " + key, + Tags = new List { "default tag", "tag " + key }, + Vector = vector ?? (withVectors ? new float[] { 1, 2, 3, 4 } : null), + NotAnnotated = null, + }; + } + + private VolatileVectorStoreRecordCollection> CreateRecordCollection(bool useDefinition) + where TKey : notnull + { + return new VolatileVectorStoreRecordCollection>( + this._collectionStore, + this._collectionStoreTypes, + TestCollectionName, + new() + { + VectorStoreRecordDefinition = useDefinition ? this._singlePropsDefinition : null + }); + } + + private readonly VectorStoreRecordDefinition _singlePropsDefinition = new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("Tags", typeof(List)) { IsFilterable = true }, + new VectorStoreRecordDataProperty("Data", typeof(string)) { IsFilterable = true }, + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) + ] + }; + + public sealed class SinglePropsModel + { + [VectorStoreRecordKey] + public TKey? Key { get; set; } + + [VectorStoreRecordData(IsFilterable = true)] + public List Tags { get; set; } = new List(); + + [VectorStoreRecordData(IsFilterable = true)] + public string Data { get; set; } = string.Empty; + + [VectorStoreRecordVector] + public ReadOnlyMemory? Vector { get; set; } + + public string? NotAnnotated { get; set; } + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreTests.cs new file mode 100644 index 000000000000..70d6e0264dfb --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreTests.cs @@ -0,0 +1,103 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Concurrent; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Data; +using Xunit; + +namespace SemanticKernel.UnitTests.Data; + +/// +/// Contains tests for the class. +/// +[Obsolete("The VolatileVectorStore is obsolete so these tests are as well.")] +public class VolatileVectorStoreTests +{ + private const string TestCollectionName = "testcollection"; + + [Fact] + public void GetCollectionReturnsCollection() + { + // Arrange. + var sut = new VolatileVectorStore(); + + // Act. + var actual = sut.GetCollection>(TestCollectionName); + + // Assert. + Assert.NotNull(actual); + Assert.IsType>>(actual); + } + + [Fact] + public void GetCollectionReturnsCollectionWithNonStringKey() + { + // Arrange. + var sut = new VolatileVectorStore(); + + // Act. + var actual = sut.GetCollection>(TestCollectionName); + + // Assert. + Assert.NotNull(actual); + Assert.IsType>>(actual); + } + + [Fact] + public async Task ListCollectionNamesReadsDictionaryAsync() + { + // Arrange. + var collectionStore = new ConcurrentDictionary>(); + collectionStore.TryAdd("collection1", new ConcurrentDictionary()); + collectionStore.TryAdd("collection2", new ConcurrentDictionary()); + var sut = new VolatileVectorStore(collectionStore); + + // Act. + var collectionNames = sut.ListCollectionNamesAsync(); + + // Assert. + var collectionNamesList = await collectionNames.ToListAsync(); + Assert.Equal(new[] { "collection1", "collection2" }, collectionNamesList); + } + + [Fact] + public async Task GetCollectionDoesNotAllowADifferentDataTypeThanPreviouslyUsedAsync() + { + // Arrange. 
+ var sut = new VolatileVectorStore(); + var stringKeyCollection = sut.GetCollection>(TestCollectionName); + await stringKeyCollection.CreateCollectionAsync(); + + // Act and assert. + var exception = Assert.Throws(() => sut.GetCollection(TestCollectionName)); + Assert.Equal($"Collection '{TestCollectionName}' already exists and with data type 'SinglePropsModel`1' so cannot be re-created with data type 'SecondModel'.", exception.Message); + } + +#pragma warning disable CA1812 // Classes are used as generic arguments + private sealed class SinglePropsModel + { + [VectorStoreRecordKey] + public required TKey Key { get; set; } + + [VectorStoreRecordData] + public string Data { get; set; } = string.Empty; + + [VectorStoreRecordVector(4)] + public ReadOnlyMemory? Vector { get; set; } + + public string? NotAnnotated { get; set; } + } + + private sealed class SecondModel + { + [VectorStoreRecordKey] + public required int Key { get; set; } + + [VectorStoreRecordData] + public string Data { get; set; } = string.Empty; + } +#pragma warning restore CA1812 +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs index c9c348d1ac44..f7a4e947ec38 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs @@ -64,7 +64,7 @@ public void ItProvideStatusForResponsesWithoutContent() // Assert Assert.NotNull(httpOperationException); Assert.NotNull(httpOperationException.StatusCode); - Assert.Null(httpOperationException.ResponseContent); + Assert.Empty(httpOperationException.ResponseContent!); Assert.Equal(exception, httpOperationException.InnerException); Assert.Equal(exception.Message, httpOperationException.Message); Assert.Equal(pipelineResponse.Status, (int)httpOperationException.StatusCode!); diff --git a/dotnet/src/SemanticKernel.UnitTests/Filters/PromptRenderFilterTests.cs b/dotnet/src/SemanticKernel.UnitTests/Filters/PromptRenderFilterTests.cs index 9d06c9e71630..3a0f1e627bd6 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Filters/PromptRenderFilterTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Filters/PromptRenderFilterTests.cs @@ -321,42 +321,4 @@ public async Task FilterContextHasValidStreamingFlagAsync(bool isStreaming) // Assert Assert.Equal(isStreaming, actualStreamingFlag); } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public async Task PromptExecutionSettingsArePropagatedToFilterContextAsync(bool isStreaming) - { - // Arrange - PromptExecutionSettings? 
actualExecutionSettings = null; - - var mockTextGeneration = this.GetMockTextGeneration(); - - var function = KernelFunctionFactory.CreateFromPrompt("Prompt"); - - var kernel = this.GetKernelWithFilters(textGenerationService: mockTextGeneration.Object, - onPromptRender: (context, next) => - { - actualExecutionSettings = context.ExecutionSettings; - return next(context); - }); - - var expectedExecutionSettings = new PromptExecutionSettings(); - - var arguments = new KernelArguments(expectedExecutionSettings); - - // Act - if (isStreaming) - { - await foreach (var item in kernel.InvokeStreamingAsync(function, arguments)) - { } - } - else - { - await kernel.InvokeAsync(function, arguments); - } - - // Assert - Assert.Same(expectedExecutionSettings, actualExecutionSettings); - } } diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelPluginTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelPluginTests.cs index db62e3ad6769..877bf8a90857 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelPluginTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelPluginTests.cs @@ -208,19 +208,21 @@ public async Task ItCanProduceAIFunctionsThatInvokeKernelFunctions(bool withKern AIFunction[] funcs = plugin.AsAIFunctions(kernel).ToArray(); Assert.Equal(2, funcs.Length); - Assert.Equal("PluginName-Function1", funcs[0].Name); - Assert.Equal("PluginName-Function2", funcs[1].Name); - - var func1Properties = funcs[0].JsonSchema.GetProperty("properties").EnumerateObject().ToArray(); - var func2Properties = funcs[1].JsonSchema.GetProperty("properties").EnumerateObject().ToArray(); - Assert.Equal("arg1", Assert.Single(func1Properties).Name); - Assert.Equal(2, func2Properties.Length); - Assert.Equal("arg2", func2Properties[0].Name); - Assert.Equal("arg3", func2Properties[1].Name); - - Assert.Equal(plugin["Function1"].Metadata.Parameters[0].Schema?.ToString(), JsonSerializer.Serialize(func1Properties[0].Value)); - Assert.Equal(plugin["Function2"].Metadata.Parameters[0].Schema?.ToString(), JsonSerializer.Serialize(func2Properties[0].Value)); - Assert.Equal(plugin["Function2"].Metadata.Parameters[1].Schema?.ToString(), JsonSerializer.Serialize(func2Properties[1].Value)); + Assert.Equal("PluginName-Function1", funcs[0].Metadata.Name); + Assert.Equal("PluginName-Function2", funcs[1].Metadata.Name); + + Assert.Equal("arg1", Assert.Single(funcs[0].Metadata.Parameters).Name); + Assert.Equal(2, funcs[1].Metadata.Parameters.Count); + Assert.Equal("arg2", funcs[1].Metadata.Parameters[0].Name); + Assert.Equal("arg3", funcs[1].Metadata.Parameters[1].Name); + + Assert.NotNull(funcs[0].Metadata.Parameters[0].Schema); + Assert.NotNull(funcs[1].Metadata.Parameters[0].Schema); + Assert.NotNull(funcs[1].Metadata.Parameters[1].Schema); + + Assert.Equal(plugin["Function1"].Metadata.Parameters[0].Schema?.ToString(), JsonSerializer.Serialize(funcs[0].Metadata.Parameters[0].Schema)); + Assert.Equal(plugin["Function2"].Metadata.Parameters[0].Schema?.ToString(), JsonSerializer.Serialize(funcs[1].Metadata.Parameters[0].Schema)); + Assert.Equal(plugin["Function2"].Metadata.Parameters[1].Schema?.ToString(), JsonSerializer.Serialize(funcs[1].Metadata.Parameters[1].Schema)); using CancellationTokenSource cts = new(); diff --git a/dotnet/src/SemanticKernel.UnitTests/Prompt/ChatPromptParserTests.cs b/dotnet/src/SemanticKernel.UnitTests/Prompt/ChatPromptParserTests.cs index c081d35bc573..e3ad0cd53a5c 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Prompt/ChatPromptParserTests.cs +++ 
b/dotnet/src/SemanticKernel.UnitTests/Prompt/ChatPromptParserTests.cs @@ -12,7 +12,7 @@ public sealed class ChatPromptParserTests [Theory] [InlineData("This is plain prompt")] [InlineData("")] - [InlineData("This is an invalid chat prompt")] + [InlineData("This is invalidchat prompt")] public void ItReturnsNullChatHistoryWhenPromptIsPlainTextOrInvalid(string prompt) { // Act @@ -148,86 +148,6 @@ public void ItReturnsChatHistoryWithValidDataImageContent() }); } - [Fact] - public void ItReturnsChatHistoryWithMultipleTextParts() - { - // Arrange - string prompt = GetValidPromptWithMultipleTextParts(); - - // Act - bool result = ChatPromptParser.TryParse(prompt, out var chatHistory); - - // Assert - Assert.True(result); - Assert.NotNull(chatHistory); - - Assert.Collection(chatHistory, - c => Assert.Equal("What can I help with?", c.Content), - c => - { - Assert.Equal("Hello", c.Content); - Assert.Collection(c.Items, - o => - { - Assert.IsType(o); - Assert.Equal("Hello", ((TextContent)o).Text); - }, o => - { - Assert.IsType(o); - Assert.Equal("I am user", ((TextContent)o).Text); - }); - }); - } - - [Fact] - public void ItReturnsChatHistoryWithMixedXmlContent() - { - // Arrange - string prompt = GetValidPromptWithMixedXmlContent(); - - // Act - bool result = ChatPromptParser.TryParse(prompt, out var chatHistory); - - // Assert - Assert.True(result); - Assert.NotNull(chatHistory); - - Assert.Collection(chatHistory, - c => Assert.Equal("What can I help with?", c.Content), - c => - { - Assert.Equal("Hi how are you?", c.Content); - Assert.Single(c.Items); - }); - } - - [Fact] - public void ItReturnsChatHistoryWithEmptyContent() - { - // Arrange - string prompt = GetValidPromptWithEmptyContent(); - - // Act - bool result = ChatPromptParser.TryParse(prompt, out var chatHistory); - - // Assert - Assert.True(result); - Assert.NotNull(chatHistory); - - Assert.Collection(chatHistory, - c => Assert.Equal("What can I help with?", c.Content), - c => - { - Assert.Null(c.Content); - Assert.Empty(c.Items); - }, - c => - { - Assert.Null(c.Content); - Assert.Empty(c.Items); - }); - } - [Fact] public void ItReturnsChatHistoryWithValidContentItemsIncludeCode() { @@ -339,50 +259,6 @@ private static string GetValidPromptWithDataUriImageContent() """; } - private static string GetValidPromptWithMultipleTextParts() - { - return - """ - - What can I help with? - - - Hello - I am user - - - """; - } - - private static string GetValidPromptWithMixedXmlContent() - { - return - """ - - What can I help with? - - - This part will be discarded upon parsing - Hi how are you? - This part will also be discarded upon parsing - - - """; - } - - private static string GetValidPromptWithEmptyContent() - { - return - """ - - What can I help with? 
- - - - - """; - } - private static string GetValidPromptWithCDataSection() { return diff --git a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj index e3452f799be6..1e95741e153e 100644 --- a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj +++ b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj @@ -25,6 +25,7 @@ + diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/AIConnectors/FunctionCallsProcessorTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/AIConnectors/FunctionCallsProcessorTests.cs index fa488e6e7146..6f0b40f8e82d 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Utilities/AIConnectors/FunctionCallsProcessorTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/AIConnectors/FunctionCallsProcessorTests.cs @@ -11,7 +11,6 @@ using Microsoft.SemanticKernel.ChatCompletion; #pragma warning disable IDE0005 // Using directive is unnecessary using Microsoft.SemanticKernel.Connectors.FunctionCalling; - #pragma warning restore IDE0005 // Using directive is unnecessary using Moq; using Xunit; @@ -22,7 +21,6 @@ public class FunctionCallsProcessorTests { private readonly FunctionCallsProcessor _sut = new(); private readonly FunctionChoiceBehaviorOptions _functionChoiceBehaviorOptions = new(); - private readonly PromptExecutionSettings _promptExecutionSettings = new(); [Fact] public void ItShouldReturnNoConfigurationIfNoBehaviorProvided() @@ -96,7 +94,6 @@ async Task ProcessFunctionCallsRecursivelyToReachInflightLimitAsync() await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: [], requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -126,7 +123,6 @@ public async Task ItShouldAddFunctionCallAssistantMessageToChatHistoryAsync() // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -155,7 +151,6 @@ public async Task ItShouldAddFunctionCallExceptionToChatHistoryAsync() // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -189,7 +184,6 @@ public async Task ItShouldAddFunctionInvocationExceptionToChatHistoryAsync() // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -218,7 +212,6 @@ public async Task ItShouldAddErrorToChatHistoryIfFunctionCallNotAdvertisedAsync( // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => false, // Return false to simulate that the function is not advertised @@ -247,7 +240,6 @@ public async Task ItShouldAddErrorToChatHistoryIfFunctionIsNotRegisteredOnKernel // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -289,7 +281,6 @@ public async Task ItShouldInvokeFunctionsAsync(bool 
invokeConcurrently) // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -356,7 +347,6 @@ public async Task ItShouldInvokeFiltersAsync(bool invokeConcurrently) // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -446,7 +436,6 @@ public async Task ItShouldInvokeMultipleFiltersInOrderAsync(bool invokeConcurren // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -495,7 +484,6 @@ public async Task FilterCanOverrideArgumentsAsync(bool invokeConcurrently) // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -548,7 +536,6 @@ public async Task FilterCanHandleExceptionAsync(bool invokeConcurrently) // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -601,7 +588,6 @@ public async Task FiltersCanSkipFunctionExecutionAsync(bool invokeConcurrently) // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -648,7 +634,6 @@ public async Task PreFilterCanTerminateOperationAsync(bool invokeConcurrently) // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -693,7 +678,6 @@ public async Task PostFilterCanTerminateOperationAsync(bool invokeConcurrently) // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -748,7 +732,6 @@ public async Task ItShouldHandleChatMessageContentAsFunctionResultAsync() // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -784,7 +767,6 @@ public async Task ItShouldSerializeFunctionResultOfUnknowTypeAsync() // Act await this._sut.ProcessFunctionCallsAsync( chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, chatHistory: chatHistory, requestIndex: 0, checkIfFunctionAdvertised: (_) => true, @@ -842,53 +824,6 @@ public void ItShouldSerializeFunctionResultsOfComplexType() Assert.Equal("{\"a\":2,\"b\":\"test\"}", result); } - [Fact] - public void ItShouldSerializeFunctionResultsWithStringProperties() - { - // Arrange - var functionResult = new { Text = "テスト" }; - - // Act - var result = FunctionCallsProcessor.ProcessFunctionResult(functionResult); - - // Assert - Assert.Equal("{\"Text\":\"テスト\"}", result); - 
} - - [Fact] - public async Task ItShouldPassPromptExecutionSettingsToAutoFunctionInvocationFilterAsync() - { - // Arrange - var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => { }, "Function1")]); - - AutoFunctionInvocationContext? actualContext = null; - - Kernel kernel = CreateKernel(plugin, (context, next) => - { - actualContext = context; - return Task.CompletedTask; - }); - - var chatMessageContent = new ChatMessageContent(); - chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function1-result" })); - - // Act - await this._sut.ProcessFunctionCallsAsync( - chatMessageContent: chatMessageContent, - executionSettings: this._promptExecutionSettings, - chatHistory: new ChatHistory(), - requestIndex: 0, - checkIfFunctionAdvertised: (_) => true, - options: this._functionChoiceBehaviorOptions, - kernel: kernel!, - isStreaming: false, - cancellationToken: CancellationToken.None); - - // Assert - Assert.NotNull(actualContext); - Assert.Same(this._promptExecutionSettings, actualContext!.ExecutionSettings); - } - private sealed class AutoFunctionInvocationFilter( Func, Task>? onAutoFunctionInvocation) : IAutoFunctionInvocationFilter { diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/ActivityExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/ActivityExtensionsTests.cs deleted file mode 100644 index 26975a6bfb29..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Utilities/ActivityExtensionsTests.cs +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; -using Moq; -using Xunit; - -namespace SemanticKernel.UnitTests.Utilities; - -/// -/// Unit tests for activity extensions. 
-/// -public sealed class ActivityExtensionsTests -{ - [Fact] - public async Task RunWithActivityByDefaultReturnsExpectedResultsAsync() - { - // Arrange - var activityMock = new Mock(MockBehavior.Loose, "ActivityName"); - - // Act - var results = await ActivityExtensions.RunWithActivityAsync( - () => activityMock.Object, - () => new[] { 1, 2, 3 }.ToAsyncEnumerable(), CancellationToken.None).ToListAsync(); - - // Assert - Assert.Equal(new[] { 1, 2, 3 }, results); - } - - [Fact] - public async Task RunWithActivityWhenOperationThrowsExceptionActivitySetsErrorAndThrowsAsync() - { - // Arrange - var activityMock = new Mock(MockBehavior.Loose, "ActivityName"); - - // Act & Assert - var ex = await Assert.ThrowsAsync(async () => - await ActivityExtensions.RunWithActivityAsync( - () => activityMock.Object, - () => throw new InvalidOperationException("Test exception"), - CancellationToken.None).ToListAsync()); - - Assert.Equal("Test exception", ex.Message); - Assert.Equal(ActivityStatusCode.Error, activityMock.Object.Status); - - var errorTag = activityMock.Object.Tags.FirstOrDefault(l => l.Key == "error.type"); - - Assert.Contains(nameof(InvalidOperationException), errorTag.Value); - } - - [Fact] - public async Task RunWithActivityWhenEnumerationThrowsExceptionActivitySetsErrorAndThrows() - { - // Arrange - var activityMock = new Mock(MockBehavior.Loose, "ActivityName"); - - async static IAsyncEnumerable Operation() - { - yield return 1; - await Task.Yield(); - throw new InvalidOperationException("Enumeration error"); - } - - // Act & Assert - var ex = await Assert.ThrowsAsync(async () => - await ActivityExtensions.RunWithActivityAsync( - () => activityMock.Object, - Operation, - CancellationToken.None).ToListAsync()); - - Assert.Equal("Enumeration error", ex.Message); - Assert.Equal(ActivityStatusCode.Error, activityMock.Object.Status); - - var errorTag = activityMock.Object.Tags.FirstOrDefault(l => l.Key == "error.type"); - - Assert.Contains(nameof(InvalidOperationException), errorTag.Value); - } - - [Fact] - public async Task RunWithActivityWhenCancellationRequestedThrowsTaskCanceledException() - { - // Arrange - using var cts = new CancellationTokenSource(); - cts.Cancel(); - - var activityMock = new Mock(MockBehavior.Loose, "ActivityName"); - - async static IAsyncEnumerable Operation([EnumeratorCancellation] CancellationToken token) - { - await Task.Delay(10, token); - yield return 1; - } - - // Act & Assert - var ex = await Assert.ThrowsAsync(async () => - await ActivityExtensions.RunWithActivityAsync( - () => activityMock.Object, - () => Operation(cts.Token), - cts.Token).ToListAsync()); - } -} diff --git a/python/.coveragerc b/python/.coveragerc index b51952a0c8e8..dc37f315b86e 100644 --- a/python/.coveragerc +++ b/python/.coveragerc @@ -7,7 +7,7 @@ omit = semantic_kernel/connectors/memory/azure_cosmosdb_no_sql/* semantic_kernel/connectors/memory/chroma/* semantic_kernel/connectors/memory/milvus/* - semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_memory_store.py + semantic_kernel/connectors/memory/mongodb_atlas/* semantic_kernel/connectors/memory/pinecone/* semantic_kernel/connectors/memory/postgres/* semantic_kernel/connectors/memory/qdrant/qdrant_memory_store.py diff --git a/python/.cspell.json b/python/.cspell.json index 485789ae22a1..ea24ad2d7ce4 100644 --- a/python/.cspell.json +++ b/python/.cspell.json @@ -25,14 +25,12 @@ "words": [ "aeiou", "aiplatform", - "autogen", "azuredocindex", "azuredocs", "boto", "contentvector", "contoso", "datamodel", - "desync", "dotenv", 
"endregion", "entra", @@ -47,12 +45,10 @@ "logprobs", "mistralai", "mongocluster", - "nd", "ndarray", "nopep", "NOSQL", "ollama", - "Onnx", "onyourdatatest", "OPENAI", "opentelemetry", @@ -68,10 +64,18 @@ "templating", "uninstrument", "vectordb", - "vectorizable", "vectorizer", "vectorstoremodel", "vertexai", - "Weaviate" + "Weaviate", + "qdrant", + "huggingface", + "pytestmark", + "contoso", + "opentelemetry", + "SEMANTICKERNEL", + "OTEL", + "vectorizable", + "desync" ] } \ No newline at end of file diff --git a/python/.env.example b/python/.env.example index 7d9a407dc877..8e46ec2bb6de 100644 --- a/python/.env.example +++ b/python/.env.example @@ -34,6 +34,4 @@ BOOKING_SAMPLE_CLIENT_ID="" BOOKING_SAMPLE_TENANT_ID="" BOOKING_SAMPLE_CLIENT_SECRET="" BOOKING_SAMPLE_BUSINESS_ID="" -BOOKING_SAMPLE_SERVICE_ID="" -CREW_AI_ENDPOINT="" -CREW_AI_TOKEN="" \ No newline at end of file +BOOKING_SAMPLE_SERVICE_ID="" \ No newline at end of file diff --git a/python/.pre-commit-config.yaml b/python/.pre-commit-config.yaml index 11beea8e1877..cd935fb59473 100644 --- a/python/.pre-commit-config.yaml +++ b/python/.pre-commit-config.yaml @@ -32,14 +32,14 @@ repos: - id: pyupgrade args: [--py310-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.6 + rev: v0.7.4 hooks: - id: ruff args: [ --fix, --exit-non-zero-on-fix ] - id: ruff-format - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.5.30 + rev: 0.5.21 hooks: # Update the uv lockfile - id: uv-lock diff --git a/python/.vscode/launch.json b/python/.vscode/launch.json index 80145e18a817..831aaf5149bc 100644 --- a/python/.vscode/launch.json +++ b/python/.vscode/launch.json @@ -10,7 +10,7 @@ "request": "launch", "program": "${file}", "console": "integratedTerminal", - "justMyCode": false + "justMyCode": true }, { "name": "Python FastAPI app with Dapr", diff --git a/python/Makefile b/python/Makefile index b515e0c5cae1..745bf2623921 100644 --- a/python/Makefile +++ b/python/Makefile @@ -32,7 +32,7 @@ help: echo "" echo -e "\033[1mVARIABLES:\033[0m" echo " PYTHON_VERSION - Python version to use. Default is 3.10" - echo " By default, 3.10, 3.11, 3.12 and 3.13 are installed as well." + echo " By default, 3.10, 3.11, and 3.12 are installed as well." ############################## # INSTALL @@ -57,13 +57,7 @@ else ifeq ($(CONTINUE),1) else echo "uv could not be found." echo "Installing uv..." - if [ -n "$$VIRTUAL_ENV" ]; then \ - echo "Detected virtual environment at $$VIRTUAL_ENV, installing uv there..."; \ - curl -LsSf https://astral.sh/uv/install.sh | INSTALL_DIR="$$VIRTUAL_ENV/bin" sh; \ - else \ - echo "No virtual environment detected, installing uv globally..."; \ - curl -LsSf https://astral.sh/uv/install.sh | sh; \ - fi + curl -LsSf https://astral.sh/uv/install.sh | sh echo "uv installed." echo "Re-executing shell so uv is immediately available on PATH..." 
exec $$SHELL -c 'make install CONTINUE=1' @@ -74,8 +68,8 @@ endif ############################## .ONESHELL: install-python: - echo "Installing python versions" - uv python install 3.10 3.11 3.12 3.13 + echo "Installing python 3.10, 3.11, 3.12" + uv python install 3.10 3.11 3.12 ############################## # INSTALL-PRE-COMMIT @@ -93,7 +87,7 @@ install-sk: echo "Creating and activating venv for python $(PYTHON_VERSION)" uv venv --python $(PYTHON_VERSION) echo "Installing Semantic Kernel and all dependencies" - uv sync --all-extras --dev --prerelease=if-necessary-or-explicit + uv sync --all-extras --dev ############################## # CLEAN diff --git a/python/README.md b/python/README.md index 166d4ed38777..3e010df1102b 100644 --- a/python/README.md +++ b/python/README.md @@ -37,9 +37,7 @@ AZURE_OPENAI_ENDPOINT="" AZURE_OPENAI_API_KEY="" ``` -Put the .env file in the root directory. - -If you place the .env in a different directory, configure the Text/ChatCompletion class with the keyword argument `env_file_path`: +You will then configure the Text/ChatCompletion class with the keyword argument `env_file_path`: ```python chat_completion = OpenAIChatCompletion(service_id="test", env_file_path=) diff --git a/python/pyproject.toml b/python/pyproject.toml index 4aa5d8114f31..0dc38b0b57f9 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -34,7 +34,7 @@ dependencies = [ "numpy >= 1.25.0; python_version < '3.12'", "numpy >= 1.26.0; python_version >= '3.12'", # openai connector - "openai ~= 1.61", + "openai ~= 1.0", # openapi and swagger "openapi_core >= 0.18,<0.20", # OpenTelemetry @@ -45,17 +45,12 @@ dependencies = [ "pybars4 ~= 0.9", "jinja2 ~= 3.1", "nest-asyncio ~= 1.6", - "scipy>=1.15.1", ] ### Optional dependencies [project.optional-dependencies] -autogen = [ - "autogen-agentchat >= 0.2, <0.4" -] azure = [ "azure-ai-inference >= 1.0.0b6", - "azure-ai-projects >= 1.0.0b5", "azure-core-tracing-opentelemetry >= 1.0.0b11", "azure-search-documents >= 11.6.0b4", "azure-identity ~= 1.13", @@ -65,17 +60,17 @@ chroma = [ "chromadb >= 0.5,< 0.7" ] google = [ - "google-cloud-aiplatform == 1.82.0", - "google-generativeai ~= 0.8" + "google-cloud-aiplatform ~= 1.60", + "google-generativeai ~= 0.7" ] hugging_face = [ "transformers[torch] ~= 4.28", "sentence-transformers >= 2.2,< 4.0", - "torch == 2.6.0" + "torch == 2.5.1" ] mongo = [ - "pymongo >= 4.8.0, < 4.12", - "motor >= 3.3.2,< 3.8.0" + "pymongo >= 4.8.0, < 4.11", + "motor >= 3.3.2,< 3.7.0" ] notebooks = [ "ipykernel ~= 6.29" @@ -91,7 +86,7 @@ ollama = [ "ollama ~= 0.4" ] onnx = [ - "onnxruntime-genai ~= 0.5; python_version < '3.13'" + "onnxruntime-genai ~= 0.5" ] anthropic = [ "anthropic ~= 0.32" @@ -111,27 +106,23 @@ redis = [ "redisvl >= 0.3.6", ] usearch = [ - "usearch ~= 2.16", + "usearch ~= 2.9", "pyarrow >= 12.0,< 20.0" ] weaviate = [ - "weaviate-client>=4.10,<5.0", + "weaviate-client>=4.7,<5.0", ] pandas = [ "pandas ~= 2.2" ] aws = [ - "boto3>=1.36.4,<1.38.0", + "boto3>=1.28.57", ] dapr = [ "dapr>=1.14.0", "dapr-ext-fastapi>=1.14.0", "flask-dapr>=1.14.0" ] -realtime = [ - "websockets >= 13, < 15", - "aiortc>=1.9.0", -] [tool.uv] prerelease = "if-necessary-or-explicit" @@ -147,7 +138,7 @@ dev-dependencies = [ "snoop ~= 0.4", "mypy >= 1.10", "types-PyYAML ~= 6.0.12.20240311", - "ruff ~= 0.9" + "ruff ~= 0.7", ] environments = [ "sys_platform == 'darwin'", @@ -229,3 +220,5 @@ name = "semantic_kernel" [build-system] requires = ["flit-core >= 3.9,<4.0"] build-backend = "flit_core.buildapi" + + diff --git a/python/samples/README.md 
b/python/samples/README.md index b7ed7617e1ed..3062daa353f7 100644 --- a/python/samples/README.md +++ b/python/samples/README.md @@ -1,10 +1,9 @@ -# Semantic Kernel Samples +## Semantic Kernel Samples | Type | Description | | ------------------------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------- | | [`getting_started`](./getting_started/CONFIGURING_THE_KERNEL.md) | Take this step by step tutorial to get started with Semantic Kernel and get introduced to the key concepts. | | [`getting_started_with_agents`](./getting_started_with_agents/README.md) | Take this step by step tutorial to get started with Semantic Kernel Agents and get introduced to the key concepts. | -| [`getting_started_with_processes`](./getting_started_with_processes/README.md) | Take this step by step tutorial to get started with Semantic Kernel Processes and get introduced to the key concepts. | | [`concepts`](./concepts/README.md) | This section contains focused samples which illustrate all of the concepts included in Semantic Kernel. | | [`demos`](./demos/README.md) | Look here to find a sample which demonstrate how to use many of Semantic Kernel features. | | [`learn_resources`](./learn_resources/README.md) | Code snippets that are related to online documentation sources like Microsoft Learn, DevBlogs and others | diff --git a/python/samples/SAMPLE_GUIDELINES.md b/python/samples/SAMPLE_GUIDELINES.md deleted file mode 100644 index 46815a464115..000000000000 --- a/python/samples/SAMPLE_GUIDELINES.md +++ /dev/null @@ -1,81 +0,0 @@ -# Sample Guidelines - -Samples are extremely important for developers to get started with Semantic Kernel. We strive to provide a wide range of samples that demonstrate the capabilities of Semantic Kernel with consistency and quality. This document outlines the guidelines for creating samples. - -## General Guidelines - -- **Clear and Concise**: Samples should be clear and concise. They should demonstrate a specific set of features or capabilities of Semantic Kernel. The less concepts a sample demonstrates, the better. -- **Consistent Structure**: All samples should have a consistent structure. This includes the folder structure, file naming, and the content of the sample. -- **Incremental Complexity**: Samples should start simple and gradually increase in complexity. This helps developers understand the concepts and features of Semantic Kernel. -- **Documentation**: Samples should be over-documented. - -### **Clear and Concise** - -Try not to include too many concepts in a single sample. The goal is to demonstrate a specific feature or capability of Semantic Kernel. If you find yourself including too many concepts, consider breaking the sample into multiple samples. A good example of this is to break non-streaming and streaming modes into separate samples. - -### **Consistent Structure** - -#### Getting Started Samples - -The getting started samples are the simplest samples that require minimal setup. These samples should be named in the following format: `step_.py`. One exception to this rule is when the sample is a notebook, in which case the sample should be named in the following format: `_.ipynb`. - -#### Concept Samples - -Concept samples under [./concepts](./concepts) should be grouped by feature or capability. These samples should be relatively short and demonstrate a specific concept. These samples are more advanced than the getting started samples. 
- -#### Demos - -Demos under [./demos](./demos) are full console applications that demonstrate a specific set of features or capabilities of Semantic Kernel, potentially with external dependencies. Each of the demos should have a README.md file that explains the purpose of the demo and how to run it. - -### **Incremental Complexity** - -Try to do a best effort to make sure that the samples are incremental in complexity. For example, in the getting started samples, each step should build on the previous step, and the concept samples should build on the getting started samples, same with the demos. - -### **Documentation** - -Try to over-document the samples. This includes comments in the code, README.md files, and any other documentation that is necessary to understand the sample. We use the guidance from [PEP8](https://peps.python.org/pep-0008/#comments) for comments in the code, with a deviation for the initial summary comment in samples and the output of the samples. - -For the getting started samples and the concept samples, we should have the following: - -1. A README.md file is included in each set of samples that explains the purpose of the samples and the setup required to run them. -2. A summary should be included at the top of the file that explains the purpose of the sample and required components/concepts to understand the sample. For example: - - ```python - ''' - This sample shows how to create a chatbot. This sample uses the following two main components: - - a ChatCompletionService: This component is responsible for generating responses to user messages. - - a ChatHistory: This component is responsible for keeping track of the chat history. - The chatbot in this sample is called Mosscap, who responds to user messages with long flowery prose. - ''' - ``` - -3. Mark the code with comments to explain the purpose of each section of the code. For example: - - ```python - # 1. Create the instance of the Kernel to register the plugin and service. - ... - - # 2. Create the agent with the kernel instance. - ... - ``` - - > This will also allow the sample creator to track if the sample is getting too complex. - -4. At the end of the sample, include a section that explains the expected output of the sample. For example: - - ```python - ''' - Sample output: - User:> Why is the sky blue in one sentence? - Mosscap:> The sky is blue due to the scattering of sunlight by the molecules in the Earth's atmosphere, - a phenomenon known as Rayleigh scattering, which causes shorter blue wavelengths to become more - prominent in our visual perception. - ''' - ``` - -For the demos, a README.md file must be included that explains the purpose of the demo and how to run it. The README.md file should include the following: - -- A description of the demo. -- A list of dependencies required to run the demo. -- Instructions on how to run the demo. -- Expected output of the demo. 
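Taken together, the guidelines above imply a common skeleton for a getting-started or concept sample. The following minimal sketch only illustrates that documentation layout (summary docstring, numbered step comments, sample-output block); the actual service wiring is elided, and the chatbot described is the illustrative example from these guidelines rather than any shipped sample:

```python
# Copyright (c) Microsoft. All rights reserved.

"""
This sample shows how to create a chatbot. This sample uses the following two main components:
- a ChatCompletionService: This component is responsible for generating responses to user messages.
- a ChatHistory: This component is responsible for keeping track of the chat history.
The chatbot in this sample is called Mosscap, who responds to user messages with long flowery prose.
"""

import asyncio


async def main() -> None:
    # 1. Create the instance of the Kernel and register the chat completion service.
    ...

    # 2. Add the user's message to a ChatHistory and invoke the service.
    ...

    # 3. Print the response and append it back to the chat history.
    ...


if __name__ == "__main__":
    asyncio.run(main())

"""
Sample output:
User:> Why is the sky blue in one sentence?
Mosscap:> The sky is blue due to the scattering of sunlight by the molecules in the Earth's atmosphere,
a phenomenon known as Rayleigh scattering, which causes shorter blue wavelengths to become more
prominent in our visual perception.
"""
```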
diff --git a/python/samples/concepts/README.md b/python/samples/concepts/README.md index 72fe6258f876..22f0496e43e6 100644 --- a/python/samples/concepts/README.md +++ b/python/samples/concepts/README.md @@ -4,54 +4,18 @@ ### Agents - Creating and using [agents](../../semantic_kernel/agents/) in Semantic Kernel -#### [OpenAI Assistant Agent](../../semantic_kernel/agents/open_ai/open_ai_assistant_agent.py) - -- [OpenAI Assistant Chart Maker Streaming](./agents/openai_assistant/openai_assistant_chart_maker_streaming.py) -- [OpenAI Assistant Chart Maker](./agents/openai_assistant/openai_assistant_chart_maker.py) -- [OpenAI Assistant File Manipulation Streaming](./agents/openai_assistant/openai_assistant_file_manipulation_streaming.py) -- [OpenAI Assistant File Manipulation](./agents/openai_assistant/openai_assistant_file_manipulation.py) -- [OpenAI Assistant File Manipulation Streaming](./agents/openai_assistant/openai_assistant_file_manipulation_streaming.py) -- [OpenAI Assistant Retrieval](./agents/openai_assistant/openai_assistant_retrieval.py) -- [OpenAI Assistant Streaming](./agents/openai_assistant/openai_assistant_streaming.py) -- [OpenAI Assistant Structured Outputs](./agents/openai_assistant/openai_assistant_structured_outputs.py) -- [OpenAI Assistant Templating Streaming](./agents/openai_assistant/openai_assistant_templating_streaming.py) -- [OpenAI Assistant Vision Streaming](./agents/openai_assistant/openai_assistant_vision_streaming.py) - -#### [Azure AI Agent](../../semantic_kernel/agents/azure_ai/azure_ai_agent.py) - -- [Azure AI Agent with Azure AI Search](./agents/azure_ai_agent/azure_ai_agent_azure_ai_search.py) -- [Azure AI Agent File Manipulation](./agents/azure_ai_agent/azure_ai_agent_file_manipulation.py) -- [Azure AI Agent Streaming](./agents/azure_ai_agent/azure_ai_agent_streaming.py) - -#### [Bedrock Agent](../../semantic_kernel/agents/bedrock/bedrock_agent.py) - -- [Bedrock Agent Simple Chat Streaming](./agents/bedrock_agent/bedrock_agent_simple_chat_streaming.py) -- [Bedrock Agent Simple Chat](./agents/bedrock_agent/bedrock_agent_simple_chat.py) -- [Bedrock Agent With Code Interpreter Streaming](./agents/bedrock_agent/bedrock_agent_with_code_interpreter_streaming.py) -- [Bedrock Agent With Code Interpreter](./agents/bedrock_agent/bedrock_agent_with_code_interpreter.py) -- [Bedrock Agent With Kernel Function Simple](./agents/bedrock_agent/bedrock_agent_with_kernel_function_simple.py) -- [Bedrock Agent With Kernel Function Streaming](./agents/bedrock_agent/bedrock_agent_with_kernel_function_streaming.py) -- [Bedrock Agent With Kernel Function](./agents/bedrock_agent/bedrock_agent_with_kernel_function.py) -- [Bedrock Agent Mixed Chat Agents Streaming](./agents/bedrock_agent/bedrock_mixed_chat_agents_streaming.py) -- [Bedrock Agent Mixed Chat Agents](./agents/bedrock_agent/bedrock_mixed_chat_agents.py) - -#### [Chat Completion Agent](../../semantic_kernel/agents/chat_completion/chat_completion_agent.py) - -- [Chat Completion Function Termination](./agents/chat_completion_agent/chat_completion_function_termination.py) -- [Chat Completion Templating](./agents/chat_completion_agent/chat_completion_prompt_templating.py) -- [Chat Completion Summary History Reducer Agent Chat](./agents/chat_completion_agent/chat_completion_summary_history_reducer_agent_chat.py) -- [Chat Completion Summary History Reducer Single Agent](./agents/chat_completion_agent/chat_completion_summary_history_reducer_single_agent.py) -- [Chat Completion Truncate History Reducer Agent 
Chat](./agents/chat_completion_agent/chat_completion_truncate_history_reducer_agent_chat.py) -- [Chat Completion Truncate History Reducer Single Agent](./agents/chat_completion_agent/chat_completion_truncate_history_reducer_single_agent.py) - -#### [Mixed Agent Group Chat](../../semantic_kernel/agents/group_chat/agent_group_chat.py) - -- [Mixed Chat Agents Plugins](./agents/mixed_chat/mixed_chat_agents_plugins.py) -- [Mixed Chat Agents](./agents/mixed_chat/mixed_chat_agents.py) -- [Mixed Chat Files](./agents/mixed_chat/mixed_chat_files.py) -- [Mixed Chat Images](./agents/mixed_chat/mixed_chat_images.py) -- [Mixed Chat Reset](./agents/mixed_chat/mixed_chat_reset.py) -- [Mixed Chat Streaming](./agents/mixed_chat/mixed_chat_streaming.py) +- [Assistant Agent Chart Maker](./agents/assistant_agent_chart_maker.py) +- [Assistant Agent File Manipulation](./agents/assistant_agent_file_manipulation.py) +- [Assistant Agent File Manipulation Streaming](./agents/assistant_agent_file_manipulation_streaming.py) +- [Assistant Agent Retrieval](./agents/assistant_agent_retrieval.py) +- [Assistant Agent Streaming](./agents/assistant_agent_streaming.py) +- [Chat Completion Function Termination](./agents/chat_completion_function_termination.py) +- [Chat Completion History Reducer](./agents/chat_completion_history_reducer.py) +- [Mixed Chat Agents](./agents/mixed_chat_agents.py) +- [Mixed Chat Agents Plugins](./agents/mixed_chat_agents_plugins.py) +- [Mixed Chat Files](./agents/mixed_chat_files.py) +- [Mixed Chat Reset](./agents/mixed_chat_reset.py) +- [Mixed Chat Streaming](./agents/mixed_chat_streaming.py) ### Audio - Using services that support audio-to-text and text-to-audio conversion @@ -89,7 +53,6 @@ ### ChatHistory - Using and serializing the [`ChatHistory`](https://github.com/microsoft/semantic-kernel/blob/main/python/semantic_kernel/contents/chat_history.py) - [Serialize Chat History](./chat_history/serialize_chat_history.py) -- [Store Chat History in CosmosDB](./chat_history/store_chat_history_in_cosmosdb.py) ### Filtering - Creating and using Filters @@ -120,13 +83,12 @@ - [Setup Logging](./logging/setup_logging.py) -### Memory - Using [`Memory`](https://learn.microsoft.com/en-us/semantic-kernel/concepts/vector-store-connectors/?pivots=programming-language-python) AI concepts +### Memory - Using [`Memory`](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/SemanticKernel.Abstractions/Memory) AI concepts -- [Simple Memory](./memory/simple_memory.py) +- [Azure Cognitive Search Memory](./memory/azure_cognitive_search_memory.py) - [Memory Data Models](./memory/data_models.py) -- [Memory with Pandas Dataframes](./memory/memory_with_pandas.py) -- [Complex memory](./memory/complex_memory.py) -- [Full sample with Azure AI Search including function calling](./memory/azure_ai_search_hotel_samples/README.md) +- [New Memory](./memory/new_memory.py) +- [Pandas Memory](./memory/pandas_memory.py) ### Model-as-a-Service - Using models deployed as [`serverless APIs on Azure AI Studio`](https://learn.microsoft.com/en-us/azure/ai-studio/how-to/deploy-models-serverless?tabs=azure-ai-studio) to benchmark model performance against open-source datasets @@ -138,6 +100,12 @@ - [Azure Chat GPT with Data API Function Calling](./on_your_data/azure_chat_gpt_with_data_api_function_calling.py) - [Azure Chat GPT with Data API Vector Search](./on_your_data/azure_chat_gpt_with_data_api_vector_search.py) +### Planners - Showing the uses of 
[`Planners`](https://github.com/microsoft/semantic-kernel/tree/main/python/semantic_kernel/planners) + +- [Sequential Planner](./planners/sequential_planner.py) +- [OpenAI Function Calling Stepwise Planner](./planners/openai_function_calling_stepwise_planner.py) +- [Azure OpenAI Function Calling Stepwise Planner](./planners/azure_openai_function_calling_stepwise_planner.py) + ### Plugins - Different ways of creating and using [`Plugins`](https://github.com/microsoft/semantic-kernel/blob/main/python/semantic_kernel/functions/kernel_plugin.py) - [Azure Key Vault Settings](./plugins/azure_key_vault_settings.py) @@ -202,7 +170,7 @@ In Semantic Kernel for Python, we leverage Pydantic Settings to manage configura 1. **Reading Environment Variables:** - **Primary Source:** Pydantic first attempts to read the required settings from environment variables. - + 2. **Using a .env File:** - **Fallback Source:** If the required environment variables are not set, Pydantic will look for a `.env` file in the current working directory. - **Custom Path (Optional):** You can specify an alternative path for the `.env` file via `env_file_path`. This can be either a relative or an absolute path. @@ -220,4 +188,4 @@ To successfully retrieve and use the Entra Auth Token, you need the `Cognitive S - **.env File Placement:** We highly recommend placing the `.env` file in the `semantic-kernel/python` root directory. This is a common practice when developing in the Semantic Kernel repository. -By following these guidelines, you can ensure that your settings for various components are configured correctly, enabling seamless functionality and integration of Semantic Kernel in your Python projects. +By following these guidelines, you can ensure that your settings for various components are configured correctly, enabling seamless functionality and integration of Semantic Kernel in your Python projects. \ No newline at end of file diff --git a/python/samples/concepts/agents/README.md b/python/samples/concepts/agents/README.md index dad64006c78e..1260395f88f2 100644 --- a/python/samples/concepts/agents/README.md +++ b/python/samples/concepts/agents/README.md @@ -2,17 +2,13 @@ This project contains a step by step guide to get started with _Semantic Kernel Agents_ in Python. -## PyPI: - +#### PyPI: - For the use of Chat Completion agents, the minimum allowed Semantic Kernel pypi version is 1.3.0. - For the use of OpenAI Assistant agents, the minimum allowed Semantic Kernel pypi version is 1.4.0. - For the use of Agent Group Chat, the minimum allowed Semantic kernel pypi version is 1.6.0. -- For the use of Streaming OpenAI Assistant agents, the minimum allowed Semantic Kernel pypi version is 1.11.0. -- For the use of AzureAI and Bedrock agents, the minimum allowed Semantic Kernel pypi version is 1.21.0. -- For the use of Crew.AI as a plugin, the minimum allowed Semantic Kernel pypi version is 1.21.1. - +- For the use of Streaming OpenAI Assistant agents, the minimum allowed Semantic Kernel pypi version is 1.11.0 -## Source +#### Source - [Semantic Kernel Agent Framework](../../../semantic_kernel/agents/) @@ -23,12 +19,11 @@ The concept agents examples are grouped by prefix: Prefix|Description ---|--- assistant|How to use agents based on the [Open AI Assistant API](https://platform.openai.com/docs/assistants). -autogen_conversable_agent| How to use [AutoGen 0.2 Conversable Agents](https://microsoft.github.io/autogen/0.2/docs/Getting-Started) within Semantic Kernel. 
-azure_ai_agent|How to use an [Azure AI Agent](https://learn.microsoft.com/en-us/azure/ai-services/agents/quickstart?pivots=programming-language-python-azure) within Semantic Kernel. -chat_completion_agent|How to use Semantic Kernel Chat Completion agents that leverage AI Connector Chat Completion APIs. -bedrock|How to use [AWS Bedrock agents](https://aws.amazon.com/bedrock/agents/) in Semantic Kernel. +chat_completion|How to use Semantic Kernel Chat Completion agents. mixed_chat|How to combine different agent types. -openai_assistant|How to use [OpenAI Assistants](https://platform.openai.com/docs/assistants/overview) in Semantic Kernel. +complex_chat|**Coming Soon** + +*Note: As we strive for parity with .NET, more getting_started_with_agent samples will be added. The current steps and names may be revised to further align with our .NET counterpart.* ## Configuring the Kernel diff --git a/python/samples/concepts/agents/assistant_agent_chart_maker.py b/python/samples/concepts/agents/assistant_agent_chart_maker.py new file mode 100644 index 000000000000..34cfd77f40c0 --- /dev/null +++ b/python/samples/concepts/agents/assistant_agent_chart_maker.py @@ -0,0 +1,112 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio + +from semantic_kernel.agents.open_ai import AzureAssistantAgent, OpenAIAssistantAgent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.file_reference_content import FileReferenceContent +from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI and leverage the # +# assistant's code interpreter functionality to create charts. # +##################################################################### + +AGENT_NAME = "ChartMaker" +AGENT_INSTRUCTIONS = "Create charts as requested without explanation."
+ +# Note: you may toggle this to switch between AzureOpenAI and OpenAI +use_azure_openai = True + +streaming = True + + +# A helper method to invoke the agent with the user input +async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None: + """Invoke the agent with the user input.""" + await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input)) + + print(f"# {AuthorRole.USER}: '{input}'") + + if streaming: + first_chunk = True + async for message in agent.invoke_stream(thread_id=thread_id): + if message.content: + if first_chunk: + print(f"# {message.role}: ", end="", flush=True) + first_chunk = False + print(message.content, end="", flush=True) + + if len(message.items) > 0: + for item in message.items: + if isinstance(item, StreamingFileReferenceContent): + print(f"\n# {message.role} => {item.file_id}") + print() + else: + async for message in agent.invoke(thread_id=thread_id): + if message.content: + print(f"# {message.role}: {message.content}") + + if len(message.items) > 0: + for item in message.items: + if isinstance(item, FileReferenceContent): + print(f"\n`{message.role}` => {item.file_id}") + + +async def main(): + # Create the instance of the Kernel + kernel = Kernel() + + # Define a service_id for the sample + service_id = "agent" + + # Create the agent configuration + if use_azure_openai: + agent = await AzureAssistantAgent.create( + kernel=kernel, + service_id=service_id, + name=AGENT_NAME, + instructions=AGENT_INSTRUCTIONS, + enable_code_interpreter=True, + ) + else: + agent = await OpenAIAssistantAgent.create( + kernel=kernel, + service_id=service_id, + name=AGENT_NAME, + instructions=AGENT_INSTRUCTIONS, + enable_code_interpreter=True, + ) + + # Define a thread and invoke the agent with the user input + thread_id = await agent.create_thread() + + try: + await invoke_agent( + agent, + thread_id=thread_id, + input=""" + Display this data using a bar-chart: + + Banding Brown Pink Yellow Sum + X00000 339 433 126 898 + X00300 48 421 222 691 + X12345 16 395 352 763 + Others 23 373 156 552 + Sum 426 1622 856 2904 + """, + ) + await invoke_agent( + agent, + thread_id=thread_id, + input="Can you regenerate this same chart using the category names as the bar colors?", + ) + finally: + await agent.delete_thread(thread_id) + await agent.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/assistant_agent_file_manipulation.py b/python/samples/concepts/agents/assistant_agent_file_manipulation.py new file mode 100644 index 000000000000..ff13f38a5504 --- /dev/null +++ b/python/samples/concepts/agents/assistant_agent_file_manipulation.py @@ -0,0 +1,85 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio +import os + +from semantic_kernel.agents.open_ai import OpenAIAssistantAgent +from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent +from semantic_kernel.contents.annotation_content import AnnotationContent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI and leverage the # +# assistant's ability to have the code interpreter work with # +# uploaded files. 
# +##################################################################### + +AGENT_NAME = "FileManipulation" +AGENT_INSTRUCTIONS = "Find answers to the user's questions in the provided file." + + +# A helper method to invoke the agent with the user input +async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None: + """Invoke the agent with the user input.""" + await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input)) + + print(f"# {AuthorRole.USER}: '{input}'") + + async for content in agent.invoke(thread_id=thread_id): + print(f"# {content.role}: {content.content}") + + if len(content.items) > 0: + for item in content.items: + if isinstance(item, AnnotationContent): + print(f"\n`{item.quote}` => {item.file_id}") + response_content = await agent.client.files.content(item.file_id) + print(response_content.text) + + +async def main(): + # Create the instance of the Kernel + kernel = Kernel() + + # Define a service_id for the sample + service_id = "agent" + + # Get the path to the sales.csv file + csv_file_path = os.path.join( + os.path.dirname(os.path.dirname(os.path.realpath(__file__))), + "resources", + "agent_assistant_file_manipulation", + "sales.csv", + ) + + # Create the assistant agent + agent = await AzureAssistantAgent.create( + kernel=kernel, + service_id=service_id, + name=AGENT_NAME, + instructions=AGENT_INSTRUCTIONS, + enable_code_interpreter=True, + code_interpreter_filenames=[csv_file_path], + ) + + # Create a thread and specify the file to use for code interpretation + thread_id = await agent.create_thread() + + try: + await invoke_agent(agent, thread_id=thread_id, input="Which segment had the most sales?") + await invoke_agent(agent, thread_id=thread_id, input="List the top 5 countries that generated the most profit.") + await invoke_agent( + agent, + thread_id=thread_id, + input="Create a tab delimited file report of profit by each country per month.", + ) + finally: + if agent is not None: + [await agent.delete_file(file_id) for file_id in agent.code_interpreter_file_ids] + await agent.delete_thread(thread_id) + await agent.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/assistant_agent_file_manipulation_streaming.py b/python/samples/concepts/agents/assistant_agent_file_manipulation_streaming.py new file mode 100644 index 000000000000..8d9df1e458be --- /dev/null +++ b/python/samples/concepts/agents/assistant_agent_file_manipulation_streaming.py @@ -0,0 +1,88 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio +import os + +from semantic_kernel.agents.open_ai import OpenAIAssistantAgent +from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI and leverage the # +# assistant's ability to stream the response and have the code # +# interpreter work with uploaded files # +##################################################################### + +AGENT_NAME = "FileManipulation" +AGENT_INSTRUCTIONS = "Find answers to the user's questions in the provided file." 
+ + +# A helper method to invoke the agent with the user input +async def invoke_streaming_agent(agent: OpenAIAssistantAgent | AzureAssistantAgent, thread_id: str, input: str) -> None: + """Invoke the streaming agent with the user input.""" + await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input)) + + print(f"# {AuthorRole.USER}: '{input}'") + + first_chunk = True + async for content in agent.invoke_stream(thread_id=thread_id): + if content.role != AuthorRole.TOOL: + if first_chunk: + print(f"# {content.role}: ", end="", flush=True) + first_chunk = False + print(content.content, end="", flush=True) + elif content.role == AuthorRole.TOOL and content.metadata.get("code"): + print("") + print(f"# {content.role} (code):\n\n{content.content}") + print() + + +async def main(): + # Create the instance of the Kernel + kernel = Kernel() + + # Define a service_id for the sample + service_id = "agent" + + # Get the path to the sales.csv file + csv_file_path = os.path.join( + os.path.dirname(os.path.dirname(os.path.realpath(__file__))), + "resources", + "agent_assistant_file_manipulation", + "sales.csv", + ) + + # Create the assistant agent + agent = await AzureAssistantAgent.create( + kernel=kernel, + service_id=service_id, + name=AGENT_NAME, + instructions=AGENT_INSTRUCTIONS, + enable_code_interpreter=True, + code_interpreter_filenames=[csv_file_path], + ) + + # Create a thread and specify the file to use for code interpretation + thread_id = await agent.create_thread() + + try: + await invoke_streaming_agent(agent, thread_id=thread_id, input="Which segment had the most sales?") + await invoke_streaming_agent( + agent, thread_id=thread_id, input="List the top 5 countries that generated the most profit." + ) + await invoke_streaming_agent( + agent, + thread_id=thread_id, + input="Create a tab delimited file report of profit by each country per month.", + ) + finally: + if agent is not None: + [await agent.delete_file(file_id) for file_id in agent.code_interpreter_file_ids] + await agent.delete_thread(thread_id) + await agent.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/assistant_agent_retrieval.py b/python/samples/concepts/agents/assistant_agent_retrieval.py new file mode 100644 index 000000000000..a3ea4e81b4ec --- /dev/null +++ b/python/samples/concepts/agents/assistant_agent_retrieval.py @@ -0,0 +1,95 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio + +from semantic_kernel.agents.open_ai import AzureAssistantAgent, OpenAIAssistantAgent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.file_reference_content import FileReferenceContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI and retrieve the # +# assistant using the `retrieve` class method. # +##################################################################### + +AGENT_NAME = "JokeTeller" +AGENT_INSTRUCTIONS = "You are a funny comedian who loves telling G-rated jokes." 
+ +# Note: you may toggle this to switch between AzureOpenAI and OpenAI +use_azure_openai = True + + +# A helper method to invoke the agent with the user input +async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None: + """Invoke the agent with the user input.""" + await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input)) + + print(f"# {AuthorRole.USER}: '{input}'") + + async for message in agent.invoke(thread_id=thread_id): + if message.content: + print(f"# {message.role}: {message.content}") + + if len(message.items) > 0: + for item in message.items: + if isinstance(item, FileReferenceContent): + print(f"\n`{message.role}` => {item.file_id}") + + +async def main(): + # Create the instance of the Kernel + kernel = Kernel() + + # Define a service_id for the sample + service_id = "agent" + + # Specify an assistant ID which is used + # to retrieve the agent + assistant_id: str = None + + # Create the agent configuration + if use_azure_openai: + agent = await AzureAssistantAgent.create( + kernel=kernel, + service_id=service_id, + name=AGENT_NAME, + instructions=AGENT_INSTRUCTIONS, + enable_code_interpreter=True, + ) + + assistant_id = agent.assistant.id + + retrieved_agent: AzureAssistantAgent = await AzureAssistantAgent.retrieve( + id=assistant_id, + kernel=kernel, + ) + else: + agent = await OpenAIAssistantAgent.create( + kernel=kernel, + service_id=service_id, + name=AGENT_NAME, + instructions=AGENT_INSTRUCTIONS, + enable_code_interpreter=True, + ) + + assistant_id = agent.assistant.id + + # Retrieve the agent using the assistant_id + retrieved_agent: OpenAIAssistantAgent = await OpenAIAssistantAgent.retrieve( + id=assistant_id, + kernel=kernel, + ) + + # Define a thread and invoke the agent with the user input + thread_id = await retrieved_agent.create_thread() + + try: + await invoke_agent(retrieved_agent, thread_id, "Tell me a joke about bears.") + finally: + await agent.delete() + await retrieved_agent.delete_thread(thread_id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/assistant_agent_streaming.py b/python/samples/concepts/agents/assistant_agent_streaming.py new file mode 100644 index 000000000000..64439ba4e7c1 --- /dev/null +++ b/python/samples/concepts/agents/assistant_agent_streaming.py @@ -0,0 +1,110 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio +from typing import Annotated + +from semantic_kernel.agents.open_ai import AzureAssistantAgent, OpenAIAssistantAgent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI. OpenAI Assistants # +# allow for function calling, the use of file search and a # +# code interpreter. Assistant Threads are used to manage the # +# conversation state, similar to a Semantic Kernel Chat History. # +# This sample also demonstrates the Assistants Streaming # +# capability and how to manage an Assistants chat history. # +##################################################################### + +HOST_NAME = "Host" +HOST_INSTRUCTIONS = "Answer questions about the menu." 
+ +# Note: you may toggle this to switch between AzureOpenAI and OpenAI +use_azure_openai = True + + +# Define a sample plugin for the sample +class MenuPlugin: + """A sample Menu Plugin used for the concept sample.""" + + @kernel_function(description="Provides a list of specials from the menu.") + def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: + return """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """ + + @kernel_function(description="Provides the price of the requested menu item.") + def get_item_price( + self, menu_item: Annotated[str, "The name of the menu item."] + ) -> Annotated[str, "Returns the price of the menu item."]: + return "$9.99" + + +# A helper method to invoke the agent with the user input +async def invoke_agent( + agent: OpenAIAssistantAgent, thread_id: str, input: str, history: list[ChatMessageContent] +) -> None: + """Invoke the agent with the user input.""" + message = ChatMessageContent(role=AuthorRole.USER, content=input) + await agent.add_chat_message(thread_id=thread_id, message=message) + + # Add the user message to the history + history.append(message) + + print(f"# {AuthorRole.USER}: '{input}'") + + first_chunk = True + async for content in agent.invoke_stream(thread_id=thread_id, messages=history): + if content.role != AuthorRole.TOOL: + if first_chunk: + print(f"# {content.role}: ", end="", flush=True) + first_chunk = False + print(content.content, end="", flush=True) + print() + + +async def main(): + # Create the instance of the Kernel + kernel = Kernel() + + # Add the sample plugin to the kernel + kernel.add_plugin(plugin=MenuPlugin(), plugin_name="menu") + + # Create the OpenAI Assistant Agent + service_id = "agent" + if use_azure_openai: + agent = await AzureAssistantAgent.create( + kernel=kernel, service_id=service_id, name=HOST_NAME, instructions=HOST_INSTRUCTIONS + ) + else: + agent = await OpenAIAssistantAgent.create( + kernel=kernel, service_id=service_id, name=HOST_NAME, instructions=HOST_INSTRUCTIONS + ) + + thread_id = await agent.create_thread() + + history: list[ChatMessageContent] = [] + + try: + await invoke_agent(agent, thread_id=thread_id, input="Hello", history=history) + await invoke_agent(agent, thread_id=thread_id, input="What is the special soup?", history=history) + await invoke_agent(agent, thread_id=thread_id, input="What is the special drink?", history=history) + await invoke_agent(agent, thread_id=thread_id, input="Thank you", history=history) + finally: + await agent.delete_thread(thread_id) + await agent.delete() + + # You may then view the conversation history + print("========= Conversation History =========") + for content in history: + if content.role != AuthorRole.TOOL: + print(f"# {content.role}: {content.content}") + print("========= End of Conversation History =========") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/autogen_conversable_agent/README.md b/python/samples/concepts/agents/autogen_conversable_agent/README.md deleted file mode 100644 index f8fc8973ca9d..000000000000 --- a/python/samples/concepts/agents/autogen_conversable_agent/README.md +++ /dev/null @@ -1,20 +0,0 @@ -## AutoGen Conversable Agent (v0.2.X) - -Semantic Kernel Python supports running AutoGen Conversable Agents provided in the 0.2.X package. 
- -### Limitations - -Currently, there are some limitations to note: - -- AutoGen Conversable Agents in Semantic Kernel run asynchronously and do not support streaming of agent inputs or responses. -- The `AutoGenConversableAgent` in Semantic Kernel Python cannot be configured as part of a Semantic Kernel `AgentGroupChat`. As we progress towards GA for our agent group chat patterns, we will explore ways to integrate AutoGen agents into a Semantic Kernel group chat scenario. - -### Installation - -Install the `semantic-kernel` package with the `autogen` extra: - -```bash -pip install semantic-kernel[autogen] -``` - -For an example of how to integrate an AutoGen Conversable Agent using the Semantic Kernel Agent abstraction, please refer to [`autogen_conversable_agent_simple_convo.py`](autogen_conversable_agent_simple_convo.py). \ No newline at end of file diff --git a/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_code_executor.py b/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_code_executor.py deleted file mode 100644 index d557bad86fe1..000000000000 --- a/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_code_executor.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -from autogen import ConversableAgent -from autogen.coding import LocalCommandLineCodeExecutor - -from semantic_kernel.agents.autogen.autogen_conversable_agent import AutoGenConversableAgent - -""" -The following sample demonstrates how to use the AutoGenConversableAgent to create a reply from an agent -to a message with a code block. The agent executes the code block and replies with the output. - -The sample follows the AutoGen flow outlined here: -https://microsoft.github.io/autogen/0.2/docs/tutorial/code-executors#local-execution -""" - - -async def main(): - # Create a temporary directory to store the code files. - import os - - # Configure the temporary directory to be where the script is located. - temp_dir = os.path.dirname(os.path.realpath(__file__)) - - # Create a local command line code executor. - executor = LocalCommandLineCodeExecutor( - timeout=10, # Timeout for each code execution in seconds. - work_dir=temp_dir, # Use the temporary directory to store the code files. - ) - - # Create an agent with code executor configuration. - code_executor_agent = ConversableAgent( - "code_executor_agent", - llm_config=False, # Turn off LLM for this agent. - code_execution_config={"executor": executor}, # Use the local command line code executor. - human_input_mode="ALWAYS", # Always take human input for this agent for safety. - ) - - autogen_agent = AutoGenConversableAgent(conversable_agent=code_executor_agent) - - message_with_code_block = """This is a message with code block. -The code block is below: -```python -import numpy as np -import matplotlib.pyplot as plt -x = np.random.randint(0, 100, 100) -y = np.random.randint(0, 100, 100) -plt.scatter(x, y) -plt.savefig('scatter.png') -print('Scatter plot saved to scatter.png') -``` -This is the end of the message. 
-""" - - async for content in autogen_agent.invoke(message=message_with_code_block): - print(f"# {content.role} - {content.name or '*'}: '{content.content}'") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_convo_with_tools.py b/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_convo_with_tools.py deleted file mode 100644 index f807ff93d122..000000000000 --- a/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_convo_with_tools.py +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import os -from typing import Annotated, Literal - -from autogen import ConversableAgent, register_function - -from semantic_kernel.agents.autogen.autogen_conversable_agent import AutoGenConversableAgent -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.function_result_content import FunctionResultContent - -""" -The following sample demonstrates how to use the AutoGenConversableAgent to create a conversation between two agents -where one agent suggests a tool function call and the other agent executes the tool function call. - -In this example, the assistant agent suggests a calculator tool function call to the user proxy agent. The user proxy -agent executes the calculator tool function call. The assistant agent and the user proxy agent are created using the -ConversableAgent class. The calculator tool function is registered with the assistant agent and the user proxy agent. - -This sample follows the AutoGen flow outlined here: -https://microsoft.github.io/autogen/0.2/docs/tutorial/tool-use -""" - - -Operator = Literal["+", "-", "*", "/"] - - -async def main(): - def calculator(a: int, b: int, operator: Annotated[Operator, "operator"]) -> int: - if operator == "+": - return a + b - if operator == "-": - return a - b - if operator == "*": - return a * b - if operator == "/": - return int(a / b) - raise ValueError("Invalid operator") - - assistant = ConversableAgent( - name="Assistant", - system_message="You are a helpful AI assistant. " - "You can help with simple calculations. " - "Return 'TERMINATE' when the task is done.", - # Note: the model "gpt-4o" leads to a "division by zero" error that doesn't occur with "gpt-4o-mini" - # or even "gpt-4". - llm_config={ - "config_list": [{"model": os.environ["OPENAI_CHAT_MODEL_ID"], "api_key": os.environ["OPENAI_API_KEY"]}] - }, - ) - - # Create a Semantic Kernel AutoGenConversableAgent based on the AutoGen ConversableAgent. - assistant_agent = AutoGenConversableAgent(conversable_agent=assistant) - - user_proxy = ConversableAgent( - name="User", - llm_config=False, - is_termination_msg=lambda msg: msg.get("content") is not None and "TERMINATE" in msg["content"], - human_input_mode="NEVER", - ) - - assistant.register_for_llm(name="calculator", description="A simple calculator")(calculator) - - # Register the tool function with the user proxy agent. - user_proxy.register_for_execution(name="calculator")(calculator) - - register_function( - calculator, - caller=assistant, # The assistant agent can suggest calls to the calculator. - executor=user_proxy, # The user proxy agent can execute the calculator calls. - name="calculator", # By default, the function name is used as the tool name. - description="A simple calculator", # A description of the tool. 
- ) - - # Create a Semantic Kernel AutoGenConversableAgent based on the AutoGen ConversableAgent. - user_proxy_agent = AutoGenConversableAgent(conversable_agent=user_proxy) - - async for content in user_proxy_agent.invoke( - recipient=assistant_agent, - message="What is (44232 + 13312 / (232 - 32)) * 5?", - max_turns=10, - ): - for item in content.items: - match item: - case FunctionResultContent(result=r): - print(f"# {content.role} - {content.name or '*'}: '{r}'") - case FunctionCallContent(function_name=fn, arguments=arguments): - print(f"# {content.role} - {content.name or '*'}: Function Name: '{fn}', Arguments: '{arguments}'") - case _: - print(f"# {content.role} - {content.name or '*'}: '{content.content}'") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_simple_convo.py b/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_simple_convo.py deleted file mode 100644 index d3c799135e7e..000000000000 --- a/python/samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_simple_convo.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import os - -from autogen import ConversableAgent - -from semantic_kernel.agents.autogen.autogen_conversable_agent import AutoGenConversableAgent - -""" -The following sample demonstrates how to use the AutoGenConversableAgent to create a conversation between two agents -where one agent suggests a joke and the other agent generates a joke. - -The sample follows the AutoGen flow outlined here: -https://microsoft.github.io/autogen/0.2/docs/tutorial/introduction#roles-and-conversations -""" - - -async def main(): - cathy = ConversableAgent( - "cathy", - system_message="Your name is Cathy and you are a part of a duo of comedians.", - llm_config={ - "config_list": [ - { - "model": os.environ["OPENAI_CHAT_MODEL_ID"], - "temperature": 0.9, - "api_key": os.environ.get("OPENAI_API_KEY"), - } - ] - }, - human_input_mode="NEVER", # Never ask for human input. - ) - - cathy_autogen_agent = AutoGenConversableAgent(conversable_agent=cathy) - - joe = ConversableAgent( - "joe", - system_message="Your name is Joe and you are a part of a duo of comedians.", - llm_config={ - "config_list": [ - { - "model": os.environ["OPENAI_CHAT_MODEL_ID"], - "temperature": 0.7, - "api_key": os.environ.get("OPENAI_API_KEY"), - } - ] - }, - human_input_mode="NEVER", # Never ask for human input. 
- ) - - joe_autogen_agent = AutoGenConversableAgent(conversable_agent=joe) - - async for content in cathy_autogen_agent.invoke( - recipient=joe_autogen_agent, message="Tell me a joke about the stock market.", max_turns=3 - ): - print(f"# {content.role} - {content.name or '*'}: '{content.content}'") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/azure_ai_agent/.env.example b/python/samples/concepts/agents/azure_ai_agent/.env.example deleted file mode 100644 index c2d16cea26aa..000000000000 --- a/python/samples/concepts/agents/azure_ai_agent/.env.example +++ /dev/null @@ -1,6 +0,0 @@ -AZURE_AI_AGENT_PROJECT_CONNECTION_STRING = "" -AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME = "" -AZURE_AI_AGENT_ENDPOINT = "" -AZURE_AI_AGENT_SUBSCRIPTION_ID = "" -AZURE_AI_AGENT_RESOURCE_GROUP_NAME = "" -AZURE_AI_AGENT_PROJECT_NAME = "" \ No newline at end of file diff --git a/python/samples/concepts/agents/azure_ai_agent/README.md b/python/samples/concepts/agents/azure_ai_agent/README.md deleted file mode 100644 index 7d588308ee6b..000000000000 --- a/python/samples/concepts/agents/azure_ai_agent/README.md +++ /dev/null @@ -1,13 +0,0 @@ -## Azure AI Agents - -For details on using Azure AI Agents within Semantic Kernel, see the [README](../../../getting_started_with_agents/azure_ai_agent/README.md) in the `getting_started_with_agents/azure_ai_agent` directory. - -### Running the `azure_ai_agent_ai_search.py` Sample - -Before running this sample, ensure you have a valid index configured in your Azure AI Search resource. This sample queries hotel data using the sample Azure AI Search hotels index. - -For configuration details, refer to the comments in the sample script. For additional guidance, consult the [README](../../memory/azure_ai_search_hotel_samples/README.md), which provides step-by-step instructions for creating the sample index and generating vectors. This is one approach to setting up the index; you can also follow other tutorials, such as those on "Import and Vectorize Data" in your Azure AI Search resource. - -### Requests and Rate Limits - -For information on configuring rate limits or adjusting polling, refer [here](../../../getting_started_with_agents/azure_ai_agent/README.md#requests-and-rate-limits) \ No newline at end of file diff --git a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_azure_ai_search.py b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_azure_ai_search.py deleted file mode 100644 index 2b57f6229e41..000000000000 --- a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_azure_ai_search.py +++ /dev/null @@ -1,147 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import logging - -from azure.ai.projects.models import AzureAISearchTool, ConnectionType -from azure.identity.aio import DefaultAzureCredential - -from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole - -logging.basicConfig(level=logging.WARNING) - -""" -The following sample demonstrates how to create a simple, -Azure AI agent that uses the Azure AI Search tool and the demo -hotels-sample-index to answer questions about hotels. 
- -This sample requires: -- A "Standard" Agent Setup (choose the Python (Azure SDK) tab): -https://learn.microsoft.com/en-us/azure/ai-services/agents/quickstart -- An Azure AI Search index named 'hotels-sample-index' created in your -Azure AI Search service. You may follow this guide to create the index: -https://learn.microsoft.com/azure/search/search-get-started-portal -- You will need to make sure your Azure AI Agent project is set up with -the required Knowledge Source to be able to use the Azure AI Search tool. -Refer to the following link for information on how to do this: -https://learn.microsoft.com/en-us/azure/ai-services/agents/how-to/tools/azure-ai-search - -Refer to the README for information about configuring the index to work -with the sample data model in Azure AI Search. -""" - -# The name of the Azure AI Search index, rename as needed -AZURE_AI_SEARCH_INDEX_NAME = "hotels-sample-index" - - -async def main() -> None: - ai_agent_settings = AzureAIAgentSettings.create() - - async with ( - DefaultAzureCredential() as creds, - AzureAIAgent.create_client( - credential=creds, - conn_str=ai_agent_settings.project_connection_string.get_secret_value(), - ) as client, - ): - conn_list = await client.connections.list() - - ai_search_conn_id = "" - for conn in conn_list: - if conn.connection_type == ConnectionType.AZURE_AI_SEARCH and conn.authentication_type == "ApiKey": - ai_search_conn_id = conn.id - break - - ai_search = AzureAISearchTool(index_connection_id=ai_search_conn_id, index_name=AZURE_AI_SEARCH_INDEX_NAME) - - # Create agent definition - agent_definition = await client.agents.create_agent( - model=ai_agent_settings.model_deployment_name, - instructions="Answer questions about hotels using your index.", - tools=ai_search.definitions, - tool_resources=ai_search.resources, - headers={"x-ms-enable-preview": "true"}, - ) - - # Create the AzureAI Agent - agent = AzureAIAgent( - client=client, - definition=agent_definition, - ) - - # Create a new thread - thread = await client.agents.create_thread() - - user_inputs = [ - "Which hotels are available with full-sized kitchens in Nashville, TN?", - "Fun hotels with free WiFi.", - ] - - try: - for user_input in user_inputs: - # Add the user input as a chat message - await agent.add_chat_message( - thread_id=thread.id, - message=ChatMessageContent(role=AuthorRole.USER, content=user_input), - ) - print(f"# User: '{user_input}'\n") - # Invoke the agent for the specified thread - async for content in agent.invoke(thread_id=thread.id): - if content.role != AuthorRole.TOOL: - print(f"# Agent: {content.content}\n") - finally: - await client.agents.delete_thread(thread.id) - await client.agents.delete_agent(agent.id) - - """ - Sample output: - - # User: 'Which hotels are available with full-sized kitchens in Nashville, TN?' - - # Agent: In Nashville, TN, there are several hotels available that feature full-sized kitchens: - - 1. **Extended-Stay Hotel Options**: - - Many extended-stay hotels offer suites equipped with full-sized kitchens, which include cookware and - appliances. These hotels are designed for longer stays, making them a great option for those needing more space - and kitchen facilities【3:0†source】【3:1†source】. - - 2. **Amenities Included**: - - Most of these hotels provide additional amenities like free Wi-Fi, laundry services, fitness centers, and some - have on-site dining options【3:1†source】【3:2†source】. - - 3. 
**Location**: - - The extended-stay hotels are often located near downtown Nashville, making it convenient for guests to - explore the vibrant local music scene while enjoying the comfort of a home-like - environment【3:0†source】【3:4†source】. - - If you are looking for specific names or more detailed options, I can further assist you with that! - - # User: 'Fun hotels with free WiFi.' - - # Agent: Here are some fun hotels that offer free WiFi: - - 1. **Vibrant Downtown Hotel**: - - Located near the heart of downtown, this hotel offers a warm atmosphere with free WiFi and even provides a - delightful milk and cookies treat【7:2†source】. - - 2. **Extended-Stay Options**: - - These hotels often feature fun amenities such as a bowling alley, fitness center, and themed rooms. They also - provide free WiFi and are well-situated near local attractions【7:0†source】【7:1†source】. - - 3. **Luxury Hotel**: - - Ranked highly by Traveler magazine, this 5-star luxury hotel boasts the biggest rooms in the city, free WiFi, - espresso in the room, and flexible check-in/check-out options【7:1†source】. - - 4. **Budget-Friendly Hotels**: - - Several budget hotels offer free WiFi, breakfast, and shuttle services to nearby attractions and airports - while still providing a fun stay【7:3†source】. - - These options ensure you stay connected while enjoying your visit! If you need more specific recommendations or - details, feel free to ask! - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_file_manipulation.py b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_file_manipulation.py deleted file mode 100644 index 58398add45b9..000000000000 --- a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_file_manipulation.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import os - -from azure.ai.projects.models import CodeInterpreterTool, FilePurpose -from azure.identity.aio import DefaultAzureCredential - -from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings -from semantic_kernel.contents.annotation_content import AnnotationContent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole - -################################################################### -# The following sample demonstrates how to create a simple, # -# Azure AI agent that uses the code interpreter tool to answer # -# a coding question. 
# -################################################################### - - -async def main() -> None: - ai_agent_settings = AzureAIAgentSettings.create() - - async with ( - DefaultAzureCredential() as creds, - AzureAIAgent.create_client( - credential=creds, - conn_str=ai_agent_settings.project_connection_string.get_secret_value(), - ) as client, - ): - csv_file_path = os.path.join( - os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), - "resources", - "agent_assistant_file_manipulation", - "sales.csv", - ) - - file = await client.agents.upload_file_and_poll(file_path=csv_file_path, purpose=FilePurpose.AGENTS) - - code_interpreter = CodeInterpreterTool(file_ids=[file.id]) - - # Create agent definition - agent_definition = await client.agents.create_agent( - model=ai_agent_settings.model_deployment_name, - tools=code_interpreter.definitions, - tool_resources=code_interpreter.resources, - ) - - # Create the AzureAI Agent - agent = AzureAIAgent( - client=client, - definition=agent_definition, - ) - - # Create a new thread - thread = await client.agents.create_thread() - - user_inputs = [ - "Which segment had the most sales?", - "List the top 5 countries that generated the most profit.", - "Create a tab delimited file report of profit by each country per month.", - ] - - try: - for user_input in user_inputs: - # Add the user input as a chat message - await agent.add_chat_message( - thread_id=thread.id, - message=ChatMessageContent(role=AuthorRole.USER, content=user_input), - ) - print(f"# User: '{user_input}'") - # Invoke the agent for the specified thread - async for content in agent.invoke(thread_id=thread.id): - if content.role != AuthorRole.TOOL: - print(f"# Agent: {content.content}") - if len(content.items) > 0: - for item in content.items: - if isinstance(item, AnnotationContent): - print(f"\n`{item.quote}` => {item.file_id}") - response_content = await client.agents.get_file_content(file_id=item.file_id) - content_bytes = bytearray() - async for chunk in response_content: - content_bytes.extend(chunk) - tab_delimited_text = content_bytes.decode("utf-8") - print(tab_delimited_text) - finally: - await client.agents.delete_thread(thread.id) - await client.agents.delete_agent(agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_streaming.py b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_streaming.py deleted file mode 100644 index 5a9b1bbcf0f1..000000000000 --- a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_streaming.py +++ /dev/null @@ -1,93 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio -from typing import Annotated - -from azure.identity.aio import DefaultAzureCredential - -from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.functions.kernel_function_decorator import kernel_function - - -# Define a sample plugin for the sample -class MenuPlugin: - """A sample Menu Plugin used for the concept sample.""" - - @kernel_function(description="Provides a list of specials from the menu.") - def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: - return """ - Special Soup: Clam Chowder - Special Salad: Cobb Salad - Special Drink: Chai Tea - """ - - @kernel_function(description="Provides the price of the requested menu item.") - def get_item_price( - self, menu_item: Annotated[str, "The name of the menu item."] - ) -> Annotated[str, "Returns the price of the menu item."]: - return "$9.99" - - -async def main() -> None: - ai_agent_settings = AzureAIAgentSettings.create() - - async with ( - DefaultAzureCredential() as creds, - AzureAIAgent.create_client( - credential=creds, - conn_str=ai_agent_settings.project_connection_string.get_secret_value(), - ) as client, - ): - AGENT_NAME = "Host" - AGENT_INSTRUCTIONS = "Answer questions about the menu." - - # Create agent definition - agent_definition = await client.agents.create_agent( - model=ai_agent_settings.model_deployment_name, - name=AGENT_NAME, - instructions=AGENT_INSTRUCTIONS, - ) - - # Create the AzureAI Agent - agent = AzureAIAgent( - client=client, - definition=agent_definition, - ) - - # Add the sample plugin to the kernel - agent.kernel.add_plugin(MenuPlugin(), plugin_name="menu") - - # Create a new thread - thread = await client.agents.create_thread() - - user_inputs = [ - "Hello", - "What is the special soup?", - "How much does that cost?", - "Thank you", - ] - - try: - for user_input in user_inputs: - # Add the user input as a chat message - await agent.add_chat_message( - thread_id=thread.id, message=ChatMessageContent(role=AuthorRole.USER, content=user_input) - ) - print(f"# User: '{user_input}'") - first_chunk = True - async for content in agent.invoke_stream(thread_id=thread.id): - if content.role != AuthorRole.TOOL: - if first_chunk: - print(f"# {content.role}: ", end="", flush=True) - first_chunk = False - print(content.content, end="", flush=True) - print() - finally: - await client.agents.delete_thread(thread.id) - await client.agents.delete_agent(agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/.env.example b/python/samples/concepts/agents/bedrock_agent/.env.example deleted file mode 100644 index d0e3523fcfca..000000000000 --- a/python/samples/concepts/agents/bedrock_agent/.env.example +++ /dev/null @@ -1,2 +0,0 @@ -BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN=[YOUR_AGENT_ROLE_AMAZON_RESOURCE_NAME] -BEDROCK_AGENT_FOUNDATION_MODEL=[YOUR_FOUNDATION_MODEL] \ No newline at end of file diff --git a/python/samples/concepts/agents/bedrock_agent/README.md b/python/samples/concepts/agents/bedrock_agent/README.md deleted file mode 100644 index 3e72751eb308..000000000000 --- a/python/samples/concepts/agents/bedrock_agent/README.md +++ /dev/null @@ -1,74 +0,0 @@ -# Concept samples on how to use AWS Bedrock agents - -## Pre-requisites - -1. 
You need to have an AWS account and [access to the foundation models](https://docs.aws.amazon.com/bedrock/latest/userguide/model-access-permissions.html) -2. [AWS CLI installed](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html) and [configured](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html#configuration) - -### Configuration - -Follow this [guide](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html#configuration) to configure your environment to use the Bedrock API. - -Please configure the `aws_access_key_id`, `aws_secret_access_key`, and `region` otherwise you will need to create custom clients for the services. For example: - -```python -runtime_client=boto.client( - "bedrock-runtime", - aws_access_key_id="your_access_key", - aws_secret_access_key="your_secret_key", - region_name="your_region", - [...other parameters you may need...] -) -client=boto.client( - "bedrock", - aws_access_key_id="your_access_key", - aws_secret_access_key="your_secret_key", - region_name="your_region", - [...other parameters you may need...] -) - -bedrock_agent = BedrockAgent.create_and_prepare_agent( - name="your_agent_name", - instructions="your_instructions", - runtime_client=runtime_client, - client=client, - [...other parameters you may need...] -) -``` - -## Samples - -| Sample | Description | -|--------|-------------| -| [bedrock_agent_simple_chat.py](bedrock_agent_simple_chat.py) | Demonstrates basic usage of the Bedrock agent. | -| [bedrock_agent_simple_chat_streaming.py](bedrock_agent_simple_chat_streaming.py) | Demonstrates basic usage of the Bedrock agent with streaming. | -| [bedrock_agent_with_kernel_function.py](bedrock_agent_with_kernel_function.py) | Shows how to use the Bedrock agent with a kernel function. | -| [bedrock_agent_with_kernel_function_streaming.py](bedrock_agent_with_kernel_function_streaming.py) | Shows how to use the Bedrock agent with a kernel function with streaming. | -| [bedrock_agent_with_code_interpreter.py](bedrock_agent_with_code_interpreter.py) | Example of using the Bedrock agent with a code interpreter. | -| [bedrock_agent_with_code_interpreter_streaming.py](bedrock_agent_with_code_interpreter_streaming.py) | Example of using the Bedrock agent with a code interpreter and streaming. | -| [bedrock_mixed_chat_agents.py](bedrock_mixed_chat_agents.py) | Example of using multiple chat agents in a single script. | -| [bedrock_mixed_chat_agents_streaming.py](bedrock_mixed_chat_agents_streaming.py) | Example of using multiple chat agents in a single script with streaming. | - -## Before running the samples - -You need to set up some environment variables to run the samples. Please refer to the [.env.example](.env.example) file for the required environment variables. - -### `BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN` - -On your AWS console, go to the IAM service and go to **Roles**. Find the role you want to use and click on it. You will find the ARN in the summary section. - -### `BEDROCK_AGENT_FOUNDATION_MODEL` - -You need to make sure you have permission to access the foundation model. You can find the model ID in the [AWS documentation](https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html). To see the models you have access to, find the policy attached to your role you should see a list of models you have access to under the `Resource` section. - -### How to add the `bedrock:InvokeModelWithResponseStream` action to an IAM policy - -1. 
Open the [IAM console](https://console.aws.amazon.com/iam/). -2. On the left navigation pane, choose `Roles` under `Access management`. -3. Find the role you want to edit and click on it. -4. Under the `Permissions policies` tab, click on the policy you want to edit. -5. Under the `Permissions defined in this policy` section, click on the service. You should see **Bedrock** if you already have access to the Bedrock agent service. -6. Click on the service, and then click `Edit`. -7. On the right, you will be able to add an action. Find the service and search for `InvokeModelWithResponseStream`. -8. Check the box next to the action and then scroll all the way down and click `Next`. -9. Follow the prompts to save the changes. diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat.py b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat.py deleted file mode 100644 index e50d376b93f0..000000000000 --- a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent - -# This sample shows how to interact with a Bedrock agent in the simplest way. -# This sample uses the following main component(s): -# - a Bedrock agent -# You will learn how to create a new Bedrock agent and talk to it. - -AGENT_NAME = "semantic-kernel-bedrock-agent" -INSTRUCTION = "You are a friendly assistant. You help people find information." - - -async def main(): - bedrock_agent = await BedrockAgent.create_and_prepare_agent(AGENT_NAME, instructions=INSTRUCTION) - session_id = BedrockAgent.create_session_id() - - try: - while True: - user_input = input("User:> ") - if user_input == "exit": - print("\n\nExiting chat...") - break - - # Invoke the agent - # The chat history is maintained in the session - async for response in bedrock_agent.invoke( - session_id=session_id, - input_text=user_input, - ): - print(f"Bedrock agent: {response}") - except KeyboardInterrupt: - print("\n\nExiting chat...") - return False - except EOFError: - print("\n\nExiting chat...") - return False - finally: - # Delete the agent - await bedrock_agent.delete_agent() - - # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): - # User:> Hi, my name is John. - # Bedrock agent: Hello John. How can I help you? - # User:> What is my name? - # Bedrock agent: Your name is John. - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat_streaming.py b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat_streaming.py deleted file mode 100644 index 099b9de75f51..000000000000 --- a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_simple_chat_streaming.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent - -# This sample shows how to interact with a Bedrock agent via streaming in the simplest way. -# This sample uses the following main component(s): -# - a Bedrock agent -# You will learn how to create a new Bedrock agent and talk to it. - -AGENT_NAME = "semantic-kernel-bedrock-agent" -INSTRUCTION = "You are a friendly assistant. You help people find information." 
- - -async def main(): - bedrock_agent = await BedrockAgent.create_and_prepare_agent(AGENT_NAME, instructions=INSTRUCTION) - session_id = BedrockAgent.create_session_id() - - try: - while True: - user_input = input("User:> ") - if user_input == "exit": - print("\n\nExiting chat...") - break - - # Invoke the agent - # The chat history is maintained in the session - print("Bedrock agent: ", end="") - async for response in bedrock_agent.invoke_stream( - session_id=session_id, - input_text=user_input, - ): - print(response, end="") - print() - except KeyboardInterrupt: - print("\n\nExiting chat...") - return False - except EOFError: - print("\n\nExiting chat...") - return False - finally: - # Delete the agent - await bedrock_agent.delete_agent() - - # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): - # User:> Hi, my name is John. - # Bedrock agent: Hello John. How can I help you? - # User:> What is my name? - # Bedrock agent: Your name is John. - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter.py b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter.py deleted file mode 100644 index ad6bf184b9fa..000000000000 --- a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import os - -from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent -from semantic_kernel.contents.binary_content import BinaryContent -from semantic_kernel.contents.chat_message_content import ChatMessageContent - -# This sample shows how to interact with a Bedrock agent that is capable of writing and executing code. -# This sample uses the following main component(s): -# - a Bedrock agent -# You will learn how to create a new Bedrock agent and ask it a question that requires coding to answer. -# After running this sample, a bar chart will be generated and saved to a file in the same directory -# as this script. - -AGENT_NAME = "semantic-kernel-bedrock-agent" -INSTRUCTION = "You are a friendly assistant. You help people find information." 
- - -ASK = """ -Create a bar chart for the following data: -Panda 5 -Tiger 8 -Lion 3 -Monkey 6 -Dolphin 2 -""" - - -async def main(): - bedrock_agent = await BedrockAgent.create_and_prepare_agent(AGENT_NAME, instructions=INSTRUCTION) - await bedrock_agent.create_code_interpreter_action_group() - - session_id = BedrockAgent.create_session_id() - - # Placeholder for the file generated by the code interpreter - binary_item: BinaryContent | None = None - - try: - # Invoke the agent - async for response in bedrock_agent.invoke( - session_id=session_id, - input_text=ASK, - ): - print(f"Response:\n{response}") - assert isinstance(response, ChatMessageContent) # nosec - if not binary_item: - binary_item = next((item for item in response.items if isinstance(item, BinaryContent)), None) - finally: - # Delete the agent - await bedrock_agent.delete_agent() - - # Save the chart to a file - if not binary_item: - raise RuntimeError("No chart generated") - - file_path = os.path.join(os.path.dirname(__file__), binary_item.metadata["name"]) - binary_item.write_to_file(os.path.join(os.path.dirname(__file__), binary_item.metadata["name"])) - print(f"Chart saved to {file_path}") - - # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): - # Response: - # Here is the bar chart for the given data: - # [A bar chart showing the following data: - # Panda 5 - # Tiger 8 - # Lion 3 - # Monkey 6 - # Dolpin 2] - # Chart saved to ... - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter_streaming.py b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter_streaming.py deleted file mode 100644 index ca60c477e66e..000000000000 --- a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_code_interpreter_streaming.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import os - -from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent -from semantic_kernel.contents.binary_content import BinaryContent -from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent - -# This sample shows how to interact with a Bedrock agent that is capable of writing and executing code. -# This sample uses the following main component(s): -# - a Bedrock agent -# You will learn how to create a new Bedrock agent and ask it a question that requires coding to answer. -# After running this sample, a bar chart will be generated and saved to a file in the same directory -# as this script. - -AGENT_NAME = "semantic-kernel-bedrock-agent" -INSTRUCTION = "You are a friendly assistant. You help people find information." 
- - -ASK = """ -Create a bar chart for the following data: -Panda 5 -Tiger 8 -Lion 3 -Monkey 6 -Dolphin 2 -""" - - -async def main(): - bedrock_agent = await BedrockAgent.create_and_prepare_agent(AGENT_NAME, instructions=INSTRUCTION) - await bedrock_agent.create_code_interpreter_action_group() - - session_id = BedrockAgent.create_session_id() - - # Placeholder for the file generated by the code interpreter - binary_item: BinaryContent | None = None - - try: - # Invoke the agent - print("Response: ") - async for response in bedrock_agent.invoke_stream( - session_id=session_id, - input_text=ASK, - ): - print(response, end="") - assert isinstance(response, StreamingChatMessageContent) # nosec - if not binary_item: - binary_item = next((item for item in response.items if isinstance(item, BinaryContent)), None) - print() - finally: - # Delete the agent - await bedrock_agent.delete_agent() - - # Save the chart to a file - if not binary_item: - raise RuntimeError("No chart generated") - - file_path = os.path.join(os.path.dirname(__file__), binary_item.metadata["name"]) - binary_item.write_to_file(os.path.join(os.path.dirname(__file__), binary_item.metadata["name"])) - print(f"Chart saved to {file_path}") - - # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): - # Response: - # Here is the bar chart for the given data: - # [A bar chart showing the following data: - # Panda 5 - # Tiger 8 - # Lion 3 - # Monkey 6 - # Dolpin 2] - # Chart saved to ... - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function.py b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function.py deleted file mode 100644 index 928c02054fa7..000000000000 --- a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -from typing import Annotated - -from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent -from semantic_kernel.functions.kernel_function_decorator import kernel_function -from semantic_kernel.kernel import Kernel - -# This sample shows how to interact with a Bedrock agent that is capable of using kernel functions. -# This sample uses the following main component(s): -# - a Bedrock agent -# - a kernel function -# - a kernel -# You will learn how to create a new Bedrock agent and ask it a question that requires a kernel function to answer. - -AGENT_NAME = "semantic-kernel-bedrock-agent" -INSTRUCTION = "You are a friendly assistant. You help people find information." - - -class WeatherPlugin: - """Mock weather plugin.""" - - @kernel_function(description="Get real-time weather information.") - def current(self, location: Annotated[str, "The location to get the weather"]) -> str: - """Returns the current weather.""" - return f"The weather in {location} is sunny." - - -def get_kernel() -> Kernel: - kernel = Kernel() - kernel.add_plugin(WeatherPlugin(), plugin_name="weather") - - return kernel - - -async def main(): - # Create a kernel - kernel = get_kernel() - - bedrock_agent = await BedrockAgent.create_and_prepare_agent( - AGENT_NAME, - INSTRUCTION, - kernel=kernel, - ) - # Note: We still need to create the kernel function action group on the service side. 
- await bedrock_agent.create_kernel_function_action_group() - - session_id = BedrockAgent.create_session_id() - - try: - # Invoke the agent - async for response in bedrock_agent.invoke( - session_id=session_id, - input_text="What is the weather in Seattle?", - ): - print(f"Response:\n{response}") - finally: - # Delete the agent - await bedrock_agent.delete_agent() - - # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): - # Response: - # The current weather in Seattle is sunny. - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_simple.py b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_simple.py deleted file mode 100644 index b214ab5591dc..000000000000 --- a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_simple.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -from typing import Annotated - -from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent -from semantic_kernel.functions.kernel_function_decorator import kernel_function - -# This sample shows how to interact with a Bedrock agent that is capable of using kernel functions. -# Instead of creating a kernel and adding plugins to it, you can directly pass the plugins to the -# agent when creating it. -# This sample uses the following main component(s): -# - a Bedrock agent -# - a kernel function -# - a kernel -# You will learn how to create a new Bedrock agent and ask it a question that requires a kernel function to answer. - -AGENT_NAME = "semantic-kernel-bedrock-agent" -INSTRUCTION = "You are a friendly assistant. You help people find information." - - -class WeatherPlugin: - """Mock weather plugin.""" - - @kernel_function(description="Get real-time weather information.") - def current(self, location: Annotated[str, "The location to get the weather"]) -> str: - """Returns the current weather.""" - return f"The weather in {location} is sunny." - - -async def main(): - bedrock_agent = await BedrockAgent.create_and_prepare_agent( - AGENT_NAME, - INSTRUCTION, - plugins=[WeatherPlugin()], - ) - # Note: We still need to create the kernel function action group on the service side. - await bedrock_agent.create_kernel_function_action_group() - - session_id = BedrockAgent.create_session_id() - - try: - # Invoke the agent - async for response in bedrock_agent.invoke( - session_id=session_id, - input_text="What is the weather in Seattle?", - ): - print(f"Response:\n{response}") - finally: - # Delete the agent - await bedrock_agent.delete_agent() - - # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): - # Response: - # The current weather in Seattle is sunny. - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_streaming.py b/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_streaming.py deleted file mode 100644 index aa4dce75e0ed..000000000000 --- a/python/samples/concepts/agents/bedrock_agent/bedrock_agent_with_kernel_function_streaming.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio -from typing import Annotated - -from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent -from semantic_kernel.functions.kernel_function_decorator import kernel_function -from semantic_kernel.kernel import Kernel - -# This sample shows how to interact with a Bedrock agent that is capable of using kernel functions. -# This sample uses the following main component(s): -# - a Bedrock agent -# - a kernel function -# - a kernel -# You will learn how to create a new Bedrock agent and ask it a question that requires a kernel function to answer. - -AGENT_NAME = "semantic-kernel-bedrock-agent" -INSTRUCTION = "You are a friendly assistant. You help people find information." - - -class WeatherPlugin: - """Mock weather plugin.""" - - @kernel_function(description="Get real-time weather information.") - def current(self, location: Annotated[str, "The location to get the weather"]) -> str: - """Returns the current weather.""" - return f"The weather in {location} is sunny." - - -def get_kernel() -> Kernel: - kernel = Kernel() - kernel.add_plugin(WeatherPlugin(), plugin_name="weather") - - return kernel - - -async def main(): - # Create a kernel - kernel = get_kernel() - - bedrock_agent = await BedrockAgent.create_and_prepare_agent( - AGENT_NAME, - INSTRUCTION, - kernel=kernel, - ) - # Note: We still need to create the kernel function action group on the service side. - await bedrock_agent.create_kernel_function_action_group() - - session_id = BedrockAgent.create_session_id() - - try: - # Invoke the agent - print("Response: ") - async for response in bedrock_agent.invoke_stream( - session_id=session_id, - input_text="What is the weather in Seattle?", - ): - print(response, end="") - finally: - # Delete the agent - await bedrock_agent.delete_agent() - - # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): - # Response: - # The current weather in Seattle is sunny. - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents.py b/python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents.py deleted file mode 100644 index aa29259276d5..000000000000 --- a/python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent -from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -# This sample shows how to use a bedrock agent in a group chat that includes multiple agents of different roles. -# This sample uses the following main component(s): -# - a Bedrock agent -# - a ChatCompletionAgent -# - an AgentGroupChat -# You will learn how to create a new or connect to an existing Bedrock agent and put it in a group chat with -# another agent. - -# This will be a chat completion agent -REVIEWER_NAME = "ArtDirector" -REVIEWER_INSTRUCTIONS = """ -You are an art director who has opinions about copywriting born of a love for David Ogilvy. -The goal is to determine if the given copy is acceptable to print. -If so, state that it is approved. 
Only include the word "approved" if it is so. -If not, provide insight on how to refine suggested copy without example. -""" - -# This will be a bedrock agent -COPYWRITER_NAME = "CopyWriter" -COPYWRITER_INSTRUCTIONS = """ -You are a copywriter with ten years of experience and are known for brevity and a dry humor. -The goal is to refine and decide on the single best copy as an expert in the field. -Only provide a single proposal per response. -You're laser focused on the goal at hand. -Don't waste time with chit chat. -Consider suggestions when refining an idea. -""" - - -class ApprovalTerminationStrategy(TerminationStrategy): - """A strategy for determining when an agent should terminate.""" - - async def should_agent_terminate(self, agent, history): - """Check if the agent should terminate.""" - return "approved" in history[-1].content.lower() - - -def _create_kernel_with_chat_completion() -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion()) - return kernel - - -async def main(): - agent_reviewer = ChatCompletionAgent( - kernel=_create_kernel_with_chat_completion(), - name=REVIEWER_NAME, - instructions=REVIEWER_INSTRUCTIONS, - ) - - agent_writer = await BedrockAgent.create_and_prepare_agent( - COPYWRITER_NAME, - instructions=COPYWRITER_INSTRUCTIONS, - ) - - chat = AgentGroupChat( - agents=[agent_writer, agent_reviewer], - termination_strategy=ApprovalTerminationStrategy( - agents=[agent_reviewer], - maximum_iterations=10, - ), - ) - - input = "A slogan for a new line of electric cars." - - await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) - print(f"# {AuthorRole.USER}: '{input}'") - - try: - async for message in chat.invoke(): - print(f"# {message.role} - {message.name or '*'}: '{message.content}'") - print(f"# IS COMPLETE: {chat.is_complete}") - finally: - # Delete the agent - await agent_writer.delete_agent() - - # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): - # AuthorRole.USER: 'A slogan for a new line of electric cars.' - # AuthorRole.ASSISTANT - CopyWriter: 'Charge Ahead: The Future of Driving' - # AuthorRole.ASSISTANT - ArtDirector: 'The slogan "Charge Ahead: The Future of Driving" is compelling but could be - # made even more impactful. Consider clarifying the unique selling proposition of the electric cars. Focus on what - # sets them apart in terms of performance, eco-friendliness, or innovation. This will help create an emotional - # connection and a clearer message for the audience.' - # AuthorRole.ASSISTANT - CopyWriter: 'Charge Forward: The Electrifying Future of Driving' - # AuthorRole.ASSISTANT - ArtDirector: 'Approved' - # IS COMPLETE: True - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents_streaming.py b/python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents_streaming.py deleted file mode 100644 index b4ed1668b822..000000000000 --- a/python/samples/concepts/agents/bedrock_agent/bedrock_mixed_chat_agents_streaming.py +++ /dev/null @@ -1,107 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio - -from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent -from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -# This sample shows how to use a bedrock agent in a group chat that includes multiple agents of different roles. -# This sample uses the following main component(s): -# - a Bedrock agent -# - a ChatCompletionAgent -# - an AgentGroupChat -# You will learn how to create a new or connect to an existing Bedrock agent and put it in a group chat with -# another agent. - -# This will be a chat completion agent -REVIEWER_NAME = "ArtDirector" -REVIEWER_INSTRUCTIONS = """ -You are an art director who has opinions about copywriting born of a love for David Ogilvy. -The goal is to determine if the given copy is acceptable to print. -If so, state that it is approved. Only include the word "approved" if it is so. -If not, provide insight on how to refine suggested copy without example. -""" - -# This will be a bedrock agent -COPYWRITER_NAME = "CopyWriter" -COPYWRITER_INSTRUCTIONS = """ -You are a copywriter with ten years of experience and are known for brevity and a dry humor. -The goal is to refine and decide on the single best copy as an expert in the field. -Only provide a single proposal per response. -You're laser focused on the goal at hand. -Don't waste time with chit chat. -Consider suggestions when refining an idea. -""" - - -class ApprovalTerminationStrategy(TerminationStrategy): - """A strategy for determining when an agent should terminate.""" - - async def should_agent_terminate(self, agent, history): - """Check if the agent should terminate.""" - return "approved" in history[-1].content.lower() - - -def _create_kernel_with_chat_completion() -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion()) - return kernel - - -async def main(): - agent_reviewer = ChatCompletionAgent( - kernel=_create_kernel_with_chat_completion(), - name=REVIEWER_NAME, - instructions=REVIEWER_INSTRUCTIONS, - ) - - agent_writer = await BedrockAgent.create_and_prepare_agent( - COPYWRITER_NAME, - instructions=COPYWRITER_INSTRUCTIONS, - ) - - chat = AgentGroupChat( - agents=[agent_writer, agent_reviewer], - termination_strategy=ApprovalTerminationStrategy( - agents=[agent_reviewer], - maximum_iterations=10, - ), - ) - - input = "A slogan for a new line of electric cars." - - await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) - print(f"# {AuthorRole.USER}: '{input}'") - - try: - current_agent = "*" - async for message_chunk in chat.invoke_stream(): - if current_agent != message_chunk.name: - current_agent = message_chunk.name or "*" - print(f"\n# {message_chunk.role} - {current_agent}: ", end="") - print(message_chunk.content, end="") - print() - print(f"# IS COMPLETE: {chat.is_complete}") - finally: - # Delete the agent - await agent_writer.delete_agent() - - # Sample output (using anthropic.claude-3-haiku-20240307-v1:0): - # AuthorRole.USER: 'A slogan for a new line of electric cars.' 
- # AuthorRole.ASSISTANT - CopyWriter: 'Charge Ahead: The Future of Driving' - # AuthorRole.ASSISTANT - ArtDirector: 'The slogan "Charge Ahead: The Future of Driving" is compelling but could be - # made even more impactful. Consider clarifying the unique selling proposition of the electric cars. Focus on what - # sets them apart in terms of performance, eco-friendliness, or innovation. This will help create an emotional - # connection and a clearer message for the audience.' - # AuthorRole.ASSISTANT - CopyWriter: 'Charge Forward: The Electrifying Future of Driving' - # AuthorRole.ASSISTANT - ArtDirector: 'Approved' - # IS COMPLETE: True - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_agent/README.md b/python/samples/concepts/agents/chat_completion_agent/README.md deleted file mode 100644 index a9b02dbb0d07..000000000000 --- a/python/samples/concepts/agents/chat_completion_agent/README.md +++ /dev/null @@ -1,45 +0,0 @@ -# Chat Completion Agent Samples - -The following samples demonstrate advanced usage of the `ChatCompletionAgent`. - ---- - -## Chat History Reduction Strategies - -When configuring chat history management, there are two important settings to consider: - -### `reducer_msg_count` - -- **Purpose:** Defines the target number of messages to retain after applying truncation or summarization. -- **Controls:** Determines how much recent conversation history is preserved, while older messages are either discarded or summarized. -- **Recommendations for adjustment:** - - **Smaller values:** Ideal for memory-constrained environments or scenarios where brief context is sufficient. - - **Larger values:** Useful when retaining extensive conversational context is critical for accurate responses or complex dialogue. - -### `reducer_threshold` - -- **Purpose:** Provides a buffer to prevent premature reduction when the message count slightly exceeds `reducer_msg_count`. -- **Controls:** Ensures essential message pairs (e.g., a user query and the assistant’s response) aren't unintentionally truncated. -- **Recommendations for adjustment:** - - **Smaller values:** Use to enforce stricter message reduction criteria, potentially truncating older message pairs sooner. - - **Larger values:** Recommended for preserving critical conversation segments, particularly in sensitive interactions involving API function calls or detailed responses. - -### Interaction Between Parameters - -The combination of these parameters determines **when** history reduction occurs and **how much** of the conversation is retained. - -**Example:** -- If `reducer_msg_count = 10` and `reducer_threshold = 5`, message history won't be truncated until the total message count exceeds 15. This strategy maintains conversational context flexibility while respecting memory limitations. - ---- - -## Recommendations for Effective Configuration - -- **Performance-focused environments:** - - Lower `reducer_msg_count` to conserve memory and accelerate processing. - -- **Context-sensitive scenarios:** - - Higher `reducer_msg_count` and `reducer_threshold` help maintain continuity across multiple interactions, crucial for multi-turn conversations or complex workflows. - -- **Iterative Experimentation:** - - Start with default values (`reducer_msg_count = 10`, `reducer_threshold = 10`), and adjust according to the specific behavior and response quality required by your application. 
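
For a concrete sense of how the two settings interact, here is a minimal sketch modeled on the single-agent reducer samples in this folder (it assumes the same `AzureChatCompletion` environment configuration those samples use). It builds both reducer types with `target_count = 10` and `threshold_count = 5`, so no reduction happens until the history exceeds 15 messages:

```python
import asyncio

from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion
from semantic_kernel.contents import ChatHistorySummarizationReducer, ChatHistoryTruncationReducer


async def main() -> None:
    reducer_msg_count = 10  # target_count: messages retained after a reduction
    reducer_threshold = 5   # threshold_count: buffer before a reduction is triggered

    # Truncation reducer: older messages are dropped once the limit is exceeded.
    truncation_reducer = ChatHistoryTruncationReducer(
        target_count=reducer_msg_count, threshold_count=reducer_threshold
    )

    # Summarization reducer: older messages are folded into a summary instead.
    summarization_reducer = ChatHistorySummarizationReducer(
        service=AzureChatCompletion(), target_count=reducer_msg_count, threshold_count=reducer_threshold
    )

    for reducer in (truncation_reducer, summarization_reducer):
        for index in range(1, 20):
            reducer.add_user_message(str(index))
            # reduce() leaves the history alone until the count exceeds target + threshold.
            if await reducer.reduce():
                print(f"{type(reducer).__name__}: history reduced to {len(reducer.messages)} messages.")


if __name__ == "__main__":
    asyncio.run(main())
```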
diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_function_termination.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_function_termination.py deleted file mode 100644 index c257eacb5ff4..000000000000 --- a/python/samples/concepts/agents/chat_completion_agent/chat_completion_function_termination.py +++ /dev/null @@ -1,147 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -from typing import Annotated - -from semantic_kernel.agents import ChatCompletionAgent -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents import ChatHistory, ChatMessageContent, FunctionCallContent, FunctionResultContent -from semantic_kernel.filters import AutoFunctionInvocationContext -from semantic_kernel.functions.kernel_function_decorator import kernel_function -from semantic_kernel.kernel import Kernel - -""" -The following sample demonstrates how to configure the auto -function invocation filter while using a ChatCompletionAgent. -This allows the developer or user to view the function call content -and the function result content. -""" - - -# Define the auto function invocation filter that will be used by the kernel -async def auto_function_invocation_filter(context: AutoFunctionInvocationContext, next): - """A filter that will be called for each function call in the response.""" - # if we don't call next, it will skip this function, and go to the next one - await next(context) - if context.function.plugin_name == "menu": - context.terminate = True - - -# Define a sample plugin for the sample -class MenuPlugin: - """A sample Menu Plugin used for the concept sample.""" - - @kernel_function(description="Provides a list of specials from the menu.") - def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: - return """ - Special Soup: Clam Chowder - Special Salad: Cobb Salad - Special Drink: Chai Tea - """ - - @kernel_function(description="Provides the price of the requested menu item.") - def get_item_price( - self, menu_item: Annotated[str, "The name of the menu item."] - ) -> Annotated[str, "Returns the price of the menu item."]: - return "$9.99" - - -def _create_kernel_with_chat_completionand_filter() -> Kernel: - """A helper function to create a kernel with a chat completion service and a filter.""" - kernel = Kernel() - kernel.add_service(AzureChatCompletion()) - kernel.add_filter("auto_function_invocation", auto_function_invocation_filter) - kernel.add_plugin(plugin=MenuPlugin(), plugin_name="menu") - return kernel - - -def _write_content(content: ChatMessageContent) -> None: - """Write the content to the console based on the content type.""" - last_item_type = type(content.items[-1]).__name__ if content.items else "(empty)" - message_content = "" - if isinstance(last_item_type, FunctionCallContent): - message_content = f"tool request = {content.items[-1].function_name}" - elif isinstance(last_item_type, FunctionResultContent): - message_content = f"function result = {content.items[-1].result}" - else: - message_content = str(content.items[-1]) - print(f"[{last_item_type}] {content.role} : '{message_content}'") - - -async def main(): - # 1. Create the agent with a kernel instance that contains - # the auto function invocation filter and the AI service - agent = ChatCompletionAgent( - kernel=_create_kernel_with_chat_completionand_filter(), - name="Host", - instructions="Answer questions about the menu.", - ) - - # 2. 
Define the chat history - chat_history = ChatHistory() - - user_inputs = [ - "Hello", - "What is the special soup?", - "What is the special drink?", - "Thank you", - ] - - for user_input in user_inputs: - # 3. Add the user message to the chat history - chat_history.add_user_message(user_input) - print(f"# User: '{user_input}'") - - # 4. Get the response from the agent - content = await agent.get_response(chat_history) - # Don't add the message if it is a function call or result - if not any(isinstance(item, (FunctionCallContent, FunctionResultContent)) for item in content.items): - chat_history.add_message(content) - _write_content(content) - - print("================================") - print("CHAT HISTORY") - print("================================") - - # Print out the chat history to view the different types of messages - for message in chat_history.messages: - _write_content(message) - - """ - Sample output: - - # AuthorRole.USER: 'Hello' - [TextContent] AuthorRole.ASSISTANT : 'Hello! How can I assist you today?' - # AuthorRole.USER: 'What is the special soup?' - [FunctionResultContent] AuthorRole.TOOL : ' - Special Soup: Clam Chowder - Special Salad: Cobb Salad - Special Drink: Chai Tea - ' - # AuthorRole.USER: 'What is the special drink?' - [TextContent] AuthorRole.ASSISTANT : 'The special drink is Chai Tea.' - # AuthorRole.USER: 'Thank you' - [TextContent] AuthorRole.ASSISTANT : 'You're welcome! If you have any more questions or need assistance with - anything else, feel free to ask!' - ================================ - CHAT HISTORY - ================================ - [TextContent] AuthorRole.USER : 'Hello' - [TextContent] AuthorRole.ASSISTANT : 'Hello! How can I assist you today?' - [TextContent] AuthorRole.USER : 'What is the special soup?' - [FunctionCallContent] AuthorRole.ASSISTANT : 'menu-get_specials({})' - [FunctionResultContent] AuthorRole.TOOL : ' - Special Soup: Clam Chowder - Special Salad: Cobb Salad - Special Drink: Chai Tea - ' - [TextContent] AuthorRole.USER : 'What is the special drink?' - [TextContent] AuthorRole.ASSISTANT : 'The special drink is Chai Tea.' - [TextContent] AuthorRole.USER : 'Thank you' - [TextContent] AuthorRole.ASSISTANT : 'You're welcome! If you have any more questions or need assistance with - anything else, feel free to ask!' - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_prompt_templating.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_prompt_templating.py deleted file mode 100644 index 74aba6060d6a..000000000000 --- a/python/samples/concepts/agents/chat_completion_agent/chat_completion_prompt_templating.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -from semantic_kernel.agents import ChatCompletionAgent -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents import ChatHistory -from semantic_kernel.functions import KernelArguments -from semantic_kernel.prompt_template import PromptTemplateConfig - -""" -The following sample demonstrates how to create a chat completion -agent using Azure OpenAI within Semantic Kernel. -It uses parameterized prompts and shows how to swap between -"semantic-kernel," "jinja2," and "handlebars" template formats, -This sample highlights the agent's chat history conversation -is managed and how kernel arguments are passed in and used. 
-""" - -# Define the inputs and styles to be used in the agent -inputs = [ - ("Home cooking is great.", None), - ("Talk about world peace.", "iambic pentameter"), - ("Say something about doing your best.", "e. e. cummings"), - ("What do you think about having fun?", "old school rap"), -] - - -async def invoke_chat_completion_agent(agent: ChatCompletionAgent, inputs): - """Invokes the given agent with each (input, style) in inputs.""" - - chat = ChatHistory() - - for user_input, style in inputs: - # Add user message to the conversation - chat.add_user_message(user_input) - print(f"[USER]: {user_input}\n") - - # If style is specified, override the 'style' argument - argument_overrides = None - if style: - argument_overrides = KernelArguments(style=style) - - # Stream agent responses - async for response in agent.invoke_stream(history=chat, arguments=argument_overrides): - print(f"{response.content}", end="", flush=True) - print() - - -async def invoke_agent_with_template(template_str: str, template_format: str, default_style: str = "haiku"): - """Creates an agent with the specified template and format, then invokes it using invoke_chat_completion_agent.""" - - # Configure the prompt template - prompt_config = PromptTemplateConfig(template=template_str, template_format=template_format) - - agent = ChatCompletionAgent( - service=AzureChatCompletion(), - name="MyPoetAgent", - prompt_template_config=prompt_config, - arguments=KernelArguments(style=default_style), - ) - - await invoke_chat_completion_agent(agent, inputs) - - -async def main(): - # 1) Using "semantic-kernel" format - print("\n===== SEMANTIC-KERNEL FORMAT =====\n") - semantic_kernel_template = """ - Write a one verse poem on the requested topic in the style of {{$style}}. - Always state the requested style of the poem. - """ - await invoke_agent_with_template( - template_str=semantic_kernel_template, - template_format="semantic-kernel", - default_style="haiku", - ) - - # 2) Using "jinja2" format - print("\n===== JINJA2 FORMAT =====\n") - jinja2_template = """ - Write a one verse poem on the requested topic in the style of {{style}}. - Always state the requested style of the poem. - """ - await invoke_agent_with_template(template_str=jinja2_template, template_format="jinja2", default_style="haiku") - - # 3) Using "handlebars" format - print("\n===== HANDLEBARS FORMAT =====\n") - handlebars_template = """ - Write a one verse poem on the requested topic in the style of {{style}}. - Always state the requested style of the poem. - """ - await invoke_agent_with_template( - template_str=handlebars_template, template_format="handlebars", default_style="haiku" - ) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_agent_chat.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_agent_chat.py deleted file mode 100644 index f48905b6113f..000000000000 --- a/python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_agent_chat.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio - -from semantic_kernel.agents import ( - AgentGroupChat, - ChatCompletionAgent, -) -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents import ( - ChatHistorySummarizationReducer, -) - -""" -The following sample demonstrates how to implement a chat history -reducer as part of the Semantic Kernel Agent Framework. For this sample, -the ChatCompletionAgent with an AgentGroupChat is used. The Chat History -Reducer is a Summary Reducer. View the README for more information on -how to use the reducer and what each parameter does. -""" - - -async def main(): - """ - Single-function approach that shows the same chat reducer behavior - while preserving all original logic and code lines (now commented). - """ - - # Setup necessary parameters - reducer_msg_count = 10 - reducer_threshold = 10 - - # Create a summarization reducer and clear its history - history_summarization_reducer = ChatHistorySummarizationReducer( - service=AzureChatCompletion(), target_count=reducer_msg_count, threshold_count=reducer_threshold - ) - history_summarization_reducer.clear() - - # Create our agent - agent = ChatCompletionAgent( - name="NumeroTranslator", - instructions="Add one to the latest user number and spell it in Spanish without explanation.", - service=AzureChatCompletion(), - ) - - # Create a group chat using the reducer - chat = AgentGroupChat(chat_history=history_summarization_reducer) - - # Simulate user messages - message_count = 50 # Number of messages to simulate - for index in range(1, message_count, 2): - # Add user message to the chat - await chat.add_chat_message(message=str(index)) - print(f"# User: '{index}'") - - # Attempt to reduce history - is_reduced = await chat.reduce_history() - if is_reduced: - print(f"@ History reduced to {len(history_summarization_reducer.messages)} messages.") - - # Invoke the agent and display responses - async for message in chat.invoke(agent): - print(f"# {message.role} - {message.name or '*'}: '{message.content}'") - - # Retrieve messages - msgs = [] - async for m in chat.get_chat_messages(agent): - msgs.append(m) - print(f"@ Message Count: {len(msgs)}\n") - - # If a reduction happened and we use summarization, print the summary - if is_reduced: - for msg in msgs: - if msg.metadata and msg.metadata.get("__summary__"): - print(f"\tSummary: {msg.content}") - break - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_single_agent.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_single_agent.py deleted file mode 100644 index 11236749ba8a..000000000000 --- a/python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_single_agent.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import logging - -from semantic_kernel.agents import ( - ChatCompletionAgent, -) -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents import ( - ChatHistorySummarizationReducer, -) - -""" -The following sample demonstrates how to implement a truncation chat -history reducer as part of the Semantic Kernel Agent Framework. For -this sample, a single ChatCompletionAgent is used. 
-""" - - -# Initialize the logger for debugging and information messages -logger = logging.getLogger(__name__) - - -async def main(): - # Setup necessary parameters - reducer_msg_count = 10 - reducer_threshold = 10 - - # Create a summarization reducer - history_summarization_reducer = ChatHistorySummarizationReducer( - service=AzureChatCompletion(), target_count=reducer_msg_count, threshold_count=reducer_threshold - ) - - # Create our agent - agent = ChatCompletionAgent( - name="NumeroTranslator", - instructions="Add one to the latest user number and spell it in Spanish without explanation.", - service=AzureChatCompletion(), - ) - - # Number of messages to simulate - message_count = 50 - for index in range(1, message_count + 1, 2): - # Add user message - history_summarization_reducer.add_user_message(str(index)) - print(f"# User: '{index}'") - - # Attempt reduction - is_reduced = await history_summarization_reducer.reduce() - if is_reduced: - print(f"@ History reduced to {len(history_summarization_reducer.messages)} messages.") - - # Get agent response and store it - response = await agent.get_response(history_summarization_reducer) - history_summarization_reducer.add_message(response) - print(f"# Agent - {response.name}: '{response.content}'") - - print(f"@ Message Count: {len(history_summarization_reducer.messages)}\n") - - # If reduced, print summary if present - if is_reduced: - for msg in history_summarization_reducer.messages: - if msg.metadata and msg.metadata.get("__summary__"): - print(f"\tSummary: {msg.content}") - break - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_agent_chat.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_agent_chat.py deleted file mode 100644 index b2ae98661fe0..000000000000 --- a/python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_agent_chat.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import logging - -from semantic_kernel.agents import ( - AgentGroupChat, - ChatCompletionAgent, -) -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents import ( - ChatHistoryTruncationReducer, -) - -""" -The following sample demonstrates how to implement a chat history -reducer as part of the Semantic Kernel Agent Framework. For this sample, -the ChatCompletionAgent with an AgentGroupChat is used. The Chat History -Reducer is a Truncation Reducer. View the README for more information on -how to use the reducer and what each parameter does. -""" - - -# Initialize the logger for debugging and information messages -logger = logging.getLogger(__name__) - - -async def main(): - """ - Single-function approach that shows the same chat reducer behavior - while preserving all original logic and code lines (now commented). 
- """ - - # Setup necessary parameters - reducer_msg_count = 10 - reducer_threshold = 10 - - # Create a summarization reducer and clear its history - history_truncatation_reducer = ChatHistoryTruncationReducer( - target_count=reducer_msg_count, threshold_count=reducer_threshold - ) - history_truncatation_reducer.clear() - - # Create our agent - agent = ChatCompletionAgent( - name="NumeroTranslator", - instructions="Add one to the latest user number and spell it in Spanish without explanation.", - service=AzureChatCompletion(), - ) - - # Create a group chat using the reducer - chat = AgentGroupChat(chat_history=history_truncatation_reducer) - - # Simulate user messages - message_count = 50 # Number of messages to simulate - for index in range(1, message_count, 2): - # Add user message to the chat - await chat.add_chat_message(message=str(index)) - print(f"# User: '{index}'") - - # Attempt to reduce history - is_reduced = await chat.reduce_history() - if is_reduced: - print(f"@ History reduced to {len(history_truncatation_reducer.messages)} messages.") - - # Invoke the agent and display responses - async for message in chat.invoke(agent): - print(f"# {message.role} - {message.name or '*'}: '{message.content}'") - - # Retrieve messages - msgs = [] - async for m in chat.get_chat_messages(agent): - msgs.append(m) - print(f"@ Message Count: {len(msgs)}\n") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_single_agent.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_single_agent.py deleted file mode 100644 index 95a41f9c5b8f..000000000000 --- a/python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_single_agent.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import logging - -from semantic_kernel.agents import ( - ChatCompletionAgent, -) -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents import ( - ChatHistoryTruncationReducer, -) - -""" -The following sample demonstrates how to implement a truncation chat -history reducer as part of the Semantic Kernel Agent Framework. For -this sample, a single ChatCompletionAgent is used. 
-""" - - -# Initialize the logger for debugging and information messages -logger = logging.getLogger(__name__) - - -async def main(): - # Setup necessary parameters - reducer_msg_count = 10 - reducer_threshold = 10 - - # Create a summarization reducer - history_truncation_reducer = ChatHistoryTruncationReducer( - service=AzureChatCompletion(), target_count=reducer_msg_count, threshold_count=reducer_threshold - ) - - # Create our agent - agent = ChatCompletionAgent( - name="NumeroTranslator", - instructions="Add one to the latest user number and spell it in Spanish without explanation.", - service=AzureChatCompletion(), - ) - - # Number of messages to simulate - message_count = 50 - for index in range(1, message_count + 1, 2): - # Add user message - history_truncation_reducer.add_user_message(str(index)) - print(f"# User: '{index}'") - - # Attempt reduction - is_reduced = await history_truncation_reducer.reduce() - if is_reduced: - print(f"@ History reduced to {len(history_truncation_reducer.messages)} messages.") - - # Get agent response and store it - response = await agent.get_response(history_truncation_reducer) - history_truncation_reducer.add_message(response) - print(f"# Agent - {response.name}: '{response.content}'") - - print(f"@ Message Count: {len(history_truncation_reducer.messages)}\n") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_function_termination.py b/python/samples/concepts/agents/chat_completion_function_termination.py new file mode 100644 index 000000000000..38ee6e76d832 --- /dev/null +++ b/python/samples/concepts/agents/chat_completion_function_termination.py @@ -0,0 +1,133 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +from typing import Annotated + +from semantic_kernel.agents import ChatCompletionAgent +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.filters.auto_function_invocation.auto_function_invocation_context import ( + AutoFunctionInvocationContext, +) +from semantic_kernel.filters.filter_types import FilterTypes +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.kernel import Kernel + +################################################################### +# The following sample demonstrates how to configure the auto # +# function invocation filter with use of a ChatCompletionAgent. # +################################################################### + + +# Define the agent name and instructions +HOST_NAME = "Host" +HOST_INSTRUCTIONS = "Answer questions about the menu." 
+
+
+# Define the auto function invocation filter that will be used by the kernel
+async def auto_function_invocation_filter(context: AutoFunctionInvocationContext, next):
+    """A filter that will be called for each function call in the response."""
+    # Await next(context) so the function invocation runs, then terminate further
+    # auto invocation if the call came from the menu plugin.
+    await next(context)
+    if context.function.plugin_name == "menu":
+        context.terminate = True
+
+
+# Define a sample plugin for the sample
+class MenuPlugin:
+    """A sample Menu Plugin used for the concept sample."""
+
+    @kernel_function(description="Provides a list of specials from the menu.")
+    def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]:
+        return """
+        Special Soup: Clam Chowder
+        Special Salad: Cobb Salad
+        Special Drink: Chai Tea
+        """
+
+    @kernel_function(description="Provides the price of the requested menu item.")
+    def get_item_price(
+        self, menu_item: Annotated[str, "The name of the menu item."]
+    ) -> Annotated[str, "Returns the price of the menu item."]:
+        return "$9.99"
+
+
+def _create_kernel_with_chat_completion_and_filter(service_id: str) -> Kernel:
+    """A helper function to create a kernel with a chat completion service and a filter."""
+    kernel = Kernel()
+    kernel.add_service(AzureChatCompletion(service_id=service_id))
+    kernel.add_filter(FilterTypes.AUTO_FUNCTION_INVOCATION, auto_function_invocation_filter)
+    kernel.add_plugin(plugin=MenuPlugin(), plugin_name="menu")
+    return kernel
+
+
+def _write_content(content: ChatMessageContent) -> None:
+    """Write the content to the console."""
+    last_item = content.items[-1] if content.items else None
+    last_item_type = type(last_item).__name__ if last_item is not None else "(empty)"
+    if isinstance(last_item, FunctionCallContent):
+        message_content = f"tool request = {last_item.function_name}"
+    elif isinstance(last_item, FunctionResultContent):
+        message_content = f"function result = {last_item.result}"
+    else:
+        message_content = str(last_item) if last_item is not None else ""
+    print(f"[{last_item_type}] {content.role} : '{message_content}'")
+
+
+# A helper method to invoke the agent with the user input
+async def invoke_agent(agent: ChatCompletionAgent, input: str, chat_history: ChatHistory) -> None:
+    """Invoke the agent with the user input."""
+    chat_history.add_user_message(input)
+    print(f"# {AuthorRole.USER}: '{input}'")
+
+    async for content in agent.invoke(chat_history):
+        if not any(isinstance(item, (FunctionCallContent, FunctionResultContent)) for item in content.items):
+            chat_history.add_message(content)
+        _write_content(content)
+
+
+async def main():
+    service_id = "agent"
+
+    # Create the kernel used by the chat completion agent
+    kernel = _create_kernel_with_chat_completion_and_filter(service_id=service_id)
+
+    settings = kernel.get_prompt_execution_settings_from_service_id(service_id=service_id)
+
+    # Configure the function choice behavior to auto invoke kernel functions
+    settings.function_choice_behavior = FunctionChoiceBehavior.Auto()
+
+    # Create the agent
+    agent = ChatCompletionAgent(
+        service_id=service_id,
+        kernel=kernel,
+        name=HOST_NAME,
+        instructions=HOST_INSTRUCTIONS,
+        execution_settings=settings,
+    )
+
+    # Define the chat history
+    chat = ChatHistory()
+
+    # Respond to user input
+    await invoke_agent(agent=agent, input="Hello", chat_history=chat)
+    await invoke_agent(agent=agent, input="What is the special soup?", chat_history=chat)
+    await invoke_agent(agent=agent, input="What is the special drink?", chat_history=chat)
+    await invoke_agent(agent=agent, input="Thank you",
chat_history=chat) + + print("================================") + print("CHAT HISTORY") + print("================================") + + # Print out the chat history to view the different types of messages + for message in chat.messages: + _write_content(message) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_history_reducer.py b/python/samples/concepts/agents/chat_completion_history_reducer.py new file mode 100644 index 000000000000..1cdffefe7b78 --- /dev/null +++ b/python/samples/concepts/agents/chat_completion_history_reducer.py @@ -0,0 +1,298 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging +from typing import TYPE_CHECKING + +from semantic_kernel.agents import ( + AgentGroupChat, + ChatCompletionAgent, +) +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion, OpenAIChatCompletion +from semantic_kernel.contents import AuthorRole, ChatHistory, ChatMessageContent +from semantic_kernel.contents.history_reducer.chat_history_summarization_reducer import ChatHistorySummarizationReducer +from semantic_kernel.contents.history_reducer.chat_history_truncation_reducer import ChatHistoryTruncationReducer +from semantic_kernel.kernel import Kernel + +if TYPE_CHECKING: + from semantic_kernel.contents.history_reducer.chat_history_reducer import ChatHistoryReducer + +##################################################################### +# The following sample demonstrates how to implement a chat history # +# reducer as part of the Semantic Kernel Agent Framework. It # +# covers two types of reducers: summarization reduction and a # +# truncation reduction. For this sample, the ChatCompletionAgent # +# is used. # +##################################################################### + + +# Initialize the logger for debugging and information messages +logger = logging.getLogger(__name__) + +# Flag to determine whether to use Azure OpenAI services or OpenAI +# Set this to True if using Azure OpenAI (requires appropriate configuration) +use_azure_openai = True + + +# Helper function to create and configure a Kernel with the desired chat completion service +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + """A helper function to create a kernel with a chat completion service.""" + kernel = Kernel() + if use_azure_openai: + # Add Azure OpenAI service to the kernel + kernel.add_service(AzureChatCompletion(service_id=service_id)) + else: + # Add OpenAI service to the kernel + kernel.add_service(OpenAIChatCompletion(service_id=service_id)) + return kernel + + +class HistoryReducerExample: + """ + Demonstrates how to create a ChatCompletionAgent with a ChatHistoryReducer + (either truncation or summarization) and how to invoke that agent + multiple times while applying the history reduction. + """ + + # Agent-specific settings + TRANSLATOR_NAME = "NumeroTranslator" # Name of the agent + TRANSLATOR_INSTRUCTIONS = "Add one to the latest user number and spell it in Spanish without explanation." + + def create_truncating_agent( + self, reducer_msg_count: int, reducer_threshold: int + ) -> tuple[ChatCompletionAgent, "ChatHistoryReducer"]: + """ + Creates a ChatCompletionAgent with a truncation-based history reducer. + + Parameters: + - reducer_msg_count: Target number of messages to retain after truncation. + - reducer_threshold: Threshold number of messages to trigger truncation. + + Returns: + - A configured ChatCompletionAgent instance with truncation enabled. 
+ """ + truncation_reducer = ChatHistoryTruncationReducer( + target_count=reducer_msg_count, threshold_count=reducer_threshold + ) + + return ChatCompletionAgent( + name=self.TRANSLATOR_NAME, + instructions=self.TRANSLATOR_INSTRUCTIONS, + kernel=_create_kernel_with_chat_completion("truncate_agent"), + history_reducer=truncation_reducer, + ), truncation_reducer + + def create_summarizing_agent( + self, reducer_msg_count: int, reducer_threshold: int + ) -> tuple[ChatCompletionAgent, "ChatHistoryReducer"]: + """ + Creates a ChatCompletionAgent with a summarization-based history reducer. + + Parameters: + - reducer_msg_count: Target number of messages to retain after summarization. + - reducer_threshold: Threshold number of messages to trigger summarization. + + Returns: + - A configured ChatCompletionAgent instance with summarization enabled. + """ + kernel = _create_kernel_with_chat_completion("summarize_agent") + + summarization_reducer = ChatHistorySummarizationReducer( + service=kernel.get_service(service_id="summarize_agent"), + target_count=reducer_msg_count, + threshold_count=reducer_threshold, + ) + + return ChatCompletionAgent( + name=self.TRANSLATOR_NAME, + instructions=self.TRANSLATOR_INSTRUCTIONS, + kernel=kernel, + history_reducer=summarization_reducer, + ), summarization_reducer + + async def invoke_agent(self, agent: ChatCompletionAgent, chat_history: ChatHistory, message_count: int): + """ + Demonstrates agent invocation with direct history management and reduction. + + Parameters: + - agent: The ChatCompletionAgent to invoke. + - message_count: The number of messages to simulate in the conversation. + """ + + index = 1 + while index <= message_count: + # Provide user input + user_message = ChatMessageContent(role=AuthorRole.USER, content=str(index)) + chat_history.messages.append(user_message) + print(f"# User: '{index}'") + + # Attempt history reduction if a reducer is present + is_reduced = False + if agent.history_reducer is not None: + reduced = await agent.history_reducer.reduce() + if reduced is not None: + chat_history.messages.clear() + chat_history.messages.extend(reduced) + is_reduced = True + print("@ (History was reduced!)") + + # Invoke the agent and display its response + async for response in agent.invoke(chat_history): + chat_history.messages.append(response) + print(f"# {response.role} - {response.name}: '{response.content}'") + + # The index is incremented by 2 because the agent is told to: + # "Add one to the latest user number and spell it in Spanish without explanation." + # The user sends 1, 3, 5, etc., and the agent responds with 2, 4, 6, etc. (in Spanish) + index += 2 + print(f"@ Message Count: {len(chat_history.messages)}\n") + + # If history was reduced, and the chat history is of type `ChatHistorySummarizationReducer`, + # print summaries as it will contain the __summary__ metadata key. + if is_reduced and isinstance(chat_history, ChatHistorySummarizationReducer): + self._print_summaries_from_front(chat_history.messages) + + async def invoke_chat(self, agent: ChatCompletionAgent, message_count: int): + """ + Demonstrates agent invocation within a group chat. + + Parameters: + - agent: The ChatCompletionAgent to invoke. + - message_count: The number of messages to simulate in the conversation. 
+ """ + chat = AgentGroupChat() # Initialize a new group chat + last_history_count = 0 + + index = 1 + while index <= message_count: + # Add user message to the chat + user_msg = ChatMessageContent(role=AuthorRole.USER, content=str(index)) + await chat.add_chat_message(user_msg) + print(f"# User: '{index}'") + + # Invoke the agent and display its response + async for message in chat.invoke(agent): + print(f"# {message.role} - {message.name or '*'}: '{message.content}'") + + # The index is incremented by 2 because the agent is told to: + # "Add one to the latest user number and spell it in Spanish without explanation." + # The user sends 1, 3, 5, etc., and the agent responds with 2, 4, 6, etc. (in Spanish) + index += 2 + + # Retrieve chat messages in descending order (newest first) + msgs = [] + async for m in chat.get_chat_messages(agent): + msgs.append(m) + + print(f"@ Message Count: {len(msgs)}\n") + + # Check for reduction in message count and print summaries + if len(msgs) < last_history_count: + self._print_summaries_from_back(msgs) + + last_history_count = len(msgs) + + def _print_summaries_from_front(self, messages: list[ChatMessageContent]): + """ + Prints summaries from the front of the message list. + + Parameters: + - messages: List of chat messages to process. + """ + summary_index = 0 + while summary_index < len(messages): + msg = messages[summary_index] + if msg.metadata and msg.metadata.get("__summary__"): + print(f"\tSummary: {msg.content}") + summary_index += 1 + else: + break + + def _print_summaries_from_back(self, messages: list[ChatMessageContent]): + """ + Prints summaries from the back of the message list. + + Parameters: + - messages: List of chat messages to process. + """ + summary_index = len(messages) - 1 + while summary_index >= 0: + msg = messages[summary_index] + if msg.metadata and msg.metadata.get("__summary__"): + print(f"\tSummary: {msg.content}") + summary_index -= 1 + else: + break + + +# Main entry point for the script +async def main(): + # Initialize the example class + example = HistoryReducerExample() + + # Demonstrate truncation-based reduction + trunc_agent, history_reducer = example.create_truncating_agent( + # reducer_msg_count: + # Purpose: Defines the target number of messages to retain after applying truncation or summarization. + # What it controls: This parameter determines how much of the most recent conversation history + # is preserved while discarding or summarizing older messages. + # Why change it?: + # - Smaller values: Use when memory constraints are tight, or the assistant only needs a brief history + # to maintain context. + # - Larger values: Use when retaining more conversational context is critical for accurate responses + # or maintaining a richer dialogue. + reducer_msg_count=10, + # reducer_threshold: + # Purpose: Acts as a buffer to avoid reducing history prematurely when the current message count exceeds + # reducer_msg_count by a small margin. + # What it controls: Helps ensure that essential paired messages (like a user query and the assistant’s response) + # are not "orphaned" or lost during truncation or summarization. + # Why change it?: + # - Smaller values: Use when you want stricter reduction criteria and are okay with possibly cutting older + # pairs of messages sooner. + # - Larger values: Use when you want to minimize the risk of cutting a critical part of the conversation, + # especially for sensitive interactions like API function calls or complex responses. 
+ reducer_threshold=10, + ) + # print("===TruncatedAgentReduction Demo===") + # await example.invoke_agent(trunc_agent, chat_history=history_reducer, message_count=50) + + # Demonstrate summarization-based reduction + sum_agent, history_reducer = example.create_summarizing_agent( + # Same configuration for summarization-based reduction + reducer_msg_count=10, # Target number of messages to retain + reducer_threshold=10, # Buffer to avoid premature reduction + ) + print("\n===SummarizedAgentReduction Demo===") + await example.invoke_agent(sum_agent, chat_history=history_reducer, message_count=50) + + # Demonstrate group chat with truncation + print("\n===TruncatedChatReduction Demo===") + trunc_agent.history_reducer.messages.clear() + await example.invoke_chat(trunc_agent, message_count=50) + + # Demonstrate group chat with summarization + print("\n===SummarizedChatReduction Demo===") + sum_agent.history_reducer.messages.clear() + await example.invoke_chat(sum_agent, message_count=50) + + +# Interaction between reducer_msg_count and reducer_threshold: +# The combination of these values determines when reduction occurs and how much history is kept. +# Example: +# If reducer_msg_count = 10 and reducer_threshold = 5, history will not be truncated until the total message count +# exceeds 15. This approach ensures flexibility in retaining conversational context while still adhering to memory +# constraints. + +# Recommendations: +# - Adjust for performance: Use a lower reducer_msg_count in environments with limited memory or when the assistant +# needs faster processing times. +# - Context sensitivity: Increase reducer_msg_count and reducer_threshold in use cases where maintaining continuity +# across multiple interactions is essential (e.g., multi-turn conversations or complex workflows). +# - Experiment: Start with the default values (10 and 10) and refine based on your application's behavior and the +# assistant's response quality. + + +# Execute the main function if the script is run directly +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat/mixed_chat_files.py b/python/samples/concepts/agents/mixed_chat/mixed_chat_files.py deleted file mode 100644 index 32922723ad34..000000000000 --- a/python/samples/concepts/agents/mixed_chat/mixed_chat_files.py +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import os - -from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.open_ai import AzureAssistantAgent -from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion -from semantic_kernel.contents.annotation_content import AnnotationContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -""" -The following sample demonstrates how to create an OpenAI -assistant using either Azure OpenAI or OpenAI, a chat completion -agent and have them participate in a group chat working on -an uploaded file. 
-""" - - -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) - return kernel - - -async def main(): - file_path = os.path.join( - os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), - "resources", - "mixed_chat_files", - "user-context.txt", - ) - - # Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # If desired, create using OpenAI resources - # client, model = OpenAIAssistantAgent.setup_resources() - - # Load the text file as a FileObject - with open(file_path, "rb") as file: - file = await client.files.create(file=file, purpose="assistants") - - code_interpreter_tool, code_interpreter_tool_resource = AzureAssistantAgent.configure_code_interpreter_tool( - file_ids=file.id - ) - - definition = await client.beta.assistants.create( - model=model, - instructions="Create charts as requested without explanation.", - name="ChartMaker", - tools=code_interpreter_tool, - tool_resources=code_interpreter_tool_resource, - ) - - # Create the AzureAssistantAgent instance using the client and the assistant definition - analyst_agent = AzureAssistantAgent( - client=client, - definition=definition, - ) - - service_id = "summary" - summary_agent = ChatCompletionAgent( - kernel=_create_kernel_with_chat_completion(service_id=service_id), - instructions="Summarize the entire conversation for the user in natural language.", - name="SummaryAgent", - ) - - # Create the AgentGroupChat object, which will manage the chat between the agents - # We don't always need to specify the agents in the chat up front - # As shown below, calling `chat.invoke(agent=)` will automatically add the - # agent to the chat - chat = AgentGroupChat() - - try: - user_and_agent_inputs = ( - ( - "Create a tab delimited file report of the ordered (descending) frequency distribution of " - "words in the file 'user-context.txt' for any words used more than once.", - analyst_agent, - ), - (None, summary_agent), - ) - - for input, agent in user_and_agent_inputs: - if input: - await chat.add_chat_message(input) - print(f"# {AuthorRole.USER}: '{input}'") - - async for content in chat.invoke(agent=agent): - print(f"# {content.role} - {content.name or '*'}: '{content.content}'") - if len(content.items) > 0: - for item in content.items: - if ( - isinstance(agent, AzureAssistantAgent) - and isinstance(item, AnnotationContent) - and item.file_id - ): - print(f"\n`{item.quote}` => {item.file_id}") - response_content = await agent.client.files.content(item.file_id) - print(response_content.text) - finally: - await client.files.delete(file_id=file.id) - await client.beta.assistants.delete(analyst_agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat/mixed_chat_images.py b/python/samples/concepts/agents/mixed_chat/mixed_chat_images.py deleted file mode 100644 index e32d22a3903e..000000000000 --- a/python/samples/concepts/agents/mixed_chat/mixed_chat_images.py +++ /dev/null @@ -1,106 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio - -from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.open_ai import AzureAssistantAgent -from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion -from semantic_kernel.contents.annotation_content import AnnotationContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -""" -The following sample demonstrates how to create an OpenAI -assistant using either Azure OpenAI or OpenAI, a chat completion -agent and have them participate in a group chat working with -image content. -""" - - -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) - return kernel - - -async def main(): - # Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # Get the code interpreter tool and resources - code_interpreter_tool, code_interpreter_resources = AzureAssistantAgent.configure_code_interpreter_tool() - - # Create the assistant definition - definition = await client.beta.assistants.create( - model=model, - name="Analyst", - instructions="Create charts as requested without explanation", - tools=code_interpreter_tool, - tool_resources=code_interpreter_resources, - ) - - # Create the AzureAssistantAgent instance using the client and the assistant definition - analyst_agent = AzureAssistantAgent( - client=client, - definition=definition, - ) - - service_id = "summary" - summary_agent = ChatCompletionAgent( - kernel=_create_kernel_with_chat_completion(service_id=service_id), - instructions="Summarize the entire conversation for the user in natural language.", - name="Summarizer", - ) - - # Create the AgentGroupChat object, which will manage the chat between the agents - # We don't always need to specify the agents in the chat up front - # As shown below, calling `chat.invoke(agent=)` will automatically add the - # agent to the chat - chat = AgentGroupChat() - - try: - user_and_agent_inputs = ( - ( - """ - Graph the percentage of storm events by state using a pie chart: - - State, StormCount - TEXAS, 4701 - KANSAS, 3166 - IOWA, 2337 - ILLINOIS, 2022 - MISSOURI, 2016 - GEORGIA, 1983 - MINNESOTA, 1881 - WISCONSIN, 1850 - NEBRASKA, 1766 - NEW YORK, 1750 - """.strip(), - analyst_agent, - ), - (None, summary_agent), - ) - - for input, agent in user_and_agent_inputs: - if input: - await chat.add_chat_message(input) - print(f"# {AuthorRole.USER}: '{input}'") - - async for content in chat.invoke(agent=agent): - print(f"# {content.role} - {content.name or '*'}: '{content.content}'") - if len(content.items) > 0: - for item in content.items: - if ( - isinstance(agent, AzureAssistantAgent) - and isinstance(item, AnnotationContent) - and item.file_id - ): - print(f"\n`{item.quote}` => {item.file_id}") - response_content = await agent.client.files.content(item.file_id) - print(response_content.text) - finally: - await client.beta.assistants.delete(analyst_agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat/mixed_chat_reset.py b/python/samples/concepts/agents/mixed_chat/mixed_chat_reset.py deleted file mode 100644 index 9e707686500a..000000000000 --- a/python/samples/concepts/agents/mixed_chat/mixed_chat_reset.py +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio -from typing import TYPE_CHECKING - -from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent -from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -if TYPE_CHECKING: - pass - -""" -The following sample demonstrates how to create an OpenAI -assistant using either Azure OpenAI or OpenAI, a chat completion -agent and have them participate in a group chat to work towards -the user's requirement. It also demonstrates how the underlying -agent reset method is used to clear the current state of the chat -""" - - -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) - return kernel - - -async def main(): - # First create the ChatCompletionAgent - chat_agent = ChatCompletionAgent( - kernel=_create_kernel_with_chat_completion("chat"), - name="chat_agent", - instructions=""" - The user may either provide information or query on information previously provided. - If the query does not correspond with information provided, inform the user that their query - cannot be answered. - """, - ) - - # Next, we will create the AzureAssistantAgent - - # Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # Create the assistant definition - definition = await client.beta.assistants.create( - model=model, - name="copywriter", - instructions=""" - The user may either provide information or query on information previously provided. - If the query does not correspond with information provided, inform the user that their query - cannot be answered. 
- """, - ) - - # Create the AzureAssistantAgent instance using the client and the assistant definition - assistant_agent = AzureAssistantAgent( - client=client, - definition=definition, - ) - - # Create the AgentGroupChat object, which will manage the chat between the agents - # We don't always need to specify the agents in the chat up front - # As shown below, calling `chat.invoke(agent=)` will automatically add the - # agent to the chat - chat = AgentGroupChat() - - try: - user_inputs = [ - "What is my favorite color?", - "I like green.", - "What is my favorite color?", - "[RESET]", - "What is my favorite color?", - ] - - for user_input in user_inputs: - # Check for reset indicator - if user_input == "[RESET]": - print("\nResetting chat...") - await chat.reset() - continue - - # First agent (assistant_agent) receives the user input - await chat.add_chat_message(user_input) - print(f"\n{AuthorRole.USER}: '{user_input}'") - async for message in chat.invoke(agent=assistant_agent): - if message.content is not None: - print(f"\n# {message.role} - {message.name or '*'}: '{message.content}'") - - # Second agent (chat_agent) just responds without new user input - async for message in chat.invoke(agent=chat_agent): - if message.content is not None: - print(f"\n# {message.role} - {message.name or '*'}: '{message.content}'") - finally: - await chat.reset() - await assistant_agent.client.beta.assistants.delete(assistant_agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat/mixed_chat_streaming.py b/python/samples/concepts/agents/mixed_chat/mixed_chat_streaming.py deleted file mode 100644 index 9a0983099ff0..000000000000 --- a/python/samples/concepts/agents/mixed_chat/mixed_chat_streaming.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.open_ai import AzureAssistantAgent -from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - -##################################################################### -# The following sample demonstrates how to create an OpenAI # -# assistant using either Azure OpenAI or OpenAI, a chat completion # -# agent and have them participate in a group chat to work towards # -# the user's requirement. # -##################################################################### - - -class ApprovalTerminationStrategy(TerminationStrategy): - """A strategy for determining when an agent should terminate.""" - - async def should_agent_terminate(self, agent, history): - """Check if the agent should terminate.""" - return "approved" in history[-1].content.lower() - - -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) - return kernel - - -async def main(): - # First create a ChatCompletionAgent - agent_reviewer = ChatCompletionAgent( - kernel=_create_kernel_with_chat_completion("artdirector"), - name="ArtDirector", - instructions=""" - You are an art director who has opinions about copywriting born of a love for David Ogilvy. - The goal is to determine if the given copy is acceptable to print. - If so, state that it is approved. 
Only include the word "approved" if it is so. - If not, provide insight on how to refine suggested copy without example. - """, - ) - - # Next, we will create the AzureAssistantAgent - - # Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # Create the assistant definition - definition = await client.beta.assistants.create( - model=model, - name="CopyWriter", - instructions=""" - You are a copywriter with ten years of experience and are known for brevity and a dry humor. - The goal is to refine and decide on the single best copy as an expert in the field. - Only provide a single proposal per response. - You're laser focused on the goal at hand. - Don't waste time with chit chat. - Consider suggestions when refining an idea. - """, - ) - - # Create the AzureAssistantAgent instance using the client and the assistant definition - agent_writer = AzureAssistantAgent( - client=client, - definition=definition, - ) - - # Create the AgentGroupChat object, which will manage the chat between the agents - chat = AgentGroupChat( - agents=[agent_writer, agent_reviewer], - termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), - ) - - input = "a slogan for a new line of electric cars." - - try: - await chat.add_chat_message(input) - print(f"# {AuthorRole.USER}: '{input}'") - - last_agent = None - async for message in chat.invoke_stream(): - if message.content is not None: - if last_agent != message.name: - print(f"\n# {message.name}: ", end="", flush=True) - last_agent = message.name - print(f"{message.content}", end="", flush=True) - - print() - print(f"# IS COMPLETE: {chat.is_complete}") - finally: - await agent_writer.client.beta.assistants.delete(agent_writer.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat/mixed_chat_agents.py b/python/samples/concepts/agents/mixed_chat_agents.py similarity index 66% rename from python/samples/concepts/agents/mixed_chat/mixed_chat_agents.py rename to python/samples/concepts/agents/mixed_chat_agents.py index b975857e97e2..493f5e70f457 100644 --- a/python/samples/concepts/agents/mixed_chat/mixed_chat_agents.py +++ b/python/samples/concepts/agents/mixed_chat_agents.py @@ -3,9 +3,10 @@ import asyncio from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.agents.open_ai import OpenAIAssistantAgent from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.kernel import Kernel @@ -51,38 +52,29 @@ def _create_kernel_with_chat_completion(service_id: str) -> Kernel: async def main(): - agent_reviewer = ChatCompletionAgent( - kernel=_create_kernel_with_chat_completion("artdirector"), - name=REVIEWER_NAME, - instructions=REVIEWER_INSTRUCTIONS, - ) - - # To create an AzureAssistantAgent for Azure OpenAI, use the following: - client, model = AzureAssistantAgent.setup_resources() - - # Create the assistant definition - definition = await client.beta.assistants.create( - model=model, - name=COPYWRITER_NAME, - instructions=COPYWRITER_INSTRUCTIONS, - ) - - # Create the AzureAssistantAgent instance using the client and the assistant 
definition - agent_writer = AzureAssistantAgent( - client=client, - definition=definition, - ) - - # Create the AgentGroupChat object and specify the list of agents along with the termination strategy - chat = AgentGroupChat( - agents=[agent_writer, agent_reviewer], - termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), - ) - - input = "a slogan for a new line of electric cars." - try: - await chat.add_chat_message(input) + agent_reviewer = ChatCompletionAgent( + service_id="artdirector", + kernel=_create_kernel_with_chat_completion("artdirector"), + name=REVIEWER_NAME, + instructions=REVIEWER_INSTRUCTIONS, + ) + + agent_writer = await OpenAIAssistantAgent.create( + service_id="copywriter", + kernel=Kernel(), + name=COPYWRITER_NAME, + instructions=COPYWRITER_INSTRUCTIONS, + ) + + chat = AgentGroupChat( + agents=[agent_writer, agent_reviewer], + termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), + ) + + input = "a slogan for a new line of electric cars." + + await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) print(f"# {AuthorRole.USER}: '{input}'") async for content in chat.invoke(): @@ -90,7 +82,7 @@ async def main(): print(f"# IS COMPLETE: {chat.is_complete}") finally: - await client.beta.assistants.delete(agent_writer.id) + await agent_writer.delete() if __name__ == "__main__": diff --git a/python/samples/concepts/agents/mixed_chat/mixed_chat_agents_plugins.py b/python/samples/concepts/agents/mixed_chat_agents_plugins.py similarity index 69% rename from python/samples/concepts/agents/mixed_chat/mixed_chat_agents_plugins.py rename to python/samples/concepts/agents/mixed_chat_agents_plugins.py index 02c093cb4a0f..6df7f88cac43 100644 --- a/python/samples/concepts/agents/mixed_chat/mixed_chat_agents_plugins.py +++ b/python/samples/concepts/agents/mixed_chat_agents_plugins.py @@ -4,12 +4,12 @@ from typing import Annotated from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.agents.open_ai import OpenAIAssistantAgent from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.functions.kernel_arguments import KernelArguments from semantic_kernel.functions.kernel_function_decorator import kernel_function from semantic_kernel.kernel import Kernel @@ -76,41 +76,34 @@ def _create_kernel_with_chat_completion(service_id: str) -> Kernel: async def main(): - kernel = _create_kernel_with_chat_completion("artdirector") - settings = kernel.get_prompt_execution_settings_from_service_id(service_id="artdirector") - # Configure the function choice behavior to auto invoke kernel functions - settings.function_choice_behavior = FunctionChoiceBehavior.Auto() - agent_reviewer = ChatCompletionAgent( - kernel=kernel, - name=REVIEWER_NAME, - instructions=REVIEWER_INSTRUCTIONS, - arguments=KernelArguments(settings=settings), - ) - - # Create the Assistant Agent using Azure OpenAI resources - client, model = AzureAssistantAgent.setup_resources() - - # Create the assistant definition - definition = await client.beta.assistants.create( 
- model=model, - name=COPYWRITER_NAME, - instructions=COPYWRITER_INSTRUCTIONS, - ) - - # Create the AzureAssistantAgent instance using the client and the assistant definition - agent_writer = AzureAssistantAgent( - client=client, - definition=definition, - ) - - chat = AgentGroupChat( - agents=[agent_writer, agent_reviewer], - termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), - ) - - input = "Write copy based on the food specials." try: - await chat.add_chat_message(input) + kernel = _create_kernel_with_chat_completion("artdirector") + settings = kernel.get_prompt_execution_settings_from_service_id(service_id="artdirector") + # Configure the function choice behavior to auto invoke kernel functions + settings.function_choice_behavior = FunctionChoiceBehavior.Auto() + agent_reviewer = ChatCompletionAgent( + service_id="artdirector", + kernel=kernel, + name=REVIEWER_NAME, + instructions=REVIEWER_INSTRUCTIONS, + execution_settings=settings, + ) + + agent_writer = await OpenAIAssistantAgent.create( + service_id="copywriter", + kernel=Kernel(), + name=COPYWRITER_NAME, + instructions=COPYWRITER_INSTRUCTIONS, + ) + + chat = AgentGroupChat( + agents=[agent_writer, agent_reviewer], + termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), + ) + + input = "Write copy based on the food specials." + + await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) print(f"# {AuthorRole.USER}: '{input}'") async for content in chat.invoke(): @@ -118,7 +111,7 @@ async def main(): print(f"# IS COMPLETE: {chat.is_complete}") finally: - await agent_writer.client.beta.assistants.delete(agent_writer.id) + await agent_writer.delete() if __name__ == "__main__": diff --git a/python/samples/concepts/agents/mixed_chat_files.py b/python/samples/concepts/agents/mixed_chat_files.py new file mode 100644 index 000000000000..b5d21c3fd09f --- /dev/null +++ b/python/samples/concepts/agents/mixed_chat_files.py @@ -0,0 +1,92 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os + +from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent +from semantic_kernel.agents.open_ai import OpenAIAssistantAgent +from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +from semantic_kernel.contents.annotation_content import AnnotationContent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI, a chat completion # +# agent and have them participate in a group chat working on # +# an uploaded file. # +##################################################################### + + +SUMMARY_INSTRUCTIONS = "Summarize the entire conversation for the user in natural language." 
+ + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel + + +async def invoke_agent( + chat: AgentGroupChat, agent: ChatCompletionAgent | OpenAIAssistantAgent, input: str | None = None +) -> None: + """Invoke the agent with the user input.""" + if input: + await chat.add_chat_message(message=ChatMessageContent(role=AuthorRole.USER, content=input)) + print(f"# {AuthorRole.USER}: '{input}'") + + async for content in chat.invoke(agent=agent): + print(f"# {content.role} - {content.name or '*'}: '{content.content}'") + if len(content.items) > 0: + for item in content.items: + if isinstance(item, AnnotationContent): + print(f"\n`{item.quote}` => {item.file_id}") + response_content = await agent.client.files.content(item.file_id) + print(response_content.text) + + +async def main(): + try: + file_path = os.path.join( + os.path.dirname(os.path.dirname(os.path.realpath(__file__))), + "resources", + "mixed_chat_files", + "user-context.txt", + ) + + analyst_agent = await OpenAIAssistantAgent.create( + service_id="analyst", + kernel=Kernel(), + enable_code_interpreter=True, + code_interpreter_filenames=[file_path], + name="AnalystAgent", + ) + + service_id = "summary" + summary_agent = ChatCompletionAgent( + service_id=service_id, + kernel=_create_kernel_with_chat_completion(service_id=service_id), + instructions=SUMMARY_INSTRUCTIONS, + name="SummaryAgent", + ) + + chat = AgentGroupChat() + + await invoke_agent( + chat=chat, + agent=analyst_agent, + input=""" + Create a tab delimited file report of the ordered (descending) frequency distribution + of words in the file 'user-context.txt' for any words used more than once. + """, + ) + await invoke_agent(chat=chat, agent=summary_agent) + finally: + if analyst_agent is not None: + [await analyst_agent.delete_file(file_id=file_id) for file_id in analyst_agent.code_interpreter_file_ids] + await analyst_agent.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat_reset.py b/python/samples/concepts/agents/mixed_chat_reset.py new file mode 100644 index 000000000000..2480358ac4da --- /dev/null +++ b/python/samples/concepts/agents/mixed_chat_reset.py @@ -0,0 +1,83 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +from typing import TYPE_CHECKING + +from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent +from semantic_kernel.agents.open_ai import OpenAIAssistantAgent +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +if TYPE_CHECKING: + from semantic_kernel.agents.agent import Agent + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI, a chat completion # +# agent and have them participate in a group chat to work towards # +# the user's requirement. It also demonstrates how the underlying # +# agent reset method is used to clear the current state of the chat # +##################################################################### + +INSTRUCTIONS = """ +The user may either provide information or query on information previously provided. 
+If the query does not correspond with information provided, inform the user that their query cannot be answered. +""" + + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel + + +async def main(): + try: + assistant_agent = await OpenAIAssistantAgent.create( + service_id="copywriter", + kernel=Kernel(), + name=f"{OpenAIAssistantAgent.__name__}", + instructions=INSTRUCTIONS, + ) + + chat_agent = ChatCompletionAgent( + service_id="chat", + kernel=_create_kernel_with_chat_completion("chat"), + name=f"{ChatCompletionAgent.__name__}", + instructions=INSTRUCTIONS, + ) + + chat = AgentGroupChat() + + async def invoke_agent(agent: "Agent", input: str | None = None): + if input is not None: + await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) + print(f"\n{AuthorRole.USER}: '{input}'") + + async for message in chat.invoke(agent=agent): + if message.content is not None: + print(f"\n# {message.role} - {message.name or '*'}: '{message.content}'") + + await invoke_agent(agent=assistant_agent, input="What is my favorite color?") + await invoke_agent(agent=chat_agent) + + await invoke_agent(agent=assistant_agent, input="I like green.") + await invoke_agent(agent=chat_agent) + + await invoke_agent(agent=assistant_agent, input="What is my favorite color?") + await invoke_agent(agent=chat_agent) + + print("\nResetting chat...") + await chat.reset() + + await invoke_agent(agent=assistant_agent, input="What is my favorite color?") + await invoke_agent(agent=chat_agent) + finally: + await chat.reset() + await assistant_agent.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat_streaming.py b/python/samples/concepts/agents/mixed_chat_streaming.py new file mode 100644 index 000000000000..3aac54f3eb45 --- /dev/null +++ b/python/samples/concepts/agents/mixed_chat_streaming.py @@ -0,0 +1,95 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent +from semantic_kernel.agents.open_ai import OpenAIAssistantAgent +from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI, a chat completion # +# agent and have them participate in a group chat to work towards # +# the user's requirement. # +##################################################################### + + +class ApprovalTerminationStrategy(TerminationStrategy): + """A strategy for determining when an agent should terminate.""" + + async def should_agent_terminate(self, agent, history): + """Check if the agent should terminate.""" + return "approved" in history[-1].content.lower() + + +REVIEWER_NAME = "ArtDirector" +REVIEWER_INSTRUCTIONS = """ +You are an art director who has opinions about copywriting born of a love for David Ogilvy. +The goal is to determine if the given copy is acceptable to print. +If so, state that it is approved. Only include the word "approved" if it is so. 
+If not, provide insight on how to refine suggested copy without example. +""" + +COPYWRITER_NAME = "CopyWriter" +COPYWRITER_INSTRUCTIONS = """ +You are a copywriter with ten years of experience and are known for brevity and a dry humor. +The goal is to refine and decide on the single best copy as an expert in the field. +Only provide a single proposal per response. +You're laser focused on the goal at hand. +Don't waste time with chit chat. +Consider suggestions when refining an idea. +""" + + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel + + +async def main(): + try: + agent_reviewer = ChatCompletionAgent( + service_id="artdirector", + kernel=_create_kernel_with_chat_completion("artdirector"), + name=REVIEWER_NAME, + instructions=REVIEWER_INSTRUCTIONS, + ) + + agent_writer = await OpenAIAssistantAgent.create( + service_id="copywriter", + kernel=Kernel(), + name=COPYWRITER_NAME, + instructions=COPYWRITER_INSTRUCTIONS, + ) + + chat = AgentGroupChat( + agents=[agent_writer, agent_reviewer], + termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), + ) + + input = "a slogan for a new line of electric cars." + + await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) + print(f"# {AuthorRole.USER}: '{input}'") + + last_agent = None + async for message in chat.invoke_stream(): + if message.content is not None: + if last_agent != message.name: + print(f"\n# {message.name}: ", end="", flush=True) + last_agent = message.name + print(f"{message.content}", end="", flush=True) + + print() + print(f"# IS COMPLETE: {chat.is_complete}") + finally: + await agent_writer.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/README.md b/python/samples/concepts/agents/openai_assistant/README.md deleted file mode 100644 index 6689c05f9f4b..000000000000 --- a/python/samples/concepts/agents/openai_assistant/README.md +++ /dev/null @@ -1,101 +0,0 @@ -## OpenAI Assistant Agents - -The following getting started samples show how to use OpenAI Assistant agents with Semantic Kernel. - -## Assistants API Overview - -The Assistants API is a robust solution from OpenAI that empowers developers to integrate powerful, purpose-built AI assistants into their applications. It streamlines the development process by handling conversation histories, managing threads, and providing seamless access to advanced tools. - -### Key Features - -- **Purpose-Built AI Assistants:** - Assistants are specialized AIs that leverage OpenAI’s models to interact with users, access files, maintain persistent threads, and call additional tools. This enables highly tailored and effective user interactions. - -- **Simplified Conversation Management:** - The concept of a **thread** -- a dedicated conversation session between an assistant and a user -- ensures that message history is managed automatically. Threads optimize the conversation context by storing and truncating messages as needed. - -- **Integrated Tool Access:** - The API provides built-in tools such as: - - **Code Interpreter:** Allows the assistant to execute code, enhancing its ability to solve complex tasks. - - **File Search:** Implements best practices for retrieving data from uploaded files, including advanced chunking and embedding techniques. 
- -- **Enhanced Function Calling:** - With improved support for third-party tool integration, the Assistants API enables assistants to extend their capabilities beyond native functions. - -For more detailed technical information, refer to the [Assistants API](https://platform.openai.com/docs/assistants/overview). - -### Semantic Kernel OpenAI Assistant Agents - -OpenAI Assistant Agents are created in the following way: - -```python -from semantic_kernel.agents.open_ai import OpenAIAssistantAgent - -# Create the client using OpenAI resources and configuration -client, model = OpenAIAssistantAgent.setup_resources() - -# Create the assistant definition -definition = await client.beta.assistants.create( - model=model, - instructions="", - name="", -) - -# Define the Semantic Kernel OpenAI Assistant Agent -agent = OpenAIAssistantAgent( - client=client, - definition=definition, -) - -# Define a thread and invoke the agent with the user input -thread = await agent.client.beta.threads.create() - -# Add a message to the thread -await agent.add_chat_message(thread_id=thread.id, message="Why is the sky blue?") - -# Invoke the agent -async for content in agent.invoke(thread_id=thread.id): - print(f"# {content.role}: {content.content}") -``` - -### Semantic Kernel Azure Assistant Agents - -Azure Assistant Agents are currently in preview and require a `-preview` API version (minimum version: `2024-05-01-preview`). As new features are introduced, API versions will be updated accordingly. For the latest versioning details, please refer to the [Azure OpenAI API preview lifecycle](https://learn.microsoft.com/azure/ai-services/openai/api-version-deprecation). - -To specify the correct API version, set the following environment variable (for example, in your `.env` file): - -```bash -AZURE_OPENAI_API_VERSION="2025-01-01-preview" -``` - -Alternatively, you can pass the `api_version` parameter when creating an `AzureAssistantAgent`: - -```python -from semantic_kernel.agents.open_ai import AzureAssistantAgent - -# Create the client using Azure OpenAI resources and configuration -client, model = AzureAssistantAgent.setup_resources() - -# Create the assistant definition -definition = await client.beta.assistants.create( - model=model, - instructions="", - name="", -) - -# Define the Semantic Kernel Azure OpenAI Assistant Agent -agent = AzureAssistantAgent( - client=client, - definition=definition, -) - -# Define a thread and invoke the agent with the user input -thread = await agent.client.beta.threads.create() - -# Add a message to the thread -await agent.add_chat_message(thread_id=thread.id, message="Why is the sky blue?") - -# Invoke the agent -async for content in agent.invoke(thread_id=thread.id): - print(f"# {content.role}: {content.content}") -``` \ No newline at end of file diff --git a/python/samples/concepts/agents/openai_assistant/__init__.py b/python/samples/concepts/agents/openai_assistant/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker.py deleted file mode 100644 index 83d07d210ebc..000000000000 --- a/python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
-import asyncio - -from samples.concepts.agents.openai_assistant.openai_assistant_sample_utils import download_response_images -from semantic_kernel.agents.open_ai import AzureAssistantAgent -from semantic_kernel.contents.file_reference_content import FileReferenceContent - -""" -The following sample demonstrates how to create an OpenAI -assistant using either Azure OpenAI or OpenAI and leverage the -assistant and leverage the assistant's code interpreter tool -in a streaming fashion. -""" - - -async def main(): - # Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # Get the code interpreter tool and resources - code_interpreter_tool, code_interpreter_resource = AzureAssistantAgent.configure_code_interpreter_tool() - - # Define the assistant definition - definition = await client.beta.assistants.create( - model=model, - instructions="Create charts as requested without explanation.", - name="ChartMaker", - tools=code_interpreter_tool, - tool_resources=code_interpreter_resource, - ) - - # Create the AzureAssistantAgent instance using the client and the assistant definition - agent = AzureAssistantAgent( - client=client, - definition=definition, - ) - - # Define a thread and invoke the agent with the user input - thread = await agent.client.beta.threads.create() - - user_inputs = [ - """ - Display this data using a bar-chart: - - Banding Brown Pink Yellow Sum - X00000 339 433 126 898 - X00300 48 421 222 691 - X12345 16 395 352 763 - Others 23 373 156 552 - Sum 426 1622 856 2904 - """, - "Can you regenerate this same chart using the category names as the bar colors?", - ] - - try: - for user_input in user_inputs: - file_ids = [] - await agent.add_chat_message(thread_id=thread.id, message=user_input) - async for message in agent.invoke(thread_id=thread.id): - if message.content: - print(f"# {message.role}: {message.content}") - - if len(message.items) > 0: - for item in message.items: - if isinstance(item, FileReferenceContent): - file_ids.extend([ - item.file_id - for item in message.items - if isinstance(item, FileReferenceContent) and item.file_id is not None - ]) - - # Use a sample utility method to download the files to the current working directory - await download_response_images(agent, file_ids) - - finally: - await client.beta.threads.delete(thread.id) - await client.beta.assistants.delete(assistant_id=agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker_streaming.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker_streaming.py deleted file mode 100644 index d4ec9662b490..000000000000 --- a/python/samples/concepts/agents/openai_assistant/openai_assistant_chart_maker_streaming.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio - -from samples.concepts.agents.openai_assistant.openai_assistant_sample_utils import download_response_images -from semantic_kernel.agents.open_ai import AzureAssistantAgent -from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent - -""" -The following sample demonstrates how to create an OpenAI -assistant using either Azure OpenAI or OpenAI and leverage the -assistant and leverage the assistant's code interpreter tool -in a streaming fashion. 
-""" - - -async def main(): - # Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # Get the code interpreter tool and resources - code_interpreter_tool, code_interpreter_resource = AzureAssistantAgent.configure_code_interpreter_tool() - - # Define the assistant definition - definition = await client.beta.assistants.create( - model=model, - instructions="Create charts as requested without explanation.", - name="ChartMaker", - tools=code_interpreter_tool, - tool_resources=code_interpreter_resource, - ) - - # Create the AzureAssistantAgent instance using the client and the assistant definition - agent = AzureAssistantAgent( - client=client, - definition=definition, - ) - - # Define a thread and invoke the agent with the user input - thread = await agent.client.beta.threads.create() - - user_inputs = [ - """ - Display this data using a bar-chart: - - Banding Brown Pink Yellow Sum - X00000 339 433 126 898 - X00300 48 421 222 691 - X12345 16 395 352 763 - Others 23 373 156 552 - Sum 426 1622 856 2904 - """, - "Can you regenerate this same chart using the category names as the bar colors?", - ] - - try: - for user_input in user_inputs: - await agent.add_chat_message(thread_id=thread.id, message=user_input) - - print(f"# User: '{user_input}'") - - file_ids: list[str] = [] - is_code = False - last_role = None - async for response in agent.invoke_stream(thread_id=thread.id): - current_is_code = response.metadata.get("code", False) - - if current_is_code: - if not is_code: - print("\n\n```python") - is_code = True - print(response.content, end="", flush=True) - else: - if is_code: - print("\n```") - is_code = False - last_role = None - if hasattr(response, "role") and response.role is not None and last_role != response.role: - print(f"\n# {response.role}: ", end="", flush=True) - last_role = response.role - print(response.content, end="", flush=True) - file_ids.extend([ - item.file_id - for item in response.items - if isinstance(item, StreamingFileReferenceContent) and item.file_id is not None - ]) - if is_code: - print("```\n") - - # Use a sample utility method to download the files to the current working directory - await download_response_images(agent, file_ids) - file_ids.clear() - - finally: - await client.beta.threads.delete(thread.id) - await client.beta.assistants.delete(assistant_id=agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation.py deleted file mode 100644 index 76c9262cd046..000000000000 --- a/python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio -import os - -from samples.concepts.agents.openai_assistant.openai_assistant_sample_utils import download_response_files -from semantic_kernel.agents.open_ai import AzureAssistantAgent -from semantic_kernel.contents.annotation_content import AnnotationContent - -""" -The following sample demonstrates how to create an OpenAI -assistant using either Azure OpenAI or OpenAI and leverage the -assistant's ability to have the code interpreter work with -uploaded files. This sample uses non-streaming responses. 
-""" - - -async def main(): - # Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - csv_file_path = os.path.join( - os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), - "resources", - "agent_assistant_file_manipulation", - "sales.csv", - ) - - # Load the employees PDF file as a FileObject - with open(csv_file_path, "rb") as file: - file = await client.files.create(file=file, purpose="assistants") - - # Get the code interpreter tool and resources - code_interpreter_tool, code_interpreter_tool_resource = AzureAssistantAgent.configure_code_interpreter_tool(file.id) - - # Create the assistant definition - definition = await client.beta.assistants.create( - model=model, - name="FileManipulation", - instructions="Find answers to the user's questions in the provided file.", - tools=code_interpreter_tool, - tool_resources=code_interpreter_tool_resource, - ) - - # Create the AzureAssistantAgent instance using the client and the assistant definition - agent = AzureAssistantAgent( - client=client, - definition=definition, - ) - - # Define a thread and invoke the agent with the user input - thread = await agent.client.beta.threads.create() - - try: - user_inputs = [ - "Which segment had the most sales?", - "List the top 5 countries that generated the most profit.", - "Create a tab delimited file report of profit by each country per month.", - ] - - for user_input in user_inputs: - await agent.add_chat_message(thread_id=thread.id, message=user_input) - - print(f"# User: '{user_input}'") - async for content in agent.invoke(thread_id=thread.id): - if content.metadata.get("code", False): - print(f"# {content.role}:\n\n```python") - print(content.content) - print("```") - else: - print(f"# {content.role}: {content.content}") - - if content.items: - for item in content.items: - if isinstance(item, AnnotationContent): - await download_response_files(agent, [item]) - finally: - await client.files.delete(file.id) - await client.beta.threads.delete(thread.id) - await client.beta.assistants.delete(agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation_streaming.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation_streaming.py deleted file mode 100644 index b34a46b43105..000000000000 --- a/python/samples/concepts/agents/openai_assistant/openai_assistant_file_manipulation_streaming.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio -import os - -from samples.concepts.agents.openai_assistant.openai_assistant_sample_utils import download_response_files -from semantic_kernel.agents.open_ai import AzureAssistantAgent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent - -""" -The following sample demonstrates how to create an OpenAI -assistant using either Azure OpenAI or OpenAI and leverage the -assistant's ability to have the code interpreter work with -uploaded files. This sample uses streaming responses. 
-""" - - -async def main(): - # Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - csv_file_path = os.path.join( - os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), - "resources", - "agent_assistant_file_manipulation", - "sales.csv", - ) - - # Load the employees PDF file as a FileObject - with open(csv_file_path, "rb") as file: - file = await client.files.create(file=file, purpose="assistants") - - # Get the code interpreter tool and resources - code_interpreter_tools, code_interpreter_tool_resources = AzureAssistantAgent.configure_code_interpreter_tool( - file.id - ) - - # Create the assistant definition - definition = await client.beta.assistants.create( - model=model, - name="FileManipulation", - instructions="Find answers to the user's questions in the provided file.", - tools=code_interpreter_tools, - tool_resources=code_interpreter_tool_resources, - ) - - # Create the AzureAssistantAgent instance using the client and the assistant definition - agent = AzureAssistantAgent( - client=client, - definition=definition, - ) - - # Define a thread and invoke the agent with the user input - thread = await agent.client.beta.threads.create() - - try: - user_inputs = [ - # "Which segment had the most sales?", - # "List the top 5 countries that generated the most profit.", - "Create a tab delimited file report of profit by each country per month.", - ] - for user_input in user_inputs: - await agent.add_chat_message(thread_id=thread.id, message=user_input) - - print(f"# User: '{user_input}'") - annotations: list[StreamingAnnotationContent] = [] - messages: list[ChatMessageContent] = [] - is_code = False - last_role = None - async for response in agent.invoke_stream(thread_id=thread.id, messages=messages): - current_is_code = response.metadata.get("code", False) - - if current_is_code: - if not is_code: - print("\n\n```python") - is_code = True - print(response.content, end="", flush=True) - else: - if is_code: - print("\n```") - is_code = False - last_role = None - if hasattr(response, "role") and response.role is not None and last_role != response.role: - print(f"\n# {response.role}: ", end="", flush=True) - last_role = response.role - print(response.content, end="", flush=True) - if is_code: - print("```\n") - else: - print() - - # Use a sample utility method to download the files to the current working directory - annotations.extend( - item for message in messages for item in message.items if isinstance(item, StreamingAnnotationContent) - ) - await download_response_files(agent, annotations) - annotations.clear() - finally: - await client.files.delete(file.id) - await client.beta.threads.delete(thread.id) - await client.beta.assistants.delete(agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_retrieval.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_retrieval.py deleted file mode 100644 index 57cdf5e7e7aa..000000000000 --- a/python/samples/concepts/agents/openai_assistant/openai_assistant_retrieval.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio - -from semantic_kernel.agents.open_ai import AzureAssistantAgent - -""" -The following sample demonstrates how to create an OpenAI -assistant using either Azure OpenAI or OpenAI and retrieve it from -the server to create a new instance of the assistant. 
This is done by -retrieving the assistant definition from the server using the Assistant's -ID and creating a new instance of the assistant using the retrieved definition. -""" - - -async def main(): - # Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # Create the assistant definition - definition = await client.beta.assistants.create( - model=model, - name="Assistant", - instructions="You are a helpful assistant answering questions about the world in one sentence.", - ) - - # Store the assistant ID - assistant_id = definition.id - - # Retrieve the assistant definition from the server based on the assistant ID - new_asst_definition = await client.beta.assistants.retrieve(assistant_id) - - # Create the AzureAssistantAgent instance using the client and the assistant definition - agent = AzureAssistantAgent( - client=client, - definition=new_asst_definition, - ) - - # Define a thread and invoke the agent with the user input - thread = await agent.client.beta.threads.create() - - user_inputs = ["Why is the sky blue?"] - - try: - for user_input in user_inputs: - await agent.add_chat_message(thread_id=thread.id, message=user_input) - print(f"# User: '{user_input}'") - async for content in agent.invoke(thread_id=thread.id): - print(f"# {content.role}: {content.content}") - finally: - await client.beta.threads.delete(thread.id) - await client.beta.assistants.delete(agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_sample_utils.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_sample_utils.py deleted file mode 100644 index fe072c2596d4..000000000000 --- a/python/samples/concepts/agents/openai_assistant/openai_assistant_sample_utils.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import os -from collections.abc import Sequence -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from semantic_kernel.agents.open_ai import OpenAIAssistantAgent - from semantic_kernel.contents import AnnotationContent, StreamingAnnotationContent - - -async def download_file_content(agent: "OpenAIAssistantAgent", file_id: str, file_extension: str): - """A sample utility method to download the content of a file.""" - try: - # Fetch the content of the file using the provided method - response_content = await agent.client.files.content(file_id) - - # Get the current working directory of the file - current_directory = os.path.dirname(os.path.abspath(__file__)) - - # Define the path to save the image in the current directory - file_path = os.path.join( - current_directory, # Use the current directory of the file - f"{file_id}.{file_extension}", # You can modify this to use the actual filename with proper extension - ) - - # Save content to a file asynchronously - with open(file_path, "wb") as file: - file.write(response_content.content) - - print(f"File saved to: {file_path}") - except Exception as e: - print(f"An error occurred while downloading file {file_id}: {str(e)}") - - -async def download_response_images(agent: "OpenAIAssistantAgent", file_ids: list[str]): - """A sample utility method to download the content of a list of files.""" - if file_ids: - # Iterate over file_ids and download each one - for file_id in file_ids: - await download_file_content(agent, file_id, "png") - - -async def download_response_files( - agent: "OpenAIAssistantAgent", annotations: Sequence["StreamingAnnotationContent | AnnotationContent"] -): - """A sample utility method to download the content of a file.""" - if annotations: - # Iterate over file_ids and download each one - for ann in annotations: - if ann.quote is None or ann.file_id is None: - continue - extension = os.path.splitext(ann.quote)[1].lstrip(".") - await download_file_content(agent, ann.file_id, extension) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_streaming.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_streaming.py deleted file mode 100644 index c965acb92dd0..000000000000 --- a/python/samples/concepts/agents/openai_assistant/openai_assistant_streaming.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio -from typing import Annotated - -from semantic_kernel.agents.open_ai import AzureAssistantAgent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.functions.kernel_function_decorator import kernel_function - -""" -The following sample demonstrates how to create an OpenAI -assistant using either Azure OpenAI or OpenAI. OpenAI Assistants -allow for function calling, the use of file search and a -code interpreter. Assistant Threads are used to manage the -conversation state, similar to a Semantic Kernel Chat History. -This sample also demonstrates the Assistants Streaming -capability and how to manage an Assistants chat history. 
-""" - - -# Define a sample plugin for the sample -class MenuPlugin: - """A sample Menu Plugin used for the concept sample.""" - - @kernel_function(description="Provides a list of specials from the menu.") - def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: - return """ - Special Soup: Clam Chowder - Special Salad: Cobb Salad - Special Drink: Chai Tea - """ - - @kernel_function(description="Provides the price of the requested menu item.") - def get_item_price( - self, menu_item: Annotated[str, "The name of the menu item."] - ) -> Annotated[str, "Returns the price of the menu item."]: - return "$9.99" - - -async def main(): - # Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # Define the assistant definition - definition = await client.beta.assistants.create( - model=model, - name="Host", - instructions="Answer questions about the menu.", - ) - - # Create the AzureAssistantAgent instance using the client and the assistant definition and the defined plugin - agent = AzureAssistantAgent( - client=client, - definition=definition, - plugins=[MenuPlugin()], - ) - - thread = await client.beta.threads.create() - - user_inputs = ["Hello", "What is the special soup?", "What is the special drink?", "How much is that?", "Thank you"] - - try: - for user_input in user_inputs: - await agent.add_chat_message(thread_id=thread.id, message=user_input) - - print(f"# {AuthorRole.USER}: '{user_input}'") - - first_chunk = True - async for content in agent.invoke_stream(thread_id=thread.id): - if first_chunk: - print(f"# {content.role}: ", end="", flush=True) - first_chunk = False - print(content.content, end="", flush=True) - print() - finally: - await client.beta.threads.delete(thread.id) - await client.beta.assistants.delete(assistant_id=agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_structured_outputs.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_structured_outputs.py deleted file mode 100644 index fbb52a444353..000000000000 --- a/python/samples/concepts/agents/openai_assistant/openai_assistant_structured_outputs.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio - -from pydantic import BaseModel - -from semantic_kernel.agents.open_ai import AzureAssistantAgent - -""" -The following sample demonstrates how to create an OpenAI -assistant using either Azure OpenAI or OpenAI and leverage the -assistant's ability to returned structured outputs, based on a user-defined -Pydantic model. This could also be a non-Pydantic model. Use the convenience -method on the OpenAIAssistantAgent class to configure the response format, -as shown below. - -Note, you may specify your own JSON Schema. 
You'll need to make sure it is correct -if not using the convenience method, per the following format: - -json_schema = { - "type": "json_schema", - "json_schema": { - "schema": { - "properties": { - "response": {"title": "Response", "type": "string"}, - "items": {"items": {"type": "string"}, "title": "Items", "type": "array"}, - }, - "required": ["response", "items"], - "title": "ResponseModel", - "type": "object", - "additionalProperties": False, - }, - "name": "ResponseModel", - "strict": True, - }, -} - -# Create the assistant definition -definition = await client.beta.assistants.create( - model=model, - name="Assistant", - instructions="You are a helpful assistant answering questions about the world in one sentence.", - response_format=json_schema, -) -""" - - -# Define a Pydantic model that represents the structured output from the OpenAI service -class ResponseModel(BaseModel): - response: str - items: list[str] - - -async def main(): - # Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # Create the assistant definition - definition = await client.beta.assistants.create( - model=model, - name="Assistant", - instructions="You are a helpful assistant answering questions about the world in one sentence.", - response_format=AzureAssistantAgent.configure_response_format(ResponseModel), - ) - - # Create the AzureAssistantAgent instance using the client and the assistant definition - agent = AzureAssistantAgent( - client=client, - definition=definition, - ) - - # Define a thread and invoke the agent with the user input - thread = await agent.client.beta.threads.create() - - user_inputs = ["Why is the sky blue?"] - - try: - for user_input in user_inputs: - await agent.add_chat_message(thread_id=thread.id, message=user_input) - print(f"# User: '{user_input}'") - async for content in agent.invoke(thread_id=thread.id): - # The response returned is a Pydantic Model, so we can validate it using the model_validate_json method - response_model = ResponseModel.model_validate_json(content.content) - print(f"# {content.role}: {response_model}") - finally: - await client.beta.threads.delete(thread.id) - await client.beta.assistants.delete(agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_templating_streaming.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_templating_streaming.py deleted file mode 100644 index 83331109d15d..000000000000 --- a/python/samples/concepts/agents/openai_assistant/openai_assistant_templating_streaming.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -from semantic_kernel.agents.open_ai import AzureAssistantAgent -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.prompt_template.const import TEMPLATE_FORMAT_TYPES -from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig - -""" -The following sample demonstrates how to create an assistant -agent using either Azure OpenAI or OpenAI within Semantic Kernel. -It uses parameterized prompts and shows how to swap between -"semantic-kernel," "jinja2," and "handlebars" template formats, -This sample highlights how the agent's threaded conversation -state parallels the Chat History in Semantic Kernel, ensuring -all responses and parameters remain consistent throughout the -session. 
-""" - -inputs = [ - ("Home cooking is great.", None), - ("Talk about world peace.", "iambic pentameter"), - ("Say something about doing your best.", "e. e. cummings"), - ("What do you think about having fun?", "old school rap"), -] - - -async def invoke_agent_with_template( - template_str: str, template_format: TEMPLATE_FORMAT_TYPES, default_style: str = "haiku" -): - # Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # Configure the prompt template - prompt_template_config = PromptTemplateConfig(template=template_str, template_format=template_format) - - # Create the assistant definition - definition = await client.beta.assistants.create( - model=model, - name="MyPoetAgent", - ) - - # Create the AzureAssistantAgent instance using the client, the assistant definition, - # the prompt template config, and the constructor-level Kernel Arguments - agent = AzureAssistantAgent( - client=client, - definition=definition, - prompt_template_config=prompt_template_config, # type: ignore - arguments=KernelArguments(style=default_style), - ) - - # Define a thread and invoke the agent with the user input - thread = await agent.client.beta.threads.create() - - try: - for user_input, style in inputs: - # Add user message to the conversation - await agent.add_chat_message( - thread_id=thread.id, - message=user_input, - ) - print(f"# User: {user_input}\n") - - # If style is specified, override the 'style' argument - argument_overrides = None - if style: - # Arguments passed in at invocation time take precedence over - # the default arguments that were added via the constructor. - argument_overrides = KernelArguments(style=style) - - # Stream agent responses - async for response in agent.invoke_stream(thread_id=thread.id, arguments=argument_overrides): - if response.content: - print(f"{response.content}", flush=True, end="") - print("\n") - finally: - # Clean up - await client.beta.threads.delete(thread.id) - await client.beta.assistants.delete(agent.id) - - -async def main(): - # 1) Using "semantic-kernel" format - print("\n===== SEMANTIC-KERNEL FORMAT =====\n") - semantic_kernel_template = """ -Write a one verse poem on the requested topic in the style of {{$style}}. -Always state the requested style of the poem. Write appropriate G-rated content. -""" - await invoke_agent_with_template( - template_str=semantic_kernel_template, - template_format="semantic-kernel", - default_style="haiku", - ) - - # 2) Using "jinja2" format - print("\n===== JINJA2 FORMAT =====\n") - jinja2_template = """ -Write a one verse poem on the requested topic in the style of {{style}}. -Always state the requested style of the poem. Write appropriate G-rated content. -""" - await invoke_agent_with_template(template_str=jinja2_template, template_format="jinja2", default_style="haiku") - - # 3) Using "handlebars" format - print("\n===== HANDLEBARS FORMAT =====\n") - handlebars_template = """ -Write a one verse poem on the requested topic in the style of {{style}}. -Always state the requested style of the poem. Write appropriate G-rated content. 
-""" - await invoke_agent_with_template( - template_str=handlebars_template, template_format="handlebars", default_style="haiku" - ) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/agents/openai_assistant/openai_assistant_vision_streaming.py b/python/samples/concepts/agents/openai_assistant/openai_assistant_vision_streaming.py deleted file mode 100644 index 975a426c94a9..000000000000 --- a/python/samples/concepts/agents/openai_assistant/openai_assistant_vision_streaming.py +++ /dev/null @@ -1,93 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import os - -from semantic_kernel.agents.open_ai import AzureAssistantAgent -from semantic_kernel.contents import AuthorRole, ChatMessageContent, FileReferenceContent, ImageContent, TextContent - -""" -The following sample demonstrates how to create an OpenAI -assistant using either Azure OpenAI or OpenAI and leverage the -multi-modal content types to have the assistant describe images -and answer questions about them and provide streaming responses. -""" - - -async def main(): - # Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - file_path = os.path.join( - os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), "resources", "cat.jpg" - ) - - with open(file_path, "rb") as file: - file = await client.files.create(file=file, purpose="assistants") - - # Create the assistant definition - definition = await client.beta.assistants.create( - model=model, - instructions="Answer questions about the menu.", - name="Host", - ) - - # Create the AzureAssistantAgent instance using the client and the assistant definition - agent = AzureAssistantAgent( - client=client, - definition=definition, - ) - - # Define a thread and invoke the agent with the user input - thread = await agent.client.beta.threads.create() - - # Define a series of message with either ImageContent or FileReferenceContent - user_messages = { - ChatMessageContent( - role=AuthorRole.USER, - items=[ - TextContent(text="Describe this image."), - ImageContent( - uri="https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/New_york_times_square-terabass.jpg/1200px-New_york_times_square-terabass.jpg" - ), - ], - ), - ChatMessageContent( - role=AuthorRole.USER, - items=[ - TextContent(text="What is the main color in this image?"), - ImageContent(uri="https://upload.wikimedia.org/wikipedia/commons/5/56/White_shark.jpg"), - ], - ), - ChatMessageContent( - role=AuthorRole.USER, - items=[ - TextContent(text="Is there an animal in this image?"), - FileReferenceContent(file_id=file.id), - ], - ), - } - - try: - for message in user_messages: - await agent.add_chat_message(thread_id=thread.id, message=message) - - print(f"# User: '{message.items[0].text}'") # type: ignore - - first_chunk = True - async for content in agent.invoke_stream(thread_id=thread.id): - if content.role != AuthorRole.TOOL: - if first_chunk: - print("# Agent: ", end="", flush=True) - first_chunk = False - print(content.content, end="", flush=True) - print("\n") - - finally: - await client.files.delete(file.id) - await agent.client.beta.threads.delete(thread.id) - await agent.client.beta.assistants.delete(assistant_id=agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling.py 
b/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling.py index 27e80773645c..c74ebc322489 100644 --- a/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling.py +++ b/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling.py @@ -1,6 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. import asyncio +from typing import TYPE_CHECKING from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings from semantic_kernel import Kernel @@ -10,6 +11,9 @@ from semantic_kernel.core_plugins.time_plugin import TimePlugin from semantic_kernel.functions import KernelArguments +if TYPE_CHECKING: + pass + ##################################################################### # This sample demonstrates how to build a conversational chatbot # # using Semantic Kernel, featuring auto function calling, # @@ -57,13 +61,12 @@ # - Services.OLLAMA # - Services.ONNX # - Services.VERTEX_AI -# - Services.DEEPSEEK # Please make sure you have configured your environment correctly for the selected chat completion service. chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.AZURE_OPENAI) # Configure the function choice behavior. Here, we set it to Auto, where auto_invoke=True by default. # With `auto_invoke=True`, the model will automatically choose and call functions as needed. -request_settings.function_choice_behavior = FunctionChoiceBehavior.Auto(filters={"excluded_plugins": ["ChatBot"]}) +request_settings.function_choice_behavior = FunctionChoiceBehavior.Auto() kernel.add_service(chat_completion_service) diff --git a/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling_streaming.py b/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling_streaming.py index 3b028d329ae9..f7aa767ffa23 100644 --- a/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling_streaming.py +++ b/python/samples/concepts/auto_function_calling/chat_completion_with_auto_function_calling_streaming.py @@ -59,7 +59,6 @@ # - Services.OLLAMA # - Services.ONNX # - Services.VERTEX_AI -# - Services.DEEPSEEK # Please make sure you have configured your environment correctly for the selected chat completion service. chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.AZURE_OPENAI) diff --git a/python/samples/concepts/auto_function_calling/chat_completion_with_manual_function_calling.py b/python/samples/concepts/auto_function_calling/chat_completion_with_manual_function_calling.py index 5fcfcd37ac76..162c415c4a64 100644 --- a/python/samples/concepts/auto_function_calling/chat_completion_with_manual_function_calling.py +++ b/python/samples/concepts/auto_function_calling/chat_completion_with_manual_function_calling.py @@ -64,7 +64,6 @@ # - Services.OLLAMA # - Services.ONNX # - Services.VERTEX_AI -# - Services.DEEPSEEK # Please make sure you have configured your environment correctly for the selected chat completion service. 
chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.AZURE_OPENAI) diff --git a/python/samples/concepts/auto_function_calling/function_calling_with_required_type.py b/python/samples/concepts/auto_function_calling/function_calling_with_required_type.py index a54407e3769d..26697141ab17 100644 --- a/python/samples/concepts/auto_function_calling/function_calling_with_required_type.py +++ b/python/samples/concepts/auto_function_calling/function_calling_with_required_type.py @@ -50,7 +50,7 @@ # This concept example shows how to handle both streaming and non-streaming responses # To toggle the behavior, set the following flag accordingly: -stream = False +stream = True kernel = Kernel() @@ -84,7 +84,6 @@ temperature=0.7, top_p=0.8, function_choice_behavior=FunctionChoiceBehavior.Required( - auto_invoke=False, filters={"included_functions": ["time-time", "time-date"]}, ), ) diff --git a/python/samples/concepts/caching/semantic_caching.py b/python/samples/concepts/caching/semantic_caching.py deleted file mode 100644 index 786992888817..000000000000 --- a/python/samples/concepts/caching/semantic_caching.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import time -from collections.abc import Awaitable, Callable -from dataclasses import dataclass, field -from typing import Annotated -from uuid import uuid4 - -from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import EmbeddingGeneratorBase -from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion -from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding import OpenAITextEmbedding -from semantic_kernel.connectors.memory.in_memory.in_memory_store import InMemoryVectorStore -from semantic_kernel.data.record_definition import vectorstoremodel -from semantic_kernel.data.record_definition.vector_store_record_fields import ( - VectorStoreRecordDataField, - VectorStoreRecordKeyField, - VectorStoreRecordVectorField, -) -from semantic_kernel.data.vector_search.vector_search_options import VectorSearchOptions -from semantic_kernel.data.vector_search.vectorized_search import VectorizedSearchMixin -from semantic_kernel.data.vector_storage.vector_store import VectorStore -from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection -from semantic_kernel.filters.filter_types import FilterTypes -from semantic_kernel.filters.functions.function_invocation_context import FunctionInvocationContext -from semantic_kernel.filters.prompts.prompt_render_context import PromptRenderContext -from semantic_kernel.functions.function_result import FunctionResult -from semantic_kernel.kernel import Kernel - -COLLECTION_NAME = "llm_responses" -RECORD_ID_KEY = "cache_record_id" - - -# Define a simple data model to store, the prompt, the result, and the prompt embedding. -@vectorstoremodel -@dataclass -class CacheRecord: - prompt: Annotated[str, VectorStoreRecordDataField(embedding_property_name="prompt_embedding")] - result: Annotated[str, VectorStoreRecordDataField(is_full_text_searchable=True)] - prompt_embedding: Annotated[list[float], VectorStoreRecordVectorField(dimensions=1536)] = field( - default_factory=list - ) - id: Annotated[str, VectorStoreRecordKeyField] = field(default_factory=lambda: str(uuid4())) - - -# Define the filters, one for caching the results and one for using the cache. 
-class PromptCacheFilter: - """A filter to cache the results of the prompt rendering and function invocation.""" - - def __init__( - self, - embedding_service: EmbeddingGeneratorBase, - vector_store: VectorStore, - collection_name: str = COLLECTION_NAME, - score_threshold: float = 0.2, - ): - self.embedding_service = embedding_service - self.vector_store = vector_store - self.collection: VectorStoreRecordCollection[str, CacheRecord] = vector_store.get_collection( - collection_name, data_model_type=CacheRecord - ) - self.score_threshold = score_threshold - - async def on_prompt_render( - self, context: PromptRenderContext, next: Callable[[PromptRenderContext], Awaitable[None]] - ): - """Filter to cache the rendered prompt and the result of the function. - - It uses the score threshold to determine if the result should be cached. - The direction of the comparison is based on the default distance metric for - the in memory vector store, which is cosine distance, so the closer to 0 the - closer the match. - """ - await next(context) - assert context.rendered_prompt # nosec - prompt_embedding = await self.embedding_service.generate_raw_embeddings([context.rendered_prompt]) - await self.collection.create_collection_if_not_exists() - assert isinstance(self.collection, VectorizedSearchMixin) # nosec - results = await self.collection.vectorized_search( - vector=prompt_embedding[0], options=VectorSearchOptions(vector_field_name="prompt_embedding", top=1) - ) - async for result in results.results: - if result.score < self.score_threshold: - context.function_result = FunctionResult( - function=context.function.metadata, - value=result.record.result, - rendered_prompt=context.rendered_prompt, - metadata={RECORD_ID_KEY: result.record.id}, - ) - - async def on_function_invocation( - self, context: FunctionInvocationContext, next: Callable[[FunctionInvocationContext], Awaitable[None]] - ): - """Filter to store the result in the cache if it is new.""" - await next(context) - result = context.result - if result and result.rendered_prompt and RECORD_ID_KEY not in result.metadata: - prompt_embedding = await self.embedding_service.generate_embeddings([result.rendered_prompt]) - cache_record = CacheRecord( - prompt=result.rendered_prompt, - result=str(result), - prompt_embedding=prompt_embedding[0], - ) - await self.collection.create_collection_if_not_exists() - await self.collection.upsert(cache_record) - - -async def execute_async(kernel: Kernel, title: str, prompt: str): - """Helper method to execute and log time.""" - print(f"{title}: {prompt}") - start = time.time() - result = await kernel.invoke_prompt(prompt) - elapsed = time.time() - start - print(f"\tElapsed Time: {elapsed:.3f}") - return result - - -async def main(): - # create the kernel and add the chat service and the embedding service - kernel = Kernel() - chat = OpenAIChatCompletion(service_id="default") - embedding = OpenAITextEmbedding(service_id="embedder") - kernel.add_service(chat) - kernel.add_service(embedding) - # create the in-memory vector store - vector_store = InMemoryVectorStore() - # create the cache filter and add the filters to the kernel - cache = PromptCacheFilter(embedding_service=embedding, vector_store=vector_store) - kernel.add_filter(FilterTypes.PROMPT_RENDERING, cache.on_prompt_render) - kernel.add_filter(FilterTypes.FUNCTION_INVOCATION, cache.on_function_invocation) - - # Run the sample - print("\nIn-memory cache sample:") - r1 = await execute_async(kernel, "First run", "What's the tallest building in New York?") - 
print(f"\tResult 1: {r1}") - r2 = await execute_async(kernel, "Second run", "How are you today?") - print(f"\tResult 2: {r2}") - r3 = await execute_async(kernel, "Third run", "What is the highest building in New York City?") - print(f"\tResult 3: {r3}") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/chat_completion/simple_chatbot.py b/python/samples/concepts/chat_completion/simple_chatbot.py index 11909e74c902..630bd75061f2 100644 --- a/python/samples/concepts/chat_completion/simple_chatbot.py +++ b/python/samples/concepts/chat_completion/simple_chatbot.py @@ -2,7 +2,10 @@ import asyncio -from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings +from samples.concepts.setup.chat_completion_services import ( + Services, + get_chat_completion_service_and_request_settings, +) from semantic_kernel.contents import ChatHistory # This sample shows how to create a chatbot. This sample uses the following two main components: @@ -22,7 +25,6 @@ # - Services.OLLAMA # - Services.ONNX # - Services.VERTEX_AI -# - Services.DEEPSEEK # Please make sure you have configured your environment correctly for the selected chat completion service. chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.OPENAI) diff --git a/python/samples/concepts/chat_completion/simple_chatbot_kernel_function.py b/python/samples/concepts/chat_completion/simple_chatbot_kernel_function.py index 5b9738ce4471..6ed249276c08 100644 --- a/python/samples/concepts/chat_completion/simple_chatbot_kernel_function.py +++ b/python/samples/concepts/chat_completion/simple_chatbot_kernel_function.py @@ -2,7 +2,10 @@ import asyncio -from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings +from samples.concepts.setup.chat_completion_services import ( + Services, + get_chat_completion_service_and_request_settings, +) from semantic_kernel import Kernel from semantic_kernel.contents import ChatHistory from semantic_kernel.functions import KernelArguments @@ -30,7 +33,6 @@ # - Services.OLLAMA # - Services.ONNX # - Services.VERTEX_AI -# - Services.DEEPSEEK # Please make sure you have configured your environment correctly for the selected chat completion service. chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.AZURE_OPENAI) diff --git a/python/samples/concepts/chat_completion/simple_chatbot_streaming.py b/python/samples/concepts/chat_completion/simple_chatbot_streaming.py index 9086aeb7f17e..b513aeeb408d 100644 --- a/python/samples/concepts/chat_completion/simple_chatbot_streaming.py +++ b/python/samples/concepts/chat_completion/simple_chatbot_streaming.py @@ -2,7 +2,10 @@ import asyncio -from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings +from samples.concepts.setup.chat_completion_services import ( + Services, + get_chat_completion_service_and_request_settings, +) from semantic_kernel.contents import ChatHistory, StreamingChatMessageContent # This sample shows how to create a chatbot that streams responses. @@ -23,7 +26,6 @@ # - Services.OLLAMA # - Services.ONNX # - Services.VERTEX_AI -# - Services.DEEPSEEK # Please make sure you have configured your environment correctly for the selected chat completion service. # Please note that not all models support streaming responses. 
Make sure to select a model that supports streaming. chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.AZURE_OPENAI) diff --git a/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer.py b/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer.py index 838d90ac18ab..338c76519b0e 100644 --- a/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer.py +++ b/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer.py @@ -27,9 +27,6 @@ # The purpose of this sample is to demonstrate how to use a kernel function and use a chat history reducer. # To build a basic chatbot, it is sufficient to use a ChatCompletionService with a chat history directly. -# Toggle this flag to view the chat history summary after a reduction was performed. -view_chat_history_summary_after_reduction = True - # You can select from the following chat completion services: # - Services.OPENAI # - Services.AZURE_OPENAI @@ -125,8 +122,7 @@ async def chat() -> bool: print("\n\nExiting chat...") return False - if is_reduced := await summarization_reducer.reduce(): - print(f"@ History reduced to {len(summarization_reducer.messages)} messages.") + await summarization_reducer.reduce() kernel_arguments = KernelArguments( settings=request_settings, @@ -140,15 +136,6 @@ async def chat() -> bool: summarization_reducer.add_user_message(user_input) summarization_reducer.add_message(answer.value[0]) - if view_chat_history_summary_after_reduction and is_reduced: - for msg in summarization_reducer.messages: - if msg.metadata and msg.metadata.get("__summary__"): - print("*" * 60) - print(f"Chat History Reduction Summary: {msg.content}") - print("*" * 60) - break - print("\n") - return True diff --git a/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer_keep_func_content.py b/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer_keep_func_content.py index 591bbec053b8..b5d0eae75d24 100644 --- a/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer_keep_func_content.py +++ b/python/samples/concepts/chat_completion/simple_chatbot_with_summary_history_reducer_keep_func_content.py @@ -32,9 +32,6 @@ # The purpose of this sample is to demonstrate how to use a kernel function and use a chat history reducer. # To build a basic chatbot, it is sufficient to use a ChatCompletionService with a chat history directly. -# Toggle this flag to view the chat history summary after a reduction was performed. 
-view_chat_history_summary_after_reduction = True - # You can select from the following chat completion services: # - Services.OPENAI # - Services.AZURE_OPENAI @@ -139,8 +136,7 @@ async def chat() -> bool: print("\n\nExiting chat...") return False - if is_reduced := await summarization_reducer.reduce(): - print(f"@ History reduced to {len(summarization_reducer.messages)} messages.") + await summarization_reducer.reduce() kernel_arguments = KernelArguments( settings=request_settings, @@ -173,26 +169,17 @@ async def chat() -> bool: frc.append(item) for i, item in enumerate(fcc): - summarization_reducer.add_assistant_message([item]) + summarization_reducer.add_assistant_message_list([item]) processed_fccs.add(item.id) # Safely check if there's a matching FunctionResultContent if i < len(frc): assert fcc[i].id == frc[i].id # nosec - summarization_reducer.add_tool_message([frc[i]]) + summarization_reducer.add_tool_message_list([frc[i]]) processed_frcs.add(item.id) # Since this example is showing how to include FunctionCallContent and FunctionResultContent # in the summary, we need to add them to the chat history and also to the processed sets. - if view_chat_history_summary_after_reduction and is_reduced: - for msg in summarization_reducer.messages: - if msg.metadata and msg.metadata.get("__summary__"): - print("*" * 60) - print(f"Chat History Reduction Summary: {msg.content}") - print("*" * 60) - break - print("\n") - return True diff --git a/python/samples/concepts/chat_history/README.md b/python/samples/concepts/chat_history/README.md deleted file mode 100644 index 1fada334e7c5..000000000000 --- a/python/samples/concepts/chat_history/README.md +++ /dev/null @@ -1,17 +0,0 @@ -# Chat History manipulation samples - -This folder contains samples that demonstrate how to manipulate chat history in Semantic Kernel. - -## [Serialize Chat History](./serialize_chat_history.py) - -This sample demonstrates how to build a conversational chatbot using Semantic Kernel, it features auto function calling, but with file-based serialization of the chat history. This sample stores and reads the chat history at every turn. This is not the best way to do it, but clearly demonstrates the mechanics. - -To run this sample a environment with keys for the chosen chat service is required. In line 61 you can change the model used. This sample uses a temporary file to store the chat history, so no additional setup is required. - -## [Store Chat History in Cosmos DB](./store_chat_history_in_cosmosdb.py) - -This a more complex version of the sample above, it uses Azure CosmosDB NoSQL to store the chat messages. - -In order to do that a simple datamodel is defined. And then a class is created that extends ChatHistory, this class adds `store` and `read` methods, as well as a `create_collection` method that creates a collection in CosmosDB. - -This samples further uses the same chat service setup as the sample above, so the keys and other parameters for the chosen model should be in the environment. Next to that a AZURE_COSMOS_DB_NO_SQL_URL and optionally a AZURE_COSMOS_DB_NO_SQL_KEY should be set in the environment, you can also rely on Entra ID Auth instead of the key. The database name can also be put in the environment. 
diff --git a/python/samples/concepts/chat_history/serialize_chat_history.py b/python/samples/concepts/chat_history/serialize_chat_history.py index f6c04bbd00c0..331669be0906 100644 --- a/python/samples/concepts/chat_history/serialize_chat_history.py +++ b/python/samples/concepts/chat_history/serialize_chat_history.py @@ -1,112 +1,94 @@ # Copyright (c) Microsoft. All rights reserved. import asyncio -import tempfile - -from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings +import os +from typing import TYPE_CHECKING + +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( + AzureChatPromptExecutionSettings, +) +from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion from semantic_kernel.contents import ChatHistory - -""" -This sample demonstrates how to build a conversational chatbot -using Semantic Kernel, it features auto function calling, -but with file-based serialization of the chat history. -This sample stores and reads the chat history at every turn. -This is not the best way to do it, but clearly demonstrates the mechanics. -More optimal would for instance be to only write once when a conversation is done. -And writing to something other then a file is also usually better. +from semantic_kernel.core_plugins.math_plugin import MathPlugin +from semantic_kernel.core_plugins.time_plugin import TimePlugin +from semantic_kernel.functions import KernelArguments + +if TYPE_CHECKING: + pass + + +system_message = """ +You are a chat bot. Your name is Mosscap and +you have one goal: figure out what people need. +Your full name, should you need to know it, is +Splendid Speckled Mosscap. You communicate +effectively, but you tend to answer with long +flowery prose. You are also a math wizard, +especially for adding and subtracting. +You also excel at joke telling, where your tone is often sarcastic. +Once you have the answer I am looking for, +you will return a full answer to me as soon as possible. """ +kernel = Kernel() + +# Note: the underlying gpt-35/gpt-4 model version needs to be at least version 0613 to support tools. +kernel.add_service(AzureChatCompletion(service_id="chat")) + +plugins_directory = os.path.join(__file__, "../../../../../prompt_template_samples/") +# adding plugins to the kernel +kernel.add_plugin(MathPlugin(), plugin_name="math") +kernel.add_plugin(TimePlugin(), plugin_name="time") + +# Enabling or disabling function calling is done by setting the `function_choice_behavior` attribute for the +# prompt execution settings. When the function_call parameter is set to "auto" the model will decide which +# function to use, if any. +# +# There are two ways to define the `function_choice_behavior` parameter: +# 1. Using the type string as `"auto"`, `"required"`, or `"none"`. For example: +# configure `function_choice_behavior="auto"` parameter directly in the execution settings. +# 2. Using the FunctionChoiceBehavior class. For example: +# `function_choice_behavior=FunctionChoiceBehavior.Auto()`. +# Both of these configure the `auto` tool_choice and all of the available plugins/functions +# registered on the kernel. If you want to limit the available plugins/functions, you must +# configure the `filters` dictionary attribute for each type of function choice behavior. 
+# For example: +# +# from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior + +# function_choice_behavior = FunctionChoiceBehavior.Auto( +# filters={"included_functions": ["time-date", "time-time", "math-Add"]} +# ) +# +# The filters attribute allows you to specify either: `included_functions`, `excluded_functions`, +# `included_plugins`, or `excluded_plugins`. + +# Note: the number of responses for auto invoking tool calls is limited to 1. +# If configured to be greater than one, this value will be overridden to 1. +execution_settings = AzureChatPromptExecutionSettings( + service_id="chat", + max_tokens=2000, + temperature=0.7, + top_p=0.8, + function_choice_behavior=FunctionChoiceBehavior.Auto(), +) + +arguments = KernelArguments(settings=execution_settings) -# You can select from the following chat completion services that support function calling: -# - Services.OPENAI -# - Services.AZURE_OPENAI -# - Services.AZURE_AI_INFERENCE -# - Services.ANTHROPIC -# - Services.BEDROCK -# - Services.GOOGLE_AI -# - Services.MISTRAL_AI -# - Services.OLLAMA -# - Services.ONNX -# - Services.VERTEX_AI -# - Services.DEEPSEEK -# Please make sure you have configured your environment correctly for the selected chat completion service. -chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.OPENAI) - - -async def chat(file) -> bool: - """ - Continuously prompt the user for input and show the assistant's response. - Type 'exit' to exit. - """ - try: - # Try to load the chat history from a file. - history = ChatHistory.load_chat_history_from_file(file_path=file) - print(f"Chat history successfully loaded {len(history.messages)} messages.") - except Exception: - # Create a new chat history to store the system message, initial messages, and the conversation. - print("Chat history file not found. Starting a new conversation.") - history = ChatHistory() - history.add_system_message( - "You are a chat bot. Your name is Mosscap and you have one goal: figure out what people need." - ) - - try: - # Get the user input - user_input = input("User:> ") - except (KeyboardInterrupt, EOFError): - print("\n\nExiting chat...") - return False - - if user_input.lower().strip() == "exit": - print("\n\nExiting chat...") - return False - - # Add the user input to the chat history - history.add_user_message(user_input) - # Get a response from the chat completion service - result = await chat_completion_service.get_chat_message_content(history, request_settings) - - # Update the chat history with the user's input and the assistant's response - if result: - print(f"Mosscap:> {result}") - history.add_message(result) - - # Save the chat history to a file. - print(f"Saving {len(history.messages)} messages to the file.") - history.store_chat_history_to_file(file_path=file) - return True +async def main() -> None: + user_input = "What is the current hour plus 10?" + print(f"User:> {user_input}") -""" -Sample output: - -Welcome to the chat bot! - Type 'exit' to exit. - Try a math question to see function calling in action (e.g. 'what is 3+3?'). - Your chat history will be saved in: /tmpq1n1f6qk.json -Chat history file not found. Starting a new conversation. -User:> Hello, how are you? -Mosscap:> Hello! I'm here and ready to help. What do you need today? -Saving 3 messages to the file. -Chat history successfully loaded 3 messages. 
-User:> exit -""" + result = await kernel.invoke_prompt(prompt=user_input, arguments=arguments) + print(f"Mosscap:> {result}") -async def main() -> None: - chatting = True - with tempfile.NamedTemporaryFile(mode="w+", dir=".", suffix=".json", delete=True) as file: - print( - "Welcome to the chat bot!\n" - " Type 'exit' to exit.\n" - " Try a math question to see function calling in action (e.g. 'what is 3+3?')." - f" Your chat history will be saved in: {file.name}" - ) - try: - while chatting: - chatting = await chat(file.name) - except Exception: - print("Closing and removing the file.") + print("\nChat history:") + chat_history: ChatHistory = result.metadata["messages"] + print(chat_history.serialize()) if __name__ == "__main__": diff --git a/python/samples/concepts/chat_history/store_chat_history_in_cosmosdb.py b/python/samples/concepts/chat_history/store_chat_history_in_cosmosdb.py deleted file mode 100644 index e6a68d354e3d..000000000000 --- a/python/samples/concepts/chat_history/store_chat_history_in_cosmosdb.py +++ /dev/null @@ -1,199 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -from dataclasses import dataclass -from typing import Annotated - -from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior -from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_no_sql_store import AzureCosmosDBNoSQLStore -from semantic_kernel.contents import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.core_plugins.math_plugin import MathPlugin -from semantic_kernel.core_plugins.time_plugin import TimePlugin -from semantic_kernel.data.record_definition.vector_store_model_decorator import vectorstoremodel -from semantic_kernel.data.record_definition.vector_store_record_fields import ( - VectorStoreRecordDataField, - VectorStoreRecordKeyField, -) -from semantic_kernel.data.vector_storage.vector_store import VectorStore -from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection - -""" -This sample demonstrates how to build a conversational chatbot -using Semantic Kernel, it features auto function calling, -but with Azure CosmosDB as storage for the chat history. -This sample stores and reads the chat history at every turn. -This is not the best way to do it, but clearly demonstrates the mechanics. - -Further refinement would be to only write once when a conversation is done. -And there is also no logic to see if there is something to write. -You could also enhance the ChatHistoryModel with a summary and a vector for that -in order to search for similar conversations. -""" - - -# 1. We first create simple datamodel for the chat history. -# Note that this model does not contain any vectors, -# those can be added, for instance to store a summary of the conversation. -@vectorstoremodel -@dataclass -class ChatHistoryModel: - session_id: Annotated[str, VectorStoreRecordKeyField] - user_id: Annotated[str, VectorStoreRecordDataField(is_filterable=True)] - messages: Annotated[list[dict[str, str]], VectorStoreRecordDataField(is_filterable=True)] - - -# 2. We then create a class that extends the ChatHistory class -# and implements the methods to store and read the chat history. 
-# This could also use one of the history reducers to make -# sure the database doesn't grow too large. -# It adds a `store` attribute and a couple of methods. -class ChatHistoryInCosmosDB(ChatHistory): - """This class extends the ChatHistory class to store the chat history in a Cosmos DB.""" - - session_id: str - user_id: str - store: VectorStore - collection: VectorStoreRecordCollection[str, ChatHistoryModel] | None = None - - async def create_collection(self, collection_name: str) -> None: - """Create a collection with the inbuild data model using the vector store. - - First create the collection, then call this method to create the collection itself. - """ - self.collection = self.store.get_collection( - collection_name=collection_name, - data_model_type=ChatHistoryModel, - ) - await self.collection.create_collection_if_not_exists() - - async def store_messages(self) -> None: - """Store the chat history in the Cosmos DB. - - Note that we use model_dump to convert the chat message content into a serializable format. - """ - if self.collection: - await self.collection.upsert( - ChatHistoryModel( - session_id=self.session_id, - user_id=self.user_id, - messages=[msg.model_dump() for msg in self.messages], - ) - ) - - async def read_messages(self) -> None: - """Read the chat history from the Cosmos DB. - - Note that we use the model_validate method to convert the serializable format back into a ChatMessageContent. - """ - if self.collection: - record = await self.collection.get(self.session_id) - if record: - for message in record.messages: - self.messages.append(ChatMessageContent.model_validate(message)) - - -# 3. We now create a fairly standard kernel, with functions and a chat service. -# Create and configure the kernel. -kernel = Kernel() - -# Load some sample plugins (for demonstration of function calling). -kernel.add_plugin(MathPlugin(), plugin_name="math") -kernel.add_plugin(TimePlugin(), plugin_name="time") - -# You can select from the following chat completion services that support function calling: -# - Services.OPENAI -# - Services.AZURE_OPENAI -# - Services.AZURE_AI_INFERENCE -# - Services.ANTHROPIC -# - Services.BEDROCK -# - Services.GOOGLE_AI -# - Services.MISTRAL_AI -# - Services.OLLAMA -# - Services.ONNX -# - Services.VERTEX_AI -# - Services.DEEPSEEK -# Please make sure you have configured your environment correctly for the selected chat completion service. -chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.AZURE_OPENAI) - -# Configure the function choice behavior. Here, we set it to Auto, where auto_invoke=True by default. -# With `auto_invoke=True`, the model will automatically choose and call functions as needed. -request_settings.function_choice_behavior = FunctionChoiceBehavior.Auto(filters={"excluded_plugins": ["ChatBot"]}) - -kernel.add_service(chat_completion_service) - - -# 4. The main chat loop, which takes a history object and prompts the user for input. -# It then adds the user input to the history and gets a response from the chat completion service. -# Finally, it prints the response and saves the chat history to the Cosmos DB. -async def chat(history: ChatHistoryInCosmosDB) -> bool: - """ - Continuously prompt the user for input and show the assistant's response. - Type 'exit' to exit. 
- """ - await history.read_messages() - print(f"Chat history successfully loaded {len(history.messages)} messages.") - if len(history.messages) == 0: - # if it is a new conversation, add the system message and a couple of initial messages. - history.add_system_message( - "You are a chat bot. Your name is Mosscap and you have one goal: figure out what people need." - ) - history.add_user_message("Hi there, who are you?") - history.add_assistant_message("I am Mosscap, a chat bot. I'm trying to figure out what people need.") - - try: - user_input = input("User:> ") - except (KeyboardInterrupt, EOFError): - print("\n\nExiting chat...") - return False - - if user_input.lower().strip() == "exit": - print("\n\nExiting chat...") - return False - - # add the user input to the chat history - history.add_user_message(user_input) - - result = await chat_completion_service.get_chat_message_content(history, request_settings, kernel=kernel) - - if result: - print(f"Mosscap:> {result}") - history.add_message(result) - - # Save the chat history to CosmosDB. - print(f"Saving {len(history.messages)} messages to AzureCosmosDB.") - await history.store_messages() - return True - - -async def main() -> None: - delete_when_done = True - session_id = "session1" - chatting = True - # 5. We now create the store, ChatHistory and collection and start the chat loop. - - # First we enter the store context manager to connect. - # The create_database flag will create the database if it does not exist. - async with AzureCosmosDBNoSQLStore(create_database=True) as store: - # Then we create the chat history in CosmosDB. - history = ChatHistoryInCosmosDB(store=store, session_id=session_id, user_id="user") - # Finally we create the collection. - await history.create_collection(collection_name="chat_history") - print( - "Welcome to the chat bot!\n" - " Type 'exit' to exit.\n" - " Try a math question to see function calling in action (e.g. 'what is 3+3?')." - ) - try: - while chatting: - chatting = await chat(history) - except Exception: - print("Closing chat...") - if delete_when_done and history.collection: - await history.collection.delete_collection() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/filtering/auto_function_invoke_filters.py b/python/samples/concepts/filtering/auto_function_invoke_filters.py index 008150af011d..b1e055e9397d 100644 --- a/python/samples/concepts/filtering/auto_function_invoke_filters.py +++ b/python/samples/concepts/filtering/auto_function_invoke_filters.py @@ -1,6 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. import asyncio +import os from semantic_kernel import Kernel from semantic_kernel.connectors.ai import FunctionChoiceBehavior @@ -28,7 +29,10 @@ # Note: the underlying gpt-35/gpt-4 model version needs to be at least version 0613 to support tools. kernel.add_service(OpenAIChatCompletion(service_id="chat")) +plugins_directory = os.path.join(__file__, "../../../../../prompt_template_samples/") # adding plugins to the kernel +# the joke plugin in the FunPlugins is a semantic plugin and has the function calling disabled. +# kernel.import_plugin_from_prompt_directory("chat", plugins_directory, "FunPlugin") # the math plugin is a core plugin and has the function calling enabled. 
kernel.add_plugin(MathPlugin(), plugin_name="math") kernel.add_plugin(TimePlugin(), plugin_name="time") diff --git a/python/samples/concepts/filtering/function_invocation_filters_stream.py b/python/samples/concepts/filtering/function_invocation_filters_stream.py index 0f0b58208f5d..74948472ac49 100644 --- a/python/samples/concepts/filtering/function_invocation_filters_stream.py +++ b/python/samples/concepts/filtering/function_invocation_filters_stream.py @@ -4,6 +4,7 @@ import logging import os from collections.abc import Callable, Coroutine +from functools import reduce from typing import Any from semantic_kernel import Kernel @@ -37,21 +38,17 @@ async def streaming_exception_handling( ): await next(context) - if context.is_streaming: + async def override_stream(stream): + try: + async for partial in stream: + yield partial + except Exception as e: + yield [ + StreamingChatMessageContent(role=AuthorRole.ASSISTANT, content=f"Exception caught: {e}", choice_index=0) + ] - async def override_stream(stream): - try: - async for partial in stream: - yield partial - except Exception as e: - yield [ - StreamingChatMessageContent( - role=AuthorRole.ASSISTANT, content=f"Exception caught: {e}", choice_index=0 - ) - ] - - stream = context.result.value - context.result = FunctionResult(function=context.result.function, value=override_stream(stream)) + stream = context.result.value + context.result = FunctionResult(function=context.result.function, value=override_stream(stream)) async def chat(chat_history: ChatHistory) -> bool: @@ -80,7 +77,7 @@ async def chat(chat_history: ChatHistory) -> bool: print("") chat_history.add_user_message(user_input) if streamed_chunks: - streaming_chat_message = sum(streamed_chunks[1:], streamed_chunks[0]) + streaming_chat_message = reduce(lambda first, second: first + second, streamed_chunks) chat_history.add_message(streaming_chat_message) return True diff --git a/python/samples/concepts/filtering/retry_with_different_model.py b/python/samples/concepts/filtering/retry_with_different_model.py deleted file mode 100644 index e76f82ce7c7f..000000000000 --- a/python/samples/concepts/filtering/retry_with_different_model.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import logging -from collections.abc import Awaitable, Callable - -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( - OpenAIChatPromptExecutionSettings, -) -from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion -from semantic_kernel.filters import FunctionInvocationContext -from semantic_kernel.filters.filter_types import FilterTypes -from semantic_kernel.functions.kernel_arguments import KernelArguments - -# This sample shows how to use a filter to use a fallback service if the default service fails to execute the function. -# this works by replacing the settings that point to the default service -# with the settings that point to the fallback service -# after the default service fails to execute the function. 
- -logger = logging.getLogger(__name__) - - -class RetryFilter: - """A filter that retries the function invocation with a different model if it fails.""" - - def __init__(self, default_service_id: str, fallback_service_id: str): - """Initialize the filter with the default and fallback service ids.""" - self.default_service_id = default_service_id - self.fallback_service_id = fallback_service_id - - async def retry_filter( - self, - context: FunctionInvocationContext, - next: Callable[[FunctionInvocationContext], Awaitable[None]], - ) -> None: - """A filter that retries the function invocation with a different model if it fails.""" - try: - # try the default function - await next(context) - except Exception as ex: - print("Expected failure to execute the function: ", ex) - # if the default function fails, try the fallback function - if ( - context.arguments - and context.arguments.execution_settings - and self.default_service_id in context.arguments.execution_settings - ): - # get the settings for the default service - settings = context.arguments.execution_settings.pop(self.default_service_id) - settings.service_id = self.fallback_service_id - # add them back with the right service id - context.arguments.execution_settings[self.fallback_service_id] = settings - # try again! - await next(context) - else: - raise ex - - -async def main() -> None: - # set the ids for the default and fallback services - default_service_id = "default_service" - fallback_service_id = "fallback_service" - kernel = Kernel() - # create the filter with the ids - retry_filter = RetryFilter(default_service_id=default_service_id, fallback_service_id=fallback_service_id) - # add the filter to the kernel - kernel.add_filter(FilterTypes.FUNCTION_INVOCATION, retry_filter.retry_filter) - - # add the default and fallback services - default_service = OpenAIChatCompletion(service_id=default_service_id, api_key="invalid_key") - kernel.add_service(default_service) - fallback_service = OpenAIChatCompletion(service_id=fallback_service_id) - kernel.add_service(fallback_service) - - # create the settings for the request - request_settings = OpenAIChatPromptExecutionSettings(service_id=default_service_id) - # invoke a simple prompt function - response = await kernel.invoke_prompt( - function_name="retry_function", - prompt="How are you today?", - arguments=KernelArguments(settings=request_settings), - ) - - print("Model response: ", response) - - # Sample output: - # Expected failure to execute the function: Error occurred while invoking function retry_function: - # (" service - # failed to complete the prompt", AuthenticationError("Error code: 401 - {'error': {'message': 'Incorrect API key - # provided: invalid_key. You can find your API key at https://platform.openai.com/account/api-keys.', 'type': - # 'invalid_request_error', 'param': None, 'code': 'invalid_api_key'}}")) - # Model response: I'm just a program, so I don't experience feelings, but I'm here and ready to help you out. - # How can I assist you today? 
- - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/filtering/retry_with_filters.py b/python/samples/concepts/filtering/retry_with_filters.py index e488a4aa5aa1..92131ad1d292 100644 --- a/python/samples/concepts/filtering/retry_with_filters.py +++ b/python/samples/concepts/filtering/retry_with_filters.py @@ -2,7 +2,8 @@ import asyncio import logging -from collections.abc import Awaitable, Callable +from collections.abc import Callable, Coroutine +from typing import Any from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings from semantic_kernel import Kernel @@ -33,7 +34,7 @@ def __init__(self): self._invocation_count = 0 @kernel_function(name="GetWeather", description="Get the weather of the day at the current location.") - def get_weather(self) -> str: + def get_wather(self) -> str: """Get the weather of the day at the current location. Simulates a call to an external service to get the weather. @@ -49,7 +50,7 @@ def get_weather(self) -> str: async def retry_filter( context: FunctionInvocationContext, - next: Callable[[FunctionInvocationContext], Awaitable[None]], + next: Callable[[FunctionInvocationContext], Coroutine[Any, Any, None]], ) -> None: """A filter that retries the function invocation if it fails. diff --git a/python/samples/concepts/memory/azure_ai_search_hotel_samples/README.md b/python/samples/concepts/memory/azure_ai_search_hotel_samples/README.md deleted file mode 100644 index c2d75ca0ca80..000000000000 --- a/python/samples/concepts/memory/azure_ai_search_hotel_samples/README.md +++ /dev/null @@ -1,72 +0,0 @@ -## Azure AI Search with Hotel Sample Data - -This guide walks you through setting up your Azure AI Search Service with the correct index, data source, and indexer to run the hotel sample. - -### Setting Up the Azure AI Search Service - -1. **Import the Sample Data** - - Navigate to the **Search Service Overview** page and click **Import Data**. - - From the dropdown, select **Samples**, then choose **hotels-sample**. - - Click **Next: Add Cognitive Skills (Optional)**. - -2. **Skip the Cognitive Skills Page** - - No changes are needed here. Click **Next** to proceed. - -3. **Configure the Index Fields** - - The Python sample uses **snake_case** field names. Update the default field names accordingly. - - Since `HotelId` is the primary key, you cannot rename it directly. Instead, create a new field: - - Click **+ Add Field** and name it `hotel_id`. - - Enable **Retrievable**, **Filterable**, **Facetable**, and **Searchable** options. - - Rename other fields to snake case: - - `HotelName` → `hotel_name` - - There may be a current issue with index config that has trouble mapping the `HotelName` -> `hotel_name`, so as to not hit issues - deselect `retrievable` for `hotel_name`. It should still be `searchable`. - - Use the dropdown to rename complex fields like `Address` -> `address` and `Rooms` -> `rooms` with their sub-fields renamed. - - Add two new vector fields: - - `description_vector` - - `description_fr_vector` - - Configure these fields as: - - **Type**: `Collection(Edm.Single)` (for vector fields) - - **Retrievable**: Enabled (default setting) - - Click the **three dots (...)** on the right, then **Configure vector field**: - - Set **Dimensions** to `1536`. - - If no vector search profiles exist, click **Create**. - - Under **Algorithms**, click **Create** to set up a vector algorithm (default values are fine). 
- - If no vectorizer exists, create one: - - Select the **Kind** (e.g., Azure OpenAI). - - Choose your **subscription, Azure OpenAI service, and model deployment**. - - Select your **authentication type**. - - Repeat this process for both `description_vector` and `description_fr_vector`. - -4. **Create an Indexer** - - On the next page, create an indexer with **default settings**, as the sample data is static. - - Click **Submit** to start the indexer. - - The indexing process may take a few minutes. - -### Generating Vectors on First Run - -In the `step_1_interact_with_the_collection.py` script: -- Set `first_run = True` to generate vectors for all entries in the index. -- This process may take a few minutes. - -### Using Precomputed Vectors for Subsequent Runs - -If your index already contains vectors: -- Set `first_run = False` to skip vector generation and perform only text and vector searches. - -### Example Search Results - -After running `step_1_interact_with_the_collection.py` you should see output similar to: - -#### **Text Search Results** -```text -Search results using text: - eitRUkFJSmFmWG93QUFBQUFBQUFBQT090 (in Nashville, USA): All of the suites feature full-sized kitchens stocked with cookware, separate living and sleeping areas and sofa beds. Some of the larger rooms have fireplaces and patios or balconies. Experience real country hospitality in the heart of bustling Nashville. The most vibrant music scene in the world is just outside your front door. (score: 7.613796) - eitRUkFJSmFmWG9jQUFBQUFBQUFBQT090 (in Sarasota, USA): The hotel is situated in a nineteenth century plaza, which has been expanded and renovated to the highest architectural standards to create a modern, functional and first-class hotel in which art and unique historical elements coexist with the most modern comforts. The hotel also regularly hosts events like wine tastings, beer dinners, and live music. (score: 6.1204605) - eitRUkFJSmFmWG9SQUFBQUFBQUFBQT090 (in Durham, USA): Save up to 50% off traditional hotels. Free WiFi, great location near downtown, full kitchen, washer & dryer, 24/7 support, bowling alley, fitness center and more. (score: 6.0284567) - -Search results using vector: - eitRUkFJSmFmWG93QUFBQUFBQUFBQT090 (in Nashville, USA): All of the suites feature full-sized kitchens stocked with cookware, separate living and sleeping areas and sofa beds. Some of the larger rooms have fireplaces and patios or balconies. Experience real country hospitality in the heart of bustling Nashville. The most vibrant music scene in the world is just outside your front door. (score: 0.6944429) - eitRUkFJSmFmWG9SQUFBQUFBQUFBQT090 (in Durham, USA): Save up to 50% off traditional hotels. Free WiFi, great location near downtown, full kitchen, washer & dryer, 24/7 support, bowling alley, fitness center and more. (score: 0.6776492) - eitRUkFJSmFmWG9PQUFBQUFBQUFBQT090 (in San Diego, USA): Extend Your Stay. Affordable home away from home, with amenities like free Wi-Fi, full kitchen, and convenient laundry service. 
(score: 0.67669696)
-```
\ No newline at end of file
diff --git a/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_0_data_model.py b/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_0_data_model.py
index 4f22bbb4a25a..271b61a47061 100644
--- a/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_0_data_model.py
+++ b/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_0_data_model.py
@@ -21,7 +21,6 @@
 # This model adds vectors for the 2 descriptions in English and French.
 # Both are based on the 1536 dimensions of the OpenAI models.
 # You can adjust this at creation time and then make the change below as well.
-# Refer to the README for more information.
 ###

diff --git a/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_1_interact_with_the_collection.py b/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_1_interact_with_the_collection.py
index c602f24034ad..f110513d2ea8 100644
--- a/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_1_interact_with_the_collection.py
+++ b/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_1_interact_with_the_collection.py
@@ -25,9 +25,6 @@

 first_run = False

-# Note: you may need to update this `collection_name` depending upon how your index is named.
-COLLECTION_NAME = "hotels-sample-index"
-

 async def add_vectors(collection: AzureAISearchCollection, vectorizer: VectorStoreRecordUtils):
     """This is a simple function that uses the VectorStoreRecordUtils to add vectors to the records in the collection.
@@ -56,17 +53,15 @@ async def main(query: str, first_run: bool = False):
     vectorizer = VectorStoreRecordUtils(kernel)
     # Create the Azure AI Search collection
     collection = AzureAISearchCollection[HotelSampleClass](
-        collection_name=COLLECTION_NAME, data_model_type=HotelSampleClass
+        collection_name="hotels-sample-index", data_model_type=HotelSampleClass
     )
     # Check if the collection exists.
     if not await collection.does_collection_exist():
         raise ValueError(
             "Collection does not exist, please create using the "
             "Azure AI Search portal wizard -> Import Data -> Samples -> hotels-sample."
-            "During creation adapt the index schema to add the description_vector and description_fr_vector fields."
-            "You may need to rename other fields to match the data model."
+            "During creation adapt the schema to add the description_vector and description_fr_vector fields."
             "Then run this sample with `first_run=True` to add the vectors."
-            "Refer to the README for more information."
         )

     # If it is the first run and there are no vectors, add them.
diff --git a/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_2_use_as_a_plugin.py b/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_2_use_as_a_plugin.py
index 5814bc95f7d2..af27024542ec 100644
--- a/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_2_use_as_a_plugin.py
+++ b/python/samples/concepts/memory/azure_ai_search_hotel_samples/step_2_use_as_a_plugin.py
@@ -41,9 +41,6 @@
     KernelParameterMetadata,
 )

-# Note: you may need to update this `collection_name` depending upon how your index is named.
-COLLECTION_NAME = "hotels-sample-index"
-
 # Create Kernel and add both chat completion and text embeddings services.
 kernel = Kernel()
 service_id = "chat"
@@ -57,7 +54,7 @@
 # You can also choose to use the `from_vectorized_search` method to use vector search.
 # Or the `from_vectorizable_text_search` method if the collection is setup to vectorize incoming texts. 
text_search = VectorStoreTextSearch.from_vector_text_search( - AzureAISearchCollection[HotelSampleClass](collection_name=COLLECTION_NAME, data_model_type=HotelSampleClass) + AzureAISearchCollection[HotelSampleClass](collection_name="hotels-sample-index", data_model_type=HotelSampleClass) ) @@ -142,14 +139,7 @@ def update_options_search( type="str", is_required=True, type_object=str, - ), - KernelParameterMetadata( - name="hotel_name", - description="The name of the hotel.", - type="str", - type_object=str, - is_required=True, - ), + ) ], # it uses the default update options that will turn the hotel_id into a filter. ), diff --git a/python/samples/concepts/memory/azure_cognitive_search_memory.py b/python/samples/concepts/memory/azure_cognitive_search_memory.py new file mode 100644 index 000000000000..aaa7b3b0d213 --- /dev/null +++ b/python/samples/concepts/memory/azure_cognitive_search_memory.py @@ -0,0 +1,66 @@ +# Copyright (c) Microsoft. All rights reserved. + +##################################################### +# This sample should be considered obsolete, as we are moving things towards the new data model. +# Please check out the azure_ai_search_hotel_samples folder for the latest implementation. +##################################################### + +import asyncio + +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.open_ai import AzureTextCompletion, AzureTextEmbedding +from semantic_kernel.connectors.memory.azure_cognitive_search import AzureCognitiveSearchMemoryStore +from semantic_kernel.core_plugins import TextMemoryPlugin +from semantic_kernel.memory import SemanticTextMemory + +COLLECTION_NAME = "acs-index-sample" + + +async def populate_memory(memory: SemanticTextMemory) -> None: + # Add some documents to the ACS semantic memory + await memory.save_information(COLLECTION_NAME, id="info1", text="My name is Andrea") + await memory.save_information(COLLECTION_NAME, id="info2", text="I currently work as a tour guide") + await memory.save_information(COLLECTION_NAME, id="info3", text="I've been living in Seattle since 2005") + await memory.save_information( + COLLECTION_NAME, + id="info4", + text="I visited France and Italy five times since 2015", + ) + await memory.save_information(COLLECTION_NAME, id="info5", text="My family is from New York") + + +async def search_acs_memory_questions(memory: SemanticTextMemory) -> None: + questions = [ + "what's my name", + "where do I live?", + "where's my family from?", + "where have I traveled?", + "what do I do for work", + ] + + for question in questions: + print(f"Question: {question}") + result = await memory.search(COLLECTION_NAME, question) + print(f"Answer: {result[0].text}\n") + + +async def main() -> None: + kernel = Kernel() + + vector_size = 1536 + + # Setting up OpenAI services for text completion and text embedding + kernel.add_service(AzureTextCompletion(service_id="dv")) + async with AzureCognitiveSearchMemoryStore(vector_size=vector_size) as acs_connector: + memory = SemanticTextMemory(storage=acs_connector, embeddings_generator=AzureTextEmbedding(service_id="ada")) + kernel.add_plugin(TextMemoryPlugin(memory), "TextMemoryPlugin") + + print("Populating memory...") + await populate_memory(memory) + + print("Asking questions... 
(manually)") + await search_acs_memory_questions(memory) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/memory/memory.py b/python/samples/concepts/memory/memory.py new file mode 100644 index 000000000000..95b09bf0b7f3 --- /dev/null +++ b/python/samples/concepts/memory/memory.py @@ -0,0 +1,120 @@ +# Copyright (c) Microsoft. All rights reserved. + +##################################################### +# This sample should be considered obsolete, as we are moving things towards the new data model. +# Please check out the new_memory.py sample for the latest implementation. +##################################################### + +import asyncio + +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion, OpenAITextEmbedding +from semantic_kernel.core_plugins import TextMemoryPlugin +from semantic_kernel.functions import KernelFunction +from semantic_kernel.memory import SemanticTextMemory, VolatileMemoryStore +from semantic_kernel.prompt_template import PromptTemplateConfig + +collection_id = "generic" + + +async def populate_memory(memory: SemanticTextMemory) -> None: + # Add some documents to the semantic memory + await memory.save_information(collection=collection_id, id="info1", text="Your budget for 2024 is $100,000") + await memory.save_information(collection=collection_id, id="info2", text="Your savings from 2023 are $50,000") + await memory.save_information(collection=collection_id, id="info3", text="Your investments are $80,000") + + +async def search_memory_examples(memory: SemanticTextMemory) -> None: + questions = ["What is my budget for 2024?", "What are my savings from 2023?", "What are my investments?"] + + for question in questions: + print(f"Question: {question}") + result = await memory.search(collection_id, question) + print(f"Answer: {result[0].text}\n") + + +async def setup_chat_with_memory( + kernel: Kernel, + service_id: str, +) -> KernelFunction: + prompt = """ + ChatBot can have a conversation with you about any topic. + It can give explicit instructions or say 'I don't know' if + it does not have an answer. + + Information about me, from previous conversations: + - {{recall 'budget by year'}} What is my budget for 2024? + - {{recall 'savings from previous year'}} What are my savings from 2023? + - {{recall 'investments'}} What are my investments? 
+ + {{$request}} + """.strip() + + prompt_template_config = PromptTemplateConfig( + template=prompt, + execution_settings={service_id: kernel.get_prompt_execution_settings_from_service_id(service_id=service_id)}, + ) + + return kernel.add_function( + function_name="chat_with_memory", + plugin_name="TextMemoryPlugin", + prompt_template_config=prompt_template_config, + ) + + +async def chat(kernel: Kernel, chat_func: KernelFunction) -> bool: + try: + user_input = input("User:> ") + except KeyboardInterrupt: + print("\n\nExiting chat...") + return False + except EOFError: + print("\n\nExiting chat...") + return False + + if user_input == "exit": + print("\n\nExiting chat...") + return False + + answer = await kernel.invoke(chat_func, request=user_input) + + print(f"ChatBot:> {answer}") + return True + + +async def main() -> None: + kernel = Kernel() + + service_id = "chat-gpt" + kernel.add_service(OpenAIChatCompletion(service_id=service_id, ai_model_id="gpt-3.5-turbo")) + embedding_gen = OpenAITextEmbedding( + service_id="ada", + ai_model_id="text-embedding-ada-002", + ) + kernel.add_service(embedding_gen) + + memory = SemanticTextMemory(storage=VolatileMemoryStore(), embeddings_generator=embedding_gen) + kernel.add_plugin(TextMemoryPlugin(memory), "TextMemoryPlugin") + + print("Populating memory...") + await populate_memory(memory) + + print("Asking questions... (manually)") + await search_memory_examples(memory) + + print("Setting up a chat (with memory!)") + chat_func = await setup_chat_with_memory(kernel, service_id) + + print("Begin chatting (type 'exit' to exit):\n") + print( + "Welcome to the chat bot!\ + \n Type 'exit' to exit.\ + \n Try asking a question about your finances (i.e. \"talk to me about my finances\")." + ) + chatting = True + while chatting: + chatting = await chat(kernel, chat_func) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/memory/complex_memory.py b/python/samples/concepts/memory/new_memory.py similarity index 56% rename from python/samples/concepts/memory/complex_memory.py rename to python/samples/concepts/memory/new_memory.py index 423508388e9c..11f8d3b20b51 100644 --- a/python/samples/concepts/memory/complex_memory.py +++ b/python/samples/concepts/memory/new_memory.py @@ -4,13 +4,11 @@ import asyncio from collections.abc import Callable from dataclasses import dataclass, field -from typing import Annotated, Literal +from typing import Annotated from uuid import uuid4 import numpy as np -from samples.concepts.memory.utils import print_record -from samples.concepts.resources.utils import Colors, print_with_color from semantic_kernel import Kernel from semantic_kernel.connectors.ai.open_ai import ( AzureTextEmbedding, @@ -18,24 +16,20 @@ OpenAITextEmbedding, ) from semantic_kernel.connectors.memory.azure_ai_search import AzureAISearchCollection -from semantic_kernel.connectors.memory.azure_cosmos_db import ( - AzureCosmosDBforMongoDBCollection, - AzureCosmosDBNoSQLCollection, -) -from semantic_kernel.connectors.memory.chroma import ChromaCollection +from semantic_kernel.connectors.memory.azure_cosmos_db import AzureCosmosDBNoSQLCollection from semantic_kernel.connectors.memory.in_memory import InMemoryVectorCollection from semantic_kernel.connectors.memory.postgres import PostgresCollection from semantic_kernel.connectors.memory.qdrant import QdrantCollection from semantic_kernel.connectors.memory.redis import RedisHashsetCollection, RedisJsonCollection from semantic_kernel.connectors.memory.weaviate import 
WeaviateCollection from semantic_kernel.data import ( - DISTANCE_FUNCTION_DIRECTION_HELPER, DistanceFunction, IndexKind, VectorizableTextSearchMixin, VectorizedSearchMixin, VectorSearchFilter, VectorSearchOptions, + VectorSearchResult, VectorStoreRecordCollection, VectorStoreRecordDataField, VectorStoreRecordKeyField, @@ -45,48 +39,40 @@ vectorstoremodel, ) -# This is a rather complex sample, showing how to use the vector store -# with a number of different collections. -# It also shows how to use the vector store with a number of different data models. -# It also uses all the types of search available in the vector store. -# For a simpler example, see "simple_memory.py" - -def get_data_model(type: Literal["array", "list"], index_kind: IndexKind, distance_function: DistanceFunction) -> type: - if type == "array": +def get_data_model_array(index_kind: IndexKind, distance_function: DistanceFunction) -> type: + @vectorstoremodel + @dataclass + class DataModelArray: + vector: Annotated[ + np.ndarray | None, + VectorStoreRecordVectorField( + embedding_settings={"embedding": OpenAIEmbeddingPromptExecutionSettings(dimensions=1536)}, + index_kind=index_kind, + dimensions=1536, + distance_function=distance_function, + property_type="float", + serialize_function=np.ndarray.tolist, + deserialize_function=np.array, + ), + ] = None + id: Annotated[str, VectorStoreRecordKeyField()] = field(default_factory=lambda: str(uuid4())) + content: Annotated[ + str, + VectorStoreRecordDataField( + has_embedding=True, + embedding_property_name="vector", + property_type="str", + is_full_text_searchable=True, + ), + ] = "content1" + title: Annotated[str, VectorStoreRecordDataField(property_type="str", is_full_text_searchable=True)] = "title" + tag: Annotated[str, VectorStoreRecordDataField(property_type="str", is_filterable=True)] = "tag" - @vectorstoremodel - @dataclass - class DataModelArray: - vector: Annotated[ - np.ndarray | None, - VectorStoreRecordVectorField( - embedding_settings={"embedding": OpenAIEmbeddingPromptExecutionSettings(dimensions=1536)}, - index_kind=index_kind, - dimensions=1536, - distance_function=distance_function, - property_type="float", - serialize_function=np.ndarray.tolist, - deserialize_function=np.array, - ), - ] = None - id: Annotated[str, VectorStoreRecordKeyField()] = field(default_factory=lambda: str(uuid4())) - content: Annotated[ - str, - VectorStoreRecordDataField( - has_embedding=True, - embedding_property_name="vector", - property_type="str", - is_full_text_searchable=True, - ), - ] = "content1" - title: Annotated[str, VectorStoreRecordDataField(property_type="str", is_full_text_searchable=True)] = ( - "title" - ) - tag: Annotated[str, VectorStoreRecordDataField(property_type="str", is_filterable=True)] = "tag" + return DataModelArray - return DataModelArray +def get_data_model_list(index_kind: IndexKind, distance_function: DistanceFunction) -> type: @vectorstoremodel @dataclass class DataModelList: @@ -117,12 +103,9 @@ class DataModelList: collection_name = "test" -# Depending on the vector database, the index kind and distance function may need to be adjusted +# Depending on the vector database, the index kind and distance function may need to be adjusted, # since not all combinations are supported by all databases. -# The values below might need to be changed for your collection to work. 
-distance_function = DistanceFunction.EUCLIDEAN_SQUARED_DISTANCE -index_kind = IndexKind.HNSW -DataModel = get_data_model("array", index_kind, distance_function) +DataModel = get_data_model_array(IndexKind.HNSW, DistanceFunction.COSINE_SIMILARITY) # A list of VectorStoreRecordCollection that can be used. # Available collections are: @@ -141,12 +124,6 @@ class DataModelList: # https://learn.microsoft.com/en-us/azure/cosmos-db/how-to-develop-emulator?tabs=windows%2Cpython&pivots=api-nosql # Please see the link above to learn how to set up the Azure Cosmos NoSQL emulator on your machine. # For this sample to work with Azure Cosmos NoSQL, please adjust the index_kind of the data model to QUANTIZED_FLAT. -# - azure_cosmos_mongodb: Azure Cosmos MongoDB -# https://learn.microsoft.com/en-us/azure/cosmos-db/mongodb/introduction -# - chroma: Chroma -# The chroma collection is currently only available for in-memory versions -# Client-Server mode and Chroma Cloud are not yet supported. -# More info on Chroma here: https://docs.trychroma.com/docs/overview/introduction # This is represented as a mapping from the collection name to a # function which returns the collection. # Using a function allows for lazy initialization of the collection, @@ -185,23 +162,29 @@ class DataModelList: collection_name=collection_name, create_database=True, ), - "azure_cosmos_mongodb": lambda: AzureCosmosDBforMongoDBCollection( - data_model_type=DataModel, - collection_name=collection_name, - ), - "chroma": lambda: ChromaCollection(data_model_type=DataModel, collection_name=collection_name), } -async def main(collection: str, use_azure_openai: bool): +def print_record(result: VectorSearchResult | None = None, record: DataModel | None = None): + if result: + record = result.record + print(f" Found id: {record.id}") + print(f" Content: {record.content}") + if record.vector is not None: + print(f" Vector (first five): {record.vector[:5]}") + + +async def main(collection: str, use_azure_openai: bool, embedding_model: str): print("-" * 30) kernel = Kernel() - embedder = ( - AzureTextEmbedding(service_id="embedding") if use_azure_openai else OpenAITextEmbedding(service_id="embedding") - ) + service_id = "embedding" + if use_azure_openai: + embedder = AzureTextEmbedding(service_id=service_id, deployment_name=embedding_model) + else: + embedder = OpenAITextEmbedding(service_id=service_id, ai_model_id=embedding_model) kernel.add_service(embedder) async with collections[collection]() as record_collection: - print_with_color(f"Creating {collection} collection!", Colors.CGREY) + print(f"Creating {collection} collection!") await record_collection.delete_collection() await record_collection.create_collection_if_not_exists() @@ -217,22 +200,16 @@ async def main(collection: str, use_azure_openai: bool): title="Semantic Kernel Languages", tag="general", ) - record3 = DataModel( - content="```python\nfrom semantic_kernel import Kernel\nkernel = Kernel()\n```", - id="d5c9913a-e015-4944-b960-5d4a84bca002", - title="Code sample", - tag="code", - ) - print_with_color("Adding records!", Colors.CBLUE) + print("Adding records!") records = await VectorStoreRecordUtils(kernel).add_vector_to_records( - [record1, record2, record3], data_model_type=DataModel + [record1, record2], data_model_type=DataModel ) keys = await record_collection.upsert_batch(records) print(f" Upserted {keys=}") - print_with_color("Getting records!", Colors.CBLUE) - results = await record_collection.get_batch([record1.id, record2.id, record3.id]) + print("Getting records!") + 
results = await record_collection.get_batch([record1.id, record2.id]) if results: [print_record(record=result) for result in results] else: @@ -242,48 +219,49 @@ async def main(collection: str, use_azure_openai: bool): include_vectors=True, filter=VectorSearchFilter.equal_to("tag", "general"), ) - print("-" * 30) - print_with_color("Searching for 'python', with filter 'tag == general'", Colors.CBLUE) if isinstance(record_collection, VectorTextSearchMixin): print("-" * 30) - print_with_color("Using text search", Colors.CBLUE) - search_results = await record_collection.text_search("python", options) - if search_results.total_count == 0: - print("\nNothing found...\n") - else: - [print_record(result) async for result in search_results.results] + print("Using text search") + try: + search_results = await record_collection.text_search("python", options) + if search_results.total_count == 0: + print("\nNothing found...\n") + else: + [print_record(result) async for result in search_results.results] + except Exception: + print("Text search could not execute.") if isinstance(record_collection, VectorizedSearchMixin): print("-" * 30) - print_with_color( - f"Using vectorized search, for {distance_function.value}, " - f"the {'higher' if DISTANCE_FUNCTION_DIRECTION_HELPER[distance_function](1, 0) else 'lower'} the score the better" # noqa: E501 - f"", - Colors.CBLUE, + print( + "Using vectorized search, depending on the distance function, " + "the better score might be higher or lower." ) - search_results = await record_collection.vectorized_search( - vector=(await embedder.generate_raw_embeddings(["python"]))[0], - options=options, - ) - if search_results.total_count == 0: - print("\nNothing found...\n") - else: - [print_record(result) async for result in search_results.results] + try: + search_results = await record_collection.vectorized_search( + vector=(await embedder.generate_raw_embeddings(["python"]))[0], + options=VectorSearchOptions(vector_field_name="vector", include_vectors=True), + ) + if search_results.total_count == 0: + print("\nNothing found...\n") + else: + [print_record(result) async for result in search_results.results] + except Exception: + print("Vectorized search could not execute.") if isinstance(record_collection, VectorizableTextSearchMixin): print("-" * 30) - print_with_color( - f"Using vectorized search, for {distance_function.value}, " - f"the {'higher' if DISTANCE_FUNCTION_DIRECTION_HELPER[distance_function](1, 0) else 'lower'} the score the better", # noqa: E501 - Colors.CBLUE, - ) - search_results = await record_collection.vectorizable_text_search("python", options) - if search_results.total_count == 0: - print("\nNothing found...\n") - else: - [print_record(result) async for result in search_results.results] + print("Using vectorizable text search") + try: + search_results = await record_collection.vectorizable_text_search("python", options) + if search_results.total_count == 0: + print("\nNothing found...\n") + else: + [print_record(result) async for result in search_results.results] + except Exception: + print("Vectorizable text search could not execute.") print("-" * 30) - print_with_color("Deleting collection!", Colors.CBLUE) + print("Deleting collection!") await record_collection.delete_collection() - print_with_color("Done!", Colors.CGREY) + print("Done!") if __name__ == "__main__": @@ -293,5 +271,10 @@ async def main(collection: str, use_azure_openai: bool): parser.add_argument("--collection", default="in_memory", choices=collections.keys(), help="What collection 
to use.") # Option of whether to use OpenAI or Azure OpenAI. parser.add_argument("--use-azure-openai", action="store_true", help="Use Azure OpenAI instead of OpenAI.") + # Model + parser.add_argument( + "--model", default="text-embedding-3-small", help="The model or deployment to use for embeddings." + ) args = parser.parse_args() - asyncio.run(main(collection=args.collection, use_azure_openai=args.use_azure_openai)) + + asyncio.run(main(collection=args.collection, use_azure_openai=args.use_azure_openai, embedding_model=args.model)) diff --git a/python/samples/concepts/memory/memory_with_pandas.py b/python/samples/concepts/memory/pandas_memory.py similarity index 57% rename from python/samples/concepts/memory/memory_with_pandas.py rename to python/samples/concepts/memory/pandas_memory.py index 7a9b7f6b5d1c..e04a1ac3950f 100644 --- a/python/samples/concepts/memory/memory_with_pandas.py +++ b/python/samples/concepts/memory/pandas_memory.py @@ -39,31 +39,36 @@ async def main(): kernel.add_service(OpenAITextEmbedding(service_id="embedding", ai_model_id="text-embedding-3-small")) # create the record collection - async with AzureAISearchCollection[pd.DataFrame]( + record_collection = AzureAISearchCollection[pd.DataFrame]( data_model_type=pd.DataFrame, data_model_definition=model_fields, - ) as record_collection: - # create some records - records = [ - {"id": str(uuid4()), "content": "my dict text", "vector": None}, - {"id": str(uuid4()), "content": "my second text", "vector": None}, - ] + ) + # create some records + records = [ + {"id": str(uuid4()), "content": "my dict text", "vector": None}, + {"id": str(uuid4()), "content": "my second text", "vector": None}, + ] - # create the dataframe and add the embeddings - df = pd.DataFrame(records) - df = await VectorStoreRecordUtils(kernel).add_vector_to_records(df, None, data_model_definition=model_fields) - print("Records with embeddings:") - print(df.shape) - print(df.head(5)) + # create the dataframe and add the embeddings + df = pd.DataFrame(records) + df = await VectorStoreRecordUtils(kernel).add_vector_to_records(df, None, data_model_definition=model_fields) + print("Records with embeddings:") + print(df.shape) + print(df.head(5)) - # upsert the records (for a container, upsert and upsert_batch are equivalent) - await record_collection.upsert_batch(df) + # upsert the records (for a container, upsert and upsert_batch are equivalent) + await record_collection.upsert_batch(df) - # retrieve a record - result = await record_collection.get(records[0]["id"]) - print("Retrieved records:") - print(result.shape) - print(result.head(5)) + # retrieve a record + result = await record_collection.get(records[0]["id"]) + print("Retrieved records:") + print(result.shape) + print(result.head(5)) + + # explicit cleanup, usually not needed, but a script like this + # closes so fast that the async close triggered by delete may not finish on time + del record_collection + await asyncio.sleep(1) if __name__ == "__main__": diff --git a/python/samples/concepts/memory/simple_memory.py b/python/samples/concepts/memory/simple_memory.py deleted file mode 100644 index 941b5f59baa7..000000000000 --- a/python/samples/concepts/memory/simple_memory.py +++ /dev/null @@ -1,170 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio -from collections.abc import Sequence -from dataclasses import dataclass, field -from typing import Annotated -from uuid import uuid4 - -from samples.concepts.memory.utils import print_record -from samples.concepts.resources.utils import Colors, print_with_color -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.open_ai import ( - OpenAIEmbeddingPromptExecutionSettings, - OpenAITextEmbedding, -) -from semantic_kernel.connectors.memory.in_memory import InMemoryVectorCollection -from semantic_kernel.data import ( - DISTANCE_FUNCTION_DIRECTION_HELPER, - DistanceFunction, - IndexKind, - VectorSearchFilter, - VectorSearchOptions, - VectorStoreRecordDataField, - VectorStoreRecordKeyField, - VectorStoreRecordUtils, - VectorStoreRecordVectorField, - vectorstoremodel, -) - -# This is the most basic example of a vector store and collection -# For a more complex example, using different collection types, see "complex_memory.py" -# This sample uses openai text embeddings, so make sure to have your environment variables set up -# it needs openai api key and embedding model id -kernel = Kernel() -embedder = OpenAITextEmbedding(service_id="embedding") -kernel.add_service(embedder) - -# Next, you need to define your data structure -# In this case, we are using a dataclass to define our data structure -# you can also use a pydantic model, or a vanilla python class, see "data_models.py" for more examples -# Inside the model we define which fields we want to use, and which fields are vectors -# and for vector fields we define what kind of index we want to use, and what distance function we want to use -# This has been done in constants here for simplicity, but you can also define them in the model itself -# Next we create three records using that model - -DISTANCE_FUNCTION = DistanceFunction.COSINE_SIMILARITY -# The in memory collection does not actually use a index, so this variable is not relevant, here for completeness -INDEX_KIND = IndexKind.IVF_FLAT - - -@vectorstoremodel -@dataclass -class DataModel: - vector: Annotated[ - list[float] | None, - VectorStoreRecordVectorField( - embedding_settings={"embedding": OpenAIEmbeddingPromptExecutionSettings()}, - index_kind=INDEX_KIND, - dimensions=1536, - distance_function=DISTANCE_FUNCTION, - property_type="float", - ), - ] = None - id: Annotated[str, VectorStoreRecordKeyField()] = field(default_factory=lambda: str(uuid4())) - content: Annotated[ - str, - VectorStoreRecordDataField( - has_embedding=True, - embedding_property_name="vector", - property_type="str", - is_full_text_searchable=True, - ), - ] = "content1" - title: Annotated[str, VectorStoreRecordDataField(property_type="str", is_full_text_searchable=True)] = "title" - tag: Annotated[str, VectorStoreRecordDataField(property_type="str", is_filterable=True)] = "tag" - - -records = [ - DataModel( - content="Semantic Kernel is awesome", - id="e6103c03-487f-4d7d-9c23-4723651c17f4", - title="Overview", - tag="general", - ), - DataModel( - content="Semantic Kernel is available in dotnet, python and Java.", - id="09caec77-f7e1-466a-bcec-f1d51c5b15be", - title="Semantic Kernel Languages", - tag="general", - ), - DataModel( - content="```python\nfrom semantic_kernel import Kernel\nkernel = Kernel()\n```", - id="d5c9913a-e015-4944-b960-5d4a84bca002", - title="Code sample", - tag="code", - ), -] - - -async def main(): - print("-" * 30) - # Create the collection here - # by using the generic we make sure that IDE's understand what you need to pass in and get back - # we also 
use the async with to open and close the connection - # for the in memory collection, this is just a no-op - # but for other collections, like Azure AI Search, this will open and close the connection - async with InMemoryVectorCollection[DataModel]( - collection_name="test", - data_model_type=DataModel, - ) as record_collection: - # Create the collection after wiping it - print_with_color("Creating test collection!", Colors.CGREY) - await record_collection.delete_collection() - await record_collection.create_collection_if_not_exists() - - # First add vectors to the records - print_with_color("Adding records!", Colors.CBLUE) - records_with_embedding = await VectorStoreRecordUtils(kernel).add_vector_to_records( - records, data_model_type=DataModel - ) - # Next upsert them to the store. - keys = await record_collection.upsert_batch(records_with_embedding) - print(f" Upserted {keys=}") - print("-" * 30) - - # Now we can get the records back - print_with_color("Getting records!", Colors.CBLUE) - results = await record_collection.get_batch([records[0].id, records[1].id, records[2].id]) - if results and isinstance(results, Sequence): - [print_record(record=result) for result in results] - else: - print("Nothing found...") - print("-" * 30) - - # Now we can search for records - # First we define the options - # The most important option is the vector_field_name, which is the name of the field that contains the vector - # The other options are optional, but can be useful - # The filter option is used to filter the results based on the tag field - options = VectorSearchOptions( - vector_field_name="vector", - include_vectors=True, - filter=VectorSearchFilter.equal_to("tag", "general"), - ) - query = "python" - print_with_color(f"Searching for '{query}', with filter 'tag == general'", Colors.CBLUE) - print_with_color( - f"Using vectorized search, for {DISTANCE_FUNCTION.value}, " - f"the {'higher' if DISTANCE_FUNCTION_DIRECTION_HELPER[DISTANCE_FUNCTION](1, 0) else 'lower'} the score the better" # noqa: E501 - f"", - Colors.CBLUE, - ) - search_results = await record_collection.vectorized_search( - vector=(await embedder.generate_raw_embeddings([query]))[0], - options=options, - ) - if search_results.total_count == 0: - print("\nNothing found...\n") - else: - [print_record(result) async for result in search_results.results] - print("-" * 30) - - # lets cleanup! - print_with_color("Deleting collection!", Colors.CBLUE) - await record_collection.delete_collection() - print_with_color("Done!", Colors.CGREY) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/memory/utils.py b/python/samples/concepts/memory/utils.py deleted file mode 100644 index 6f0c94c788c9..000000000000 --- a/python/samples/concepts/memory/utils.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- - -from typing import TypeVar - -from samples.concepts.resources.utils import Colors, print_with_color -from semantic_kernel.data import ( - VectorSearchResult, -) - -_T = TypeVar("_T") - - -def print_record(result: VectorSearchResult[_T] | None = None, record: _T | None = None): - if result: - record = result.record - print_with_color(f" Found id: {record.id}", Colors.CGREEN) - if result and result.score is not None: - print_with_color(f" Score: {result.score}", Colors.CWHITE) - print_with_color(f" Content: {record.content}", Colors.CWHITE) - print_with_color(f" Tag: {record.tag}", Colors.CWHITE) - if record.vector is not None: - print_with_color(f" Vector (first five): {record.vector[:5]}", Colors.CWHITE) diff --git a/python/samples/concepts/planners/azure_openai_function_calling_stepwise_planner.py b/python/samples/concepts/planners/azure_openai_function_calling_stepwise_planner.py new file mode 100644 index 000000000000..6627a2a7fb26 --- /dev/null +++ b/python/samples/concepts/planners/azure_openai_function_calling_stepwise_planner.py @@ -0,0 +1,53 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os + +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.core_plugins import MathPlugin, TimePlugin +from semantic_kernel.planners import FunctionCallingStepwisePlanner, FunctionCallingStepwisePlannerOptions + + +async def main(): + kernel = Kernel() + + service_id = "planner" + kernel.add_service( + AzureChatCompletion( + service_id=service_id, + ), + ) + + plugin_path = os.path.join( + os.path.dirname(os.path.dirname(os.path.realpath(__file__))), + "resources", + ) + kernel.add_plugin(parent_directory=plugin_path, plugin_name="email_plugin") + + kernel.add_plugin(MathPlugin(), "MathPlugin") + kernel.add_plugin(TimePlugin(), "TimePlugin") + + questions = [ + "What is the current hour number, plus 5?", + "What is 387 minus 22? Email the solution to John and Mary.", + "Write a limerick, translate it to Spanish, and send it to Jane", + ] + + options = FunctionCallingStepwisePlannerOptions( + max_iterations=10, + max_tokens=4000, + ) + + planner = FunctionCallingStepwisePlanner(service_id=service_id, options=options) + + for question in questions: + result = await planner.invoke(kernel, question) + print(f"Q: {question}\nA: {result.final_answer}\n") + + # Uncomment the following line to view the planner's process for completing the request + # print(f"Chat history: {result.chat_history}\n") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/planners/openai_function_calling_stepwise_planner.py b/python/samples/concepts/planners/openai_function_calling_stepwise_planner.py new file mode 100644 index 000000000000..cce74f39a41d --- /dev/null +++ b/python/samples/concepts/planners/openai_function_calling_stepwise_planner.py @@ -0,0 +1,51 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +import os + +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion +from semantic_kernel.core_plugins import MathPlugin, TimePlugin +from semantic_kernel.planners import FunctionCallingStepwisePlanner, FunctionCallingStepwisePlannerOptions + + +async def main(): + kernel = Kernel() + + service_id = "planner" + kernel.add_service( + OpenAIChatCompletion( + service_id=service_id, + ), + ) + + plugin_path = os.path.join( + os.path.dirname(os.path.dirname(os.path.realpath(__file__))), + "resources", + ) + kernel.add_plugin(parent_directory=plugin_path, plugin_name="email_plugin") + kernel.add_plugins({"MathPlugin": MathPlugin(), "TimePlugin": TimePlugin()}) + + questions = [ + "What is the current hour number, plus 5?", + "What is 387 minus 22? Email the solution to John and Mary.", + "Write a limerick, translate it to Spanish, and send it to Jane", + ] + + options = FunctionCallingStepwisePlannerOptions( + max_iterations=10, + max_tokens=4000, + ) + + planner = FunctionCallingStepwisePlanner(service_id=service_id, options=options) + + for question in questions: + result = await planner.invoke(kernel, question) + print(f"Q: {question}\nA: {result.final_answer}\n") + + # Uncomment the following line to view the planner's process for completing the request + # print(f"\nChat history: {result.chat_history}\n") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/planners/sequential_planner.py b/python/samples/concepts/planners/sequential_planner.py new file mode 100644 index 000000000000..13aaf83fdab0 --- /dev/null +++ b/python/samples/concepts/planners/sequential_planner.py @@ -0,0 +1,42 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion +from semantic_kernel.core_plugins import MathPlugin, TextPlugin, TimePlugin +from semantic_kernel.planners import SequentialPlanner + + +async def main(): + kernel = Kernel() + + service_id = "gpt-3.5" + kernel.add_service(OpenAIChatCompletion(service_id=service_id, ai_model_id="gpt-3.5-turbo")) + kernel.add_plugins({"math": MathPlugin(), "time": TimePlugin(), "text": TextPlugin()}) + + # create an instance of sequential planner. + planner = SequentialPlanner(service_id=service_id, kernel=kernel) + + # the ask for which the sequential planner is going to find a relevant function. + ask = "What day of the week is today, all uppercase?" + + # ask the sequential planner to identify a suitable function from the list of functions available. + plan = await planner.create_plan(goal=ask) + + # ask the sequential planner to execute the identified function. + result = await plan.invoke(kernel=kernel) + + for step in plan._steps: + print(step.description, ":", step._state.__dict__) + + print("Expected Answer:") + print(result) + """ + Output: + SUNDAY + """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/plugins/crew_ai/README.md b/python/samples/concepts/plugins/crew_ai/README.md deleted file mode 100644 index faa9a4b9c72f..000000000000 --- a/python/samples/concepts/plugins/crew_ai/README.md +++ /dev/null @@ -1,47 +0,0 @@ -# Crew AI Plugin for Semantic Kernel - -This sample demonstrates how to integrate with [Crew AI Enterprise](https://app.crewai.com/) crews in Semantic Kernel. - -## Requirements - -Before running this sample you need to have a Crew deployed to the Crew AI Enterprise cloud. 
Many pre-built Crew templates can be found [here](https://app.crewai.com/crewai_plus/templates). You will need the following information from your deployed Crew:
-
-- endpoint: The base URL for your Crew.
-- authentication token: The authentication token for your Crew.
-- required inputs: Most Crews have a set of required inputs that need to be provided when kicking off the Crew, and those input names, types, and values need to be known.
-
-## Using the Crew Plugin
-
-Once configured, the `CrewAIEnterprise` class can be used directly by calling methods on it, or can be used to generate a Semantic Kernel plugin with inputs that match those of your Crew. Generating a plugin is useful for scenarios where you want an LLM to be able to invoke your Crew as a tool.
-
-## Running the sample
-
-1. Deploy your Crew to the Crew AI Enterprise cloud.
-1. Gather the required information listed above.
-1. Create environment variables or use your .env file to define your Crew's endpoint and token as:
-
-```md
-CREW_AI_ENDPOINT="{Your Crew's endpoint}"
-CREW_AI_TOKEN="{Your Crew's authentication token}"
-```
-
-1. In [crew_ai_plugin.py](./crew_ai_plugin.py) find the section that defines the Crew's required inputs and modify it to match your Crew's inputs. The input descriptions and types are critical to help LLMs understand the inputs' semantic meaning so that they can accurately call the plugin. The sample is based on the `Enterprise Content Marketing Crew` template, which has two required inputs, `company` and `topic`.
-
-```python
-    # The required inputs for the Crew must be known in advance. This example is modeled after the
-    # Enterprise Content Marketing Crew Template and requires string inputs for the company and topic.
-    # We need to describe the type and purpose of each input to allow the LLM to invoke the crew as expected.
-    crew_plugin_definitions = [
-        KernelParameterMetadata(
-            name="company",
-            type="string",
-            description="The name of the company that should be researched",
-            is_required=True,
-        ),
-        KernelParameterMetadata(
-            name="topic", type="string", description="The topic that should be researched", is_required=True
-        ),
-    ]
-```
-
-1. Run the sample. Notice that the sample invokes (kicks off) the Crew twice: once directly by calling the `kickoff` method, and once by creating a plugin and invoking it.
diff --git a/python/samples/concepts/plugins/crew_ai/crew_ai_plugin.py b/python/samples/concepts/plugins/crew_ai/crew_ai_plugin.py
deleted file mode 100644
index c817f6d8cda1..000000000000
--- a/python/samples/concepts/plugins/crew_ai/crew_ai_plugin.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# Copyright (c) Microsoft. All rights reserved.
- -import asyncio -import logging - -from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior -from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.core_plugins.crew_ai import CrewAIEnterprise -from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata - -logging.basicConfig(level=logging.INFO) - - -async def using_crew_ai_enterprise(): - # Create an instance of the CrewAI Enterprise Crew - async with CrewAIEnterprise() as crew: - ##################################################################### - # Using the CrewAI Enterprise Crew directly # - ##################################################################### - - # The required inputs for the Crew must be known in advance. This example is modeled after the - # Enterprise Content Marketing Crew Template and requires the following inputs: - inputs = {"company": "CrewAI", "topic": "Agentic products for consumers"} - - # Invoke directly with our inputs - kickoff_id = await crew.kickoff(inputs) - print(f"CrewAI Enterprise Crew kicked off with ID: {kickoff_id}") - - # Wait for completion - result = await crew.wait_for_crew_completion(kickoff_id) - print("CrewAI Enterprise Crew completed with the following result:") - print(result) - - ##################################################################### - # Using the CrewAI Enterprise as a Plugin # - ##################################################################### - - # Define the description of the Crew. This will used as the semantic description of the plugin. - crew_description = ( - "Conducts thorough research on the specified company and topic to identify emerging trends," - "analyze competitor strategies, and gather data-driven insights." - ) - - # The required inputs for the Crew must be known in advance. This example is modeled after the - # Enterprise Content Marketing Crew Template and requires string inputs for the company and topic. - # We need to describe the type and purpose of each input to allow the LLM to invoke the crew as expected. - crew_input_parameters = [ - KernelParameterMetadata( - name="company", - type="string", - type_object=str, - description="The name of the company that should be researched", - is_required=True, - ), - KernelParameterMetadata( - name="topic", - type="string", - type_object=str, - description="The topic that should be researched", - is_required=True, - ), - ] - - # Create the CrewAI Plugin. This builds a plugin that can be added to the Kernel and invoked like any other - # plugin. The plugin will contain the following functions: - # - kickoff: Starts the Crew with the specified inputs and returns the Id of the scheduled kickoff. - # - kickoff_and_wait: Starts the Crew with the specified inputs and waits for the Crew to complete before - # returning the result. - # - wait_for_completion: Waits for the specified Crew kickoff to complete and returns the result. - # - get_status: Gets the status of the specified Crew kickoff. 
-        crew_plugin = crew.create_kernel_plugin(
-            name="EnterpriseContentMarketingCrew",
-            description=crew_description,
-            parameters=crew_input_parameters,
-        )
-
-        # Configure the kernel for chat completion and add the CrewAI plugin.
-        kernel, chat_completion, settings = configure_kernel_for_chat()
-        kernel.add_plugin(crew_plugin)
-
-        # Create a chat history to store the system message, initial messages, and the conversation.
-        history = ChatHistory()
-        history.add_system_message("You are an AI assistant that can help me with research.")
-        history.add_user_message(
-            "I'm looking for emerging marketplace trends about Crew AI and their consumer AI products."
-        )
-
-        # Invoke the chat completion service with enough information for the CrewAI plugin to be invoked.
-        response = await chat_completion.get_chat_message_content(history, settings, kernel=kernel)
-        print(response)
-
-        # expected output:
-        # INFO:semantic_kernel.connectors.ai.open_ai.services.open_ai_handler:OpenAI usage: ...
-        # INFO:semantic_kernel.connectors.ai.chat_completion_client_base:processing 1 tool calls in parallel.
-        # INFO:semantic_kernel.kernel:Calling EnterpriseContentMarketingCrew-kickoff_and_wait function with args:
-        # {"company":"Crew AI","topic":"emerging marketplace trends in consumer AI products"}
-        # INFO:semantic_kernel.functions.kernel_function:Function EnterpriseContentMarketingCrew-kickoff_and_wait
-        # invoking.
-        # INFO:semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise:CrewAI Crew kicked off with Id: *****
-        # INFO:semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise:CrewAI Crew with kickoff Id: ***** completed with
-        # status: SUCCESS
-        # INFO:semantic_kernel.functions.kernel_function:Function EnterpriseContentMarketingCrew-kickoff_and_wait
-        # succeeded.
-        # Here are some emerging marketplace trends related to Crew AI and their consumer AI products, along with
-        # suggested content pieces to explore these trends: ...
-
-
-def configure_kernel_for_chat() -> tuple[Kernel, ChatCompletionClientBase, PromptExecutionSettings]:
-    kernel = Kernel()
-
-    # You can select from the following chat completion services that support function calling:
-    # - Services.OPENAI
-    # - Services.AZURE_OPENAI
-    # - Services.AZURE_AI_INFERENCE
-    # - Services.ANTHROPIC
-    # - Services.BEDROCK
-    # - Services.GOOGLE_AI
-    # - Services.MISTRAL_AI
-    # - Services.OLLAMA
-    # - Services.ONNX
-    # - Services.VERTEX_AI
-    # - Services.DEEPSEEK
-    # Please make sure you have configured your environment correctly for the selected chat completion service.
-    chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.OPENAI)
-
-    # Configure the function choice behavior. Here, we set it to Auto, where auto_invoke=True by default.
-    # With `auto_invoke=True`, the model will automatically choose and call functions as needed.
-    request_settings.function_choice_behavior = FunctionChoiceBehavior.Auto()
-
-    # Pass the request settings to the kernel arguments.
-    kernel.add_service(chat_completion_service)
-    return kernel, chat_completion_service, request_settings
-
-
-if __name__ == "__main__":
-    asyncio.run(using_crew_ai_enterprise())
diff --git a/python/samples/concepts/realtime/README.md b/python/samples/concepts/realtime/README.md
deleted file mode 100644
index a2dbb5d349f5..000000000000
--- a/python/samples/concepts/realtime/README.md
+++ /dev/null
@@ -1,50 +0,0 @@
-# Realtime Multi-modal API Samples
-
-These samples are more complex than most because of the nature of these APIs.
They are designed to be run in real-time and require a microphone and speaker to be connected to your computer.
-
-To run these samples, you will need the following setup:
-
-- Environment variables for OpenAI (websocket or WebRTC), with your key and OPENAI_REALTIME_MODEL_ID set.
-- Environment variables for Azure (websocket only), set with your endpoint, optionally a key, and AZURE_OPENAI_REALTIME_DEPLOYMENT_NAME set. The API version needs to be at least `2024-10-01-preview`.
-- To run the samples with a simple version of a class that handles the incoming and outgoing sound, you need to install the following packages in your environment:
-  - semantic-kernel[realtime]
-  - pyaudio
-  - sounddevice
-  - pydub
-  e.g. pip install pyaudio sounddevice pydub semantic_kernel[realtime]
-
-The samples all run as Python scripts that can be started either directly or through your IDE.
-
-All demos have similar output: the instructions are printed, and each new *response item* from the API is put on a new `Mosscap (transcript):` line. The nature of these APIs is such that the transcript arrives before the spoken audio, so if you interrupt the audio the transcript will not match the audio.
-
-The realtime APIs work by sending events from the server to you and sending events back to the server; this is fully asynchronous. The samples show how you can listen to the events sent by the server; some are handled by the code in the samples, others are not. For instance, one could add a clause to the match case in the receive loop that logs the usage reported as part of the `response.done` event.
-
-For more info on the events, see our documentation, as well as the documentation of [OpenAI](https://platform.openai.com/docs/guides/realtime) and [Azure](https://learn.microsoft.com/en-us/azure/ai-services/openai/realtime-audio-quickstart?tabs=keyless%2Cmacos&pivots=programming-language-python).
-
-## Simple chat samples
-
-### [Simple chat with realtime websocket](./simple_realtime_chat_websocket.py)
-
-This sample uses the websocket API with Azure OpenAI to run a simple interaction based on voice. If you want to use this sample with OpenAI, just change AzureRealtimeWebsocket into OpenAIRealtimeWebsocket.
-
-### [Simple chat with realtime WebRTC](./simple_realtime_chat_webrtc.py)
-
-This sample uses the WebRTC API with OpenAI to run a simple interaction based on voice. Because of the way the WebRTC protocol works, this needs a different player and recorder than the websocket version.
-
-## Function calling samples
-
-The following two samples use function calling with the following functions:
-
-- get_weather: This function will return the weather for a given city; it is randomly generated and not based on any real data.
-- get_time: This function will return the current time and date.
-- goodbye: This function will end the conversation.
-
-A line is logged whenever one of these functions is called.
-
-### [Chat with function calling Websocket](./realtime_chat_with_function_calling_websocket.py)
-
-This sample uses the websocket API with Azure OpenAI to run the interaction with the voice model, but now with function calling.
-
-### [Chat with function calling WebRTC](./realtime_chat_with_function_calling_webrtc.py)
-
-This sample uses the WebRTC API with OpenAI to run the interaction with the voice model, but now with function calling.
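The realtime README above suggests adding a clause to the receive loop's match statement to log the usage carried by the `response.done` event. The snippet below is a minimal sketch of what such a loop could look like, not part of the removed samples: it assumes the `ListenEvents` enum exposes a `RESPONSE_DONE` member and that the raw server payload (including its usage data) is available on `event.service_event`, mirroring the `RESPONSE_CREATED` and `ERROR` clauses used in these samples.

```python
# Sketch only: a receive loop that also logs usage from `response.done` events.
import logging

from semantic_kernel.connectors.ai.open_ai import ListenEvents

logger = logging.getLogger(__name__)


async def receive_with_usage_logging(realtime_client) -> None:
    """Consume realtime events and log the usage reported on `response.done` (sketch)."""
    async for event in realtime_client.receive():
        match event.service_type:
            case ListenEvents.RESPONSE_CREATED:
                print("\nMosscap (transcript): ", end="")
            case ListenEvents.RESPONSE_DONE:
                # Usage statistics (token counts) travel with the raw server event.
                # The attribute path below is an assumption, so fall back to logging
                # the whole payload if it is not present.
                usage = getattr(getattr(event.service_event, "response", None), "usage", None)
                logger.info("Usage: %s", usage if usage is not None else event.service_event)
            case ListenEvents.ERROR:
                logger.error(event.service_event)
```

This clause can be merged into the `match event.service_type` block of any of the receive loops shown in the function-calling samples that follow.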
diff --git a/python/samples/concepts/realtime/realtime_chat_with_function_calling_webrtc.py b/python/samples/concepts/realtime/realtime_chat_with_function_calling_webrtc.py deleted file mode 100644 index 2131807a0eae..000000000000 --- a/python/samples/concepts/realtime/realtime_chat_with_function_calling_webrtc.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import logging -from datetime import datetime -from random import randint - -from samples.concepts.realtime.utils import AudioPlayerWebRTC, AudioRecorderWebRTC, check_audio_devices -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai import FunctionChoiceBehavior -from semantic_kernel.connectors.ai.open_ai import ( - ListenEvents, - OpenAIRealtimeExecutionSettings, - OpenAIRealtimeWebRTC, - TurnDetection, -) -from semantic_kernel.contents import ChatHistory -from semantic_kernel.contents.realtime_events import RealtimeTextEvent -from semantic_kernel.functions import kernel_function - -logging.basicConfig(level=logging.WARNING) -utils_log = logging.getLogger("samples.concepts.realtime.utils") -utils_log.setLevel(logging.INFO) -logger = logging.getLogger(__name__) -logger.setLevel(logging.INFO) - -""" -This simple sample demonstrates how to use the OpenAI Realtime API to create -a chat bot that can listen and respond directly through audio. -It requires installing: -- semantic-kernel[realtime] -- pyaudio -- sounddevice -- pydub -e.g. pip install pyaudio sounddevice pydub semantic_kernel[realtime] - -For more details of the exact setup, see the README.md in the realtime folder. -""" - -# The characterics of your speaker and microphone are a big factor in a smooth conversation -# so you may need to try out different devices for each. -# you can also play around with the turn_detection settings to get the best results. -# It has device id's set in the AudioRecorderStream and AudioPlayerAsync classes, -# so you may need to adjust these for your system. -# you can disable the check for available devices by commenting the line below -check_audio_devices() - - -@kernel_function -def get_weather(location: str) -> str: - """Get the weather for a location.""" - weather_conditions = ("sunny", "hot", "cloudy", "raining", "freezing", "snowing") - weather = weather_conditions[randint(0, len(weather_conditions) - 1)] # nosec - logger.info(f"@ Getting weather for {location}: {weather}") - return f"The weather in {location} is {weather}." - - -@kernel_function -def get_date_time() -> str: - """Get the current date and time.""" - logger.info("@ Getting current datetime") - return f"The current date and time is {datetime.now().isoformat()}." - - -@kernel_function -def goodbye(): - """When the user is done, say goodbye and then call this function.""" - logger.info("@ Goodbye has been called!") - raise KeyboardInterrupt - - -async def main() -> None: - print_transcript = True - # create the Kernel and add a simple function for function calling. 
- kernel = Kernel() - kernel.add_functions(plugin_name="helpers", functions=[goodbye, get_weather, get_date_time]) - - # create the audio player and audio track - # both take a device_id parameter, which is the index of the device to use, if None the default device is used - audio_player = AudioPlayerWebRTC() - # create the realtime client and optionally add the audio output function, this is optional - # and can also be passed in the receive method - realtime_client = OpenAIRealtimeWebRTC(audio_track=AudioRecorderWebRTC()) - - # Create the settings for the session - # The realtime api, does not use a system message, but takes instructions as a parameter for a session - # Another important setting is to tune the server_vad turn detection - # if this is turned off (by setting turn_detection=None), you will have to send - # the "input_audio_buffer.commit" and "response.create" event to the realtime api - # to signal the end of the user's turn and start the response. - # manual VAD is not part of this sample - # for more info: https://platform.openai.com/docs/api-reference/realtime-sessions/create#realtime-sessions-create-turn_detection - settings = OpenAIRealtimeExecutionSettings( - instructions=""" - You are a chat bot. Your name is Mosscap and - you have one goal: figure out what people need. - Your full name, should you need to know it, is - Splendid Speckled Mosscap. You communicate - effectively, but you tend to answer with long - flowery prose. - """, - voice="alloy", - turn_detection=TurnDetection(type="server_vad", create_response=True, silence_duration_ms=800, threshold=0.8), - function_choice_behavior=FunctionChoiceBehavior.Auto(), - ) - # and we can add a chat history to conversation after starting it - chat_history = ChatHistory() - chat_history.add_user_message("Hi there, who are you?") - chat_history.add_assistant_message("I am Mosscap, a chat bot. I'm trying to figure out what people need.") - - # the context manager calls the create_session method on the client and starts listening to the audio stream - async with ( - audio_player, - realtime_client( - settings=settings, - chat_history=chat_history, - kernel=kernel, - create_response=True, - ), - ): - async for event in realtime_client.receive(audio_output_callback=audio_player.client_callback): - match event: - case RealtimeTextEvent(): - if print_transcript: - print(event.text.text, end="") - case _: - # OpenAI Specific events - match event.service_type: - case ListenEvents.RESPONSE_CREATED: - if print_transcript: - print("\nMosscap (transcript): ", end="") - case ListenEvents.ERROR: - logger.error(event.service_event) - - -if __name__ == "__main__": - print( - "Instructions: The model will start speaking immediately," - "this can be turned off by removing `create_response=True` above." - "The model will detect when you stop and automatically generate a response. " - "Press ctrl + c to stop the program." - ) - asyncio.run(main()) diff --git a/python/samples/concepts/realtime/realtime_chat_with_function_calling_websocket.py b/python/samples/concepts/realtime/realtime_chat_with_function_calling_websocket.py deleted file mode 100644 index eaa83f250c54..000000000000 --- a/python/samples/concepts/realtime/realtime_chat_with_function_calling_websocket.py +++ /dev/null @@ -1,141 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio -import logging -from datetime import datetime -from random import randint - -from samples.concepts.realtime.utils import AudioPlayerWebsocket, AudioRecorderWebsocket -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai import FunctionChoiceBehavior -from semantic_kernel.connectors.ai.open_ai import ( - AzureRealtimeExecutionSettings, - AzureRealtimeWebsocket, - ListenEvents, - TurnDetection, -) -from semantic_kernel.contents import ChatHistory -from semantic_kernel.contents.realtime_events import RealtimeTextEvent -from semantic_kernel.functions import kernel_function - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - -""" -This simple sample demonstrates how to use the OpenAI Realtime API to create -a chat bot that can listen and respond directly through audio. -It requires installing: -- semantic-kernel[realtime] -- pyaudio -- sounddevice -- pydub -e.g. pip install pyaudio sounddevice pydub semantic_kernel[realtime] - -For more details of the exact setup, see the README.md in the realtime folder. -""" - - -@kernel_function -def get_weather(location: str) -> str: - """Get the weather for a location.""" - weather_conditions = ("sunny", "hot", "cloudy", "raining", "freezing", "snowing") - weather = weather_conditions[randint(0, len(weather_conditions) - 1)] # nosec - logger.info(f"@ Getting weather for {location}: {weather}") - return f"The weather in {location} is {weather}." - - -@kernel_function -def get_date_time() -> str: - """Get the current date and time.""" - logger.info("@ Getting current datetime") - return f"The current date and time is {datetime.now().isoformat()}." - - -@kernel_function -def goodbye(): - """When the user is done, say goodbye and then call this function.""" - logger.info("@ Goodbye has been called!") - raise KeyboardInterrupt - - -async def main() -> None: - print_transcript = True - # create the Kernel and add a simple function for function calling. - kernel = Kernel() - kernel.add_functions(plugin_name="helpers", functions=[goodbye, get_weather, get_date_time]) - - # create the realtime client, in this the Azure Websocket client, there are also OpenAI Websocket and WebRTC clients - # See 02b-chat_with_function_calling_webrtc.py for an example of the WebRTC client - realtime_client = AzureRealtimeWebsocket() - # create the audio player and audio track - # both take a device_id parameter, which is the index of the device to use, if None the default device is used - audio_player = AudioPlayerWebsocket() - audio_recorder = AudioRecorderWebsocket(realtime_client=realtime_client) - - # Create the settings for the session - # The realtime api, does not use a system message, but takes instructions as a parameter for a session - # Another important setting is to tune the server_vad turn detection - # if this is turned off (by setting turn_detection=None), you will have to send - # the "input_audio_buffer.commit" and "response.create" event to the realtime api - # to signal the end of the user's turn and start the response. - # manual VAD is not part of this sample - # for more info: https://platform.openai.com/docs/api-reference/realtime-sessions/create#realtime-sessions-create-turn_detection - settings = AzureRealtimeExecutionSettings( - instructions=""" - You are a chat bot. Your name is Mosscap and - you have one goal: figure out what people need. - Your full name, should you need to know it, is - Splendid Speckled Mosscap. You communicate - effectively, but you tend to answer with long - flowery prose. 
- """, - # see https://platform.openai.com/docs/api-reference/realtime-sessions/create#realtime-sessions-create-voice for the full list of voices # noqa: E501 - voice="alloy", - turn_detection=TurnDetection(type="server_vad", create_response=True, silence_duration_ms=800, threshold=0.8), - function_choice_behavior=FunctionChoiceBehavior.Auto(), - ) - # and we can add a chat history to conversation to seed the conversation - chat_history = ChatHistory() - chat_history.add_user_message("Hi there, I'm based in Amsterdam.") - chat_history.add_assistant_message( - "I am Mosscap, a chat bot. I'm trying to figure out what people need, " - "I can tell you what the weather is or the time." - ) - - # the context manager calls the create_session method on the client and starts listening to the audio stream - async with ( - audio_player, - audio_recorder, - realtime_client( - settings=settings, - chat_history=chat_history, - kernel=kernel, - create_response=True, - ), - ): - # the audio_output_callback can be added here or in the client constructor - # using this gives the smoothest experience - async for event in realtime_client.receive(audio_output_callback=audio_player.client_callback): - match event: - case RealtimeTextEvent(): - if print_transcript: - print(event.text.text, end="") - case _: - # OpenAI Specific events - match event.service_type: - case ListenEvents.RESPONSE_CREATED: - if print_transcript: - print("\nMosscap (transcript): ", end="") - case ListenEvents.ERROR: - print(event.service_event) - logger.error(event.service_event) - - -if __name__ == "__main__": - print( - "Instructions: The model will start speaking immediately," - "this can be turned off by removing `create_response=True` above." - "The model will detect when you stop and automatically generate a response. " - "Press ctrl + c to stop the program." - ) - asyncio.run(main()) diff --git a/python/samples/concepts/realtime/simple_realtime_chat_webrtc.py b/python/samples/concepts/realtime/simple_realtime_chat_webrtc.py deleted file mode 100644 index 0b9c6a7e9485..000000000000 --- a/python/samples/concepts/realtime/simple_realtime_chat_webrtc.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import logging - -from samples.concepts.realtime.utils import AudioPlayerWebRTC, AudioRecorderWebRTC, check_audio_devices -from semantic_kernel.connectors.ai.open_ai import ( - ListenEvents, - OpenAIRealtimeExecutionSettings, - OpenAIRealtimeWebRTC, -) - -logging.basicConfig(level=logging.WARNING) -utils_log = logging.getLogger("samples.concepts.realtime.utils") -utils_log.setLevel(logging.INFO) -logger = logging.getLogger(__name__) -logger.setLevel(logging.INFO) - -""" -This simple sample demonstrates how to use the OpenAI Realtime API to create -a chat bot that can listen and respond directly through audio. -It requires installing: -- semantic-kernel[realtime] -- pyaudio -- sounddevice -- pydub -e.g. pip install pyaudio sounddevice pydub semantic_kernel[realtime] - -For more details of the exact setup, see the README.md in the realtime folder. -""" - -# The characteristics of your speaker and microphone are a big factor in a smooth conversation -# so you may need to try out different devices for each. -# you can also play around with the turn_detection settings to get the best results. -# It has device id's set in the AudioRecorderStream and AudioPlayerAsync classes, -# so you may need to adjust these for your system. 
-# you can disable the check for available devices by commenting the line below -check_audio_devices() - - -async def main() -> None: - # create the realtime client and optionally add the audio output function, this is optional - # you can define the protocol to use, either "websocket" or "webrtc" - # they will behave the same way, even though the underlying protocol is quite different - realtime_client = OpenAIRealtimeWebRTC(audio_track=AudioRecorderWebRTC()) - # Create the settings for the session - settings = OpenAIRealtimeExecutionSettings( - instructions=""" - You are a chat bot. Your name is Mosscap and - you have one goal: figure out what people need. - Your full name, should you need to know it, is - Splendid Speckled Mosscap. You communicate - effectively, but you tend to answer with long - flowery prose. - """, - # there are different voices to choose from, since that list is bound to change, it is not checked beforehand, - # see https://platform.openai.com/docs/api-reference/realtime-sessions/create#realtime-sessions-create-voice - # for more details. - voice="alloy", - ) - audio_player = AudioPlayerWebRTC() - # the context manager calls the create_session method on the client and starts listening to the audio stream - async with audio_player, realtime_client(settings=settings, create_response=True): - async for event in realtime_client.receive(audio_output_callback=audio_player.client_callback): - match event.event_type: - case "text": - # the model returns both audio and transcript of the audio, which we will print - print(event.text.text, end="") - case "service": - # OpenAI Specific events - if event.service_type == ListenEvents.SESSION_UPDATED: - print("Session updated") - if event.service_type == ListenEvents.RESPONSE_CREATED: - print("\nMosscap (transcript): ", end="") - - -if __name__ == "__main__": - print( - "Instructions: The model will start speaking immediately," - "this can be turned off by removing `create_response=True` above." - "The model will detect when you stop and automatically generate a response. " - "Press ctrl + c to stop the program." - ) - asyncio.run(main()) diff --git a/python/samples/concepts/realtime/simple_realtime_chat_websocket.py b/python/samples/concepts/realtime/simple_realtime_chat_websocket.py deleted file mode 100644 index 4a374c46518f..000000000000 --- a/python/samples/concepts/realtime/simple_realtime_chat_websocket.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import logging - -from samples.concepts.realtime.utils import AudioPlayerWebsocket, AudioRecorderWebsocket, check_audio_devices -from semantic_kernel.connectors.ai.open_ai import ( - AzureRealtimeExecutionSettings, - AzureRealtimeWebsocket, - ListenEvents, -) -from semantic_kernel.contents import RealtimeAudioEvent, RealtimeTextEvent - -logging.basicConfig(level=logging.WARNING) -utils_log = logging.getLogger("samples.concepts.realtime.utils") -utils_log.setLevel(logging.INFO) -logger = logging.getLogger(__name__) -logger.setLevel(logging.INFO) - -""" -This simple sample demonstrates how to use the OpenAI Realtime API to create -a chat bot that can listen and respond directly through audio. -It requires installing: -- semantic-kernel[realtime] -- pyaudio -- sounddevice -- pydub -e.g. pip install pyaudio sounddevice pydub semantic_kernel[realtime] - -For more details of the exact setup, see the README.md in the realtime folder. 
-""" - -# The characterics of your speaker and microphone are a big factor in a smooth conversation -# so you may need to try out different devices for each. -# you can also play around with the turn_detection settings to get the best results. -# It has device id's set in the AudioRecorderStream and AudioPlayerAsync classes, -# so you may need to adjust these for your system. -# you can disable the check for available devices by commenting the line below -check_audio_devices() - - -async def main() -> None: - # create the realtime client and optionally add the audio output function, this is optional - # you can define the protocol to use, either "websocket" or "webrtc" - # they will behave the same way, even though the underlying protocol is quite different - realtime_client = AzureRealtimeWebsocket() - audio_player = AudioPlayerWebsocket() - audio_recorder = AudioRecorderWebsocket(realtime_client=realtime_client) - # Create the settings for the session - settings = AzureRealtimeExecutionSettings( - instructions=""" - You are a chat bot. Your name is Mosscap and - you have one goal: figure out what people need. - Your full name, should you need to know it, is - Splendid Speckled Mosscap. You communicate - effectively, but you tend to answer with long - flowery prose. - """, - # there are different voices to choose from, since that list is bound to change, it is not checked beforehand, - # see https://platform.openai.com/docs/api-reference/realtime-sessions/create#realtime-sessions-create-voice - # for more details. - voice="shimmer", - ) - # the context manager calls the create_session method on the client and starts listening to the audio stream - async with audio_player, audio_recorder, realtime_client(settings=settings, create_response=True): - async for event in realtime_client.receive(): - match event: - # this can be used as an alternative to the callback function used in other samples, - # the callback is faster and smoother - case RealtimeAudioEvent(): - await audio_player.add_audio(event.audio) - case RealtimeTextEvent(): - # the model returns both audio and transcript of the audio, which we will print - print(event.text.text, end="") - case _: - # OpenAI Specific events - if event.service_type == ListenEvents.SESSION_UPDATED: - print("Session updated") - if event.service_type == ListenEvents.RESPONSE_CREATED: - print("\nMosscap (transcript): ", end="") - - -if __name__ == "__main__": - print( - "Instructions: The model will start speaking immediately," - "this can be turned off by removing `create_response=True` above." - "The model will detect when you stop and automatically generate a response. " - "Press ctrl + c to stop the program." - ) - asyncio.run(main()) diff --git a/python/samples/concepts/realtime/utils.py b/python/samples/concepts/realtime/utils.py deleted file mode 100644 index b3056991d626..000000000000 --- a/python/samples/concepts/realtime/utils.py +++ /dev/null @@ -1,489 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio -import base64 -import logging -import threading -from typing import Any, ClassVar, Final, cast - -import numpy as np -import numpy.typing as npt -import sounddevice as sd -from aiortc.mediastreams import MediaStreamError, MediaStreamTrack -from av.audio.frame import AudioFrame -from av.frame import Frame -from pydantic import BaseModel, ConfigDict, PrivateAttr -from sounddevice import InputStream, OutputStream - -from semantic_kernel.connectors.ai.realtime_client_base import RealtimeClientBase -from semantic_kernel.contents import AudioContent -from semantic_kernel.contents.realtime_events import RealtimeAudioEvent - -logger = logging.getLogger(__name__) - -SAMPLE_RATE: Final[int] = 24000 -RECORDER_CHANNELS: Final[int] = 1 -PLAYER_CHANNELS: Final[int] = 1 -FRAME_DURATION: Final[int] = 100 -SAMPLE_RATE_WEBRTC: Final[int] = 48000 -RECORDER_CHANNELS_WEBRTC: Final[int] = 1 -PLAYER_CHANNELS_WEBRTC: Final[int] = 2 -FRAME_DURATION_WEBRTC: Final[int] = 20 -DTYPE: Final[npt.DTypeLike] = np.int16 - - -def check_audio_devices(): - logger.info(sd.query_devices()) - - -# region: Recorders - - -class AudioRecorderWebRTC(BaseModel, MediaStreamTrack): - """A simple class that implements the WebRTC MediaStreamTrack for audio from sounddevice. - - This class is meant as a demo sample and is not meant for production use. - """ - - model_config = ConfigDict(populate_by_name=True, arbitrary_types_allowed=True, validate_assignment=True) - - kind: ClassVar[str] = "audio" - device: str | int | None = None - sample_rate: int - channels: int - frame_duration: int - dtype: npt.DTypeLike = DTYPE - frame_size: int = 0 - _queue: asyncio.Queue[Frame] = PrivateAttr(default_factory=asyncio.Queue) - _is_recording: bool = False - _stream: InputStream | None = None - _recording_task: asyncio.Task | None = None - _loop: asyncio.AbstractEventLoop | None = None - _pts: int = 0 - - def __init__( - self, - *, - device: str | int | None = None, - sample_rate: int = SAMPLE_RATE_WEBRTC, - channels: int = RECORDER_CHANNELS_WEBRTC, - frame_duration: int = FRAME_DURATION_WEBRTC, - dtype: npt.DTypeLike = DTYPE, - ): - """A simple class that implements the WebRTC MediaStreamTrack for audio from sounddevice. - - Make sure the device is set to the correct device for your system. - - Args: - device: The device id to use for recording audio. - sample_rate: The sample rate for the audio. - channels: The number of channels for the audio. - frame_duration: The duration of each audio frame in milliseconds. - dtype: The data type for the audio. 
- """ - super().__init__(**{ - "device": device, - "sample_rate": sample_rate, - "channels": channels, - "frame_duration": frame_duration, - "dtype": dtype, - "frame_size": int(sample_rate * frame_duration / 1000), - }) - MediaStreamTrack.__init__(self) - - async def recv(self) -> Frame: - """Receive the next frame of audio data.""" - if not self._recording_task: - self._recording_task = asyncio.create_task(self.start_recording()) - - try: - frame = await self._queue.get() - self._queue.task_done() - return frame - except Exception as e: - logger.error(f"Error receiving audio frame: {e!s}") - raise MediaStreamError("Failed to receive audio frame") - - def _sounddevice_callback(self, indata: np.ndarray, frames: int, time: Any, status: Any) -> None: - if status: - logger.warning(f"Audio input status: {status}") - if self._loop and self._loop.is_running(): - asyncio.run_coroutine_threadsafe(self._queue.put(self._create_frame(indata)), self._loop) - - def _create_frame(self, indata: np.ndarray) -> Frame: - audio_data = indata.copy() - if audio_data.dtype != self.dtype: - audio_data = ( - (audio_data * 32767).astype(self.dtype) if self.dtype == np.int16 else audio_data.astype(self.dtype) - ) - frame = AudioFrame( - format="s16", - layout="mono", - samples=len(audio_data), - ) - frame.rate = self.sample_rate - frame.pts = self._pts - frame.planes[0].update(audio_data.tobytes()) - self._pts += len(audio_data) - return frame - - async def start_recording(self): - """Start recording audio from the input device.""" - if self._is_recording: - return - - self._is_recording = True - self._loop = asyncio.get_running_loop() - self._pts = 0 # Reset pts when starting recording - - try: - self._stream = InputStream( - device=self.device, - channels=self.channels, - samplerate=self.sample_rate, - dtype=self.dtype, - blocksize=self.frame_size, - callback=self._sounddevice_callback, - ) - self._stream.start() - - while self._is_recording: - await asyncio.sleep(0.1) - except asyncio.CancelledError: - logger.debug("Recording task was stopped.") - except KeyboardInterrupt: - logger.debug("Recording task was stopped.") - except Exception as e: - logger.error(f"Error in audio recording: {e!s}") - raise - finally: - self._is_recording = False - - -class AudioRecorderWebsocket(BaseModel): - """A simple class that implements a sounddevice for use with websockets. - - This class is meant as a demo sample and is not meant for production use. - """ - - model_config = ConfigDict(populate_by_name=True, arbitrary_types_allowed=True, validate_assignment=True) - - realtime_client: RealtimeClientBase - device: str | int | None = None - sample_rate: int - channels: int - frame_duration: int - dtype: npt.DTypeLike = DTYPE - frame_size: int = 0 - _stream: InputStream | None = None - _pts: int = 0 - _stream_task: asyncio.Task | None = None - - def __init__( - self, - *, - realtime_client: RealtimeClientBase, - device: str | int | None = None, - sample_rate: int = SAMPLE_RATE, - channels: int = RECORDER_CHANNELS, - frame_duration: int = FRAME_DURATION, - dtype: npt.DTypeLike = DTYPE, - ): - """A simple class that implements the WebRTC MediaStreamTrack for audio from sounddevice. - - Make sure the device is set to the correct device for your system. - - Args: - realtime_client: The RealtimeClientBase to use for streaming audio. - device: The device id to use for recording audio. - sample_rate: The sample rate for the audio. - channels: The number of channels for the audio. 
- frame_duration: The duration of each audio frame in milliseconds. - dtype: The data type for the audio. - **kwargs: Additional keyword arguments. - """ - super().__init__(**{ - "realtime_client": realtime_client, - "device": device, - "sample_rate": sample_rate, - "channels": channels, - "frame_duration": frame_duration, - "dtype": dtype, - "frame_size": int(sample_rate * frame_duration / 1000), - }) - - async def __aenter__(self): - """Stream audio data to a RealtimeClientBase.""" - if not self._stream_task: - self._stream_task = asyncio.create_task(self._start_stream()) - return self - - async def _start_stream(self): - self._pts = 0 # Reset pts when starting recording - self._stream = InputStream( - device=self.device, - channels=self.channels, - samplerate=self.sample_rate, - dtype=self.dtype, - blocksize=self.frame_size, - ) - self._stream.start() - try: - while True: - if self._stream.read_available < self.frame_size: - await asyncio.sleep(0) - continue - data, _ = self._stream.read(self.frame_size) - - await self.realtime_client.send( - RealtimeAudioEvent(audio=AudioContent(data=base64.b64encode(cast(Any, data)).decode("utf-8"))) - ) - - await asyncio.sleep(0) - except asyncio.CancelledError: - pass - - async def __aexit__(self, exc_type, exc, tb): - """Stop recording audio.""" - if self._stream_task: - self._stream_task.cancel() - await self._stream_task - if self._stream: - self._stream.stop() - self._stream.close() - - -# region: Players - - -class AudioPlayerWebRTC(BaseModel): - """Simple class that plays audio using sounddevice. - - This class is meant as a demo sample and is not meant for production use. - - Make sure the device_id is set to the correct device for your system. - - The sample rate, channels and frame duration - should be set to match the audio you - are receiving. - - Args: - device: The device id to use for playing audio. - sample_rate: The sample rate for the audio. - channels: The number of channels for the audio. - dtype: The data type for the audio. 
- frame_duration: The duration of each audio frame in milliseconds - - """ - - model_config = ConfigDict(populate_by_name=True, arbitrary_types_allowed=True, validate_assignment=True) - - device: int | None = None - sample_rate: int = SAMPLE_RATE_WEBRTC - channels: int = PLAYER_CHANNELS_WEBRTC - dtype: npt.DTypeLike = DTYPE - frame_duration: int = FRAME_DURATION_WEBRTC - _queue: asyncio.Queue[np.ndarray] | None = PrivateAttr(default=None) - _stream: OutputStream | None = PrivateAttr(default=None) - - async def __aenter__(self): - """Start the audio stream when entering a context.""" - self.start() - return self - - async def __aexit__(self, exc_type, exc, tb): - """Stop the audio stream when exiting a context.""" - self.stop() - - def start(self): - """Start the audio stream.""" - self._queue = asyncio.Queue() - self._stream = OutputStream( - callback=self._sounddevice_callback, - samplerate=self.sample_rate, - channels=self.channels, - dtype=self.dtype, - blocksize=int(self.sample_rate * self.frame_duration / 1000), - device=self.device, - ) - if self._stream and self._queue: - self._stream.start() - - def stop(self): - """Stop the audio stream.""" - if self._stream: - self._stream.stop() - self._stream = None - self._queue = None - - def _sounddevice_callback(self, outdata, frames, time, status): - """This callback is called by sounddevice when it needs more audio data to play.""" - if status: - logger.debug(f"Audio output status: {status}") - if self._queue: - if self._queue.empty(): - return - data = self._queue.get_nowait() - outdata[:] = data.reshape(outdata.shape) - self._queue.task_done() - else: - logger.error( - "Audio queue not initialized, make sure to call start before " - "using the player, or use the context manager." - ) - - async def client_callback(self, content: np.ndarray): - """This function can be passed to the audio_output_callback field of the RealtimeClientBase.""" - if self._queue: - await self._queue.put(content) - else: - logger.error( - "Audio queue not initialized, make sure to call start before " - "using the player, or use the context manager." - ) - - async def add_audio(self, audio_content: AudioContent) -> None: - """This function is used to add audio to the queue for playing. - - It first checks if there is a AudioFrame in the inner_content of the AudioContent. - If not, it checks if the data is a numpy array, bytes, or a string and converts it to a numpy array. - """ - if not self._queue: - logger.error( - "Audio queue not initialized, make sure to call start before " - "using the player, or use the context manager." - ) - return - if audio_content.inner_content and isinstance(audio_content.inner_content, AudioFrame): - await self._queue.put(audio_content.inner_content.to_ndarray()) - return - if isinstance(audio_content.data, np.ndarray): - await self._queue.put(audio_content.data) - return - if isinstance(audio_content.data, bytes): - await self._queue.put(np.frombuffer(audio_content.data, dtype=self.dtype)) - return - if isinstance(audio_content.data, str): - await self._queue.put(np.frombuffer(audio_content.data.encode(), dtype=self.dtype)) - return - logger.error(f"Unknown audio content: {audio_content}") - - -class AudioPlayerWebsocket(BaseModel): - """Simple class that plays audio using sounddevice. - - This class is meant as a demo sample and is not meant for production use. - - Make sure the device_id is set to the correct device for your system. 
- - The sample rate, channels and frame duration - should be set to match the audio you - are receiving. - - Args: - device: The device id to use for playing audio. - sample_rate: The sample rate for the audio. - channels: The number of channels for the audio. - dtype: The data type for the audio. - frame_duration: The duration of each audio frame in milliseconds - - """ - - model_config = ConfigDict(populate_by_name=True, arbitrary_types_allowed=True, validate_assignment=True) - - device: int | None = None - sample_rate: int = SAMPLE_RATE - channels: int = PLAYER_CHANNELS - dtype: npt.DTypeLike = DTYPE - frame_duration: int = FRAME_DURATION - _lock: Any = PrivateAttr(default_factory=threading.Lock) - _queue: list[np.ndarray] = PrivateAttr(default_factory=list) - _stream: OutputStream | None = PrivateAttr(default=None) - _frame_count: int = 0 - - async def __aenter__(self): - """Start the audio stream when entering a context.""" - self.start() - return self - - async def __aexit__(self, exc_type, exc, tb): - """Stop the audio stream when exiting a context.""" - self.stop() - - def start(self): - """Start the audio stream.""" - with self._lock: - self._queue = [] - self._stream = OutputStream( - callback=self._sounddevice_callback, - samplerate=self.sample_rate, - channels=self.channels, - dtype=self.dtype, - blocksize=int(self.sample_rate * self.frame_duration / 1000), - device=self.device, - ) - if self._stream: - self._stream.start() - - def stop(self): - """Stop the audio stream.""" - if self._stream: - self._stream.stop() - self._stream = None - with self._lock: - self._queue = [] - - def _sounddevice_callback(self, outdata, frames, time, status): - """This callback is called by sounddevice when it needs more audio data to play.""" - with self._lock: - if status: - logger.debug(f"Audio output status: {status}") - data = np.empty(0, dtype=np.int16) - - # get next item from queue if there is still space in the buffer - while len(data) < frames and len(self._queue) > 0: - item = self._queue.pop(0) - frames_needed = frames - len(data) - data = np.concatenate((data, item[:frames_needed])) - if len(item) > frames_needed: - self._queue.insert(0, item[frames_needed:]) - - self._frame_count += len(data) - - # fill the rest of the frames with zeros if there is no more data - if len(data) < frames: - data = np.concatenate((data, np.zeros(frames - len(data), dtype=np.int16))) - - outdata[:] = data.reshape(-1, 1) - - def reset_frame_count(self): - self._frame_count = 0 - - def get_frame_count(self): - return self._frame_count - - async def client_callback(self, content: np.ndarray): - """This function can be passed to the audio_output_callback field of the RealtimeClientBase.""" - with self._lock: - self._queue.append(content) - - async def add_audio(self, audio_content: AudioContent) -> None: - """This function is used to add audio to the queue for playing. - - It first checks if there is a AudioFrame in the inner_content of the AudioContent. - If not, it checks if the data is a numpy array, bytes, or a string and converts it to a numpy array. 
- """ - with self._lock: - if audio_content.inner_content and isinstance(audio_content.inner_content, AudioFrame): - self._queue.append(audio_content.inner_content.to_ndarray()) - return - if isinstance(audio_content.data, np.ndarray): - self._queue.append(audio_content.data) - return - if isinstance(audio_content.data, bytes): - self._queue.append(np.frombuffer(audio_content.data, dtype=self.dtype)) - return - if isinstance(audio_content.data, str): - self._queue.append(np.frombuffer(audio_content.data.encode(), dtype=self.dtype)) - return - logger.error(f"Unknown audio content: {audio_content}") diff --git a/python/samples/concepts/reasoning/simple_reasoning.py b/python/samples/concepts/reasoning/simple_reasoning.py index ce4c6d4b66fb..c423cf106a71 100644 --- a/python/samples/concepts/reasoning/simple_reasoning.py +++ b/python/samples/concepts/reasoning/simple_reasoning.py @@ -2,8 +2,11 @@ import asyncio -from semantic_kernel.connectors.ai.open_ai import ( - OpenAIChatCompletion, +from samples.concepts.setup.chat_completion_services import ( + Services, + get_chat_completion_service_and_request_settings, +) +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( OpenAIChatPromptExecutionSettings, ) from semantic_kernel.contents import ChatHistory @@ -56,25 +59,17 @@ Note: Unsupported features may be added in future updates. """ -chat_service = OpenAIChatCompletion(service_id="reasoning", instruction_role="developer") -# Set the reasoning effort to "medium" and the maximum completion tokens to 5000. -request_settings = OpenAIChatPromptExecutionSettings( - service_id="reasoning", max_completion_tokens=2000, reasoning_effort="medium" -) - - -# Create a ChatHistory object -chat_history = ChatHistory() +chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.OPENAI) # This is the system message that gives the chatbot its personality. developer_message = """ As an assistant supporting the user, -you recognize all user input -as questions or consultations and answer them. + you recognize all user input + as questions or consultations and answer them. """ -# The developer message was newly introduced for reasoning models such as OpenAI’s o1 and o1-mini. -# `system message` cannot be used with reasoning models. -chat_history.add_developer_message(developer_message) + +# Create a ChatHistory object +chat_history = ChatHistory() async def chat() -> bool: @@ -91,15 +86,25 @@ async def chat() -> bool: print("\n\nExiting chat...") return False + # The developer message was newly introduced for reasoning models such as OpenAI’s o1 and o1-mini. + # `system message` cannot be used with reasoning models. + chat_history.add_developer_message(developer_message) chat_history.add_user_message(user_input) + if not isinstance(request_settings, OpenAIChatPromptExecutionSettings): + raise ValueError("The OpenAI prompt execution settings are not supported for this sample.") + + # Set the reasoning effort to "medium" and the maximum completion tokens to 5000. + request_settings.max_completion_tokens = 5000 + request_settings.reasoning_effort = "medium" + # Get the chat message content from the chat completion service. 
- response = await chat_service.get_chat_message_content( + response = await chat_completion_service.get_chat_message_content( chat_history=chat_history, settings=request_settings, ) if response: - print(f"Reasoning model:> {response}") + print(f"Mosscap:> {response}") # Add the chat message to the chat history to keep track of the conversation. chat_history.add_message(response) diff --git a/python/samples/concepts/reasoning/simple_reasoning_function_calling.py b/python/samples/concepts/reasoning/simple_reasoning_function_calling.py index 63925ac7745d..0da02adacefe 100644 --- a/python/samples/concepts/reasoning/simple_reasoning_function_calling.py +++ b/python/samples/concepts/reasoning/simple_reasoning_function_calling.py @@ -1,14 +1,22 @@ # Copyright (c) Microsoft. All rights reserved. import asyncio -from collections.abc import Awaitable, Callable +from samples.concepts.setup.chat_completion_services import ( + Services, + get_chat_completion_service_and_request_settings, +) from semantic_kernel import Kernel -from semantic_kernel.connectors.ai import FunctionChoiceBehavior -from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion, OpenAIChatPromptExecutionSettings +from semantic_kernel.connectors.ai.function_calling_utils import ( + kernel_function_metadata_to_function_call_format, +) +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( + OpenAIChatPromptExecutionSettings, +) from semantic_kernel.contents import ChatHistory +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent from semantic_kernel.core_plugins.time_plugin import TimePlugin -from semantic_kernel.filters import AutoFunctionInvocationContext, FilterTypes """ # Reasoning Models Sample @@ -62,44 +70,25 @@ Note: Unsupported features may be added in future updates. """ - -chat_service = OpenAIChatCompletion(service_id="reasoning", instruction_role="developer") -# Set the reasoning effort to "medium" and the maximum completion tokens to 5000. -# also set the function_choice_behavior to auto and that includes auto invoking the functions. -request_settings = OpenAIChatPromptExecutionSettings( - service_id="reasoning", - max_completion_tokens=5000, - reasoning_effort="medium", - function_choice_behavior=FunctionChoiceBehavior.Auto(), +chat_completion_service, request_settings = get_chat_completion_service_and_request_settings( + Services.OPENAI, instruction_role="developer" ) - -# Create a ChatHistory object -# The reasoning models use developer instead of system, but because we set the instruction_role to developer, -# we can use the system message as the developer message. -chat_history = ChatHistory( - system_message=""" +# This is the system message that gives the chatbot its personality. +developer_message = """ As an assistant supporting the user, -you recognize all user input -as questions or consultations and answer them. + you recognize all user input + as questions or consultations and answer them. """ -) + +# Create a ChatHistory object +chat_history = ChatHistory() # Create a kernel and register plugin. 
kernel = Kernel() kernel.add_plugin(TimePlugin(), "time") -# add a simple filter to track the function call result -@kernel.filter(filter_type=FilterTypes.AUTO_FUNCTION_INVOCATION) -async def auto_function_invocation_filter( - context: AutoFunctionInvocationContext, next: Callable[[AutoFunctionInvocationContext], Awaitable[None]] -) -> None: - await next(context) - print("Tools:> FUNCTION CALL RESULT") - print(f" - time: {context.function_result}") - - async def chat() -> bool: try: user_input = input("User:> ") @@ -114,17 +103,78 @@ async def chat() -> bool: print("\n\nExiting chat...") return False + # The developer message was newly introduced for reasoning models such as OpenAI’s o1 and o1-mini. + # `system message` cannot be used with reasoning models. + chat_history.add_developer_message(developer_message) chat_history.add_user_message(user_input) + if not isinstance(request_settings, OpenAIChatPromptExecutionSettings): + raise ValueError(f"{type(request_settings).__name__} settings are not supported for this sample.") + + # Set the reasoning effort to "medium" and the maximum completion tokens to 5000. + request_settings.max_completion_tokens = 5000 + request_settings.reasoning_effort = "medium" + + # enable the function calling and disable parallel tool calls for reasoning models. + request_settings.parallel_tool_calls = None + request_settings.tool_choice = None + request_settings.tools = [ + kernel_function_metadata_to_function_call_format(f) for f in kernel.get_full_list_of_function_metadata() + ] + # Get the chat message content from the chat completion service. - response = await chat_service.get_chat_message_content( + response = await chat_completion_service.get_chat_message_content( chat_history=chat_history, settings=request_settings, kernel=kernel, ) + + if not response: + return True + + function_calls = [item for item in response.items if isinstance(item, FunctionCallContent)] + if len(function_calls) == 0: + print(f"Mosscap:> {response}") + chat_history.add_message(response) + return True + + # Invoke the function calls and update the chat history with the results. + print(f"processing {len(function_calls)} tool calls") + await asyncio.gather( + *[ + kernel.invoke_function_call( + function_call=function_call, + chat_history=chat_history, + function_call_count=len(function_calls), + request_index=0, + ) + for function_call in function_calls + ], + ) + + # Convert the last tool message to a user message. + fc_results = [item for item in chat_history.messages[-1].items if isinstance(item, FunctionResultContent)] + + result_prompt: list[str] = ["FUNCTION CALL RESULT"] + for fc_result in fc_results: + result_prompt.append(f"- {fc_result.plugin_name}: {fc_result.result}") + + chat_history.remove_message(chat_history.messages[-1]) + chat_history.add_user_message("\n".join(result_prompt)) + print("Tools:> ", "\n".join(result_prompt)) + + # Get the chat message content from the chat completion service. + request_settings.tools = None + response = await chat_completion_service.get_chat_message_content( + chat_history=chat_history, + settings=request_settings, + ) + + # Add the chat message to the chat history to keep track of the conversation. 
if response: print(f"Mosscap:> {response}") chat_history.add_message(response) + return True diff --git a/python/samples/concepts/resources/cat.jpg b/python/samples/concepts/resources/cat.jpg deleted file mode 100644 index 1e9f26de48fc542676a7461020206fab297c0314..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 37831 [base85-encoded binary patch data for the deleted cat.jpg omitted]
> None: - """Prints a string with the specified color.""" - print(color + f"{text}" + Colors.CEND, end=end) - - class Colors(str, Enum): CEND = "\33[0m" CBOLD = "\33[1m" diff --git a/python/samples/concepts/setup/ALL_SETTINGS.md b/python/samples/concepts/setup/ALL_SETTINGS.md index 4deba2610935..1f2536ad4738 100644 --- a/python/samples/concepts/setup/ALL_SETTINGS.md +++ b/python/samples/concepts/setup/ALL_SETTINGS.md @@ -1,6 +1,4 @@ -# Semantic Kernel Settings - -## AI Service Settings used across SK +## AI Service Settings used across SK: | Provider | Service | Constructor Settings | Environment Variable | Required? | Settings Class | | --- | --- | --- | --- | --- | --- | @@ -38,7 +36,7 @@ | Onnx | [OnnxGenAIChatCompletion](../../../semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_chat_completion.py) | template,
ai_model_path | N/A,
ONNX_GEN_AI_CHAT_MODEL_FOLDER | Yes,
Yes | [OnnxGenAISettings](../../../semantic_kernel/connectors/ai/onnx/onnx_gen_ai_settings.py) | | | [OnnxGenAITextCompletion](../../../semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_text_completion.py) | ai_model_path | ONNX_GEN_AI_TEXT_MODEL_FOLDER | Yes | | -## Memory Service Settings used across SK +## Memory Service Settings used across SK: | Provider | Service | Constructor Settings | Environment Variable | Required? | Settings Class | | --- | --- | --- | --- | --- | --- | @@ -51,7 +49,7 @@ | Redis | [RedisMemoryService](../../../semantic_kernel/connectors/memory/redis/redis_memory_store.py) | connection_string | REDIS_CONNECTION_STRING | Yes | [RedisSettings](../../../semantic_kernel/connectors/memory/redis/redis_settings.py) | | Weaviate | [WeaviateMemoryService](../../../semantic_kernel/connectors/memory/weaviate/weaviate_memory_store.py) | url,
api_key,
use_embed | WEAVIATE_URL,
WEAVIATE_API_KEY,
WEAVIATE_USE_EMBED | No,
No,
No | [WeaviateSettings](../../../semantic_kernel/connectors/memory/weaviate/weaviate_settings.py) | -## Other settings used +## Other settings used: | Provider | Service | Constructor Settings | Environment Variable | Required? | Settings Class | | --- | --- | --- | --- | --- | --- | diff --git a/python/samples/concepts/setup/chat_completion_services.py b/python/samples/concepts/setup/chat_completion_services.py index bd53042dc666..ee4d6d2dfa67 100644 --- a/python/samples/concepts/setup/chat_completion_services.py +++ b/python/samples/concepts/setup/chat_completion_services.py @@ -3,8 +3,6 @@ from enum import Enum from typing import TYPE_CHECKING -from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError - if TYPE_CHECKING: from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings @@ -27,7 +25,6 @@ class Services(str, Enum): OLLAMA = "ollama" ONNX = "onnx" VERTEX_AI = "vertex_ai" - DEEPSEEK = "deepseek" service_id = "default" @@ -42,8 +39,7 @@ def get_chat_completion_service_and_request_settings( Args: service_name (Services): The service name. instruction_role (str | None): The role to use for 'instruction' messages, for example, - 'system' or 'developer'. Defaults to 'system'. Currently only OpenAI reasoning models - support 'developer' role. + 'system' or 'developer'. Defaults to 'system'. Currently only supported for OpenAI reasoning models. """ # Use lambdas or functions to delay instantiation chat_services = { @@ -63,7 +59,6 @@ def get_chat_completion_service_and_request_settings( Services.OLLAMA: lambda: get_ollama_chat_completion_service_and_request_settings(), Services.ONNX: lambda: get_onnx_chat_completion_service_and_request_settings(), Services.VERTEX_AI: lambda: get_vertex_ai_chat_completion_service_and_request_settings(), - Services.DEEPSEEK: lambda: get_deepseek_chat_completion_service_and_request_settings(), } # Call the appropriate lambda or function based on the service name @@ -92,7 +87,10 @@ def get_openai_chat_completion_service_and_request_settings( Please refer to the Semantic Kernel Python documentation for more information: https://learn.microsoft.com/en-us/python/api/semantic-kernel/semantic_kernel?view=semantic-kernel-python """ - from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion, OpenAIChatPromptExecutionSettings + from semantic_kernel.connectors.ai.open_ai import ( + OpenAIChatCompletion, + OpenAIChatPromptExecutionSettings, + ) chat_service = OpenAIChatCompletion(service_id=service_id, instruction_role=instruction_role) request_settings = OpenAIChatPromptExecutionSettings( @@ -122,7 +120,10 @@ def get_azure_openai_chat_completion_service_and_request_settings( Please refer to the Semantic Kernel Python documentation for more information: https://learn.microsoft.com/en-us/python/api/semantic-kernel/semantic_kernel?view=semantic-kernel """ - from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion, AzureChatPromptExecutionSettings + from semantic_kernel.connectors.ai.open_ai import ( + AzureChatCompletion, + AzureChatPromptExecutionSettings, + ) chat_service = AzureChatCompletion(service_id=service_id, instruction_role=instruction_role) request_settings = AzureChatPromptExecutionSettings(service_id=service_id) @@ -151,19 +152,9 @@ def get_azure_ai_inference_chat_completion_service_and_request_settings( AzureAIInferenceChatPromptExecutionSettings, ) - # The AI model ID is used 
as an identifier for developers when they are using serverless endpoints - # on AI Foundry. It is not actually used to identify the model in the service as the endpoint points - # to only one model. - # When developers are using one endpoint that can route to multiple models, the `ai_model_id` will be - # used to identify the model. To use the latest routing feature on AI Foundry, please refer to the - # following documentation: - # https://learn.microsoft.com/en-us/azure/ai-services/multi-service-resource?%3Fcontext=%2Fazure%2Fai-services%2Fmodel-inference%2Fcontext%2Fcontext&pivots=azportal - # https://learn.microsoft.com/en-us/azure/ai-foundry/model-inference/how-to/configure-project-connection?pivots=ai-foundry-portal - # https://learn.microsoft.com/en-us/azure/ai-foundry/model-inference/how-to/inference?tabs=python - chat_service = AzureAIInferenceChatCompletion( service_id=service_id, - ai_model_id="id", + ai_model_id="id", # The model ID is simply an identifier as the model id cannot be obtained programmatically. instruction_role=instruction_role, ) request_settings = AzureAIInferenceChatPromptExecutionSettings(service_id=service_id) @@ -364,50 +355,3 @@ def get_vertex_ai_chat_completion_service_and_request_settings() -> tuple[ request_settings = VertexAIChatPromptExecutionSettings(service_id=service_id) return chat_service, request_settings - - -def get_deepseek_chat_completion_service_and_request_settings() -> tuple[ - "ChatCompletionClientBase", "PromptExecutionSettings" -]: - """Return DeepSeek chat completion service and request settings. - - The service credentials can be read by 3 ways: - 1. Via the constructor - 2. Via the environment variables - 3. Via an environment file - - The DeepSeek endpoint can be accessed via the OpenAI connector as the DeepSeek API is compatible with OpenAI API. - Set the `OPENAI_API_KEY` environment variable to the DeepSeek API key. - Set the `OPENAI_CHAT_MODEL_ID` environment variable to the DeepSeek model ID (deepseek-chat or deepseek-reasoner). - - The request settings control the behavior of the service. The default settings are sufficient to get started. - However, you can adjust the settings to suit your needs. - Note: Some of the settings are NOT meant to be set by the user. 
- Please refer to the Semantic Kernel Python documentation for more information: - https://learn.microsoft.com/en-us/python/api/semantic-kernel/semantic_kernel?view=semantic-kernel-python - """ - from openai import AsyncOpenAI - - from semantic_kernel.connectors.ai.open_ai import ( - OpenAIChatCompletion, - OpenAIChatPromptExecutionSettings, - OpenAISettings, - ) - - openai_settings = OpenAISettings.create() - if not openai_settings.api_key: - raise ServiceInitializationError("The DeepSeek API key is required.") - if not openai_settings.chat_model_id: - raise ServiceInitializationError("The DeepSeek model ID is required.") - - chat_service = OpenAIChatCompletion( - ai_model_id=openai_settings.chat_model_id, - service_id=service_id, - async_client=AsyncOpenAI( - api_key=openai_settings.api_key.get_secret_value(), - base_url="https://api.deepseek.com", - ), - ) - request_settings = OpenAIChatPromptExecutionSettings(service_id=service_id) - - return chat_service, request_settings diff --git a/python/samples/concepts/structured_outputs/json_structured_outputs.py b/python/samples/concepts/structured_outputs/json_structured_outputs.py index b1eacba11fd6..f6ea600cd56f 100644 --- a/python/samples/concepts/structured_outputs/json_structured_outputs.py +++ b/python/samples/concepts/structured_outputs/json_structured_outputs.py @@ -109,7 +109,7 @@ class Reasoning(KernelBaseModel): async def main(): - stream = False + stream = True if stream: answer = kernel.invoke_stream( chat_function, @@ -127,8 +127,7 @@ async def main(): chat_function, chat_history=history, ) - reasoned_result = Reasoning.model_validate_json(result.value[0].content) - print(f"Mosscap:> {reasoned_result}") + print(f"Mosscap:> {result}") history.add_assistant_message(str(result)) diff --git a/python/samples/demos/call_automation/.env.example b/python/samples/demos/call_automation/.env.example deleted file mode 100644 index 055528e2c2f3..000000000000 --- a/python/samples/demos/call_automation/.env.example +++ /dev/null @@ -1,8 +0,0 @@ -ACS_CONNECTION_STRING= -CALLBACK_URI_HOST= - -AZURE_OPENAI_SERVICE_ENDPOINT= -AZURE_OPENAI_DEPLOYMENT_MODEL_NAME= -AZURE_OPENAI_API_VERSION= - -AZURE_OPENAI_SERVICE_KEY= \ No newline at end of file diff --git a/python/samples/demos/call_automation/call_automation.py b/python/samples/demos/call_automation/call_automation.py deleted file mode 100755 index 2ea8058167d9..000000000000 --- a/python/samples/demos/call_automation/call_automation.py +++ /dev/null @@ -1,290 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -#################################################################### -# Sample Quart webapp with that connects to Azure OpenAI # -# Make sure to install `uv`, see: # -# https://docs.astral.sh/uv/getting-started/installation/ # -# and rename .env.example to .env and fill in the values. # -# Follow the guidance in README.md for more info. 
# -# To run the app, use: # -# `uv run --env-file .env call_automation.py` # -#################################################################### -# -# /// script -# requires-python = ">=3.10" -# dependencies = [ -# "Quart", -# "azure-eventgrid", -# "azure-communication-callautomation==1.4.0b1", -# "semantic-kernel[realtime]", -# ] -# /// - -import asyncio -import base64 -import os -import uuid -from datetime import datetime -from logging import INFO -from random import randint -from urllib.parse import urlencode, urlparse, urlunparse - -from azure.communication.callautomation import ( - AudioFormat, - MediaStreamingAudioChannelType, - MediaStreamingContentType, - MediaStreamingOptions, - MediaStreamingTransportType, -) -from azure.communication.callautomation.aio import CallAutomationClient -from azure.eventgrid import EventGridEvent, SystemEventNames -from numpy import ndarray -from quart import Quart, Response, json, request, websocket - -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai import FunctionChoiceBehavior -from semantic_kernel.connectors.ai.open_ai import ( - AzureRealtimeExecutionSettings, - AzureRealtimeWebsocket, - ListenEvents, -) -from semantic_kernel.connectors.ai.realtime_client_base import RealtimeClientBase -from semantic_kernel.contents import AudioContent, RealtimeAudioEvent -from semantic_kernel.functions import kernel_function - -# Callback events URI to handle callback events. -CALLBACK_URI_HOST = os.environ["CALLBACK_URI_HOST"] -CALLBACK_EVENTS_URI = CALLBACK_URI_HOST + "/api/callbacks" - -acs_client = CallAutomationClient.from_connection_string(os.environ["ACS_CONNECTION_STRING"]) -app = Quart(__name__) - -# region: Semantic Kernel - -kernel = Kernel() - - -class HelperPlugin: - """Helper plugin for the Semantic Kernel.""" - - @kernel_function - def get_weather(self, location: str) -> str: - """Get the weather for a location.""" - app.logger.info(f"@ Getting weather for {location}") - weather_conditions = ("sunny", "hot", "cloudy", "raining", "freezing", "snowing") - weather = weather_conditions[randint(0, len(weather_conditions) - 1)] # nosec - return f"The weather in {location} is {weather}." - - @kernel_function - def get_date_time(self) -> str: - """Get the current date and time.""" - app.logger.info("@ Getting current datetime") - return f"The current date and time is {datetime.now().isoformat()}." 
- - @kernel_function - async def goodbye(self): - """When the user is done, say goodbye and then call this function.""" - app.logger.info("@ Goodbye has been called!") - global call_connection_id - await acs_client.get_call_connection(call_connection_id).hang_up(is_for_everyone=True) - - -kernel.add_plugin(plugin=HelperPlugin(), plugin_name="helpers", description="Helper functions for the realtime client.") - -# region: Handlers for audio and data streams - - -async def from_realtime_to_acs(audio: ndarray): - """Function that forwards the audio from the model to the websocket of the ACS client.""" - await websocket.send( - json.dumps({"kind": "AudioData", "audioData": {"data": base64.b64encode(audio.tobytes()).decode("utf-8")}}) - ) - - -async def from_acs_to_realtime(client: RealtimeClientBase): - """Function that forwards the audio from the ACS client to the model.""" - while True: - try: - # Receive data from the ACS client - stream_data = await websocket.receive() - data = json.loads(stream_data) - if data["kind"] == "AudioData": - # send it to the Realtime service - await client.send( - event=RealtimeAudioEvent( - audio=AudioContent(data=data["audioData"]["data"], data_format="base64", inner_content=data), - ) - ) - except Exception: - app.logger.info("Websocket connection closed.") - break - - -async def handle_realtime_messages(client: RealtimeClientBase): - """Function that handles the messages from the Realtime service. - - This function only handles the non-audio messages. - Audio is done through the callback so that it is faster and smoother. - """ - async for event in client.receive(audio_output_callback=from_realtime_to_acs): - match event.service_type: - case ListenEvents.SESSION_CREATED: - print("Session Created Message") - print(f" Session Id: {event.service_event.session.id}") - case ListenEvents.ERROR: - print(f" Error: {event.service_event.error}") - case ListenEvents.INPUT_AUDIO_BUFFER_CLEARED: - print("Input Audio Buffer Cleared Message") - case ListenEvents.INPUT_AUDIO_BUFFER_SPEECH_STARTED: - print(f"Voice activity detection started at {event.service_event.audio_start_ms} [ms]") - await websocket.send(json.dumps({"Kind": "StopAudio", "AudioData": None, "StopAudio": {}})) - - case ListenEvents.CONVERSATION_ITEM_INPUT_AUDIO_TRANSCRIPTION_COMPLETED: - print(f" User:-- {event.service_event.transcript}") - case ListenEvents.CONVERSATION_ITEM_INPUT_AUDIO_TRANSCRIPTION_FAILED: - print(f" Error: {event.service_event.error}") - case ListenEvents.RESPONSE_DONE: - print("Response Done Message") - print(f" Response Id: {event.service_event.response.id}") - if event.service_event.response.status_details: - print(f" Status Details: {event.service_event.response.status_details.model_dump_json()}") - case ListenEvents.RESPONSE_AUDIO_TRANSCRIPT_DONE: - print(f" AI:-- {event.service_event.transcript}") - - -# region: Routes - - -# WebSocket. -@app.websocket("/ws") -async def ws(): - app.logger.info("Client connected to WebSocket") - - # create the client, using the audio callback - client = AzureRealtimeWebsocket() - settings = AzureRealtimeExecutionSettings( - instructions="""You are a chat bot. Your name is Mosscap and - you have one goal: figure out what people need. - Your full name, should you need to know it, is - Splendid Speckled Mosscap. 
You communicate - effectively, but you tend to answer with long - flowery prose.""", - turn_detection={"type": "server_vad"}, - voice="shimmer", - input_audio_format="pcm16", - output_audio_format="pcm16", - input_audio_transcription={"model": "whisper-1"}, - function_choice_behavior=FunctionChoiceBehavior.Auto(), - ) - - # create the realtime client session - async with client(settings=settings, create_response=True, kernel=kernel): - # start handling the messages from the realtime client - # and allow the callback to be used to forward the audio to the acs client - receive_task = asyncio.create_task(handle_realtime_messages(client)) - # receive messages from the ACS client and send them to the realtime client - await from_acs_to_realtime(client) - receive_task.cancel() - - -@app.route("/api/incomingCall", methods=["POST"]) -async def incoming_call_handler() -> Response: - app.logger.info("incoming event data") - for event_dict in await request.json: - event = EventGridEvent.from_dict(event_dict) - app.logger.info("incoming event data --> %s", event.data) - - if event.event_type == SystemEventNames.EventGridSubscriptionValidationEventName: - app.logger.info("Validating subscription") - validation_code = event.data["validationCode"] - validation_response = {"validationResponse": validation_code} - return Response(response=json.dumps(validation_response), status=200) - - if event.event_type == "Microsoft.Communication.IncomingCall": - app.logger.info("Incoming call received: data=%s", event.data) - caller_id = ( - event.data["from"]["phoneNumber"]["value"] - if event.data["from"]["kind"] == "phoneNumber" - else event.data["from"]["rawId"] - ) - app.logger.info("incoming call handler caller id: %s", caller_id) - incoming_call_context = event.data["incomingCallContext"] - guid = uuid.uuid4() - query_parameters = urlencode({"callerId": caller_id}) - callback_uri = f"{CALLBACK_EVENTS_URI}/{guid}?{query_parameters}" - - parsed_url = urlparse(CALLBACK_EVENTS_URI) - websocket_url = urlunparse(("wss", parsed_url.netloc, "/ws", "", "", "")) - - app.logger.info("callback url: %s", callback_uri) - app.logger.info("websocket url: %s", websocket_url) - - media_streaming_options = MediaStreamingOptions( - transport_url=websocket_url, - transport_type=MediaStreamingTransportType.WEBSOCKET, - content_type=MediaStreamingContentType.AUDIO, - audio_channel_type=MediaStreamingAudioChannelType.MIXED, - start_media_streaming=True, - enable_bidirectional=True, - audio_format=AudioFormat.PCM24_K_MONO, - ) - answer_call_result = await acs_client.answer_call( - incoming_call_context=incoming_call_context, - operation_context="incomingCall", - callback_url=callback_uri, - media_streaming=media_streaming_options, - ) - app.logger.info("Answered call for connection id: %s", answer_call_result.call_connection_id) - return Response(status=200) - return Response(status=200) - - -@app.route("/api/callbacks/", methods=["POST"]) -async def callbacks(contextId): - for event in await request.json: - # Parsing callback events - global call_connection_id - event_data = event["data"] - call_connection_id = event_data["callConnectionId"] - app.logger.info( - f"Received Event:-> {event['type']}, Correlation Id:-> {event_data['correlationId']}, CallConnectionId:-> {call_connection_id}" # noqa: E501 - ) - match event["type"]: - case "Microsoft.Communication.CallConnected": - call_connection_properties = await acs_client.get_call_connection( - call_connection_id - ).get_call_properties() - media_streaming_subscription = 
call_connection_properties.media_streaming_subscription - app.logger.info(f"MediaStreamingSubscription:--> {media_streaming_subscription}") - app.logger.info(f"Received CallConnected event for connection id: {call_connection_id}") - app.logger.info("CORRELATION ID:--> %s", event_data["correlationId"]) - app.logger.info("CALL CONNECTION ID:--> %s", event_data["callConnectionId"]) - case "Microsoft.Communication.MediaStreamingStarted" | "Microsoft.Communication.MediaStreamingStopped": - app.logger.info(f"Media streaming content type:--> {event_data['mediaStreamingUpdate']['contentType']}") - app.logger.info( - f"Media streaming status:--> {event_data['mediaStreamingUpdate']['mediaStreamingStatus']}" - ) - app.logger.info( - f"Media streaming status details:--> {event_data['mediaStreamingUpdate']['mediaStreamingStatusDetails']}" # noqa: E501 - ) - case "Microsoft.Communication.MediaStreamingFailed": - app.logger.info( - f"Code:->{event_data['resultInformation']['code']}, Subcode:-> {event_data['resultInformation']['subCode']}" # noqa: E501 - ) - app.logger.info(f"Message:->{event_data['resultInformation']['message']}") - case "Microsoft.Communication.CallDisconnected": - pass - return Response(status=200) - - -@app.route("/") -def home(): - return "Hello SKxACS CallAutomation!" - - -# region: Main - - -if __name__ == "__main__": - app.logger.setLevel(INFO) - app.run(port=8080) diff --git a/python/samples/demos/call_automation/readme.md b/python/samples/demos/call_automation/readme.md deleted file mode 100644 index ca69b39e0a3b..000000000000 --- a/python/samples/demos/call_automation/readme.md +++ /dev/null @@ -1,53 +0,0 @@ -# Call Automation - Quick Start Sample - -This is a sample application. It highlights an integration of Azure Communication Services with Semantic Kernel, using the Azure OpenAI Service to enable intelligent conversational agents. - -Original code for this sample can be found [here](https://github.com/Azure-Samples/communication-services-python-quickstarts/tree/main/callautomation-openai-sample). - -## Prerequisites - -- An Azure account with an active subscription. [Create an account for free](https://azure.microsoft.com/free/?WT.mc_id=A261C142F). -- A deployed Communication Services resource. [Create a Communication Services resource](https://docs.microsoft.com/azure/communication-services/quickstarts/create-communication-resource). -- A [phone number](https://learn.microsoft.com/en-us/azure/communication-services/quickstarts/telephony/get-phone-number) in your Azure Communication Services resource that can get inbound calls. NB: phone numbers are not available in free subscriptions. -- [Python](https://www.python.org/downloads/) 3.9 or above. -- An Azure OpenAI Resource and Deployed Model. See [instructions](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource?pivots=web-portal). -- Install `uv`, see [the uv docs](https://docs.astral.sh/uv/getting-started/installation/). - -## To run the app - -1. Open an instance of PowerShell, Windows Terminal, Command Prompt or equivalent and navigate to the directory that you would like to clone the sample to. -2. git clone `https://github.com/microsoft/semantic-kernel.git`. -3. Navigate to `python/samples/demos/call_automation` folder - -### Setup and host your Azure DevTunnel - -[Azure DevTunnels](https://learn.microsoft.com/en-us/azure/developer/dev-tunnels/overview) is an Azure service that enables you to share local web services hosted on the internet. 
Use the commands below to connect your local development environment to the public internet. This creates a tunnel with a persistent endpoint URL and which allows anonymous access. We will then use this endpoint to notify your application of calling events from the ACS Call Automation service. - -```bash -devtunnel create --allow-anonymous -devtunnel port create -p 8080 -devtunnel host -``` - -### Configuring application - -Copy the `.env.example` file to `.env` and update the following values: - -1. `ACS_CONNECTION_STRING`: Azure Communication Service resource's connection string. -2. `CALLBACK_URI_HOST`: Base url of the app. (For local development use the dev tunnel url from the step above) -3. `AZURE_OPENAI_ENDPOINT`: Azure Open AI service endpoint -4. `AZURE_OPENAI_DEPLOYMENT_MODEL_NAME`: Azure Open AI deployment name -5. `AZURE_OPENAI_API_VERSION`: Azure Open AI API version, this should be one that includes the realtime api, for instance '2024-10-01-preview' -6. `AZURE_OPENAI_API_KEY`: Azure Open AI API key, optionally, you can also use Entra Auth. - -## Run the app - -1. Navigate to `call_automation` folder and do one of the following to start the main application: - - run `call_automation.py` in debug mode from your IDE (VSCode will load your .env variables into the environment automatically, other IDE's might need an extra step). - - execute `uv run --env-file .env call_automation.py` directly in your terminal (this uses `uv`, which will then install the requirements in a temporary virtual environment, see [uv docs](https://docs.astral.sh/uv/guides/scripts) for more info). -2. Browser should pop up with a simple page. If not navigate it to `http://localhost:8080/` or your dev tunnel url. -3. Register an EventGrid Webhook for the IncomingCall(`https:///api/incomingCall`) event that points to your devtunnel URI. Instructions [here](https://learn.microsoft.com/en-us/azure/communication-services/concepts/call-automation/incoming-call-notification). - -Once that's completed you should have a running application. The way to test this is to place a call to your ACS phone number and talk to your intelligent agent! - -In the terminal you should see all sorts of logs from both ACS and Semantic Kernel. diff --git a/python/samples/demos/document_generator/GENERATED_DOCUMENT.md b/python/samples/demos/document_generator/GENERATED_DOCUMENT.md deleted file mode 100644 index 1d67e6dcf800..000000000000 --- a/python/samples/demos/document_generator/GENERATED_DOCUMENT.md +++ /dev/null @@ -1,58 +0,0 @@ -### Understanding Semantic Kernel AI Connectors - -AI Connectors in Semantic Kernel are components that facilitate communication between the Kernel's core functionalities and various AI services. They abstract the intricate details of service-specific protocols, allowing developers to seamlessly interact with AI services for tasks like text generation, chat interactions, and more. - -### Using AI Connectors in Semantic Kernel - -Developers utilize AI connectors to connect their applications to different AI services efficiently. The connectors manage the requests and responses, providing a streamlined way to leverage the power of these AI services without needing to handle the specific communication protocols each service requires. - -### Creating Custom AI Connectors in Semantic Kernel - -To create a custom AI connector in Semantic Kernel, one must extend the base classes provided, such as `ChatCompletionClientBase` and `AIServiceClientBase`. 
Below is a guide and example for implementing a mock AI connector: - -#### Step-by-Step Walkthrough - -1. **Understand the Base Classes**: The foundational classes `ChatCompletionClientBase` and `AIServiceClientBase` provide necessary methods and structures for creating chat-based AI connectors. - -2. **Implementing the Connector**: Here's a mock implementation example illustrating how to implement a connector without real service dependencies, ensuring compatibility with Pydantic's expectations within the framework: - -```python -from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase - -class MockAIChatCompletionService(ChatCompletionClientBase): - def __init__(self, ai_model_id: str): - super().__init__(ai_model_id=ai_model_id) - - async def _inner_get_chat_message_contents(self, chat_history, settings): - # Mock implementation: returns dummy chat message content for demonstration. - return [{"role": "assistant", "content": "Mock response based on your history."}] - - def service_url(self): - return "http://mock-ai-service.com" -``` - -### Usage Example - -The following example demonstrates how to integrate and use the `MockAIChatCompletionService` in an application: - -```python -import asyncio -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings - -async def main(): - chat_history = ChatHistory(messages=[{"role": "user", "content": "Hello"}]) - settings = PromptExecutionSettings(model="mock-model") - - service = MockAIChatCompletionService(ai_model_id="mock-model") - - response = await service.get_chat_message_contents(chat_history, settings) - print(response) - -# Run the main function -asyncio.run(main()) -``` - -### Conclusion - -By following the revised guide and understanding the base class functionalities, developers can effectively create custom connectors within Semantic Kernel. This structured approach enhances integration with various AI services while ensuring alignment with the framework's architectural expectations. Custom connectors offer flexibility, allowing developers to adjust implementations to meet specific service needs, such as additional logging, authentication, or modifications tailored to specific protocols. This guide provides a strong foundation upon which more complex and service-specific extensions can be built, promoting robust and scalable AI service integration. \ No newline at end of file diff --git a/python/samples/demos/document_generator/README.md b/python/samples/demos/document_generator/README.md deleted file mode 100644 index 45bba5069d99..000000000000 --- a/python/samples/demos/document_generator/README.md +++ /dev/null @@ -1,105 +0,0 @@ -# Document Generator - -This sample app demonstrates how to create technical documents for a codebase using AI. More specifically, it uses the agent framework offered by **Semantic Kernel** to ochestrate multiple agents to create a technical document. - -This sample app also provides telemetry to monitor the agents, making it easier to observe the inner workings of the agents. - -To learn more about agents, please refer to this introduction [video](https://learn.microsoft.com/en-us/shows/generative-ai-for-beginners/ai-agents-generative-ai-for-beginners). 
-To learn more about the Semantic Kernel Agent Framework, please refer to the [Semantic Kernel documentation](https://learn.microsoft.com/en-us/semantic-kernel/frameworks/agent/agent-architecture?pivots=programming-language-python).
-
-> Note: This sample app cannot guarantee a perfect technical document every time due to the stochastic nature of the AI model. Please see a version of the document generated by the app in [GENERATED_DOCUMENT.md](GENERATED_DOCUMENT.md).
-
-## Design
-
-### Tools/Plugins
-
-- **Code Execution Plugin**: This plugin offers a sandbox environment to execute Python snippets. It returns the output of the program, or errors if any occur.
-- **Repository File Plugin**: This plugin allows the AI to retrieve files from the Semantic Kernel repository.
-- **User Input Plugin**: This plugin allows the AI to present content to the user and receive feedback.
-
-### Agents
-
-- **Content Creation Agent**: This agent is responsible for creating the content of the document. It has access to the **Repository File Plugin** to read source files it deems necessary for reference.
-- **Code Validation Agent**: This agent is responsible for validating the code snippets in the document. It has access to the **Code Execution Plugin** to execute the code snippets.
-- **User Agent**: This agent is responsible for interacting with the user. It has access to the **User Input Plugin** to present content to the user and receive feedback.
-
-### Agent Selection Strategy
-
-### Termination Strategy
-
-## Prerequisites
-
-1. Azure OpenAI
-2. Azure Application Insights
-
-## Additional packages
-
-- `AICodeSandbox` - for executing AI-generated code in a sandbox environment
-
-  ```bash
-  pip install ai-code-sandbox
-  ```
-
-  > You must also have `docker` installed and running on your machine. Follow the instructions [here](https://docs.docker.com/get-started/introduction/get-docker-desktop/) to install Docker for your platform. Images will be pulled at runtime if not already present. Containers will be created and destroyed during code execution.
-
-## Running the app
-
-### Step 1: Set up the environment
-
-Make sure you have the following environment variables set:
-
-```env
-OPENAI_CHAT_MODEL_ID=
-OPENAI_API_KEY=
-```
-
-> gpt-4o-2024-08-06 was used to generate [GENERATED_DOCUMENT.md](GENERATED_DOCUMENT.md).
-> Feel free to use other models from OpenAI or other providers. When you use models from another provider, make sure to update the chat completion services accordingly.
-
-### Step 2: Run the app
-
-```bash
-python ./main.py
-```
-
-Expected output:
-
-```bash
-==== ContentCreationAgent just responded ====
-==== CodeValidationAgent just responded ====
-==== ContentCreationAgent just responded ====
-...
-```
-
-## Customization
-
-Since this is a sample app that demonstrates the creation of a technical document on Semantic Kernel AI connectors, you can customize the app to suit your needs. You can try different tasks, add more agents, tune existing agents, change the agent selection strategy, or modify the termination strategy.
-
-- To try a different task, modify the `TASK` prompt in `main.py`.
-- To add more agents, create a new agent under `agents/` and add it to the `agents` list in `main.py` (see the sketch below).
-- To tune existing agents, modify the `INSTRUCTION` prompt in the agent's source code.
-- To change the agent selection strategy, modify `custom_selection_strategy.py`.
-- To change the termination strategy, modify `custom_termination_strategy.py`.
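The customization notes above mention adding new agents. As a rough, hypothetical illustration (not part of the sample itself), the sketch below shows what a new agent could look like if it follows the same `CustomAgentBase` pattern used by the agents in this demo; the `ProofreadingAgent` name, its instructions, and the plugin choice are assumptions made up for this example.

```python
# Hypothetical sketch only: a new agent modeled on the existing agents in this sample.
# The ProofreadingAgent name and its instructions are illustrative assumptions.
from collections.abc import AsyncIterable
from typing import Any

from samples.demos.document_generator.agents.custom_agent_base import CustomAgentBase
from samples.demos.document_generator.plugins.repo_file_plugin import RepoFilePlugin
from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior
from semantic_kernel.contents.chat_history import ChatHistory
from semantic_kernel.contents.chat_message_content import ChatMessageContent
from semantic_kernel.functions.kernel_arguments import KernelArguments

INSTRUCTION = """
You are a proofreading agent in a collaborative document creation chat.
Review the latest draft for grammar, clarity, and consistency, and summarize any issues you find.
"""

DESCRIPTION = """
Select me to proofread the latest document draft.
"""


class ProofreadingAgent(CustomAgentBase):
    def __init__(self):
        # Reuse the base helper to build a kernel with the shared chat completion service.
        kernel = self._create_kernel()
        kernel.add_plugin(plugin=RepoFilePlugin(), plugin_name="RepoFilePlugin")

        settings = kernel.get_prompt_execution_settings_from_service_id(service_id=CustomAgentBase.SERVICE_ID)
        settings.function_choice_behavior = FunctionChoiceBehavior.Auto()

        super().__init__(
            service_id=CustomAgentBase.SERVICE_ID,
            kernel=kernel,
            arguments=KernelArguments(settings=settings),
            name="ProofreadingAgent",
            instructions=INSTRUCTION.strip(),
            description=DESCRIPTION.strip(),
        )

    async def invoke(
        self,
        history: ChatHistory,
        arguments: KernelArguments | None = None,
        kernel=None,
        **kwargs: Any,
    ) -> AsyncIterable[ChatMessageContent]:
        # Nudge the model toward this agent's task before delegating to the base implementation.
        cloned_history = history.model_copy(deep=True)
        cloned_history.add_user_message("Now proofread the latest document draft and summarize any issues.")

        async for response_message in super().invoke(cloned_history, arguments=arguments, kernel=kernel, **kwargs):
            yield response_message
```

Under the same assumptions, the new agent would then be appended to the `agents` list in `main.py` so the selection strategy can route turns to it.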
- -## Optional: Monitoring the agents - -When you see the final document generated by the app, what you see is actually the creation of multiple agents working together. You may wonder, how did the agents work together to create the document? What was the sequence of actions taken by the agents? How did the agents interact with each other? To answer these questions, you need to **observe** the agents. - -Semantic Kernel by default instruments all the LLM calls. However, for agents there is no default instrumentation. This sample app shows how one can extend the Semantic Kernel agent to add instrumentation. - -> There are currently no standards on what information needs to be captured for agents as the concept of agents is still relatively new. At the time of writing, the Semantic Convention for agents is still in the draft stage: - -To monitor the agents, set the following environment variables: - -```env -AZURE_APP_INSIGHTS_CONNECTION_STRING= - -SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS=true -SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS_SENSITIVE=true -``` - -Follow this guide to inspect the telemetry data: - -Or follow this guide to visualize the telemetry data on Azure AI Foundry: diff --git a/python/samples/demos/document_generator/agents/code_validation_agent.py b/python/samples/demos/document_generator/agents/code_validation_agent.py deleted file mode 100644 index c85da09476d1..000000000000 --- a/python/samples/demos/document_generator/agents/code_validation_agent.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import sys -from collections.abc import AsyncIterable -from typing import TYPE_CHECKING, Any - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from samples.demos.document_generator.agents.custom_agent_base import CustomAgentBase -from samples.demos.document_generator.plugins.code_execution_plugin import CodeExecutionPlugin -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.functions.kernel_arguments import KernelArguments - -if TYPE_CHECKING: - from semantic_kernel.kernel import Kernel - -INSTRUCTION = """ -You are a code validation agent in a collaborative document creation chat. - -Your task is to validate Python code in the latest document draft and summarize any errors. -Follow the instructions in the document to assemble the code snippets into a single Python script. -If the snippets in the document are from multiple scripts, you need to modify them to work together as a single script. -Execute the code to validate it. If there are errors, summarize the error messages. - -Do not try to fix the errors. -""" - -DESCRIPTION = """ -Select me to validate the Python code in the latest document draft. 
-""" - - -class CodeValidationAgent(CustomAgentBase): - def __init__(self): - kernel = self._create_kernel() - kernel.add_plugin(plugin=CodeExecutionPlugin(), plugin_name="CodeExecutionPlugin") - - settings = kernel.get_prompt_execution_settings_from_service_id(service_id=CustomAgentBase.SERVICE_ID) - settings.function_choice_behavior = FunctionChoiceBehavior.Auto(maximum_auto_invoke_attempts=1) - - super().__init__( - service_id=CustomAgentBase.SERVICE_ID, - kernel=kernel, - arguments=KernelArguments(settings=settings), - name="CodeValidationAgent", - instructions=INSTRUCTION.strip(), - description=DESCRIPTION.strip(), - ) - - @override - async def invoke( - self, - history: ChatHistory, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - **kwargs: Any, - ) -> AsyncIterable[ChatMessageContent]: - cloned_history = history.model_copy(deep=True) - cloned_history.add_user_message( - "Now validate the Python code in the latest document draft and summarize any errors." - ) - - async for response_message in super().invoke(cloned_history, arguments=arguments, kernel=kernel, **kwargs): - yield response_message diff --git a/python/samples/demos/document_generator/agents/content_creation_agent.py b/python/samples/demos/document_generator/agents/content_creation_agent.py deleted file mode 100644 index 44dbcbea25bf..000000000000 --- a/python/samples/demos/document_generator/agents/content_creation_agent.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import sys -from collections.abc import AsyncIterable -from typing import TYPE_CHECKING, Any - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from samples.demos.document_generator.agents.custom_agent_base import CustomAgentBase -from samples.demos.document_generator.plugins.repo_file_plugin import RepoFilePlugin -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.functions.kernel_arguments import KernelArguments - -if TYPE_CHECKING: - from semantic_kernel.kernel import Kernel - -INSTRUCTION = """ -You are part of a chat with multiple agents focused on creating technical content. - -Your task is to generate informative and engaging technical content, -including code snippets to explain concepts or demonstrate features. -Incorporate feedback by providing the updated full content with changes. -""" - -DESCRIPTION = """ -Select me to generate new content or to revise existing content. 
-""" - - -class ContentCreationAgent(CustomAgentBase): - def __init__(self): - kernel = self._create_kernel() - kernel.add_plugin(plugin=RepoFilePlugin(), plugin_name="RepoFilePlugin") - - settings = kernel.get_prompt_execution_settings_from_service_id(service_id=CustomAgentBase.SERVICE_ID) - settings.function_choice_behavior = FunctionChoiceBehavior.Auto() - - super().__init__( - service_id=CustomAgentBase.SERVICE_ID, - kernel=kernel, - arguments=KernelArguments(settings=settings), - name="ContentCreationAgent", - instructions=INSTRUCTION.strip(), - description=DESCRIPTION.strip(), - ) - - @override - async def invoke( - self, - history: ChatHistory, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - **kwargs: Any, - ) -> AsyncIterable[ChatMessageContent]: - cloned_history = history.model_copy(deep=True) - cloned_history.add_user_message("Now generate new content or revise existing content to incorporate feedback.") - - async for response_message in super().invoke(cloned_history, arguments=arguments, kernel=kernel, **kwargs): - yield response_message diff --git a/python/samples/demos/document_generator/agents/custom_agent_base.py b/python/samples/demos/document_generator/agents/custom_agent_base.py deleted file mode 100644 index f8900d319a75..000000000000 --- a/python/samples/demos/document_generator/agents/custom_agent_base.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import sys -from abc import ABC -from collections.abc import AsyncIterable -from typing import Any, ClassVar - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from semantic_kernel.agents.chat_completion.chat_completion_agent import ChatCompletionAgent -from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.kernel import Kernel - - -class CustomAgentBase(ChatCompletionAgent, ABC): - SERVICE_ID: ClassVar[str] = "chat_completion" - - def _create_kernel(self) -> Kernel: - kernel = Kernel() - kernel.add_service(OpenAIChatCompletion(service_id=self.SERVICE_ID)) - - return kernel - - @override - async def invoke( - self, - history: ChatHistory, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - **kwargs: Any, - ) -> AsyncIterable[ChatMessageContent]: - # Since the history contains internal messages from other agents, - # we will do our best to filter out those. Unfortunately, there will - # be a side effect of losing the context of the conversation internal - # to the agent when the conversation is handed back to the agent, i.e. - # previous function call results. - filtered_chat_history = ChatHistory() - for message in history: - content = message.content - # We don't want to add messages whose text content is empty. - # Those messages are likely messages from function calls and function results. 
- if content: - filtered_chat_history.add_message(message) - - async for response in super().invoke(filtered_chat_history, arguments=arguments, kernel=kernel, **kwargs): - yield response diff --git a/python/samples/demos/document_generator/agents/user_agent.py b/python/samples/demos/document_generator/agents/user_agent.py deleted file mode 100644 index 43fd66ade0af..000000000000 --- a/python/samples/demos/document_generator/agents/user_agent.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import sys -from collections.abc import AsyncIterable -from typing import TYPE_CHECKING, Any - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from samples.demos.document_generator.agents.custom_agent_base import CustomAgentBase -from samples.demos.document_generator.plugins.user_plugin import UserPlugin -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.functions.kernel_arguments import KernelArguments - -if TYPE_CHECKING: - from semantic_kernel.kernel import Kernel - -INSTRUCTION = """ -You are part of a chat with multiple agents working on a document. - -Your task is to summarize the user's feedback on the latest draft from the author agent. -Present the draft to the user and summarize their feedback. - -Do not try to address the user's feedback in this chat. -""" - -DESCRIPTION = """ -Select me if you want to ask the user to review the latest draft for publication. -""" - - -class UserAgent(CustomAgentBase): - def __init__(self): - kernel = self._create_kernel() - kernel.add_plugin(plugin=UserPlugin(), plugin_name="UserPlugin") - - settings = kernel.get_prompt_execution_settings_from_service_id(service_id=CustomAgentBase.SERVICE_ID) - settings.function_choice_behavior = FunctionChoiceBehavior.Auto(maximum_auto_invoke_attempts=1) - - super().__init__( - service_id=CustomAgentBase.SERVICE_ID, - kernel=kernel, - arguments=KernelArguments(settings=settings), - name="UserAgent", - instructions=INSTRUCTION.strip(), - description=DESCRIPTION.strip(), - ) - - @override - async def invoke( - self, - history: ChatHistory, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - **kwargs: Any, - ) -> AsyncIterable[ChatMessageContent]: - cloned_history = history.model_copy(deep=True) - cloned_history.add_user_message( - "Now present the latest draft to the user for feedback and summarize their feedback." - ) - - async for response_message in super().invoke(cloned_history, arguments=arguments, kernel=kernel, **kwargs): - yield response_message diff --git a/python/samples/demos/document_generator/custom_selection_strategy.py b/python/samples/demos/document_generator/custom_selection_strategy.py deleted file mode 100644 index 20b01b807979..000000000000 --- a/python/samples/demos/document_generator/custom_selection_strategy.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from typing import TYPE_CHECKING, ClassVar - -from opentelemetry import trace -from pydantic import Field - -from semantic_kernel.agents.strategies.selection.selection_strategy import SelectionStrategy -from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase -from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( - AzureChatPromptExecutionSettings, -) -from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.utils.feature_stage_decorator import experimental - -if TYPE_CHECKING: - from semantic_kernel.agents import Agent - from semantic_kernel.contents.chat_message_content import ChatMessageContent - -NEWLINE = "\n" - - -@experimental -class CustomSelectionStrategy(SelectionStrategy): - """A selection strategy that selects the next agent intelligently.""" - - NUM_OF_RETRIES: ClassVar[int] = 3 - - chat_completion_service: ChatCompletionClientBase = Field(default_factory=lambda: OpenAIChatCompletion()) - - async def next(self, agents: list["Agent"], history: list["ChatMessageContent"]) -> "Agent": - """Select the next agent to interact with. - - Args: - agents: The list of agents to select from. - history: The history of messages in the conversation. - - Returns: - The next agent to interact with. - """ - if len(agents) == 0: - raise ValueError("No agents to select from") - - tracer = trace.get_tracer(__name__) - with tracer.start_as_current_span("selection_strategy"): - chat_history = ChatHistory(system_message=self.get_system_message(agents).strip()) - - for message in history: - content = message.content - # We don't want to add messages whose text content is empty. - # Those messages are likely messages from function calls and function results. - if content: - chat_history.add_message(message) - - chat_history.add_user_message("Now follow the rules and select the next agent by typing the agent's index.") - - for _ in range(self.NUM_OF_RETRIES): - completion = await self.chat_completion_service.get_chat_message_content( - chat_history, - AzureChatPromptExecutionSettings(), - ) - - if completion is None: - continue - - try: - return agents[int(completion.content)] - except ValueError as ex: - chat_history.add_message(completion) - chat_history.add_user_message(str(ex)) - chat_history.add_user_message(f"You must only say a number between 0 and {len(agents) - 1}.") - - raise ValueError("Failed to select an agent since the model did not return a valid index") - - def get_system_message(self, agents: list["Agent"]) -> str: - return f""" -You are in a multi-agent chat to create a document. -Each message in the chat history contains the agent's name and the message content. - -Initially, the chat history may be empty. - -Here are the agents with their indices, names, and descriptions: -{NEWLINE.join(f"[{index}] {agent.name}:{NEWLINE}{agent.description}" for index, agent in enumerate(agents))} - -Your task is to select the next agent based on the conversation history. - -The conversation must follow these steps: -1. The content creation agent writes a draft. -2. The code validation agent checks the code in the draft. -3. The content creation agent updates the draft based on the feedback. -4. The code validation agent checks the updated code. -... -If the code validation agent approves the code, the user agent can ask the user for final feedback. 
-N: The user agent provides feedback. -(If the feedback is not positive, the conversation goes back to the content creation agent.) - -Respond with a single number between 0 and {len(agents) - 1}, representing the agent's index. -Only return the index as an integer. -""" diff --git a/python/samples/demos/document_generator/custom_termination_strategy.py b/python/samples/demos/document_generator/custom_termination_strategy.py deleted file mode 100644 index ffecdaea95e3..000000000000 --- a/python/samples/demos/document_generator/custom_termination_strategy.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from typing import TYPE_CHECKING, ClassVar - -from opentelemetry import trace -from pydantic import Field - -from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy -from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase -from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( - AzureChatPromptExecutionSettings, -) -from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion -from semantic_kernel.contents.chat_history import ChatHistory - -if TYPE_CHECKING: - from semantic_kernel.agents.agent import Agent - from semantic_kernel.contents.chat_message_content import ChatMessageContent - - -TERMINATE_TRUE_KEYWORD = "yes" -TERMINATE_FALSE_KEYWORD = "no" - -NEWLINE = "\n" - - -class CustomTerminationStrategy(TerminationStrategy): - NUM_OF_RETRIES: ClassVar[int] = 3 - - maximum_iterations: int = 20 - chat_completion_service: ChatCompletionClientBase = Field(default_factory=lambda: OpenAIChatCompletion()) - - async def should_agent_terminate(self, agent: "Agent", history: list["ChatMessageContent"]) -> bool: - """Check if the agent should terminate. - - Args: - agent: The agent to check. - history: The history of messages in the conversation. - """ - tracer = trace.get_tracer(__name__) - with tracer.start_as_current_span("terminate_strategy"): - chat_history = ChatHistory(system_message=self.get_system_message().strip()) - - for message in history: - content = message.content - # We don't want to add messages whose text content is empty. - # Those messages are likely messages from function calls and function results. - if content: - chat_history.add_message(message) - - chat_history.add_user_message( - "Is the latest content approved by all agents? " - f"Answer with '{TERMINATE_TRUE_KEYWORD}' or '{TERMINATE_FALSE_KEYWORD}'." - ) - - for _ in range(self.NUM_OF_RETRIES): - completion = await self.chat_completion_service.get_chat_message_content( - chat_history, - AzureChatPromptExecutionSettings(), - ) - - if not completion: - continue - - if TERMINATE_FALSE_KEYWORD in completion.content.lower(): - return False - if TERMINATE_TRUE_KEYWORD in completion.content.lower(): - return True - - chat_history.add_message(completion) - chat_history.add_user_message( - f"You must only say either '{TERMINATE_TRUE_KEYWORD}' or '{TERMINATE_FALSE_KEYWORD}'." - ) - - raise ValueError( - "Failed to determine if the agent should terminate because the model did not return a valid response." - ) - - def get_system_message(self) -> str: - return f""" -You are in a chat with multiple agents collaborating to create a document. -Each message in the chat history contains the agent's name and the message content. - -The chat history may start empty as no agents have spoken yet. 
- -Here are the agents with their indices, names, and descriptions: -{NEWLINE.join(f"[{index}] {agent.name}:{NEWLINE}{agent.description}" for index, agent in enumerate(self.agents))} - -Your task is NOT to continue the conversation. Determine if the latest content is approved by all agents. -If approved, say "{TERMINATE_TRUE_KEYWORD}". Otherwise, say "{TERMINATE_FALSE_KEYWORD}". -""" diff --git a/python/samples/demos/document_generator/main.py b/python/samples/demos/document_generator/main.py deleted file mode 100644 index 5ba38353b69f..000000000000 --- a/python/samples/demos/document_generator/main.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import logging -import os - -from dotenv import load_dotenv -from opentelemetry import trace -from opentelemetry.sdk.resources import Resource -from opentelemetry.semconv.resource import ResourceAttributes - -from samples.demos.document_generator.agents.code_validation_agent import CodeValidationAgent -from samples.demos.document_generator.agents.content_creation_agent import ContentCreationAgent -from samples.demos.document_generator.agents.user_agent import UserAgent -from samples.demos.document_generator.custom_selection_strategy import CustomSelectionStrategy -from samples.demos.document_generator.custom_termination_strategy import CustomTerminationStrategy -from semantic_kernel.agents.group_chat.agent_group_chat import AgentGroupChat -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole - -TASK = """ -Create a blog post to share technical details about the Semantic Kernel AI connectors. -The content of the blog post should include the following: -1. What are AI connectors in Semantic Kernel? -2. How do people use AI connectors in Semantic Kernel? -3. How do devs create custom AI connectors in Semantic Kernel? - - Include a walk through of creating a custom AI connector. - The connector may not connect to a real service, but should demonstrate the process. - - Include a sample on how to use the connector. - - If a reader follows the walk through and the sample, they should be able to run the connector. - - -Here is the file that contains the source code for the base class of the AI connectors: -semantic_kernel/connectors/ai/chat_completion_client_base.py -semantic_kernel/services/ai_service_client_base.py - -Here are some files containing the source code that may be useful: -semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py -semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion_base.py -semantic_kernel/contents/chat_history.py - -If you want to reference the implementations of other AI connectors, you can find them under the following directory: -semantic_kernel/connectors/ai -""" - -load_dotenv() -AZURE_APP_INSIGHTS_CONNECTION_STRING = os.getenv("AZURE_APP_INSIGHTS_CONNECTION_STRING") - -resource = Resource.create({ResourceAttributes.SERVICE_NAME: "Document Generator"}) - - -def set_up_tracing(): - from azure.monitor.opentelemetry.exporter import AzureMonitorTraceExporter - from opentelemetry.sdk.trace import TracerProvider - from opentelemetry.sdk.trace.export import BatchSpanProcessor - from opentelemetry.trace import set_tracer_provider - - # Initialize a trace provider for the application. This is a factory for creating tracers. 
- tracer_provider = TracerProvider(resource=resource) - tracer_provider.add_span_processor( - BatchSpanProcessor(AzureMonitorTraceExporter(connection_string=AZURE_APP_INSIGHTS_CONNECTION_STRING)) - ) - # Sets the global default tracer provider - set_tracer_provider(tracer_provider) - - -def set_up_logging(): - from azure.monitor.opentelemetry.exporter import AzureMonitorLogExporter - from opentelemetry._logs import set_logger_provider - from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler - from opentelemetry.sdk._logs.export import BatchLogRecordProcessor - - # Create and set a global logger provider for the application. - logger_provider = LoggerProvider(resource=resource) - logger_provider.add_log_record_processor( - BatchLogRecordProcessor(AzureMonitorLogExporter(connection_string=AZURE_APP_INSIGHTS_CONNECTION_STRING)) - ) - # Sets the global default logger provider - set_logger_provider(logger_provider) - - # Create a logging handler to write logging records, in OTLP format, to the exporter. - handler = LoggingHandler() - # Attach the handler to the root logger. `getLogger()` with no arguments returns the root logger. - # Events from all child loggers will be processed by this handler. - logger = logging.getLogger() - logger.addHandler(handler) - logger.setLevel(logging.INFO) - - -async def main(): - if AZURE_APP_INSIGHTS_CONNECTION_STRING: - set_up_tracing() - set_up_logging() - - tracer = trace.get_tracer(__name__) - with tracer.start_as_current_span("main"): - agents = [ - ContentCreationAgent(), - UserAgent(), - CodeValidationAgent(), - ] - - group_chat = AgentGroupChat( - agents=agents, - termination_strategy=CustomTerminationStrategy(agents=agents), - selection_strategy=CustomSelectionStrategy(), - ) - await group_chat.add_chat_message( - ChatMessageContent( - role=AuthorRole.USER, - content=TASK.strip(), - ) - ) - - async for response in group_chat.invoke(): - print(f"==== {response.name} just responded ====") - # print(response.content) - - content_history: list[ChatMessageContent] = [] - async for message in group_chat.get_chat_messages(agent=agents[0]): - if message.name == agents[0].name: - # The chat history contains responses from other agents. - content_history.append(message) - # The chat history is in descending order. - print("Final content:") - print(content_history[0].content) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/demos/document_generator/plugins/code_execution_plugin.py b/python/samples/demos/document_generator/plugins/code_execution_plugin.py deleted file mode 100644 index 863532d24a61..000000000000 --- a/python/samples/demos/document_generator/plugins/code_execution_plugin.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from typing import Annotated - -from ai_code_sandbox import AICodeSandbox - -from semantic_kernel.functions import kernel_function - - -class CodeExecutionPlugin: - """A plugin that runs Python code snippets.""" - - @kernel_function(description="Run a Python code snippet. 
You can assume all the necessary packages are installed.") - def run( - self, code: Annotated[str, "The Python code snippet."] - ) -> Annotated[str, "Returns the output of the code."]: - """Run a Python code snippet.""" - sandbox: AICodeSandbox = AICodeSandbox( - custom_image="python:3.12-slim", - packages=["semantic_kernel"], - ) - - try: - return sandbox.run_code(code) - finally: - sandbox.close() diff --git a/python/samples/demos/document_generator/plugins/repo_file_plugin.py b/python/samples/demos/document_generator/plugins/repo_file_plugin.py deleted file mode 100644 index 9391394fb4de..000000000000 --- a/python/samples/demos/document_generator/plugins/repo_file_plugin.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import os -from typing import Annotated - -from semantic_kernel.functions import kernel_function - - -class RepoFilePlugin: - """A plugin that reads files from this repository. - - This plugin assumes that the code is run within the Semantic Kernel repository. - """ - - @kernel_function(description="Read a file given a relative path to the root of the repository.") - def read_file_by_path( - self, path: Annotated[str, "The relative path to the file."] - ) -> Annotated[str, "Returns the file content."]: - path = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..", path) - - try: - with open(path) as file: - return file.read() - except FileNotFoundError: - raise FileNotFoundError(f"File {path} not found in repository.") - - @kernel_function( - description="Read a file given the name of the file. Function will search for the file in the repository." - ) - def read_file_by_name( - self, file_name: Annotated[str, "The name of the file."] - ) -> Annotated[str, "Returns the file content."]: - path = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..") - for root, dirs, files in os.walk(path): - if file_name in files: - print(f"Found file {file_name} in {root}.") - with open(os.path.join(root, file_name)) as file: - return file.read() - raise FileNotFoundError(f"File {file_name} not found in repository.") - - @kernel_function(description="List all files or subdirectories in a directory.") - def list_directory( - self, path: Annotated[str, "Path of a directory relative to the root of the repository."] - ) -> Annotated[str, "Returns a list of files and subdirectories as a string."]: - path = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..", path) - try: - files = os.listdir(path) - # Join the list of files into a single string - return "\n".join(files) - except FileNotFoundError: - raise FileNotFoundError(f"Directory {path} not found in repository.") diff --git a/python/samples/demos/document_generator/plugins/user_plugin.py b/python/samples/demos/document_generator/plugins/user_plugin.py deleted file mode 100644 index 3891e40a06ff..000000000000 --- a/python/samples/demos/document_generator/plugins/user_plugin.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from typing import Annotated - -from semantic_kernel.functions import kernel_function - - -class UserPlugin: - """A plugin that interacts with the user.""" - - @kernel_function(description="Present the content to user and request feedback.") - def request_user_feedback( - self, content: Annotated[str, "The content to present and request feedback on."] - ) -> Annotated[str, "The feedback provided by the user."]: - """Request user feedback on the content.""" - return input(f"Please provide feedback on the content:\n\n{content}\n\n> ") diff --git a/python/samples/demos/guided_conversations/guided_conversation/plugins/agenda.py b/python/samples/demos/guided_conversations/guided_conversation/plugins/agenda.py index e963b0cad46a..a74b897dcb9d 100644 --- a/python/samples/demos/guided_conversations/guided_conversation/plugins/agenda.py +++ b/python/samples/demos/guided_conversations/guided_conversation/plugins/agenda.py @@ -152,7 +152,7 @@ def get_agenda_for_prompt(self) -> str: return "None" agenda_str = "\n".join( [ - f"{i + 1}. [{format_resource(item['resource'], ResourceConstraintUnit.TURNS)}] {item['title']}" + f"{i+1}. [{format_resource(item['resource'], ResourceConstraintUnit.TURNS)}] {item['title']}" for i, item in enumerate(agenda_items) ] ) diff --git a/python/samples/demos/guided_conversations/guided_conversation/utils/resources.py b/python/samples/demos/guided_conversations/guided_conversation/utils/resources.py index 581c1f0a18cd..14cf65431911 100644 --- a/python/samples/demos/guided_conversations/guided_conversation/utils/resources.py +++ b/python/samples/demos/guided_conversations/guided_conversation/utils/resources.py @@ -205,7 +205,7 @@ def get_resource_instructions(self) -> tuple[str, str]: resource_instructions = "" if self.resource_constraint.mode == ResourceConstraintMode.EXACT: - exact_mode_instructions = f"""There {"are" if is_plural_remaining else "is"} {formatted_remaining_resource} remaining (including this one) - the conversation will automatically terminate when 0 turns are left. \ + exact_mode_instructions = f"""There {'are' if is_plural_remaining else 'is'} {formatted_remaining_resource} remaining (including this one) - the conversation will automatically terminate when 0 turns are left. \ You should continue the conversation until it is automatically terminated. This means you should NOT preemptively end the conversation, \ either explicitly (by selecting the "End conversation" action) or implicitly (e.g. by telling the user that you have all required information and they should wait for the next step). \ Your goal is not to maximize efficiency (i.e. 
complete the artifact as quickly as possible then end the conversation), but rather to make the best use of ALL remaining turns available to you""" diff --git a/python/samples/demos/process_with_dapr/fastapi_app.py b/python/samples/demos/process_with_dapr/fastapi_app.py index 56880e041c05..263356a8bcea 100644 --- a/python/samples/demos/process_with_dapr/fastapi_app.py +++ b/python/samples/demos/process_with_dapr/fastapi_app.py @@ -11,7 +11,10 @@ from samples.demos.process_with_dapr.process.process import get_process from samples.demos.process_with_dapr.process.steps import CommonEvents from semantic_kernel import Kernel -from semantic_kernel.processes.dapr_runtime import register_fastapi_dapr_actors, start +from semantic_kernel.processes.dapr_runtime import ( + register_fastapi_dapr_actors, + start, +) logging.basicConfig(level=logging.ERROR) @@ -31,16 +34,12 @@ # and returns the actor instance with the kernel injected. # ######################################################################### -# Get the process which means we have the `KernelProcess` object -# along with any defined step factories -process = get_process() - # Define a lifespan method that registers the actors with the Dapr runtime @asynccontextmanager async def lifespan(app: FastAPI): print("## actor startup ##") - await register_fastapi_dapr_actors(actor, kernel, process.factories) + await register_fastapi_dapr_actors(actor, kernel) yield @@ -57,6 +56,8 @@ async def healthcheck(): @app.get("/processes/{process_id}") async def start_process(process_id: str): try: + process = get_process() + _ = await start( process=process, kernel=kernel, diff --git a/python/samples/demos/process_with_dapr/process/process.py b/python/samples/demos/process_with_dapr/process/process.py index 1c81459b3c21..e6741a85f116 100644 --- a/python/samples/demos/process_with_dapr/process/process.py +++ b/python/samples/demos/process_with_dapr/process/process.py @@ -2,15 +2,7 @@ from typing import TYPE_CHECKING -from samples.demos.process_with_dapr.process.steps import ( - AStep, - BStep, - CommonEvents, - CStep, - CStepState, - KickOffStep, - bstep_factory, -) +from samples.demos.process_with_dapr.process.steps import AStep, BStep, CommonEvents, CStep, CStepState, KickOffStep from semantic_kernel.processes import ProcessBuilder if TYPE_CHECKING: @@ -24,7 +16,7 @@ def get_process() -> "KernelProcess": # Add the step types to the builder kickoff_step = process.add_step(step_type=KickOffStep) myAStep = process.add_step(step_type=AStep) - myBStep = process.add_step(step_type=BStep, factory_function=bstep_factory) + myBStep = process.add_step(step_type=BStep) # Initialize the CStep with an initial state and the state's current cycle set to 1 myCStep = process.add_step(step_type=CStep, initial_state=CStepState(current_cycle=1)) diff --git a/python/samples/demos/process_with_dapr/process/steps.py b/python/samples/demos/process_with_dapr/process/steps.py index d2a6313db402..083a2e78e9bb 100644 --- a/python/samples/demos/process_with_dapr/process/steps.py +++ b/python/samples/demos/process_with_dapr/process/steps.py @@ -6,11 +6,7 @@ from pydantic import Field -from semantic_kernel.agents.chat_completion.chat_completion_agent import ChatCompletionAgent -from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion -from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.functions import kernel_function -from semantic_kernel.kernel import Kernel from semantic_kernel.kernel_pydantic import 
KernelBaseModel from semantic_kernel.processes.kernel_process import ( KernelProcessStep, @@ -56,43 +52,14 @@ async def do_it(self, context: KernelProcessStepContext): await context.emit_event(process_event=CommonEvents.AStepDone, data="I did A") -# Define a simple factory for the BStep that can create the dependency that the BStep requires -# As an example, this factory creates a kernel and adds an `AzureChatCompletion` service to it. -async def bstep_factory(): - """Creates a BStep instance with ephemeral references like ChatCompletionAgent.""" - kernel = Kernel() - kernel.add_service(AzureChatCompletion()) - - agent = ChatCompletionAgent(kernel=kernel, name="echo", instructions="repeat the input back") - step_instance = BStep() - step_instance.agent = agent - - return step_instance - - +# Define a sample `BStep` step that will emit an event after 2 seconds. +# The event will be sent to the `CStep` step with the data `I did B`. class BStep(KernelProcessStep): - """A sample BStep that optionally holds a ChatCompletionAgent. - - By design, the agent is ephemeral (not stored in state). - """ - - # Ephemeral references won't be persisted to Dapr - # because we do not place them in a step state model. - # We'll set this in the factory function: - agent: ChatCompletionAgent | None = None - - @kernel_function(name="do_it") + @kernel_function() async def do_it(self, context: KernelProcessStepContext): - print("##### BStep ran (do_it).") + print("##### BStep ran.") await asyncio.sleep(2) - - if self.agent: - history = ChatHistory() - history.add_user_message("Hello from BStep!") - async for msg in self.agent.invoke(history): - print(f"BStep got agent response: {msg.content}") - - await context.emit_event(process_event="BStepDone", data="I did B") + await context.emit_event(process_event=CommonEvents.BStepDone, data="I did B") # Define a sample `CStepState` that will keep track of the current cycle. 
diff --git a/python/samples/getting_started/05-using-the-planner.ipynb b/python/samples/getting_started/05-using-the-planner.ipynb index ba1cf7cf3a3d..dcc7330795c7 100644 --- a/python/samples/getting_started/05-using-the-planner.ipynb +++ b/python/samples/getting_started/05-using-the-planner.ipynb @@ -435,7 +435,7 @@ " Description: EmailPlugin provides a set of functions to send emails.\n", "\n", " Usage:\n", - " kernel.add_plugin(EmailPlugin(), plugin_name=\"email\")\n", + " kernel.import_plugin_from_object(EmailPlugin(), plugin_name=\"email\")\n", "\n", " Examples:\n", " {{email.SendEmail}} => Sends an email with the provided subject and body.\n", @@ -581,7 +581,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.7" + "version": "3.12.6" } }, "nbformat": 4, diff --git a/python/samples/getting_started/third_party/postgres-memory.ipynb b/python/samples/getting_started/third_party/postgres-memory.ipynb index 51ea600109e3..b0069a59a1c7 100644 --- a/python/samples/getting_started/third_party/postgres-memory.ipynb +++ b/python/samples/getting_started/third_party/postgres-memory.ipynb @@ -28,30 +28,21 @@ "import numpy as np\n", "import requests\n", "\n", - "from semantic_kernel import Kernel\n", - "from semantic_kernel.connectors.ai import FunctionChoiceBehavior\n", - "from semantic_kernel.connectors.ai.open_ai import (\n", - " AzureChatCompletion,\n", - " AzureChatPromptExecutionSettings,\n", - " AzureTextEmbedding,\n", + "from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import (\n", " OpenAIEmbeddingPromptExecutionSettings,\n", - " OpenAITextEmbedding,\n", ")\n", - "from semantic_kernel.connectors.memory.postgres import PostgresCollection\n", - "from semantic_kernel.contents import ChatHistory\n", - "from semantic_kernel.data import (\n", - " DistanceFunction,\n", - " IndexKind,\n", - " VectorSearchOptions,\n", + "from semantic_kernel.connectors.ai.open_ai.services.azure_text_embedding import AzureTextEmbedding\n", + "from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding import OpenAITextEmbedding\n", + "from semantic_kernel.connectors.memory.postgres.postgres_collection import PostgresCollection\n", + "from semantic_kernel.data.const import DistanceFunction, IndexKind\n", + "from semantic_kernel.data.vector_store_model_decorator import vectorstoremodel\n", + "from semantic_kernel.data.vector_store_record_fields import (\n", " VectorStoreRecordDataField,\n", " VectorStoreRecordKeyField,\n", - " VectorStoreRecordUtils,\n", " VectorStoreRecordVectorField,\n", - " VectorStoreTextSearch,\n", - " vectorstoremodel,\n", ")\n", - "from semantic_kernel.functions import KernelParameterMetadata\n", - "from semantic_kernel.functions.kernel_arguments import KernelArguments" + "from semantic_kernel.data.vector_store_record_utils import VectorStoreRecordUtils\n", + "from semantic_kernel.kernel import Kernel" ] }, { @@ -64,8 +55,6 @@ "\n", "To do this, copy the `.env.example` file to `.env` and fill in the necessary information.\n", "\n", - "__Note__: If you're using VSCode to execute the notebook, the settings in `.env` in the root of the repository will be picked up automatically.\n", - "\n", "### Postgres configuration\n", "\n", "You'll need to provide a connection string to a Postgres database. You can use a local Postgres instance, or a cloud-hosted one.\n", @@ -127,18 +116,21 @@ "# -- ArXiv settings --\n", "\n", "# The search term to use when searching for papers on arXiv. 
All metadata fields for the papers are searched.\n", - "SEARCH_TERM = \"RAG\"\n", + "SEARCH_TERM = \"generative ai\"\n", "\n", "# The category of papers to search for on arXiv. See https://arxiv.org/category_taxonomy for a list of categories.\n", "ARVIX_CATEGORY = \"cs.AI\"\n", "\n", "# The maximum number of papers to search for on arXiv.\n", - "MAX_RESULTS = 300\n", + "MAX_RESULTS = 10\n", "\n", "# -- OpenAI settings --\n", "\n", "# Set this flag to False to use the OpenAI API instead of Azure OpenAI\n", - "USE_AZURE_OPENAI = True" + "USE_AZURE_OPENAI = True\n", + "\n", + "# The name of the OpenAI model or Azure OpenAI deployment to use\n", + "EMBEDDING_MODEL = \"text-embedding-3-small\"" ] }, { @@ -170,7 +162,7 @@ " embedding_settings={\"embedding\": OpenAIEmbeddingPromptExecutionSettings(dimensions=1536)},\n", " index_kind=IndexKind.HNSW,\n", " dimensions=1536,\n", - " distance_function=DistanceFunction.COSINE_DISTANCE,\n", + " distance_function=DistanceFunction.COSINE,\n", " property_type=\"float\",\n", " serialize_function=np.ndarray.tolist,\n", " deserialize_function=np.array,\n", @@ -248,17 +240,9 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Found 300 papers on 'RAG'\n" - ] - } - ], + "outputs": [], "source": [ "arxiv_papers: list[ArxivPaper] = [\n", " ArxivPaper.from_arxiv_info(paper)\n", @@ -282,7 +266,7 @@ "outputs": [], "source": [ "collection = PostgresCollection[str, ArxivPaper](\n", - " collection_name=\"arxiv_records\", data_model_type=ArxivPaper, env_file_path=env_file_path\n", + " collection_name=\"arxiv_papers\", data_model_type=ArxivPaper, env_file_path=env_file_path\n", ")" ] }, @@ -301,9 +285,13 @@ "source": [ "kernel = Kernel()\n", "if USE_AZURE_OPENAI:\n", - " text_embedding = AzureTextEmbedding(service_id=\"embedding\", env_file_path=env_file_path)\n", + " text_embedding = AzureTextEmbedding(\n", + " service_id=\"embedding\", deployment_name=EMBEDDING_MODEL, env_file_path=env_file_path\n", + " )\n", "else:\n", - " text_embedding = OpenAITextEmbedding(service_id=\"embedding\", env_file_path=env_file_path)\n", + " text_embedding = OpenAITextEmbedding(\n", + " service_id=\"embedding\", ai_model_id=EMBEDDING_MODEL, env_file_path=env_file_path\n", + " )\n", "\n", "kernel.add_service(text_embedding)" ] @@ -353,92 +341,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "# Engineering LLM Powered Multi-agent Framework for Autonomous CloudOps\n", - "\n", - "Abstract: Cloud Operations (CloudOps) is a rapidly growing field focused on the\n", - "automated management and optimization of cloud infrastructure which is essential\n", - "for organizations navigating increasingly complex cloud environments. MontyCloud\n", - "Inc. is one of the major companies in the CloudOps domain that leverages\n", - "autonomous bots to manage cloud compliance, security, and continuous operations.\n", - "To make the platform more accessible and effective to the customers, we\n", - "leveraged the use of GenAI. Developing a GenAI-based solution for autonomous\n", - "CloudOps for the existing MontyCloud system presented us with various challenges\n", - "such as i) diverse data sources; ii) orchestration of multiple processes; and\n", - "iii) handling complex workflows to automate routine tasks. 
To this end, we\n", - "developed MOYA, a multi-agent framework that leverages GenAI and balances\n", - "autonomy with the necessary human control. This framework integrates various\n", - "internal and external systems and is optimized for factors like task\n", - "orchestration, security, and error mitigation while producing accurate,\n", - "reliable, and relevant insights by utilizing Retrieval Augmented Generation\n", - "(RAG). Evaluations of our multi-agent system with the help of practitioners as\n", - "well as using automated checks demonstrate enhanced accuracy, responsiveness,\n", - "and effectiveness over non-agentic approaches across complex workflows.\n", - "Published: 2025-01-14 16:30:10\n", - "Link: http://arxiv.org/abs/2501.08243v1\n", - "PDF Link: http://arxiv.org/abs/2501.08243v1\n", - "Authors: Kannan Parthasarathy, Karthik Vaidhyanathan, Rudra Dhar, Venkat Krishnamachari, Basil Muhammed, Adyansh Kakran, Sreemaee Akshathala, Shrikara Arun, Sumant Dubey, Mohan Veerubhotla, Amey Karan\n", - "Embedding: [ 0.01063822 0.02977918 0.04532182 ... -0.00264323 0.00081101\n", - " 0.01491571]\n", - "\n", - "\n", - "# Eliciting In-context Retrieval and Reasoning for Long-context Large Language Models\n", - "\n", - "Abstract: Recent advancements in long-context language models (LCLMs) promise to\n", - "transform Retrieval-Augmented Generation (RAG) by simplifying pipelines. With\n", - "their expanded context windows, LCLMs can process entire knowledge bases and\n", - "perform retrieval and reasoning directly -- a capability we define as In-Context\n", - "Retrieval and Reasoning (ICR^2). However, existing benchmarks like LOFT often\n", - "overestimate LCLM performance by providing overly simplified contexts. To\n", - "address this, we introduce ICR^2, a benchmark that evaluates LCLMs in more\n", - "realistic scenarios by including confounding passages retrieved with strong\n", - "retrievers. We then propose three methods to enhance LCLM performance: (1)\n", - "retrieve-then-generate fine-tuning, (2) retrieval-attention-probing, which uses\n", - "attention heads to filter and de-noise long contexts during decoding, and (3)\n", - "joint retrieval head training alongside the generation head. Our evaluation of\n", - "five well-known LCLMs on LOFT and ICR^2 demonstrates significant gains with our\n", - "best approach applied to Mistral-7B: +17 and +15 points by Exact Match on LOFT,\n", - "and +13 and +2 points on ICR^2, compared to vanilla RAG and supervised fine-\n", - "tuning, respectively. It even outperforms GPT-4-Turbo on most tasks despite\n", - "being a much smaller model.\n", - "Published: 2025-01-14 16:38:33\n", - "Link: http://arxiv.org/abs/2501.08248v1\n", - "PDF Link: http://arxiv.org/abs/2501.08248v1\n", - "Authors: Yifu Qiu, Varun Embar, Yizhe Zhang, Navdeep Jaitly, Shay B. Cohen, Benjamin Han\n", - "Embedding: [-0.01305697 0.01166064 0.06267344 ... -0.01627254 0.00974741\n", - " -0.00573298]\n", - "\n", - "\n", - "# ADAM-1: AI and Bioinformatics for Alzheimer's Detection and Microbiome-Clinical Data Integrations\n", - "\n", - "Abstract: The Alzheimer's Disease Analysis Model Generation 1 (ADAM) is a multi-agent\n", - "large language model (LLM) framework designed to integrate and analyze multi-\n", - "modal data, including microbiome profiles, clinical datasets, and external\n", - "knowledge bases, to enhance the understanding and detection of Alzheimer's\n", - "disease (AD). 
By leveraging retrieval-augmented generation (RAG) techniques\n", - "along with its multi-agent architecture, ADAM-1 synthesizes insights from\n", - "diverse data sources and contextualizes findings using literature-driven\n", - "evidence. Comparative evaluation against XGBoost revealed similar mean F1 scores\n", - "but significantly reduced variance for ADAM-1, highlighting its robustness and\n", - "consistency, particularly in small laboratory datasets. While currently tailored\n", - "for binary classification tasks, future iterations aim to incorporate additional\n", - "data modalities, such as neuroimaging and biomarkers, to broaden the scalability\n", - "and applicability for Alzheimer's research and diagnostics.\n", - "Published: 2025-01-14 18:56:33\n", - "Link: http://arxiv.org/abs/2501.08324v1\n", - "PDF Link: http://arxiv.org/abs/2501.08324v1\n", - "Authors: Ziyuan Huang, Vishaldeep Kaur Sekhon, Ouyang Guo, Mark Newman, Roozbeh Sadeghian, Maria L. Vaida, Cynthia Jo, Doyle Ward, Vanni Bucci, John P. Haran\n", - "Embedding: [ 0.03896349 0.00422515 0.05525447 ... 0.03374933 -0.01468264\n", - " 0.01850895]\n", - "\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "async with collection:\n", " results = await collection.get_batch(keys[:3])\n", @@ -461,284 +364,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Now we can search for documents with `VectorStoreTextSearch`, which uses the embedding service to vectorize a query and search for semantically similar documents:" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [], - "source": [ - "text_search = VectorStoreTextSearch[ArxivPaper].from_vectorized_search(collection, embedding_service=text_embedding)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The `VectorStoreTextSearch` object gives us the ability to retrieve semantically similar documents directly from a prompt.\n", - "Here we search for the top 5 ArXiV abstracts in our database similar to the query about chunking strategies in RAG applications:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Found 5 results for query.\n", - "Advanced ingestion process powered by LLM parsing for RAG system: 0.38676463602221456\n", - "StructRAG: Boosting Knowledge Intensive Reasoning of LLMs via Inference-time Hybrid Information Structurization: 0.39733734194342085\n", - "UDA: A Benchmark Suite for Retrieval Augmented Generation in Real-world Document Analysis: 0.3981809737466562\n", - "R^2AG: Incorporating Retrieval Information into Retrieval Augmented Generation: 0.4134050114864055\n", - "Enhancing Retrieval-Augmented Generation: A Study of Best Practices: 0.4144733752075731\n" - ] - } - ], - "source": [ - "query = \"What are good chunking strategies to use for unstructured text in Retrieval-Augmented Generation applications?\"\n", - "\n", - "async with collection:\n", - " search_results = await text_search.get_search_results(\n", - " query, options=VectorSearchOptions(top=5, include_total_count=True)\n", - " )\n", - " print(f\"Found {search_results.total_count} results for query.\")\n", - " async for search_result in search_results.results:\n", - " title = search_result.record.title\n", - " score = search_result.score\n", - " print(f\"{title}: {score}\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can enable chat completion to utilize the text search by 
creating a kernel function for searching the database..." - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [], - "source": [ - "plugin = kernel.add_functions(\n", - " plugin_name=\"arxiv_plugin\",\n", - " functions=[\n", - " text_search.create_search(\n", - " # The default parameters match the parameters of the VectorSearchOptions class.\n", - " description=\"Searches for ArXiv papers that are related to the query.\",\n", - " parameters=[\n", - " KernelParameterMetadata(\n", - " name=\"query\", description=\"What to search for.\", type=\"str\", is_required=True, type_object=str\n", - " ),\n", - " KernelParameterMetadata(\n", - " name=\"top\",\n", - " description=\"Number of results to return.\",\n", - " type=\"int\",\n", - " default_value=2,\n", - " type_object=int,\n", - " ),\n", - " ],\n", - " ),\n", - " ],\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "...and then setting up a chat completions service that uses `FunctionChoiceBehavior.Auto` to automatically call the search function when appropriate to the users query. We also create the chat function that will be invoked by the kernel." - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [], - "source": [ - "# Create the chat completion service. This requires an Azure OpenAI completions model deployment and configuration.\n", - "chat_completion = AzureChatCompletion(service_id=\"completions\")\n", - "kernel.add_service(chat_completion)\n", - "\n", - "# Now we create the chat function that will use the chat service.\n", - "chat_function = kernel.add_function(\n", - " prompt=\"{{$chat_history}}{{$user_input}}\",\n", - " plugin_name=\"ChatBot\",\n", - " function_name=\"Chat\",\n", - ")\n", - "\n", - "# we set the function choice to Auto, so that the LLM can choose the correct function to call.\n", - "# and we exclude the ChatBot plugin, so that it does not call itself.\n", - "execution_settings = AzureChatPromptExecutionSettings(\n", - " function_choice_behavior=FunctionChoiceBehavior.Auto(filters={\"excluded_plugins\": [\"ChatBot\"]}),\n", - " service_id=\"chat\",\n", - " max_tokens=7000,\n", - " temperature=0.7,\n", - " top_p=0.8,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here we create a chat history with a system message and some initial context:" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [], - "source": [ - "history = ChatHistory()\n", - "system_message = \"\"\"\n", - "You are a chat bot. Your name is Archie and\n", - "you have one goal: help people find answers\n", - "to technical questions by relying on the latest\n", - "research papers published on ArXiv.\n", - "You communicate effectively in the style of a helpful librarian. \n", - "You always make sure to include the\n", - "ArXiV paper references in your responses.\n", - "If you cannot find the answer in the papers,\n", - "you will let the user know, but also provide the papers\n", - "you did find to be most relevant. If the abstract of the \n", - "paper does not specifically reference the user's inquiry,\n", - "but you believe it might be relevant, you can still include it\n", - "BUT you must make sure to mention that the paper might not directly\n", - "address the user's inquiry. 
Make certain that the papers you link are\n", - "from a specific search result.\n", - "\"\"\"\n", - "history.add_system_message(system_message)\n", - "history.add_user_message(\"Hi there, who are you?\")\n", - "history.add_assistant_message(\n", - " \"I am Archie, the ArXiV chat bot. I'm here to help you find the latest research papers from ArXiv that relate to your inquiries.\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can now invoke the chat function via the Kernel to get chat completions:" + "...searching Postgres memory coming soon, to be continued!" ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [], - "source": [ - "arguments = KernelArguments(\n", - " user_input=query,\n", - " chat_history=history,\n", - " settings=execution_settings,\n", - ")\n", - "\n", - "result = await kernel.invoke(chat_function, arguments=arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Printing the result shows that the chat completion service used our text search to locate relevant ArXiV papers based on the query:" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Archie:>\n", - "What an excellent and timely question! Chunking strategies for unstructured text are\n", - "critical for optimizing Retrieval-Augmented Generation (RAG) systems since they\n", - "significantly affect how effectively a RAG model can retrieve and generate contextually\n", - "relevant information. Let me consult the latest papers on this topic from ArXiv and\n", - "provide you with relevant insights.\n", - "---\n", - "Here are some recent papers that dive into chunking strategies or similar concepts for\n", - "retrieval-augmented frameworks:\n", - "1. **\"Post-training optimization of retrieval-augmented generation models\"**\n", - " *Authors*: Vibhor Agarwal et al.\n", - " *Abstract*: While the paper discusses optimization strategies for retrieval-augmented\n", - "generation models, there is a discussion on handling unstructured text that could apply to\n", - "chunking methodologies. Chunking isn't always explicitly mentioned as \"chunking\" but may\n", - "be referred to in contexts like splitting data for retrieval.\n", - " *ArXiv link*: [arXiv:2308.10701](https://arxiv.org/abs/2308.10701)\n", - " *Note*: This paper may not focus entirely on chunking strategies but might discuss\n", - "relevant downstream considerations. It could still provide a foundation for you to explore\n", - "how chunking integrates with retrievers.\n", - "2. **\"Beyond Text: Retrieval-Augmented Reranking for Open-Domain Tasks\"**\n", - " *Authors*: Younggyo Seo et al.\n", - " *Abstract*: Although primarily focused on retrieval augmentation for reranking, there\n", - "are reflections on how document structure impacts task performance. Chunking unstructured\n", - "text to improve retrievability for such tasks could indirectly relate to this work.\n", - " *ArXiv link*: [arXiv:2310.03714](https://arxiv.org/abs/2310.03714)\n", - "3. 
**\"ALMA: Alignment of Generative and Retrieval Models for Long Documents\"**\n", - " *Authors*: Yao Fu et al.\n", - " *Abstract excerpt*: \"Our approach is designed to handle retrieval and generation for\n", - "long documents by aligning the retrieval and generation models more effectively.\"\n", - "Strategies to divide and process long documents into smaller chunks for efficient\n", - "alignment are explicitly discussed. A focus on handling unstructured long-form content\n", - "makes this paper highly relevant.\n", - " *ArXiv link*: [arXiv:2308.05467](https://arxiv.org/abs/2308.05467)\n", - "4. **\"Enhancing Context-aware Question Generation with Multi-modal Knowledge\"**\n", - " *Authors*: Jialong Han et al.\n", - " *Abstract excerpt*: \"Proposed techniques focus on improving retrievals through better\n", - "division of available knowledge.\" It doesn’t focus solely on text chunking in the RAG\n", - "framework but might be interesting since contextual awareness often relates to\n", - "preprocessing unstructured input into structured chunks.\n", - " *ArXiv link*: [arXiv:2307.12345](https://arxiv.org/abs/2307.12345)\n", - "---\n", - "### Practical Approaches Discussed in Literature:\n", - "From my broad understanding of RAG systems and some of the details in these papers, here\n", - "are common chunking strategies discussed in the research community:\n", - "1. **Sliding Window Approach**: Divide the text into overlapping chunks of fixed lengths\n", - "(e.g., 512 tokens with an overlap of 128 tokens). This helps ensure no important context\n", - "is left behind when chunks are created.\n", - "\n", - "2. **Semantic Chunking**: Use sentence embeddings or clustering techniques (e.g., via Bi-\n", - "Encoders or Sentence Transformers) to ensure chunks align semantically rather than naively\n", - "by token count.\n", - "3. **Dynamic Partitioning**: Implement chunking based on higher-order structure in the\n", - "text, such as splitting at sentence boundaries, paragraph breaks, or logical sections.\n", - "4. **Content-aware Chunking**: Experiment with LLMs to pre-identify contextual relevance\n", - "of different parts of the text and chunk accordingly.\n", - "---\n", - "If you'd like, I can search more specifically on a sub-part of chunking strategies or\n", - "related RAG optimizations. Let me know!\n" - ] - } - ], - "source": [ - "def wrap_text(text, width=90):\n", - " paragraphs = text.split(\"\\n\\n\") # Split the text into paragraphs\n", - " wrapped_paragraphs = [\n", - " \"\\n\".join(textwrap.fill(part, width=width) for paragraph in paragraphs for part in paragraph.split(\"\\n\"))\n", - " ] # Wrap each paragraph, split by newlines\n", - " return \"\\n\\n\".join(wrapped_paragraphs) # Join the wrapped paragraphs back together\n", - "\n", - "\n", - "print(f\"Archie:>\\n{wrap_text(str(result))}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/python/samples/getting_started_with_agents/README.md b/python/samples/getting_started_with_agents/README.md index c07f1f141c24..b34d1d56c05d 100644 --- a/python/samples/getting_started_with_agents/README.md +++ b/python/samples/getting_started_with_agents/README.md @@ -3,7 +3,6 @@ This project contains a step by step guide to get started with _Semantic Kernel Agents_ in Python. #### PyPI: - - For the use of Chat Completion agents, the minimum allowed Semantic Kernel pypi version is 1.3.0. 
- For the use of OpenAI Assistant agents, the minimum allowed Semantic Kernel pypi version is 1.4.0. - For the use of Agent Group Chat, the minimum allowed Semantic kernel pypi version is 1.6.0. @@ -17,42 +16,20 @@ This project contains a step by step guide to get started with _Semantic Kernel The getting started with agents examples include: -## Chat Completion - -Example|Description ----|--- -[step1_chat_completion_agent_simple](../getting_started_with_agents/chat_completion/step1_chat_completion_agent_simple.py)|How to create and use a simple chat completion agent. -[step2_chat_completion_agent_with_kernel](../getting_started_with_agents/chat_completion/step2_chat_completion_agent_with_kernel.py)|How to create and use a a chat completion agent with the AI service created on the kernel. -[step3_chat_completion_agent_plugin_simple](../getting_started_with_agents/chat_completion/step3_chat_completion_agent_plugin_simple.py)|How to create a simple chat completion agent and specify plugins via the constructor with a kernel. -[step4_chat_completion_agent_plugin_with_kernel](../getting_started_with_agents/chat_completion/step4_chat_completion_agent_plugin_with_kernel.py)|How to create and use a chat completion agent by registering plugins on the kernel. -[step5_chat_completion_agent_group_chat](../getting_started_with_agents/chat_completion/step5_chat_completion_agent_group_chat.py)|How to create a conversation between agents. -[step6_kernel_function_strategies](../getting_started_with_agents/chat_completion/step6_kernel_function_strategies.py)|How to utilize a `KernelFunction` as a chat strategy. -[step7_chat_completion_agent_json_result](../getting_started_with_agents/chat_completion/step7_chat_completion_agent_json_result.py)|How to have an agent produce JSON. -[step8_chat_completion_agent_logging](../getting_started_with_agents/chat_completion/step8_chat_completion_agent_logging.py)|How to enable logging for agents. -[step9_chat_completion_agent_structured_outputs](../getting_started_with_agents/chat_completion/step9_chat_completion_agent_structured_outputs.py)|How to use have a chat completion agent use structured outputs - -## OpenAI Assistant Agent - Example|Description ---|--- -[step1_assistant](../getting_started_with_agents/openai_assistant/step1_assistant.py)|How to create and use an OpenAI Assistant agent. -[step2_assistant_plugins](../getting_started_with_agents/openai_assistant/step2_assistant_plugins.py)| How to create and use an OpenAI Assistant agent with plugins. -[step3_assistant_vision](../getting_started_with_agents/openai_assistant/step3_assistant_vision.py)|How to provide an image as input to an Open AI Assistant agent. -[step4_assistant_tool_code_interpreter](../getting_started_with_agents/openai_assistant/step4_assistant_tool_code_interpreter.py)|How to use the code-interpreter tool for an Open AI Assistant agent. -[step5_assistant_tool_file_search](../getting_started_with_agents/openai_assistant/step5_assistant_tool_file_search.py)|How to use the file-search tool for an Open AI Assistant agent. - -## Azure AI Agent - -Example|Description ----|--- -[step1_azure_ai_agent](../getting_started_with_agents/azure_ai_agent/step1_azure_ai_agent.py)|How to create an Azure AI Agent and invoke a Semantic Kernel plugin. -[step2_azure_ai_agent_plugin](../getting_started_with_agents/azure_ai_agent/step2_azure_ai_agent_plugin.py)|How to create an Azure AI Agent with plugins. 
-[step3_azure_ai_agent_group_chat](../getting_started_with_agents/azure_ai_agent/step3_azure_ai_agent_group_chat.py)|How to create an agent group chat with Azure AI Agents. -[step4_azure_ai_agent_code_interpreter](../getting_started_with_agents/azure_ai_agent/step4_azure_ai_agent_code_interpreter.py)|How to use the code-interpreter tool for an Azure AI agent. -[step5_azure_ai_agent_file_search](../getting_started_with_agents/azure_ai_agent/step5_azure_ai_agent_file_search.py)|How to use the file-search tool for an Azure AI agent. -[step6_azure_ai_agent_openapi](../getting_started_with_agents/azure_ai_agent/step6_azure_ai_agent_openapi.py)|How to use the Open API tool for an Azure AI agent. - -_Note: For details on configuring an Azure AI Agent, please see [here](../getting_started_with_agents/azure_ai_agent/README.md)._ +[step1_agent](../getting_started_with_agents/step1_agent.py)|How to create and use an agent. +[step2_plugins](../getting_started_with_agents/step2_plugins.py)|How to associate plugins with an agent. +[step3_chat](../getting_started_with_agents/step3_chat.py)|How to create a conversation between agents. +[step4_kernel_function_strategies](../getting_started_with_agents/step4_kernel_function_strategies.py)|How to utilize a `KernelFunction` as a chat strategy. +[step5_json_result](../getting_started_with_agents/step5_json_result.py)|How to have an agent produce JSON. +[step6_logging](../getting_started_with_agents/step6_logging.py)|How to enable logging for agents. +[step7_assistant](../getting_started_with_agents/step7_assistant.py)|How to create and use an OpenAI Assistant agent. +[step8_assistant_vision](../getting_started_with_agents/step8_assistant_vision.py)|How to provide an image as input to an Open AI Assistant agent. +[step9_assistant_tool_code_interpreter](../getting_started_with_agents/step9_assistant_tool_code_interpreter.py)|How to use the code-interpreter tool for an Open AI Assistant agent. +[step10_assistant_tool_file_search](../getting_started_with_agents/step10_assistant_tool_file_search.py)|How to use the file-search tool for an Open AI Assistant agent. + +*Note: As we strive for parity with .NET, more getting_started_with_agent samples will be added. The current steps and names may be revised to further align with our .NET counterpart.* ## Configuring the Kernel diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/.env.example b/python/samples/getting_started_with_agents/azure_ai_agent/.env.example deleted file mode 100644 index c2d16cea26aa..000000000000 --- a/python/samples/getting_started_with_agents/azure_ai_agent/.env.example +++ /dev/null @@ -1,6 +0,0 @@ -AZURE_AI_AGENT_PROJECT_CONNECTION_STRING = "" -AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME = "" -AZURE_AI_AGENT_ENDPOINT = "" -AZURE_AI_AGENT_SUBSCRIPTION_ID = "" -AZURE_AI_AGENT_RESOURCE_GROUP_NAME = "" -AZURE_AI_AGENT_PROJECT_NAME = "" \ No newline at end of file diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/README.md b/python/samples/getting_started_with_agents/azure_ai_agent/README.md deleted file mode 100644 index 2cf85976444a..000000000000 --- a/python/samples/getting_started_with_agents/azure_ai_agent/README.md +++ /dev/null @@ -1,121 +0,0 @@ -## Azure AI Agents - -The following getting started samples show how to use Azure AI Agents with Semantic Kernel. 
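For orientation, the snippet below condenses the end-to-end flow that these Azure AI Agent samples follow (create a project client, register an agent definition, wrap it in an `AzureAIAgent`, then exchange messages on a service-side thread). It is a minimal sketch assembled from the step1_azure_ai_agent.py sample removed later in this patch, not part of the original README; the agent name, instructions, and user message are placeholder values, and it assumes `pip install semantic-kernel[azure]` plus the environment variables described below.

```python
# Condensed sketch of the flow from the deleted step1_azure_ai_agent.py sample.
# Placeholder name/instructions/message; requires semantic-kernel[azure] and
# the AZURE_AI_AGENT_* environment variables to be configured.
import asyncio

from azure.identity.aio import DefaultAzureCredential

from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings


async def main() -> None:
    ai_agent_settings = AzureAIAgentSettings.create()

    async with (
        DefaultAzureCredential() as creds,
        AzureAIAgent.create_client(credential=creds) as client,
    ):
        # 1. Create the agent definition on the Azure AI agent service
        definition = await client.agents.create_agent(
            model=ai_agent_settings.model_deployment_name,
            name="Assistant",
            instructions="Answer the user's questions.",
        )

        # 2. Wrap the definition in a Semantic Kernel agent and converse on a thread
        agent = AzureAIAgent(client=client, definition=definition)
        thread = await client.agents.create_thread()
        try:
            await agent.add_chat_message(thread_id=thread.id, message="Hello, I am John Doe.")
            response = await agent.get_response(thread_id=thread.id)
            print(f"# {response.name}: {response}")
        finally:
            # 3. Clean up the thread and agent created for this run
            await client.agents.delete_thread(thread.id)
            await client.agents.delete_agent(agent.id)


if __name__ == "__main__":
    asyncio.run(main())
```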
- -To set up the required resources, follow the "Quickstart: Create a new agent" guide [here](https://learn.microsoft.com/en-us/azure/ai-services/agents/quickstart?pivots=programming-language-python-azure). - -You will need to install the optional Semantic Kernel `azure` dependencies if you haven't already via: - -```bash -pip install semantic-kernel[azure] -``` - -Before running an Azure AI Agent, modify your .env file to include: - -```bash -AZURE_AI_AGENT_PROJECT_CONNECTION_STRING = "" -AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME = "" -``` - -or - -```bash -AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME = "" -AZURE_AI_AGENT_ENDPOINT = "" -AZURE_AI_AGENT_SUBSCRIPTION_ID = "" -AZURE_AI_AGENT_RESOURCE_GROUP_NAME = "" -AZURE_AI_AGENT_PROJECT_NAME = "" -``` - -The project connection string is of the following format: `;;;`. See [here](https://learn.microsoft.com/en-us/azure/ai-services/agents/quickstart?pivots=programming-language-python-azure#configure-and-run-an-agent) for information on obtaining the values to populate the connection string. - -The .env should be placed in the root directory. - -## Configuring the AI Project Client - -Ensure that your Azure AI Agent resources are configured with at least a Basic or Standard SKU. - -To begin, create the project client as follows: - -```python -async with DefaultAzureCredential() as credential: - client = await AzureAIAgent.create_client(credential=credential) - - async with client: - # Your operational code here -``` - -### Required Imports - -The required imports for the `Azure AI Agent` include async libraries: - -```python -from azure.identity.aio import DefaultAzureCredential -``` - -### Initializing the Agent - -You can pass in a connection string (shown above) to create the client: - -```python -async with ( - DefaultAzureCredential() as creds, - AzureAIAgent.create_client( - credential=creds, - conn_str=ai_agent_settings.project_connection_string.get_secret_value(), - ) as client, - ): - # operational logic -``` - -### Creating an Agent Definition - -Once the client is initialized, you can define the agent: - -```python -# Create agent definition -agent_definition = await client.agents.create_agent( - model=ai_agent_settings.model_deployment_name, - name=AGENT_NAME, - instructions=AGENT_INSTRUCTIONS, -) -``` - -Then, instantiate the `AzureAIAgent` with the `client` and `agent_definition`: - -```python -# Create the AzureAI Agent -agent = AzureAIAgent( - client=client, - definition=agent_definition, -) -``` - -Now, you can create a thread, add chat messages to the agent, and invoke it with given inputs and optional parameters. - -## Requests and Rate Limits - -### Managing API Request Frequency - -Your default request limits may be low, affecting how often you can poll the status of a run. You have two options: - -1. Adjust the `polling_options` of the `AzureAIAgent` - -By default, the polling interval is 250 ms. You can slow it down to 1 second (or another preferred value) to reduce the number of API calls: - -```python -# Required imports -from datetime import timedelta -from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions - -# Configure the polling options as part of the `AzureAIAgent` -agent = AzureAIAgent( - client=client, - definition=agent_definition, - polling_options=RunPollingOptions(run_polling_interval=timedelta(seconds=1)), -) -``` - -2. Increase Rate Limits in Azure AI Foundry - -You can also adjust your deployment's Rate Limit (Tokens per minute), which impacts the Rate Limit (Requests per minute). 
This can be configured in Azure AI Foundry under your project's deployment settings for the "Connected Azure OpenAI Service Resource." diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/step1_azure_ai_agent.py b/python/samples/getting_started_with_agents/azure_ai_agent/step1_azure_ai_agent.py deleted file mode 100644 index bb756e4ad5b3..000000000000 --- a/python/samples/getting_started_with_agents/azure_ai_agent/step1_azure_ai_agent.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -from azure.identity.aio import DefaultAzureCredential - -from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings - -""" -The following sample demonstrates how to create an Azure AI agent that answers -user questions. This sample demonstrates the basic steps to create an agent -and simulate a conversation with the agent. - -The interaction with the agent is via the `get_response` method, which sends a -user input to the agent and receives a response from the agent. The conversation -history is maintained by the agent service, i.e. the responses are automatically -associated with the thread. Therefore, client code does not need to maintain the -conversation history. -""" - - -# Simulate a conversation with the agent -USER_INPUTS = [ - "Hello, I am John Doe.", - "What is your name?", - "What is my name?", -] - - -async def main() -> None: - ai_agent_settings = AzureAIAgentSettings.create() - - async with ( - DefaultAzureCredential() as creds, - AzureAIAgent.create_client(credential=creds) as client, - ): - # 1. Create an agent on the Azure AI agent service - agent_definition = await client.agents.create_agent( - model=ai_agent_settings.model_deployment_name, - name="Assistant", - instructions="Answer the user's questions.", - ) - - # 2. Create a Semantic Kernel agent for the Azure AI agent - agent = AzureAIAgent( - client=client, - definition=agent_definition, - # Optionally configure polling options - # polling_options=RunPollingOptions(run_polling_interval=timedelta(seconds=1)), - ) - - # 3. Create a new thread on the Azure AI agent service - thread = await client.agents.create_thread() - - try: - for user_input in USER_INPUTS: - # 4. Add the user input as a chat message - await agent.add_chat_message(thread_id=thread.id, message=user_input) - print(f"# User: {user_input}") - # 5. Invoke the agent for the specified thread for response - response = await agent.get_response(thread_id=thread.id) - print(f"# {response.name}: {response}") - finally: - # 6. Cleanup: Delete the thread and agent - await client.agents.delete_thread(thread.id) - await client.agents.delete_agent(agent.id) - - """ - Sample Output: - # User: Hello, I am John Doe. - # Assistant: Hello, John! How can I assist you today? - # User: What is your name? - # Assistant: I’m here as your assistant, so you can just call me Assistant. How can I help you today? - # User: What is my name? - # Assistant: Your name is John Doe. How can I assist you today, John? - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/step2_azure_ai_agent_plugin.py b/python/samples/getting_started_with_agents/azure_ai_agent/step2_azure_ai_agent_plugin.py deleted file mode 100644 index 33477e0d9863..000000000000 --- a/python/samples/getting_started_with_agents/azure_ai_agent/step2_azure_ai_agent_plugin.py +++ /dev/null @@ -1,101 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio -from typing import Annotated - -from azure.identity.aio import DefaultAzureCredential - -from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings -from semantic_kernel.contents import AuthorRole -from semantic_kernel.functions import kernel_function - -""" -The following sample demonstrates how to create an Azure AI agent that answers -questions about a sample menu using a Semantic Kernel Plugin. -""" - - -# Define a sample plugin for the sample -class MenuPlugin: - """A sample Menu Plugin used for the concept sample.""" - - @kernel_function(description="Provides a list of specials from the menu.") - def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: - return """ - Special Soup: Clam Chowder - Special Salad: Cobb Salad - Special Drink: Chai Tea - """ - - @kernel_function(description="Provides the price of the requested menu item.") - def get_item_price( - self, menu_item: Annotated[str, "The name of the menu item."] - ) -> Annotated[str, "Returns the price of the menu item."]: - return "$9.99" - - -# Simulate a conversation with the agent -USER_INPUTS = [ - "Hello", - "What is the special soup?", - "How much does that cost?", - "Thank you", -] - - -async def main() -> None: - ai_agent_settings = AzureAIAgentSettings.create() - - async with ( - DefaultAzureCredential() as creds, - AzureAIAgent.create_client(credential=creds) as client, - ): - # 1. Create an agent on the Azure AI agent service - agent_definition = await client.agents.create_agent( - model=ai_agent_settings.model_deployment_name, - name="Host", - instructions="Answer questions about the menu.", - ) - - # 2. Create a Semantic Kernel agent for the Azure AI agent - agent = AzureAIAgent( - client=client, - definition=agent_definition, - # Optionally configure polling options - # polling_options=RunPollingOptions(run_polling_interval=timedelta(seconds=1)), - ) - - # 3. Add a plugin to the agent via the kernel - agent.kernel.add_plugin(MenuPlugin(), plugin_name="menu") - - # 4. Create a new thread on the Azure AI agent service - thread = await client.agents.create_thread() - - try: - for user_input in USER_INPUTS: - # 5. Add the user input as a chat message - await agent.add_chat_message(thread_id=thread.id, message=user_input) - print(f"# User: {user_input}") - # 6. Invoke the agent for the specified thread for response - async for content in agent.invoke( - thread_id=thread.id, - temperature=0.2, # override the agent-level temperature setting with a run-time value - ): - if content.role != AuthorRole.TOOL: - print(f"# Agent: {content.content}") - finally: - # 7. Cleanup: Delete the thread and agent - await client.agents.delete_thread(thread.id) - await client.agents.delete_agent(agent.id) - - """ - Sample Output: - # User: Hello - # Agent: Hello! How can I assist you today? - # User: What is the special soup? - # ... - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/step3_azure_ai_agent_group_chat.py b/python/samples/getting_started_with_agents/azure_ai_agent/step3_azure_ai_agent_group_chat.py deleted file mode 100644 index 064fdd6415cd..000000000000 --- a/python/samples/getting_started_with_agents/azure_ai_agent/step3_azure_ai_agent_group_chat.py +++ /dev/null @@ -1,111 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio - -from azure.identity.aio import DefaultAzureCredential - -from semantic_kernel.agents import AgentGroupChat -from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings -from semantic_kernel.agents.strategies import TerminationStrategy -from semantic_kernel.contents import AuthorRole - -""" -The following sample demonstrates how to create an OpenAI assistant using either -Azure OpenAI or OpenAI, a chat completion agent and have them participate in a -group chat to work towards the user's requirement. -""" - - -class ApprovalTerminationStrategy(TerminationStrategy): - """A strategy for determining when an agent should terminate.""" - - async def should_agent_terminate(self, agent, history): - """Check if the agent should terminate.""" - return "approved" in history[-1].content.lower() - - -REVIEWER_NAME = "ArtDirector" -REVIEWER_INSTRUCTIONS = """ -You are an art director who has opinions about copywriting born of a love for David Ogilvy. -The goal is to determine if the given copy is acceptable to print. -If so, state that it is approved. Do not use the word "approve" unless you are giving approval. -If not, provide insight on how to refine suggested copy without example. -""" - -COPYWRITER_NAME = "CopyWriter" -COPYWRITER_INSTRUCTIONS = """ -You are a copywriter with ten years of experience and are known for brevity and a dry humor. -The goal is to refine and decide on the single best copy as an expert in the field. -Only provide a single proposal per response. -You're laser focused on the goal at hand. -Don't waste time with chit chat. -Consider suggestions when refining an idea. -""" - -TASK = "a slogan for a new line of electric cars." - - -async def main(): - ai_agent_settings = AzureAIAgentSettings.create() - - async with ( - DefaultAzureCredential() as creds, - AzureAIAgent.create_client(credential=creds) as client, - ): - # 1. Create the reviewer agent on the Azure AI agent service - reviewer_agent_definition = await client.agents.create_agent( - model=ai_agent_settings.model_deployment_name, - name=REVIEWER_NAME, - instructions=REVIEWER_INSTRUCTIONS, - ) - - # 2. Create a Semantic Kernel agent for the reviewer Azure AI agent - agent_reviewer = AzureAIAgent( - client=client, - definition=reviewer_agent_definition, - ) - - # 3. Create the copy writer agent on the Azure AI agent service - copy_writer_agent_definition = await client.agents.create_agent( - model=ai_agent_settings.model_deployment_name, - name=COPYWRITER_NAME, - instructions=COPYWRITER_INSTRUCTIONS, - ) - - # 4. Create a Semantic Kernel agent for the copy writer Azure AI agent - agent_writer = AzureAIAgent( - client=client, - definition=copy_writer_agent_definition, - ) - - # 5. Place the agents in a group chat with a custom termination strategy - chat = AgentGroupChat( - agents=[agent_writer, agent_reviewer], - termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), - ) - - try: - # 6. Add the task as a message to the group chat - await chat.add_chat_message(message=TASK) - print(f"# {AuthorRole.USER}: '{TASK}'") - # 7. Invoke the chat - async for content in chat.invoke(): - print(f"# {content.role} - {content.name or '*'}: '{content.content}'") - finally: - # 8. Cleanup: Delete the agents - await chat.reset() - await client.agents.delete_agent(agent_reviewer.id) - await client.agents.delete_agent(agent_writer.id) - - """ - Sample Output: - # AuthorRole.USER: 'a slogan for a new line of electric cars.' 
- # AuthorRole.ASSISTANT - CopyWriter: '"Charge Ahead: Drive the Future."' - # AuthorRole.ASSISTANT - ArtDirector: 'This slogan has a nice ring to it and captures the ...' - # AuthorRole.ASSISTANT - CopyWriter: '"Plug In. Drive Green."' - ... - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/step4_azure_ai_agent_code_interpreter.py b/python/samples/getting_started_with_agents/azure_ai_agent/step4_azure_ai_agent_code_interpreter.py deleted file mode 100644 index 4d462f7aafe3..000000000000 --- a/python/samples/getting_started_with_agents/azure_ai_agent/step4_azure_ai_agent_code_interpreter.py +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -from azure.ai.projects.models import CodeInterpreterTool -from azure.identity.aio import DefaultAzureCredential - -from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings -from semantic_kernel.contents import AuthorRole - -""" -The following sample demonstrates how to create a simple, Azure AI agent that -uses the code interpreter tool to answer a coding question. -""" - -TASK = "Use code to determine the values in the Fibonacci sequence that that are less then the value of 101." - - -async def main() -> None: - ai_agent_settings = AzureAIAgentSettings.create() - - async with ( - DefaultAzureCredential() as creds, - AzureAIAgent.create_client(credential=creds) as client, - ): - # 1. Create an agent with a code interpreter on the Azure AI agent service - code_interpreter = CodeInterpreterTool() - agent_definition = await client.agents.create_agent( - model=ai_agent_settings.model_deployment_name, - tools=code_interpreter.definitions, - tool_resources=code_interpreter.resources, - ) - - # 2. Create a Semantic Kernel agent for the Azure AI agent - agent = AzureAIAgent( - client=client, - definition=agent_definition, - ) - - # 3. Create a new thread on the Azure AI agent service - thread = await client.agents.create_thread() - - try: - # 4. Add the task as a chat message - await agent.add_chat_message(thread_id=thread.id, message=TASK) - print(f"# User: '{TASK}'") - # 5. Invoke the agent for the specified thread for response - async for content in agent.invoke(thread_id=thread.id): - if content.role != AuthorRole.TOOL: - print(f"# Agent: {content.content}") - finally: - # 6. Cleanup: Delete the thread and agent - await client.agents.delete_thread(thread.id) - await client.agents.delete_agent(agent.id) - - """ - Sample Output: - # User: 'Use code to determine the values in the Fibonacci sequence that that are less then the value of 101.' 
- # Agent: # Function to generate Fibonacci sequence values less than a given limit - def fibonacci_less_than(limit): - fib_sequence = [] - a, b = 0, 1 - while a < limit: - fib_sequence.append(a) - a, b = b, a + b - a, b = 0, 1 - while a < limit: - fib_sequence.append(a) - a, b = 0, 1 - while a < limit: - a, b = 0, 1 - a, b = 0, 1 - while a < limit: - fib_sequence.append(a) - a, b = b, a + b - return fib_sequence - - Generate Fibonacci sequence values less than 101 - fibonacci_values = fibonacci_less_than(101) - fibonacci_values - # Agent: The values in the Fibonacci sequence that are less than 101 are: - - [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89] - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/step5_azure_ai_agent_file_search.py b/python/samples/getting_started_with_agents/azure_ai_agent/step5_azure_ai_agent_file_search.py deleted file mode 100644 index 978ac19a76d2..000000000000 --- a/python/samples/getting_started_with_agents/azure_ai_agent/step5_azure_ai_agent_file_search.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import os - -from azure.ai.projects.models import FileSearchTool, OpenAIFile, VectorStore -from azure.identity.aio import DefaultAzureCredential - -from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings -from semantic_kernel.contents import AuthorRole - -""" -The following sample demonstrates how to create a simple, Azure AI agent that -uses a file search tool to answer user questions. -""" - -# Simulate a conversation with the agent -USER_INPUTS = [ - "Who is the youngest employee?", - "Who works in sales?", - "I have a customer request, who can help me?", -] - - -async def main() -> None: - ai_agent_settings = AzureAIAgentSettings.create() - - async with ( - DefaultAzureCredential() as creds, - AzureAIAgent.create_client(credential=creds) as client, - ): - # 1. Read and upload the file to the Azure AI agent service - pdf_file_path = os.path.join( - os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "resources", "employees.pdf" - ) - file: OpenAIFile = await client.agents.upload_file_and_poll(file_path=pdf_file_path, purpose="assistants") - vector_store: VectorStore = await client.agents.create_vector_store_and_poll( - file_ids=[file.id], name="my_vectorstore" - ) - - # 2. Create file search tool with uploaded resources - file_search = FileSearchTool(vector_store_ids=[vector_store.id]) - - # 3. Create an agent on the Azure AI agent service with the file search tool - agent_definition = await client.agents.create_agent( - model=ai_agent_settings.model_deployment_name, - tools=file_search.definitions, - tool_resources=file_search.resources, - ) - - # 4. Create a Semantic Kernel agent for the Azure AI agent - agent = AzureAIAgent( - client=client, - definition=agent_definition, - ) - - # 5. Create a new thread on the Azure AI agent service - thread = await client.agents.create_thread() - - try: - for user_input in USER_INPUTS: - # 6. Add the user input as a chat message - await agent.add_chat_message(thread_id=thread.id, message=user_input) - print(f"# User: '{user_input}'") - # 7. Invoke the agent for the specified thread for response - async for content in agent.invoke(thread_id=thread.id): - if content.role != AuthorRole.TOOL: - print(f"# Agent: {content.content}") - finally: - # 8. 
Cleanup: Delete the thread and agent - await client.agents.delete_thread(thread.id) - await client.agents.delete_agent(agent.id) - - """ - Sample Output: - # User: 'Who is the youngest employee?' - # Agent: The youngest employee is Teodor Britton, who is an accountant and was born on January 9, 1997... - # User: 'Who works in sales?' - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/step6_azure_ai_agent_openapi.py b/python/samples/getting_started_with_agents/azure_ai_agent/step6_azure_ai_agent_openapi.py deleted file mode 100644 index 1abfb001e93b..000000000000 --- a/python/samples/getting_started_with_agents/azure_ai_agent/step6_azure_ai_agent_openapi.py +++ /dev/null @@ -1,111 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import json -import os - -from azure.ai.projects.models import OpenApiAnonymousAuthDetails, OpenApiTool -from azure.identity.aio import DefaultAzureCredential - -from semantic_kernel.agents.azure_ai import AzureAIAgent, AzureAIAgentSettings -from semantic_kernel.contents import AuthorRole - -""" -The following sample demonstrates how to create a simple, Azure AI agent that -uses OpenAPI tools to answer user questions. -""" - - -# Simulate a conversation with the agent -USER_INPUTS = [ - "What is the name and population of the country that uses currency with abbreviation THB", - "What is the current weather in the capital city of the country?", -] - - -async def main() -> None: - ai_agent_settings = AzureAIAgentSettings.create() - - async with ( - DefaultAzureCredential() as creds, - AzureAIAgent.create_client(credential=creds) as client, - ): - # 1. Read in the OpenAPI spec files - openapi_spec_file_path = os.path.join( - os.path.dirname(os.path.dirname(os.path.realpath(__file__))), - "resources", - ) - with open(os.path.join(openapi_spec_file_path, "weather.json")) as weather_file: - weather_openapi_spec = json.loads(weather_file.read()) - with open(os.path.join(openapi_spec_file_path, "countries.json")) as countries_file: - countries_openapi_spec = json.loads(countries_file.read()) - - # 2. Create OpenAPI tools - # Note that connection or managed identity auth setup requires additional setup in Azure - auth = OpenApiAnonymousAuthDetails() - openapi_weather = OpenApiTool( - name="get_weather", - spec=weather_openapi_spec, - description="Retrieve weather information for a location", - auth=auth, - ) - openapi_countries = OpenApiTool( - name="get_country", - spec=countries_openapi_spec, - description="Retrieve country information", - auth=auth, - ) - - # 3. Create an agent on the Azure AI agent service with the OpenAPI tools - agent_definition = await client.agents.create_agent( - model=ai_agent_settings.model_deployment_name, - tools=openapi_weather.definitions + openapi_countries.definitions, - ) - - # 4. Create a Semantic Kernel agent for the Azure AI agent - agent = AzureAIAgent( - client=client, - definition=agent_definition, - ) - - # 5. Create a new thread on the Azure AI agent service - thread = await client.agents.create_thread() - - try: - for user_input in USER_INPUTS: - # 6. Add the user input as a chat message - await agent.add_chat_message(thread_id=thread.id, message=user_input) - print(f"# User: '{user_input}'") - # 7. Invoke the agent for the specified thread for response - async for content in agent.invoke(thread_id=thread.id): - if content.role != AuthorRole.TOOL: - print(f"# Agent: {content.content}") - finally: - # 8. 
Cleanup: Delete the thread and agent - await client.agents.delete_thread(thread.id) - await client.agents.delete_agent(agent.id) - - """ - Sample Output: - # User: 'What is the name and population of the country that uses currency with abbreviation THB' - # Agent: It seems I encountered an issue while trying to retrieve data about the country that uses the ... - - As of the latest estimates, the population of Thailand is approximately 69 million people. If you ... - # User: 'What is the current weather in the capital city of the country?' - # Agent: The current weather in Bangkok, Thailand, the capital city, is as follows: - - - **Temperature**: 24°C (76°F) - - **Feels Like**: 26°C (79°F) - - **Weather Description**: Light rain - - **Humidity**: 69% - - **Cloud Cover**: 75% - - **Pressure**: 1017 hPa - - **Wind Speed**: 8 km/h (5 mph) from the east-northeast (ENE) - - **Visibility**: 10 km (approximately 6 miles) - - This weather information reflects the current conditions as of the latest observation. If you need ... - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/chat_completion/README.md b/python/samples/getting_started_with_agents/chat_completion/README.md deleted file mode 100644 index 5815c8a75642..000000000000 --- a/python/samples/getting_started_with_agents/chat_completion/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Chat Completion Agents - -The following getting started samples show how to use Chat Completion agents with Semantic Kernel. diff --git a/python/samples/getting_started_with_agents/chat_completion/step1_chat_completion_agent_simple.py b/python/samples/getting_started_with_agents/chat_completion/step1_chat_completion_agent_simple.py deleted file mode 100644 index 850e159a1069..000000000000 --- a/python/samples/getting_started_with_agents/chat_completion/step1_chat_completion_agent_simple.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -from semantic_kernel.agents import ChatCompletionAgent -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents import ChatHistory - -""" -The following sample demonstrates how to create a chat completion agent that -answers user questions using the Azure Chat Completion service. The Chat Completion -Service is passed directly via the ChatCompletionAgent constructor. This sample -demonstrates the basic steps to create an agent and simulate a conversation -with the agent. - -The interaction with the agent is via the `get_response` method, which sends a -user input to the agent and receives a response from the agent. The conversation -history needs to be maintained by the caller in the chat history object. -""" - -# Simulate a conversation with the agent -USER_INPUTS = [ - "Hello, I am John Doe.", - "What is your name?", - "What is my name?", -] - - -async def main(): - # 1. Create the agent by specifying the service - agent = ChatCompletionAgent( - service=AzureChatCompletion(), - name="Assistant", - instructions="Answer the user's questions.", - ) - - # 2. Create a chat history to hold the conversation - chat_history = ChatHistory() - - for user_input in USER_INPUTS: - # 3. Add the user input to the chat history - chat_history.add_user_message(user_input) - print(f"# User: {user_input}") - # 4. Invoke the agent for a response - response = await agent.get_response(chat_history) - print(f"# {response.name}: {response}") - # 5. 
Add the agent response to the chat history - chat_history.add_message(response) - - """ - Sample output: - # User: Hello, I am John Doe. - # Assistant: Hello, John Doe! How can I assist you today? - # User: What is your name? - # Assistant: I don't have a personal name like a human does, but you can call me Assistant.? - # User: What is my name? - # Assistant: You mentioned that your name is John Doe. How can I assist you further, John? - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/chat_completion/step2_chat_completion_agent_with_kernel.py b/python/samples/getting_started_with_agents/chat_completion/step2_chat_completion_agent_with_kernel.py deleted file mode 100644 index 6d13aa4f2293..000000000000 --- a/python/samples/getting_started_with_agents/chat_completion/step2_chat_completion_agent_with_kernel.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -from semantic_kernel import Kernel -from semantic_kernel.agents import ChatCompletionAgent -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents import ChatHistory - -""" -The following sample demonstrates how to create a chat completion agent that -answers user questions using the Azure Chat Completion service. The Chat Completion -Service is first added to the kernel, and the kernel is passed in to the -ChatCompletionAgent constructor. This sample demonstrates the basic steps to -create an agent and simulate a conversation with the agent. - -Note: if both a service and a kernel are provided, the service will be used. - -The interaction with the agent is via the `get_response` method, which sends a -user input to the agent and receives a response from the agent. The conversation -history needs to be maintained by the caller in the chat history object. -""" - -# Simulate a conversation with the agent -USER_INPUTS = [ - "Hello, I am John Doe.", - "What is your name?", - "What is my name?", -] - - -async def main(): - # 1. Create the instance of the Kernel to register an AI service - kernel = Kernel() - kernel.add_service(AzureChatCompletion()) - - # 2. Create the agent - agent = ChatCompletionAgent( - kernel=kernel, - name="Assistant", - instructions="Answer the user's questions.", - ) - - # 3. Create a chat history to hold the conversation - chat_history = ChatHistory() - - for user_input in USER_INPUTS: - # 4. Add the user input to the chat history - chat_history.add_user_message(user_input) - print(f"# User: {user_input}") - # 5. Invoke the agent for a response - response = await agent.get_response(chat_history) - print(f"# {response.name}: {response}") - # 6. Add the agent response to the chat history - chat_history.add_message(response) - - """ - Sample output: - # User: Hello, I am John Doe. - # Assistant: Hello, John Doe! How can I assist you today? - # User: What is your name? - # Assistant: I don't have a personal name like a human does, but you can call me Assistant.? - # User: What is my name? - # Assistant: You mentioned that your name is John Doe. How can I assist you further, John? 
- """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/chat_completion/step3_chat_completion_agent_plugin_simple.py b/python/samples/getting_started_with_agents/chat_completion/step3_chat_completion_agent_plugin_simple.py deleted file mode 100644 index ac9d94ce84ed..000000000000 --- a/python/samples/getting_started_with_agents/chat_completion/step3_chat_completion_agent_plugin_simple.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -from typing import Annotated - -from semantic_kernel.agents import ChatCompletionAgent -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents import ChatHistory -from semantic_kernel.functions import kernel_function - -""" -The following sample demonstrates how to create a chat completion agent that -answers questions about a sample menu using a Semantic Kernel Plugin. The Chat -Completion Service is passed directly via the ChatCompletionAgent constructor. -Additionally, the plugin is supplied via the constructor. -""" - - -# Define a sample plugin for the sample -class MenuPlugin: - """A sample Menu Plugin used for the concept sample.""" - - @kernel_function(description="Provides a list of specials from the menu.") - def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: - return """ - Special Soup: Clam Chowder - Special Salad: Cobb Salad - Special Drink: Chai Tea - """ - - @kernel_function(description="Provides the price of the requested menu item.") - def get_item_price( - self, menu_item: Annotated[str, "The name of the menu item."] - ) -> Annotated[str, "Returns the price of the menu item."]: - return "$9.99" - - -# Simulate a conversation with the agent -USER_INPUTS = [ - "Hello", - "What is the special soup?", - "What does that cost?", - "Thank you", -] - - -async def main(): - # 1. Create the agent - agent = ChatCompletionAgent( - service=AzureChatCompletion(), - name="Host", - instructions="Answer questions about the menu.", - plugins=[MenuPlugin()], - ) - - # 2. Create a chat history to hold the conversation - chat_history = ChatHistory() - - for user_input in USER_INPUTS: - # 3. Add the user input to the chat history - chat_history.add_user_message(user_input) - print(f"# User: {user_input}") - # 4. Invoke the agent for a response - response = await agent.get_response(chat_history) - print(f"# {response.name}: {response.content} ") - - """ - Sample output: - # User: Hello - # Host: Hello! How can I assist you today? - # User: What is the special soup? - # Host: The special soup is Clam Chowder. - # User: What does that cost? - # Host: The special soup, Clam Chowder, costs $9.99. - # User: Thank you - # Host: You're welcome! If you have any more questions, feel free to ask. Enjoy your day! - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/chat_completion/step4_chat_completion_agent_plugin_with_kernel.py b/python/samples/getting_started_with_agents/chat_completion/step4_chat_completion_agent_plugin_with_kernel.py deleted file mode 100644 index 8f2b241f6295..000000000000 --- a/python/samples/getting_started_with_agents/chat_completion/step4_chat_completion_agent_plugin_with_kernel.py +++ /dev/null @@ -1,104 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio -from typing import Annotated - -from semantic_kernel import Kernel -from semantic_kernel.agents import ChatCompletionAgent -from semantic_kernel.connectors.ai import FunctionChoiceBehavior -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents import ChatHistory, FunctionCallContent, FunctionResultContent -from semantic_kernel.functions import KernelArguments, kernel_function - -""" -The following sample demonstrates how to create a chat completion agent that -answers questions about a sample menu using a Semantic Kernel Plugin. The Chat -Completion Service is first added to the kernel, and the kernel is passed in to the -ChatCompletionAgent constructor. Additionally, the plugin is supplied via the kernel. -To enable auto-function calling, the prompt execution settings are retrieved from the kernel -using the specified `service_id`. The function choice behavior is set to `Auto` to allow the -agent to automatically execute the plugin's functions when needed. -""" - - -# Define a sample plugin for the sample -class MenuPlugin: - """A sample Menu Plugin used for the concept sample.""" - - @kernel_function(description="Provides a list of specials from the menu.") - def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: - return """ - Special Soup: Clam Chowder - Special Salad: Cobb Salad - Special Drink: Chai Tea - """ - - @kernel_function(description="Provides the price of the requested menu item.") - def get_item_price( - self, menu_item: Annotated[str, "The name of the menu item."] - ) -> Annotated[str, "Returns the price of the menu item."]: - return "$9.99" - - -# Simulate a conversation with the agent -USER_INPUTS = [ - "Hello", - "What is the special soup?", - "What does that cost?", - "Thank you", -] - - -async def main(): - # 1. Create the instance of the Kernel to register the plugin and service - service_id = "agent" - kernel = Kernel() - kernel.add_plugin(MenuPlugin(), plugin_name="menu") - kernel.add_service(AzureChatCompletion(service_id=service_id)) - - # 2. Configure the function choice behavior to auto invoke kernel functions - # so that the agent can automatically execute the menu plugin functions when needed - settings = kernel.get_prompt_execution_settings_from_service_id(service_id=service_id) - settings.function_choice_behavior = FunctionChoiceBehavior.Auto() - - # 3. Create the agent - agent = ChatCompletionAgent( - kernel=kernel, - name="Host", - instructions="Answer questions about the menu.", - arguments=KernelArguments(settings=settings), - ) - - # 4. Create a chat history to hold the conversation - chat_history = ChatHistory() - - for user_input in USER_INPUTS: - # 5. Add the user input to the chat history - chat_history.add_user_message(user_input) - print(f"# User: {user_input}") - # 6. Invoke the agent for a response - async for content in agent.invoke(chat_history): - print(f"# {content.name}: ", end="") - if ( - not any(isinstance(item, (FunctionCallContent, FunctionResultContent)) for item in content.items) - and content.content.strip() - ): - # We only want to print the content if it's not a function call or result - print(f"{content.content}", end="", flush=True) - print("") - - """ - Sample output: - # User: Hello - # Host: Hello! How can I assist you today? - # User: What is the special soup? - # Host: The special soup is Clam Chowder. - # User: What does that cost? - # Host: The special soup, Clam Chowder, costs $9.99. - # User: Thank you - # Host: You're welcome! 
If you have any more questions, feel free to ask. Enjoy your day! - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/chat_completion/step7_chat_completion_agent_json_result.py b/python/samples/getting_started_with_agents/chat_completion/step7_chat_completion_agent_json_result.py deleted file mode 100644 index 0f4e51202805..000000000000 --- a/python/samples/getting_started_with_agents/chat_completion/step7_chat_completion_agent_json_result.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -from pydantic import BaseModel, ValidationError - -from semantic_kernel import Kernel -from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.strategies import TerminationStrategy -from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion, OpenAIChatPromptExecutionSettings -from semantic_kernel.functions import KernelArguments - -""" -The following sample demonstrates how to configure an Agent Group Chat, and invoke an -agent with only a single turn.A custom termination strategy is provided where the model -is to rate the user input on creativity and expressiveness and end the chat when a score -of 70 or higher is provided. -""" - - -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - kernel = Kernel() - kernel.add_service(OpenAIChatCompletion(service_id=service_id)) - return kernel - - -class InputScore(BaseModel): - """A model for the input score.""" - - score: int - notes: str - - -class ThresholdTerminationStrategy(TerminationStrategy): - """A strategy for determining when an agent should terminate.""" - - threshold: int = 70 - - async def should_agent_terminate(self, agent, history): - """Check if the agent should terminate.""" - try: - result = InputScore.model_validate_json(history[-1].content or "") - return result.score >= self.threshold - except ValidationError: - return False - - -INSTRUCTION = """ -Think step-by-step and rate the user input on creativity and expressiveness from 1-100 with some notes on improvements. -""" - -# Simulate a conversation with the agent -USER_INPUTS = { - "The sunset is very colorful.", - "The sunset is setting over the mountains.", - "The sunset is setting over the mountains and fills the sky with a deep red flame, setting the clouds ablaze.", -} - - -async def main(): - # 1. Create the instance of the Kernel to register a service - service_id = "agent" - kernel = _create_kernel_with_chat_completion(service_id) - - # 2. Configure the prompt execution settings to return the score in the desired format - settings = kernel.get_prompt_execution_settings_from_service_id(service_id) - assert isinstance(settings, OpenAIChatPromptExecutionSettings) # nosec - settings.response_format = InputScore - - # 3. Create the agent - agent = ChatCompletionAgent( - kernel=kernel, - name="Tutor", - instructions=INSTRUCTION, - arguments=KernelArguments(settings), - ) - - # 4. Create the group chat with the custom termination strategy - group_chat = AgentGroupChat(termination_strategy=ThresholdTerminationStrategy(maximum_iterations=10)) - - for user_input in USER_INPUTS: - # 5. Add the user input to the chat history - await group_chat.add_chat_message(message=user_input) - print(f"# User: {user_input}") - - # 6. 
Invoke the chat with the agent for a response - async for content in group_chat.invoke_single_turn(agent): - print(f"# {content.name}: {content.content}") - - """ - Sample output: - # User: The sunset is very colorful. - # Tutor: {"score":45,"notes":"The sentence 'The sunset is very colorful' is simple and direct. While it ..."} - # User: The sunset is setting over the mountains. - # Tutor: {"score":50,"notes":"This sentence provides a basic scene of a sunset over mountains, which ..."} - # User: The sunset is setting over the mountains and fills the sky with a deep red flame, setting the clouds ablaze. - # Tutor: {"score":75,"notes":"This sentence demonstrates improved creativity and expressiveness by ..."} - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/chat_completion/step8_chat_completion_agent_logging.py b/python/samples/getting_started_with_agents/chat_completion/step8_chat_completion_agent_logging.py deleted file mode 100644 index 50ff8574ba09..000000000000 --- a/python/samples/getting_started_with_agents/chat_completion/step8_chat_completion_agent_logging.py +++ /dev/null @@ -1,112 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import logging - -from semantic_kernel import Kernel -from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.strategies import TerminationStrategy -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion - -""" -The following sample demonstrates how to create a simple, agent group chat that -utilizes An Art Director Chat Completion Agent along with a Copy Writer Chat -Completion Agent to complete a task. The main point of this sample is to note -how to enable logging to view all interactions between the agents and the model. -""" - -# 0. Enable logging -# NOTE: This is all that is required to enable logging. -# Set the desired level to INFO, DEBUG, etc. -logging.basicConfig(level=logging.INFO) - - -class ApprovalTerminationStrategy(TerminationStrategy): - """A strategy for determining when an agent should terminate.""" - - async def should_agent_terminate(self, agent, history): - """Check if the agent should terminate.""" - return "approved" in history[-1].content.lower() - - -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) - return kernel - - -REVIEWER_NAME = "ArtDirector" -REVIEWER_INSTRUCTIONS = """ -You are an art director who has opinions about copywriting born of a love for David Ogilvy. -The goal is to determine if the given copy is acceptable to print. -If so, state that it is approved. -If not, provide insight on how to refine suggested copy without example. -""" - -COPYWRITER_NAME = "CopyWriter" -COPYWRITER_INSTRUCTIONS = """ -You are a copywriter with ten years of experience and are known for brevity and a dry humor. -The goal is to refine and decide on the single best copy as an expert in the field. -Only provide a single proposal per response. -You're laser focused on the goal at hand. -Don't waste time with chit chat. -Consider suggestions when refining an idea. -""" - -TASK = "a slogan for a new line of electric cars." - - -async def main(): - # 1. Create the reviewer agent based on the chat completion service - agent_reviewer = ChatCompletionAgent( - kernel=_create_kernel_with_chat_completion("artdirector"), - name=REVIEWER_NAME, - instructions=REVIEWER_INSTRUCTIONS, - ) - - # 2. 
Create the copywriter agent based on the chat completion service - agent_writer = ChatCompletionAgent( - kernel=_create_kernel_with_chat_completion("copywriter"), - name=COPYWRITER_NAME, - instructions=COPYWRITER_INSTRUCTIONS, - ) - - # 3. Place the agents in a group chat with a custom termination strategy - group_chat = AgentGroupChat( - agents=[agent_writer, agent_reviewer], - termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), - ) - - # 4. Add the task as a message to the group chat - await group_chat.add_chat_message(message=TASK) - print(f"# User: {TASK}") - - # 5. Invoke the chat - async for content in group_chat.invoke(): - print(f"# {content.name}: {content.content}") - - """ - Sample output: - INFO:semantic_kernel.agents.group_chat.agent_chat:Adding `1` agent chat messages - # User: a slogan for a new line of electric cars. - INFO:semantic_kernel.agents.strategies.selection.sequential_selection_strategy:Selected agent at index 0 (ID: ... - INFO:semantic_kernel.agents.group_chat.agent_chat:Invoking agent CopyWriter - INFO:semantic_kernel.utils.telemetry.model_diagnostics.decorators:{"role": "system", "content": "\nYou are a ... - INFO:semantic_kernel.utils.telemetry.model_diagnostics.decorators:{"role": "user", "content": "a slogan for ... - INFO:semantic_kernel.connectors.ai.open_ai.services.open_ai_handler:OpenAI usage: CompletionUsage(completion_... - INFO:semantic_kernel.utils.telemetry.model_diagnostics.decorators:{"message": {"role": "assistant", "content": ... - INFO:semantic_kernel.agents.chat_completion.chat_completion_agent:[ChatCompletionAgent] Invoked AzureChatCompl... - INFO:semantic_kernel.agents.strategies.termination.termination_strategy:Evaluating termination criteria for ... - INFO:semantic_kernel.agents.strategies.termination.termination_strategy:Agent 598d827e-ce5e-44fa-879b-42793bb... - # CopyWriter: "Drive Change. Literally." - INFO:semantic_kernel.agents.strategies.selection.sequential_selection_strategy:Selected agent at index 1 (ID: ... - INFO:semantic_kernel.agents.group_chat.agent_chat:Invoking agent ArtDirector - INFO:semantic_kernel.utils.telemetry.model_diagnostics.decorators:{"role": "system", "content": "\nYou are an ... - INFO:semantic_kernel.utils.telemetry.model_diagnostics.decorators:{"role": "user", "content": "a slogan for a ... - INFO:semantic_kernel.utils.telemetry.model_diagnostics.decorators:{"role": "assistant", "content": "\"Drive ... - ... - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/chat_completion/step9_chat_completion_agent_structured_outputs.py b/python/samples/getting_started_with_agents/chat_completion/step9_chat_completion_agent_structured_outputs.py deleted file mode 100644 index c01085152999..000000000000 --- a/python/samples/getting_started_with_agents/chat_completion/step9_chat_completion_agent_structured_outputs.py +++ /dev/null @@ -1,112 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import json - -from pydantic import BaseModel - -from semantic_kernel.agents import ChatCompletionAgent -from semantic_kernel.connectors.ai.open_ai import ( - AzureChatCompletion, - AzureChatPromptExecutionSettings, -) -from semantic_kernel.contents import ChatHistory -from semantic_kernel.functions.kernel_arguments import KernelArguments - -""" -The following sample demonstrates how to create a chat completion agent that -answers user questions using structured outputs. 
The `Reasoning` model is defined -on the prompt execution settings. The settings are then passed into the agent -via the `KernelArguments` object. - -The interaction with the agent is via the `get_response` method, which sends a -user input to the agent and receives a response from the agent. The conversation -history needs to be maintained by the caller in the chat history object. -""" - - -# Define the BaseModel we will use for structured outputs -class Step(BaseModel): - explanation: str - output: str - - -class Reasoning(BaseModel): - steps: list[Step] - final_answer: str - - -# Simulate a conversation with the agent -USER_INPUT = "how can I solve 8x + 7y = -23, and 4x=12?" - - -async def main(): - # 1. Create the prompt settings - settings = AzureChatPromptExecutionSettings() - settings.response_format = Reasoning - - # 2. Create the agent by specifying the service - agent = ChatCompletionAgent( - service=AzureChatCompletion(), - name="Assistant", - instructions="Answer the user's questions.", - arguments=KernelArguments(settings=settings), - ) - - # 2. Create a chat history to hold the conversation - chat_history = ChatHistory() - - # 3. Add the user input to the chat history - chat_history.add_user_message(USER_INPUT) - print(f"# User: {USER_INPUT}") - # 4. Invoke the agent for a response - response = await agent.get_response(chat_history) - # 5. Validate the response and print the structured output - reasoned_result = Reasoning.model_validate(json.loads(response.content)) - print(f"# {response.name}:\n\n{reasoned_result.model_dump_json(indent=4)}") - # 6. Add the agent response to the chat history - chat_history.add_message(response) - - """ - Sample output: - # User: how can I solve 8x + 7y = -23, and 4x=12? - # Assistant: - - { - "steps": [ - { - "explanation": "The second equation 4x = 12 can be solved for x by dividing both sides by 4.", - "output": "x = 3." - }, - { - "explanation": "Substitute x = 3 from the second equation into the first equation 8x + 7y = -23.", - "output": "8(3) + 7y = -23." - }, - { - "explanation": "Calculate 8 times 3 to simplify the equation.", - "output": "24 + 7y = -23." - }, - { - "explanation": "Subtract 24 from both sides to isolate the term with y.", - "output": "7y = -23 - 24." - }, - { - "explanation": "Perform the subtraction.", - "output": "7y = -47." - }, - { - "explanation": "Divide both sides by 7 to solve for y.", - "output": "y = -47 / 7." - }, - { - "explanation": "Simplify the division to get the value of y.", - "output": "y = -6.714285714285714 (approximately -6.71)." - } - ], - "final_answer": "The solution to the system of equations is x = 3 and y = -6.71." - } - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/openai_assistant/README.md b/python/samples/getting_started_with_agents/openai_assistant/README.md deleted file mode 100644 index 6689c05f9f4b..000000000000 --- a/python/samples/getting_started_with_agents/openai_assistant/README.md +++ /dev/null @@ -1,101 +0,0 @@ -## OpenAI Assistant Agents - -The following getting started samples show how to use OpenAI Assistant agents with Semantic Kernel. - -## Assistants API Overview - -The Assistants API is a robust solution from OpenAI that empowers developers to integrate powerful, purpose-built AI assistants into their applications. It streamlines the development process by handling conversation histories, managing threads, and providing seamless access to advanced tools. 
- -### Key Features - -- **Purpose-Built AI Assistants:** - Assistants are specialized AIs that leverage OpenAI’s models to interact with users, access files, maintain persistent threads, and call additional tools. This enables highly tailored and effective user interactions. - -- **Simplified Conversation Management:** - The concept of a **thread** -- a dedicated conversation session between an assistant and a user -- ensures that message history is managed automatically. Threads optimize the conversation context by storing and truncating messages as needed. - -- **Integrated Tool Access:** - The API provides built-in tools such as: - - **Code Interpreter:** Allows the assistant to execute code, enhancing its ability to solve complex tasks. - - **File Search:** Implements best practices for retrieving data from uploaded files, including advanced chunking and embedding techniques. - -- **Enhanced Function Calling:** - With improved support for third-party tool integration, the Assistants API enables assistants to extend their capabilities beyond native functions. - -For more detailed technical information, refer to the [Assistants API](https://platform.openai.com/docs/assistants/overview). - -### Semantic Kernel OpenAI Assistant Agents - -OpenAI Assistant Agents are created in the following way: - -```python -from semantic_kernel.agents.open_ai import OpenAIAssistantAgent - -# Create the client using OpenAI resources and configuration -client, model = OpenAIAssistantAgent.setup_resources() - -# Create the assistant definition -definition = await client.beta.assistants.create( - model=model, - instructions="", - name="", -) - -# Define the Semantic Kernel OpenAI Assistant Agent -agent = OpenAIAssistantAgent( - client=client, - definition=definition, -) - -# Define a thread and invoke the agent with the user input -thread = await agent.client.beta.threads.create() - -# Add a message to the thread -await agent.add_chat_message(thread_id=thread.id, message="Why is the sky blue?") - -# Invoke the agent -async for content in agent.invoke(thread_id=thread.id): - print(f"# {content.role}: {content.content}") -``` - -### Semantic Kernel Azure Assistant Agents - -Azure Assistant Agents are currently in preview and require a `-preview` API version (minimum version: `2024-05-01-preview`). As new features are introduced, API versions will be updated accordingly. For the latest versioning details, please refer to the [Azure OpenAI API preview lifecycle](https://learn.microsoft.com/azure/ai-services/openai/api-version-deprecation). 
- -To specify the correct API version, set the following environment variable (for example, in your `.env` file): - -```bash -AZURE_OPENAI_API_VERSION="2025-01-01-preview" -``` - -Alternatively, you can pass the `api_version` parameter when creating an `AzureAssistantAgent`: - -```python -from semantic_kernel.agents.open_ai import AzureAssistantAgent - -# Create the client using Azure OpenAI resources and configuration -client, model = AzureAssistantAgent.setup_resources() - -# Create the assistant definition -definition = await client.beta.assistants.create( - model=model, - instructions="", - name="", -) - -# Define the Semantic Kernel Azure OpenAI Assistant Agent -agent = AzureAssistantAgent( - client=client, - definition=definition, -) - -# Define a thread and invoke the agent with the user input -thread = await agent.client.beta.threads.create() - -# Add a message to the thread -await agent.add_chat_message(thread_id=thread.id, message="Why is the sky blue?") - -# Invoke the agent -async for content in agent.invoke(thread_id=thread.id): - print(f"# {content.role}: {content.content}") -``` \ No newline at end of file diff --git a/python/samples/getting_started_with_agents/openai_assistant/step1_assistant.py b/python/samples/getting_started_with_agents/openai_assistant/step1_assistant.py deleted file mode 100644 index d46a127ee71a..000000000000 --- a/python/samples/getting_started_with_agents/openai_assistant/step1_assistant.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio - -from semantic_kernel.agents.open_ai import AzureAssistantAgent - -""" -The following sample demonstrates how to create an OpenAI assistant using either -Azure OpenAI or OpenAI. The sample shows how to have the assistant answrer -questions about the world. - -The interaction with the agent is via the `get_response` method, which sends a -user input to the agent and receives a response from the agent. The conversation -history is maintained by the agent service, i.e. the responses are automatically -associated with the thread. Therefore, client code does not need to maintain the -conversation history. -""" - -# Simulate a conversation with the agent -USER_INPUTS = [ - "Why is the sky blue?", - "What is the speed of light?", -] - - -async def main(): - # 1. Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # 2. Create the assistant on the Azure OpenAI service - definition = await client.beta.assistants.create( - model=model, - instructions="Answer questions about the world in one sentence.", - name="Assistant", - ) - - # 3. Create a Semantic Kernel agent for the Azure OpenAI assistant - agent = AzureAssistantAgent( - client=client, - definition=definition, - ) - - # 4. Create a new thread on the Azure OpenAI assistant service - thread = await agent.client.beta.threads.create() - - try: - for user_input in USER_INPUTS: - # 5. Add the user input to the chat thread - await agent.add_chat_message( - thread_id=thread.id, - message=user_input, - ) - print(f"# User: '{user_input}'") - # 6. Invoke the agent for the current thread and print the response - response = await agent.get_response(thread_id=thread.id) - print(f"# {response.name}: {response.content}") - - finally: - # 7. 
Clean up the resources - await agent.client.beta.threads.delete(thread.id) - await agent.client.beta.assistants.delete(assistant_id=agent.id) - - """ - You should see output similar to the following: - - # User: 'Why is the sky blue?' - # Agent: The sky appears blue because molecules in the atmosphere scatter sunlight in all directions, and blue - light is scattered more than other colors because it travels in shorter, smaller waves. - # User: 'What is the speed of light?' - # Agent: The speed of light in a vacuum is approximately 299,792,458 meters per second - (about 186,282 miles per second). - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/openai_assistant/step2_assistant_plugins.py b/python/samples/getting_started_with_agents/openai_assistant/step2_assistant_plugins.py deleted file mode 100644 index a9ea4f10c9b2..000000000000 --- a/python/samples/getting_started_with_agents/openai_assistant/step2_assistant_plugins.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio -from typing import Annotated - -from semantic_kernel.agents.open_ai import AzureAssistantAgent -from semantic_kernel.functions import kernel_function - -""" -The following sample demonstrates how to create an OpenAI -assistant using either Azure OpenAI or OpenAI. The sample -shows how to use a Semantic Kernel plugin as part of the -OpenAI Assistant. -""" - - -# Define a sample plugin for the sample -class MenuPlugin: - """A sample Menu Plugin used for the concept sample.""" - - @kernel_function(description="Provides a list of specials from the menu.") - def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: - return """ - Special Soup: Clam Chowder - Special Salad: Cobb Salad - Special Drink: Chai Tea - """ - - @kernel_function(description="Provides the price of the requested menu item.") - def get_item_price( - self, menu_item: Annotated[str, "The name of the menu item."] - ) -> Annotated[str, "Returns the price of the menu item."]: - return "$9.99" - - -# Simulate a conversation with the agent -USER_INPUTS = [ - "Hello", - "What is the special soup?", - "What is the special drink?", - "How much is it?", - "Thank you", -] - - -async def main(): - # 1. Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # 2. Create the assistant on the Azure OpenAI service - definition = await client.beta.assistants.create( - model=model, - instructions="Answer questions about the menu.", - name="Host", - ) - - # 3. Create a Semantic Kernel agent for the Azure OpenAI assistant - agent = AzureAssistantAgent( - client=client, - definition=definition, - plugins=[MenuPlugin()], # The plugins can be passed in as a list to the constructor - ) - # Note: plugins can also be configured on the Kernel and passed in as a parameter to the OpenAIAssistantAgent - - # 4. Create a new thread on the Azure OpenAI assistant service - thread = await agent.client.beta.threads.create() - - try: - for user_input in USER_INPUTS: - # 5. Add the user input to the chat thread - await agent.add_chat_message( - thread_id=thread.id, - message=user_input, - ) - print(f"# User: '{user_input}'") - # 6. Invoke the agent for the current thread and print the response - async for content in agent.invoke(thread_id=thread.id): - print(f"# Agent: {content.content}") - finally: - # 7. 
Clean up the resources - await agent.client.beta.threads.delete(thread.id) - await agent.client.beta.assistants.delete(assistant_id=agent.id) - - """ - You should see output similar to the following: - - # User: 'Hello' - # Agent: Hello! How can I assist you today? - # User: 'What is the special soup?' - # Agent: The special soup today is Clam Chowder. Would you like to know more about any other menu items? - # User: 'What is the special drink?' - # Agent: The special drink today is Chai Tea. Would you like more information on anything else? - # User: 'Thank you' - # Agent: You're welcome! If you have any more questions or need further assistance, feel free to ask. - Enjoy your day! - """ - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/openai_assistant/step3_assistant_vision.py b/python/samples/getting_started_with_agents/openai_assistant/step3_assistant_vision.py deleted file mode 100644 index 8881ee3e6d4e..000000000000 --- a/python/samples/getting_started_with_agents/openai_assistant/step3_assistant_vision.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import os - -from semantic_kernel.agents.open_ai import OpenAIAssistantAgent -from semantic_kernel.contents import AuthorRole, ChatMessageContent, FileReferenceContent, ImageContent, TextContent - -""" -The following sample demonstrates how to create an OpenAI -assistant using OpenAI configuration, and leverage the -multi-modal content types to have the assistant describe images -and answer questions about them. This sample uses non-streaming responses. -""" - - -async def main(): - # 1. Create the OpenAI Assistant Agent client - # Note Azure OpenAI doesn't support vision files yet - client, model = OpenAIAssistantAgent.setup_resources() - - # 2. Load a sample image of a cat used for the assistant to describe - file_path = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "resources", "cat.jpg") - - with open(file_path, "rb") as file: - file = await client.files.create(file=file, purpose="assistants") - - # 3. Create the assistant on the OpenAI service - definition = await client.beta.assistants.create( - model=model, - instructions="Answer questions about the provided images.", - name="Vision", - ) - - # 4. Create a Semantic Kernel agent for the OpenAI assistant - agent = OpenAIAssistantAgent( - client=client, - definition=definition, - ) - - # 5. Create a new thread on the OpenAI assistant service - thread = await agent.client.beta.threads.create() - - # 6. Define the user messages with the image content to simulate the conversation - user_messages = { - ChatMessageContent( - role=AuthorRole.USER, - items=[ - TextContent(text="Describe this image."), - ImageContent( - uri="https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/New_york_times_square-terabass.jpg/1200px-New_york_times_square-terabass.jpg" - ), - ], - ), - ChatMessageContent( - role=AuthorRole.USER, - items=[ - TextContent(text="What is the main color in this image?"), - ImageContent(uri="https://upload.wikimedia.org/wikipedia/commons/5/56/White_shark.jpg"), - ], - ), - ChatMessageContent( - role=AuthorRole.USER, - items=[ - TextContent(text="Is there an animal in this image?"), - FileReferenceContent(file_id=file.id), - ], - ), - } - - try: - for message in user_messages: - # 7. 
Add the user input to the chat thread - await agent.add_chat_message(thread_id=thread.id, message=message) - print(f"# User: {str(message)}") # type: ignore - # 8. Invoke the agent for the current thread and print the response - async for content in agent.invoke(thread_id=thread.id): - print(f"# Agent: {content.content}\n") - finally: - # 9. Clean up the resources - await client.files.delete(file.id) - await agent.client.beta.threads.delete(thread.id) - await agent.client.beta.assistants.delete(assistant_id=agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/openai_assistant/step4_assistant_tool_code_interpreter.py b/python/samples/getting_started_with_agents/openai_assistant/step4_assistant_tool_code_interpreter.py deleted file mode 100644 index e8a542a8045f..000000000000 --- a/python/samples/getting_started_with_agents/openai_assistant/step4_assistant_tool_code_interpreter.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -import asyncio - -from semantic_kernel.agents.open_ai import AzureAssistantAgent - -""" -The following sample demonstrates how to create an OpenAI -assistant using either Azure OpenAI or OpenAI and leverage the -assistant's code interpreter functionality to have it write -Python code to print Fibonacci numbers. -""" - -TASK = "Use code to determine the values in the Fibonacci sequence that that are less then the value of 101?" - - -async def main(): - # 1. Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # 2. Configure the code interpreter tool and resources for the Assistant - code_interpreter_tool, code_interpreter_tool_resources = AzureAssistantAgent.configure_code_interpreter_tool() - - # 3. Create the assistant on the Azure OpenAI service - definition = await client.beta.assistants.create( - model=model, - name="CodeRunner", - instructions="Run the provided request as code and return the result.", - tools=code_interpreter_tool, - tool_resources=code_interpreter_tool_resources, - ) - - # 4. Create a Semantic Kernel agent for the Azure OpenAI assistant - agent = AzureAssistantAgent( - client=client, - definition=definition, - ) - - # 4. Create a new thread on the Azure OpenAI assistant service - thread = await agent.client.beta.threads.create() - - print(f"# User: '{TASK}'") - try: - # 5. Add the user input to the chat thread - await agent.add_chat_message( - thread_id=thread.id, - message=TASK, - ) - # 6. Invoke the agent for the current thread and print the response - async for content in agent.invoke(thread_id=thread.id): - print(f"# Agent: {content.content}") - finally: - # 7. Clean up the resources - await agent.client.beta.threads.delete(thread.id) - await agent.client.beta.assistants.delete(agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/openai_assistant/step5_assistant_tool_file_search.py b/python/samples/getting_started_with_agents/openai_assistant/step5_assistant_tool_file_search.py deleted file mode 100644 index ef841fad2bf5..000000000000 --- a/python/samples/getting_started_with_agents/openai_assistant/step5_assistant_tool_file_search.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio -import os - -from semantic_kernel.agents.open_ai import AzureAssistantAgent - -""" -The following sample demonstrates how to create an OpenAI -Assistant using either Azure OpenAI or OpenAI and leverage the -assistant's file search functionality. -""" - -# Simulate a conversation with the agent -USER_INPUTS = { - "Who is the youngest employee?", - "Who works in sales?", - "I have a customer request, who can help me?", -} - - -async def main(): - # 1. Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # 2. Read and upload the file to the Azure OpenAI assistant service - pdf_file_path = os.path.join( - os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "resources", "employees.pdf" - ) - - with open(pdf_file_path, "rb") as file: - file = await client.files.create(file=file, purpose="assistants") - - vector_store = await client.beta.vector_stores.create( - name="step4_assistant_file_search", - file_ids=[file.id], - ) - - # 3. Create file search tool with uploaded resources - file_search_tool, file_search_tool_resources = AzureAssistantAgent.configure_file_search_tool(vector_store.id) - - # 4. Create the assistant on the Azure OpenAI service with the file search tool - definition = await client.beta.assistants.create( - model=model, - instructions="Find answers to the user's questions in the provided file.", - name="FileSearch", - tools=file_search_tool, - tool_resources=file_search_tool_resources, - ) - - # 5. Create a Semantic Kernel agent for the Azure OpenAI assistant - agent = AzureAssistantAgent( - client=client, - definition=definition, - ) - - # 6. Create a new thread on the Azure OpenAI assistant service - thread = await agent.client.beta.threads.create() - - try: - for user_input in USER_INPUTS: - # 7. Add the user input to the chat thread - await agent.add_chat_message( - thread_id=thread.id, - message=user_input, - ) - print(f"# User: '{user_input}'") - # 8. Invoke the agent for the current thread and print the response - async for content in agent.invoke(thread_id=thread.id): - print(f"# Agent: {content.content}") - finally: - # 9. 
Clean up the resources - await client.files.delete(file.id) - await client.beta.vector_stores.delete(vector_store.id) - await client.beta.threads.delete(thread.id) - await client.beta.assistants.delete(agent.id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/resources/countries.json b/python/samples/getting_started_with_agents/resources/countries.json deleted file mode 100644 index b88d5040750a..000000000000 --- a/python/samples/getting_started_with_agents/resources/countries.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "openapi": "3.1.0", - "info": { - "title": "RestCountries.NET API", - "description": "Web API version 3.1 for managing country items, based on previous implementations from restcountries.eu and restcountries.com.", - "version": "v3.1" - }, - "servers": [ - { "url": "https://restcountries.net" } - ], - "auth": [], - "paths": { - "/v3.1/currency": { - "get": { - "description": "Search by currency.", - "operationId": "LookupCountryByCurrency", - "parameters": [ - { - "name": "currency", - "in": "query", - "description": "The currency to search for.", - "required": true, - "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "Success", - "content": { - "text/plain": { - "schema": { - "type": "string" - } - } - } - } - } - } - } - }, - "components": { - "schemes": {} - } -} \ No newline at end of file diff --git a/python/samples/getting_started_with_agents/resources/weather.json b/python/samples/getting_started_with_agents/resources/weather.json deleted file mode 100644 index c3009f417de4..000000000000 --- a/python/samples/getting_started_with_agents/resources/weather.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "openapi": "3.1.0", - "info": { - "title": "get weather data", - "description": "Retrieves current weather data for a location based on wttr.in.", - "version": "v1.0.0" - }, - "servers": [ - { - "url": "https://wttr.in" - } - ], - "auth": [], - "paths": { - "/{location}": { - "get": { - "description": "Get weather information for a specific location", - "operationId": "GetCurrentWeather", - "parameters": [ - { - "name": "location", - "in": "path", - "description": "City or location to retrieve the weather for", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "format", - "in": "query", - "description": "Always use j1 value for this parameter", - "required": true, - "schema": { - "type": "string", - "default": "j1" - } - } - ], - "responses": { - "200": { - "description": "Successful response", - "content": { - "text/plain": { - "schema": { - "type": "string" - } - } - } - }, - "404": { - "description": "Location not found" - } - }, - "deprecated": false - } - } - }, - "components": { - "schemes": {} - } -} \ No newline at end of file diff --git a/python/samples/getting_started_with_agents/step10_assistant_tool_file_search.py b/python/samples/getting_started_with_agents/step10_assistant_tool_file_search.py new file mode 100644 index 000000000000..3ac413f92400 --- /dev/null +++ b/python/samples/getting_started_with_agents/step10_assistant_tool_file_search.py @@ -0,0 +1,81 @@ +# Copyright (c) Microsoft. All rights reserved. 
+import asyncio +import os + +from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent +from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI and leverage the # +# assistant's file search functionality. # +##################################################################### + + +AGENT_NAME = "FileSearch" +AGENT_INSTRUCTIONS = "Find answers to the user's questions in the provided file." + +# Note: you may toggle this to switch between AzureOpenAI and OpenAI +use_azure_openai = True + + +# A helper method to invoke the agent with the user input +async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None: + """Invoke the agent with the user input.""" + await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input)) + + print(f"# {AuthorRole.USER}: '{input}'") + + async for content in agent.invoke(thread_id=thread_id): + if content.role != AuthorRole.TOOL: + print(f"# {content.role}: {content.content}") + + +async def main(): + # Create the instance of the Kernel + kernel = Kernel() + + # Define a service_id for the sample + service_id = "agent" + + # Get the path to the travelinfo.txt file + pdf_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "resources", "employees.pdf") + + # Create the agent configuration + if use_azure_openai: + agent = await AzureAssistantAgent.create( + kernel=kernel, + service_id=service_id, + name=AGENT_NAME, + instructions=AGENT_INSTRUCTIONS, + enable_file_search=True, + vector_store_filenames=[pdf_file_path], + ) + else: + agent = await OpenAIAssistantAgent.create( + kernel=kernel, + service_id=service_id, + name=AGENT_NAME, + instructions=AGENT_INSTRUCTIONS, + enable_file_search=True, + vector_store_filenames=[pdf_file_path], + ) + + # Define a thread and invoke the agent with the user input + thread_id = await agent.create_thread() + + try: + await invoke_agent(agent, thread_id=thread_id, input="Who is the youngest employee?") + await invoke_agent(agent, thread_id=thread_id, input="Who works in sales?") + await invoke_agent(agent, thread_id=thread_id, input="I have a customer request, who can help me?") + finally: + [await agent.delete_file(file_id) for file_id in agent.file_search_file_ids] + await agent.delete_thread(thread_id) + await agent.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/step1_agent.py b/python/samples/getting_started_with_agents/step1_agent.py new file mode 100644 index 000000000000..28d19a45df1f --- /dev/null +++ b/python/samples/getting_started_with_agents/step1_agent.py @@ -0,0 +1,67 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +from functools import reduce + +from semantic_kernel.agents import ChatCompletionAgent +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +################################################################### +# The following sample demonstrates how to create a simple, # +# non-group agent that repeats the user message in the voice # +# of a pirate and then ends with a parrot sound. # +################################################################### + +# To toggle streaming or non-streaming mode, change the following boolean +streaming = True + +# Define the agent name and instructions +PARROT_NAME = "Parrot" +PARROT_INSTRUCTIONS = "Repeat the user message in the voice of a pirate and then end with a parrot sound." + + +async def invoke_agent(agent: ChatCompletionAgent, input: str, chat: ChatHistory): + """Invoke the agent with the user input.""" + chat.add_user_message(input) + + print(f"# {AuthorRole.USER}: '{input}'") + + if streaming: + contents = [] + content_name = "" + async for content in agent.invoke_stream(chat): + content_name = content.name + contents.append(content) + streaming_chat_message = reduce(lambda first, second: first + second, contents) + print(f"# {content.role} - {content_name or '*'}: '{streaming_chat_message}'") + chat.add_message(streaming_chat_message) + else: + async for content in agent.invoke(chat): + print(f"# {content.role} - {content.name or '*'}: '{content.content}'") + chat.add_message(content) + + +async def main(): + # Create the instance of the Kernel + kernel = Kernel() + + # Add the OpenAIChatCompletion AI Service to the Kernel + kernel.add_service(AzureChatCompletion(service_id="agent")) + + # Create the agent + agent = ChatCompletionAgent(service_id="agent", kernel=kernel, name=PARROT_NAME, instructions=PARROT_INSTRUCTIONS) + + # Define the chat history + chat = ChatHistory() + + # Respond to user input + await invoke_agent(agent, "Fortune favors the bold.", chat) + await invoke_agent(agent, "I came, I saw, I conquered.", chat) + await invoke_agent(agent, "Practice makes perfect.", chat) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/step2_plugins.py b/python/samples/getting_started_with_agents/step2_plugins.py new file mode 100644 index 000000000000..53772408211d --- /dev/null +++ b/python/samples/getting_started_with_agents/step2_plugins.py @@ -0,0 +1,98 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +from typing import Annotated + +from semantic_kernel.agents import ChatCompletionAgent +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.kernel import Kernel + +################################################################### +# The following sample demonstrates how to create a simple, # +# non-group agent that utilizes plugins defined as part of # +# the Kernel. 
# +################################################################### + +# This sample allows for a streaming response verus a non-streaming response +streaming = True + +# Define the agent name and instructions +HOST_NAME = "Host" +HOST_INSTRUCTIONS = "Answer questions about the menu." + + +# Define a sample plugin for the sample +class MenuPlugin: + """A sample Menu Plugin used for the concept sample.""" + + @kernel_function(description="Provides a list of specials from the menu.") + def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: + return """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """ + + @kernel_function(description="Provides the price of the requested menu item.") + def get_item_price( + self, menu_item: Annotated[str, "The name of the menu item."] + ) -> Annotated[str, "Returns the price of the menu item."]: + return "$9.99" + + +# A helper method to invoke the agent with the user input +async def invoke_agent(agent: ChatCompletionAgent, input: str, chat: ChatHistory) -> None: + """Invoke the agent with the user input.""" + chat.add_user_message(input) + + print(f"# {AuthorRole.USER}: '{input}'") + + if streaming: + contents = [] + content_name = "" + async for content in agent.invoke_stream(chat): + content_name = content.name + contents.append(content) + message_content = "".join([content.content for content in contents]) + print(f"# {content.role} - {content_name or '*'}: '{message_content}'") + chat.add_assistant_message(message_content) + else: + async for content in agent.invoke(chat): + print(f"# {content.role} - {content.name or '*'}: '{content.content}'") + chat.add_message(content) + + +async def main(): + # Create the instance of the Kernel + kernel = Kernel() + + service_id = "agent" + kernel.add_service(AzureChatCompletion(service_id=service_id)) + + settings = kernel.get_prompt_execution_settings_from_service_id(service_id=service_id) + # Configure the function choice behavior to auto invoke kernel functions + settings.function_choice_behavior = FunctionChoiceBehavior.Auto() + + kernel.add_plugin(MenuPlugin(), plugin_name="menu") + + # Create the agent + agent = ChatCompletionAgent( + service_id="agent", kernel=kernel, name=HOST_NAME, instructions=HOST_INSTRUCTIONS, execution_settings=settings + ) + + # Define the chat history + chat = ChatHistory() + + # Respond to user input + await invoke_agent(agent, "Hello", chat) + await invoke_agent(agent, "What is the special soup?", chat) + await invoke_agent(agent, "What is the special drink?", chat) + await invoke_agent(agent, "Thank you", chat) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/chat_completion/step5_chat_completion_agent_group_chat.py b/python/samples/getting_started_with_agents/step3_chat.py similarity index 53% rename from python/samples/getting_started_with_agents/chat_completion/step5_chat_completion_agent_group_chat.py rename to python/samples/getting_started_with_agents/step3_chat.py index d7f13173d268..e81c5d0c516c 100644 --- a/python/samples/getting_started_with_agents/chat_completion/step5_chat_completion_agent_group_chat.py +++ b/python/samples/getting_started_with_agents/step3_chat.py @@ -2,22 +2,19 @@ import asyncio -from semantic_kernel import Kernel from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.strategies import TerminationStrategy -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion 
+from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy +from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel -""" -The following sample demonstrates how to create a simple, agent group chat that -utilizes An Art Director Chat Completion Agent along with a Copy Writer Chat -Completion Agent to complete a task. -""" - - -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) - return kernel +################################################################### +# The following sample demonstrates how to create a simple, # +# agent group chat that utilizes An Art Director Chat Completion # +# Agent along with a Copy Writer Chat Completion Agent to # +# complete a task. # +################################################################### class ApprovalTerminationStrategy(TerminationStrategy): @@ -46,52 +43,42 @@ async def should_agent_terminate(self, agent, history): Consider suggestions when refining an idea. """ -TASK = "a slogan for a new line of electric cars." + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel async def main(): - # 1. Create the reviewer agent based on the chat completion service agent_reviewer = ChatCompletionAgent( + service_id="artdirector", kernel=_create_kernel_with_chat_completion("artdirector"), name=REVIEWER_NAME, instructions=REVIEWER_INSTRUCTIONS, ) - # 2. Create the copywriter agent based on the chat completion service agent_writer = ChatCompletionAgent( + service_id="copywriter", kernel=_create_kernel_with_chat_completion("copywriter"), name=COPYWRITER_NAME, instructions=COPYWRITER_INSTRUCTIONS, ) - # 3. Place the agents in a group chat with a custom termination strategy - group_chat = AgentGroupChat( - agents=[ - agent_writer, - agent_reviewer, - ], - termination_strategy=ApprovalTerminationStrategy( - agents=[agent_reviewer], - maximum_iterations=10, - ), + chat = AgentGroupChat( + agents=[agent_writer, agent_reviewer], + termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), ) - # 4. Add the task as a message to the group chat - await group_chat.add_chat_message(message=TASK) - print(f"# User: {TASK}") - - # 5. Invoke the chat - async for content in group_chat.invoke(): - print(f"# {content.name}: {content.content}") - - """ - Sample output: - # User: a slogan for a new line of electric cars. - # CopyWriter: "Drive the Future: Shockingly Efficient." - # ArtDirector: This slogan has potential but could benefit from refinement to create a stronger ... - # CopyWriter: "Electrify Your Drive." - # ArtDirector: Approved. This slogan is concise, memorable, and effectively communicates the ... - """ + input = "a slogan for a new line of electric cars." 
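+    # The task is added to the group chat as a single user message; invoking the
+    # chat then alternates between the writer and reviewer agents until the
+    # approval-based termination strategy (or the iteration limit) ends the chat.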
+ + await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) + print(f"# {AuthorRole.USER}: '{input}'") + + async for content in chat.invoke(): + print(f"# {content.role} - {content.name or '*'}: '{content.content}'") + + print(f"# IS COMPLETE: {chat.is_complete}") if __name__ == "__main__": diff --git a/python/samples/getting_started_with_agents/chat_completion/step6_kernel_function_strategies.py b/python/samples/getting_started_with_agents/step4_kernel_function_strategies.py similarity index 66% rename from python/samples/getting_started_with_agents/chat_completion/step6_kernel_function_strategies.py rename to python/samples/getting_started_with_agents/step4_kernel_function_strategies.py index 3cfabc110ba8..9ad6a9d361bf 100644 --- a/python/samples/getting_started_with_agents/chat_completion/step6_kernel_function_strategies.py +++ b/python/samples/getting_started_with_agents/step4_kernel_function_strategies.py @@ -2,26 +2,26 @@ import asyncio -from semantic_kernel import Kernel from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent -from semantic_kernel.agents.strategies import KernelFunctionSelectionStrategy, KernelFunctionTerminationStrategy -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.functions import KernelFunctionFromPrompt - -""" -The following sample demonstrates how to create a simple, agent group chat that utilizes -An Art Director Chat Completion Agent along with a Copy Writer Chat Completion Agent to -complete a task. The sample also shows how to specify a Kernel Function termination and -selection strategy to determine when to end the chat or how to select the next agent to -take a turn in the conversation. -""" - - -def _create_kernel_with_chat_completion(service_id: str) -> Kernel: - kernel = Kernel() - kernel.add_service(AzureChatCompletion(service_id=service_id)) - return kernel - +from semantic_kernel.agents.strategies import ( + KernelFunctionSelectionStrategy, + KernelFunctionTerminationStrategy, +) +from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.functions.kernel_function_from_prompt import KernelFunctionFromPrompt +from semantic_kernel.kernel import Kernel + +################################################################### +# The following sample demonstrates how to create a simple, # +# agent group chat that utilizes An Art Director Chat Completion # +# Agent along with a Copy Writer Chat Completion Agent to # +# complete a task. The sample also shows how to specify a Kernel # +# Function termination and selection strategy to determine when # +# to end the chat or how to select the next agent to take a turn # +# in the conversation. # +################################################################### REVIEWER_NAME = "ArtDirector" REVIEWER_INSTRUCTIONS = """ @@ -41,25 +41,28 @@ def _create_kernel_with_chat_completion(service_id: str) -> Kernel: Consider suggestions when refining an idea. """ -TASK = "a slogan for a new line of electric cars." + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel async def main(): - # 1. 
Create the reviewer agent based on the chat completion service agent_reviewer = ChatCompletionAgent( + service_id="artdirector", kernel=_create_kernel_with_chat_completion("artdirector"), name=REVIEWER_NAME, instructions=REVIEWER_INSTRUCTIONS, ) - # 2. Create the copywriter agent based on the chat completion service agent_writer = ChatCompletionAgent( + service_id="copywriter", kernel=_create_kernel_with_chat_completion("copywriter"), name=COPYWRITER_NAME, instructions=COPYWRITER_INSTRUCTIONS, ) - # 3. Create a Kernel Function to determine if the copy has been approved termination_function = KernelFunctionFromPrompt( function_name="termination", prompt=""" @@ -70,7 +73,6 @@ async def main(): """, ) - # 4. Create a Kernel Function to determine which agent should take the next turn selection_function = KernelFunctionFromPrompt( function_name="selection", prompt=f""" @@ -92,7 +94,6 @@ async def main(): """, ) - # 5. Place the agents in a group chat with the custom termination and selection strategies chat = AgentGroupChat( agents=[agent_writer, agent_reviewer], termination_strategy=KernelFunctionTerminationStrategy( @@ -112,22 +113,15 @@ async def main(): ), ) - # 6. Add the task as a message to the group chat - await chat.add_chat_message(message=TASK) - print(f"# User: {TASK}") + input = "a slogan for a new line of electric cars." + + await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) + print(f"# {AuthorRole.USER}: '{input}'") - # 7. Invoke the chat async for content in chat.invoke(): - print(f"# {content.name}: {content.content}") - - """ - Sample Output: - # User: a slogan for a new line of electric cars. - # CopyWriter: "Electrify your drive. Spare the gas, not the thrill." - # ArtDirector: This slogan captures the essence of electric cars but could use refinement to ... - # CopyWriter: "Go electric. Enjoy the thrill. Skip the gas." - # ArtDirector: Approved. This slogan is clear, concise, and effectively communicates the ... - """ + print(f"# {content.role} - {content.name or '*'}: '{content.content}'") + + print(f"# IS COMPLETE: {chat.is_complete}") if __name__ == "__main__": diff --git a/python/samples/getting_started_with_agents/step5_json_result.py b/python/samples/getting_started_with_agents/step5_json_result.py new file mode 100644 index 000000000000..10edc9f2198f --- /dev/null +++ b/python/samples/getting_started_with_agents/step5_json_result.py @@ -0,0 +1,106 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from pydantic import ValidationError + +from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent +from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy +from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel +from semantic_kernel.kernel_pydantic import KernelBaseModel + +################################################################### +# The following sample demonstrates how to configure an Agent # +# Group Chat, and invoke an agent with only a single turn. # +# A custom termination strategy is provided where the model is # +# to rate the user input on creativity and expressiveness # +# and end the chat when a score of 70 or higher is provided. 
# +################################################################### + + +SCORE_COMPLETED_THRESHOLD = 70 +TUTOR_NAME = "Tutor" +TUTOR_INSTRUCTIONS = """ +Think step-by-step and rate the user input on creativity and expressivness from 1-100. + +Respond in JSON format with the following JSON schema: + +{ + "score": "integer (1-100)", + "notes": "the reason for your score" +} +""" + + +class InputScore(KernelBaseModel): + """A model for the input score.""" + + score: int + notes: str + + +def translate_json(json_string: str) -> InputScore | None: + try: + if json_string is None: + return None + return InputScore.model_validate_json(json_string) + except ValidationError: + return None + + +class ThresholdTerminationStrategy(TerminationStrategy): + """A strategy for determining when an agent should terminate.""" + + async def should_agent_terminate(self, agent, history): + """Check if the agent should terminate.""" + last_message_content = history[-1].content or "" + result = translate_json(last_message_content) + return result.score >= SCORE_COMPLETED_THRESHOLD if result else False + + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel + + +async def invoke_agent(agent: ChatCompletionAgent, input: str, chat: AgentGroupChat): + """Invoke the agent with the user input.""" + await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) + + print(f"# {AuthorRole.USER}: '{input}'") + + async for content in chat.invoke_single_turn(agent): + print(f"# {content.role} - {content.name or '*'}: '{content.content}'") + print(f"# IS COMPLETE: {chat.is_complete}") + + +async def main(): + service_id = "tutor" + agent = ChatCompletionAgent( + service_id=service_id, + kernel=_create_kernel_with_chat_completion(service_id=service_id), + name=TUTOR_NAME, + instructions=TUTOR_INSTRUCTIONS, + ) + + # Here a TerminationStrategy subclass is used that will terminate when + # the response includes a score that is greater than or equal to 70. + termination_strategy = ThresholdTerminationStrategy(maximum_iterations=10) + + chat = AgentGroupChat(termination_strategy=termination_strategy) + + await invoke_agent(agent=agent, input="The sunset is very colorful.", chat=chat) + await invoke_agent(agent=agent, input="The sunset is setting over the mountains.", chat=chat) + await invoke_agent( + agent=agent, + input="The sunset is setting over the mountains and filled the sky with a deep red flame, setting the clouds ablaze.", # noqa: E501 + chat=chat, + ) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/step6_logging.py b/python/samples/getting_started_with_agents/step6_logging.py new file mode 100644 index 000000000000..197bcd72ab8e --- /dev/null +++ b/python/samples/getting_started_with_agents/step6_logging.py @@ -0,0 +1,93 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +import logging + +from semantic_kernel.agents import AgentGroupChat +from semantic_kernel.agents.chat_completion.chat_completion_agent import ChatCompletionAgent +from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy +from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +################################################################### +# The following sample demonstrates how to create a simple, # +# agent group chat that utilizes An Art Director Chat Completion # +# Agent along with a Copy Writer Chat Completion Agent to # +# complete a task. The main point of this sample is to note how # +# to enable logging to view all interactions between the agents # +# and the model. # +################################################################### + + +# NOTE: This is all that is required to enable logging +logging.basicConfig(level=logging.DEBUG) + + +class ApprovalTerminationStrategy(TerminationStrategy): + """A strategy for determining when an agent should terminate.""" + + async def should_agent_terminate(self, agent, history): + """Check if the agent should terminate.""" + return "approved" in history[-1].content.lower() + + +REVIEWER_NAME = "ArtDirector" +REVIEWER_INSTRUCTIONS = """ +You are an art director who has opinions about copywriting born of a love for David Ogilvy. +The goal is to determine if the given copy is acceptable to print. +If so, state that it is approved. +If not, provide insight on how to refine suggested copy without example. +""" + +COPYWRITER_NAME = "CopyWriter" +COPYWRITER_INSTRUCTIONS = """ +You are a copywriter with ten years of experience and are known for brevity and a dry humor. +The goal is to refine and decide on the single best copy as an expert in the field. +Only provide a single proposal per response. +You're laser focused on the goal at hand. +Don't waste time with chit chat. +Consider suggestions when refining an idea. +""" + + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel + + +async def main(): + agent_reviewer = ChatCompletionAgent( + service_id="artdirector", + kernel=_create_kernel_with_chat_completion("artdirector"), + name=REVIEWER_NAME, + instructions=REVIEWER_INSTRUCTIONS, + ) + + agent_writer = ChatCompletionAgent( + service_id="copywriter", + kernel=_create_kernel_with_chat_completion("copywriter"), + name=COPYWRITER_NAME, + instructions=COPYWRITER_INSTRUCTIONS, + ) + + chat = AgentGroupChat( + agents=[agent_writer, agent_reviewer], + termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), + ) + + input = "a slogan for a new line of electric cars." 
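+    # Because logging.basicConfig(level=logging.DEBUG) is configured above, the agent
+    # selections, model requests and responses, and termination checks triggered by
+    # the chat below are written to the console alongside the printed output.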
+ + await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) + print(f"# {AuthorRole.USER}: '{input}'") + + async for content in chat.invoke(): + print(f"# {content.role} - {content.name or '*'}: '{content.content}'") + + print(f"# IS COMPLETE: {chat.is_complete}") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/step7_assistant.py b/python/samples/getting_started_with_agents/step7_assistant.py new file mode 100644 index 000000000000..67235c0dcf3c --- /dev/null +++ b/python/samples/getting_started_with_agents/step7_assistant.py @@ -0,0 +1,88 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio +from typing import Annotated + +from semantic_kernel.agents.open_ai import AzureAssistantAgent, OpenAIAssistantAgent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI. OpenAI Assistants # +# allow for function calling, the use of file search and a # +# code interpreter. Assistant Threads are used to manage the # +# conversation state, similar to a Semantic Kernel Chat History. # +##################################################################### + +HOST_NAME = "Host" +HOST_INSTRUCTIONS = "Answer questions about the menu." + +# Note: you may toggle this to switch between AzureOpenAI and OpenAI +use_azure_openai = False + + +# Define a sample plugin for the sample +class MenuPlugin: + """A sample Menu Plugin used for the concept sample.""" + + @kernel_function(description="Provides a list of specials from the menu.") + def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: + return """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """ + + @kernel_function(description="Provides the price of the requested menu item.") + def get_item_price( + self, menu_item: Annotated[str, "The name of the menu item."] + ) -> Annotated[str, "Returns the price of the menu item."]: + return "$9.99" + + +# A helper method to invoke the agent with the user input +async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None: + """Invoke the agent with the user input.""" + await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input)) + + print(f"# {AuthorRole.USER}: '{input}'") + + async for content in agent.invoke(thread_id=thread_id): + if content.role != AuthorRole.TOOL: + print(f"# {content.role}: {content.content}") + + +async def main(): + # Create the instance of the Kernel + kernel = Kernel() + + # Add the sample plugin to the kernel + kernel.add_plugin(plugin=MenuPlugin(), plugin_name="menu") + + # Create the OpenAI Assistant Agent + service_id = "agent" + if use_azure_openai: + agent = await AzureAssistantAgent.create( + kernel=kernel, service_id=service_id, name=HOST_NAME, instructions=HOST_INSTRUCTIONS + ) + else: + agent = await OpenAIAssistantAgent.create( + kernel=kernel, service_id=service_id, name=HOST_NAME, instructions=HOST_INSTRUCTIONS + ) + + thread_id = await agent.create_thread() + + try: + await invoke_agent(agent, thread_id=thread_id, input="Hello") + await 
invoke_agent(agent, thread_id=thread_id, input="What is the special soup?") + await invoke_agent(agent, thread_id=thread_id, input="What is the special drink?") + await invoke_agent(agent, thread_id=thread_id, input="Thank you") + finally: + await agent.delete_thread(thread_id) + await agent.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/step8_assistant_vision.py b/python/samples/getting_started_with_agents/step8_assistant_vision.py new file mode 100644 index 000000000000..ac7bf34d7e48 --- /dev/null +++ b/python/samples/getting_started_with_agents/step8_assistant_vision.py @@ -0,0 +1,115 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio +import os + +from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.file_reference_content import FileReferenceContent +from semantic_kernel.contents.image_content import ImageContent +from semantic_kernel.contents.text_content import TextContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI and leverage the # +# multi-modal content types to have the assistant describe images # +# and answer questions about them. # +##################################################################### + +HOST_NAME = "Host" +HOST_INSTRUCTIONS = "Answer questions about the menu." + + +def create_message_with_image_url(input: str, url: str) -> ChatMessageContent: + return ChatMessageContent( + role=AuthorRole.USER, + items=[TextContent(text=input), ImageContent(uri=url)], + ) + + +def create_message_with_image_reference(input: str, file_id: str) -> ChatMessageContent: + return ChatMessageContent( + role=AuthorRole.USER, + items=[TextContent(text=input), FileReferenceContent(file_id=file_id)], + ) + + +streaming = False + + +# A helper method to invoke the agent with the user input +async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, message: ChatMessageContent) -> None: + """Invoke the agent with the user input.""" + await agent.add_chat_message(thread_id=thread_id, message=message) + + print(f"# {AuthorRole.USER}: '{message.items[0].text}'") + + if streaming: + first_chunk = True + async for content in agent.invoke_stream(thread_id=thread_id): + if content.role != AuthorRole.TOOL: + if first_chunk: + print(f"# {content.role}: ", end="", flush=True) + first_chunk = False + print(content.content, end="", flush=True) + print() + else: + async for content in agent.invoke(thread_id=thread_id): + if content.role != AuthorRole.TOOL: + print(f"# {content.role}: {content.content}") + + +async def main(): + # Create the instance of the Kernel + kernel = Kernel() + + service_id = "agent" + + # Create the Assistant Agent + agent = await OpenAIAssistantAgent.create( + kernel=kernel, service_id=service_id, name=HOST_NAME, instructions=HOST_INSTRUCTIONS + ) + + cat_image_file_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), + "resources", + "cat.jpg", + ) + + # Upload the file for use with the assistant + file_id = await agent.add_file(cat_image_file_path, purpose="vision") + + # Create a thread for the conversation + thread_id = await agent.create_thread() + + try: + await 
invoke_agent( + agent, + thread_id=thread_id, + message=create_message_with_image_url( + "Describe this image.", + "https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/New_york_times_square-terabass.jpg/1200px-New_york_times_square-terabass.jpg", + ), + ) + await invoke_agent( + agent, + thread_id=thread_id, + message=create_message_with_image_url( + "What is the main color in this image?", + "https://upload.wikimedia.org/wikipedia/commons/5/56/White_shark.jpg", + ), + ) + await invoke_agent( + agent, + thread_id=thread_id, + message=create_message_with_image_reference("Is there an animal in this image?", file_id), + ) + finally: + await agent.delete_file(file_id) + await agent.delete_thread(thread_id) + await agent.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started_with_agents/step9_assistant_tool_code_interpreter.py b/python/samples/getting_started_with_agents/step9_assistant_tool_code_interpreter.py new file mode 100644 index 000000000000..11c2deff8e7c --- /dev/null +++ b/python/samples/getting_started_with_agents/step9_assistant_tool_code_interpreter.py @@ -0,0 +1,76 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio + +from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent +from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI and leverage the # +# assistant's code interpreter functionality to have it write # +# Python code to print Fibonacci numbers. # +##################################################################### + + +AGENT_NAME = "CodeRunner" +AGENT_INSTRUCTIONS = "Run the provided code file and return the result." 
+
+# Note: you may toggle this to switch between AzureOpenAI and OpenAI
+use_azure_openai = True
+
+
+# A helper method to invoke the agent with the user input
+async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None:
+    """Invoke the agent with the user input."""
+    await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input))
+
+    print(f"# {AuthorRole.USER}: '{input}'")
+
+    async for content in agent.invoke(thread_id=thread_id):
+        if content.role != AuthorRole.TOOL:
+            print(f"# {content.role}: {content.content}")
+
+
+async def main():
+    # Create the instance of the Kernel
+    kernel = Kernel()
+
+    # Define a service_id for the sample
+    service_id = "agent"
+
+    # Create the agent
+    if use_azure_openai:
+        agent = await AzureAssistantAgent.create(
+            kernel=kernel,
+            service_id=service_id,
+            name=AGENT_NAME,
+            instructions=AGENT_INSTRUCTIONS,
+            enable_code_interpreter=True,
+        )
+    else:
+        agent = await OpenAIAssistantAgent.create(
+            kernel=kernel,
+            service_id=service_id,
+            name=AGENT_NAME,
+            instructions=AGENT_INSTRUCTIONS,
+            enable_code_interpreter=True,
+        )
+
+    thread_id = await agent.create_thread()
+
+    try:
+        await invoke_agent(
+            agent,
+            thread_id=thread_id,
+            input="Use code to determine the values in the Fibonacci sequence that are less than the value of 101?",  # noqa: E501
+        )
+    finally:
+        await agent.delete_thread(thread_id)
+        await agent.delete()
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
diff --git a/python/samples/learn_resources/agent_docs/agent_collaboration.py b/python/samples/learn_resources/agent_docs/agent_collaboration.py
index 0d08c4a5b513..c8e7d04bffb6 100644
--- a/python/samples/learn_resources/agent_docs/agent_collaboration.py
+++ b/python/samples/learn_resources/agent_docs/agent_collaboration.py
@@ -3,143 +3,138 @@
 import asyncio
 import os
 
-from semantic_kernel import Kernel
 from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent
-from semantic_kernel.agents.strategies import (
+from semantic_kernel.agents.strategies.selection.kernel_function_selection_strategy import (
     KernelFunctionSelectionStrategy,
+)
+from semantic_kernel.agents.strategies.termination.kernel_function_termination_strategy import (
     KernelFunctionTerminationStrategy,
 )
-from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion
-from semantic_kernel.contents import ChatHistoryTruncationReducer
-from semantic_kernel.functions import KernelFunctionFromPrompt
+from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy
+from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion
+from semantic_kernel.contents.chat_message_content import ChatMessageContent
+from semantic_kernel.contents.utils.author_role import AuthorRole
+from semantic_kernel.functions.kernel_function_from_prompt import KernelFunctionFromPrompt
+from semantic_kernel.kernel import Kernel
+
+###################################################################
+# The following sample demonstrates how to create a simple, #
+# agent group chat that utilizes a Reviewer Chat Completion #
+# Agent along with a Writer Chat Completion Agent to #
+# complete a user's task. #
+###################################################################
+
 
-"""
-The following sample demonstrates how to create a simple,
-agent group chat that utilizes a Reviewer Chat Completion
-Agent along with a Writer Chat Completion Agent to
-complete a user's task.
+class ApprovalTerminationStrategy(TerminationStrategy): + """A strategy for determining when an agent should terminate.""" -This is the full code sample for the Semantic Kernel Learn Site: How-To: Coordinate Agent Collaboration - using Agent Group Chat + async def should_agent_terminate(self, agent, history): + """Check if the agent should terminate.""" + return "approved" in history[-1].content.lower() -https://learn.microsoft.com/semantic-kernel/frameworks/agent/examples/example-agent-collaboration?pivots=programming-language-python -""" -# Define agent names REVIEWER_NAME = "Reviewer" -WRITER_NAME = "Writer" +COPYWRITER_NAME = "Writer" -def create_kernel() -> Kernel: - """Creates a Kernel instance with an Azure OpenAI ChatCompletion service.""" +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: kernel = Kernel() - kernel.add_service(service=AzureChatCompletion()) + kernel.add_service(AzureChatCompletion(service_id=service_id)) return kernel async def main(): - # Create a single kernel instance for all agents. - kernel = create_kernel() - - # Create ChatCompletionAgents using the same kernel. agent_reviewer = ChatCompletionAgent( - kernel=kernel, + service_id=REVIEWER_NAME, + kernel=_create_kernel_with_chat_completion(REVIEWER_NAME), name=REVIEWER_NAME, instructions=""" -Your responsibility is to review and identify how to improve user provided content. -If the user has provided input or direction for content already provided, specify how to address this input. -Never directly perform the correction or provide an example. -Once the content has been updated in a subsequent response, review it again until it is satisfactory. - -RULES: -- Only identify suggestions that are specific and actionable. -- Verify previous suggestions have been addressed. -- Never repeat previous suggestions. -""", + Your responsibility is to review and identify how to improve user provided content. + If the user has providing input or direction for content already provided, specify how to + address this input. + Never directly perform the correction or provide example. + Once the content has been updated in a subsequent response, you will review the content + again until satisfactory. + Always copy satisfactory content to the clipboard using available tools and inform user. + + RULES: + - Only identify suggestions that are specific and actionable. + - Verify previous suggestions have been addressed. + - Never repeat previous suggestions. + """, ) agent_writer = ChatCompletionAgent( - kernel=kernel, - name=WRITER_NAME, + service_id=COPYWRITER_NAME, + kernel=_create_kernel_with_chat_completion(COPYWRITER_NAME), + name=COPYWRITER_NAME, instructions=""" -Your sole responsibility is to rewrite content according to review suggestions. -- Always apply all review directions. -- Always revise the content in its entirety without explanation. -- Never address the user. -""", + Your sole responsibility is to rewrite content according to review suggestions. + + - Always apply all review direction. + - Always revise the content in its entirety without explanation. + - Never address the user. + """, ) - # Define a selection function to determine which agent should take the next turn. selection_function = KernelFunctionFromPrompt( function_name="selection", prompt=f""" -Examine the provided RESPONSE and choose the next participant. -State only the name of the chosen participant without explanation. -Never choose the participant named in the RESPONSE. 
- -Choose only from these participants: -- {REVIEWER_NAME} -- {WRITER_NAME} - -Rules: -- If RESPONSE is user input, it is {REVIEWER_NAME}'s turn. -- If RESPONSE is by {REVIEWER_NAME}, it is {WRITER_NAME}'s turn. -- If RESPONSE is by {WRITER_NAME}, it is {REVIEWER_NAME}'s turn. - -RESPONSE: -{{{{$lastmessage}}}} -""", + Determine which participant takes the next turn in a conversation based on the the most recent participant. + State only the name of the participant to take the next turn. + No participant should take more than one turn in a row. + + Choose only from these participants: + - {REVIEWER_NAME} + - {COPYWRITER_NAME} + + Always follow these rules when selecting the next participant: + - After user input, it is {COPYWRITER_NAME}'s turn. + - After {COPYWRITER_NAME} replies, it is {REVIEWER_NAME}'s turn. + - After {REVIEWER_NAME} provides feedback, it is {COPYWRITER_NAME}'s turn. + + History: + {{{{$history}}}} + """, ) - # Define a termination function where the reviewer signals completion with "yes". - termination_keyword = "yes" + TERMINATION_KEYWORD = "yes" termination_function = KernelFunctionFromPrompt( function_name="termination", prompt=f""" -Examine the RESPONSE and determine whether the content has been deemed satisfactory. -If the content is satisfactory, respond with a single word without explanation: {termination_keyword}. -If specific suggestions are being provided, it is not satisfactory. -If no correction is suggested, it is satisfactory. - -RESPONSE: -{{{{$lastmessage}}}} -""", + Examine the RESPONSE and determine whether the content has been deemed satisfactory. + If content is satisfactory, respond with a single word without explanation: {TERMINATION_KEYWORD}. + If specific suggestions are being provided, it is not satisfactory. + If no correction is suggested, it is satisfactory. + + RESPONSE: + {{{{$history}}}} + """, ) - history_reducer = ChatHistoryTruncationReducer(target_count=5) - - # Create the AgentGroupChat with selection and termination strategies. chat = AgentGroupChat( - agents=[agent_reviewer, agent_writer], + agents=[agent_writer, agent_reviewer], selection_strategy=KernelFunctionSelectionStrategy( - initial_agent=agent_reviewer, function=selection_function, - kernel=kernel, - result_parser=lambda result: str(result.value[0]).strip() if result.value[0] is not None else WRITER_NAME, - history_variable_name="lastmessage", - history_reducer=history_reducer, + kernel=_create_kernel_with_chat_completion("selection"), + result_parser=lambda result: str(result.value[0]) if result.value is not None else COPYWRITER_NAME, + agent_variable_name="agents", + history_variable_name="history", ), termination_strategy=KernelFunctionTerminationStrategy( agents=[agent_reviewer], function=termination_function, - kernel=kernel, - result_parser=lambda result: termination_keyword in str(result.value[0]).lower(), - history_variable_name="lastmessage", + kernel=_create_kernel_with_chat_completion("termination"), + result_parser=lambda result: TERMINATION_KEYWORD in str(result.value[0]).lower(), + history_variable_name="history", maximum_iterations=10, - history_reducer=history_reducer, ), ) - print( - "Ready! Type your input, or 'exit' to quit, 'reset' to restart the conversation. " - "You may pass in a file path using @." 
-    )
-
-    is_complete = False
+    is_complete: bool = False
     while not is_complete:
-        print()
-        user_input = input("User > ").strip()
+        user_input = input("User:> ")
         if not user_input:
             continue
@@ -152,35 +147,26 @@ async def main():
             print("[Conversation has been reset]")
             continue
 
-        # Try to grab files from the script's current directory
-        if user_input.startswith("@") and len(user_input) > 1:
-            file_name = user_input[1:]
-            script_dir = os.path.dirname(os.path.abspath(__file__))
-            file_path = os.path.join(script_dir, file_name)
+        if user_input.startswith("@") and len(user_input) > 1:
+            file_path = user_input[1:]
             try:
                 if not os.path.exists(file_path):
                     print(f"Unable to access file: {file_path}")
                     continue
-                with open(file_path, encoding="utf-8") as file:
+                with open(file_path) as file:
                     user_input = file.read()
             except Exception:
                 print(f"Unable to access file: {file_path}")
                 continue
 
-        # Add the current user_input to the chat
-        await chat.add_chat_message(message=user_input)
+        await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=user_input))
 
-        try:
-            async for response in chat.invoke():
-                if response is None or not response.name:
-                    continue
-                print()
-                print(f"# {response.name.upper()}:\n{response.content}")
-        except Exception as e:
-            print(f"Error during chat invocation: {e}")
+        async for response in chat.invoke():
+            print(f"# {response.role} - {response.name or '*'}: '{response.content}'")
 
-        # Reset the chat's complete flag for the new conversation round.
-        chat.is_complete = False
+        if chat.is_complete:
+            is_complete = True
+            break
 
 
 if __name__ == "__main__":
diff --git a/python/samples/learn_resources/agent_docs/assistant_code.py b/python/samples/learn_resources/agent_docs/assistant_code.py
index 19c7b61140b4..06a7bc8b29fd 100644
--- a/python/samples/learn_resources/agent_docs/assistant_code.py
+++ b/python/samples/learn_resources/agent_docs/assistant_code.py
@@ -1,39 +1,33 @@
 # Copyright (c) Microsoft. All rights reserved.
 
 import asyncio
-import logging
 import os
 
-from semantic_kernel.agents.open_ai import AzureAssistantAgent
-from semantic_kernel.contents import StreamingFileReferenceContent
+from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent
+from semantic_kernel.contents.chat_message_content import ChatMessageContent
+from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent
+from semantic_kernel.contents.utils.author_role import AuthorRole
+from semantic_kernel.kernel import Kernel
 
-logging.basicConfig(level=logging.ERROR)
-
-"""
-The following sample demonstrates how to create a simple,
-OpenAI assistant agent that utilizes the code interpreter
-to analyze uploaded files.
-
-This is the full code sample for the Semantic Kernel Learn Site: How-To: Open AI Assistant Agent Code Interpreter
-
-https://learn.microsoft.com/semantic-kernel/frameworks/agent/examples/example-assistant-code?pivots=programming-language-python
-"""  # noqa: E501
+###################################################################
+# The following sample demonstrates how to create a simple, #
+# OpenAI assistant agent that utilizes the code interpreter #
+# to analyze uploaded files. 
# +################################################################### # Let's form the file paths that we will later pass to the assistant csv_file_path_1 = os.path.join( os.path.dirname(os.path.dirname(os.path.realpath(__file__))), - "resources", "PopulationByAdmin1.csv", ) csv_file_path_2 = os.path.join( os.path.dirname(os.path.dirname(os.path.realpath(__file__))), - "resources", "PopulationByCountry.csv", ) -async def download_file_content(agent: AzureAssistantAgent, file_id: str): +async def download_file_content(agent, file_id: str): try: # Fetch the content of the file using the provided method response_content = await agent.client.files.content(file_id) @@ -56,7 +50,7 @@ async def download_file_content(agent: AzureAssistantAgent, file_id: str): print(f"An error occurred while downloading file {file_id}: {str(e)}") -async def download_response_image(agent: AzureAssistantAgent, file_ids: list[str]): +async def download_response_image(agent, file_ids: list[str]): if file_ids: # Iterate over file_ids and download each one for file_id in file_ids: @@ -64,43 +58,22 @@ async def download_response_image(agent: AzureAssistantAgent, file_ids: list[str async def main(): - # Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # Upload the files to the client - file_ids: list[str] = [] - for path in [csv_file_path_1, csv_file_path_2]: - with open(path, "rb") as file: - file = await client.files.create(file=file, purpose="assistants") - file_ids.append(file.id) - - # Get the code interpreter tool and resources - code_interpreter_tools, code_interpreter_tool_resources = AzureAssistantAgent.configure_code_interpreter_tool( - file_ids=file_ids - ) - - # Create the assistant definition - definition = await client.beta.assistants.create( - model=model, - instructions=""" - Analyze the available data to provide an answer to the user's question. - Always format response using markdown. - Always include a numerical index that starts at 1 for any lists or tables. - Always sort lists in ascending order. - """, + agent = await AzureAssistantAgent.create( + kernel=Kernel(), + service_id="agent", name="SampleAssistantAgent", - tools=code_interpreter_tools, - tool_resources=code_interpreter_tool_resources, - ) - - # Create the agent using the client and the assistant definition - agent = AzureAssistantAgent( - client=client, - definition=definition, + instructions=""" + Analyze the available data to provide an answer to the user's question. + Always format response using markdown. + Always include a numerical index that starts at 1 for any lists or tables. + Always sort lists in ascending order. 
+ """, + enable_code_interpreter=True, + code_interpreter_filenames=[csv_file_path_1, csv_file_path_2], ) print("Creating thread...") - thread = await client.beta.threads.create() + thread_id = await agent.create_thread() try: is_complete: bool = False @@ -112,44 +85,33 @@ async def main(): if user_input.lower() == "exit": is_complete = True - break - - await agent.add_chat_message(thread_id=thread.id, message=user_input) - - is_code = False - last_role = None - async for response in agent.invoke_stream(thread_id=thread.id): - current_is_code = response.metadata.get("code", False) - - if current_is_code: - if not is_code: - print("\n\n```python") - is_code = True - print(response.content, end="", flush=True) - else: - if is_code: - print("\n```") - is_code = False - last_role = None - if hasattr(response, "role") and response.role is not None and last_role != response.role: - print(f"\n# {response.role}: ", end="", flush=True) - last_role = response.role - print(response.content, end="", flush=True) + + await agent.add_chat_message( + thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=user_input) + ) + is_code: bool = False + async for response in agent.invoke_stream(thread_id=thread_id): + if is_code != response.metadata.get("code"): + print() + is_code = not is_code + + print(f"{response.content}", end="", flush=True) + file_ids.extend([ item.file_id for item in response.items if isinstance(item, StreamingFileReferenceContent) ]) - if is_code: - print("```\n") + print() await download_response_image(agent, file_ids) file_ids.clear() finally: - print("\nCleaning up resources...") - [await client.files.delete(file_id) for file_id in file_ids] - await client.beta.threads.delete(thread.id) - await client.beta.assistants.delete(agent.id) + print("Cleaning up resources...") + if agent is not None: + [await agent.delete_file(file_id) for file_id in agent.code_interpreter_file_ids] + await agent.delete_thread(thread_id) + await agent.delete() if __name__ == "__main__": diff --git a/python/samples/learn_resources/agent_docs/assistant_search.py b/python/samples/learn_resources/agent_docs/assistant_search.py index 8e1d77fca5cb..5d91786e9bc4 100644 --- a/python/samples/learn_resources/agent_docs/assistant_search.py +++ b/python/samples/learn_resources/agent_docs/assistant_search.py @@ -3,25 +3,21 @@ import asyncio import os -from semantic_kernel.agents.open_ai import AzureAssistantAgent -from semantic_kernel.contents import StreamingAnnotationContent +from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel -""" -The following sample demonstrates how to create a simple, -OpenAI assistant agent that utilizes the vector store -to answer questions based on the uploaded documents. - -This is the full code sample for the Semantic Kernel Learn Site: How-To: Open AI Assistant Agent File Search - -https://learn.microsoft.com/semantic-kernel/frameworks/agent/examples/example-assistant-search?pivots=programming-language-python -""" +################################################################### +# The following sample demonstrates how to create a simple, # +# OpenAI assistant agent that utilizes the vector store # +# to answer questions based on the uploaded documents. 
# +################################################################### def get_filepath_for_filename(filename: str) -> str: - base_directory = os.path.join( - os.path.dirname(os.path.dirname(os.path.realpath(__file__))), - "resources", - ) + base_directory = os.path.dirname(os.path.realpath(__file__)) return os.path.join(base_directory, filename) @@ -33,48 +29,22 @@ def get_filepath_for_filename(filename: str) -> str: async def main(): - # Create the client using Azure OpenAI resources and configuration - client, model = AzureAssistantAgent.setup_resources() - - # Upload the files to the client - file_ids: list[str] = [] - for path in [get_filepath_for_filename(filename) for filename in filenames]: - with open(path, "rb") as file: - file = await client.files.create(file=file, purpose="assistants") - file_ids.append(file.id) - - vector_store = await client.beta.vector_stores.create( - name="assistant_search", - file_ids=file_ids, - ) - - # Get the file search tool and resources - file_search_tools, file_search_tool_resources = AzureAssistantAgent.configure_file_search_tool( - vector_store_ids=vector_store.id - ) - - # Create the assistant definition - definition = await client.beta.assistants.create( - model=model, + agent = await AzureAssistantAgent.create( + kernel=Kernel(), + service_id="agent", + name="SampleAssistantAgent", instructions=""" The document store contains the text of fictional stories. Always analyze the document store to provide an answer to the user's question. Never rely on your knowledge of stories not included in the document store. Always format response using markdown. """, - name="SampleAssistantAgent", - tools=file_search_tools, - tool_resources=file_search_tool_resources, - ) - - # Create the agent using the client and the assistant definition - agent = AzureAssistantAgent( - client=client, - definition=definition, + enable_file_search=True, + vector_store_filenames=[get_filepath_for_filename(filename) for filename in filenames], ) print("Creating thread...") - thread = await client.beta.threads.create() + thread_id = await agent.create_thread() try: is_complete: bool = False @@ -85,12 +55,13 @@ async def main(): if user_input.lower() == "exit": is_complete = True - break - await agent.add_chat_message(thread_id=thread.id, message=user_input) + await agent.add_chat_message( + thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=user_input) + ) footnotes: list[StreamingAnnotationContent] = [] - async for response in agent.invoke_stream(thread_id=thread.id): + async for response in agent.invoke_stream(thread_id=thread_id): footnotes.extend([item for item in response.items if isinstance(item, StreamingAnnotationContent)]) print(f"{response.content}", end="", flush=True) @@ -105,10 +76,11 @@ async def main(): ) finally: - print("\nCleaning up resources...") - [await client.files.delete(file_id) for file_id in file_ids] - await client.beta.threads.delete(thread.id) - await client.beta.assistants.delete(agent.id) + print("Cleaning up resources...") + if agent is not None: + [await agent.delete_file(file_id) for file_id in agent.file_search_file_ids] + await agent.delete_thread(thread_id) + await agent.delete() if __name__ == "__main__": diff --git a/python/samples/learn_resources/agent_docs/chat_agent.py b/python/samples/learn_resources/agent_docs/chat_agent.py index 33d4999bea80..56429d5974cb 100644 --- a/python/samples/learn_resources/agent_docs/chat_agent.py +++ b/python/samples/learn_resources/agent_docs/chat_agent.py @@ -6,10 +6,11 @@ from 
datetime import datetime from semantic_kernel.agents import ChatCompletionAgent -from semantic_kernel.connectors.ai import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.contents import AuthorRole, ChatHistory, ChatMessageContent -from semantic_kernel.functions import KernelArguments +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.kernel import Kernel # Adjust the sys.path so we can use the GitHubPlugin and GitHubSettings classes @@ -19,15 +20,11 @@ from plugins.GithubPlugin.github import GitHubPlugin, GitHubSettings # noqa: E402 -""" -The following sample demonstrates how to create a simple, -ChatCompletionAgent to use a GitHub plugin to interact -with the GitHub API. - -This is the full code sample for the Semantic Kernel Learn Site: How-To: Chat Completion Agent - -https://learn.microsoft.com/semantic-kernel/frameworks/agent/examples/example-chat-agent?pivots=programming-language-python -""" +################################################################### +# The following sample demonstrates how to create a simple, # +# ChatCompletionAgent to use a GitHub plugin to interact # +# with the GitHub API. # +################################################################### async def main(): @@ -49,6 +46,7 @@ async def main(): # Create the agent agent = ChatCompletionAgent( + service_id="agent", kernel=kernel, name="SampleAssistantAgent", instructions=f""" @@ -62,7 +60,7 @@ async def main(): The current date and time is: {current_time}. 
""", - arguments=KernelArguments(settings=settings), + execution_settings=settings, ) history = ChatHistory() diff --git a/python/samples/learn_resources/plugins/GithubPlugin/github.py b/python/samples/learn_resources/plugins/GithubPlugin/github.py index 0e2230653b82..4f06fe9bdd62 100644 --- a/python/samples/learn_resources/plugins/GithubPlugin/github.py +++ b/python/samples/learn_resources/plugins/GithubPlugin/github.py @@ -12,22 +12,22 @@ class Repo(BaseModel): id: int = Field(..., alias="id") name: str = Field(..., alias="full_name") - description: str | None = Field(default=None, alias="description") + description: str | None = Field(None, alias="description") url: str = Field(..., alias="html_url") class User(BaseModel): id: int = Field(..., alias="id") login: str = Field(..., alias="login") - name: str | None = Field(default=None, alias="name") - company: str | None = Field(default=None, alias="company") + name: str | None = Field(None, alias="name") + company: str | None = Field(None, alias="company") url: str = Field(..., alias="html_url") class Label(BaseModel): id: int = Field(..., alias="id") name: str = Field(..., alias="name") - description: str | None = Field(default=None, alias="description") + description: str | None = Field(None, alias="description") class Issue(BaseModel): @@ -37,12 +37,12 @@ class Issue(BaseModel): title: str = Field(..., alias="title") state: str = Field(..., alias="state") labels: list[Label] = Field(..., alias="labels") - when_created: str | None = Field(default=None, alias="created_at") - when_closed: str | None = Field(default=None, alias="closed_at") + when_created: str | None = Field(None, alias="created_at") + when_closed: str | None = Field(None, alias="closed_at") class IssueDetail(Issue): - body: str | None = Field(default=None, alias="body") + body: str | None = Field(None, alias="body") # endregion diff --git a/python/samples/learn_resources/resources/WomensSuffrage.txt b/python/samples/learn_resources/resources/WomensSuffrage.txt deleted file mode 100644 index 3100274682f2..000000000000 --- a/python/samples/learn_resources/resources/WomensSuffrage.txt +++ /dev/null @@ -1,9 +0,0 @@ -Women's suffrage is when women got the right to vote. A long time ago, only men could vote and make decisions. This was not fair because women should have the same rights as men. Women wanted to vote too, so they started asking for it. It took a long time, and they had to work very hard to make people listen to them. Many men did not think women should vote, and this made it very hard for the women. - -The women who fought for voting were called suffragets. They did many things to show they wanted the right to vote. Some gave speeches, others made signs and marched in the streets. Some even went to jail because they refused to stop fighting for what they believed was right. It was scary for some of the women, but they knew how important it was to keep trying. They wanted to change the world so that it was more fair for everyone. - -One of the most important suffragets was Susan B. Anthony. She worked very hard to help women get the right to vote. She gave speeches and wrote letters to the goverment to make them change the laws. Susan never gave up, even when people said mean things to her. Another important person was Elizabeth Cady Stanton. She also helped fight for women's rights and was friends with Susan B. Anthony. Together, they made a great team and helped make big changes. - -Finally, in 1920, the 19th amendment was passed in the United States. 
This law gave women the right to vote. It was a huge victory for the suffragets, and they were very happy. Many women went to vote for the first time, and it felt like they were finally equal with men. It took many years and a lot of hard work, but the women never gave up. They kept fighting until they won. - -Women's suffrage is very important because it shows that if you work hard and believe in something, you can make a change. The women who fought for the right to vote showed bravery and strengh, and they helped make the world a better place. Today, women can vote because of them, and it's important to remember their hard work. We should always stand up for what is right, just like the suffragets did. diff --git a/python/semantic_kernel/__init__.py b/python/semantic_kernel/__init__.py index ac1decedec41..b4439bca0b28 100644 --- a/python/semantic_kernel/__init__.py +++ b/python/semantic_kernel/__init__.py @@ -2,8 +2,5 @@ from semantic_kernel.kernel import Kernel -__version__ = "1.22.1" - -DEFAULT_RC_VERSION = f"{__version__}-rc2" - -__all__ = ["DEFAULT_RC_VERSION", "Kernel", "__version__"] +__version__ = "1.19.0" +__all__ = ["Kernel", "__version__"] diff --git a/python/semantic_kernel/agents/agent.py b/python/semantic_kernel/agents/agent.py index 1b410ba7c7b6..56cd115a7751 100644 --- a/python/semantic_kernel/agents/agent.py +++ b/python/semantic_kernel/agents/agent.py @@ -1,30 +1,25 @@ # Copyright (c) Microsoft. All rights reserved. -import logging import uuid -from abc import ABC, abstractmethod -from collections.abc import AsyncIterable, Iterable -from typing import Any, ClassVar +from collections.abc import Iterable +from typing import TYPE_CHECKING, ClassVar -from pydantic import Field, model_validator +from pydantic import Field from semantic_kernel.agents.channels.agent_channel import AgentChannel -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.functions.kernel_plugin import KernelPlugin +from semantic_kernel.contents.history_reducer.chat_history_reducer import ChatHistoryReducer from semantic_kernel.kernel import Kernel from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.prompt_template.kernel_prompt_template import KernelPromptTemplate -from semantic_kernel.prompt_template.prompt_template_base import PromptTemplateBase -from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig +from semantic_kernel.utils.experimental_decorator import experimental_class from semantic_kernel.utils.naming import generate_random_ascii_name from semantic_kernel.utils.validation import AGENT_NAME_REGEX -logger: logging.Logger = logging.getLogger(__name__) +if TYPE_CHECKING: + from semantic_kernel.contents.chat_history import ChatHistory -class Agent(KernelBaseModel, ABC): +@experimental_class +class Agent(KernelBaseModel): """Base abstraction for all Semantic Kernel agents. An agent instance may participate in one or more conversations. @@ -33,84 +28,35 @@ class Agent(KernelBaseModel, ABC): must define its communication protocol, or AgentChannel. Attributes: - arguments: The arguments for the agent - channel_type: The type of the agent channel - description: The description of the agent - id: The unique identifier of the agent If no id is provided, + name: The name of the agent (optional). 
+ description: The description of the agent (optional). + id: The unique identifier of the agent (optional). If no id is provided, a new UUID will be generated. - instructions: The instructions for the agent (optional) - kernel: The kernel instance for the agent - name: The name of the agent - prompt_template: The prompt template for the agent + instructions: The instructions for the agent (optional """ - arguments: KernelArguments | None = None - channel_type: ClassVar[type[AgentChannel] | None] = None - description: str | None = None id: str = Field(default_factory=lambda: str(uuid.uuid4())) + description: str | None = None + name: str = Field(default_factory=lambda: f"agent_{generate_random_ascii_name()}", pattern=AGENT_NAME_REGEX) instructions: str | None = None kernel: Kernel = Field(default_factory=Kernel) - name: str = Field(default_factory=lambda: f"agent_{generate_random_ascii_name()}", pattern=AGENT_NAME_REGEX) - prompt_template: PromptTemplateBase | None = None - - @staticmethod - def _get_plugin_name(plugin: KernelPlugin | object) -> str: - """Helper method to get the plugin name.""" - if isinstance(plugin, KernelPlugin): - return plugin.name - return plugin.__class__.__name__ - - @model_validator(mode="before") - @classmethod - def _configure_plugins(cls, data: Any) -> Any: - """Configure any plugins passed in.""" - if isinstance(data, dict) and (plugins := data.pop("plugins", None)): - kernel = data.get("kernel", None) - if not kernel: - kernel = Kernel() - for plugin in plugins: - name = Agent._get_plugin_name(plugin) - kernel.add_plugin(plugin, plugin_name=name) - data["kernel"] = kernel - return data - - @abstractmethod - async def get_response(self, *args, **kwargs) -> ChatMessageContent: - """Get a response from the agent. - - This method returns the final result of the agent's execution - as a single ChatMessageContent object. The caller is blocked until - the final result is available. - - Note: For streaming responses, use the invoke_stream method, which returns - intermediate steps and the final result as a stream of StreamingChatMessageContent - objects. Streaming only the final result is not feasible because the timing of - the final result's availability is unknown, and blocking the caller until then - is undesirable in streaming scenarios. - """ - pass - - @abstractmethod - def invoke(self, *args, **kwargs) -> AsyncIterable[ChatMessageContent]: - """Invoke the agent. - - This invocation method will return the intermediate steps and the final results - of the agent's execution as a stream of ChatMessageContent objects to the caller. + channel_type: ClassVar[type[AgentChannel] | None] = None + history_reducer: ChatHistoryReducer | None = None - Note: A ChatMessageContent object contains an entire message. - """ - pass + async def reduce_history(self, history: "ChatHistory") -> bool: + """Perform the reduction on the provided history, returning True if reduction occurred.""" + if self.history_reducer is None: + return False - @abstractmethod - def invoke_stream(self, *args, **kwargs) -> AsyncIterable[StreamingChatMessageContent]: - """Invoke the agent as a stream. + self.history_reducer.messages = history.messages - This invocation method will return the intermediate steps and final results of the - agent's execution as a stream of StreamingChatMessageContent objects to the caller. 
+ new_messages = await self.history_reducer.reduce() + if new_messages is not None: + history.messages.clear() + history.messages.extend(new_messages) + return True - Note: A StreamingChatMessageContent object contains a chunk of a message. - """ - pass + return False def get_channel_keys(self) -> Iterable[str]: """Get the channel keys. @@ -122,6 +68,10 @@ def get_channel_keys(self) -> Iterable[str]: raise NotImplementedError("Unable to get channel keys. Channel type not configured.") yield self.channel_type.__name__ + if self.history_reducer is not None: + yield self.history_reducer.__class__.__name__ + yield str(self.history_reducer.__hash__) + async def create_channel(self) -> AgentChannel: """Create a channel. @@ -132,51 +82,6 @@ async def create_channel(self) -> AgentChannel: raise NotImplementedError("Unable to create channel. Channel type not configured.") return self.channel_type() - async def format_instructions(self, kernel: Kernel, arguments: KernelArguments | None = None) -> str | None: - """Format the instructions. - - Args: - kernel: The kernel instance. - arguments: The kernel arguments. - - Returns: - The formatted instructions. - """ - if self.prompt_template is None: - if self.instructions is None: - return None - self.prompt_template = KernelPromptTemplate( - prompt_template_config=PromptTemplateConfig(template=self.instructions) - ) - return await self.prompt_template.render(kernel, arguments) - - def _merge_arguments(self, override_args: KernelArguments | None) -> KernelArguments: - """Merge the arguments with the override arguments. - - Args: - override_args: The arguments to override. - - Returns: - The merged arguments. If both are None, return None. - """ - if not self.arguments: - if not override_args: - return KernelArguments() - return override_args - - if not override_args: - return self.arguments - - # Both are not None, so merge with precedence for override_args. - merged_execution_settings = self.arguments.execution_settings or {} - if override_args.execution_settings: - merged_execution_settings.update(override_args.execution_settings) - - merged_params = dict(self.arguments) - merged_params.update(override_args) - - return KernelArguments(settings=merged_execution_settings, **merged_params) - def __eq__(self, other): """Check if two agents are equal.""" if isinstance(other, Agent): diff --git a/python/semantic_kernel/agents/autogen/README.md b/python/semantic_kernel/agents/autogen/README.md deleted file mode 100644 index 88ceb05bad1d..000000000000 --- a/python/semantic_kernel/agents/autogen/README.md +++ /dev/null @@ -1,20 +0,0 @@ -## AutoGen Conversable Agent (v0.2.X) - -Semantic Kernel Python supports running AutoGen Conversable Agents provided in the 0.2.X package. - -### Limitations - -Currently, there are some limitations to note: - -- AutoGen Conversable Agents in Semantic Kernel run asynchronously and do not support streaming of agent inputs or responses. -- The `AutoGenConversableAgent` in Semantic Kernel Python cannot be configured as part of a Semantic Kernel `AgentGroupChat`. As we progress towards GA for our agent group chat patterns, we will explore ways to integrate AutoGen agents into a Semantic Kernel group chat scenario. 
- -### Installation - -Install the `semantic-kernel` package with the `autogen` extra: - -```bash -pip install semantic-kernel[autogen] -``` - -For an example of how to integrate an AutoGen Conversable Agent using the Semantic Kernel Agent abstraction, please refer to [`autogen_conversable_agent_simple_convo.py`](../../../samples/concepts/agents/autogen_conversable_agent/autogen_conversable_agent_simple_convo.py). \ No newline at end of file diff --git a/python/semantic_kernel/agents/autogen/__init__.py b/python/semantic_kernel/agents/autogen/__init__.py deleted file mode 100644 index e25409f3c0b6..000000000000 --- a/python/semantic_kernel/agents/autogen/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from semantic_kernel.agents.autogen.autogen_conversable_agent import AutoGenConversableAgent - -__all__ = ["AutoGenConversableAgent"] diff --git a/python/semantic_kernel/agents/autogen/autogen_conversable_agent.py b/python/semantic_kernel/agents/autogen/autogen_conversable_agent.py deleted file mode 100644 index 634cb658ab9b..000000000000 --- a/python/semantic_kernel/agents/autogen/autogen_conversable_agent.py +++ /dev/null @@ -1,204 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import logging -import sys -from collections.abc import AsyncIterable, Callable -from typing import TYPE_CHECKING, Any - -from semantic_kernel.utils.feature_stage_decorator import experimental - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from autogen import ConversableAgent - -from semantic_kernel.agents.agent import Agent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.contents.text_content import TextContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions.agent_exceptions import AgentInvokeException -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.utils.telemetry.agent_diagnostics.decorators import ( - trace_agent_get_response, - trace_agent_invocation, -) - -if TYPE_CHECKING: - from autogen.cache import AbstractCache - - from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent - from semantic_kernel.kernel import Kernel - -logger: logging.Logger = logging.getLogger(__name__) - - -@experimental -class AutoGenConversableAgent(Agent): - """A Semantic Kernel wrapper around an AutoGen 0.2 `ConversableAgent`. - - This allows one to use it as a Semantic Kernel `Agent`. Note: this agent abstraction - does not currently allow for the use of AgentGroupChat within Semantic Kernel. - """ - - conversable_agent: ConversableAgent - - def __init__(self, conversable_agent: ConversableAgent, **kwargs: Any) -> None: - """Initialize the AutoGenConversableAgent. - - Args: - conversable_agent: The existing AutoGen 0.2 ConversableAgent instance - kwargs: Other Agent base class arguments (e.g. 
name, id, instructions) - """ - args: dict[str, Any] = { - "name": conversable_agent.name, - "description": conversable_agent.description, - "instructions": conversable_agent.system_message, - "conversable_agent": conversable_agent, - } - - if kwargs: - args.update(kwargs) - - super().__init__(**args) - - @trace_agent_get_response - @override - async def get_response(self, message: str) -> ChatMessageContent: - """Get a response from the agent. - - Args: - message: The message to send. - - Returns: - A ChatMessageContent object with the response. - """ - reply = await self.conversable_agent.a_generate_reply( - messages=[{"role": "user", "content": message}], - ) - - logger.info("Called AutoGenConversableAgent.a_generate_reply.") - - if isinstance(reply, str): - return ChatMessageContent(content=reply, role=AuthorRole.ASSISTANT) - if isinstance(reply, dict): - return ChatMessageContent(**reply) - - raise AgentInvokeException(f"Unexpected reply type from `a_generate_reply`: {type(reply)}") - - @trace_agent_invocation - @override - async def invoke( - self, - *, - recipient: "AutoGenConversableAgent | None" = None, - clear_history: bool = True, - silent: bool = True, - cache: "AbstractCache | None" = None, - max_turns: int | None = None, - summary_method: str | Callable | None = ConversableAgent.DEFAULT_SUMMARY_METHOD, - summary_args: dict | None = {}, - message: dict | str | Callable | None = None, - **kwargs: Any, - ) -> AsyncIterable[ChatMessageContent]: - """A direct `invoke` method for the ConversableAgent. - - Args: - recipient: The recipient ConversableAgent to chat with - clear_history: Whether to clear the chat history before starting. True by default. - silent: Whether to suppress console output. True by default. - cache: The cache to use for storing chat history - max_turns: The maximum number of turns to chat for - summary_method: The method to use for summarizing the chat - summary_args: The arguments to pass to the summary method - message: The initial message to send. If message is not provided, - the agent will wait for the user to provide the first message. - kwargs: Additional keyword arguments - """ - if recipient is not None: - if not isinstance(recipient, AutoGenConversableAgent): - raise AgentInvokeException( - f"Invalid recipient type: {type(recipient)}. " - "Recipient must be an instance of AutoGenConversableAgent." 
- ) - - chat_result = await self.conversable_agent.a_initiate_chat( - recipient=recipient.conversable_agent, - clear_history=clear_history, - silent=silent, - cache=cache, - max_turns=max_turns, - summary_method=summary_method, - summary_args=summary_args, - message=message, # type: ignore - **kwargs, - ) - - logger.info(f"Called AutoGenConversableAgent.a_initiate_chat with recipient: {recipient}.") - - for message in chat_result.chat_history: - yield AutoGenConversableAgent._to_chat_message_content(message) # type: ignore - else: - reply = await self.conversable_agent.a_generate_reply( - messages=[{"role": "user", "content": message}], - ) - - logger.info("Called AutoGenConversableAgent.a_generate_reply.") - - if isinstance(reply, str): - yield ChatMessageContent(content=reply, role=AuthorRole.ASSISTANT) - elif isinstance(reply, dict): - yield ChatMessageContent(**reply) - else: - raise AgentInvokeException(f"Unexpected reply type from `a_generate_reply`: {type(reply)}") - - @override - def invoke_stream( - self, - message: str, - kernel: "Kernel | None" = None, - arguments: KernelArguments | None = None, - **kwargs: Any, - ) -> AsyncIterable["StreamingChatMessageContent"]: - """Invoke the agent with a stream of messages.""" - raise NotImplementedError("The AutoGenConversableAgent does not support streaming.") - - @staticmethod - def _to_chat_message_content(message: dict[str, Any]) -> ChatMessageContent: - """Translate an AutoGen message to a Semantic Kernel ChatMessageContent.""" - items: list[TextContent | FunctionCallContent | FunctionResultContent] = [] - role = AuthorRole(message.get("role")) - name: str = message.get("name", "") - - content = message.get("content") - if content is not None: - text = TextContent(text=content) - items.append(text) - - if role == AuthorRole.ASSISTANT: - tool_calls = message.get("tool_calls") - if tool_calls is not None: - for tool_call in tool_calls: - items.append( - FunctionCallContent( - id=tool_call.get("id"), - function_name=tool_call.get("name"), - arguments=tool_call.get("function").get("arguments"), - ) - ) - - if role == AuthorRole.TOOL: - tool_responses = message.get("tool_responses") - if tool_responses is not None: - for tool_response in tool_responses: - items.append( - FunctionResultContent( - id=tool_response.get("tool_call_id"), - result=tool_response.get("content"), - ) - ) - - return ChatMessageContent(role=role, items=items, name=name) # type: ignore diff --git a/python/semantic_kernel/agents/azure_ai/__init__.py b/python/semantic_kernel/agents/azure_ai/__init__.py deleted file mode 100644 index bb074ae1499c..000000000000 --- a/python/semantic_kernel/agents/azure_ai/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent -from semantic_kernel.agents.azure_ai.azure_ai_agent_settings import AzureAIAgentSettings - -__all__ = ["AzureAIAgent", "AzureAIAgentSettings"] diff --git a/python/semantic_kernel/agents/azure_ai/agent_content_generation.py b/python/semantic_kernel/agents/azure_ai/agent_content_generation.py deleted file mode 100644 index 997ded13eb40..000000000000 --- a/python/semantic_kernel/agents/azure_ai/agent_content_generation.py +++ /dev/null @@ -1,435 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from typing import TYPE_CHECKING, Any, cast - -from azure.ai.projects.models import ( - MessageDeltaImageFileContent, - MessageDeltaImageFileContentObject, - MessageDeltaTextContent, - MessageDeltaTextFileCitationAnnotation, - MessageDeltaTextFilePathAnnotation, - MessageImageFileContent, - MessageTextContent, - MessageTextFileCitationAnnotation, - MessageTextFilePathAnnotation, - RunStep, - RunStepDeltaCodeInterpreterDetailItemObject, - RunStepDeltaCodeInterpreterImageOutput, - RunStepDeltaCodeInterpreterLogOutput, - RunStepDeltaCodeInterpreterToolCall, - RunStepDeltaFileSearchToolCall, - RunStepDeltaFunctionToolCall, - RunStepFunctionToolCall, - ThreadMessage, - ThreadRun, -) - -from semantic_kernel.contents.annotation_content import AnnotationContent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.file_reference_content import FileReferenceContent -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.contents.image_content import ImageContent -from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent -from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent -from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent -from semantic_kernel.contents.streaming_text_content import StreamingTextContent -from semantic_kernel.contents.text_content import TextContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.utils.feature_stage_decorator import experimental - -if TYPE_CHECKING: - from azure.ai.projects.models import ( - MessageDeltaChunk, - RunStepDeltaToolCallObject, - ) - -################################################################### -# The methods in this file are used with Azure AI Agent # -# related code. They are used to invoke, create chat messages, # -# or generate message content. # -################################################################### - - -@experimental -def get_message_contents(message: "ChatMessageContent") -> list[dict[str, Any]]: - """Get the message contents. - - Args: - message: The message. 
- """ - contents: list[dict[str, Any]] = [] - for content in message.items: - match content: - case TextContent(): - # Make sure text is a string - final_text = content.text - if not isinstance(final_text, str): - if isinstance(final_text, (list, tuple)): - final_text = " ".join(map(str, final_text)) - else: - final_text = str(final_text) - - contents.append({"type": "text", "text": final_text}) - - case ImageContent(): - if content.uri: - contents.append(content.to_dict()) - - case FileReferenceContent(): - contents.append({ - "type": "image_file", - "image_file": {"file_id": content.file_id}, - }) - - case FunctionResultContent(): - final_result = content.result - match final_result: - case str(): - contents.append({"type": "text", "text": final_result}) - case list() | tuple(): - contents.append({"type": "text", "text": " ".join(map(str, final_result))}) - case _: - contents.append({"type": "text", "text": str(final_result)}) - - return contents - - -@experimental -def generate_message_content( - assistant_name: str, message: "ThreadMessage", completed_step: "RunStep | None" = None -) -> ChatMessageContent: - """Generate message content.""" - role = AuthorRole(message.role) - - metadata = ( - { - "created_at": completed_step.created_at, - "message_id": message.id, # message needs to be defined in context - "step_id": completed_step.id, - "run_id": completed_step.run_id, - "thread_id": completed_step.thread_id, - "assistant_id": completed_step.assistant_id, - "usage": completed_step.usage, - } - if completed_step is not None - else None - ) - - content: ChatMessageContent = ChatMessageContent(role=role, name=assistant_name, metadata=metadata) # type: ignore - - messages: list[MessageImageFileContent | MessageTextContent] = cast( - list[MessageImageFileContent | MessageTextContent], message.content or [] - ) - for item_content in messages: - if item_content.type == "text": - content.items.append( - TextContent( - text=item_content.text.value, - ) - ) - for annotation in item_content.text.annotations: - content.items.append(generate_annotation_content(annotation)) # type: ignore - elif item_content.type == "image_file": - content.items.append( - FileReferenceContent( - file_id=item_content.image_file.file_id, - ) - ) - return content - - -@experimental -def generate_streaming_message_content( - assistant_name: str, message_delta_event: "MessageDeltaChunk" -) -> StreamingChatMessageContent: - """Generate streaming message content from a MessageDeltaEvent.""" - delta = message_delta_event.delta - - # Determine the role - role = AuthorRole(delta.role) if delta.role is not None else AuthorRole("assistant") - - items: list[StreamingTextContent | StreamingAnnotationContent | StreamingFileReferenceContent] = [] - - delta_chunks: list[MessageDeltaImageFileContent | MessageDeltaTextContent] = cast( - list[MessageDeltaImageFileContent | MessageDeltaTextContent], delta.content or [] - ) - - for delta_block in delta_chunks: - if delta_block.type == "text": - if delta_block.text and delta_block.text.value: # Ensure text is not None - text_value = delta_block.text.value - items.append( - StreamingTextContent( - text=text_value, - choice_index=delta_block.index, - ) - ) - # Process annotations if any - if delta_block.text.annotations: - for annotation in delta_block.text.annotations or []: - if isinstance( - annotation, - ( - MessageDeltaTextFileCitationAnnotation, - MessageDeltaTextFilePathAnnotation, - ), - ): - items.append(generate_streaming_annotation_content(annotation)) - elif delta_block.type == 
"image_file": - assert isinstance(delta_block, MessageDeltaImageFileContent) # nosec - if delta_block.image_file and isinstance(delta_block.image_file, MessageDeltaImageFileContentObject): - file_id = delta_block.image_file.file_id - items.append( - StreamingFileReferenceContent( - file_id=file_id, - ) - ) - - return StreamingChatMessageContent(role=role, name=assistant_name, items=items, choice_index=0) # type: ignore - - -@experimental -def get_function_call_contents( - run: "ThreadRun", function_steps: dict[str, FunctionCallContent] -) -> list[FunctionCallContent]: - """Extract function call contents from the run. - - Args: - run: The run. - function_steps: The function steps - - Returns: - The list of function call contents. - """ - function_call_contents: list[FunctionCallContent] = [] - required_action = getattr(run, "required_action", None) - if not required_action or not getattr(required_action, "submit_tool_outputs", False): - return function_call_contents - for tool_call in required_action.submit_tool_outputs.tool_calls: - tool: RunStepFunctionToolCall = tool_call - fcc = FunctionCallContent( - id=tool.id, - index=getattr(tool, "index", None), - name=tool.function.name, - arguments=tool.function.arguments, - ) - function_call_contents.append(fcc) - function_steps[tool.id] = fcc - return function_call_contents - - -@experimental -def generate_function_call_content(agent_name: str, fccs: list[FunctionCallContent]) -> ChatMessageContent: - """Generate function call content. - - Args: - agent_name: The agent name. - fccs: The function call contents. - - Returns: - ChatMessageContent: The chat message content containing the function call content as the items. - """ - return ChatMessageContent(role=AuthorRole.ASSISTANT, name=agent_name, items=fccs) # type: ignore - - -@experimental -def generate_function_call_streaming_content( - agent_name: str, - fccs: list[FunctionCallContent], -) -> StreamingChatMessageContent: - """Generate function call content. - - Args: - agent_name: The agent name. - fccs: The function call contents. - - Returns: - StreamingChatMessageContent: The chat message content containing the function call content as the items. - """ - return StreamingChatMessageContent(role=AuthorRole.ASSISTANT, choice_index=0, name=agent_name, items=fccs) # type: ignore - - -@experimental -def generate_function_result_content( - agent_name: str, function_step: FunctionCallContent, tool_call: "RunStepFunctionToolCall" -) -> ChatMessageContent: - """Generate function result content.""" - function_call_content: ChatMessageContent = ChatMessageContent(role=AuthorRole.TOOL, name=agent_name) # type: ignore - function_call_content.items.append( - FunctionResultContent( - function_name=function_step.function_name, - plugin_name=function_step.plugin_name, - id=function_step.id, - result=tool_call.function.output, # type: ignore - ) - ) - return function_call_content - - -@experimental -def generate_code_interpreter_content(agent_name: str, code: str) -> "ChatMessageContent": - """Generate code interpreter content. - - Args: - agent_name: The agent name. - code: The code. - - Returns: - ChatMessageContent: The chat message content. - """ - return ChatMessageContent( - role=AuthorRole.ASSISTANT, - content=code, - name=agent_name, - metadata={"code": True}, - ) - - -@experimental -def generate_streaming_function_content( - agent_name: str, step_details: "RunStepDeltaToolCallObject" -) -> "StreamingChatMessageContent | None": - """Generate streaming function content. 
- - Args: - agent_name: The agent name. - step_details: The function step. - - Returns: - StreamingChatMessageContent: The chat message content. - """ - if not step_details.tool_calls: - return None - - items: list[FunctionCallContent] = [] - - tool_calls: list[ - RunStepDeltaCodeInterpreterToolCall | RunStepDeltaFileSearchToolCall | RunStepDeltaFunctionToolCall - ] = cast( - list[RunStepDeltaCodeInterpreterToolCall | RunStepDeltaFileSearchToolCall | RunStepDeltaFunctionToolCall], - step_details.tool_calls or [], - ) - - for tool in tool_calls: - if tool.type == "function" and tool.function: - items.append( - FunctionCallContent( - id=tool.id, - index=getattr(tool, "index", None), - name=tool.function.name, - arguments=tool.function.arguments, - ) - ) - - return ( - StreamingChatMessageContent( - role=AuthorRole.ASSISTANT, - name=agent_name, - items=items, # type: ignore - choice_index=0, - ) - if len(items) > 0 - else None - ) - - -@experimental -def generate_streaming_code_interpreter_content( - agent_name: str, step_details: "RunStepDeltaToolCallObject" -) -> "StreamingChatMessageContent | None": - """Generate code interpreter content. - - Args: - agent_name: The agent name. - step_details: The current step details. - - Returns: - StreamingChatMessageContent: The chat message content. - """ - items: list[StreamingTextContent | StreamingFileReferenceContent] = [] - - if not step_details.tool_calls: - return None - - metadata: dict[str, bool] = {} - for index, tool in enumerate(step_details.tool_calls): - if isinstance(tool, RunStepDeltaCodeInterpreterDetailItemObject): - code_interpreter_tool_call = tool - if code_interpreter_tool_call.input: - items.append( - StreamingTextContent( - choice_index=index, - text=code_interpreter_tool_call.input, - ) - ) - metadata["code"] = True - if code_interpreter_tool_call.outputs: - for output in code_interpreter_tool_call.outputs: - if ( - isinstance(output, RunStepDeltaCodeInterpreterImageOutput) - and output.image is not None - and output.image.file_id - ): - items.append( - StreamingFileReferenceContent( - file_id=output.image.file_id, - ) - ) - if isinstance(output, RunStepDeltaCodeInterpreterLogOutput) and output.logs: - items.append( - StreamingTextContent( - choice_index=index, - text=output.logs, - ) - ) - - return ( - StreamingChatMessageContent( - role=AuthorRole.ASSISTANT, - name=agent_name, - items=items, # type: ignore - choice_index=0, - metadata=metadata if metadata else None, - ) - if len(items) > 0 - else None - ) - - -@experimental -def generate_annotation_content( - annotation: MessageTextFilePathAnnotation | MessageTextFileCitationAnnotation, -) -> AnnotationContent: - """Generate annotation content.""" - file_id = None - if isinstance(annotation, MessageTextFilePathAnnotation) and annotation.file_path is not None: - file_id = annotation.file_path.file_id - elif isinstance(annotation, MessageTextFileCitationAnnotation) and annotation.file_citation is not None: - file_id = annotation.file_citation.file_id - - return AnnotationContent( - file_id=file_id, - quote=annotation.text, - start_index=annotation.start_index if annotation.start_index is not None else None, - end_index=annotation.end_index if annotation.end_index is not None else None, - ) - - -@experimental -def generate_streaming_annotation_content( - annotation: MessageDeltaTextFilePathAnnotation | MessageDeltaTextFileCitationAnnotation, -) -> StreamingAnnotationContent: - """Generate streaming annotation content.""" - file_id = None - if isinstance(annotation, 
MessageDeltaTextFilePathAnnotation) and annotation.file_path: - file_id = annotation.file_path.file_id if annotation.file_path.file_id else None - elif isinstance(annotation, MessageDeltaTextFileCitationAnnotation) and annotation.file_citation: - file_id = annotation.file_citation.file_id if annotation.file_citation.file_id else None - - return StreamingAnnotationContent( - file_id=file_id, - quote=annotation.text, - start_index=annotation.start_index if annotation.start_index is not None else None, - end_index=annotation.end_index if annotation.end_index is not None else None, - ) diff --git a/python/semantic_kernel/agents/azure_ai/agent_thread_actions.py b/python/semantic_kernel/agents/azure_ai/agent_thread_actions.py deleted file mode 100644 index b84f0b87e30c..000000000000 --- a/python/semantic_kernel/agents/azure_ai/agent_thread_actions.py +++ /dev/null @@ -1,876 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import logging -from collections.abc import AsyncIterable -from typing import TYPE_CHECKING, Any, ClassVar, TypeVar, cast - -from azure.ai.projects.models import ( - AgentsApiResponseFormat, - AgentsApiResponseFormatMode, - AgentsNamedToolChoiceType, - AgentStreamEvent, - AsyncAgentEventHandler, - AsyncAgentRunStream, - BaseAsyncAgentEventHandler, - OpenAIPageableListOfThreadMessage, - ResponseFormatJsonSchemaType, - RunStep, - RunStepCodeInterpreterToolCall, - RunStepDeltaChunk, - RunStepDeltaToolCallObject, - RunStepMessageCreationDetails, - RunStepToolCallDetails, - RunStepType, - SubmitToolOutputsAction, - ThreadMessage, - ThreadRun, - ToolDefinition, - TruncationObject, -) -from azure.ai.projects.models._enums import MessageRole - -from semantic_kernel.agents.azure_ai.agent_content_generation import ( - generate_code_interpreter_content, - generate_function_call_content, - generate_function_call_streaming_content, - generate_function_result_content, - generate_message_content, - generate_streaming_code_interpreter_content, - generate_streaming_function_content, - generate_streaming_message_content, - get_function_call_contents, -) -from semantic_kernel.agents.azure_ai.azure_ai_agent_utils import AzureAIAgentUtils -from semantic_kernel.agents.open_ai.function_action_result import FunctionActionResult -from semantic_kernel.connectors.ai.function_calling_utils import ( - kernel_function_metadata_to_function_call_format, - merge_streaming_function_results, -) -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions.agent_exceptions import AgentInvokeException -from semantic_kernel.functions import KernelArguments -from semantic_kernel.utils.feature_stage_decorator import experimental - -if TYPE_CHECKING: - from azure.ai.projects.aio import AIProjectClient - - from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent - from semantic_kernel.contents.chat_history import ChatHistory - from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent - from semantic_kernel.kernel import Kernel - -_T = TypeVar("_T", bound="AgentThreadActions") - -logger: logging.Logger = logging.getLogger(__name__) - - -@experimental -class AgentThreadActions: - """AzureAI Agent Thread Actions.""" - - polling_status: ClassVar[list[str]] = ["queued", "in_progress", "cancelling"] - error_message_states: ClassVar[list[str]] = 
["failed", "cancelled", "expired", "incomplete"] - - # region Invocation Methods - - @classmethod - async def invoke( - cls: type[_T], - *, - agent: "AzureAIAgent", - thread_id: str, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - # Run-level parameters: - model: str | None = None, - instructions_override: str | None = None, - additional_instructions: str | None = None, - additional_messages: "list[ChatMessageContent] | None" = None, - tools: list[ToolDefinition] | None = None, - temperature: float | None = None, - top_p: float | None = None, - max_prompt_tokens: int | None = None, - max_completion_tokens: int | None = None, - truncation_strategy: TruncationObject | None = None, - response_format: AgentsApiResponseFormat - | AgentsApiResponseFormatMode - | ResponseFormatJsonSchemaType - | None = None, - parallel_tool_calls: bool | None = None, - metadata: dict[str, str] | None = None, - **kwargs: Any, - ) -> AsyncIterable[tuple[bool, "ChatMessageContent"]]: - """Invoke the message in the thread. - - Args: - agent: The agent to invoke. - thread_id: The thread id. - arguments: The kernel arguments. - kernel: The kernel. - model: The model. - instructions_override: The instructions override. - additional_instructions: The additional instructions. - additional_messages: The additional messages to add to the thread. Only supports messages with - role = User or Assistant. - https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages - tools: The tools. - temperature: The temperature. - top_p: The top p. - max_prompt_tokens: The max prompt tokens. - max_completion_tokens: The max completion tokens. - truncation_strategy: The truncation strategy. - response_format: The response format. - parallel_tool_calls: The parallel tool calls. - metadata: The metadata. - kwargs: Additional keyword arguments. - - Returns: - A tuple of the visibility flag and the invoked message. - """ - arguments = KernelArguments() if arguments is None else KernelArguments(**arguments, **kwargs) - kernel = kernel or agent.kernel - - tools = cls._get_tools(agent=agent, kernel=kernel) # type: ignore - - base_instructions = await agent.format_instructions(kernel=kernel, arguments=arguments) - - merged_instructions: str = "" - if instructions_override is not None: - merged_instructions = instructions_override - elif base_instructions and additional_instructions: - merged_instructions = f"{base_instructions}\n\n{additional_instructions}" - else: - merged_instructions = base_instructions or additional_instructions or "" - - run_options = cls._generate_options( - agent=agent, - model=model, - additional_messages=additional_messages, - max_completion_tokens=max_completion_tokens, - max_prompt_tokens=max_prompt_tokens, - temperature=temperature, - top_p=top_p, - metadata=metadata, - truncation_strategy=truncation_strategy, - response_format=response_format, - parallel_tool_calls=parallel_tool_calls, - ) - # Remove keys with None values. 
- run_options = {k: v for k, v in run_options.items() if v is not None} - - run: ThreadRun = await agent.client.agents.create_run( - assistant_id=agent.id, - thread_id=thread_id, - instructions=merged_instructions or agent.instructions, - tools=tools, - **run_options, - ) - - processed_step_ids = set() - function_steps: dict[str, "FunctionCallContent"] = {} - - while run.status != "completed": - run = await cls._poll_run_status(agent=agent, run=run, thread_id=thread_id) - - if run.status in cls.error_message_states: - error_message = "" - if run.last_error and run.last_error.message: - error_message = run.last_error.message - raise AgentInvokeException( - f"Run failed with status: `{run.status}` for agent `{agent.name}` and thread `{thread_id}` " - f"with error: {error_message}" - ) - - # Check if function calling is required - if run.status == "requires_action" and isinstance(run.required_action, SubmitToolOutputsAction): - logger.debug(f"Run [{run.id}] requires tool action for agent `{agent.name}` and thread `{thread_id}`") - fccs = get_function_call_contents(run, function_steps) - if fccs: - logger.debug( - f"Yielding generate_function_call_content for agent `{agent.name}` and " - f"thread `{thread_id}`, visibility False" - ) - yield False, generate_function_call_content(agent_name=agent.name, fccs=fccs) - - from semantic_kernel.contents.chat_history import ChatHistory - - chat_history = ChatHistory() if kwargs.get("chat_history") is None else kwargs["chat_history"] - _ = await cls._invoke_function_calls(kernel=kernel, fccs=fccs, chat_history=chat_history) - - tool_outputs = cls._format_tool_outputs(fccs, chat_history) - await agent.client.agents.submit_tool_outputs_to_run( - run_id=run.id, - thread_id=thread_id, - tool_outputs=tool_outputs, # type: ignore - ) - logger.debug(f"Submitted tool outputs for agent `{agent.name}` and thread `{thread_id}`") - - steps_response = await agent.client.agents.list_run_steps(run_id=run.id, thread_id=thread_id) - logger.debug(f"Called for steps_response for run [{run.id}] agent `{agent.name}` and thread `{thread_id}`") - steps: list[RunStep] = steps_response.data - - def sort_key(step: RunStep): - # Put tool_calls first, then message_creation. - # If multiple steps share a type, break ties by completed_at. 
- return (0 if step.type == "tool_calls" else 1, step.completed_at) - - completed_steps_to_process = sorted( - [s for s in steps if s.completed_at is not None and s.id not in processed_step_ids], - key=sort_key, - ) - - logger.debug( - f"Completed steps to process for run [{run.id}] agent `{agent.name}` and thread `{thread_id}` " - f"with length `{len(completed_steps_to_process)}`" - ) - - message_count = 0 - for completed_step in completed_steps_to_process: - match completed_step.type: - case RunStepType.TOOL_CALLS: - logger.debug( - f"Entering step type tool_calls for run [{run.id}], agent `{agent.name}` and " - f"thread `{thread_id}`" - ) - tool_call_details: RunStepToolCallDetails = cast( - RunStepToolCallDetails, completed_step.step_details - ) - for tool_call in tool_call_details.tool_calls: - is_visible = False - content: "ChatMessageContent | None" = None - match tool_call.type: - case AgentsNamedToolChoiceType.CODE_INTERPRETER: - logger.debug( - f"Entering tool_calls (code_interpreter) for run [{run.id}], agent " - f"`{agent.name}` and thread `{thread_id}`" - ) - code_call: RunStepCodeInterpreterToolCall = cast( - RunStepCodeInterpreterToolCall, tool_call - ) - content = generate_code_interpreter_content( - agent.name, - code_call.code_interpreter.input, - ) - is_visible = True - case AgentsNamedToolChoiceType.FUNCTION: - logger.debug( - f"Entering tool_calls (function) for run [{run.id}], agent `{agent.name}` " - f"and thread `{thread_id}`" - ) - function_step = function_steps.get(tool_call.id) - assert function_step is not None # nosec - content = generate_function_result_content( - agent_name=agent.name, - function_step=function_step, - tool_call=tool_call, # type: ignore - ) - - if content: - message_count += 1 - logger.debug( - f"Yielding tool_message for run [{run.id}], agent `{agent.name}`, " - f"thread `{thread_id}`, message count `{message_count}`, " - f"is_visible `{is_visible}`" - ) - yield is_visible, content - case RunStepType.MESSAGE_CREATION: - logger.debug( - f"Entering message_creation for run [{run.id}], agent `{agent.name}` and thread " - f"`{thread_id}`" - ) - message_call_details: RunStepMessageCreationDetails = cast( - RunStepMessageCreationDetails, completed_step.step_details - ) - message = await cls._retrieve_message( - agent=agent, - thread_id=thread_id, - message_id=message_call_details.message_creation.message_id, # type: ignore - ) - if message: - content = generate_message_content(agent.name, message) - if content and len(content.items) > 0: - message_count += 1 - logger.debug( - f"Yielding message_creation for run [{run.id}], agent `{agent.name}`, " - f"thread `{thread_id}`, message count `{message_count}`, is_visible `True`" - ) - yield True, content - processed_step_ids.add(completed_step.id) - - @classmethod - async def invoke_stream( - cls: type[_T], - *, - agent: "AzureAIAgent", - thread_id: str, - messages: "list[ChatMessageContent] | None" = None, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - # Run-level parameters: - model: str | None = None, - instructions_override: str | None = None, - additional_instructions: str | None = None, - additional_messages: "list[ChatMessageContent] | None" = None, - tools: list[ToolDefinition] | None = None, - temperature: float | None = None, - top_p: float | None = None, - max_prompt_tokens: int | None = None, - max_completion_tokens: int | None = None, - truncation_strategy: TruncationObject | None = None, - response_format: AgentsApiResponseFormat - | 
AgentsApiResponseFormatMode - | ResponseFormatJsonSchemaType - | None = None, - parallel_tool_calls: bool | None = None, - metadata: dict[str, str] | None = None, - **kwargs: Any, - ) -> AsyncIterable["StreamingChatMessageContent"]: - """Invoke the agent stream and yield ChatMessageContent continuously. - - Args: - agent: The agent to invoke. - thread_id: The thread id. - messages: The messages. - arguments: The kernel arguments. - kernel: The kernel. - model: The model. - instructions_override: The instructions override. - additional_instructions: The additional instructions. - additional_messages: The additional messages to add to the thread. Only supports messages with - role = User or Assistant. - https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages - tools: The tools. - temperature: The temperature. - top_p: The top p. - max_prompt_tokens: The max prompt tokens. - max_completion_tokens: The max completion tokens. - truncation_strategy: The truncation strategy. - response_format: The response format. - parallel_tool_calls: The parallel tool calls. - metadata: The metadata. - kwargs: Additional keyword arguments. - - Returns: - An async iterable of streamed content. - """ - arguments = KernelArguments() if arguments is None else KernelArguments(**arguments, **kwargs) - kernel = kernel or agent.kernel - arguments = agent._merge_arguments(arguments) - - tools = cls._get_tools(agent=agent, kernel=kernel) # type: ignore - - base_instructions = await agent.format_instructions(kernel=kernel, arguments=arguments) - - merged_instructions: str = "" - if instructions_override is not None: - merged_instructions = instructions_override - elif base_instructions and additional_instructions: - merged_instructions = f"{base_instructions}\n\n{additional_instructions}" - else: - merged_instructions = base_instructions or additional_instructions or "" - - run_options = cls._generate_options( - agent=agent, - model=model, - additional_messages=additional_messages, - max_completion_tokens=max_completion_tokens, - max_prompt_tokens=max_prompt_tokens, - temperature=temperature, - top_p=top_p, - metadata=metadata, - truncation_strategy=truncation_strategy, - response_format=response_format, - parallel_tool_calls=parallel_tool_calls, - ) - run_options = {k: v for k, v in run_options.items() if v is not None} - - stream: AsyncAgentRunStream = await agent.client.agents.create_stream( - assistant_id=agent.id, - thread_id=thread_id, - instructions=merged_instructions or agent.instructions, - tools=tools, - **run_options, - ) - - function_steps: dict[str, FunctionCallContent] = {} - active_messages: dict[str, RunStep] = {} - - async for content in cls._process_stream_events( - stream=stream, - agent=agent, - thread_id=thread_id, - messages=messages, - kernel=kernel, - function_steps=function_steps, - active_messages=active_messages, - ): - if content: - yield content - - @classmethod - async def _process_stream_events( - cls: type[_T], - stream: AsyncAgentRunStream, - agent: "AzureAIAgent", - thread_id: str, - kernel: "Kernel", - function_steps: dict[str, FunctionCallContent], - active_messages: dict[str, RunStep], - messages: "list[ChatMessageContent] | None" = None, - ) -> AsyncIterable["StreamingChatMessageContent"]: - """Process events from the main stream and delegate tool output handling as needed.""" - while True: - async with stream as response_stream: - async for event_type, event_data, _ in response_stream: - if event_type == AgentStreamEvent.THREAD_RUN_CREATED: 
- run = event_data - logger.info(f"Assistant run created with ID: {run.id}") - - elif event_type == AgentStreamEvent.THREAD_RUN_IN_PROGRESS: - run_step = cast(RunStep, event_data) - logger.info(f"Assistant run in progress with ID: {run_step.id}") - - elif event_type == AgentStreamEvent.THREAD_MESSAGE_DELTA: - yield generate_streaming_message_content(agent.name, event_data) - - elif event_type == AgentStreamEvent.THREAD_RUN_STEP_COMPLETED: - step_completed = cast(RunStep, event_data) - logger.info(f"Run step completed with ID: {step_completed.id}") - if isinstance(step_completed.step_details, RunStepMessageCreationDetails): - msg_id = step_completed.step_details.message_creation.message_id - active_messages.setdefault(msg_id, step_completed) - - elif event_type == AgentStreamEvent.THREAD_RUN_STEP_DELTA: - run_step_event: RunStepDeltaChunk = event_data - details = run_step_event.delta.step_details - if not details: - continue - if isinstance(details, RunStepDeltaToolCallObject) and details.tool_calls: - for tool_call in details.tool_calls: - content = None - if tool_call.type == "function": - content = generate_streaming_function_content(agent.name, details) - elif tool_call.type == "code_interpreter": - content = generate_streaming_code_interpreter_content(agent.name, details) - if content: - yield content - - elif event_type == AgentStreamEvent.THREAD_RUN_REQUIRES_ACTION: - run = cast(ThreadRun, event_data) - action_result = await cls._handle_streaming_requires_action( - agent_name=agent.name, - kernel=kernel, - run=run, - function_steps=function_steps, - ) - if action_result is None: - raise RuntimeError( - f"Function call required but no function steps found for agent `{agent.name}` " - f"thread: {thread_id}." - ) - - if action_result.function_result_streaming_content: - yield action_result.function_result_streaming_content - if messages: - messages.append(action_result.function_result_streaming_content) - - if action_result.function_call_streaming_content: - if messages: - messages.append(action_result.function_call_streaming_content) - async for sub_content in cls._stream_tool_outputs( - agent=agent, - thread_id=thread_id, - run=run, - action_result=action_result, - active_messages=active_messages, - messages=messages, - ): - if sub_content: - yield sub_content - break - - elif event_type == AgentStreamEvent.THREAD_RUN_COMPLETED: - run = cast(ThreadRun, event_data) - logger.info(f"Run completed with ID: {run.id}") - if active_messages: - for msg_id, step in active_messages.items(): - message = await cls._retrieve_message( - agent=agent, thread_id=thread_id, message_id=msg_id - ) - if message and hasattr(message, "content"): - final_content = generate_message_content(agent.name, message, step) - if messages: - messages.append(final_content) - return - - elif event_type == AgentStreamEvent.THREAD_RUN_FAILED: - run_failed = cast(ThreadRun, event_data) - error_message = ( - run_failed.last_error.message - if run_failed.last_error and run_failed.last_error.message - else "" - ) - raise RuntimeError( - f"Run failed with status: `{run_failed.status}` for agent `{agent.name}` " - f"thread `{thread_id}` with error: {error_message}" - ) - else: - break - return - - @classmethod - async def _stream_tool_outputs( - cls: type[_T], - agent: "AzureAIAgent", - thread_id: str, - run: ThreadRun, - action_result: FunctionActionResult, - active_messages: dict[str, RunStep], - messages: "list[ChatMessageContent] | None" = None, - ) -> AsyncIterable["StreamingChatMessageContent"]: - """Wrap the tool outputs 
stream as an async generator. - - This allows downstream consumers to iterate over the yielded content. - """ - handler: BaseAsyncAgentEventHandler = AsyncAgentEventHandler() - await agent.client.agents.submit_tool_outputs_to_stream( - run_id=run.id, - thread_id=thread_id, - tool_outputs=action_result.tool_outputs, # type: ignore - event_handler=handler, - ) - async for sub_event_type, sub_event_data, _ in handler: - if sub_event_type == AgentStreamEvent.THREAD_MESSAGE_DELTA: - yield generate_streaming_message_content(agent.name, sub_event_data) - elif sub_event_type == AgentStreamEvent.THREAD_RUN_COMPLETED: - thread_run = cast(ThreadRun, sub_event_data) - logger.info(f"Run completed with ID: {thread_run.id}") - if active_messages: - for msg_id, step in active_messages.items(): - message = await cls._retrieve_message(agent=agent, thread_id=thread_id, message_id=msg_id) - if message and hasattr(message, "content"): - final_content = generate_message_content(agent.name, message, step) - if messages: - messages.append(final_content) - return - elif sub_event_type == AgentStreamEvent.THREAD_RUN_FAILED: - run_failed = cast(ThreadRun, sub_event_data) - error_message = ( - run_failed.last_error.message if run_failed.last_error and run_failed.last_error.message else "" - ) - raise RuntimeError( - f"Run failed with status: `{run_failed.status}` for agent `{agent.name}` " - f"thread `{thread_id}` with error: {error_message}" - ) - elif sub_event_type == AgentStreamEvent.DONE: - break - - # endregion - - # region Messaging Handling Methods - - @classmethod - async def create_thread( - cls: type[_T], - client: "AIProjectClient", - **kwargs: Any, - ) -> str: - """Create a thread. - - Args: - client: The client to use to create the thread. - kwargs: Additional keyword arguments. - - Returns: - The ID of the created thread. - """ - thread = await client.agents.create_thread(**kwargs) - return thread.id - - @classmethod - async def create_message( - cls: type[_T], - client: "AIProjectClient", - thread_id: str, - message: "str | ChatMessageContent", - **kwargs: Any, - ) -> "ThreadMessage | None": - """Create a message in the thread. - - Args: - client: The client to use to create the message. - thread_id: The ID of the thread to create the message in. - message: The message to create. - kwargs: Additional keyword arguments. - - Returns: - The created message. - """ - if isinstance(message, str): - message = ChatMessageContent(role=AuthorRole.USER, content=message) - - if any(isinstance(item, FunctionCallContent) for item in message.items): - return None - - if not message.content.strip(): - return None - - return await client.agents.create_message( - thread_id=thread_id, - role=MessageRole.USER if message.role == AuthorRole.USER else MessageRole.AGENT, - content=message.content, - attachments=AzureAIAgentUtils.get_attachments(message), - metadata=AzureAIAgentUtils.get_metadata(message), - **kwargs, - ) - - @classmethod - async def get_messages( - cls: type[_T], - client: "AIProjectClient", - thread_id: str, - ) -> AsyncIterable["ChatMessageContent"]: - """Get messages from a thread. - - Args: - client: The client to use to get the messages. - thread_id: The ID of the thread to get the messages from. - - Yields: - The messages from the thread. 
- """ - agent_names: dict[str, Any] = {} - last_id: str | None = None - messages: OpenAIPageableListOfThreadMessage - - while True: - messages = await client.agents.list_messages( - thread_id=thread_id, - run_id=None, - limit=None, - order="desc", - after=last_id, - before=None, - ) - - if not messages: - break - - for message in messages.data: - last_id = message.id - assistant_name: str | None = None - - if message.assistant_id and message.assistant_id.strip() and message.assistant_id not in agent_names: - assistant = await client.agents.get_agent(message.assistant_id) - if assistant.name and assistant.name.strip(): - agent_names[assistant.id] = assistant.name - - assistant_name = agent_names.get(message.assistant_id) or message.assistant_id - - content = generate_message_content(assistant_name, message) - - if len(content.items) > 0: - yield content - - if not messages.has_more: - break - - # endregion - - # region Internal Methods - - @classmethod - def _merge_options( - cls: type[_T], - *, - agent: "AzureAIAgent", - model: str | None = None, - response_format: AgentsApiResponseFormat - | AgentsApiResponseFormatMode - | ResponseFormatJsonSchemaType - | None = None, - temperature: float | None = None, - top_p: float | None = None, - metadata: dict[str, str] | None = None, - **kwargs: Any, - ) -> dict[str, Any]: - """Merge run-time options with the agent-level options. - - Run-level parameters take precedence. - """ - return { - "model": model if model is not None else agent.definition.model, - "response_format": response_format if response_format is not None else agent.definition.response_format, - "temperature": temperature if temperature is not None else agent.definition.temperature, - "top_p": top_p if top_p is not None else agent.definition.top_p, - "metadata": metadata if metadata is not None else agent.definition.metadata, - **kwargs, - } - - @classmethod - def _generate_options(cls: type[_T], **kwargs: Any) -> dict[str, Any]: - """Generate a dictionary of options that can be passed directly to create_run.""" - merged = cls._merge_options(**kwargs) - trunc_count = merged.get("truncation_message_count", None) - max_completion_tokens = merged.get("max_completion_tokens", None) - max_prompt_tokens = merged.get("max_prompt_tokens", None) - parallel_tool_calls = merged.get("parallel_tool_calls_enabled", None) - additional_messages = cls._translate_additional_messages(merged.get("additional_messages", None)) - return { - "model": merged.get("model"), - "top_p": merged.get("top_p"), - "response_format": merged.get("response_format"), - "temperature": merged.get("temperature"), - "truncation_strategy": trunc_count, - "metadata": merged.get("metadata"), - "max_completion_tokens": max_completion_tokens, - "max_prompt_tokens": max_prompt_tokens, - "parallel_tool_calls": parallel_tool_calls, - "additional_messages": additional_messages, - } - - @classmethod - def _translate_additional_messages( - cls: type[_T], messages: "list[ChatMessageContent] | None" - ) -> list[ThreadMessage] | None: - """Translate additional messages to the required format.""" - if not messages: - return None - return AzureAIAgentUtils.get_thread_messages(messages) - - @classmethod - def _prepare_tool_definition(cls: type[_T], tool: dict | ToolDefinition) -> dict | ToolDefinition: - """Prepare the tool definition.""" - if tool.get("type") == "openapi" and "openapi" in tool: - openapi_data = dict(tool["openapi"]) - openapi_data.pop("functions", None) - tool = dict(tool) - tool["openapi"] = openapi_data - return tool - - 
@classmethod - def _get_tools(cls: type[_T], agent: "AzureAIAgent", kernel: "Kernel") -> list[dict[str, Any] | ToolDefinition]: - """Get the tools for the agent.""" - tools: list[Any] = list(agent.definition.tools) - funcs = kernel.get_full_list_of_function_metadata() - dict_defs = [kernel_function_metadata_to_function_call_format(f) for f in funcs] - tools.extend(dict_defs) - return [cls._prepare_tool_definition(tool) for tool in tools] - - @classmethod - async def _poll_run_status(cls: type[_T], agent: "AzureAIAgent", run: ThreadRun, thread_id: str) -> ThreadRun: - """Poll the run status.""" - logger.info(f"Polling run status: {run.id}, threadId: {thread_id}") - try: - run = await asyncio.wait_for( - cls._poll_loop(agent=agent, run=run, thread_id=thread_id), - timeout=agent.polling_options.run_polling_timeout.total_seconds(), - ) - except asyncio.TimeoutError: - timeout_duration = agent.polling_options.run_polling_timeout - error_message = ( - f"Polling timed out for run id: `{run.id}` and thread id: `{thread_id}` " - f"after waiting {timeout_duration}." - ) - logger.error(error_message) - raise AgentInvokeException(error_message) - logger.info(f"Polled run status: {run.status}, {run.id}, threadId: {thread_id}") - return run - - @classmethod - async def _poll_loop(cls: type[_T], agent: "AzureAIAgent", run: ThreadRun, thread_id: str) -> ThreadRun: - """Continuously poll the run status until it is no longer pending.""" - count = 0 - while True: - await asyncio.sleep(agent.polling_options.get_polling_interval(count).total_seconds()) - count += 1 - try: - run = await agent.client.agents.get_run(run_id=run.id, thread_id=thread_id) - except Exception as e: - logger.warning(f"Failed to retrieve run for run id: `{run.id}` and thread id: `{thread_id}`: {e}") - if run.status not in cls.polling_status: - break - return run - - @classmethod - async def _retrieve_message( - cls: type[_T], agent: "AzureAIAgent", thread_id: str, message_id: str - ) -> ThreadMessage | None: - """Retrieve a message from a thread.""" - message: ThreadMessage | None = None - count = 0 - max_retries = 3 - while count < max_retries: - try: - message = await agent.client.agents.get_message(thread_id=thread_id, message_id=message_id) - break - except Exception as ex: - logger.error(f"Failed to retrieve message {message_id} from thread {thread_id}: {ex}") - count += 1 - if count >= max_retries: - logger.error( - f"Max retries reached. Unable to retrieve message {message_id} from thread {thread_id}." 
- ) - break - backoff_time: float = agent.polling_options.message_synchronization_delay.total_seconds() * (2**count) - await asyncio.sleep(backoff_time) - return message - - @classmethod - async def _invoke_function_calls( - cls: type[_T], kernel: "Kernel", fccs: list["FunctionCallContent"], chat_history: "ChatHistory" - ) -> list[Any]: - """Invoke the function calls.""" - tasks = [ - kernel.invoke_function_call(function_call=function_call, chat_history=chat_history) - for function_call in fccs - ] - return await asyncio.gather(*tasks) - - @classmethod - def _format_tool_outputs( - cls: type[_T], fccs: list["FunctionCallContent"], chat_history: "ChatHistory" - ) -> list[dict[str, str]]: - """Format the tool outputs for submission.""" - from semantic_kernel.contents.function_result_content import FunctionResultContent - - tool_call_lookup = { - tool_call.id: tool_call - for message in chat_history.messages - for tool_call in message.items - if isinstance(tool_call, FunctionResultContent) - } - return [ - {"tool_call_id": fcc.id, "output": str(tool_call_lookup[fcc.id].result)} - for fcc in fccs - if fcc.id in tool_call_lookup - ] - - @classmethod - async def _handle_streaming_requires_action( - cls: type[_T], - agent_name: str, - kernel: "Kernel", - run: ThreadRun, - function_steps: dict[str, "FunctionCallContent"], - **kwargs: Any, - ) -> FunctionActionResult | None: - """Handle the requires action event for a streaming run.""" - fccs = get_function_call_contents(run, function_steps) - if fccs: - function_call_streaming_content = generate_function_call_streaming_content(agent_name=agent_name, fccs=fccs) - from semantic_kernel.contents.chat_history import ChatHistory - - chat_history = ChatHistory() if kwargs.get("chat_history") is None else kwargs["chat_history"] - _ = await cls._invoke_function_calls(kernel=kernel, fccs=fccs, chat_history=chat_history) - function_result_streaming_content = merge_streaming_function_results(chat_history.messages)[0] - tool_outputs = cls._format_tool_outputs(fccs, chat_history) - return FunctionActionResult( - function_call_streaming_content, function_result_streaming_content, tool_outputs - ) - return None - - # endregion diff --git a/python/semantic_kernel/agents/azure_ai/azure_ai_agent.py b/python/semantic_kernel/agents/azure_ai/azure_ai_agent.py deleted file mode 100644 index 1d4fb805fa35..000000000000 --- a/python/semantic_kernel/agents/azure_ai/azure_ai_agent.py +++ /dev/null @@ -1,390 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
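(Editorial aside, not part of the patch.) The requires_action handling removed above pairs each FunctionCallContent surfaced by the run with the matching FunctionResultContent recorded in the chat history, then submits the pair as a tool output. Below is a minimal sketch of that data shape, using only types that appear in the deleted module; the ids, names, and result value are invented for illustration.

from semantic_kernel.contents.chat_history import ChatHistory
from semantic_kernel.contents.chat_message_content import ChatMessageContent
from semantic_kernel.contents.function_call_content import FunctionCallContent
from semantic_kernel.contents.function_result_content import FunctionResultContent
from semantic_kernel.contents.utils.author_role import AuthorRole

# One pending call from the run's requires_action payload (id and name are made up).
fccs = [FunctionCallContent(id="call_1", name="menu-get_specials", arguments="{}")]

# The kernel's result for that call, recorded in the chat history as a TOOL message.
history = ChatHistory()
history.messages.append(
    ChatMessageContent(
        role=AuthorRole.TOOL,
        items=[
            FunctionResultContent(
                id="call_1", plugin_name="menu", function_name="get_specials", result="Clam Chowder"
            )
        ],
    )
)

# Pair calls with results by id, as _format_tool_outputs above does, and build the
# payload shape expected by submit_tool_outputs_to_run.
results_by_id = {
    item.id: item
    for message in history.messages
    for item in message.items
    if isinstance(item, FunctionResultContent)
}
tool_outputs = [
    {"tool_call_id": fcc.id, "output": str(results_by_id[fcc.id].result)}
    for fcc in fccs
    if fcc.id in results_by_id
]
print(tool_outputs)  # [{'tool_call_id': 'call_1', 'output': 'Clam Chowder'}]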
- -import logging -import sys -from collections.abc import AsyncIterable, Iterable -from typing import TYPE_CHECKING, Any, ClassVar, TypeVar - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from azure.ai.projects.aio import AIProjectClient -from azure.ai.projects.models import Agent as AzureAIAgentModel -from azure.ai.projects.models import ( - AgentsApiResponseFormat, - AgentsApiResponseFormatMode, - ResponseFormatJsonSchemaType, - ThreadMessage, - ThreadMessageOptions, - ToolDefinition, - TruncationObject, -) -from pydantic import Field - -from semantic_kernel.agents.agent import Agent -from semantic_kernel.agents.azure_ai.agent_thread_actions import AgentThreadActions -from semantic_kernel.agents.azure_ai.azure_ai_agent_settings import AzureAIAgentSettings -from semantic_kernel.agents.azure_ai.azure_ai_channel import AzureAIChannel -from semantic_kernel.agents.channels.agent_channel import AgentChannel -from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException, AgentInvokeException -from semantic_kernel.functions import KernelArguments -from semantic_kernel.functions.kernel_function import TEMPLATE_FORMAT_MAP -from semantic_kernel.functions.kernel_plugin import KernelPlugin -from semantic_kernel.kernel import Kernel -from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig -from semantic_kernel.utils.feature_stage_decorator import experimental -from semantic_kernel.utils.naming import generate_random_ascii_name -from semantic_kernel.utils.telemetry.agent_diagnostics.decorators import ( - trace_agent_get_response, - trace_agent_invocation, -) -from semantic_kernel.utils.telemetry.user_agent import APP_INFO, SEMANTIC_KERNEL_USER_AGENT - -logger: logging.Logger = logging.getLogger(__name__) - -if TYPE_CHECKING: - from azure.identity.aio import DefaultAzureCredential - - from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent - -AgentsApiResponseFormatOption = ( - str | AgentsApiResponseFormatMode | AgentsApiResponseFormat | ResponseFormatJsonSchemaType -) - -_T = TypeVar("_T", bound="AzureAIAgent") - - -@experimental -class AzureAIAgent(Agent): - """Azure AI Agent class.""" - - client: AIProjectClient - definition: AzureAIAgentModel - polling_options: RunPollingOptions = Field(default_factory=RunPollingOptions) - - channel_type: ClassVar[type[AgentChannel]] = AzureAIChannel - - def __init__( - self, - *, - arguments: "KernelArguments | None" = None, - client: AIProjectClient, - definition: AzureAIAgentModel, - kernel: "Kernel | None" = None, - plugins: list[KernelPlugin | object] | dict[str, KernelPlugin | object] | None = None, - polling_options: RunPollingOptions | None = None, - prompt_template_config: "PromptTemplateConfig | None" = None, - **kwargs: Any, - ) -> None: - """Initialize the Azure AI Agent. - - Args: - arguments: The KernelArguments instance - client: The AzureAI Project client. See "Quickstart: Create a new agent" guide - https://learn.microsoft.com/en-us/azure/ai-services/agents/quickstart?pivots=programming-language-python-azure - for details on how to create a new agent. - definition: The AzureAI Agent model created via the AzureAI Project client. 
- kernel: The Kernel instance used if invoking plugins - plugins: The plugins for the agent. If plugins are included along with a kernel, any plugins - that already exist in the kernel will be overwritten. - polling_options: The polling options for the agent. - prompt_template_config: The prompt template configuration. If this is provided along with - instructions, the prompt template will be used in place of the instructions. - **kwargs: Additional keyword arguments - """ - args: dict[str, Any] = { - "client": client, - "definition": definition, - "name": definition.name or f"azure_agent_{generate_random_ascii_name(length=8)}", - "description": definition.description, - } - - if definition.id is not None: - args["id"] = definition.id - if kernel is not None: - args["kernel"] = kernel - if arguments is not None: - args["arguments"] = arguments - if ( - definition.instructions - and prompt_template_config - and definition.instructions != prompt_template_config.template - ): - logger.info( - f"Both `instructions` ({definition.instructions}) and `prompt_template_config` " - f"({prompt_template_config.template}) were provided. Using template in `prompt_template_config` " - "and ignoring `instructions`." - ) - - if plugins is not None: - args["plugins"] = plugins - if definition.instructions is not None: - args["instructions"] = definition.instructions - if prompt_template_config is not None: - args["prompt_template"] = TEMPLATE_FORMAT_MAP[prompt_template_config.template_format]( - prompt_template_config=prompt_template_config - ) - if prompt_template_config.template is not None: - # Use the template from the prompt_template_config if it is provided - args["instructions"] = prompt_template_config.template - if polling_options is not None: - args["polling_options"] = polling_options - if kwargs: - args.update(kwargs) - - super().__init__(**args) - - @staticmethod - def create_client( - credential: "DefaultAzureCredential", - conn_str: str | None = None, - **kwargs: Any, - ) -> AIProjectClient: - """Create the Azure AI Project client using the connection string. - - Args: - credential: The credential - conn_str: The connection string - kwargs: Additional keyword arguments - - Returns: - AIProjectClient: The Azure AI Project client - """ - if conn_str is None: - ai_agent_settings = AzureAIAgentSettings.create() - if not ai_agent_settings.project_connection_string: - raise AgentInitializationException("Please provide a valid Azure AI connection string.") - conn_str = ai_agent_settings.project_connection_string.get_secret_value() - - return AIProjectClient.from_connection_string( - credential=credential, - conn_str=conn_str, - **({"user_agent": SEMANTIC_KERNEL_USER_AGENT} if APP_INFO else {}), - **kwargs, - ) - - async def add_chat_message(self, thread_id: str, message: str | ChatMessageContent) -> "ThreadMessage | None": - """Add a chat message to the thread. 
- - Args: - thread_id: The ID of the thread - message: The chat message to add - - Returns: - ThreadMessage | None: The thread message - """ - return await AgentThreadActions.create_message(client=self.client, thread_id=thread_id, message=message) - - @trace_agent_get_response - @override - async def get_response( - self, - thread_id: str, - arguments: KernelArguments | None = None, - kernel: Kernel | None = None, - # Run-level parameters: - *, - model: str | None = None, - instructions_override: str | None = None, - additional_instructions: str | None = None, - additional_messages: list[ThreadMessageOptions] | None = None, - tools: list[ToolDefinition] | None = None, - temperature: float | None = None, - top_p: float | None = None, - max_prompt_tokens: int | None = None, - max_completion_tokens: int | None = None, - truncation_strategy: TruncationObject | None = None, - response_format: AgentsApiResponseFormatOption | None = None, - parallel_tool_calls: bool | None = None, - metadata: dict[str, str] | None = None, - **kwargs: Any, - ) -> ChatMessageContent: - """Get a response from the agent on a thread.""" - if arguments is None: - arguments = KernelArguments(**kwargs) - else: - arguments.update(kwargs) - - kernel = kernel or self.kernel - arguments = self._merge_arguments(arguments) - - run_level_params = { - "model": model, - "instructions_override": instructions_override, - "additional_instructions": additional_instructions, - "additional_messages": additional_messages, - "tools": tools, - "temperature": temperature, - "top_p": top_p, - "max_prompt_tokens": max_prompt_tokens, - "max_completion_tokens": max_completion_tokens, - "truncation_strategy": truncation_strategy, - "response_format": response_format, - "parallel_tool_calls": parallel_tool_calls, - "metadata": metadata, - } - run_level_params = {k: v for k, v in run_level_params.items() if v is not None} - - messages: list[ChatMessageContent] = [] - async for is_visible, message in AgentThreadActions.invoke( - agent=self, - thread_id=thread_id, - kernel=kernel, - arguments=arguments, - **run_level_params, # type: ignore - ): - if is_visible and message.metadata.get("code") is not True: - messages.append(message) - - if not messages: - raise AgentInvokeException("No response messages were returned from the agent.") - return messages[-1] - - @trace_agent_invocation - @override - async def invoke( - self, - thread_id: str, - arguments: KernelArguments | None = None, - kernel: Kernel | None = None, - # Run-level parameters: - *, - model: str | None = None, - instructions_override: str | None = None, - additional_instructions: str | None = None, - additional_messages: list[ThreadMessageOptions] | None = None, - tools: list[ToolDefinition] | None = None, - temperature: float | None = None, - top_p: float | None = None, - max_prompt_tokens: int | None = None, - max_completion_tokens: int | None = None, - truncation_strategy: TruncationObject | None = None, - response_format: AgentsApiResponseFormatOption | None = None, - parallel_tool_calls: bool | None = None, - metadata: dict[str, str] | None = None, - **kwargs: Any, - ) -> AsyncIterable[ChatMessageContent]: - """Invoke the agent on the specified thread.""" - if arguments is None: - arguments = KernelArguments(**kwargs) - else: - arguments.update(kwargs) - - kernel = kernel or self.kernel - arguments = self._merge_arguments(arguments) - - run_level_params = { - "model": model, - "instructions_override": instructions_override, - "additional_instructions": additional_instructions, - 
"additional_messages": additional_messages, - "tools": tools, - "temperature": temperature, - "top_p": top_p, - "max_prompt_tokens": max_prompt_tokens, - "max_completion_tokens": max_completion_tokens, - "truncation_strategy": truncation_strategy, - "response_format": response_format, - "parallel_tool_calls": parallel_tool_calls, - "metadata": metadata, - } - run_level_params = {k: v for k, v in run_level_params.items() if v is not None} - - async for is_visible, message in AgentThreadActions.invoke( - agent=self, - thread_id=thread_id, - kernel=kernel, - arguments=arguments, - **run_level_params, # type: ignore - ): - if is_visible: - yield message - - @trace_agent_invocation - @override - async def invoke_stream( - self, - thread_id: str, - messages: list[ChatMessageContent] | None = None, - kernel: Kernel | None = None, - arguments: KernelArguments | None = None, - # Run-level parameters: - *, - model: str | None = None, - instructions_override: str | None = None, - additional_instructions: str | None = None, - additional_messages: list[ThreadMessageOptions] | None = None, - tools: list[ToolDefinition] | None = None, - temperature: float | None = None, - top_p: float | None = None, - max_prompt_tokens: int | None = None, - max_completion_tokens: int | None = None, - truncation_strategy: TruncationObject | None = None, - response_format: AgentsApiResponseFormatOption | None = None, - parallel_tool_calls: bool | None = None, - metadata: dict[str, str] | None = None, - **kwargs: Any, - ) -> AsyncIterable["StreamingChatMessageContent"]: - """Invoke the agent on the specified thread with a stream of messages.""" - if arguments is None: - arguments = KernelArguments(**kwargs) - else: - arguments.update(kwargs) - - kernel = kernel or self.kernel - arguments = self._merge_arguments(arguments) - - run_level_params = { - "model": model, - "instructions_override": instructions_override, - "additional_instructions": additional_instructions, - "additional_messages": additional_messages, - "tools": tools, - "temperature": temperature, - "top_p": top_p, - "max_prompt_tokens": max_prompt_tokens, - "max_completion_tokens": max_completion_tokens, - "truncation_strategy": truncation_strategy, - "response_format": response_format, - "parallel_tool_calls": parallel_tool_calls, - "metadata": metadata, - } - run_level_params = {k: v for k, v in run_level_params.items() if v is not None} - - async for message in AgentThreadActions.invoke_stream( - agent=self, - thread_id=thread_id, - messages=messages, - kernel=kernel, - arguments=arguments, - **run_level_params, # type: ignore - ): - yield message - - def get_channel_keys(self) -> Iterable[str]: - """Get the channel keys. - - Returns: - Iterable[str]: The channel keys. - """ - # Distinguish from other channel types. 
- yield f"{AzureAIAgent.__name__}" - - # Distinguish between different agent IDs - yield self.id - - # Distinguish between agent names - yield self.name - - # Distinguish between different scopes - yield str(self.client.scope) - - async def create_channel(self) -> AgentChannel: - """Create a channel.""" - thread_id = await AgentThreadActions.create_thread(self.client) - - return AzureAIChannel(client=self.client, thread_id=thread_id) diff --git a/python/semantic_kernel/agents/azure_ai/azure_ai_agent_settings.py b/python/semantic_kernel/agents/azure_ai/azure_ai_agent_settings.py deleted file mode 100644 index e17bfaaf5a5b..000000000000 --- a/python/semantic_kernel/agents/azure_ai/azure_ai_agent_settings.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from typing import ClassVar - -from pydantic import SecretStr - -from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental - - -@experimental -class AzureAIAgentSettings(KernelBaseSettings): - """Azure AI Agent settings currently used by the AzureAIAgent. - - Args: - model_deployment_name: Azure AI Agent (Env var AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME) - project_connection_string: Azure AI Agent Project Connection String - (Env var AZURE_AI_AGENT_PROJECT_CONNECTION_STRING) - endpoint: Azure AI Agent Endpoint (Env var AZURE_AI_AGENT_ENDPOINT) - subscription_id: Azure AI Agent Subscription ID (Env var AZURE_AI_AGENT_SUBSCRIPTION_ID) - resource_group_name: Azure AI Agent Resource Group Name (Env var AZURE_AI_AGENT_RESOURCE_GROUP_NAME) - project_name: Azure AI Agent Project Name (Env var AZURE_AI_AGENT_PROJECT_NAME) - """ - - env_prefix: ClassVar[str] = "AZURE_AI_AGENT_" - - model_deployment_name: str - project_connection_string: SecretStr | None = None - endpoint: str | None = None - subscription_id: str | None = None - resource_group_name: str | None = None - project_name: str | None = None diff --git a/python/semantic_kernel/agents/azure_ai/azure_ai_agent_utils.py b/python/semantic_kernel/agents/azure_ai/azure_ai_agent_utils.py deleted file mode 100644 index cfa0ebdc43b2..000000000000 --- a/python/semantic_kernel/agents/azure_ai/azure_ai_agent_utils.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
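(Editorial aside, not part of the patch.) For reference, the AzureAIAgent class removed above was typically driven end to end as sketched below, using only calls visible in the deleted source. The connection string and agent id are placeholders; per the AzureAIAgentSettings class also removed above, the connection string may instead come from the AZURE_AI_AGENT_PROJECT_CONNECTION_STRING environment variable.

import asyncio

from azure.identity.aio import DefaultAzureCredential

from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent


async def main() -> None:
    credential = DefaultAzureCredential()
    try:
        # conn_str may be omitted when AZURE_AI_AGENT_PROJECT_CONNECTION_STRING is set.
        client = AzureAIAgent.create_client(
            credential=credential, conn_str="<project-connection-string>"  # placeholder
        )
        # Wrap an agent definition that already exists in the Azure AI project.
        definition = await client.agents.get_agent("<agent-id>")  # placeholder id
        agent = AzureAIAgent(client=client, definition=definition)

        thread = await client.agents.create_thread()
        await agent.add_chat_message(thread.id, "What is Semantic Kernel?")
        response = await agent.get_response(thread_id=thread.id)
        print(response.content)
    finally:
        await credential.close()


asyncio.run(main())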
- -from collections.abc import Iterable, Sequence -from typing import TYPE_CHECKING, Any, ClassVar, TypeVar - -from azure.ai.projects.models import ( - CodeInterpreterTool, - FileSearchTool, - MessageAttachment, - MessageRole, - ThreadMessageOptions, - ToolDefinition, -) - -from semantic_kernel.contents.file_reference_content import FileReferenceContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.utils.feature_stage_decorator import experimental - -if TYPE_CHECKING: - from semantic_kernel.contents import ChatMessageContent - -_T = TypeVar("_T", bound="AzureAIAgentUtils") - - -@experimental -class AzureAIAgentUtils: - """AzureAI Agent Utility Methods.""" - - tool_metadata: ClassVar[dict[str, Sequence[ToolDefinition]]] = { - "file_search": FileSearchTool().definitions, - "code_interpreter": CodeInterpreterTool().definitions, - } - - @classmethod - def get_thread_messages(cls: type[_T], messages: list["ChatMessageContent"]) -> Any: - """Get the thread messages for an agent message.""" - if not messages: - return None - - thread_messages: list[ThreadMessageOptions] = [] - - for message in messages: - if not message.content: - continue - - thread_msg = ThreadMessageOptions( - content=message.content, - role=MessageRole.USER if message.role == AuthorRole.USER else MessageRole.AGENT, - attachments=cls.get_attachments(message), - metadata=cls.get_metadata(message) if message.metadata else None, - ) - thread_messages.append(thread_msg) - - return thread_messages - - @classmethod - def get_metadata(cls: type[_T], message: "ChatMessageContent") -> dict[str, str]: - """Get the metadata for an agent message.""" - return {k: str(v) if v is not None else "" for k, v in (message.metadata or {}).items()} - - @classmethod - def get_attachments(cls: type[_T], message: "ChatMessageContent") -> list[MessageAttachment]: - """Get the attachments for an agent message. - - Args: - message: The ChatMessageContent - - Returns: - A list of MessageAttachment - """ - return [ - MessageAttachment( - file_id=file_content.file_id, - tools=list(cls._get_tool_definition(file_content.tools)), # type: ignore - data_source=file_content.data_source if file_content.data_source else None, - ) - for file_content in message.items - if isinstance(file_content, FileReferenceContent) - ] - - @classmethod - def _get_tool_definition(cls: type[_T], tools: list[Any]) -> Iterable[ToolDefinition]: - if not tools: - return - for tool in tools: - if tool_definition := cls.tool_metadata.get(tool): - yield from tool_definition diff --git a/python/semantic_kernel/agents/azure_ai/azure_ai_channel.py b/python/semantic_kernel/agents/azure_ai/azure_ai_channel.py deleted file mode 100644 index f662e3ad33ac..000000000000 --- a/python/semantic_kernel/agents/azure_ai/azure_ai_channel.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
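(Editorial aside, not part of the patch.) A small sketch of the AzureAIAgentUtils helper removed above: get_thread_messages converts Semantic Kernel ChatMessageContent items into the ThreadMessageOptions payloads the Azure AI Agents service expects. The message texts are invented for illustration.

from semantic_kernel.agents.azure_ai.azure_ai_agent_utils import AzureAIAgentUtils
from semantic_kernel.contents.chat_message_content import ChatMessageContent
from semantic_kernel.contents.utils.author_role import AuthorRole

messages = [
    ChatMessageContent(role=AuthorRole.USER, content="Summarize yesterday's meeting notes."),
    ChatMessageContent(role=AuthorRole.ASSISTANT, content="Sure, please share the notes."),
]

# Each entry becomes a ThreadMessageOptions carrying role, content, attachments and metadata.
thread_messages = AzureAIAgentUtils.get_thread_messages(messages)
for thread_message in thread_messages:
    print(thread_message.role, thread_message.content)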
- -import sys -from collections.abc import AsyncIterable -from typing import TYPE_CHECKING - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from semantic_kernel.agents.azure_ai.agent_thread_actions import AgentThreadActions -from semantic_kernel.agents.channels.agent_channel import AgentChannel -from semantic_kernel.exceptions.agent_exceptions import AgentChatException -from semantic_kernel.utils.feature_stage_decorator import experimental - -if TYPE_CHECKING: - from azure.ai.projects.aio import AIProjectClient - - from semantic_kernel.agents.agent import Agent - from semantic_kernel.contents.chat_message_content import ChatMessageContent - - -@experimental -class AzureAIChannel(AgentChannel): - """AzureAI Channel.""" - - def __init__(self, client: "AIProjectClient", thread_id: str) -> None: - """Initialize the AzureAI Channel. - - Args: - client: The AzureAI Project client. - thread_id: The thread ID. - """ - self.client = client - self.thread_id = thread_id - - @override - async def receive(self, history: list["ChatMessageContent"]) -> None: - """Receive the conversation messages. - - Args: - history: The conversation messages. - """ - for message in history: - await AgentThreadActions.create_message(self.client, self.thread_id, message) - - @override - async def invoke(self, agent: "Agent", **kwargs) -> AsyncIterable[tuple[bool, "ChatMessageContent"]]: - """Invoke the agent. - - Args: - agent: The agent to invoke. - kwargs: The keyword arguments. - - Yields: - tuple[bool, ChatMessageContent]: The conversation messages. - """ - from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent - - if not isinstance(agent, AzureAIAgent): - raise AgentChatException(f"Agent is not of the expected type {type(AzureAIAgent)}.") - - async for is_visible, message in AgentThreadActions.invoke( - agent=agent, - thread_id=self.thread_id, - arguments=agent.arguments, - kernel=agent.kernel, - **kwargs, - ): - yield is_visible, message - - @override - async def invoke_stream( - self, - agent: "Agent", - messages: list["ChatMessageContent"], - **kwargs, - ) -> AsyncIterable["ChatMessageContent"]: - """Invoke the agent stream. - - Args: - agent: The agent to invoke. - messages: The conversation messages. - kwargs: The keyword arguments. - - Yields: - tuple[bool, StreamingChatMessageContent]: The conversation messages. - """ - from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent - - if not isinstance(agent, AzureAIAgent): - raise AgentChatException(f"Agent is not of the expected type {type(AzureAIAgent)}.") - - async for message in AgentThreadActions.invoke_stream( - agent=agent, - thread_id=self.thread_id, - messages=messages, - arguments=agent.arguments, - kernel=agent.kernel, - **kwargs, - ): - yield message - - @override - async def get_history(self) -> AsyncIterable["ChatMessageContent"]: - """Get the conversation history. - - Yields: - ChatMessageContent: The conversation history. 
-        """
-        async for message in AgentThreadActions.get_messages(self.client, thread_id=self.thread_id):
-            yield message
-
-    @override
-    async def reset(self) -> None:
-        """Reset the agent's thread."""
-        try:
-            await self.client.agents.delete_thread(thread_id=self.thread_id)
-        except Exception as e:
-            raise AgentChatException(f"Failed to delete thread: {e}")
diff --git a/python/semantic_kernel/agents/bedrock/README.md b/python/semantic_kernel/agents/bedrock/README.md
deleted file mode 100644
index d1e17f9245c3..000000000000
--- a/python/semantic_kernel/agents/bedrock/README.md
+++ /dev/null
@@ -1,27 +0,0 @@
-# Amazon Bedrock AI Agents in Semantic Kernel
-
-## Overview
-
-AWS Bedrock Agents is a managed service that allows users to quickly stand up and run AI agents in the AWS cloud.
-
-## Tools/Functions
-
-Bedrock Agents allow the use of tools via [action groups](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-action-create.html).
-
-The integration of Bedrock Agents with Semantic Kernel allows users to register kernel functions as tools in Bedrock Agents.
-
-## Enable code interpretation
-
-Bedrock Agents can write and execute code via a feature known as [code interpretation](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-code-interpretation.html), similar to what OpenAI offers.
-
-## Enable user input
-
-Bedrock Agents can [request user input](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-user-input.html) when information needed to invoke a tool is missing. When this is enabled, the agent will prompt the user for the missing information. When this is disabled, the agent will guess the missing information.
-
-## Knowledge base
-
-Bedrock Agents can leverage data saved on AWS to perform RAG tasks; this is referred to as the [knowledge base](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-kb-add.html) in AWS.
-
-## Multi-agent
-
-Bedrock Agents support [multi-agent workflows](https://docs.aws.amazon.com/bedrock/latest/userguide/agents-multi-agent-collaboration.html) for more complex tasks. However, they employ a different pattern than what we have in Semantic Kernel, so this is not supported in the current integration.
\ No newline at end of file
diff --git a/python/semantic_kernel/agents/bedrock/__init__.py b/python/semantic_kernel/agents/bedrock/__init__.py
deleted file mode 100644
index e69de29bb2d1..000000000000
diff --git a/python/semantic_kernel/agents/bedrock/action_group_utils.py b/python/semantic_kernel/agents/bedrock/action_group_utils.py
deleted file mode 100644
index 29e391e0f0f2..000000000000
--- a/python/semantic_kernel/agents/bedrock/action_group_utils.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# Copyright (c) Microsoft. All rights reserved.
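The README removed above describes the capabilities of the Bedrock integration that this patch deletes. As a rough, hypothetical sketch of how those pieces fit together, based only on the deleted sources shown in this diff (the agent name, instructions, role ARN, and model id below are placeholders, not values from the patch):

```python
# Hypothetical usage sketch of the BedrockAgent API removed by this patch,
# reconstructed from the deleted sources in this diff. All literals are placeholders.
import asyncio

from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent


async def main() -> None:
    # Creates the agent in the service and waits until it reaches the PREPARED status.
    agent = await BedrockAgent.create_and_prepare_agent(
        name="sk-sample-agent",
        instructions="You are a friendly assistant.",
        agent_resource_role_arn="arn:aws:iam::123456789012:role/bedrock-agent-role",
        foundation_model="anthropic.claude-3-sonnet-20240229-v1:0",
    )

    # Optional capabilities map to Bedrock action groups (see the README above).
    await agent.create_code_interpreter_action_group()
    await agent.create_user_input_action_group()

    # The caller owns the session id and reuses it to continue the conversation.
    session_id = BedrockAgent.create_session_id()
    response = await agent.get_response(session_id=session_id, input_text="What is 123 * 456?")
    print(response.content)

    await agent.delete_agent()


asyncio.run(main())
```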
- -from typing import Any - -from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata -from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata - - -def kernel_function_to_bedrock_function_schema( - function_choice_configuration: FunctionCallChoiceConfiguration, -) -> dict[str, Any]: - """Convert the kernel function to bedrock function schema.""" - return { - "functions": [ - kernel_function_metadata_to_bedrock_function_schema(function_metadata) - for function_metadata in function_choice_configuration.available_functions or [] - ] - } - - -def kernel_function_metadata_to_bedrock_function_schema(function_metadata: KernelFunctionMetadata) -> dict[str, Any]: - """Convert the kernel function metadata to bedrock function schema.""" - schema = { - "description": function_metadata.description, - "name": function_metadata.fully_qualified_name, - "parameters": { - parameter.name: kernel_function_parameter_to_bedrock_function_parameter(parameter) - for parameter in function_metadata.parameters - }, - # This field controls whether user confirmation is required to invoke the function. - # If this is set to "ENABLED", the user will be prompted to confirm the function invocation. - # Only after the user confirms, the function call request will be issued by the agent. - # If the user denies the confirmation, the agent will act as if the function does not exist. - # Currently, we do not support this feature, so we set it to "DISABLED". - "requireConfirmation": "DISABLED", - } - - # Remove None values from the schema - return {key: value for key, value in schema.items() if value is not None} - - -def kernel_function_parameter_to_bedrock_function_parameter(parameter: KernelParameterMetadata): - """Convert the kernel function parameters to bedrock function parameters.""" - schema = { - "description": parameter.description, - "type": kernel_function_parameter_type_to_bedrock_function_parameter_type(parameter.schema_data), - "required": parameter.is_required, - } - - # Remove None values from the schema - return {key: value for key, value in schema.items() if value is not None} - - -# These are the allowed parameter types in bedrock function. -# https://docs.aws.amazon.com/bedrock/latest/APIReference/API_agent-runtime_ParameterDetail.html -BEDROCK_FUNCTION_ALLOWED_PARAMETER_TYPES = { - "string", - "number", - "integer", - "boolean", - "array", -} - - -def kernel_function_parameter_type_to_bedrock_function_parameter_type(schema_data: dict[str, Any] | None) -> str: - """Convert the kernel function parameter type to bedrock function parameter type.""" - if schema_data is None: - raise ValueError( - "Schema data is required to convert the kernel function parameter type to bedrock function parameter type." - ) - - type_ = schema_data.get("type") - if type_ is None: - raise ValueError( - "Type is required to convert the kernel function parameter type to bedrock function parameter type." - ) - - if type_ not in BEDROCK_FUNCTION_ALLOWED_PARAMETER_TYPES: - raise ValueError( - f"Type {type_} is not allowed in bedrock function parameter type. " - f"Allowed types are {BEDROCK_FUNCTION_ALLOWED_PARAMETER_TYPES}." 
- ) - - return type_ - - -def parse_return_control_payload(return_control_payload: dict[str, Any]) -> list[FunctionCallContent]: - """Parse the return control payload to a list of function call contents for the kernel.""" - return [ - FunctionCallContent( - id=return_control_payload["invocationId"], - name=invocation_input["functionInvocationInput"]["function"], - arguments={ - parameter["name"]: parameter["value"] - for parameter in invocation_input["functionInvocationInput"]["parameters"] - }, - metadata=invocation_input, - ) - for invocation_input in return_control_payload.get("invocationInputs", []) - ] - - -def parse_function_result_contents(function_result_contents: list[FunctionResultContent]) -> list[dict[str, Any]]: - """Parse the function result contents to be returned to the agent in the session state.""" - return [ - { - "functionResult": { - "actionGroup": function_result_content.metadata["functionInvocationInput"]["actionGroup"], - "function": function_result_content.name, - "responseBody": {"TEXT": {"body": str(function_result_content.result)}}, - } - } - for function_result_content in function_result_contents - ] diff --git a/python/semantic_kernel/agents/bedrock/bedrock_agent.py b/python/semantic_kernel/agents/bedrock/bedrock_agent.py deleted file mode 100644 index 33b57363e193..000000000000 --- a/python/semantic_kernel/agents/bedrock/bedrock_agent.py +++ /dev/null @@ -1,589 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - - -import asyncio -import logging -import sys -import uuid -from collections.abc import AsyncIterable -from functools import partial, reduce -from typing import Any, ClassVar - -from pydantic import ValidationError - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from semantic_kernel.agents.bedrock.action_group_utils import ( - parse_function_result_contents, - parse_return_control_payload, -) -from semantic_kernel.agents.bedrock.bedrock_agent_base import BedrockAgentBase -from semantic_kernel.agents.bedrock.bedrock_agent_settings import BedrockAgentSettings -from semantic_kernel.agents.bedrock.models.bedrock_agent_event_type import BedrockAgentEventType -from semantic_kernel.agents.bedrock.models.bedrock_agent_model import BedrockAgentModel -from semantic_kernel.agents.bedrock.models.bedrock_agent_status import BedrockAgentStatus -from semantic_kernel.agents.channels.agent_channel import AgentChannel -from semantic_kernel.agents.channels.bedrock_agent_channel import BedrockAgentChannel -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior -from semantic_kernel.contents.binary_content import BinaryContent -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException, AgentInvokeException -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.functions.kernel_plugin import KernelPlugin -from semantic_kernel.kernel import Kernel -from semantic_kernel.utils.async_utils import 
run_in_executor -from semantic_kernel.utils.feature_stage_decorator import experimental -from semantic_kernel.utils.telemetry.agent_diagnostics.decorators import ( - trace_agent_get_response, - trace_agent_invocation, -) - -logger = logging.getLogger(__name__) - - -@experimental -class BedrockAgent(BedrockAgentBase): - """Bedrock Agent. - - Manages the interaction with Amazon Bedrock Agent Service. - """ - - channel_type: ClassVar[type[AgentChannel]] = BedrockAgentChannel - - def __init__( - self, - agent_model: BedrockAgentModel | dict[str, Any], - *, - function_choice_behavior: FunctionChoiceBehavior | None = None, - kernel: Kernel | None = None, - plugins: list[KernelPlugin | object] | dict[str, KernelPlugin | object] | None = None, - arguments: KernelArguments | None = None, - bedrock_runtime_client: Any | None = None, - bedrock_client: Any | None = None, - **kwargs, - ) -> None: - """Initialize the Bedrock Agent. - - Note that this only creates the agent object and does not create the agent in the service. - - Args: - agent_model (BedrockAgentModel | dict[str, Any]): The agent model. - function_choice_behavior (FunctionChoiceBehavior, optional): The function choice behavior for accessing - the kernel functions and filters. - kernel (Kernel, optional): The kernel to use. - plugins (list[KernelPlugin | object] | dict[str, KernelPlugin | object], optional): The plugins to use. - arguments (KernelArguments, optional): The kernel arguments. - Invoke method arguments take precedence over the arguments provided here. - bedrock_runtime_client: The Bedrock Runtime Client. - bedrock_client: The Bedrock Client. - **kwargs: Additional keyword arguments. - """ - args: dict[str, Any] = { - "agent_model": agent_model, - **kwargs, - } - - if function_choice_behavior: - args["function_choice_behavior"] = function_choice_behavior - if kernel: - args["kernel"] = kernel - if plugins: - args["plugins"] = plugins - if arguments: - args["arguments"] = arguments - if bedrock_runtime_client: - args["bedrock_runtime_client"] = bedrock_runtime_client - if bedrock_client: - args["bedrock_client"] = bedrock_client - - super().__init__(**args) - - # region convenience class methods - - @classmethod - async def create_and_prepare_agent( - cls, - name: str, - instructions: str, - *, - agent_resource_role_arn: str | None = None, - foundation_model: str | None = None, - bedrock_runtime_client: Any | None = None, - bedrock_client: Any | None = None, - kernel: Kernel | None = None, - plugins: list[KernelPlugin | object] | dict[str, KernelPlugin | object] | None = None, - function_choice_behavior: FunctionChoiceBehavior | None = None, - arguments: KernelArguments | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None, - ) -> "BedrockAgent": - """Create a new agent asynchronously. - - This is a convenience method that creates an instance of BedrockAgent and then creates the agent on the service. - - Args: - name (str): The name of the agent. - instructions (str, optional): The instructions for the agent. - agent_resource_role_arn (str, optional): The ARN of the agent resource role. - foundation_model (str, optional): The foundation model. - bedrock_runtime_client (Any, optional): The Bedrock Runtime Client. - bedrock_client (Any, optional): The Bedrock Client. - kernel (Kernel, optional): The kernel to use. - plugins (list[KernelPlugin | object] | dict[str, KernelPlugin | object], optional): The plugins to use. 
- function_choice_behavior (FunctionChoiceBehavior, optional): The function choice behavior for accessing - the kernel functions and filters. Only FunctionChoiceType.AUTO is supported. - arguments (KernelArguments, optional): The kernel arguments. - prompt_template_config (PromptTemplateConfig, optional): The prompt template configuration. - env_file_path (str, optional): The path to the environment file. - env_file_encoding (str, optional): The encoding of the environment file. - - Returns: - An instance of BedrockAgent with the created agent. - """ - try: - bedrock_agent_settings = BedrockAgentSettings.create( - agent_resource_role_arn=agent_resource_role_arn, - foundation_model=foundation_model, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - ) - except ValidationError as e: - raise AgentInitializationException("Failed to initialize the Amazon Bedrock Agent settings.") from e - - import boto3 - from botocore.exceptions import ClientError - - bedrock_runtime_client = bedrock_runtime_client or boto3.client("bedrock-agent-runtime") - bedrock_client = bedrock_client or boto3.client("bedrock-agent") - - try: - response = await run_in_executor( - None, - partial( - bedrock_client.create_agent, - agentName=name, - foundationModel=bedrock_agent_settings.foundation_model, - agentResourceRoleArn=bedrock_agent_settings.agent_resource_role_arn, - instruction=instructions, - ), - ) - except ClientError as e: - logger.error(f"Failed to create agent {name}.") - raise AgentInitializationException("Failed to create the Amazon Bedrock Agent.") from e - - bedrock_agent = cls( - response["agent"], - function_choice_behavior=function_choice_behavior, - kernel=kernel, - plugins=plugins, - arguments=arguments, - bedrock_runtime_client=bedrock_runtime_client, - bedrock_client=bedrock_client, - ) - - # The agent will first enter the CREATING status. - # When the operation finishes, it will enter the NOT_PREPARED status. - # We need to wait for the agent to reach the NOT_PREPARED status before we can prepare it. - await bedrock_agent._wait_for_agent_status(BedrockAgentStatus.NOT_PREPARED) - await bedrock_agent.prepare_agent_and_wait_until_prepared() - - return bedrock_agent - - @classmethod - def create_session_id(cls) -> str: - """Create a new session identifier. - - It is the caller's responsibility to maintain the session ID - to continue the session with the agent. - - Find the requirement for the session identifier here: - https://docs.aws.amazon.com/bedrock/latest/APIReference/API_agent-runtime_InvokeAgent.html#API_agent-runtime_InvokeAgent_RequestParameters - """ - return str(uuid.uuid4()) - - # endregion - - @trace_agent_get_response - @override - async def get_response( - self, - session_id: str, - input_text: str, - *, - agent_alias: str | None = None, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - **kwargs, - ) -> ChatMessageContent: - """Get a response from the agent. - - Args: - session_id (str): The session identifier. This is used to maintain the session state in the service. - input_text (str): The input text. - agent_alias (str, optional): The agent alias. - arguments (KernelArguments, optional): The kernel arguments to override the current arguments. - kernel (Kernel, optional): The kernel to override the current kernel. - **kwargs: Additional keyword arguments. - - Returns: - A chat message content with the response. 
- """ - if arguments is None: - arguments = KernelArguments(**kwargs) - else: - arguments.update(kwargs) - - kernel = kernel or self.kernel - arguments = self._merge_arguments(arguments) - - kwargs.setdefault("streamingConfigurations", {})["streamFinalResponse"] = False - kwargs.setdefault("sessionState", {}) - - for _ in range(self.function_choice_behavior.maximum_auto_invoke_attempts): - response = await self._invoke_agent(session_id, input_text, agent_alias, **kwargs) - - events: list[dict[str, Any]] = [] - for event in response.get("completion", []): - events.append(event) - - if any(BedrockAgentEventType.RETURN_CONTROL in event for event in events): - # Check if there is function call requests. If there are function calls, - # parse and invoke them and return the results back to the agent. - # Not yielding the function call results back to the user. - kwargs["sessionState"].update( - await self._handle_return_control_event( - next(event for event in events if BedrockAgentEventType.RETURN_CONTROL in event), - kernel, - arguments, - ) - ) - else: - # For the rest of the events, the chunk will become the chat message content. - # If there are files or trace, they will be added to the chat message content. - file_items: list[BinaryContent] | None = None - trace_metadata: dict[str, Any] | None = None - chat_message_content: ChatMessageContent | None = None - for event in events: - if BedrockAgentEventType.CHUNK in event: - chat_message_content = self._handle_chunk_event(event) - elif BedrockAgentEventType.FILES in event: - file_items = self._handle_files_event(event) - elif BedrockAgentEventType.TRACE in event: - trace_metadata = self._handle_trace_event(event) - - if not chat_message_content or not chat_message_content.content: - raise AgentInvokeException("Chat message content is expected but not found in the response.") - - if file_items: - chat_message_content.items.extend(file_items) - if trace_metadata: - chat_message_content.metadata.update({"trace": trace_metadata}) - - if not chat_message_content: - raise AgentInvokeException("No response from the agent.") - - return chat_message_content - - raise AgentInvokeException( - "Failed to get a response from the agent. Please consider increasing the auto invoke attempts." - ) - - @trace_agent_invocation - @override - async def invoke( - self, - session_id: str, - input_text: str, - *, - agent_alias: str | None = None, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - **kwargs, - ) -> AsyncIterable[ChatMessageContent]: - """Invoke an agent. - - Args: - session_id (str): The session identifier. This is used to maintain the session state in the service. - input_text (str): The input text. - agent_alias (str, optional): The agent alias. - arguments (KernelArguments, optional): The kernel arguments to override the current arguments. - kernel (Kernel, optional): The kernel to override the current kernel. - **kwargs: Additional keyword arguments. - - Returns: - An async iterable of chat message content. 
- """ - if arguments is None: - arguments = KernelArguments(**kwargs) - else: - arguments.update(kwargs) - - kernel = kernel or self.kernel - arguments = self._merge_arguments(arguments) - - kwargs.setdefault("streamingConfigurations", {})["streamFinalResponse"] = False - kwargs.setdefault("sessionState", {}) - - for _ in range(self.function_choice_behavior.maximum_auto_invoke_attempts): - response = await self._invoke_agent(session_id, input_text, agent_alias, **kwargs) - - events: list[dict[str, Any]] = [] - for event in response.get("completion", []): - events.append(event) - - if any(BedrockAgentEventType.RETURN_CONTROL in event for event in events): - # Check if there is function call requests. If there are function calls, - # parse and invoke them and return the results back to the agent. - # Not yielding the function call results back to the user. - kwargs["sessionState"].update( - await self._handle_return_control_event( - next(event for event in events if BedrockAgentEventType.RETURN_CONTROL in event), - kernel, - arguments, - ) - ) - else: - for event in events: - if BedrockAgentEventType.CHUNK in event: - yield self._handle_chunk_event(event) - elif BedrockAgentEventType.FILES in event: - yield ChatMessageContent( - role=AuthorRole.ASSISTANT, - items=self._handle_files_event(event), # type: ignore - name=self.name, - inner_content=event, - ai_model_id=self.agent_model.foundation_model, - ) - elif BedrockAgentEventType.TRACE in event: - yield ChatMessageContent( - role=AuthorRole.ASSISTANT, - name=self.name, - content="", - inner_content=event, - ai_model_id=self.agent_model.foundation_model, - metadata=self._handle_trace_event(event), - ) - - return - - raise AgentInvokeException( - "Failed to get a response from the agent. Please consider increasing the auto invoke attempts." - ) - - @trace_agent_invocation - @override - async def invoke_stream( - self, - session_id: str, - input_text: str, - *, - agent_alias: str | None = None, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - **kwargs, - ) -> AsyncIterable[StreamingChatMessageContent]: - """Invoke an agent with streaming. - - Args: - session_id (str): The session identifier. This is used to maintain the session state in the service. - input_text (str): The input text. - agent_alias (str, optional): The agent alias. - arguments (KernelArguments, optional): The kernel arguments to override the current arguments. - kernel (Kernel, optional): The kernel to override the current kernel. - **kwargs: Additional keyword arguments. 
- - Returns: - An async iterable of streaming chat message content - """ - if arguments is None: - arguments = KernelArguments(**kwargs) - else: - arguments.update(kwargs) - - kernel = kernel or self.kernel - arguments = self._merge_arguments(arguments) - - kwargs.setdefault("streamingConfigurations", {})["streamFinalResponse"] = True - kwargs.setdefault("sessionState", {}) - - for request_index in range(self.function_choice_behavior.maximum_auto_invoke_attempts): - response = await self._invoke_agent(session_id, input_text, agent_alias, **kwargs) - - all_function_call_messages: list[StreamingChatMessageContent] = [] - for event in response.get("completion", []): - if BedrockAgentEventType.CHUNK in event: - yield self._handle_streaming_chunk_event(event) - continue - if BedrockAgentEventType.FILES in event: - yield self._handle_streaming_files_event(event) - continue - if BedrockAgentEventType.TRACE in event: - yield self._handle_streaming_trace_event(event) - continue - if BedrockAgentEventType.RETURN_CONTROL in event: - all_function_call_messages.append(self._handle_streaming_return_control_event(event)) - continue - - if not all_function_call_messages: - return - - full_message: StreamingChatMessageContent = reduce(lambda x, y: x + y, all_function_call_messages) - function_calls = [item for item in full_message.items if isinstance(item, FunctionCallContent)] - function_result_contents = await self._handle_function_call_contents(function_calls) - kwargs["sessionState"].update({ - "invocationId": function_calls[0].id, - "returnControlInvocationResults": parse_function_result_contents(function_result_contents), - }) - - # region non streaming Event Handlers - - def _handle_chunk_event(self, event: dict[str, Any]) -> ChatMessageContent: - """Create a chat message content.""" - chunk = event[BedrockAgentEventType.CHUNK] - completion = chunk["bytes"].decode() - - return ChatMessageContent( - role=AuthorRole.ASSISTANT, - content=completion, - name=self.name, - inner_content=event, - ai_model_id=self.agent_model.foundation_model, - metadata=chunk, - ) - - async def _handle_return_control_event( - self, - event: dict[str, Any], - kernel: Kernel, - kernel_arguments: KernelArguments, - ) -> dict[str, Any]: - """Handle return control event.""" - return_control_payload = event[BedrockAgentEventType.RETURN_CONTROL] - function_calls = parse_return_control_payload(return_control_payload) - if not function_calls: - raise AgentInvokeException("Function call is expected but not found in the response.") - - function_result_contents = await self._handle_function_call_contents(function_calls) - - return { - "invocationId": function_calls[0].id, - "returnControlInvocationResults": parse_function_result_contents(function_result_contents), - } - - def _handle_files_event(self, event: dict[str, Any]) -> list[BinaryContent]: - """Handle file event.""" - files_event = event[BedrockAgentEventType.FILES] - return [ - BinaryContent( - data=file["bytes"], - data_format="base64", - mime_type=file["type"], - metadata={"name": file["name"]}, - ) - for file in files_event["files"] - ] - - def _handle_trace_event(self, event: dict[str, Any]) -> dict[str, Any]: - """Handle trace event.""" - return event[BedrockAgentEventType.TRACE] - - # endregion - - # region streaming Event Handlers - - def _handle_streaming_chunk_event(self, event: dict[str, Any]) -> StreamingChatMessageContent: - """Handle streaming chunk event.""" - chunk = event[BedrockAgentEventType.CHUNK] - completion = chunk["bytes"].decode() - - return 
StreamingChatMessageContent( - role=AuthorRole.ASSISTANT, - choice_index=0, - content=completion, - name=self.name, - inner_content=event, - ai_model_id=self.agent_model.foundation_model, - ) - - def _handle_streaming_return_control_event(self, event: dict[str, Any]) -> StreamingChatMessageContent: - """Handle streaming return control event.""" - return_control_payload = event[BedrockAgentEventType.RETURN_CONTROL] - function_calls = parse_return_control_payload(return_control_payload) - - return StreamingChatMessageContent( - role=AuthorRole.ASSISTANT, - choice_index=0, - items=function_calls, # type: ignore - name=self.name, - inner_content=event, - ai_model_id=self.agent_model.foundation_model, - ) - - def _handle_streaming_files_event(self, event: dict[str, Any]) -> StreamingChatMessageContent: - """Handle streaming file event.""" - files_event = event[BedrockAgentEventType.FILES] - items: list[BinaryContent] = [ - BinaryContent( - data=file["bytes"], - data_format="base64", - mime_type=file["type"], - metadata={"name": file["name"]}, - ) - for file in files_event["files"] - ] - - return StreamingChatMessageContent( - role=AuthorRole.ASSISTANT, - choice_index=0, - items=items, # type: ignore - name=self.name, - inner_content=event, - ai_model_id=self.agent_model.foundation_model, - ) - - def _handle_streaming_trace_event(self, event: dict[str, Any]) -> StreamingChatMessageContent: - """Handle streaming trace event.""" - return StreamingChatMessageContent( - role=AuthorRole.ASSISTANT, - choice_index=0, - items=[], - name=self.name, - inner_content=event, - ai_model_id=self.agent_model.foundation_model, - metadata=event[BedrockAgentEventType.TRACE], - ) - - # endregion - - async def _handle_function_call_contents( - self, - function_call_contents: list[FunctionCallContent], - ) -> list[FunctionResultContent]: - """Handle function call contents.""" - chat_history = ChatHistory() - await asyncio.gather( - *[ - self.kernel.invoke_function_call( - function_call=function_call, - chat_history=chat_history, - arguments=self.arguments, - function_call_count=len(function_call_contents), - ) - for function_call in function_call_contents - ], - ) - - return [ - item - for chat_message in chat_history.messages - for item in chat_message.items - if isinstance(item, FunctionResultContent) - ] diff --git a/python/semantic_kernel/agents/bedrock/bedrock_agent_base.py b/python/semantic_kernel/agents/bedrock/bedrock_agent_base.py deleted file mode 100644 index 708c0d2b01de..000000000000 --- a/python/semantic_kernel/agents/bedrock/bedrock_agent_base.py +++ /dev/null @@ -1,376 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
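The chunk, files, trace, and return-control handlers above implement Bedrock's function-calling round trip: a `returnControl` event is parsed into Semantic Kernel function calls, the kernel executes them, and the results are sent back in the session state. A small illustrative sketch using the deleted `action_group_utils` helpers with a hand-written payload (not taken from this patch):

```python
# Illustrative only: turning a Bedrock "returnControl" event into Semantic Kernel
# function calls with the helpers deleted earlier in this patch. The payload below
# is hand-written for demonstration.
from semantic_kernel.agents.bedrock.action_group_utils import parse_return_control_payload

return_control_payload = {
    "invocationId": "inv-1",  # placeholder id
    "invocationInputs": [
        {
            "functionInvocationInput": {
                "actionGroup": "agent_kernel_function",
                "function": "math-Add",
                "parameters": [{"name": "a", "value": "1"}, {"name": "b", "value": "2"}],
            }
        }
    ],
}

for call in parse_return_control_payload(return_control_payload):
    # Each FunctionCallContent carries the invocation id, the fully qualified
    # function name, and the arguments parsed from the Bedrock payload.
    print(call.id, call.name, call.arguments)

# After the kernel executes these calls, the results are converted back with
# parse_function_result_contents(...) and sent to Bedrock in the session state
# under "returnControlInvocationResults" (see _handle_return_control_event above).
```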
- -import asyncio -import logging -from functools import partial -from typing import Any, ClassVar - -import boto3 -from botocore.exceptions import ClientError -from pydantic import Field, field_validator - -from semantic_kernel.agents.agent import Agent -from semantic_kernel.agents.bedrock.action_group_utils import kernel_function_to_bedrock_function_schema -from semantic_kernel.agents.bedrock.models.bedrock_action_group_model import BedrockActionGroupModel -from semantic_kernel.agents.bedrock.models.bedrock_agent_model import BedrockAgentModel -from semantic_kernel.agents.bedrock.models.bedrock_agent_status import BedrockAgentStatus -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior, FunctionChoiceType -from semantic_kernel.utils.async_utils import run_in_executor -from semantic_kernel.utils.feature_stage_decorator import experimental - -logger = logging.getLogger(__name__) - - -@experimental -class BedrockAgentBase(Agent): - """Bedrock Agent Base Class to provide common functionalities for Bedrock Agents.""" - - # There is a default alias created by Bedrock for the working draft version of the agent. - # https://docs.aws.amazon.com/bedrock/latest/userguide/agents-deploy.html - WORKING_DRAFT_AGENT_ALIAS: ClassVar[str] = "TSTALIASID" - - # Amazon Bedrock Clients - # Runtime Client: Use for inference - bedrock_runtime_client: Any - # Client: Use for model management - bedrock_client: Any - # Function Choice Behavior: this is primarily used to control the behavior of the kernel when - # the agent requests functions, and to configure the kernel function action group (i.e. via filters). - # When this is None, users won't be able to create a kernel function action groups. - function_choice_behavior: FunctionChoiceBehavior = Field(default=FunctionChoiceBehavior.Auto()) - # Agent Model: stores the agent information - agent_model: BedrockAgentModel - - def __init__( - self, - agent_model: BedrockAgentModel | dict[str, Any], - *, - function_choice_behavior: FunctionChoiceBehavior | None = None, - bedrock_runtime_client: Any | None = None, - bedrock_client: Any | None = None, - **kwargs, - ) -> None: - """Initialize the Bedrock Agent Base. - - Args: - agent_model: The Bedrock Agent Model. - function_choice_behavior: The function choice behavior. - bedrock_client: The Bedrock Client. - bedrock_runtime_client: The Bedrock Runtime Client. - kwargs: Additional keyword arguments. - """ - agent_model = ( - agent_model if isinstance(agent_model, BedrockAgentModel) else BedrockAgentModel.model_validate(agent_model) - ) - - args = { - "agent_model": agent_model, - "id": agent_model.agent_id, - "name": agent_model.agent_name, - "bedrock_runtime_client": bedrock_runtime_client or boto3.client("bedrock-agent-runtime"), - "bedrock_client": bedrock_client or boto3.client("bedrock-agent"), - **kwargs, - } - if function_choice_behavior: - args["function_choice_behavior"] = function_choice_behavior - - super().__init__(**args) - - @field_validator("function_choice_behavior", mode="after") - @classmethod - def validate_function_choice_behavior( - cls, function_choice_behavior: FunctionChoiceBehavior | None - ) -> FunctionChoiceBehavior | None: - """Validate the function choice behavior.""" - if function_choice_behavior and function_choice_behavior.type_ != FunctionChoiceType.AUTO: - # Users cannot specify REQUIRED or NONE for the Bedrock agents. 
- # Please note that the function choice behavior only control if the kernel will automatically - # execute the functions the agent requests. It does not control the behavior of the agent. - raise ValueError("Only FunctionChoiceType.AUTO is supported.") - return function_choice_behavior - - def __repr__(self): - """Return the string representation of the Bedrock Agent.""" - return f"{self.agent_model}" - - # region Agent Management - - async def prepare_agent_and_wait_until_prepared(self) -> None: - """Prepare the agent for use.""" - if not self.agent_model.agent_id: - raise ValueError("Agent does not exist. Please create the agent before preparing it.") - - try: - await run_in_executor( - None, - partial( - self.bedrock_client.prepare_agent, - agentId=self.agent_model.agent_id, - ), - ) - - # The agent will take some time to enter the PREPARING status after the prepare operation is called. - # We need to wait for the agent to reach the PREPARING status before we can proceed, otherwise we - # will return immediately if the agent is already in PREPARED status. - await self._wait_for_agent_status(BedrockAgentStatus.PREPARING) - # The agent will enter the PREPARED status when the preparation is complete. - await self._wait_for_agent_status(BedrockAgentStatus.PREPARED) - except ClientError as e: - logger.error(f"Failed to prepare agent {self.agent_model.agent_id}.") - raise e - - async def delete_agent(self, **kwargs) -> None: - """Delete an agent asynchronously.""" - if not self.agent_model.agent_id: - raise ValueError("Agent does not exist. Please create the agent before deleting it.") - - try: - await run_in_executor( - None, - partial( - self.bedrock_client.delete_agent, - agentId=self.agent_model.agent_id, - **kwargs, - ), - ) - - self.agent_model.agent_id = None - except ClientError as e: - logger.error(f"Failed to delete agent {self.agent_model.agent_id}.") - raise e - - async def _get_agent(self) -> None: - """Get an agent.""" - if not self.agent_model.agent_id: - raise ValueError("Agent does not exist. Please create the agent before getting it.") - - try: - response = await run_in_executor( - None, - partial( - self.bedrock_client.get_agent, - agentId=self.agent_model.agent_id, - ), - ) - - # Update the agent model - self.agent_model = BedrockAgentModel(**response["agent"]) - except ClientError as e: - logger.error(f"Failed to get agent {self.agent_model.agent_id}.") - raise e - - async def _wait_for_agent_status( - self, - status: BedrockAgentStatus, - interval: int = 2, - max_attempts: int = 5, - ) -> None: - """Wait for the agent to reach a specific status.""" - for _ in range(max_attempts): - await self._get_agent() - if self.agent_model.agent_status == status: - return - - await asyncio.sleep(interval) - - raise TimeoutError( - f"Agent did not reach status {status} within the specified time." - f" Current status: {self.agent_model.agent_status}" - ) - - # endregion Agent Management - - # region Action Group Management - async def create_code_interpreter_action_group(self, **kwargs) -> BedrockActionGroupModel: - """Create a code interpreter action group.""" - if not self.agent_model.agent_id: - raise ValueError("Agent does not exist. 
Please create the agent before creating an action group for it.") - - try: - response = await run_in_executor( - None, - partial( - self.bedrock_client.create_agent_action_group, - agentId=self.agent_model.agent_id, - agentVersion=self.agent_model.agent_version or "DRAFT", - actionGroupName=f"{self.agent_model.agent_name}_code_interpreter", - actionGroupState="ENABLED", - parentActionGroupSignature="AMAZON.CodeInterpreter", - **kwargs, - ), - ) - - await self.prepare_agent_and_wait_until_prepared() - - return BedrockActionGroupModel(**response["agentActionGroup"]) - except ClientError as e: - logger.error(f"Failed to create code interpreter action group for agent {self.agent_model.agent_id}.") - raise e - - async def create_user_input_action_group(self, **kwargs) -> BedrockActionGroupModel: - """Create a user input action group.""" - if not self.agent_model.agent_id: - raise ValueError("Agent does not exist. Please create the agent before creating an action group for it.") - - try: - response = await run_in_executor( - None, - partial( - self.bedrock_client.create_agent_action_group, - agentId=self.agent_model.agent_id, - agentVersion=self.agent_model.agent_version or "DRAFT", - actionGroupName=f"{self.agent_model.agent_name}_user_input", - actionGroupState="ENABLED", - parentActionGroupSignature="AMAZON.UserInput", - **kwargs, - ), - ) - - await self.prepare_agent_and_wait_until_prepared() - - return BedrockActionGroupModel(**response["agentActionGroup"]) - except ClientError as e: - logger.error(f"Failed to create user input action group for agent {self.agent_model.agent_id}.") - raise e - - async def create_kernel_function_action_group(self, **kwargs) -> BedrockActionGroupModel | None: - """Create a kernel function action group.""" - if not self.agent_model.agent_id: - raise ValueError("Agent does not exist. Please create the agent before creating an action group for it.") - - function_call_choice_config = self.function_choice_behavior.get_config(self.kernel) - if not function_call_choice_config.available_functions: - logger.warning("No available functions. Skipping kernel function action group creation.") - return None - - try: - response = await run_in_executor( - None, - partial( - self.bedrock_client.create_agent_action_group, - agentId=self.agent_model.agent_id, - agentVersion=self.agent_model.agent_version or "DRAFT", - actionGroupName=f"{self.agent_model.agent_name}_kernel_function", - actionGroupState="ENABLED", - actionGroupExecutor={"customControl": "RETURN_CONTROL"}, - functionSchema=kernel_function_to_bedrock_function_schema(function_call_choice_config), - **kwargs, - ), - ) - - await self.prepare_agent_and_wait_until_prepared() - - return BedrockActionGroupModel(**response["agentActionGroup"]) - except ClientError as e: - logger.error(f"Failed to create kernel function action group for agent {self.agent_model.agent_id}.") - raise e - - # endregion Action Group Management - - # region Knowledge Base Management - - async def associate_agent_knowledge_base(self, knowledge_base_id: str, **kwargs) -> dict[str, Any]: - """Associate an agent with a knowledge base.""" - if not self.agent_model.agent_id: - raise ValueError( - "Agent does not exist. Please create the agent before associating it with a knowledge base." 
- ) - - try: - response = await run_in_executor( - None, - partial( - self.bedrock_client.associate_agent_knowledge_base, - agentId=self.agent_model.agent_id, - agentVersion=self.agent_model.agent_version, - knowledgeBaseId=knowledge_base_id, - **kwargs, - ), - ) - - await self.prepare_agent_and_wait_until_prepared() - - return response - except ClientError as e: - logger.error( - f"Failed to associate agent {self.agent_model.agent_id} with knowledge base {knowledge_base_id}." - ) - raise e - - async def disassociate_agent_knowledge_base(self, knowledge_base_id: str, **kwargs) -> None: - """Disassociate an agent with a knowledge base.""" - if not self.agent_model.agent_id: - raise ValueError( - "Agent does not exist. Please create the agent before disassociating it with a knowledge base." - ) - - try: - response = await run_in_executor( - None, - partial( - self.bedrock_client.disassociate_agent_knowledge_base, - agentId=self.agent_model.agent_id, - agentVersion=self.agent_model.agent_version, - knowledgeBaseId=knowledge_base_id, - **kwargs, - ), - ) - - await self.prepare_agent_and_wait_until_prepared() - - return response - except ClientError as e: - logger.error( - f"Failed to disassociate agent {self.agent_model.agent_id} with knowledge base {knowledge_base_id}." - ) - raise e - - async def list_associated_agent_knowledge_bases(self, **kwargs) -> dict[str, Any]: - """List associated knowledge bases with an agent.""" - if not self.agent_model.agent_id: - raise ValueError("Agent does not exist. Please create the agent before listing associated knowledge bases.") - - try: - return await run_in_executor( - None, - partial( - self.bedrock_client.list_agent_knowledge_bases, - agentId=self.agent_model.agent_id, - agentVersion=self.agent_model.agent_version, - **kwargs, - ), - ) - except ClientError as e: - logger.error(f"Failed to list associated knowledge bases for agent {self.agent_model.agent_id}.") - raise e - - # endregion Knowledge Base Management - - async def _invoke_agent( - self, - session_id: str, - input_text: str, - agent_alias: str | None = None, - **kwargs, - ) -> dict[str, Any]: - """Invoke an agent.""" - if not self.agent_model.agent_id: - raise ValueError("Agent does not exist. Please create the agent before invoking it.") - - agent_alias = agent_alias or self.WORKING_DRAFT_AGENT_ALIAS - - try: - return await run_in_executor( - None, - partial( - self.bedrock_runtime_client.invoke_agent, - agentAliasId=agent_alias, - agentId=self.agent_model.agent_id, - sessionId=session_id, - inputText=input_text, - **kwargs, - ), - ) - except ClientError as e: - logger.error(f"Failed to invoke agent {self.agent_model.agent_id}.") - raise e diff --git a/python/semantic_kernel/agents/bedrock/bedrock_agent_settings.py b/python/semantic_kernel/agents/bedrock/bedrock_agent_settings.py deleted file mode 100644 index a3679478678b..000000000000 --- a/python/semantic_kernel/agents/bedrock/bedrock_agent_settings.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from typing import ClassVar - -from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental - - -@experimental -class BedrockAgentSettings(KernelBaseSettings): - """Amazon Bedrock Agent service settings. - - The settings are first loaded from environment variables with - the prefix 'BEDROCK_AGENT_'. - If the environment variables are not found, the settings can - be loaded from a .env file with the encoding 'utf-8'. 
- If the settings are not found in the .env file, the settings - are ignored; however, validation will fail alerting that the - settings are missing. - - Optional settings for prefix 'BEDROCK_' are: - - agent_resource_role_arn: str - The Amazon Bedrock agent resource role ARN. - https://docs.aws.amazon.com/bedrock/latest/userguide/getting-started.html - (Env var BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN) - - foundation_model: str - The Amazon Bedrock foundation model ID to use. - (Env var BEDROCK_AGENT_FOUNDATION_MODEL) - """ - - env_prefix: ClassVar[str] = "BEDROCK_AGENT_" - - agent_resource_role_arn: str - foundation_model: str diff --git a/python/semantic_kernel/agents/bedrock/models/__init__.py b/python/semantic_kernel/agents/bedrock/models/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/python/semantic_kernel/agents/bedrock/models/bedrock_action_group_model.py b/python/semantic_kernel/agents/bedrock/models/bedrock_action_group_model.py deleted file mode 100644 index 8f80b25297fa..000000000000 --- a/python/semantic_kernel/agents/bedrock/models/bedrock_action_group_model.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from pydantic import ConfigDict, Field - -from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental - - -@experimental -class BedrockActionGroupModel(KernelBaseModel): - """Bedrock Action Group Model. - - Model field definitions for the Amazon Bedrock Action Group Service: - https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/bedrock-agent/client/create_agent_action_group.html - """ - - # This model_config will merge with the KernelBaseModel.model_config - model_config = ConfigDict(extra="allow") - - action_group_id: str = Field(..., alias="actionGroupId", description="The unique identifier of the action group.") - action_group_name: str = Field(..., alias="actionGroupName", description="The name of the action group.") diff --git a/python/semantic_kernel/agents/bedrock/models/bedrock_agent_event_type.py b/python/semantic_kernel/agents/bedrock/models/bedrock_agent_event_type.py deleted file mode 100644 index a62e62d6b5ce..000000000000 --- a/python/semantic_kernel/agents/bedrock/models/bedrock_agent_event_type.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from enum import Enum - -from semantic_kernel.utils.feature_stage_decorator import experimental - - -@experimental -class BedrockAgentEventType(str, Enum): - """Bedrock Agent Event Type.""" - - # Contains the text response from the agent. - CHUNK = "chunk" - # Contains the trace information (reasoning process) from the agent. - TRACE = "trace" - # Contains the function call requests from the agent. - RETURN_CONTROL = "returnControl" - # Contains the files generated by the agent using the code interpreter. - FILES = "files" diff --git a/python/semantic_kernel/agents/bedrock/models/bedrock_agent_model.py b/python/semantic_kernel/agents/bedrock/models/bedrock_agent_model.py deleted file mode 100644 index dd73fb145e10..000000000000 --- a/python/semantic_kernel/agents/bedrock/models/bedrock_agent_model.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
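For reference, the `BedrockAgentSettings` class removed above resolves its two required values from environment variables with the `BEDROCK_AGENT_` prefix (or from a `.env` file). A minimal sketch, with placeholder values:

```python
# Illustrative only: configuring the removed BedrockAgentSettings via environment
# variables. The ARN and model id are placeholders, not values from this patch.
import os

from semantic_kernel.agents.bedrock.bedrock_agent_settings import BedrockAgentSettings

os.environ["BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN"] = "arn:aws:iam::123456789012:role/bedrock-agent-role"
os.environ["BEDROCK_AGENT_FOUNDATION_MODEL"] = "anthropic.claude-3-sonnet-20240229-v1:0"

settings = BedrockAgentSettings.create()
print(settings.agent_resource_role_arn, settings.foundation_model)
```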
- -from pydantic import ConfigDict, Field - -from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental - - -@experimental -class BedrockAgentModel(KernelBaseModel): - """Bedrock Agent Model. - - Model field definitions for the Amazon Bedrock Agent Service: - https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/bedrock-agent/client/create_agent.html - """ - - # This model_config will merge with the KernelBaseModel.model_config - model_config = ConfigDict(extra="allow") - - agent_id: str | None = Field(default=None, alias="agentId", description="The unique identifier of the agent.") - agent_name: str | None = Field(default=None, alias="agentName", description="The name of the agent.") - agent_version: str | None = Field(default=None, alias="agentVersion", description="The version of the agent.") - foundation_model: str | None = Field(default=None, alias="foundationModel", description="The foundation model.") - agent_status: str | None = Field(default=None, alias="agentStatus", description="The status of the agent.") diff --git a/python/semantic_kernel/agents/bedrock/models/bedrock_agent_status.py b/python/semantic_kernel/agents/bedrock/models/bedrock_agent_status.py deleted file mode 100644 index 321f09c63752..000000000000 --- a/python/semantic_kernel/agents/bedrock/models/bedrock_agent_status.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - - -from enum import Enum - -from semantic_kernel.utils.feature_stage_decorator import experimental - - -@experimental -class BedrockAgentStatus(str, Enum): - """Bedrock Agent Status. - - https://docs.aws.amazon.com/bedrock/latest/APIReference/API_agent_PrepareAgent.html#API_agent_PrepareAgent_ResponseElements - """ - - CREATING = "CREATING" - PREPARING = "PREPARING" - PREPARED = "PREPARED" - NOT_PREPARED = "NOT_PREPARED" - DELETING = "DELETING" - FAILED = "FAILED" - VERSIONING = "VERSIONING" - UPDATING = "UPDATING" diff --git a/python/semantic_kernel/agents/channels/agent_channel.py b/python/semantic_kernel/agents/channels/agent_channel.py index a3a59cee579f..b7a56d1f4a32 100644 --- a/python/semantic_kernel/agents/channels/agent_channel.py +++ b/python/semantic_kernel/agents/channels/agent_channel.py @@ -2,16 +2,16 @@ from abc import ABC, abstractmethod from collections.abc import AsyncIterable -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.agents.agent import Agent from semantic_kernel.contents.chat_message_content import ChatMessageContent -@experimental +@experimental_class class AgentChannel(ABC): """Defines the communication protocol for a particular Agent type. @@ -36,13 +36,11 @@ async def receive( def invoke( self, agent: "Agent", - **kwargs: Any, ) -> AsyncIterable[tuple[bool, "ChatMessageContent"]]: """Perform a discrete incremental interaction between a single Agent and AgentChat. Args: agent: The agent to interact with. - kwargs: The keyword arguments. Returns: An async iterable of a bool, ChatMessageContent. 
@@ -53,15 +51,13 @@ def invoke( def invoke_stream( self, agent: "Agent", - messages: "list[ChatMessageContent]", - **kwargs: Any, + history: "list[ChatMessageContent]", ) -> AsyncIterable["ChatMessageContent"]: """Perform a discrete incremental stream interaction between a single Agent and AgentChat. Args: agent: The agent to interact with. - messages: The history of messages in the conversation. - kwargs: The keyword arguments. + history: The history of messages in the conversation. Returns: An async iterable ChatMessageContent. diff --git a/python/semantic_kernel/agents/channels/bedrock_agent_channel.py b/python/semantic_kernel/agents/channels/bedrock_agent_channel.py deleted file mode 100644 index 748496b52a14..000000000000 --- a/python/semantic_kernel/agents/channels/bedrock_agent_channel.py +++ /dev/null @@ -1,213 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import logging -import sys -from collections.abc import AsyncIterable -from typing import Any, ClassVar - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from semantic_kernel.agents.agent import Agent -from semantic_kernel.agents.channels.agent_channel import AgentChannel -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions.agent_exceptions import AgentChatException -from semantic_kernel.utils.feature_stage_decorator import experimental - -logger = logging.getLogger(__name__) - - -@experimental -class BedrockAgentChannel(AgentChannel, ChatHistory): - """An AgentChannel for a BedrockAgent that is based on a ChatHistory. - - This channel allows Bedrock agents to interact with other types of agents in Semantic Kernel in an AgentGroupChat. - However, since Bedrock agents require the chat history to alternate between user and agent messages, this channel - will preprocess the chat history to ensure that it meets the requirements of the Bedrock agent. When an invalid - pattern is detected, the channel will insert a placeholder user or assistant message to ensure that the chat history - alternates between user and agent messages. - """ - - MESSAGE_PLACEHOLDER: ClassVar[str] = "[SILENCE]" - - @override - async def invoke(self, agent: "Agent", **kwargs: Any) -> AsyncIterable[tuple[bool, ChatMessageContent]]: - """Perform a discrete incremental interaction between a single Agent and AgentChat. - - Args: - agent: The agent to interact with. - kwargs: Additional keyword arguments. - - Returns: - An async iterable of ChatMessageContent with a boolean indicating if the - message should be visible external to the agent. - """ - from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent - - if not isinstance(agent, BedrockAgent): - raise AgentChatException(f"Agent is not of the expected type {type(BedrockAgent)}.") - if not self.messages: - # This is not supposed to happen, as the channel won't get invoked - # before it has received messages. This is just extra safety. 
- raise AgentChatException("No chat history available.") - - # Preprocess chat history - self._ensure_history_alternates() - self._ensure_last_message_is_user() - - session_id = BedrockAgent.create_session_id() - async for message in agent.invoke( - session_id, - self.messages[-1].content, - sessionState=self._parse_chat_history_to_session_state(), - ): - self.messages.append(message) - # All messages from Bedrock agents are user facing, i.e., function calls are not returned as messages - yield True, message - - @override - async def invoke_stream( - self, - agent: "Agent", - messages: list[ChatMessageContent], - **kwargs: Any, - ) -> AsyncIterable[ChatMessageContent]: - """Perform a streaming interaction between a single Agent and AgentChat. - - Args: - agent: The agent to interact with. - messages: The history of messages in the conversation. - kwargs: Additional keyword arguments. - - Returns: - An async iterable of ChatMessageContent. - """ - from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent - - if not isinstance(agent, BedrockAgent): - raise AgentChatException(f"Agent is not of the expected type {type(BedrockAgent)}.") - if not self.messages: - raise AgentChatException("No chat history available.") - - # Preprocess chat history - self._ensure_history_alternates() - self._ensure_last_message_is_user() - - session_id = BedrockAgent.create_session_id() - full_message: list[StreamingChatMessageContent] = [] - async for message_chunk in agent.invoke_stream( - session_id, - self.messages[-1].content, - sessionState=self._parse_chat_history_to_session_state(), - ): - yield message_chunk - full_message.append(message_chunk) - - messages.append( - ChatMessageContent( - role=AuthorRole.ASSISTANT, - content="".join([message.content for message in full_message]), - name=agent.name, - inner_content=full_message, - ai_model_id=agent.agent_model.foundation_model, - ) - ) - - @override - async def receive( - self, - history: list[ChatMessageContent], - ) -> None: - """Receive the conversation messages. - - Bedrock requires the chat history to alternate between user and agent messages. - Thus, when receiving the history, the message sequence will be mutated by inserting - empty agent or user messages as needed. - - Args: - history: The history of messages in the conversation. - """ - for incoming_message in history: - if not self.messages or self.messages[-1].role != incoming_message.role: - self.messages.append(incoming_message) - else: - self.messages.append( - ChatMessageContent( - role=AuthorRole.ASSISTANT if incoming_message.role == AuthorRole.USER else AuthorRole.USER, - content=self.MESSAGE_PLACEHOLDER, - ) - ) - self.messages.append(incoming_message) - - @override - async def get_history( # type: ignore - self, - ) -> AsyncIterable[ChatMessageContent]: - """Retrieve the message history specific to this channel. - - Returns: - An async iterable of ChatMessageContent. 
- """ - for message in reversed(self.messages): - yield message - - @override - async def reset(self) -> None: - """Reset the channel state.""" - self.messages.clear() - - # region chat history preprocessing and parsing - - def _ensure_history_alternates(self): - """Ensure that the chat history alternates between user and agent messages.""" - if not self.messages or len(self.messages) == 1: - return - - current_index = 1 - while current_index < len(self.messages): - if self.messages[current_index].role == self.messages[current_index - 1].role: - self.messages.insert( - current_index, - ChatMessageContent( - role=AuthorRole.ASSISTANT - if self.messages[current_index].role == AuthorRole.USER - else AuthorRole.USER, - content=self.MESSAGE_PLACEHOLDER, - ), - ) - current_index += 2 - else: - current_index += 1 - - def _ensure_last_message_is_user(self): - """Ensure that the last message in the chat history is a user message.""" - if self.messages and self.messages[-1].role == AuthorRole.ASSISTANT: - self.messages.append( - ChatMessageContent( - role=AuthorRole.USER, - content=self.MESSAGE_PLACEHOLDER, - ) - ) - - def _parse_chat_history_to_session_state(self) -> dict[str, Any]: - """Parse the chat history to a session state.""" - session_state: dict[str, Any] = {"conversationHistory": {"messages": []}} - if len(self.messages) > 1: - # We don't take the last message as it needs to be sent separately in another parameter - for message in self.messages[:-1]: - if message.role not in [AuthorRole.USER, AuthorRole.ASSISTANT]: - logger.debug(f"Skipping message with unsupported role: {message}") - continue - session_state["conversationHistory"]["messages"].append({ - "content": [{"text": message.content}], - "role": message.role.value, - }) - - return session_state - - # endregion diff --git a/python/semantic_kernel/agents/channels/chat_history_channel.py b/python/semantic_kernel/agents/channels/chat_history_channel.py index 3f330415df25..057c005b3d3d 100644 --- a/python/semantic_kernel/agents/channels/chat_history_channel.py +++ b/python/semantic_kernel/agents/channels/chat_history_channel.py @@ -3,59 +3,70 @@ import sys from collections import deque from collections.abc import AsyncIterable -from copy import deepcopy - -from semantic_kernel.contents.image_content import ImageContent -from semantic_kernel.contents.streaming_text_content import StreamingTextContent -from semantic_kernel.contents.text_content import TextContent if sys.version_info >= (3, 12): from typing import override # pragma: no cover else: from typing_extensions import override # pragma: no cover -from typing import TYPE_CHECKING, Any, ClassVar, Deque +from abc import abstractmethod +from typing import TYPE_CHECKING, Deque, Protocol, runtime_checkable from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.contents import ChatMessageContent from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.exceptions import ServiceInvalidTypeError +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.agents.agent import Agent from semantic_kernel.contents.chat_history import ChatHistory - from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent 
-@experimental +@experimental_class +@runtime_checkable +class ChatHistoryAgentProtocol(Protocol): + """Contract for an agent that utilizes a ChatHistoryChannel.""" + + @abstractmethod + def invoke(self, history: "ChatHistory") -> AsyncIterable["ChatMessageContent"]: + """Invoke the chat history agent protocol.""" + ... + + @abstractmethod + def invoke_stream(self, history: "ChatHistory") -> AsyncIterable["ChatMessageContent"]: + """Invoke the chat history agent protocol in streaming mode.""" + ... + + +@experimental_class class ChatHistoryChannel(AgentChannel, ChatHistory): """An AgentChannel specialization for that acts upon a ChatHistoryHandler.""" - ALLOWED_CONTENT_TYPES: ClassVar[tuple[type, ...]] = ( - ImageContent, - FunctionCallContent, - FunctionResultContent, - StreamingTextContent, - TextContent, - ) - @override async def invoke( self, agent: "Agent", - **kwargs: Any, ) -> AsyncIterable[tuple[bool, ChatMessageContent]]: """Perform a discrete incremental interaction between a single Agent and AgentChat. Args: agent: The agent to interact with. - kwargs: The keyword arguments. Returns: An async iterable of ChatMessageContent. """ + if not isinstance(agent, ChatHistoryAgentProtocol): + id = getattr(agent, "id", "") + raise ServiceInvalidTypeError( + f"Invalid channel binding for agent with id: `{id}` with name: ({type(agent).__name__})" + ) + + # pre-process history reduction + await agent.reduce_history(self) + message_count = len(self.messages) mutated_history = set() message_queue: Deque[ChatMessageContent] = deque() @@ -92,18 +103,28 @@ async def invoke( @override async def invoke_stream( - self, agent: "Agent", messages: list[ChatMessageContent], **kwargs: Any - ) -> AsyncIterable["StreamingChatMessageContent"]: + self, + agent: "Agent", + messages: list[ChatMessageContent], + ) -> AsyncIterable[ChatMessageContent]: """Perform a discrete incremental stream interaction between a single Agent and AgentChat. Args: agent: The agent to interact with. messages: The history of messages in the conversation. - kwargs: The keyword arguments Returns: - An async iterable of ChatMessageContent. + An async iterable of bool, StreamingChatMessageContent. """ + if not isinstance(agent, ChatHistoryAgentProtocol): + id = getattr(agent, "id", "") + raise ServiceInvalidTypeError( + f"Invalid channel binding for agent with id: `{id}` with name: ({type(agent).__name__})" + ) + + # pre-process history reduction + await agent.reduce_history(self) + message_count = len(self.messages) async for response_message in agent.invoke_stream(self): @@ -127,23 +148,10 @@ async def receive( ) -> None: """Receive the conversation messages. - Do not include messages that only contain file references. - Args: history: The history of messages in the conversation. 
""" - filtered_history: list[ChatMessageContent] = [] - for message in history: - new_message = deepcopy(message) - if new_message.items is None: - new_message.items = [] - allowed_items = [item for item in new_message.items if isinstance(item, self.ALLOWED_CONTENT_TYPES)] - if not allowed_items: - continue - new_message.items.clear() - new_message.items.extend(allowed_items) - filtered_history.append(new_message) - self.messages.extend(filtered_history) + self.messages.extend(history) @override async def get_history( # type: ignore diff --git a/python/semantic_kernel/agents/channels/open_ai_assistant_channel.py b/python/semantic_kernel/agents/channels/open_ai_assistant_channel.py index a2f74fa05d51..7ba31b598827 100644 --- a/python/semantic_kernel/agents/channels/open_ai_assistant_channel.py +++ b/python/semantic_kernel/agents/channels/open_ai_assistant_channel.py @@ -13,17 +13,16 @@ from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.agents.open_ai.assistant_content_generation import create_chat_message, generate_message_content -from semantic_kernel.agents.open_ai.assistant_thread_actions import AssistantThreadActions from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.exceptions.agent_exceptions import AgentChatException -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.agents.agent import Agent -@experimental +@experimental_class class OpenAIAssistantChannel(AgentChannel): """OpenAI Assistant Channel.""" @@ -45,46 +44,48 @@ async def receive(self, history: list["ChatMessageContent"]) -> None: await create_chat_message(self.client, self.thread_id, message) @override - async def invoke(self, agent: "Agent", **kwargs: Any) -> AsyncIterable[tuple[bool, "ChatMessageContent"]]: + async def invoke(self, agent: "Agent") -> AsyncIterable[tuple[bool, "ChatMessageContent"]]: """Invoke the agent. Args: agent: The agent to invoke. - kwargs: The keyword arguments. Yields: tuple[bool, ChatMessageContent]: The conversation messages. """ - from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent + from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase - if not isinstance(agent, OpenAIAssistantAgent): - raise AgentChatException(f"Agent is not of the expected type {type(OpenAIAssistantAgent)}.") + if not isinstance(agent, OpenAIAssistantBase): + raise AgentChatException(f"Agent is not of the expected type {type(OpenAIAssistantBase)}.") - async for is_visible, message in AssistantThreadActions.invoke(agent=agent, thread_id=self.thread_id, **kwargs): + if agent._is_deleted: + raise AgentChatException("Agent is deleted.") + + async for is_visible, message in agent._invoke_internal(thread_id=self.thread_id): yield is_visible, message @override async def invoke_stream( - self, agent: "Agent", messages: list[ChatMessageContent], **kwargs: Any + self, agent: "Agent", messages: list[ChatMessageContent] ) -> AsyncIterable["ChatMessageContent"]: """Invoke the agent stream. Args: agent: The agent to invoke. messages: The conversation messages. - kwargs: The keyword arguments. Yields: tuple[bool, StreamingChatMessageContent]: The conversation messages. 
""" - from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent + from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase + + if not isinstance(agent, OpenAIAssistantBase): + raise AgentChatException(f"Agent is not of the expected type {type(OpenAIAssistantBase)}.") - if not isinstance(agent, OpenAIAssistantAgent): - raise AgentChatException(f"Agent is not of the expected type {type(OpenAIAssistantAgent)}.") + if agent._is_deleted: + raise AgentChatException("Agent is deleted.") - async for message in AssistantThreadActions.invoke_stream( - agent=agent, thread_id=self.thread_id, messages=messages, **kwargs - ): + async for message in agent._invoke_internal_stream(thread_id=self.thread_id, messages=messages): yield message @override diff --git a/python/semantic_kernel/agents/chat_completion/chat_completion_agent.py b/python/semantic_kernel/agents/chat_completion/chat_completion_agent.py index 0ce6b8c58946..cbdb218ad616 100644 --- a/python/semantic_kernel/agents/chat_completion/chat_completion_agent.py +++ b/python/semantic_kernel/agents/chat_completion/chat_completion_agent.py @@ -1,38 +1,23 @@ # Copyright (c) Microsoft. All rights reserved. import logging -import sys from collections.abc import AsyncGenerator, AsyncIterable from typing import TYPE_CHECKING, Any, ClassVar -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from pydantic import Field, model_validator - from semantic_kernel.agents import Agent from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.agents.channels.chat_history_channel import ChatHistoryChannel from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.const import DEFAULT_SERVICE_NAME from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.history_reducer.chat_history_reducer import ChatHistoryReducer from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions import KernelServiceNotFoundError -from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException, AgentInvokeException -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.functions.kernel_function import TEMPLATE_FORMAT_MAP -from semantic_kernel.functions.kernel_plugin import KernelPlugin -from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig -from semantic_kernel.utils.feature_stage_decorator import release_candidate -from semantic_kernel.utils.telemetry.agent_diagnostics.decorators import ( - trace_agent_get_response, - trace_agent_invocation, -) +from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.telemetry.agent_diagnostics.decorators import trace_agent_invocation if TYPE_CHECKING: from semantic_kernel.kernel import Kernel @@ -40,53 +25,52 @@ logger: logging.Logger = logging.getLogger(__name__) -@release_candidate +@experimental_class class ChatCompletionAgent(Agent): - """A Chat 
Completion Agent based on ChatCompletionClientBase.""" + """A KernelAgent specialization based on ChatCompletionClientBase. + + Note: enable `function_choice_behavior` on the PromptExecutionSettings to enable function + choice behavior which allows the kernel to utilize plugins and functions registered in + the kernel. + """ - function_choice_behavior: FunctionChoiceBehavior | None = Field( - default_factory=lambda: FunctionChoiceBehavior.Auto() - ) - channel_type: ClassVar[type[AgentChannel] | None] = ChatHistoryChannel - service: ChatCompletionClientBase | None = Field(default=None, exclude=True) + service_id: str + execution_settings: PromptExecutionSettings | None = None + channel_type: ClassVar[type[AgentChannel]] = ChatHistoryChannel def __init__( self, - *, - arguments: KernelArguments | None = None, - description: str | None = None, - function_choice_behavior: FunctionChoiceBehavior | None = None, - id: str | None = None, - instructions: str | None = None, + service_id: str | None = None, kernel: "Kernel | None" = None, name: str | None = None, - plugins: list[KernelPlugin | object] | dict[str, KernelPlugin | object] | None = None, - prompt_template_config: PromptTemplateConfig | None = None, - service: ChatCompletionClientBase | None = None, + id: str | None = None, + description: str | None = None, + instructions: str | None = None, + execution_settings: PromptExecutionSettings | None = None, + history_reducer: ChatHistoryReducer | None = None, ) -> None: """Initialize a new instance of ChatCompletionAgent. Args: - arguments: The kernel arguments for the agent. Invoke method arguments take precedence over - the arguments provided here. - description: The description of the agent. - function_choice_behavior: The function choice behavior to determine how and which plugins are - advertised to the model. - kernel: The kernel instance. If both a kernel and a service are provided, the service will take precedence - if they share the same service_id or ai_model_id. Otherwise if separate, the first AI service - registered on the kernel will be used. - id: The unique identifier for the agent. If not provided, + service_id: The service id for the chat completion service. (optional) If not provided, + the default service name `default` will be used. + kernel: The kernel instance. (optional) + name: The name of the agent. (optional) + id: The unique identifier for the agent. (optional) If not provided, a unique GUID will be generated. - instructions: The instructions for the agent. - name: The name of the agent. - plugins: The plugins for the agent. If plugins are included along with a kernel, any plugins - that already exist in the kernel will be overwritten. - prompt_template_config: The prompt template configuration for the agent. - service: The chat completion service instance. If a kernel is provided with the same service_id or - `ai_model_id`, the service will take precedence. + description: The description of the agent. (optional) + instructions: The instructions for the agent. (optional) + execution_settings: The execution settings for the agent. (optional) + history_reducer: The history reducer for the agent. 
(optional) """ + if not service_id: + service_id = DEFAULT_SERVICE_NAME + args: dict[str, Any] = { + "service_id": service_id, "description": description, + "instructions": instructions, + "execution_settings": execution_settings, } if name is not None: args["name"] = name @@ -94,278 +78,139 @@ def __init__( args["id"] = id if kernel is not None: args["kernel"] = kernel - if arguments is not None: - args["arguments"] = arguments - - if instructions and prompt_template_config and instructions != prompt_template_config.template: - logger.info( - f"Both `instructions` ({instructions}) and `prompt_template_config` " - f"({prompt_template_config.template}) were provided. Using template in `prompt_template_config` " - "and ignoring `instructions`." - ) - - if plugins is not None: - args["plugins"] = plugins - - if function_choice_behavior is not None: - args["function_choice_behavior"] = function_choice_behavior - - if service is not None: - args["service"] = service - - if instructions is not None: - args["instructions"] = instructions - if prompt_template_config is not None: - args["prompt_template"] = TEMPLATE_FORMAT_MAP[prompt_template_config.template_format]( - prompt_template_config=prompt_template_config - ) - if prompt_template_config.template is not None: - # Use the template from the prompt_template_config if it is provided - args["instructions"] = prompt_template_config.template + if history_reducer is not None: + args["history_reducer"] = history_reducer super().__init__(**args) - @model_validator(mode="after") - def configure_service(self) -> "ChatCompletionAgent": - """Configure the service used by the ChatCompletionAgent.""" - if self.service is None: - return self - if not isinstance(self.service, ChatCompletionClientBase): - raise AgentInitializationException( - f"Service provided for ChatCompletionAgent is not an instance of ChatCompletionClientBase. " - f"Service: {type(self.service)}" - ) - self.kernel.add_service(self.service, overwrite=True) - return self - - @trace_agent_get_response - @override - async def get_response( - self, - history: ChatHistory, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - **kwargs: Any, - ) -> ChatMessageContent: - """Get a response from the agent. + @trace_agent_invocation + async def invoke(self, history: ChatHistory) -> AsyncIterable[ChatMessageContent]: + """Invoke the chat history handler. Args: + kernel: The kernel instance. history: The chat history. - arguments: The kernel arguments. (optional) - kernel: The kernel instance. (optional) - kwargs: The keyword arguments. (optional) Returns: - A chat message content. + An async iterable of ChatMessageContent. 
""" - responses: list[ChatMessageContent] = [] - async for response in self._inner_invoke(history, arguments, kernel, **kwargs): - responses.append(response) + # Get the chat completion service + chat_completion_service = self.kernel.get_service(service_id=self.service_id, type=ChatCompletionClientBase) - if not responses: - raise AgentInvokeException("No response from agent.") + if not chat_completion_service: + raise KernelServiceNotFoundError(f"Chat completion service not found with service_id: {self.service_id}") - return responses[0] + assert isinstance(chat_completion_service, ChatCompletionClientBase) # nosec - @trace_agent_invocation - @override - async def invoke( - self, - history: ChatHistory, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - **kwargs: Any, - ) -> AsyncIterable[ChatMessageContent]: - """Invoke the chat history handler. + settings = ( + self.execution_settings + or self.kernel.get_prompt_execution_settings_from_service_id(self.service_id) + or chat_completion_service.instantiate_prompt_execution_settings( + service_id=self.service_id, extension_data={"ai_model_id": chat_completion_service.ai_model_id} + ) + ) - Args: - history: The chat history. - arguments: The kernel arguments. - kernel: The kernel instance. - kwargs: The keyword arguments. + chat = self._setup_agent_chat_history(history) - Returns: - An async iterable of ChatMessageContent. - """ - async for response in self._inner_invoke(history, arguments, kernel, **kwargs): - yield response + message_count = len(chat) + + logger.debug(f"[{type(self).__name__}] Invoking {type(chat_completion_service).__name__}.") + + messages = await chat_completion_service.get_chat_message_contents( + chat_history=chat, + settings=settings, + kernel=self.kernel, + ) + + logger.info( + f"[{type(self).__name__}] Invoked {type(chat_completion_service).__name__} " + f"with message count: {message_count}." + ) + + # Capture mutated messages related function calling / tools + for message_index in range(message_count, len(chat)): + message = chat[message_index] + message.name = self.name + history.add_message(message) + + for message in messages: + message.name = self.name + yield message @trace_agent_invocation - @override - async def invoke_stream( - self, - history: ChatHistory, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - **kwargs: Any, - ) -> AsyncIterable[StreamingChatMessageContent]: + async def invoke_stream(self, history: ChatHistory) -> AsyncIterable[StreamingChatMessageContent]: """Invoke the chat history handler in streaming mode. Args: - history: The chat history. - arguments: The kernel arguments. kernel: The kernel instance. - kwargs: The keyword arguments. + history: The chat history. Returns: An async generator of StreamingChatMessageContent. 
""" - if arguments is None: - arguments = KernelArguments(**kwargs) - else: - arguments.update(kwargs) - - # Add the chat history to the args in the event that it is needed for prompt template configuration - if "chat_history" not in arguments: - arguments["chat_history"] = history + # Get the chat completion service + chat_completion_service = self.kernel.get_service(service_id=self.service_id, type=ChatCompletionClientBase) - kernel = kernel or self.kernel - arguments = self._merge_arguments(arguments) - - chat_completion_service, settings = await self._get_chat_completion_service_and_settings( - kernel=kernel, arguments=arguments - ) + if not chat_completion_service: + raise KernelServiceNotFoundError(f"Chat completion service not found with service_id: {self.service_id}") - # If the user hasn't provided a function choice behavior, use the agent's default. - if settings.function_choice_behavior is None: - settings.function_choice_behavior = self.function_choice_behavior + assert isinstance(chat_completion_service, ChatCompletionClientBase) # nosec - agent_chat_history = await self._prepare_agent_chat_history( - history=history, - kernel=kernel, - arguments=arguments, + settings = ( + self.execution_settings + or self.kernel.get_prompt_execution_settings_from_service_id(self.service_id) + or chat_completion_service.instantiate_prompt_execution_settings( + service_id=self.service_id, extension_data={"ai_model_id": chat_completion_service.ai_model_id} + ) ) - # Remove the chat history from the arguments, potentially used for the prompt, - # to avoid passing it to the service - arguments.pop("chat_history", None) + chat = self._setup_agent_chat_history(history) - message_count_before_completion = len(agent_chat_history) + message_count = len(chat) logger.debug(f"[{type(self).__name__}] Invoking {type(chat_completion_service).__name__}.") - responses: AsyncGenerator[list[StreamingChatMessageContent], Any] = ( + messages: AsyncGenerator[list[StreamingChatMessageContent], Any] = ( chat_completion_service.get_streaming_chat_message_contents( - chat_history=agent_chat_history, + chat_history=chat, settings=settings, - kernel=kernel, - arguments=arguments, + kernel=self.kernel, ) ) - logger.debug( + logger.info( f"[{type(self).__name__}] Invoked {type(chat_completion_service).__name__} " - f"with message count: {message_count_before_completion}." + f"with message count: {message_count}." 
) role = None - response_builder: list[str] = [] - async for response_list in responses: - for response in response_list: - role = response.role - response.name = self.name - response_builder.append(response.content) - yield response - - self._capture_mutated_messages(history, agent_chat_history, message_count_before_completion) + message_builder: list[str] = [] + async for message_list in messages: + for message in message_list: + role = message.role + message.name = self.name + message_builder.append(message.content) + yield message + + # Capture mutated messages related function calling / tools + for message_index in range(message_count, len(chat)): + message = chat[message_index] # type: ignore + message.name = self.name + history.add_message(message) + if role != AuthorRole.TOOL: history.add_message( ChatMessageContent( - role=role if role else AuthorRole.ASSISTANT, content="".join(response_builder), name=self.name + role=role if role else AuthorRole.ASSISTANT, content="".join(message_builder), name=self.name ) ) - async def _inner_invoke( - self, - history: ChatHistory, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - **kwargs: Any, - ) -> AsyncIterable[ChatMessageContent]: - """Helper method to invoke the agent with a chat history in non-streaming mode.""" - if arguments is None: - arguments = KernelArguments(**kwargs) - else: - arguments.update(kwargs) - - # Add the chat history to the args in the event that it is needed for prompt template configuration - if "chat_history" not in arguments: - arguments["chat_history"] = history - - kernel = kernel or self.kernel - arguments = self._merge_arguments(arguments) - - chat_completion_service, settings = await self._get_chat_completion_service_and_settings( - kernel=kernel, arguments=arguments - ) + def _setup_agent_chat_history(self, history: ChatHistory) -> ChatHistory: + """Setup the agent chat history.""" + chat = [] - # If the user hasn't provided a function choice behavior, use the agent's default. - if settings.function_choice_behavior is None: - settings.function_choice_behavior = self.function_choice_behavior + if self.instructions is not None: + chat.append(ChatMessageContent(role=AuthorRole.SYSTEM, content=self.instructions, name=self.name)) - agent_chat_history = await self._prepare_agent_chat_history( - history=history, - kernel=kernel, - arguments=arguments, - ) + chat.extend(history.messages if history.messages else []) - # Remove the chat history from the arguments, potentially used for the prompt, - # to avoid passing it to the service - arguments.pop("chat_history", None) - - message_count_before_completion = len(agent_chat_history) - - logger.debug(f"[{type(self).__name__}] Invoking {type(chat_completion_service).__name__}.") - - responses = await chat_completion_service.get_chat_message_contents( - chat_history=agent_chat_history, - settings=settings, - kernel=kernel, - arguments=arguments, - ) - - logger.debug( - f"[{type(self).__name__}] Invoked {type(chat_completion_service).__name__} " - f"with message count: {message_count_before_completion}." 
- ) - - self._capture_mutated_messages(history, agent_chat_history, message_count_before_completion) - - for response in responses: - response.name = self.name - yield response - - async def _prepare_agent_chat_history( - self, history: ChatHistory, kernel: "Kernel", arguments: KernelArguments - ) -> ChatHistory: - """Prepare the agent chat history from the input history by adding the formatted instructions.""" - formatted_instructions = await self.format_instructions(kernel, arguments) - messages = [] - if formatted_instructions: - messages.append(ChatMessageContent(role=AuthorRole.SYSTEM, content=formatted_instructions, name=self.name)) - if history.messages: - messages.extend(history.messages) - - return ChatHistory(messages=messages) - - async def _get_chat_completion_service_and_settings( - self, kernel: "Kernel", arguments: KernelArguments - ) -> tuple[ChatCompletionClientBase, PromptExecutionSettings]: - """Get the chat completion service and settings.""" - chat_completion_service, settings = kernel.select_ai_service(arguments=arguments, type=ChatCompletionClientBase) - - if not chat_completion_service: - raise KernelServiceNotFoundError( - "Chat completion service not found. Check your service or kernel configuration." - ) - - assert isinstance(chat_completion_service, ChatCompletionClientBase) # nosec - assert settings is not None # nosec - - return chat_completion_service, settings - - def _capture_mutated_messages(self, caller_chat_history: ChatHistory, agent_chat_history: ChatHistory, start: int): - """Capture mutated messages related function calling/tools.""" - for message_index in range(start, len(agent_chat_history)): - message = agent_chat_history[message_index] # type: ignore - message.name = self.name - caller_chat_history.add_message(message) + return ChatHistory(messages=chat) diff --git a/python/semantic_kernel/agents/group_chat/agent_chat.py b/python/semantic_kernel/agents/group_chat/agent_chat.py index 65ad9d737905..294f695cbb1b 100644 --- a/python/semantic_kernel/agents/group_chat/agent_chat.py +++ b/python/semantic_kernel/agents/group_chat/agent_chat.py @@ -3,7 +3,7 @@ import asyncio import logging import threading -from collections.abc import AsyncIterable +from collections.abc import AsyncGenerator, AsyncIterable from pydantic import Field, PrivateAttr @@ -16,12 +16,12 @@ from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions.agent_exceptions import AgentChatException from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class AgentChat(KernelBaseModel): """A base class chat interface for agents.""" @@ -54,19 +54,17 @@ def invoke(self, agent: Agent | None = None, is_joining: bool = True) -> AsyncIt """Invoke the agent asynchronously.""" raise NotImplementedError("Subclasses should implement this method") - async def get_messages_in_descending_order(self) -> AsyncIterable[ChatMessageContent]: + async def get_messages_in_descending_order(self): """Get messages in descending order asynchronously.""" for index in range(len(self.history.messages) - 1, -1, -1): yield self.history.messages[index] await asyncio.sleep(0) # Yield control to the event loop - async def get_chat_messages(self, agent: "Agent | None" = None) -> AsyncIterable[ChatMessageContent]: + async def 
get_chat_messages(self, agent: "Agent | None" = None) -> AsyncGenerator[ChatMessageContent, None]: """Get chat messages asynchronously.""" self.set_activity_or_throw() logger.info("Getting chat messages") - - messages: AsyncIterable[ChatMessageContent] | None = None try: if agent is None: messages = self.get_messages_in_descending_order() @@ -97,11 +95,8 @@ def _get_agent_hash(self, agent: Agent): return hash_value - async def add_chat_message(self, message: str | ChatMessageContent) -> None: + async def add_chat_message(self, message: ChatMessageContent) -> None: """Add a chat message.""" - if isinstance(message, str): - message = ChatMessageContent(role=AuthorRole.USER, content=message) - await self.add_chat_messages([message]) async def add_chat_messages(self, messages: list[ChatMessageContent]) -> None: diff --git a/python/semantic_kernel/agents/group_chat/agent_chat_utils.py b/python/semantic_kernel/agents/group_chat/agent_chat_utils.py index 864863cff2c3..0162bb94fe33 100644 --- a/python/semantic_kernel/agents/group_chat/agent_chat_utils.py +++ b/python/semantic_kernel/agents/group_chat/agent_chat_utils.py @@ -5,10 +5,10 @@ from collections.abc import Iterable from semantic_kernel.exceptions.agent_exceptions import AgentExecutionException -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class KeyEncoder: """A class for encoding keys.""" diff --git a/python/semantic_kernel/agents/group_chat/agent_group_chat.py b/python/semantic_kernel/agents/group_chat/agent_group_chat.py index 00ed794018a2..38d0d73af0ab 100644 --- a/python/semantic_kernel/agents/group_chat/agent_group_chat.py +++ b/python/semantic_kernel/agents/group_chat/agent_group_chat.py @@ -2,8 +2,7 @@ import logging from collections.abc import AsyncIterable -from copy import deepcopy -from typing import TYPE_CHECKING, Any, cast +from typing import Any from pydantic import Field @@ -15,18 +14,14 @@ from semantic_kernel.agents.strategies.selection.selection_strategy import SelectionStrategy from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.history_reducer.chat_history_reducer import ChatHistoryReducer from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions.agent_exceptions import AgentChatException -from semantic_kernel.utils.feature_stage_decorator import experimental - -if TYPE_CHECKING: - from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class AgentGroupChat(AgentChat): """An agent chat that supports multi-turn interactions.""" @@ -34,10 +29,7 @@ class AgentGroupChat(AgentChat): agents: list[Agent] = Field(default_factory=list) is_complete: bool = False - termination_strategy: TerminationStrategy = Field( - default_factory=DefaultTerminationStrategy, - description="The termination strategy to use. 
The default strategy never terminates and has a max iterations of 5.", # noqa: E501 - ) + termination_strategy: TerminationStrategy = Field(default_factory=DefaultTerminationStrategy) selection_strategy: SelectionStrategy = Field(default_factory=SequentialSelectionStrategy) def __init__( @@ -45,7 +37,6 @@ def __init__( agents: list[Agent] | None = None, termination_strategy: TerminationStrategy | None = None, selection_strategy: SelectionStrategy | None = None, - chat_history: "ChatHistory | None" = None, ) -> None: """Initialize a new instance of AgentGroupChat. @@ -53,7 +44,6 @@ def __init__( agents: The agents to add to the group chat. termination_strategy: The termination strategy to use. selection_strategy: The selection strategy - chat_history: The chat history. """ agent_ids = {agent.id for agent in agents} if agents else set() @@ -69,8 +59,6 @@ def __init__( args["termination_strategy"] = termination_strategy if selection_strategy is not None: args["selection_strategy"] = selection_strategy - if chat_history is not None: - args["history"] = chat_history super().__init__(**args) @@ -211,18 +199,3 @@ async def invoke_stream( if self.is_complete: break - - async def reduce_history(self) -> bool: - """Perform the reduction on the provided history, returning True if reduction occurred.""" - if not isinstance(self.history, ChatHistoryReducer): - return False - - result = await self.history.reduce() - if result is None: - return False - - reducer = cast(ChatHistoryReducer, result) - reduced_history = deepcopy(reducer.messages) - await self.reset() - await self.add_chat_messages(reduced_history) - return True diff --git a/python/semantic_kernel/agents/group_chat/broadcast_queue.py b/python/semantic_kernel/agents/group_chat/broadcast_queue.py index fb5d7674129d..0e77b14f91e4 100644 --- a/python/semantic_kernel/agents/group_chat/broadcast_queue.py +++ b/python/semantic_kernel/agents/group_chat/broadcast_queue.py @@ -11,10 +11,10 @@ from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class QueueReference(KernelBaseModel): """Utility class to associate a queue with its specific lock.""" @@ -38,7 +38,7 @@ def validate_receive_task(cls, values: Any): return values -@experimental +@experimental_class @dataclass class ChannelReference: """Tracks a channel along with its hashed key.""" @@ -47,7 +47,7 @@ class ChannelReference: channel: AgentChannel = field(default_factory=AgentChannel) -@experimental +@experimental_class class BroadcastQueue(KernelBaseModel): """A queue for broadcasting messages to listeners.""" diff --git a/python/semantic_kernel/agents/open_ai/assistant_content_generation.py b/python/semantic_kernel/agents/open_ai/assistant_content_generation.py index d7116098635b..483586fedf02 100644 --- a/python/semantic_kernel/agents/open_ai/assistant_content_generation.py +++ b/python/semantic_kernel/agents/open_ai/assistant_content_generation.py @@ -3,15 +3,13 @@ from typing import TYPE_CHECKING, Any from openai import AsyncOpenAI -from openai.types.beta.threads.file_citation_annotation import FileCitationAnnotation from openai.types.beta.threads.file_citation_delta_annotation import FileCitationDeltaAnnotation -from 
openai.types.beta.threads.file_path_annotation import FilePathAnnotation from openai.types.beta.threads.file_path_delta_annotation import FilePathDeltaAnnotation from openai.types.beta.threads.image_file_content_block import ImageFileContentBlock from openai.types.beta.threads.image_file_delta_block import ImageFileDeltaBlock from openai.types.beta.threads.message_delta_event import MessageDeltaEvent from openai.types.beta.threads.runs import CodeInterpreterLogs -from openai.types.beta.threads.runs.code_interpreter_tool_call import CodeInterpreterOutputImage +from openai.types.beta.threads.runs.code_interpreter_tool_call import CodeInterpreter from openai.types.beta.threads.text_content_block import TextContentBlock from openai.types.beta.threads.text_delta_block import TextDeltaBlock @@ -28,11 +26,12 @@ from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions.agent_exceptions import AgentExecutionException -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_function if TYPE_CHECKING: - from openai.types.beta.threads.message import Message - from openai.types.beta.threads.run import Run + from openai.resources.beta.threads.messages import Message + from openai.resources.beta.threads.runs.runs import Run + from openai.types.beta.threads.annotation import Annotation from openai.types.beta.threads.runs import RunStep from openai.types.beta.threads.runs.tool_call import ToolCall from openai.types.beta.threads.runs.tool_calls_step_details import ToolCallsStepDetails @@ -45,7 +44,7 @@ ################################################################### -@experimental +@experimental_function async def create_chat_message( client: AsyncOpenAI, thread_id: str, @@ -77,7 +76,7 @@ async def create_chat_message( ) -@experimental +@experimental_function def get_message_contents(message: "ChatMessageContent") -> list[dict[str, Any]]: """Get the message contents. 
@@ -121,7 +120,7 @@ def get_message_contents(message: "ChatMessageContent") -> list[dict[str, Any]]: return contents -@experimental +@experimental_function def generate_message_content( assistant_name: str, message: "Message", completed_step: "RunStep | None" = None ) -> ChatMessageContent: @@ -164,29 +163,13 @@ def generate_message_content( return content -@experimental +@experimental_function def generate_streaming_message_content( - assistant_name: str, - message_delta_event: "MessageDeltaEvent", - completed_step: "RunStep | None" = None, + assistant_name: str, message_delta_event: "MessageDeltaEvent" ) -> StreamingChatMessageContent: """Generate streaming message content from a MessageDeltaEvent.""" delta = message_delta_event.delta - metadata = ( - { - "created_at": completed_step.created_at, - "message_id": message_delta_event.id, # message needs to be defined in context - "step_id": completed_step.id, - "run_id": completed_step.run_id, - "thread_id": completed_step.thread_id, - "assistant_id": completed_step.assistant_id, - "usage": completed_step.usage, - } - if completed_step is not None - else None - ) - # Determine the role role = AuthorRole(delta.role) if delta.role is not None else AuthorRole("assistant") @@ -219,54 +202,10 @@ def generate_streaming_message_content( ) ) - return StreamingChatMessageContent(role=role, name=assistant_name, items=items, choice_index=0, metadata=metadata) # type: ignore - - -@experimental -def generate_final_streaming_message_content( - assistant_name: str, - message: "Message", - completed_step: "RunStep | None" = None, -) -> StreamingChatMessageContent: - """Generate streaming message content from a MessageDeltaEvent.""" - metadata = ( - { - "created_at": completed_step.created_at, - "message_id": message.id, # message needs to be defined in context - "step_id": completed_step.id, - "run_id": completed_step.run_id, - "thread_id": completed_step.thread_id, - "assistant_id": completed_step.assistant_id, - "usage": completed_step.usage, - } - if completed_step is not None - else None - ) - - # Determine the role - role = AuthorRole(message.role) if message.role is not None else AuthorRole("assistant") - - items: list[StreamingTextContent | StreamingAnnotationContent | StreamingFileReferenceContent] = [] - - # Process each content block in the delta - for item_content in message.content: - if item_content.type == "text": - assert isinstance(item_content, TextContentBlock) # nosec - items.append(StreamingTextContent(text=item_content.text.value, choice_index=0)) - for annotation in item_content.text.annotations: - items.append(generate_streaming_annotation_content(annotation)) - elif item_content.type == "image_file": - assert isinstance(item_content, ImageFileContentBlock) # nosec - items.append( - StreamingFileReferenceContent( - file_id=item_content.image_file.file_id, - ) - ) - - return StreamingChatMessageContent(role=role, name=assistant_name, items=items, choice_index=0, metadata=metadata) # type: ignore + return StreamingChatMessageContent(role=role, name=assistant_name, items=items, choice_index=0) # type: ignore -@experimental +@experimental_function def generate_function_call_content(agent_name: str, fccs: list[FunctionCallContent]) -> ChatMessageContent: """Generate function call content. 
@@ -280,7 +219,7 @@ def generate_function_call_content(agent_name: str, fccs: list[FunctionCallConte return ChatMessageContent(role=AuthorRole.ASSISTANT, name=agent_name, items=fccs) # type: ignore -@experimental +@experimental_function def generate_function_result_content( agent_name: str, function_step: FunctionCallContent, tool_call: "ToolCall" ) -> ChatMessageContent: @@ -297,7 +236,7 @@ def generate_function_result_content( return function_call_content -@experimental +@experimental_function def get_function_call_contents(run: "Run", function_steps: dict[str, FunctionCallContent]) -> list[FunctionCallContent]: """Extract function call contents from the run. @@ -324,7 +263,7 @@ def get_function_call_contents(run: "Run", function_steps: dict[str, FunctionCal return function_call_contents -@experimental +@experimental_function def generate_code_interpreter_content(agent_name: str, code: str) -> "ChatMessageContent": """Generate code interpreter content. @@ -343,7 +282,7 @@ def generate_code_interpreter_content(agent_name: str, code: str) -> "ChatMessag ) -@experimental +@experimental_function def generate_streaming_function_content( agent_name: str, step_details: "ToolCallsStepDetails" ) -> "StreamingChatMessageContent": @@ -381,7 +320,7 @@ def generate_streaming_function_content( ) -@experimental +@experimental_function def generate_streaming_code_interpreter_content( agent_name: str, step_details: "ToolCallsStepDetails" ) -> "StreamingChatMessageContent | None": @@ -409,7 +348,7 @@ def generate_streaming_code_interpreter_content( metadata["code"] = True if tool.code_interpreter.outputs: for output in tool.code_interpreter.outputs: - if isinstance(output, CodeInterpreterOutputImage) and output.image.file_id: + if isinstance(output, CodeInterpreter) and output.image.file_id: items.append( StreamingFileReferenceContent( file_id=output.image.file_id, @@ -436,15 +375,14 @@ def generate_streaming_code_interpreter_content( ) -@experimental -def generate_annotation_content(annotation: FileCitationAnnotation | FilePathAnnotation) -> AnnotationContent: +@experimental_function +def generate_annotation_content(annotation: "Annotation") -> AnnotationContent: """Generate annotation content.""" file_id = None - match annotation: - case FilePathAnnotation(): - file_id = annotation.file_path.file_id - case FileCitationAnnotation(): - file_id = annotation.file_citation.file_id + if hasattr(annotation, "file_path"): + file_id = annotation.file_path.file_id + elif hasattr(annotation, "file_citation"): + file_id = annotation.file_citation.file_id return AnnotationContent( file_id=file_id, @@ -454,21 +392,14 @@ def generate_annotation_content(annotation: FileCitationAnnotation | FilePathAnn ) -@experimental -def generate_streaming_annotation_content( - annotation: FileCitationAnnotation | FilePathAnnotation | FilePathDeltaAnnotation | FileCitationDeltaAnnotation, -) -> StreamingAnnotationContent: +@experimental_function +def generate_streaming_annotation_content(annotation: "Annotation") -> StreamingAnnotationContent: """Generate streaming annotation content.""" file_id = None - match annotation: - case FilePathAnnotation(): - file_id = annotation.file_path.file_id - case FileCitationAnnotation(): - file_id = annotation.file_citation.file_id - case FilePathDeltaAnnotation(): - file_id = annotation.file_path.file_id if annotation.file_path is not None else None - case FileCitationDeltaAnnotation(): - file_id = annotation.file_citation.file_id if annotation.file_citation is not None else None + if 
hasattr(annotation, "file_path") and annotation.file_path: + file_id = annotation.file_path.file_id if annotation.file_path.file_id else None + elif hasattr(annotation, "file_citation") and annotation.file_citation: + file_id = annotation.file_citation.file_id if annotation.file_citation.file_id else None return StreamingAnnotationContent( file_id=file_id, diff --git a/python/semantic_kernel/agents/open_ai/assistant_thread_actions.py b/python/semantic_kernel/agents/open_ai/assistant_thread_actions.py deleted file mode 100644 index 92bdb43470cf..000000000000 --- a/python/semantic_kernel/agents/open_ai/assistant_thread_actions.py +++ /dev/null @@ -1,770 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import logging -from collections.abc import AsyncIterable, Iterable, Sequence -from typing import TYPE_CHECKING, Any, ClassVar, Literal, TypeVar, cast - -from openai import AsyncOpenAI -from openai.types.beta.code_interpreter_tool import CodeInterpreterTool -from openai.types.beta.file_search_tool import FileSearchTool -from openai.types.beta.threads.run_create_params import AdditionalMessage, AdditionalMessageAttachment -from openai.types.beta.threads.runs import ( - MessageCreationStepDetails, - RunStep, - RunStepDeltaEvent, - ToolCallDeltaObject, - ToolCallsStepDetails, -) - -from semantic_kernel.agents.azure_ai.agent_content_generation import generate_function_call_streaming_content -from semantic_kernel.agents.open_ai.assistant_content_generation import ( - generate_code_interpreter_content, - generate_final_streaming_message_content, - generate_function_call_content, - generate_function_result_content, - generate_message_content, - generate_streaming_code_interpreter_content, - generate_streaming_function_content, - generate_streaming_message_content, - get_function_call_contents, - get_message_contents, -) -from semantic_kernel.agents.open_ai.function_action_result import FunctionActionResult -from semantic_kernel.connectors.ai.function_calling_utils import ( - kernel_function_metadata_to_function_call_format, - merge_streaming_function_results, -) -from semantic_kernel.contents.file_reference_content import FileReferenceContent -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions.agent_exceptions import ( - AgentExecutionException, - AgentInvokeException, -) -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.utils.feature_stage_decorator import experimental - -if TYPE_CHECKING: - from openai import AsyncOpenAI - from openai.types.beta.assistant_response_format_option_param import AssistantResponseFormatOptionParam - from openai.types.beta.assistant_tool_param import AssistantToolParam - from openai.types.beta.threads.message import Message - from openai.types.beta.threads.run import Run - from openai.types.beta.threads.run_create_params import AdditionalMessageAttachmentTool, TruncationStrategy - - from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent - from semantic_kernel.contents.chat_history import ChatHistory - from semantic_kernel.contents.chat_message_content import ChatMessageContent - from semantic_kernel.contents.function_call_content import FunctionCallContent - from semantic_kernel.contents.streaming_chat_message_content import 
StreamingChatMessageContent - from semantic_kernel.kernel import Kernel - -_T = TypeVar("_T", bound="AssistantThreadActions") - -logger: logging.Logger = logging.getLogger(__name__) - - -@experimental -class AssistantThreadActions: - """Assistant Thread Actions class.""" - - polling_status: ClassVar[list[str]] = ["queued", "in_progress", "cancelling"] - error_message_states: ClassVar[list[str]] = ["failed", "cancelled", "expired", "incomplete"] - - tool_metadata: ClassVar[dict[str, Sequence[Any]]] = { - "file_search": [{"type": "file_search"}], - "code_interpreter": [{"type": "code_interpreter"}], - } - - # region Messaging Handling Methods - - @classmethod - async def create_message( - cls: type[_T], - client: "AsyncOpenAI", - thread_id: str, - message: "str | ChatMessageContent", - allowed_message_roles: list[str] = [AuthorRole.USER, AuthorRole.ASSISTANT], - **kwargs: Any, - ) -> "Message | None": - """Create a message in the thread. - - Args: - client: The client to use to create the message. - thread_id: The ID of the thread to create the message in. - message: The message to create. - allowed_message_roles: The allowed message roles. - kwargs: Additional keyword arguments. - - Returns: - The created message. - """ - from semantic_kernel.contents.chat_message_content import ChatMessageContent - - if isinstance(message, str): - message = ChatMessageContent(role=AuthorRole.USER, content=message) - - if any(isinstance(item, FunctionCallContent) for item in message.items): - return None - - if message.role.value not in allowed_message_roles and message.role != AuthorRole.TOOL: - raise AgentExecutionException( - f"Invalid message role `{message.role.value}`. Allowed roles are {allowed_message_roles}." - ) - - message_contents: list[dict[str, Any]] = get_message_contents(message=message) - - return await client.beta.threads.messages.create( - thread_id=thread_id, - role="assistant" if message.role == AuthorRole.TOOL else message.role.value, # type: ignore - content=message_contents, # type: ignore - **kwargs, - ) - - # endregion - - # region Invocation Methods - - @classmethod - async def invoke( - cls: type[_T], - *, - agent: "OpenAIAssistantAgent", - thread_id: str, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - # Run-level parameters: - additional_instructions: str | None = None, - additional_messages: "list[ChatMessageContent] | None" = None, - instructions_override: str | None = None, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - metadata: dict[str, str] | None = None, - model: str | None = None, - parallel_tool_calls: bool | None = None, - reasoning_effort: Literal["low", "medium", "high"] | None = None, - response_format: "AssistantResponseFormatOptionParam | None" = None, - tools: "list[AssistantToolParam] | None" = None, - temperature: float | None = None, - top_p: float | None = None, - truncation_strategy: "TruncationStrategy | None" = None, - **kwargs: Any, - ) -> AsyncIterable[tuple[bool, "ChatMessageContent"]]: - """Invoke the assistant. - - Args: - agent: The assistant agent. - thread_id: The thread ID. - arguments: The kernel arguments. - kernel: The kernel. - instructions_override: The instructions override. - additional_instructions: The additional instructions. - additional_messages: The additional messages. - max_completion_tokens: The maximum completion tokens. - max_prompt_tokens: The maximum prompt tokens. - metadata: The metadata. - model: The model. 
- parallel_tool_calls: The parallel tool calls. - reasoning_effort: The reasoning effort. - response_format: The response format. - tools: The tools. - temperature: The temperature. - top_p: The top p. - truncation_strategy: The truncation strategy. - kwargs: Additional keyword arguments. - - Returns: - An async iterable of tuple of the visibility of the message and the chat message content. - """ - arguments = KernelArguments() if arguments is None else KernelArguments(**arguments, **kwargs) - kernel = kernel or agent.kernel - - tools = cls._get_tools(agent=agent, kernel=kernel) # type: ignore - - base_instructions = await agent.format_instructions(kernel=kernel, arguments=arguments) - - merged_instructions: str = "" - if instructions_override is not None: - merged_instructions = instructions_override - elif base_instructions and additional_instructions: - merged_instructions = f"{base_instructions}\n\n{additional_instructions}" - else: - merged_instructions = base_instructions or additional_instructions or "" - - # form run options - run_options = cls._generate_options( - agent=agent, - model=model, - response_format=response_format, - temperature=temperature, - top_p=top_p, - metadata=metadata, - parallel_tool_calls_enabled=parallel_tool_calls, - truncation_message_count=truncation_strategy, - max_completion_tokens=max_completion_tokens, - max_prompt_tokens=max_prompt_tokens, - additional_messages=additional_messages, - reasoning_effort=reasoning_effort, - ) - - run_options = {k: v for k, v in run_options.items() if v is not None} - - run = await agent.client.beta.threads.runs.create( - assistant_id=agent.id, - thread_id=thread_id, - instructions=merged_instructions or agent.instructions, - tools=tools, # type: ignore - **run_options, - ) - - processed_step_ids = set() - function_steps: dict[str, "FunctionCallContent"] = {} - - while run.status != "completed": - run = await cls._poll_run_status(agent=agent, run=run, thread_id=thread_id) - - if run.status in cls.error_message_states: - error_message = "" - if run.last_error and run.last_error.message: - error_message = run.last_error.message - incomplete_details = "" - if run.incomplete_details: - incomplete_details = str(run.incomplete_details.reason) - raise AgentInvokeException( - f"Run failed with status: `{run.status}` for agent `{agent.name}` and thread `{thread_id}` " - f"with error: {error_message} or incomplete details: {incomplete_details}" - ) - - # Check if function calling required - if run.status == "requires_action": - logger.debug(f"Run [{run.id}] requires action for agent `{agent.name}` and thread `{thread_id}`") - fccs = get_function_call_contents(run, function_steps) - if fccs: - logger.debug( - f"Yielding `generate_function_call_content` for agent `{agent.name}` and " - f"thread `{thread_id}`, visibility False" - ) - yield False, generate_function_call_content(agent_name=agent.name, fccs=fccs) - - from semantic_kernel.contents.chat_history import ChatHistory - - chat_history = ChatHistory() - _ = await cls._invoke_function_calls(kernel=kernel, fccs=fccs, chat_history=chat_history) - - tool_outputs = cls._format_tool_outputs(fccs, chat_history) - await agent.client.beta.threads.runs.submit_tool_outputs( - run_id=run.id, - thread_id=thread_id, - tool_outputs=tool_outputs, # type: ignore - ) - logger.debug(f"Submitted tool outputs for agent `{agent.name}` and thread `{thread_id}`") - - steps_response = await agent.client.beta.threads.runs.steps.list(run_id=run.id, thread_id=thread_id) - logger.debug(f"Called for 
steps_response for run [{run.id}] agent `{agent.name}` and thread `{thread_id}`") - steps: list[RunStep] = steps_response.data - - def sort_key(step: RunStep): - # Put tool_calls first, then message_creation - # If multiple steps share a type, break ties by completed_at - return (0 if step.type == "tool_calls" else 1, step.completed_at) - - completed_steps_to_process = sorted( - [s for s in steps if s.completed_at is not None and s.id not in processed_step_ids], key=sort_key - ) - - logger.debug( - f"Completed steps to process for run [{run.id}] agent `{agent.name}` and thread `{thread_id}` " - f"with length `{len(completed_steps_to_process)}`" - ) - - message_count = 0 - for completed_step in completed_steps_to_process: - if completed_step.type == "tool_calls": - logger.debug( - f"Entering step type tool_calls for run [{run.id}], agent `{agent.name}` and " - f"thread `{thread_id}`" - ) - assert hasattr(completed_step.step_details, "tool_calls") # nosec - tool_call_details = cast(ToolCallsStepDetails, completed_step.step_details) - for tool_call in tool_call_details.tool_calls: - is_visible = False - content: "ChatMessageContent | None" = None - if tool_call.type == "code_interpreter": - logger.debug( - f"Entering step type tool_calls for run [{run.id}], [code_interpreter] for " - f"agent `{agent.name}` and thread `{thread_id}`" - ) - content = generate_code_interpreter_content( - agent.name, - tool_call.code_interpreter.input, # type: ignore - ) - is_visible = True - elif tool_call.type == "function": - logger.debug( - f"Entering step type tool_calls for run [{run.id}], [function] for agent " - f"`{agent.name}` and thread `{thread_id}`" - ) - function_step = function_steps.get(tool_call.id) - assert function_step is not None # nosec - content = generate_function_result_content( - agent_name=agent.name, function_step=function_step, tool_call=tool_call - ) - - if content: - message_count += 1 - logger.debug( - f"Yielding tool_message for run [{run.id}], agent `{agent.name}` and thread " - f"`{thread_id}` and message count `{message_count}`, is_visible `{is_visible}`" - ) - yield is_visible, content - elif completed_step.type == "message_creation": - logger.debug( - f"Entering step type message_creation for run [{run.id}], agent `{agent.name}` and " - f"thread `{thread_id}`" - ) - message = await cls._retrieve_message( - agent=agent, - thread_id=thread_id, - message_id=completed_step.step_details.message_creation.message_id, # type: ignore - ) - if message: - content = generate_message_content(agent.name, message) - if content and len(content.items) > 0: - message_count += 1 - logger.debug( - f"Yielding message_creation for run [{run.id}], agent `{agent.name}` and " - f"thread `{thread_id}` and message count `{message_count}`, is_visible `{True}`" - ) - yield True, content - processed_step_ids.add(completed_step.id) - - @classmethod - async def invoke_stream( - cls: type[_T], - *, - agent: "OpenAIAssistantAgent", - thread_id: str, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - # Run-level parameters: - additional_instructions: str | None = None, - additional_messages: "list[ChatMessageContent] | None" = None, - instructions_override: str | None = None, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - messages: list["ChatMessageContent"] | None = None, - metadata: dict[str, str] | None = None, - model: str | None = None, - parallel_tool_calls: bool | None = None, - reasoning_effort: Literal["low", "medium", "high"] | None = 
None, - response_format: "AssistantResponseFormatOptionParam | None" = None, - tools: "list[AssistantToolParam] | None" = None, - temperature: float | None = None, - top_p: float | None = None, - truncation_strategy: "TruncationStrategy | None" = None, - **kwargs: Any, - ) -> AsyncIterable["StreamingChatMessageContent"]: - """Invoke the assistant. - - Args: - agent: The assistant agent. - thread_id: The thread ID. - arguments: The kernel arguments. - kernel: The kernel. - instructions_override: The instructions override. - additional_instructions: The additional instructions. - additional_messages: The additional messages. - max_completion_tokens: The maximum completion tokens. - max_prompt_tokens: The maximum prompt tokens. - messages: The messages that act as a receiver for completed messages. - metadata: The metadata. - model: The model. - parallel_tool_calls: The parallel tool calls. - reasoning_effort: The reasoning effort. - response_format: The response format. - tools: The tools. - temperature: The temperature. - top_p: The top p. - truncation_strategy: The truncation strategy. - kwargs: Additional keyword arguments. - - Returns: - An async iterable of tuple of the visibility of the message and the chat message content. - """ - arguments = KernelArguments() if arguments is None else KernelArguments(**arguments, **kwargs) - kernel = kernel or agent.kernel - - tools = cls._get_tools(agent=agent, kernel=kernel) # type: ignore - - base_instructions = await agent.format_instructions(kernel=kernel, arguments=arguments) - - merged_instructions: str = "" - if instructions_override is not None: - merged_instructions = instructions_override - elif base_instructions and additional_instructions: - merged_instructions = f"{base_instructions}\n\n{additional_instructions}" - else: - merged_instructions = base_instructions or additional_instructions or "" - - # form run options - run_options = cls._generate_options( - agent=agent, - model=model, - response_format=response_format, - temperature=temperature, - top_p=top_p, - metadata=metadata, - parallel_tool_calls_enabled=parallel_tool_calls, - truncation_message_count=truncation_strategy, - max_completion_tokens=max_completion_tokens, - max_prompt_tokens=max_prompt_tokens, - additional_messages=additional_messages, - reasoning_effort=reasoning_effort, - ) - - run_options = {k: v for k, v in run_options.items() if v is not None} - - stream = agent.client.beta.threads.runs.stream( - assistant_id=agent.id, - thread_id=thread_id, - instructions=merged_instructions or agent.instructions, - tools=tools, # type: ignore - **run_options, - ) - - function_steps: dict[str, "FunctionCallContent"] = {} - active_messages: dict[str, RunStep] = {} - - while True: - async with stream as response_stream: - async for event in response_stream: - if event.event == "thread.run.created": - run = event.data - logger.info(f"Assistant run created with ID: {run.id}") - elif event.event == "thread.run.in_progress": - run = event.data - logger.info(f"Assistant run in progress with ID: {run.id}") - elif event.event == "thread.message.delta": - content = generate_streaming_message_content(agent.name, event.data) - yield content - elif event.event == "thread.run.step.completed": - step_completed = cast(RunStep, event.data) - logger.info(f"Run step completed with ID: {event.data.id}") - if isinstance(step_completed.step_details, MessageCreationStepDetails): - message_id = step_completed.step_details.message_creation.message_id - if message_id not in active_messages: - 
active_messages[message_id] = event.data - elif event.event == "thread.run.step.delta": - run_step_event: RunStepDeltaEvent = event.data - details = run_step_event.delta.step_details - if not details: - continue - step_details = event.data.delta.step_details - if isinstance(details, ToolCallDeltaObject) and details.tool_calls: - for tool_call in details.tool_calls: - tool_content = None - if tool_call.type == "function": - tool_content = generate_streaming_function_content(agent.name, step_details) - elif tool_call.type == "code_interpreter": - tool_content = generate_streaming_code_interpreter_content(agent.name, step_details) - if tool_content: - yield tool_content - elif event.event == "thread.run.requires_action": - run = event.data - function_action_result = await cls._handle_streaming_requires_action( - agent.name, kernel, run, function_steps - ) - if function_action_result is None: - raise AgentInvokeException( - f"Function call required but no function steps found for agent `{agent.name}` " - f"thread: {thread_id}." - ) - if function_action_result.function_result_streaming_content: - # Yield the function result content to the caller - yield function_action_result.function_result_streaming_content - if messages is not None: - # Add the function result content to the messages list, if it exists - messages.append(function_action_result.function_result_streaming_content) - if function_action_result.function_call_streaming_content: - if messages is not None: - messages.append(function_action_result.function_call_streaming_content) - stream = agent.client.beta.threads.runs.submit_tool_outputs_stream( - run_id=run.id, - thread_id=thread_id, - tool_outputs=function_action_result.tool_outputs, # type: ignore - ) - break - elif event.event == "thread.run.completed": - run = event.data - logger.info(f"Run completed with ID: {run.id}") - if len(active_messages) > 0: - for id in active_messages: - step: RunStep = active_messages[id] - message = await cls._retrieve_message( - agent=agent, - thread_id=thread_id, - message_id=id, # type: ignore - ) - - if message and message.content: - content = generate_final_streaming_message_content(agent.name, message, step) - if messages is not None: - messages.append(content) - return - elif event.event == "thread.run.failed": - run = event.data # type: ignore - error_message = "" - if run.last_error and run.last_error.message: - error_message = run.last_error.message - raise AgentInvokeException( - f"Run failed with status: `{run.status}` for agent `{agent.name}` and thread `{thread_id}` " - f"with error: {error_message}" - ) - else: - # If the inner loop completes without encountering a 'break', exit the outer loop - break - - @classmethod - async def _handle_streaming_requires_action( - cls: type[_T], - agent_name: str, - kernel: "Kernel", - run: "Run", - function_steps: dict[str, "FunctionCallContent"], - **kwargs: Any, - ) -> FunctionActionResult | None: - """Handle the requires action event for a streaming run.""" - fccs = get_function_call_contents(run, function_steps) - if fccs: - function_call_streaming_content = generate_function_call_streaming_content(agent_name=agent_name, fccs=fccs) - from semantic_kernel.contents.chat_history import ChatHistory - - chat_history = ChatHistory() if kwargs.get("chat_history") is None else kwargs["chat_history"] - _ = await cls._invoke_function_calls(kernel=kernel, fccs=fccs, chat_history=chat_history) - function_result_streaming_content = merge_streaming_function_results(chat_history.messages)[0] - tool_outputs = 
cls._format_tool_outputs(fccs, chat_history) - return FunctionActionResult( - function_call_streaming_content, function_result_streaming_content, tool_outputs - ) - return None - - # endregion - - @classmethod - async def _retrieve_message( - cls: type[_T], agent: "OpenAIAssistantAgent", thread_id: str, message_id: str - ) -> "Message | None": - """Retrieve a message from a thread.""" - message: "Message | None" = None - count = 0 - max_retries = 3 - while count < max_retries: - try: - message = await agent.client.beta.threads.messages.retrieve(thread_id=thread_id, message_id=message_id) - break - except Exception as ex: - logger.error(f"Failed to retrieve message {message_id} from thread {thread_id}: {ex}") - count += 1 - if count >= max_retries: - logger.error( - f"Max retries reached. Unable to retrieve message {message_id} from thread {thread_id}." - ) - break - backoff_time: float = agent.polling_options.message_synchronization_delay.total_seconds() * (2**count) - await asyncio.sleep(backoff_time) - return message - - @classmethod - async def _invoke_function_calls( - cls: type[_T], kernel: "Kernel", fccs: list["FunctionCallContent"], chat_history: "ChatHistory" - ) -> list[Any]: - """Invoke the function calls.""" - tasks = [ - kernel.invoke_function_call(function_call=function_call, chat_history=chat_history) - for function_call in fccs - ] - return await asyncio.gather(*tasks) - - @classmethod - def _format_tool_outputs( - cls: type[_T], fccs: list["FunctionCallContent"], chat_history: "ChatHistory" - ) -> list[dict[str, str]]: - """Format the tool outputs for submission.""" - from semantic_kernel.contents.function_result_content import FunctionResultContent - - tool_call_lookup = { - tool_call.id: tool_call - for message in chat_history.messages - for tool_call in message.items - if isinstance(tool_call, FunctionResultContent) - } - return [ - {"tool_call_id": fcc.id, "output": str(tool_call_lookup[fcc.id].result)} - for fcc in fccs - if fcc.id in tool_call_lookup - ] - - @classmethod - async def _poll_run_status(cls: type[_T], agent: "OpenAIAssistantAgent", run: "Run", thread_id: str) -> "Run": - """Poll the run status.""" - logger.info(f"Polling run status: {run.id}, threadId: {thread_id}") - - try: - run = await asyncio.wait_for( - cls._poll_loop(agent, run, thread_id), - timeout=agent.polling_options.run_polling_timeout.total_seconds(), - ) - except asyncio.TimeoutError: - timeout_duration = agent.polling_options.run_polling_timeout - error_message = f"Polling timed out for run id: `{run.id}` and thread id: `{thread_id}` after waiting {timeout_duration}." 
# noqa: E501 - logger.error(error_message) - raise AgentInvokeException(error_message) - - logger.info(f"Polled run status: {run.status}, {run.id}, threadId: {thread_id}") - return run - - @classmethod - async def _poll_loop(cls: type[_T], agent: "OpenAIAssistantAgent", run: "Run", thread_id: str) -> "Run": - """Internal polling loop.""" - count = 0 - while True: - await asyncio.sleep(agent.polling_options.get_polling_interval(count).total_seconds()) - count += 1 - - try: - run = await agent.client.beta.threads.runs.retrieve(run.id, thread_id=thread_id) - except Exception as e: - logging.warning(f"Failed to retrieve run for run id: `{run.id}` and thread id: `{thread_id}`: {e}") - # Retry anyway - - if run.status not in cls.polling_status: - break - - return run - - @classmethod - def _merge_options( - cls: type[_T], - *, - agent: "OpenAIAssistantAgent", - model: str | None = None, - response_format: "AssistantResponseFormatOptionParam | None" = None, - temperature: float | None = None, - top_p: float | None = None, - metadata: dict[str, str] | None = None, - **kwargs: Any, - ) -> dict[str, Any]: - """Merge run-time options with the agent-level options. - - Run-level parameters take precedence. - """ - return { - "model": model if model is not None else agent.definition.model, - "response_format": response_format if response_format is not None else None, - "temperature": temperature if temperature is not None else agent.definition.temperature, - "top_p": top_p if top_p is not None else agent.definition.top_p, - "metadata": metadata if metadata is not None else agent.definition.metadata, - **kwargs, - } - - @classmethod - def _generate_options(cls: type[_T], **kwargs: Any) -> dict[str, Any]: - """Generate a dictionary of options that can be passed directly to create_run.""" - merged = cls._merge_options(**kwargs) - agent = kwargs.get("agent") - trunc_count = merged.get("truncation_message_count", None) - max_completion_tokens = merged.get("max_completion_tokens", None) - max_prompt_tokens = merged.get("max_prompt_tokens", None) - parallel_tool_calls = merged.get("parallel_tool_calls_enabled", None) - additional_messages = cls._translate_additional_messages(agent, merged.get("additional_messages", None)) - return { - "model": merged.get("model"), - "top_p": merged.get("top_p"), - "response_format": merged.get("response_format"), - "temperature": merged.get("temperature"), - "truncation_strategy": trunc_count, - "metadata": merged.get("metadata"), - "max_completion_tokens": max_completion_tokens, - "max_prompt_tokens": max_prompt_tokens, - "parallel_tool_calls": parallel_tool_calls, - "additional_messages": additional_messages, - } - - @classmethod - def _translate_additional_messages( - cls: type[_T], agent, messages: "list[ChatMessageContent] | None" - ) -> list[AdditionalMessage] | None: - """Translate additional messages to the required format.""" - if not messages: - return None - return cls._form_additional_messages(messages) - - @classmethod - def _form_additional_messages( - cls: type[_T], messages: list["ChatMessageContent"] - ) -> list[AdditionalMessage] | None: - """Form the additional messages for the specified thread.""" - if not messages: - return None - - additional_messages = [] - for message in messages: - if not message.content: - continue - - message_with_all: AdditionalMessage = { - "content": message.content, - "role": "assistant" if message.role == AuthorRole.ASSISTANT else "user", - "attachments": cls._get_attachments(message) if message.items else None, - "metadata": 
cls._get_metadata(message) if message.metadata else None, - } - additional_messages.append(message_with_all) - return additional_messages - - @classmethod - def _get_attachments(cls: type[_T], message: "ChatMessageContent") -> list[AdditionalMessageAttachment]: - return [ - AdditionalMessageAttachment( - file_id=file_content.file_id, - tools=list(cls._get_tool_definition(file_content.tools)), # type: ignore - data_source=file_content.data_source if file_content.data_source else None, - ) - for file_content in message.items - if isinstance(file_content, (FileReferenceContent, StreamingFileReferenceContent)) - and file_content.file_id is not None - ] - - @classmethod - def _get_metadata(cls: type[_T], message: "ChatMessageContent") -> dict[str, str]: - """Get the metadata for an agent message.""" - return {k: str(v) if v is not None else "" for k, v in (message.metadata or {}).items()} - - @classmethod - def _get_tool_definition(cls: type[_T], tools: list[Any]) -> Iterable["AdditionalMessageAttachmentTool"]: - if not tools: - return - for tool in tools: - if tool_definition := cls.tool_metadata.get(tool): - yield from tool_definition - - @classmethod - def _get_tools(cls: type[_T], agent: "OpenAIAssistantAgent", kernel: "Kernel") -> list[dict[str, str]]: - """Get the list of tools for the assistant. - - Returns: - The list of tools. - """ - tools: list[Any] = [] - - for tool in agent.definition.tools: - if isinstance(tool, CodeInterpreterTool): - tools.append({"type": "code_interpreter"}) - elif isinstance(tool, FileSearchTool): - tools.append({"type": "file_search"}) - - funcs = agent.kernel.get_full_list_of_function_metadata() - tools.extend([kernel_function_metadata_to_function_call_format(f) for f in funcs]) - - return tools diff --git a/python/semantic_kernel/agents/open_ai/azure_assistant_agent.py b/python/semantic_kernel/agents/open_ai/azure_assistant_agent.py index f82c305acdd6..d307f0ac2466 100644 --- a/python/semantic_kernel/agents/open_ai/azure_assistant_agent.py +++ b/python/semantic_kernel/agents/open_ai/azure_assistant_agent.py @@ -1,77 +1,403 @@ # Copyright (c) Microsoft. All rights reserved. 
-from collections.abc import Awaitable, Callable +import logging +from collections.abc import AsyncIterable, Awaitable, Callable from copy import copy -from typing import Any +from typing import TYPE_CHECKING, Any from openai import AsyncAzureOpenAI from pydantic import ValidationError -from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent +from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase from semantic_kernel.connectors.ai.open_ai.settings.azure_open_ai_settings import AzureOpenAISettings +from semantic_kernel.const import DEFAULT_SERVICE_NAME from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException +from semantic_kernel.kernel_pydantic import HttpsUrl from semantic_kernel.utils.authentication.entra_id_authentication import get_entra_auth_token -from semantic_kernel.utils.feature_stage_decorator import release_candidate +from semantic_kernel.utils.experimental_decorator import experimental_class from semantic_kernel.utils.telemetry.user_agent import APP_INFO, prepend_semantic_kernel_to_user_agent +if TYPE_CHECKING: + from semantic_kernel.kernel import Kernel -@release_candidate -class AzureAssistantAgent(OpenAIAssistantAgent): - """An Azure Assistant Agent class that extends the OpenAI Assistant Agent class.""" - @staticmethod - def setup_resources( - *, +logger: logging.Logger = logging.getLogger(__name__) + + +@experimental_class +class AzureAssistantAgent(OpenAIAssistantBase): + """Azure OpenAI Assistant Agent class. + + Provides the ability to interact with Azure OpenAI Assistants. + """ + + # region Agent Initialization + + def __init__( + self, + kernel: "Kernel | None" = None, + service_id: str | None = None, + deployment_name: str | None = None, + api_key: str | None = None, + endpoint: HttpsUrl | None = None, + api_version: str | None = None, ad_token: str | None = None, ad_token_provider: Callable[[], str | Awaitable[str]] | None = None, + client: AsyncAzureOpenAI | None = None, + default_headers: dict[str, str] | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + description: str | None = None, + id: str | None = None, + instructions: str | None = None, + name: str | None = None, + enable_code_interpreter: bool | None = None, + enable_file_search: bool | None = None, + enable_json_response: bool | None = None, + file_ids: list[str] | None = None, + temperature: float | None = None, + top_p: float | None = None, + vector_store_id: str | None = None, + metadata: dict[str, Any] | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + parallel_tool_calls_enabled: bool | None = True, + truncation_message_count: int | None = None, + token_endpoint: str | None = None, + **kwargs: Any, + ) -> None: + """Initialize an Azure OpenAI Assistant Agent. + + Args: + kernel: The Kernel instance. (optional) + service_id: The service ID. (optional) + deployment_name: The deployment name. (optional) + api_key: The Azure OpenAI API key. (optional) + endpoint: The Azure OpenAI endpoint. (optional) + api_version: The Azure OpenAI API version. (optional) + ad_token: The Azure AD token. (optional) + ad_token_provider: The Azure AD token provider. (optional) + client: The Azure OpenAI client. (optional) + default_headers: The default headers. (optional) + env_file_path: The environment file path. (optional) + env_file_encoding: The environment file encoding. (optional) + description: The description. 
(optional) + id: The Agent ID. (optional) + instructions: The Agent instructions. (optional) + name: The Agent name. (optional) + enable_code_interpreter: Enable the code interpreter. (optional) + enable_file_search: Enable the file search. (optional) + enable_json_response: Enable the JSON response. (optional) + file_ids: The file IDs. (optional) + temperature: The temperature. (optional) + top_p: The top p. (optional) + vector_store_id: The vector store ID. (optional) + metadata: The metadata. (optional) + max_completion_tokens: The maximum completion tokens. (optional) + max_prompt_tokens: The maximum prompt tokens. (optional) + parallel_tool_calls_enabled: Enable parallel tool calls. (optional) + truncation_message_count: The truncation message count. (optional) + token_endpoint: The Azure AD token endpoint. (optional) + **kwargs: Additional keyword arguments. + + Raises: + AgentInitializationError: If the api_key is not provided in the configuration. + """ + azure_openai_settings = self._create_azure_openai_settings( + api_key=api_key, + endpoint=endpoint, + deployment_name=deployment_name, + api_version=api_version, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + token_endpoint=token_endpoint, + ) + + client, ad_token = self._setup_client_and_token( + azure_openai_settings=azure_openai_settings, + ad_token=ad_token, + ad_token_provider=ad_token_provider, + client=client, + default_headers=default_headers, + ) + + service_id = service_id if service_id else DEFAULT_SERVICE_NAME + + args: dict[str, Any] = { + "kernel": kernel, + "ai_model_id": azure_openai_settings.chat_deployment_name, + "service_id": service_id, + "client": client, + "name": name, + "description": description, + "instructions": instructions, + "enable_code_interpreter": enable_code_interpreter, + "enable_file_search": enable_file_search, + "enable_json_response": enable_json_response, + "file_ids": file_ids or [], + "temperature": temperature, + "top_p": top_p, + "vector_store_id": vector_store_id, + "metadata": metadata or {}, + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "parallel_tool_calls_enabled": parallel_tool_calls_enabled, + "truncation_message_count": truncation_message_count, + } + + if id is not None: + args["id"] = id + if kernel is not None: + args["kernel"] = kernel + if kwargs: + args.update(kwargs) + + super().__init__(**args) + + @classmethod + async def create( + cls, + *, + kernel: "Kernel | None" = None, + service_id: str | None = None, + deployment_name: str | None = None, api_key: str | None = None, + endpoint: HttpsUrl | None = None, api_version: str | None = None, - base_url: str | None = None, + ad_token: str | None = None, + ad_token_provider: Callable[[], str | Awaitable[str]] | None = None, + client: AsyncAzureOpenAI | None = None, default_headers: dict[str, str] | None = None, - deployment_name: str | None = None, - endpoint: str | None = None, env_file_path: str | None = None, env_file_encoding: str | None = None, - token_scope: str | None = None, + description: str | None = None, + id: str | None = None, + instructions: str | None = None, + name: str | None = None, + enable_code_interpreter: bool | None = None, + code_interpreter_filenames: list[str] | None = None, + code_interpreter_file_ids: list[str] | None = None, + enable_file_search: bool | None = None, + vector_store_filenames: list[str] | None = None, + vector_store_file_ids: list[str] | None = None, + enable_json_response: bool | None = None, + temperature: 
float | None = None, + top_p: float | None = None, + vector_store_id: str | None = None, + metadata: dict[str, Any] | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + parallel_tool_calls_enabled: bool | None = True, + truncation_message_count: int | None = None, + token_endpoint: str | None = None, **kwargs: Any, - ) -> tuple[AsyncAzureOpenAI, str]: - """A method to create the Azure OpenAI client and the deployment name/model from the provided arguments. + ) -> "AzureAssistantAgent": + """Asynchronous class method used to create the OpenAI Assistant Agent. + + Args: + kernel: The Kernel instance. (optional) + service_id: The service ID. (optional) + deployment_name: The deployment name. (optional) + api_key: The Azure OpenAI API key. (optional) + endpoint: The Azure OpenAI endpoint. (optional) + api_version: The Azure OpenAI API version. (optional) + ad_token: The Azure AD token. (optional) + ad_token_provider: The Azure AD token provider. (optional) + client: The Azure OpenAI client. (optional) + default_headers: The default headers. (optional) + env_file_path: The environment file path. (optional) + env_file_encoding: The environment file encoding. (optional) + description: The description. (optional) + id: The Agent ID. (optional) + instructions: The Agent instructions. (optional) + name: The Agent name. (optional) + enable_code_interpreter: Enable the code interpreter. (optional) + code_interpreter_filenames: The filenames/paths to use with the code interpreter. (optional) + code_interpreter_file_ids: The existing file IDs to use with the code interpreter. (optional) + enable_file_search: Enable the file search. (optional) + vector_store_filenames: The filenames/paths for files to use with file search. (optional) + vector_store_file_ids: The existing file IDs to use with file search. (optional) + enable_json_response: Enable the JSON response. (optional) + temperature: The temperature. (optional) + top_p: The top p. (optional) + vector_store_id: The vector store ID. (optional) + metadata: The metadata. (optional) + max_completion_tokens: The maximum completion tokens. (optional) + max_prompt_tokens: The maximum prompt tokens. (optional) + parallel_tool_calls_enabled: Enable parallel tool calls. (optional) + truncation_message_count: The truncation message count. (optional) + token_endpoint: The Azure AD token endpoint. (optional) + **kwargs: Additional keyword arguments. - Any arguments provided will override the values in the environment variables/environment file. 
+ Returns: + An instance of the AzureAssistantAgent + """ + agent = cls( + kernel=kernel, + service_id=service_id, + deployment_name=deployment_name, + api_key=api_key, + endpoint=endpoint, + api_version=api_version, + ad_token=ad_token, + ad_token_provider=ad_token_provider, + client=client, + default_headers=default_headers, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + description=description, + id=id, + instructions=instructions, + name=name, + enable_code_interpreter=enable_code_interpreter, + enable_file_search=enable_file_search, + enable_json_response=enable_json_response, + temperature=temperature, + top_p=top_p, + vector_store_id=vector_store_id, + metadata=metadata or {}, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + parallel_tool_calls_enabled=parallel_tool_calls_enabled, + truncation_message_count=truncation_message_count, + token_endpoint=token_endpoint, + **kwargs, + ) + + assistant_create_kwargs: dict[str, Any] = {} + + code_interpreter_file_ids_combined: list[str] = [] + if code_interpreter_file_ids is not None: + code_interpreter_file_ids_combined.extend(code_interpreter_file_ids) + if code_interpreter_filenames is not None: + for file_path in code_interpreter_filenames: + try: + file_id = await agent.add_file(file_path=file_path, purpose="assistants") + code_interpreter_file_ids_combined.append(file_id) + except FileNotFoundError as ex: + logger.error( + f"Failed to upload code interpreter file with path: `{file_path}` with exception: {ex}" + ) + raise AgentInitializationException("Failed to upload code interpreter files.", ex) from ex + if code_interpreter_file_ids_combined: + agent.code_interpreter_file_ids = code_interpreter_file_ids_combined + assistant_create_kwargs["code_interpreter_file_ids"] = code_interpreter_file_ids_combined + + vector_store_file_ids_combined: list[str] = [] + if vector_store_file_ids is not None: + vector_store_file_ids_combined.extend(vector_store_file_ids) + if vector_store_filenames is not None: + for file_path in vector_store_filenames: + try: + file_id = await agent.add_file(file_path=file_path, purpose="assistants") + vector_store_file_ids_combined.append(file_id) + except FileNotFoundError as ex: + logger.error(f"Failed to upload vector store file with path: `{file_path}` with exception: {ex}") + raise AgentInitializationException("Failed to upload vector store files.", ex) from ex + if vector_store_file_ids_combined: + agent.file_search_file_ids = vector_store_file_ids_combined + if enable_file_search or agent.enable_file_search: + vector_store_id = await agent.create_vector_store(file_ids=vector_store_file_ids_combined) + agent.vector_store_id = vector_store_id + assistant_create_kwargs["vector_store_id"] = vector_store_id + + agent.assistant = await agent.create_assistant(**assistant_create_kwargs) + return agent + + @classmethod + async def retrieve( + cls, + *, + id: str, + api_key: str | None = None, + endpoint: HttpsUrl | None = None, + api_version: str | None = None, + ad_token: str | None = None, + ad_token_provider: Callable[[], str | Awaitable[str]] | None = None, + client: AsyncAzureOpenAI | None = None, + kernel: "Kernel | None" = None, + default_headers: dict[str, str] | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + token_endpoint: str | None = None, + ) -> "AzureAssistantAgent": + """Retrieve an assistant by ID. 
Args: - ad_token: The Microsoft Entra (previously Azure AD) token represented as a string - ad_token_provider: The Microsoft Entra (previously Azure AD) token provider provided as a callback - api_key: The API key - api_version: The API version - base_url: The base URL in the form https://.azure.openai.com/openai/deployments/ - default_headers: The default headers to add to the client - deployment_name: The deployment name - endpoint: The endpoint in the form https://.azure.openai.com - env_file_path: The environment file path - env_file_encoding: The environment file encoding, defaults to utf-8 - token_scope: The token scope - kwargs: Additional keyword arguments + id: The assistant ID. + api_key: The Azure OpenAI API key. (optional) + endpoint: The Azure OpenAI endpoint. (optional) + api_version: The Azure OpenAI API version. (optional) + ad_token: The Azure AD token. (optional) + ad_token_provider: The Azure AD token provider. (optional) + client: The Azure OpenAI client. (optional) + kernel: The Kernel instance. (optional) + default_headers: The default headers. (optional) + env_file_path: The environment file path. (optional) + env_file_encoding: The environment file encoding. (optional) + token_endpoint: The Azure AD token endpoint. (optional) Returns: - An Azure OpenAI client instance and the configured deployment name (model) + An AzureAssistantAgent instance. """ - try: - azure_openai_settings = AzureOpenAISettings.create( - api_key=api_key, - base_url=base_url, - endpoint=endpoint, - chat_deployment_name=deployment_name, - api_version=api_version, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - token_endpoint=token_scope, - ) - except ValidationError as exc: - raise AgentInitializationException(f"Failed to create Azure OpenAI settings: {exc}") from exc + azure_openai_settings = cls._create_azure_openai_settings( + api_key=api_key, + endpoint=endpoint, + deployment_name=None, # Not required for retrieving an existing assistant + api_version=api_version, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + token_endpoint=token_endpoint, + ) + + client, ad_token = cls._setup_client_and_token( + azure_openai_settings=azure_openai_settings, + ad_token=ad_token, + ad_token_provider=ad_token_provider, + client=client, + default_headers=default_headers, + ) + assistant = await client.beta.assistants.retrieve(id) + assistant_definition = OpenAIAssistantBase._create_open_ai_assistant_definition(assistant) + + return AzureAssistantAgent( + kernel=kernel, + assistant=assistant, + client=client, + ad_token=ad_token, + api_key=api_key, + endpoint=endpoint, + api_version=api_version, + default_headers=default_headers, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + token_endpoint=token_endpoint, + **assistant_definition, + ) + + @staticmethod + def _setup_client_and_token( + azure_openai_settings: AzureOpenAISettings, + ad_token: str | None, + ad_token_provider: Callable[[], str | Awaitable[str]] | None, + client: AsyncAzureOpenAI | None, + default_headers: dict[str, str] | None, + ) -> tuple[AsyncAzureOpenAI, str | None]: + """Helper method that ensures either an AD token or an API key is present. + + Retrieves a new AD token if needed, and configures the AsyncAzureOpenAI client. + + Returns: + A tuple of (client, ad_token), where client is guaranteed not to be None. 
+ """ + if not azure_openai_settings.chat_deployment_name: + raise AgentInitializationException("The Azure OpenAI chat_deployment_name is required.") + + # If everything is missing, but there is a token_endpoint, try to get the token. if ( - azure_openai_settings.api_key is None + client is None + and azure_openai_settings.api_key is None and ad_token_provider is None and ad_token is None and azure_openai_settings.token_endpoint @@ -79,32 +405,114 @@ def setup_resources( ad_token = get_entra_auth_token(azure_openai_settings.token_endpoint) # If we still have no credentials, we can't proceed - if not azure_openai_settings.api_key and not ad_token and not ad_token_provider: + if not client and not azure_openai_settings.api_key and not ad_token and not ad_token_provider: raise AgentInitializationException( - "Please provide either an api_key, ad_token or ad_token_provider for authentication." + "Please provide either a client, an api_key, ad_token or ad_token_provider." ) + # Build the client if it's not supplied + if not client: + client = AzureAssistantAgent._create_client( + api_key=azure_openai_settings.api_key.get_secret_value() if azure_openai_settings.api_key else None, + endpoint=azure_openai_settings.endpoint, + api_version=azure_openai_settings.api_version, + ad_token=ad_token, + ad_token_provider=ad_token_provider, + default_headers=default_headers, + ) + + return client, ad_token + + @staticmethod + def _create_client( + api_key: str | None = None, + endpoint: HttpsUrl | None = None, + api_version: str | None = None, + ad_token: str | None = None, + ad_token_provider: Callable[[], str | Awaitable[str]] | None = None, + default_headers: dict[str, str] | None = None, + ) -> AsyncAzureOpenAI: + """Create the OpenAI client from configuration. + + Args: + api_key: The OpenAI API key. + endpoint: The OpenAI endpoint. + api_version: The OpenAI API version. + ad_token: The Azure AD token. + ad_token_provider: The Azure AD token provider. + default_headers: The default headers. + + Returns: + An AsyncAzureOpenAI client instance. + """ merged_headers = dict(copy(default_headers)) if default_headers else {} - if default_headers: - merged_headers.update(default_headers) if APP_INFO: merged_headers.update(APP_INFO) merged_headers = prepend_semantic_kernel_to_user_agent(merged_headers) - if not azure_openai_settings.endpoint: - raise AgentInitializationException("Please provide an Azure OpenAI endpoint") - - if not azure_openai_settings.chat_deployment_name: - raise AgentInitializationException("Please provide an Azure OpenAI deployment name") + if not api_key and not ad_token and not ad_token_provider: + raise AgentInitializationException( + "Please provide either AzureOpenAI api_key, an ad_token, ad_token_provider, or a client." 
+ ) + if not endpoint: + raise AgentInitializationException("Please provide an AzureOpenAI endpoint.") - client = AsyncAzureOpenAI( - azure_endpoint=str(azure_openai_settings.endpoint), - api_version=azure_openai_settings.api_version, - api_key=azure_openai_settings.api_key.get_secret_value() if azure_openai_settings.api_key else None, + return AsyncAzureOpenAI( + azure_endpoint=str(endpoint), + api_version=api_version, + api_key=api_key, azure_ad_token=ad_token, azure_ad_token_provider=ad_token_provider, default_headers=merged_headers, - **kwargs, ) - return client, azure_openai_settings.chat_deployment_name + @staticmethod + def _create_azure_openai_settings( + api_key: str | None = None, + endpoint: HttpsUrl | None = None, + deployment_name: str | None = None, + api_version: str | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + token_endpoint: str | None = None, + ) -> AzureOpenAISettings: + """Create the Azure OpenAI settings. + + Args: + api_key: The Azure OpenAI API key. + endpoint: The Azure OpenAI endpoint. + deployment_name: The Azure OpenAI chat deployment name. + api_version: The Azure OpenAI API version. + env_file_path: The environment file path. + env_file_encoding: The environment file encoding. + token_endpoint: The Azure AD token endpoint. + + Returns: + An instance of the AzureOpenAISettings. + """ + try: + azure_openai_settings = AzureOpenAISettings.create( + api_key=api_key, + endpoint=endpoint, + chat_deployment_name=deployment_name, + api_version=api_version, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + token_endpoint=token_endpoint, + ) + except ValidationError as ex: + raise AgentInitializationException("Failed to create Azure OpenAI settings.", ex) from ex + + return azure_openai_settings + + async def list_definitions(self) -> AsyncIterable[dict[str, Any]]: + """List the assistant definitions. + + Yields: + An AsyncIterable of dictionaries representing the OpenAIAssistantDefinition. 
+ """ + assistants = await self.client.beta.assistants.list(order="desc") + for assistant in assistants.data: + yield OpenAIAssistantBase._create_open_ai_assistant_definition(assistant) + + # endregion diff --git a/python/semantic_kernel/agents/open_ai/function_action_result.py b/python/semantic_kernel/agents/open_ai/function_action_result.py index b971c8899a95..48f6eb13bf4e 100644 --- a/python/semantic_kernel/agents/open_ai/function_action_result.py +++ b/python/semantic_kernel/agents/open_ai/function_action_result.py @@ -3,17 +3,17 @@ import logging from dataclasses import dataclass -from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class @dataclass class FunctionActionResult: """Function Action Result.""" - function_call_streaming_content: StreamingChatMessageContent | None - function_result_streaming_content: StreamingChatMessageContent | None + function_call_content: ChatMessageContent | None + function_result_content: ChatMessageContent | None tool_outputs: list[dict[str, str]] | None diff --git a/python/semantic_kernel/agents/open_ai/open_ai_assistant_agent.py b/python/semantic_kernel/agents/open_ai/open_ai_assistant_agent.py index efabc6194c6c..7db30aa6f5c2 100644 --- a/python/semantic_kernel/agents/open_ai/open_ai_assistant_agent.py +++ b/python/semantic_kernel/agents/open_ai/open_ai_assistant_agent.py @@ -1,66 +1,29 @@ # Copyright (c) Microsoft. All rights reserved. import logging -import sys -from collections.abc import AsyncIterable, Iterable +from collections.abc import AsyncIterable from copy import copy -from typing import TYPE_CHECKING, Any, ClassVar, Literal - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover +from typing import TYPE_CHECKING, Any from openai import AsyncOpenAI -from openai.lib._parsing._completions import type_to_response_format_param -from openai.types.beta.assistant import Assistant -from openai.types.beta.assistant_create_params import ( - ToolResources, - ToolResourcesCodeInterpreter, - ToolResourcesFileSearch, -) -from openai.types.beta.assistant_response_format_option_param import AssistantResponseFormatOptionParam -from openai.types.beta.file_search_tool_param import FileSearchToolParam -from pydantic import BaseModel, Field, ValidationError - -from semantic_kernel.agents import Agent -from semantic_kernel.agents.channels.agent_channel import AgentChannel -from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel -from semantic_kernel.agents.open_ai.assistant_content_generation import generate_message_content -from semantic_kernel.agents.open_ai.assistant_thread_actions import AssistantThreadActions -from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions +from pydantic import ValidationError + +from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase from semantic_kernel.connectors.ai.open_ai.settings.open_ai_settings import OpenAISettings -from semantic_kernel.connectors.utils.structured_output_schema import generate_structured_output_response_format_schema -from semantic_kernel.contents.chat_message_content 
import ChatMessageContent -from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException, AgentInvokeException -from semantic_kernel.functions import KernelArguments -from semantic_kernel.functions.kernel_function import TEMPLATE_FORMAT_MAP -from semantic_kernel.functions.kernel_plugin import KernelPlugin -from semantic_kernel.schema.kernel_json_schema_builder import KernelJsonSchemaBuilder -from semantic_kernel.utils.feature_stage_decorator import release_candidate -from semantic_kernel.utils.naming import generate_random_ascii_name -from semantic_kernel.utils.telemetry.agent_diagnostics.decorators import ( - trace_agent_get_response, - trace_agent_invocation, -) +from semantic_kernel.const import DEFAULT_SERVICE_NAME +from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException +from semantic_kernel.utils.experimental_decorator import experimental_class from semantic_kernel.utils.telemetry.user_agent import APP_INFO, prepend_semantic_kernel_to_user_agent if TYPE_CHECKING: - from openai import AsyncOpenAI - from openai.types.beta.assistant_tool_param import AssistantToolParam - from openai.types.beta.code_interpreter_tool_param import CodeInterpreterToolParam - from openai.types.beta.threads.message import Message - from openai.types.beta.threads.run_create_params import TruncationStrategy - - from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.kernel import Kernel - from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig + logger: logging.Logger = logging.getLogger(__name__) -@release_candidate -class OpenAIAssistantAgent(Agent): +@experimental_class +class OpenAIAssistantAgent(OpenAIAssistantBase): """OpenAI Assistant Agent class. Provides the ability to interact with OpenAI Assistants. 
@@ -68,580 +31,396 @@ class OpenAIAssistantAgent(Agent): # region Agent Initialization - client: AsyncOpenAI - definition: Assistant - plugins: list[Any] = Field(default_factory=list) - polling_options: RunPollingOptions = Field(default_factory=RunPollingOptions) - - channel_type: ClassVar[type[AgentChannel]] = OpenAIAssistantChannel # type: ignore - def __init__( self, *, - arguments: KernelArguments | None = None, - client: AsyncOpenAI, - definition: Assistant, kernel: "Kernel | None" = None, - plugins: list[KernelPlugin | object] | dict[str, KernelPlugin | object] | None = None, - polling_options: RunPollingOptions | None = None, - prompt_template_config: "PromptTemplateConfig | None" = None, + service_id: str | None = None, + ai_model_id: str | None = None, + api_key: str | None = None, + org_id: str | None = None, + client: AsyncOpenAI | None = None, + default_headers: dict[str, str] | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + description: str | None = None, + id: str | None = None, + instructions: str | None = None, + name: str | None = None, + enable_code_interpreter: bool | None = None, + enable_file_search: bool | None = None, + enable_json_response: bool | None = None, + code_interpreter_file_ids: list[str] | None = None, + temperature: float | None = None, + top_p: float | None = None, + vector_store_id: str | None = None, + metadata: dict[str, Any] | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + parallel_tool_calls_enabled: bool | None = True, + truncation_message_count: int | None = None, **kwargs: Any, ) -> None: """Initialize an OpenAIAssistant service. Args: - arguments: The arguments to pass to the function. - client: The OpenAI client. - definition: The assistant definition. - kernel: The Kernel instance. - plugins: The plugins to add to the kernel. If both the plugins and the kernel are supplied, - the plugins take precedence and are added to the kernel by default. - polling_options: The polling options. - prompt_template_config: The prompt template configuration. + kernel: The Kernel instance. (optional) + service_id: The service ID. (optional) If not provided the default service name (default) is used. + ai_model_id: The AI model ID. (optional) + api_key: The OpenAI API key. (optional) + org_id: The OpenAI organization ID. (optional) + client: The OpenAI client. (optional) + default_headers: The default headers. (optional) + env_file_path: The environment file path. (optional) + env_file_encoding: The environment file encoding. (optional) + description: The assistant description. (optional) + id: The assistant ID. (optional) + instructions: The assistant instructions. (optional) + name: The assistant name. (optional) + enable_code_interpreter: Enable code interpreter. (optional) + enable_file_search: Enable file search. (optional) + enable_json_response: Enable JSON response. (optional) + code_interpreter_file_ids: The file IDs. (optional) + temperature: The temperature. (optional) + top_p: The top p. (optional) + vector_store_id: The vector store ID. (optional) + metadata: The assistant metadata. (optional) + max_completion_tokens: The max completion tokens. (optional) + max_prompt_tokens: The max prompt tokens. (optional) + parallel_tool_calls_enabled: Enable parallel tool calls. (optional) + truncation_message_count: The truncation message count. (optional) kwargs: Additional keyword arguments. 
+ + Raises: + AgentInitializationError: If the api_key is not provided in the configuration. """ + openai_settings = OpenAIAssistantAgent._create_open_ai_settings( + api_key=api_key, + org_id=org_id, + ai_model_id=ai_model_id, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + ) + + if not client and not openai_settings.api_key: + raise AgentInitializationException("The OpenAI API key is required, if a client is not provided.") + if not openai_settings.chat_model_id: + raise AgentInitializationException("The OpenAI chat model ID is required.") + + if not client: + client = self._create_client( + api_key=openai_settings.api_key.get_secret_value() if openai_settings.api_key else None, + org_id=openai_settings.org_id, + default_headers=default_headers, + ) + + service_id = service_id if service_id else DEFAULT_SERVICE_NAME + args: dict[str, Any] = { + "ai_model_id": openai_settings.chat_model_id, + "service_id": service_id, "client": client, - "definition": definition, - "name": definition.name or f"assistant_agent_{generate_random_ascii_name(length=8)}", - "description": definition.description, + "description": description, + "instructions": instructions, + "enable_code_interpreter": enable_code_interpreter, + "enable_file_search": enable_file_search, + "enable_json_response": enable_json_response, + "code_interpreter_file_ids": code_interpreter_file_ids or [], + "temperature": temperature, + "top_p": top_p, + "vector_store_id": vector_store_id, + "metadata": metadata or {}, + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "parallel_tool_calls_enabled": parallel_tool_calls_enabled, + "truncation_message_count": truncation_message_count, } - if arguments is not None: - args["arguments"] = arguments - if definition.id is not None: - args["id"] = definition.id - if definition.instructions is not None: - args["instructions"] = definition.instructions + if name is not None: + args["name"] = name + if id is not None: + args["id"] = id if kernel is not None: args["kernel"] = kernel - - if ( - definition.instructions - and prompt_template_config - and definition.instructions != prompt_template_config.template - ): - logger.info( - f"Both `instructions` ({definition.instructions}) and `prompt_template_config` " - f"({prompt_template_config.template}) were provided. Using template in `prompt_template_config` " - "and ignoring `instructions`." 
- ) - - if plugins is not None: - args["plugins"] = plugins - - if prompt_template_config is not None: - args["prompt_template"] = TEMPLATE_FORMAT_MAP[prompt_template_config.template_format]( - prompt_template_config=prompt_template_config - ) - if prompt_template_config.template is not None: - # Use the template from the prompt_template_config if it is provided - args["instructions"] = prompt_template_config.template - if polling_options is not None: - args["polling_options"] = polling_options if kwargs: args.update(kwargs) super().__init__(**args) - @staticmethod - def setup_resources( + @classmethod + async def create( + cls, *, + kernel: "Kernel | None" = None, + service_id: str | None = None, ai_model_id: str | None = None, api_key: str | None = None, org_id: str | None = None, + client: AsyncOpenAI | None = None, + default_headers: dict[str, str] | None = None, env_file_path: str | None = None, env_file_encoding: str | None = None, - default_headers: dict[str, str] | None = None, + description: str | None = None, + id: str | None = None, + instructions: str | None = None, + name: str | None = None, + enable_code_interpreter: bool | None = None, + code_interpreter_filenames: list[str] | None = None, + code_interpreter_file_ids: list[str] | None = None, + enable_file_search: bool | None = None, + vector_store_filenames: list[str] | None = None, + vector_store_file_ids: list[str] | None = None, + enable_json_response: bool | None = None, + temperature: float | None = None, + top_p: float | None = None, + vector_store_id: str | None = None, + metadata: dict[str, Any] | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + parallel_tool_calls_enabled: bool | None = True, + truncation_message_count: int | None = None, **kwargs: Any, - ) -> tuple[AsyncOpenAI, str]: - """A method to create the OpenAI client and the model from the provided arguments. - - Any arguments provided will override the values in the environment variables/environment file. + ) -> "OpenAIAssistantAgent": + """Asynchronous class method used to create the OpenAI Assistant Agent. Args: - ai_model_id: The AI model ID - api_key: The API key - org_id: The organization ID - env_file_path: The environment file path - env_file_encoding: The environment file encoding, defaults to utf-8 - default_headers: The default headers to add to the client - kwargs: Additional keyword arguments + kernel: The Kernel instance. (optional) + service_id: The service ID. (optional) If not provided the default service name (default) is used. + ai_model_id: The AI model ID. (optional) + api_key: The OpenAI API key. (optional) + org_id: The OpenAI organization ID. (optional) + client: The OpenAI client. (optional) + default_headers: The default headers. (optional) + env_file_path: The environment file path. (optional) + env_file_encoding: The environment file encoding. (optional) + description: The assistant description. (optional) + id: The assistant ID. (optional) + instructions: The assistant instructions. (optional) + name: The assistant name. (optional) + enable_code_interpreter: Enable code interpreter. (optional) + code_interpreter_filenames: The filenames/paths for files to use with code interpreter. (optional) + code_interpreter_file_ids: The existing file IDs to use with the code interpreter. (optional) + enable_file_search: Enable the file search. (optional) + vector_store_filenames: The filenames/paths for files to use with file search. 
(optional) + vector_store_file_ids: The existing file IDs to use with file search. (optional) + enable_json_response: Enable JSON response. (optional) + temperature: The temperature. (optional) + top_p: The top p. (optional) + vector_store_id: The vector store ID. (optional) + metadata: The assistant metadata. (optional) + max_completion_tokens: The max completion tokens. (optional) + max_prompt_tokens: The max prompt tokens. (optional) + parallel_tool_calls_enabled: Enable parallel tool calls. (optional) + truncation_message_count: The truncation message count. (optional) + kwargs: Additional keyword arguments. Returns: - An OpenAI client instance and the configured model name + An OpenAIAssistantAgent instance. """ - try: - openai_settings = OpenAISettings.create( - chat_model_id=ai_model_id, - api_key=api_key, - org_id=org_id, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - ) - except ValidationError as ex: - raise AgentInitializationException("Failed to create OpenAI settings.", ex) from ex - - if not openai_settings.api_key: - raise AgentInitializationException("The OpenAI API key is required.") - - if not openai_settings.chat_model_id: - raise AgentInitializationException("The OpenAI model ID is required.") - - merged_headers = dict(copy(default_headers)) if default_headers else {} - if default_headers: - merged_headers.update(default_headers) - if APP_INFO: - merged_headers.update(APP_INFO) - merged_headers = prepend_semantic_kernel_to_user_agent(merged_headers) - - client = AsyncOpenAI( - api_key=openai_settings.api_key.get_secret_value() if openai_settings.api_key else None, - organization=openai_settings.org_id, - default_headers=merged_headers, + agent = cls( + kernel=kernel, + service_id=service_id, + ai_model_id=ai_model_id, + api_key=api_key, + org_id=org_id, + client=client, + default_headers=default_headers, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + description=description, + id=id, + instructions=instructions, + name=name, + enable_code_interpreter=enable_code_interpreter, + enable_file_search=enable_file_search, + enable_json_response=enable_json_response, + temperature=temperature, + top_p=top_p, + vector_store_id=vector_store_id, + metadata=metadata or {}, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + parallel_tool_calls_enabled=parallel_tool_calls_enabled, + truncation_message_count=truncation_message_count, **kwargs, ) - return client, openai_settings.chat_model_id + assistant_create_kwargs: dict[str, Any] = {} - # endregion + code_interpreter_file_ids_combined: list[str] = [] - # region Tool Handling - - @staticmethod - def configure_code_interpreter_tool( - file_ids: str | list[str] | None = None, **kwargs: Any - ) -> tuple[list["CodeInterpreterToolParam"], ToolResources]: - """Generate tool + tool_resources for the code_interpreter.""" - if isinstance(file_ids, str): - file_ids = [file_ids] - tool: "CodeInterpreterToolParam" = {"type": "code_interpreter"} - resources: ToolResources = {} - if file_ids: - resources["code_interpreter"] = ToolResourcesCodeInterpreter(file_ids=file_ids) - return [tool], resources + if code_interpreter_file_ids is not None: + code_interpreter_file_ids_combined.extend(code_interpreter_file_ids) - @staticmethod - def configure_file_search_tool( - vector_store_ids: str | list[str], **kwargs: Any - ) -> tuple[list[FileSearchToolParam], ToolResources]: - """Generate tool + tool_resources for the file_search.""" - if isinstance(vector_store_ids, 
str): - vector_store_ids = [vector_store_ids] - - tool: FileSearchToolParam = { - "type": "file_search", - } - resources: ToolResources = {"file_search": ToolResourcesFileSearch(vector_store_ids=vector_store_ids, **kwargs)} # type: ignore - return [tool], resources - - @staticmethod - def configure_response_format( - response_format: dict[Literal["type"], Literal["text", "json_object"]] - | dict[str, Any] - | type[BaseModel] - | type - | AssistantResponseFormatOptionParam - | None = None, - ) -> AssistantResponseFormatOptionParam | None: - """Form the response format. - - "auto" is the default value. Not configuring the response format will result in the model - outputting text. - - Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - Outputs which ensures the model will match your supplied JSON schema. Learn more - in the [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). - - Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - message the model generates is valid JSON, as long as the prompt contains "JSON." - - Args: - response_format: The response format. - - Returns: - AssistantResponseFormatOptionParam: The response format. - """ - if response_format is None or response_format == "auto": - return None - - configured_response_format = None - if isinstance(response_format, dict): - resp_type = response_format.get("type") - if resp_type == "json_object": - configured_response_format = {"type": "json_object"} - elif resp_type == "json_schema": - json_schema = response_format.get("json_schema") # type: ignore - if not isinstance(json_schema, dict): - raise AgentInitializationException( - "If response_format has type 'json_schema', 'json_schema' must be a valid dictionary." + if code_interpreter_filenames is not None: + for file_path in code_interpreter_filenames: + try: + file_id = await agent.add_file(file_path=file_path, purpose="assistants") + code_interpreter_file_ids_combined.append(file_id) + except FileNotFoundError as ex: + logger.error( + f"Failed to upload code interpreter file with path: `{file_path}` with exception: {ex}" ) - # We're assuming the response_format has already been provided in the correct format - configured_response_format = response_format # type: ignore - else: - raise AgentInitializationException( - f"Encountered unexpected response_format type: {resp_type}. Allowed types are `json_object` " - " and `json_schema`." - ) - elif isinstance(response_format, type): - # If it's a type, differentiate based on whether it's a BaseModel subclass - if issubclass(response_format, BaseModel): - configured_response_format = type_to_response_format_param(response_format) # type: ignore - else: - generated_schema = KernelJsonSchemaBuilder.build(parameter_type=response_format, structured_output=True) - assert generated_schema is not None # nosec - configured_response_format = generate_structured_output_response_format_schema( - name=response_format.__name__, schema=generated_schema - ) - else: - # If it's not a dict or a type, throw an exception - raise AgentInitializationException( - "response_format must be a dictionary, a subclass of BaseModel, a Python class/type, or None" - ) - - return configured_response_format # type: ignore + raise AgentInitializationException("Failed to upload code interpreter files.", ex) from ex - # endregion - - # region Agent Channel Methods - - def get_channel_keys(self) -> Iterable[str]: - """Get the channel keys. 
- - Returns: - Iterable[str]: The channel keys. - """ - # Distinguish from other channel types. - yield f"{OpenAIAssistantAgent.__name__}" + if code_interpreter_file_ids_combined: + agent.code_interpreter_file_ids = code_interpreter_file_ids_combined + assistant_create_kwargs["code_interpreter_file_ids"] = code_interpreter_file_ids_combined - # Distinguish between different agent IDs - yield self.id + vector_store_file_ids_combined: list[str] = [] - # Distinguish between agent names - yield self.name + if vector_store_file_ids is not None: + vector_store_file_ids_combined.extend(vector_store_file_ids) - # Distinguish between different API base URLs - yield str(self.client.base_url) + if vector_store_filenames is not None: + for file_path in vector_store_filenames: + try: + file_id = await agent.add_file(file_path=file_path, purpose="assistants") + vector_store_file_ids_combined.append(file_id) + except FileNotFoundError as ex: + logger.error(f"Failed to upload vector store file with path: `{file_path}` with exception: {ex}") + raise AgentInitializationException("Failed to upload vector store files.", ex) from ex - async def create_channel(self) -> AgentChannel: - """Create a channel.""" - thread = await self.client.beta.threads.create() + if vector_store_file_ids_combined: + agent.file_search_file_ids = vector_store_file_ids_combined + if enable_file_search or agent.enable_file_search: + vector_store_id = await agent.create_vector_store(file_ids=vector_store_file_ids_combined) + agent.vector_store_id = vector_store_id + assistant_create_kwargs["vector_store_id"] = vector_store_id - return OpenAIAssistantChannel(client=self.client, thread_id=thread.id) + agent.assistant = await agent.create_assistant(**assistant_create_kwargs) + return agent - # endregion - - # region Message Handling - - async def add_chat_message( - self, thread_id: str, message: "str | ChatMessageContent", **kwargs: Any - ) -> "Message | None": - """Add a chat message to the thread. + @staticmethod + def _create_client( + api_key: str | None = None, org_id: str | None = None, default_headers: dict[str, str] | None = None + ) -> AsyncOpenAI: + """An internal method to create the OpenAI client from the provided arguments. Args: - thread_id: The ID of the thread - message: The chat message to add - kwargs: Additional keyword arguments + api_key: The OpenAI API key. + org_id: The OpenAI organization ID. (optional) + default_headers: The default headers. (optional) Returns: - The thread message or None + An OpenAI client instance. """ - return await AssistantThreadActions.create_message( - client=self.client, thread_id=thread_id, message=message, **kwargs - ) - - async def get_thread_messages(self, thread_id: str) -> AsyncIterable["ChatMessageContent"]: - """Get the messages for the specified thread. - - Args: - thread_id: The thread id. - - Yields: - ChatMessageContent: The chat message. 
- """ - agent_names: dict[str, Any] = {} - - thread_messages = await self.client.beta.threads.messages.list(thread_id=thread_id, limit=100, order="desc") - for message in thread_messages.data: - assistant_name = None - if message.assistant_id and message.assistant_id not in agent_names: - agent = await self.client.beta.assistants.retrieve(message.assistant_id) - if agent.name: - agent_names[message.assistant_id] = agent.name - assistant_name = agent_names.get(message.assistant_id) if message.assistant_id else message.assistant_id - assistant_name = assistant_name or message.assistant_id - - content: "ChatMessageContent" = generate_message_content(str(assistant_name), message) - - if len(content.items) > 0: - yield content + merged_headers = dict(copy(default_headers)) if default_headers else {} + if default_headers: + merged_headers.update(default_headers) + if APP_INFO: + merged_headers.update(APP_INFO) + merged_headers = prepend_semantic_kernel_to_user_agent(merged_headers) - # endregion + if not api_key: + raise AgentInitializationException("Please provide an OpenAI api_key") - # region Invocation Methods + return AsyncOpenAI( + api_key=api_key, + organization=org_id, + default_headers=merged_headers, + ) - @trace_agent_get_response - @override - async def get_response( - self, - thread_id: str, - *, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - # Run-level parameters: - additional_instructions: str | None = None, - additional_messages: list[ChatMessageContent] | None = None, - instructions_override: str | None = None, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - metadata: dict[str, str] | None = None, - model: str | None = None, - parallel_tool_calls: bool | None = None, - reasoning_effort: Literal["low", "medium", "high"] | None = None, - response_format: "AssistantResponseFormatOptionParam | None" = None, - tools: "list[AssistantToolParam] | None" = None, - temperature: float | None = None, - top_p: float | None = None, - truncation_strategy: "TruncationStrategy | None" = None, - **kwargs: Any, - ) -> ChatMessageContent: - """Get a response from the agent on a thread. + @staticmethod + def _create_open_ai_settings( + api_key: str | None = None, + org_id: str | None = None, + ai_model_id: str | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + ) -> OpenAISettings: + """An internal method to create the OpenAI settings from the provided arguments. Args: - thread_id: The ID of the thread. - arguments: The kernel arguments. - kernel: The kernel. - instructions_override: The instructions override. - additional_instructions: Additional instructions. - additional_messages: Additional messages. - max_completion_tokens: The maximum completion tokens. - max_prompt_tokens: The maximum prompt tokens. - metadata: The metadata. - model: The model. - parallel_tool_calls: Parallel tool calls. - reasoning_effort: The reasoning effort. - response_format: The response format. - tools: The tools. - temperature: The temperature. - top_p: The top p. - truncation_strategy: The truncation strategy. - kwargs: Additional keyword arguments. + api_key: The OpenAI API key. + org_id: The OpenAI organization ID. (optional) + ai_model_id: The AI model ID. (optional) + env_file_path: The environment file path. (optional) + env_file_encoding: The environment file encoding. (optional) Returns: - ChatMessageContent: The response from the agent. + An OpenAI settings instance. 
""" - if arguments is None: - arguments = KernelArguments(**kwargs) - else: - arguments.update(kwargs) - - kernel = kernel or self.kernel - arguments = self._merge_arguments(arguments) - - run_level_params = { - "additional_instructions": additional_instructions, - "additional_messages": additional_messages, - "instructions_override": instructions_override, - "max_completion_tokens": max_completion_tokens, - "max_prompt_tokens": max_prompt_tokens, - "metadata": metadata, - "model": model, - "parallel_tool_calls": parallel_tool_calls, - "reasoning_effort": reasoning_effort, - "response_format": response_format, - "temperature": temperature, - "tools": tools, - "top_p": top_p, - "truncation_strategy": truncation_strategy, - } - run_level_params = {k: v for k, v in run_level_params.items() if v is not None} + try: + openai_settings = OpenAISettings.create( + api_key=api_key, + org_id=org_id, + chat_model_id=ai_model_id, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + ) + except ValidationError as ex: + raise AgentInitializationException("Failed to create OpenAI settings.", ex) from ex - messages: list[ChatMessageContent] = [] - async for is_visible, message in AssistantThreadActions.invoke( - agent=self, - thread_id=thread_id, - kernel=kernel, - arguments=arguments, - **run_level_params, # type: ignore - ): - if is_visible and message.metadata.get("code") is not True: - messages.append(message) - - if not messages: - raise AgentInvokeException("No response messages were returned from the agent.") - return messages[-1] - - @trace_agent_invocation - @override - async def invoke( - self, - thread_id: str, - *, - arguments: KernelArguments | None = None, - kernel: "Kernel | None" = None, - # Run-level parameters: - additional_instructions: str | None = None, - additional_messages: list[ChatMessageContent] | None = None, - instructions_override: str | None = None, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - metadata: dict[str, str] | None = None, - model: str | None = None, - parallel_tool_calls: bool | None = None, - reasoning_effort: Literal["low", "medium", "high"] | None = None, - response_format: "AssistantResponseFormatOptionParam | None" = None, - tools: "list[AssistantToolParam] | None" = None, - temperature: float | None = None, - top_p: float | None = None, - truncation_strategy: "TruncationStrategy | None" = None, - **kwargs: Any, - ) -> AsyncIterable[ChatMessageContent]: - """Invoke the agent. + return openai_settings - Args: - thread_id: The ID of the thread. - arguments: The kernel arguments. - kernel: The kernel. - instructions_override: The instructions override. - additional_instructions: Additional instructions. - additional_messages: Additional messages. - max_completion_tokens: The maximum completion tokens. - max_prompt_tokens: The maximum prompt tokens. - metadata: The metadata. - model: The model. - parallel_tool_calls: Parallel tool calls. - reasoning_effort: The reasoning effort. - response_format: The response format. - tools: The tools. - temperature: The temperature. - top_p: The top p. - truncation_strategy: The truncation strategy. - kwargs: Additional keyword arguments. + async def list_definitions(self) -> AsyncIterable[dict[str, Any]]: + """List the assistant definitions. Yields: - The chat message content. + An AsyncIterable of dictionaries representing the OpenAIAssistantDefinition. 
""" - if arguments is None: - arguments = KernelArguments(**kwargs) - else: - arguments.update(kwargs) - - kernel = kernel or self.kernel - arguments = self._merge_arguments(arguments) - - run_level_params = { - "additional_instructions": additional_instructions, - "additional_messages": additional_messages, - "instructions_override": instructions_override, - "max_completion_tokens": max_completion_tokens, - "max_prompt_tokens": max_prompt_tokens, - "metadata": metadata, - "model": model, - "parallel_tool_calls": parallel_tool_calls, - "reasoning_effort": reasoning_effort, - "response_format": response_format, - "temperature": temperature, - "tools": tools, - "top_p": top_p, - "truncation_strategy": truncation_strategy, - } - run_level_params = {k: v for k, v in run_level_params.items() if v is not None} + assistants = await self.client.beta.assistants.list(order="desc") + for assistant in assistants.data: + yield OpenAIAssistantBase._create_open_ai_assistant_definition(assistant) - async for is_visible, message in AssistantThreadActions.invoke( - agent=self, - thread_id=thread_id, - kernel=kernel, - arguments=arguments, - **run_level_params, # type: ignore - ): - if is_visible: - yield message - - @trace_agent_invocation - @override - async def invoke_stream( - self, - thread_id: str, + @classmethod + async def retrieve( + cls, *, - arguments: KernelArguments | None = None, + id: str, kernel: "Kernel | None" = None, - # Run-level parameters: - additional_instructions: str | None = None, - additional_messages: list[ChatMessageContent] | None = None, - instructions_override: str | None = None, - max_completion_tokens: int | None = None, - max_prompt_tokens: int | None = None, - messages: list[ChatMessageContent] | None = None, - metadata: dict[str, str] | None = None, - model: str | None = None, - parallel_tool_calls: bool | None = None, - reasoning_effort: Literal["low", "medium", "high"] | None = None, - response_format: "AssistantResponseFormatOptionParam | None" = None, - tools: "list[AssistantToolParam] | None" = None, - temperature: float | None = None, - top_p: float | None = None, - truncation_strategy: "TruncationStrategy | None" = None, - **kwargs: Any, - ) -> AsyncIterable["StreamingChatMessageContent"]: - """Invoke the agent. + api_key: str | None = None, + org_id: str | None = None, + ai_model_id: str | None = None, + client: AsyncOpenAI | None = None, + default_headers: dict[str, str] | None = None, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + ) -> "OpenAIAssistantAgent": + """Retrieve an assistant by ID. Args: - thread_id: The ID of the thread. - arguments: The kernel arguments. - kernel: The kernel. - instructions_override: The instructions override. - additional_instructions: Additional instructions. - additional_messages: Additional messages. - max_completion_tokens: The maximum completion tokens. - max_prompt_tokens: The maximum prompt tokens. - messages: The messages that act as a receiver for completed messages. - metadata: The metadata. - model: The model. - parallel_tool_calls: Parallel tool calls. - reasoning_effort: The reasoning effort. - response_format: The response format. - tools: The tools. - temperature: The temperature. - top_p: The top p. - truncation_strategy: The truncation strategy. - kwargs: Additional keyword arguments. + id: The assistant ID. + kernel: The Kernel instance. (optional) + api_key: The OpenAI API key. (optional) + org_id: The OpenAI organization ID. (optional) + ai_model_id: The AI model ID. 
(optional) + client: The OpenAI client. (optional) + default_headers: The default headers. (optional) + env_file_path: The environment file path. (optional) + env_file_encoding: The environment file encoding. (optional - Yields: - The chat message content. + Returns: + An OpenAIAssistantAgent instance. """ - if arguments is None: - arguments = KernelArguments(**kwargs) - else: - arguments.update(kwargs) - - kernel = kernel or self.kernel - arguments = self._merge_arguments(arguments) - - run_level_params = { - "additional_instructions": additional_instructions, - "additional_messages": additional_messages, - "instructions_override": instructions_override, - "max_completion_tokens": max_completion_tokens, - "max_prompt_tokens": max_prompt_tokens, - "metadata": metadata, - "model": model, - "parallel_tool_calls": parallel_tool_calls, - "reasoning_effort": reasoning_effort, - "response_format": response_format, - "temperature": temperature, - "tools": tools, - "top_p": top_p, - "truncation_strategy": truncation_strategy, - } - run_level_params = {k: v for k, v in run_level_params.items() if v is not None} - - async for message in AssistantThreadActions.invoke_stream( - agent=self, - thread_id=thread_id, + openai_settings = OpenAIAssistantAgent._create_open_ai_settings( + api_key=api_key, + org_id=org_id, + ai_model_id=ai_model_id, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + ) + if not client and not openai_settings.api_key: + raise AgentInitializationException("The OpenAI API key is required, if a client is not provided.") + if not openai_settings.chat_model_id: + raise AgentInitializationException("The OpenAI chat model ID is required.") + if not client: + client = OpenAIAssistantAgent._create_client( + api_key=openai_settings.api_key.get_secret_value() if openai_settings.api_key else None, + org_id=openai_settings.org_id, + default_headers=default_headers, + ) + assistant = await client.beta.assistants.retrieve(id) + assistant_definition = OpenAIAssistantBase._create_open_ai_assistant_definition(assistant) + return OpenAIAssistantAgent( kernel=kernel, - arguments=arguments, - messages=messages, - **run_level_params, # type: ignore - ): - yield message + assistant=assistant, + client=client, + api_key=api_key, + default_headers=default_headers, + env_file_path=env_file_path, + env_file_encoding=env_file_encoding, + **assistant_definition, + ) # endregion diff --git a/python/semantic_kernel/agents/open_ai/open_ai_assistant_base.py b/python/semantic_kernel/agents/open_ai/open_ai_assistant_base.py new file mode 100644 index 000000000000..3b072043751c --- /dev/null +++ b/python/semantic_kernel/agents/open_ai/open_ai_assistant_base.py @@ -0,0 +1,1300 @@ +# Copyright (c) Microsoft. All rights reserved. 
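
The creation flow introduced above (upload code-interpreter and vector-store files, create a vector store when file search is enabled, then create the assistant) can be driven end to end roughly as follows. This is a minimal sketch, not code from this change: it assumes OPENAI_API_KEY is available in the environment, and the model id, service id, instructions, and file path are placeholders chosen for illustration.

    import asyncio

    from semantic_kernel import Kernel
    from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent
    from semantic_kernel.contents.chat_message_content import ChatMessageContent
    from semantic_kernel.contents.utils.author_role import AuthorRole


    async def main() -> None:
        kernel = Kernel()

        # Create a new assistant; `create` uploads the listed files, builds a
        # vector store when file search is enabled, and then creates the assistant.
        agent = await OpenAIAssistantAgent.create(
            kernel=kernel,
            service_id="assistant",                 # placeholder service id
            ai_model_id="gpt-4o",                   # placeholder model id
            name="FileSearchAgent",
            instructions="Answer questions using the uploaded documents.",
            enable_file_search=True,
            vector_store_filenames=["./docs/handbook.pdf"],  # placeholder path
        )

        thread_id = await agent.create_thread()
        await agent.add_chat_message(
            thread_id=thread_id,
            message=ChatMessageContent(role=AuthorRole.USER, content="Summarize the uploaded document."),
        )
        async for message in agent.invoke(thread_id=thread_id):
            print(message.content)

        # An existing assistant can be re-attached later by id.
        same_agent = await OpenAIAssistantAgent.retrieve(id=agent.assistant.id, kernel=kernel)
        print(same_agent.name)

        await agent.delete_thread(thread_id)
        await agent.delete()


    asyncio.run(main())
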
+ +import asyncio +import json +import logging +from collections.abc import AsyncIterable, Iterable +from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Literal + +from openai import AsyncOpenAI +from openai.resources.beta.assistants import Assistant +from openai.resources.beta.threads.messages import Message +from openai.resources.beta.threads.runs.runs import Run +from openai.types.beta.assistant_tool import CodeInterpreterTool, FileSearchTool +from openai.types.beta.threads.runs import RunStep +from pydantic import Field + +from semantic_kernel.agents import Agent +from semantic_kernel.agents.channels.agent_channel import AgentChannel +from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel +from semantic_kernel.agents.open_ai.assistant_content_generation import ( + create_chat_message, + generate_code_interpreter_content, + generate_function_call_content, + generate_function_result_content, + generate_message_content, + generate_streaming_code_interpreter_content, + generate_streaming_function_content, + generate_streaming_message_content, + get_function_call_contents, + get_message_contents, +) +from semantic_kernel.agents.open_ai.function_action_result import FunctionActionResult +from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions +from semantic_kernel.connectors.ai.function_calling_utils import ( + kernel_function_metadata_to_function_call_format, + merge_function_results, +) +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions.agent_exceptions import ( + AgentExecutionException, + AgentFileNotFoundException, + AgentInitializationException, + AgentInvokeException, +) +from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.telemetry.agent_diagnostics.decorators import trace_agent_invocation + +if TYPE_CHECKING: + from semantic_kernel.contents.chat_history import ChatHistory + from semantic_kernel.contents.chat_message_content import ChatMessageContent + from semantic_kernel.contents.function_call_content import FunctionCallContent + from semantic_kernel.kernel import Kernel + +logger: logging.Logger = logging.getLogger(__name__) + + +@experimental_class +class OpenAIAssistantBase(Agent): + """OpenAI Assistant Base class. + + Manages the interaction with OpenAI Assistants. + """ + + _options_metadata_key: ClassVar[str] = "__run_options" + + ai_model_id: str + client: AsyncOpenAI + assistant: Assistant | None = None + polling_options: RunPollingOptions = Field(default_factory=RunPollingOptions) + enable_code_interpreter: bool | None = False + enable_file_search: bool | None = False + enable_json_response: bool | None = False + code_interpreter_file_ids: Annotated[list[str] | None, Field(max_length=20)] = Field(default_factory=list) # type: ignore + file_search_file_ids: Annotated[ + list[str] | None, + Field( + description="There is a limit of 10000 files when using Azure Assistants API, " + "the OpenAI docs state no limit, hence this is not checked." 
+ ), + ] = Field(default_factory=list) # type: ignore + temperature: float | None = None + top_p: float | None = None + vector_store_id: str | None = None + metadata: Annotated[dict[str, Any] | None, Field(max_length=20)] = Field(default_factory=dict) # type: ignore + max_completion_tokens: int | None = None + max_prompt_tokens: int | None = None + parallel_tool_calls_enabled: bool | None = True + truncation_message_count: int | None = None + + allowed_message_roles: ClassVar[list[str]] = [AuthorRole.USER, AuthorRole.ASSISTANT] + polling_status: ClassVar[list[str]] = ["queued", "in_progress", "cancelling"] + error_message_states: ClassVar[list[str]] = ["failed", "cancelled", "expired", "incomplete"] + + channel_type: ClassVar[type[AgentChannel]] = OpenAIAssistantChannel + + _is_deleted: bool = False + + # region Assistant Initialization + + def __init__( + self, + ai_model_id: str, + client: AsyncOpenAI, + service_id: str, + *, + kernel: "Kernel | None" = None, + id: str | None = None, + name: str | None = None, + description: str | None = None, + instructions: str | None = None, + enable_code_interpreter: bool | None = None, + enable_file_search: bool | None = None, + enable_json_response: bool | None = None, + code_interpreter_file_ids: list[str] | None = None, + temperature: float | None = None, + top_p: float | None = None, + vector_store_id: str | None = None, + metadata: dict[str, Any] | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + parallel_tool_calls_enabled: bool | None = True, + truncation_message_count: int | None = None, + **kwargs: Any, + ) -> None: + """Initialize an OpenAIAssistant Base. + + Args: + ai_model_id: The AI model id. Defaults to None. + client: The client, either AsyncOpenAI or AsyncAzureOpenAI. + service_id: The service id. + kernel: The kernel. (optional) + id: The id. Defaults to None. (optional) + name: The name. Defaults to None. (optional) + description: The description. Defaults to None. (optional) + default_headers: The default headers. Defaults to None. (optional) + instructions: The instructions. Defaults to None. (optional) + enable_code_interpreter: Enable code interpreter. Defaults to False. (optional) + enable_file_search: Enable file search. Defaults to False. (optional) + enable_json_response: Enable JSON response. Defaults to False. (optional) + code_interpreter_file_ids: The file ids. Defaults to []. (optional) + temperature: The temperature. Defaults to None. (optional) + top_p: The top p. Defaults to None. (optional) + vector_store_id: The vector store id. Defaults to None. (optional) + metadata: The metadata. Defaults to {}. (optional) + max_completion_tokens: The max completion tokens. Defaults to None. (optional) + max_prompt_tokens: The max prompt tokens. Defaults to None. (optional) + parallel_tool_calls_enabled: Enable parallel tool calls. Defaults to True. (optional) + truncation_message_count: The truncation message count. Defaults to None. (optional) + kwargs: The keyword arguments. 
+ """ + args: dict[str, Any] = {} + + args = { + "ai_model_id": ai_model_id, + "client": client, + "service_id": service_id, + "instructions": instructions, + "description": description, + "enable_code_interpreter": enable_code_interpreter, + "enable_file_search": enable_file_search, + "enable_json_response": enable_json_response, + "code_interpreter_file_ids": code_interpreter_file_ids or [], + "temperature": temperature, + "top_p": top_p, + "vector_store_id": vector_store_id, + "metadata": metadata or {}, + "max_completion_tokens": max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens, + "parallel_tool_calls_enabled": parallel_tool_calls_enabled, + "truncation_message_count": truncation_message_count, + } + + if name is not None: + args["name"] = name + if id is not None: + args["id"] = id + if kernel is not None: + args["kernel"] = kernel + if kwargs: + args.update(kwargs) + + super().__init__(**args) + + async def create_assistant( + self, + ai_model_id: str | None = None, + description: str | None = None, + instructions: str | None = None, + name: str | None = None, + enable_code_interpreter: bool | None = None, + code_interpreter_file_ids: list[str] | None = None, + enable_file_search: bool | None = None, + vector_store_id: str | None = None, + metadata: dict[str, str] | None = None, + **kwargs: Any, + ) -> "Assistant": + """Create the assistant. + + Args: + ai_model_id: The AI model id. Defaults to None. (optional) + description: The description. Defaults to None. (optional) + instructions: The instructions. Defaults to None. (optional) + name: The name. Defaults to None. (optional) + enable_code_interpreter: Enable code interpreter. Defaults to None. (optional) + enable_file_search: Enable file search. Defaults to None. (optional) + code_interpreter_file_ids: The file ids. Defaults to None. (optional) + vector_store_id: The vector store id. Defaults to None. (optional) + metadata: The metadata. Defaults to None. (optional) + kwargs: Extra keyword arguments. 
+ + Returns: + Assistant: The assistant + """ + create_assistant_kwargs: dict[str, Any] = {} + + if ai_model_id is not None: + create_assistant_kwargs["model"] = ai_model_id + elif self.ai_model_id: + create_assistant_kwargs["model"] = self.ai_model_id + + if description is not None: + create_assistant_kwargs["description"] = description + elif self.description: + create_assistant_kwargs["description"] = self.description + + if instructions is not None: + create_assistant_kwargs["instructions"] = instructions + elif self.instructions: + create_assistant_kwargs["instructions"] = self.instructions + + if name is not None: + create_assistant_kwargs["name"] = name + elif self.name: + create_assistant_kwargs["name"] = self.name + + tools = [] + if enable_code_interpreter is not None: + if enable_code_interpreter: + tools.append({"type": "code_interpreter"}) + elif self.enable_code_interpreter: + tools.append({"type": "code_interpreter"}) + + if enable_file_search is not None: + if enable_file_search: + tools.append({"type": "file_search"}) + elif self.enable_file_search: + tools.append({"type": "file_search"}) + + if tools: + create_assistant_kwargs["tools"] = tools + + tool_resources = {} + if code_interpreter_file_ids is not None: + tool_resources["code_interpreter"] = {"file_ids": code_interpreter_file_ids} + elif self.code_interpreter_file_ids: + tool_resources["code_interpreter"] = {"file_ids": self.code_interpreter_file_ids} + + if vector_store_id is not None: + tool_resources["file_search"] = {"vector_store_ids": [vector_store_id]} + elif self.vector_store_id: + tool_resources["file_search"] = {"vector_store_ids": [self.vector_store_id]} + + if tool_resources: + create_assistant_kwargs["tool_resources"] = tool_resources + + if metadata: + create_assistant_kwargs["metadata"] = metadata + elif self.metadata: + create_assistant_kwargs["metadata"] = self.metadata + + if kwargs: + create_assistant_kwargs.update(kwargs) + + execution_settings: dict[str, Any] = {} + if self.max_completion_tokens: + execution_settings["max_completion_tokens"] = self.max_completion_tokens + + if self.max_prompt_tokens: + execution_settings["max_prompt_tokens"] = self.max_prompt_tokens + + if self.top_p is not None: + execution_settings["top_p"] = self.top_p + create_assistant_kwargs["top_p"] = self.top_p + + if self.temperature is not None: + execution_settings["temperature"] = self.temperature + create_assistant_kwargs["temperature"] = self.temperature + + if self.parallel_tool_calls_enabled: + execution_settings["parallel_tool_calls_enabled"] = self.parallel_tool_calls_enabled + + if self.truncation_message_count: + execution_settings["truncation_message_count"] = self.truncation_message_count + + if execution_settings: + if "metadata" not in create_assistant_kwargs: + create_assistant_kwargs["metadata"] = {} + if self._options_metadata_key not in create_assistant_kwargs["metadata"]: + create_assistant_kwargs["metadata"][self._options_metadata_key] = {} + create_assistant_kwargs["metadata"][self._options_metadata_key] = json.dumps(execution_settings) + + self.assistant = await self.client.beta.assistants.create( + **create_assistant_kwargs, + ) + + if self._is_deleted: + self._is_deleted = False + + return self.assistant + + async def modify_assistant(self, assistant_id: str, **kwargs: Any) -> "Assistant": + """Modify the assistant. + + Args: + assistant_id: The assistant's current ID. + kwargs: Extra keyword arguments. + + Returns: + Assistant: The modified assistant. 
+ """ + if self.assistant is None: + raise AgentInitializationException("The assistant has not been created.") + + modified_assistant = await self.client.beta.assistants.update(assistant_id=assistant_id, **kwargs) + self.assistant = modified_assistant + return self.assistant + + @classmethod + def _create_open_ai_assistant_definition(cls, assistant: "Assistant") -> dict[str, Any]: + """Create an OpenAI Assistant Definition from the provided assistant dictionary. + + Args: + assistant: The assistant dictionary. + + Returns: + An OpenAI Assistant Definition. + """ + execution_settings = {} + if isinstance(assistant.metadata, dict) and OpenAIAssistantBase._options_metadata_key in assistant.metadata: + settings_data = assistant.metadata[OpenAIAssistantBase._options_metadata_key] + if isinstance(settings_data, str): + settings_data = json.loads(settings_data) + assistant.metadata[OpenAIAssistantBase._options_metadata_key] = settings_data + execution_settings = {key: value for key, value in settings_data.items()} + + file_ids: list[str] = [] + vector_store_id = None + + tool_resources = getattr(assistant, "tool_resources", None) + if tool_resources: + if hasattr(tool_resources, "code_interpreter") and tool_resources.code_interpreter: + file_ids = getattr(tool_resources.code_interpreter, "code_interpreter_file_ids", []) + + if hasattr(tool_resources, "file_search") and tool_resources.file_search: + vector_store_ids = getattr(tool_resources.file_search, "vector_store_ids", []) + if vector_store_ids: + vector_store_id = vector_store_ids[0] + + enable_json_response = ( + hasattr(assistant, "response_format") + and assistant.response_format is not None + and getattr(assistant.response_format, "type", "") == "json_object" + ) + + enable_code_interpreter = any(isinstance(tool, CodeInterpreterTool) for tool in assistant.tools) + enable_file_search = any(isinstance(tool, FileSearchTool) for tool in assistant.tools) + + return { + "ai_model_id": assistant.model, + "description": assistant.description, + "id": assistant.id, + "instructions": assistant.instructions, + "name": assistant.name, + "enable_code_interpreter": enable_code_interpreter, + "enable_file_search": enable_file_search, + "enable_json_response": enable_json_response, + "code_interpreter_file_ids": file_ids, + "temperature": assistant.temperature, + "top_p": assistant.top_p, + "vector_store_id": vector_store_id if vector_store_id else None, + "metadata": assistant.metadata, + **execution_settings, + } + + # endregion + + # region Agent Properties + + @property + def tools(self) -> list[dict[str, str]]: + """The tools. + + Returns: + list[dict[str, str]]: The tools. + """ + if self.assistant is None: + raise AgentInitializationException("The assistant has not been created.") + return self._get_tools() + + # endregion + + # region Agent Channel Methods + + def get_channel_keys(self) -> Iterable[str]: + """Get the channel keys. + + Returns: + Iterable[str]: The channel keys. + """ + # Distinguish from other channel types. 
+ yield f"{OpenAIAssistantBase.__name__}" + + # Distinguish between different agent IDs + yield self.id + + # Distinguish between agent names + yield self.name + + # Distinguish between different API base URLs + yield str(self.client.base_url) + + async def create_channel(self) -> AgentChannel: + """Create a channel.""" + thread_id = await self.create_thread() + + return OpenAIAssistantChannel(client=self.client, thread_id=thread_id) + + # endregion + + # region Agent Methods + + async def create_thread( + self, + *, + code_interpreter_file_ids: list[str] | None = [], + messages: list["ChatMessageContent"] | None = [], + vector_store_id: str | None = None, + metadata: dict[str, str] = {}, + ) -> str: + """Create a thread. + + Args: + code_interpreter_file_ids: The code interpreter file ids. Defaults to an empty list. (optional) + messages: The chat messages. Defaults to an empty list. (optional) + vector_store_id: The vector store id. Defaults to None. (optional) + metadata: The metadata. Defaults to an empty dictionary. (optional) + + Returns: + str: The thread id. + """ + create_thread_kwargs: dict[str, Any] = {} + + tool_resources = {} + + if code_interpreter_file_ids: + tool_resources["code_interpreter"] = {"file_ids": code_interpreter_file_ids} + + if vector_store_id: + tool_resources["file_search"] = {"vector_store_ids": [vector_store_id]} + + if tool_resources: + create_thread_kwargs["tool_resources"] = tool_resources + + if messages: + messages_to_add = [] + for message in messages: + if message.role.value not in self.allowed_message_roles: + raise AgentExecutionException( + f"Invalid message role `{message.role.value}`. Allowed roles are {self.allowed_message_roles}." + ) + message_contents = get_message_contents(message=message) + for content in message_contents: + messages_to_add.append({"role": message.role.value, "content": content}) + create_thread_kwargs["messages"] = messages_to_add + + if metadata: + create_thread_kwargs["metadata"] = metadata + + thread = await self.client.beta.threads.create(**create_thread_kwargs) + return thread.id + + async def delete_thread(self, thread_id: str) -> None: + """Delete a thread. + + Args: + thread_id: The thread id. + """ + await self.client.beta.threads.delete(thread_id) + + async def delete(self) -> bool: + """Delete the assistant. + + Returns: + bool: True if the assistant is deleted. + """ + if not self._is_deleted and self.assistant: + await self.client.beta.assistants.delete(self.assistant.id) + self._is_deleted = True + return self._is_deleted + + async def add_chat_message(self, thread_id: str, message: "ChatMessageContent") -> "Message": + """Add a chat message. + + Args: + thread_id: The thread id. + message: The chat message. + + Returns: + Message: The message. + """ + return await create_chat_message(self.client, thread_id, message, self.allowed_message_roles) + + async def get_thread_messages(self, thread_id: str) -> AsyncIterable["ChatMessageContent"]: + """Get the messages for the specified thread. + + Args: + thread_id: The thread id. + + Yields: + ChatMessageContent: The chat message. 
+ """ + agent_names: dict[str, Any] = {} + + thread_messages = await self.client.beta.threads.messages.list(thread_id=thread_id, limit=100, order="desc") + for message in thread_messages.data: + assistant_name = None + if message.assistant_id and message.assistant_id not in agent_names: + agent = await self.client.beta.assistants.retrieve(message.assistant_id) + if agent.name: + agent_names[message.assistant_id] = agent.name + assistant_name = agent_names.get(message.assistant_id) if message.assistant_id else message.assistant_id + assistant_name = assistant_name or message.assistant_id + + content: "ChatMessageContent" = generate_message_content(str(assistant_name), message) + + if len(content.items) > 0: + yield content + + async def add_file(self, file_path: str, purpose: Literal["assistants", "vision"]) -> str: + """Add a file for use with the Assistant. + + Args: + file_path: The file path. + purpose: The purpose. Can be "assistants" or "vision". + + Returns: + str: The file id. + + Raises: + AgentInitializationError: If the client has not been initialized or the file is not found. + """ + try: + with open(file_path, "rb") as file: + file = await self.client.files.create(file=file, purpose=purpose) # type: ignore + return file.id # type: ignore + except FileNotFoundError as ex: + raise AgentFileNotFoundException(f"File not found: {file_path}") from ex + + async def delete_file(self, file_id: str) -> None: + """Delete a file. + + Args: + file_id: The file id. + """ + try: + await self.client.files.delete(file_id) + except Exception as ex: + raise AgentExecutionException("Error deleting file.") from ex + + async def create_vector_store(self, file_ids: str | list[str]) -> str: + """Create a vector store. + + Args: + file_ids: The file ids either as a str of a single file ID or a list of strings of file IDs. + + Returns: + The vector store id. + + Raises: + AgentExecutionError: If there is an error creating the vector store. + """ + if isinstance(file_ids, str): + file_ids = [file_ids] + try: + vector_store = await self.client.beta.vector_stores.create(file_ids=file_ids) + return vector_store.id + except Exception as ex: + raise AgentExecutionException("Error creating vector store.") from ex + + async def delete_vector_store(self, vector_store_id: str) -> None: + """Delete a vector store. + + Args: + vector_store_id: The vector store id. + + Raises: + AgentExecutionError: If there is an error deleting the vector store. + """ + try: + await self.client.beta.vector_stores.delete(vector_store_id) + except Exception as ex: + raise AgentExecutionException("Error deleting vector store.") from ex + + # endregion + + # region Agent Invoke Methods + + @trace_agent_invocation + async def invoke( + self, + thread_id: str, + *, + ai_model_id: str | None = None, + enable_code_interpreter: bool | None = False, + enable_file_search: bool | None = False, + enable_json_response: bool | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + parallel_tool_calls_enabled: bool | None = True, + truncation_message_count: int | None = None, + temperature: float | None = None, + top_p: float | None = None, + metadata: dict[str, str] | None = None, + **kwargs: Any, + ) -> AsyncIterable["ChatMessageContent"]: + """Invoke the chat assistant. + + The supplied arguments will take precedence over the specified assistant level attributes. + + Args: + thread_id: The thread id. + ai_model_id: The AI model id. Defaults to None. 
(optional) + enable_code_interpreter: Enable code interpreter. Defaults to False. (optional) + enable_file_search: Enable file search. Defaults to False. (optional) + enable_json_response: Enable JSON response. Defaults to False. (optional) + max_completion_tokens: The max completion tokens. Defaults to None. (optional) + max_prompt_tokens: The max prompt tokens. Defaults to None. (optional) + parallel_tool_calls_enabled: Enable parallel tool calls. Defaults to True. (optional) + truncation_message_count: The truncation message count. Defaults to None. (optional) + temperature: The temperature. Defaults to None. (optional) + top_p: The top p. Defaults to None. (optional) + metadata: The metadata. Defaults to {}. (optional) + kwargs: Extra keyword arguments. + + Yields: + ChatMessageContent: The chat message content. + """ + async for is_visible, content in self._invoke_internal( + thread_id=thread_id, + ai_model_id=ai_model_id, + enable_code_interpreter=enable_code_interpreter, + enable_file_search=enable_file_search, + enable_json_response=enable_json_response, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + parallel_tool_calls_enabled=parallel_tool_calls_enabled, + truncation_message_count=truncation_message_count, + temperature=temperature, + top_p=top_p, + metadata=metadata, + kwargs=kwargs, + ): + if is_visible: + yield content + + async def _invoke_internal( + self, + thread_id: str, + *, + ai_model_id: str | None = None, + enable_code_interpreter: bool | None = False, + enable_file_search: bool | None = False, + enable_json_response: bool | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + parallel_tool_calls_enabled: bool | None = True, + truncation_message_count: int | None = None, + temperature: float | None = None, + top_p: float | None = None, + metadata: dict[str, str] | None = None, + **kwargs: Any, + ) -> AsyncIterable[tuple[bool, "ChatMessageContent"]]: + """Internal invoke method. + + The supplied arguments will take precedence over the specified assistant level attributes. + + Args: + thread_id: The thread id. + ai_model_id: The AI model id. Defaults to None. (optional) + enable_code_interpreter: Enable code interpreter. Defaults to False. (optional) + enable_file_search: Enable file search. Defaults to False. (optional) + enable_json_response: Enable JSON response. Defaults to False. (optional) + max_completion_tokens: The max completion tokens. Defaults to None. (optional) + max_prompt_tokens: The max prompt tokens. Defaults to None. (optional) + parallel_tool_calls_enabled: Enable parallel tool calls. Defaults to True. (optional) + truncation_message_count: The truncation message count. Defaults to None. (optional) + temperature: The temperature. Defaults to None. (optional) + top_p: The top p. Defaults to None. (optional) + metadata: The metadata. Defaults to {}. (optional) + kwargs: Extra keyword arguments. + + Yields: + tuple[bool, ChatMessageContent]: A tuple of visibility and chat message content. 
+ """ + if not self.assistant: + raise AgentInitializationException("The assistant has not been created.") + + if self._is_deleted: + raise AgentInitializationException("The assistant has been deleted.") + + if metadata is None: + metadata = {} + + self._check_if_deleted() + tools = self._get_tools() + + run_options = self._generate_options( + ai_model_id=ai_model_id, + enable_code_interpreter=enable_code_interpreter, + enable_file_search=enable_file_search, + enable_json_response=enable_json_response, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + parallel_tool_calls_enabled=parallel_tool_calls_enabled, + truncation_message_count=truncation_message_count, + temperature=temperature, + top_p=top_p, + metadata=metadata, + kwargs=kwargs, + ) + + # Filter out None values to avoid passing them as kwargs + run_options = {k: v for k, v in run_options.items() if v is not None} + + logger.debug(f"Starting invoke for agent `{self.name}` and thread `{thread_id}`") + + run = await self.client.beta.threads.runs.create( + assistant_id=self.assistant.id, + thread_id=thread_id, + instructions=self.assistant.instructions, + tools=tools, # type: ignore + **run_options, + ) + + processed_step_ids = set() + function_steps: dict[str, "FunctionCallContent"] = {} + + while run.status != "completed": + run = await self._poll_run_status(run=run, thread_id=thread_id) + + if run.status in self.error_message_states: + error_message = "" + if run.last_error and run.last_error.message: + error_message = run.last_error.message + raise AgentInvokeException( + f"Run failed with status: `{run.status}` for agent `{self.name}` and thread `{thread_id}` " + f"with error: {error_message}" + ) + + # Check if function calling required + if run.status == "requires_action": + logger.debug(f"Run [{run.id}] requires action for agent `{self.name}` and thread `{thread_id}`") + fccs = get_function_call_contents(run, function_steps) + if fccs: + logger.debug( + f"Yielding `generate_function_call_content` for agent `{self.name}` and " + f"thread `{thread_id}`, visibility False" + ) + yield False, generate_function_call_content(agent_name=self.name, fccs=fccs) + + from semantic_kernel.contents.chat_history import ChatHistory + + chat_history = ChatHistory() + _ = await self._invoke_function_calls(fccs=fccs, chat_history=chat_history) + + tool_outputs = self._format_tool_outputs(fccs, chat_history) + await self.client.beta.threads.runs.submit_tool_outputs( + run_id=run.id, + thread_id=thread_id, + tool_outputs=tool_outputs, # type: ignore + ) + logger.debug(f"Submitted tool outputs for agent `{self.name}` and thread `{thread_id}`") + + steps_response = await self.client.beta.threads.runs.steps.list(run_id=run.id, thread_id=thread_id) + logger.debug(f"Called for steps_response for run [{run.id}] agent `{self.name}` and thread `{thread_id}`") + steps: list[RunStep] = steps_response.data + + def sort_key(step: RunStep): + # Put tool_calls first, then message_creation + # If multiple steps share a type, break ties by completed_at + return (0 if step.type == "tool_calls" else 1, step.completed_at) + + completed_steps_to_process = sorted( + [s for s in steps if s.completed_at is not None and s.id not in processed_step_ids], key=sort_key + ) + + logger.debug( + f"Completed steps to process for run [{run.id}] agent `{self.name}` and thread `{thread_id}` " + f"with length `{len(completed_steps_to_process)}`" + ) + + message_count = 0 + for completed_step in completed_steps_to_process: + if completed_step.type 
== "tool_calls": + logger.debug( + f"Entering step type tool_calls for run [{run.id}], agent `{self.name}` and " + f"thread `{thread_id}`" + ) + assert hasattr(completed_step.step_details, "tool_calls") # nosec + for tool_call in completed_step.step_details.tool_calls: + is_visible = False + content: "ChatMessageContent | None" = None + if tool_call.type == "code_interpreter": + logger.debug( + f"Entering step type tool_calls for run [{run.id}], [code_interpreter] for " + f"agent `{self.name}` and thread `{thread_id}`" + ) + content = generate_code_interpreter_content( + self.name, + tool_call.code_interpreter.input, # type: ignore + ) + is_visible = True + elif tool_call.type == "function": + logger.debug( + f"Entering step type tool_calls for run [{run.id}], [function] for agent `{self.name}` " + f"and thread `{thread_id}`" + ) + function_step = function_steps.get(tool_call.id) + assert function_step is not None # nosec + content = generate_function_result_content( + agent_name=self.name, function_step=function_step, tool_call=tool_call + ) + + if content: + message_count += 1 + logger.debug( + f"Yielding tool_message for run [{run.id}], agent `{self.name}` and thread " + f"`{thread_id}` and message count `{message_count}`, is_visible `{is_visible}`" + ) + yield is_visible, content + elif completed_step.type == "message_creation": + logger.debug( + f"Entering step type message_creation for run [{run.id}], agent `{self.name}` and " + f"thread `{thread_id}`" + ) + message = await self._retrieve_message( + thread_id=thread_id, + message_id=completed_step.step_details.message_creation.message_id, # type: ignore + ) + if message: + content = generate_message_content(self.name, message) + if content and len(content.items) > 0: + message_count += 1 + logger.debug( + f"Yielding message_creation for run [{run.id}], agent `{self.name}` and " + f"thread `{thread_id}` and message count `{message_count}`, is_visible `{True}`" + ) + yield True, content + processed_step_ids.add(completed_step.id) + + @trace_agent_invocation + async def invoke_stream( + self, + thread_id: str, + *, + messages: list["ChatMessageContent"] | None = None, + ai_model_id: str | None = None, + enable_code_interpreter: bool | None = False, + enable_file_search: bool | None = False, + enable_json_response: bool | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + parallel_tool_calls_enabled: bool | None = True, + truncation_message_count: int | None = None, + temperature: float | None = None, + top_p: float | None = None, + metadata: dict[str, str] | None = None, + **kwargs: Any, + ) -> AsyncIterable["ChatMessageContent"]: + """Invoke the chat assistant with streaming.""" + async for content in self._invoke_internal_stream( + thread_id=thread_id, + messages=messages, + ai_model_id=ai_model_id, + enable_code_interpreter=enable_code_interpreter, + enable_file_search=enable_file_search, + enable_json_response=enable_json_response, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + parallel_tool_calls_enabled=parallel_tool_calls_enabled, + truncation_message_count=truncation_message_count, + temperature=temperature, + top_p=top_p, + metadata=metadata, + **kwargs, + ): + yield content + + async def _invoke_internal_stream( + self, + thread_id: str, + *, + messages: list["ChatMessageContent"] | None = None, + ai_model_id: str | None = None, + enable_code_interpreter: bool | None = False, + enable_file_search: bool | None = False, + 
enable_json_response: bool | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + parallel_tool_calls_enabled: bool | None = True, + truncation_message_count: int | None = None, + temperature: float | None = None, + top_p: float | None = None, + metadata: dict[str, str] | None = None, + **kwargs: Any, + ) -> AsyncIterable["ChatMessageContent"]: + """Internal invoke method with streaming.""" + if not self.assistant: + raise AgentInitializationException("The assistant has not been created.") + + if self._is_deleted: + raise AgentInitializationException("The assistant has been deleted.") + + if metadata is None: + metadata = {} + + tools = self._get_tools() + + run_options = self._generate_options( + ai_model_id=ai_model_id, + enable_code_interpreter=enable_code_interpreter, + enable_file_search=enable_file_search, + enable_json_response=enable_json_response, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + parallel_tool_calls_enabled=parallel_tool_calls_enabled, + truncation_message_count=truncation_message_count, + temperature=temperature, + top_p=top_p, + metadata=metadata, + **kwargs, + ) + + # Filter out None values to avoid passing them as kwargs + run_options = {k: v for k, v in run_options.items() if v is not None} + + stream = self.client.beta.threads.runs.stream( + assistant_id=self.assistant.id, + thread_id=thread_id, + instructions=self.assistant.instructions, + tools=tools, # type: ignore + **run_options, + ) + + function_steps: dict[str, "FunctionCallContent"] = {} + active_messages: dict[str, RunStep] = {} + + while True: + async with stream as response_stream: + async for event in response_stream: + if event.event == "thread.run.created": + run = event.data + logger.info(f"Assistant run created with ID: {run.id}") + elif event.event == "thread.run.in_progress": + run = event.data + logger.info(f"Assistant run in progress with ID: {run.id}") + elif event.event == "thread.message.delta": + content = generate_streaming_message_content(self.name, event.data) + yield content + elif event.event == "thread.run.step.completed": + logger.info(f"Run step completed with ID: {event.data.id}") + if hasattr(event.data.step_details, "message_creation"): + message_id = event.data.step_details.message_creation.message_id + if message_id not in active_messages: + active_messages[message_id] = event.data + elif event.event == "thread.run.step.delta": + step_details = event.data.delta.step_details + if ( + step_details is not None + and hasattr(step_details, "tool_calls") + and step_details.tool_calls is not None + and isinstance(step_details.tool_calls, list) + ): + for tool_call in step_details.tool_calls: + tool_content = None + if tool_call.type == "function": + tool_content = generate_streaming_function_content(self.name, step_details) + elif tool_call.type == "code_interpreter": + tool_content = generate_streaming_code_interpreter_content(self.name, step_details) + if tool_content: + yield tool_content + elif event.event == "thread.run.requires_action": + run = event.data + function_action_result = await self._handle_streaming_requires_action(run, function_steps) + if function_action_result is None: + raise AgentInvokeException( + f"Function call required but no function steps found for agent `{self.name}` " + f"thread: {thread_id}." 
+ ) + if function_action_result.function_result_content: + # Yield the function result content to the caller + yield function_action_result.function_result_content + if messages is not None: + # Add the function result content to the messages list, if it exists + messages.append(function_action_result.function_result_content) + if function_action_result.function_call_content: + if messages is not None: + messages.append(function_action_result.function_call_content) + stream = self.client.beta.threads.runs.submit_tool_outputs_stream( + run_id=run.id, + thread_id=thread_id, + tool_outputs=function_action_result.tool_outputs, # type: ignore + ) + break + elif event.event == "thread.run.completed": + run = event.data + logger.info(f"Run completed with ID: {run.id}") + if len(active_messages) > 0: + for id in active_messages: + step: RunStep = active_messages[id] + message = await self._retrieve_message( + thread_id=thread_id, + message_id=id, # type: ignore + ) + + if message and message.content: + content = generate_message_content(self.name, message, step) + if messages is not None: + messages.append(content) + return + elif event.event == "thread.run.failed": + run = event.data # type: ignore + error_message = "" + if run.last_error and run.last_error.message: + error_message = run.last_error.message + raise AgentInvokeException( + f"Run failed with status: `{run.status}` for agent `{self.name}` and thread `{thread_id}` " + f"with error: {error_message}" + ) + else: + # If the inner loop completes without encountering a 'break', exit the outer loop + break + + async def _handle_streaming_requires_action( + self, run: Run, function_steps: dict[str, "FunctionCallContent"] + ) -> FunctionActionResult | None: + fccs = get_function_call_contents(run, function_steps) + if fccs: + function_call_content = generate_function_call_content(agent_name=self.name, fccs=fccs) + + from semantic_kernel.contents.chat_history import ChatHistory + + chat_history = ChatHistory() + _ = await self._invoke_function_calls(fccs=fccs, chat_history=chat_history) + + function_result_content = merge_function_results(chat_history.messages)[0] + + tool_outputs = self._format_tool_outputs(fccs, chat_history) + return FunctionActionResult(function_call_content, function_result_content, tool_outputs) + return None + + # endregion + + # region Agent Helper Methods + + def _generate_options( + self, + *, + ai_model_id: str | None = None, + enable_code_interpreter: bool | None = False, + enable_file_search: bool | None = False, + enable_json_response: bool | None = False, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + parallel_tool_calls_enabled: bool | None = True, + truncation_message_count: int | None = None, + temperature: float | None = None, + top_p: float | None = None, + metadata: dict[str, str] | None = {}, + kwargs: Any = {}, + ) -> dict[str, Any]: + """Generate options for the assistant invocation.""" + merged_options = self._merge_options( + ai_model_id=ai_model_id, + enable_code_interpreter=enable_code_interpreter, + enable_file_search=enable_file_search, + enable_json_response=enable_json_response, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + parallel_tool_calls_enabled=parallel_tool_calls_enabled, + truncation_message_count=truncation_message_count, + temperature=temperature, + top_p=top_p, + metadata=metadata, + **kwargs, + ) + + truncation_message_count = merged_options.get("truncation_message_count") + + return { + 
"max_completion_tokens": merged_options.get("max_completion_tokens"), + "max_prompt_tokens": merged_options.get("max_prompt_tokens"), + "model": merged_options.get("ai_model_id"), + "top_p": merged_options.get("top_p"), + # TODO(evmattso): Support `parallel_tool_calls` when it is ready + "response_format": "json" if merged_options.get("enable_json_response") else None, + "temperature": merged_options.get("temperature"), + "truncation_strategy": truncation_message_count if truncation_message_count else None, + "metadata": merged_options.get("metadata", None), + } + + def _merge_options( + self, + ai_model_id: str | None = None, + enable_code_interpreter: bool | None = None, + enable_file_search: bool | None = None, + enable_json_response: bool | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + parallel_tool_calls_enabled: bool | None = True, + truncation_message_count: int | None = None, + temperature: float | None = None, + top_p: float | None = None, + metadata: dict[str, str] | None = {}, + **kwargs: Any, + ) -> dict[str, Any]: + """Merge the run-time options with the agent level attribute options.""" + merged_options = { + "ai_model_id": ai_model_id if ai_model_id is not None else self.ai_model_id, + "enable_code_interpreter": enable_code_interpreter + if enable_code_interpreter is not None + else self.enable_code_interpreter, + "enable_file_search": enable_file_search if enable_file_search is not None else self.enable_file_search, + "enable_json_response": enable_json_response + if enable_json_response is not None + else self.enable_json_response, + "max_completion_tokens": max_completion_tokens + if max_completion_tokens is not None + else self.max_completion_tokens, + "max_prompt_tokens": max_prompt_tokens if max_prompt_tokens is not None else self.max_prompt_tokens, + "parallel_tool_calls_enabled": parallel_tool_calls_enabled + if parallel_tool_calls_enabled is not None + else self.parallel_tool_calls_enabled, + "truncation_message_count": truncation_message_count + if truncation_message_count is not None + else self.truncation_message_count, + "temperature": temperature if temperature is not None else self.temperature, + "top_p": top_p if top_p is not None else self.top_p, + "metadata": metadata if metadata is not None else self.metadata, + } + + # Update merged_options with any additional kwargs + merged_options.update(kwargs) + return merged_options + + async def _poll_run_status(self, run: Run, thread_id: str) -> Run: + """Poll the run status. + + Args: + run: The run. + thread_id: The thread id. + + Returns: + The updated run. + """ + logger.info(f"Polling run status: {run.id}, threadId: {thread_id}") + + count = 0 + + try: + run = await asyncio.wait_for( + self._poll_loop(run, thread_id, count), timeout=self.polling_options.run_polling_timeout.total_seconds() + ) + except asyncio.TimeoutError: + timeout_duration = self.polling_options.run_polling_timeout + error_message = f"Polling timed out for run id: `{run.id}` and thread id: `{thread_id}` after waiting {timeout_duration}." 
# noqa: E501 + logger.error(error_message) + raise AgentInvokeException(error_message) + + logger.info(f"Polled run status: {run.status}, {run.id}, threadId: {thread_id}") + return run + + async def _poll_loop(self, run: Run, thread_id: str, count: int) -> Run: + """Internal polling loop.""" + while True: + await asyncio.sleep(self.polling_options.get_polling_interval(count).total_seconds()) + count += 1 + + try: + run = await self.client.beta.threads.runs.retrieve(run.id, thread_id=thread_id) + except Exception as e: + logging.warning(f"Failed to retrieve run for run id: `{run.id}` and thread id: `{thread_id}`: {e}") + # Retry anyway + + if run.status not in self.polling_status: + break + + return run + + async def _retrieve_message(self, thread_id: str, message_id: str) -> Message | None: + """Retrieve a message from a thread. + + Args: + thread_id: The thread id. + message_id: The message id. + + Returns: + The message or None. + """ + message: Message | None = None + count = 0 + max_retries = 3 + + while count < max_retries: + try: + message = await self.client.beta.threads.messages.retrieve(message_id, thread_id=thread_id) + break + except Exception as ex: + logger.error(f"Failed to retrieve message {message_id} from thread {thread_id}: {ex}") + count += 1 + if count >= max_retries: + logger.error( + f"Max retries reached. Unable to retrieve message {message_id} from thread {thread_id}." + ) + break + backoff_time: float = self.polling_options.message_synchronization_delay.total_seconds() * (2**count) + await asyncio.sleep(backoff_time) + + return message + + def _check_if_deleted(self) -> None: + """Check if the assistant has been deleted.""" + if self._is_deleted: + raise AgentInitializationException("The assistant has been deleted.") + + def _get_tools(self) -> list[dict[str, str]]: + """Get the list of tools for the assistant. + + Returns: + The list of tools. + """ + tools = [] + if self.assistant is None: + raise AgentInitializationException("The assistant has not been created.") + + for tool in self.assistant.tools: + if isinstance(tool, CodeInterpreterTool): + tools.append({"type": "code_interpreter"}) + elif isinstance(tool, FileSearchTool): + tools.append({"type": "file_search"}) + + funcs = self.kernel.get_full_list_of_function_metadata() + tools.extend([kernel_function_metadata_to_function_call_format(f) for f in funcs]) + + return tools + + async def _invoke_function_calls(self, fccs: list["FunctionCallContent"], chat_history: "ChatHistory") -> list[Any]: + """Invoke function calls and store results in chat history. + + Args: + fccs: The function call contents. + chat_history: The chat history. + + Returns: + The results as a list. + """ + tasks = [ + self.kernel.invoke_function_call(function_call=function_call, chat_history=chat_history) + for function_call in fccs + ] + return await asyncio.gather(*tasks) + + def _format_tool_outputs( + self, fccs: list["FunctionCallContent"], chat_history: "ChatHistory" + ) -> list[dict[str, str]]: + """Format tool outputs from chat history for submission. + + Args: + fccs: The function call contents. + chat_history: The chat history. + + Returns: + The formatted tool outputs as a list of dictionaries. 
+ """ + from semantic_kernel.contents.function_result_content import FunctionResultContent + + tool_call_lookup = { + tool_call.id: tool_call + for message in chat_history.messages + for tool_call in message.items + if isinstance(tool_call, FunctionResultContent) + } + + return [ + {"tool_call_id": fcc.id, "output": str(tool_call_lookup[fcc.id].result)} + for fcc in fccs + if fcc.id in tool_call_lookup + ] + + # endregion diff --git a/python/semantic_kernel/agents/open_ai/run_polling_options.py b/python/semantic_kernel/agents/open_ai/run_polling_options.py index 29da3046e32a..52c20677fc60 100644 --- a/python/semantic_kernel/agents/open_ai/run_polling_options.py +++ b/python/semantic_kernel/agents/open_ai/run_polling_options.py @@ -5,10 +5,10 @@ from pydantic import Field from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class RunPollingOptions(KernelBaseModel): """Configuration and defaults associated with polling behavior for Assistant API requests.""" diff --git a/python/semantic_kernel/agents/strategies/__init__.py b/python/semantic_kernel/agents/strategies/__init__.py index 9a0307489103..836604a9f632 100644 --- a/python/semantic_kernel/agents/strategies/__init__.py +++ b/python/semantic_kernel/agents/strategies/__init__.py @@ -9,7 +9,6 @@ from semantic_kernel.agents.strategies.termination.kernel_function_termination_strategy import ( KernelFunctionTerminationStrategy, ) -from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy __all__ = [ "AggregatorTerminationStrategy", @@ -17,5 +16,4 @@ "KernelFunctionSelectionStrategy", "KernelFunctionTerminationStrategy", "SequentialSelectionStrategy", - "TerminationStrategy", ] diff --git a/python/semantic_kernel/agents/strategies/selection/kernel_function_selection_strategy.py b/python/semantic_kernel/agents/strategies/selection/kernel_function_selection_strategy.py index 62ff44f22726..65f7dfb2ae0b 100644 --- a/python/semantic_kernel/agents/strategies/selection/kernel_function_selection_strategy.py +++ b/python/semantic_kernel/agents/strategies/selection/kernel_function_selection_strategy.py @@ -21,7 +21,7 @@ from semantic_kernel.functions.kernel_arguments import KernelArguments from semantic_kernel.functions.kernel_function import KernelFunction from semantic_kernel.kernel import Kernel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.agents import Agent @@ -29,7 +29,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class KernelFunctionSelectionStrategy(SelectionStrategy): """Determines agent selection based on the evaluation of a Kernel Function.""" diff --git a/python/semantic_kernel/agents/strategies/selection/selection_strategy.py b/python/semantic_kernel/agents/strategies/selection/selection_strategy.py index a70e23a40b19..6f453a50a876 100644 --- a/python/semantic_kernel/agents/strategies/selection/selection_strategy.py +++ b/python/semantic_kernel/agents/strategies/selection/selection_strategy.py @@ -6,13 +6,13 @@ from semantic_kernel.agents import Agent from semantic_kernel.exceptions.agent_exceptions import AgentExecutionException from semantic_kernel.kernel_pydantic import KernelBaseModel -from 
semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.contents.chat_message_content import ChatMessageContent -@experimental +@experimental_class class SelectionStrategy(KernelBaseModel, ABC): """Base strategy class for selecting the next agent in a chat.""" diff --git a/python/semantic_kernel/agents/strategies/selection/sequential_selection_strategy.py b/python/semantic_kernel/agents/strategies/selection/sequential_selection_strategy.py index 739ad10b29b6..b60fc5f0f21f 100644 --- a/python/semantic_kernel/agents/strategies/selection/sequential_selection_strategy.py +++ b/python/semantic_kernel/agents/strategies/selection/sequential_selection_strategy.py @@ -13,7 +13,7 @@ from pydantic import PrivateAttr from semantic_kernel.agents.strategies.selection.selection_strategy import SelectionStrategy -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.agents import Agent @@ -23,7 +23,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class SequentialSelectionStrategy(SelectionStrategy): """Round-robin turn-taking strategy. Agent order is based on the order in which they joined.""" diff --git a/python/semantic_kernel/agents/strategies/termination/aggregator_termination_strategy.py b/python/semantic_kernel/agents/strategies/termination/aggregator_termination_strategy.py index 5ee26f5f6042..9b102912299e 100644 --- a/python/semantic_kernel/agents/strategies/termination/aggregator_termination_strategy.py +++ b/python/semantic_kernel/agents/strategies/termination/aggregator_termination_strategy.py @@ -9,13 +9,13 @@ from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.agents.agent import Agent -@experimental +@experimental_class class AggregateTerminationCondition(str, Enum): """The condition for terminating the aggregation process.""" @@ -23,7 +23,7 @@ class AggregateTerminationCondition(str, Enum): ANY = "Any" -@experimental +@experimental_class class AggregatorTerminationStrategy(KernelBaseModel): """A strategy that aggregates multiple termination strategies.""" diff --git a/python/semantic_kernel/agents/strategies/termination/default_termination_strategy.py b/python/semantic_kernel/agents/strategies/termination/default_termination_strategy.py index 396c3849d0a8..b1232d680097 100644 --- a/python/semantic_kernel/agents/strategies/termination/default_termination_strategy.py +++ b/python/semantic_kernel/agents/strategies/termination/default_termination_strategy.py @@ -2,21 +2,19 @@ from typing import TYPE_CHECKING -from pydantic import Field - from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.agents.agent import Agent from semantic_kernel.contents.chat_message_content import ChatMessageContent 
-@experimental +@experimental_class class DefaultTerminationStrategy(TerminationStrategy): """A default termination strategy that never terminates.""" - maximum_iterations: int = Field(default=5, description="The maximum number of iterations to run the agent.") + maximum_iterations: int = 1 async def should_agent_terminate(self, agent: "Agent", history: list["ChatMessageContent"]) -> bool: """Check if the agent should terminate. diff --git a/python/semantic_kernel/agents/strategies/termination/kernel_function_termination_strategy.py b/python/semantic_kernel/agents/strategies/termination/kernel_function_termination_strategy.py index a9a13a87c2dc..93c59e10ed84 100644 --- a/python/semantic_kernel/agents/strategies/termination/kernel_function_termination_strategy.py +++ b/python/semantic_kernel/agents/strategies/termination/kernel_function_termination_strategy.py @@ -13,7 +13,7 @@ from semantic_kernel.functions.kernel_arguments import KernelArguments from semantic_kernel.functions.kernel_function import KernelFunction from semantic_kernel.kernel import Kernel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.agents import Agent @@ -21,7 +21,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class KernelFunctionTerminationStrategy(TerminationStrategy): """A termination strategy that uses a kernel function to determine termination.""" diff --git a/python/semantic_kernel/agents/strategies/termination/termination_strategy.py b/python/semantic_kernel/agents/strategies/termination/termination_strategy.py index 34fc3b40b761..ba4d0f6c341f 100644 --- a/python/semantic_kernel/agents/strategies/termination/termination_strategy.py +++ b/python/semantic_kernel/agents/strategies/termination/termination_strategy.py @@ -7,7 +7,7 @@ from semantic_kernel.agents.agent import Agent from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.contents.chat_message_content import ChatMessageContent @@ -15,7 +15,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class TerminationStrategy(KernelBaseModel): """A strategy for determining when an agent should terminate.""" diff --git a/python/semantic_kernel/connectors/ai/README.md b/python/semantic_kernel/connectors/ai/README.md index bad47044b835..997a33427c65 100644 --- a/python/semantic_kernel/connectors/ai/README.md +++ b/python/semantic_kernel/connectors/ai/README.md @@ -3,7 +3,6 @@ This directory contains the implementation of the AI connectors (aka AI services) that are used to interact with AI models. Depending on the modality, the AI connector can inherit from one of the following classes: - - [`ChatCompletionClientBase`](./chat_completion_client_base.py) for chat completion tasks. - [`TextCompletionClientBase`](./text_completion_client_base.py) for text completion tasks. - [`AudioToTextClientBase`](./audio_to_text_client_base.py) for audio to text tasks. @@ -11,6 +10,7 @@ Depending on the modality, the AI connector can inherit from one of the followin - [`TextToImageClientBase`](./text_to_image_client_base.py) for text to image tasks. - [`EmbeddingGeneratorBase`](./embeddings/embedding_generator_base.py) for text embedding tasks. 
+ All base clients inherit from the [`AIServiceClientBase`](../../services/ai_service_client_base.py) class. ## Existing AI connectors diff --git a/python/semantic_kernel/connectors/ai/anthropic/services/anthropic_chat_completion.py b/python/semantic_kernel/connectors/ai/anthropic/services/anthropic_chat_completion.py index 1c5d670c57f1..f5baec134528 100644 --- a/python/semantic_kernel/connectors/ai/anthropic/services/anthropic_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/anthropic/services/anthropic_chat_completion.py @@ -35,9 +35,9 @@ from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import CMC_ITEM_TYPES, ChatMessageContent +from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.streaming_chat_message_content import STREAMING_CMC_ITEM_TYPES as STREAMING_ITEM_TYPES +from semantic_kernel.contents.streaming_chat_message_content import ITEM_TYPES as STREAMING_ITEM_TYPES from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent @@ -49,7 +49,7 @@ ServiceInvalidResponseError, ServiceResponseException, ) -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( trace_chat_completion, trace_streaming_chat_completion, @@ -68,7 +68,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class AnthropicChatCompletion(ChatCompletionClientBase): """Anthropic ChatCompletion class.""" @@ -261,7 +261,7 @@ def _create_chat_message_content( self, response: Message, response_metadata: dict[str, Any] ) -> "ChatMessageContent": """Create a chat message content object.""" - items: list[CMC_ITEM_TYPES] = [] + items: list[ITEM_TYPES] = [] items += self._get_tool_calls_from_message(response) for content_block in response.content: diff --git a/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_prompt_execution_settings.py index 9d2275a74e6e..ac290925b399 100644 --- a/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_prompt_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_prompt_execution_settings.py @@ -5,10 +5,10 @@ from pydantic import Field from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class AzureAIInferencePromptExecutionSettings(PromptExecutionSettings): """Azure AI Inference Prompt Execution Settings. 
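The decorator swap repeated across these files follows a single pattern; a minimal sketch of the post-patch form, using a hypothetical class (both import paths are taken from the hunks above):

```python
# Minimal sketch of the @experimental -> @experimental_class swap applied throughout this patch.
# `ExamplePromptSettings` is a hypothetical class used only for illustration.
from semantic_kernel.kernel_pydantic import KernelBaseModel
from semantic_kernel.utils.experimental_decorator import experimental_class


@experimental_class
class ExamplePromptSettings(KernelBaseModel):
    """A class flagged as experimental via the decorator this patch standardizes on."""

    extra_parameters: dict | None = None
```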
@@ -26,7 +26,7 @@ class AzureAIInferencePromptExecutionSettings(PromptExecutionSettings): extra_parameters: dict[str, Any] | None = None -@experimental +@experimental_class class AzureAIInferenceChatPromptExecutionSettings(AzureAIInferencePromptExecutionSettings): """Azure AI Inference Chat Prompt Execution Settings.""" @@ -46,7 +46,7 @@ class AzureAIInferenceChatPromptExecutionSettings(AzureAIInferencePromptExecutio ] = None -@experimental +@experimental_class class AzureAIInferenceEmbeddingPromptExecutionSettings(PromptExecutionSettings): """Azure AI Inference Embedding Prompt Execution Settings. diff --git a/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_settings.py b/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_settings.py index dc935ff18f1f..400eb31aa54c 100644 --- a/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_settings.py +++ b/python/semantic_kernel/connectors/ai/azure_ai_inference/azure_ai_inference_settings.py @@ -5,10 +5,10 @@ from pydantic import SecretStr from semantic_kernel.kernel_pydantic import HttpsUrl, KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class AzureAIInferenceSettings(KernelBaseSettings): """Azure AI Inference settings. diff --git a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_base.py b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_base.py index a732d85da5a9..64e0806804e1 100644 --- a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_base.py +++ b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_base.py @@ -16,7 +16,7 @@ from semantic_kernel.utils.authentication.async_default_azure_credential_wrapper import ( AsyncDefaultAzureCredentialWrapper, ) -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT @@ -37,7 +37,7 @@ def get_client_class(cls, client_type: "AzureAIInferenceClientType") -> Any: return class_mapping[client_type] -@experimental +@experimental_class class AzureAIInferenceBase(KernelBaseModel, ABC): """Azure AI Inference Chat Completion Service.""" diff --git a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_chat_completion.py b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_chat_completion.py index 88cb7ca5abd9..9a43591938e6 100644 --- a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_chat_completion.py @@ -33,16 +33,16 @@ from semantic_kernel.connectors.ai.function_calling_utils import update_settings_from_function_call_configuration from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import CMC_ITEM_TYPES, ChatMessageContent +from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent -from 
semantic_kernel.contents.streaming_chat_message_content import STREAMING_CMC_ITEM_TYPES as STREAMING_ITEM_TYPES +from semantic_kernel.contents.streaming_chat_message_content import ITEM_TYPES as STREAMING_ITEM_TYPES from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.contents.utils.finish_reason import FinishReason from semantic_kernel.exceptions.service_exceptions import ServiceInvalidExecutionSettingsError -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration @@ -51,7 +51,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class AzureAIInferenceChatCompletion(ChatCompletionClientBase, AzureAIInferenceBase): """Azure AI Inference Chat Completion Service.""" @@ -136,8 +136,6 @@ async def _inner_get_chat_message_contents( with AzureAIInferenceTracing(): response: ChatCompletions = await self.client.complete( messages=self._prepare_chat_history_for_request(chat_history), - # The model id will be ignored by the service if the endpoint serves only one model (i.e. MaaS) - model=self.ai_model_id, model_extras=settings.extra_parameters, **settings.prepare_settings_dict(), ) @@ -160,8 +158,6 @@ async def _inner_get_streaming_chat_message_contents( with AzureAIInferenceTracing(): response: AsyncStreamingChatCompletions = await self.client.complete( stream=True, - # The model id will be ignored by the service if the endpoint serves only one model (i.e. MaaS) - model=self.ai_model_id, messages=self._prepare_chat_history_for_request(chat_history), model_extras=settings.extra_parameters, **settings.prepare_settings_dict(), @@ -240,11 +236,12 @@ def _create_chat_message_content( Returns: A chat message content object. 
""" - items: list[CMC_ITEM_TYPES] = [] + items: list[ITEM_TYPES] = [] if choice.message.content: items.append( TextContent( text=choice.message.content, + inner_content=response, metadata=metadata, ) ) diff --git a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_text_embedding.py b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_text_embedding.py index 64fea069cb1c..226fde4ec7b1 100644 --- a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_text_embedding.py @@ -20,13 +20,13 @@ AzureAIInferenceClientType, ) from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import EmbeddingGeneratorBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -@experimental +@experimental_class class AzureAIInferenceTextEmbedding(EmbeddingGeneratorBase, AzureAIInferenceBase): """Azure AI Inference Text Embedding Service.""" @@ -86,8 +86,6 @@ async def generate_embeddings( response: EmbeddingsResult = await self.client.embed( input=texts, - # The model id will be ignored by the service if the endpoint serves only one model (i.e. MaaS) - model=self.ai_model_id, model_extras=settings.extra_parameters if settings else None, dimensions=settings.dimensions if settings else None, encoding_format=settings.encoding_format if settings else None, diff --git a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/utils.py b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/utils.py index f47817aefc07..d8264aec8664 100644 --- a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/utils.py +++ b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/utils.py @@ -50,7 +50,11 @@ def _format_developer_message(message: ChatMessageContent) -> ChatRequestMessage Returns: The formatted developer message. """ - return ChatRequestMessage({"role": "developer", "content": message.content}) + # TODO(@ymuichiro): Add support when Azure AI Inference SDK implements developer role + raise NotImplementedError( + "Developer role is currently not supported by the Azure AI Inference SDK. " + "This feature will be implemented in a future update when SDK support is available." + ) def _format_user_message(message: ChatMessageContent) -> UserMessage: diff --git a/python/semantic_kernel/connectors/ai/bedrock/README.md b/python/semantic_kernel/connectors/ai/bedrock/README.md index bc9461aef7e8..9f48879b54cb 100644 --- a/python/semantic_kernel/connectors/ai/bedrock/README.md +++ b/python/semantic_kernel/connectors/ai/bedrock/README.md @@ -11,27 +11,6 @@ Follow this [guide](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html#configuration) to configure your environment to use the Bedrock API. -Please configure the `aws_access_key_id`, `aws_secret_access_key`, and `region` otherwise you will need to create custom clients for the services. For example: - -```python -runtime_client=boto.client( - "bedrock-runtime", - aws_access_key_id="your_access_key", - aws_secret_access_key="your_secret_key", - region_name="your_region", - [...other parameters you may need...] 
-) -client=boto.client( - "bedrock", - aws_access_key_id="your_access_key", - aws_secret_access_key="your_secret_key", - region_name="your_region", - [...other parameters you may need...] -) - -bedrock_chat_completion_service = BedrockChatCompletion(runtime_client=runtime_client, client=client) -``` - ## Supports ### Region diff --git a/python/semantic_kernel/connectors/ai/bedrock/bedrock_settings.py b/python/semantic_kernel/connectors/ai/bedrock/bedrock_settings.py index 2c5348a14676..1f966fc08b2a 100644 --- a/python/semantic_kernel/connectors/ai/bedrock/bedrock_settings.py +++ b/python/semantic_kernel/connectors/ai/bedrock/bedrock_settings.py @@ -3,10 +3,10 @@ from typing import ClassVar from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class BedrockSettings(KernelBaseSettings): """Amazon Bedrock service settings. diff --git a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_base.py b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_base.py index 3457d9fee1a5..115ae65409dd 100644 --- a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_base.py +++ b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_base.py @@ -4,10 +4,8 @@ from functools import partial from typing import Any, ClassVar -import boto3 - +from semantic_kernel.connectors.ai.bedrock.services.model_provider.utils import run_in_executor from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.async_utils import run_in_executor class BedrockBase(KernelBaseModel, ABC): @@ -21,26 +19,6 @@ class BedrockBase(KernelBaseModel, ABC): # Client: Use for model management bedrock_client: Any - def __init__( - self, - *, - runtime_client: Any | None = None, - client: Any | None = None, - **kwargs: Any, - ) -> None: - """Initialize the Amazon Bedrock Base Class. - - Args: - runtime_client: The Amazon Bedrock runtime client to use. - client: The Amazon Bedrock client to use. - **kwargs: Additional keyword arguments. 
- """ - super().__init__( - bedrock_runtime_client=runtime_client or boto3.client("bedrock-runtime"), - bedrock_client=client or boto3.client("bedrock"), - **kwargs, - ) - async def get_foundation_model_info(self, model_id: str) -> dict[str, Any]: """Get the foundation model information.""" response = await run_in_executor( diff --git a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_chat_completion.py b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_chat_completion.py index 64df31e5967b..5c4f3e6cd192 100644 --- a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_chat_completion.py @@ -5,6 +5,8 @@ from functools import partial from typing import TYPE_CHECKING, Any, ClassVar +import boto3 + if sys.version_info >= (3, 12): from typing import override # pragma: no cover else: @@ -23,15 +25,16 @@ finish_reason_from_bedrock_to_semantic_kernel, format_bedrock_function_name_to_kernel_function_fully_qualified_name, remove_none_recursively, + run_in_executor, update_settings_from_function_choice_configuration, ) from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase from semantic_kernel.connectors.ai.completion_usage import CompletionUsage from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType -from semantic_kernel.contents.chat_message_content import CMC_ITEM_TYPES, ChatMessageContent +from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.image_content import ImageContent -from semantic_kernel.contents.streaming_chat_message_content import STREAMING_CMC_ITEM_TYPES as STREAMING_ITEM_TYPES +from semantic_kernel.contents.streaming_chat_message_content import ITEM_TYPES as STREAMING_ITEM_TYPES from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent @@ -42,7 +45,6 @@ ServiceInvalidRequestError, ServiceInvalidResponseError, ) -from semantic_kernel.utils.async_utils import run_in_executor from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( trace_chat_completion, trace_streaming_chat_completion, @@ -93,8 +95,8 @@ def __init__( super().__init__( ai_model_id=bedrock_settings.chat_model_id, service_id=service_id or bedrock_settings.chat_model_id, - runtime_client=runtime_client, - client=client, + bedrock_runtime_client=runtime_client or boto3.client("bedrock-runtime"), + bedrock_client=client or boto3.client("bedrock"), ) # region Overriding base class methods @@ -238,7 +240,7 @@ def _create_chat_message_content(self, response: dict[str, Any]) -> ChatMessageC prompt_tokens=response["usage"]["inputTokens"], completion_tokens=response["usage"]["outputTokens"], ) - items: list[CMC_ITEM_TYPES] = [] + items: list[ITEM_TYPES] = [] for content in response["output"]["message"]["content"]: if "text" in content: items.append(TextContent(text=content["text"], inner_content=content)) diff --git a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_completion.py b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_completion.py index d690a3aeb644..81092a7c7fa4 100644 --- 
a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_completion.py +++ b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_completion.py @@ -6,6 +6,7 @@ from functools import partial from typing import TYPE_CHECKING, Any +import boto3 from pydantic import ValidationError if sys.version_info >= (3, 12): @@ -21,11 +22,11 @@ parse_streaming_text_completion_response, parse_text_completion_response, ) +from semantic_kernel.connectors.ai.bedrock.services.model_provider.utils import run_in_executor from semantic_kernel.connectors.ai.text_completion_client_base import TextCompletionClientBase from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError, ServiceInvalidRequestError -from semantic_kernel.utils.async_utils import run_in_executor from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( trace_streaming_text_completion, trace_text_completion, @@ -72,8 +73,8 @@ def __init__( super().__init__( ai_model_id=bedrock_settings.text_model_id, service_id=service_id or bedrock_settings.text_model_id, - runtime_client=runtime_client, - client=client, + bedrock_runtime_client=runtime_client or boto3.client("bedrock-runtime"), + bedrock_client=client or boto3.client("bedrock"), ) # region Overriding base class methods diff --git a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_embedding.py b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_embedding.py index 72224726acec..f963db5c5f0b 100644 --- a/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/bedrock/services/bedrock_text_embedding.py @@ -6,6 +6,7 @@ from functools import partial from typing import TYPE_CHECKING, Any +import boto3 from numpy import array, ndarray from pydantic import ValidationError @@ -23,10 +24,10 @@ get_text_embedding_request_body, parse_text_embedding_response, ) +from semantic_kernel.connectors.ai.bedrock.services.model_provider.utils import run_in_executor from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import EmbeddingGeneratorBase from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError, ServiceInvalidRequestError -from semantic_kernel.utils.async_utils import run_in_executor if TYPE_CHECKING: pass @@ -69,8 +70,8 @@ def __init__( super().__init__( ai_model_id=bedrock_settings.embedding_model_id, service_id=service_id or bedrock_settings.embedding_model_id, - runtime_client=runtime_client, - client=client, + bedrock_runtime_client=runtime_client or boto3.client("bedrock-runtime"), + bedrock_client=client or boto3.client("bedrock"), ) @override diff --git a/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/bedrock_model_provider.py b/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/bedrock_model_provider.py index 8655361331e5..361a37e622a5 100644 --- a/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/bedrock_model_provider.py +++ b/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/bedrock_model_provider.py @@ -37,10 +37,8 @@ class BedrockModelProvider(Enum): @classmethod def to_model_provider(cls, model_id: str) -> "BedrockModelProvider": """Convert a model ID to a model provider.""" 
- try: - return next(provider for provider in cls if provider.value in model_id) - except StopIteration: - raise ValueError(f"Model ID {model_id} does not contain a valid model provider name.") + provider = model_id.split(".")[0] + return cls(provider) # region Text Completion diff --git a/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/utils.py b/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/utils.py index e6425eda1c39..7607696559c5 100644 --- a/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/utils.py +++ b/python/semantic_kernel/connectors/ai/bedrock/services/model_provider/utils.py @@ -1,7 +1,9 @@ # Copyright (c) Microsoft. All rights reserved. +import asyncio import json from collections.abc import Callable, Mapping +from functools import partial from typing import TYPE_CHECKING, Any from semantic_kernel.connectors.ai.bedrock.bedrock_prompt_execution_settings import BedrockChatPromptExecutionSettings @@ -21,6 +23,11 @@ from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +async def run_in_executor(executor, func, *args, **kwargs): + """Run a function in an executor.""" + return await asyncio.get_event_loop().run_in_executor(executor, partial(func, *args, **kwargs)) + + def remove_none_recursively(data: dict, max_depth: int = 5) -> dict: """Remove None values from a dictionary recursively.""" if max_depth <= 0: diff --git a/python/semantic_kernel/connectors/ai/chat_completion_client_base.py b/python/semantic_kernel/connectors/ai/chat_completion_client_base.py index 974d59af92be..5c527e994564 100644 --- a/python/semantic_kernel/connectors/ai/chat_completion_client_base.py +++ b/python/semantic_kernel/connectors/ai/chat_completion_client_base.py @@ -157,7 +157,6 @@ async def get_chat_message_contents( function_call=function_call, chat_history=chat_history, arguments=kwargs.get("arguments"), - execution_settings=settings, function_call_count=fc_count, request_index=request_index, function_behavior=settings.function_choice_behavior, @@ -264,9 +263,7 @@ async def get_streaming_chat_message_contents( for msg in messages: if msg is not None: all_messages.append(msg) - if not function_call_returned and any( - isinstance(item, FunctionCallContent) for item in msg.items - ): + if any(isinstance(item, FunctionCallContent) for item in msg.items): function_call_returned = True yield messages @@ -292,8 +289,6 @@ async def get_streaming_chat_message_contents( function_call=function_call, chat_history=chat_history, arguments=kwargs.get("arguments"), - is_streaming=True, - execution_settings=settings, function_call_count=fc_count, request_index=request_index, function_behavior=settings.function_choice_behavior, @@ -434,10 +429,7 @@ def _get_ai_model_id(self, settings: "PromptExecutionSettings") -> str: return getattr(settings, "ai_model_id", self.ai_model_id) or self.ai_model_id def _yield_function_result_messages(self, function_result_messages: list) -> bool: - """Determine if the function result messages should be yielded. - - If there are messages and if the first message has items, then yield the messages. 
- """ + """Determine if the function result messages should be yielded.""" return len(function_result_messages) > 0 and len(function_result_messages[0].items) > 0 # endregion diff --git a/python/semantic_kernel/connectors/ai/embeddings/embedding_generator_base.py b/python/semantic_kernel/connectors/ai/embeddings/embedding_generator_base.py index edc93dd326d4..3342d96baa02 100644 --- a/python/semantic_kernel/connectors/ai/embeddings/embedding_generator_base.py +++ b/python/semantic_kernel/connectors/ai/embeddings/embedding_generator_base.py @@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, Any from semantic_kernel.services.ai_service_client_base import AIServiceClientBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from numpy import ndarray @@ -12,7 +12,7 @@ from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -@experimental +@experimental_class class EmbeddingGeneratorBase(AIServiceClientBase, ABC): """Base class for embedding generators.""" diff --git a/python/semantic_kernel/connectors/ai/function_call_choice_configuration.py b/python/semantic_kernel/connectors/ai/function_call_choice_configuration.py index a49fa1a1a78c..d44fb946af65 100644 --- a/python/semantic_kernel/connectors/ai/function_call_choice_configuration.py +++ b/python/semantic_kernel/connectors/ai/function_call_choice_configuration.py @@ -4,10 +4,10 @@ from pydantic.dataclasses import dataclass from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class @dataclass class FunctionCallChoiceConfiguration: """Configuration for function call choice.""" diff --git a/python/semantic_kernel/connectors/ai/function_calling_utils.py b/python/semantic_kernel/connectors/ai/function_calling_utils.py index 1e65fa59e537..7a5c2950c4e0 100644 --- a/python/semantic_kernel/connectors/ai/function_calling_utils.py +++ b/python/semantic_kernel/connectors/ai/function_calling_utils.py @@ -1,13 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. from collections import OrderedDict -from collections.abc import Callable -from copy import deepcopy from typing import TYPE_CHECKING, Any from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError -from semantic_kernel.utils.feature_stage_decorator import experimental if TYPE_CHECKING: from semantic_kernel.connectors.ai.function_choice_behavior import ( @@ -18,7 +15,6 @@ from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata - from semantic_kernel.kernel import Kernel def update_settings_from_function_call_configuration( @@ -106,8 +102,8 @@ def merge_function_results( def merge_streaming_function_results( messages: list["ChatMessageContent | StreamingChatMessageContent"], - ai_model_id: str | None = None, - function_invoke_attempt: int | None = None, + ai_model_id: str, + function_invoke_attempt: int, ) -> list["StreamingChatMessageContent"]: """Combine multiple streaming function result content types to one streaming chat message content type. 
@@ -138,36 +134,3 @@ def merge_streaming_function_results( function_invoke_attempt=function_invoke_attempt, ) ] - - -@experimental -def prepare_settings_for_function_calling( - settings: "PromptExecutionSettings", - settings_class: type["PromptExecutionSettings"], - update_settings_callback: Callable[..., None], - kernel: "Kernel", -) -> "PromptExecutionSettings": - """Prepare settings for the service. - - Args: - settings: Prompt execution settings. - settings_class: The settings class. - update_settings_callback: The callback to update the settings. - kernel: Kernel instance. - - Returns: - PromptExecutionSettings of type settings_class. - """ - settings = deepcopy(settings) - if not isinstance(settings, settings_class): - settings = settings_class.from_prompt_execution_settings(settings) - - if settings.function_choice_behavior: - # Configure the function choice behavior into the settings object - # that will become part of the request to the AI service - settings.function_choice_behavior.configure( - kernel=kernel, - update_settings_callback=update_settings_callback, - settings=settings, - ) - return settings diff --git a/python/semantic_kernel/connectors/ai/function_choice_behavior.py b/python/semantic_kernel/connectors/ai/function_choice_behavior.py index 0e872746346f..f32a57e26952 100644 --- a/python/semantic_kernel/connectors/ai/function_choice_behavior.py +++ b/python/semantic_kernel/connectors/ai/function_choice_behavior.py @@ -7,7 +7,7 @@ from semantic_kernel.connectors.ai.function_choice_type import FunctionChoiceType from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration @@ -23,7 +23,7 @@ _T = TypeVar("_T", bound="FunctionChoiceBehavior") -@experimental +@experimental_class class FunctionChoiceBehavior(KernelBaseModel): """Class that controls function choice behavior. 
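With `prepare_settings_for_function_calling` deleted above, a caller that still needs the same behavior can inline the equivalent steps. The sketch below reproduces the removed helper's logic; `settings_class` and `update_settings_callback` are whatever the specific connector already uses:

```python
# Sketch of what a caller can do now that prepare_settings_for_function_calling is gone.
# The body mirrors the deleted helper shown in the hunk above.
from copy import deepcopy


def configure_settings_for_function_calling(settings, settings_class, update_settings_callback, kernel):
    settings = deepcopy(settings)
    if not isinstance(settings, settings_class):
        settings = settings_class.from_prompt_execution_settings(settings)
    if settings.function_choice_behavior:
        # Configure the function choice behavior into the settings object
        # that will become part of the request to the AI service.
        settings.function_choice_behavior.configure(
            kernel=kernel,
            update_settings_callback=update_settings_callback,
            settings=settings,
        )
    return settings
```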
diff --git a/python/semantic_kernel/connectors/ai/function_choice_type.py b/python/semantic_kernel/connectors/ai/function_choice_type.py index 6fc2287b3ce8..d4bc2b3a598f 100644 --- a/python/semantic_kernel/connectors/ai/function_choice_type.py +++ b/python/semantic_kernel/connectors/ai/function_choice_type.py @@ -2,10 +2,10 @@ from enum import Enum -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class FunctionChoiceType(Enum): """The type of function choice behavior.""" diff --git a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_chat_completion.py b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_chat_completion.py index 9b538b26ebec..b7005c3c1f5d 100644 --- a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_chat_completion.py @@ -36,9 +36,9 @@ format_gemini_function_name_to_kernel_function_fully_qualified_name, ) from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import CMC_ITEM_TYPES, ChatMessageContent +from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.streaming_chat_message_content import STREAMING_CMC_ITEM_TYPES as STREAMING_ITEM_TYPES +from semantic_kernel.contents.streaming_chat_message_content import ITEM_TYPES as STREAMING_ITEM_TYPES from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent @@ -127,10 +127,8 @@ async def _inner_get_chat_message_contents( assert isinstance(settings, GoogleAIChatPromptExecutionSettings) # nosec genai.configure(api_key=self.service_settings.api_key.get_secret_value()) - if not self.service_settings.gemini_model_id: - raise ServiceInitializationError("The Google AI Gemini model ID is required.") model = GenerativeModel( - model_name=self.service_settings.gemini_model_id, + self.service_settings.gemini_model_id, system_instruction=filter_system_message(chat_history), ) @@ -138,7 +136,7 @@ async def _inner_get_chat_message_contents( contents=self._prepare_chat_history_for_request(chat_history), generation_config=GenerationConfig(**settings.prepare_settings_dict()), tools=settings.tools, - tool_config=settings.tool_config, # type: ignore + tool_config=settings.tool_config, ) return [self._create_chat_message_content(response, candidate) for candidate in response.candidates] @@ -156,10 +154,8 @@ async def _inner_get_streaming_chat_message_contents( assert isinstance(settings, GoogleAIChatPromptExecutionSettings) # nosec genai.configure(api_key=self.service_settings.api_key.get_secret_value()) - if not self.service_settings.gemini_model_id: - raise ServiceInitializationError("The Google AI Gemini model ID is required.") model = GenerativeModel( - model_name=self.service_settings.gemini_model_id, + self.service_settings.gemini_model_id, system_instruction=filter_system_message(chat_history), ) @@ -167,7 +163,7 @@ async def _inner_get_streaming_chat_message_contents( contents=self._prepare_chat_history_for_request(chat_history), 
generation_config=GenerationConfig(**settings.prepare_settings_dict()), tools=settings.tools, - tool_config=settings.tool_config, # type: ignore + tool_config=settings.tool_config, stream=True, ) @@ -244,7 +240,7 @@ def _create_chat_message_content( response_metadata = self._get_metadata_from_response(response) response_metadata.update(self._get_metadata_from_candidate(candidate)) - items: list[CMC_ITEM_TYPES] = [] + items: list[ITEM_TYPES] = [] for idx, part in enumerate(candidate.content.parts): if part.text: items.append(TextContent(text=part.text, inner_content=response, metadata=response_metadata)) diff --git a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_completion.py b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_completion.py index 625ee5c3e251..2c2c25bc1910 100644 --- a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_completion.py +++ b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_completion.py @@ -99,10 +99,8 @@ async def _inner_get_text_contents( assert isinstance(settings, GoogleAITextPromptExecutionSettings) # nosec genai.configure(api_key=self.service_settings.api_key.get_secret_value()) - if not self.service_settings.gemini_model_id: - raise ServiceInitializationError("The Google AI Gemini model ID is required.") model = GenerativeModel( - model_name=self.service_settings.gemini_model_id, + self.service_settings.gemini_model_id, ) response: AsyncGenerateContentResponse = await model.generate_content_async( @@ -124,10 +122,8 @@ async def _inner_get_streaming_text_contents( assert isinstance(settings, GoogleAITextPromptExecutionSettings) # nosec genai.configure(api_key=self.service_settings.api_key.get_secret_value()) - if not self.service_settings.gemini_model_id: - raise ServiceInitializationError("The Google AI Gemini model ID is required.") model = GenerativeModel( - model_name=self.service_settings.gemini_model_id, + self.service_settings.gemini_model_id, ) response: AsyncGenerateContentResponse = await model.generate_content_async( diff --git a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_embedding.py b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_embedding.py index 98cfc71ce173..6f2ceff601d7 100644 --- a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_embedding.py @@ -93,9 +93,7 @@ async def generate_raw_embeddings( assert isinstance(settings, GoogleAIEmbeddingPromptExecutionSettings) # nosec genai.configure(api_key=self.service_settings.api_key.get_secret_value()) - if not self.service_settings.embedding_model_id: - raise ServiceInitializationError("The Google AI embedding model ID is required.") - response: BatchEmbeddingDict = await genai.embed_content_async( # type: ignore + response: BatchEmbeddingDict = await genai.embed_content_async( model=self.service_settings.embedding_model_id, content=texts, **settings.prepare_settings_dict(), diff --git a/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_chat_completion.py b/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_chat_completion.py index beec827bfb2f..bd7c1346accf 100644 --- a/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_chat_completion.py +++ 
b/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_chat_completion.py
@@ -34,9 +34,9 @@
 )
 from semantic_kernel.connectors.ai.google.vertex_ai.vertex_ai_settings import VertexAISettings
 from semantic_kernel.contents.chat_history import ChatHistory
-from semantic_kernel.contents.chat_message_content import CMC_ITEM_TYPES, ChatMessageContent
+from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent
 from semantic_kernel.contents.function_call_content import FunctionCallContent
-from semantic_kernel.contents.streaming_chat_message_content import STREAMING_CMC_ITEM_TYPES as STREAMING_ITEM_TYPES
+from semantic_kernel.contents.streaming_chat_message_content import ITEM_TYPES as STREAMING_ITEM_TYPES
 from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent
 from semantic_kernel.contents.streaming_text_content import StreamingTextContent
 from semantic_kernel.contents.text_content import TextContent
@@ -235,7 +235,7 @@ def _create_chat_message_content(self, response: GenerationResponse, candidate:
         response_metadata = self._get_metadata_from_response(response)
         response_metadata.update(self._get_metadata_from_candidate(candidate))
 
-        items: list[CMC_ITEM_TYPES] = []
+        items: list[ITEM_TYPES] = []
         for idx, part in enumerate(candidate.content.parts):
             part_dict = part.to_dict()
             if "text" in part_dict:
diff --git a/python/semantic_kernel/connectors/ai/hugging_face/hf_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/hugging_face/hf_prompt_execution_settings.py
index da7b2fac26e1..89fc5525cb29 100644
--- a/python/semantic_kernel/connectors/ai/hugging_face/hf_prompt_execution_settings.py
+++ b/python/semantic_kernel/connectors/ai/hugging_face/hf_prompt_execution_settings.py
@@ -2,15 +2,9 @@
 
 from typing import Any
 
-from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings
-
-try:
-    from transformers import GenerationConfig
+from transformers import GenerationConfig
 
-    ready = True
-except ImportError:
-    GenerationConfig = Any
-    ready = False
+from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings
 
 
 class HuggingFacePromptExecutionSettings(PromptExecutionSettings):
@@ -25,10 +19,8 @@ class HuggingFacePromptExecutionSettings(PromptExecutionSettings):
     temperature: float = 1.0
     top_p: float = 1.0
 
-    def get_generation_config(self) -> "GenerationConfig":
+    def get_generation_config(self) -> GenerationConfig:
         """Get the generation config."""
-        if not ready:
-            raise ImportError("transformers is not installed.")
         return GenerationConfig(
             **self.model_dump(
                 include={"max_new_tokens", "pad_token_id", "eos_token_id", "temperature", "top_p"},
diff --git a/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py b/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py
index 3136e3c0f582..e40688ebad79 100644
--- a/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py
+++ b/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py
@@ -11,7 +11,6 @@
 else:
     from typing_extensions import override  # pragma: no cover
 
-
 import torch
 from transformers import AutoTokenizer, TextIteratorStreamer, pipeline
 
diff --git a/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_embedding.py b/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_embedding.py
index b52a87fa42f7..0bb20906be30 100644
--- a/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_embedding.py
+++ b/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_embedding.py
@@ -15,18 +15,17 @@
 
 from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import EmbeddingGeneratorBase
 from semantic_kernel.exceptions import ServiceResponseException
-from semantic_kernel.utils.feature_stage_decorator import experimental
+from semantic_kernel.utils.experimental_decorator import experimental_class
 
 if TYPE_CHECKING:
     from torch import Tensor
 
     from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings
 
-
 logger: logging.Logger = logging.getLogger(__name__)
 
 
-@experimental
+@experimental_class
 class HuggingFaceTextEmbedding(EmbeddingGeneratorBase):
     """Hugging Face text embedding service."""
 
diff --git a/python/semantic_kernel/connectors/ai/mistral_ai/prompt_execution_settings/mistral_ai_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/mistral_ai/prompt_execution_settings/mistral_ai_prompt_execution_settings.py
index 8153db93577f..ea709172950f 100644
--- a/python/semantic_kernel/connectors/ai/mistral_ai/prompt_execution_settings/mistral_ai_prompt_execution_settings.py
+++ b/python/semantic_kernel/connectors/ai/mistral_ai/prompt_execution_settings/mistral_ai_prompt_execution_settings.py
@@ -1,10 +1,17 @@
 # Copyright (c) Microsoft. All rights reserved.
 
 import logging
+import sys
 from typing import Annotated, Any, Literal
 
 from mistralai import utils
-from pydantic import Field
+
+if sys.version_info >= (3, 11):
+    pass  # pragma: no cover
+else:
+    pass  # pragma: no cover
+
+from pydantic import Field, field_validator
 
 from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings
 
@@ -22,14 +29,7 @@ class MistralAIChatPromptExecutionSettings(MistralAIPromptExecutionSettings):
 
     response_format: dict[Literal["type"], Literal["text", "json_object"]] | None = None
     messages: list[dict[str, Any]] | None = None
-    safe_mode: Annotated[
-        bool,
-        Field(
-            exclude=True,
-            deprecated="The 'safe_mode' setting is no longer supported and is being ignored, "
-            "it will be removed in the Future.",
-        ),
-    ] = False
+    safe_mode: Annotated[bool, Field(exclude=True)] = False
     safe_prompt: bool = False
     max_tokens: Annotated[int | None, Field(gt=0)] = None
     seed: int | None = None
@@ -56,3 +56,12 @@ class MistralAIChatPromptExecutionSettings(MistralAIPromptExecutionSettings):
             "on the function choice configuration.",
         ),
     ] = None
+
+    @field_validator("safe_mode")
+    @classmethod
+    def check_safe_mode(cls, v: bool) -> bool:
+        """The safe_mode setting is no longer supported."""
+        logger.warning(
+            "The 'safe_mode' setting is no longer supported and is being ignored, it will be removed in the Future."
+        )
+        return v
diff --git a/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_chat_completion.py b/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_chat_completion.py
index efe8ee177360..2405897a6c39 100644
--- a/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_chat_completion.py
+++ b/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_chat_completion.py
@@ -42,7 +42,7 @@
 from semantic_kernel.contents.utils.author_role import AuthorRole
 from semantic_kernel.contents.utils.finish_reason import FinishReason
 from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError, ServiceResponseException
-from semantic_kernel.utils.feature_stage_decorator import experimental
+from semantic_kernel.utils.experimental_decorator import experimental_class
 from semantic_kernel.utils.telemetry.model_diagnostics.decorators import (
     trace_chat_completion,
     trace_streaming_chat_completion,
@@ -55,7 +55,7 @@
 
 logger: logging.Logger = logging.getLogger(__name__)
 
 
-@experimental
+@experimental_class
 class MistralAIChatCompletion(MistralAIBase, ChatCompletionClientBase):
     """Mistral Chat completion class."""
diff --git a/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_text_embedding.py b/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_text_embedding.py
index 1cffe44c1376..f3e10e190b88 100644
--- a/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_text_embedding.py
+++ b/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_text_embedding.py
@@ -19,12 +19,12 @@
 from semantic_kernel.connectors.ai.mistral_ai.settings.mistral_ai_settings import MistralAISettings
 from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings
 from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError, ServiceResponseException
-from semantic_kernel.utils.feature_stage_decorator import experimental
+from semantic_kernel.utils.experimental_decorator import experimental_class
 
 logger: logging.Logger = logging.getLogger(__name__)
 
 
-@experimental
+@experimental_class
 class MistralAITextEmbedding(MistralAIBase, EmbeddingGeneratorBase):
     """Mistral AI Inference Text Embedding Service."""
 
diff --git a/python/semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py b/python/semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py
index 68a62e434423..103133af2c9f 100644
--- a/python/semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py
+++ b/python/semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py
@@ -27,9 +27,9 @@
 )
 from semantic_kernel.contents import AuthorRole
 from semantic_kernel.contents.chat_history import ChatHistory
-from semantic_kernel.contents.chat_message_content import CMC_ITEM_TYPES, ChatMessageContent
+from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent
 from semantic_kernel.contents.function_call_content import FunctionCallContent
-from semantic_kernel.contents.streaming_chat_message_content import STREAMING_CMC_ITEM_TYPES as STREAMING_ITEM_TYPES
+from semantic_kernel.contents.streaming_chat_message_content import ITEM_TYPES as STREAMING_ITEM_TYPES
 from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent
 from semantic_kernel.contents.streaming_text_content import StreamingTextContent
 from semantic_kernel.contents.text_content import TextContent
@@ -255,7 +255,7 @@ def
_parse_tool_calls(self, tool_calls: Sequence[Message.ToolCall] | None, items def _create_chat_message_content_from_chat_response(self, response: ChatResponse) -> ChatMessageContent: """Create a chat message content from the response.""" - items: list[CMC_ITEM_TYPES] = [] + items: list[ITEM_TYPES] = [] if response.message.content: items.append( TextContent( @@ -274,7 +274,7 @@ def _create_chat_message_content_from_chat_response(self, response: ChatResponse def _create_chat_message_content(self, response: Mapping[str, Any]) -> ChatMessageContent: """Create a chat message content from the response.""" - items: list[CMC_ITEM_TYPES] = [] + items: list[ITEM_TYPES] = [] if not (message := response.get("message", None)): raise ServiceInvalidResponseError("No message content found in response.") diff --git a/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_embedding.py b/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_embedding.py index 25a30fdb8afc..7b5c0358e400 100644 --- a/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_embedding.py @@ -23,12 +23,12 @@ from numpy import array, ndarray from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import EmbeddingGeneratorBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class OllamaTextEmbedding(OllamaBase, EmbeddingGeneratorBase): """Ollama embeddings client. diff --git a/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_chat_completion.py b/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_chat_completion.py index 891e6d44f53a..28521975e366 100644 --- a/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_chat_completion.py @@ -28,12 +28,12 @@ ) from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions import ServiceInitializationError, ServiceInvalidExecutionSettingsError -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class OnnxGenAIChatCompletion(ChatCompletionClientBase, OnnxGenAICompletionBase): """OnnxGenAI text completion service.""" diff --git a/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_completion_base.py b/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_completion_base.py index 79bb310bbc6d..40ce552ed88b 100644 --- a/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_completion_base.py +++ b/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_completion_base.py @@ -5,18 +5,13 @@ from collections.abc import AsyncGenerator from typing import Any +import onnxruntime_genai as OnnxRuntimeGenAi + from semantic_kernel.connectors.ai.onnx.onnx_gen_ai_prompt_execution_settings import OnnxGenAIPromptExecutionSettings from semantic_kernel.contents import ImageContent from semantic_kernel.exceptions import ServiceInitializationError, ServiceInvalidResponseError from semantic_kernel.kernel_pydantic import KernelBaseModel -try: - import onnxruntime_genai as OnnxRuntimeGenAi - - ready = True -except ImportError: - 
ready = False - class OnnxGenAICompletionBase(KernelBaseModel): """Base class for OnnxGenAI Completion services.""" @@ -36,8 +31,6 @@ def __init__(self, ai_model_path: str, **kwargs) -> None: Raises: ServiceInitializationError: When model cannot be loaded """ - if not ready: - raise ImportError("onnxruntime-genai is not installed.") try: json_gen_ai_config = os.path.join(ai_model_path + "/genai_config.json") with open(json_gen_ai_config) as file: diff --git a/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_text_completion.py b/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_text_completion.py index 0f922397a7b7..11aa88165be4 100644 --- a/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_text_completion.py +++ b/python/semantic_kernel/connectors/ai/onnx/services/onnx_gen_ai_text_completion.py @@ -20,12 +20,12 @@ from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.exceptions import ServiceInitializationError -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class OnnxGenAITextCompletion(TextCompletionClientBase, OnnxGenAICompletionBase): """OnnxGenAI text completion service.""" diff --git a/python/semantic_kernel/connectors/ai/open_ai/__init__.py b/python/semantic_kernel/connectors/ai/open_ai/__init__.py index 34e11756fdb7..a3103ae86446 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/__init__.py +++ b/python/semantic_kernel/connectors/ai/open_ai/__init__.py @@ -22,12 +22,6 @@ OpenAIPromptExecutionSettings, OpenAITextPromptExecutionSettings, ) -from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_realtime_execution_settings import ( - AzureRealtimeExecutionSettings, - InputAudioTranscription, - OpenAIRealtimeExecutionSettings, - TurnDetection, -) from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_text_to_audio_execution_settings import ( OpenAITextToAudioExecutionSettings, ) @@ -36,19 +30,12 @@ ) from semantic_kernel.connectors.ai.open_ai.services.azure_audio_to_text import AzureAudioToText from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion -from semantic_kernel.connectors.ai.open_ai.services.azure_realtime import AzureRealtimeWebsocket from semantic_kernel.connectors.ai.open_ai.services.azure_text_completion import AzureTextCompletion from semantic_kernel.connectors.ai.open_ai.services.azure_text_embedding import AzureTextEmbedding from semantic_kernel.connectors.ai.open_ai.services.azure_text_to_audio import AzureTextToAudio from semantic_kernel.connectors.ai.open_ai.services.azure_text_to_image import AzureTextToImage from semantic_kernel.connectors.ai.open_ai.services.open_ai_audio_to_text import OpenAIAudioToText from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion -from semantic_kernel.connectors.ai.open_ai.services.open_ai_realtime import ( - ListenEvents, - OpenAIRealtimeWebRTC, - OpenAIRealtimeWebsocket, - SendEvents, -) from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion import OpenAITextCompletion from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding import OpenAITextEmbedding from 
semantic_kernel.connectors.ai.open_ai.services.open_ai_text_to_audio import OpenAITextToAudio @@ -68,8 +55,6 @@ "AzureDataSourceParameters", "AzureEmbeddingDependency", "AzureOpenAISettings", - "AzureRealtimeExecutionSettings", - "AzureRealtimeWebsocket", "AzureTextCompletion", "AzureTextEmbedding", "AzureTextToAudio", @@ -78,17 +63,12 @@ "DataSourceFieldsMapping", "DataSourceFieldsMapping", "ExtraBody", - "InputAudioTranscription", - "ListenEvents", "OpenAIAudioToText", "OpenAIAudioToTextExecutionSettings", "OpenAIChatCompletion", "OpenAIChatPromptExecutionSettings", "OpenAIEmbeddingPromptExecutionSettings", "OpenAIPromptExecutionSettings", - "OpenAIRealtimeExecutionSettings", - "OpenAIRealtimeWebRTC", - "OpenAIRealtimeWebsocket", "OpenAISettings", "OpenAITextCompletion", "OpenAITextEmbedding", @@ -97,6 +77,4 @@ "OpenAITextToAudioExecutionSettings", "OpenAITextToImage", "OpenAITextToImageExecutionSettings", - "SendEvents", - "TurnDetection", ] diff --git a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/azure_chat_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/azure_chat_prompt_execution_settings.py index 3d5199d8bf98..543b4e2c64a5 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/azure_chat_prompt_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/azure_chat_prompt_execution_settings.py @@ -39,7 +39,7 @@ class ApiKeyAuthentication(AzureChatRequestBase): """API key authentication.""" type: Annotated[Literal["APIKey", "api_key"], AfterValidator(to_snake)] = "api_key" - key: str + key: str | None = None class SystemAssignedManagedIdentityAuthentication(AzureChatRequestBase): diff --git a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_audio_to_text_execution_settings.py b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_audio_to_text_execution_settings.py index c20080bdf44d..5be6f5d364fe 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_audio_to_text_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_audio_to_text_execution_settings.py @@ -13,10 +13,9 @@ class OpenAIAudioToTextExecutionSettings(PromptExecutionSettings): """Request settings for OpenAI audio to text services.""" - ai_model_id: str | None = Field(default=None, serialization_alias="model") + ai_model_id: str | None = Field(None, serialization_alias="model") filename: str | None = Field( - default=None, - description="Do not set this manually. It is set by the service based on the audio content.", + None, description="Do not set this manually. It is set by the service based on the audio content." 
) language: str | None = None prompt: str | None = None diff --git a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py index b1a241d2a5d3..d2b37d44bb40 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py @@ -66,7 +66,7 @@ class OpenAIChatPromptExecutionSettings(OpenAIPromptExecutionSettings): messages: Annotated[ list[dict[str, Any]] | None, Field(description="Do not set this manually. It is set by the service.") ] = None - parallel_tool_calls: bool | None = None + parallel_tool_calls: bool | None = True tools: Annotated[ list[dict[str, Any]] | None, Field( diff --git a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_realtime_execution_settings.py b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_realtime_execution_settings.py deleted file mode 100644 index 2c4fc74738b5..000000000000 --- a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_realtime_execution_settings.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from collections.abc import Mapping, Sequence -from typing import Annotated, Any, Literal - -from pydantic import Field - -from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -from semantic_kernel.kernel_pydantic import KernelBaseModel - - -class InputAudioTranscription(KernelBaseModel): - """Input audio transcription settings. - - Args: - model: The model to use for transcription, currently only "whisper-1" is supported. - language: The language of the audio, should be in ISO-639-1 format, like 'en'. - prompt: An optional text to guide the model's style or continue a previous audio segment. - The prompt should match the audio language. - """ - - model: Literal["whisper-1"] | None = None - language: str | None = None - prompt: str | None = None - - -class TurnDetection(KernelBaseModel): - """Turn detection settings. - - Args: - type: The type of turn detection, currently only "server_vad" is supported. - threshold: The threshold for voice activity detection, should be between 0 and 1. - prefix_padding_ms: The padding before the detected voice activity, in milliseconds. - silence_duration_ms: The duration of silence to detect the end of a turn, in milliseconds. - create_response: Whether to create a response for each detected turn. 
- - """ - - type: Literal["server_vad"] = "server_vad" - threshold: Annotated[float | None, Field(ge=0.0, le=1.0)] = None - prefix_padding_ms: Annotated[int | None, Field(ge=0)] = None - silence_duration_ms: Annotated[int | None, Field(ge=0)] = None - create_response: bool | None = None - - -class OpenAIRealtimeExecutionSettings(PromptExecutionSettings): - """Request settings for OpenAI realtime services.""" - - modalities: Sequence[Literal["audio", "text"]] | None = None - ai_model_id: Annotated[str | None, Field(None, serialization_alias="model")] = None - instructions: str | None = None - voice: str | None = None - input_audio_format: Literal["pcm16", "g711_ulaw", "g711_alaw"] | None = None - output_audio_format: Literal["pcm16", "g711_ulaw", "g711_alaw"] | None = None - input_audio_transcription: InputAudioTranscription | Mapping[str, str] | None = None - turn_detection: TurnDetection | Mapping[str, str] | None = None - tools: Annotated[ - list[dict[str, Any]] | None, - Field( - description="Do not set this manually. It is set by the service based " - "on the function choice configuration.", - ), - ] = None - tool_choice: Annotated[ - str | None, - Field( - description="Do not set this manually. It is set by the service based " - "on the function choice configuration.", - ), - ] = None - temperature: Annotated[float | None, Field(ge=0.0, le=2.0)] = None - max_response_output_tokens: Annotated[int | Literal["inf"] | None, Field(gt=0)] = None - - -class AzureRealtimeExecutionSettings(OpenAIRealtimeExecutionSettings): - """Request settings for Azure OpenAI realtime services.""" - - pass diff --git a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_text_to_image_execution_settings.py b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_text_to_image_execution_settings.py index 58a9a9e6a46f..d482e92ec6e8 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_text_to_image_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_text_to_image_execution_settings.py @@ -36,7 +36,7 @@ class OpenAITextToImageExecutionSettings(PromptExecutionSettings): """Request settings for OpenAI text to image services.""" prompt: str | None = None - ai_model_id: str | None = Field(default=None, serialization_alias="model") + ai_model_id: str | None = Field(None, serialization_alias="model") size: ImageSize | None = None quality: str | None = None style: str | None = None diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/azure_config_base.py b/python/semantic_kernel/connectors/ai/open_ai/services/azure_config_base.py index 94d8691534fa..da50e4ee56b6 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/azure_config_base.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/azure_config_base.py @@ -3,11 +3,9 @@ import logging from collections.abc import Awaitable, Callable, Mapping from copy import copy -from typing import Any from openai import AsyncAzureOpenAI from pydantic import ConfigDict, validate_call -from pydantic_core import Url from semantic_kernel.connectors.ai.open_ai.const import DEFAULT_AZURE_API_VERSION from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import OpenAIHandler, OpenAIModelTypes @@ -29,7 +27,7 @@ def __init__( deployment_name: str, ai_model_type: OpenAIModelTypes, endpoint: HttpsUrl | None = None, - base_url: Url | None = None, + base_url: HttpsUrl | None = None, api_version: str = 
DEFAULT_AZURE_API_VERSION, service_id: str | None = None, api_key: str | None = None, @@ -39,7 +37,6 @@ def __init__( default_headers: Mapping[str, str] | None = None, client: AsyncAzureOpenAI | None = None, instruction_role: str | None = None, - **kwargs: Any, ) -> None: """Internal class for configuring a connection to an Azure OpenAI service. @@ -50,7 +47,7 @@ def __init__( deployment_name (str): Name of the deployment. ai_model_type (OpenAIModelTypes): The type of OpenAI model to deploy. endpoint (HttpsUrl): The specific endpoint URL for the deployment. (Optional) - base_url (Url): The base URL for Azure services. (Optional) + base_url (HttpsUrl): The base URL for Azure services. (Optional) api_version (str): Azure API version. Defaults to the defined DEFAULT_AZURE_API_VERSION. service_id (str): Service ID for the deployment. (Optional) api_key (str): API key for Azure services. (Optional) @@ -62,7 +59,6 @@ def __init__( client (AsyncAzureOpenAI): An existing client to use. (Optional) instruction_role (str | None): The role to use for 'instruction' messages, for example, summarization prompts could use `developer` or `system`. (Optional) - kwargs: Additional keyword arguments. """ # Merge APP_INFO into the headers if it exists @@ -83,29 +79,18 @@ def __init__( "Please provide either api_key, ad_token or ad_token_provider or a client." ) - if not endpoint and not base_url: - raise ServiceInitializationError("Please provide an endpoint or a base_url") - - args: dict[str, Any] = { - "default_headers": merged_headers, - } - if api_version: - args["api_version"] = api_version - if ad_token: - args["azure_ad_token"] = ad_token - if ad_token_provider: - args["azure_ad_token_provider"] = ad_token_provider - if api_key: - args["api_key"] = api_key - if base_url: - args["base_url"] = str(base_url) - if endpoint and not base_url: - args["azure_endpoint"] = str(endpoint) - # TODO (eavanvalkenburg): Remove the check on model type when the package fixes: https://github.com/openai/openai-python/issues/2120 - if deployment_name and ai_model_type != OpenAIModelTypes.REALTIME: - args["azure_deployment"] = deployment_name - - client = AsyncAzureOpenAI(**args) + if not base_url: + if not endpoint: + raise ServiceInitializationError("Please provide an endpoint or a base_url") + base_url = HttpsUrl(f"{str(endpoint).rstrip('/')}/openai/deployments/{deployment_name}") + client = AsyncAzureOpenAI( + base_url=str(base_url), + api_version=api_version, + api_key=api_key, + azure_ad_token=ad_token, + azure_ad_token_provider=ad_token_provider, + default_headers=merged_headers, + ) args = { "ai_model_id": deployment_name, "client": client, @@ -115,7 +100,7 @@ def __init__( args["service_id"] = service_id if instruction_role: args["instruction_role"] = instruction_role - super().__init__(**args, **kwargs) + super().__init__(**args) def to_dict(self) -> dict[str, str]: """Convert the configuration to a dictionary.""" diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/azure_realtime.py b/python/semantic_kernel/connectors/ai/open_ai/services/azure_realtime.py deleted file mode 100644 index 39e5690fb3c1..000000000000 --- a/python/semantic_kernel/connectors/ai/open_ai/services/azure_realtime.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import sys -from collections.abc import Callable, Coroutine, Mapping -from typing import Any - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from numpy import ndarray -from openai import AsyncAzureOpenAI -from openai.lib.azure import AsyncAzureADTokenProvider -from pydantic import ValidationError - -from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_realtime_execution_settings import ( - AzureRealtimeExecutionSettings, -) -from semantic_kernel.connectors.ai.open_ai.services.azure_config_base import AzureOpenAIConfigBase -from semantic_kernel.connectors.ai.open_ai.services.open_ai_model_types import OpenAIModelTypes -from semantic_kernel.connectors.ai.open_ai.services.open_ai_realtime import OpenAIRealtimeWebsocketBase -from semantic_kernel.connectors.ai.open_ai.settings.azure_open_ai_settings import AzureOpenAISettings -from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError -from semantic_kernel.utils.feature_stage_decorator import experimental - - -@experimental -class AzureRealtimeWebsocket(OpenAIRealtimeWebsocketBase, AzureOpenAIConfigBase): - """Azure OpenAI Realtime service using WebSocket protocol.""" - - def __init__( - self, - audio_output_callback: Callable[[ndarray], Coroutine[Any, Any, None]] | None = None, - service_id: str | None = None, - api_key: str | None = None, - deployment_name: str | None = None, - endpoint: str | None = None, - base_url: str | None = None, - api_version: str | None = None, - ad_token: str | None = None, - ad_token_provider: AsyncAzureADTokenProvider | None = None, - token_endpoint: str | None = None, - default_headers: Mapping[str, str] | None = None, - async_client: AsyncAzureOpenAI | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None, - **kwargs: Any, - ) -> None: - """Initialize an AzureRealtimeWebsocket service. - - Args: - audio_output_callback: The audio output callback, optional. - This should be a coroutine, that takes a ndarray with audio as input. - The goal of this function is to allow you to play the audio with the - least amount of latency possible, because it is called first before further processing. - It can also be set in the `receive` method. - Even when passed, the audio content will still be - added to the receiving queue. - service_id: The service ID for the Azure deployment. (Optional) - api_key: The optional api key. If provided, will override the value in the - env vars or .env file. - deployment_name: The optional deployment. If provided, will override the value - (chat_deployment_name) in the env vars or .env file. - endpoint: The optional deployment endpoint. If provided will override the value - in the env vars or .env file. - base_url: The optional deployment base_url. If provided will override the value - in the env vars or .env file. - api_version: The optional deployment api version. If provided will override the value - in the env vars or .env file. - ad_token: The Azure Active Directory token. (Optional) - ad_token_provider: The Azure Active Directory token provider. (Optional) - token_endpoint: The token endpoint to request an Azure token. (Optional) - default_headers: The default headers mapping of string keys to - string values for HTTP requests. (Optional) - async_client: An existing client to use. 
(Optional) - env_file_path: Use the environment settings file as a fallback to - environment variables. (Optional) - env_file_encoding: The encoding of the environment settings file. (Optional) - kwargs: Additional arguments. - """ - try: - azure_openai_settings = AzureOpenAISettings.create( - api_key=api_key, - base_url=base_url, - endpoint=endpoint, - realtime_deployment_name=deployment_name, - api_version=api_version, - token_endpoint=token_endpoint, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - ) - except ValidationError as ex: - raise ServiceInitializationError("Failed to create OpenAI settings.", ex) from ex - if not azure_openai_settings.realtime_deployment_name: - raise ServiceInitializationError("The OpenAI realtime model ID is required.") - super().__init__( - audio_output_callback=audio_output_callback, - deployment_name=azure_openai_settings.realtime_deployment_name, - endpoint=azure_openai_settings.endpoint, - base_url=azure_openai_settings.base_url, - api_version=azure_openai_settings.api_version, - ad_token=ad_token, - ad_token_provider=ad_token_provider, - token_endpoint=azure_openai_settings.token_endpoint, - ai_model_type=OpenAIModelTypes.REALTIME, - service_id=service_id, - default_headers=default_headers, - client=async_client, - **kwargs, - ) - - @override - def get_prompt_execution_settings_class(self) -> type[PromptExecutionSettings]: - return AzureRealtimeExecutionSettings diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py b/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py index f8f0654c741b..36486b7b8108 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py @@ -13,12 +13,12 @@ from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding_base import OpenAITextEmbeddingBase from semantic_kernel.connectors.ai.open_ai.settings.azure_open_ai_settings import AzureOpenAISettings from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class AzureTextEmbedding(AzureOpenAIConfigBase, OpenAITextEmbeddingBase): """Azure Text Embedding class.""" diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_config_base.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_config_base.py index 7883be04f4ff..d3d72795665b 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_config_base.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_config_base.py @@ -3,7 +3,6 @@ import logging from collections.abc import Mapping from copy import copy -from typing import Any from openai import AsyncOpenAI from pydantic import ConfigDict, Field, validate_call @@ -31,7 +30,6 @@ def __init__( default_headers: Mapping[str, str] | None = None, client: AsyncOpenAI | None = None, instruction_role: str | None = None, - **kwargs: Any, ) -> None: """Initialize a client for OpenAI services. @@ -53,7 +51,6 @@ def __init__( client (AsyncOpenAI): An existing OpenAI client, optional. instruction_role (str): The role to use for 'instruction' messages, for example, summarization prompts could use `developer` or `system`. 
(Optional) - kwargs: Additional keyword arguments. """ # Merge APP_INFO into the headers if it exists @@ -79,7 +76,7 @@ def __init__( args["service_id"] = service_id if instruction_role: args["instruction_role"] = instruction_role - super().__init__(**args, **kwargs) + super().__init__(**args) def to_dict(self) -> dict[str, str]: """Create a dict of the service settings.""" diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_model_types.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_model_types.py index ea2e05deead7..7a1f43da234e 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_model_types.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_model_types.py @@ -12,4 +12,3 @@ class OpenAIModelTypes(Enum): TEXT_TO_IMAGE = "text-to-image" AUDIO_TO_TEXT = "audio-to-text" TEXT_TO_AUDIO = "text-to-audio" - REALTIME = "realtime" diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_realtime.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_realtime.py deleted file mode 100644 index d6422066394b..000000000000 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_realtime.py +++ /dev/null @@ -1,1024 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import base64 -import contextlib -import json -import logging -import sys -from collections.abc import AsyncGenerator, Callable, Coroutine, Mapping -from enum import Enum -from typing import TYPE_CHECKING, Any, ClassVar, Literal, cast - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -import numpy as np -from aiohttp import ClientSession -from aiortc import ( - MediaStreamTrack, - RTCConfiguration, - RTCDataChannel, - RTCIceServer, - RTCPeerConnection, - RTCSessionDescription, -) -from av.audio.frame import AudioFrame -from numpy import ndarray -from openai import AsyncOpenAI -from openai._models import construct_type_unchecked -from openai.resources.beta.realtime.realtime import AsyncRealtimeConnection -from openai.types.beta.realtime import ( - ConversationItemCreateEvent, - ConversationItemDeleteEvent, - ConversationItemTruncateEvent, - InputAudioBufferAppendEvent, - InputAudioBufferClearEvent, - InputAudioBufferCommitEvent, - RealtimeClientEvent, - RealtimeServerEvent, - ResponseCancelEvent, - ResponseCreateEvent, - ResponseFunctionCallArgumentsDoneEvent, - SessionUpdateEvent, -) -from openai.types.beta.realtime.response_create_event import Response -from pydantic import Field, PrivateAttr, ValidationError - -from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration -from semantic_kernel.connectors.ai.function_calling_utils import ( - prepare_settings_for_function_calling, -) -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType -from semantic_kernel.connectors.ai.open_ai.services.open_ai_config_base import OpenAIConfigBase -from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import OpenAIHandler -from semantic_kernel.connectors.ai.open_ai.services.open_ai_model_types import OpenAIModelTypes -from semantic_kernel.connectors.ai.open_ai.settings.open_ai_settings import OpenAISettings -from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -from semantic_kernel.connectors.ai.realtime_client_base import RealtimeClientBase -from 
semantic_kernel.contents.audio_content import AudioContent -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.contents.realtime_events import ( - RealtimeAudioEvent, - RealtimeEvent, - RealtimeEvents, - RealtimeFunctionCallEvent, - RealtimeFunctionResultEvent, - RealtimeTextEvent, -) -from semantic_kernel.contents.streaming_text_content import StreamingTextContent -from semantic_kernel.contents.text_content import TextContent -from semantic_kernel.exceptions import ContentException -from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError -from semantic_kernel.kernel import Kernel -from semantic_kernel.utils.feature_stage_decorator import experimental - -if TYPE_CHECKING: - from aiortc.mediastreams import MediaStreamTrack - - from semantic_kernel.connectors.ai.function_choice_behavior import ( - FunctionCallChoiceConfiguration, - FunctionChoiceType, - ) - from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings - from semantic_kernel.contents.chat_history import ChatHistory - from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata - -logger: logging.Logger = logging.getLogger(__name__) - -# region constants - - -@experimental -class SendEvents(str, Enum): - """Events that can be sent.""" - - SESSION_UPDATE = "session.update" - INPUT_AUDIO_BUFFER_APPEND = "input_audio_buffer.append" - INPUT_AUDIO_BUFFER_COMMIT = "input_audio_buffer.commit" - INPUT_AUDIO_BUFFER_CLEAR = "input_audio_buffer.clear" - CONVERSATION_ITEM_CREATE = "conversation.item.create" - CONVERSATION_ITEM_TRUNCATE = "conversation.item.truncate" - CONVERSATION_ITEM_DELETE = "conversation.item.delete" - RESPONSE_CREATE = "response.create" - RESPONSE_CANCEL = "response.cancel" - - -@experimental -class ListenEvents(str, Enum): - """Events that can be listened to.""" - - ERROR = "error" - SESSION_CREATED = "session.created" - SESSION_UPDATED = "session.updated" - CONVERSATION_CREATED = "conversation.created" - INPUT_AUDIO_BUFFER_COMMITTED = "input_audio_buffer.committed" - INPUT_AUDIO_BUFFER_CLEARED = "input_audio_buffer.cleared" - INPUT_AUDIO_BUFFER_SPEECH_STARTED = "input_audio_buffer.speech_started" - INPUT_AUDIO_BUFFER_SPEECH_STOPPED = "input_audio_buffer.speech_stopped" - CONVERSATION_ITEM_CREATED = "conversation.item.created" - CONVERSATION_ITEM_INPUT_AUDIO_TRANSCRIPTION_COMPLETED = "conversation.item.input_audio_transcription.completed" - CONVERSATION_ITEM_INPUT_AUDIO_TRANSCRIPTION_FAILED = "conversation.item.input_audio_transcription.failed" - CONVERSATION_ITEM_TRUNCATED = "conversation.item.truncated" - CONVERSATION_ITEM_DELETED = "conversation.item.deleted" - RESPONSE_CREATED = "response.created" - RESPONSE_DONE = "response.done" # contains usage info -> log - RESPONSE_OUTPUT_ITEM_ADDED = "response.output_item.added" - RESPONSE_OUTPUT_ITEM_DONE = "response.output_item.done" - RESPONSE_CONTENT_PART_ADDED = "response.content_part.added" - RESPONSE_CONTENT_PART_DONE = "response.content_part.done" - RESPONSE_TEXT_DELTA = "response.text.delta" - RESPONSE_TEXT_DONE = "response.text.done" - RESPONSE_AUDIO_TRANSCRIPT_DELTA = "response.audio_transcript.delta" - RESPONSE_AUDIO_TRANSCRIPT_DONE = "response.audio_transcript.done" - RESPONSE_AUDIO_DELTA = 
"response.audio.delta" - RESPONSE_AUDIO_DONE = "response.audio.done" - RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA = "response.function_call_arguments.delta" - RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE = "response.function_call_arguments.done" - RATE_LIMITS_UPDATED = "rate_limits.updated" - - -# region utils - - -def update_settings_from_function_call_configuration( - function_choice_configuration: "FunctionCallChoiceConfiguration", - settings: "PromptExecutionSettings", - type: "FunctionChoiceType", -) -> None: - """Update the settings from a FunctionChoiceConfiguration.""" - if ( - function_choice_configuration.available_functions - and hasattr(settings, "tool_choice") - and hasattr(settings, "tools") - ): - settings.tool_choice = type # type: ignore - settings.tools = [ # type: ignore - kernel_function_metadata_to_function_call_format(f) - for f in function_choice_configuration.available_functions - ] - - -def kernel_function_metadata_to_function_call_format( - metadata: "KernelFunctionMetadata", -) -> dict[str, Any]: - """Convert the kernel function metadata to function calling format. - - Function calling in the realtime API, uses a slightly different format than the chat completion API. - See https://platform.openai.com/docs/api-reference/realtime-sessions/create#realtime-sessions-create-tools - for more details. - - TLDR: there is no "function" key, and the function details are at the same level as "type". - """ - return { - "type": "function", - "name": metadata.fully_qualified_name, - "description": metadata.description or "", - "parameters": { - "type": "object", - "properties": { - param.name: param.schema_data for param in metadata.parameters if param.include_in_function_choices - }, - "required": [p.name for p in metadata.parameters if p.is_required and p.include_in_function_choices], - }, - } - - -def _create_openai_realtime_client_event(event_type: SendEvents, **kwargs: Any) -> RealtimeClientEvent: - """Create an OpenAI Realtime client event from a event type and kwargs.""" - match event_type: - case SendEvents.SESSION_UPDATE: - if "session" not in kwargs: - raise ContentException("Session is required for SessionUpdateEvent") - return SessionUpdateEvent( - type=event_type, - session=kwargs.pop("session"), - **kwargs, - ) - case SendEvents.INPUT_AUDIO_BUFFER_APPEND: - if "audio" not in kwargs: - raise ContentException("Audio is required for InputAudioBufferAppendEvent") - return InputAudioBufferAppendEvent( - type=event_type, - **kwargs, - ) - case SendEvents.INPUT_AUDIO_BUFFER_COMMIT: - return InputAudioBufferCommitEvent( - type=event_type, - **kwargs, - ) - case SendEvents.INPUT_AUDIO_BUFFER_CLEAR: - return InputAudioBufferClearEvent( - type=event_type, - **kwargs, - ) - case SendEvents.CONVERSATION_ITEM_CREATE: - if "item" not in kwargs: - raise ContentException("Item is required for ConversationItemCreateEvent") - kwargs["type"] = event_type - return ConversationItemCreateEvent(**kwargs) - case SendEvents.CONVERSATION_ITEM_TRUNCATE: - if "content_index" not in kwargs: - kwargs["content_index"] = 0 - return ConversationItemTruncateEvent( - type=event_type, - **kwargs, - ) - case SendEvents.CONVERSATION_ITEM_DELETE: - if "item_id" not in kwargs: - raise ContentException("Item ID is required for ConversationItemDeleteEvent") - return ConversationItemDeleteEvent( - type=event_type, - **kwargs, - ) - case SendEvents.RESPONSE_CREATE: - if "response" in kwargs: - response: Response | None = Response.model_validate(kwargs.pop("response")) - else: - response = None - return 
ResponseCreateEvent( - type=event_type, - response=response, - **kwargs, - ) - case SendEvents.RESPONSE_CANCEL: - return ResponseCancelEvent( - type=event_type, - **kwargs, - ) - - -# region Base - - -@experimental -class OpenAIRealtimeBase(OpenAIHandler, RealtimeClientBase): - """OpenAI Realtime service.""" - - SUPPORTS_FUNCTION_CALLING: ClassVar[bool] = True - kernel: Kernel | None = None - - _current_settings: PromptExecutionSettings | None = PrivateAttr(default=None) - _call_id_to_function_map: dict[str, str] = PrivateAttr(default_factory=dict) - - async def _parse_event(self, event: RealtimeServerEvent) -> AsyncGenerator[RealtimeEvents, None]: - """Handle all events but audio delta. - - Audio delta has to be handled by the implementation of the protocol as some - protocols have different ways of handling audio. - - We put all event in the output buffer, but after the interpreted one. - so when dealing with them, make sure to check the type of the event, since they - might be of different types. - """ - match event.type: - case ListenEvents.RESPONSE_AUDIO_TRANSCRIPT_DELTA.value: - yield RealtimeTextEvent( - service_type=event.type, - service_event=event, - text=StreamingTextContent( - inner_content=event, - text=event.delta, # type: ignore - choice_index=0, - ), - ) - case ListenEvents.RESPONSE_OUTPUT_ITEM_ADDED.value: - if event.item.type == "function_call" and event.item.call_id and event.item.name: # type: ignore - self._call_id_to_function_map[event.item.call_id] = event.item.name # type: ignore - yield RealtimeEvent(service_type=event.type, service_event=event) - case ListenEvents.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA.value: - yield RealtimeFunctionCallEvent( - service_type=event.type, - service_event=event, - function_call=FunctionCallContent( - id=event.item_id, # type: ignore - name=self._call_id_to_function_map[event.call_id], # type: ignore - arguments=event.delta, # type: ignore - index=event.output_index, # type: ignore - metadata={"call_id": event.call_id}, # type: ignore - inner_content=event, - ), - ) - case ListenEvents.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE.value: - async for parsed_event in self._parse_function_call_arguments_done(event): # type: ignore - if parsed_event: - yield parsed_event - case ListenEvents.ERROR.value: - logger.error("Error received: %s", event.error.model_dump_json()) # type: ignore - yield RealtimeEvent(service_type=event.type, service_event=event) - case ListenEvents.SESSION_CREATED.value | ListenEvents.SESSION_UPDATED.value: - logger.info("Session created or updated, session: %s", event.session.model_dump_json()) # type: ignore - yield RealtimeEvent(service_type=event.type, service_event=event) - case _: - logger.debug(f"Received event: {event}") - yield RealtimeEvent(service_type=event.type, service_event=event) - - @override - async def update_session( - self, - chat_history: ChatHistory | None = None, - settings: PromptExecutionSettings | None = None, - create_response: bool = False, - **kwargs: Any, - ) -> None: - """Update the session in the service. - - Args: - chat_history: Chat history. - settings: Prompt execution settings, if kernel is linked to the service or passed as - Kwargs, it will be used to update the settings for function calling. - create_response: Create a response, get the model to start responding, default is False. - kwargs: Additional arguments, if 'kernel' is passed, it will be used to update the - settings for function calling, others will be ignored. 
- - """ - if kwargs: - if self._create_kwargs: - kwargs = {**self._create_kwargs, **kwargs} - else: - kwargs = self._create_kwargs or {} - if settings: - self._current_settings = settings - if "kernel" in kwargs: - self.kernel = kwargs["kernel"] - - if self._current_settings: - if self.kernel: - self._current_settings = prepare_settings_for_function_calling( - self._current_settings, - self.get_prompt_execution_settings_class(), - self._update_function_choice_settings_callback(), - kernel=self.kernel, # type: ignore - ) - await self.send( - RealtimeEvent( - service_type=SendEvents.SESSION_UPDATE, - service_event={"settings": self._current_settings}, - ) - ) - - if chat_history and len(chat_history) > 0: - for msg in chat_history.messages: - for item in msg.items: - match item: - case TextContent(): - await self.send( - RealtimeTextEvent(service_type=SendEvents.CONVERSATION_ITEM_CREATE, text=item) - ) - case FunctionCallContent(): - await self.send( - RealtimeFunctionCallEvent( - service_type=SendEvents.CONVERSATION_ITEM_CREATE, function_call=item - ) - ) - case FunctionResultContent(): - await self.send( - RealtimeFunctionResultEvent( - service_type=SendEvents.CONVERSATION_ITEM_CREATE, function_result=item - ) - ) - case _: - logger.error("Unsupported item type: %s", item) - - if create_response or kwargs.get("create_response", False) is True: - await self.send(RealtimeEvent(service_type=SendEvents.RESPONSE_CREATE)) - - async def _parse_function_call_arguments_done( - self, - event: ResponseFunctionCallArgumentsDoneEvent, - ) -> AsyncGenerator[RealtimeEvents | None]: - """Handle response function call done. - - This always yields at least 1 event, either a RealtimeEvent or a RealtimeFunctionResultEvent with the raw event. - - It then also yields any function results both back to the service, through `send` and to the developer. - - """ - # Step 1: check if function calling enabled: - if not self.kernel or ( - self._current_settings - and self._current_settings.function_choice_behavior - and not self._current_settings.function_choice_behavior.auto_invoke_kernel_functions - ): - yield RealtimeEvent(service_type=event.type, service_event=event) - return - # Step 2: check if there is a function that can be found. - try: - plugin_name, function_name = self._call_id_to_function_map.pop(event.call_id, "-").split("-", 1) - except ValueError: - logger.error("Function call needs to have a plugin name and function name") - yield RealtimeEvent(service_type=event.type, service_event=event) - return - - # Step 3: Parse into the function call content, and yield that. 
- item = FunctionCallContent( - id=event.item_id, - plugin_name=plugin_name, - function_name=function_name, - arguments=event.arguments, - index=event.output_index, - metadata={"call_id": event.call_id}, - ) - yield RealtimeFunctionCallEvent( - service_type=ListenEvents.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE, function_call=item, service_event=event - ) - - # Step 4: Invoke the function call - chat_history = ChatHistory() - await self.kernel.invoke_function_call(item, chat_history) - created_output: FunctionResultContent = chat_history.messages[-1].items[0] # type: ignore - # Step 5: Create the function result event - result = RealtimeFunctionResultEvent( - service_type=SendEvents.CONVERSATION_ITEM_CREATE, - function_result=created_output, - ) - # Step 6: send the result to the service and call `create response` - await self.send(result) - await self.send(RealtimeEvent(service_type=SendEvents.RESPONSE_CREATE)) - # Step 7: yield the function result back to the developer as well - yield result - - async def _send(self, event: RealtimeClientEvent) -> None: - """Send an event to the service.""" - raise NotImplementedError - - @override - async def send(self, event: RealtimeEvents, **kwargs: Any) -> None: - match event: - case RealtimeAudioEvent(): - await self._send( - _create_openai_realtime_client_event( - event_type=SendEvents.INPUT_AUDIO_BUFFER_APPEND, audio=event.audio.data_string - ) - ) - case RealtimeTextEvent(): - await self._send( - _create_openai_realtime_client_event( - event_type=SendEvents.CONVERSATION_ITEM_CREATE, - item={ - "type": "message", - "content": [ - { - "type": "input_text", - "text": event.text.text, - } - ], - "role": "user", - }, - ) - ) - case RealtimeFunctionCallEvent(): - await self._send( - _create_openai_realtime_client_event( - event_type=SendEvents.CONVERSATION_ITEM_CREATE, - item={ - "type": "function_call", - "name": event.function_call.name or event.function_call.function_name, - "arguments": "" - if not event.function_call.arguments - else event.function_call.arguments - if isinstance(event.function_call.arguments, str) - else json.dumps(event.function_call.arguments), - "call_id": event.function_call.metadata.get("call_id"), - }, - ) - ) - case RealtimeFunctionResultEvent(): - await self._send( - _create_openai_realtime_client_event( - event_type=SendEvents.CONVERSATION_ITEM_CREATE, - item={ - "type": "function_call_output", - "output": event.function_result.result, - "call_id": event.function_result.metadata.get("call_id"), - }, - ) - ) - case _: - data = event.service_event - match event.service_type: - case SendEvents.SESSION_UPDATE: - if not data: - logger.error("Event data is empty") - return - settings = data.get("settings", None) - if not settings: - logger.error("Event data does not contain 'settings'") - return - try: - settings = self.get_prompt_execution_settings_from_settings(settings) - except Exception as e: - logger.error( - f"Failed to properly create settings from passed settings: {settings}, error: {e}" - ) - return - assert isinstance(settings, self.get_prompt_execution_settings_class()) # nosec - if not settings.ai_model_id: # type: ignore - settings.ai_model_id = self.ai_model_id # type: ignore - await self._send( - _create_openai_realtime_client_event( - event_type=event.service_type, - session=settings.prepare_settings_dict(), - ) - ) - case SendEvents.INPUT_AUDIO_BUFFER_APPEND: - if not data or "audio" not in data: - logger.error("Event data does not contain 'audio'") - return - await self._send( - 
_create_openai_realtime_client_event( - event_type=event.service_type, - audio=data["audio"], - ) - ) - case SendEvents.INPUT_AUDIO_BUFFER_COMMIT: - await self._send(_create_openai_realtime_client_event(event_type=event.service_type)) - case SendEvents.INPUT_AUDIO_BUFFER_CLEAR: - await self._send(_create_openai_realtime_client_event(event_type=event.service_type)) - case SendEvents.CONVERSATION_ITEM_CREATE: - if not data or "item" not in data: - logger.error("Event data does not contain 'item'") - return - content = data["item"] - contents = content.items if isinstance(content, ChatMessageContent) else [content] - for item in contents: - match item: - case TextContent(): - await self._send( - _create_openai_realtime_client_event( - event_type=event.service_type, - item={ - "type": "message", - "content": [ - { - "type": "input_text", - "text": item.text, - } - ], - "role": "user", - }, - ) - ) - case FunctionCallContent(): - await self._send( - _create_openai_realtime_client_event( - event_type=event.service_type, - item={ - "type": "function_call", - "name": item.name or item.function_name, - "arguments": "" - if not item.arguments - else item.arguments - if isinstance(item.arguments, str) - else json.dumps(item.arguments), - "call_id": item.metadata.get("call_id"), - }, - ) - ) - - case FunctionResultContent(): - await self._send( - _create_openai_realtime_client_event( - event_type=event.service_type, - item={ - "type": "function_call_output", - "output": item.result, - "call_id": item.metadata.get("call_id"), - }, - ) - ) - case SendEvents.CONVERSATION_ITEM_TRUNCATE: - if not data or "item_id" not in data: - logger.error("Event data does not contain 'item_id'") - return - await self._send( - _create_openai_realtime_client_event( - event_type=event.service_type, - item_id=data["item_id"], - content_index=0, - audio_end_ms=data.get("audio_end_ms", 0), - ) - ) - case SendEvents.CONVERSATION_ITEM_DELETE: - if not data or "item_id" not in data: - logger.error("Event data does not contain 'item_id'") - return - await self._send( - _create_openai_realtime_client_event( - event_type=event.service_type, - item_id=data["item_id"], - ) - ) - case SendEvents.RESPONSE_CREATE: - await self._send( - _create_openai_realtime_client_event( - event_type=event.service_type, event_id=data.get("event_id", None) if data else None - ) - ) - case SendEvents.RESPONSE_CANCEL: - await self._send( - _create_openai_realtime_client_event( - event_type=event.service_type, - response_id=data.get("response_id", None) if data else None, - ) - ) - - @override - def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: - from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_realtime_execution_settings import ( # noqa - OpenAIRealtimeExecutionSettings, - ) - - return OpenAIRealtimeExecutionSettings - - @override - def _update_function_choice_settings_callback( - self, - ) -> Callable[[FunctionCallChoiceConfiguration, "PromptExecutionSettings", FunctionChoiceType], None]: - return update_settings_from_function_call_configuration - - -# region WebRTC -@experimental -class OpenAIRealtimeWebRTCBase(OpenAIRealtimeBase): - """OpenAI WebRTC Realtime service.""" - - peer_connection: RTCPeerConnection | None = None - data_channel: RTCDataChannel | None = None - audio_track: MediaStreamTrack | None = None - _receive_buffer: asyncio.Queue[RealtimeEvents] = PrivateAttr(default_factory=asyncio.Queue) - - @override - async def receive( - self, - audio_output_callback: Callable[[ndarray], 
Coroutine[Any, Any, None]] | None = None, - **kwargs: Any, - ) -> AsyncGenerator[RealtimeEvents, None]: - if audio_output_callback: - self.audio_output_callback = audio_output_callback - while True: - event = await self._receive_buffer.get() - yield event - - async def _send(self, event: RealtimeClientEvent) -> None: - if not self.data_channel: - logger.error("Data channel not initialized") - return - while self.data_channel.readyState != "open": - await asyncio.sleep(0.1) - try: - self.data_channel.send(event.model_dump_json(exclude_none=True)) - except Exception as e: - logger.error(f"Failed to send event {event} with error: {e!s}") - - @override - async def create_session( - self, - chat_history: "ChatHistory | None" = None, - settings: "PromptExecutionSettings | None" = None, - **kwargs: Any, - ) -> None: - """Create a session in the service.""" - if not self.audio_track: - raise Exception("Audio track not initialized") - self.peer_connection = RTCPeerConnection( - configuration=RTCConfiguration(iceServers=[RTCIceServer(urls="stun:stun.l.google.com:19302")]) - ) - - # track is the audio track being returned from the service - self.peer_connection.add_listener("track", self._on_track) - - # data channel is used to send and receive messages - self.data_channel = self.peer_connection.createDataChannel("oai-events", protocol="json") - self.data_channel.add_listener("message", self._on_data) - - # this is the incoming audio, which sends audio to the service - self.peer_connection.addTransceiver(self.audio_track) - - offer = await self.peer_connection.createOffer() - await self.peer_connection.setLocalDescription(offer) - - try: - ephemeral_token = await self._get_ephemeral_token() - headers = {"Authorization": f"Bearer {ephemeral_token}", "Content-Type": "application/sdp"} - - async with ( - ClientSession() as session, - session.post( - f"{self.client.beta.realtime._client.base_url}realtime?model={self.ai_model_id}", - headers=headers, - data=offer.sdp, - ) as response, - ): - if response.status not in [200, 201]: - error_text = await response.text() - raise Exception(f"OpenAI WebRTC error: {error_text}") - - sdp_answer = await response.text() - answer = RTCSessionDescription(sdp=sdp_answer, type="answer") - await self.peer_connection.setRemoteDescription(answer) - logger.info("Connected to OpenAI WebRTC") - - except Exception as e: - logger.error(f"Failed to connect to OpenAI: {e!s}") - raise - - if settings or chat_history or kwargs: - await self.update_session(settings=settings, chat_history=chat_history, **kwargs) - - @override - async def close_session(self) -> None: - """Close the session in the service.""" - if self.peer_connection: - with contextlib.suppress(asyncio.CancelledError): - await self.peer_connection.close() - self.peer_connection = None - if self.data_channel: - with contextlib.suppress(asyncio.CancelledError): - self.data_channel.close() - self.data_channel = None - - async def _on_track(self, track: "MediaStreamTrack") -> None: - logger.debug(f"Received {track.kind} track from remote") - if track.kind != "audio": - return - while True: - try: - # This is a MediaStreamTrack, so the type is AudioFrame - # this might need to be updated if video becomes part of this - frame: AudioFrame = await track.recv() # type: ignore - except asyncio.CancelledError: - break - except Exception as e: - logger.error(f"Error getting audio frame: {e!s}") - break - - try: - if self.audio_output_callback: - await self.audio_output_callback(frame.to_ndarray()) - - except Exception as e: - 
logger.error(f"Error playing remote audio frame: {e!s}") - try: - await self._receive_buffer.put( - RealtimeAudioEvent( - audio=AudioContent(data=frame.to_ndarray(), data_format="np.int16", inner_content=frame), - service_event=frame, - service_type=ListenEvents.RESPONSE_AUDIO_DELTA, - ), - ) - except Exception as e: - logger.error(f"Error processing remote audio frame: {e!s}") - await asyncio.sleep(0.01) - - async def _on_data(self, data: str) -> None: - """This method is called whenever a data channel message is received. - - The data is parsed into a RealtimeServerEvent (by OpenAI code) and then processed. - Audio data is not send through this channel, use _on_track for that. - """ - try: - event = cast( - RealtimeServerEvent, - construct_type_unchecked(value=json.loads(data), type_=cast(Any, RealtimeServerEvent)), - ) - except Exception as e: - logger.error(f"Failed to parse event {data} with error: {e!s}") - return - async for parsed_event in self._parse_event(event): - await self._receive_buffer.put(parsed_event) - - async def _get_ephemeral_token(self) -> str: - """Get an ephemeral token from OpenAI.""" - headers = {"Authorization": f"Bearer {self.client.api_key}", "Content-Type": "application/json"} - data = {"model": self.ai_model_id, "voice": "echo"} - - try: - async with ( - ClientSession() as session, - session.post( - f"{self.client.beta.realtime._client.base_url}/realtime/sessions", headers=headers, json=data - ) as response, - ): - if response.status not in [200, 201]: - error_text = await response.text() - raise Exception(f"Failed to get ephemeral token: {error_text}") - - result = await response.json() - return result["client_secret"]["value"] - - except Exception as e: - logger.error(f"Failed to get ephemeral token: {e!s}") - raise - - -@experimental -class OpenAIRealtimeWebRTC(OpenAIRealtimeWebRTCBase, OpenAIConfigBase): - """OpenAI Realtime service using WebRTC protocol.""" - - def __init__( - self, - audio_track: "MediaStreamTrack", - audio_output_callback: Callable[[ndarray], Coroutine[Any, Any, None]] | None = None, - ai_model_id: str | None = None, - api_key: str | None = None, - org_id: str | None = None, - service_id: str | None = None, - default_headers: Mapping[str, str] | None = None, - client: AsyncOpenAI | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None, - **kwargs: Any, - ) -> None: - """Initialize an OpenAIRealtime service. - - Args: - audio_output_callback: The audio output callback, optional. - This should be a coroutine, that takes a ndarray with audio as input. - The goal of this function is to allow you to play the audio with the - least amount of latency possible, because it is called first before further processing. - It can also be set in the `receive` method. - Even when passed, the audio content will still be - added to the receiving queue. - audio_track: The audio track to use for the service, only used by WebRTC. - A default is supplied if not provided. - It can be any class that implements the AudioStreamTrack interface. - ai_model_id (str | None): OpenAI model name, see - https://platform.openai.com/docs/models - service_id (str | None): Service ID tied to the execution settings. - api_key (str | None): The optional API key to use. If provided will override, - the env vars or .env file value. - org_id (str | None): The optional org ID to use. If provided will override, - the env vars or .env file value. - default_headers: The default headers mapping of string keys to - string values for HTTP requests. 
(Optional) - client (Optional[AsyncOpenAI]): An existing client to use. (Optional) - env_file_path (str | None): Use the environment settings file as a fallback to - environment variables. (Optional) - env_file_encoding (str | None): The encoding of the environment settings file. (Optional) - kwargs: Additional arguments. - """ - try: - openai_settings = OpenAISettings.create( - api_key=api_key, - org_id=org_id, - realtime_model_id=ai_model_id, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - ) - except ValidationError as ex: - raise ServiceInitializationError("Failed to create OpenAI settings.", ex) from ex - if not openai_settings.realtime_model_id: - raise ServiceInitializationError("The OpenAI realtime model ID is required.") - if audio_track: - kwargs["audio_track"] = audio_track - super().__init__( - audio_output_callback=audio_output_callback, - ai_model_id=openai_settings.realtime_model_id, - service_id=service_id, - api_key=openai_settings.api_key.get_secret_value() if openai_settings.api_key else None, - org_id=openai_settings.org_id, - ai_model_type=OpenAIModelTypes.REALTIME, - default_headers=default_headers, - client=client, - **kwargs, - ) - - -# region Websocket - - -@experimental -class OpenAIRealtimeWebsocketBase(OpenAIRealtimeBase): - """OpenAI Realtime service.""" - - protocol: ClassVar[Literal["websocket"]] = "websocket" # type: ignore - connection: AsyncRealtimeConnection | None = None - connected: asyncio.Event = Field(default_factory=asyncio.Event) - - @override - async def receive( - self, - audio_output_callback: Callable[[ndarray], Coroutine[Any, Any, None]] | None = None, - **kwargs: Any, - ) -> AsyncGenerator[RealtimeEvents, None]: - if audio_output_callback: - self.audio_output_callback = audio_output_callback - await self.connected.wait() - if not self.connection: - raise ValueError("Connection is not established.") - - async for event in self.connection: - if event.type == ListenEvents.RESPONSE_AUDIO_DELTA.value: - if self.audio_output_callback: - await self.audio_output_callback(np.frombuffer(base64.b64decode(event.delta), dtype=np.int16)) - yield RealtimeAudioEvent( - audio=AudioContent(data=event.delta, data_format="base64", inner_content=event), - service_type=event.type, - service_event=event, - ) - continue - async for realtime_event in self._parse_event(event): - yield realtime_event - - async def _send(self, event: RealtimeClientEvent) -> None: - await self.connected.wait() - if not self.connection: - raise ValueError("Connection is not established.") - try: - await self.connection.send(event) - except Exception as e: - logger.error(f"Error sending response: {e!s}") - - @override - async def create_session( - self, - chat_history: "ChatHistory | None" = None, - settings: "PromptExecutionSettings | None" = None, - **kwargs: Any, - ) -> None: - """Create a session in the service.""" - self.connection = await self.client.beta.realtime.connect(model=self.ai_model_id).enter() - self.connected.set() - if settings or chat_history or kwargs: - await self.update_session(settings=settings, chat_history=chat_history, **kwargs) - - @override - async def close_session(self) -> None: - """Close the session in the service.""" - if self.connected.is_set() and self.connection: - await self.connection.close() - self.connection = None - self.connected.clear() - - -@experimental -class OpenAIRealtimeWebsocket(OpenAIRealtimeWebsocketBase, OpenAIConfigBase): - """OpenAI Realtime service using WebSocket protocol.""" - - def __init__( - self, - 
audio_output_callback: Callable[[ndarray], Coroutine[Any, Any, None]] | None = None, - ai_model_id: str | None = None, - api_key: str | None = None, - org_id: str | None = None, - service_id: str | None = None, - default_headers: Mapping[str, str] | None = None, - client: AsyncOpenAI | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None, - **kwargs: Any, - ) -> None: - """Initialize an OpenAIRealtime service. - - Args: - audio_output_callback: The audio output callback, optional. - This should be a coroutine, that takes a ndarray with audio as input. - The goal of this function is to allow you to play the audio with the - least amount of latency possible, because it is called first before further processing. - It can also be set in the `receive` method. - Even when passed, the audio content will still be - added to the receiving queue. - ai_model_id (str | None): OpenAI model name, see - https://platform.openai.com/docs/models - service_id (str | None): Service ID tied to the execution settings. - api_key (str | None): The optional API key to use. If provided will override, - the env vars or .env file value. - org_id (str | None): The optional org ID to use. If provided will override, - the env vars or .env file value. - default_headers: The default headers mapping of string keys to - string values for HTTP requests. (Optional) - client (Optional[AsyncOpenAI]): An existing client to use. (Optional) - env_file_path (str | None): Use the environment settings file as a fallback to - environment variables. (Optional) - env_file_encoding (str | None): The encoding of the environment settings file. (Optional) - kwargs: Additional arguments. - """ - try: - openai_settings = OpenAISettings.create( - api_key=api_key, - org_id=org_id, - realtime_model_id=ai_model_id, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - ) - except ValidationError as ex: - raise ServiceInitializationError("Failed to create OpenAI settings.", ex) from ex - if not openai_settings.realtime_model_id: - raise ServiceInitializationError("The OpenAI realtime model ID is required.") - super().__init__( - audio_output_callback=audio_output_callback, - ai_model_id=openai_settings.realtime_model_id, - service_id=service_id, - api_key=openai_settings.api_key.get_secret_value() if openai_settings.api_key else None, - org_id=openai_settings.org_id, - ai_model_type=OpenAIModelTypes.REALTIME, - default_headers=default_headers, - client=client, - **kwargs, - ) diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding.py index 0a0f26440923..8459780b3f5a 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding.py @@ -12,14 +12,14 @@ from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding_base import OpenAITextEmbeddingBase from semantic_kernel.connectors.ai.open_ai.settings.open_ai_settings import OpenAISettings from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) T_ = TypeVar("T_", bound="OpenAITextEmbedding") -@experimental +@experimental_class class OpenAITextEmbedding(OpenAIConfigBase, 
OpenAITextEmbeddingBase): """OpenAI Text Embedding class.""" diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding_base.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding_base.py index 364d6822d819..9c686335255b 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding_base.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding_base.py @@ -15,13 +15,13 @@ OpenAIEmbeddingPromptExecutionSettings, ) from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import OpenAIHandler -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -@experimental +@experimental_class class OpenAITextEmbeddingBase(OpenAIHandler, EmbeddingGeneratorBase): """Base class for OpenAI text embedding services.""" diff --git a/python/semantic_kernel/connectors/ai/open_ai/settings/azure_open_ai_settings.py b/python/semantic_kernel/connectors/ai/open_ai/settings/azure_open_ai_settings.py index 47ebc4c2b7b7..8603714804cf 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/settings/azure_open_ai_settings.py +++ b/python/semantic_kernel/connectors/ai/open_ai/settings/azure_open_ai_settings.py @@ -3,7 +3,6 @@ from typing import ClassVar from pydantic import SecretStr -from pydantic_core import Url from semantic_kernel.connectors.ai.open_ai.const import DEFAULT_AZURE_API_VERSION from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError @@ -56,12 +55,6 @@ class AzureOpenAISettings(KernelBaseSettings): Resource Management > Deployments in the Azure portal or, alternatively, under Management > Deployments in Azure OpenAI Studio. (Env var AZURE_OPENAI_TEXT_TO_AUDIO_DEPLOYMENT_NAME) - - realtime_deployment_name: str - The name of the Azure Realtime deployment. This value - will correspond to the custom name you chose for your deployment - when you deployed a model. This value can be found under - Resource Management > Deployments in the Azure portal or, alternatively, - under Management > Deployments in Azure OpenAI Studio. - (Env var AZURE_OPENAI_REALTIME_DEPLOYMENT_NAME) - api_key: SecretStr - The API key for the Azure deployment. This value can be found in the Keys & Endpoint section when examining your resource in the Azure portal. You can use either KEY1 or KEY2. @@ -80,7 +73,7 @@ class AzureOpenAISettings(KernelBaseSettings): - api_version: str | None - The API version to use. The default value is "2024-02-01". (Env var AZURE_OPENAI_API_VERSION) - token_endpoint: str - The token endpoint to use to retrieve the authentication token. - The default value is "https://cognitiveservices.azure.com/.default". + The default value is "https://cognitiveservices.azure.com". 
(Env var AZURE_OPENAI_TOKEN_ENDPOINT) """ @@ -92,12 +85,11 @@ class AzureOpenAISettings(KernelBaseSettings): text_to_image_deployment_name: str | None = None audio_to_text_deployment_name: str | None = None text_to_audio_deployment_name: str | None = None - realtime_deployment_name: str | None = None endpoint: HttpsUrl | None = None - base_url: Url | None = None + base_url: HttpsUrl | None = None api_key: SecretStr | None = None api_version: str = DEFAULT_AZURE_API_VERSION - token_endpoint: str = "https://cognitiveservices.azure.com/.default" + token_endpoint: str = "https://cognitiveservices.azure.com" def get_azure_openai_auth_token(self, token_endpoint: str | None = None) -> str | None: """Retrieve a Microsoft Entra Auth Token for a given token endpoint for the use with Azure OpenAI. @@ -108,7 +100,7 @@ def get_azure_openai_auth_token(self, token_endpoint: str | None = None) -> str The `token_endpoint` argument takes precedence over the `token_endpoint` attribute. Args: - token_endpoint: The token endpoint to use. Defaults to `https://cognitiveservices.azure.com/.default`. + token_endpoint: The token endpoint to use. Defaults to `https://cognitiveservices.azure.com`. Returns: The Azure token or None if the token could not be retrieved. diff --git a/python/semantic_kernel/connectors/ai/open_ai/settings/open_ai_settings.py b/python/semantic_kernel/connectors/ai/open_ai/settings/open_ai_settings.py index 7276af4b1f3b..6423a5385a33 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/settings/open_ai_settings.py +++ b/python/semantic_kernel/connectors/ai/open_ai/settings/open_ai_settings.py @@ -32,9 +32,6 @@ class OpenAISettings(KernelBaseSettings): (Env var OPENAI_AUDIO_TO_TEXT_MODEL_ID) - text_to_audio_model_id: str | None - The OpenAI text to audio model ID to use, for example, jukebox-1. (Env var OPENAI_TEXT_TO_AUDIO_MODEL_ID) - - realtime_model_id: str | None - The OpenAI realtime model ID to use, - for example, gpt-4o-realtime-preview-2024-12-17. - (Env var OPENAI_REALTIME_MODEL_ID) - env_file_path: str | None - if provided, the .env settings are read from this file path location """ @@ -48,4 +45,3 @@ class OpenAISettings(KernelBaseSettings): text_to_image_model_id: str | None = None audio_to_text_model_id: str | None = None text_to_audio_model_id: str | None = None - realtime_model_id: str | None = None diff --git a/python/semantic_kernel/connectors/ai/realtime_client_base.py b/python/semantic_kernel/connectors/ai/realtime_client_base.py deleted file mode 100644 index 3992d116a4f7..000000000000 --- a/python/semantic_kernel/connectors/ai/realtime_client_base.py +++ /dev/null @@ -1,145 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import sys -from abc import ABC, abstractmethod -from collections.abc import AsyncGenerator, Callable, Coroutine -from typing import Any, ClassVar - -if sys.version_info >= (3, 11): - from typing import Self # pragma: no cover -else: - from typing_extensions import Self # pragma: no cover - -from numpy import ndarray -from pydantic import PrivateAttr - -from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType -from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.realtime_events import RealtimeEvents -from semantic_kernel.services.ai_service_client_base import AIServiceClientBase -from semantic_kernel.utils.feature_stage_decorator import experimental - - -@experimental -class RealtimeClientBase(AIServiceClientBase, ABC): - """Base class for a realtime client.""" - - SUPPORTS_FUNCTION_CALLING: ClassVar[bool] = False - audio_output_callback: Callable[[ndarray], Coroutine[Any, Any, None]] | None = None - _chat_history: ChatHistory | None = PrivateAttr(default=None) - _settings: PromptExecutionSettings | None = PrivateAttr(default=None) - _create_kwargs: dict[str, Any] | None = PrivateAttr(default=None) - - @abstractmethod - async def send(self, event: RealtimeEvents) -> None: - """Send an event to the service. - - Args: - event: The event to send. - kwargs: Additional arguments. - """ - raise NotImplementedError - - @abstractmethod - def receive( - self, - audio_output_callback: Callable[[ndarray], Coroutine[Any, Any, None]] | None = None, - **kwargs: Any, - ) -> AsyncGenerator[RealtimeEvents, None]: - """Starts listening for messages from the service, generates events. - - Args: - audio_output_callback: The audio output callback, optional. - This should be a coroutine, that takes a ndarray with audio as input. - The goal of this function is to allow you to play the audio with the - least amount of latency possible. - It is called first in both websockets and webrtc. - Even when passed, the audio content will still be - added to the receiving queue. - This can also be set in the constructor. - When supplied here it will override any value in the class. - kwargs: Additional arguments. - """ - raise NotImplementedError - - @abstractmethod - async def create_session( - self, - chat_history: "ChatHistory | None" = None, - settings: "PromptExecutionSettings | None" = None, - **kwargs: Any, - ) -> None: - """Create a session in the service. - - Args: - settings: Prompt execution settings. - chat_history: Chat history. - kwargs: Additional arguments. - """ - raise NotImplementedError - - @abstractmethod - async def update_session( - self, - chat_history: "ChatHistory | None" = None, - settings: "PromptExecutionSettings | None" = None, - **kwargs: Any, - ) -> None: - """Update a session in the service. - - Can be used when using the context manager instead of calling create_session with these same arguments. - - Args: - settings: Prompt execution settings. - chat_history: Chat history. - kwargs: Additional arguments. 
- """ - raise NotImplementedError - - @abstractmethod - async def close_session(self) -> None: - """Close the session in the service.""" - pass - - def _update_function_choice_settings_callback( - self, - ) -> Callable[[FunctionCallChoiceConfiguration, "PromptExecutionSettings", FunctionChoiceType], None]: - """Return the callback function to update the settings from a function call configuration. - - Override this method to provide a custom callback function to - update the settings from a function call configuration. - """ - return lambda configuration, settings, choice_type: None - - async def __aenter__(self) -> "Self": - """Enter the context manager. - - Default implementation calls the create session method. - """ - await self.create_session(self._chat_history, self._settings) - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb) -> None: - """Exit the context manager.""" - await self.close_session() - - def __call__( - self, - chat_history: "ChatHistory | None" = None, - settings: "PromptExecutionSettings | None" = None, - **kwargs: Any, - ) -> Self: - """Call the service and set the chat history and settings. - - Args: - chat_history: Chat history. - settings: Prompt execution settings. - kwargs: Additional arguments, can include `kernel` or specific settings for the service. - Check the update_session method for the specific service for more details. - """ - self._chat_history = chat_history - self._settings = settings - self._create_kwargs = kwargs - return self diff --git a/python/semantic_kernel/connectors/memory/astradb/astra_client.py b/python/semantic_kernel/connectors/memory/astradb/astra_client.py index 739d8b3cd0d5..83dcd3b3ce9d 100644 --- a/python/semantic_kernel/connectors/memory/astradb/astra_client.py +++ b/python/semantic_kernel/connectors/memory/astradb/astra_client.py @@ -6,7 +6,7 @@ from semantic_kernel.connectors.memory.astradb.utils import AsyncSession from semantic_kernel.exceptions import ServiceResponseException -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class from semantic_kernel.utils.telemetry.user_agent import APP_INFO ASTRA_CALLER_IDENTITY: str @@ -14,7 +14,7 @@ ASTRA_CALLER_IDENTITY = f"semantic-kernel/{SEMANTIC_KERNEL_VERSION}" if SEMANTIC_KERNEL_VERSION else "semantic-kernel" -@experimental +@experimental_class class AstraClient: """AstraClient.""" diff --git a/python/semantic_kernel/connectors/memory/astradb/astradb_memory_store.py b/python/semantic_kernel/connectors/memory/astradb/astradb_memory_store.py index 5b6908377cf9..e640bb663903 100644 --- a/python/semantic_kernel/connectors/memory/astradb/astradb_memory_store.py +++ b/python/semantic_kernel/connectors/memory/astradb/astradb_memory_store.py @@ -13,7 +13,7 @@ from semantic_kernel.exceptions import MemoryConnectorInitializationError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class MAX_DIMENSIONALITY = 20000 MAX_UPSERT_BATCH_SIZE = 100 @@ -25,7 +25,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class AstraDBMemoryStore(MemoryStoreBase): """A memory store that uses Astra database as the backend.""" diff --git a/python/semantic_kernel/connectors/memory/astradb/astradb_settings.py 
b/python/semantic_kernel/connectors/memory/astradb/astradb_settings.py index 637dc2589d1c..e3d190187f4c 100644 --- a/python/semantic_kernel/connectors/memory/astradb/astradb_settings.py +++ b/python/semantic_kernel/connectors/memory/astradb/astradb_settings.py @@ -5,10 +5,10 @@ from pydantic import SecretStr from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class AstraDBSettings(KernelBaseSettings): """AstraDB model settings. diff --git a/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_collection.py b/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_collection.py index dc32336c8004..d103997db200 100644 --- a/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_collection.py +++ b/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_collection.py @@ -39,14 +39,14 @@ VectorStoreInitializationException, VectorStoreOperationException, ) -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) TModel = TypeVar("TModel") -@experimental +@experimental_class class AzureAISearchCollection( VectorSearchBase[str, TModel], VectorizableTextSearchMixin[TModel], diff --git a/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_settings.py b/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_settings.py index 6c029c4d84ba..99fc5620d289 100644 --- a/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_settings.py +++ b/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_settings.py @@ -5,10 +5,10 @@ from pydantic import SecretStr from semantic_kernel.kernel_pydantic import HttpsUrl, KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class AzureAISearchSettings(KernelBaseSettings): """Azure AI Search model settings currently used by the AzureCognitiveSearchMemoryStore connector. 
diff --git a/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_store.py b/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_store.py index 4df7925a8e6a..4c4693abb6d7 100644 --- a/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_store.py +++ b/python/semantic_kernel/connectors/memory/azure_ai_search/azure_ai_search_store.py @@ -20,7 +20,7 @@ from semantic_kernel.data.record_definition import VectorStoreRecordDefinition from semantic_kernel.data.vector_storage import VectorStore from semantic_kernel.exceptions import VectorStoreInitializationException -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from azure.core.credentials import AzureKeyCredential, TokenCredential @@ -34,7 +34,7 @@ TModel = TypeVar("TModel") -@experimental +@experimental_class class AzureAISearchStore(VectorStore): """Azure AI Search store implementation.""" diff --git a/python/semantic_kernel/connectors/memory/azure_ai_search/utils.py b/python/semantic_kernel/connectors/memory/azure_ai_search/utils.py index 16ac55a0a79e..6acd20cb18e1 100644 --- a/python/semantic_kernel/connectors/memory/azure_ai_search/utils.py +++ b/python/semantic_kernel/connectors/memory/azure_ai_search/utils.py @@ -31,7 +31,7 @@ VectorStoreRecordVectorField, ) from semantic_kernel.exceptions import ServiceInitializationError -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_function from semantic_kernel.utils.telemetry.user_agent import APP_INFO, prepend_semantic_kernel_to_user_agent if TYPE_CHECKING: @@ -77,7 +77,7 @@ def get_search_index_client( ) -@experimental +@experimental_function def data_model_definition_to_azure_ai_search_index( collection_name: str, definition: VectorStoreRecordDefinition, diff --git a/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_ai_search_settings.py b/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_ai_search_settings.py index 9caeec864898..c4c066407d92 100644 --- a/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_ai_search_settings.py +++ b/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_ai_search_settings.py @@ -5,10 +5,10 @@ from pydantic import SecretStr from semantic_kernel.kernel_pydantic import HttpsUrl, KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class AzureAISearchSettings(KernelBaseSettings): """Azure AI Search model settings currently used by the AzureCognitiveSearchMemoryStore connector. 
diff --git a/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_cognitive_search_memory_store.py b/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_cognitive_search_memory_store.py index f9c04d1dcd18..bc201777ec38 100644 --- a/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_cognitive_search_memory_store.py +++ b/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_cognitive_search_memory_store.py @@ -33,12 +33,12 @@ from semantic_kernel.exceptions import MemoryConnectorInitializationError, MemoryConnectorResourceNotFound from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class AzureCognitiveSearchMemoryStore(MemoryStoreBase): """Azure Cognitive Search Memory Store.""" diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/__init__.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/__init__.py index b55443addcd4..d6979ec34c38 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/__init__.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/__init__.py @@ -1,12 +1,5 @@ # Copyright (c) Microsoft. All rights reserved. -from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_mongodb_collection import ( - AzureCosmosDBforMongoDBCollection, -) -from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_mongodb_settings import ( - AzureCosmosDBforMongoDBSettings, -) -from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_mongodb_store import AzureCosmosDBforMongoDBStore from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_no_sql_collection import ( AzureCosmosDBNoSQLCollection, ) @@ -21,7 +14,4 @@ "AzureCosmosDBNoSQLCompositeKey", "AzureCosmosDBNoSQLSettings", "AzureCosmosDBNoSQLStore", - "AzureCosmosDBforMongoDBCollection", - "AzureCosmosDBforMongoDBSettings", - "AzureCosmosDBforMongoDBStore", ] diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_collection.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_collection.py deleted file mode 100644 index 89bfd0e1f9f9..000000000000 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_collection.py +++ /dev/null @@ -1,253 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import logging -import sys -from collections.abc import AsyncIterable -from importlib import metadata -from typing import Any, TypeVar - -from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from pydantic import ValidationError -from pymongo import AsyncMongoClient -from pymongo.driver_info import DriverInfo - -from semantic_kernel.connectors.memory.azure_cosmos_db.const import ( - DISTANCE_FUNCTION_MAPPING_MONGODB, - INDEX_KIND_MAPPING_MONGODB, -) -from semantic_kernel.connectors.memory.mongodb_atlas.const import ( - DEFAULT_DB_NAME, - MONGODB_SCORE_FIELD, -) -from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_collection import MongoDBAtlasCollection -from semantic_kernel.data.kernel_search_results import KernelSearchResults -from semantic_kernel.data.record_definition import VectorStoreRecordDefinition -from semantic_kernel.data.record_definition.vector_store_record_fields import VectorStoreRecordDataField -from semantic_kernel.data.vector_search.vector_search_options import VectorSearchOptions -from semantic_kernel.data.vector_search.vector_search_result import VectorSearchResult -from semantic_kernel.exceptions import ( - VectorStoreInitializationException, -) -from semantic_kernel.exceptions.vector_store_exceptions import ( - VectorSearchExecutionException, - VectorStoreModelDeserializationException, -) -from semantic_kernel.utils.feature_stage_decorator import experimental - -logger: logging.Logger = logging.getLogger(__name__) - -TModel = TypeVar("TModel") - - -@experimental -class AzureCosmosDBforMongoDBCollection(MongoDBAtlasCollection): - """Azure Cosmos DB for MongoDB collection.""" - - def __init__( - self, - collection_name: str, - data_model_type: type[TModel], - data_model_definition: VectorStoreRecordDefinition | None = None, - mongo_client: AsyncMongoClient | None = None, - connection_string: str | None = None, - database_name: str | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None, - **kwargs: Any, - ) -> None: - """Initializes a new instance of the AzureCosmosDBforMongoDBCollection class. - - Args: - data_model_type: The type of the data model. - data_model_definition: The model definition, optional. - collection_name: The name of the collection, optional. - mongo_client: The MongoDB client for interacting with Azure CosmosDB for MongoDB, - used for creating and deleting collections. - connection_string: The connection string for MongoDB Atlas, optional. - Can be read from environment variables. - database_name: The name of the database, will be filled from the env when this is not set. 
- connection_string: str | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None - **kwargs: Additional keyword arguments - - """ - managed_client = not mongo_client - if mongo_client: - super().__init__( - data_model_type=data_model_type, - data_model_definition=data_model_definition, - mongo_client=mongo_client, - collection_name=collection_name, - database_name=database_name or DEFAULT_DB_NAME, - managed_client=managed_client, - ) - return - - from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_mongodb_settings import ( - AzureCosmosDBforMongoDBSettings, - ) - - try: - settings = AzureCosmosDBforMongoDBSettings.create( - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - connection_string=connection_string, - database_name=database_name, - ) - except ValidationError as exc: - raise VectorStoreInitializationException("Failed to create Azure CosmosDB for MongoDB settings.") from exc - if not settings.connection_string: - raise VectorStoreInitializationException("The Azure CosmosDB for MongoDB connection string is required.") - - mongo_client = AsyncMongoClient( - settings.connection_string.get_secret_value(), - driver=DriverInfo(SEMANTIC_KERNEL_USER_AGENT, metadata.version("semantic-kernel")), - ) - - super().__init__( - data_model_type=data_model_type, - data_model_definition=data_model_definition, - collection_name=collection_name, - mongo_client=mongo_client, - managed_client=managed_client, - database_name=settings.database_name, - ) - - @override - async def create_collection(self, **kwargs) -> None: - """Create a new collection in Azure CosmosDB for MongoDB. - - This first creates a collection, with the kwargs. - Then creates a search index based on the data model definition. - - By the naming convection of MongoDB indexes are created by using the field name - with a underscore. - - Args: - **kwargs: Additional keyword arguments. - These are the additional keyword arguments for creating - vector indexes in Azure Cosmos DB for MongoDB. - And they depend on the kind of index you are creating. - See https://learn.microsoft.com/en-us/azure/cosmos-db/mongodb/vcore/vector-search - for more information. - Other kwargs are passed to the create_collection method. 
- """ - await self._get_database().create_collection(self.collection_name, **kwargs) - await self._get_database().command(command=self._get_vector_index(**kwargs)) - - def _get_vector_index(self, **kwargs: Any) -> dict[str, Any]: - indexes = [ - {"name": f"{field.name}_", "key": {field.name: 1}} - for field in self.data_model_definition.fields.values() - if isinstance(field, VectorStoreRecordDataField) and (field.is_filterable or field.is_full_text_searchable) - ] - for vector_field in self.data_model_definition.vector_fields: - index_name = f"{vector_field.name}_" - - similarity = ( - DISTANCE_FUNCTION_MAPPING_MONGODB.get(vector_field.distance_function) - if vector_field.distance_function - else "COS" - ) - kind = INDEX_KIND_MAPPING_MONGODB.get(vector_field.index_kind) if vector_field.index_kind else "vector-ivf" - if similarity is None: - raise VectorStoreInitializationException(f"Invalid distance function: {vector_field.distance_function}") - if kind is None: - raise VectorStoreInitializationException(f"Invalid index kind: {vector_field.index_kind}") - index: dict[str, Any] = { - "name": index_name, - "key": {vector_field.name: "cosmosSearch"}, - "cosmosSearchOptions": { - "kind": kind, - "similarity": similarity, - "dimensions": vector_field.dimensions, - }, - } - match kind: - case "vector-diskann": - if "maxDegree" in kwargs: - index["cosmosSearchOptions"]["maxDegree"] = kwargs["maxDegree"] - if "lBuild" in kwargs: - index["cosmosSearchOptions"]["lBuild"] = kwargs["lBuild"] - case "vector-hnsw": - if "m" in kwargs: - index["cosmosSearchOptions"]["m"] = kwargs["m"] - if "efConstruction" in kwargs: - index["cosmosSearchOptions"]["efConstruction"] = kwargs["efConstruction"] - case "vector-ivf": - if "numList" in kwargs: - index["cosmosSearchOptions"]["numList"] = kwargs["numList"] - indexes.append(index) - - return {"createIndexes": self.collection_name, "indexes": indexes} - - @override - async def _inner_vectorized_search( - self, - options: VectorSearchOptions, - vector: list[float | int], - **kwargs: Any, - ) -> KernelSearchResults[VectorSearchResult[TModel]]: - collection = self._get_collection() - vector_search_query: dict[str, Any] = { - "k": options.top + options.skip, - "index": f"{options.vector_field_name}_", - "vector": vector, - "path": options.vector_field_name, - } - if options.filter.filters: - vector_search_query["filter"] = self._build_filter_dict(options.filter) - projection_query: dict[str, int | dict] = { - field: 1 - for field in self.data_model_definition.get_field_names( - include_vector_fields=options.include_vectors, - include_key_field=False, # _id is always included - ) - } - projection_query[MONGODB_SCORE_FIELD] = {"$meta": "searchScore"} - try: - raw_results = await collection.aggregate([ - {"$search": {"cosmosSearch": vector_search_query}}, - {"$project": projection_query}, - ]) - except Exception as exc: - raise VectorSearchExecutionException("Failed to search the collection.") from exc - return KernelSearchResults( - results=self._get_vector_search_results_from_results(raw_results, options), - total_count=None, # no way to get a count before looping through the result cursor - ) - - async def _get_vector_search_results_from_cursor( - self, - filter: dict[str, Any], - projection: dict[str, int | dict], - options: VectorSearchOptions | None = None, - ) -> AsyncIterable[VectorSearchResult[TModel]]: - collection = self._get_collection() - async for result in collection.find( - filter=filter, - projection=projection, - skip=options.skip if options else 0, - 
limit=options.top if options else 0, - ): - try: - record = self.deserialize( - self._get_record_from_result(result), include_vectors=options.include_vectors if options else True - ) - except VectorStoreModelDeserializationException: - raise - except Exception as exc: - raise VectorStoreModelDeserializationException( - f"An error occurred while deserializing the record: {exc}" - ) from exc - score = self._get_score_from_result(result) - if record: - # single records are always returned as single records by the deserializer - yield VectorSearchResult(record=record, score=score) # type: ignore diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_settings.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_settings.py deleted file mode 100644 index c41443ca13ab..000000000000 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_settings.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from typing import ClassVar - -from pydantic import SecretStr - -from semantic_kernel.connectors.memory.mongodb_atlas.const import DEFAULT_DB_NAME -from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental - - -@experimental -class AzureCosmosDBforMongoDBSettings(KernelBaseSettings): - """Azure CosmosDB for MongoDB settings. - - The settings are first loaded from environment variables with - the prefix 'AZURE_COSMOS_DB_MONGODB_'. - If the environment variables are not found, the settings can - be loaded from a .env file with the encoding 'utf-8'. - If the settings are not found in the .env file, the settings - are ignored; however, validation will fail alerting that the - settings are missing. - - Required settings for prefix 'AZURE_COSMOS_DB_MONGODB_': - - connection_string: The connection string of the Azure CosmosDB for MongoDB account. - This value can be found in the Keys & Endpoint section when examining - your resource from the Azure portal. - (Env var name: AZURE_COSMOS_DB_MONGODB_CONNECTION_STRING) - - database_name: str - The name of the database. Please refer to this documentation - on Azure CosmosDB NoSQL resource model: - https://learn.microsoft.com/en-us/azure/cosmos-db/resource-model - (Env var name: AZURE_COSMOS_DB_MONGODB_DATABASE_NAME) - """ - - env_prefix: ClassVar[str] = "AZURE_COSMOS_DB_MONGODB_" - - connection_string: SecretStr | None = None - database_name: str = DEFAULT_DB_NAME diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_store.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_store.py deleted file mode 100644 index d3314ad2a93d..000000000000 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_mongodb_store.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import sys -from importlib import metadata -from typing import TYPE_CHECKING, Any, TypeVar - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from pydantic import ValidationError -from pymongo import AsyncMongoClient -from pymongo.driver_info import DriverInfo - -from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_mongodb_collection import ( - AzureCosmosDBforMongoDBCollection, -) -from semantic_kernel.connectors.memory.mongodb_atlas.const import DEFAULT_DB_NAME -from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_store import MongoDBAtlasStore -from semantic_kernel.data.record_definition import VectorStoreRecordDefinition -from semantic_kernel.exceptions import VectorStoreInitializationException -from semantic_kernel.utils.feature_stage_decorator import experimental -from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT - -if TYPE_CHECKING: - from semantic_kernel.data import VectorStoreRecordCollection - -TModel = TypeVar("TModel") - - -@experimental -class AzureCosmosDBforMongoDBStore(MongoDBAtlasStore): - """Azure Cosmos DB for MongoDB store implementation.""" - - def __init__( - self, - connection_string: str | None = None, - database_name: str | None = None, - mongo_client: AsyncMongoClient | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None, - ) -> None: - """Initializes a new instance of the AzureCosmosDBforMongoDBStore client. - - Args: - connection_string (str): The connection string for Azure CosmosDB for MongoDB, optional. - Can be read from environment variables. - database_name (str): The name of the database, optional. Can be read from environment variables. - mongo_client (MongoClient): The MongoDB client, optional. - env_file_path (str): Use the environment settings file as a fallback - to environment variables. - env_file_encoding (str): The encoding of the environment settings file. - - """ - managed_client: bool = not mongo_client - if mongo_client: - super().__init__( - mongo_client=mongo_client, - managed_client=managed_client, - database_name=database_name or DEFAULT_DB_NAME, - ) - return - from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_mongodb_settings import ( - AzureCosmosDBforMongoDBSettings, - ) - - try: - settings = AzureCosmosDBforMongoDBSettings.create( - env_file_path=env_file_path, - connection_string=connection_string, - database_name=database_name, - env_file_encoding=env_file_encoding, - ) - except ValidationError as exc: - raise VectorStoreInitializationException("Failed to create MongoDB Atlas settings.") from exc - if not settings.connection_string: - raise VectorStoreInitializationException("The connection string is missing.") - - mongo_client = AsyncMongoClient( - settings.connection_string.get_secret_value(), - driver=DriverInfo(SEMANTIC_KERNEL_USER_AGENT, metadata.version("semantic-kernel")), - ) - - super().__init__( - mongo_client=mongo_client, - managed_client=managed_client, - database_name=settings.database_name, - ) - - @override - def get_collection( - self, - collection_name: str, - data_model_type: type[TModel], - data_model_definition: VectorStoreRecordDefinition | None = None, - **kwargs: Any, - ) -> "VectorStoreRecordCollection": - """Get a AzureCosmosDBforMongoDBCollection tied to a collection. - - Args: - collection_name (str): The name of the collection. 
- data_model_type (type[TModel]): The type of the data model. - data_model_definition (VectorStoreRecordDefinition | None): The model fields, optional. - **kwargs: Additional keyword arguments, passed to the collection constructor. - """ - if collection_name not in self.vector_record_collections: - self.vector_record_collections[collection_name] = AzureCosmosDBforMongoDBCollection( - data_model_type=data_model_type, - data_model_definition=data_model_definition, - mongo_client=self.mongo_client, - collection_name=collection_name, - database_name=self.database_name, - **kwargs, - ) - return self.vector_record_collections[collection_name] diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_base.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_base.py index 2bf712283858..c9a49ba4e546 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_base.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_base.py @@ -14,10 +14,10 @@ from semantic_kernel.utils.authentication.async_default_azure_credential_wrapper import ( AsyncDefaultAzureCredentialWrapper, ) -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class AzureCosmosDBNoSQLBase(KernelBaseModel): """An Azure Cosmos DB NoSQL collection stores documents in a Azure Cosmos DB NoSQL account.""" diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_collection.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_collection.py index 41865d5b060c..aa8633ecb54e 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_collection.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_collection.py @@ -42,13 +42,13 @@ VectorStoreOperationException, ) from semantic_kernel.kernel_types import OneOrMany -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class TModel = TypeVar("TModel") TKey = TypeVar("TKey", str, AzureCosmosDBNoSQLCompositeKey) -@experimental +@experimental_class class AzureCosmosDBNoSQLCollection( AzureCosmosDBNoSQLBase, VectorSearchBase[TKey, TModel], @@ -178,25 +178,23 @@ def _build_search_text_query(self, options: VectorSearchOptions) -> str: where_clauses = self._build_where_clauses_from_filter(options.filter) contains_clauses = " OR ".join( f"CONTAINS(c.{field}, @search_text)" - for field, field_def in self.data_model_definition.fields.items() - if isinstance(field_def, VectorStoreRecordDataField) and field_def.is_full_text_searchable + for field in self.data_model_definition.fields + if isinstance(field, VectorStoreRecordDataField) and field.is_full_text_searchable ) - if where_clauses: - where_clauses = f" {where_clauses} AND" return ( f"SELECT TOP @top {self._build_select_clause(options.include_vectors)} " # nosec: B608 - f"FROM c WHERE{where_clauses} ({contains_clauses})" # nosec: B608 + f"FROM c WHERE ({contains_clauses}) AND {where_clauses}" # nosec: B608 ) def _build_vector_query(self, options: VectorSearchOptions) -> str: where_clauses = self._build_where_clauses_from_filter(options.filter) if where_clauses: - where_clauses = f"WHERE {where_clauses} " + where_clauses = f"WHERE {where_clauses}" vector_field_name: str = 
self.data_model_definition.try_get_vector_field(options.vector_field_name).name # type: ignore return ( - f"SELECT TOP @top {self._build_select_clause(options.include_vectors)}, " # nosec: B608 - f"VectorDistance(c.{vector_field_name}, @vector) AS distance FROM c " # nosec: B608 - f"{where_clauses}ORDER BY VectorDistance(c.{vector_field_name}, @vector)" # nosec: B608 + f"SELECT TOP @top {self._build_select_clause(options.include_vectors)}," # nosec: B608 + f" VectorDistance(c.{vector_field_name}, @vector) AS distance FROM c ORDER " # nosec: B608 + f"BY VectorDistance(c.{vector_field_name}, @vector) {where_clauses}" # nosec: B608 ) def _build_select_clause(self, include_vectors: bool) -> str: @@ -220,24 +218,11 @@ def _build_where_clauses_from_filter(self, filters: VectorSearchFilter | None) - return "" clauses = [] for filter in filters.filters: - field_def = self.data_model_definition.fields[filter.field_name] match filter: case EqualTo(): - clause = "" - if field_def.property_type in ["int", "float"]: - clause = f"c.{filter.field_name} = {filter.value}" - if field_def.property_type == "str": - clause = f"c.{filter.field_name} = '{filter.value}'" - if field_def.property_type == "list[str]": - filter_value = f"ARRAY_CONTAINS(c.{filter.field_name}, '{filter.value}')" - if field_def.property_type in ["list[int]", "list[float]"]: - filter_value = f"ARRAY_CONTAINS(c.{filter.field_name}, {filter.value})" - clauses.append(clause) + clauses.append(f"c.{filter.field_name} = {filter.value}") case AnyTagsEqualTo(): - filter_value = filter.value - if field_def.property_type == "list[str]": - filter_value = f"'{filter.value}'" - clauses.append(f"{filter_value} IN c.{filter.field_name}") + clauses.append(f"{filter.value} IN c.{filter.field_name}") case _: raise ValueError(f"Unsupported filter: {filter}") return " AND ".join(clauses) diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_composite_key.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_composite_key.py index a0d1f38acd95..6da66cc1cfd0 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_composite_key.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_composite_key.py @@ -2,10 +2,10 @@ from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class AzureCosmosDBNoSQLCompositeKey(KernelBaseModel): """Azure CosmosDB NoSQL composite key.""" diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_settings.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_settings.py index a30f7cc5cc0d..cbdac0036d13 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_settings.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_settings.py @@ -5,10 +5,10 @@ from pydantic import HttpUrl, SecretStr from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class AzureCosmosDBNoSQLSettings(KernelBaseSettings): """Azure CosmosDB NoSQL settings. 
diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_store.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_store.py index 9d191effa88e..45ca18b58c55 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_store.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_store.py @@ -19,12 +19,12 @@ from semantic_kernel.data.vector_storage.vector_store import VectorStore from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection from semantic_kernel.exceptions import VectorStoreOperationException -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class TModel = TypeVar("TModel") -@experimental +@experimental_class class AzureCosmosDBNoSQLStore(AzureCosmosDBNoSQLBase, VectorStore): """A VectorStore implementation that uses Azure CosmosDB NoSQL as the backend storage.""" diff --git a/python/semantic_kernel/connectors/memory/azure_cosmos_db/const.py b/python/semantic_kernel/connectors/memory/azure_cosmos_db/const.py index dfdf4976cda1..2bec006b99b5 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmos_db/const.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmos_db/const.py @@ -11,24 +11,12 @@ IndexKind.DISK_ANN: "diskANN", } -INDEX_KIND_MAPPING_MONGODB = { - IndexKind.IVF_FLAT: "vector-ivf", - IndexKind.HNSW: "vector-hnsw", - IndexKind.DISK_ANN: "vector-diskann", -} - DISTANCE_FUNCTION_MAPPING = { DistanceFunction.COSINE_SIMILARITY: "cosine", DistanceFunction.DOT_PROD: "dotproduct", DistanceFunction.EUCLIDEAN_DISTANCE: "euclidean", } -DISTANCE_FUNCTION_MAPPING_MONGODB = { - DistanceFunction.COSINE_SIMILARITY: "COS", - DistanceFunction.DOT_PROD: "IP", - DistanceFunction.EUCLIDEAN_DISTANCE: "L2", -} - DATATYPES_MAPPING = { "default": "float32", "float": "float32", diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_memory_store.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_memory_store.py index dce5a29b3d04..40150463b40e 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_memory_store.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_memory_store.py @@ -16,12 +16,12 @@ from semantic_kernel.exceptions import MemoryConnectorInitializationError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class AzureCosmosDBMemoryStore(MemoryStoreBase): """A memory store that uses AzureCosmosDB for MongoDB vCore. 
diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_store_api.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_store_api.py index eb3427cd58ae..47b8d065086c 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_store_api.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_store_api.py @@ -5,11 +5,11 @@ from numpy import ndarray from semantic_kernel.memory.memory_record import MemoryRecord -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class # Abstract class similar to the original data store that allows API level abstraction -@experimental +@experimental_class class AzureCosmosDBStoreApi(ABC): """AzureCosmosDBStoreApi.""" diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmosdb_settings.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmosdb_settings.py index dc08e27e14e8..212d45788ec7 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmosdb_settings.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmosdb_settings.py @@ -5,10 +5,10 @@ from pydantic import ConfigDict, Field, SecretStr from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class AzureCosmosDBSettings(KernelBaseSettings): """Azure CosmosDB model settings. @@ -21,7 +21,7 @@ class AzureCosmosDBSettings(KernelBaseSettings): env_prefix: ClassVar[str] = "COSMOSDB_" api: str | None = None - connection_string: SecretStr | None = Field(default=None, alias="AZCOSMOS_CONNSTR") + connection_string: SecretStr | None = Field(None, alias="AZCOSMOS_CONNSTR") model_config = ConfigDict( populate_by_name=True, diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb/mongo_vcore_store_api.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb/mongo_vcore_store_api.py index aab438fcb833..338bd9e7a234 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmosdb/mongo_vcore_store_api.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb/mongo_vcore_store_api.py @@ -14,10 +14,10 @@ from semantic_kernel.connectors.memory.azure_cosmosdb.azure_cosmos_db_store_api import AzureCosmosDBStoreApi from semantic_kernel.connectors.memory.azure_cosmosdb.utils import CosmosDBSimilarityType, CosmosDBVectorSearchType from semantic_kernel.memory.memory_record import MemoryRecord -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class MongoStoreApi(AzureCosmosDBStoreApi): """MongoStoreApi class for the Azure Cosmos DB Mongo store.""" diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb/utils.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb/utils.py index aa931ebac6ae..8c0cd782e1af 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmosdb/utils.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb/utils.py @@ -2,10 +2,10 @@ from enum import Enum -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_function -@experimental +@experimental_function class 
CosmosDBSimilarityType(str, Enum): """Cosmos DB Similarity Type as enumerator.""" @@ -17,7 +17,7 @@ class CosmosDBSimilarityType(str, Enum): """Euclidean distance""" -@experimental +@experimental_function class CosmosDBVectorSearchType(str, Enum): """Cosmos DB Vector Search Type as enumerator.""" diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb_no_sql/azure_cosmosdb_no_sql_memory_store.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb_no_sql/azure_cosmosdb_no_sql_memory_store.py index 4bca89952828..e40abd9ed5bd 100644 --- a/python/semantic_kernel/connectors/memory/azure_cosmosdb_no_sql/azure_cosmosdb_no_sql_memory_store.py +++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb_no_sql/azure_cosmosdb_no_sql_memory_store.py @@ -15,10 +15,10 @@ from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class AzureCosmosDBNoSQLMemoryStore(MemoryStoreBase): """You can read more about vector search using AzureCosmosDBNoSQL here: https://aka.ms/CosmosVectorSearch.""" diff --git a/python/semantic_kernel/connectors/memory/chroma/__init__.py b/python/semantic_kernel/connectors/memory/chroma/__init__.py index 7cdf6cd8bda6..005203ad8e73 100644 --- a/python/semantic_kernel/connectors/memory/chroma/__init__.py +++ b/python/semantic_kernel/connectors/memory/chroma/__init__.py @@ -1,8 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. -from semantic_kernel.connectors.memory.chroma.chroma import ChromaCollection, ChromaStore from semantic_kernel.connectors.memory.chroma.chroma_memory_store import ( ChromaMemoryStore, ) -__all__ = ["ChromaCollection", "ChromaMemoryStore", "ChromaStore"] +__all__ = ["ChromaMemoryStore"] diff --git a/python/semantic_kernel/connectors/memory/chroma/chroma.py b/python/semantic_kernel/connectors/memory/chroma/chroma.py deleted file mode 100644 index 6a41142e3d52..000000000000 --- a/python/semantic_kernel/connectors/memory/chroma/chroma.py +++ /dev/null @@ -1,376 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import logging -import sys -from collections.abc import Sequence -from typing import Any, ClassVar, Generic, TypeVar - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from chromadb import Client, Collection, QueryResult -from chromadb.api import ClientAPI -from chromadb.config import Settings - -from semantic_kernel.data.const import DistanceFunction -from semantic_kernel.data.filter_clauses.any_tags_equal_to_filter_clause import AnyTagsEqualTo -from semantic_kernel.data.filter_clauses.equal_to_filter_clause import EqualTo -from semantic_kernel.data.kernel_search_results import KernelSearchResults -from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition -from semantic_kernel.data.record_definition.vector_store_record_fields import VectorStoreRecordDataField -from semantic_kernel.data.vector_search.vector_search import VectorSearchBase -from semantic_kernel.data.vector_search.vector_search_options import VectorSearchOptions -from semantic_kernel.data.vector_search.vector_search_result import VectorSearchResult -from semantic_kernel.data.vector_search.vectorized_search import VectorizedSearchMixin -from semantic_kernel.data.vector_storage.vector_store import VectorStore -from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection -from semantic_kernel.exceptions.vector_store_exceptions import ( - VectorStoreInitializationException, - VectorStoreModelValidationError, - VectorStoreOperationException, -) -from semantic_kernel.utils.feature_stage_decorator import experimental - -logger = logging.getLogger(__name__) - -TModel = TypeVar("TModel") - -DISTANCE_FUNCTION_MAP = { - DistanceFunction.COSINE_SIMILARITY: "cosine", - DistanceFunction.EUCLIDEAN_SQUARED_DISTANCE: "l2", - DistanceFunction.DOT_PROD: "ip", -} - - -@experimental -class ChromaCollection( - VectorSearchBase[str, TModel], - VectorizedSearchMixin[TModel], - Generic[TModel], -): - """Chroma vector store collection.""" - - client: ClientAPI - supported_key_types: ClassVar[list[str] | None] = ["str"] - - def __init__( - self, - collection_name: str, - data_model_type: type[object], - data_model_definition: VectorStoreRecordDefinition | None = None, - persist_directory: str | None = None, - client_settings: "Settings | None" = None, - client: "ClientAPI | None" = None, - **kwargs: Any, - ): - """Initialize the Chroma vector store collection.""" - managed_client = not client - if client is None: - settings = client_settings or Settings() - if persist_directory is not None: - settings.is_persistent = True - settings.persist_directory = persist_directory - client = Client(settings) - super().__init__( - collection_name=collection_name, - data_model_type=data_model_type, - data_model_definition=data_model_definition, - client=client, - managed_client=managed_client, - **kwargs, - ) - - def _get_collection(self) -> Collection: - try: - return self.client.get_collection(name=self.collection_name) - except Exception as e: - raise RuntimeError(f"Failed to get collection {self.collection_name}") from e - - @override - async def does_collection_exist(self, **kwargs: Any) -> bool: - """Check if the collection exists.""" - try: - self.client.get_collection(name=self.collection_name) - return True - except Exception: - return False - - @override - async def create_collection(self, **kwargs: Any) -> None: - """Create the collection. 
- - Sets the distance function if specified in the data model definition. - - Args: - kwargs: Additional arguments are passed to the metadata parameter of the create_collection method. - """ - if self.data_model_definition.vector_fields and self.data_model_definition.vector_fields[0].distance_function: - if self.data_model_definition.vector_fields[0].distance_function not in DISTANCE_FUNCTION_MAP: - raise VectorStoreInitializationException( - f"Distance function {self.data_model_definition.vector_fields[0].distance_function} is not " - "supported." - ) - kwargs["hnsw:space"] = DISTANCE_FUNCTION_MAP[self.data_model_definition.vector_fields[0].distance_function] - if kwargs: - self.client.create_collection(name=self.collection_name, metadata=kwargs) - else: - self.client.create_collection(name=self.collection_name) - - @override - async def delete_collection(self, **kwargs: Any) -> None: - """Delete the collection.""" - try: - self.client.delete_collection(name=self.collection_name) - except ValueError: - logger.info(f"Collection {self.collection_name} could not be deleted because it doesn't exist.") - except Exception as e: - raise VectorStoreOperationException( - f"Failed to delete collection {self.collection_name} with error: {e}" - ) from e - - async def _validate_data_model(self): - super()._validate_data_model() - if len(self.data_model_definition.vector_fields) > 1: - raise VectorStoreModelValidationError( - "Chroma only supports one vector field, but " - f"{len(self.data_model_definition.vector_fields)} were provided." - ) - if self.data_model_definition.vector_fields[0].index_kind != "hnsw": - raise VectorStoreModelValidationError( - "Chroma only supports hnsw index kind, but " - f"{self.data_model_definition.vector_fields[0].index_kind} was provided." 
- ) - - @override - def _serialize_dicts_to_store_models(self, records: Sequence[dict[str, Any]], **kwargs: Any) -> Sequence[Any]: - vector_field_name = self.data_model_definition.vector_field_names[0] - id_field_name = self.data_model_definition.key_field_name - document_field_name = next( - field.name - for field in self.data_model_definition.fields.values() - if isinstance(field, VectorStoreRecordDataField) and field.embedding_property_name == vector_field_name - ) - store_models = [] - for record in records: - store_model = { - "id": record[id_field_name], - "embedding": record[vector_field_name], - "document": record[document_field_name], - "metadata": { - k: v for k, v in record.items() if k not in [id_field_name, vector_field_name, document_field_name] - }, - } - if store_model["metadata"] == {}: - store_model.pop("metadata") - store_models.append(store_model) - return store_models - - @override - def _deserialize_store_models_to_dicts(self, records: Sequence[Any], **kwargs: Any) -> Sequence[dict[str, Any]]: - vector_field_name = self.data_model_definition.vector_field_names[0] - id_field_name = self.data_model_definition.key_field_name - document_field_name = next( - field.name - for field in self.data_model_definition.fields.values() - if isinstance(field, VectorStoreRecordDataField) and field.embedding_property_name == vector_field_name - ) - # replace back the name of the vector, content and id fields - for record in records: - record[id_field_name] = record.pop("id") - record[vector_field_name] = record.pop("embedding") - record[document_field_name] = record.pop("document") - return records - - @override - async def _inner_upsert( - self, - records: Sequence[Any], - **kwargs: Any, - ) -> Sequence[str]: - upsert_obj = {"ids": []} - for record in records: - upsert_obj["ids"].append(record["id"]) - if "embedding" in record: - if "embeddings" not in upsert_obj: - upsert_obj["embeddings"] = [] - upsert_obj["embeddings"].append(record["embedding"]) - if "document" in record: - if "documents" not in upsert_obj: - upsert_obj["documents"] = [] - upsert_obj["documents"].append(record["document"]) - if "metadata" in record: - if "metadatas" not in upsert_obj: - upsert_obj["metadatas"] = [] - upsert_obj["metadatas"].append(record["metadata"]) - self._get_collection().add(**upsert_obj) - return upsert_obj["ids"] - - @override - async def _inner_get(self, keys: Sequence[str], **kwargs: Any) -> Sequence[Any]: - include_vectors = kwargs.get("include_vectors", True) - results = self._get_collection().get( - ids=keys, - include=["documents", "metadatas", "embeddings"] if include_vectors else ["documents", "metadatas"], - ) - return self._unpack_results(results, include_vectors) - - def _unpack_results( - self, results: QueryResult, include_vectors: bool, include_distances: bool = False - ) -> Sequence[dict[str, Any]]: - try: - if isinstance(results["ids"][0], str): - for k, v in results.items(): - results[k] = [v] - except IndexError: - return [] - records = [] - if include_vectors and include_distances: - for id, document, embedding, metadata, distance in zip( - results["ids"][0], - results["documents"][0], - results["embeddings"][0], - results["metadatas"][0], - results["distances"][0], - ): - record = {"id": id, "embedding": embedding, "document": document, "distance": distance} - if metadata: - record.update(metadata) - records.append(record) - return records - if include_vectors and not include_distances: - for id, document, embedding, metadata in zip( - results["ids"][0], - 
results["documents"][0], - results["embeddings"][0], - results["metadatas"][0], - ): - record = { - "id": id, - "embedding": embedding, - "document": document, - } - if metadata: - record.update(metadata) - records.append(record) - return records - if not include_vectors and include_distances: - for id, document, metadata, distance in zip( - results["ids"][0], results["documents"][0], results["metadatas"][0], results["distances"][0] - ): - record = {"id": id, "document": document, "distance": distance} - if metadata: - record.update(metadata) - records.append(record) - return records - for id, document, metadata in zip( - results["ids"][0], - results["documents"][0], - results["metadatas"][0], - ): - record = { - "id": id, - "document": document, - } - if metadata: - record.update(metadata) - records.append(record) - return records - - @override - async def _inner_delete(self, keys: Sequence[str], **kwargs: Any) -> None: - self._get_collection().delete(ids=keys) - - @override - async def _inner_search( - self, - options: VectorSearchOptions, - search_text: str | None = None, - vectorizable_text: str | None = None, - vector: list[float | int] | None = None, - **kwargs: Any, - ) -> KernelSearchResults[VectorSearchResult[TModel]]: - where = self._parse_filter(options) - args = { - "n_results": options.top, - "include": ["documents", "metadatas", "embeddings", "distances"] - if options.include_vectors - else ["documents", "metadatas", "distances"], - } - if where: - args["where"] = where - if vector is not None: - args["query_embeddings"] = vector - results = self._get_collection().query(**args) - records = self._unpack_results(results, options.include_vectors, include_distances=True) - return KernelSearchResults( - results=self._get_vector_search_results_from_results(records), total_count=len(records) - ) - - @override - def _get_record_from_result(self, result: Any) -> Any: - return result - - @override - def _get_score_from_result(self, result: Any) -> float | None: - return result["distance"] - - def _parse_filter(self, options: VectorSearchOptions) -> dict[str, Any] | None: - if options.filter is None or not options.filter.filters: - return None - filter_expression = {"$and": []} - for filter in options.filter.filters: - match filter: - case EqualTo(): - filter_expression["$and"].append({filter.field_name: {"$eq": filter.value}}) - case AnyTagsEqualTo(): - filter_expression["$and"].append({filter.field_name: {"$in": filter.value}}) - if len(filter_expression["$and"]) == 1: - return filter_expression["$and"][0] - return filter_expression - - -@experimental -class ChromaStore(VectorStore): - """Chroma vector store.""" - - client: ClientAPI - - def __init__( - self, - persist_directory: str | None = None, - client_settings: "Settings | None" = None, - client: ClientAPI | None = None, - **kwargs: Any, - ): - """Initialize the Chroma vector store.""" - managed_client = not client - settings = client_settings or Settings() - if persist_directory is not None: - settings.is_persistent = True - settings.persist_directory = persist_directory - if client is None: - client = Client(settings) - super().__init__(client=client, managed_client=managed_client, **kwargs) - - @override - def get_collection( - self, - collection_name: str, - data_model_type: type[object], - data_model_definition: "VectorStoreRecordDefinition | None" = None, - **kwargs: "Any", - ) -> VectorStoreRecordCollection: - """Get a vector record store.""" - return ChromaCollection( - client=self.client, - 
collection_name=collection_name, - data_model_type=data_model_type, - data_model_definition=data_model_definition, - **kwargs, - ) - - @override - async def list_collection_names(self, **kwargs) -> Sequence[str]: - return self.client.list_collections() diff --git a/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py b/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py index 188b73498dc7..0ff69f3fcd8c 100644 --- a/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py +++ b/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py @@ -15,7 +15,7 @@ from semantic_kernel.exceptions import ServiceInitializationError, ServiceResourceNotFoundError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: import chromadb @@ -25,7 +25,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class ChromaMemoryStore(MemoryStoreBase): """ChromaMemoryStore provides an interface to store and retrieve data using ChromaDB.""" diff --git a/python/semantic_kernel/connectors/memory/in_memory/const.py b/python/semantic_kernel/connectors/memory/in_memory/const.py index 24fd25d7568c..daefb15497fc 100644 --- a/python/semantic_kernel/connectors/memory/in_memory/const.py +++ b/python/semantic_kernel/connectors/memory/in_memory/const.py @@ -17,4 +17,5 @@ DistanceFunction.MANHATTAN: cityblock, DistanceFunction.HAMMING: hamming, DistanceFunction.DOT_PROD: dot, + "default": cosine, } diff --git a/python/semantic_kernel/connectors/memory/in_memory/in_memory_collection.py b/python/semantic_kernel/connectors/memory/in_memory/in_memory_collection.py index 95e2e537f9ee..789faceb8611 100644 --- a/python/semantic_kernel/connectors/memory/in_memory/in_memory_collection.py +++ b/python/semantic_kernel/connectors/memory/in_memory/in_memory_collection.py @@ -4,17 +4,18 @@ from collections.abc import AsyncIterable, Callable, Mapping, Sequence from typing import Any, ClassVar, TypeVar +from pydantic import Field + +from semantic_kernel.data.filter_clauses.any_tags_equal_to_filter_clause import AnyTagsEqualTo +from semantic_kernel.data.filter_clauses.equal_to_filter_clause import EqualTo + if sys.version_info >= (3, 12): from typing import override # pragma: no cover else: from typing_extensions import override # pragma: no cover -from pydantic import Field - from semantic_kernel.connectors.memory.in_memory.const import DISTANCE_FUNCTION_MAP -from semantic_kernel.data.const import DISTANCE_FUNCTION_DIRECTION_HELPER, DistanceFunction -from semantic_kernel.data.filter_clauses.any_tags_equal_to_filter_clause import AnyTagsEqualTo -from semantic_kernel.data.filter_clauses.equal_to_filter_clause import EqualTo +from semantic_kernel.data.const import DistanceFunction from semantic_kernel.data.filter_clauses.filter_clause_base import FilterClauseBase from semantic_kernel.data.kernel_search_results import KernelSearchResults from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition @@ -28,7 +29,6 @@ from semantic_kernel.data.vector_search.vectorized_search import VectorizedSearchMixin from semantic_kernel.exceptions import VectorSearchExecutionException, VectorStoreModelValidationError from semantic_kernel.kernel_types import OneOrMany -from 
semantic_kernel.utils.list_handler import empty_generator KEY_TYPES = str | int | float @@ -149,10 +149,7 @@ async def _inner_search_vectorized( raise ValueError("Vector field name must be provided in options for vector search.") field = options.vector_field_name assert isinstance(self.data_model_definition.fields.get(field), VectorStoreRecordVectorField) # nosec - distance_metric = ( - self.data_model_definition.fields.get(field).distance_function # type: ignore - or DistanceFunction.COSINE_DISTANCE - ) + distance_metric = self.data_model_definition.fields.get(field).distance_function or "default" # type: ignore distance_func = DISTANCE_FUNCTION_MAP[distance_metric] for key, record in self._get_filtered_records(options).items(): @@ -163,13 +160,10 @@ async def _inner_search_vectorized( distance_func, invert_score=distance_metric == DistanceFunction.COSINE_SIMILARITY, ) - sorted_records = dict( - sorted( - return_records.items(), - key=lambda item: item[1], - reverse=DISTANCE_FUNCTION_DIRECTION_HELPER[distance_metric](1, 0), - ) - ) + if distance_metric in [DistanceFunction.COSINE_SIMILARITY, DistanceFunction.DOT_PROD]: + sorted_records = dict(sorted(return_records.items(), key=lambda item: item[1], reverse=True)) + else: + sorted_records = dict(sorted(return_records.items(), key=lambda item: item[1])) if sorted_records: return KernelSearchResults( results=self._get_vector_search_results_from_results( @@ -177,7 +171,7 @@ async def _inner_search_vectorized( ), total_count=len(return_records) if options and options.include_total_count else None, ) - return KernelSearchResults(results=empty_generator()) + return KernelSearchResults(results=None) async def _generate_return_list( self, return_records: dict[KEY_TYPES, float], options: VectorSearchOptions | None diff --git a/python/semantic_kernel/connectors/memory/in_memory/in_memory_store.py b/python/semantic_kernel/connectors/memory/in_memory/in_memory_store.py index 4828820053dd..2d8c5b6b7d9f 100644 --- a/python/semantic_kernel/connectors/memory/in_memory/in_memory_store.py +++ b/python/semantic_kernel/connectors/memory/in_memory/in_memory_store.py @@ -12,14 +12,14 @@ from semantic_kernel.connectors.memory.in_memory.in_memory_collection import InMemoryVectorCollection from semantic_kernel.data import VectorStore, VectorStoreRecordCollection, VectorStoreRecordDefinition -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) TModel = TypeVar("TModel") -@experimental +@experimental_class class InMemoryVectorStore(VectorStore): """Create a In Memory Vector Store.""" diff --git a/python/semantic_kernel/connectors/memory/milvus/milvus_memory_store.py b/python/semantic_kernel/connectors/memory/milvus/milvus_memory_store.py index d834b0c76e79..b3a5fe3275e7 100644 --- a/python/semantic_kernel/connectors/memory/milvus/milvus_memory_store.py +++ b/python/semantic_kernel/connectors/memory/milvus/milvus_memory_store.py @@ -10,7 +10,7 @@ from semantic_kernel.exceptions import ServiceResourceNotFoundError, ServiceResponseException from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class, experimental_function logger: logging.Logger = logging.getLogger(__name__) @@ -48,7 +48,7 @@ ] 
-@experimental +@experimental_function def memoryrecord_to_milvus_dict(mem: MemoryRecord) -> dict[str, Any]: """Convert a memoryrecord into a dict. @@ -69,7 +69,7 @@ def memoryrecord_to_milvus_dict(mem: MemoryRecord) -> dict[str, Any]: return ret_dict -@experimental +@experimental_function def milvus_dict_to_memoryrecord(milvus_dict: dict[str, Any]) -> MemoryRecord: """Convert Milvus search result dict into MemoryRecord. @@ -96,7 +96,7 @@ def milvus_dict_to_memoryrecord(milvus_dict: dict[str, Any]) -> MemoryRecord: ) -@experimental +@experimental_function def create_fields(dimensions: int) -> list[FieldSchema]: """Create the fields for the Milvus collection.""" return [ @@ -144,7 +144,7 @@ def create_fields(dimensions: int) -> list[FieldSchema]: ] -@experimental +@experimental_class class MilvusMemoryStore(MemoryStoreBase): """Memory store based on Milvus.""" diff --git a/python/semantic_kernel/connectors/memory/mongodb_atlas/__init__.py b/python/semantic_kernel/connectors/memory/mongodb_atlas/__init__.py index bbaea131089b..3e3c3775c990 100644 --- a/python/semantic_kernel/connectors/memory/mongodb_atlas/__init__.py +++ b/python/semantic_kernel/connectors/memory/mongodb_atlas/__init__.py @@ -1,15 +1,8 @@ # Copyright (c) Microsoft. All rights reserved. -from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_collection import ( - MongoDBAtlasCollection, -) from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_memory_store import ( MongoDBAtlasMemoryStore, ) from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_settings import MongoDBAtlasSettings -__all__ = [ - "MongoDBAtlasCollection", - "MongoDBAtlasMemoryStore", - "MongoDBAtlasSettings", -] +__all__ = ["MongoDBAtlasMemoryStore", "MongoDBAtlasSettings"] diff --git a/python/semantic_kernel/connectors/memory/mongodb_atlas/const.py b/python/semantic_kernel/connectors/memory/mongodb_atlas/const.py deleted file mode 100644 index 5954e03c7bdc..000000000000 --- a/python/semantic_kernel/connectors/memory/mongodb_atlas/const.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from typing import Final - -from semantic_kernel.data.const import DistanceFunction - -DISTANCE_FUNCTION_MAPPING: Final[dict[DistanceFunction, str]] = { - DistanceFunction.EUCLIDEAN_DISTANCE: "euclidean", - DistanceFunction.COSINE_SIMILARITY: "cosine", - DistanceFunction.DOT_PROD: "dotProduct", -} - -MONGODB_ID_FIELD: Final[str] = "_id" -MONGODB_SCORE_FIELD: Final[str] = "score" -DEFAULT_DB_NAME = "default" -DEFAULT_SEARCH_INDEX_NAME = "default" diff --git a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_collection.py b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_collection.py deleted file mode 100644 index 84723a4be323..000000000000 --- a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_collection.py +++ /dev/null @@ -1,325 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import logging -import sys -from collections.abc import Sequence -from importlib import metadata -from typing import Any, ClassVar, Generic, TypeVar - -from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT - -if sys.version_info >= (3, 11): - from typing import Self # pragma: no cover -else: - from typing_extensions import Self # pragma: no cover - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from pydantic import ValidationError -from pymongo import AsyncMongoClient, ReplaceOne -from pymongo.asynchronous.collection import AsyncCollection -from pymongo.asynchronous.database import AsyncDatabase -from pymongo.driver_info import DriverInfo -from pymongo.operations import SearchIndexModel - -from semantic_kernel.connectors.memory.mongodb_atlas.const import ( - DEFAULT_DB_NAME, - DEFAULT_SEARCH_INDEX_NAME, - MONGODB_ID_FIELD, - MONGODB_SCORE_FIELD, -) -from semantic_kernel.connectors.memory.mongodb_atlas.utils import create_vector_field -from semantic_kernel.data.filter_clauses import AnyTagsEqualTo, EqualTo -from semantic_kernel.data.kernel_search_results import KernelSearchResults -from semantic_kernel.data.record_definition import VectorStoreRecordDefinition -from semantic_kernel.data.record_definition.vector_store_record_fields import VectorStoreRecordDataField -from semantic_kernel.data.vector_search import ( - VectorSearchFilter, - VectorSearchOptions, -) -from semantic_kernel.data.vector_search.vector_search import VectorSearchBase -from semantic_kernel.data.vector_search.vector_search_result import VectorSearchResult -from semantic_kernel.data.vector_search.vectorized_search import VectorizedSearchMixin -from semantic_kernel.exceptions import ( - VectorSearchExecutionException, - VectorStoreInitializationException, - VectorStoreOperationException, -) -from semantic_kernel.utils.feature_stage_decorator import experimental - -logger: logging.Logger = logging.getLogger(__name__) - -TModel = TypeVar("TModel") - - -@experimental -class MongoDBAtlasCollection( - VectorSearchBase[str, TModel], - VectorizedSearchMixin[TModel], - Generic[TModel], -): - """MongoDB Atlas collection implementation.""" - - mongo_client: AsyncMongoClient - database_name: str - index_name: str - supported_key_types: ClassVar[list[str] | None] = ["str"] - supported_vector_types: ClassVar[list[str] | None] = ["float", "int"] - - def __init__( - self, - collection_name: str, - data_model_type: type[TModel], - data_model_definition: VectorStoreRecordDefinition | None = None, - index_name: str | None = None, - mongo_client: AsyncMongoClient | None = None, - connection_string: str | None = None, - database_name: str | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None, - **kwargs: Any, - ) -> None: - """Initializes a new instance of the MongoDBAtlasCollection class. - - Args: - data_model_type: The type of the data model. - data_model_definition: The model definition, optional. - collection_name: The name of the collection, optional. - mongo_client: The MongoDB client for interacting with MongoDB Atlas, - used for creating and deleting collections. - index_name: The name of the index to use for searching, when not passed, will use _idx. - connection_string: The connection string for MongoDB Atlas, optional. - Can be read from environment variables. - database_name: The name of the database, will be filled from the env when this is not set. 
- connection_string: str | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None - **kwargs: Additional keyword arguments - """ - managed_client = kwargs.get("managed_client", not mongo_client) - if mongo_client: - super().__init__( - data_model_type=data_model_type, - data_model_definition=data_model_definition, - mongo_client=mongo_client, - collection_name=collection_name, - database_name=database_name or DEFAULT_DB_NAME, - index_name=index_name or DEFAULT_SEARCH_INDEX_NAME, - managed_client=managed_client, - ) - return - - from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_settings import MongoDBAtlasSettings - - try: - mongodb_atlas_settings = MongoDBAtlasSettings.create( - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - connection_string=connection_string, - database_name=database_name, - index_name=index_name, - ) - except ValidationError as exc: - raise VectorStoreInitializationException("Failed to create MongoDB Atlas settings.") from exc - - mongo_client = AsyncMongoClient( - mongodb_atlas_settings.connection_string.get_secret_value(), - driver=DriverInfo(SEMANTIC_KERNEL_USER_AGENT, metadata.version("semantic-kernel")), - ) - - super().__init__( - data_model_type=data_model_type, - data_model_definition=data_model_definition, - collection_name=collection_name, - mongo_client=mongo_client, - managed_client=managed_client, - database_name=mongodb_atlas_settings.database_name, - index_name=mongodb_atlas_settings.index_name, - ) - - def _get_database(self) -> AsyncDatabase: - """Get the database. - - If you need control over things like read preference, you can override this method. - """ - return self.mongo_client.get_database(self.database_name) - - def _get_collection(self) -> AsyncCollection: - """Get the collection. - - If you need control over things like read preference, you can override this method. 
- """ - return self.mongo_client.get_database(self.database_name).get_collection(self.collection_name) - - @override - async def _inner_upsert( - self, - records: Sequence[Any], - **kwargs: Any, - ) -> Sequence[str]: - operations = [] - ids = [] - for record in records: - operations.append( - ReplaceOne( - filter={MONGODB_ID_FIELD: record[MONGODB_ID_FIELD]}, - replacement=record, - upsert=True, - ) - ) - ids.append(record[MONGODB_ID_FIELD]) - result = await self._get_collection().bulk_write(operations, ordered=False) - return [str(value) for key, value in result.upserted_ids.items()] - - @override - async def _inner_get(self, keys: Sequence[str], **kwargs: Any) -> Sequence[dict[str, Any]]: - result = self._get_collection().find({MONGODB_ID_FIELD: {"$in": keys}}) - return await result.to_list(length=len(keys)) - - @override - async def _inner_delete(self, keys: Sequence[str], **kwargs: Any) -> None: - collection = self._get_collection() - await collection.delete_many({MONGODB_ID_FIELD: {"$in": keys}}) - - def _replace_key_field(self, record: dict[str, Any]) -> dict[str, Any]: - if self._key_field_name == MONGODB_ID_FIELD: - return record - return { - MONGODB_ID_FIELD: record.pop(self._key_field_name, None), - **record, - } - - def _reset_key_field(self, record: dict[str, Any]) -> dict[str, Any]: - if self._key_field_name == MONGODB_ID_FIELD: - return record - return { - self._key_field_name: record.pop(MONGODB_ID_FIELD, None), - **record, - } - - @override - def _serialize_dicts_to_store_models(self, records: Sequence[dict[str, Any]], **kwargs: Any) -> Sequence[Any]: - return [self._replace_key_field(record) for record in records] - - @override - def _deserialize_store_models_to_dicts(self, records: Sequence[Any], **kwargs: Any) -> Sequence[dict[str, Any]]: - return [self._reset_key_field(record) for record in records] - - @override - async def create_collection(self, **kwargs) -> None: - """Create a new collection in MongoDB. - - This first creates a collection, with the kwargs. - Then creates a search index based on the data model definition. - - Args: - **kwargs: Additional keyword arguments. - """ - collection = await self._get_database().create_collection(self.collection_name, **kwargs) - await collection.create_search_index(self._create_index_definition()) - - def _create_index_definition(self) -> SearchIndexModel: - """Create an index definition. - - Returns: - SearchIndexModel: The index definition. 
- """ - vector_fields = [create_vector_field(field) for field in self.data_model_definition.vector_fields] - data_fields = [ - {"path": field.name, "type": "filter"} - for field in self.data_model_definition.fields - if isinstance(field, VectorStoreRecordDataField) and (field.is_filterable or field.is_full_text_searchable) - ] - key_field = [{"path": self.data_model_definition.key_field.name, "type": "filter"}] - return SearchIndexModel( - type="vectorSearch", name=self.index_name, definition={"fields": vector_fields + data_fields + key_field} - ) - - @override - async def does_collection_exist(self, **kwargs) -> bool: - return bool(await self._get_database().list_collection_names(filter={"name": self.collection_name})) - - @override - async def delete_collection(self, **kwargs) -> None: - await self._get_database().drop_collection(self.collection_name, **kwargs) - - @override - async def _inner_search( - self, - options: VectorSearchOptions, - search_text: str | None = None, - vectorizable_text: str | None = None, - vector: list[float | int] | None = None, - **kwargs: Any, - ) -> KernelSearchResults[VectorSearchResult[TModel]]: - if vector is not None: - return await self._inner_vectorized_search(options, vector, **kwargs) - raise VectorStoreOperationException("Vector is required for search.") - - async def _inner_vectorized_search( - self, - options: VectorSearchOptions, - vector: list[float | int], - **kwargs: Any, - ) -> KernelSearchResults[VectorSearchResult[TModel]]: - collection = self._get_collection() - vector_search_query: dict[str, Any] = { - "limit": options.top + options.skip, - "index": f"{options.vector_field_name}_", - "queryVector": vector, - "path": options.vector_field_name, - } - if options.filter.filters: - vector_search_query["filter"] = self._build_filter_dict(options.filter) - - projection_query: dict[str, int | dict] = { - field: 1 - for field in self.data_model_definition.get_field_names( - include_vector_fields=options.include_vectors, - include_key_field=False, # _id is always included - ) - } - projection_query[MONGODB_SCORE_FIELD] = {"$meta": "vectorSearchScore"} - try: - raw_results = await collection.aggregate([ - {"$vectorSearch": vector_search_query}, - {"$project": projection_query}, - ]) - except Exception as exc: - raise VectorSearchExecutionException("Failed to search the collection.") from exc - return KernelSearchResults( - results=self._get_vector_search_results_from_results(raw_results, options), - total_count=None, # no way to get a count before looping through the result cursor - ) - - def _build_filter_dict(self, search_filter: VectorSearchFilter) -> dict[str, Any]: - """Create the filter dictionary based on the filters.""" - filter_dict = {} - for filter in search_filter.filters: - if isinstance(filter, EqualTo): - filter_dict[filter.field_name] = filter.value - elif isinstance(filter, AnyTagsEqualTo): - filter_dict[filter.field_name] = {"$in": filter.value} - return filter_dict - - @override - def _get_record_from_result(self, result: dict[str, Any]) -> dict[str, Any]: - return result - - @override - def _get_score_from_result(self, result: dict[str, Any]) -> float | None: - return result.get(MONGODB_SCORE_FIELD) - - @override - async def __aexit__(self, exc_type, exc_value, traceback) -> None: - """Exit the context manager.""" - if self.managed_client: - await self.mongo_client.close() - - async def __aenter__(self) -> Self: - """Enter the context manager.""" - await self.mongo_client.aconnect() - return self diff --git 
a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_memory_store.py b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_memory_store.py index 9e46c04ad818..9c57c3341d28 100644 --- a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_memory_store.py +++ b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_memory_store.py @@ -22,12 +22,12 @@ from semantic_kernel.exceptions.memory_connector_exceptions import MemoryConnectorInitializationError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class MongoDBAtlasMemoryStore(MemoryStoreBase): """Memory Store for MongoDB Atlas Vector Search Connections.""" diff --git a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_settings.py b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_settings.py index eaefaeb17936..0eec5591d15f 100644 --- a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_settings.py +++ b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_settings.py @@ -4,22 +4,18 @@ from pydantic import SecretStr -from semantic_kernel.connectors.memory.mongodb_atlas.const import DEFAULT_DB_NAME, DEFAULT_SEARCH_INDEX_NAME +from semantic_kernel.connectors.memory.mongodb_atlas.utils import DEFAULT_DB_NAME, DEFAULT_SEARCH_INDEX_NAME from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class MongoDBAtlasSettings(KernelBaseSettings): """MongoDB Atlas model settings. Args: - connection_string: str - MongoDB Atlas connection string (Env var MONGODB_ATLAS_CONNECTION_STRING) - - database_name: str - MongoDB Atlas database name, defaults to 'default' - (Env var MONGODB_ATLAS_DATABASE_NAME) - - index_name: str - MongoDB Atlas search index name, defaults to 'default' - (Env var MONGODB_ATLAS_INDEX_NAME) """ env_prefix: ClassVar[str] = "MONGODB_ATLAS_" diff --git a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_store.py b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_store.py deleted file mode 100644 index 8e97476fbb69..000000000000 --- a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_store.py +++ /dev/null @@ -1,145 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import logging -import sys -from importlib import metadata -from typing import TYPE_CHECKING, Any, TypeVar - -if sys.version_info >= (3, 11): - from typing import Self # pragma: no cover -else: - from typing_extensions import Self # pragma: no cover - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from pydantic import ValidationError -from pymongo import AsyncMongoClient -from pymongo.asynchronous.database import AsyncDatabase -from pymongo.driver_info import DriverInfo - -from semantic_kernel.connectors.memory.mongodb_atlas.const import DEFAULT_DB_NAME -from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_collection import ( - MongoDBAtlasCollection, -) -from semantic_kernel.data.record_definition import VectorStoreRecordDefinition -from semantic_kernel.data.vector_storage import VectorStore -from semantic_kernel.exceptions import VectorStoreInitializationException -from semantic_kernel.utils.feature_stage_decorator import experimental -from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT - -if TYPE_CHECKING: - from semantic_kernel.data import VectorStoreRecordCollection - - -logger: logging.Logger = logging.getLogger(__name__) - -TModel = TypeVar("TModel") - - -@experimental -class MongoDBAtlasStore(VectorStore): - """MongoDB Atlas store implementation.""" - - mongo_client: AsyncMongoClient - database_name: str - - def __init__( - self, - connection_string: str | None = None, - database_name: str | None = None, - mongo_client: AsyncMongoClient | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None, - **kwargs: Any, - ) -> None: - """Initializes a new instance of the MongoDBAtlasStore client. - - Args: - connection_string: The connection string for MongoDB Atlas, optional. - Can be read from environment variables. - database_name: The name of the database, optional. Can be read from environment variables. - mongo_client: The MongoDB client, optional. - env_file_path: Use the environment settings file as a fallback - to environment variables. - env_file_encoding: The encoding of the environment settings file. - kwargs: Additional keyword arguments. 
- """ - managed_client = kwargs.get("managed_client", not mongo_client) - if mongo_client: - super().__init__( - mongo_client=mongo_client, - managed_client=managed_client, - database_name=database_name or DEFAULT_DB_NAME, - ) - return - from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_settings import ( - MongoDBAtlasSettings, - ) - - try: - mongodb_atlas_settings = MongoDBAtlasSettings.create( - env_file_path=env_file_path, - connection_string=connection_string, - database_name=database_name, - env_file_encoding=env_file_encoding, - ) - except ValidationError as exc: - raise VectorStoreInitializationException("Failed to create MongoDB Atlas settings.") from exc - if not mongodb_atlas_settings.connection_string: - raise VectorStoreInitializationException("The connection string is missing.") - - mongo_client = AsyncMongoClient( - mongodb_atlas_settings.connection_string.get_secret_value(), - driver=DriverInfo(SEMANTIC_KERNEL_USER_AGENT, metadata.version("semantic-kernel")), - ) - - super().__init__( - mongo_client=mongo_client, - managed_client=managed_client, - database_name=mongodb_atlas_settings.database_name, - ) - - @override - def get_collection( - self, - collection_name: str, - data_model_type: type[TModel], - data_model_definition: VectorStoreRecordDefinition | None = None, - **kwargs: Any, - ) -> "VectorStoreRecordCollection": - """Get a MongoDBAtlasCollection tied to a collection. - - Args: - collection_name (str): The name of the collection. - data_model_type (type[TModel]): The type of the data model. - data_model_definition (VectorStoreRecordDefinition | None): The model fields, optional. - **kwargs: Additional keyword arguments, passed to the collection constructor. - """ - if collection_name not in self.vector_record_collections: - self.vector_record_collections[collection_name] = MongoDBAtlasCollection( - data_model_type=data_model_type, - data_model_definition=data_model_definition, - mongo_client=self.mongo_client, - collection_name=collection_name, - database_name=self.database_name, - **kwargs, - ) - return self.vector_record_collections[collection_name] - - @override - async def list_collection_names(self, **kwargs: Any) -> list[str]: - database: AsyncDatabase = self.mongo_client.get_database(self.database_name) - return await database.list_collection_names() - - async def __aexit__(self, exc_type, exc_value, traceback) -> None: - """Exit the context manager.""" - if self.managed_client: - await self.mongo_client.close() - - async def __aenter__(self) -> Self: - """Enter the context manager.""" - await self.mongo_client.aconnect() - return self diff --git a/python/semantic_kernel/connectors/memory/mongodb_atlas/utils.py b/python/semantic_kernel/connectors/memory/mongodb_atlas/utils.py index f05b94b45782..cb415f45377c 100644 --- a/python/semantic_kernel/connectors/memory/mongodb_atlas/utils.py +++ b/python/semantic_kernel/connectors/memory/mongodb_atlas/utils.py @@ -1,17 +1,11 @@ # Copyright (c) Microsoft. All rights reserved. 
from numpy import array -from pymongo.operations import SearchIndexModel -from semantic_kernel.connectors.memory.mongodb_atlas.const import DISTANCE_FUNCTION_MAPPING -from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition -from semantic_kernel.data.record_definition.vector_store_record_fields import ( - VectorStoreRecordDataField, - VectorStoreRecordVectorField, -) -from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError from semantic_kernel.memory.memory_record import MemoryRecord +DEFAULT_DB_NAME = "default" +DEFAULT_SEARCH_INDEX_NAME = "default" NUM_CANDIDATES_SCALAR = 10 MONGODB_FIELD_ID = "_id" @@ -72,44 +66,3 @@ def memory_record_to_mongo_document(record: MemoryRecord) -> dict: MONGODB_FIELD_EMBEDDING: record._embedding.tolist(), MONGODB_FIELD_TIMESTAMP: record._timestamp, } - - -def create_vector_field(field: VectorStoreRecordVectorField) -> dict: - """Create a vector field. - - Args: - field (VectorStoreRecordVectorField): The vector field. - - Returns: - dict: The vector field. - """ - if field.distance_function not in DISTANCE_FUNCTION_MAPPING: - raise ServiceInitializationError(f"Invalid distance function: {field.distance_function}") - return { - "type": "vector", - "numDimensions": field.dimensions, - "path": field.name, - "similarity": DISTANCE_FUNCTION_MAPPING[field.distance_function], - } - - -def create_index_definition(record_definition: VectorStoreRecordDefinition, index_name: str) -> SearchIndexModel: - """Create an index definition. - - Args: - record_definition (VectorStoreRecordDefinition): The record definition. - index_name (str): The index name. - - Returns: - SearchIndexModel: The index definition. - """ - vector_fields = [create_vector_field(field) for field in record_definition.vector_fields] - data_fields = [ - {"path": field.name, "type": "filter"} - for field in record_definition.fields - if isinstance(field, VectorStoreRecordDataField) and (field.is_filterable or field.is_full_text_searchable) - ] - key_field = [{"path": record_definition.key_field.name, "type": "filter"}] - return SearchIndexModel( - type="vectorSearch", name=index_name, definition={"fields": vector_fields + data_fields + key_field} - ) diff --git a/python/semantic_kernel/connectors/memory/pinecone/pinecone_memory_store.py b/python/semantic_kernel/connectors/memory/pinecone/pinecone_memory_store.py index 0638f053e407..25f72fe05481 100644 --- a/python/semantic_kernel/connectors/memory/pinecone/pinecone_memory_store.py +++ b/python/semantic_kernel/connectors/memory/pinecone/pinecone_memory_store.py @@ -18,7 +18,7 @@ from semantic_kernel.exceptions.memory_connector_exceptions import MemoryConnectorInitializationError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class # Limitations set by Pinecone at https://docs.pinecone.io/reference/known-limitations MAX_DIMENSIONALITY = 20000 @@ -31,7 +31,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class PineconeMemoryStore(MemoryStoreBase): """A memory store that uses Pinecone as the backend.""" diff --git a/python/semantic_kernel/connectors/memory/pinecone/pinecone_settings.py b/python/semantic_kernel/connectors/memory/pinecone/pinecone_settings.py index 03b3cf43b212..db2cd99ef88b 100644 --- 
a/python/semantic_kernel/connectors/memory/pinecone/pinecone_settings.py +++ b/python/semantic_kernel/connectors/memory/pinecone/pinecone_settings.py @@ -5,10 +5,10 @@ from pydantic import SecretStr from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class PineconeSettings(KernelBaseSettings): """Pinecone model settings. diff --git a/python/semantic_kernel/connectors/memory/postgres/constants.py b/python/semantic_kernel/connectors/memory/postgres/constants.py index 3121b70ef72b..6c08ef2052e6 100644 --- a/python/semantic_kernel/connectors/memory/postgres/constants.py +++ b/python/semantic_kernel/connectors/memory/postgres/constants.py @@ -5,10 +5,6 @@ # Limitation based on pgvector documentation https://github.com/pgvector/pgvector#what-if-i-want-to-index-vectors-with-more-than-2000-dimensions MAX_DIMENSIONALITY = 2000 -# The name of the column that returns distance value in the database. -# It is used in the similarity search query. Must not conflict with model property. -DISTANCE_COLUMN_NAME = "sk_pg_distance" - # Environment Variables PGHOST_ENV_VAR = "PGHOST" PGPORT_ENV_VAR = "PGPORT" diff --git a/python/semantic_kernel/connectors/memory/postgres/postgres_collection.py b/python/semantic_kernel/connectors/memory/postgres/postgres_collection.py index 5e30e8e923f1..6de863646dc3 100644 --- a/python/semantic_kernel/connectors/memory/postgres/postgres_collection.py +++ b/python/semantic_kernel/connectors/memory/postgres/postgres_collection.py @@ -1,53 +1,40 @@ # Copyright (c) Microsoft. All rights reserved. import logging -import random -import string import sys -from collections.abc import AsyncGenerator, Sequence -from typing import Any, ClassVar, Generic, TypeVar +from collections.abc import Sequence +from typing import Any, ClassVar, TypeVar + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover from psycopg import sql from psycopg_pool import AsyncConnectionPool from pydantic import PrivateAttr -from semantic_kernel.connectors.memory.postgres.constants import ( - DEFAULT_SCHEMA, - DISTANCE_COLUMN_NAME, - MAX_DIMENSIONALITY, -) +from semantic_kernel.connectors.memory.postgres.constants import DEFAULT_SCHEMA, MAX_DIMENSIONALITY from semantic_kernel.connectors.memory.postgres.postgres_settings import PostgresSettings from semantic_kernel.connectors.memory.postgres.utils import ( convert_dict_to_row, convert_row_to_dict, - get_vector_distance_ops_str, get_vector_index_ops_str, python_type_to_postgres, ) -from semantic_kernel.data.const import DistanceFunction, IndexKind -from semantic_kernel.data.filter_clauses.any_tags_equal_to_filter_clause import AnyTagsEqualTo -from semantic_kernel.data.filter_clauses.equal_to_filter_clause import EqualTo -from semantic_kernel.data.kernel_search_results import KernelSearchResults +from semantic_kernel.data.const import IndexKind from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition from semantic_kernel.data.record_definition.vector_store_record_fields import ( - VectorStoreRecordField, VectorStoreRecordKeyField, VectorStoreRecordVectorField, ) -from semantic_kernel.data.vector_search.vector_search import VectorSearchBase -from semantic_kernel.data.vector_search.vector_search_filter import VectorSearchFilter -from 
semantic_kernel.data.vector_search.vector_search_options import VectorSearchOptions -from semantic_kernel.data.vector_search.vector_search_result import VectorSearchResult -from semantic_kernel.data.vector_search.vectorized_search import VectorizedSearchMixin -from semantic_kernel.exceptions import VectorStoreModelValidationError, VectorStoreOperationException -from semantic_kernel.exceptions.vector_store_exceptions import VectorSearchExecutionException +from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection +from semantic_kernel.exceptions import ( + VectorStoreModelValidationError, + VectorStoreOperationException, +) from semantic_kernel.kernel_types import OneOrMany -from semantic_kernel.utils.feature_stage_decorator import experimental - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover +from semantic_kernel.utils.experimental_decorator import experimental_class TKey = TypeVar("TKey", str, int) TModel = TypeVar("TModel") @@ -55,19 +42,14 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental -class PostgresCollection( - VectorSearchBase[TKey, TModel], - VectorizedSearchMixin[TModel], - Generic[TKey, TModel], -): +@experimental_class +class PostgresCollection(VectorStoreRecordCollection[TKey, TModel]): """PostgreSQL collection implementation.""" connection_pool: AsyncConnectionPool | None = None db_schema: str = DEFAULT_SCHEMA supported_key_types: ClassVar[list[str] | None] = ["str", "int"] supported_vector_types: ClassVar[list[str] | None] = ["float"] - _distance_column_name: str = PrivateAttr(DISTANCE_COLUMN_NAME) _settings: PostgresSettings = PrivateAttr() """Postgres settings""" @@ -102,52 +84,26 @@ def __init__( data_model_definition=data_model_definition, connection_pool=connection_pool, db_schema=db_schema, - # This controls whether the connection pool is managed by the collection - # in the __aenter__ and __aexit__ methods. - managed_client=connection_pool is None, ) self._settings = settings or PostgresSettings.create( env_file_path=env_file_path, env_file_encoding=env_file_encoding ) - @override - def model_post_init(self, __context: object | None = None) -> None: - """Post-initialization of the model. - - In addition to the base class implementation, this method resets the distance column name - to avoid collisions if necessary. - """ - super().model_post_init(__context) - - distance_column_name = DISTANCE_COLUMN_NAME - tries = 0 - while distance_column_name in self.data_model_definition.fields: - # Reset the distance column name, ensuring no collision with existing model fields - # Avoid bandit B311 - random is not used for a security/cryptographic purpose - suffix = "".join(random.choices(string.ascii_lowercase + string.digits, k=8)) # nosec B311 - distance_column_name = f"{DISTANCE_COLUMN_NAME}_{suffix}" - tries += 1 - if tries > 10: - raise VectorStoreModelValidationError("Unable to generate a unique distance column name.") - self._distance_column_name = distance_column_name - - # region: VectorStoreRecordCollection implementation - @override async def __aenter__(self) -> "PostgresCollection": # If the connection pool was not provided, create a new one. if not self.connection_pool: self.connection_pool = await self._settings.create_connection_pool() + self.managed_client = True return self @override async def __aexit__(self, *args): - # Only close the connection pool if it was created by the collection. 
if self.managed_client and self.connection_pool: await self.connection_pool.close() # If the pool was created by the collection, set it to None to enable reusing the collection. - if self.managed_client: + if self._settings: self.connection_pool = None @override @@ -202,16 +158,13 @@ async def _inner_upsert( # Execute the INSERT statement for each batch await cur.executemany( - sql.SQL( - "INSERT INTO {schema}.{table} ({col_names}) VALUES ({placeholders}) " - "ON CONFLICT ({key_name}) DO UPDATE SET {update_columns}" - ).format( - schema=sql.Identifier(self.db_schema), - table=sql.Identifier(self.collection_name), - col_names=sql.SQL(", ").join(sql.Identifier(field.name) for _, field in fields), - placeholders=sql.SQL(", ").join(sql.Placeholder() * len(fields)), - key_name=sql.Identifier(self.data_model_definition.key_field.name), - update_columns=sql.SQL(", ").join( + sql.SQL("INSERT INTO {}.{} ({}) VALUES ({}) ON CONFLICT ({}) DO UPDATE SET {}").format( + sql.Identifier(self.db_schema), + sql.Identifier(self.collection_name), + sql.SQL(", ").join(sql.Identifier(field.name) for _, field in fields), + sql.SQL(", ").join(sql.Placeholder() * len(fields)), + sql.Identifier(self.data_model_definition.key_field.name), + sql.SQL(", ").join( sql.SQL("{field} = EXCLUDED.{field}").format(field=sql.Identifier(field.name)) for _, field in fields if field.name != self.data_model_definition.key_field.name @@ -241,12 +194,12 @@ async def _inner_get(self, keys: Sequence[TKey], **kwargs: Any) -> OneOrMany[dic fields = [(field.name, field) for field in self.data_model_definition.fields.values()] async with self.connection_pool.connection() as conn, conn.cursor() as cur: await cur.execute( - sql.SQL("SELECT {select_list} FROM {schema}.{table} WHERE {key_name} IN ({keys})").format( - select_list=sql.SQL(", ").join(sql.Identifier(name) for (name, _) in fields), - schema=sql.Identifier(self.db_schema), - table=sql.Identifier(self.collection_name), - key_name=sql.Identifier(self.data_model_definition.key_field.name), - keys=sql.SQL(", ").join(sql.Literal(key) for key in keys), + sql.SQL("SELECT {} FROM {}.{} WHERE {} IN ({})").format( + sql.SQL(", ").join(sql.Identifier(name) for (name, _) in fields), + sql.Identifier(self.db_schema), + sql.Identifier(self.collection_name), + sql.Identifier(self.data_model_definition.key_field.name), + sql.SQL(", ").join(sql.Literal(key) for key in keys), ) ) rows = await cur.fetchall() @@ -279,11 +232,11 @@ async def _inner_delete(self, keys: Sequence[TKey], **kwargs: Any) -> None: # Execute the DELETE statement for each batch await cur.execute( - sql.SQL("DELETE FROM {schema}.{table} WHERE {name} IN ({keys})").format( - schema=sql.Identifier(self.db_schema), - table=sql.Identifier(self.collection_name), - name=sql.Identifier(self.data_model_definition.key_field.name), - keys=sql.SQL(", ").join(sql.Literal(key) for key in key_batch), + sql.SQL("DELETE FROM {}.{} WHERE {} IN ({})").format( + sql.Identifier(self.db_schema), + sql.Identifier(self.collection_name), + sql.Identifier(self.data_model_definition.key_field.name), + sql.SQL(", ").join(sql.Literal(key) for key in key_batch), ) ) @@ -332,29 +285,21 @@ async def create_collection(self, **kwargs: Any) -> None: # but would need to be created outside of this method. 
if isinstance(field, VectorStoreRecordVectorField) and field.dimensions: column_definitions.append( - sql.SQL("{name} VECTOR({dimensions})").format( - name=sql.Identifier(field_name), dimensions=sql.Literal(field.dimensions) - ) + sql.SQL("{} VECTOR({})").format(sql.Identifier(field_name), sql.Literal(field.dimensions)) ) elif isinstance(field, VectorStoreRecordKeyField): # Use the property_type directly for key fields column_definitions.append( - sql.SQL("{name} {col_type} PRIMARY KEY").format( - name=sql.Identifier(field_name), col_type=sql.SQL(property_type) - ) + sql.SQL("{} {} PRIMARY KEY").format(sql.Identifier(field_name), sql.SQL(property_type)) ) else: # Use the property_type directly for other types - column_definitions.append( - sql.SQL("{name} {col_type}").format( - name=sql.Identifier(field_name), col_type=sql.SQL(property_type) - ) - ) + column_definitions.append(sql.SQL("{} {}").format(sql.Identifier(field_name), sql.SQL(property_type))) columns_str = sql.SQL(", ").join(column_definitions) - create_table_query = sql.SQL("CREATE TABLE {schema}.{table} ({columns})").format( - schema=sql.Identifier(self.db_schema), table=sql.Identifier(table_name), columns=columns_str + create_table_query = sql.SQL("CREATE TABLE {}.{} ({})").format( + sql.Identifier(self.db_schema), sql.Identifier(table_name), columns_str ) async with self.connection_pool.connection() as conn, conn.cursor() as cur: @@ -368,42 +313,6 @@ async def create_collection(self, **kwargs: Any) -> None: if vector_field.index_kind: await self._create_index(table_name, vector_field) - @override - async def does_collection_exist(self, **kwargs: Any) -> bool: - """Check if the collection exists.""" - if self.connection_pool is None: - raise VectorStoreOperationException( - "Connection pool is not available, use the collection as a context manager." - ) - - async with self.connection_pool.connection() as conn, conn.cursor() as cur: - await cur.execute( - """ - SELECT table_name - FROM information_schema.tables - WHERE table_schema = %s AND table_name = %s - """, - (self.db_schema, self.collection_name), - ) - row = await cur.fetchone() - return bool(row) - - @override - async def delete_collection(self, **kwargs: Any) -> None: - """Delete the collection.""" - if self.connection_pool is None: - raise VectorStoreOperationException( - "Connection pool is not available, use the collection as a context manager." - ) - - async with self.connection_pool.connection() as conn, conn.cursor() as cur: - await cur.execute( - sql.SQL("DROP TABLE {schema}.{table} CASCADE").format( - schema=sql.Identifier(self.db_schema), table=sql.Identifier(self.collection_name) - ), - ) - await conn.commit() - async def _create_index(self, table_name: str, vector_field: VectorStoreRecordVectorField) -> None: """Create an index on a column in the table. 
@@ -438,200 +347,51 @@ async def _create_index(self, table_name: str, vector_field: VectorStoreRecordVe async with self.connection_pool.connection() as conn, conn.cursor() as cur: await cur.execute( - sql.SQL("CREATE INDEX {index_name} ON {schema}.{table} USING {index_kind} ({column_name} {op})").format( - index_name=sql.Identifier(index_name), - schema=sql.Identifier(self.db_schema), - table=sql.Identifier(table_name), - index_kind=sql.SQL(vector_field.index_kind), - column_name=sql.Identifier(column_name), - op=sql.SQL(ops_str), + sql.SQL("CREATE INDEX {} ON {}.{} USING {} ({} {})").format( + sql.Identifier(index_name), + sql.Identifier(self.db_schema), + sql.Identifier(table_name), + sql.SQL(vector_field.index_kind), + sql.Identifier(column_name), + sql.SQL(ops_str), ) ) await conn.commit() logger.info(f"Index '{index_name}' created successfully on column '{column_name}'.") - # endregion - # region: VectorSearchBase implementation - @override - async def _inner_search( - self, - options: VectorSearchOptions, - search_text: str | None = None, - vectorizable_text: str | None = None, - vector: list[float | int] | None = None, - **kwargs: Any, - ) -> KernelSearchResults[VectorSearchResult[TModel]]: + async def does_collection_exist(self, **kwargs: Any) -> bool: + """Check if the collection exists.""" if self.connection_pool is None: raise VectorStoreOperationException( "Connection pool is not available, use the collection as a context manager." ) - if vector is not None: - query, params, return_fields = self._construct_vector_query(vector, options, **kwargs) - elif search_text: - raise VectorSearchExecutionException("Text search not supported.") - elif vectorizable_text: - raise VectorSearchExecutionException("Vectorizable text search not supported.") - - if options.include_total_count: - async with self.connection_pool.connection() as conn, conn.cursor() as cur: - await cur.execute(query, params) - # Fetch all results to get total count. - rows = await cur.fetchall() - row_dicts = [convert_row_to_dict(row, return_fields) for row in rows] - return KernelSearchResults( - results=self._get_vector_search_results_from_results(row_dicts, options), total_count=len(row_dicts) - ) - else: - # Use an asynchronous generator to fetch and yield results - connection_pool = self.connection_pool - - async def fetch_results() -> AsyncGenerator[dict[str, Any], None]: - async with connection_pool.connection() as conn, conn.cursor() as cur: - await cur.execute(query, params) - async for row in cur: - yield convert_row_to_dict(row, return_fields) - - return KernelSearchResults( - results=self._get_vector_search_results_from_results(fetch_results(), options), - total_count=None, + async with self.connection_pool.connection() as conn, conn.cursor() as cur: + await cur.execute( + """ + SELECT table_name + FROM information_schema.tables + WHERE table_schema = %s AND table_name = %s + """, + (self.db_schema, self.collection_name), ) + row = await cur.fetchone() + return bool(row) - def _construct_vector_query( - self, - vector: list[float | int], - options: VectorSearchOptions, - **kwargs: Any, - ) -> tuple[sql.Composed, list[Any], list[tuple[str, VectorStoreRecordField | None]]]: - """Construct a vector search query. - - Args: - vector: The vector to search for. - options: The search options. - **kwargs: Additional arguments. - - Returns: - The query, parameters, and the fields representing the columns in the result. 
- """ - # Get the vector field we will be searching against, - # defaulting to the first vector field if not specified - vector_fields = self.data_model_definition.vector_fields - if not vector_fields: - raise VectorSearchExecutionException("No vector fields defined.") - if options.vector_field_name: - vector_field = next((f for f in vector_fields if f.name == options.vector_field_name), None) - if not vector_field: - raise VectorSearchExecutionException(f"Vector field '{options.vector_field_name}' not found.") - else: - vector_field = vector_fields[0] - - # Default to cosine distance if not set - distance_function = vector_field.distance_function or DistanceFunction.COSINE_DISTANCE - ops_str = get_vector_distance_ops_str(distance_function) - - # Select all fields except all vector fields if include_vectors is False - select_list = self.data_model_definition.get_field_names(include_vector_fields=options.include_vectors) - - where_clause = self._build_where_clauses_from_filter(options.filter) - - query = sql.SQL("SELECT {select_list}, {vec_col} {dist_op} %s as {dist_col} FROM {schema}.{table}").format( - select_list=sql.SQL(", ").join(sql.Identifier(name) for name in select_list), - vec_col=sql.Identifier(vector_field.name), - dist_op=sql.SQL(ops_str), - dist_col=sql.Identifier(self._distance_column_name), - schema=sql.Identifier(self.db_schema), - table=sql.Identifier(self.collection_name), - ) - - if where_clause: - query += where_clause - - query += sql.SQL(" ORDER BY {dist_col} LIMIT {limit}").format( - dist_col=sql.Identifier(self._distance_column_name), - limit=sql.Literal(options.top), - ) - - if options.skip: - query += sql.SQL(" OFFSET {offset}").format(offset=sql.Literal(options.skip)) - - # For cosine similarity, we need to take 1 - cosine distance. - # However, we can't use an expression in the ORDER BY clause or else the index won't be used. - # Instead we'll wrap the query in a subquery and modify the distance in the outer query. - if distance_function == DistanceFunction.COSINE_SIMILARITY: - query = sql.SQL( - "SELECT subquery.*, 1 - subquery.{subquery_dist_col} AS {dist_col} FROM ({subquery}) AS subquery" - ).format( - subquery_dist_col=sql.Identifier(self._distance_column_name), - dist_col=sql.Identifier(self._distance_column_name), - subquery=query, + @override + async def delete_collection(self, **kwargs: Any) -> None: + """Delete the collection.""" + if self.connection_pool is None: + raise VectorStoreOperationException( + "Connection pool is not available, use the collection as a context manager." ) - # For inner product, we need to take -1 * inner product. - # However, we can't use an expression in the ORDER BY clause or else the index won't be used. - # Instead we'll wrap the query in a subquery and modify the distance in the outer query. 
- if distance_function == DistanceFunction.DOT_PROD: - query = sql.SQL( - "SELECT subquery.*, -1 * subquery.{subquery_dist_col} AS {dist_col} FROM ({subquery}) AS subquery" - ).format( - subquery_dist_col=sql.Identifier(self._distance_column_name), - dist_col=sql.Identifier(self._distance_column_name), - subquery=query, + async with self.connection_pool.connection() as conn, conn.cursor() as cur: + await cur.execute( + sql.SQL("DROP TABLE {scm}.{tbl} CASCADE").format( + scm=sql.Identifier(self.db_schema), tbl=sql.Identifier(self.collection_name) + ), ) - - # Convert the vector to a string for the query - params = ["[" + ",".join([str(float(v)) for v in vector]) + "]"] - - return ( - query, - params, - [ - *((name, f) for (name, f) in self.data_model_definition.fields.items() if name in select_list), - (self._distance_column_name, None), - ], - ) - - def _build_where_clauses_from_filter(self, filters: VectorSearchFilter | None) -> sql.Composed | None: - """Build the WHERE clause for the search query from the filter in the search options. - - Args: - filters: The filters. - - Returns: - The WHERE clause. - """ - if not filters or not filters.filters: - return None - - where_clauses = [] - for filter in filters.filters: - match filter: - case EqualTo(): - where_clauses.append( - sql.SQL("{field} = {value}").format( - field=sql.Identifier(filter.field_name), - value=sql.Literal(filter.value), - ) - ) - case AnyTagsEqualTo(): - where_clauses.append( - sql.SQL("{field} @> ARRAY[{value}::TEXT").format( - field=sql.Identifier(filter.field_name), - value=sql.Literal(filter.value), - ) - ) - case _: - raise ValueError(f"Unsupported filter: {filter}") - - return sql.SQL("WHERE {clause}").format(clause=sql.SQL(" AND ").join(where_clauses)) - - @override - def _get_record_from_result(self, result: dict[str, Any]) -> dict[str, Any]: - return {k: v for (k, v) in result.items() if k != self._distance_column_name} - - @override - def _get_score_from_result(self, result: Any) -> float | None: - return result.pop(self._distance_column_name, None) - - # endregion + await conn.commit() diff --git a/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py b/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py index 89edd5926998..3d553c570846 100644 --- a/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py +++ b/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py @@ -21,12 +21,12 @@ from semantic_kernel.exceptions.memory_connector_exceptions import MemoryConnectorInitializationError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class PostgresMemoryStore(MemoryStoreBase): """A memory store that uses Postgres with pgvector as the backend.""" diff --git a/python/semantic_kernel/connectors/memory/postgres/postgres_settings.py b/python/semantic_kernel/connectors/memory/postgres/postgres_settings.py index a3361d0a305d..02a5d24616ec 100644 --- a/python/semantic_kernel/connectors/memory/postgres/postgres_settings.py +++ b/python/semantic_kernel/connectors/memory/postgres/postgres_settings.py @@ -4,7 +4,6 @@ from psycopg.conninfo import conninfo_to_dict from psycopg_pool import AsyncConnectionPool -from psycopg_pool.abc 
import ACT from pydantic import Field, SecretStr from semantic_kernel.connectors.memory.postgres.constants import ( @@ -15,12 +14,15 @@ PGSSL_MODE_ENV_VAR, PGUSER_ENV_VAR, ) -from semantic_kernel.exceptions.memory_connector_exceptions import MemoryConnectorConnectionException +from semantic_kernel.exceptions.memory_connector_exceptions import ( + MemoryConnectorConnectionException, + MemoryConnectorInitializationError, +) from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class PostgresSettings(KernelBaseSettings): """Postgres model settings. @@ -59,12 +61,12 @@ class PostgresSettings(KernelBaseSettings): env_prefix: ClassVar[str] = "POSTGRES_" connection_string: SecretStr | None = None - host: str | None = Field(default=None, alias=PGHOST_ENV_VAR) - port: int | None = Field(default=5432, alias=PGPORT_ENV_VAR) - dbname: str | None = Field(default=None, alias=PGDATABASE_ENV_VAR) - user: str | None = Field(default=None, alias=PGUSER_ENV_VAR) - password: SecretStr | None = Field(default=None, alias=PGPASSWORD_ENV_VAR) - sslmode: str | None = Field(default=None, alias=PGSSL_MODE_ENV_VAR) + host: str | None = Field(None, alias=PGHOST_ENV_VAR) + port: int | None = Field(5432, alias=PGPORT_ENV_VAR) + dbname: str | None = Field(None, alias=PGDATABASE_ENV_VAR) + user: str | None = Field(None, alias=PGUSER_ENV_VAR) + password: SecretStr | None = Field(None, alias=PGPASSWORD_ENV_VAR) + sslmode: str | None = Field(None, alias=PGSSL_MODE_ENV_VAR) min_pool: int = 1 max_pool: int = 5 @@ -87,34 +89,30 @@ def get_connection_args(self) -> dict[str, Any]: if self.password: result["password"] = self.password.get_secret_value() - return result - - async def create_connection_pool( - self, connection_class: type[ACT] | None = None, **kwargs: Any - ) -> AsyncConnectionPool: - """Creates a connection pool based off of settings. + # Ensure required values + if "host" not in result: + raise MemoryConnectorInitializationError("host is required. Please set PGHOST or connection_string.") + if "dbname" not in result: + raise MemoryConnectorInitializationError( + "database is required. Please set PGDATABASE or connection_string." + ) + if "user" not in result: + raise MemoryConnectorInitializationError("user is required. Please set PGUSER or connection_string.") + if "password" not in result: + raise MemoryConnectorInitializationError( + "password is required. Please set PGPASSWORD or connection_string." + ) - Args: - connection_class: The connection class to use. - kwargs: Additional keyword arguments to pass to the connection class. + return result - Returns: - The connection pool. 
- """ + async def create_connection_pool(self) -> AsyncConnectionPool: + """Creates a connection pool based off of settings.""" try: - # Only pass connection_class if it specified, or else allow psycopg to use the default connection class - extra_args: dict[str, Any] = {} if connection_class is None else {"connection_class": connection_class} - pool = AsyncConnectionPool( min_size=self.min_pool, max_size=self.max_pool, open=False, - # kwargs are passed to the connection class - kwargs={ - **self.get_connection_args(), - **kwargs, - }, - **extra_args, + kwargs=self.get_connection_args(), ) await pool.open() except Exception as e: diff --git a/python/semantic_kernel/connectors/memory/postgres/postgres_store.py b/python/semantic_kernel/connectors/memory/postgres/postgres_store.py index 6b6ec9ff427f..b5a1edcaf38b 100644 --- a/python/semantic_kernel/connectors/memory/postgres/postgres_store.py +++ b/python/semantic_kernel/connectors/memory/postgres/postgres_store.py @@ -4,26 +4,27 @@ import sys from typing import Any, TypeVar -from psycopg import sql -from psycopg_pool import AsyncConnectionPool - -from semantic_kernel.connectors.memory.postgres.postgres_collection import PostgresCollection -from semantic_kernel.connectors.memory.postgres.postgres_memory_store import DEFAULT_SCHEMA -from semantic_kernel.data import VectorStore, VectorStoreRecordCollection, VectorStoreRecordDefinition -from semantic_kernel.utils.feature_stage_decorator import experimental - if sys.version_info >= (3, 12): from typing import override # pragma: no cover else: from typing_extensions import override # pragma: no cover +from psycopg import sql +from psycopg_pool import AsyncConnectionPool + +from semantic_kernel.connectors.memory.postgres.postgres_collection import PostgresCollection +from semantic_kernel.connectors.memory.postgres.postgres_memory_store import DEFAULT_SCHEMA +from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition +from semantic_kernel.data.vector_storage.vector_store import VectorStore +from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) TModel = TypeVar("TModel") -@experimental +@experimental_class class PostgresStore(VectorStore): """PostgreSQL store implementation.""" diff --git a/python/semantic_kernel/connectors/memory/postgres/utils.py b/python/semantic_kernel/connectors/memory/postgres/utils.py index 874df3e6b26a..299567a57a39 100644 --- a/python/semantic_kernel/connectors/memory/postgres/utils.py +++ b/python/semantic_kernel/connectors/memory/postgres/utils.py @@ -52,9 +52,7 @@ def python_type_to_postgres(python_type_str: str) -> str | None: return None -def convert_row_to_dict( - row: tuple[Any, ...], fields: list[tuple[str, VectorStoreRecordField | None]] -) -> dict[str, Any]: +def convert_row_to_dict(row: tuple[Any, ...], fields: list[tuple[str, VectorStoreRecordField]]) -> dict[str, Any]: """Convert a row from a PostgreSQL query to a dictionary. Uses the field information to map the row values to the corresponding field names. @@ -67,12 +65,11 @@ def convert_row_to_dict( A dictionary representation of the row. 
""" - def _convert(v: Any | None, field: VectorStoreRecordField | None) -> Any | None: + def _convert(v: Any | None, field: VectorStoreRecordField) -> Any | None: if v is None: return None - if isinstance(field, VectorStoreRecordVectorField) and isinstance(v, str): - # psycopg returns vector as a string if pgvector is not loaded. - # If pgvector is registered with the connection, no conversion is required. + if isinstance(field, VectorStoreRecordVectorField): + # psycopg returns vector as a string return json.loads(v) return v @@ -112,8 +109,6 @@ def get_vector_index_ops_str(distance_function: DistanceFunction) -> str: >>> get_vector_index_ops_str(DistanceFunction.COSINE) 'vector_cosine_ops' """ - if distance_function == DistanceFunction.COSINE_DISTANCE: - return "vector_cosine_ops" if distance_function == DistanceFunction.COSINE_SIMILARITY: return "vector_cosine_ops" if distance_function == DistanceFunction.DOT_PROD: @@ -126,38 +121,6 @@ def get_vector_index_ops_str(distance_function: DistanceFunction) -> str: raise ValueError(f"Unsupported distance function: {distance_function}") -def get_vector_distance_ops_str(distance_function: DistanceFunction) -> str: - """Get the PostgreSQL distance operator string for a given distance function. - - Args: - distance_function: The distance function for which the operator string is needed. - - Note: - For the COSINE_SIMILARITY and DOT_PROD distance functions, - there is additional query steps to retrieve the correct distance. - For dot product, take -1 * inner product, as <#> returns the negative inner product - since Postgres only supports ASC order index scans on operators - For cosine similarity, take 1 - cosine distance. - - Returns: - The PostgreSQL distance operator string for the given distance function. - - Raises: - ValueError: If the distance function is unsupported. - """ - if distance_function == DistanceFunction.COSINE_DISTANCE: - return "<=>" - if distance_function == DistanceFunction.COSINE_SIMILARITY: - return "<=>" - if distance_function == DistanceFunction.DOT_PROD: - return "<#>" - if distance_function == DistanceFunction.EUCLIDEAN_DISTANCE: - return "<->" - if distance_function == DistanceFunction.MANHATTAN: - return "<+>" - raise ValueError(f"Unsupported distance function: {distance_function}") - - async def ensure_open(connection_pool: AsyncConnectionPool) -> AsyncConnectionPool: """Ensure the connection pool is open. 
diff --git a/python/semantic_kernel/connectors/memory/qdrant/qdrant_collection.py b/python/semantic_kernel/connectors/memory/qdrant/qdrant_collection.py index b3bcd7292cd9..b0cfd8244299 100644 --- a/python/semantic_kernel/connectors/memory/qdrant/qdrant_collection.py +++ b/python/semantic_kernel/connectors/memory/qdrant/qdrant_collection.py @@ -29,7 +29,7 @@ VectorStoreOperationException, ) from semantic_kernel.kernel_types import OneOrMany -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class from semantic_kernel.utils.telemetry.user_agent import APP_INFO, prepend_semantic_kernel_to_user_agent logger: logging.Logger = logging.getLogger(__name__) @@ -38,7 +38,7 @@ TKey = TypeVar("TKey", str, int) -@experimental +@experimental_class class QdrantCollection( VectorSearchBase[str | int, TModel], VectorizedSearchMixin[TModel], diff --git a/python/semantic_kernel/connectors/memory/qdrant/qdrant_memory_store.py b/python/semantic_kernel/connectors/memory/qdrant/qdrant_memory_store.py index 7c5a13e9ce8a..8e5d5d0f2166 100644 --- a/python/semantic_kernel/connectors/memory/qdrant/qdrant_memory_store.py +++ b/python/semantic_kernel/connectors/memory/qdrant/qdrant_memory_store.py @@ -17,12 +17,12 @@ from semantic_kernel.exceptions import ServiceResponseException from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class QdrantMemoryStore(MemoryStoreBase): """QdrantMemoryStore.""" diff --git a/python/semantic_kernel/connectors/memory/qdrant/qdrant_settings.py b/python/semantic_kernel/connectors/memory/qdrant/qdrant_settings.py index 7dd45b2b86a9..75cfd3f8553f 100644 --- a/python/semantic_kernel/connectors/memory/qdrant/qdrant_settings.py +++ b/python/semantic_kernel/connectors/memory/qdrant/qdrant_settings.py @@ -5,12 +5,12 @@ from pydantic import HttpUrl, SecretStr, model_validator from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class IN_MEMORY_STRING = ":memory:" -@experimental +@experimental_class class QdrantSettings(KernelBaseSettings): """Qdrant settings currently used by the Qdrant Vector Record Store.""" diff --git a/python/semantic_kernel/connectors/memory/qdrant/qdrant_store.py b/python/semantic_kernel/connectors/memory/qdrant/qdrant_store.py index 8e751e9f2048..0fd00bc59532 100644 --- a/python/semantic_kernel/connectors/memory/qdrant/qdrant_store.py +++ b/python/semantic_kernel/connectors/memory/qdrant/qdrant_store.py @@ -17,7 +17,7 @@ from semantic_kernel.data.record_definition import VectorStoreRecordDefinition from semantic_kernel.data.vector_storage import VectorStore from semantic_kernel.exceptions import VectorStoreInitializationException -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class from semantic_kernel.utils.telemetry.user_agent import APP_INFO, prepend_semantic_kernel_to_user_agent if TYPE_CHECKING: @@ -29,7 +29,7 @@ TKey = TypeVar("TKey", str, int) -@experimental +@experimental_class class QdrantStore(VectorStore): """A 
QdrantStore is a memory store that uses Qdrant as the backend.""" diff --git a/python/semantic_kernel/connectors/memory/redis/redis_collection.py b/python/semantic_kernel/connectors/memory/redis/redis_collection.py index 278551d80b9a..73f3a2bd4dea 100644 --- a/python/semantic_kernel/connectors/memory/redis/redis_collection.py +++ b/python/semantic_kernel/connectors/memory/redis/redis_collection.py @@ -53,7 +53,7 @@ VectorStoreInitializationException, VectorStoreOperationException, ) -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class from semantic_kernel.utils.list_handler import desync_list logger: logging.Logger = logging.getLogger(__name__) @@ -63,7 +63,7 @@ TQuery = TypeVar("TQuery", bound=BaseQuery) -@experimental +@experimental_class class RedisCollection(VectorSearchBase[str, TModel], VectorizedSearchMixin[TModel], VectorTextSearchMixin[TModel]): """A vector store record collection implementation using Redis.""" @@ -263,7 +263,7 @@ def _get_score_from_result(self, result: dict[str, Any]) -> float | None: return result.get("vector_distance") -@experimental +@experimental_class class RedisHashsetCollection(RedisCollection): """A vector store record collection implementation using Redis Hashsets.""" @@ -383,7 +383,7 @@ def _add_return_fields(self, query: TQuery, include_vectors: bool) -> TQuery: return query -@experimental +@experimental_class class RedisJsonCollection(RedisCollection): """A vector store record collection implementation using Redis Json.""" diff --git a/python/semantic_kernel/connectors/memory/redis/redis_memory_store.py b/python/semantic_kernel/connectors/memory/redis/redis_memory_store.py index e28335d5e022..19a361d51f50 100644 --- a/python/semantic_kernel/connectors/memory/redis/redis_memory_store.py +++ b/python/semantic_kernel/connectors/memory/redis/redis_memory_store.py @@ -25,12 +25,12 @@ from semantic_kernel.exceptions.memory_connector_exceptions import MemoryConnectorInitializationError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class RedisMemoryStore(MemoryStoreBase): """A memory store implementation using Redis.""" diff --git a/python/semantic_kernel/connectors/memory/redis/redis_settings.py b/python/semantic_kernel/connectors/memory/redis/redis_settings.py index f3aadba1bc57..62ceba3dee2f 100644 --- a/python/semantic_kernel/connectors/memory/redis/redis_settings.py +++ b/python/semantic_kernel/connectors/memory/redis/redis_settings.py @@ -5,10 +5,10 @@ from pydantic import SecretStr from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class RedisSettings(KernelBaseSettings): """Redis model settings. 
diff --git a/python/semantic_kernel/connectors/memory/redis/redis_store.py b/python/semantic_kernel/connectors/memory/redis/redis_store.py index c56625dd2ae1..8764027e0cd8 100644 --- a/python/semantic_kernel/connectors/memory/redis/redis_store.py +++ b/python/semantic_kernel/connectors/memory/redis/redis_store.py @@ -19,14 +19,14 @@ from semantic_kernel.data.record_definition import VectorStoreRecordDefinition from semantic_kernel.data.vector_storage import VectorStore, VectorStoreRecordCollection from semantic_kernel.exceptions import VectorStoreInitializationException -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) TModel = TypeVar("TModel") -@experimental +@experimental_class class RedisStore(VectorStore): """Create a Redis Vector Store.""" diff --git a/python/semantic_kernel/connectors/memory/usearch/usearch_memory_store.py b/python/semantic_kernel/connectors/memory/usearch/usearch_memory_store.py index ad21b28614f2..1d00e19d5fb6 100644 --- a/python/semantic_kernel/connectors/memory/usearch/usearch_memory_store.py +++ b/python/semantic_kernel/connectors/memory/usearch/usearch_memory_store.py @@ -21,7 +21,7 @@ ) from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) @@ -112,7 +112,7 @@ def pyarrow_table_to_memoryrecords(table: pa.Table, vectors: ndarray | None = No ] -@experimental +@experimental_class class USearchMemoryStore(MemoryStoreBase): """Memory store for searching embeddings with USearch.""" diff --git a/python/semantic_kernel/connectors/memory/weaviate/README.md b/python/semantic_kernel/connectors/memory/weaviate/README.md index 6d763825a744..18894c00b161 100644 --- a/python/semantic_kernel/connectors/memory/weaviate/README.md +++ b/python/semantic_kernel/connectors/memory/weaviate/README.md @@ -14,4 +14,4 @@ There are a few ways you can deploy your Weaviate database: ## Using the Connector -Once the Weaviate database is up and running, and the environment variables are set, you can use the connector in your Semantic Kernel application. Please refer to this sample to see how to use the connector: [Complex Connector Sample](../../../../samples/concepts/memory/complex_memory.py) \ No newline at end of file +Once the Weaviate database is up and running, and the environment variables are set, you can use the connector in your Semantic Kernel application. 
Please refer to this sample to see how to use the connector: [Weaviate Connector Sample](../../../../samples/concepts/memory/new_memory.py) \ No newline at end of file diff --git a/python/semantic_kernel/connectors/memory/weaviate/weaviate_collection.py b/python/semantic_kernel/connectors/memory/weaviate/weaviate_collection.py index e7ef424e0016..947188a3c819 100644 --- a/python/semantic_kernel/connectors/memory/weaviate/weaviate_collection.py +++ b/python/semantic_kernel/connectors/memory/weaviate/weaviate_collection.py @@ -47,7 +47,7 @@ VectorStoreOperationException, ) from semantic_kernel.kernel_types import OneOrMany -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger = logging.getLogger(__name__) @@ -55,7 +55,7 @@ TKey = TypeVar("TKey", str, int) -@experimental +@experimental_class class WeaviateCollection( VectorSearchBase[TKey, TModel], VectorizedSearchMixin[TModel], diff --git a/python/semantic_kernel/connectors/memory/weaviate/weaviate_memory_store.py b/python/semantic_kernel/connectors/memory/weaviate/weaviate_memory_store.py index ed831e1063e7..e0be96a17021 100644 --- a/python/semantic_kernel/connectors/memory/weaviate/weaviate_memory_store.py +++ b/python/semantic_kernel/connectors/memory/weaviate/weaviate_memory_store.py @@ -10,7 +10,7 @@ from semantic_kernel.exceptions.memory_connector_exceptions import MemoryConnectorInitializationError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) @@ -102,7 +102,7 @@ def remove_underscore_prefix(cls, sk_dict): return {key.lstrip("_"): value for key, value in sk_dict.items()} -@experimental +@experimental_class class WeaviateMemoryStore(MemoryStoreBase): """A memory store that uses Weaviate as the backend.""" diff --git a/python/semantic_kernel/connectors/memory/weaviate/weaviate_settings.py b/python/semantic_kernel/connectors/memory/weaviate/weaviate_settings.py index cf7a3a9c6e47..0327d9b6cbe4 100644 --- a/python/semantic_kernel/connectors/memory/weaviate/weaviate_settings.py +++ b/python/semantic_kernel/connectors/memory/weaviate/weaviate_settings.py @@ -6,10 +6,10 @@ from semantic_kernel.exceptions.service_exceptions import ServiceInvalidExecutionSettingsError from semantic_kernel.kernel_pydantic import HttpsUrl, KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class WeaviateSettings(KernelBaseSettings): """Weaviate model settings. 
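Most of the remaining hunks in this patch only swap the @experimental marker back to @experimental_class on connector classes. As a rough sketch of what such a class decorator can do (illustrative only, not necessarily the Semantic Kernel implementation), it tags the class and appends a notice to its docstring:

from typing import TypeVar

T = TypeVar("T", bound=type)

def experimental_class(cls: T) -> T:
    """Tag a class as experimental (illustrative sketch only)."""
    note = "Note: this class is experimental and may change in future releases."
    cls.__doc__ = f"{cls.__doc__}\n\n{note}" if cls.__doc__ else note
    cls.is_experimental = True
    return cls

@experimental_class
class MyConnectorSettings:
    """Settings for a hypothetical connector."""

print(MyConnectorSettings.is_experimental)  # True
print(MyConnectorSettings.__doc__)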
diff --git a/python/semantic_kernel/connectors/memory/weaviate/weaviate_store.py b/python/semantic_kernel/connectors/memory/weaviate/weaviate_store.py index dc34e0d03417..9d57a6e588c2 100644 --- a/python/semantic_kernel/connectors/memory/weaviate/weaviate_store.py +++ b/python/semantic_kernel/connectors/memory/weaviate/weaviate_store.py @@ -23,10 +23,10 @@ VectorStoreInitializationException, VectorStoreOperationException, ) -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class WeaviateStore(VectorStore): """A Weaviate store is a vector store that uses Weaviate as the backend.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/const.py b/python/semantic_kernel/connectors/openapi_plugin/const.py index 58998a797519..ac4cebb1aeab 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/const.py +++ b/python/semantic_kernel/connectors/openapi_plugin/const.py @@ -3,10 +3,10 @@ from enum import Enum -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class OperationExtensions(Enum): """The operation extensions.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_expected_response.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_expected_response.py index 9503cb77a6a8..5fee34f9e2c0 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_expected_response.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_expected_response.py @@ -1,9 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. 
-from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental + +@experimental_class class RestApiExpectedResponse: """RestApiExpectedResponse.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flow.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flow.py index c285bf599b60..2de8cc4162ec 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flow.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flow.py @@ -2,10 +2,10 @@ from dataclasses import dataclass -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class @dataclass class RestApiOAuthFlow: """Represents the OAuth flow used by the REST API.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flows.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flows.py index 596d47c904df..f739b757cb95 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flows.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_oauth_flows.py @@ -3,10 +3,10 @@ from dataclasses import dataclass from semantic_kernel.connectors.openapi_plugin.models.rest_api_oauth_flow import RestApiOAuthFlow -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class @dataclass class RestApiOAuthFlows: """Represents the OAuth flows used by the REST API.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_operation.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_operation.py index 7963c55883e8..f6150e70a0a7 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_operation.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_operation.py @@ -21,10 +21,10 @@ from semantic_kernel.connectors.openapi_plugin.models.rest_api_security_requirement import RestApiSecurityRequirement from semantic_kernel.exceptions.function_exceptions import FunctionExecutionException from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class RestApiOperation: """RestApiOperation.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter.py index d38e6bebae81..def469cbeadf 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter.py @@ -12,10 +12,10 @@ RestApiParameterStyle, ) from semantic_kernel.exceptions.function_exceptions import FunctionExecutionException -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class RestApiParameter: """RestApiParameter.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_location.py 
b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_location.py index 59c71ddb559f..25da836bd3ce 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_location.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_location.py @@ -2,10 +2,10 @@ from enum import Enum -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class RestApiParameterLocation(Enum): """The location of the REST API parameter.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_style.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_style.py index 2651a4918a43..a5db1b921f6f 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_style.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_parameter_style.py @@ -2,10 +2,10 @@ from enum import Enum -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class RestApiParameterStyle(Enum): """RestApiParameterStyle.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload.py index bc0bdbe086cf..21c7cb288500 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload.py @@ -4,10 +4,10 @@ RestApiPayloadProperty, ) from semantic_kernel.exceptions.function_exceptions import FunctionExecutionException -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class RestApiPayload: """RestApiPayload.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload_property.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload_property.py index 9889e23972a6..455609fdf927 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload_property.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_payload_property.py @@ -3,10 +3,10 @@ from typing import Any from semantic_kernel.exceptions.function_exceptions import FunctionExecutionException -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class RestApiPayloadProperty: """RestApiPayloadProperty.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_run_options.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_run_options.py index 1e4d59bbd34f..78ce7a760ca7 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_run_options.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_run_options.py @@ -1,9 +1,9 @@ # Copyright (c) Microsoft. All rights reserved. 
-from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class RestApiRunOptions: """The options for running the REST API operation.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_requirement.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_requirement.py index 7d68adbdd16e..78a07ace5da6 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_requirement.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_requirement.py @@ -1,10 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. from semantic_kernel.connectors.openapi_plugin.models.rest_api_security_scheme import RestApiSecurityScheme -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class RestApiSecurityRequirement(dict[RestApiSecurityScheme, list[str]]): """Represents the security requirements used by the REST API.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_scheme.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_scheme.py index 5274382af619..c57669b7f121 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_scheme.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_security_scheme.py @@ -4,10 +4,10 @@ from semantic_kernel.connectors.openapi_plugin.models.rest_api_parameter_location import ( RestApiParameterLocation, ) -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class RestApiSecurityScheme: """Represents the security scheme used by the REST API.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_uri.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_uri.py index aba9fde6c297..16219521870e 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_uri.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_uri.py @@ -2,10 +2,10 @@ from urllib.parse import urlparse -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class Uri: """The Uri class that represents the URI.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/openapi_function_execution_parameters.py b/python/semantic_kernel/connectors/openapi_plugin/openapi_function_execution_parameters.py index ccbe7627bd1f..d671d3b25573 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/openapi_function_execution_parameters.py +++ b/python/semantic_kernel/connectors/openapi_plugin/openapi_function_execution_parameters.py @@ -11,12 +11,12 @@ OperationSelectionPredicateContext, ) from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class AuthCallbackType = Callable[..., Awaitable[Any]] -@experimental +@experimental_class class OpenAPIFunctionExecutionParameters(KernelBaseModel): """OpenAPI function execution parameters.""" diff --git 
a/python/semantic_kernel/connectors/openapi_plugin/openapi_manager.py b/python/semantic_kernel/connectors/openapi_plugin/openapi_manager.py index 407095588ca1..e2bb641b601b 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/openapi_manager.py +++ b/python/semantic_kernel/connectors/openapi_plugin/openapi_manager.py @@ -18,7 +18,7 @@ from semantic_kernel.functions.kernel_function_from_method import KernelFunctionFromMethod from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata from semantic_kernel.schema.kernel_json_schema_builder import TYPE_MAPPING -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_function if TYPE_CHECKING: from semantic_kernel.connectors.openapi_plugin.openapi_function_execution_parameters import ( @@ -28,7 +28,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_function def create_functions_from_openapi( plugin_name: str, openapi_document_path: str | None = None, @@ -98,7 +98,7 @@ def create_functions_from_openapi( return functions -@experimental +@experimental_function def _create_function_from_operation( runner: OpenApiRunner, operation: RestApiOperation, diff --git a/python/semantic_kernel/connectors/openapi_plugin/openapi_runner.py b/python/semantic_kernel/connectors/openapi_plugin/openapi_runner.py index 9afc17eb4523..8b15ddfa5222 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/openapi_runner.py +++ b/python/semantic_kernel/connectors/openapi_plugin/openapi_runner.py @@ -19,13 +19,13 @@ from semantic_kernel.connectors.openapi_plugin.models.rest_api_run_options import RestApiRunOptions from semantic_kernel.exceptions.function_exceptions import FunctionExecutionException from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class from semantic_kernel.utils.telemetry.user_agent import APP_INFO, prepend_semantic_kernel_to_user_agent logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class OpenApiRunner: """The OpenApiRunner that runs the operations defined in the OpenAPI manifest.""" diff --git a/python/semantic_kernel/connectors/search/bing/bing_search.py b/python/semantic_kernel/connectors/search/bing/bing_search.py index 92a0e5334721..21a5ffb7c54c 100644 --- a/python/semantic_kernel/connectors/search/bing/bing_search.py +++ b/python/semantic_kernel/connectors/search/bing/bing_search.py @@ -25,7 +25,7 @@ from semantic_kernel.data.text_search.text_search_result import TextSearchResult from semantic_kernel.exceptions import ServiceInitializationError, ServiceInvalidRequestError from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT if TYPE_CHECKING: @@ -34,7 +34,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class BingSearch(KernelBaseModel, TextSearch): """A search engine connector that uses the Bing Search API to perform a web search.""" diff --git a/python/semantic_kernel/connectors/search/bing/bing_search_response.py b/python/semantic_kernel/connectors/search/bing/bing_search_response.py index 
784fecd7c859..c42248f24f17 100644 --- a/python/semantic_kernel/connectors/search/bing/bing_search_response.py +++ b/python/semantic_kernel/connectors/search/bing/bing_search_response.py @@ -6,24 +6,24 @@ from semantic_kernel.connectors.search.bing.bing_web_page import BingWebPage from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class BingWebPages(KernelBaseModel): """The web pages from a Bing search.""" id: str | None = None - some_results_removed: bool | None = Field(default=None, alias="someResultsRemoved") - total_estimated_matches: int | None = Field(default=None, alias="totalEstimatedMatches") - web_search_url: str | None = Field(default=None, alias="webSearchUrl") + some_results_removed: bool | None = Field(None, alias="someResultsRemoved") + total_estimated_matches: int | None = Field(None, alias="totalEstimatedMatches") + web_search_url: str | None = Field(None, alias="webSearchUrl") value: list[BingWebPage] = Field(default_factory=list) -@experimental +@experimental_class class BingSearchResponse(KernelBaseModel): """The response from a Bing search.""" - type_: str = Field(default="", alias="_type") + type_: str = Field("", alias="_type") query_context: dict[str, Any] = Field(default_factory=dict, validation_alias="queryContext") - web_pages: BingWebPages | None = Field(default=None, alias="webPages") + web_pages: BingWebPages | None = Field(None, alias="webPages") diff --git a/python/semantic_kernel/connectors/search/bing/bing_web_page.py b/python/semantic_kernel/connectors/search/bing/bing_web_page.py index 013462879bc6..faa8a1217e55 100644 --- a/python/semantic_kernel/connectors/search/bing/bing_web_page.py +++ b/python/semantic_kernel/connectors/search/bing/bing_web_page.py @@ -2,10 +2,10 @@ from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class BingWebPage(KernelBaseModel): """A Bing web page.""" diff --git a/python/semantic_kernel/connectors/search/google/google_search.py b/python/semantic_kernel/connectors/search/google/google_search.py index 85934804df00..6219283029f2 100644 --- a/python/semantic_kernel/connectors/search/google/google_search.py +++ b/python/semantic_kernel/connectors/search/google/google_search.py @@ -20,7 +20,7 @@ from semantic_kernel.data.text_search.text_search_result import TextSearchResult from semantic_kernel.exceptions import ServiceInitializationError, ServiceInvalidRequestError from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT if TYPE_CHECKING: @@ -29,7 +29,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class GoogleSearch(KernelBaseModel, TextSearch): """A search engine connector that uses the Google Search API to perform a web search.""" diff --git a/python/semantic_kernel/connectors/search/google/google_search_response.py b/python/semantic_kernel/connectors/search/google/google_search_response.py index 06c34f8f4bbf..df76418ae18a 100644 --- 
a/python/semantic_kernel/connectors/search/google/google_search_response.py +++ b/python/semantic_kernel/connectors/search/google/google_search_response.py @@ -6,10 +6,10 @@ from semantic_kernel.connectors.search.google.google_search_result import GoogleSearchResult from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class GoogleSearchInformation(KernelBaseModel): """Information about the search.""" @@ -19,7 +19,7 @@ class GoogleSearchInformation(KernelBaseModel): formatted_total_results: str = Field(alias="formattedTotalResults") -@experimental +@experimental_class class GoogleSearchResponse(KernelBaseModel): """The response from a Google search.""" diff --git a/python/semantic_kernel/connectors/search/google/google_search_result.py b/python/semantic_kernel/connectors/search/google/google_search_result.py index ce273ef208af..d2e68a67848d 100644 --- a/python/semantic_kernel/connectors/search/google/google_search_result.py +++ b/python/semantic_kernel/connectors/search/google/google_search_result.py @@ -5,25 +5,25 @@ from pydantic import Field from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class GoogleSearchResult(KernelBaseModel): """A Google web page.""" kind: str = "" title: str = "" - html_title: str = Field(default="", alias="htmlTitle") + html_title: str = Field("", alias="htmlTitle") link: str = "" - display_link: str = Field(default="", alias="displayLink") + display_link: str = Field("", alias="displayLink") snippet: str = "" - html_snippet: str = Field(default="", alias="htmlSnippet") - cache_id: str = Field(default="", alias="cacheId") - formatted_url: str = Field(default="", alias="formattedUrl") - html_formatted_url: str = Field(default="", alias="htmlFormattedUrl") + html_snippet: str = Field("", alias="htmlSnippet") + cache_id: str = Field("", alias="cacheId") + formatted_url: str = Field("", alias="formattedUrl") + html_formatted_url: str = Field("", alias="htmlFormattedUrl") pagemap: dict[str, Any] = Field(default_factory=dict) mime: str = "" - file_format: str = Field(default="", alias="fileFormat") + file_format: str = Field("", alias="fileFormat") image: dict[str, Any] = Field(default_factory=dict) labels: list[dict[str, Any]] = Field(default_factory=list) diff --git a/python/semantic_kernel/connectors/search_engine/google_connector.py b/python/semantic_kernel/connectors/search_engine/google_connector.py index a4b2d70a8bd9..9999638a1014 100644 --- a/python/semantic_kernel/connectors/search_engine/google_connector.py +++ b/python/semantic_kernel/connectors/search_engine/google_connector.py @@ -38,7 +38,7 @@ def __init__( """ try: self._settings = GoogleSearchSettings.create( - search_api_key=api_key, + api_key=api_key, search_engine_id=search_engine_id, env_file_path=env_file_path, env_file_encoding=env_file_encoding, diff --git a/python/semantic_kernel/contents/__init__.py b/python/semantic_kernel/contents/__init__.py index cb69b29ac6c3..c326115ccd86 100644 --- a/python/semantic_kernel/contents/__init__.py +++ b/python/semantic_kernel/contents/__init__.py @@ -4,22 +4,11 @@ from semantic_kernel.contents.audio_content import AudioContent from semantic_kernel.contents.chat_history import 
ChatHistory from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.file_reference_content import FileReferenceContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.contents.history_reducer.chat_history_reducer import ChatHistoryReducer from semantic_kernel.contents.history_reducer.chat_history_summarization_reducer import ChatHistorySummarizationReducer from semantic_kernel.contents.history_reducer.chat_history_truncation_reducer import ChatHistoryTruncationReducer from semantic_kernel.contents.image_content import ImageContent -from semantic_kernel.contents.realtime_events import ( - RealtimeAudioEvent, - RealtimeEvent, - RealtimeEvents, - RealtimeFunctionCallEvent, - RealtimeFunctionResultEvent, - RealtimeImageEvent, - RealtimeTextEvent, -) from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent @@ -33,22 +22,13 @@ "AudioContent", "AuthorRole", "ChatHistory", - "ChatHistoryReducer", "ChatHistorySummarizationReducer", "ChatHistoryTruncationReducer", "ChatMessageContent", - "FileReferenceContent", "FinishReason", "FunctionCallContent", "FunctionResultContent", "ImageContent", - "RealtimeAudioEvent", - "RealtimeEvent", - "RealtimeEvents", - "RealtimeFunctionCallEvent", - "RealtimeFunctionResultEvent", - "RealtimeImageEvent", - "RealtimeTextEvent", "StreamingAnnotationContent", "StreamingChatMessageContent", "StreamingFileReferenceContent", diff --git a/python/semantic_kernel/contents/annotation_content.py b/python/semantic_kernel/contents/annotation_content.py index 64ed9ffe4c4b..a33f8014d694 100644 --- a/python/semantic_kernel/contents/annotation_content.py +++ b/python/semantic_kernel/contents/annotation_content.py @@ -8,14 +8,14 @@ from semantic_kernel.contents.const import ANNOTATION_CONTENT_TAG, ContentTypes from semantic_kernel.contents.kernel_content import KernelContent -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger = logging.getLogger(__name__) _T = TypeVar("_T", bound="AnnotationContent") -@experimental +@experimental_class class AnnotationContent(KernelContent): """Annotation content.""" diff --git a/python/semantic_kernel/contents/audio_content.py b/python/semantic_kernel/contents/audio_content.py index 12bb47af9f64..8ee4197aaa8f 100644 --- a/python/semantic_kernel/contents/audio_content.py +++ b/python/semantic_kernel/contents/audio_content.py @@ -3,17 +3,16 @@ import mimetypes from typing import Any, ClassVar, Literal, TypeVar -from numpy import ndarray from pydantic import Field from semantic_kernel.contents.binary_content import BinaryContent from semantic_kernel.contents.const import AUDIO_CONTENT_TAG, ContentTypes -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class _T = TypeVar("_T", bound="AudioContent") -@experimental +@experimental_class class AudioContent(BinaryContent): """Audio Content class. @@ -39,45 +38,11 @@ class AudioContent(BinaryContent): metadata (dict[str, Any]): Any metadata that should be attached to the response. 
""" - content_type: Literal[ContentTypes.AUDIO_CONTENT] = Field(default=AUDIO_CONTENT_TAG, init=False) # type: ignore + content_type: Literal[ContentTypes.AUDIO_CONTENT] = Field(AUDIO_CONTENT_TAG, init=False) # type: ignore tag: ClassVar[str] = AUDIO_CONTENT_TAG - def __init__( - self, - uri: str | None = None, - data_uri: str | None = None, - data: str | bytes | ndarray | None = None, - data_format: str | None = None, - mime_type: str | None = None, - **kwargs: Any, - ): - """Create an Audio Content object, either from a data_uri or data. - - Args: - uri: The reference uri of the content. - data_uri: The data uri of the content. - data: The data of the content. - data_format: The format of the data (e.g. base64). - mime_type: The mime type of the audio, only used with data. - kwargs: Any additional arguments: - inner_content: The inner content of the response, - this should hold all the information from the response so even - when not creating a subclass a developer - can leverage the full thing. - ai_model_id: The id of the AI model that generated this response. - metadata: Any metadata that should be attached to the response. - """ - super().__init__( - uri=uri, - data_uri=data_uri, - data=data, - data_format=data_format, - mime_type=mime_type, - **kwargs, - ) - @classmethod - def from_audio_file(cls: type[_T], path: str) -> _T: + def from_audio_file(cls: type[_T], path: str) -> "AudioContent": """Create an instance from an audio file.""" mime_type = mimetypes.guess_type(path)[0] with open(path, "rb") as audio_file: diff --git a/python/semantic_kernel/contents/binary_content.py b/python/semantic_kernel/contents/binary_content.py index aa161f78755f..a36535b0c120 100644 --- a/python/semantic_kernel/contents/binary_content.py +++ b/python/semantic_kernel/contents/binary_content.py @@ -2,19 +2,17 @@ import logging import os -from pathlib import Path from typing import Annotated, Any, ClassVar, Literal, TypeVar from xml.etree.ElementTree import Element # nosec -from numpy import ndarray -from pydantic import Field, FilePath, PrivateAttr, UrlConstraints, computed_field +from pydantic import Field, FilePath, UrlConstraints, computed_field from pydantic_core import Url from semantic_kernel.contents.const import BINARY_CONTENT_TAG, ContentTypes from semantic_kernel.contents.kernel_content import KernelContent from semantic_kernel.contents.utils.data_uri import DataUri -from semantic_kernel.exceptions.content_exceptions import ContentException, ContentInitializationError -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.exceptions.content_exceptions import ContentInitializationError +from semantic_kernel.utils.experimental_decorator import experimental_class logger = logging.getLogger(__name__) @@ -23,7 +21,7 @@ DataUrl = Annotated[Url, UrlConstraints(allowed_schemes=["data"])] -@experimental +@experimental_class class BinaryContent(KernelContent): """This is a base class for different types of binary content. 
@@ -40,18 +38,17 @@ class BinaryContent(KernelContent): """ - content_type: Literal[ContentTypes.BINARY_CONTENT] = Field(default=BINARY_CONTENT_TAG, init=False) # type: ignore + content_type: Literal[ContentTypes.BINARY_CONTENT] = Field(BINARY_CONTENT_TAG, init=False) # type: ignore uri: Url | str | None = None - default_mime_type: ClassVar[str] = "text/plain" tag: ClassVar[str] = BINARY_CONTENT_TAG - _data_uri: DataUri | None = PrivateAttr(default=None) + _data_uri: DataUri | None = None def __init__( self, uri: Url | str | None = None, data_uri: DataUrl | str | None = None, - data: str | bytes | ndarray | None = None, + data: str | bytes | None = None, data_format: str | None = None, mime_type: str | None = None, **kwargs: Any, @@ -59,49 +56,43 @@ def __init__( """Create a Binary Content object, either from a data_uri or data. Args: - uri: The reference uri of the content. - data_uri: The data uri of the content. - data: The data of the content. - data_format: The format of the data (e.g. base64). - mime_type: The mime type of the content, not always relevant. - kwargs: Any additional arguments: - inner_content: The inner content of the response, + uri (Url | str | None): The reference uri of the content. + data_uri (DataUrl | None): The data uri of the content. + data (str | bytes | None): The data of the content. + data_format (str | None): The format of the data (e.g. base64). + mime_type (str | None): The mime type of the image, only used with data. + kwargs (Any): Any additional arguments: + inner_content (Any): The inner content of the response, this should hold all the information from the response so even when not creating a subclass a developer can leverage the full thing. - ai_model_id: The id of the AI model that generated this response. - metadata: Any metadata that should be attached to the response. + ai_model_id (str | None): The id of the AI model that generated this response. + metadata (dict[str, Any]): Any metadata that should be attached to the response. 
""" - temp_data_uri: DataUri | None = None + _data_uri = None if data_uri: - temp_data_uri = DataUri.from_data_uri(data_uri, self.default_mime_type) - kwargs.setdefault("metadata", {}) - kwargs["metadata"].update(temp_data_uri.parameters) - elif data is not None: - match data: - case bytes(): - temp_data_uri = DataUri( - data_bytes=data, data_format=data_format, mime_type=mime_type or self.default_mime_type - ) - case ndarray(): - temp_data_uri = DataUri( - data_array=data, data_format=data_format, mime_type=mime_type or self.default_mime_type - ) - case str(): - temp_data_uri = DataUri( - data_str=data, data_format=data_format, mime_type=mime_type or self.default_mime_type - ) + _data_uri = DataUri.from_data_uri(data_uri, self.default_mime_type) + if "metadata" in kwargs: + kwargs["metadata"].update(_data_uri.parameters) + else: + kwargs["metadata"] = _data_uri.parameters + elif data: + if isinstance(data, str): + _data_uri = DataUri( + data_str=data, data_format=data_format, mime_type=mime_type or self.default_mime_type + ) + else: + _data_uri = DataUri( + data_bytes=data, data_format=data_format, mime_type=mime_type or self.default_mime_type + ) if uri is not None: if isinstance(uri, str) and os.path.exists(uri): - if os.path.isfile(uri): - uri = str(Path(uri)) - else: - raise ContentInitializationError("URI must be a file path, not a directory.") + uri = str(FilePath(uri)) elif isinstance(uri, str): uri = Url(uri) super().__init__(uri=uri, **kwargs) - self._data_uri = temp_data_uri + self._data_uri = _data_uri @computed_field # type: ignore @property @@ -114,43 +105,28 @@ def data_uri(self) -> str: @data_uri.setter def data_uri(self, value: str): """Set the data uri.""" - if not self._data_uri: - self._data_uri = DataUri.from_data_uri(value, self.default_mime_type) - else: - self._data_uri.update_data(value) + self._data_uri = DataUri.from_data_uri(value) self.metadata.update(self._data_uri.parameters) @property - def data_string(self) -> str: - """Returns the data as a string, using the data format.""" - if self._data_uri: - return self._data_uri._data_str() - return "" - - @property - def data(self) -> bytes | ndarray: + def data(self) -> bytes: """Get the data.""" - if self._data_uri and self._data_uri.data_array is not None: - return self._data_uri.data_array.tobytes() if self._data_uri and self._data_uri.data_bytes: return self._data_uri.data_bytes + if self._data_uri and self._data_uri.data_str: + return self._data_uri.data_str.encode("utf-8") return b"" @data.setter - def data(self, value: str | bytes | ndarray): + def data(self, value: str | bytes): """Set the data.""" if self._data_uri: self._data_uri.update_data(value) - return - match value: - case ndarray(): - self._data_uri = DataUri(data_array=value, mime_type=self.mime_type) - case str(): + else: + if isinstance(value, str): self._data_uri = DataUri(data_str=value, mime_type=self.mime_type) - case bytes(): + else: self._data_uri = DataUri(data_bytes=value, mime_type=self.mime_type) - case _: - raise ContentException("Data must be a string, bytes, or numpy array.") @property def mime_type(self) -> str: @@ -191,11 +167,7 @@ def from_element(cls: type[_T], element: Element) -> _T: def write_to_file(self, path: str | FilePath) -> None: """Write the data to a file.""" - if self._data_uri and self._data_uri.data_array is not None: - self._data_uri.data_array.tofile(path) - return with open(path, "wb") as file: - assert isinstance(self.data, bytes) # nosec file.write(self.data) def to_dict(self) -> dict[str, Any]: diff --git 
a/python/semantic_kernel/contents/chat_history.py b/python/semantic_kernel/contents/chat_history.py index 53d49d2d80e4..5013aae0e073 100644 --- a/python/semantic_kernel/contents/chat_history.py +++ b/python/semantic_kernel/contents/chat_history.py @@ -1,14 +1,14 @@ # Copyright (c) Microsoft. All rights reserved. import logging -from collections.abc import Generator, Iterable +from collections.abc import Generator from functools import singledispatchmethod from html import unescape -from typing import Any, TypeVar +from typing import Any from xml.etree.ElementTree import Element, tostring # nosec from defusedxml.ElementTree import XML, ParseError -from pydantic import Field, field_validator, model_validator +from pydantic import field_validator from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.const import CHAT_HISTORY_TAG, CHAT_MESSAGE_CONTENT_TAG @@ -19,37 +19,56 @@ logger = logging.getLogger(__name__) -_T = TypeVar("_T", bound="ChatHistory") - class ChatHistory(KernelBaseModel): """This class holds the history of chat messages from a chat conversation. - Note: the system_message is added to the messages as a ChatMessageContent instance with role=AuthorRole.SYSTEM, - but updating it will not update the messages list. + Note: the constructor takes a system_message parameter, which is not part + of the class definition. This is to allow the system_message to be passed in + as a keyword argument, but not be part of the class definition. - Args: - messages: The messages to add to the chat history. - system_message: A system message to add to the chat history, optional. - if passed, it is added to the messages - as a ChatMessageContent instance with role=AuthorRole.SYSTEM - before any other messages. + Attributes: + messages (List[ChatMessageContent]): The list of chat messages in the history. """ - messages: list[ChatMessageContent] = Field(default_factory=list, kw_only=False) - system_message: str | None = Field(default=None, kw_only=False, repr=False) + messages: list[ChatMessageContent] + + def __init__(self, **data: Any): + """Initializes a new instance of the ChatHistory class. + + Optionally incorporating a message and/or a system message at the beginning of the chat history. + + This constructor allows for flexible initialization with chat messages and an optional messages or a + system message. If both 'messages' (a list of ChatMessageContent instances) and 'system_message' are + provided, the 'system_message' is prepended to the list of messages, ensuring it appears as the first + message in the history. If only 'system_message' is provided without any 'messages', the chat history is + initialized with the 'system_message' as its first item. If 'messages' are provided without a + 'system_message', the chat history is initialized with the provided messages as is. + + Note: The 'system_message' is not retained as part of the class's attributes; it's used during + initialization and then discarded. The rest of the keyword arguments are passed to the superclass + constructor and handled according to the Pydantic model's behavior. + + Args: + **data: Arbitrary keyword arguments. + The constructor looks for two optional keys: + - 'messages': Optional[List[ChatMessageContent]], a list of chat messages to include in the history. + - 'system_message' Optional[str]: An optional string representing a system-generated message to be + included at the start of the chat history. 
+ + """ + system_message_content = data.pop("system_message", None) + + if system_message_content: + system_message = ChatMessageContent(role=AuthorRole.SYSTEM, content=system_message_content) - @model_validator(mode="before") - @classmethod - def _parse_system_message(cls, data: Any) -> Any: - """Parse the system_message and add it to the messages.""" - if isinstance(data, dict) and (system_message := data.pop("system_message", None)): - msg = ChatMessageContent(role=AuthorRole.SYSTEM, content=system_message) if "messages" in data: - data["messages"] = [msg] + data["messages"] + data["messages"] = [system_message] + data["messages"] else: - data["messages"] = [msg] - return data + data["messages"] = [system_message] + if "messages" not in data: + data["messages"] = [] + super().__init__(**data) @field_validator("messages", mode="before") @classmethod @@ -66,107 +85,76 @@ def _validate_messages(cls, messages: list[ChatMessageContent]) -> list[ChatMess @singledispatchmethod def add_system_message(self, content: str | list[KernelContent], **kwargs) -> None: - """Add a system message to the chat history. - - Args: - content: The content of the system message, can be a string or a - list of KernelContent instances that are turned into a single ChatMessageContent. - **kwargs: Additional keyword arguments. - """ + """Add a system message to the chat history.""" raise NotImplementedError @add_system_message.register - def _(self, content: str, **kwargs: Any) -> None: + def add_system_message_str(self, content: str, **kwargs: Any) -> None: """Add a system message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.SYSTEM, content=content, **kwargs)) @add_system_message.register(list) - def _(self, content: list[KernelContent], **kwargs: Any) -> None: + def add_system_message_list(self, content: list[KernelContent], **kwargs: Any) -> None: """Add a system message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.SYSTEM, items=content, **kwargs)) @singledispatchmethod def add_developer_message(self, content: str | list[KernelContent], **kwargs) -> None: - """Add a system message to the chat history. - - Args: - content: The content of the developer message, can be a string or a - list of KernelContent instances that are turned into a single ChatMessageContent. - **kwargs: Additional keyword arguments. - """ + """Add a system message to the chat history.""" raise NotImplementedError @add_developer_message.register - def _(self, content: str, **kwargs: Any) -> None: + def add_developer_message_str(self, content: str, **kwargs: Any) -> None: """Add a system message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.DEVELOPER, content=content, **kwargs)) @add_developer_message.register(list) - def _(self, content: list[KernelContent], **kwargs: Any) -> None: + def add_developer_message_list(self, content: list[KernelContent], **kwargs: Any) -> None: """Add a system message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.DEVELOPER, items=content, **kwargs)) @singledispatchmethod def add_user_message(self, content: str | list[KernelContent], **kwargs: Any) -> None: - """Add a user message to the chat history. - - Args: - content: The content of the user message, can be a string or a - list of KernelContent instances that are turned into a single ChatMessageContent. - **kwargs: Additional keyword arguments. 
- - """ + """Add a user message to the chat history.""" raise NotImplementedError @add_user_message.register - def _(self, content: str, **kwargs: Any) -> None: + def add_user_message_str(self, content: str, **kwargs: Any) -> None: """Add a user message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.USER, content=content, **kwargs)) @add_user_message.register(list) - def _(self, content: list[KernelContent], **kwargs: Any) -> None: + def add_user_message_list(self, content: list[KernelContent], **kwargs: Any) -> None: """Add a user message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.USER, items=content, **kwargs)) @singledispatchmethod def add_assistant_message(self, content: str | list[KernelContent], **kwargs: Any) -> None: - """Add an assistant message to the chat history. - - Args: - content: The content of the assistant message, can be a string or a - list of KernelContent instances that are turned into a single ChatMessageContent. - **kwargs: Additional keyword arguments. - """ + """Add an assistant message to the chat history.""" raise NotImplementedError @add_assistant_message.register - def _(self, content: str, **kwargs: Any) -> None: + def add_assistant_message_str(self, content: str, **kwargs: Any) -> None: """Add an assistant message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.ASSISTANT, content=content, **kwargs)) @add_assistant_message.register(list) - def _(self, content: list[KernelContent], **kwargs: Any) -> None: + def add_assistant_message_list(self, content: list[KernelContent], **kwargs: Any) -> None: """Add an assistant message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.ASSISTANT, items=content, **kwargs)) @singledispatchmethod def add_tool_message(self, content: str | list[KernelContent], **kwargs: Any) -> None: - """Add a tool message to the chat history. - - Args: - content: The content of the tool message, can be a string or a - list of KernelContent instances that are turned into a single ChatMessageContent. - **kwargs: Additional keyword arguments. - """ + """Add a tool message to the chat history.""" raise NotImplementedError @add_tool_message.register - def _(self, content: str, **kwargs: Any) -> None: + def add_tool_message_str(self, content: str, **kwargs: Any) -> None: """Add a tool message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.TOOL, content=content, **kwargs)) @add_tool_message.register(list) - def _(self, content: list[KernelContent], **kwargs: Any) -> None: + def add_tool_message_list(self, content: list[KernelContent], **kwargs: Any) -> None: """Add a tool message to the chat history.""" self.add_message(message=self._prepare_for_add(role=AuthorRole.TOOL, items=content, **kwargs)) @@ -257,31 +245,6 @@ def __str__(self) -> str: chat_history_xml.append(message.to_element()) return tostring(chat_history_xml, encoding="unicode", short_empty_elements=True) - def clear(self) -> None: - """Clear the chat history.""" - self.messages.clear() - - def extend(self, messages: Iterable[ChatMessageContent]) -> None: - """Extend the chat history with a list of messages. - - Args: - messages: The messages to add to the history. - Can be a list of ChatMessageContent instances or a ChatHistory itself. 
- """ - self.messages.extend(messages) - - def replace(self, messages: Iterable[ChatMessageContent]) -> None: - """Replace the chat history with a list of messages. - - This calls clear() and then extend(messages=messages). - - Args: - messages: The messages to add to the history. - Can be a list of ChatMessageContent instances or a ChatHistory itself. - """ - self.clear() - self.extend(messages=messages) - def to_prompt(self) -> str: """Return a string representation of the history.""" chat_history_xml = Element(CHAT_HISTORY_TAG) @@ -301,7 +264,7 @@ def __eq__(self, other: Any) -> bool: return self.messages == other.messages @classmethod - def from_rendered_prompt(cls: type[_T], rendered_prompt: str) -> _T: + def from_rendered_prompt(cls, rendered_prompt: str) -> "ChatHistory": """Create a ChatHistory instance from a rendered prompt. Args: @@ -342,12 +305,12 @@ def serialize(self) -> str: ValueError: If the ChatHistory instance cannot be serialized to JSON. """ try: - return self.model_dump_json(exclude_none=True, indent=2) + return self.model_dump_json(indent=2, exclude_none=True) except Exception as e: # pragma: no cover raise ContentSerializationError(f"Unable to serialize ChatHistory to JSON: {e}") from e @classmethod - def restore_chat_history(cls: type[_T], chat_history_json: str) -> _T: + def restore_chat_history(cls, chat_history_json: str) -> "ChatHistory": """Restores a ChatHistory instance from a JSON string. Args: @@ -362,30 +325,26 @@ def restore_chat_history(cls: type[_T], chat_history_json: str) -> _T: fails validation. """ try: - return cls.model_validate_json(chat_history_json) + return ChatHistory.model_validate_json(chat_history_json) except Exception as e: raise ContentInitializationError(f"Invalid JSON format: {e}") def store_chat_history_to_file(self, file_path: str) -> None: """Stores the serialized ChatHistory to a file. - Uses mode "w" which means the file is created if it does not exist and gets truncated if it does. - Args: - file_path: The path to the file where the serialized data will be stored. + file_path (str): The path to the file where the serialized data will be stored. """ json_str = self.serialize() - with open(file_path, "w") as local_file: - local_file.write(json_str) + with open(file_path, "w") as file: + file.write(json_str) @classmethod - def load_chat_history_from_file(cls: type[_T], file_path: str) -> _T: + def load_chat_history_from_file(cls, file_path: str) -> "ChatHistory": """Loads the ChatHistory from a file. - Uses mode "r" which means it can only be read if it exists. - Args: - file_path: The path to the file from which to load the ChatHistory. + file_path (str): The path to the file from which to load the ChatHistory. Returns: ChatHistory: The deserialized ChatHistory instance. 
diff --git a/python/semantic_kernel/contents/chat_message_content.py b/python/semantic_kernel/contents/chat_message_content.py index 829b3f5c6aed..b369038cdceb 100644 --- a/python/semantic_kernel/contents/chat_message_content.py +++ b/python/semantic_kernel/contents/chat_message_content.py @@ -10,8 +10,6 @@ from pydantic import Field from semantic_kernel.contents.annotation_content import AnnotationContent -from semantic_kernel.contents.audio_content import AudioContent -from semantic_kernel.contents.binary_content import BinaryContent from semantic_kernel.contents.const import ( ANNOTATION_CONTENT_TAG, CHAT_MESSAGE_CONTENT_TAG, @@ -35,7 +33,6 @@ from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.contents.utils.finish_reason import FinishReason -from semantic_kernel.contents.utils.hashing import make_hashable from semantic_kernel.exceptions.content_exceptions import ContentInitializationError TAG_CONTENT_MAP = { @@ -49,9 +46,8 @@ STREAMING_ANNOTATION_CONTENT_TAG: StreamingAnnotationContent, } -CMC_ITEM_TYPES = Annotated[ +ITEM_TYPES = ( AnnotationContent - | BinaryContent | ImageContent | TextContent | FunctionResultContent @@ -59,10 +55,7 @@ | FileReferenceContent | StreamingAnnotationContent | StreamingFileReferenceContent - | AudioContent, - Field(discriminator=DISCRIMINATOR_FIELD), -] - +) logger = logging.getLogger(__name__) @@ -87,11 +80,11 @@ class ChatMessageContent(KernelContent): __str__: Returns the content of the response. """ - content_type: Literal[ContentTypes.CHAT_MESSAGE_CONTENT] = Field(default=CHAT_MESSAGE_CONTENT_TAG, init=False) # type: ignore + content_type: Literal[ContentTypes.CHAT_MESSAGE_CONTENT] = Field(CHAT_MESSAGE_CONTENT_TAG, init=False) # type: ignore tag: ClassVar[str] = CHAT_MESSAGE_CONTENT_TAG role: AuthorRole name: str | None = None - items: list[CMC_ITEM_TYPES] = Field(default_factory=list) + items: list[Annotated[ITEM_TYPES, Field(discriminator=DISCRIMINATOR_FIELD)]] = Field(default_factory=list) encoding: str | None = None finish_reason: FinishReason | None = None @@ -99,7 +92,7 @@ class ChatMessageContent(KernelContent): def __init__( self, role: AuthorRole, - items: list[CMC_ITEM_TYPES], + items: list[ITEM_TYPES], name: str | None = None, inner_content: Any | None = None, encoding: str | None = None, @@ -126,7 +119,7 @@ def __init__( def __init__( # type: ignore self, role: AuthorRole, - items: list[CMC_ITEM_TYPES] | None = None, + items: list[ITEM_TYPES] | None = None, content: str | None = None, inner_content: Any | None = None, name: str | None = None, @@ -322,5 +315,4 @@ def _parse_items(self) -> str | list[dict[str, Any]]: def __hash__(self) -> int: """Return the hash of the chat message content.""" - hashable_items = [make_hashable(item) for item in self.items] if self.items else [] - return hash((self.tag, self.role, self.content, self.encoding, self.finish_reason, *hashable_items)) + return hash((self.tag, self.role, self.content, self.encoding, self.finish_reason, *self.items)) diff --git a/python/semantic_kernel/contents/file_reference_content.py b/python/semantic_kernel/contents/file_reference_content.py index 8fa87503393e..99cd15f341ef 100644 --- a/python/semantic_kernel/contents/file_reference_content.py +++ b/python/semantic_kernel/contents/file_reference_content.py @@ -8,22 +8,20 @@ from semantic_kernel.contents.const import FILE_REFERENCE_CONTENT_TAG, ContentTypes from semantic_kernel.contents.kernel_content import KernelContent -from 
semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger = logging.getLogger(__name__) _T = TypeVar("_T", bound="FileReferenceContent") -@experimental +@experimental_class class FileReferenceContent(KernelContent): """File reference content.""" content_type: Literal[ContentTypes.FILE_REFERENCE_CONTENT] = Field(FILE_REFERENCE_CONTENT_TAG, init=False) # type: ignore tag: ClassVar[str] = FILE_REFERENCE_CONTENT_TAG file_id: str | None = None - tools: list[Any] = Field(default_factory=list) - data_source: Any | None = None def __str__(self) -> str: """Return the string representation of the file reference content.""" diff --git a/python/semantic_kernel/contents/function_call_content.py b/python/semantic_kernel/contents/function_call_content.py index 863ba6dfbaf7..7067311f4c8a 100644 --- a/python/semantic_kernel/contents/function_call_content.py +++ b/python/semantic_kernel/contents/function_call_content.py @@ -45,6 +45,7 @@ class FunctionCallContent(KernelContent): def __init__( self, + content_type: Literal[ContentTypes.FUNCTION_CALL_CONTENT] = FUNCTION_CALL_CONTENT_TAG, # type: ignore inner_content: Any | None = None, ai_model_id: str | None = None, id: str | None = None, @@ -59,6 +60,7 @@ def __init__( """Create function call content. Args: + content_type: The content type. inner_content (Any | None): The inner content. ai_model_id (str | None): The id of the AI model. id (str | None): The id of the function call. @@ -81,6 +83,7 @@ def __init__( else: function_name = name args = { + "content_type": content_type, "inner_content": inner_content, "ai_model_id": ai_model_id, "id": id, @@ -121,7 +124,6 @@ def __add__(self, other: "FunctionCallContent | None") -> "FunctionCallContent": index=self.index or other.index, name=self.name or other.name, arguments=self.combine_arguments(self.arguments, other.arguments), - metadata=self.metadata | other.metadata, ) def combine_arguments( diff --git a/python/semantic_kernel/contents/function_result_content.py b/python/semantic_kernel/contents/function_result_content.py index b1d36b2bd5f8..536fb4ff19ce 100644 --- a/python/semantic_kernel/contents/function_result_content.py +++ b/python/semantic_kernel/contents/function_result_content.py @@ -12,7 +12,6 @@ from semantic_kernel.contents.kernel_content import KernelContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.contents.utils.hashing import make_hashable from semantic_kernel.exceptions.content_exceptions import ContentInitializationError if TYPE_CHECKING: @@ -42,6 +41,7 @@ class FunctionResultContent(KernelContent): def __init__( self, + content_type: Literal[ContentTypes.FUNCTION_RESULT_CONTENT] = FUNCTION_RESULT_CONTENT_TAG, # type: ignore inner_content: Any | None = None, ai_model_id: str | None = None, id: str | None = None, @@ -56,6 +56,7 @@ def __init__( """Create function result content. Args: + content_type: The content type. inner_content (Any | None): The inner content. ai_model_id (str | None): The id of the AI model. id (str | None): The id of the function call that the result relates to. 
@@ -78,6 +79,7 @@ def __init__( else: function_name = name args = { + "content_type": content_type, "inner_content": inner_content, "ai_model_id": ai_model_id, "id": id, @@ -192,11 +194,10 @@ def serialize_result(self, value: Any) -> str: def __hash__(self) -> int: """Return the hash of the function result content.""" - hashable_result = make_hashable(self.result) return hash(( self.tag, self.id, - hashable_result, + tuple(self.result) if isinstance(self.result, list) else self.result, self.name, self.function_name, self.plugin_name, diff --git a/python/semantic_kernel/contents/history_reducer/chat_history_reducer.py b/python/semantic_kernel/contents/history_reducer/chat_history_reducer.py index ed7d6deee70a..bc05c705ceda 100644 --- a/python/semantic_kernel/contents/history_reducer/chat_history_reducer.py +++ b/python/semantic_kernel/contents/history_reducer/chat_history_reducer.py @@ -2,7 +2,6 @@ import sys from abc import ABC, abstractmethod -from typing import Any if sys.version < "3.11": from typing_extensions import Self # pragma: no cover @@ -12,21 +11,15 @@ from pydantic import Field from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.exceptions.content_exceptions import ContentInitializationError -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class ChatHistoryReducer(ChatHistory, ABC): """Defines a contract for reducing chat history.""" target_count: int = Field(..., gt=0, description="Target message count.") - threshold_count: int = Field(default=0, ge=0, description="Threshold count to avoid orphaning messages.") - auto_reduce: bool = Field( - default=False, - description="Whether to automatically reduce the chat history, this happens when using add_message_async.", - ) + threshold_count: int = Field(0, ge=0, description="Threshold count to avoid orphaning messages.") @abstractmethod async def reduce(self) -> Self | None: @@ -36,28 +29,3 @@ async def reduce(self) -> Self | None: A possibly shorter list of messages, or None if no change is needed. """ ... - - async def add_message_async( - self, - message: ChatMessageContent | dict[str, Any], - encoding: str | None = None, - metadata: dict[str, Any] | None = None, - ) -> None: - """Add a message to the chat history. - - If auto_reduce is enabled, the history will be reduced after adding the message. - """ - if isinstance(message, ChatMessageContent): - self.messages.append(message) - if self.auto_reduce: - await self.reduce() - return - if "role" not in message: - raise ContentInitializationError(f"Dictionary must contain at least the role. 
Got: {message}") - if encoding: - message["encoding"] = encoding - if metadata: - message["metadata"] = metadata - self.messages.append(ChatMessageContent(**message)) - if self.auto_reduce: - await self.reduce() diff --git a/python/semantic_kernel/contents/history_reducer/chat_history_reducer_utils.py b/python/semantic_kernel/contents/history_reducer/chat_history_reducer_utils.py index dd82689cc1ad..6742c0b56816 100644 --- a/python/semantic_kernel/contents/history_reducer/chat_history_reducer_utils.py +++ b/python/semantic_kernel/contents/history_reducer/chat_history_reducer_utils.py @@ -7,7 +7,7 @@ from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_function logger = logging.getLogger(__name__) @@ -15,7 +15,7 @@ SUMMARY_METADATA_KEY = "__summary__" -@experimental +@experimental_function def get_call_result_pairs(history: list[ChatMessageContent]) -> list[tuple[int, int]]: """Identify all (FunctionCallContent, FunctionResultContent) pairs in the history. @@ -45,7 +45,7 @@ def get_call_result_pairs(history: list[ChatMessageContent]) -> list[tuple[int, return pairs -@experimental +@experimental_function def locate_summarization_boundary(history: list[ChatMessageContent]) -> int: """Identify the index of the first message that is not a summary message. @@ -60,7 +60,7 @@ def locate_summarization_boundary(history: list[ChatMessageContent]) -> int: return len(history) -@experimental +@experimental_function def locate_safe_reduction_index( history: list[ChatMessageContent], target_count: int, @@ -96,11 +96,10 @@ def locate_safe_reduction_index( message_index = total_count - target_count # Move backward to avoid cutting function calls / results - # also skip over developer/system messages while message_index >= offset_count: - if history[message_index].role not in (AuthorRole.DEVELOPER, AuthorRole.SYSTEM): - break - if not contains_function_call_or_result(history[message_index]): + if not any( + isinstance(item, (FunctionCallContent, FunctionResultContent)) for item in history[message_index].items + ): break message_index -= 1 @@ -116,7 +115,7 @@ def locate_safe_reduction_index( return target_index -@experimental +@experimental_function def extract_range( history: list[ChatMessageContent], start: int, @@ -165,11 +164,6 @@ def extract_range( i += 1 continue - # skipping system/developer message - if msg.role in (AuthorRole.DEVELOPER, AuthorRole.SYSTEM): - i += 1 - continue - # If preserve_pairs is on, and there's a paired index, skip or include them both if preserve_pairs and idx in pair_map: paired_idx = pair_map[idx] @@ -211,7 +205,7 @@ def extract_range( return extracted -@experimental +@experimental_function def contains_function_call_or_result(msg: ChatMessageContent) -> bool: """Return True if the message has any function call or function result.""" return any(isinstance(item, (FunctionCallContent, FunctionResultContent)) for item in msg.items) diff --git a/python/semantic_kernel/contents/history_reducer/chat_history_summarization_reducer.py b/python/semantic_kernel/contents/history_reducer/chat_history_summarization_reducer.py index e01594f1e0e4..1feaf1a839ad 100644 --- a/python/semantic_kernel/contents/history_reducer/chat_history_summarization_reducer.py +++ 
b/python/semantic_kernel/contents/history_reducer/chat_history_summarization_reducer.py @@ -2,20 +2,20 @@ import logging import sys +from typing import Any + +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.utils.experimental_decorator import experimental_class if sys.version < "3.11": from typing_extensions import Self # pragma: no cover else: from typing import Self # type: ignore # pragma: no cover -if sys.version < "3.12": - from typing_extensions import override # pragma: no cover -else: - from typing import override # type: ignore # pragma: no cover from pydantic import Field from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase -from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.const import DEFAULT_SERVICE_NAME from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.history_reducer.chat_history_reducer import ChatHistoryReducer @@ -27,7 +27,6 @@ locate_summarization_boundary, ) from semantic_kernel.exceptions.content_exceptions import ChatHistoryReducerException -from semantic_kernel.utils.feature_stage_decorator import experimental logger = logging.getLogger(__name__) @@ -48,41 +47,75 @@ """ -@experimental +@experimental_class class ChatHistorySummarizationReducer(ChatHistoryReducer): - """A ChatHistory with logic to summarize older messages past a target count. - - This class inherits from ChatHistoryReducer, which in turn inherits from ChatHistory. - It can be used anywhere a ChatHistory is expected, while adding summarization capability. - - Args: - target_count: The target message count. - threshold_count: The threshold count to avoid orphaning messages. - auto_reduce: Whether to automatically reduce the chat history, default is False. - service: The ChatCompletion service to use for summarization. - summarization_instructions: The summarization instructions, optional. - use_single_summary: Whether to use a single summary message, default is True. - fail_on_error: Raise error if summarization fails, default is True. - include_function_content_in_summary: Whether to include function calls/results in the summary, default is False. - execution_settings: The execution settings for the summarization prompt, optional. - - """ + """A ChatHistory with logic to summarize older messages past a target count.""" service: ChatCompletionClientBase summarization_instructions: str = Field( - default=DEFAULT_SUMMARIZATION_PROMPT, + default_factory=lambda: DEFAULT_SUMMARIZATION_PROMPT, description="The summarization instructions.", - kw_only=True, ) - use_single_summary: bool = Field(default=True, description="Whether to use a single summary message.") - fail_on_error: bool = Field(default=True, description="Raise error if summarization fails.") + use_single_summary: bool = Field(True, description="Whether to use a single summary message.") + fail_on_error: bool = Field(True, description="Raise error if summarization fails.") + service_id: str = Field( + default_factory=lambda: DEFAULT_SERVICE_NAME, description="The ID of the chat completion service." + ) include_function_content_in_summary: bool = Field( - default=False, description="Whether to include function calls/results in the summary." + False, description="Whether to include function calls/results in the summary." 
) execution_settings: PromptExecutionSettings | None = None - @override + def __init__( + self, + service: ChatCompletionClientBase, + target_count: int, + service_id: str | None = None, + threshold_count: int | None = None, + summarization_instructions: str | None = None, + use_single_summary: bool | None = None, + fail_on_error: bool | None = None, + include_function_content_in_summary: bool | None = None, + execution_settings: PromptExecutionSettings | None = None, + **kwargs: Any, + ): + """Initialize the ChatHistorySummarizationReducer. + + Args: + service (ChatCompletionClientBase): The chat completion service. + target_count (int): The target number of messages to retain after applying summarization. + service_id (str | None): The ID of the chat completion service. + threshold_count (int | None): The threshold beyond target_count required to trigger reduction. + summarization_instructions (str | None): The summarization instructions. + use_single_summary (bool | None): Whether to use a single summary message. + fail_on_error (bool | None): Raise error if summarization fails. + include_function_content_in_summary (bool | None): Whether to include function calls/results in the summary. + execution_settings (PromptExecutionSettings | None): The prompt execution settings. + **kwargs (Any): Additional keyword arguments. + """ + args: dict[str, Any] = { + "service": service, + "target_count": target_count, + } + if service_id is not None: + args["service_id"] = service_id + if threshold_count is not None: + args["threshold_count"] = threshold_count + if summarization_instructions is not None: + args["summarization_instructions"] = summarization_instructions + if use_single_summary is not None: + args["use_single_summary"] = use_single_summary + if fail_on_error is not None: + args["fail_on_error"] = fail_on_error + if include_function_content_in_summary is not None: + args["include_function_content_in_summary"] = include_function_content_in_summary + if execution_settings is not None: + args["execution_settings"] = execution_settings + + super().__init__(**args, **kwargs) + async def reduce(self) -> Self | None: + """Summarize the older messages past the target message count.""" history = self.messages if len(history) <= self.target_count + (self.threshold_count or 0): return None # No summarization needed @@ -154,15 +187,19 @@ async def _summarize(self, messages: list[ChatMessageContent]) -> ChatMessageCon from semantic_kernel.contents.utils.author_role import AuthorRole chat_history = ChatHistory(messages=messages) - execution_settings = self.execution_settings or self.service.get_prompt_execution_settings_from_settings( - PromptExecutionSettings() + + role = ( + getattr(self.execution_settings, "instruction_role", AuthorRole.SYSTEM) + if self.execution_settings + else AuthorRole.SYSTEM ) - chat_history.add_message( - ChatMessageContent( - role=getattr(execution_settings, "instruction_role", AuthorRole.SYSTEM), - content=self.summarization_instructions, - ) + + chat_history.add_message(ChatMessageContent(role=role, content=self.summarization_instructions)) + + execution_settings = self.execution_settings or self.service.get_prompt_execution_settings_class()( + service_id=self.service_id ) + return await self.service.get_chat_message_content(chat_history=chat_history, settings=execution_settings) def __eq__(self, other: object) -> bool: diff --git a/python/semantic_kernel/contents/history_reducer/chat_history_truncation_reducer.py 
b/python/semantic_kernel/contents/history_reducer/chat_history_truncation_reducer.py index 32c848a098d7..4faf28876748 100644 --- a/python/semantic_kernel/contents/history_reducer/chat_history_truncation_reducer.py +++ b/python/semantic_kernel/contents/history_reducer/chat_history_truncation_reducer.py @@ -2,42 +2,47 @@ import logging import sys +from typing import Any + +from semantic_kernel.utils.experimental_decorator import experimental_class if sys.version < "3.11": from typing_extensions import Self # pragma: no cover else: from typing import Self # type: ignore # pragma: no cover -if sys.version < "3.12": - from typing_extensions import override # pragma: no cover -else: - from typing import override # type: ignore # pragma: no cover - from semantic_kernel.contents.history_reducer.chat_history_reducer import ChatHistoryReducer from semantic_kernel.contents.history_reducer.chat_history_reducer_utils import ( extract_range, locate_safe_reduction_index, ) -from semantic_kernel.utils.feature_stage_decorator import experimental logger = logging.getLogger(__name__) -@experimental +@experimental_class class ChatHistoryTruncationReducer(ChatHistoryReducer): """A ChatHistory that supports truncation logic. Because this class inherits from ChatHistoryReducer (which in turn inherits from ChatHistory), it can also be used anywhere a ChatHistory is expected, while adding truncation capability. - - Args: - target_count: The target message count. - threshold_count: The threshold count to avoid orphaning messages. - auto_reduce: Whether to automatically reduce the chat history, default is False. """ - @override + def __init__(self, target_count: int, threshold_count: int | None = None, **kwargs: Any): + """Initialize the truncation reducer.""" + args: dict[str, Any] = { + "target_count": target_count, + } + if threshold_count is not None: + args["threshold_count"] = threshold_count + super().__init__(**args, **kwargs) + async def reduce(self) -> Self | None: + """Truncate the chat history to the target message count, avoiding orphaned calls. + + Returns: + The truncated list of messages if truncation occurred, or None otherwise. + """ history = self.messages if len(history) <= self.target_count + (self.threshold_count or 0): # No need to reduce diff --git a/python/semantic_kernel/contents/image_content.py b/python/semantic_kernel/contents/image_content.py index 4b25df692440..b0d66f133abf 100644 --- a/python/semantic_kernel/contents/image_content.py +++ b/python/semantic_kernel/contents/image_content.py @@ -4,20 +4,19 @@ import mimetypes from typing import Any, ClassVar, Literal, TypeVar -from numpy import ndarray from pydantic import Field from typing_extensions import deprecated from semantic_kernel.contents.binary_content import BinaryContent from semantic_kernel.contents.const import IMAGE_CONTENT_TAG, ContentTypes -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger = logging.getLogger(__name__) _T = TypeVar("_T", bound="ImageContent") -@experimental +@experimental_class class ImageContent(BinaryContent): """Image Content class. 
@@ -53,40 +52,6 @@ class ImageContent(BinaryContent): content_type: Literal[ContentTypes.IMAGE_CONTENT] = Field(IMAGE_CONTENT_TAG, init=False) # type: ignore tag: ClassVar[str] = IMAGE_CONTENT_TAG - def __init__( - self, - uri: str | None = None, - data_uri: str | None = None, - data: str | bytes | ndarray | None = None, - data_format: str | None = None, - mime_type: str | None = None, - **kwargs: Any, - ): - """Create an Image Content object, either from a data_uri or data. - - Args: - uri: The reference uri of the content. - data_uri: The data uri of the content. - data: The data of the content. - data_format: The format of the data (e.g. base64). - mime_type: The mime type of the image, only used with data. - kwargs: Any additional arguments: - inner_content: The inner content of the response, - this should hold all the information from the response so even - when not creating a subclass a developer - can leverage the full thing. - ai_model_id: The id of the AI model that generated this response. - metadata: Any metadata that should be attached to the response. - """ - super().__init__( - uri=uri, - data_uri=data_uri, - data=data, - data_format=data_format, - mime_type=mime_type, - **kwargs, - ) - @classmethod @deprecated("The `from_image_path` method is deprecated; use `from_image_file` instead.", category=None) def from_image_path(cls: type[_T], image_path: str) -> _T: diff --git a/python/semantic_kernel/contents/realtime_events.py b/python/semantic_kernel/contents/realtime_events.py deleted file mode 100644 index d74287d5ccf4..000000000000 --- a/python/semantic_kernel/contents/realtime_events.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from typing import Annotated, Any, ClassVar, Literal, Union - -from pydantic import Field - -from semantic_kernel.contents.audio_content import AudioContent -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.contents.image_content import ImageContent -from semantic_kernel.contents.text_content import TextContent -from semantic_kernel.kernel_pydantic import KernelBaseModel - -RealtimeEvents = Annotated[ - Union[ - "RealtimeEvent", - "RealtimeAudioEvent", - "RealtimeTextEvent", - "RealtimeFunctionCallEvent", - "RealtimeFunctionResultEvent", - "RealtimeImageEvent", - ], - Field(discriminator="event_type"), -] - - -class RealtimeEvent(KernelBaseModel): - """Base class for all service events.""" - - service_event: Any | None = Field(default=None, description="The event content.") - service_type: str | None = None - event_type: ClassVar[Literal["service"]] = "service" - - -class RealtimeAudioEvent(RealtimeEvent): - """Audio event type.""" - - event_type: ClassVar[Literal["audio"]] = "audio" # type: ignore - audio: AudioContent = Field(..., description="Audio content.") - - -class RealtimeTextEvent(RealtimeEvent): - """Text event type.""" - - event_type: ClassVar[Literal["text"]] = "text" # type: ignore - text: TextContent = Field(..., description="Text content.") - - -class RealtimeFunctionCallEvent(RealtimeEvent): - """Function call event type.""" - - event_type: ClassVar[Literal["function_call"]] = "function_call" # type: ignore - function_call: FunctionCallContent = Field(..., description="Function call content.") - - -class RealtimeFunctionResultEvent(RealtimeEvent): - """Function result event type.""" - - event_type: ClassVar[Literal["function_result"]] = "function_result" # 
type: ignore - function_result: FunctionResultContent = Field(..., description="Function result content.") - - -class RealtimeImageEvent(RealtimeEvent): - """Image event type.""" - - event_type: ClassVar[Literal["image"]] = "image" # type: ignore - image: ImageContent = Field(..., description="Image content.") diff --git a/python/semantic_kernel/contents/streaming_annotation_content.py b/python/semantic_kernel/contents/streaming_annotation_content.py index 14afea3e6cf3..addf4750d75c 100644 --- a/python/semantic_kernel/contents/streaming_annotation_content.py +++ b/python/semantic_kernel/contents/streaming_annotation_content.py @@ -8,14 +8,14 @@ from semantic_kernel.contents.const import STREAMING_ANNOTATION_CONTENT_TAG, ContentTypes from semantic_kernel.contents.kernel_content import KernelContent -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger = logging.getLogger(__name__) _T = TypeVar("_T", bound="StreamingAnnotationContent") -@experimental +@experimental_class class StreamingAnnotationContent(KernelContent): """Streaming Annotation content.""" diff --git a/python/semantic_kernel/contents/streaming_chat_message_content.py b/python/semantic_kernel/contents/streaming_chat_message_content.py index 88c31ef31473..683b498d0c69 100644 --- a/python/semantic_kernel/contents/streaming_chat_message_content.py +++ b/python/semantic_kernel/contents/streaming_chat_message_content.py @@ -1,15 +1,12 @@ # Copyright (c) Microsoft. All rights reserved. from enum import Enum -from typing import Annotated, Any, overload +from typing import Any, Union, overload from xml.etree.ElementTree import Element # nosec from pydantic import Field -from semantic_kernel.contents.audio_content import AudioContent -from semantic_kernel.contents.binary_content import BinaryContent from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.const import DISCRIMINATOR_FIELD from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent from semantic_kernel.contents.image_content import ImageContent @@ -19,19 +16,15 @@ from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.contents.utils.finish_reason import FinishReason -from semantic_kernel.contents.utils.hashing import make_hashable from semantic_kernel.exceptions import ContentAdditionException -STREAMING_CMC_ITEM_TYPES = Annotated[ - BinaryContent - | AudioContent - | ImageContent - | FunctionResultContent - | FunctionCallContent - | StreamingTextContent - | StreamingAnnotationContent - | StreamingFileReferenceContent, - Field(discriminator=DISCRIMINATOR_FIELD), +ITEM_TYPES = Union[ + ImageContent, + StreamingTextContent, + FunctionCallContent, + FunctionResultContent, + StreamingFileReferenceContent, + StreamingAnnotationContent, ] @@ -70,7 +63,7 @@ class StreamingChatMessageContent(ChatMessageContent, StreamingContentMixin): def __init__( self, role: AuthorRole, - items: list[STREAMING_CMC_ITEM_TYPES], + items: list[ITEM_TYPES], choice_index: int, name: str | None = None, inner_content: Any | None = None, @@ -100,7 +93,7 @@ def __init__( # type: ignore self, role: AuthorRole, choice_index: int, - items: list[STREAMING_CMC_ITEM_TYPES] | None = None, + items: list[ITEM_TYPES] | None = None, content: str | None 
= None, inner_content: Any | None = None, name: str | None = None, @@ -229,7 +222,6 @@ def to_element(self) -> "Element": def __hash__(self) -> int: """Return the hash of the streaming chat message content.""" - hashable_items = [make_hashable(item) for item in self.items] if self.items else [] return hash(( self.tag, self.role, @@ -238,5 +230,5 @@ def __hash__(self) -> int: self.finish_reason, self.choice_index, self.function_invoke_attempt, - *hashable_items, + *self.items, )) diff --git a/python/semantic_kernel/contents/streaming_file_reference_content.py b/python/semantic_kernel/contents/streaming_file_reference_content.py index f7be0a179335..4f934848e174 100644 --- a/python/semantic_kernel/contents/streaming_file_reference_content.py +++ b/python/semantic_kernel/contents/streaming_file_reference_content.py @@ -8,14 +8,14 @@ from semantic_kernel.contents.const import STREAMING_FILE_REFERENCE_CONTENT_TAG, ContentTypes from semantic_kernel.contents.kernel_content import KernelContent -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger = logging.getLogger(__name__) _T = TypeVar("_T", bound="StreamingFileReferenceContent") -@experimental +@experimental_class class StreamingFileReferenceContent(KernelContent): """Streaming File reference content.""" @@ -25,8 +25,6 @@ class StreamingFileReferenceContent(KernelContent): ) tag: ClassVar[str] = STREAMING_FILE_REFERENCE_CONTENT_TAG file_id: str | None = None - tools: list[Any] = Field(default_factory=list) - data_source: Any | None = None def __str__(self) -> str: """Return the string representation of the file reference content.""" diff --git a/python/semantic_kernel/contents/utils/data_uri.py b/python/semantic_kernel/contents/utils/data_uri.py index 1695491e9110..d49022a6b104 100644 --- a/python/semantic_kernel/contents/utils/data_uri.py +++ b/python/semantic_kernel/contents/utils/data_uri.py @@ -4,11 +4,15 @@ import binascii import logging import re -from collections.abc import Mapping, MutableMapping, Sequence +import sys from typing import Any, TypeVar -from numpy import ndarray -from pydantic import Field, ValidationError, field_validator +if sys.version < "3.11": + from typing_extensions import Self # pragma: no cover +else: + from typing import Self # type: ignore # pragma: no cover + +from pydantic import Field, ValidationError, field_validator, model_validator from pydantic_core import Url from semantic_kernel.exceptions import ContentInitializationError @@ -20,92 +24,49 @@ class DataUri(KernelBaseModel, validate_assignment=True): - """A class to represent a data uri. - - If a array is provided, that will be used as the data since it is the most efficient, - otherwise the bytes will be used, or the string will be converted to bytes. - - When updating either array or bytes, the other will not be updated. - - Args: - data_bytes: The data as bytes. - data_str: The data as a string. - data_array: The data as a numpy array. - mime_type: The mime type of the data. - parameters: Any parameters for the data. - data_format: The format of the data (e.g. base64). 
+ """A class to represent a data uri.""" - """ - - data_array: ndarray | None = None data_bytes: bytes | None = None + data_str: str | None = None mime_type: str | None = None - parameters: MutableMapping[str, str] = Field(default_factory=dict) + parameters: dict[str, str] = Field(default_factory=dict) data_format: str | None = None - def __init__( - self, - data_bytes: bytes | None = None, - data_str: str | None = None, - data_array: ndarray | None = None, - mime_type: str | None = None, - parameters: Sequence[str] | Mapping[str, str] | None = None, - data_format: str | None = None, - **kwargs: Any, - ): - """Initialize the data uri. - - Make sure to set the data_format to base64 so that it can be decoded properly. - - Args: - data_bytes: The data as bytes. - data_str: The data as a string. - data_array: The data as a numpy array. - mime_type: The mime type of the data. - parameters: Any parameters for the data. - data_format: The format of the data (e.g. base64). - kwargs: Any additional arguments. - """ - args: dict[str, Any] = {} - if data_bytes is not None: - args["data_bytes"] = data_bytes - if data_array is not None: - args["data_array"] = data_array - - if mime_type is not None: - args["mime_type"] = mime_type - if parameters is not None: - args["parameters"] = parameters - if data_format is not None: - args["data_format"] = data_format - - if data_str is not None and not data_bytes: - if data_format and data_format.lower() == "base64": - try: - args["data_bytes"] = base64.b64decode(data_str, validate=True) - except binascii.Error as exc: - raise ContentInitializationError("Invalid base64 data.") from exc - else: - args["data_bytes"] = data_str.encode("utf-8") - if "data_array" not in args and "data_bytes" not in args: - raise ContentInitializationError("Either data_bytes, data_str or data_array must be provided.") - super().__init__(**args, **kwargs) - - def update_data(self, value: str | bytes | ndarray) -> None: + def update_data(self, value: str | bytes): """Update the data, using either a string or bytes.""" - match value: - case ndarray(): - self.data_array = value - case str(): - if self.data_format and self.data_format.lower() == "base64": - self.data_bytes = base64.b64decode(value, validate=True) + if isinstance(value, str): + self.data_str = value + else: + self.data_bytes = value + + @model_validator(mode="before") + @classmethod + def _validate_data(cls, values: Any) -> dict[str, Any]: + """Validate the data.""" + if isinstance(values, dict) and not values.get("data_bytes") and not values.get("data_str"): + raise ContentInitializationError("Either data_bytes or data_str must be provided.") + return values + + @model_validator(mode="after") + def _parse_data(self) -> Self: + """Parse the data bytes to str.""" + if not self.data_str and self.data_bytes: + if self.data_format and self.data_format.lower() == "base64": + self.data_str = base64.b64encode(self.data_bytes).decode("utf-8") + else: + self.data_str = self.data_bytes.decode("utf-8") + if self.data_format and self.data_format.lower() == "base64" and self.data_str: + try: + if not self.data_bytes: + self.data_bytes = base64.b64decode(self.data_str, validate=True) else: - self.data_bytes = value.encode("utf-8") - case _: - self.data_bytes = value + base64.b64decode(self.data_str, validate=True) + except binascii.Error as exc: + raise ContentInitializationError("Invalid base64 data.") from exc + return self @field_validator("parameters", mode="before") - def _validate_parameters(cls, value: list[str] | dict[str, str] | 
None) -> dict[str, str]: + def _validate_parameters(cls, value: list[str] | dict[str, str] | None = None) -> dict[str, str]: if not value: return {} if isinstance(value, dict): @@ -148,29 +109,17 @@ def from_data_uri(cls: type[_T], data_uri: str | Url, default_mime_type: str = " matches["parameters"] = matches["parameters"].strip(";").split(";") if not matches.get("mime_type"): matches["mime_type"] = default_mime_type - return cls(**matches) # type: ignore + return cls(**matches) def to_string(self, metadata: dict[str, str] = {}) -> str: """Return the data uri as a string.""" parameters = ";".join([f"{key}={val}" for key, val in metadata.items()]) parameters = f";{parameters}" if parameters else "" data_format = f"{self.data_format}" if self.data_format else "" - return f"data:{self.mime_type or ''}{parameters};{data_format},{self._data_str()}" + return f"data:{self.mime_type or ''}{parameters};{data_format},{self.data_str}" def __eq__(self, value: object) -> bool: """Check if the data uri is equal to another.""" if not isinstance(value, DataUri): return False return self.to_string() == value.to_string() - - def _data_str(self) -> str: - """Return the data as a string.""" - if self.data_array is not None: - if self.data_format and self.data_format.lower() == "base64": - return base64.b64encode(self.data_array.tobytes()).decode("utf-8") - return self.data_array.tobytes().decode("utf-8") - if self.data_bytes is not None: - if self.data_format and self.data_format.lower() == "base64": - return base64.b64encode(self.data_bytes).decode("utf-8") - return self.data_bytes.decode("utf-8") - return "" diff --git a/python/semantic_kernel/contents/utils/hashing.py b/python/semantic_kernel/contents/utils/hashing.py deleted file mode 100644 index 02b341427027..000000000000 --- a/python/semantic_kernel/contents/utils/hashing.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from typing import Any - -from pydantic import BaseModel - - -def make_hashable(input: Any, visited=None) -> Any: - """Recursively convert unhashable types to hashable equivalents. - - Args: - input: The input to convert to a hashable type. - visited: A dictionary of visited objects to prevent infinite recursion. - - Returns: - Any: The input converted to a hashable type. 
- """ - if visited is None: - visited = {} - - # If we've seen this object before, return the stored placeholder or final result - unique_obj_id = id(input) - if unique_obj_id in visited: - return visited[unique_obj_id] - - # Handle Pydantic models by manually traversing fields - if isinstance(input, BaseModel): - visited[unique_obj_id] = None - data = {} - for field_name in input.model_fields: - value = getattr(input, field_name) - data[field_name] = make_hashable(value, visited) - result = tuple(sorted(data.items())) - visited[unique_obj_id] = result - return result - - # Convert dictionaries - if isinstance(input, dict): - visited[unique_obj_id] = None - items = tuple(sorted((k, make_hashable(v, visited)) for k, v in input.items())) - visited[unique_obj_id] = items - return items - - # Convert lists, sets, and tuples to tuples - if isinstance(input, (list, set, tuple)): - visited[unique_obj_id] = None - items = tuple(make_hashable(item, visited) for item in input) - visited[unique_obj_id] = items - return items - - # If it's already something hashable, just return it - return input diff --git a/python/semantic_kernel/core_plugins/crew_ai/__init__.py b/python/semantic_kernel/core_plugins/crew_ai/__init__.py deleted file mode 100644 index 08c61da6d2fa..000000000000 --- a/python/semantic_kernel/core_plugins/crew_ai/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise import CrewAIEnterprise -from semantic_kernel.core_plugins.crew_ai.crew_ai_models import ( - CrewAIStatusResponse, -) -from semantic_kernel.core_plugins.crew_ai.crew_ai_settings import ( - CrewAISettings, -) - -__all__ = ["CrewAIEnterprise", "CrewAISettings", "CrewAIStatusResponse"] diff --git a/python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise.py b/python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise.py deleted file mode 100644 index 643e08b586ea..000000000000 --- a/python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise.py +++ /dev/null @@ -1,261 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import logging -from typing import Any - -import aiohttp -from pydantic import Field, ValidationError - -from semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise_client import CrewAIEnterpriseClient -from semantic_kernel.core_plugins.crew_ai.crew_ai_models import CrewAIEnterpriseKickoffState, CrewAIStatusResponse -from semantic_kernel.core_plugins.crew_ai.crew_ai_settings import CrewAISettings -from semantic_kernel.exceptions.function_exceptions import ( - FunctionExecutionException, - FunctionResultError, - PluginInitializationError, -) -from semantic_kernel.functions import kernel_function -from semantic_kernel.functions.kernel_function_from_method import KernelFunctionFromMethod -from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata -from semantic_kernel.functions.kernel_plugin import KernelPlugin -from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental - -logger: logging.Logger = logging.getLogger(__name__) - - -@experimental -class CrewAIEnterprise(KernelBaseModel): - """Class to interface with Crew.AI Crews from Semantic Kernel. - - This object can be used directly or as a plugin in the Kernel. 
- """ - - client: CrewAIEnterpriseClient - polling_interval: float = Field(default=1.0) - polling_timeout: float = Field(default=30.0) - - def __init__( - self, - endpoint: str | None = None, - auth_token: str | None = None, - polling_interval: float | None = 1.0, - polling_timeout: float | None = 30.0, - session: aiohttp.ClientSession | None = None, - env_file_path: str | None = None, - env_file_encoding: str | None = None, - ): - """Initialize a new instance of the class. This object can be used directly or as a plugin in the Kernel. - - Args: - endpoint (str | None, optional): The API endpoint. - auth_token (str | None, optional): The authentication token. - polling_interval (float, optional): The polling interval in seconds. Defaults to 1.0. - polling_timeout (float, optional): The polling timeout in seconds. Defaults to 30.0. - session (aiohttp.ClientSession | None, optional): The HTTP client session. Defaults to None. - env_file_path (str | None): Use the environment settings file as a - fallback to environment variables. (Optional) - env_file_encoding (str | None): The encoding of the environment settings file. (Optional) - """ - try: - settings = CrewAISettings.create( - endpoint=endpoint, - auth_token=auth_token, - polling_interval=polling_interval, - polling_timeout=polling_timeout, - env_file_path=env_file_path, - env_file_encoding=env_file_encoding, - ) - except ValidationError as ex: - raise PluginInitializationError("Failed to initialize CrewAI settings.") from ex - - client = CrewAIEnterpriseClient( - endpoint=settings.endpoint, auth_token=settings.auth_token.get_secret_value(), session=session - ) - - super().__init__( - client=client, - polling_interval=settings.polling_interval, - polling_timeout=settings.polling_timeout, - ) - - async def kickoff( - self, - inputs: dict[str, Any] | None = None, - task_webhook_url: str | None = None, - step_webhook_url: str | None = None, - crew_webhook_url: str | None = None, - ) -> str: - """Kickoff a new Crew AI task. - - Args: - inputs (dict[str, Any], optional): The inputs for the task. Defaults to None. - task_webhook_url (str | None, optional): The webhook URL for task updates. Defaults to None. - step_webhook_url (str | None, optional): The webhook URL for step updates. Defaults to None. - crew_webhook_url (str | None, optional): The webhook URL for crew updates. Defaults to None. - - Returns: - str: The ID of the kickoff response. - """ - try: - kickoff_response = await self.client.kickoff(inputs, task_webhook_url, step_webhook_url, crew_webhook_url) - logger.info(f"CrewAI Crew kicked off with Id: {kickoff_response.kickoff_id}") - return kickoff_response.kickoff_id - except Exception as ex: - raise FunctionExecutionException("Failed to kickoff CrewAI Crew.") from ex - - @kernel_function(description="Get the status of a Crew AI kickoff.") - async def get_crew_kickoff_status(self, kickoff_id: str) -> CrewAIStatusResponse: - """Get the status of a Crew AI task. - - Args: - kickoff_id (str): The ID of the kickoff response. - - Returns: - CrewAIStatusResponse: The status response of the task. - """ - try: - status_response = await self.client.get_status(kickoff_id) - logger.info(f"CrewAI Crew status for kickoff Id: {kickoff_id} is {status_response.state}") - return status_response - except Exception as ex: - raise FunctionExecutionException( - f"Failed to get status of CrewAI Crew with kickoff Id: {kickoff_id}." 
- ) from ex - - @kernel_function(description="Wait for the completion of a Crew AI kickoff.") - async def wait_for_crew_completion(self, kickoff_id: str) -> str: - """Wait for the completion of a Crew AI task. - - Args: - kickoff_id (str): The ID of the kickoff response. - - Returns: - str: The result of the task. - - Raises: - FunctionExecutionException: If the task fails or an error occurs while waiting for completion. - """ - status_response: CrewAIStatusResponse | None = None - state: str = CrewAIEnterpriseKickoffState.Pending - - async def poll_status(): - nonlocal state, status_response - while state not in [ - CrewAIEnterpriseKickoffState.Failed, - CrewAIEnterpriseKickoffState.Failure, - CrewAIEnterpriseKickoffState.Success, - CrewAIEnterpriseKickoffState.Not_Found, - ]: - logger.debug( - f"Waiting for CrewAI Crew with kickoff Id: {kickoff_id} to complete. Current state: {state}" - ) - - await asyncio.sleep(self.polling_interval) - - try: - status_response = await self.client.get_status(kickoff_id) - state = status_response.state - except Exception as ex: - raise FunctionExecutionException( - f"Failed to wait for completion of CrewAI Crew with kickoff Id: {kickoff_id}." - ) from ex - - await asyncio.wait_for(poll_status(), timeout=self.polling_timeout) - - logger.info(f"CrewAI Crew with kickoff Id: {kickoff_id} completed with status: {state}") - result = status_response.result if status_response is not None and status_response.result is not None else "" - - if state in ["Failed", "Failure"]: - raise FunctionResultError(f"CrewAI Crew failed with error: {result}") - - return result - - def create_kernel_plugin( - self, - name: str, - description: str, - parameters: list[KernelParameterMetadata] | None = None, - task_webhook_url: str | None = None, - step_webhook_url: str | None = None, - crew_webhook_url: str | None = None, - ) -> KernelPlugin: - """Creates a kernel plugin that can be used to invoke the CrewAI Crew. - - Args: - name (str): The name of the kernel plugin. - description (str): The description of the kernel plugin. - parameters (List[KernelParameterMetadata] | None, optional): The definitions of the Crew's - required inputs. Defaults to None. - task_webhook_url (Optional[str], optional): The task level webhook URL. Defaults to None. - step_webhook_url (Optional[str], optional): The step level webhook URL. Defaults to None. - crew_webhook_url (Optional[str], optional): The crew level webhook URL. Defaults to None. - - Returns: - dict[str, Any]: A dictionary representing the kernel plugin. 
- """ - - @kernel_function(description="Kickoff the CrewAI task.") - async def kickoff(**kwargs: Any) -> str: - args = self._build_arguments(parameters, kwargs) - return await self.kickoff( - inputs=args, - task_webhook_url=task_webhook_url, - step_webhook_url=step_webhook_url, - crew_webhook_url=crew_webhook_url, - ) - - @kernel_function(description="Kickoff the CrewAI task and wait for completion.") - async def kickoff_and_wait(**kwargs: Any) -> str: - args = self._build_arguments(parameters, kwargs) - kickoff_id = await self.kickoff( - inputs=args, - task_webhook_url=task_webhook_url, - step_webhook_url=step_webhook_url, - crew_webhook_url=crew_webhook_url, - ) - return await self.wait_for_crew_completion(kickoff_id) - - return KernelPlugin( - name, - description, - { - "kickoff": KernelFunctionFromMethod(kickoff, stream_method=None, parameters=parameters), - "kickoff_and_wait": KernelFunctionFromMethod( - kickoff_and_wait, stream_method=None, parameters=parameters - ), - "get_status": self.get_crew_kickoff_status, - "wait_for_completion": self.wait_for_crew_completion, - }, - ) - - def _build_arguments( - self, parameters: list[KernelParameterMetadata] | None, arguments: dict[str, Any] - ) -> dict[str, Any]: - """Builds the arguments for the CrewAI task from the provided parameters and arguments. - - Args: - parameters (List[KernelParameterMetadata] | None): The metadata for the inputs. - arguments (dict[str, Any]): The provided arguments. - - Returns: - dict[str, Any]: The built arguments. - """ - args = {} - if parameters: - for input in parameters: - name = input.name - if name not in arguments: - raise PluginInitializationError(f"Missing required input '{name}' for CrewAI.") - args[name] = arguments[name] - return args - - async def __aenter__(self): - """Enter the session.""" - await self.client.__aenter__() - return self - - async def __aexit__(self, *args, **kwargs): - """Close the session.""" - await self.client.__aexit__() diff --git a/python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise_client.py b/python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise_client.py deleted file mode 100644 index f52efa77e17d..000000000000 --- a/python/semantic_kernel/core_plugins/crew_ai/crew_ai_enterprise_client.py +++ /dev/null @@ -1,106 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from typing import Any - -import aiohttp - -from semantic_kernel.core_plugins.crew_ai.crew_ai_models import ( - CrewAIKickoffResponse, - CrewAIRequiredInputs, - CrewAIStatusResponse, -) -from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT - - -class CrewAIEnterpriseClient: - """Client to interact with the Crew AI Enterprise API.""" - - def __init__( - self, - endpoint: str, - auth_token: str, - session: aiohttp.ClientSession | None = None, - ): - """Initializes a new instance of the CrewAIEnterpriseClient class. - - Args: - endpoint (str): The API endpoint. - auth_token (str): The authentication token. - session (aiohttp.ClientSession | None, optional): The HTTP client session. Defaults to None. - """ - self.endpoint = endpoint - self.auth_token = auth_token - self.session = session if session is None else aiohttp.ClientSession() - self.request_header = { - "Authorization": f"Bearer {auth_token}", - "Content-Type": "application/json", - "user_agent": SEMANTIC_KERNEL_USER_AGENT, - } - - async def get_inputs(self) -> CrewAIRequiredInputs: - """Get the required inputs for Crew AI. - - Returns: - CrewAIRequiredInputs: The required inputs for Crew AI. 
- """ - async with ( - self.session.get(f"{self.endpoint}/inputs", headers=self.request_header) as response, # type: ignore - ): - response.raise_for_status() - return CrewAIRequiredInputs.model_validate_json(await response.text()) - - async def kickoff( - self, - inputs: dict[str, Any] | None = None, - task_webhook_url: str | None = None, - step_webhook_url: str | None = None, - crew_webhook_url: str | None = None, - ) -> CrewAIKickoffResponse: - """Kickoff a new Crew AI task. - - Args: - inputs (Optional[dict[str, Any]], optional): The inputs for the task. Defaults to None. - task_webhook_url (Optional[str], optional): The webhook URL for task updates. Defaults to None. - step_webhook_url (Optional[str], optional): The webhook URL for step updates. Defaults to None. - crew_webhook_url (Optional[str], optional): The webhook URL for crew updates. Defaults to None. - - Returns: - CrewAIKickoffResponse: The response from the kickoff request. - """ - content = { - "inputs": inputs, - "taskWebhookUrl": task_webhook_url, - "stepWebhookUrl": step_webhook_url, - "crewWebhookUrl": crew_webhook_url, - } - async with ( - self.session.post(f"{self.endpoint}/kickoff", json=content, headers=self.request_header) as response, # type: ignore - ): - response.raise_for_status() - body = await response.text() - return CrewAIKickoffResponse.model_validate_json(body) - - async def get_status(self, task_id: str) -> CrewAIStatusResponse: - """Get the status of a Crew AI task. - - Args: - task_id (str): The ID of the task. - - Returns: - CrewAIStatusResponse: The status response of the task. - """ - async with ( - self.session.get(f"{self.endpoint}/status/{task_id}", headers=self.request_header) as response, # type: ignore - ): - response.raise_for_status() - body = await response.text() - return CrewAIStatusResponse.model_validate_json(body) - - async def __aenter__(self): - """Enter the session.""" - await self.session.__aenter__() # type: ignore - return self - - async def __aexit__(self, *args, **kwargs): - """Close the session.""" - await self.session.close() # type: ignore diff --git a/python/semantic_kernel/core_plugins/crew_ai/crew_ai_models.py b/python/semantic_kernel/core_plugins/crew_ai/crew_ai_models.py deleted file mode 100644 index 540b3e5293af..000000000000 --- a/python/semantic_kernel/core_plugins/crew_ai/crew_ai_models.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from enum import Enum -from typing import Any - -from semantic_kernel.kernel_pydantic import KernelBaseModel - - -class CrewAIEnterpriseKickoffState(str, Enum): - """The Crew.AI Enterprise kickoff state.""" - - Pending = "PENDING" - Started = "STARTED" - Running = "RUNNING" - Success = "SUCCESS" - Failed = "FAILED" - Failure = "FAILURE" - Not_Found = "NOT FOUND" - - -class CrewAIStatusResponse(KernelBaseModel): - """Represents the status response from Crew AI.""" - - state: CrewAIEnterpriseKickoffState - result: str | None = None - last_step: dict[str, Any] | None = None - - -class CrewAIKickoffResponse(KernelBaseModel): - """Represents the kickoff response from Crew AI.""" - - kickoff_id: str - - -class CrewAIRequiredInputs(KernelBaseModel): - """Represents the required inputs for Crew AI.""" - - inputs: dict[str, str] diff --git a/python/semantic_kernel/core_plugins/crew_ai/crew_ai_settings.py b/python/semantic_kernel/core_plugins/crew_ai/crew_ai_settings.py deleted file mode 100644 index 7b54b6b9a90e..000000000000 --- a/python/semantic_kernel/core_plugins/crew_ai/crew_ai_settings.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from typing import ClassVar - -from pydantic import SecretStr - -from semantic_kernel.kernel_pydantic import KernelBaseSettings - - -class CrewAISettings(KernelBaseSettings): - """The Crew.AI settings. - - Required: - - endpoint: str - The API endpoint. - """ - - env_prefix: ClassVar[str] = "CREW_AI_" - - endpoint: str - auth_token: SecretStr - polling_interval: float = 1.0 - polling_timeout: float = 30.0 diff --git a/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_settings.py b/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_settings.py index f43d7784396b..d7e4195f49a5 100644 --- a/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_settings.py +++ b/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_settings.py @@ -59,7 +59,7 @@ def _validate_endpoint(cls, endpoint: str) -> str: else: endpoint_parsed = urlsplit(endpoint)._asdict() if endpoint_parsed["path"]: - endpoint_parsed["path"] = re.sub(r"/{2,}", "/", endpoint_parsed["path"]) + endpoint_parsed["path"] = re.sub("/{2,}", "/", endpoint_parsed["path"]) else: endpoint_parsed["path"] = "/" return str(urlunsplit(endpoint_parsed.values())) diff --git a/python/semantic_kernel/data/__init__.py b/python/semantic_kernel/data/__init__.py index a8f14d067e7d..4bc216e8dc84 100644 --- a/python/semantic_kernel/data/__init__.py +++ b/python/semantic_kernel/data/__init__.py @@ -3,7 +3,6 @@ from semantic_kernel.data.const import ( DEFAULT_DESCRIPTION, DEFAULT_FUNCTION_NAME, - DISTANCE_FUNCTION_DIRECTION_HELPER, DistanceFunction, IndexKind, ) @@ -42,7 +41,6 @@ __all__ = [ "DEFAULT_DESCRIPTION", "DEFAULT_FUNCTION_NAME", - "DISTANCE_FUNCTION_DIRECTION_HELPER", "AnyTagsEqualTo", "DistanceFunction", "EqualTo", diff --git a/python/semantic_kernel/data/const.py b/python/semantic_kernel/data/const.py index 1354ff276fae..ae5246938834 100644 --- a/python/semantic_kernel/data/const.py +++ b/python/semantic_kernel/data/const.py @@ -1,7 +1,5 @@ # Copyright (c) Microsoft. All rights reserved. 
-import operator -from collections.abc import Callable from enum import Enum from typing import Final @@ -93,14 +91,3 @@ class DistanceFunction(str, Enum): EUCLIDEAN_SQUARED_DISTANCE = "euclidean_squared_distance" MANHATTAN = "manhattan" HAMMING = "hamming" - - -DISTANCE_FUNCTION_DIRECTION_HELPER: Final[dict[DistanceFunction, Callable[[int | float, int | float], bool]]] = { - DistanceFunction.COSINE_SIMILARITY: operator.gt, - DistanceFunction.COSINE_DISTANCE: operator.le, - DistanceFunction.DOT_PROD: operator.gt, - DistanceFunction.EUCLIDEAN_DISTANCE: operator.le, - DistanceFunction.EUCLIDEAN_SQUARED_DISTANCE: operator.le, - DistanceFunction.MANHATTAN: operator.le, - DistanceFunction.HAMMING: operator.le, -} diff --git a/python/semantic_kernel/data/filter_clauses/any_tags_equal_to_filter_clause.py b/python/semantic_kernel/data/filter_clauses/any_tags_equal_to_filter_clause.py index 0da3299adbb7..e63f972232dd 100644 --- a/python/semantic_kernel/data/filter_clauses/any_tags_equal_to_filter_clause.py +++ b/python/semantic_kernel/data/filter_clauses/any_tags_equal_to_filter_clause.py @@ -4,10 +4,10 @@ from typing import ClassVar from semantic_kernel.data.filter_clauses.filter_clause_base import FilterClauseBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class AnyTagsEqualTo(FilterClauseBase): """A filter clause for a any tags equals comparison. diff --git a/python/semantic_kernel/data/filter_clauses/equal_to_filter_clause.py b/python/semantic_kernel/data/filter_clauses/equal_to_filter_clause.py index 9b6f956faa93..d60ac6d723cc 100644 --- a/python/semantic_kernel/data/filter_clauses/equal_to_filter_clause.py +++ b/python/semantic_kernel/data/filter_clauses/equal_to_filter_clause.py @@ -3,10 +3,10 @@ from typing import ClassVar from semantic_kernel.data.filter_clauses.filter_clause_base import FilterClauseBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class EqualTo(FilterClauseBase): """A filter clause for an equals comparison. 
diff --git a/python/semantic_kernel/data/filter_clauses/filter_clause_base.py b/python/semantic_kernel/data/filter_clauses/filter_clause_base.py index 16505a209be1..2337784a6bda 100644 --- a/python/semantic_kernel/data/filter_clauses/filter_clause_base.py +++ b/python/semantic_kernel/data/filter_clauses/filter_clause_base.py @@ -5,10 +5,10 @@ from typing import Any, ClassVar from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class FilterClauseBase(ABC, KernelBaseModel): """A base for all filter clauses.""" diff --git a/python/semantic_kernel/data/kernel_search_results.py b/python/semantic_kernel/data/kernel_search_results.py index 14f41a5b0f52..361a730df8d2 100644 --- a/python/semantic_kernel/data/kernel_search_results.py +++ b/python/semantic_kernel/data/kernel_search_results.py @@ -4,12 +4,12 @@ from typing import Any, Generic, TypeVar from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class T = TypeVar("T") -@experimental +@experimental_class class KernelSearchResults(KernelBaseModel, Generic[T]): """The result of a kernel search.""" diff --git a/python/semantic_kernel/data/record_definition/vector_store_model_decorator.py b/python/semantic_kernel/data/record_definition/vector_store_model_decorator.py index 0b6893116389..b3c6a6a412fc 100644 --- a/python/semantic_kernel/data/record_definition/vector_store_model_decorator.py +++ b/python/semantic_kernel/data/record_definition/vector_store_model_decorator.py @@ -3,7 +3,7 @@ import logging from inspect import Parameter, _empty, signature from types import MappingProxyType, NoneType -from typing import TypeVar +from typing import Any from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition from semantic_kernel.data.record_definition.vector_store_record_fields import ( @@ -11,17 +11,15 @@ VectorStoreRecordVectorField, ) from semantic_kernel.exceptions import VectorStoreModelException -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_function logger = logging.getLogger(__name__) -_T = TypeVar("_T") - -@experimental +@experimental_function def vectorstoremodel( - cls: type[_T] | None = None, -) -> type[_T]: + cls: Any | None = None, +): """Returns the class as a vector store model. This decorator makes a class a vector store model. @@ -46,18 +44,18 @@ def vectorstoremodel( VectorStoreModelException: If there is a ndarray field without a serialize or deserialize function. """ - def wrap(cls: type[_T]) -> type[_T]: + def wrap(cls: Any): # get fields and annotations cls_sig = signature(cls) setattr(cls, "__kernel_vectorstoremodel__", True) setattr(cls, "__kernel_vectorstoremodel_definition__", _parse_signature_to_definition(cls_sig.parameters)) - return cls # type: ignore + return cls # See if we're being called as @vectorstoremodel or @vectorstoremodel(). if cls is None: # We're called with parens. - return wrap # type: ignore + return wrap # We're called as @vectorstoremodel without parens. 
return wrap(cls) diff --git a/python/semantic_kernel/data/record_definition/vector_store_model_definition.py b/python/semantic_kernel/data/record_definition/vector_store_model_definition.py index aab27edbc15f..adc993ff22e3 100644 --- a/python/semantic_kernel/data/record_definition/vector_store_model_definition.py +++ b/python/semantic_kernel/data/record_definition/vector_store_model_definition.py @@ -1,7 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. from dataclasses import dataclass, field -from typing import TypeAlias, TypeVar +from typing import TypeVar from semantic_kernel.data.record_definition.vector_store_model_protocols import ( DeserializeFunctionProtocol, @@ -16,13 +16,13 @@ VectorStoreRecordVectorField, ) from semantic_kernel.exceptions import VectorStoreModelException -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class VectorStoreRecordFields = TypeVar("VectorStoreRecordFields", bound=VectorStoreRecordField) -FieldsType: TypeAlias = dict[str, VectorStoreRecordFields] +FieldsType = dict[str, VectorStoreRecordFields] -@experimental +@experimental_class @dataclass class VectorStoreRecordDefinition: """Memory record definition. diff --git a/python/semantic_kernel/data/record_definition/vector_store_model_protocols.py b/python/semantic_kernel/data/record_definition/vector_store_model_protocols.py index 1d190bd4fedc..0c83a131c965 100644 --- a/python/semantic_kernel/data/record_definition/vector_store_model_protocols.py +++ b/python/semantic_kernel/data/record_definition/vector_store_model_protocols.py @@ -3,12 +3,12 @@ from collections.abc import Sequence from typing import Any, Protocol, TypeVar, runtime_checkable -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class TModel = TypeVar("TModel", bound=object) -@experimental +@experimental_class @runtime_checkable class SerializeMethodProtocol(Protocol): """Data model serialization protocol. @@ -22,7 +22,7 @@ def serialize(self, **kwargs: Any) -> Any: ... # pragma: no cover -@experimental +@experimental_class @runtime_checkable class ToDictMethodProtocol(Protocol): """Class used internally to check if a model has a to_dict method.""" @@ -32,7 +32,7 @@ def to_dict(self, *args: Any, **kwargs: Any) -> dict[str, Any]: ... # pragma: no cover -@experimental +@experimental_class @runtime_checkable class ToDictFunctionProtocol(Protocol): """Protocol for to_dict function. @@ -48,7 +48,7 @@ class ToDictFunctionProtocol(Protocol): def __call__(self, record: Any, **kwargs: Any) -> Sequence[dict[str, Any]]: ... # pragma: no cover # noqa: D102 -@experimental +@experimental_class @runtime_checkable class FromDictFunctionProtocol(Protocol): """Protocol for from_dict function. @@ -64,7 +64,7 @@ class FromDictFunctionProtocol(Protocol): def __call__(self, records: Sequence[dict[str, Any]], **kwargs: Any) -> Any: ... # noqa: D102 -@experimental +@experimental_class @runtime_checkable class SerializeFunctionProtocol(Protocol): """Protocol for serialize function. @@ -81,7 +81,7 @@ class SerializeFunctionProtocol(Protocol): def __call__(self, record: Any, **kwargs: Any) -> Any: ... # noqa: D102 -@experimental +@experimental_class @runtime_checkable class DeserializeFunctionProtocol(Protocol): """Protocol for deserialize function. 
diff --git a/python/semantic_kernel/data/record_definition/vector_store_record_fields.py b/python/semantic_kernel/data/record_definition/vector_store_record_fields.py index 7af7519e96d5..536482b1069d 100644 --- a/python/semantic_kernel/data/record_definition/vector_store_record_fields.py +++ b/python/semantic_kernel/data/record_definition/vector_store_record_fields.py @@ -9,10 +9,10 @@ from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.data import DistanceFunction, IndexKind -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class @dataclass class VectorStoreRecordField(ABC): """Base class for all Vector Store Record Fields.""" @@ -21,13 +21,13 @@ class VectorStoreRecordField(ABC): property_type: str | None = None -@experimental +@experimental_class @dataclass class VectorStoreRecordKeyField(VectorStoreRecordField): """Memory record key field.""" -@experimental +@experimental_class @dataclass class VectorStoreRecordDataField(VectorStoreRecordField): """Memory record data field.""" @@ -38,7 +38,7 @@ class VectorStoreRecordDataField(VectorStoreRecordField): is_full_text_searchable: bool | None = None -@experimental +@experimental_class @dataclass class VectorStoreRecordVectorField(VectorStoreRecordField): """Memory record vector field. diff --git a/python/semantic_kernel/data/record_definition/vector_store_record_utils.py b/python/semantic_kernel/data/record_definition/vector_store_record_utils.py index 8812edca0943..00436fe8e199 100644 --- a/python/semantic_kernel/data/record_definition/vector_store_record_utils.py +++ b/python/semantic_kernel/data/record_definition/vector_store_record_utils.py @@ -10,7 +10,7 @@ ) from semantic_kernel.exceptions import VectorStoreModelException from semantic_kernel.kernel_types import OneOrMany -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings @@ -20,7 +20,7 @@ TModel = TypeVar("TModel", bound=object) -@experimental +@experimental_class class VectorStoreRecordUtils: """Helper class to easily add embeddings to a (set of) vector store record.""" diff --git a/python/semantic_kernel/data/search_filter.py b/python/semantic_kernel/data/search_filter.py index 1e91e5e0e2ed..4d0d84b5a7b9 100644 --- a/python/semantic_kernel/data/search_filter.py +++ b/python/semantic_kernel/data/search_filter.py @@ -10,12 +10,12 @@ from semantic_kernel.data.filter_clauses.equal_to_filter_clause import EqualTo from semantic_kernel.data.filter_clauses.filter_clause_base import FilterClauseBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class _T = TypeVar("_T", bound="SearchFilter") -@experimental +@experimental_class class SearchFilter: """A filter clause for a search.""" diff --git a/python/semantic_kernel/data/search_options.py b/python/semantic_kernel/data/search_options.py index e054cd082be5..292fce607111 100644 --- a/python/semantic_kernel/data/search_options.py +++ b/python/semantic_kernel/data/search_options.py @@ -5,10 +5,10 @@ from semantic_kernel.data.search_filter import SearchFilter from semantic_kernel.kernel_pydantic import KernelBaseModel -from 
semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class SearchOptions(KernelBaseModel): """Options for a search.""" diff --git a/python/semantic_kernel/data/text_search/text_search.py b/python/semantic_kernel/data/text_search/text_search.py index 5c7a27eb223a..85791877f046 100644 --- a/python/semantic_kernel/data/text_search/text_search.py +++ b/python/semantic_kernel/data/text_search/text_search.py @@ -24,7 +24,7 @@ from semantic_kernel.functions.kernel_function_decorator import kernel_function from semantic_kernel.functions.kernel_function_from_method import KernelFunctionFromMethod from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.data.search_options import SearchOptions @@ -35,7 +35,7 @@ logger = logging.getLogger(__name__) -@experimental +@experimental_class class TextSearch: """The base class for all text searches.""" diff --git a/python/semantic_kernel/data/text_search/text_search_filter.py b/python/semantic_kernel/data/text_search/text_search_filter.py index a156577d5747..f43b81da84ff 100644 --- a/python/semantic_kernel/data/text_search/text_search_filter.py +++ b/python/semantic_kernel/data/text_search/text_search_filter.py @@ -1,10 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. from semantic_kernel.data.search_filter import SearchFilter -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class TextSearchFilter(SearchFilter): """A filter clause for a text search query.""" diff --git a/python/semantic_kernel/data/text_search/text_search_options.py b/python/semantic_kernel/data/text_search/text_search_options.py index 075f27ffe06e..3d5afc187dab 100644 --- a/python/semantic_kernel/data/text_search/text_search_options.py +++ b/python/semantic_kernel/data/text_search/text_search_options.py @@ -7,10 +7,10 @@ from semantic_kernel.data.search_options import SearchOptions from semantic_kernel.data.text_search.text_search_filter import TextSearchFilter -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class TextSearchOptions(SearchOptions): """Options for a text search.""" diff --git a/python/semantic_kernel/data/text_search/text_search_result.py b/python/semantic_kernel/data/text_search/text_search_result.py index 08222daa0bff..3ca56f1bf7d0 100644 --- a/python/semantic_kernel/data/text_search/text_search_result.py +++ b/python/semantic_kernel/data/text_search/text_search_result.py @@ -1,10 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. 
from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class TextSearchResult(KernelBaseModel): """The result of a text search.""" diff --git a/python/semantic_kernel/data/vector_search/vector_search.py b/python/semantic_kernel/data/vector_search/vector_search.py index ffc53b39d50f..166676136ef9 100644 --- a/python/semantic_kernel/data/vector_search/vector_search.py +++ b/python/semantic_kernel/data/vector_search/vector_search.py @@ -11,7 +11,7 @@ from semantic_kernel.data.vector_search.vector_search_result import VectorSearchResult from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection from semantic_kernel.exceptions import VectorStoreModelDeserializationException -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class from semantic_kernel.utils.list_handler import desync_list TModel = TypeVar("TModel") @@ -20,7 +20,7 @@ logger = logging.getLogger(__name__) -@experimental +@experimental_class class VectorSearchBase(VectorStoreRecordCollection[TKey, TModel], Generic[TKey, TModel]): """Method for searching vectors.""" diff --git a/python/semantic_kernel/data/vector_search/vector_search_filter.py b/python/semantic_kernel/data/vector_search/vector_search_filter.py index 5e6fe31ce533..6944fe69ba4d 100644 --- a/python/semantic_kernel/data/vector_search/vector_search_filter.py +++ b/python/semantic_kernel/data/vector_search/vector_search_filter.py @@ -9,10 +9,10 @@ from semantic_kernel.data.filter_clauses.any_tags_equal_to_filter_clause import AnyTagsEqualTo from semantic_kernel.data.search_filter import SearchFilter -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class VectorSearchFilter(SearchFilter): """A filter clause for a vector search query.""" diff --git a/python/semantic_kernel/data/vector_search/vector_search_options.py b/python/semantic_kernel/data/vector_search/vector_search_options.py index 786f7627d68d..1c3ec85d69c7 100644 --- a/python/semantic_kernel/data/vector_search/vector_search_options.py +++ b/python/semantic_kernel/data/vector_search/vector_search_options.py @@ -7,10 +7,10 @@ from semantic_kernel.data.search_options import SearchOptions from semantic_kernel.data.vector_search.vector_search_filter import VectorSearchFilter -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class VectorSearchOptions(SearchOptions): """Options for vector search, builds on TextSearchOptions.""" diff --git a/python/semantic_kernel/data/vector_search/vector_search_result.py b/python/semantic_kernel/data/vector_search/vector_search_result.py index 04272bed7935..6495fe5f0921 100644 --- a/python/semantic_kernel/data/vector_search/vector_search_result.py +++ b/python/semantic_kernel/data/vector_search/vector_search_result.py @@ -3,12 +3,12 @@ from typing import Generic, TypeVar from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class TModel = 
TypeVar("TModel") -@experimental +@experimental_class class VectorSearchResult(KernelBaseModel, Generic[TModel]): """The result of a vector search.""" diff --git a/python/semantic_kernel/data/vector_search/vector_text_search.py b/python/semantic_kernel/data/vector_search/vector_text_search.py index 19ec8e1f22ef..f2a29b2908b8 100644 --- a/python/semantic_kernel/data/vector_search/vector_text_search.py +++ b/python/semantic_kernel/data/vector_search/vector_text_search.py @@ -11,7 +11,7 @@ VectorStoreMixinException, VectorStoreModelDeserializationException, ) -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.data.kernel_search_results import KernelSearchResults @@ -22,7 +22,7 @@ logger = logging.getLogger(__name__) -@experimental +@experimental_class class VectorTextSearchMixin(Generic[TModel]): """The mixin for text search, to be used in combination with VectorSearchBase.""" diff --git a/python/semantic_kernel/data/vector_search/vectorizable_text_search.py b/python/semantic_kernel/data/vector_search/vectorizable_text_search.py index 57960fca33d8..9c5b882cf6f4 100644 --- a/python/semantic_kernel/data/vector_search/vectorizable_text_search.py +++ b/python/semantic_kernel/data/vector_search/vectorizable_text_search.py @@ -10,7 +10,7 @@ VectorStoreMixinException, VectorStoreModelDeserializationException, ) -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.data.kernel_search_results import KernelSearchResults @@ -22,7 +22,7 @@ logger = logging.getLogger(__name__) -@experimental +@experimental_class class VectorizableTextSearchMixin(Generic[TModel]): """The mixin for searching with text that get's vectorized downstream. diff --git a/python/semantic_kernel/data/vector_search/vectorized_search.py b/python/semantic_kernel/data/vector_search/vectorized_search.py index b0e8329c795f..1b3e5aa25f9e 100644 --- a/python/semantic_kernel/data/vector_search/vectorized_search.py +++ b/python/semantic_kernel/data/vector_search/vectorized_search.py @@ -10,7 +10,7 @@ VectorStoreMixinException, VectorStoreModelDeserializationException, ) -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.data.kernel_search_results import KernelSearchResults @@ -22,7 +22,7 @@ logger = logging.getLogger(__name__) -@experimental +@experimental_class class VectorizedSearchMixin(Generic[TModel]): """The mixin for searching with vectors. 
To be used in combination with VectorSearchBase.""" diff --git a/python/semantic_kernel/data/vector_storage/vector_store.py b/python/semantic_kernel/data/vector_storage/vector_store.py index d0e24a0bf5da..796973a63854 100644 --- a/python/semantic_kernel/data/vector_storage/vector_store.py +++ b/python/semantic_kernel/data/vector_storage/vector_store.py @@ -10,10 +10,10 @@ from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class VectorStore(KernelBaseModel): """Base class for vector stores.""" diff --git a/python/semantic_kernel/data/vector_storage/vector_store_record_collection.py b/python/semantic_kernel/data/vector_storage/vector_store_record_collection.py index 49f7dacbf272..195d472d8155 100644 --- a/python/semantic_kernel/data/vector_storage/vector_store_record_collection.py +++ b/python/semantic_kernel/data/vector_storage/vector_store_record_collection.py @@ -3,17 +3,10 @@ import asyncio import contextlib import logging -import sys from abc import abstractmethod from collections.abc import Awaitable, Callable, Mapping, Sequence from typing import Any, ClassVar, Generic, TypeVar -if sys.version_info >= (3, 11): - from typing import Self # pragma: no cover -else: - from typing_extensions import Self # pragma: no cover - - from pydantic import BaseModel, model_validator from semantic_kernel.data.record_definition.vector_store_model_definition import VectorStoreRecordDefinition @@ -29,7 +22,7 @@ ) from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.kernel_types import OneOrMany -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class TModel = TypeVar("TModel", bound=object) TKey = TypeVar("TKey") @@ -38,7 +31,7 @@ logger = logging.getLogger(__name__) -@experimental +@experimental_class class VectorStoreRecordCollection(KernelBaseModel, Generic[TKey, TModel]): """Base class for a vector store record collection.""" @@ -71,7 +64,7 @@ def model_post_init(self, __context: object | None = None): """Post init function that sets the key field and container mode values, and validates the datamodel.""" self._validate_data_model() - async def __aenter__(self) -> Self: + async def __aenter__(self) -> "VectorStoreRecordCollection": """Enter the context manager.""" return self diff --git a/python/semantic_kernel/filters/auto_function_invocation/auto_function_invocation_context.py b/python/semantic_kernel/filters/auto_function_invocation/auto_function_invocation_context.py index 2997c3d055dd..5a0f7c300b46 100644 --- a/python/semantic_kernel/filters/auto_function_invocation/auto_function_invocation_context.py +++ b/python/semantic_kernel/filters/auto_function_invocation/auto_function_invocation_context.py @@ -5,13 +5,12 @@ from semantic_kernel.filters.filter_context_base import FilterContextBase if TYPE_CHECKING: - from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.functions.function_result import FunctionResult class 
AutoFunctionInvocationContext(FilterContextBase): - """The context for auto function invocation filtering. + """Class for auto function invocation context. This is the context supplied to the auto function invocation filters. @@ -20,11 +19,10 @@ class AutoFunctionInvocationContext(FilterContextBase): Another option is to terminate, this can be done by setting terminate to True. - Args: + Attributes: function: The function invoked. kernel: The kernel used. arguments: The arguments used to call the function. - is_streaming: Whether the function is streaming. chat_history: The chat history or None. function_result: The function result or None. request_sequence_index: The request sequence index. @@ -36,7 +34,6 @@ class AutoFunctionInvocationContext(FilterContextBase): chat_history: "ChatHistory | None" = None function_result: "FunctionResult | None" = None - execution_settings: "PromptExecutionSettings | None" = None request_sequence_index: int = 0 function_sequence_index: int = 0 function_count: int = 0 diff --git a/python/semantic_kernel/filters/filter_context_base.py b/python/semantic_kernel/filters/filter_context_base.py index 23d3806aafc3..b7f1b8da82c8 100644 --- a/python/semantic_kernel/filters/filter_context_base.py +++ b/python/semantic_kernel/filters/filter_context_base.py @@ -16,4 +16,3 @@ class FilterContextBase(KernelBaseModel): function: "KernelFunction" kernel: "Kernel" arguments: "KernelArguments" - is_streaming: bool = False diff --git a/python/semantic_kernel/filters/functions/function_invocation_context.py b/python/semantic_kernel/filters/functions/function_invocation_context.py index 3a557609571d..5c9dedce50cb 100644 --- a/python/semantic_kernel/filters/functions/function_invocation_context.py +++ b/python/semantic_kernel/filters/functions/function_invocation_context.py @@ -9,17 +9,16 @@ class FunctionInvocationContext(FilterContextBase): - """The context for function invocation filtering. + """Class for function invocation context. This filter can be used to monitor which functions are called. To log what function was called with which parameters and what output. Finally it can be used for caching by setting the result value. - Args: + Attributes: function: The function invoked. kernel: The kernel used. arguments: The arguments used to call the function. - is_streaming: Whether the function is streaming. result: The result of the function, or None. """ diff --git a/python/semantic_kernel/filters/kernel_filters_extension.py b/python/semantic_kernel/filters/kernel_filters_extension.py index 78152039724c..82bf489f8c5f 100644 --- a/python/semantic_kernel/filters/kernel_filters_extension.py +++ b/python/semantic_kernel/filters/kernel_filters_extension.py @@ -1,7 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
from abc import ABC -from collections.abc import Awaitable, Callable, Coroutine +from collections.abc import Callable, Coroutine from functools import partial from typing import Any, Literal, TypeVar @@ -13,9 +13,7 @@ from semantic_kernel.kernel_pydantic import KernelBaseModel FILTER_CONTEXT_TYPE = TypeVar("FILTER_CONTEXT_TYPE", bound=FilterContextBase) -CALLABLE_FILTER_TYPE = Callable[ - [FILTER_CONTEXT_TYPE, Callable[[FILTER_CONTEXT_TYPE], Awaitable[None]]], Awaitable[None] -] +CALLABLE_FILTER_TYPE = Callable[[FILTER_CONTEXT_TYPE, Callable[[FILTER_CONTEXT_TYPE], None]], None] ALLOWED_FILTERS_LITERAL = Literal[ FilterTypes.AUTO_FUNCTION_INVOCATION, FilterTypes.FUNCTION_INVOCATION, FilterTypes.PROMPT_RENDERING @@ -119,7 +117,6 @@ def construct_call_stack( def _rebuild_auto_function_invocation_context() -> None: - from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings # noqa: F401 from semantic_kernel.contents.chat_history import ChatHistory # noqa: F401 from semantic_kernel.filters.auto_function_invocation.auto_function_invocation_context import ( AutoFunctionInvocationContext, diff --git a/python/semantic_kernel/filters/prompts/prompt_render_context.py b/python/semantic_kernel/filters/prompts/prompt_render_context.py index 6afa5fa33766..dde178ad42d9 100644 --- a/python/semantic_kernel/filters/prompts/prompt_render_context.py +++ b/python/semantic_kernel/filters/prompts/prompt_render_context.py @@ -9,16 +9,15 @@ class PromptRenderContext(FilterContextBase): - """The context for prompt rendering filtering. + """Context for prompt rendering filters. When prompt rendering is expensive (for instance when there are expensive functions being called.) - This filter can be used to set the rendered_prompt or function result directly and returning. + This filter can be used to set the rendered_prompt directly and returning. - Args: + Attributes: function: The function invoked. kernel: The kernel used. arguments: The arguments used to call the function. - is_streaming: Whether the function is streaming. rendered_prompt: The result of the prompt rendering. function_result: The result of the function that used the prompt. diff --git a/python/semantic_kernel/functions/function_result.py b/python/semantic_kernel/functions/function_result.py index 1d3dd0c29f59..f86e201c9ee8 100644 --- a/python/semantic_kernel/functions/function_result.py +++ b/python/semantic_kernel/functions/function_result.py @@ -17,10 +17,9 @@ class FunctionResult(KernelBaseModel): """The result of a function. Args: - function: The metadata of the function that was invoked. - value: The value of the result. - rendered_prompt: The rendered prompt of the result. - metadata: The metadata of the result. + function (KernelFunctionMetadata): The metadata of the function that was invoked. + value (Any): The value of the result. + metadata (Mapping[str, Any]): The metadata of the result. 
Methods: __str__: Get the string representation of the result, will call str() on the value, @@ -32,7 +31,6 @@ class FunctionResult(KernelBaseModel): function: KernelFunctionMetadata value: Any - rendered_prompt: str | None = None metadata: dict[str, Any] = Field(default_factory=dict) def __str__(self) -> str: diff --git a/python/semantic_kernel/functions/kernel_function.py b/python/semantic_kernel/functions/kernel_function.py index a75459099290..04c95e1dc873 100644 --- a/python/semantic_kernel/functions/kernel_function.py +++ b/python/semantic_kernel/functions/kernel_function.py @@ -30,7 +30,6 @@ from semantic_kernel.prompt_template.handlebars_prompt_template import HandlebarsPromptTemplate from semantic_kernel.prompt_template.jinja2_prompt_template import Jinja2PromptTemplate from semantic_kernel.prompt_template.kernel_prompt_template import KernelPromptTemplate -from semantic_kernel.prompt_template.prompt_template_base import PromptTemplateBase if TYPE_CHECKING: from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings @@ -38,6 +37,7 @@ from semantic_kernel.functions.kernel_function_from_method import KernelFunctionFromMethod from semantic_kernel.functions.kernel_function_from_prompt import KernelFunctionFromPrompt from semantic_kernel.kernel import Kernel + from semantic_kernel.prompt_template.prompt_template_base import PromptTemplateBase from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig # Logger, tracer and meter for observability @@ -46,7 +46,7 @@ meter: metrics.Meter = metrics.get_meter_provider().get_meter(__name__) MEASUREMENT_FUNCTION_TAG_NAME: str = "semantic_kernel.function.name" -TEMPLATE_FORMAT_MAP: dict[TEMPLATE_FORMAT_TYPES, type[PromptTemplateBase]] = { +TEMPLATE_FORMAT_MAP = { KERNEL_TEMPLATE_FORMAT_NAME: KernelPromptTemplate, HANDLEBARS_TEMPLATE_FORMAT_NAME: HandlebarsPromptTemplate, JINJA2_TEMPLATE_FORMAT_NAME: Jinja2PromptTemplate, @@ -292,9 +292,7 @@ async def invoke_stream( if arguments is None: arguments = KernelArguments(**kwargs) _rebuild_function_invocation_context() - function_context = FunctionInvocationContext( - function=self, kernel=kernel, arguments=arguments, is_streaming=True - ) + function_context = FunctionInvocationContext(function=self, kernel=kernel, arguments=arguments) with tracer.start_as_current_span(self.fully_qualified_name) as current_span: KernelFunctionLogMessages.log_function_streaming_invoking(logger, self.fully_qualified_name) diff --git a/python/semantic_kernel/functions/kernel_function_extension.py b/python/semantic_kernel/functions/kernel_function_extension.py index 7cc963f94e28..84439dc7fea1 100644 --- a/python/semantic_kernel/functions/kernel_function_extension.py +++ b/python/semantic_kernel/functions/kernel_function_extension.py @@ -86,8 +86,6 @@ def add_plugin( return self.plugins[plugin.name] if not plugin_name: raise ValueError("plugin_name must be provided if a plugin is not supplied.") - if not isinstance(plugin_name, str): - raise TypeError("plugin_name must be a string.") if plugin: self.plugins[plugin_name] = KernelPlugin.from_object( plugin_name=plugin_name, plugin_instance=plugin, description=description diff --git a/python/semantic_kernel/functions/kernel_function_from_prompt.py b/python/semantic_kernel/functions/kernel_function_from_prompt.py index 2e35e7413618..2dc9420f29e5 100644 --- a/python/semantic_kernel/functions/kernel_function_from_prompt.py +++ b/python/semantic_kernel/functions/kernel_function_from_prompt.py @@ -183,10 +183,7 @@ async 
def _invoke_internal(self, context: FunctionInvocationContext) -> None: raise FunctionExecutionException(f"No completions returned while invoking function {self.name}") context.result = self._create_function_result( - completions=chat_message_contents, - chat_history=chat_history, - arguments=context.arguments, - prompt=prompt_render_result.rendered_prompt, + completions=chat_message_contents, chat_history=chat_history, arguments=context.arguments ) return @@ -208,10 +205,7 @@ async def _invoke_internal(self, context: FunctionInvocationContext) -> None: async def _invoke_internal_stream(self, context: FunctionInvocationContext) -> None: """Invokes the function stream with the given arguments.""" - prompt_render_result = await self._render_prompt(context, is_streaming=True) - if prompt_render_result.function_result is not None: - context.result = prompt_render_result.function_result - return + prompt_render_result = await self._render_prompt(context) if isinstance(prompt_render_result.ai_service, ChatCompletionClientBase): chat_history = ChatHistory.from_rendered_prompt(prompt_render_result.rendered_prompt) @@ -229,20 +223,14 @@ async def _invoke_internal_stream(self, context: FunctionInvocationContext) -> N f"Service `{type(prompt_render_result.ai_service)}` is not a valid AI service" ) - context.result = FunctionResult( - function=self.metadata, value=value, rendered_prompt=prompt_render_result.rendered_prompt - ) + context.result = FunctionResult(function=self.metadata, value=value) - async def _render_prompt( - self, context: FunctionInvocationContext, is_streaming: bool = False - ) -> PromptRenderingResult: + async def _render_prompt(self, context: FunctionInvocationContext) -> PromptRenderingResult: """Render the prompt and apply the prompt rendering filters.""" self.update_arguments_with_defaults(context.arguments) _rebuild_prompt_render_context() - prompt_render_context = PromptRenderContext( - function=self, kernel=context.kernel, arguments=context.arguments, is_streaming=is_streaming - ) + prompt_render_context = PromptRenderContext(function=self, kernel=context.kernel, arguments=context.arguments) stack = context.kernel.construct_call_stack( filter_type=FilterTypes.PROMPT_RENDERING, @@ -259,7 +247,6 @@ async def _render_prompt( rendered_prompt=prompt_render_context.rendered_prompt, ai_service=selected_service[0], execution_settings=selected_service[1], - function_result=prompt_render_context.function_result, ) async def _inner_render_prompt(self, context: PromptRenderContext) -> None: @@ -286,7 +273,6 @@ def _create_function_result( function=self.metadata, value=completions, metadata=metadata, - rendered_prompt=prompt, ) def update_arguments_with_defaults(self, arguments: KernelArguments) -> None: diff --git a/python/semantic_kernel/functions/kernel_function_metadata.py b/python/semantic_kernel/functions/kernel_function_metadata.py index 2204f710a251..50cdaa76e944 100644 --- a/python/semantic_kernel/functions/kernel_function_metadata.py +++ b/python/semantic_kernel/functions/kernel_function_metadata.py @@ -14,7 +14,7 @@ class KernelFunctionMetadata(KernelBaseModel): """The kernel function metadata.""" name: str = Field(..., pattern=FUNCTION_NAME_REGEX) - plugin_name: str | None = Field(default=None, pattern=PLUGIN_NAME_REGEX) + plugin_name: str | None = Field(None, pattern=PLUGIN_NAME_REGEX) description: str | None = Field(default=None) parameters: list[KernelParameterMetadata] = Field(default_factory=list) is_prompt: bool diff --git 
a/python/semantic_kernel/functions/kernel_parameter_metadata.py b/python/semantic_kernel/functions/kernel_parameter_metadata.py index 6eb28074879e..7572d5d924b2 100644 --- a/python/semantic_kernel/functions/kernel_parameter_metadata.py +++ b/python/semantic_kernel/functions/kernel_parameter_metadata.py @@ -13,9 +13,9 @@ class KernelParameterMetadata(KernelBaseModel): """The kernel parameter metadata.""" name: str | None = Field(..., pattern=FUNCTION_PARAM_NAME_REGEX) - description: str | None = None + description: str | None = Field(None) default_value: Any | None = None - type_: str | None = Field(default="str", alias="type") + type_: str | None = Field("str", alias="type") is_required: bool | None = False type_object: Any | None = None schema_data: dict[str, Any] | None = None diff --git a/python/semantic_kernel/functions/kernel_plugin.py b/python/semantic_kernel/functions/kernel_plugin.py index c373de0b1238..33e81c4ed8ee 100644 --- a/python/semantic_kernel/functions/kernel_plugin.py +++ b/python/semantic_kernel/functions/kernel_plugin.py @@ -398,16 +398,7 @@ def from_python_file( for name, cls_instance in inspect.getmembers(module, inspect.isclass): if cls_instance.__module__ != module_name: continue - # Check whether this class has at least one @kernel_function decorated method - has_kernel_function = False - for _, method in inspect.getmembers(cls_instance, inspect.isfunction): - if getattr(method, "__kernel_function__", False): - has_kernel_function = True - break - if not has_kernel_function: - continue - init_args = class_init_arguments.get(name, {}) if class_init_arguments else {} - instance = getattr(module, name)(**init_args) + instance = getattr(module, name)(**class_init_arguments.get(name, {}) if class_init_arguments else {}) return cls.from_object(plugin_name=plugin_name, description=description, plugin_instance=instance) raise PluginInitializationError(f"No class found in file: {py_file}") diff --git a/python/semantic_kernel/kernel.py b/python/semantic_kernel/kernel.py index ad71ffccfedb..a827fb8dbf1c 100644 --- a/python/semantic_kernel/kernel.py +++ b/python/semantic_kernel/kernel.py @@ -315,13 +315,10 @@ async def invoke_function_call( self, function_call: FunctionCallContent, chat_history: ChatHistory, - *, arguments: "KernelArguments | None" = None, - execution_settings: "PromptExecutionSettings | None" = None, function_call_count: int | None = None, request_index: int | None = None, - is_streaming: bool = False, - function_behavior: "FunctionChoiceBehavior | None" = None, + function_behavior: "FunctionChoiceBehavior" = None, # type: ignore ) -> "AutoFunctionInvocationContext | None": """Processes the provided FunctionCallContent and updates the chat history.""" args_cloned = copy(arguments) if arguments else KernelArguments() @@ -385,9 +382,7 @@ async def invoke_function_call( function=function_to_call, kernel=self, arguments=args_cloned, - is_streaming=is_streaming, chat_history=chat_history, - execution_settings=execution_settings, function_result=FunctionResult(function=function_to_call.metadata, value=None), function_count=function_call_count or 0, request_sequence_index=request_index or 0, diff --git a/python/semantic_kernel/kernel_pydantic.py b/python/semantic_kernel/kernel_pydantic.py index f3a2a4338bf6..50e6e047a172 100644 --- a/python/semantic_kernel/kernel_pydantic.py +++ b/python/semantic_kernel/kernel_pydantic.py @@ -35,8 +35,8 @@ class KernelBaseSettings(BaseSettings): """ env_prefix: ClassVar[str] = "" - env_file_path: str | None = Field(default=None, 
exclude=True) - env_file_encoding: str = Field(default="utf-8", exclude=True) + env_file_path: str | None = Field(None, exclude=True) + env_file_encoding: str = Field("utf-8", exclude=True) model_config = SettingsConfigDict( extra="ignore", diff --git a/python/semantic_kernel/memory/memory_query_result.py b/python/semantic_kernel/memory/memory_query_result.py index 7884392bd51e..23467885a257 100644 --- a/python/semantic_kernel/memory/memory_query_result.py +++ b/python/semantic_kernel/memory/memory_query_result.py @@ -3,10 +3,10 @@ from numpy import ndarray from semantic_kernel.memory.memory_record import MemoryRecord -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class MemoryQueryResult: """The memory query result.""" diff --git a/python/semantic_kernel/memory/memory_record.py b/python/semantic_kernel/memory/memory_record.py index 95e647b03e99..877953a336cd 100644 --- a/python/semantic_kernel/memory/memory_record.py +++ b/python/semantic_kernel/memory/memory_record.py @@ -4,10 +4,10 @@ from numpy import ndarray -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class MemoryRecord: """The in-built memory record.""" diff --git a/python/semantic_kernel/memory/memory_store_base.py b/python/semantic_kernel/memory/memory_store_base.py index c762ef9f080e..8a79472e1b00 100644 --- a/python/semantic_kernel/memory/memory_store_base.py +++ b/python/semantic_kernel/memory/memory_store_base.py @@ -5,10 +5,10 @@ from numpy import ndarray from semantic_kernel.memory.memory_record import MemoryRecord -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class MemoryStoreBase(ABC): """Base class for memory store.""" diff --git a/python/semantic_kernel/memory/null_memory.py b/python/semantic_kernel/memory/null_memory.py index e202c0ad9310..78fb88d74c42 100644 --- a/python/semantic_kernel/memory/null_memory.py +++ b/python/semantic_kernel/memory/null_memory.py @@ -2,10 +2,10 @@ from semantic_kernel.memory.memory_query_result import MemoryQueryResult from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class NullMemory(SemanticTextMemoryBase): """Class for null memory.""" diff --git a/python/semantic_kernel/memory/semantic_text_memory.py b/python/semantic_kernel/memory/semantic_text_memory.py index 0c3fdb0d66b7..454727a8c987 100644 --- a/python/semantic_kernel/memory/semantic_text_memory.py +++ b/python/semantic_kernel/memory/semantic_text_memory.py @@ -9,10 +9,10 @@ from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class SemanticTextMemory(SemanticTextMemoryBase): """Class for semantic text memory.""" diff --git 
a/python/semantic_kernel/memory/semantic_text_memory_base.py b/python/semantic_kernel/memory/semantic_text_memory_base.py index af35d34635e0..74c4c48a67c9 100644 --- a/python/semantic_kernel/memory/semantic_text_memory_base.py +++ b/python/semantic_kernel/memory/semantic_text_memory_base.py @@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, Any, TypeVar from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.memory.memory_query_result import MemoryQueryResult @@ -12,7 +12,7 @@ SemanticTextMemoryT = TypeVar("SemanticTextMemoryT", bound="SemanticTextMemoryBase") -@experimental +@experimental_class class SemanticTextMemoryBase(KernelBaseModel): """Base class for semantic text memory.""" diff --git a/python/semantic_kernel/memory/volatile_memory_store.py b/python/semantic_kernel/memory/volatile_memory_store.py index 9736efc6235d..9b3ab4ccb65d 100644 --- a/python/semantic_kernel/memory/volatile_memory_store.py +++ b/python/semantic_kernel/memory/volatile_memory_store.py @@ -8,12 +8,12 @@ from semantic_kernel.exceptions import ServiceResourceNotFoundError from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class VolatileMemoryStore(MemoryStoreBase): """A volatile memory store that stores data in memory.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/actors/actor_state_key.py b/python/semantic_kernel/processes/dapr_runtime/actors/actor_state_key.py index 0be0701795c0..f1701c338184 100644 --- a/python/semantic_kernel/processes/dapr_runtime/actors/actor_state_key.py +++ b/python/semantic_kernel/processes/dapr_runtime/actors/actor_state_key.py @@ -2,10 +2,10 @@ from enum import Enum -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class ActorStateKeys(Enum): """Keys used to store actor state in Dapr.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/actors/event_buffer_actor.py b/python/semantic_kernel/processes/dapr_runtime/actors/event_buffer_actor.py index b74b3d233539..1a5f235e0628 100644 --- a/python/semantic_kernel/processes/dapr_runtime/actors/event_buffer_actor.py +++ b/python/semantic_kernel/processes/dapr_runtime/actors/event_buffer_actor.py @@ -8,12 +8,12 @@ from semantic_kernel.processes.dapr_runtime.actors.actor_state_key import ActorStateKeys from semantic_kernel.processes.dapr_runtime.interfaces.message_buffer_interface import MessageBufferInterface -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger = logging.getLogger(__name__) -@experimental +@experimental_class class EventBufferActor(Actor, MessageBufferInterface): """Represents a message buffer actor that manages a queue of JSON strings representing events.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/actors/external_event_buffer_actor.py b/python/semantic_kernel/processes/dapr_runtime/actors/external_event_buffer_actor.py index 
213d491e413e..1938056b4f23 100644 --- a/python/semantic_kernel/processes/dapr_runtime/actors/external_event_buffer_actor.py +++ b/python/semantic_kernel/processes/dapr_runtime/actors/external_event_buffer_actor.py @@ -11,12 +11,12 @@ from semantic_kernel.processes.dapr_runtime.interfaces.external_event_buffer_interface import ( ExternalEventBufferInterface, ) -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger = logging.getLogger(__name__) -@experimental +@experimental_class class ExternalEventBufferActor(Actor, ExternalEventBufferInterface): """Represents a message buffer actor that follows the MessageBuffer abstract class.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/actors/message_buffer_actor.py b/python/semantic_kernel/processes/dapr_runtime/actors/message_buffer_actor.py index bc1a2bca9bbf..b94616a7ec6e 100644 --- a/python/semantic_kernel/processes/dapr_runtime/actors/message_buffer_actor.py +++ b/python/semantic_kernel/processes/dapr_runtime/actors/message_buffer_actor.py @@ -9,12 +9,12 @@ from semantic_kernel.processes.dapr_runtime.actors.actor_state_key import ActorStateKeys from semantic_kernel.processes.dapr_runtime.interfaces.message_buffer_interface import MessageBufferInterface -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class MessageBufferActor(Actor, MessageBufferInterface): """Represents a message buffer actor that follows the MessageBuffer abstract class.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/actors/process_actor.py b/python/semantic_kernel/processes/dapr_runtime/actors/process_actor.py index 09e2ab083bdd..a57e44a4ba58 100644 --- a/python/semantic_kernel/processes/dapr_runtime/actors/process_actor.py +++ b/python/semantic_kernel/processes/dapr_runtime/actors/process_actor.py @@ -5,7 +5,6 @@ import json import logging import uuid -from collections.abc import Callable, MutableSequence, Sequence from queue import Queue from typing import Any @@ -38,29 +37,27 @@ from semantic_kernel.processes.process_event import ProcessEvent from semantic_kernel.processes.process_message import ProcessMessage from semantic_kernel.processes.process_message_factory import ProcessMessageFactory -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class ProcessActor(StepActor, ProcessInterface): """A local process that contains a collection of steps.""" - def __init__(self, ctx: ActorRuntimeContext, actor_id: ActorId, kernel: Kernel, factories: dict[str, Callable]): + def __init__(self, ctx: ActorRuntimeContext, actor_id: ActorId, kernel: Kernel): """Initializes a new instance of ProcessActor. Args: ctx: The actor runtime context. actor_id: The unique ID for the actor. kernel: The Kernel dependency to be injected. - factories: The factory dictionary that contains step types to factory methods. 
""" - super().__init__(ctx, actor_id, kernel, factories) + super().__init__(ctx, actor_id, kernel) self.kernel = kernel - self.factories = factories - self.steps: MutableSequence[StepInterface] = [] - self.step_infos: MutableSequence[DaprStepInfo] = [] + self.steps: list[StepInterface] = [] + self.step_infos: list[DaprStepInfo] = [] self.initialize_task: bool | None = False self.external_event_queue: Queue = Queue() self.process_task: asyncio.Task | None = None @@ -133,7 +130,7 @@ async def start(self, keep_alive: bool = True) -> None: if not self.process_task or self.process_task.done(): self.process_task = asyncio.create_task(self.internal_execute(keep_alive=keep_alive)) - async def run_once(self, process_event: KernelProcessEvent | str | None) -> None: + async def run_once(self, process_event: str) -> None: """Starts the process with an initial event and waits for it to finish. Args: @@ -148,9 +145,7 @@ async def run_once(self, process_event: KernelProcessEvent | str | None) -> None actor_interface=ExternalEventBufferInterface, ) try: - await external_event_queue.enqueue( - process_event.model_dump_json() if isinstance(process_event, KernelProcessEvent) else process_event - ) + await external_event_queue.enqueue(process_event) logger.info(f"Run once for process event: {process_event}") @@ -174,7 +169,7 @@ async def stop(self): with contextlib.suppress(asyncio.CancelledError): await self.process_task - async def initialize_step(self, input: str) -> None: + async def initialize_step(self): """Initializes the step.""" # The process does not need any further initialization pass @@ -220,13 +215,9 @@ async def to_dapr_process_info(self) -> DaprProcessInfo: process_state = KernelProcessState(self.name, self.id.id) step_tasks = [step.to_dapr_step_info() for step in self.steps] - steps: Sequence[str] = await asyncio.gather(*step_tasks) + steps = await asyncio.gather(*step_tasks) return DaprProcessInfo( - inner_step_python_type=self.inner_step_type, - edges=self.process.edges, - state=process_state, - # steps are model dumps of the classes, which pydantic can parse back. - steps=steps, # type: ignore + inner_step_python_type=self.inner_step_type, edges=self.process.edges, state=process_state, steps=steps ) async def handle_message(self, message: ProcessMessage) -> None: @@ -260,7 +251,7 @@ async def _initialize_process_actor( self.output_edges = {kvp[0]: list(kvp[1]) for kvp in self.process.edges.items()} for step in self.step_infos: - step_actor: StepInterface | None = None + step_actor = None # The current step should already have a name. 
assert step.state and step.state.name is not None # nosec @@ -292,7 +283,7 @@ async def _initialize_process_actor( assert step.state and step.state.id is not None # nosec scoped_step_id = self._scoped_actor_id(ActorId(step.state.id)) - step_actor = ActorProxy.create( # type: ignore + step_actor: StepInterface = ActorProxy.create( # type: ignore actor_type=f"{StepActor.__name__}", actor_id=scoped_step_id, actor_interface=StepInterface, diff --git a/python/semantic_kernel/processes/dapr_runtime/actors/step_actor.py b/python/semantic_kernel/processes/dapr_runtime/actors/step_actor.py index 08e719237caf..7b38c7633a31 100644 --- a/python/semantic_kernel/processes/dapr_runtime/actors/step_actor.py +++ b/python/semantic_kernel/processes/dapr_runtime/actors/step_actor.py @@ -4,8 +4,6 @@ import importlib import json import logging -from collections.abc import Callable -from inspect import isawaitable from queue import Queue from typing import Any @@ -39,27 +37,25 @@ from semantic_kernel.processes.process_message_factory import ProcessMessageFactory from semantic_kernel.processes.process_types import get_generic_state_type from semantic_kernel.processes.step_utils import find_input_channels, get_fully_qualified_name -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class StepActor(Actor, StepInterface, KernelProcessMessageChannel): """Represents a step actor that follows the Step abstract class.""" - def __init__(self, ctx: ActorRuntimeContext, actor_id: ActorId, kernel: Kernel, factories: dict[str, Callable]): + def __init__(self, ctx: ActorRuntimeContext, actor_id: ActorId, kernel: Kernel): """Initializes a new instance of StepActor. Args: ctx: The actor runtime context. actor_id: The unique ID for the actor. kernel: The Kernel dependency to be injected. - factories: The factory dictionary to use for creating the step. """ super().__init__(ctx, actor_id) self.kernel = kernel - self.factories: dict[str, Callable] = factories self.parent_process_id: str | None = None self.step_info: DaprStepInfo | None = None self.initialize_task: bool | None = False @@ -176,38 +172,31 @@ def _get_class_from_string(self, full_class_name: str): async def activate_step(self): """Initializes the step.""" - # Instantiate an instance of the inner step object and retrieve its class reference. - if self.factories and self.inner_step_type in self.factories: - step_object = self.factories[self.inner_step_type]() - if isawaitable(step_object): - step_object = await step_object - step_cls = step_object.__class__ - step_instance: KernelProcessStep = step_object # type: ignore - else: - step_cls = self._get_class_from_string(self.inner_step_type) - step_instance: KernelProcessStep = step_cls() # type: ignore + # Instantiate an instance of the inner step object + step_cls = self._get_class_from_string(self.inner_step_type) + + step_instance: KernelProcessStep = step_cls() # type: ignore kernel_plugin = self.kernel.add_plugin( - step_instance, - self.step_info.state.name if self.step_info.state else "default_name", + step_instance, self.step_info.state.name if self.step_info.state else "default_name" ) - # Load the kernel functions. + # Load the kernel functions for name, f in kernel_plugin.functions.items(): self.functions[name] = f - # Initialize the input channels. 
+ # Initialize the input channels self.initial_inputs = find_input_channels(channel=self, functions=self.functions) self.inputs = {k: {kk: vv for kk, vv in v.items()} if v else {} for k, v in self.initial_inputs.items()} - # Use the existing state or create a new one if not provided. + # Use the existing state or create a new one if not provided state_object = self.step_info.state - # Extract TState from inner_step_type using the class reference. + # Extract TState from inner_step_type t_state = get_generic_state_type(step_cls) if t_state is not None: - # Create state_type as KernelProcessStepState[TState]. + # Create state_type as KernelProcessStepState[TState] state_type = KernelProcessStepState[t_state] if state_object is None: @@ -217,7 +206,7 @@ async def activate_step(self): state=None, ) else: - # Ensure that state_object is an instance of the expected type. + # Make sure state_object is an instance of state_type if not isinstance(state_object, KernelProcessStepState): error_message = "State object is not of the expected type." raise KernelException(error_message) @@ -226,13 +215,15 @@ async def activate_step(self): ActorStateKeys.StepStateType.value, get_fully_qualified_name(t_state), ) + await self._state_manager.try_add_state( ActorStateKeys.StepStateJson.value, json.dumps(state_object.model_dump()), ) + await self._state_manager.save_state() - # Initialize state_object.state if it is not already set. + # Make sure that state_object.state is not None if state_object.state is None: try: state_object.state = t_state() @@ -240,8 +231,9 @@ async def activate_step(self): error_message = f"Cannot instantiate state of type {t_state}: {e}" raise KernelException(error_message) else: - # The step has no user-defined state; use the base KernelProcessStepState. + # The step has no user-defined state; use the base KernelProcessStepState state_type = KernelProcessStepState + if state_object is None: state_object = state_type( name=step_cls.__name__, @@ -253,7 +245,7 @@ async def activate_step(self): error_message = "The state object for the KernelProcessStep could not be created." raise KernelException(error_message) - # Set the step state and activate the step with the state object. 
+ # Set the step state and activate the step with the state object self.step_state = state_object await step_instance.activate(state_object) @@ -332,7 +324,7 @@ async def handle_message(self, message: ProcessMessage): raise ProcessFunctionNotFoundException(f"Function {target_function} not found in plugin {self.name}") invoke_result = None - event_name: str = "" + event_name = None event_value = None try: @@ -340,8 +332,6 @@ async def handle_message(self, message: ProcessMessage): f"Invoking plugin `{function.plugin_name}` and function `{function.name}` with arguments: {arguments}" ) invoke_result = await self.invoke_function(function, self.kernel, arguments) - if invoke_result is None: - raise KernelException(f"Function {target_function} returned None.") event_name = f"{target_function}.OnResult" event_value = invoke_result.value @@ -408,9 +398,7 @@ async def to_dapr_step_info(self) -> str: raise ValueError("The inner step type must be initialized before converting to DaprStepInfo.") step_info = DaprStepInfo( - inner_step_python_type=self.inner_step_type, - state=self.step_info.state, - edges=self.step_info.edges, + inner_step_python_type=self.inner_step_type, state=self.step_info.state, edges=self.step_info.edges ) return step_info.model_dump_json() diff --git a/python/semantic_kernel/processes/dapr_runtime/dapr_actor_registration.py b/python/semantic_kernel/processes/dapr_runtime/dapr_actor_registration.py index 6e2dfb13562b..88a0378f5395 100644 --- a/python/semantic_kernel/processes/dapr_runtime/dapr_actor_registration.py +++ b/python/semantic_kernel/processes/dapr_runtime/dapr_actor_registration.py @@ -1,6 +1,5 @@ # Copyright (c) Microsoft. All rights reserved. -from collections.abc import Callable from typing import TYPE_CHECKING from dapr.actor import ActorId @@ -20,32 +19,22 @@ from semantic_kernel.kernel import Kernel -def create_actor_factories(kernel: "Kernel", factories: dict[str, Callable] | None = None) -> tuple: +def create_actor_factories(kernel: "Kernel") -> tuple: """Creates actor factories for ProcessActor and StepActor.""" - if factories is None: - factories = {} - def process_actor_factory( - ctx: ActorRuntimeContext, - actor_id: ActorId, - ) -> ProcessActor: - return ProcessActor(ctx, actor_id, kernel=kernel, factories=factories) + def process_actor_factory(ctx: ActorRuntimeContext, actor_id: ActorId) -> ProcessActor: + return ProcessActor(ctx, actor_id, kernel) - def step_actor_factory( - ctx: ActorRuntimeContext, - actor_id: ActorId, - ) -> StepActor: - return StepActor(ctx, actor_id, kernel=kernel, factories=factories) + def step_actor_factory(ctx: ActorRuntimeContext, actor_id: ActorId) -> StepActor: + return StepActor(ctx, actor_id, kernel=kernel) return process_actor_factory, step_actor_factory # Asynchronous registration for FastAPI -async def register_fastapi_dapr_actors( - actor: FastAPIDaprActor, kernel: "Kernel", factories: dict[str, Callable] | None = None -) -> None: +async def register_fastapi_dapr_actors(actor: FastAPIDaprActor, kernel: "Kernel") -> None: """Registers the actors with the Dapr runtime for use with a FastAPI app.""" - process_actor_factory, step_actor_factory = create_actor_factories(kernel, factories) + process_actor_factory, step_actor_factory = create_actor_factories(kernel) await actor.register_actor(ProcessActor, actor_factory=process_actor_factory) await actor.register_actor(StepActor, actor_factory=step_actor_factory) await actor.register_actor(EventBufferActor) @@ -54,11 +43,9 @@ async def register_fastapi_dapr_actors( # 
Synchronous registration for Flask -def register_flask_dapr_actors( - actor: FlaskDaprActor, kernel: "Kernel", factory: dict[str, Callable] | None = None -) -> None: +def register_flask_dapr_actors(actor: FlaskDaprActor, kernel: "Kernel") -> None: """Registers the actors with the Dapr runtime for use with a Flask app.""" - process_actor_factory, step_actor_factory = create_actor_factories(kernel, factory) + process_actor_factory, step_actor_factory = create_actor_factories(kernel) actor.register_actor(ProcessActor, actor_factory=process_actor_factory) actor.register_actor(StepActor, actor_factory=step_actor_factory) actor.register_actor(EventBufferActor) diff --git a/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process.py b/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process.py index f95e0db2bd94..cf8a3dabb09a 100644 --- a/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process.py +++ b/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process.py @@ -6,13 +6,13 @@ from semantic_kernel.exceptions.process_exceptions import ProcessInvalidConfigurationException from semantic_kernel.processes.dapr_runtime.dapr_kernel_process_context import DaprKernelProcessContext from semantic_kernel.processes.kernel_process.kernel_process_event import KernelProcessEvent -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_function if TYPE_CHECKING: from semantic_kernel.processes.kernel_process.kernel_process import KernelProcess -@experimental +@experimental_function async def start( process: "KernelProcess", initial_event: KernelProcessEvent | str | Enum, diff --git a/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process_context.py b/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process_context.py index 6cbb4f369f00..b6f5780daab2 100644 --- a/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process_context.py +++ b/python/semantic_kernel/processes/dapr_runtime/dapr_kernel_process_context.py @@ -9,10 +9,10 @@ from semantic_kernel.processes.dapr_runtime.interfaces.process_interface import ProcessInterface from semantic_kernel.processes.kernel_process.kernel_process import KernelProcess from semantic_kernel.processes.kernel_process.kernel_process_event import KernelProcessEvent -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class DaprKernelProcessContext: """A Dapr kernel process context.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/dapr_process_info.py b/python/semantic_kernel/processes/dapr_runtime/dapr_process_info.py index f573c20c2726..4a93ad10e66a 100644 --- a/python/semantic_kernel/processes/dapr_runtime/dapr_process_info.py +++ b/python/semantic_kernel/processes/dapr_runtime/dapr_process_info.py @@ -1,7 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. 
-from collections.abc import MutableSequence from typing import Literal from pydantic import Field @@ -10,15 +9,15 @@ from semantic_kernel.processes.kernel_process.kernel_process import KernelProcess from semantic_kernel.processes.kernel_process.kernel_process_state import KernelProcessState from semantic_kernel.processes.kernel_process.kernel_process_step_info import KernelProcessStepInfo -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class DaprProcessInfo(DaprStepInfo): """A Dapr process info.""" - type: Literal["DaprProcessInfo"] = "DaprProcessInfo" # type: ignore - steps: MutableSequence["DaprStepInfo | DaprProcessInfo"] = Field(default_factory=list) + type: Literal["DaprProcessInfo"] = Field("DaprProcessInfo") # type: ignore + steps: list["DaprStepInfo | DaprProcessInfo"] = Field(default_factory=list) def to_kernel_process(self) -> KernelProcess: """Converts the Dapr process info to a kernel process.""" @@ -41,7 +40,7 @@ def from_kernel_process(cls, kernel_process: KernelProcess) -> "DaprProcessInfo" raise ValueError("Kernel process must be provided") dapr_step_info = DaprStepInfo.from_kernel_step_info(kernel_process) - dapr_steps: MutableSequence[DaprProcessInfo | DaprStepInfo] = [] + dapr_steps: list[DaprProcessInfo | DaprStepInfo] = [] for step in kernel_process.steps: if isinstance(step, KernelProcess): diff --git a/python/semantic_kernel/processes/dapr_runtime/dapr_step_info.py b/python/semantic_kernel/processes/dapr_runtime/dapr_step_info.py index c0cb7490f899..85f149709490 100644 --- a/python/semantic_kernel/processes/dapr_runtime/dapr_step_info.py +++ b/python/semantic_kernel/processes/dapr_runtime/dapr_step_info.py @@ -11,14 +11,14 @@ from semantic_kernel.processes.kernel_process.kernel_process_step_info import KernelProcessStepInfo from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState from semantic_kernel.processes.step_utils import get_fully_qualified_name -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class DaprStepInfo(KernelBaseModel): """A Dapr step info.""" - type: Literal["DaprStepInfo"] = "DaprStepInfo" + type: Literal["DaprStepInfo"] = Field("DaprStepInfo") inner_step_python_type: str state: KernelProcessStepState edges: dict[str, list[KernelProcessEdge]] = Field(default_factory=dict) diff --git a/python/semantic_kernel/processes/dapr_runtime/interfaces/event_buffer_interface.py b/python/semantic_kernel/processes/dapr_runtime/interfaces/event_buffer_interface.py index e0de52953d8d..3f6312b7ff37 100644 --- a/python/semantic_kernel/processes/dapr_runtime/interfaces/event_buffer_interface.py +++ b/python/semantic_kernel/processes/dapr_runtime/interfaces/event_buffer_interface.py @@ -4,10 +4,10 @@ from dapr.actor import ActorInterface, actormethod -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class EventBufferInterface(ActorInterface, ABC): """Abstract base class for an event buffer that follows the ActorInterface.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/interfaces/external_event_buffer_interface.py b/python/semantic_kernel/processes/dapr_runtime/interfaces/external_event_buffer_interface.py 
index 3e9fbe212de5..af1bb8686d01 100644 --- a/python/semantic_kernel/processes/dapr_runtime/interfaces/external_event_buffer_interface.py +++ b/python/semantic_kernel/processes/dapr_runtime/interfaces/external_event_buffer_interface.py @@ -5,10 +5,10 @@ from dapr.actor import ActorInterface, actormethod -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class ExternalEventBufferInterface(ActorInterface, ABC): """Abstract base class for an external event buffer that follows the ActorInterface.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/interfaces/message_buffer_interface.py b/python/semantic_kernel/processes/dapr_runtime/interfaces/message_buffer_interface.py index a69bc55da549..c1591c219b85 100644 --- a/python/semantic_kernel/processes/dapr_runtime/interfaces/message_buffer_interface.py +++ b/python/semantic_kernel/processes/dapr_runtime/interfaces/message_buffer_interface.py @@ -4,10 +4,10 @@ from dapr.actor import ActorInterface, actormethod -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class MessageBufferInterface(ActorInterface, ABC): """Abstract base class for a message event buffer that follows the ActorInterface.""" diff --git a/python/semantic_kernel/processes/dapr_runtime/interfaces/process_interface.py b/python/semantic_kernel/processes/dapr_runtime/interfaces/process_interface.py index 7eac1ce61384..63268ed58fbc 100644 --- a/python/semantic_kernel/processes/dapr_runtime/interfaces/process_interface.py +++ b/python/semantic_kernel/processes/dapr_runtime/interfaces/process_interface.py @@ -2,18 +2,15 @@ from abc import ABC, abstractmethod -from typing import TYPE_CHECKING from dapr.actor import ActorInterface, actormethod -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.processes.dapr_runtime.dapr_process_info import DaprProcessInfo +from semantic_kernel.processes.kernel_process.kernel_process_event import KernelProcessEvent +from semantic_kernel.utils.experimental_decorator import experimental_class -if TYPE_CHECKING: - from semantic_kernel.processes.dapr_runtime.dapr_process_info import DaprProcessInfo - from semantic_kernel.processes.kernel_process.kernel_process_event import KernelProcessEvent - -@experimental +@experimental_class class ProcessInterface(ActorInterface, ABC): """Abstract base class for a process that follows the ActorInterface.""" @@ -38,7 +35,7 @@ async def start(self, keep_alive: bool) -> None: @abstractmethod @actormethod(name="run_once") - async def run_once(self, process_event: "KernelProcessEvent | str | None") -> None: + async def run_once(self, process_event: KernelProcessEvent) -> None: """Starts the process with an initial event and then waits for the process to finish. :param process_event: Required. The KernelProcessEvent to start the process with. 
diff --git a/python/semantic_kernel/processes/dapr_runtime/interfaces/step_interface.py b/python/semantic_kernel/processes/dapr_runtime/interfaces/step_interface.py index 630a254207a1..987e52103cb1 100644 --- a/python/semantic_kernel/processes/dapr_runtime/interfaces/step_interface.py +++ b/python/semantic_kernel/processes/dapr_runtime/interfaces/step_interface.py @@ -5,10 +5,10 @@ from dapr.actor import ActorInterface, actormethod -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class StepInterface(ActorInterface, ABC): """Abstract base class for a step in the process workflow.""" diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process.py b/python/semantic_kernel/processes/kernel_process/kernel_process.py index 6b6dea0fc21d..c7f90e85959c 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process.py @@ -1,6 +1,5 @@ # Copyright (c) Microsoft. All rights reserved. -from collections.abc import Callable from typing import TYPE_CHECKING, Any from pydantic import Field @@ -8,35 +7,25 @@ from semantic_kernel.processes.kernel_process.kernel_process_edge import KernelProcessEdge from semantic_kernel.processes.kernel_process.kernel_process_state import KernelProcessState from semantic_kernel.processes.kernel_process.kernel_process_step_info import KernelProcessStepInfo -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.processes.kernel_process.kernel_process_edge import KernelProcessEdge -@experimental +@experimental_class class KernelProcess(KernelProcessStepInfo): """A kernel process.""" steps: list[KernelProcessStepInfo] = Field(default_factory=list) - factories: dict[str, Callable] = Field(default_factory=dict) def __init__( self, state: KernelProcessState, steps: list[KernelProcessStepInfo], edges: dict[str, list["KernelProcessEdge"]] | None = None, - factories: dict[str, Callable] | None = None, ): - """Initialize the kernel process. - - Args: - state: The state of the process. - steps: The steps of the process. - edges: The edges of the process. Defaults to None. - factories: The factories of the process. This allows for the creation of - steps that require complex dependencies that cannot be JSON serialized or deserialized. 
- """ + """Initialize the kernel process.""" if not state: raise ValueError("state cannot be None") if not steps: @@ -54,7 +43,4 @@ def __init__( "output_edges": edges or {}, } - if factories: - args["factories"] = factories - super().__init__(**args) diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_edge.py b/python/semantic_kernel/processes/kernel_process/kernel_process_edge.py index 8e654c388bd0..26f41dd9ab34 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_edge.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_edge.py @@ -3,10 +3,10 @@ from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.processes.kernel_process.kernel_process_function_target import KernelProcessFunctionTarget -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class KernelProcessEdge(KernelBaseModel): """Represents an edge between steps.""" diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_event.py b/python/semantic_kernel/processes/kernel_process/kernel_process_event.py index 2dc14331b2a4..b51efb334f64 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_event.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_event.py @@ -6,10 +6,10 @@ from pydantic import ConfigDict from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class KernelProcessEventVisibility(Enum): """Visibility of a kernel process event.""" @@ -21,7 +21,7 @@ class KernelProcessEventVisibility(Enum): Internal = "Internal" -@experimental +@experimental_class class KernelProcessEvent(KernelBaseModel): """A kernel process event.""" @@ -29,4 +29,4 @@ class KernelProcessEvent(KernelBaseModel): data: Any | None = None visibility: KernelProcessEventVisibility = KernelProcessEventVisibility.Internal - model_config = ConfigDict(use_enum_values=False) + model_config = ConfigDict(use_enum_values=True) diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_function_target.py b/python/semantic_kernel/processes/kernel_process/kernel_process_function_target.py index 2372253e26b6..ead28bb8ff55 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_function_target.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_function_target.py @@ -1,10 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. 
from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class KernelProcessFunctionTarget(KernelBaseModel): """The target of a function call in a kernel process.""" diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_message_channel.py b/python/semantic_kernel/processes/kernel_process/kernel_process_message_channel.py index 7337666118a3..566a787878ad 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_message_channel.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_message_channel.py @@ -3,10 +3,10 @@ from abc import ABC, abstractmethod from semantic_kernel.processes.local_runtime.local_event import KernelProcessEvent -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class KernelProcessMessageChannel(ABC): """Abstract base class for emitting events from a step.""" diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_state.py b/python/semantic_kernel/processes/kernel_process/kernel_process_state.py index 0cb2e53d2aab..641e29e518f6 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_state.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_state.py @@ -1,10 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class KernelProcessState(KernelProcessStepState): """The state of a kernel process.""" diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_step.py b/python/semantic_kernel/processes/kernel_process/kernel_process_step.py index 653a2e0ba19e..887dcfac47e1 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_step.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_step.py @@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, Generic, TypeVar from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState @@ -12,7 +12,7 @@ TState = TypeVar("TState") -@experimental +@experimental_class class KernelProcessStep(ABC, KernelBaseModel, Generic[TState]): """A KernelProcessStep Base class for process steps.""" diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_step_context.py b/python/semantic_kernel/processes/kernel_process/kernel_process_step_context.py index 4ff52477ca0e..8b90b204a5cc 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_step_context.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_step_context.py @@ -6,10 +6,10 @@ from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.processes.kernel_process.kernel_process_message_channel import KernelProcessMessageChannel from semantic_kernel.processes.local_runtime.local_event import 
KernelProcessEvent -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class KernelProcessStepContext(KernelBaseModel): """The context of a step in a kernel process.""" @@ -17,9 +17,9 @@ class KernelProcessStepContext(KernelBaseModel): def __init__(self, channel: KernelProcessMessageChannel): """Initialize the step context.""" - super().__init__(step_message_channel=channel) # type: ignore + super().__init__(step_message_channel=channel) - async def emit_event(self, process_event: "KernelProcessEvent | str | Enum | None", **kwargs) -> None: + async def emit_event(self, process_event: "KernelProcessEvent | str | Enum", **kwargs) -> None: """Emit an event from the current step. It is possible to either specify a `KernelProcessEvent` object or the ID of the event @@ -34,9 +34,10 @@ async def emit_event(self, process_event: "KernelProcessEvent | str | Enum | Non if process_event is None: raise ProcessEventUndefinedException("Process event cannot be None") + if isinstance(process_event, Enum): + process_event = process_event.value + if not isinstance(process_event, KernelProcessEvent): - process_event = KernelProcessEvent( - id=process_event.value if isinstance(process_event, Enum) else process_event, **kwargs - ) + process_event = KernelProcessEvent(id=process_event, **kwargs) await self.step_message_channel.emit_event(process_event) diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_step_info.py b/python/semantic_kernel/processes/kernel_process/kernel_process_step_info.py index f60499d2bc8d..e4591e67f257 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_step_info.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_step_info.py @@ -4,10 +4,10 @@ from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.processes.kernel_process.kernel_process_edge import KernelProcessEdge from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class KernelProcessStepInfo(KernelBaseModel): """Information about a step in a kernel process.""" diff --git a/python/semantic_kernel/processes/kernel_process/kernel_process_step_state.py b/python/semantic_kernel/processes/kernel_process/kernel_process_step_state.py index a0c494f9e749..802823e4de64 100644 --- a/python/semantic_kernel/processes/kernel_process/kernel_process_step_state.py +++ b/python/semantic_kernel/processes/kernel_process/kernel_process_step_state.py @@ -3,12 +3,12 @@ from typing import Generic, TypeVar from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class TState = TypeVar("TState") -@experimental +@experimental_class class KernelProcessStepState(KernelBaseModel, Generic[TState]): """The state of a step in a kernel process.""" diff --git a/python/semantic_kernel/processes/local_runtime/local_event.py b/python/semantic_kernel/processes/local_runtime/local_event.py index 6075e49f798d..cee3d64321e9 100644 --- a/python/semantic_kernel/processes/local_runtime/local_event.py +++ b/python/semantic_kernel/processes/local_runtime/local_event.py @@ -7,10 +7,10 @@ 
KernelProcessEvent, KernelProcessEventVisibility, ) -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class LocalEvent(KernelBaseModel): """An event that is local to a namespace.""" diff --git a/python/semantic_kernel/processes/local_runtime/local_kernel_process.py b/python/semantic_kernel/processes/local_runtime/local_kernel_process.py index aee1b8faa693..6a7408b5b29b 100644 --- a/python/semantic_kernel/processes/local_runtime/local_kernel_process.py +++ b/python/semantic_kernel/processes/local_runtime/local_kernel_process.py @@ -6,14 +6,14 @@ from semantic_kernel.exceptions.process_exceptions import ProcessInvalidConfigurationException from semantic_kernel.processes.local_runtime.local_event import KernelProcessEvent from semantic_kernel.processes.local_runtime.local_kernel_process_context import LocalKernelProcessContext -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_function if TYPE_CHECKING: from semantic_kernel.kernel import Kernel from semantic_kernel.processes.kernel_process.kernel_process import KernelProcess -@experimental +@experimental_function async def start( process: "KernelProcess", kernel: "Kernel", initial_event: KernelProcessEvent | str | Enum, **kwargs ) -> LocalKernelProcessContext: diff --git a/python/semantic_kernel/processes/local_runtime/local_kernel_process_context.py b/python/semantic_kernel/processes/local_runtime/local_kernel_process_context.py index f3ca76107512..e0e5d0b1cc80 100644 --- a/python/semantic_kernel/processes/local_runtime/local_kernel_process_context.py +++ b/python/semantic_kernel/processes/local_runtime/local_kernel_process_context.py @@ -5,14 +5,14 @@ from semantic_kernel.kernel import Kernel from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.processes.local_runtime.local_process import LocalProcess -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.processes.kernel_process.kernel_process import KernelProcess from semantic_kernel.processes.local_runtime.local_event import KernelProcessEvent -@experimental +@experimental_class class LocalKernelProcessContext(KernelBaseModel): """A local kernel process context.""" @@ -33,10 +33,9 @@ def __init__(self, process: "KernelProcess", kernel: "Kernel"): process=process, kernel=kernel, parent_process_id=None, - factories=process.factories, ) - super().__init__(local_process=local_process) # type: ignore + super().__init__(local_process=local_process) async def start_with_event(self, initial_event: "KernelProcessEvent") -> None: """Starts the local process with an initial event.""" diff --git a/python/semantic_kernel/processes/local_runtime/local_message.py b/python/semantic_kernel/processes/local_runtime/local_message.py index 26c50f62d833..ea67aad3a3aa 100644 --- a/python/semantic_kernel/processes/local_runtime/local_message.py +++ b/python/semantic_kernel/processes/local_runtime/local_message.py @@ -5,10 +5,10 @@ from pydantic import Field from semantic_kernel.kernel_pydantic import KernelBaseModel -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class 
LocalMessage(KernelBaseModel): """A message that is local to a namespace.""" @@ -16,5 +16,5 @@ class LocalMessage(KernelBaseModel): destination_id: str = Field(...) function_name: str = Field(...) values: dict[str, Any | None] = Field(...) - target_event_id: str | None = Field(default=None) - target_event_data: Any | None = Field(default=None) + target_event_id: str | None = Field(None) + target_event_data: Any | None = Field(None) diff --git a/python/semantic_kernel/processes/local_runtime/local_message_factory.py b/python/semantic_kernel/processes/local_runtime/local_message_factory.py index 2d4ac42e01a6..78e6d6260857 100644 --- a/python/semantic_kernel/processes/local_runtime/local_message_factory.py +++ b/python/semantic_kernel/processes/local_runtime/local_message_factory.py @@ -4,10 +4,10 @@ from semantic_kernel.processes.kernel_process.kernel_process_edge import KernelProcessEdge from semantic_kernel.processes.local_runtime.local_message import LocalMessage -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class LocalMessageFactory: """Factory class to create LocalMessage instances.""" diff --git a/python/semantic_kernel/processes/local_runtime/local_process.py b/python/semantic_kernel/processes/local_runtime/local_process.py index ac47fc30716b..eeb03b7888c2 100644 --- a/python/semantic_kernel/processes/local_runtime/local_process.py +++ b/python/semantic_kernel/processes/local_runtime/local_process.py @@ -4,7 +4,6 @@ import contextlib import logging import uuid -from collections.abc import Callable from queue import Queue from typing import TYPE_CHECKING, Any @@ -24,7 +23,7 @@ from semantic_kernel.processes.local_runtime.local_message import LocalMessage from semantic_kernel.processes.local_runtime.local_message_factory import LocalMessageFactory from semantic_kernel.processes.local_runtime.local_step import LocalStep -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.processes.kernel_process.kernel_process import KernelProcess @@ -32,7 +31,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class LocalProcess(LocalStep): """A local process that contains a collection of steps.""" @@ -43,15 +42,8 @@ class LocalProcess(LocalStep): initialize_task: bool | None = False external_event_queue: Queue = Field(default_factory=Queue) process_task: asyncio.Task | None = None - factories: dict[str, Callable] = Field(default_factory=dict) - - def __init__( - self, - process: "KernelProcess", - kernel: Kernel, - factories: dict[str, Callable] | None = None, - parent_process_id: str | None = None, - ): + + def __init__(self, process: "KernelProcess", kernel: Kernel, parent_process_id: str | None = None): """Initializes the local process.""" args: dict[str, Any] = { "step_info": process, @@ -62,9 +54,6 @@ def __init__( "initialize_task": False, } - if factories: - args["factories"] = factories - super().__init__(**args) def ensure_initialized(self): @@ -135,7 +124,6 @@ def initialize_process(self): process = LocalProcess( process=step, kernel=self.kernel, - factories=self.factories, parent_process_id=self.id, ) @@ -145,10 +133,9 @@ def initialize_process(self): assert step.state and step.state.id is not None # nosec # Create a LocalStep for the step - local_step = LocalStep( # type: ignore 
+ local_step = LocalStep( step_info=step, kernel=self.kernel, - factories=self.factories, parent_process_id=self.id, ) @@ -229,17 +216,16 @@ async def enqueue_step_messages(self, step: LocalStep, message_channel: Queue[Lo """Processes events emitted by the given step and enqueues them.""" all_step_events = step.get_all_events() for step_event in all_step_events: - # must come first because emitting the step event modifies its namespace - for edge in step.get_edge_for_event(step_event.id): - message = LocalMessageFactory.create_from_edge(edge, step_event.data) - message_channel.put(message) - if step_event.visibility == KernelProcessEventVisibility.Public: if isinstance(step_event, KernelProcessEvent): await self.emit_event(step_event) # type: ignore elif isinstance(step_event, LocalEvent): await self.emit_local_event(step_event) # type: ignore + for edge in step.get_edge_for_event(step_event.id): + message = LocalMessageFactory.create_from_edge(edge, step_event.data) + message_channel.put(message) + def dispose(self): """Clean up resources.""" if self.process_task: diff --git a/python/semantic_kernel/processes/local_runtime/local_step.py b/python/semantic_kernel/processes/local_runtime/local_step.py index 1b6d6e43c2ed..a5856d1fb812 100644 --- a/python/semantic_kernel/processes/local_runtime/local_step.py +++ b/python/semantic_kernel/processes/local_runtime/local_step.py @@ -3,8 +3,6 @@ import asyncio import logging import uuid -from collections.abc import Callable -from inspect import isawaitable from queue import Queue from typing import Any @@ -21,18 +19,19 @@ from semantic_kernel.processes.kernel_process.kernel_process_edge import KernelProcessEdge from semantic_kernel.processes.kernel_process.kernel_process_event import KernelProcessEvent from semantic_kernel.processes.kernel_process.kernel_process_message_channel import KernelProcessMessageChannel +from semantic_kernel.processes.kernel_process.kernel_process_step import KernelProcessStep from semantic_kernel.processes.kernel_process.kernel_process_step_info import KernelProcessStepInfo from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState from semantic_kernel.processes.local_runtime.local_event import LocalEvent from semantic_kernel.processes.local_runtime.local_message import LocalMessage from semantic_kernel.processes.process_types import get_generic_state_type -from semantic_kernel.processes.step_utils import find_input_channels, get_fully_qualified_name -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.processes.step_utils import find_input_channels +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class LocalStep(KernelProcessMessageChannel, KernelBaseModel): """A local step that is part of a local process.""" @@ -48,7 +47,6 @@ class LocalStep(KernelProcessMessageChannel, KernelBaseModel): output_edges: dict[str, list[KernelProcessEdge]] = Field(default_factory=dict) parent_process_id: str | None = None init_lock: asyncio.Lock = Field(default_factory=asyncio.Lock, exclude=True) - factories: dict[str, Callable] @model_validator(mode="before") @classmethod @@ -150,7 +148,7 @@ async def handle_message(self, message: LocalMessage): raise ProcessFunctionNotFoundException(f"Function {target_function} not found in plugin {self.name}") invoke_result = None - event_name: str = "" + event_name = None event_value = None try: @@ -158,8 
+156,6 @@ async def handle_message(self, message: LocalMessage): f"Invoking plugin `{function.plugin_name}` and function `{function.name}` with arguments: {arguments}" ) invoke_result = await self.invoke_function(function, self.kernel, arguments) - if invoke_result is None: - raise KernelException(f"Function {target_function} returned None.") event_name = f"{target_function}.OnResult" event_value = invoke_result.value except Exception as ex: @@ -189,16 +185,8 @@ async def initialize_step(self): """Initializes the step.""" # Instantiate an instance of the inner step object step_cls = self.step_info.inner_step_type - factory = ( - self.factories.get(get_fully_qualified_name(self.step_info.inner_step_type)) if self.factories else None - ) - if factory: - step_instance = factory() - if isawaitable(step_instance): - step_instance = await step_instance - step_cls = type(step_instance) - else: - step_instance = step_cls() # type: ignore + + step_instance: KernelProcessStep = step_cls() # type: ignore kernel_plugin = self.kernel.add_plugin( step_instance, self.step_info.state.name if self.step_info.state else "default_name" diff --git a/python/semantic_kernel/processes/process_builder.py b/python/semantic_kernel/processes/process_builder.py index 934bee91ab42..c4e024298af3 100644 --- a/python/semantic_kernel/processes/process_builder.py +++ b/python/semantic_kernel/processes/process_builder.py @@ -2,7 +2,6 @@ import contextlib import inspect -from collections.abc import Callable from copy import copy from enum import Enum from typing import TYPE_CHECKING @@ -18,14 +17,13 @@ from semantic_kernel.processes.process_step_builder import ProcessStepBuilder from semantic_kernel.processes.process_step_edge_builder import ProcessStepEdgeBuilder from semantic_kernel.processes.process_types import TState, TStep -from semantic_kernel.processes.step_utils import get_fully_qualified_name -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.processes.kernel_process.kernel_process import KernelProcess -@experimental +@experimental_class class ProcessBuilder(ProcessStepBuilder): """A builder for a process.""" @@ -34,38 +32,20 @@ class ProcessBuilder(ProcessStepBuilder): has_parent_process: bool = False steps: list["ProcessStepBuilder"] = Field(default_factory=list) - factories: dict[str, Callable] = Field(default_factory=dict) def add_step( self, step_type: type[TStep], name: str | None = None, initial_state: TState | None = None, - factory_function: Callable | None = None, **kwargs, ) -> ProcessStepBuilder[TState, TStep]: - """Register a step type with optional constructor arguments. - - Args: - step_type: The step type. - name: The name of the step. Defaults to None. - initial_state: The initial state of the step. Defaults to None. - factory_function: The factory function. Allows for a callable that is used to create the step instance - that may have complex dependencies that cannot be JSON serialized or deserialized. Defaults to None. - kwargs: Additional keyword arguments. - - Returns: - The process step builder. 
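For illustration only (not part of the patch): the add_step change above keeps the inspect.isclass guard, and the shape of that registration API can be sketched standalone. TinyBuilder and MyStep below are placeholders, not Semantic Kernel types.

# Minimal standalone sketch (not Semantic Kernel code) of registering a step
# class the way add_step validates it above. Names are placeholders.
import inspect


class TinyBuilder:
    def __init__(self) -> None:
        self.steps: list[type] = []

    def add_step(self, step_type: type, name: str | None = None) -> type:
        # Reject instances: only class objects are accepted, mirroring the
        # inspect.isclass check shown in the diff above.
        if not inspect.isclass(step_type):
            raise TypeError(f"Expected a class type, but got an instance of {type(step_type).__name__}")
        self.steps.append(step_type)
        return step_type


class MyStep:
    pass


builder = TinyBuilder()
builder.add_step(MyStep)        # ok
try:
    builder.add_step(MyStep())  # raises TypeError
except TypeError as ex:
    print(ex)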
- """ + """Register a step type with optional constructor arguments.""" if not inspect.isclass(step_type): raise ProcessInvalidConfigurationException( f"Expected a class type, but got an instance of {type(step_type).__name__}" ) - if factory_function: - fq_name = get_fully_qualified_name(step_type) - self.factories[fq_name] = factory_function - name = name or step_type.__name__ process_step_builder = ProcessStepBuilder(type=step_type, name=name, initial_state=initial_state, **kwargs) self.steps.append(process_step_builder) @@ -137,4 +117,4 @@ def build(self) -> "KernelProcess": built_edges = {key: [edge.build() for edge in edges] for key, edges in self.edges.items()} built_steps = [step.build_step() for step in self.steps] process_state = KernelProcessState(name=self.name, id=self.id if self.has_parent_process else None) - return KernelProcess(state=process_state, steps=built_steps, edges=built_edges, factories=self.factories) + return KernelProcess(state=process_state, steps=built_steps, edges=built_edges) diff --git a/python/semantic_kernel/processes/process_edge_builder.py b/python/semantic_kernel/processes/process_edge_builder.py index 56e1bf642210..0cd95d8f3b9a 100644 --- a/python/semantic_kernel/processes/process_edge_builder.py +++ b/python/semantic_kernel/processes/process_edge_builder.py @@ -6,13 +6,13 @@ from semantic_kernel.processes.process_function_target_builder import ProcessFunctionTargetBuilder from semantic_kernel.processes.process_step_builder import ProcessStepBuilder from semantic_kernel.processes.process_step_edge_builder import ProcessStepEdgeBuilder -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.processes.process_builder import ProcessBuilder -@experimental +@experimental_class class ProcessEdgeBuilder(KernelBaseModel): """A builder for a process edge.""" @@ -32,9 +32,7 @@ def send_event_to( raise TypeError("Target cannot be None") if isinstance(target, ProcessStepBuilder): - target = ProcessFunctionTargetBuilder( - step=target, parameter_name=kwargs.get("parameter_name"), function_name=kwargs.get("function_name") - ) + target = ProcessFunctionTargetBuilder(step=target, parameter_name=kwargs.get("parameter_name")) self.target = target edge_builder = ProcessStepEdgeBuilder(source=self.source, event_id=self.event_id) diff --git a/python/semantic_kernel/processes/process_end_step.py b/python/semantic_kernel/processes/process_end_step.py index 3306418ef479..fea2074286cd 100644 --- a/python/semantic_kernel/processes/process_end_step.py +++ b/python/semantic_kernel/processes/process_end_step.py @@ -4,14 +4,14 @@ from semantic_kernel.processes.kernel_process.kernel_process_step_info import KernelProcessStepInfo from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState from semantic_kernel.processes.process_step_builder import ProcessStepBuilder -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.functions import KernelFunctionMetadata from semantic_kernel.kernel import Kernel -@experimental +@experimental_class class EndStep(ProcessStepBuilder): """An end step in a process.""" diff --git a/python/semantic_kernel/processes/process_function_target_builder.py b/python/semantic_kernel/processes/process_function_target_builder.py index 00325e346858..30a695273721 
100644 --- a/python/semantic_kernel/processes/process_function_target_builder.py +++ b/python/semantic_kernel/processes/process_function_target_builder.py @@ -6,10 +6,10 @@ from semantic_kernel.processes.kernel_process.kernel_process_function_target import KernelProcessFunctionTarget from semantic_kernel.processes.process_end_step import EndStep from semantic_kernel.processes.process_step_builder import ProcessStepBuilder -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class ProcessFunctionTargetBuilder(KernelBaseModel): """A builder for a process function target.""" diff --git a/python/semantic_kernel/processes/process_step_builder.py b/python/semantic_kernel/processes/process_step_builder.py index fa259ab022ed..ee09eb5f92dd 100644 --- a/python/semantic_kernel/processes/process_step_builder.py +++ b/python/semantic_kernel/processes/process_step_builder.py @@ -15,7 +15,7 @@ from semantic_kernel.processes.kernel_process.kernel_process_step_info import KernelProcessStepInfo from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState from semantic_kernel.processes.process_types import TState, TStep, get_generic_state_type -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.functions import KernelFunctionMetadata @@ -25,7 +25,7 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental +@experimental_class class ProcessStepBuilder(KernelBaseModel, Generic[TState, TStep]): """A builder for a process step.""" @@ -189,18 +189,9 @@ def build_step(self) -> "KernelProcessStepInfo": # Return an instance of KernelProcessStepInfo with the built state and edges. return KernelProcessStepInfo(inner_step_type=step_cls, state=state_object, output_edges=built_edges) - def on_function_result(self, function_name: str | Enum) -> "ProcessStepEdgeBuilder": - """Creates a new ProcessStepEdgeBuilder for the function result. - - Args: - function_name: The function name as a string or Enum. - - Returns: - ProcessStepEdgeBuilder: The ProcessStepEdgeBuilder instance. 
- """ - function_name_str: str = function_name.value if isinstance(function_name, Enum) else function_name - - return self.on_event(f"{function_name_str}.OnResult") + def on_function_result(self, function_name: str) -> "ProcessStepEdgeBuilder": + """Creates a new ProcessStepEdgeBuilder for the function result.""" + return self.on_event(f"{function_name}.OnResult") def get_function_metadata_map( self, plugin_type, name: str | None = None, kernel: "Kernel | None" = None diff --git a/python/semantic_kernel/processes/process_step_edge_builder.py b/python/semantic_kernel/processes/process_step_edge_builder.py index e42422914f6f..ca1284fa49a8 100644 --- a/python/semantic_kernel/processes/process_step_edge_builder.py +++ b/python/semantic_kernel/processes/process_step_edge_builder.py @@ -6,13 +6,13 @@ from semantic_kernel.processes.kernel_process.kernel_process_edge import KernelProcessEdge from semantic_kernel.processes.process_end_step import EndStep from semantic_kernel.processes.process_function_target_builder import ProcessFunctionTargetBuilder -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from semantic_kernel.processes.process_step_builder import ProcessStepBuilder -@experimental +@experimental_class class ProcessStepEdgeBuilder: """A builder for a process step edge.""" diff --git a/python/semantic_kernel/prompt_template/const.py b/python/semantic_kernel/prompt_template/const.py index 46f4b4154948..ecc64e31402d 100644 --- a/python/semantic_kernel/prompt_template/const.py +++ b/python/semantic_kernel/prompt_template/const.py @@ -1,9 +1,16 @@ # Copyright (c) Microsoft. All rights reserved. -from typing import Literal +from typing import Literal, get_args -KERNEL_TEMPLATE_FORMAT_NAME: Literal["semantic-kernel"] = "semantic-kernel" -HANDLEBARS_TEMPLATE_FORMAT_NAME: Literal["handlebars"] = "handlebars" -JINJA2_TEMPLATE_FORMAT_NAME: Literal["jinja2"] = "jinja2" +KERNEL_TEMPLATE_FORMAT_NAME_TYPE = Literal["semantic-kernel"] +KERNEL_TEMPLATE_FORMAT_NAME: KERNEL_TEMPLATE_FORMAT_NAME_TYPE = get_args(KERNEL_TEMPLATE_FORMAT_NAME_TYPE)[0] +HANDLEBARS_TEMPLATE_FORMAT_NAME_TYPE = Literal["handlebars"] +HANDLEBARS_TEMPLATE_FORMAT_NAME: HANDLEBARS_TEMPLATE_FORMAT_NAME_TYPE = get_args(HANDLEBARS_TEMPLATE_FORMAT_NAME_TYPE)[ + 0 +] +JINJA2_TEMPLATE_FORMAT_NAME_TYPE = Literal["jinja2"] +JINJA2_TEMPLATE_FORMAT_NAME: JINJA2_TEMPLATE_FORMAT_NAME_TYPE = get_args(JINJA2_TEMPLATE_FORMAT_NAME_TYPE)[0] -TEMPLATE_FORMAT_TYPES = Literal["semantic-kernel", "handlebars", "jinja2"] +TEMPLATE_FORMAT_TYPES = Literal[ + KERNEL_TEMPLATE_FORMAT_NAME_TYPE, HANDLEBARS_TEMPLATE_FORMAT_NAME_TYPE, JINJA2_TEMPLATE_FORMAT_NAME_TYPE +] diff --git a/python/semantic_kernel/prompt_template/handlebars_prompt_template.py b/python/semantic_kernel/prompt_template/handlebars_prompt_template.py index 30d840bb8cc7..e2b7fc80a1d7 100644 --- a/python/semantic_kernel/prompt_template/handlebars_prompt_template.py +++ b/python/semantic_kernel/prompt_template/handlebars_prompt_template.py @@ -2,7 +2,7 @@ import logging from collections.abc import Callable -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Optional from pybars import Compiler, PybarsError from pydantic import PrivateAttr, field_validator @@ -63,7 +63,7 @@ def model_post_init(self, __context: Any) -> None: f"Invalid handlebars template: {self.prompt_template_config.template}" ) from e - async def render(self, kernel: "Kernel", arguments: 
"KernelArguments | None" = None) -> str: + async def render(self, kernel: "Kernel", arguments: Optional["KernelArguments"] = None) -> str: """Render the prompt template. Using the prompt template, replace the variables with their values diff --git a/python/semantic_kernel/prompt_template/jinja2_prompt_template.py b/python/semantic_kernel/prompt_template/jinja2_prompt_template.py index 6c4a6601e0d6..6ba85e5e2eea 100644 --- a/python/semantic_kernel/prompt_template/jinja2_prompt_template.py +++ b/python/semantic_kernel/prompt_template/jinja2_prompt_template.py @@ -2,7 +2,7 @@ import logging from collections.abc import Callable -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Optional from jinja2 import BaseLoader, TemplateError from jinja2.sandbox import ImmutableSandboxedEnvironment @@ -65,7 +65,7 @@ def model_post_init(self, _: Any) -> None: return self._env = ImmutableSandboxedEnvironment(loader=BaseLoader(), enable_async=True) - async def render(self, kernel: "Kernel", arguments: "KernelArguments | None" = None) -> str: + async def render(self, kernel: "Kernel", arguments: Optional["KernelArguments"] = None) -> str: """Render the prompt template. Using the prompt template, replace the variables with their values diff --git a/python/semantic_kernel/prompt_template/prompt_template_base.py b/python/semantic_kernel/prompt_template/prompt_template_base.py index 5261cd24a923..6cac84ba4693 100644 --- a/python/semantic_kernel/prompt_template/prompt_template_base.py +++ b/python/semantic_kernel/prompt_template/prompt_template_base.py @@ -1,7 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. from abc import ABC, abstractmethod -from collections.abc import Sequence from html import escape from typing import TYPE_CHECKING @@ -21,7 +20,7 @@ class PromptTemplateBase(KernelBaseModel, ABC): allow_dangerously_set_content: bool = False @abstractmethod - async def render(self, kernel: "Kernel", arguments: "KernelArguments | None" = None) -> str: + async def render(self, kernel: "Kernel", arguments: "KernelArguments") -> str: """Render the prompt template.""" pass @@ -63,7 +62,7 @@ def _get_allow_dangerously_set_function_output(self) -> bool: allow_dangerously_set_content = True return allow_dangerously_set_content - def _should_escape(self, name: str, input_variables: Sequence["InputVariable"]) -> bool: + def _should_escape(self, name: str, input_variables: list["InputVariable"]) -> bool: """Check if the variable should be escaped. If the PromptTemplate allows dangerously set content, then the variable will not be escaped, diff --git a/python/semantic_kernel/prompt_template/prompt_template_config.py b/python/semantic_kernel/prompt_template/prompt_template_config.py index aaec7739541e..4a0823b7a07a 100644 --- a/python/semantic_kernel/prompt_template/prompt_template_config.py +++ b/python/semantic_kernel/prompt_template/prompt_template_config.py @@ -1,6 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. import logging -from collections.abc import MutableMapping, MutableSequence, Sequence +from collections.abc import Mapping, Sequence from typing import TypeVar from pydantic import Field, field_validator, model_validator @@ -16,8 +16,6 @@ logger: logging.Logger = logging.getLogger(__name__) -_T = TypeVar("_T", bound="PromptTemplateConfig") - class PromptTemplateConfig(KernelBaseModel): """Configuration for a prompt template. 
@@ -39,9 +37,9 @@ class PromptTemplateConfig(KernelBaseModel): description: str | None = "" template: str | None = None template_format: TEMPLATE_FORMAT_TYPES = KERNEL_TEMPLATE_FORMAT_NAME - input_variables: MutableSequence[InputVariable] = Field(default_factory=list) + input_variables: list[InputVariable] = Field(default_factory=list) allow_dangerously_set_content: bool = False - execution_settings: MutableMapping[str, PromptExecutionSettings] = Field(default_factory=dict) + execution_settings: dict[str, PromptExecutionSettings] = Field(default_factory=dict) @model_validator(mode="after") def check_input_variables(self): @@ -54,12 +52,12 @@ def check_input_variables(self): @field_validator("execution_settings", mode="before") @classmethod def rewrite_execution_settings( - cls: type[_T], + cls, settings: PromptExecutionSettings | Sequence[PromptExecutionSettings] - | MutableMapping[str, PromptExecutionSettings] + | Mapping[str, PromptExecutionSettings] | None, - ) -> MutableMapping[str, PromptExecutionSettings]: + ) -> Mapping[str, PromptExecutionSettings]: """Rewrite execution settings to a dictionary.""" if not settings: return {} @@ -83,14 +81,14 @@ def get_kernel_parameter_metadata(self) -> Sequence[KernelParameterMetadata]: name=variable.name, description=variable.description, default_value=variable.default, - type_=variable.json_schema, # TODO (moonbox3): update to handle complex JSON schemas # type: ignore + type_=variable.json_schema, # TODO (moonbox3): update to handle complex JSON schemas is_required=variable.is_required, ) for variable in self.input_variables ] @classmethod - def from_json(cls: type[_T], json_str: str) -> _T: + def from_json(cls, json_str: str) -> "PromptTemplateConfig": """Create a PromptTemplateConfig instance from a JSON string.""" if not json_str: raise ValueError("json_str is empty") @@ -104,15 +102,15 @@ def from_json(cls: type[_T], json_str: str) -> _T: @classmethod def restore( - cls: type[_T], + cls, name: str, description: str, template: str, template_format: TEMPLATE_FORMAT_TYPES = KERNEL_TEMPLATE_FORMAT_NAME, - input_variables: MutableSequence[InputVariable] = [], - execution_settings: MutableMapping[str, PromptExecutionSettings] = {}, + input_variables: Sequence[InputVariable] = [], + execution_settings: Mapping[str, PromptExecutionSettings] = {}, allow_dangerously_set_content: bool = False, - ) -> _T: + ) -> "PromptTemplateConfig": """Restore a PromptTemplateConfig instance from the specified parameters. 
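As a side note on the _T = TypeVar(..., bound="PromptTemplateConfig") pattern touched above: a self-contained sketch (plain Python, no Semantic Kernel imports) of how a bound TypeVar on a classmethod keeps the return type tied to the calling subclass. Config and SpecialConfig are placeholder names.

# Standalone sketch of the bound-TypeVar classmethod pattern seen in
# PromptTemplateConfig.from_json above. Class names are placeholders.
import json
from typing import TypeVar

_T = TypeVar("_T", bound="Config")


class Config:
    def __init__(self, template: str) -> None:
        self.template = template

    @classmethod
    def from_json(cls: type[_T], json_str: str) -> _T:
        # With the TypeVar, SpecialConfig.from_json(...) is typed as
        # SpecialConfig; with a plain -> "Config" annotation it would be
        # typed as the base class even when called on a subclass.
        return cls(**json.loads(json_str))


class SpecialConfig(Config):
    pass


cfg = SpecialConfig.from_json('{"template": "Hello {{name}}"}')
assert isinstance(cfg, SpecialConfig)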
Args: diff --git a/python/semantic_kernel/services/ai_service_client_base.py b/python/semantic_kernel/services/ai_service_client_base.py index efe90bb633d1..69c12ea0b1ca 100644 --- a/python/semantic_kernel/services/ai_service_client_base.py +++ b/python/semantic_kernel/services/ai_service_client_base.py @@ -3,7 +3,7 @@ from abc import ABC from typing import TYPE_CHECKING, Annotated -from pydantic.types import StringConstraints +from pydantic import Field, StringConstraints from semantic_kernel.kernel_pydantic import KernelBaseModel @@ -23,7 +23,7 @@ class AIServiceClientBase(KernelBaseModel, ABC): """ ai_model_id: Annotated[str, StringConstraints(strip_whitespace=True, min_length=1)] - service_id: str = "" + service_id: str = Field("") def model_post_init(self, __context: object | None = None): """Update the service_id if it is not set.""" diff --git a/python/semantic_kernel/services/ai_service_selector.py b/python/semantic_kernel/services/ai_service_selector.py index 8b688e7ae17d..0cdb5347f239 100644 --- a/python/semantic_kernel/services/ai_service_selector.py +++ b/python/semantic_kernel/services/ai_service_selector.py @@ -24,8 +24,8 @@ class AIServiceSelector: def select_ai_service( self, kernel: "KernelServicesExtension", - function: "KernelFunction | None" = None, - arguments: "KernelArguments | None" = None, + function: "KernelFunction", + arguments: "KernelArguments", type_: type[AI_SERVICE_CLIENT_TYPE] | tuple[type[AI_SERVICE_CLIENT_TYPE], ...] | None = None, ) -> tuple["AIServiceClientBase", "PromptExecutionSettings"]: """Select an AI Service on a first come, first served basis. @@ -33,12 +33,6 @@ def select_ai_service( Starts with execution settings in the arguments, followed by the execution settings from the function. If the same service_id is in both, the one in the arguments will be used. - - Args: - kernel: The kernel used. - function: The function used. (optional) - arguments: The arguments used. (optional) - type_: The type of service to select. (optional) """ if type_ is None: from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase @@ -46,7 +40,7 @@ def select_ai_service( type_ = (TextCompletionClientBase, ChatCompletionClientBase) # type: ignore - execution_settings_dict = arguments.execution_settings if arguments and arguments.execution_settings else {} + execution_settings_dict = arguments.execution_settings or {} if func_exec_settings := getattr(function, "prompt_execution_settings", None): for id, settings in func_exec_settings.items(): if id not in execution_settings_dict: diff --git a/python/semantic_kernel/services/kernel_services_extension.py b/python/semantic_kernel/services/kernel_services_extension.py index 37d425ce16d8..13b6aea5e3ae 100644 --- a/python/semantic_kernel/services/kernel_services_extension.py +++ b/python/semantic_kernel/services/kernel_services_extension.py @@ -49,20 +49,10 @@ def rewrite_services( return services def select_ai_service( - self, - function: "KernelFunction | None" = None, - arguments: "KernelArguments | None" = None, - type: type[AI_SERVICE_CLIENT_TYPE] | tuple[type[AI_SERVICE_CLIENT_TYPE], ...] | None = None, + self, function: "KernelFunction", arguments: "KernelArguments" ) -> tuple[AIServiceClientBase, PromptExecutionSettings]: - """Uses the AI service selector to select a service for the function. - - Args: - function (KernelFunction | None): The function used. - arguments (KernelArguments | None): The arguments used. 
- type (Type[AI_SERVICE_CLIENT_TYPE] | tuple[type[AI_SERVICE_CLIENT_TYPE], ...] | None): The type of - service to select. Defaults to None. - """ - return self.ai_service_selector.select_ai_service(self, function=function, arguments=arguments, type_=type) + """Uses the AI service selector to select a service for the function.""" + return self.ai_service_selector.select_ai_service(self, function, arguments) def get_service( self, diff --git a/python/semantic_kernel/template_engine/protocols/code_renderer.py b/python/semantic_kernel/template_engine/protocols/code_renderer.py index 4f196e69f48d..f88d7d74571e 100644 --- a/python/semantic_kernel/template_engine/protocols/code_renderer.py +++ b/python/semantic_kernel/template_engine/protocols/code_renderer.py @@ -1,6 +1,5 @@ # Copyright (c) Microsoft. All rights reserved. -from abc import abstractmethod from typing import TYPE_CHECKING, Protocol, runtime_checkable if TYPE_CHECKING: @@ -12,7 +11,6 @@ class CodeRenderer(Protocol): """Protocol for dynamic code blocks that need async IO to be rendered.""" - @abstractmethod async def render_code(self, kernel: "Kernel", arguments: "KernelArguments") -> str: """Render the block using the given context. diff --git a/python/semantic_kernel/template_engine/protocols/text_renderer.py b/python/semantic_kernel/template_engine/protocols/text_renderer.py index 4483bf7b6d2a..5c9e94e3c1a3 100644 --- a/python/semantic_kernel/template_engine/protocols/text_renderer.py +++ b/python/semantic_kernel/template_engine/protocols/text_renderer.py @@ -1,6 +1,5 @@ # Copyright (c) Microsoft. All rights reserved. -from abc import abstractmethod from typing import TYPE_CHECKING, Optional, Protocol, runtime_checkable if TYPE_CHECKING: @@ -12,7 +11,6 @@ class TextRenderer(Protocol): """Protocol for static (text) blocks that don't need async rendering.""" - @abstractmethod def render(self, kernel: "Kernel", arguments: Optional["KernelArguments"] = None) -> str: """Render the block using only the given variables. diff --git a/python/semantic_kernel/utils/async_utils.py b/python/semantic_kernel/utils/async_utils.py deleted file mode 100644 index 4d2d2d50249a..000000000000 --- a/python/semantic_kernel/utils/async_utils.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -from collections.abc import Callable -from functools import partial -from typing import Any - - -async def run_in_executor(executor: Any, func: Callable, *args, **kwargs) -> Any: - """Run a function in an executor.""" - return await asyncio.get_event_loop().run_in_executor(executor, partial(func, *args, **kwargs)) diff --git a/python/semantic_kernel/utils/experimental_decorator.py b/python/semantic_kernel/utils/experimental_decorator.py new file mode 100644 index 000000000000..7b28cc0e4064 --- /dev/null +++ b/python/semantic_kernel/utils/experimental_decorator.py @@ -0,0 +1,32 @@ +# Copyright (c) Microsoft. All rights reserved. + +from collections.abc import Callable +from typing import TypeVar + +T = TypeVar("T", bound=type) + + +def experimental_function(func: Callable) -> Callable: + """Decorator to mark a function as experimental.""" + if callable(func): + if func.__doc__: + func.__doc__ += "\n\nNote: This function is experimental and may change in the future." + else: + func.__doc__ = "Note: This function is experimental and may change in the future." 
+ + setattr(func, "is_experimental", True) + + return func + + +def experimental_class(cls: T) -> T: + """Decorator to mark a class as experimental.""" + if isinstance(cls, type): + if cls.__doc__: + cls.__doc__ += "\n\nNote: This class is experimental and may change in the future." + else: + cls.__doc__ = "Note: This class is experimental and may change in the future." + + setattr(cls, "is_experimental", True) + + return cls diff --git a/python/semantic_kernel/utils/feature_stage_decorator.py b/python/semantic_kernel/utils/feature_stage_decorator.py deleted file mode 100644 index 322934617cd8..000000000000 --- a/python/semantic_kernel/utils/feature_stage_decorator.py +++ /dev/null @@ -1,153 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from collections.abc import Callable -from typing import Any, TypeVar, cast - -T = TypeVar("T", bound=type[Any] | Callable[..., Any]) - -DEFAULT_RC_NOTE = ( - "Features marked with this status are nearing completion and are considered " - "stable for most purposes, but may still incur minor refinements or " - "optimizations before achieving full general availability." -) - -""" -Example usage: - -@experimental -class MyExperimentalClass: - '''A class that is still evolving rapidly.''' - pass - -@stage(status="experimental") -class MyExperimentalClass: - '''A class that is still evolving rapidly.''' - pass - -@experimental -def my_experimental_function(): - '''A function that is still evolving rapidly.''' - pass - -@release_candidate -class MyRCClass: - '''A class that is nearly final, but still in release-candidate stage.''' - pass - -@release_candidate("1.23.1-rc1") -class MyRCClass: - '''A class that is nearly final, but still in release-candidate stage.''' - pass -""" - - -def _update_docstring(obj: T, note: str) -> None: - """Append or set the docstring of the given object with the specified note.""" - if obj.__doc__: - obj.__doc__ += f"\n\n{note}" - else: - obj.__doc__ = note - - -def stage( - status: str = "experimental", - version: str | None = None, - note: str | None = None, -) -> Callable[[T], T]: - """A general-purpose decorator for marking a function or a class. - - It updates the docstring and attaches 'stage_status' (and optionally - 'stage_version') as metadata. A custom 'note' may be provided to - override the default appended text. - - Args: - status: The development stage (e.g., 'experimental', 'release_candidate', etc.). - version: Optional version or release info (e.g., '1.21.0-rc4'). - note: A custom note to append to the docstring. If omitted, a default - note is used to indicate the stage and possible changes. - - Returns: - A decorator that updates the docstring and metadata of - the target function/class. - """ - - def decorator(obj: T) -> T: - entity_type = "class" if isinstance(obj, type) else "function" - ver_text = f" (Version: {version})" if version else "" - default_note = f"Note: This {entity_type} is marked as '{status}'{ver_text} and may change in the future." - final_note = note if note else default_note - - _update_docstring(obj, final_note) - setattr(obj, "stage_status", status) - if version: - setattr(obj, "stage_version", version) - - return obj - - return decorator - - -def experimental(obj: T) -> T: - """Decorator specifically for 'experimental' features. - - It uses the general 'stage' decorator but also attaches - 'is_experimental = True'. 
- """ - decorated = stage(status="experimental")(obj) - setattr(decorated, "is_experimental", True) - return decorated - - -def release_candidate( - func: T | str | None = None, - *, - version: str | None = None, - doc_string: str | None = None, -) -> T: - """Decorator that designates a function/class as being in a 'release candidate' state. - - By default, applies a descriptive note indicating near-completion and possible minor refinements - before achieving general availability. You may override this with a custom 'doc_string' if needed. - - Usage: - 1) @release_candidate - 2) @release_candidate() - 3) @release_candidate("1.21.3-rc1") - 4) @release_candidate(version="1.21.3-rc1") - 5) @release_candidate(doc_string="Custom RC note...") - 6) @release_candidate(version="1.21.3-rc1", doc_string="Custom RC note...") - - Args: - func: - - In cases (1) or (2), this is the function/class being decorated. - - In cases (3) or (4), this may be a version string or None. - version: - The RC version string, if provided. - doc_string: - An optional custom note to append to the docstring, overriding - the default RC descriptive note. - - Returns: - The decorated object, with an updated docstring and - 'is_release_candidate = True'. - """ - from semantic_kernel import DEFAULT_RC_VERSION - - def _apply(obj: T, ver: str, note: str | None) -> T: - ver_text = f" (Version: {ver})" if ver else "" - rc_note = note if note is not None else f"{DEFAULT_RC_NOTE}{ver_text}" - - decorated = stage(status="release_candidate", version=ver, note=rc_note)(obj) - setattr(decorated, "is_release_candidate", True) - return decorated - - if func is not None and callable(func): - ver = version or DEFAULT_RC_VERSION - return _apply(cast(T, func), ver, doc_string) - - ver_str = func if isinstance(func, str) else version - - def wrapper(obj: T) -> T: - return _apply(obj, ver_str or DEFAULT_RC_VERSION, doc_string) - - return wrapper # type: ignore diff --git a/python/semantic_kernel/utils/list_handler.py b/python/semantic_kernel/utils/list_handler.py index 9876f63b024d..713ef87721d3 100644 --- a/python/semantic_kernel/utils/list_handler.py +++ b/python/semantic_kernel/utils/list_handler.py @@ -1,8 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
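For context on the decorator swap running through this patch, a small usage sketch of the experimental_class / experimental_function decorators added above; it assumes the package containing the new experimental_decorator module is importable, and Widget / do_thing are placeholder names.

# Standalone usage sketch of the experimental decorators added in
# semantic_kernel/utils/experimental_decorator.py above.
from semantic_kernel.utils.experimental_decorator import (
    experimental_class,
    experimental_function,
)


@experimental_class
class Widget:
    """A toy class used only for this example."""


@experimental_function
def do_thing() -> str:
    """A toy function used only for this example."""
    return "done"


# Both decorators append a note to the docstring and set a marker attribute.
assert getattr(Widget, "is_experimental", False) is True
assert getattr(do_thing, "is_experimental", False) is True
assert "experimental" in (Widget.__doc__ or "")
assert "experimental" in (do_thing.__doc__ or "")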
-import asyncio -from collections.abc import AsyncGenerator, AsyncIterable, Sequence +from collections.abc import AsyncIterable, Sequence from typing import TypeVar _T = TypeVar("_T") @@ -12,10 +11,3 @@ async def desync_list(sync_list: Sequence[_T]) -> AsyncIterable[_T]: # noqa: RU """De synchronize a list of synchronous objects.""" for x in sync_list: yield x - - -async def empty_generator() -> AsyncGenerator[_T, None]: - """An empty generator, can be used to return an empty generator.""" - if False: - yield None - await asyncio.sleep(0) diff --git a/python/semantic_kernel/utils/telemetry/agent_diagnostics/decorators.py b/python/semantic_kernel/utils/telemetry/agent_diagnostics/decorators.py index 884464cb7efb..91e5f28197f4 100644 --- a/python/semantic_kernel/utils/telemetry/agent_diagnostics/decorators.py +++ b/python/semantic_kernel/utils/telemetry/agent_diagnostics/decorators.py @@ -6,10 +6,7 @@ from opentelemetry.trace import get_tracer -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent -from semantic_kernel.utils.feature_stage_decorator import experimental -from semantic_kernel.utils.telemetry.agent_diagnostics import gen_ai_attributes +from semantic_kernel.utils.experimental_decorator import experimental_function if TYPE_CHECKING: from semantic_kernel.agents.agent import Agent @@ -19,27 +16,15 @@ tracer = get_tracer(__name__) -@experimental +@experimental_function def trace_agent_invocation(invoke_func: Callable) -> Callable: """Decorator to trace agent invocation.""" - OPERATION_NAME = "invoke_agent" @functools.wraps(invoke_func) - async def wrapper_decorator( - *args: Any, **kwargs: Any - ) -> AsyncIterable[ChatMessageContent | StreamingChatMessageContent]: + async def wrapper_decorator(*args: Any, **kwargs: Any) -> AsyncIterable: agent: "Agent" = args[0] - with tracer.start_as_current_span(f"{OPERATION_NAME} {agent.name}") as span: - span.set_attributes({ - gen_ai_attributes.OPERATION: OPERATION_NAME, - gen_ai_attributes.AGENT_ID: agent.id, - gen_ai_attributes.AGENT_NAME: agent.name, - }) - - if agent.description: - span.set_attribute(gen_ai_attributes.AGENT_DESCRIPTION, agent.description) - + with tracer.start_as_current_span(agent.name): async for response in invoke_func(*args, **kwargs): yield response @@ -47,30 +32,3 @@ async def wrapper_decorator( wrapper_decorator.__agent_diagnostics__ = True # type: ignore return wrapper_decorator - - -@experimental -def trace_agent_get_response(get_response_func: Callable) -> Callable: - """Decorator to trace agent invocation.""" - OPERATION_NAME = "invoke_agent" - - @functools.wraps(get_response_func) - async def wrapper_decorator(*args: Any, **kwargs: Any) -> ChatMessageContent: - agent: "Agent" = args[0] - - with tracer.start_as_current_span(f"{OPERATION_NAME} {agent.name}") as span: - span.set_attributes({ - gen_ai_attributes.OPERATION: OPERATION_NAME, - gen_ai_attributes.AGENT_ID: agent.id, - gen_ai_attributes.AGENT_NAME: agent.name, - }) - - if agent.description: - span.set_attribute(gen_ai_attributes.AGENT_DESCRIPTION, agent.description) - - return await get_response_func(*args, **kwargs) - - # Mark the wrapper decorator as an agent diagnostics decorator - wrapper_decorator.__agent_diagnostics__ = True # type: ignore - - return wrapper_decorator diff --git a/python/semantic_kernel/utils/telemetry/agent_diagnostics/gen_ai_attributes.py 
b/python/semantic_kernel/utils/telemetry/agent_diagnostics/gen_ai_attributes.py deleted file mode 100644 index 09062516c9ed..000000000000 --- a/python/semantic_kernel/utils/telemetry/agent_diagnostics/gen_ai_attributes.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -# Constants for tracing agent activities with semantic conventions. -# Ideally, we should use the attributes from the semcov package. -# However, many of the attributes are not yet available in the package, -# so we define them here for now. - -# Activity tags -OPERATION = "gen_ai.operation.name" -AGENT_ID = "gen_ai.agent.id" -AGENT_NAME = "gen_ai.agent.name" -AGENT_DESCRIPTION = "gen_ai.agent.description" diff --git a/python/semantic_kernel/utils/telemetry/model_diagnostics/decorators.py b/python/semantic_kernel/utils/telemetry/model_diagnostics/decorators.py index a813348e704f..11ff9279faa7 100644 --- a/python/semantic_kernel/utils/telemetry/model_diagnostics/decorators.py +++ b/python/semantic_kernel/utils/telemetry/model_diagnostics/decorators.py @@ -13,10 +13,9 @@ from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent -from semantic_kernel.contents.streaming_content_mixin import StreamingContentMixin from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_function from semantic_kernel.utils.telemetry.model_diagnostics import gen_ai_attributes from semantic_kernel.utils.telemetry.model_diagnostics.model_diagnostics_settings import ModelDiagnosticSettings @@ -70,7 +69,7 @@ def filter(self, record: logging.LogRecord) -> bool: logger.addFilter(ChatHistoryMessageTimestampFilter()) -@experimental +@experimental_function def are_model_diagnostics_enabled() -> bool: """Check if model diagnostics are enabled. @@ -82,7 +81,7 @@ def are_model_diagnostics_enabled() -> bool: ) -@experimental +@experimental_function def are_sensitive_events_enabled() -> bool: """Check if sensitive events are enabled. @@ -91,7 +90,7 @@ def are_sensitive_events_enabled() -> bool: return MODEL_DIAGNOSTICS_SETTINGS.enable_otel_diagnostics_sensitive -@experimental +@experimental_function def trace_chat_completion(model_provider: str) -> Callable: """Decorator to trace chat completion activities. @@ -142,7 +141,7 @@ async def wrapper_decorator(*args: Any, **kwargs: Any) -> list[ChatMessageConten return inner_trace_chat_completion -@experimental +@experimental_function def trace_streaming_chat_completion(model_provider: str) -> Callable: """Decorator to trace streaming chat completion activities. @@ -207,7 +206,7 @@ async def wrapper_decorator( return inner_trace_streaming_chat_completion -@experimental +@experimental_function def trace_text_completion(model_provider: str) -> Callable: """Decorator to trace text completion activities. @@ -258,7 +257,7 @@ async def wrapper_decorator(*args: Any, **kwargs: Any) -> list[TextContent]: return inner_trace_text_completion -@experimental +@experimental_function def trace_streaming_text_completion(model_provider: str) -> Callable: """Decorator to trace streaming text completion activities. 
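The agent-diagnostics change above reduces to wrapping an async generator in a single OpenTelemetry span. A minimal standalone sketch of that shape, assuming opentelemetry-api is installed; the span name, trace_invocation, and produce are placeholders rather than Semantic Kernel APIs.

# Minimal standalone sketch of the span-per-invocation pattern used by
# trace_agent_invocation above. Requires opentelemetry-api.
import asyncio
import functools
from collections.abc import AsyncIterable, Callable
from typing import Any

from opentelemetry.trace import get_tracer

tracer = get_tracer(__name__)


def trace_invocation(invoke_func: Callable) -> Callable:
    @functools.wraps(invoke_func)
    async def wrapper(*args: Any, **kwargs: Any) -> AsyncIterable:
        # Keep one span open for the whole (streaming) invocation and
        # re-yield every item produced by the wrapped async generator.
        with tracer.start_as_current_span("invoke_example"):
            async for item in invoke_func(*args, **kwargs):
                yield item

    return wrapper


@trace_invocation
async def produce() -> AsyncIterable[str]:
    yield "hello"
    yield "world"


async def main() -> None:
    async for chunk in produce():
        print(chunk)


asyncio.run(main())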
@@ -435,9 +434,9 @@ def _set_completion_response( "message": completion.to_dict(), } - if isinstance(completion, ChatMessageContent): + if hasattr(completion, "finish_reason"): full_response["finish_reason"] = completion.finish_reason - if isinstance(completion, StreamingContentMixin): + if hasattr(completion, "choice_index"): full_response["index"] = completion.choice_index logger.info( diff --git a/python/semantic_kernel/utils/telemetry/model_diagnostics/model_diagnostics_settings.py b/python/semantic_kernel/utils/telemetry/model_diagnostics/model_diagnostics_settings.py index d4efd5de3a9f..ef716960153f 100644 --- a/python/semantic_kernel/utils/telemetry/model_diagnostics/model_diagnostics_settings.py +++ b/python/semantic_kernel/utils/telemetry/model_diagnostics/model_diagnostics_settings.py @@ -3,10 +3,10 @@ from typing import ClassVar from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental +from semantic_kernel.utils.experimental_decorator import experimental_class -@experimental +@experimental_class class ModelDiagnosticSettings(KernelBaseSettings): """Settings for model diagnostics. diff --git a/python/tests/conftest.py b/python/tests/conftest.py index 60bb1bda97da..3be0430a8ad4 100644 --- a/python/tests/conftest.py +++ b/python/tests/conftest.py @@ -107,9 +107,9 @@ def decorated_native_function(self) -> str: @fixture(scope="session") def experimental_plugin_class(): from semantic_kernel.functions.kernel_function_decorator import kernel_function - from semantic_kernel.utils.feature_stage_decorator import experimental + from semantic_kernel.utils.experimental_decorator import experimental_class - @experimental + @experimental_class class ExperimentalPlugin: @kernel_function(name="getLightStatus") def decorated_native_function(self) -> str: @@ -220,7 +220,6 @@ def azure_openai_unit_test_env(monkeypatch, exclude_list, override_env_param_dic "AZURE_OPENAI_TEXT_TO_IMAGE_DEPLOYMENT_NAME": "test_text_to_image_deployment", "AZURE_OPENAI_AUDIO_TO_TEXT_DEPLOYMENT_NAME": "test_audio_to_text_deployment", "AZURE_OPENAI_TEXT_TO_AUDIO_DEPLOYMENT_NAME": "test_text_to_audio_deployment", - "AZURE_OPENAI_REALTIME_DEPLOYMENT_NAME": "test_realtime_deployment", "AZURE_OPENAI_API_KEY": "test_api_key", "AZURE_OPENAI_ENDPOINT": "https://test-endpoint.com", "AZURE_OPENAI_API_VERSION": "2023-03-15-preview", @@ -257,7 +256,6 @@ def openai_unit_test_env(monkeypatch, exclude_list, override_env_param_dict): "OPENAI_TEXT_TO_IMAGE_MODEL_ID": "test_text_to_image_model_id", "OPENAI_AUDIO_TO_TEXT_MODEL_ID": "test_audio_to_text_model_id", "OPENAI_TEXT_TO_AUDIO_MODEL_ID": "test_text_to_audio_model_id", - "OPENAI_REALTIME_MODEL_ID": "test_realtime_model_id", } env_vars.update(override_env_param_dict) @@ -369,28 +367,6 @@ def azure_ai_search_unit_test_env(monkeypatch, exclude_list, override_env_param_ return env_vars -@fixture() -def mongodb_atlas_unit_test_env(monkeypatch, exclude_list, override_env_param_dict): - """Fixture to set environment variables for MongoDB Atlas Unit Tests.""" - if exclude_list is None: - exclude_list = [] - - if override_env_param_dict is None: - override_env_param_dict = {} - - env_vars = {"MONGODB_ATLAS_CONNECTION_STRING": "mongodb://test", "MONGODB_ATLAS_DATABASE_NAME": "test-database"} - - env_vars.update(override_env_param_dict) - - for key, value in env_vars.items(): - if key not in exclude_list: - monkeypatch.setenv(key, value) - else: - monkeypatch.delenv(key, raising=False) - - return env_vars - - @fixture() 
def bing_unit_test_env(monkeypatch, exclude_list, override_env_param_dict): """Fixture to set environment variables for BingConnector.""" diff --git a/python/tests/integration/agents/bedrock_agent/conftest.py b/python/tests/integration/agents/bedrock_agent/conftest.py deleted file mode 100644 index 9583a1e1819a..000000000000 --- a/python/tests/integration/agents/bedrock_agent/conftest.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from typing import Annotated - -import pytest - -from semantic_kernel.functions.kernel_function_decorator import kernel_function -from semantic_kernel.kernel import Kernel - - -class WeatherPlugin: - """Mock weather plugin.""" - - @kernel_function(description="Get real-time weather information.") - def current(self, location: Annotated[str, "The location to get the weather"]) -> str: - """Returns the current weather.""" - return f"The weather in {location} is sunny." - - -@pytest.fixture -def kernel_with_dummy_function() -> Kernel: - kernel = Kernel() - kernel.add_plugin(WeatherPlugin(), plugin_name="weather") - - return kernel diff --git a/python/tests/integration/agents/bedrock_agent/test_bedrock_agent_integration.py b/python/tests/integration/agents/bedrock_agent/test_bedrock_agent_integration.py deleted file mode 100644 index 04d35ccd1bc3..000000000000 --- a/python/tests/integration/agents/bedrock_agent/test_bedrock_agent_integration.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import uuid - -import pytest - -from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent -from semantic_kernel.contents.binary_content import BinaryContent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole - - -class TestBedrockAgentIntegration: - @pytest.fixture(autouse=True) - async def setup_and_teardown(self, request): - """Setup and teardown for the test. - - This is run for each test function, i.e. each test function will have its own instance of the agent. 
- """ - try: - self.bedrock_agent = await BedrockAgent.create_and_prepare_agent( - f"semantic-kernel-integration-test-agent-{uuid.uuid4()}", - "You are a helpful assistant that help users with their questions.", - ) - if hasattr(request, "param"): - if "enable_code_interpreter" in request.param: - await self.bedrock_agent.create_code_interpreter_action_group() - if "kernel" in request.param: - self.bedrock_agent.kernel = request.getfixturevalue(request.param.get("kernel")) - if "enable_kernel_function" in request.param: - await self.bedrock_agent.create_kernel_function_action_group() - except Exception as e: - pytest.fail("Failed to create agent") - raise e - # Yield control to the test - yield - # Clean up - try: - await self.bedrock_agent.delete_agent() - except Exception as e: - pytest.fail(f"Failed to delete agent: {e}") - raise e - - @pytest.mark.asyncio - async def test_invoke(self): - """Test invoke of the agent.""" - async for message in self.bedrock_agent.invoke(BedrockAgent.create_session_id(), "Hello"): - assert isinstance(message, ChatMessageContent) - assert message.role == AuthorRole.ASSISTANT - assert message.content is not None - - @pytest.mark.asyncio - async def test_invoke_stream(self): - """Test invoke stream of the agent.""" - async for message in self.bedrock_agent.invoke_stream(BedrockAgent.create_session_id(), "Hello"): - assert isinstance(message, StreamingChatMessageContent) - assert message.role == AuthorRole.ASSISTANT - assert message.content is not None - - @pytest.mark.asyncio - @pytest.mark.parametrize("setup_and_teardown", [{"enable_code_interpreter": True}], indirect=True) - async def test_code_interpreter(self): - """Test code interpreter.""" - input_text = """ -Create a bar chart for the following data: -Panda 5 -Tiger 8 -Lion 3 -Monkey 6 -Dolphin 2 -""" - binary_item: BinaryContent | None = None - async for message in self.bedrock_agent.invoke(BedrockAgent.create_session_id(), input_text): - assert isinstance(message, ChatMessageContent) - assert message.role == AuthorRole.ASSISTANT - if not binary_item: - binary_item = next((item for item in message.items if isinstance(item, BinaryContent)), None) - - assert binary_item - - @pytest.mark.asyncio - @pytest.mark.parametrize("setup_and_teardown", [{"enable_code_interpreter": True}], indirect=True) - async def test_code_interpreter_stream(self): - """Test code interpreter streaming.""" - input_text = """ -Create a bar chart for the following data: -Panda 5 -Tiger 8 -Lion 3 -Monkey 6 -Dolphin 2 -""" - binary_item: BinaryContent | None = None - async for message in self.bedrock_agent.invoke_stream(BedrockAgent.create_session_id(), input_text): - assert isinstance(message, StreamingChatMessageContent) - assert message.role == AuthorRole.ASSISTANT - binary_item = next((item for item in message.items if isinstance(item, BinaryContent)), None) - assert binary_item - - @pytest.mark.asyncio - @pytest.mark.parametrize( - "setup_and_teardown", - [ - { - "enable_kernel_function": True, - "kernel": "kernel_with_dummy_function", - }, - ], - indirect=True, - ) - async def test_function_calling(self): - """Test function calling.""" - async for message in self.bedrock_agent.invoke( - BedrockAgent.create_session_id(), - "What is the weather in Seattle?", - ): - assert isinstance(message, ChatMessageContent) - assert message.role == AuthorRole.ASSISTANT - assert "sunny" in message.content - - @pytest.mark.asyncio - @pytest.mark.parametrize( - "setup_and_teardown", - [ - { - "enable_kernel_function": True, - "kernel": 
"kernel_with_dummy_function", - }, - ], - indirect=True, - ) - async def test_function_calling_stream(self): - """Test function calling streaming.""" - full_message: str = "" - async for message in self.bedrock_agent.invoke_stream( - BedrockAgent.create_session_id(), - "What is the weather in Seattle?", - ): - assert isinstance(message, StreamingChatMessageContent) - assert message.role == AuthorRole.ASSISTANT - full_message += message.content - assert "sunny" in full_message diff --git a/python/tests/integration/completions/chat_completion_test_base.py b/python/tests/integration/completions/chat_completion_test_base.py index 890298556605..7a4db5b8fcfe 100644 --- a/python/tests/integration/completions/chat_completion_test_base.py +++ b/python/tests/integration/completions/chat_completion_test_base.py @@ -5,11 +5,6 @@ import sys from typing import Annotated -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - import pytest from azure.ai.inference.aio import ChatCompletionsClient from azure.identity import DefaultAzureCredential @@ -46,6 +41,11 @@ from tests.integration.completions.completion_test_base import CompletionTestBase, ServiceType from tests.utils import is_service_setup_for_testing +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + # Make sure all services are setup for before running the tests # The following exceptions apply: # 1. OpenAI and Azure OpenAI services are always setup for testing. @@ -67,6 +67,8 @@ ["ONNX_GEN_AI_CHAT_MODEL_FOLDER"], raise_if_not_set=False ) # Tests are optional for ONNX anthropic_setup: bool = is_service_setup_for_testing(["ANTHROPIC_API_KEY", "ANTHROPIC_CHAT_MODEL_ID"]) +# When testing Bedrock, after logging into AWS CLI this has been set, so we can use it to check if the service is setup +bedrock_setup: bool = is_service_setup_for_testing(["AWS_DEFAULT_REGION"], raise_if_not_set=False) # A mock plugin that contains a function that returns a complex object. 
@@ -88,9 +90,7 @@ class ChatCompletionTestBase(CompletionTestBase): """Base class for testing completion services.""" @override - @pytest.fixture( - scope="function" - ) # This needs to be scoped to function to avoid resources getting cleaned up after each test + @pytest.fixture(scope="function") def services(self) -> dict[str, tuple[ServiceType | None, type[PromptExecutionSettings] | None]]: azure_openai_setup = True azure_openai_settings = AzureOpenAISettings.create() @@ -152,27 +152,27 @@ def services(self) -> dict[str, tuple[ServiceType | None, type[PromptExecutionSe OnnxGenAIPromptExecutionSettings, ), "bedrock_amazon_titan": ( - self._try_create_bedrock_chat_completion_client("amazon.titan-text-premier-v1:0"), + BedrockChatCompletion(model_id="amazon.titan-text-premier-v1:0") if bedrock_setup else None, BedrockChatPromptExecutionSettings, ), "bedrock_ai21labs": ( - self._try_create_bedrock_chat_completion_client("ai21.jamba-1-5-mini-v1:0"), + BedrockChatCompletion(model_id="ai21.jamba-1-5-mini-v1:0") if bedrock_setup else None, BedrockChatPromptExecutionSettings, ), "bedrock_anthropic_claude": ( - self._try_create_bedrock_chat_completion_client("anthropic.claude-3-5-sonnet-20240620-v1:0"), + BedrockChatCompletion(model_id="anthropic.claude-3-5-sonnet-20240620-v1:0") if bedrock_setup else None, BedrockChatPromptExecutionSettings, ), "bedrock_cohere_command": ( - self._try_create_bedrock_chat_completion_client("cohere.command-r-v1:0"), + BedrockChatCompletion(model_id="cohere.command-r-v1:0") if bedrock_setup else None, BedrockChatPromptExecutionSettings, ), "bedrock_meta_llama": ( - self._try_create_bedrock_chat_completion_client("meta.llama3-70b-instruct-v1:0"), + BedrockChatCompletion(model_id="meta.llama3-70b-instruct-v1:0") if bedrock_setup else None, BedrockChatPromptExecutionSettings, ), "bedrock_mistralai": ( - self._try_create_bedrock_chat_completion_client("mistral.mistral-small-2402-v1:0"), + BedrockChatCompletion(model_id="mistral.mistral-small-2402-v1:0") if bedrock_setup else None, BedrockChatPromptExecutionSettings, ), } @@ -218,13 +218,3 @@ async def get_chat_completion_response( if parts: return sum(parts[1:], parts[0]) raise AssertionError("No response") - - def _try_create_bedrock_chat_completion_client(self, model_id: str) -> BedrockChatCompletion | None: - try: - return BedrockChatCompletion(model_id=model_id) - except Exception as ex: - from conftest import logger - - logger.warning(ex) - # Returning None so that the test that uses this service will be skipped - return None diff --git a/python/tests/integration/completions/test_chat_completions.py b/python/tests/integration/completions/test_chat_completions.py index f0e9a4fdb5ea..67eab08b0a90 100644 --- a/python/tests/integration/completions/test_chat_completions.py +++ b/python/tests/integration/completions/test_chat_completions.py @@ -12,12 +12,15 @@ from typing_extensions import override # pragma: no cover from semantic_kernel import Kernel -from semantic_kernel.connectors.ai import PromptExecutionSettings -from semantic_kernel.contents import AuthorRole, ChatHistory, ChatMessageContent, TextContent +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.contents import ChatMessageContent, TextContent +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.kernel_pydantic import KernelBaseModel from tests.integration.completions.chat_completion_test_base 
import ( ChatCompletionTestBase, anthropic_setup, + bedrock_setup, mistral_ai_setup, ollama_setup, onnx_setup, @@ -189,6 +192,7 @@ class Reasoning(KernelBaseModel): ChatMessageContent(role=AuthorRole.USER, items=[TextContent(text="How are you today?")]), ], {}, + marks=pytest.mark.skipif(not bedrock_setup, reason="Bedrock Environment Variables not set"), id="bedrock_amazon_titan_text_input", ), pytest.param( diff --git a/python/tests/integration/completions/test_text_completion.py b/python/tests/integration/completions/test_text_completion.py index c2e1e06d523a..7e6792de4465 100644 --- a/python/tests/integration/completions/test_text_completion.py +++ b/python/tests/integration/completions/test_text_completion.py @@ -2,7 +2,6 @@ import sys from functools import partial -from importlib import util from typing import Any if sys.version_info >= (3, 12): @@ -33,9 +32,6 @@ from tests.integration.completions.completion_test_base import CompletionTestBase, ServiceType from tests.utils import is_service_setup_for_testing, is_test_running_on_supported_platforms, retry -hugging_face_setup = util.find_spec("torch") is not None - - azure_openai_setup = True ollama_setup: bool = is_service_setup_for_testing(["OLLAMA_TEXT_MODEL_ID"]) and is_test_running_on_supported_platforms([ "Linux" @@ -45,6 +41,7 @@ onnx_setup: bool = is_service_setup_for_testing( ["ONNX_GEN_AI_TEXT_MODEL_FOLDER"], raise_if_not_set=False ) # Tests are optional for ONNX +bedrock_setup = is_service_setup_for_testing(["AWS_DEFAULT_REGION"], raise_if_not_set=False) pytestmark = pytest.mark.parametrize( "service_id, execution_settings_kwargs, inputs, kwargs", @@ -137,6 +134,7 @@ {}, ["Repeat the word Hello once"], {}, + marks=pytest.mark.skipif(not bedrock_setup, reason="Not setup"), id="bedrock_amazon_titan_text_completion", ), pytest.param( @@ -221,9 +219,7 @@ def services(self) -> dict[str, tuple[ServiceType | None, type[PromptExecutionSe service_id="patrickvonplaten/t5-tiny-random", ai_model_id="patrickvonplaten/t5-tiny-random", task="text2text-generation", - ) - if hugging_face_setup - else None, + ), HuggingFacePromptExecutionSettings, ), "hf_summ": ( @@ -231,9 +227,7 @@ def services(self) -> dict[str, tuple[ServiceType | None, type[PromptExecutionSe service_id="jotamunz/billsum_tiny_summarization", ai_model_id="jotamunz/billsum_tiny_summarization", task="summarization", - ) - if hugging_face_setup - else None, + ), HuggingFacePromptExecutionSettings, ), "hf_gen": ( @@ -241,9 +235,7 @@ def services(self) -> dict[str, tuple[ServiceType | None, type[PromptExecutionSe service_id="HuggingFaceM4/tiny-random-LlamaForCausalLM", ai_model_id="HuggingFaceM4/tiny-random-LlamaForCausalLM", task="text-generation", - ) - if hugging_face_setup - else None, + ), HuggingFacePromptExecutionSettings, ), "onnx_gen_ai": ( @@ -253,27 +245,27 @@ def services(self) -> dict[str, tuple[ServiceType | None, type[PromptExecutionSe # Amazon Bedrock supports models from multiple providers but requests to and responses from the models are # inconsistent. So we need to test each model separately. 
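The bedrock_setup flag used below follows a simple pattern: probe the environment once at import time, then gate both service construction and parametrized test cases on the result. A standalone sketch of that pattern (the helper, client, and test names are placeholders, not the repo's utilities):

# Standalone sketch of env-gated optional services in pytest, mirroring the
# bedrock_setup flag used in these tests. Names are placeholders.
import os

import pytest

# Probe once at import time; AWS_DEFAULT_REGION is what the tests here check,
# but any variable your optional backend needs would do.
bedrock_setup: bool = bool(os.environ.get("AWS_DEFAULT_REGION"))


def make_client() -> object | None:
    # Only construct the real client when the environment is configured,
    # otherwise return None so dependent tests can skip cleanly.
    return object() if bedrock_setup else None


@pytest.mark.parametrize(
    "prompt",
    [
        pytest.param(
            "Hello",
            marks=pytest.mark.skipif(not bedrock_setup, reason="Bedrock environment variables not set"),
            id="bedrock_hello",
        ),
    ],
)
def test_optional_backend(prompt: str) -> None:
    client = make_client()
    assert client is not None
    assert prompt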
"bedrock_amazon_titan": ( - self._try_create_bedrock_text_completion_client("amazon.titan-text-premier-v1:0"), + BedrockTextCompletion(model_id="amazon.titan-text-premier-v1:0") if bedrock_setup else None, BedrockTextPromptExecutionSettings, ), "bedrock_anthropic_claude": ( - self._try_create_bedrock_text_completion_client("anthropic.claude-v2"), + BedrockTextCompletion(model_id="anthropic.claude-v2") if bedrock_setup else None, BedrockTextPromptExecutionSettings, ), "bedrock_cohere_command": ( - self._try_create_bedrock_text_completion_client("cohere.command-text-v14"), + BedrockTextCompletion(model_id="cohere.command-text-v14") if bedrock_setup else None, BedrockTextPromptExecutionSettings, ), "bedrock_ai21labs": ( - self._try_create_bedrock_text_completion_client("ai21.j2-mid-v1"), + BedrockTextCompletion(model_id="ai21.j2-mid-v1") if bedrock_setup else None, BedrockTextPromptExecutionSettings, ), "bedrock_meta_llama": ( - self._try_create_bedrock_text_completion_client("meta.llama3-70b-instruct-v1:0"), + BedrockTextCompletion(model_id="meta.llama3-70b-instruct-v1:0") if bedrock_setup else None, BedrockTextPromptExecutionSettings, ), "bedrock_mistralai": ( - self._try_create_bedrock_text_completion_client("mistral.mistral-7b-instruct-v0:2"), + BedrockTextCompletion(model_id="mistral.mistral-7b-instruct-v0:2") if bedrock_setup else None, BedrockTextPromptExecutionSettings, ), } @@ -371,13 +363,3 @@ async def _test_helper( name="text completions", ) self.evaluate(response) - - def _try_create_bedrock_text_completion_client(self, model_id: str) -> BedrockTextCompletion | None: - try: - return BedrockTextCompletion(model_id=model_id) - except Exception as ex: - from conftest import logger - - logger.warning(ex) - # Returning None so that the test that uses this service will be skipped - return None diff --git a/python/tests/integration/cross_language/test_cross_language.py b/python/tests/integration/cross_language/test_cross_language.py index 1e00e855eaa8..18724c17a6c9 100644 --- a/python/tests/integration/cross_language/test_cross_language.py +++ b/python/tests/integration/cross_language/test_cross_language.py @@ -733,7 +733,7 @@ async def test_openapi_put_light_by_id(kernel: Kernel): assert request_content.get("method") == "PUT" assert request_content.get("url") == "https://127.0.0.1/Lights/1" - assert request_content.get("body") == '{"hexColor":"11EE11"}' + assert request_content.get("body") == '{"hexColor": "11EE11"}' # endregion diff --git a/python/tests/integration/embeddings/test_embedding_service_base.py b/python/tests/integration/embeddings/test_embedding_service_base.py index 160370d10195..1ecca21657e3 100644 --- a/python/tests/integration/embeddings/test_embedding_service_base.py +++ b/python/tests/integration/embeddings/test_embedding_service_base.py @@ -1,7 +1,5 @@ # Copyright (c) Microsoft. All rights reserved. -from importlib import util - import pytest from azure.ai.inference.aio import EmbeddingsClient from azure.identity import DefaultAzureCredential @@ -34,8 +32,6 @@ from semantic_kernel.utils.authentication.entra_id_authentication import get_entra_auth_token from tests.utils import is_service_setup_for_testing -hugging_face_setup = util.find_spec("torch") is not None - # Make sure all services are setup for before running the tests # The following exceptions apply: # 1. OpenAI and Azure OpenAI services are always setup for testing. 
@@ -95,9 +91,7 @@ def services(self) -> dict[str, tuple[EmbeddingGeneratorBase | None, type[Prompt PromptExecutionSettings, ), "hugging_face": ( - HuggingFaceTextEmbedding(ai_model_id="sentence-transformers/all-MiniLM-L6-v2") - if hugging_face_setup - else None, + HuggingFaceTextEmbedding(ai_model_id="sentence-transformers/all-MiniLM-L6-v2"), PromptExecutionSettings, ), "ollama": (OllamaTextEmbedding() if ollama_setup else None, OllamaEmbeddingPromptExecutionSettings), diff --git a/python/tests/integration/memory/vector_stores/azure_cosmos_db/test_azure_cosmos_db_no_sql.py b/python/tests/integration/memory/vector_stores/azure_cosmos_db/test_azure_cosmos_db_no_sql.py index e860d6387e87..5ecdddd6835d 100644 --- a/python/tests/integration/memory/vector_stores/azure_cosmos_db/test_azure_cosmos_db_no_sql.py +++ b/python/tests/integration/memory/vector_stores/azure_cosmos_db/test_azure_cosmos_db_no_sql.py @@ -2,7 +2,6 @@ import os import platform -from collections.abc import Callable from typing import Any import pytest @@ -27,11 +26,11 @@ class TestCosmosDBNoSQL(VectorStoreTestBase): async def test_list_collection_names( self, - stores: dict[str, Callable[[], VectorStore]], + stores: dict[str, VectorStore], data_model_type: type, ): """Test list collection names.""" - async with stores["azure_cosmos_db_no_sql"]() as store: + async with stores["azure_cosmos_db_no_sql"] as store: assert await store.list_collection_names() == [] collection_name = "list_collection_names" @@ -51,12 +50,12 @@ async def test_list_collection_names( async def test_collection_not_created( self, - stores: dict[str, Callable[[], VectorStore]], + stores: dict[str, VectorStore], data_model_type: type, data_record: dict[str, Any], ): """Test get without collection.""" - async with stores["azure_cosmos_db_no_sql"]() as store: + async with stores["azure_cosmos_db_no_sql"] as store: collection_name = "collection_not_created" collection = store.get_collection(collection_name, data_model_type) @@ -80,12 +79,12 @@ async def test_collection_not_created( async def test_custom_partition_key( self, - stores: dict[str, Callable[[], VectorStore]], + stores: dict[str, VectorStore], data_model_type: type, data_record: dict[str, Any], ): """Test custom partition key.""" - async with stores["azure_cosmos_db_no_sql"]() as store: + async with stores["azure_cosmos_db_no_sql"] as store: collection_name = "custom_partition_key" collection = store.get_collection( collection_name, @@ -117,12 +116,12 @@ async def test_custom_partition_key( async def test_get_include_vector( self, - stores: dict[str, Callable[[], VectorStore]], + stores: dict[str, VectorStore], data_model_type: type, data_record: dict[str, Any], ): """Test get with include_vector.""" - async with stores["azure_cosmos_db_no_sql"]() as store: + async with stores["azure_cosmos_db_no_sql"] as store: collection_name = "get_include_vector" collection = store.get_collection(collection_name, data_model_type) @@ -147,12 +146,12 @@ async def test_get_include_vector( async def test_get_not_include_vector( self, - stores: dict[str, Callable[[], VectorStore]], + stores: dict[str, VectorStore], data_model_type: type, data_record: dict[str, Any], ): """Test get with include_vector.""" - async with stores["azure_cosmos_db_no_sql"]() as store: + async with stores["azure_cosmos_db_no_sql"] as store: collection_name = "get_not_include_vector" collection = store.get_collection(collection_name, data_model_type) @@ -177,12 +176,12 @@ async def test_get_not_include_vector( async def 
test_collection_with_key_as_key_field( self, - stores: dict[str, Callable[[], VectorStore]], + stores: dict[str, VectorStore], data_model_type_with_key_as_key_field: type, data_record_with_key_as_key_field: dict[str, Any], ): """Test collection with key as key field.""" - async with stores["azure_cosmos_db_no_sql"]() as store: + async with stores["azure_cosmos_db_no_sql"] as store: collection_name = "collection_with_key_as_key_field" collection = store.get_collection(collection_name, data_model_type_with_key_as_key_field) diff --git a/python/tests/integration/memory/vector_stores/postgres/test_postgres_int.py b/python/tests/integration/memory/vector_stores/postgres/test_postgres_int.py index 4748a878a928..fb280e047a39 100644 --- a/python/tests/integration/memory/vector_stores/postgres/test_postgres_int.py +++ b/python/tests/integration/memory/vector_stores/postgres/test_postgres_int.py @@ -1,7 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. import uuid -from collections.abc import AsyncGenerator, Sequence +from collections.abc import AsyncGenerator from contextlib import asynccontextmanager from typing import Annotated, Any @@ -11,7 +11,6 @@ from pydantic import BaseModel from semantic_kernel.connectors.memory.postgres import PostgresSettings, PostgresStore -from semantic_kernel.connectors.memory.postgres.postgres_collection import PostgresCollection from semantic_kernel.data import ( DistanceFunction, IndexKind, @@ -21,7 +20,6 @@ VectorStoreRecordVectorField, vectorstoremodel, ) -from semantic_kernel.data.vector_search.vector_search_options import VectorSearchOptions from semantic_kernel.exceptions.memory_connector_exceptions import ( MemoryConnectorConnectionException, MemoryConnectorInitializationError, @@ -51,13 +49,13 @@ class SimpleDataModel(BaseModel): id: Annotated[int, VectorStoreRecordKeyField()] embedding: Annotated[ - list[float] | None, + list[float], VectorStoreRecordVectorField( index_kind=IndexKind.HNSW, dimensions=3, distance_function=DistanceFunction.COSINE_SIMILARITY, ), - ] = None + ] data: Annotated[ dict[str, Any], VectorStoreRecordDataField(has_embedding=True, embedding_property_name="embedding", property_type="JSONB"), @@ -99,9 +97,7 @@ async def vector_store() -> AsyncGenerator[PostgresStore, None]: @asynccontextmanager -async def create_simple_collection( - vector_store: PostgresStore, -) -> AsyncGenerator[PostgresCollection[int, SimpleDataModel], None]: +async def create_simple_collection(vector_store: PostgresStore): """Returns a collection with a unique name that is deleted after the context. This can be moved to use a fixture with scope=function and loop_scope=session @@ -111,7 +107,6 @@ async def create_simple_collection( suffix = str(uuid.uuid4()).replace("-", "")[:8] collection_id = f"test_collection_{suffix}" collection = vector_store.get_collection(collection_id, SimpleDataModel) - assert isinstance(collection, PostgresCollection) await collection.create_collection() try: yield collection @@ -218,7 +213,6 @@ async def test_upsert_get_and_delete_batch(vector_store: PostgresStore): # this should return only the two existing records. 
result = await simple_collection.get_batch([1, 2, 3]) assert result is not None - assert isinstance(result, Sequence) assert len(result) == 2 assert result[0] is not None assert result[0].id == record1.id @@ -232,28 +226,3 @@ async def test_upsert_get_and_delete_batch(vector_store: PostgresStore): await simple_collection.delete_batch([1, 2]) result_after_delete = await simple_collection.get_batch([1, 2]) assert result_after_delete is None - - -async def test_search(vector_store: PostgresStore): - async with create_simple_collection(vector_store) as simple_collection: - records = [ - SimpleDataModel(id=1, embedding=[1.0, 0.0, 0.0], data={"key": "value1"}), - SimpleDataModel(id=2, embedding=[0.8, 0.2, 0.0], data={"key": "value2"}), - SimpleDataModel(id=3, embedding=[0.6, 0.0, 0.4], data={"key": "value3"}), - SimpleDataModel(id=4, embedding=[1.0, 1.0, 0.0], data={"key": "value4"}), - SimpleDataModel(id=5, embedding=[0.0, 1.0, 1.0], data={"key": "value5"}), - SimpleDataModel(id=6, embedding=[1.0, 0.0, 1.0], data={"key": "value6"}), - ] - - await simple_collection.upsert_batch(records) - - try: - search_results = await simple_collection.vectorized_search( - [1.0, 0.0, 0.0], options=VectorSearchOptions(top=3, include_total_count=True) - ) - assert search_results is not None - assert search_results.total_count == 3 - assert {result.record.id async for result in search_results.results} == {1, 2, 3} - - finally: - await simple_collection.delete_batch([r.id for r in records]) diff --git a/python/tests/integration/memory/vector_stores/test_vector_store.py b/python/tests/integration/memory/vector_stores/test_vector_store.py index d47b0466af6f..23b5f9dca1c0 100644 --- a/python/tests/integration/memory/vector_stores/test_vector_store.py +++ b/python/tests/integration/memory/vector_stores/test_vector_store.py @@ -2,7 +2,6 @@ import logging import platform -from collections.abc import Callable from typing import Any import pandas as pd @@ -24,18 +23,8 @@ class TestVectorStore(VectorStoreTestBase): """ @pytest.mark.parametrize( - [ - "store_id", - "collection_name", - "collection_options", - "data_model_type", - "data_model_definition", - "distance_function", - "index_kind", - "vector_property_type", - "dimensions", - "record", - ], + "store_id, collection_name, collection_options, data_model_type, data_model_definition, distance_function, " + "index_kind, vector_property_type, dimensions, record", [ # region Redis pytest.param( @@ -333,7 +322,7 @@ class TestVectorStore(VectorStoreTestBase): id="weaviate_local_pandas_data_model", ), # endregion - # region Azure Cosmos DB + # region Azure Cosmos DB NoSQL pytest.param( "azure_cosmos_db_no_sql", "azure_cosmos_db_no_sql_array_data_model", @@ -386,53 +375,11 @@ class TestVectorStore(VectorStoreTestBase): id="azure_cosmos_db_no_sql_pandas_data_model", ), # endregion - # region Chroma - pytest.param( - "chroma", - "chroma_array_data_model", - {}, - "dataclass_vector_data_model_array", - None, - None, - None, - None, - 5, - RAW_RECORD_ARRAY, - id="chroma_array_data_model", - ), - pytest.param( - "chroma", - "chroma_list_data_model", - {}, - "dataclass_vector_data_model", - None, - None, - None, - None, - 5, - RAW_RECORD_LIST, - id="chroma_list_data_model", - ), - pytest.param( - "chroma", - "chroma_pandas_data_model", - {}, - pd.DataFrame, - "data_model_definition_pandas", - None, - None, - None, - 5, - RAW_RECORD_LIST, - id="chroma_pandas_data_model", - ), - # endregion ], ) - # region test function async def test_vector_store( self, - stores: dict[str, 
Callable[[], VectorStore]], + stores: dict[str, VectorStore], store_id: str, collection_name: str, collection_options: dict[str, Any], @@ -452,7 +399,7 @@ async def test_vector_store( data_model_definition = request.getfixturevalue(data_model_definition) try: async with ( - stores[store_id]() as vector_store, + stores[store_id] as vector_store, vector_store.get_collection( collection_name, data_model_type, data_model_definition, **collection_options ) as collection, diff --git a/python/tests/integration/memory/vector_stores/vector_store_test_base.py b/python/tests/integration/memory/vector_stores/vector_store_test_base.py index fc1a33d91517..abd78a525796 100644 --- a/python/tests/integration/memory/vector_stores/vector_store_test_base.py +++ b/python/tests/integration/memory/vector_stores/vector_store_test_base.py @@ -1,66 +1,24 @@ # Copyright (c) Microsoft. All rights reserved. -from collections.abc import Callable - import pytest +from semantic_kernel.connectors.memory.azure_ai_search.azure_ai_search_store import AzureAISearchStore +from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_no_sql_store import AzureCosmosDBNoSQLStore +from semantic_kernel.connectors.memory.qdrant.qdrant_store import QdrantStore +from semantic_kernel.connectors.memory.redis.redis_store import RedisStore +from semantic_kernel.connectors.memory.weaviate.weaviate_store import WeaviateStore from semantic_kernel.data import VectorStore -def get_redis_store(): - from semantic_kernel.connectors.memory.redis.redis_store import RedisStore - - return RedisStore() - - -def get_azure_ai_search_store(): - from semantic_kernel.connectors.memory.azure_ai_search.azure_ai_search_store import AzureAISearchStore - - return AzureAISearchStore() - - -def get_qdrant_store(): - from semantic_kernel.connectors.memory.qdrant.qdrant_store import QdrantStore - - return QdrantStore() - - -def get_qdrant_store_in_memory(): - from semantic_kernel.connectors.memory.qdrant.qdrant_store import QdrantStore - - return QdrantStore(location=":memory:") - - -def get_weaviate_store(): - from semantic_kernel.connectors.memory.weaviate.weaviate_store import WeaviateStore - - return WeaviateStore(local_host="localhost") - - -def get_azure_cosmos_db_no_sql_store(): - from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_no_sql_store import ( - AzureCosmosDBNoSQLStore, - ) - - return AzureCosmosDBNoSQLStore(database_name="test_database", create_database=True) - - -def get_chroma_store(): - from semantic_kernel.connectors.memory.chroma.chroma import ChromaStore - - return ChromaStore() - - class VectorStoreTestBase: @pytest.fixture - def stores(self) -> dict[str, Callable[[], VectorStore]]: + def stores(self) -> dict[str, VectorStore]: """Return a dictionary of vector stores to test.""" return { - "redis": get_redis_store, - "azure_ai_search": get_azure_ai_search_store, - "qdrant": get_qdrant_store, - "qdrant_in_memory": get_qdrant_store_in_memory, - "weaviate_local": get_weaviate_store, - "azure_cosmos_db_no_sql": get_azure_cosmos_db_no_sql_store, - "chroma": get_chroma_store, + "redis": RedisStore(), + "azure_ai_search": AzureAISearchStore(), + "qdrant": QdrantStore(), + "qdrant_in_memory": QdrantStore(location=":memory:"), + "weaviate_local": WeaviateStore(local_host="localhost"), + "azure_cosmos_db_no_sql": AzureCosmosDBNoSQLStore(database_name="test_database", create_database=True), } diff --git a/python/tests/samples/test_concepts.py b/python/tests/samples/test_concepts.py index 85c11c43902d..d2455d4f4d6d 100644 
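[Editor's aside, not part of the patch] The store-fixture hunks above switch from a dictionary of factory callables (each test invoked stores["..."]() before entering the context manager) to a dictionary of eagerly constructed store instances entered directly with "async with". A side-by-side sketch of the two styles, with SomeStore standing in for concrete stores such as RedisStore or QdrantStore and the fixture names chosen for illustration only:

import pytest


class SomeStore:
    # Placeholder async-context-manager store.
    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        return None


# Factory style (the "-" lines): tests call stores["some_store"]() per test run.
@pytest.fixture
def store_factories():
    return {"some_store": SomeStore}


# Instance style (the "+" lines): the fixture builds the store up front and tests write
# "async with stores['some_store'] as store:".
@pytest.fixture
def stores():
    return {"some_store": SomeStore()}


async def test_with_eager_store(stores):
    async with stores["some_store"] as store:
        assert store is not None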
--- a/python/tests/samples/test_concepts.py +++ b/python/tests/samples/test_concepts.py @@ -17,7 +17,6 @@ from samples.concepts.auto_function_calling.functions_defined_in_yaml_prompt import ( main as function_defined_in_yaml_prompt, ) -from samples.concepts.caching.semantic_caching import main as semantic_caching from samples.concepts.chat_completion.simple_chatbot import main as simple_chatbot from samples.concepts.chat_completion.simple_chatbot_kernel_function import main as simple_chatbot_kernel_function from samples.concepts.chat_completion.simple_chatbot_logit_bias import main as simple_chatbot_logit_bias @@ -27,14 +26,21 @@ from samples.concepts.filtering.function_invocation_filters import main as function_invocation_filters from samples.concepts.filtering.function_invocation_filters_stream import main as function_invocation_filters_stream from samples.concepts.filtering.prompt_filters import main as prompt_filters -from samples.concepts.filtering.retry_with_different_model import main as retry_with_different_model from samples.concepts.functions.kernel_arguments import main as kernel_arguments from samples.concepts.grounding.grounded import main as grounded from samples.concepts.images.image_generation import main as image_generation from samples.concepts.local_models.lm_studio_chat_completion import main as lm_studio_chat_completion from samples.concepts.local_models.lm_studio_text_embedding import main as lm_studio_text_embedding from samples.concepts.local_models.ollama_chat_completion import main as ollama_chat_completion -from samples.concepts.memory.simple_memory import main as simple_memory +from samples.concepts.memory.azure_cognitive_search_memory import main as azure_cognitive_search_memory +from samples.concepts.memory.memory import main as memory +from samples.concepts.planners.azure_openai_function_calling_stepwise_planner import ( + main as azure_openai_function_calling_stepwise_planner, +) +from samples.concepts.planners.openai_function_calling_stepwise_planner import ( + main as openai_function_calling_stepwise_planner, +) +from samples.concepts.planners.sequential_planner import main as sequential_planner from samples.concepts.plugins.openai_function_calling_with_custom_plugin import ( main as openai_function_calling_with_custom_plugin, ) @@ -47,22 +53,10 @@ from samples.concepts.rag.rag_with_text_memory_plugin import main as rag_with_text_memory_plugin from samples.concepts.search.bing_search_plugin import main as bing_search_plugin from samples.concepts.service_selector.custom_service_selector import main as custom_service_selector -from samples.getting_started_with_agents.chat_completion.step1_chat_completion_agent_simple import ( - main as step1_chat_completion_agent_simple, -) -from samples.getting_started_with_agents.chat_completion.step2_chat_completion_agent_with_kernel import ( - main as step2_chat_completion_agent_with_kernel, -) -from samples.getting_started_with_agents.chat_completion.step3_chat_completion_agent_plugin_simple import ( - main as step3_chat_completion_agent_plugin_simple, -) -from samples.getting_started_with_agents.chat_completion.step4_chat_completion_agent_plugin_with_kernel import ( - main as step4_chat_completion_agent_plugin_with_kernel, -) -from samples.getting_started_with_agents.chat_completion.step5_chat_completion_agent_group_chat import ( - main as step5_chat_completion_agent_group_chat, -) -from samples.getting_started_with_agents.openai_assistant.step1_assistant import main as step1_openai_assistant +from 
samples.getting_started_with_agents.step1_agent import main as step1_agent +from samples.getting_started_with_agents.step2_plugins import main as step2_plugins +from samples.getting_started_with_agents.step3_chat import main as step3_chat +from samples.getting_started_with_agents.step7_assistant import main as step7_assistant from tests.utils import retry # These environment variable names are used to control which samples are run during integration testing. @@ -71,14 +65,6 @@ MEMORY_CONCEPT_SAMPLE = "MEMORY_CONCEPT_SAMPLE" concepts = [ - param( - semantic_caching, - [], - id="semantic_caching", - marks=pytest.mark.skipif( - os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." - ), - ), param( simple_chatbot, ["Why is the sky blue in one sentence?", "exit"], @@ -160,26 +146,41 @@ ), ), param( - retry_with_different_model, + kernel_arguments, [], - id="retry_with_different_model", + id="kernel_arguments", marks=pytest.mark.skipif( - os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, - reason="Not running completion samples.", + os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." ), ), param( - kernel_arguments, + grounded, [], - id="kernel_arguments", + id="grounded", marks=pytest.mark.skipif( os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." ), ), param( - grounded, + azure_openai_function_calling_stepwise_planner, [], - id="grounded", + id="azure_openai_function_calling_stepwise_planner", + marks=pytest.mark.skipif( + os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." + ), + ), + param( + openai_function_calling_stepwise_planner, + [], + id="openai_function_calling_stepwise_planner", + marks=pytest.mark.skipif( + os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." + ), + ), + param( + sequential_planner, + [], + id="sequential_planner", marks=pytest.mark.skipif( os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." ), @@ -241,9 +242,15 @@ ), ), param( - simple_memory, + azure_cognitive_search_memory, [], - id="simple_memory", + id="azure_cognitive_search_memory", + marks=pytest.mark.skipif(os.getenv(MEMORY_CONCEPT_SAMPLE, None) is None, reason="Not running memory samples."), + ), + param( + memory, + ["What are my investments?", "exit"], + id="memory", marks=pytest.mark.skipif(os.getenv(MEMORY_CONCEPT_SAMPLE, None) is None, reason="Not running memory samples."), ), param(rag_with_text_memory_plugin, [], id="rag_with_text_memory_plugin"), @@ -278,49 +285,33 @@ ), ), param( - step1_chat_completion_agent_simple, - [], - id="step1_chat_completion_agent_simple", - marks=pytest.mark.skipif( - os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." - ), - ), - param( - step2_chat_completion_agent_with_kernel, - [], - id="step2_chat_completion_agent_with_kernel", - marks=pytest.mark.skipif( - os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." - ), - ), - param( - step3_chat_completion_agent_plugin_simple, + step1_agent, [], - id="step3_chat_completion_agent_plugin_simple", + id="step1_agent", marks=pytest.mark.skipif( os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." 
), ), param( - step4_chat_completion_agent_plugin_with_kernel, + step2_plugins, [], - id="step4_chat_completion_agent_plugin_with_kernel", + id="step2_agent_plugins", marks=pytest.mark.skipif( os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." ), ), param( - step5_chat_completion_agent_group_chat, + step3_chat, [], - id="step5_chat_completion_agent_group_chat", + id="step3_chat", marks=pytest.mark.skipif( os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." ), ), param( - step1_openai_assistant, + step7_assistant, [], - id="step1_openai_assistant", + id="step7_assistant", marks=pytest.mark.skipif( os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." ), diff --git a/python/tests/samples/test_learn_resources.py b/python/tests/samples/test_learn_resources.py index 77e045e8cc6a..0a3cab0dae3c 100644 --- a/python/tests/samples/test_learn_resources.py +++ b/python/tests/samples/test_learn_resources.py @@ -9,6 +9,7 @@ from samples.learn_resources.configuring_prompts import main as configuring_prompts from samples.learn_resources.creating_functions import main as creating_functions from samples.learn_resources.functions_within_prompts import main as functions_within_prompts +from samples.learn_resources.planner import main as planner from samples.learn_resources.plugin import main as plugin from samples.learn_resources.serializing_prompts import main as serializing_prompts from samples.learn_resources.templates import main as templates @@ -53,6 +54,14 @@ os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." ), ), + param( + planner, + [], + id="planner", + marks=mark.skipif( + os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None, reason="Not running completion samples." + ), + ), param( plugin, [], diff --git a/python/tests/unit/agents/autogen_conversable_agent/test_autogen_conversable_agent.py b/python/tests/unit/agents/autogen_conversable_agent/test_autogen_conversable_agent.py deleted file mode 100644 index 224b5ac42931..000000000000 --- a/python/tests/unit/agents/autogen_conversable_agent/test_autogen_conversable_agent.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
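[Editor's aside, not part of the patch] The sample-test hunks above all follow the same shape: each sample's main() is wrapped in a pytest.param whose skipif mark keys off an environment variable (COMPLETIONS_CONCEPT_SAMPLE, MEMORY_CONCEPT_SAMPLE), so whole groups of samples can be switched on or off per CI job. A minimal sketch of that shape, with run_sample and its inputs as placeholders:

import os

import pytest
from pytest import param

COMPLETIONS_CONCEPT_SAMPLE = "COMPLETIONS_CONCEPT_SAMPLE"


def run_sample(inputs: list[str]) -> None:
    # Placeholder for a sample's main() imported from samples.*
    ...


samples = [
    param(
        run_sample,
        ["Why is the sky blue in one sentence?", "exit"],
        id="run_sample",
        marks=pytest.mark.skipif(
            os.getenv(COMPLETIONS_CONCEPT_SAMPLE, None) is None,
            reason="Not running completion samples.",
        ),
    ),
]


@pytest.mark.parametrize("sample, inputs", samples)
def test_sample(sample, inputs):
    sample(inputs)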
- -from unittest.mock import AsyncMock, MagicMock - -import pytest -from autogen import ConversableAgent - -from semantic_kernel.agents.autogen.autogen_conversable_agent import AutoGenConversableAgent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions.agent_exceptions import AgentInvokeException - - -@pytest.fixture -def mock_conversable_agent(): - agent = MagicMock(spec=ConversableAgent) - agent.name = "MockName" - agent.description = "MockDescription" - agent.system_message = "MockSystemMessage" - return agent - - -async def test_autogen_conversable_agent_initialization(mock_conversable_agent): - agent = AutoGenConversableAgent(mock_conversable_agent, id="mock_id") - assert agent.name == "MockName" - assert agent.description == "MockDescription" - assert agent.instructions == "MockSystemMessage" - assert agent.conversable_agent == mock_conversable_agent - - -async def test_autogen_conversable_agent_get_response(mock_conversable_agent): - mock_conversable_agent.a_generate_reply = AsyncMock(return_value="Mocked assistant response") - agent = AutoGenConversableAgent(mock_conversable_agent) - - response = await agent.get_response("Hello") - assert response.role == AuthorRole.ASSISTANT - assert response.content == "Mocked assistant response" - - -async def test_autogen_conversable_agent_get_response_exception(mock_conversable_agent): - mock_conversable_agent.a_generate_reply = AsyncMock(return_value=None) - agent = AutoGenConversableAgent(mock_conversable_agent) - - with pytest.raises(AgentInvokeException): - await agent.get_response("Hello") - - -async def test_autogen_conversable_agent_invoke_with_recipient(mock_conversable_agent): - mock_conversable_agent.a_initiate_chat = AsyncMock() - mock_conversable_agent.a_initiate_chat.return_value = MagicMock( - chat_history=[ - {"role": "user", "content": "Hello from user!"}, - {"role": "assistant", "content": "Hello from assistant!"}, - ] - ) - agent = AutoGenConversableAgent(mock_conversable_agent) - recipient_agent = MagicMock(spec=AutoGenConversableAgent) - recipient_agent.conversable_agent = MagicMock(spec=ConversableAgent) - - messages = [] - async for msg in agent.invoke(recipient=recipient_agent, message="Test message", arg1="arg1"): - messages.append(msg) - - mock_conversable_agent.a_initiate_chat.assert_awaited_once() - assert len(messages) == 2 - assert messages[0].role == AuthorRole.USER - assert messages[0].content == "Hello from user!" - assert messages[1].role == AuthorRole.ASSISTANT - assert messages[1].content == "Hello from assistant!" 
- - -async def test_autogen_conversable_agent_invoke_without_recipient_string_reply(mock_conversable_agent): - mock_conversable_agent.a_generate_reply = AsyncMock(return_value="Mocked assistant response") - agent = AutoGenConversableAgent(mock_conversable_agent) - - messages = [] - async for msg in agent.invoke(message="Hello"): - messages.append(msg) - - mock_conversable_agent.a_generate_reply.assert_awaited_once() - assert len(messages) == 1 - assert messages[0].role == AuthorRole.ASSISTANT - assert messages[0].content == "Mocked assistant response" - - -async def test_autogen_conversable_agent_invoke_without_recipient_dict_reply(mock_conversable_agent): - mock_conversable_agent.a_generate_reply = AsyncMock( - return_value={ - "content": "Mocked assistant response", - "role": "assistant", - "name": "AssistantName", - } - ) - agent = AutoGenConversableAgent(mock_conversable_agent) - - messages = [] - async for msg in agent.invoke(message="Hello"): - messages.append(msg) - - mock_conversable_agent.a_generate_reply.assert_awaited_once() - assert len(messages) == 1 - assert messages[0].role == AuthorRole.ASSISTANT - assert messages[0].content == "Mocked assistant response" - assert messages[0].name == "AssistantName" - - -async def test_autogen_conversable_agent_invoke_without_recipient_unexpected_type(mock_conversable_agent): - mock_conversable_agent.a_generate_reply = AsyncMock(return_value=12345) - agent = AutoGenConversableAgent(mock_conversable_agent) - - with pytest.raises(AgentInvokeException): - async for _ in agent.invoke(message="Hello"): - pass - - -async def test_autogen_conversable_agent_invoke_with_invalid_recipient_type(mock_conversable_agent): - mock_conversable_agent.a_generate_reply = AsyncMock(return_value=12345) - agent = AutoGenConversableAgent(mock_conversable_agent) - - recipient = MagicMock() - - with pytest.raises(AgentInvokeException): - async for _ in agent.invoke(recipient=recipient, message="Hello"): - pass diff --git a/python/tests/unit/agents/azure_ai_agent/conftest.py b/python/tests/unit/agents/azure_ai_agent/conftest.py deleted file mode 100644 index 5f5b5082ae52..000000000000 --- a/python/tests/unit/agents/azure_ai_agent/conftest.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from unittest.mock import AsyncMock - -import pytest -from azure.ai.projects.aio import AIProjectClient -from azure.ai.projects.models import Agent as AzureAIAgentModel - - -@pytest.fixture -def ai_project_client() -> AsyncMock: - return AsyncMock(spec=AIProjectClient) - - -@pytest.fixture -def ai_agent_definition() -> AsyncMock: - definition = AsyncMock(spec=AzureAIAgentModel) - definition.id = "agent123" - definition.name = "agentName" - definition.description = "desc" - definition.instructions = "test agent" - - return definition diff --git a/python/tests/unit/agents/azure_ai_agent/test_agent_content_generation.py b/python/tests/unit/agents/azure_ai_agent/test_agent_content_generation.py deleted file mode 100644 index 3d912605cae6..000000000000 --- a/python/tests/unit/agents/azure_ai_agent/test_agent_content_generation.py +++ /dev/null @@ -1,279 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- - -from azure.ai.projects.models import ( - MessageDelta, - MessageDeltaChunk, - MessageDeltaImageFileContent, - MessageDeltaImageFileContentObject, - MessageDeltaTextContent, - MessageDeltaTextContentObject, - MessageDeltaTextFileCitationAnnotation, - MessageDeltaTextFileCitationAnnotationObject, - MessageDeltaTextFilePathAnnotation, - MessageDeltaTextFilePathAnnotationObject, - MessageImageFileContent, - MessageImageFileDetails, - MessageTextContent, - MessageTextDetails, - MessageTextFileCitationAnnotation, - MessageTextFileCitationDetails, - MessageTextFilePathAnnotation, - MessageTextFilePathDetails, - RunStep, - RunStepDeltaFunction, - RunStepDeltaFunctionToolCall, - RunStepDeltaToolCallObject, - ThreadMessage, -) - -from semantic_kernel.agents.azure_ai.agent_content_generation import ( - generate_annotation_content, - generate_code_interpreter_content, - generate_function_call_content, - generate_function_result_content, - generate_message_content, - generate_streaming_code_interpreter_content, - generate_streaming_function_content, - generate_streaming_message_content, - get_function_call_contents, - get_message_contents, -) -from semantic_kernel.contents.annotation_content import AnnotationContent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.file_reference_content import FileReferenceContent -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.contents.image_content import ImageContent -from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent -from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent -from semantic_kernel.contents.streaming_text_content import StreamingTextContent -from semantic_kernel.contents.text_content import TextContent -from semantic_kernel.contents.utils.author_role import AuthorRole - - -def test_get_message_contents_all_types(): - chat_msg = ChatMessageContent(role=AuthorRole.USER, content="") - chat_msg.items.append(TextContent(text="hello world")) - chat_msg.items.append(ImageContent(uri="http://example.com/image.png")) - chat_msg.items.append(FileReferenceContent(file_id="file123")) - chat_msg.items.append(FunctionResultContent(id="func1", result={"a": 1})) - results = get_message_contents(chat_msg) - assert len(results) == 4 - assert results[0]["type"] == "text" - assert results[1]["type"] == "image_url" - assert results[2]["type"] == "image_file" - assert results[3]["type"] == "text" - - -def test_generate_message_content_text_and_image(): - thread_msg = ThreadMessage( - content=[], - role="user", - ) - - image = MessageImageFileContent(image_file=MessageImageFileDetails(file_id="test_file_id")) - - text = MessageTextContent( - text=MessageTextDetails( - value="some text", - annotations=[ - MessageTextFileCitationAnnotation( - text="text", - file_citation=MessageTextFileCitationDetails(file_id="file_id", quote="some quote"), - start_index=0, - end_index=9, - ), - MessageTextFilePathAnnotation( - text="text again", - file_path=MessageTextFilePathDetails(file_id="file_id_2"), - start_index=1, - end_index=10, - ), - ], - ) - ) - - thread_msg.content = [image, text] - step = RunStep(id="step_id", run_id="run_id", thread_id="thread_id", assistant_id="assistant_id") - out = generate_message_content("assistant", thread_msg, step) - assert len(out.items) == 4 - assert 
isinstance(out.items[0], FileReferenceContent) - assert isinstance(out.items[1], TextContent) - assert isinstance(out.items[2], AnnotationContent) - assert isinstance(out.items[3], AnnotationContent) - - assert out.items[0].file_id == "test_file_id" - - assert out.items[1].text == "some text" - - assert out.items[2].file_id == "file_id" - assert out.items[2].quote == "text" - assert out.items[2].start_index == 0 - assert out.items[2].end_index == 9 - - assert out.items[3].file_id == "file_id_2" - assert out.items[3].quote == "text again" - assert out.items[3].start_index == 1 - assert out.items[3].end_index == 10 - - assert out.metadata["step_id"] == "step_id" - assert out.role == AuthorRole.USER - - -def test_generate_annotation_content(): - message_text_file_path_ann = MessageTextFilePathAnnotation( - text="some text", - file_path=MessageTextFilePathDetails(file_id="file123"), - start_index=0, - end_index=9, - ) - - message_text_file_citation_ann = MessageTextFileCitationAnnotation( - text="some text", - file_citation=MessageTextFileCitationDetails(file_id="file123"), - start_index=0, - end_index=9, - ) - - for fake_ann in [message_text_file_path_ann, message_text_file_citation_ann]: - out = generate_annotation_content(fake_ann) - assert out.file_id == "file123" - assert out.quote == "some text" - assert out.start_index == 0 - assert out.end_index == 9 - - -def test_generate_streaming_message_content_text_annotations(): - message_delta_image_file_content = MessageDeltaImageFileContent( - index=0, - image_file=MessageDeltaImageFileContentObject(file_id="image_file"), - ) - - MessageDeltaTextFileCitationAnnotation, MessageDeltaTextFilePathAnnotation - - message_delta_text_content = MessageDeltaTextContent( - index=0, - text=MessageDeltaTextContentObject( - value="some text", - annotations=[ - MessageDeltaTextFileCitationAnnotation( - index=0, - file_citation=MessageDeltaTextFileCitationAnnotationObject(file_id="file123"), - start_index=0, - end_index=9, - text="some text", - ), - MessageDeltaTextFilePathAnnotation( - index=0, - file_path=MessageDeltaTextFilePathAnnotationObject(file_id="file123"), - start_index=0, - end_index=9, - text="some text", - ), - ], - ), - ) - - delta = MessageDeltaChunk( - id="chunk123", - delta=MessageDelta(role="user", content=[message_delta_image_file_content, message_delta_text_content]), - ) - - out = generate_streaming_message_content("assistant", delta) - assert out is not None - assert out.content == "some text" - assert len(out.items) == 4 - assert out.items[0].file_id == "image_file" - assert isinstance(out.items[0], StreamingFileReferenceContent) - assert isinstance(out.items[1], StreamingTextContent) - assert isinstance(out.items[2], StreamingAnnotationContent) - - assert out.items[2].file_id == "file123" - assert out.items[2].quote == "some text" - assert out.items[2].start_index == 0 - assert out.items[2].end_index == 9 - - assert isinstance(out.items[3], StreamingAnnotationContent) - assert out.items[3].file_id == "file123" - assert out.items[3].quote == "some text" - assert out.items[3].start_index == 0 - assert out.items[3].end_index == 9 - - -def test_generate_streaming_function_content_with_function(): - step_details = RunStepDeltaToolCallObject( - tool_calls=[ - RunStepDeltaFunctionToolCall( - index=0, id="tool123", function=RunStepDeltaFunction(name="some_func", arguments={"arg": "val"}) - ) - ] - ) - - out = generate_streaming_function_content("my_agent", step_details) - assert out is not None - assert len(out.items) == 1 - assert 
isinstance(out.items[0], FunctionCallContent) - assert out.items[0].function_name == "some_func" - assert out.items[0].arguments == "{'arg': 'val'}" - - -def test_get_function_call_contents_no_action(): - run = type("ThreadRunFake", (), {"required_action": None})() - fc = get_function_call_contents(run, {}) - assert fc == [] - - -def test_get_function_call_contents_submit_tool_outputs(): - class FakeFunction: - name = "test_function" - arguments = {"arg": "val"} - - class FakeToolCall: - id = "tool_id" - function = FakeFunction() - - run = type( - "ThreadRunFake", - (), - { - "required_action": type( - "RequiredAction", (), {"submit_tool_outputs": type("FakeSubmit", (), {"tool_calls": [FakeToolCall()]})} - ) - }, - )() - function_steps = {} - fc = get_function_call_contents(run, function_steps) - assert len(fc) == 1 - assert function_steps["tool_id"].function_name == "test_function" - - -def test_generate_function_call_content(): - fcc = FunctionCallContent(id="id123", name="func_name", arguments={"x": 1}) - msg = generate_function_call_content("my_agent", [fcc]) - assert len(msg.items) == 1 - assert msg.role == AuthorRole.ASSISTANT - - -def test_generate_function_result_content(): - step = FunctionCallContent(id="123", name="func_name", arguments={"k": "v"}) - - class FakeToolCall: - function = type("Function", (), {"output": "result_data"}) - - tool_call = FakeToolCall() - msg = generate_function_result_content("my_agent", step, tool_call) - assert len(msg.items) == 1 - assert msg.items[0].result == "result_data" - assert msg.role == AuthorRole.TOOL - - -def test_generate_code_interpreter_content(): - msg = generate_code_interpreter_content("my_agent", "some_code()") - assert msg.content == "some_code()" - assert msg.metadata["code"] is True - - -def test_generate_streaming_code_interpreter_content_no_calls(): - step_details = type("Details", (), {"tool_calls": None}) - assert generate_streaming_code_interpreter_content("my_agent", step_details) is None diff --git a/python/tests/unit/agents/azure_ai_agent/test_agent_thread_actions.py b/python/tests/unit/agents/azure_ai_agent/test_agent_thread_actions.py deleted file mode 100644 index e26bf13a16d7..000000000000 --- a/python/tests/unit/agents/azure_ai_agent/test_agent_thread_actions.py +++ /dev/null @@ -1,326 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from datetime import datetime, timezone -from unittest.mock import AsyncMock, MagicMock, patch - -from azure.ai.projects.models import ( - MessageTextContent, - MessageTextDetails, - OpenAIPageableListOfRunStep, - RequiredFunctionToolCall, - RequiredFunctionToolCallDetails, - RunStep, - RunStepCodeInterpreterToolCall, - RunStepCodeInterpreterToolCallDetails, - RunStepFunctionToolCall, - RunStepFunctionToolCallDetails, - RunStepMessageCreationDetails, - RunStepMessageCreationReference, - RunStepToolCallDetails, - SubmitToolOutputsAction, - SubmitToolOutputsDetails, - ThreadMessage, - ThreadRun, -) - -from semantic_kernel.agents.azure_ai.agent_thread_actions import AgentThreadActions -from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent -from semantic_kernel.contents import FunctionCallContent, FunctionResultContent, TextContent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - - -async def test_agent_thread_actions_create_thread(): - class FakeAgentClient: - create_thread = AsyncMock(return_value=type("FakeThread", (), {"id": "thread123"})) - - class FakeClient: - agents = FakeAgentClient() - - client = FakeClient() - thread_id = await AgentThreadActions.create_thread(client) - assert thread_id == "thread123" - - -async def test_agent_thread_actions_create_message(): - class FakeAgentClient: - create_message = AsyncMock(return_value="someMessage") - - class FakeClient: - agents = FakeAgentClient() - - msg = ChatMessageContent(role=AuthorRole.USER, content="some content") - out = await AgentThreadActions.create_message(FakeClient(), "threadXYZ", msg) - assert out == "someMessage" - - -async def test_agent_thread_actions_create_message_no_content(): - class FakeAgentClient: - create_message = AsyncMock(return_value="should_not_be_called") - - class FakeClient: - agents = FakeAgentClient() - - message = ChatMessageContent(role=AuthorRole.USER, content=" ") - out = await AgentThreadActions.create_message(FakeClient(), "threadXYZ", message) - assert out is None - assert FakeAgentClient.create_message.await_count == 0 - - -async def test_agent_thread_actions_invoke(ai_project_client, ai_agent_definition): - agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) - - agent.client.agents = MagicMock() - - mock_thread_run = ThreadRun( - id="run123", - thread_id="thread123", - status="running", - instructions="test agent", - created_at=int(datetime.now(timezone.utc).timestamp()), - model="model", - ) - - agent.client.agents.create_run = AsyncMock(return_value=mock_thread_run) - - mock_run_steps = OpenAIPageableListOfRunStep( - data=[ - RunStep( - type="message_creation", - id="msg123", - thread_id="thread123", - run_id="run123", - created_at=int(datetime.now(timezone.utc).timestamp()), - completed_at=int(datetime.now(timezone.utc).timestamp()), - status="completed", - assistant_id="assistant123", - step_details=RunStepMessageCreationDetails( - message_creation=RunStepMessageCreationReference( - message_id="msg123", - ), - ), - ), - ] - ) - - agent.client.agents.list_run_steps = AsyncMock(return_value=mock_run_steps) - - mock_message = ThreadMessage( - id="msg123", - thread_id="thread123", - run_id="run123", - created_at=int(datetime.now(timezone.utc).timestamp()), - completed_at=int(datetime.now(timezone.utc).timestamp()), - status="completed", - assistant_id="assistant123", - role="assistant", - 
content=[MessageTextContent(text=MessageTextDetails(value="some message", annotations=[]))], - ) - - agent.client.agents.get_message = AsyncMock(return_value=mock_message) - - async for message in AgentThreadActions.invoke(agent=agent, thread_id="thread123", kernel=AsyncMock(spec=Kernel)): - assert message is not None - break - - -async def test_agent_thread_actions_invoke_with_requires_action(ai_project_client, ai_agent_definition): - agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) - agent.client.agents = MagicMock() - - mock_thread_run = ThreadRun( - id="run123", - thread_id="thread123", - status="running", - instructions="test agent", - created_at=int(datetime.now(timezone.utc).timestamp()), - model="model", - ) - - agent.client.agents.create_run = AsyncMock(return_value=mock_thread_run) - - poll_count = 0 - - async def mock_poll_run_status(*args, **kwargs): - nonlocal poll_count - if poll_count == 0: - mock_thread_run.status = "requires_action" - mock_thread_run.required_action = SubmitToolOutputsAction( - submit_tool_outputs=SubmitToolOutputsDetails( - tool_calls=[ - RequiredFunctionToolCall( - id="tool_call_id", - function=RequiredFunctionToolCallDetails( - name="mock_function_call", arguments={"arg": "value"} - ), - ) - ] - ) - ) - else: - mock_thread_run.status = "completed" - poll_count += 1 - return mock_thread_run - - def mock_get_function_call_contents(run: ThreadRun, function_steps: dict): - function_call_content = FunctionCallContent( - name="mock_function_call", - arguments={"arg": "value"}, - id="tool_call_id", - ) - function_steps[function_call_content.id] = function_call_content - return [function_call_content] - - mock_run_step_tool_calls = RunStep( - type="tool_calls", - id="tool_step123", - thread_id="thread123", - run_id="run123", - created_at=int(datetime.now(timezone.utc).timestamp()), - completed_at=int(datetime.now(timezone.utc).timestamp()), - status="completed", - assistant_id="assistant123", - step_details=RunStepToolCallDetails( - tool_calls=[ - RunStepCodeInterpreterToolCall( - id="tool_call_id", - code_interpreter=RunStepCodeInterpreterToolCallDetails( - input="some code", - ), - ), - RunStepFunctionToolCall( - id="tool_call_id", - function=RunStepFunctionToolCallDetails( - name="mock_function_call", - arguments={"arg": "value"}, - output="some output", - ), - ), - ] - ), - ) - - mock_run_step_message_creation = RunStep( - type="message_creation", - id="msg_step123", - thread_id="thread123", - run_id="run123", - created_at=int(datetime.now(timezone.utc).timestamp()), - completed_at=int(datetime.now(timezone.utc).timestamp()), - status="completed", - assistant_id="assistant123", - step_details=RunStepMessageCreationDetails( - message_creation=RunStepMessageCreationReference(message_id="msg123") - ), - ) - - mock_run_steps = OpenAIPageableListOfRunStep(data=[mock_run_step_tool_calls, mock_run_step_message_creation]) - agent.client.agents.list_run_steps = AsyncMock(return_value=mock_run_steps) - - mock_message = ThreadMessage( - id="msg123", - thread_id="thread123", - run_id="run123", - created_at=int(datetime.now(timezone.utc).timestamp()), - completed_at=int(datetime.now(timezone.utc).timestamp()), - status="completed", - assistant_id="assistant123", - role="assistant", - content=[MessageTextContent(text=MessageTextDetails(value="some message", annotations=[]))], - ) - agent.client.agents.get_message = AsyncMock(return_value=mock_message) - - agent.client.agents.submit_tool_outputs_to_run = AsyncMock() - - with ( - 
patch.object(AgentThreadActions, "_poll_run_status", side_effect=mock_poll_run_status), - patch( - "semantic_kernel.agents.azure_ai.agent_thread_actions.get_function_call_contents", - side_effect=mock_get_function_call_contents, - ), - ): - messages = [] - async for is_visible, content in AgentThreadActions.invoke( - agent=agent, - thread_id="thread123", - kernel=AsyncMock(spec=Kernel), - ): - messages.append((is_visible, content)) - - assert len(messages) == 4, "There should be four yields in total." - - assert isinstance(messages[0][1].items[0], FunctionCallContent) - assert isinstance(messages[1][1].items[0], TextContent) - assert messages[1][1].items[0].metadata.get("code") is True - assert isinstance(messages[2][1].items[0], FunctionResultContent) - assert isinstance(messages[3][1].items[0], TextContent) - - agent.client.agents.submit_tool_outputs_to_run.assert_awaited_once() - - -class MockEvent: - def __init__(self, event, data): - self.event = event - self.data = data - - def __iter__(self): - return iter((self.event, self.data, None)) - - -class MockRunData: - def __init__(self, id, status): - self.id = id - self.status = status - - -class MockAsyncIterable: - def __init__(self, items): - self.items = items.copy() - - def __aiter__(self): - self._iter = iter(self.items) - return self - - async def __anext__(self): - try: - return next(self._iter) - except StopIteration: - raise StopAsyncIteration - - -class MockStream: - def __init__(self, events): - self.events = events - - async def __aenter__(self): - return MockAsyncIterable(self.events) - - async def __aexit__(self, exc_type, exc_val, exc_tb): - pass - - -async def test_agent_thread_actions_invoke_stream(ai_project_client, ai_agent_definition): - agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) - agent.client.agents = AsyncMock() - - events = [ - MockEvent("thread.run.created", MockRunData(id="run_1", status="queued")), - MockEvent("thread.run.in_progress", MockRunData(id="run_1", status="in_progress")), - MockEvent("thread.run.completed", MockRunData(id="run_1", status="completed")), - ] - - main_run_stream = MockStream(events) - agent.client.agents.create_stream.return_value = main_run_stream - - with ( - patch.object(AgentThreadActions, "_invoke_function_calls", return_value=None), - patch.object(AgentThreadActions, "_format_tool_outputs", return_value=[{"type": "mock_tool_output"}]), - ): - collected_messages = [] - async for content in AgentThreadActions.invoke_stream( - agent=agent, - thread_id="thread123", - kernel=AsyncMock(spec=Kernel), - ): - collected_messages.append(content) diff --git a/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent.py b/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent.py deleted file mode 100644 index 0e2e676bc33f..000000000000 --- a/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent.py +++ /dev/null @@ -1,145 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
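[Editor's aside, not part of the patch] The deleted streaming test above relies on two small helpers: MockAsyncIterable implements __aiter__/__anext__ and MockStream implements the async context-manager protocol, so a mocked create_stream can be consumed with "async with" plus "async for". A self-contained sketch of how such a fake stream behaves (names here are placeholders, not the deleted classes):

import asyncio


class FakeAsyncIterable:
    def __init__(self, items):
        self._items = list(items)

    def __aiter__(self):
        self._iter = iter(self._items)
        return self

    async def __anext__(self):
        try:
            return next(self._iter)
        except StopIteration:
            raise StopAsyncIteration


class FakeStream:
    def __init__(self, events):
        self._events = events

    async def __aenter__(self):
        # Entering the "stream" yields something iterable with `async for`.
        return FakeAsyncIterable(self._events)

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        return None


async def consume():
    async with FakeStream(["queued", "in_progress", "completed"]) as stream:
        async for event in stream:
            print(event)


asyncio.run(consume())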
- -from unittest.mock import MagicMock, patch - -import pytest -from azure.ai.projects.aio import AIProjectClient -from azure.identity.aio import DefaultAzureCredential - -from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent -from semantic_kernel.agents.channels.agent_channel import AgentChannel -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions.agent_exceptions import AgentInvokeException - - -async def test_azure_ai_agent_init(ai_project_client, ai_agent_definition): - agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) - assert agent.id == "agent123" - assert agent.name == "agentName" - assert agent.description == "desc" - - -async def test_azure_ai_agent_init_with_plugins_via_constructor( - ai_project_client, ai_agent_definition, custom_plugin_class -): - agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition, plugins=[custom_plugin_class()]) - assert agent.id == "agent123" - assert agent.name == "agentName" - assert agent.description == "desc" - assert agent.kernel.plugins is not None - assert len(agent.kernel.plugins) == 1 - - -async def test_azure_ai_agent_add_chat_message(ai_project_client, ai_agent_definition): - agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) - with patch( - "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.create_message", - ): - await agent.add_chat_message("threadId", ChatMessageContent(role="user", content="text")) # pass anything - - -async def test_azure_ai_agent_get_response(ai_project_client, ai_agent_definition): - agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) - - async def fake_invoke(*args, **kwargs): - yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") - - with patch( - "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.invoke", - side_effect=fake_invoke, - ): - response = await agent.get_response("thread_id") - assert response.role == AuthorRole.ASSISTANT - assert response.content == "content" - - -async def test_azure_ai_agent_get_response_exception(ai_project_client, ai_agent_definition): - agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) - - async def fake_invoke(*args, **kwargs): - yield False, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") - - with ( - patch( - "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.invoke", - side_effect=fake_invoke, - ), - pytest.raises(AgentInvokeException), - ): - await agent.get_response("thread_id") - - -async def test_azure_ai_agent_invoke(ai_project_client, ai_agent_definition): - agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) - results = [] - - async def fake_invoke(*args, **kwargs): - yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") - - with patch( - "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.invoke", - side_effect=fake_invoke, - ): - async for item in agent.invoke("thread_id"): - results.append(item) - - assert len(results) == 1 - - -async def test_azure_ai_agent_invoke_stream(ai_project_client, ai_agent_definition): - agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) - results = [] - - async def fake_invoke(*args, **kwargs): - yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, 
content="content") - - with patch( - "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.invoke_stream", - side_effect=fake_invoke, - ): - async for item in agent.invoke_stream("thread_id"): - results.append(item) - - assert len(results) == 1 - - -def test_azure_ai_agent_get_channel_keys(ai_project_client, ai_agent_definition): - agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) - keys = list(agent.get_channel_keys()) - assert len(keys) >= 3 - - -async def test_azure_ai_agent_create_channel(ai_project_client, ai_agent_definition): - agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) - with patch( - "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.create_thread", - side_effect="t", - ): - ch = await agent.create_channel() - assert isinstance(ch, AgentChannel) - assert ch.thread_id == "t" - - -def test_create_client(): - conn_str = "endpoint;subscription_id;resource_group;project_name" - credential = MagicMock(spec=DefaultAzureCredential) - - with patch("azure.ai.projects.aio.AIProjectClient.from_connection_string") as mock_from_conn_str: - mock_client = MagicMock(spec=AIProjectClient) - mock_from_conn_str.return_value = mock_client - - client = AzureAIAgent.create_client( - credential=credential, - conn_str=conn_str, - extra_arg="extra_value", - ) - - mock_from_conn_str.assert_called_once() - _, actual_kwargs = mock_from_conn_str.call_args - - assert actual_kwargs["credential"] is credential - assert actual_kwargs["conn_str"] == conn_str - assert actual_kwargs["extra_arg"] == "extra_value" - assert actual_kwargs["user_agent"] is not None - assert client is mock_client diff --git a/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_settings.py b/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_settings.py deleted file mode 100644 index b70159d08f3f..000000000000 --- a/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_settings.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import pytest -from pydantic import Field, SecretStr, ValidationError - -from semantic_kernel.kernel_pydantic import KernelBaseSettings -from semantic_kernel.utils.feature_stage_decorator import experimental - - -@experimental -class AzureAIAgentSettings(KernelBaseSettings): - """Slightly modified to ensure invalid data raises ValidationError.""" - - env_prefix = "AZURE_AI_AGENT_" - model_deployment_name: str = Field(min_length=1) - project_connection_string: SecretStr = Field(..., min_length=1) - - -def test_azure_ai_agent_settings_valid(): - settings = AzureAIAgentSettings( - model_deployment_name="test_model", - project_connection_string="secret_value", - ) - assert settings.model_deployment_name == "test_model" - assert settings.project_connection_string.get_secret_value() == "secret_value" - - -def test_azure_ai_agent_settings_invalid(): - with pytest.raises(ValidationError): - # Should fail due to min_length=1 constraints - AzureAIAgentSettings( - model_deployment_name="", # empty => invalid - project_connection_string="", - ) diff --git a/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_utils.py b/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_utils.py deleted file mode 100644 index 74237e1e0b33..000000000000 --- a/python/tests/unit/agents/azure_ai_agent/test_azure_ai_agent_utils.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from azure.ai.projects.models import MessageAttachment, MessageRole - -from semantic_kernel.agents.azure_ai.azure_ai_agent_utils import AzureAIAgentUtils -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.file_reference_content import FileReferenceContent -from semantic_kernel.contents.utils.author_role import AuthorRole - - -def test_azure_ai_agent_utils_get_thread_messages_none(): - msgs = AzureAIAgentUtils.get_thread_messages([]) - assert msgs is None - - -def test_azure_ai_agent_utils_get_thread_messages(): - msg1 = ChatMessageContent(role=AuthorRole.USER, content="Hello!") - msg1.items.append(FileReferenceContent(file_id="file123")) - results = AzureAIAgentUtils.get_thread_messages([msg1]) - assert len(results) == 1 - assert results[0].content == "Hello!" - assert results[0].role == MessageRole.USER - assert len(results[0].attachments) == 1 - assert isinstance(results[0].attachments[0], MessageAttachment) - - -def test_azure_ai_agent_utils_get_attachments_empty(): - msg1 = ChatMessageContent(role=AuthorRole.USER, content="No file items") - atts = AzureAIAgentUtils.get_attachments(msg1) - assert atts == [] - - -def test_azure_ai_agent_utils_get_attachments_file(): - msg1 = ChatMessageContent(role=AuthorRole.USER, content="One file item") - msg1.items.append(FileReferenceContent(file_id="file123")) - atts = AzureAIAgentUtils.get_attachments(msg1) - assert len(atts) == 1 - assert atts[0].file_id == "file123" - - -def test_azure_ai_agent_utils_get_metadata(): - msg1 = ChatMessageContent(role=AuthorRole.USER, content="has meta", metadata={"k": 123}) - meta = AzureAIAgentUtils.get_metadata(msg1) - assert meta["k"] == "123" - - -def test_azure_ai_agent_utils_get_tool_definition(): - gen = AzureAIAgentUtils._get_tool_definition(["file_search", "code_interpreter", "non_existent"]) - # file_search & code_interpreter exist, non_existent yields nothing - tools_list = list(gen) - assert len(tools_list) == 2 diff --git a/python/tests/unit/agents/azure_ai_agent/test_azure_ai_channel.py b/python/tests/unit/agents/azure_ai_agent/test_azure_ai_channel.py deleted file mode 100644 index 1bb85c8b5e9a..000000000000 --- a/python/tests/unit/agents/azure_ai_agent/test_azure_ai_channel.py +++ /dev/null @@ -1,112 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from unittest.mock import AsyncMock, patch - -import pytest -from azure.ai.projects.aio import AIProjectClient - -from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent -from semantic_kernel.agents.azure_ai.azure_ai_channel import AzureAIChannel -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions.agent_exceptions import AgentChatException - - -async def test_azure_ai_channel_receive(): - class FakeAgentClient: - create_message = AsyncMock() - - class FakeClient: - agents = FakeAgentClient() - - channel = AzureAIChannel(FakeClient(), "thread123") - await channel.receive([ChatMessageContent(role=AuthorRole.USER, content="Hello")]) - FakeAgentClient.create_message.assert_awaited_once() - - -async def test_azure_ai_channel_invoke_invalid_agent(): - channel = AzureAIChannel(AsyncMock(spec=AIProjectClient), "thread123") - with pytest.raises(AgentChatException): - async for _ in channel.invoke(object()): - pass - - -async def test_azure_ai_channel_invoke_valid_agent(ai_project_client, ai_agent_definition): - agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) - - async def fake_invoke(*args, **kwargs): - yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") - - with patch( - "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.invoke", - side_effect=fake_invoke, - ): - channel = AzureAIChannel(ai_project_client, "thread123") - results = [] - async for is_visible, msg in channel.invoke(agent): - results.append((is_visible, msg)) - - assert len(results) == 1 - - -async def test_azure_ai_channel_invoke_stream_valid_agent(ai_project_client, ai_agent_definition): - agent = AzureAIAgent(client=ai_project_client, definition=ai_agent_definition) - - async def fake_invoke(*args, **kwargs): - yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") - - with patch( - "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.invoke_stream", - side_effect=fake_invoke, - ): - channel = AzureAIChannel(ai_project_client, "thread123") - results = [] - async for is_visible, msg in channel.invoke_stream(agent, messages=[]): - results.append((is_visible, msg)) - - assert len(results) == 1 - - -async def test_azure_ai_channel_get_history(): - # We need to return an async iterable, so let's do an AsyncMock returning an _async_gen - class FakeAgentClient: - delete_thread = AsyncMock() - # We'll patch get_messages directly below - - class FakeClient: - agents = FakeAgentClient() - - channel = AzureAIChannel(FakeClient(), "threadXYZ") - - async def fake_get_messages(client, thread_id): - # Must produce an async iterable - yield ChatMessageContent(role=AuthorRole.ASSISTANT, content="Previous msg") - - with patch( - "semantic_kernel.agents.azure_ai.agent_thread_actions.AgentThreadActions.get_messages", - new=fake_get_messages, # direct replacement with a coroutine - ): - results = [] - async for item in channel.get_history(): - results.append(item) - - assert len(results) == 1 - assert results[0].content == "Previous msg" - - -async def test_azure_ai_channel_reset(): - class FakeAgentClient: - delete_thread = AsyncMock() - - class FakeClient: - agents = FakeAgentClient() - - channel = AzureAIChannel(FakeClient(), "threadXYZ") - await channel.reset() - FakeAgentClient.delete_thread.assert_awaited_once_with(thread_id="threadXYZ") - - -# Helper for returning an async generator 
-async def _async_gen(items): - for i in items: - yield i diff --git a/python/tests/unit/agents/bedrock_agent/conftest.py b/python/tests/unit/agents/bedrock_agent/conftest.py deleted file mode 100644 index b76ae70b88a5..000000000000 --- a/python/tests/unit/agents/bedrock_agent/conftest.py +++ /dev/null @@ -1,180 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from collections.abc import Callable - -import pytest - -from semantic_kernel.agents.bedrock.models.bedrock_agent_event_type import BedrockAgentEventType -from semantic_kernel.agents.bedrock.models.bedrock_agent_model import BedrockAgentModel -from semantic_kernel.agents.bedrock.models.bedrock_agent_status import BedrockAgentStatus -from semantic_kernel.kernel import Kernel - - -@pytest.fixture() -def bedrock_agent_unit_test_env(monkeypatch, exclude_list, override_env_param_dict): - """Fixture to set environment variables for Amazon Bedrock Agent unit tests.""" - if exclude_list is None: - exclude_list = [] - - if override_env_param_dict is None: - override_env_param_dict = {} - - env_vars = { - "BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN": "TEST_BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN", - "BEDROCK_AGENT_FOUNDATION_MODEL": "TEST_BEDROCK_AGENT_FOUNDATION_MODEL", - } - - env_vars.update(override_env_param_dict) - - for key, value in env_vars.items(): - if key not in exclude_list: - monkeypatch.setenv(key, value) - else: - monkeypatch.delenv(key, raising=False) - - return env_vars - - -@pytest.fixture -def kernel_with_function(kernel: Kernel, decorated_native_function: Callable) -> Kernel: - kernel.add_function("test_plugin", function=decorated_native_function) - - return kernel - - -@pytest.fixture -def new_agent_name(): - return "test_agent_name" - - -@pytest.fixture -def bedrock_agent_model(): - return BedrockAgentModel( - agent_name="test_agent_name", - foundation_model="test_foundation_model", - agent_status=BedrockAgentStatus.NOT_PREPARED, - ) - - -@pytest.fixture -def bedrock_agent_model_with_id(): - return BedrockAgentModel( - agent_id="test_agent_id", - agent_name="test_agent_name", - foundation_model="test_foundation_model", - agent_status=BedrockAgentStatus.NOT_PREPARED, - ) - - -@pytest.fixture -def bedrock_agent_model_with_id_prepared_dict(): - return { - "agent": { - "agentId": "test_agent_id", - "agentName": "test_agent_name", - "foundationModel": "test_foundation_model", - "agentStatus": "PREPARED", - } - } - - -@pytest.fixture -def bedrock_agent_model_with_id_preparing_dict(): - return { - "agent": { - "agentId": "test_agent_id", - "agentName": "test_agent_name", - "foundationModel": "test_foundation_model", - "agentStatus": "PREPARING", - } - } - - -@pytest.fixture -def bedrock_agent_model_with_id_not_prepared_dict(): - return { - "agent": { - "agentId": "test_agent_id", - "agentName": "test_agent_name", - "foundationModel": "test_foundation_model", - "agentStatus": "NOT_PREPARED", - } - } - - -@pytest.fixture -def existing_agent_not_prepared_model(): - return BedrockAgentModel( - agent_id="test_agent_id", - agent_name="test_agent_name", - foundation_model="test_foundation_model", - agent_status=BedrockAgentStatus.NOT_PREPARED, - ) - - -@pytest.fixture -def bedrock_action_group_mode_dict(): - return { - "agentActionGroup": { - "actionGroupId": "test_action_group_id", - "actionGroupName": "test_action_group_name", - } - } - - -@pytest.fixture -def simple_response(): - return "test response" - - -@pytest.fixture -def bedrock_agent_non_streaming_empty_response(): - return { - "completion": [], - } - - -@pytest.fixture 
-def bedrock_agent_non_streaming_simple_response(simple_response): - return { - "completion": [ - { - "chunk": {"bytes": bytes(simple_response, "utf-8")}, - }, - ], - } - - -@pytest.fixture -def bedrock_agent_streaming_simple_response(simple_response): - return { - "completion": [ - { - "chunk": {"bytes": bytes(chunk, "utf-8")}, - } - for chunk in simple_response - ] - } - - -@pytest.fixture -def bedrock_agent_function_call_response(): - return { - "completion": [ - { - BedrockAgentEventType.RETURN_CONTROL: { - "invocationId": "test_invocation_id", - "invocationInputs": [ - { - "functionInvocationInput": { - "function": "test_function", - "parameters": [ - {"name": "test_parameter_name", "value": "test_parameter_value"}, - ], - }, - }, - ], - }, - }, - ], - } diff --git a/python/tests/unit/agents/bedrock_agent/test_action_group_utils.py b/python/tests/unit/agents/bedrock_agent/test_action_group_utils.py deleted file mode 100644 index 9898e457c40e..000000000000 --- a/python/tests/unit/agents/bedrock_agent/test_action_group_utils.py +++ /dev/null @@ -1,93 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import pytest - -from semantic_kernel.agents.bedrock.action_group_utils import ( - BEDROCK_FUNCTION_ALLOWED_PARAMETER_TYPES, - kernel_function_parameter_type_to_bedrock_function_parameter_type, - kernel_function_to_bedrock_function_schema, - parse_function_result_contents, - parse_return_control_payload, -) -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior -from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.kernel import Kernel - - -def test_kernel_function_to_bedrock_function_schema(kernel_with_function: Kernel): - # Test the conversion of kernel function to bedrock function schema - function_choice_behavior = FunctionChoiceBehavior.Auto() - function_choice_configuration = function_choice_behavior.get_config(kernel_with_function) - result = kernel_function_to_bedrock_function_schema(function_choice_configuration) - assert result == { - "functions": [ - { - "name": "test_plugin-getLightStatus", - "parameters": { - "arg1": { - "type": "string", - "required": True, - } - }, - "requireConfirmation": "DISABLED", - } - ] - } - - -def test_kernel_function_parameter_type_to_bedrock_function_parameter_type(): - # Test the conversion of kernel function parameter type to bedrock function parameter type - schema_data = {"type": "string"} - result = kernel_function_parameter_type_to_bedrock_function_parameter_type(schema_data) - assert result == "string" - - -def test_kernel_function_parameter_type_to_bedrock_function_parameter_type_invalid(): - # Test the conversion of invalid kernel function parameter type to bedrock function parameter type - schema_data = {"type": "invalid_type"} - with pytest.raises( - ValueError, - match="Type invalid_type is not allowed in bedrock function parameter type. 
" - f"Allowed types are {BEDROCK_FUNCTION_ALLOWED_PARAMETER_TYPES}.", - ): - kernel_function_parameter_type_to_bedrock_function_parameter_type(schema_data) - - -def test_parse_return_control_payload(): - # Test the parsing of return control payload to function call contents - return_control_payload = { - "invocationId": "test_invocation_id", - "invocationInputs": [ - { - "functionInvocationInput": { - "function": "test_function", - "parameters": [ - {"name": "param1", "value": "value1"}, - {"name": "param2", "value": "value2"}, - ], - } - } - ], - } - result = parse_return_control_payload(return_control_payload) - assert len(result) == 1 - assert result[0].id == "test_invocation_id" - assert result[0].name == "test_function" - assert result[0].arguments == {"param1": "value1", "param2": "value2"} - - -def test_parse_function_result_contents(): - # Test the parsing of function result contents to be returned to the agent - function_result_contents = [ - FunctionResultContent( - id="test_id", - name="test_function", - result="test_result", - metadata={"functionInvocationInput": {"actionGroup": "test_action_group"}}, - ) - ] - result = parse_function_result_contents(function_result_contents) - assert len(result) == 1 - assert result[0]["functionResult"]["actionGroup"] == "test_action_group" - assert result[0]["functionResult"]["function"] == "test_function" - assert result[0]["functionResult"]["responseBody"]["TEXT"]["body"] == "test_result" diff --git a/python/tests/unit/agents/bedrock_agent/test_bedrock_action_group_model.py b/python/tests/unit/agents/bedrock_agent/test_bedrock_action_group_model.py deleted file mode 100644 index e76abea04cb4..000000000000 --- a/python/tests/unit/agents/bedrock_agent/test_bedrock_action_group_model.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import pytest -from pydantic import ValidationError - -from semantic_kernel.agents.bedrock.models.bedrock_action_group_model import BedrockActionGroupModel - - -def test_bedrock_action_group_model_valid(): - """Test case to verify the BedrockActionGroupModel with valid data.""" - model = BedrockActionGroupModel(actionGroupId="test_id", actionGroupName="test_name") - assert model.action_group_id == "test_id" - assert model.action_group_name == "test_name" - - -def test_bedrock_action_group_model_missing_action_group_id(): - """Test case to verify error handling when actionGroupId is missing.""" - with pytest.raises(ValidationError): - BedrockActionGroupModel(actionGroupName="test_name") - - -def test_bedrock_action_group_model_missing_action_group_name(): - """Test case to verify error handling when actionGroupName is missing.""" - with pytest.raises(ValidationError): - BedrockActionGroupModel(actionGroupId="test_id") - - -def test_bedrock_action_group_model_extra_field(): - """Test case to verify the BedrockActionGroupModel with an extra field.""" - model = BedrockActionGroupModel(actionGroupId="test_id", actionGroupName="test_name", extraField="extra_value") - assert model.action_group_id == "test_id" - assert model.action_group_name == "test_name" - assert model.extraField == "extra_value" diff --git a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent.py b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent.py deleted file mode 100644 index ddf49aca36ad..000000000000 --- a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent.py +++ /dev/null @@ -1,633 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from unittest.mock import AsyncMock, Mock, patch - -import boto3 -import pytest - -from semantic_kernel.agents.bedrock.action_group_utils import ( - kernel_function_to_bedrock_function_schema, - parse_function_result_contents, -) -from semantic_kernel.agents.bedrock.bedrock_agent import BedrockAgent -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior -from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException, AgentInvokeException -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.kernel import Kernel - -# region Agent Initialization Tests - - -# Test case to verify BedrockAgent initialization -@patch.object(boto3, "client", return_value=Mock()) -async def test_bedrock_agent_initialization(client, bedrock_agent_model_with_id): - agent = BedrockAgent(bedrock_agent_model_with_id) - - assert agent.name == bedrock_agent_model_with_id.agent_name - assert agent.agent_model.agent_name == bedrock_agent_model_with_id.agent_name - assert agent.agent_model.agent_id == bedrock_agent_model_with_id.agent_id - assert agent.agent_model.foundation_model == bedrock_agent_model_with_id.foundation_model - - -# Test case to verify error handling during BedrockAgent initialization with non-auto function choice -@patch.object(boto3, "client", return_value=Mock()) -async def test_bedrock_agent_initialization_error_with_non_auto_function_choice(client, bedrock_agent_model_with_id): - with pytest.raises(ValueError, match="Only FunctionChoiceType.AUTO is supported."): - BedrockAgent( - bedrock_agent_model_with_id, - function_choice_behavior=FunctionChoiceBehavior.NoneInvoke(), - ) - - -# Test case to verify the creation of BedrockAgent -@patch.object(boto3, "client", return_value=Mock()) -@pytest.mark.parametrize( - "kernel, function_choice_behavior, arguments", - [ - (None, None, None), - (Kernel(), None, None), - (Kernel(), FunctionChoiceBehavior.Auto(), None), - (Kernel(), FunctionChoiceBehavior.Auto(), KernelArguments()), - ], -) -async def test_bedrock_agent_create_and_prepare_agent( - client, - bedrock_agent_model_with_id_not_prepared_dict, - bedrock_agent_unit_test_env, - kernel, - function_choice_behavior, - arguments, -): - with ( - patch.object(client, "create_agent") as mock_create_agent, - patch.object(BedrockAgent, "_wait_for_agent_status", new_callable=AsyncMock), - patch.object(BedrockAgent, "prepare_agent_and_wait_until_prepared", new_callable=AsyncMock), - ): - mock_create_agent.return_value = bedrock_agent_model_with_id_not_prepared_dict - - agent = await BedrockAgent.create_and_prepare_agent( - name=bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentName"], - instructions="test_instructions", - bedrock_client=client, - env_file_path="fake_path", - kernel=kernel, - function_choice_behavior=function_choice_behavior, - arguments=arguments, - ) - - mock_create_agent.assert_called_once_with( - agentName=bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentName"], - foundationModel=bedrock_agent_unit_test_env["BEDROCK_AGENT_FOUNDATION_MODEL"], - agentResourceRoleArn=bedrock_agent_unit_test_env["BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN"], - instruction="test_instructions", - ) - assert agent.agent_model.agent_id == bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentId"] - assert agent.id == bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentId"] - assert 
agent.agent_model.agent_name == bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentName"] - assert agent.name == bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentName"] - assert ( - agent.agent_model.foundation_model - == bedrock_agent_model_with_id_not_prepared_dict["agent"]["foundationModel"] - ) - assert agent.kernel is not None - assert agent.function_choice_behavior is not None - if arguments: - assert agent.arguments is not None - - -# Test case to verify the creation of BedrockAgent -@pytest.mark.parametrize( - "exclude_list", - [ - ["BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN"], - ["BEDROCK_AGENT_FOUNDATION_MODEL"], - ], - indirect=True, -) -@patch.object(boto3, "client", return_value=Mock()) -async def test_bedrock_agent_create_and_prepare_agent_settings_validation_error( - client, - bedrock_agent_model_with_id_not_prepared_dict, - bedrock_agent_unit_test_env, -): - with pytest.raises(AgentInitializationException): - await BedrockAgent.create_and_prepare_agent( - name=bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentName"], - instructions="test_instructions", - env_file_path="fake_path", - ) - - -# Test case to verify the creation of BedrockAgent -@patch.object(boto3, "client", return_value=Mock()) -async def test_bedrock_agent_create_and_prepare_agent_service_exception( - client, - bedrock_agent_model_with_id_not_prepared_dict, - bedrock_agent_unit_test_env, -): - with ( - patch.object(client, "create_agent") as mock_create_agent, - patch.object(BedrockAgent, "prepare_agent_and_wait_until_prepared", new_callable=AsyncMock), - ): - from botocore.exceptions import ClientError - - mock_create_agent.side_effect = ClientError({}, "create_agent") - - with pytest.raises(AgentInitializationException): - await BedrockAgent.create_and_prepare_agent( - name=bedrock_agent_model_with_id_not_prepared_dict["agent"]["agentName"], - instructions="test_instructions", - bedrock_client=client, - env_file_path="fake_path", - ) - - -@patch.object(boto3, "client", return_value=Mock()) -async def test_bedrock_agent_prepare_agent_and_wait_until_prepared( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, - bedrock_agent_model_with_id_preparing_dict, - bedrock_agent_model_with_id_prepared_dict, -): - agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) - - with ( - patch.object(client, "get_agent") as mock_get_agent, - patch.object(client, "prepare_agent") as mock_prepare_agent, - ): - mock_get_agent.side_effect = [ - bedrock_agent_model_with_id_preparing_dict, - bedrock_agent_model_with_id_prepared_dict, - ] - - await agent.prepare_agent_and_wait_until_prepared() - - mock_prepare_agent.assert_called_once_with(agentId=bedrock_agent_model_with_id.agent_id) - assert mock_get_agent.call_count == 2 - assert agent.agent_model.agent_status == "PREPARED" - - -@patch.object(boto3, "client", return_value=Mock()) -async def test_bedrock_agent_prepare_agent_and_wait_until_prepared_fail( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, - bedrock_agent_model_with_id_preparing_dict, -): - agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) - - with ( - patch.object(client, "get_agent") as mock_get_agent, - patch.object(client, "prepare_agent"), - ): - mock_get_agent.side_effect = [ - bedrock_agent_model_with_id_preparing_dict, - bedrock_agent_model_with_id_preparing_dict, - bedrock_agent_model_with_id_preparing_dict, - bedrock_agent_model_with_id_preparing_dict, - bedrock_agent_model_with_id_preparing_dict, 
- bedrock_agent_model_with_id_preparing_dict, - ] - - with pytest.raises(TimeoutError): - await agent.prepare_agent_and_wait_until_prepared() - - -# Test case to verify the creation of a code interpreter action group -@patch.object(boto3, "client", return_value=Mock()) -async def test_create_code_interpreter_action_group( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, - bedrock_action_group_mode_dict, -): - agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) - - with ( - patch.object(client, "create_agent_action_group") as mock_create_action_group, - patch.object( - BedrockAgent, "prepare_agent_and_wait_until_prepared" - ) as mock_prepare_agent_and_wait_until_prepared, - ): - mock_create_action_group.return_value = bedrock_action_group_mode_dict - action_group_model = await agent.create_code_interpreter_action_group() - - mock_create_action_group.assert_called_once_with( - agentId=agent.agent_model.agent_id, - agentVersion=agent.agent_model.agent_version or "DRAFT", - actionGroupName=f"{agent.agent_model.agent_name}_code_interpreter", - actionGroupState="ENABLED", - parentActionGroupSignature="AMAZON.CodeInterpreter", - ) - assert action_group_model.action_group_id == bedrock_action_group_mode_dict["agentActionGroup"]["actionGroupId"] - mock_prepare_agent_and_wait_until_prepared.assert_called_once() - - -# Test case to verify the creation of BedrockAgent with plugins -@patch.object(boto3, "client", return_value=Mock()) -async def test_bedrock_agent_create_with_plugin_via_constructor( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, - custom_plugin_class, -): - agent = BedrockAgent( - bedrock_agent_model_with_id, - plugins=[custom_plugin_class()], - bedrock_client=client, - ) - - assert agent.kernel.plugins is not None - assert len(agent.kernel.plugins) == 1 - - -# Test case to verify the creation of a user input action group -@patch.object(boto3, "client", return_value=Mock()) -async def test_create_user_input_action_group( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, - bedrock_action_group_mode_dict, -): - agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) - - with ( - patch.object(agent.bedrock_client, "create_agent_action_group") as mock_create_action_group, - patch.object( - BedrockAgent, "prepare_agent_and_wait_until_prepared" - ) as mock_prepare_agent_and_wait_until_prepared, - ): - mock_create_action_group.return_value = bedrock_action_group_mode_dict - action_group_model = await agent.create_user_input_action_group() - - mock_create_action_group.assert_called_once_with( - agentId=agent.agent_model.agent_id, - agentVersion=agent.agent_model.agent_version or "DRAFT", - actionGroupName=f"{agent.agent_model.agent_name}_user_input", - actionGroupState="ENABLED", - parentActionGroupSignature="AMAZON.UserInput", - ) - assert action_group_model.action_group_id == bedrock_action_group_mode_dict["agentActionGroup"]["actionGroupId"] - mock_prepare_agent_and_wait_until_prepared.assert_called_once() - - -# Test case to verify the creation of a kernel function action group -@patch.object(boto3, "client", return_value=Mock()) -async def test_create_kernel_function_action_group( - client, - kernel_with_function, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, - bedrock_action_group_mode_dict, -): - agent = BedrockAgent(bedrock_agent_model_with_id, kernel=kernel_with_function, bedrock_client=client) - - with ( - patch.object(agent.bedrock_client, 
"create_agent_action_group") as mock_create_action_group, - patch.object( - BedrockAgent, "prepare_agent_and_wait_until_prepared" - ) as mock_prepare_agent_and_wait_until_prepared, - ): - mock_create_action_group.return_value = bedrock_action_group_mode_dict - - action_group_model = await agent.create_kernel_function_action_group() - - mock_create_action_group.assert_called_once_with( - agentId=agent.agent_model.agent_id, - agentVersion=agent.agent_model.agent_version or "DRAFT", - actionGroupName=f"{agent.agent_model.agent_name}_kernel_function", - actionGroupState="ENABLED", - actionGroupExecutor={"customControl": "RETURN_CONTROL"}, - functionSchema=kernel_function_to_bedrock_function_schema( - agent.function_choice_behavior.get_config(kernel_with_function) - ), - ) - assert action_group_model.action_group_id == bedrock_action_group_mode_dict["agentActionGroup"]["actionGroupId"] - mock_prepare_agent_and_wait_until_prepared.assert_called_once() - - -# Test case to verify the association of an agent with a knowledge base -@patch.object(boto3, "client", return_value=Mock()) -async def test_associate_agent_knowledge_base( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, -): - agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) - - with ( - patch.object(agent.bedrock_client, "associate_agent_knowledge_base") as mock_associate_knowledge_base, - patch.object( - BedrockAgent, "prepare_agent_and_wait_until_prepared" - ) as mock_prepare_agent_and_wait_until_prepared, - ): - await agent.associate_agent_knowledge_base("test_knowledge_base_id") - - mock_associate_knowledge_base.assert_called_once_with( - agentId=agent.agent_model.agent_id, - agentVersion=agent.agent_model.agent_version, - knowledgeBaseId="test_knowledge_base_id", - ) - mock_prepare_agent_and_wait_until_prepared.assert_called_once() - - -# Test case to verify the disassociation of an agent with a knowledge base -@patch.object(boto3, "client", return_value=Mock()) -async def test_disassociate_agent_knowledge_base( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, -): - agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) - - with ( - patch.object(agent.bedrock_client, "disassociate_agent_knowledge_base") as mock_disassociate_knowledge_base, - patch.object( - BedrockAgent, "prepare_agent_and_wait_until_prepared" - ) as mock_prepare_agent_and_wait_until_prepared, - ): - await agent.disassociate_agent_knowledge_base("test_knowledge_base_id") - mock_disassociate_knowledge_base.assert_called_once_with( - agentId=agent.agent_model.agent_id, - agentVersion=agent.agent_model.agent_version, - knowledgeBaseId="test_knowledge_base_id", - ) - mock_prepare_agent_and_wait_until_prepared.assert_called_once() - - -# Test case to verify listing associated knowledge bases with an agent -@patch.object(boto3, "client", return_value=Mock()) -async def test_list_associated_agent_knowledge_bases( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, -): - agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) - - with patch.object(agent.bedrock_client, "list_agent_knowledge_bases") as mock_list_knowledge_bases: - await agent.list_associated_agent_knowledge_bases() - - mock_list_knowledge_bases.assert_called_once_with( - agentId=agent.agent_model.agent_id, - agentVersion=agent.agent_model.agent_version, - ) - - -# endregion - -# region Agent Deletion Tests - - -@patch.object(boto3, "client", return_value=Mock()) -async def 
test_delete_agent( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, -): - agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) - - agent_id = bedrock_agent_model_with_id.agent_id - with patch.object(agent.bedrock_client, "delete_agent") as mock_delete_agent: - await agent.delete_agent() - - mock_delete_agent.assert_called_once_with(agentId=agent_id) - assert agent.agent_model.agent_id is None - - -# Test case to verify error handling when deleting an agent that does not exist -@patch.object(boto3, "client", return_value=Mock()) -async def test_delete_agent_twice_error( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, -): - agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) - - with patch.object(agent.bedrock_client, "delete_agent"): - await agent.delete_agent() - - with pytest.raises(ValueError): - await agent.delete_agent() - - -# Test case to verify error handling when there is a client error during agent deletion -@patch.object(boto3, "client", return_value=Mock()) -async def test_delete_agent_client_error( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, -): - agent = BedrockAgent(bedrock_agent_model_with_id, bedrock_client=client) - - with patch.object(agent.bedrock_client, "delete_agent") as mock_delete_agent: - from botocore.exceptions import ClientError - - mock_delete_agent.side_effect = ClientError({"Error": {"Code": "500"}}, "delete_agent") - - with pytest.raises(ClientError): - await agent.delete_agent() - - -# endregion - -# region Agent Invoke Tests - - -# Test case to verify the `get_response` method of BedrockAgent -@patch.object(boto3, "client", return_value=Mock()) -async def test_bedrock_agent_get_response( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, - bedrock_agent_non_streaming_simple_response, - simple_response, -): - with ( - patch.object(BedrockAgent, "_invoke_agent", new_callable=AsyncMock) as mock_invoke_agent, - ): - agent = BedrockAgent(bedrock_agent_model_with_id) - - mock_invoke_agent.return_value = bedrock_agent_non_streaming_simple_response - response = await agent.get_response("test_session_id", "test_input_text") - assert response.content == simple_response - - mock_invoke_agent.assert_called_once_with( - "test_session_id", - "test_input_text", - None, - streamingConfigurations={"streamFinalResponse": False}, - sessionState={}, - ) - - -# Test case to verify the `get_response` method of BedrockAgent -@patch.object(boto3, "client", return_value=Mock()) -async def test_bedrock_agent_get_response_exception( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, - bedrock_agent_non_streaming_empty_response, -): - with ( - patch.object(BedrockAgent, "_invoke_agent", new_callable=AsyncMock) as mock_invoke_agent, - ): - agent = BedrockAgent(bedrock_agent_model_with_id) - - mock_invoke_agent.return_value = bedrock_agent_non_streaming_empty_response - with pytest.raises(AgentInvokeException): - await agent.get_response("test_session_id", "test_input_text") - - mock_invoke_agent.assert_called_once_with( - "test_session_id", - "test_input_text", - None, - streamingConfigurations={"streamFinalResponse": False}, - sessionState={}, - ) - - -# Test case to verify the invocation of BedrockAgent -@patch.object(boto3, "client", return_value=Mock()) -async def test_bedrock_agent_invoke( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, - bedrock_agent_non_streaming_simple_response, - simple_response, 
-): - with ( - patch.object(BedrockAgent, "_invoke_agent", new_callable=AsyncMock) as mock_invoke_agent, - ): - agent = BedrockAgent(bedrock_agent_model_with_id) - - mock_invoke_agent.return_value = bedrock_agent_non_streaming_simple_response - async for message in agent.invoke("test_session_id", "test_input_text"): - assert message.content == simple_response - - mock_invoke_agent.assert_called_once_with( - "test_session_id", - "test_input_text", - None, - streamingConfigurations={"streamFinalResponse": False}, - sessionState={}, - ) - - -# Test case to verify the streaming invocation of BedrockAgent -@patch.object(boto3, "client", return_value=Mock()) -async def test_bedrock_agent_invoke_stream( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, - bedrock_agent_streaming_simple_response, - simple_response, -): - with ( - patch.object(BedrockAgent, "_invoke_agent", new_callable=AsyncMock) as mock_invoke_agent, - ): - agent = BedrockAgent(bedrock_agent_model_with_id) - - mock_invoke_agent.return_value = bedrock_agent_streaming_simple_response - full_message = "" - async for message in agent.invoke_stream("test_session_id", "test_input_text"): - full_message += message.content - - assert full_message == simple_response - mock_invoke_agent.assert_called_once_with( - "test_session_id", - "test_input_text", - None, - streamingConfigurations={"streamFinalResponse": True}, - sessionState={}, - ) - - -# Test case to verify the invocation of BedrockAgent with function call -@patch.object(boto3, "client", return_value=Mock()) -async def test_bedrock_agent_invoke_with_function_call( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, - bedrock_agent_function_call_response, - bedrock_agent_non_streaming_simple_response, -): - with ( - patch.object(BedrockAgent, "_invoke_agent", new_callable=AsyncMock) as mock_invoke_agent, - patch.object(BedrockAgent, "_handle_function_call_contents") as mock_handle_function_call_contents, - ): - agent = BedrockAgent(bedrock_agent_model_with_id) - - function_result_contents = [ - FunctionResultContent( - id="test_id", - name="test_function", - result="test_result", - metadata={"functionInvocationInput": {"actionGroup": "test_action_group"}}, - ) - ] - mock_handle_function_call_contents.return_value = function_result_contents - agent.function_choice_behavior.maximum_auto_invoke_attempts = 2 - - mock_invoke_agent.side_effect = [ - bedrock_agent_function_call_response, - bedrock_agent_non_streaming_simple_response, - ] - async for _ in agent.invoke("test_session_id", "test_input_text"): - mock_invoke_agent.assert_called_with( - "test_session_id", - "test_input_text", - None, - streamingConfigurations={"streamFinalResponse": False}, - sessionState={ - "invocationId": "test_invocation_id", - "returnControlInvocationResults": parse_function_result_contents(function_result_contents), - }, - ) - - -# Test case to verify the streaming invocation of BedrockAgent with function call -@patch.object(boto3, "client", return_value=Mock()) -async def test_bedrock_agent_invoke_stream_with_function_call( - client, - bedrock_agent_unit_test_env, - bedrock_agent_model_with_id, - bedrock_agent_function_call_response, - bedrock_agent_streaming_simple_response, -): - with ( - patch.object(BedrockAgent, "_invoke_agent", new_callable=AsyncMock) as mock_invoke_agent, - patch.object(BedrockAgent, "_handle_function_call_contents") as mock_handle_function_call_contents, - ): - agent = BedrockAgent(bedrock_agent_model_with_id) - - function_result_contents = 
[ - FunctionResultContent( - id="test_id", - name="test_function", - result="test_result", - metadata={"functionInvocationInput": {"actionGroup": "test_action_group"}}, - ) - ] - mock_handle_function_call_contents.return_value = function_result_contents - agent.function_choice_behavior.maximum_auto_invoke_attempts = 2 - - mock_invoke_agent.side_effect = [ - bedrock_agent_function_call_response, - bedrock_agent_streaming_simple_response, - ] - async for _ in agent.invoke_stream("test_session_id", "test_input_text"): - mock_invoke_agent.assert_called_with( - "test_session_id", - "test_input_text", - None, - streamingConfigurations={"streamFinalResponse": True}, - sessionState={ - "invocationId": "test_invocation_id", - "returnControlInvocationResults": parse_function_result_contents(function_result_contents), - }, - ) - - -# endregion diff --git a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_channel.py b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_channel.py deleted file mode 100644 index 66e203d93065..000000000000 --- a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_channel.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import pytest - -from semantic_kernel.contents.chat_message_content import ChatMessageContent - - -@pytest.fixture -def mock_channel(): - from semantic_kernel.agents.channels.bedrock_agent_channel import BedrockAgentChannel - - return BedrockAgentChannel() - - -@pytest.fixture -def chat_history() -> list[ChatMessageContent]: - return [ - ChatMessageContent(role="user", content="Hello, Bedrock!"), - ChatMessageContent(role="assistant", content="Hello, User!"), - ChatMessageContent(role="user", content="How are you?"), - ChatMessageContent(role="assistant", content="I'm good, thank you!"), - ] - - -@pytest.fixture -def chat_history_not_alternate_role() -> list[ChatMessageContent]: - return [ - ChatMessageContent(role="user", content="Hello, Bedrock!"), - ChatMessageContent(role="user", content="Hello, User!"), - ChatMessageContent(role="assistant", content="How are you?"), - ChatMessageContent(role="assistant", content="I'm good, thank you!"), - ] - - -async def test_receive_message(mock_channel, chat_history): - # Test to verify the receive_message functionality - await mock_channel.receive(chat_history) - assert len(mock_channel) == len(chat_history) - - -async def test_channel_receive_message_with_no_message(mock_channel): - # Test to verify receive_message when no message is received - await mock_channel.receive([]) - assert len(mock_channel) == 0 - - -async def test_chat_history_alternation(mock_channel, chat_history_not_alternate_role): - # Test to verify chat history alternates between user and assistant messages - await mock_channel.receive(chat_history_not_alternate_role) - assert all( - mock_channel.messages[i].role != mock_channel.messages[i + 1].role - for i in range(len(chat_history_not_alternate_role) - 1) - ) - assert mock_channel.messages[1].content == mock_channel.MESSAGE_PLACEHOLDER - assert mock_channel.messages[4].content == mock_channel.MESSAGE_PLACEHOLDER - - -async def test_channel_reset(mock_channel, chat_history): - # Test to verify the reset functionality - await mock_channel.receive(chat_history) - assert len(mock_channel) == len(chat_history) - await mock_channel.reset() - assert len(mock_channel) == 0 diff --git a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_event_type.py b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_event_type.py deleted file 
mode 100644 index 08bf1b704cb6..000000000000 --- a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_event_type.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import pytest - -from semantic_kernel.agents.bedrock.models.bedrock_agent_event_type import BedrockAgentEventType - - -def test_bedrock_agent_event_type_values(): - """Test case to verify the values of BedrockAgentEventType enum.""" - assert BedrockAgentEventType.CHUNK.value == "chunk" - assert BedrockAgentEventType.TRACE.value == "trace" - assert BedrockAgentEventType.RETURN_CONTROL.value == "returnControl" - assert BedrockAgentEventType.FILES.value == "files" - - -def test_bedrock_agent_event_type_enum(): - """Test case to verify the type of BedrockAgentEventType enum members.""" - assert isinstance(BedrockAgentEventType.CHUNK, BedrockAgentEventType) - assert isinstance(BedrockAgentEventType.TRACE, BedrockAgentEventType) - assert isinstance(BedrockAgentEventType.RETURN_CONTROL, BedrockAgentEventType) - assert isinstance(BedrockAgentEventType.FILES, BedrockAgentEventType) - - -def test_bedrock_agent_event_type_invalid(): - """Test case to verify error handling for invalid BedrockAgentEventType value.""" - with pytest.raises(ValueError): - BedrockAgentEventType("invalid_value") diff --git a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_model.py b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_model.py deleted file mode 100644 index 42098654eaee..000000000000 --- a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_model.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from semantic_kernel.agents.bedrock.models.bedrock_agent_model import BedrockAgentModel - - -def test_bedrock_agent_model_valid(): - """Test case to verify the BedrockAgentModel with valid data.""" - model = BedrockAgentModel( - agentId="test_id", - agentName="test_name", - agentVersion="1.0", - foundationModel="test_model", - agentStatus="CREATING", - ) - assert model.agent_id == "test_id" - assert model.agent_name == "test_name" - assert model.agent_version == "1.0" - assert model.foundation_model == "test_model" - assert model.agent_status == "CREATING" - - -def test_bedrock_agent_model_missing_agent_id(): - """Test case to verify the BedrockAgentModel with missing agentId.""" - model = BedrockAgentModel( - agentName="test_name", - agentVersion="1.0", - foundationModel="test_model", - agentStatus="CREATING", - ) - assert model.agent_id is None - assert model.agent_name == "test_name" - assert model.agent_version == "1.0" - assert model.foundation_model == "test_model" - assert model.agent_status == "CREATING" - - -def test_bedrock_agent_model_missing_agent_name(): - """Test case to verify the BedrockAgentModel with missing agentName.""" - model = BedrockAgentModel( - agentId="test_id", - agentVersion="1.0", - foundationModel="test_model", - agentStatus="CREATING", - ) - assert model.agent_id == "test_id" - assert model.agent_name is None - assert model.agent_version == "1.0" - assert model.foundation_model == "test_model" - assert model.agent_status == "CREATING" - - -def test_bedrock_agent_model_extra_field(): - """Test case to verify the BedrockAgentModel with an extra field.""" - model = BedrockAgentModel( - agentId="test_id", - agentName="test_name", - agentVersion="1.0", - foundationModel="test_model", - agentStatus="CREATING", - extraField="extra_value", - ) - assert model.agent_id == "test_id" - assert model.agent_name == "test_name" - assert 
model.agent_version == "1.0" - assert model.foundation_model == "test_model" - assert model.agent_status == "CREATING" - assert model.extraField == "extra_value" diff --git a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_settings.py b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_settings.py deleted file mode 100644 index c56e3fcb878f..000000000000 --- a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_settings.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import pytest -from pydantic import ValidationError - -from semantic_kernel.agents.bedrock.bedrock_agent_settings import BedrockAgentSettings - - -def test_bedrock_agent_settings_from_env_vars(bedrock_agent_unit_test_env): - """Test loading BedrockAgentSettings from environment variables.""" - settings = BedrockAgentSettings.create(env_file_path="fake_path") - - assert settings.agent_resource_role_arn == bedrock_agent_unit_test_env["BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN"] - assert settings.foundation_model == bedrock_agent_unit_test_env["BEDROCK_AGENT_FOUNDATION_MODEL"] - - -@pytest.mark.parametrize( - "exclude_list", - [ - ["BEDROCK_AGENT_AGENT_RESOURCE_ROLE_ARN"], - ["BEDROCK_AGENT_FOUNDATION_MODEL"], - ], - indirect=True, -) -def test_bedrock_agent_settings_from_env_vars_missing_required(bedrock_agent_unit_test_env): - """Test loading BedrockAgentSettings from environment variables with missing required fields.""" - with pytest.raises(ValidationError): - BedrockAgentSettings.create(env_file_path="fake_path") diff --git a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_status.py b/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_status.py deleted file mode 100644 index c02b11178713..000000000000 --- a/python/tests/unit/agents/bedrock_agent/test_bedrock_agent_status.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import pytest - -from semantic_kernel.agents.bedrock.models.bedrock_agent_status import BedrockAgentStatus - - -def test_bedrock_agent_status_values(): - """Test case to verify the values of BedrockAgentStatus enum.""" - assert BedrockAgentStatus.CREATING == "CREATING" - assert BedrockAgentStatus.PREPARING == "PREPARING" - assert BedrockAgentStatus.PREPARED == "PREPARED" - assert BedrockAgentStatus.NOT_PREPARED == "NOT_PREPARED" - assert BedrockAgentStatus.DELETING == "DELETING" - assert BedrockAgentStatus.FAILED == "FAILED" - assert BedrockAgentStatus.VERSIONING == "VERSIONING" - assert BedrockAgentStatus.UPDATING == "UPDATING" - - -def test_bedrock_agent_status_invalid_value(): - """Test case to verify error handling for invalid BedrockAgentStatus value.""" - with pytest.raises(ValueError): - BedrockAgentStatus("INVALID_STATUS") diff --git a/python/tests/unit/agents/chat_completion/conftest.py b/python/tests/unit/agents/chat_completion/conftest.py deleted file mode 100644 index 5e5784bc1b9c..000000000000 --- a/python/tests/unit/agents/chat_completion/conftest.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from unittest.mock import AsyncMock, create_autospec - -import pytest - -from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase -from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.kernel import Kernel - - -@pytest.fixture -def kernel_with_ai_service(): - kernel = create_autospec(Kernel) - mock_ai_service_client = create_autospec(ChatCompletionClientBase) - mock_prompt_execution_settings = create_autospec(PromptExecutionSettings) - mock_prompt_execution_settings.function_choice_behavior = None - kernel.select_ai_service.return_value = (mock_ai_service_client, mock_prompt_execution_settings) - mock_ai_service_client.get_chat_message_contents = AsyncMock( - return_value=[ChatMessageContent(role=AuthorRole.SYSTEM, content="Processed Message")] - ) - - return kernel, mock_ai_service_client diff --git a/python/tests/unit/agents/chat_completion/test_chat_completion_agent.py b/python/tests/unit/agents/chat_completion/test_chat_completion_agent.py deleted file mode 100644 index 2a0798f342ad..000000000000 --- a/python/tests/unit/agents/chat_completion/test_chat_completion_agent.py +++ /dev/null @@ -1,326 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from collections.abc import AsyncGenerator, Callable -from unittest.mock import AsyncMock, create_autospec, patch - -import pytest -from pydantic import ValidationError - -from semantic_kernel.agents import ChatCompletionAgent -from semantic_kernel.agents.channels.chat_history_channel import ChatHistoryChannel -from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase -from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion -from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion -from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions import KernelServiceNotFoundError -from semantic_kernel.exceptions.agent_exceptions import AgentInvokeException -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.kernel import Kernel - - -@pytest.fixture -def mock_streaming_chat_completion_response() -> Callable[..., AsyncGenerator[list[ChatMessageContent], None]]: - async def mock_response( - chat_history: ChatHistory, - settings: PromptExecutionSettings, - kernel: Kernel, - arguments: KernelArguments, - ) -> AsyncGenerator[list[ChatMessageContent], None]: - content1 = ChatMessageContent(role=AuthorRole.SYSTEM, content="Processed Message 1") - content2 = ChatMessageContent(role=AuthorRole.TOOL, content="Processed Message 2") - chat_history.messages.append(content1) - chat_history.messages.append(content2) - yield [content1] - yield [content2] - - return mock_response - - -async def test_initialization(): - agent = ChatCompletionAgent( - name="TestAgent", - id="test_id", - description="Test Description", - instructions="Test Instructions", - ) - - assert agent.name == "TestAgent" - assert agent.id == "test_id" - assert agent.description == "Test Description" - assert 
agent.instructions == "Test Instructions" - - -async def test_initialization_invalid_name_throws(): - with pytest.raises(ValidationError): - _ = ChatCompletionAgent( - name="Test Agent", - id="test_id", - description="Test Description", - instructions="Test Instructions", - ) - - -def test_initialization_with_kernel(kernel: Kernel): - agent = ChatCompletionAgent( - kernel=kernel, - name="TestAgent", - id="test_id", - description="Test Description", - instructions="Test Instructions", - ) - - assert kernel == agent.kernel - assert agent.name == "TestAgent" - assert agent.id == "test_id" - assert agent.description == "Test Description" - assert agent.instructions == "Test Instructions" - - -def test_initialization_with_kernel_and_service(kernel: Kernel, azure_openai_unit_test_env, openai_unit_test_env): - kernel.add_service(AzureChatCompletion(service_id="test_azure")) - agent = ChatCompletionAgent( - service=OpenAIChatCompletion(), - kernel=kernel, - name="TestAgent", - id="test_id", - description="Test Description", - instructions="Test Instructions", - ) - - assert kernel == agent.kernel - assert len(kernel.services) == 2 - assert agent.name == "TestAgent" - assert agent.id == "test_id" - assert agent.description == "Test Description" - assert agent.instructions == "Test Instructions" - - -def test_initialization_with_plugins_via_constructor(custom_plugin_class): - agent = ChatCompletionAgent( - name="TestAgent", - id="test_id", - description="Test Description", - instructions="Test Instructions", - plugins=[custom_plugin_class()], - ) - - assert agent.name == "TestAgent" - assert agent.id == "test_id" - assert agent.description == "Test Description" - assert agent.instructions == "Test Instructions" - assert agent.kernel.plugins is not None - assert len(agent.kernel.plugins) == 1 - - -def test_initialization_with_service_via_constructor(openai_unit_test_env): - agent = ChatCompletionAgent( - name="TestAgent", - id="test_id", - description="Test Description", - instructions="Test Instructions", - service=OpenAIChatCompletion(), - ) - - assert agent.name == "TestAgent" - assert agent.id == "test_id" - assert agent.description == "Test Description" - assert agent.instructions == "Test Instructions" - assert agent.service is not None - assert agent.kernel.services["test_chat_model_id"] == agent.service - - -async def test_get_response(kernel_with_ai_service: tuple[Kernel, ChatCompletionClientBase]): - kernel, _ = kernel_with_ai_service - agent = ChatCompletionAgent( - kernel=kernel, - name="TestAgent", - instructions="Test Instructions", - ) - - history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) - - response = await agent.get_response(history) - - assert response.content == "Processed Message" - - -async def test_get_response_exception(kernel_with_ai_service: tuple[Kernel, ChatCompletionClientBase]): - kernel, mock_ai_service_client = kernel_with_ai_service - mock_ai_service_client.get_chat_message_contents = AsyncMock(return_value=[]) - agent = ChatCompletionAgent( - kernel=kernel, - name="TestAgent", - instructions="Test Instructions", - ) - - history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) - - with pytest.raises(AgentInvokeException): - await agent.get_response(history) - - -async def test_invoke(kernel_with_ai_service: tuple[Kernel, ChatCompletionClientBase]): - kernel, _ = kernel_with_ai_service - agent = ChatCompletionAgent( - kernel=kernel, - name="TestAgent", - instructions="Test 
Instructions", - ) - - history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) - - messages = [message async for message in agent.invoke(history)] - - assert len(messages) == 1 - assert messages[0].content == "Processed Message" - - -async def test_invoke_tool_call_added(kernel_with_ai_service: tuple[Kernel, ChatCompletionClientBase]): - kernel, mock_ai_service_client = kernel_with_ai_service - agent = ChatCompletionAgent( - kernel=kernel, - name="TestAgent", - ) - - history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) - - async def mock_get_chat_message_contents( - chat_history: ChatHistory, - settings: PromptExecutionSettings, - kernel: Kernel, - arguments: KernelArguments, - ): - new_messages = [ - ChatMessageContent(role=AuthorRole.ASSISTANT, content="Processed Message 1"), - ChatMessageContent(role=AuthorRole.TOOL, content="Processed Message 2"), - ] - chat_history.messages.extend(new_messages) - return new_messages - - mock_ai_service_client.get_chat_message_contents = AsyncMock(side_effect=mock_get_chat_message_contents) - - messages = [message async for message in agent.invoke(history)] - - assert len(messages) == 2 - assert messages[0].content == "Processed Message 1" - assert messages[1].content == "Processed Message 2" - - assert len(history.messages) == 3 - assert history.messages[1].content == "Processed Message 1" - assert history.messages[2].content == "Processed Message 2" - assert history.messages[1].name == "TestAgent" - assert history.messages[2].name == "TestAgent" - - -async def test_invoke_no_service_throws(kernel: Kernel): - agent = ChatCompletionAgent(kernel=kernel, name="TestAgent") - - history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) - - with pytest.raises(KernelServiceNotFoundError): - async for _ in agent.invoke(history): - pass - - -async def test_invoke_stream(kernel_with_ai_service: tuple[Kernel, ChatCompletionClientBase]): - kernel, _ = kernel_with_ai_service - agent = ChatCompletionAgent(kernel=kernel, name="TestAgent") - - history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) - - with patch( - "semantic_kernel.connectors.ai.chat_completion_client_base.ChatCompletionClientBase.get_streaming_chat_message_contents", - return_value=AsyncMock(), - ) as mock: - mock.return_value.__aiter__.return_value = [ - [ChatMessageContent(role=AuthorRole.USER, content="Initial Message")] - ] - - async for message in agent.invoke_stream(history): - assert message.role == AuthorRole.USER - assert message.content == "Initial Message" - - -async def test_invoke_stream_tool_call_added( - kernel_with_ai_service: tuple[Kernel, ChatCompletionClientBase], - mock_streaming_chat_completion_response, -): - kernel, mock_ai_service_client = kernel_with_ai_service - agent = ChatCompletionAgent(kernel=kernel, name="TestAgent") - - history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) - - mock_ai_service_client.get_streaming_chat_message_contents = mock_streaming_chat_completion_response - - async for message in agent.invoke_stream(history): - print(f"Message role: {message.role}, content: {message.content}") - assert message.role in [AuthorRole.SYSTEM, AuthorRole.TOOL] - assert message.content in ["Processed Message 1", "Processed Message 2"] - - assert len(history.messages) == 3 - - -async def test_invoke_stream_no_service_throws(kernel: Kernel): - 
agent = ChatCompletionAgent(kernel=kernel, name="TestAgent") - - history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) - - with pytest.raises(KernelServiceNotFoundError): - async for _ in agent.invoke_stream(history): - pass - - -def test_get_channel_keys(): - agent = ChatCompletionAgent() - keys = agent.get_channel_keys() - - for key in keys: - assert isinstance(key, str) - - -async def test_create_channel(): - agent = ChatCompletionAgent() - channel = await agent.create_channel() - - assert isinstance(channel, ChatHistoryChannel) - - -async def test_prepare_agent_chat_history_with_formatted_instructions(): - agent = ChatCompletionAgent( - name="TestAgent", id="test_id", description="Test Description", instructions="Test Instructions" - ) - with patch.object( - ChatCompletionAgent, "format_instructions", new=AsyncMock(return_value="Formatted instructions for testing") - ) as mock_format_instructions: - dummy_kernel = create_autospec(Kernel) - dummy_args = KernelArguments(param="value") - user_message = ChatMessageContent(role=AuthorRole.USER, content="User message") - history = ChatHistory(messages=[user_message]) - result_history = await agent._prepare_agent_chat_history(history, dummy_kernel, dummy_args) - mock_format_instructions.assert_awaited_once_with(dummy_kernel, dummy_args) - assert len(result_history.messages) == 2 - system_message = result_history.messages[0] - assert system_message.role == AuthorRole.SYSTEM - assert system_message.content == "Formatted instructions for testing" - assert system_message.name == agent.name - assert result_history.messages[1] == user_message - - -async def test_prepare_agent_chat_history_without_formatted_instructions(): - agent = ChatCompletionAgent( - name="TestAgent", id="test_id", description="Test Description", instructions="Test Instructions" - ) - with patch.object( - ChatCompletionAgent, "format_instructions", new=AsyncMock(return_value=None) - ) as mock_format_instructions: - dummy_kernel = create_autospec(Kernel) - dummy_args = KernelArguments(param="value") - user_message = ChatMessageContent(role=AuthorRole.USER, content="User message") - history = ChatHistory(messages=[user_message]) - result_history = await agent._prepare_agent_chat_history(history, dummy_kernel, dummy_args) - mock_format_instructions.assert_awaited_once_with(dummy_kernel, dummy_args) - assert len(result_history.messages) == 1 - assert result_history.messages[0] == user_message diff --git a/python/tests/unit/agents/openai_assistant/conftest.py b/python/tests/unit/agents/openai_assistant/conftest.py deleted file mode 100644 index d21ae4e4e0fc..000000000000 --- a/python/tests/unit/agents/openai_assistant/conftest.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from typing import Any -from unittest.mock import AsyncMock, MagicMock - -import pytest -from openai import AsyncOpenAI -from openai.types.beta.assistant import Assistant -from openai.types.beta.threads.file_citation_annotation import FileCitation, FileCitationAnnotation -from openai.types.beta.threads.file_path_annotation import FilePath, FilePathAnnotation -from openai.types.beta.threads.image_file import ImageFile -from openai.types.beta.threads.image_file_content_block import ImageFileContentBlock -from openai.types.beta.threads.text import Text -from openai.types.beta.threads.text_content_block import TextContentBlock - - -@pytest.fixture -def mock_thread(): - class MockThread: - id = "test_thread_id" - - return MockThread() - - -@pytest.fixture -def mock_thread_messages(): - class MockMessage: - def __init__(self, id, role, content, assistant_id=None): - self.id = id - self.role = role - self.content = content - self.assistant_id = assistant_id - - return [ - MockMessage( - id="test_message_id_1", - role="user", - content=[ - TextContentBlock( - type="text", - text=Text( - value="Hello", - annotations=[ - FilePathAnnotation( - type="file_path", - file_path=FilePath(file_id="test_file_id"), - end_index=5, - start_index=0, - text="Hello", - ), - FileCitationAnnotation( - type="file_citation", - file_citation=FileCitation(file_id="test_file_id"), - text="Hello", - start_index=0, - end_index=5, - ), - ], - ), - ) - ], - ), - MockMessage( - id="test_message_id_2", - role="assistant", - content=[ - ImageFileContentBlock(type="image_file", image_file=ImageFile(file_id="test_file_id", detail="auto")) - ], - assistant_id="assistant_1", - ), - ] - - -@pytest.fixture -def openai_client(assistant_definition, mock_thread, mock_thread_messages) -> AsyncMock: - async def mock_list_messages(*args, **kwargs) -> Any: - return MagicMock(data=mock_thread_messages) - - async def mock_retrieve_assistant(*args, **kwargs) -> Any: - asst = AsyncMock(spec=Assistant) - asst.name = "test-assistant" - return asst - - client = AsyncMock(spec=AsyncOpenAI) - client.beta = MagicMock() - client.beta.assistants = MagicMock() - client.beta.assistants.create = AsyncMock(return_value=assistant_definition) - client.beta.assistants.retrieve = AsyncMock(side_effect=mock_retrieve_assistant) - client.beta.threads = MagicMock() - client.beta.threads.create = AsyncMock(return_value=mock_thread) - client.beta.threads.messages = MagicMock() - client.beta.threads.messages.list = AsyncMock(side_effect=mock_list_messages) - - return client - - -@pytest.fixture -def assistant_definition() -> AsyncMock: - definition = AsyncMock(spec=Assistant) - definition.id = "agent123" - definition.name = "agentName" - definition.description = "desc" - definition.instructions = "test agent" - - return definition diff --git a/python/tests/unit/agents/openai_assistant/test_assistant_thread_actions.py b/python/tests/unit/agents/openai_assistant/test_assistant_thread_actions.py deleted file mode 100644 index dd75ab7ddb0f..000000000000 --- a/python/tests/unit/agents/openai_assistant/test_assistant_thread_actions.py +++ /dev/null @@ -1,770 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- - -from datetime import datetime, timedelta, timezone -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest -from openai import AsyncOpenAI -from openai.types.beta.assistant import Assistant -from openai.types.beta.assistant_stream_event import ( - ThreadMessageDelta, - ThreadRunRequiresAction, - ThreadRunStepCompleted, - ThreadRunStepDelta, -) -from openai.types.beta.code_interpreter_tool import CodeInterpreterTool -from openai.types.beta.file_search_tool import FileSearchTool -from openai.types.beta.function_tool import FunctionTool -from openai.types.beta.threads import ImageFileDelta, ImageFileDeltaBlock, MessageDelta, TextDelta, TextDeltaBlock -from openai.types.beta.threads.file_citation_annotation import FileCitation, FileCitationAnnotation -from openai.types.beta.threads.file_citation_delta_annotation import FileCitationDeltaAnnotation -from openai.types.beta.threads.file_path_annotation import FilePath, FilePathAnnotation -from openai.types.beta.threads.image_file import ImageFile -from openai.types.beta.threads.image_file_content_block import ImageFileContentBlock -from openai.types.beta.threads.message import Message -from openai.types.beta.threads.message_delta_event import MessageDeltaEvent -from openai.types.beta.threads.required_action_function_tool_call import Function, RequiredActionFunctionToolCall -from openai.types.beta.threads.run import ( - RequiredAction, - RequiredActionSubmitToolOutputs, - Run, -) -from openai.types.beta.threads.run_create_params import TruncationStrategy -from openai.types.beta.threads.runs import ( - FunctionToolCallDelta, - RunStep, - RunStepDelta, - RunStepDeltaEvent, - ToolCallDeltaObject, - ToolCallsStepDetails, -) -from openai.types.beta.threads.runs.code_interpreter_tool_call import CodeInterpreter, CodeInterpreterToolCall -from openai.types.beta.threads.runs.code_interpreter_tool_call_delta import CodeInterpreter as CodeInterpreterDelta -from openai.types.beta.threads.runs.code_interpreter_tool_call_delta import CodeInterpreterToolCallDelta -from openai.types.beta.threads.runs.function_tool_call import Function as RunsFunction -from openai.types.beta.threads.runs.function_tool_call import FunctionToolCall -from openai.types.beta.threads.runs.function_tool_call_delta import Function as FunctionForToolCallDelta -from openai.types.beta.threads.runs.message_creation_step_details import MessageCreation, MessageCreationStepDetails -from openai.types.beta.threads.runs.run_step import Usage -from openai.types.beta.threads.text import Text -from openai.types.beta.threads.text_content_block import TextContentBlock -from openai.types.shared.function_definition import FunctionDefinition - -from semantic_kernel.agents.open_ai.assistant_thread_actions import AssistantThreadActions -from semantic_kernel.agents.open_ai.function_action_result import FunctionActionResult -from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent -from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.file_reference_content import FileReferenceContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions.agent_exceptions import AgentInvokeException -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.functions.kernel_function_decorator import kernel_function -from semantic_kernel.functions.kernel_plugin 
import KernelPlugin -from semantic_kernel.kernel import Kernel -from semantic_kernel.prompt_template.kernel_prompt_template import KernelPromptTemplate -from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig - - -def mock_thread_run_step_completed(): - return ThreadRunStepCompleted( - data=RunStep( - id="step_id_2", - type="message_creation", - completed_at=int(datetime.now(timezone.utc).timestamp()), - created_at=int((datetime.now(timezone.utc) - timedelta(minutes=2)).timestamp()), - step_details=MessageCreationStepDetails( - type="message_creation", message_creation=MessageCreation(message_id="test") - ), - assistant_id="assistant_id", - object="thread.run.step", - run_id="run_id", - status="completed", - thread_id="thread_id", - usage=Usage(completion_tokens=10, prompt_tokens=5, total_tokens=15), - ), - event="thread.run.step.completed", - ) - - -def create_thread_message_delta_mock(): - return ThreadMessageDelta( - data=MessageDeltaEvent( - id="mock_msg_id", - delta=MessageDelta( - content=[ - TextDeltaBlock( - index=0, - type="text", - text=TextDelta( - annotations=[ - FileCitationDeltaAnnotation( - index=0, - type="file_citation", - start_index=1, - end_index=3, - text="annotation", - ) - ], - value="Hello", - ), - ), - ImageFileDeltaBlock( - index=0, - type="image_file", - image_file=ImageFileDelta( - file_id="test_file_id", - detail="auto", - ), - ), - ], - role=None, - ), - object="thread.message.delta", - ), - event="thread.message.delta", - ) - - -def create_thread_run_step_delta_mock(): - function = FunctionForToolCallDelta(name="math-Add", arguments="", output=None) - function_tool_call = FunctionToolCallDelta( - index=0, type="function", id="call_RcvYVzsppjjnUZcC47fAlwTW", function=function - ) - code = CodeInterpreterDelta(input="import os") - code_tool_call = CodeInterpreterToolCallDelta( - index=1, type="code_interpreter", id="call_RcvYVzsppjjnUZcC47fAlwTW", code_interpreter=code - ) - - step_details = ToolCallDeltaObject(type="tool_calls", tool_calls=[function_tool_call, code_tool_call]) - delta = RunStepDelta(step_details=step_details) - run_step_delta_event = RunStepDeltaEvent( - id="step_FXzQ44kRmoeHOPUstkEI1UL5", delta=delta, object="thread.run.step.delta" - ) - return ThreadRunStepDelta(data=run_step_delta_event, event="thread.run.step.delta") - - -class MockError: - def __init__(self, message: str): - self.message = message - - -class MockRunData: - def __init__(self, id, status): - self.id = id - self.status = status - - -class ErrorMockRunData(MockRunData): - def __init__(self, id, status, last_error=None): - super().__init__(id, status) - self.last_error = last_error - - -class MockEvent: - def __init__(self, event, data): - self.event = event - self.data = data - - -class MockAsyncIterable: - def __init__(self, items): - self.items = items.copy() - - def __aiter__(self): - self._iter = iter(self.items) - return self - - async def __anext__(self): - try: - return next(self._iter) - except StopIteration: - raise StopAsyncIteration - - -class MockStream: - def __init__(self, events): - self.events = events - - async def __aenter__(self): - return MockAsyncIterable(self.events) - - async def __aexit__(self, exc_type, exc_val, exc_tb): - pass - - -@pytest.fixture -def mock_run_step_tool_call(): - class MockToolCall: - def __init__(self): - self.type = "code_interpreter" - self.code_interpreter = MagicMock(input="print('Hello, world!')") - - return RunStep( - id="step_id_1", - type="tool_calls", - 
completed_at=int(datetime.now(timezone.utc).timestamp()), - created_at=int((datetime.now(timezone.utc) - timedelta(minutes=1)).timestamp()), - step_details=ToolCallsStepDetails( - tool_calls=[ - CodeInterpreterToolCall( # type: ignore - type="code_interpreter", - id="tool_call_id", - code_interpreter=CodeInterpreter(input="test code", outputs=[]), - ), - FunctionToolCall( - type="function", - id="tool_call_id", - function=RunsFunction(arguments="{}", name="function_name", output="test output"), - ), - ], - type="tool_calls", - ), - assistant_id="assistant_id", - object="thread.run.step", - run_id="run_id", - status="completed", - thread_id="thread_id", - ) - - -def mock_thread_requires_action_run(): - return ThreadRunRequiresAction( - data=Run( - id="run_00OwjJnEg2SGJy8sky7ip35P", - assistant_id="asst_wMMAX5F59szE7YHrCKSSgJlE", - cancelled_at=None, - completed_at=None, - created_at=1727798684, - expires_at=1727799284, - failed_at=None, - incomplete_details=None, - instructions="Answer questions about the menu.", - last_error=None, - max_completion_tokens=None, - max_prompt_tokens=None, - metadata={}, - model="gpt-4o-2024-08-06", - object="thread.run", - parallel_tool_calls=True, - required_action=RequiredAction( - submit_tool_outputs=RequiredActionSubmitToolOutputs( - tool_calls=[ - RequiredActionFunctionToolCall( - id="call_OTcZMjhm7WbhFnGkrmUjs68T", - function=Function(arguments="{}", name="menu-get_specials"), - type="function", - ) - ] - ), - type="submit_tool_outputs", - ), - response_format="auto", - started_at=1727798685, - status="requires_action", - thread_id="thread_jR4ZLlUwSrPcsLfdnGyFxi4Z", - tool_choice="auto", - tools=[ - FunctionTool( - function=FunctionDefinition( - name="menu-get_item_price", - description="Provides the price of the requested menu item.", - parameters={ - "type": "object", - "properties": { - "menu_item": {"type": "string", "description": "The name of the menu item."} - }, - "required": ["menu_item"], - }, - strict=False, - ), - type="function", - ), - FunctionTool( - function=FunctionDefinition( - name="menu-get_specials", - description="Provides a list of specials from the menu.", - parameters={"type": "object", "properties": {}, "required": []}, - strict=False, - ), - type="function", - ), - ], - truncation_strategy=TruncationStrategy(type="auto", last_messages=None), - usage=None, - temperature=1.0, - top_p=1.0, - tool_resources={"code_interpreter": {"file_ids": []}}, # type: ignore - ), - event="thread.run.requires_action", - ) - - -@pytest.fixture -def mock_thread_messages(): - class MockMessage: - def __init__(self, id, role, content, assistant_id=None): - self.id = id - self.role = role - self.content = content - self.assistant_id = assistant_id - - return [ - MockMessage( - id="test_message_id_1", - role="user", - content=[ - TextContentBlock( - type="text", - text=Text( - value="Hello", - annotations=[ - FilePathAnnotation( - type="file_path", - file_path=FilePath(file_id="test_file_id"), - end_index=5, - start_index=0, - text="Hello", - ), - FileCitationAnnotation( - type="file_citation", - file_citation=FileCitation(file_id="test_file_id"), - text="Hello", - start_index=0, - end_index=5, - ), - ], - ), - ) - ], - ), - MockMessage( - id="test_message_id_2", - role="assistant", - content=[ - ImageFileContentBlock(type="image_file", image_file=ImageFile(file_id="test_file_id", detail="auto")) - ], - assistant_id="assistant_1", - ), - ] - - -@pytest.fixture -def mock_run_step_message_creation(): - class MockMessageCreation: - def __init__(self): - 
self.message_id = "message_id" - - class MockStepDetails: - def __init__(self): - self.message_creation = MockMessageCreation() - - return RunStep( - id="step_id_2", - type="message_creation", - completed_at=int(datetime.now(timezone.utc).timestamp()), - created_at=int((datetime.now(timezone.utc) - timedelta(minutes=2)).timestamp()), - step_details=MessageCreationStepDetails( - type="message_creation", message_creation=MessageCreation(message_id="test") - ), - assistant_id="assistant_id", - object="thread.run.step", - run_id="run_id", - status="completed", - thread_id="thread_id", - ) - - -@pytest.fixture -def mock_run_in_progress(): - class MockRun: - def __init__(self): - self.id = "run_id" - self.status = "requires_action" - self.assistant_id = "assistant_id" - self.created_at = int(datetime.now(timezone.utc).timestamp()) - self.instructions = "instructions" - self.model = "model" - self.object = "run" - self.thread_id = "thread_id" - self.tools = [] - self.poll_count = 0 - self.required_action = RequiredAction( - type="submit_tool_outputs", - submit_tool_outputs=RequiredActionSubmitToolOutputs( - tool_calls=[ - RequiredActionFunctionToolCall( - id="tool_call_id", - type="function", - function=Function(arguments="{}", name="function_name"), - ) - ] - ), - ) - self.last_error = None - - def update_status(self): - self.poll_count += 1 - if self.poll_count > 2: - self.status = "completed" - - return MockRun() - - -class SamplePlugin: - @kernel_function - def test_plugin(self, *args, **kwargs): - pass - - -async def test_agent_thread_actions_create_message(): - client = AsyncMock(spec=AsyncOpenAI) - client.beta = MagicMock() - client.beta.assistants = MagicMock() - client.beta.threads.messages = MagicMock() - client.beta.threads.messages.create = AsyncMock(spec=Message) - - msg = ChatMessageContent(role=AuthorRole.USER, content="some content") - created_message = await AssistantThreadActions.create_message(client, "threadXYZ", msg) - assert created_message is not None - - -async def test_assistant_thread_actions_invoke( - mock_run_step_message_creation, mock_run_step_tool_call, mock_run_in_progress, mock_thread_messages -): - async def mock_poll_run_status(agent, run, thread_id): - run.update_status() - return run - - sample_prompt_template_config = PromptTemplateConfig( - template="template", - ) - - kernel_plugin = KernelPlugin(name="expected_plugin_name", description="expected_plugin_description") - - client = AsyncMock(spec=AsyncOpenAI) - definition = AsyncMock(spec=Assistant) - definition.id = "agent123" - definition.name = "agentName" - definition.description = "desc" - definition.instructions = "test agent" - definition.tools = [FileSearchTool(type="file_search"), CodeInterpreterTool(type="code_interpreter")] - definition.model = "gpt-4o" - definition.temperature = (1.0,) - definition.top_p = 1.0 - definition.metadata = {} - - client.beta = MagicMock() - client.beta.threads = MagicMock() - client.beta.threads.runs = MagicMock() - client.beta.threads.runs.create = AsyncMock(return_value=mock_run_in_progress) - client.beta.threads.runs.submit_tool_outputs = AsyncMock() - client.beta.threads.runs.steps = MagicMock() - client.beta.threads.runs.steps.list = AsyncMock( - return_value=MagicMock(data=[mock_run_step_message_creation, mock_run_step_tool_call]) - ) - - agent = OpenAIAssistantAgent( - client=client, - definition=definition, - arguments=KernelArguments(test="test"), - kernel=AsyncMock(spec=Kernel), - plugins=[SamplePlugin(), kernel_plugin], - 
polling_options=AsyncMock(spec=RunPollingOptions), - prompt_template_config=sample_prompt_template_config, - other_arg="test", - ) - - with ( - patch( - "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions._poll_run_status", - new=AsyncMock(side_effect=mock_poll_run_status), - ), - patch( - "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions._retrieve_message", - new=AsyncMock(side_effect=AsyncMock(return_value=mock_thread_messages[0])), - ), - ): - async for message in AssistantThreadActions.invoke( - agent=agent, - thread_id="thread123", - kernel=AsyncMock(spec=Kernel), - additional_messages=[ - ChatMessageContent( - role=AuthorRole.USER, - content="additional content", - items=[FileReferenceContent(file_id="file_id", tools=["file_search"])], - metadata={"sample_metadata_key": "sample_metadata_val"}, - ) - ], - ): - assert message is not None - - -async def test_assistant_thread_actions_stream( - mock_thread_messages, -): - events = [ - MockEvent("thread.run.created", MockRunData(id="run_1", status="queued")), - MockEvent("thread.run.in_progress", MockRunData(id="run_1", status="in_progress")), - mock_thread_run_step_completed(), - MockEvent("thread.run.completed", MockRunData(id="run_1", status="completed")), - MockEvent( - "thread.run.failed", ErrorMockRunData(id="run_1", status="failed", last_error=MockError("Test error")) - ), - ] - - client = AsyncMock(spec=AsyncOpenAI) - definition = AsyncMock(spec=Assistant) - definition.id = "agent123" - definition.name = "agentName" - definition.description = "desc" - definition.instructions = "test agent" - definition.tools = [] - definition.model = "gpt-4o" - definition.temperature = 0.7 - definition.top_p = 0.9 - definition.metadata = {} - definition.response_format = {"type": "json_object"} - - agent = OpenAIAssistantAgent( - client=client, - definition=definition, - ) - - client.beta = MagicMock() - client.beta.threads = MagicMock() - client.beta.assistants = MagicMock() - client.beta.threads.runs = MagicMock() - client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) - client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) - - # Set up agent prompts - agent.instructions = "Base instructions" - agent.prompt_template = KernelPromptTemplate( - prompt_template_config=PromptTemplateConfig(template="Template instructions") - ) - - # Scenario A: Use only prompt template - messages = [] - async for content in AssistantThreadActions.invoke_stream(agent=agent, thread_id="thread_id", messages=messages): - assert content is not None - - -async def test_assistant_thread_actions_stream_run_fails( - mock_thread_messages, -): - events = [ - MockEvent("thread.run.failed", ErrorMockRunData(id=1, status="failed", last_error=MockError("Test error"))), - ] - - client = AsyncMock(spec=AsyncOpenAI) - definition = AsyncMock(spec=Assistant) - definition.id = "agent123" - definition.name = "agentName" - definition.description = "desc" - definition.instructions = "test agent" - definition.tools = [] - definition.model = "gpt-4o" - definition.temperature = 0.7 - definition.top_p = 0.9 - definition.metadata = {} - definition.response_format = {"type": "json_object"} - - agent = OpenAIAssistantAgent( - client=client, - definition=definition, - ) - - client.beta = MagicMock() - client.beta.threads = MagicMock() - client.beta.assistants = MagicMock() - client.beta.threads.runs = MagicMock() - client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) 
- client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) - - # Set up agent prompts - agent.instructions = "Base instructions" - agent.prompt_template = KernelPromptTemplate( - prompt_template_config=PromptTemplateConfig(template="Template instructions") - ) - - # Scenario A: Use only prompt template - messages = [] - with pytest.raises(AgentInvokeException): - async for _ in AssistantThreadActions.invoke_stream(agent=agent, thread_id="thread_id", messages=messages): - pass - - -async def test_assistant_thread_actions_stream_with_instructions( - mock_thread_messages, -): - events = [ - MockEvent("thread.run.created", MockRunData(id="run_1", status="queued")), - MockEvent("thread.run.in_progress", MockRunData(id="run_1", status="in_progress")), - create_thread_message_delta_mock(), - create_thread_run_step_delta_mock(), - mock_thread_requires_action_run(), - mock_thread_run_step_completed(), - MockEvent("thread.run.completed", MockRunData(id="run_1", status="completed")), - ] - - client = AsyncMock(spec=AsyncOpenAI) - definition = AsyncMock(spec=Assistant) - definition.id = "agent123" - definition.name = "agentName" - definition.description = "desc" - definition.instructions = "test agent" - definition.tools = [] - definition.model = "gpt-4o" - definition.temperature = 0.7 - definition.top_p = 0.9 - definition.metadata = {} - definition.response_format = {"type": "json_object"} - - agent = OpenAIAssistantAgent( - client=client, - definition=definition, - ) - - client.beta = MagicMock() - client.beta.threads = MagicMock() - client.beta.assistants = MagicMock() - client.beta.threads.runs = MagicMock() - client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) - client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) - - # Set up agent prompts - agent.instructions = "Base instructions" - agent.prompt_template = KernelPromptTemplate( - prompt_template_config=PromptTemplateConfig(template="Template instructions") - ) - - # Scenario A: Use only prompt template - messages = [] - async for content in AssistantThreadActions.invoke_stream(agent=agent, thread_id="thread_id", messages=messages): - assert content is not None - - assert len(messages) > 0, "Expected messages to be populated during the stream." - client.beta.threads.runs.stream.assert_called_once_with( - assistant_id=agent.id, - thread_id="thread_id", - instructions="Template instructions", - tools=[], - temperature=0.7, - top_p=0.9, - model="gpt-4o", - metadata={}, - ) - - client.beta.threads.runs.stream.reset_mock() - - # Scenario B: Use prompt template with additional instructions - messages = [] - async for content in AssistantThreadActions.invoke_stream( - agent=agent, - thread_id="thread_id", - messages=messages, - additional_instructions="My additional instructions", - ): - assert content is not None - - assert len(messages) > 0, "Expected messages to be populated during the stream." 
- client.beta.threads.runs.stream.assert_called_once_with( - assistant_id=agent.id, - thread_id="thread_id", - instructions="Template instructions\n\nMy additional instructions", - tools=[], - temperature=0.7, - top_p=0.9, - model="gpt-4o", - metadata={}, - ) - - client.beta.threads.runs.stream.reset_mock() - - -async def test_poll_loop_exits_on_status_change(): - AssistantThreadActions.polling_status = {"in_progress"} # type: ignore - - polling_interval = timedelta(seconds=0.01) - dummy_polling_options = MagicMock() - dummy_polling_options.get_polling_interval = lambda count: polling_interval - - run_id = "run_123" - initial_run = MagicMock() - initial_run.id = run_id - - run_in_progress = MagicMock() - run_in_progress.id = run_id - run_in_progress.status = "in_progress" - - run_completed = MagicMock() - run_completed.id = run_id - run_completed.status = "completed" - - dummy_agent = MagicMock() - dummy_agent.polling_options = dummy_polling_options - dummy_agent.client.beta.threads.runs.retrieve = AsyncMock(side_effect=[run_in_progress, run_completed]) - - thread_id = "thread_123" - - result_run = await AssistantThreadActions._poll_loop(dummy_agent, initial_run, thread_id) - - assert result_run.status == "completed" - - -async def test_handle_streaming_requires_action_returns_result(): - dummy_run = MagicMock() - dummy_run.id = "dummy_run_id" - dummy_function_steps = {"step1": MagicMock()} - dummy_fccs = {"fcc_key": "fcc_value"} - dummy_function_call_streaming_content = MagicMock() - dummy_function_result_streaming_content = MagicMock() - dummy_tool_outputs = {"output": "value"} - dummy_kernel = MagicMock() - dummy_agent_name = "TestAgent" - with ( - patch( - "semantic_kernel.agents.open_ai.assistant_thread_actions.get_function_call_contents", - return_value=dummy_fccs, - ), - patch( - "semantic_kernel.agents.open_ai.assistant_thread_actions.generate_function_call_streaming_content", - return_value=dummy_function_call_streaming_content, - ), - patch( - "semantic_kernel.agents.open_ai.assistant_thread_actions.merge_streaming_function_results", - return_value=[dummy_function_result_streaming_content], - ), - patch.object(AssistantThreadActions, "_invoke_function_calls", new=AsyncMock(return_value=None)), - patch.object(AssistantThreadActions, "_format_tool_outputs", return_value=dummy_tool_outputs), - ): - result = await AssistantThreadActions._handle_streaming_requires_action( - dummy_agent_name, - dummy_kernel, - dummy_run, - dummy_function_steps, # type: ignore - ) - assert result is not None - assert isinstance(result, FunctionActionResult) - assert result.function_call_streaming_content == dummy_function_call_streaming_content - assert result.function_result_streaming_content == dummy_function_result_streaming_content - assert result.tool_outputs == dummy_tool_outputs - - -async def test_handle_streaming_requires_action_returns_none(): - dummy_run = MagicMock() - dummy_run.id = "dummy_run_id" - dummy_function_steps = {"step1": MagicMock()} - dummy_kernel = MagicMock() - dummy_agent_name = "TestAgent" - with patch("semantic_kernel.agents.open_ai.assistant_thread_actions.get_function_call_contents", return_value=None): - result = await AssistantThreadActions._handle_streaming_requires_action( - dummy_agent_name, - dummy_kernel, - dummy_run, - dummy_function_steps, # type: ignore - ) - assert result is None diff --git a/python/tests/unit/agents/openai_assistant/test_azure_assistant_agent.py b/python/tests/unit/agents/openai_assistant/test_azure_assistant_agent.py deleted file mode 100644 
index 123a19bd34cc..000000000000 --- a/python/tests/unit/agents/openai_assistant/test_azure_assistant_agent.py +++ /dev/null @@ -1,387 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from typing import Any -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest -from openai import AsyncOpenAI -from openai.types.beta.assistant import Assistant -from openai.types.beta.threads.file_citation_annotation import FileCitation, FileCitationAnnotation -from openai.types.beta.threads.file_path_annotation import FilePath, FilePathAnnotation -from openai.types.beta.threads.image_file import ImageFile -from openai.types.beta.threads.image_file_content_block import ImageFileContentBlock -from openai.types.beta.threads.text import Text -from openai.types.beta.threads.text_content_block import TextContentBlock -from pydantic import BaseModel, ValidationError - -from semantic_kernel.agents.open_ai import AzureAssistantAgent -from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions -from semantic_kernel.contents.annotation_content import AnnotationContent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.file_reference_content import FileReferenceContent -from semantic_kernel.contents.text_content import TextContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.functions.kernel_function_decorator import kernel_function -from semantic_kernel.functions.kernel_plugin import KernelPlugin -from semantic_kernel.kernel import Kernel -from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig - - -class SamplePlugin: - @kernel_function - def test_plugin(self, *args, **kwargs): - pass - - -class ResponseModelPydantic(BaseModel): - response: str - items: list[str] - - -class ResponseModelNonPydantic: - response: str - items: list[str] - - -@pytest.fixture -def mock_thread_messages(): - class MockMessage: - def __init__(self, id, role, content, assistant_id=None): - self.id = id - self.role = role - self.content = content - self.assistant_id = assistant_id - - return [ - MockMessage( - id="test_message_id_1", - role="user", - content=[ - TextContentBlock( - type="text", - text=Text( - value="Hello", - annotations=[ - FilePathAnnotation( - type="file_path", - file_path=FilePath(file_id="test_file_id"), - end_index=5, - start_index=0, - text="Hello", - ), - FileCitationAnnotation( - type="file_citation", - file_citation=FileCitation(file_id="test_file_id"), - text="Hello", - start_index=0, - end_index=5, - ), - ], - ), - ) - ], - ), - MockMessage( - id="test_message_id_2", - role="assistant", - content=[ - ImageFileContentBlock(type="image_file", image_file=ImageFile(file_id="test_file_id", detail="auto")) - ], - assistant_id="assistant_1", - ), - ] - - -async def test_open_ai_assistant_agent_init(): - sample_prompt_template_config = PromptTemplateConfig( - template="template", - ) - - kernel_plugin = KernelPlugin(name="expected_plugin_name", description="expected_plugin_description") - - client = AsyncMock(spec=AsyncOpenAI) - definition = AsyncMock(spec=Assistant) - definition.id = "agent123" - definition.name = "agentName" - definition.description = "desc" - definition.instructions = "test agent" - agent = AzureAssistantAgent( - client=client, - definition=definition, - 
arguments=KernelArguments(test="test"), - kernel=AsyncMock(spec=Kernel), - plugins=[SamplePlugin(), kernel_plugin], - polling_options=AsyncMock(spec=RunPollingOptions), - prompt_template_config=sample_prompt_template_config, # type: ignore - other_arg="test", # type: ignore - ) - assert agent.id == "agent123" - assert agent.name == "agentName" - assert agent.description == "desc" - - -def test_azure_open_ai_settings_create_throws(azure_openai_unit_test_env): - with patch( - "semantic_kernel.connectors.ai.open_ai.settings.azure_open_ai_settings.AzureOpenAISettings.create" - ) as mock_create: - mock_create.side_effect = ValidationError.from_exception_data("test", line_errors=[], input_type="python") - - with pytest.raises(AgentInitializationException, match="Failed to create Azure OpenAI settings."): - _, _ = AzureAssistantAgent.setup_resources(api_key="test_api_key") - - -def test_open_ai_assistant_with_code_interpreter_tool(): - tools, resources = AzureAssistantAgent.configure_code_interpreter_tool(file_ids=["file_id"]) - assert tools is not None - assert resources is not None - - -def test_open_ai_assistant_with_file_search_tool(): - tools, resources = AzureAssistantAgent.configure_file_search_tool(vector_store_ids=["vector_store_id"]) - assert tools is not None - assert resources is not None - - -@pytest.mark.parametrize( - "model, json_schema_expected", - [ - pytest.param(ResponseModelPydantic, True), - pytest.param(ResponseModelNonPydantic, True), - pytest.param({"type": "json_object"}, False), - pytest.param({"type": "json_schema", "json_schema": {"schema": {}}}, False), - ], -) -def test_configure_response_format(model, json_schema_expected): - response_format = AzureAssistantAgent.configure_response_format(model) - assert response_format is not None - if json_schema_expected: - assert response_format["json_schema"] is not None # type: ignore - - -def test_configure_response_format_unexpected_type(): - with pytest.raises(AgentInitializationException) as exc_info: - AzureAssistantAgent.configure_response_format({"type": "invalid_type"}) - assert "Encountered unexpected response_format type" in str(exc_info.value) - - -def test_configure_response_format_json_schema_invalid_schema(): - with pytest.raises(AgentInitializationException) as exc_info: - AzureAssistantAgent.configure_response_format({"type": "json_schema", "json_schema": "not_a_dict"}) - assert "If response_format has type 'json_schema'" in str(exc_info.value) - - -def test_configure_response_format_invalid_input_type(): - with pytest.raises(AgentInitializationException) as exc_info: - AzureAssistantAgent.configure_response_format(3) # type: ignore - assert "response_format must be a dictionary" in str(exc_info.value) - - -@pytest.mark.parametrize( - "message", - [ - pytest.param(ChatMessageContent(role=AuthorRole.USER, content="text")), - pytest.param("text"), - ], -) -async def test_open_ai_assistant_agent_add_chat_message(message): - client = AsyncMock(spec=AsyncOpenAI) - definition = AsyncMock(spec=Assistant) - definition.id = "agent123" - definition.name = "agentName" - definition.description = "desc" - definition.instructions = "test agent" - agent = AzureAssistantAgent(client=client, definition=definition) - with patch( - "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.create_message", - ): - await agent.add_chat_message("threadId", message) - - -@pytest.mark.parametrize( - "arguments, include_args", - [ - pytest.param({"extra_args": "extra_args"}, True), - pytest.param(None, False), - 
], -) -async def test_open_ai_assistant_agent_invoke(arguments, include_args): - client = AsyncMock(spec=AsyncOpenAI) - definition = AsyncMock(spec=Assistant) - definition.id = "agent123" - definition.name = "agentName" - definition.description = "desc" - definition.instructions = "test agent" - definition.tools = [] - definition.model = "gpt-4o" - definition.response_format = {"type": "json_object"} - definition.temperature = 0.1 - definition.top_p = 0.9 - definition.metadata = {} - agent = AzureAssistantAgent(client=client, definition=definition) - results = [] - - async def fake_invoke(*args, **kwargs): - yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") - - kwargs = None - if include_args: - kwargs = arguments - - with patch( - "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke", - side_effect=fake_invoke, - ): - async for item in agent.invoke("thread_id", **(kwargs or {})): - results.append(item) - - assert len(results) == 1 - - -@pytest.mark.parametrize( - "arguments, include_args", - [ - pytest.param({"extra_args": "extra_args"}, True), - pytest.param(None, False), - ], -) -async def test_open_ai_assistant_agent_invoke_stream(arguments, include_args): - client = AsyncMock(spec=AsyncOpenAI) - definition = AsyncMock(spec=Assistant) - definition.id = "agent123" - definition.name = "agentName" - definition.description = "desc" - definition.instructions = "test agent" - agent = AzureAssistantAgent(client=client, definition=definition) - results = [] - - async def fake_invoke(*args, **kwargs): - yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") - - kwargs = None - if include_args: - kwargs = arguments - - with patch( - "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke_stream", - side_effect=fake_invoke, - ): - async for item in agent.invoke_stream("thread_id", **(kwargs or {})): - results.append(item) - - assert len(results) == 1 - - -def test_open_ai_assistant_agent_get_channel_keys(): - client = AsyncMock(spec=AsyncOpenAI) - definition = AsyncMock(spec=Assistant) - definition.id = "agent123" - definition.name = "agentName" - definition.description = "desc" - definition.instructions = "test agent" - agent = AzureAssistantAgent(client=client, definition=definition) - keys = list(agent.get_channel_keys()) - assert len(keys) >= 3 - - -@pytest.fixture -def mock_thread(): - class MockThread: - id = "test_thread_id" - - return MockThread() - - -async def test_open_ai_assistant_agent_create_channel(mock_thread): - from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel - - client = AsyncMock(spec=AsyncOpenAI) - definition = AsyncMock(spec=Assistant) - definition.id = "agent123" - definition.name = "agentName" - definition.description = "desc" - definition.instructions = "test agent" - agent = AzureAssistantAgent(client=client, definition=definition) - client.beta = MagicMock() - client.beta.assistants = MagicMock() - client.beta.assistants.create = AsyncMock(return_value=definition) - client.beta.threads = MagicMock() - client.beta.threads.create = AsyncMock(return_value=mock_thread) - ch = await agent.create_channel() - assert isinstance(ch, OpenAIAssistantChannel) - assert ch.thread_id == "test_thread_id" - - -def test_create_openai_client(azure_openai_unit_test_env): - client, model = AzureAssistantAgent.setup_resources(api_key="test_api_key", default_headers={"user_agent": "test"}) - assert client is not None - assert 
client.api_key == "test_api_key" - assert model is not None - - -def test_create_azure_openai_client(azure_openai_unit_test_env): - client, model = AzureAssistantAgent.setup_resources( - api_key="test_api_key", endpoint="https://test_endpoint.com", default_headers={"user_agent": "test"} - ) - assert model is not None - assert client is not None - assert client.api_key == "test_api_key" - assert str(client.base_url) == "https://test_endpoint.com/openai/" - - -@pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_ENDPOINT"]], indirect=True) -async def test_retrieve_agent_missing_endpoint_throws(kernel, azure_openai_unit_test_env): - with pytest.raises(AgentInitializationException, match="Please provide an Azure OpenAI endpoint"): - _, _ = AzureAssistantAgent.setup_resources( - env_file_path="./", api_key="test_api_key", default_headers={"user_agent": "test"} - ) - - -@pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"]], indirect=True) -async def test_retrieve_agent_missing_chat_deployment_name_throws(kernel, azure_openai_unit_test_env): - with pytest.raises(AgentInitializationException, match="Please provide an Azure OpenAI deployment name"): - _, _ = AzureAssistantAgent.setup_resources( - env_file_path="./", - api_key="test_api_key", - endpoint="https://test_endpoint.com", - default_headers={"user_agent": "test"}, - ) - - -async def test_get_thread_messages(mock_thread_messages, openai_unit_test_env): - async def mock_list_messages(*args, **kwargs) -> Any: - return MagicMock(data=mock_thread_messages) - - async def mock_retrieve_assistant(*args, **kwargs) -> Any: - asst = AsyncMock(spec=Assistant) - asst.name = "test-assistant" - return asst - - mock_client = AsyncMock(spec=AsyncOpenAI) - mock_client.beta = MagicMock() - mock_client.beta.threads = MagicMock() - mock_client.beta.threads.messages = MagicMock() - mock_client.beta.threads.messages.list = AsyncMock(side_effect=mock_list_messages) - mock_client.beta.assistants = MagicMock() - mock_client.beta.assistants.retrieve = AsyncMock(side_effect=mock_retrieve_assistant) - - definition = AsyncMock(spec=Assistant) - definition.id = "agent123" - definition.name = "agentName" - definition.description = "desc" - definition.instructions = "test agent" - agent = AzureAssistantAgent(client=mock_client, definition=definition) - - messages = [message async for message in agent.get_thread_messages("test_thread_id")] - - assert len(messages) == 2 - assert len(messages[0].items) == 3 - assert isinstance(messages[0].items[0], TextContent) - assert isinstance(messages[0].items[1], AnnotationContent) - assert isinstance(messages[0].items[2], AnnotationContent) - assert messages[0].items[0].text == "Hello" - - assert len(messages[1].items) == 1 - assert isinstance(messages[1].items[0], FileReferenceContent) - assert str(messages[1].items[0].file_id) == "test_file_id" diff --git a/python/tests/unit/agents/openai_assistant/test_open_ai_assistant_agent.py b/python/tests/unit/agents/openai_assistant/test_open_ai_assistant_agent.py deleted file mode 100644 index 45b46d02aff4..000000000000 --- a/python/tests/unit/agents/openai_assistant/test_open_ai_assistant_agent.py +++ /dev/null @@ -1,294 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from unittest.mock import AsyncMock, patch - -import pytest -from pydantic import BaseModel, ValidationError - -from semantic_kernel.agents.open_ai import OpenAIAssistantAgent -from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions -from semantic_kernel.contents.annotation_content import AnnotationContent -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.file_reference_content import FileReferenceContent -from semantic_kernel.contents.text_content import TextContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException, AgentInvokeException -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.functions.kernel_function_decorator import kernel_function -from semantic_kernel.functions.kernel_plugin import KernelPlugin -from semantic_kernel.kernel import Kernel -from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig - - -class SamplePlugin: - @kernel_function - def test_plugin(self, *args, **kwargs): - pass - - -class ResponseModelPydantic(BaseModel): - response: str - items: list[str] - - -class ResponseModelNonPydantic: - response: str - items: list[str] - - -async def test_open_ai_assistant_agent_init(openai_client, assistant_definition): - sample_prompt_template_config = PromptTemplateConfig( - template="template", - ) - - kernel_plugin = KernelPlugin(name="expected_plugin_name", description="expected_plugin_description") - - agent = OpenAIAssistantAgent( - client=openai_client, - definition=assistant_definition, - arguments=KernelArguments(test="test"), - kernel=AsyncMock(spec=Kernel), - plugins=[SamplePlugin(), kernel_plugin], - polling_options=AsyncMock(spec=RunPollingOptions), - prompt_template_config=sample_prompt_template_config, - other_arg="test", - ) - assert agent.id == "agent123" - assert agent.name == "agentName" - assert agent.description == "desc" - - -def test_open_ai_settings_create_throws(openai_unit_test_env): - with patch("semantic_kernel.connectors.ai.open_ai.settings.open_ai_settings.OpenAISettings.create") as mock_create: - mock_create.side_effect = ValidationError.from_exception_data("test", line_errors=[], input_type="python") - - with pytest.raises(AgentInitializationException, match="Failed to create OpenAI settings."): - _, _ = OpenAIAssistantAgent.setup_resources(api_key="test_api_key") - - -def test_open_ai_assistant_with_code_interpreter_tool(): - tools, resources = OpenAIAssistantAgent.configure_code_interpreter_tool(file_ids=["file_id"]) - assert tools is not None - assert resources is not None - - -def test_open_ai_assistant_with_file_search_tool(): - tools, resources = OpenAIAssistantAgent.configure_file_search_tool(vector_store_ids=["vector_store_id"]) - assert tools is not None - assert resources is not None - - -@pytest.mark.parametrize( - "model, json_schema_expected", - [ - pytest.param(ResponseModelPydantic, True), - pytest.param(ResponseModelNonPydantic, True), - pytest.param({"type": "json_object"}, False), - pytest.param({"type": "json_schema", "json_schema": {"schema": {}}}, False), - ], -) -def test_configure_response_format(model, json_schema_expected): - response_format = OpenAIAssistantAgent.configure_response_format(model) - assert response_format is not None - if json_schema_expected: - assert response_format["json_schema"] is not None # type: ignore - - -def 
test_configure_response_format_unexpected_type(): - with pytest.raises(AgentInitializationException) as exc_info: - OpenAIAssistantAgent.configure_response_format({"type": "invalid_type"}) - assert "Encountered unexpected response_format type" in str(exc_info.value) - - -def test_configure_response_format_json_schema_invalid_schema(): - with pytest.raises(AgentInitializationException) as exc_info: - OpenAIAssistantAgent.configure_response_format({"type": "json_schema", "json_schema": "not_a_dict"}) - assert "If response_format has type 'json_schema'" in str(exc_info.value) - - -def test_configure_response_format_invalid_input_type(): - with pytest.raises(AgentInitializationException) as exc_info: - OpenAIAssistantAgent.configure_response_format(3) # type: ignore - assert "response_format must be a dictionary" in str(exc_info.value) - - -@pytest.mark.parametrize( - "message", - [ - pytest.param(ChatMessageContent(role=AuthorRole.USER, content="text")), - pytest.param("text"), - ], -) -async def test_open_ai_assistant_agent_add_chat_message(message, openai_client, assistant_definition): - agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) - with patch( - "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.create_message", - ): - await agent.add_chat_message("threadId", message) - - -@pytest.mark.parametrize( - "arguments, include_args", - [ - pytest.param({"extra_args": "extra_args"}, True), - pytest.param(None, False), - ], -) -async def test_open_ai_assistant_agent_get_response(arguments, include_args, openai_client, assistant_definition): - agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) - - async def fake_invoke(*args, **kwargs): - yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") - - kwargs = None - if include_args: - kwargs = arguments - - with patch( - "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke", - side_effect=fake_invoke, - ): - response = await agent.get_response("thread_id", **(kwargs or {})) - - assert response is not None - assert response.content == "content" - - -@pytest.mark.parametrize( - "arguments, include_args", - [ - pytest.param({"extra_args": "extra_args"}, True), - pytest.param(None, False), - ], -) -async def test_open_ai_assistant_agent_get_response_exception( - arguments, include_args, openai_client, assistant_definition -): - agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) - - async def fake_invoke(*args, **kwargs): - yield False, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") - - kwargs = None - if include_args: - kwargs = arguments - - with ( - patch( - "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke", - side_effect=fake_invoke, - ), - pytest.raises(AgentInvokeException), - ): - await agent.get_response("thread_id", **(kwargs or {})) - - -@pytest.mark.parametrize( - "arguments, include_args", - [ - pytest.param({"extra_args": "extra_args"}, True), - pytest.param(None, False), - ], -) -async def test_open_ai_assistant_agent_invoke(arguments, include_args, openai_client, assistant_definition): - agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) - results = [] - - async def fake_invoke(*args, **kwargs): - yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") - - kwargs = None - if include_args: - kwargs = arguments - - with patch( - 
"semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke", - side_effect=fake_invoke, - ): - async for item in agent.invoke("thread_id", **(kwargs or {})): - results.append(item) - - assert len(results) == 1 - - -@pytest.mark.parametrize( - "arguments, include_args", - [ - pytest.param({"extra_args": "extra_args"}, True), - pytest.param(None, False), - ], -) -async def test_open_ai_assistant_agent_invoke_stream(arguments, include_args, openai_client, assistant_definition): - agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) - results = [] - - async def fake_invoke(*args, **kwargs): - yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content") - - kwargs = None - if include_args: - kwargs = arguments - - with patch( - "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke_stream", - side_effect=fake_invoke, - ): - async for item in agent.invoke_stream("thread_id", **(kwargs or {})): - results.append(item) - - assert len(results) == 1 - - -def test_open_ai_assistant_agent_get_channel_keys(openai_client, assistant_definition): - agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) - keys = list(agent.get_channel_keys()) - assert len(keys) >= 3 - - -async def test_open_ai_assistant_agent_create_channel(openai_client, assistant_definition): - from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel - - agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) - ch = await agent.create_channel() - assert isinstance(ch, OpenAIAssistantChannel) - assert ch.thread_id == "test_thread_id" - - -def test_create_openai_client(openai_unit_test_env): - client, model = OpenAIAssistantAgent.setup_resources(env_file_path="./", default_headers={"user_agent": "test"}) - assert client is not None - assert client.api_key == "test_api_key" - assert model is not None - - -@pytest.mark.parametrize("exclude_list", [["OPENAI_API_KEY"]], indirect=True) -async def test_open_ai_agent_missing_api_key_throws(kernel, openai_unit_test_env): - with pytest.raises(AgentInitializationException, match="The OpenAI API key is required."): - _, _ = OpenAIAssistantAgent.setup_resources(env_file_path="./", default_headers={"user_agent": "test"}) - - -@pytest.mark.parametrize("exclude_list", [["OPENAI_CHAT_MODEL_ID"]], indirect=True) -async def test_open_ai_agent_missing_chat_deployment_name_throws(kernel, openai_unit_test_env): - with pytest.raises(AgentInitializationException, match="The OpenAI model ID is required."): - _, _ = OpenAIAssistantAgent.setup_resources( - env_file_path="./", - api_key="test_api_key", - default_headers={"user_agent": "test"}, - ) - - -async def test_get_thread_messages(mock_thread_messages, openai_client, assistant_definition, openai_unit_test_env): - agent = OpenAIAssistantAgent(client=openai_client, definition=assistant_definition) - - messages = [message async for message in agent.get_thread_messages("test_thread_id")] - - assert len(messages) == 2 - assert len(messages[0].items) == 3 - assert isinstance(messages[0].items[0], TextContent) - assert isinstance(messages[0].items[1], AnnotationContent) - assert isinstance(messages[0].items[2], AnnotationContent) - assert messages[0].items[0].text == "Hello" - - assert len(messages[1].items) == 1 - assert isinstance(messages[1].items[0], FileReferenceContent) - assert str(messages[1].items[0].file_id) == "test_file_id" diff --git 
a/python/tests/unit/agents/test_agent.py b/python/tests/unit/agents/test_agent.py index ecbfc63e9277..d01a6a9ba0e8 100644 --- a/python/tests/unit/agents/test_agent.py +++ b/python/tests/unit/agents/test_agent.py @@ -1,38 +1,15 @@ # Copyright (c) Microsoft. All rights reserved. -import sys import uuid -from typing import ClassVar from unittest.mock import AsyncMock -import pytest - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - from semantic_kernel.agents import Agent from semantic_kernel.agents.channels.agent_channel import AgentChannel -from semantic_kernel.functions.kernel_arguments import KernelArguments - - -class MockChatHistory: - """Minimal mock for ChatHistory to hold messages.""" - - def __init__(self, messages=None): - self.messages = messages if messages is not None else [] - - -class MockChannel(AgentChannel): - """Mock channel for testing get_channel_keys and create_channel.""" class MockAgent(Agent): """A mock agent for testing purposes.""" - channel_type: ClassVar[type[AgentChannel]] = MockChannel - def __init__(self, name: str = "Test-Agent", description: str = "A test agent", id: str = None): args = { "name": name, @@ -42,25 +19,12 @@ def __init__(self, name: str = "Test-Agent", description: str = "A test agent", args["id"] = id super().__init__(**args) + def get_channel_keys(self) -> list[str]: + return ["key1", "key2"] + async def create_channel(self) -> AgentChannel: return AsyncMock(spec=AgentChannel) - @override - async def get_response(self, *args, **kwargs): - raise NotImplementedError - - @override - async def invoke(self, *args, **kwargs): - raise NotImplementedError - - @override - async def invoke_stream(self, *args, **kwargs): - raise NotImplementedError - - -class MockAgentWithoutChannelType(MockAgent): - channel_type = None - async def test_agent_initialization(): name = "TestAgent" @@ -85,7 +49,7 @@ def test_get_channel_keys(): agent = MockAgent() keys = agent.get_channel_keys() - assert len(list(keys)) == 1, "Should return a single key" + assert keys == ["key1", "key2"] async def test_create_channel(): @@ -127,47 +91,3 @@ async def test_agent_hash(): agent3 = MockAgent(name="TestAgent", description="A different description", id=id_value) assert hash(agent1) != hash(agent3) - - -def test_get_channel_keys_no_channel_type(): - agent = MockAgentWithoutChannelType() - with pytest.raises(NotImplementedError): - list(agent.get_channel_keys()) - - -def test_merge_arguments_both_none(): - agent = MockAgent() - merged = agent._merge_arguments(None) - assert isinstance(merged, KernelArguments) - assert len(merged) == 0, "If both arguments are None, should return an empty KernelArguments object" - - -def test_merge_arguments_agent_none_override_not_none(): - agent = MockAgent() - override = KernelArguments(settings={"key": "override"}, param1="val1") - - merged = agent._merge_arguments(override) - assert merged is override, "If agent.arguments is None, just return override_args" - - -def test_merge_arguments_override_none_agent_not_none(): - agent = MockAgent() - agent.arguments = KernelArguments(settings={"key": "base"}, param1="baseVal") - - merged = agent._merge_arguments(None) - assert merged is agent.arguments, "If override_args is None, should return the agent's arguments" - - -def test_merge_arguments_both_not_none(): - agent = MockAgent() - agent.arguments = KernelArguments(settings={"key1": "val1", "common": "base"}, param1="baseVal") - override = 
KernelArguments(settings={"key2": "override_val", "common": "override"}, param2="override_param") - - merged = agent._merge_arguments(override) - - assert merged.execution_settings["key1"] == "val1", "Should retain original setting from agent" - assert merged.execution_settings["key2"] == "override_val", "Should include new setting from override" - assert merged.execution_settings["common"] == "override", "Override should take precedence" - - assert merged["param1"] == "baseVal", "Should retain base param from agent" - assert merged["param2"] == "override_param", "Should include param from override" diff --git a/python/tests/unit/agents/test_group_chat/test_agent_channel.py b/python/tests/unit/agents/test_agent_channel.py similarity index 100% rename from python/tests/unit/agents/test_group_chat/test_agent_channel.py rename to python/tests/unit/agents/test_agent_channel.py diff --git a/python/tests/unit/agents/test_group_chat/test_agent_chat.py b/python/tests/unit/agents/test_agent_chat.py similarity index 100% rename from python/tests/unit/agents/test_group_chat/test_agent_chat.py rename to python/tests/unit/agents/test_agent_chat.py diff --git a/python/tests/unit/agents/test_group_chat/test_agent_chat_utils.py b/python/tests/unit/agents/test_agent_chat_utils.py similarity index 100% rename from python/tests/unit/agents/test_group_chat/test_agent_chat_utils.py rename to python/tests/unit/agents/test_agent_chat_utils.py diff --git a/python/tests/unit/agents/test_group_chat/test_agent_group_chat.py b/python/tests/unit/agents/test_agent_group_chat.py similarity index 100% rename from python/tests/unit/agents/test_group_chat/test_agent_group_chat.py rename to python/tests/unit/agents/test_agent_group_chat.py diff --git a/python/tests/unit/agents/test_group_chat_strategies/test_aggregator_termination_strategy.py b/python/tests/unit/agents/test_aggregator_termination_strategy.py similarity index 84% rename from python/tests/unit/agents/test_group_chat_strategies/test_aggregator_termination_strategy.py rename to python/tests/unit/agents/test_aggregator_termination_strategy.py index bb659c417ebd..103f734fa0ed 100644 --- a/python/tests/unit/agents/test_group_chat_strategies/test_aggregator_termination_strategy.py +++ b/python/tests/unit/agents/test_aggregator_termination_strategy.py @@ -2,13 +2,33 @@ from unittest.mock import AsyncMock, MagicMock +from semantic_kernel.agents.agent import Agent +from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.agents.strategies.termination.aggregator_termination_strategy import ( AggregateTerminationCondition, AggregatorTerminationStrategy, ) from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy from semantic_kernel.contents.chat_message_content import ChatMessageContent -from tests.unit.agents.test_agent import MockAgent + + +class MockAgent(Agent): + """A mock agent for testing purposes.""" + + def __init__(self, id: str = None, name: str = "TestAgent", description: str = "A test agent"): + args = { + "name": name, + "description": description, + } + if id is not None: + args["id"] = id + super().__init__(**args) + + def get_channel_keys(self) -> list[str]: + return ["key1", "key2"] + + async def create_channel(self) -> AgentChannel: + return AsyncMock(spec=AgentChannel) async def test_aggregate_termination_condition_all_true(): diff --git a/python/tests/unit/agents/test_azure_assistant_agent.py b/python/tests/unit/agents/test_azure_assistant_agent.py new file mode 100644 
index 000000000000..3748065ac761 --- /dev/null +++ b/python/tests/unit/agents/test_azure_assistant_agent.py @@ -0,0 +1,570 @@ +# Copyright (c) Microsoft. All rights reserved. + +from unittest.mock import AsyncMock, MagicMock, mock_open, patch + +import pytest +from openai import AsyncAzureOpenAI +from openai.resources.beta.assistants import Assistant +from openai.types.beta.assistant import ToolResources, ToolResourcesCodeInterpreter, ToolResourcesFileSearch +from pydantic import ValidationError + +from semantic_kernel.agents.open_ai import AzureAssistantAgent +from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase +from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException +from semantic_kernel.kernel import Kernel + + +@pytest.fixture +def azure_openai_assistant_agent(kernel: Kernel, azure_openai_unit_test_env): + return AzureAssistantAgent( + kernel=kernel, + service_id="test_service", + name="test_name", + instructions="test_instructions", + api_key="test_api_key", + endpoint="https://test.endpoint", + ai_model_id="test_model", + api_version="2024-05-01-preview", + default_headers={"User-Agent": "test-agent"}, + ) + + +@pytest.fixture +def mock_assistant(): + return Assistant( + created_at=123456789, + object="assistant", + metadata={ + "__run_options": { + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + }, + model="test_model", + description="test_description", + id="test_id", + instructions="test_instructions", + name="test_name", + tools=[{"type": "code_interpreter"}, {"type": "file_search"}], + temperature=0.7, + top_p=0.9, + response_format={"type": "json_object"}, + tool_resources=ToolResources( + code_interpreter=ToolResourcesCodeInterpreter(code_interpreter_file_ids=["file1", "file2"]), + file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), + ), + ) + + +def test_initialization(azure_openai_assistant_agent: AzureAssistantAgent, azure_openai_unit_test_env): + agent = azure_openai_assistant_agent + assert agent is not None + + +def test_create_client(azure_openai_assistant_agent, azure_openai_unit_test_env): + assert isinstance(azure_openai_assistant_agent.client, AsyncAzureOpenAI) + + +def test_create_client_from_configuration(azure_openai_assistant_agent, azure_openai_unit_test_env): + assert isinstance(azure_openai_assistant_agent.client, AsyncAzureOpenAI) + assert azure_openai_assistant_agent.client.api_key == "test_api_key" + + +def test_create_client_from_configuration_missing_api_key(): + with pytest.raises( + AgentInitializationException, + match="Please provide either AzureOpenAI api_key, an ad_token, ad_token_provider, or a client.", + ): + AzureAssistantAgent._create_client(None) + + +def test_create_client_from_configuration_missing_endpoint(): + with pytest.raises( + AgentInitializationException, + match="Please provide an AzureOpenAI endpoint.", + ): + AzureAssistantAgent._create_client(api_key="test") + + +async def test_create_agent(kernel: Kernel, azure_openai_unit_test_env): + with patch.object(AzureAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant: + mock_create_assistant.return_value = MagicMock(spec=Assistant) + agent = await AzureAssistantAgent.create( + kernel=kernel, service_id="test_service", name="test_name", api_key="test_api_key", api_version="2024-05-01" + ) + assert agent.assistant is not None + mock_create_assistant.assert_called_once() + await 
agent.client.close() + + +async def test_create_agent_with_files(kernel: Kernel, azure_openai_unit_test_env): + mock_open_file = mock_open(read_data="file_content") + with ( + patch("builtins.open", mock_open_file), + patch( + "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.add_file", + return_value="test_file_id", + ), + patch( + "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.create_vector_store", + return_value="vector_store_id", + ), + patch.object(AzureAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant, + ): + mock_create_assistant.return_value = MagicMock(spec=Assistant) + agent = await AzureAssistantAgent.create( + kernel=kernel, + service_id="test_service", + name="test_name", + api_key="test_api_key", + api_version="2024-05-01", + code_interpreter_filenames=["file1", "file2"], + vector_store_filenames=["file3", "file4"], + enable_code_interpreter=True, + enable_file_search=True, + ) + assert agent.assistant is not None + mock_create_assistant.assert_called_once() + + +async def test_create_agent_with_code_files_not_found_raises_exception(kernel: Kernel, azure_openai_unit_test_env): + mock_open_file = mock_open(read_data="file_content") + with ( + patch("builtins.open", mock_open_file), + patch( + "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.add_file", + side_effect=FileNotFoundError("File not found"), + ), + patch.object(AzureAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant, + ): + mock_create_assistant.return_value = MagicMock(spec=Assistant) + with pytest.raises(AgentInitializationException, match="Failed to upload code interpreter files."): + _ = await AzureAssistantAgent.create( + kernel=kernel, + service_id="test_service", + deployment_name="test_deployment_name", + name="test_name", + api_key="test_api_key", + api_version="2024-05-01", + code_interpreter_filenames=["file1", "file2"], + ) + + +async def test_create_agent_with_search_files_not_found_raises_exception(kernel: Kernel, azure_openai_unit_test_env): + mock_open_file = mock_open(read_data="file_content") + with ( + patch("builtins.open", mock_open_file), + patch( + "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.add_file", + side_effect=FileNotFoundError("File not found"), + ), + patch.object(AzureAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant, + ): + mock_create_assistant.return_value = MagicMock(spec=Assistant) + with pytest.raises(AgentInitializationException, match="Failed to upload vector store files."): + _ = await AzureAssistantAgent.create( + kernel=kernel, + service_id="test_service", + deployment_name="test_deployment_name", + name="test_name", + api_key="test_api_key", + api_version="2024-05-01", + vector_store_filenames=["file3", "file4"], + ) + + +async def test_list_definitions(kernel: Kernel, mock_assistant, azure_openai_unit_test_env): + agent = AzureAssistantAgent( + kernel=kernel, service_id="test_service", name="test_name", instructions="test_instructions", id="test_id" + ) + + with patch.object( + AzureAssistantAgent, "_create_client", return_value=MagicMock(spec=AsyncAzureOpenAI) + ) as mock_create_client: + mock_client_instance = mock_create_client.return_value + mock_client_instance.beta = MagicMock() + mock_client_instance.beta.assistants = MagicMock() + mock_client_instance.beta.assistants.list = AsyncMock(return_value=MagicMock(data=[mock_assistant])) + + agent.client = 
mock_client_instance + + definitions = [] + async for definition in agent.list_definitions(): + definitions.append(definition) + + mock_client_instance.beta.assistants.list.assert_called() + + assert len(definitions) == 1 + assert definitions[0] == { + "ai_model_id": "test_model", + "description": "test_description", + "id": "test_id", + "instructions": "test_instructions", + "name": "test_name", + "enable_code_interpreter": True, + "enable_file_search": True, + "enable_json_response": True, + "code_interpreter_file_ids": ["file1", "file2"], + "temperature": 0.7, + "top_p": 0.9, + "vector_store_id": "vector_store1", + "metadata": { + "__run_options": { + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + }, + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + + +async def test_retrieve_agent(kernel, azure_openai_unit_test_env): + with patch.object( + AzureAssistantAgent, "_create_client", return_value=MagicMock(spec=AsyncAzureOpenAI) + ) as mock_create_client: + mock_client_instance = mock_create_client.return_value + mock_client_instance.beta = MagicMock() + mock_client_instance.beta.assistants = MagicMock() + + mock_client_instance.beta.assistants.retrieve = AsyncMock(return_value=AsyncMock(spec=Assistant)) + + OpenAIAssistantBase._create_open_ai_assistant_definition = MagicMock( + return_value={ + "ai_model_id": "test_model", + "description": "test_description", + "id": "test_id", + "instructions": "test_instructions", + "name": "test_name", + "enable_code_interpreter": True, + "enable_file_search": True, + "enable_json_response": True, + "code_interpreter_file_ids": ["file1", "file2"], + "temperature": 0.7, + "top_p": 0.9, + "vector_store_id": "vector_store1", + "metadata": { + "__run_options": { + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + }, + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + ) + + retrieved_agent = await AzureAssistantAgent.retrieve(id="test_id", api_key="test_api_key", kernel=kernel) + assert retrieved_agent.model_dump( + include={ + "ai_model_id", + "description", + "id", + "instructions", + "name", + "enable_code_interpreter", + "enable_file_search", + "enable_json_response", + "code_interpreter_file_ids", + "temperature", + "top_p", + "vector_store_id", + "metadata", + "max_completion_tokens", + "max_prompt_tokens", + "parallel_tool_calls_enabled", + "truncation_message_count", + } + ) == { + "ai_model_id": "test_model", + "description": "test_description", + "id": "test_id", + "instructions": "test_instructions", + "name": "test_name", + "enable_code_interpreter": True, + "enable_file_search": True, + "enable_json_response": True, + "code_interpreter_file_ids": ["file1", "file2"], + "temperature": 0.7, + "top_p": 0.9, + "vector_store_id": "vector_store1", + "metadata": { + "__run_options": { + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + }, + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + mock_client_instance.beta.assistants.retrieve.assert_called_once_with("test_id") + 
OpenAIAssistantBase._create_open_ai_assistant_definition.assert_called_once() + + +@pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"]], indirect=True) +async def test_retrieve_agent_missing_chat_deployment_name_throws(kernel, azure_openai_unit_test_env): + with pytest.raises(AgentInitializationException, match="The Azure OpenAI chat_deployment_name is required."): + _ = await AzureAssistantAgent.retrieve( + id="test_id", api_key="test_api_key", kernel=kernel, env_file_path="test.env" + ) + + +@pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_API_KEY"]], indirect=True) +async def test_retrieve_agent_missing_api_key_throws(kernel, azure_openai_unit_test_env): + with pytest.raises( + AgentInitializationException, match="Please provide either a client, an api_key, ad_token or ad_token_provider." + ): + _ = await AzureAssistantAgent.retrieve(id="test_id", kernel=kernel, env_file_path="test.env") + + +def test_open_ai_settings_create_throws(azure_openai_unit_test_env): + with patch( + "semantic_kernel.connectors.ai.open_ai.settings.azure_open_ai_settings.AzureOpenAISettings.create" + ) as mock_create: + mock_create.side_effect = ValidationError.from_exception_data("test", line_errors=[], input_type="python") + + with pytest.raises(AgentInitializationException, match="Failed to create Azure OpenAI settings."): + AzureAssistantAgent(service_id="test", api_key="test_api_key", deployment_name="test_deployment_name") + + +@pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"]], indirect=True) +def test_azure_openai_agent_create_missing_deployment_name(azure_openai_unit_test_env): + with pytest.raises(AgentInitializationException, match="The Azure OpenAI chat_deployment_name is required."): + AzureAssistantAgent( + service_id="test_service", api_key="test_key", endpoint="https://example.com", env_file_path="test.env" + ) + + +@pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_API_KEY"]], indirect=True) +def test_azure_openai_agent_create_missing_api_key(azure_openai_unit_test_env): + with pytest.raises( + AgentInitializationException, match="Please provide either a client, an api_key, ad_token or ad_token_provider." + ): + AzureAssistantAgent(service_id="test_service", endpoint="https://example.com", env_file_path="test.env") + + +async def test_setup_client_and_token_with_existing_client(): + """Test that if a client is already provided, _setup_client_and_token + simply returns that client (and doesn't create a new one). + """ + mock_settings = MagicMock() + mock_settings.chat_deployment_name = "test_deployment_name" + mock_settings.api_key = None + mock_settings.token_endpoint = None + mock_client = MagicMock(spec=AsyncAzureOpenAI) + + returned_client, returned_token = AzureAssistantAgent._setup_client_and_token( + azure_openai_settings=mock_settings, + ad_token=None, + ad_token_provider=None, + client=mock_client, + default_headers=None, + ) + + assert returned_client == mock_client + assert returned_token is None + + +async def test_setup_client_and_token_with_api_key_creates_client(): + """Test that providing an API key (and no client) results + in creating a new client via _create_client. 
+ """ + mock_settings = MagicMock() + mock_settings.chat_deployment_name = "test_deployment_name" + mock_settings.api_key.get_secret_value.return_value = "test_api_key" + mock_settings.endpoint = "https://test.endpoint" + mock_settings.api_version = "2024-05-01" + mock_settings.token_endpoint = None + + with patch.object(AzureAssistantAgent, "_create_client", return_value="mock_client") as mock_create_client: + returned_client, returned_token = AzureAssistantAgent._setup_client_and_token( + azure_openai_settings=mock_settings, + ad_token=None, + ad_token_provider=None, + client=None, + default_headers=None, + ) + + mock_create_client.assert_called_once_with( + api_key="test_api_key", + endpoint="https://test.endpoint", + api_version="2024-05-01", + ad_token=None, + ad_token_provider=None, + default_headers=None, + ) + assert returned_client == "mock_client" + assert returned_token is None + + +async def test_setup_client_and_token_fetches_ad_token_when_token_endpoint_present(): + """Test that if no credentials are provided except a token endpoint, + _setup_client_and_token fetches an AD token. + """ + mock_settings = MagicMock() + mock_settings.chat_deployment_name = "test_deployment_name" + mock_settings.api_key = None + mock_settings.endpoint = "https://test.endpoint" + mock_settings.api_version = "2024-05-01" + mock_settings.token_endpoint = "https://login.microsoftonline.com" + + with ( + patch( + "semantic_kernel.agents.open_ai.azure_assistant_agent.get_entra_auth_token", + return_value="fetched_ad_token", + ) as mock_get_token, + patch.object(AzureAssistantAgent, "_create_client", return_value="mock_client") as mock_create_client, + ): + returned_client, returned_token = AzureAssistantAgent._setup_client_and_token( + azure_openai_settings=mock_settings, + ad_token=None, + ad_token_provider=None, + client=None, + default_headers=None, + ) + + mock_get_token.assert_called_once_with("https://login.microsoftonline.com") + mock_create_client.assert_called_once_with( + api_key=None, + endpoint="https://test.endpoint", + api_version="2024-05-01", + ad_token="fetched_ad_token", + ad_token_provider=None, + default_headers=None, + ) + assert returned_client == "mock_client" + assert returned_token == "fetched_ad_token" + + +async def test_setup_client_and_token_no_credentials_raises_exception(): + """Test that if there's no client, no API key, no AD token/provider, + and no token endpoint, an AgentInitializationException is raised. + """ + mock_settings = MagicMock() + mock_settings.chat_deployment_name = "test_deployment_name" + mock_settings.api_key = None + mock_settings.endpoint = "https://test.endpoint" + mock_settings.api_version = "2024-05-01" + mock_settings.token_endpoint = None + + with pytest.raises( + AgentInitializationException, match="Please provide either a client, an api_key, ad_token or ad_token_provider." 
+ ): + _ = AzureAssistantAgent._setup_client_and_token( + azure_openai_settings=mock_settings, + ad_token=None, + ad_token_provider=None, + client=None, + default_headers=None, + ) + + +@pytest.mark.parametrize( + "exclude_list, client, api_key, should_raise, expected_exception_msg, should_create_client_call", + [ + ([], None, "test_api_key", False, None, True), + ([], AsyncMock(spec=AsyncAzureOpenAI), None, False, None, False), + ( + [], + AsyncMock(spec=AsyncAzureOpenAI), + "test_api_key", + False, + None, + False, + ), + ( + ["AZURE_OPENAI_API_KEY"], + None, + None, + True, + "Please provide either a client, an api_key, ad_token or ad_token_provider.", + False, + ), + ], + indirect=["exclude_list"], +) +async def test_retrieve_agent_handling_api_key_and_client( + azure_openai_unit_test_env, + exclude_list, + kernel, + client, + api_key, + should_raise, + expected_exception_msg, + should_create_client_call, +): + is_api_key_present = "AZURE_OPENAI_API_KEY" not in exclude_list + + with ( + patch.object( + AzureAssistantAgent, + "_create_azure_openai_settings", + return_value=MagicMock( + chat_model_id="test_model", + api_key=MagicMock( + get_secret_value=MagicMock(return_value="test_api_key" if is_api_key_present else None) + ) + if is_api_key_present + else None, + ), + ), + patch.object( + AzureAssistantAgent, + "_create_client", + return_value=AsyncMock(spec=AsyncAzureOpenAI), + ) as mock_create_client, + patch.object( + OpenAIAssistantBase, + "_create_open_ai_assistant_definition", + return_value={ + "ai_model_id": "test_model", + "description": "test_description", + "id": "test_id", + "name": "test_name", + }, + ) as mock_create_def, + ): + if client: + client.beta = MagicMock() + client.beta.assistants = MagicMock() + client.beta.assistants.retrieve = AsyncMock(return_value=MagicMock(spec=Assistant)) + else: + mock_client_instance = mock_create_client.return_value + mock_client_instance.beta = MagicMock() + mock_client_instance.beta.assistants = MagicMock() + mock_client_instance.beta.assistants.retrieve = AsyncMock(return_value=MagicMock(spec=Assistant)) + + if should_raise: + with pytest.raises(AgentInitializationException, match=expected_exception_msg): + await AzureAssistantAgent.retrieve(id="test_id", kernel=kernel, api_key=api_key, client=client) + return + + retrieved_agent = await AzureAssistantAgent.retrieve( + id="test_id", kernel=kernel, api_key=api_key, client=client + ) + + if should_create_client_call: + mock_create_client.assert_called_once() + else: + mock_create_client.assert_not_called() + + assert retrieved_agent.ai_model_id == "test_model" + mock_create_def.assert_called_once() + if client: + client.beta.assistants.retrieve.assert_called_once_with("test_id") + else: + mock_client_instance.beta.assistants.retrieve.assert_called_once_with("test_id") diff --git a/python/tests/unit/agents/test_group_chat/test_broadcast_queue.py b/python/tests/unit/agents/test_broadcast_queue.py similarity index 100% rename from python/tests/unit/agents/test_group_chat/test_broadcast_queue.py rename to python/tests/unit/agents/test_broadcast_queue.py diff --git a/python/tests/unit/agents/test_chat_completion_agent.py b/python/tests/unit/agents/test_chat_completion_agent.py new file mode 100644 index 000000000000..01f9813acf83 --- /dev/null +++ b/python/tests/unit/agents/test_chat_completion_agent.py @@ -0,0 +1,217 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from unittest.mock import AsyncMock, create_autospec, patch + +import pytest +from pydantic import ValidationError + +from semantic_kernel.agents import ChatCompletionAgent +from semantic_kernel.agents.channels.chat_history_channel import ChatHistoryChannel +from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions import KernelServiceNotFoundError +from semantic_kernel.kernel import Kernel + + +@pytest.fixture +def mock_streaming_chat_completion_response() -> AsyncMock: + """A fixture that returns a mock response for a streaming chat completion response.""" + + async def mock_response(chat_history, settings, kernel): + content1 = ChatMessageContent(role=AuthorRole.SYSTEM, content="Processed Message 1") + content2 = ChatMessageContent(role=AuthorRole.TOOL, content="Processed Message 2") + chat_history.messages.append(content1) + chat_history.messages.append(content2) + yield [content1] + yield [content2] + + return mock_response + + +async def test_initialization(): + agent = ChatCompletionAgent( + service_id="test_service", + name="TestAgent", + id="test_id", + description="Test Description", + instructions="Test Instructions", + ) + + assert agent.service_id == "test_service" + assert agent.name == "TestAgent" + assert agent.id == "test_id" + assert agent.description == "Test Description" + assert agent.instructions == "Test Instructions" + + +async def test_initialization_invalid_name_throws(): + with pytest.raises(ValidationError): + _ = ChatCompletionAgent( + service_id="test_service", + name="Test Agent", + id="test_id", + description="Test Description", + instructions="Test Instructions", + ) + + +async def test_initialization_no_service_id(): + agent = ChatCompletionAgent( + name="TestAgent", + id="test_id", + description="Test Description", + instructions="Test Instructions", + ) + + assert agent.service_id == "default" + assert agent.kernel is not None + assert agent.name == "TestAgent" + assert agent.id == "test_id" + assert agent.description == "Test Description" + assert agent.instructions == "Test Instructions" + + +async def test_initialization_with_kernel(kernel: Kernel): + agent = ChatCompletionAgent( + kernel=kernel, + name="TestAgent", + id="test_id", + description="Test Description", + instructions="Test Instructions", + ) + + assert agent.service_id == "default" + assert kernel == agent.kernel + assert agent.name == "TestAgent" + assert agent.id == "test_id" + assert agent.description == "Test Description" + assert agent.instructions == "Test Instructions" + + +async def test_invoke(): + kernel = create_autospec(Kernel) + kernel.get_service.return_value = create_autospec(ChatCompletionClientBase) + kernel.get_service.return_value.get_chat_message_contents = AsyncMock( + return_value=[ChatMessageContent(role=AuthorRole.SYSTEM, content="Processed Message")] + ) + agent = ChatCompletionAgent( + kernel=kernel, service_id="test_service", name="TestAgent", instructions="Test Instructions" + ) + + history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) + + messages = [message async for message in agent.invoke(history)] + + assert len(messages) == 1 + assert messages[0].content == "Processed Message" + + +async def test_invoke_tool_call_added(): + kernel = 
create_autospec(Kernel) + chat_completion_service = create_autospec(ChatCompletionClientBase) + kernel.get_service.return_value = chat_completion_service + agent = ChatCompletionAgent(kernel=kernel, service_id="test_service", name="TestAgent") + + history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) + + async def mock_get_chat_message_contents(chat_history, settings, kernel): + new_messages = [ + ChatMessageContent(role=AuthorRole.ASSISTANT, content="Processed Message 1"), + ChatMessageContent(role=AuthorRole.TOOL, content="Processed Message 2"), + ] + chat_history.messages.extend(new_messages) + return new_messages + + chat_completion_service.get_chat_message_contents = AsyncMock(side_effect=mock_get_chat_message_contents) + + messages = [message async for message in agent.invoke(history)] + + assert len(messages) == 2 + assert messages[0].content == "Processed Message 1" + assert messages[1].content == "Processed Message 2" + + assert len(history.messages) == 3 + assert history.messages[1].content == "Processed Message 1" + assert history.messages[2].content == "Processed Message 2" + assert history.messages[1].name == "TestAgent" + assert history.messages[2].name == "TestAgent" + + +async def test_invoke_no_service_throws(): + kernel = create_autospec(Kernel) + kernel.get_service.return_value = None + agent = ChatCompletionAgent(kernel=kernel, service_id="test_service", name="TestAgent") + + history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) + + with pytest.raises(KernelServiceNotFoundError): + async for _ in agent.invoke(history): + pass + + +async def test_invoke_stream(): + kernel = create_autospec(Kernel) + kernel.get_service.return_value = create_autospec(ChatCompletionClientBase) + + agent = ChatCompletionAgent(kernel=kernel, service_id="test_service", name="TestAgent") + + history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) + + with patch( + "semantic_kernel.connectors.ai.chat_completion_client_base.ChatCompletionClientBase.get_streaming_chat_message_contents", + return_value=AsyncMock(), + ) as mock: + mock.return_value.__aiter__.return_value = [ + [ChatMessageContent(role=AuthorRole.USER, content="Initial Message")] + ] + + async for message in agent.invoke_stream(history): + assert message.role == AuthorRole.USER + assert message.content == "Initial Message" + + +async def test_invoke_stream_tool_call_added(mock_streaming_chat_completion_response): + kernel = create_autospec(Kernel) + chat_completion_service = create_autospec(ChatCompletionClientBase) + kernel.get_service.return_value = chat_completion_service + agent = ChatCompletionAgent(kernel=kernel, service_id="test_service", name="TestAgent") + + history = ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) + + chat_completion_service.get_streaming_chat_message_contents = mock_streaming_chat_completion_response + + async for message in agent.invoke_stream(history): + print(f"Message role: {message.role}, content: {message.content}") + assert message.role in [AuthorRole.SYSTEM, AuthorRole.TOOL] + assert message.content in ["Processed Message 1", "Processed Message 2"] + + assert len(history.messages) == 3 + + +async def test_invoke_stream_no_service_throws(): + kernel = create_autospec(Kernel) + kernel.get_service.return_value = None + agent = ChatCompletionAgent(kernel=kernel, service_id="test_service", name="TestAgent") + + history = 
ChatHistory(messages=[ChatMessageContent(role=AuthorRole.USER, content="Initial Message")]) + + with pytest.raises(KernelServiceNotFoundError): + async for _ in agent.invoke_stream(history): + pass + + +def test_get_channel_keys(): + agent = ChatCompletionAgent() + keys = agent.get_channel_keys() + + for key in keys: + assert isinstance(key, str) + + +async def test_create_channel(): + agent = ChatCompletionAgent() + channel = await agent.create_channel() + + assert isinstance(channel, ChatHistoryChannel) diff --git a/python/tests/unit/agents/chat_completion/test_chat_history_channel.py b/python/tests/unit/agents/test_chat_history_channel.py similarity index 79% rename from python/tests/unit/agents/chat_completion/test_chat_history_channel.py rename to python/tests/unit/agents/test_chat_history_channel.py index b03dc892dd57..4ba15f01a062 100644 --- a/python/tests/unit/agents/chat_completion/test_chat_history_channel.py +++ b/python/tests/unit/agents/test_chat_history_channel.py @@ -3,12 +3,13 @@ from collections.abc import AsyncIterable from unittest.mock import AsyncMock -from semantic_kernel.agents.channels.chat_history_channel import ChatHistoryChannel +import pytest + +from semantic_kernel.agents.channels.chat_history_channel import ChatHistoryAgentProtocol, ChatHistoryChannel from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.file_reference_content import FileReferenceContent from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions import ServiceInvalidTypeError class MockChatHistoryHandler: @@ -32,6 +33,9 @@ class MockNonChatHistoryHandler: id: str = "mock_non_chat_history_handler" +ChatHistoryAgentProtocol.register(MockChatHistoryHandler) + + class AsyncIterableMock: def __init__(self, async_gen): self.async_gen = async_gen @@ -126,6 +130,24 @@ async def mock_invoke(history: list[ChatMessageContent]): assert received_messages[2].items[0].id == "test_id" +async def test_invoke_incorrect_instance_throws(): + channel = ChatHistoryChannel() + agent = MockNonChatHistoryHandler() + + with pytest.raises(ServiceInvalidTypeError): + async for _ in channel.invoke(agent): + pass + + +async def test_invoke_stream_incorrect_instance_throws(): + channel = ChatHistoryChannel() + agent = MockNonChatHistoryHandler() + + with pytest.raises(ServiceInvalidTypeError): + async for _ in channel.invoke_stream(agent, []): + pass + + async def test_receive(): channel = ChatHistoryChannel() history = [ @@ -178,47 +200,3 @@ async def test_reset_history(): await channel.reset() assert len(channel.messages) == 0 - - -async def test_receive_skips_file_references(): - channel = ChatHistoryChannel() - - file_ref_item = FileReferenceContent() - streaming_file_ref_item = StreamingFileReferenceContent() - normal_item_1 = FunctionResultContent(id="test_id", result="normal content 1") - normal_item_2 = FunctionResultContent(id="test_id_2", result="normal content 2") - - msg_with_file_only = ChatMessageContent( - role=AuthorRole.USER, - content="Normal message set as TextContent", - items=[file_ref_item], - ) - - msg_with_mixed = ChatMessageContent( - role=AuthorRole.USER, - content="Mixed content message", - items=[streaming_file_ref_item, normal_item_1], - ) - - msg_with_normal = ChatMessageContent( - role=AuthorRole.USER, - content="Normal 
message", - items=[normal_item_2], - ) - - history = [msg_with_file_only, msg_with_mixed, msg_with_normal] - await channel.receive(history) - - assert len(channel.messages) == 3 - - assert channel.messages[0].content == "Normal message set as TextContent" - assert len(channel.messages[0].items) == 1 - - assert channel.messages[1].content == "Mixed content message" - assert len(channel.messages[0].items) == 1 - assert channel.messages[1].items[0].result == "normal content 1" - - assert channel.messages[2].content == "Normal message" - assert len(channel.messages[2].items) == 2 - assert channel.messages[2].items[0].result == "normal content 2" - assert channel.messages[2].items[1].text == "Normal message" diff --git a/python/tests/unit/agents/test_group_chat_strategies/test_default_termination_strategy.py b/python/tests/unit/agents/test_default_termination_strategy.py similarity index 100% rename from python/tests/unit/agents/test_group_chat_strategies/test_default_termination_strategy.py rename to python/tests/unit/agents/test_default_termination_strategy.py diff --git a/python/tests/unit/agents/test_group_chat_strategies/test_kernel_function_selection_strategy.py b/python/tests/unit/agents/test_kernel_function_selection_strategy.py similarity index 85% rename from python/tests/unit/agents/test_group_chat_strategies/test_kernel_function_selection_strategy.py rename to python/tests/unit/agents/test_kernel_function_selection_strategy.py index 8953593c4b29..2523c06cb05e 100644 --- a/python/tests/unit/agents/test_group_chat_strategies/test_kernel_function_selection_strategy.py +++ b/python/tests/unit/agents/test_kernel_function_selection_strategy.py @@ -4,6 +4,8 @@ import pytest +from semantic_kernel.agents.agent import Agent +from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.agents.strategies.selection.kernel_function_selection_strategy import ( KernelFunctionSelectionStrategy, ) @@ -11,7 +13,25 @@ from semantic_kernel.exceptions.agent_exceptions import AgentExecutionException from semantic_kernel.functions.kernel_function import KernelFunction from semantic_kernel.kernel import Kernel -from tests.unit.agents.test_agent import MockAgent + + +class MockAgent(Agent): + """A mock agent for testing purposes.""" + + def __init__(self, id: str = None, name: str = "TestAgent", description: str = "A test agent"): + args = { + "name": name, + "description": description, + } + if id is not None: + args["id"] = id + super().__init__(**args) + + def get_channel_keys(self) -> list[str]: + return ["key1", "key2"] + + async def create_channel(self) -> AgentChannel: + return AsyncMock(spec=AgentChannel) @pytest.fixture diff --git a/python/tests/unit/agents/test_group_chat_strategies/test_kernel_function_termination_strategy.py b/python/tests/unit/agents/test_kernel_function_termination_strategy.py similarity index 85% rename from python/tests/unit/agents/test_group_chat_strategies/test_kernel_function_termination_strategy.py rename to python/tests/unit/agents/test_kernel_function_termination_strategy.py index 5e4d96da0e60..e273c40e2501 100644 --- a/python/tests/unit/agents/test_group_chat_strategies/test_kernel_function_termination_strategy.py +++ b/python/tests/unit/agents/test_kernel_function_termination_strategy.py @@ -2,12 +2,32 @@ from unittest.mock import AsyncMock, MagicMock, patch +from semantic_kernel.agents.agent import Agent +from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.agents.strategies import 
KernelFunctionTerminationStrategy from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.functions.kernel_arguments import KernelArguments from semantic_kernel.functions.kernel_function import KernelFunction from semantic_kernel.kernel import Kernel -from tests.unit.agents.test_agent import MockAgent + + +class MockAgent(Agent): + """A mock agent for testing purposes.""" + + def __init__(self, id: str = None, name: str = "TestAgent", description: str = "A test agent"): + args = { + "name": name, + "description": description, + } + if id is not None: + args["id"] = id + super().__init__(**args) + + def get_channel_keys(self) -> list[str]: + return ["key1", "key2"] + + async def create_channel(self) -> AgentChannel: + return AsyncMock(spec=AgentChannel) async def test_should_agent_terminate_with_result_true(): diff --git a/python/tests/unit/agents/test_open_ai_assistant_agent.py b/python/tests/unit/agents/test_open_ai_assistant_agent.py new file mode 100644 index 000000000000..6392d3345ea1 --- /dev/null +++ b/python/tests/unit/agents/test_open_ai_assistant_agent.py @@ -0,0 +1,601 @@ +# Copyright (c) Microsoft. All rights reserved. + +import json +from unittest.mock import AsyncMock, MagicMock, mock_open, patch + +import pytest +from openai import AsyncOpenAI +from openai.resources.beta.assistants import Assistant +from openai.types.beta.assistant import ( + ToolResources, + ToolResourcesCodeInterpreter, + ToolResourcesFileSearch, +) +from pydantic import ValidationError + +from semantic_kernel.agents.open_ai import OpenAIAssistantAgent +from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase +from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException +from semantic_kernel.kernel import Kernel + + +@pytest.fixture(scope="function") +def openai_assistant_agent(kernel: Kernel, openai_unit_test_env): + return OpenAIAssistantAgent( + kernel=kernel, + service_id="test_service", + name="test_name", + instructions="test_instructions", + api_key="test_api_key", + kwargs={"temperature": 0.1}, + max_completion_tokens=100, + max_prompt_tokens=100, + parallel_tool_calls_enabled=True, + truncation_message_count=2, + ) + + +@pytest.fixture(scope="function") +def mock_assistant(): + return Assistant( + created_at=123456789, + object="assistant", + metadata={ + "__run_options": { + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + }, + model="test_model", + description="test_description", + id="test_id", + instructions="test_instructions", + name="test_name", + tools=[{"type": "code_interpreter"}, {"type": "file_search"}], + temperature=0.7, + top_p=0.9, + response_format={"type": "json_object"}, + tool_resources=ToolResources( + code_interpreter=ToolResourcesCodeInterpreter(code_interpreter_file_ids=["file1", "file2"]), + file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), + ), + ) + + +@pytest.fixture(scope="function") +def mock_assistant_json(): + return Assistant( + created_at=123456789, + object="assistant", + metadata={ + "__run_options": json.dumps({ + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + }) + }, + model="test_model", + description="test_description", + id="test_id", + instructions="test_instructions", + name="test_name", + tools=[{"type": "code_interpreter"}, {"type": "file_search"}], + temperature=0.7, + 
top_p=0.9, + response_format={"type": "json_object"}, + tool_resources=ToolResources( + code_interpreter=ToolResourcesCodeInterpreter(code_interpreter_file_ids=["file1", "file2"]), + file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), + ), + ) + + +def test_initialization(openai_assistant_agent: OpenAIAssistantAgent, openai_unit_test_env): + agent = openai_assistant_agent + assert agent is not None + agent.kernel is not None + + +def test_create_client(openai_unit_test_env): + client = OpenAIAssistantAgent._create_client(api_key="test_api_key", default_headers={"User-Agent": "test-agent"}) + assert isinstance(client, AsyncOpenAI) + assert client.api_key == "test_api_key" + + +def test_create_client_from_configuration_missing_api_key(): + with pytest.raises( + AgentInitializationException, + match="Please provide an OpenAI api_key", + ): + OpenAIAssistantAgent._create_client(None) + + +async def test_create_agent(kernel: Kernel, openai_unit_test_env): + with patch.object(OpenAIAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant: + mock_create_assistant.return_value = MagicMock(spec=Assistant) + agent = await OpenAIAssistantAgent.create( + kernel=kernel, + ai_model_id="test_model_id", + service_id="test_service", + name="test_name", + api_key="test_api_key", + ) + assert agent.assistant is not None + mock_create_assistant.assert_called_once() + + +async def test_create_agent_with_files(kernel: Kernel, openai_unit_test_env): + mock_open_file = mock_open(read_data="file_content") + with ( + patch("builtins.open", mock_open_file), + patch( + "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.add_file", + return_value="test_file_id", + ), + patch( + "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.create_vector_store", + return_value="vector_store_id", + ), + patch.object(OpenAIAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant, + ): + mock_create_assistant.return_value = MagicMock(spec=Assistant) + agent = await OpenAIAssistantAgent.create( + kernel=kernel, + ai_model_id="test_model_id", + service_id="test_service", + name="test_name", + api_key="test_api_key", + code_interpreter_filenames=["file1", "file2"], + vector_store_filenames=["file3", "file4"], + enable_code_interpreter=True, + enable_file_search=True, + ) + assert agent.assistant is not None + mock_create_assistant.assert_called_once() + + +async def test_create_agent_with_code_files_not_found_raises_exception(kernel: Kernel, openai_unit_test_env): + mock_open_file = mock_open(read_data="file_content") + with ( + patch("builtins.open", mock_open_file), + patch( + "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.add_file", + side_effect=FileNotFoundError("File not found"), + ), + patch.object(OpenAIAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant, + ): + mock_create_assistant.return_value = MagicMock(spec=Assistant) + with pytest.raises(AgentInitializationException, match="Failed to upload code interpreter files."): + _ = await OpenAIAssistantAgent.create( + kernel=kernel, + service_id="test_service", + ai_model_id="test_model_id", + name="test_name", + api_key="test_api_key", + api_version="2024-05-01", + code_interpreter_filenames=["file1", "file2"], + ) + + +async def test_create_agent_with_search_files_not_found_raises_exception(kernel: Kernel, openai_unit_test_env): + mock_open_file = mock_open(read_data="file_content") + with ( + 
patch("builtins.open", mock_open_file), + patch( + "semantic_kernel.agents.open_ai.open_ai_assistant_base.OpenAIAssistantBase.add_file", + side_effect=FileNotFoundError("File not found"), + ), + patch.object(OpenAIAssistantAgent, "create_assistant", new_callable=AsyncMock) as mock_create_assistant, + ): + mock_create_assistant.return_value = MagicMock(spec=Assistant) + with pytest.raises(AgentInitializationException, match="Failed to upload vector store files."): + _ = await OpenAIAssistantAgent.create( + kernel=kernel, + service_id="test_service", + ai_model_id="test_model_id", + name="test_name", + api_key="test_api_key", + api_version="2024-05-01", + vector_store_filenames=["file3", "file4"], + ) + + +async def test_create_agent_second_way(kernel: Kernel, mock_assistant, openai_unit_test_env): + agent = OpenAIAssistantAgent( + kernel=kernel, + ai_model_id="test_model_id", + service_id="test_service", + name="test_name", + api_key="test_api_key", + max_completion_tokens=100, + max_prompt_tokens=100, + parallel_tool_calls_enabled=True, + truncation_message_count=2, + ) + + with patch.object( + OpenAIAssistantAgent, "_create_client", return_value=MagicMock(spec=AsyncOpenAI) + ) as mock_create_client: + mock_client_instance = mock_create_client.return_value + mock_client_instance.beta = MagicMock() + mock_client_instance.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + agent.client = mock_client_instance + + assistant = await agent.create_assistant() + + mock_client_instance.beta.assistants.create.assert_called_once() + + assert assistant == mock_assistant + + assert json.loads( + mock_client_instance.beta.assistants.create.call_args[1]["metadata"][agent._options_metadata_key] + ) == { + "max_completion_tokens": 100, + "max_prompt_tokens": 100, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 2, + } + + +async def test_list_definitions(kernel: Kernel, openai_unit_test_env): + agent = OpenAIAssistantAgent( + kernel=kernel, service_id="test_service", name="test_name", instructions="test_instructions", id="test_id" + ) + + assistant = Assistant( + id="test_id", + created_at=123456789, + description="test_description", + instructions="test_instructions", + metadata={ + "__run_options": { + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + }, + model="test_model", + name="test_name", + object="assistant", + temperature=0.7, + tool_resources=ToolResources( + code_interpreter=ToolResourcesCodeInterpreter(code_interpreter_file_ids=["file1", "file2"]), + file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), + ), + top_p=0.9, + response_format={"type": "json_object"}, + tools=[{"type": "code_interpreter"}, {"type": "file_search"}], + ) + + with patch.object( + OpenAIAssistantAgent, "_create_client", return_value=MagicMock(spec=AsyncOpenAI) + ) as mock_create_client: + mock_client_instance = mock_create_client.return_value + mock_client_instance.beta = MagicMock() + mock_client_instance.beta.assistants = MagicMock() + mock_client_instance.beta.assistants.list = AsyncMock(return_value=MagicMock(data=[assistant])) + + agent.client = mock_client_instance + + definitions = [] + async for definition in agent.list_definitions(): + definitions.append(definition) + + mock_client_instance.beta.assistants.list.assert_called() + + assert len(definitions) == 1 + assert definitions[0] == { + "ai_model_id": "test_model", + "description": "test_description", + "id": 
"test_id", + "instructions": "test_instructions", + "name": "test_name", + "enable_code_interpreter": True, + "enable_file_search": True, + "enable_json_response": True, + "code_interpreter_file_ids": ["file1", "file2"], + "temperature": 0.7, + "top_p": 0.9, + "vector_store_id": "vector_store1", + "metadata": { + "__run_options": { + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + }, + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + + +@pytest.mark.parametrize("exclude_list", [["OPENAI_CHAT_MODEL_ID"]], indirect=True) +async def test_retrieve_agent_missing_chat_model_id_throws(kernel, openai_unit_test_env): + with pytest.raises(AgentInitializationException, match="The OpenAI chat model ID is required."): + _ = await OpenAIAssistantAgent.retrieve( + id="test_id", api_key="test_api_key", kernel=kernel, env_file_path="test.env" + ) + + +@pytest.mark.parametrize("exclude_list", [["OPENAI_API_KEY"]], indirect=True) +async def test_retrieve_agent_missing_api_key_throws(kernel, openai_unit_test_env): + with pytest.raises( + AgentInitializationException, match="The OpenAI API key is required, if a client is not provided." + ): + _ = await OpenAIAssistantAgent.retrieve(id="test_id", kernel=kernel, env_file_path="test.env") + + +def test_open_ai_settings_create_throws(openai_unit_test_env): + with patch("semantic_kernel.connectors.ai.open_ai.settings.open_ai_settings.OpenAISettings.create") as mock_create: + mock_create.side_effect = ValidationError.from_exception_data("test", line_errors=[], input_type="python") + + with pytest.raises(AgentInitializationException, match="Failed to create OpenAI settings."): + OpenAIAssistantAgent( + service_id="test", api_key="test_api_key", org_id="test_org_id", ai_model_id="test_model_id" + ) + + +@pytest.mark.parametrize("exclude_list", [["OPENAI_CHAT_MODEL_ID"]], indirect=True) +def test_azure_openai_agent_create_missing_chat_model_id_throws(openai_unit_test_env): + with pytest.raises(AgentInitializationException, match="The OpenAI chat model ID is required."): + OpenAIAssistantAgent(service_id="test_service", env_file_path="test.env") + + +@pytest.mark.parametrize("exclude_list", [["OPENAI_API_KEY"]], indirect=True) +def test_azure_openai_agent_create_missing_api_key_throws(openai_unit_test_env): + with pytest.raises( + AgentInitializationException, match="The OpenAI API key is required, if a client is not provided." 
+ ): + OpenAIAssistantAgent(env_file_path="test.env") + + +def test_create_open_ai_assistant_definition_with_json_metadata(mock_assistant_json, openai_unit_test_env): + with ( + patch.object( + OpenAIAssistantBase, + "_create_open_ai_assistant_definition", + return_value={ + "ai_model_id": "test_model", + "description": "test_description", + "id": "test_id", + "instructions": "test_instructions", + "name": "test_name", + "enable_code_interpreter": True, + "enable_file_search": True, + "enable_json_response": True, + "code_interpreter_file_ids": ["file1", "file2"], + "temperature": 0.7, + "top_p": 0.9, + "vector_store_id": "vector_store1", + "metadata": { + "__run_options": { + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + }, + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + }, + ) as mock_create_def, + ): + assert mock_create_def.return_value == { + "ai_model_id": "test_model", + "description": "test_description", + "id": "test_id", + "instructions": "test_instructions", + "name": "test_name", + "enable_code_interpreter": True, + "enable_file_search": True, + "enable_json_response": True, + "code_interpreter_file_ids": ["file1", "file2"], + "temperature": 0.7, + "top_p": 0.9, + "vector_store_id": "vector_store1", + "metadata": { + "__run_options": { + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + }, + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + + +async def test_retrieve_agent(kernel, openai_unit_test_env): + with ( + patch.object( + OpenAIAssistantAgent, "_create_client", return_value=MagicMock(spec=AsyncOpenAI) + ) as mock_create_client, + patch.object( + OpenAIAssistantBase, + "_create_open_ai_assistant_definition", + return_value={ + "ai_model_id": "test_model", + "description": "test_description", + "id": "test_id", + "instructions": "test_instructions", + "name": "test_name", + "enable_code_interpreter": True, + "enable_file_search": True, + "enable_json_response": True, + "code_interpreter_file_ids": ["file1", "file2"], + "temperature": 0.7, + "top_p": 0.9, + "vector_store_id": "vector_store1", + "metadata": { + "__run_options": { + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + }, + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + }, + ) as mock_create_def, + ): + mock_client_instance = mock_create_client.return_value + mock_client_instance.beta = MagicMock() + mock_client_instance.beta.assistants = MagicMock() + + mock_client_instance.beta.assistants.retrieve = AsyncMock(return_value=AsyncMock(spec=Assistant)) + + retrieved_agent = await OpenAIAssistantAgent.retrieve(id="test_id", api_key="test_api_key", kernel=kernel) + assert retrieved_agent.model_dump( + include={ + "ai_model_id", + "description", + "id", + "instructions", + "name", + "enable_code_interpreter", + "enable_file_search", + "enable_json_response", + "code_interpreter_file_ids", + "temperature", + "top_p", + "vector_store_id", + "metadata", + "max_completion_tokens", + "max_prompt_tokens", + "parallel_tool_calls_enabled", + "truncation_message_count", + } + ) == { + "ai_model_id": "test_model", + 
"description": "test_description", + "id": "test_id", + "instructions": "test_instructions", + "name": "test_name", + "enable_code_interpreter": True, + "enable_file_search": True, + "enable_json_response": True, + "code_interpreter_file_ids": ["file1", "file2"], + "temperature": 0.7, + "top_p": 0.9, + "vector_store_id": "vector_store1", + "metadata": { + "__run_options": { + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + }, + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + mock_client_instance.beta.assistants.retrieve.assert_called_once_with("test_id") + mock_create_def.assert_called_once() + + +@pytest.mark.parametrize( + "exclude_list, client, api_key, should_raise, expected_exception_msg, should_create_client_call", + [ + ([], None, "test_api_key", False, None, True), + ([], AsyncMock(spec=AsyncOpenAI), None, False, None, False), + ([], AsyncMock(spec=AsyncOpenAI), "test_api_key", False, None, False), + ( + ["OPENAI_API_KEY"], + None, + None, + True, + "The OpenAI API key is required, if a client is not provided.", + False, + ), + ], + indirect=["exclude_list"], +) +async def test_retrieve_agent_handling_api_key_and_client( + openai_unit_test_env, + exclude_list, + kernel, + client, + api_key, + should_raise, + expected_exception_msg, + should_create_client_call, +): + is_api_key_present = "OPENAI_API_KEY" not in exclude_list + + with ( + patch.object( + OpenAIAssistantAgent, + "_create_open_ai_settings", + return_value=MagicMock( + chat_model_id="test_model", + api_key=MagicMock( + get_secret_value=MagicMock(return_value="test_api_key" if is_api_key_present else None) + ) + if is_api_key_present + else None, + ), + ), + patch.object( + OpenAIAssistantAgent, + "_create_client", + return_value=AsyncMock(spec=AsyncOpenAI), + ) as mock_create_client, + patch.object( + OpenAIAssistantBase, + "_create_open_ai_assistant_definition", + return_value={ + "ai_model_id": "test_model", + "description": "test_description", + "id": "test_id", + "name": "test_name", + }, + ) as mock_create_def, + ): + if client: + client.beta = MagicMock() + client.beta.assistants = MagicMock() + client.beta.assistants.retrieve = AsyncMock(return_value=MagicMock(spec=Assistant)) + else: + mock_client_instance = mock_create_client.return_value + mock_client_instance.beta = MagicMock() + mock_client_instance.beta.assistants = MagicMock() + mock_client_instance.beta.assistants.retrieve = AsyncMock(return_value=MagicMock(spec=Assistant)) + + if should_raise: + with pytest.raises(AgentInitializationException, match=expected_exception_msg): + await OpenAIAssistantAgent.retrieve(id="test_id", kernel=kernel, api_key=api_key, client=client) + return + + retrieved_agent = await OpenAIAssistantAgent.retrieve( + id="test_id", kernel=kernel, api_key=api_key, client=client + ) + + if should_create_client_call: + mock_create_client.assert_called_once() + else: + mock_create_client.assert_not_called() + + assert retrieved_agent.ai_model_id == "test_model" + mock_create_def.assert_called_once() + if client: + client.beta.assistants.retrieve.assert_called_once_with("test_id") + else: + mock_client_instance.beta.assistants.retrieve.assert_called_once_with("test_id") diff --git a/python/tests/unit/agents/test_open_ai_assistant_base.py b/python/tests/unit/agents/test_open_ai_assistant_base.py new file mode 100644 index 000000000000..332d9a33d30f --- /dev/null +++ 
b/python/tests/unit/agents/test_open_ai_assistant_base.py @@ -0,0 +1,1776 @@ +# Copyright (c) Microsoft. All rights reserved. + +from datetime import datetime, timedelta, timezone +from typing import Any +from unittest.mock import AsyncMock, MagicMock, mock_open, patch + +import pytest +from openai import AsyncAzureOpenAI, AsyncOpenAI +from openai.lib.streaming._assistants import AsyncAssistantEventHandler, AsyncAssistantStreamManager +from openai.resources.beta.threads.runs.runs import Run +from openai.types.beta.assistant import Assistant, ToolResources, ToolResourcesCodeInterpreter, ToolResourcesFileSearch +from openai.types.beta.assistant_stream_event import ( + MessageDeltaEvent, + ThreadMessageDelta, + ThreadRunFailed, + ThreadRunRequiresAction, + ThreadRunStepCompleted, + ThreadRunStepDelta, +) +from openai.types.beta.assistant_tool import CodeInterpreterTool, FileSearchTool +from openai.types.beta.function_tool import FunctionDefinition, FunctionTool +from openai.types.beta.threads import ImageFileDelta, ImageFileDeltaBlock, MessageDelta, TextDelta, TextDeltaBlock +from openai.types.beta.threads.annotation import FileCitationAnnotation, FilePathAnnotation +from openai.types.beta.threads.file_citation_annotation import FileCitation +from openai.types.beta.threads.file_citation_delta_annotation import FileCitationDeltaAnnotation +from openai.types.beta.threads.file_path_annotation import FilePath +from openai.types.beta.threads.image_file import ImageFile +from openai.types.beta.threads.image_file_content_block import ImageFileContentBlock +from openai.types.beta.threads.required_action_function_tool_call import Function +from openai.types.beta.threads.required_action_function_tool_call import Function as RequiredActionFunction +from openai.types.beta.threads.run import ( + LastError, + RequiredAction, + RequiredActionFunctionToolCall, + RequiredActionSubmitToolOutputs, + TruncationStrategy, +) +from openai.types.beta.threads.runs import ( + FunctionToolCallDelta, + RunStep, + RunStepDelta, + RunStepDeltaEvent, + ToolCallDeltaObject, + ToolCallsStepDetails, +) +from openai.types.beta.threads.runs.code_interpreter_tool_call import ( + CodeInterpreter, + CodeInterpreterToolCall, +) +from openai.types.beta.threads.runs.code_interpreter_tool_call_delta import CodeInterpreter as CodeInterpreterDelta +from openai.types.beta.threads.runs.code_interpreter_tool_call_delta import CodeInterpreterToolCallDelta +from openai.types.beta.threads.runs.function_tool_call import Function as RunsFunction +from openai.types.beta.threads.runs.function_tool_call import FunctionToolCall +from openai.types.beta.threads.runs.function_tool_call_delta import Function as FunctionForToolCallDelta +from openai.types.beta.threads.runs.message_creation_step_details import MessageCreation, MessageCreationStepDetails +from openai.types.beta.threads.runs.run_step import Usage +from openai.types.beta.threads.text import Text +from openai.types.beta.threads.text_content_block import TextContentBlock +from openai.types.shared.response_format_json_object import ResponseFormatJSONObject + +from semantic_kernel.agents.open_ai.assistant_content_generation import ( + generate_function_call_content, + generate_function_result_content, + generate_message_content, + get_function_call_contents, + get_message_contents, +) +from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent +from semantic_kernel.contents.annotation_content import AnnotationContent +from semantic_kernel.contents.chat_history import 
ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.file_reference_content import FileReferenceContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.contents.image_content import ImageContent +from semantic_kernel.contents.text_content import TextContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions.agent_exceptions import ( + AgentExecutionException, + AgentFileNotFoundException, + AgentInitializationException, + AgentInvokeException, +) +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.functions.kernel_function_from_method import KernelFunctionFromMethod +from semantic_kernel.kernel import Kernel + +# region Test Fixtures + + +@pytest.fixture +def azure_openai_assistant_agent(kernel: Kernel, azure_openai_unit_test_env): + return AzureAssistantAgent( + kernel=kernel, + service_id="test_service", + name="test_name", + instructions="test_instructions", + api_key="test", + metadata={"key": "value"}, + api_version="2024-05-01", + description="test_description", + ai_model_id="test_model", + enable_code_interpreter=True, + enable_file_search=True, + vector_store_id="vector_store1", + file_ids=["file1", "file2"], + temperature=0.7, + top_p=0.9, + enable_json_response=True, + ) + + +@pytest.fixture +def mock_assistant(): + return Assistant( + created_at=123456789, + object="assistant", + metadata={ + "__run_options": { + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + }, + model="test_model", + description="test_description", + id="test_id", + instructions="test_instructions", + name="test_name", + tools=[{"type": "code_interpreter"}, {"type": "file_search"}], + temperature=0.7, + top_p=0.9, + response_format={"type": "json_object"}, + tool_resources=ToolResources( + code_interpreter=ToolResourcesCodeInterpreter(file_ids=["file1", "file2"]), + file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), + ), + ) + + +@pytest.fixture +def mock_thread(): + class MockThread: + id = "test_thread_id" + + return MockThread() + + +@pytest.fixture +def mock_chat_message_content(): + return ChatMessageContent(role=AuthorRole.USER, content="test message", metadata={"key": "value"}) + + +@pytest.fixture +def mock_message(): + class MockMessage: + id = "test_message_id" + role = "user" + + return MockMessage() + + +@pytest.fixture +def mock_thread_messages(): + class MockMessage: + def __init__(self, id, role, content, assistant_id=None): + self.id = id + self.role = role + self.content = content + self.assistant_id = assistant_id + + return [ + MockMessage( + id="test_message_id_1", + role="user", + content=[ + TextContentBlock( + type="text", + text=Text( + value="Hello", + annotations=[ + FilePathAnnotation( + type="file_path", + file_path=FilePath(file_id="test_file_id"), + end_index=5, + start_index=0, + text="Hello", + ), + FileCitationAnnotation( + type="file_citation", + file_citation=FileCitation(file_id="test_file_id", quote="test quote"), + text="Hello", + start_index=0, + end_index=5, + ), + ], + ), + ) + ], + ), + MockMessage( + id="test_message_id_2", + role="assistant", + content=[ + ImageFileContentBlock(type="image_file", image_file=ImageFile(file_id="test_file_id", detail="auto")) 
+ ], + assistant_id="assistant_1", + ), + ] + + +@pytest.fixture +def mock_run_failed(): + return Run( + id="run_id", + status="failed", + assistant_id="assistant_id", + created_at=123456789, + instructions="instructions", + model="model", + object="thread.run", + thread_id="thread_id", + tools=[], + parallel_tool_calls=True, + ) + + +@pytest.fixture +def mock_run_required_action(): + return Run( + id="run_id", + status="requires_action", + assistant_id="assistant_id", + created_at=123456789, + instructions="instructions", + model="model", + object="thread.run", + thread_id="thread_id", + tools=[], + required_action=RequiredAction( + type="submit_tool_outputs", + submit_tool_outputs=RequiredActionSubmitToolOutputs( + tool_calls=[ + RequiredActionFunctionToolCall( + id="tool_call_id", + type="function", + function=RequiredActionFunction(arguments="{}", name="function_name"), + ) + ] + ), + ), + parallel_tool_calls=True, + ) + + +@pytest.fixture +def mock_run_completed(): + return Run( + id="run_id", + status="completed", + assistant_id="assistant_id", + created_at=123456789, + instructions="instructions", + model="model", + object="thread.run", + thread_id="thread_id", + tools=[], + required_action=RequiredAction( + type="submit_tool_outputs", + submit_tool_outputs=RequiredActionSubmitToolOutputs( + tool_calls=[ + RequiredActionFunctionToolCall( + id="tool_call_id", type="function", function=Function(arguments="{}", name="function_name") + ) + ] + ), + ), + parallel_tool_calls=True, + ) + + +@pytest.fixture +def mock_run_incomplete(): + return Run( + id="run_id", + status="incomplete", + assistant_id="assistant_id", + created_at=123456789, + instructions="instructions", + model="model", + object="thread.run", + thread_id="thread_id", + tools=[], + required_action=RequiredAction( + type="submit_tool_outputs", + submit_tool_outputs=RequiredActionSubmitToolOutputs( + tool_calls=[ + RequiredActionFunctionToolCall( + id="tool_call_id", type="function", function=Function(arguments="{}", name="function_name") + ) + ] + ), + ), + parallel_tool_calls=True, + ) + + +@pytest.fixture +def mock_run_cancelled(): + return Run( + id="run_id", + status="cancelled", + assistant_id="assistant_id", + created_at=123456789, + instructions="instructions", + model="model", + object="thread.run", + thread_id="thread_id", + tools=[], + required_action=RequiredAction( + type="submit_tool_outputs", + submit_tool_outputs=RequiredActionSubmitToolOutputs( + tool_calls=[ + RequiredActionFunctionToolCall( + id="tool_call_id", type="function", function=Function(arguments="{}", name="function_name") + ) + ] + ), + ), + parallel_tool_calls=True, + ) + + +@pytest.fixture +def mock_function_call_content(): + return FunctionCallContent(id="function_call_id", name="function_name", arguments={}) + + +@pytest.fixture +def mock_run_in_progress(): + class MockRun: + def __init__(self): + self.id = "run_id" + self.status = "requires_action" + self.assistant_id = "assistant_id" + self.created_at = int(datetime.now(timezone.utc).timestamp()) + self.instructions = "instructions" + self.model = "model" + self.object = "run" + self.thread_id = "thread_id" + self.tools = [] + self.poll_count = 0 + self.required_action = RequiredAction( + type="submit_tool_outputs", + submit_tool_outputs=RequiredActionSubmitToolOutputs( + tool_calls=[ + RequiredActionFunctionToolCall( + id="tool_call_id", + type="function", + function=Function(arguments="{}", name="function_name"), + ) + ] + ), + ) + self.last_error = None + + def update_status(self): + 
self.poll_count += 1 + if self.poll_count > 2: + self.status = "completed" + + return MockRun() + + +@pytest.fixture +def mock_run_step_tool_call(): + class MockToolCall: + def __init__(self): + self.type = "code_interpreter" + self.code_interpreter = MagicMock(input="print('Hello, world!')") + + return RunStep( + id="step_id_1", + type="tool_calls", + completed_at=int(datetime.now(timezone.utc).timestamp()), + created_at=int((datetime.now(timezone.utc) - timedelta(minutes=1)).timestamp()), + step_details=ToolCallsStepDetails( + tool_calls=[ + CodeInterpreterToolCall( + type="code_interpreter", + id="tool_call_id", + code_interpreter=CodeInterpreter(input="test code", outputs=[]), + ), + FunctionToolCall( + type="function", + id="tool_call_id", + function=RunsFunction(arguments="{}", name="function_name", outpt="test output"), + ), + ], + type="tool_calls", + ), + assistant_id="assistant_id", + object="thread.run.step", + run_id="run_id", + status="completed", + thread_id="thread_id", + ) + + +@pytest.fixture +def mock_run_step_function_tool_call(): + class MockToolCall: + def __init__(self): + self.type = "function" + + return RunStep( + id="step_id_1", + type="tool_calls", + completed_at=int(datetime.now(timezone.utc).timestamp()), + created_at=int((datetime.now(timezone.utc) - timedelta(minutes=1)).timestamp()), + step_details=ToolCallsStepDetails( + tool_calls=[ + FunctionToolCall( + type="function", + id="tool_call_id", + function=RunsFunction(arguments="{}", name="function_name", outpt="test output"), + ), + ], + type="tool_calls", + ), + assistant_id="assistant_id", + object="thread.run.step", + run_id="run_id", + status="completed", + thread_id="thread_id", + ) + + +@pytest.fixture +def mock_run_step_message_creation(): + class MockMessageCreation: + def __init__(self): + self.message_id = "message_id" + + class MockStepDetails: + def __init__(self): + self.message_creation = MockMessageCreation() + + return RunStep( + id="step_id_2", + type="message_creation", + completed_at=int(datetime.now(timezone.utc).timestamp()), + created_at=int((datetime.now(timezone.utc) - timedelta(minutes=2)).timestamp()), + step_details=MessageCreationStepDetails( + type="message_creation", message_creation=MessageCreation(message_id="test") + ), + assistant_id="assistant_id", + object="thread.run.step", + run_id="run_id", + status="completed", + thread_id="thread_id", + ) + + +class MockEvent: + def __init__(self, event, data): + self.event = event + self.data = data + + +class MockRunData: + def __init__(self, id, status): + self.id = id + self.status = status + # Add other attributes as needed + + +def create_thread_message_delta_mock(): + return ThreadMessageDelta( + data=MessageDeltaEvent( + id="mock_msg_id", + delta=MessageDelta( + content=[ + TextDeltaBlock( + index=0, + type="text", + text=TextDelta( + annotations=[ + FileCitationDeltaAnnotation( + index=0, + type="file_citation", + start_index=1, + end_index=3, + text="annotation", + ) + ], + value="Hello", + ), + ), + ImageFileDeltaBlock( + index=0, + type="image_file", + image_file=ImageFileDelta( + file_id="test_file_id", + detail="auto", + ), + ), + ], + role=None, + ), + object="thread.message.delta", + ), + event="thread.message.delta", + ) + + +def create_thread_run_step_delta_mock(): + function = FunctionForToolCallDelta(name="math-Add", arguments="", output=None) + function_tool_call = FunctionToolCallDelta( + index=0, type="function", id="call_RcvYVzsppjjnUZcC47fAlwTW", function=function + ) + code = 
CodeInterpreterDelta(input="import os") + code_tool_call = CodeInterpreterToolCallDelta( + index=1, type="code_interpreter", id="call_RcvYVzsppjjnUZcC47fAlwTW", code_interpreter=code + ) + + step_details = ToolCallDeltaObject(type="tool_calls", tool_calls=[function_tool_call, code_tool_call]) + delta = RunStepDelta(step_details=step_details) + run_step_delta_event = RunStepDeltaEvent( + id="step_FXzQ44kRmoeHOPUstkEI1UL5", delta=delta, object="thread.run.step.delta" + ) + return ThreadRunStepDelta(data=run_step_delta_event, event="thread.run.step.delta") + + +def mock_thread_requires_action_run(): + return ThreadRunRequiresAction( + data=Run( + id="run_00OwjJnEg2SGJy8sky7ip35P", + assistant_id="asst_wMMAX5F59szE7YHrCKSSgJlE", + cancelled_at=None, + completed_at=None, + created_at=1727798684, + expires_at=1727799284, + failed_at=None, + incomplete_details=None, + instructions="Answer questions about the menu.", + last_error=None, + max_completion_tokens=None, + max_prompt_tokens=None, + metadata={}, + model="gpt-4o-2024-08-06", + object="thread.run", + parallel_tool_calls=True, + required_action=RequiredAction( + submit_tool_outputs=RequiredActionSubmitToolOutputs( + tool_calls=[ + RequiredActionFunctionToolCall( + id="call_OTcZMjhm7WbhFnGkrmUjs68T", + function=Function(arguments="{}", name="menu-get_specials"), + type="function", + ) + ] + ), + type="submit_tool_outputs", + ), + response_format="auto", + started_at=1727798685, + status="requires_action", + thread_id="thread_jR4ZLlUwSrPcsLfdnGyFxi4Z", + tool_choice="auto", + tools=[ + FunctionTool( + function=FunctionDefinition( + name="menu-get_item_price", + description="Provides the price of the requested menu item.", + parameters={ + "type": "object", + "properties": { + "menu_item": {"type": "string", "description": "The name of the menu item."} + }, + "required": ["menu_item"], + }, + strict=False, + ), + type="function", + ), + FunctionTool( + function=FunctionDefinition( + name="menu-get_specials", + description="Provides a list of specials from the menu.", + parameters={"type": "object", "properties": {}, "required": []}, + strict=False, + ), + type="function", + ), + ], + truncation_strategy=TruncationStrategy(type="auto", last_messages=None), + usage=None, + temperature=1.0, + top_p=1.0, + tool_resources={"code_interpreter": {"file_ids": []}}, + ), + event="thread.run.requires_action", + ) + + +def mock_thread_run_step_completed(): + return ThreadRunStepCompleted( + data=RunStep( + id="step_id_2", + type="message_creation", + completed_at=int(datetime.now(timezone.utc).timestamp()), + created_at=int((datetime.now(timezone.utc) - timedelta(minutes=2)).timestamp()), + step_details=MessageCreationStepDetails( + type="message_creation", message_creation=MessageCreation(message_id="test") + ), + assistant_id="assistant_id", + object="thread.run.step", + run_id="run_id", + status="completed", + thread_id="thread_id", + usage=Usage(completion_tokens=10, prompt_tokens=5, total_tokens=15), + ), + event="thread.run.step.completed", + ) + + +def mock_thread_run_step_completed_with_code(): + return ThreadRunStepCompleted( + data=RunStep( + id="step_id_2", + type="message_creation", + completed_at=int(datetime.now(timezone.utc).timestamp()), + created_at=int((datetime.now(timezone.utc) - timedelta(minutes=2)).timestamp()), + step_details=ToolCallsStepDetails( + type="tool_calls", + tool_calls=[ + CodeInterpreterToolCall( + id="tool_call_id", + code_interpreter=CodeInterpreter(input="test code", outputs=[]), + type="code_interpreter", + ) + ], 
+ ), + assistant_id="assistant_id", + object="thread.run.step", + run_id="run_id", + status="completed", + thread_id="thread_id", + usage=Usage(completion_tokens=10, prompt_tokens=5, total_tokens=15), + ), + event="thread.run.step.completed", + ) + + +def mock_run_with_last_error(): + return ThreadRunFailed( + data=Run( + id="run_00OwjJnEg2SGJy8sky7ip35P", + assistant_id="asst_wMMAX5F59szE7YHrCKSSgJlE", + cancelled_at=None, + completed_at=None, + created_at=1727798684, + expires_at=1727799284, + failed_at=None, + incomplete_details=None, + instructions="Answer questions about the menu.", + last_error=LastError(code="server_error", message="Server error"), + max_completion_tokens=None, + max_prompt_tokens=None, + metadata={}, + model="gpt-4o-2024-08-06", + object="thread.run", + parallel_tool_calls=True, + required_action=None, + response_format="auto", + started_at=1727798685, + status="failed", + thread_id="thread_jR4ZLlUwSrPcsLfdnGyFxi4Z", + tool_choice="auto", + tools=[], + truncation_strategy=TruncationStrategy(type="auto", last_messages=None), + usage=None, + temperature=1.0, + top_p=1.0, + tool_resources={"code_interpreter": {"file_ids": []}}, + ), + event="thread.run.failed", + ) + + +class MockAsyncIterable: + def __init__(self, items): + self.items = items.copy() + + def __aiter__(self): + self._iter = iter(self.items) + return self + + async def __anext__(self): + try: + return next(self._iter) + except StopIteration: + raise StopAsyncIteration + + +class MockStream: + def __init__(self, events): + self.events = events + + async def __aenter__(self): + return MockAsyncIterable(self.events) + + async def __aexit__(self, exc_type, exc_val, exc_tb): + pass + + +# endregion + +# region Tests + + +async def test_create_assistant( + azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, openai_unit_test_env +): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + assistant = await azure_openai_assistant_agent.create_assistant( + ai_model_id="test_model", + description="test_description", + instructions="test_instructions", + name="test_name", + enable_code_interpreter=True, + enable_file_search=True, + vector_store_id="vector_store1", + code_interpreter_file_ids=["file1", "file2"], + metadata={"key": "value"}, + ) + + assert assistant.model == "test_model" + assert assistant.description == "test_description" + assert assistant.id == "test_id" + assert assistant.instructions == "test_instructions" + assert assistant.name == "test_name" + assert assistant.tools == [CodeInterpreterTool(type="code_interpreter"), FileSearchTool(type="file_search")] + assert assistant.temperature == 0.7 + assert assistant.top_p == 0.9 + assert assistant.response_format == ResponseFormatJSONObject(type="json_object") + assert assistant.tool_resources == ToolResources( + code_interpreter=ToolResourcesCodeInterpreter(file_ids=["file1", "file2"]), + file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), + ) + + +async def test_modify_assistant( + azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, openai_unit_test_env +): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + 
assistant = await azure_openai_assistant_agent.create_assistant( + ai_model_id="test_model", + description="test_description", + instructions="test_instructions", + name="test_name", + enable_code_interpreter=True, + enable_file_search=True, + vector_store_id="vector_store1", + code_interpreter_file_ids=["file1", "file2"], + metadata={"key": "value"}, + ) + + mock_client.beta.assistants.update = AsyncMock(return_value=mock_assistant) + + assistant = await azure_openai_assistant_agent.modify_assistant( + assistant_id=assistant.id, + ai_model_id="test_model", + description="test_description", + instructions="test_instructions", + name="test_name", + enable_code_interpreter=True, + enable_file_search=True, + vector_store_id="vector_store1", + code_interpreter_file_ids=["file1", "file2"], + metadata={"key": "value"}, + ) + + assert assistant.model == "test_model" + assert assistant.description == "test_description" + assert assistant.id == "test_id" + assert assistant.instructions == "test_instructions" + assert assistant.name == "test_name" + assert assistant.tools == [CodeInterpreterTool(type="code_interpreter"), FileSearchTool(type="file_search")] + assert assistant.temperature == 0.7 + assert assistant.top_p == 0.9 + assert assistant.response_format == ResponseFormatJSONObject(type="json_object") + assert assistant.tool_resources == ToolResources( + code_interpreter=ToolResourcesCodeInterpreter(file_ids=["file1", "file2"]), + file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), + ) + + +async def test_modify_assistant_not_initialized_throws( + azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, openai_unit_test_env +): + with pytest.raises(AgentInitializationException, match="The assistant has not been created."): + _ = await azure_openai_assistant_agent.modify_assistant( + assistant_id="id", + ai_model_id="test_model", + description="test_description", + instructions="test_instructions", + name="test_name", + enable_code_interpreter=True, + enable_file_search=True, + vector_store_id="vector_store1", + code_interpreter_file_ids=["file1", "file2"], + metadata={"key": "value"}, + ) + + +async def test_create_assistant_with_model_attributes( + azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, openai_unit_test_env +): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + assistant = await azure_openai_assistant_agent.create_assistant( + ai_model_id="test_model", + description="test_description", + instructions="test_instructions", + name="test_name", + enable_code_interpreter=True, + enable_file_search=True, + vector_store_id="vector_store1", + code_interpreter_file_ids=["file1", "file2"], + metadata={"key": "value"}, + kwargs={"temperature": 0.1}, + ) + + assert assistant.model == "test_model" + assert assistant.description == "test_description" + assert assistant.id == "test_id" + assert assistant.instructions == "test_instructions" + assert assistant.name == "test_name" + assert assistant.tools == [CodeInterpreterTool(type="code_interpreter"), FileSearchTool(type="file_search")] + assert assistant.temperature == 0.7 + assert assistant.top_p == 0.9 + assert assistant.response_format == ResponseFormatJSONObject(type="json_object") + assert assistant.tool_resources == ToolResources( + 
code_interpreter=ToolResourcesCodeInterpreter(file_ids=["file1", "file2"]), + file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), + ) + + +async def test_create_assistant_delete_and_recreate( + azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, openai_unit_test_env +): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + mock_client.beta.assistants.delete = AsyncMock() + + assistant = await azure_openai_assistant_agent.create_assistant() + + assert assistant is not None + + await azure_openai_assistant_agent.delete() + + assert azure_openai_assistant_agent._is_deleted + + assistant = await azure_openai_assistant_agent.create_assistant() + + assert azure_openai_assistant_agent._is_deleted is False + + +async def test_get_channel_keys(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): + keys = azure_openai_assistant_agent.get_channel_keys() + for key in keys: + assert isinstance(key, str) + + +async def test_create_channel( + azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, mock_thread, openai_unit_test_env +): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + mock_client.beta.threads = MagicMock() + mock_client.beta.threads.create = AsyncMock(return_value=mock_thread) + + channel = await azure_openai_assistant_agent.create_channel() + + assert channel is not None + + +async def test_get_assistant_metadata( + azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, openai_unit_test_env +): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + assistant = await azure_openai_assistant_agent.create_assistant() + + assistant.metadata is not None + + +async def test_get_agent_tools(azure_openai_assistant_agent, mock_assistant, openai_unit_test_env): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + func = KernelFunctionFromMethod(method=kernel_function(lambda x: x**2, name="square"), plugin_name="math") + azure_openai_assistant_agent.kernel.add_function(plugin_name="test", function=func) + + assistant = await azure_openai_assistant_agent.create_assistant() + + assert assistant.tools is not None + assert len(assistant.tools) == 2 + tools = azure_openai_assistant_agent.tools + assert len(tools) == 3 + assert tools[0] == {"type": "code_interpreter"} + assert tools[1] == {"type": "file_search"} + assert tools[2]["type"].startswith("function") + + +async def test_get_assistant_tools_throws_when_no_assistant( + azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env +): + with pytest.raises(AgentInitializationException, match="The assistant has not been created."): + _ = azure_openai_assistant_agent.tools + + +async def test_create_thread(azure_openai_assistant_agent, mock_thread, openai_unit_test_env): 
+ with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.threads.create = AsyncMock(return_value=mock_thread) + + thread_id = await azure_openai_assistant_agent.create_thread( + code_interpreter_file_ids=["file1", "file2"], + vector_store_id="vector_store1", + messages=[ + ChatMessageContent(role=AuthorRole.USER, content="test message"), + ], + metadata={"key": "value"}, + ) + + assert thread_id == "test_thread_id" + mock_client.beta.threads.create.assert_called_once() + _, called_kwargs = mock_client.beta.threads.create.call_args + assert "tool_resources" in called_kwargs + assert called_kwargs["tool_resources"] == { + "code_interpreter": {"file_ids": ["file1", "file2"]}, + "file_search": {"vector_store_ids": ["vector_store1"]}, + } + assert "messages" in called_kwargs + assert called_kwargs["messages"] == [{"role": "user", "content": {"type": "text", "text": "test message"}}] + assert "metadata" in called_kwargs + assert called_kwargs["metadata"] == {"key": "value"} + + +async def test_create_thread_throws_with_invalid_role(azure_openai_assistant_agent, mock_thread, openai_unit_test_env): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.threads.create = AsyncMock(return_value=mock_thread) + + with pytest.raises( + AgentExecutionException, + match="Invalid message role `tool`", + ): + _ = await azure_openai_assistant_agent.create_thread( + messages=[ChatMessageContent(role=AuthorRole.TOOL, content="test message")] + ) + + +async def test_delete_thread(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.threads.delete = AsyncMock() + + await azure_openai_assistant_agent.delete_thread("test_thread_id") + + mock_client.beta.threads.delete.assert_called_once_with("test_thread_id") + + +async def test_delete(azure_openai_assistant_agent, mock_assistant, openai_unit_test_env): + azure_openai_assistant_agent.assistant = mock_assistant + + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.delete = AsyncMock() + + azure_openai_assistant_agent._is_deleted = False + result = await azure_openai_assistant_agent.delete() + + assert result == azure_openai_assistant_agent._is_deleted + mock_client.beta.assistants.delete.assert_called_once_with(mock_assistant.id) + + +async def test_add_file(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.files = MagicMock() + mock_client.files.create = AsyncMock(return_value=MagicMock(id="test_file_id")) + + mock_open_file = mock_open(read_data="file_content") + with patch("builtins.open", mock_open_file): + file_id = await azure_openai_assistant_agent.add_file("test_file_path", "assistants") + + assert file_id == "test_file_id" + mock_open_file.assert_called_once_with("test_file_path", "rb") + mock_client.files.create.assert_called_once() + + +async def 
test_add_file_not_found(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.files = MagicMock() + + with patch("builtins.open", mock_open(read_data="file_content")) as mock_open_file: + mock_open_file.side_effect = FileNotFoundError + + with pytest.raises(AgentFileNotFoundException, match="File not found: test_file_path"): + await azure_openai_assistant_agent.add_file("test_file_path", "assistants") + + +async def test_delete_file(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.files = MagicMock() + mock_client.files.delete = AsyncMock() + + await azure_openai_assistant_agent.delete_file("test_file_id") + + mock_client.files.delete.assert_called_once_with("test_file_id") + + +async def test_delete_file_raises_exception(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.files = MagicMock() + mock_client.files.delete = AsyncMock(side_effect=Exception("Deletion failed")) + + with pytest.raises(AgentExecutionException, match="Error deleting file."): + await azure_openai_assistant_agent.delete_file("test_file_id") + + mock_client.files.delete.assert_called_once_with("test_file_id") + + +async def test_create_vector_store(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.vector_stores = MagicMock() + mock_client.beta.vector_stores.create = AsyncMock(return_value=MagicMock(id="test_vector_store_id")) + + vector_store_id = await azure_openai_assistant_agent.create_vector_store(["file_id1", "file_id2"]) + + assert vector_store_id == "test_vector_store_id" + mock_client.beta.vector_stores.create.assert_called_once_with(file_ids=["file_id1", "file_id2"]) + + +async def test_create_vector_store_single_file_id( + azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env +): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.vector_stores = MagicMock() + mock_client.beta.vector_stores.create = AsyncMock(return_value=MagicMock(id="test_vector_store_id")) + + vector_store_id = await azure_openai_assistant_agent.create_vector_store("file_id1") + + assert vector_store_id == "test_vector_store_id" + mock_client.beta.vector_stores.create.assert_called_once_with(file_ids=["file_id1"]) + + +async def test_create_vector_store_raises_exception( + azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env +): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.vector_stores = MagicMock() + mock_client.beta.vector_stores.create = AsyncMock(side_effect=Exception("Creation failed")) + + with pytest.raises(AgentExecutionException, match="Error creating vector store."): + await azure_openai_assistant_agent.create_vector_store("file_id1") + + mock_client.beta.vector_stores.create.assert_called_once_with(file_ids=["file_id1"]) + + +async def test_delete_vector_store(azure_openai_assistant_agent: AzureAssistantAgent, 
openai_unit_test_env): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.vector_stores = MagicMock() + mock_client.beta.vector_stores.delete = AsyncMock() + + await azure_openai_assistant_agent.delete_vector_store("test_vector_store_id") + + mock_client.beta.vector_stores.delete.assert_called_once_with("test_vector_store_id") + + +async def test_delete_vector_store_raises_exception( + azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env +): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.vector_stores = MagicMock() + mock_client.beta.vector_stores.delete = AsyncMock(side_effect=Exception("Deletion failed")) + + with pytest.raises(AgentExecutionException, match="Error deleting vector store."): + await azure_openai_assistant_agent.delete_vector_store("test_vector_store_id") + + mock_client.beta.vector_stores.delete.assert_called_once_with("test_vector_store_id") + + +async def test_add_chat_message( + azure_openai_assistant_agent, mock_chat_message_content, mock_message, openai_unit_test_env +): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.threads.messages = MagicMock() + mock_client.beta.threads.messages.create = AsyncMock(return_value=mock_message) + + result = await azure_openai_assistant_agent.add_chat_message("test_thread_id", mock_chat_message_content) + + assert result.id == "test_message_id" + mock_client.beta.threads.messages.create.assert_called_once_with( + thread_id="test_thread_id", + role="user", + content=[{"type": "text", "text": "test message"}], + ) + + +async def test_add_chat_message_invalid_role( + azure_openai_assistant_agent, mock_chat_message_content, openai_unit_test_env +): + mock_chat_message_content.role = AuthorRole.SYSTEM + + with pytest.raises(AgentExecutionException, match="Invalid message role `system`"): + await azure_openai_assistant_agent.add_chat_message("test_thread_id", mock_chat_message_content) + + +async def test_get_thread_messages( + azure_openai_assistant_agent, mock_thread_messages, mock_assistant, openai_unit_test_env +): + async def mock_list_messages(*args, **kwargs) -> Any: + return MagicMock(data=mock_thread_messages) + + async def mock_retrieve_assistant(*args, **kwargs) -> Any: + return mock_assistant + + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.threads.messages = MagicMock() + mock_client.beta.threads.messages.list = AsyncMock(side_effect=mock_list_messages) + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.retrieve = AsyncMock(side_effect=mock_retrieve_assistant) + + messages = [message async for message in azure_openai_assistant_agent.get_thread_messages("test_thread_id")] + + assert len(messages) == 2 + assert len(messages[0].items) == 3 + assert isinstance(messages[0].items[0], TextContent) + assert isinstance(messages[0].items[1], AnnotationContent) + assert isinstance(messages[0].items[2], AnnotationContent) + assert messages[0].items[0].text == "Hello" + + assert len(messages[1].items) == 1 + assert isinstance(messages[1].items[0], FileReferenceContent) + assert str(messages[1].items[0].file_id) == 
"test_file_id" + + +async def test_invoke( + azure_openai_assistant_agent, + mock_assistant, + mock_run_in_progress, + mock_run_required_action, + mock_chat_message_content, + mock_run_step_tool_call, + mock_run_step_message_creation, + mock_thread_messages, + mock_function_call_content, + openai_unit_test_env, +): + async def mock_poll_run_status(run, thread_id): + run.update_status() + return run + + def mock_get_function_call_contents(run, function_steps): + function_call_content = mock_function_call_content + function_call_content.id = "tool_call_id" # Set expected ID + function_steps[function_call_content.id] = function_call_content + return [function_call_content] + + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + mock_client.beta.threads.runs = MagicMock() + mock_client.beta.threads.runs.create = AsyncMock(return_value=mock_run_in_progress) + mock_client.beta.threads.runs.submit_tool_outputs = AsyncMock() + mock_client.beta.threads.runs.steps = MagicMock() + mock_client.beta.threads.runs.steps.list = AsyncMock( + return_value=MagicMock(data=[mock_run_step_message_creation, mock_run_step_tool_call]) + ) + + azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() + azure_openai_assistant_agent._get_tools = MagicMock(return_value=["tool"]) + azure_openai_assistant_agent._poll_run_status = AsyncMock(side_effect=mock_poll_run_status) + azure_openai_assistant_agent._invoke_function_calls = AsyncMock() + azure_openai_assistant_agent._format_tool_outputs = MagicMock( + return_value=[{"tool_call_id": "id", "output": "output"}] + ) + azure_openai_assistant_agent._retrieve_message = AsyncMock(return_value=mock_thread_messages[0]) + + with patch( + "semantic_kernel.agents.open_ai.assistant_content_generation.get_function_call_contents", + side_effect=mock_get_function_call_contents, + ): + _ = [message async for message in azure_openai_assistant_agent.invoke("thread_id")] + + +async def test_invoke_order( + azure_openai_assistant_agent, + mock_assistant, + mock_run_required_action, + mock_run_step_function_tool_call, + mock_run_step_message_creation, + mock_thread_messages, + mock_function_call_content, +): + poll_count = 0 + + async def mock_poll_run_status(run, thread_id): + nonlocal poll_count + if run.status == "requires_action": + if poll_count == 0: + pass + else: + run.status = "completed" + poll_count += 1 + return run + + def mock_get_function_call_contents(run, function_steps): + function_call_content = mock_function_call_content + function_call_content.id = "tool_call_id" + function_steps[function_call_content.id] = function_call_content + return [function_call_content] + + azure_openai_assistant_agent.assistant = mock_assistant + azure_openai_assistant_agent._poll_run_status = AsyncMock(side_effect=mock_poll_run_status) + azure_openai_assistant_agent._retrieve_message = AsyncMock(return_value=mock_thread_messages[0]) + + with patch( + "semantic_kernel.agents.open_ai.assistant_content_generation.get_function_call_contents", + side_effect=mock_get_function_call_contents, + ): + client = azure_openai_assistant_agent.client + + with patch.object(client.beta.threads.runs, "create", new_callable=AsyncMock) as mock_runs_create: + mock_runs_create.return_value = mock_run_required_action + + with ( + 
patch.object(client.beta.threads.runs, "submit_tool_outputs", new_callable=AsyncMock), + patch.object(client.beta.threads.runs.steps, "list", new_callable=AsyncMock) as mock_steps_list, + ): + mock_steps_list.return_value = MagicMock( + data=[mock_run_step_message_creation, mock_run_step_function_tool_call] + ) + + messages = [] + async for _, content in azure_openai_assistant_agent._invoke_internal("thread_id"): + messages.append(content) + + assert len(messages) == 3 + assert isinstance(messages[0].items[0], FunctionCallContent) + assert isinstance(messages[1].items[0], FunctionResultContent) + assert isinstance(messages[2].items[0], TextContent) + + +async def test_invoke_stream( + azure_openai_assistant_agent, + mock_assistant, + mock_thread_messages, + azure_openai_unit_test_env, +): + events = [ + MockEvent("thread.run.created", MockRunData(id="run_1", status="queued")), + MockEvent("thread.run.in_progress", MockRunData(id="run_1", status="in_progress")), + create_thread_message_delta_mock(), + mock_thread_run_step_completed(), + MockEvent("thread.run.completed", MockRunData(id="run_1", status="completed")), + mock_thread_requires_action_run(), + ] + + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + mock_client.beta.threads.runs = MagicMock() + mock_client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) + + mock_client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) + + azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() + + messages = [] + async for content in azure_openai_assistant_agent.invoke_stream("thread_id", messages=messages): + assert content is not None + + assert len(messages) > 0 + + +async def test_invoke_stream_with_function_call( + azure_openai_assistant_agent, + mock_assistant, + mock_thread_messages, + azure_openai_unit_test_env, +): + events = [create_thread_run_step_delta_mock()] + + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + mock_client.beta.threads.runs = MagicMock() + mock_client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) + + mock_client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) + + azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() + + async for content in azure_openai_assistant_agent.invoke_stream("thread_id"): + assert content is not None + + +async def test_invoke_stream_code_output( + azure_openai_assistant_agent, + mock_assistant, + azure_openai_unit_test_env, +): + events = [mock_thread_run_step_completed_with_code()] + + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + mock_client.beta.threads.runs = MagicMock() + mock_client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) + + 
azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() + + messages = [] + async for content in azure_openai_assistant_agent.invoke_stream("thread_id", messages=messages): + assert content is not None + assert content.metadata.get("code") is True + + +async def test_invoke_stream_requires_action( + azure_openai_assistant_agent, mock_assistant, mock_thread_messages, azure_openai_unit_test_env +): + events = [ + mock_thread_requires_action_run(), + ] + + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + mock_client.beta.threads.runs = MagicMock() + mock_client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) + + mock_client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) + + azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() + + messages = [] + async for content in azure_openai_assistant_agent.invoke_stream("thread_id", messages=messages): + assert content is not None + + assert len(messages) > 0 + + +async def test_invoke_stream_throws_exception( + azure_openai_assistant_agent, mock_assistant, mock_thread_messages, azure_openai_unit_test_env +): + events = [ + mock_run_with_last_error(), + ] + + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + mock_client.beta.threads.runs = MagicMock() + mock_client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) + + mock_client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) + + azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() + + with pytest.raises(AgentInvokeException): + async for _ in azure_openai_assistant_agent.invoke_stream("thread_id"): + pass + + +async def test_invoke_assistant_not_initialized_throws(azure_openai_assistant_agent, openai_unit_test_env): + with pytest.raises(AgentInitializationException, match="The assistant has not been created."): + _ = [message async for message in azure_openai_assistant_agent.invoke("thread_id")] + + +async def test_invoke_agent_deleted_throws(azure_openai_assistant_agent, mock_assistant, openai_unit_test_env): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() + azure_openai_assistant_agent._is_deleted = True + + with pytest.raises(AgentInitializationException, match="The assistant has been deleted."): + _ = [message async for message in azure_openai_assistant_agent.invoke("thread_id")] + + +async def test_invoke_raises_error( + azure_openai_assistant_agent, + mock_assistant, + mock_run_in_progress, + mock_run_step_tool_call, + mock_run_step_message_creation, + openai_unit_test_env, +): + async def mock_poll_run_status(run, thread_id): + run.status = "failed" + 
return run + + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + mock_client.beta.threads.runs = MagicMock() + mock_client.beta.threads.runs.create = AsyncMock(return_value=mock_run_in_progress) + mock_client.beta.threads.runs.submit_tool_outputs = AsyncMock() + mock_client.beta.threads.runs.steps = MagicMock() + mock_client.beta.threads.runs.steps.list = AsyncMock( + return_value=MagicMock(data=[mock_run_step_tool_call, mock_run_step_message_creation]) + ) + + azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() + azure_openai_assistant_agent._get_tools = MagicMock(return_value=["tool"]) + azure_openai_assistant_agent._poll_run_status = AsyncMock(side_effect=mock_poll_run_status) + + with pytest.raises( + AgentInvokeException, match="Run failed with status: `failed` for agent `test_name` and thread `thread_id`" + ): + _ = [message async for message in azure_openai_assistant_agent.invoke("thread_id")] + + +@pytest.fixture +def mock_streaming_assistant_stream_manager() -> AsyncAssistantStreamManager[AsyncAssistantEventHandler]: + assistant_event_handler = AsyncAssistantEventHandler() + + mock_stream = AsyncMock() + mock_stream.__aiter__.return_value = [assistant_event_handler] + + mock_manager = AsyncMock(spec=AsyncAssistantStreamManager) + mock_manager.__aenter__.return_value = mock_stream + mock_manager.__aexit__.return_value = None + + return mock_manager + + +def test_format_tool_outputs(azure_openai_assistant_agent, openai_unit_test_env): + chat_history = ChatHistory() + fcc = FunctionCallContent( + id="test", name="test-function", arguments='{"input": "world"}', metadata={"test": "test"} + ) + frc = FunctionResultContent.from_function_call_content_and_result(fcc, 123, {"test2": "test2"}) + chat_history.add_message(message=frc.to_chat_message_content()) + + tool_outputs = azure_openai_assistant_agent._format_tool_outputs([fcc], chat_history) + assert tool_outputs[0] == {"tool_call_id": "test", "output": "123"} + + +async def test_invoke_function_calls(azure_openai_assistant_agent, openai_unit_test_env): + chat_history = ChatHistory() + fcc = FunctionCallContent( + id="test", name="test-function", arguments='{"input": "world"}', metadata={"test": "test"} + ) + + with patch( + "semantic_kernel.kernel.Kernel.invoke_function_call", new_callable=AsyncMock + ) as mock_invoke_function_call: + mock_invoke_function_call.return_value = "mocked_result" + results = await azure_openai_assistant_agent._invoke_function_calls([fcc], chat_history) + assert results == ["mocked_result"] + mock_invoke_function_call.assert_called_once_with(function_call=fcc, chat_history=chat_history) + + +def test_get_function_call_contents(azure_openai_assistant_agent, mock_run_required_action, openai_unit_test_env): + result = get_function_call_contents(run=mock_run_required_action, function_steps={}) + assert result is not None + + +def test_get_function_call_contents_no_action_required( + azure_openai_assistant_agent, mock_run_required_action, openai_unit_test_env +): + mock_run_required_action.required_action = None + result = get_function_call_contents(run=mock_run_required_action, function_steps={}) + assert result == [] + + +async def test_get_tools(azure_openai_assistant_agent: AzureAssistantAgent, mock_assistant, 
openai_unit_test_env): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() + tools = azure_openai_assistant_agent._get_tools() + assert tools is not None + + +async def test_get_tools_no_assistant_returns_empty_list( + azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env +): + with pytest.raises(AgentInitializationException, match="The assistant has not been created."): + _ = azure_openai_assistant_agent._get_tools() + + +def test_generate_message_content(azure_openai_assistant_agent, mock_thread_messages, openai_unit_test_env): + for message in mock_thread_messages: + result = generate_message_content(assistant_name="test", message=message) + assert result is not None + + +def test_check_if_deleted_throws(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): + azure_openai_assistant_agent._is_deleted = True + with pytest.raises(AgentInitializationException, match="The assistant has been deleted."): + azure_openai_assistant_agent._check_if_deleted() + + +def test_get_message_contents(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): + message = ChatMessageContent(role=AuthorRole.USER, content="test message") + message.items = [ + ImageContent(role=AuthorRole.ASSISTANT, content="test message", uri="http://image.url"), + TextContent(role=AuthorRole.ASSISTANT, text="test message"), + FileReferenceContent(role=AuthorRole.ASSISTANT, file_id="test_file_id"), + TextContent(role=AuthorRole.USER, text="test message"), + FunctionResultContent(role=AuthorRole.ASSISTANT, result=["test result"], id="test_id"), + ] + + result = get_message_contents(message) + assert result is not None + + +async def test_retrieve_message(azure_openai_assistant_agent, mock_thread_messages, openai_unit_test_env): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.assistants = MagicMock() + + mock_client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) + + message = await azure_openai_assistant_agent._retrieve_message( + thread_id="test_thread_id", message_id="test_message_id" + ) + assert message is not None + + +async def test_retrieve_message_fails_polls_again( + azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env +): + with ( + patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client, + patch("semantic_kernel.agents.open_ai.open_ai_assistant_agent.logger", autospec=True), + ): + mock_client.beta = MagicMock() + mock_client.beta.assistants = MagicMock() + + mock_client.beta.threads.messages.retrieve = AsyncMock(side_effect=Exception("Unable to retrieve message")) + + message = await azure_openai_assistant_agent._retrieve_message( + thread_id="test_thread_id", message_id="test_message_id" + ) + assert message is None + + +async def test_poll_run_status( + azure_openai_assistant_agent, mock_run_required_action, mock_run_completed, openai_unit_test_env +): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.assistants = MagicMock() + + 
mock_client.beta.threads.runs.retrieve = AsyncMock(return_value=mock_run_completed) + + # Test successful polling + run = await azure_openai_assistant_agent._poll_run_status( + run=mock_run_required_action, thread_id="test_thread_id" + ) + assert run.status == "completed", f"Expected status 'completed', but got '{run.status}'" + + # Test timeout scenario + mock_client.beta.threads.runs.retrieve = AsyncMock(side_effect=TimeoutError) + azure_openai_assistant_agent.polling_options.run_polling_timeout = timedelta(milliseconds=10) + + with pytest.raises(AgentInvokeException) as excinfo: + await azure_openai_assistant_agent._poll_run_status( + run=mock_run_required_action, thread_id="test_thread_id" + ) + + assert "Polling timed out" in str(excinfo.value) + assert f"after waiting {azure_openai_assistant_agent.polling_options.run_polling_timeout}" in str(excinfo.value) + + +async def test_poll_run_status_incomplete( + azure_openai_assistant_agent, mock_run_required_action, mock_run_incomplete, openai_unit_test_env +): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.assistants = MagicMock() + + mock_client.beta.threads.runs.retrieve = AsyncMock(return_value=mock_run_incomplete) + + run = await azure_openai_assistant_agent._poll_run_status( + run=mock_run_required_action, thread_id="test_thread_id" + ) + + assert run.status in azure_openai_assistant_agent.error_message_states + + +async def test_poll_run_status_cancelled( + azure_openai_assistant_agent, mock_run_required_action, mock_run_cancelled, openai_unit_test_env +): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.assistants = MagicMock() + + mock_client.beta.threads.runs.retrieve = AsyncMock(return_value=mock_run_cancelled) + + run = await azure_openai_assistant_agent._poll_run_status( + run=mock_run_required_action, thread_id="test_thread_id" + ) + + assert run.status in azure_openai_assistant_agent.error_message_states + + +async def test_poll_run_status_exception_polls_again( + azure_openai_assistant_agent, mock_run_required_action, mock_run_completed, openai_unit_test_env +): + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.assistants = MagicMock() + + mock_client.beta.threads.runs.retrieve = AsyncMock( + side_effect=[Exception("Failed to retrieve message"), mock_run_completed] + ) + + run = await azure_openai_assistant_agent._poll_run_status( + run=mock_run_required_action, thread_id="test_thread_id" + ) + assert run.status == "requires_action" + + +def test_generate_function_result_content( + azure_openai_assistant_agent, mock_function_call_content, openai_unit_test_env +): + mock_tool_call = RequiredActionFunctionToolCall( + id="tool_call_id", type="function", function=Function(arguments="{}", name="function_name", output="result") + ) + + message = generate_function_result_content( + agent_name="test", function_step=mock_function_call_content, tool_call=mock_tool_call + ) + assert message is not None + assert isinstance(message.items[0], FunctionResultContent) + + +def test_generate_function_call_content(azure_openai_assistant_agent, mock_function_call_content, openai_unit_test_env): + message = generate_function_call_content(agent_name="test", fccs=[mock_function_call_content]) + assert message is not None + assert 
isinstance(message, ChatMessageContent) + assert isinstance(message.items[0], FunctionCallContent) + + +def test_merge_options(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): + merged_options = azure_openai_assistant_agent._merge_options( + ai_model_id="model-id", + enable_json_response=True, + enable_code_interpreter=True, + enable_file_search=True, + max_completion_tokens=150, + parallel_tool_calls_enabled=True, + ) + + expected_options = { + "ai_model_id": "model-id", + "enable_code_interpreter": True, + "enable_file_search": True, + "enable_json_response": True, + "max_completion_tokens": 150, + "max_prompt_tokens": None, + "parallel_tool_calls_enabled": True, + "truncation_message_count": None, + "temperature": 0.7, + "top_p": 0.9, + "metadata": {}, + } + + assert merged_options == expected_options, f"Expected {expected_options}, but got {merged_options}" + + +def test_generate_options(azure_openai_assistant_agent: AzureAssistantAgent, openai_unit_test_env): + options = azure_openai_assistant_agent._generate_options( + ai_model_id="model-id", max_completion_tokens=150, metadata={"key1": "value1"} + ) + + expected_options = { + "max_completion_tokens": 150, + "max_prompt_tokens": None, + "model": "model-id", + "top_p": 0.9, + "response_format": None, + "temperature": 0.7, + "truncation_strategy": None, + "metadata": {"key1": "value1"}, + } + + assert options == expected_options, f"Expected {expected_options}, but got {options}" + + +def test_generate_function_call_content_sets_assistant_role(): + fcc1 = FunctionCallContent(name="function_name1", arguments={"input": "some input"}) + fcc2 = FunctionCallContent(name="function_name2", arguments={"input": "other input"}) + agent_name = "TestAgent" + + result = generate_function_call_content(agent_name=agent_name, fccs=[fcc1, fcc2]) + + assert result.role == AuthorRole.ASSISTANT + assert result.name == agent_name + assert len(result.items) == 2 + assert isinstance(result.items[0], FunctionCallContent) + assert isinstance(result.items[1], FunctionCallContent) + assert result.items[0].name == "function_name1" + assert result.items[1].name == "function_name2" + + +# endregion diff --git a/python/tests/unit/agents/openai_assistant/test_open_ai_assistant_channel.py b/python/tests/unit/agents/test_open_ai_assistant_channel.py similarity index 82% rename from python/tests/unit/agents/openai_assistant/test_open_ai_assistant_channel.py rename to python/tests/unit/agents/test_open_ai_assistant_channel.py index 64026abf4724..92f076de53da 100644 --- a/python/tests/unit/agents/openai_assistant/test_open_ai_assistant_channel.py +++ b/python/tests/unit/agents/test_open_ai_assistant_channel.py @@ -1,27 +1,25 @@ # Copyright (c) Microsoft. All rights reserved. 
-import json from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest from openai import AsyncOpenAI from openai.types.beta.assistant import Assistant, ToolResources, ToolResourcesCodeInterpreter, ToolResourcesFileSearch -from openai.types.beta.threads.file_citation_annotation import FileCitation, FileCitationAnnotation -from openai.types.beta.threads.file_path_annotation import FilePath, FilePathAnnotation +from openai.types.beta.threads.annotation import FileCitationAnnotation, FilePathAnnotation +from openai.types.beta.threads.file_citation_annotation import FileCitation +from openai.types.beta.threads.file_path_annotation import FilePath from openai.types.beta.threads.image_file import ImageFile from openai.types.beta.threads.image_file_content_block import ImageFileContentBlock from openai.types.beta.threads.text import Text from openai.types.beta.threads.text_content_block import TextContentBlock from semantic_kernel.agents.chat_completion.chat_completion_agent import ChatCompletionAgent -from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent +from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions.agent_exceptions import AgentChatException -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.kernel import Kernel @pytest.fixture @@ -50,7 +48,7 @@ def __init__(self, role, content, assistant_id=None): ), FileCitationAnnotation( type="file_citation", - file_citation=FileCitation(file_id="test_file_id"), + file_citation=FileCitation(file_id="test_file_id", quote="test quote"), text="Hello", start_index=0, end_index=5, @@ -76,22 +74,22 @@ def mock_assistant(): created_at=123456789, object="assistant", metadata={ - "__run_options": json.dumps({ + "__run_options": { "max_completion_tokens": 100, "max_prompt_tokens": 50, "parallel_tool_calls_enabled": True, "truncation_message_count": 10, - }) + } }, model="test_model", description="test_description", id="test_id", instructions="test_instructions", name="test_name", - tools=[{"type": "code_interpreter"}, {"type": "file_search"}], # type: ignore + tools=[{"type": "code_interpreter"}, {"type": "file_search"}], temperature=0.7, top_p=0.9, - response_format={"type": "json_object"}, # type: ignore + response_format={"type": "json_object"}, tool_resources=ToolResources( code_interpreter=ToolResourcesCodeInterpreter(file_ids=["file1", "file2"]), file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), @@ -111,38 +109,27 @@ async def test_receive_messages(): ] with patch("semantic_kernel.agents.open_ai.assistant_content_generation.create_chat_message"): - await channel.receive(history) # type: ignore + await channel.receive(history) async def test_invoke_agent(): from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel - client = AsyncMock(spec=AsyncOpenAI) - definition = AsyncMock(spec=Assistant) - definition.id = "agent123" - definition.name = "agentName" - definition.description = "desc" - definition.instructions = "test agent" - agent = OpenAIAssistantAgent( - client=client, - definition=definition, - arguments=KernelArguments(test="test"), - 
kernel=AsyncMock(spec=Kernel), - ) - - channel = OpenAIAssistantChannel(client=client, thread_id="test_thread_id") + client = MagicMock(spec=AsyncOpenAI) + thread_id = "test_thread" + agent = MagicMock(spec=OpenAIAssistantBase) + agent._is_deleted = False + channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) async def mock_invoke_internal(*args, **kwargs): for _ in range(3): yield True, MagicMock(spec=ChatMessageContent) + agent._invoke_internal.side_effect = mock_invoke_internal + results = [] - with patch( - "semantic_kernel.agents.channels.open_ai_assistant_channel.AssistantThreadActions.invoke", - side_effect=mock_invoke_internal, - ): - async for is_visible, message in channel.invoke(agent): - results.append((is_visible, message)) + async for is_visible, message in channel.invoke(agent): + results.append((is_visible, message)) assert len(results) == 3 for is_visible, message in results: @@ -159,7 +146,7 @@ async def test_invoke_agent_invalid_instance_throws(): agent._is_deleted = False channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) - with pytest.raises(AgentChatException, match=f"Agent is not of the expected type {type(OpenAIAssistantAgent)}."): + with pytest.raises(AgentChatException, match=f"Agent is not of the expected type {type(OpenAIAssistantBase)}."): async for _, _ in channel.invoke(agent): pass @@ -167,20 +154,11 @@ async def test_invoke_agent_invalid_instance_throws(): async def test_invoke_streaming_agent(): from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel - client = AsyncMock(spec=AsyncOpenAI) - definition = AsyncMock(spec=Assistant) - definition.id = "agent123" - definition.name = "agentName" - definition.description = "desc" - definition.instructions = "test agent" - agent = OpenAIAssistantAgent( - client=client, - definition=definition, - arguments=KernelArguments(test="test"), - kernel=AsyncMock(spec=Kernel), - ) - - channel = OpenAIAssistantChannel(client=client, thread_id="test_thread_id") + client = MagicMock(spec=AsyncOpenAI) + thread_id = "test_thread" + agent = MagicMock(spec=OpenAIAssistantBase) + agent._is_deleted = False + channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) results = [] @@ -190,12 +168,10 @@ async def mock_invoke_internal(*args, **kwargs): yield msg results.append(msg) - with patch( - "semantic_kernel.agents.channels.open_ai_assistant_channel.AssistantThreadActions.invoke_stream", - side_effect=mock_invoke_internal, - ): - async for message in channel.invoke_stream(agent, results): - assert message is not None + agent._invoke_internal_stream.side_effect = mock_invoke_internal + + async for message in channel.invoke_stream(agent, results): + assert message is not None assert len(results) == 3 for message in results: @@ -211,7 +187,35 @@ async def test_invoke_streaming_agent_invalid_instance_throws(): agent._is_deleted = False channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) - with pytest.raises(AgentChatException, match=f"Agent is not of the expected type {type(OpenAIAssistantAgent)}."): + with pytest.raises(AgentChatException, match=f"Agent is not of the expected type {type(OpenAIAssistantBase)}."): + async for _ in channel.invoke_stream(agent, []): + pass + + +async def test_invoke_agent_deleted(): + from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel + + client = MagicMock(spec=AsyncOpenAI) + thread_id = "test_thread" + agent = MagicMock(spec=OpenAIAssistantBase) + agent._is_deleted 
= True + channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) + + with pytest.raises(AgentChatException, match="Agent is deleted"): + async for _ in channel.invoke(agent): + pass + + +async def test_invoke_streaming_agent_deleted(): + from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel + + client = MagicMock(spec=AsyncOpenAI) + thread_id = "test_thread" + agent = MagicMock(spec=OpenAIAssistantBase) + agent._is_deleted = True + channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) + + with pytest.raises(AgentChatException, match="Agent is deleted"): async for _ in channel.invoke_stream(agent, []): pass diff --git a/python/tests/unit/agents/test_group_chat_strategies/test_sequential_strategy_selection.py b/python/tests/unit/agents/test_sequential_strategy_selection.py similarity index 82% rename from python/tests/unit/agents/test_group_chat_strategies/test_sequential_strategy_selection.py rename to python/tests/unit/agents/test_sequential_strategy_selection.py index 9df2214ebb3f..17754bd389fd 100644 --- a/python/tests/unit/agents/test_group_chat_strategies/test_sequential_strategy_selection.py +++ b/python/tests/unit/agents/test_sequential_strategy_selection.py @@ -1,13 +1,32 @@ # Copyright (c) Microsoft. All rights reserved. -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock import pytest from semantic_kernel.agents.agent import Agent +from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.agents.strategies.selection.sequential_selection_strategy import SequentialSelectionStrategy from semantic_kernel.exceptions.agent_exceptions import AgentExecutionException -from tests.unit.agents.test_agent import MockAgent + + +class MockAgent(Agent): + """A mock agent for testing purposes.""" + + def __init__(self, id: str = None, name: str = "TestAgent", description: str = "A test agent"): + args = { + "name": name, + "description": description, + } + if id is not None: + args["id"] = id + super().__init__(**args) + + def get_channel_keys(self) -> list[str]: + return ["key1", "key2"] + + async def create_channel(self) -> AgentChannel: + return AsyncMock(spec=AgentChannel) @pytest.fixture @@ -82,12 +101,10 @@ async def test_sequential_selection_avoid_selecting_same_agent_twice(): agent_0 = MagicMock(spec=Agent) agent_0.id = "agent-0" agent_0.name = "Agent0" - agent_0.plugins = [] agent_1 = MagicMock(spec=Agent) agent_1.id = "agent-1" agent_1.name = "Agent1" - agent_1.plugins = [] agents = [agent_0, agent_1] diff --git a/python/tests/unit/agents/test_group_chat_strategies/test_termination_strategy.py b/python/tests/unit/agents/test_termination_strategy.py similarity index 77% rename from python/tests/unit/agents/test_group_chat_strategies/test_termination_strategy.py rename to python/tests/unit/agents/test_termination_strategy.py index 6a49818fa199..6888745453a8 100644 --- a/python/tests/unit/agents/test_group_chat_strategies/test_termination_strategy.py +++ b/python/tests/unit/agents/test_termination_strategy.py @@ -1,13 +1,32 @@ # Copyright (c) Microsoft. All rights reserved. 
-from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock import pytest from semantic_kernel.agents import Agent +from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy from semantic_kernel.contents.chat_message_content import ChatMessageContent -from tests.unit.agents.test_agent import MockAgent + + +class MockAgent(Agent): + """A mock agent for testing purposes.""" + + def __init__(self, id: str = None, name: str = "TestAgent", description: str = "A test agent"): + args = { + "name": name, + "description": description, + } + if id is not None: + args["id"] = id + super().__init__(**args) + + def get_channel_keys(self) -> list[str]: + return ["key1", "key2"] + + async def create_channel(self) -> AgentChannel: + return AsyncMock(spec=AgentChannel) class TerminationStrategyTest(TerminationStrategy): diff --git a/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_chat_completion.py b/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_chat_completion.py index 8b9df3f6b77f..05fa5773729a 100644 --- a/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_chat_completion.py +++ b/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_chat_completion.py @@ -109,7 +109,6 @@ def test_prompt_execution_settings_class(azure_ai_inference_unit_test_env, model async def test_azure_ai_inference_chat_completion( mock_complete, azure_ai_inference_service, - model_id, chat_history: ChatHistory, mock_azure_ai_inference_chat_completion_response, ) -> None: @@ -124,7 +123,6 @@ async def test_azure_ai_inference_chat_completion( mock_complete.assert_awaited_once_with( messages=[UserMessage(content=user_message_content)], - model=model_id, model_extras=None, **settings.prepare_settings_dict(), ) @@ -142,7 +140,6 @@ async def test_azure_ai_inference_chat_completion( async def test_azure_ai_inference_chat_completion_with_standard_parameters( mock_complete, azure_ai_inference_service, - model_id, chat_history: ChatHistory, mock_azure_ai_inference_chat_completion_response, ) -> None: @@ -166,7 +163,6 @@ async def test_azure_ai_inference_chat_completion_with_standard_parameters( mock_complete.assert_awaited_once_with( messages=[UserMessage(content=user_message_content)], - model=model_id, model_extras=None, frequency_penalty=settings.frequency_penalty, max_tokens=settings.max_tokens, @@ -190,7 +186,6 @@ async def test_azure_ai_inference_chat_completion_with_standard_parameters( async def test_azure_ai_inference_chat_completion_with_extra_parameters( mock_complete, azure_ai_inference_service, - model_id, chat_history: ChatHistory, mock_azure_ai_inference_chat_completion_response, ) -> None: @@ -207,7 +202,6 @@ async def test_azure_ai_inference_chat_completion_with_extra_parameters( mock_complete.assert_awaited_once_with( messages=[UserMessage(content=user_message_content)], - model=model_id, model_extras=settings.extra_parameters, **settings.prepare_settings_dict(), ) @@ -302,7 +296,6 @@ async def test_azure_ai_inference_chat_completion_with_function_choice_behavior( async def test_azure_ai_inference_chat_completion_with_function_choice_behavior_no_tool_call( mock_complete, azure_ai_inference_service, - model_id, kernel, chat_history: ChatHistory, mock_azure_ai_inference_chat_completion_response, @@ -326,7 +319,6 @@ async def 
test_azure_ai_inference_chat_completion_with_function_choice_behavior_ mock_complete.assert_awaited_once_with( messages=[UserMessage(content=user_message_content)], - model=model_id, model_extras=None, **settings.prepare_settings_dict(), ) @@ -350,7 +342,6 @@ async def test_azure_ai_inference_chat_completion_with_function_choice_behavior_ async def test_azure_ai_inference_streaming_chat_completion( mock_complete, azure_ai_inference_service, - model_id, chat_history: ChatHistory, mock_azure_ai_inference_streaming_chat_completion_response, ) -> None: @@ -369,7 +360,6 @@ async def test_azure_ai_inference_streaming_chat_completion( mock_complete.assert_awaited_once_with( stream=True, messages=[UserMessage(content=user_message_content)], - model=model_id, model_extras=None, **settings.prepare_settings_dict(), ) @@ -384,7 +374,6 @@ async def test_azure_ai_inference_streaming_chat_completion( async def test_azure_ai_inference_chat_streaming_completion_with_standard_parameters( mock_complete, azure_ai_inference_service, - model_id, chat_history: ChatHistory, mock_azure_ai_inference_streaming_chat_completion_response, ) -> None: @@ -412,7 +401,6 @@ async def test_azure_ai_inference_chat_streaming_completion_with_standard_parame mock_complete.assert_awaited_once_with( stream=True, messages=[UserMessage(content=user_message_content)], - model=model_id, model_extras=None, frequency_penalty=settings.frequency_penalty, max_tokens=settings.max_tokens, @@ -433,7 +421,6 @@ async def test_azure_ai_inference_chat_streaming_completion_with_standard_parame async def test_azure_ai_inference_streaming_chat_completion_with_extra_parameters( mock_complete, azure_ai_inference_service, - model_id, chat_history: ChatHistory, mock_azure_ai_inference_streaming_chat_completion_response, ) -> None: @@ -454,7 +441,6 @@ async def test_azure_ai_inference_streaming_chat_completion_with_extra_parameter mock_complete.assert_awaited_once_with( stream=True, messages=[UserMessage(content=user_message_content)], - model=model_id, model_extras=settings.extra_parameters, **settings.prepare_settings_dict(), ) @@ -560,7 +546,6 @@ async def test_azure_ai_inference_streaming_chat_completion_with_function_choice async def test_azure_ai_inference_streaming_chat_completion_with_function_choice_behavior_no_tool_call( mock_complete, azure_ai_inference_service, - model_id, kernel, chat_history: ChatHistory, mock_azure_ai_inference_streaming_chat_completion_response, @@ -588,7 +573,6 @@ async def test_azure_ai_inference_streaming_chat_completion_with_function_choice mock_complete.assert_awaited_once_with( stream=True, messages=[UserMessage(content=user_message_content)], - model=model_id, model_extras=None, **settings.prepare_settings_dict(), ) diff --git a/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_text_embedding.py b/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_text_embedding.py index a0abdd573b9e..c17510fec13b 100644 --- a/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_text_embedding.py +++ b/python/tests/unit/connectors/ai/azure_ai_inference/services/test_azure_ai_inference_text_embedding.py @@ -83,7 +83,6 @@ def test_azure_ai_inference_text_embedding_init_with_empty_endpoint(azure_ai_inf async def test_azure_ai_inference_text_embedding( mock_embed, azure_ai_inference_service, - model_id, ) -> None: """Test text embedding generation of AzureAIInferenceTextEmbedding without settings""" texts = ["hello", "world"] @@ -91,7 
+90,6 @@ async def test_azure_ai_inference_text_embedding( mock_embed.assert_awaited_once_with( input=texts, - model=model_id, model_extras=None, dimensions=None, encoding_format=None, @@ -108,7 +106,6 @@ async def test_azure_ai_inference_text_embedding( async def test_azure_ai_inference_text_embedding_with_standard_settings( mock_embed, azure_ai_inference_service, - model_id, ) -> None: """Test text embedding generation of AzureAIInferenceTextEmbedding with standard settings""" texts = ["hello", "world"] @@ -119,7 +116,6 @@ async def test_azure_ai_inference_text_embedding_with_standard_settings( mock_embed.assert_awaited_once_with( input=texts, - model=model_id, model_extras=None, dimensions=settings.dimensions, encoding_format=settings.encoding_format, @@ -136,7 +132,6 @@ async def test_azure_ai_inference_text_embedding_with_standard_settings( async def test_azure_ai_inference_text_embedding_with_extra_parameters( mock_embed, azure_ai_inference_service, - model_id, ) -> None: """Test text embedding generation of AzureAIInferenceTextEmbedding with extra parameters""" texts = ["hello", "world"] @@ -146,7 +141,6 @@ async def test_azure_ai_inference_text_embedding_with_extra_parameters( mock_embed.assert_awaited_once_with( input=texts, - model=model_id, model_extras=extra_parameters, dimensions=settings.dimensions, encoding_format=settings.encoding_format, diff --git a/python/tests/unit/connectors/ai/bedrock/services/test_bedrock_model_provider_utils.py b/python/tests/unit/connectors/ai/bedrock/services/test_bedrock_model_provider_utils.py index 4a5728be554c..cfd588c74e61 100644 --- a/python/tests/unit/connectors/ai/bedrock/services/test_bedrock_model_provider_utils.py +++ b/python/tests/unit/connectors/ai/bedrock/services/test_bedrock_model_provider_utils.py @@ -1,11 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. 
-import pytest - from semantic_kernel.connectors.ai.bedrock.bedrock_prompt_execution_settings import BedrockChatPromptExecutionSettings -from semantic_kernel.connectors.ai.bedrock.services.model_provider.bedrock_model_provider import ( - BedrockModelProvider, -) from semantic_kernel.connectors.ai.bedrock.services.model_provider.utils import ( remove_none_recursively, update_settings_from_function_choice_configuration, @@ -129,20 +124,3 @@ def test_update_settings_from_function_choice_configuration_required_with_more_t assert "any" in settings.tool_choice assert len(settings.tools) == 2 - - -def test_inference_profile_with_bedrock_model() -> None: - """Test the BedrockModelProvider class returns the correct model for a given inference profile.""" - - us_amazon_inference_profile = "us.amazon.nova-lite-v1:0" - assert BedrockModelProvider.to_model_provider(us_amazon_inference_profile) == BedrockModelProvider.AMAZON - - us_anthropic_inference_profile = "us.anthropic.claude-3-sonnet-20240229-v1:0" - assert BedrockModelProvider.to_model_provider(us_anthropic_inference_profile) == BedrockModelProvider.ANTHROPIC - - eu_meta_inference_profile = "eu.meta.llama3-2-3b-instruct-v1:0" - assert BedrockModelProvider.to_model_provider(eu_meta_inference_profile) == BedrockModelProvider.META - - unknown_inference_profile = "unknown" - with pytest.raises(ValueError, match="Model ID unknown does not contain a valid model provider name."): - BedrockModelProvider.to_model_provider(unknown_inference_profile) diff --git a/python/tests/unit/connectors/ai/hugging_face/test_hf_text_completions.py b/python/tests/unit/connectors/ai/hugging_face/test_hf_text_completions.py index aa2a11ea0556..ddf8d48caa43 100644 --- a/python/tests/unit/connectors/ai/hugging_face/test_hf_text_completions.py +++ b/python/tests/unit/connectors/ai/hugging_face/test_hf_text_completions.py @@ -4,7 +4,7 @@ from unittest.mock import MagicMock, Mock, patch import pytest -from transformers import AutoTokenizer, TextIteratorStreamer +from transformers import TextIteratorStreamer from semantic_kernel.connectors.ai.hugging_face.services.hf_text_completion import HuggingFaceTextCompletion from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings @@ -135,10 +135,6 @@ async def test_text_completion_streaming(model_name, task, input_str): "semantic_kernel.connectors.ai.hugging_face.services.hf_text_completion.Thread", side_effect=Mock(spec=Thread), ), - patch( - "semantic_kernel.connectors.ai.hugging_face.services.hf_text_completion.AutoTokenizer", - side_effect=Mock(spec=AutoTokenizer), - ), patch( "semantic_kernel.connectors.ai.hugging_face.services.hf_text_completion.TextIteratorStreamer", return_value=mock_streamer, diff --git a/python/tests/unit/connectors/ai/onnx/services/test_onnx_chat_completion.py b/python/tests/unit/connectors/ai/onnx/services/test_onnx_chat_completion.py index 991ec195fd1e..30c9573fef6c 100644 --- a/python/tests/unit/connectors/ai/onnx/services/test_onnx_chat_completion.py +++ b/python/tests/unit/connectors/ai/onnx/services/test_onnx_chat_completion.py @@ -11,15 +11,6 @@ from semantic_kernel.kernel import Kernel from tests.unit.connectors.ai.onnx.conftest import gen_ai_config, gen_ai_config_vision -try: - import onnxruntime_genai # noqa: F401 - - ready = True -except ImportError: - ready = False - -pytestmark = pytest.mark.skipif(not ready, reason="ONNX Runtime is not installed.") - @patch("builtins.open", new_callable=mock_open, read_data=json.dumps(gen_ai_config)) 
@patch("onnxruntime_genai.Model") diff --git a/python/tests/unit/connectors/ai/onnx/services/test_onnx_text_completion.py b/python/tests/unit/connectors/ai/onnx/services/test_onnx_text_completion.py index 506ccaa175f4..09435f02667f 100644 --- a/python/tests/unit/connectors/ai/onnx/services/test_onnx_text_completion.py +++ b/python/tests/unit/connectors/ai/onnx/services/test_onnx_text_completion.py @@ -12,15 +12,6 @@ from semantic_kernel.exceptions import ServiceInitializationError from tests.unit.connectors.ai.onnx.conftest import gen_ai_config -try: - import onnxruntime_genai # noqa: F401 - - ready = True -except ImportError: - ready = False - -pytestmark = pytest.mark.skipif(not ready, reason="ONNX Runtime is not installed.") - @patch("builtins.open", new_callable=mock_open, read_data=json.dumps(gen_ai_config)) @patch("onnxruntime_genai.Model") diff --git a/python/tests/unit/connectors/ai/open_ai/services/test_azure_chat_completion.py b/python/tests/unit/connectors/ai/open_ai/services/test_azure_chat_completion.py index 1b7d0c13bafe..c512a38f1b10 100644 --- a/python/tests/unit/connectors/ai/open_ai/services/test_azure_chat_completion.py +++ b/python/tests/unit/connectors/ai/open_ai/services/test_azure_chat_completion.py @@ -669,6 +669,7 @@ def test_tool(self, key: str): model=azure_openai_unit_test_env["AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"], stream=False, messages=azure_chat_completion._prepare_chat_history_for_request(orig_chat_history), + parallel_tool_calls=True, tools=[ { "type": "function", diff --git a/python/tests/unit/connectors/ai/open_ai/services/test_openai_chat_completion_base.py b/python/tests/unit/connectors/ai/open_ai/services/test_openai_chat_completion_base.py index 76de816fc65f..c0b1000ae159 100644 --- a/python/tests/unit/connectors/ai/open_ai/services/test_openai_chat_completion_base.py +++ b/python/tests/unit/connectors/ai/open_ai/services/test_openai_chat_completion_base.py @@ -265,6 +265,7 @@ def test_tool(self, key: str): model=openai_unit_test_env["OPENAI_CHAT_MODEL_ID"], stream=False, messages=openai_chat_completion._prepare_chat_history_for_request(orig_chat_history), + parallel_tool_calls=True, tools=[ { "type": "function", @@ -756,6 +757,7 @@ def test_tool(self, key: str): mock_create.assert_awaited_once_with( model=openai_unit_test_env["OPENAI_CHAT_MODEL_ID"], stream=True, + parallel_tool_calls=True, tools=[ { "type": "function", diff --git a/python/tests/unit/connectors/ai/open_ai/services/test_openai_realtime.py b/python/tests/unit/connectors/ai/open_ai/services/test_openai_realtime.py deleted file mode 100644 index a341f2bb5c4c..000000000000 --- a/python/tests/unit/connectors/ai/open_ai/services/test_openai_realtime.py +++ /dev/null @@ -1,656 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import asyncio -from collections.abc import AsyncIterable -from typing import Any -from unittest.mock import AsyncMock, patch - -from aiortc import AudioStreamTrack -from openai.resources.beta.realtime.realtime import AsyncRealtimeConnection, AsyncRealtimeConnectionManager -from openai.types.beta.realtime import ( - ConversationItem, - ConversationItemContent, - ConversationItemCreatedEvent, - ConversationItemCreateEvent, - ConversationItemDeletedEvent, - ConversationItemDeleteEvent, - ConversationItemTruncatedEvent, - ConversationItemTruncateEvent, - ErrorEvent, - InputAudioBufferAppendEvent, - InputAudioBufferClearedEvent, - InputAudioBufferClearEvent, - InputAudioBufferCommitEvent, - InputAudioBufferCommittedEvent, - InputAudioBufferSpeechStartedEvent, - RealtimeResponse, - RealtimeServerEvent, - ResponseAudioDeltaEvent, - ResponseAudioDoneEvent, - ResponseAudioTranscriptDeltaEvent, - ResponseCancelEvent, - ResponseCreatedEvent, - ResponseCreateEvent, - ResponseFunctionCallArgumentsDeltaEvent, - ResponseFunctionCallArgumentsDoneEvent, - ResponseOutputItemAddedEvent, - Session, - SessionCreatedEvent, - SessionUpdatedEvent, - SessionUpdateEvent, -) -from pytest import fixture, mark, param, raises - -from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior -from semantic_kernel.connectors.ai.function_choice_type import FunctionChoiceType -from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_realtime_execution_settings import ( - OpenAIRealtimeExecutionSettings, -) -from semantic_kernel.connectors.ai.open_ai.services.open_ai_realtime import ( - ListenEvents, - OpenAIRealtimeWebRTC, - OpenAIRealtimeWebsocket, - SendEvents, - _create_openai_realtime_client_event, - update_settings_from_function_call_configuration, -) -from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -from semantic_kernel.contents.audio_content import AudioContent -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.contents.image_content import ImageContent -from semantic_kernel.contents.realtime_events import ( - RealtimeAudioEvent, - RealtimeEvent, - RealtimeFunctionCallEvent, - RealtimeFunctionResultEvent, - RealtimeTextEvent, -) -from semantic_kernel.contents.text_content import TextContent -from semantic_kernel.exceptions.content_exceptions import ContentException -from semantic_kernel.functions import kernel_function -from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata - -events = [ - SessionCreatedEvent(type=ListenEvents.SESSION_CREATED, session=Session(id="session_id"), event_id="1"), - SessionUpdatedEvent(type=ListenEvents.SESSION_UPDATED, session=Session(id="session_id"), event_id="2"), - ConversationItemCreatedEvent( - type=ListenEvents.CONVERSATION_ITEM_CREATED, - item=ConversationItem(id="item_id"), - event_id="3", - previous_item_id="2", - ), - ConversationItemDeletedEvent(type=ListenEvents.CONVERSATION_ITEM_DELETED, item_id="item_id", event_id="4"), - ConversationItemTruncatedEvent( - type=ListenEvents.CONVERSATION_ITEM_TRUNCATED, event_id="5", audio_end_ms=0, content_index=0, item_id="item_id" - ), 
- InputAudioBufferClearedEvent(type=ListenEvents.INPUT_AUDIO_BUFFER_CLEARED, event_id="7"), - InputAudioBufferCommittedEvent( - type=ListenEvents.INPUT_AUDIO_BUFFER_COMMITTED, - event_id="8", - item_id="item_id", - previous_item_id="previous_item_id", - ), - ResponseCreatedEvent(type=ListenEvents.RESPONSE_CREATED, event_id="10", response=RealtimeResponse()), - ResponseFunctionCallArgumentsDoneEvent( - type=ListenEvents.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE, - event_id="11", - arguments="{}", - call_id="call_id", - item_id="item_id", - output_index=0, - response_id="response_id", - ), - ResponseAudioTranscriptDeltaEvent( - type=ListenEvents.RESPONSE_AUDIO_TRANSCRIPT_DELTA, - event_id="12", - content_index=0, - delta="text", - item_id="item_id", - output_index=0, - response_id="response_id", - ), - ResponseAudioDoneEvent( - type=ListenEvents.RESPONSE_AUDIO_DONE, - event_id="13", - item_id="item_id", - output_index=0, - response_id="response_id", - content_index=0, - ), - ResponseAudioDeltaEvent( - type=ListenEvents.RESPONSE_AUDIO_DELTA, - event_id="14", - item_id="item_id", - output_index=0, - response_id="response_id", - content_index=0, - delta="audio data", - ), -] - - -async def websocket_stream(**kwargs) -> AsyncIterable[RealtimeServerEvent]: - for event in events: - yield event - await asyncio.sleep(0) - - -@fixture -def audio_track(): - class AudioTrack(AudioStreamTrack): - kind = "audio" - - async def recv(self): - await asyncio.sleep(0) - return - - return AudioTrack() - - -@fixture -def OpenAIWebsocket(openai_unit_test_env): - client = OpenAIRealtimeWebsocket() - client._call_id_to_function_map["call_id"] = "function_name" - return client - - -@fixture -def OpenAIWebRTC(openai_unit_test_env, audio_track): - client = OpenAIRealtimeWebRTC(audio_track=audio_track) - client._call_id_to_function_map["call_id"] = "function_name" - return client - - -def test_update_settings_from_function_call_config(): - config = FunctionCallChoiceConfiguration( - available_functions=[ - KernelFunctionMetadata(name="function_name", description="function_description", is_prompt=False) - ] - ) - - settings = OpenAIRealtimeExecutionSettings() - - update_settings_from_function_call_configuration(config, settings, FunctionChoiceType.AUTO) - - assert len(settings.tools) == 1 - assert settings.tools[0]["type"] == "function" - assert settings.tools[0]["name"] == "function_name" - assert settings.tools[0]["description"] == "function_description" - assert settings.tool_choice == FunctionChoiceType.AUTO.value - - -def test_openai_realtime_websocket(openai_unit_test_env): - realtime_client = OpenAIRealtimeWebsocket() - assert realtime_client is not None - - -def test_openai_realtime_webrtc(openai_unit_test_env, audio_track): - realtime_client = OpenAIRealtimeWebRTC(audio_track=audio_track) - assert realtime_client is not None - - -@mark.parametrize( - ["event_type", "event_kwargs", "expected_event", "expected_exception"], - [ - param( - SendEvents.SESSION_UPDATE, - {"session": {"id": "session_id"}}, - SessionUpdateEvent, - None, - id="session_update", - ), - param( - SendEvents.SESSION_UPDATE, - {}, - SessionUpdateEvent, - ContentException, - id="session_update_missing", - ), - param( - SendEvents.INPUT_AUDIO_BUFFER_APPEND, - {"audio": "audio_buffer_as_string"}, - InputAudioBufferAppendEvent, - None, - id="input_audio_buffer_append", - ), - param( - SendEvents.INPUT_AUDIO_BUFFER_APPEND, - {}, - InputAudioBufferAppendEvent, - ContentException, - id="input_audio_buffer_append_missing_audio", - ), - param( - 
SendEvents.INPUT_AUDIO_BUFFER_COMMIT, - {}, - InputAudioBufferCommitEvent, - None, - id="input_audio_buffer_commit", - ), - param( - SendEvents.INPUT_AUDIO_BUFFER_CLEAR, - {}, - InputAudioBufferClearEvent, - None, - id="input_audio_buffer_Clear", - ), - param( - SendEvents.CONVERSATION_ITEM_CREATE, - { - "event_id": "event_id", - "previous_item_id": "previous_item_id", - "item": {"id": "item_id"}, - }, - ConversationItemCreateEvent, - None, - id="conversation_item_create_event", - ), - param( - SendEvents.CONVERSATION_ITEM_CREATE, - {}, - ConversationItemCreateEvent, - ContentException, - id="conversation_item_create_event_no_item", - ), - param( - SendEvents.CONVERSATION_ITEM_TRUNCATE, - {"audio_end_ms": 1000, "item_id": "item_id"}, - ConversationItemTruncateEvent, - None, - id="conversation_item_truncate", - ), - param( - SendEvents.CONVERSATION_ITEM_DELETE, - {"item_id": "item_id"}, - ConversationItemDeleteEvent, - None, - id="conversation_item_delete", - ), - param( - SendEvents.CONVERSATION_ITEM_DELETE, - {}, - ConversationItemDeleteEvent, - ContentException, - id="conversation_item_delete_fail", - ), - param( - SendEvents.RESPONSE_CREATE, - {"response": {"instructions": "instructions"}}, - ResponseCreateEvent, - None, - id="response_create", - ), - param( - SendEvents.RESPONSE_CANCEL, - {}, - ResponseCancelEvent, - None, - id="response_cancel", - ), - ], -) -def test_create_openai_realtime_event( - event_type: SendEvents, event_kwargs: dict[str, Any], expected_event: Any, expected_exception: Exception | None -): - if expected_exception: - with raises(expected_exception): - _create_openai_realtime_client_event(event_type, **event_kwargs) - else: - event = _create_openai_realtime_client_event(event_type, **event_kwargs) - assert isinstance(event, expected_event) - - -@mark.parametrize( - ["event", "expected_type"], - [ - param( - ResponseAudioTranscriptDeltaEvent( - content_index=0, - delta="text", - item_id="item_id", - event_id="event_id", - output_index=0, - response_id="response_id", - type="response.audio_transcript.delta", - ), - [RealtimeTextEvent], - id="response_audio_transcript_delta", - ), - param( - ResponseOutputItemAddedEvent( - item=ConversationItem(id="item_id"), - event_id="event_id", - output_index=0, - response_id="response_id", - type="response.output_item.added", - ), - [RealtimeEvent], - id="response_output_item_added", - ), - param( - ResponseOutputItemAddedEvent( - item=ConversationItem(id="item_id", type="function_call", call_id="call_id", name="function_to_call"), - event_id="event_id", - output_index=0, - response_id="response_id", - type="response.output_item.added", - ), - [RealtimeEvent], - id="response_output_item_added_function_call", - ), - param( - ResponseFunctionCallArgumentsDeltaEvent( - call_id="call_id", - delta="argument delta", - event_id="event_id", - output_index=0, - item_id="item_id", - response_id="response_id", - type="response.function_call_arguments.delta", - ), - [RealtimeFunctionCallEvent], - id="response_function_call_arguments_delta", - ), - param( - ResponseFunctionCallArgumentsDoneEvent( - call_id="call_id", - arguments="argument delta", - event_id="event_id", - output_index=0, - item_id="item_id", - response_id="response_id", - type="response.function_call_arguments.done", - ), - [RealtimeEvent], - id="response_function_call_arguments_done_no_kernel", - ), - param( - ErrorEvent( - error={"code": "error_code", "message": "error_message", "type": "invalid_request_error"}, - event_id="event_id", - type="error", - ), - 
[RealtimeEvent], - id="error", - ), - param( - SessionCreatedEvent( - session=Session(id="session_id"), - event_id="event_id", - type="session.created", - ), - [RealtimeEvent], - id="session_created", - ), - param( - SessionUpdatedEvent( - session=Session(id="session_id"), - event_id="event_id", - type="session.updated", - ), - [RealtimeEvent], - id="session_updated", - ), - param( - InputAudioBufferSpeechStartedEvent( - audio_start_ms=0, - event_id="event_id", - item_id="item_id", - type="input_audio_buffer.speech_started", - ), - [RealtimeEvent], - id="other", - ), - ], -) -async def test_parse_event(OpenAIWebsocket, event: RealtimeServerEvent, expected_type: list[type]): - iter = 0 - async for result in OpenAIWebsocket._parse_event(event): - assert isinstance(result, expected_type[iter]) - iter += 1 - - -async def test_update_session(OpenAIWebsocket, kernel): - chat_history = ChatHistory( - messages=[ - ChatMessageContent(role="user", content="Hello"), - ChatMessageContent( - role="assistant", - items=[ - FunctionCallContent( - function_name="function_name", plugin_name="plugin", arguments={"arg1": "value"}, id="1" - ) - ], - ), - ChatMessageContent( - role="tool", - items=[ - FunctionResultContent(function_name="function_name", plugin_name="plugin", result="result", id="1") - ], - ), - ChatMessageContent( - role="user", - items=[ - TextContent(text="Hello again"), - ImageContent(uri="https://example.com/image.png"), - ], - ), - ] - ) - settings = OpenAIRealtimeExecutionSettings(instructions="instructions", ai_model_id="gpt-4o-realtime-preview") - with patch.object(OpenAIWebsocket, "_send") as mock_send: - await OpenAIWebsocket.update_session( - chat_history=chat_history, settings=settings, create_response=True, kernel=kernel - ) - mock_send.assert_awaited() - # session update, 4 conversation item create events, response create - # images are not supported, so ignored - assert len(mock_send.await_args_list) == 6 - assert OpenAIWebsocket._current_settings == settings - assert OpenAIWebsocket.kernel == kernel - - -async def test_parse_function_call_arguments_done(OpenAIWebsocket, kernel): - func_result = "result" - event = ResponseFunctionCallArgumentsDoneEvent( - call_id="call_id", - arguments='{"x": "' + func_result + '"}', - event_id="event_id", - output_index=0, - item_id="item_id", - response_id="response_id", - type="response.function_call_arguments.done", - ) - response_events = [RealtimeFunctionCallEvent, RealtimeFunctionResultEvent] - OpenAIWebsocket._current_settings = OpenAIRealtimeExecutionSettings( - instructions="instructions", ai_model_id="gpt-4o-realtime-preview" - ) - OpenAIWebsocket._current_settings.function_choice_behavior = FunctionChoiceBehavior.Auto() - OpenAIWebsocket._call_id_to_function_map["call_id"] = "plugin_name-function_name" - func = kernel_function(name="function_name", description="function_description")(lambda x: x) - kernel.add_function(plugin_name="plugin_name", function_name="function_name", function=func) - OpenAIWebsocket.kernel = kernel - iter = 0 - with patch.object(OpenAIWebsocket, "_send") as mock_send: - async for event in OpenAIWebsocket._parse_function_call_arguments_done(event): - assert isinstance(event, response_events[iter]) - iter += 1 - mock_send.assert_awaited() - assert len(mock_send.await_args_list) == 2 - mock_send.assert_any_await( - ConversationItemCreateEvent( - type="conversation.item.create", - item=ConversationItem( - type="function_call_output", - output=func_result, - call_id="call_id", - ), - ) - ) - - -async def 
test_parse_function_call_arguments_done_fail(OpenAIWebsocket, kernel): - func_result = "result" - event = ResponseFunctionCallArgumentsDoneEvent( - call_id="call_id", - arguments='{"x": "' + func_result + '"}', - event_id="event_id", - output_index=0, - item_id="item_id", - response_id="response_id", - type="response.function_call_arguments.done", - ) - response_events = [RealtimeEvent] - OpenAIWebsocket._current_settings = OpenAIRealtimeExecutionSettings( - instructions="instructions", ai_model_id="gpt-4o-realtime-preview" - ) - OpenAIWebsocket._current_settings.function_choice_behavior = FunctionChoiceBehavior.Auto() - # This function name is invalid - OpenAIWebsocket._call_id_to_function_map["call_id"] = "function_name" - func = kernel_function(name="function_name", description="function_description")(lambda x: x) - kernel.add_function(plugin_name="plugin_name", function_name="function_name", function=func) - OpenAIWebsocket.kernel = kernel - iter = 0 - async for event in OpenAIWebsocket._parse_function_call_arguments_done(event): - assert isinstance(event, response_events[iter]) - iter += 1 - - -async def test_send_audio(OpenAIWebsocket): - audio_event = RealtimeAudioEvent( - audio=AudioContent(data=b"audio data", mime_type="audio/wav"), - ) - with patch.object(OpenAIWebsocket, "_send") as mock_send: - await OpenAIWebsocket.send(audio_event) - mock_send.assert_awaited() - assert len(mock_send.await_args_list) == 1 - mock_send.assert_any_await( - InputAudioBufferAppendEvent( - audio="audio data", - type="input_audio_buffer.append", - ) - ) - - -@mark.parametrize("client", ["OpenAIWebRTC", "OpenAIWebsocket"]) -async def test_send_session_update(client, OpenAIWebRTC, OpenAIWebsocket): - openai_client = OpenAIWebRTC if client == "OpenAIWebRTC" else OpenAIWebsocket - settings = PromptExecutionSettings(ai_model_id="gpt-4o-realtime-preview") - session_event = RealtimeEvent( - service_type=SendEvents.SESSION_UPDATE, - service_event={"settings": settings}, - ) - with patch.object(openai_client, "_send") as mock_send: - await openai_client.send(event=session_event) - mock_send.assert_awaited() - assert len(mock_send.await_args_list) == 1 - mock_send.assert_any_await( - SessionUpdateEvent( - session={"model": "gpt-4o-realtime-preview"}, - type="session.update", - ) - ) - - -@mark.parametrize("client", ["OpenAIWebRTC", "OpenAIWebsocket"]) -async def test_send_conversation_item_create(client, OpenAIWebRTC, OpenAIWebsocket): - openai_client = OpenAIWebRTC if client == "OpenAIWebRTC" else OpenAIWebsocket - event = RealtimeEvent( - service_type=SendEvents.CONVERSATION_ITEM_CREATE, - service_event={ - "item": ChatMessageContent( - role="user", - items=[ - TextContent(text="Hello"), - FunctionCallContent( - function_name="function_name", - plugin_name="plugin", - arguments={"arg1": "value"}, - id="1", - metadata={"call_id": "call_id"}, - ), - FunctionResultContent( - function_name="function_name", - plugin_name="plugin", - result="result", - id="1", - metadata={"call_id": "call_id"}, - ), - ], - ) - }, - ) - - with patch.object(openai_client, "_send") as mock_send: - await openai_client.send(event=event) - mock_send.assert_awaited() - assert len(mock_send.await_args_list) == 3 - mock_send.assert_any_await( - ConversationItemCreateEvent( - item=ConversationItem( - content=[ConversationItemContent(text="Hello", type="input_text")], - role="user", - type="message", - ), - type="conversation.item.create", - ) - ) - mock_send.assert_any_await( - ConversationItemCreateEvent( - item=ConversationItem( - 
arguments='{"arg1": "value"}', - call_id="call_id", - name="plugin-function_name", - type="function_call", - ), - type="conversation.item.create", - ) - ) - mock_send.assert_any_await( - ConversationItemCreateEvent( - item=ConversationItem( - call_id="call_id", - output="result", - type="function_call_output", - ), - type="conversation.item.create", - ) - ) - - -async def test_receive_websocket(OpenAIWebsocket): - connection_mock = AsyncMock(spec=AsyncRealtimeConnection) - connection_mock.recv = websocket_stream - - manager = AsyncMock(spec=AsyncRealtimeConnectionManager) - manager.enter.return_value = connection_mock - - with patch("openai.resources.beta.realtime.realtime.AsyncRealtime.connect") as mock_connect: - mock_connect.return_value = manager - async with OpenAIWebsocket(): - async for msg in OpenAIWebsocket.receive(): - assert isinstance(msg, RealtimeEvent) - - -async def test_receive_webrtc(OpenAIWebRTC): - counter = len(events) - with patch.object(OpenAIRealtimeWebRTC, "create_session"): - recv_task = asyncio.create_task(_stream_to_webrtc(OpenAIWebRTC)) - async with OpenAIWebRTC(): - async for msg in OpenAIWebRTC.receive(): - assert isinstance(msg, RealtimeEvent) - counter -= 1 - if counter == 0: - break - recv_task.cancel() - - -async def _stream_to_webrtc(client: OpenAIRealtimeWebRTC): - async for msg in websocket_stream(): - async for parsed_msg in client._parse_event(msg): - await client._receive_buffer.put(parsed_msg) - await asyncio.sleep(0) diff --git a/python/tests/unit/connectors/ai/open_ai/test_openai_request_settings.py b/python/tests/unit/connectors/ai/open_ai/test_openai_request_settings.py index 8d4e9e4cea4b..25cb379bff12 100644 --- a/python/tests/unit/connectors/ai/open_ai/test_openai_request_settings.py +++ b/python/tests/unit/connectors/ai/open_ai/test_openai_request_settings.py @@ -311,20 +311,6 @@ def test_azure_open_ai_chat_prompt_execution_settings_with_aisearch_data_sources pytest.param({"type": "access_token"}, marks=pytest.mark.xfail), pytest.param({"type": "invalid"}, marks=pytest.mark.xfail), ], - ids=[ - "APIKey", - "api_key", - "api_key_no_key", - "SystemAssignedManagedIdentity", - "system_assigned_managed_identity", - "UserAssignedManagedIdentity", - "user_assigned_managed_identity", - "user_assigned_managed_identity_no_id", - "AccessToken", - "access_token", - "access_token_no_token", - "invalid", - ], ) def test_aisearch_data_source_parameters(authentication) -> None: AzureAISearchDataSourceParameters(index_name="test_index", authentication=authentication) diff --git a/python/tests/unit/connectors/memory/chroma/test_chroma.py b/python/tests/unit/connectors/memory/chroma/test_chroma.py deleted file mode 100644 index 2fb29afd1e40..000000000000 --- a/python/tests/unit/connectors/memory/chroma/test_chroma.py +++ /dev/null @@ -1,134 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from unittest.mock import MagicMock - -import pytest -from chromadb.api import ClientAPI - -from semantic_kernel.connectors.memory.chroma.chroma import ChromaCollection, ChromaStore -from semantic_kernel.data.vector_search.vector_search_filter import VectorSearchFilter -from semantic_kernel.data.vector_search.vector_search_options import VectorSearchOptions - - -@pytest.fixture -def mock_client(): - return MagicMock(spec=ClientAPI) - - -@pytest.fixture -def chroma_collection(mock_client, data_model_definition): - return ChromaCollection( - collection_name="test_collection", - data_model_type=dict, - data_model_definition=data_model_definition, - client=mock_client, - ) - - -@pytest.fixture -def chroma_store(mock_client): - return ChromaStore(client=mock_client) - - -def test_chroma_collection_initialization(chroma_collection): - assert chroma_collection.collection_name == "test_collection" - assert chroma_collection.data_model_type is dict - - -def test_chroma_store_initialization(chroma_store): - assert chroma_store.client is not None - - -def test_chroma_collection_get_collection(chroma_collection, mock_client): - mock_client.get_collection.return_value = "mock_collection" - collection = chroma_collection._get_collection() - assert collection == "mock_collection" - - -def test_chroma_store_get_collection(chroma_store, mock_client, data_model_definition): - collection = chroma_store.get_collection( - collection_name="test_collection", data_model_type=dict, data_model_definition=data_model_definition - ) - assert collection is not None - assert isinstance(collection, ChromaCollection) - - -async def test_chroma_collection_does_collection_exist(chroma_collection, mock_client): - mock_client.get_collection.return_value = "mock_collection" - exists = await chroma_collection.does_collection_exist() - assert exists - - -async def test_chroma_store_list_collection_names(chroma_store, mock_client): - mock_client.list_collections.return_value = ["collection1", "collection2"] - collections = await chroma_store.list_collection_names() - assert collections == ["collection1", "collection2"] - - -async def test_chroma_collection_create_collection(chroma_collection, mock_client): - await chroma_collection.create_collection() - mock_client.create_collection.assert_called_once_with(name="test_collection", metadata={"hnsw:space": "cosine"}) - - -async def test_chroma_collection_delete_collection(chroma_collection, mock_client): - await chroma_collection.delete_collection() - mock_client.delete_collection.assert_called_once_with(name="test_collection") - - -async def test_chroma_collection_upsert(chroma_collection, mock_client): - records = [{"id": "1", "vector": [0.1, 0.2, 0.3, 0.4, 0.5], "content": "test document"}] - ids = await chroma_collection.upsert(records) - assert ids == "1" - mock_client.get_collection().add.assert_called_once() - - -async def test_chroma_collection_get(chroma_collection, mock_client): - mock_client.get_collection().get.return_value = { - "ids": [["1"]], - "documents": [["test document"]], - "embeddings": [[[0.1, 0.2, 0.3, 0.4, 0.5]]], - "metadatas": [[{}]], - } - records = await chroma_collection._inner_get(["1"]) - assert len(records) == 1 - assert records[0]["id"] == "1" - - -async def test_chroma_collection_delete(chroma_collection, mock_client): - await chroma_collection._inner_delete(["1"]) - mock_client.get_collection().delete.assert_called_once_with(ids=["1"]) - - -async def test_chroma_collection_search(chroma_collection, mock_client): - options = 
VectorSearchOptions(top=1, include_vectors=True) - mock_client.get_collection().query.return_value = { - "ids": [["1"]], - "documents": [["test document"]], - "embeddings": [[[0.1, 0.2, 0.3, 0.4, 0.5]]], - "metadatas": [[{}]], - "distances": [[0.1]], - } - results = await chroma_collection.vectorized_search(options=options, vector=[0.1, 0.2, 0.3, 0.4, 0.5]) - async for res in results.results: - assert res.record["id"] == "1" - assert res.score == 0.1 - - -@pytest.mark.parametrize( - "filter_expression, expected", - [ - pytest.param( - VectorSearchFilter.equal_to("field1", "value1"), {"field1": {"$eq": "value1"}}, id="single_filter" - ), - pytest.param(VectorSearchFilter(), None, id="empty_filter"), - pytest.param( - VectorSearchFilter.equal_to("field1", "value1").any_tag_equal_to("field2", ["value2", "value3"]), - {"$and": [{"field1": {"$eq": "value1"}}, {"field2": {"$in": ["value2", "value3"]}}]}, - id="multiple_filters", - ), - ], -) -def test_chroma_collection_parse_filter(chroma_collection, filter_expression, expected): - options = VectorSearchOptions(top=1, include_vectors=True, filter=filter_expression) - filter_expression = chroma_collection._parse_filter(options) - assert filter_expression == expected diff --git a/python/tests/unit/connectors/memory/mongodb_atlas/conftest.py b/python/tests/unit/connectors/memory/mongodb_atlas/conftest.py deleted file mode 100644 index 23f637104710..000000000000 --- a/python/tests/unit/connectors/memory/mongodb_atlas/conftest.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - - -from unittest.mock import patch - -import pytest -from pymongo import AsyncMongoClient -from pymongo.asynchronous.collection import AsyncCollection -from pymongo.asynchronous.database import AsyncDatabase - -BASE_PATH = "pymongo.asynchronous.mongo_client.AsyncMongoClient" -DATABASE_PATH = "pymongo.asynchronous.database.AsyncDatabase" -COLLECTION_PATH = "pymongo.asynchronous.collection.AsyncCollection" - - -@pytest.fixture(autouse=True) -def mock_mongo_client(): - with patch(BASE_PATH, spec=AsyncMongoClient) as mock: - yield mock - - -@pytest.fixture(autouse=True) -def mock_get_database(mock_mongo_client): - with ( - patch(DATABASE_PATH, spec=AsyncDatabase) as mock_db, - patch.object(mock_mongo_client, "get_database", new_callable=lambda: mock_db) as mock, - ): - yield mock - - -@pytest.fixture(autouse=True) -def mock_get_collection(mock_get_database): - with ( - patch(COLLECTION_PATH, spec=AsyncCollection) as mock_collection, - patch.object(mock_get_database, "get_collection", new_callable=lambda: mock_collection) as mock, - ): - yield mock diff --git a/python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_collection.py b/python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_collection.py deleted file mode 100644 index 5a084ed7ea76..000000000000 --- a/python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_collection.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from unittest.mock import AsyncMock, patch - -from pymongo import AsyncMongoClient -from pymongo.asynchronous.cursor import AsyncCursor -from pymongo.results import UpdateResult -from pytest import mark, raises - -from semantic_kernel.connectors.memory.mongodb_atlas.const import DEFAULT_DB_NAME, DEFAULT_SEARCH_INDEX_NAME -from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_collection import MongoDBAtlasCollection -from semantic_kernel.exceptions.vector_store_exceptions import VectorStoreInitializationException - - -def test_mongodb_atlas_collection_initialization(mongodb_atlas_unit_test_env, data_model_definition, mock_mongo_client): - collection = MongoDBAtlasCollection( - data_model_type=dict, - data_model_definition=data_model_definition, - collection_name="test_collection", - mongo_client=mock_mongo_client, - ) - assert collection.mongo_client is not None - assert isinstance(collection.mongo_client, AsyncMongoClient) - - -@mark.parametrize("exclude_list", [["MONGODB_ATLAS_CONNECTION_STRING"]], indirect=True) -def test_mongodb_atlas_collection_initialization_fail(mongodb_atlas_unit_test_env, data_model_definition): - with raises(VectorStoreInitializationException): - MongoDBAtlasCollection( - collection_name="test_collection", - data_model_type=dict, - data_model_definition=data_model_definition, - ) - - -@mark.parametrize("exclude_list", [["MONGODB_ATLAS_DATABASE_NAME", "MONGODB_ATLAS_INDEX_NAME"]], indirect=True) -def test_mongodb_atlas_collection_initialization_defaults(mongodb_atlas_unit_test_env, data_model_definition): - collection = MongoDBAtlasCollection( - collection_name="test_collection", - data_model_type=dict, - data_model_definition=data_model_definition, - ) - assert collection.database_name == DEFAULT_DB_NAME - assert collection.index_name == DEFAULT_SEARCH_INDEX_NAME - - -async def test_mongodb_atlas_collection_upsert(mongodb_atlas_unit_test_env, data_model_definition, mock_get_collection): - collection = MongoDBAtlasCollection( - data_model_type=dict, - data_model_definition=data_model_definition, - collection_name="test_collection", - ) - with patch.object(collection, "_get_collection", new=mock_get_collection) as mock_get: - result_mock = AsyncMock(spec=UpdateResult) - result_mock.upserted_ids = {0: "test_id"} - mock_get.return_value.bulk_write.return_value = result_mock - result = await collection._inner_upsert([{"_id": "test_id", "data": "test_data"}]) - assert result == ["test_id"] - - -async def test_mongodb_atlas_collection_get(mongodb_atlas_unit_test_env, data_model_definition, mock_get_collection): - collection = MongoDBAtlasCollection( - data_model_type=dict, - data_model_definition=data_model_definition, - collection_name="test_collection", - ) - with patch.object(collection, "_get_collection", new=mock_get_collection) as mock_get: - result_mock = AsyncMock(spec=AsyncCursor) - result_mock.to_list.return_value = [{"_id": "test_id", "data": "test_data"}] - mock_get.return_value.find.return_value = result_mock - result = await collection._inner_get(["test_id"]) - assert result == [{"_id": "test_id", "data": "test_data"}] - - -async def test_mongodb_atlas_collection_delete(mongodb_atlas_unit_test_env, data_model_definition, mock_get_collection): - collection = MongoDBAtlasCollection( - data_model_type=dict, - data_model_definition=data_model_definition, - collection_name="test_collection", - ) - with patch.object(collection, "_get_collection", new=mock_get_collection) as mock_get: - await collection._inner_delete(["test_id"]) - 
mock_get.return_value.delete_many.assert_called_with({"_id": {"$in": ["test_id"]}}) - - -async def test_mongodb_atlas_collection_collection_exists( - mongodb_atlas_unit_test_env, data_model_definition, mock_get_database -): - collection = MongoDBAtlasCollection( - data_model_type=dict, - data_model_definition=data_model_definition, - collection_name="test_collection", - ) - with patch.object(collection, "_get_database", new=mock_get_database) as mock_get: - mock_get.return_value.list_collection_names.return_value = ["test_collection"] - assert await collection.does_collection_exist() diff --git a/python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_store.py b/python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_store.py deleted file mode 100644 index a06e68a99699..000000000000 --- a/python/tests/unit/connectors/memory/mongodb_atlas/test_mongodb_atlas_store.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - - -from pymongo import AsyncMongoClient - -from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_collection import MongoDBAtlasCollection -from semantic_kernel.connectors.memory.mongodb_atlas.mongodb_atlas_store import MongoDBAtlasStore - - -def test_mongodb_atlas_store_initialization(mongodb_atlas_unit_test_env): - store = MongoDBAtlasStore() - assert store.mongo_client is not None - assert isinstance(store.mongo_client, AsyncMongoClient) - - -def test_mongodb_atlas_store_get_collection(mongodb_atlas_unit_test_env, data_model_definition): - store = MongoDBAtlasStore() - collection = store.get_collection( - collection_name="test_collection", - data_model_type=dict, - data_model_definition=data_model_definition, - ) - assert collection is not None - assert isinstance(collection, MongoDBAtlasCollection) - - -async def test_mongodb_atlas_store_list_collection_names(mongodb_atlas_unit_test_env, mock_mongo_client): - store = MongoDBAtlasStore(mongo_client=mock_mongo_client, database_name="test_db") - store.mongo_client.get_database().list_collection_names.return_value = ["test_collection"] - result = await store.list_collection_names() - assert result == ["test_collection"] diff --git a/python/tests/unit/connectors/memory/postgres/test_postgres_store.py b/python/tests/unit/connectors/memory/postgres/test_postgres_store.py index 755281e9f2bb..a2b23bb662c4 100644 --- a/python/tests/unit/connectors/memory/postgres/test_postgres_store.py +++ b/python/tests/unit/connectors/memory/postgres/test_postgres_store.py @@ -5,7 +5,6 @@ from typing import Annotated, Any from unittest.mock import AsyncMock, MagicMock, Mock, patch -import pytest import pytest_asyncio from psycopg import AsyncConnection, AsyncCursor from psycopg_pool import AsyncConnectionPool @@ -14,8 +13,6 @@ from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( OpenAIEmbeddingPromptExecutionSettings, ) -from semantic_kernel.connectors.memory.postgres.constants import DISTANCE_COLUMN_NAME -from semantic_kernel.connectors.memory.postgres.postgres_collection import PostgresCollection from semantic_kernel.connectors.memory.postgres.postgres_settings import PostgresSettings from semantic_kernel.connectors.memory.postgres.postgres_store import PostgresStore from semantic_kernel.data.const import DistanceFunction, IndexKind @@ -25,7 +22,6 @@ VectorStoreRecordKeyField, VectorStoreRecordVectorField, ) -from semantic_kernel.data.vector_search.vector_search_options import VectorSearchOptions 
@fixture(scope="function") @@ -80,9 +76,6 @@ class SimpleDataModel: ] -# region VectorStore Tests - - async def test_vector_store_defaults(vector_store: PostgresStore) -> None: assert vector_store.connection_pool is not None async with vector_store.connection_pool.connection() as conn: @@ -243,130 +236,7 @@ async def test_get_records(vector_store: PostgresStore, mock_cursor: Mock) -> No assert records[2].data == {"key": "value3"} -# endregion - -# region Vector Search tests - - -@pytest.mark.parametrize( - "distance_function, operator, subquery_distance, include_vectors, include_total_count", - [ - (DistanceFunction.COSINE_SIMILARITY, "<=>", f'1 - subquery."{DISTANCE_COLUMN_NAME}"', False, False), - (DistanceFunction.COSINE_DISTANCE, "<=>", None, False, False), - (DistanceFunction.DOT_PROD, "<#>", f'-1 * subquery."{DISTANCE_COLUMN_NAME}"', True, False), - (DistanceFunction.EUCLIDEAN_DISTANCE, "<->", None, False, True), - (DistanceFunction.MANHATTAN, "<+>", None, True, True), - ], -) -async def test_vector_search( - vector_store: PostgresStore, - mock_cursor: Mock, - distance_function: DistanceFunction, - operator: str, - subquery_distance: str | None, - include_vectors: bool, - include_total_count: bool, -) -> None: - @vectorstoremodel - @dataclass - class SimpleDataModel: - id: Annotated[int, VectorStoreRecordKeyField()] - embedding: Annotated[ - list[float], - VectorStoreRecordVectorField( - embedding_settings={"embedding": OpenAIEmbeddingPromptExecutionSettings(dimensions=1536)}, - index_kind=IndexKind.HNSW, - dimensions=1536, - distance_function=distance_function, - property_type="float", - ), - ] - data: Annotated[ - dict[str, Any], - VectorStoreRecordDataField(has_embedding=True, embedding_property_name="embedding", property_type="JSONB"), - ] - - collection = vector_store.get_collection("test_collection", SimpleDataModel) - assert isinstance(collection, PostgresCollection) - - search_results = await collection.vectorized_search( - [1.0, 2.0, 3.0], - options=VectorSearchOptions( - top=10, skip=5, include_vectors=include_vectors, include_total_count=include_total_count - ), - ) - if include_total_count: - # Including total count issues query directly - assert mock_cursor.execute.call_count == 1 - else: - # Total count is not included, query is issued when iterating over results - assert mock_cursor.execute.call_count == 0 - async for _ in search_results.results: - pass - assert mock_cursor.execute.call_count == 1 - - execute_args, _ = mock_cursor.execute.call_args - - assert (search_results.total_count is not None) == include_total_count - - statement = execute_args[0] - statement_str = statement.as_string() - - expected_columns = '"id", "data"' - if include_vectors: - expected_columns = '"id", "embedding", "data"' - - expected_statement = ( - f'SELECT {expected_columns}, "embedding" {operator} %s as "{DISTANCE_COLUMN_NAME}" ' - 'FROM "public"."test_collection" ' - f'ORDER BY "{DISTANCE_COLUMN_NAME}" LIMIT 10 OFFSET 5' - ) - - if subquery_distance: - expected_statement = ( - f'SELECT subquery.*, {subquery_distance} AS "{DISTANCE_COLUMN_NAME}" FROM (' - + expected_statement - + ") AS subquery" - ) - - assert statement_str == expected_statement - - -async def test_model_post_init_conflicting_distance_column_name(vector_store: PostgresStore) -> None: - @vectorstoremodel - @dataclass - class ConflictingDataModel: - id: Annotated[int, VectorStoreRecordKeyField()] - sk_pg_distance: Annotated[ - float, VectorStoreRecordDataField() - ] # Note: test depends on value of DISTANCE_COLUMN_NAME 
constant - - embedding: Annotated[ - list[float], - VectorStoreRecordVectorField( - embedding_settings={"embedding": OpenAIEmbeddingPromptExecutionSettings(dimensions=1536)}, - index_kind=IndexKind.HNSW, - dimensions=1536, - distance_function=DistanceFunction.COSINE_SIMILARITY, - property_type="float", - ), - ] - data: Annotated[ - dict[str, Any], - VectorStoreRecordDataField(has_embedding=True, embedding_property_name="embedding", property_type="JSONB"), - ] - - collection = vector_store.get_collection("test_collection", ConflictingDataModel) - assert isinstance(collection, PostgresCollection) - - # Ensure that the distance column name has been changed to avoid conflict - assert collection._distance_column_name != DISTANCE_COLUMN_NAME - assert collection._distance_column_name.startswith(f"{DISTANCE_COLUMN_NAME}_") - - -# endregion - -# region Settings tests +# Test settings def test_settings_connection_string(monkeypatch) -> None: @@ -420,6 +290,3 @@ def test_settings_env_vars(monkeypatch) -> None: assert conn_info["dbname"] == "dbname" assert conn_info["user"] == "user" assert conn_info["password"] == "password" - - -# endregion diff --git a/python/tests/unit/contents/test_binary_content.py b/python/tests/unit/contents/test_binary_content.py index 57ee6dad5d3f..14719d5f5754 100644 --- a/python/tests/unit/contents/test_binary_content.py +++ b/python/tests/unit/contents/test_binary_content.py @@ -2,7 +2,6 @@ import pytest -from numpy import array from semantic_kernel.contents.binary_content import BinaryContent @@ -17,9 +16,6 @@ BinaryContent(data_uri="data:image/jpeg;foo=bar;base64,dGVzdF9kYXRh", metadata={"bar": "baz"}), id="data_uri_with_params_and_metadata", ), - pytest.param( - BinaryContent(data=array([1, 2, 3]), mime_type="application/json", data_format="base64"), id="data_array" - ), ] @@ -76,7 +72,6 @@ def test_update_data_str(): def test_update_existing_data(): binary = BinaryContent(data_uri="data:image/jpeg;foo=bar;base64,dGVzdF9kYXRh", metadata={"bar": "baz"}) - assert binary._data_uri is not None binary._data_uri.data_format = None binary.data = "test_data" binary.data = b"test_data" diff --git a/python/tests/unit/contents/test_chat_history_summarization_reducer.py b/python/tests/unit/contents/test_chat_history_summarization_reducer.py index c61d044a9811..35e13c969522 100644 --- a/python/tests/unit/contents/test_chat_history_summarization_reducer.py +++ b/python/tests/unit/contents/test_chat_history_summarization_reducer.py @@ -5,7 +5,7 @@ import pytest from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase -from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.const import DEFAULT_SERVICE_NAME from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.history_reducer.chat_history_reducer_utils import SUMMARY_METADATA_KEY from semantic_kernel.contents.history_reducer.chat_history_summarization_reducer import ( @@ -49,6 +49,7 @@ def test_summarization_reducer_init(mock_service): reducer = ChatHistorySummarizationReducer( service=mock_service, target_count=10, + service_id="my_service", threshold_count=5, summarization_instructions="Custom instructions", use_single_summary=False, @@ -57,6 +58,7 @@ def test_summarization_reducer_init(mock_service): assert reducer.service == mock_service assert reducer.target_count == 10 + assert reducer.service_id == "my_service" assert reducer.threshold_count == 5 assert 
reducer.summarization_instructions == "Custom instructions" assert reducer.use_single_summary is False @@ -70,6 +72,7 @@ def test_summarization_reducer_defaults(mock_service): assert reducer.summarization_instructions in reducer.summarization_instructions assert reducer.use_single_summary is True assert reducer.fail_on_error is True + assert reducer.service_id == DEFAULT_SERVICE_NAME def test_summarization_reducer_eq_and_hash(mock_service): @@ -112,7 +115,6 @@ async def test_summarization_reducer_reduce_needed(mock_service): # Mock that the service will return a single summary message summary_content = ChatMessageContent(role=AuthorRole.ASSISTANT, content="This is a summary.") mock_service.get_chat_message_content.return_value = summary_content - mock_service.get_prompt_execution_settings_from_settings.return_value = PromptExecutionSettings() result = await reducer.reduce() assert result is not None, "We expect a shortened list with a new summary inserted." @@ -122,33 +124,6 @@ async def test_summarization_reducer_reduce_needed(mock_service): ) -async def test_summarization_reducer_reduce_needed_auto(mock_service): - # Mock that the service will return a single summary message - summary_content = ChatMessageContent(role=AuthorRole.ASSISTANT, content="This is a summary.") - mock_service.get_chat_message_content.return_value = summary_content - mock_service.get_prompt_execution_settings_from_settings.return_value = PromptExecutionSettings() - - messages = [ - # A summary message (as in the original test) - ChatMessageContent(role=AuthorRole.SYSTEM, content="Existing summary", metadata={SUMMARY_METADATA_KEY: True}), - # Enough additional messages so total is > 4 - ChatMessageContent(role=AuthorRole.USER, content="User says hello"), - ChatMessageContent(role=AuthorRole.ASSISTANT, content="Assistant responds"), - ChatMessageContent(role=AuthorRole.USER, content="User says more"), - ChatMessageContent(role=AuthorRole.ASSISTANT, content="Assistant responds again"), - ChatMessageContent(role=AuthorRole.USER, content="User says more"), - ChatMessageContent(role=AuthorRole.ASSISTANT, content="Assistant responds again"), - ] - - reducer = ChatHistorySummarizationReducer(auto_reduce=True, service=mock_service, target_count=3, threshold_count=1) - - for msg in messages: - await reducer.add_message_async(msg) - assert len(reducer.messages) <= 5, ( - "We should auto-reduce after each message, we have one summary, and then 4 other messages." - ) - - async def test_summarization_reducer_reduce_no_messages_to_summarize(mock_service): # If we do use_single_summary=False, the older_range_start is insertion_point # In that scenario, if insertion_point == older_range_end => no messages to summarize => return None @@ -221,7 +196,6 @@ async def test_summarization_reducer_private_summarize(mock_service): summary_content = ChatMessageContent(role=AuthorRole.ASSISTANT, content="Mock Summary") mock_service.get_chat_message_content.return_value = summary_content - mock_service.get_prompt_execution_settings_from_settings.return_value = PromptExecutionSettings() actual_summary = await reducer._summarize(chat_messages) assert actual_summary is not None, "We should get a summary message back." 
diff --git a/python/tests/unit/contents/test_chat_message_content.py b/python/tests/unit/contents/test_chat_message_content.py index 18e160c69304..9e7dcaa07b8a 100644 --- a/python/tests/unit/contents/test_chat_message_content.py +++ b/python/tests/unit/contents/test_chat_message_content.py @@ -4,7 +4,6 @@ from defusedxml.ElementTree import XML from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.file_reference_content import FileReferenceContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent from semantic_kernel.contents.image_content import ImageContent @@ -381,37 +380,3 @@ def test_cmc_to_dict_keys(): def test_cmc_to_dict_items(input_args, expected_dict): message = ChatMessageContent(**input_args) assert message.to_dict() == expected_dict - - -def test_cmc_with_unhashable_types_can_hash(): - user_messages = [ - ChatMessageContent( - role=AuthorRole.USER, - items=[ - TextContent(text="Describe this image."), - ImageContent( - uri="https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/New_york_times_square-terabass.jpg/1200px-New_york_times_square-terabass.jpg" - ), - ], - ), - ChatMessageContent( - role=AuthorRole.USER, - items=[ - TextContent(text="What is the main color in this image?"), - ImageContent(uri="https://upload.wikimedia.org/wikipedia/commons/5/56/White_shark.jpg"), - ], - ), - ChatMessageContent( - role=AuthorRole.USER, - items=[ - TextContent(text="Is there an animal in this image?"), - FileReferenceContent(file_id="test_file_id"), - ], - ), - ChatMessageContent( - role=AuthorRole.USER, - ), - ] - - for message in user_messages: - assert hash(message) is not None diff --git a/python/tests/unit/contents/test_data_uri.py b/python/tests/unit/contents/test_data_uri.py index fc98679a9765..c4879b305593 100644 --- a/python/tests/unit/contents/test_data_uri.py +++ b/python/tests/unit/contents/test_data_uri.py @@ -2,7 +2,6 @@ from typing import Any -import numpy as np import pytest from semantic_kernel.contents.utils.data_uri import DataUri @@ -21,15 +20,7 @@ "base64", id="basic_image", ), - pytest.param( - "data:text/plain;,test_data", - b"test_data", - "test_data", - "text/plain", - {}, - None, - id="basic_text", - ), + pytest.param("data:text/plain;,test_data", None, "test_data", "text/plain", {}, None, id="basic_text"), pytest.param( "data:application/octet-stream;base64,AQIDBA==", b"\x01\x02\x03\x04", @@ -50,22 +41,13 @@ ), pytest.param( "data:application/octet-stream;utf8,01-02-03-04", - b"01-02-03-04", + None, "01-02-03-04", "application/octet-stream", {}, "utf8", id="utf8", ), - pytest.param( - "data:text/plain;key=value;base64,U29t\r\nZQ==\t", - b"Some", - "U29tZQ==", - "text/plain", - {"key": "value"}, - "base64", - id="with_params", - ), ], ) def test_data_uri_from_data_uri_str( @@ -78,10 +60,10 @@ def test_data_uri_from_data_uri_str( ): data_uri = DataUri.from_data_uri(uri) assert data_uri.data_bytes == data_bytes + assert data_uri.data_str == data_str assert data_uri.mime_type == mime_type assert data_uri.parameters == parameters assert data_uri.data_format == data_format - assert data_uri._data_str() == data_str @pytest.mark.parametrize( @@ -92,6 +74,11 @@ def test_data_uri_from_data_uri_str( pytest.param("data:", ContentInitializationError, id="missing_comma"), pytest.param("data:something,", ContentInitializationError, id="mime_type_without_subtype"), pytest.param("data:something;else,data", 
ContentInitializationError, id="mime_type_without_subtype2"), + pytest.param( + "data:type/subtype;parameterwithoutvalue;else,", ContentInitializationError, id="param_without_value" + ), + pytest.param("data:type/subtype;parameter=va=lue;else,", ContentInitializationError, id="param_multiple_eq"), + pytest.param("data:type/subtype;=value;else,", ContentInitializationError, id="param_without_name"), pytest.param("data:image/jpeg;base64,dGVzdF9kYXRh;foo=bar", ContentInitializationError, id="wrong_order"), pytest.param("data:text/plain;test_data", ContentInitializationError, id="missing_comma"), pytest.param( @@ -243,70 +230,3 @@ def test_eq(): assert data_uri1 == data_uri2 assert data_uri1 != "data:image/jpeg;base64,dGVzdF9kYXRh" assert data_uri1 != DataUri.from_data_uri("data:image/jpeg;base64,dGVzdF9kYXRi") - - -def test_array(): - arr = np.array([[1, 2], [3, 4]], dtype=np.uint8) - data_uri = DataUri(data_array=arr, mime_type="application/octet-stream", data_format="base64") - encoded = data_uri.to_string() - assert data_uri.data_array is not None - assert "data:application/octet-stream;base64," in encoded - assert data_uri.data_array.tobytes() == b"\x01\x02\x03\x04" - - -@pytest.mark.parametrize( - "data_bytes, data_str, data_array, data_format, expected_output", - [ - pytest.param( - b"test_data", - None, - None, - "base64", - "dGVzdF9kYXRh", - id="bytes_base64", - ), - pytest.param( - b"test_data", - None, - None, - "plain", - "test_data", - id="bytes_non_base64", - ), - pytest.param( - None, - "dGVzdF9kYXRh", - None, - "base64", - "dGVzdF9kYXRh", - id="string_base64", - ), - pytest.param( - None, - "plain_data", - None, - None, - "plain_data", - id="string_non_base64", - ), - pytest.param( - None, - None, - np.array([1, 2, 3], dtype=np.uint8), - "base64", - "AQID", - id="array_base64", - ), - pytest.param( - None, - None, - np.array([1, 2, 3], dtype=np.uint8), - "plain", - "\1\2\3", - id="array_non_base64", - ), - ], -) -def test__data_str(data_bytes, data_str, data_array, data_format, expected_output): - data_uri = DataUri(data_bytes=data_bytes, data_str=data_str, data_array=data_array, data_format=data_format) - assert data_uri._data_str() == expected_output diff --git a/python/tests/unit/contents/test_function_result_content.py b/python/tests/unit/contents/test_function_result_content.py index 2e83dc9737a9..5bb549924d81 100644 --- a/python/tests/unit/contents/test_function_result_content.py +++ b/python/tests/unit/contents/test_function_result_content.py @@ -4,7 +4,6 @@ from unittest.mock import Mock import pytest -from pydantic import BaseModel, ConfigDict, Field from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent @@ -13,7 +12,6 @@ from semantic_kernel.contents.text_content import TextContent from semantic_kernel.functions.function_result import FunctionResult from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata -from semantic_kernel.kernel_pydantic import KernelBaseModel class CustomResultClass: @@ -36,17 +34,6 @@ def __str__(self): return f"CustomObjectWithList({self.items})" -class AccountBalanceFrozen(KernelBaseModel): - # Make the model frozen so it's hashable - balance: int = Field(default=..., alias="account_balance") - model_config = ConfigDict(frozen=True) - - -class AccountBalanceNonFrozen(KernelBaseModel): - # This model is not frozen and thus not hashable by default - balance: int = Field(default=..., alias="account_balance") - - def 
test_init(): frc = FunctionResultContent(id="test", name="test-function", result="test-result", metadata={"test": "test"}) assert frc.name == "test-function" @@ -137,83 +124,3 @@ def __str__(self) -> str: frc.model_dump_json(exclude_none=True) == """{"metadata":{},"content_type":"function_result","id":"test","result":"test","name":"test-function","function_name":"function","plugin_name":"test"}""" # noqa: E501 ) - - -def test_hash_with_frozen_account_balance(): - balance = AccountBalanceFrozen(account_balance=100) - content = FunctionResultContent( - id="test_id", - result=balance, - function_name="TestFunction", - ) - _ = hash(content) - assert True, "Hashing FunctionResultContent with frozen model should not raise errors." - - -def test_hash_with_dict_result(): - balance_dict = {"account_balance": 100} - content = FunctionResultContent( - id="test_id", - result=balance_dict, - function_name="TestFunction", - ) - _ = hash(content) - assert True, "Hashing FunctionResultContent with dict result should not raise errors." - - -def test_hash_with_nested_dict_result(): - nested_dict = {"account_balance": 100, "details": {"currency": "USD", "last_updated": "2025-01-28"}} - content = FunctionResultContent( - id="test_id_nested", - result=nested_dict, - function_name="TestFunctionNested", - ) - _ = hash(content) - assert True, "Hashing FunctionResultContent with nested dict result should not raise errors." - - -def test_hash_with_list_result(): - balance_list = [100, 200, 300] - content = FunctionResultContent( - id="test_id_list", - result=balance_list, - function_name="TestFunctionList", - ) - _ = hash(content) - assert True, "Hashing FunctionResultContent with list result should not raise errors." - - -def test_hash_with_set_result(): - balance_set = {100, 200, 300} - content = FunctionResultContent( - id="test_id_set", - result=balance_set, - function_name="TestFunctionSet", - ) - _ = hash(content) - assert True, "Hashing FunctionResultContent with set result should not raise errors." - - -def test_hash_with_custom_object_result(): - class CustomObject(BaseModel): - field1: str - field2: int - - custom_obj = CustomObject(field1="value1", field2=42) - content = FunctionResultContent( - id="test_id_custom", - result=custom_obj, - function_name="TestFunctionCustom", - ) - _ = hash(content) - assert True, "Hashing FunctionResultContent with custom object result should not raise errors." - - -def test_unhashable_non_frozen_model_raises_type_error(): - balance = AccountBalanceNonFrozen(account_balance=100) - content = FunctionResultContent( - id="test_id_unhashable", - result=balance, - function_name="TestFunctionUnhashable", - ) - _ = hash(content) diff --git a/python/tests/unit/contents/test_hashing_utils.py b/python/tests/unit/contents/test_hashing_utils.py deleted file mode 100644 index 66bcf52c13d9..000000000000 --- a/python/tests/unit/contents/test_hashing_utils.py +++ /dev/null @@ -1,196 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from typing import Any - -from pydantic import BaseModel - -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.function_result_content import FunctionResultContent - - -class SimpleModel(BaseModel): - field1: str - field2: int - - -class NestedModel(BaseModel): - name: str - values: list[int] - - -class ModelContainer(BaseModel): - container_name: str - nested_model: NestedModel - - -def test_hash_with_nested_structures(): - """ - Deeply nested dictionaries and lists, but with no cyclical references. - Ensures multiple levels of nested transformations work. - """ - data = { - "level1": { - "list1": [1, 2, 3], - "dict1": {"keyA": "valA", "keyB": "valB"}, - }, - "level2": [ - {"sub_dict1": {"x": 99}}, - {"sub_dict2": {"y": 100}}, - ], - } - content = FunctionResultContent( - id="test_nested_structures", - result=data, - function_name="TestNestedStructures", - ) - _ = hash(content) - assert True, "Hashing deeply nested structures succeeded." - - -def test_hash_with_repeated_references(): - """ - Multiple references to the same object, but no cycle. - Ensures repeated objects are handled consistently and do not cause duplication. - """ - shared_dict = {"common_key": "common_value"} - data = { - "ref1": shared_dict, - "ref2": shared_dict, # same object, repeated reference - } - content = FunctionResultContent( - id="test_repeated_references", - result=data, - function_name="TestRepeatedRefs", - ) - _ = hash(content) - assert True, "Hashing repeated references (no cycles) succeeded." - - -def test_hash_with_simple_pydantic_model(): - """ - Hash a Pydantic model that doesn't reference itself or another model. - """ - model_instance = SimpleModel(field1="hello", field2=42) - content = FunctionResultContent( - id="test_simple_model", - result=model_instance, - function_name="TestSimpleModel", - ) - _ = hash(content) - assert True, "Hashing a simple Pydantic model succeeded." - - -def test_hash_with_nested_pydantic_models(): - """ - Hash a Pydantic model containing another Pydantic model, no cycles. - """ - nested = NestedModel(name="MyNestedModel", values=[1, 2, 3]) - container = ModelContainer(container_name="TopLevel", nested_model=nested) - content = FunctionResultContent( - id="test_nested_models", - result=container, - function_name="TestNestedModels", - ) - _ = hash(content) - assert True, "Hashing nested Pydantic models succeeded." - - -def test_hash_with_triple_cycle(): - """ - Three dictionaries referencing each other to form a cycle. - This ensures that multi-node cycles are also handled. - """ - dict_a: dict[str, Any] = {"a_key": 1} - dict_b: dict[str, Any] = {"b_key": 2} - dict_c: dict[str, Any] = {"c_key": 3} - - dict_a["ref_to_b"] = dict_b - dict_b["ref_to_c"] = dict_c - dict_c["ref_to_a"] = dict_a - - content = FunctionResultContent( - id="test_triple_cycle", - result=dict_a, - function_name="TestTripleCycle", - ) - - _ = hash(content) - assert True, "Hashing triple cyclical references succeeded." - - -def test_hash_with_cyclical_references(): - """ - The original cyclical references test for thorough coverage. 
- """ - - class CyclicalModel(BaseModel): - name: str - partner: "CyclicalModel" = None # type: ignore - - CyclicalModel.model_rebuild() - - model_a = CyclicalModel(name="ModelA") - model_b = CyclicalModel(name="ModelB") - model_a.partner = model_b - model_b.partner = model_a - - dict_x = {"x_key": 42} - dict_y = {"y_key": 99, "ref_to_x": dict_x} - dict_x["ref_to_y"] = dict_y # type: ignore - - giant_data_structure = { - "models": [model_a, model_b], - "nested": {"cyclical_dict_x": dict_x, "cyclical_dict_y": dict_y}, - } - - content = FunctionResultContent( - id="test_id_cyclical", - result=giant_data_structure, - function_name="TestFunctionCyclical", - ) - - _ = hash(content) - - -def test_hash_with_large_structure(): - """ - Tests performance or at least correctness when dealing with - a large structure, ensuring we don't crash or exceed recursion. - """ - large_list = list(range(1000)) - large_dict = {f"key_{i}": i for i in range(1000)} - combined = { - "big_list": large_list, - "big_dict": large_dict, - "nested": [ - {"inner_list": large_list}, - {"inner_dict": large_dict}, - ], - } - content = FunctionResultContent( - id="test_large_structure", - result=combined, - function_name="TestLargeStructure", - ) - - _ = hash(content) - - -def test_hash_function_call_content(): - call_content = FunctionCallContent( - inner_content=None, - ai_model_id=None, - metadata={}, - id="call_LAbz", - index=None, - name="menu-get_specials", - function_name="get_specials", - plugin_name="menu", - arguments="{}", - ) - - content = FunctionResultContent( - id="test_function_call_content", result=call_content, function_name="TestFunctionCallContent" - ) - - _ = hash(content) diff --git a/python/tests/unit/contents/test_streaming_chat_message_content.py b/python/tests/unit/contents/test_streaming_chat_message_content.py index 9d9fc8c0f4aa..d9651a45489d 100644 --- a/python/tests/unit/contents/test_streaming_chat_message_content.py +++ b/python/tests/unit/contents/test_streaming_chat_message_content.py @@ -4,10 +4,8 @@ from defusedxml.ElementTree import XML from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.file_reference_content import FileReferenceContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent -from semantic_kernel.contents.image_content import ImageContent from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent @@ -412,41 +410,3 @@ def test_scmc_bytes(): message = StreamingChatMessageContent(choice_index=0, role=AuthorRole.USER, content="Hello, world!") assert bytes(message) == b"Hello, world!" assert bytes(message.items[0]) == b"Hello, world!" 
- - -def test_scmc_with_unhashable_types_can_hash(): - user_messages = [ - StreamingChatMessageContent( - role=AuthorRole.USER, - items=[ - StreamingTextContent(text="Describe this image.", choice_index=0), - ImageContent( - uri="https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/New_york_times_square-terabass.jpg/1200px-New_york_times_square-terabass.jpg" - ), - ], - choice_index=0, - ), - StreamingChatMessageContent( - role=AuthorRole.USER, - items=[ - StreamingTextContent(text="What is the main color in this image?", choice_index=0), - ImageContent(uri="https://upload.wikimedia.org/wikipedia/commons/5/56/White_shark.jpg"), - ], - choice_index=0, - ), - StreamingChatMessageContent( - role=AuthorRole.USER, - items=[ - StreamingTextContent(text="Is there an animal in this image?", choice_index=0), - FileReferenceContent(file_id="test_file_id"), - ], - choice_index=0, - ), - StreamingChatMessageContent( - role=AuthorRole.USER, - choice_index=0, - ), - ] - - for message in user_messages: - assert hash(message) is not None diff --git a/python/tests/unit/core_plugins/test_crew_ai_enterprise.py b/python/tests/unit/core_plugins/test_crew_ai_enterprise.py deleted file mode 100644 index 1dbf8ee40679..000000000000 --- a/python/tests/unit/core_plugins/test_crew_ai_enterprise.py +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from unittest.mock import patch - -import pytest - -from semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise import CrewAIEnterprise -from semantic_kernel.core_plugins.crew_ai.crew_ai_models import CrewAIEnterpriseKickoffState, CrewAIStatusResponse -from semantic_kernel.exceptions.function_exceptions import PluginInitializationError -from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata -from semantic_kernel.functions.kernel_plugin import KernelPlugin - - -@pytest.fixture -def crew_ai_enterprise(): - return CrewAIEnterprise(endpoint="https://test.com", auth_token="FakeToken") - - -def test_it_can_be_instantiated(crew_ai_enterprise): - assert crew_ai_enterprise is not None - - -def test_create_kernel_plugin(crew_ai_enterprise): - plugin = crew_ai_enterprise.create_kernel_plugin( - name="test_plugin", - description="Test plugin", - parameters=[KernelParameterMetadata(name="param1")], - ) - assert isinstance(plugin, KernelPlugin) - assert "kickoff" in plugin.functions - assert "kickoff_and_wait" in plugin.functions - assert "get_status" in plugin.functions - assert "wait_for_completion" in plugin.functions - - -@patch("semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise.CrewAIEnterpriseClient.kickoff") -async def test_kickoff(mock_kickoff, crew_ai_enterprise): - mock_kickoff.return_value.kickoff_id = "123" - kickoff_id = await crew_ai_enterprise.kickoff(inputs={"param1": "value"}) - assert kickoff_id == "123" - - -@pytest.mark.parametrize( - "state", - [ - CrewAIEnterpriseKickoffState.Pending, - CrewAIEnterpriseKickoffState.Started, - CrewAIEnterpriseKickoffState.Running, - CrewAIEnterpriseKickoffState.Success, - CrewAIEnterpriseKickoffState.Failed, - CrewAIEnterpriseKickoffState.Failure, - CrewAIEnterpriseKickoffState.Not_Found, - ], -) -@patch("semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise.CrewAIEnterpriseClient.get_status") -async def test_get_crew_kickoff_status(mock_get_status, crew_ai_enterprise, state): - mock_get_status.return_value = CrewAIStatusResponse(state=state.value) - status_response = await crew_ai_enterprise.get_crew_kickoff_status(kickoff_id="123") - assert 
status_response.state == state - - -@patch("semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise.CrewAIEnterpriseClient.get_status") -async def test_wait_for_crew_completion(mock_get_status, crew_ai_enterprise): - mock_get_status.side_effect = [ - CrewAIStatusResponse(state=CrewAIEnterpriseKickoffState.Pending), - CrewAIStatusResponse(state=CrewAIEnterpriseKickoffState.Success, result="result"), - ] - result = await crew_ai_enterprise.wait_for_crew_completion(kickoff_id="123") - assert result == "result" - - -def test_build_arguments(crew_ai_enterprise): - parameters = [KernelParameterMetadata(name="param1")] - arguments = {"param1": "value"} - args = crew_ai_enterprise._build_arguments(parameters, arguments) - assert args["param1"] == "value" - - -def test_build_arguments_missing_param(crew_ai_enterprise): - parameters = [KernelParameterMetadata(name="param1")] - arguments = {} - with pytest.raises(PluginInitializationError): - crew_ai_enterprise._build_arguments(parameters, arguments) - - -@patch("semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise.CrewAIEnterpriseClient.__aenter__") -async def test_aenter(mock_aenter, crew_ai_enterprise): - await crew_ai_enterprise.__aenter__() - mock_aenter.assert_called_once() - - -@patch("semantic_kernel.core_plugins.crew_ai.crew_ai_enterprise.CrewAIEnterpriseClient.__aexit__") -async def test_aexit(mock_aexit, crew_ai_enterprise): - await crew_ai_enterprise.__aexit__() - mock_aexit.assert_called_once() diff --git a/python/tests/unit/functions/test_kernel_experimental_decorator.py b/python/tests/unit/functions/test_kernel_experimental_decorator.py new file mode 100644 index 000000000000..a7391fed504f --- /dev/null +++ b/python/tests/unit/functions/test_kernel_experimental_decorator.py @@ -0,0 +1,29 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.utils.experimental_decorator import experimental_function + + +@experimental_function +def my_function() -> None: + """This is a sample function docstring.""" + pass + + +@experimental_function +def my_function_no_doc_string() -> None: + pass + + +def test_function_experimental_decorator() -> None: + assert ( + my_function.__doc__ + == "This is a sample function docstring.\n\nNote: This function is experimental and may change in the future." + ) + assert hasattr(my_function, "is_experimental") + assert my_function.is_experimental is True + + +def test_function_experimental_decorator_with_no_doc_string() -> None: + assert my_function_no_doc_string.__doc__ == "Note: This function is experimental and may change in the future." + assert hasattr(my_function_no_doc_string, "is_experimental") + assert my_function_no_doc_string.is_experimental is True diff --git a/python/tests/unit/kernel/test_kernel.py b/python/tests/unit/kernel/test_kernel.py index c40c1f7153d9..0e7148ddf2c0 100644 --- a/python/tests/unit/kernel/test_kernel.py +++ b/python/tests/unit/kernel/test_kernel.py @@ -1,7 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. 
import os -from pathlib import Path from typing import Union from unittest.mock import AsyncMock, MagicMock, patch @@ -283,12 +282,12 @@ async def test_invoke_function_call(kernel: Kernel, get_tool_call_mock): patch("semantic_kernel.kernel.Kernel.get_list_of_function_metadata", return_value=[func_meta]), ): await kernel.invoke_function_call( - function_call=tool_call_mock, - chat_history=chat_history_mock, - arguments=arguments, - function_call_count=1, - request_index=0, - function_behavior=FunctionChoiceBehavior.Auto(filters={"included_functions": ["function"]}), + tool_call_mock, + chat_history_mock, + arguments, + 1, + 0, + FunctionChoiceBehavior.Auto(filters={"included_functions": ["function"]}), ) @@ -314,12 +313,12 @@ async def test_invoke_function_call_throws_during_invoke(kernel: Kernel, get_too patch("semantic_kernel.kernel.Kernel.get_function", return_value=func_mock), ): await kernel.invoke_function_call( - function_call=tool_call_mock, - chat_history=chat_history_mock, - arguments=arguments, - function_call_count=1, - request_index=0, - function_behavior=FunctionChoiceBehavior.Auto(), + tool_call_mock, + chat_history_mock, + arguments, + 1, + 0, + FunctionChoiceBehavior.Auto(), ) @@ -340,12 +339,12 @@ async def test_invoke_function_call_non_allowed_func_throws(kernel: Kernel, get_ with patch("semantic_kernel.kernel.logger", autospec=True): await kernel.invoke_function_call( - function_call=tool_call_mock, - chat_history=chat_history_mock, - arguments=arguments, - function_call_count=1, - request_index=0, - function_behavior=FunctionChoiceBehavior.Auto(filters={"included_functions": ["unknown"]}), + tool_call_mock, + chat_history_mock, + arguments, + 1, + 0, + FunctionChoiceBehavior.Auto(filters={"included_functions": ["unknown"]}), ) @@ -369,12 +368,12 @@ async def test_invoke_function_call_no_name_throws(kernel: Kernel, get_tool_call patch("semantic_kernel.kernel.logger", autospec=True), ): await kernel.invoke_function_call( - function_call=tool_call_mock, - chat_history=chat_history_mock, - arguments=arguments, - function_call_count=1, - request_index=0, - function_behavior=FunctionChoiceBehavior.Auto(), + tool_call_mock, + chat_history_mock, + arguments, + 1, + 0, + FunctionChoiceBehavior.Auto(), ) @@ -400,12 +399,12 @@ async def test_invoke_function_call_not_enough_parsed_args(kernel: Kernel, get_t patch("semantic_kernel.kernel.Kernel.get_function", return_value=func_mock), ): await kernel.invoke_function_call( - function_call=tool_call_mock, - chat_history=chat_history_mock, - arguments=arguments, - function_call_count=1, - request_index=0, - function_behavior=FunctionChoiceBehavior.Auto(), + tool_call_mock, + chat_history_mock, + arguments, + 1, + 0, + FunctionChoiceBehavior.Auto(), ) @@ -435,12 +434,12 @@ async def test_invoke_function_call_with_continuation_on_malformed_arguments(ker with patch("semantic_kernel.kernel.logger", autospec=True) as logger_mock: await kernel.invoke_function_call( - function_call=tool_call_mock, - chat_history=chat_history_mock, - arguments=arguments, - function_call_count=1, - request_index=0, - function_behavior=FunctionChoiceBehavior.Auto(), + tool_call_mock, + chat_history_mock, + arguments, + 1, + 0, + FunctionChoiceBehavior.Auto(), ) logger_mock.info.assert_any_call( @@ -484,11 +483,6 @@ def test_plugin_name_error(kernel: Kernel): kernel.add_plugin(" ", None) -def test_plugin_name_not_string_error(kernel: Kernel): - with pytest.raises(TypeError): - kernel.add_plugin(" ", plugin_name=Path(__file__).parent) - - def 
test_plugins_add_plugins(kernel: Kernel): plugin1 = KernelPlugin(name="TestPlugin") plugin2 = KernelPlugin(name="TestPlugin2") @@ -761,7 +755,7 @@ def test_instantiate_prompt_execution_settings_through_kernel(kernel_with_servic def test_experimental_class_has_decorator_and_flag(experimental_plugin_class): assert hasattr(experimental_plugin_class, "is_experimental") assert experimental_plugin_class.is_experimental - assert "This class is marked as 'experimental' and may change in the future" in experimental_plugin_class.__doc__ + assert "This class is experimental and may change in the future." in experimental_plugin_class.__doc__ # endregion diff --git a/python/tests/unit/processes/dapr_runtime/test_process_actor.py b/python/tests/unit/processes/dapr_runtime/test_process_actor.py index f01266a9fe48..78fcb464334f 100644 --- a/python/tests/unit/processes/dapr_runtime/test_process_actor.py +++ b/python/tests/unit/processes/dapr_runtime/test_process_actor.py @@ -27,7 +27,7 @@ def actor_context(): actor_client=MagicMock(), ) kernel_mock = MagicMock() - actor = ProcessActor(runtime_context, actor_id, kernel=kernel_mock, factories={}) + actor = ProcessActor(runtime_context, actor_id, kernel=kernel_mock) actor._state_manager = AsyncMock() actor._state_manager.try_add_state = AsyncMock(return_value=True) diff --git a/python/tests/unit/processes/dapr_runtime/test_step_actor.py b/python/tests/unit/processes/dapr_runtime/test_step_actor.py index 786d14e0f60f..4a5c306dc2fa 100644 --- a/python/tests/unit/processes/dapr_runtime/test_step_actor.py +++ b/python/tests/unit/processes/dapr_runtime/test_step_actor.py @@ -6,28 +6,18 @@ import pytest from dapr.actor import ActorId -from semantic_kernel.processes.dapr_runtime.actors.actor_state_key import ActorStateKeys from semantic_kernel.processes.dapr_runtime.actors.step_actor import StepActor from semantic_kernel.processes.dapr_runtime.dapr_step_info import DaprStepInfo from semantic_kernel.processes.kernel_process.kernel_process_step_state import KernelProcessStepState from semantic_kernel.processes.process_message import ProcessMessage -class FakeStep: - async def activate(self, state): - self.activated_state = state - - -class FakeState: - pass - - @pytest.fixture def actor_context(): ctx = MagicMock() actor_id = ActorId("test_actor") kernel = MagicMock() - return StepActor(ctx, actor_id, kernel, factories={}) + return StepActor(ctx, actor_id, kernel) async def test_initialize_step(actor_context): @@ -107,98 +97,3 @@ async def test_process_incoming_messages(actor_context): expected_messages = [] expected_messages = [json.dumps(msg.model_dump()) for msg in list(actor_context.incoming_messages.queue)] mock_try_add_state.assert_any_call("incomingMessagesState", expected_messages) - - -async def test_activate_step_with_factory_creates_state(actor_context): - fake_step_instance = FakeStep() - fake_step_instance.activate = AsyncMock(side_effect=fake_step_instance.activate) - - fake_plugin = MagicMock() - fake_plugin.functions = {"test_function": lambda x: x} - - with ( - patch( - "semantic_kernel.processes.dapr_runtime.actors.step_actor.get_generic_state_type", - return_value=FakeState, - ), - patch( - "semantic_kernel.processes.dapr_runtime.actors.step_actor.get_fully_qualified_name", - return_value="FakeStateFullyQualified", - ), - patch( - "semantic_kernel.processes.dapr_runtime.actors.step_actor.find_input_channels", - return_value={"channel": {"input": "value"}}, - ), - ): - actor_context.factories = {"FakeStep": lambda: fake_step_instance} - 
actor_context.inner_step_type = "FakeStep" - actor_context.step_info = DaprStepInfo( - state=KernelProcessStepState(name="default_name", id="step_123"), - inner_step_python_type="FakeStep", - edges={}, - ) - actor_context.kernel.add_plugin = MagicMock(return_value=fake_plugin) - actor_context._state_manager.try_add_state = AsyncMock() - actor_context._state_manager.save_state = AsyncMock() - - await actor_context.activate_step() - - actor_context.kernel.add_plugin.assert_called_once_with(fake_step_instance, "default_name") - assert actor_context.functions == fake_plugin.functions - assert actor_context.initial_inputs == {"channel": {"input": "value"}} - assert actor_context.inputs == {"channel": {"input": "value"}} - assert actor_context.step_state is not None - assert isinstance(actor_context.step_state.state, FakeState) - fake_step_instance.activate.assert_awaited_once_with(actor_context.step_state) - - -async def test_activate_step_with_factory_uses_existing_state(actor_context): - fake_step_instance = FakeStep() - fake_step_instance.activate = AsyncMock(side_effect=fake_step_instance.activate) - - fake_plugin = MagicMock() - fake_plugin.functions = {"test_function": lambda x: x} - - pre_existing_state = KernelProcessStepState(name="ExistingState", id="ExistingState", state=None) - - with ( - patch.object( - KernelProcessStepState, - "model_dump", - return_value={"name": "ExistingState", "id": "ExistingState", "state": None}, - ), - patch( - "semantic_kernel.processes.dapr_runtime.actors.step_actor.get_generic_state_type", - return_value=FakeState, - ), - patch( - "semantic_kernel.processes.dapr_runtime.actors.step_actor.get_fully_qualified_name", - return_value="FakeStateFullyQualified", - ), - patch( - "semantic_kernel.processes.dapr_runtime.actors.step_actor.find_input_channels", - return_value={"channel": {"input": "value"}}, - ), - ): - actor_context.factories = {"FakeStep": lambda: fake_step_instance} - actor_context.inner_step_type = "FakeStep" - actor_context.step_info = DaprStepInfo(state=pre_existing_state, inner_step_python_type="FakeStep", edges={}) - actor_context.kernel.add_plugin = MagicMock(return_value=fake_plugin) - actor_context._state_manager.try_add_state = AsyncMock() - actor_context._state_manager.save_state = AsyncMock() - - await actor_context.activate_step() - - actor_context.kernel.add_plugin.assert_called_once_with(fake_step_instance, pre_existing_state.name) - assert actor_context.functions == fake_plugin.functions - assert actor_context.initial_inputs == {"channel": {"input": "value"}} - assert actor_context.inputs == {"channel": {"input": "value"}} - actor_context._state_manager.try_add_state.assert_any_await( - ActorStateKeys.StepStateType.value, "FakeStateFullyQualified" - ) - actor_context._state_manager.try_add_state.assert_any_await( - ActorStateKeys.StepStateJson.value, json.dumps(pre_existing_state.model_dump()) - ) - actor_context._state_manager.save_state.assert_awaited_once() - assert isinstance(actor_context.step_state.state, FakeState) - fake_step_instance.activate.assert_awaited_once_with(actor_context.step_state) diff --git a/python/tests/unit/processes/kernel_process/test_kernel_process_event.py b/python/tests/unit/processes/kernel_process/test_kernel_process_event.py index 8b6fbeb43440..f5bbecd048af 100644 --- a/python/tests/unit/processes/kernel_process/test_kernel_process_event.py +++ b/python/tests/unit/processes/kernel_process/test_kernel_process_event.py @@ -34,7 +34,7 @@ def test_initialization_with_visibility(): # Assert assert 
event.id == event_id assert event.data == event_data - assert event.visibility == KernelProcessEventVisibility.Public + assert event.visibility == KernelProcessEventVisibility.Public.value def test_invalid_visibility(): diff --git a/python/tests/unit/processes/local_runtime/test_local_kernel_process_context.py b/python/tests/unit/processes/local_runtime/test_local_kernel_process_context.py index a60a65abfcc5..2f73faaf2741 100644 --- a/python/tests/unit/processes/local_runtime/test_local_kernel_process_context.py +++ b/python/tests/unit/processes/local_runtime/test_local_kernel_process_context.py @@ -26,7 +26,6 @@ def mock_process(): process = MagicMock(spec=KernelProcess) process.state = state process.steps = [step_info] - process.factories = {} return process diff --git a/python/tests/unit/processes/local_runtime/test_local_process.py b/python/tests/unit/processes/local_runtime/test_local_process.py index 00844643fa17..605f88255dd8 100644 --- a/python/tests/unit/processes/local_runtime/test_local_process.py +++ b/python/tests/unit/processes/local_runtime/test_local_process.py @@ -271,7 +271,6 @@ def test_initialize_process(mock_process, mock_kernel, build_model): mock_local_step_init.assert_called_with( step_info=step_info, kernel=mock_kernel, - factories={}, parent_process_id=local_process.id, ) @@ -311,7 +310,7 @@ async def test_handle_message_with_valid_event_id(mock_process_with_output_edges assert isinstance(event, KernelProcessEvent) assert event.id == "valid_event_id" assert event.data == message.target_event_data - assert event.visibility == KernelProcessEventVisibility.Internal + assert event.visibility == KernelProcessEventVisibility.Internal.value END_PROCESS_ID = "END" diff --git a/python/tests/unit/processes/test_process_edge_builder.py b/python/tests/unit/processes/test_process_edge_builder.py index 6d6ab89326f2..8be3d5445a51 100644 --- a/python/tests/unit/processes/test_process_edge_builder.py +++ b/python/tests/unit/processes/test_process_edge_builder.py @@ -7,9 +7,7 @@ from semantic_kernel.processes.process_builder import ProcessBuilder from semantic_kernel.processes.process_edge_builder import ProcessEdgeBuilder -from semantic_kernel.processes.process_function_target_builder import ( - ProcessFunctionTargetBuilder, -) +from semantic_kernel.processes.process_function_target_builder import ProcessFunctionTargetBuilder from semantic_kernel.processes.process_step_builder import ProcessStepBuilder from semantic_kernel.processes.process_step_edge_builder import ProcessStepEdgeBuilder @@ -90,55 +88,12 @@ def test_send_event_to_with_step_builder(): assert linked_edge_builder.target == edge_builder.target -def test_send_event_to_step_with_multiple_functions(): - from semantic_kernel.functions.kernel_function_metadata import ( - KernelFunctionMetadata, - ) # noqa: F401 - - # Arrange - source = MagicMock(spec=ProcessBuilder) - source.link_to = MagicMock() - - target_step = ProcessStepBuilder(name="test_step") - target_step.functions_dict = { - "func_1": MagicMock(spec=KernelFunctionMetadata), - "func_2": MagicMock(spec=KernelFunctionMetadata), - } - - event_id = "event_004" - edge_builder = ProcessEdgeBuilder(source=source, event_id=event_id) - - # Act - Create edges to both functions in the step - result1 = edge_builder.send_event_to(target_step, function_name="func_1", parameter_name="input_param1") - result2 = edge_builder.send_event_to(target_step, function_name="func_2", parameter_name="input_param2") - - # Assert - # Verify both edges were created - assert 
len(source.link_to.call_args_list) == 2 - - # Check first edge - first_edge = source.link_to.call_args_list[0][0][1] - assert isinstance(first_edge, ProcessStepEdgeBuilder) - assert first_edge.target.function_name == "func_1" - assert first_edge.target.parameter_name == "input_param1" - assert first_edge.target.step == target_step - assert isinstance(result1, ProcessEdgeBuilder) - - # Check second edge - second_edge = source.link_to.call_args_list[1][0][1] - assert isinstance(second_edge, ProcessStepEdgeBuilder) - assert second_edge.target.function_name == "func_2" - assert second_edge.target.parameter_name == "input_param2" - assert second_edge.target.step == target_step - assert isinstance(result2, ProcessEdgeBuilder) - - def test_send_event_to_creates_step_edge(): # Arrange source = MagicMock(spec=ProcessBuilder) source.link_to = MagicMock() target = MagicMock(spec=ProcessFunctionTargetBuilder) - event_id = "event_005" + event_id = "event_004" edge_builder = ProcessEdgeBuilder(source=source, event_id=event_id) # Act @@ -155,7 +110,7 @@ def test_send_event_to_creates_step_edge(): def test_send_event_to_raises_error_on_invalid_target(): # Arrange source = MagicMock(spec=ProcessBuilder) - event_id = "event_006" + event_id = "event_005" edge_builder = ProcessEdgeBuilder(source=source, event_id=event_id) # Act & Assert diff --git a/python/tests/unit/processes/test_process_step_builder.py b/python/tests/unit/processes/test_process_step_builder.py index 63a38e9ffcab..f0b5a87d4eb4 100644 --- a/python/tests/unit/processes/test_process_step_builder.py +++ b/python/tests/unit/processes/test_process_step_builder.py @@ -1,6 +1,5 @@ # Copyright (c) Microsoft. All rights reserved. -from enum import Enum from unittest.mock import MagicMock import pytest @@ -15,10 +14,6 @@ from semantic_kernel.processes.process_step_edge_builder import ProcessStepEdgeBuilder -class TestFunctionEnum(Enum): - MY_FUNCTION = "my_function" - - class MockKernelProcessStep(KernelProcessStep): """A mock class to use as a step type.""" @@ -182,24 +177,3 @@ def test_link_to_multiple_edges(): # Assert assert step_builder.edges[event_id] == [edge_builder_1, edge_builder_2] - - -@pytest.mark.parametrize( - "function_name, expected_function_name", - [ - ("my_function", "my_function"), - (TestFunctionEnum.MY_FUNCTION, TestFunctionEnum.MY_FUNCTION.value), - ], -) -def test_on_function_result(function_name, expected_function_name): - # Arrange - name = "test_step" - step_builder = ProcessStepBuilder(name=name) - - # Act - edge_builder = step_builder.on_function_result(function_name=function_name) - - # Assert - assert isinstance(edge_builder, ProcessStepEdgeBuilder) - assert edge_builder.source == step_builder - assert edge_builder.event_id == f"{step_builder.event_namespace}.{expected_function_name}.OnResult" diff --git a/python/tests/unit/prompt_template/test_prompt_templates.py b/python/tests/unit/prompt_template/test_prompt_templates.py index 80e345f2431c..7a0247d2d131 100644 --- a/python/tests/unit/prompt_template/test_prompt_templates.py +++ b/python/tests/unit/prompt_template/test_prompt_templates.py @@ -326,4 +326,3 @@ def test_from_yaml_with_function_choice_behavior(): def test_multiple_param_in_prompt(): func = KernelFunctionFromPrompt("test", prompt="{{$param}}{{$param}}") assert len(func.parameters) == 1 - assert func.metadata.parameters[0].schema_data == {"type": "object"} diff --git a/python/tests/unit/test_serialization.py b/python/tests/unit/test_serialization.py index c2bc66eb0c40..4f206899ea4d 100644 --- 
a/python/tests/unit/test_serialization.py
+++ b/python/tests/unit/test_serialization.py
@@ -113,14 +113,14 @@ def constructor(cls: t.Type[_Serializable]) -> _Serializable:
 
 PROTOCOLS = [
-    ConversationSummaryPlugin,
+    pytest.param(ConversationSummaryPlugin, marks=pytest.mark.xfail(reason="Contains data")),
     HttpPlugin,
     MathPlugin,
     TextMemoryPlugin,
     TextPlugin,
     TimePlugin,
     WaitPlugin,
-    WebSearchEnginePlugin,
+    pytest.param(WebSearchEnginePlugin, marks=pytest.mark.xfail(reason="Contains data")),
 ]
 
 BASE_CLASSES = [
@@ -146,20 +146,17 @@ def constructor(cls: t.Type[_Serializable]) -> _Serializable:
     KernelParameterMetadata,
     KernelFunctionMetadata,
     ChatHistory,
-]
-KERNEL_FUNCTION_OPTIONAL = [KernelFunction]
-KERNEL_FUNCTION_REQUIRED = [
     pytest.param(
         KernelFunction,
         marks=pytest.mark.xfail(reason="Need to implement Pickle serialization."),
-    )
+    ),
 ]
 
 
 class TestUsageInPydanticFields:
     @pytest.mark.parametrize(
         "kernel_type",
-        BASE_CLASSES + PROTOCOLS + ENUMS + PYDANTIC_MODELS + STATELESS_CLASSES + KERNEL_FUNCTION_OPTIONAL,
+        BASE_CLASSES + PROTOCOLS + ENUMS + PYDANTIC_MODELS + STATELESS_CLASSES,
     )
     def test_usage_as_optional_field(
         self,
@@ -173,11 +170,11 @@ def test_usage_as_optional_field(
         class TestModel(KernelBaseModel):
             """A test model."""
 
-            field: kernel_type | None = None
+            field: t.Optional[kernel_type] = None
 
         assert_serializable(TestModel(), TestModel)
 
-    @pytest.mark.parametrize("kernel_type", PYDANTIC_MODELS + STATELESS_CLASSES + KERNEL_FUNCTION_REQUIRED)
+    @pytest.mark.parametrize("kernel_type", PYDANTIC_MODELS + STATELESS_CLASSES)
     def test_usage_as_required_field(
         self,
         kernel_factory: t.Callable[[t.Type[KernelBaseModelFieldT]], KernelBaseModelFieldT],
diff --git a/python/tests/unit/utils/agent_diagnostics/test_agent_decorated.py b/python/tests/unit/utils/agent_diagnostics/test_agent_decorated.py
index 36fc5f9ee2b4..747bf38c52b1 100644
--- a/python/tests/unit/utils/agent_diagnostics/test_agent_decorated.py
+++ b/python/tests/unit/utils/agent_diagnostics/test_agent_decorated.py
@@ -3,7 +3,7 @@
 import pytest
 
 from semantic_kernel.agents.chat_completion.chat_completion_agent import ChatCompletionAgent
-from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent
+from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase
 
 pytestmark = pytest.mark.parametrize(
     "decorated_method, expected_attribute",
@@ -22,12 +22,12 @@
     # endregion
     # region OpenAIAssistantAgent
     pytest.param(
-        OpenAIAssistantAgent.invoke,
+        OpenAIAssistantBase.invoke,
         "__agent_diagnostics__",
         id="OpenAIAssistantBase.invoke",
     ),
     pytest.param(
-        OpenAIAssistantAgent.invoke_stream,
+        OpenAIAssistantBase.invoke_stream,
         "__agent_diagnostics__",
         id="OpenAIAssistantBase.invoke_stream",
     ),
diff --git a/python/tests/unit/utils/agent_diagnostics/test_trace_chat_completion_agent.py b/python/tests/unit/utils/agent_diagnostics/test_trace_chat_completion_agent.py
index f222e51227a3..3c1df16efa14 100644
--- a/python/tests/unit/utils/agent_diagnostics/test_trace_chat_completion_agent.py
+++ b/python/tests/unit/utils/agent_diagnostics/test_trace_chat_completion_agent.py
@@ -18,7 +18,7 @@ async def test_chat_completion_agent_invoke(mock_tracer, chat_history):
     async for _ in chat_completion_agent.invoke(chat_history):
         pass
     # Assert
-    mock_tracer.start_as_current_span.assert_called_once_with(f"invoke_agent {chat_completion_agent.name}")
+    mock_tracer.start_as_current_span.assert_called_once_with(chat_completion_agent.name)
 
 
 @patch("semantic_kernel.utils.telemetry.agent_diagnostics.decorators.tracer")
@@ -30,4 +30,4 @@ async def test_chat_completion_agent_invoke_stream(mock_tracer, chat_history):
     async for _ in chat_completion_agent.invoke_stream(chat_history):
         pass
     # Assert
-    mock_tracer.start_as_current_span.assert_called_once_with(f"invoke_agent {chat_completion_agent.name}")
+    mock_tracer.start_as_current_span.assert_called_once_with(chat_completion_agent.name)
diff --git a/python/tests/unit/utils/agent_diagnostics/test_trace_open_ai_assistant_agent.py b/python/tests/unit/utils/agent_diagnostics/test_trace_open_ai_assistant_agent.py
index 4c20e4d6da42..d4a7ae6134ef 100644
--- a/python/tests/unit/utils/agent_diagnostics/test_trace_open_ai_assistant_agent.py
+++ b/python/tests/unit/utils/agent_diagnostics/test_trace_open_ai_assistant_agent.py
@@ -1,71 +1,33 @@
 # Copyright (c) Microsoft. All rights reserved.
 
-from unittest.mock import AsyncMock, patch
+from unittest.mock import patch
 
-from openai import AsyncOpenAI
-from openai.types.beta.assistant import Assistant
+import pytest
 
 from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent
-from semantic_kernel.contents.chat_message_content import ChatMessageContent
-from semantic_kernel.contents.utils.author_role import AuthorRole
+from semantic_kernel.exceptions.agent_exceptions import AgentInitializationException
 
 
 @patch("semantic_kernel.utils.telemetry.agent_diagnostics.decorators.tracer")
 async def test_open_ai_assistant_agent_invoke(mock_tracer, chat_history, openai_unit_test_env):
     # Arrange
-    client = AsyncMock(spec=AsyncOpenAI)
-    definition = AsyncMock(spec=Assistant)
-    definition.name = "agentName"
-    definition.description = "agentDescription"
-    definition.id = "agentId"
-    definition.instructions = "agentInstructions"
-    definition.tools = []
-    definition.model = "agentModel"
-    definition.temperature = 1.0
-    definition.top_p = 1.0
-    definition.metadata = {}
-    open_ai_assistant_agent = OpenAIAssistantAgent(client=client, definition=definition)
-
-    async def fake_invoke(*args, **kwargs):
-        yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content")
-
+    open_ai_assistant_agent = OpenAIAssistantAgent()
     # Act
-    with patch(
-        "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke",
-        side_effect=fake_invoke,
-    ):
-        async for item in open_ai_assistant_agent.invoke("thread_id"):
+    with pytest.raises(AgentInitializationException):
+        async for _ in open_ai_assistant_agent.invoke(chat_history):
             pass
     # Assert
-    mock_tracer.start_as_current_span.assert_called_once_with(f"invoke_agent {open_ai_assistant_agent.name}")
+    mock_tracer.start_as_current_span.assert_called_once_with(open_ai_assistant_agent.name)
 
 
 @patch("semantic_kernel.utils.telemetry.agent_diagnostics.decorators.tracer")
 async def test_open_ai_assistant_agent_invoke_stream(mock_tracer, chat_history, openai_unit_test_env):
     # Arrange
-    client = AsyncMock(spec=AsyncOpenAI)
-    definition = AsyncMock(spec=Assistant)
-    definition.name = "agentName"
-    definition.description = "agentDescription"
-    definition.id = "agentId"
-    definition.instructions = "agentInstructions"
-    definition.tools = []
-    definition.model = "agentModel"
-    definition.temperature = 1.0
-    definition.top_p = 1.0
-    definition.metadata = {}
-    open_ai_assistant_agent = OpenAIAssistantAgent(client=client, definition=definition)
-
-    async def fake_invoke(*args, **kwargs):
-        yield True, ChatMessageContent(role=AuthorRole.ASSISTANT, content="content")
-
+    open_ai_assistant_agent = OpenAIAssistantAgent()
     # Act
-    with patch(
-        "semantic_kernel.agents.open_ai.assistant_thread_actions.AssistantThreadActions.invoke_stream",
-        side_effect=fake_invoke,
-    ):
-        async for item in open_ai_assistant_agent.invoke_stream("thread_id"):
+    with pytest.raises(AgentInitializationException):
+        async for _ in open_ai_assistant_agent.invoke_stream(chat_history):
             pass
     # Assert
-    mock_tracer.start_as_current_span.assert_called_once_with(f"invoke_agent {open_ai_assistant_agent.name}")
+    mock_tracer.start_as_current_span.assert_called_once_with(open_ai_assistant_agent.name)
diff --git a/python/tests/unit/utils/test_feature_stage_decorator.py b/python/tests/unit/utils/test_feature_stage_decorator.py
deleted file mode 100644
index 7c82fa12cdc2..000000000000
--- a/python/tests/unit/utils/test_feature_stage_decorator.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# Copyright (c) Microsoft. All rights reserved.
-
-from semantic_kernel.utils.feature_stage_decorator import experimental, release_candidate
-
-
-@experimental
-def my_function() -> None:
-    """This is a sample function docstring."""
-    pass
-
-
-@release_candidate
-def my_function_release_candidate() -> None:
-    """This is a sample function docstring."""
-    pass
-
-
-@release_candidate
-def my_function_release_candidate_no_doc_string() -> None:
-    pass
-
-
-@release_candidate(version="1.0.0-rc2")
-def my_function_release_candidate_with_version() -> None:
-    """This is a sample function docstring."""
-    pass
-
-
-@experimental
-def my_function_no_doc_string() -> None:
-    pass
-
-
-@experimental
-class MyExperimentalClass:
-    """A class that is still evolving rapidly."""
-
-    pass
-
-
-@release_candidate
-class MyRCClass:
-    """A class that is nearly final, but still in release-candidate stage."""
-
-    pass
-
-
-@release_candidate(version="1.0.0-rc2")
-class MyRCClassTwo:
-    """A class that is nearly final, but still in release-candidate stage."""
-
-    pass
-
-
-def test_function_experimental_decorator():
-    assert (
-        my_function.__doc__
-        == "This is a sample function docstring.\n\nNote: This function is marked as 'experimental' and may change in the future."  # noqa: E501
-    )
-    assert hasattr(my_function, "is_experimental")
-    assert my_function.is_experimental is True
-
-
-def test_function_experimental_decorator_with_no_doc_string():
-    assert (
-        my_function_no_doc_string.__doc__
-        == "Note: This function is marked as 'experimental' and may change in the future."
-    )
-    assert hasattr(my_function_no_doc_string, "is_experimental")
-    assert my_function_no_doc_string.is_experimental is True
-
-
-def test_function_release_candidate_decorator():
-    assert (
-        "Features marked with this status are nearing completion and are considered"
-        in my_function_release_candidate_no_doc_string.__doc__
-    )
-    assert hasattr(my_function_release_candidate, "is_release_candidate")
-    assert my_function_release_candidate.is_release_candidate is True
-    assert "Version:" in my_function_release_candidate_no_doc_string.__doc__
-
-
-def test_function_release_candidate_decorator_and_version():
-    assert (
-        "Features marked with this status are nearing completion and are considered"
-        in my_function_release_candidate_with_version.__doc__
-    )
-    assert hasattr(my_function_release_candidate, "is_release_candidate")
-    assert my_function_release_candidate.is_release_candidate is True
-    assert "Version:" in my_function_release_candidate_with_version.__doc__
-
-
-def test_function_release_candidate_decorator_with_no_doc_string():
-    assert (
-        "Features marked with this status are nearing completion" in my_function_release_candidate_no_doc_string.__doc__
-    )
-    assert hasattr(my_function_release_candidate_no_doc_string, "is_release_candidate")
-    assert my_function_release_candidate_no_doc_string.is_release_candidate is True
-    assert "Version:" in my_function_release_candidate_no_doc_string.__doc__
-
-
-def test_class_experimental_decorator():
-    assert MyExperimentalClass.__doc__ == (
-        "A class that is still evolving rapidly.\n\nNote: This class is marked as "
-        "'experimental' and may change in the future."
-    )
-    assert hasattr(MyExperimentalClass, "is_experimental")
-    assert MyExperimentalClass.is_experimental is True
-
-
-def test_class_release_candidate_decorator():
-    assert "Features marked with this status are nearing completion" in MyRCClass.__doc__
-    assert hasattr(MyRCClass, "is_release_candidate")
-    assert MyRCClass.is_release_candidate is True
-    assert "Version:" in MyRCClass.__doc__
-
-
-def test_class_release_candidate_decorator_with_version():
-    assert "Features marked with this status are nearing completion" in MyRCClassTwo.__doc__
-    expected_version = "1.0.0-rc2"
-    assert expected_version in MyRCClassTwo.__doc__
-    assert hasattr(MyRCClassTwo, "is_release_candidate")
-    assert MyRCClassTwo.is_release_candidate is True
-    assert "Version:" in MyRCClassTwo.__doc__
diff --git a/python/uv.lock b/python/uv.lock
index 696e0d1cf6ef..6daaa60a388a 100644
--- a/python/uv.lock
+++ b/python/uv.lock
@@ -22,7 +22,7 @@ supported-markers = [
 
 [[package]]
 name = "accelerate"
-version = "1.4.0"
+version = "1.2.1"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "huggingface-hub", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" },
@@ -33,23 +33,23 @@ dependencies = [
     { name = "safetensors", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" },
     { name = "torch", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/8f/02/24a4c4edb9cf0f1e0bc32bb6829e2138f1cc201442e7a24f0daf93b8a15a/accelerate-1.4.0.tar.gz", hash = "sha256:37d413e1b64cb8681ccd2908ae211cf73e13e6e636a2f598a96eccaa538773a5", size = 348745 }
+sdist = { url = "https://files.pythonhosted.org/packages/42/09/7947691b7d44bfc739da4a44cc47d6a6d75e6fe9adf047c5234d7cb6be64/accelerate-1.2.1.tar.gz", hash =
"sha256:03e161fc69d495daf2b9b5c8d5b43d06e2145520c04727b5bda56d49f1a43ab5", size = 341652 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/f6/791b9d7eb371a2f385da3b7f1769ced72ead7bf09744637ea2985c83d7ee/accelerate-1.4.0-py3-none-any.whl", hash = "sha256:f6e1e7dfaf9d799a20a1dc45efbf4b1546163eac133faa5acd0d89177c896e55", size = 342129 }, + { url = "https://files.pythonhosted.org/packages/c2/60/a585c806d6c0ec5f8149d44eb202714792802f484e6e2b1bf96b23bd2b00/accelerate-1.2.1-py3-none-any.whl", hash = "sha256:be1cbb958cf837e7cdfbde46b812964b1b8ae94c9c7d94d921540beafcee8ddf", size = 336355 }, ] [[package]] name = "aiohappyeyeballs" -version = "2.4.6" +version = "2.4.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/08/07/508f9ebba367fc3370162e53a3cfd12f5652ad79f0e0bfdf9f9847c6f159/aiohappyeyeballs-2.4.6.tar.gz", hash = "sha256:9b05052f9042985d32ecbe4b59a77ae19c006a78f1344d7fdad69d28ded3d0b0", size = 21726 } +sdist = { url = "https://files.pythonhosted.org/packages/7f/55/e4373e888fdacb15563ef6fa9fa8c8252476ea071e96fb46defac9f18bf2/aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745", size = 21977 } wheels = [ - { url = "https://files.pythonhosted.org/packages/44/4c/03fb05f56551828ec67ceb3665e5dc51638042d204983a03b0a1541475b6/aiohappyeyeballs-2.4.6-py3-none-any.whl", hash = "sha256:147ec992cf873d74f5062644332c539fcd42956dc69453fe5204195e560517e1", size = 14543 }, + { url = "https://files.pythonhosted.org/packages/b9/74/fbb6559de3607b3300b9be3cc64e97548d55678e44623db17820dbd20002/aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8", size = 14756 }, ] [[package]] name = "aiohttp" -version = "3.11.13" +version = "3.11.11" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -61,110 +61,68 @@ dependencies = [ { name = "propcache", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "yarl", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b3/3f/c4a667d184c69667b8f16e0704127efc5f1e60577df429382b4d95fd381e/aiohttp-3.11.13.tar.gz", hash = "sha256:8ce789231404ca8fff7f693cdce398abf6d90fd5dae2b1847477196c243b1fbb", size = 7674284 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f2/49/18bde4fbe1f98a12fb548741e65b27c5f0991c1af4ad15c86b537a4ce94a/aiohttp-3.11.13-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4fe27dbbeec445e6e1291e61d61eb212ee9fed6e47998b27de71d70d3e8777d", size = 708941 }, - { url = "https://files.pythonhosted.org/packages/99/24/417e5ab7074f5c97c9a794b6acdc59f47f2231d43e4d5cec06150035e61e/aiohttp-3.11.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9e64ca2dbea28807f8484c13f684a2f761e69ba2640ec49dacd342763cc265ef", size = 468823 }, - { url = "https://files.pythonhosted.org/packages/76/93/159d3a2561bc6d64d32f779d08b17570b1c5fe55b985da7e2df9b3a4ff8f/aiohttp-3.11.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9840be675de208d1f68f84d578eaa4d1a36eee70b16ae31ab933520c49ba1325", size = 455984 }, - { url = 
"https://files.pythonhosted.org/packages/18/bc/ed0dce45da90d4618ae14e677abbd704aec02e0f54820ea3815c156f0759/aiohttp-3.11.13-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28a772757c9067e2aee8a6b2b425d0efaa628c264d6416d283694c3d86da7689", size = 1585022 }, - { url = "https://files.pythonhosted.org/packages/75/10/c1e6d59030fcf04ccc253193607b5b7ced0caffd840353e109c51134e5e9/aiohttp-3.11.13-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b88aca5adbf4625e11118df45acac29616b425833c3be7a05ef63a6a4017bfdb", size = 1632761 }, - { url = "https://files.pythonhosted.org/packages/2d/8e/da1a20fbd2c961f824dc8efeb8d31c32ed4af761c87de83032ad4c4f5237/aiohttp-3.11.13-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce10ddfbe26ed5856d6902162f71b8fe08545380570a885b4ab56aecfdcb07f4", size = 1668720 }, - { url = "https://files.pythonhosted.org/packages/fa/9e/d0bbdc82236c3fe43b28b3338a13ef9b697b0f7a875b33b950b975cab1f6/aiohttp-3.11.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa48dac27f41b36735c807d1ab093a8386701bbf00eb6b89a0f69d9fa26b3671", size = 1589941 }, - { url = "https://files.pythonhosted.org/packages/ed/14/248ed0385baeee854e495ca7f33b48bb151d1b226ddbf1585bdeb2301fbf/aiohttp-3.11.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89ce611b1eac93ce2ade68f1470889e0173d606de20c85a012bfa24be96cf867", size = 1544978 }, - { url = "https://files.pythonhosted.org/packages/20/b0/b2ad9d24fe85db8330034ac45dde67799af40ca2363c0c9b30126e204ef3/aiohttp-3.11.13-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:78e4dd9c34ec7b8b121854eb5342bac8b02aa03075ae8618b6210a06bbb8a115", size = 1529641 }, - { url = "https://files.pythonhosted.org/packages/11/c6/03bdcb73a67a380b9593d52613ea88edd21ddc4ff5aaf06d4f807dfa2220/aiohttp-3.11.13-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:66047eacbc73e6fe2462b77ce39fc170ab51235caf331e735eae91c95e6a11e4", size = 1558027 }, - { url = "https://files.pythonhosted.org/packages/0d/ae/e45491c8ca4d1e30ff031fb25b44842e16c326f8467026c3eb2a9c167608/aiohttp-3.11.13-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5ad8f1c19fe277eeb8bc45741c6d60ddd11d705c12a4d8ee17546acff98e0802", size = 1536991 }, - { url = "https://files.pythonhosted.org/packages/19/89/10eb37351dd2b52928a54768a70a58171e43d7914685fe3feec8f681d905/aiohttp-3.11.13-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64815c6f02e8506b10113ddbc6b196f58dbef135751cc7c32136df27b736db09", size = 1607848 }, - { url = "https://files.pythonhosted.org/packages/a4/fd/492dec170df6ea57bef4bcd26374befdc170b10ba9ac7f51a0214943c20a/aiohttp-3.11.13-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:967b93f21b426f23ca37329230d5bd122f25516ae2f24a9cea95a30023ff8283", size = 1629208 }, - { url = "https://files.pythonhosted.org/packages/70/46/ef8a02cb171d4779ca1632bc8ac0c5bb89729b091e2a3f4b895d688146b5/aiohttp-3.11.13-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cf1f31f83d16ec344136359001c5e871915c6ab685a3d8dee38e2961b4c81730", size = 1564684 }, - { url = "https://files.pythonhosted.org/packages/8a/03/b1b552d1112b72da94bd1f9f5efb8adbcbbafaa8d495fc0924cd80493f17/aiohttp-3.11.13-cp310-cp310-win32.whl", hash = "sha256:00c8ac69e259c60976aa2edae3f13d9991cf079aaa4d3cd5a49168ae3748dee3", size = 416982 }, - { url = "https://files.pythonhosted.org/packages/b0/2d/b6be8e7905ceba64121268ce28208bafe508a742c1467bf636a41d152284/aiohttp-3.11.13-cp310-cp310-win_amd64.whl", 
hash = "sha256:90d571c98d19a8b6e793b34aa4df4cee1e8fe2862d65cc49185a3a3d0a1a3996", size = 442389 }, - { url = "https://files.pythonhosted.org/packages/3b/93/8e012ae31ff1bda5d43565d6f9e0bad325ba6f3f2d78f298bd39645be8a3/aiohttp-3.11.13-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b35aab22419ba45f8fc290d0010898de7a6ad131e468ffa3922b1b0b24e9d2e", size = 709013 }, - { url = "https://files.pythonhosted.org/packages/d8/be/fc7c436678ffe547d038319add8e44fd5e33090158752e5c480aed51a8d0/aiohttp-3.11.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81cba651db8795f688c589dd11a4fbb834f2e59bbf9bb50908be36e416dc760", size = 468896 }, - { url = "https://files.pythonhosted.org/packages/d9/1c/56906111ac9d4dab4baab43c89d35d5de1dbb38085150257895005b08bef/aiohttp-3.11.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f55d0f242c2d1fcdf802c8fabcff25a9d85550a4cf3a9cf5f2a6b5742c992839", size = 455968 }, - { url = "https://files.pythonhosted.org/packages/ba/16/229d36ed27c2bb350320364efb56f906af194616cc15fc5d87f3ef21dbef/aiohttp-3.11.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4bea08a6aad9195ac9b1be6b0c7e8a702a9cec57ce6b713698b4a5afa9c2e33", size = 1686082 }, - { url = "https://files.pythonhosted.org/packages/3a/44/78fd174509c56028672e5dfef886569cfa1fced0c5fd5c4480426db19ac9/aiohttp-3.11.13-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6070bcf2173a7146bb9e4735b3c62b2accba459a6eae44deea0eb23e0035a23", size = 1744056 }, - { url = "https://files.pythonhosted.org/packages/a3/11/325145c6dce8124b5caadbf763e908f2779c14bb0bc5868744d1e5cb9cb7/aiohttp-3.11.13-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:718d5deb678bc4b9d575bfe83a59270861417da071ab44542d0fcb6faa686636", size = 1785810 }, - { url = "https://files.pythonhosted.org/packages/95/de/faba18a0af09969e10eb89fdbd4cb968bea95e75449a7fa944d4de7d1d2f/aiohttp-3.11.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f6b2c5b4a4d22b8fb2c92ac98e0747f5f195e8e9448bfb7404cd77e7bfa243f", size = 1675540 }, - { url = "https://files.pythonhosted.org/packages/ea/53/0437c46e960b79ae3b1ff74c1ec12f04bf4f425bd349c8807acb38aae3d7/aiohttp-3.11.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:747ec46290107a490d21fe1ff4183bef8022b848cf9516970cb31de6d9460088", size = 1620210 }, - { url = "https://files.pythonhosted.org/packages/04/2f/31769ed8e29cc22baaa4005bd2749a7fd0f61ad0f86024d38dff8e394cf6/aiohttp-3.11.13-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:01816f07c9cc9d80f858615b1365f8319d6a5fd079cd668cc58e15aafbc76a54", size = 1654399 }, - { url = "https://files.pythonhosted.org/packages/b0/24/acb24571815b9a86a8261577c920fd84f819178c02a75b05b1a0d7ab83fb/aiohttp-3.11.13-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a08ad95fcbd595803e0c4280671d808eb170a64ca3f2980dd38e7a72ed8d1fea", size = 1660424 }, - { url = "https://files.pythonhosted.org/packages/91/45/30ca0c3ba5bbf7592eee7489eae30437736f7ff912eaa04cfdcf74edca8c/aiohttp-3.11.13-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c97be90d70f7db3aa041d720bfb95f4869d6063fcdf2bb8333764d97e319b7d0", size = 1650415 }, - { url = "https://files.pythonhosted.org/packages/86/8d/4d887df5e732cc70349243c2c9784911979e7bd71c06f9e7717b8a896f75/aiohttp-3.11.13-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ab915a57c65f7a29353c8014ac4be685c8e4a19e792a79fe133a8e101111438e", size = 1733292 }, - { url = 
"https://files.pythonhosted.org/packages/40/c9/bd950dac0a4c84d44d8da8d6e0f9c9511d45e02cf908a4e1fca591f46a25/aiohttp-3.11.13-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:35cda4e07f5e058a723436c4d2b7ba2124ab4e0aa49e6325aed5896507a8a42e", size = 1755536 }, - { url = "https://files.pythonhosted.org/packages/32/04/aafeda6b4ed3693a44bb89eae002ebaa74f88b2265a7e68f8a31c33330f5/aiohttp-3.11.13-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:af55314407714fe77a68a9ccaab90fdb5deb57342585fd4a3a8102b6d4370080", size = 1693126 }, - { url = "https://files.pythonhosted.org/packages/a1/4f/67729187e884b0f002a0317d2cc7962a5a0416cadc95ea88ba92477290d9/aiohttp-3.11.13-cp311-cp311-win32.whl", hash = "sha256:42d689a5c0a0c357018993e471893e939f555e302313d5c61dfc566c2cad6185", size = 416800 }, - { url = "https://files.pythonhosted.org/packages/29/23/d98d491ca073ee92cc6a741be97b6b097fb06dacc5f95c0c9350787db549/aiohttp-3.11.13-cp311-cp311-win_amd64.whl", hash = "sha256:b73a2b139782a07658fbf170fe4bcdf70fc597fae5ffe75e5b67674c27434a9f", size = 442891 }, - { url = "https://files.pythonhosted.org/packages/9a/a9/6657664a55f78db8767e396cc9723782ed3311eb57704b0a5dacfa731916/aiohttp-3.11.13-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2eabb269dc3852537d57589b36d7f7362e57d1ece308842ef44d9830d2dc3c90", size = 705054 }, - { url = "https://files.pythonhosted.org/packages/3b/06/f7df1fe062d16422f70af5065b76264f40b382605cf7477fa70553a9c9c1/aiohttp-3.11.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b77ee42addbb1c36d35aca55e8cc6d0958f8419e458bb70888d8c69a4ca833d", size = 464440 }, - { url = "https://files.pythonhosted.org/packages/22/3a/8773ea866735754004d9f79e501fe988bdd56cfac7fdecbc8de17fc093eb/aiohttp-3.11.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55789e93c5ed71832e7fac868167276beadf9877b85697020c46e9a75471f55f", size = 456394 }, - { url = "https://files.pythonhosted.org/packages/7f/61/8e2f2af2327e8e475a2b0890f15ef0bbfd117e321cce1e1ed210df81bbac/aiohttp-3.11.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c929f9a7249a11e4aa5c157091cfad7f49cc6b13f4eecf9b747104befd9f56f2", size = 1682752 }, - { url = "https://files.pythonhosted.org/packages/24/ed/84fce816bc8da39aa3f6c1196fe26e47065fea882b1a67a808282029c079/aiohttp-3.11.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d33851d85537bbf0f6291ddc97926a754c8f041af759e0aa0230fe939168852b", size = 1737375 }, - { url = "https://files.pythonhosted.org/packages/d9/de/35a5ba9e3d21ebfda1ebbe66f6cc5cbb4d3ff9bd6a03e5e8a788954f8f27/aiohttp-3.11.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9229d8613bd8401182868fe95688f7581673e1c18ff78855671a4b8284f47bcb", size = 1793660 }, - { url = "https://files.pythonhosted.org/packages/ff/fe/0f650a8c7c72c8a07edf8ab164786f936668acd71786dd5885fc4b1ca563/aiohttp-3.11.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:669dd33f028e54fe4c96576f406ebb242ba534dd3a981ce009961bf49960f117", size = 1692233 }, - { url = "https://files.pythonhosted.org/packages/a8/20/185378b3483f968c6303aafe1e33b0da0d902db40731b2b2b2680a631131/aiohttp-3.11.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c1b20a1ace54af7db1f95af85da530fe97407d9063b7aaf9ce6a32f44730778", size = 1619708 }, - { url = "https://files.pythonhosted.org/packages/a4/f9/d9c181750980b17e1e13e522d7e82a8d08d3d28a2249f99207ef5d8d738f/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash 
= "sha256:5724cc77f4e648362ebbb49bdecb9e2b86d9b172c68a295263fa072e679ee69d", size = 1641802 }, - { url = "https://files.pythonhosted.org/packages/50/c7/1cb46b72b1788710343b6e59eaab9642bd2422f2d87ede18b1996e0aed8f/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:aa36c35e94ecdb478246dd60db12aba57cfcd0abcad43c927a8876f25734d496", size = 1684678 }, - { url = "https://files.pythonhosted.org/packages/71/87/89b979391de840c5d7c34e78e1148cc731b8aafa84b6a51d02f44b4c66e2/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9b5b37c863ad5b0892cc7a4ceb1e435e5e6acd3f2f8d3e11fa56f08d3c67b820", size = 1646921 }, - { url = "https://files.pythonhosted.org/packages/a7/db/a463700ac85b72f8cf68093e988538faaf4e865e3150aa165cf80ee29d6e/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e06cf4852ce8c4442a59bae5a3ea01162b8fcb49ab438d8548b8dc79375dad8a", size = 1702493 }, - { url = "https://files.pythonhosted.org/packages/b8/32/1084e65da3adfb08c7e1b3e94f3e4ded8bd707dee265a412bc377b7cd000/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5194143927e494616e335d074e77a5dac7cd353a04755330c9adc984ac5a628e", size = 1735004 }, - { url = "https://files.pythonhosted.org/packages/a0/bb/a634cbdd97ce5d05c2054a9a35bfc32792d7e4f69d600ad7e820571d095b/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afcb6b275c2d2ba5d8418bf30a9654fa978b4f819c2e8db6311b3525c86fe637", size = 1694964 }, - { url = "https://files.pythonhosted.org/packages/fd/cf/7d29db4e5c28ec316e5d2ac9ac9df0e2e278e9ea910e5c4205b9b64c2c42/aiohttp-3.11.13-cp312-cp312-win32.whl", hash = "sha256:7104d5b3943c6351d1ad7027d90bdd0ea002903e9f610735ac99df3b81f102ee", size = 411746 }, - { url = "https://files.pythonhosted.org/packages/65/a9/13e69ad4fd62104ebd94617f9f2be58231b50bb1e6bac114f024303ac23b/aiohttp-3.11.13-cp312-cp312-win_amd64.whl", hash = "sha256:47dc018b1b220c48089b5b9382fbab94db35bef2fa192995be22cbad3c5730c8", size = 438078 }, - { url = "https://files.pythonhosted.org/packages/87/dc/7d58d33cec693f1ddf407d4ab975445f5cb507af95600f137b81683a18d8/aiohttp-3.11.13-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9862d077b9ffa015dbe3ce6c081bdf35135948cb89116e26667dd183550833d1", size = 698372 }, - { url = "https://files.pythonhosted.org/packages/84/e7/5d88514c9e24fbc8dd6117350a8ec4a9314f4adae6e89fe32e3e639b0c37/aiohttp-3.11.13-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fbfef0666ae9e07abfa2c54c212ac18a1f63e13e0760a769f70b5717742f3ece", size = 461057 }, - { url = "https://files.pythonhosted.org/packages/96/1a/8143c48a929fa00c6324f85660cb0f47a55ed9385f0c1b72d4b8043acf8e/aiohttp-3.11.13-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:93a1f7d857c4fcf7cabb1178058182c789b30d85de379e04f64c15b7e88d66fb", size = 453340 }, - { url = "https://files.pythonhosted.org/packages/2f/1c/b8010e4d65c5860d62681088e5376f3c0a940c5e3ca8989cae36ce8c3ea8/aiohttp-3.11.13-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba40b7ae0f81c7029583a338853f6607b6d83a341a3dcde8bed1ea58a3af1df9", size = 1665561 }, - { url = "https://files.pythonhosted.org/packages/19/ed/a68c3ab2f92fdc17dfc2096117d1cfaa7f7bdded2a57bacbf767b104165b/aiohttp-3.11.13-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5b95787335c483cd5f29577f42bbe027a412c5431f2f80a749c80d040f7ca9f", size = 1718335 }, - { url = 
"https://files.pythonhosted.org/packages/27/4f/3a0b6160ce663b8ebdb65d1eedff60900cd7108838c914d25952fe2b909f/aiohttp-3.11.13-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7d474c5c1f0b9405c1565fafdc4429fa7d986ccbec7ce55bc6a330f36409cad", size = 1775522 }, - { url = "https://files.pythonhosted.org/packages/0b/58/9da09291e19696c452e7224c1ce8c6d23a291fe8cd5c6b247b51bcda07db/aiohttp-3.11.13-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e83fb1991e9d8982b3b36aea1e7ad27ea0ce18c14d054c7a404d68b0319eebb", size = 1677566 }, - { url = "https://files.pythonhosted.org/packages/3d/18/6184f2bf8bbe397acbbbaa449937d61c20a6b85765f48e5eddc6d84957fe/aiohttp-3.11.13-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4586a68730bd2f2b04a83e83f79d271d8ed13763f64b75920f18a3a677b9a7f0", size = 1603590 }, - { url = "https://files.pythonhosted.org/packages/04/94/91e0d1ca0793012ccd927e835540aa38cca98bdce2389256ab813ebd64a3/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fe4eb0e7f50cdb99b26250d9328faef30b1175a5dbcfd6d0578d18456bac567", size = 1618688 }, - { url = "https://files.pythonhosted.org/packages/71/85/d13c3ea2e48a10b43668305d4903838834c3d4112e5229177fbcc23a56cd/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2a8a6bc19818ac3e5596310ace5aa50d918e1ebdcc204dc96e2f4d505d51740c", size = 1658053 }, - { url = "https://files.pythonhosted.org/packages/12/6a/3242a35100de23c1e8d9e05e8605e10f34268dee91b00d9d1e278c58eb80/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f27eec42f6c3c1df09cfc1f6786308f8b525b8efaaf6d6bd76c1f52c6511f6a", size = 1616917 }, - { url = "https://files.pythonhosted.org/packages/f5/b3/3f99b6f0a9a79590a7ba5655dbde8408c685aa462247378c977603464d0a/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:2a4a13dfbb23977a51853b419141cd0a9b9573ab8d3a1455c6e63561387b52ff", size = 1685872 }, - { url = "https://files.pythonhosted.org/packages/8a/2e/99672181751f280a85e24fcb9a2c2469e8b1a0de1746b7b5c45d1eb9a999/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:02876bf2f69b062584965507b07bc06903c2dc93c57a554b64e012d636952654", size = 1715719 }, - { url = "https://files.pythonhosted.org/packages/7a/cd/68030356eb9a7d57b3e2823c8a852709d437abb0fbff41a61ebc351b7625/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b992778d95b60a21c4d8d4a5f15aaab2bd3c3e16466a72d7f9bfd86e8cea0d4b", size = 1673166 }, - { url = "https://files.pythonhosted.org/packages/03/61/425397a9a2839c609d09fdb53d940472f316a2dbeaa77a35b2628dae6284/aiohttp-3.11.13-cp313-cp313-win32.whl", hash = "sha256:507ab05d90586dacb4f26a001c3abf912eb719d05635cbfad930bdbeb469b36c", size = 410615 }, - { url = "https://files.pythonhosted.org/packages/9c/54/ebb815bc0fe057d8e7a11c086c479e972e827082f39aeebc6019dd4f0862/aiohttp-3.11.13-cp313-cp313-win_amd64.whl", hash = "sha256:5ceb81a4db2decdfa087381b5fc5847aa448244f973e5da232610304e199e7b2", size = 436452 }, -] - -[[package]] -name = "aioice" -version = "0.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "dnspython", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "ifaddr", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/33/b6/e2b0e48ccb5b04fe29265e93f14a0915f416e359c897ae87d570566c430b/aioice-0.9.0.tar.gz", hash = "sha256:fc2401b1c4b6e19372eaaeaa28fd1bd9cbf6b0e412e48625297c53b495eebd1e", size = 40324 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b6/35/d21e48d3ba25d32aba5d142d54c4491376c659dd74d052a30dd25198007b/aioice-0.9.0-py3-none-any.whl", hash = "sha256:b609597a3a5a611e0004ff04772e16aceb881d51c25c0afc4ceac05d5e50024e", size = 24177 }, -] - -[[package]] -name = "aiortc" -version = "1.10.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "aioice", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "av", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "cffi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "cryptography", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "google-crc32c", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "pyee", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "pylibsrtp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "pyopenssl", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/8a/f8/408e092748521889c9d33dddcef920afd9891cf6db4615ba6b6bfe114ff8/aiortc-1.10.1.tar.gz", hash = "sha256:64926ad86bde20c1a4dacb7c3a164e57b522606b70febe261fada4acf79641b5", size = 1179406 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/6b/74547a30d1ddcc81f905ef4ff7fcc2c89b7482cb2045688f2aaa4fa918aa/aiortc-1.10.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3bef536f38394b518aefae9dbf9cdd08f39e4c425f316f9692f0d8dc724810bd", size = 1218457 }, - { url = "https://files.pythonhosted.org/packages/46/92/b4ccf39cd18e366ace2a11dc7d98ed55967b4b325707386b5788149db15e/aiortc-1.10.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8842c02e38513d9432ef22982572833487bb015f23348fa10a690616dbf55143", size = 898855 }, - { url = "https://files.pythonhosted.org/packages/a4/e9/2676de48b493787d8b03129713e6bb2dfbacca2a565090f2a89cbad71f96/aiortc-1.10.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:954a420de01c0bf6b07a0c58b662029b1c4204ddbd8f5c4162bbdebd43f882b1", size = 1750403 }, - { url = "https://files.pythonhosted.org/packages/c3/9d/ab6d09183cdaf5df060923d9bd5c9ed5fb1802661d9401dba35f3c85a57b/aiortc-1.10.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7c0d46fb30307a9d7deb4b7d66f0b0e73b77a7221b063fb6dc78821a5d2aa1e", size = 1867886 }, - { url = "https://files.pythonhosted.org/packages/c2/71/0b5666e6b965dbd9a7f331aa827a6c3ab3eb4d582fefb686a7f4227b7954/aiortc-1.10.1-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89582f6923046f79f15d9045f432bc78191eacc95f6bed18714e86ec935188d9", size = 1893709 }, - { url = "https://files.pythonhosted.org/packages/9d/0a/8c0c78fad79ef595a0ed6e2ab413900e6bd0eac65fc5c31c9d8736bff909/aiortc-1.10.1-cp39-abi3-win32.whl", hash = "sha256:d1cbe87f740b33ffaa8e905f21092773e74916be338b64b81c8b79af4c3847eb", size = 923265 }, - { url = 
"https://files.pythonhosted.org/packages/73/12/a27dd588a4988021da88cb4d338d8ee65ac097afc14e9193ab0be4a48790/aiortc-1.10.1-cp39-abi3-win_amd64.whl", hash = "sha256:c9a5a0b23f8a77540068faec8837fa0a65b0396c20f09116bdb874b75e0b6abe", size = 1009488 }, +sdist = { url = "https://files.pythonhosted.org/packages/fe/ed/f26db39d29cd3cb2f5a3374304c713fe5ab5a0e4c8ee25a0c45cc6adf844/aiohttp-3.11.11.tar.gz", hash = "sha256:bb49c7f1e6ebf3821a42d81d494f538107610c3a705987f53068546b0e90303e", size = 7669618 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/75/7d/ff2e314b8f9e0b1df833e2d4778eaf23eae6b8cc8f922495d110ddcbf9e1/aiohttp-3.11.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a60804bff28662cbcf340a4d61598891f12eea3a66af48ecfdc975ceec21e3c8", size = 708550 }, + { url = "https://files.pythonhosted.org/packages/09/b8/aeb4975d5bba233d6f246941f5957a5ad4e3def8b0855a72742e391925f2/aiohttp-3.11.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b4fa1cb5f270fb3eab079536b764ad740bb749ce69a94d4ec30ceee1b5940d5", size = 468430 }, + { url = "https://files.pythonhosted.org/packages/9c/5b/5b620279b3df46e597008b09fa1e10027a39467387c2332657288e25811a/aiohttp-3.11.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:731468f555656767cda219ab42e033355fe48c85fbe3ba83a349631541715ba2", size = 455593 }, + { url = "https://files.pythonhosted.org/packages/d8/75/0cdf014b816867d86c0bc26f3d3e3f194198dbf33037890beed629cd4f8f/aiohttp-3.11.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb23d8bb86282b342481cad4370ea0853a39e4a32a0042bb52ca6bdde132df43", size = 1584635 }, + { url = "https://files.pythonhosted.org/packages/df/2f/95b8f4e4dfeb57c1d9ad9fa911ede35a0249d75aa339edd2c2270dc539da/aiohttp-3.11.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f047569d655f81cb70ea5be942ee5d4421b6219c3f05d131f64088c73bb0917f", size = 1632363 }, + { url = "https://files.pythonhosted.org/packages/39/cb/70cf69ea7c50f5b0021a84f4c59c3622b2b3b81695f48a2f0e42ef7eba6e/aiohttp-3.11.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd7659baae9ccf94ae5fe8bfaa2c7bc2e94d24611528395ce88d009107e00c6d", size = 1668315 }, + { url = "https://files.pythonhosted.org/packages/2f/cc/3a3fc7a290eabc59839a7e15289cd48f33dd9337d06e301064e1e7fb26c5/aiohttp-3.11.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af01e42ad87ae24932138f154105e88da13ce7d202a6de93fafdafb2883a00ef", size = 1589546 }, + { url = "https://files.pythonhosted.org/packages/15/b4/0f7b0ed41ac6000e283e7332f0f608d734b675a8509763ca78e93714cfb0/aiohttp-3.11.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5854be2f3e5a729800bac57a8d76af464e160f19676ab6aea74bde18ad19d438", size = 1544581 }, + { url = "https://files.pythonhosted.org/packages/58/b9/4d06470fd85c687b6b0e31935ef73dde6e31767c9576d617309a2206556f/aiohttp-3.11.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6526e5fb4e14f4bbf30411216780c9967c20c5a55f2f51d3abd6de68320cc2f3", size = 1529256 }, + { url = "https://files.pythonhosted.org/packages/61/a2/6958b1b880fc017fd35f5dfb2c26a9a50c755b75fd9ae001dc2236a4fb79/aiohttp-3.11.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:85992ee30a31835fc482468637b3e5bd085fa8fe9392ba0bdcbdc1ef5e9e3c55", size = 1536592 }, + { url = "https://files.pythonhosted.org/packages/0f/dd/b974012a9551fd654f5bb95a6dd3f03d6e6472a17e1a8216dd42e9638d6c/aiohttp-3.11.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash 
= "sha256:88a12ad8ccf325a8a5ed80e6d7c3bdc247d66175afedbe104ee2aaca72960d8e", size = 1607446 }, + { url = "https://files.pythonhosted.org/packages/e0/d3/6c98fd87e638e51f074a3f2061e81fcb92123bcaf1439ac1b4a896446e40/aiohttp-3.11.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:0a6d3fbf2232e3a08c41eca81ae4f1dff3d8f1a30bae415ebe0af2d2458b8a33", size = 1628809 }, + { url = "https://files.pythonhosted.org/packages/a8/2e/86e6f85cbca02be042c268c3d93e7f35977a0e127de56e319bdd1569eaa8/aiohttp-3.11.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:84a585799c58b795573c7fa9b84c455adf3e1d72f19a2bf498b54a95ae0d194c", size = 1564291 }, + { url = "https://files.pythonhosted.org/packages/0b/8d/1f4ef3503b767717f65e1f5178b0173ab03cba1a19997ebf7b052161189f/aiohttp-3.11.11-cp310-cp310-win32.whl", hash = "sha256:bfde76a8f430cf5c5584553adf9926534352251d379dcb266ad2b93c54a29745", size = 416601 }, + { url = "https://files.pythonhosted.org/packages/ad/86/81cb83691b5ace3d9aa148dc42bacc3450d749fc88c5ec1973573c1c1779/aiohttp-3.11.11-cp310-cp310-win_amd64.whl", hash = "sha256:0fd82b8e9c383af11d2b26f27a478640b6b83d669440c0a71481f7c865a51da9", size = 442007 }, + { url = "https://files.pythonhosted.org/packages/34/ae/e8806a9f054e15f1d18b04db75c23ec38ec954a10c0a68d3bd275d7e8be3/aiohttp-3.11.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ba74ec819177af1ef7f59063c6d35a214a8fde6f987f7661f4f0eecc468a8f76", size = 708624 }, + { url = "https://files.pythonhosted.org/packages/c7/e0/313ef1a333fb4d58d0c55a6acb3cd772f5d7756604b455181049e222c020/aiohttp-3.11.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4af57160800b7a815f3fe0eba9b46bf28aafc195555f1824555fa2cfab6c1538", size = 468507 }, + { url = "https://files.pythonhosted.org/packages/a9/60/03455476bf1f467e5b4a32a465c450548b2ce724eec39d69f737191f936a/aiohttp-3.11.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffa336210cf9cd8ed117011085817d00abe4c08f99968deef0013ea283547204", size = 455571 }, + { url = "https://files.pythonhosted.org/packages/be/f9/469588603bd75bf02c8ffb8c8a0d4b217eed446b49d4a767684685aa33fd/aiohttp-3.11.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81b8fe282183e4a3c7a1b72f5ade1094ed1c6345a8f153506d114af5bf8accd9", size = 1685694 }, + { url = "https://files.pythonhosted.org/packages/88/b9/1b7fa43faf6c8616fa94c568dc1309ffee2b6b68b04ac268e5d64b738688/aiohttp-3.11.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af41686ccec6a0f2bdc66686dc0f403c41ac2089f80e2214a0f82d001052c03", size = 1743660 }, + { url = "https://files.pythonhosted.org/packages/2a/8b/0248d19dbb16b67222e75f6aecedd014656225733157e5afaf6a6a07e2e8/aiohttp-3.11.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70d1f9dde0e5dd9e292a6d4d00058737052b01f3532f69c0c65818dac26dc287", size = 1785421 }, + { url = "https://files.pythonhosted.org/packages/c4/11/f478e071815a46ca0a5ae974651ff0c7a35898c55063305a896e58aa1247/aiohttp-3.11.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:249cc6912405917344192b9f9ea5cd5b139d49e0d2f5c7f70bdfaf6b4dbf3a2e", size = 1675145 }, + { url = "https://files.pythonhosted.org/packages/26/5d/284d182fecbb5075ae10153ff7374f57314c93a8681666600e3a9e09c505/aiohttp-3.11.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0eb98d90b6690827dcc84c246811feeb4e1eea683c0eac6caed7549be9c84665", size = 1619804 }, + { url = 
"https://files.pythonhosted.org/packages/1b/78/980064c2ad685c64ce0e8aeeb7ef1e53f43c5b005edcd7d32e60809c4992/aiohttp-3.11.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec82bf1fda6cecce7f7b915f9196601a1bd1a3079796b76d16ae4cce6d0ef89b", size = 1654007 }, + { url = "https://files.pythonhosted.org/packages/21/8d/9e658d63b1438ad42b96f94da227f2e2c1d5c6001c9e8ffcc0bfb22e9105/aiohttp-3.11.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9fd46ce0845cfe28f108888b3ab17abff84ff695e01e73657eec3f96d72eef34", size = 1650022 }, + { url = "https://files.pythonhosted.org/packages/85/fd/a032bf7f2755c2df4f87f9effa34ccc1ef5cea465377dbaeef93bb56bbd6/aiohttp-3.11.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:bd176afcf8f5d2aed50c3647d4925d0db0579d96f75a31e77cbaf67d8a87742d", size = 1732899 }, + { url = "https://files.pythonhosted.org/packages/c5/0c/c2b85fde167dd440c7ba50af2aac20b5a5666392b174df54c00f888c5a75/aiohttp-3.11.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:ec2aa89305006fba9ffb98970db6c8221541be7bee4c1d027421d6f6df7d1ce2", size = 1755142 }, + { url = "https://files.pythonhosted.org/packages/bc/78/91ae1a3b3b3bed8b893c5d69c07023e151b1c95d79544ad04cf68f596c2f/aiohttp-3.11.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:92cde43018a2e17d48bb09c79e4d4cb0e236de5063ce897a5e40ac7cb4878773", size = 1692736 }, + { url = "https://files.pythonhosted.org/packages/77/89/a7ef9c4b4cdb546fcc650ca7f7395aaffbd267f0e1f648a436bec33c9b95/aiohttp-3.11.11-cp311-cp311-win32.whl", hash = "sha256:aba807f9569455cba566882c8938f1a549f205ee43c27b126e5450dc9f83cc62", size = 416418 }, + { url = "https://files.pythonhosted.org/packages/fc/db/2192489a8a51b52e06627506f8ac8df69ee221de88ab9bdea77aa793aa6a/aiohttp-3.11.11-cp311-cp311-win_amd64.whl", hash = "sha256:ae545f31489548c87b0cced5755cfe5a5308d00407000e72c4fa30b19c3220ac", size = 442509 }, + { url = "https://files.pythonhosted.org/packages/69/cf/4bda538c502f9738d6b95ada11603c05ec260807246e15e869fc3ec5de97/aiohttp-3.11.11-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e595c591a48bbc295ebf47cb91aebf9bd32f3ff76749ecf282ea7f9f6bb73886", size = 704666 }, + { url = "https://files.pythonhosted.org/packages/46/7b/87fcef2cad2fad420ca77bef981e815df6904047d0a1bd6aeded1b0d1d66/aiohttp-3.11.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3ea1b59dc06396b0b424740a10a0a63974c725b1c64736ff788a3689d36c02d2", size = 464057 }, + { url = "https://files.pythonhosted.org/packages/5a/a6/789e1f17a1b6f4a38939fbc39d29e1d960d5f89f73d0629a939410171bc0/aiohttp-3.11.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8811f3f098a78ffa16e0ea36dffd577eb031aea797cbdba81be039a4169e242c", size = 455996 }, + { url = "https://files.pythonhosted.org/packages/b7/dd/485061fbfef33165ce7320db36e530cd7116ee1098e9c3774d15a732b3fd/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7227b87a355ce1f4bf83bfae4399b1f5bb42e0259cb9405824bd03d2f4336a", size = 1682367 }, + { url = "https://files.pythonhosted.org/packages/e9/d7/9ec5b3ea9ae215c311d88b2093e8da17e67b8856673e4166c994e117ee3e/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d40f9da8cabbf295d3a9dae1295c69975b86d941bc20f0a087f0477fa0a66231", size = 1736989 }, + { url = "https://files.pythonhosted.org/packages/d6/fb/ea94927f7bfe1d86178c9d3e0a8c54f651a0a655214cce930b3c679b8f64/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:ffb3dc385f6bb1568aa974fe65da84723210e5d9707e360e9ecb51f59406cd2e", size = 1793265 }, + { url = "https://files.pythonhosted.org/packages/40/7f/6de218084f9b653026bd7063cd8045123a7ba90c25176465f266976d8c82/aiohttp-3.11.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8f5f7515f3552d899c61202d99dcb17d6e3b0de777900405611cd747cecd1b8", size = 1691841 }, + { url = "https://files.pythonhosted.org/packages/77/e2/992f43d87831cbddb6b09c57ab55499332f60ad6fdbf438ff4419c2925fc/aiohttp-3.11.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3499c7ffbfd9c6a3d8d6a2b01c26639da7e43d47c7b4f788016226b1e711caa8", size = 1619317 }, + { url = "https://files.pythonhosted.org/packages/96/74/879b23cdd816db4133325a201287c95bef4ce669acde37f8f1b8669e1755/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8e2bf8029dbf0810c7bfbc3e594b51c4cc9101fbffb583a3923aea184724203c", size = 1641416 }, + { url = "https://files.pythonhosted.org/packages/30/98/b123f6b15d87c54e58fd7ae3558ff594f898d7f30a90899718f3215ad328/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b6212a60e5c482ef90f2d788835387070a88d52cf6241d3916733c9176d39eab", size = 1646514 }, + { url = "https://files.pythonhosted.org/packages/d7/38/257fda3dc99d6978ab943141d5165ec74fd4b4164baa15e9c66fa21da86b/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d119fafe7b634dbfa25a8c597718e69a930e4847f0b88e172744be24515140da", size = 1702095 }, + { url = "https://files.pythonhosted.org/packages/0c/f4/ddab089053f9fb96654df5505c0a69bde093214b3c3454f6bfdb1845f558/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:6fba278063559acc730abf49845d0e9a9e1ba74f85f0ee6efd5803f08b285853", size = 1734611 }, + { url = "https://files.pythonhosted.org/packages/c3/d6/f30b2bc520c38c8aa4657ed953186e535ae84abe55c08d0f70acd72ff577/aiohttp-3.11.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:92fc484e34b733704ad77210c7957679c5c3877bd1e6b6d74b185e9320cc716e", size = 1694576 }, + { url = "https://files.pythonhosted.org/packages/bc/97/b0a88c3f4c6d0020b34045ee6d954058abc870814f6e310c4c9b74254116/aiohttp-3.11.11-cp312-cp312-win32.whl", hash = "sha256:9f5b3c1ed63c8fa937a920b6c1bec78b74ee09593b3f5b979ab2ae5ef60d7600", size = 411363 }, + { url = "https://files.pythonhosted.org/packages/7f/23/cc36d9c398980acaeeb443100f0216f50a7cfe20c67a9fd0a2f1a5a846de/aiohttp-3.11.11-cp312-cp312-win_amd64.whl", hash = "sha256:1e69966ea6ef0c14ee53ef7a3d68b564cc408121ea56c0caa2dc918c1b2f553d", size = 437666 }, + { url = "https://files.pythonhosted.org/packages/49/d1/d8af164f400bad432b63e1ac857d74a09311a8334b0481f2f64b158b50eb/aiohttp-3.11.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:541d823548ab69d13d23730a06f97460f4238ad2e5ed966aaf850d7c369782d9", size = 697982 }, + { url = "https://files.pythonhosted.org/packages/92/d1/faad3bf9fa4bfd26b95c69fc2e98937d52b1ff44f7e28131855a98d23a17/aiohttp-3.11.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:929f3ed33743a49ab127c58c3e0a827de0664bfcda566108989a14068f820194", size = 460662 }, + { url = "https://files.pythonhosted.org/packages/db/61/0d71cc66d63909dabc4590f74eba71f91873a77ea52424401c2498d47536/aiohttp-3.11.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0882c2820fd0132240edbb4a51eb8ceb6eef8181db9ad5291ab3332e0d71df5f", size = 452950 }, + { url = 
"https://files.pythonhosted.org/packages/07/db/6d04bc7fd92784900704e16b745484ef45b77bd04e25f58f6febaadf7983/aiohttp-3.11.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b63de12e44935d5aca7ed7ed98a255a11e5cb47f83a9fded7a5e41c40277d104", size = 1665178 }, + { url = "https://files.pythonhosted.org/packages/54/5c/e95ade9ae29f375411884d9fd98e50535bf9fe316c9feb0f30cd2ac8f508/aiohttp-3.11.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa54f8ef31d23c506910c21163f22b124facb573bff73930735cf9fe38bf7dff", size = 1717939 }, + { url = "https://files.pythonhosted.org/packages/6f/1c/1e7d5c5daea9e409ed70f7986001b8c9e3a49a50b28404498d30860edab6/aiohttp-3.11.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a344d5dc18074e3872777b62f5f7d584ae4344cd6006c17ba12103759d407af3", size = 1775125 }, + { url = "https://files.pythonhosted.org/packages/5d/66/890987e44f7d2f33a130e37e01a164168e6aff06fce15217b6eaf14df4f6/aiohttp-3.11.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7fb429ab1aafa1f48578eb315ca45bd46e9c37de11fe45c7f5f4138091e2f1", size = 1677176 }, + { url = "https://files.pythonhosted.org/packages/8f/dc/e2ba57d7a52df6cdf1072fd5fa9c6301a68e1cd67415f189805d3eeb031d/aiohttp-3.11.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c341c7d868750e31961d6d8e60ff040fb9d3d3a46d77fd85e1ab8e76c3e9a5c4", size = 1603192 }, + { url = "https://files.pythonhosted.org/packages/6c/9e/8d08a57de79ca3a358da449405555e668f2c8871a7777ecd2f0e3912c272/aiohttp-3.11.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed9ee95614a71e87f1a70bc81603f6c6760128b140bc4030abe6abaa988f1c3d", size = 1618296 }, + { url = "https://files.pythonhosted.org/packages/56/51/89822e3ec72db352c32e7fc1c690370e24e231837d9abd056490f3a49886/aiohttp-3.11.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:de8d38f1c2810fa2a4f1d995a2e9c70bb8737b18da04ac2afbf3971f65781d87", size = 1616524 }, + { url = "https://files.pythonhosted.org/packages/2c/fa/e2e6d9398f462ffaa095e84717c1732916a57f1814502929ed67dd7568ef/aiohttp-3.11.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a9b7371665d4f00deb8f32208c7c5e652059b0fda41cf6dbcac6114a041f1cc2", size = 1685471 }, + { url = "https://files.pythonhosted.org/packages/ae/5f/6bb976e619ca28a052e2c0ca7b0251ccd893f93d7c24a96abea38e332bf6/aiohttp-3.11.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:620598717fce1b3bd14dd09947ea53e1ad510317c85dda2c9c65b622edc96b12", size = 1715312 }, + { url = "https://files.pythonhosted.org/packages/79/c1/756a7e65aa087c7fac724d6c4c038f2faaa2a42fe56dbc1dd62a33ca7213/aiohttp-3.11.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bf8d9bfee991d8acc72d060d53860f356e07a50f0e0d09a8dfedea1c554dd0d5", size = 1672783 }, + { url = "https://files.pythonhosted.org/packages/73/ba/a6190ebb02176c7f75e6308da31f5d49f6477b651a3dcfaaaca865a298e2/aiohttp-3.11.11-cp313-cp313-win32.whl", hash = "sha256:9d73ee3725b7a737ad86c2eac5c57a4a97793d9f442599bea5ec67ac9f4bdc3d", size = 410229 }, + { url = "https://files.pythonhosted.org/packages/b8/62/c9fa5bafe03186a0e4699150a7fed9b1e73240996d0d2f0e5f70f3fdf471/aiohttp-3.11.11-cp313-cp313-win_amd64.whl", hash = "sha256:c7a06301c2fb096bdb0bd25fe2011531c1453b9f2c163c8031600ec73af1cc99", size = 436081 }, ] [[package]] @@ -190,7 +148,7 @@ wheels = [ [[package]] name = "anthropic" -version = "0.47.2" +version = "0.43.0" source = { registry = "https://pypi.org/simple" } 
dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -201,9 +159,9 @@ dependencies = [ { name = "sniffio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/64/65/175bf024bd9866ef96470620e164dcf8c3e0a2892178e59d1532465c8315/anthropic-0.47.2.tar.gz", hash = "sha256:452f4ca0c56ffab8b6ce9928bf8470650f88106a7001b250895eb65c54cfa44c", size = 208066 } +sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/973d2ac6c9f7d1be41829c7b878cbe399385b25cc2ebe80ad0eec9999b8c/anthropic-0.43.0.tar.gz", hash = "sha256:06801f01d317a431d883230024318d48981758058bf7e079f33fb11f64b5a5c1", size = 194826 } wheels = [ - { url = "https://files.pythonhosted.org/packages/92/ad/feddd3ed83804b7f05c90b343e2d9df8f4a28028d6820c1a034de79dcdab/anthropic-0.47.2-py3-none-any.whl", hash = "sha256:61b712a56308fce69f04d92ba0230ab2bc187b5bce17811d400843a8976bb67f", size = 239536 }, + { url = "https://files.pythonhosted.org/packages/d7/88/ded3ba979a2218a448cbc1a1e762d998b92f30529452c5104b35b6cb71f8/anthropic-0.43.0-py3-none-any.whl", hash = "sha256:f748a703f77b3244975e1aace3a935840dc653a4714fb6bba644f97cc76847b4", size = 207867 }, ] [[package]] @@ -262,11 +220,11 @@ wheels = [ [[package]] name = "attrs" -version = "25.1.0" +version = "24.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/49/7c/fdf464bcc51d23881d110abd74b512a42b3d5d376a55a831b44c603ae17f/attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e", size = 810562 } +sdist = { url = "https://files.pythonhosted.org/packages/48/c8/6260f8ccc11f0917360fc0da435c5c9c7504e3db174d5a12a1494887b045/attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff", size = 805984 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/30/d4986a882011f9df997a55e6becd864812ccfcd821d64aac8570ee39f719/attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a", size = 63152 }, + { url = "https://files.pythonhosted.org/packages/89/aa/ab0f7891a01eeb2d2e338ae8fecbe57fcebea1a24dbb64d45801bfab481d/attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308", size = 63397 }, ] [[package]] @@ -281,91 +239,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/87/1f/bc95e43ffb57c05b8efcc376dd55a0240bf58f47ddf5a0f92452b6457b75/Authlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:d35800b973099bbadc49b42b256ecb80041ad56b7fe1216a362c7943c088f377", size = 223827 }, ] -[[package]] -name = "autogen-agentchat" -version = "0.2.40" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "diskcache", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "docker", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "flaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "openai", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or 
sys_platform == 'win32'" }, - { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "python-dotenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "termcolor", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "tiktoken", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2a/82/91a2a38a7188c216cf6c2ff1177b47eb0ec9451a5f60b83dc5f1669ae5f1/autogen-agentchat-0.2.40.tar.gz", hash = "sha256:bfdd25ab63fb75a701095315d0d7214f1616411b9edbcdf6183da35a956cc42e", size = 335172 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/27/198414c4c24e886770a03e0bed349582c40e3bfc2ec327034cc5d22c185f/autogen_agentchat-0.2.40-py3-none-any.whl", hash = "sha256:03f11ab89442a3b2408e7e46aa4a66d0be44e6f4447467efbb3ef4e35940176e", size = 382317 }, -] - -[[package]] -name = "av" -version = "13.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0c/9d/486d31e76784cc0ad943f420c5e05867263b32b37e2f4b0f7f22fdc1ca3a/av-13.1.0.tar.gz", hash = "sha256:d3da736c55847d8596eb8c26c60e036f193001db3bc5c10da8665622d906c17e", size = 3957908 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/db/d6/1c4a8056a88e006681ac6a3d5ac6082f0a48e52bd565bfd350bfc7c6a37d/av-13.1.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a2af44fae6d16c3a40dd1c85bda41b449be08a2c172d8f44fb63395ccf6e6fb4", size = 24260057 }, - { url = "https://files.pythonhosted.org/packages/23/be/cf89545117172d75a0c48066e6f368403237df623b2e3e93590fdeaef8bf/av-13.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0fea71fe06fd0dfe90a089200eb6468034797f860a321fa2d62e07d619c74749", size = 19475039 }, - { url = "https://files.pythonhosted.org/packages/4b/d0/8e261547f7763f320a4f5f68e139fea5f31814fddfe5503c8372123ebb8b/av-13.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:756997810dcca11811b598d209561cabd2071e5b472b867c295bb3e7022eecde", size = 31289005 }, - { url = "https://files.pythonhosted.org/packages/82/a3/00cacfe80ebbe0664876dd26558fb23b65d034ffd2ce0ddb12f1c746e7cb/av-13.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f432102edaec4ee90087a675acf486bff0c81b47d98b85eb3218afe84575b60", size = 30705668 }, - { url = "https://files.pythonhosted.org/packages/d7/37/faa98dca1a8f6c2e3f4ad3a935037872aff49a679b76918c5258cf5a1c70/av-13.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d568c4d7a36df52c0774d52e6d730148775ead16daed81c10dafc2569b5a38d", size = 33122108 }, - { url = "https://files.pythonhosted.org/packages/25/81/c3a842477b558e23c7249f81cf723764c193636b6523267c2c02321da6b0/av-13.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:aa6f76e7c5e77bc5f99a27ada29f78c64fd4e0d42da2c4d203badc650bc0a686", size = 25775920 }, - { url = "https://files.pythonhosted.org/packages/39/54/c4227080c9700384db90072ace70d89b6a288b3748bd2ec0e32580a49e7f/av-13.1.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:867385e6701464a5c95903e24d2e0df1c7e0dbf211ed91d0ce639cd687373e10", size = 24255112 }, - { url = 
"https://files.pythonhosted.org/packages/32/4a/eb9348231655ca99b200b380f4edbceff7358c927a285badcc84b18fb1c9/av-13.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cb7a3f319401a46b0017771268ff4928501e77cf00b1a2aa0721e20b2fd1146e", size = 19467930 }, - { url = "https://files.pythonhosted.org/packages/14/c7/48c80252bdbc3a75a54dd205a7fab8f613914009b9e5416202757208e040/av-13.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad904f860147bceaca65b0d3174a8153f35c570d465161d210f1879970b15559", size = 32207671 }, - { url = "https://files.pythonhosted.org/packages/f9/66/3332c7fa8c43b65680a94f279ea3e832b5500de3a1392bac6112881e984b/av-13.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a906e017b29d0eb80d9ccf7a98d19268122da792dbb68eb741cfebba156e6aed", size = 31520911 }, - { url = "https://files.pythonhosted.org/packages/e5/bb/2e03acb9b27591d97f700a3a6c27cfd1bc53fa148177747eda8a70cca1e9/av-13.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ce894d7847897da7be63277a0875bd93c51327134ac226c67978de014c7979f", size = 34048399 }, - { url = "https://files.pythonhosted.org/packages/85/44/527aa3b65947d42cfe829326026edf0cd1a8c459390076034be275616c36/av-13.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:384bcdb5fc3238a263a5a25cc9efc690859fa4148cc4b07e00fae927178db22a", size = 25779569 }, - { url = "https://files.pythonhosted.org/packages/9b/aa/4bdd8ce59173574fc6e0c282c71ee6f96fca82643d97bf172bc4cb5a5674/av-13.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:261dbc3f4b55f4f8f3375b10b2258fca7f2ab7a6365c01bc65e77a0d5327a195", size = 24268674 }, - { url = "https://files.pythonhosted.org/packages/17/b4/b267dd5bad99eed49ec6731827c6bcb5ab03864bf732a7ebb81e3df79911/av-13.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83d259ef86b9054eb914bc7c6a7f6092a6d75cb939295e70ee979cfd92a67b99", size = 19475617 }, - { url = "https://files.pythonhosted.org/packages/68/32/4209e51f54d7b54a1feb576d309c671ed1ff437b54fcc4ec68c239199e0a/av-13.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3b4d3ca159eceab97e3c0fb08fe756520fb95508417f76e48198fda2a5b0806", size = 32468873 }, - { url = "https://files.pythonhosted.org/packages/b6/d8/c174da5f06b24f3c9e36f91fd02a7411c39da9ce792c17964260d4be675e/av-13.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40e8f757e373b73a2dc4640852a00cce4a4a92ef19b2e642a96d6994cd1fffbf", size = 31818484 }, - { url = "https://files.pythonhosted.org/packages/7f/22/0dd8d1d5cad415772bb707d16aea8b81cf75d340d11d3668eea43468c730/av-13.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8aaec2c0bfd024359db3821d679009d4e637e1bee0321d20f61c54ed6b20f41", size = 34398652 }, - { url = "https://files.pythonhosted.org/packages/7b/ff/48fa68888b8d5bae36d915556ff18f9e5fdc6b5ff5ae23dc4904c9713168/av-13.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:5ea0deab0e6a739cb742fba2a3983d8102f7516a3cdf3c46669f3cac0ed1f351", size = 25781343 }, - { url = "https://files.pythonhosted.org/packages/82/6e/cdce12e534570df37d3fdcb3a74851d39e9ab79d388f3174dea9785a011a/av-13.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:47642ebaebfe20519b2391bd5b7c38b596efcd052bfd09c8d33058f94ddd0fd6", size = 24229340 }, - { url = "https://files.pythonhosted.org/packages/7c/88/5359aeada9ea509426f2db63b6531833824a1b02470667b103479ddea7ae/av-13.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2f079c2daa3ae06557b3f6e9bed4fb9c876e8012175bec645ccd007199a302db", size 
= 19436445 }, - { url = "https://files.pythonhosted.org/packages/b4/d4/64995e5b800476c86dae4ea1444a0eac44e2c4985fac6401b08401e2df11/av-13.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f0de8252deeeb1887637e88d4d9d18514e5cfe276bdb9e6ca8e9eef89d1667a", size = 32120549 }, - { url = "https://files.pythonhosted.org/packages/68/76/9910694cf87d2d308d851f5b2b5c5b20f7f55411f596e2c158fb13bf84a3/av-13.1.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ad0024f4def11b0cedfeee478fa6c6fd7ed3955e13387e0f27261fdda6121b4", size = 31495305 }, - { url = "https://files.pythonhosted.org/packages/6a/a8/cd92de947b9595a0eb2c64e6f7ba295aac2687972050ae092173c2f6ea0c/av-13.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb88e2590eaed45233eb117f1dfab1a43ed9a997b2c46da9f08468dd00f14895", size = 34065325 }, - { url = "https://files.pythonhosted.org/packages/9d/d0/9869fcbd66422df2033d4b78a663e3c64aa6fe7eb9189c811d60f69d9871/av-13.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:c927e4fa4f6aeed4340b3e3b16b237d7cb743e5c1a55b92307407590ca4112aa", size = 25754728 }, - { url = "https://files.pythonhosted.org/packages/63/62/09859d91bc2309918d548ac4585973c53e7db27010c432d050f02206f9bd/av-13.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fc5118f78ee712b2c396f345e4c51e60e61e28f1f606adbd4060c4dc44b0b652", size = 23861117 }, - { url = "https://files.pythonhosted.org/packages/c7/43/f186435a0acad3a2bdf271ce51d3af97ac3153a410e54a623529d39a1818/av-13.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:81bcbd3545e523e7a350613be1866b515a5ee3fafa1d9d257d7ed02531fc2636", size = 19115008 }, - { url = "https://files.pythonhosted.org/packages/31/eb/a1b4af95a615ba73dfc3cfcb9387e40826c92d7d6d383a1b68685a7ef920/av-13.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83b2bc641e8e16bbf058de35f1ba79ebed358ac6fe3cb5a665366294774fdb18", size = 22852637 }, - { url = "https://files.pythonhosted.org/packages/0b/a6/94a34aa672af7fef2939e4a5d6c4c6c28e33da0c623aaa9485d977eeaa95/av-13.1.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d956ae3e68fabdc45eb2b986c2e842a31df084d8cfc90336509f07a727a9df62", size = 22703888 }, - { url = "https://files.pythonhosted.org/packages/b9/69/08a72ceed2c8a6e689dea2ef8e941df9469cbe144a600b83d45f821477fc/av-13.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ef076fcdf172aafcb21ea3ef7bd68cc9151b050016a8ace13b3dae3d08a4427", size = 24657784 }, - { url = "https://files.pythonhosted.org/packages/b7/8c/c20894580a4341a76c7c74b59c43e26e6652b0fc60f7248f2c1bc5fdbb5e/av-13.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bbf90397b7a466ff2879bd0944d55f796ad76c073fce50304315b83ad00113bd", size = 25562492 }, -] - [[package]] name = "azure-ai-inference" -version = "1.0.0b9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "azure-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "isodate", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4e/6a/ed85592e5c64e08c291992f58b1a94dab6869f28fb0f40fd753dced73ba6/azure_ai_inference-1.0.0b9.tar.gz", hash = 
"sha256:1feb496bd84b01ee2691befc04358fa25d7c344d8288e99364438859ad7cd5a4", size = 182408 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4f/0f/27520da74769db6e58327d96c98e7b9a07ce686dff582c9a5ec60b03f9dd/azure_ai_inference-1.0.0b9-py3-none-any.whl", hash = "sha256:49823732e674092dad83bb8b0d1b65aa73111fab924d61349eb2a8cdc0493990", size = 124885 }, -] - -[[package]] -name = "azure-ai-projects" -version = "1.0.0b6" +version = "1.0.0b7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "azure-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "isodate", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1f/72/5f9a78c913af66c55222ff912227b494707c4adfbdca27a78c3687a1b8ba/azure_ai_projects-1.0.0b6.tar.gz", hash = "sha256:ce6cfb2403eeb1a80e5dd84193fb2864953cd95a351f3d4572a5451bbb4c30d2", size = 298737 } +sdist = { url = "https://files.pythonhosted.org/packages/af/37/233eee0bebbf631d2f911a9f1ebbc3784b100d9bfb84efc275e71c1ea636/azure_ai_inference-1.0.0b7.tar.gz", hash = "sha256:bd912f71f7f855036ca46c9a21439f290eed5e61da418fd26bbb32e3c68bcce3", size = 175883 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9d/d9/14b31fc773072b63493d55a1a5b60e656f11aeea2b603fef2eb567686d96/azure_ai_projects-1.0.0b6-py3-none-any.whl", hash = "sha256:b0689825065648b54b4405e9edd22b1de3ea0dfc1ca3baf99db5173fd6208692", size = 187221 }, + { url = "https://files.pythonhosted.org/packages/cd/b6/5ba830eddc59f820c654694d476c14a3dd9c1f828ff9b48eb8b21dfd5f01/azure_ai_inference-1.0.0b7-py3-none-any.whl", hash = "sha256:59bb6a9ee62bd7654a69ca2bf12fe9335d7045df95b491cb8b5f9e3791c86175", size = 123030 }, ] [[package]] @@ -419,7 +304,7 @@ wheels = [ [[package]] name = "azure-identity" -version = "1.20.0" +version = "1.19.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "azure-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -428,9 +313,9 @@ dependencies = [ { name = "msal-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ee/89/7d170fab0b85d9650cdb7abda087e849644beb52bd28f6804620dd0cecd9/azure_identity-1.20.0.tar.gz", hash = "sha256:40597210d56c83e15031b0fe2ea3b26420189e1e7f3e20bdbb292315da1ba014", size = 264447 } +sdist = { url = "https://files.pythonhosted.org/packages/aa/91/cbaeff9eb0b838f0d35b4607ac1c6195c735c8eb17db235f8f60e622934c/azure_identity-1.19.0.tar.gz", hash = "sha256:500144dc18197d7019b81501165d4fa92225f03778f17d7ca8a2a180129a9c83", size = 263058 } wheels = [ - { url = "https://files.pythonhosted.org/packages/de/aa/819513c1dbef990af690bb5eefb5e337f8698d75dfdb7302528f50ce1994/azure_identity-1.20.0-py3-none-any.whl", hash = "sha256:5f23fc4889a66330e840bd78830287e14f3761820fe3c5f77ac875edcb9ec998", size = 188243 }, + { url = "https://files.pythonhosted.org/packages/f0/d5/3995ed12f941f4a41a273d9b1709282e825ef87ed8eab3833038fee54d59/azure_identity-1.19.0-py3-none-any.whl", hash = "sha256:e3f6558c181692d7509f09de10cca527c7dce426776454fb97df512a46527e81", size = 187587 }, ] 
[[package]] @@ -491,15 +376,14 @@ wheels = [ [[package]] name = "beautifulsoup4" -version = "4.13.3" +version = "4.12.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "soupsieve", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f0/3c/adaf39ce1fb4afdd21b611e3d530b183bb7759c9b673d60db0e347fd4439/beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b", size = 619516 } +sdist = { url = "https://files.pythonhosted.org/packages/b3/ca/824b1195773ce6166d388573fc106ce56d4a805bd7427b624e063596ec58/beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051", size = 581181 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/49/6abb616eb3cbab6a7cca303dc02fdf3836de2e0b834bf966a7f5271a34d8/beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16", size = 186015 }, + { url = "https://files.pythonhosted.org/packages/b1/fe/e8c672695b37eecc5cbf43e1d0638d88d66ba3a44c4d321c796f4e59167f/beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed", size = 147925 }, ] [[package]] @@ -530,30 +414,30 @@ wheels = [ [[package]] name = "boto3" -version = "1.37.2" +version = "1.36.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "jmespath", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "s3transfer", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b3/75/afe885605fef5b624d68869864f9af2595ff0b646377e1fdc9bba35aa49b/boto3-1.37.2.tar.gz", hash = "sha256:d64491bd4142c2c6dfe44479bf89c4ab7fa8d00210c2aaa7361931e61898b608", size = 111201 } +sdist = { url = "https://files.pythonhosted.org/packages/bf/04/0c6cea060653eee75f4348152dfc0aa0b241f7d1f99a530079ee44d61e4b/boto3-1.36.1.tar.gz", hash = "sha256:258ab77225a81d3cf3029c9afe9920cd9dec317689dfadec6f6f0a23130bb60a", size = 110959 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/5d/e4d5ed68b3b2f421754cc5ea0b9276f0c1568cd1a45dd7df3aadce028f60/boto3-1.37.2-py3-none-any.whl", hash = "sha256:e58136d52d79425ce26c3c1578bf94d4b2e91ead55fed9f6950406ee9713e6af", size = 139345 }, + { url = "https://files.pythonhosted.org/packages/2b/ed/464e1df3901fbfedd5a0786e551240216f0c867440fa6156595178227b3f/boto3-1.36.1-py3-none-any.whl", hash = "sha256:eb21380d73fec6645439c0d802210f72a0cdb3295b02953f246ff53f512faa8f", size = 139163 }, ] [[package]] name = "botocore" -version = "1.37.2" +version = "1.36.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jmespath", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/39/20/5f8f74ac3db553f713d640d0af4131162846123c955ac7118e727ef7441b/botocore-1.37.2.tar.gz", hash = "sha256:3f460f3c32cd6d747d5897a9cbde011bf1715abc7bf0a6ea6fdb0b812df63287", size = 13568710 } +sdist = { url = "https://files.pythonhosted.org/packages/39/aa/556720b3ee9629b7c4366b5a0d9797a84e83a97f78435904cbb9bdc41939/botocore-1.36.1.tar.gz", hash = "sha256:f789a6f272b5b3d8f8756495019785e33868e5e00dd9662a3ee7959ac939bb12", size = 13498150 } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/27/c579234944cb1e9a34e7803b3a45efa309d44280ba5e2b1069d604b2b266/botocore-1.37.2-py3-none-any.whl", hash = "sha256:5f59b966f3cd0c8055ef6f7c2600f7db5f8218071d992e5f95da3f9156d4370f", size = 13333985 }, + { url = "https://files.pythonhosted.org/packages/be/bb/5431f12e2dadd881fd023fb57e7e3ab82f7b697c38dc837fc8d70cca51bd/botocore-1.36.1-py3-none-any.whl", hash = "sha256:dec513b4eb8a847d79bbefdcdd07040ed9d44c20b0001136f0890a03d595705a", size = 13297686 }, ] [[package]] @@ -574,20 +458,20 @@ wheels = [ [[package]] name = "cachetools" -version = "5.5.2" +version = "5.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/81/3747dad6b14fa2cf53fcf10548cf5aea6913e96fab41a3c198676f8948a5/cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4", size = 28380 } +sdist = { url = "https://files.pythonhosted.org/packages/c3/38/a0f315319737ecf45b4319a8cd1f3a908e29d9277b46942263292115eee7/cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a", size = 27661 } wheels = [ - { url = "https://files.pythonhosted.org/packages/72/76/20fa66124dbe6be5cafeb312ece67de6b61dd91a0247d1ea13db4ebb33c2/cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a", size = 10080 }, + { url = "https://files.pythonhosted.org/packages/a4/07/14f8ad37f2d12a5ce41206c21820d8cb6561b728e51fad4530dff0552a67/cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292", size = 9524 }, ] [[package]] name = "certifi" -version = "2025.1.31" +version = "2024.12.14" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577 } +sdist = { url = "https://files.pythonhosted.org/packages/0f/bd/1d41ee578ce09523c81a15426705dd20969f5abf006d1afe8aeff0dd776a/certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db", size = 166010 } wheels = [ - { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393 }, + { url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927 }, ] [[package]] @@ -858,62 +742,61 @@ wheels = [ [[package]] name = "coverage" -version = "7.6.12" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/0c/d6/2b53ab3ee99f2262e6f0b8369a43f6d66658eab45510331c0b3d5c8c4272/coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2", size = 805941 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/67/81dc41ec8f548c365d04a29f1afd492d3176b372c33e47fa2a45a01dc13a/coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8", size = 208345 }, - { url = "https://files.pythonhosted.org/packages/33/43/17f71676016c8829bde69e24c852fef6bd9ed39f774a245d9ec98f689fa0/coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879", size = 208775 }, - { url = "https://files.pythonhosted.org/packages/86/25/c6ff0775f8960e8c0840845b723eed978d22a3cd9babd2b996e4a7c502c6/coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe", size = 237925 }, - { url = "https://files.pythonhosted.org/packages/b0/3d/5f5bd37046243cb9d15fff2c69e498c2f4fe4f9b42a96018d4579ed3506f/coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674", size = 235835 }, - { url = "https://files.pythonhosted.org/packages/b5/f1/9e6b75531fe33490b910d251b0bf709142e73a40e4e38a3899e6986fe088/coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb", size = 236966 }, - { url = "https://files.pythonhosted.org/packages/4f/bc/aef5a98f9133851bd1aacf130e754063719345d2fb776a117d5a8d516971/coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c", size = 236080 }, - { url = "https://files.pythonhosted.org/packages/eb/d0/56b4ab77f9b12aea4d4c11dc11cdcaa7c29130b837eb610639cf3400c9c3/coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c", size = 234393 }, - { url = "https://files.pythonhosted.org/packages/0d/77/28ef95c5d23fe3dd191a0b7d89c82fea2c2d904aef9315daf7c890e96557/coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e", size = 235536 }, - { url = "https://files.pythonhosted.org/packages/29/62/18791d3632ee3ff3f95bc8599115707d05229c72db9539f208bb878a3d88/coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425", size = 211063 }, - { url = "https://files.pythonhosted.org/packages/fc/57/b3878006cedfd573c963e5c751b8587154eb10a61cc0f47a84f85c88a355/coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa", size = 211955 }, - { url = "https://files.pythonhosted.org/packages/64/2d/da78abbfff98468c91fd63a73cccdfa0e99051676ded8dd36123e3a2d4d5/coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015", size = 208464 }, - { url = "https://files.pythonhosted.org/packages/31/f2/c269f46c470bdabe83a69e860c80a82e5e76840e9f4bbd7f38f8cebbee2f/coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45", size = 208893 }, - { url = "https://files.pythonhosted.org/packages/47/63/5682bf14d2ce20819998a49c0deadb81e608a59eed64d6bc2191bc8046b9/coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702", size = 241545 }, - { url = "https://files.pythonhosted.org/packages/6a/b6/6b6631f1172d437e11067e1c2edfdb7238b65dff965a12bce3b6d1bf2be2/coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0", size = 239230 }, - { url = "https://files.pythonhosted.org/packages/c7/01/9cd06cbb1be53e837e16f1b4309f6357e2dfcbdab0dd7cd3b1a50589e4e1/coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f", size = 241013 }, - { url = "https://files.pythonhosted.org/packages/4b/26/56afefc03c30871326e3d99709a70d327ac1f33da383cba108c79bd71563/coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f", size = 239750 }, - { url = "https://files.pythonhosted.org/packages/dd/ea/88a1ff951ed288f56aa561558ebe380107cf9132facd0b50bced63ba7238/coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d", size = 238462 }, - { url = "https://files.pythonhosted.org/packages/6e/d4/1d9404566f553728889409eff82151d515fbb46dc92cbd13b5337fa0de8c/coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba", size = 239307 }, - { url = "https://files.pythonhosted.org/packages/12/c1/e453d3b794cde1e232ee8ac1d194fde8e2ba329c18bbf1b93f6f5eef606b/coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f", size = 211117 }, - { url = "https://files.pythonhosted.org/packages/d5/db/829185120c1686fa297294f8fcd23e0422f71070bf85ef1cc1a72ecb2930/coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558", size = 212019 }, - { url = "https://files.pythonhosted.org/packages/e2/7f/4af2ed1d06ce6bee7eafc03b2ef748b14132b0bdae04388e451e4b2c529b/coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad", size = 208645 }, - { url = "https://files.pythonhosted.org/packages/dc/60/d19df912989117caa95123524d26fc973f56dc14aecdec5ccd7d0084e131/coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3", size = 208898 }, - { url = "https://files.pythonhosted.org/packages/bd/10/fecabcf438ba676f706bf90186ccf6ff9f6158cc494286965c76e58742fa/coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574", size = 242987 }, - { url = "https://files.pythonhosted.org/packages/4c/53/4e208440389e8ea936f5f2b0762dcd4cb03281a7722def8e2bf9dc9c3d68/coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985", size = 239881 }, - { url = 
"https://files.pythonhosted.org/packages/c4/47/2ba744af8d2f0caa1f17e7746147e34dfc5f811fb65fc153153722d58835/coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750", size = 242142 }, - { url = "https://files.pythonhosted.org/packages/e9/90/df726af8ee74d92ee7e3bf113bf101ea4315d71508952bd21abc3fae471e/coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea", size = 241437 }, - { url = "https://files.pythonhosted.org/packages/f6/af/995263fd04ae5f9cf12521150295bf03b6ba940d0aea97953bb4a6db3e2b/coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3", size = 239724 }, - { url = "https://files.pythonhosted.org/packages/1c/8e/5bb04f0318805e190984c6ce106b4c3968a9562a400180e549855d8211bd/coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a", size = 241329 }, - { url = "https://files.pythonhosted.org/packages/9e/9d/fa04d9e6c3f6459f4e0b231925277cfc33d72dfab7fa19c312c03e59da99/coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95", size = 211289 }, - { url = "https://files.pythonhosted.org/packages/53/40/53c7ffe3c0c3fff4d708bc99e65f3d78c129110d6629736faf2dbd60ad57/coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288", size = 212079 }, - { url = "https://files.pythonhosted.org/packages/76/89/1adf3e634753c0de3dad2f02aac1e73dba58bc5a3a914ac94a25b2ef418f/coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1", size = 208673 }, - { url = "https://files.pythonhosted.org/packages/ce/64/92a4e239d64d798535c5b45baac6b891c205a8a2e7c9cc8590ad386693dc/coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd", size = 208945 }, - { url = "https://files.pythonhosted.org/packages/b4/d0/4596a3ef3bca20a94539c9b1e10fd250225d1dec57ea78b0867a1cf9742e/coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9", size = 242484 }, - { url = "https://files.pythonhosted.org/packages/1c/ef/6fd0d344695af6718a38d0861408af48a709327335486a7ad7e85936dc6e/coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e", size = 239525 }, - { url = "https://files.pythonhosted.org/packages/0c/4b/373be2be7dd42f2bcd6964059fd8fa307d265a29d2b9bcf1d044bcc156ed/coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4", size = 241545 }, - { url = "https://files.pythonhosted.org/packages/a6/7d/0e83cc2673a7790650851ee92f72a343827ecaaea07960587c8f442b5cd3/coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6", size = 241179 }, - { url = 
"https://files.pythonhosted.org/packages/ff/8c/566ea92ce2bb7627b0900124e24a99f9244b6c8c92d09ff9f7633eb7c3c8/coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3", size = 239288 }, - { url = "https://files.pythonhosted.org/packages/7d/e4/869a138e50b622f796782d642c15fb5f25a5870c6d0059a663667a201638/coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc", size = 241032 }, - { url = "https://files.pythonhosted.org/packages/ae/28/a52ff5d62a9f9e9fe9c4f17759b98632edd3a3489fce70154c7d66054dd3/coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3", size = 211315 }, - { url = "https://files.pythonhosted.org/packages/bc/17/ab849b7429a639f9722fa5628364c28d675c7ff37ebc3268fe9840dda13c/coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef", size = 212099 }, - { url = "https://files.pythonhosted.org/packages/d2/1c/b9965bf23e171d98505eb5eb4fb4d05c44efd256f2e0f19ad1ba8c3f54b0/coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e", size = 209511 }, - { url = "https://files.pythonhosted.org/packages/57/b3/119c201d3b692d5e17784fee876a9a78e1b3051327de2709392962877ca8/coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703", size = 209729 }, - { url = "https://files.pythonhosted.org/packages/52/4e/a7feb5a56b266304bc59f872ea07b728e14d5a64f1ad3a2cc01a3259c965/coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0", size = 253988 }, - { url = "https://files.pythonhosted.org/packages/65/19/069fec4d6908d0dae98126aa7ad08ce5130a6decc8509da7740d36e8e8d2/coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924", size = 249697 }, - { url = "https://files.pythonhosted.org/packages/1c/da/5b19f09ba39df7c55f77820736bf17bbe2416bbf5216a3100ac019e15839/coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b", size = 252033 }, - { url = "https://files.pythonhosted.org/packages/1e/89/4c2750df7f80a7872267f7c5fe497c69d45f688f7b3afe1297e52e33f791/coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d", size = 251535 }, - { url = "https://files.pythonhosted.org/packages/78/3b/6d3ae3c1cc05f1b0460c51e6f6dcf567598cbd7c6121e5ad06643974703c/coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827", size = 249192 }, - { url = "https://files.pythonhosted.org/packages/6e/8e/c14a79f535ce41af7d436bbad0d3d90c43d9e38ec409b4770c894031422e/coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9", size = 250627 }, - { url = "https://files.pythonhosted.org/packages/cb/79/b7cee656cfb17a7f2c1b9c3cee03dd5d8000ca299ad4038ba64b61a9b044/coverage-7.6.12-cp313-cp313t-win32.whl", hash = 
"sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3", size = 212033 }, - { url = "https://files.pythonhosted.org/packages/b6/c3/f7aaa3813f1fa9a4228175a7bd368199659d392897e184435a3b66408dd3/coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f", size = 213240 }, - { url = "https://files.pythonhosted.org/packages/7a/7f/05818c62c7afe75df11e0233bd670948d68b36cdbf2a339a095bc02624a8/coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf", size = 200558 }, - { url = "https://files.pythonhosted.org/packages/fb/b2/f655700e1024dec98b10ebaafd0cedbc25e40e4abe62a3c8e2ceef4f8f0a/coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953", size = 200552 }, +version = "7.6.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/84/ba/ac14d281f80aab516275012e8875991bb06203957aa1e19950139238d658/coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23", size = 803868 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/12/2a2a923edf4ddabdffed7ad6da50d96a5c126dae7b80a33df7310e329a1e/coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78", size = 207982 }, + { url = "https://files.pythonhosted.org/packages/ca/49/6985dbca9c7be3f3cb62a2e6e492a0c88b65bf40579e16c71ae9c33c6b23/coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c", size = 208414 }, + { url = "https://files.pythonhosted.org/packages/35/93/287e8f1d1ed2646f4e0b2605d14616c9a8a2697d0d1b453815eb5c6cebdb/coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a", size = 236860 }, + { url = "https://files.pythonhosted.org/packages/de/e1/cfdb5627a03567a10031acc629b75d45a4ca1616e54f7133ca1fa366050a/coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165", size = 234758 }, + { url = "https://files.pythonhosted.org/packages/6d/85/fc0de2bcda3f97c2ee9fe8568f7d48f7279e91068958e5b2cc19e0e5f600/coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988", size = 235920 }, + { url = "https://files.pythonhosted.org/packages/79/73/ef4ea0105531506a6f4cf4ba571a214b14a884630b567ed65b3d9c1975e1/coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5", size = 234986 }, + { url = "https://files.pythonhosted.org/packages/c6/4d/75afcfe4432e2ad0405c6f27adeb109ff8976c5e636af8604f94f29fa3fc/coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3", size = 233446 }, + { url = "https://files.pythonhosted.org/packages/86/5b/efee56a89c16171288cafff022e8af44f8f94075c2d8da563c3935212871/coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5", size = 234566 }, + { url = 
"https://files.pythonhosted.org/packages/f2/db/67770cceb4a64d3198bf2aa49946f411b85ec6b0a9b489e61c8467a4253b/coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244", size = 210675 }, + { url = "https://files.pythonhosted.org/packages/8d/27/e8bfc43f5345ec2c27bc8a1fa77cdc5ce9dcf954445e11f14bb70b889d14/coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e", size = 211518 }, + { url = "https://files.pythonhosted.org/packages/85/d2/5e175fcf6766cf7501a8541d81778fd2f52f4870100e791f5327fd23270b/coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3", size = 208088 }, + { url = "https://files.pythonhosted.org/packages/4b/6f/06db4dc8fca33c13b673986e20e466fd936235a6ec1f0045c3853ac1b593/coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43", size = 208536 }, + { url = "https://files.pythonhosted.org/packages/0d/62/c6a0cf80318c1c1af376d52df444da3608eafc913b82c84a4600d8349472/coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132", size = 240474 }, + { url = "https://files.pythonhosted.org/packages/a3/59/750adafc2e57786d2e8739a46b680d4fb0fbc2d57fbcb161290a9f1ecf23/coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f", size = 237880 }, + { url = "https://files.pythonhosted.org/packages/2c/f8/ef009b3b98e9f7033c19deb40d629354aab1d8b2d7f9cfec284dbedf5096/coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994", size = 239750 }, + { url = "https://files.pythonhosted.org/packages/a6/e2/6622f3b70f5f5b59f705e680dae6db64421af05a5d1e389afd24dae62e5b/coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99", size = 238642 }, + { url = "https://files.pythonhosted.org/packages/2d/10/57ac3f191a3c95c67844099514ff44e6e19b2915cd1c22269fb27f9b17b6/coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd", size = 237266 }, + { url = "https://files.pythonhosted.org/packages/ee/2d/7016f4ad9d553cabcb7333ed78ff9d27248ec4eba8dd21fa488254dff894/coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377", size = 238045 }, + { url = "https://files.pythonhosted.org/packages/a7/fe/45af5c82389a71e0cae4546413266d2195c3744849669b0bab4b5f2c75da/coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8", size = 210647 }, + { url = "https://files.pythonhosted.org/packages/db/11/3f8e803a43b79bc534c6a506674da9d614e990e37118b4506faf70d46ed6/coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609", size = 211508 }, + { url = "https://files.pythonhosted.org/packages/86/77/19d09ea06f92fdf0487499283b1b7af06bc422ea94534c8fe3a4cd023641/coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853", size = 208281 }, + { url = "https://files.pythonhosted.org/packages/b6/67/5479b9f2f99fcfb49c0d5cf61912a5255ef80b6e80a3cddba39c38146cf4/coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078", size = 208514 }, + { url = "https://files.pythonhosted.org/packages/15/d1/febf59030ce1c83b7331c3546d7317e5120c5966471727aa7ac157729c4b/coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0", size = 241537 }, + { url = "https://files.pythonhosted.org/packages/4b/7e/5ac4c90192130e7cf8b63153fe620c8bfd9068f89a6d9b5f26f1550f7a26/coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50", size = 238572 }, + { url = "https://files.pythonhosted.org/packages/dc/03/0334a79b26ecf59958f2fe9dd1f5ab3e2f88db876f5071933de39af09647/coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022", size = 240639 }, + { url = "https://files.pythonhosted.org/packages/d7/45/8a707f23c202208d7b286d78ad6233f50dcf929319b664b6cc18a03c1aae/coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b", size = 240072 }, + { url = "https://files.pythonhosted.org/packages/66/02/603ce0ac2d02bc7b393279ef618940b4a0535b0868ee791140bda9ecfa40/coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0", size = 238386 }, + { url = "https://files.pythonhosted.org/packages/04/62/4e6887e9be060f5d18f1dd58c2838b2d9646faf353232dec4e2d4b1c8644/coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852", size = 240054 }, + { url = "https://files.pythonhosted.org/packages/5c/74/83ae4151c170d8bd071924f212add22a0e62a7fe2b149edf016aeecad17c/coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359", size = 210904 }, + { url = "https://files.pythonhosted.org/packages/c3/54/de0893186a221478f5880283119fc40483bc460b27c4c71d1b8bba3474b9/coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247", size = 211692 }, + { url = "https://files.pythonhosted.org/packages/25/6d/31883d78865529257bf847df5789e2ae80e99de8a460c3453dbfbe0db069/coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9", size = 208308 }, + { url = "https://files.pythonhosted.org/packages/70/22/3f2b129cc08de00c83b0ad6252e034320946abfc3e4235c009e57cfeee05/coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b", size = 208565 }, + { url = "https://files.pythonhosted.org/packages/97/0a/d89bc2d1cc61d3a8dfe9e9d75217b2be85f6c73ebf1b9e3c2f4e797f4531/coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690", size = 241083 }, + { url = 
"https://files.pythonhosted.org/packages/4c/81/6d64b88a00c7a7aaed3a657b8eaa0931f37a6395fcef61e53ff742b49c97/coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18", size = 238235 }, + { url = "https://files.pythonhosted.org/packages/9a/0b/7797d4193f5adb4b837207ed87fecf5fc38f7cc612b369a8e8e12d9fa114/coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c", size = 240220 }, + { url = "https://files.pythonhosted.org/packages/65/4d/6f83ca1bddcf8e51bf8ff71572f39a1c73c34cf50e752a952c34f24d0a60/coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd", size = 239847 }, + { url = "https://files.pythonhosted.org/packages/30/9d/2470df6aa146aff4c65fee0f87f58d2164a67533c771c9cc12ffcdb865d5/coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e", size = 237922 }, + { url = "https://files.pythonhosted.org/packages/08/dd/723fef5d901e6a89f2507094db66c091449c8ba03272861eaefa773ad95c/coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694", size = 239783 }, + { url = "https://files.pythonhosted.org/packages/3d/f7/64d3298b2baf261cb35466000628706ce20a82d42faf9b771af447cd2b76/coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6", size = 210965 }, + { url = "https://files.pythonhosted.org/packages/d5/58/ec43499a7fc681212fe7742fe90b2bc361cdb72e3181ace1604247a5b24d/coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e", size = 211719 }, + { url = "https://files.pythonhosted.org/packages/ab/c9/f2857a135bcff4330c1e90e7d03446b036b2363d4ad37eb5e3a47bbac8a6/coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe", size = 209050 }, + { url = "https://files.pythonhosted.org/packages/aa/b3/f840e5bd777d8433caa9e4a1eb20503495709f697341ac1a8ee6a3c906ad/coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273", size = 209321 }, + { url = "https://files.pythonhosted.org/packages/85/7d/125a5362180fcc1c03d91850fc020f3831d5cda09319522bcfa6b2b70be7/coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8", size = 252039 }, + { url = "https://files.pythonhosted.org/packages/a9/9c/4358bf3c74baf1f9bddd2baf3756b54c07f2cfd2535f0a47f1e7757e54b3/coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098", size = 247758 }, + { url = "https://files.pythonhosted.org/packages/cf/c7/de3eb6fc5263b26fab5cda3de7a0f80e317597a4bad4781859f72885f300/coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb", size = 250119 }, + { url = 
"https://files.pythonhosted.org/packages/3e/e6/43de91f8ba2ec9140c6a4af1102141712949903dc732cf739167cfa7a3bc/coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0", size = 249597 }, + { url = "https://files.pythonhosted.org/packages/08/40/61158b5499aa2adf9e37bc6d0117e8f6788625b283d51e7e0c53cf340530/coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf", size = 247473 }, + { url = "https://files.pythonhosted.org/packages/50/69/b3f2416725621e9f112e74e8470793d5b5995f146f596f133678a633b77e/coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2", size = 248737 }, + { url = "https://files.pythonhosted.org/packages/3c/6e/fe899fb937657db6df31cc3e61c6968cb56d36d7326361847440a430152e/coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312", size = 211611 }, + { url = "https://files.pythonhosted.org/packages/1c/55/52f5e66142a9d7bc93a15192eba7a78513d2abf6b3558d77b4ca32f5f424/coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d", size = 212781 }, + { url = "https://files.pythonhosted.org/packages/a1/70/de81bfec9ed38a64fc44a77c7665e20ca507fc3265597c28b0d989e4082e/coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f", size = 200223 }, ] [package.optional-dependencies] @@ -923,43 +806,39 @@ toml = [ [[package]] name = "cryptography" -version = "44.0.1" +version = "44.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "(platform_python_implementation != 'PyPy' and sys_platform == 'darwin') or (platform_python_implementation != 'PyPy' and sys_platform == 'linux') or (platform_python_implementation != 'PyPy' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c7/67/545c79fe50f7af51dbad56d16b23fe33f63ee6a5d956b3cb68ea110cbe64/cryptography-44.0.1.tar.gz", hash = "sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14", size = 710819 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/72/27/5e3524053b4c8889da65cf7814a9d0d8514a05194a25e1e34f46852ee6eb/cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009", size = 6642022 }, - { url = "https://files.pythonhosted.org/packages/34/b9/4d1fa8d73ae6ec350012f89c3abfbff19fc95fe5420cf972e12a8d182986/cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f", size = 3943865 }, - { url = "https://files.pythonhosted.org/packages/6e/57/371a9f3f3a4500807b5fcd29fec77f418ba27ffc629d88597d0d1049696e/cryptography-44.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2", size = 4162562 }, - { url = "https://files.pythonhosted.org/packages/c5/1d/5b77815e7d9cf1e3166988647f336f87d5634a5ccecec2ffbe08ef8dd481/cryptography-44.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911", size = 3951923 }, - { url = 
"https://files.pythonhosted.org/packages/28/01/604508cd34a4024467cd4105887cf27da128cba3edd435b54e2395064bfb/cryptography-44.0.1-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69", size = 3685194 }, - { url = "https://files.pythonhosted.org/packages/c6/3d/d3c55d4f1d24580a236a6753902ef6d8aafd04da942a1ee9efb9dc8fd0cb/cryptography-44.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026", size = 4187790 }, - { url = "https://files.pythonhosted.org/packages/ea/a6/44d63950c8588bfa8594fd234d3d46e93c3841b8e84a066649c566afb972/cryptography-44.0.1-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd", size = 3951343 }, - { url = "https://files.pythonhosted.org/packages/c1/17/f5282661b57301204cbf188254c1a0267dbd8b18f76337f0a7ce1038888c/cryptography-44.0.1-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0", size = 4187127 }, - { url = "https://files.pythonhosted.org/packages/f3/68/abbae29ed4f9d96596687f3ceea8e233f65c9645fbbec68adb7c756bb85a/cryptography-44.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf", size = 4070666 }, - { url = "https://files.pythonhosted.org/packages/0f/10/cf91691064a9e0a88ae27e31779200b1505d3aee877dbe1e4e0d73b4f155/cryptography-44.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864", size = 4288811 }, - { url = "https://files.pythonhosted.org/packages/38/78/74ea9eb547d13c34e984e07ec8a473eb55b19c1451fe7fc8077c6a4b0548/cryptography-44.0.1-cp37-abi3-win32.whl", hash = "sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a", size = 2771882 }, - { url = "https://files.pythonhosted.org/packages/cf/6c/3907271ee485679e15c9f5e93eac6aa318f859b0aed8d369afd636fafa87/cryptography-44.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00", size = 3206989 }, - { url = "https://files.pythonhosted.org/packages/9f/f1/676e69c56a9be9fd1bffa9bc3492366901f6e1f8f4079428b05f1414e65c/cryptography-44.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008", size = 6643714 }, - { url = "https://files.pythonhosted.org/packages/ba/9f/1775600eb69e72d8f9931a104120f2667107a0ee478f6ad4fe4001559345/cryptography-44.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862", size = 3943269 }, - { url = "https://files.pythonhosted.org/packages/25/ba/e00d5ad6b58183829615be7f11f55a7b6baa5a06910faabdc9961527ba44/cryptography-44.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3", size = 4166461 }, - { url = "https://files.pythonhosted.org/packages/b3/45/690a02c748d719a95ab08b6e4decb9d81e0ec1bac510358f61624c86e8a3/cryptography-44.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7", size = 3950314 }, - { url = "https://files.pythonhosted.org/packages/e6/50/bf8d090911347f9b75adc20f6f6569ed6ca9b9bff552e6e390f53c2a1233/cryptography-44.0.1-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a", size = 3686675 }, - { url = "https://files.pythonhosted.org/packages/e1/e7/cfb18011821cc5f9b21efb3f94f3241e3a658d267a3bf3a0f45543858ed8/cryptography-44.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c", size = 4190429 }, - { url = "https://files.pythonhosted.org/packages/07/ef/77c74d94a8bfc1a8a47b3cafe54af3db537f081742ee7a8a9bd982b62774/cryptography-44.0.1-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62", size = 3950039 }, - { url = "https://files.pythonhosted.org/packages/6d/b9/8be0ff57c4592382b77406269b1e15650c9f1a167f9e34941b8515b97159/cryptography-44.0.1-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41", size = 4189713 }, - { url = "https://files.pythonhosted.org/packages/78/e1/4b6ac5f4100545513b0847a4d276fe3c7ce0eacfa73e3b5ebd31776816ee/cryptography-44.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b", size = 4071193 }, - { url = "https://files.pythonhosted.org/packages/3d/cb/afff48ceaed15531eab70445abe500f07f8f96af2bb35d98af6bfa89ebd4/cryptography-44.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7", size = 4289566 }, - { url = "https://files.pythonhosted.org/packages/30/6f/4eca9e2e0f13ae459acd1ca7d9f0257ab86e68f44304847610afcb813dc9/cryptography-44.0.1-cp39-abi3-win32.whl", hash = "sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9", size = 2772371 }, - { url = "https://files.pythonhosted.org/packages/d2/05/5533d30f53f10239616a357f080892026db2d550a40c393d0a8a7af834a9/cryptography-44.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f", size = 3207303 }, - { url = "https://files.pythonhosted.org/packages/15/06/507bfb5c7e048114a0185dd65f7814677a2ba285d15705c3d69e660c21d7/cryptography-44.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183", size = 3380782 }, - { url = "https://files.pythonhosted.org/packages/e0/f1/7fb4982d59aa86e1a116c812b545e7fc045352be07738ae3fb278835a9a4/cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12", size = 3888155 }, - { url = "https://files.pythonhosted.org/packages/60/7b/cbc203838d3092203493d18b923fbbb1de64e0530b332a713ba376905b0b/cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83", size = 4106417 }, - { url = "https://files.pythonhosted.org/packages/12/c7/2fe59fb085ab418acc82e91e040a6acaa7b1696fcc1c1055317537fbf0d3/cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420", size = 3887540 }, - { url = "https://files.pythonhosted.org/packages/48/89/09fc7b115f60f5bd970b80e32244f8e9aeeb9244bf870b63420cec3b5cd5/cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4", size = 4106040 }, - { url = 
"https://files.pythonhosted.org/packages/2e/38/3fd83c4690dc7d753a442a284b3826ea5e5c380a411443c66421cd823898/cryptography-44.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7", size = 3134657 }, +sdist = { url = "https://files.pythonhosted.org/packages/91/4c/45dfa6829acffa344e3967d6006ee4ae8be57af746ae2eba1c431949b32c/cryptography-44.0.0.tar.gz", hash = "sha256:cd4e834f340b4293430701e772ec543b0fbe6c2dea510a5286fe0acabe153a02", size = 710657 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/09/8cc67f9b84730ad330b3b72cf867150744bf07ff113cda21a15a1c6d2c7c/cryptography-44.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:84111ad4ff3f6253820e6d3e58be2cc2a00adb29335d4cacb5ab4d4d34f2a123", size = 6541833 }, + { url = "https://files.pythonhosted.org/packages/7e/5b/3759e30a103144e29632e7cb72aec28cedc79e514b2ea8896bb17163c19b/cryptography-44.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15492a11f9e1b62ba9d73c210e2416724633167de94607ec6069ef724fad092", size = 3922710 }, + { url = "https://files.pythonhosted.org/packages/5f/58/3b14bf39f1a0cfd679e753e8647ada56cddbf5acebffe7db90e184c76168/cryptography-44.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831c3c4d0774e488fdc83a1923b49b9957d33287de923d58ebd3cec47a0ae43f", size = 4137546 }, + { url = "https://files.pythonhosted.org/packages/98/65/13d9e76ca19b0ba5603d71ac8424b5694415b348e719db277b5edc985ff5/cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb", size = 3915420 }, + { url = "https://files.pythonhosted.org/packages/b1/07/40fe09ce96b91fc9276a9ad272832ead0fddedcba87f1190372af8e3039c/cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b", size = 4154498 }, + { url = "https://files.pythonhosted.org/packages/75/ea/af65619c800ec0a7e4034207aec543acdf248d9bffba0533342d1bd435e1/cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543", size = 3932569 }, + { url = "https://files.pythonhosted.org/packages/c7/af/d1deb0c04d59612e3d5e54203159e284d3e7a6921e565bb0eeb6269bdd8a/cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e", size = 4016721 }, + { url = "https://files.pythonhosted.org/packages/bd/69/7ca326c55698d0688db867795134bdfac87136b80ef373aaa42b225d6dd5/cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e", size = 4240915 }, + { url = "https://files.pythonhosted.org/packages/ef/d4/cae11bf68c0f981e0413906c6dd03ae7fa864347ed5fac40021df1ef467c/cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053", size = 2757925 }, + { url = "https://files.pythonhosted.org/packages/64/b1/50d7739254d2002acae64eed4fc43b24ac0cc44bf0a0d388d1ca06ec5bb1/cryptography-44.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:abc998e0c0eee3c8a1904221d3f67dcfa76422b23620173e28c11d3e626c21bd", size = 3202055 }, + { url = "https://files.pythonhosted.org/packages/11/18/61e52a3d28fc1514a43b0ac291177acd1b4de00e9301aaf7ef867076ff8a/cryptography-44.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = 
"sha256:660cb7312a08bc38be15b696462fa7cc7cd85c3ed9c576e81f4dc4d8b2b31591", size = 6542801 }, + { url = "https://files.pythonhosted.org/packages/1a/07/5f165b6c65696ef75601b781a280fc3b33f1e0cd6aa5a92d9fb96c410e97/cryptography-44.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1923cb251c04be85eec9fda837661c67c1049063305d6be5721643c22dd4e2b7", size = 3922613 }, + { url = "https://files.pythonhosted.org/packages/28/34/6b3ac1d80fc174812486561cf25194338151780f27e438526f9c64e16869/cryptography-44.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:404fdc66ee5f83a1388be54300ae978b2efd538018de18556dde92575e05defc", size = 4137925 }, + { url = "https://files.pythonhosted.org/packages/d0/c7/c656eb08fd22255d21bc3129625ed9cd5ee305f33752ef2278711b3fa98b/cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289", size = 3915417 }, + { url = "https://files.pythonhosted.org/packages/ef/82/72403624f197af0db6bac4e58153bc9ac0e6020e57234115db9596eee85d/cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7", size = 4155160 }, + { url = "https://files.pythonhosted.org/packages/a2/cd/2f3c440913d4329ade49b146d74f2e9766422e1732613f57097fea61f344/cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c", size = 3932331 }, + { url = "https://files.pythonhosted.org/packages/7f/df/8be88797f0a1cca6e255189a57bb49237402b1880d6e8721690c5603ac23/cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64", size = 4017372 }, + { url = "https://files.pythonhosted.org/packages/af/36/5ccc376f025a834e72b8e52e18746b927f34e4520487098e283a719c205e/cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285", size = 4239657 }, + { url = "https://files.pythonhosted.org/packages/46/b0/f4f7d0d0bcfbc8dd6296c1449be326d04217c57afb8b2594f017eed95533/cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417", size = 2758672 }, + { url = "https://files.pythonhosted.org/packages/97/9b/443270b9210f13f6ef240eff73fd32e02d381e7103969dc66ce8e89ee901/cryptography-44.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:708ee5f1bafe76d041b53a4f95eb28cdeb8d18da17e597d46d7833ee59b97ede", size = 3202071 }, + { url = "https://files.pythonhosted.org/packages/77/d4/fea74422326388bbac0c37b7489a0fcb1681a698c3b875959430ba550daa/cryptography-44.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37d76e6863da3774cd9db5b409a9ecfd2c71c981c38788d3fcfaf177f447b731", size = 3338857 }, + { url = "https://files.pythonhosted.org/packages/1a/aa/ba8a7467c206cb7b62f09b4168da541b5109838627f582843bbbe0235e8e/cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f677e1268c4e23420c3acade68fac427fffcb8d19d7df95ed7ad17cdef8404f4", size = 3850615 }, + { url = "https://files.pythonhosted.org/packages/89/fa/b160e10a64cc395d090105be14f399b94e617c879efd401188ce0fea39ee/cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f5e7cb1e5e56ca0933b4873c0220a78b773b24d40d186b6738080b73d3d0a756", size = 4081622 }, + { url = 
"https://files.pythonhosted.org/packages/47/8f/20ff0656bb0cf7af26ec1d01f780c5cfbaa7666736063378c5f48558b515/cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:8b3e6eae66cf54701ee7d9c83c30ac0a1e3fa17be486033000f2a73a12ab507c", size = 3867546 }, + { url = "https://files.pythonhosted.org/packages/38/d9/28edf32ee2fcdca587146bcde90102a7319b2f2c690edfa627e46d586050/cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:be4ce505894d15d5c5037167ffb7f0ae90b7be6f2a98f9a5c3442395501c32fa", size = 4090937 }, + { url = "https://files.pythonhosted.org/packages/cc/9d/37e5da7519de7b0b070a3fedd4230fe76d50d2a21403e0f2153d70ac4163/cryptography-44.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:62901fb618f74d7d81bf408c8719e9ec14d863086efe4185afd07c352aee1d2c", size = 3128774 }, ] [[package]] @@ -1020,11 +899,11 @@ wheels = [ [[package]] name = "decorator" -version = "5.2.1" +version = "5.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711 } +sdist = { url = "https://files.pythonhosted.org/packages/66/0c/8d907af351aa16b42caae42f9d6aa37b900c67308052d10fdce809f8d952/decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330", size = 35016 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190 }, + { url = "https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186", size = 9073 }, ] [[package]] @@ -1038,14 +917,14 @@ wheels = [ [[package]] name = "deprecated" -version = "1.2.18" +version = "1.2.15" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "wrapt", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/98/97/06afe62762c9a8a86af0cfb7bfdab22a43ad17138b07af5b1a58442690a2/deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d", size = 2928744 } +sdist = { url = "https://files.pythonhosted.org/packages/2e/a3/53e7d78a6850ffdd394d7048a31a6f14e44900adedf190f9a165f6b69439/deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d", size = 2977612 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/c6/ac0b6c1e2d138f1002bcf799d330bd6d85084fece321e662a14223794041/Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec", size = 9998 }, + { url = "https://files.pythonhosted.org/packages/1d/8f/c7f227eb42cfeaddce3eb0c96c60cbca37797fa7b34f8e1aeadf6c5c0983/Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320", size = 9941 }, ] [[package]] @@ -1060,15 +939,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl", hash = 
"sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a", size = 11178 }, ] -[[package]] -name = "diskcache" -version = "5.6.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3f/21/1c1ffc1a039ddcc459db43cc108658f32c57d271d7289a2794e401d0fdb6/diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc", size = 67916 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19", size = 45550 }, -] - [[package]] name = "distlib" version = "0.3.9" @@ -1096,20 +966,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632 }, ] -[[package]] -name = "docker" -version = "7.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pywin32", marker = "sys_platform == 'win32'" }, - { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774 }, -] - [[package]] name = "docstring-parser" version = "0.16" @@ -1157,25 +1013,25 @@ wheels = [ [[package]] name = "executing" -version = "2.2.0" +version = "2.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/91/50/a9d80c47ff289c611ff12e63f7c5d13942c65d68125160cefd768c73e6e4/executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755", size = 978693 } +sdist = { url = "https://files.pythonhosted.org/packages/8c/e3/7d45f492c2c4a0e8e0fad57d081a7c8a0286cdd86372b070cca1ec0caa1e/executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab", size = 977485 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/8f/c4d9bafc34ad7ad5d8dc16dd1347ee0e507a52c3adb6bfa8887e1c6a26ba/executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa", size = 26702 }, + { url = "https://files.pythonhosted.org/packages/b5/fd/afcd0496feca3276f509df3dbd5dae726fcc756f1a08d9e25abe1733f962/executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf", size = 25805 }, ] [[package]] name = "fastapi" -version = "0.115.8" +version = "0.115.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "starlette", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 
'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a2/b2/5a5dc4affdb6661dea100324e19a7721d5dc524b464fe8e366c093fd7d87/fastapi-0.115.8.tar.gz", hash = "sha256:0ce9111231720190473e222cdf0f07f7206ad7e53ea02beb1d2dc36e2f0741e9", size = 295403 } +sdist = { url = "https://files.pythonhosted.org/packages/93/72/d83b98cd106541e8f5e5bfab8ef2974ab45a62e8a6c5b5e6940f26d2ed4b/fastapi-0.115.6.tar.gz", hash = "sha256:9ec46f7addc14ea472958a96aae5b5de65f39721a46aaf5705c480d9a8b76654", size = 301336 } wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/7d/2d6ce181d7a5f51dedb8c06206cbf0ec026a99bf145edd309f9e17c3282f/fastapi-0.115.8-py3-none-any.whl", hash = "sha256:753a96dd7e036b34eeef8babdfcfe3f28ff79648f86551eb36bfc1b0bf4a8cbf", size = 94814 }, + { url = "https://files.pythonhosted.org/packages/52/b3/7e4df40e585df024fac2f80d1a2d579c854ac37109675db2b0cc22c0bb9e/fastapi-0.115.6-py3-none-any.whl", hash = "sha256:e9240b29e36fa8f4bb7290316988e90c381e5092e0cbe84e7818cc3713bcf305", size = 94843 }, ] [[package]] @@ -1189,23 +1045,11 @@ wheels = [ [[package]] name = "filelock" -version = "3.17.0" +version = "3.16.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dc/9c/0b15fb47b464e1b663b1acd1253a062aa5feecb07d4e597daea542ebd2b5/filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e", size = 18027 } +sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037 } wheels = [ - { url = "https://files.pythonhosted.org/packages/89/ec/00d68c4ddfedfe64159999e5f8a98fb8442729a63e2077eb9dcd89623d27/filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338", size = 16164 }, -] - -[[package]] -name = "flaml" -version = "2.3.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/20/a8/17322311b77f3012194f92c47c81455463f99c48d358c463fa45bd3c8541/flaml-2.3.4.tar.gz", hash = "sha256:308c3e769976d8a0272f2fd7d98258d7d4a4fd2e4525ba540d1ba149ae266c54", size = 284728 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/14/5c/c1e734b36d0f068708836238fbf1e8c34716a61e1a844482f37e277ba476/FLAML-2.3.4-py3-none-any.whl", hash = "sha256:dceab62194d469889c4584531049ac0a43480056f4f39c6ea207bfc12a157d76", size = 314250 }, + { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163 }, ] [[package]] @@ -1239,11 +1083,11 @@ wheels = [ [[package]] name = "flatbuffers" -version = "25.2.10" +version = "24.12.23" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/30/eb5dce7994fc71a2f685d98ec33cc660c0a5887db5610137e60d8cbc4489/flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e", size = 22170 } +sdist = { url = 
"https://files.pythonhosted.org/packages/a3/83/9ae01534f7e92a0c04f86586a0d62a4a0266e51d8bb2bfd5b8ea8165abba/flatbuffers-24.12.23.tar.gz", hash = "sha256:2910b0bc6ae9b6db78dd2b18d0b7a0709ba240fb5585f286a3a2b30785c22dac", size = 22164 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/25/155f9f080d5e4bc0082edfda032ea2bc2b8fab3f4d25d46c1e9dd22a1a89/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051", size = 30953 }, + { url = "https://files.pythonhosted.org/packages/fb/b4/31c461eef98b96b8ab736d97274548eaf2b2e349bf09e4de3902f7d53084/flatbuffers-24.12.23-py2.py3-none-any.whl", hash = "sha256:c418e0d48890f4142b92fd3e343e73a48f194e1f80075ddcc5793779b3585444", size = 30962 }, ] [[package]] @@ -1317,16 +1161,16 @@ wheels = [ [[package]] name = "fsspec" -version = "2025.2.0" +version = "2024.12.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b5/79/68612ed99700e6413de42895aa725463e821a6b3be75c87fcce1b4af4c70/fsspec-2025.2.0.tar.gz", hash = "sha256:1c24b16eaa0a1798afa0337aa0db9b256718ab2a89c425371f5628d22c3b6afd", size = 292283 } +sdist = { url = "https://files.pythonhosted.org/packages/ee/11/de70dee31455c546fbc88301971ec03c328f3d1138cfba14263f651e9551/fsspec-2024.12.0.tar.gz", hash = "sha256:670700c977ed2fb51e0d9f9253177ed20cbde4a3e5c0283cc5385b5870c8533f", size = 291600 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/94/758680531a00d06e471ef649e4ec2ed6bf185356a7f9fbfbb7368a40bd49/fsspec-2025.2.0-py3-none-any.whl", hash = "sha256:9de2ad9ce1f85e1931858535bc882543171d197001a0a5eb2ddc04f1781ab95b", size = 184484 }, + { url = "https://files.pythonhosted.org/packages/de/86/5486b0188d08aa643e127774a99bac51ffa6cf343e3deb0583956dca5b22/fsspec-2024.12.0-py3-none-any.whl", hash = "sha256:b520aed47ad9804237ff878b504267a3b0b441e97508bd6d2d8774e3db85cee2", size = 183862 }, ] [[package]] name = "google-ai-generativelanguage" -version = "0.6.15" +version = "0.6.10" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", extra = ["grpc"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1334,14 +1178,14 @@ dependencies = [ { name = "proto-plus", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/11/d1/48fe5d7a43d278e9f6b5ada810b0a3530bbeac7ed7fcbcd366f932f05316/google_ai_generativelanguage-0.6.15.tar.gz", hash = "sha256:8f6d9dc4c12b065fe2d0289026171acea5183ebf2d0b11cefe12f3821e159ec3", size = 1375443 } +sdist = { url = "https://files.pythonhosted.org/packages/a5/71/46543c398629bb883b769041fc10278d4d63aaa2c34744dede1b84ec0207/google_ai_generativelanguage-0.6.10.tar.gz", hash = "sha256:6fa642c964d8728006fe7e8771026fc0b599ae0ebeaf83caf550941e8e693455", size = 795200 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/a3/67b8a6ff5001a1d8864922f2d6488dc2a14367ceb651bc3f09a947f2f306/google_ai_generativelanguage-0.6.15-py3-none-any.whl", hash = "sha256:5a03ef86377aa184ffef3662ca28f19eeee158733e45d7947982eb953c6ebb6c", size = 1327356 }, + { url = "https://files.pythonhosted.org/packages/af/6d/db99a295f9caf027bbdd90c41e6ea650a7468392a0e8713719e7abc5f647/google_ai_generativelanguage-0.6.10-py3-none-any.whl", hash = 
"sha256:854a2bf833d18be05ad5ef13c755567b66a4f4a870f099b62c61fe11bddabcf4", size = 760045 }, ] [[package]] name = "google-api-core" -version = "2.24.1" +version = "2.24.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-auth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1350,9 +1194,9 @@ dependencies = [ { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b8/b7/481c83223d7b4f02c7651713fceca648fa3336e1571b9804713f66bca2d8/google_api_core-2.24.1.tar.gz", hash = "sha256:f8b36f5456ab0dd99a1b693a40a31d1e7757beea380ad1b38faaf8941eae9d8a", size = 163508 } +sdist = { url = "https://files.pythonhosted.org/packages/81/56/d70d66ed1b5ab5f6c27bf80ec889585ad8f865ff32acbafd3b2ef0bfb5d0/google_api_core-2.24.0.tar.gz", hash = "sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf", size = 162647 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/a6/8e30ddfd3d39ee6d2c76d3d4f64a83f77ac86a4cab67b286ae35ce9e4369/google_api_core-2.24.1-py3-none-any.whl", hash = "sha256:bc78d608f5a5bf853b80bd70a795f703294de656c096c0968320830a4bc280f1", size = 160059 }, + { url = "https://files.pythonhosted.org/packages/a1/76/65b8b94e74bf1b6d1cc38d916089670c4da5029d25762441d8c5c19e51dd/google_api_core-2.24.0-py3-none-any.whl", hash = "sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9", size = 158576 }, ] [package.optional-dependencies] @@ -1363,7 +1207,7 @@ grpc = [ [[package]] name = "google-api-python-client" -version = "2.162.0" +version = "2.159.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1372,23 +1216,23 @@ dependencies = [ { name = "httplib2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "uritemplate", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/73/d0/4a82e36c514437fa977d9b24f15328cd4505a0d92fcab9a18c81210b0f72/google_api_python_client-2.162.0.tar.gz", hash = "sha256:5f8bc934a5b6eea73a7d12d999e6585c1823179f48340234acb385e2502e735a", size = 12562719 } +sdist = { url = "https://files.pythonhosted.org/packages/5a/9f/12b58cca5a93d63fd6a7abed570423bdf2db4349eb9361ac5214d42ed7d6/google_api_python_client-2.159.0.tar.gz", hash = "sha256:55197f430f25c907394b44fa078545ffef89d33fd4dca501b7db9f0d8e224bd6", size = 12302576 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/b9/69e1f64714da8b312448f6c425c346189f377ee6a5ee06fa8b5371e08b6c/google_api_python_client-2.162.0-py2.py3-none-any.whl", hash = "sha256:49365fa4f7795fe81a747f5544d6528ea94314fa59664e0ea1005f603facf1ec", size = 13072387 }, + { url = "https://files.pythonhosted.org/packages/51/ab/d0671375afe79e6e8c51736e115a69bb6b4bcdc80cd5c01bf667486cd24c/google_api_python_client-2.159.0-py2.py3-none-any.whl", hash = "sha256:baef0bb631a60a0bd7c0bf12a5499e3a40cd4388484de7ee55c1950bf820a0cf", size = 12814228 }, ] [[package]] name = "google-auth" -version = "2.38.0" +version = "2.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cachetools", marker = 
"sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pyasn1-modules", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "rsa", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c6/eb/d504ba1daf190af6b204a9d4714d457462b486043744901a6eeea711f913/google_auth-2.38.0.tar.gz", hash = "sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4", size = 270866 } +sdist = { url = "https://files.pythonhosted.org/packages/46/af/b25763b9d35dfc2c6f9c3ec34d8d3f1ba760af3a7b7e8d5c5f0579522c45/google_auth-2.37.0.tar.gz", hash = "sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00", size = 268878 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9d/47/603554949a37bca5b7f894d51896a9c534b9eab808e2520a748e081669d0/google_auth-2.38.0-py2.py3-none-any.whl", hash = "sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a", size = 210770 }, + { url = "https://files.pythonhosted.org/packages/8d/8d/4d5d5f9f500499f7bd4c93903b43e8d6976f3fc6f064637ded1a85d09b07/google_auth-2.37.0-py2.py3-none-any.whl", hash = "sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0", size = 209829 }, ] [[package]] @@ -1406,7 +1250,7 @@ wheels = [ [[package]] name = "google-cloud-aiplatform" -version = "1.80.0" +version = "1.77.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "docstring-parser", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1422,14 +1266,14 @@ dependencies = [ { name = "shapely", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f0/88/d36384280cc4653e190a4a30025e66b285fbaef06024f68a4264cc588a33/google_cloud_aiplatform-1.80.0.tar.gz", hash = "sha256:bcaa4570a6fb56d3d29cb6b8f92588d4d1a1931de5f90cf07761853dab4c76fd", size = 8459480 } +sdist = { url = "https://files.pythonhosted.org/packages/4d/45/7ffd099ff7554d9f4f3665611afb44d3ea59f8a3dd071e4284381d0ac3c1/google_cloud_aiplatform-1.77.0.tar.gz", hash = "sha256:1e5b77fe6c7f276d7aae65bcf08a273122a71f6c4af1f43cf45821f603a74080", size = 8287282 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/57/5e761e7a8b03efc8e7faa4c0b2775991177bbd4dae7a6656a60dfd092ca8/google_cloud_aiplatform-1.80.0-py2.py3-none-any.whl", hash = "sha256:45d2a170f22431dae977551eccb740400bdb899807d0c8d4c16c53b2c1dbc6a5", size = 7089949 }, + { url = "https://files.pythonhosted.org/packages/7b/b6/f7a3c8bdb08a3636d216c49768eff3369b5475edd71f6dbe590a942252b9/google_cloud_aiplatform-1.77.0-py2.py3-none-any.whl", hash = "sha256:e9dd1bcb1b9a85eddd452916cd6ad1d9ce2d487772a9e45b1814aa0ac5633689", size = 6939280 }, ] [[package]] name = "google-cloud-bigquery" -version = "3.29.0" +version = "3.27.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", extra = ["grpc"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1440,27 +1284,27 @@ dependencies = [ { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "requests", marker = "sys_platform == 'darwin' or sys_platform 
== 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/21/36/87875a9775985849f18d4b3e320e4acdeb5232db3d49cfa6269e7c7867b8/google_cloud_bigquery-3.29.0.tar.gz", hash = "sha256:fafc2b455ffce3bcc6ce0e884184ef50b6a11350a83b91e327fadda4d5566e72", size = 467180 } +sdist = { url = "https://files.pythonhosted.org/packages/c0/05/633ce6686b1fed2cd364fa4698bfa6d586263cd4795d012584f8097061e1/google_cloud_bigquery-3.27.0.tar.gz", hash = "sha256:379c524054d7b090fa56d0c22662cc6e6458a6229b6754c0e7177e3a73421d2c", size = 456964 } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/60/9e1430f0fe17f8e8e931eff468021516f74f2573f261221529767dd59591/google_cloud_bigquery-3.29.0-py2.py3-none-any.whl", hash = "sha256:5453a4eabe50118254eda9778f3d7dad413490de5f7046b5e66c98f5a1580308", size = 244605 }, + { url = "https://files.pythonhosted.org/packages/f5/40/4b11a4a8839de8ce802a3ccd60b34e70ce10d13d434a560534ba98f0ea3f/google_cloud_bigquery-3.27.0-py2.py3-none-any.whl", hash = "sha256:b53b0431e5ba362976a4cd8acce72194b4116cdf8115030c7b339b884603fcc3", size = 240100 }, ] [[package]] name = "google-cloud-core" -version = "2.4.2" +version = "2.4.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "google-auth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8d/96/16cc0a34f75899ace6a42bb4ef242ac4aa263089b018d1c18c007d1fd8f2/google_cloud_core-2.4.2.tar.gz", hash = "sha256:a4fcb0e2fcfd4bfe963837fad6d10943754fd79c1a50097d68540b6eb3d67f35", size = 35854 } +sdist = { url = "https://files.pythonhosted.org/packages/b8/1f/9d1e0ba6919668608570418a9a51e47070ac15aeff64261fb092d8be94c0/google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073", size = 35587 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/0f/76e813cee7568ac467d929f4f0da7ab349596e7fc4ee837b990611e07d99/google_cloud_core-2.4.2-py2.py3-none-any.whl", hash = "sha256:7459c3e83de7cb8b9ecfec9babc910efb4314030c56dd798eaad12c426f7d180", size = 29343 }, + { url = "https://files.pythonhosted.org/packages/5e/0f/2e2061e3fbcb9d535d5da3f58cc8de4947df1786fe6a1355960feb05a681/google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61", size = 29233 }, ] [[package]] name = "google-cloud-resource-manager" -version = "1.14.1" +version = "1.14.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", extra = ["grpc"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1469,9 +1313,9 @@ dependencies = [ { name = "proto-plus", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/76/9d/da2e07d064926fc0d84c5f179006148cfa6fcffe6fd7aabdbf86dd20c46c/google_cloud_resource_manager-1.14.1.tar.gz", hash = "sha256:41e9e546aaa03d5160cdfa2341dbe81ef7596706c300a89b94c429f1f3411f87", size = 443094 } +sdist = { url = 
"https://files.pythonhosted.org/packages/cd/74/db14f34283b325b775b3287cd72ce8c43688bdea26801d02017a2ccded08/google_cloud_resource_manager-1.14.0.tar.gz", hash = "sha256:daa70a3a4704759d31f812ed221e3b6f7b660af30c7862e4a0060ea91291db30", size = 430148 } wheels = [ - { url = "https://files.pythonhosted.org/packages/47/be/ffdba56168f7e3778cd002a35fc0e94c608f088f6df24d2b980538389d71/google_cloud_resource_manager-1.14.1-py2.py3-none-any.whl", hash = "sha256:68340599f85ebf07a6e18487e460ea07cc15e132068f6b188786d01c2cf25518", size = 392325 }, + { url = "https://files.pythonhosted.org/packages/64/c4/2275ca35419f9a2ae66846f389490b356856bf55a9ad9f95a88399a89294/google_cloud_resource_manager-1.14.0-py2.py3-none-any.whl", hash = "sha256:4860c3ea9ace760b317ea90d4e27f1b32e54ededdcc340a7cb70c8ef238d8f7c", size = 384138 }, ] [[package]] @@ -1519,7 +1363,7 @@ wheels = [ [[package]] name = "google-generativeai" -version = "0.8.4" +version = "0.8.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-ai-generativelanguage", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1532,7 +1376,7 @@ dependencies = [ { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/b0/6c6af327a8a6ef3be6fe79be1d6f1e2914d6c363aa6b081b93396f4460a7/google_generativeai-0.8.4-py3-none-any.whl", hash = "sha256:e987b33ea6decde1e69191ddcaec6ef974458864d243de7191db50c21a7c5b82", size = 175409 }, + { url = "https://files.pythonhosted.org/packages/e9/2f/b5c1d62e94409ed98d5425e83b8e6d3dd475b611be272f561b1a545d273a/google_generativeai-0.8.3-py3-none-any.whl", hash = "sha256:1108ff89d5b8e59f51e63d1a8bf84701cd84656e17ca28d73aeed745e736d9b7", size = 160822 }, ] [[package]] @@ -1549,14 +1393,14 @@ wheels = [ [[package]] name = "googleapis-common-protos" -version = "1.68.0" +version = "1.66.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/54/d2/c08f0d9f94b45faca68e355771329cba2411c777c8713924dd1baee0e09c/googleapis_common_protos-1.68.0.tar.gz", hash = "sha256:95d38161f4f9af0d9423eed8fb7b64ffd2568c3464eb542ff02c5bfa1953ab3c", size = 57367 } +sdist = { url = "https://files.pythonhosted.org/packages/ff/a7/8e9cccdb1c49870de6faea2a2764fa23f627dd290633103540209f03524c/googleapis_common_protos-1.66.0.tar.gz", hash = "sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c", size = 114376 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/85/c99a157ee99d67cc6c9ad123abb8b1bfb476fab32d2f3511c59314548e4f/googleapis_common_protos-1.68.0-py2.py3-none-any.whl", hash = "sha256:aaf179b2f81df26dfadac95def3b16a95064c76a5f45f07e4c68a21bb371c4ac", size = 164985 }, + { url = "https://files.pythonhosted.org/packages/a0/0f/c0713fb2b3d28af4b2fded3291df1c4d4f79a00d15c2374a9e010870016c/googleapis_common_protos-1.66.0-py2.py3-none-any.whl", hash = "sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed", size = 221682 }, ] [package.optional-dependencies] @@ -1709,15 +1553,15 @@ wheels = [ [[package]] name = "h2" -version = "4.2.0" +version = "4.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "hpack", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { 
name = "hyperframe", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1b/38/d7f80fd13e6582fb8e0df8c9a653dcc02b03ca34f4d72f34869298c5baf8/h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f", size = 2150682 } +sdist = { url = "https://files.pythonhosted.org/packages/2a/32/fec683ddd10629ea4ea46d206752a95a2d8a48c22521edd70b142488efe1/h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb", size = 2145593 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0", size = 60957 }, + { url = "https://files.pythonhosted.org/packages/2a/e5/db6d438da759efbb488c4f3fbdab7764492ff3c3f953132efa6b9f0e9e53/h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d", size = 57488 }, ] [[package]] @@ -1796,11 +1640,11 @@ wheels = [ [[package]] name = "hpack" -version = "4.1.0" +version = "4.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276 } +sdist = { url = "https://files.pythonhosted.org/packages/3e/9b/fda93fb4d957db19b0f6b370e79d586b3e8528b20252c729c476a2c02954/hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095", size = 49117 } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357 }, + { url = "https://files.pythonhosted.org/packages/d5/34/e8b383f35b77c402d28563d2b8f83159319b509bc5f760b15d60b0abf165/hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c", size = 32611 }, ] [[package]] @@ -1866,17 +1710,18 @@ wheels = [ [[package]] name = "httpx" -version = "0.28.1" +version = "0.27.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "certifi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "httpcore", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "idna", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "sniffio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } +sdist = { url = "https://files.pythonhosted.org/packages/78/82/08f8c936781f67d9e6b9eeb8a0c8b4e406136ea4c3d1f89a5db71d42e0e6/httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2", size = 144189 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, + { url = "https://files.pythonhosted.org/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0", size = 76395 }, ] [package.optional-dependencies] @@ -1886,7 +1731,7 @@ http2 = [ [[package]] name = "huggingface-hub" -version = "0.29.1" +version = "0.27.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1897,9 +1742,9 @@ dependencies = [ { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/22/37/797d6476f13e5ef6af5fc48a5d641d32b39c37e166ccf40c3714c5854a85/huggingface_hub-0.29.1.tar.gz", hash = "sha256:9524eae42077b8ff4fc459ceb7a514eca1c1232b775276b009709fe2a084f250", size = 389776 } +sdist = { url = "https://files.pythonhosted.org/packages/e1/d2/d6976de7542792fc077b498d64af64882b6d8bb40679284ec0bff77d5929/huggingface_hub-0.27.1.tar.gz", hash = "sha256:c004463ca870283909d715d20f066ebd6968c2207dae9393fdffb3c1d4d8f98b", size = 379407 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ae/05/75b90de9093de0aadafc868bb2fa7c57651fd8f45384adf39bd77f63980d/huggingface_hub-0.29.1-py3-none-any.whl", hash = "sha256:352f69caf16566c7b6de84b54a822f6238e17ddd8ae3da4f8f2272aea5b198d5", size = 468049 }, + { url = "https://files.pythonhosted.org/packages/6c/3f/50f6b25fafdcfb1c089187a328c95081abf882309afd86f4053951507cd1/huggingface_hub-0.27.1-py3-none-any.whl", hash = "sha256:1c5155ca7d60b60c2e2fc38cbb3ffb7f7c3adf48f824015b219af9061771daec", size = 450658 }, ] [[package]] @@ -1916,20 +1761,20 @@ wheels = [ [[package]] name = "hyperframe" -version = "6.1.0" +version = "6.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566 } +sdist = { url = "https://files.pythonhosted.org/packages/5a/2a/4747bff0a17f7281abe73e955d60d80aae537a5d203f417fa1c2e7578ebb/hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914", size = 25008 } wheels = [ - { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007 }, + { url = "https://files.pythonhosted.org/packages/d7/de/85a784bcc4a3779d1753a7ec2dee5de90e18c7bcf402e71b51fcf150b129/hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15", size = 12389 }, ] [[package]] name = "identify" -version = "2.6.8" +version = "2.6.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/f9/fa/5eb460539e6f5252a7c5a931b53426e49258cde17e3d50685031c300a8fd/identify-2.6.8.tar.gz", hash = "sha256:61491417ea2c0c5c670484fd8abbb34de34cdae1e5f39a73ee65e48e4bb663fc", size = 99249 } +sdist = { url = "https://files.pythonhosted.org/packages/cf/92/69934b9ef3c31ca2470980423fda3d00f0460ddefdf30a67adf7f17e2e00/identify-2.6.5.tar.gz", hash = "sha256:c10b33f250e5bba374fae86fb57f3adcebf1161bce7cdf92031915fd480c13bc", size = 99213 } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/8c/4bfcab2d8286473b8d83ea742716f4b79290172e75f91142bc1534b05b9a/identify-2.6.8-py2.py3-none-any.whl", hash = "sha256:83657f0f766a3c8d0eaea16d4ef42494b39b34629a4b3192a9d020d349b3e255", size = 99109 }, + { url = "https://files.pythonhosted.org/packages/ec/fa/dce098f4cdf7621aa8f7b4f919ce545891f489482f0bfa5102f3eca8608b/identify-2.6.5-py2.py3-none-any.whl", hash = "sha256:14181a47091eb75b337af4c23078c9d09225cd4c48929f521f3bf16b09d02566", size = 99078 }, ] [[package]] @@ -1941,15 +1786,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, ] -[[package]] -name = "ifaddr" -version = "0.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/ac/fb4c578f4a3256561548cd825646680edcadb9440f3f68add95ade1eb791/ifaddr-0.2.0.tar.gz", hash = "sha256:cc0cbfcaabf765d44595825fb96a99bb12c79716b73b44330ea38ee2b0c4aed4", size = 10485 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/1f/19ebc343cc71a7ffa78f17018535adc5cbdd87afb31d7c34874680148b32/ifaddr-0.2.0-py3-none-any.whl", hash = "sha256:085e0305cfe6f16ab12d72e2024030f5d52674afad6911bb1eee207177b8a748", size = 12314 }, -] - [[package]] name = "importlib-metadata" version = "8.5.0" @@ -2006,7 +1842,7 @@ wheels = [ [[package]] name = "ipython" -version = "8.32.0" +version = "8.31.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -2021,9 +1857,9 @@ dependencies = [ { name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "(python_full_version < '3.12' and sys_platform == 'darwin') or (python_full_version < '3.12' and sys_platform == 'linux') or (python_full_version < '3.12' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/36/80/4d2a072e0db7d250f134bc11676517299264ebe16d62a8619d49a78ced73/ipython-8.32.0.tar.gz", hash = "sha256:be2c91895b0b9ea7ba49d33b23e2040c352b33eb6a519cca7ce6e0c743444251", size = 5507441 } +sdist = { url = "https://files.pythonhosted.org/packages/01/35/6f90fdddff7a08b7b715fccbd2427b5212c9525cd043d26fdc45bee0708d/ipython-8.31.0.tar.gz", hash = "sha256:b6a2274606bec6166405ff05e54932ed6e5cfecaca1fc05f2cacde7bb074d70b", size = 5501011 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/e1/f4474a7ecdb7745a820f6f6039dc43c66add40f1bcc66485607d93571af6/ipython-8.32.0-py3-none-any.whl", hash = "sha256:cae85b0c61eff1fc48b0a8002de5958b6528fa9c8defb1894da63f42613708aa", size = 825524 }, + { url = "https://files.pythonhosted.org/packages/04/60/d0feb6b6d9fe4ab89fe8fe5b47cbf6cd936bfd9f1e7ffa9d0015425aeed6/ipython-8.31.0-py3-none-any.whl", hash = 
"sha256:46ec58f8d3d076a61d128fe517a51eb730e3aaf0c184ea8c17d16e366660c6a6", size = 821583 }, ] [[package]] @@ -2171,7 +2007,7 @@ wheels = [ [[package]] name = "jsonschema-path" -version = "0.3.4" +version = "0.3.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pathable", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -2179,9 +2015,9 @@ dependencies = [ { name = "referencing", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6e/45/41ebc679c2a4fced6a722f624c18d658dee42612b83ea24c1caf7c0eb3a8/jsonschema_path-0.3.4.tar.gz", hash = "sha256:8365356039f16cc65fddffafda5f58766e34bebab7d6d105616ab52bc4297001", size = 11159 } +sdist = { url = "https://files.pythonhosted.org/packages/85/39/3a58b63a997b0cf824536d6f84fff82645a1ca8de222ee63586adab44dfa/jsonschema_path-0.3.3.tar.gz", hash = "sha256:f02e5481a4288ec062f8e68c808569e427d905bedfecb7f2e4c69ef77957c382", size = 11589 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/58/3485da8cb93d2f393bce453adeef16896751f14ba3e2024bc21dc9597646/jsonschema_path-0.3.4-py3-none-any.whl", hash = "sha256:f502191fdc2b22050f9a81c9237be9d27145b9001c55842bece5e94e382e52f8", size = 14810 }, + { url = "https://files.pythonhosted.org/packages/53/b0/69237e85976916b2e37586b7ddc48b9547fc38b440e25103d084b2b02ab3/jsonschema_path-0.3.3-py3-none-any.whl", hash = "sha256:203aff257f8038cd3c67be614fe6b2001043408cb1b4e36576bc4921e09d83c4", size = 14817 }, ] [[package]] @@ -2237,7 +2073,7 @@ wheels = [ [[package]] name = "kubernetes" -version = "32.0.1" +version = "31.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -2252,9 +2088,9 @@ dependencies = [ { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "websocket-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b7/e8/0598f0e8b4af37cd9b10d8b87386cf3173cb8045d834ab5f6ec347a758b3/kubernetes-32.0.1.tar.gz", hash = "sha256:42f43d49abd437ada79a79a16bd48a604d3471a117a8347e87db693f2ba0ba28", size = 946691 } +sdist = { url = "https://files.pythonhosted.org/packages/7e/bd/ffcd3104155b467347cd9b3a64eb24182e459579845196b3a200569c8912/kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0", size = 916096 } wheels = [ - { url = "https://files.pythonhosted.org/packages/08/10/9f8af3e6f569685ce3af7faab51c8dd9d93b9c38eba339ca31c746119447/kubernetes-32.0.1-py2.py3-none-any.whl", hash = "sha256:35282ab8493b938b08ab5526c7ce66588232df00ef5e1dbe88a419107dc10998", size = 1988070 }, + { url = "https://files.pythonhosted.org/packages/fb/a8/17f5e28cecdbd6d48127c22abdb794740803491f422a11905c4569d8e139/kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1", size = 1857013 }, ] [[package]] @@ -2404,7 +2240,7 @@ wheels = [ [[package]] name = "mistralai" -version = "1.5.0" +version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "eval-type-backport", marker = "sys_platform == 'darwin' or 
sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -2414,21 +2250,21 @@ dependencies = [ { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-inspect", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/16/9d/aba193fdfe0fc7403efa380189143d965becfb1bc7df3230e5c7664f8c53/mistralai-1.5.0.tar.gz", hash = "sha256:fd94bc93bc25aad9c6dd8005b1a0bc4ba1250c6b3fbf855a49936989cc6e5c0d", size = 131647 } +sdist = { url = "https://files.pythonhosted.org/packages/2f/50/59669ee8d21fd27a4f887148b1efb19d9be5ed22ec19c8e6eb842407ac0f/mistralai-1.3.1.tar.gz", hash = "sha256:1c30385656393f993625943045ad20de2aff4c6ab30fc6e8c727d735c22b1c08", size = 133338 } wheels = [ - { url = "https://files.pythonhosted.org/packages/58/e7/7147c75c383a975c58c33f8e7ee7dbbb0e7390fbcb1ecd321f63e4c73efd/mistralai-1.5.0-py3-none-any.whl", hash = "sha256:9372537719f87bd6f9feef4747d0bf1f4fbe971f8c02945ca4b4bf3c94571c97", size = 271559 }, + { url = "https://files.pythonhosted.org/packages/1a/b4/a76b6942b78383d5499f776d880a166296542383f6f952feeef96d0ea692/mistralai-1.3.1-py3-none-any.whl", hash = "sha256:35e74feadf835b7d2145095114b9cf3ba86c4cf1044f28f49b02cd6ddd0a5733", size = 261271 }, ] [[package]] name = "mistune" -version = "3.1.2" +version = "3.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/80/f7/f6d06304c61c2a73213c0a4815280f70d985429cda26272f490e42119c1a/mistune-3.1.2.tar.gz", hash = "sha256:733bf018ba007e8b5f2d3a9eb624034f6ee26c4ea769a98ec533ee111d504dff", size = 94613 } +sdist = { url = "https://files.pythonhosted.org/packages/79/6e/96fc7cb3288666c5de2c396eb0e338dc95f7a8e4920e43e38783a22d0084/mistune-3.1.0.tar.gz", hash = "sha256:dbcac2f78292b9dc066cd03b7a3a26b62d85f8159f2ea5fd28e55df79908d667", size = 94401 } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/92/30b4e54c4d7c48c06db61595cffbbf4f19588ea177896f9b78f0fbe021fd/mistune-3.1.2-py3-none-any.whl", hash = "sha256:4b47731332315cdca99e0ded46fc0004001c1299ff773dfb48fbe1fd226de319", size = 53696 }, + { url = "https://files.pythonhosted.org/packages/b4/b3/743ffc3f59da380da504d84ccd1faf9a857a1445991ff19bf2ec754163c2/mistune-3.1.0-py3-none-any.whl", hash = "sha256:b05198cf6d671b3deba6c87ec6cf0d4eb7b72c524636eddb6dbf13823b52cee1", size = 53694 }, ] [[package]] @@ -2456,74 +2292,74 @@ wheels = [ [[package]] name = "mmh3" -version = "5.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/47/1b/1fc6888c74cbd8abad1292dde2ddfcf8fc059e114c97dd6bf16d12f36293/mmh3-5.1.0.tar.gz", hash = "sha256:136e1e670500f177f49ec106a4ebf0adf20d18d96990cc36ea492c651d2b406c", size = 33728 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/01/9d06468928661765c0fc248a29580c760a4a53a9c6c52cf72528bae3582e/mmh3-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:eaf4ac5c6ee18ca9232238364d7f2a213278ae5ca97897cafaa123fcc7bb8bec", size = 56095 }, - { url = "https://files.pythonhosted.org/packages/e4/d7/7b39307fc9db867b2a9a20c58b0de33b778dd6c55e116af8ea031f1433ba/mmh3-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:48f9aa8ccb9ad1d577a16104834ac44ff640d8de8c0caed09a2300df7ce8460a", size = 40512 }, - { url = "https://files.pythonhosted.org/packages/4f/85/728ca68280d8ccc60c113ad119df70ff1748fbd44c89911fed0501faf0b8/mmh3-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d4ba8cac21e1f2d4e436ce03a82a7f87cda80378691f760e9ea55045ec480a3d", size = 40110 }, - { url = "https://files.pythonhosted.org/packages/e4/96/beaf0e301472ffa00358bbbf771fe2d9c4d709a2fe30b1d929e569f8cbdf/mmh3-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d69281c281cb01994f054d862a6bb02a2e7acfe64917795c58934b0872b9ece4", size = 100151 }, - { url = "https://files.pythonhosted.org/packages/c3/ee/9381f825c4e09ffafeffa213c3865c4bf7d39771640de33ab16f6faeb854/mmh3-5.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d05ed3962312fbda2a1589b97359d2467f677166952f6bd410d8c916a55febf", size = 106312 }, - { url = "https://files.pythonhosted.org/packages/67/dc/350a54bea5cf397d357534198ab8119cfd0d8e8bad623b520f9c290af985/mmh3-5.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78ae6a03f4cff4aa92ddd690611168856f8c33a141bd3e5a1e0a85521dc21ea0", size = 104232 }, - { url = "https://files.pythonhosted.org/packages/b2/5d/2c6eb4a4ec2f7293b98a9c07cb8c64668330b46ff2b6511244339e69a7af/mmh3-5.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95f983535b39795d9fb7336438faae117424c6798f763d67c6624f6caf2c4c01", size = 91663 }, - { url = "https://files.pythonhosted.org/packages/f1/ac/17030d24196f73ecbab8b5033591e5e0e2beca103181a843a135c78f4fee/mmh3-5.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d46fdd80d4c7ecadd9faa6181e92ccc6fe91c50991c9af0e371fdf8b8a7a6150", size = 99166 }, - { url = "https://files.pythonhosted.org/packages/b9/ed/54ddc56603561a10b33da9b12e95a48a271d126f4a4951841bbd13145ebf/mmh3-5.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0f16e976af7365ea3b5c425124b2a7f0147eed97fdbb36d99857f173c8d8e096", size = 101555 }, - { url = "https://files.pythonhosted.org/packages/1c/c3/33fb3a940c9b70908a5cc9fcc26534aff8698180f9f63ab6b7cc74da8bcd/mmh3-5.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6fa97f7d1e1f74ad1565127229d510f3fd65d931fdedd707c1e15100bc9e5ebb", size = 94813 }, - { url = "https://files.pythonhosted.org/packages/61/88/c9ff76a23abe34db8eee1a6fa4e449462a16c7eb547546fc5594b0860a72/mmh3-5.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4052fa4a8561bd62648e9eb993c8f3af3bdedadf3d9687aa4770d10e3709a80c", size = 109611 }, - { url = "https://files.pythonhosted.org/packages/0b/8e/27d04f40e95554ebe782cac7bddda2d158cf3862387298c9c7b254fa7beb/mmh3-5.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:3f0e8ae9f961037f812afe3cce7da57abf734285961fffbeff9a4c011b737732", size = 100515 }, - { url = "https://files.pythonhosted.org/packages/7b/00/504ca8f462f01048f3c87cd93f2e1f60b93dac2f930cd4ed73532a9337f5/mmh3-5.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:99297f207db967814f1f02135bb7fe7628b9eacb046134a34e1015b26b06edce", size = 100177 }, - { url = "https://files.pythonhosted.org/packages/6f/1d/2efc3525fe6fdf8865972fcbb884bd1f4b0f923c19b80891cecf7e239fa5/mmh3-5.1.0-cp310-cp310-win32.whl", hash = "sha256:2e6c8dc3631a5e22007fbdb55e993b2dbce7985c14b25b572dd78403c2e79182", size = 40815 }, - { url = 
"https://files.pythonhosted.org/packages/38/b5/c8fbe707cb0fea77a6d2d58d497bc9b67aff80deb84d20feb34d8fdd8671/mmh3-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:e4e8c7ad5a4dddcfde35fd28ef96744c1ee0f9d9570108aa5f7e77cf9cfdf0bf", size = 41479 }, - { url = "https://files.pythonhosted.org/packages/a1/f1/663e16134f913fccfbcea5b300fb7dc1860d8f63dc71867b013eebc10aec/mmh3-5.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:45da549269883208912868a07d0364e1418d8292c4259ca11699ba1b2475bd26", size = 38883 }, - { url = "https://files.pythonhosted.org/packages/56/09/fda7af7fe65928262098382e3bf55950cfbf67d30bf9e47731bf862161e9/mmh3-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b529dcda3f951ff363a51d5866bc6d63cf57f1e73e8961f864ae5010647079d", size = 56098 }, - { url = "https://files.pythonhosted.org/packages/0c/ab/84c7bc3f366d6f3bd8b5d9325a10c367685bc17c26dac4c068e2001a4671/mmh3-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db1079b3ace965e562cdfc95847312f9273eb2ad3ebea983435c8423e06acd7", size = 40513 }, - { url = "https://files.pythonhosted.org/packages/4f/21/25ea58ca4a652bdc83d1528bec31745cce35802381fb4fe3c097905462d2/mmh3-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:22d31e3a0ff89b8eb3b826d6fc8e19532998b2aa6b9143698043a1268da413e1", size = 40112 }, - { url = "https://files.pythonhosted.org/packages/bd/78/4f12f16ae074ddda6f06745254fdb50f8cf3c85b0bbf7eaca58bed84bf58/mmh3-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2139bfbd354cd6cb0afed51c4b504f29bcd687a3b1460b7e89498329cc28a894", size = 102632 }, - { url = "https://files.pythonhosted.org/packages/48/11/8f09dc999cf2a09b6138d8d7fc734efb7b7bfdd9adb9383380941caadff0/mmh3-5.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c8105c6a435bc2cd6ea2ef59558ab1a2976fd4a4437026f562856d08996673a", size = 108884 }, - { url = "https://files.pythonhosted.org/packages/bd/91/e59a66538a3364176f6c3f7620eee0ab195bfe26f89a95cbcc7a1fb04b28/mmh3-5.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57730067174a7f36fcd6ce012fe359bd5510fdaa5fe067bc94ed03e65dafb769", size = 106835 }, - { url = "https://files.pythonhosted.org/packages/25/14/b85836e21ab90e5cddb85fe79c494ebd8f81d96a87a664c488cc9277668b/mmh3-5.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde80eb196d7fdc765a318604ded74a4378f02c5b46c17aa48a27d742edaded2", size = 93688 }, - { url = "https://files.pythonhosted.org/packages/ac/aa/8bc964067df9262740c95e4cde2d19f149f2224f426654e14199a9e47df6/mmh3-5.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9c8eddcb441abddeb419c16c56fd74b3e2df9e57f7aa2903221996718435c7a", size = 101569 }, - { url = "https://files.pythonhosted.org/packages/70/b6/1fb163cbf919046a64717466c00edabebece3f95c013853fec76dbf2df92/mmh3-5.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:99e07e4acafbccc7a28c076a847fb060ffc1406036bc2005acb1b2af620e53c3", size = 98483 }, - { url = "https://files.pythonhosted.org/packages/70/49/ba64c050dd646060f835f1db6b2cd60a6485f3b0ea04976e7a29ace7312e/mmh3-5.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9e25ba5b530e9a7d65f41a08d48f4b3fedc1e89c26486361166a5544aa4cad33", size = 96496 }, - { url = "https://files.pythonhosted.org/packages/9e/07/f2751d6a0b535bb865e1066e9c6b80852571ef8d61bce7eb44c18720fbfc/mmh3-5.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:bb9bf7475b4d99156ce2f0cf277c061a17560c8c10199c910a680869a278ddc7", size = 105109 }, - { url = "https://files.pythonhosted.org/packages/b7/02/30360a5a66f7abba44596d747cc1e6fb53136b168eaa335f63454ab7bb79/mmh3-5.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a1b0878dd281ea3003368ab53ff6f568e175f1b39f281df1da319e58a19c23a", size = 98231 }, - { url = "https://files.pythonhosted.org/packages/8c/60/8526b0c750ff4d7ae1266e68b795f14b97758a1d9fcc19f6ecabf9c55656/mmh3-5.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:25f565093ac8b8aefe0f61f8f95c9a9d11dd69e6a9e9832ff0d293511bc36258", size = 97548 }, - { url = "https://files.pythonhosted.org/packages/6d/4c/26e1222aca65769280d5427a1ce5875ef4213449718c8f03958d0bf91070/mmh3-5.1.0-cp311-cp311-win32.whl", hash = "sha256:1e3554d8792387eac73c99c6eaea0b3f884e7130eb67986e11c403e4f9b6d372", size = 40810 }, - { url = "https://files.pythonhosted.org/packages/98/d5/424ba95062d1212ea615dc8debc8d57983f2242d5e6b82e458b89a117a1e/mmh3-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8ad777a48197882492af50bf3098085424993ce850bdda406a358b6ab74be759", size = 41476 }, - { url = "https://files.pythonhosted.org/packages/bd/08/0315ccaf087ba55bb19a6dd3b1e8acd491e74ce7f5f9c4aaa06a90d66441/mmh3-5.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f29dc4efd99bdd29fe85ed6c81915b17b2ef2cf853abf7213a48ac6fb3eaabe1", size = 38880 }, - { url = "https://files.pythonhosted.org/packages/f4/47/e5f452bdf16028bfd2edb4e2e35d0441e4a4740f30e68ccd4cfd2fb2c57e/mmh3-5.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:45712987367cb9235026e3cbf4334670522a97751abfd00b5bc8bfa022c3311d", size = 56152 }, - { url = "https://files.pythonhosted.org/packages/60/38/2132d537dc7a7fdd8d2e98df90186c7fcdbd3f14f95502a24ba443c92245/mmh3-5.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b1020735eb35086ab24affbea59bb9082f7f6a0ad517cb89f0fc14f16cea4dae", size = 40564 }, - { url = "https://files.pythonhosted.org/packages/c0/2a/c52cf000581bfb8d94794f58865658e7accf2fa2e90789269d4ae9560b16/mmh3-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:babf2a78ce5513d120c358722a2e3aa7762d6071cd10cede026f8b32452be322", size = 40104 }, - { url = "https://files.pythonhosted.org/packages/83/33/30d163ce538c54fc98258db5621447e3ab208d133cece5d2577cf913e708/mmh3-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4f47f58cd5cbef968c84a7c1ddc192fef0a36b48b0b8a3cb67354531aa33b00", size = 102634 }, - { url = "https://files.pythonhosted.org/packages/94/5c/5a18acb6ecc6852be2d215c3d811aa61d7e425ab6596be940877355d7f3e/mmh3-5.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2044a601c113c981f2c1e14fa33adc9b826c9017034fe193e9eb49a6882dbb06", size = 108888 }, - { url = "https://files.pythonhosted.org/packages/1f/f6/11c556324c64a92aa12f28e221a727b6e082e426dc502e81f77056f6fc98/mmh3-5.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94d999c9f2eb2da44d7c2826d3fbffdbbbbcde8488d353fee7c848ecc42b968", size = 106968 }, - { url = "https://files.pythonhosted.org/packages/5d/61/ca0c196a685aba7808a5c00246f17b988a9c4f55c594ee0a02c273e404f3/mmh3-5.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a015dcb24fa0c7a78f88e9419ac74f5001c1ed6a92e70fd1803f74afb26a4c83", size = 93771 }, - { url = 
"https://files.pythonhosted.org/packages/b4/55/0927c33528710085ee77b808d85bbbafdb91a1db7c8eaa89cac16d6c513e/mmh3-5.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:457da019c491a2d20e2022c7d4ce723675e4c081d9efc3b4d8b9f28a5ea789bd", size = 101726 }, - { url = "https://files.pythonhosted.org/packages/49/39/a92c60329fa470f41c18614a93c6cd88821412a12ee78c71c3f77e1cfc2d/mmh3-5.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71408579a570193a4ac9c77344d68ddefa440b00468a0b566dcc2ba282a9c559", size = 98523 }, - { url = "https://files.pythonhosted.org/packages/81/90/26adb15345af8d9cf433ae1b6adcf12e0a4cad1e692de4fa9f8e8536c5ae/mmh3-5.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8b3a04bc214a6e16c81f02f855e285c6df274a2084787eeafaa45f2fbdef1b63", size = 96628 }, - { url = "https://files.pythonhosted.org/packages/8a/4d/340d1e340df972a13fd4ec84c787367f425371720a1044220869c82364e9/mmh3-5.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:832dae26a35514f6d3c1e267fa48e8de3c7b978afdafa0529c808ad72e13ada3", size = 105190 }, - { url = "https://files.pythonhosted.org/packages/d3/7c/65047d1cccd3782d809936db446430fc7758bda9def5b0979887e08302a2/mmh3-5.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bf658a61fc92ef8a48945ebb1076ef4ad74269e353fffcb642dfa0890b13673b", size = 98439 }, - { url = "https://files.pythonhosted.org/packages/72/d2/3c259d43097c30f062050f7e861075099404e8886b5d4dd3cebf180d6e02/mmh3-5.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3313577453582b03383731b66447cdcdd28a68f78df28f10d275d7d19010c1df", size = 97780 }, - { url = "https://files.pythonhosted.org/packages/29/29/831ea8d4abe96cdb3e28b79eab49cac7f04f9c6b6e36bfc686197ddba09d/mmh3-5.1.0-cp312-cp312-win32.whl", hash = "sha256:1d6508504c531ab86c4424b5a5ff07c1132d063863339cf92f6657ff7a580f76", size = 40835 }, - { url = "https://files.pythonhosted.org/packages/12/dd/7cbc30153b73f08eeac43804c1dbc770538a01979b4094edbe1a4b8eb551/mmh3-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:aa75981fcdf3f21759d94f2c81b6a6e04a49dfbcdad88b152ba49b8e20544776", size = 41509 }, - { url = "https://files.pythonhosted.org/packages/80/9d/627375bab4c90dd066093fc2c9a26b86f87e26d980dbf71667b44cbee3eb/mmh3-5.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4c1a76808dfea47f7407a0b07aaff9087447ef6280716fd0783409b3088bb3c", size = 38888 }, - { url = "https://files.pythonhosted.org/packages/05/06/a098a42870db16c0a54a82c56a5bdc873de3165218cd5b3ca59dbc0d31a7/mmh3-5.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a523899ca29cfb8a5239618474a435f3d892b22004b91779fcb83504c0d5b8c", size = 56165 }, - { url = "https://files.pythonhosted.org/packages/5a/65/eaada79a67fde1f43e1156d9630e2fb70655e1d3f4e8f33d7ffa31eeacfd/mmh3-5.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:17cef2c3a6ca2391ca7171a35ed574b5dab8398163129a3e3a4c05ab85a4ff40", size = 40569 }, - { url = "https://files.pythonhosted.org/packages/36/7e/2b6c43ed48be583acd68e34d16f19209a9f210e4669421b0321e326d8554/mmh3-5.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:52e12895b30110f3d89dae59a888683cc886ed0472dd2eca77497edef6161997", size = 40104 }, - { url = "https://files.pythonhosted.org/packages/11/2b/1f9e962fdde8e41b0f43d22c8ba719588de8952f9376df7d73a434827590/mmh3-5.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d6719045cda75c3f40397fc24ab67b18e0cb8f69d3429ab4c39763c4c608dd", size = 102497 }, - { url = 
"https://files.pythonhosted.org/packages/46/94/d6c5c3465387ba077cccdc028ab3eec0d86eed1eebe60dcf4d15294056be/mmh3-5.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d19fa07d303a91f8858982c37e6939834cb11893cb3ff20e6ee6fa2a7563826a", size = 108834 }, - { url = "https://files.pythonhosted.org/packages/34/1e/92c212bb81796b69dddfd50a8a8f4b26ab0d38fdaf1d3e8628a67850543b/mmh3-5.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31b47a620d622fbde8ca1ca0435c5d25de0ac57ab507209245e918128e38e676", size = 106936 }, - { url = "https://files.pythonhosted.org/packages/f4/41/f2f494bbff3aad5ffd2085506255049de76cde51ddac84058e32768acc79/mmh3-5.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00f810647c22c179b6821079f7aa306d51953ac893587ee09cf1afb35adf87cb", size = 93709 }, - { url = "https://files.pythonhosted.org/packages/9e/a9/a2cc4a756d73d9edf4fb85c76e16fd56b0300f8120fd760c76b28f457730/mmh3-5.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6128b610b577eed1e89ac7177ab0c33d06ade2aba93f5c89306032306b5f1c6", size = 101623 }, - { url = "https://files.pythonhosted.org/packages/5e/6f/b9d735533b6a56b2d56333ff89be6a55ac08ba7ff33465feb131992e33eb/mmh3-5.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1e550a45d2ff87a1c11b42015107f1778c93f4c6f8e731bf1b8fa770321b8cc4", size = 98521 }, - { url = "https://files.pythonhosted.org/packages/99/47/dff2b54fac0d421c1e6ecbd2d9c85b2d0e6f6ee0d10b115d9364116a511e/mmh3-5.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:785ae09276342f79fd8092633e2d52c0f7c44d56e8cfda8274ccc9b76612dba2", size = 96696 }, - { url = "https://files.pythonhosted.org/packages/be/43/9e205310f47c43ddf1575bb3a1769c36688f30f1ac105e0f0c878a29d2cd/mmh3-5.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0f4be3703a867ef976434afd3661a33884abe73ceb4ee436cac49d3b4c2aaa7b", size = 105234 }, - { url = "https://files.pythonhosted.org/packages/6b/44/90b11fd2b67dcb513f5bfe9b476eb6ca2d5a221c79b49884dc859100905e/mmh3-5.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e513983830c4ff1f205ab97152a0050cf7164f1b4783d702256d39c637b9d107", size = 98449 }, - { url = "https://files.pythonhosted.org/packages/f0/d0/25c4b0c7b8e49836541059b28e034a4cccd0936202800d43a1cc48495ecb/mmh3-5.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9135c300535c828c0bae311b659f33a31c941572eae278568d1a953c4a57b59", size = 97796 }, - { url = "https://files.pythonhosted.org/packages/23/fa/cbbb7fcd0e287a715f1cd28a10de94c0535bd94164e38b852abc18da28c6/mmh3-5.1.0-cp313-cp313-win32.whl", hash = "sha256:c65dbd12885a5598b70140d24de5839551af5a99b29f9804bb2484b29ef07692", size = 40828 }, - { url = "https://files.pythonhosted.org/packages/09/33/9fb90ef822f7b734955a63851907cf72f8a3f9d8eb3c5706bfa6772a2a77/mmh3-5.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:10db7765201fc65003fa998faa067417ef6283eb5f9bba8f323c48fd9c33e91f", size = 41504 }, - { url = "https://files.pythonhosted.org/packages/16/71/4ad9a42f2772793a03cb698f0fc42499f04e6e8d2560ba2f7da0fb059a8e/mmh3-5.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:b22fe2e54be81f6c07dcb36b96fa250fb72effe08aa52fbb83eade6e1e2d5fd7", size = 38890 }, +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e2/08/04ad6419f072ea3f51f9a0f429dd30f5f0a0b02ead7ca11a831117b6f9e8/mmh3-5.0.1.tar.gz", hash = 
"sha256:7dab080061aeb31a6069a181f27c473a1f67933854e36a3464931f2716508896", size = 32008 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/b9/9a91b0a0e330557cdbf51fc43ca0ba306633f2ec6d2b15e871e288592a32/mmh3-5.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f0a4b4bf05778ed77d820d6e7d0e9bd6beb0c01af10e1ce9233f5d2f814fcafa", size = 52867 }, + { url = "https://files.pythonhosted.org/packages/da/28/6b37f0d6707872764e1af49f327b0940b6a3ad995d91b3839b90ba35f559/mmh3-5.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac7a391039aeab95810c2d020b69a94eb6b4b37d4e2374831e92db3a0cdf71c6", size = 38352 }, + { url = "https://files.pythonhosted.org/packages/76/84/a98f59a620b522f218876a0630b02fc345ecf078f6393595756ddb3aa0b5/mmh3-5.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3a2583b5521ca49756d8d8bceba80627a9cc295f255dcab4e3df7ccc2f09679a", size = 38214 }, + { url = "https://files.pythonhosted.org/packages/35/cb/4980c7eb6cd31f49d1913a4066562bc9e0af28526750f1232be9688a9cd4/mmh3-5.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:081a8423fe53c1ac94f87165f3e4c500125d343410c1a0c5f1703e898a3ef038", size = 93502 }, + { url = "https://files.pythonhosted.org/packages/65/f3/29726296fadeaf06134a6978f7c453dfa562cf2f0f1faf9ae28b9b8ef76e/mmh3-5.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8b4d72713799755dc8954a7d36d5c20a6c8de7b233c82404d122c7c7c1707cc", size = 98394 }, + { url = "https://files.pythonhosted.org/packages/35/fd/e181f4f4b250f7b63ee27a7d65e5e290a3ea0e26cc633f4bfd906f04558b/mmh3-5.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:389a6fd51efc76d3182d36ec306448559c1244f11227d2bb771bdd0e6cc91321", size = 98052 }, + { url = "https://files.pythonhosted.org/packages/61/5c/8a5d838da3eb3fb91035ef5eaaea469abab4e8e3fae55607c27a1a07d162/mmh3-5.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39f4128edaa074bff721b1d31a72508cba4d2887ee7867f22082e1fe9d4edea0", size = 86320 }, + { url = "https://files.pythonhosted.org/packages/10/80/3f33a8f4de12cea322607da1a84d001513affb741b3c3cc1277ecb85d34b/mmh3-5.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d5d23a94d91aabba3386b3769048d5f4210fdfef80393fece2f34ba5a7b466c", size = 93232 }, + { url = "https://files.pythonhosted.org/packages/9e/1c/d0ce5f498493be4de2e7e7596e1cbf63315a4c0bb8bb94e3c37c4fad965d/mmh3-5.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:16347d038361f8b8f24fd2b7ef378c9b68ddee9f7706e46269b6e0d322814713", size = 93590 }, + { url = "https://files.pythonhosted.org/packages/d9/66/770b5ad35b5a2eb7965f3fcaeaa76148e59543575d2e27b80690c1b0795c/mmh3-5.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6e299408565af7d61f2d20a5ffdd77cf2ed902460fe4e6726839d59ba4b72316", size = 88433 }, + { url = "https://files.pythonhosted.org/packages/14/58/e0d258b18749d8640233976493716a40aa27352dcb1cea941836357dac24/mmh3-5.0.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:42050af21ddfc5445ee5a66e73a8fc758c71790305e3ee9e4a85a8e69e810f94", size = 99339 }, + { url = "https://files.pythonhosted.org/packages/38/26/7267146122deb584cf377975b994d80c6d72c4c8d0e8eedff4d0cc5cd4c8/mmh3-5.0.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2ae9b1f5ef27ec54659920f0404b7ceb39966e28867c461bfe83a05e8d18ddb0", size = 93944 }, + { url = 
"https://files.pythonhosted.org/packages/8d/6b/df60b14a2dd383d8848f6f35496c86c7003be3ffb236789e98d002c542c6/mmh3-5.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:50c2495a02045f3047d71d4ae9cdd7a15efc0bcbb7ff17a18346834a8e2d1d19", size = 92798 }, + { url = "https://files.pythonhosted.org/packages/0a/3f/d5fecf13915163a15b449e5cc89232a4df90e836ecad1c38121318119d27/mmh3-5.0.1-cp310-cp310-win32.whl", hash = "sha256:c028fa77cddf351ca13b4a56d43c1775652cde0764cadb39120b68f02a23ecf6", size = 39185 }, + { url = "https://files.pythonhosted.org/packages/74/8e/4bb5ade332a87de633cda21dae09d6002d69601f2b93e9f40302ab2d9acf/mmh3-5.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c5e741e421ec14400c4aae30890515c201f518403bdef29ae1e00d375bb4bbb5", size = 39766 }, + { url = "https://files.pythonhosted.org/packages/16/2b/cd5cfa4d7ad40a37655af491f9270909d63fc27bcf0558ec36000ee5347f/mmh3-5.0.1-cp310-cp310-win_arm64.whl", hash = "sha256:b17156d56fabc73dbf41bca677ceb6faed435cc8544f6566d72ea77d8a17e9d0", size = 36540 }, + { url = "https://files.pythonhosted.org/packages/fb/8a/f3b9cf8b7110fef0f130158d7602af6f5b09f2cf568130814b7c92e2507b/mmh3-5.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a6d5a9b1b923f1643559ba1fc0bf7a5076c90cbb558878d3bf3641ce458f25d", size = 52867 }, + { url = "https://files.pythonhosted.org/packages/bf/06/f466e0da3c5bd6fbb1e047f70fd4e9e9563d0268aa56de511f363478dbf2/mmh3-5.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3349b968be555f7334bbcce839da98f50e1e80b1c615d8e2aa847ea4a964a012", size = 38349 }, + { url = "https://files.pythonhosted.org/packages/13/f0/2d3daca276a4673f82af859e4b0b18befd4e6e54f1017ba48ea9735b2f1b/mmh3-5.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1bd3c94b110e55db02ab9b605029f48a2f7f677c6e58c09d44e42402d438b7e1", size = 38211 }, + { url = "https://files.pythonhosted.org/packages/e3/56/a2d203ca97702d4e045ac1a46a608393da1a1dddb24f81de664dae940518/mmh3-5.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ba84d48608f79adbb10bb09986b6dc33eeda5c2d1bd75d00820081b73bde9", size = 95104 }, + { url = "https://files.pythonhosted.org/packages/ec/45/c7c8ae64e3ae024776a0ce5377c16c6741a3359f3e9505fc35fc5012beb2/mmh3-5.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0217987a8b8525c8d9170f66d036dec4ab45cfbd53d47e8d76125791ceb155e", size = 100049 }, + { url = "https://files.pythonhosted.org/packages/d5/74/681113776fe406c09870ab2152ffbd214a15bbc8f1d1da9ad73ce594b878/mmh3-5.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2797063a34e78d1b61639a98b0edec1c856fa86ab80c7ec859f1796d10ba429", size = 99671 }, + { url = "https://files.pythonhosted.org/packages/bf/4f/dbb8be18ce9b6ff8df14bc14348c0404b3091fb51df9c673ebfcf5877db3/mmh3-5.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8bba16340adcbd47853a2fbe5afdb397549e8f2e79324ff1dced69a3f8afe7c3", size = 87549 }, + { url = "https://files.pythonhosted.org/packages/5f/82/274d646f3f604c35b7e3d4eb7f3ff08b3bdc6a2c87d797709bb6f084a611/mmh3-5.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:282797957c9f60b51b9d768a602c25f579420cc9af46feb77d457a27823d270a", size = 94780 }, + { url = "https://files.pythonhosted.org/packages/c9/a1/f094ca8b8fb5e2ac53201070bda42b0fee80ceb92c153eb99a1453e3aed3/mmh3-5.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:e4fb670c29e63f954f9e7a2cdcd57b36a854c2538f579ef62681ccbaa1de2b69", size = 90430 }, + { url = "https://files.pythonhosted.org/packages/d9/23/4732ba68c6ef7242b69bb53b9e1bcb2ef065d68ed85fd26e829fb911ab5a/mmh3-5.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ee7d85438dc6aff328e19ab052086a3c29e8a9b632998a49e5c4b0034e9e8d6", size = 89451 }, + { url = "https://files.pythonhosted.org/packages/3c/c5/daea5d534fcf20b2399c2a7b1cd00a8d29d4d474247c15c2c94548a1a272/mmh3-5.0.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b7fb5db231f3092444bc13901e6a8d299667126b00636ffbad4a7b45e1051e2f", size = 94703 }, + { url = "https://files.pythonhosted.org/packages/5e/4a/34d5691e7be7c63c34181387bc69bdcc0005ca93c8b562d68cb5775e0e78/mmh3-5.0.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c100dd441703da5ec136b1d9003ed4a041d8a1136234c9acd887499796df6ad8", size = 91054 }, + { url = "https://files.pythonhosted.org/packages/5c/3a/ab31bb5e9e1a19a4a997593cbe6ce56710308218ff36c7f76d40ff9c8d2e/mmh3-5.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71f3b765138260fd7a7a2dba0ea5727dabcd18c1f80323c9cfef97a7e86e01d0", size = 89571 }, + { url = "https://files.pythonhosted.org/packages/0b/79/b986bb067dbfcba6879afe6e723aad1bd53f223450532dd9a4606d0af389/mmh3-5.0.1-cp311-cp311-win32.whl", hash = "sha256:9a76518336247fd17689ce3ae5b16883fd86a490947d46a0193d47fb913e26e3", size = 39187 }, + { url = "https://files.pythonhosted.org/packages/48/69/97029eda3df0f84edde16a496a2e71bac508fc5d1f0a31e163da071e2670/mmh3-5.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:336bc4df2e44271f1c302d289cc3d78bd52d3eed8d306c7e4bff8361a12bf148", size = 39766 }, + { url = "https://files.pythonhosted.org/packages/c7/51/538f2b8412303281d8ce2a9a5c4ea84ff81f06de98af0b7c72059727a3bb/mmh3-5.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:af6522722fbbc5999aa66f7244d0986767a46f1fb05accc5200f75b72428a508", size = 36540 }, + { url = "https://files.pythonhosted.org/packages/75/c7/5b52d0882e7c0dccfaf8786a648e2b26c5307c594abe5cbe98c092607c97/mmh3-5.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f2730bb263ed9c388e8860438b057a53e3cc701134a6ea140f90443c4c11aa40", size = 52907 }, + { url = "https://files.pythonhosted.org/packages/01/b5/9609fa353c27188292748db033323c206f3fc6fbfa124bccf6a42af0da08/mmh3-5.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6246927bc293f6d56724536400b85fb85f5be26101fa77d5f97dd5e2a4c69bf2", size = 38389 }, + { url = "https://files.pythonhosted.org/packages/33/99/49bf3c86244857b3b250c2f54aff22a5a78ef12258af556fa39bb1e80699/mmh3-5.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fbca322519a6e6e25b6abf43e940e1667cf8ea12510e07fb4919b48a0cd1c411", size = 38204 }, + { url = "https://files.pythonhosted.org/packages/f8/04/8860cab35b48aaefe40cf88344437e79ddc93cf7ff745dacd1cd56a2be1e/mmh3-5.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae8c19903ed8a1724ad9e67e86f15d198a7a1271a4f9be83d47e38f312ed672", size = 95091 }, + { url = "https://files.pythonhosted.org/packages/fa/e9/4ac56001a5bab6d26aa3dfabeddea6d7f037fd2972c76803259f51a5af75/mmh3-5.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a09fd6cc72c07c0c07c3357714234b646d78052487c4a3bd5f7f6e08408cff60", size = 100055 }, + { url = "https://files.pythonhosted.org/packages/18/e8/7d5fd73f559c423ed5b72f940130c27803a406ee0ffc32ef5422f733df67/mmh3-5.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2ff8551fee7ae3b11c5d986b6347ade0dccaadd4670ffdb2b944dee120ffcc84", size = 99764 }, + { url = "https://files.pythonhosted.org/packages/54/d8/c0d89da6c729feec997a9b3b68698894cef12359ade0da95eba9e03b1d5d/mmh3-5.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e39694c73a5a20c8bf36dfd8676ed351e5234d55751ba4f7562d85449b21ef3f", size = 87650 }, + { url = "https://files.pythonhosted.org/packages/dd/41/ec0ee3fd5124c83cb767dcea8569bb326f8981cc88c991e3e4e948a31e24/mmh3-5.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eba6001989a92f72a89c7cf382fda831678bd780707a66b4f8ca90239fdf2123", size = 94976 }, + { url = "https://files.pythonhosted.org/packages/8e/fa/e8059199fe6fbb2fd6494302904cb1209b2f8b6899d58059858a280e89a5/mmh3-5.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0771f90c9911811cc606a5c7b7b58f33501c9ee896ed68a6ac22c7d55878ecc0", size = 90485 }, + { url = "https://files.pythonhosted.org/packages/3a/a0/eb9da5f93dea3f44b8e970f013279d1543ab210ccf63bb030830968682aa/mmh3-5.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:09b31ed0c0c0920363e96641fac4efde65b1ab62b8df86293142f35a254e72b4", size = 89554 }, + { url = "https://files.pythonhosted.org/packages/e7/e8/5803181eac4e015b4caf307af22fea74292dca48e580d93afe402dcdc138/mmh3-5.0.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5cf4a8deda0235312db12075331cb417c4ba163770edfe789bde71d08a24b692", size = 94872 }, + { url = "https://files.pythonhosted.org/packages/ed/f9/4d55063f9dcaed41524f078a85989efdf1d335159af5e70af29942ebae67/mmh3-5.0.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:41f7090a95185ef20ac018581a99337f0cbc84a2135171ee3290a9c0d9519585", size = 91326 }, + { url = "https://files.pythonhosted.org/packages/80/75/0a5acab5291480acd939db80e94448ac937fc7fbfddc0a67b3e721ebfc9c/mmh3-5.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b97b5b368fb7ff22194ec5854f5b12d8de9ab67a0f304728c7f16e5d12135b76", size = 89810 }, + { url = "https://files.pythonhosted.org/packages/9b/fd/eb1a3573cda74d4c2381d10ded62c128e869954ced1881c15e2bcd97a48f/mmh3-5.0.1-cp312-cp312-win32.whl", hash = "sha256:842516acf04da546f94fad52db125ee619ccbdcada179da51c326a22c4578cb9", size = 39206 }, + { url = "https://files.pythonhosted.org/packages/66/e8/542ed252924002b84c43a68a080cfd4facbea0d5df361e4f59637638d3c7/mmh3-5.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:d963be0dbfd9fca209c17172f6110787ebf78934af25e3694fe2ba40e55c1e2b", size = 39799 }, + { url = "https://files.pythonhosted.org/packages/bd/25/ff2cd36c82a23afa57a05cdb52ab467a911fb12c055c8a8238c0d426cbf0/mmh3-5.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:a5da292ceeed8ce8e32b68847261a462d30fd7b478c3f55daae841404f433c15", size = 36537 }, + { url = "https://files.pythonhosted.org/packages/09/e0/fb19c46265c18311b422ba5ce3e18046ad45c48cfb213fd6dbec23ae6b51/mmh3-5.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:673e3f1c8d4231d6fb0271484ee34cb7146a6499fc0df80788adb56fd76842da", size = 52909 }, + { url = "https://files.pythonhosted.org/packages/c3/94/54fc591e7a24c7ce2c531ecfc5715cff932f9d320c2936550cc33d67304d/mmh3-5.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f795a306bd16a52ad578b663462cc8e95500b3925d64118ae63453485d67282b", size = 38396 }, + { url = "https://files.pythonhosted.org/packages/1f/9a/142bcc9d0d28fc8ae45bbfb83926adc069f984cdf3495a71534cc22b8e27/mmh3-5.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:5ed57a5e28e502a1d60436cc25c76c3a5ba57545f250f2969af231dc1221e0a5", size = 38207 }, + { url = "https://files.pythonhosted.org/packages/f8/5b/f1c9110aa70321bb1ee713f17851b9534586c63bc25e0110e4fc03ae2450/mmh3-5.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:632c28e7612e909dbb6cbe2fe496201ada4695b7715584005689c5dc038e59ad", size = 94988 }, + { url = "https://files.pythonhosted.org/packages/87/e5/4dc67e7e0e716c641ab0a5875a659e37258417439590feff5c3bd3ff4538/mmh3-5.0.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53fd6bd525a5985e391c43384672d9d6b317fcb36726447347c7fc75bfed34ec", size = 99969 }, + { url = "https://files.pythonhosted.org/packages/ac/68/d148327337687c53f04ad9ceaedfa9ad155ee0111d0cb06220f044d66720/mmh3-5.0.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dceacf6b0b961a0e499836af3aa62d60633265607aef551b2a3e3c48cdaa5edd", size = 99662 }, + { url = "https://files.pythonhosted.org/packages/13/79/782adb6df6397947c1097b1e94b7f8d95629a4a73df05cf7207bd5148c1f/mmh3-5.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f0738d478fdfb5d920f6aff5452c78f2c35b0eff72caa2a97dfe38e82f93da2", size = 87606 }, + { url = "https://files.pythonhosted.org/packages/f2/c2/0404383281df049d0e4ccf07fabd659fc1f3da834df6708d934116cbf45d/mmh3-5.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e70285e7391ab88b872e5bef632bad16b9d99a6d3ca0590656a4753d55988af", size = 94836 }, + { url = "https://files.pythonhosted.org/packages/c8/33/fda67c5f28e4c2131891cf8cbc3513cfc55881e3cfe26e49328e38ffacb3/mmh3-5.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:27e5fc6360aa6b828546a4318da1a7da6bf6e5474ccb053c3a6aa8ef19ff97bd", size = 90492 }, + { url = "https://files.pythonhosted.org/packages/64/2f/0ed38aefe2a87f30bb1b12e5b75dc69fcffdc16def40d1752d6fc7cbbf96/mmh3-5.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7989530c3c1e2c17bf5a0ec2bba09fd19819078ba90beedabb1c3885f5040b0d", size = 89594 }, + { url = "https://files.pythonhosted.org/packages/95/ab/6e7a5e765fc78e3dbd0a04a04cfdf72e91eb8e31976228e69d82c741a5b4/mmh3-5.0.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:cdad7bee649950da7ecd3cbbbd12fb81f1161072ecbdb5acfa0018338c5cb9cf", size = 94929 }, + { url = "https://files.pythonhosted.org/packages/74/51/f748f00c072006f4a093d9b08853a0e2e3cd5aeaa91343d4e2d942851978/mmh3-5.0.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e143b8f184c1bb58cecd85ab4a4fd6dc65a2d71aee74157392c3fddac2a4a331", size = 91317 }, + { url = "https://files.pythonhosted.org/packages/df/a1/21ee8017a7feb0270c49f756ff56da9f99bd150dcfe3b3f6f0d4b243423d/mmh3-5.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5eb12e886f3646dd636f16b76eb23fc0c27e8ff3c1ae73d4391e50ef60b40f6", size = 89861 }, + { url = "https://files.pythonhosted.org/packages/c2/d2/46a6d070de4659bdf91cd6a62d659f8cc547dadee52b6d02bcbacb3262ed/mmh3-5.0.1-cp313-cp313-win32.whl", hash = "sha256:16e6dddfa98e1c2d021268e72c78951234186deb4df6630e984ac82df63d0a5d", size = 39201 }, + { url = "https://files.pythonhosted.org/packages/ed/07/316c062f09019b99b248a4183c5333f8eeebe638345484774908a8f2c9c0/mmh3-5.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:d3ffb792d70b8c4a2382af3598dad6ae0c5bd9cee5b7ffcc99aa2f5fd2c1bf70", size = 39807 }, + { url = 
"https://files.pythonhosted.org/packages/9d/d3/f7e6d7d062b8d7072c3989a528d9d47486ee5d5ae75250f6e26b4976d098/mmh3-5.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:122fa9ec148383f9124292962bda745f192b47bfd470b2af5fe7bb3982b17896", size = 36539 }, ] [[package]] @@ -2546,14 +2382,14 @@ wheels = [ [[package]] name = "motor" -version = "3.7.0" +version = "3.6.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pymongo", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2b/c0/b94558a88fb8406b092bb180c6fa5fb3068f8ec2c7e84dd2b0625f4f4f6e/motor-3.7.0.tar.gz", hash = "sha256:0dfa1f12c812bd90819c519b78bed626b5a9dbb29bba079ccff2bfa8627e0fec", size = 279745 } +sdist = { url = "https://files.pythonhosted.org/packages/c3/5d/be1f10b4ecc259503bcc9c5642a61b39715796343c771f3f61b84f79ee21/motor-3.6.1.tar.gz", hash = "sha256:ee2b18386292f9ceb3cc8279a4cd34e4c641c5ac8de3500c30374081c76a9d03", size = 279031 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/a6/e915e3225cc431c7ff07fd3e5ae138f6eb1c3ef4f8e8356cab1ea5dc1ed5/motor-3.7.0-py3-none-any.whl", hash = "sha256:61bdf1afded179f008d423f98066348157686f25a90776ea155db5f47f57d605", size = 74811 }, + { url = "https://files.pythonhosted.org/packages/ce/aa/a255c74c702477a8610fa24fb83af45ce1a2dd0bce7bca4e2230e2c9a23b/motor-3.6.1-py3-none-any.whl", hash = "sha256:7fe552353aded4fa9f05ae515a179df5b1d192b1da56726f422dbb2d8c3b5962", size = 74813 }, ] [[package]] @@ -2666,40 +2502,40 @@ wheels = [ [[package]] name = "mypy" -version = "1.15.0" +version = "1.14.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mypy-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ce/43/d5e49a86afa64bd3839ea0d5b9c7103487007d728e1293f52525d6d5486a/mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", size = 3239717 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/68/f8/65a7ce8d0e09b6329ad0c8d40330d100ea343bd4dd04c4f8ae26462d0a17/mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13", size = 10738433 }, - { url = "https://files.pythonhosted.org/packages/b4/95/9c0ecb8eacfe048583706249439ff52105b3f552ea9c4024166c03224270/mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559", size = 9861472 }, - { url = "https://files.pythonhosted.org/packages/84/09/9ec95e982e282e20c0d5407bc65031dfd0f0f8ecc66b69538296e06fcbee/mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b", size = 11611424 }, - { url = "https://files.pythonhosted.org/packages/78/13/f7d14e55865036a1e6a0a69580c240f43bc1f37407fe9235c0d4ef25ffb0/mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3", size = 12365450 }, - { url = "https://files.pythonhosted.org/packages/48/e1/301a73852d40c241e915ac6d7bcd7fedd47d519246db2d7b86b9d7e7a0cb/mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b", size = 12551765 }, - { url = "https://files.pythonhosted.org/packages/77/ba/c37bc323ae5fe7f3f15a28e06ab012cd0b7552886118943e90b15af31195/mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828", size = 9274701 }, - { url = "https://files.pythonhosted.org/packages/03/bc/f6339726c627bd7ca1ce0fa56c9ae2d0144604a319e0e339bdadafbbb599/mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f", size = 10662338 }, - { url = "https://files.pythonhosted.org/packages/e2/90/8dcf506ca1a09b0d17555cc00cd69aee402c203911410136cd716559efe7/mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5", size = 9787540 }, - { url = "https://files.pythonhosted.org/packages/05/05/a10f9479681e5da09ef2f9426f650d7b550d4bafbef683b69aad1ba87457/mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e", size = 11538051 }, - { url = "https://files.pythonhosted.org/packages/e9/9a/1f7d18b30edd57441a6411fcbc0c6869448d1a4bacbaee60656ac0fc29c8/mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c", size = 12286751 }, - { url = "https://files.pythonhosted.org/packages/72/af/19ff499b6f1dafcaf56f9881f7a965ac2f474f69f6f618b5175b044299f5/mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f", size = 12421783 }, - { url = "https://files.pythonhosted.org/packages/96/39/11b57431a1f686c1aed54bf794870efe0f6aeca11aca281a0bd87a5ad42c/mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f", size = 9265618 }, - { url = "https://files.pythonhosted.org/packages/98/3a/03c74331c5eb8bd025734e04c9840532226775c47a2c39b56a0c8d4f128d/mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", size = 10793981 }, - { url = "https://files.pythonhosted.org/packages/f0/1a/41759b18f2cfd568848a37c89030aeb03534411eef981df621d8fad08a1d/mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", size = 9749175 }, - { url = "https://files.pythonhosted.org/packages/12/7e/873481abf1ef112c582db832740f4c11b2bfa510e829d6da29b0ab8c3f9c/mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", size = 11455675 }, - { url = "https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", size = 12410020 }, - { url = 
"https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", size = 12498582 }, - { url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", size = 9366614 }, - { url = "https://files.pythonhosted.org/packages/6a/9b/fd2e05d6ffff24d912f150b87db9e364fa8282045c875654ce7e32fffa66/mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", size = 10788592 }, - { url = "https://files.pythonhosted.org/packages/74/37/b246d711c28a03ead1fd906bbc7106659aed7c089d55fe40dd58db812628/mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", size = 9753611 }, - { url = "https://files.pythonhosted.org/packages/a6/ac/395808a92e10cfdac8003c3de9a2ab6dc7cde6c0d2a4df3df1b815ffd067/mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", size = 11438443 }, - { url = "https://files.pythonhosted.org/packages/d2/8b/801aa06445d2de3895f59e476f38f3f8d610ef5d6908245f07d002676cbf/mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", size = 12402541 }, - { url = "https://files.pythonhosted.org/packages/c7/67/5a4268782eb77344cc613a4cf23540928e41f018a9a1ec4c6882baf20ab8/mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", size = 12494348 }, - { url = "https://files.pythonhosted.org/packages/83/3e/57bb447f7bbbfaabf1712d96f9df142624a386d98fb026a761532526057e/mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", size = 9373648 }, - { url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777 }, +sdist = { url = "https://files.pythonhosted.org/packages/b9/eb/2c92d8ea1e684440f54fa49ac5d9a5f19967b7b472a281f419e69a8d228e/mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6", size = 3216051 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/7a/87ae2adb31d68402da6da1e5f30c07ea6063e9f09b5e7cfc9dfa44075e74/mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb", size = 11211002 }, + { url = "https://files.pythonhosted.org/packages/e1/23/eada4c38608b444618a132be0d199b280049ded278b24cbb9d3fc59658e4/mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0", size = 10358400 }, + { url = "https://files.pythonhosted.org/packages/43/c9/d6785c6f66241c62fd2992b05057f404237deaad1566545e9f144ced07f5/mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d", 
size = 12095172 }, + { url = "https://files.pythonhosted.org/packages/c3/62/daa7e787770c83c52ce2aaf1a111eae5893de9e004743f51bfcad9e487ec/mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b", size = 12828732 }, + { url = "https://files.pythonhosted.org/packages/1b/a2/5fb18318a3637f29f16f4e41340b795da14f4751ef4f51c99ff39ab62e52/mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427", size = 13012197 }, + { url = "https://files.pythonhosted.org/packages/28/99/e153ce39105d164b5f02c06c35c7ba958aaff50a2babba7d080988b03fe7/mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f", size = 9780836 }, + { url = "https://files.pythonhosted.org/packages/da/11/a9422850fd506edbcdc7f6090682ecceaf1f87b9dd847f9df79942da8506/mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c", size = 11120432 }, + { url = "https://files.pythonhosted.org/packages/b6/9e/47e450fd39078d9c02d620545b2cb37993a8a8bdf7db3652ace2f80521ca/mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1", size = 10279515 }, + { url = "https://files.pythonhosted.org/packages/01/b5/6c8d33bd0f851a7692a8bfe4ee75eb82b6983a3cf39e5e32a5d2a723f0c1/mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8", size = 12025791 }, + { url = "https://files.pythonhosted.org/packages/f0/4c/e10e2c46ea37cab5c471d0ddaaa9a434dc1d28650078ac1b56c2d7b9b2e4/mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f", size = 12749203 }, + { url = "https://files.pythonhosted.org/packages/88/55/beacb0c69beab2153a0f57671ec07861d27d735a0faff135a494cd4f5020/mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1", size = 12885900 }, + { url = "https://files.pythonhosted.org/packages/a2/75/8c93ff7f315c4d086a2dfcde02f713004357d70a163eddb6c56a6a5eff40/mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae", size = 9777869 }, + { url = "https://files.pythonhosted.org/packages/43/1b/b38c079609bb4627905b74fc6a49849835acf68547ac33d8ceb707de5f52/mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14", size = 11266668 }, + { url = "https://files.pythonhosted.org/packages/6b/75/2ed0d2964c1ffc9971c729f7a544e9cd34b2cdabbe2d11afd148d7838aa2/mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9", size = 10254060 }, + { url = "https://files.pythonhosted.org/packages/a1/5f/7b8051552d4da3c51bbe8fcafffd76a6823779101a2b198d80886cd8f08e/mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11", size = 11933167 }, + { url = 
"https://files.pythonhosted.org/packages/04/90/f53971d3ac39d8b68bbaab9a4c6c58c8caa4d5fd3d587d16f5927eeeabe1/mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e", size = 12864341 }, + { url = "https://files.pythonhosted.org/packages/03/d2/8bc0aeaaf2e88c977db41583559319f1821c069e943ada2701e86d0430b7/mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89", size = 12972991 }, + { url = "https://files.pythonhosted.org/packages/6f/17/07815114b903b49b0f2cf7499f1c130e5aa459411596668267535fe9243c/mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b", size = 9879016 }, + { url = "https://files.pythonhosted.org/packages/9e/15/bb6a686901f59222275ab228453de741185f9d54fecbaacec041679496c6/mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255", size = 11252097 }, + { url = "https://files.pythonhosted.org/packages/f8/b3/8b0f74dfd072c802b7fa368829defdf3ee1566ba74c32a2cb2403f68024c/mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34", size = 10239728 }, + { url = "https://files.pythonhosted.org/packages/c5/9b/4fd95ab20c52bb5b8c03cc49169be5905d931de17edfe4d9d2986800b52e/mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a", size = 11924965 }, + { url = "https://files.pythonhosted.org/packages/56/9d/4a236b9c57f5d8f08ed346914b3f091a62dd7e19336b2b2a0d85485f82ff/mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9", size = 12867660 }, + { url = "https://files.pythonhosted.org/packages/40/88/a61a5497e2f68d9027de2bb139c7bb9abaeb1be1584649fa9d807f80a338/mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd", size = 12969198 }, + { url = "https://files.pythonhosted.org/packages/54/da/3d6fc5d92d324701b0c23fb413c853892bfe0e1dbe06c9138037d459756b/mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107", size = 9885276 }, + { url = "https://files.pythonhosted.org/packages/a0/b5/32dd67b69a16d088e533962e5044e51004176a9952419de0370cdaead0f8/mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1", size = 2752905 }, ] [[package]] @@ -2728,7 +2564,7 @@ wheels = [ [[package]] name = "nbconvert" -version = "7.16.6" +version = "7.16.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "beautifulsoup4", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -2746,9 +2582,9 @@ dependencies = [ { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/59/f28e15fc47ffb73af68a8d9b47367a8630d76e97ae85ad18271b9db96fdf/nbconvert-7.16.6.tar.gz", hash = 
"sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582", size = 857715 } +sdist = { url = "https://files.pythonhosted.org/packages/46/2c/d026c0367f2be2463d4c2f5b538e28add2bc67bc13730abb7f364ae4eb8b/nbconvert-7.16.5.tar.gz", hash = "sha256:c83467bb5777fdfaac5ebbb8e864f300b277f68692ecc04d6dab72f2d8442344", size = 856367 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/9a/cd673b2f773a12c992f41309ef81b99da1690426bd2f96957a7ade0d3ed7/nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b", size = 258525 }, + { url = "https://files.pythonhosted.org/packages/8f/9e/2dcc9fe00cf55d95a8deae69384e9cea61816126e345754f6c75494d32ec/nbconvert-7.16.5-py3-none-any.whl", hash = "sha256:e12eac052d6fd03040af4166c563d76e7aeead2e9aadf5356db552a1784bd547", size = 258061 }, ] [[package]] @@ -2795,34 +2631,64 @@ wheels = [ [[package]] name = "numpy" -version = "1.26.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/94/ace0fdea5241a27d13543ee117cbc65868e82213fb31a8eb7fe9ff23f313/numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0", size = 20631468 }, - { url = "https://files.pythonhosted.org/packages/20/f7/b24208eba89f9d1b58c1668bc6c8c4fd472b20c45573cb767f59d49fb0f6/numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a", size = 13966411 }, - { url = "https://files.pythonhosted.org/packages/fc/a5/4beee6488160798683eed5bdb7eead455892c3b4e1f78d79d8d3f3b084ac/numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4", size = 14219016 }, - { url = "https://files.pythonhosted.org/packages/4b/d7/ecf66c1cd12dc28b4040b15ab4d17b773b87fa9d29ca16125de01adb36cd/numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f", size = 18240889 }, - { url = "https://files.pythonhosted.org/packages/24/03/6f229fe3187546435c4f6f89f6d26c129d4f5bed40552899fcf1f0bf9e50/numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a", size = 13876746 }, - { url = "https://files.pythonhosted.org/packages/39/fe/39ada9b094f01f5a35486577c848fe274e374bbf8d8f472e1423a0bbd26d/numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2", size = 18078620 }, - { url = "https://files.pythonhosted.org/packages/d5/ef/6ad11d51197aad206a9ad2286dc1aac6a378059e06e8cf22cd08ed4f20dc/numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07", size = 5972659 }, - { url = "https://files.pythonhosted.org/packages/19/77/538f202862b9183f54108557bfda67e17603fc560c384559e769321c9d92/numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5", size = 15808905 }, - { url = 
"https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554 }, - { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127 }, - { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994 }, - { url = "https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005 }, - { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297 }, - { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567 }, - { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812 }, - { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913 }, - { url = "https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901 }, - { url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868 }, - { url = "https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109 }, - { url = "https://files.pythonhosted.org/packages/0f/50/de23fde84e45f5c4fda2488c759b69990fd4512387a8632860f3ac9cd225/numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed", size = 17950613 }, - { url = "https://files.pythonhosted.org/packages/4c/0c/9c603826b6465e82591e05ca230dfc13376da512b25ccd0894709b054ed0/numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a", 
size = 13572172 }, - { url = "https://files.pythonhosted.org/packages/76/8c/2ba3902e1a0fc1c74962ea9bb33a534bb05984ad7ff9515bf8d07527cadd/numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", size = 17786643 }, - { url = "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", size = 5677803 }, - { url = "https://files.pythonhosted.org/packages/16/2e/86f24451c2d530c88daf997cb8d6ac622c1d40d19f5a031ed68a4b73a374/numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", size = 15517754 }, +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/fdbf6a7871703df6160b5cf3dd774074b086d278172285c52c2758b76305/numpy-2.2.1.tar.gz", hash = "sha256:45681fd7128c8ad1c379f0ca0776a8b0c6583d2f69889ddac01559dfe4390918", size = 20227662 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/c4/5588367dc9f91e1a813beb77de46ea8cab13f778e1b3a0e661ab031aba44/numpy-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5edb4e4caf751c1518e6a26a83501fda79bff41cc59dac48d70e6d65d4ec4440", size = 21213214 }, + { url = "https://files.pythonhosted.org/packages/d8/8b/32dd9f08419023a4cf856c5ad0b4eba9b830da85eafdef841a104c4fc05a/numpy-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa3017c40d513ccac9621a2364f939d39e550c542eb2a894b4c8da92b38896ab", size = 14352248 }, + { url = "https://files.pythonhosted.org/packages/84/2d/0e895d02940ba6e12389f0ab5cac5afcf8dc2dc0ade4e8cad33288a721bd/numpy-2.2.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:61048b4a49b1c93fe13426e04e04fdf5a03f456616f6e98c7576144677598675", size = 5391007 }, + { url = "https://files.pythonhosted.org/packages/11/b9/7f1e64a0d46d9c2af6d17966f641fb12d5b8ea3003f31b2308f3e3b9a6aa/numpy-2.2.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:7671dc19c7019103ca44e8d94917eba8534c76133523ca8406822efdd19c9308", size = 6926174 }, + { url = "https://files.pythonhosted.org/packages/2e/8c/043fa4418bc9364e364ab7aba8ff6ef5f6b9171ade22de8fbcf0e2fa4165/numpy-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4250888bcb96617e00bfa28ac24850a83c9f3a16db471eca2ee1f1714df0f957", size = 14330914 }, + { url = "https://files.pythonhosted.org/packages/f7/b6/d8110985501ca8912dfc1c3bbef99d66e62d487f72e46b2337494df77364/numpy-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7746f235c47abc72b102d3bce9977714c2444bdfaea7888d241b4c4bb6a78bf", size = 16379607 }, + { url = "https://files.pythonhosted.org/packages/e2/57/bdca9fb8bdaa810c3a4ff2eb3231379b77f618a7c0d24be9f7070db50775/numpy-2.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:059e6a747ae84fce488c3ee397cee7e5f905fd1bda5fb18c66bc41807ff119b2", size = 15541760 }, + { url = "https://files.pythonhosted.org/packages/97/55/3b9147b3cbc3b6b1abc2a411dec5337a46c873deca0dd0bf5bef9d0579cc/numpy-2.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f62aa6ee4eb43b024b0e5a01cf65a0bb078ef8c395e8713c6e8a12a697144528", size = 18168476 }, + { url = "https://files.pythonhosted.org/packages/00/e7/7c2cde16c9b87a8e14fdd262ca7849c4681cf48c8a774505f7e6f5e3b643/numpy-2.2.1-cp310-cp310-win32.whl", hash = 
"sha256:48fd472630715e1c1c89bf1feab55c29098cb403cc184b4859f9c86d4fcb6a95", size = 6570985 }, + { url = "https://files.pythonhosted.org/packages/a1/a8/554b0e99fc4ac11ec481254781a10da180d0559c2ebf2c324232317349ee/numpy-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:b541032178a718c165a49638d28272b771053f628382d5e9d1c93df23ff58dbf", size = 12913384 }, + { url = "https://files.pythonhosted.org/packages/59/14/645887347124e101d983e1daf95b48dc3e136bf8525cb4257bf9eab1b768/numpy-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40f9e544c1c56ba8f1cf7686a8c9b5bb249e665d40d626a23899ba6d5d9e1484", size = 21217379 }, + { url = "https://files.pythonhosted.org/packages/9f/fd/2279000cf29f58ccfd3778cbf4670dfe3f7ce772df5e198c5abe9e88b7d7/numpy-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9b57eaa3b0cd8db52049ed0330747b0364e899e8a606a624813452b8203d5f7", size = 14388520 }, + { url = "https://files.pythonhosted.org/packages/58/b0/034eb5d5ba12d66ab658ff3455a31f20add0b78df8203c6a7451bd1bee21/numpy-2.2.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:bc8a37ad5b22c08e2dbd27df2b3ef7e5c0864235805b1e718a235bcb200cf1cb", size = 5389286 }, + { url = "https://files.pythonhosted.org/packages/5d/69/6f3cccde92e82e7835fdb475c2bf439761cbf8a1daa7c07338e1e132dfec/numpy-2.2.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9036d6365d13b6cbe8f27a0eaf73ddcc070cae584e5ff94bb45e3e9d729feab5", size = 6930345 }, + { url = "https://files.pythonhosted.org/packages/d1/72/1cd38e91ab563e67f584293fcc6aca855c9ae46dba42e6b5ff4600022899/numpy-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51faf345324db860b515d3f364eaa93d0e0551a88d6218a7d61286554d190d73", size = 14335748 }, + { url = "https://files.pythonhosted.org/packages/f2/d4/f999444e86986f3533e7151c272bd8186c55dda554284def18557e013a2a/numpy-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38efc1e56b73cc9b182fe55e56e63b044dd26a72128fd2fbd502f75555d92591", size = 16391057 }, + { url = "https://files.pythonhosted.org/packages/99/7b/85cef6a3ae1b19542b7afd97d0b296526b6ef9e3c43ea0c4d9c4404fb2d0/numpy-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:31b89fa67a8042e96715c68e071a1200c4e172f93b0fbe01a14c0ff3ff820fc8", size = 15556943 }, + { url = "https://files.pythonhosted.org/packages/69/7e/b83cc884c3508e91af78760f6b17ab46ad649831b1fa35acb3eb26d9e6d2/numpy-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4c86e2a209199ead7ee0af65e1d9992d1dce7e1f63c4b9a616500f93820658d0", size = 18180785 }, + { url = "https://files.pythonhosted.org/packages/b2/9f/eb4a9a38867de059dcd4b6e18d47c3867fbd3795d4c9557bb49278f94087/numpy-2.2.1-cp311-cp311-win32.whl", hash = "sha256:b34d87e8a3090ea626003f87f9392b3929a7bbf4104a05b6667348b6bd4bf1cd", size = 6568983 }, + { url = "https://files.pythonhosted.org/packages/6d/1e/be3b9f3073da2f8c7fa361fcdc231b548266b0781029fdbaf75eeab997fd/numpy-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:360137f8fb1b753c5cde3ac388597ad680eccbbbb3865ab65efea062c4a1fd16", size = 12917260 }, + { url = "https://files.pythonhosted.org/packages/62/12/b928871c570d4a87ab13d2cc19f8817f17e340d5481621930e76b80ffb7d/numpy-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:694f9e921a0c8f252980e85bce61ebbd07ed2b7d4fa72d0e4246f2f8aa6642ab", size = 20909861 }, + { url = "https://files.pythonhosted.org/packages/3d/c3/59df91ae1d8ad7c5e03efd63fd785dec62d96b0fe56d1f9ab600b55009af/numpy-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:3683a8d166f2692664262fd4900f207791d005fb088d7fdb973cc8d663626faa", size = 14095776 }, + { url = "https://files.pythonhosted.org/packages/af/4e/8ed5868efc8e601fb69419644a280e9c482b75691466b73bfaab7d86922c/numpy-2.2.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:780077d95eafc2ccc3ced969db22377b3864e5b9a0ea5eb347cc93b3ea900315", size = 5126239 }, + { url = "https://files.pythonhosted.org/packages/1a/74/dd0bbe650d7bc0014b051f092f2de65e34a8155aabb1287698919d124d7f/numpy-2.2.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:55ba24ebe208344aa7a00e4482f65742969a039c2acfcb910bc6fcd776eb4355", size = 6659296 }, + { url = "https://files.pythonhosted.org/packages/7f/11/4ebd7a3f4a655764dc98481f97bd0a662fb340d1001be6050606be13e162/numpy-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b1d07b53b78bf84a96898c1bc139ad7f10fda7423f5fd158fd0f47ec5e01ac7", size = 14047121 }, + { url = "https://files.pythonhosted.org/packages/7f/a7/c1f1d978166eb6b98ad009503e4d93a8c1962d0eb14a885c352ee0276a54/numpy-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5062dc1a4e32a10dc2b8b13cedd58988261416e811c1dc4dbdea4f57eea61b0d", size = 16096599 }, + { url = "https://files.pythonhosted.org/packages/3d/6d/0e22afd5fcbb4d8d0091f3f46bf4e8906399c458d4293da23292c0ba5022/numpy-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fce4f615f8ca31b2e61aa0eb5865a21e14f5629515c9151850aa936c02a1ee51", size = 15243932 }, + { url = "https://files.pythonhosted.org/packages/03/39/e4e5832820131ba424092b9610d996b37e5557180f8e2d6aebb05c31ae54/numpy-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:67d4cda6fa6ffa073b08c8372aa5fa767ceb10c9a0587c707505a6d426f4e046", size = 17861032 }, + { url = "https://files.pythonhosted.org/packages/5f/8a/3794313acbf5e70df2d5c7d2aba8718676f8d054a05abe59e48417fb2981/numpy-2.2.1-cp312-cp312-win32.whl", hash = "sha256:32cb94448be47c500d2c7a95f93e2f21a01f1fd05dd2beea1ccd049bb6001cd2", size = 6274018 }, + { url = "https://files.pythonhosted.org/packages/17/c1/c31d3637f2641e25c7a19adf2ae822fdaf4ddd198b05d79a92a9ce7cb63e/numpy-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:ba5511d8f31c033a5fcbda22dd5c813630af98c70b2661f2d2c654ae3cdfcfc8", size = 12613843 }, + { url = "https://files.pythonhosted.org/packages/20/d6/91a26e671c396e0c10e327b763485ee295f5a5a7a48c553f18417e5a0ed5/numpy-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f1d09e520217618e76396377c81fba6f290d5f926f50c35f3a5f72b01a0da780", size = 20896464 }, + { url = "https://files.pythonhosted.org/packages/8c/40/5792ccccd91d45e87d9e00033abc4f6ca8a828467b193f711139ff1f1cd9/numpy-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3ecc47cd7f6ea0336042be87d9e7da378e5c7e9b3c8ad0f7c966f714fc10d821", size = 14111350 }, + { url = "https://files.pythonhosted.org/packages/c0/2a/fb0a27f846cb857cef0c4c92bef89f133a3a1abb4e16bba1c4dace2e9b49/numpy-2.2.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f419290bc8968a46c4933158c91a0012b7a99bb2e465d5ef5293879742f8797e", size = 5111629 }, + { url = "https://files.pythonhosted.org/packages/eb/e5/8e81bb9d84db88b047baf4e8b681a3e48d6390bc4d4e4453eca428ecbb49/numpy-2.2.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5b6c390bfaef8c45a260554888966618328d30e72173697e5cabe6b285fb2348", size = 6645865 }, + { url = "https://files.pythonhosted.org/packages/7a/1a/a90ceb191dd2f9e2897c69dde93ccc2d57dd21ce2acbd7b0333e8eea4e8d/numpy-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:526fc406ab991a340744aad7e25251dd47a6720a685fa3331e5c59fef5282a59", size = 14043508 }, + { url = "https://files.pythonhosted.org/packages/f1/5a/e572284c86a59dec0871a49cd4e5351e20b9c751399d5f1d79628c0542cb/numpy-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f74e6fdeb9a265624ec3a3918430205dff1df7e95a230779746a6af78bc615af", size = 16094100 }, + { url = "https://files.pythonhosted.org/packages/0c/2c/a79d24f364788386d85899dd280a94f30b0950be4b4a545f4fa4ed1d4ca7/numpy-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:53c09385ff0b72ba79d8715683c1168c12e0b6e84fb0372e97553d1ea91efe51", size = 15239691 }, + { url = "https://files.pythonhosted.org/packages/cf/79/1e20fd1c9ce5a932111f964b544facc5bb9bde7865f5b42f00b4a6a9192b/numpy-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f3eac17d9ec51be534685ba877b6ab5edc3ab7ec95c8f163e5d7b39859524716", size = 17856571 }, + { url = "https://files.pythonhosted.org/packages/be/5b/cc155e107f75d694f562bdc84a26cc930569f3dfdfbccb3420b626065777/numpy-2.2.1-cp313-cp313-win32.whl", hash = "sha256:9ad014faa93dbb52c80d8f4d3dcf855865c876c9660cb9bd7553843dd03a4b1e", size = 6270841 }, + { url = "https://files.pythonhosted.org/packages/44/be/0e5cd009d2162e4138d79a5afb3b5d2341f0fe4777ab6e675aa3d4a42e21/numpy-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:164a829b6aacf79ca47ba4814b130c4020b202522a93d7bff2202bfb33b61c60", size = 12606618 }, + { url = "https://files.pythonhosted.org/packages/a8/87/04ddf02dd86fb17c7485a5f87b605c4437966d53de1e3745d450343a6f56/numpy-2.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4dfda918a13cc4f81e9118dea249e192ab167a0bb1966272d5503e39234d694e", size = 20921004 }, + { url = "https://files.pythonhosted.org/packages/6e/3e/d0e9e32ab14005425d180ef950badf31b862f3839c5b927796648b11f88a/numpy-2.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:733585f9f4b62e9b3528dd1070ec4f52b8acf64215b60a845fa13ebd73cd0712", size = 14119910 }, + { url = "https://files.pythonhosted.org/packages/b5/5b/aa2d1905b04a8fb681e08742bb79a7bddfc160c7ce8e1ff6d5c821be0236/numpy-2.2.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:89b16a18e7bba224ce5114db863e7029803c179979e1af6ad6a6b11f70545008", size = 5153612 }, + { url = "https://files.pythonhosted.org/packages/ce/35/6831808028df0648d9b43c5df7e1051129aa0d562525bacb70019c5f5030/numpy-2.2.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:676f4eebf6b2d430300f1f4f4c2461685f8269f94c89698d832cdf9277f30b84", size = 6668401 }, + { url = "https://files.pythonhosted.org/packages/b1/38/10ef509ad63a5946cc042f98d838daebfe7eaf45b9daaf13df2086b15ff9/numpy-2.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f5cdf9f493b35f7e41e8368e7d7b4bbafaf9660cba53fb21d2cd174ec09631", size = 14014198 }, + { url = "https://files.pythonhosted.org/packages/df/f8/c80968ae01df23e249ee0a4487fae55a4c0fe2f838dfe9cc907aa8aea0fa/numpy-2.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1ad395cf254c4fbb5b2132fee391f361a6e8c1adbd28f2cd8e79308a615fe9d", size = 16076211 }, + { url = "https://files.pythonhosted.org/packages/09/69/05c169376016a0b614b432967ac46ff14269eaffab80040ec03ae1ae8e2c/numpy-2.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:08ef779aed40dbc52729d6ffe7dd51df85796a702afbf68a4f4e41fafdc8bda5", size = 15220266 }, + { url = "https://files.pythonhosted.org/packages/f1/ff/94a4ce67ea909f41cf7ea712aebbe832dc67decad22944a1020bb398a5ee/numpy-2.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:26c9c4382b19fcfbbed3238a14abf7ff223890ea1936b8890f058e7ba35e8d71", size = 17852844 }, + { url = "https://files.pythonhosted.org/packages/46/72/8a5dbce4020dfc595592333ef2fbb0a187d084ca243b67766d29d03e0096/numpy-2.2.1-cp313-cp313t-win32.whl", hash = "sha256:93cf4e045bae74c90ca833cba583c14b62cb4ba2cba0abd2b141ab52548247e2", size = 6326007 }, + { url = "https://files.pythonhosted.org/packages/7b/9c/4fce9cf39dde2562584e4cfd351a0140240f82c0e3569ce25a250f47037d/numpy-2.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:bff7d8ec20f5f42607599f9994770fa65d76edca264a87b5e4ea5629bce12268", size = 12693107 }, + { url = "https://files.pythonhosted.org/packages/f1/65/d36a76b811ffe0a4515e290cb05cb0e22171b1b0f0db6bee9141cf023545/numpy-2.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7ba9cc93a91d86365a5d270dee221fdc04fb68d7478e6bf6af650de78a8339e3", size = 21044672 }, + { url = "https://files.pythonhosted.org/packages/aa/3f/b644199f165063154df486d95198d814578f13dd4d8c1651e075bf1cb8af/numpy-2.2.1-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:3d03883435a19794e41f147612a77a8f56d4e52822337844fff3d4040a142964", size = 6789873 }, + { url = "https://files.pythonhosted.org/packages/d7/df/2adb0bb98a3cbe8a6c3c6d1019aede1f1d8b83927ced228a46cc56c7a206/numpy-2.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4511d9e6071452b944207c8ce46ad2f897307910b402ea5fa975da32e0102800", size = 16194933 }, + { url = "https://files.pythonhosted.org/packages/13/3e/1959d5219a9e6d200638d924cedda6a606392f7186a4ed56478252e70d55/numpy-2.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5c5cc0cbabe9452038ed984d05ac87910f89370b9242371bd9079cb4af61811e", size = 12820057 }, ] [[package]] @@ -2911,14 +2777,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/db/f7/97a9ea26ed4bbbfc2d470994b8b4f338ef663be97b8f677519ac195e113d/nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ea4f11a2904e2a8dc4b1833cc1b5181cde564edd0d5cd33e3c168eff2d1863f1", size = 207454763 }, ] -[[package]] -name = "nvidia-cusparselt-cu12" -version = "0.6.2" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/a8/bcbb63b53a4b1234feeafb65544ee55495e1bb37ec31b999b963cbccfd1d/nvidia_cusparselt_cu12-0.6.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:df2c24502fd76ebafe7457dbc4716b2fec071aabaed4fb7691a201cde03704d9", size = 150057751 }, -] - [[package]] name = "nvidia-nccl-cu12" version = "2.21.5" @@ -2954,15 +2812,15 @@ wheels = [ [[package]] name = "ollama" -version = "0.4.7" +version = "0.4.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b0/6d/dc77539c735bbed5d0c873fb029fb86aa9f0163df169b34152914331c369/ollama-0.4.7.tar.gz", hash = "sha256:891dcbe54f55397d82d289c459de0ea897e103b86a3f1fad0fdb1895922a75ff", size = 12843 } +sdist = { url = "https://files.pythonhosted.org/packages/75/d6/2bd7cffbabc81282576051ebf66ebfaa97e6b541975cd4e886bfd6c0f83d/ollama-0.4.6.tar.gz", hash = "sha256:b00717651c829f96094ed4231b9f0d87e33cc92dc235aca50aeb5a2a4e6e95b7", size = 12710 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/31/83/c3ffac86906c10184c88c2e916460806b072a2cfe34cdcaf3a0c0e836d39/ollama-0.4.7-py3-none-any.whl", hash = "sha256:85505663cca67a83707be5fb3aeff0ea72e67846cea5985529d8eca4366564a1", size = 13210 }, + { url = "https://files.pythonhosted.org/packages/4a/60/ac0e47c4c400fbd1a72a3c6e4a76cf5ef859d60677e7c4b9f0203c5657d3/ollama-0.4.6-py3-none-any.whl", hash = "sha256:cbb4ebe009e10dd12bdd82508ab415fd131945e185753d728a7747c9ebe762e9", size = 13086 }, ] [[package]] @@ -3003,36 +2861,32 @@ wheels = [ [[package]] name = "onnxruntime-genai" -version = "0.6.0" +version = "0.5.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, - { name = "onnxruntime", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "onnxruntime", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/7f/3e1edde3318458aabdd6070c44bedc2caa913949530d90ec89c32c76a036/onnxruntime_genai-0.6.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:b820e20e438fc2679db24e432c5652e20a972709e4002210a46b4f6282fd57d4", size = 871347 }, - { url = "https://files.pythonhosted.org/packages/62/9e/695c96d4023c1a826f64a61fd3b7e11c6d1059df04baeea99cd9695afb0c/onnxruntime_genai-0.6.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:02cd58bd85f2cc3d9a017af095dee2b3d7cd2be3b5c1701ebc6fc5204e6dffef", size = 986678 }, - { url = "https://files.pythonhosted.org/packages/b8/15/a62e1096413d17c24fac161f15002b94406be4b6b17663c576742eb15b44/onnxruntime_genai-0.6.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22342e7262bcdc2337e0a3c1125ec7d42e8cf18b2eca5e5debd17d17f0e6154d", size = 1523539 }, - { url = "https://files.pythonhosted.org/packages/da/bd/f638c512712f776ec536878905dd4316ae94bd1b9f29b3511d9cc00ebf9b/onnxruntime_genai-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:9a7ceef0afdd605d2816cea2b54bab629cfd5f8005800c09db4d213f2c32be43", size = 866056 }, - { url = "https://files.pythonhosted.org/packages/28/b5/60c3128cba49884f64fa8f77bc75560e14b0092af15915a4fec5983d22e2/onnxruntime_genai-0.6.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:55e74c544573e78cd86b9bc440a12cb0e1442978a0446dfc438b3e4d6c40f498", size = 871291 }, - { url = "https://files.pythonhosted.org/packages/1d/7d/a26c1a6517b6d58131d8068b557702d1227699e0ddc77c5f8d3ce3e58afb/onnxruntime_genai-0.6.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:4a7afcc99657bcbf66ee8a33e9cf7b4540edef01480c7d7267b4c8f086bcf1ee", size = 986693 }, - { url = "https://files.pythonhosted.org/packages/9d/d3/f43608d2b64cc0122f5eea4e3c3efb1a4d82ae69cca6a8e17a7b10e77752/onnxruntime_genai-0.6.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f250e986af81d014305037a485c57388dbab680bb08624ddb938b5df1fbb011d", size = 1523215 }, - { url = "https://files.pythonhosted.org/packages/96/ca/941cf8ff6549097e0e0395e933160fdcb13445f0040122295951b70aa60c/onnxruntime_genai-0.6.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:9d32192ac9012864629066d0ce8a4ff6326557201ca5f4d3238bffa06580f313", size = 866007 }, - { url = "https://files.pythonhosted.org/packages/55/13/15cfbd1c5d1163f25894b5f959f698f72d54be5da31efcee0210973069f0/onnxruntime_genai-0.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:04a10f74a912809de859104f27555c1bfd88936314adf1dae1c107f7fdd4e557", size = 839485 }, - { url = "https://files.pythonhosted.org/packages/bf/20/c88f2ea675a37854fef5986201599c2d910195aff93644678e12fecc1991/onnxruntime_genai-0.6.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ac2caaafe79e664fcd105a61051d46755aa9242a08485c065772bcb8a7436819", size = 873021 }, - { url = "https://files.pythonhosted.org/packages/ac/ac/ee559e8250dc039a5b63ad294894f110d9db53646a3d8a5bf9e4733bd130/onnxruntime_genai-0.6.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:01ce0b0fc068d5b33166c45a7b554f4ffd3774357ace53e1fabf6476468eb45a", size = 989275 }, - { url = "https://files.pythonhosted.org/packages/89/4f/a98cb43eb90dbdb31abc9b97ec0f903f70b0164c696ed013bb867ab7a0f4/onnxruntime_genai-0.6.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecdc72e2f9ec6386c08390a55eb9d9d0fe4b0b5d5d042cbeac34c666e28d0a3f", size = 1524001 }, - { url = "https://files.pythonhosted.org/packages/fe/24/357a31a821b706e340a8ac1bac8c3d9a27fdc53ae6c41e1cdbfb16055aa7/onnxruntime_genai-0.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:5039138c7510ca84acb879f1da943709dc37ed2364b5cf280fec52eb94d8e137", size = 866519 }, - { url = "https://files.pythonhosted.org/packages/b7/77/bcb9f3c7e95945fc457238548c0b8fe656c54ead0153d67c0610b7d9fb57/onnxruntime_genai-0.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:12fc1c412e57011904f4942d176ae50dc8d52445dc83bc0bd25c097debfa1712", size = 839461 }, - { url = "https://files.pythonhosted.org/packages/08/77/d3722046f4dca6046f89e61c4ec6ddba08ee28371d7f923c9b061df6f419/onnxruntime_genai-0.6.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:f69785e5f8b1fcd678616dea51dccd27c9fa45c840f8681f08ab97fbc9502636", size = 873022 }, - { url = "https://files.pythonhosted.org/packages/ef/2e/84c15a067b2e82d11cb763b287f62f499130323ddb9e97b3ce627c0521e2/onnxruntime_genai-0.6.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:40131e948ccd3d282c33a86418308288f66f465ef679632a6483230b38da2566", size = 989229 }, - { url = "https://files.pythonhosted.org/packages/36/60/360866f47d523ee688dedbda98b4a09bf7e2008cc31148c2037e2456b643/onnxruntime_genai-0.6.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7f314dec25125cb11a32683a120a63f570d2209b20cab4a28a8e93867f576a9", size = 1523999 }, - { url = "https://files.pythonhosted.org/packages/b0/d7/2a802365c6c11cbd01dfa96b9d58f315d76c2c1ecdd4397cd082104fc765/onnxruntime_genai-0.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:4870cca3701603efd5842398e574146000e92325a99f0f28ddbb4c904b513071", size = 866464 }, + { url = "https://files.pythonhosted.org/packages/52/35/22a421f852eb14f47c33a4dd4c3ef58a2f3d5a96be8bb6d6cc271b2a0e83/onnxruntime_genai-0.5.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:cd322ead0027fbfa309e7be76c4512157ad369dc189ab3334a58a199b4f58a02", size = 769921 }, + { url = "https://files.pythonhosted.org/packages/7f/1b/5166ed4a73c5e9f92e6db4d7838923ffd595cea164661fae20d82e3a6966/onnxruntime_genai-0.5.2-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:79d721a33e80a9664aeeb87c0ceec75801fc81e48e8ff7940e3658d0b28f25cc", size = 869111 }, + { url = 
"https://files.pythonhosted.org/packages/12/5b/6f08f9435f0c3977046cb4292ab1e836c22cd7d56fc87ace4d2a90dfb828/onnxruntime_genai-0.5.2-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bd7954f9dc829e69dabd7f676443529ac18171ec8077438c16364d381733070e", size = 1380370 }, + { url = "https://files.pythonhosted.org/packages/57/d6/91e486424f924c2a99e8f1bd201180979101ecc09bee1ca7f53dae1c8a38/onnxruntime_genai-0.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:4d2968df6d8064664a5f095006c70520f4ca689204b695e88951f088477bc1e0", size = 776263 }, + { url = "https://files.pythonhosted.org/packages/3e/3d/e2d8f89c05c6cf35e2ade2b335b1b97725327591b8fb141d266ab98615f9/onnxruntime_genai-0.5.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:217c748f8ebd1a5082e1ad8ee8fc90fc1a4e9ce7839189f4c2c2545d1390af15", size = 769888 }, + { url = "https://files.pythonhosted.org/packages/33/13/66ffa143cc82f8352ec87ba0501bc21e05dd9e84fbbad530e74a705ac911/onnxruntime_genai-0.5.2-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:6194aabd589b3ffb571b325f504266ac47c33c434abfd87575c30d7a3e1179c9", size = 869092 }, + { url = "https://files.pythonhosted.org/packages/6a/17/a29c0cf89d90374234b8e510fcb970f2e043b42689b5ea23cbdab5a414b6/onnxruntime_genai-0.5.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:88edb36c9e2d670316f1e6e4ce27a86f212648a92053a94a31f88b1f4d6c0935", size = 1380461 }, + { url = "https://files.pythonhosted.org/packages/59/b1/acb1daf1a08c8098c828e7ea9e187b9728a8fc151a4df4911f988c08a874/onnxruntime_genai-0.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:09b697f955616156948f21366d13d02884a15521926f68a259722d9fa4437db4", size = 776308 }, + { url = "https://files.pythonhosted.org/packages/22/57/d249827c3e37abe528674bfa97de4c61b18afb452d2afced690a745e0866/onnxruntime_genai-0.5.2-cp311-cp311-win_arm64.whl", hash = "sha256:893be15d2113438e60b8a1c0095892e0fd4f2b01dd470d6197337db2a5778c88", size = 751552 }, + { url = "https://files.pythonhosted.org/packages/cf/72/259de19e93e72b14d0a3910f1025f71da006a8dfc76c97792646b335a8a3/onnxruntime_genai-0.5.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:6b438d7f4901081b8f3ff99db6c6ea15a3fcc107abce79859ff635e1278e26b0", size = 771097 }, + { url = "https://files.pythonhosted.org/packages/8c/72/73c95e357ada258025236437fb2b4d56fb7e8594db6361f4560ea97ca06c/onnxruntime_genai-0.5.2-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:d7bffb799d44656b2615fc43130a1a287d57e8893b80523e560924cf05770f1d", size = 871450 }, + { url = "https://files.pythonhosted.org/packages/79/3d/43211c8a66d7ce54dea137ad7bec30767e3f2dc5e1e22befdcca290ebbe0/onnxruntime_genai-0.5.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bb5b9650512e21a71d965e582d616b33df07978b0c3ecbd5bef0912a7b5f7832", size = 1380898 }, + { url = "https://files.pythonhosted.org/packages/9f/7b/53b217ed0db401877fafa2f63d2ce7de754899f2bdf4cb415931e2019f18/onnxruntime_genai-0.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:422e9af27f182247378e9423f5745becfaffcdf7a4f452da17fd5d9390770ca7", size = 776974 }, + { url = "https://files.pythonhosted.org/packages/08/c1/a69aeba29f40febd8d70d45044d4eb97905beb37fc8491b1628c8714ecc1/onnxruntime_genai-0.5.2-cp312-cp312-win_arm64.whl", hash = "sha256:315b23cb04749202c9cc3eb34f281bb4943de477a5aa46c99b940603b6a5d272", size = 751246 }, ] [[package]] name = "openai" -version = "1.64.0" +version = "1.59.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or 
sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3044,9 +2898,9 @@ dependencies = [ { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7b/1d/aae78d8ecc571d672c4a27794a8f248bc46437a22ddcb9c4eb6fd6616c03/openai-1.64.0.tar.gz", hash = "sha256:2861053538704d61340da56e2f176853d19f1dc5704bc306b7597155f850d57a", size = 357058 } +sdist = { url = "https://files.pythonhosted.org/packages/f9/d5/25cf04789c7929b476c4d9ef711f8979091db63d30bfc093828fe4bf5c72/openai-1.59.7.tar.gz", hash = "sha256:043603def78c00befb857df9f0a16ee76a3af5984ba40cb7ee5e2f40db4646bf", size = 345007 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/1a/e62718f311daa26d208800976d7944e5ee6d503e1ea474522b2a15a904bb/openai-1.64.0-py3-none-any.whl", hash = "sha256:20f85cde9e95e9fbb416e3cb5a6d3119c0b28308afd6e3cc47bf100623dac623", size = 472289 }, + { url = "https://files.pythonhosted.org/packages/6d/47/7b92f1731c227f4139ef0025b5996062e44f9a749c54315c8bdb34bad5ec/openai-1.59.7-py3-none-any.whl", hash = "sha256:cfa806556226fa96df7380ab2e29814181d56fea44738c2b0e581b462c268692", size = 454844 }, ] [[package]] @@ -3099,32 +2953,32 @@ wheels = [ [[package]] name = "opentelemetry-api" -version = "1.30.0" +version = "1.29.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "deprecated", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "importlib-metadata", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2b/6d/bbbf879826b7f3c89a45252010b5796fb1f1a0d45d9dc4709db0ef9a06c8/opentelemetry_api-1.30.0.tar.gz", hash = "sha256:375893400c1435bf623f7dfb3bcd44825fe6b56c34d0667c542ea8257b1a1240", size = 63703 } +sdist = { url = "https://files.pythonhosted.org/packages/bc/8e/b886a5e9861afa188d1fe671fb96ff9a1d90a23d57799331e137cc95d573/opentelemetry_api-1.29.0.tar.gz", hash = "sha256:d04a6cf78aad09614f52964ecb38021e248f5714dc32c2e0d8fd99517b4d69cf", size = 62900 } wheels = [ - { url = "https://files.pythonhosted.org/packages/36/0a/eea862fae6413d8181b23acf8e13489c90a45f17986ee9cf4eab8a0b9ad9/opentelemetry_api-1.30.0-py3-none-any.whl", hash = "sha256:d5f5284890d73fdf47f843dda3210edf37a38d66f44f2b5aedc1e89ed455dc09", size = 64955 }, + { url = "https://files.pythonhosted.org/packages/43/53/5249ea860d417a26a3a6f1bdedfc0748c4f081a3adaec3d398bc0f7c6a71/opentelemetry_api-1.29.0-py3-none-any.whl", hash = "sha256:5fcd94c4141cc49c736271f3e1efb777bebe9cc535759c54c936cca4f1b312b8", size = 64304 }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-common" -version = "1.30.0" +version = "1.29.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-proto", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a2/d7/44098bf1ef89fc5810cdbda05faa2ae9322a0dbda4921cdc965dc68a9856/opentelemetry_exporter_otlp_proto_common-1.30.0.tar.gz", hash = "sha256:ddbfbf797e518411857d0ca062c957080279320d6235a279f7b64ced73c13897", size = 19640 } +sdist = { url = 
"https://files.pythonhosted.org/packages/b1/58/f7fd7eaf592b2521999a4271ab3ce1c82fe37fe9b0dc25c348398d95d66a/opentelemetry_exporter_otlp_proto_common-1.29.0.tar.gz", hash = "sha256:e7c39b5dbd1b78fe199e40ddfe477e6983cb61aa74ba836df09c3869a3e3e163", size = 19133 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/54/f4b3de49f8d7d3a78fd6e6e1a6fd27dd342eb4d82c088b9078c6a32c3808/opentelemetry_exporter_otlp_proto_common-1.30.0-py3-none-any.whl", hash = "sha256:5468007c81aa9c44dc961ab2cf368a29d3475977df83b4e30aeed42aa7bc3b38", size = 18747 }, + { url = "https://files.pythonhosted.org/packages/9e/75/7609bda3d72bf307839570b226180513e854c01443ebe265ed732a4980fc/opentelemetry_exporter_otlp_proto_common-1.29.0-py3-none-any.whl", hash = "sha256:a9d7376c06b4da9cf350677bcddb9618ed4b8255c3f6476975f5e38274ecd3aa", size = 18459 }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-grpc" -version = "1.30.0" +version = "1.29.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "deprecated", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3135,14 +2989,14 @@ dependencies = [ { name = "opentelemetry-proto", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "opentelemetry-sdk", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/86/3e/c7246df92c25e6ce95c349ad21597b4471b01ec9471e95d5261f1629fe92/opentelemetry_exporter_otlp_proto_grpc-1.30.0.tar.gz", hash = "sha256:d0f10f0b9b9a383b7d04a144d01cb280e70362cccc613987e234183fd1f01177", size = 26256 } +sdist = { url = "https://files.pythonhosted.org/packages/41/aa/b3f2190613141f35fe15145bf438334fdd1eac8aeeee4f7ecbc887999443/opentelemetry_exporter_otlp_proto_grpc-1.29.0.tar.gz", hash = "sha256:3d324d07d64574d72ed178698de3d717f62a059a93b6b7685ee3e303384e73ea", size = 26224 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/35/d9f63fd84c2ed8dbd407bcbb933db4ed6e1b08e7fbdaca080b9ac309b927/opentelemetry_exporter_otlp_proto_grpc-1.30.0-py3-none-any.whl", hash = "sha256:2906bcae3d80acc54fd1ffcb9e44d324e8631058b502ebe4643ca71d1ff30830", size = 18550 }, + { url = "https://files.pythonhosted.org/packages/f2/de/4b4127a25d1594851d99032f3a9acb09cb512d11edec713410fb906607f4/opentelemetry_exporter_otlp_proto_grpc-1.29.0-py3-none-any.whl", hash = "sha256:5a2a3a741a2543ed162676cf3eefc2b4150e6f4f0a193187afb0d0e65039c69c", size = 18520 }, ] [[package]] name = "opentelemetry-instrumentation" -version = "0.51b0" +version = "0.50b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3150,14 +3004,14 @@ dependencies = [ { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "wrapt", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ec/5a/4c7f02235ac1269b48f3855f6be1afc641f31d4888d28b90b732fbce7141/opentelemetry_instrumentation-0.51b0.tar.gz", hash = "sha256:4ca266875e02f3988536982467f7ef8c32a38b8895490ddce9ad9604649424fa", size = 27760 } +sdist = { url = "https://files.pythonhosted.org/packages/79/2e/2e59a7cb636dc394bd7cf1758ada5e8ed87590458ca6bb2f9c26e0243847/opentelemetry_instrumentation-0.50b0.tar.gz", hash = 
"sha256:7d98af72de8dec5323e5202e46122e5f908592b22c6d24733aad619f07d82979", size = 26539 } wheels = [ - { url = "https://files.pythonhosted.org/packages/40/2c/48fa93f1acca9f79a06da0df7bfe916632ecc7fce1971067b3e46bcae55b/opentelemetry_instrumentation-0.51b0-py3-none-any.whl", hash = "sha256:c6de8bd26b75ec8b0e54dff59e198946e29de6a10ec65488c357d4b34aa5bdcf", size = 30923 }, + { url = "https://files.pythonhosted.org/packages/ff/b1/55a77152a83ec8998e520a3a575f44af1020cfe4bdc000b7538583293b85/opentelemetry_instrumentation-0.50b0-py3-none-any.whl", hash = "sha256:b8f9fc8812de36e1c6dffa5bfc6224df258841fb387b6dfe5df15099daa10630", size = 30728 }, ] [[package]] name = "opentelemetry-instrumentation-asgi" -version = "0.51b0" +version = "0.50b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "asgiref", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3166,14 +3020,14 @@ dependencies = [ { name = "opentelemetry-semantic-conventions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "opentelemetry-util-http", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9e/67/8aa6e1129f641f0f3f8786e6c5d18c1f2bbe490bd4b0e91a6879e85154d2/opentelemetry_instrumentation_asgi-0.51b0.tar.gz", hash = "sha256:b3fe97c00f0bfa934371a69674981d76591c68d937b6422a5716ca21081b4148", size = 24201 } +sdist = { url = "https://files.pythonhosted.org/packages/49/cc/a7b2fd243c6d2621803092eba62e450071b6752dfe4f64f530bbfd91a328/opentelemetry_instrumentation_asgi-0.50b0.tar.gz", hash = "sha256:3ca4cb5616ae6a3e8ce86e7d5c360a8d8cc8ed722cf3dc8a5e44300774e87d49", size = 24105 } wheels = [ - { url = "https://files.pythonhosted.org/packages/54/7e/0a95ab37302729543631a789ba8e71dea75c520495739dbbbdfdc580b401/opentelemetry_instrumentation_asgi-0.51b0-py3-none-any.whl", hash = "sha256:e8072993db47303b633c6ec1bc74726ba4d32bd0c46c28dfadf99f79521a324c", size = 16340 }, + { url = "https://files.pythonhosted.org/packages/d2/81/0899c6b56b1023835f266d909250d439174afa0c34ed5944c5021d3da263/opentelemetry_instrumentation_asgi-0.50b0-py3-none-any.whl", hash = "sha256:2ba1297f746e55dec5a17fe825689da0613662fb25c004c3965a6c54b1d5be22", size = 16304 }, ] [[package]] name = "opentelemetry-instrumentation-fastapi" -version = "0.51b0" +version = "0.50b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3182,117 +3036,113 @@ dependencies = [ { name = "opentelemetry-semantic-conventions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "opentelemetry-util-http", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2d/dc/8db4422b5084177d1ef6c7855c69bf2e9e689f595a4a9b59e60588e0d427/opentelemetry_instrumentation_fastapi-0.51b0.tar.gz", hash = "sha256:1624e70f2f4d12ceb792d8a0c331244cd6723190ccee01336273b4559bc13abc", size = 19249 } +sdist = { url = "https://files.pythonhosted.org/packages/8d/f8/1917b0b3e414e23c7d71c9a33f0ce020f94bc47d22a30f54ace704e07588/opentelemetry_instrumentation_fastapi-0.50b0.tar.gz", hash = "sha256:16b9181682136da210295def2bb304a32fb9bdee9a935cdc9da43567f7c1149e", size = 19214 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/55/1c/ec2d816b78edf2404d7b3df6d09eefb690b70bfd191b7da06f76634f1bdc/opentelemetry_instrumentation_fastapi-0.51b0-py3-none-any.whl", hash = "sha256:10513bbc11a1188adb9c1d2c520695f7a8f2b5f4de14e8162098035901cd6493", size = 12117 }, + { url = "https://files.pythonhosted.org/packages/cb/d6/37784bb30b213e2dd6838b9f96c2940907022c1b75ef1ff18a99afe42433/opentelemetry_instrumentation_fastapi-0.50b0-py3-none-any.whl", hash = "sha256:8f03b738495e4705fbae51a2826389c7369629dace89d0f291c06ffefdff5e52", size = 12079 }, ] [[package]] name = "opentelemetry-proto" -version = "1.30.0" +version = "1.29.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/31/6e/c1ff2e3b0cd3a189a6be03fd4d63441d73d7addd9117ab5454e667b9b6c7/opentelemetry_proto-1.30.0.tar.gz", hash = "sha256:afe5c9c15e8b68d7c469596e5b32e8fc085eb9febdd6fb4e20924a93a0389179", size = 34362 } +sdist = { url = "https://files.pythonhosted.org/packages/80/52/fd3b3d79e1b00ad2dcac92db6885e49bedbf7a6828647954e4952d653132/opentelemetry_proto-1.29.0.tar.gz", hash = "sha256:3c136aa293782e9b44978c738fff72877a4b78b5d21a64e879898db7b2d93e5d", size = 34320 } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/d7/85de6501f7216995295f7ec11e470142e6a6e080baacec1753bbf272e007/opentelemetry_proto-1.30.0-py3-none-any.whl", hash = "sha256:c6290958ff3ddacc826ca5abbeb377a31c2334387352a259ba0df37c243adc11", size = 55854 }, + { url = "https://files.pythonhosted.org/packages/bd/66/a500e38ee322d89fce61c74bd7769c8ef3bebc6c2f43fda5f3fc3441286d/opentelemetry_proto-1.29.0-py3-none-any.whl", hash = "sha256:495069c6f5495cbf732501cdcd3b7f60fda2b9d3d4255706ca99b7ca8dec53ff", size = 55818 }, ] [[package]] name = "opentelemetry-sdk" -version = "1.30.0" +version = "1.29.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "opentelemetry-semantic-conventions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/93/ee/d710062e8a862433d1be0b85920d0c653abe318878fef2d14dfe2c62ff7b/opentelemetry_sdk-1.30.0.tar.gz", hash = "sha256:c9287a9e4a7614b9946e933a67168450b9ab35f08797eb9bc77d998fa480fa18", size = 158633 } +sdist = { url = "https://files.pythonhosted.org/packages/0c/5a/1ed4c3cf6c09f80565fc085f7e8efa0c222712fd2a9412d07424705dcf72/opentelemetry_sdk-1.29.0.tar.gz", hash = "sha256:b0787ce6aade6ab84315302e72bd7a7f2f014b0fb1b7c3295b88afe014ed0643", size = 157229 } wheels = [ - { url = "https://files.pythonhosted.org/packages/97/28/64d781d6adc6bda2260067ce2902bd030cf45aec657e02e28c5b4480b976/opentelemetry_sdk-1.30.0-py3-none-any.whl", hash = "sha256:14fe7afc090caad881addb6926cec967129bd9260c4d33ae6a217359f6b61091", size = 118717 }, + { url = "https://files.pythonhosted.org/packages/d1/1d/512b86af21795fb463726665e2f61db77d384e8779fdcf4cb0ceec47866d/opentelemetry_sdk-1.29.0-py3-none-any.whl", hash = "sha256:173be3b5d3f8f7d671f20ea37056710217959e774e2749d984355d1f9391a30a", size = 118078 }, ] [[package]] name = "opentelemetry-semantic-conventions" -version = "0.51b0" +version = "0.50b0" source 
= { registry = "https://pypi.org/simple" } dependencies = [ { name = "deprecated", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1e/c0/0f9ef4605fea7f2b83d55dd0b0d7aebe8feead247cd6facd232b30907b4f/opentelemetry_semantic_conventions-0.51b0.tar.gz", hash = "sha256:3fabf47f35d1fd9aebcdca7e6802d86bd5ebc3bc3408b7e3248dde6e87a18c47", size = 107191 } +sdist = { url = "https://files.pythonhosted.org/packages/e7/4e/d7c7c91ff47cd96fe4095dd7231701aec7347426fd66872ff320d6cd1fcc/opentelemetry_semantic_conventions-0.50b0.tar.gz", hash = "sha256:02dc6dbcb62f082de9b877ff19a3f1ffaa3c306300fa53bfac761c4567c83d38", size = 100459 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/75/d7bdbb6fd8630b4cafb883482b75c4fc276b6426619539d266e32ac53266/opentelemetry_semantic_conventions-0.51b0-py3-none-any.whl", hash = "sha256:fdc777359418e8d06c86012c3dc92c88a6453ba662e941593adb062e48c2eeae", size = 177416 }, + { url = "https://files.pythonhosted.org/packages/da/fb/dc15fad105450a015e913cfa4f5c27b6a5f1bea8fb649f8cae11e699c8af/opentelemetry_semantic_conventions-0.50b0-py3-none-any.whl", hash = "sha256:e87efba8fdb67fb38113efea6a349531e75ed7ffc01562f65b802fcecb5e115e", size = 166602 }, ] [[package]] name = "opentelemetry-util-http" -version = "0.51b0" +version = "0.50b0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/64/32510c0a803465eb6ef1f5bd514d0f5627f8abc9444ed94f7240faf6fcaa/opentelemetry_util_http-0.51b0.tar.gz", hash = "sha256:05edd19ca1cc3be3968b1e502fd94816901a365adbeaab6b6ddb974384d3a0b9", size = 8043 } +sdist = { url = "https://files.pythonhosted.org/packages/69/10/ce3f0d1157cedbd819194f0b27a6bbb7c19a8bceb3941e4a4775014076cf/opentelemetry_util_http-0.50b0.tar.gz", hash = "sha256:dc4606027e1bc02aabb9533cc330dd43f874fca492e4175c31d7154f341754af", size = 7859 } wheels = [ - { url = "https://files.pythonhosted.org/packages/48/dd/c371eeb9cc78abbdad231a27ce1a196a37ef96328d876ccbb381dea4c8ee/opentelemetry_util_http-0.51b0-py3-none-any.whl", hash = "sha256:0561d7a6e9c422b9ef9ae6e77eafcfcd32a2ab689f5e801475cbb67f189efa20", size = 7304 }, + { url = "https://files.pythonhosted.org/packages/64/8a/9e1b54f50d1fddebbeac9a9b0632f8db6ece7add904fb593ee2e268ee4de/opentelemetry_util_http-0.50b0-py3-none-any.whl", hash = "sha256:21f8aedac861ffa3b850f8c0a6c373026189eb8630ac6e14a2bf8c55695cc090", size = 6942 }, ] [[package]] name = "orjson" -version = "3.10.15" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ae/f9/5dea21763eeff8c1590076918a446ea3d6140743e0e36f58f369928ed0f4/orjson-3.10.15.tar.gz", hash = "sha256:05ca7fe452a2e9d8d9d706a2984c95b9c2ebc5db417ce0b7a49b91d50642a23e", size = 5282482 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/52/09/e5ff18ad009e6f97eb7edc5f67ef98b3ce0c189da9c3eaca1f9587cd4c61/orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04", size = 249532 }, - { url = "https://files.pythonhosted.org/packages/bd/b8/a75883301fe332bd433d9b0ded7d2bb706ccac679602c3516984f8814fb5/orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8", size = 125229 }, - { url = "https://files.pythonhosted.org/packages/83/4b/22f053e7a364cc9c685be203b1e40fc5f2b3f164a9b2284547504eec682e/orjson-3.10.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c2c79fa308e6edb0ffab0a31fd75a7841bf2a79a20ef08a3c6e3b26814c8ca8", size = 150148 }, - { url = "https://files.pythonhosted.org/packages/63/64/1b54fc75ca328b57dd810541a4035fe48c12a161d466e3cf5b11a8c25649/orjson-3.10.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cb85490aa6bf98abd20607ab5c8324c0acb48d6da7863a51be48505646c814", size = 139748 }, - { url = "https://files.pythonhosted.org/packages/5e/ff/ff0c5da781807bb0a5acd789d9a7fbcb57f7b0c6e1916595da1f5ce69f3c/orjson-3.10.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763dadac05e4e9d2bc14938a45a2d0560549561287d41c465d3c58aec818b164", size = 154559 }, - { url = "https://files.pythonhosted.org/packages/4e/9a/11e2974383384ace8495810d4a2ebef5f55aacfc97b333b65e789c9d362d/orjson-3.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a330b9b4734f09a623f74a7490db713695e13b67c959713b78369f26b3dee6bf", size = 130349 }, - { url = "https://files.pythonhosted.org/packages/2d/c4/dd9583aea6aefee1b64d3aed13f51d2aadb014028bc929fe52936ec5091f/orjson-3.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a61a4622b7ff861f019974f73d8165be1bd9a0855e1cad18ee167acacabeb061", size = 138514 }, - { url = "https://files.pythonhosted.org/packages/53/3e/dcf1729230654f5c5594fc752de1f43dcf67e055ac0d300c8cdb1309269a/orjson-3.10.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd271247691574416b3228db667b84775c497b245fa275c6ab90dc1ffbbd2b3", size = 130940 }, - { url = "https://files.pythonhosted.org/packages/e8/2b/b9759fe704789937705c8a56a03f6c03e50dff7df87d65cba9a20fec5282/orjson-3.10.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4759b109c37f635aa5c5cc93a1b26927bfde24b254bcc0e1149a9fada253d2d", size = 414713 }, - { url = "https://files.pythonhosted.org/packages/a7/6b/b9dfdbd4b6e20a59238319eb203ae07c3f6abf07eef909169b7a37ae3bba/orjson-3.10.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e992fd5cfb8b9f00bfad2fd7a05a4299db2bbe92e6440d9dd2fab27655b3182", size = 141028 }, - { url = "https://files.pythonhosted.org/packages/7c/b5/40f5bbea619c7caf75eb4d652a9821875a8ed04acc45fe3d3ef054ca69fb/orjson-3.10.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f95fb363d79366af56c3f26b71df40b9a583b07bbaaf5b317407c4d58497852e", size = 129715 }, - { url = "https://files.pythonhosted.org/packages/38/60/2272514061cbdf4d672edbca6e59c7e01cd1c706e881427d88f3c3e79761/orjson-3.10.15-cp310-cp310-win32.whl", hash = "sha256:f9875f5fea7492da8ec2444839dcc439b0ef298978f311103d0b7dfd775898ab", size = 142473 }, - { url = "https://files.pythonhosted.org/packages/11/5d/be1490ff7eafe7fef890eb4527cf5bcd8cfd6117f3efe42a3249ec847b60/orjson-3.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:17085a6aa91e1cd70ca8533989a18b5433e15d29c574582f76f821737c8d5806", size = 133564 }, - { url = "https://files.pythonhosted.org/packages/7a/a2/21b25ce4a2c71dbb90948ee81bd7a42b4fbfc63162e57faf83157d5540ae/orjson-3.10.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c4cc83960ab79a4031f3119cc4b1a1c627a3dc09df125b27c4201dff2af7eaa6", size = 249533 }, - { url = 
"https://files.pythonhosted.org/packages/b2/85/2076fc12d8225698a51278009726750c9c65c846eda741e77e1761cfef33/orjson-3.10.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ddbeef2481d895ab8be5185f2432c334d6dec1f5d1933a9c83014d188e102cef", size = 125230 }, - { url = "https://files.pythonhosted.org/packages/06/df/a85a7955f11274191eccf559e8481b2be74a7c6d43075d0a9506aa80284d/orjson-3.10.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e590a0477b23ecd5b0ac865b1b907b01b3c5535f5e8a8f6ab0e503efb896334", size = 150148 }, - { url = "https://files.pythonhosted.org/packages/37/b3/94c55625a29b8767c0eed194cb000b3787e3c23b4cdd13be17bae6ccbb4b/orjson-3.10.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6be38bd103d2fd9bdfa31c2720b23b5d47c6796bcb1d1b598e3924441b4298d", size = 139749 }, - { url = "https://files.pythonhosted.org/packages/53/ba/c608b1e719971e8ddac2379f290404c2e914cf8e976369bae3cad88768b1/orjson-3.10.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ff4f6edb1578960ed628a3b998fa54d78d9bb3e2eb2cfc5c2a09732431c678d0", size = 154558 }, - { url = "https://files.pythonhosted.org/packages/b2/c4/c1fb835bb23ad788a39aa9ebb8821d51b1c03588d9a9e4ca7de5b354fdd5/orjson-3.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0482b21d0462eddd67e7fce10b89e0b6ac56570424662b685a0d6fccf581e13", size = 130349 }, - { url = "https://files.pythonhosted.org/packages/78/14/bb2b48b26ab3c570b284eb2157d98c1ef331a8397f6c8bd983b270467f5c/orjson-3.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bb5cc3527036ae3d98b65e37b7986a918955f85332c1ee07f9d3f82f3a6899b5", size = 138513 }, - { url = "https://files.pythonhosted.org/packages/4a/97/d5b353a5fe532e92c46467aa37e637f81af8468aa894cd77d2ec8a12f99e/orjson-3.10.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d569c1c462912acdd119ccbf719cf7102ea2c67dd03b99edcb1a3048651ac96b", size = 130942 }, - { url = "https://files.pythonhosted.org/packages/b5/5d/a067bec55293cca48fea8b9928cfa84c623be0cce8141d47690e64a6ca12/orjson-3.10.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:1e6d33efab6b71d67f22bf2962895d3dc6f82a6273a965fab762e64fa90dc399", size = 414717 }, - { url = "https://files.pythonhosted.org/packages/6f/9a/1485b8b05c6b4c4db172c438cf5db5dcfd10e72a9bc23c151a1137e763e0/orjson-3.10.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c33be3795e299f565681d69852ac8c1bc5c84863c0b0030b2b3468843be90388", size = 141033 }, - { url = "https://files.pythonhosted.org/packages/f8/d2/fc67523656e43a0c7eaeae9007c8b02e86076b15d591e9be11554d3d3138/orjson-3.10.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eea80037b9fae5339b214f59308ef0589fc06dc870578b7cce6d71eb2096764c", size = 129720 }, - { url = "https://files.pythonhosted.org/packages/79/42/f58c7bd4e5b54da2ce2ef0331a39ccbbaa7699b7f70206fbf06737c9ed7d/orjson-3.10.15-cp311-cp311-win32.whl", hash = "sha256:d5ac11b659fd798228a7adba3e37c010e0152b78b1982897020a8e019a94882e", size = 142473 }, - { url = "https://files.pythonhosted.org/packages/00/f8/bb60a4644287a544ec81df1699d5b965776bc9848d9029d9f9b3402ac8bb/orjson-3.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:cf45e0214c593660339ef63e875f32ddd5aa3b4adc15e662cdb80dc49e194f8e", size = 133570 }, - { url = "https://files.pythonhosted.org/packages/66/85/22fe737188905a71afcc4bf7cc4c79cd7f5bbe9ed1fe0aac4ce4c33edc30/orjson-3.10.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", 
hash = "sha256:9d11c0714fc85bfcf36ada1179400862da3288fc785c30e8297844c867d7505a", size = 249504 }, - { url = "https://files.pythonhosted.org/packages/48/b7/2622b29f3afebe938a0a9037e184660379797d5fd5234e5998345d7a5b43/orjson-3.10.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dba5a1e85d554e3897fa9fe6fbcff2ed32d55008973ec9a2b992bd9a65d2352d", size = 125080 }, - { url = "https://files.pythonhosted.org/packages/ce/8f/0b72a48f4403d0b88b2a41450c535b3e8989e8a2d7800659a967efc7c115/orjson-3.10.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7723ad949a0ea502df656948ddd8b392780a5beaa4c3b5f97e525191b102fff0", size = 150121 }, - { url = "https://files.pythonhosted.org/packages/06/ec/acb1a20cd49edb2000be5a0404cd43e3c8aad219f376ac8c60b870518c03/orjson-3.10.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6fd9bc64421e9fe9bd88039e7ce8e58d4fead67ca88e3a4014b143cec7684fd4", size = 139796 }, - { url = "https://files.pythonhosted.org/packages/33/e1/f7840a2ea852114b23a52a1c0b2bea0a1ea22236efbcdb876402d799c423/orjson-3.10.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dadba0e7b6594216c214ef7894c4bd5f08d7c0135f4dd0145600be4fbcc16767", size = 154636 }, - { url = "https://files.pythonhosted.org/packages/fa/da/31543337febd043b8fa80a3b67de627669b88c7b128d9ad4cc2ece005b7a/orjson-3.10.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48f59114fe318f33bbaee8ebeda696d8ccc94c9e90bc27dbe72153094e26f41", size = 130621 }, - { url = "https://files.pythonhosted.org/packages/ed/78/66115dc9afbc22496530d2139f2f4455698be444c7c2475cb48f657cefc9/orjson-3.10.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:035fb83585e0f15e076759b6fedaf0abb460d1765b6a36f48018a52858443514", size = 138516 }, - { url = "https://files.pythonhosted.org/packages/22/84/cd4f5fb5427ffcf823140957a47503076184cb1ce15bcc1165125c26c46c/orjson-3.10.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d13b7fe322d75bf84464b075eafd8e7dd9eae05649aa2a5354cfa32f43c59f17", size = 130762 }, - { url = "https://files.pythonhosted.org/packages/93/1f/67596b711ba9f56dd75d73b60089c5c92057f1130bb3a25a0f53fb9a583b/orjson-3.10.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7066b74f9f259849629e0d04db6609db4cf5b973248f455ba5d3bd58a4daaa5b", size = 414700 }, - { url = "https://files.pythonhosted.org/packages/7c/0c/6a3b3271b46443d90efb713c3e4fe83fa8cd71cda0d11a0f69a03f437c6e/orjson-3.10.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88dc3f65a026bd3175eb157fea994fca6ac7c4c8579fc5a86fc2114ad05705b7", size = 141077 }, - { url = "https://files.pythonhosted.org/packages/3b/9b/33c58e0bfc788995eccd0d525ecd6b84b40d7ed182dd0751cd4c1322ac62/orjson-3.10.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b342567e5465bd99faa559507fe45e33fc76b9fb868a63f1642c6bc0735ad02a", size = 129898 }, - { url = "https://files.pythonhosted.org/packages/01/c1/d577ecd2e9fa393366a1ea0a9267f6510d86e6c4bb1cdfb9877104cac44c/orjson-3.10.15-cp312-cp312-win32.whl", hash = "sha256:0a4f27ea5617828e6b58922fdbec67b0aa4bb844e2d363b9244c47fa2180e665", size = 142566 }, - { url = "https://files.pythonhosted.org/packages/ed/eb/a85317ee1732d1034b92d56f89f1de4d7bf7904f5c8fb9dcdd5b1c83917f/orjson-3.10.15-cp312-cp312-win_amd64.whl", hash = "sha256:ef5b87e7aa9545ddadd2309efe6824bd3dd64ac101c15dae0f2f597911d46eaa", size = 133732 }, - { url = 
"https://files.pythonhosted.org/packages/06/10/fe7d60b8da538e8d3d3721f08c1b7bff0491e8fa4dd3bf11a17e34f4730e/orjson-3.10.15-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bae0e6ec2b7ba6895198cd981b7cca95d1487d0147c8ed751e5632ad16f031a6", size = 249399 }, - { url = "https://files.pythonhosted.org/packages/6b/83/52c356fd3a61abd829ae7e4366a6fe8e8863c825a60d7ac5156067516edf/orjson-3.10.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f93ce145b2db1252dd86af37d4165b6faa83072b46e3995ecc95d4b2301b725a", size = 125044 }, - { url = "https://files.pythonhosted.org/packages/55/b2/d06d5901408e7ded1a74c7c20d70e3a127057a6d21355f50c90c0f337913/orjson-3.10.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c203f6f969210128af3acae0ef9ea6aab9782939f45f6fe02d05958fe761ef9", size = 150066 }, - { url = "https://files.pythonhosted.org/packages/75/8c/60c3106e08dc593a861755781c7c675a566445cc39558677d505878d879f/orjson-3.10.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8918719572d662e18b8af66aef699d8c21072e54b6c82a3f8f6404c1f5ccd5e0", size = 139737 }, - { url = "https://files.pythonhosted.org/packages/6a/8c/ae00d7d0ab8a4490b1efeb01ad4ab2f1982e69cc82490bf8093407718ff5/orjson-3.10.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f71eae9651465dff70aa80db92586ad5b92df46a9373ee55252109bb6b703307", size = 154804 }, - { url = "https://files.pythonhosted.org/packages/22/86/65dc69bd88b6dd254535310e97bc518aa50a39ef9c5a2a5d518e7a223710/orjson-3.10.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e117eb299a35f2634e25ed120c37c641398826c2f5a3d3cc39f5993b96171b9e", size = 130583 }, - { url = "https://files.pythonhosted.org/packages/bb/00/6fe01ededb05d52be42fabb13d93a36e51f1fd9be173bd95707d11a8a860/orjson-3.10.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13242f12d295e83c2955756a574ddd6741c81e5b99f2bef8ed8d53e47a01e4b7", size = 138465 }, - { url = "https://files.pythonhosted.org/packages/db/2f/4cc151c4b471b0cdc8cb29d3eadbce5007eb0475d26fa26ed123dca93b33/orjson-3.10.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7946922ada8f3e0b7b958cc3eb22cfcf6c0df83d1fe5521b4a100103e3fa84c8", size = 130742 }, - { url = "https://files.pythonhosted.org/packages/9f/13/8a6109e4b477c518498ca37963d9c0eb1508b259725553fb53d53b20e2ea/orjson-3.10.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b7155eb1623347f0f22c38c9abdd738b287e39b9982e1da227503387b81b34ca", size = 414669 }, - { url = "https://files.pythonhosted.org/packages/22/7b/1d229d6d24644ed4d0a803de1b0e2df832032d5beda7346831c78191b5b2/orjson-3.10.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:208beedfa807c922da4e81061dafa9c8489c6328934ca2a562efa707e049e561", size = 141043 }, - { url = "https://files.pythonhosted.org/packages/cc/d3/6dc91156cf12ed86bed383bcb942d84d23304a1e57b7ab030bf60ea130d6/orjson-3.10.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eca81f83b1b8c07449e1d6ff7074e82e3fd6777e588f1a6632127f286a968825", size = 129826 }, - { url = "https://files.pythonhosted.org/packages/b3/38/c47c25b86f6996f1343be721b6ea4367bc1c8bc0fc3f6bbcd995d18cb19d/orjson-3.10.15-cp313-cp313-win32.whl", hash = "sha256:c03cd6eea1bd3b949d0d007c8d57049aa2b39bd49f58b4b2af571a5d3833d890", size = 142542 }, - { url = "https://files.pythonhosted.org/packages/27/f1/1d7ec15b20f8ce9300bc850de1e059132b88990e46cd0ccac29cbf11e4f9/orjson-3.10.15-cp313-cp313-win_amd64.whl", 
hash = "sha256:fd56a26a04f6ba5fb2045b0acc487a63162a958ed837648c5781e1fe3316cfbf", size = 133444 }, +version = "3.10.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/92/f7/3219b56f47b4f5e864fb11cdf4ac0aaa3de608730ad2dc4c6e16382f35ec/orjson-3.10.14.tar.gz", hash = "sha256:cf31f6f071a6b8e7aa1ead1fa27b935b48d00fbfa6a28ce856cfff2d5dd68eed", size = 5282116 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/62/64348b8b29a14c7342f6aa45c8be0a87fdda2ce7716bc123717376537077/orjson-3.10.14-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:849ea7845a55f09965826e816cdc7689d6cf74fe9223d79d758c714af955bcb6", size = 249439 }, + { url = "https://files.pythonhosted.org/packages/9f/51/48f4dfbca7b4db630316b170db4a150a33cd405650258bd62a2d619b43b4/orjson-3.10.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5947b139dfa33f72eecc63f17e45230a97e741942955a6c9e650069305eb73d", size = 135811 }, + { url = "https://files.pythonhosted.org/packages/a1/1c/e18770843e6d045605c8e00a1be801da5668fa934b323b0492a49c9dee4f/orjson-3.10.14-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cde6d76910d3179dae70f164466692f4ea36da124d6fb1a61399ca589e81d69a", size = 150154 }, + { url = "https://files.pythonhosted.org/packages/51/1e/3817dc79164f1fc17fc53102f74f62d31f5f4ec042abdd24d94c5e06e51c/orjson-3.10.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6dfbaeb7afa77ca608a50e2770a0461177b63a99520d4928e27591b142c74b1", size = 139740 }, + { url = "https://files.pythonhosted.org/packages/ff/fc/fbf9e25448f7a2d67c1a2b6dad78a9340666bf9fda3339ff59b1e93f0b6f/orjson-3.10.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa45e489ef80f28ff0e5ba0a72812b8cfc7c1ef8b46a694723807d1b07c89ebb", size = 154479 }, + { url = "https://files.pythonhosted.org/packages/d4/df/c8b7ea21ff658f6a9a26d562055631c01d445bda5eb613c02c7d0934607d/orjson-3.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5007abfdbb1d866e2aa8990bd1c465f0f6da71d19e695fc278282be12cffa5", size = 130414 }, + { url = "https://files.pythonhosted.org/packages/df/f7/e29c2d42bef8fbf696a5e54e6339b0b9ea5179326950fee6ae80acf59d09/orjson-3.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1b49e2af011c84c3f2d541bb5cd1e3c7c2df672223e7e3ea608f09cf295e5f8a", size = 138545 }, + { url = "https://files.pythonhosted.org/packages/8e/97/afdf2908fe8eaeecb29e97fa82dc934f275acf330e5271def0b8fbac5478/orjson-3.10.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:164ac155109226b3a2606ee6dda899ccfbe6e7e18b5bdc3fbc00f79cc074157d", size = 130952 }, + { url = "https://files.pythonhosted.org/packages/4a/dd/04e01c1305694f47e9794c60ec7cece02e55fa9d57c5d72081eaaa62ad1d/orjson-3.10.14-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:6b1225024cf0ef5d15934b5ffe9baf860fe8bc68a796513f5ea4f5056de30bca", size = 414673 }, + { url = "https://files.pythonhosted.org/packages/fa/12/28c4d5f6a395ac9693b250f0662366968c47fc99c8f3cd803a65b1f5ba46/orjson-3.10.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d6546e8073dc382e60fcae4a001a5a1bc46da5eab4a4878acc2d12072d6166d5", size = 141002 }, + { url = "https://files.pythonhosted.org/packages/21/f6/357cb167c2d2fd9542251cfd9f68681b67ed4dcdac82aa6ee2f4f3ab952e/orjson-3.10.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:9f1d2942605c894162252d6259b0121bf1cb493071a1ea8cb35d79cb3e6ac5bc", size = 129626 }, + { url = "https://files.pythonhosted.org/packages/df/07/d9062353500df9db8bfa7c6a5982687c97d0b69a5b158c4166d407ac94e2/orjson-3.10.14-cp310-cp310-win32.whl", hash = "sha256:397083806abd51cf2b3bbbf6c347575374d160331a2d33c5823e22249ad3118b", size = 142429 }, + { url = "https://files.pythonhosted.org/packages/50/ba/6ba2bf69ac0526d143aebe78bc39e6e5fbb51d5336fbc5efb9aab6687cd9/orjson-3.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:fa18f949d3183a8d468367056be989666ac2bef3a72eece0bade9cdb733b3c28", size = 133512 }, + { url = "https://files.pythonhosted.org/packages/bf/18/26721760368e12b691fb6811692ed21ae5275ea918db409ba26866cacbe8/orjson-3.10.14-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f506fd666dd1ecd15a832bebc66c4df45c1902fd47526292836c339f7ba665a9", size = 249437 }, + { url = "https://files.pythonhosted.org/packages/d5/5b/2adfe7cc301edeb3bffc1942956659c19ec00d51a21c53c17c0767bebf47/orjson-3.10.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efe5fd254cfb0eeee13b8ef7ecb20f5d5a56ddda8a587f3852ab2cedfefdb5f6", size = 135812 }, + { url = "https://files.pythonhosted.org/packages/8a/68/07df7787fd9ff6dba815b2d793eec5e039d288fdf150431ed48a660bfcbb/orjson-3.10.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ddc8c866d7467f5ee2991397d2ea94bcf60d0048bdd8ca555740b56f9042725", size = 150153 }, + { url = "https://files.pythonhosted.org/packages/02/71/f68562734461b801b53bacd5365e079dcb3c78656a662f0639494880e522/orjson-3.10.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af8e42ae4363773658b8d578d56dedffb4f05ceeb4d1d4dd3fb504950b45526", size = 139742 }, + { url = "https://files.pythonhosted.org/packages/04/03/1355fb27652582f00d3c62e93a32b982fa42bc31d2e07f0a317867069096/orjson-3.10.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84dd83110503bc10e94322bf3ffab8bc49150176b49b4984dc1cce4c0a993bf9", size = 154479 }, + { url = "https://files.pythonhosted.org/packages/7c/47/1c2a840f27715e8bc2bbafffc851512ede6e53483593eded190919bdcaf4/orjson-3.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36f5bfc0399cd4811bf10ec7a759c7ab0cd18080956af8ee138097d5b5296a95", size = 130413 }, + { url = "https://files.pythonhosted.org/packages/dd/b2/5bb51006cbae85b052d1bbee7ff43ae26fa155bb3d31a71b0c07d384d5e3/orjson-3.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868943660fb2a1e6b6b965b74430c16a79320b665b28dd4511d15ad5038d37d5", size = 138545 }, + { url = "https://files.pythonhosted.org/packages/79/30/7841a5dd46bb46b8e868791d5469c9d4788d3e26b7e69d40256647997baf/orjson-3.10.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33449c67195969b1a677533dee9d76e006001213a24501333624623e13c7cc8e", size = 130953 }, + { url = "https://files.pythonhosted.org/packages/08/49/720e7c2040c0f1df630a36d83d449bd7e4d4471071d5ece47a4f7211d570/orjson-3.10.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e4c9f60f9fb0b5be66e416dcd8c9d94c3eabff3801d875bdb1f8ffc12cf86905", size = 414675 }, + { url = "https://files.pythonhosted.org/packages/50/b0/ca7619f34280e7dcbd50dbc9c5fe5200c12cd7269b8858652beb3887483f/orjson-3.10.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0de4d6315cfdbd9ec803b945c23b3a68207fd47cbe43626036d97e8e9561a436", size = 141004 }, + { url = 
"https://files.pythonhosted.org/packages/75/1b/7548e3a711543f438e87a4349e00439ab7f37807942e5659f29363f35765/orjson-3.10.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:83adda3db595cb1a7e2237029b3249c85afbe5c747d26b41b802e7482cb3933e", size = 129629 }, + { url = "https://files.pythonhosted.org/packages/b0/1e/4930a6ff46debd6be1ff18e869b7bc43a7ad762c865610b7e745038d6f68/orjson-3.10.14-cp311-cp311-win32.whl", hash = "sha256:998019ef74a4997a9d741b1473533cdb8faa31373afc9849b35129b4b8ec048d", size = 142430 }, + { url = "https://files.pythonhosted.org/packages/28/e0/6cc1cd1dfde36555e81ac869f7847e86bb11c27f97b72fde2f1509b12163/orjson-3.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:9d034abdd36f0f0f2240f91492684e5043d46f290525d1117712d5b8137784eb", size = 133516 }, + { url = "https://files.pythonhosted.org/packages/8c/dc/dc5a882be016ee8688bd867ad3b4e3b2ab039d91383099702301a1adb6ac/orjson-3.10.14-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:2ad4b7e367efba6dc3f119c9a0fcd41908b7ec0399a696f3cdea7ec477441b09", size = 249396 }, + { url = "https://files.pythonhosted.org/packages/f0/95/4c23ff5c0505cd687928608e0b7910ccb44ce59490079e1c17b7610aa0d0/orjson-3.10.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f496286fc85e93ce0f71cc84fc1c42de2decf1bf494094e188e27a53694777a7", size = 135689 }, + { url = "https://files.pythonhosted.org/packages/ad/39/b4bdd19604dce9d6509c4d86e8e251a1373a24204b4c4169866dcecbe5f5/orjson-3.10.14-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c7f189bbfcded40e41a6969c1068ba305850ba016665be71a217918931416fbf", size = 150136 }, + { url = "https://files.pythonhosted.org/packages/1d/92/7b9bad96353abd3e89947960252dcf1022ce2df7f29056e434de05e18b6d/orjson-3.10.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cc8204f0b75606869c707da331058ddf085de29558b516fc43c73ee5ee2aadb", size = 139766 }, + { url = "https://files.pythonhosted.org/packages/a6/bd/abb13c86540b7a91b40d7d9f8549d03a026bc22d78fa93f71d68b8f4c36e/orjson-3.10.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:deaa2899dff7f03ab667e2ec25842d233e2a6a9e333efa484dfe666403f3501c", size = 154533 }, + { url = "https://files.pythonhosted.org/packages/c0/02/0bcb91ec9c7143012359983aca44f567f87df379957cd4af11336217b12f/orjson-3.10.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1c3ea52642c9714dc6e56de8a451a066f6d2707d273e07fe8a9cc1ba073813d", size = 130658 }, + { url = "https://files.pythonhosted.org/packages/b4/1e/b304596bb1f800d47d6e92305bd09f0eef693ed4f7b2095db63f9808b229/orjson-3.10.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d3f9ed72e7458ded9a1fb1b4d4ed4c4fdbaf82030ce3f9274b4dc1bff7ace2b", size = 138546 }, + { url = "https://files.pythonhosted.org/packages/56/c7/65d72b22080186ef618a46afeb9386e20056f3237664090f3a2f8da1cd6d/orjson-3.10.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:07520685d408a2aba514c17ccc16199ff2934f9f9e28501e676c557f454a37fe", size = 130774 }, + { url = "https://files.pythonhosted.org/packages/4d/85/1ab35a832f32b37ccd673721e845cf302f23453603112255af611c91d1d1/orjson-3.10.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:76344269b550ea01488d19a2a369ab572c1ac4449a72e9f6ac0d70eb1cbfb953", size = 414649 }, + { url = "https://files.pythonhosted.org/packages/d1/7d/1d6575f779bab8fe698fa6d52e8aa3aa0a9fca4885d0bf6197700455713a/orjson-3.10.14-cp312-cp312-musllinux_1_2_i686.whl", 
hash = "sha256:e2979d0f2959990620f7e62da6cd954e4620ee815539bc57a8ae46e2dacf90e3", size = 141060 }, + { url = "https://files.pythonhosted.org/packages/f8/26/68513e28b3bd1d7633318ed2818e86d1bfc8b782c87c520c7b363092837f/orjson-3.10.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:03f61ca3674555adcb1aa717b9fc87ae936aa7a63f6aba90a474a88701278780", size = 129798 }, + { url = "https://files.pythonhosted.org/packages/44/ca/020fb99c98ff7267ba18ce798ff0c8c3aa97cd949b611fc76cad3c87e534/orjson-3.10.14-cp312-cp312-win32.whl", hash = "sha256:d5075c54edf1d6ad81d4c6523ce54a748ba1208b542e54b97d8a882ecd810fd1", size = 142524 }, + { url = "https://files.pythonhosted.org/packages/70/7f/f2d346819a273653825e7c92dc26418c8da506003c9fc1dfe8157e733b2e/orjson-3.10.14-cp312-cp312-win_amd64.whl", hash = "sha256:175cafd322e458603e8ce73510a068d16b6e6f389c13f69bf16de0e843d7d406", size = 133663 }, + { url = "https://files.pythonhosted.org/packages/46/bb/f1b037d89f580c79eda0940772384cc226a697be1cb4eb94ae4e792aa34c/orjson-3.10.14-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:0905ca08a10f7e0e0c97d11359609300eb1437490a7f32bbaa349de757e2e0c7", size = 249333 }, + { url = "https://files.pythonhosted.org/packages/e4/72/12958a073cace3f8acef0f9a30739d95f46bbb1544126fecad11527d4508/orjson-3.10.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92d13292249f9f2a3e418cbc307a9fbbef043c65f4bd8ba1eb620bc2aaba3d15", size = 125038 }, + { url = "https://files.pythonhosted.org/packages/c0/ae/461f78b1c98de1bc034af88bc21c6a792cc63373261fbc10a6ee560814fa/orjson-3.10.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90937664e776ad316d64251e2fa2ad69265e4443067668e4727074fe39676414", size = 130604 }, + { url = "https://files.pythonhosted.org/packages/ae/d2/17f50513f56bff7898840fddf7fb88f501305b9b2605d2793ff224789665/orjson-3.10.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9ed3d26c4cb4f6babaf791aa46a029265850e80ec2a566581f5c2ee1a14df4f1", size = 130756 }, + { url = "https://files.pythonhosted.org/packages/fa/bc/673856e4af94c9890dfd8e2054c05dc2ddc16d1728c2aa0c5bd198943105/orjson-3.10.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:56ee546c2bbe9599aba78169f99d1dc33301853e897dbaf642d654248280dc6e", size = 414613 }, + { url = "https://files.pythonhosted.org/packages/09/01/08c5b69b0756dd1790fcffa569d6a28dedcd7b97f825e4b46537b788908c/orjson-3.10.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:901e826cb2f1bdc1fcef3ef59adf0c451e8f7c0b5deb26c1a933fb66fb505eae", size = 141010 }, + { url = "https://files.pythonhosted.org/packages/5b/98/72883bb6cf88fd364996e62d2026622ca79bfb8dbaf96ccdd2018ada25b1/orjson-3.10.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:26336c0d4b2d44636e1e1e6ed1002f03c6aae4a8a9329561c8883f135e9ff010", size = 129732 }, + { url = "https://files.pythonhosted.org/packages/e4/99/347418f7ef56dcb478ba131a6112b8ddd5b747942652b6e77a53155a7e21/orjson-3.10.14-cp313-cp313-win32.whl", hash = "sha256:e2bc525e335a8545c4e48f84dd0328bc46158c9aaeb8a1c2276546e94540ea3d", size = 142504 }, + { url = "https://files.pythonhosted.org/packages/59/ac/5e96cad01083015f7bfdb02ccafa489da8e6caa7f4c519e215f04d2bd856/orjson-3.10.14-cp313-cp313-win_amd64.whl", hash = "sha256:eca04dfd792cedad53dc9a917da1a522486255360cb4e77619343a20d9f35364", size = 133388 }, ] [[package]] @@ -3546,19 +3396,18 @@ wheels = [ [[package]] name = "posthog" -version = "3.16.0" +version = "3.8.3" source = { registry = "https://pypi.org/simple" } 
dependencies = [ { name = "backoff", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "distro", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "monotonic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b4/cd/d349468731e2cdbd61bc9655acae5dac961156f4b9c652f011b8433d906e/posthog-3.16.0.tar.gz", hash = "sha256:953176a443b30b1404c0f36010a95caad60a83c31ecb17b427f6d986f6f765c1", size = 65192 } +sdist = { url = "https://files.pythonhosted.org/packages/51/5a/057ebd6b279940e2cf2cbe8b10a4b34bc832f6f82b10649dcd12210219e9/posthog-3.8.3.tar.gz", hash = "sha256:263df03ea312d4b47a3d5ea393fdb22ff2ed78140d5ce9af9dd0618ae245a44b", size = 56864 } wheels = [ - { url = "https://files.pythonhosted.org/packages/75/89/5524d64b421e946f85a42d9e95348bfd1b43335eadb9f3ee4a0e368a1b47/posthog-3.16.0-py2.py3-none-any.whl", hash = "sha256:6d2140f58823e540855885a77474a32045f77c2276351791db4dca844f278b37", size = 75934 }, + { url = "https://files.pythonhosted.org/packages/83/3a/ff36f067367de4477d114ab04f42d5830849bad1b0949eb70c9858cdb7e2/posthog-3.8.3-py2.py3-none-any.whl", hash = "sha256:7215c4d7649b0c87905b42f460403311564996d776ab48d39852f46539a50f22", size = 64665 }, ] [[package]] @@ -3595,115 +3444,99 @@ wheels = [ [[package]] name = "prompt-toolkit" -version = "3.0.50" +version = "3.0.48" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "wcwidth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/e1/bd15cb8ffdcfeeb2bdc215de3c3cffca11408d829e4b8416dcfe71ba8854/prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab", size = 429087 } +sdist = { url = "https://files.pythonhosted.org/packages/2d/4f/feb5e137aff82f7c7f3248267b97451da3644f6cdc218edfe549fb354127/prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90", size = 424684 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/ea/d836f008d33151c7a1f62caf3d8dd782e4d15f6a43897f64480c2b8de2ad/prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198", size = 387816 }, + { url = "https://files.pythonhosted.org/packages/a9/6a/fd08d94654f7e67c52ca30523a178b3f8ccc4237fce4be90d39c938a831a/prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e", size = 386595 }, ] [[package]] name = "propcache" -version = "0.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/92/76/f941e63d55c0293ff7829dd21e7cf1147e90a526756869a9070f287a68c9/propcache-0.3.0.tar.gz", hash = "sha256:a8fd93de4e1d278046345f49e2238cdb298589325849b2645d4a94c53faeffc5", size = 42722 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/8d/f0/dc9ec44d2e63c13f816a16398c039329736712440ff82b682dd9a78d2258/propcache-0.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:efa44f64c37cc30c9f05932c740a8b40ce359f51882c70883cc95feac842da4d", size = 79574 }, - { url = "https://files.pythonhosted.org/packages/99/3a/33a207dfcb3ee1131ea23a2aeb726c3c4994f89546d7eadf8c50627c8b63/propcache-0.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2383a17385d9800b6eb5855c2f05ee550f803878f344f58b6e194de08b96352c", size = 45898 }, - { url = "https://files.pythonhosted.org/packages/af/68/0bde765c9f5dc02b4466d2838600af38c81b184c26c6d3cd44643ac668e3/propcache-0.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3e7420211f5a65a54675fd860ea04173cde60a7cc20ccfbafcccd155225f8bc", size = 45418 }, - { url = "https://files.pythonhosted.org/packages/06/a6/c682669bae41199358e16cc7b1c818f91c5f9e925cc863dabd98ce32716a/propcache-0.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3302c5287e504d23bb0e64d2a921d1eb4a03fb93a0a0aa3b53de059f5a5d737d", size = 205116 }, - { url = "https://files.pythonhosted.org/packages/fb/ae/82cfb50267d9a1baa0340728eb9e32245a68538fef929d7bb786d01c11a8/propcache-0.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e2e068a83552ddf7a39a99488bcba05ac13454fb205c847674da0352602082f", size = 219405 }, - { url = "https://files.pythonhosted.org/packages/ab/16/7b6b2bf8c207cfd0e5ca3d41aea397392de9899867ec024f88c94f9ae2ab/propcache-0.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d913d36bdaf368637b4f88d554fb9cb9d53d6920b9c5563846555938d5450bf", size = 217656 }, - { url = "https://files.pythonhosted.org/packages/f4/eb/41447de61eb5454891658d0fb9b1d7d35d49a4a5dd2e0c86f2c332e8b7e1/propcache-0.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ee1983728964d6070ab443399c476de93d5d741f71e8f6e7880a065f878e0b9", size = 205414 }, - { url = "https://files.pythonhosted.org/packages/03/b6/9719878f8b5b20d37ee663a40f8dcbf888559e4d3be2ba2fe5c790fc28d2/propcache-0.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:36ca5e9a21822cc1746023e88f5c0af6fce3af3b85d4520efb1ce4221bed75cc", size = 195746 }, - { url = "https://files.pythonhosted.org/packages/bb/ec/b79c3210ba459800d1a8f1afeb81d7b503893555a7b79c24082ff26d3314/propcache-0.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9ecde3671e62eeb99e977f5221abcf40c208f69b5eb986b061ccec317c82ebd0", size = 198651 }, - { url = "https://files.pythonhosted.org/packages/48/f6/2b0140bc47013e43575973068e72ad51ee9f22f2dad42e6d6e362d715125/propcache-0.3.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d383bf5e045d7f9d239b38e6acadd7b7fdf6c0087259a84ae3475d18e9a2ae8b", size = 195858 }, - { url = "https://files.pythonhosted.org/packages/97/3d/2fa19303d87aa21f9a42dcd870d6088a2a776ff5518e394d50412c3679a6/propcache-0.3.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8cb625bcb5add899cb8ba7bf716ec1d3e8f7cdea9b0713fa99eadf73b6d4986f", size = 197181 }, - { url = "https://files.pythonhosted.org/packages/09/f3/a2170ffc9fa774c1dfd52294113c0fa6cdc5b71dbfd7129bb9378fdd8b42/propcache-0.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5fa159dcee5dba00c1def3231c249cf261185189205073bde13797e57dd7540a", size = 207411 }, - { url = 
"https://files.pythonhosted.org/packages/d6/1e/cb8a6c82178efffa0b00dc463f36cd086f747345585140aeb95d5cb93666/propcache-0.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7080b0159ce05f179cfac592cda1a82898ca9cd097dacf8ea20ae33474fbb25", size = 210724 }, - { url = "https://files.pythonhosted.org/packages/2b/72/6e273543337a3e22cf462eb836f065a9830b4d41baeb1f58db2695c934f3/propcache-0.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ed7161bccab7696a473fe7ddb619c1d75963732b37da4618ba12e60899fefe4f", size = 203511 }, - { url = "https://files.pythonhosted.org/packages/f3/ea/7412c79bcec06597c967d49789f5a1f7fd76a8654908feeaefafb7447c9a/propcache-0.3.0-cp310-cp310-win32.whl", hash = "sha256:bf0d9a171908f32d54f651648c7290397b8792f4303821c42a74e7805bfb813c", size = 40600 }, - { url = "https://files.pythonhosted.org/packages/a3/42/488c90190491f3e61bd2c2fb0b3d91c1c78778270dde2f0b6633fc9ff723/propcache-0.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:42924dc0c9d73e49908e35bbdec87adedd651ea24c53c29cac103ede0ea1d340", size = 44714 }, - { url = "https://files.pythonhosted.org/packages/45/c9/cf09ff7e6d09f14149094f7cd50d2dec032b24e61af21fc4540da2b17bfb/propcache-0.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9ddd49258610499aab83b4f5b61b32e11fce873586282a0e972e5ab3bcadee51", size = 79568 }, - { url = "https://files.pythonhosted.org/packages/c8/32/2424d89da88cd81b7d148e0d2b3131461b570a02aa9d84a2e567509adb0d/propcache-0.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2578541776769b500bada3f8a4eeaf944530516b6e90c089aa368266ed70c49e", size = 45895 }, - { url = "https://files.pythonhosted.org/packages/f6/91/ee5b6aa7aa31754fefcf0c5180e09223cac380ef195c4ddc8c266eb641ea/propcache-0.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8074c5dd61c8a3e915fa8fc04754fa55cfa5978200d2daa1e2d4294c1f136aa", size = 45427 }, - { url = "https://files.pythonhosted.org/packages/bf/73/38f0128462b8b616181d8c53bd5d04eac41c50c449b07615c65d56ba0a9b/propcache-0.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b58229a844931bca61b3a20efd2be2a2acb4ad1622fc026504309a6883686fbf", size = 232427 }, - { url = "https://files.pythonhosted.org/packages/59/82/f3d4e84f4539dcfc9c3d338282b9e915f5b63c921986ecfdf7af2d12f87c/propcache-0.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e45377d5d6fefe1677da2a2c07b024a6dac782088e37c0b1efea4cfe2b1be19b", size = 239985 }, - { url = "https://files.pythonhosted.org/packages/42/e8/029f58cccbae83c9969a7ee7a06558d5b83a93dfc54e0f4f70234bbaea1b/propcache-0.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ec5060592d83454e8063e487696ac3783cc48c9a329498bafae0d972bc7816c9", size = 238827 }, - { url = "https://files.pythonhosted.org/packages/8b/a2/c373561777c0cb9b9e7b9b9a10b9b3a7b6bde75a2535b962231cecc8fdb8/propcache-0.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15010f29fbed80e711db272909a074dc79858c6d28e2915704cfc487a8ac89c6", size = 231348 }, - { url = "https://files.pythonhosted.org/packages/d7/d2/4673f715beedf6038b485bcd976813149231d9df5bb6196cb69a09c185c9/propcache-0.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a254537b9b696ede293bfdbc0a65200e8e4507bc9f37831e2a0318a9b333c85c", size = 220426 }, - { url = "https://files.pythonhosted.org/packages/e0/f6/1da65f900927bafd4675a16e890618ec7643f2f922bf0e4d84bb38645618/propcache-0.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:2b975528998de037dfbc10144b8aed9b8dd5a99ec547f14d1cb7c5665a43f075", size = 220294 }, - { url = "https://files.pythonhosted.org/packages/ff/86/620451bdc02e91b1712cd71890c17077ee97e2a28493836a87e47b8e70ff/propcache-0.3.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:19d36bb351ad5554ff20f2ae75f88ce205b0748c38b146c75628577020351e3c", size = 212492 }, - { url = "https://files.pythonhosted.org/packages/6e/1b/e8f86921ed4016da80faf3b8f515f7829decabdbff106736bfff353bceba/propcache-0.3.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6032231d4a5abd67c7f71168fd64a47b6b451fbcb91c8397c2f7610e67683810", size = 215113 }, - { url = "https://files.pythonhosted.org/packages/1a/95/a61d86cc49aa0945f6c06f3a4614fc543e311a50558c92861f5e9691a37c/propcache-0.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6985a593417cdbc94c7f9c3403747335e450c1599da1647a5af76539672464d3", size = 228330 }, - { url = "https://files.pythonhosted.org/packages/8f/7d/10dbae48ff2bb189e92c2b3487a48f3229146a25941ad0d485934d1104d4/propcache-0.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6a1948df1bb1d56b5e7b0553c0fa04fd0e320997ae99689488201f19fa90d2e7", size = 231942 }, - { url = "https://files.pythonhosted.org/packages/39/ce/82d16aec96c5513ae7db13ab901a65a1e54c915292fb5b2390e33275b61d/propcache-0.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8319293e85feadbbfe2150a5659dbc2ebc4afdeaf7d98936fb9a2f2ba0d4c35c", size = 223077 }, - { url = "https://files.pythonhosted.org/packages/c8/e0/cb077e8e7a583c733df7f53327fcbdb92e42be59b976ce60bf1d904a0efe/propcache-0.3.0-cp311-cp311-win32.whl", hash = "sha256:63f26258a163c34542c24808f03d734b338da66ba91f410a703e505c8485791d", size = 40455 }, - { url = "https://files.pythonhosted.org/packages/d8/35/57abeb6146fe3c19081eeaf3d9d4cfea256f87f1e5101acf80d3332c1820/propcache-0.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:cacea77ef7a2195f04f9279297684955e3d1ae4241092ff0cfcef532bb7a1c32", size = 44705 }, - { url = "https://files.pythonhosted.org/packages/8d/2c/921f15dc365796ec23975b322b0078eae72995c7b4d49eba554c6a308d70/propcache-0.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e53d19c2bf7d0d1e6998a7e693c7e87300dd971808e6618964621ccd0e01fe4e", size = 79867 }, - { url = "https://files.pythonhosted.org/packages/11/a5/4a6cc1a559d1f2fb57ea22edc4245158cdffae92f7f92afcee2913f84417/propcache-0.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a61a68d630e812b67b5bf097ab84e2cd79b48c792857dc10ba8a223f5b06a2af", size = 46109 }, - { url = "https://files.pythonhosted.org/packages/e1/6d/28bfd3af3a567ad7d667348e7f46a520bda958229c4d545ba138a044232f/propcache-0.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fb91d20fa2d3b13deea98a690534697742029f4fb83673a3501ae6e3746508b5", size = 45635 }, - { url = "https://files.pythonhosted.org/packages/73/20/d75b42eaffe5075eac2f4e168f6393d21c664c91225288811d85451b2578/propcache-0.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67054e47c01b7b349b94ed0840ccae075449503cf1fdd0a1fdd98ab5ddc2667b", size = 242159 }, - { url = "https://files.pythonhosted.org/packages/a5/fb/4b537dd92f9fd4be68042ec51c9d23885ca5fafe51ec24c58d9401034e5f/propcache-0.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:997e7b8f173a391987df40f3b52c423e5850be6f6df0dcfb5376365440b56667", size = 248163 }, - { url = 
"https://files.pythonhosted.org/packages/e7/af/8a9db04ac596d531ca0ef7dde518feaadfcdabef7b17d6a5ec59ee3effc2/propcache-0.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d663fd71491dde7dfdfc899d13a067a94198e90695b4321084c6e450743b8c7", size = 248794 }, - { url = "https://files.pythonhosted.org/packages/9d/c4/ecfc988879c0fd9db03228725b662d76cf484b6b46f7e92fee94e4b52490/propcache-0.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8884ba1a0fe7210b775106b25850f5e5a9dc3c840d1ae9924ee6ea2eb3acbfe7", size = 243912 }, - { url = "https://files.pythonhosted.org/packages/04/a2/298dd27184faa8b7d91cc43488b578db218b3cc85b54d912ed27b8c5597a/propcache-0.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa806bbc13eac1ab6291ed21ecd2dd426063ca5417dd507e6be58de20e58dfcf", size = 229402 }, - { url = "https://files.pythonhosted.org/packages/be/0d/efe7fec316ca92dbf4bc4a9ba49ca889c43ca6d48ab1d6fa99fc94e5bb98/propcache-0.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6f4d7a7c0aff92e8354cceca6fe223973ddf08401047920df0fcb24be2bd5138", size = 226896 }, - { url = "https://files.pythonhosted.org/packages/60/63/72404380ae1d9c96d96e165aa02c66c2aae6072d067fc4713da5cde96762/propcache-0.3.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9be90eebc9842a93ef8335291f57b3b7488ac24f70df96a6034a13cb58e6ff86", size = 221447 }, - { url = "https://files.pythonhosted.org/packages/9d/18/b8392cab6e0964b67a30a8f4dadeaff64dc7022b5a34bb1d004ea99646f4/propcache-0.3.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bf15fc0b45914d9d1b706f7c9c4f66f2b7b053e9517e40123e137e8ca8958b3d", size = 222440 }, - { url = "https://files.pythonhosted.org/packages/6f/be/105d9ceda0f97eff8c06bac1673448b2db2a497444de3646464d3f5dc881/propcache-0.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5a16167118677d94bb48bfcd91e420088854eb0737b76ec374b91498fb77a70e", size = 234104 }, - { url = "https://files.pythonhosted.org/packages/cb/c9/f09a4ec394cfcce4053d8b2a04d622b5f22d21ba9bb70edd0cad061fa77b/propcache-0.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:41de3da5458edd5678b0f6ff66691507f9885f5fe6a0fb99a5d10d10c0fd2d64", size = 239086 }, - { url = "https://files.pythonhosted.org/packages/ea/aa/96f7f9ed6def82db67c972bdb7bd9f28b95d7d98f7e2abaf144c284bf609/propcache-0.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:728af36011bb5d344c4fe4af79cfe186729efb649d2f8b395d1572fb088a996c", size = 230991 }, - { url = "https://files.pythonhosted.org/packages/5a/11/bee5439de1307d06fad176f7143fec906e499c33d7aff863ea8428b8e98b/propcache-0.3.0-cp312-cp312-win32.whl", hash = "sha256:6b5b7fd6ee7b54e01759f2044f936dcf7dea6e7585f35490f7ca0420fe723c0d", size = 40337 }, - { url = "https://files.pythonhosted.org/packages/e4/17/e5789a54a0455a61cb9efc4ca6071829d992220c2998a27c59aeba749f6f/propcache-0.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:2d15bc27163cd4df433e75f546b9ac31c1ba7b0b128bfb1b90df19082466ff57", size = 44404 }, - { url = "https://files.pythonhosted.org/packages/3a/0f/a79dd23a0efd6ee01ab0dc9750d8479b343bfd0c73560d59d271eb6a99d4/propcache-0.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a2b9bf8c79b660d0ca1ad95e587818c30ccdb11f787657458d6f26a1ea18c568", size = 77287 }, - { url = "https://files.pythonhosted.org/packages/b8/51/76675703c90de38ac75adb8deceb3f3ad99b67ff02a0fa5d067757971ab8/propcache-0.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:b0c1a133d42c6fc1f5fbcf5c91331657a1ff822e87989bf4a6e2e39b818d0ee9", size = 44923 }, - { url = "https://files.pythonhosted.org/packages/01/9b/fd5ddbee66cf7686e73c516227c2fd9bf471dbfed0f48329d095ea1228d3/propcache-0.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bb2f144c6d98bb5cbc94adeb0447cfd4c0f991341baa68eee3f3b0c9c0e83767", size = 44325 }, - { url = "https://files.pythonhosted.org/packages/13/1c/6961f11eb215a683b34b903b82bde486c606516c1466bf1fa67f26906d51/propcache-0.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1323cd04d6e92150bcc79d0174ce347ed4b349d748b9358fd2e497b121e03c8", size = 225116 }, - { url = "https://files.pythonhosted.org/packages/ef/ea/f8410c40abcb2e40dffe9adeed017898c930974650a63e5c79b886aa9f73/propcache-0.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b812b3cb6caacd072276ac0492d249f210006c57726b6484a1e1805b3cfeea0", size = 229905 }, - { url = "https://files.pythonhosted.org/packages/ef/5a/a9bf90894001468bf8e6ea293bb00626cc9ef10f8eb7996e9ec29345c7ed/propcache-0.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:742840d1d0438eb7ea4280f3347598f507a199a35a08294afdcc560c3739989d", size = 233221 }, - { url = "https://files.pythonhosted.org/packages/dd/ce/fffdddd9725b690b01d345c1156b4c2cc6dca09ab5c23a6d07b8f37d6e2f/propcache-0.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6e7e4f9167fddc438cd653d826f2222222564daed4116a02a184b464d3ef05", size = 227627 }, - { url = "https://files.pythonhosted.org/packages/58/ae/45c89a5994a334735a3032b48e8e4a98c05d9536ddee0719913dc27da548/propcache-0.3.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a94ffc66738da99232ddffcf7910e0f69e2bbe3a0802e54426dbf0714e1c2ffe", size = 214217 }, - { url = "https://files.pythonhosted.org/packages/01/84/bc60188c3290ff8f5f4a92b9ca2d93a62e449c8daf6fd11ad517ad136926/propcache-0.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3c6ec957025bf32b15cbc6b67afe233c65b30005e4c55fe5768e4bb518d712f1", size = 212921 }, - { url = "https://files.pythonhosted.org/packages/14/b3/39d60224048feef7a96edabb8217dc3f75415457e5ebbef6814f8b2a27b5/propcache-0.3.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:549722908de62aa0b47a78b90531c022fa6e139f9166be634f667ff45632cc92", size = 208200 }, - { url = "https://files.pythonhosted.org/packages/9d/b3/0a6720b86791251273fff8a01bc8e628bc70903513bd456f86cde1e1ef84/propcache-0.3.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5d62c4f6706bff5d8a52fd51fec6069bef69e7202ed481486c0bc3874912c787", size = 208400 }, - { url = "https://files.pythonhosted.org/packages/e9/4f/bb470f3e687790547e2e78105fb411f54e0cdde0d74106ccadd2521c6572/propcache-0.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:24c04f8fbf60094c531667b8207acbae54146661657a1b1be6d3ca7773b7a545", size = 218116 }, - { url = "https://files.pythonhosted.org/packages/34/71/277f7f9add469698ac9724c199bfe06f85b199542121a71f65a80423d62a/propcache-0.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7c5f5290799a3f6539cc5e6f474c3e5c5fbeba74a5e1e5be75587746a940d51e", size = 222911 }, - { url = "https://files.pythonhosted.org/packages/92/e3/a7b9782aef5a2fc765b1d97da9ec7aed2f25a4e985703608e73232205e3f/propcache-0.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4fa0e7c9c3cf7c276d4f6ab9af8adddc127d04e0fcabede315904d2ff76db626", size = 216563 }, - { url = 
"https://files.pythonhosted.org/packages/ab/76/0583ca2c551aa08ffcff87b2c6849c8f01c1f6fb815a5226f0c5c202173e/propcache-0.3.0-cp313-cp313-win32.whl", hash = "sha256:ee0bd3a7b2e184e88d25c9baa6a9dc609ba25b76daae942edfb14499ac7ec374", size = 39763 }, - { url = "https://files.pythonhosted.org/packages/80/ec/c6a84f9a36f608379b95f0e786c111d5465926f8c62f12be8cdadb02b15c/propcache-0.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:1c8f7d896a16da9455f882870a507567d4f58c53504dc2d4b1e1d386dfe4588a", size = 43650 }, - { url = "https://files.pythonhosted.org/packages/ee/95/7d32e3560f5bf83fc2f2a4c1b0c181d327d53d5f85ebd045ab89d4d97763/propcache-0.3.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e560fd75aaf3e5693b91bcaddd8b314f4d57e99aef8a6c6dc692f935cc1e6bbf", size = 82140 }, - { url = "https://files.pythonhosted.org/packages/86/89/752388f12e6027a5e63f5d075f15291ded48e2d8311314fff039da5a9b11/propcache-0.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:65a37714b8ad9aba5780325228598a5b16c47ba0f8aeb3dc0514701e4413d7c0", size = 47296 }, - { url = "https://files.pythonhosted.org/packages/1b/4c/b55c98d586c69180d3048984a57a5ea238bdeeccf82dbfcd598e935e10bb/propcache-0.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:07700939b2cbd67bfb3b76a12e1412405d71019df00ca5697ce75e5ef789d829", size = 46724 }, - { url = "https://files.pythonhosted.org/packages/0f/b6/67451a437aed90c4e951e320b5b3d7eb584ade1d5592f6e5e8f678030989/propcache-0.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c0fdbdf6983526e269e5a8d53b7ae3622dd6998468821d660d0daf72779aefa", size = 291499 }, - { url = "https://files.pythonhosted.org/packages/ee/ff/e4179facd21515b24737e1e26e02615dfb5ed29416eed4cf5bc6ac5ce5fb/propcache-0.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:794c3dd744fad478b6232289c866c25406ecdfc47e294618bdf1697e69bd64a6", size = 293911 }, - { url = "https://files.pythonhosted.org/packages/76/8d/94a8585992a064a23bd54f56c5e58c3b8bf0c0a06ae10e56f2353ae16c3d/propcache-0.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4544699674faf66fb6b4473a1518ae4999c1b614f0b8297b1cef96bac25381db", size = 293301 }, - { url = "https://files.pythonhosted.org/packages/b0/b8/2c860c92b4134f68c7716c6f30a0d723973f881c32a6d7a24c4ddca05fdf/propcache-0.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddb8870bdb83456a489ab67c6b3040a8d5a55069aa6f72f9d872235fbc52f54", size = 281947 }, - { url = "https://files.pythonhosted.org/packages/cd/72/b564be7411b525d11757b713c757c21cd4dc13b6569c3b2b8f6d3c96fd5e/propcache-0.3.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f857034dc68d5ceb30fb60afb6ff2103087aea10a01b613985610e007053a121", size = 268072 }, - { url = "https://files.pythonhosted.org/packages/37/68/d94649e399e8d7fc051e5a4f2334efc567993525af083db145a70690a121/propcache-0.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:02df07041e0820cacc8f739510078f2aadcfd3fc57eaeeb16d5ded85c872c89e", size = 275190 }, - { url = "https://files.pythonhosted.org/packages/d8/3c/446e125f5bbbc1922964dd67cb541c01cdb678d811297b79a4ff6accc843/propcache-0.3.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f47d52fd9b2ac418c4890aad2f6d21a6b96183c98021f0a48497a904199f006e", size = 254145 }, - { url = "https://files.pythonhosted.org/packages/f4/80/fd3f741483dc8e59f7ba7e05eaa0f4e11677d7db2077522b92ff80117a2a/propcache-0.3.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:9ff4e9ecb6e4b363430edf2c6e50173a63e0820e549918adef70515f87ced19a", size = 257163 }, - { url = "https://files.pythonhosted.org/packages/dc/cf/6292b5ce6ed0017e6a89024a827292122cc41b6259b30ada0c6732288513/propcache-0.3.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ecc2920630283e0783c22e2ac94427f8cca29a04cfdf331467d4f661f4072dac", size = 280249 }, - { url = "https://files.pythonhosted.org/packages/e8/f0/fd9b8247b449fe02a4f96538b979997e229af516d7462b006392badc59a1/propcache-0.3.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:c441c841e82c5ba7a85ad25986014be8d7849c3cfbdb6004541873505929a74e", size = 288741 }, - { url = "https://files.pythonhosted.org/packages/64/71/cf831fdc2617f86cfd7f414cfc487d018e722dac8acc098366ce9bba0941/propcache-0.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6c929916cbdb540d3407c66f19f73387f43e7c12fa318a66f64ac99da601bcdf", size = 277061 }, - { url = "https://files.pythonhosted.org/packages/42/78/9432542a35d944abeca9e02927a0de38cd7a298466d8ffa171536e2381c3/propcache-0.3.0-cp313-cp313t-win32.whl", hash = "sha256:0c3e893c4464ebd751b44ae76c12c5f5c1e4f6cbd6fbf67e3783cd93ad221863", size = 42252 }, - { url = "https://files.pythonhosted.org/packages/6f/45/960365f4f8978f48ebb56b1127adf33a49f2e69ecd46ac1f46d6cf78a79d/propcache-0.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:75e872573220d1ee2305b35c9813626e620768248425f58798413e9c39741f46", size = 46425 }, - { url = "https://files.pythonhosted.org/packages/b5/35/6c4c6fc8774a9e3629cd750dc24a7a4fb090a25ccd5c3246d127b70f9e22/propcache-0.3.0-py3-none-any.whl", hash = "sha256:67dda3c7325691c2081510e92c561f465ba61b975f481735aefdfc845d2cd043", size = 12101 }, +version = "0.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/c8/2a13f78d82211490855b2fb303b6721348d0787fdd9a12ac46d99d3acde1/propcache-0.2.1.tar.gz", hash = "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64", size = 41735 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/a5/0ea64c9426959ef145a938e38c832fc551843481d356713ececa9a8a64e8/propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6", size = 79296 }, + { url = "https://files.pythonhosted.org/packages/76/5a/916db1aba735f55e5eca4733eea4d1973845cf77dfe67c2381a2ca3ce52d/propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2", size = 45622 }, + { url = "https://files.pythonhosted.org/packages/2d/62/685d3cf268b8401ec12b250b925b21d152b9d193b7bffa5fdc4815c392c2/propcache-0.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea", size = 45133 }, + { url = "https://files.pythonhosted.org/packages/4d/3d/31c9c29ee7192defc05aa4d01624fd85a41cf98e5922aaed206017329944/propcache-0.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212", size = 204809 }, + { url = "https://files.pythonhosted.org/packages/10/a1/e4050776f4797fc86140ac9a480d5dc069fbfa9d499fe5c5d2fa1ae71f07/propcache-0.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3", size = 219109 }, + { url = 
"https://files.pythonhosted.org/packages/c9/c0/e7ae0df76343d5e107d81e59acc085cea5fd36a48aa53ef09add7503e888/propcache-0.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d", size = 217368 }, + { url = "https://files.pythonhosted.org/packages/fc/e1/e0a2ed6394b5772508868a977d3238f4afb2eebaf9976f0b44a8d347ad63/propcache-0.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634", size = 205124 }, + { url = "https://files.pythonhosted.org/packages/50/c1/e388c232d15ca10f233c778bbdc1034ba53ede14c207a72008de45b2db2e/propcache-0.2.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2", size = 195463 }, + { url = "https://files.pythonhosted.org/packages/0a/fd/71b349b9def426cc73813dbd0f33e266de77305e337c8c12bfb0a2a82bfb/propcache-0.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958", size = 198358 }, + { url = "https://files.pythonhosted.org/packages/02/f2/d7c497cd148ebfc5b0ae32808e6c1af5922215fe38c7a06e4e722fe937c8/propcache-0.2.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c", size = 195560 }, + { url = "https://files.pythonhosted.org/packages/bb/57/f37041bbe5e0dfed80a3f6be2612a3a75b9cfe2652abf2c99bef3455bbad/propcache-0.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583", size = 196895 }, + { url = "https://files.pythonhosted.org/packages/83/36/ae3cc3e4f310bff2f064e3d2ed5558935cc7778d6f827dce74dcfa125304/propcache-0.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf", size = 207124 }, + { url = "https://files.pythonhosted.org/packages/8c/c4/811b9f311f10ce9d31a32ff14ce58500458443627e4df4ae9c264defba7f/propcache-0.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034", size = 210442 }, + { url = "https://files.pythonhosted.org/packages/18/dd/a1670d483a61ecac0d7fc4305d91caaac7a8fc1b200ea3965a01cf03bced/propcache-0.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b", size = 203219 }, + { url = "https://files.pythonhosted.org/packages/f9/2d/30ced5afde41b099b2dc0c6573b66b45d16d73090e85655f1a30c5a24e07/propcache-0.2.1-cp310-cp310-win32.whl", hash = "sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4", size = 40313 }, + { url = "https://files.pythonhosted.org/packages/23/84/bd9b207ac80da237af77aa6e153b08ffa83264b1c7882495984fcbfcf85c/propcache-0.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba", size = 44428 }, + { url = "https://files.pythonhosted.org/packages/bc/0f/2913b6791ebefb2b25b4efd4bb2299c985e09786b9f5b19184a88e5778dd/propcache-0.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16", size = 79297 }, + { url = "https://files.pythonhosted.org/packages/cf/73/af2053aeccd40b05d6e19058419ac77674daecdd32478088b79375b9ab54/propcache-0.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717", size = 45611 }, + { url = "https://files.pythonhosted.org/packages/3c/09/8386115ba7775ea3b9537730e8cf718d83bbf95bffe30757ccf37ec4e5da/propcache-0.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3", size = 45146 }, + { url = "https://files.pythonhosted.org/packages/03/7a/793aa12f0537b2e520bf09f4c6833706b63170a211ad042ca71cbf79d9cb/propcache-0.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9", size = 232136 }, + { url = "https://files.pythonhosted.org/packages/f1/38/b921b3168d72111769f648314100558c2ea1d52eb3d1ba7ea5c4aa6f9848/propcache-0.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787", size = 239706 }, + { url = "https://files.pythonhosted.org/packages/14/29/4636f500c69b5edea7786db3c34eb6166f3384b905665ce312a6e42c720c/propcache-0.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465", size = 238531 }, + { url = "https://files.pythonhosted.org/packages/85/14/01fe53580a8e1734ebb704a3482b7829a0ef4ea68d356141cf0994d9659b/propcache-0.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af", size = 231063 }, + { url = "https://files.pythonhosted.org/packages/33/5c/1d961299f3c3b8438301ccfbff0143b69afcc30c05fa28673cface692305/propcache-0.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7", size = 220134 }, + { url = "https://files.pythonhosted.org/packages/00/d0/ed735e76db279ba67a7d3b45ba4c654e7b02bc2f8050671ec365d8665e21/propcache-0.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f", size = 220009 }, + { url = "https://files.pythonhosted.org/packages/75/90/ee8fab7304ad6533872fee982cfff5a53b63d095d78140827d93de22e2d4/propcache-0.2.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54", size = 212199 }, + { url = "https://files.pythonhosted.org/packages/eb/ec/977ffaf1664f82e90737275873461695d4c9407d52abc2f3c3e24716da13/propcache-0.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505", size = 214827 }, + { url = "https://files.pythonhosted.org/packages/57/48/031fb87ab6081764054821a71b71942161619549396224cbb242922525e8/propcache-0.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82", size = 228009 }, + { url = "https://files.pythonhosted.org/packages/1a/06/ef1390f2524850838f2390421b23a8b298f6ce3396a7cc6d39dedd4047b0/propcache-0.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca", size = 231638 }, + { url = "https://files.pythonhosted.org/packages/38/2a/101e6386d5a93358395da1d41642b79c1ee0f3b12e31727932b069282b1d/propcache-0.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e", size = 222788 }, + { url = 
"https://files.pythonhosted.org/packages/db/81/786f687951d0979007e05ad9346cd357e50e3d0b0f1a1d6074df334b1bbb/propcache-0.2.1-cp311-cp311-win32.whl", hash = "sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034", size = 40170 }, + { url = "https://files.pythonhosted.org/packages/cf/59/7cc7037b295d5772eceb426358bb1b86e6cab4616d971bd74275395d100d/propcache-0.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3", size = 44404 }, + { url = "https://files.pythonhosted.org/packages/4c/28/1d205fe49be8b1b4df4c50024e62480a442b1a7b818e734308bb0d17e7fb/propcache-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a", size = 79588 }, + { url = "https://files.pythonhosted.org/packages/21/ee/fc4d893f8d81cd4971affef2a6cb542b36617cd1d8ce56b406112cb80bf7/propcache-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0", size = 45825 }, + { url = "https://files.pythonhosted.org/packages/4a/de/bbe712f94d088da1d237c35d735f675e494a816fd6f54e9db2f61ef4d03f/propcache-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d", size = 45357 }, + { url = "https://files.pythonhosted.org/packages/7f/14/7ae06a6cf2a2f1cb382586d5a99efe66b0b3d0c6f9ac2f759e6f7af9d7cf/propcache-0.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4", size = 241869 }, + { url = "https://files.pythonhosted.org/packages/cc/59/227a78be960b54a41124e639e2c39e8807ac0c751c735a900e21315f8c2b/propcache-0.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d", size = 247884 }, + { url = "https://files.pythonhosted.org/packages/84/58/f62b4ffaedf88dc1b17f04d57d8536601e4e030feb26617228ef930c3279/propcache-0.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5", size = 248486 }, + { url = "https://files.pythonhosted.org/packages/1c/07/ebe102777a830bca91bbb93e3479cd34c2ca5d0361b83be9dbd93104865e/propcache-0.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24", size = 243649 }, + { url = "https://files.pythonhosted.org/packages/ed/bc/4f7aba7f08f520376c4bb6a20b9a981a581b7f2e385fa0ec9f789bb2d362/propcache-0.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff", size = 229103 }, + { url = "https://files.pythonhosted.org/packages/fe/d5/04ac9cd4e51a57a96f78795e03c5a0ddb8f23ec098b86f92de028d7f2a6b/propcache-0.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f", size = 226607 }, + { url = "https://files.pythonhosted.org/packages/e3/f0/24060d959ea41d7a7cc7fdbf68b31852331aabda914a0c63bdb0e22e96d6/propcache-0.2.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec", size = 221153 }, + { url = "https://files.pythonhosted.org/packages/77/a7/3ac76045a077b3e4de4859a0753010765e45749bdf53bd02bc4d372da1a0/propcache-0.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348", size = 222151 }, + { url = "https://files.pythonhosted.org/packages/e7/af/5e29da6f80cebab3f5a4dcd2a3240e7f56f2c4abf51cbfcc99be34e17f0b/propcache-0.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6", size = 233812 }, + { url = "https://files.pythonhosted.org/packages/8c/89/ebe3ad52642cc5509eaa453e9f4b94b374d81bae3265c59d5c2d98efa1b4/propcache-0.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6", size = 238829 }, + { url = "https://files.pythonhosted.org/packages/e9/2f/6b32f273fa02e978b7577159eae7471b3cfb88b48563b1c2578b2d7ca0bb/propcache-0.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518", size = 230704 }, + { url = "https://files.pythonhosted.org/packages/5c/2e/f40ae6ff5624a5f77edd7b8359b208b5455ea113f68309e2b00a2e1426b6/propcache-0.2.1-cp312-cp312-win32.whl", hash = "sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246", size = 40050 }, + { url = "https://files.pythonhosted.org/packages/3b/77/a92c3ef994e47180862b9d7d11e37624fb1c00a16d61faf55115d970628b/propcache-0.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1", size = 44117 }, + { url = "https://files.pythonhosted.org/packages/0f/2a/329e0547cf2def8857157f9477669043e75524cc3e6251cef332b3ff256f/propcache-0.2.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc", size = 77002 }, + { url = "https://files.pythonhosted.org/packages/12/2d/c4df5415e2382f840dc2ecbca0eeb2293024bc28e57a80392f2012b4708c/propcache-0.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9", size = 44639 }, + { url = "https://files.pythonhosted.org/packages/d0/5a/21aaa4ea2f326edaa4e240959ac8b8386ea31dedfdaa636a3544d9e7a408/propcache-0.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439", size = 44049 }, + { url = "https://files.pythonhosted.org/packages/4e/3e/021b6cd86c0acc90d74784ccbb66808b0bd36067a1bf3e2deb0f3845f618/propcache-0.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536", size = 224819 }, + { url = "https://files.pythonhosted.org/packages/3c/57/c2fdeed1b3b8918b1770a133ba5c43ad3d78e18285b0c06364861ef5cc38/propcache-0.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629", size = 229625 }, + { url = "https://files.pythonhosted.org/packages/9d/81/70d4ff57bf2877b5780b466471bebf5892f851a7e2ca0ae7ffd728220281/propcache-0.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b", size = 232934 }, + { url = "https://files.pythonhosted.org/packages/3c/b9/bb51ea95d73b3fb4100cb95adbd4e1acaf2cbb1fd1083f5468eeb4a099a8/propcache-0.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052", size = 227361 }, + { url = 
"https://files.pythonhosted.org/packages/f1/20/3c6d696cd6fd70b29445960cc803b1851a1131e7a2e4ee261ee48e002bcd/propcache-0.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce", size = 213904 }, + { url = "https://files.pythonhosted.org/packages/a1/cb/1593bfc5ac6d40c010fa823f128056d6bc25b667f5393781e37d62f12005/propcache-0.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d", size = 212632 }, + { url = "https://files.pythonhosted.org/packages/6d/5c/e95617e222be14a34c709442a0ec179f3207f8a2b900273720501a70ec5e/propcache-0.2.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce", size = 207897 }, + { url = "https://files.pythonhosted.org/packages/8e/3b/56c5ab3dc00f6375fbcdeefdede5adf9bee94f1fab04adc8db118f0f9e25/propcache-0.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95", size = 208118 }, + { url = "https://files.pythonhosted.org/packages/86/25/d7ef738323fbc6ebcbce33eb2a19c5e07a89a3df2fded206065bd5e868a9/propcache-0.2.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf", size = 217851 }, + { url = "https://files.pythonhosted.org/packages/b3/77/763e6cef1852cf1ba740590364ec50309b89d1c818e3256d3929eb92fabf/propcache-0.2.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f", size = 222630 }, + { url = "https://files.pythonhosted.org/packages/4f/e9/0f86be33602089c701696fbed8d8c4c07b6ee9605c5b7536fd27ed540c5b/propcache-0.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30", size = 216269 }, + { url = "https://files.pythonhosted.org/packages/cc/02/5ac83217d522394b6a2e81a2e888167e7ca629ef6569a3f09852d6dcb01a/propcache-0.2.1-cp313-cp313-win32.whl", hash = "sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6", size = 39472 }, + { url = "https://files.pythonhosted.org/packages/f4/33/d6f5420252a36034bc8a3a01171bc55b4bff5df50d1c63d9caa50693662f/propcache-0.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1", size = 43363 }, + { url = "https://files.pythonhosted.org/packages/41/b6/c5319caea262f4821995dca2107483b94a3345d4607ad797c76cb9c36bcc/propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54", size = 11818 }, ] [[package]] name = "proto-plus" -version = "1.26.0" +version = "1.25.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/26/79/a5c6cbb42268cfd3ddc652dc526889044a8798c688a03ff58e5e92b743c8/proto_plus-1.26.0.tar.gz", hash = "sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22", size = 56136 } +sdist = { url = "https://files.pythonhosted.org/packages/7e/05/74417b2061e1bf1b82776037cad97094228fa1c1b6e82d08a78d3fb6ddb6/proto_plus-1.25.0.tar.gz", hash = "sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91", size = 56124 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/42/c3/59308ccc07b34980f9d532f7afc718a9f32b40e52cde7a740df8d55632fb/proto_plus-1.26.0-py3-none-any.whl", hash = "sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7", size = 50166 }, + { url = "https://files.pythonhosted.org/packages/dd/25/0b7cc838ae3d76d46539020ec39fc92bfc9acc29367e58fe912702c2a79e/proto_plus-1.25.0-py3-none-any.whl", hash = "sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961", size = 50126 }, ] [[package]] @@ -3722,30 +3555,30 @@ wheels = [ [[package]] name = "psutil" -version = "7.0.0" +version = "6.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003 } +sdist = { url = "https://files.pythonhosted.org/packages/1f/5a/07871137bb752428aa4b659f910b399ba6f291156bdea939be3e96cae7cb/psutil-6.1.1.tar.gz", hash = "sha256:cf8496728c18f2d0b45198f06895be52f36611711746b7f30c464b422b50e2f5", size = 508502 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051 }, - { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535 }, - { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004 }, - { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986 }, - { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544 }, - { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053 }, - { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885 }, + { url = "https://files.pythonhosted.org/packages/61/99/ca79d302be46f7bdd8321089762dd4476ee725fce16fc2b2e1dbba8cac17/psutil-6.1.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed7fe2231a444fc219b9c42d0376e0a9a1a72f16c5cfa0f68d19f1a0663e8", size = 247511 }, + { url = 
"https://files.pythonhosted.org/packages/0b/6b/73dbde0dd38f3782905d4587049b9be64d76671042fdcaf60e2430c6796d/psutil-6.1.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0bdd4eab935276290ad3cb718e9809412895ca6b5b334f5a9111ee6d9aff9377", size = 248985 }, + { url = "https://files.pythonhosted.org/packages/17/38/c319d31a1d3f88c5b79c68b3116c129e5133f1822157dd6da34043e32ed6/psutil-6.1.1-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6e06c20c05fe95a3d7302d74e7097756d4ba1247975ad6905441ae1b5b66003", size = 284488 }, + { url = "https://files.pythonhosted.org/packages/9c/39/0f88a830a1c8a3aba27fededc642da37613c57cbff143412e3536f89784f/psutil-6.1.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97f7cb9921fbec4904f522d972f0c0e1f4fabbdd4e0287813b21215074a0f160", size = 287477 }, + { url = "https://files.pythonhosted.org/packages/47/da/99f4345d4ddf2845cb5b5bd0d93d554e84542d116934fde07a0c50bd4e9f/psutil-6.1.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33431e84fee02bc84ea36d9e2c4a6d395d479c9dd9bba2376c1f6ee8f3a4e0b3", size = 289017 }, + { url = "https://files.pythonhosted.org/packages/38/53/bd755c2896f4461fd4f36fa6a6dcb66a88a9e4b9fd4e5b66a77cf9d4a584/psutil-6.1.1-cp37-abi3-win32.whl", hash = "sha256:eaa912e0b11848c4d9279a93d7e2783df352b082f40111e078388701fd479e53", size = 250602 }, + { url = "https://files.pythonhosted.org/packages/7b/d7/7831438e6c3ebbfa6e01a927127a6cb42ad3ab844247f3c5b96bea25d73d/psutil-6.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:f35cfccb065fff93529d2afb4a2e89e363fe63ca1e4a5da22b603a85833c2649", size = 254444 }, ] [[package]] name = "psycopg" -version = "3.2.5" +version = "3.2.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, { name = "tzdata", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0e/cf/dc1a4d45e3c6222fe272a245c5cea9a969a7157639da606ac7f2ab5de3a1/psycopg-3.2.5.tar.gz", hash = "sha256:f5f750611c67cb200e85b408882f29265c66d1de7f813add4f8125978bfd70e8", size = 156158 } +sdist = { url = "https://files.pythonhosted.org/packages/e0/f2/954b1467b3e2ca5945b83b5e320268be1f4df486c3e8ffc90f4e4b707979/psycopg-3.2.4.tar.gz", hash = "sha256:f26f1346d6bf1ef5f5ef1714dd405c67fb365cfd1c6cea07de1792747b167b92", size = 156109 } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/f3/14a1370b1449ca875d5e353ef02cb9db6b70bd46ec361c236176837c0be1/psycopg-3.2.5-py3-none-any.whl", hash = "sha256:b782130983e5b3de30b4c529623d3687033b4dafa05bb661fc6bf45837ca5879", size = 198749 }, + { url = "https://files.pythonhosted.org/packages/40/49/15114d5f7ee68983f4e1a24d47e75334568960352a07c6f0e796e912685d/psycopg-3.2.4-py3-none-any.whl", hash = "sha256:43665368ccd48180744cab26b74332f46b63b7e06e8ce0775547a3533883d381", size = 198716 }, ] [package.optional-dependencies] @@ -3758,65 +3591,65 @@ pool = [ [[package]] name = "psycopg-binary" -version = "3.2.5" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/30/af3806081adc75b5a8addde839d4c6b171a8c5d0d07dd92de20ca4dd6717/psycopg_binary-3.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:a82211a43372cba9b1555a110e84e679deec2dc9463ae4c736977dad99dca5ed", size = 3868990 }, - { url = "https://files.pythonhosted.org/packages/31/77/31968655db2efe83c519e6296ff3a85a0c9e50432e0c11c8ffae1b404870/psycopg_binary-3.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e7d215a43343d91ba08301865f059d9518818d66a222a85fb425e4156716f5a6", size = 3938253 }, - { url = "https://files.pythonhosted.org/packages/b5/d7/c898cd7d5c672d1c16b10dfde6ab220a6d295ff136711bf8ebcd1bebe91e/psycopg_binary-3.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f893c0ed3d5c7b83b76b1f8f7d3ca5a03e38bcd3cab5d65b5c25a0d1064aca4", size = 4523098 }, - { url = "https://files.pythonhosted.org/packages/98/d7/84517d0f62ddb10ca15254b6a63596f0e47ebd462b3ed30473b191a2a57f/psycopg_binary-3.2.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d10ce4c39eb9631381a0c3792727946a4391e843625a7ee9579ac6bb11495a5", size = 4329658 }, - { url = "https://files.pythonhosted.org/packages/3d/65/9c6addcf00ba80d2355ffa825d6537d60313c24d4b6db438f631f9ff0ac7/psycopg_binary-3.2.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a602d9fdb567cca090ca19ac3ebf10219065be2a4f8cf9eb8356cffb5a7ab1d", size = 4575351 }, - { url = "https://files.pythonhosted.org/packages/a5/90/9f2c41b3b42d8cd8b9866f0bbd27a5796a1ca8042a1a019b39a6645df523/psycopg_binary-3.2.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c37eb3be7a6be93f4925ccf52bbfa60244da6c63201770a709dd81a3d2d08534", size = 4287136 }, - { url = "https://files.pythonhosted.org/packages/20/e6/2476e30ff4b02588799dc6d0cff244cea448f9a2a80e37b48c39a64a81be/psycopg_binary-3.2.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7d5f1bfc848a94e0d63fe693adee4f88bd9e5c415ecb4c9c17d2d44eba6795a6", size = 3872875 }, - { url = "https://files.pythonhosted.org/packages/ba/bc/93272521e571df3a6ce85553e2eba424c7abb2ded006b8d6643c2a3cc0f2/psycopg_binary-3.2.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b5e0acbc991472188c9df40eb56d8a97ad3ad00d4de560b8b74bdc2d94041a8f", size = 3341000 }, - { url = "https://files.pythonhosted.org/packages/a2/d7/930a127d2b4817445a08153a1b203655d3da52e79e4c66843d8bd7e3643f/psycopg_binary-3.2.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d4e0c1b1aa5283f6d9a384ffc7a8400d25386bb98fdb9bddae446e4ef4da7366", size = 3439711 }, - { url = "https://files.pythonhosted.org/packages/aa/4a/73ea25870d0b4cac60ad768e6cdf4014e7a44036ec29d3820876c62efea0/psycopg_binary-3.2.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c3c5fa3d4fa0a651cefab391b783f89bc5e331afa0a4e93c9b16141993fa05c8", size = 3464993 }, - { url = "https://files.pythonhosted.org/packages/55/1d/790223b15283904759ef48279dd7201dc4a9d088c5196f7b529a52c5b40d/psycopg_binary-3.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:7efe6c732fd2d7e22d72dc4f7cf9b644020adacfff61b0a8a151343da8e661c0", size = 2791126 }, - { url = "https://files.pythonhosted.org/packages/27/ac/201a9bcfe4a2ae0cc1999c55dff9a2da8daf829e9baca103045ed1c41876/psycopg_binary-3.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:393ab353196d364858b47317d27804ecc58ab56dbde32217bd67f0f2f2980662", size = 3876607 }, - { url = "https://files.pythonhosted.org/packages/4a/ef/2d7722bee81c0a2619b8748070cea8ec299979f677479554e299a864d171/psycopg_binary-3.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:71d82dbc7c6c7f5746468e7992e5483aa45b12250d78d220a2431ab88795825c", size = 3942789 }, - { url = 
"https://files.pythonhosted.org/packages/f6/dc/a1fe4b61d0f614ab6283a9c5a35747b8fd2b72d7c21f201d6772394c0c09/psycopg_binary-3.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39e2cd10bf15442d95c3f48376b25dc33360418ea6c3c05884d8bf42407768c0", size = 4519457 }, - { url = "https://files.pythonhosted.org/packages/2c/5a/bbf5ec9fea9cc81c77d37957777d9b15492884437929fc634fc6dc16aade/psycopg_binary-3.2.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7623659d44a6aa032be4a066c658ba45009d768c2481526fbef7c609702af116", size = 4324376 }, - { url = "https://files.pythonhosted.org/packages/4b/17/c785b4a795860bf67f0dc1e03129cb8e9a3be45d21049ccbffeae9c576e9/psycopg_binary-3.2.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cd9ebf335262e864d740f9dad3f672f61162cc0d4825a5eb5cf50df334a688f", size = 4578729 }, - { url = "https://files.pythonhosted.org/packages/e8/bb/c7bcb17b60040777fb26efd2db5f61bc84453e380114be480ebbedc20829/psycopg_binary-3.2.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc8bc40d82d1ee8dec136e10707c7f3147a6322fd8014e174a0f3446fb793649", size = 4281876 }, - { url = "https://files.pythonhosted.org/packages/2c/a2/ea6d36644fbccd462f4e3bd79149e94b284d4f90f24671bd50ce5e9e9dc5/psycopg_binary-3.2.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:11e3ed8b94c750d54fc3e4502dd930fb0fd041629845b6a7ce089873ac9756b0", size = 3871313 }, - { url = "https://files.pythonhosted.org/packages/09/38/b32728e13d65bac03d556f730af02509310f451ee873f8662bfc40b3f6ef/psycopg_binary-3.2.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:48fcb12a0a72fdfe4102bdb1252a7366e8d73a2c89fe6ce5923be890de367c2f", size = 3334458 }, - { url = "https://files.pythonhosted.org/packages/ca/69/fcd3d845ff2a39fad7783249c8add4966cb12a50f40df3cbcd743fa24c10/psycopg_binary-3.2.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:51a96d9fe51f718912b4a0089784f1f32d800217499fd0f0095b888506aba4c5", size = 3432832 }, - { url = "https://files.pythonhosted.org/packages/f6/9c/90baa71833da03c08ff9d4e12a4bcebfb15c1b0259738f7d3970c2292ab9/psycopg_binary-3.2.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eb8293d66c6a4ddc72fceb7ad0e111cb196cc394954ae0f9b63c251d97f1b00e", size = 3463280 }, - { url = "https://files.pythonhosted.org/packages/4f/42/f40ca24a89de58a47e54f82d7124d7dcf996781c89a5ed7bfe722e96da55/psycopg_binary-3.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:5b81342e139ddccfa417832089cd213bd4beacd7a1462ca4019cafe71682d177", size = 2794275 }, - { url = "https://files.pythonhosted.org/packages/84/eb/175a81bfd26734eeaaa39b651bc44a3c5e3fce1190963ace21e428c4d2ee/psycopg_binary-3.2.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a4321ee8180982d70458d3e8378e31448901bf0ee40fe0d410a87413578f4098", size = 3857964 }, - { url = "https://files.pythonhosted.org/packages/ca/2e/0d57047372c3dd31becc1a48185862d7e6714ffbdc1401742a32f2294f79/psycopg_binary-3.2.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2cc86657c05e09c701e97f87132cd58e0d55381dd568520081ac1fe7580a9bbb", size = 3940056 }, - { url = "https://files.pythonhosted.org/packages/c5/2f/339a18b28787d33fe892d1ae1fbaa83739c6274327cbf9ada4158322ad9d/psycopg_binary-3.2.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244bebaa9734a236b7157fb57c065b6c0f2344281916187bd73f951df1899e0", size = 4499081 }, - { url = 
"https://files.pythonhosted.org/packages/42/21/32d7115b2cbd87d043ad494254fd7c4c8652ac3c32f49bb571fd8111caf3/psycopg_binary-3.2.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21b839f9bfd77ed074f7f71464a43f453400c57d038a0ba0716329a28e335897", size = 4307502 }, - { url = "https://files.pythonhosted.org/packages/00/67/e99b58f616dd02c5e52c179b3df047d9683a9f699993cb1795ee435db598/psycopg_binary-3.2.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7376b13504396da9678b646f5338462347da01286b2a688a0d8493ec764683a2", size = 4547821 }, - { url = "https://files.pythonhosted.org/packages/0d/64/9d13ee0fed78a47c506a93d1e67ee53cc7ffd75c1f5885b59d17810fe5cd/psycopg_binary-3.2.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:473f6827cf1faf3924eb77146d1e85126a1b5e48a88053b8d8b78dd29e971d78", size = 4259849 }, - { url = "https://files.pythonhosted.org/packages/ea/f2/172b6ebcd60a1a86f5ce1a539cfb93ffbe42fc9bc7ab2e1ed79e99a75d71/psycopg_binary-3.2.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:28bd5cb2324567e5e70f07fe1d646398d6b0e210e28b49be0e69593590a59980", size = 3847280 }, - { url = "https://files.pythonhosted.org/packages/0f/51/9cd26c6b862d499b4b25ea173ae6e21c9d460ddce6b09cbe9501dff66211/psycopg_binary-3.2.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:48f97936145cb7de18b95d85670b2d3e2c257277263272be05815b74fb0ef195", size = 3320262 }, - { url = "https://files.pythonhosted.org/packages/51/7d/2dac61ff16476e77c6ce0a49a30b130e2ba6ad08c83c4950591b4bc49cf2/psycopg_binary-3.2.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8e6f2bef5aed021fbdf46323d3cd8847bf960efb56394698644a8ee2306f8892", size = 3400254 }, - { url = "https://files.pythonhosted.org/packages/45/67/bd36932c24f96dc1bc21fb18b1bdebcda7b9791067f7151a1c5dc1193e6b/psycopg_binary-3.2.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3d2e57a1d06f3968e49e948ba374f21a7d8dcf44f37d582a4aeddeb7c85ce239", size = 3438916 }, - { url = "https://files.pythonhosted.org/packages/00/ab/882b861cfcf83d7faffe583e1e092117cd66eacc86fb4517d27973e52f35/psycopg_binary-3.2.5-cp312-cp312-win_amd64.whl", hash = "sha256:2cbb8649cfdacbd14e17f5ab78edc52d33350013888518c73e90c5d17d7bea55", size = 2782504 }, - { url = "https://files.pythonhosted.org/packages/81/3d/26483d75e1a5daa93cbb47ee7cde96fac07a9b026058b036b00a04f5c012/psycopg_binary-3.2.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2dbaf32c18c0d11c4480016b89c9c5cadb7b64c55de7f181d222b189bd13a558", size = 3852616 }, - { url = "https://files.pythonhosted.org/packages/90/cb/542bd0eab110ed2ddcc02cbe8f5df0afe3e86bd843c533fc6a795ffd7c0f/psycopg_binary-3.2.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ca5e36a3e7480a5c09aed99ecdb8e6554b21485c3b064297fe77f7b1b5806106", size = 3936563 }, - { url = "https://files.pythonhosted.org/packages/e1/43/2b347816983a5b0f1cc3e608eae4650422476187e047e574981081bcf9ec/psycopg_binary-3.2.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9abe093a303e25ac58774a11241150e2fe2947358d1ca12521ad03c90b131060", size = 4499166 }, - { url = "https://files.pythonhosted.org/packages/3f/0d/d7ac5289dfa1163b0fcce9aeb848a7f4499d7b3ef34f1de565d0ba9a51bd/psycopg_binary-3.2.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a91b0e096fdfeb52d86bb8f5ee25dc22483d6960af9b968e6b381a8ec5bfbf82", size = 4311647 }, - { url = 
"https://files.pythonhosted.org/packages/7b/a2/b238d91cbbc5953ff6910737b5a598cc4d5aad84453052005891cec329b3/psycopg_binary-3.2.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3eb71cfc35116e4a8e336b7e785f1fe06ca23b4516a48ea91facd577d1a1fdf6", size = 4547848 }, - { url = "https://files.pythonhosted.org/packages/d7/33/e78ae02d8f23753af2884303370b914a5d172f76fed13bfde380ec473f53/psycopg_binary-3.2.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98efaedf2bf79f4d563ca039a57a025b72847bd80568f54709cc39fc1404772c", size = 4261732 }, - { url = "https://files.pythonhosted.org/packages/44/9a/1745ff5c6e4c715aa71f3da3f393022ec0c7cc972fa0ee7296df8871d6d6/psycopg_binary-3.2.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba4a610882171bdaae0779f14e0ff45f3ee271fd2dbf16cdadfc81bd67323232", size = 3850803 }, - { url = "https://files.pythonhosted.org/packages/7b/1c/933fb04560e7bcf5f24c632f9381e8700dcf8462adcd32eabd6192480d66/psycopg_binary-3.2.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1494827c43265820d5dcdc6f8086521bc7dd04b9da8831310978a788cdcd2e62", size = 3320315 }, - { url = "https://files.pythonhosted.org/packages/5d/36/111e2db9c3ff5123da4ce814aa9462d242a7c393f132a4005ec427e09903/psycopg_binary-3.2.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7a94020821723a6a210206ddb458001f3ed27e1e6a0555b9422bebf7ead8ff37", size = 3403225 }, - { url = "https://files.pythonhosted.org/packages/90/04/246efe587463d13b015202ab344e12e8e30ea9ba90ca952def0469b95a9e/psycopg_binary-3.2.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:659f2c675d478b1bc01b95a8d3ded74fa939b370e71ffbecd496f617b215eb05", size = 3440446 }, - { url = "https://files.pythonhosted.org/packages/92/75/5e15e7a6ad4c6a00fe1a28fe704310dc7f7b26dbd5e6e14c817e7899451b/psycopg_binary-3.2.5-cp313-cp313-win_amd64.whl", hash = "sha256:6b581da13126b8715c0c0585cd37ce934c9864d44b2a4019f5487c0b943275e6", size = 2783095 }, +version = "3.2.4" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/7b/6d7a4626b49e227125f8edf6f114dd8e9a9b22fc4f0abc3b2b0068d5f2bd/psycopg_binary-3.2.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c716f75b5c0388fc5283b5124046292c727511dd8c6aa59ca2dc644b9a2ed0cd", size = 3862864 }, + { url = "https://files.pythonhosted.org/packages/2b/7b/bc0dbb8384997e1321ffb265f96e68ba8584c2af58229816c16809218bdf/psycopg_binary-3.2.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2e8050347018f596a63f5dccbb92fb68bca52b13912cb8fc40184b24c0e534f", size = 3934048 }, + { url = "https://files.pythonhosted.org/packages/42/c0/8a8034650e4618efc8c0be32c30469933a1ddac1656525c0c6b2b2151736/psycopg_binary-3.2.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04171f9af9ab567c0fd339bac06f2c75836db839cebac5bd07824778dafa7f0e", size = 4516741 }, + { url = "https://files.pythonhosted.org/packages/b8/6c/714572fc7c59295498287b9b4b965e3b1d6ff5758c310535a2f02d159688/psycopg_binary-3.2.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7ba7b2ff25a6405826f627fb7d0f1e06e5c08ae25ffabc74a5e9ec7b0a63b85", size = 4323332 }, + { url = "https://files.pythonhosted.org/packages/64/19/a807021e48719cf226a7b520fd0c9c741577ad8974ecd264efe03862d80c/psycopg_binary-3.2.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e58eeba520d405b2ad72dffaafd04d0b592bef870e718bf37c261e89a75450a", size = 4569646 }, + { url = 
"https://files.pythonhosted.org/packages/67/78/70c515175c623bbc505d015ef1ee55b1ee4d0878985a95d4d6317fdd6894/psycopg_binary-3.2.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb18cfbb1cfc8172786ceefd314f0faa05c40ea93b3db7194d0f6bbbbfedb42a", size = 4279629 }, + { url = "https://files.pythonhosted.org/packages/0f/02/8a0395ac8f69320ca26f4f7ec7fd16620671ba002072e01ed5fb13c29a38/psycopg_binary-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:769804b4f753ddec9403183a6d4577d5b696fc49c2451421013fb06d6fa2f288", size = 3868189 }, + { url = "https://files.pythonhosted.org/packages/b9/a8/fa254c48513580c9cae242b5fac4af4dd1227178061a27a2eb260ff61a27/psycopg_binary-3.2.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:7d4f0c9b01eb933ce35bb32a54205f48d7bc36bf455565afe269cabcb7973955", size = 3335018 }, + { url = "https://files.pythonhosted.org/packages/d6/c1/98c239f40851c67eb4813d6a7eb90b39f717de2fd48f23fe3121899eb70b/psycopg_binary-3.2.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:26aed7ff8691ba810de95718d3bc81a43fd48a4036c3641ef711eb5f71fc7106", size = 3432703 }, + { url = "https://files.pythonhosted.org/packages/91/08/5b6fa2247bf964ac14d10cff3f7163d901dd008b7b6300e13eace8394751/psycopg_binary-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8a4b65eaf44dfed0b47e6ebd392e88cd3cff62ea11652d92db6fefeb2608ed25", size = 3457676 }, + { url = "https://files.pythonhosted.org/packages/2f/55/79db2b10f87eb7a913b59bbcdd10f794c4c964141f2db31f8eb1f567c7d9/psycopg_binary-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9fa48a2dc54c4e906d7dd781031d227d1b13966deff7e5ece5b037588643190", size = 2787324 }, + { url = "https://files.pythonhosted.org/packages/f3/9a/8013aa4ad4d76dfcf9b822da549d51aab96abfc77afc44b200ef295685dc/psycopg_binary-3.2.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d092b0aa80b8c3ee0701a7252cbfb0bdb742e1f74aaf0c1a13ef22c05c9266ab", size = 3871518 }, + { url = "https://files.pythonhosted.org/packages/1e/65/2422036d0169e33e5f06d868a36235340f85e42afe153d59b0edf4b4210f/psycopg_binary-3.2.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3955381dacc6d15f3838d5f25445ee99f80882876a163f8de0c01ffc54aeef4a", size = 3938511 }, + { url = "https://files.pythonhosted.org/packages/bf/ab/4f6c815862d62d9d06353abfbf36fef69ad7e6ca0763eed1629f47579e83/psycopg_binary-3.2.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04144d1963aa3309247980f1a742b98e15f60d68ea9745143c433f99aaeb70d7", size = 4512971 }, + { url = "https://files.pythonhosted.org/packages/27/ef/0e5e9ea6122f61f9e0c4e70b7f7a28ef51404c98bbb32096ad99f79f85b5/psycopg_binary-3.2.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eac61931bc90c1c6fdc648452894d3a434a005ffefaf12819b4709548c894bf2", size = 4318297 }, + { url = "https://files.pythonhosted.org/packages/93/cd/05d71e4f2f7f69fd185d2ec44b66de13734ff70c426ead14523e206258bb/psycopg_binary-3.2.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c09b765960480c4586758a3c16f0ee0db6f7e2f31c88cccb5e7d7024215468cd", size = 4570696 }, + { url = "https://files.pythonhosted.org/packages/af/7c/f5099ad491f78ba491e56cd686b38b0737eb09a719e919661a9f8d08e754/psycopg_binary-3.2.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:220de8efcc276e42ba7cc7ed613145b1274b6b5de321a1396fb6b6ce1758d34c", size = 4275069 }, + { url = 
"https://files.pythonhosted.org/packages/2d/95/a1a2f861d90f3394f98d032329a1e44a67c8d1f5bded0ec343b664c65ba5/psycopg_binary-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b558d3de315d18819ce477908e27518cbdd3275717c6193b58dde36f0443e167", size = 3865827 }, + { url = "https://files.pythonhosted.org/packages/ab/72/0b395ad2db2adc6009d2a1cdc2707b1764a3e870d6895cf92dc87e251aa9/psycopg_binary-3.2.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3b4c9b9a112d43533f7dbdedbb1188107d4ddcd262e2a2af41b4de0caf7d053", size = 3329276 }, + { url = "https://files.pythonhosted.org/packages/ba/5d/8e9904664e5bae3852989a0f1b0517c781ff0a9cba64416ffa68952129ac/psycopg_binary-3.2.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:870df866f789bb641a350897c1751c293b9420f46be4eb366d190ff5f2f2ffd8", size = 3426059 }, + { url = "https://files.pythonhosted.org/packages/46/6a/9abc03e01c1cb97878e6e87d5ea9e3d925790b04fa03d72b2d6e3455f124/psycopg_binary-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89506e268fb95428fb0f8f7abe48032e66cf47390469e11a4fe989f7407a5d88", size = 3456766 }, + { url = "https://files.pythonhosted.org/packages/12/c5/1be474bfa7282aa9177c3e498eb641b1441724f0155953f3872c69deddf0/psycopg_binary-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:7ddf1494cc3bf60761c01265c44dfc7a7fd63f21308c403c14f5dd91702df84d", size = 2790400 }, + { url = "https://files.pythonhosted.org/packages/48/f8/f30cf36bc9bc672894413f10f0498d5e81b0813c87f1b963d85e7c5cc9f1/psycopg_binary-3.2.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3ac24b3d421127ebe8662eba2c1e149a12f0f5b6795e66c1811a3f59111456bb", size = 3852023 }, + { url = "https://files.pythonhosted.org/packages/2f/23/88a265ca4a35def6f53cb239e352bf52f01ea418f57f4272b3913ecd6fd2/psycopg_binary-3.2.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f702f36204127984dd212eb57bb328676abdfe8a56f179e408a806d5e520aa11", size = 3935919 }, + { url = "https://files.pythonhosted.org/packages/0f/2b/2ac3456208c255a6fad9fec4fea0e411e34a0b4b0ecd1e60c0ba36fb78c4/psycopg_binary-3.2.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:610cd2013ee0849154fcff34b0cca17f720c91c7430ca094a61f1e5ff1d38e15", size = 4493108 }, + { url = "https://files.pythonhosted.org/packages/55/f5/725b786b7cf1b91f1afbe03545f0b14857c0a5cc03b4f8a6735ec289ff89/psycopg_binary-3.2.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95da59edd95f6b6488799c9710fafc2d5750e3ec6328ec991f7a9be04efe6886", size = 4300141 }, + { url = "https://files.pythonhosted.org/packages/09/80/72b3a1ec912b8be51e6af858fcd2a016d25145aca400e75bba6ab91025c4/psycopg_binary-3.2.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b71e98e3186f08473962e1ea4bfbc4387ecc398644b794cb112ad0a4276e3789", size = 4540559 }, + { url = "https://files.pythonhosted.org/packages/0b/8e/6cd6643f04e033bcdab008d5175c9356ade1eecff53fa4558d383dd9866c/psycopg_binary-3.2.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ccf4f71c3a0d46bc74207bf7997f010a6586414161dd10f3dd026ec059942ef", size = 4253687 }, + { url = "https://files.pythonhosted.org/packages/85/47/50d93bef98d32eba1f7b95e3c4e671a7f59b1d0b9ed01fdb43e951d6012b/psycopg_binary-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:244e1dd33b694792b7bc7a3d412a535ba39116218b07d8936b4591567f4121e9", size = 3842084 }, + { url = 
"https://files.pythonhosted.org/packages/2e/a0/2cf0dda5634d14219a24c05bc85cb928a5b2ea29684d167aebc974df016c/psycopg_binary-3.2.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f8dc8f4de5130c6278dd5e34b18ad8324a74658a7adb72d4e67ca97f9aeaaf3c", size = 3315357 }, + { url = "https://files.pythonhosted.org/packages/14/65/13b3dd91dd62f6e4ee3cb00bd24ab60a251592c03a8fb090c28057f21e38/psycopg_binary-3.2.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c336e58a48061a9189d3ba8c19f00fe5d9570219e6f7f954b923ad5c33e5bc71", size = 3394512 }, + { url = "https://files.pythonhosted.org/packages/07/cc/90b5307ff833892c8985aefd73c1894b1a9d8b5df4965650e95636ba8161/psycopg_binary-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9633c5dc6796d11766d2475e62335b67e5f99f119f40ba1675c1d23208d7709d", size = 3431893 }, + { url = "https://files.pythonhosted.org/packages/40/dc/5ab8fec2fc2e0599fd7a60abe046c853477bbb7cd978b818f795c5423848/psycopg_binary-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:295c25e56b430d786a475c5c2cef266b0b27c0a6fcaadf9d83a4cdcfb76f971f", size = 2778464 }, + { url = "https://files.pythonhosted.org/packages/25/e2/f56675aada063762f08559b6969e47e1313f269fc1682c16457c13da8186/psycopg_binary-3.2.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:81ab801c0d35830c876bf0d1edc8e7dd2f73aa2b04fe24eb812159c0b054d149", size = 3846854 }, + { url = "https://files.pythonhosted.org/packages/7b/8b/8c4a66b2b3db494367df0299535b7d2df78f303334228c517b8d00c411d5/psycopg_binary-3.2.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c09e02ce1124eb6638b3381df050a8cf88aedfad4522f939945cda49050a990c", size = 3932292 }, + { url = "https://files.pythonhosted.org/packages/84/e8/618d45f77cebce73d75497c95685a0902aea3783386d9335ce486c69e13a/psycopg_binary-3.2.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a249cdc6a5c2b5088a8677acba66b291e5237524739ab3d27498e1ef189312f5", size = 4493785 }, + { url = "https://files.pythonhosted.org/packages/c4/87/fc30318e6b97e723e017e7dc88d0f721bbfb749de1a6e414e52d4ac54c9a/psycopg_binary-3.2.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2960ba8a5c0ad75e184f6d8bf76bdf023708999efe75fe4e13445136c1cd206", size = 4304874 }, + { url = "https://files.pythonhosted.org/packages/91/30/1d127e651c21cd77befaf361c7c3b9001bfff51ac38027e8fce598ba0701/psycopg_binary-3.2.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dae2e50b0d3425c167eebbedc3553f7c811dbc0dbfc737b6877f68a03be7daf", size = 4541296 }, + { url = "https://files.pythonhosted.org/packages/0d/5e/22c824cb38745c1c744cec85d227190727c564afb75960ce0057ca15fd84/psycopg_binary-3.2.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03bf7ee7e0002c2cce43ecb923ec510358056eb2e44a96afaeb0424518f35206", size = 4255756 }, + { url = "https://files.pythonhosted.org/packages/b3/83/ae8783dec3f7e39df8a4056e4d383926ffec531970c0b415d48d9fd4a2c2/psycopg_binary-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5f5c85eeb63b1a8a6b026eef57f5da36ff215ce9a6a3bb8e20a409670d6cfbda", size = 3845918 }, + { url = "https://files.pythonhosted.org/packages/be/f7/fb7bffb0c4c45a5a82fe324e4f7b176075a4c5372e546a038858dd13c7ab/psycopg_binary-3.2.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8c7b95899d4d6d23c5cc46cb3419e8e6ca68d867509432ee1487042564a1ea55", size = 3315429 }, + { url = 
"https://files.pythonhosted.org/packages/81/a3/29f4993a239d6a3fb18ef8681d9990c007f5f73bdd2e21f65f07ac55ad6f/psycopg_binary-3.2.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fa4acea9ca20a567c3872a5afab2084751530bb57b8fb6b52820d5c54e7c8c3b", size = 3399388 }, + { url = "https://files.pythonhosted.org/packages/25/5b/925171cbfa2e3d1ccb7f4c005d0d5db609ba796c1d08a23c42825b09c554/psycopg_binary-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5c487f35a1905bb15da927c1fc05f70f3d29f0e21fb4ba21d360a0da9c755f20", size = 3436702 }, + { url = "https://files.pythonhosted.org/packages/b6/47/25b2b85b8fcabf99bfa92b4b0d587894c01576bf0b2bf137c243d1eb1070/psycopg_binary-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:80297c3a9f7b5a6afdb0d8f220661ccd796e5c9128c44b32c41267f7daefd37f", size = 2779196 }, ] [[package]] name = "psycopg-pool" -version = "3.2.6" +version = "3.2.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cf/13/1e7850bb2c69a63267c3dbf37387d3f71a00fd0e2fa55c5db14d64ba1af4/psycopg_pool-3.2.6.tar.gz", hash = "sha256:0f92a7817719517212fbfe2fd58b8c35c1850cdd2a80d36b581ba2085d9148e5", size = 29770 } +sdist = { url = "https://files.pythonhosted.org/packages/49/71/01d4e589dc5fd1f21368b7d2df183ed0e5bbc160ce291d745142b229797b/psycopg_pool-3.2.4.tar.gz", hash = "sha256:61774b5bbf23e8d22bedc7504707135aaf744679f8ef9b3fe29942920746a6ed", size = 29749 } wheels = [ - { url = "https://files.pythonhosted.org/packages/47/fd/4feb52a55c1a4bd748f2acaed1903ab54a723c47f6d0242780f4d97104d4/psycopg_pool-3.2.6-py3-none-any.whl", hash = "sha256:5887318a9f6af906d041a0b1dc1c60f8f0dda8340c2572b74e10907b51ed5da7", size = 38252 }, + { url = "https://files.pythonhosted.org/packages/bb/28/2b56ac94c236ee033c7b291bcaa6a83089d0cc0fe7830c35f6521177c199/psycopg_pool-3.2.4-py3-none-any.whl", hash = "sha256:f6a22cff0f21f06d72fb2f5cb48c618946777c49385358e0c88d062c59cbd224", size = 38240 }, ] [[package]] @@ -3839,44 +3672,44 @@ wheels = [ [[package]] name = "pyarrow" -version = "19.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7f/09/a9046344212690f0632b9c709f9bf18506522feb333c894d0de81d62341a/pyarrow-19.0.1.tar.gz", hash = "sha256:3bf266b485df66a400f282ac0b6d1b500b9d2ae73314a153dbe97d6d5cc8a99e", size = 1129437 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/36/01/b23b514d86b839956238d3f8ef206fd2728eee87ff1b8ce150a5678d9721/pyarrow-19.0.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:fc28912a2dc924dddc2087679cc8b7263accc71b9ff025a1362b004711661a69", size = 30688914 }, - { url = "https://files.pythonhosted.org/packages/c6/68/218ff7cf4a0652a933e5f2ed11274f724dd43b9813cb18dd72c0a35226a2/pyarrow-19.0.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:fca15aabbe9b8355800d923cc2e82c8ef514af321e18b437c3d782aa884eaeec", size = 32102866 }, - { url = "https://files.pythonhosted.org/packages/98/01/c295050d183014f4a2eb796d7d2bbfa04b6cccde7258bb68aacf6f18779b/pyarrow-19.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad76aef7f5f7e4a757fddcdcf010a8290958f09e3470ea458c80d26f4316ae89", size = 41147682 }, - { url = "https://files.pythonhosted.org/packages/40/17/a6c3db0b5f3678f33bbb552d2acbc16def67f89a72955b67b0109af23eb0/pyarrow-19.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d03c9d6f2a3dffbd62671ca070f13fc527bb1867b4ec2b98c7eeed381d4f389a", size = 42179192 }, - { url = "https://files.pythonhosted.org/packages/cf/75/c7c8e599300d8cebb6cb339014800e1c720c9db2a3fcb66aa64ec84bac72/pyarrow-19.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:65cf9feebab489b19cdfcfe4aa82f62147218558d8d3f0fc1e9dea0ab8e7905a", size = 40517272 }, - { url = "https://files.pythonhosted.org/packages/ef/c9/68ab123ee1528699c4d5055f645ecd1dd68ff93e4699527249d02f55afeb/pyarrow-19.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:41f9706fbe505e0abc10e84bf3a906a1338905cbbcf1177b71486b03e6ea6608", size = 42069036 }, - { url = "https://files.pythonhosted.org/packages/54/e3/d5cfd7654084e6c0d9c3ce949e5d9e0ccad569ae1e2d5a68a3ec03b2be89/pyarrow-19.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6cb2335a411b713fdf1e82a752162f72d4a7b5dbc588e32aa18383318b05866", size = 25277951 }, - { url = "https://files.pythonhosted.org/packages/a0/55/f1a8d838ec07fe3ca53edbe76f782df7b9aafd4417080eebf0b42aab0c52/pyarrow-19.0.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:cc55d71898ea30dc95900297d191377caba257612f384207fe9f8293b5850f90", size = 30713987 }, - { url = "https://files.pythonhosted.org/packages/13/12/428861540bb54c98a140ae858a11f71d041ef9e501e6b7eb965ca7909505/pyarrow-19.0.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:7a544ec12de66769612b2d6988c36adc96fb9767ecc8ee0a4d270b10b1c51e00", size = 32135613 }, - { url = "https://files.pythonhosted.org/packages/2f/8a/23d7cc5ae2066c6c736bce1db8ea7bc9ac3ef97ac7e1c1667706c764d2d9/pyarrow-19.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0148bb4fc158bfbc3d6dfe5001d93ebeed253793fff4435167f6ce1dc4bddeae", size = 41149147 }, - { url = "https://files.pythonhosted.org/packages/a2/7a/845d151bb81a892dfb368bf11db584cf8b216963ccce40a5cf50a2492a18/pyarrow-19.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f24faab6ed18f216a37870d8c5623f9c044566d75ec586ef884e13a02a9d62c5", size = 42178045 }, - { url = "https://files.pythonhosted.org/packages/a7/31/e7282d79a70816132cf6cae7e378adfccce9ae10352d21c2fecf9d9756dd/pyarrow-19.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:4982f8e2b7afd6dae8608d70ba5bd91699077323f812a0448d8b7abdff6cb5d3", size = 40532998 }, - { url = "https://files.pythonhosted.org/packages/b8/82/20f3c290d6e705e2ee9c1fa1d5a0869365ee477e1788073d8b548da8b64c/pyarrow-19.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:49a3aecb62c1be1d822f8bf629226d4a96418228a42f5b40835c1f10d42e4db6", size = 42084055 }, - { url = "https://files.pythonhosted.org/packages/ff/77/e62aebd343238863f2c9f080ad2ef6ace25c919c6ab383436b5b81cbeef7/pyarrow-19.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:008a4009efdb4ea3d2e18f05cd31f9d43c388aad29c636112c2966605ba33466", size = 25283133 }, - { url = "https://files.pythonhosted.org/packages/78/b4/94e828704b050e723f67d67c3535cf7076c7432cd4cf046e4bb3b96a9c9d/pyarrow-19.0.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:80b2ad2b193e7d19e81008a96e313fbd53157945c7be9ac65f44f8937a55427b", size = 30670749 }, - { url = "https://files.pythonhosted.org/packages/7e/3b/4692965e04bb1df55e2c314c4296f1eb12b4f3052d4cf43d29e076aedf66/pyarrow-19.0.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:ee8dec072569f43835932a3b10c55973593abc00936c202707a4ad06af7cb294", size = 32128007 }, - { url = 
"https://files.pythonhosted.org/packages/22/f7/2239af706252c6582a5635c35caa17cb4d401cd74a87821ef702e3888957/pyarrow-19.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d5d1ec7ec5324b98887bdc006f4d2ce534e10e60f7ad995e7875ffa0ff9cb14", size = 41144566 }, - { url = "https://files.pythonhosted.org/packages/fb/e3/c9661b2b2849cfefddd9fd65b64e093594b231b472de08ff658f76c732b2/pyarrow-19.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ad4c0eb4e2a9aeb990af6c09e6fa0b195c8c0e7b272ecc8d4d2b6574809d34", size = 42202991 }, - { url = "https://files.pythonhosted.org/packages/fe/4f/a2c0ed309167ef436674782dfee4a124570ba64299c551e38d3fdaf0a17b/pyarrow-19.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d383591f3dcbe545f6cc62daaef9c7cdfe0dff0fb9e1c8121101cabe9098cfa6", size = 40507986 }, - { url = "https://files.pythonhosted.org/packages/27/2e/29bb28a7102a6f71026a9d70d1d61df926887e36ec797f2e6acfd2dd3867/pyarrow-19.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b4c4156a625f1e35d6c0b2132635a237708944eb41df5fbe7d50f20d20c17832", size = 42087026 }, - { url = "https://files.pythonhosted.org/packages/16/33/2a67c0f783251106aeeee516f4806161e7b481f7d744d0d643d2f30230a5/pyarrow-19.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:5bd1618ae5e5476b7654c7b55a6364ae87686d4724538c24185bbb2952679960", size = 25250108 }, - { url = "https://files.pythonhosted.org/packages/2b/8d/275c58d4b00781bd36579501a259eacc5c6dfb369be4ddeb672ceb551d2d/pyarrow-19.0.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e45274b20e524ae5c39d7fc1ca2aa923aab494776d2d4b316b49ec7572ca324c", size = 30653552 }, - { url = "https://files.pythonhosted.org/packages/a0/9e/e6aca5cc4ef0c7aec5f8db93feb0bde08dbad8c56b9014216205d271101b/pyarrow-19.0.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:d9dedeaf19097a143ed6da37f04f4051aba353c95ef507764d344229b2b740ae", size = 32103413 }, - { url = "https://files.pythonhosted.org/packages/6a/fa/a7033f66e5d4f1308c7eb0dfcd2ccd70f881724eb6fd1776657fdf65458f/pyarrow-19.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ebfb5171bb5f4a52319344ebbbecc731af3f021e49318c74f33d520d31ae0c4", size = 41134869 }, - { url = "https://files.pythonhosted.org/packages/2d/92/34d2569be8e7abdc9d145c98dc410db0071ac579b92ebc30da35f500d630/pyarrow-19.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a21d39fbdb948857f67eacb5bbaaf36802de044ec36fbef7a1c8f0dd3a4ab2", size = 42192626 }, - { url = "https://files.pythonhosted.org/packages/0a/1f/80c617b1084fc833804dc3309aa9d8daacd46f9ec8d736df733f15aebe2c/pyarrow-19.0.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:99bc1bec6d234359743b01e70d4310d0ab240c3d6b0da7e2a93663b0158616f6", size = 40496708 }, - { url = "https://files.pythonhosted.org/packages/e6/90/83698fcecf939a611c8d9a78e38e7fed7792dcc4317e29e72cf8135526fb/pyarrow-19.0.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1b93ef2c93e77c442c979b0d596af45e4665d8b96da598db145b0fec014b9136", size = 42075728 }, - { url = "https://files.pythonhosted.org/packages/40/49/2325f5c9e7a1c125c01ba0c509d400b152c972a47958768e4e35e04d13d8/pyarrow-19.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:d9d46e06846a41ba906ab25302cf0fd522f81aa2a85a71021826f34639ad31ef", size = 25242568 }, - { url = "https://files.pythonhosted.org/packages/3f/72/135088d995a759d4d916ec4824cb19e066585b4909ebad4ab196177aa825/pyarrow-19.0.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = 
"sha256:c0fe3dbbf054a00d1f162fda94ce236a899ca01123a798c561ba307ca38af5f0", size = 30702371 }, - { url = "https://files.pythonhosted.org/packages/2e/01/00beeebd33d6bac701f20816a29d2018eba463616bbc07397fdf99ac4ce3/pyarrow-19.0.1-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:96606c3ba57944d128e8a8399da4812f56c7f61de8c647e3470b417f795d0ef9", size = 32116046 }, - { url = "https://files.pythonhosted.org/packages/1f/c9/23b1ea718dfe967cbd986d16cf2a31fe59d015874258baae16d7ea0ccabc/pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f04d49a6b64cf24719c080b3c2029a3a5b16417fd5fd7c4041f94233af732f3", size = 41091183 }, - { url = "https://files.pythonhosted.org/packages/3a/d4/b4a3aa781a2c715520aa8ab4fe2e7fa49d33a1d4e71c8fc6ab7b5de7a3f8/pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9137cf7e1640dce4c190551ee69d478f7121b5c6f323553b319cac936395f6", size = 42171896 }, - { url = "https://files.pythonhosted.org/packages/23/1b/716d4cd5a3cbc387c6e6745d2704c4b46654ba2668260d25c402626c5ddb/pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:7c1bca1897c28013db5e4c83944a2ab53231f541b9e0c3f4791206d0c0de389a", size = 40464851 }, - { url = "https://files.pythonhosted.org/packages/ed/bd/54907846383dcc7ee28772d7e646f6c34276a17da740002a5cefe90f04f7/pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:58d9397b2e273ef76264b45531e9d552d8ec8a6688b7390b5be44c02a37aade8", size = 42085744 }, +version = "18.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7f/7b/640785a9062bb00314caa8a387abce547d2a420cf09bd6c715fe659ccffb/pyarrow-18.1.0.tar.gz", hash = "sha256:9386d3ca9c145b5539a1cfc75df07757dff870168c959b473a0bccbc3abc8c73", size = 1118671 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/bb/8d4a1573f66e0684f190dd2b55fd0b97a7214de8882d58a3867e777bf640/pyarrow-18.1.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e21488d5cfd3d8b500b3238a6c4b075efabc18f0f6d80b29239737ebd69caa6c", size = 29531620 }, + { url = "https://files.pythonhosted.org/packages/30/90/893acfad917533b624a97b9e498c0e8393908508a0a72d624fe935e632bf/pyarrow-18.1.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b516dad76f258a702f7ca0250885fc93d1fa5ac13ad51258e39d402bd9e2e1e4", size = 30836521 }, + { url = "https://files.pythonhosted.org/packages/a3/2a/526545a7464b5fb2fa6e2c4bad16ca90e59e1843025c534fd907b7f73e5a/pyarrow-18.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f443122c8e31f4c9199cb23dca29ab9427cef990f283f80fe15b8e124bcc49b", size = 39213905 }, + { url = "https://files.pythonhosted.org/packages/8a/77/4b3fab91a30e19e233e738d0c5eca5a8f6dd05758bc349a2ca262c65de79/pyarrow-18.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a03da7f2758645d17b7b4f83c8bffeae5bbb7f974523fe901f36288d2eab71", size = 40128881 }, + { url = "https://files.pythonhosted.org/packages/aa/e2/a88e16c5e45e562449c52305bd3bc2f9d704295322d3434656e7ccac1444/pyarrow-18.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ba17845efe3aa358ec266cf9cc2800fa73038211fb27968bfa88acd09261a470", size = 38627517 }, + { url = "https://files.pythonhosted.org/packages/6d/84/8037c20005ccc7b869726465be0957bd9c29cfc88612962030f08292ad06/pyarrow-18.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3c35813c11a059056a22a3bef520461310f2f7eea5c8a11ef9de7062a23f8d56", size = 40060187 }, + { url = 
"https://files.pythonhosted.org/packages/2a/38/d6435c723ff73df8ae74626ea778262fbcc2b9b0d1a4f3db915b61711b05/pyarrow-18.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9736ba3c85129d72aefa21b4f3bd715bc4190fe4426715abfff90481e7d00812", size = 25118314 }, + { url = "https://files.pythonhosted.org/packages/9e/4d/a4988e7d82f4fbc797715db4185939a658eeffb07a25bab7262bed1ea076/pyarrow-18.1.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:eaeabf638408de2772ce3d7793b2668d4bb93807deed1725413b70e3156a7854", size = 29554860 }, + { url = "https://files.pythonhosted.org/packages/59/03/3a42c5c1e4bd4c900ab62aa1ff6b472bdb159ba8f1c3e5deadab7222244f/pyarrow-18.1.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:3b2e2239339c538f3464308fd345113f886ad031ef8266c6f004d49769bb074c", size = 30867076 }, + { url = "https://files.pythonhosted.org/packages/75/7e/332055ac913373e89256dce9d14b7708f55f7bd5be631456c897f0237738/pyarrow-18.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f39a2e0ed32a0970e4e46c262753417a60c43a3246972cfc2d3eb85aedd01b21", size = 39212135 }, + { url = "https://files.pythonhosted.org/packages/8c/64/5099cdb325828722ef7ffeba9a4696f238eb0cdeae227f831c2d77fcf1bd/pyarrow-18.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31e9417ba9c42627574bdbfeada7217ad8a4cbbe45b9d6bdd4b62abbca4c6f6", size = 40125195 }, + { url = "https://files.pythonhosted.org/packages/83/88/1938d783727db1b178ff71bc6a6143d7939e406db83a9ec23cad3dad325c/pyarrow-18.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:01c034b576ce0eef554f7c3d8c341714954be9b3f5d5bc7117006b85fcf302fe", size = 38641884 }, + { url = "https://files.pythonhosted.org/packages/5e/b5/9e14e9f7590e0eaa435ecea84dabb137284a4dbba7b3c337b58b65b76d95/pyarrow-18.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f266a2c0fc31995a06ebd30bcfdb7f615d7278035ec5b1cd71c48d56daaf30b0", size = 40076877 }, + { url = "https://files.pythonhosted.org/packages/4d/a3/817ac7fe0891a2d66e247e223080f3a6a262d8aefd77e11e8c27e6acf4e1/pyarrow-18.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:d4f13eee18433f99adefaeb7e01d83b59f73360c231d4782d9ddfaf1c3fbde0a", size = 25119811 }, + { url = "https://files.pythonhosted.org/packages/6a/50/12829e7111b932581e51dda51d5cb39207a056c30fe31ef43f14c63c4d7e/pyarrow-18.1.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:9f3a76670b263dc41d0ae877f09124ab96ce10e4e48f3e3e4257273cee61ad0d", size = 29514620 }, + { url = "https://files.pythonhosted.org/packages/d1/41/468c944eab157702e96abab3d07b48b8424927d4933541ab43788bb6964d/pyarrow-18.1.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:da31fbca07c435be88a0c321402c4e31a2ba61593ec7473630769de8346b54ee", size = 30856494 }, + { url = "https://files.pythonhosted.org/packages/68/f9/29fb659b390312a7345aeb858a9d9c157552a8852522f2c8bad437c29c0a/pyarrow-18.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:543ad8459bc438efc46d29a759e1079436290bd583141384c6f7a1068ed6f992", size = 39203624 }, + { url = "https://files.pythonhosted.org/packages/6e/f6/19360dae44200e35753c5c2889dc478154cd78e61b1f738514c9f131734d/pyarrow-18.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0743e503c55be0fdb5c08e7d44853da27f19dc854531c0570f9f394ec9671d54", size = 40139341 }, + { url = "https://files.pythonhosted.org/packages/bb/e6/9b3afbbcf10cc724312e824af94a2e993d8ace22994d823f5c35324cebf5/pyarrow-18.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = 
"sha256:d4b3d2a34780645bed6414e22dda55a92e0fcd1b8a637fba86800ad737057e33", size = 38618629 }, + { url = "https://files.pythonhosted.org/packages/3a/2e/3b99f8a3d9e0ccae0e961978a0d0089b25fb46ebbcfb5ebae3cca179a5b3/pyarrow-18.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c52f81aa6f6575058d8e2c782bf79d4f9fdc89887f16825ec3a66607a5dd8e30", size = 40078661 }, + { url = "https://files.pythonhosted.org/packages/76/52/f8da04195000099d394012b8d42c503d7041b79f778d854f410e5f05049a/pyarrow-18.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ad4892617e1a6c7a551cfc827e072a633eaff758fa09f21c4ee548c30bcaf99", size = 25092330 }, + { url = "https://files.pythonhosted.org/packages/cb/87/aa4d249732edef6ad88899399047d7e49311a55749d3c373007d034ee471/pyarrow-18.1.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:84e314d22231357d473eabec709d0ba285fa706a72377f9cc8e1cb3c8013813b", size = 29497406 }, + { url = "https://files.pythonhosted.org/packages/3c/c7/ed6adb46d93a3177540e228b5ca30d99fc8ea3b13bdb88b6f8b6467e2cb7/pyarrow-18.1.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:f591704ac05dfd0477bb8f8e0bd4b5dc52c1cadf50503858dce3a15db6e46ff2", size = 30835095 }, + { url = "https://files.pythonhosted.org/packages/41/d7/ed85001edfb96200ff606943cff71d64f91926ab42828676c0fc0db98963/pyarrow-18.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acb7564204d3c40babf93a05624fc6a8ec1ab1def295c363afc40b0c9e66c191", size = 39194527 }, + { url = "https://files.pythonhosted.org/packages/59/16/35e28eab126342fa391593415d79477e89582de411bb95232f28b131a769/pyarrow-18.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74de649d1d2ccb778f7c3afff6085bd5092aed4c23df9feeb45dd6b16f3811aa", size = 40131443 }, + { url = "https://files.pythonhosted.org/packages/0c/95/e855880614c8da20f4cd74fa85d7268c725cf0013dc754048593a38896a0/pyarrow-18.1.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f96bd502cb11abb08efea6dab09c003305161cb6c9eafd432e35e76e7fa9b90c", size = 38608750 }, + { url = "https://files.pythonhosted.org/packages/54/9d/f253554b1457d4fdb3831b7bd5f8f00f1795585a606eabf6fec0a58a9c38/pyarrow-18.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:36ac22d7782554754a3b50201b607d553a8d71b78cdf03b33c1125be4b52397c", size = 40066690 }, + { url = "https://files.pythonhosted.org/packages/2f/58/8912a2563e6b8273e8aa7b605a345bba5a06204549826f6493065575ebc0/pyarrow-18.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:25dbacab8c5952df0ca6ca0af28f50d45bd31c1ff6fcf79e2d120b4a65ee7181", size = 25081054 }, + { url = "https://files.pythonhosted.org/packages/82/f9/d06ddc06cab1ada0c2f2fd205ac8c25c2701182de1b9c4bf7a0a44844431/pyarrow-18.1.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6a276190309aba7bc9d5bd2933230458b3521a4317acfefe69a354f2fe59f2bc", size = 29525542 }, + { url = "https://files.pythonhosted.org/packages/ab/94/8917e3b961810587ecbdaa417f8ebac0abb25105ae667b7aa11c05876976/pyarrow-18.1.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:ad514dbfcffe30124ce655d72771ae070f30bf850b48bc4d9d3b25993ee0e386", size = 30829412 }, + { url = "https://files.pythonhosted.org/packages/5e/e3/3b16c3190f3d71d3b10f6758d2d5f7779ef008c4fd367cedab3ed178a9f7/pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aebc13a11ed3032d8dd6e7171eb6e86d40d67a5639d96c35142bd568b9299324", size = 39119106 }, + { url = 
"https://files.pythonhosted.org/packages/1d/d6/5d704b0d25c3c79532f8c0639f253ec2803b897100f64bcb3f53ced236e5/pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6cf5c05f3cee251d80e98726b5c7cc9f21bab9e9783673bac58e6dfab57ecc8", size = 40090940 }, + { url = "https://files.pythonhosted.org/packages/37/29/366bc7e588220d74ec00e497ac6710c2833c9176f0372fe0286929b2d64c/pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:11b676cd410cf162d3f6a70b43fb9e1e40affbc542a1e9ed3681895f2962d3d9", size = 38548177 }, + { url = "https://files.pythonhosted.org/packages/c8/11/fabf6ecabb1fe5b7d96889228ca2a9158c4c3bb732e3b8ee3f7f6d40b703/pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:b76130d835261b38f14fc41fdfb39ad8d672afb84c447126b84d5472244cfaba", size = 40043567 }, ] [[package]] @@ -3920,16 +3753,16 @@ wheels = [ [[package]] name = "pydantic" -version = "2.10.6" +version = "2.10.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pydantic-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681 } +sdist = { url = "https://files.pythonhosted.org/packages/6a/c7/ca334c2ef6f2e046b1144fe4bb2a5da8a4c574e7f2ebf7e16b34a6a2fa92/pydantic-2.10.5.tar.gz", hash = "sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff", size = 761287 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696 }, + { url = "https://files.pythonhosted.org/packages/58/26/82663c79010b28eddf29dcdd0ea723439535fa917fce5905885c0e9ba562/pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53", size = 431426 }, ] [[package]] @@ -4009,27 +3842,15 @@ wheels = [ [[package]] name = "pydantic-settings" -version = "2.8.0" +version = "2.7.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "python-dotenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ca/a2/ad2511ede77bb424f3939e5148a56d968cdc6b1462620d24b2a1f4ab65b4/pydantic_settings-2.8.0.tar.gz", hash = "sha256:88e2ca28f6e68ea102c99c3c401d6c9078e68a5df600e97b43891c34e089500a", size = 83347 } +sdist = { url = "https://files.pythonhosted.org/packages/73/7b/c58a586cd7d9ac66d2ee4ba60ca2d241fa837c02bca9bea80a9a8c3d22a9/pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93", size = 79920 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/a9/3b9642025174bbe67e900785fb99c9bfe91ea584b0b7126ff99945c24a0e/pydantic_settings-2.8.0-py3-none-any.whl", hash = 
"sha256:c782c7dc3fb40e97b238e713c25d26f64314aece2e91abcff592fcac15f71820", size = 30746 }, -] - -[[package]] -name = "pyee" -version = "12.1.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0a/37/8fb6e653597b2b67ef552ed49b438d5398ba3b85a9453f8ada0fd77d455c/pyee-12.1.1.tar.gz", hash = "sha256:bbc33c09e2ff827f74191e3e5bbc6be7da02f627b7ec30d86f5ce1a6fb2424a3", size = 30915 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/25/68/7e150cba9eeffdeb3c5cecdb6896d70c8edd46ce41c0491e12fb2b2256ff/pyee-12.1.1-py3-none-any.whl", hash = "sha256:18a19c650556bb6b32b406d7f017c8f513aceed1ef7ca618fb65de7bd2d347ef", size = 15527 }, + { url = "https://files.pythonhosted.org/packages/b4/46/93416fdae86d40879714f72956ac14df9c7b76f7d41a4d68aa9f71a0028b/pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd", size = 29718 }, ] [[package]] @@ -4055,24 +3876,6 @@ crypto = [ { name = "cryptography", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -[[package]] -name = "pylibsrtp" -version = "0.11.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cffi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2e/49/1c5101ecfeda540699e0754dddfc91c401fbf736ebe99d66e59fe3dad2ba/pylibsrtp-0.11.0.tar.gz", hash = "sha256:5a8d19b1448baebde5ae3cedfa51f10e8ada3d9d99f43046ced0ecf1c105b8ec", size = 10786 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/95/65650bf56e1080beb5f7c963a0bb11a6ee7599bfd89b33ff4525d2b5824b/pylibsrtp-0.11.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:36c6b33347d47c889b7dd465c6ae1f44d7705d00436ca613fd2a8f5dd401b104", size = 1727506 }, - { url = "https://files.pythonhosted.org/packages/4e/b0/f12c489ea8716e74343559abc5d0dfb94d66bcfe1924d64d58424a50f496/pylibsrtp-0.11.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:cf18b80f9513484a70e55136ece6ec80e7d21c03cc69abbb428e4f2745ca3cee", size = 2058008 }, - { url = "https://files.pythonhosted.org/packages/e1/2e/6040cd6da6f82f3aa1763c8c45f7fcfdfe08db5560c73f5e1deb4c36c2bb/pylibsrtp-0.11.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81bbe0cd777979f7fc45c85f0c619c9cbe709faffbf91675d9dcce560734b353", size = 2566705 }, - { url = "https://files.pythonhosted.org/packages/2b/c9/fd313ac3a23e9c45493131d9fa3463770289e59bb8422c6c6877ab3add40/pylibsrtp-0.11.0-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78fcdfe63925ea9a5017884c31fe9687b9b8b9f7d9beb7e25e3be47aa6ece495", size = 2168163 }, - { url = "https://files.pythonhosted.org/packages/f9/b3/ae0bac50cc0cca4b8c14de8063ba410ed3edd82c71a2315f284c9be7d679/pylibsrtp-0.11.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1909f7e781a7675d5c92cbad9e7ed3642e626e2bea5834243e423976e5420ac3", size = 2224343 }, - { url = "https://files.pythonhosted.org/packages/51/c4/650c2cecd5810f84adc89f3a94a28ea02d7ac8eaf3ee718a629c6f8ebf09/pylibsrtp-0.11.0-cp39-abi3-win32.whl", hash = "sha256:15123cecd377248747c95de9305ac314f3bcccdae46022bb4b9d60a552a26a10", size = 1156330 }, - { url = 
"https://files.pythonhosted.org/packages/fe/78/724307095b95c937e54c48133be3e85779cebea770f7536be555217b31f2/pylibsrtp-0.11.0-cp39-abi3-win_amd64.whl", hash = "sha256:bea2fb98029d19de516538b13c4827b6474d6f85d9ea50fae349e9671b946f7a", size = 1486448 }, -] - [[package]] name = "pymeta3" version = "0.5.1" @@ -4081,7 +3884,7 @@ sdist = { url = "https://files.pythonhosted.org/packages/ce/af/409edba35fc597f1e [[package]] name = "pymilvus" -version = "2.5.4" +version = "2.5.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -4092,78 +3895,56 @@ dependencies = [ { name = "setuptools", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "ujson", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f7/64/b00289d52e33a6ebc645cf0d60a7a0a3ce4db74648ceb1f55d776971e34d/pymilvus-2.5.4.tar.gz", hash = "sha256:611732428ff669d57ded3d1f823bdeb10febf233d0251cce8498b287e5a10ce8", size = 1250160 } +sdist = { url = "https://files.pythonhosted.org/packages/a9/8a/a10d29f5d9c9c33ac71db4594e3e6230279d557d6bd5fde6f99d1edfc360/pymilvus-2.5.3.tar.gz", hash = "sha256:68bc3797b7a14c494caf116cee888894ffd6eba7b96a3ac841be85d60694cc5d", size = 1258217 } wheels = [ - { url = "https://files.pythonhosted.org/packages/26/e6/1ba3cae7c723ecf9ede7a30c78824953afc2fe4bab5fce8ec5d8e233f541/pymilvus-2.5.4-py3-none-any.whl", hash = "sha256:3f7ddaeae0c8f63554b8e316b73f265d022e05a457d47c366ce47293434a3aea", size = 222399 }, + { url = "https://files.pythonhosted.org/packages/7e/ef/2a5682e02ef69465f7a50aa48fd9ac3fe12a3f653f51cbdc211a28557efc/pymilvus-2.5.3-py3-none-any.whl", hash = "sha256:64ca63594284586937274800be27a402f3be2d078130bf81d94ab8d7798ac9c8", size = 229867 }, ] [[package]] name = "pymongo" -version = "4.11.1" +version = "4.9.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "dnspython", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c5/18/63fd06769a2f47842c374fc5d937445fe8dc2f31b3a859c8bf7df73daa14/pymongo-4.11.1.tar.gz", hash = "sha256:3757ce9257c3486eead45680a8895a0ed9ba27efaf1791fc0cf854367c21c638", size = 2054021 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/98/5030f36a22f602b8ed8fa0921b80c5d1f1e2cb271a5e70e9b4269e54e6c9/pymongo-4.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e596caec72db62a3f438559dfa46d22faefea1967279f553f936ddcb873903df", size = 786132 }, - { url = "https://files.pythonhosted.org/packages/c8/a1/971f4ce571d2e4622ff3360592ec9e674337c1feea2941ee88094b842015/pymongo-4.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15a88b25efcd61c5e539e9204932849b20f393efa330771676e860c4466fe8ad", size = 786420 }, - { url = "https://files.pythonhosted.org/packages/f6/d0/df9b520c1b702b6229a36fa58d7d2d5791bb1d5b9d585eed1ef3d0bad524/pymongo-4.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7073a740aad257f9d2c12cb95a08f17db1f273d422e7ddfed9895738571cac7", size = 1163863 }, - { url = "https://files.pythonhosted.org/packages/32/be/3b7890e9cca9b1218043a656f6d05d2569741ad3e144c877fb6a0c01e9fc/pymongo-4.11.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:25b7cadae1d5287b2eed3d901a347f3fa9bc3f898532e1cb7f28a1c9237d824d", size = 1198081 }, - { url = "https://files.pythonhosted.org/packages/17/68/23e88bf9781c2eaa38d17f61c0b86c3191c73420a91deba5030930f2c27b/pymongo-4.11.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fe9589d9a83f6e2abe88f32daa410276eddd038eb8f8f75975cf8ce834cea1f", size = 1181002 }, - { url = "https://files.pythonhosted.org/packages/4d/9c/9d19ea4187eecce995ea261ca6ead9b85082246370da10b5d3e8cb0b09c1/pymongo-4.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cc6d48b74e9abe544dd71b000453ad06e65cbfcfd57c7342a9f012f65532eb2", size = 1167024 }, - { url = "https://files.pythonhosted.org/packages/69/5c/453d8815521b1a1c81e83a2083bd49255d96648e5b24fc0ceda131deb717/pymongo-4.11.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1518931a4a26d3cb31a97b9187087c6378cd0b0401d7a7cc160e92223a2a3059", size = 1146171 }, - { url = "https://files.pythonhosted.org/packages/81/ff/cf195d0c7786fd26f1ea654e728b189ae5622f462e4672db17073a688ebe/pymongo-4.11.1-cp310-cp310-win32.whl", hash = "sha256:163c887384cb9fd16e0463128600867138a5a9a5344fc0903db08494b39a2d6e", size = 772072 }, - { url = "https://files.pythonhosted.org/packages/78/c3/8ae8e05e72e3349c2ca935fd7aec22a6e4011dff3e03f97a89e36d90e734/pymongo-4.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:e147e08df329a7d23cbcb6213bc2fd360e51551626be828092fe2027f3473abc", size = 781414 }, - { url = "https://files.pythonhosted.org/packages/20/ee/8caede1100c5d59eee723980e39acfad04c5267d45b4f0827cc42f5de994/pymongo-4.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac125f2782d8fe3f3ff93a396af5482d694093b3be3e06052197096c83acadc", size = 840509 }, - { url = "https://files.pythonhosted.org/packages/33/2f/0df9ff0bb6a7b2812697dd9a3fb728fc0c7b4d035c85acf10eeb0b38579d/pymongo-4.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:681806d3ecaf29b11e16a45c1f4c28f99d9d8283238f7b6ea9eee93b5d7bc6d2", size = 840802 }, - { url = "https://files.pythonhosted.org/packages/58/fb/167e3fef60d2269a1e536cf6edeb871a4b53683f9d03681d2744983e0540/pymongo-4.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50210249a9bf67937e97205a312b96a4b1250b111cbaaff532d7a61bc2b1562d", size = 1409951 }, - { url = "https://files.pythonhosted.org/packages/31/ff/f02900dac6d0374a98319cbbf3d6de3b3cd8cf5d1508d62062efb2084bcc/pymongo-4.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdd0e404d5c3b1203ee61fcfee40a1f062f3780ce272febdc2378797b00401d1", size = 1460907 }, - { url = "https://files.pythonhosted.org/packages/a5/4b/2eed7b9b7f65278123f0e73b39d38df7d99f477cc1eef49b5aa62485b0a1/pymongo-4.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6e46bcd3c2f86f442b721551ed5e5812294e4a93fce42517e173bd41d4cd2d8", size = 1435332 }, - { url = "https://files.pythonhosted.org/packages/55/dc/1ddce3af1dd5156f1f1178857f768c8a88a44f8cc791c1490192ce7fd24c/pymongo-4.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f28d179e7d434869e23f4582c941cb400f75e996cfea472693ec756ee213c685", size = 1414459 }, - { url = "https://files.pythonhosted.org/packages/bb/b9/cfb32aea974c7656d81a47c1a52d7c94bf491b057ffb66ecec070c4f207b/pymongo-4.11.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b56dbb6883ce7adad8588464948e0723a3d881e5549f48c4767f1654e8e4cb7d", size = 
1383103 }, - { url = "https://files.pythonhosted.org/packages/76/63/3768c99383e24ca16d59d860a1f799eccd02fc55a4e7588a72bf65740fe5/pymongo-4.11.1-cp311-cp311-win32.whl", hash = "sha256:27bc58e0b1bebb17d2426d0cc191c579f2eeaf9692be880f93fe4180cf850ca7", size = 817671 }, - { url = "https://files.pythonhosted.org/packages/1d/2d/044b8511853c8d439817dfee4b1d99060fb76cb08c980877fcb6a6bc1da1/pymongo-4.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:7751e6e99c79057b09441c6ab2a93fae10b4028478aac5b455db8b12f884a3c0", size = 831620 }, - { url = "https://files.pythonhosted.org/packages/cc/8a/81fdd61a0764c0ba1072cd70f67c7f4a83008ceaa61305e20add2ad580c6/pymongo-4.11.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f96683f1dec7d28f12fe43a4d5c0df35d6b80348a9fbf5aac47fa284332a1f92", size = 895365 }, - { url = "https://files.pythonhosted.org/packages/05/60/32910044b2329b7a580a1b4d4f895ecb9616cdffeb57c2d7622214659ac5/pymongo-4.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:157e6a722d051c4bab3e6bc34a1f80fc98101cf2d12139a94e51638d023198c5", size = 895061 }, - { url = "https://files.pythonhosted.org/packages/00/11/30d3351f24cf8e652a0d5fe76e56a50478ea7e81dabcfea7339b1338cccd/pymongo-4.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74503e853758e1eaa1cad2df9c08c8c35a3d26222cf6426d2cde4b2e8593b9b3", size = 1673794 }, - { url = "https://files.pythonhosted.org/packages/a7/90/5ff61e8bad861621361868addeb34c4d2539a4c973a5767d1a266878cb32/pymongo-4.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b630596089106c968ddd252bde3fe692c420e24f214dd39ca517d26343d81012", size = 1738027 }, - { url = "https://files.pythonhosted.org/packages/d0/91/1fdf2843a664f01b8ca83d22cd7accb48f3a5371e61813a5451bc33f93c3/pymongo-4.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7007669eef871079d39a9bbcda0fbcd4252f9b575592804343d0b5c05849d65b", size = 1707052 }, - { url = "https://files.pythonhosted.org/packages/c6/f7/1bd23ea674c957b24256f9ef87875892801cf77b3d2535e59dd78b04db2e/pymongo-4.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d1da6201e1350cfcd4deab599b32237ac2ac591180d44553a2c8e614f2c0e", size = 1677027 }, - { url = "https://files.pythonhosted.org/packages/62/42/077b138efd223ed3cd03f3b8622d2315096e7cd1d9476cd8f1cf219c420e/pymongo-4.11.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:908e65ab42cd4bf1ffeaafe8f11bb86b3f804d54227058794e33fff2963ccc86", size = 1636150 }, - { url = "https://files.pythonhosted.org/packages/c9/a5/f958fcdc944f97d02b6a46c94dbbcdde0d355639c8564974b31b4685e97a/pymongo-4.11.1-cp312-cp312-win32.whl", hash = "sha256:2d1d956c15dd05f1e41c61f0dbcaec59f274db4814cff2c3d9c2508f58004c39", size = 864029 }, - { url = "https://files.pythonhosted.org/packages/d2/e2/b1747eabad8bf172aa66fae50ed7290c4992b8adbeaddbe31944755dbed4/pymongo-4.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:c71655f4188c70032ba56ac7ead688449e4f86a4ccd8e57201ee283f2f591e1d", size = 882299 }, - { url = "https://files.pythonhosted.org/packages/71/b6/dc403a4dda2adaf0f0088d3fcfe6eb17c9e16098eca98f705f2a8e73e693/pymongo-4.11.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f845b46d77a5bcf0c9ee16f11c5bc84c63f4668d9ea4fc54cd923c8d48a1d521", size = 949622 }, - { url = "https://files.pythonhosted.org/packages/52/54/0572ffa3d1c43fec0bdd065c5008b57f7ce4da90e6c6ade0a3c32f34c21e/pymongo-4.11.1-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:aadea45e01103f6ee4e80d76d4a27393a4e2bd93472ce4ebb894781f395e1053", size = 949301 }, - { url = "https://files.pythonhosted.org/packages/7f/d6/5bf309a20892f47898e7bc626cb3169a1120b16b2d7b7a60c3fab607907c/pymongo-4.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63348c850df796199abef7e9afbd86c34449f56731c7ec70b3901df1f5c135b", size = 1937689 }, - { url = "https://files.pythonhosted.org/packages/16/03/1c792ab1e1e5a48fde005bbf739f04846ae48c8c8543a2f1e74ce42d465b/pymongo-4.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dd7656794bfbfbe10723813332ec33eed29bd9bb7fc122c63829fd445eb8425", size = 2015119 }, - { url = "https://files.pythonhosted.org/packages/ad/cc/8765bbec58392929f414b5b26f4c3fe333bfb75ad2f03e92fc48c81bd25f/pymongo-4.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7146ae04300ce6f83b75c639e97c3d0ce873f30edaac4b719ae173e886b9ff90", size = 1978788 }, - { url = "https://files.pythonhosted.org/packages/77/a6/b700ccb2695f3233a12943e78760f68adc19516cf120949ad7c67fdc81a9/pymongo-4.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698fb3d13126c0719077c98b40378cb9a6f4ab4a72b7691779aa01f1f6c66493", size = 1939607 }, - { url = "https://files.pythonhosted.org/packages/d1/10/c0e4c38c7a6d318a80a4dcd8cfc42bfb8a072145f152089e5bc8d60db902/pymongo-4.11.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f415d9569720f408cc4dcc171f60299d454b0414f120666e6fdd349d414bf010", size = 1889041 }, - { url = "https://files.pythonhosted.org/packages/1d/86/7145841c425e4f6b012116db38e3bf4652dce7b8537961b2391e3c52e051/pymongo-4.11.1-cp313-cp313-win32.whl", hash = "sha256:4aa2c40e391ca29a337bef2b46b495c3f24b5696a87a58f0a0676a8bf131f9f8", size = 910368 }, - { url = "https://files.pythonhosted.org/packages/a0/d4/97632e8f230e95a877220c785a69478cae97610e1ec48b5f9be59a926b29/pymongo-4.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:1f871efa14a1f368559edff39ec03799ca108bfa8e1ba330b7ffc05eb958661f", size = 932942 }, - { url = "https://files.pythonhosted.org/packages/db/61/f719841bc59d3d33c6002950e8b9978705b6f9f1dd5efb66e73fe6919a7d/pymongo-4.11.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d293cec18624825937bd7f1d8bacf16104c79ced45a8ada93f08ec8a7a2ad17a", size = 1006140 }, - { url = "https://files.pythonhosted.org/packages/cb/05/f43900c675e158cc024bc82a062dfcaaf12d4d7f574947b73f41d843d189/pymongo-4.11.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7b3ea3494f3e166a524529bb05a4fdda97afd77031fed3a63862fd815288c9df", size = 1006124 }, - { url = "https://files.pythonhosted.org/packages/31/2f/7bccadbcf272b5e8c617a6a329b07671ecfd1faea080d9ab311240b93737/pymongo-4.11.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d12f4c4579076b7351c63378e22f43d4ce4ed4f2c93208b653c4752f18f47309", size = 2266399 }, - { url = "https://files.pythonhosted.org/packages/78/ac/6bf48a7c99b574c9afcb0f68b7a8b9bf9617a1a54773d0f8b1568f8a079c/pymongo-4.11.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0a8aba4818350d2a463e084ae2426d395e725525fe86bd0219240b265dc1ca52", size = 2353616 }, - { url = "https://files.pythonhosted.org/packages/3c/f3/c4cd608ddda2dbc7fa668dd8356bb728313b8eec5b118eca3fa937d4fc8c/pymongo-4.11.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f97f62e6edde15d1d3d08abd7e43f1787ee9e672b1bb8e9d9f5fd6ded24f5599", size = 2312480 }, - { url = 
"https://files.pythonhosted.org/packages/c8/3e/2261ac8e0b6a150d92d35ba2db30b8387c78f9ecba725b0b6a363250f9c6/pymongo-4.11.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a4e82dce301c97bb132dec28a487c1a609dc67948e9db7cbd23485875367204", size = 2263792 }, - { url = "https://files.pythonhosted.org/packages/73/80/41568f1ff09cb73976f7e6f9d11dae63003e4c1156834366ad03f91f27df/pymongo-4.11.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:985a614ec24519f4a3d82aafb766c3f782a452fc46b32112d508a4e19b33fff3", size = 2202805 }, - { url = "https://files.pythonhosted.org/packages/0c/d3/d7ca22d5eb654a451e18f616442b7c6d472ffe76560d6623a2a4ddfd4854/pymongo-4.11.1-cp313-cp313t-win32.whl", hash = "sha256:889d20850d5aaa4f19814462c06488553e70ed4c62195dbaad5d5662884778af", size = 959247 }, - { url = "https://files.pythonhosted.org/packages/95/7b/8d0767251e687966cf19a4ad032d597ab135d26af5ecebbdb8895ea92cf0/pymongo-4.11.1-cp313-cp313t-win_amd64.whl", hash = "sha256:3854db4be39cb9e0c34add1fd7e515deab0b4ee30f3cc3978e057746d119ac12", size = 987871 }, -] - -[[package]] -name = "pyopenssl" -version = "25.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cryptography", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9f/26/e25b4a374b4639e0c235527bbe31c0524f26eda701d79456a7e1877f4cc5/pyopenssl-25.0.0.tar.gz", hash = "sha256:cd2cef799efa3936bb08e8ccb9433a575722b9dd986023f1cabc4ae64e9dac16", size = 179573 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ca/d7/eb76863d2060dcbe7c7e6cccfd95ac02ea0b9acc37745a0d99ff6457aefb/pyOpenSSL-25.0.0-py3-none-any.whl", hash = "sha256:424c247065e46e76a37411b9ab1782541c23bb658bf003772c3405fbaa128e90", size = 56453 }, +sdist = { url = "https://files.pythonhosted.org/packages/fb/43/d5e8993bd43e6f9cbe985e8ae1398eb73309e88694ac2ea618eacbc9cea2/pymongo-4.9.2.tar.gz", hash = "sha256:3e63535946f5df7848307b9031aa921f82bb0cbe45f9b0c3296f2173f9283eb0", size = 1889366 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/af/1ce26b971e520de621239842f2be302749eb752a5cb29dd253f4c210eb0a/pymongo-4.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ab8d54529feb6e29035ba8f0570c99ad36424bc26486c238ad7ce28597bc43c8", size = 833709 }, + { url = "https://files.pythonhosted.org/packages/a6/bd/7bc8224ae96fd9ffe8b2a193469200b9c75787178c5b1955bd20e5d024c7/pymongo-4.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f928bdc152a995cbd0b563fab201b2df873846d11f7a41d1f8cc8a01b35591ab", size = 833974 }, + { url = "https://files.pythonhosted.org/packages/87/2e/3cc96aec7a1d6151677bb108af606ea220205a47255ed53255bfe1d8f31f/pymongo-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6e7251d59fa3dcbb1399a71a3aec63768cebc6b22180b671601c2195fe1f90a", size = 1405440 }, + { url = "https://files.pythonhosted.org/packages/e8/9c/2d5db2fcabc873daead275729c17ddeb2b437010858fe101e8d59a276209/pymongo-4.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e759ed0459e7264a11b6896016f616341a8e4c6ab7f71ae651bd21ffc7e9524", size = 1454720 }, + { url = 
"https://files.pythonhosted.org/packages/6f/84/b382e7f817fd39dcd02ae69e21afd538251acf5de1904606a9908d8895fe/pymongo-4.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f3fc60f242191840ccf02b898bc615b5141fbb70064f38f7e60fcaa35d3b5efd", size = 1431625 }, + { url = "https://files.pythonhosted.org/packages/87/f5/653f9af6a7625353138bded4548a5a48729352b963fc2a059e07241b37c2/pymongo-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c798351666ac97a0ddaa823689061c3af949c2d6acf7fb2d9ab0a7f465ced79", size = 1409027 }, + { url = "https://files.pythonhosted.org/packages/36/26/f4159209cf6229ce0a5ac37f093dab49495c51daad8ca835279f0058b060/pymongo-4.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aac78b5fdd49ed8cae49adf76befacb02293a23b412676775c4715148e166d85", size = 1378524 }, + { url = "https://files.pythonhosted.org/packages/57/3c/78c60e721a975b836922467410dd4b9616ac84f096eec00f7bde9e889b2b/pymongo-4.9.2-cp310-cp310-win32.whl", hash = "sha256:bf77bf175c315e299a91332c2bbebc097c4d4fcc8713e513a9861684aa39023a", size = 810564 }, + { url = "https://files.pythonhosted.org/packages/71/cf/790c8da7fdd55e5e824b08eaf63355732bbf278ebcb98615e723feb05702/pymongo-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:c42b5aad8971256365bfd0a545fb1c7a199c93db80decd298ea2f987419e2a6d", size = 825019 }, + { url = "https://files.pythonhosted.org/packages/a8/b4/7af80304a0798526fac959e3de651b0747472c049c8b89a6c15fed2026f6/pymongo-4.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:99e40f44877b32bf4b3c46ceed2228f08c222cf7dec8a4366dd192a1429143fa", size = 887499 }, + { url = "https://files.pythonhosted.org/packages/33/ee/5389229774f842bd92a123fd3ea4f2d72b474bde9315ff00e889fe104a0d/pymongo-4.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6f6834d575ed87edc7dfcab4501d961b6a423b3839edd29ecb1382eee7736777", size = 887755 }, + { url = "https://files.pythonhosted.org/packages/d4/fd/3f0ae0fd3a7049ec67ab8f952020bc9fad841791d52d8c51405bd91b3c9b/pymongo-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3010018f5672e5b7e8d096dea9f1ea6545b05345ff0eb1754f6ee63785550773", size = 1647336 }, + { url = "https://files.pythonhosted.org/packages/00/b7/0472d51778e9e22b2ffd5ae9a401888525c4872cb2073f1bff8d5ae9659b/pymongo-4.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69394ee9f0ce38ff71266bad01b7e045cd75e58500ebad5d72187cbabf2e652a", size = 1713193 }, + { url = "https://files.pythonhosted.org/packages/8c/ac/aa41cb291107bb16bae286d7b9f2c868e393765830bc173609ae4dc9a3ae/pymongo-4.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87b18094100f21615d9db99c255dcd9e93e476f10fb03c1d3632cf4b82d201d2", size = 1681720 }, + { url = "https://files.pythonhosted.org/packages/dc/70/ac12eb58bd46a7254daaa4d39e7c4109983ee2227dac44df6587954fe345/pymongo-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3039e093d28376d6a54bdaa963ca12230c8a53d7b19c8e6368e19bcfbd004176", size = 1652109 }, + { url = "https://files.pythonhosted.org/packages/d3/20/38f71e0f1c7878b287305b2965cebe327fc5626ecca83ea52a272968cbe2/pymongo-4.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ab42d9ee93fe6b90020c42cba5bfb43a2b4660951225d137835efc21940da48", size = 1611503 }, + { url = 
"https://files.pythonhosted.org/packages/9b/4c/d3b26e1040c9538b9c8aed005ec18af7515c6dd3091aabfbf6c30a3b3b1a/pymongo-4.9.2-cp311-cp311-win32.whl", hash = "sha256:a663ca60e187a248d370c58961e40f5463077d2b43831eb92120ea28a79ecf96", size = 855570 }, + { url = "https://files.pythonhosted.org/packages/40/3d/7de1a4cf51bf2b10bb9f43ffa208acad0d64c18994ca8d83f490edef6834/pymongo-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:24e7b6887bbfefd05afed26a99a2c69459e2daa351a43a410de0d6c0ee3cce4e", size = 874715 }, + { url = "https://files.pythonhosted.org/packages/a1/08/7d95aab0463dc5a2c460a0b4e50a45a743afbe20986f47f87a9a88f43c0c/pymongo-4.9.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8083bbe8cb10bb33dca4d93f8223dd8d848215250bb73867374650bac5fe69e1", size = 941617 }, + { url = "https://files.pythonhosted.org/packages/bb/28/40613d8d97fc33bf2b9187446a6746925623aa04a9a27c9b058e97076f7a/pymongo-4.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b8c636bf557c7166e3799bbf1120806ca39e3f06615b141c88d9c9ceae4d8c", size = 941394 }, + { url = "https://files.pythonhosted.org/packages/df/b2/7f1a0d75f538c0dcaa004ea69e28706fa3ca72d848e0a5a7dafd30939fff/pymongo-4.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8aac5dce28454f47576063fbad31ea9789bba67cab86c95788f97aafd810e65b", size = 1907396 }, + { url = "https://files.pythonhosted.org/packages/ba/70/9304bae47a361a4b12adb5be714bad41478c0e5bc3d6cf403b328d6398a0/pymongo-4.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1d5e7123af1fddf15b2b53e58f20bf5242884e671bcc3860f5e954fe13aeddd", size = 1986029 }, + { url = "https://files.pythonhosted.org/packages/ae/51/ac0378d001995c4a705da64a4a2b8e1732f95de5080b752d69f452930cc7/pymongo-4.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe97c847b56d61e533a7af0334193d6b28375b9189effce93129c7e4733794a9", size = 1949088 }, + { url = "https://files.pythonhosted.org/packages/1a/30/e93dc808039dc29fc47acee64f128aa650aacae3e4b57b68e01ff1001cda/pymongo-4.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ad54433a996e2d1985a9cd8fc82538ca8747c95caae2daf453600cc8c317f9", size = 1910516 }, + { url = "https://files.pythonhosted.org/packages/2b/34/895b9cad3bd5342d5ab51a853ed3a814840ce281d55c6928968e9f3f49f5/pymongo-4.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:98b9cade40f5b13e04492a42ae215c3721099be1014ddfe0fbd23f27e4f62c0c", size = 1860499 }, + { url = "https://files.pythonhosted.org/packages/24/7e/167818f324bf2122d45551680671a3c6406a345d3fcace4e737f57bda4e4/pymongo-4.9.2-cp312-cp312-win32.whl", hash = "sha256:dde6068ae7c62ea8ee2c5701f78c6a75618cada7e11f03893687df87709558de", size = 901282 }, + { url = "https://files.pythonhosted.org/packages/12/6b/b7ffa7114177fc1c60ae529512b82629ff7e25d19be88e97f2d0ddd16717/pymongo-4.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:e1ab6cd7cd2d38ffc7ccdc79fdc166c7a91a63f844a96e3e6b2079c054391c68", size = 924925 }, + { url = "https://files.pythonhosted.org/packages/5b/d6/b57ef5f376e2e171218a98b8c30dfd001aa5cac6338aa7f3ca76e6315667/pymongo-4.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1ad79d6a74f439a068caf9a1e2daeabc20bf895263435484bbd49e90fbea7809", size = 995233 }, + { url = "https://files.pythonhosted.org/packages/32/80/4ec79e36e99f86a063d297a334883fb5115ad70e9af46142b8dc33f636fa/pymongo-4.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:877699e21703717507cbbea23e75b419f81a513b50b65531e1698df08b2d7094", size = 995025 }, + { url = "https://files.pythonhosted.org/packages/c4/fd/8f5464321fdf165700f10aec93b07a75c3537be593291ac2f8c8f5f69bd0/pymongo-4.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc9322ce7cf116458a637ac10517b0c5926a8211202be6dbdc51dab4d4a9afc8", size = 2167429 }, + { url = "https://files.pythonhosted.org/packages/da/42/0f749d805d17f5b17f48f2ee1aaf2a74e67939607b87b245e5ec9b4c1452/pymongo-4.9.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cca029f46acf475504eedb33c7839f030c4bc4f946dcba12d9a954cc48850b79", size = 2258834 }, + { url = "https://files.pythonhosted.org/packages/b8/52/b0c1b8e9cbeae234dd1108a906f30b680755533b7229f9f645d7e7adad25/pymongo-4.9.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c8c861e77527eec5a4b7363c16030dd0374670b620b08a5300f97594bbf5a40", size = 2216412 }, + { url = "https://files.pythonhosted.org/packages/4d/20/53395473a1023bb6a670b68fbfa937664c75b354c2444463075ff43523e2/pymongo-4.9.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fc70326ae71b3c7b8d6af82f46bb71dafdba3c8f335b29382ae9cf263ef3a5c", size = 2168891 }, + { url = "https://files.pythonhosted.org/packages/01/b7/fa4030279d8a4a9c0a969a719b6b89da8a59795b5cdf129ef553fce6d1f2/pymongo-4.9.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba9d2f6df977fee24437f82f7412460b0628cd6b961c4235c9cff71577a5b61f", size = 2109380 }, + { url = "https://files.pythonhosted.org/packages/f3/55/f252972a039fc6bfca748625c5080d6f88801eb61f118fe79cde47342d6a/pymongo-4.9.2-cp313-cp313-win32.whl", hash = "sha256:b3254769e708bc4aa634745c262081d13c841a80038eff3afd15631540a1d227", size = 946962 }, + { url = "https://files.pythonhosted.org/packages/7b/36/88d8438699ba09b714dece00a4a7462330c1d316f5eaa28db450572236f6/pymongo-4.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:169b85728cc17800344ba17d736375f400ef47c9fbb4c42910c4b3e7c0247382", size = 975113 }, ] [[package]] @@ -4218,14 +3999,14 @@ wheels = [ [[package]] name = "pytest-asyncio" -version = "0.25.3" +version = "0.25.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f2/a8/ecbc8ede70921dd2f544ab1cadd3ff3bf842af27f87bbdea774c7baa1d38/pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a", size = 54239 } +sdist = { url = "https://files.pythonhosted.org/packages/72/df/adcc0d60f1053d74717d21d58c0048479e9cab51464ce0d2965b086bd0e2/pytest_asyncio-0.25.2.tar.gz", hash = "sha256:3f8ef9a98f45948ea91a0ed3dc4268b5326c0e7bce73892acc654df4262ad45f", size = 53950 } wheels = [ - { url = "https://files.pythonhosted.org/packages/67/17/3493c5624e48fd97156ebaec380dcaafee9506d7e2c46218ceebbb57d7de/pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3", size = 19467 }, + { url = "https://files.pythonhosted.org/packages/61/d8/defa05ae50dcd6019a95527200d3b3980043df5aa445d40cb0ef9f7f98ab/pytest_asyncio-0.25.2-py3-none-any.whl", hash = "sha256:0d0bb693f7b99da304a0634afc0a4b19e49d5e0de2d670f38dc4bfa5727c5075", size = 19400 }, ] [[package]] @@ -4292,22 +4073,13 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, ] -[[package]] -name = "python-ulid" -version = "3.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9a/db/e5e67aeca9c2420cb91f94007f30693cc3628ae9783a565fd33ffb3fbfdd/python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f", size = 28822 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/63/4e/cc2ba2c0df2589f35a4db8473b8c2ba9bbfc4acdec4a94f1c78934d2350f/python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31", size = 11194 }, -] - [[package]] name = "pytz" -version = "2025.1" +version = "2024.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5f/57/df1c9157c8d5a05117e455d66fd7cf6dbc46974f832b1058ed4856785d8a/pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e", size = 319617 } +sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692 } wheels = [ - { url = "https://files.pythonhosted.org/packages/eb/38/ac33370d784287baa1c3d538978b5e2ea064d4c1b93ffbd12826c190dd10/pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57", size = 507930 }, + { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002 }, ] [[package]] @@ -4375,127 +4147,93 @@ wheels = [ [[package]] name = "pyzmq" -version = "26.2.1" +version = "26.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "(implementation_name == 'pypy' and sys_platform == 'darwin') or (implementation_name == 'pypy' and sys_platform == 'linux') or (implementation_name == 'pypy' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5a/e3/8d0382cb59feb111c252b54e8728257416a38ffcb2243c4e4775a3c990fe/pyzmq-26.2.1.tar.gz", hash = "sha256:17d72a74e5e9ff3829deb72897a175333d3ef5b5413948cae3cf7ebf0b02ecca", size = 278433 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/70/3d/c2d9d46c033d1b51692ea49a22439f7f66d91d5c938e8b5c56ed7a2151c2/pyzmq-26.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:f39d1227e8256d19899d953e6e19ed2ccb689102e6d85e024da5acf410f301eb", size = 1345451 }, - { url = "https://files.pythonhosted.org/packages/0e/df/4754a8abcdeef280651f9bb51446c47659910940b392a66acff7c37f5cef/pyzmq-26.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a23948554c692df95daed595fdd3b76b420a4939d7a8a28d6d7dea9711878641", size = 942766 }, - { url = "https://files.pythonhosted.org/packages/74/da/e6053a3b13c912eded6c2cdeee22ff3a4c33820d17f9eb24c7b6e957ffe7/pyzmq-26.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95f5728b367a042df146cec4340d75359ec6237beebf4a8f5cf74657c65b9257", size = 678488 }, - { url = 
"https://files.pythonhosted.org/packages/9e/50/614934145244142401ca174ca81071777ab93aa88173973ba0154f491e09/pyzmq-26.2.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95f7b01b3f275504011cf4cf21c6b885c8d627ce0867a7e83af1382ebab7b3ff", size = 917115 }, - { url = "https://files.pythonhosted.org/packages/80/2b/ebeb7bc4fc8e9e61650b2e09581597355a4341d413fa9b2947d7a6558119/pyzmq-26.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a00370a2ef2159c310e662c7c0f2d030f437f35f478bb8b2f70abd07e26b24", size = 874162 }, - { url = "https://files.pythonhosted.org/packages/79/48/93210621c331ad16313dc2849801411fbae10d91d878853933f2a85df8e7/pyzmq-26.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:8531ed35dfd1dd2af95f5d02afd6545e8650eedbf8c3d244a554cf47d8924459", size = 874180 }, - { url = "https://files.pythonhosted.org/packages/f0/8b/40924b4d8e33bfdd54c1970fb50f327e39b90b902f897cf09b30b2e9ac48/pyzmq-26.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cdb69710e462a38e6039cf17259d328f86383a06c20482cc154327968712273c", size = 1208139 }, - { url = "https://files.pythonhosted.org/packages/c8/b2/82d6675fc89bd965eae13c45002c792d33f06824589844b03f8ea8fc6d86/pyzmq-26.2.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e7eeaef81530d0b74ad0d29eec9997f1c9230c2f27242b8d17e0ee67662c8f6e", size = 1520666 }, - { url = "https://files.pythonhosted.org/packages/9d/e2/5ff15f2d3f920dcc559d477bd9bb3faacd6d79fcf7c5448e585c78f84849/pyzmq-26.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:361edfa350e3be1f987e592e834594422338d7174364763b7d3de5b0995b16f3", size = 1420056 }, - { url = "https://files.pythonhosted.org/packages/40/a2/f9bbeccf7f75aa0d8963e224e5730abcefbf742e1f2ae9ea60fd9d6ff72b/pyzmq-26.2.1-cp310-cp310-win32.whl", hash = "sha256:637536c07d2fb6a354988b2dd1d00d02eb5dd443f4bbee021ba30881af1c28aa", size = 583874 }, - { url = "https://files.pythonhosted.org/packages/56/b1/44f513135843272f0e12f5aebf4af35839e2a88eb45411f2c8c010d8c856/pyzmq-26.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:45fad32448fd214fbe60030aa92f97e64a7140b624290834cc9b27b3a11f9473", size = 647367 }, - { url = "https://files.pythonhosted.org/packages/27/9c/1bef14a37b02d651a462811bbdb1390b61cd4a5b5e95cbd7cc2d60ef848c/pyzmq-26.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:d9da0289d8201c8a29fd158aaa0dfe2f2e14a181fd45e2dc1fbf969a62c1d594", size = 561784 }, - { url = "https://files.pythonhosted.org/packages/b9/03/5ecc46a6ed5971299f5c03e016ca637802d8660e44392bea774fb7797405/pyzmq-26.2.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:c059883840e634a21c5b31d9b9a0e2b48f991b94d60a811092bc37992715146a", size = 1346032 }, - { url = "https://files.pythonhosted.org/packages/40/51/48fec8f990ee644f461ff14c8fe5caa341b0b9b3a0ad7544f8ef17d6f528/pyzmq-26.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed038a921df836d2f538e509a59cb638df3e70ca0fcd70d0bf389dfcdf784d2a", size = 943324 }, - { url = "https://files.pythonhosted.org/packages/c1/f4/f322b389727c687845e38470b48d7a43c18a83f26d4d5084603c6c3f79ca/pyzmq-26.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9027a7fcf690f1a3635dc9e55e38a0d6602dbbc0548935d08d46d2e7ec91f454", size = 678418 }, - { url = "https://files.pythonhosted.org/packages/a8/df/2834e3202533bd05032d83e02db7ac09fa1be853bbef59974f2b2e3a8557/pyzmq-26.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d75fcb00a1537f8b0c0bb05322bc7e35966148ffc3e0362f0369e44a4a1de99", size = 915466 }, - { url = 
"https://files.pythonhosted.org/packages/b5/e2/45c0f6e122b562cb8c6c45c0dcac1160a4e2207385ef9b13463e74f93031/pyzmq-26.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0019cc804ac667fb8c8eaecdb66e6d4a68acf2e155d5c7d6381a5645bd93ae4", size = 873347 }, - { url = "https://files.pythonhosted.org/packages/de/b9/3e0fbddf8b87454e914501d368171466a12550c70355b3844115947d68ea/pyzmq-26.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f19dae58b616ac56b96f2e2290f2d18730a898a171f447f491cc059b073ca1fa", size = 874545 }, - { url = "https://files.pythonhosted.org/packages/1f/1c/1ee41d6e10b2127263b1994bc53b9e74ece015b0d2c0a30e0afaf69b78b2/pyzmq-26.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f5eeeb82feec1fc5cbafa5ee9022e87ffdb3a8c48afa035b356fcd20fc7f533f", size = 1208630 }, - { url = "https://files.pythonhosted.org/packages/3d/a9/50228465c625851a06aeee97c74f253631f509213f979166e83796299c60/pyzmq-26.2.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:000760e374d6f9d1a3478a42ed0c98604de68c9e94507e5452951e598ebecfba", size = 1519568 }, - { url = "https://files.pythonhosted.org/packages/c6/f2/6360b619e69da78863c2108beb5196ae8b955fe1e161c0b886b95dc6b1ac/pyzmq-26.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:817fcd3344d2a0b28622722b98500ae9c8bfee0f825b8450932ff19c0b15bebd", size = 1419677 }, - { url = "https://files.pythonhosted.org/packages/da/d5/f179da989168f5dfd1be8103ef508ade1d38a8078dda4f10ebae3131a490/pyzmq-26.2.1-cp311-cp311-win32.whl", hash = "sha256:88812b3b257f80444a986b3596e5ea5c4d4ed4276d2b85c153a6fbc5ca457ae7", size = 582682 }, - { url = "https://files.pythonhosted.org/packages/60/50/e5b2e9de3ffab73ff92bee736216cf209381081fa6ab6ba96427777d98b1/pyzmq-26.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:ef29630fde6022471d287c15c0a2484aba188adbfb978702624ba7a54ddfa6c1", size = 648128 }, - { url = "https://files.pythonhosted.org/packages/d9/fe/7bb93476dd8405b0fc9cab1fd921a08bd22d5e3016aa6daea1a78d54129b/pyzmq-26.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:f32718ee37c07932cc336096dc7403525301fd626349b6eff8470fe0f996d8d7", size = 562465 }, - { url = "https://files.pythonhosted.org/packages/9c/b9/260a74786f162c7f521f5f891584a51d5a42fd15f5dcaa5c9226b2865fcc/pyzmq-26.2.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:a6549ecb0041dafa55b5932dcbb6c68293e0bd5980b5b99f5ebb05f9a3b8a8f3", size = 1348495 }, - { url = "https://files.pythonhosted.org/packages/bf/73/8a0757e4b68f5a8ccb90ddadbb76c6a5f880266cdb18be38c99bcdc17aaa/pyzmq-26.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0250c94561f388db51fd0213cdccbd0b9ef50fd3c57ce1ac937bf3034d92d72e", size = 945035 }, - { url = "https://files.pythonhosted.org/packages/cf/de/f02ec973cd33155bb772bae33ace774acc7cc71b87b25c4829068bec35de/pyzmq-26.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ee4297d9e4b34b5dc1dd7ab5d5ea2cbba8511517ef44104d2915a917a56dc8", size = 671213 }, - { url = "https://files.pythonhosted.org/packages/d1/80/8fc583085f85ac91682744efc916888dd9f11f9f75a31aef1b78a5486c6c/pyzmq-26.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2a9cb17fd83b7a3a3009901aca828feaf20aa2451a8a487b035455a86549c09", size = 908750 }, - { url = "https://files.pythonhosted.org/packages/c3/25/0b4824596f261a3cc512ab152448b383047ff5f143a6906a36876415981c/pyzmq-26.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:786dd8a81b969c2081b31b17b326d3a499ddd1856e06d6d79ad41011a25148da", size = 865416 }, - { url = 
"https://files.pythonhosted.org/packages/a1/d1/6fda77a034d02034367b040973fd3861d945a5347e607bd2e98c99f20599/pyzmq-26.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2d88ba221a07fc2c5581565f1d0fe8038c15711ae79b80d9462e080a1ac30435", size = 865922 }, - { url = "https://files.pythonhosted.org/packages/ad/81/48f7fd8a71c427412e739ce576fc1ee14f3dc34527ca9b0076e471676183/pyzmq-26.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c84c1297ff9f1cd2440da4d57237cb74be21fdfe7d01a10810acba04e79371a", size = 1201526 }, - { url = "https://files.pythonhosted.org/packages/c7/d8/818f15c6ef36b5450e435cbb0d3a51599fc884a5d2b27b46b9c00af68ef1/pyzmq-26.2.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:46d4ebafc27081a7f73a0f151d0c38d4291656aa134344ec1f3d0199ebfbb6d4", size = 1512808 }, - { url = "https://files.pythonhosted.org/packages/d9/c4/b3edb7d0ae82ad6fb1a8cdb191a4113c427a01e85139906f3b655b07f4f8/pyzmq-26.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:91e2bfb8e9a29f709d51b208dd5f441dc98eb412c8fe75c24ea464734ccdb48e", size = 1411836 }, - { url = "https://files.pythonhosted.org/packages/69/1c/151e3d42048f02cc5cd6dfc241d9d36b38375b4dee2e728acb5c353a6d52/pyzmq-26.2.1-cp312-cp312-win32.whl", hash = "sha256:4a98898fdce380c51cc3e38ebc9aa33ae1e078193f4dc641c047f88b8c690c9a", size = 581378 }, - { url = "https://files.pythonhosted.org/packages/b6/b9/d59a7462848aaab7277fddb253ae134a570520115d80afa85e952287e6bc/pyzmq-26.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:a0741edbd0adfe5f30bba6c5223b78c131b5aa4a00a223d631e5ef36e26e6d13", size = 643737 }, - { url = "https://files.pythonhosted.org/packages/55/09/f37e707937cce328944c1d57e5e50ab905011d35252a0745c4f7e5822a76/pyzmq-26.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:e5e33b1491555843ba98d5209439500556ef55b6ab635f3a01148545498355e5", size = 558303 }, - { url = "https://files.pythonhosted.org/packages/4f/2e/fa7a91ce349975971d6aa925b4c7e1a05abaae99b97ade5ace758160c43d/pyzmq-26.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:099b56ef464bc355b14381f13355542e452619abb4c1e57a534b15a106bf8e23", size = 942331 }, - { url = "https://files.pythonhosted.org/packages/64/2b/1f10b34b6dc7ff4b40f668ea25ba9b8093ce61d874c784b90229b367707b/pyzmq-26.2.1-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:651726f37fcbce9f8dd2a6dab0f024807929780621890a4dc0c75432636871be", size = 1345831 }, - { url = "https://files.pythonhosted.org/packages/4c/8d/34884cbd4a8ec050841b5fb58d37af136766a9f95b0b2634c2971deb09da/pyzmq-26.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57dd4d91b38fa4348e237a9388b4423b24ce9c1695bbd4ba5a3eada491e09399", size = 670773 }, - { url = "https://files.pythonhosted.org/packages/0f/f4/d4becfcf9e416ad2564f18a6653f7c6aa917da08df5c3760edb0baa1c863/pyzmq-26.2.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d51a7bfe01a48e1064131f3416a5439872c533d756396be2b39e3977b41430f9", size = 908836 }, - { url = "https://files.pythonhosted.org/packages/07/fa/ab105f1b86b85cb2e821239f1d0900fccd66192a91d97ee04661b5436b4d/pyzmq-26.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7154d228502e18f30f150b7ce94f0789d6b689f75261b623f0fdc1eec642aab", size = 865369 }, - { url = "https://files.pythonhosted.org/packages/c9/48/15d5f415504572dd4b92b52db5de7a5befc76bb75340ba9f36f71306a66d/pyzmq-26.2.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:f1f31661a80cc46aba381bed475a9135b213ba23ca7ff6797251af31510920ce", size = 865676 }, - { url = 
"https://files.pythonhosted.org/packages/7e/35/2d91bcc7ccbb56043dd4d2c1763f24a8de5f05e06a134f767a7fb38e149c/pyzmq-26.2.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:290c96f479504439b6129a94cefd67a174b68ace8a8e3f551b2239a64cfa131a", size = 1201457 }, - { url = "https://files.pythonhosted.org/packages/6d/bb/aa7c5119307a5762b8dca6c9db73e3ab4bccf32b15d7c4f376271ff72b2b/pyzmq-26.2.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f2c307fbe86e18ab3c885b7e01de942145f539165c3360e2af0f094dd440acd9", size = 1513035 }, - { url = "https://files.pythonhosted.org/packages/4f/4c/527e6650c2fccec7750b783301329c8a8716d59423818afb67282304ce5a/pyzmq-26.2.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b314268e716487bfb86fcd6f84ebbe3e5bec5fac75fdf42bc7d90fdb33f618ad", size = 1411881 }, - { url = "https://files.pythonhosted.org/packages/89/9f/e4412ea1b3e220acc21777a5edba8885856403d29c6999aaf00a9459eb03/pyzmq-26.2.1-cp313-cp313-win32.whl", hash = "sha256:edb550616f567cd5603b53bb52a5f842c0171b78852e6fc7e392b02c2a1504bb", size = 581354 }, - { url = "https://files.pythonhosted.org/packages/55/cd/f89dd3e9fc2da0d1619a82c4afb600c86b52bc72d7584953d460bc8d5027/pyzmq-26.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:100a826a029c8ef3d77a1d4c97cbd6e867057b5806a7276f2bac1179f893d3bf", size = 643560 }, - { url = "https://files.pythonhosted.org/packages/a7/99/5de4f8912860013f1116f818a0047659bc20d71d1bc1d48f874bdc2d7b9c/pyzmq-26.2.1-cp313-cp313-win_arm64.whl", hash = "sha256:6991ee6c43e0480deb1b45d0c7c2bac124a6540cba7db4c36345e8e092da47ce", size = 558037 }, - { url = "https://files.pythonhosted.org/packages/06/0b/63b6d7a2f07a77dbc9768c6302ae2d7518bed0c6cee515669ca0d8ec743e/pyzmq-26.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:25e720dba5b3a3bb2ad0ad5d33440babd1b03438a7a5220511d0c8fa677e102e", size = 938580 }, - { url = "https://files.pythonhosted.org/packages/85/38/e5e2c3ffa23ea5f95f1c904014385a55902a11a67cd43c10edf61a653467/pyzmq-26.2.1-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:9ec6abfb701437142ce9544bd6a236addaf803a32628d2260eb3dbd9a60e2891", size = 1339670 }, - { url = "https://files.pythonhosted.org/packages/d2/87/da5519ed7f8b31e4beee8f57311ec02926822fe23a95120877354cd80144/pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e1eb9d2bfdf5b4e21165b553a81b2c3bd5be06eeddcc4e08e9692156d21f1f6", size = 660983 }, - { url = "https://files.pythonhosted.org/packages/f6/e8/1ca6a2d59562e04d326a026c9e3f791a6f1a276ebde29da478843a566fdb/pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90dc731d8e3e91bcd456aa7407d2eba7ac6f7860e89f3766baabb521f2c1de4a", size = 896509 }, - { url = "https://files.pythonhosted.org/packages/5c/e5/0b4688f7c74bea7e4f1e920da973fcd7d20175f4f1181cb9b692429c6bb9/pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6a93d684278ad865fc0b9e89fe33f6ea72d36da0e842143891278ff7fd89c3", size = 853196 }, - { url = "https://files.pythonhosted.org/packages/8f/35/c17241da01195001828319e98517683dad0ac4df6fcba68763d61b630390/pyzmq-26.2.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:c1bb37849e2294d519117dd99b613c5177934e5c04a5bb05dd573fa42026567e", size = 855133 }, - { url = "https://files.pythonhosted.org/packages/d2/14/268ee49bbecc3f72e225addeac7f0e2bd5808747b78c7bf7f87ed9f9d5a8/pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:632a09c6d8af17b678d84df442e9c3ad8e4949c109e48a72f805b22506c4afa7", size = 1191612 }, - { url = 
"https://files.pythonhosted.org/packages/5e/02/6394498620b1b4349b95c534f3ebc3aef95f39afbdced5ed7ee315c49c14/pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:fc409c18884eaf9ddde516d53af4f2db64a8bc7d81b1a0c274b8aa4e929958e8", size = 1500824 }, - { url = "https://files.pythonhosted.org/packages/17/fc/b79f0b72891cbb9917698add0fede71dfb64e83fa3481a02ed0e78c34be7/pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:17f88622b848805d3f6427ce1ad5a2aa3cf61f12a97e684dab2979802024d460", size = 1399943 }, - { url = "https://files.pythonhosted.org/packages/65/d1/e630a75cfb2534574a1258fda54d02f13cf80b576d4ce6d2aa478dc67829/pyzmq-26.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:380816d298aed32b1a97b4973a4865ef3be402a2e760204509b52b6de79d755d", size = 847743 }, - { url = "https://files.pythonhosted.org/packages/27/df/f94a711b4f6c4b41e227f9a938103f52acf4c2e949d91cbc682495a48155/pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97cbb368fd0debdbeb6ba5966aa28e9a1ae3396c7386d15569a6ca4be4572b99", size = 570991 }, - { url = "https://files.pythonhosted.org/packages/bf/08/0c6f97fb3c9dbfa23382f0efaf8f9aa1396a08a3358974eaae3ee659ed5c/pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf7b5942c6b0dafcc2823ddd9154f419147e24f8df5b41ca8ea40a6db90615c", size = 799664 }, - { url = "https://files.pythonhosted.org/packages/05/14/f4d4fd8bb8988c667845734dd756e9ee65b9a17a010d5f288dfca14a572d/pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fe6e28a8856aea808715f7a4fc11f682b9d29cac5d6262dd8fe4f98edc12d53", size = 758156 }, - { url = "https://files.pythonhosted.org/packages/e3/fe/72e7e166bda3885810bee7b23049133e142f7c80c295bae02c562caeea16/pyzmq-26.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bd8fdee945b877aa3bffc6a5a8816deb048dab0544f9df3731ecd0e54d8c84c9", size = 556563 }, -] - -[[package]] -name = "qdrant-client" -version = "1.12.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'darwin'", - "python_full_version >= '3.13' and sys_platform == 'linux'", - "python_full_version >= '3.13' and sys_platform == 'win32'", -] -dependencies = [ - { name = "grpcio", marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform == 'win32')" }, - { name = "grpcio-tools", marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform == 'win32')" }, - { name = "httpx", extra = ["http2"], marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform == 'win32')" }, - { name = "numpy", marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform == 'win32')" }, - { name = "portalocker", marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform == 'win32')" }, - { name = "pydantic", marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or 
(python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform == 'win32')" }, - { name = "urllib3", marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform == 'win32')" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/15/5e/ec560881e086f893947c8798949c72de5cfae9453fd05c2250f8dfeaa571/qdrant_client-1.12.1.tar.gz", hash = "sha256:35e8e646f75b7b883b3d2d0ee4c69c5301000bba41c82aa546e985db0f1aeb72", size = 237441 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/68/c0/eef4fe9dad6d41333f7dc6567fa8144ffc1837c8a0edfc2317d50715335f/qdrant_client-1.12.1-py3-none-any.whl", hash = "sha256:b2d17ce18e9e767471368380dd3bbc4a0e3a0e2061fedc9af3542084b48451e0", size = 267171 }, +sdist = { url = "https://files.pythonhosted.org/packages/fd/05/bed626b9f7bb2322cdbbf7b4bd8f54b1b617b0d2ab2d3547d6e39428a48e/pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f", size = 271975 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/a8/9837c39aba390eb7d01924ace49d761c8dbe7bc2d6082346d00c8332e431/pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629", size = 1340058 }, + { url = "https://files.pythonhosted.org/packages/a2/1f/a006f2e8e4f7d41d464272012695da17fb95f33b54342612a6890da96ff6/pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b", size = 1008818 }, + { url = "https://files.pythonhosted.org/packages/b6/09/b51b6683fde5ca04593a57bbe81788b6b43114d8f8ee4e80afc991e14760/pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764", size = 673199 }, + { url = "https://files.pythonhosted.org/packages/c9/78/486f3e2e824f3a645238332bf5a4c4b4477c3063033a27c1e4052358dee2/pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c", size = 911762 }, + { url = "https://files.pythonhosted.org/packages/5e/3b/2eb1667c9b866f53e76ee8b0c301b0469745a23bd5a87b7ee3d5dd9eb6e5/pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a", size = 868773 }, + { url = "https://files.pythonhosted.org/packages/16/29/ca99b4598a9dc7e468b5417eda91f372b595be1e3eec9b7cbe8e5d3584e8/pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88", size = 868834 }, + { url = "https://files.pythonhosted.org/packages/ad/e5/9efaeb1d2f4f8c50da04144f639b042bc52869d3a206d6bf672ab3522163/pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f", size = 1202861 }, + { url = "https://files.pythonhosted.org/packages/c3/62/c721b5608a8ac0a69bb83cbb7d07a56f3ff00b3991a138e44198a16f94c7/pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282", size = 1515304 }, + { url = "https://files.pythonhosted.org/packages/87/84/e8bd321aa99b72f48d4606fc5a0a920154125bd0a4608c67eab742dab087/pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea", size = 1414712 }, + { url = "https://files.pythonhosted.org/packages/cd/cd/420e3fd1ac6977b008b72e7ad2dae6350cc84d4c5027fc390b024e61738f/pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2", size = 578113 }, + { url = "https://files.pythonhosted.org/packages/5c/57/73930d56ed45ae0cb4946f383f985c855c9b3d4063f26416998f07523c0e/pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971", size = 641631 }, + { url = "https://files.pythonhosted.org/packages/61/d2/ae6ac5c397f1ccad59031c64beaafce7a0d6182e0452cc48f1c9c87d2dd0/pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa", size = 543528 }, + { url = "https://files.pythonhosted.org/packages/12/20/de7442172f77f7c96299a0ac70e7d4fb78cd51eca67aa2cf552b66c14196/pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218", size = 1340639 }, + { url = "https://files.pythonhosted.org/packages/98/4d/5000468bd64c7910190ed0a6c76a1ca59a68189ec1f007c451dc181a22f4/pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4", size = 1008710 }, + { url = "https://files.pythonhosted.org/packages/e1/bf/c67fd638c2f9fbbab8090a3ee779370b97c82b84cc12d0c498b285d7b2c0/pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef", size = 673129 }, + { url = "https://files.pythonhosted.org/packages/86/94/99085a3f492aa538161cbf27246e8886ff850e113e0c294a5b8245f13b52/pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317", size = 910107 }, + { url = "https://files.pythonhosted.org/packages/31/1d/346809e8a9b999646d03f21096428453465b1bca5cd5c64ecd048d9ecb01/pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf", size = 867960 }, + { url = "https://files.pythonhosted.org/packages/ab/68/6fb6ae5551846ad5beca295b7bca32bf0a7ce19f135cb30e55fa2314e6b6/pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e", size = 869204 }, + { url = "https://files.pythonhosted.org/packages/0f/f9/18417771dee223ccf0f48e29adf8b4e25ba6d0e8285e33bcbce078070bc3/pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37", size = 1203351 }, + { url = "https://files.pythonhosted.org/packages/e0/46/f13e67fe0d4f8a2315782cbad50493de6203ea0d744610faf4d5f5b16e90/pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3", size = 1514204 }, + { url = "https://files.pythonhosted.org/packages/50/11/ddcf7343b7b7a226e0fc7b68cbf5a5bb56291fac07f5c3023bb4c319ebb4/pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6", size = 1414339 }, + { url = "https://files.pythonhosted.org/packages/01/14/1c18d7d5b7be2708f513f37c61bfadfa62161c10624f8733f1c8451b3509/pyzmq-26.2.0-cp311-cp311-win32.whl", hash = 
"sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4", size = 576928 }, + { url = "https://files.pythonhosted.org/packages/3b/1b/0a540edd75a41df14ec416a9a500b9fec66e554aac920d4c58fbd5756776/pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5", size = 642317 }, + { url = "https://files.pythonhosted.org/packages/98/77/1cbfec0358078a4c5add529d8a70892db1be900980cdb5dd0898b3d6ab9d/pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003", size = 543834 }, + { url = "https://files.pythonhosted.org/packages/28/2f/78a766c8913ad62b28581777ac4ede50c6d9f249d39c2963e279524a1bbe/pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9", size = 1343105 }, + { url = "https://files.pythonhosted.org/packages/b7/9c/4b1e2d3d4065be715e007fe063ec7885978fad285f87eae1436e6c3201f4/pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52", size = 1008365 }, + { url = "https://files.pythonhosted.org/packages/4f/ef/5a23ec689ff36d7625b38d121ef15abfc3631a9aecb417baf7a4245e4124/pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08", size = 665923 }, + { url = "https://files.pythonhosted.org/packages/ae/61/d436461a47437d63c6302c90724cf0981883ec57ceb6073873f32172d676/pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5", size = 903400 }, + { url = "https://files.pythonhosted.org/packages/47/42/fc6d35ecefe1739a819afaf6f8e686f7f02a4dd241c78972d316f403474c/pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae", size = 860034 }, + { url = "https://files.pythonhosted.org/packages/07/3b/44ea6266a6761e9eefaa37d98fabefa112328808ac41aa87b4bbb668af30/pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711", size = 860579 }, + { url = "https://files.pythonhosted.org/packages/38/6f/4df2014ab553a6052b0e551b37da55166991510f9e1002c89cab7ce3b3f2/pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6", size = 1196246 }, + { url = "https://files.pythonhosted.org/packages/38/9d/ee240fc0c9fe9817f0c9127a43238a3e28048795483c403cc10720ddef22/pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3", size = 1507441 }, + { url = "https://files.pythonhosted.org/packages/85/4f/01711edaa58d535eac4a26c294c617c9a01f09857c0ce191fd574d06f359/pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b", size = 1406498 }, + { url = "https://files.pythonhosted.org/packages/07/18/907134c85c7152f679ed744e73e645b365f3ad571f38bdb62e36f347699a/pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7", size = 575533 }, + { url = "https://files.pythonhosted.org/packages/ce/2c/a6f4a20202a4d3c582ad93f95ee78d79bbdc26803495aec2912b17dbbb6c/pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a", size = 637768 }, + { url = "https://files.pythonhosted.org/packages/5f/0e/eb16ff731632d30554bf5af4dbba3ffcd04518219d82028aea4ae1b02ca5/pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b", size = 540675 }, + { url = "https://files.pythonhosted.org/packages/04/a7/0f7e2f6c126fe6e62dbae0bc93b1bd3f1099cf7fea47a5468defebe3f39d/pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726", size = 1006564 }, + { url = "https://files.pythonhosted.org/packages/31/b6/a187165c852c5d49f826a690857684333a6a4a065af0a6015572d2284f6a/pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3", size = 1340447 }, + { url = "https://files.pythonhosted.org/packages/68/ba/f4280c58ff71f321602a6e24fd19879b7e79793fb8ab14027027c0fb58ef/pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50", size = 665485 }, + { url = "https://files.pythonhosted.org/packages/77/b5/c987a5c53c7d8704216f29fc3d810b32f156bcea488a940e330e1bcbb88d/pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb", size = 903484 }, + { url = "https://files.pythonhosted.org/packages/29/c9/07da157d2db18c72a7eccef8e684cefc155b712a88e3d479d930aa9eceba/pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187", size = 859981 }, + { url = "https://files.pythonhosted.org/packages/43/09/e12501bd0b8394b7d02c41efd35c537a1988da67fc9c745cae9c6c776d31/pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b", size = 860334 }, + { url = "https://files.pythonhosted.org/packages/eb/ff/f5ec1d455f8f7385cc0a8b2acd8c807d7fade875c14c44b85c1bddabae21/pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18", size = 1196179 }, + { url = "https://files.pythonhosted.org/packages/ec/8a/bb2ac43295b1950fe436a81fc5b298be0b96ac76fb029b514d3ed58f7b27/pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115", size = 1507668 }, + { url = "https://files.pythonhosted.org/packages/a9/49/dbc284ebcfd2dca23f6349227ff1616a7ee2c4a35fe0a5d6c3deff2b4fed/pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e", size = 1406539 }, + { url = "https://files.pythonhosted.org/packages/00/68/093cdce3fe31e30a341d8e52a1ad86392e13c57970d722c1f62a1d1a54b6/pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5", size = 575567 }, + { url = "https://files.pythonhosted.org/packages/92/ae/6cc4657148143412b5819b05e362ae7dd09fb9fe76e2a539dcff3d0386bc/pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad", size = 637551 }, + { url = "https://files.pythonhosted.org/packages/6c/67/fbff102e201688f97c8092e4c3445d1c1068c2f27bbd45a578df97ed5f94/pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = 
"sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797", size = 540378 }, + { url = "https://files.pythonhosted.org/packages/3f/fe/2d998380b6e0122c6c4bdf9b6caf490831e5f5e2d08a203b5adff060c226/pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a", size = 1007378 }, + { url = "https://files.pythonhosted.org/packages/4a/f4/30d6e7157f12b3a0390bde94d6a8567cdb88846ed068a6e17238a4ccf600/pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc", size = 1329532 }, + { url = "https://files.pythonhosted.org/packages/82/86/3fe917870e15ee1c3ad48229a2a64458e36036e64b4afa9659045d82bfa8/pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5", size = 653242 }, + { url = "https://files.pythonhosted.org/packages/50/2d/242e7e6ef6c8c19e6cb52d095834508cd581ffb925699fd3c640cdc758f1/pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672", size = 888404 }, + { url = "https://files.pythonhosted.org/packages/ac/11/7270566e1f31e4ea73c81ec821a4b1688fd551009a3d2bab11ec66cb1e8f/pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797", size = 845858 }, + { url = "https://files.pythonhosted.org/packages/91/d5/72b38fbc69867795c8711bdd735312f9fef1e3d9204e2f63ab57085434b9/pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386", size = 847375 }, + { url = "https://files.pythonhosted.org/packages/dd/9a/10ed3c7f72b4c24e719c59359fbadd1a27556a28b36cdf1cd9e4fb7845d5/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306", size = 1183489 }, + { url = "https://files.pythonhosted.org/packages/72/2d/8660892543fabf1fe41861efa222455811adac9f3c0818d6c3170a1153e3/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6", size = 1492932 }, + { url = "https://files.pythonhosted.org/packages/7b/d6/32fd69744afb53995619bc5effa2a405ae0d343cd3e747d0fbc43fe894ee/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0", size = 1392485 }, + { url = "https://files.pythonhosted.org/packages/53/fb/36b2b2548286e9444e52fcd198760af99fd89102b5be50f0660fcfe902df/pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072", size = 906955 }, + { url = "https://files.pythonhosted.org/packages/77/8f/6ce54f8979a01656e894946db6299e2273fcee21c8e5fa57c6295ef11f57/pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1", size = 565701 }, + { url = "https://files.pythonhosted.org/packages/ee/1c/bf8cd66730a866b16db8483286078892b7f6536f8c389fb46e4beba0a970/pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d", size = 794312 }, + { url = 
"https://files.pythonhosted.org/packages/71/43/91fa4ff25bbfdc914ab6bafa0f03241d69370ef31a761d16bb859f346582/pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca", size = 752775 }, + { url = "https://files.pythonhosted.org/packages/ec/d2/3b2ab40f455a256cb6672186bea95cd97b459ce4594050132d71e76f0d6f/pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c", size = 550762 }, ] [[package]] name = "qdrant-client" -version = "1.13.2" +version = "1.12.2" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.11' and sys_platform == 'darwin'", - "python_full_version == '3.11.*' and sys_platform == 'darwin'", - "python_full_version == '3.12.*' and sys_platform == 'darwin'", - "python_full_version < '3.11' and sys_platform == 'linux'", - "python_full_version == '3.11.*' and sys_platform == 'linux'", - "python_full_version == '3.12.*' and sys_platform == 'linux'", - "python_full_version < '3.11' and sys_platform == 'win32'", - "python_full_version == '3.11.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform == 'win32'", -] dependencies = [ - { name = "grpcio", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, - { name = "grpcio-tools", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, - { name = "httpx", extra = ["http2"], marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, - { name = "numpy", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, - { name = "portalocker", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, - { name = "pydantic", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, - { name = "urllib3", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, + { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "grpcio-tools", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "httpx", extra = ["http2"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "portalocker", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or 
sys_platform == 'win32'" }, + { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/07/3eaf3777d524d555ba14e56a30c3e393ad78ed93f6c87c6a3ddc70ec2e49/qdrant_client-1.13.2.tar.gz", hash = "sha256:c8cce87ce67b006f49430a050a35c85b78e3b896c0c756dafc13bdeca543ec13", size = 266257 } +sdist = { url = "https://files.pythonhosted.org/packages/b9/0b/7b6ddc9ade365b644a023ca225300662766732e1e9db7f5962a6cf9530bd/qdrant_client-1.12.2.tar.gz", hash = "sha256:2777e09b3e89bb22bb490384d8b1fa8140f3915287884f18984f7031a346aba5", size = 237512 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/26/89ebaee5fcbd99bf1c0a627a9447b440118b2d31dea423d074cb0481be5c/qdrant_client-1.13.2-py3-none-any.whl", hash = "sha256:db97e759bd3f8d483a383984ba4c2a158eef56f2188d83df7771591d43de2201", size = 306637 }, + { url = "https://files.pythonhosted.org/packages/e5/00/27c6eb6fc764e2b3d26ddeab4aedab855c050c906ec018bdd669b18f3157/qdrant_client-1.12.2-py3-none-any.whl", hash = "sha256:a0ae500a46a679ff3521ba3f1f1cf3d72b57090a768cec65fc317066bcbac1e6", size = 267173 }, ] [[package]] @@ -4517,36 +4255,34 @@ hiredis = [ [[package]] name = "redisvl" -version = "0.4.1" +version = "0.3.8" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coloredlogs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "ml-dtypes", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "python-ulid", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "redis", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tabulate", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tenacity", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/21/33/ab14865a0b2a31b1d003c29e7e8ea3a7a2f2c8ecb24e58e58d606e1f031b/redisvl-0.4.1.tar.gz", hash = "sha256:fd6a36426ba94792c0efca20915c31232d4ee3cc58eb23794a62c142696401e6", size = 77688 } +sdist = { url = "https://files.pythonhosted.org/packages/e8/e2/95d61ccf79dd6fed45a2cdabbc09fd78453bd744ac8ac8fe53a678959470/redisvl-0.3.8.tar.gz", hash = "sha256:b07fc3c36cdd2d6304ab8b3e759a733d3332df868a926ffa7691803c195eab42", size = 72757 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ad/34/2b4bb30fabb6e37c3c57d2cb760f19aec23567f9622372c8a7617071f204/redisvl-0.4.1-py3-none-any.whl", hash = "sha256:6db5d5bc95b1fe8032a1cdae74ce1c65bc7fe9054e5429b5d34d5a91d28bae5f", size = 108525 }, + { url = "https://files.pythonhosted.org/packages/09/c4/b0cb6d49794e6e2ea69bde5877cd566dc448e4067932de570a09c6fb09de/redisvl-0.3.8-py3-none-any.whl", hash = "sha256:2dffd4ed9a4de5a384b5cbcfb2d4bc5a12bd109811ac672b9d89225dc0a0fd63", size = 99287 }, ] [[package]] name = "referencing" -version = "0.36.2" +version = "0.35.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs", 
marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "rpds-py", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744 } +sdist = { url = "https://files.pythonhosted.org/packages/99/5b/73ca1f8e72fff6fa52119dbd185f73a907b1989428917b24cff660129b6d/referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c", size = 62991 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775 }, + { url = "https://files.pythonhosted.org/packages/b7/59/2056f61236782a2c86b33906c025d4f4a0b17be0161b63b70fd9e8775d36/referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de", size = 26684 }, ] [[package]] @@ -4674,87 +4410,87 @@ wheels = [ [[package]] name = "rpds-py" -version = "0.23.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/79/2ce611b18c4fd83d9e3aecb5cba93e1917c050f556db39842889fa69b79f/rpds_py-0.23.1.tar.gz", hash = "sha256:7f3240dcfa14d198dba24b8b9cb3b108c06b68d45b7babd9eefc1038fdf7e707", size = 26806 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/34/fe/e5326459863bd525122f4e9c80ac8d7c6cfa171b7518d04cc27c12c209b0/rpds_py-0.23.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2a54027554ce9b129fc3d633c92fa33b30de9f08bc61b32c053dc9b537266fed", size = 372123 }, - { url = "https://files.pythonhosted.org/packages/f9/db/f10a3795f7a89fb27594934012d21c61019bbeb516c5bdcfbbe9e9e617a7/rpds_py-0.23.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b5ef909a37e9738d146519657a1aab4584018746a18f71c692f2f22168ece40c", size = 356778 }, - { url = "https://files.pythonhosted.org/packages/21/27/0d3678ad7f432fa86f8fac5f5fc6496a4d2da85682a710d605219be20063/rpds_py-0.23.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ee9d6f0b38efb22ad94c3b68ffebe4c47865cdf4b17f6806d6c674e1feb4246", size = 385775 }, - { url = "https://files.pythonhosted.org/packages/99/a0/1786defa125b2ad228027f22dff26312ce7d1fee3c7c3c2682f403db2062/rpds_py-0.23.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7356a6da0562190558c4fcc14f0281db191cdf4cb96e7604c06acfcee96df15", size = 391181 }, - { url = "https://files.pythonhosted.org/packages/f1/5c/1240934050a7ffd020a915486d0cc4c7f6e7a2442a77aedf13664db55d36/rpds_py-0.23.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9441af1d25aed96901f97ad83d5c3e35e6cd21a25ca5e4916c82d7dd0490a4fa", size = 444607 }, - { url = "https://files.pythonhosted.org/packages/b7/1b/cee6905b47817fd0a377716dbe4df35295de46df46ee2ff704538cc371b0/rpds_py-0.23.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:3d8abf7896a91fb97e7977d1aadfcc2c80415d6dc2f1d0fca5b8d0df247248f3", size = 445550 }, - { url = "https://files.pythonhosted.org/packages/54/f7/f0821ca34032892d7a67fcd5042f50074ff2de64e771e10df01085c88d47/rpds_py-0.23.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b08027489ba8fedde72ddd233a5ea411b85a6ed78175f40285bd401bde7466d", size = 386148 }, - { url = "https://files.pythonhosted.org/packages/eb/ef/2afe53bc857c4bcba336acfd2629883a5746e7291023e017ac7fc98d85aa/rpds_py-0.23.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fee513135b5a58f3bb6d89e48326cd5aa308e4bcdf2f7d59f67c861ada482bf8", size = 416780 }, - { url = "https://files.pythonhosted.org/packages/ae/9a/38d2236cf669789b8a3e1a014c9b6a8d7b8925b952c92e7839ae2749f9ac/rpds_py-0.23.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:35d5631ce0af26318dba0ae0ac941c534453e42f569011585cb323b7774502a5", size = 558265 }, - { url = "https://files.pythonhosted.org/packages/e6/0a/f2705530c42578f20ed0b5b90135eecb30eef6e2ba73e7ba69087fad2dba/rpds_py-0.23.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a20cb698c4a59c534c6701b1c24a968ff2768b18ea2991f886bd8985ce17a89f", size = 585270 }, - { url = "https://files.pythonhosted.org/packages/29/4e/3b597dc84ed82c3d757ac9aa620de224a94e06d2e102069795ae7e81c015/rpds_py-0.23.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e9c206a1abc27e0588cf8b7c8246e51f1a16a103734f7750830a1ccb63f557a", size = 553850 }, - { url = "https://files.pythonhosted.org/packages/00/cc/6498b6f79e4375e6737247661e52a2d18f6accf4910e0c8da978674b4241/rpds_py-0.23.1-cp310-cp310-win32.whl", hash = "sha256:d9f75a06ecc68f159d5d7603b734e1ff6daa9497a929150f794013aa9f6e3f12", size = 220660 }, - { url = "https://files.pythonhosted.org/packages/17/2b/08db023d23e8c7032c99d8d2a70d32e450a868ab73d16e3ff5290308a665/rpds_py-0.23.1-cp310-cp310-win_amd64.whl", hash = "sha256:f35eff113ad430b5272bbfc18ba111c66ff525828f24898b4e146eb479a2cdda", size = 232551 }, - { url = "https://files.pythonhosted.org/packages/1c/67/6e5d4234bb9dee062ffca2a5f3c7cd38716317d6760ec235b175eed4de2c/rpds_py-0.23.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b79f5ced71efd70414a9a80bbbfaa7160da307723166f09b69773153bf17c590", size = 372264 }, - { url = "https://files.pythonhosted.org/packages/a7/0a/3dedb2daee8e783622427f5064e2d112751d8276ee73aa5409f000a132f4/rpds_py-0.23.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c9e799dac1ffbe7b10c1fd42fe4cd51371a549c6e108249bde9cd1200e8f59b4", size = 356883 }, - { url = "https://files.pythonhosted.org/packages/ed/fc/e1acef44f9c24b05fe5434b235f165a63a52959ac655e3f7a55726cee1a4/rpds_py-0.23.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:721f9c4011b443b6e84505fc00cc7aadc9d1743f1c988e4c89353e19c4a968ee", size = 385624 }, - { url = "https://files.pythonhosted.org/packages/97/0a/a05951f6465d01622720c03ef6ef31adfbe865653e05ed7c45837492f25e/rpds_py-0.23.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f88626e3f5e57432e6191cd0c5d6d6b319b635e70b40be2ffba713053e5147dd", size = 391500 }, - { url = "https://files.pythonhosted.org/packages/ea/2e/cca0583ec0690ea441dceae23c0673b99755710ea22f40bccf1e78f41481/rpds_py-0.23.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:285019078537949cecd0190f3690a0b0125ff743d6a53dfeb7a4e6787af154f5", size = 444869 }, - { url = 
"https://files.pythonhosted.org/packages/cc/e6/95cda68b33a6d814d1e96b0e406d231ed16629101460d1740e92f03365e6/rpds_py-0.23.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b92f5654157de1379c509b15acec9d12ecf6e3bc1996571b6cb82a4302060447", size = 444930 }, - { url = "https://files.pythonhosted.org/packages/5f/a7/e94cdb73411ae9c11414d3c7c9a6ad75d22ad4a8d094fb45a345ba9e3018/rpds_py-0.23.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e768267cbe051dd8d1c5305ba690bb153204a09bf2e3de3ae530de955f5b5580", size = 386254 }, - { url = "https://files.pythonhosted.org/packages/dd/c5/a4a943d90a39e85efd1e04b1ad5129936786f9a9aa27bb7be8fc5d9d50c9/rpds_py-0.23.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c5334a71f7dc1160382d45997e29f2637c02f8a26af41073189d79b95d3321f1", size = 417090 }, - { url = "https://files.pythonhosted.org/packages/0c/a0/80d0013b12428d1fce0ab4e71829400b0a32caec12733c79e6109f843342/rpds_py-0.23.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d6adb81564af0cd428910f83fa7da46ce9ad47c56c0b22b50872bc4515d91966", size = 557639 }, - { url = "https://files.pythonhosted.org/packages/a6/92/ec2e6980afb964a2cd7a99cbdef1f6c01116abe94b42cbe336ac93dd11c2/rpds_py-0.23.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:cafa48f2133d4daa028473ede7d81cd1b9f9e6925e9e4003ebdf77010ee02f35", size = 584572 }, - { url = "https://files.pythonhosted.org/packages/3d/ce/75b6054db34a390789a82523790717b27c1bd735e453abb429a87c4f0f26/rpds_py-0.23.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fced9fd4a07a1ded1bac7e961ddd9753dd5d8b755ba8e05acba54a21f5f1522", size = 553028 }, - { url = "https://files.pythonhosted.org/packages/cc/24/f45abe0418c06a5cba0f846e967aa27bac765acd927aabd857c21319b8cc/rpds_py-0.23.1-cp311-cp311-win32.whl", hash = "sha256:243241c95174b5fb7204c04595852fe3943cc41f47aa14c3828bc18cd9d3b2d6", size = 220862 }, - { url = "https://files.pythonhosted.org/packages/2d/a6/3c0880e8bbfc36451ef30dc416266f6d2934705e468db5d21c8ba0ab6400/rpds_py-0.23.1-cp311-cp311-win_amd64.whl", hash = "sha256:11dd60b2ffddba85715d8a66bb39b95ddbe389ad2cfcf42c833f1bcde0878eaf", size = 232953 }, - { url = "https://files.pythonhosted.org/packages/f3/8c/d17efccb9f5b9137ddea706664aebae694384ae1d5997c0202093e37185a/rpds_py-0.23.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3902df19540e9af4cc0c3ae75974c65d2c156b9257e91f5101a51f99136d834c", size = 364369 }, - { url = "https://files.pythonhosted.org/packages/6e/c0/ab030f696b5c573107115a88d8d73d80f03309e60952b64c584c70c659af/rpds_py-0.23.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66f8d2a17e5838dd6fb9be6baaba8e75ae2f5fa6b6b755d597184bfcd3cb0eba", size = 349965 }, - { url = "https://files.pythonhosted.org/packages/b3/55/b40170f5a079c4fb0b6a82b299689e66e744edca3c3375a8b160fb797660/rpds_py-0.23.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:112b8774b0b4ee22368fec42749b94366bd9b536f8f74c3d4175d4395f5cbd31", size = 389064 }, - { url = "https://files.pythonhosted.org/packages/ab/1c/b03a912c59ec7c1e16b26e587b9dfa8ddff3b07851e781e8c46e908a365a/rpds_py-0.23.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0df046f2266e8586cf09d00588302a32923eb6386ced0ca5c9deade6af9a149", size = 397741 }, - { url = "https://files.pythonhosted.org/packages/52/6f/151b90792b62fb6f87099bcc9044c626881fdd54e31bf98541f830b15cea/rpds_py-0.23.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:0f3288930b947cbebe767f84cf618d2cbe0b13be476e749da0e6a009f986248c", size = 448784 }, - { url = "https://files.pythonhosted.org/packages/71/2a/6de67c0c97ec7857e0e9e5cd7c52405af931b303eb1e5b9eff6c50fd9a2e/rpds_py-0.23.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce473a2351c018b06dd8d30d5da8ab5a0831056cc53b2006e2a8028172c37ce5", size = 440203 }, - { url = "https://files.pythonhosted.org/packages/db/5e/e759cd1c276d98a4b1f464b17a9bf66c65d29f8f85754e27e1467feaa7c3/rpds_py-0.23.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d550d7e9e7d8676b183b37d65b5cd8de13676a738973d330b59dc8312df9c5dc", size = 391611 }, - { url = "https://files.pythonhosted.org/packages/1c/1e/2900358efcc0d9408c7289769cba4c0974d9db314aa884028ed7f7364f61/rpds_py-0.23.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e14f86b871ea74c3fddc9a40e947d6a5d09def5adc2076ee61fb910a9014fb35", size = 423306 }, - { url = "https://files.pythonhosted.org/packages/23/07/6c177e6d059f5d39689352d6c69a926ee4805ffdb6f06203570234d3d8f7/rpds_py-0.23.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1bf5be5ba34e19be579ae873da515a2836a2166d8d7ee43be6ff909eda42b72b", size = 562323 }, - { url = "https://files.pythonhosted.org/packages/70/e4/f9097fd1c02b516fff9850792161eb9fc20a2fd54762f3c69eae0bdb67cb/rpds_py-0.23.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7031d493c4465dbc8d40bd6cafefef4bd472b17db0ab94c53e7909ee781b9ef", size = 588351 }, - { url = "https://files.pythonhosted.org/packages/87/39/5db3c6f326bfbe4576ae2af6435bd7555867d20ae690c786ff33659f293b/rpds_py-0.23.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:55ff4151cfd4bc635e51cfb1c59ac9f7196b256b12e3a57deb9e5742e65941ad", size = 557252 }, - { url = "https://files.pythonhosted.org/packages/fd/14/2d5ad292f144fa79bafb78d2eb5b8a3a91c358b6065443cb9c49b5d1fedf/rpds_py-0.23.1-cp312-cp312-win32.whl", hash = "sha256:a9d3b728f5a5873d84cba997b9d617c6090ca5721caaa691f3b1a78c60adc057", size = 222181 }, - { url = "https://files.pythonhosted.org/packages/a3/4f/0fce63e0f5cdd658e71e21abd17ac1bc9312741ebb8b3f74eeed2ebdf771/rpds_py-0.23.1-cp312-cp312-win_amd64.whl", hash = "sha256:b03a8d50b137ee758e4c73638b10747b7c39988eb8e6cd11abb7084266455165", size = 237426 }, - { url = "https://files.pythonhosted.org/packages/13/9d/b8b2c0edffb0bed15be17b6d5ab06216f2f47f9ee49259c7e96a3ad4ca42/rpds_py-0.23.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:4caafd1a22e5eaa3732acb7672a497123354bef79a9d7ceed43387d25025e935", size = 363672 }, - { url = "https://files.pythonhosted.org/packages/bd/c2/5056fa29e6894144d7ba4c938b9b0445f75836b87d2dd00ed4999dc45a8c/rpds_py-0.23.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:178f8a60fc24511c0eb756af741c476b87b610dba83270fce1e5a430204566a4", size = 349602 }, - { url = "https://files.pythonhosted.org/packages/b0/bc/33779a1bb0ee32d8d706b173825aab75c628521d23ce72a7c1e6a6852f86/rpds_py-0.23.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c632419c3870507ca20a37c8f8f5352317aca097639e524ad129f58c125c61c6", size = 388746 }, - { url = "https://files.pythonhosted.org/packages/62/0b/71db3e36b7780a619698ec82a9c87ab44ad7ca7f5480913e8a59ff76f050/rpds_py-0.23.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:698a79d295626ee292d1730bc2ef6e70a3ab135b1d79ada8fde3ed0047b65a10", size = 397076 }, - { url = 
"https://files.pythonhosted.org/packages/bb/2e/494398f613edf77ba10a916b1ddea2acce42ab0e3b62e2c70ffc0757ce00/rpds_py-0.23.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:271fa2184cf28bdded86bb6217c8e08d3a169fe0bbe9be5e8d96e8476b707122", size = 448399 }, - { url = "https://files.pythonhosted.org/packages/dd/53/4bd7f5779b1f463243ee5fdc83da04dd58a08f86e639dbffa7a35f969a84/rpds_py-0.23.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b91cceb5add79ee563bd1f70b30896bd63bc5f78a11c1f00a1e931729ca4f1f4", size = 439764 }, - { url = "https://files.pythonhosted.org/packages/f6/55/b3c18c04a460d951bf8e91f2abf46ce5b6426fb69784166a6a25827cb90a/rpds_py-0.23.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a6cb95074777f1ecda2ca4fa7717caa9ee6e534f42b7575a8f0d4cb0c24013", size = 390662 }, - { url = "https://files.pythonhosted.org/packages/2a/65/cc463044a3cbd616029b2aa87a651cdee8288d2fdd7780b2244845e934c1/rpds_py-0.23.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50fb62f8d8364978478b12d5f03bf028c6bc2af04082479299139dc26edf4c64", size = 422680 }, - { url = "https://files.pythonhosted.org/packages/fa/8e/1fa52990c7836d72e8d70cd7753f2362c72fbb0a49c1462e8c60e7176d0b/rpds_py-0.23.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c8f7e90b948dc9dcfff8003f1ea3af08b29c062f681c05fd798e36daa3f7e3e8", size = 561792 }, - { url = "https://files.pythonhosted.org/packages/57/b8/fe3b612979b1a29d0c77f8585903d8b3a292604b26d4b300e228b8ac6360/rpds_py-0.23.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5b98b6c953e5c2bda51ab4d5b4f172617d462eebc7f4bfdc7c7e6b423f6da957", size = 588127 }, - { url = "https://files.pythonhosted.org/packages/44/2d/fde474de516bbc4b9b230f43c98e7f8acc5da7fc50ceed8e7af27553d346/rpds_py-0.23.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2893d778d4671ee627bac4037a075168b2673c57186fb1a57e993465dbd79a93", size = 556981 }, - { url = "https://files.pythonhosted.org/packages/18/57/767deeb27b81370bbab8f74ef6e68d26c4ea99018f3c71a570e506fede85/rpds_py-0.23.1-cp313-cp313-win32.whl", hash = "sha256:2cfa07c346a7ad07019c33fb9a63cf3acb1f5363c33bc73014e20d9fe8b01cdd", size = 221936 }, - { url = "https://files.pythonhosted.org/packages/7d/6c/3474cfdd3cafe243f97ab8474ea8949236eb2a1a341ca55e75ce00cd03da/rpds_py-0.23.1-cp313-cp313-win_amd64.whl", hash = "sha256:3aaf141d39f45322e44fc2c742e4b8b4098ead5317e5f884770c8df0c332da70", size = 237145 }, - { url = "https://files.pythonhosted.org/packages/ec/77/e985064c624230f61efa0423759bb066da56ebe40c654f8b5ba225bd5d63/rpds_py-0.23.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:759462b2d0aa5a04be5b3e37fb8183615f47014ae6b116e17036b131985cb731", size = 359623 }, - { url = "https://files.pythonhosted.org/packages/62/d9/a33dcbf62b29e40559e012d525bae7d516757cf042cc9234bd34ca4b6aeb/rpds_py-0.23.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3e9212f52074fc9d72cf242a84063787ab8e21e0950d4d6709886fb62bcb91d5", size = 345900 }, - { url = "https://files.pythonhosted.org/packages/92/eb/f81a4be6397861adb2cb868bb6a28a33292c2dcac567d1dc575226055e55/rpds_py-0.23.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e9f3a3ac919406bc0414bbbd76c6af99253c507150191ea79fab42fdb35982a", size = 386426 }, - { url = "https://files.pythonhosted.org/packages/09/47/1f810c9b5e83be005341201b5389f1d240dfa440346ea7189f9b3fd6961d/rpds_py-0.23.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:c04ca91dda8a61584165825907f5c967ca09e9c65fe8966ee753a3f2b019fe1e", size = 392314 }, - { url = "https://files.pythonhosted.org/packages/83/bd/bc95831432fd6c46ed8001f01af26de0763a059d6d7e6d69e3c5bf02917a/rpds_py-0.23.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ab923167cfd945abb9b51a407407cf19f5bee35001221f2911dc85ffd35ff4f", size = 447706 }, - { url = "https://files.pythonhosted.org/packages/19/3e/567c04c226b1802dc6dc82cad3d53e1fa0a773258571c74ac5d8fbde97ed/rpds_py-0.23.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed6f011bedca8585787e5082cce081bac3d30f54520097b2411351b3574e1219", size = 437060 }, - { url = "https://files.pythonhosted.org/packages/fe/77/a77d2c6afe27ae7d0d55fc32f6841502648070dc8d549fcc1e6d47ff8975/rpds_py-0.23.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6959bb9928c5c999aba4a3f5a6799d571ddc2c59ff49917ecf55be2bbb4e3722", size = 389347 }, - { url = "https://files.pythonhosted.org/packages/3f/47/6b256ff20a74cfebeac790ab05586e0ac91f88e331125d4740a6c86fc26f/rpds_py-0.23.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ed7de3c86721b4e83ac440751329ec6a1102229aa18163f84c75b06b525ad7e", size = 415554 }, - { url = "https://files.pythonhosted.org/packages/fc/29/d4572469a245bc9fc81e35166dca19fc5298d5c43e1a6dd64bf145045193/rpds_py-0.23.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5fb89edee2fa237584e532fbf78f0ddd1e49a47c7c8cfa153ab4849dc72a35e6", size = 557418 }, - { url = "https://files.pythonhosted.org/packages/9c/0a/68cf7228895b1a3f6f39f51b15830e62456795e61193d2c8b87fd48c60db/rpds_py-0.23.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7e5413d2e2d86025e73f05510ad23dad5950ab8417b7fc6beaad99be8077138b", size = 583033 }, - { url = "https://files.pythonhosted.org/packages/14/18/017ab41dcd6649ad5db7d00155b4c212b31ab05bd857d5ba73a1617984eb/rpds_py-0.23.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d31ed4987d72aabdf521eddfb6a72988703c091cfc0064330b9e5f8d6a042ff5", size = 554880 }, - { url = "https://files.pythonhosted.org/packages/2e/dd/17de89431268da8819d8d51ce67beac28d9b22fccf437bc5d6d2bcd1acdb/rpds_py-0.23.1-cp313-cp313t-win32.whl", hash = "sha256:f3429fb8e15b20961efca8c8b21432623d85db2228cc73fe22756c6637aa39e7", size = 219743 }, - { url = "https://files.pythonhosted.org/packages/68/15/6d22d07e063ce5e9bfbd96db9ec2fbb4693591b4503e3a76996639474d02/rpds_py-0.23.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d6f6512a90bd5cd9030a6237f5346f046c6f0e40af98657568fa45695d4de59d", size = 235415 }, - { url = "https://files.pythonhosted.org/packages/95/a9/6fafd35fc6bac05f59bcbc800b57cef877911ff1c015397c519fec888642/rpds_py-0.23.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c1f8afa346ccd59e4e5630d5abb67aba6a9812fddf764fd7eb11f382a345f8cc", size = 373463 }, - { url = "https://files.pythonhosted.org/packages/5b/ac/44f00029b8fbe0903a19e9a87a9b86063bf8700df2cc58868373d378418c/rpds_py-0.23.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fad784a31869747df4ac968a351e070c06ca377549e4ace94775aaa3ab33ee06", size = 358400 }, - { url = "https://files.pythonhosted.org/packages/5e/9c/3da199346c68d785f10dccab123b74c8c5f73be3f742c9e33d1116e07931/rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5a96fcac2f18e5a0a23a75cd27ce2656c66c11c127b0318e508aab436b77428", size = 386815 }, - { url = 
"https://files.pythonhosted.org/packages/d3/45/8f6533c33c0d33da8c2c8b2fb8f2ee90b23c05c679b86b0ac6aee4653749/rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3e77febf227a1dc3220159355dba68faa13f8dca9335d97504abf428469fb18b", size = 392974 }, - { url = "https://files.pythonhosted.org/packages/ca/56/6a9ac1bf0455ba07385d8fe98c571c519b4f2000cff6581487bf9fab9272/rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26bb3e8de93443d55e2e748e9fd87deb5f8075ca7bc0502cfc8be8687d69a2ec", size = 446019 }, - { url = "https://files.pythonhosted.org/packages/f4/83/5d9a3f9731cdccf49088bcc4ce821a5cf50bd1737cdad83e9959a7b9054d/rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db7707dde9143a67b8812c7e66aeb2d843fe33cc8e374170f4d2c50bd8f2472d", size = 445811 }, - { url = "https://files.pythonhosted.org/packages/44/50/f2e0a98c62fc1fe68b176caca587714dc5c8bb2c3d1dd1eeb2bd4cc787ac/rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eedaaccc9bb66581d4ae7c50e15856e335e57ef2734dbc5fd8ba3e2a4ab3cb6", size = 388070 }, - { url = "https://files.pythonhosted.org/packages/f2/d0/4981878f8f157e6dbea01d95e0119bf3d6b4c2c884fe64a9e6987f941104/rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28358c54fffadf0ae893f6c1050e8f8853e45df22483b7fff2f6ab6152f5d8bf", size = 419173 }, - { url = "https://files.pythonhosted.org/packages/ce/13/fc971c470da96b270d2f64fedee987351bd935dc3016932a5cdcb1a88a2a/rpds_py-0.23.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:633462ef7e61d839171bf206551d5ab42b30b71cac8f10a64a662536e057fdef", size = 559048 }, - { url = "https://files.pythonhosted.org/packages/42/02/be91e1de139ec8b4f9fec4192fd779ba48af281cfc762c0ca4c15b945484/rpds_py-0.23.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a98f510d86f689fcb486dc59e6e363af04151e5260ad1bdddb5625c10f1e95f8", size = 584773 }, - { url = "https://files.pythonhosted.org/packages/27/28/3af8a1956df3edc41d884267d766dc096496dafc83f02f764a475eca0b4a/rpds_py-0.23.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e0397dd0b3955c61ef9b22838144aa4bef6f0796ba5cc8edfc64d468b93798b4", size = 555153 }, - { url = "https://files.pythonhosted.org/packages/5e/bb/e45f51c4e1327dea3c72b846c6de129eebacb7a6cb309af7af35d0578c80/rpds_py-0.23.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:75307599f0d25bf6937248e5ac4e3bde5ea72ae6618623b86146ccc7845ed00b", size = 233827 }, +version = "0.22.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/80/cce854d0921ff2f0a9fa831ba3ad3c65cee3a46711addf39a2af52df2cfd/rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d", size = 26771 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/2a/ead1d09e57449b99dcc190d8d2323e3a167421d8f8fdf0f217c6f6befe47/rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967", size = 359514 }, + { url = "https://files.pythonhosted.org/packages/8f/7e/1254f406b7793b586c68e217a6a24ec79040f85e030fff7e9049069284f4/rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37", size = 349031 }, + { url = 
"https://files.pythonhosted.org/packages/aa/da/17c6a2c73730d426df53675ff9cc6653ac7a60b6438d03c18e1c822a576a/rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24", size = 381485 }, + { url = "https://files.pythonhosted.org/packages/aa/13/2dbacd820466aa2a3c4b747afb18d71209523d353cf865bf8f4796c969ea/rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff", size = 386794 }, + { url = "https://files.pythonhosted.org/packages/6d/62/96905d0a35ad4e4bc3c098b2f34b2e7266e211d08635baa690643d2227be/rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c", size = 423523 }, + { url = "https://files.pythonhosted.org/packages/eb/1b/d12770f2b6a9fc2c3ec0d810d7d440f6d465ccd8b7f16ae5385952c28b89/rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e", size = 446695 }, + { url = "https://files.pythonhosted.org/packages/4d/cf/96f1fd75512a017f8e07408b6d5dbeb492d9ed46bfe0555544294f3681b3/rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec", size = 381959 }, + { url = "https://files.pythonhosted.org/packages/ab/f0/d1c5b501c8aea85aeb938b555bfdf7612110a2f8cdc21ae0482c93dd0c24/rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c", size = 410420 }, + { url = "https://files.pythonhosted.org/packages/33/3b/45b6c58fb6aad5a569ae40fb890fc494c6b02203505a5008ee6dc68e65f7/rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09", size = 557620 }, + { url = "https://files.pythonhosted.org/packages/83/62/3fdd2d3d47bf0bb9b931c4c73036b4ab3ec77b25e016ae26fab0f02be2af/rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00", size = 584202 }, + { url = "https://files.pythonhosted.org/packages/04/f2/5dced98b64874b84ca824292f9cee2e3f30f3bcf231d15a903126684f74d/rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf", size = 552787 }, + { url = "https://files.pythonhosted.org/packages/67/13/2273dea1204eda0aea0ef55145da96a9aa28b3f88bb5c70e994f69eda7c3/rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652", size = 220088 }, + { url = "https://files.pythonhosted.org/packages/4e/80/8c8176b67ad7f4a894967a7a4014ba039626d96f1d4874d53e409b58d69f/rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8", size = 231737 }, + { url = "https://files.pythonhosted.org/packages/15/ad/8d1ddf78f2805a71253fcd388017e7b4a0615c22c762b6d35301fef20106/rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f", size = 359773 }, + { url = "https://files.pythonhosted.org/packages/c8/75/68c15732293a8485d79fe4ebe9045525502a067865fa4278f178851b2d87/rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a", size = 349214 }, + { url = "https://files.pythonhosted.org/packages/3c/4c/7ce50f3070083c2e1b2bbd0fb7046f3da55f510d19e283222f8f33d7d5f4/rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5", size = 380477 }, + { url = "https://files.pythonhosted.org/packages/9a/e9/835196a69cb229d5c31c13b8ae603bd2da9a6695f35fe4270d398e1db44c/rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb", size = 386171 }, + { url = "https://files.pythonhosted.org/packages/f9/8e/33fc4eba6683db71e91e6d594a2cf3a8fbceb5316629f0477f7ece5e3f75/rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2", size = 422676 }, + { url = "https://files.pythonhosted.org/packages/37/47/2e82d58f8046a98bb9497a8319604c92b827b94d558df30877c4b3c6ccb3/rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0", size = 446152 }, + { url = "https://files.pythonhosted.org/packages/e1/78/79c128c3e71abbc8e9739ac27af11dc0f91840a86fce67ff83c65d1ba195/rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1", size = 381300 }, + { url = "https://files.pythonhosted.org/packages/c9/5b/2e193be0e8b228c1207f31fa3ea79de64dadb4f6a4833111af8145a6bc33/rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d", size = 409636 }, + { url = "https://files.pythonhosted.org/packages/c2/3f/687c7100b762d62186a1c1100ffdf99825f6fa5ea94556844bbbd2d0f3a9/rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648", size = 556708 }, + { url = "https://files.pythonhosted.org/packages/8c/a2/c00cbc4b857e8b3d5e7f7fc4c81e23afd8c138b930f4f3ccf9a41a23e9e4/rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74", size = 583554 }, + { url = "https://files.pythonhosted.org/packages/d0/08/696c9872cf56effdad9ed617ac072f6774a898d46b8b8964eab39ec562d2/rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a", size = 552105 }, + { url = "https://files.pythonhosted.org/packages/18/1f/4df560be1e994f5adf56cabd6c117e02de7c88ee238bb4ce03ed50da9d56/rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64", size = 220199 }, + { url = "https://files.pythonhosted.org/packages/b8/1b/c29b570bc5db8237553002788dc734d6bd71443a2ceac2a58202ec06ef12/rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c", size = 231775 }, + { url = "https://files.pythonhosted.org/packages/75/47/3383ee3bd787a2a5e65a9b9edc37ccf8505c0a00170e3a5e6ea5fbcd97f7/rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e", size = 352334 }, + { url = 
"https://files.pythonhosted.org/packages/40/14/aa6400fa8158b90a5a250a77f2077c0d0cd8a76fce31d9f2b289f04c6dec/rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56", size = 342111 }, + { url = "https://files.pythonhosted.org/packages/7d/06/395a13bfaa8a28b302fb433fb285a67ce0ea2004959a027aea8f9c52bad4/rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45", size = 384286 }, + { url = "https://files.pythonhosted.org/packages/43/52/d8eeaffab047e6b7b7ef7f00d5ead074a07973968ffa2d5820fa131d7852/rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e", size = 391739 }, + { url = "https://files.pythonhosted.org/packages/83/31/52dc4bde85c60b63719610ed6f6d61877effdb5113a72007679b786377b8/rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d", size = 427306 }, + { url = "https://files.pythonhosted.org/packages/70/d5/1bab8e389c2261dba1764e9e793ed6830a63f830fdbec581a242c7c46bda/rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38", size = 442717 }, + { url = "https://files.pythonhosted.org/packages/82/a1/a45f3e30835b553379b3a56ea6c4eb622cf11e72008229af840e4596a8ea/rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15", size = 385721 }, + { url = "https://files.pythonhosted.org/packages/a6/27/780c942de3120bdd4d0e69583f9c96e179dfff082f6ecbb46b8d6488841f/rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059", size = 415824 }, + { url = "https://files.pythonhosted.org/packages/94/0b/aa0542ca88ad20ea719b06520f925bae348ea5c1fdf201b7e7202d20871d/rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e", size = 561227 }, + { url = "https://files.pythonhosted.org/packages/0d/92/3ed77d215f82c8f844d7f98929d56cc321bb0bcfaf8f166559b8ec56e5f1/rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61", size = 587424 }, + { url = "https://files.pythonhosted.org/packages/09/42/cacaeb047a22cab6241f107644f230e2935d4efecf6488859a7dd82fc47d/rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7", size = 555953 }, + { url = "https://files.pythonhosted.org/packages/e6/52/c921dc6d5f5d45b212a456c1f5b17df1a471127e8037eb0972379e39dff4/rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627", size = 221339 }, + { url = "https://files.pythonhosted.org/packages/f2/c7/f82b5be1e8456600395366f86104d1bd8d0faed3802ad511ef6d60c30d98/rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4", size = 235786 }, + { url = "https://files.pythonhosted.org/packages/d0/bf/36d5cc1f2c609ae6e8bf0fc35949355ca9d8790eceb66e6385680c951e60/rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = 
"sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84", size = 351657 }, + { url = "https://files.pythonhosted.org/packages/24/2a/f1e0fa124e300c26ea9382e59b2d582cba71cedd340f32d1447f4f29fa4e/rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25", size = 341829 }, + { url = "https://files.pythonhosted.org/packages/cf/c2/0da1231dd16953845bed60d1a586fcd6b15ceaeb965f4d35cdc71f70f606/rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4", size = 384220 }, + { url = "https://files.pythonhosted.org/packages/c7/73/a4407f4e3a00a9d4b68c532bf2d873d6b562854a8eaff8faa6133b3588ec/rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5", size = 391009 }, + { url = "https://files.pythonhosted.org/packages/a9/c3/04b7353477ab360fe2563f5f0b176d2105982f97cd9ae80a9c5a18f1ae0f/rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc", size = 426989 }, + { url = "https://files.pythonhosted.org/packages/8d/e6/e4b85b722bcf11398e17d59c0f6049d19cd606d35363221951e6d625fcb0/rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b", size = 441544 }, + { url = "https://files.pythonhosted.org/packages/27/fc/403e65e56f65fff25f2973216974976d3f0a5c3f30e53758589b6dc9b79b/rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518", size = 385179 }, + { url = "https://files.pythonhosted.org/packages/57/9b/2be9ff9700d664d51fd96b33d6595791c496d2778cb0b2a634f048437a55/rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd", size = 415103 }, + { url = "https://files.pythonhosted.org/packages/bb/a5/03c2ad8ca10994fcf22dd2150dd1d653bc974fa82d9a590494c84c10c641/rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2", size = 560916 }, + { url = "https://files.pythonhosted.org/packages/ba/2e/be4fdfc8b5b576e588782b56978c5b702c5a2307024120d8aeec1ab818f0/rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16", size = 587062 }, + { url = "https://files.pythonhosted.org/packages/67/e0/2034c221937709bf9c542603d25ad43a68b4b0a9a0c0b06a742f2756eb66/rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f", size = 555734 }, + { url = "https://files.pythonhosted.org/packages/ea/ce/240bae07b5401a22482b58e18cfbabaa392409b2797da60223cca10d7367/rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de", size = 220663 }, + { url = "https://files.pythonhosted.org/packages/cb/f0/d330d08f51126330467edae2fa4efa5cec8923c87551a79299380fdea30d/rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9", size = 235503 }, + { url = 
"https://files.pythonhosted.org/packages/f7/c4/dbe1cc03df013bf2feb5ad00615038050e7859f381e96fb5b7b4572cd814/rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b", size = 347698 }, + { url = "https://files.pythonhosted.org/packages/a4/3a/684f66dd6b0f37499cad24cd1c0e523541fd768576fa5ce2d0a8799c3cba/rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b", size = 337330 }, + { url = "https://files.pythonhosted.org/packages/82/eb/e022c08c2ce2e8f7683baa313476492c0e2c1ca97227fe8a75d9f0181e95/rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1", size = 380022 }, + { url = "https://files.pythonhosted.org/packages/e4/21/5a80e653e4c86aeb28eb4fea4add1f72e1787a3299687a9187105c3ee966/rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83", size = 390754 }, + { url = "https://files.pythonhosted.org/packages/37/a4/d320a04ae90f72d080b3d74597074e62be0a8ecad7d7321312dfe2dc5a6a/rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd", size = 423840 }, + { url = "https://files.pythonhosted.org/packages/87/70/674dc47d93db30a6624279284e5631be4c3a12a0340e8e4f349153546728/rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1", size = 438970 }, + { url = "https://files.pythonhosted.org/packages/3f/64/9500f4d66601d55cadd21e90784cfd5d5f4560e129d72e4339823129171c/rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3", size = 383146 }, + { url = "https://files.pythonhosted.org/packages/4d/45/630327addb1d17173adcf4af01336fd0ee030c04798027dfcb50106001e0/rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130", size = 408294 }, + { url = "https://files.pythonhosted.org/packages/5f/ef/8efb3373cee54ea9d9980b772e5690a0c9e9214045a4e7fa35046e399fee/rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c", size = 556345 }, + { url = "https://files.pythonhosted.org/packages/54/01/151d3b9ef4925fc8f15bfb131086c12ec3c3d6dd4a4f7589c335bf8e85ba/rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b", size = 582292 }, + { url = "https://files.pythonhosted.org/packages/30/89/35fc7a6cdf3477d441c7aca5e9bbf5a14e0f25152aed7f63f4e0b141045d/rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333", size = 553855 }, + { url = "https://files.pythonhosted.org/packages/8f/e0/830c02b2457c4bd20a8c5bb394d31d81f57fbefce2dbdd2e31feff4f7003/rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730", size = 219100 }, + { url = "https://files.pythonhosted.org/packages/f8/30/7ac943f69855c2db77407ae363484b915d861702dbba1aa82d68d57f42be/rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = 
"sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf", size = 233794 }, + { url = "https://files.pythonhosted.org/packages/8b/63/e29f8ee14fcf383574f73b6bbdcbec0fbc2e5fc36b4de44d1ac389b1de62/rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d", size = 360786 }, + { url = "https://files.pythonhosted.org/packages/d3/e0/771ee28b02a24e81c8c0e645796a371350a2bb6672753144f36ae2d2afc9/rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd", size = 350589 }, + { url = "https://files.pythonhosted.org/packages/cf/49/abad4c4a1e6f3adf04785a99c247bfabe55ed868133e2d1881200aa5d381/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493", size = 381848 }, + { url = "https://files.pythonhosted.org/packages/3a/7d/f4bc6d6fbe6af7a0d2b5f2ee77079efef7c8528712745659ec0026888998/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96", size = 387879 }, + { url = "https://files.pythonhosted.org/packages/13/b0/575c797377fdcd26cedbb00a3324232e4cb2c5d121f6e4b0dbf8468b12ef/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123", size = 423916 }, + { url = "https://files.pythonhosted.org/packages/54/78/87157fa39d58f32a68d3326f8a81ad8fb99f49fe2aa7ad9a1b7d544f9478/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad", size = 448410 }, + { url = "https://files.pythonhosted.org/packages/59/69/860f89996065a88be1b6ff2d60e96a02b920a262d8aadab99e7903986597/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9", size = 382841 }, + { url = "https://files.pythonhosted.org/packages/bd/d7/bc144e10d27e3cb350f98df2492a319edd3caaf52ddfe1293f37a9afbfd7/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e", size = 409662 }, + { url = "https://files.pythonhosted.org/packages/14/2a/6bed0b05233c291a94c7e89bc76ffa1c619d4e1979fbfe5d96024020c1fb/rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338", size = 558221 }, + { url = "https://files.pythonhosted.org/packages/11/23/cd8f566de444a137bc1ee5795e47069a947e60810ba4152886fe5308e1b7/rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566", size = 583780 }, + { url = "https://files.pythonhosted.org/packages/8d/63/79c3602afd14d501f751e615a74a59040328da5ef29ed5754ae80d236b84/rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe", size = 553619 }, + { url = "https://files.pythonhosted.org/packages/9f/2e/c5c1689e80298d4e94c75b70faada4c25445739d91b94c211244a3ed7ed1/rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d", size = 233338 }, ] [[package]] @@ 
-4827,61 +4563,61 @@ wheels = [ [[package]] name = "ruff" -version = "0.9.7" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/39/8b/a86c300359861b186f18359adf4437ac8e4c52e42daa9eedc731ef9d5b53/ruff-0.9.7.tar.gz", hash = "sha256:643757633417907510157b206e490c3aa11cab0c087c912f60e07fbafa87a4c6", size = 3669813 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/f3/3a1d22973291226df4b4e2ff70196b926b6f910c488479adb0eeb42a0d7f/ruff-0.9.7-py3-none-linux_armv6l.whl", hash = "sha256:99d50def47305fe6f233eb8dabfd60047578ca87c9dcb235c9723ab1175180f4", size = 11774588 }, - { url = "https://files.pythonhosted.org/packages/8e/c9/b881f4157b9b884f2994fd08ee92ae3663fb24e34b0372ac3af999aa7fc6/ruff-0.9.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d59105ae9c44152c3d40a9c40d6331a7acd1cdf5ef404fbe31178a77b174ea66", size = 11746848 }, - { url = "https://files.pythonhosted.org/packages/14/89/2f546c133f73886ed50a3d449e6bf4af27d92d2f960a43a93d89353f0945/ruff-0.9.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f313b5800483770bd540cddac7c90fc46f895f427b7820f18fe1822697f1fec9", size = 11177525 }, - { url = "https://files.pythonhosted.org/packages/d7/93/6b98f2c12bf28ab9def59c50c9c49508519c5b5cfecca6de871cf01237f6/ruff-0.9.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042ae32b41343888f59c0a4148f103208bf6b21c90118d51dc93a68366f4e903", size = 11996580 }, - { url = "https://files.pythonhosted.org/packages/8e/3f/b3fcaf4f6d875e679ac2b71a72f6691a8128ea3cb7be07cbb249f477c061/ruff-0.9.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87862589373b33cc484b10831004e5e5ec47dc10d2b41ba770e837d4f429d721", size = 11525674 }, - { url = "https://files.pythonhosted.org/packages/f0/48/33fbf18defb74d624535d5d22adcb09a64c9bbabfa755bc666189a6b2210/ruff-0.9.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a17e1e01bee0926d351a1ee9bc15c445beae888f90069a6192a07a84af544b6b", size = 12739151 }, - { url = "https://files.pythonhosted.org/packages/63/b5/7e161080c5e19fa69495cbab7c00975ef8a90f3679caa6164921d7f52f4a/ruff-0.9.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7c1f880ac5b2cbebd58b8ebde57069a374865c73f3bf41f05fe7a179c1c8ef22", size = 13416128 }, - { url = "https://files.pythonhosted.org/packages/4e/c8/b5e7d61fb1c1b26f271ac301ff6d9de5e4d9a9a63f67d732fa8f200f0c88/ruff-0.9.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e63fc20143c291cab2841dbb8260e96bafbe1ba13fd3d60d28be2c71e312da49", size = 12870858 }, - { url = "https://files.pythonhosted.org/packages/da/cb/2a1a8e4e291a54d28259f8fc6a674cd5b8833e93852c7ef5de436d6ed729/ruff-0.9.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91ff963baed3e9a6a4eba2a02f4ca8eaa6eba1cc0521aec0987da8d62f53cbef", size = 14786046 }, - { url = "https://files.pythonhosted.org/packages/ca/6c/c8f8a313be1943f333f376d79724260da5701426c0905762e3ddb389e3f4/ruff-0.9.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88362e3227c82f63eaebf0b2eff5b88990280fb1ecf7105523883ba8c3aaf6fb", size = 12550834 }, - { url = "https://files.pythonhosted.org/packages/9d/ad/f70cf5e8e7c52a25e166bdc84c082163c9c6f82a073f654c321b4dff9660/ruff-0.9.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0372c5a90349f00212270421fe91874b866fd3626eb3b397ede06cd385f6f7e0", size = 11961307 }, - { url = 
"https://files.pythonhosted.org/packages/52/d5/4f303ea94a5f4f454daf4d02671b1fbfe2a318b5fcd009f957466f936c50/ruff-0.9.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d76b8ab60e99e6424cd9d3d923274a1324aefce04f8ea537136b8398bbae0a62", size = 11612039 }, - { url = "https://files.pythonhosted.org/packages/eb/c8/bd12a23a75603c704ce86723be0648ba3d4ecc2af07eecd2e9fa112f7e19/ruff-0.9.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0c439bdfc8983e1336577f00e09a4e7a78944fe01e4ea7fe616d00c3ec69a3d0", size = 12168177 }, - { url = "https://files.pythonhosted.org/packages/cc/57/d648d4f73400fef047d62d464d1a14591f2e6b3d4a15e93e23a53c20705d/ruff-0.9.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:115d1f15e8fdd445a7b4dc9a30abae22de3f6bcabeb503964904471691ef7606", size = 12610122 }, - { url = "https://files.pythonhosted.org/packages/49/79/acbc1edd03ac0e2a04ae2593555dbc9990b34090a9729a0c4c0cf20fb595/ruff-0.9.7-py3-none-win32.whl", hash = "sha256:e9ece95b7de5923cbf38893f066ed2872be2f2f477ba94f826c8defdd6ec6b7d", size = 9988751 }, - { url = "https://files.pythonhosted.org/packages/6d/95/67153a838c6b6ba7a2401241fd8a00cd8c627a8e4a0491b8d853dedeffe0/ruff-0.9.7-py3-none-win_amd64.whl", hash = "sha256:3770fe52b9d691a15f0b87ada29c45324b2ace8f01200fb0c14845e499eb0c2c", size = 11002987 }, - { url = "https://files.pythonhosted.org/packages/63/6a/aca01554949f3a401991dc32fe22837baeaccb8a0d868256cbb26a029778/ruff-0.9.7-py3-none-win_arm64.whl", hash = "sha256:b075a700b2533feb7a01130ff656a4ec0d5f340bb540ad98759b8401c32c2037", size = 10177763 }, +version = "0.9.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/80/63/77ecca9d21177600f551d1c58ab0e5a0b260940ea7312195bd2a4798f8a8/ruff-0.9.2.tar.gz", hash = "sha256:b5eceb334d55fae5f316f783437392642ae18e16dcf4f1858d55d3c2a0f8f5d0", size = 3553799 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/b9/0e168e4e7fb3af851f739e8f07889b91d1a33a30fca8c29fa3149d6b03ec/ruff-0.9.2-py3-none-linux_armv6l.whl", hash = "sha256:80605a039ba1454d002b32139e4970becf84b5fee3a3c3bf1c2af6f61a784347", size = 11652408 }, + { url = "https://files.pythonhosted.org/packages/2c/22/08ede5db17cf701372a461d1cb8fdde037da1d4fa622b69ac21960e6237e/ruff-0.9.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b9aab82bb20afd5f596527045c01e6ae25a718ff1784cb92947bff1f83068b00", size = 11587553 }, + { url = "https://files.pythonhosted.org/packages/42/05/dedfc70f0bf010230229e33dec6e7b2235b2a1b8cbb2a991c710743e343f/ruff-0.9.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fbd337bac1cfa96be615f6efcd4bc4d077edbc127ef30e2b8ba2a27e18c054d4", size = 11020755 }, + { url = "https://files.pythonhosted.org/packages/df/9b/65d87ad9b2e3def67342830bd1af98803af731243da1255537ddb8f22209/ruff-0.9.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b35259b0cbf8daa22a498018e300b9bb0174c2bbb7bcba593935158a78054d", size = 11826502 }, + { url = "https://files.pythonhosted.org/packages/93/02/f2239f56786479e1a89c3da9bc9391120057fc6f4a8266a5b091314e72ce/ruff-0.9.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b6a9701d1e371bf41dca22015c3f89769da7576884d2add7317ec1ec8cb9c3c", size = 11390562 }, + { url = "https://files.pythonhosted.org/packages/c9/37/d3a854dba9931f8cb1b2a19509bfe59e00875f48ade632e95aefcb7a0aee/ruff-0.9.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cc53e68b3c5ae41e8faf83a3b89f4a5d7b2cb666dff4b366bb86ed2a85b481f", size = 12548968 }, + { url = 
"https://files.pythonhosted.org/packages/fa/c3/c7b812bb256c7a1d5553433e95980934ffa85396d332401f6b391d3c4569/ruff-0.9.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8efd9da7a1ee314b910da155ca7e8953094a7c10d0c0a39bfde3fcfd2a015684", size = 13187155 }, + { url = "https://files.pythonhosted.org/packages/bd/5a/3c7f9696a7875522b66aa9bba9e326e4e5894b4366bd1dc32aa6791cb1ff/ruff-0.9.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3292c5a22ea9a5f9a185e2d131dc7f98f8534a32fb6d2ee7b9944569239c648d", size = 12704674 }, + { url = "https://files.pythonhosted.org/packages/be/d6/d908762257a96ce5912187ae9ae86792e677ca4f3dc973b71e7508ff6282/ruff-0.9.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a605fdcf6e8b2d39f9436d343d1f0ff70c365a1e681546de0104bef81ce88df", size = 14529328 }, + { url = "https://files.pythonhosted.org/packages/2d/c2/049f1e6755d12d9cd8823242fa105968f34ee4c669d04cac8cea51a50407/ruff-0.9.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c547f7f256aa366834829a08375c297fa63386cbe5f1459efaf174086b564247", size = 12385955 }, + { url = "https://files.pythonhosted.org/packages/91/5a/a9bdb50e39810bd9627074e42743b00e6dc4009d42ae9f9351bc3dbc28e7/ruff-0.9.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d18bba3d3353ed916e882521bc3e0af403949dbada344c20c16ea78f47af965e", size = 11810149 }, + { url = "https://files.pythonhosted.org/packages/e5/fd/57df1a0543182f79a1236e82a79c68ce210efb00e97c30657d5bdb12b478/ruff-0.9.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b338edc4610142355ccf6b87bd356729b62bf1bc152a2fad5b0c7dc04af77bfe", size = 11479141 }, + { url = "https://files.pythonhosted.org/packages/dc/16/bc3fd1d38974f6775fc152a0554f8c210ff80f2764b43777163c3c45d61b/ruff-0.9.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:492a5e44ad9b22a0ea98cf72e40305cbdaf27fac0d927f8bc9e1df316dcc96eb", size = 12014073 }, + { url = "https://files.pythonhosted.org/packages/47/6b/e4ca048a8f2047eb652e1e8c755f384d1b7944f69ed69066a37acd4118b0/ruff-0.9.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:af1e9e9fe7b1f767264d26b1075ac4ad831c7db976911fa362d09b2d0356426a", size = 12435758 }, + { url = "https://files.pythonhosted.org/packages/c2/40/4d3d6c979c67ba24cf183d29f706051a53c36d78358036a9cd21421582ab/ruff-0.9.2-py3-none-win32.whl", hash = "sha256:71cbe22e178c5da20e1514e1e01029c73dc09288a8028a5d3446e6bba87a5145", size = 9796916 }, + { url = "https://files.pythonhosted.org/packages/c3/ef/7f548752bdb6867e6939489c87fe4da489ab36191525fadc5cede2a6e8e2/ruff-0.9.2-py3-none-win_amd64.whl", hash = "sha256:c5e1d6abc798419cf46eed03f54f2e0c3adb1ad4b801119dedf23fcaf69b55b5", size = 10773080 }, + { url = "https://files.pythonhosted.org/packages/0e/4e/33df635528292bd2d18404e4daabcd74ca8a9853b2e1df85ed3d32d24362/ruff-0.9.2-py3-none-win_arm64.whl", hash = "sha256:a1b63fa24149918f8b37cef2ee6fff81f24f0d74b6f0bdc37bc3e1f2143e41c6", size = 10001738 }, ] [[package]] name = "s3transfer" -version = "0.11.3" +version = "0.11.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/39/24/1390172471d569e281fcfd29b92f2f73774e95972c965d14b6c802ff2352/s3transfer-0.11.3.tar.gz", hash = "sha256:edae4977e3a122445660c7c114bba949f9d191bae3b34a096f18a1c8c354527a", size = 148042 } +sdist = { url = 
"https://files.pythonhosted.org/packages/1a/aa/fdd958c626b00e3f046d4004363e7f1a2aba4354f78d65ceb3b217fa5eb8/s3transfer-0.11.1.tar.gz", hash = "sha256:3f25c900a367c8b7f7d8f9c34edc87e300bde424f779dc9f0a8ae4f9df9264f6", size = 146952 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/81/48c41b554a54d75d4407740abb60e3a102ae416284df04d1dbdcbe3dbf24/s3transfer-0.11.3-py3-none-any.whl", hash = "sha256:ca855bdeb885174b5ffa95b9913622459d4ad8e331fc98eb01e6d5eb6a30655d", size = 84246 }, + { url = "https://files.pythonhosted.org/packages/5f/ce/22673f4a85ccc640735b4f8d12178a0f41b5d3c6eda7f33756d10ce56901/s3transfer-0.11.1-py3-none-any.whl", hash = "sha256:8fa0aa48177be1f3425176dfe1ab85dcd3d962df603c3dbfc585e6bf857ef0ff", size = 84111 }, ] [[package]] name = "safetensors" -version = "0.5.3" +version = "0.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/71/7e/2d5d6ee7b40c0682315367ec7475693d110f512922d582fef1bd4a63adc3/safetensors-0.5.3.tar.gz", hash = "sha256:b6b0d6ecacec39a4fdd99cc19f4576f5219ce858e6fd8dbe7609df0b8dc56965", size = 67210 } +sdist = { url = "https://files.pythonhosted.org/packages/f4/4f/2ef9ef1766f8c194b01b67a63a444d2e557c8fe1d82faf3ebd85f370a917/safetensors-0.5.2.tar.gz", hash = "sha256:cb4a8d98ba12fa016f4241932b1fc5e702e5143f5374bba0bbcf7ddc1c4cf2b8", size = 66957 } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/ae/88f6c49dbd0cc4da0e08610019a3c78a7d390879a919411a410a1876d03a/safetensors-0.5.3-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd20eb133db8ed15b40110b7c00c6df51655a2998132193de2f75f72d99c7073", size = 436917 }, - { url = "https://files.pythonhosted.org/packages/b8/3b/11f1b4a2f5d2ab7da34ecc062b0bc301f2be024d110a6466726bec8c055c/safetensors-0.5.3-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:21d01c14ff6c415c485616b8b0bf961c46b3b343ca59110d38d744e577f9cce7", size = 418419 }, - { url = "https://files.pythonhosted.org/packages/5d/9a/add3e6fef267658075c5a41573c26d42d80c935cdc992384dfae435feaef/safetensors-0.5.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11bce6164887cd491ca75c2326a113ba934be596e22b28b1742ce27b1d076467", size = 459493 }, - { url = "https://files.pythonhosted.org/packages/df/5c/bf2cae92222513cc23b3ff85c4a1bb2811a2c3583ac0f8e8d502751de934/safetensors-0.5.3-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4a243be3590bc3301c821da7a18d87224ef35cbd3e5f5727e4e0728b8172411e", size = 472400 }, - { url = "https://files.pythonhosted.org/packages/58/11/7456afb740bd45782d0f4c8e8e1bb9e572f1bf82899fb6ace58af47b4282/safetensors-0.5.3-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8bd84b12b1670a6f8e50f01e28156422a2bc07fb16fc4e98bded13039d688a0d", size = 522891 }, - { url = "https://files.pythonhosted.org/packages/57/3d/fe73a9d2ace487e7285f6e157afee2383bd1ddb911b7cb44a55cf812eae3/safetensors-0.5.3-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:391ac8cab7c829452175f871fcaf414aa1e292b5448bd02620f675a7f3e7abb9", size = 537694 }, - { url = "https://files.pythonhosted.org/packages/a6/f8/dae3421624fcc87a89d42e1898a798bc7ff72c61f38973a65d60df8f124c/safetensors-0.5.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cead1fa41fc54b1e61089fa57452e8834f798cb1dc7a09ba3524f1eb08e0317a", size = 471642 }, - { url = 
"https://files.pythonhosted.org/packages/ce/20/1fbe16f9b815f6c5a672f5b760951e20e17e43f67f231428f871909a37f6/safetensors-0.5.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1077f3e94182d72618357b04b5ced540ceb71c8a813d3319f1aba448e68a770d", size = 502241 }, - { url = "https://files.pythonhosted.org/packages/5f/18/8e108846b506487aa4629fe4116b27db65c3dde922de2c8e0cc1133f3f29/safetensors-0.5.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:799021e78287bac619c7b3f3606730a22da4cda27759ddf55d37c8db7511c74b", size = 638001 }, - { url = "https://files.pythonhosted.org/packages/82/5a/c116111d8291af6c8c8a8b40628fe833b9db97d8141c2a82359d14d9e078/safetensors-0.5.3-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:df26da01aaac504334644e1b7642fa000bfec820e7cef83aeac4e355e03195ff", size = 734013 }, - { url = "https://files.pythonhosted.org/packages/7d/ff/41fcc4d3b7de837963622e8610d998710705bbde9a8a17221d85e5d0baad/safetensors-0.5.3-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:32c3ef2d7af8b9f52ff685ed0bc43913cdcde135089ae322ee576de93eae5135", size = 670687 }, - { url = "https://files.pythonhosted.org/packages/40/ad/2b113098e69c985a3d8fbda4b902778eae4a35b7d5188859b4a63d30c161/safetensors-0.5.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:37f1521be045e56fc2b54c606d4455573e717b2d887c579ee1dbba5f868ece04", size = 643147 }, - { url = "https://files.pythonhosted.org/packages/0a/0c/95aeb51d4246bd9a3242d3d8349c1112b4ee7611a4b40f0c5c93b05f001d/safetensors-0.5.3-cp38-abi3-win32.whl", hash = "sha256:cfc0ec0846dcf6763b0ed3d1846ff36008c6e7290683b61616c4b040f6a54ace", size = 296677 }, - { url = "https://files.pythonhosted.org/packages/69/e2/b011c38e5394c4c18fb5500778a55ec43ad6106126e74723ffaee246f56e/safetensors-0.5.3-cp38-abi3-win_amd64.whl", hash = "sha256:836cbbc320b47e80acd40e44c8682db0e8ad7123209f69b093def21ec7cafd11", size = 308878 }, + { url = "https://files.pythonhosted.org/packages/96/d1/017e31e75e274492a11a456a9e7c171f8f7911fe50735b4ec6ff37221220/safetensors-0.5.2-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:45b6092997ceb8aa3801693781a71a99909ab9cc776fbc3fa9322d29b1d3bef2", size = 427067 }, + { url = "https://files.pythonhosted.org/packages/24/84/e9d3ff57ae50dd0028f301c9ee064e5087fe8b00e55696677a0413c377a7/safetensors-0.5.2-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6d0d6a8ee2215a440e1296b843edf44fd377b055ba350eaba74655a2fe2c4bae", size = 408856 }, + { url = "https://files.pythonhosted.org/packages/f1/1d/fe95f5dd73db16757b11915e8a5106337663182d0381811c81993e0014a9/safetensors-0.5.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86016d40bcaa3bcc9a56cd74d97e654b5f4f4abe42b038c71e4f00a089c4526c", size = 450088 }, + { url = "https://files.pythonhosted.org/packages/cf/21/e527961b12d5ab528c6e47b92d5f57f33563c28a972750b238b871924e49/safetensors-0.5.2-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:990833f70a5f9c7d3fc82c94507f03179930ff7d00941c287f73b6fcbf67f19e", size = 458966 }, + { url = "https://files.pythonhosted.org/packages/a5/8b/1a037d7a57f86837c0b41905040369aea7d8ca1ec4b2a77592372b2ec380/safetensors-0.5.2-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dfa7c2f3fe55db34eba90c29df94bcdac4821043fc391cb5d082d9922013869", size = 509915 }, + { url = "https://files.pythonhosted.org/packages/61/3d/03dd5cfd33839df0ee3f4581a20bd09c40246d169c0e4518f20b21d5f077/safetensors-0.5.2-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:46ff2116150ae70a4e9c490d2ab6b6e1b1b93f25e520e540abe1b81b48560c3a", size = 527664 }, + { url = "https://files.pythonhosted.org/packages/c5/dc/8952caafa9a10a3c0f40fa86bacf3190ae7f55fa5eef87415b97b29cb97f/safetensors-0.5.2-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab696dfdc060caffb61dbe4066b86419107a24c804a4e373ba59be699ebd8d5", size = 461978 }, + { url = "https://files.pythonhosted.org/packages/60/da/82de1fcf1194e3dbefd4faa92dc98b33c06bed5d67890e0962dd98e18287/safetensors-0.5.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03c937100f38c9ff4c1507abea9928a6a9b02c9c1c9c3609ed4fb2bf413d4975", size = 491253 }, + { url = "https://files.pythonhosted.org/packages/5a/9a/d90e273c25f90c3ba1b0196a972003786f04c39e302fbd6649325b1272bb/safetensors-0.5.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a00e737948791b94dad83cf0eafc09a02c4d8c2171a239e8c8572fe04e25960e", size = 628644 }, + { url = "https://files.pythonhosted.org/packages/70/3c/acb23e05aa34b4f5edd2e7f393f8e6480fbccd10601ab42cd03a57d4ab5f/safetensors-0.5.2-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:d3a06fae62418ec8e5c635b61a8086032c9e281f16c63c3af46a6efbab33156f", size = 721648 }, + { url = "https://files.pythonhosted.org/packages/71/45/eaa3dba5253a7c6931230dc961641455710ab231f8a89cb3c4c2af70f8c8/safetensors-0.5.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:1506e4c2eda1431099cebe9abf6c76853e95d0b7a95addceaa74c6019c65d8cf", size = 659588 }, + { url = "https://files.pythonhosted.org/packages/b0/71/2f9851164f821064d43b481ddbea0149c2d676c4f4e077b178e7eeaa6660/safetensors-0.5.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5c5b5d9da594f638a259fca766046f44c97244cc7ab8bef161b3e80d04becc76", size = 632533 }, + { url = "https://files.pythonhosted.org/packages/00/f1/5680e2ef61d9c61454fad82c344f0e40b8741a9dbd1e31484f0d31a9b1c3/safetensors-0.5.2-cp38-abi3-win32.whl", hash = "sha256:fe55c039d97090d1f85277d402954dd6ad27f63034fa81985a9cc59655ac3ee2", size = 291167 }, + { url = "https://files.pythonhosted.org/packages/86/ca/aa489392ec6fb59223ffce825461e1f811a3affd417121a2088be7a5758b/safetensors-0.5.2-cp38-abi3-win_amd64.whl", hash = "sha256:78abdddd03a406646107f973c7843276e7b64e5e32623529dc17f3d94a20f589", size = 303756 }, ] [[package]] @@ -4924,58 +4660,52 @@ wheels = [ [[package]] name = "scipy" -version = "1.15.2" +version = "1.15.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b7/b9/31ba9cd990e626574baf93fbc1ac61cf9ed54faafd04c479117517661637/scipy-1.15.2.tar.gz", hash = "sha256:cd58a314d92838f7e6f755c8a2167ead4f27e1fd5c1251fd54289569ef3495ec", size = 59417316 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/df/ef233fff6838fe6f7840d69b5ef9f20d2b5c912a8727b21ebf876cb15d54/scipy-1.15.2-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a2ec871edaa863e8213ea5df811cd600734f6400b4af272e1c011e69401218e9", size = 38692502 }, - { url = "https://files.pythonhosted.org/packages/5c/20/acdd4efb8a68b842968f7bc5611b1aeb819794508771ad104de418701422/scipy-1.15.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:6f223753c6ea76983af380787611ae1291e3ceb23917393079dcc746ba60cfb5", size = 30085508 }, - { url = "https://files.pythonhosted.org/packages/42/55/39cf96ca7126f1e78ee72a6344ebdc6702fc47d037319ad93221063e6cf4/scipy-1.15.2-cp310-cp310-macosx_14_0_arm64.whl", hash = 
"sha256:ecf797d2d798cf7c838c6d98321061eb3e72a74710e6c40540f0e8087e3b499e", size = 22359166 }, - { url = "https://files.pythonhosted.org/packages/51/48/708d26a4ab8a1441536bf2dfcad1df0ca14a69f010fba3ccbdfc02df7185/scipy-1.15.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:9b18aa747da280664642997e65aab1dd19d0c3d17068a04b3fe34e2559196cb9", size = 25112047 }, - { url = "https://files.pythonhosted.org/packages/dd/65/f9c5755b995ad892020381b8ae11f16d18616208e388621dfacc11df6de6/scipy-1.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87994da02e73549dfecaed9e09a4f9d58a045a053865679aeb8d6d43747d4df3", size = 35536214 }, - { url = "https://files.pythonhosted.org/packages/de/3c/c96d904b9892beec978562f64d8cc43f9cca0842e65bd3cd1b7f7389b0ba/scipy-1.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69ea6e56d00977f355c0f84eba69877b6df084516c602d93a33812aa04d90a3d", size = 37646981 }, - { url = "https://files.pythonhosted.org/packages/3d/74/c2d8a24d18acdeae69ed02e132b9bc1bb67b7bee90feee1afe05a68f9d67/scipy-1.15.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:888307125ea0c4466287191e5606a2c910963405ce9671448ff9c81c53f85f58", size = 37230048 }, - { url = "https://files.pythonhosted.org/packages/42/19/0aa4ce80eca82d487987eff0bc754f014dec10d20de2f66754fa4ea70204/scipy-1.15.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9412f5e408b397ff5641080ed1e798623dbe1ec0d78e72c9eca8992976fa65aa", size = 40010322 }, - { url = "https://files.pythonhosted.org/packages/d0/d2/f0683b7e992be44d1475cc144d1f1eeae63c73a14f862974b4db64af635e/scipy-1.15.2-cp310-cp310-win_amd64.whl", hash = "sha256:b5e025e903b4f166ea03b109bb241355b9c42c279ea694d8864d033727205e65", size = 41233385 }, - { url = "https://files.pythonhosted.org/packages/40/1f/bf0a5f338bda7c35c08b4ed0df797e7bafe8a78a97275e9f439aceb46193/scipy-1.15.2-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:92233b2df6938147be6fa8824b8136f29a18f016ecde986666be5f4d686a91a4", size = 38703651 }, - { url = "https://files.pythonhosted.org/packages/de/54/db126aad3874601048c2c20ae3d8a433dbfd7ba8381551e6f62606d9bd8e/scipy-1.15.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:62ca1ff3eb513e09ed17a5736929429189adf16d2d740f44e53270cc800ecff1", size = 30102038 }, - { url = "https://files.pythonhosted.org/packages/61/d8/84da3fffefb6c7d5a16968fe5b9f24c98606b165bb801bb0b8bc3985200f/scipy-1.15.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:4c6676490ad76d1c2894d77f976144b41bd1a4052107902238047fb6a473e971", size = 22375518 }, - { url = "https://files.pythonhosted.org/packages/44/78/25535a6e63d3b9c4c90147371aedb5d04c72f3aee3a34451f2dc27c0c07f/scipy-1.15.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a8bf5cb4a25046ac61d38f8d3c3426ec11ebc350246a4642f2f315fe95bda655", size = 25142523 }, - { url = "https://files.pythonhosted.org/packages/e0/22/4b4a26fe1cd9ed0bc2b2cb87b17d57e32ab72c346949eaf9288001f8aa8e/scipy-1.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a8e34cf4c188b6dd004654f88586d78f95639e48a25dfae9c5e34a6dc34547e", size = 35491547 }, - { url = "https://files.pythonhosted.org/packages/32/ea/564bacc26b676c06a00266a3f25fdfe91a9d9a2532ccea7ce6dd394541bc/scipy-1.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28a0d2c2075946346e4408b211240764759e0fabaeb08d871639b5f3b1aca8a0", size = 37634077 }, - { url = 
"https://files.pythonhosted.org/packages/43/c2/bfd4e60668897a303b0ffb7191e965a5da4056f0d98acfb6ba529678f0fb/scipy-1.15.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:42dabaaa798e987c425ed76062794e93a243be8f0f20fff6e7a89f4d61cb3d40", size = 37231657 }, - { url = "https://files.pythonhosted.org/packages/4a/75/5f13050bf4f84c931bcab4f4e83c212a36876c3c2244475db34e4b5fe1a6/scipy-1.15.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6f5e296ec63c5da6ba6fa0343ea73fd51b8b3e1a300b0a8cae3ed4b1122c7462", size = 40035857 }, - { url = "https://files.pythonhosted.org/packages/b9/8b/7ec1832b09dbc88f3db411f8cdd47db04505c4b72c99b11c920a8f0479c3/scipy-1.15.2-cp311-cp311-win_amd64.whl", hash = "sha256:597a0c7008b21c035831c39927406c6181bcf8f60a73f36219b69d010aa04737", size = 41217654 }, - { url = "https://files.pythonhosted.org/packages/4b/5d/3c78815cbab499610f26b5bae6aed33e227225a9fa5290008a733a64f6fc/scipy-1.15.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c4697a10da8f8765bb7c83e24a470da5797e37041edfd77fd95ba3811a47c4fd", size = 38756184 }, - { url = "https://files.pythonhosted.org/packages/37/20/3d04eb066b471b6e171827548b9ddb3c21c6bbea72a4d84fc5989933910b/scipy-1.15.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:869269b767d5ee7ea6991ed7e22b3ca1f22de73ab9a49c44bad338b725603301", size = 30163558 }, - { url = "https://files.pythonhosted.org/packages/a4/98/e5c964526c929ef1f795d4c343b2ff98634ad2051bd2bbadfef9e772e413/scipy-1.15.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:bad78d580270a4d32470563ea86c6590b465cb98f83d760ff5b0990cb5518a93", size = 22437211 }, - { url = "https://files.pythonhosted.org/packages/1d/cd/1dc7371e29195ecbf5222f9afeedb210e0a75057d8afbd942aa6cf8c8eca/scipy-1.15.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:b09ae80010f52efddb15551025f9016c910296cf70adbf03ce2a8704f3a5ad20", size = 25232260 }, - { url = "https://files.pythonhosted.org/packages/f0/24/1a181a9e5050090e0b5138c5f496fee33293c342b788d02586bc410c6477/scipy-1.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a6fd6eac1ce74a9f77a7fc724080d507c5812d61e72bd5e4c489b042455865e", size = 35198095 }, - { url = "https://files.pythonhosted.org/packages/c0/53/eaada1a414c026673eb983f8b4a55fe5eb172725d33d62c1b21f63ff6ca4/scipy-1.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b871df1fe1a3ba85d90e22742b93584f8d2b8e6124f8372ab15c71b73e428b8", size = 37297371 }, - { url = "https://files.pythonhosted.org/packages/e9/06/0449b744892ed22b7e7b9a1994a866e64895363572677a316a9042af1fe5/scipy-1.15.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:03205d57a28e18dfd39f0377d5002725bf1f19a46f444108c29bdb246b6c8a11", size = 36872390 }, - { url = "https://files.pythonhosted.org/packages/6a/6f/a8ac3cfd9505ec695c1bc35edc034d13afbd2fc1882a7c6b473e280397bb/scipy-1.15.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:601881dfb761311045b03114c5fe718a12634e5608c3b403737ae463c9885d53", size = 39700276 }, - { url = "https://files.pythonhosted.org/packages/f5/6f/e6e5aff77ea2a48dd96808bb51d7450875af154ee7cbe72188afb0b37929/scipy-1.15.2-cp312-cp312-win_amd64.whl", hash = "sha256:e7c68b6a43259ba0aab737237876e5c2c549a031ddb7abc28c7b47f22e202ded", size = 40942317 }, - { url = "https://files.pythonhosted.org/packages/53/40/09319f6e0f276ea2754196185f95cd191cb852288440ce035d5c3a931ea2/scipy-1.15.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01edfac9f0798ad6b46d9c4c9ca0e0ad23dbf0b1eb70e96adb9fa7f525eff0bf", size = 38717587 }, - { url = 
"https://files.pythonhosted.org/packages/fe/c3/2854f40ecd19585d65afaef601e5e1f8dbf6758b2f95b5ea93d38655a2c6/scipy-1.15.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:08b57a9336b8e79b305a143c3655cc5bdbe6d5ece3378578888d2afbb51c4e37", size = 30100266 }, - { url = "https://files.pythonhosted.org/packages/dd/b1/f9fe6e3c828cb5930b5fe74cb479de5f3d66d682fa8adb77249acaf545b8/scipy-1.15.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:54c462098484e7466362a9f1672d20888f724911a74c22ae35b61f9c5919183d", size = 22373768 }, - { url = "https://files.pythonhosted.org/packages/15/9d/a60db8c795700414c3f681908a2b911e031e024d93214f2d23c6dae174ab/scipy-1.15.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:cf72ff559a53a6a6d77bd8eefd12a17995ffa44ad86c77a5df96f533d4e6c6bb", size = 25154719 }, - { url = "https://files.pythonhosted.org/packages/37/3b/9bda92a85cd93f19f9ed90ade84aa1e51657e29988317fabdd44544f1dd4/scipy-1.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9de9d1416b3d9e7df9923ab23cd2fe714244af10b763975bea9e4f2e81cebd27", size = 35163195 }, - { url = "https://files.pythonhosted.org/packages/03/5a/fc34bf1aa14dc7c0e701691fa8685f3faec80e57d816615e3625f28feb43/scipy-1.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb530e4794fc8ea76a4a21ccb67dea33e5e0e60f07fc38a49e821e1eae3b71a0", size = 37255404 }, - { url = "https://files.pythonhosted.org/packages/4a/71/472eac45440cee134c8a180dbe4c01b3ec247e0338b7c759e6cd71f199a7/scipy-1.15.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5ea7ed46d437fc52350b028b1d44e002646e28f3e8ddc714011aaf87330f2f32", size = 36860011 }, - { url = "https://files.pythonhosted.org/packages/01/b3/21f890f4f42daf20e4d3aaa18182dddb9192771cd47445aaae2e318f6738/scipy-1.15.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11e7ad32cf184b74380f43d3c0a706f49358b904fa7d5345f16ddf993609184d", size = 39657406 }, - { url = "https://files.pythonhosted.org/packages/0d/76/77cf2ac1f2a9cc00c073d49e1e16244e389dd88e2490c91d84e1e3e4d126/scipy-1.15.2-cp313-cp313-win_amd64.whl", hash = "sha256:a5080a79dfb9b78b768cebf3c9dcbc7b665c5875793569f48bf0e2b1d7f68f6f", size = 40961243 }, - { url = "https://files.pythonhosted.org/packages/4c/4b/a57f8ddcf48e129e6054fa9899a2a86d1fc6b07a0e15c7eebff7ca94533f/scipy-1.15.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:447ce30cee6a9d5d1379087c9e474628dab3db4a67484be1b7dc3196bfb2fac9", size = 38870286 }, - { url = "https://files.pythonhosted.org/packages/0c/43/c304d69a56c91ad5f188c0714f6a97b9c1fed93128c691148621274a3a68/scipy-1.15.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:c90ebe8aaa4397eaefa8455a8182b164a6cc1d59ad53f79943f266d99f68687f", size = 30141634 }, - { url = "https://files.pythonhosted.org/packages/44/1a/6c21b45d2548eb73be9b9bff421aaaa7e85e22c1f9b3bc44b23485dfce0a/scipy-1.15.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:def751dd08243934c884a3221156d63e15234a3155cf25978b0a668409d45eb6", size = 22415179 }, - { url = "https://files.pythonhosted.org/packages/74/4b/aefac4bba80ef815b64f55da06f62f92be5d03b467f2ce3668071799429a/scipy-1.15.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:302093e7dfb120e55515936cb55618ee0b895f8bcaf18ff81eca086c17bd80af", size = 25126412 }, - { url = "https://files.pythonhosted.org/packages/b1/53/1cbb148e6e8f1660aacd9f0a9dfa2b05e9ff1cb54b4386fe868477972ac2/scipy-1.15.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cd5b77413e1855351cdde594eca99c1f4a588c2d63711388b6a1f1c01f62274", size = 
34952867 }, - { url = "https://files.pythonhosted.org/packages/2c/23/e0eb7f31a9c13cf2dca083828b97992dd22f8184c6ce4fec5deec0c81fcf/scipy-1.15.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d0194c37037707b2afa7a2f2a924cf7bac3dc292d51b6a925e5fcb89bc5c776", size = 36890009 }, - { url = "https://files.pythonhosted.org/packages/03/f3/e699e19cabe96bbac5189c04aaa970718f0105cff03d458dc5e2b6bd1e8c/scipy-1.15.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:bae43364d600fdc3ac327db99659dcb79e6e7ecd279a75fe1266669d9a652828", size = 36545159 }, - { url = "https://files.pythonhosted.org/packages/af/f5/ab3838e56fe5cc22383d6fcf2336e48c8fe33e944b9037fbf6cbdf5a11f8/scipy-1.15.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f031846580d9acccd0044efd1a90e6f4df3a6e12b4b6bd694a7bc03a89892b28", size = 39136566 }, - { url = "https://files.pythonhosted.org/packages/0a/c8/b3f566db71461cabd4b2d5b39bcc24a7e1c119535c8361f81426be39bb47/scipy-1.15.2-cp313-cp313t-win_amd64.whl", hash = "sha256:fe8a9eb875d430d81755472c5ba75e84acc980e4a8f6204d402849234d3017db", size = 40477705 }, +sdist = { url = "https://files.pythonhosted.org/packages/76/c6/8eb0654ba0c7d0bb1bf67bf8fbace101a8e4f250f7722371105e8b6f68fc/scipy-1.15.1.tar.gz", hash = "sha256:033a75ddad1463970c96a88063a1df87ccfddd526437136b6ee81ff0312ebdf6", size = 59407493 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/53/b204ce5a4433f1864001b9d16f103b9c25f5002a602ae83585d0ea5f9c4a/scipy-1.15.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:c64ded12dcab08afff9e805a67ff4480f5e69993310e093434b10e85dc9d43e1", size = 41414518 }, + { url = "https://files.pythonhosted.org/packages/c7/fc/54ffa7a8847f7f303197a6ba65a66104724beba2e38f328135a78f0dc480/scipy-1.15.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5b190b935e7db569960b48840e5bef71dc513314cc4e79a1b7d14664f57fd4ff", size = 32519265 }, + { url = "https://files.pythonhosted.org/packages/f1/77/a98b8ba03d6f371dc31a38719affd53426d4665729dcffbed4afe296784a/scipy-1.15.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:4b17d4220df99bacb63065c76b0d1126d82bbf00167d1730019d2a30d6ae01ea", size = 24792859 }, + { url = "https://files.pythonhosted.org/packages/a7/78/70bb9f0df7444b18b108580934bfef774822e28fd34a68e5c263c7d2828a/scipy-1.15.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:63b9b6cd0333d0eb1a49de6f834e8aeaefe438df8f6372352084535ad095219e", size = 27886506 }, + { url = "https://files.pythonhosted.org/packages/14/a7/f40f6033e06de4176ddd6cc8c3ae9f10a226c3bca5d6b4ab883bc9914a14/scipy-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f151e9fb60fbf8e52426132f473221a49362091ce7a5e72f8aa41f8e0da4f25", size = 38375041 }, + { url = "https://files.pythonhosted.org/packages/17/03/390a1c5c61fd76b0fa4b3c5aa3bdd7e60f6c46f712924f1a9df5705ec046/scipy-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e10b1dd56ce92fba3e786007322542361984f8463c6d37f6f25935a5a6ef52", size = 40597556 }, + { url = "https://files.pythonhosted.org/packages/4e/70/fa95b3ae026b97eeca58204a90868802e5155ac71b9d7bdee92b68115dd3/scipy-1.15.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5dff14e75cdbcf07cdaa1c7707db6017d130f0af9ac41f6ce443a93318d6c6e0", size = 42938505 }, + { url = "https://files.pythonhosted.org/packages/d6/07/427859116bdd71847c898180f01802691f203c3e2455a1eb496130ff07c5/scipy-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:f82fcf4e5b377f819542fbc8541f7b5fbcf1c0017d0df0bc22c781bf60abc4d8", size = 
43909663 }, + { url = "https://files.pythonhosted.org/packages/8e/2e/7b71312da9c2dabff53e7c9a9d08231bc34d9d8fdabe88a6f1155b44591c/scipy-1.15.1-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:5bd8d27d44e2c13d0c1124e6a556454f52cd3f704742985f6b09e75e163d20d2", size = 41424362 }, + { url = "https://files.pythonhosted.org/packages/81/8c/ab85f1aa1cc200c796532a385b6ebf6a81089747adc1da7482a062acc46c/scipy-1.15.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:be3deeb32844c27599347faa077b359584ba96664c5c79d71a354b80a0ad0ce0", size = 32535910 }, + { url = "https://files.pythonhosted.org/packages/3b/9c/6f4b787058daa8d8da21ddff881b4320e28de4704a65ec147adb50cb2230/scipy-1.15.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:5eb0ca35d4b08e95da99a9f9c400dc9f6c21c424298a0ba876fdc69c7afacedf", size = 24809398 }, + { url = "https://files.pythonhosted.org/packages/16/2b/949460a796df75fc7a1ee1becea202cf072edbe325ebe29f6d2029947aa7/scipy-1.15.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:74bb864ff7640dea310a1377d8567dc2cb7599c26a79ca852fc184cc851954ac", size = 27918045 }, + { url = "https://files.pythonhosted.org/packages/5f/36/67fe249dd7ccfcd2a38b25a640e3af7e59d9169c802478b6035ba91dfd6d/scipy-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:667f950bf8b7c3a23b4199db24cb9bf7512e27e86d0e3813f015b74ec2c6e3df", size = 38332074 }, + { url = "https://files.pythonhosted.org/packages/fc/da/452e1119e6f720df3feb588cce3c42c5e3d628d4bfd4aec097bd30b7de0c/scipy-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:395be70220d1189756068b3173853029a013d8c8dd5fd3d1361d505b2aa58fa7", size = 40588469 }, + { url = "https://files.pythonhosted.org/packages/7f/71/5f94aceeac99a4941478af94fe9f459c6752d497035b6b0761a700f5f9ff/scipy-1.15.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ce3a000cd28b4430426db2ca44d96636f701ed12e2b3ca1f2b1dd7abdd84b39a", size = 42965214 }, + { url = "https://files.pythonhosted.org/packages/af/25/caa430865749d504271757cafd24066d596217e83326155993980bc22f97/scipy-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:3fe1d95944f9cf6ba77aa28b82dd6bb2a5b52f2026beb39ecf05304b8392864b", size = 43896034 }, + { url = "https://files.pythonhosted.org/packages/d8/6e/a9c42d0d39e09ed7fd203d0ac17adfea759cba61ab457671fe66e523dbec/scipy-1.15.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c09aa9d90f3500ea4c9b393ee96f96b0ccb27f2f350d09a47f533293c78ea776", size = 41478318 }, + { url = "https://files.pythonhosted.org/packages/04/ee/e3e535c81828618878a7433992fecc92fa4df79393f31a8fea1d05615091/scipy-1.15.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:0ac102ce99934b162914b1e4a6b94ca7da0f4058b6d6fd65b0cef330c0f3346f", size = 32596696 }, + { url = "https://files.pythonhosted.org/packages/c4/5e/b1b0124be8e76f87115f16b8915003eec4b7060298117715baf13f51942c/scipy-1.15.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:09c52320c42d7f5c7748b69e9f0389266fd4f82cf34c38485c14ee976cb8cb04", size = 24870366 }, + { url = "https://files.pythonhosted.org/packages/14/36/c00cb73eefda85946172c27913ab995c6ad4eee00fa4f007572e8c50cd51/scipy-1.15.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:cdde8414154054763b42b74fe8ce89d7f3d17a7ac5dd77204f0e142cdc9239e9", size = 28007461 }, + { url = "https://files.pythonhosted.org/packages/68/94/aff5c51b3799349a9d1e67a056772a0f8a47db371e83b498d43467806557/scipy-1.15.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4c9d8fc81d6a3b6844235e6fd175ee1d4c060163905a2becce8e74cb0d7554ce", size = 38068174 }, + { url = "https://files.pythonhosted.org/packages/b0/3c/0de11ca154e24a57b579fb648151d901326d3102115bc4f9a7a86526ce54/scipy-1.15.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fb57b30f0017d4afa5fe5f5b150b8f807618819287c21cbe51130de7ccdaed2", size = 40249869 }, + { url = "https://files.pythonhosted.org/packages/15/09/472e8d0a6b33199d1bb95e49bedcabc0976c3724edd9b0ef7602ccacf41e/scipy-1.15.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:491d57fe89927fa1aafbe260f4cfa5ffa20ab9f1435025045a5315006a91b8f5", size = 42629068 }, + { url = "https://files.pythonhosted.org/packages/ff/ba/31c7a8131152822b3a2cdeba76398ffb404d81d640de98287d236da90c49/scipy-1.15.1-cp312-cp312-win_amd64.whl", hash = "sha256:900f3fa3db87257510f011c292a5779eb627043dd89731b9c461cd16ef76ab3d", size = 43621992 }, + { url = "https://files.pythonhosted.org/packages/2b/bf/dd68965a4c5138a630eeed0baec9ae96e5d598887835bdde96cdd2fe4780/scipy-1.15.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:100193bb72fbff37dbd0bf14322314fc7cbe08b7ff3137f11a34d06dc0ee6b85", size = 41441136 }, + { url = "https://files.pythonhosted.org/packages/ef/5e/4928581312922d7e4d416d74c416a660addec4dd5ea185401df2269ba5a0/scipy-1.15.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:2114a08daec64980e4b4cbdf5bee90935af66d750146b1d2feb0d3ac30613692", size = 32533699 }, + { url = "https://files.pythonhosted.org/packages/32/90/03f99c43041852837686898c66767787cd41c5843d7a1509c39ffef683e9/scipy-1.15.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:6b3e71893c6687fc5e29208d518900c24ea372a862854c9888368c0b267387ab", size = 24807289 }, + { url = "https://files.pythonhosted.org/packages/9d/52/bfe82b42ae112eaba1af2f3e556275b8727d55ac6e4932e7aef337a9d9d4/scipy-1.15.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:837299eec3d19b7e042923448d17d95a86e43941104d33f00da7e31a0f715d3c", size = 27929844 }, + { url = "https://files.pythonhosted.org/packages/f6/77/54ff610bad600462c313326acdb035783accc6a3d5f566d22757ad297564/scipy-1.15.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82add84e8a9fb12af5c2c1a3a3f1cb51849d27a580cb9e6bd66226195142be6e", size = 38031272 }, + { url = "https://files.pythonhosted.org/packages/f1/26/98585cbf04c7cf503d7eb0a1966df8a268154b5d923c5fe0c1ed13154c49/scipy-1.15.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:070d10654f0cb6abd295bc96c12656f948e623ec5f9a4eab0ddb1466c000716e", size = 40210217 }, + { url = "https://files.pythonhosted.org/packages/fd/3f/3d2285eb6fece8bc5dbb2f9f94d61157d61d155e854fd5fea825b8218f12/scipy-1.15.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:55cc79ce4085c702ac31e49b1e69b27ef41111f22beafb9b49fea67142b696c4", size = 42587785 }, + { url = "https://files.pythonhosted.org/packages/48/7d/5b5251984bf0160d6533695a74a5fddb1fa36edd6f26ffa8c871fbd4782a/scipy-1.15.1-cp313-cp313-win_amd64.whl", hash = "sha256:c352c1b6d7cac452534517e022f8f7b8d139cd9f27e6fbd9f3cbd0bfd39f5bef", size = 43640439 }, + { url = "https://files.pythonhosted.org/packages/e7/b8/0e092f592d280496de52e152582030f8a270b194f87f890e1a97c5599b81/scipy-1.15.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0458839c9f873062db69a03de9a9765ae2e694352c76a16be44f93ea45c28d2b", size = 41619862 }, + { url = "https://files.pythonhosted.org/packages/f6/19/0b6e1173aba4db9e0b7aa27fe45019857fb90d6904038b83927cbe0a6c1d/scipy-1.15.1-cp313-cp313t-macosx_12_0_arm64.whl", 
hash = "sha256:af0b61c1de46d0565b4b39c6417373304c1d4f5220004058bdad3061c9fa8a95", size = 32610387 }, + { url = "https://files.pythonhosted.org/packages/e7/02/754aae3bd1fa0f2479ade3cfdf1732ecd6b05853f63eee6066a32684563a/scipy-1.15.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:71ba9a76c2390eca6e359be81a3e879614af3a71dfdabb96d1d7ab33da6f2364", size = 24883814 }, + { url = "https://files.pythonhosted.org/packages/1f/ac/d7906201604a2ea3b143bb0de51b3966f66441ba50b7dc182c4505b3edf9/scipy-1.15.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:14eaa373c89eaf553be73c3affb11ec6c37493b7eaaf31cf9ac5dffae700c2e0", size = 27944865 }, + { url = "https://files.pythonhosted.org/packages/84/9d/8f539002b5e203723af6a6f513a45e0a7671e9dabeedb08f417ac17e4edc/scipy-1.15.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f735bc41bd1c792c96bc426dece66c8723283695f02df61dcc4d0a707a42fc54", size = 39883261 }, + { url = "https://files.pythonhosted.org/packages/97/c0/62fd3bab828bcccc9b864c5997645a3b86372a35941cdaf677565c25c98d/scipy-1.15.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2722a021a7929d21168830790202a75dbb20b468a8133c74a2c0230c72626b6c", size = 42093299 }, + { url = "https://files.pythonhosted.org/packages/e4/1f/5d46a8d94e9f6d2c913cbb109e57e7eed914de38ea99e2c4d69a9fc93140/scipy-1.15.1-cp313-cp313t-win_amd64.whl", hash = "sha256:bc7136626261ac1ed988dca56cfc4ab5180f75e0ee52e58f1e6aa74b5f3eacd5", size = 43181730 }, ] [[package]] @@ -4997,22 +4727,17 @@ dependencies = [ { name = "pybars4", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pydantic-settings", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "scipy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] [package.optional-dependencies] anthropic = [ { name = "anthropic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -autogen = [ - { name = "autogen-agentchat", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] aws = [ { name = "boto3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] azure = [ { name = "azure-ai-inference", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "azure-ai-projects", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "azure-core-tracing-opentelemetry", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "azure-cosmos", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "azure-identity", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -5053,7 +4778,7 @@ ollama = [ { name = "ollama", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] onnx = [ - { name = "onnxruntime-genai", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, + { name = "onnxruntime-genai", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" 
}, ] pandas = [ { name = "pandas", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -5065,12 +4790,7 @@ postgres = [ { name = "psycopg", extra = ["binary", "pool"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] qdrant = [ - { name = "qdrant-client", version = "1.12.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.13' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform == 'win32')" }, - { name = "qdrant-client", version = "1.13.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, -] -realtime = [ - { name = "aiortc", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "websockets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "qdrant-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] redis = [ { name = "redis", extra = ["hiredis"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -5104,36 +4824,33 @@ dev = [ [package.metadata] requires-dist = [ { name = "aiohttp", specifier = "~=3.8" }, - { name = "aiortc", marker = "extra == 'realtime'", specifier = ">=1.9.0" }, { name = "anthropic", marker = "extra == 'anthropic'", specifier = "~=0.32" }, - { name = "autogen-agentchat", marker = "extra == 'autogen'", specifier = ">=0.2,<0.4" }, { name = "azure-ai-inference", marker = "extra == 'azure'", specifier = ">=1.0.0b6" }, - { name = "azure-ai-projects", marker = "extra == 'azure'", specifier = ">=1.0.0b5" }, { name = "azure-core-tracing-opentelemetry", marker = "extra == 'azure'", specifier = ">=1.0.0b11" }, { name = "azure-cosmos", marker = "extra == 'azure'", specifier = "~=4.7" }, { name = "azure-identity", specifier = "~=1.13" }, { name = "azure-identity", marker = "extra == 'azure'", specifier = "~=1.13" }, { name = "azure-search-documents", marker = "extra == 'azure'", specifier = ">=11.6.0b4" }, - { name = "boto3", marker = "extra == 'aws'", specifier = ">=1.36.4,<1.38.0" }, + { name = "boto3", marker = "extra == 'aws'", specifier = ">=1.28.57" }, { name = "chromadb", marker = "extra == 'chroma'", specifier = ">=0.5,<0.7" }, { name = "cloudevents", specifier = "~=1.0" }, { name = "dapr", marker = "extra == 'dapr'", specifier = ">=1.14.0" }, { name = "dapr-ext-fastapi", marker = "extra == 'dapr'", specifier = ">=1.14.0" }, { name = "defusedxml", specifier = "~=0.7" }, { name = "flask-dapr", marker = "extra == 'dapr'", specifier = ">=1.14.0" }, - { name = "google-cloud-aiplatform", marker = "extra == 'google'", specifier = "==1.80.0" }, - { name = "google-generativeai", marker = "extra == 'google'", specifier = "~=0.8" }, + { name = "google-cloud-aiplatform", marker = "extra == 'google'", specifier = "~=1.60" }, + { name = "google-generativeai", marker = "extra == 'google'", specifier = "~=0.7" }, { name = "ipykernel", marker = "extra == 'notebooks'", specifier = "~=6.29" }, { name = "jinja2", specifier = "~=3.1" }, { name = "milvus", marker = "sys_platform != 'win32' and extra == 'milvus'", specifier = ">=2.3,<2.3.8" }, { name = "mistralai", marker = "extra == 
'mistralai'", specifier = ">=1.2,<2.0" }, - { name = "motor", marker = "extra == 'mongo'", specifier = ">=3.3.2,<3.8.0" }, + { name = "motor", marker = "extra == 'mongo'", specifier = ">=3.3.2,<3.7.0" }, { name = "nest-asyncio", specifier = "~=1.6" }, { name = "numpy", marker = "python_full_version < '3.12'", specifier = ">=1.25.0" }, { name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.0" }, { name = "ollama", marker = "extra == 'ollama'", specifier = "~=0.4" }, - { name = "onnxruntime-genai", marker = "python_full_version < '3.13' and extra == 'onnx'", specifier = "~=0.5" }, - { name = "openai", specifier = "~=1.61" }, + { name = "onnxruntime-genai", marker = "extra == 'onnx'", specifier = "~=0.5" }, + { name = "openai", specifier = "~=1.0" }, { name = "openapi-core", specifier = ">=0.18,<0.20" }, { name = "opentelemetry-api", specifier = "~=1.24" }, { name = "opentelemetry-sdk", specifier = "~=1.24" }, @@ -5146,18 +4863,16 @@ requires-dist = [ { name = "pydantic", specifier = ">=2.0,!=2.10.0,!=2.10.1,!=2.10.2,!=2.10.3,<2.11" }, { name = "pydantic-settings", specifier = "~=2.0" }, { name = "pymilvus", marker = "extra == 'milvus'", specifier = ">=2.3,<2.6" }, - { name = "pymongo", marker = "extra == 'mongo'", specifier = ">=4.8.0,<4.12" }, + { name = "pymongo", marker = "extra == 'mongo'", specifier = ">=4.8.0,<4.11" }, { name = "qdrant-client", marker = "extra == 'qdrant'", specifier = "~=1.9" }, { name = "redis", extras = ["hiredis"], marker = "extra == 'redis'", specifier = "~=5.0" }, { name = "redisvl", marker = "extra == 'redis'", specifier = ">=0.3.6" }, - { name = "scipy", specifier = ">=1.15.1" }, { name = "sentence-transformers", marker = "extra == 'hugging-face'", specifier = ">=2.2,<4.0" }, - { name = "torch", marker = "extra == 'hugging-face'", specifier = "==2.6.0" }, + { name = "torch", marker = "extra == 'hugging-face'", specifier = "==2.5.1" }, { name = "transformers", extras = ["torch"], marker = "extra == 'hugging-face'", specifier = "~=4.28" }, { name = "types-redis", marker = "extra == 'redis'", specifier = "~=4.6.0.20240425" }, - { name = "usearch", marker = "extra == 'usearch'", specifier = "~=2.16" }, - { name = "weaviate-client", marker = "extra == 'weaviate'", specifier = ">=4.10,<5.0" }, - { name = "websockets", marker = "extra == 'realtime'", specifier = ">=13,<15" }, + { name = "usearch", marker = "extra == 'usearch'", specifier = "~=2.9" }, + { name = "weaviate-client", marker = "extra == 'weaviate'", specifier = ">=4.7,<5.0" }, ] [package.metadata.requires-dev] @@ -5171,14 +4886,14 @@ dev = [ { name = "pytest-cov", specifier = ">=5.0" }, { name = "pytest-timeout", specifier = ">=2.3.1" }, { name = "pytest-xdist", extras = ["psutil"], specifier = "~=3.6" }, - { name = "ruff", specifier = "~=0.9" }, + { name = "ruff", specifier = "~=0.7" }, { name = "snoop", specifier = "~=0.4" }, { name = "types-pyyaml", specifier = "~=6.0.12.20240311" }, ] [[package]] name = "sentence-transformers" -version = "3.4.1" +version = "3.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -5189,53 +4904,53 @@ dependencies = [ { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "transformers", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/16/74/aca6f8a2b8d62b4daf8c9a0c49d2aa573381caf47dc35cbb343389229376/sentence_transformers-3.4.1.tar.gz", hash = "sha256:68daa57504ff548340e54ff117bd86c1d2f784b21e0fb2689cf3272b8937b24b", size = 223898 } +sdist = { url = "https://files.pythonhosted.org/packages/79/0a/c677efe908b20e7e8d4ed6cce3a3447eebc7dc5e348e458f5f9a44a72b00/sentence_transformers-3.3.1.tar.gz", hash = "sha256:9635dbfb11c6b01d036b9cfcee29f7716ab64cf2407ad9f403a2e607da2ac48b", size = 217914 } wheels = [ - { url = "https://files.pythonhosted.org/packages/05/89/7eb147a37b7f31d3c815543df539d8b8d0425e93296c875cc87719d65232/sentence_transformers-3.4.1-py3-none-any.whl", hash = "sha256:e026dc6d56801fd83f74ad29a30263f401b4b522165c19386d8bc10dcca805da", size = 275896 }, + { url = "https://files.pythonhosted.org/packages/8b/c8/990e22a465e4771338da434d799578865d6d7ef1fdb50bd844b7ecdcfa19/sentence_transformers-3.3.1-py3-none-any.whl", hash = "sha256:abffcc79dab37b7d18d21a26d5914223dd42239cfe18cb5e111c66c54b658ae7", size = 268797 }, ] [[package]] name = "setuptools" -version = "75.8.2" +version = "75.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d1/53/43d99d7687e8cdef5ab5f9ec5eaf2c0423c2b35133a2b7e7bc276fc32b21/setuptools-75.8.2.tar.gz", hash = "sha256:4880473a969e5f23f2a2be3646b2dfd84af9028716d398e46192f84bc36900d2", size = 1344083 } +sdist = { url = "https://files.pythonhosted.org/packages/92/ec/089608b791d210aec4e7f97488e67ab0d33add3efccb83a056cbafe3a2a6/setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6", size = 1343222 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/38/7d7362e031bd6dc121e5081d8cb6aa6f6fedf2b67bf889962134c6da4705/setuptools-75.8.2-py3-none-any.whl", hash = "sha256:558e47c15f1811c1fa7adbd0096669bf76c1d3f433f58324df69f3f5ecac4e8f", size = 1229385 }, + { url = "https://files.pythonhosted.org/packages/69/8a/b9dc7678803429e4a3bc9ba462fa3dd9066824d3c607490235c6a796be5a/setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3", size = 1228782 }, ] [[package]] name = "shapely" -version = "2.0.7" +version = "2.0.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/21/c0/a911d1fd765d07a2b6769ce155219a281bfbe311584ebe97340d75c5bdb1/shapely-2.0.7.tar.gz", hash = "sha256:28fe2997aab9a9dc026dc6a355d04e85841546b2a5d232ed953e3321ab958ee5", size = 283413 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/15/2e/02c694d6ddacd4f13b625722d313d2838f23c5b988cbc680132983f73ce3/shapely-2.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:33fb10e50b16113714ae40adccf7670379e9ccf5b7a41d0002046ba2b8f0f691", size = 1478310 }, - { url = "https://files.pythonhosted.org/packages/87/69/b54a08bcd25e561bdd5183c008ace4424c25e80506e80674032504800efd/shapely-2.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f44eda8bd7a4bccb0f281264b34bf3518d8c4c9a8ffe69a1a05dabf6e8461147", size = 1336082 }, - { url = "https://files.pythonhosted.org/packages/b3/f9/40473fcb5b66ff849e563ca523d2a26dafd6957d52dd876ffd0eded39f1c/shapely-2.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf6c50cd879831955ac47af9c907ce0310245f9d162e298703f82e1785e38c98", size = 2371047 }, - { url = 
"https://files.pythonhosted.org/packages/d6/f3/c9cc07a7a03b5f5e83bd059f9adf3e21cf086b0e41d7f95e6464b151e798/shapely-2.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04a65d882456e13c8b417562c36324c0cd1e5915f3c18ad516bb32ee3f5fc895", size = 2469112 }, - { url = "https://files.pythonhosted.org/packages/5d/b9/fc63d6b0b25063a3ff806857a5dc88851d54d1c278288f18cef1b322b449/shapely-2.0.7-cp310-cp310-win32.whl", hash = "sha256:7e97104d28e60b69f9b6a957c4d3a2a893b27525bc1fc96b47b3ccef46726bf2", size = 1296057 }, - { url = "https://files.pythonhosted.org/packages/fe/d1/8df43f94cf4cda0edbab4545f7cdd67d3f1d02910eaff152f9f45c6d00d8/shapely-2.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:35524cc8d40ee4752520819f9894b9f28ba339a42d4922e92c99b148bed3be39", size = 1441787 }, - { url = "https://files.pythonhosted.org/packages/1d/ad/21798c2fec013e289f8ab91d42d4d3299c315b8c4460c08c75fef0901713/shapely-2.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5cf23400cb25deccf48c56a7cdda8197ae66c0e9097fcdd122ac2007e320bc34", size = 1473091 }, - { url = "https://files.pythonhosted.org/packages/15/63/eef4f180f1b5859c70e7f91d2f2570643e5c61e7d7c40743d15f8c6cbc42/shapely-2.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8f1da01c04527f7da59ee3755d8ee112cd8967c15fab9e43bba936b81e2a013", size = 1332921 }, - { url = "https://files.pythonhosted.org/packages/fe/67/77851dd17738bbe7762a0ef1acf7bc499d756f68600dd68a987d78229412/shapely-2.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f623b64bb219d62014781120f47499a7adc30cf7787e24b659e56651ceebcb0", size = 2427949 }, - { url = "https://files.pythonhosted.org/packages/0b/a5/2c8dbb0f383519771df19164e3bf3a8895d195d2edeab4b6040f176ee28e/shapely-2.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6d95703efaa64aaabf278ced641b888fc23d9c6dd71f8215091afd8a26a66e3", size = 2529282 }, - { url = "https://files.pythonhosted.org/packages/dc/4e/e1d608773c7fe4cde36d48903c0d6298e3233dc69412403783ac03fa5205/shapely-2.0.7-cp311-cp311-win32.whl", hash = "sha256:2f6e4759cf680a0f00a54234902415f2fa5fe02f6b05546c662654001f0793a2", size = 1295751 }, - { url = "https://files.pythonhosted.org/packages/27/57/8ec7c62012bed06731f7ee979da7f207bbc4b27feed5f36680b6a70df54f/shapely-2.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:b52f3ab845d32dfd20afba86675c91919a622f4627182daec64974db9b0b4608", size = 1442684 }, - { url = "https://files.pythonhosted.org/packages/4f/3e/ea100eec5811bafd0175eb21828a3be5b0960f65250f4474391868be7c0f/shapely-2.0.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4c2b9859424facbafa54f4a19b625a752ff958ab49e01bc695f254f7db1835fa", size = 1482451 }, - { url = "https://files.pythonhosted.org/packages/ce/53/c6a3487716fd32e1f813d2a9608ba7b72a8a52a6966e31c6443480a1d016/shapely-2.0.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5aed1c6764f51011d69a679fdf6b57e691371ae49ebe28c3edb5486537ffbd51", size = 1345765 }, - { url = "https://files.pythonhosted.org/packages/fd/dd/b35d7891d25cc11066a70fb8d8169a6a7fca0735dd9b4d563a84684969a3/shapely-2.0.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73c9ae8cf443187d784d57202199bf9fd2d4bb7d5521fe8926ba40db1bc33e8e", size = 2421540 }, - { url = "https://files.pythonhosted.org/packages/62/de/8dbd7df60eb23cb983bb698aac982944b3d602ef0ce877a940c269eae34e/shapely-2.0.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9469f49ff873ef566864cb3516091881f217b5d231c8164f7883990eec88b73", size = 
2525741 }, - { url = "https://files.pythonhosted.org/packages/96/64/faf0413ebc7a84fe7a0790bf39ec0b02b40132b68e57aba985c0b6e4e7b6/shapely-2.0.7-cp312-cp312-win32.whl", hash = "sha256:6bca5095e86be9d4ef3cb52d56bdd66df63ff111d580855cb8546f06c3c907cd", size = 1296552 }, - { url = "https://files.pythonhosted.org/packages/63/05/8a1c279c226d6ad7604d9e237713dd21788eab96db97bf4ce0ea565e5596/shapely-2.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:f86e2c0259fe598c4532acfcf638c1f520fa77c1275912bbc958faecbf00b108", size = 1443464 }, - { url = "https://files.pythonhosted.org/packages/c6/21/abea43effbfe11f792e44409ee9ad7635aa93ef1c8ada0ef59b3c1c3abad/shapely-2.0.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a0c09e3e02f948631c7763b4fd3dd175bc45303a0ae04b000856dedebefe13cb", size = 1481618 }, - { url = "https://files.pythonhosted.org/packages/d9/71/af688798da36fe355a6e6ffe1d4628449cb5fa131d57fc169bcb614aeee7/shapely-2.0.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:06ff6020949b44baa8fc2e5e57e0f3d09486cd5c33b47d669f847c54136e7027", size = 1345159 }, - { url = "https://files.pythonhosted.org/packages/67/47/f934fe2b70d31bb9774ad4376e34f81666deed6b811306ff574faa3d115e/shapely-2.0.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6dbf096f961ca6bec5640e22e65ccdec11e676344e8157fe7d636e7904fd36", size = 2410267 }, - { url = "https://files.pythonhosted.org/packages/f5/8a/2545cc2a30afc63fc6176c1da3b76af28ef9c7358ed4f68f7c6a9d86cf5b/shapely-2.0.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adeddfb1e22c20548e840403e5e0b3d9dc3daf66f05fa59f1fcf5b5f664f0e98", size = 2514128 }, - { url = "https://files.pythonhosted.org/packages/87/54/2344ce7da39676adec94e84fbaba92a8f1664e4ae2d33bd404dafcbe607f/shapely-2.0.7-cp313-cp313-win32.whl", hash = "sha256:a7f04691ce1c7ed974c2f8b34a1fe4c3c5dfe33128eae886aa32d730f1ec1913", size = 1295783 }, - { url = "https://files.pythonhosted.org/packages/d7/1e/6461e5cfc8e73ae165b8cff6eb26a4d65274fad0e1435137c5ba34fe4e88/shapely-2.0.7-cp313-cp313-win_amd64.whl", hash = "sha256:aaaf5f7e6cc234c1793f2a2760da464b604584fb58c6b6d7d94144fd2692d67e", size = 1442300 }, +sdist = { url = "https://files.pythonhosted.org/packages/4a/89/0d20bac88016be35ff7d3c0c2ae64b477908f1b1dfa540c5d69ac7af07fe/shapely-2.0.6.tar.gz", hash = "sha256:997f6159b1484059ec239cacaa53467fd8b5564dabe186cd84ac2944663b0bf6", size = 282361 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/d4/f84bbbdb7771f5b9ade94db2398b256cf1471f1eb0ca8afbe0f6ca725d5a/shapely-2.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29a34e068da2d321e926b5073539fd2a1d4429a2c656bd63f0bd4c8f5b236d0b", size = 1449635 }, + { url = "https://files.pythonhosted.org/packages/03/10/bd6edb66ed0a845f0809f7ce653596f6fd9c6be675b3653872f47bf49f82/shapely-2.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c84c3f53144febf6af909d6b581bc05e8785d57e27f35ebaa5c1ab9baba13b", size = 1296756 }, + { url = "https://files.pythonhosted.org/packages/af/09/6374c11cb493a9970e8c04d7be25f578a37f6494a2fecfbed3a447b16b2c/shapely-2.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad2fae12dca8d2b727fa12b007e46fbc522148a584f5d6546c539f3464dccde", size = 2381960 }, + { url = "https://files.pythonhosted.org/packages/2b/a6/302e0d9c210ccf4d1ffadf7ab941797d3255dcd5f93daa73aaf116a4db39/shapely-2.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3304883bd82d44be1b27a9d17f1167fda8c7f5a02a897958d86c59ec69b705e", size = 2468133 }, + 
{ url = "https://files.pythonhosted.org/packages/8c/be/e448681dc485f2931d4adee93d531fce93608a3ee59433303cc1a46e21a5/shapely-2.0.6-cp310-cp310-win32.whl", hash = "sha256:3ec3a0eab496b5e04633a39fa3d5eb5454628228201fb24903d38174ee34565e", size = 1294982 }, + { url = "https://files.pythonhosted.org/packages/cd/4c/6f4a6fc085e3be01c4c9de0117a2d373bf9fec5f0426cf4d5c94090a5a4d/shapely-2.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:28f87cdf5308a514763a5c38de295544cb27429cfa655d50ed8431a4796090c4", size = 1441141 }, + { url = "https://files.pythonhosted.org/packages/37/15/269d8e1f7f658a37e61f7028683c546f520e4e7cedba1e32c77ff9d3a3c7/shapely-2.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5aeb0f51a9db176da9a30cb2f4329b6fbd1e26d359012bb0ac3d3c7781667a9e", size = 1449578 }, + { url = "https://files.pythonhosted.org/packages/37/63/e182e43081fffa0a2d970c480f2ef91647a6ab94098f61748c23c2a485f2/shapely-2.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a7a78b0d51257a367ee115f4d41ca4d46edbd0dd280f697a8092dd3989867b2", size = 1296792 }, + { url = "https://files.pythonhosted.org/packages/6e/5a/d019f69449329dcd517355444fdb9ddd58bec5e080b8bdba007e8e4c546d/shapely-2.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f32c23d2f43d54029f986479f7c1f6e09c6b3a19353a3833c2ffb226fb63a855", size = 2443997 }, + { url = "https://files.pythonhosted.org/packages/25/aa/53f145e5a610a49af9ac49f2f1be1ec8659ebd5c393d66ac94e57c83b00e/shapely-2.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dc9fb0eb56498912025f5eb352b5126f04801ed0e8bdbd867d21bdbfd7cbd0", size = 2528334 }, + { url = "https://files.pythonhosted.org/packages/64/64/0c7b0a22b416d36f6296b92bb4219d82b53d0a7c47e16fd0a4c85f2f117c/shapely-2.0.6-cp311-cp311-win32.whl", hash = "sha256:d93b7e0e71c9f095e09454bf18dad5ea716fb6ced5df3cb044564a00723f339d", size = 1294669 }, + { url = "https://files.pythonhosted.org/packages/b1/5a/6a67d929c467a1973b6bb9f0b00159cc343b02bf9a8d26db1abd2f87aa23/shapely-2.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:c02eb6bf4cfb9fe6568502e85bb2647921ee49171bcd2d4116c7b3109724ef9b", size = 1442032 }, + { url = "https://files.pythonhosted.org/packages/46/77/efd9f9d4b6a762f976f8b082f54c9be16f63050389500fb52e4f6cc07c1a/shapely-2.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cec9193519940e9d1b86a3b4f5af9eb6910197d24af02f247afbfb47bcb3fab0", size = 1450326 }, + { url = "https://files.pythonhosted.org/packages/68/53/5efa6e7a4036a94fe6276cf7bbb298afded51ca3396b03981ad680c8cc7d/shapely-2.0.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83b94a44ab04a90e88be69e7ddcc6f332da7c0a0ebb1156e1c4f568bbec983c3", size = 1298480 }, + { url = "https://files.pythonhosted.org/packages/88/a2/1be1db4fc262e536465a52d4f19d85834724fedf2299a1b9836bc82fe8fa/shapely-2.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:537c4b2716d22c92036d00b34aac9d3775e3691f80c7aa517c2c290351f42cd8", size = 2439311 }, + { url = "https://files.pythonhosted.org/packages/d5/7d/9a57e187cbf2fbbbdfd4044a4f9ce141c8d221f9963750d3b001f0ec080d/shapely-2.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fea108334be345c283ce74bf064fa00cfdd718048a8af7343c59eb40f59726", size = 2524835 }, + { url = "https://files.pythonhosted.org/packages/6d/0a/f407509ab56825f39bf8cfce1fb410238da96cf096809c3e404e5bc71ea1/shapely-2.0.6-cp312-cp312-win32.whl", hash = "sha256:42fd4cd4834747e4990227e4cbafb02242c0cffe9ce7ef9971f53ac52d80d55f", size = 1295613 }, + { url = 
"https://files.pythonhosted.org/packages/7b/b3/857afd9dfbfc554f10d683ac412eac6fa260d1f4cd2967ecb655c57e831a/shapely-2.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:665990c84aece05efb68a21b3523a6b2057e84a1afbef426ad287f0796ef8a48", size = 1442539 }, + { url = "https://files.pythonhosted.org/packages/34/e8/d164ef5b0eab86088cde06dee8415519ffd5bb0dd1bd9d021e640e64237c/shapely-2.0.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:42805ef90783ce689a4dde2b6b2f261e2c52609226a0438d882e3ced40bb3013", size = 1445344 }, + { url = "https://files.pythonhosted.org/packages/ce/e2/9fba7ac142f7831757a10852bfa465683724eadbc93d2d46f74a16f9af04/shapely-2.0.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6d2cb146191a47bd0cee8ff5f90b47547b82b6345c0d02dd8b25b88b68af62d7", size = 1296182 }, + { url = "https://files.pythonhosted.org/packages/cf/dc/790d4bda27d196cd56ec66975eaae3351c65614cafd0e16ddde39ec9fb92/shapely-2.0.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3fdef0a1794a8fe70dc1f514440aa34426cc0ae98d9a1027fb299d45741c381", size = 2423426 }, + { url = "https://files.pythonhosted.org/packages/af/b0/f8169f77eac7392d41e231911e0095eb1148b4d40c50ea9e34d999c89a7e/shapely-2.0.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c665a0301c645615a107ff7f52adafa2153beab51daf34587170d85e8ba6805", size = 2513249 }, + { url = "https://files.pythonhosted.org/packages/f6/1d/a8c0e9ab49ff2f8e4dedd71b0122eafb22a18ad7e9d256025e1f10c84704/shapely-2.0.6-cp313-cp313-win32.whl", hash = "sha256:0334bd51828f68cd54b87d80b3e7cee93f249d82ae55a0faf3ea21c9be7b323a", size = 1294848 }, + { url = "https://files.pythonhosted.org/packages/23/38/2bc32dd1e7e67a471d4c60971e66df0bdace88656c47a9a728ace0091075/shapely-2.0.6-cp313-cp313-win_amd64.whl", hash = "sha256:d37d070da9e0e0f0a530a621e17c0b8c3c9d04105655132a87cfff8bd77cc4c2", size = 1441371 }, ] [[package]] @@ -5381,14 +5096,14 @@ wheels = [ [[package]] name = "starlette" -version = "0.45.3" +version = "0.41.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/fb/2984a686808b89a6781526129a4b51266f678b2d2b97ab2d325e56116df8/starlette-0.45.3.tar.gz", hash = "sha256:2cbcba2a75806f8a41c722141486f37c28e30a0921c5f6fe4346cb0dcee1302f", size = 2574076 } +sdist = { url = "https://files.pythonhosted.org/packages/1a/4c/9b5764bd22eec91c4039ef4c55334e9187085da2d8a2df7bd570869aae18/starlette-0.41.3.tar.gz", hash = "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835", size = 2574159 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/61/f2b52e107b1fc8944b33ef56bf6ac4ebbe16d91b94d2b87ce013bf63fb84/starlette-0.45.3-py3-none-any.whl", hash = "sha256:dfb6d332576f136ec740296c7e8bb8c8a7125044e7c6da30744718880cdd059d", size = 71507 }, + { url = "https://files.pythonhosted.org/packages/96/00/2b325970b3060c7cecebab6d295afe763365822b1306a12eeab198f74323/starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7", size = 73225 }, ] [[package]] @@ -5421,15 +5136,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b6/cb/b86984bed139586d01532a587464b5805f12e397594f19f931c4c2fbfa61/tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539", size = 28169 }, ] -[[package]] -name = "termcolor" -version = 
"2.5.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/37/72/88311445fd44c455c7d553e61f95412cf89054308a1aa2434ab835075fc5/termcolor-2.5.0.tar.gz", hash = "sha256:998d8d27da6d48442e8e1f016119076b690d962507531df4890fcd2db2ef8a6f", size = 13057 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/be/df630c387a0a054815d60be6a97eb4e8f17385d5d6fe660e1c02750062b4/termcolor-2.5.0-py3-none-any.whl", hash = "sha256:37b17b5fc1e604945c2642c872a3764b5d547a48009871aea3edd3afa180afb8", size = 7755 }, -] - [[package]] name = "threadpoolctl" version = "3.5.0" @@ -5439,42 +5145,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4b/2c/ffbf7a134b9ab11a67b0cf0726453cedd9c5043a4fe7a35d1cefa9a1bcfb/threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467", size = 18414 }, ] -[[package]] -name = "tiktoken" -version = "0.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "regex", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ea/cf/756fedf6981e82897f2d570dd25fa597eb3f4459068ae0572d7e888cfd6f/tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d", size = 35991 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/64/f3/50ec5709fad61641e4411eb1b9ac55b99801d71f1993c29853f256c726c9/tiktoken-0.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:586c16358138b96ea804c034b8acf3f5d3f0258bd2bc3b0227af4af5d622e382", size = 1065770 }, - { url = "https://files.pythonhosted.org/packages/d6/f8/5a9560a422cf1755b6e0a9a436e14090eeb878d8ec0f80e0cd3d45b78bf4/tiktoken-0.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9c59ccc528c6c5dd51820b3474402f69d9a9e1d656226848ad68a8d5b2e5108", size = 1009314 }, - { url = "https://files.pythonhosted.org/packages/bc/20/3ed4cfff8f809cb902900ae686069e029db74567ee10d017cb254df1d598/tiktoken-0.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0968d5beeafbca2a72c595e8385a1a1f8af58feaebb02b227229b69ca5357fd", size = 1143140 }, - { url = "https://files.pythonhosted.org/packages/f1/95/cc2c6d79df8f113bdc6c99cdec985a878768120d87d839a34da4bd3ff90a/tiktoken-0.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a5fb085a6a3b7350b8fc838baf493317ca0e17bd95e8642f95fc69ecfed1de", size = 1197860 }, - { url = "https://files.pythonhosted.org/packages/c7/6c/9c1a4cc51573e8867c9381db1814223c09ebb4716779c7f845d48688b9c8/tiktoken-0.9.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15a2752dea63d93b0332fb0ddb05dd909371ededa145fe6a3242f46724fa7990", size = 1259661 }, - { url = "https://files.pythonhosted.org/packages/cd/4c/22eb8e9856a2b1808d0a002d171e534eac03f96dbe1161978d7389a59498/tiktoken-0.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:26113fec3bd7a352e4b33dbaf1bd8948de2507e30bd95a44e2b1156647bc01b4", size = 894026 }, - { url = "https://files.pythonhosted.org/packages/4d/ae/4613a59a2a48e761c5161237fc850eb470b4bb93696db89da51b79a871f1/tiktoken-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f32cc56168eac4851109e9b5d327637f15fd662aa30dd79f964b7c39fbadd26e", size = 1065987 }, - { url = 
"https://files.pythonhosted.org/packages/3f/86/55d9d1f5b5a7e1164d0f1538a85529b5fcba2b105f92db3622e5d7de6522/tiktoken-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:45556bc41241e5294063508caf901bf92ba52d8ef9222023f83d2483a3055348", size = 1009155 }, - { url = "https://files.pythonhosted.org/packages/03/58/01fb6240df083b7c1916d1dcb024e2b761213c95d576e9f780dfb5625a76/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03935988a91d6d3216e2ec7c645afbb3d870b37bcb67ada1943ec48678e7ee33", size = 1142898 }, - { url = "https://files.pythonhosted.org/packages/b1/73/41591c525680cd460a6becf56c9b17468d3711b1df242c53d2c7b2183d16/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3d80aad8d2c6b9238fc1a5524542087c52b860b10cbf952429ffb714bc1136", size = 1197535 }, - { url = "https://files.pythonhosted.org/packages/7d/7c/1069f25521c8f01a1a182f362e5c8e0337907fae91b368b7da9c3e39b810/tiktoken-0.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b2a21133be05dc116b1d0372af051cd2c6aa1d2188250c9b553f9fa49301b336", size = 1259548 }, - { url = "https://files.pythonhosted.org/packages/6f/07/c67ad1724b8e14e2b4c8cca04b15da158733ac60136879131db05dda7c30/tiktoken-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:11a20e67fdf58b0e2dea7b8654a288e481bb4fc0289d3ad21291f8d0849915fb", size = 893895 }, - { url = "https://files.pythonhosted.org/packages/cf/e5/21ff33ecfa2101c1bb0f9b6df750553bd873b7fb532ce2cb276ff40b197f/tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03", size = 1065073 }, - { url = "https://files.pythonhosted.org/packages/8e/03/a95e7b4863ee9ceec1c55983e4cc9558bcfd8f4f80e19c4f8a99642f697d/tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210", size = 1008075 }, - { url = "https://files.pythonhosted.org/packages/40/10/1305bb02a561595088235a513ec73e50b32e74364fef4de519da69bc8010/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794", size = 1140754 }, - { url = "https://files.pythonhosted.org/packages/1b/40/da42522018ca496432ffd02793c3a72a739ac04c3794a4914570c9bb2925/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22", size = 1196678 }, - { url = "https://files.pythonhosted.org/packages/5c/41/1e59dddaae270ba20187ceb8aa52c75b24ffc09f547233991d5fd822838b/tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2", size = 1259283 }, - { url = "https://files.pythonhosted.org/packages/5b/64/b16003419a1d7728d0d8c0d56a4c24325e7b10a21a9dd1fc0f7115c02f0a/tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16", size = 894897 }, - { url = "https://files.pythonhosted.org/packages/7a/11/09d936d37f49f4f494ffe660af44acd2d99eb2429d60a57c71318af214e0/tiktoken-0.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b0e8e05a26eda1249e824156d537015480af7ae222ccb798e5234ae0285dbdb", size = 1064919 }, - { url = "https://files.pythonhosted.org/packages/80/0e/f38ba35713edb8d4197ae602e80837d574244ced7fb1b6070b31c29816e0/tiktoken-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:27d457f096f87685195eea0165a1807fae87b97b2161fe8c9b1df5bd74ca6f63", size = 1007877 }, - { url = "https://files.pythonhosted.org/packages/fe/82/9197f77421e2a01373e27a79dd36efdd99e6b4115746ecc553318ecafbf0/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf8ded49cddf825390e36dd1ad35cd49589e8161fdcb52aa25f0583e90a3e01", size = 1140095 }, - { url = "https://files.pythonhosted.org/packages/f2/bb/4513da71cac187383541facd0291c4572b03ec23c561de5811781bbd988f/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc156cb314119a8bb9748257a2eaebd5cc0753b6cb491d26694ed42fc7cb3139", size = 1195649 }, - { url = "https://files.pythonhosted.org/packages/fa/5c/74e4c137530dd8504e97e3a41729b1103a4ac29036cbfd3250b11fd29451/tiktoken-0.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cd69372e8c9dd761f0ab873112aba55a0e3e506332dd9f7522ca466e817b1b7a", size = 1258465 }, - { url = "https://files.pythonhosted.org/packages/de/a8/8f499c179ec900783ffe133e9aab10044481679bb9aad78436d239eee716/tiktoken-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:5ea0edb6f83dc56d794723286215918c1cde03712cbbafa0348b33448faf5b95", size = 894669 }, -] - [[package]] name = "tinycss2" version = "1.4.0" @@ -5553,7 +5223,7 @@ wheels = [ [[package]] name = "torch" -version = "2.6.0" +version = "2.5.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -5569,32 +5239,28 @@ dependencies = [ { name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cusparselt-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "setuptools", marker = "(python_full_version >= '3.12' and sys_platform == 'darwin') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'win32')" }, { name = "sympy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "triton", marker = "python_full_version < '3.13' and platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/37/81/aa9ab58ec10264c1abe62c8b73f5086c3c558885d6beecebf699f0dbeaeb/torch-2.6.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:6860df13d9911ac158f4c44031609700e1eba07916fff62e21e6ffa0a9e01961", size = 766685561 }, - { url = "https://files.pythonhosted.org/packages/86/86/e661e229df2f5bfc6eab4c97deb1286d598bbeff31ab0cdb99b3c0d53c6f/torch-2.6.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c4f103a49830ce4c7561ef4434cc7926e5a5fe4e5eb100c19ab36ea1e2b634ab", size = 95751887 }, - { url = 
"https://files.pythonhosted.org/packages/20/e0/5cb2f8493571f0a5a7273cd7078f191ac252a402b5fb9cb6091f14879109/torch-2.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:56eeaf2ecac90da5d9e35f7f35eb286da82673ec3c582e310a8d1631a1c02341", size = 204165139 }, - { url = "https://files.pythonhosted.org/packages/e5/16/ea1b7842413a7b8a5aaa5e99e8eaf3da3183cc3ab345ad025a07ff636301/torch-2.6.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:09e06f9949e1a0518c5b09fe95295bc9661f219d9ecb6f9893e5123e10696628", size = 66520221 }, - { url = "https://files.pythonhosted.org/packages/78/a9/97cbbc97002fff0de394a2da2cdfa859481fdca36996d7bd845d50aa9d8d/torch-2.6.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:7979834102cd5b7a43cc64e87f2f3b14bd0e1458f06e9f88ffa386d07c7446e1", size = 766715424 }, - { url = "https://files.pythonhosted.org/packages/6d/fa/134ce8f8a7ea07f09588c9cc2cea0d69249efab977707cf67669431dcf5c/torch-2.6.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:ccbd0320411fe1a3b3fec7b4d3185aa7d0c52adac94480ab024b5c8f74a0bf1d", size = 95759416 }, - { url = "https://files.pythonhosted.org/packages/11/c5/2370d96b31eb1841c3a0883a492c15278a6718ccad61bb6a649c80d1d9eb/torch-2.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:46763dcb051180ce1ed23d1891d9b1598e07d051ce4c9d14307029809c4d64f7", size = 204164970 }, - { url = "https://files.pythonhosted.org/packages/0b/fa/f33a4148c6fb46ca2a3f8de39c24d473822d5774d652b66ed9b1214da5f7/torch-2.6.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:94fc63b3b4bedd327af588696559f68c264440e2503cc9e6954019473d74ae21", size = 66530713 }, - { url = "https://files.pythonhosted.org/packages/e5/35/0c52d708144c2deb595cd22819a609f78fdd699b95ff6f0ebcd456e3c7c1/torch-2.6.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:2bb8987f3bb1ef2675897034402373ddfc8f5ef0e156e2d8cfc47cacafdda4a9", size = 766624563 }, - { url = "https://files.pythonhosted.org/packages/01/d6/455ab3fbb2c61c71c8842753b566012e1ed111e7a4c82e0e1c20d0c76b62/torch-2.6.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:b789069020c5588c70d5c2158ac0aa23fd24a028f34a8b4fcb8fcb4d7efcf5fb", size = 95607867 }, - { url = "https://files.pythonhosted.org/packages/18/cf/ae99bd066571656185be0d88ee70abc58467b76f2f7c8bfeb48735a71fe6/torch-2.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:7e1448426d0ba3620408218b50aa6ada88aeae34f7a239ba5431f6c8774b1239", size = 204120469 }, - { url = "https://files.pythonhosted.org/packages/81/b4/605ae4173aa37fb5aa14605d100ff31f4f5d49f617928c9f486bb3aaec08/torch-2.6.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:9a610afe216a85a8b9bc9f8365ed561535c93e804c2a317ef7fabcc5deda0989", size = 66532538 }, - { url = "https://files.pythonhosted.org/packages/24/85/ead1349fc30fe5a32cadd947c91bda4a62fbfd7f8c34ee61f6398d38fb48/torch-2.6.0-cp313-cp313-manylinux1_x86_64.whl", hash = "sha256:4874a73507a300a5d089ceaff616a569e7bb7c613c56f37f63ec3ffac65259cf", size = 766626191 }, - { url = "https://files.pythonhosted.org/packages/dd/b0/26f06f9428b250d856f6d512413e9e800b78625f63801cbba13957432036/torch-2.6.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:a0d5e1b9874c1a6c25556840ab8920569a7a4137afa8a63a32cee0bc7d89bd4b", size = 95611439 }, - { url = "https://files.pythonhosted.org/packages/c2/9c/fc5224e9770c83faed3a087112d73147cd7c7bfb7557dcf9ad87e1dda163/torch-2.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:510c73251bee9ba02ae1cb6c9d4ee0907b3ce6020e62784e2d7598e0cfa4d6cc", size = 204126475 }, - { url = 
"https://files.pythonhosted.org/packages/88/8b/d60c0491ab63634763be1537ad488694d316ddc4a20eaadd639cedc53971/torch-2.6.0-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:ff96f4038f8af9f7ec4231710ed4549da1bdebad95923953a25045dcf6fd87e2", size = 66536783 }, + { url = "https://files.pythonhosted.org/packages/2a/ef/834af4a885b31a0b32fff2d80e1e40f771e1566ea8ded55347502440786a/torch-2.5.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:71328e1bbe39d213b8721678f9dcac30dfc452a46d586f1d514a6aa0a99d4744", size = 906446312 }, + { url = "https://files.pythonhosted.org/packages/69/f0/46e74e0d145f43fa506cb336eaefb2d240547e4ce1f496e442711093ab25/torch-2.5.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:34bfa1a852e5714cbfa17f27c49d8ce35e1b7af5608c4bc6e81392c352dbc601", size = 91919522 }, + { url = "https://files.pythonhosted.org/packages/a5/13/1eb674c8efbd04d71e4a157ceba991904f633e009a584dd65dccbafbb648/torch-2.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:32a037bd98a241df6c93e4c789b683335da76a2ac142c0973675b715102dc5fa", size = 203088048 }, + { url = "https://files.pythonhosted.org/packages/a9/9d/e0860474ee0ff8f6ef2c50ec8f71a250f38d78a9b9df9fd241ad3397a65b/torch-2.5.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:23d062bf70776a3d04dbe74db950db2a5245e1ba4f27208a87f0d743b0d06e86", size = 63877046 }, + { url = "https://files.pythonhosted.org/packages/d1/35/e8b2daf02ce933e4518e6f5682c72fd0ed66c15910ea1fb4168f442b71c4/torch-2.5.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:de5b7d6740c4b636ef4db92be922f0edc425b65ed78c5076c43c42d362a45457", size = 906474467 }, + { url = "https://files.pythonhosted.org/packages/40/04/bd91593a4ca178ece93ca55f27e2783aa524aaccbfda66831d59a054c31e/torch-2.5.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:340ce0432cad0d37f5a31be666896e16788f1adf8ad7be481196b503dad675b9", size = 91919450 }, + { url = "https://files.pythonhosted.org/packages/0d/4a/e51420d46cfc90562e85af2fee912237c662ab31140ab179e49bd69401d6/torch-2.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:603c52d2fe06433c18b747d25f5c333f9c1d58615620578c326d66f258686f9a", size = 203098237 }, + { url = "https://files.pythonhosted.org/packages/d0/db/5d9cbfbc7968d79c5c09a0bc0bc3735da079f2fd07cc10498a62b320a480/torch-2.5.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:31f8c39660962f9ae4eeec995e3049b5492eb7360dd4f07377658ef4d728fa4c", size = 63884466 }, + { url = "https://files.pythonhosted.org/packages/8b/5c/36c114d120bfe10f9323ed35061bc5878cc74f3f594003854b0ea298942f/torch-2.5.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:ed231a4b3a5952177fafb661213d690a72caaad97d5824dd4fc17ab9e15cec03", size = 906389343 }, + { url = "https://files.pythonhosted.org/packages/6d/69/d8ada8b6e0a4257556d5b4ddeb4345ea8eeaaef3c98b60d1cca197c7ad8e/torch-2.5.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:3f4b7f10a247e0dcd7ea97dc2d3bfbfc90302ed36d7f3952b0008d0df264e697", size = 91811673 }, + { url = "https://files.pythonhosted.org/packages/5f/ba/607d013b55b9fd805db2a5c2662ec7551f1910b4eef39653eeaba182c5b2/torch-2.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:73e58e78f7d220917c5dbfad1a40e09df9929d3b95d25e57d9f8558f84c9a11c", size = 203046841 }, + { url = "https://files.pythonhosted.org/packages/57/6c/bf52ff061da33deb9f94f4121fde7ff3058812cb7d2036c97bc167793bd1/torch-2.5.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:8c712df61101964eb11910a846514011f0b6f5920c55dbf567bff8a34163d5b1", size = 63858109 }, + { url = 
"https://files.pythonhosted.org/packages/69/72/20cb30f3b39a9face296491a86adb6ff8f1a47a897e4d14667e6cf89d5c3/torch-2.5.1-cp313-cp313-manylinux1_x86_64.whl", hash = "sha256:9b61edf3b4f6e3b0e0adda8b3960266b9009d02b37555971f4d1c8f7a05afed7", size = 906393265 }, ] [[package]] @@ -5638,7 +5304,7 @@ wheels = [ [[package]] name = "transformers" -version = "4.49.0" +version = "4.48.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -5652,9 +5318,9 @@ dependencies = [ { name = "tokenizers", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/79/50/46573150944f46df8ec968eda854023165a84470b42f69f67c7d475dabc5/transformers-4.49.0.tar.gz", hash = "sha256:7e40e640b5b8dc3f48743f5f5adbdce3660c82baafbd3afdfc04143cdbd2089e", size = 8610952 } +sdist = { url = "https://files.pythonhosted.org/packages/ea/71/93a6331682d6f15adf7d646956db0c43e5f1759bbbd05f2ef53029bae107/transformers-4.48.0.tar.gz", hash = "sha256:03fdfcbfb8b0367fb6c9fbe9d1c9aa54dfd847618be9b52400b2811d22799cb1", size = 8372101 } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/37/1f29af63e9c30156a3ed6ebc2754077016577c094f31de7b2631e5d379eb/transformers-4.49.0-py3-none-any.whl", hash = "sha256:6b4fded1c5fee04d384b1014495b4235a2b53c87503d7d592423c06128cbbe03", size = 9970275 }, + { url = "https://files.pythonhosted.org/packages/45/d6/a69764e89fc5c2c957aa473881527c8c35521108d553df703e9ba703daeb/transformers-4.48.0-py3-none-any.whl", hash = "sha256:6d3de6d71cb5f2a10f9775ccc17abce9620195caaf32ec96542bd2a6937f25b0", size = 9673380 }, ] [package.optional-dependencies] @@ -5665,13 +5331,15 @@ torch = [ [[package]] name = "triton" -version = "3.2.0" +version = "3.1.0" source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock", marker = "python_full_version < '3.13' and sys_platform == 'linux'" }, +] wheels = [ - { url = "https://files.pythonhosted.org/packages/01/65/3ffa90e158a2c82f0716eee8d26a725d241549b7d7aaf7e4f44ac03ebd89/triton-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3e54983cd51875855da7c68ec05c05cf8bb08df361b1d5b69e05e40b0c9bd62", size = 253090354 }, - { url = "https://files.pythonhosted.org/packages/a7/2e/757d2280d4fefe7d33af7615124e7e298ae7b8e3bc4446cdb8e88b0f9bab/triton-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8009a1fb093ee8546495e96731336a33fb8856a38e45bb4ab6affd6dbc3ba220", size = 253157636 }, - { url = "https://files.pythonhosted.org/packages/06/00/59500052cb1cf8cf5316be93598946bc451f14072c6ff256904428eaf03c/triton-3.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d9b215efc1c26fa7eefb9a157915c92d52e000d2bf83e5f69704047e63f125c", size = 253159365 }, - { url = "https://files.pythonhosted.org/packages/c7/30/37a3384d1e2e9320331baca41e835e90a3767303642c7a80d4510152cbcf/triton-3.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5dfa23ba84541d7c0a531dfce76d8bcd19159d50a4a8b14ad01e91734a5c1b0", size = 253154278 }, + { url = "https://files.pythonhosted.org/packages/98/29/69aa56dc0b2eb2602b553881e34243475ea2afd9699be042316842788ff5/triton-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6b0dd10a925263abbe9fa37dcde67a5e9b2383fc269fdf59f5657cac38c5d1d8", size = 209460013 }, + { url = "https://files.pythonhosted.org/packages/86/17/d9a5cf4fcf46291856d1e90762e36cbabd2a56c7265da0d1d9508c8e3943/triton-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f34f6e7885d1bf0eaaf7ba875a5f0ce6f3c13ba98f9503651c1e6dc6757ed5c", size = 209506424 }, + { url = "https://files.pythonhosted.org/packages/78/eb/65f5ba83c2a123f6498a3097746607e5b2f16add29e36765305e4ac7fdd8/triton-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8182f42fd8080a7d39d666814fa36c5e30cc00ea7eeeb1a2983dbb4c99a0fdc", size = 209551444 }, ] [[package]] @@ -5738,11 +5406,11 @@ wheels = [ [[package]] name = "types-setuptools" -version = "75.8.0.20250225" +version = "75.8.0.20250110" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1f/ad/0747cfa03acc6cbeee3ce15704ac65fb4c7444f3cd5596c34d581e7366a7/types_setuptools-75.8.0.20250225.tar.gz", hash = "sha256:6038f7e983d55792a5f90d8fdbf5d4c186026214a16bb65dd6ae83c624ae9636", size = 48448 } +sdist = { url = "https://files.pythonhosted.org/packages/f7/42/5713e90d4f9683f2301d900f33e4fc2405ad8ac224dda30f6cb7f4cd215b/types_setuptools-75.8.0.20250110.tar.gz", hash = "sha256:96f7ec8bbd6e0a54ea180d66ad68ad7a1d7954e7281a710ea2de75e355545271", size = 48185 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/f2/6259d7d302d66a1df119baac81a06649c2cf5fa0a671278c408d43711cee/types_setuptools-75.8.0.20250225-py3-none-any.whl", hash = "sha256:94c86b439cc60bcc68c1cda3fd2c301f007f8f9502f4fbb54c66cb5ce9b875af", size = 71839 }, + { url = "https://files.pythonhosted.org/packages/cf/a3/dbfd106751b11c728cec21cc62cbfe7ff7391b935c4b6e8f0bdc2e6fd541/types_setuptools-75.8.0.20250110-py3-none-any.whl", hash = "sha256:a9f12980bbf9bcdc23ecd80755789085bad6bfce4060c2275bc2b4ca9f2bc480", size = 71521 }, ] [[package]] @@ -5769,11 +5437,11 @@ wheels = [ [[package]] name = "tzdata" -version = "2025.1" +version = "2024.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/0f/fa4723f22942480be4ca9527bbde8d43f6c3f2fe8412f00e7f5f6746bc8b/tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694", size = 194950 } +sdist = { url = "https://files.pythonhosted.org/packages/e1/34/943888654477a574a86a98e9896bae89c7aa15078ec29f490fef2f1e5384/tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc", size = 193282 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/dd/84f10e23edd882c6f968c21c2434fe67bd4a528967067515feca9e611e5e/tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639", size = 346762 }, + { url = "https://files.pythonhosted.org/packages/a6/ab/7e5f53c3b9d14972843a647d8d7a853969a58aecc7559cb3267302c94774/tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd", size = 346586 }, ] [[package]] @@ -5964,16 +5632,16 @@ wheels = [ [[package]] name = "virtualenv" -version = "20.29.2" +version = "20.29.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name 
= "platformdirs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f1/88/dacc875dd54a8acadb4bcbfd4e3e86df8be75527116c91d8f9784f5e9cab/virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728", size = 4320272 } +sdist = { url = "https://files.pythonhosted.org/packages/5a/5d/8d625ebddf9d31c301f85125b78002d4e4401fe1c15c04dca58a54a3056a/virtualenv-20.29.0.tar.gz", hash = "sha256:6345e1ff19d4b1296954cee076baaf58ff2a12a84a338c62b02eda39f20aa982", size = 7658081 } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/fa/849483d56773ae29740ae70043ad88e068f98a6401aa819b5d6bee604683/virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a", size = 4301478 }, + { url = "https://files.pythonhosted.org/packages/f0/d3/12687ab375bb0e077ea802a5128f7b45eb5de7a7c6cb576ccf9dd59ff80a/virtualenv-20.29.0-py3-none-any.whl", hash = "sha256:c12311863497992dc4b8644f8ea82d3b35bb7ef8ee82e6630d76d0197c39baf9", size = 4282443 }, ] [[package]] @@ -6052,7 +5720,7 @@ wheels = [ [[package]] name = "weaviate-client" -version = "4.11.0" +version = "4.10.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "authlib", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -6063,9 +5731,9 @@ dependencies = [ { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "validators", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a9/0e/e12a41d1a272d30184f8deaaec3b27a4e98aaf13a4aa1f3fc40ee0ce294d/weaviate_client-4.11.0.tar.gz", hash = "sha256:27cb82326a1b69b1de764614973f7c1c8f1e3e86459b75c6db1be9fac004d68a", size = 609019 } +sdist = { url = "https://files.pythonhosted.org/packages/de/ce/e34426eeda39a77b45df86f9ab901a7232096a071ee379a046a8072e2a35/weaviate_client-4.10.4.tar.gz", hash = "sha256:a1e799fc41d9f43a56c95490f6c14f475861f27d2a62b9b6de28a1db5494751d", size = 594549 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/54/b5e80a1708e4973332c149565729010bc0d9674f9f5f301445d56b9c550c/weaviate_client-4.11.0-py3-none-any.whl", hash = "sha256:de97f34a953974f9f294a371a7057a1d6c908af92561b797d70e9c4eed2bba02", size = 350057 }, + { url = "https://files.pythonhosted.org/packages/50/e9/5b6ffbdee0d0f1444d0ce142c70a70bf22ba43bf2d6b35913a8d7e674431/weaviate_client-4.10.4-py3-none-any.whl", hash = "sha256:d9808456ba109fcd99331bc833b61cf520bf6ad9db442db621e12f78c8480c4c", size = 330450 }, ] [[package]] @@ -6088,61 +5756,61 @@ wheels = [ [[package]] name = "websockets" -version = "15.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2e/7a/8bc4d15af7ff30f7ba34f9a172063bfcee9f5001d7cef04bee800a658f33/websockets-15.0.tar.gz", hash = "sha256:ca36151289a15b39d8d683fd8b7abbe26fc50be311066c5f8dcf3cb8cee107ab", size = 175574 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/f1/b20cc4c1ff84911c791f36fa511a78203836bb4d603f56290de08c067437/websockets-15.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5e6ee18a53dd5743e6155b8ff7e8e477c25b29b440f87f65be8165275c87fef0", size = 174701 }, - { url = 
"https://files.pythonhosted.org/packages/f9/e8/4de59ee85ec86052ca574f4e5327ef948e4f77757d3c9c1503f5a0e9c039/websockets-15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ee06405ea2e67366a661ed313e14cf2a86e84142a3462852eb96348f7219cee3", size = 172358 }, - { url = "https://files.pythonhosted.org/packages/2f/ea/b0f95815cdc83d61b1a895858671c6af38a76c23f3ea5d91e2ba11bbedc7/websockets-15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8711682a629bbcaf492f5e0af72d378e976ea1d127a2d47584fa1c2c080b436b", size = 172610 }, - { url = "https://files.pythonhosted.org/packages/09/ed/c5d8f1f296f475c00611a40eff6a952248785efb125f91a0b29575f36ba6/websockets-15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94c4a9b01eede952442c088d415861b0cf2053cbd696b863f6d5022d4e4e2453", size = 181579 }, - { url = "https://files.pythonhosted.org/packages/b7/fc/2444b5ae792d92179f20cec53475bcc25d1d7f00a2be9947de9837ef230a/websockets-15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45535fead66e873f411c1d3cf0d3e175e66f4dd83c4f59d707d5b3e4c56541c4", size = 180588 }, - { url = "https://files.pythonhosted.org/packages/ff/b5/0945a31562d351cff26d76a2ae9a4ba4536e698aa059a4262afd793b2a1d/websockets-15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e389efe46ccb25a1f93d08c7a74e8123a2517f7b7458f043bd7529d1a63ffeb", size = 180902 }, - { url = "https://files.pythonhosted.org/packages/b6/7c/e9d844b87754bc83b294cc1c695cbc6c5d42e329b85d2bf2d7bb9554d09c/websockets-15.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:67a04754d121ea5ca39ddedc3f77071651fb5b0bc6b973c71c515415b44ed9c5", size = 181282 }, - { url = "https://files.pythonhosted.org/packages/9e/6c/6a5d3272f494fa2fb4806b896ecb312bd6c72bab632df4ace19946c079dc/websockets-15.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:bd66b4865c8b853b8cca7379afb692fc7f52cf898786537dfb5e5e2d64f0a47f", size = 180694 }, - { url = "https://files.pythonhosted.org/packages/b2/32/1fb4b62c2ec2c9844d4ddaa4021d993552c7c493a0acdcec95551679d501/websockets-15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a4cc73a6ae0a6751b76e69cece9d0311f054da9b22df6a12f2c53111735657c8", size = 180631 }, - { url = "https://files.pythonhosted.org/packages/e4/9b/5ef1ddb8857ce894217bdd9572ad98c1cef20d8f9f0f43823b782b7ded6b/websockets-15.0-cp310-cp310-win32.whl", hash = "sha256:89da58e4005e153b03fe8b8794330e3f6a9774ee9e1c3bd5bc52eb098c3b0c4f", size = 175664 }, - { url = "https://files.pythonhosted.org/packages/29/63/c320572ccf813ed2bc3058a0e0291ee95eb258dc5e6b3446ca45dc1af0fd/websockets-15.0-cp310-cp310-win_amd64.whl", hash = "sha256:4ff380aabd7a74a42a760ee76c68826a8f417ceb6ea415bd574a035a111fd133", size = 176109 }, - { url = "https://files.pythonhosted.org/packages/ee/16/81a7403c8c0a33383de647e89c07824ea6a654e3877d6ff402cbae298cb8/websockets-15.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:dd24c4d256558429aeeb8d6c24ebad4e982ac52c50bc3670ae8646c181263965", size = 174702 }, - { url = "https://files.pythonhosted.org/packages/ef/40/4629202386a3bf1195db9fe41baeb1d6dfd8d72e651d9592d81dae7fdc7c/websockets-15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f83eca8cbfd168e424dfa3b3b5c955d6c281e8fc09feb9d870886ff8d03683c7", size = 172359 }, - { url = "https://files.pythonhosted.org/packages/7b/33/dfb650e822bc7912d8c542c452497867af91dec81e7b5bf96aca5b419d58/websockets-15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:4095a1f2093002c2208becf6f9a178b336b7572512ee0a1179731acb7788e8ad", size = 172604 }, - { url = "https://files.pythonhosted.org/packages/2e/52/666743114513fcffd43ee5df261a1eb5d41f8e9861b7a190b730732c19ba/websockets-15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb915101dfbf318486364ce85662bb7b020840f68138014972c08331458d41f3", size = 182145 }, - { url = "https://files.pythonhosted.org/packages/9c/63/5273f146b13aa4a057a95ab0855d9990f3a1ced63693f4365135d1abfacc/websockets-15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45d464622314973d78f364689d5dbb9144e559f93dca11b11af3f2480b5034e1", size = 181152 }, - { url = "https://files.pythonhosted.org/packages/0f/ae/075697f3f97de7c26b73ae96d952e13fa36393e0db3f028540b28954e0a9/websockets-15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace960769d60037ca9625b4c578a6f28a14301bd2a1ff13bb00e824ac9f73e55", size = 181523 }, - { url = "https://files.pythonhosted.org/packages/25/87/06d091bbcbe01903bed3dad3bb4a1a3c516f61e611ec31fffb28abe4974b/websockets-15.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c7cd4b1015d2f60dfe539ee6c95bc968d5d5fad92ab01bb5501a77393da4f596", size = 181791 }, - { url = "https://files.pythonhosted.org/packages/77/08/5063b6cc1b2aa1fba2ee3b578b777db22fde7145f121d07fd878811e983b/websockets-15.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4f7290295794b5dec470867c7baa4a14182b9732603fd0caf2a5bf1dc3ccabf3", size = 181231 }, - { url = "https://files.pythonhosted.org/packages/86/ff/af23084df0a7405bb2add12add8c17d6192a8de9480f1b90d12352ba2b7d/websockets-15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3abd670ca7ce230d5a624fd3d55e055215d8d9b723adee0a348352f5d8d12ff4", size = 181191 }, - { url = "https://files.pythonhosted.org/packages/21/ce/b2bdfcf49201dee0b899edc6a814755763ec03d74f2714923d38453a9e8d/websockets-15.0-cp311-cp311-win32.whl", hash = "sha256:110a847085246ab8d4d119632145224d6b49e406c64f1bbeed45c6f05097b680", size = 175666 }, - { url = "https://files.pythonhosted.org/packages/8d/7b/444edcd5365538c226b631897975a65bbf5ccf27c77102e17d8f12a306ea/websockets-15.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7bbbe2cd6ed80aceef2a14e9f1c1b61683194c216472ed5ff33b700e784e37", size = 176105 }, - { url = "https://files.pythonhosted.org/packages/22/1e/92c4547d7b2a93f848aedaf37e9054111bc00dc11bff4385ca3f80dbb412/websockets-15.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:cccc18077acd34c8072578394ec79563664b1c205f7a86a62e94fafc7b59001f", size = 174709 }, - { url = "https://files.pythonhosted.org/packages/9f/37/eae4830a28061ba552516d84478686b637cd9e57d6a90b45ad69e89cb0af/websockets-15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d4c22992e24f12de340ca5f824121a5b3e1a37ad4360b4e1aaf15e9d1c42582d", size = 172372 }, - { url = "https://files.pythonhosted.org/packages/46/2f/b409f8b8aa9328d5a47f7a301a43319d540d70cf036d1e6443675978a988/websockets-15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1206432cc6c644f6fc03374b264c5ff805d980311563202ed7fef91a38906276", size = 172607 }, - { url = "https://files.pythonhosted.org/packages/d6/81/d7e2e4542d4b4df849b0110df1b1f94f2647b71ab4b65d672090931ad2bb/websockets-15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d3cc75ef3e17490042c47e0523aee1bcc4eacd2482796107fd59dd1100a44bc", size = 182422 }, - { url = 
"https://files.pythonhosted.org/packages/b6/91/3b303160938d123eea97f58be363f7dbec76e8c59d587e07b5bc257dd584/websockets-15.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b89504227a5311610e4be16071465885a0a3d6b0e82e305ef46d9b064ce5fb72", size = 181362 }, - { url = "https://files.pythonhosted.org/packages/f2/8b/df6807f1ca339c567aba9a7ab03bfdb9a833f625e8d2b4fc7529e4c701de/websockets-15.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56e3efe356416bc67a8e093607315951d76910f03d2b3ad49c4ade9207bf710d", size = 181787 }, - { url = "https://files.pythonhosted.org/packages/21/37/e6d3d5ebb0ebcaf98ae84904205c9dcaf3e0fe93e65000b9f08631ed7309/websockets-15.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f2205cdb444a42a7919690238fb5979a05439b9dbb73dd47c863d39640d85ab", size = 182058 }, - { url = "https://files.pythonhosted.org/packages/c9/df/6aca296f2be4c638ad20908bb3d7c94ce7afc8d9b4b2b0780d1fc59b359c/websockets-15.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:aea01f40995fa0945c020228ab919b8dfc93fc8a9f2d3d705ab5b793f32d9e99", size = 181434 }, - { url = "https://files.pythonhosted.org/packages/88/f1/75717a982bab39bbe63c83f9df0e7753e5c98bab907eb4fb5d97fe5c8c11/websockets-15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9f8e33747b1332db11cf7fcf4a9512bef9748cb5eb4d3f7fbc8c30d75dc6ffc", size = 181431 }, - { url = "https://files.pythonhosted.org/packages/e7/15/cee9e63ed9ac5bfc1a3ae8fc6c02c41745023c21eed622eef142d8fdd749/websockets-15.0-cp312-cp312-win32.whl", hash = "sha256:32e02a2d83f4954aa8c17e03fe8ec6962432c39aca4be7e8ee346b05a3476904", size = 175678 }, - { url = "https://files.pythonhosted.org/packages/4e/00/993974c60f40faabb725d4dbae8b072ef73b4c4454bd261d3b1d34ace41f/websockets-15.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc02b159b65c05f2ed9ec176b715b66918a674bd4daed48a9a7a590dd4be1aa", size = 176119 }, - { url = "https://files.pythonhosted.org/packages/12/23/be28dc1023707ac51768f848d28a946443041a348ee3a54abdf9f6283372/websockets-15.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d2244d8ab24374bed366f9ff206e2619345f9cd7fe79aad5225f53faac28b6b1", size = 174714 }, - { url = "https://files.pythonhosted.org/packages/8f/ff/02b5e9fbb078e7666bf3d25c18c69b499747a12f3e7f2776063ef3fb7061/websockets-15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3a302241fbe825a3e4fe07666a2ab513edfdc6d43ce24b79691b45115273b5e7", size = 172374 }, - { url = "https://files.pythonhosted.org/packages/8e/61/901c8d4698e0477eff4c3c664d53f898b601fa83af4ce81946650ec2a4cb/websockets-15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:10552fed076757a70ba2c18edcbc601c7637b30cdfe8c24b65171e824c7d6081", size = 172605 }, - { url = "https://files.pythonhosted.org/packages/d2/4b/dc47601a80dff317aecf8da7b4ab278d11d3494b2c373b493e4887561f90/websockets-15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c53f97032b87a406044a1c33d1e9290cc38b117a8062e8a8b285175d7e2f99c9", size = 182380 }, - { url = "https://files.pythonhosted.org/packages/83/f7/b155d2b38f05ed47a0b8de1c9ea245fcd7fc625d89f35a37eccba34b42de/websockets-15.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1caf951110ca757b8ad9c4974f5cac7b8413004d2f29707e4d03a65d54cedf2b", size = 181325 }, - { url = 
"https://files.pythonhosted.org/packages/d3/ff/040a20c01c294695cac0e361caf86f33347acc38f164f6d2be1d3e007d9f/websockets-15.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bf1ab71f9f23b0a1d52ec1682a3907e0c208c12fef9c3e99d2b80166b17905f", size = 181763 }, - { url = "https://files.pythonhosted.org/packages/cb/6a/af23e93678fda8341ac8775e85123425e45c608389d3514863c702896ea5/websockets-15.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bfcd3acc1a81f106abac6afd42327d2cf1e77ec905ae11dc1d9142a006a496b6", size = 182097 }, - { url = "https://files.pythonhosted.org/packages/7e/3e/1069e159c30129dc03c01513b5830237e576f47cedb888777dd885cae583/websockets-15.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c8c5c8e1bac05ef3c23722e591ef4f688f528235e2480f157a9cfe0a19081375", size = 181485 }, - { url = "https://files.pythonhosted.org/packages/9a/a7/c91c47103f1cd941b576bbc452601e9e01f67d5c9be3e0a9abe726491ab5/websockets-15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:86bfb52a9cfbcc09aba2b71388b0a20ea5c52b6517c0b2e316222435a8cdab72", size = 181466 }, - { url = "https://files.pythonhosted.org/packages/16/32/a4ca6e3d56c24aac46b0cf5c03b841379f6409d07fc2044b244f90f54105/websockets-15.0-cp313-cp313-win32.whl", hash = "sha256:26ba70fed190708551c19a360f9d7eca8e8c0f615d19a574292b7229e0ae324c", size = 175673 }, - { url = "https://files.pythonhosted.org/packages/c0/31/25a417a23e985b61ffa5544f9facfe4a118cb64d664c886f1244a8baeca5/websockets-15.0-cp313-cp313-win_amd64.whl", hash = "sha256:ae721bcc8e69846af00b7a77a220614d9b2ec57d25017a6bbde3a99473e41ce8", size = 176115 }, - { url = "https://files.pythonhosted.org/packages/42/52/359467c7ca12721a04520da9ba9fc29da2cd176c30992f6f81fa881bb3e5/websockets-15.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b499caef4bca9cbd0bd23cd3386f5113ee7378094a3cb613a2fa543260fe9506", size = 172384 }, - { url = "https://files.pythonhosted.org/packages/7c/ff/36fd8a45fac404d8f109e03ca06328f49847d71c0c048414c76bb2db91c4/websockets-15.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:17f2854c6bd9ee008c4b270f7010fe2da6c16eac5724a175e75010aacd905b31", size = 172616 }, - { url = "https://files.pythonhosted.org/packages/b1/a8/65496a87984815e2837835d5ac3c9f81ea82031036877e8f80953c59dbd9/websockets-15.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89f72524033abbfde880ad338fd3c2c16e31ae232323ebdfbc745cbb1b3dcc03", size = 173871 }, - { url = "https://files.pythonhosted.org/packages/23/89/9441e1e0818d46fe22d78b3e5c8fe2316516211330e138231c90dce5559e/websockets-15.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1657a9eecb29d7838e3b415458cc494e6d1b194f7ac73a34aa55c6fb6c72d1f3", size = 173477 }, - { url = "https://files.pythonhosted.org/packages/2f/1b/80460b3ac9795ef7bbaa074c603d64e009dbb2ceb11008416efab0dcc811/websockets-15.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e413352a921f5ad5d66f9e2869b977e88d5103fc528b6deb8423028a2befd842", size = 173425 }, - { url = "https://files.pythonhosted.org/packages/56/d1/8da7e733ed266f342e8c544c3b8338449de9b860d85d9a0bfd4fe1857d6e/websockets-15.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8561c48b0090993e3b2a54db480cab1d23eb2c5735067213bb90f402806339f5", size = 176160 }, - { url = 
"https://files.pythonhosted.org/packages/e8/b2/31eec524b53f01cd8343f10a8e429730c52c1849941d1f530f8253b6d934/websockets-15.0-py3-none-any.whl", hash = "sha256:51ffd53c53c4442415b613497a34ba0aa7b99ac07f1e4a62db5dcd640ae6c3c3", size = 169023 }, +version = "14.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/1b/380b883ce05bb5f45a905b61790319a28958a9ab1e4b6b95ff5464b60ca1/websockets-14.1.tar.gz", hash = "sha256:398b10c77d471c0aab20a845e7a60076b6390bfdaac7a6d2edb0d2c59d75e8d8", size = 162840 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/91/b1b375dbd856fd5fff3f117de0e520542343ecaf4e8fc60f1ac1e9f5822c/websockets-14.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a0adf84bc2e7c86e8a202537b4fd50e6f7f0e4a6b6bf64d7ccb96c4cd3330b29", size = 161950 }, + { url = "https://files.pythonhosted.org/packages/61/8f/4d52f272d3ebcd35e1325c646e98936099a348374d4a6b83b524bded8116/websockets-14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90b5d9dfbb6d07a84ed3e696012610b6da074d97453bd01e0e30744b472c8179", size = 159601 }, + { url = "https://files.pythonhosted.org/packages/c4/b1/29e87b53eb1937992cdee094a0988aadc94f25cf0b37e90c75eed7123d75/websockets-14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2177ee3901075167f01c5e335a6685e71b162a54a89a56001f1c3e9e3d2ad250", size = 159854 }, + { url = "https://files.pythonhosted.org/packages/3f/e6/752a2f5e8321ae2a613062676c08ff2fccfb37dc837a2ee919178a372e8a/websockets-14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f14a96a0034a27f9d47fd9788913924c89612225878f8078bb9d55f859272b0", size = 168835 }, + { url = "https://files.pythonhosted.org/packages/60/27/ca62de7877596926321b99071639275e94bb2401397130b7cf33dbf2106a/websockets-14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f874ba705deea77bcf64a9da42c1f5fc2466d8f14daf410bc7d4ceae0a9fcb0", size = 167844 }, + { url = "https://files.pythonhosted.org/packages/7e/db/f556a1d06635c680ef376be626c632e3f2bbdb1a0189d1d1bffb061c3b70/websockets-14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9607b9a442392e690a57909c362811184ea429585a71061cd5d3c2b98065c199", size = 168157 }, + { url = "https://files.pythonhosted.org/packages/b3/bc/99e5f511838c365ac6ecae19674eb5e94201aa4235bd1af3e6fa92c12905/websockets-14.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bea45f19b7ca000380fbd4e02552be86343080120d074b87f25593ce1700ad58", size = 168561 }, + { url = "https://files.pythonhosted.org/packages/c6/e7/251491585bad61c79e525ac60927d96e4e17b18447cc9c3cfab47b2eb1b8/websockets-14.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:219c8187b3ceeadbf2afcf0f25a4918d02da7b944d703b97d12fb01510869078", size = 167979 }, + { url = "https://files.pythonhosted.org/packages/ac/98/7ac2e4eeada19bdbc7a3a66a58e3ebdf33648b9e1c5b3f08c3224df168cf/websockets-14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ad2ab2547761d79926effe63de21479dfaf29834c50f98c4bf5b5480b5838434", size = 167925 }, + { url = "https://files.pythonhosted.org/packages/ab/3d/09e65c47ee2396b7482968068f6e9b516221e1032b12dcf843b9412a5dfb/websockets-14.1-cp310-cp310-win32.whl", hash = "sha256:1288369a6a84e81b90da5dbed48610cd7e5d60af62df9851ed1d1d23a9069f10", size = 162831 }, + { url = 
"https://files.pythonhosted.org/packages/8a/67/59828a3d09740e6a485acccfbb66600632f2178b6ed1b61388ee96f17d5a/websockets-14.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0744623852f1497d825a49a99bfbec9bea4f3f946df6eb9d8a2f0c37a2fec2e", size = 163266 }, + { url = "https://files.pythonhosted.org/packages/97/ed/c0d03cb607b7fe1f7ff45e2cd4bb5cd0f9e3299ced79c2c303a6fff44524/websockets-14.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:449d77d636f8d9c17952628cc7e3b8faf6e92a17ec581ec0c0256300717e1512", size = 161949 }, + { url = "https://files.pythonhosted.org/packages/06/91/bf0a44e238660d37a2dda1b4896235d20c29a2d0450f3a46cd688f43b239/websockets-14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a35f704be14768cea9790d921c2c1cc4fc52700410b1c10948511039be824aac", size = 159606 }, + { url = "https://files.pythonhosted.org/packages/ff/b8/7185212adad274c2b42b6a24e1ee6b916b7809ed611cbebc33b227e5c215/websockets-14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b1f3628a0510bd58968c0f60447e7a692933589b791a6b572fcef374053ca280", size = 159854 }, + { url = "https://files.pythonhosted.org/packages/5a/8a/0849968d83474be89c183d8ae8dcb7f7ada1a3c24f4d2a0d7333c231a2c3/websockets-14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c3deac3748ec73ef24fc7be0b68220d14d47d6647d2f85b2771cb35ea847aa1", size = 169402 }, + { url = "https://files.pythonhosted.org/packages/bd/4f/ef886e37245ff6b4a736a09b8468dae05d5d5c99de1357f840d54c6f297d/websockets-14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7048eb4415d46368ef29d32133134c513f507fff7d953c18c91104738a68c3b3", size = 168406 }, + { url = "https://files.pythonhosted.org/packages/11/43/e2dbd4401a63e409cebddedc1b63b9834de42f51b3c84db885469e9bdcef/websockets-14.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6cf0ad281c979306a6a34242b371e90e891bce504509fb6bb5246bbbf31e7b6", size = 168776 }, + { url = "https://files.pythonhosted.org/packages/6d/d6/7063e3f5c1b612e9f70faae20ebaeb2e684ffa36cb959eb0862ee2809b32/websockets-14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cc1fc87428c1d18b643479caa7b15db7d544652e5bf610513d4a3478dbe823d0", size = 169083 }, + { url = "https://files.pythonhosted.org/packages/49/69/e6f3d953f2fa0f8a723cf18cd011d52733bd7f6e045122b24e0e7f49f9b0/websockets-14.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f95ba34d71e2fa0c5d225bde3b3bdb152e957150100e75c86bc7f3964c450d89", size = 168529 }, + { url = "https://files.pythonhosted.org/packages/70/ff/f31fa14561fc1d7b8663b0ed719996cf1f581abee32c8fb2f295a472f268/websockets-14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9481a6de29105d73cf4515f2bef8eb71e17ac184c19d0b9918a3701c6c9c4f23", size = 168475 }, + { url = "https://files.pythonhosted.org/packages/f1/15/b72be0e4bf32ff373aa5baef46a4c7521b8ea93ad8b49ca8c6e8e764c083/websockets-14.1-cp311-cp311-win32.whl", hash = "sha256:368a05465f49c5949e27afd6fbe0a77ce53082185bbb2ac096a3a8afaf4de52e", size = 162833 }, + { url = "https://files.pythonhosted.org/packages/bc/ef/2d81679acbe7057ffe2308d422f744497b52009ea8bab34b6d74a2657d1d/websockets-14.1-cp311-cp311-win_amd64.whl", hash = "sha256:6d24fc337fc055c9e83414c94e1ee0dee902a486d19d2a7f0929e49d7d604b09", size = 163263 }, + { url = "https://files.pythonhosted.org/packages/55/64/55698544ce29e877c9188f1aee9093712411a8fc9732cca14985e49a8e9c/websockets-14.1-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:ed907449fe5e021933e46a3e65d651f641975a768d0649fee59f10c2985529ed", size = 161957 }, + { url = "https://files.pythonhosted.org/packages/a2/b1/b088f67c2b365f2c86c7b48edb8848ac27e508caf910a9d9d831b2f343cb/websockets-14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:87e31011b5c14a33b29f17eb48932e63e1dcd3fa31d72209848652310d3d1f0d", size = 159620 }, + { url = "https://files.pythonhosted.org/packages/c1/89/2a09db1bbb40ba967a1b8225b07b7df89fea44f06de9365f17f684d0f7e6/websockets-14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bc6ccf7d54c02ae47a48ddf9414c54d48af9c01076a2e1023e3b486b6e72c707", size = 159852 }, + { url = "https://files.pythonhosted.org/packages/ca/c1/f983138cd56e7d3079f1966e81f77ce6643f230cd309f73aa156bb181749/websockets-14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9777564c0a72a1d457f0848977a1cbe15cfa75fa2f67ce267441e465717dcf1a", size = 169675 }, + { url = "https://files.pythonhosted.org/packages/c1/c8/84191455d8660e2a0bdb33878d4ee5dfa4a2cedbcdc88bbd097303b65bfa/websockets-14.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a655bde548ca98f55b43711b0ceefd2a88a71af6350b0c168aa77562104f3f45", size = 168619 }, + { url = "https://files.pythonhosted.org/packages/8d/a7/62e551fdcd7d44ea74a006dc193aba370505278ad76efd938664531ce9d6/websockets-14.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3dfff83ca578cada2d19e665e9c8368e1598d4e787422a460ec70e531dbdd58", size = 169042 }, + { url = "https://files.pythonhosted.org/packages/ad/ed/1532786f55922c1e9c4d329608e36a15fdab186def3ca9eb10d7465bc1cc/websockets-14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6a6c9bcf7cdc0fd41cc7b7944447982e8acfd9f0d560ea6d6845428ed0562058", size = 169345 }, + { url = "https://files.pythonhosted.org/packages/ea/fb/160f66960d495df3de63d9bcff78e1b42545b2a123cc611950ffe6468016/websockets-14.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4b6caec8576e760f2c7dd878ba817653144d5f369200b6ddf9771d64385b84d4", size = 168725 }, + { url = "https://files.pythonhosted.org/packages/cf/53/1bf0c06618b5ac35f1d7906444b9958f8485682ab0ea40dee7b17a32da1e/websockets-14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb6d38971c800ff02e4a6afd791bbe3b923a9a57ca9aeab7314c21c84bf9ff05", size = 168712 }, + { url = "https://files.pythonhosted.org/packages/e5/22/5ec2f39fff75f44aa626f86fa7f20594524a447d9c3be94d8482cd5572ef/websockets-14.1-cp312-cp312-win32.whl", hash = "sha256:1d045cbe1358d76b24d5e20e7b1878efe578d9897a25c24e6006eef788c0fdf0", size = 162838 }, + { url = "https://files.pythonhosted.org/packages/74/27/28f07df09f2983178db7bf6c9cccc847205d2b92ced986cd79565d68af4f/websockets-14.1-cp312-cp312-win_amd64.whl", hash = "sha256:90f4c7a069c733d95c308380aae314f2cb45bd8a904fb03eb36d1a4983a4993f", size = 163277 }, + { url = "https://files.pythonhosted.org/packages/34/77/812b3ba5110ed8726eddf9257ab55ce9e85d97d4aa016805fdbecc5e5d48/websockets-14.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3630b670d5057cd9e08b9c4dab6493670e8e762a24c2c94ef312783870736ab9", size = 161966 }, + { url = "https://files.pythonhosted.org/packages/8d/24/4fcb7aa6986ae7d9f6d083d9d53d580af1483c5ec24bdec0978307a0f6ac/websockets-14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:36ebd71db3b89e1f7b1a5deaa341a654852c3518ea7a8ddfdf69cc66acc2db1b", size = 159625 }, + { url = 
"https://files.pythonhosted.org/packages/f8/47/2a0a3a2fc4965ff5b9ce9324d63220156bd8bedf7f90824ab92a822e65fd/websockets-14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5b918d288958dc3fa1c5a0b9aa3256cb2b2b84c54407f4813c45d52267600cd3", size = 159857 }, + { url = "https://files.pythonhosted.org/packages/dd/c8/d7b425011a15e35e17757e4df75b25e1d0df64c0c315a44550454eaf88fc/websockets-14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00fe5da3f037041da1ee0cf8e308374e236883f9842c7c465aa65098b1c9af59", size = 169635 }, + { url = "https://files.pythonhosted.org/packages/93/39/6e3b5cffa11036c40bd2f13aba2e8e691ab2e01595532c46437b56575678/websockets-14.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8149a0f5a72ca36720981418eeffeb5c2729ea55fa179091c81a0910a114a5d2", size = 168578 }, + { url = "https://files.pythonhosted.org/packages/cf/03/8faa5c9576299b2adf34dcccf278fc6bbbcda8a3efcc4d817369026be421/websockets-14.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77569d19a13015e840b81550922056acabc25e3f52782625bc6843cfa034e1da", size = 169018 }, + { url = "https://files.pythonhosted.org/packages/8c/05/ea1fec05cc3a60defcdf0bb9f760c3c6bd2dd2710eff7ac7f891864a22ba/websockets-14.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cf5201a04550136ef870aa60ad3d29d2a59e452a7f96b94193bee6d73b8ad9a9", size = 169383 }, + { url = "https://files.pythonhosted.org/packages/21/1d/eac1d9ed787f80754e51228e78855f879ede1172c8b6185aca8cef494911/websockets-14.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:88cf9163ef674b5be5736a584c999e98daf3aabac6e536e43286eb74c126b9c7", size = 168773 }, + { url = "https://files.pythonhosted.org/packages/0e/1b/e808685530185915299740d82b3a4af3f2b44e56ccf4389397c7a5d95d39/websockets-14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:836bef7ae338a072e9d1863502026f01b14027250a4545672673057997d5c05a", size = 168757 }, + { url = "https://files.pythonhosted.org/packages/b6/19/6ab716d02a3b068fbbeb6face8a7423156e12c446975312f1c7c0f4badab/websockets-14.1-cp313-cp313-win32.whl", hash = "sha256:0d4290d559d68288da9f444089fd82490c8d2744309113fc26e2da6e48b65da6", size = 162834 }, + { url = "https://files.pythonhosted.org/packages/6c/fd/ab6b7676ba712f2fc89d1347a4b5bdc6aa130de10404071f2b2606450209/websockets-14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8621a07991add373c3c5c2cf89e1d277e49dc82ed72c75e3afc74bd0acc446f0", size = 163277 }, + { url = "https://files.pythonhosted.org/packages/fb/cd/382a05a1ba2a93bd9fb807716a660751295df72e77204fb130a102fcdd36/websockets-14.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5dc25a9dbd1a7f61eca4b7cb04e74ae4b963d658f9e4f9aad9cd00b688692c8", size = 159633 }, + { url = "https://files.pythonhosted.org/packages/b7/a0/fa7c62e2952ef028b422fbf420f9353d9dd4dfaa425de3deae36e98c0784/websockets-14.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:04a97aca96ca2acedf0d1f332c861c5a4486fdcba7bcef35873820f940c4231e", size = 159867 }, + { url = "https://files.pythonhosted.org/packages/c1/94/954b4924f868db31d5f0935893c7a8446515ee4b36bb8ad75a929469e453/websockets-14.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df174ece723b228d3e8734a6f2a6febbd413ddec39b3dc592f5a4aa0aff28098", size = 161121 }, + { url = 
"https://files.pythonhosted.org/packages/7a/2e/f12bbb41a8f2abb76428ba4fdcd9e67b5b364a3e7fa97c88f4d6950aa2d4/websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:034feb9f4286476f273b9a245fb15f02c34d9586a5bc936aff108c3ba1b21beb", size = 160731 }, + { url = "https://files.pythonhosted.org/packages/13/97/b76979401f2373af1fe3e08f960b265cecab112e7dac803446fb98351a52/websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c308dabd2b380807ab64b62985eaccf923a78ebc572bd485375b9ca2b7dc7", size = 160681 }, + { url = "https://files.pythonhosted.org/packages/39/9c/16916d9a436c109a1d7ba78817e8fee357b78968be3f6e6f517f43afa43d/websockets-14.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5a42d3ecbb2db5080fc578314439b1d79eef71d323dc661aa616fb492436af5d", size = 163316 }, + { url = "https://files.pythonhosted.org/packages/b0/0b/c7e5d11020242984d9d37990310520ed663b942333b83a033c2f20191113/websockets-14.1-py3-none-any.whl", hash = "sha256:4d4fc827a20abe6d544a119896f6b78ee13fe81cbfef416f3f2ddf09a03f0e2e", size = 156277 }, ] [[package]] From 0e8adb72ded4fb1fe6032b686b4183f33c819b1d Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Thu, 6 Mar 2025 17:49:53 +0000 Subject: [PATCH 08/63] .Net: Hybrid Search (#10814) ### Motivation and Context Adding hybrid search to March release branch ### Description Adding hybrid search to March release branch ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- docs/decisions/00NN-hybrid-search.md | 395 ++++++++++++ dotnet/Directory.Packages.props | 2 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 2 - ...LVectorStoreCollectionQueryBuilderTests.cs | 130 ++-- ...zureAISearchVectorStoreRecordCollection.cs | 119 ++-- ...mosDBMongoDBVectorStoreRecordCollection.cs | 35 +- ...BNoSQLVectorStoreCollectionQueryBuilder.cs | 48 +- ...osmosDBNoSQLVectorStoreRecordCollection.cs | 132 ++-- .../InMemoryVectorStoreRecordCollection.cs | 14 +- ...ngoDBVectorStoreCollectionCreateMapping.cs | 28 + ...ngoDBVectorStoreCollectionSearchMapping.cs | 258 +++++++- .../MongoDBVectorStoreRecordCollection.cs | 199 +++--- ...ngoDBVectorStoreRecordCollectionOptions.cs | 5 + .../PostgresVectorStoreRecordCollection.cs | 34 +- .../QdrantVectorStoreRecordCollection.cs | 135 +++- ...RedisHashSetVectorStoreRecordCollection.cs | 13 +- .../RedisJsonVectorStoreRecordCollection.cs | 13 +- .../SqliteVectorStoreRecordCollection.cs | 35 +- .../WeaviateConstants.cs | 3 + ...viateVectorStoreCollectionSearchMapping.cs | 11 +- .../WeaviateVectorStoreRecordCollection.cs | 110 ++-- ...VectorStoreRecordCollectionQueryBuilder.cs | 64 ++ ...MongoDBVectorStoreRecordCollectionTests.cs | 2 - ...disJsonVectorStoreRecordCollectionTests.cs | 1 + ...VectorStoreCollectionSearchMappingTests.cs | 2 +- .../VectorSearch/HybridSearchOptions.cs | 67 ++ .../VectorSearch/IKeywordHybridSearch.cs | 29 + .../VectorSearch/VectorSearchOptions.cs | 3 +- .../AzureAISearchConfigConditionAttribute.cs | 25 + 
...earchKeywordVectorizedHybridSearchTests.cs | 30 + .../AzureAISearchVectorStoreFixture.cs | 17 +- ...QLConnectionStringSetConditionAttribute.cs | 24 + ...NoSQLKeywordVectorizedHybridSearchTests.cs | 30 + .../AzureCosmosDBNoSQLVectorStoreFixture.cs | 36 +- .../BaseKeywordVectorizedHybridSearchTests.cs | 341 +++++++++++ ...ngoDBKeywordVectorizedHybridSearchTests.cs | 29 + ...ctorsKeywordVectorizedHybridSearchTests.cs | 28 + ...ectorKeywordVectorizedHybridSearchTests.cs | 28 + ...viateKeywordVectorizedHybridSearchTests.cs | 34 ++ .../Memory/MongoDB/MongoDBConstants.cs | 3 + .../Data/VectorStoreRecordPropertyReader.cs | 85 +++ .../CompatibilitySuppressions.xml | 74 --- .../VolatileVectorStoreRecordCollection.cs | 380 ------------ .../VectorStoreRecordPropertyReaderTests.cs | 89 +++ ...olatileVectorStoreRecordCollectionTests.cs | 577 ------------------ 45 files changed, 2234 insertions(+), 1485 deletions(-) create mode 100644 docs/decisions/00NN-hybrid-search.md create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchConfigConditionAttribute.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchKeywordVectorizedHybridSearchTests.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLConnectionStringSetConditionAttribute.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKeywordVectorizedHybridSearchTests.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/BaseKeywordVectorizedHybridSearchTests.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBKeywordVectorizedHybridSearchTests.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantNamedVectorsKeywordVectorizedHybridSearchTests.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantSingleVectorKeywordVectorizedHybridSearchTests.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateKeywordVectorizedHybridSearchTests.cs delete mode 100644 dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml delete mode 100644 dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs delete mode 100644 dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs diff --git a/docs/decisions/00NN-hybrid-search.md b/docs/decisions/00NN-hybrid-search.md new file mode 100644 index 000000000000..486530972c1a --- /dev/null +++ b/docs/decisions/00NN-hybrid-search.md @@ -0,0 +1,395 @@ +--- +# These are optional elements. Feel free to remove any of them. +status: {proposed | rejected | accepted | deprecated | � | superseded by [ADR-0001](0001-madr-architecture-decisions.md)} +contact: westey-m +date: 2024-11-27 +deciders: {list everyone involved in the decision} +consulted: {list everyone whose opinions are sought (typically subject-matter experts); and with whom there is a two-way communication} +informed: {list everyone who is kept up-to-date on progress; and with whom there is a one-way communication} +--- + +# Support Hybrid Search in VectorStore abstractions + +## Context and Problem Statement + +In addition to simple vector search, many databases also support Hybrid search. 
+Hybrid search typically results in higher quality search results, and therefore the ability to do Hybrid search via VectorStore abstractions
+is an important feature to add.
+
+The way in which Hybrid search is supported varies by database. The two most common ways of supporting hybrid search are:
+
+1. Using dense vector search and keyword/fulltext search in parallel, and then combining the results.
+1. Using dense vector search and sparse vector search in parallel, and then combining the results.
+
+Sparse vectors are different from dense vectors in that they typically have many more dimensions, but with many of the dimensions being zero.
+Sparse vectors, when used with text search, have a dimension for each word/token in a vocabulary, with the value indicating the importance of the word
+in the source text.
+The more common the word is in a specific chunk of text, and the less common the word is in the corpus, the higher the value in the sparse vector.
+
+There are various mechanisms for generating sparse vectors, such as:
+
+- [TF-IDF](https://en.wikipedia.org/wiki/Tf%E2%80%93idf)
+- [SPLADE](https://www.pinecone.io/learn/splade/)
+- [BGE-m3 sparse embedding model](https://huggingface.co/BAAI/bge-m3)
+- [pinecone-sparse-english-v0](https://docs.pinecone.io/models/pinecone-sparse-english-v0)
+
+While these are well supported in Python, they are not well supported in .NET today.
+Adding support for generating sparse vectors is out of scope of this ADR.
+
+More background information:
+
+- [Background article from Qdrant about using sparse vectors for Hybrid Search](https://qdrant.tech/articles/sparse-vectors)
+- [TF-IDF explainer for beginners](https://medium.com/@coldstart_coder/understanding-and-implementing-tf-idf-in-python-a325d1301484)
+
+ML.Net contains an implementation of TF-IDF that could be used to generate sparse vectors in .NET. See [here](https://github.com/dotnet/machinelearning/blob/886e2ff125c0060f5a251056c7eb2a7d28738984/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ProduceWordBags.cs#L55-L105) for an example.
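+
+To make the sparse vector representation more concrete, the following is a minimal, illustrative sketch only (not part of the proposed abstractions, and not using ML.Net): it computes naive TF-IDF weights over a tiny assumed corpus and emits the "indices & values arrays" data model listed in the comparison table below. A production implementation would need proper tokenization, stemming and IDF smoothing.
+
+```csharp
+// Illustrative only: naive TF-IDF sparse vector as indices/values arrays.
+// The corpus, tokenizer and weighting below are assumptions for the example.
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+var corpus = new[]
+{
+    "hybrid search combines vector search and keyword search",
+    "sparse vectors have a dimension for each word in the vocabulary",
+    "dense vectors are produced by embedding models",
+};
+
+string[] Tokenize(string text) => text.ToLowerInvariant().Split(' ', StringSplitOptions.RemoveEmptyEntries);
+
+// One dimension per distinct word in the corpus.
+var vocabulary = corpus.SelectMany(Tokenize).Distinct().OrderBy(w => w).ToArray();
+var wordToIndex = vocabulary.Select((w, i) => (w, i)).ToDictionary(x => x.w, x => x.i);
+
+// Document frequency per word, used for the IDF term.
+var documentFrequency = vocabulary.ToDictionary(
+    w => w,
+    w => corpus.Count(doc => Tokenize(doc).Contains(w)));
+
+// TF-IDF for one chunk of text, returned as sparse indices/values arrays.
+(int[] Indices, float[] Values) ToSparseVector(string text)
+{
+    var tokens = Tokenize(text);
+    var termCounts = tokens.GroupBy(t => t).ToDictionary(g => g.Key, g => g.Count());
+
+    var indices = new List<int>();
+    var values = new List<float>();
+    foreach (var (word, count) in termCounts)
+    {
+        if (!wordToIndex.TryGetValue(word, out var index)) { continue; }
+        var tf = (float)count / tokens.Length;
+        // Words present in every document get an IDF of 0 here; real implementations smooth this.
+        var idf = (float)Math.Log((double)corpus.Length / documentFrequency[word]);
+        indices.Add(index);
+        values.Add(tf * idf);
+    }
+
+    return (indices.ToArray(), values.ToArray());
+}
+
+var (sparseIndices, sparseValues) = ToSparseVector("keyword search with sparse vectors");
+Console.WriteLine(string.Join(", ", sparseIndices.Zip(sparseValues, (i, v) => $"{i}:{v:F3}")));
+```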
+ +### Hybrid search support in different databases + +|Feature|Azure AI Search|Weaviate|Redis|Chroma|Pinecone|PostgreSql|Qdrant|Milvus|Elasticsearch|CosmosDB NoSql|MongoDB| +|-|-|-|-|-|-|-|-|-|-|-|-| +|Hybrid search supported|Y|Y|N (No parallel execution with fusion)|N|Y|Y|Y|Y|Y|Y|Y| +|Hybrid search definition|Vector + FullText|[Vector + Keyword (BM25F)](https://weaviate.io/developers/weaviate/search/hybrid)|||[Vector + Sparse Vector for keywords](https://docs.pinecone.io/guides/get-started/key-features#hybrid-search)|[Vector + Keyword](https://jkatz05.com/post/postgres/hybrid-search-postgres-pgvector/)|[Vector + SparseVector / Keyword](https://qdrant.tech/documentation/concepts/hybrid-queries/)|[Vector + SparseVector](https://milvus.io/docs/multi-vector-search.md)|Vector + FullText|[Vector + Fulltext (BM25)](https://learn.microsoft.com/en-us/azure/cosmos-db/gen-ai/hybrid-search)|[Vector + FullText](https://www.mongodb.com/docs/atlas/atlas-search/tutorial/hybrid-search)| +|Fusion method configurable|N|Y|||?|Y|Y|Y|Y, but only one option|Y, but only one option|N| +|Fusion methods|[RRF](https://learn.microsoft.com/en-us/azure/search/hybrid-search-ranking)|Ranked/RelativeScore|||?|[Build your own](https://jkatz05.com/post/postgres/hybrid-search-postgres-pgvector/)|RRF / DBSF|[RRF / Weighted](https://milvus.io/docs/multi-vector-search.md)|[RRF](https://www.elastic.co/search-labs/tutorials/search-tutorial/vector-search/hybrid-search)|[RRF](https://learn.microsoft.com/en-us/azure/cosmos-db/nosql/query/rrf)|[RRF](https://www.mongodb.com/docs/atlas/atlas-search/tutorial/hybrid-search)| +|Hybrid Search Input Params|Vector + string|[Vector + string](https://weaviate.io/developers/weaviate/api/graphql/search-operators#hybrid)|||Vector + SparseVector|Vector + String|[Vector + SparseVector](https://qdrant.tech/documentation/concepts/hybrid-queries/)|[Vector + SparseVector](https://milvus.io/docs/multi-vector-search.md)|Vector + string|Vector + string array|Vector + string| +|Sparse Distance Function|n/a|n/a|||[dotproduct only for both dense and sparse, 1 setting for both](https://docs.pinecone.io/guides/data/understanding-hybrid-search#sparse-dense-workflow)|n/a|dotproduct|Inner Product|n/a|n/a|n/a| +|Sparse Indexing options|n/a|n/a|||no separate config to dense|n/a|ondisk / inmemory + IDF|[SPARSE_INVERTED_INDEX / SPARSE_WAND](https://milvus.io/docs/index.md?tab=sparse)|n/a|n/a|n/a| +|Sparse data model|n/a|n/a|||[indices & values arrays](https://docs.pinecone.io/guides/data/upsert-sparse-dense-vectors)|n/a|indices & values arrays|[sparse matrix / List of dict / list of tuples](https://milvus.io/docs/sparse_vector.md#Use-sparse-vectors-in-Milvus)|n/a|n/a|n/a| +|Keyword matching behavior|[Space Separated with SearchMode=any does OR, searchmode=all does AND](https://learn.microsoft.com/en-us/azure/search/search-lucene-query-architecture)|[Tokenization with split by space, affects ranking](https://weaviate.io/developers/weaviate/search/bm25)|||n/a|[Tokenization](https://www.postgresql.org/docs/current/textsearch-controls.html)|[

No FTS Index: Exact Substring match<br>FTS Index present: All words must be present](https://qdrant.tech/documentation/concepts/filtering/#full-text-match)|n/a|[And/Or capabilities](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-match-bool-prefix-query.html)|-|[Allows multiple multi-word phrases with OR](https://www.mongodb.com/docs/atlas/atlas-search/phrase/) and [a single multi-word phrase where the words can be OR'd or AND'd](https://www.mongodb.com/docs/atlas/atlas-search/text/)|
+
+Glossary:
+
+- RRF = Reciprocal Rank Fusion
+- DBSF = Distribution-Based Score Fusion
+- IDF = Inverse Document Frequency
+
+### Language required for Cosmos DB NoSQL full text search configuration
+
+Cosmos DB NoSQL requires a language to be specified for full text search, and it requires full text search indexing to be enabled before hybrid search can be used.
+We therefore need to support a way of specifying the language when creating the index.
+
+Cosmos DB NoSQL is the only database from our sample that has a required setting of this type.
+
+|Feature|Azure AI Search|Weaviate|Redis|Chroma|Pinecone|PostgreSql|Qdrant|Milvus|Elasticsearch|CosmosDB NoSql|MongoDB|
+|-|-|-|-|-|-|-|-|-|-|-|-|
+|Requires FullTextSearch indexing for hybrid search|Y|Y|n/a|n/a|n/a|Y|N [optional](https://qdrant.tech/documentation/concepts/filtering/#full-text-match)|n/a|Y|Y|[Y](https://www.mongodb.com/docs/atlas/atlas-search/tutorial/hybrid-search/?msockid=04b550d92f2f619c271a45a42e066050#create-the-atlas-vector-search-and-fts-indexes)|
+|Required FullTextSearch index options|None required, [many optional](https://learn.microsoft.com/en-us/rest/api/searchservice/indexes/create?view=rest-searchservice-2024-07-01&tabs=HTTP)|None required, [none optional](https://weaviate.io/developers/weaviate/concepts/indexing#collections-without-indexes)||||[language required](https://jkatz05.com/post/postgres/hybrid-search-postgres-pgvector/)|none required, [some optional](https://qdrant.tech/documentation/concepts/indexing/#full-text-index)||None required, [many optional](https://elastic.github.io/elasticsearch-net/8.16.3/api/Elastic.Clients.Elasticsearch.Mapping.TextProperty.html)|Language Required|None required, [many optional](https://www.mongodb.com/docs/atlas/atlas-search/field-types/string-type/#configure-fts-field-type-field-properties)|
+
+### Keyword Search interface options
+
+Each DB has different keyword search capabilities. Some only support a very basic interface when it comes to listing keywords for hybrid search. The following table lists the compatibility of each DB with a specific keyword public interface we may want to support.
+
+|Feature|Azure AI Search|Weaviate|PostgreSql|Qdrant|Elasticsearch|CosmosDB NoSql|MongoDB|
+|-|-|-|-|-|-|-|-|
+|string[] keyword<br>One word per element<br>Any matching word boosts ranking.|Y|Y (have to join with spaces)|[Y (have to join with spaces)](https://www.postgresql.org/docs/current/textsearch-controls.html)|Y (via filter with multiple OR'd matches)|Y|Y|[Y (have to join with spaces)](https://www.mongodb.com/docs/drivers/node/current/fundamentals/crud/read-operations/text/)|
+|string[] keyword<br>One or more words per element<br>All words in a single element have to be present to boost the ranking.|Y|N|Y|Y (via filter with multiple OR'd matches and FTS Index)|-|N|N|
+|string[] keyword<br>One or more words per element<br>Multiple words in a single element are treated as a phrase that must match exactly to boost the ranking.|Y|N|Y|Only via filter with multiple OR'd matches and NO Index|-|N|Y|
+|string keyword<br>Space separated words<br>Any matching word boosts ranking.
|Y|Y|Y|N (would need to split words)|-|N (would need to split words)|Y| + +### Naming Options + +|Interface Name|Method Name|Parameters|Options Class Name|Keyword Property Selector|Dense Vector Property Selector| +|-|-|-|-|-|-| +|KeywordVectorizedHybridSearch|KeywordVectorizedHybridSearch|string[] + Dense Vector|KeywordVectorizedHybridSearchOptions|FullTextPropertyName|VectorPropertyName| +|SparseVectorizedHybridSearch|SparseVectorizedHybridSearch|Sparse Vector + Dense Vector|SparseVectorizedHybridSearchOptions|SparseVectorPropertyName|VectorPropertyName| +|KeywordVectorizableTextHybridSearch|KeywordVectorizableTextHybridSearch|string[] + string|KeywordVectorizableTextHybridSearchOptions|FullTextPropertyName|VectorPropertyName| +|SparseVectorizableTextHybridSearch|SparseVectorizableTextHybridSearch|string[] + string|SparseVectorizableTextHybridSearchOptions|SparseVectorPropertyName|VectorPropertyName| + +|Interface Name|Method Name|Parameters|Options Class Name|Keyword Property Selector|Dense Vector Property Selector| +|-|-|-|-|-|-| +|KeywordVectorizedHybridSearch|HybridSearch|string[] + Dense Vector|KeywordVectorizedHybridSearchOptions|FullTextPropertyName|VectorPropertyName| +|SparseVectorizedHybridSearch|HybridSearch|Sparse Vector + Dense Vector|SparseVectorizedHybridSearchOptions|SparseVectorPropertyName|VectorPropertyName| +|KeywordVectorizableTextHybridSearch|HybridSearch|string[] + string|KeywordVectorizableTextHybridSearchOptions|FullTextPropertyName|VectorPropertyName| +|SparseVectorizableTextHybridSearch|HybridSearch|string[] + string|SparseVectorizableTextHybridSearchOptions|SparseVectorPropertyName|VectorPropertyName| + +|Interface Name|Method Name|Parameters|Options Class Name|Keyword Property Selector|Dense Vector Property Selector| +|-|-|-|-|-|-| +|HybridSearchWithKeywords|HybridSearch|string[] + Dense Vector|HybridSearchOptions|FullTextPropertyName|VectorPropertyName| +|HybridSearchWithSparseVector|HybridSearchWithSparseVector|Sparse Vector + Dense Vector|HybridSearchWithSparseVectorOptions|SparseVectorPropertyName|VectorPropertyName| +|HybridSearchWithKeywordsAndVectorizableText|HybridSearch|string[] + string|HybridSearchOptions|FullTextPropertyName|VectorPropertyName| +|HybridSearchWithVectorizableKeywordsAndText|HybridSearchWithSparseVector|string[] + string|HybridSearchWithSparseVectorOptions|SparseVectorPropertyName|VectorPropertyName| + +|Area|Type of search|Method Name| +|-|-|-| +|**Non-vector Search**||| +|Non-vector Search||Search| +|**Vector Search**||| +|Vector Search|With Vector|VectorSearch| +|Vector Search|With Vectorizable Text (string)|VectorSearchWithText| +|Vector Search|With Vectorizable Image (string/byte[]/other)|VectorSearchWithImage| +|**Hybrid Search**||| +|Hybrid Search|With DenseVector and string[] keywords|HybridSearch| +|Hybrid Search|With vectorizable string and string[] keywords|HybridSearch| +|Hybrid Search|With DenseVector and SparseVector|HybridSearchWithSparseVector| +|Hybrid Search|With vectorizable string and sparse vectorisable string[] keywords|HybridSearchWithSparseVector| + +### Keyword based hybrid search + +```csharp +interface IKeywordVectorizedHybridSearch +{ + Task> KeywordVectorizedHybridSearch( + TVector vector, + ICollection keywords, + KeywordVectorizedHybridSearchOptions options, + CancellationToken cancellationToken); +} + +class KeywordVectorizedHybridSearchOptions +{ + // The name of the property to target the vector search against. + public string? 
VectorPropertyName { get; init; } + + // The name of the property to target the text search against. + public string? FullTextPropertyName { get; init; } + + public VectorSearchFilter? Filter { get; init; } + public int Top { get; init; } = 3; + public int Skip { get; init; } = 0; + public bool IncludeVectors { get; init; } = false; + public bool IncludeTotalCount { get; init; } = false; +} +``` + +### Sparse Vector based hybrid search + +```csharp +interface ISparseVectorizedHybridSearch +{ + Task> SparseVectorizedHybridSearch( + TVector vector, + TSparseVector sparsevector, + SparseVectorizedHybridSearchOptions options, + CancellationToken cancellationToken); +} + +class SparseVectorizedHybridSearchOptions +{ + // The name of the property to target the dense vector search against. + public string? VectorPropertyName { get; init; } + // The name of the property to target the sparse vector search against. + public string? SparseVectorPropertyName { get; init; } + + public VectorSearchFilter? Filter { get; init; } + public int Top { get; init; } = 3; + public int Skip { get; init; } = 0; + public bool IncludeVectors { get; init; } = false; + public bool IncludeTotalCount { get; init; } = false; +} +``` + +### Keyword Vectorizable text based hybrid search + +```csharp +interface IKeywordVectorizableHybridSearch +{ + Task> KeywordVectorizableHybridSearch( + string searchText, + ICollection keywords, + KeywordVectorizableHybridSearchOptions options = default, + CancellationToken cancellationToken = default); +} + +class KeywordVectorizableHybridSearchOptions +{ + // The name of the property to target the dense vector search against. + public string? VectorPropertyName { get; init; } + // The name of the property to target the text search against. + public string? FullTextPropertyName { get; init; } + + public VectorSearchFilter? Filter { get; init; } + public int Top { get; init; } = 3; + public int Skip { get; init; } = 0; + public bool IncludeVectors { get; init; } = false; + public bool IncludeTotalCount { get; init; } = false; +} +``` + +### Sparse Vector based Vectorizable text hybrid search + +```csharp +interface ISparseVectorizableTextHybridSearch +{ + Task> SparseVectorizableTextHybridSearch( + string searchText, + ICollection keywords, + SparseVectorizableTextHybridSearchOptions options = default, + CancellationToken cancellationToken = default); +} + +class SparseVectorizableTextHybridSearchOptions +{ + // The name of the property to target the dense vector search against. + public string? VectorPropertyName { get; init; } + // The name of the property to target the sparse vector search against. + public string? SparseVectorPropertyName { get; init; } + + public VectorSearchFilter? Filter { get; init; } + public int Top { get; init; } = 3; + public int Skip { get; init; } = 0; + public bool IncludeVectors { get; init; } = false; + public bool IncludeTotalCount { get; init; } = false; +} +``` + +## Decision Drivers + +- Support for generating sparse vectors is required to make sparse vector based hybrid search viable. +- Multiple vectors per record scenarios need to be supported. +- No database in our evaluation set have been identified as supporting converting text to sparse vectors in the database on upsert and storing those sparse vectors in a retrievable field. Of course some of these DBs may use sparse vectors internally to implement keyword search, without exposing them to the caller. + +## Scoping Considered Options + +### 1. 
Keyword Hybrid Search Only + +Only implement KeywordVectorizedHybridSearch & KeywordVectorizableTextHybridSearch for now, until +we can add support for generating sparse vectors. + +### 2. Keyword and SparseVectorized Hybrid Search + +Implement KeywordVectorizedHybridSearch & KeywordVectorizableTextHybridSearch but only +KeywordVectorizableTextHybridSearch, since no database in our evaluation set supports generating sparse vectors in the database. +This will require us to produce code that can generate sparse vectors from text. + +### 3. All abovementioned Hybrid Search + +Create all four interfaces and implement an implementation of SparseVectorizableTextHybridSearch that +generates the sparse vector in the client code. +This will require us to produce code that can generate sparse vectors from text. + +### 4. Generalized Hybrid Search + +Some databases support a more generalized version of hybrid search, where you can take two (or sometimes more) searches of any type and combine the results of these using your chosen fusion method. +You can implement Vector + Keyword search using this more generalized search. +For databases that support only Vector + Keyword hybrid search though, it is not possible to implement the generalized hybrid search on top of those databases. + +## PropertyName Naming Considered Options + +### 1. Explicit Dense naming + +DenseVectorPropertyName +SparseVectorPropertyName + +DenseVectorPropertyName +FullTextPropertyName + +- Pros: This is more explicit, considering that there are also sparse vectors involved. +- Cons: It is inconsistent with the naming in the non-hybrid vector search. + +### 2. Implicit Dense naming + +VectorPropertyName +SparseVectorPropertyName + +VectorPropertyName +FullTextPropertyName + +- Pros: This is consistent with the naming in the non-hybrid vector search. +- Cons: It is internally inconsistent, i.e. we have sparse vector, but for dense it's just vector. + +## Keyword splitting Considered Options + +### 1. Accept Split keywords in interface + +Accept an ICollection of string where each value is a separate keyword. +A version that takes a single keyword and calls the `ICollection` version can also be provided as an extension method. + +```csharp + Task> KeywordVectorizedHybridSearch( + TVector vector, + ICollection keywords, + KeywordVectorizedHybridSearchOptions options, + CancellationToken cancellationToken); +``` + +- Pros: Easier to use in the connector if the underlying DB requires split keywords +- Pros: Only solution broadly supported, see comparison table above. + +### 2. Accept single string in interface + +Accept a single string containing all the keywords. + +```csharp + Task> KeywordVectorizedHybridSearch( + TVector vector, + string keywords, + KeywordVectorizedHybridSearchOptions options, + CancellationToken cancellationToken); +``` + +- Pros: Easier for a user to use, since they don't need to do any keyword splitting. +- Cons: We don't have the capabilities to properly sanitise the string, e.g. splitting words appropriately for the language, and potentially removing filler words. + +### 3. Accept either in interface + +Accept either option and either combine or split the keywords in the connector as needed by the underlying db. 
+ +```csharp + Task> KeywordVectorizedHybridSearch( + TVector vector, + ICollection keywords, + KeywordVectorizedHybridSearchOptions options, + CancellationToken cancellationToken); + Task> KeywordVectorizedHybridSearch( + TVector vector, + string keywords, + KeywordVectorizedHybridSearchOptions options, + CancellationToken cancellationToken); +``` + +- Pros: Easier for a user to use, since they can pick whichever suits them better +- Cons: We have to still convert to/from the internal presentation by either combining keywords or splitting them. +- Cons: We don't have the capabilities to properly sanitise the single string, e.g. splitting words appropriately for the language, and potentially removing filler words. + +### 4. Accept either in interface but throw for not supported + +Accept either option but throw for the one not supported by the underlying DB. + +- Pros: Easier for us to implement. +- Cons: Harder for users to use. + +### 5. Separate interfaces for each + +Create a separate interface for the Enumerable and single string options, and only implement the one that is supported by the underlying system for each db. + +- Pros: Easier for us to implement. +- Cons: Harder for users to use. + +## Full text search index mandatory configuration Considered Options + +Cosmos DB NoSQL requires a language to be specified when creating a full text search index. +Other DBs have optional values that can be set. + +### 1. Pass option in via collection options + +This option does the minimum by just adding a language option to the collection's options class. +This language would then be used for all full text search indexes created by the collection. + +- Pros: Simplest to implement +- Cons: Doesn't allow multiple languages to be used for different fields in one record +- Cons: Doesn't add support for all full text search options for all dbs + +### 2. Add extensions for RecordDefinition and data model Attributes + +Add a property bag to the VectorStoreRecordProperty allowing database specific metadata to be provided. +Add an abstract base attribute that can be inherited from that allows extra metadata to be added to the data model, +where each database has their own attributes to specify their settings, with a method to convert the contents to +the property bag required by VectorStoreRecordProperty. + +- Pros: Allows multiple languages to be used for different fields in one record +- Pros: Allows other DBs to add their own settings via their own attributes +- Cons: More work to implement + +## Decision Outcome + +### Scoping + +Chosen option "1. Keyword Hybrid Search Only", since enterprise support for generating sparse vectors is poor and without an end to end story, the value is low. + +### PropertyName Naming + +Chosen option "2. Implicit Dense naming", since it is consistent with the existing vector search options naming. + +### Keyword splitting + +Chosen option "1. Accept Split keywords in interface", since it is the only one with broad support amongst databases. 
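+
+To illustrate the chosen combination (split keywords plus a dense vector, with implicit dense naming), the following is a rough usage sketch of the resulting surface as shipped in this change (`IKeywordHybridSearch<TRecord>.HybridSearchAsync` and `HybridSearchOptions<TRecord>`). The `Hotel` record, the way the collection is obtained and the embedding call are placeholder assumptions for the example, not part of the abstractions.
+
+```csharp
+// Rough usage sketch. Hotel, GetCollectionAsync and GenerateEmbeddingAsync are
+// placeholders; only the HybridSearchAsync call shape and HybridSearchOptions
+// come from the interfaces introduced by this change.
+IKeywordHybridSearch<Hotel> collection = await GetCollectionAsync();
+ReadOnlyMemory<float> queryVector = await GenerateEmbeddingAsync("luxury hotel near the beach");
+
+var searchResults = await collection.HybridSearchAsync(
+    queryVector,
+    keywords: new[] { "luxury", "beach", "spa" },
+    new HybridSearchOptions<Hotel>
+    {
+        Top = 5,
+        // Name of the full text searchable data property to match the keywords against;
+        // typically resolvable automatically when the record has exactly one such property.
+        AdditionalPropertyName = nameof(Hotel.Description),
+    });
+
+await foreach (var result in searchResults.Results)
+{
+    Console.WriteLine($"{result.Record.HotelName}: {result.Score}");
+}
+```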
diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props index 47342f11b503..4e70ab8174b9 100644 --- a/dotnet/Directory.Packages.props +++ b/dotnet/Directory.Packages.props @@ -136,7 +136,7 @@ - + diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index ab2fa157b212..155f34334096 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -569,8 +569,6 @@ public async Task VectorizedSearchThrowsExceptionWithInvalidVectorTypeAsync(obje } [Theory] - [InlineData(null, "TestEmbedding1", 1, 1)] - [InlineData("", "TestEmbedding1", 2, 2)] [InlineData("TestEmbedding1", "TestEmbedding1", 3, 3)] [InlineData("TestEmbedding2", "test_embedding_2", 4, 4)] public async Task VectorizedSearchUsesValidQueryAsync( diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs index db55fca4baeb..55d062441674 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs @@ -37,16 +37,19 @@ public void BuildSearchQueryByDefaultReturnsValidQueryDefinition() .EqualTo("TestProperty2", "test-value-2") .AnyTagEqualTo("TestProperty3", "test-value-3"); - var searchOptions = new VectorSearchOptions { OldFilter = filter, Skip = 5, Top = 10 }; - // Act - var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( + var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery, DummyType>( vector, + keywords: null, fields, this._storagePropertyNames, vectorPropertyName, + textPropertyName: null, ScorePropertyName, - searchOptions); + oldFilter: filter, + filter: null, + 10, + 5); var queryText = queryDefinition.QueryText; var queryParameters = queryDefinition.GetQueryParameters(); @@ -56,22 +59,16 @@ public void BuildSearchQueryByDefaultReturnsValidQueryDefinition() Assert.Contains("FROM x", queryText); Assert.Contains("WHERE x.test_property_2 = @cv0 AND ARRAY_CONTAINS(x.test_property_3, @cv1)", queryText); Assert.Contains("ORDER BY VectorDistance(x.test_property_1, @vector)", queryText); - Assert.Contains("OFFSET @offset LIMIT @limit", queryText); + Assert.Contains("OFFSET 5 LIMIT 10", queryText); Assert.Equal("@vector", queryParameters[0].Name); Assert.Equal(vector, queryParameters[0].Value); - Assert.Equal("@offset", queryParameters[1].Name); - Assert.Equal(5, queryParameters[1].Value); - - Assert.Equal("@limit", queryParameters[2].Name); - Assert.Equal(10, queryParameters[2].Value); - - Assert.Equal("@cv0", queryParameters[3].Name); - Assert.Equal("test-value-2", queryParameters[3].Value); + Assert.Equal("@cv0", queryParameters[1].Name); + Assert.Equal("test-value-2", queryParameters[1].Value); - Assert.Equal("@cv1", queryParameters[4].Name); - Assert.Equal("test-value-3", queryParameters[4].Value); + Assert.Equal("@cv1", queryParameters[2].Name); 
+ Assert.Equal("test-value-3", queryParameters[2].Value); } [Fact] @@ -86,39 +83,39 @@ public void BuildSearchQueryWithoutOffsetReturnsQueryDefinitionWithTopParameter( .EqualTo("TestProperty2", "test-value-2") .AnyTagEqualTo("TestProperty3", "test-value-3"); - var searchOptions = new VectorSearchOptions { OldFilter = filter, Top = 10 }; - // Act - var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( + var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery, DummyType>( vector, + keywords: null, fields, this._storagePropertyNames, vectorPropertyName, + textPropertyName: null, ScorePropertyName, - searchOptions); + oldFilter: filter, + filter: null, + 10, + 0); var queryText = queryDefinition.QueryText; var queryParameters = queryDefinition.GetQueryParameters(); // Assert - Assert.Contains("SELECT TOP @top x.test_property_1,x.test_property_2,x.test_property_3,VectorDistance(x.test_property_1, @vector) AS TestScore", queryText); + Assert.Contains("SELECT TOP 10 x.test_property_1,x.test_property_2,x.test_property_3,VectorDistance(x.test_property_1, @vector) AS TestScore", queryText); Assert.Contains("FROM x", queryText); Assert.Contains("WHERE x.test_property_2 = @cv0 AND ARRAY_CONTAINS(x.test_property_3, @cv1)", queryText); Assert.Contains("ORDER BY VectorDistance(x.test_property_1, @vector)", queryText); - Assert.DoesNotContain("OFFSET @offset LIMIT @limit", queryText); + Assert.DoesNotContain("OFFSET 0 LIMIT 10", queryText); Assert.Equal("@vector", queryParameters[0].Name); Assert.Equal(vector, queryParameters[0].Value); - Assert.Equal("@top", queryParameters[1].Name); - Assert.Equal(10, queryParameters[1].Value); - - Assert.Equal("@cv0", queryParameters[2].Name); - Assert.Equal("test-value-2", queryParameters[2].Value); + Assert.Equal("@cv0", queryParameters[1].Name); + Assert.Equal("test-value-2", queryParameters[1].Value); - Assert.Equal("@cv1", queryParameters[3].Name); - Assert.Equal("test-value-3", queryParameters[3].Value); + Assert.Equal("@cv1", queryParameters[2].Name); + Assert.Equal("test-value-3", queryParameters[2].Value); } [Fact] @@ -131,17 +128,20 @@ public void BuildSearchQueryWithInvalidFilterThrowsException() var filter = new VectorSearchFilter().EqualTo("non-existent-property", "test-value-2"); - var searchOptions = new VectorSearchOptions { OldFilter = filter, Skip = 5, Top = 10 }; - // Act & Assert Assert.Throws(() => - AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( + AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery, DummyType>( vector, + keywords: null, fields, this._storagePropertyNames, vectorPropertyName, + textPropertyName: null, ScorePropertyName, - searchOptions)); + oldFilter: filter, + filter: null, + 10, + 5)); } [Fact] @@ -152,31 +152,29 @@ public void BuildSearchQueryWithoutFilterDoesNotContainWhereClause() var vectorPropertyName = "test_property_1"; var fields = this._storagePropertyNames.Values.ToList(); - var searchOptions = new VectorSearchOptions { Skip = 5, Top = 10 }; - // Act - var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( + var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery, DummyType>( vector, + keywords: null, fields, this._storagePropertyNames, vectorPropertyName, + textPropertyName: null, ScorePropertyName, - searchOptions); + oldFilter: null, + filter: null, + 10, + 5); var queryText = queryDefinition.QueryText; var queryParameters = 
queryDefinition.GetQueryParameters(); // Assert Assert.DoesNotContain("WHERE", queryText); + Assert.Contains("OFFSET 5 LIMIT 10", queryText); Assert.Equal("@vector", queryParameters[0].Name); Assert.Equal(vector, queryParameters[0].Value); - - Assert.Equal("@offset", queryParameters[1].Name); - Assert.Equal(5, queryParameters[1].Value); - - Assert.Equal("@limit", queryParameters[2].Name); - Assert.Equal(10, queryParameters[2].Value); } [Fact] @@ -215,6 +213,54 @@ FROM x Assert.Equal("partition_key", queryParameters[1].Value); } + [Fact] + public void BuildSearchQueryWithHybridFieldsReturnsValidHybridQueryDefinition() + { + // Arrange + var vector = new ReadOnlyMemory([1f, 2f, 3f]); + var keywordText = "hybrid"; + var vectorPropertyName = "test_property_1"; + var textPropertyName = "test_property_2"; + var fields = this._storagePropertyNames.Values.ToList(); + + var filter = new VectorSearchFilter() + .EqualTo("TestProperty2", "test-value-2") + .AnyTagEqualTo("TestProperty3", "test-value-3"); + + // Act + var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery, DummyType>( + vector, + [keywordText], + fields, + this._storagePropertyNames, + vectorPropertyName, + textPropertyName, + ScorePropertyName, + oldFilter: filter, + filter: null, + 10, + 5); + + var queryText = queryDefinition.QueryText; + var queryParameters = queryDefinition.GetQueryParameters(); + + // Assert + Assert.Contains("SELECT x.test_property_1,x.test_property_2,x.test_property_3,VectorDistance(x.test_property_1, @vector) AS TestScore", queryText); + Assert.Contains("FROM x", queryText); + Assert.Contains("WHERE x.test_property_2 = @cv0 AND ARRAY_CONTAINS(x.test_property_3, @cv1)", queryText); + Assert.Contains("ORDER BY RANK RRF(VectorDistance(x.test_property_1, @vector), FullTextScore(x.test_property_2, [\"hybrid\"]))", queryText); + Assert.Contains("OFFSET 5 LIMIT 10", queryText); + + Assert.Equal("@vector", queryParameters[0].Name); + Assert.Equal(vector, queryParameters[0].Value); + + Assert.Equal("@cv0", queryParameters[1].Name); + Assert.Equal("test-value-2", queryParameters[1].Value); + + Assert.Equal("@cv1", queryParameters[2].Name); + Assert.Equal("test-value-3", queryParameters[2].Value); + } + #pragma warning disable CA1812 // An internal class that is apparently never instantiated. If so, remove the code from the assembly. private sealed class DummyType; #pragma warning restore CA1812 diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index d011d5a6c127..11541e5843dd 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -23,7 +23,10 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class AzureAISearchVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorizableTextSearch +public class AzureAISearchVectorStoreRecordCollection : + IVectorStoreRecordCollection, + IVectorizableTextSearch, + IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// The name of this database for telemetry purposes. 
@@ -68,6 +71,9 @@ public class AzureAISearchVectorStoreRecordCollection : IVectorStoreRec /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + /// The default options for hybrid vector search. + private static readonly HybridSearchOptions s_defaultKeywordVectorizedHybridSearchOptions = new(); + /// Azure AI Search client that can be used to manage the list of indices in an Azure AI Search Service. private readonly SearchIndexClient _searchIndexClient; @@ -316,25 +322,16 @@ public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable public virtual Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { - Verify.NotNull(vector); - - if (this._propertyReader.FirstVectorPropertyName is null) - { - throw new InvalidOperationException("The collection does not have any vector fields, so vector search is not possible."); - } - - if (vector is not ReadOnlyMemory floatVector) - { - throw new NotSupportedException($"The provided vector type {vector.GetType().FullName} is not supported by the Azure AI Search connector."); - } + var floatVector = VerifyVectorParam(vector); // Resolve options. var internalOptions = options ?? s_defaultVectorSearchOptions; - string? vectorFieldName = this.ResolveVectorFieldName(internalOptions.VectorPropertyName); + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions.VectorPropertyName); + var vectorPropertyName = this._propertyReader.GetJsonPropertyName(vectorProperty!.DataModelPropertyName); // Configure search settings. var vectorQueries = new List(); - vectorQueries.Add(new VectorizedQuery(floatVector) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorFieldName } }); + vectorQueries.Add(new VectorizedQuery(floatVector) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorPropertyName } }); #pragma warning disable CS0618 // VectorSearchFilter is obsolete // Build filter object. @@ -385,11 +382,12 @@ public virtual Task> VectorizableTextSearchAsync(st // Resolve options. var internalOptions = options ?? s_defaultVectorSearchOptions; - string? vectorFieldName = this.ResolveVectorFieldName(internalOptions.VectorPropertyName); + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions.VectorPropertyName); + var vectorPropertyName = this._propertyReader.GetJsonPropertyName(vectorProperty!.DataModelPropertyName); // Configure search settings. var vectorQueries = new List(); - vectorQueries.Add(new VectorizableTextQuery(searchText) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorFieldName } }); + vectorQueries.Add(new VectorizableTextQuery(searchText) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorPropertyName } }); #pragma warning disable CS0618 // VectorSearchFilter is obsolete // Build filter object. @@ -428,6 +426,58 @@ public virtual Task> VectorizableTextSearchAsync(st return this.SearchAndMapToDataModelAsync(null, searchOptions, internalOptions.IncludeVectors, cancellationToken); } + /// + public Task> HybridSearchAsync(TVector vector, ICollection keywords, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + { + Verify.NotNull(keywords); + var floatVector = VerifyVectorParam(vector); + + // Resolve options. + var internalOptions = options ?? 
s_defaultKeywordVectorizedHybridSearchOptions; + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions.VectorPropertyName); + var vectorPropertyName = this._propertyReader.GetJsonPropertyName(vectorProperty.DataModelPropertyName); + var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(internalOptions.AdditionalPropertyName); + var textDataPropertyName = this._propertyReader.GetJsonPropertyName(textDataProperty.DataModelPropertyName); + + // Configure search settings. + var vectorQueries = new List(); + vectorQueries.Add(new VectorizedQuery(floatVector) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorPropertyName } }); + +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + // Build filter object. + var filter = internalOptions switch + { + { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), + { OldFilter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._propertyReader.JsonPropertyNamesMap), + { Filter: Expression> newFilter } => new AzureAISearchFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), + _ => null + }; +#pragma warning restore CS0618 + + // Build search options. + var searchOptions = new SearchOptions + { + VectorSearch = new(), + Size = internalOptions.Top, + Skip = internalOptions.Skip, + Filter = filter, + IncludeTotalCount = internalOptions.IncludeTotalCount, + }; + searchOptions.VectorSearch.Queries.AddRange(vectorQueries); + searchOptions.SearchFields.Add(textDataPropertyName); + + // Filter out vector fields if requested. + if (!internalOptions.IncludeVectors) + { + searchOptions.Select.Add(this._propertyReader.KeyPropertyJsonName); + searchOptions.Select.AddRange(this._propertyReader.DataPropertyJsonNames); + } + + var keywordsCombined = string.Join(" ", keywords); + + return this.SearchAndMapToDataModelAsync(keywordsCombined, searchOptions, internalOptions.IncludeVectors, cancellationToken); + } + /// /// Get the document with the given key and map it to the data model using the configured mapper type. /// @@ -586,31 +636,6 @@ private GetDocumentOptions ConvertGetDocumentOptions(GetRecordOptions? options) return innerOptions; } - /// - /// Resolve the vector field name to use for a search by using the storage name for the field name from options - /// if available, and falling back to the first vector field name if not. - /// - /// The vector field name provided via options. - /// The resolved vector field name. - /// Thrown if the provided field name is not a valid field name. - private string ResolveVectorFieldName(string? optionsVectorFieldName) - { - string? vectorFieldName; - if (!string.IsNullOrWhiteSpace(optionsVectorFieldName)) - { - if (!this._propertyReader.JsonPropertyNamesMap.TryGetValue(optionsVectorFieldName!, out vectorFieldName)) - { - throw new InvalidOperationException($"The collection does not have a vector field named '{optionsVectorFieldName}'."); - } - } - else - { - vectorFieldName = this._propertyReader.FirstVectorPropertyJsonName; - } - - return vectorFieldName!; - } - /// /// Get a document with the given key, and return null if it is not found. 
/// @@ -668,4 +693,16 @@ private async Task RunOperationAsync(string operationName, Func> o }; } } + + private static ReadOnlyMemory VerifyVectorParam(TVector vector) + { + Verify.NotNull(vector); + + if (vector is not ReadOnlyMemory floatVector) + { + throw new NotSupportedException($"The provided vector type {vector.GetType().FullName} is not supported by the Azure AI Search connector."); + } + + return floatVector; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index 794495cd1548..4198a1b376e3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -263,13 +263,7 @@ public virtual async Task> VectorizedSearchAsync GetStoragePropertyNames( return storagePropertyNames; } - /// - /// Get vector property to use for a search by using the storage name for the field name from options - /// if available, and falling back to the first vector property in if not. - /// - /// The vector field name. - /// Thrown if the provided field name is not a valid field name. - private VectorStoreRecordVectorProperty? GetVectorPropertyForSearch(string? vectorFieldName) - { - // If vector property name is provided in options, try to find it in schema or throw an exception. - if (!string.IsNullOrWhiteSpace(vectorFieldName)) - { - // Check vector properties by data model property name. - var vectorProperty = this._propertyReader.VectorProperties - .FirstOrDefault(l => l.DataModelPropertyName.Equals(vectorFieldName, StringComparison.Ordinal)); - - if (vectorProperty is not null) - { - return vectorProperty; - } - - throw new InvalidOperationException($"The {typeof(TRecord).FullName} type does not have a vector property named '{vectorFieldName}'."); - } - - // If vector property is not provided in options, return first vector property from schema. - return this._propertyReader.VectorProperty; - } - /// /// Returns custom mapper, generic data model mapper or default record mapper. /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs index 4a5d2ec901fe..d3ae19517db5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs @@ -24,18 +24,24 @@ internal static class AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder /// public static QueryDefinition BuildSearchQuery( TVector vector, + ICollection? keywords, List fields, Dictionary storagePropertyNames, string vectorPropertyName, + string? textPropertyName, string scorePropertyName, - VectorSearchOptions searchOptions) +#pragma warning disable CS0618 // Type or member is obsolete + VectorSearchFilter? oldFilter, +#pragma warning restore CS0618 // Type or member is obsolete + Expression>? 
filter, + int top, + int skip) { Verify.NotNull(vector); const string VectorVariableName = "@vector"; - const string OffsetVariableName = "@offset"; - const string LimitVariableName = "@limit"; - const string TopVariableName = "@top"; + // TODO: Use parameterized query for keywords when FullTextScore with parameters is supported. + //const string KeywordsVariableName = "@keywords"; var tableVariableName = AzureCosmosDBNoSQLConstants.ContainerAlias; @@ -43,11 +49,18 @@ public static QueryDefinition BuildSearchQuery( var vectorDistanceArgument = $"VectorDistance({tableVariableName}.{vectorPropertyName}, {VectorVariableName})"; var vectorDistanceArgumentWithAlias = $"{vectorDistanceArgument} AS {scorePropertyName}"; + // Passing keywords using a parameter is not yet supported for FullTextScore so doing some crude string sanitization in the mean time to frustrate script injection. + var sanitizedKeywords = keywords is not null ? keywords.Select(x => x.Replace("\"", "")) : null; + var formattedKeywords = sanitizedKeywords is not null ? $"[\"{string.Join("\", \"", sanitizedKeywords)}\"]" : null; + var fullTextScoreArgument = textPropertyName is not null && keywords is not null ? $"FullTextScore({tableVariableName}.{textPropertyName}, {formattedKeywords})" : null; + + var rankingArgument = fullTextScoreArgument is null ? vectorDistanceArgument : $"RANK RRF({vectorDistanceArgument}, {fullTextScoreArgument})"; + var selectClauseArguments = string.Join(SelectClauseDelimiter, [.. fieldsArgument, vectorDistanceArgumentWithAlias]); #pragma warning disable CS0618 // VectorSearchFilter is obsolete // Build filter object. - var (whereClause, filterParameters) = searchOptions switch + var (whereClause, filterParameters) = (OldFilter: oldFilter, Filter: filter) switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), { OldFilter: VectorSearchFilter legacyFilter } => BuildSearchFilter(legacyFilter, storagePropertyNames), @@ -62,8 +75,9 @@ public static QueryDefinition BuildSearchQuery( }; // If Offset is not configured, use Top parameter instead of Limit/Offset - // since it's more optimized. - var topArgument = searchOptions.Skip == 0 ? $"TOP {TopVariableName} " : string.Empty; + // since it's more optimized. Hybrid search doesn't allow top to be passed as a parameter + // so directly add it to the query here. + var topArgument = skip == 0 ? $"TOP {top} " : string.Empty; var builder = new StringBuilder(); @@ -75,19 +89,21 @@ public static QueryDefinition BuildSearchQuery( builder.Append("WHERE ").AppendLine(whereClause); } - builder.AppendLine($"ORDER BY {vectorDistanceArgument}"); + builder.AppendLine($"ORDER BY {rankingArgument}"); - if (!string.IsNullOrEmpty(topArgument)) + if (string.IsNullOrEmpty(topArgument)) { - queryParameters.Add(TopVariableName, searchOptions.Top); - } - else - { - builder.AppendLine($"OFFSET {OffsetVariableName} LIMIT {LimitVariableName}"); - queryParameters.Add(OffsetVariableName, searchOptions.Skip); - queryParameters.Add(LimitVariableName, searchOptions.Top); + // Hybrid search doesn't allow offset and limit to be passed as parameters + // so directly add it to the query here. + builder.AppendLine($"OFFSET {skip} LIMIT {top}"); } + // TODO: Use parameterized query for keywords when FullTextScore with parameters is supported. 
+ //if (fullTextScoreArgument is not null) + //{ + // queryParameters.Add(KeywordsVariableName, keywords!.ToArray()); + //} + var queryDefinition = new QueryDefinition(builder.ToString()); if (filterParameters is { Count: > 0 }) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index aaacacde37fb..4c8955c4ce14 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -24,7 +24,8 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; #pragma warning disable CA1711 // Identifiers should not have incorrect suffix public class AzureCosmosDBNoSQLVectorStoreRecordCollection : IVectorStoreRecordCollection, - IVectorStoreRecordCollection + IVectorStoreRecordCollection, + IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect { /// The name of this database for telemetry purposes. @@ -71,6 +72,9 @@ public class AzureCosmosDBNoSQLVectorStoreRecordCollection : /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + /// The default options for hybrid vector search. + private static readonly HybridSearchOptions s_defaultKeywordVectorizedHybridSearchOptions = new(); + /// that can be used to manage the collections in Azure CosmosDB NoSQL. private readonly Database _database; @@ -361,42 +365,77 @@ public virtual Task> VectorizedSearchAsync const string OperationName = "VectorizedSearch"; const string ScorePropertyName = "SimilarityScore"; - Verify.NotNull(vector); + this.VerifyVectorType(vector); - var vectorType = vector.GetType(); + var searchOptions = options ?? s_defaultVectorSearchOptions; + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions.VectorPropertyName); + var vectorPropertyName = this._storagePropertyNames[vectorProperty.DataModelPropertyName]; - if (!s_supportedVectorTypes.Contains(vectorType)) - { - throw new NotSupportedException( - $"The provided vector type {vectorType.FullName} is not supported by the Azure CosmosDB NoSQL connector. " + - $"Supported types are: {string.Join(", ", s_supportedVectorTypes.Select(l => l.FullName))}"); - } + var fields = new List(searchOptions.IncludeVectors ? this._storagePropertyNames.Values : this._nonVectorStoragePropertyNames); - var searchOptions = options ?? 
s_defaultVectorSearchOptions; - var vectorProperty = this.GetVectorPropertyForSearch(searchOptions.VectorPropertyName); +#pragma warning disable CS0618 // Type or member is obsolete + var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( + vector, + null, + fields, + this._storagePropertyNames, + vectorPropertyName, + null, + ScorePropertyName, + searchOptions.OldFilter, + searchOptions.Filter, + searchOptions.Top, + searchOptions.Skip); +#pragma warning restore CS0618 // Type or member is obsolete - if (vectorProperty is null) - { - throw new InvalidOperationException("The collection does not have any vector properties, so vector search is not possible."); - } + var searchResults = this.GetItemsAsync(queryDefinition, cancellationToken); + var mappedResults = this.MapSearchResultsAsync( + searchResults, + ScorePropertyName, + OperationName, + searchOptions.IncludeVectors, + cancellationToken); + return Task.FromResult(new VectorSearchResults(mappedResults)); + } - var fields = new List(searchOptions.IncludeVectors ? this._storagePropertyNames.Values : this._nonVectorStoragePropertyNames); + /// + public Task> HybridSearchAsync(TVector vector, ICollection keywords, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + { + const string OperationName = "VectorizedSearch"; + const string ScorePropertyName = "SimilarityScore"; + + this.VerifyVectorType(vector); + + var searchOptions = options ?? s_defaultKeywordVectorizedHybridSearchOptions; + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions.VectorPropertyName); var vectorPropertyName = this._storagePropertyNames[vectorProperty.DataModelPropertyName]; - var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( + var textProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalPropertyName); + var textPropertyName = this._storagePropertyNames[textProperty.DataModelPropertyName]; + + var fields = new List(searchOptions.IncludeVectors ? this._storagePropertyNames.Values : this._nonVectorStoragePropertyNames); + +#pragma warning disable CS0618 // Type or member is obsolete + var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( vector, + keywords, fields, this._storagePropertyNames, vectorPropertyName, + textPropertyName, ScorePropertyName, - searchOptions); + searchOptions.OldFilter, + searchOptions.Filter, + searchOptions.Top, + searchOptions.Skip); +#pragma warning restore CS0618 // Type or member is obsolete var searchResults = this.GetItemsAsync(queryDefinition, cancellationToken); var mappedResults = this.MapSearchResultsAsync( searchResults, ScorePropertyName, OperationName, - searchOptions, + searchOptions.IncludeVectors, cancellationToken); return Task.FromResult(new VectorSearchResults(mappedResults)); } @@ -405,6 +444,20 @@ public virtual Task> VectorizedSearchAsync #region private + private void VerifyVectorType(TVector? vector) + { + Verify.NotNull(vector); + + var vectorType = vector.GetType(); + + if (!s_supportedVectorTypes.Contains(vectorType)) + { + throw new NotSupportedException( + $"The provided vector type {vectorType.FullName} is not supported by the Azure CosmosDB NoSQL connector. 
" + + $"Supported types are: {string.Join(", ", s_supportedVectorTypes.Select(l => l.FullName))}"); + } + } + private async Task RunOperationAsync(string operationName, Func> operation) { try @@ -493,6 +546,7 @@ private ContainerProperties GetContainerProperties() indexingPolicy.VectorIndexes = vectorIndexPaths; + var fullTextPolicy = new FullTextPolicy() { FullTextPaths = new Collection() }; var vectorEmbeddingPolicy = new VectorEmbeddingPolicy(embeddings); // Process Data properties. @@ -502,6 +556,12 @@ private ContainerProperties GetContainerProperties() { indexingPolicy.IncludedPaths.Add(new IncludedPath { Path = $"/{this._storagePropertyNames[property.DataModelPropertyName]}/?" }); } + if (property.IsFullTextSearchable) + { + indexingPolicy.FullTextIndexes.Add(new FullTextIndexPath { Path = $"/{this._storagePropertyNames[property.DataModelPropertyName]}" }); + // TODO: Switch to using language from a setting. + fullTextPolicy.FullTextPaths.Add(new FullTextPath { Path = $"/{this._storagePropertyNames[property.DataModelPropertyName]}", Language = "en-US" }); + } } // Adding special mandatory indexing path. @@ -516,7 +576,8 @@ private ContainerProperties GetContainerProperties() return new ContainerProperties(this.CollectionName, partitionKeyPath: $"/{this._partitionKeyStoragePropertyName}") { VectorEmbeddingPolicy = vectorEmbeddingPolicy, - IndexingPolicy = indexingPolicy + IndexingPolicy = indexingPolicy, + FullTextPolicy = fullTextPolicy }; } @@ -679,7 +740,7 @@ private async IAsyncEnumerable> MapSearchResultsAsyn IAsyncEnumerable jsonObjects, string scorePropertyName, string operationName, - VectorSearchOptions searchOptions, + bool includeVectors, [EnumeratorCancellation] CancellationToken cancellationToken) { await foreach (var jsonObject in jsonObjects.ConfigureAwait(false)) @@ -693,39 +754,12 @@ private async IAsyncEnumerable> MapSearchResultsAsyn DatabaseName, this.CollectionName, operationName, - () => this._mapper.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = searchOptions.IncludeVectors })); + () => this._mapper.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); yield return new VectorSearchResult(record, score); } } - /// - /// Get vector property to use for a search by using the storage name for the field name from options - /// if available, and falling back to the first vector property in if not. - /// - /// The vector field name. - /// Thrown if the provided field name is not a valid field name. - private VectorStoreRecordVectorProperty? GetVectorPropertyForSearch(string? vectorFieldName) - { - // If vector property name is provided in options, try to find it in schema or throw an exception. - if (!string.IsNullOrWhiteSpace(vectorFieldName)) - { - // Check vector properties by data model property name. - var vectorProperty = this._propertyReader.VectorProperties - .FirstOrDefault(l => l.DataModelPropertyName.Equals(vectorFieldName, StringComparison.Ordinal)); - - if (vectorProperty is not null) - { - return vectorProperty; - } - - throw new InvalidOperationException($"The {typeof(TRecord).FullName} type does not have a vector property named '{vectorFieldName}'."); - } - - // If vector property is not provided in options, return first vector property from schema. - return this._propertyReader.VectorProperty; - } - /// /// Returns custom mapper, generic data model mapper or default record mapper. 
/// diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index b330c873fdbd..cea018652ca2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -216,11 +216,6 @@ public async Task> VectorizedSearchAsync(T { Verify.NotNull(vector); - if (this._propertyReader.FirstVectorPropertyName is null) - { - throw new InvalidOperationException("The collection does not have any vector fields, so vector search is not possible."); - } - if (vector is not ReadOnlyMemory floatVector) { throw new NotSupportedException($"The provided vector type {vector.GetType().FullName} is not supported by the InMemory Vector Store."); @@ -228,12 +223,7 @@ public async Task> VectorizedSearchAsync(T // Resolve options and get requested vector property or first as default. var internalOptions = options ?? s_defaultVectorSearchOptions; - - var vectorPropertyName = string.IsNullOrWhiteSpace(internalOptions.VectorPropertyName) ? this._propertyReader.FirstVectorPropertyName : internalOptions.VectorPropertyName; - if (!this._vectorProperties.TryGetValue(vectorPropertyName!, out var vectorProperty)) - { - throw new InvalidOperationException($"The collection does not have a vector field named '{internalOptions.VectorPropertyName}', so vector search is not possible."); - } + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions.VectorPropertyName); #pragma warning disable CS0618 // VectorSearchFilter is obsolete // Filter records using the provided filter before doing the vector comparison. @@ -250,7 +240,7 @@ public async Task> VectorizedSearchAsync(T // Compare each vector in the filtered results with the provided vector. var results = filteredRecords.Select(record => { - var vectorObject = this._vectorResolver(vectorPropertyName!, record); + var vectorObject = this._vectorResolver(vectorProperty.DataModelPropertyName!, record); if (vectorObject is not ReadOnlyMemory dbVector) { return null; diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionCreateMapping.cs index 36d0b9ad8c1e..3d6b634a14e1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionCreateMapping.cs @@ -75,6 +75,34 @@ public static BsonArray GetFilterableDataIndexFields( return indexArray; } + /// + /// Returns a list of of fields to index for full text search data properties. + /// + /// Collection of data properties for index creation. + /// A dictionary that maps from a property name to the storage name. 
+ public static List GetFullTextSearchableDataIndexFields( + IReadOnlyList dataProperties, + Dictionary storagePropertyNames) + { + var fieldElements = new List(); + + // Create separate index for each data property + foreach (var property in dataProperties) + { + if (property.IsFullTextSearchable) + { + var dataPropertyName = storagePropertyNames[property.DataModelPropertyName]; + + fieldElements.Add(new BsonElement(dataPropertyName, new BsonArray() + { + new BsonDocument() { { "type", "string" }, } + })); + } + } + + return fieldElements; + } + /// /// More information about MongoDB distance functions here: . /// diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionSearchMapping.cs index de47f6723b23..8e0258f0aa21 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionSearchMapping.cs @@ -13,8 +13,8 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// internal static class MongoDBVectorStoreCollectionSearchMapping { - /// Returns distance function specified on vector property or default . - public static string GetVectorPropertyDistanceFunction(string? distanceFunction) => !string.IsNullOrWhiteSpace(distanceFunction) ? distanceFunction! : MongoDBConstants.DefaultDistanceFunction; + /// Returns distance function specified on vector property or default. + public static string GetVectorPropertyDistanceFunction(string? distanceFunction) => !string.IsNullOrWhiteSpace(distanceFunction) ? distanceFunction! : DistanceFunction.CosineSimilarity; #pragma warning disable CS0618 // VectorSearchFilter is obsolete /// @@ -119,8 +119,8 @@ public static BsonDocument GetProjectionQuery(string scorePropertyName, string d { return new BsonDocument { - { "$project", - new BsonDocument + { + "$project", new BsonDocument { { scorePropertyName, new BsonDocument { { "$meta", "vectorSearchScore" } } }, { documentPropertyName, "$$ROOT" } @@ -128,4 +128,254 @@ public static BsonDocument GetProjectionQuery(string scorePropertyName, string d } }; } + + /// Returns a pipeline for hybrid search using vector search and full text search. + public static BsonDocument[] GetHybridSearchPipeline( + TVector vector, + ICollection keywords, + string collectionName, + string vectorIndexName, + string fullTextSearchIndexName, + string vectorPropertyName, + string textPropertyName, + string scorePropertyName, + string documentPropertyName, + int limit, + int numCandidates, + BsonDocument? filter) + { + // Create the FullTextSearch pipeline first. + var ftsPipeline = new List + { + // The full text search stage. + GetFullTextSearchQuery(keywords, fullTextSearchIndexName, textPropertyName, filter), + // Limit the results to the maximum that we may require. + new() + { + { + "$limit", limit + } + }, + // Converts the list of documents to a single document with an array property containing all the source documents. + GroupDocsSection(), + // Creates separate documents again where each has a new rank property based on the index of the document. + UnwindDocsArraySection(), + // Add a weighted score based on the rank of the document. + AddScore("fts_score", 0.9), + // Project the score, the id and the original document as properties, so that we can join with the vector search results on id. 
+ ProjectWithScore("fts_score"), + }; + + // Add filtering to the FullTextSearch pipeline if filter is provided. + if (filter is not null) + { + // Insert filter at the second position, since + // MongoDB requires search to be the first stage. + ftsPipeline.Insert(1, new BsonDocument + { + { + "$match", filter + } + }); + } + + // Create combined pipeline with the vector search part first. + var pipeline = new BsonDocument[] + { + // The vector search stage. + GetSearchQuery(vector, vectorIndexName, vectorPropertyName, limit, numCandidates, filter), + // Converts the list of documents to a single document with an array property containing all the source documents. + GroupDocsSection(), + // Creates separate documents again where each has a new rank property based on the index of the document. + UnwindDocsArraySection(), + // Add a weighted score based on the rank of the document. + AddScore("vs_score", 0.1), + // Project the score, the id and the original document as properties, so that we can join with the vector search results on id. + ProjectWithScore("vs_score"), + // Union the vector search results with the results from the full text search pipeilne. + new() + { + { + "$unionWith", new BsonDocument + { + { "coll", collectionName }, + { "pipeline", new BsonArray(ftsPipeline) } + } + } + }, + // Group by id and store scores from each pipeline, so that we don't have duplicate documents. + new() + { + { + "$group", new BsonDocument + { + { "_id", "$_id" }, + { "docs", new BsonDocument { { "$first", "$docs" } } }, + { "vs_score", new BsonDocument { { "$max", "$vs_score" } } }, + { "fts_score", new BsonDocument { { "$max", "$fts_score" } } } + } + } + }, + // If a score is missing (i.e. the document was only found in the other pipeline), default the missing score to 0. + new() + { + { + "$project", new BsonDocument + { + { "_id", 1 }, + { "docs", 1 }, + { "vs_score", new BsonDocument { { "$ifNull", new BsonArray { "$vs_score", 0 } } } }, + { "fts_score", new BsonDocument { { "$ifNull", new BsonArray { "$fts_score", 0 } } } } + } + } + }, + // Calculate a combined score based on the vector search and full text search scores. + new() + { + { + "$project", new BsonDocument + { + { scorePropertyName, new BsonDocument { { "$add", new BsonArray { "$fts_score", "$vs_score" } } } }, + { "vs_score", 1 }, + { "fts_score", 1 }, + { documentPropertyName, "$docs" } + } + } + }, + // Sort by score desc. + new() + { + { + "$sort", new BsonDocument + { + { scorePropertyName, -1 } + } + } + }, + // Take the required N results. + new() + { + { + "$limit", limit + } + }, + }; + + return pipeline; + } + + /// Builds the full text search query stage. + private static BsonDocument GetFullTextSearchQuery( + ICollection keywords, + string fullTextSearchIndexName, + string textPropertyName, + BsonDocument? filter) + { + var fullTextSearchQuery = new BsonDocument + { + { + "$search", new BsonDocument + { + { "index", fullTextSearchIndexName }, + { "text", + new BsonDocument + { + { "query", new BsonArray(keywords) }, + { "path", textPropertyName }, + { "matchCriteria", "any" } + } + } + } + } + }; + + return fullTextSearchQuery; + } + + /// Create a stage that groups all documents into a single document with an array property containing all the source documents. 
+ private static BsonDocument GroupDocsSection() + { + return new BsonDocument + { + { + "$group", new BsonDocument + { + { "_id", BsonNull.Value }, + { "docs", new BsonDocument { { "$push", "$$ROOT" } } } + } + } + }; + } + + /// Creates a stage that splits an array of documents from a single document into separate documents and adds an index property for each document based on its index in the array. + private static BsonDocument UnwindDocsArraySection() + { + return new BsonDocument + { + { + "$unwind", new BsonDocument + { + { "path", "$docs" }, + { "includeArrayIndex", "rank" } + } + } + }; + } + + /// Adds a weighted score to each document based on the rank property on the document. + private static BsonDocument AddScore(string scoreName, double weight) + { + return new() + { + { + "$addFields", new BsonDocument + { + { + scoreName, new BsonDocument + { + { + "$multiply", new BsonArray() + { + weight, + new BsonDocument + { + { "$divide", new BsonArray() + { + 1.0, + new BsonDocument + { + { "$add", new BsonArray() + { + "$rank", + 60 + } + } + } + } + } + } + } + } + } + }, + } + } + }; + } + + /// Projects the score, the id and the original document as properties. + private static BsonDocument ProjectWithScore(string scoreName) + { + return new() + { + { + "$project", new BsonDocument + { + { scoreName, 1 }, + { "_id", "$docs._id" }, + { "docs", "$docs" } + } + } + }; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index 6b86db514047..ff89ebff04f8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -21,7 +21,7 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class MongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection +public class MongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// The name of this database for telemetry purposes. @@ -36,6 +36,9 @@ public class MongoDBVectorStoreRecordCollection : IVectorStoreRecordCol /// The default options for vector search. private static readonly MEVD.VectorSearchOptions s_defaultVectorSearchOptions = new(); + /// The default options for hybrid vector search. + private static readonly HybridSearchOptions s_defaultKeywordVectorizedHybridSearchOptions = new(); + /// that can be used to manage the collections in MongoDB. private readonly IMongoDatabase _mongoDatabase; @@ -252,27 +255,10 @@ public virtual async Task> VectorizedSearchAsync? options = null, CancellationToken cancellationToken = default) { - Verify.NotNull(vector); - - Array vectorArray = vector switch - { - ReadOnlyMemory memoryFloat => memoryFloat.ToArray(), - ReadOnlyMemory memoryDouble => memoryDouble.ToArray(), - _ => throw new NotSupportedException( - $"The provided vector type {vector.GetType().FullName} is not supported by the MongoDB connector. " + - $"Supported types are: {string.Join(", ", [ - typeof(ReadOnlyMemory).FullName, - typeof(ReadOnlyMemory).FullName])}") - }; + Array vectorArray = VerifyVectorParam(vector); var searchOptions = options ?? 
s_defaultVectorSearchOptions; - var vectorProperty = this.GetVectorPropertyForSearch(searchOptions.VectorPropertyName); - - if (vectorProperty is null) - { - throw new InvalidOperationException("The collection does not have any vector properties, so vector search is not possible."); - } - + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions.VectorPropertyName); var vectorPropertyName = this._storagePropertyNames[vectorProperty.DataModelPropertyName]; #pragma warning disable CS0618 // VectorSearchFilter is obsolete @@ -315,7 +301,63 @@ public virtual async Task> VectorizedSearchAsync(pipeline, cancellationToken: cancellationToken) .ConfigureAwait(false); - return new VectorSearchResults(this.EnumerateAndMapSearchResultsAsync(cursor, searchOptions, cancellationToken)); + return new VectorSearchResults(this.EnumerateAndMapSearchResultsAsync(cursor, searchOptions.Skip, searchOptions.IncludeVectors, cancellationToken)); + }, + cancellationToken).ConfigureAwait(false); + } + + /// + public async Task> HybridSearchAsync(TVector vector, ICollection keywords, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + { + Array vectorArray = VerifyVectorParam(vector); + + var searchOptions = options ?? s_defaultKeywordVectorizedHybridSearchOptions; + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions.VectorPropertyName); + var vectorPropertyName = this._storagePropertyNames[vectorProperty.DataModelPropertyName]; + var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalPropertyName); + var textDataPropertyName = this._storagePropertyNames[textDataProperty.DataModelPropertyName]; + +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + var filter = searchOptions switch + { + { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), + { OldFilter: VectorSearchFilter legacyFilter } => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(legacyFilter, this._storagePropertyNames), + { Filter: Expression> newFilter } => new MongoDBFilterTranslator().Translate(newFilter, this._storagePropertyNames), + _ => null + }; +#pragma warning restore CS0618 + + // Constructing a query to fetch "skip + top" total items + // to perform skip logic locally, since skip option is not part of API. + var itemsAmount = searchOptions.Skip + searchOptions.Top; + + var numCandidates = this._options.NumCandidates ?? 
itemsAmount * MongoDBConstants.DefaultNumCandidatesRatio; + + BsonDocument[] pipeline = MongoDBVectorStoreCollectionSearchMapping.GetHybridSearchPipeline( + vectorArray, + keywords, + this.CollectionName, + this._options.VectorIndexName, + this._options.FullTextSearchIndexName, + vectorPropertyName, + textDataPropertyName, + ScorePropertyName, + DocumentPropertyName, + itemsAmount, + numCandidates, + filter); + + return await this.RunOperationWithRetryAsync( + "KeywordVectorizedHybridSearch", + this._options.MaxRetries, + this._options.DelayInMilliseconds, + async () => + { + var cursor = await this._mongoCollection + .AggregateAsync(pipeline, cancellationToken: cancellationToken) + .ConfigureAwait(false); + + return new VectorSearchResults(this.EnumerateAndMapSearchResultsAsync(cursor, searchOptions.Skip, searchOptions.IncludeVectors, cancellationToken)); }, cancellationToken).ConfigureAwait(false); } @@ -327,34 +369,64 @@ private async Task CreateIndexesAsync(string collectionName, CancellationToken c var indexCursor = await this._mongoCollection.Indexes.ListAsync(cancellationToken).ConfigureAwait(false); var indexes = indexCursor.ToList(cancellationToken).Select(index => index["name"].ToString()) ?? []; - if (indexes.Contains(this._options.VectorIndexName)) - { - // Vector index already exists. - return; - } + var indexArray = new BsonArray(); - var fieldsArray = new BsonArray(); + // Create the vector index config if the index does not exist + if (!indexes.Contains(this._options.VectorIndexName)) + { + var fieldsArray = new BsonArray(); - fieldsArray.AddRange(MongoDBVectorStoreCollectionCreateMapping.GetVectorIndexFields( - this._propertyReader.VectorProperties, - this._storagePropertyNames)); + fieldsArray.AddRange(MongoDBVectorStoreCollectionCreateMapping.GetVectorIndexFields( + this._propertyReader.VectorProperties, + this._storagePropertyNames)); - fieldsArray.AddRange(MongoDBVectorStoreCollectionCreateMapping.GetFilterableDataIndexFields( - this._propertyReader.DataProperties, - this._storagePropertyNames)); + fieldsArray.AddRange(MongoDBVectorStoreCollectionCreateMapping.GetFilterableDataIndexFields( + this._propertyReader.DataProperties, + this._storagePropertyNames)); - if (fieldsArray.Count > 0) - { - var indexArray = new BsonArray + if (fieldsArray.Count > 0) { - new BsonDocument + indexArray.Add(new BsonDocument { { "name", this._options.VectorIndexName }, { "type", "vectorSearch" }, { "definition", new BsonDocument { ["fields"] = fieldsArray } }, - } - }; + }); + } + } + + // Create the full text search index config if the index does not exist + if (!indexes.Contains(this._options.FullTextSearchIndexName)) + { + var fieldsDocument = new BsonDocument(); + + fieldsDocument.AddRange(MongoDBVectorStoreCollectionCreateMapping.GetFullTextSearchableDataIndexFields( + this._propertyReader.DataProperties, + this._storagePropertyNames)); + + if (fieldsDocument.ElementCount > 0) + { + indexArray.Add(new BsonDocument + { + { "name", this._options.FullTextSearchIndexName }, + { "type", "search" }, + { + "definition", new BsonDocument + { + ["mappings"] = new BsonDocument + { + ["dynamic"] = false, + ["fields"] = fieldsDocument + } + } + }, + }); + } + } + // Create any missing indexes. 
+ if (indexArray.Count > 0) + { var createIndexCommand = new BsonDocument { { "createSearchIndexes", collectionName }, @@ -391,7 +463,8 @@ private async Task> FindAsync(FilterDefinition> EnumerateAndMapSearchResultsAsync( IAsyncCursor cursor, - MEVD.VectorSearchOptions searchOptions, + int skip, + bool includeVectors, [EnumeratorCancellation] CancellationToken cancellationToken) { const string OperationName = "Aggregate"; @@ -402,14 +475,14 @@ private async IAsyncEnumerable> EnumerateAndMapSearc { foreach (var response in cursor.Current) { - if (skipCounter >= searchOptions.Skip) + if (skipCounter >= skip) { var score = response[ScorePropertyName].AsDouble; var record = VectorStoreErrorHandler.RunModelConversion( DatabaseName, this.CollectionName, OperationName, - () => this._mapper.MapFromStorageToDataModel(response[DocumentPropertyName].AsBsonDocument, new() { IncludeVectors = searchOptions.IncludeVectors })); + () => this._mapper.MapFromStorageToDataModel(response[DocumentPropertyName].AsBsonDocument, new() { IncludeVectors = includeVectors })); yield return new VectorSearchResult(record, score); } @@ -571,33 +644,6 @@ private static Dictionary GetStoragePropertyNames( return storagePropertyNames; } - /// - /// Get vector property to use for a search by using the storage name for the field name from options - /// if available, and falling back to the first vector property in if not. - /// - /// The vector field name. - /// Thrown if the provided field name is not a valid field name. - private VectorStoreRecordVectorProperty? GetVectorPropertyForSearch(string? vectorFieldName) - { - // If vector property name is provided in options, try to find it in schema or throw an exception. - if (!string.IsNullOrWhiteSpace(vectorFieldName)) - { - // Check vector properties by data model property name. - var vectorProperty = this._propertyReader.VectorProperties - .FirstOrDefault(l => l.DataModelPropertyName.Equals(vectorFieldName, StringComparison.Ordinal)); - - if (vectorProperty is not null) - { - return vectorProperty; - } - - throw new InvalidOperationException($"The {typeof(TRecord).FullName} type does not have a vector property named '{vectorFieldName}'."); - } - - // If vector property is not provided in options, return first vector property from schema. - return this._propertyReader.VectorProperty; - } - /// /// Returns custom mapper, generic data model mapper or default record mapper. /// @@ -616,5 +662,20 @@ private IVectorStoreRecordMapper InitializeMapper() return new MongoDBVectorStoreRecordMapper(this._propertyReader); } + private static Array VerifyVectorParam(TVector vector) + { + Verify.NotNull(vector); + + return vector switch + { + ReadOnlyMemory memoryFloat => memoryFloat.ToArray(), + ReadOnlyMemory memoryDouble => memoryDouble.ToArray(), + _ => throw new NotSupportedException( + $"The provided vector type {vector.GetType().FullName} is not supported by the MongoDB connector. 
" + + $"Supported types are: {string.Join(", ", [ + typeof(ReadOnlyMemory).FullName, + typeof(ReadOnlyMemory).FullName])}") + }; + } #endregion } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs index 97f48a53dfa1..bc591f87cdc0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs @@ -30,6 +30,11 @@ public sealed class MongoDBVectorStoreRecordCollectionOptions /// public string VectorIndexName { get; init; } = MongoDBConstants.DefaultVectorIndexName; + /// + /// Full text search index name to use. If null, the default "full_text_search_index" name will be used. + /// + public string FullTextSearchIndexName { get; init; } = MongoDBConstants.DefaultFullTextSearchIndexName; + /// /// Number of max retries for vector collection operation. /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index 81cdf1e1cc88..6a87d2454179 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -266,12 +266,7 @@ public virtual Task> VectorizedSearchAsync } var searchOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this.GetVectorPropertyForSearch(searchOptions.VectorPropertyName); - - if (vectorProperty is null) - { - throw new InvalidOperationException("The collection does not have any vector properties, so vector search is not possible."); - } + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions.VectorPropertyName); var pgVector = PostgresVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); @@ -318,33 +313,6 @@ private Task InternalCreateCollectionAsync(bool ifNotExists, CancellationToken c return this._client.CreateTableAsync(this.CollectionName, this._propertyReader.RecordDefinition.Properties, ifNotExists, cancellationToken); } - /// - /// Get vector property to use for a search by using the storage name for the field name from options - /// if available, and falling back to the first vector property in if not. - /// - /// The vector field name. - /// Thrown if the provided field name is not a valid field name. - private VectorStoreRecordVectorProperty? GetVectorPropertyForSearch(string? vectorFieldName) - { - // If vector property name is provided in options, try to find it in schema or throw an exception. - if (!string.IsNullOrWhiteSpace(vectorFieldName)) - { - // Check vector properties by data model property name. - var vectorProperty = this._propertyReader.VectorProperties - .FirstOrDefault(l => l.DataModelPropertyName.Equals(vectorFieldName, StringComparison.Ordinal)); - - if (vectorProperty is not null) - { - return vectorProperty; - } - - throw new InvalidOperationException($"The {typeof(TRecord).FullName} type does not have a vector property named '{vectorFieldName}'."); - } - - // If vector property is not provided in options, return first vector property from schema. 
- return this._propertyReader.VectorProperty; - } - private async Task RunOperationAsync(string operationName, Func operation) { try diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index 401740a0e7a4..033c88914966 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -19,7 +19,10 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class QdrantVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorStoreRecordCollection +public class QdrantVectorStoreRecordCollection : + IVectorStoreRecordCollection, + IVectorStoreRecordCollection, + IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// A set of types that a key on the provided model may have. @@ -32,6 +35,9 @@ public class QdrantVectorStoreRecordCollection : IVectorStoreRecordColl /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + /// The default options for hybrid vector search. + private static readonly HybridSearchOptions s_defaultKeywordVectorizedHybridSearchOptions = new(); + /// The name of this database for telemetry purposes. private const string DatabaseName = "Qdrant"; @@ -460,19 +466,11 @@ private async IAsyncEnumerable GetBatchByPointIdAsync( /// public virtual async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { - Verify.NotNull(vector); - - if (this._propertyReader.FirstVectorPropertyName is null) - { - throw new InvalidOperationException("The collection does not have any vector fields, so vector search is not possible."); - } - - if (vector is not ReadOnlyMemory floatVector) - { - throw new NotSupportedException($"The provided vector type {vector.GetType().FullName} is not supported by the Qdrant connector."); - } + var floatVector = VerifyVectorParam(vector); + // Resolve options. var internalOptions = options ?? s_defaultVectorSearchOptions; + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions.VectorPropertyName); #pragma warning disable CS0618 // Type or member is obsolete // Build filter object. @@ -489,7 +487,7 @@ public virtual async Task> VectorizedSearchAsync> VectorizedSearchAsync(mappedResults.ToAsyncEnumerable()); } - /// - /// Resolve the vector field name to use for a search by using the storage name for the field name from options - /// if available, and falling back to the first vector field name if not. - /// - /// The vector field name provided via options. - /// The resolved vector field name. - /// Thrown if the provided field name is not a valid field name. - private string ResolveVectorFieldName(string? optionsVectorFieldName) + /// + public async Task> HybridSearchAsync(TVector vector, ICollection keywords, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) { - string? vectorFieldName; - if (!string.IsNullOrWhiteSpace(optionsVectorFieldName)) + var floatVector = VerifyVectorParam(vector); + + // Resolve options. + var internalOptions = options ?? 
s_defaultKeywordVectorizedHybridSearchOptions; + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions.VectorPropertyName); + var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(internalOptions.AdditionalPropertyName); + var textDataPropertyName = this._propertyReader.GetStoragePropertyName(textDataProperty.DataModelPropertyName); + + // Build filter object. +#pragma warning disable CS0618 // Type or member is obsolete + // Build filter object. + var filter = internalOptions switch + { + { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), + { OldFilter: VectorSearchFilter legacyFilter } => QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(legacyFilter, this._propertyReader.StoragePropertyNamesMap), + { Filter: Expression> newFilter } => new QdrantFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), + _ => new Filter() + }; +#pragma warning restore CS0618 // Type or member is obsolete + + // Specify the vector name if named vectors are used. + string? vectorName = null; + if (this._options.HasNamedVectors) { - if (!this._propertyReader.StoragePropertyNamesMap.TryGetValue(optionsVectorFieldName!, out vectorFieldName)) + vectorName = this._propertyReader.GetStoragePropertyName(vectorProperty.DataModelPropertyName); + } + + // Specify whether to include vectors in the search results. + var vectorsSelector = new WithVectorsSelector(); + vectorsSelector.Enable = internalOptions.IncludeVectors; + + // Build the vector query. + var vectorQuery = new PrefetchQuery + { + Filter = filter, + Query = new Query { - throw new InvalidOperationException($"The collection does not have a vector field named '{optionsVectorFieldName}'."); - } + Nearest = new VectorInput(floatVector.ToArray()), + }, + }; + + if (this._options.HasNamedVectors) + { + vectorQuery.Using = vectorName; } - else + + // Build the keyword query. + var keywordFilter = filter.Clone(); + var keywordSubFilter = new Filter(); + foreach (string keyword in keywords) { - vectorFieldName = this._propertyReader.FirstVectorPropertyStoragePropertyName; + keywordSubFilter.Should.Add(new Condition() { Field = new FieldCondition() { Key = textDataPropertyName, Match = new Match { Text = keyword } } }); } + keywordFilter.Must.Add(new Condition() { Filter = keywordSubFilter }); + var keywordQuery = new PrefetchQuery + { + Filter = keywordFilter, + }; + + // Build the fusion query. + var fusionQuery = new Query + { + Fusion = Fusion.Rrf, + }; + + // Execute Search. + var points = await this.RunOperationAsync( + "Query", + () => this._qdrantClient.QueryAsync( + this.CollectionName, + prefetch: new List() { vectorQuery, keywordQuery }, + query: fusionQuery, + limit: (ulong)internalOptions.Top, + offset: (ulong)internalOptions.Skip, + vectorsSelector: vectorsSelector, + cancellationToken: cancellationToken)).ConfigureAwait(false); - return vectorFieldName!; + // Map to data model. 
+ var mappedResults = points.Select(point => QdrantVectorStoreCollectionSearchMapping.MapScoredPointToVectorSearchResult( + point, + this._mapper, + internalOptions.IncludeVectors, + DatabaseName, + this._collectionName, + "Query")); + + return new VectorSearchResults(mappedResults.ToAsyncEnumerable()); } /// @@ -597,4 +662,16 @@ private async Task RunOperationAsync(string operationName, Func> o }; } } + + private static ReadOnlyMemory VerifyVectorParam(TVector vector) + { + Verify.NotNull(vector); + + if (vector is not ReadOnlyMemory floatVector) + { + throw new NotSupportedException($"The provided vector type {vector.GetType().FullName} is not supported by the Qdrant connector."); + } + + return floatVector; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index 68276cb97530..a7deb35dc901 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -333,17 +333,18 @@ public virtual async Task> VectorizedSearchAsync this._database diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index 45356d638f30..3befe91242e5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -378,16 +378,17 @@ public virtual async Task> VectorizedSearchAsync this._database diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index e91b1db1bf84..6cbd1a27d474 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -161,7 +161,6 @@ public virtual Task> VectorizedSearchAsync Verify.NotNull(vector); var vectorType = vector.GetType(); - if (!SqliteConstants.SupportedVectorTypes.Contains(vectorType)) { throw new NotSupportedException( @@ -170,12 +169,7 @@ public virtual Task> VectorizedSearchAsync } var searchOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this.GetVectorPropertyForSearch(searchOptions.VectorPropertyName); - - if (vectorProperty is null) - { - throw new InvalidOperationException("The collection does not have any vector properties, so vector search is not possible."); - } + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions.VectorPropertyName); var mappedArray = SqliteVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); @@ -753,32 +747,5 @@ private static string GetVectorTableName( return $"{DefaultVirtualTableNamePrefix}{dataTableName}"; } - /// - /// Get vector property to use for a search by using the storage name for the field name from options - /// if available, and falling back to the first vector property in if not. - /// - /// The vector field name. - /// Thrown if the provided field name is not a valid field name. - private VectorStoreRecordVectorProperty? GetVectorPropertyForSearch(string? vectorFieldName) - { - // If vector property name is provided in options, try to find it in schema or throw an exception. 
- if (!string.IsNullOrWhiteSpace(vectorFieldName)) - { - // Check vector properties by data model property name. - var vectorProperty = this._propertyReader.VectorProperties - .FirstOrDefault(l => l.DataModelPropertyName.Equals(vectorFieldName, StringComparison.Ordinal)); - - if (vectorProperty is not null) - { - return vectorProperty; - } - - throw new InvalidOperationException($"The {typeof(TRecord).FullName} type does not have a vector property named '{vectorFieldName}'."); - } - - // If vector property is not provided in options, return first vector property from schema. - return this._propertyReader.VectorProperty; - } - #endregion } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs index affb79c41fe7..a260b4e9fc2c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs @@ -19,6 +19,9 @@ internal sealed class WeaviateConstants /// Score property name in Weaviate. internal const string ScorePropertyName = "distance"; + /// Score property name for hybrid search in Weaviate. + internal const string HybridScorePropertyName = "score"; + /// Additional properties property name in Weaviate. internal const string AdditionalPropertiesPropertyName = "_additional"; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionSearchMapping.cs index e962aabb5a91..3842a3aded97 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionSearchMapping.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Text.Json; using System.Text.Json.Nodes; namespace Microsoft.SemanticKernel.Connectors.Weaviate; @@ -12,11 +13,17 @@ internal static class WeaviateVectorStoreCollectionSearchMapping /// /// Maps vector search result to the format, which is processable by . /// - public static (JsonObject StorageModel, double? Score) MapSearchResult(JsonNode result) + public static (JsonObject StorageModel, double? Score) MapSearchResult(JsonNode result, string scorePropertyName) { var additionalProperties = result[WeaviateConstants.AdditionalPropertiesPropertyName]; - var score = additionalProperties?[WeaviateConstants.ScorePropertyName]?.GetValue(); + var scoreProperty = additionalProperties?[scorePropertyName]; + double? score = scoreProperty?.GetValueKind() switch + { + JsonValueKind.Number => scoreProperty.GetValue(), + JsonValueKind.String => double.Parse(scoreProperty.GetValue()), + _ => null + }; var id = additionalProperties?[WeaviateConstants.ReservedKeyPropertyName]; var vectors = additionalProperties?[WeaviateConstants.ReservedVectorPropertyName]; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index 3e4343eeb7aa..52c0ad29e832 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -21,7 +21,7 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; /// /// The data model to use for adding, updating and retrieving data from storage. 
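Illustrative sketch (not part of the patch): the MapSearchResult change above inspects the JSON value kind because the Weaviate "_additional" object takes two shapes — vector search returns a numeric "distance", while the hybrid "score" appears to come back as a JSON string. The sample payload below is hypothetical.

using System.Text.Json;
using System.Text.Json.Nodes;

var additional = JsonNode.Parse("""{ "id": "abc", "score": "0.0163" }"""); // hybrid search shape
var scoreNode = additional?["score"];                                       // or ["distance"] for vector search

double? score = scoreNode?.GetValueKind() switch
{
    JsonValueKind.Number => scoreNode.GetValue<double>(),
    JsonValueKind.String => double.Parse(scoreNode.GetValue<string>()),
    _ => null,
};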
#pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCollection +public class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// The name of this database for telemetry purposes. @@ -84,6 +84,9 @@ public class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCo /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + /// The default options for hybrid vector search. + private static readonly HybridSearchOptions s_defaultKeywordVectorizedHybridSearchOptions = new(); + /// that is used to interact with Weaviate API. private readonly HttpClient _httpClient; @@ -338,39 +341,63 @@ public virtual async Task> VectorizedSearchAsync l.FullName))}"); - } + var vectorPropertyName = this._propertyReader.GetJsonPropertyName(vectorProperty.DataModelPropertyName); + var fields = this._propertyReader.DataPropertyJsonNames; - var searchOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this.GetVectorPropertyForSearch(searchOptions.VectorPropertyName); + var query = WeaviateVectorStoreRecordCollectionQueryBuilder.BuildSearchQuery( + vector, + this.CollectionName, + vectorPropertyName, + this._propertyReader.KeyPropertyName, + s_jsonSerializerOptions, + searchOptions, + this._propertyReader.JsonPropertyNamesMap, + this._propertyReader.VectorPropertyJsonNames, + this._propertyReader.DataPropertyJsonNames); - if (vectorProperty is null) - { - throw new InvalidOperationException("The collection does not have any vector properties, so vector search is not possible."); - } + return await this.ExecuteQueryAsync(query, searchOptions.IncludeVectors, WeaviateConstants.ScorePropertyName, OperationName, cancellationToken).ConfigureAwait(false); + } + + /// + public async Task> HybridSearchAsync(TVector vector, ICollection keywords, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + { + const string OperationName = "HybridSearch"; + + VerifyVectorParam(vector); + + var searchOptions = options ?? 
s_defaultKeywordVectorizedHybridSearchOptions; + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions.VectorPropertyName); + var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalPropertyName); var vectorPropertyName = this._propertyReader.GetJsonPropertyName(vectorProperty.DataModelPropertyName); + var textDataPropertyName = this._propertyReader.GetJsonPropertyName(textDataProperty.DataModelPropertyName); var fields = this._propertyReader.DataPropertyJsonNames; - var query = WeaviateVectorStoreRecordCollectionQueryBuilder.BuildSearchQuery( + var query = WeaviateVectorStoreRecordCollectionQueryBuilder.BuildHybridSearchQuery( vector, + string.Join(" ", keywords), this.CollectionName, vectorPropertyName, this._propertyReader.KeyPropertyName, + textDataPropertyName, s_jsonSerializerOptions, searchOptions, this._propertyReader.JsonPropertyNamesMap, this._propertyReader.VectorPropertyJsonNames, this._propertyReader.DataPropertyJsonNames); + return await this.ExecuteQueryAsync(query, searchOptions.IncludeVectors, WeaviateConstants.HybridScorePropertyName, OperationName, cancellationToken).ConfigureAwait(false); + } + + #region private + + private async Task> ExecuteQueryAsync(string query, bool includeVectors, string scorePropertyName, string operationName, CancellationToken cancellationToken) + { using var request = new WeaviateVectorSearchRequest(query).Build(); var (responseModel, content) = await this.ExecuteRequestWithResponseContentAsync(request, cancellationToken).ConfigureAwait(false); @@ -383,19 +410,19 @@ public virtual async Task> VectorizedSearchAsync x is not null).Select(result => { - var (storageModel, score) = WeaviateVectorStoreCollectionSearchMapping.MapSearchResult(result!); + var (storageModel, score) = WeaviateVectorStoreCollectionSearchMapping.MapSearchResult(result!, scorePropertyName); var record = VectorStoreErrorHandler.RunModelConversion( DatabaseName, this.CollectionName, - OperationName, - () => this._mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = searchOptions.IncludeVectors })); + operationName, + () => this._mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors })); return new VectorSearchResult(record, score); }); @@ -403,8 +430,6 @@ public virtual async Task> VectorizedSearchAsync(mappedResults.ToAsyncEnumerable()); } - #region private - private Task ExecuteRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken) { request.RequestUri = new Uri(this._endpoint, request.RequestUri!); @@ -464,33 +489,6 @@ private async Task RunOperationAsync(string operationName, Func> o } } - /// - /// Get vector property to use for a search by using the storage name for the field name from options - /// if available, and falling back to the first vector property in if not. - /// - /// The vector field name. - /// Thrown if the provided field name is not a valid field name. - private VectorStoreRecordVectorProperty? GetVectorPropertyForSearch(string? vectorFieldName) - { - // If vector property name is provided in options, try to find it in schema or throw an exception. - if (!string.IsNullOrWhiteSpace(vectorFieldName)) - { - // Check vector properties by data model property name. 
- var vectorProperty = this._propertyReader.VectorProperties - .FirstOrDefault(l => l.DataModelPropertyName.Equals(vectorFieldName, StringComparison.Ordinal)); - - if (vectorProperty is not null) - { - return vectorProperty; - } - - throw new InvalidOperationException($"The {typeof(TRecord).FullName} type does not have a vector property named '{vectorFieldName}'."); - } - - // If vector property is not provided in options, return first vector property from schema. - return this._propertyReader.VectorProperty; - } - /// /// Returns custom mapper, generic data model mapper or default record mapper. /// @@ -523,5 +521,19 @@ private IVectorStoreRecordMapper InitializeMapper() s_jsonSerializerOptions); } + private static void VerifyVectorParam(TVector vector) + { + Verify.NotNull(vector); + + var vectorType = vector.GetType(); + + if (!s_supportedVectorTypes.Contains(vectorType)) + { + throw new NotSupportedException( + $"The provided vector type {vectorType.FullName} is not supported by the Weaviate connector. " + + $"Supported types are: {string.Join(", ", s_supportedVectorTypes.Select(l => l.FullName))}"); + } + } + #endregion } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs index 1b38e708ab9f..7ef8907e4969 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs @@ -73,6 +73,70 @@ public static string BuildSearchQuery( """; } + /// + /// Builds Weaviate hybrid search query. + /// More information here: . + /// + public static string BuildHybridSearchQuery( + TVector vector, + string keywords, + string collectionName, + string vectorPropertyName, + string keyPropertyName, + string textPropertyName, + JsonSerializerOptions jsonSerializerOptions, + HybridSearchOptions searchOptions, + IReadOnlyDictionary storagePropertyNames, + IReadOnlyList vectorPropertyStorageNames, + IReadOnlyList dataPropertyStorageNames) + { + var vectorsQuery = searchOptions.IncludeVectors ? + $"vectors {{ {string.Join(" ", vectorPropertyStorageNames)} }}" : + string.Empty; + +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + var filter = searchOptions switch + { + { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), + { OldFilter: VectorSearchFilter legacyFilter } => BuildLegacyFilter( + legacyFilter, + jsonSerializerOptions, + keyPropertyName, + storagePropertyNames), + { Filter: Expression> newFilter } => new WeaviateFilterTranslator().Translate(newFilter, storagePropertyNames), + _ => null + }; +#pragma warning restore CS0618 + + var vectorArray = JsonSerializer.Serialize(vector, jsonSerializerOptions); + + return $$""" + { + Get { + {{collectionName}} ( + limit: {{searchOptions.Top}} + offset: {{searchOptions.Skip}} + {{(filter is null ? 
"" : "where: " + filter)}} + hybrid: { + query: "{{keywords}}" + properties: ["{{textPropertyName}}"] + targetVectors: ["{{vectorPropertyName}}"] + vector: {{vectorArray}} + fusionType: rankedFusion + } + ) { + {{string.Join(" ", dataPropertyStorageNames)}} + {{WeaviateConstants.AdditionalPropertiesPropertyName}} { + {{WeaviateConstants.ReservedKeyPropertyName}} + {{WeaviateConstants.HybridScorePropertyName}} + {{vectorsQuery}} + } + } + } + } + """; + } + #region private #pragma warning disable CS0618 // Type or member is obsolete diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs index 7fa33bbd9967..d41db6897385 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs @@ -569,8 +569,6 @@ public async Task VectorizedSearchThrowsExceptionWithInvalidVectorTypeAsync(obje } [Theory] - [InlineData(null, "TestEmbedding1", 1, 1)] - [InlineData("", "TestEmbedding1", 2, 2)] [InlineData("TestEmbedding1", "TestEmbedding1", 3, 3)] [InlineData("TestEmbedding2", "test_embedding_2", 4, 4)] public async Task VectorizedSearchUsesValidQueryAsync( diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs index 49daf149c6e3..6742bc6ac4c9 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs @@ -472,6 +472,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition) { IncludeVectors = true, OldFilter = filter, + VectorPropertyName = nameof(MultiPropsModel.Vector1), Top = 5, Skip = 2 }); diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionSearchMappingTests.cs index 91c902934d57..35a00c0376fc 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionSearchMappingTests.cs @@ -38,7 +38,7 @@ public void MapSearchResultByDefaultReturnsValidResult() }; // Act - var (storageModel, score) = WeaviateVectorStoreCollectionSearchMapping.MapSearchResult(jsonObject); + var (storageModel, score) = WeaviateVectorStoreCollectionSearchMapping.MapSearchResult(jsonObject, "distance"); // Assert Assert.Equal(0.5, score); diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs new file mode 100644 index 000000000000..8fde779af23c --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs @@ -0,0 +1,67 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Linq.Expressions; + +namespace Microsoft.Extensions.VectorData; + +/// +/// Options for hybrid search when using a dense vector and string keywords to do the search. +/// +public class HybridSearchOptions +{ + /// + /// Gets or sets a search filter to use before doing the hybrid search. 
+ /// +#pragma warning disable CS0618 // Type or member is obsolete + [Obsolete("Use Filter instead")] + public VectorSearchFilter? OldFilter { get; init; } +#pragma warning restore CS0618 // Type or member is obsolete + + /// + /// Gets or sets a search filter to use before doing the vector search. + /// + public Expression>? Filter { get; init; } + + /// + /// Gets or sets the name of the target dense vector property to search on. + /// Use the name of the vector property from your data model or as provided in the record definition. + /// If not provided will look if there is a vector property, and + /// will throw if either none or multiple exist. + /// + public string? VectorPropertyName { get; init; } + + /// + /// Gets or sets the name of the additional target property to do the text/keyword search on. + /// The property must have full text search enabled. + /// Use the name of the data property from your data model or as provided in the record definition. + /// If not provided will look if there is a text property with full text search enabled, and + /// will throw if either none or multiple exist. + /// + public string? AdditionalPropertyName { get; init; } + + /// + /// Gets or sets the maximum number of results to return. + /// + public int Top { get; init; } = 3; + + /// + /// Gets or sets the number of results to skip before returning results, i.e. the index of the first result to return. + /// + public int Skip { get; init; } = 0; + + /// + /// Gets or sets a value indicating whether to include vectors in the retrieval result. + /// + public bool IncludeVectors { get; init; } = false; + + /// + /// Gets or sets a value indicating whether the total count should be included in the results. + /// + /// + /// Default value is false. + /// Not all vector search implementations will support this option in which case the total + /// count will be null even if requested via this option. + /// + public bool IncludeTotalCount { get; init; } = false; +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs new file mode 100644 index 000000000000..53d2e062fcda --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.Extensions.VectorData; + +/// +/// Contains a method for doing a hybrid search using a vector and keywords. +/// +/// The record data model to use for retrieving data from the store. +public interface IKeywordHybridSearch +{ + /// + /// Performs a hybrid search for records that match the given embedding and keywords, after applying the provided filters. + /// + /// The type of the vector. + /// The vector to search the store with. + /// A collection of keywords to search the store with. + /// The options that control the behavior of the search. + /// The to monitor for cancellation requests. The default is . + /// The records found by the hybrid search, including their result scores. + Task> HybridSearchAsync( + TVector vector, + ICollection keywords, + HybridSearchOptions? 
options = default, + CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs index 72b54d263a39..54db3c74ca21 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs @@ -26,7 +26,8 @@ public class VectorSearchOptions /// Use the name of the vector property from your data model or as provided in the record definition. /// /// - /// The default value is the first vector property in the schema. + /// If not provided will check if there is a vector property to use by default, and + /// will throw if either none or multiple exist. /// public string? VectorPropertyName { get; init; } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchConfigConditionAttribute.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchConfigConditionAttribute.cs new file mode 100644 index 000000000000..59fd061adaf0 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchConfigConditionAttribute.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Threading.Tasks; +using SemanticKernel.IntegrationTests.Connectors.Memory.Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureAISearch; + +/// +/// Attribute to use to skip tests if the settings for Azure AI Search is not set. +/// +[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class)] +public sealed class AzureAISearchConfigConditionAttribute : Attribute, ITestCondition +{ + public ValueTask IsMetAsync() + { + var config = AzureAISearchVectorStoreFixture.GetAzureAISearchConfiguration(); + var isMet = config is not null && !string.IsNullOrWhiteSpace(config.ServiceUrl) && !string.IsNullOrWhiteSpace(config.ApiKey); + + return ValueTask.FromResult(isMet); + } + + public string SkipReason + => "Azure AI Search ServiceUrl or ApiKey was not specified in user secrets. Use the following command to set them: dotnet user-secrets set \"AzureAISearch:ServiceUrl\" \"your_service_url\" and dotnet user-secrets set \"AzureAISearch:ApiKey\" \"your_api_key\""; +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchKeywordVectorizedHybridSearchTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchKeywordVectorizedHybridSearchTests.cs new file mode 100644 index 000000000000..d42561ac8ee6 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchKeywordVectorizedHybridSearchTests.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.AzureAISearch; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureAISearch; + +/// +/// Inherits common integration tests that should pass for any . +/// +/// Azure AI Search setup and teardown. 
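Illustrative sketch (not part of the patch): because IKeywordHybridSearch is implemented by individual connectors rather than being part of IVectorStoreRecordCollection, callers can probe a collection for it and fall back to plain vector search when the store has no keyword support. The record type and embedding below are hypothetical.

using Microsoft.Extensions.VectorData;

if (collection is IKeywordHybridSearch<HotelRecord> hybridSearch)
{
    var options = new HybridSearchOptions<HotelRecord>
    {
        Top = 10,
        AdditionalPropertyName = nameof(HotelRecord.Description),
    };

    var hybridResults = await hybridSearch.HybridSearchAsync(descriptionEmbedding, new[] { "pool" }, options);
}
else
{
    var vectorResults = await collection.VectorizedSearchAsync(descriptionEmbedding, new() { Top = 10 });
}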
+[Collection("AzureAISearchVectorStoreCollection")] +[AzureAISearchConfigCondition] +public class AzureAISearchKeywordVectorizedHybridSearchTests(AzureAISearchVectorStoreFixture fixture) : BaseKeywordVectorizedHybridSearchTests +{ + protected override string Key1 => "1"; + protected override string Key2 => "2"; + protected override string Key3 => "3"; + protected override string Key4 => "4"; + protected override int DelayAfterUploadInMilliseconds => 2000; + + protected override IVectorStoreRecordCollection GetTargetRecordCollection(string recordCollectionName, VectorStoreRecordDefinition? vectorStoreRecordDefinition) + { + return new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, recordCollectionName + AzureAISearchVectorStoreFixture.TestIndexPostfix, new() + { + VectorStoreRecordDefinition = vectorStoreRecordDefinition + }); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs index 0c247faeea57..9ec17a64412e 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs @@ -29,8 +29,13 @@ public class AzureAISearchVectorStoreFixture : IAsyncLifetime /// /// Test index name which consists out of "hotels-" and the machine name with any non-alphanumeric characters removed. /// + private readonly string _testIndexName = "hotels-" + TestIndexPostfix; + + /// + /// Gets the test index name postfix that is derived from the local machine name used to avoid clashes between test runs from different callers. + /// #pragma warning disable CA1308 // Normalize strings to uppercase - private readonly string _testIndexName = "hotels-" + new Regex("[^a-zA-Z0-9]").Replace(Environment.MachineName.ToLowerInvariant(), ""); + public static string TestIndexPostfix { get; private set; } = new Regex("[^a-zA-Z0-9]").Replace(Environment.MachineName.ToLowerInvariant(), ""); #pragma warning restore CA1308 // Normalize strings to uppercase /// @@ -43,12 +48,20 @@ public class AzureAISearchVectorStoreFixture : IAsyncLifetime .AddUserSecrets() .Build(); + /// + /// Get the test configuration for Azure AI Search. + /// + public static AzureAISearchConfiguration? GetAzureAISearchConfiguration() + { + return s_configuration.GetRequiredSection("AzureAISearch").Get(); + } + /// /// Initializes a new instance of the class. /// public AzureAISearchVectorStoreFixture() { - var config = s_configuration.GetRequiredSection("AzureAISearch").Get(); + var config = GetAzureAISearchConfiguration(); Assert.NotNull(config); this.Config = config; this.SearchIndexClient = new SearchIndexClient(new Uri(config.ServiceUrl), new AzureKeyCredential(config.ApiKey)); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLConnectionStringSetConditionAttribute.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLConnectionStringSetConditionAttribute.cs new file mode 100644 index 000000000000..ee8f8aca16d3 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLConnectionStringSetConditionAttribute.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Threading.Tasks; +using SemanticKernel.IntegrationTests.Connectors.Memory.Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureCosmosDBNoSQL; + +/// +/// Attribute to use to skip tests if the connection string for CosmosDB NoSQL is not set. +/// +[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class)] +public sealed class AzureCosmosDBNoSQLConnectionStringSetConditionAttribute : Attribute, ITestCondition +{ + public ValueTask IsMetAsync() + { + var isMet = AzureCosmosDBNoSQLVectorStoreFixture.GetConnectionString() is not null; + + return ValueTask.FromResult(isMet); + } + + public string SkipReason + => $"CosmosDB NoSQL connection string was not specified in user secrets. Use the following command to set it: dotnet user-secrets set \"{AzureCosmosDBNoSQLVectorStoreFixture.ConnectionStringKey}\" \"your_connection_string\""; +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKeywordVectorizedHybridSearchTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKeywordVectorizedHybridSearchTests.cs new file mode 100644 index 000000000000..3ce18873790f --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKeywordVectorizedHybridSearchTests.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureCosmosDBNoSQL; + +/// +/// Inherits common integration tests that should pass for any . +/// +[Collection("AzureCosmosDBNoSQLVectorStoreCollection")] +[AzureCosmosDBNoSQLConnectionStringSetCondition] +public class AzureCosmosDBNoSQLKeywordVectorizedHybridSearchTests(AzureCosmosDBNoSQLVectorStoreFixture fixture) : BaseKeywordVectorizedHybridSearchTests +{ + protected override string Key1 => "1"; + protected override string Key2 => "2"; + protected override string Key3 => "3"; + protected override string Key4 => "4"; + protected override int DelayAfterUploadInMilliseconds => 2000; + protected override string? IndexKind { get; } = Microsoft.Extensions.VectorData.IndexKind.Flat; + + protected override IVectorStoreRecordCollection GetTargetRecordCollection(string recordCollectionName, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition) + { + return new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, recordCollectionName, new() + { + VectorStoreRecordDefinition = vectorStoreRecordDefinition + }); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreFixture.cs index 85e5a90c384d..155451ee10d5 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreFixture.cs @@ -11,6 +11,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureCosmosDBNoSQL; public class AzureCosmosDBNoSQLVectorStoreFixture : IAsyncLifetime, IDisposable { + public const string ConnectionStringKey = "AzureCosmosDBNoSQL:ConnectionString"; private const string DatabaseName = "testdb"; private readonly CosmosClient _cosmosClient; @@ -19,21 +20,33 @@ public class AzureCosmosDBNoSQLVectorStoreFixture : IAsyncLifetime, IDisposable public Database? Database { get; private set; } public AzureCosmosDBNoSQLVectorStoreFixture() + { + var connectionString = GetConnectionString(); + var options = new CosmosClientOptions { UseSystemTextJsonSerializerWithOptions = JsonSerializerOptions.Default }; + + this._cosmosClient = new CosmosClient(connectionString, options); + } + + public static string GetConnectionString() { var configuration = new ConfigurationBuilder() .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) .AddJsonFile( path: "testsettings.development.json", - optional: false, + optional: true, reloadOnChange: true ) .AddEnvironmentVariables() + .AddUserSecrets() .Build(); - var connectionString = GetConnectionString(configuration); - var options = new CosmosClientOptions { UseSystemTextJsonSerializerWithOptions = JsonSerializerOptions.Default }; + var settingValue = configuration[ConnectionStringKey]; + if (string.IsNullOrWhiteSpace(settingValue)) + { + throw new ArgumentNullException($"{ConnectionStringKey} setting is not configured"); + } - this._cosmosClient = new CosmosClient(connectionString, options); + return settingValue; } public async Task InitializeAsync() @@ -61,19 +74,4 @@ protected virtual void Dispose(bool disposing) this._cosmosClient.Dispose(); } } - - #region private - - private static string GetConnectionString(IConfigurationRoot configuration) - { - var settingValue = configuration["AzureCosmosDBNoSQL:ConnectionString"]; - if (string.IsNullOrWhiteSpace(settingValue)) - { - throw new ArgumentNullException($"{settingValue} string is not configured"); - } - - return settingValue; - } - - #endregion } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/BaseKeywordVectorizedHybridSearchTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/BaseKeywordVectorizedHybridSearchTests.cs new file mode 100644 index 000000000000..c1c223382774 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/BaseKeywordVectorizedHybridSearchTests.cs @@ -0,0 +1,341 @@ +// Copyright (c) Microsoft. All rights reserved.
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.VectorData; +using SemanticKernel.IntegrationTests.Connectors.Memory.Xunit; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory; + +/// +/// Base class for common integration tests that should pass for any . +/// +/// The type of key to use with the record collection. +public abstract class BaseKeywordVectorizedHybridSearchTests + where TKey : notnull +{ + protected abstract TKey Key1 { get; } + protected abstract TKey Key2 { get; } + protected abstract TKey Key3 { get; } + protected abstract TKey Key4 { get; } + + protected virtual int DelayAfterIndexCreateInMilliseconds { get; } = 0; + + protected virtual int DelayAfterUploadInMilliseconds { get; } = 0; + + protected virtual string? IndexKind { get; } = null; + + protected abstract IVectorStoreRecordCollection GetTargetRecordCollection(string recordCollectionName, VectorStoreRecordDefinition? vectorStoreRecordDefinition); + + [VectorStoreFact] + public async Task SearchShouldReturnExpectedResultsAsync() + { + // Arrange + var sut = this.GetTargetRecordCollection>( + "kwhybrid", + this.KeyWithVectorAndStringRecordDefinition); + + var hybridSearch = sut as IKeywordHybridSearch>; + + try + { + var vector = new ReadOnlyMemory([1, 0, 0, 0]); + await this.CreateCollectionAndAddDataAsync(sut, vector); + + // Act + // All records have the same vector, but the third contains Grapes, so searching for + // Grapes should return the third record first. + var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["Grapes"]); + + // Assert + var results = await searchResult.Results.ToListAsync(); + Assert.Equal(3, results.Count); + + Assert.Equal(this.Key3, results[0].Record.Key); + } + finally + { + // Cleanup + await sut.DeleteCollectionAsync(); + } + } + + [VectorStoreFact] + public async Task SearchWithFilterShouldReturnExpectedResultsAsync() + { + // Arrange + var sut = this.GetTargetRecordCollection>( + "kwfilteredhybrid", + this.KeyWithVectorAndStringRecordDefinition); + + var hybridSearch = sut as IKeywordHybridSearch>; + + try + { + var vector = new ReadOnlyMemory([1, 0, 0, 0]); + await this.CreateCollectionAndAddDataAsync(sut, vector); + + // Act + // All records have the same vector, but the second contains Oranges, however + // adding the filter should limit the results to only the first. +#pragma warning disable CS0618 // Type or member is obsolete + var options = new HybridSearchOptions> + { + OldFilter = new VectorSearchFilter().EqualTo("Code", 1) + }; +#pragma warning restore CS0618 // Type or member is obsolete + var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], options); + + // Assert + var results = await searchResult.Results.ToListAsync(); + Assert.Single(results); + + Assert.Equal(this.Key1, results[0].Record.Key); + } + finally + { + // Cleanup + await sut.DeleteCollectionAsync(); + } + } + + [VectorStoreFact] + public async Task SearchWithTopShouldReturnExpectedResultsAsync() + { + // Arrange + var sut = this.GetTargetRecordCollection>( + "kwtophybrid", + this.KeyWithVectorAndStringRecordDefinition); + + var hybridSearch = sut as IKeywordHybridSearch>; + + try + { + var vector = new ReadOnlyMemory([1, 0, 0, 0]); + await this.CreateCollectionAndAddDataAsync(sut, vector); + + // Act + // All records have the same vector, but the second contains Oranges, so the + // second should be returned first. 
+ var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], new() { Top = 1 }); + + // Assert + var results = await searchResult.Results.ToListAsync(); + Assert.Single(results); + + Assert.Equal(this.Key2, results[0].Record.Key); + } + finally + { + // Cleanup + await sut.DeleteCollectionAsync(); + } + } + + [VectorStoreFact] + public async Task SearchWithSkipShouldReturnExpectedResultsAsync() + { + // Arrange + var sut = this.GetTargetRecordCollection>( + "kwskiphybrid", + this.KeyWithVectorAndStringRecordDefinition); + + var hybridSearch = sut as IKeywordHybridSearch>; + + try + { + var vector = new ReadOnlyMemory([1, 0, 0, 0]); + await this.CreateCollectionAndAddDataAsync(sut, vector); + + // Act + // All records have the same vector, but the first and third contain healthy, + // so when skipping the first two results, we should get the second record. + var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["healthy"], new() { Skip = 2 }); + + // Assert + var results = await searchResult.Results.ToListAsync(); + Assert.Single(results); + + Assert.Equal(this.Key2, results[0].Record.Key); + } + finally + { + // Cleanup + await sut.DeleteCollectionAsync(); + } + } + + [VectorStoreFact] + public async Task SearchWithMultipleKeywordsShouldRankMatchedKeywordsHigherAsync() + { + // Arrange + var sut = this.GetTargetRecordCollection>( + "kwmultikeywordhybrid", + this.KeyWithVectorAndStringRecordDefinition); + + var hybridSearch = sut as IKeywordHybridSearch>; + + try + { + var vector = new ReadOnlyMemory([1, 0, 0, 0]); + await this.CreateCollectionAndAddDataAsync(sut, vector); + + // Act + var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["tangy", "nourishing"]); + + // Assert + var results = await searchResult.Results.ToListAsync(); + Assert.Equal(3, results.Count); + + Assert.True(results[0].Record.Key.Equals(this.Key1) || results[0].Record.Key.Equals(this.Key2)); + Assert.True(results[1].Record.Key.Equals(this.Key1) || results[1].Record.Key.Equals(this.Key2)); + Assert.Equal(this.Key3, results[2].Record.Key); + } + finally + { + // Cleanup + await sut.DeleteCollectionAsync(); + } + } + + [VectorStoreFact] + public async Task SearchWithMultiTextRecordSearchesRequestedFieldAsync() + { + // Arrange + var sut = this.GetTargetRecordCollection>( + "kwmultitexthybrid", + this.MultiSearchStringRecordDefinition); + + var hybridSearch = sut as IKeywordHybridSearch>; + + try + { + var vector = new ReadOnlyMemory([1, 0, 0, 0]); + await this.CreateCollectionAndAddDataAsync(sut, vector); + + // Act + var searchResult1 = await hybridSearch!.HybridSearchAsync(vector, ["Apples"], new() { AdditionalPropertyName = nameof(MultiSearchStringRecord.Text2) }); + var searchResult2 = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], new() { AdditionalPropertyName = nameof(MultiSearchStringRecord.Text2) }); + + // Assert + var results1 = await searchResult1.Results.ToListAsync(); + Assert.Equal(2, results1.Count); + + Assert.Equal(this.Key2, results1[0].Record.Key); + Assert.Equal(this.Key1, results1[1].Record.Key); + + var results2 = await searchResult2.Results.ToListAsync(); + Assert.Equal(2, results2.Count); + + Assert.Equal(this.Key1, results2[0].Record.Key); + Assert.Equal(this.Key2, results2[1].Record.Key); + } + finally + { + // Cleanup + await sut.DeleteCollectionAsync(); + } + } + + private async Task CreateCollectionAndAddDataAsync(IVectorStoreRecordCollection> sut, ReadOnlyMemory vector) + { + await sut.CreateCollectionIfNotExistsAsync(); + await 
Task.Delay(this.DelayAfterIndexCreateInMilliseconds); + + var record1 = new KeyWithVectorAndStringRecord + { + Key = this.Key1, + Text = "Apples are a healthy and nourishing snack", + Vector = vector, + Code = 1 + }; + var record2 = new KeyWithVectorAndStringRecord + { + Key = this.Key2, + Text = "Oranges are tangy and contain vitamin c", + Vector = vector, + Code = 2 + }; + var record3 = new KeyWithVectorAndStringRecord + { + Key = this.Key3, + Text = "Grapes are healthy, sweet and juicy", + Vector = vector, + Code = 3 + }; + + await sut.UpsertBatchAsync([record1, record2, record3]).ToListAsync(); + await Task.Delay(this.DelayAfterUploadInMilliseconds); + } + + private async Task CreateCollectionAndAddDataAsync(IVectorStoreRecordCollection> sut, ReadOnlyMemory vector) + { + await sut.CreateCollectionIfNotExistsAsync(); + await Task.Delay(this.DelayAfterIndexCreateInMilliseconds); + + var record1 = new MultiSearchStringRecord + { + Key = this.Key1, + Text1 = "Apples", + Text2 = "Oranges", + Vector = vector + }; + var record2 = new MultiSearchStringRecord + { + Key = this.Key2, + Text1 = "Oranges", + Text2 = "Apples", + Vector = vector + }; + + await sut.UpsertBatchAsync([record1, record2]).ToListAsync(); + await Task.Delay(this.DelayAfterUploadInMilliseconds); + } + + private VectorStoreRecordDefinition KeyWithVectorAndStringRecordDefinition => new() + { + Properties = new List() + { + new VectorStoreRecordKeyProperty("Key", typeof(TKey)), + new VectorStoreRecordDataProperty("Text", typeof(string)) { IsFullTextSearchable = true }, + new VectorStoreRecordDataProperty("Code", typeof(int)) { IsFilterable = true }, + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { Dimensions = 4, IndexKind = this.IndexKind }, + } + }; + + private sealed class KeyWithVectorAndStringRecord + { + public TRecordKey Key { get; set; } = default!; + + public string Text { get; set; } = string.Empty; + + public int Code { get; set; } + + public ReadOnlyMemory Vector { get; set; } + } + + private VectorStoreRecordDefinition MultiSearchStringRecordDefinition => new() + { + Properties = new List() + { + new VectorStoreRecordKeyProperty("Key", typeof(TKey)), + new VectorStoreRecordDataProperty("Text1", typeof(string)) { IsFullTextSearchable = true }, + new VectorStoreRecordDataProperty("Text2", typeof(string)) { IsFullTextSearchable = true }, + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { Dimensions = 4, IndexKind = this.IndexKind }, + } + }; + + private sealed class MultiSearchStringRecord + { + public TRecordKey Key { get; set; } = default!; + + public string Text1 { get; set; } = string.Empty; + + public string Text2 { get; set; } = string.Empty; + + public ReadOnlyMemory Vector { get; set; } + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBKeywordVectorizedHybridSearchTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBKeywordVectorizedHybridSearchTests.cs new file mode 100644 index 000000000000..c6e326a14f76 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBKeywordVectorizedHybridSearchTests.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.MongoDB; +using SemanticKernel.IntegrationTests.Connectors.Memory; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.MongoDB; + +/// +/// Inherits common integration tests that should pass for any . 
+/// +[Collection("MongoDBVectorStoreCollection")] +public class MongoDBKeywordVectorizedHybridSearchTests(MongoDBVectorStoreFixture fixture) : BaseKeywordVectorizedHybridSearchTests +{ + protected override string Key1 => "1"; + protected override string Key2 => "2"; + protected override string Key3 => "3"; + protected override string Key4 => "4"; + protected override int DelayAfterUploadInMilliseconds => 1000; + + protected override IVectorStoreRecordCollection GetTargetRecordCollection(string recordCollectionName, VectorStoreRecordDefinition? vectorStoreRecordDefinition) + { + return new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, recordCollectionName, new() + { + VectorStoreRecordDefinition = vectorStoreRecordDefinition + }); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantNamedVectorsKeywordVectorizedHybridSearchTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantNamedVectorsKeywordVectorizedHybridSearchTests.cs new file mode 100644 index 000000000000..20fd1097b957 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantNamedVectorsKeywordVectorizedHybridSearchTests.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.Qdrant; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.Qdrant; + +/// +/// Inherits common integration tests that should pass for any . +/// +[Collection("QdrantVectorStoreCollection")] +public class QdrantNamedVectorsKeywordVectorizedHybridSearchTests(QdrantVectorStoreFixture fixture) : BaseKeywordVectorizedHybridSearchTests +{ + protected override ulong Key1 => 1; + protected override ulong Key2 => 2; + protected override ulong Key3 => 3; + protected override ulong Key4 => 4; + + protected override IVectorStoreRecordCollection GetTargetRecordCollection(string recordCollectionName, VectorStoreRecordDefinition? vectorStoreRecordDefinition) + { + return new QdrantVectorStoreRecordCollection(fixture.QdrantClient, recordCollectionName, new() + { + HasNamedVectors = true, + VectorStoreRecordDefinition = vectorStoreRecordDefinition + }); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantSingleVectorKeywordVectorizedHybridSearchTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantSingleVectorKeywordVectorizedHybridSearchTests.cs new file mode 100644 index 000000000000..4579e47b56ad --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantSingleVectorKeywordVectorizedHybridSearchTests.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.Qdrant; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.Qdrant; + +/// +/// Inherits common integration tests that should pass for any . +/// +[Collection("QdrantVectorStoreCollection")] +public class QdrantSingleVectorKeywordVectorizedHybridSearchTests(QdrantVectorStoreFixture fixture) : BaseKeywordVectorizedHybridSearchTests +{ + protected override ulong Key1 => 1; + protected override ulong Key2 => 2; + protected override ulong Key3 => 3; + protected override ulong Key4 => 4; + + protected override IVectorStoreRecordCollection GetTargetRecordCollection(string recordCollectionName, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition) + { + return new QdrantVectorStoreRecordCollection(fixture.QdrantClient, recordCollectionName, new() + { + HasNamedVectors = false, + VectorStoreRecordDefinition = vectorStoreRecordDefinition + }); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateKeywordVectorizedHybridSearchTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateKeywordVectorizedHybridSearchTests.cs new file mode 100644 index 000000000000..2ec29b428053 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateKeywordVectorizedHybridSearchTests.cs @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.Weaviate; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.Weaviate; + +/// +/// Inherits common integration tests that should pass for any . +/// +/// Weaviate setup and teardown. +[Collection("WeaviateVectorStoreCollection")] +public class WeaviateKeywordVectorizedHybridSearchTests(WeaviateVectorStoreFixture fixture) : BaseKeywordVectorizedHybridSearchTests +{ + protected override Guid Key1 => new("11111111-1111-1111-1111-111111111111"); + protected override Guid Key2 => new("22222222-2222-2222-2222-222222222222"); + protected override Guid Key3 => new("33333333-3333-3333-3333-333333333333"); + protected override Guid Key4 => new("44444444-4444-4444-4444-444444444444"); + protected override int DelayAfterUploadInMilliseconds => 1000; + + protected override IVectorStoreRecordCollection GetTargetRecordCollection(string recordCollectionName, VectorStoreRecordDefinition? vectorStoreRecordDefinition) + { + // Weaviate collection names must start with an upper case letter. + var recordCollectionNameChars = recordCollectionName.ToCharArray(); + recordCollectionNameChars[0] = char.ToUpperInvariant(recordCollectionNameChars[0]); + + return new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, new string(recordCollectionNameChars), new() + { + VectorStoreRecordDefinition = vectorStoreRecordDefinition + }); + } +} diff --git a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBConstants.cs b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBConstants.cs index 5fdcbcd91389..7acd839dd0e3 100644 --- a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBConstants.cs +++ b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBConstants.cs @@ -19,6 +19,9 @@ internal static class MongoDBConstants /// Default vector index name. internal const string DefaultVectorIndexName = "vector_index"; + /// Default full text search index name. + internal const string DefaultFullTextSearchIndexName = "full_text_search_index"; + /// Default index kind for vector search. 
internal const string DefaultIndexKind = IndexKind.IvfFlat; diff --git a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs index 48287af8b963..15047fe23b91 100644 --- a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs +++ b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs @@ -337,6 +337,91 @@ public string GetJsonPropertyName(string dataModelPropertyName) return this._jsonPropertyNamesMap.Value[dataModelPropertyName]; } + /// + /// Get the vector property with the provided name if a name is provided, and fall back + /// to the single vector property defined on the schema if not. If no name is provided and there is + /// either no vector property or more than one vector property, an exception will be thrown. + /// + /// The vector property name. + /// Thrown if the provided property name is not a valid vector property name, or if no name is provided and the schema does not contain exactly one vector property. + public VectorStoreRecordVectorProperty GetVectorPropertyOrSingle(string? vectorPropertyName) + { + // If vector property name is provided, try to find it in schema or throw an exception. + if (!string.IsNullOrWhiteSpace(vectorPropertyName)) + { + // Check vector properties by data model property name. + var vectorProperty = this.VectorProperties + .FirstOrDefault(l => l.DataModelPropertyName.Equals(vectorPropertyName, StringComparison.Ordinal)); + + if (vectorProperty is not null) + { + return vectorProperty; + } + + throw new InvalidOperationException($"The {this._dataModelType.FullName} type does not have a vector property named '{vectorPropertyName}'."); + } + + // If vector property name is not provided, return the single vector property from the schema, or throw if there are none or multiple. + if (this.VectorProperty is null) + { + throw new InvalidOperationException($"The {this._dataModelType.FullName} type does not have any vector properties."); + } + + if (this.VectorProperties.Count > 1) + { + throw new InvalidOperationException($"The {this._dataModelType.FullName} type has multiple vector properties, please specify your chosen property via options."); + } + + return this.VectorProperty; + } + + /// + /// Get the text data property with the provided name, verifying that it has full text search indexing enabled, and fall back + /// to the single full text searchable text data property defined on the schema if no name is provided. If no name is provided and there is either no such property or more than one text data property with + /// full text search indexing enabled, an exception will be thrown. + /// + /// The property name. + /// Thrown if the provided property name is not a valid text data property name, or if no name is provided and the schema does not contain exactly one full text searchable text data property. + public VectorStoreRecordDataProperty GetFullTextDataPropertyOrSingle(string? propertyName) + { + // If text data property name is provided, try to find it in schema or throw an exception. + if (!string.IsNullOrWhiteSpace(propertyName)) + { + // Check string data properties by data model property name.
+ var dataProperty = this.DataProperties + .FirstOrDefault(l => l.DataModelPropertyName.Equals(propertyName, StringComparison.Ordinal) && l.PropertyType == typeof(string)); + + if (dataProperty is null) + { + throw new InvalidOperationException($"The {this._dataModelType.FullName} type does not have a text data property named '{propertyName}'."); + } + + if (!dataProperty.IsFullTextSearchable) + { + throw new InvalidOperationException($"The text data property named '{propertyName}' on the {this._dataModelType.FullName} type must have full text search enabled."); + } + + return dataProperty; + } + + // If text data property name is not provided, check if a single full text searchable text property exists or throw otherwise. + var fullTextStringProperties = this.DataProperties + .Where(l => l.PropertyType == typeof(string) && l.IsFullTextSearchable) + .ToList(); + + if (fullTextStringProperties.Count == 0) + { + throw new InvalidOperationException($"The {this._dataModelType.FullName} type does not have any text data properties that have full text search enabled."); + } + + if (fullTextStringProperties.Count > 1) + { + throw new InvalidOperationException($"The {this._dataModelType.FullName} type has multiple text data properties that have full text search enabled, please specify your chosen property via options."); + } + + return fullTextStringProperties[0]; + } + /// /// Check if we have previously loaded the objects from the data model and if not, load them. /// diff --git a/dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml b/dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml deleted file mode 100644 index 6c9084abb2ce..000000000000 --- a/dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml +++ /dev/null @@ -1,74 +0,0 @@ - - - - - CP0002 - M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.DeleteAsync(`0,Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) - lib/net8.0/Microsoft.SemanticKernel.Core.dll - lib/net8.0/Microsoft.SemanticKernel.Core.dll - true - - - CP0002 - M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) - lib/net8.0/Microsoft.SemanticKernel.Core.dll - lib/net8.0/Microsoft.SemanticKernel.Core.dll - true - - - CP0002 - M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.UpsertAsync(`1,Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) - lib/net8.0/Microsoft.SemanticKernel.Core.dll - lib/net8.0/Microsoft.SemanticKernel.Core.dll - true - - - CP0002 - M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) - lib/net8.0/Microsoft.SemanticKernel.Core.dll - lib/net8.0/Microsoft.SemanticKernel.Core.dll - true - - - CP0002 - M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) - lib/net8.0/Microsoft.SemanticKernel.Core.dll - lib/net8.0/Microsoft.SemanticKernel.Core.dll - true - - - CP0002 - M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.DeleteAsync(`0,Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) - 
lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll - lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll - true - - - CP0002 - M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) - lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll - lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll - true - - - CP0002 - M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.UpsertAsync(`1,Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) - lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll - lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll - true - - - CP0002 - M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) - lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll - lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll - true - - - CP0002 - M:Microsoft.SemanticKernel.Data.VolatileVectorStoreRecordCollection`2.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) - lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll - lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll - true - - \ No newline at end of file diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs deleted file mode 100644 index b6e5454dc4d6..000000000000 --- a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs +++ /dev/null @@ -1,380 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.VectorData; - -namespace Microsoft.SemanticKernel.Data; - -/// -/// Service for storing and retrieving vector records, that uses an in memory dictionary as the underlying storage. -/// -/// The data type of the record key. -/// The data model to use for adding, updating and retrieving data from storage. -[Obsolete("This has been replaced by InMemoryVectorStoreRecordCollection in the Microsoft.SemanticKernel.Connectors.InMemory nuget package.")] -#pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class VolatileVectorStoreRecordCollection : IVectorStoreRecordCollection -#pragma warning restore CA1711 // Identifiers should not have incorrect suffix - where TKey : notnull -{ - /// A set of types that vectors on the provided model may have. - private static readonly HashSet s_supportedVectorTypes = - [ - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?), - ]; - - /// The default options for vector search. - private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); - - /// Internal storage for all of the record collections. - private readonly ConcurrentDictionary> _internalCollections; - - /// The data type of each collection, to enforce a single type per collection. - private readonly ConcurrentDictionary _internalCollectionTypes; - - /// Optional configuration options for this class. 
- private readonly VolatileVectorStoreRecordCollectionOptions _options; - - /// The name of the collection that this will access. - private readonly string _collectionName; - - /// A helper to access property information for the current data model and record definition. - private readonly VectorStoreRecordPropertyReader _propertyReader; - - /// A dictionary of vector properties on the provided model, keyed by the property name. - private readonly Dictionary _vectorProperties; - - /// An function to look up vectors from the records. - private readonly VolatileVectorStoreVectorResolver _vectorResolver; - - /// An function to look up keys from the records. - private readonly VolatileVectorStoreKeyResolver _keyResolver; - - /// - /// Initializes a new instance of the class. - /// - /// The name of the collection that this will access. - /// Optional configuration options for this class. - [UnconditionalSuppressMessage("Trimming", "IL2087:Target parameter argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The generic parameter of the source method or type does not have matching annotations.", Justification = "This class is obsolete")] - public VolatileVectorStoreRecordCollection(string collectionName, VolatileVectorStoreRecordCollectionOptions? options = default) - { - // Verify. - Verify.NotNullOrWhiteSpace(collectionName); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); - - // Assign. - this._collectionName = collectionName; - this._internalCollections = new(); - this._internalCollectionTypes = new(); - this._options = options ?? new VolatileVectorStoreRecordCollectionOptions(); - this._propertyReader = new VectorStoreRecordPropertyReader(typeof(TRecord), this._options.VectorStoreRecordDefinition, new() { RequiresAtLeastOneVector = false, SupportsMultipleKeys = false, SupportsMultipleVectors = true }); - - // Validate property types. - this._propertyReader.VerifyVectorProperties(s_supportedVectorTypes); - this._vectorProperties = this._propertyReader.VectorProperties.ToDictionary(x => x.DataModelPropertyName); - - // Assign resolvers. - this._vectorResolver = CreateVectorResolver(this._options.VectorResolver, this._vectorProperties); - this._keyResolver = CreateKeyResolver(this._options.KeyResolver, this._propertyReader.KeyProperty); - } - - /// - /// Initializes a new instance of the class. - /// - /// Internal storage for the record collection. - /// The data type of each collection, to enforce a single type per collection. - /// The name of the collection that this will access. - /// Optional configuration options for this class. - internal VolatileVectorStoreRecordCollection( - ConcurrentDictionary> internalCollection, - ConcurrentDictionary internalCollectionTypes, - string collectionName, - VolatileVectorStoreRecordCollectionOptions? options = default) - : this(collectionName, options) - { - this._internalCollections = internalCollection; - this._internalCollectionTypes = internalCollectionTypes; - } - - /// - public string CollectionName => this._collectionName; - - /// - public Task CollectionExistsAsync(CancellationToken cancellationToken = default) - { - return this._internalCollections.ContainsKey(this._collectionName) ? 
Task.FromResult(true) : Task.FromResult(false); - } - - /// - public Task CreateCollectionAsync(CancellationToken cancellationToken = default) - { - if (!this._internalCollections.ContainsKey(this._collectionName)) - { - this._internalCollections.TryAdd(this._collectionName, new ConcurrentDictionary()); - this._internalCollectionTypes.TryAdd(this._collectionName, typeof(TRecord)); - } - - return Task.CompletedTask; - } - - /// - public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) - { - if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) - { - await this.CreateCollectionAsync(cancellationToken).ConfigureAwait(false); - } - } - - /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) - { - this._internalCollections.TryRemove(this._collectionName, out _); - return Task.CompletedTask; - } - - /// - public Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) - { - var collectionDictionary = this.GetCollectionDictionary(); - - if (collectionDictionary.TryGetValue(key, out var record)) - { - return Task.FromResult((TRecord?)record); - } - - return Task.FromResult(default); - } - - /// - public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - foreach (var key in keys) - { - var record = await this.GetAsync(key, options, cancellationToken).ConfigureAwait(false); - - if (record is not null) - { - yield return record; - } - } - } - - /// - public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) - { - var collectionDictionary = this.GetCollectionDictionary(); - - collectionDictionary.TryRemove(key, out _); - return Task.CompletedTask; - } - - /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) - { - var collectionDictionary = this.GetCollectionDictionary(); - - foreach (var key in keys) - { - collectionDictionary.TryRemove(key, out _); - } - - return Task.CompletedTask; - } - - /// - public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) - { - Verify.NotNull(record); - - var collectionDictionary = this.GetCollectionDictionary(); - - var key = (TKey)this._keyResolver(record)!; - collectionDictionary.AddOrUpdate(key!, record, (key, currentValue) => record); - - return Task.FromResult(key!); - } - - /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - foreach (var record in records) - { - yield return await this.UpsertAsync(record, cancellationToken).ConfigureAwait(false); - } - } - - /// -#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously - Need to satisfy the interface which returns IAsyncEnumerable - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) -#pragma warning restore CS1998 - { - Verify.NotNull(vector); - - if (this._propertyReader.FirstVectorPropertyName is null) - { - throw new InvalidOperationException("The collection does not have any vector fields, so vector search is not possible."); - } - - if (vector is not ReadOnlyMemory floatVector) - { - throw new NotSupportedException($"The provided vector type {vector.GetType().FullName} is not supported by the Volatile Vector Store."); - } - - // Resolve options and get requested vector property or first as default. - var internalOptions = options ?? s_defaultVectorSearchOptions; - - var vectorPropertyName = string.IsNullOrWhiteSpace(internalOptions.VectorPropertyName) ? this._propertyReader.FirstVectorPropertyName : internalOptions.VectorPropertyName; - if (!this._vectorProperties.TryGetValue(vectorPropertyName!, out var vectorProperty)) - { - throw new InvalidOperationException($"The collection does not have a vector field named '{internalOptions.VectorPropertyName}', so vector search is not possible."); - } - - // Filter records using the provided filter before doing the vector comparison. - if (internalOptions.Filter is not null) - { - throw new NotSupportedException("LINQ-based filtering is not supported with VolatileVectorStore, use Microsoft.SemanticKernel.Connectors.InMemory instead"); - } - - var filteredRecords = VolatileVectorStoreCollectionSearchMapping.FilterRecords(internalOptions.OldFilter, this.GetCollectionDictionary().Values); - - // Compare each vector in the filtered results with the provided vector. - var results = filteredRecords.Select((record) => - { - var vectorObject = this._vectorResolver(vectorPropertyName!, (TRecord)record); - if (vectorObject is not ReadOnlyMemory dbVector) - { - return null; - } - - var score = VolatileVectorStoreCollectionSearchMapping.CompareVectors(floatVector.Span, dbVector.Span, vectorProperty.DistanceFunction); - var convertedscore = VolatileVectorStoreCollectionSearchMapping.ConvertScore(score, vectorProperty.DistanceFunction); - return (record, convertedscore); - }); - - // Get the non-null results since any record with a null vector results in a null result. - var nonNullResults = results.Where(x => x.HasValue).Select(x => x!.Value); - - // Calculate the total results count if requested. - long? count = null; - if (internalOptions.IncludeTotalCount) - { - count = nonNullResults.Count(); - } - - // Sort the results appropriately for the selected distance function and get the right page of results . - var sortedScoredResults = VolatileVectorStoreCollectionSearchMapping.ShouldSortDescending(vectorProperty.DistanceFunction) ? - nonNullResults.OrderByDescending(x => x.score) : - nonNullResults.OrderBy(x => x.score); - var resultsPage = sortedScoredResults.Skip(internalOptions.Skip).Take(internalOptions.Top); - - // Build the response. - var vectorSearchResultList = resultsPage.Select(x => new VectorSearchResult((TRecord)x.record, x.score)).ToAsyncEnumerable(); - return new VectorSearchResults(vectorSearchResultList) { TotalCount = count }; - } - - /// - /// Get the collection dictionary from the internal storage, throws if it does not exist. - /// - /// The retrieved collection dictionary. - internal ConcurrentDictionary GetCollectionDictionary() - { - if (!this._internalCollections.TryGetValue(this._collectionName, out var collectionDictionary)) - { - throw new VectorStoreOperationException($"Call to vector store failed. 
Collection '{this._collectionName}' does not exist."); - } - - return collectionDictionary; - } - - /// - /// Pick / create a vector resolver that will read a vector from a record in the store based on the vector name. - /// 1. If an override resolver is provided, use that. - /// 2. If the record type is create a resolver that looks up the vector in its dictionary. - /// 3. Otherwise, create a resolver that assumes the vector is a property directly on the record and use the record definition to determine the name. - /// - /// The override vector resolver if one was provided. - /// A dictionary of vector properties from the record definition. - /// The . - [UnconditionalSuppressMessage("Trimming", "IL2090:'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The generic parameter of the source method or type does not have matching annotations.", Justification = "This class is obsolete")] - private static VolatileVectorStoreVectorResolver CreateVectorResolver(VolatileVectorStoreVectorResolver? overrideVectorResolver, Dictionary vectorProperties) - { - // Custom resolver. - if (overrideVectorResolver is not null) - { - return overrideVectorResolver; - } - - // Generic data model resolver. - if (typeof(TRecord).IsGenericType && typeof(TRecord).GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) - { - return (vectorName, record) => - { - var genericDataModelRecord = record as VectorStoreGenericDataModel; - var vectorsDictionary = genericDataModelRecord!.Vectors; - if (vectorsDictionary != null && vectorsDictionary.TryGetValue(vectorName, out var vector)) - { - return vector; - } - - throw new InvalidOperationException($"The collection does not have a vector field named '{vectorName}', so vector search is not possible."); - }; - } - - // Default resolver. - var vectorPropertiesInfo = vectorProperties.Values - .Select(x => x.DataModelPropertyName) - .Select(x => typeof(TRecord).GetProperty(x) ?? throw new ArgumentException($"Vector property '{x}' was not found on {typeof(TRecord).Name}")) - .ToDictionary(x => x.Name); - - return (vectorName, record) => - { - if (vectorPropertiesInfo.TryGetValue(vectorName, out var vectorPropertyInfo)) - { - return vectorPropertyInfo.GetValue(record); - } - - throw new InvalidOperationException($"The collection does not have a vector field named '{vectorName}', so vector search is not possible."); - }; - } - - /// - /// Pick / create a key resolver that will read a key from a record in the store. - /// 1. If an override resolver is provided, use that. - /// 2. If the record type is create a resolver that reads the Key property from it. - /// 3. Otherwise, create a resolver that assumes the key is a property directly on the record and use the record definition to determine the name. - /// - /// The override key resolver if one was provided. - /// They key property from the record definition. - /// The . - [UnconditionalSuppressMessage("Trimming", "IL2090:'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The generic parameter of the source method or type does not have matching annotations.", Justification = "This class is obsolete")] - private static VolatileVectorStoreKeyResolver CreateKeyResolver(VolatileVectorStoreKeyResolver? overrideKeyResolver, VectorStoreRecordKeyProperty keyProperty) - { - // Custom resolver. - if (overrideKeyResolver is not null) - { - return overrideKeyResolver; - } - - // Generic data model resolver. 
- if (typeof(TRecord).IsGenericType && typeof(TRecord).GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) - { - return (record) => - { - var genericDataModelRecord = record as VectorStoreGenericDataModel; - return genericDataModelRecord!.Key; - }; - } - - // Default resolver. - var keyPropertyInfo = typeof(TRecord).GetProperty(keyProperty.DataModelPropertyName) ?? throw new ArgumentException($"Key property {keyProperty.DataModelPropertyName} not found on {typeof(TRecord).Name}"); - return (record) => (TKey)keyPropertyInfo.GetValue(record)!; - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs index a3b26a313b10..2938b2855800 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs @@ -430,6 +430,95 @@ public void GetJsonPropertyNameReturnsJsonWithFallback(Type type, VectorStoreRec Assert.Equal("json_data2", sut.GetJsonPropertyName("Data2")); } + [Theory] + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void GetVectorPropertyOrSingleReturnsRequestedVectorAndThrowsForInvalidVector(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + + // Act & Assert. + Assert.Equal("Vector2", sut.GetVectorPropertyOrSingle("Vector2").DataModelPropertyName); + Assert.Throws(() => sut.GetVectorPropertyOrSingle("DoesNotExist")); + } + + [Theory] + [MemberData(nameof(NoVectorsTypeAndDefinitionCombos))] + public void GetVectorPropertyOrSingleThrowsForMultipleVectors(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + + // Act & Assert. + Assert.Throws(() => sut.GetVectorPropertyOrSingle(null)); + } + + [Theory] + [MemberData(nameof(NoVectorsTypeAndDefinitionCombos))] + public void GetVectorPropertyOrSingleThrowsForNoVectors(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + + // Act & Assert. + Assert.Throws(() => sut.GetVectorPropertyOrSingle(null)); + } + + [Theory] + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void GetFullTextDataPropertyOrOnlyReturnsRequestedPropOrOnlyTextDataPropAndThrowsForInvalidProp(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + + // Act & Assert. + Assert.Equal("Data1", sut.GetFullTextDataPropertyOrSingle("Data1").DataModelPropertyName); + Assert.Equal("Data1", sut.GetFullTextDataPropertyOrSingle(null).DataModelPropertyName); + Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle("DoesNotExist")); + } + + [Theory] + [MemberData(nameof(NoVectorsTypeAndDefinitionCombos))] + public void GetFullTextDataPropertyOrOnlyThrowsForNoTextDataProps(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + + // Act & Assert. + Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle(null)); + } + + [Theory] + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void GetFullTextDataPropertyOrOnlyThrowsForNonFullTextSearchProp(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. 
+ var sut = new VectorStoreRecordPropertyReader(type, definition, null); + + // Act & Assert. + Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle("Data2")); + } + + [Fact] + public void GetFullTextDataPropertyOrOnlyThrowsForMultipleMatchingProps() + { + // Arrange. + var properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("Data1", typeof(string)) { IsFullTextSearchable = true }, + new VectorStoreRecordDataProperty("Data2", typeof(string)) { IsFullTextSearchable = true } + }; + var definition = new VectorStoreRecordDefinition + { + Properties = properties + }; + var sut = new VectorStoreRecordPropertyReader(typeof(object), definition, null); + + // Act & Assert. + Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle(null)); + } + public static IEnumerable NoKeyTypeAndDefinitionCombos() { yield return new object?[] { typeof(NoKeyModel), s_noKeyDefinition }; diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs deleted file mode 100644 index b93c00952705..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VolatileVectorStoreRecordCollectionTests.cs +++ /dev/null @@ -1,577 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Data; -using Xunit; - -namespace SemanticKernel.UnitTests.Data; - -/// -/// Contains tests for the class. -/// -[Obsolete("The VolatileVectorStoreRecordCollection is obsolete so these tests are as well.")] -public class VolatileVectorStoreRecordCollectionTests -{ - private const string TestCollectionName = "testcollection"; - private const string TestRecordKey1 = "testid1"; - private const string TestRecordKey2 = "testid2"; - private const int TestRecordIntKey1 = 1; - private const int TestRecordIntKey2 = 2; - - private readonly CancellationToken _testCancellationToken = new(false); - - private readonly ConcurrentDictionary> _collectionStore; - private readonly ConcurrentDictionary _collectionStoreTypes; - - public VolatileVectorStoreRecordCollectionTests() - { - this._collectionStore = new(); - this._collectionStoreTypes = new(); - } - - [Theory] - [InlineData(TestCollectionName, true)] - [InlineData("nonexistentcollection", false)] - public async Task CollectionExistsReturnsCollectionStateAsync(string collectionName, bool expectedExists) - { - // Arrange - var collection = new ConcurrentDictionary(); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = new VolatileVectorStoreRecordCollection>( - this._collectionStore, - this._collectionStoreTypes, - collectionName); - - // Act - var actual = await sut.CollectionExistsAsync(this._testCancellationToken); - - // Assert - Assert.Equal(expectedExists, actual); - } - - [Fact] - public async Task CanCreateCollectionAsync() - { - // Arrange - var sut = this.CreateRecordCollection(false); - - // Act - await sut.CreateCollectionAsync(this._testCancellationToken); - - // Assert - Assert.True(this._collectionStore.ContainsKey(TestCollectionName)); - } - - [Fact] - public async Task DeleteCollectionRemovesCollectionFromDictionaryAsync() - { - // Arrange - var collection = new ConcurrentDictionary(); - this._collectionStore.TryAdd(TestCollectionName, 
collection); - - var sut = this.CreateRecordCollection(false); - - // Act - await sut.DeleteCollectionAsync(this._testCancellationToken); - - // Assert - Assert.Empty(this._collectionStore); - } - - [Theory] - [InlineData(true, TestRecordKey1)] - [InlineData(true, TestRecordIntKey1)] - [InlineData(false, TestRecordKey1)] - [InlineData(false, TestRecordIntKey1)] - public async Task CanGetRecordWithVectorsAsync(bool useDefinition, TKey testKey) - where TKey : notnull - { - // Arrange - var record = CreateModel(testKey, withVectors: true); - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey!, record); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var actual = await sut.GetAsync( - testKey, - new() - { - IncludeVectors = true - }, - this._testCancellationToken); - - // Assert - var expectedArgs = new object[] { TestRecordKey1 }; - - Assert.NotNull(actual); - Assert.Equal(testKey, actual.Key); - Assert.Equal($"data {testKey}", actual.Data); - Assert.Equal(new float[] { 1, 2, 3, 4 }, actual.Vector!.Value.ToArray()); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2)] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] - [InlineData(false, TestRecordKey1, TestRecordKey2)] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] - public async Task CanGetManyRecordsWithVectorsAsync(bool useDefinition, TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true); - var record2 = CreateModel(testKey2, withVectors: true); - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1!, record1); - collection.TryAdd(testKey2!, record2); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var actual = await sut.GetBatchAsync( - [testKey1, testKey2], - new() - { - IncludeVectors = true - }, - this._testCancellationToken).ToListAsync(); - - // Assert - Assert.NotNull(actual); - Assert.Equal(2, actual.Count); - Assert.Equal(testKey1, actual[0].Key); - Assert.Equal($"data {testKey1}", actual[0].Data); - Assert.Equal(testKey2, actual[1].Key); - Assert.Equal($"data {testKey2}", actual[1].Data); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2)] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] - [InlineData(false, TestRecordKey1, TestRecordKey2)] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] - public async Task CanDeleteRecordAsync(bool useDefinition, TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true); - var record2 = CreateModel(testKey2, withVectors: true); - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1, record1); - collection.TryAdd(testKey2, record2); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - await sut.DeleteAsync( - testKey1, - cancellationToken: this._testCancellationToken); - - // Assert - Assert.False(collection.ContainsKey(testKey1)); - Assert.True(collection.ContainsKey(testKey2)); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2)] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] - [InlineData(false, TestRecordKey1, TestRecordKey2)] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] - public async Task 
CanDeleteManyRecordsWithVectorsAsync(bool useDefinition, TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true); - var record2 = CreateModel(testKey2, withVectors: true); - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1, record1); - collection.TryAdd(testKey2, record2); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - await sut.DeleteBatchAsync( - [testKey1, testKey2], - cancellationToken: this._testCancellationToken); - - // Assert - Assert.False(collection.ContainsKey(testKey1)); - Assert.False(collection.ContainsKey(testKey2)); - } - - [Theory] - [InlineData(true, TestRecordKey1)] - [InlineData(true, TestRecordIntKey1)] - [InlineData(false, TestRecordKey1)] - [InlineData(false, TestRecordIntKey1)] - public async Task CanUpsertRecordAsync(bool useDefinition, TKey testKey1) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true); - var collection = new ConcurrentDictionary(); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var upsertResult = await sut.UpsertAsync( - record1, - cancellationToken: this._testCancellationToken); - - // Assert - Assert.Equal(testKey1, upsertResult); - Assert.True(collection.ContainsKey(testKey1)); - Assert.IsType>(collection[testKey1]); - Assert.Equal($"data {testKey1}", (collection[testKey1] as SinglePropsModel)!.Data); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2)] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] - [InlineData(false, TestRecordKey1, TestRecordKey2)] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] - public async Task CanUpsertManyRecordsAsync(bool useDefinition, TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true); - var record2 = CreateModel(testKey2, withVectors: true); - - var collection = new ConcurrentDictionary(); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var actual = await sut.UpsertBatchAsync( - [record1, record2], - cancellationToken: this._testCancellationToken).ToListAsync(); - - // Assert - Assert.NotNull(actual); - Assert.Equal(2, actual.Count); - Assert.Equal(testKey1, actual[0]); - Assert.Equal(testKey2, actual[1]); - - Assert.True(collection.ContainsKey(testKey1)); - Assert.IsType>(collection[testKey1]); - Assert.Equal($"data {testKey1}", (collection[testKey1] as SinglePropsModel)!.Data); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2)] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] - [InlineData(false, TestRecordKey1, TestRecordKey2)] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] - public async Task CanSearchWithVectorAsync(bool useDefinition, TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true, new float[] { 1, 1, 1, 1 }); - var record2 = CreateModel(testKey2, withVectors: true, new float[] { -1, -1, -1, -1 }); - - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1, record1); - collection.TryAdd(testKey2, record2); - - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var actual = await 
sut.VectorizedSearchAsync( - new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new() { IncludeVectors = true }, - this._testCancellationToken); - - // Assert - Assert.NotNull(actual); - Assert.Null(actual.TotalCount); - var actualResults = await actual.Results.ToListAsync(); - Assert.Equal(2, actualResults.Count); - Assert.Equal(testKey1, actualResults[0].Record.Key); - Assert.Equal($"data {testKey1}", actualResults[0].Record.Data); - Assert.Equal(1, actualResults[0].Score); - Assert.Equal(testKey2, actualResults[1].Record.Key); - Assert.Equal($"data {testKey2}", actualResults[1].Record.Data); - Assert.Equal(-1, actualResults[1].Score); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2, "Equality")] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2, "Equality")] - [InlineData(false, TestRecordKey1, TestRecordKey2, "Equality")] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2, "Equality")] - [InlineData(true, TestRecordKey1, TestRecordKey2, "TagListContains")] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2, "TagListContains")] - [InlineData(false, TestRecordKey1, TestRecordKey2, "TagListContains")] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2, "TagListContains")] - public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, TKey testKey1, TKey testKey2, string filterType) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true, new float[] { 1, 1, 1, 1 }); - var record2 = CreateModel(testKey2, withVectors: true, new float[] { -1, -1, -1, -1 }); - - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1, record1); - collection.TryAdd(testKey2, record2); - - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var filter = filterType == "Equality" ? 
new VectorSearchFilter().EqualTo("Data", $"data {testKey2}") : new VectorSearchFilter().AnyTagEqualTo("Tags", $"tag {testKey2}"); - var actual = await sut.VectorizedSearchAsync( - new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new() { IncludeVectors = true, OldFilter = filter, IncludeTotalCount = true }, - this._testCancellationToken); - - // Assert - Assert.NotNull(actual); - Assert.Equal(1, actual.TotalCount); - var actualResults = await actual.Results.ToListAsync(); - Assert.Single(actualResults); - Assert.Equal(testKey2, actualResults[0].Record.Key); - Assert.Equal($"data {testKey2}", actualResults[0].Record.Data); - Assert.Equal(-1, actualResults[0].Score); - } - - [Theory] - [InlineData(DistanceFunction.CosineSimilarity, 1, -1)] - [InlineData(DistanceFunction.CosineDistance, 0, 2)] - [InlineData(DistanceFunction.DotProductSimilarity, 4, -4)] - [InlineData(DistanceFunction.EuclideanDistance, 0, 4)] - public async Task CanSearchWithDifferentDistanceFunctionsAsync(string distanceFunction, double expectedScoreResult1, double expectedScoreResult2) - { - // Arrange - var record1 = CreateModel(TestRecordKey1, withVectors: true, new float[] { 1, 1, 1, 1 }); - var record2 = CreateModel(TestRecordKey2, withVectors: true, new float[] { -1, -1, -1, -1 }); - - var collection = new ConcurrentDictionary(); - collection.TryAdd(TestRecordKey1, record1); - collection.TryAdd(TestRecordKey2, record2); - - this._collectionStore.TryAdd(TestCollectionName, collection); - - VectorStoreRecordDefinition singlePropsDefinition = new() - { - Properties = - [ - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("Data", typeof(string)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { DistanceFunction = distanceFunction } - ] - }; - - var sut = new VolatileVectorStoreRecordCollection>( - this._collectionStore, - this._collectionStoreTypes, - TestCollectionName, - new() - { - VectorStoreRecordDefinition = singlePropsDefinition - }); - - // Act - var actual = await sut.VectorizedSearchAsync( - new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new() { IncludeVectors = true }, - this._testCancellationToken); - - // Assert - Assert.NotNull(actual); - var actualResults = await actual.Results.ToListAsync(); - Assert.Equal(2, actualResults.Count); - Assert.Equal(TestRecordKey1, actualResults[0].Record.Key); - Assert.Equal($"data {TestRecordKey1}", actualResults[0].Record.Data); - Assert.Equal(expectedScoreResult1, actualResults[0].Score); - Assert.Equal(TestRecordKey2, actualResults[1].Record.Key); - Assert.Equal($"data {TestRecordKey2}", actualResults[1].Record.Data); - Assert.Equal(expectedScoreResult2, actualResults[1].Score); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public async Task CanSearchManyRecordsAsync(bool useDefinition) - { - // Arrange - var collection = new ConcurrentDictionary(); - for (int i = 0; i < 1000; i++) - { - if (i <= 14) - { - collection.TryAdd(i, CreateModel(i, withVectors: true, new float[] { 1, 1, 1, 1 })); - } - else - { - collection.TryAdd(i, CreateModel(i, withVectors: true, new float[] { -1, -1, -1, -1 })); - } - } - - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var actual = await sut.VectorizedSearchAsync( - new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new() { IncludeVectors = true, Top = 10, Skip = 10, IncludeTotalCount = true }, - this._testCancellationToken); - - // Assert - Assert.NotNull(actual); - 
Assert.Equal(1000, actual.TotalCount); - - // Assert that top was respected - var actualResults = await actual.Results.ToListAsync(); - Assert.Equal(10, actualResults.Count); - var actualIds = actualResults.Select(r => r.Record.Key).ToList(); - for (int i = 0; i < 10; i++) - { - // Assert that skip was respected - Assert.Contains(i + 10, actualIds); - if (i <= 4) - { - Assert.Equal(1, actualResults[i].Score); - } - else - { - Assert.Equal(-1, actualResults[i].Score); - } - } - } - - [Theory] - [InlineData(TestRecordKey1, TestRecordKey2)] - [InlineData(TestRecordIntKey1, TestRecordIntKey2)] - public async Task ItCanSearchUsingTheGenericDataModelAsync(TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = new VectorStoreGenericDataModel(testKey1) - { - Data = new Dictionary - { - ["Data"] = $"data {testKey1}", - ["Tags"] = new List { "default tag", "tag " + testKey1 } - }, - Vectors = new Dictionary - { - ["Vector"] = new ReadOnlyMemory([1, 1, 1, 1]) - } - }; - var record2 = new VectorStoreGenericDataModel(testKey2) - { - Data = new Dictionary - { - ["Data"] = $"data {testKey2}", - ["Tags"] = new List { "default tag", "tag " + testKey2 } - }, - Vectors = new Dictionary - { - ["Vector"] = new ReadOnlyMemory([-1, -1, -1, -1]) - } - }; - - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1, record1); - collection.TryAdd(testKey2, record2); - - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = new VolatileVectorStoreRecordCollection>( - this._collectionStore, - this._collectionStoreTypes, - TestCollectionName, - new() - { - VectorStoreRecordDefinition = this._singlePropsDefinition - }); - - // Act - var actual = await sut.VectorizedSearchAsync( - new ReadOnlyMemory([1, 1, 1, 1]), - new() { IncludeVectors = true, VectorPropertyName = "Vector" }, - this._testCancellationToken); - - // Assert - Assert.NotNull(actual); - var actualResults = await actual.Results.ToListAsync(); - Assert.Equal(2, actualResults.Count); - Assert.Equal(testKey1, actualResults[0].Record.Key); - Assert.Equal($"data {testKey1}", actualResults[0].Record.Data["Data"]); - Assert.Equal(1, actualResults[0].Score); - Assert.Equal(testKey2, actualResults[1].Record.Key); - Assert.Equal($"data {testKey2}", actualResults[1].Record.Data["Data"]); - Assert.Equal(-1, actualResults[1].Score); - } - - private static SinglePropsModel CreateModel(TKey key, bool withVectors, float[]? vector = null) - { - return new SinglePropsModel - { - Key = key, - Data = "data " + key, - Tags = new List { "default tag", "tag " + key }, - Vector = vector ?? (withVectors ? new float[] { 1, 2, 3, 4 } : null), - NotAnnotated = null, - }; - } - - private VolatileVectorStoreRecordCollection> CreateRecordCollection(bool useDefinition) - where TKey : notnull - { - return new VolatileVectorStoreRecordCollection>( - this._collectionStore, - this._collectionStoreTypes, - TestCollectionName, - new() - { - VectorStoreRecordDefinition = useDefinition ? this._singlePropsDefinition : null - }); - } - - private readonly VectorStoreRecordDefinition _singlePropsDefinition = new() - { - Properties = - [ - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("Tags", typeof(List)) { IsFilterable = true }, - new VectorStoreRecordDataProperty("Data", typeof(string)) { IsFilterable = true }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) - ] - }; - - public sealed class SinglePropsModel - { - [VectorStoreRecordKey] - public TKey? 
Key { get; set; }
-
-        [VectorStoreRecordData(IsFilterable = true)]
-        public List<string> Tags { get; set; } = new List<string>();
-
-        [VectorStoreRecordData(IsFilterable = true)]
-        public string Data { get; set; } = string.Empty;
-
-        [VectorStoreRecordVector]
-        public ReadOnlyMemory<float>? Vector { get; set; }
-
-        public string? NotAnnotated { get; set; }
-    }
-}

From 7aa7d29720b84eb86020ff9ac7c0c32495848433 Mon Sep 17 00:00:00 2001
From: Adam Sitnik
Date: Fri, 7 Mar 2025 18:26:05 +0100
Subject: [PATCH 09/63] .Net: IVectorStore implementation for Azure SQL (#10623)

fixes #10416

---------

Co-authored-by: westey <164392973+westey-m@users.noreply.github.com>
Co-authored-by: Shay Rojansky
---
 dotnet/SK-dotnet.sln | 10 +
 .../SqlFilterTranslator.cs | 326 +++++++++++
 .../Connectors.Memory.Postgres.csproj | 4 +
 ...PostgresVectorStoreCollectionSqlBuilder.cs | 3 +-
 .../PostgresFilterTranslator.cs | 323 +----------
 ...PostgresVectorStoreCollectionSqlBuilder.cs | 23 +-
 .../PostgresVectorStoreDbClient.cs | 20 +-
 .../PostgresVectorStoreRecordCollection.cs | 13 +-
 .../Connectors.Memory.SqlServer.csproj | 10 +-
 .../ExceptionWrapper.cs | 65 +++
 .../GenericRecordMapper.cs | 93 +++
 .../RecordMapper.cs | 99 ++++
 .../SqlDataReaderDictionary.cs | 148 +++++
 .../SqlServerClient.cs | 26 +-
 .../SqlServerCommandBuilder.cs | 547 ++++++++++++++++++
 .../SqlServerConstants.cs | 46 ++
 .../SqlServerFilterTranslator.cs | 110 ++++
 .../SqlServerVectorStore.cs | 67 +++
 .../SqlServerVectorStoreOptions.cs | 16 +
 .../SqlServerVectorStoreRecordCollection.cs | 369 ++++++++++++
 ...erverVectorStoreRecordCollectionOptions.cs | 35 ++
 .../Connectors.Memory.Sqlite.csproj | 4 +
 .../SqliteFilterTranslator.cs | 356 ++----------
 .../SqliteVectorStoreRecordCollection.cs | 5 +-
 ...resVectorStoreCollectionSqlBuilderTests.cs | 30 +-
 .../VectorStoreRecordKeyProperty.cs | 5 +
 .../VectorSearch/VectorSearchOptions.cs | 32 +-
 .../IntegrationTests/IntegrationTests.csproj | 2 +-
 .../VectorStoreRecordPropertyVerification.cs | 5 +-
 .../CRUD/PostgresBatchConformanceTests.cs | 12 +
 ...ostgresGenericDataModelConformanceTests.cs | 12 +
 .../Support/PostgresFixture.cs | 10 +
 ...orSearchDistanceFunctionComplianceTests.cs | 16 +
 ...rchDistanceFunctionComplianceTests_Hnsw.cs | 10 +
 .../CRUD/SqlServerBatchConformanceTests.cs | 12 +
 ...lServerGenericDataModelConformanceTests.cs | 12 +
 .../Filter/SqlServerBasicFilterTests.cs | 70 +++
 .../Properties/AssemblyAttributes.cs | 3 +
 .../SqlServerCommandBuilderTests.cs | 367 ++++++++++++
 .../SqlServerIntegrationTests.csproj | 47 ++
 .../SqlServerMemoryStoreTests.cs | 41 +-
 .../SqlServerVectorStoreTests.cs | 503 ++++++++++++++++
 ...ServerConnectionStringRequiredAttribute.cs | 18 +
 .../Support/SqlServerFixture.cs | 10 +
 .../Support/SqlServerTestEnvironment.cs | 25 +
 .../Support/SqlServerTestStore.cs | 37 ++
 ...orSearchDistanceFunctionComplianceTests.cs | 21 +
 ...rchDistanceFunctionComplianceTests_Hnsw.cs | 31 +
 .../CRUD/BatchConformanceTests.cs | 69 +++
 .../CRUD/ConformanceTestsBase.cs | 34 ++
 .../CRUD/GenericDataModelConformanceTests.cs | 66 +++
 .../Filter/BasicFilterTests.cs | 12 +
 .../Models/SimpleModel.cs | 25 +
 ...orSearchDistanceFunctionComplianceTests.cs | 155 +++++
 54 files changed, 3717 insertions(+), 693 deletions(-)
 create mode 100644 dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs
 create mode 100644 dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs
 create mode 100644 dotnet/src/Connectors/Connectors.Memory.SqlServer/GenericRecordMapper.cs
 create mode 100644 dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs
 create mode 100644 dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlDataReaderDictionary.cs
 create mode 100644 dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs
 create mode 100644 dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs
 create mode 100644 dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs
 create mode 100644 dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs
 create mode 100644 dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreOptions.cs
 create mode 100644 dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs
 create mode 100644 dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresBatchConformanceTests.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresGenericDataModelConformanceTests.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresFixture.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/VectorSearch/PostgresVectorSearchDistanceFunctionComplianceTests.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/VectorSearch/PostgresVectorSearchDistanceFunctionComplianceTests_Hnsw.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerGenericDataModelConformanceTests.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicFilterTests.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Properties/AssemblyAttributes.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj
 rename dotnet/src/{IntegrationTests/Connectors/Memory/SqlServer => VectorDataIntegrationTests/SqlServerIntegrationTests}/SqlServerMemoryStoreTests.cs (94%)
 create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerConnectionStringRequiredAttribute.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerFixture.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestEnvironment.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestStore.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/VectorSearch/SqlServerVectorSearchDistanceFunctionComplianceTests.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/VectorSearch/SqlServerVectorSearchDistanceFunctionComplianceTests_Hnsw.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/ConformanceTestsBase.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/GenericDataModelConformanceTests.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Models/SimpleModel.cs
 create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs

diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln
index 6d0f08aa9b5b..bad51cba9c8e 100644
--- a/dotnet/SK-dotnet.sln
+++ b/dotnet/SK-dotnet.sln
@@ -462,6 +462,7 @@ EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CosmosMongoDBIntegrationTests", "src\VectorDataIntegrationTests\CosmosMongoDBIntegrationTests\CosmosMongoDBIntegrationTests.csproj", "{11DFBF14-6FBA-41F0-B7F3-A288952D6FDB}"
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AzureAISearchIntegrationTests", "src\VectorDataIntegrationTests\AzureAISearchIntegrationTests\AzureAISearchIntegrationTests.csproj", "{06181F0F-A375-43AE-B45F-73CBCFC30C14}"
+EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Agents.AzureAI", "src\Agents\AzureAI\Agents.AzureAI.csproj", "{EA35F1B5-9148-4189-BE34-5E00AED56D65}"
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Plugins.AI", "src\Plugins\Plugins.AI\Plugins.AI.csproj", "{0C64EC81-8116-4388-87AD-BA14D4B59974}"
 EndProject
@@ -491,6 +492,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Agents.Bedrock", "src\Agent
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ModelContextProtocol", "samples\Demos\ModelContextProtocol\ModelContextProtocol.csproj", "{B16AC373-3DA8-4505-9510-110347CD635D}"
 EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SqlServerIntegrationTests", "src\VectorDataIntegrationTests\SqlServerIntegrationTests\SqlServerIntegrationTests.csproj", "{A5E6193C-8431-4C6E-B674-682CB41EAA0C}"
+EndProject
 Global
 GlobalSection(SolutionConfigurationPlatforms) = preSolution
 Debug|Any CPU = Debug|Any CPU
@@ -1356,6 +1359,12 @@ Global
 {B16AC373-3DA8-4505-9510-110347CD635D}.Publish|Any CPU.Build.0 = Debug|Any CPU
 {B16AC373-3DA8-4505-9510-110347CD635D}.Release|Any CPU.ActiveCfg = Release|Any CPU
 {B16AC373-3DA8-4505-9510-110347CD635D}.Release|Any CPU.Build.0 = Release|Any CPU
+ {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
+ {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Publish|Any CPU.Build.0 = Debug|Any CPU
+ {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Release|Any CPU.Build.0 = Release|Any CPU
 EndGlobalSection
 GlobalSection(SolutionProperties) = preSolution
 HideSolutionNode = FALSE
@@ -1541,6 +1550,7 @@ Global
 {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4}
 {8C658E1E-83C8-4127-B8BF-27A638A45DDD} = {6823CD5E-2ABE-41EB-B865-F86EC13F0CF9}
 {B16AC373-3DA8-4505-9510-110347CD635D} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263}
+ {A5E6193C-8431-4C6E-B674-682CB41EAA0C} = {4F381919-F1BE-47D8-8558-3187ED04A84F}
 EndGlobalSection
 GlobalSection(ExtensibilityGlobals) = postSolution
 SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83}
diff --git a/dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs
new file mode 100644
index 000000000000..cad9bd1048c2
--- /dev/null
+++ 
b/dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs @@ -0,0 +1,326 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Text; + +namespace Microsoft.SemanticKernel.Connectors; + +internal abstract class SqlFilterTranslator +{ + private readonly IReadOnlyDictionary _storagePropertyNames; + private readonly LambdaExpression _lambdaExpression; + private readonly ParameterExpression _recordParameter; + protected readonly StringBuilder _sql; + + internal SqlFilterTranslator( + IReadOnlyDictionary storagePropertyNames, + LambdaExpression lambdaExpression, + StringBuilder? sql = null) + { + this._storagePropertyNames = storagePropertyNames; + this._lambdaExpression = lambdaExpression; + Debug.Assert(lambdaExpression.Parameters.Count == 1); + this._recordParameter = lambdaExpression.Parameters[0]; + this._sql = sql ?? new(); + } + + internal StringBuilder Clause => this._sql; + + internal void Translate(bool appendWhere) + { + if (appendWhere) + { + this._sql.Append("WHERE "); + } + + this.Translate(this._lambdaExpression.Body, null); + } + + protected void Translate(Expression? node, Expression? parent) + { + switch (node) + { + case BinaryExpression binary: + this.TranslateBinary(binary); + return; + + case ConstantExpression constant: + this.TranslateConstant(constant.Value); + return; + + case MemberExpression member: + this.TranslateMember(member, parent); + return; + + case MethodCallExpression methodCall: + this.TranslateMethodCall(methodCall); + return; + + case UnaryExpression unary: + this.TranslateUnary(unary); + return; + + default: + throw new NotSupportedException("Unsupported NodeType in filter: " + node?.NodeType); + } + } + + protected void TranslateBinary(BinaryExpression binary) + { + // Special handling for null comparisons + switch (binary.NodeType) + { + case ExpressionType.Equal when IsNull(binary.Right): + this._sql.Append('('); + this.Translate(binary.Left, binary); + this._sql.Append(" IS NULL)"); + return; + case ExpressionType.NotEqual when IsNull(binary.Right): + this._sql.Append('('); + this.Translate(binary.Left, binary); + this._sql.Append(" IS NOT NULL)"); + return; + + case ExpressionType.Equal when IsNull(binary.Left): + this._sql.Append('('); + this.Translate(binary.Right, binary); + this._sql.Append(" IS NULL)"); + return; + case ExpressionType.NotEqual when IsNull(binary.Left): + this._sql.Append('('); + this.Translate(binary.Right, binary); + this._sql.Append(" IS NOT NULL)"); + return; + } + + this._sql.Append('('); + this.Translate(binary.Left, binary); + + this._sql.Append(binary.NodeType switch + { + ExpressionType.Equal => " = ", + ExpressionType.NotEqual => " <> ", + + ExpressionType.GreaterThan => " > ", + ExpressionType.GreaterThanOrEqual => " >= ", + ExpressionType.LessThan => " < ", + ExpressionType.LessThanOrEqual => " <= ", + + ExpressionType.AndAlso => " AND ", + ExpressionType.OrElse => " OR ", + + _ => throw new NotSupportedException("Unsupported binary expression node type: " + binary.NodeType) + }); + + this.Translate(binary.Right, binary); + this._sql.Append(')'); + + static bool IsNull(Expression expression) + => expression is ConstantExpression { Value: null } + || (TryGetCapturedValue(expression, out _, out var capturedValue) && capturedValue is null); + } + + protected 
virtual void TranslateConstant(object? value) + { + // TODO: Nullable + switch (value) + { + case byte b: + this._sql.Append(b); + return; + case short s: + this._sql.Append(s); + return; + case int i: + this._sql.Append(i); + return; + case long l: + this._sql.Append(l); + return; + + case string s: + this._sql.Append('\'').Append(s.Replace("'", "''")).Append('\''); + return; + case bool b: + this._sql.Append(b ? "TRUE" : "FALSE"); + return; + case Guid g: + this._sql.Append('\'').Append(g.ToString()).Append('\''); + return; + + case DateTime dateTime: + case DateTimeOffset dateTimeOffset: + case Array: + throw new NotImplementedException(); + + case null: + this._sql.Append("NULL"); + return; + + default: + throw new NotSupportedException("Unsupported constant type: " + value.GetType().Name); + } + } + + private void TranslateMember(MemberExpression memberExpression, Expression? parent) + { + switch (memberExpression) + { + case var _ when this.TryGetColumn(memberExpression, out var column): + this.TranslateColumn(column, memberExpression, parent); + return; + + case var _ when TryGetCapturedValue(memberExpression, out var name, out var value): + this.TranslateCapturedVariable(name, value); + return; + + default: + throw new NotSupportedException($"Member access for '{memberExpression.Member.Name}' is unsupported - only member access over the filter parameter are supported"); + } + } + + protected virtual void TranslateColumn(string column, MemberExpression memberExpression, Expression? parent) + => this._sql.Append('"').Append(column).Append('"'); + + protected abstract void TranslateCapturedVariable(string name, object? capturedValue); + + private void TranslateMethodCall(MethodCallExpression methodCall) + { + switch (methodCall) + { + // Enumerable.Contains() + case { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains + when contains.Method.DeclaringType == typeof(Enumerable): + this.TranslateContains(source, item, methodCall); + return; + + // List.Contains() + case + { + Method: + { + Name: nameof(Enumerable.Contains), + DeclaringType: { IsGenericType: true } declaringType + }, + Object: Expression source, + Arguments: [var item] + } when declaringType.GetGenericTypeDefinition() == typeof(List<>): + this.TranslateContains(source, item, methodCall); + return; + + default: + throw new NotSupportedException($"Unsupported method call: {methodCall.Method.DeclaringType?.Name}.{methodCall.Method.Name}"); + } + } + + private void TranslateContains(Expression source, Expression item, MethodCallExpression parent) + { + switch (source) + { + // Contains over array column (r => r.Strings.Contains("foo")) + case var _ when this.TryGetColumn(source, out _): + this.TranslateContainsOverArrayColumn(source, item, parent); + return; + + // Contains over inline array (r => new[] { "foo", "bar" }.Contains(r.String)) + case NewArrayExpression newArray: + this.Translate(item, parent); + this._sql.Append(" IN ("); + + var isFirst = true; + foreach (var element in newArray.Expressions) + { + if (isFirst) + { + isFirst = false; + } + else + { + this._sql.Append(", "); + } + + this.Translate(element, parent); + } + + this._sql.Append(')'); + return; + + // Contains over captured array (r => arrayLocalVariable.Contains(r.String)) + case var _ when TryGetCapturedValue(source, out _, out var value): + this.TranslateContainsOverCapturedArray(source, item, parent, value); + return; + + default: + throw new NotSupportedException("Unsupported Contains expression"); + } + } + + 
protected abstract void TranslateContainsOverArrayColumn(Expression source, Expression item, MethodCallExpression parent); + + protected abstract void TranslateContainsOverCapturedArray(Expression source, Expression item, MethodCallExpression parent, object? value); + + private void TranslateUnary(UnaryExpression unary) + { + switch (unary.NodeType) + { + case ExpressionType.Not: + // Special handling for !(a == b) and !(a != b) + if (unary.Operand is BinaryExpression { NodeType: ExpressionType.Equal or ExpressionType.NotEqual } binary) + { + this.TranslateBinary( + Expression.MakeBinary( + binary.NodeType is ExpressionType.Equal ? ExpressionType.NotEqual : ExpressionType.Equal, + binary.Left, + binary.Right)); + return; + } + + this._sql.Append("(NOT "); + this.Translate(unary.Operand, unary); + this._sql.Append(')'); + return; + + default: + throw new NotSupportedException("Unsupported unary expression node type: " + unary.NodeType); + } + } + + private bool TryGetColumn(Expression expression, [NotNullWhen(true)] out string? column) + { + if (expression is MemberExpression member && member.Expression == this._recordParameter) + { + if (!this._storagePropertyNames.TryGetValue(member.Member.Name, out column)) + { + throw new InvalidOperationException($"Property name '{member.Member.Name}' provided as part of the filter clause is not a valid property name."); + } + + return true; + } + + column = null; + return false; + } + + private static bool TryGetCapturedValue(Expression expression, [NotNullWhen(true)] out string? name, out object? value) + { + if (expression is MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } + && constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) + && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true)) + { + name = fieldInfo.Name; + value = fieldInfo.GetValue(constant.Value); + return true; + } + + name = null; + value = null; + return false; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj b/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj index b1904c6cc1cd..03b36f7525b1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj @@ -19,6 +19,10 @@ Postgres(with pgvector extension) connector for Semantic Kernel plugins and semantic memory + + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs index 3c864cc6537f..0175243131cd 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs @@ -48,8 +48,9 @@ internal interface IPostgresVectorStoreCollectionSqlBuilder /// The name of the vector column. /// The kind of index to create. /// The distance function to use for the index. + /// Specifies whether to include IF NOT EXISTS in the command. /// The built SQL command info. 
- PostgresSqlCommandInfo BuildCreateVectorIndexCommand(string schema, string tableName, string vectorColumnName, string indexKind, string distanceFunction); + PostgresSqlCommandInfo BuildCreateVectorIndexCommand(string schema, string tableName, string vectorColumnName, string indexKind, string distanceFunction, bool ifNotExists); /// /// Builds a SQL command to drop a table in the Postgres vector store. diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs index 6c68527da5c1..b4b9707c1c99 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs @@ -1,332 +1,53 @@ // Copyright (c) Microsoft. All rights reserved. -using System; using System.Collections.Generic; -using System.Diagnostics; -using System.Diagnostics.CodeAnalysis; -using System.Linq; using System.Linq.Expressions; -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Text; namespace Microsoft.SemanticKernel.Connectors.Postgres; -internal class PostgresFilterTranslator +internal sealed class PostgresFilterTranslator : SqlFilterTranslator { - private IReadOnlyDictionary _storagePropertyNames = null!; - private ParameterExpression _recordParameter = null!; - private readonly List _parameterValues = new(); private int _parameterIndex; - private readonly StringBuilder _sql = new(); - - internal (string Clause, List Parameters) Translate( + internal PostgresFilterTranslator( IReadOnlyDictionary storagePropertyNames, LambdaExpression lambdaExpression, - int startParamIndex) + int startParamIndex) : base(storagePropertyNames, lambdaExpression, sql: null) { - Debug.Assert(this._sql.Length == 0); - - this._storagePropertyNames = storagePropertyNames; - this._parameterIndex = startParamIndex; - - Debug.Assert(lambdaExpression.Parameters.Count == 1); - this._recordParameter = lambdaExpression.Parameters[0]; - - this._sql.Append("WHERE "); - this.Translate(lambdaExpression.Body); - return (this._sql.ToString(), this._parameterValues); } - private void Translate(Expression? 
node) - { - switch (node) - { - case BinaryExpression binary: - this.TranslateBinary(binary); - return; - - case ConstantExpression constant: - this.TranslateConstant(constant); - return; - - case MemberExpression member: - this.TranslateMember(member); - return; + internal List ParameterValues => this._parameterValues; - case MethodCallExpression methodCall: - this.TranslateMethodCall(methodCall); - return; - - case UnaryExpression unary: - this.TranslateUnary(unary); - return; - - default: - throw new NotSupportedException("Unsupported NodeType in filter: " + node?.NodeType); - } - } - - private void TranslateBinary(BinaryExpression binary) + protected override void TranslateContainsOverArrayColumn(Expression source, Expression item, MethodCallExpression parent) { - // Special handling for null comparisons - switch (binary.NodeType) - { - case ExpressionType.Equal when IsNull(binary.Right): - this._sql.Append('('); - this.Translate(binary.Left); - this._sql.Append(" IS NULL)"); - return; - case ExpressionType.NotEqual when IsNull(binary.Right): - this._sql.Append('('); - this.Translate(binary.Left); - this._sql.Append(" IS NOT NULL)"); - return; - - case ExpressionType.Equal when IsNull(binary.Left): - this._sql.Append('('); - this.Translate(binary.Right); - this._sql.Append(" IS NULL)"); - return; - case ExpressionType.NotEqual when IsNull(binary.Left): - this._sql.Append('('); - this.Translate(binary.Right); - this._sql.Append(" IS NOT NULL)"); - return; - } - - this._sql.Append('('); - this.Translate(binary.Left); - - this._sql.Append(binary.NodeType switch - { - ExpressionType.Equal => " = ", - ExpressionType.NotEqual => " <> ", - - ExpressionType.GreaterThan => " > ", - ExpressionType.GreaterThanOrEqual => " >= ", - ExpressionType.LessThan => " < ", - ExpressionType.LessThanOrEqual => " <= ", - - ExpressionType.AndAlso => " AND ", - ExpressionType.OrElse => " OR ", - - _ => throw new NotSupportedException("Unsupported binary expression node type: " + binary.NodeType) - }); - - this.Translate(binary.Right); - this._sql.Append(')'); - - static bool IsNull(Expression expression) - => expression is ConstantExpression { Value: null } - || (TryGetCapturedValue(expression, out var capturedValue) && capturedValue is null); - } - - private void TranslateConstant(ConstantExpression constant) - { - // TODO: Nullable - switch (constant.Value) - { - case byte b: - this._sql.Append(b); - return; - case short s: - this._sql.Append(s); - return; - case int i: - this._sql.Append(i); - return; - case long l: - this._sql.Append(l); - return; - - case string s: - this._sql.Append('\'').Append(s.Replace("'", "''")).Append('\''); - return; - case bool b: - this._sql.Append(b ? "TRUE" : "FALSE"); - return; - case Guid g: - this._sql.Append('\'').Append(g.ToString()).Append('\''); - return; - - case DateTime: - case DateTimeOffset: - throw new NotImplementedException(); - - case Array: - throw new NotImplementedException(); - - case null: - this._sql.Append("NULL"); - return; - - default: - throw new NotSupportedException("Unsupported constant type: " + constant.Value.GetType().Name); - } + this.Translate(source, parent); + this._sql.Append(" @> ARRAY["); + this.Translate(item, parent); + this._sql.Append(']'); } - private void TranslateMember(MemberExpression memberExpression) + protected override void TranslateContainsOverCapturedArray(Expression source, Expression item, MethodCallExpression parent, object? 
value) { - switch (memberExpression) - { - case var _ when this.TryGetColumn(memberExpression, out var column): - this._sql.Append('"').Append(column).Append('"'); - return; - - // Identify captured lambda variables, translate to PostgreSQL parameters ($1, $2...) - case var _ when TryGetCapturedValue(memberExpression, out var capturedValue): - // For null values, simply inline rather than parameterize; parameterized NULLs require setting NpgsqlDbType which is a bit more complicated, - // plus in any case equality with NULL requires different SQL (x IS NULL rather than x = y) - if (capturedValue is null) - { - this._sql.Append("NULL"); - } - else - { - this._parameterValues.Add(capturedValue); - this._sql.Append('$').Append(this._parameterIndex++); - } - return; - - default: - throw new NotSupportedException($"Member access for '{memberExpression.Member.Name}' is unsupported - only member access over the filter parameter are supported"); - } - } - - private void TranslateMethodCall(MethodCallExpression methodCall) - { - switch (methodCall) - { - // Enumerable.Contains() - case { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains - when contains.Method.DeclaringType == typeof(Enumerable): - this.TranslateContains(source, item); - return; - - // List.Contains() - case - { - Method: - { - Name: nameof(Enumerable.Contains), - DeclaringType: { IsGenericType: true } declaringType - }, - Object: Expression source, - Arguments: [var item] - } when declaringType.GetGenericTypeDefinition() == typeof(List<>): - this.TranslateContains(source, item); - return; - - default: - throw new NotSupportedException($"Unsupported method call: {methodCall.Method.DeclaringType?.Name}.{methodCall.Method.Name}"); - } - } - - private void TranslateContains(Expression source, Expression item) - { - switch (source) - { - // Contains over array column (r => r.Strings.Contains("foo")) - case var _ when this.TryGetColumn(source, out _): - this.Translate(source); - this._sql.Append(" @> ARRAY["); - this.Translate(item); - this._sql.Append(']'); - return; - - // Contains over inline array (r => new[] { "foo", "bar" }.Contains(r.String)) - case NewArrayExpression newArray: - this.Translate(item); - this._sql.Append(" IN ("); - - var isFirst = true; - foreach (var element in newArray.Expressions) - { - if (isFirst) - { - isFirst = false; - } - else - { - this._sql.Append(", "); - } - - this.Translate(element); - } - - this._sql.Append(')'); - return; - - // Contains over captured array (r => arrayLocalVariable.Contains(r.String)) - case var _ when TryGetCapturedValue(source, out _): - this.Translate(item); - this._sql.Append(" = ANY ("); - this.Translate(source); - this._sql.Append(')'); - return; - - default: - throw new NotSupportedException("Unsupported Contains expression"); - } - } - - private void TranslateUnary(UnaryExpression unary) - { - switch (unary.NodeType) - { - case ExpressionType.Not: - // Special handling for !(a == b) and !(a != b) - if (unary.Operand is BinaryExpression { NodeType: ExpressionType.Equal or ExpressionType.NotEqual } binary) - { - this.TranslateBinary( - Expression.MakeBinary( - binary.NodeType is ExpressionType.Equal ? 
ExpressionType.NotEqual : ExpressionType.Equal, - binary.Left, - binary.Right)); - return; - } - - this._sql.Append("(NOT "); - this.Translate(unary.Operand); - this._sql.Append(')'); - return; - - default: - throw new NotSupportedException("Unsupported unary expression node type: " + unary.NodeType); - } + this.Translate(item, parent); + this._sql.Append(" = ANY ("); + this.Translate(source, parent); + this._sql.Append(')'); } - private bool TryGetColumn(Expression expression, [NotNullWhen(true)] out string? column) + protected override void TranslateCapturedVariable(string name, object? capturedValue) { - if (expression is MemberExpression member && member.Expression == this._recordParameter) + // For null values, simply inline rather than parameterize; parameterized NULLs require setting NpgsqlDbType which is a bit more complicated, + // plus in any case equality with NULL requires different SQL (x IS NULL rather than x = y) + if (capturedValue is null) { - if (!this._storagePropertyNames.TryGetValue(member.Member.Name, out column)) - { - throw new InvalidOperationException($"Property name '{member.Member.Name}' provided as part of the filter clause is not a valid property name."); - } - - return true; + this._sql.Append("NULL"); } - - column = null; - return false; - } - - private static bool TryGetCapturedValue(Expression expression, out object? capturedValue) - { - if (expression is MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } - && constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) - && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true)) + else { - capturedValue = fieldInfo.GetValue(constant.Value); - return true; + this._parameterValues.Add(capturedValue); + this._sql.Append('$').Append(this._parameterIndex++); } - - capturedValue = null; - return false; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs index 521dc5633cb0..f661c09ebf44 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs @@ -124,7 +124,7 @@ public PostgresSqlCommandInfo BuildCreateTableCommand(string schema, string tabl } /// - public PostgresSqlCommandInfo BuildCreateVectorIndexCommand(string schema, string tableName, string vectorColumnName, string indexKind, string distanceFunction) + public PostgresSqlCommandInfo BuildCreateVectorIndexCommand(string schema, string tableName, string vectorColumnName, string indexKind, string distanceFunction, bool ifNotExists) { // Only support creating HNSW index creation through the connector. var indexTypeName = indexKind switch @@ -149,7 +149,7 @@ public PostgresSqlCommandInfo BuildCreateVectorIndexCommand(string schema, strin return new PostgresSqlCommandInfo( commandText: $@" - CREATE INDEX ""{indexName}"" ON {schema}.""{tableName}"" USING {indexTypeName} (""{vectorColumnName}"" {indexOps});" + CREATE INDEX {(ifNotExists ? "IF NOT EXISTS " : "")} ""{indexName}"" ON {schema}.""{tableName}"" USING {indexTypeName} (""{vectorColumnName}"" {indexOps});" ); } @@ -281,11 +281,6 @@ public PostgresSqlCommandInfo BuildGetBatchCommand(string schema, string t { NpgsqlDbType? keyType = PostgresVectorStoreRecordPropertyMapping.GetNpgsqlDbType(typeof(TKey)) ?? 
throw new ArgumentException($"Unsupported key type {typeof(TKey).Name}"); - if (keys == null || keys.Count == 0) - { - throw new ArgumentException("Keys cannot be null or empty", nameof(keys)); - } - var keyProperty = properties.OfType().FirstOrDefault() ?? throw new ArgumentException("Properties must contain a key property", nameof(properties)); var keyColumn = keyProperty.StoragePropertyName ?? keyProperty.DataModelPropertyName; @@ -327,10 +322,6 @@ DELETE FROM {schema}."{tableName}" public PostgresSqlCommandInfo BuildDeleteBatchCommand(string schema, string tableName, string keyColumn, List keys) { NpgsqlDbType? keyType = PostgresVectorStoreRecordPropertyMapping.GetNpgsqlDbType(typeof(TKey)) ?? throw new ArgumentException($"Unsupported key type {typeof(TKey).Name}"); - if (keys == null || keys.Count == 0) - { - throw new ArgumentException("Keys cannot be null or empty", nameof(keys)); - } for (int i = 0; i < keys.Count; i++) { @@ -383,7 +374,7 @@ public PostgresSqlCommandInfo BuildGetNearestMatchCommand( { (not null, not null) => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), (not null, null) => GenerateLegacyFilterWhereClause(schema, tableName, propertyReader.RecordDefinition.Properties, legacyFilter, startParamIndex: 2), - (null, not null) => new PostgresFilterTranslator().Translate(propertyReader.StoragePropertyNamesMap, newFilter, startParamIndex: 2), + (null, not null) => GenerateNewFilterWhereClause(propertyReader, newFilter), _ => (Clause: string.Empty, Parameters: []) }; #pragma warning restore CS0618 // VectorSearchFilter is obsolete @@ -424,6 +415,14 @@ ORDER BY {PostgresConstants.DistanceColumnName} Parameters = [new NpgsqlParameter { Value = vectorValue }, .. parameters.Select(p => new NpgsqlParameter { Value = p })] }; } + + internal static (string Clause, List Parameters) GenerateNewFilterWhereClause(VectorStoreRecordPropertyReader propertyReader, LambdaExpression newFilter) + { + PostgresFilterTranslator translator = new(propertyReader.StoragePropertyNamesMap, newFilter, startParamIndex: 2); + translator.Translate(appendWhere: true); + return (translator.Clause.ToString(), translator.ParameterValues); + } + #pragma warning disable CS0618 // VectorSearchFilter is obsolete internal static (string Clause, List Parameters) GenerateLegacyFilterWhereClause(string schema, string tableName, IReadOnlyList properties, VectorSearchFilter legacyFilter, int startParamIndex) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs index b97b24708b25..07c228540038 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs @@ -74,7 +74,7 @@ public async Task CreateTableAsync(string tableName, IReadOnlyList - this._sqlBuilder.BuildCreateVectorIndexCommand(this._schema, tableName, index.column, index.kind, index.function) + this._sqlBuilder.BuildCreateVectorIndexCommand(this._schema, tableName, index.column, index.kind, index.function, ifNotExists) ); // Execute the commands in a transaction. 
@@ -152,11 +152,19 @@ public async Task UpsertBatchAsync(string tableName, IEnumerable> GetBatchAsync(string tableName, IEnumerable keys, IReadOnlyList properties, bool includeVectors = false, [EnumeratorCancellation] CancellationToken cancellationToken = default) where TKey : notnull { + Verify.NotNull(keys); + + List listOfKeys = keys.ToList(); + if (listOfKeys.Count == 0) + { + yield break; + } + NpgsqlConnection connection = await this.DataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false); await using (connection) { - var commandInfo = this._sqlBuilder.BuildGetBatchCommand(this._schema, tableName, properties, keys.ToList(), includeVectors); + var commandInfo = this._sqlBuilder.BuildGetBatchCommand(this._schema, tableName, properties, listOfKeys, includeVectors); using NpgsqlCommand cmd = commandInfo.ToNpgsqlCommand(connection); using NpgsqlDataReader dataReader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); while (await dataReader.ReadAsync(cancellationToken).ConfigureAwait(false)) @@ -198,7 +206,13 @@ public async Task DeleteAsync(string tableName, string keyColumn, TKey key /// public async Task DeleteBatchAsync(string tableName, string keyColumn, IEnumerable keys, CancellationToken cancellationToken = default) { - var commandInfo = this._sqlBuilder.BuildDeleteBatchCommand(this._schema, tableName, keyColumn, keys.ToList()); + var listOfKeys = keys.ToList(); + if (listOfKeys.Count == 0) + { + return; + } + + var commandInfo = this._sqlBuilder.BuildDeleteBatchCommand(this._schema, tableName, keyColumn, listOfKeys); await this.ExecuteNonQueryAsync(commandInfo, cancellationToken).ConfigureAwait(false); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index 6a87d2454179..5db73b801275 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -168,6 +168,8 @@ public virtual Task UpsertAsync(TRecord record, CancellationToken cancella /// public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { + Verify.NotNull(records); + const string OperationName = "UpsertBatch"; var storageModels = records.Select(record => VectorStoreErrorHandler.RunModelConversion( @@ -176,6 +178,11 @@ public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable this._mapper.MapFromDataToStorageModel(record))).ToList(); + if (storageModels.Count == 0) + { + yield break; + } + var keys = storageModels.Select(model => model[this._propertyReader.KeyPropertyStoragePropertyName]!).ToList(); await this.RunOperationAsync(OperationName, () => @@ -243,6 +250,8 @@ public virtual Task DeleteAsync(TKey key, CancellationToken cancellationToken = /// public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { + Verify.NotNull(keys); + const string OperationName = "DeleteBatch"; return this.RunOperationAsync(OperationName, () => this._client.DeleteBatchAsync(this.CollectionName, this._propertyReader.KeyPropertyStoragePropertyName, keys, cancellationToken) @@ -319,7 +328,7 @@ private async Task RunOperationAsync(string operationName, Func operation) { await operation.Invoke().ConfigureAwait(false); } - catch (Exception ex) + catch (Exception ex) when (ex is not 
NotSupportedException) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { @@ -336,7 +345,7 @@ private async Task RunOperationAsync(string operationName, Func> o { return await operation.Invoke().ConfigureAwait(false); } - catch (Exception ex) + catch (Exception ex) when (ex is not NotSupportedException) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/Connectors.Memory.SqlServer.csproj b/dotnet/src/Connectors/Connectors.Memory.SqlServer/Connectors.Memory.SqlServer.csproj index ba73f9641bd9..045fd37fc3cf 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/Connectors.Memory.SqlServer.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/Connectors.Memory.SqlServer.csproj @@ -4,7 +4,7 @@ Microsoft.SemanticKernel.Connectors.SqlServer $(AssemblyName) - netstandard2.0 + netstandard2.0;net8.0 alpha @@ -18,6 +18,10 @@ SQL Server connector for Semantic Kernel plugins and semantic memory + + + + @@ -26,4 +30,8 @@ + + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs new file mode 100644 index 000000000000..452887ea7dd1 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.VectorData; + +namespace Microsoft.SemanticKernel.Connectors.SqlServer; + +#pragma warning disable CA1068 // CancellationToken parameters must come last + +internal static class ExceptionWrapper +{ + private const string VectorStoreType = "SqlServer"; + + internal static async Task WrapAsync( + SqlConnection connection, + SqlCommand command, + Func> func, + CancellationToken cancellationToken, + string operationName, + string? collectionName = null) + { + if (connection.State != System.Data.ConnectionState.Open) + { + await connection.OpenAsync(cancellationToken).ConfigureAwait(false); + } + + try + { + return await func(command, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + throw new VectorStoreOperationException(ex.Message, ex) + { + OperationName = operationName, + VectorStoreType = VectorStoreType, + CollectionName = collectionName + }; + } + } + + internal static async Task WrapReadAsync( + SqlDataReader reader, + CancellationToken cancellationToken, + string operationName, + string? collectionName = null) + { + try + { + return await reader.ReadAsync(cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + throw new VectorStoreOperationException(ex.Message, ex) + { + OperationName = operationName, + VectorStoreType = VectorStoreType, + CollectionName = collectionName + }; + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/GenericRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/GenericRecordMapper.cs new file mode 100644 index 000000000000..ff9c7851f4cb --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/GenericRecordMapper.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using Microsoft.Extensions.VectorData; + +namespace Microsoft.SemanticKernel.Connectors.SqlServer; + +internal sealed class GenericRecordMapper : IVectorStoreRecordMapper, IDictionary> + where TKey : notnull +{ + private readonly VectorStoreRecordPropertyReader _propertyReader; + + internal GenericRecordMapper(VectorStoreRecordPropertyReader propertyReader) => this._propertyReader = propertyReader; + + public IDictionary MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) + { + Dictionary properties = new() + { + { SqlServerCommandBuilder.GetColumnName(this._propertyReader.KeyProperty), dataModel.Key } + }; + + foreach (var property in this._propertyReader.DataProperties) + { + string name = SqlServerCommandBuilder.GetColumnName(property); + if (dataModel.Data.TryGetValue(name, out var dataValue)) + { + properties.Add(name, dataValue); + } + } + + // Add vector properties + if (dataModel.Vectors is not null) + { + foreach (var property in this._propertyReader.VectorProperties) + { + string name = SqlServerCommandBuilder.GetColumnName(property); + if (dataModel.Vectors.TryGetValue(name, out var vectorValue)) + { + if (vectorValue is ReadOnlyMemory floats) + { + properties.Add(name, floats); + } + else if (vectorValue is not null) + { + throw new VectorStoreRecordMappingException($"Vector property '{name}' contained value of non supported type: '{vectorValue.GetType().FullName}'."); + } + } + } + } + + return properties; + } + + public VectorStoreGenericDataModel MapFromStorageToDataModel(IDictionary storageModel, StorageToDataModelMapperOptions options) + { + TKey key; + var dataProperties = new Dictionary(); + var vectorProperties = new Dictionary(); + + if (storageModel.TryGetValue(SqlServerCommandBuilder.GetColumnName(this._propertyReader.KeyProperty), out var keyObject) && keyObject is not null) + { + key = (TKey)keyObject; + } + else + { + throw new VectorStoreRecordMappingException("No key property was found in the record retrieved from storage."); + } + + foreach (var property in this._propertyReader.DataProperties) + { + string name = SqlServerCommandBuilder.GetColumnName(property); + if (storageModel.TryGetValue(name, out var dataValue)) + { + dataProperties.Add(name, dataValue); + } + } + + if (options.IncludeVectors) + { + foreach (var property in this._propertyReader.VectorProperties) + { + string name = SqlServerCommandBuilder.GetColumnName(property); + if (storageModel.TryGetValue(name, out var vectorValue)) + { + vectorProperties.Add(name, vectorValue); + } + } + } + + return new(key) { Data = dataProperties, Vectors = vectorProperties }; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs new file mode 100644 index 000000000000..240f2814e044 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs @@ -0,0 +1,99 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Reflection; +using Microsoft.Extensions.VectorData; + +namespace Microsoft.SemanticKernel.Connectors.SqlServer; + +internal sealed class RecordMapper : IVectorStoreRecordMapper> +{ + private readonly VectorStoreRecordPropertyReader _propertyReader; + + internal RecordMapper(VectorStoreRecordPropertyReader propertyReader) => this._propertyReader = propertyReader; + + public IDictionary MapFromDataToStorageModel(TRecord dataModel) + { + Dictionary map = new(StringComparer.Ordinal); + + map[SqlServerCommandBuilder.GetColumnName(this._propertyReader.KeyProperty)] = this._propertyReader.KeyPropertyInfo.GetValue(dataModel); + + var dataProperties = this._propertyReader.DataProperties; + var dataPropertiesInfo = this._propertyReader.DataPropertiesInfo; + for (int i = 0; i < dataProperties.Count; i++) + { + object? value = dataPropertiesInfo[i].GetValue(dataModel); + map[SqlServerCommandBuilder.GetColumnName(dataProperties[i])] = value; + } + var vectorProperties = this._propertyReader.VectorProperties; + var vectorPropertiesInfo = this._propertyReader.VectorPropertiesInfo; + for (int i = 0; i < vectorProperties.Count; i++) + { + // We restrict the vector properties to ReadOnlyMemory so the cast here is safe. + ReadOnlyMemory floats = (ReadOnlyMemory)vectorPropertiesInfo[i].GetValue(dataModel)!; + map[SqlServerCommandBuilder.GetColumnName(vectorProperties[i])] = floats; + } + + return map; + } + + public TRecord MapFromStorageToDataModel(IDictionary storageModel, StorageToDataModelMapperOptions options) + { + TRecord record = Activator.CreateInstance()!; + SetValue(storageModel, record, this._propertyReader.KeyPropertyInfo, this._propertyReader.KeyProperty); + var data = this._propertyReader.DataProperties; + var dataInfo = this._propertyReader.DataPropertiesInfo; + for (int i = 0; i < data.Count; i++) + { + SetValue(storageModel, record, dataInfo[i], data[i]); + } + + if (options.IncludeVectors) + { + var vector = this._propertyReader.VectorProperties; + var vectorInfo = this._propertyReader.VectorPropertiesInfo; + for (int i = 0; i < vector.Count; i++) + { + object? value = storageModel[SqlServerCommandBuilder.GetColumnName(vector[i])]; + if (value is not null) + { + if (value is ReadOnlyMemory floats) + { + vectorInfo[i].SetValue(record, floats); + } + else + { + // When deserializing a string to a ReadOnlyMemory fails in SqlDataReaderDictionary, + // we store the raw value so the user can handle the error in a custom mapper. + throw new VectorStoreRecordMappingException($"Failed to deserialize vector property '{vector[i].DataModelPropertyName}', it contained value '{value}'."); + } + } + } + } + + return record; + + static void SetValue(IDictionary storageModel, object record, PropertyInfo propertyInfo, VectorStoreRecordProperty property) + { + // If we got here, there should be no column name mismatch (the query would fail). + object? value = storageModel[SqlServerCommandBuilder.GetColumnName(property)]; + + if (value is null) + { + // There is no need to call the reflection to set the null, + // as it's the default value of every .NET reference type field. 
+ return; + } + + try + { + propertyInfo.SetValue(record, value); + } + catch (Exception ex) + { + throw new VectorStoreRecordMappingException($"Failed to set value '{value}' on property '{propertyInfo.Name}' of type '{propertyInfo.PropertyType.FullName}'.", ex); + } + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlDataReaderDictionary.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlDataReaderDictionary.cs new file mode 100644 index 000000000000..414ff8de4afd --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlDataReaderDictionary.cs @@ -0,0 +1,148 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Text.Json; +using Microsoft.Data.SqlClient; + +namespace Microsoft.SemanticKernel.Connectors.SqlServer; + +/// +/// This class is used to provide a dictionary-like interface to a . +/// The goal is to avoid the need of allocating a new dictionary for each row read from the database. +/// +internal sealed class SqlDataReaderDictionary : IDictionary +{ + private readonly SqlDataReader _sqlDataReader; + private readonly IReadOnlyList _vectorPropertyStoragePropertyNames; + + // This field will get instantiated lazily, only if needed by a custom mapper. + private Dictionary? _dictionary; + + internal SqlDataReaderDictionary(SqlDataReader sqlDataReader, IReadOnlyList vectorPropertyStoragePropertyNames) + { + this._sqlDataReader = sqlDataReader; + this._vectorPropertyStoragePropertyNames = vectorPropertyStoragePropertyNames; + } + + private object? Unwrap(string storageName, object? value) + { + // Let's make sure our users don't need to learn what DBNull is. + if (value is DBNull) + { + return null; + } + + // If the value is a vector, we need to deserialize it. + if (this._vectorPropertyStoragePropertyNames.Count > 0 && value is string text) + { + for (int i = 0; i < this._vectorPropertyStoragePropertyNames.Count; i++) + { + if (string.Equals(storageName, this._vectorPropertyStoragePropertyNames[i], StringComparison.Ordinal)) + { + try + { + return JsonSerializer.Deserialize>(text); + } + catch (JsonException) + { + // This may fail if the user has stored a non-float array in the database + // (or serialized it in a different way). + // We need to return the raw value, so the user can handle the error in a custom mapper. + return text; + } + } + } + } + +#if NET + // The SqlClient accepts TimeOnly as parameters, but returns them as TimeSpan. + // Since we don't support TimeSpan, we can convert it back to TimeOnly. + if (value is TimeSpan timeSpan) + { + return new TimeOnly(timeSpan.Ticks); + } +#endif + + return value; + } + + // This is the only method used by the default mapper. + public object? this[string key] + { + get => this.Unwrap(key, this._sqlDataReader[key]); + set => throw new InvalidOperationException(); + } + + public ICollection Keys => this.GetDictionary().Keys; + + public ICollection Values => this.GetDictionary().Values; + + public int Count => this._sqlDataReader.FieldCount; + + public bool IsReadOnly => true; + + public void Add(string key, object? 
value) => throw new InvalidOperationException(); + + public void Add(KeyValuePair item) => throw new InvalidOperationException(); + + public void Clear() => throw new InvalidOperationException(); + + public bool Contains(KeyValuePair item) + => this.TryGetValue(item.Key, out var value) && Equals(value, item.Value); + + public bool ContainsKey(string key) + { + try + { + return this._sqlDataReader.GetOrdinal(key) >= 0; + } + catch (IndexOutOfRangeException) + { + return false; + } + } + + public void CopyTo(KeyValuePair[] array, int arrayIndex) + => ((ICollection>)this.GetDictionary()).CopyTo(array, arrayIndex); + + public IEnumerator> GetEnumerator() + => this.GetDictionary().GetEnumerator(); + + IEnumerator IEnumerable.GetEnumerator() + => this.GetDictionary().GetEnumerator(); + + public bool Remove(string key) => throw new InvalidOperationException(); + + public bool Remove(KeyValuePair item) => throw new InvalidOperationException(); + + public bool TryGetValue(string key, out object? value) + { + try + { + value = this.Unwrap(key, this._sqlDataReader[key]); + return true; + } + catch (IndexOutOfRangeException) + { + value = default; + return false; + } + } + + private Dictionary GetDictionary() + { + if (this._dictionary is null) + { + Dictionary dictionary = new(this._sqlDataReader.FieldCount, StringComparer.Ordinal); + for (int i = 0; i < this._sqlDataReader.FieldCount; i++) + { + string name = this._sqlDataReader.GetName(i); + dictionary.Add(name, this.Unwrap(name, this._sqlDataReader[i])); + } + this._dictionary = dictionary; + } + return this._dictionary; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerClient.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerClient.cs index 4a1225f0a46f..42f65ef55cf8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerClient.cs @@ -80,14 +80,7 @@ public async IAsyncEnumerable GetTablesAsync([EnumeratorCancellation] Ca { using (await this.OpenConnectionAsync(cancellationToken).ConfigureAwait(false)) { - using var cmd = this._connection.CreateCommand(); - cmd.CommandText = """ - SELECT TABLE_NAME - FROM INFORMATION_SCHEMA.TABLES - WHERE TABLE_TYPE = 'BASE TABLE' - AND TABLE_SCHEMA = @schema - """; - cmd.Parameters.AddWithValue("@schema", this._schema); + using var cmd = SqlServerCommandBuilder.SelectTableNames(this._connection, this._schema); using var reader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) { @@ -101,16 +94,7 @@ public async Task DoesTableExistsAsync(string tableName, CancellationToken { using (await this.OpenConnectionAsync(cancellationToken).ConfigureAwait(false)) { - using var cmd = this._connection.CreateCommand(); - cmd.CommandText = """ - SELECT TABLE_NAME - FROM INFORMATION_SCHEMA.TABLES - WHERE TABLE_TYPE = 'BASE TABLE' - AND TABLE_SCHEMA = @schema - AND TABLE_NAME = @tableName - """; - cmd.Parameters.AddWithValue("@schema", this._schema); - cmd.Parameters.AddWithValue("@tableName", tableName); + using var cmd = SqlServerCommandBuilder.SelectTableName(this._connection, this._schema, tableName); using var reader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); return await reader.ReadAsync(cancellationToken).ConfigureAwait(false); } @@ -121,11 +105,7 @@ public async Task DeleteTableAsync(string tableName, CancellationToken cancellat { using (await 
this.OpenConnectionAsync(cancellationToken).ConfigureAwait(false)) { - using var cmd = this._connection.CreateCommand(); - var fullTableName = this.GetSanitizedFullTableName(tableName); - cmd.CommandText = $""" - DROP TABLE IF EXISTS {fullTableName} - """; + using var cmd = SqlServerCommandBuilder.DropTableIfExists(this._connection, this._schema, tableName); await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs new file mode 100644 index 000000000000..17b23d7d7477 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs @@ -0,0 +1,547 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text; +using System.Text.Json; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.VectorData; + +#pragma warning disable CA2100 // Review SQL queries for security vulnerabilities + +namespace Microsoft.SemanticKernel.Connectors.SqlServer; + +internal static class SqlServerCommandBuilder +{ + internal static SqlCommand CreateTable( + SqlConnection connection, + string? schema, + string tableName, + bool ifNotExists, + VectorStoreRecordKeyProperty keyProperty, + IReadOnlyList dataProperties, + IReadOnlyList vectorProperties) + { + StringBuilder sb = new(200); + if (ifNotExists) + { + sb.Append("IF OBJECT_ID(N'"); + sb.AppendTableName(schema, tableName); + sb.AppendLine("', N'U') IS NULL"); + } + sb.AppendLine("BEGIN"); + sb.Append("CREATE TABLE "); + sb.AppendTableName(schema, tableName); + sb.AppendLine(" ("); + string keyColumnName = GetColumnName(keyProperty); + var keyMapping = Map(keyProperty.PropertyType); + sb.AppendFormat("[{0}] {1} NOT NULL,", keyColumnName, keyMapping.sqlName); + sb.AppendLine(); + for (int i = 0; i < dataProperties.Count; i++) + { + sb.AppendFormat("[{0}] {1},", GetColumnName(dataProperties[i]), Map(dataProperties[i].PropertyType).sqlName); + sb.AppendLine(); + } + for (int i = 0; i < vectorProperties.Count; i++) + { + sb.AppendFormat("[{0}] VECTOR({1}),", GetColumnName(vectorProperties[i]), vectorProperties[i].Dimensions); + sb.AppendLine(); + } + sb.AppendFormat("PRIMARY KEY ([{0}])", keyColumnName); + sb.AppendLine(); + sb.AppendLine(");"); // end the table definition + + foreach (var vectorProperty in vectorProperties) + { + switch (vectorProperty.IndexKind) + { + case null: + case "": + case IndexKind.Flat: + break; + default: + throw new NotSupportedException($"Index kind {vectorProperty.IndexKind} is not supported."); + } + } + sb.Append("END;"); + + return connection.CreateCommand(sb); + } + + internal static SqlCommand DropTableIfExists(SqlConnection connection, string? schema, string tableName) + { + StringBuilder sb = new(50); + sb.Append("DROP TABLE IF EXISTS "); + sb.AppendTableName(schema, tableName); + + return connection.CreateCommand(sb); + } + + internal static SqlCommand SelectTableName(SqlConnection connection, string? schema, string tableName) + { + SqlCommand command = connection.CreateCommand(); + command.CommandText = """ + SELECT TABLE_NAME + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_TYPE = 'BASE TABLE' + AND (@schema is NULL or TABLE_SCHEMA = @schema) + AND TABLE_NAME = @tableName + """; + command.Parameters.AddWithValue("@schema", string.IsNullOrEmpty(schema) ? 
DBNull.Value : schema); + command.Parameters.AddWithValue("@tableName", tableName); // the name is not escaped by us, just provided as parameter + return command; + } + + internal static SqlCommand SelectTableNames(SqlConnection connection, string? schema) + { + SqlCommand command = connection.CreateCommand(); + command.CommandText = """ + SELECT TABLE_NAME + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_TYPE = 'BASE TABLE' + AND (@schema is NULL or TABLE_SCHEMA = @schema) + """; + command.Parameters.AddWithValue("@schema", string.IsNullOrEmpty(schema) ? DBNull.Value : schema); + return command; + } + + internal static SqlCommand MergeIntoSingle( + SqlConnection connection, + string? schema, + string tableName, + VectorStoreRecordKeyProperty keyProperty, + IReadOnlyList properties, + IDictionary record) + { + SqlCommand command = connection.CreateCommand(); + StringBuilder sb = new(200); + sb.Append("MERGE INTO "); + sb.AppendTableName(schema, tableName); + sb.AppendLine(" AS t"); + sb.Append("USING (VALUES ("); + int paramIndex = 0; + foreach (VectorStoreRecordProperty property in properties) + { + sb.AppendParameterName(property, ref paramIndex, out string paramName).Append(','); + command.AddParameter(property, paramName, record[GetColumnName(property)]); + } + sb[sb.Length - 1] = ')'; // replace the last comma with a closing parenthesis + sb.Append(") AS s ("); + sb.AppendColumnNames(properties); + sb.AppendLine(")"); + sb.AppendFormat("ON (t.[{0}] = s.[{0}])", GetColumnName(keyProperty)).AppendLine(); + sb.AppendLine("WHEN MATCHED THEN"); + sb.Append("UPDATE SET "); + foreach (VectorStoreRecordProperty property in properties) + { + if (property != keyProperty) // don't update the key + { + sb.AppendFormat("t.[{0}] = s.[{0}],", GetColumnName(property)); + } + } + --sb.Length; // remove the last comma + sb.AppendLine(); + + sb.Append("WHEN NOT MATCHED THEN"); + sb.AppendLine(); + sb.Append("INSERT ("); + sb.AppendColumnNames(properties); + sb.AppendLine(")"); + sb.Append("VALUES ("); + sb.AppendColumnNames(properties, prefix: "s."); + sb.AppendLine(")"); + sb.AppendFormat("OUTPUT inserted.[{0}];", GetColumnName(keyProperty)); + + command.CommandText = sb.ToString(); + return command; + } + + internal static SqlCommand? MergeIntoMany( + SqlConnection connection, + string? schema, + string tableName, + VectorStoreRecordKeyProperty keyProperty, + IReadOnlyList properties, + IEnumerable> records) + { + SqlCommand command = connection.CreateCommand(); + + StringBuilder sb = new(200); + // The DECLARE statement creates a table variable to store the keys of the inserted rows. + sb.AppendFormat("DECLARE @InsertedKeys TABLE (KeyColumn {0});", Map(keyProperty.PropertyType).sqlName); + sb.AppendLine(); + // The MERGE statement performs the upsert operation and outputs the keys of the inserted rows into the table variable. + sb.Append("MERGE INTO "); + sb.AppendTableName(schema, tableName); + sb.AppendLine(" AS t"); // t stands for target + sb.AppendLine("USING (VALUES"); + int rowIndex = 0, paramIndex = 0; + foreach (var record in records) + { + sb.Append('('); + foreach (VectorStoreRecordProperty property in properties) + { + sb.AppendParameterName(property, ref paramIndex, out string paramName).Append(','); + command.AddParameter(property, paramName, record[GetColumnName(property)]); + } + sb[sb.Length - 1] = ')'; // replace the last comma with a closing parenthesis + sb.AppendLine(","); + rowIndex++; + } + + if (rowIndex == 0) + { + return null; // there is nothing to do! 
+ } + + sb.Length -= (1 + Environment.NewLine.Length); // remove the last comma and newline + + sb.Append(") AS s ("); // s stands for source + sb.AppendColumnNames(properties); + sb.AppendLine(")"); + sb.AppendFormat("ON (t.[{0}] = s.[{0}])", GetColumnName(keyProperty)).AppendLine(); + sb.AppendLine("WHEN MATCHED THEN"); + sb.Append("UPDATE SET "); + foreach (VectorStoreRecordProperty property in properties) + { + if (property != keyProperty) // don't update the key + { + sb.AppendFormat("t.[{0}] = s.[{0}],", GetColumnName(property)); + } + } + --sb.Length; // remove the last comma + sb.AppendLine(); + sb.Append("WHEN NOT MATCHED THEN"); + sb.AppendLine(); + sb.Append("INSERT ("); + sb.AppendColumnNames(properties); + sb.AppendLine(")"); + sb.Append("VALUES ("); + sb.AppendColumnNames(properties, prefix: "s."); + sb.AppendLine(")"); + sb.AppendFormat("OUTPUT inserted.[{0}] INTO @InsertedKeys (KeyColumn);", GetColumnName(keyProperty)); + sb.AppendLine(); + + // The SELECT statement returns the keys of the inserted rows. + sb.Append("SELECT KeyColumn FROM @InsertedKeys;"); + + command.CommandText = sb.ToString(); + return command; + } + + internal static SqlCommand DeleteSingle( + SqlConnection connection, string? schema, string tableName, + VectorStoreRecordKeyProperty keyProperty, object key) + { + SqlCommand command = connection.CreateCommand(); + + int paramIndex = 0; + StringBuilder sb = new(100); + sb.Append("DELETE FROM "); + sb.AppendTableName(schema, tableName); + sb.AppendFormat(" WHERE [{0}] = ", GetColumnName(keyProperty)); + sb.AppendParameterName(keyProperty, ref paramIndex, out string keyParamName); + command.AddParameter(keyProperty, keyParamName, key); + + command.CommandText = sb.ToString(); + return command; + } + + internal static SqlCommand? DeleteMany( + SqlConnection connection, string? schema, string tableName, + VectorStoreRecordKeyProperty keyProperty, IEnumerable keys) + { + SqlCommand command = connection.CreateCommand(); + + StringBuilder sb = new(100); + sb.Append("DELETE FROM "); + sb.AppendTableName(schema, tableName); + sb.AppendFormat(" WHERE [{0}] IN (", GetColumnName(keyProperty)); + sb.AppendKeyParameterList(keys, command, keyProperty, out bool emptyKeys); + sb.Append(')'); // close the IN clause + + if (emptyKeys) + { + return null; // there is nothing to do! + } + + command.CommandText = sb.ToString(); + return command; + } + + internal static SqlCommand SelectSingle( + SqlConnection sqlConnection, string? schema, string collectionName, + VectorStoreRecordKeyProperty keyProperty, + IReadOnlyList properties, + object key, + bool includeVectors) + { + SqlCommand command = sqlConnection.CreateCommand(); + + int paramIndex = 0; + StringBuilder sb = new(200); + sb.AppendFormat("SELECT "); + sb.AppendColumnNames(properties, includeVectors: includeVectors); + sb.AppendLine(); + sb.Append("FROM "); + sb.AppendTableName(schema, collectionName); + sb.AppendLine(); + sb.AppendFormat("WHERE [{0}] = ", GetColumnName(keyProperty)); + sb.AppendParameterName(keyProperty, ref paramIndex, out string keyParamName); + command.AddParameter(keyProperty, keyParamName, key); + + command.CommandText = sb.ToString(); + return command; + } + + internal static SqlCommand? SelectMany( + SqlConnection connection, string? 
schema, string tableName, + VectorStoreRecordKeyProperty keyProperty, + IReadOnlyList properties, + IEnumerable keys, + bool includeVectors) + { + SqlCommand command = connection.CreateCommand(); + + StringBuilder sb = new(200); + sb.AppendFormat("SELECT "); + sb.AppendColumnNames(properties, includeVectors: includeVectors); + sb.AppendLine(); + sb.Append("FROM "); + sb.AppendTableName(schema, tableName); + sb.AppendLine(); + sb.AppendFormat("WHERE [{0}] IN (", GetColumnName(keyProperty)); + sb.AppendKeyParameterList(keys, command, keyProperty, out bool emptyKeys); + sb.Append(')'); // close the IN clause + + if (emptyKeys) + { + return null; // there is nothing to do! + } + + command.CommandText = sb.ToString(); + return command; + } + + internal static SqlCommand SelectVector( + SqlConnection connection, string? schema, string tableName, + VectorStoreRecordVectorProperty vectorProperty, + IReadOnlyList properties, + IReadOnlyDictionary storagePropertyNamesMap, + VectorSearchOptions options, + ReadOnlyMemory vector) + { + string distanceFunction = vectorProperty.DistanceFunction ?? DistanceFunction.CosineDistance; + (string distanceMetric, string sorting) = MapDistanceFunction(distanceFunction); + + SqlCommand command = connection.CreateCommand(); + command.Parameters.AddWithValue("@vector", JsonSerializer.Serialize(vector)); + + StringBuilder sb = new(200); + sb.AppendFormat("SELECT "); + sb.AppendColumnNames(properties, includeVectors: options.IncludeVectors); + sb.AppendLine(","); + sb.AppendFormat("VECTOR_DISTANCE('{0}', {1}, CAST(@vector AS VECTOR({2}))) AS [score]", + distanceMetric, GetColumnName(vectorProperty), vector.Length); + sb.AppendLine(); + sb.Append("FROM "); + sb.AppendTableName(schema, tableName); + sb.AppendLine(); + if (options.Filter is not null) + { + int startParamIndex = command.Parameters.Count; + + SqlServerFilterTranslator translator = new(storagePropertyNamesMap, options.Filter, sb, startParamIndex: startParamIndex); + translator.Translate(appendWhere: true); + List parameters = translator.ParameterValues; + + foreach (object parameter in parameters) + { + command.AddParameter(vectorProperty, $"@_{startParamIndex++}", parameter); + } + sb.AppendLine(); + } + sb.AppendFormat("ORDER BY [score] {0}", sorting); + sb.AppendLine(); + // Negative Skip and Top values are rejected by the VectorSearchOptions property setters. + // 0 is a legal value for OFFSET. + sb.AppendFormat("OFFSET {0} ROWS FETCH NEXT {1} ROWS ONLY;", options.Skip, options.Top); + + command.CommandText = sb.ToString(); + return command; + } + + internal static string GetColumnName(VectorStoreRecordProperty property) + => property.StoragePropertyName ?? property.DataModelPropertyName; + + internal static StringBuilder AppendParameterName(this StringBuilder sb, VectorStoreRecordProperty property, ref int paramIndex, out string parameterName) + { + // In SQL Server, parameter names cannot be just a number like "@1". + // Parameter names must start with an alphabetic character or an underscore + // and can be followed by alphanumeric characters or underscores. + // Since we can't guarantee that the value returned by StoragePropertyName and DataModelPropertyName + // is valid parameter name (it can contain whitespaces, or start with a number), + // we just append the ASCII letters, stop on the first non-ASCII letter + // and append the index. 
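+ // For example, a column named "My Column 1" with paramIndex 3 produces the parameter name "@My_3"
+ // (and paramIndex advances to 4), while a column named "42" produces "@_3".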
+ string columnName = GetColumnName(property); + int index = sb.Length; + sb.Append('@'); + foreach (char character in columnName) + { + // We don't call APIs like char.IsWhitespace as they are expensive + // as they need to handle all Unicode characters. + if (!((character is >= 'a' and <= 'z') || (character is >= 'A' and <= 'Z'))) + { + break; + } + sb.Append(character); + } + // In case the column name is empty or does not start with ASCII letters, + // we provide the underscore as a prefix (allowed). + sb.Append('_'); + // To ensure the generated parameter id is unique, we append the index. + sb.Append(paramIndex++); + parameterName = sb.ToString(index, sb.Length - index); + + return sb; + } + + internal static StringBuilder AppendTableName(this StringBuilder sb, string? schema, string tableName) + { + // If the column name contains a ], then escape it by doubling it. + // "Name with [brackets]" becomes [Name with [brackets]]]. + + sb.Append('['); + int index = sb.Length; // store the index, so we replace ] only for the appended part + + if (!string.IsNullOrEmpty(schema)) + { + sb.Append(schema); + sb.Replace("]", "]]", index, schema.Length); // replace the ] for schema + sb.Append("].["); + index = sb.Length; + } + + sb.Append(tableName); + sb.Replace("]", "]]", index, tableName.Length); + sb.Append(']'); + + return sb; + } + + private static StringBuilder AppendColumnNames(this StringBuilder sb, + IEnumerable properties, + string? prefix = null, + bool includeVectors = true) + { + bool any = false; + foreach (VectorStoreRecordProperty property in properties) + { + if (!includeVectors && property is VectorStoreRecordVectorProperty) + { + continue; + } + + if (prefix is not null) + { + sb.Append(prefix); + } + // Use square brackets to escape column names. + sb.AppendFormat("[{0}],", GetColumnName(property)); + any = true; + } + + if (any) + { + --sb.Length; // remove the last comma + } + + return sb; + } + + private static StringBuilder AppendKeyParameterList(this StringBuilder sb, + IEnumerable keys, SqlCommand command, VectorStoreRecordKeyProperty keyProperty, out bool emptyKeys) + { + int keyIndex = 0; + foreach (TKey key in keys) + { + // The caller ensures that keys collection is not null. + // We need to ensure that none of the keys is null. + Verify.NotNull(key); + + sb.AppendParameterName(keyProperty, ref keyIndex, out string keyParamName); + sb.Append(','); + command.AddParameter(keyProperty, keyParamName, key); + } + + emptyKeys = keyIndex == 0; + sb.Length--; // remove the last comma + return sb; + } + + private static SqlCommand CreateCommand(this SqlConnection connection, StringBuilder sb) + { + SqlCommand command = connection.CreateCommand(); + command.CommandText = sb.ToString(); + return command; + } + + private static void AddParameter(this SqlCommand command, VectorStoreRecordProperty property, string name, object? value) + { + switch (value) + { + case null when property.PropertyType == typeof(byte[]): + command.Parameters.Add(name, System.Data.SqlDbType.VarBinary).Value = DBNull.Value; + break; + case null: + case ReadOnlyMemory vector when vector.Length == 0: + command.Parameters.AddWithValue(name, DBNull.Value); + break; + case byte[] buffer: + command.Parameters.Add(name, System.Data.SqlDbType.VarBinary).Value = buffer; + break; + case ReadOnlyMemory vector: + command.Parameters.AddWithValue(name, JsonSerializer.Serialize(vector)); + break; + default: + command.Parameters.AddWithValue(name, value); + break; + } + } + + private static (string sqlName, string? 
autoGenerate) Map(Type type) + { + return type switch + { + Type t when t == typeof(byte) => ("TINYINT", null), + Type t when t == typeof(short) => ("SMALLINT", null), + Type t when t == typeof(int) => ("INT", "IDENTITY(1,1)"), + Type t when t == typeof(long) => ("BIGINT", "IDENTITY(1,1)"), + Type t when t == typeof(Guid) => ("UNIQUEIDENTIFIER", "DEFAULT NEWSEQUENTIALID()"), + Type t when t == typeof(string) => ("NVARCHAR(255)", null), + Type t when t == typeof(byte[]) => ("VARBINARY(MAX)", null), + Type t when t == typeof(bool) => ("BIT", null), + Type t when t == typeof(DateTime) => ("DATETIME2", null), +#if NET + Type t when t == typeof(TimeOnly) => ("TIME", null), +#endif + Type t when t == typeof(decimal) => ("DECIMAL", null), + Type t when t == typeof(double) => ("FLOAT", null), + Type t when t == typeof(float) => ("REAL", null), + _ => throw new NotSupportedException($"Type {type} is not supported.") + }; + } + + // Source: https://learn.microsoft.com/sql/t-sql/functions/vector-distance-transact-sql + private static (string distanceMetric, string sorting) MapDistanceFunction(string name) => name switch + { + // A value of 0 indicates that the vectors are identical in direction (cosine similarity of 1), + // while a value of 1 indicates that the vectors are orthogonal (cosine similarity of 0). + DistanceFunction.CosineDistance => ("COSINE", "ASC"), + // A value of 0 indicates that the vectors are identical, while larger values indicate greater dissimilarity. + DistanceFunction.EuclideanDistance => ("EUCLIDEAN", "ASC"), + // A value closer to 0 indicates higher similarity, while more negative values indicate greater dissimilarity. + DistanceFunction.NegativeDotProductSimilarity => ("DOT", "DESC"), + _ => throw new NotSupportedException($"Distance function {name} is not supported.") + }; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs new file mode 100644 index 000000000000..6b81cbac1ef6 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel.Connectors.SqlServer; + +internal static class SqlServerConstants +{ + internal static readonly HashSet SupportedKeyTypes = + [ + typeof(int), // INT + typeof(long), // BIGINT + typeof(string), // VARCHAR + typeof(Guid), // UNIQUEIDENTIFIER + typeof(DateTime), // DATETIME2 + typeof(byte[]) // VARBINARY + ]; + + internal static readonly HashSet SupportedDataTypes = + [ + typeof(int), // INT + typeof(short), // SMALLINT + typeof(byte), // TINYINT + typeof(long), // BIGINT. + typeof(Guid), // UNIQUEIDENTIFIER. + typeof(string), // NVARCHAR + typeof(byte[]), // VARBINARY + typeof(bool), // BIT + typeof(DateTime), // DATETIME2 +#if NET + // We don't support mapping TimeSpan to TIME on purpose + // See https://github.com/microsoft/semantic-kernel/pull/10623#discussion_r1980350721 + typeof(TimeOnly), // TIME +#endif + typeof(decimal), // DECIMAL + typeof(double), // FLOAT + typeof(float), // REAL + ]; + + internal static readonly HashSet SupportedVectorTypes = + [ + typeof(ReadOnlyMemory), // VECTOR + typeof(ReadOnlyMemory?) 
+ ]; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs new file mode 100644 index 000000000000..3bd3b2f97e0b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs @@ -0,0 +1,110 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq.Expressions; +using System.Text; + +namespace Microsoft.SemanticKernel.Connectors.SqlServer; + +internal sealed class SqlServerFilterTranslator : SqlFilterTranslator +{ + private readonly List _parameterValues = new(); + private int _parameterIndex; + + internal SqlServerFilterTranslator( + IReadOnlyDictionary storagePropertyNames, + LambdaExpression lambdaExpression, + StringBuilder sql, + int startParamIndex) + : base(storagePropertyNames, lambdaExpression, sql) + { + this._parameterIndex = startParamIndex; + } + + internal List ParameterValues => this._parameterValues; + + protected override void TranslateConstant(object? value) + { + switch (value) + { + case bool boolValue: + this._sql.Append(boolValue ? "1" : "0"); + return; + case DateTime dateTime: + this._sql.AppendFormat("'{0:yyyy-MM-dd HH:mm:ss}'", dateTime); + return; + case DateTimeOffset dateTimeOffset: + this._sql.AppendFormat("'{0:yyy-MM-dd HH:mm:ss zzz}'", dateTimeOffset); + return; + default: + base.TranslateConstant(value); + break; + } + } + + protected override void TranslateColumn(string column, MemberExpression memberExpression, Expression? parent) + { + // "SELECT * FROM MyTable WHERE BooleanColumn;" is not supported. + // "SELECT * FROM MyTable WHERE BooleanColumn = 1;" is supported. + if (memberExpression.Type == typeof(bool) + && (parent is null // Where(x => x.Bool) + || parent is UnaryExpression { NodeType: ExpressionType.Not } // Where(x => !x.Bool) + || parent is BinaryExpression { NodeType: ExpressionType.AndAlso or ExpressionType.OrElse })) // Where(x => x.Bool && other) + { + this.TranslateBinary(Expression.Equal(memberExpression, Expression.Constant(true))); + } + else + { + this._sql.Append('[').Append(column).Append(']'); + } + } + + protected override void TranslateContainsOverArrayColumn(Expression source, Expression item, MethodCallExpression parent) + => throw new NotSupportedException("Unsupported Contains expression"); + + protected override void TranslateContainsOverCapturedArray(Expression source, Expression item, MethodCallExpression parent, object? value) + { + if (value is not IEnumerable elements) + { + throw new NotSupportedException("Unsupported Contains expression"); + } + + this.Translate(item, parent); + this._sql.Append(" IN ("); + + var isFirst = true; + foreach (var element in elements) + { + if (isFirst) + { + isFirst = false; + } + else + { + this._sql.Append(", "); + } + + this.TranslateConstant(element); + } + + this._sql.Append(')'); + } + + protected override void TranslateCapturedVariable(string name, object? capturedValue) + { + // For null values, simply inline rather than parameterize; parameterized NULLs require setting NpgsqlDbType which is a bit more complicated, + // plus in any case equality with NULL requires different SQL (x IS NULL rather than x = y) + if (capturedValue is null) + { + this._sql.Append("NULL"); + } + else + { + this._parameterValues.Add(capturedValue); + // SQL Server parameters can't start with a digit (but underscore is OK). 
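+ // For example, with startParamIndex 1 the first captured value is emitted as "@_1" and appended to
+ // ParameterValues; SelectVector later registers each entry with command.AddParameter using the same name.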
+ this._sql.Append("@_").Append(this._parameterIndex++); + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs new file mode 100644 index 000000000000..754f4380160c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs @@ -0,0 +1,67 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Threading; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.VectorData; + +namespace Microsoft.SemanticKernel.Connectors.SqlServer; + +/// +/// An implementation of backed by a SQL Server or Azure SQL database. +/// +public sealed class SqlServerVectorStore : IVectorStore, IDisposable +{ + private readonly SqlConnection _connection; + private readonly SqlServerVectorStoreOptions _options; + + /// + /// Initializes a new instance of the class. + /// + /// Database connection. + /// Optional configuration options. + public SqlServerVectorStore(SqlConnection connection, SqlServerVectorStoreOptions? options = null) + { + this._connection = connection; + // We need to create a copy, so any changes made to the option bag after + // the ctor call do not affect this instance. + this._options = options is not null + ? new() { Schema = options.Schema } + : SqlServerVectorStoreOptions.Defaults; + } + + /// + public void Dispose() => this._connection.Dispose(); + + /// + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + { + Verify.NotNull(name); + + return new SqlServerVectorStoreRecordCollection( + this._connection, + name, + new() + { + Schema = this._options.Schema, + RecordDefinition = vectorStoreRecordDefinition + }); + } + + /// + public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + { + using SqlCommand command = SqlServerCommandBuilder.SelectTableNames(this._connection, this._options.Schema); + + using SqlDataReader reader = await ExceptionWrapper.WrapAsync(this._connection, command, + static (cmd, ct) => cmd.ExecuteReaderAsync(ct), + cancellationToken, "ListCollection").ConfigureAwait(false); + + while (await ExceptionWrapper.WrapReadAsync(reader, cancellationToken, "ListCollection").ConfigureAwait(false)) + { + yield return reader.GetString(reader.GetOrdinal("table_name")); + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreOptions.cs new file mode 100644 index 000000000000..a90b474a3d5f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreOptions.cs @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Connectors.SqlServer; + +/// +/// Options for creating a . +/// +public sealed class SqlServerVectorStoreOptions +{ + internal static readonly SqlServerVectorStoreOptions Defaults = new(); + + /// + /// Gets or sets the database schema. + /// + public string? 
Schema { get; init; } = null; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs new file mode 100644 index 000000000000..d2a87a34bf6d --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -0,0 +1,369 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.VectorData; + +namespace Microsoft.SemanticKernel.Connectors.SqlServer; + +/// +/// An implementation of backed by a SQL Server or Azure SQL database. +/// +#pragma warning disable CA1711 // Identifiers should not have incorrect suffix (Collection) +public sealed class SqlServerVectorStoreRecordCollection +#pragma warning restore CA1711 + : IVectorStoreRecordCollection where TKey : notnull +{ + private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + private static readonly SqlServerVectorStoreRecordCollectionOptions s_defaultOptions = new(); + + private readonly SqlConnection _sqlConnection; + private readonly SqlServerVectorStoreRecordCollectionOptions _options; + private readonly VectorStoreRecordPropertyReader _propertyReader; + private readonly IVectorStoreRecordMapper> _mapper; + + /// + /// Initializes a new instance of the class. + /// + /// Database connection. + /// The name of the collection. + /// Optional configuration options. + public SqlServerVectorStoreRecordCollection( + SqlConnection connection, + string name, + SqlServerVectorStoreRecordCollectionOptions? options = null) + { + Verify.NotNull(connection); + Verify.NotNull(name); + + VectorStoreRecordPropertyReader propertyReader = new(typeof(TRecord), + options?.RecordDefinition, + new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + }); + + if (VectorStoreRecordPropertyVerification.IsGenericDataModel(typeof(TRecord))) + { + VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.Mapper is not null, SqlServerConstants.SupportedKeyTypes); + } + else + { + propertyReader.VerifyKeyProperties(SqlServerConstants.SupportedKeyTypes); + } + propertyReader.VerifyDataProperties(SqlServerConstants.SupportedDataTypes, supportEnumerable: false); + propertyReader.VerifyVectorProperties(SqlServerConstants.SupportedVectorTypes); + + this._sqlConnection = connection; + this.CollectionName = name; + // We need to create a copy, so any changes made to the option bag after + // the ctor call do not affect this instance. + this._options = options is null ? 
s_defaultOptions + : new() + { + Schema = options.Schema, + Mapper = options.Mapper, + RecordDefinition = options.RecordDefinition, + }; + this._propertyReader = propertyReader; + + if (options is not null && options.Mapper is not null) + { + this._mapper = options.Mapper; + } + else if (typeof(TRecord).IsGenericType && typeof(TRecord).GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) + { + this._mapper = (new GenericRecordMapper(propertyReader) as IVectorStoreRecordMapper>)!; + } + else + { + propertyReader.VerifyHasParameterlessConstructor(); + + this._mapper = new RecordMapper(propertyReader); + } + } + + /// + public string CollectionName { get; } + + /// + public async Task CollectionExistsAsync(CancellationToken cancellationToken = default) + { + using SqlCommand command = SqlServerCommandBuilder.SelectTableName( + this._sqlConnection, this._options.Schema, this.CollectionName); + + return await ExceptionWrapper.WrapAsync(this._sqlConnection, command, + static async (cmd, ct) => + { + using SqlDataReader reader = await cmd.ExecuteReaderAsync(ct).ConfigureAwait(false); + return await reader.ReadAsync(ct).ConfigureAwait(false); + }, cancellationToken, "CollectionExists", this.CollectionName).ConfigureAwait(false); + } + + /// + public Task CreateCollectionAsync(CancellationToken cancellationToken = default) + => this.CreateCollectionAsync(ifNotExists: false, cancellationToken); + + /// + public Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + => this.CreateCollectionAsync(ifNotExists: true, cancellationToken); + + private async Task CreateCollectionAsync(bool ifNotExists, CancellationToken cancellationToken) + { + foreach (var vectorProperty in this._propertyReader.VectorProperties) + { + if (vectorProperty.Dimensions is not > 0) + { + throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' must be set to a positive integer to create a collection."); + } + } + + using SqlCommand command = SqlServerCommandBuilder.CreateTable( + this._sqlConnection, + this._options.Schema, + this.CollectionName, + ifNotExists, + this._propertyReader.KeyProperty, + this._propertyReader.DataProperties, + this._propertyReader.VectorProperties); + + await ExceptionWrapper.WrapAsync(this._sqlConnection, command, + static (cmd, ct) => cmd.ExecuteNonQueryAsync(ct), + cancellationToken, "CreateCollection", this.CollectionName).ConfigureAwait(false); + } + + /// + public async Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + { + using SqlCommand command = SqlServerCommandBuilder.DropTableIfExists( + this._sqlConnection, this._options.Schema, this.CollectionName); + + await ExceptionWrapper.WrapAsync(this._sqlConnection, command, + static (cmd, ct) => cmd.ExecuteNonQueryAsync(ct), + cancellationToken, "DeleteCollection", this.CollectionName).ConfigureAwait(false); + } + + /// + public async Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) + { + Verify.NotNull(key); + + using SqlCommand command = SqlServerCommandBuilder.DeleteSingle( + this._sqlConnection, + this._options.Schema, + this.CollectionName, + this._propertyReader.KeyProperty, + key); + + await ExceptionWrapper.WrapAsync(this._sqlConnection, command, + static (cmd, ct) => cmd.ExecuteNonQueryAsync(ct), + cancellationToken, "Delete", this.CollectionName).ConfigureAwait(false); + } + + /// + public async Task DeleteBatchAsync(IEnumerable 
keys, CancellationToken cancellationToken = default) + { + Verify.NotNull(keys); + + using SqlCommand? command = SqlServerCommandBuilder.DeleteMany( + this._sqlConnection, + this._options.Schema, + this.CollectionName, + this._propertyReader.KeyProperty, + keys); + + if (command is null) + { + return; // keys is empty, there is nothing to delete + } + + await ExceptionWrapper.WrapAsync(this._sqlConnection, command, + static (cmd, ct) => cmd.ExecuteNonQueryAsync(ct), + cancellationToken, "DeleteBatch", this.CollectionName).ConfigureAwait(false); + } + + /// + public async Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + { + Verify.NotNull(key); + + bool includeVectors = options?.IncludeVectors is true; + + using SqlCommand command = SqlServerCommandBuilder.SelectSingle( + this._sqlConnection, + this._options.Schema, + this.CollectionName, + this._propertyReader.KeyProperty, + this._propertyReader.Properties, + key, + includeVectors); + + using SqlDataReader reader = await ExceptionWrapper.WrapAsync(this._sqlConnection, command, + static async (cmd, ct) => + { + SqlDataReader reader = await cmd.ExecuteReaderAsync(ct).ConfigureAwait(false); + await reader.ReadAsync(ct).ConfigureAwait(false); + return reader; + }, cancellationToken, "Get", this.CollectionName).ConfigureAwait(false); + + return reader.HasRows + ? this._mapper.MapFromStorageToDataModel( + new SqlDataReaderDictionary(reader, this._propertyReader.VectorPropertyStoragePropertyNames), + new() { IncludeVectors = includeVectors }) + : default; + } + + /// + public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(keys); + + bool includeVectors = options?.IncludeVectors is true; + + using SqlCommand? 
command = SqlServerCommandBuilder.SelectMany( + this._sqlConnection, + this._options.Schema, + this.CollectionName, + this._propertyReader.KeyProperty, + this._propertyReader.Properties, + keys, + includeVectors); + + if (command is null) + { + yield break; // keys is empty + } + + using SqlDataReader reader = await ExceptionWrapper.WrapAsync(this._sqlConnection, command, + static (cmd, ct) => cmd.ExecuteReaderAsync(ct), + cancellationToken, "GetBatch", this.CollectionName).ConfigureAwait(false); + + while (await ExceptionWrapper.WrapReadAsync(reader, cancellationToken, "GetBatch", this.CollectionName).ConfigureAwait(false)) + { + yield return this._mapper.MapFromStorageToDataModel( + new SqlDataReaderDictionary(reader, this._propertyReader.VectorPropertyStoragePropertyNames), + new() { IncludeVectors = includeVectors }); + } + } + + /// + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + { + Verify.NotNull(record); + + using SqlCommand command = SqlServerCommandBuilder.MergeIntoSingle( + this._sqlConnection, + this._options.Schema, + this.CollectionName, + this._propertyReader.KeyProperty, + this._propertyReader.Properties, + this._mapper.MapFromDataToStorageModel(record)); + + return await ExceptionWrapper.WrapAsync(this._sqlConnection, command, + async static (cmd, ct) => + { + using SqlDataReader reader = await cmd.ExecuteReaderAsync(ct).ConfigureAwait(false); + await reader.ReadAsync(ct).ConfigureAwait(false); + return reader.GetFieldValue(0); + }, cancellationToken, "Upsert", this.CollectionName).ConfigureAwait(false); + } + + /// + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(records); + + using SqlCommand? command = SqlServerCommandBuilder.MergeIntoMany( + this._sqlConnection, + this._options.Schema, + this.CollectionName, + this._propertyReader.KeyProperty, + this._propertyReader.Properties, + records.Select(record => this._mapper.MapFromDataToStorageModel(record))); + + if (command is null) + { + yield break; // records is empty + } + + using SqlDataReader reader = await ExceptionWrapper.WrapAsync(this._sqlConnection, command, + static (cmd, ct) => cmd.ExecuteReaderAsync(ct), + cancellationToken, "GetBatch", this.CollectionName).ConfigureAwait(false); + + while (await ExceptionWrapper.WrapReadAsync(reader, cancellationToken, "GetBatch", this.CollectionName).ConfigureAwait(false)) + { + yield return reader.GetFieldValue(0); + } + } + + /// + public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + { + Verify.NotNull(vector); + + if (vector is not ReadOnlyMemory allowed) + { + throw new NotSupportedException( + $"The provided vector type {vector.GetType().FullName} is not supported by the SQL Server connector. " + + $"Supported types are: {string.Join(", ", SqlServerConstants.SupportedVectorTypes.Select(l => l.FullName))}"); + } +#pragma warning disable CS0618 // Type or member is obsolete + else if (options is not null && options.OldFilter is not null) +#pragma warning restore CS0618 // Type or member is obsolete + { + throw new NotSupportedException("The obsolete Filter is not supported by the SQL Server connector, use NewFilter instead."); + } + + var searchOptions = options ?? 
s_defaultVectorSearchOptions; + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions.VectorPropertyName); + + using SqlCommand command = SqlServerCommandBuilder.SelectVector( + this._sqlConnection, + this._options.Schema, + this.CollectionName, + vectorProperty, + this._propertyReader.Properties, + this._propertyReader.StoragePropertyNamesMap, + searchOptions, + allowed); + + return await ExceptionWrapper.WrapAsync(this._sqlConnection, command, + (cmd, ct) => + { + var results = this.ReadVectorSearchResultsAsync(cmd, searchOptions.IncludeVectors, ct); + return Task.FromResult(new VectorSearchResults(results)); + }, cancellationToken, "VectorizedSearch", this.CollectionName).ConfigureAwait(false); + } + + private async IAsyncEnumerable> ReadVectorSearchResultsAsync( + SqlCommand command, + bool includeVectors, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + StorageToDataModelMapperOptions options = new() { IncludeVectors = includeVectors }; + var vectorPropertyStoragePropertyNames = includeVectors ? this._propertyReader.VectorPropertyStoragePropertyNames : []; + using SqlDataReader reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + + int scoreIndex = -1; + while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + if (scoreIndex < 0) + { + scoreIndex = reader.GetOrdinal("score"); + } + + yield return new VectorSearchResult( + this._mapper.MapFromStorageToDataModel(new SqlDataReaderDictionary(reader, vectorPropertyStoragePropertyNames), options), + reader.GetDouble(scoreIndex)); + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs new file mode 100644 index 000000000000..6b21a5e35842 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using Microsoft.Extensions.VectorData; + +namespace Microsoft.SemanticKernel.Connectors.SqlServer; + +/// +/// Options when creating a . +/// +public sealed class SqlServerVectorStoreRecordCollectionOptions +{ + /// + /// Gets or sets the database schema. + /// + public string? Schema { get; init; } + + /// + /// Gets or sets an optional custom mapper to use when converting between the data model and the SQL Server record. + /// + /// + /// If not set, the default mapper will be used. + /// + public IVectorStoreRecordMapper>? Mapper { get; init; } + + /// + /// Gets or sets an optional record definition that defines the schema of the record type. + /// + /// + /// If not provided, the schema will be inferred from the record model class using reflection. + /// In this case, the record model properties must be annotated with the appropriate attributes to indicate their usage. + /// See , and . + /// + public VectorStoreRecordDefinition? 
RecordDefinition { get; init; } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj index 2b17f3e0bbe3..fec218bfc49d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj @@ -18,6 +18,10 @@ SQLite connector for Semantic Kernel plugins and semantic memory + + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs index 2cb6b16fc8cd..963c1184d274 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs @@ -3,357 +3,77 @@ using System; using System.Collections; using System.Collections.Generic; -using System.Diagnostics; -using System.Diagnostics.CodeAnalysis; -using System.Linq; using System.Linq.Expressions; -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Text; namespace Microsoft.SemanticKernel.Connectors.Sqlite; -internal class SqliteFilterTranslator +internal sealed class SqliteFilterTranslator : SqlFilterTranslator { - private IReadOnlyDictionary _storagePropertyNames = null!; - private ParameterExpression _recordParameter = null!; - private readonly Dictionary _parameters = new(); - private readonly StringBuilder _sql = new(); - - internal (string Clause, Dictionary) Translate(IReadOnlyDictionary storagePropertyNames, LambdaExpression lambdaExpression) - { - Debug.Assert(this._sql.Length == 0); - - this._storagePropertyNames = storagePropertyNames; - - Debug.Assert(lambdaExpression.Parameters.Count == 1); - this._recordParameter = lambdaExpression.Parameters[0]; - - this.Translate(lambdaExpression.Body); - return (this._sql.ToString(), this._parameters); - } - - private void Translate(Expression? 
node) - { - switch (node) - { - case BinaryExpression binary: - this.TranslateBinary(binary); - return; - - case ConstantExpression constant: - this.TranslateConstant(constant); - return; - - case MemberExpression member: - this.TranslateMember(member); - return; - - case MethodCallExpression methodCall: - this.TranslateMethodCall(methodCall); - return; - - case UnaryExpression unary: - this.TranslateUnary(unary); - return; - - default: - throw new NotSupportedException("Unsupported NodeType in filter: " + node?.NodeType); - } - } - - private void TranslateBinary(BinaryExpression binary) + internal SqliteFilterTranslator(IReadOnlyDictionary storagePropertyNames, + LambdaExpression lambdaExpression) : base(storagePropertyNames, lambdaExpression, sql: null) { - // Special handling for null comparisons - switch (binary.NodeType) - { - case ExpressionType.Equal when IsNull(binary.Right): - this._sql.Append('('); - this.Translate(binary.Left); - this._sql.Append(" IS NULL)"); - return; - case ExpressionType.NotEqual when IsNull(binary.Right): - this._sql.Append('('); - this.Translate(binary.Left); - this._sql.Append(" IS NOT NULL)"); - return; - - case ExpressionType.Equal when IsNull(binary.Left): - this._sql.Append('('); - this.Translate(binary.Right); - this._sql.Append(" IS NULL)"); - return; - case ExpressionType.NotEqual when IsNull(binary.Left): - this._sql.Append('('); - this.Translate(binary.Right); - this._sql.Append(" IS NOT NULL)"); - return; - } - - this._sql.Append('('); - this.Translate(binary.Left); - - this._sql.Append(binary.NodeType switch - { - ExpressionType.Equal => " = ", - ExpressionType.NotEqual => " <> ", - - ExpressionType.GreaterThan => " > ", - ExpressionType.GreaterThanOrEqual => " >= ", - ExpressionType.LessThan => " < ", - ExpressionType.LessThanOrEqual => " <= ", - - ExpressionType.AndAlso => " AND ", - ExpressionType.OrElse => " OR ", - - _ => throw new NotSupportedException("Unsupported binary expression node type: " + binary.NodeType) - }); - - this.Translate(binary.Right); - this._sql.Append(')'); - - static bool IsNull(Expression expression) - => expression is ConstantExpression { Value: null } - || (TryGetCapturedValue(expression, out _, out var capturedValue) && capturedValue is null); } - private void TranslateConstant(ConstantExpression constant) - => this.GenerateLiteral(constant.Value); + internal Dictionary Parameters => this._parameters; - private void GenerateLiteral(object? value) - { - // TODO: Nullable - switch (value) - { - case byte b: - this._sql.Append(b); - return; - case short s: - this._sql.Append(s); - return; - case int i: - this._sql.Append(i); - return; - case long l: - this._sql.Append(l); - return; - - case string s: - this._sql.Append('\'').Append(s.Replace("'", "''")).Append('\''); - return; - case bool b: - this._sql.Append(b ? 
"TRUE" : "FALSE"); - return; - case Guid g: - this._sql.Append('\'').Append(g.ToString()).Append('\''); - return; - - case DateTime: - case DateTimeOffset: - throw new NotImplementedException(); - - case Array: - throw new NotImplementedException(); + // TODO: support Contains over array fields (#10343) + protected override void TranslateContainsOverArrayColumn(Expression source, Expression item, MethodCallExpression parent) + => throw new NotSupportedException("Unsupported Contains expression"); - case null: - this._sql.Append("NULL"); - return; - - default: - throw new NotSupportedException("Unsupported constant type: " + value.GetType().Name); - } - } - - private void TranslateMember(MemberExpression memberExpression) + protected override void TranslateContainsOverCapturedArray(Expression source, Expression item, MethodCallExpression parent, object? value) { - switch (memberExpression) + if (value is not IEnumerable elements) { - case var _ when this.TryGetColumn(memberExpression, out var column): - this._sql.Append('"').Append(column).Append('"'); - return; - - // Identify captured lambda variables, translate to PostgreSQL parameters ($1, $2...) - case var _ when TryGetCapturedValue(memberExpression, out var name, out var value): - // For null values, simply inline rather than parameterize; parameterized NULLs require setting NpgsqlDbType which is a bit more complicated, - // plus in any case equality with NULL requires different SQL (x IS NULL rather than x = y) - if (value is null) - { - this._sql.Append("NULL"); - } - else - { - // Duplicate parameter name, create a new parameter with a different name - // TODO: Share the same parameter when it references the same captured value - if (this._parameters.ContainsKey(name)) - { - var baseName = name; - var i = 0; - do - { - name = baseName + (i++); - } while (this._parameters.ContainsKey(name)); - } - - this._parameters.Add(name, value); - this._sql.Append('@').Append(name); - } - return; - - default: - throw new NotSupportedException($"Member access for '{memberExpression.Member.Name}' is unsupported - only member access over the filter parameter are supported"); + throw new NotSupportedException("Unsupported Contains expression"); } - } - - private void TranslateMethodCall(MethodCallExpression methodCall) - { - switch (methodCall) - { - // Enumerable.Contains() - case { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains - when contains.Method.DeclaringType == typeof(Enumerable): - this.TranslateContains(source, item); - return; - - // List.Contains() - case - { - Method: - { - Name: nameof(Enumerable.Contains), - DeclaringType: { IsGenericType: true } declaringType - }, - Object: Expression source, - Arguments: [var item] - } when declaringType.GetGenericTypeDefinition() == typeof(List<>): - this.TranslateContains(source, item); - return; - default: - throw new NotSupportedException($"Unsupported method call: {methodCall.Method.DeclaringType?.Name}.{methodCall.Method.Name}"); - } - } + this.Translate(item, parent); + this._sql.Append(" IN ("); - private void TranslateContains(Expression source, Expression item) - { - switch (source) + var isFirst = true; + foreach (var element in elements) { - // TODO: support Contains over array fields (#10343) - // Contains over array column (r => r.Strings.Contains("foo")) - case var _ when this.TryGetColumn(source, out _): - goto default; - - // Contains over inline array (r => new[] { "foo", "bar" }.Contains(r.String)) - case NewArrayExpression newArray: + if 
(isFirst) { - this.Translate(item); - this._sql.Append(" IN ("); - - var isFirst = true; - foreach (var element in newArray.Expressions) - { - if (isFirst) - { - isFirst = false; - } - else - { - this._sql.Append(", "); - } - - this.Translate(element); - } - - this._sql.Append(')'); - return; + isFirst = false; } - - // Contains over captured array (r => arrayLocalVariable.Contains(r.String)) - case var _ when TryGetCapturedValue(source, out _, out var value) && value is IEnumerable elements: + else { - this.Translate(item); - this._sql.Append(" IN ("); - - var isFirst = true; - foreach (var element in elements) - { - if (isFirst) - { - isFirst = false; - } - else - { - this._sql.Append(", "); - } - - this.GenerateLiteral(element); - } - - this._sql.Append(')'); - return; + this._sql.Append(", "); } - default: - throw new NotSupportedException("Unsupported Contains expression"); + this.TranslateConstant(element); } + + this._sql.Append(')'); } - private void TranslateUnary(UnaryExpression unary) + protected override void TranslateCapturedVariable(string name, object? capturedValue) { - switch (unary.NodeType) + // For null values, simply inline rather than parameterize; parameterized NULLs require setting NpgsqlDbType which is a bit more complicated, + // plus in any case equality with NULL requires different SQL (x IS NULL rather than x = y) + if (capturedValue is null) { - case ExpressionType.Not: - // Special handling for !(a == b) and !(a != b) - if (unary.Operand is BinaryExpression { NodeType: ExpressionType.Equal or ExpressionType.NotEqual } binary) - { - this.TranslateBinary( - Expression.MakeBinary( - binary.NodeType is ExpressionType.Equal ? ExpressionType.NotEqual : ExpressionType.Equal, - binary.Left, - binary.Right)); - return; - } - - this._sql.Append("(NOT "); - this.Translate(unary.Operand); - this._sql.Append(')'); - return; - - default: - throw new NotSupportedException("Unsupported unary expression node type: " + unary.NodeType); + this._sql.Append("NULL"); } - } - - private bool TryGetColumn(Expression expression, [NotNullWhen(true)] out string? column) - { - if (expression is MemberExpression member && member.Expression == this._recordParameter) + else { - if (!this._storagePropertyNames.TryGetValue(member.Member.Name, out column)) + // Duplicate parameter name, create a new parameter with a different name + // TODO: Share the same parameter when it references the same captured value + if (this._parameters.ContainsKey(name)) { - throw new InvalidOperationException($"Property name '{member.Member.Name}' provided as part of the filter clause is not a valid property name."); + var baseName = name; + var i = 0; + do + { + name = baseName + (i++); + } while (this._parameters.ContainsKey(name)); } - return true; - } - - column = null; - return false; - } - - private static bool TryGetCapturedValue(Expression expression, [NotNullWhen(true)] out string? name, out object? 
value) - { - if (expression is MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } - && constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) - && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true)) - { - name = fieldInfo.Name; - value = fieldInfo.GetValue(constant.Value); - return true; + this._parameters.Add(name, capturedValue); + this._sql.Append('@').Append(name); } - - name = null; - value = null; - return false; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 6cbd1a27d474..835073cf9c59 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -204,7 +204,10 @@ public virtual Task> VectorizedSearchAsync } else if (searchOptions.Filter is not null) { - (extraWhereFilter, extraParameters) = new SqliteFilterTranslator().Translate(this._propertyReader.StoragePropertyNamesMap, searchOptions.Filter); + SqliteFilterTranslator translator = new(this._propertyReader.StoragePropertyNamesMap, searchOptions.Filter); + translator.Translate(appendWhere: false); + extraWhereFilter = translator.Clause.ToString(); + extraParameters = translator.Parameters; } #pragma warning restore CS0618 // VectorSearchFilter is obsolete diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs index e1958f934c5d..60dd98f45e7a 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs @@ -76,10 +76,13 @@ public void TestBuildCreateTableCommand(bool ifNotExists) } [Theory] - [InlineData(IndexKind.Hnsw, DistanceFunction.EuclideanDistance)] - [InlineData(IndexKind.IvfFlat, DistanceFunction.DotProductSimilarity)] - [InlineData(IndexKind.Hnsw, DistanceFunction.CosineDistance)] - public void TestBuildCreateIndexCommand(string indexKind, string distanceFunction) + [InlineData(IndexKind.Hnsw, DistanceFunction.EuclideanDistance, true)] + [InlineData(IndexKind.Hnsw, DistanceFunction.EuclideanDistance, false)] + [InlineData(IndexKind.IvfFlat, DistanceFunction.DotProductSimilarity, true)] + [InlineData(IndexKind.IvfFlat, DistanceFunction.DotProductSimilarity, false)] + [InlineData(IndexKind.Hnsw, DistanceFunction.CosineDistance, true)] + [InlineData(IndexKind.Hnsw, DistanceFunction.CosineDistance, false)] + public void TestBuildCreateIndexCommand(string indexKind, string distanceFunction, bool ifNotExists) { var builder = new PostgresVectorStoreCollectionSqlBuilder(); @@ -87,15 +90,28 @@ public void TestBuildCreateIndexCommand(string indexKind, string distanceFunctio if (indexKind != IndexKind.Hnsw) { - Assert.Throws(() => builder.BuildCreateVectorIndexCommand("public", "testcollection", vectorColumn, indexKind, distanceFunction)); + Assert.Throws(() => builder.BuildCreateVectorIndexCommand("public", "testcollection", vectorColumn, indexKind, distanceFunction, ifNotExists)); + Assert.Throws(() => builder.BuildCreateVectorIndexCommand("public", "testcollection", vectorColumn, indexKind, distanceFunction, ifNotExists)); return; } - var cmdInfo = 
builder.BuildCreateVectorIndexCommand("public", "testcollection", vectorColumn, indexKind, distanceFunction); + var cmdInfo = builder.BuildCreateVectorIndexCommand("public", "1testcollection", vectorColumn, indexKind, distanceFunction, ifNotExists); // Check for expected properties; integration tests will validate the actual SQL. Assert.Contains("CREATE INDEX ", cmdInfo.CommandText); - Assert.Contains("ON public.\"testcollection\" USING hnsw (\"embedding1\" ", cmdInfo.CommandText); + // Make sure ifNotExists is respected + if (ifNotExists) + { + Assert.Contains("CREATE INDEX IF NOT EXISTS", cmdInfo.CommandText); + } + else + { + Assert.DoesNotContain("CREATE INDEX IF NOT EXISTS", cmdInfo.CommandText); + } + // Make sure the name is escaped, so names starting with a digit are OK. + Assert.Contains($"\"1testcollection_{vectorColumn}_index\"", cmdInfo.CommandText); + + Assert.Contains("ON public.\"1testcollection\" USING hnsw (\"embedding1\" ", cmdInfo.CommandText); if (distanceFunction == null) { // Check for distance function defaults to cosine distance diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordKeyProperty.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordKeyProperty.cs index 4973d6e637cb..81b6f0124c30 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordKeyProperty.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordKeyProperty.cs @@ -30,4 +30,9 @@ public VectorStoreRecordKeyProperty(VectorStoreRecordKeyProperty source) : base(source) { } + + /// + /// Gets or sets a value indicating whether the key should be auto-generated by the vector store. + /// + public bool AutoGenerate { get; init; } } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs index 54db3c74ca21..7976184f8ebf 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs @@ -10,6 +10,8 @@ namespace Microsoft.Extensions.VectorData; /// public class VectorSearchOptions { + private int _top = 3, _skip = 0; + /// /// Gets or sets a search filter to use before doing the vector search. /// @@ -34,12 +36,38 @@ public class VectorSearchOptions /// /// Gets or sets the maximum number of results to return. /// - public int Top { get; init; } = 3; + /// Thrown when the value is less than 1. + public int Top + { + get => this._top; + init + { + if (value < 1) + { + throw new ArgumentOutOfRangeException(nameof(value), "Top must be greater than or equal to 1."); + } + + this._top = value; + } + } /// /// Gets or sets the number of results to skip before returning results, that is, the index of the first result to return. /// - public int Skip { get; init; } = 0; + /// Thrown when the value is less than 0. + public int Skip + { + get => this._skip; + init + { + if (value < 0) + { + throw new ArgumentOutOfRangeException(nameof(value), "Skip must be greater than or equal to 0."); + } + + this._skip = value; + } + } /// /// Gets or sets a value indicating whether to include vectors in the retrieval result. 
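The hunk above introduces range validation for VectorSearchOptions.Top and VectorSearchOptions.Skip. A minimal usage sketch, not part of the patch and assuming only the members shown in the hunk, to illustrate the new behavior:

    using Microsoft.Extensions.VectorData;

    // Valid: Top >= 1 and Skip >= 0.
    var options = new VectorSearchOptions { Top = 5, Skip = 10 };

    // Out-of-range values now fail at object initialization:
    // new VectorSearchOptions { Top = 0 };    // throws ArgumentOutOfRangeException
    // new VectorSearchOptions { Skip = -1 };  // throws ArgumentOutOfRangeException
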
diff --git a/dotnet/src/IntegrationTests/IntegrationTests.csproj b/dotnet/src/IntegrationTests/IntegrationTests.csproj index 06b2e839116b..6a75257b503a 100644 --- a/dotnet/src/IntegrationTests/IntegrationTests.csproj +++ b/dotnet/src/IntegrationTests/IntegrationTests.csproj @@ -41,6 +41,7 @@ + @@ -89,7 +90,6 @@ - diff --git a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyVerification.cs b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyVerification.cs index b85800e6a244..08337bd0f138 100644 --- a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyVerification.cs +++ b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyVerification.cs @@ -191,6 +191,9 @@ var enumerableType when GetGenericEnumerableInterface(enumerableType) is Type en return null; } + internal static bool IsGenericDataModel(Type recordType) + => recordType.IsGenericType && recordType.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>); + /// /// Checks that if the provided is a that the key type is supported by the default mappers. /// If not supported, a custom mapper must be supplied, otherwise an exception is thrown. @@ -202,7 +205,7 @@ var enumerableType when GetGenericEnumerableInterface(enumerableType) is Type en public static void VerifyGenericDataModelKeyType(Type recordType, bool customMapperSupplied, IEnumerable allowedKeyTypes) { // If we are not dealing with a generic data model, no need to check anything else. - if (!recordType.IsGenericType || recordType.GetGenericTypeDefinition() != typeof(VectorStoreGenericDataModel<>)) + if (!IsGenericDataModel(recordType)) { return; } diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresBatchConformanceTests.cs new file mode 100644 index 000000000000..b798bab8e437 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresBatchConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using PostgresIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace PostgresIntegrationTests.CRUD; + +public class PostgresBatchConformanceTests(PostgresFixture fixture) + : BatchConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresGenericDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresGenericDataModelConformanceTests.cs new file mode 100644 index 000000000000..1a72c1b59e01 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresGenericDataModelConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using PostgresIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace PostgresIntegrationTests.CRUD; + +public class PostgresGenericDataModelConformanceTests(PostgresFixture fixture) + : GenericDataModelConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresFixture.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresFixture.cs new file mode 100644 index 000000000000..6c8ce87ad984 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresFixture.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Support; + +namespace PostgresIntegrationTests.Support; + +public class PostgresFixture : VectorStoreFixture +{ + public override TestStore TestStore => PostgresTestStore.Instance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/VectorSearch/PostgresVectorSearchDistanceFunctionComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/VectorSearch/PostgresVectorSearchDistanceFunctionComplianceTests.cs new file mode 100644 index 000000000000..97767626c5cf --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/VectorSearch/PostgresVectorSearchDistanceFunctionComplianceTests.cs @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft. All rights reserved. + +using PostgresIntegrationTests.Support; +using VectorDataSpecificationTests.VectorSearch; +using Xunit; + +namespace PostgresIntegrationTests.VectorSearch; + +public class PostgresVectorSearchDistanceFunctionComplianceTests(PostgresFixture fixture) : VectorSearchDistanceFunctionComplianceTests(fixture), IClassFixture +{ + public override Task EuclideanSquaredDistance() => Assert.ThrowsAsync(base.EuclideanSquaredDistance); + + public override Task Hamming() => Assert.ThrowsAsync(base.Hamming); + + public override Task NegativeDotProductSimilarity() => Assert.ThrowsAsync(base.NegativeDotProductSimilarity); +} diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/VectorSearch/PostgresVectorSearchDistanceFunctionComplianceTests_Hnsw.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/VectorSearch/PostgresVectorSearchDistanceFunctionComplianceTests_Hnsw.cs new file mode 100644 index 000000000000..2daf5cc958c2 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/VectorSearch/PostgresVectorSearchDistanceFunctionComplianceTests_Hnsw.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. + +using PostgresIntegrationTests.Support; + +namespace PostgresIntegrationTests.VectorSearch; + +public class PostgresVectorSearchDistanceFunctionComplianceTests_Hnsw(PostgresFixture fixture) : PostgresVectorSearchDistanceFunctionComplianceTests(fixture) +{ + protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.Hnsw; +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs new file mode 100644 index 000000000000..1e8ee17dd6f4 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using SqlServerIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace SqlServerIntegrationTests.CRUD; + +public class SqlServerBatchConformanceTests(SqlServerFixture fixture) + : BatchConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerGenericDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerGenericDataModelConformanceTests.cs new file mode 100644 index 000000000000..5b98a7d46a11 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerGenericDataModelConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using SqlServerIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace SqlServerIntegrationTests.CRUD; + +public class SqlServerGenericDataModelConformanceTests(SqlServerFixture fixture) + : GenericDataModelConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicFilterTests.cs new file mode 100644 index 000000000000..3bae6cc48552 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicFilterTests.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using SqlServerIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using Xunit; +using Xunit.Sdk; + +namespace SqlServerIntegrationTests.Filter; + +public class SqlServerBasicFilterTests(SqlServerBasicFilterTests.Fixture fixture) + : BasicFilterTests(fixture), IClassFixture +{ + public override async Task Not_over_Or() + { + // Test sends: WHERE (NOT (("Int" = 8) OR ("String" = 'foo'))) + // There's a NULL string in the database, and relational null semantics in conjunction with negation makes the default implementation fail. 
+ await Assert.ThrowsAsync(() => base.Not_over_Or()); + + // Compensate by adding a null check: + await this.TestFilterAsync(r => r.String != null && !(r.Int == 8 || r.String == "foo")); + } + + public override async Task NotEqual_with_string() + { + // As above, null semantics + negation + await Assert.ThrowsAsync(() => base.NotEqual_with_string()); + + await this.TestFilterAsync(r => r.String != null && r.String != "foo"); + } + + public override Task Contains_over_field_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_array()); + + public override Task Contains_over_field_string_List() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_List()); + + [Fact(Skip = "Not supported")] + [Obsolete("Legacy filters are not supported")] + public override Task Legacy_And() => throw new NotSupportedException(); + + [Fact(Skip = "Not supported")] + [Obsolete("Legacy filters are not supported")] + public override Task Legacy_AnyTagEqualTo_array() => throw new NotSupportedException(); + + [Fact(Skip = "Not supported")] + [Obsolete("Legacy filters are not supported")] + public override Task Legacy_AnyTagEqualTo_List() => throw new NotSupportedException(); + + [Fact(Skip = "Not supported")] + [Obsolete("Legacy filters are not supported")] + public override Task Legacy_equality() => throw new NotSupportedException(); + + public new class Fixture : BasicFilterTests.Fixture + { + private static readonly string s_uniqueName = Guid.NewGuid().ToString(); + + public override TestStore TestStore => SqlServerTestStore.Instance; + + protected override string CollectionName => s_uniqueName; + + // Override to remove the string collection properties, which aren't (currently) supported on SqlServer + protected override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(string[]) && p.PropertyType != typeof(List)).ToList() + }; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Properties/AssemblyAttributes.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Properties/AssemblyAttributes.cs new file mode 100644 index 000000000000..8f36e2be3f06 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Properties/AssemblyAttributes.cs @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft. All rights reserved. + +[assembly: SqlServerIntegrationTests.Support.SqlServerConnectionStringRequiredAttribute] diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs new file mode 100644 index 000000000000..0d421b6ba314 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs @@ -0,0 +1,367 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Text; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.SqlServer; +using Xunit; + +namespace SqlServerIntegrationTests; + +public class SqlServerCommandBuilderTests +{ + [Theory] + [InlineData("schema", "name", "[schema].[name]")] + [InlineData(null, "name", "[name]")] + [InlineData("schema", "[brackets]", "[schema].[[brackets]]]")] + [InlineData(null, "[needsEscaping]", "[[needsEscaping]]]")] + [InlineData("needs]escaping", "[brackets]", "[needs]]escaping].[[brackets]]]")] + public void AppendTableName(string? schema, string table, string expectedFullName) + { + StringBuilder result = new(); + + SqlServerCommandBuilder.AppendTableName(result, schema, table); + + Assert.Equal(expectedFullName, result.ToString()); + } + + [Theory] + [InlineData("name", "@name_")] // typical name + [InlineData("na me", "@na_")] // contains a whitespace, an illegal parameter name character + [InlineData("123", "@_")] // starts with a digit, also not allowed + [InlineData("ĄŻŚĆ_doesNotStartWithAscii", "@_")] // starts with a non-ASCII character + public void AppendParameterName(string propertyName, string expectedPrefix) + { + StringBuilder builder = new(); + StringBuilder expectedBuilder = new(); + VectorStoreRecordKeyProperty keyProperty = new(propertyName, typeof(string)); + + int paramIndex = 0; // we need a dedicated variable to ensure that AppendParameterName increments the index + for (int i = 0; i < 10; i++) + { + Assert.Equal(paramIndex, i); + SqlServerCommandBuilder.AppendParameterName(builder, keyProperty, ref paramIndex, out string parameterName); + Assert.Equal($"{expectedPrefix}{i}", parameterName); + expectedBuilder.Append(parameterName); + } + + Assert.Equal(expectedBuilder.ToString(), builder.ToString()); + } + + [Theory] + [InlineData("schema", "simpleName", "[simpleName]")] + [InlineData("schema", "[needsEscaping]", "[[needsEscaping]]]")] + public void DropTable(string schema, string table, string expectedTable) + { + using SqlConnection connection = CreateConnection(); + + using SqlCommand command = SqlServerCommandBuilder.DropTableIfExists(connection, schema, table); + + Assert.Equal($"DROP TABLE IF EXISTS [{schema}].{expectedTable}", command.CommandText); + } + + [Theory] + [InlineData("schema", "simpleName")] + [InlineData("schema", "[needsEscaping]")] + public void SelectTableName(string schema, string table) + { + using SqlConnection connection = CreateConnection(); + + using SqlCommand command = SqlServerCommandBuilder.SelectTableName(connection, schema, table); + + Assert.Equal( + """ + SELECT TABLE_NAME + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_TYPE = 'BASE TABLE' + AND (@schema is NULL or TABLE_SCHEMA = @schema) + AND TABLE_NAME = @tableName + """ + , command.CommandText); + Assert.Equal(schema, command.Parameters[0].Value); + Assert.Equal(table, command.Parameters[1].Value); + } + + [Fact] + public void SelectTableNames() + { + const string SchemaName = "theSchemaName"; + using SqlConnection connection = CreateConnection(); + + using SqlCommand command = SqlServerCommandBuilder.SelectTableNames(connection, SchemaName); + + Assert.Equal( + """ + SELECT TABLE_NAME + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_TYPE = 'BASE TABLE' + AND (@schema is NULL or TABLE_SCHEMA = @schema) + """ + , command.CommandText); + Assert.Equal(SchemaName, command.Parameters[0].Value); + Assert.Equal("@schema", command.Parameters[0].ParameterName); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public 
void CreateTable(bool ifNotExists) + { + VectorStoreRecordKeyProperty keyProperty = new("id", typeof(long)); + VectorStoreRecordDataProperty[] dataProperties = + [ + new VectorStoreRecordDataProperty("simpleName", typeof(string)), + new VectorStoreRecordDataProperty("with space", typeof(int)) + ]; + VectorStoreRecordVectorProperty[] vectorProperties = + [ + new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) + { + Dimensions = 10 + } + ]; + using SqlConnection connection = CreateConnection(); + + using SqlCommand command = SqlServerCommandBuilder.CreateTable(connection, "schema", "table", + ifNotExists, keyProperty, dataProperties, vectorProperties); + + string expectedCommand = + """ + BEGIN + CREATE TABLE [schema].[table] ( + [id] BIGINT NOT NULL, + [simpleName] NVARCHAR(255), + [with space] INT, + [embedding] VECTOR(10), + PRIMARY KEY ([id]) + ); + END; + """; + if (ifNotExists) + { + expectedCommand = "IF OBJECT_ID(N'[schema].[table]', N'U') IS NULL" + Environment.NewLine + expectedCommand; + } + + AssertEqualIgnoreNewLines(expectedCommand, command.CommandText); + } + + [Fact] + public void MergeIntoSingle() + { + VectorStoreRecordKeyProperty keyProperty = new("id", typeof(long)); + VectorStoreRecordProperty[] properties = + [ + keyProperty, + new VectorStoreRecordDataProperty("simpleString", typeof(string)), + new VectorStoreRecordDataProperty("simpleInt", typeof(int)), + new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) + { + Dimensions = 10 + } + ]; + + using SqlConnection connection = CreateConnection(); + using SqlCommand command = SqlServerCommandBuilder.MergeIntoSingle(connection, "schema", "table", + keyProperty, properties, + new Dictionary + { + { "id", null }, + { "simpleString", "nameValue" }, + { "simpleInt", 134 }, + { "embedding", "{ 10.0 }" } + }); + + string expectedCommand = + """" + MERGE INTO [schema].[table] AS t + USING (VALUES (@id_0,@simpleString_1,@simpleInt_2,@embedding_3)) AS s ([id],[simpleString],[simpleInt],[embedding]) + ON (t.[id] = s.[id]) + WHEN MATCHED THEN + UPDATE SET t.[simpleString] = s.[simpleString],t.[simpleInt] = s.[simpleInt],t.[embedding] = s.[embedding] + WHEN NOT MATCHED THEN + INSERT ([id],[simpleString],[simpleInt],[embedding]) + VALUES (s.[id],s.[simpleString],s.[simpleInt],s.[embedding]) + OUTPUT inserted.[id]; + """"; + + AssertEqualIgnoreNewLines(expectedCommand, command.CommandText); + Assert.Equal("@id_0", command.Parameters[0].ParameterName); + Assert.Equal(DBNull.Value, command.Parameters[0].Value); + Assert.Equal("@simpleString_1", command.Parameters[1].ParameterName); + Assert.Equal("nameValue", command.Parameters[1].Value); + Assert.Equal("@simpleInt_2", command.Parameters[2].ParameterName); + Assert.Equal(134, command.Parameters[2].Value); + Assert.Equal("@embedding_3", command.Parameters[3].ParameterName); + Assert.Equal("{ 10.0 }", command.Parameters[3].Value); + } + + [Fact] + public void MergeIntoMany() + { + VectorStoreRecordKeyProperty keyProperty = new("id", typeof(long)); + VectorStoreRecordProperty[] properties = + [ + keyProperty, + new VectorStoreRecordDataProperty("simpleString", typeof(string)), + new VectorStoreRecordDataProperty("simpleInt", typeof(int)), + new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) + { + Dimensions = 10 + } + ]; + Dictionary[] records = + [ + new Dictionary + { + { "id", 0L }, + { "simpleString", "nameValue0" }, + { "simpleInt", 134 }, + { "embedding", "{ 10.0 }" } + }, + new Dictionary + { + { "id", 1L }, + { 
"simpleString", "nameValue1" }, + { "simpleInt", 135 }, + { "embedding", "{ 11.0 }" } + } + ]; + + using SqlConnection connection = CreateConnection(); + using SqlCommand command = SqlServerCommandBuilder.MergeIntoMany(connection, "schema", "table", + keyProperty, properties, records)!; + + string expectedCommand = + """" + DECLARE @InsertedKeys TABLE (KeyColumn BIGINT); + MERGE INTO [schema].[table] AS t + USING (VALUES + (@id_0,@simpleString_1,@simpleInt_2,@embedding_3), + (@id_4,@simpleString_5,@simpleInt_6,@embedding_7)) AS s ([id],[simpleString],[simpleInt],[embedding]) + ON (t.[id] = s.[id]) + WHEN MATCHED THEN + UPDATE SET t.[simpleString] = s.[simpleString],t.[simpleInt] = s.[simpleInt],t.[embedding] = s.[embedding] + WHEN NOT MATCHED THEN + INSERT ([id],[simpleString],[simpleInt],[embedding]) + VALUES (s.[id],s.[simpleString],s.[simpleInt],s.[embedding]) + OUTPUT inserted.[id] INTO @InsertedKeys (KeyColumn); + SELECT KeyColumn FROM @InsertedKeys; + """"; + + AssertEqualIgnoreNewLines(expectedCommand, command.CommandText); + + for (int i = 0; i < records.Length; i++) + { + Assert.Equal($"@id_{4 * i + 0}", command.Parameters[4 * i + 0].ParameterName); + Assert.Equal((long)i, command.Parameters[4 * i + 0].Value); + Assert.Equal($"@simpleString_{4 * i + 1}", command.Parameters[4 * i + 1].ParameterName); + Assert.Equal($"nameValue{i}", command.Parameters[4 * i + 1].Value); + Assert.Equal($"@simpleInt_{4 * i + 2}", command.Parameters[4 * i + 2].ParameterName); + Assert.Equal(134 + i, command.Parameters[4 * i + 2].Value); + Assert.Equal($"@embedding_{4 * i + 3}", command.Parameters[4 * i + 3].ParameterName); + Assert.Equal($"{{ 1{i}.0 }}", command.Parameters[4 * i + 3].Value); + } + } + + [Fact] + public void DeleteSingle() + { + VectorStoreRecordKeyProperty keyProperty = new("id", typeof(long)); + using SqlConnection connection = CreateConnection(); + + using SqlCommand command = SqlServerCommandBuilder.DeleteSingle(connection, + "schema", "tableName", keyProperty, 123L); + + Assert.Equal("DELETE FROM [schema].[tableName] WHERE [id] = @id_0", command.CommandText); + Assert.Equal(123L, command.Parameters[0].Value); + Assert.Equal("@id_0", command.Parameters[0].ParameterName); + } + + [Fact] + public void DeleteMany() + { + string[] keys = ["key1", "key2"]; + VectorStoreRecordKeyProperty keyProperty = new("id", typeof(string)); + using SqlConnection connection = CreateConnection(); + + using SqlCommand command = SqlServerCommandBuilder.DeleteMany(connection, + "schema", "tableName", keyProperty, keys)!; + + Assert.Equal("DELETE FROM [schema].[tableName] WHERE [id] IN (@id_0,@id_1)", command.CommandText); + for (int i = 0; i < keys.Length; i++) + { + Assert.Equal(keys[i], command.Parameters[i].Value); + Assert.Equal($"@id_{i}", command.Parameters[i].ParameterName); + } + } + + [Fact] + public void SelectSingle() + { + VectorStoreRecordKeyProperty keyProperty = new("id", typeof(long)); + VectorStoreRecordProperty[] properties = [ + keyProperty, + new VectorStoreRecordDataProperty("name", typeof(string)), + new VectorStoreRecordDataProperty("age", typeof(int)), + new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) + { + Dimensions = 10 + } + ]; + using SqlConnection connection = CreateConnection(); + + using SqlCommand command = SqlServerCommandBuilder.SelectSingle(connection, + "schema", "tableName", keyProperty, properties, 123L, includeVectors: true); + + AssertEqualIgnoreNewLines( + """"" + SELECT [id],[name],[age],[embedding] + FROM [schema].[tableName] + WHERE [id] 
= @id_0 + """"", command.CommandText); + Assert.Equal(123L, command.Parameters[0].Value); + Assert.Equal("@id_0", command.Parameters[0].ParameterName); + } + + [Fact] + public void SelectMany() + { + VectorStoreRecordKeyProperty keyProperty = new("id", typeof(long)); + VectorStoreRecordProperty[] properties = [ + keyProperty, + new VectorStoreRecordDataProperty("name", typeof(string)), + new VectorStoreRecordDataProperty("age", typeof(int)), + new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) + { + Dimensions = 10 + } + ]; + long[] keys = [123L, 456L, 789L]; + using SqlConnection connection = CreateConnection(); + + using SqlCommand command = SqlServerCommandBuilder.SelectMany(connection, + "schema", "tableName", keyProperty, properties, keys, includeVectors: true)!; + + AssertEqualIgnoreNewLines( + """"" + SELECT [id],[name],[age],[embedding] + FROM [schema].[tableName] + WHERE [id] IN (@id_0,@id_1,@id_2) + """"", command.CommandText); + for (int i = 0; i < keys.Length; i++) + { + Assert.Equal(keys[i], command.Parameters[i].Value); + Assert.Equal($"@id_{i}", command.Parameters[i].ParameterName); + } + } + + // This repo is configured with eol=lf, so the expected string should always use \n + // as long given IDE does not use \r\n. + // The actual string may use \r\n, so we just normalize both. + private static void AssertEqualIgnoreNewLines(string expected, string actual) + => Assert.Equal(expected.Replace("\r\n", "\n"), actual.Replace("\r\n", "\n")); + + // We create a connection using a fake connection string just to be able to create the SqlCommand. + private static SqlConnection CreateConnection() + => new("Server=localhost;Database=master;Integrated Security=True;"); +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj new file mode 100644 index 000000000000..4752d82818dc --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj @@ -0,0 +1,47 @@ + + + + net8.0;net472 + enable + enable + + false + true + + $(NoWarn);CA2007,SKEXP0001,SKEXP0020,VSTHRD111;CS1685 + b7762d10-e29b-4bb1-8b74-b6d69a667dd4 + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + + + + + + + + Always + + + Always + + + + diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/SqlServer/SqlServerMemoryStoreTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerMemoryStoreTests.cs similarity index 94% rename from dotnet/src/IntegrationTests/Connectors/Memory/SqlServer/SqlServerMemoryStoreTests.cs rename to dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerMemoryStoreTests.cs index 32c0f6742546..23e714ff60bd 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/SqlServer/SqlServerMemoryStoreTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerMemoryStoreTests.cs @@ -1,9 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; using Microsoft.Data.SqlClient; using Microsoft.Extensions.Configuration; using Microsoft.SemanticKernel.Connectors.SqlServer; @@ -36,14 +32,8 @@ public async Task InitializeAsync() .AddUserSecrets() .Build(); - var connectionString = configuration["SqlServer:ConnectionString"]; - - if (string.IsNullOrWhiteSpace(connectionString)) - { - throw new ArgumentException("SqlServer memory connection string is not configured."); - } - - this._connectionString = connectionString; + this._connectionString = configuration["SqlServer:ConnectionString"] + ?? throw new ArgumentException("SqlServer memory connection string is not configured."); await this.CleanupDatabaseAsync(); await this.InitializeDatabaseAsync(); @@ -79,7 +69,7 @@ public async Task GetCollectionsAsync() await this.Store.CreateCollectionAsync("collection1"); await this.Store.CreateCollectionAsync("collection2"); - var collections = await this.Store.GetCollectionsAsync().ToListAsync(); + var collections = await this.Store.GetCollectionsAsync().ToArrayAsync(); Assert.Contains("collection1", collections); Assert.Contains("collection2", collections); } @@ -212,8 +202,8 @@ public async Task GetNearestMatchesAsync(bool withEmbeddings) await this.Store.CreateCollectionAsync(DefaultCollectionName); await this.InsertSampleDataAsync(); - List<(MemoryRecord Record, double SimilarityScore)> results = - await this.Store.GetNearestMatchesAsync(DefaultCollectionName, new[] { 5f, 6f, 7f, 8f, 9f }, limit: 2, withEmbeddings: withEmbeddings).ToListAsync(); + (MemoryRecord Record, double SimilarityScore)[] results = + await this.Store.GetNearestMatchesAsync(DefaultCollectionName, new[] { 5f, 6f, 7f, 8f, 9f }, limit: 2, withEmbeddings: withEmbeddings).ToArrayAsync(); Assert.All(results, t => Assert.True(t.SimilarityScore > 0)); @@ -254,13 +244,13 @@ public async Task GetNearestMatchesWithMinRelevanceScoreAsync() await this.Store.CreateCollectionAsync(DefaultCollectionName); await this.InsertSampleDataAsync(); - List<(MemoryRecord Record, double SimilarityScore)> results = - await this.Store.GetNearestMatchesAsync(DefaultCollectionName, new[] { 5f, 6f, 7f, 8f, 9f }, limit: 2).ToListAsync(); + (MemoryRecord Record, double SimilarityScore)[] results = + await this.Store.GetNearestMatchesAsync(DefaultCollectionName, new[] { 5f, 6f, 7f, 8f, 9f }, limit: 2).ToArrayAsync(); var firstId = results[0].Record.Metadata.Id; var firstSimilarityScore = results[0].SimilarityScore; - results = await this.Store.GetNearestMatchesAsync(DefaultCollectionName, new[] { 5f, 6f, 7f, 8f, 9f }, limit: 2, minRelevanceScore: firstSimilarityScore + 0.0001).ToListAsync(); + results = await this.Store.GetNearestMatchesAsync(DefaultCollectionName, new[] { 5f, 6f, 7f, 8f, 9f }, limit: 2, minRelevanceScore: firstSimilarityScore + 0.0001).ToArrayAsync(); Assert.DoesNotContain(firstId, results.Select(r => r.Record.Metadata.Id)); } @@ -324,18 +314,29 @@ private async Task> InsertSampleDataAsync() private async Task InitializeDatabaseAsync() { +#if NET // IAsyncDisposable is not present in Full Framework await using var connection = new SqlConnection(this._connectionString); - await connection.OpenAsync(); await using var cmd = connection.CreateCommand(); +#else + using var connection = new SqlConnection(this._connectionString); + using var cmd = connection.CreateCommand(); +#endif + + await connection.OpenAsync(); cmd.CommandText = $"CREATE SCHEMA {SchemaName}"; await 
cmd.ExecuteNonQueryAsync(); } private async Task CleanupDatabaseAsync() { +#if NET await using var connection = new SqlConnection(this._connectionString); - await connection.OpenAsync(); await using var cmd = connection.CreateCommand(); +#else + using var connection = new SqlConnection(this._connectionString); + using var cmd = connection.CreateCommand(); +#endif + await connection.OpenAsync(); cmd.CommandText = $""" DECLARE tables_cursor CURSOR FOR SELECT table_name diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs new file mode 100644 index 000000000000..bb658f486c87 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs @@ -0,0 +1,503 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.SqlServer; +using SqlServerIntegrationTests.Support; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace SqlServerIntegrationTests; + +public class SqlServerVectorStoreTests(SqlServerFixture fixture) : IClassFixture +{ + // this test may be once executed by multiple users against a shared db instance + private static string GetUniqueCollectionName() => Guid.NewGuid().ToString(); + + [ConditionalFact] + public async Task CollectionCRUD() + { + string collectionName = GetUniqueCollectionName(); + var testStore = fixture.TestStore; + var collection = testStore.DefaultVectorStore.GetCollection(collectionName); + + try + { + Assert.False(await collection.CollectionExistsAsync()); + + Assert.False(await testStore.DefaultVectorStore.ListCollectionNamesAsync().ContainsAsync(collectionName)); + + await collection.CreateCollectionAsync(); + + Assert.True(await collection.CollectionExistsAsync()); + Assert.True(await testStore.DefaultVectorStore.ListCollectionNamesAsync().ContainsAsync(collectionName)); + + await collection.CreateCollectionIfNotExistsAsync(); + + Assert.True(await collection.CollectionExistsAsync()); + + await collection.DeleteCollectionAsync(); + + Assert.False(await collection.CollectionExistsAsync()); + Assert.False(await testStore.DefaultVectorStore.ListCollectionNamesAsync().ContainsAsync(collectionName)); + } + finally + { + await collection.DeleteCollectionAsync(); + } + } + + [ConditionalFact] + public async Task RecordCRUD() + { + string collectionName = GetUniqueCollectionName(); + var testStore = fixture.TestStore; + var collection = testStore.DefaultVectorStore.GetCollection(collectionName); + + try + { + await collection.CreateCollectionIfNotExistsAsync(); + + TestModel inserted = new() + { + Id = "MyId", + Number = 100, + Floats = Enumerable.Range(0, 10).Select(i => (float)i).ToArray() + }; + string key = await collection.UpsertAsync(inserted); + Assert.Equal(inserted.Id, key); + + TestModel? 
received = await collection.GetAsync(inserted.Id, new() { IncludeVectors = true }); + AssertEquality(inserted, received); + + TestModel updated = new() + { + Id = inserted.Id, + Number = inserted.Number + 200, // change one property + Floats = inserted.Floats + }; + key = await collection.UpsertAsync(updated); + Assert.Equal(inserted.Id, key); + + received = await collection.GetAsync(updated.Id, new() { IncludeVectors = true }); + AssertEquality(updated, received); + + VectorSearchResult vectorSearchResult = await (await collection.VectorizedSearchAsync(inserted.Floats, new() + { + VectorPropertyName = nameof(TestModel.Floats), + IncludeVectors = true + })).Results.SingleAsync(); + AssertEquality(updated, vectorSearchResult.Record); + + vectorSearchResult = await (await collection.VectorizedSearchAsync(inserted.Floats, new() + { + VectorPropertyName = nameof(TestModel.Floats), + IncludeVectors = false + })).Results.SingleAsync(); + // Make sure the vectors are not included in the result. + Assert.Equal(0, vectorSearchResult.Record.Floats.Length); + + await collection.DeleteAsync(inserted.Id); + + Assert.Null(await collection.GetAsync(inserted.Id)); + } + finally + { + await collection.DeleteCollectionAsync(); + } + } + + [ConditionalFact] + public async Task WrongModels() + { + string collectionName = GetUniqueCollectionName(); + var testStore = fixture.TestStore; + var collection = testStore.DefaultVectorStore.GetCollection(collectionName); + + try + { + await collection.CreateCollectionIfNotExistsAsync(); + + TestModel inserted = new() + { + Id = "MyId", + Text = "NotAnInt", + Number = 100, + Floats = Enumerable.Range(0, 10).Select(i => (float)i).ToArray() + }; + Assert.Equal(inserted.Id, await collection.UpsertAsync(inserted)); + + // Let's use a model with different storage names to trigger an SQL exception + // which should be mapped to VectorStoreOperationException. + var differentNamesCollection = testStore.DefaultVectorStore.GetCollection(collectionName); + VectorStoreOperationException operationEx = await Assert.ThrowsAsync(() => differentNamesCollection.GetAsync(inserted.Id)); + Assert.IsType(operationEx.InnerException); + + // Let's use a model with the same storage names, but different types + // to trigger a mapping exception (casting a string to an int). + var sameNameDifferentModelCollection = testStore.DefaultVectorStore.GetCollection(collectionName); + VectorStoreRecordMappingException mappingEx = await Assert.ThrowsAsync(() => sameNameDifferentModelCollection.GetAsync(inserted.Id)); + Assert.IsType(mappingEx.InnerException); + + // Let's use a model with the same storage names, but different types + // to trigger a mapping exception (deserializing a string to Memory). 
+ var invalidJsonCollection = testStore.DefaultVectorStore.GetCollection(collectionName); + await Assert.ThrowsAsync(() => invalidJsonCollection.GetAsync(inserted.Id, new() { IncludeVectors = true })); + } + finally + { + await collection.DeleteCollectionAsync(); + } + } + + [ConditionalFact] + public async Task CustomMapper() + { + string collectionName = GetUniqueCollectionName(); + TestModelMapper mapper = new(); + SqlServerVectorStoreRecordCollectionOptions options = new() + { + Mapper = mapper + }; + using SqlConnection connection = new(SqlServerTestEnvironment.ConnectionString); + SqlServerVectorStoreRecordCollection collection = new(connection, collectionName, options); + + try + { + await collection.CreateCollectionIfNotExistsAsync(); + + TestModel inserted = new() + { + Id = "MyId", + Number = 100, + Floats = Enumerable.Range(0, 10).Select(i => (float)i).ToArray() + }; + string key = await collection.UpsertAsync(inserted); + Assert.Equal(inserted.Id, key); + Assert.True(mapper.MapFromDataToStorageModel_WasCalled); + Assert.False(mapper.MapFromStorageToDataModel_WasCalled); + + TestModel? received = await collection.GetAsync(inserted.Id, new() { IncludeVectors = true }); + AssertEquality(inserted, received); + Assert.True(mapper.MapFromStorageToDataModel_WasCalled); + + TestModel updated = new() + { + Id = inserted.Id, + Number = inserted.Number + 200, // change one property + Floats = inserted.Floats + }; + key = await collection.UpsertAsync(updated); + Assert.Equal(inserted.Id, key); + + received = await collection.GetAsync(updated.Id, new() { IncludeVectors = true }); + AssertEquality(updated, received); + + await collection.DeleteAsync(inserted.Id); + + Assert.Null(await collection.GetAsync(inserted.Id)); + } + finally + { + await collection.DeleteCollectionAsync(); + } + } + + [ConditionalFact] + public async Task BatchCRUD() + { + string collectionName = GetUniqueCollectionName(); + var testStore = fixture.TestStore; + var collection = testStore.DefaultVectorStore.GetCollection(collectionName); + + try + { + await collection.CreateCollectionIfNotExistsAsync(); + + TestModel[] inserted = Enumerable.Range(0, 10).Select(i => new TestModel() + { + Id = $"MyId{i}", + Number = 100 + i, + Floats = Enumerable.Range(0, 10).Select(j => (float)(i + j)).ToArray() + }).ToArray(); + + string[] keys = await collection.UpsertBatchAsync(inserted).ToArrayAsync(); + for (int i = 0; i < inserted.Length; i++) + { + Assert.Equal(inserted[i].Id, keys[i]); + } + + TestModel[] received = await collection.GetBatchAsync(keys, new() { IncludeVectors = true }).ToArrayAsync(); + for (int i = 0; i < inserted.Length; i++) + { + AssertEquality(inserted[i], received[i]); + } + + TestModel[] updated = inserted.Select(i => new TestModel() + { + Id = i.Id, + Number = i.Number + 200, // change one property + Floats = i.Floats + }).ToArray(); + + keys = await collection.UpsertBatchAsync(updated).ToArrayAsync(); + for (int i = 0; i < updated.Length; i++) + { + Assert.Equal(updated[i].Id, keys[i]); + } + + received = await collection.GetBatchAsync(keys, new() { IncludeVectors = true }).ToArrayAsync(); + for (int i = 0; i < updated.Length; i++) + { + AssertEquality(updated[i], received[i]); + } + + await collection.DeleteBatchAsync(keys); + + Assert.False(await collection.GetBatchAsync(keys).AnyAsync()); + } + finally + { + await collection.DeleteCollectionAsync(); + } + } + + private static void AssertEquality(TestModel inserted, TestModel? 
received) + { + Assert.NotNull(received); + Assert.Equal(inserted.Number, received.Number); + Assert.Equal(inserted.Id, received.Id); + Assert.Equal(inserted.Floats.ToArray(), received.Floats.ToArray()); + Assert.Null(received.Text); // testing DBNull code path + } + + public sealed class TestModel + { + [VectorStoreRecordKey(StoragePropertyName = "key")] + public string? Id { get; set; } + + [VectorStoreRecordData(StoragePropertyName = "text")] + public string? Text { get; set; } + + [VectorStoreRecordData(StoragePropertyName = "column")] + public int Number { get; set; } + + [VectorStoreRecordVector(Dimensions: 10, StoragePropertyName = "embedding")] + public ReadOnlyMemory Floats { get; set; } + } + + public sealed class SameStorageNameButDifferentType + { + [VectorStoreRecordKey(StoragePropertyName = "key")] + public string? Id { get; set; } + + [VectorStoreRecordData(StoragePropertyName = "text")] + public int Number { get; set; } + } + + public sealed class SameStorageNameButInvalidVector + { + [VectorStoreRecordKey(StoragePropertyName = "key")] + public string? Id { get; set; } + + [VectorStoreRecordVector(Dimensions: 10, StoragePropertyName = "text")] + public ReadOnlyMemory Floats { get; set; } + } + + public sealed class DifferentStorageNames + { + [VectorStoreRecordKey(StoragePropertyName = "key")] + public string? Id { get; set; } + + [VectorStoreRecordData(StoragePropertyName = "text2")] + public string? Text { get; set; } + + [VectorStoreRecordData(StoragePropertyName = "column2")] + public int Number { get; set; } + + [VectorStoreRecordVector(Dimensions: 10, StoragePropertyName = "embedding2")] + public ReadOnlyMemory Floats { get; set; } + } + +#if NETFRAMEWORK + [ConditionalFact] + public void TimeSpanIsNotSupported() + { + string collectionName = GetUniqueCollectionName(); + var testStore = fixture.TestStore; + + Assert.Throws(() => testStore.DefaultVectorStore.GetCollection(collectionName)); + } +#else + [ConditionalFact] + public async Task TimeOnlyIsSupported() + { + string collectionName = GetUniqueCollectionName(); + var testStore = fixture.TestStore; + + var collection = testStore.DefaultVectorStore.GetCollection(collectionName); + + try + { + await collection.CreateCollectionIfNotExistsAsync(); + + TimeModel inserted = new() + { + Id = "MyId", + Time = new TimeOnly(12, 34, 56) + }; + string key = await collection.UpsertAsync(inserted); + Assert.Equal(inserted.Id, key); + + TimeModel? received = await collection.GetAsync(inserted.Id, new() { IncludeVectors = true }); + Assert.NotNull(received); + Assert.Equal(inserted.Time, received.Time); + } + finally + { + await collection.DeleteCollectionAsync(); + } + } +#endif + + public sealed class TimeModel + { + [VectorStoreRecordKey(StoragePropertyName = "key")] + public string? 
Id { get; set; } + + [VectorStoreRecordData(StoragePropertyName = "time")] +#if NETFRAMEWORK + public TimeSpan Time { get; set; } +#else + public TimeOnly Time { get; set; } +#endif + } + + [ConditionalFact] + public Task CanUseFancyModels_Int() => this.CanUseFancyModels(); + + [ConditionalFact] + public Task CanUseFancyModels_Long() => this.CanUseFancyModels(); + + [ConditionalFact] + public Task CanUseFancyModels_Guid() => this.CanUseFancyModels(); + + private async Task CanUseFancyModels() where TKey : notnull + { + string collectionName = GetUniqueCollectionName(); + var testStore = fixture.TestStore; + var collection = testStore.DefaultVectorStore.GetCollection>(collectionName); + + try + { + await collection.CreateCollectionIfNotExistsAsync(); + + FancyTestModel inserted = new() + { + Id = testStore.GenerateKey(1), + Number8 = byte.MaxValue, + Number16 = short.MaxValue, + Number32 = int.MaxValue, + Number64 = long.MaxValue, + Floats = Enumerable.Range(0, 10).Select(i => (float)i).ToArray(), + Bytes = [1, 2, 3], + }; + TKey key = await collection.UpsertAsync(inserted); + Assert.NotEqual(default, key); + + FancyTestModel? received = await collection.GetAsync(key, new() { IncludeVectors = true }); + AssertEquality(inserted, received, key); + + FancyTestModel updated = new() + { + Id = key, + Number16 = short.MinValue, // change one property + Floats = inserted.Floats + }; + key = await collection.UpsertAsync(updated); + Assert.Equal(updated.Id, key); + + received = await collection.GetAsync(updated.Id, new() { IncludeVectors = true }); + AssertEquality(updated, received, key); + + await collection.DeleteAsync(key); + + Assert.Null(await collection.GetAsync(key)); + } + finally + { + await collection.DeleteCollectionAsync(); + } + + void AssertEquality(FancyTestModel expected, FancyTestModel? received, TKey expectedKey) + { + Assert.NotNull(received); + Assert.Equal(expectedKey, received.Id); + Assert.Equal(expected.Number8, received.Number8); + Assert.Equal(expected.Number16, received.Number16); + Assert.Equal(expected.Number32, received.Number32); + Assert.Equal(expected.Number64, received.Number64); + Assert.Equal(expected.Floats.ToArray(), received.Floats.ToArray()); + Assert.Equal(expected.Bytes, received.Bytes); + } + } + + public sealed class FancyTestModel + { + [VectorStoreRecordKey(StoragePropertyName = "key")] + public TKey? Id { get; set; } + + [VectorStoreRecordData(StoragePropertyName = "byte")] + public byte Number8 { get; set; } + + [VectorStoreRecordData(StoragePropertyName = "short")] + public short Number16 { get; set; } + + [VectorStoreRecordData(StoragePropertyName = "int")] + public int Number32 { get; set; } + + [VectorStoreRecordData(StoragePropertyName = "long")] + public long Number64 { get; set; } + + [VectorStoreRecordData(StoragePropertyName = "bytes")] +#pragma warning disable CA1819 // Properties should not return arrays + public byte[]? 
Bytes { get; set; } +#pragma warning restore CA1819 // Properties should not return arrays + + [VectorStoreRecordVector(Dimensions: 10, StoragePropertyName = "embedding")] + public ReadOnlyMemory Floats { get; set; } + } + + private sealed class TestModelMapper : IVectorStoreRecordMapper> + { + internal bool MapFromDataToStorageModel_WasCalled { get; set; } + internal bool MapFromStorageToDataModel_WasCalled { get; set; } + + public IDictionary MapFromDataToStorageModel(TestModel dataModel) + { + this.MapFromDataToStorageModel_WasCalled = true; + + return new Dictionary() + { + { "key", dataModel.Id }, + { "text", dataModel.Text }, + { "column", dataModel.Number }, + // Please note that we are not dealing with JSON directly here. + { "embedding", dataModel.Floats } + }; + } + + public TestModel MapFromStorageToDataModel(IDictionary storageModel, StorageToDataModelMapperOptions options) + { + this.MapFromStorageToDataModel_WasCalled = true; + + return new() + { + Id = (string)storageModel["key"]!, + Text = (string?)storageModel["text"], + Number = (int)storageModel["column"]!, + Floats = (ReadOnlyMemory)storageModel["embedding"]! + }; + } + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerConnectionStringRequiredAttribute.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerConnectionStringRequiredAttribute.cs new file mode 100644 index 000000000000..80885df9e18c --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerConnectionStringRequiredAttribute.cs @@ -0,0 +1,18 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Xunit; + +namespace SqlServerIntegrationTests.Support; + +/// +/// Checks whether the connection string for Sql Server is provided, and skips the test(s) otherwise. +/// +[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class | AttributeTargets.Assembly)] +public sealed class SqlServerConnectionStringRequiredAttribute : Attribute, ITestCondition +{ + public ValueTask IsMetAsync() => new(SqlServerTestEnvironment.IsConnectionStringDefined); + + public string Skip { get; set; } = "ConnectionString is not configured, set SqlServer:ConnectionString."; + + public string SkipReason => this.Skip; +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerFixture.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerFixture.cs new file mode 100644 index 000000000000..dabf7b40609e --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerFixture.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Support; + +namespace SqlServerIntegrationTests.Support; + +public class SqlServerFixture : VectorStoreFixture +{ + public override TestStore TestStore => SqlServerTestStore.Instance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestEnvironment.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestEnvironment.cs new file mode 100644 index 000000000000..043f4882e640 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestEnvironment.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel.Connectors.SqlServer; + +namespace SqlServerIntegrationTests.Support; + +internal static class SqlServerTestEnvironment +{ + public static readonly string? ConnectionString = GetConnectionString(); + + public static bool IsConnectionStringDefined => !string.IsNullOrEmpty(ConnectionString); + + private static string? GetConnectionString() + { + var configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true) + .AddJsonFile(path: "testsettings.development.json", optional: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + return configuration.GetSection("SqlServer")["ConnectionString"]; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestStore.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestStore.cs new file mode 100644 index 000000000000..93a329b2438a --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestStore.cs @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.SqlServer; +using VectorDataSpecificationTests.Support; + +namespace SqlServerIntegrationTests.Support; + +public sealed class SqlServerTestStore : TestStore, IDisposable +{ + public static readonly SqlServerTestStore Instance = new(); + + public override IVectorStore DefaultVectorStore + => this._connectedStore ?? throw new InvalidOperationException("Not initialized"); + + public override string DefaultDistanceFunction => DistanceFunction.CosineDistance; + + private SqlServerVectorStore? _connectedStore; + + protected override async Task StartAsync() + { + if (string.IsNullOrWhiteSpace(SqlServerTestEnvironment.ConnectionString)) + { + throw new InvalidOperationException("Connection string is not configured, set the SqlServer:ConnectionString environment variable"); + } + +#pragma warning disable CA2000 // Dispose objects before losing scope + SqlConnection connection = new(SqlServerTestEnvironment.ConnectionString); +#pragma warning restore CA2000 // Dispose objects before losing scope + await connection.OpenAsync(); + + this._connectedStore = new(connection); + } + + public void Dispose() => this._connectedStore?.Dispose(); +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/VectorSearch/SqlServerVectorSearchDistanceFunctionComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/VectorSearch/SqlServerVectorSearchDistanceFunctionComplianceTests.cs new file mode 100644 index 000000000000..b1564100eb84 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/VectorSearch/SqlServerVectorSearchDistanceFunctionComplianceTests.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using SqlServerIntegrationTests.Support; +using VectorDataSpecificationTests.VectorSearch; +using Xunit; + +namespace SqlServerIntegrationTests.VectorSearch; + +public class SqlServerVectorSearchDistanceFunctionComplianceTests(SqlServerFixture fixture) + : VectorSearchDistanceFunctionComplianceTests(fixture), IClassFixture +{ + public override Task CosineSimilarity() => Assert.ThrowsAsync(base.CosineSimilarity); + + public override Task DotProductSimilarity() => Assert.ThrowsAsync(base.DotProductSimilarity); + + public override Task EuclideanSquaredDistance() => Assert.ThrowsAsync(base.EuclideanSquaredDistance); + + public override Task Hamming() => Assert.ThrowsAsync(base.Hamming); + + public override Task ManhattanDistance() => Assert.ThrowsAsync(base.ManhattanDistance); +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/VectorSearch/SqlServerVectorSearchDistanceFunctionComplianceTests_Hnsw.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/VectorSearch/SqlServerVectorSearchDistanceFunctionComplianceTests_Hnsw.cs new file mode 100644 index 000000000000..fe771d73278f --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/VectorSearch/SqlServerVectorSearchDistanceFunctionComplianceTests_Hnsw.cs @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft. All rights reserved. + +using SqlServerIntegrationTests.Support; +using Xunit; + +namespace SqlServerIntegrationTests.VectorSearch; + +public class SqlServerVectorSearchDistanceFunctionComplianceTests_Hnsw(SqlServerFixture fixture) + : SqlServerVectorSearchDistanceFunctionComplianceTests(fixture) +{ + // Creating such a collection is not supported. + protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.Hnsw; + + public override async Task CosineDistance() + { + NotSupportedException ex = await Assert.ThrowsAsync(() => base.CosineDistance()); + Assert.Equal($"Index kind {this.IndexKind} is not supported.", ex.Message); + } + + public override async Task EuclideanDistance() + { + NotSupportedException ex = await Assert.ThrowsAsync(() => base.EuclideanDistance()); + Assert.Equal($"Index kind {this.IndexKind} is not supported.", ex.Message); + } + + public override async Task NegativeDotProductSimilarity() + { + NotSupportedException ex = await Assert.ThrowsAsync(() => base.NegativeDotProductSimilarity()); + Assert.Equal($"Index kind {this.IndexKind} is not supported.", ex.Message); + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs new file mode 100644 index 000000000000..ace837591a74 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using VectorDataSpecificationTests.Models; +using VectorDataSpecificationTests.Support; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace VectorDataSpecificationTests.CRUD; + +public abstract class BatchConformanceTests(VectorStoreFixture fixture) + : ConformanceTestsBase>(fixture) where TKey : notnull +{ + [ConditionalFact] + public async Task UpsertBatchAsync_EmptyBatch_DoesNotThrow() + { + await this.ExecuteAsync(async collection => + { + Assert.Empty(await collection.UpsertBatchAsync([]).ToArrayAsync()); + }); + } + + [ConditionalFact] + public async Task DeleteBatchAsync_EmptyBatch_DoesNotThrow() + { + await this.ExecuteAsync(async collection => + { + await collection.DeleteBatchAsync([]); + }); + } + + [ConditionalFact] + public async Task GetBatchAsync_EmptyBatch_DoesNotThrow() + { + await this.ExecuteAsync(async collection => + { + Assert.Empty(await collection.GetBatchAsync([]).ToArrayAsync()); + }); + } + + [ConditionalFact] + public async Task UpsertBatchAsync_NullBatch_ThrowsArgumentNullException() + { + await this.ExecuteAsync(async collection => + { + ArgumentNullException ex = await Assert.ThrowsAsync(() => collection.UpsertBatchAsync(records: null!).ToArrayAsync().AsTask()); + Assert.Equal("records", ex.ParamName); + }); + } + + [ConditionalFact] + public async Task DeleteBatchAsync_NullKeys_ThrowsArgumentNullException() + { + await this.ExecuteAsync(async collection => + { + ArgumentNullException ex = await Assert.ThrowsAsync(() => collection.DeleteBatchAsync(keys: null!)); + Assert.Equal("keys", ex.ParamName); + }); + } + + [ConditionalFact] + public async Task GetBatchAsync_NullKeys_ThrowsArgumentNullException() + { + await this.ExecuteAsync(async collection => + { + ArgumentNullException ex = await Assert.ThrowsAsync(() => collection.GetBatchAsync(keys: null!).ToArrayAsync().AsTask()); + Assert.Equal("keys", ex.ParamName); + }); + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/ConformanceTestsBase.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/ConformanceTestsBase.cs new file mode 100644 index 000000000000..21a6c95f8986 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/ConformanceTestsBase.cs @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using VectorDataSpecificationTests.Support; + +namespace VectorDataSpecificationTests.CRUD; + +// TKey is a generic parameter because different connectors support different key types. +public abstract class ConformanceTestsBase(VectorStoreFixture fixture) where TKey : notnull +{ + protected VectorStoreFixture Fixture { get; } = fixture; + + protected virtual string GetUniqueCollectionName() => Guid.NewGuid().ToString(); + + protected virtual VectorStoreRecordDefinition? 
GetRecordDefinition() => null; + + protected async Task ExecuteAsync(Func, Task> test) + { + string collectionName = this.GetUniqueCollectionName(); + var collection = this.Fixture.TestStore.DefaultVectorStore.GetCollection(collectionName, + this.GetRecordDefinition()); + + await collection.CreateCollectionAsync(); + + try + { + await test(collection); + } + finally + { + await collection.DeleteCollectionAsync(); + } + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/GenericDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/GenericDataModelConformanceTests.cs new file mode 100644 index 000000000000..91ac166aafd4 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/GenericDataModelConformanceTests.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using VectorDataSpecificationTests.Support; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace VectorDataSpecificationTests.CRUD; + +public abstract class GenericDataModelConformanceTests(VectorStoreFixture fixture) + : ConformanceTestsBase>(fixture) where TKey : notnull +{ + private const string KeyPropertyName = "key"; + private const string StringPropertyName = "text"; + private const string IntegerPropertyName = "integer"; + private const string EmbeddingPropertyName = "embedding"; + private const int DimensionCount = 10; + + protected override VectorStoreRecordDefinition? GetRecordDefinition() + => new() + { + Properties = + [ + new VectorStoreRecordKeyProperty(KeyPropertyName, typeof(TKey)), + new VectorStoreRecordDataProperty(StringPropertyName, typeof(string)), + new VectorStoreRecordDataProperty(IntegerPropertyName, typeof(int)), + new VectorStoreRecordVectorProperty(EmbeddingPropertyName, typeof(ReadOnlyMemory)) + { + Dimensions = DimensionCount + } + ] + }; + + [ConditionalFact] + public async Task CanInsertUpdateAndDelete() + { + await this.ExecuteAsync(async collection => + { + VectorStoreGenericDataModel inserted = new(key: this.Fixture.GenerateNextKey()); + inserted.Data.Add(StringPropertyName, "some"); + inserted.Data.Add(IntegerPropertyName, 123); + inserted.Vectors.Add(EmbeddingPropertyName, new ReadOnlyMemory(Enumerable.Repeat(0.1f, DimensionCount).ToArray())); + + TKey key = await collection.UpsertAsync(inserted); + Assert.Equal(inserted.Key, key); + + VectorStoreGenericDataModel? 
received = await collection.GetAsync(key, new() { IncludeVectors = true }); + Assert.NotNull(received); + + Assert.Equal(received.Key, key); + foreach (var pair in inserted.Data) + { + Assert.Equal(pair.Value, received.Data[pair.Key]); + } + + Assert.Equal( + ((ReadOnlyMemory)inserted.Vectors[EmbeddingPropertyName]!).ToArray(), + ((ReadOnlyMemory)received.Vectors[EmbeddingPropertyName]!).ToArray()); + + await collection.DeleteAsync(key); + + received = await collection.GetAsync(key); + Assert.Null(received); + }); + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs index 0f87d2ae7c5d..dd03c1b1bda7 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs @@ -73,6 +73,14 @@ public virtual Task NotEqual_with_null_captured() public virtual Task Bool() => this.TestFilterAsync(r => r.Bool); + [ConditionalFact] + public virtual Task Bool_And_Bool() + => this.TestFilterAsync(r => r.Bool && r.Bool); + + [ConditionalFact] + public virtual Task Bool_Or_Not_Bool() + => this.TestFilterAsync(r => r.Bool || !r.Bool, expectAllResults: true); + #endregion Equality #region Comparison @@ -139,6 +147,10 @@ public virtual Task Not_over_Or() public virtual Task Not_over_bool() => this.TestFilterAsync(r => !r.Bool); + [ConditionalFact] + public virtual Task Not_over_bool_And_Comparison() + => this.TestFilterAsync(r => !r.Bool && r.Int != int.MaxValue); + #endregion Logical operators #region Contains diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Models/SimpleModel.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Models/SimpleModel.cs new file mode 100644 index 000000000000..0646f0fe2f1f --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Models/SimpleModel.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; + +namespace VectorDataSpecificationTests.Models; + +/// +/// This class represents bare minimum that each connector should support: +/// a key, int, string and an embedding. +/// +/// TKey is a generic parameter because different connectors support different key types. +public sealed class SimpleModel +{ + [VectorStoreRecordKey(StoragePropertyName = "key")] + public TKey? Id { get; set; } + + [VectorStoreRecordData(StoragePropertyName = "text")] + public string? Text { get; set; } + + [VectorStoreRecordData(StoragePropertyName = "number")] + public int Number { get; set; } + + [VectorStoreRecordVector(Dimensions: 10, StoragePropertyName = "embedding")] + public ReadOnlyMemory Floats { get; set; } +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs new file mode 100644 index 000000000000..285c93c23e92 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs @@ -0,0 +1,155 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.Extensions.VectorData; +using VectorDataSpecificationTests.Support; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace VectorDataSpecificationTests.VectorSearch; + +public abstract class VectorSearchDistanceFunctionComplianceTests(VectorStoreFixture fixture) + where TKey : notnull +{ + [ConditionalFact] + public virtual Task CosineDistance() + => this.SimpleSearch(DistanceFunction.CosineDistance, 0, 2, 1, [0, 2, 1]); + + [ConditionalFact] + public virtual Task CosineSimilarity() + => this.SimpleSearch(DistanceFunction.CosineSimilarity, 1, -1, 0, [0, 2, 1]); + + [ConditionalFact] + public virtual Task DotProductSimilarity() + => this.SimpleSearch(DistanceFunction.DotProductSimilarity, 1, -1, 0, [0, 2, 1]); + + [ConditionalFact] + public virtual Task NegativeDotProductSimilarity() + => this.SimpleSearch(DistanceFunction.NegativeDotProductSimilarity, -1, 1, 0, [1, 2, 0]); + + [ConditionalFact] + public virtual Task EuclideanDistance() + => this.SimpleSearch(DistanceFunction.EuclideanDistance, 0, 2, 1.73, [0, 2, 1]); + + [ConditionalFact] + public virtual Task EuclideanSquaredDistance() + => this.SimpleSearch(DistanceFunction.EuclideanSquaredDistance, 0, 4, 3, [0, 2, 1]); + + [ConditionalFact] + public virtual Task Hamming() + => this.SimpleSearch(DistanceFunction.Hamming, 0, 1, 3, [0, 1, 2]); + + [ConditionalFact] + public virtual Task ManhattanDistance() + => this.SimpleSearch(DistanceFunction.ManhattanDistance, 0, 2, 3, [0, 1, 2]); + + protected virtual string? IndexKind => null; + + protected async Task SimpleSearch(string distanceFunction, double expectedExactMatchScore, + double expectedOppositeScore, double expectedOrthogonalScore, int[] resultOrder) + { + ReadOnlyMemory baseVector = new([1, 0, 0, 0]); + ReadOnlyMemory oppositeVector = new([-1, 0, 0, 0]); + ReadOnlyMemory orthogonalVector = new([0f, -1f, -1f, 0f]); + + double[] scoreDictionary = [expectedExactMatchScore, expectedOppositeScore, expectedOrthogonalScore]; + + List records = + [ + new() + { + Key = fixture.GenerateNextKey(), + Int = 8, + Vector = baseVector, + }, + new() + { + Key = fixture.GenerateNextKey(), + Int = 9, + String = "bar", + Vector = oppositeVector, + }, + new() + { + Key = fixture.GenerateNextKey(), + Int = 9, + String = "foo", + Vector = orthogonalVector, + } + ]; + + // The record definition describes the distance function, + // so we need a dedicated collection per test. 
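        // Illustrative note (added for clarity, not part of the original patch): the expected scores
        // passed to SimpleSearch follow directly from the three vectors defined above,
        // base = [1,0,0,0], opposite = [-1,0,0,0], orthogonal = [0,-1,-1,0]. For example:
        //   CosineDistance = 1 - cos(angle): 0 for base vs. base, 2 for base vs. opposite, 1 for base vs. orthogonal.
        //   EuclideanDistance(base, orthogonal) = sqrt(1^2 + 1^2 + 1^2) = sqrt(3) ~= 1.73.
        //   Hamming(base, orthogonal) = 3, since the vectors differ in three components.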
+ string uniqueCollectionName = Guid.NewGuid().ToString(); + var collection = fixture.TestStore.DefaultVectorStore.GetCollection( + uniqueCollectionName, this.GetRecordDefinition(distanceFunction)); + + await collection.CreateCollectionAsync(); + + await collection.CreateCollectionIfNotExistsAsync(); // just to make sure it's idempotent + + try + { + await collection.UpsertBatchAsync(records).ToArrayAsync(); + + var searchResult = await collection.VectorizedSearchAsync(baseVector); + var results = await searchResult.Results.ToListAsync(); + VerifySearchResults(resultOrder, scoreDictionary, records, results, includeVectors: false); + + searchResult = await collection.VectorizedSearchAsync(baseVector, new() { IncludeVectors = true }); + results = await searchResult.Results.ToListAsync(); + VerifySearchResults(resultOrder, scoreDictionary, records, results, includeVectors: true); + } + finally + { + await collection.DeleteCollectionAsync(); + } + + static void VerifySearchResults(int[] resultOrder, double[] scoreDictionary, List records, + List> results, bool includeVectors) + { + Assert.Equal(records.Count, results.Count); + for (int i = 0; i < results.Count; i++) + { + Assert.Equal(records[resultOrder[i]].Key, results[i].Record.Key); + Assert.Equal(records[resultOrder[i]].Int, results[i].Record.Int); + Assert.Equal(records[resultOrder[i]].String, results[i].Record.String); + Assert.Equal(Math.Round(scoreDictionary[resultOrder[i]], 2), Math.Round(results[i].Score!.Value, 2)); + + if (includeVectors) + { + Assert.Equal(records[resultOrder[i]].Vector.ToArray(), results[i].Record.Vector.ToArray()); + } + else + { + Assert.Equal(0, results[i].Record.Vector.Length); + } + } + } + } + + private VectorStoreRecordDefinition GetRecordDefinition(string distanceFunction) + => new() + { + Properties = + [ + new VectorStoreRecordKeyProperty(nameof(SearchRecord.Key), typeof(TKey)), + new VectorStoreRecordVectorProperty(nameof(SearchRecord.Vector), typeof(ReadOnlyMemory)) + { + Dimensions = 4, + DistanceFunction = distanceFunction, + IndexKind = this.IndexKind + }, + new VectorStoreRecordDataProperty(nameof(SearchRecord.Int), typeof(int)) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(SearchRecord.String), typeof(string)) { IsFilterable = true }, + ] + }; + + public class SearchRecord + { + public TKey Key { get; set; } = default!; + public ReadOnlyMemory Vector { get; set; } + + public int Int { get; set; } + public string? 
String { get; set; } + } +} From 864b707892150d1c00af5a181c5c2381870a9719 Mon Sep 17 00:00:00 2001 From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com> Date: Fri, 7 Mar 2025 09:36:53 -0800 Subject: [PATCH 10/63] Enable integration tests on Windows --- .github/workflows/dotnet-build-and-test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/dotnet-build-and-test.yml b/.github/workflows/dotnet-build-and-test.yml index dde2fad80b39..11ee27b0651a 100644 --- a/.github/workflows/dotnet-build-and-test.yml +++ b/.github/workflows/dotnet-build-and-test.yml @@ -56,13 +56,13 @@ jobs: include: - { dotnet: "8.0", - os: "ubuntu-latest", + os: "windows-latest", configuration: Release, integration-tests: true, environment: "integration", } - { dotnet: "8.0", os: "windows-latest", configuration: Debug } - - { dotnet: "8.0", os: "windows-latest", configuration: Release } + - { dotnet: "8.0", os: "ubuntu-latest", configuration: Release } runs-on: ${{ matrix.os }} environment: ${{ matrix.environment }} From fd53ae8f182605a107999e9654742f0bb1a514de Mon Sep 17 00:00:00 2001 From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com> Date: Fri, 7 Mar 2025 10:06:25 -0800 Subject: [PATCH 11/63] Revert "Enable integration tests on Windows" This reverts commit 864b707892150d1c00af5a181c5c2381870a9719. --- .github/workflows/dotnet-build-and-test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/dotnet-build-and-test.yml b/.github/workflows/dotnet-build-and-test.yml index 11ee27b0651a..dde2fad80b39 100644 --- a/.github/workflows/dotnet-build-and-test.yml +++ b/.github/workflows/dotnet-build-and-test.yml @@ -56,13 +56,13 @@ jobs: include: - { dotnet: "8.0", - os: "windows-latest", + os: "ubuntu-latest", configuration: Release, integration-tests: true, environment: "integration", } - { dotnet: "8.0", os: "windows-latest", configuration: Debug } - - { dotnet: "8.0", os: "ubuntu-latest", configuration: Release } + - { dotnet: "8.0", os: "windows-latest", configuration: Release } runs-on: ${{ matrix.os }} environment: ${{ matrix.environment }} From a14ad8c831549be0204593e324714f6ab56773b4 Mon Sep 17 00:00:00 2001 From: Adam Sitnik Date: Fri, 7 Mar 2025 19:15:45 +0100 Subject: [PATCH 12/63] .Net MEVD: Strongly-typed property selectors (#10850) fixes #10420 --------- Co-authored-by: westey <164392973+westey-m@users.noreply.github.com> --- .../VectorStore_VectorSearch_MultiVector.cs | 4 +- ...ISearchVectorStoreRecordCollectionTests.cs | 4 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 12 +- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 2 +- ...nMemoryVectorStoreRecordCollectionTests.cs | 2 +- ...zureAISearchVectorStoreRecordCollection.cs | 8 +- ...mosDBMongoDBVectorStoreRecordCollection.cs | 2 +- ...osmosDBNoSQLVectorStoreRecordCollection.cs | 6 +- .../InMemoryVectorStoreRecordCollection.cs | 2 +- .../MongoDBVectorStoreRecordCollection.cs | 6 +- .../PostgresVectorStoreRecordCollection.cs | 2 +- .../QdrantVectorStoreRecordCollection.cs | 6 +- ...RedisHashSetVectorStoreRecordCollection.cs | 5 +- .../RedisJsonVectorStoreRecordCollection.cs | 5 +- ...RedisVectorStoreCollectionSearchMapping.cs | 55 +------ .../SqlServerVectorStoreRecordCollection.cs | 2 +- .../SqliteVectorStoreRecordCollection.cs | 2 +- .../WeaviateVectorStoreRecordCollection.cs | 6 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 12 +- ...disJsonVectorStoreRecordCollectionTests.cs | 2 +- ...VectorStoreCollectionSearchMappingTests.cs | 48 +------ 
...rStoreRecordCollectionQueryBuilderTests.cs | 5 - ...eaviateVectorStoreRecordCollectionTests.cs | 2 +- .../VectorSearch/HybridSearchOptions.cs | 17 +-- .../VectorSearch/VectorSearchOptions.cs | 11 ++ ...ISearchVectorStoreRecordCollectionTests.cs | 4 +- .../BaseKeywordVectorizedHybridSearchTests.cs | 4 +- .../Data/VectorStoreRecordPropertyReader.cs | 134 ++++++++++++++---- .../VectorStoreRecordPropertyReaderTests.cs | 107 ++++++++++++-- .../SqlServerVectorStoreTests.cs | 4 +- 30 files changed, 295 insertions(+), 186 deletions(-) diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs index 84818be8b5ba..645a1040c115 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs @@ -58,7 +58,7 @@ public async Task VectorSearchWithMultiVectorRecordAsync() searchVector, new() { Top = 1, - VectorPropertyName = nameof(Product.DescriptionEmbedding) + VectorProperty = r => r.DescriptionEmbedding }); var resultRecords = await searchResult.Results.ToListAsync(); @@ -75,7 +75,7 @@ public async Task VectorSearchWithMultiVectorRecordAsync() new() { Top = 1, - VectorPropertyName = nameof(Product.FeatureListEmbedding) + VectorProperty = r => r.FeatureListEmbedding }); resultRecords = await searchResult.Results.ToListAsync(); diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs index b919f00dc1fd..b786c8d8fa58 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -575,7 +575,7 @@ public async Task CanSearchWithVectorAndFilterAsync() Top = 5, Skip = 3, OldFilter = filter, - VectorPropertyName = nameof(MultiPropsModel.Vector1) + VectorProperty = record => record.Vector1 }, this._testCancellationToken); @@ -617,7 +617,7 @@ public async Task CanSearchWithTextAndFilterAsync() Top = 5, Skip = 3, OldFilter = filter, - VectorPropertyName = nameof(MultiPropsModel.Vector1) + VectorProperty = record => record.Vector1 }, this._testCancellationToken); diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 155f34334096..6ef6337e3426 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; @@ -615,10 +616,17 @@ public async Task VectorizedSearchUsesValidQueryAsync( this._mockMongoDatabase.Object, "collection"); + Expression>? 
vectorSelector = vectorPropertyName switch + { + "TestEmbedding1" => record => record.TestEmbedding1, + "TestEmbedding2" => record => record.TestEmbedding2, + _ => null + }; + // Act var actual = await sut.VectorizedSearchAsync(vector, new() { - VectorPropertyName = vectorPropertyName, + VectorProperty = vectorSelector, Top = actualTop, }); @@ -642,7 +650,7 @@ public async Task VectorizedSearchThrowsExceptionWithNonExistentVectorPropertyNa this._mockMongoDatabase.Object, "collection"); - var options = new MEVD.VectorSearchOptions { VectorPropertyName = "non-existent-property" }; + var options = new MEVD.VectorSearchOptions { VectorProperty = r => "non-existent-property" }; // Act & Assert await Assert.ThrowsAsync(async () => await (await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), options)).Results.FirstOrDefaultAsync()); diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 24e4a2083f0b..6f33a19e0b28 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -612,7 +612,7 @@ public async Task VectorizedSearchWithNonExistentVectorPropertyNameThrowsExcepti this._mockDatabase.Object, "collection"); - var searchOptions = new VectorSearchOptions { VectorPropertyName = "non-existent-property" }; + var searchOptions = new VectorSearchOptions { VectorProperty = r => "non-existent-property" }; // Act & Assert await Assert.ThrowsAsync(async () => diff --git a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs index d48730696fce..b6ac78086915 100644 --- a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs @@ -508,7 +508,7 @@ public async Task ItCanSearchUsingTheGenericDataModelAsync(TKey testKey1, // Act var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory([1, 1, 1, 1]), - new() { IncludeVectors = true, VectorPropertyName = "Vector" }, + new() { IncludeVectors = true, VectorProperty = r => r.Vectors["Vector"] }, this._testCancellationToken); // Assert diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index 11541e5843dd..5f99cbb16a67 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -326,7 +326,7 @@ public virtual Task> VectorizedSearchAsync // Resolve options. var internalOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions.VectorPropertyName); + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions); var vectorPropertyName = this._propertyReader.GetJsonPropertyName(vectorProperty!.DataModelPropertyName); // Configure search settings. 
@@ -382,7 +382,7 @@ public virtual Task> VectorizableTextSearchAsync(st // Resolve options. var internalOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions.VectorPropertyName); + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions); var vectorPropertyName = this._propertyReader.GetJsonPropertyName(vectorProperty!.DataModelPropertyName); // Configure search settings. @@ -434,9 +434,9 @@ public Task> HybridSearchAsync(TVector vec // Resolve options. var internalOptions = options ?? s_defaultKeywordVectorizedHybridSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions.VectorPropertyName); + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(new() { VectorProperty = internalOptions.VectorProperty }); var vectorPropertyName = this._propertyReader.GetJsonPropertyName(vectorProperty.DataModelPropertyName); - var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(internalOptions.AdditionalPropertyName); + var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(internalOptions.AdditionalProperty); var textDataPropertyName = this._propertyReader.GetJsonPropertyName(textDataProperty.DataModelPropertyName); // Configure search settings. diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index 4198a1b376e3..67bf3197395d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -263,7 +263,7 @@ public virtual async Task> VectorizedSearchAsync> VectorizedSearchAsync this.VerifyVectorType(vector); var searchOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions.VectorPropertyName); + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions); var vectorPropertyName = this._storagePropertyNames[vectorProperty.DataModelPropertyName]; var fields = new List(searchOptions.IncludeVectors ? this._storagePropertyNames.Values : this._nonVectorStoragePropertyNames); @@ -407,10 +407,10 @@ public Task> HybridSearchAsync(TVector vec this.VerifyVectorType(vector); var searchOptions = options ?? s_defaultKeywordVectorizedHybridSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions.VectorPropertyName); + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(new() { VectorProperty = searchOptions.VectorProperty }); var vectorPropertyName = this._storagePropertyNames[vectorProperty.DataModelPropertyName]; - var textProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalPropertyName); + var textProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalProperty); var textPropertyName = this._storagePropertyNames[textProperty.DataModelPropertyName]; var fields = new List(searchOptions.IncludeVectors ? 
this._storagePropertyNames.Values : this._nonVectorStoragePropertyNames); diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index cea018652ca2..449a50796a13 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -223,7 +223,7 @@ public async Task> VectorizedSearchAsync(T // Resolve options and get requested vector property or first as default. var internalOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions.VectorPropertyName); + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions); #pragma warning disable CS0618 // VectorSearchFilter is obsolete // Filter records using the provided filter before doing the vector comparison. diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index ff89ebff04f8..7b749c04dc16 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -258,7 +258,7 @@ public virtual async Task> VectorizedSearchAsync> HybridSearchAsync(TVect Array vectorArray = VerifyVectorParam(vector); var searchOptions = options ?? s_defaultKeywordVectorizedHybridSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions.VectorPropertyName); + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(new() { VectorProperty = searchOptions.VectorProperty }); var vectorPropertyName = this._storagePropertyNames[vectorProperty.DataModelPropertyName]; - var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalPropertyName); + var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalProperty); var textDataPropertyName = this._storagePropertyNames[textDataProperty.DataModelPropertyName]; #pragma warning disable CS0618 // VectorSearchFilter is obsolete diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index 5db73b801275..ce619398bf99 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -275,7 +275,7 @@ public virtual Task> VectorizedSearchAsync } var searchOptions = options ?? 
s_defaultVectorSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions.VectorPropertyName); + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions); var pgVector = PostgresVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index 033c88914966..4d8db3e63e81 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -470,7 +470,7 @@ public virtual async Task> VectorizedSearchAsync> HybridSearchAsync(TVect // Resolve options. var internalOptions = options ?? s_defaultKeywordVectorizedHybridSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions.VectorPropertyName); - var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(internalOptions.AdditionalPropertyName); + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(new() { VectorProperty = internalOptions.VectorProperty }); + var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(internalOptions.AdditionalProperty); var textDataPropertyName = this._propertyReader.GetStoragePropertyName(textDataProperty.DataModelPropertyName); // Build filter object. diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index a7deb35dc901..6c99d9b03d8e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -334,7 +334,7 @@ public virtual async Task> VectorizedSearchAsync> VectorizedSearchAsync(dataModel, score); diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index 3befe91242e5..14a8e56222d9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -379,7 +379,7 @@ public virtual async Task> VectorizedSearchAsync> VectorizedSearchAsync(mappedRecord, score); diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs index f7663e0e7e44..b9d199eb3361 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs @@ -48,14 +48,11 @@ public static byte[] ValidateVectorAndConvertToBytes(TVector vector, st /// The vector to search the database with as a byte array. /// The options to configure the behavior of the search. /// A mapping of data model property names to the names under which they are stored. - /// The name of the first vector property in the data model. + /// The storage name of the vector property. /// The set of fields to limit the results to. Null for all. /// The . 
- public static Query BuildQuery(byte[] vectorBytes, VectorSearchOptions options, IReadOnlyDictionary storagePropertyNames, string firstVectorPropertyName, string[]? selectFields) + public static Query BuildQuery(byte[] vectorBytes, VectorSearchOptions options, IReadOnlyDictionary storagePropertyNames, string vectorStoragePropertyName, string[]? selectFields) { - // Resolve options. - var vectorPropertyName = ResolveVectorFieldName(options.VectorPropertyName, storagePropertyNames, firstVectorPropertyName); - // Build search query. var redisLimit = options.Top + options.Skip; @@ -69,7 +66,7 @@ public static Query BuildQuery(byte[] vectorBytes, VectorSearchOptions< }; #pragma warning restore CS0618 // Type or member is obsolete - var query = new Query($"{filter}=>[KNN {redisLimit} @{vectorPropertyName} $embedding AS vector_score]") + var query = new Query($"{filter}=>[KNN {redisLimit} @{vectorStoragePropertyName} $embedding AS vector_score]") .AddParam("embedding", vectorBytes) .SetSortBy("vector_score") .Limit(options.Skip, redisLimit) @@ -129,23 +126,10 @@ public static string BuildLegacyFilter(VectorSearchFilter basicVectorSearchFilte /// Resolve the distance function to use for a search by checking the distance function of the vector property specified in options /// or by falling back to the distance function of the first vector property, or by falling back to the default distance function. /// - /// The search options potentially containing a vector field to search. - /// The list of all vector properties. - /// The first vector property in the record. + /// The vector property to be used. /// The distance function for the vector we want to search. - /// Thrown when a user asked for a vector property that doesn't exist on the record. - public static string ResolveDistanceFunction(VectorSearchOptions options, IReadOnlyList vectorProperties, VectorStoreRecordVectorProperty firstVectorProperty) - { - if (options.VectorPropertyName == null || vectorProperties.Count == 1) - { - return firstVectorProperty.DistanceFunction ?? DistanceFunction.CosineSimilarity; - } - - var vectorProperty = vectorProperties.FirstOrDefault(p => p.DataModelPropertyName == options.VectorPropertyName) - ?? throw new InvalidOperationException($"The collection does not have a vector field named '{options.VectorPropertyName}'."); - - return vectorProperty.DistanceFunction ?? DistanceFunction.CosineSimilarity; - } + public static string ResolveDistanceFunction(VectorStoreRecordVectorProperty vectorProperty) + => vectorProperty.DistanceFunction ?? DistanceFunction.CosineSimilarity; /// /// Convert the score from redis into the appropriate output score based on the distance function. @@ -172,33 +156,6 @@ public static string ResolveDistanceFunction(VectorSearchOptions - /// Resolve the vector field name to use for a search by using the storage name for the field name from options - /// if available, and falling back to the first vector field name if not. - /// - /// The vector field name provided via options. - /// A mapping of data model property names to the names under which they are stored. - /// The name of the first vector property in the data model. - /// The resolved vector field name. - /// Thrown if the provided field name is not a valid field name. - private static string ResolveVectorFieldName(string? optionsVectorFieldName, IReadOnlyDictionary storagePropertyNames, string firstVectorPropertyName) - { - string? 
vectorFieldName; - if (!string.IsNullOrWhiteSpace(optionsVectorFieldName)) - { - if (!storagePropertyNames.TryGetValue(optionsVectorFieldName!, out vectorFieldName)) - { - throw new InvalidOperationException($"The collection does not have a vector field named '{optionsVectorFieldName}'."); - } - } - else - { - vectorFieldName = firstVectorPropertyName; - } - - return vectorFieldName!; - } - /// /// Gets the name of the name under which the property with the given name is stored. /// diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index d2a87a34bf6d..3293bf2f15a1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -324,7 +324,7 @@ public async Task> VectorizedSearchAsync(T } var searchOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions.VectorPropertyName); + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions); using SqlCommand command = SqlServerCommandBuilder.SelectVector( this._sqlConnection, diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 835073cf9c59..22ad3b67c403 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -169,7 +169,7 @@ public virtual Task> VectorizedSearchAsync } var searchOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions.VectorPropertyName); + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions); var mappedArray = SqliteVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index 52c0ad29e832..98d38fc9a8c1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -344,7 +344,7 @@ public virtual async Task> VectorizedSearchAsync> HybridSearchAsync(TVect VerifyVectorParam(vector); var searchOptions = options ?? 
s_defaultKeywordVectorizedHybridSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions.VectorPropertyName); - var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalPropertyName); + var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(new() { VectorProperty = searchOptions.VectorProperty }); + var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalProperty); var vectorPropertyName = this._propertyReader.GetJsonPropertyName(vectorProperty.DataModelPropertyName); var textDataPropertyName = this._propertyReader.GetJsonPropertyName(textDataProperty.DataModelPropertyName); diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs index d41db6897385..6265c18bd6b9 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; @@ -611,10 +612,17 @@ public async Task VectorizedSearchUsesValidQueryAsync( this._mockMongoDatabase.Object, "collection"); + Expression>? vectorSelector = vectorPropertyName switch + { + "TestEmbedding1" => record => record.TestEmbedding1, + "TestEmbedding2" => record => record.TestEmbedding2, + _ => null + }; + // Act var actual = await sut.VectorizedSearchAsync(vector, new() { - VectorPropertyName = vectorPropertyName, + VectorProperty = vectorSelector, Top = actualTop, }); @@ -638,7 +646,7 @@ public async Task VectorizedSearchThrowsExceptionWithNonExistentVectorPropertyNa this._mockMongoDatabase.Object, "collection"); - var options = new MEVD.VectorSearchOptions { VectorPropertyName = "non-existent-property" }; + var options = new MEVD.VectorSearchOptions { VectorProperty = r => "non-existent-property" }; // Act & Assert await Assert.ThrowsAsync(async () => await (await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), options)).Results.FirstOrDefaultAsync()); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs index 6742bc6ac4c9..aa47dc512b8c 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs @@ -472,7 +472,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition) { IncludeVectors = true, OldFilter = filter, - VectorPropertyName = nameof(MultiPropsModel.Vector1), + VectorProperty = r => r.Vector1, Top = 5, Skip = 2 }); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs index 1301ee6a7eb9..087b707a4b7c 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs @@ -2,6 +2,7 @@ using System; using 
System.Collections.Generic; +using System.Linq; using System.Runtime.InteropServices; using Microsoft.Extensions.VectorData; using Xunit; @@ -69,10 +70,9 @@ public void BuildQueryBuildsRedisQueryWithDefaults() { { "Vector", "storage_Vector" }, }; - var firstVectorPropertyName = "storage_Vector"; // Act. - var query = RedisVectorStoreCollectionSearchMapping.BuildQuery(byteArray, new VectorSearchOptions(), storagePropertyNames, firstVectorPropertyName, null); + var query = RedisVectorStoreCollectionSearchMapping.BuildQuery(byteArray, new VectorSearchOptions(), storagePropertyNames, storagePropertyNames.Values.Single(), null); // Assert. Assert.NotNull(query); @@ -88,42 +88,21 @@ public void BuildQueryBuildsRedisQueryWithCustomVectorName() // Arrange. var floatVector = new ReadOnlyMemory(new float[] { 1.0f, 2.0f, 3.0f }); var byteArray = MemoryMarshal.AsBytes(floatVector.Span).ToArray(); - var vectorSearchOptions = new VectorSearchOptions { Top = 5, Skip = 3, VectorPropertyName = "Vector" }; + var vectorSearchOptions = new VectorSearchOptions { Top = 5, Skip = 3 }; var storagePropertyNames = new Dictionary() { { "Vector", "storage_Vector" }, }; - var firstVectorPropertyName = "storage_FirstVector"; var selectFields = new string[] { "storage_Field1", "storage_Field2" }; // Act. - var query = RedisVectorStoreCollectionSearchMapping.BuildQuery(byteArray, vectorSearchOptions, storagePropertyNames, firstVectorPropertyName, selectFields); + var query = RedisVectorStoreCollectionSearchMapping.BuildQuery(byteArray, vectorSearchOptions, storagePropertyNames, storagePropertyNames.Values.Single(), selectFields); // Assert. Assert.NotNull(query); Assert.Equal("*=>[KNN 8 @storage_Vector $embedding AS vector_score]", query.QueryString); } - [Fact] - public void BuildQueryFailsForInvalidVectorName() - { - // Arrange. - var floatVector = new ReadOnlyMemory(new float[] { 1.0f, 2.0f, 3.0f }); - var byteArray = MemoryMarshal.AsBytes(floatVector.Span).ToArray(); - var vectorSearchOptions = new VectorSearchOptions { VectorPropertyName = "UnknownVector" }; - var storagePropertyNames = new Dictionary() - { - { "Vector", "storage_Vector" }, - }; - var firstVectorPropertyName = "storage_FirstVector"; - - // Act & Assert. - Assert.Throws(() => - { - var query = RedisVectorStoreCollectionSearchMapping.BuildQuery(byteArray, vectorSearchOptions, storagePropertyNames, firstVectorPropertyName, null); - }); - } - [Theory] [InlineData("stringEquality")] [InlineData("intEquality")] @@ -213,32 +192,19 @@ public void ResolveDistanceFunctionReturnsCosineSimilarityIfNoDistanceFunctionSp var property = new VectorStoreRecordVectorProperty("Prop", typeof(ReadOnlyMemory)); // Act. - var resolvedDistanceFunction = RedisVectorStoreCollectionSearchMapping.ResolveDistanceFunction(new VectorSearchOptions(), [property], property); + var resolvedDistanceFunction = RedisVectorStoreCollectionSearchMapping.ResolveDistanceFunction(property); // Assert. Assert.Equal(DistanceFunction.CosineSimilarity, resolvedDistanceFunction); } [Fact] - public void ResolveDistanceFunctionReturnsDistanceFunctionFromFirstPropertyIfNoFieldChosen() + public void ResolveDistanceFunctionReturnsDistanceFunctionFromProvidedProperty() { var property = new VectorStoreRecordVectorProperty("Prop", typeof(ReadOnlyMemory)) { DistanceFunction = DistanceFunction.DotProductSimilarity }; // Act. - var resolvedDistanceFunction = RedisVectorStoreCollectionSearchMapping.ResolveDistanceFunction(new VectorSearchOptions(), [property], property); - - // Assert. 
- Assert.Equal(DistanceFunction.DotProductSimilarity, resolvedDistanceFunction); - } - - [Fact] - public void ResolveDistanceFunctionReturnsDistanceFunctionFromChosenPropertyIfFieldChosen() - { - var property1 = new VectorStoreRecordVectorProperty("Prop1", typeof(ReadOnlyMemory)) { DistanceFunction = DistanceFunction.CosineDistance }; - var property2 = new VectorStoreRecordVectorProperty("Prop2", typeof(ReadOnlyMemory)) { DistanceFunction = DistanceFunction.DotProductSimilarity }; - - // Act. - var resolvedDistanceFunction = RedisVectorStoreCollectionSearchMapping.ResolveDistanceFunction(new VectorSearchOptions { VectorPropertyName = "Prop2" }, [property1, property2], property1); + var resolvedDistanceFunction = RedisVectorStoreCollectionSearchMapping.ResolveDistanceFunction(property); // Assert. Assert.Equal(DistanceFunction.DotProductSimilarity, resolvedDistanceFunction); diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs index 1ee9d928599a..5a009649ab1b 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs @@ -78,7 +78,6 @@ hotelName hotelCode { Skip = 2, Top = 3, - VectorPropertyName = "DescriptionEmbedding" }; // Act @@ -108,7 +107,6 @@ public void BuildSearchQueryWithIncludedVectorsReturnsValidQuery() { Skip = 2, Top = 3, - VectorPropertyName = "DescriptionEmbedding", IncludeVectors = true }; @@ -139,7 +137,6 @@ public void BuildSearchQueryWithFilterReturnsValidQuery() { Skip = 2, Top = 3, - VectorPropertyName = "DescriptionEmbedding", OldFilter = new VectorSearchFilter() .EqualTo("HotelName", "Test Name") .AnyTagEqualTo("Tags", "t1") @@ -170,7 +167,6 @@ public void BuildSearchQueryWithInvalidFilterValueThrowsException() { Skip = 2, Top = 3, - VectorPropertyName = "DescriptionEmbedding", OldFilter = new VectorSearchFilter().EqualTo("HotelName", new TestFilterValue()) }; @@ -195,7 +191,6 @@ public void BuildSearchQueryWithNonExistentPropertyInFilterThrowsException() { Skip = 2, Top = 3, - VectorPropertyName = "DescriptionEmbedding", OldFilter = new VectorSearchFilter().EqualTo("NonExistentProperty", "value") }; diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs index 8f7ea996101d..f31cd9929360 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs @@ -534,7 +534,7 @@ public async Task VectorizedSearchWithNonExistentVectorPropertyNameThrowsExcepti await Assert.ThrowsAsync(async () => await (await sut.VectorizedSearchAsync( new ReadOnlyMemory([1f, 2f, 3f]), - new() { VectorPropertyName = "non-existent-property" })) + new() { VectorProperty = r => "non-existent-property" })) .Results.ToListAsync()); } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs index 8fde779af23c..0711cd0aba43 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs +++ 
b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs
@@ -24,21 +24,22 @@ public class HybridSearchOptions<TRecord>
     public Expression<Func<TRecord, bool>>? Filter { get; init; }

     ///
-    /// Gets or sets the name of the target dense vector property to search on.
-    /// Use the name of the vector property from your data model or as provided in the record definition.
-    /// If not provided will look if there is a vector property, and
-    /// will throw if either none or multiple exist.
     ///
-    public string? VectorPropertyName { get; init; }
+    /// Gets or sets the target dense vector property to search on.
+    /// Only needs to be set when the collection has multiple vector properties.
     ///
+    ///
+    /// If not provided will check if there is a vector property to use by default, and
+    /// will throw if either none or multiple exist.
+    ///
+    public Expression<Func<TRecord, object?>>? VectorProperty { get; init; }

     ///
-    /// Gets or sets the name of the additional target property to do the text/keyword search on.
+    /// Gets or sets the additional target property to do the text/keyword search on.
     /// The property must have full text search enabled.
-    /// Use the name of the data property from your data model or as provided in the record definition.
     /// If not provided will look if there is a text property with full text search enabled, and
     /// will throw if either none or multiple exist.
     ///
-    public string? AdditionalPropertyName { get; init; }
+    public Expression<Func<TRecord, object?>>? AdditionalProperty { get; init; }

     ///
     /// Gets or sets the maximum number of results to return.
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs
index 7976184f8ebf..7f6cc16f5dfa 100644
--- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs
+++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs
@@ -31,8 +31,19 @@ public class VectorSearchOptions<TRecord>
     /// If not provided will check if there is a vector property to use by default, and
     /// will throw if either none or multiple exist.
     ///
+    [Obsolete("Use VectorProperty instead")]
     public string? VectorPropertyName { get; init; }

+    ///
+    /// Gets or sets the vector property to search on.
+    /// Only needs to be set when the collection has multiple vector properties.
+    ///
+    ///
+    /// If not provided will check if there is a vector property to use by default, and
+    /// will throw if either none or multiple exist.
+    ///
+    public Expression<Func<TRecord, object?>>? VectorProperty { get; init; }
+
     ///
     /// Gets or sets the maximum number of results to return.
/// diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs index 09e47ca8c61d..8fa45147398b 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -350,7 +350,7 @@ await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"), new() { IncludeVectors = includeVectors, - VectorPropertyName = "DescriptionEmbedding", + VectorProperty = r => r.DescriptionEmbedding, OldFilter = filter, }); @@ -389,7 +389,7 @@ public async Task ItCanSearchWithVectorizableTextAndFiltersAsync() "A hotel with great views.", new() { - VectorPropertyName = "DescriptionEmbedding", + VectorProperty = r => r.DescriptionEmbedding, OldFilter = filter, }); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/BaseKeywordVectorizedHybridSearchTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/BaseKeywordVectorizedHybridSearchTests.cs index c1c223382774..149159ad46c0 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/BaseKeywordVectorizedHybridSearchTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/BaseKeywordVectorizedHybridSearchTests.cs @@ -217,8 +217,8 @@ public async Task SearchWithMultiTextRecordSearchesRequestedFieldAsync() await this.CreateCollectionAndAddDataAsync(sut, vector); // Act - var searchResult1 = await hybridSearch!.HybridSearchAsync(vector, ["Apples"], new() { AdditionalPropertyName = nameof(MultiSearchStringRecord.Text2) }); - var searchResult2 = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], new() { AdditionalPropertyName = nameof(MultiSearchStringRecord.Text2) }); + var searchResult1 = await hybridSearch!.HybridSearchAsync(vector, ["Apples"], new() { AdditionalProperty = r => r.Text2 }); + var searchResult2 = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], new() { AdditionalProperty = r => r.Text2 }); // Assert var results1 = await searchResult1.Results.ToListAsync(); diff --git a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs index 15047fe23b91..d259a1ac0f4f 100644 --- a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs +++ b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs @@ -4,7 +4,9 @@ using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Linq; +using System.Linq.Expressions; using System.Reflection; +using System.Runtime.CompilerServices; using System.Text.Json; using System.Text.Json.Serialization; @@ -342,26 +344,34 @@ public string GetJsonPropertyName(string dataModelPropertyName) /// to a vector property in the schema if not. If no name is provided and there is more /// than one vector property, an exception will be thrown. /// - /// The vector property name. + /// The search options. /// Thrown if the provided property name is not a valid vector property name. - public VectorStoreRecordVectorProperty GetVectorPropertyOrSingle(string? vectorPropertyName) + public VectorStoreRecordVectorProperty GetVectorPropertyOrSingle(VectorSearchOptions? searchOptions) { - // If vector property name is provided, try to find it in schema or throw an exception. 
- if (!string.IsNullOrWhiteSpace(vectorPropertyName)) + if (searchOptions is not null) { - // Check vector properties by data model property name. - var vectorProperty = this.VectorProperties - .FirstOrDefault(l => l.DataModelPropertyName.Equals(vectorPropertyName, StringComparison.Ordinal)); +#pragma warning disable CS0618 // Type or member is obsolete + string? vectorPropertyName = searchOptions.VectorPropertyName; +#pragma warning restore CS0618 // Type or member is obsolete - if (vectorProperty is not null) + // If vector property name is provided, try to find it in schema or throw an exception. + if (!string.IsNullOrWhiteSpace(vectorPropertyName)) { - return vectorProperty; + // Check vector properties by data model property name. + return this.VectorProperties.FirstOrDefault(l => l.DataModelPropertyName.Equals(vectorPropertyName, StringComparison.Ordinal)) + ?? throw new InvalidOperationException($"The {this._dataModelType.FullName} type does not have a vector property named '{vectorPropertyName}'."); } + else if (searchOptions.VectorProperty is Expression> expression) + { + // VectorPropertiesInfo is not available for VectorStoreGenericDataModel. + IReadOnlyList infos = typeof(TRecord).IsGenericType && typeof(TRecord).GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>) + ? [] : this.VectorPropertiesInfo; - throw new InvalidOperationException($"The {this._dataModelType.FullName} type does not have a vector property named '{vectorPropertyName}'."); + return GetMatchingProperty(expression, infos, this.VectorProperties); + } } - // If vector property name is not provided, return first vector property from schema, or throw if there are no vectors. + // If vector property name is not provided, check if there is a single vector property, or throw if there are no vectors or more than one. if (this.VectorProperty is null) { throw new InvalidOperationException($"The {this._dataModelType.FullName} type does not have any vector properties."); @@ -380,28 +390,20 @@ public VectorStoreRecordVectorProperty GetVectorPropertyOrSingle(string? vectorP /// to a text data property in the schema if not. If no name is provided and there is more than one text data property with /// full text search indexing enabled, an exception will be thrown. /// - /// The property name. + /// The full text search property selector. /// Thrown if the provided property name is not a valid text data property name. - public VectorStoreRecordDataProperty GetFullTextDataPropertyOrSingle(string? propertyName) + public VectorStoreRecordDataProperty GetFullTextDataPropertyOrSingle(Expression>? expression) { - // If text data property name is provided, try to find it in schema or throw an exception. - if (!string.IsNullOrWhiteSpace(propertyName)) + if (expression is not null) { - // Check string data properties by data model property name. - var dataProperty = this.DataProperties - .FirstOrDefault(l => l.DataModelPropertyName.Equals(propertyName, StringComparison.Ordinal) && l.PropertyType == typeof(string)); - - if (dataProperty is null) - { - throw new InvalidOperationException($"The {this._dataModelType.FullName} type does not have a text data property named '{propertyName}'."); - } - - if (!dataProperty.IsFullTextSearchable) - { - throw new InvalidOperationException($"The text data property named '{propertyName}' on the {this._dataModelType.FullName} type must have full text search enabled."); - } + // DataPropertiesInfo is not available for VectorStoreGenericDataModel. 
+ IReadOnlyList infos = typeof(TRecord).IsGenericType && typeof(TRecord).GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>) + ? [] : this.DataPropertiesInfo; - return dataProperty; + var dataProperty = GetMatchingProperty(expression, this.DataPropertiesInfo, this.DataProperties); + return dataProperty.IsFullTextSearchable + ? dataProperty + : throw new InvalidOperationException($"The text data property named '{dataProperty.DataModelPropertyName}' on the {this._dataModelType.FullName} type must have full text search enabled."); } // If text data property name is not provided, check if a single full text searchable text property exists or throw otherwise. @@ -422,6 +424,80 @@ public VectorStoreRecordDataProperty GetFullTextDataPropertyOrSingle(string? pro return fullTextStringProperties[0]; } + private static TProperty GetMatchingProperty(Expression> expression, + IReadOnlyList propertyInfos, IReadOnlyList properties) + where TProperty : VectorStoreRecordProperty + { + bool data = typeof(TProperty) == typeof(VectorStoreRecordDataProperty); + string expectedGenericModelPropertyName = data + ? nameof(VectorStoreGenericDataModel.Data) + : nameof(VectorStoreGenericDataModel.Vectors); + + MemberExpression? member = expression.Body as MemberExpression; + // (TRecord r) => r.PropertyName is translated into + // (TRecord r) => (object)r.PropertyName for properties that return struct like ReadOnlyMemory. + if (member is null && expression.Body is UnaryExpression unary + && unary.Operand.NodeType == ExpressionType.MemberAccess) + { + member = unary.Operand as MemberExpression; + } + + if (member is not null + && expression.Parameters.Count == 1 + && member.Expression == expression.Parameters[0] + && member.Member is PropertyInfo property) + { + for (int i = 0; i < propertyInfos.Count; i++) + { + if (propertyInfos[i] == property) + { + return properties[i]; + } + } + + throw new InvalidOperationException($"The property {property.Name} of {typeof(TRecord).FullName} is not a {(data ? "Data" : "Vector")} property."); + } + // (VectorStoreGenericDataModel r) => r.Vectors["PropertyName"] + else if (expression.Body is MethodCallExpression methodCall + // It's a Func, object> + && expression.Type.IsGenericType + && expression.Type.GenericTypeArguments.Length == 2 + && expression.Type.GenericTypeArguments[0].IsGenericType + && expression.Type.GenericTypeArguments[0].GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>) + // It's accessing VectorStoreGenericDataModel.Vectors (or Data) + && methodCall.Object is MemberExpression memberAccess + && memberAccess.Member.Name == expectedGenericModelPropertyName + // and has a single argument + && methodCall.Arguments.Count == 1) + { + string name = methodCall.Arguments[0] switch + { + ConstantExpression constant when constant.Value is string text => text, + MemberExpression field when TryGetCapturedValue(field, out object? capturedValue) && capturedValue is string text => text, + _ => throw new InvalidOperationException($"The value of the provided {(data ? "Additional" : "Vector")}Property option is not a valid expression.") + }; + + return properties.FirstOrDefault(l => l.DataModelPropertyName.Equals(name, StringComparison.Ordinal)) + ?? throw new InvalidOperationException($"The {typeof(TRecord).FullName} type does not have a vector property named '{name}'."); + } + + throw new InvalidOperationException($"The value of the provided {(data ? 
"Additional" : "Vector")}Property option is not a valid expression."); + + static bool TryGetCapturedValue(Expression expression, out object? capturedValue) + { + if (expression is MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } + && constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) + && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true)) + { + capturedValue = fieldInfo.GetValue(constant.Value); + return true; + } + + capturedValue = null; + return false; + } + } + /// /// Check if we have previously loaded the objects from the data model and if not, load them. /// diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs index 2938b2855800..bbaabdd3d844 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs @@ -436,21 +436,23 @@ public void GetVectorPropertyOrSingleReturnsRequestedVectorAndThrowsForInvalidVe { // Arrange. var sut = new VectorStoreRecordPropertyReader(type, definition, null); + var validVector = new VectorSearchOptions() { VectorProperty = r => r.Vector2 }; + var invalidVector = new VectorSearchOptions() { VectorProperty = r => r.Data2 }; // Act & Assert. - Assert.Equal("Vector2", sut.GetVectorPropertyOrSingle("Vector2").DataModelPropertyName); - Assert.Throws(() => sut.GetVectorPropertyOrSingle("DoesNotExist")); + Assert.Equal("Vector2", sut.GetVectorPropertyOrSingle(validVector).DataModelPropertyName); + Assert.Throws(() => sut.GetVectorPropertyOrSingle(invalidVector)); } [Theory] - [MemberData(nameof(NoVectorsTypeAndDefinitionCombos))] + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] public void GetVectorPropertyOrSingleThrowsForMultipleVectors(Type type, VectorStoreRecordDefinition? definition) { // Arrange. var sut = new VectorStoreRecordPropertyReader(type, definition, null); // Act & Assert. - Assert.Throws(() => sut.GetVectorPropertyOrSingle(null)); + Assert.Throws(() => sut.GetVectorPropertyOrSingle(null)); } [Theory] @@ -461,7 +463,89 @@ public void GetVectorPropertyOrSingleThrowsForNoVectors(Type type, VectorStoreRe var sut = new VectorStoreRecordPropertyReader(type, definition, null); // Act & Assert. 
- Assert.Throws(() => sut.GetVectorPropertyOrSingle(null)); + Assert.Throws(() => sut.GetVectorPropertyOrSingle(null)); + } + + [Fact] + public void GetVectorPropertyOrSingleReturnsRequestedGenericDataModelVectorWhenUsingConst() + { + const string TheConst = "FloatVector"; + VectorStoreRecordPropertyReader sut = CreateReaderForGenericModel(TheConst); + VectorSearchOptions> expectedConst = new() + { + VectorProperty = r => r.Vectors[TheConst] + }; + VectorSearchOptions> wrongConst = new() + { + VectorProperty = r => r.Vectors["Different"] + }; + + Assert.Equal(TheConst, sut.GetVectorPropertyOrSingle(expectedConst).DataModelPropertyName); + Assert.Throws(() => sut.GetVectorPropertyOrSingle(wrongConst)); + } + + [Fact] + public void GetVectorPropertyOrSingleReturnsRequestedGenericDataModelVectorWhenUsingVariable() + { + string theVariable = "FloatVector"; + string theWrongVariable = "Different"; + VectorStoreRecordPropertyReader sut = CreateReaderForGenericModel(theVariable); + VectorSearchOptions> expectedVariable = new() + { + VectorProperty = r => r.Vectors[theVariable] + }; + VectorSearchOptions> wrongVariable = new() + { + VectorProperty = r => r.Vectors[theWrongVariable] + }; + + Assert.Equal(theVariable, sut.GetVectorPropertyOrSingle(expectedVariable).DataModelPropertyName); + Assert.Throws(() => sut.GetVectorPropertyOrSingle(wrongVariable)); + } + + [Theory] + [InlineData("FloatVector", "Different")] + // it's a Theory just for the need of testing a method expected being captured by the lambda property selector + public void GetVectorPropertyOrSingleReturnsRequestedGenericDataModelVectorWhenUsingArgument(string expected, string wrong) + { + VectorStoreRecordPropertyReader sut = CreateReaderForGenericModel(expected); + VectorSearchOptions> expectedArgument = new() + { + VectorProperty = r => r.Vectors[expected] + }; + VectorSearchOptions> wrongArgument = new() + { + VectorProperty = r => r.Vectors[wrong] + }; + + Assert.Equal("FloatVector", sut.GetVectorPropertyOrSingle(expectedArgument).DataModelPropertyName); + Assert.Throws(() => sut.GetVectorPropertyOrSingle(wrongArgument)); + } + + private static VectorStoreRecordPropertyReader CreateReaderForGenericModel(string vectorPropertyName) + { + VectorStoreGenericDataModel genericRecord = new("key") + { + Data = + { + ["Text"] = "data" + }, + Vectors = + { + [vectorPropertyName] = new ReadOnlyMemory([-1, -1, -1, -1]) + } + }; + VectorStoreRecordDefinition definition = new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(Guid)), + new VectorStoreRecordDataProperty("Text", typeof(string)), + new VectorStoreRecordVectorProperty(vectorPropertyName, typeof(ReadOnlyMemory)), + ] + }; + + return new(genericRecord.GetType(), definition, null); } [Theory] @@ -472,9 +556,10 @@ public void GetFullTextDataPropertyOrOnlyReturnsRequestedPropOrOnlyTextDataPropA var sut = new VectorStoreRecordPropertyReader(type, definition, null); // Act & Assert. 
- Assert.Equal("Data1", sut.GetFullTextDataPropertyOrSingle("Data1").DataModelPropertyName); - Assert.Equal("Data1", sut.GetFullTextDataPropertyOrSingle(null).DataModelPropertyName); - Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle("DoesNotExist")); + Assert.Equal("Data1", sut.GetFullTextDataPropertyOrSingle(r => r.Data1).DataModelPropertyName); + Assert.Equal("Data1", sut.GetFullTextDataPropertyOrSingle(null).DataModelPropertyName); + Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle(r => r.Vector1)); + Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle(r => "DoesNotExist")); } [Theory] @@ -485,7 +570,7 @@ public void GetFullTextDataPropertyOrOnlyThrowsForNoTextDataProps(Type type, Vec var sut = new VectorStoreRecordPropertyReader(type, definition, null); // Act & Assert. - Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle(null)); + Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle(null)); } [Theory] @@ -496,7 +581,7 @@ public void GetFullTextDataPropertyOrOnlyThrowsForNonFullTextSearchProp(Type typ var sut = new VectorStoreRecordPropertyReader(type, definition, null); // Act & Assert. - Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle("Data2")); + Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle(r => r.Data2)); } [Fact] @@ -516,7 +601,7 @@ public void GetFullTextDataPropertyOrOnlyThrowsForMultipleMatchingProps() var sut = new VectorStoreRecordPropertyReader(typeof(object), definition, null); // Act & Assert. - Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle(null)); + Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle(null)); } public static IEnumerable NoKeyTypeAndDefinitionCombos() diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs index bb658f486c87..99c34d3376cf 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs @@ -84,14 +84,14 @@ public async Task RecordCRUD() VectorSearchResult vectorSearchResult = await (await collection.VectorizedSearchAsync(inserted.Floats, new() { - VectorPropertyName = nameof(TestModel.Floats), + VectorProperty = r => r.Floats, IncludeVectors = true })).Results.SingleAsync(); AssertEquality(updated, vectorSearchResult.Record); vectorSearchResult = await (await collection.VectorizedSearchAsync(inserted.Floats, new() { - VectorPropertyName = nameof(TestModel.Floats), + VectorProperty = r => r.Floats, IncludeVectors = false })).Results.SingleAsync(); // Make sure the vectors are not included in the result. 
From 261fec44bbb973249bceefa7c49ae55b9cfcb16c Mon Sep 17 00:00:00 2001 From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com> Date: Fri, 7 Mar 2025 10:50:38 -0800 Subject: [PATCH 13/63] Updated integration test pipeline to use net8.0 target framework --- .github/workflows/dotnet-build-and-test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/dotnet-build-and-test.yml b/.github/workflows/dotnet-build-and-test.yml index dde2fad80b39..a60632e4cf59 100644 --- a/.github/workflows/dotnet-build-and-test.yml +++ b/.github/workflows/dotnet-build-and-test.yml @@ -106,7 +106,7 @@ jobs: run: | export INTEGRATION_TEST_PROJECTS=$(find ./dotnet -type f -name "*IntegrationTests.csproj" | grep -v "Experimental.Orchestration.Flow.IntegrationTests.csproj" | tr '\n' ' ') for project in $INTEGRATION_TEST_PROJECTS; do - dotnet test -c ${{ matrix.configuration }} $project --no-build -v Normal --logger trx + dotnet test -f net8.0 -c ${{ matrix.configuration }} $project --no-build -v Normal --logger trx done env: # Azure OpenAI Deployments From 3fca31faa8ab9e741c9d0388d50bc13e3b00e879 Mon Sep 17 00:00:00 2001 From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com> Date: Wed, 12 Mar 2025 13:52:28 -0700 Subject: [PATCH 14/63] .Net: [Feature Branch] Vector Store Logging (#10865) ### Motivation and Context Related: https://github.com/microsoft/semantic-kernel/issues/10596 This PR enhances the vector store functionality by adding builder pattern implementations and logging decorators for key vector data interfaces, along with supporting utilities and DI integration. Interfaces handled: - `IKeywordHybridSearch` - `IVectorizableTextSearch` - `IVectorizedSearch` - `IVectorStore` - `IVectorStoreRecordCollection` Class types added: - Builders: implement a pipeline pattern with `Use` and `Build` methods. - Logging Extensions: provide reusable logging methods for tasks and enumerables, handling success, failure and cancellation. - Logging Decorators: wrap the interfaces to add logging by using `LoggingExtensions` reusable logging methods. - Builder Extensions: Add `AsBuilder` methods to convert service instances into builders. - Logging Builder Extensions: Add `UseLogging` methods to integrate logging into builder pipelines. - Service Collection Extensions: Add `Add{T}` and `AddKeyed{T}` methods for DI registration with configurable lifetimes. Other changes: - Split `Verify` class into `Verify` and `KernelVerify` in order to be able to use `Verify` methods in `Microsoft.Extensions.VectorData` package without a reference to Semantic Kernel specific logic, like `ValidPluginName`, `ValidFunctionName` etc. - Added unit tests for new classes and methods. - Added usage example in `Concepts/Memory` folder. 
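The builders compose decorators through `Use` and materialize the final instance with `Build`. A minimal sketch of where a user-supplied stage would sit relative to `UseLogging`, assuming `VectorStoreBuilder` mirrors the `Use` overloads exercised by the added builder unit tests; the pass-through stage below is a placeholder, not code from this PR:

```csharp
// Sketch only: assumes VectorStoreBuilder exposes the same Use overloads that the
// added builder unit tests exercise for the other builders in this PR.
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.VectorData;
using Microsoft.SemanticKernel.Connectors.InMemory;

public static class BuilderPipelineSketch
{
    public static IVectorStore Compose(ILoggerFactory loggerFactory)
    {
        var builder = new InMemoryVectorStore().AsBuilder();

        // Each Use registers a factory that receives the inner store (and optionally
        // the IServiceProvider) and returns the store for the next stage. A real
        // decorator would wrap 'inner'; this stage is a pass-through placeholder.
        builder.Use((inner, serviceProvider) => inner);

        // Adds the logging decorator introduced by this PR.
        builder.UseLogging(loggerFactory);

        return builder.Build();
    }
}
```

Per the added builder tests, factories are applied in reverse registration order, so the first `Use` registered produces the outermost decorator.
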
### Usage example Logging with manual registration: ```csharp var vectorStore = new InMemoryVectorStore() .AsBuilder() .UseLogging(this.LoggerFactory) .Build(); ``` Logging with DI: ```csharp serviceCollection.AddInMemoryVectorStore(); serviceCollection .AddVectorStore(s => s.GetRequiredService()) .UseLogging(this.LoggerFactory); ``` ### Contribution Checklist - [x] The code builds clean without any errors or warnings - [x] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [x] All unit tests pass, and I have added new tests where possible - [x] I didn't break anyone :smile: --- dotnet/SK-dotnet.sln | 25 +- dotnet/samples/Concepts/Concepts.csproj | 1 + .../Concepts/Memory/VectorStore_Telemetry.cs | 180 +++++++++++++ .../VectorData.Abstractions/PACKAGE.md | 15 +- .../VectorData.UnitTests/.editorconfig | 6 + .../VectorData.UnitTests.csproj | 41 +++ ...ywordHybridSearchBuilderExtensionsTests.cs | 24 ++ ...BuilderServiceCollectionExtensionsTests.cs | 96 +++++++ .../KeywordHybridSearchBuilderTests.cs | 87 +++++++ ...ywordHybridSearchBuilderExtensionsTests.cs | 64 +++++ .../LoggingKeywordHybridSearchTests.cs | 60 +++++ ...rizableTextSearchBuilderExtensionsTests.cs | 64 +++++ .../LoggingVectorizableTextSearchTests.cs | 56 ++++ ...gVectorizedSearchBuilderExtensionsTests.cs | 64 +++++ .../LoggingVectorizedSearchTests.cs | 56 ++++ ...rizableTextSearchBuilderExtensionsTests.cs | 24 ++ ...BuilderServiceCollectionExtensionsTests.cs | 96 +++++++ .../VectorizableTextSearchBuilderTests.cs | 87 +++++++ .../VectorizedSearchBuilderExtensionsTests.cs | 24 ++ ...BuilderServiceCollectionExtensionsTests.cs | 96 +++++++ .../VectorizedSearchBuilderTests.cs | 87 +++++++ ...oggingVectorStoreBuilderExtensionsTests.cs | 64 +++++ ...eRecordCollectionBuilderExtensionsTests.cs | 64 +++++ ...LoggingVectorStoreRecordCollectionTests.cs | 245 ++++++++++++++++++ .../VectorStorage/LoggingVectorStoreTests.cs | 72 +++++ .../VectorStoreBuilderExtensionsTests.cs | 24 ++ ...BuilderServiceCollectionExtensionsTests.cs | 96 +++++++ .../VectorStorage/VectorStoreBuilderTests.cs | 87 +++++++ ...eRecordCollectionBuilderExtensionsTests.cs | 24 ++ ...BuilderServiceCollectionExtensionsTests.cs | 96 +++++++ ...VectorStoreRecordCollectionBuilderTests.cs | 87 +++++++ dotnet/src/Connectors/VectorData/PACKAGE.md | 40 +++ .../Connectors/VectorData/VectorData.csproj | 62 +++++ .../KeywordHybridSearchBuilder.cs | 87 +++++++ .../KeywordHybridSearchBuilderExtensions.cs | 25 ++ ...earchBuilderServiceCollectionExtensions.cs | 89 +++++++ .../LoggingKeywordHybridSearch.cs | 47 ++++ ...ingKeywordHybridSearchBuilderExtensions.cs | 41 +++ .../LoggingVectorizableTextSearch.cs | 46 ++++ ...VectorizableTextSearchBuilderExtensions.cs | 41 +++ .../VectorSearch/LoggingVectorizedSearch.cs | 46 ++++ ...oggingVectorizedSearchBuilderExtensions.cs | 41 +++ .../VectorizableTextSearchBuilder.cs | 87 +++++++ ...VectorizableTextSearchBuilderExtensions.cs | 25 ++ ...earchBuilderServiceCollectionExtensions.cs | 89 +++++++ .../VectorSearch/VectorizedSearchBuilder.cs | 87 +++++++ .../VectorizedSearchBuilderExtensions.cs | 25 ++ ...earchBuilderServiceCollectionExtensions.cs | 89 +++++++ .../VectorStorage/LoggingVectorStore.cs | 53 ++++ .../LoggingVectorStoreBuilderExtensions.cs | 41 +++ .../LoggingVectorStoreRecordCollection.cs | 144 ++++++++++ 
...rStoreRecordCollectionBuilderExtensions.cs | 41 +++ .../VectorStorage/VectorStoreBuilder.cs | 87 +++++++ .../VectorStoreBuilderExtensions.cs | 25 ++ ...StoreBuilderServiceCollectionExtensions.cs | 89 +++++++ .../VectorStoreRecordCollectionBuilder.cs | 87 +++++++ ...rStoreRecordCollectionBuilderExtensions.cs | 25 ++ ...ctionBuilderServiceCollectionExtensions.cs | 89 +++++++ dotnet/src/Connectors/VectorData/neticon.png | Bin 0 -> 7006 bytes .../Extensions/GrpcKernelExtensions.cs | 4 +- .../Extensions/ApiManifestKernelExtensions.cs | 2 +- .../CopilotAgentPluginKernelExtensions.cs | 2 +- .../Extensions/OpenApiKernelExtensions.cs | 8 +- .../OpenApiKernelPluginFactory.cs | 10 +- .../src/Diagnostics/KernelVerify.cs | 78 ++++++ .../src/Diagnostics/LoggingExtensions.cs | 137 ++++++++++ .../src/Diagnostics/Verify.cs | 65 +---- .../src/System/EmptyKeyedServiceProvider.cs | 23 ++ .../Functions/KernelFunction.cs | 4 +- .../Functions/KernelFunctionMetadata.cs | 2 +- .../Functions/KernelPlugin.cs | 2 +- .../Functions/KernelFunctionFromMethod.cs | 8 +- .../Functions/KernelPluginFactory.cs | 2 +- .../Utilities/FakeLogger.cs | 27 ++ .../Utilities/LoggingExtensionsTests.cs | 239 +++++++++++++++++ 75 files changed, 4283 insertions(+), 90 deletions(-) create mode 100644 dotnet/samples/Concepts/Memory/VectorStore_Telemetry.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/.editorconfig create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreBuilderExtensionsTests.cs create mode 100644 
dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderServiceCollectionExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderTests.cs create mode 100644 dotnet/src/Connectors/VectorData/PACKAGE.md create mode 100644 dotnet/src/Connectors/VectorData/VectorData.csproj create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilder.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearch.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearchBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearch.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearchBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearch.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearchBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilder.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilder.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStore.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollection.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilder.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderServiceCollectionExtensions.cs create mode 100644 
dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilder.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/neticon.png create mode 100644 dotnet/src/InternalUtilities/src/Diagnostics/KernelVerify.cs create mode 100644 dotnet/src/InternalUtilities/src/Diagnostics/LoggingExtensions.cs create mode 100644 dotnet/src/InternalUtilities/src/System/EmptyKeyedServiceProvider.cs create mode 100644 dotnet/src/SemanticKernel.UnitTests/Utilities/FakeLogger.cs create mode 100644 dotnet/src/SemanticKernel.UnitTests/Utilities/LoggingExtensionsTests.cs diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index bad51cba9c8e..0bea49655ebe 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -112,12 +112,15 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Diagnostics", "Diagnostics" src\InternalUtilities\src\Diagnostics\ExceptionExtensions.cs = src\InternalUtilities\src\Diagnostics\ExceptionExtensions.cs src\InternalUtilities\src\Diagnostics\ExperimentalAttribute.cs = src\InternalUtilities\src\Diagnostics\ExperimentalAttribute.cs src\InternalUtilities\src\Diagnostics\IsExternalInit.cs = src\InternalUtilities\src\Diagnostics\IsExternalInit.cs + src\InternalUtilities\src\Diagnostics\KernelVerify.cs = src\InternalUtilities\src\Diagnostics\KernelVerify.cs src\InternalUtilities\src\Diagnostics\NullableAttributes.cs = src\InternalUtilities\src\Diagnostics\NullableAttributes.cs src\InternalUtilities\src\Diagnostics\RequiresDynamicCodeAttribute.cs = src\InternalUtilities\src\Diagnostics\RequiresDynamicCodeAttribute.cs src\InternalUtilities\src\Diagnostics\RequiresUnreferencedCodeAttribute.cs = src\InternalUtilities\src\Diagnostics\RequiresUnreferencedCodeAttribute.cs src\InternalUtilities\src\Diagnostics\UnconditionalSuppressMessageAttribute.cs = src\InternalUtilities\src\Diagnostics\UnconditionalSuppressMessageAttribute.cs - src\InternalUtilities\src\Diagnostics\Verify.cs = src\InternalUtilities\src\Diagnostics\Verify.cs src\InternalUtilities\src\Diagnostics\UnreachableException.cs = src\InternalUtilities\src\Diagnostics\UnreachableException.cs + src\InternalUtilities\src\Diagnostics\Verify.cs = src\InternalUtilities\src\Diagnostics\Verify.cs + src\InternalUtilities\src\Diagnostics\ActivityExtensions.cs = src\InternalUtilities\src\Diagnostics\ActivityExtensions.cs + src\InternalUtilities\src\Diagnostics\LoggingExtensions.cs = src\InternalUtilities\src\Diagnostics\LoggingExtensions.cs EndProjectSection EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Linq", "Linq", "{B00AD427-0047-4850-BEF9-BA8237EA9D8B}" @@ -138,10 +141,10 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "System", "System", "{3CDE10 ProjectSection(SolutionItems) = preProject src\InternalUtilities\src\System\EnvExtensions.cs = src\InternalUtilities\src\System\EnvExtensions.cs src\InternalUtilities\src\System\IListExtensions.cs = src\InternalUtilities\src\System\IListExtensions.cs + src\InternalUtilities\src\System\IndexRange.cs = src\InternalUtilities\src\System\IndexRange.cs src\InternalUtilities\src\System\InternalTypeConverter.cs = src\InternalUtilities\src\System\InternalTypeConverter.cs src\InternalUtilities\src\System\NonNullCollection.cs = src\InternalUtilities\src\System\NonNullCollection.cs 
src\InternalUtilities\src\System\TypeConverterFactory.cs = src\InternalUtilities\src\System\TypeConverterFactory.cs - src\InternalUtilities\src\System\IndexRange.cs = src\InternalUtilities\src\System\IndexRange.cs EndProjectSection EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Type", "Type", "{E85EA4D0-BB7E-4DFD-882F-A76EB8C0B8FF}" @@ -492,8 +495,12 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Agents.Bedrock", "src\Agent EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ModelContextProtocol", "samples\Demos\ModelContextProtocol\ModelContextProtocol.csproj", "{B16AC373-3DA8-4505-9510-110347CD635D}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "VectorData.UnitTests", "src\Connectors\VectorData.UnitTests\VectorData.UnitTests.csproj", "{89FC596F-CB81-4733-829B-4527D0FFC291}" +EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SqlServerIntegrationTests", "src\VectorDataIntegrationTests\SqlServerIntegrationTests\SqlServerIntegrationTests.csproj", "{A5E6193C-8431-4C6E-B674-682CB41EAA0C}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "VectorData", "src\Connectors\VectorData\VectorData.csproj", "{8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -1359,12 +1366,24 @@ Global {B16AC373-3DA8-4505-9510-110347CD635D}.Publish|Any CPU.Build.0 = Debug|Any CPU {B16AC373-3DA8-4505-9510-110347CD635D}.Release|Any CPU.ActiveCfg = Release|Any CPU {B16AC373-3DA8-4505-9510-110347CD635D}.Release|Any CPU.Build.0 = Release|Any CPU + {89FC596F-CB81-4733-829B-4527D0FFC291}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {89FC596F-CB81-4733-829B-4527D0FFC291}.Debug|Any CPU.Build.0 = Debug|Any CPU + {89FC596F-CB81-4733-829B-4527D0FFC291}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {89FC596F-CB81-4733-829B-4527D0FFC291}.Publish|Any CPU.Build.0 = Publish|Any CPU + {89FC596F-CB81-4733-829B-4527D0FFC291}.Release|Any CPU.ActiveCfg = Release|Any CPU + {89FC596F-CB81-4733-829B-4527D0FFC291}.Release|Any CPU.Build.0 = Release|Any CPU {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Debug|Any CPU.Build.0 = Debug|Any CPU {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Publish|Any CPU.ActiveCfg = Debug|Any CPU {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Publish|Any CPU.Build.0 = Debug|Any CPU {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Release|Any CPU.ActiveCfg = Release|Any CPU {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Release|Any CPU.Build.0 = Release|Any CPU + {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Publish|Any CPU.Build.0 = Publish|Any CPU + {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -1550,7 +1569,9 @@ Global {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4} {8C658E1E-83C8-4127-B8BF-27A638A45DDD} = {6823CD5E-2ABE-41EB-B865-F86EC13F0CF9} {B16AC373-3DA8-4505-9510-110347CD635D} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} + {89FC596F-CB81-4733-829B-4527D0FFC291} = {5A7028A7-4DDF-4E4F-84A9-37CE8F8D7E89} 
{A5E6193C-8431-4C6E-B674-682CB41EAA0C} = {4F381919-F1BE-47D8-8558-3187ED04A84F} + {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3} = {24503383-A8C4-4255-9998-28D70FE8E99A} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83} diff --git a/dotnet/samples/Concepts/Concepts.csproj b/dotnet/samples/Concepts/Concepts.csproj index 728dce6b41fb..464d8df616b5 100644 --- a/dotnet/samples/Concepts/Concepts.csproj +++ b/dotnet/samples/Concepts/Concepts.csproj @@ -77,6 +77,7 @@ + diff --git a/dotnet/samples/Concepts/Memory/VectorStore_Telemetry.cs b/dotnet/samples/Concepts/Memory/VectorStore_Telemetry.cs new file mode 100644 index 000000000000..2165c96aabb2 --- /dev/null +++ b/dotnet/samples/Concepts/Memory/VectorStore_Telemetry.cs @@ -0,0 +1,180 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Azure.Identity; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.InMemory; +using Microsoft.SemanticKernel.Embeddings; + +namespace Memory; + +/// +/// A simple example showing how to ingest data into a vector store and then use vector search to find related records to a given string +/// with enabled telemetry. +/// +public class VectorStore_Telemetry(ITestOutputHelper output) : BaseTest(output) +{ + [Fact] + public async Task LoggingManualRegistrationAsync() + { + // Create an embedding generation service. + var textEmbeddingGenerationService = new AzureOpenAITextEmbeddingGenerationService( + TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, + TestConfiguration.AzureOpenAIEmbeddings.Endpoint, + new AzureCliCredential()); + + // Manually construct an InMemory vector store with enabled logging. + var vectorStore = new InMemoryVectorStore() + .AsBuilder() + .UseLogging(this.LoggerFactory) + .Build(); + + await RunExampleAsync(textEmbeddingGenerationService, vectorStore); + + // Output: + // CreateCollectionIfNotExistsAsync invoked. + // CreateCollectionIfNotExistsAsync completed. + // UpsertAsync invoked. + // UpsertAsync completed. + // UpsertAsync invoked. + // UpsertAsync completed. + // UpsertAsync invoked. + // UpsertAsync completed. + // VectorizedSearchAsync invoked. + // VectorizedSearchAsync completed. + + // Search string: What is an Application Programming Interface + // Result: Application Programming Interface. A set of rules and specifications that allow software components to communicate and exchange data. + } + + [Fact] + public async Task LoggingDependencyInjectionAsync() + { + var serviceCollection = new ServiceCollection(); + + // Add an embedding generation service. + serviceCollection.AddAzureOpenAITextEmbeddingGeneration( + TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, + TestConfiguration.AzureOpenAIEmbeddings.Endpoint, + new AzureCliCredential()); + + // Add InMemory vector store + serviceCollection.AddInMemoryVectorStore(); + + // Register InMemoryVectorStore with enabled logging. + serviceCollection + .AddVectorStore(s => s.GetRequiredService()) + .UseLogging(this.LoggerFactory); + + var services = serviceCollection.BuildServiceProvider(); + + var vectorStore = services.GetRequiredService(); + var textEmbeddingGenerationService = services.GetRequiredService(); + + await RunExampleAsync(textEmbeddingGenerationService, vectorStore); + + // Output: + // CreateCollectionIfNotExistsAsync invoked. 
+ // CreateCollectionIfNotExistsAsync completed. + // UpsertAsync invoked. + // UpsertAsync completed. + // UpsertAsync invoked. + // UpsertAsync completed. + // UpsertAsync invoked. + // UpsertAsync completed. + // VectorizedSearchAsync invoked. + // VectorizedSearchAsync completed. + + // Search string: What is an Application Programming Interface + // Result: Application Programming Interface. A set of rules and specifications that allow software components to communicate and exchange data. + } + + private async Task RunExampleAsync( + ITextEmbeddingGenerationService textEmbeddingGenerationService, + IVectorStore vectorStore) + { + // Get and create collection if it doesn't exist. + var collection = vectorStore.GetCollection("skglossary"); + await collection.CreateCollectionIfNotExistsAsync(); + + // Create glossary entries and generate embeddings for them. + var glossaryEntries = CreateGlossaryEntries().ToList(); + var tasks = glossaryEntries.Select(entry => Task.Run(async () => + { + entry.DefinitionEmbedding = await textEmbeddingGenerationService.GenerateEmbeddingAsync(entry.Definition); + })); + await Task.WhenAll(tasks); + + // Upsert the glossary entries into the collection and return their keys. + var upsertedKeysTasks = glossaryEntries.Select(x => collection.UpsertAsync(x)); + var upsertedKeys = await Task.WhenAll(upsertedKeysTasks); + + // Search the collection using a vector search. + var searchString = "What is an Application Programming Interface"; + var searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); + var searchResult = await collection.VectorizedSearchAsync(searchVector, new() { Top = 1 }); + var resultRecords = await searchResult.Results.ToListAsync(); + + Console.WriteLine("Search string: " + searchString); + Console.WriteLine("Result: " + resultRecords.First().Record.Definition); + Console.WriteLine(); + } + + /// + /// Sample model class that represents a glossary entry. + /// + /// + /// Note that each property is decorated with an attribute that specifies how the property should be treated by the vector store. + /// This allows us to create a collection in the vector store and upsert and retrieve instances of this class without any further configuration. + /// + private sealed class Glossary + { + [VectorStoreRecordKey] + public ulong Key { get; set; } + + [VectorStoreRecordData(IsFilterable = true)] + public string Category { get; set; } + + [VectorStoreRecordData] + public string Term { get; set; } + + [VectorStoreRecordData] + public string Definition { get; set; } + + [VectorStoreRecordVector(1536)] + public ReadOnlyMemory DefinitionEmbedding { get; set; } + } + + /// + /// Create some sample glossary entries. + /// + /// A list of sample glossary entries. + private static IEnumerable CreateGlossaryEntries() + { + yield return new Glossary + { + Key = 1, + Category = "External Definitions", + Term = "API", + Definition = "Application Programming Interface. A set of rules and specifications that allow software components to communicate and exchange data." + }; + + yield return new Glossary + { + Key = 2, + Category = "Core Definitions", + Term = "Connectors", + Definition = "Connectors allow you to integrate with various services provide AI capabilities, including LLM, AudioToText, TextToAudio, Embedding generation, etc." 
+ }; + + yield return new Glossary + { + Key = 3, + Category = "External Definitions", + Term = "RAG", + Definition = "Retrieval Augmented Generation - a term that refers to the process of retrieving additional data to provide as context to an LLM to use when generating a response (completion) to a user’s question (prompt)." + }; + } +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/PACKAGE.md b/dotnet/src/Connectors/VectorData.Abstractions/PACKAGE.md index df87cb1b8586..da8da52c5eb5 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/PACKAGE.md +++ b/dotnet/src/Connectors/VectorData.Abstractions/PACKAGE.md @@ -4,7 +4,7 @@ Contains abstractions for accessing Vector Databases and Vector Indexes. ## Key Features -- Interfaces for Vector Database implementations which are provided in other packages including `Microsoft.SemanticKernel.Connectors.AzureAISearch`. +- Interfaces for Vector Database implementation. Vector Database implementations are provided separately in other packages, for example `Microsoft.SemanticKernel.Connectors.AzureAISearch`. ## How to Use @@ -22,14 +22,25 @@ The main types provided by this library are: ## Related Packages +Vector Database utilities: + +- `Microsoft.Extensions.VectorData` + +Vector Database implementations: + - `Microsoft.SemanticKernel.Connectors.AzureAISearch` - `Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB` - `Microsoft.SemanticKernel.Connectors.AzureCosmosNoSQL` +- `Microsoft.SemanticKernel.Connectors.InMemory` +- `Microsoft.SemanticKernel.Connectors.MongoDB` - `Microsoft.SemanticKernel.Connectors.Pinecone` +- `Microsoft.SemanticKernel.Connectors.Postgres` - `Microsoft.SemanticKernel.Connectors.Qdrant` - `Microsoft.SemanticKernel.Connectors.Redis` +- `Microsoft.SemanticKernel.Connectors.Sqlite` +- `Microsoft.SemanticKernel.Connectors.SqlServer` - `Microsoft.SemanticKernel.Connectors.Weaviate` ## Feedback & Contributing -Microsoft.Extensions.DependencyInjection.Abstractions is released as open source under the [MIT license](https://licenses.nuget.org/MIT). Bug reports and contributions are welcome at [the GitHub repository](https://github.com/microsoft/semantic-kernel). +Microsoft.Extensions.VectorData.Abstractions is released as open source under the [MIT license](https://licenses.nuget.org/MIT). Bug reports and contributions are welcome at [the GitHub repository](https://github.com/microsoft/semantic-kernel). 
diff --git a/dotnet/src/Connectors/VectorData.UnitTests/.editorconfig b/dotnet/src/Connectors/VectorData.UnitTests/.editorconfig new file mode 100644 index 000000000000..394eef685f21 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/.editorconfig @@ -0,0 +1,6 @@ +# Suppressing errors for Test projects under dotnet folder +[*.cs] +dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task +dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave +dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member +dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj b/dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj new file mode 100644 index 000000000000..d374de2022ba --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj @@ -0,0 +1,41 @@ + + + + VectorData.UnitTests + VectorData.UnitTests + net8.0 + true + enable + disable + false + $(NoWarn);SKEXP0001,SKEXP0020,VSTHRD111,CA2007,CS1591 + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderExtensionsTests.cs new file mode 100644 index 000000000000..ded7e8b44d28 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderExtensionsTests.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class KeywordHybridSearchBuilderExtensionsTests +{ + [Fact] + public void AsBuilderReturnsKeywordHybridSearchBuilder() + { + // Arrange + var search = new Mock>().Object; + + // Act + var builder = search.AsBuilder(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, builder.Build()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..35f776517dfc --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensionsTests.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class KeywordHybridSearchBuilderServiceCollectionExtensionsTests +{ + [Fact] + public void AddKeywordHybridSearchWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + + // Act + var builder = services.AddKeywordHybridSearch(search); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, d => d.ServiceType == typeof(IKeywordHybridSearch) && d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddKeywordHybridSearchWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + IKeywordHybridSearch Factory(IServiceProvider _) => search; + + // Act + var builder = services.AddKeywordHybridSearch(Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, d => d.ServiceType == typeof(IKeywordHybridSearch) && d.Lifetime == ServiceLifetime.Scoped); + } + + [Fact] + public void AddKeyedKeywordHybridSearchWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + var key = "testKey"; + + // Act + var builder = services.AddKeyedKeywordHybridSearch(key, search); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, + d => d.ServiceType == typeof(IKeywordHybridSearch) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddKeyedKeywordHybridSearchWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + IKeywordHybridSearch Factory(IServiceProvider _) => search; + var key = "testKey"; + + // Act + var builder = services.AddKeyedKeywordHybridSearch(key, Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, + d => d.ServiceType == typeof(IKeywordHybridSearch) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Scoped); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderTests.cs new file mode 100644 index 000000000000..90fb4560cad2 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderTests.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class KeywordHybridSearchBuilderTests +{ + [Fact] + public void ConstructorWithInstanceSetsInnerSearch() + { + // Arrange + var innerSearch = new Mock>().Object; + + // Act + var builder = new KeywordHybridSearchBuilder(innerSearch); + + // Assert + var builtSearch = builder.Build(); + Assert.Same(innerSearch, builtSearch); + } + + [Fact] + public void ConstructorWithFactoryCallsFactoryOnBuild() + { + // Arrange + var innerSearch = new Mock>().Object; + var serviceProvider = new Mock(); + IKeywordHybridSearch Factory(IServiceProvider _) => innerSearch; + + // Act + var builder = new KeywordHybridSearchBuilder(Factory); + var builtSearch = builder.Build(serviceProvider.Object); + + // Assert + Assert.Same(innerSearch, builtSearch); + } + + [Fact] + public void BuildWithMultipleFactoriesAppliesInReverseOrder() + { + // Arrange + var innerSearch = new Mock>().Object; + var mockSearch1 = new Mock>().Object; + var mockSearch2 = new Mock>().Object; + var builder = new KeywordHybridSearchBuilder(innerSearch); + + builder.Use(s => mockSearch1); + builder.Use(s => mockSearch2); + + // Act + var builtSearch = builder.Build(); + + // Assert + Assert.Same(mockSearch1, builtSearch); + } + + [Fact] + public void BuildWithNullReturningFactoryThrowsInvalidOperationException() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new KeywordHybridSearchBuilder(innerSearch); + builder.Use((s, _) => null!); + + // Act & Assert + var exception = Assert.Throws(() => builder.Build()); + Assert.Contains("returned null", exception.Message); + } + + [Fact] + public void BuildWithNullServiceProviderUsesEmptyServiceProvider() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new KeywordHybridSearchBuilder(innerSearch); + + // Act + var builtSearch = builder.Build(null); + + // Assert + Assert.Same(innerSearch, builtSearch); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchBuilderExtensionsTests.cs new file mode 100644 index 000000000000..8cf6587b3dd7 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchBuilderExtensionsTests.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingKeywordHybridSearchBuilderExtensionsTests +{ + [Fact] + public void UseLoggingWithFactoryAddsDecorator() + { + // Arrange + var innerSearch = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var builder = new KeywordHybridSearchBuilder(innerSearch); + + // Act + builder.UseLogging(loggerFactory.Object); + var result = builder.Build(); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullFactoryResolvesFromServiceProvider() + { + // Arrange + var innerSearch = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var serviceProvider = new Mock(); + serviceProvider.Setup(sp => sp.GetService(typeof(ILoggerFactory))).Returns(loggerFactory.Object); + var builder = new KeywordHybridSearchBuilder(innerSearch); + + // Act + builder.UseLogging(); + var result = builder.Build(serviceProvider.Object); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullLoggerFactoryReturnsInnerSearch() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new KeywordHybridSearchBuilder(innerSearch); + + // Act + builder.UseLogging(NullLoggerFactory.Instance); + var result = builder.Build(); + + // Assert + Assert.Same(innerSearch, result); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchTests.cs new file mode 100644 index 000000000000..85231becf613 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchTests.cs @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingKeywordHybridSearchTests +{ + [Fact] + public void ConstructorThrowsOnNullInnerSearch() + { + // Arrange + var logger = new Mock().Object; + + // Act & Assert + Assert.Throws(() => new LoggingKeywordHybridSearch(null!, logger)); + } + + [Fact] + public void ConstructorThrowsOnNullLogger() + { + // Arrange + var innerSearch = new Mock>().Object; + + // Act & Assert + Assert.Throws(() => new LoggingKeywordHybridSearch(innerSearch, null!)); + } + + [Fact] + public async Task HybridSearchDelegatesToInnerSearchAsync() + { + // Arrange + var innerSearch = new Mock>(); + var logger = new Mock().Object; + var vector = new float[] { 1.0f }; + var keywords = new List { "test" }; + var options = new HybridSearchOptions(); + var searchResults = new[] { new VectorSearchResult("result", 0.9f) }.ToAsyncEnumerable(); + var results = new VectorSearchResults(searchResults); + + innerSearch.Setup(s => s.HybridSearchAsync(vector, keywords, options, default)) + .ReturnsAsync(results); + + var decorator = new LoggingKeywordHybridSearch(innerSearch.Object, logger); + + // Act + var actualResults = await decorator.HybridSearchAsync(vector, keywords, options); + + // Assert + Assert.Same(results, actualResults); + innerSearch.Verify(s => s.HybridSearchAsync(vector, keywords, options, default), Times.Once()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchBuilderExtensionsTests.cs new file mode 100644 index 000000000000..026949453211 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchBuilderExtensionsTests.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. 
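// --- Illustrative sketch (not part of this patch): constructing the logging decorator directly,
// as LoggingKeywordHybridSearchTests above do. The record type, backend and loggerFactory
// variables are assumptions; generic arguments were stripped from this extract.
using System.Collections.Generic;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.VectorData;

ILogger logger = loggerFactory.CreateLogger("HybridSearch");

// Both constructor arguments must be non-null, per the tests above.
IKeywordHybridSearch<MyRecord> search = new LoggingKeywordHybridSearch<MyRecord>(backend, logger);

var vector = new float[] { 0.1f, 0.2f, 0.3f };
var keywords = new List<string> { "hotel", "beach" };

// The decorator logs around the call (via RunWithLoggingAsync) and otherwise returns the
// inner search's VectorSearchResults unchanged.
var results = await search.HybridSearchAsync(vector, keywords);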
+ +using System; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorizableTextSearchBuilderExtensionsTests +{ + [Fact] + public void UseLoggingWithFactoryAddsDecorator() + { + // Arrange + var innerSearch = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var builder = new VectorizableTextSearchBuilder(innerSearch); + + // Act + builder.UseLogging(loggerFactory.Object); + var result = builder.Build(); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullFactoryResolvesFromServiceProvider() + { + // Arrange + var innerSearch = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var serviceProvider = new Mock(); + serviceProvider.Setup(sp => sp.GetService(typeof(ILoggerFactory))).Returns(loggerFactory.Object); + var builder = new VectorizableTextSearchBuilder(innerSearch); + + // Act + builder.UseLogging(); + var result = builder.Build(serviceProvider.Object); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullLoggerFactoryReturnsInnerSearch() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new VectorizableTextSearchBuilder(innerSearch); + + // Act + builder.UseLogging(NullLoggerFactory.Instance); + var result = builder.Build(); + + // Assert + Assert.Same(innerSearch, result); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchTests.cs new file mode 100644 index 000000000000..e7de933156b7 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchTests.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorizableTextSearchTests +{ + [Fact] + public void ConstructorThrowsOnNullInnerSearch() + { + // Arrange + var logger = new Mock().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorizableTextSearch(null!, logger)); + } + + [Fact] + public void ConstructorThrowsOnNullLogger() + { + // Arrange + var innerSearch = new Mock>().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorizableTextSearch(innerSearch, null!)); + } + + [Fact] + public async Task VectorizableTextSearchDelegatesToInnerSearchAsync() + { + // Arrange + var innerSearch = new Mock>(); + var logger = new Mock().Object; + var searchText = "test"; + var options = new VectorSearchOptions(); + var searchResults = new[] { new VectorSearchResult("result", 0.9f) }.ToAsyncEnumerable(); + var results = new VectorSearchResults(searchResults); + innerSearch.Setup(s => s.VectorizableTextSearchAsync(searchText, options, default)) + .ReturnsAsync(results); + var decorator = new LoggingVectorizableTextSearch(innerSearch.Object, logger); + + // Act + var actualResults = await decorator.VectorizableTextSearchAsync(searchText, options); + + // Assert + Assert.Same(results, actualResults); + innerSearch.Verify(s => s.VectorizableTextSearchAsync(searchText, options, default), Times.Once()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchBuilderExtensionsTests.cs new file mode 100644 index 000000000000..a6380a9c5303 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchBuilderExtensionsTests.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. 
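// --- Illustrative sketch (not part of this patch): the text-driven search surface exercised by
// LoggingVectorizableTextSearchTests above. The record type, textBackend and loggerFactory
// variables are assumptions; generic arguments were stripped from this extract.
using Microsoft.Extensions.VectorData;

IVectorizableTextSearch<MyRecord> textSearch = textBackend.AsBuilder()
    .UseLogging(loggerFactory)
    .Build();

// With IVectorizableTextSearch the underlying store is expected to vectorize the query text
// itself, so only the text is passed here; the optional search options are omitted.
var matches = await textSearch.VectorizableTextSearchAsync("pet-friendly hotels near the beach");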
+ +using System; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorizedSearchBuilderExtensionsTests +{ + [Fact] + public void UseLoggingWithFactoryAddsDecorator() + { + // Arrange + var innerSearch = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var builder = new VectorizedSearchBuilder(innerSearch); + + // Act + builder.UseLogging(loggerFactory.Object); + var result = builder.Build(); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullFactoryResolvesFromServiceProvider() + { + // Arrange + var innerSearch = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var serviceProvider = new Mock(); + serviceProvider.Setup(sp => sp.GetService(typeof(ILoggerFactory))).Returns(loggerFactory.Object); + var builder = new VectorizedSearchBuilder(innerSearch); + + // Act + builder.UseLogging(); + var result = builder.Build(serviceProvider.Object); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullLoggerFactoryReturnsInnerSearch() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new VectorizedSearchBuilder(innerSearch); + + // Act + builder.UseLogging(NullLoggerFactory.Instance); + var result = builder.Build(); + + // Assert + Assert.Same(innerSearch, result); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchTests.cs new file mode 100644 index 000000000000..ceb801060e7e --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchTests.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorizedSearchTests +{ + [Fact] + public void ConstructorThrowsOnNullInnerSearch() + { + // Arrange + var logger = new Mock().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorizedSearch(null!, logger)); + } + + [Fact] + public void ConstructorThrowsOnNullLogger() + { + // Arrange + var innerSearch = new Mock>().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorizedSearch(innerSearch, null!)); + } + + [Fact] + public async Task VectorizedSearchDelegatesToInnerSearchAsync() + { + // Arrange + var innerSearch = new Mock>(); + var logger = new Mock().Object; + var vector = new float[] { 1.0f }; + var options = new VectorSearchOptions(); + var searchResults = new[] { new VectorSearchResult("result", 0.9f) }.ToAsyncEnumerable(); + var results = new VectorSearchResults(searchResults); + innerSearch.Setup(s => s.VectorizedSearchAsync(vector, options, default)) + .ReturnsAsync(results); + var decorator = new LoggingVectorizedSearch(innerSearch.Object, logger); + + // Act + var actualResults = await decorator.VectorizedSearchAsync(vector, options); + + // Assert + Assert.Same(results, actualResults); + innerSearch.Verify(s => s.VectorizedSearchAsync(vector, options, default), Times.Once()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderExtensionsTests.cs new file mode 100644 index 000000000000..197256c39108 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderExtensionsTests.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorizableTextSearchBuilderExtensionsTests +{ + [Fact] + public void AsBuilderReturnsVectorizableTextSearchBuilder() + { + // Arrange + var search = new Mock>().Object; + + // Act + var builder = search.AsBuilder(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, builder.Build()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..42d153fcbb5b --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensionsTests.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorizableTextSearchBuilderServiceCollectionExtensionsTests +{ + [Fact] + public void AddVectorizableTextSearchWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + + // Act + var builder = services.AddVectorizableTextSearch(search); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, d => d.ServiceType == typeof(IVectorizableTextSearch) && d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddVectorizableTextSearchWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + IVectorizableTextSearch Factory(IServiceProvider _) => search; + + // Act + var builder = services.AddVectorizableTextSearch(Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, d => d.ServiceType == typeof(IVectorizableTextSearch) && d.Lifetime == ServiceLifetime.Scoped); + } + + [Fact] + public void AddKeyedVectorizableTextSearchWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorizableTextSearch(key, search); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorizableTextSearch) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddKeyedVectorizableTextSearchWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + IVectorizableTextSearch Factory(IServiceProvider _) => search; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorizableTextSearch(key, Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorizableTextSearch) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Scoped); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderTests.cs new file mode 100644 index 000000000000..1d556abe5b26 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderTests.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. 
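// --- Illustrative sketch (not part of this patch): registering a search implementation in DI,
// as the ServiceCollection extension tests above verify. The record type and textBackend
// variable are assumptions; generic arguments were stripped from this extract.
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.VectorData;

var services = new ServiceCollection();
services.AddLogging(); // needed so UseLogging() can resolve an ILoggerFactory at Build time

// Default lifetime is Singleton; factory-based and keyed overloads are also available.
services.AddVectorizableTextSearch(textBackend)
    .UseLogging(); // the returned builder can be configured further before the service is resolved

services.AddKeyedVectorizableTextSearch("hotels", sp => textBackend, ServiceLifetime.Scoped);

using var provider = services.BuildServiceProvider();
var resolved = provider.GetRequiredService<IVectorizableTextSearch<MyRecord>>();
var keyed = provider.GetRequiredKeyedService<IVectorizableTextSearch<MyRecord>>("hotels");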
+ +using System; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorizableTextSearchBuilderTests +{ + [Fact] + public void ConstructorWithInstanceSetsInnerSearch() + { + // Arrange + var innerSearch = new Mock>().Object; + + // Act + var builder = new VectorizableTextSearchBuilder(innerSearch); + + // Assert + var builtSearch = builder.Build(); + Assert.Same(innerSearch, builtSearch); + } + + [Fact] + public void ConstructorWithFactoryCallsFactoryOnBuild() + { + // Arrange + var innerSearch = new Mock>().Object; + var serviceProvider = new Mock(); + IVectorizableTextSearch Factory(IServiceProvider _) => innerSearch; + + // Act + var builder = new VectorizableTextSearchBuilder(Factory); + var builtSearch = builder.Build(serviceProvider.Object); + + // Assert + Assert.Same(innerSearch, builtSearch); + } + + [Fact] + public void BuildWithMultipleFactoriesAppliesInReverseOrder() + { + // Arrange + var innerSearch = new Mock>().Object; + var mockSearch1 = new Mock>().Object; + var mockSearch2 = new Mock>().Object; + var builder = new VectorizableTextSearchBuilder(innerSearch); + + builder.Use(s => mockSearch1); + builder.Use(s => mockSearch2); + + // Act + var builtSearch = builder.Build(); + + // Assert + Assert.Same(mockSearch1, builtSearch); + } + + [Fact] + public void BuildWithNullReturningFactoryThrowsInvalidOperationException() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new VectorizableTextSearchBuilder(innerSearch); + builder.Use((s, _) => null!); + + // Act & Assert + var exception = Assert.Throws(() => builder.Build()); + Assert.Contains("returned null", exception.Message); + } + + [Fact] + public void BuildWithNullServiceProviderUsesEmptyServiceProvider() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new VectorizableTextSearchBuilder(innerSearch); + + // Act + var builtSearch = builder.Build(null); + + // Assert + Assert.Same(innerSearch, builtSearch); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderExtensionsTests.cs new file mode 100644 index 000000000000..eb0548f2097a --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderExtensionsTests.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorizedSearchBuilderExtensionsTests +{ + [Fact] + public void AsBuilderReturnsVectorizedSearchBuilder() + { + // Arrange + var search = new Mock>().Object; + + // Act + var builder = search.AsBuilder(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, builder.Build()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..b183d1cba162 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensionsTests.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorizedSearchBuilderServiceCollectionExtensionsTests +{ + [Fact] + public void AddVectorizedSearchWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + + // Act + var builder = services.AddVectorizedSearch(search); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, d => d.ServiceType == typeof(IVectorizedSearch) && d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddVectorizedSearchWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + IVectorizedSearch Factory(IServiceProvider _) => search; + + // Act + var builder = services.AddVectorizedSearch(Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, d => d.ServiceType == typeof(IVectorizedSearch) && d.Lifetime == ServiceLifetime.Scoped); + } + + [Fact] + public void AddKeyedVectorizedSearchWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorizedSearch(key, search); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorizedSearch) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddKeyedVectorizedSearchWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + IVectorizedSearch Factory(IServiceProvider _) => search; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorizedSearch(key, Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorizedSearch) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Scoped); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderTests.cs new file mode 100644 index 000000000000..d883db85a33d --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderTests.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorizedSearchBuilderTests +{ + [Fact] + public void ConstructorWithInstanceSetsInnerSearch() + { + // Arrange + var innerSearch = new Mock>().Object; + + // Act + var builder = new VectorizedSearchBuilder(innerSearch); + + // Assert + var builtSearch = builder.Build(); + Assert.Same(innerSearch, builtSearch); + } + + [Fact] + public void ConstructorWithFactoryCallsFactoryOnBuild() + { + // Arrange + var innerSearch = new Mock>().Object; + var serviceProvider = new Mock(); + IVectorizedSearch Factory(IServiceProvider _) => innerSearch; + + // Act + var builder = new VectorizedSearchBuilder(Factory); + var builtSearch = builder.Build(serviceProvider.Object); + + // Assert + Assert.Same(innerSearch, builtSearch); + } + + [Fact] + public void BuildWithMultipleFactoriesAppliesInReverseOrder() + { + // Arrange + var innerSearch = new Mock>().Object; + var mockSearch1 = new Mock>().Object; + var mockSearch2 = new Mock>().Object; + var builder = new VectorizedSearchBuilder(innerSearch); + + builder.Use(s => mockSearch1); + builder.Use(s => mockSearch2); + + // Act + var builtSearch = builder.Build(); + + // Assert + Assert.Same(mockSearch1, builtSearch); + } + + [Fact] + public void BuildWithNullReturningFactoryThrowsInvalidOperationException() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new VectorizedSearchBuilder(innerSearch); + builder.Use((s, _) => null!); + + // Act & Assert + var exception = Assert.Throws(() => builder.Build()); + Assert.Contains("returned null", exception.Message); + } + + [Fact] + public void BuildWithNullServiceProviderUsesEmptyServiceProvider() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new VectorizedSearchBuilder(innerSearch); + + // Act + var builtSearch = builder.Build(null); + + // Assert + Assert.Same(innerSearch, builtSearch); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreBuilderExtensionsTests.cs new file mode 100644 index 000000000000..8db4dbf35c2a --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreBuilderExtensionsTests.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorStoreBuilderExtensionsTests +{ + [Fact] + public void UseLoggingWithFactoryAddsDecorator() + { + // Arrange + var innerStore = new Mock().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var builder = new VectorStoreBuilder(innerStore); + + // Act + builder.UseLogging(loggerFactory.Object); + var result = builder.Build(); + + // Assert + Assert.IsType(result); + } + + [Fact] + public void UseLoggingWithNullFactoryResolvesFromServiceProvider() + { + // Arrange + var innerStore = new Mock().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var serviceProvider = new Mock(); + serviceProvider.Setup(sp => sp.GetService(typeof(ILoggerFactory))).Returns(loggerFactory.Object); + var builder = new VectorStoreBuilder(innerStore); + + // Act + builder.UseLogging(); + var result = builder.Build(serviceProvider.Object); + + // Assert + Assert.IsType(result); + } + + [Fact] + public void UseLoggingWithNullLoggerFactoryReturnsInnerStore() + { + // Arrange + var innerStore = new Mock().Object; + var builder = new VectorStoreBuilder(innerStore); + + // Act + builder.UseLogging(NullLoggerFactory.Instance); + var result = builder.Build(); + + // Assert + Assert.Same(innerStore, result); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensionsTests.cs new file mode 100644 index 000000000000..638e91fc17ad --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensionsTests.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorStoreRecordCollectionBuilderExtensionsTests +{ + [Fact] + public void UseLoggingWithFactoryAddsDecorator() + { + // Arrange + var innerCollection = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var builder = new VectorStoreRecordCollectionBuilder(innerCollection); + + // Act + builder.UseLogging(loggerFactory.Object); + var result = builder.Build(); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullFactoryResolvesFromServiceProvider() + { + // Arrange + var innerCollection = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var serviceProvider = new Mock(); + serviceProvider.Setup(sp => sp.GetService(typeof(ILoggerFactory))).Returns(loggerFactory.Object); + var builder = new VectorStoreRecordCollectionBuilder(innerCollection); + + // Act + builder.UseLogging(); + var result = builder.Build(serviceProvider.Object); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullLoggerFactoryReturnsInnerCollection() + { + // Arrange + var innerCollection = new Mock>().Object; + var builder = new VectorStoreRecordCollectionBuilder(innerCollection); + + // Act + builder.UseLogging(NullLoggerFactory.Instance); + var result = builder.Build(); + + // Assert + Assert.Same(innerCollection, result); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionTests.cs new file mode 100644 index 000000000000..294ee64555a0 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionTests.cs @@ -0,0 +1,245 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorStoreRecordCollectionTests +{ + [Fact] + public void ConstructorThrowsOnNullInnerCollection() + { + // Arrange + var logger = new Mock().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorStoreRecordCollection(null!, logger)); + } + + [Fact] + public void ConstructorThrowsOnNullLogger() + { + // Arrange + var innerCollection = new Mock>().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorStoreRecordCollection(innerCollection, null!)); + } + + [Fact] + public void CollectionNameReturnsInnerCollectionName() + { + // Arrange + var innerCollection = new Mock>(); + innerCollection.Setup(c => c.CollectionName).Returns("test"); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + var name = decorator.CollectionName; + + // Assert + Assert.Equal("test", name); + innerCollection.Verify(c => c.CollectionName, Times.Once()); + } + + [Fact] + public async Task CollectionExistsDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + innerCollection.Setup(c => c.CollectionExistsAsync(default)).ReturnsAsync(true); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + var exists = await decorator.CollectionExistsAsync(); + + // Assert + Assert.True(exists); + innerCollection.Verify(c => c.CollectionExistsAsync(default), Times.Once()); + } + + [Fact] + public async Task CreateCollectionDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + innerCollection.Setup(c => c.CreateCollectionAsync(default)).Returns(Task.CompletedTask); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + await decorator.CreateCollectionAsync(); + + // Assert + innerCollection.Verify(c => c.CreateCollectionAsync(default), Times.Once()); + } + + [Fact] + public async Task CreateCollectionIfNotExistsDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + innerCollection.Setup(c => c.CreateCollectionIfNotExistsAsync(default)).Returns(Task.CompletedTask); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + await decorator.CreateCollectionIfNotExistsAsync(); + + // Assert + innerCollection.Verify(c => c.CreateCollectionIfNotExistsAsync(default), Times.Once()); + } + + [Fact] + public async Task DeleteDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + innerCollection.Setup(c => c.DeleteAsync("key", default)).Returns(Task.CompletedTask); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + await decorator.DeleteAsync("key"); + + // Assert + innerCollection.Verify(c => c.DeleteAsync("key", default), Times.Once()); + } + + [Fact] + public async Task DeleteBatchDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + var keys = new[] { "key1", "key2" }; + innerCollection.Setup(c => c.DeleteBatchAsync(keys, default)).Returns(Task.CompletedTask); + var logger = new Mock().Object; + var decorator = 
new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + await decorator.DeleteBatchAsync(keys); + + // Assert + innerCollection.Verify(c => c.DeleteBatchAsync(keys, default), Times.Once()); + } + + [Fact] + public async Task DeleteCollectionDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + innerCollection.Setup(c => c.DeleteCollectionAsync(default)).Returns(Task.CompletedTask); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + await decorator.DeleteCollectionAsync(); + + // Assert + innerCollection.Verify(c => c.DeleteCollectionAsync(default), Times.Once()); + } + + [Fact] + public async Task GetDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + var record = new object(); + innerCollection.Setup(c => c.GetAsync("key", null, default)).ReturnsAsync(record); + + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + var result = await decorator.GetAsync("key"); + + // Assert + Assert.Same(record, result); + innerCollection.Verify(c => c.GetAsync("key", null, default), Times.Once()); + } + + [Fact] + public async Task GetBatchDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + var keys = new[] { "key1", "key2" }; + var records = new[] { new object(), new object() }; + innerCollection.Setup(c => c.GetBatchAsync(keys, null, default)).Returns(records.ToAsyncEnumerable()); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + var result = await decorator.GetBatchAsync(keys).ToListAsync(); + + // Assert + Assert.Equal(records, result); + innerCollection.Verify(c => c.GetBatchAsync(keys, null, default), Times.Once()); + } + + [Fact] + public async Task UpsertDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + var record = new object(); + innerCollection.Setup(c => c.UpsertAsync(record, default)).ReturnsAsync("key"); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + var key = await decorator.UpsertAsync(record); + + // Assert + Assert.Equal("key", key); + innerCollection.Verify(c => c.UpsertAsync(record, default), Times.Once()); + } + + [Fact] + public async Task UpsertBatchDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + var records = new[] { new object(), new object() }; + var keys = new[] { "key1", "key2" }; + innerCollection.Setup(c => c.UpsertBatchAsync(records, default)).Returns(keys.ToAsyncEnumerable()); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + var result = await decorator.UpsertBatchAsync(records).ToListAsync(); + + // Assert + Assert.Equal(keys, result); + innerCollection.Verify(c => c.UpsertBatchAsync(records, default), Times.Once()); + } + + [Fact] + public async Task VectorizedSearchDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + var vector = new float[] { 1.0f }; + var options = new VectorSearchOptions(); + var searchResults = new[] { new VectorSearchResult("result", 0.9f) }.ToAsyncEnumerable(); + var results = new VectorSearchResults(searchResults); + innerCollection.Setup(c => 
c.VectorizedSearchAsync(vector, options, default)).ReturnsAsync(results); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + var actualResults = await decorator.VectorizedSearchAsync(vector, options); + + // Assert + Assert.Same(results, actualResults); + innerCollection.Verify(c => c.VectorizedSearchAsync(vector, options, default), Times.Once()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreTests.cs new file mode 100644 index 000000000000..058fc56b6ad1 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreTests.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorStoreTests +{ + [Fact] + public void ConstructorThrowsOnNullInnerStore() + { + // Arrange + var logger = new Mock().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorStore(null!, logger)); + } + + [Fact] + public void ConstructorThrowsOnNullLogger() + { + // Arrange + var innerStore = new Mock().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorStore(innerStore, null!)); + } + + [Fact] + public void GetCollectionDelegatesToInnerStore() + { + // Arrange + var innerStore = new Mock(); + var logger = new Mock().Object; + var collection = new Mock>().Object; + innerStore.Setup(s => s.GetCollection("test", null)) + .Returns(collection); + var decorator = new LoggingVectorStore(innerStore.Object, logger); + + // Act + var result = decorator.GetCollection("test"); + + // Assert + Assert.IsType>(result); + innerStore.Verify(s => s.GetCollection("test", null), Times.Once()); + } + + [Fact] + public async Task ListCollectionNamesDelegatesToInnerStoreAsync() + { + // Arrange + var innerStore = new Mock(); + var logger = new Mock().Object; + string[] names = ["col1", "col2"]; + innerStore.Setup(s => s.ListCollectionNamesAsync(default)) + .Returns(names.ToAsyncEnumerable()); + var decorator = new LoggingVectorStore(innerStore.Object, logger); + + // Act + var result = await decorator.ListCollectionNamesAsync().ToListAsync(); + + // Assert + Assert.Equal(names, result); + innerStore.Verify(s => s.ListCollectionNamesAsync(default), Times.Once()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderExtensionsTests.cs new file mode 100644 index 000000000000..cd6aa33e6af1 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderExtensionsTests.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. 
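// --- Illustrative sketch (not part of this patch): wrapping a whole store with logging, as the
// LoggingVectorStore and LoggingVectorStoreRecordCollection tests above exercise. The concrete
// innerStore, loggerFactory, key type and MyRecord model are assumptions.
using Microsoft.Extensions.VectorData;

IVectorStore store = innerStore.AsBuilder()
    .UseLogging(loggerFactory)
    .Build();

// GetCollection on the logging store returns a collection that is itself wrapped in
// LoggingVectorStoreRecordCollection, so each operation below is logged and then delegated.
var collection = store.GetCollection<string, MyRecord>("hotels");
await collection.CreateCollectionIfNotExistsAsync();

string key = await collection.UpsertAsync(new MyRecord());
MyRecord? fetched = await collection.GetAsync(key);
await collection.DeleteAsync(key);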
+ +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorStoreBuilderExtensionsTests +{ + [Fact] + public void AsBuilderReturnsVectorStoreBuilder() + { + // Arrange + var store = new Mock().Object; + + // Act + var builder = store.AsBuilder(); + + // Assert + Assert.IsType(builder); + Assert.Same(store, builder.Build()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..4658cbe2ea6a --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderServiceCollectionExtensionsTests.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorStoreBuilderServiceCollectionExtensionsTests +{ + [Fact] + public void AddVectorStoreWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var store = new Mock().Object; + + // Act + var builder = services.AddVectorStore(store); + var provider = services.BuildServiceProvider(); + var resolvedStore = provider.GetService(); + + // Assert + Assert.IsType(builder); + Assert.Same(store, resolvedStore); + Assert.Single(services, d => d.ServiceType == typeof(IVectorStore) && d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddVectorStoreWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var store = new Mock().Object; + IVectorStore Factory(IServiceProvider _) => store; + + // Act + var builder = services.AddVectorStore(Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedStore = provider.GetService(); + + // Assert + Assert.IsType(builder); + Assert.Same(store, resolvedStore); + Assert.Single(services, d => d.ServiceType == typeof(IVectorStore) && d.Lifetime == ServiceLifetime.Scoped); + } + + [Fact] + public void AddKeyedVectorStoreWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var store = new Mock().Object; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorStore(key, store); + var provider = services.BuildServiceProvider(); + var resolvedStore = provider.GetKeyedService(key); + + // Assert + Assert.IsType(builder); + Assert.Same(store, resolvedStore); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorStore) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddKeyedVectorStoreWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var store = new Mock().Object; + IVectorStore Factory(IServiceProvider _) => store; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorStore(key, Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedStore = provider.GetKeyedService(key); + + // Assert + Assert.IsType(builder); + Assert.Same(store, resolvedStore); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorStore) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Scoped); + } +} diff --git 
a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderTests.cs new file mode 100644 index 000000000000..88da238bef5d --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderTests.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorStoreBuilderTests +{ + [Fact] + public void ConstructorWithInstanceSetsInnerStore() + { + // Arrange + var innerStore = new Mock().Object; + + // Act + var builder = new VectorStoreBuilder(innerStore); + + // Assert + var builtStore = builder.Build(); + Assert.Same(innerStore, builtStore); + } + + [Fact] + public void ConstructorWithFactoryCallsFactoryOnBuild() + { + // Arrange + var innerStore = new Mock().Object; + var serviceProvider = new Mock(); + IVectorStore Factory(IServiceProvider _) => innerStore; + + // Act + var builder = new VectorStoreBuilder(Factory); + var builtStore = builder.Build(serviceProvider.Object); + + // Assert + Assert.Same(innerStore, builtStore); + } + + [Fact] + public void BuildWithMultipleFactoriesAppliesInReverseOrder() + { + // Arrange + var innerStore = new Mock().Object; + var mockStore1 = new Mock().Object; + var mockStore2 = new Mock().Object; + var builder = new VectorStoreBuilder(innerStore); + + builder.Use(s => mockStore1); + builder.Use(s => mockStore2); + + // Act + var builtStore = builder.Build(); + + // Assert + Assert.Same(mockStore1, builtStore); + } + + [Fact] + public void BuildWithNullReturningFactoryThrowsInvalidOperationException() + { + // Arrange + var innerStore = new Mock().Object; + var builder = new VectorStoreBuilder(innerStore); + builder.Use((s, _) => null!); + + // Act & Assert + var exception = Assert.Throws(() => builder.Build()); + Assert.Contains("returned null", exception.Message); + } + + [Fact] + public void BuildWithNullServiceProviderUsesEmptyServiceProvider() + { + // Arrange + var innerStore = new Mock().Object; + var builder = new VectorStoreBuilder(innerStore); + + // Act + var builtStore = builder.Build(null); + + // Assert + Assert.Same(innerStore, builtStore); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderExtensionsTests.cs new file mode 100644 index 000000000000..52fb53ba849f --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderExtensionsTests.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorStoreRecordCollectionBuilderExtensionsTests +{ + [Fact] + public void AsBuilderReturnsVectorStoreRecordCollectionBuilder() + { + // Arrange + var collection = new Mock>().Object; + + // Act + var builder = collection.AsBuilder(); + + // Assert + Assert.IsType>(builder); + Assert.Same(collection, builder.Build()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..973f2ab3baf2 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensionsTests.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorStoreRecordCollectionBuilderServiceCollectionExtensionsTests +{ + [Fact] + public void AddVectorStoreRecordCollectionWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var collection = new Mock>().Object; + + // Act + var builder = services.AddVectorStoreRecordCollection(collection); + var provider = services.BuildServiceProvider(); + var resolvedCollection = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(collection, resolvedCollection); + Assert.Single(services, d => d.ServiceType == typeof(IVectorStoreRecordCollection) && d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddVectorStoreRecordCollectionWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var collection = new Mock>().Object; + IVectorStoreRecordCollection Factory(IServiceProvider _) => collection; + + // Act + var builder = services.AddVectorStoreRecordCollection(Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedCollection = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(collection, resolvedCollection); + Assert.Single(services, d => d.ServiceType == typeof(IVectorStoreRecordCollection) && d.Lifetime == ServiceLifetime.Scoped); + } + + [Fact] + public void AddKeyedVectorStoreRecordCollectionWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var collection = new Mock>().Object; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorStoreRecordCollection(key, collection); + var provider = services.BuildServiceProvider(); + var resolvedCollection = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(collection, resolvedCollection); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorStoreRecordCollection) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddKeyedVectorStoreRecordCollectionWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var collection = new Mock>().Object; + IVectorStoreRecordCollection Factory(IServiceProvider _) => collection; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorStoreRecordCollection(key, 
Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedCollection = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(collection, resolvedCollection); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorStoreRecordCollection) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Scoped); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderTests.cs new file mode 100644 index 000000000000..8246b0fa88b0 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderTests.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorStoreRecordCollectionBuilderTests +{ + [Fact] + public void ConstructorWithInstanceSetsInnerCollection() + { + // Arrange + var innerCollection = new Mock>().Object; + + // Act + var builder = new VectorStoreRecordCollectionBuilder(innerCollection); + + // Assert + var builtCollection = builder.Build(); + Assert.Same(innerCollection, builtCollection); + } + + [Fact] + public void ConstructorWithFactoryCallsFactoryOnBuild() + { + // Arrange + var innerCollection = new Mock>().Object; + var serviceProvider = new Mock(); + IVectorStoreRecordCollection Factory(IServiceProvider _) => innerCollection; + + // Act + var builder = new VectorStoreRecordCollectionBuilder(Factory); + var builtCollection = builder.Build(serviceProvider.Object); + + // Assert + Assert.Same(innerCollection, builtCollection); + } + + [Fact] + public void BuildWithMultipleFactoriesAppliesInReverseOrder() + { + // Arrange + var innerCollection = new Mock>().Object; + var mockCollection1 = new Mock>().Object; + var mockCollection2 = new Mock>().Object; + var builder = new VectorStoreRecordCollectionBuilder(innerCollection); + + builder.Use(c => mockCollection1); + builder.Use(c => mockCollection2); + + // Act + var builtCollection = builder.Build(); + + // Assert + Assert.Same(mockCollection1, builtCollection); + } + + [Fact] + public void BuildWithNullReturningFactoryThrowsInvalidOperationException() + { + // Arrange + var innerCollection = new Mock>().Object; + var builder = new VectorStoreRecordCollectionBuilder(innerCollection); + builder.Use((c, _) => null!); + + // Act & Assert + var exception = Assert.Throws(() => builder.Build()); + Assert.Contains("returned null", exception.Message); + } + + [Fact] + public void BuildWithNullServiceProviderUsesEmptyServiceProvider() + { + // Arrange + var innerCollection = new Mock>().Object; + var builder = new VectorStoreRecordCollectionBuilder(innerCollection); + + // Act + var builtCollection = builder.Build(null); + + // Assert + Assert.Same(innerCollection, builtCollection); + } +} diff --git a/dotnet/src/Connectors/VectorData/PACKAGE.md b/dotnet/src/Connectors/VectorData/PACKAGE.md new file mode 100644 index 000000000000..6c6c756412d9 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/PACKAGE.md @@ -0,0 +1,40 @@ +## About + +Contains utilities for accessing Vector Databases and Vector Indexes. + +## Key Features + +- Telemetry for any Vector Database implementation. 
Vector Database implementations are provided separately in other packages, for example `Microsoft.SemanticKernel.Connectors.AzureAISearch`. + +## How to Use + +This package is typically used with an implementation of the vector database abstractions such as `Microsoft.SemanticKernel.Connectors.AzureAISearch`. + +## Additional Documentation + +- [Conceptual documentation](https://learn.microsoft.com/en-us/semantic-kernel/concepts/vector-store-connectors) + +## Related Packages + +Vector Database abstractions: + +- `Microsoft.Extensions.VectorData.Abstractions` + +Vector Database implementations: + +- `Microsoft.SemanticKernel.Connectors.AzureAISearch` +- `Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB` +- `Microsoft.SemanticKernel.Connectors.AzureCosmosNoSQL` +- `Microsoft.SemanticKernel.Connectors.InMemory` +- `Microsoft.SemanticKernel.Connectors.MongoDB` +- `Microsoft.SemanticKernel.Connectors.Pinecone` +- `Microsoft.SemanticKernel.Connectors.Postgres` +- `Microsoft.SemanticKernel.Connectors.Qdrant` +- `Microsoft.SemanticKernel.Connectors.Redis` +- `Microsoft.SemanticKernel.Connectors.Sqlite` +- `Microsoft.SemanticKernel.Connectors.SqlServer` +- `Microsoft.SemanticKernel.Connectors.Weaviate` + +## Feedback & Contributing + +Microsoft.Extensions.VectorData is released as open source under the [MIT license](https://licenses.nuget.org/MIT). Bug reports and contributions are welcome at [the GitHub repository](https://github.com/microsoft/semantic-kernel). diff --git a/dotnet/src/Connectors/VectorData/VectorData.csproj b/dotnet/src/Connectors/VectorData/VectorData.csproj new file mode 100644 index 000000000000..31e84c2533ce --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorData.csproj @@ -0,0 +1,62 @@ + + + + Microsoft.Extensions.VectorData + Microsoft.Extensions.VectorData + net8.0;netstandard2.0;net462 + + + + + + + 9.0.0-preview.1.25078.1 + 9.0.0.0 + + 9.0.0-preview.1.24518.1 + Microsoft.Extensions.VectorData + $(AssemblyName) + + Utilities for vector database access. + + neticon.png + neticon.png + PACKAGE.md + + Vector, Database, SDK + $(PackageDescription) + https://dot.net/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilder.cs b/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilder.cs new file mode 100644 index 000000000000..8a0295a65e98 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilder.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// A builder for creating pipelines of . +[Experimental("SKEXP0020")] +public sealed class KeywordHybridSearchBuilder +{ + private readonly Func> _innerSearchFactory; + + /// The registered search factory instances. + private List, IServiceProvider, IKeywordHybridSearch>>? _searchFactories; + + /// Initializes a new instance of the class. + /// The inner that represents the underlying backend. + public KeywordHybridSearchBuilder(IKeywordHybridSearch innerSearch) + { + Verify.NotNull(innerSearch); + + this._innerSearchFactory = _ => innerSearch; + } + + /// Initializes a new instance of the class. + /// A callback that produces the inner that represents the underlying backend. 
+ public KeywordHybridSearchBuilder(Func> innerSearchFactory) + { + Verify.NotNull(innerSearchFactory); + + this._innerSearchFactory = innerSearchFactory; + } + + /// Builds an that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn. + /// + /// The that should provide services to the instances. + /// If null, an empty will be used. + /// + /// An instance of that represents the entire pipeline. + public IKeywordHybridSearch Build(IServiceProvider? services = null) + { + services ??= EmptyKeyedServiceProvider.Instance; + var search = this._innerSearchFactory(services); + + // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost. + if (this._searchFactories is not null) + { + for (var i = this._searchFactories.Count - 1; i >= 0; i--) + { + search = this._searchFactories[i](search, services); + if (search is null) + { + throw new InvalidOperationException( + $"The {nameof(KeywordHybridSearchBuilder)} entry at index {i} returned null. " + + $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IKeywordHybridSearch)} instances."); + } + } + } + + return search; + } + + /// Adds a factory for an intermediate keyword hybrid search to the pipeline. + /// The search factory function. + /// The updated instance. + public KeywordHybridSearchBuilder Use(Func, IKeywordHybridSearch> searchFactory) + { + Verify.NotNull(searchFactory); + + return this.Use((innerSearch, _) => searchFactory(innerSearch)); + } + + /// Adds a factory for an intermediate keyword hybrid search to the pipeline. + /// The search factory function. + /// The updated instance. + public KeywordHybridSearchBuilder Use(Func, IServiceProvider, IKeywordHybridSearch> searchFactory) + { + Verify.NotNull(searchFactory); + + (this._searchFactories ??= []).Add(searchFactory); + return this; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderExtensions.cs new file mode 100644 index 000000000000..8c8b31ec6762 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderExtensions.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for working with in the context of . +[Experimental("SKEXP0020")] +public static class KeywordHybridSearchBuilderExtensions +{ + /// Creates a new using as its inner search. + /// The search to use as the inner search. + /// The new instance. + /// + /// This method is equivalent to using the constructor directly, + /// specifying as the inner search. + /// + public static KeywordHybridSearchBuilder AsBuilder(this IKeywordHybridSearch innerSearch) + { + Verify.NotNull(innerSearch); + + return new KeywordHybridSearchBuilder(innerSearch); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensions.cs new file mode 100644 index 000000000000..6bb2209b0812 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensions.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft. All rights reserved. 
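// --- Illustrative sketch (not part of this patch): how the ServiceCollection extensions defined
// below combine with the builder above. The CreateBackendSearch helper and MyRecord type are
// assumptions; generic arguments were stripped from this extract.
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.VectorData;

var services = new ServiceCollection();
services.AddLogging();

// AddKeywordHybridSearch registers builder.Build as the service factory, so decorators added to
// the returned builder (such as UseLogging) are applied when the service is resolved.
services.AddKeywordHybridSearch(sp => CreateBackendSearch(sp), ServiceLifetime.Scoped)
    .UseLogging();

using var provider = services.BuildServiceProvider();
using var scope = provider.CreateScope();
var search = scope.ServiceProvider.GetRequiredService<IKeywordHybridSearch<MyRecord>>();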
+ +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for registering with a . +[Experimental("SKEXP0020")] +public static class KeywordHybridSearchBuilderServiceCollectionExtensions +{ + /// Registers a singleton in the . + /// The to which the search should be added. + /// The inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a singleton service. + public static KeywordHybridSearchBuilder AddKeywordHybridSearch( + this IServiceCollection serviceCollection, + IKeywordHybridSearch innerSearch, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearch); + + return AddKeywordHybridSearch(serviceCollection, _ => innerSearch, lifetime); + } + + /// Registers a singleton in the . + /// The to which the search should be added. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a singleton service. + public static KeywordHybridSearchBuilder AddKeywordHybridSearch( + this IServiceCollection serviceCollection, + Func> innerSearchFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearchFactory); + + var builder = new KeywordHybridSearchBuilder(innerSearchFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IKeywordHybridSearch), builder.Build, lifetime)); + return builder; + } + + /// Registers a keyed singleton in the . + /// The to which the search should be added. + /// The key with which to associate the search. + /// The inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a scoped service. + public static KeywordHybridSearchBuilder AddKeyedKeywordHybridSearch( + this IServiceCollection serviceCollection, + object? serviceKey, + IKeywordHybridSearch innerSearch, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearch); + + return AddKeyedKeywordHybridSearch(serviceCollection, serviceKey, _ => innerSearch, lifetime); + } + + /// Registers a keyed singleton in the . + /// The to which the search should be added. + /// The key with which to associate the search. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a scoped service. + public static KeywordHybridSearchBuilder AddKeyedKeywordHybridSearch( + this IServiceCollection serviceCollection, + object? 
serviceKey, + Func> innerSearchFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearchFactory); + + var builder = new KeywordHybridSearchBuilder(innerSearchFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IKeywordHybridSearch), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); + return builder; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearch.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearch.cs new file mode 100644 index 000000000000..c05ffde310cc --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearch.cs @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Diagnostics; + +namespace Microsoft.Extensions.VectorData; + +/// +/// A keyword hybrid search that logs operations to an +/// +[Experimental("SKEXP0020")] +public class LoggingKeywordHybridSearch : IKeywordHybridSearch +{ + /// An instance used for all logging. + private readonly ILogger _logger; + + /// The underlying . + private readonly IKeywordHybridSearch _innerSearch; + + /// + /// Initializes a new instance of the class. + /// + /// The underlying . + /// An instance that will be used for all logging. + public LoggingKeywordHybridSearch(IKeywordHybridSearch innerSearch, ILogger logger) + { + Verify.NotNull(innerSearch); + Verify.NotNull(logger); + + this._innerSearch = innerSearch; + this._logger = logger; + } + + /// + public Task> HybridSearchAsync(TVector vector, ICollection keywords, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(HybridSearchAsync), + () => this._innerSearch.HybridSearchAsync(vector, keywords, options, cancellationToken)); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearchBuilderExtensions.cs new file mode 100644 index 000000000000..1614b6f6a57a --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearchBuilderExtensions.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extensions for configuring instances. +[Experimental("SKEXP0020")] +public static class LoggingKeywordHybridSearchBuilderExtensions +{ + /// Adds logging to the keyword hybrid search pipeline. + /// The . + /// + /// An optional used to create a logger with which logging should be performed. + /// If not supplied, a required instance will be resolved from the service provider. + /// If resolved is , it will be skipped and the inner service will be used instead. + /// + /// The . + public static KeywordHybridSearchBuilder UseLogging( + this KeywordHybridSearchBuilder builder, + ILoggerFactory? 
loggerFactory = null) + { + Verify.NotNull(builder); + + return builder.Use((innerSearch, services) => + { + loggerFactory ??= services.GetRequiredService(); + + if (loggerFactory == NullLoggerFactory.Instance) + { + return innerSearch; + } + + return new LoggingKeywordHybridSearch(innerSearch, loggerFactory.CreateLogger(typeof(LoggingKeywordHybridSearch))); + }); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearch.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearch.cs new file mode 100644 index 000000000000..0dc81080e496 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearch.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Diagnostics; + +namespace Microsoft.Extensions.VectorData; + +/// +/// A vectorizable text search that logs operations to an +/// +[Experimental("SKEXP0020")] +public class LoggingVectorizableTextSearch : IVectorizableTextSearch +{ + /// An instance used for all logging. + private readonly ILogger _logger; + + /// The underlying . + private readonly IVectorizableTextSearch _innerSearch; + + /// + /// Initializes a new instance of the class. + /// + /// The underlying . + /// An instance used for all logging. + public LoggingVectorizableTextSearch(IVectorizableTextSearch innerSearch, ILogger logger) + { + Verify.NotNull(innerSearch); + Verify.NotNull(logger); + + this._innerSearch = innerSearch; + this._logger = logger; + } + + /// + public Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(VectorizableTextSearchAsync), + () => this._innerSearch.VectorizableTextSearchAsync(searchText, options, cancellationToken)); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearchBuilderExtensions.cs new file mode 100644 index 000000000000..0f2a1b704474 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearchBuilderExtensions.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extensions for configuring instances. +[Experimental("SKEXP0020")] +public static class LoggingVectorizableTextSearchBuilderExtensions +{ + /// Adds logging to the vectorizable text search pipeline. + /// The . + /// + /// An optional used to create a logger with which logging should be performed. + /// If not supplied, a required instance will be resolved from the service provider. + /// If resolved is , it will be skipped and the inner service will be used instead. + /// + /// The . + public static VectorizableTextSearchBuilder UseLogging( + this VectorizableTextSearchBuilder builder, + ILoggerFactory? 
loggerFactory = null) + { + Verify.NotNull(builder); + + return builder.Use((innerSearch, services) => + { + loggerFactory ??= services.GetRequiredService(); + + if (loggerFactory == NullLoggerFactory.Instance) + { + return innerSearch; + } + + return new LoggingVectorizableTextSearch(innerSearch, loggerFactory.CreateLogger(typeof(LoggingVectorizableTextSearch))); + }); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearch.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearch.cs new file mode 100644 index 000000000000..f0198534d421 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearch.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Diagnostics; + +namespace Microsoft.Extensions.VectorData; + +/// +/// A vectorized search that logs operations to an +/// +[Experimental("SKEXP0020")] +public class LoggingVectorizedSearch : IVectorizedSearch +{ + /// An instance used for all logging. + private readonly ILogger _logger; + + /// The underlying . + private readonly IVectorizedSearch _innerSearch; + + /// + /// Initializes a new instance of the class. + /// + /// The underlying . + /// An instance used for all logging. + public LoggingVectorizedSearch(IVectorizedSearch innerSearch, ILogger logger) + { + Verify.NotNull(innerSearch); + Verify.NotNull(logger); + + this._innerSearch = innerSearch; + this._logger = logger; + } + + /// + public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(VectorizedSearchAsync), + () => this._innerSearch.VectorizedSearchAsync(vector, options, cancellationToken)); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearchBuilderExtensions.cs new file mode 100644 index 000000000000..47fa7c61ebe4 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearchBuilderExtensions.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extensions for configuring instances. +[Experimental("SKEXP0020")] +public static class LoggingVectorizedSearchBuilderExtensions +{ + /// Adds logging to the vectorized search pipeline. + /// The . + /// + /// An optional used to create a logger with which logging should be performed. + /// If not supplied, a required instance will be resolved from the service provider. + /// If resolved is , it will be skipped and the inner service will be used instead. + /// + /// The . + public static VectorizedSearchBuilder UseLogging( + this VectorizedSearchBuilder builder, + ILoggerFactory? 
loggerFactory = null) + { + Verify.NotNull(builder); + + return builder.Use((innerSearch, services) => + { + loggerFactory ??= services.GetRequiredService(); + + if (loggerFactory == NullLoggerFactory.Instance) + { + return innerSearch; + } + + return new LoggingVectorizedSearch(innerSearch, loggerFactory.CreateLogger(typeof(LoggingVectorizedSearch))); + }); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilder.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilder.cs new file mode 100644 index 000000000000..a2681267d216 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilder.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// A builder for creating pipelines of . +[Experimental("SKEXP0020")] +public sealed class VectorizableTextSearchBuilder +{ + private readonly Func> _innerSearchFactory; + + /// The registered search factory instances. + private List, IServiceProvider, IVectorizableTextSearch>>? _searchFactories; + + /// Initializes a new instance of the class. + /// The inner that represents the underlying backend. + public VectorizableTextSearchBuilder(IVectorizableTextSearch innerSearch) + { + Verify.NotNull(innerSearch); + + this._innerSearchFactory = _ => innerSearch; + } + + /// Initializes a new instance of the class. + /// A callback that produces the inner that represents the underlying backend. + public VectorizableTextSearchBuilder(Func> innerSearchFactory) + { + Verify.NotNull(innerSearchFactory); + + this._innerSearchFactory = innerSearchFactory; + } + + /// Builds an that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn. + /// + /// The that should provide services to the instances. + /// If null, an empty will be used. + /// + /// An instance of that represents the entire pipeline. + public IVectorizableTextSearch Build(IServiceProvider? services = null) + { + services ??= EmptyKeyedServiceProvider.Instance; + var search = this._innerSearchFactory(services); + + // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost. + if (this._searchFactories is not null) + { + for (var i = this._searchFactories.Count - 1; i >= 0; i--) + { + search = this._searchFactories[i](search, services); + if (search is null) + { + throw new InvalidOperationException( + $"The {nameof(VectorizableTextSearchBuilder)} entry at index {i} returned null. " + + $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IVectorizableTextSearch)} instances."); + } + } + } + + return search; + } + + /// Adds a factory for an intermediate vectorizable text search to the pipeline. + /// The search factory function. + /// The updated instance. + public VectorizableTextSearchBuilder Use(Func, IVectorizableTextSearch> searchFactory) + { + Verify.NotNull(searchFactory); + + return this.Use((innerSearch, _) => searchFactory(innerSearch)); + } + + /// Adds a factory for an intermediate vectorizable text search to the pipeline. + /// The search factory function. + /// The updated instance. 
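The factory-based constructor above defers creating the inner search until Build is called with a service provider. A minimal sketch combining it with the UseLogging extension from earlier in this change; CreateConnectorTextSearch and the Hotel record type are hypothetical, and the generic parameters elided in the diff text are restored here as an assumption:

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.VectorData;

var services = new ServiceCollection().AddLogging().BuildServiceProvider();

IVectorizableTextSearch<Hotel> search =
    new VectorizableTextSearchBuilder<Hotel>(sp => CreateConnectorTextSearch(sp)) // hypothetical factory that pulls dependencies from DI
        .UseLogging()
        .Build(services);

var results = await search.VectorizableTextSearchAsync("family friendly hotel near the beach");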
+ public VectorizableTextSearchBuilder Use(Func, IServiceProvider, IVectorizableTextSearch> searchFactory) + { + Verify.NotNull(searchFactory); + + (this._searchFactories ??= []).Add(searchFactory); + return this; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderExtensions.cs new file mode 100644 index 000000000000..7f44251ddc38 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderExtensions.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for working with in the context of . +[Experimental("SKEXP0020")] +public static class VectorizableTextSearchBuilderExtensions +{ + /// Creates a new using as its inner search. + /// The search to use as the inner search. + /// The new instance. + /// + /// This method is equivalent to using the constructor directly, + /// specifying as the inner search. + /// + public static VectorizableTextSearchBuilder AsBuilder(this IVectorizableTextSearch innerSearch) + { + Verify.NotNull(innerSearch); + + return new VectorizableTextSearchBuilder(innerSearch); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensions.cs new file mode 100644 index 000000000000..423919c561b0 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensions.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for registering with a . +[Experimental("SKEXP0020")] +public static class VectorizableTextSearchBuilderServiceCollectionExtensions +{ + /// Registers a singleton in the . + /// The to which the search should be added. + /// The inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a singleton service. + public static VectorizableTextSearchBuilder AddVectorizableTextSearch( + this IServiceCollection serviceCollection, + IVectorizableTextSearch innerSearch, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearch); + + return AddVectorizableTextSearch(serviceCollection, _ => innerSearch, lifetime); + } + + /// Registers a singleton in the . + /// The to which the search should be added. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a singleton service. 
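A sketch of how the factory overload declared just below might be used to register a scoped pipeline; CreateConnectorTextSearch and the Hotel type are placeholders for whatever connector is actually in use:

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.VectorData;

var services = new ServiceCollection();
services.AddLogging();

// Factory overload: the inner search is created by the container, here with a scoped lifetime.
services
    .AddVectorizableTextSearch<Hotel>(sp => CreateConnectorTextSearch(sp), ServiceLifetime.Scoped)
    .UseLogging();

using var provider = services.BuildServiceProvider();
using var scope = provider.CreateScope();
var search = scope.ServiceProvider.GetRequiredService<IVectorizableTextSearch<Hotel>>();
var results = await search.VectorizableTextSearchAsync("pet friendly hotel near the beach");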
+ public static VectorizableTextSearchBuilder AddVectorizableTextSearch( + this IServiceCollection serviceCollection, + Func> innerSearchFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearchFactory); + + var builder = new VectorizableTextSearchBuilder(innerSearchFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorizableTextSearch), builder.Build, lifetime)); + return builder; + } + + /// Registers a keyed singleton in the . + /// The to which the search should be added. + /// The key with which to associate the search. + /// The inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a scoped service. + public static VectorizableTextSearchBuilder AddKeyedVectorizableTextSearch( + this IServiceCollection serviceCollection, + object? serviceKey, + IVectorizableTextSearch innerSearch, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearch); + + return AddKeyedVectorizableTextSearch(serviceCollection, serviceKey, _ => innerSearch, lifetime); + } + + /// Registers a keyed singleton in the . + /// The to which the search should be added. + /// The key with which to associate the search. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a scoped service. + public static VectorizableTextSearchBuilder AddKeyedVectorizableTextSearch( + this IServiceCollection serviceCollection, + object? serviceKey, + Func> innerSearchFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearchFactory); + + var builder = new VectorizableTextSearchBuilder(innerSearchFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorizableTextSearch), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); + return builder; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilder.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilder.cs new file mode 100644 index 000000000000..fe02e2535482 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilder.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// A builder for creating pipelines of . +[Experimental("SKEXP0020")] +public sealed class VectorizedSearchBuilder +{ + private readonly Func> _innerSearchFactory; + + /// The registered search factory instances. + private List, IServiceProvider, IVectorizedSearch>>? _searchFactories; + + /// Initializes a new instance of the class. + /// The inner that represents the underlying backend. + public VectorizedSearchBuilder(IVectorizedSearch innerSearch) + { + Verify.NotNull(innerSearch); + + this._innerSearchFactory = _ => innerSearch; + } + + /// Initializes a new instance of the class. + /// A callback that produces the inner that represents the underlying backend. 
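The keyed overloads above allow several pipelines to coexist in one container, distinguished by service key. A sketch, with the backends and record types assumed:

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.VectorData;

var services = new ServiceCollection();
services.AddLogging();

// hotelTextSearch and flightTextSearch are assumed connector-provided instances.
services.AddKeyedVectorizableTextSearch("hotels", hotelTextSearch).UseLogging();
services.AddKeyedVectorizableTextSearch("flights", flightTextSearch).UseLogging();

using var provider = services.BuildServiceProvider();
var hotelSearch = provider.GetRequiredKeyedService<IVectorizableTextSearch<Hotel>>("hotels");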
+ public VectorizedSearchBuilder(Func> innerSearchFactory) + { + Verify.NotNull(innerSearchFactory); + + this._innerSearchFactory = innerSearchFactory; + } + + /// Builds an that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn. + /// + /// The that should provide services to the instances. + /// If null, an empty will be used. + /// + /// An instance of that represents the entire pipeline. + public IVectorizedSearch Build(IServiceProvider? services = null) + { + services ??= EmptyKeyedServiceProvider.Instance; + var search = this._innerSearchFactory(services); + + // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost. + if (this._searchFactories is not null) + { + for (var i = this._searchFactories.Count - 1; i >= 0; i--) + { + search = this._searchFactories[i](search, services); + if (search is null) + { + throw new InvalidOperationException( + $"The {nameof(VectorizedSearchBuilder)} entry at index {i} returned null. " + + $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IVectorizedSearch)} instances."); + } + } + } + + return search; + } + + /// Adds a factory for an intermediate vectorized search to the pipeline. + /// The search factory function. + /// The updated instance. + public VectorizedSearchBuilder Use(Func, IVectorizedSearch> searchFactory) + { + Verify.NotNull(searchFactory); + + return this.Use((innerSearch, _) => searchFactory(innerSearch)); + } + + /// Adds a factory for an intermediate vectorized search to the pipeline. + /// The search factory function. + /// The updated instance. + public VectorizedSearchBuilder Use(Func, IServiceProvider, IVectorizedSearch> searchFactory) + { + Verify.NotNull(searchFactory); + + (this._searchFactories ??= []).Add(searchFactory); + return this; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderExtensions.cs new file mode 100644 index 000000000000..a7721578716f --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderExtensions.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for working with in the context of . +[Experimental("SKEXP0020")] +public static class VectorizedSearchBuilderExtensions +{ + /// Creates a new using as its inner search. + /// The search to use as the inner search. + /// The new instance. + /// + /// This method is equivalent to using the constructor directly, + /// specifying as the inner search. + /// + public static VectorizedSearchBuilder AsBuilder(this IVectorizedSearch innerSearch) + { + Verify.NotNull(innerSearch); + + return new VectorizedSearchBuilder(innerSearch); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensions.cs new file mode 100644 index 000000000000..75f4bc7b1355 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensions.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft. All rights reserved. 
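The two-argument Use overload above hands the pipeline's IServiceProvider to the stage factory, so a stage can resolve its own dependencies. A sketch that wires the LoggingVectorizedSearch decorator from this change by hand; the inner search, the Hotel type, and the logger category name are assumptions:

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.VectorData;

var serviceProvider = new ServiceCollection().AddLogging().BuildServiceProvider();

IVectorizedSearch<Hotel> pipeline = connectorVectorizedSearch   // assumed connector-provided inner search
    .AsBuilder()
    .Use((inner, services) => new LoggingVectorizedSearch<Hotel>(
        inner,
        services.GetRequiredService<ILoggerFactory>().CreateLogger("HotelVectorSearch")))
    .Build(serviceProvider);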
+ +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for registering with a . +[Experimental("SKEXP0020")] +public static class VectorizedSearchBuilderServiceCollectionExtensions +{ + /// Registers a singleton in the . + /// The to which the search should be added. + /// The inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a singleton service. + public static VectorizedSearchBuilder AddVectorizedSearch( + this IServiceCollection serviceCollection, + IVectorizedSearch innerSearch, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearch); + + return AddVectorizedSearch(serviceCollection, _ => innerSearch, lifetime); + } + + /// Registers a singleton in the . + /// The to which the search should be added. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a singleton service. + public static VectorizedSearchBuilder AddVectorizedSearch( + this IServiceCollection serviceCollection, + Func> innerSearchFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearchFactory); + + var builder = new VectorizedSearchBuilder(innerSearchFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorizedSearch), builder.Build, lifetime)); + return builder; + } + + /// Registers a keyed singleton in the . + /// The to which the search should be added. + /// The key with which to associate the search. + /// The inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a scoped service. + public static VectorizedSearchBuilder AddKeyedVectorizedSearch( + this IServiceCollection serviceCollection, + object? serviceKey, + IVectorizedSearch innerSearch, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearch); + + return AddKeyedVectorizedSearch(serviceCollection, serviceKey, _ => innerSearch, lifetime); + } + + /// Registers a keyed singleton in the . + /// The to which the search should be added. + /// The key with which to associate the search. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a scoped service. + public static VectorizedSearchBuilder AddKeyedVectorizedSearch( + this IServiceCollection serviceCollection, + object? 
serviceKey, + Func> innerSearchFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearchFactory); + + var builder = new VectorizedSearchBuilder(innerSearchFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorizedSearch), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); + return builder; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStore.cs b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStore.cs new file mode 100644 index 000000000000..ea2bca4ca106 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStore.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Diagnostics; + +namespace Microsoft.Extensions.VectorData; + +/// +/// A vector store that logs operations to an +/// +[Experimental("SKEXP0020")] +public class LoggingVectorStore : IVectorStore +{ + /// An instance used for all logging. + private readonly ILogger _logger; + + /// The underlying . + private readonly IVectorStore _innerStore; + + /// + /// Initializes a new instance of the class. + /// + /// The underlying . + /// An instance that will be used for all logging. + public LoggingVectorStore(IVectorStore innerStore, ILogger logger) + { + Verify.NotNull(innerStore); + Verify.NotNull(logger); + + this._innerStore = innerStore; + this._logger = logger; + } + + /// + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + => new LoggingVectorStoreRecordCollection( + this._innerStore.GetCollection(name, vectorStoreRecordDefinition), + this._logger); + + /// + public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(ListCollectionNamesAsync), + () => this._innerStore.ListCollectionNamesAsync(cancellationToken), + cancellationToken); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreBuilderExtensions.cs new file mode 100644 index 000000000000..9e8b7636a16c --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreBuilderExtensions.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extensions for configuring instances. +[Experimental("SKEXP0020")] +public static class LoggingVectorStoreBuilderExtensions +{ + /// Adds logging to the vector store pipeline. + /// The . + /// + /// An optional used to create a logger with which logging should be performed. + /// If not supplied, a required instance will be resolved from the service provider. + /// If resolved is , it will be skipped and the inner service will be used instead. + /// + /// The . + public static VectorStoreBuilder UseLogging( + this VectorStoreBuilder builder, + ILoggerFactory? 
loggerFactory = null) + { + Verify.NotNull(builder); + + return builder.Use((innerStore, services) => + { + loggerFactory ??= services.GetRequiredService(); + + if (loggerFactory == NullLoggerFactory.Instance) + { + return innerStore; + } + + return new LoggingVectorStore(innerStore, loggerFactory.CreateLogger(typeof(LoggingVectorStore))); + }); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollection.cs b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollection.cs new file mode 100644 index 000000000000..3d6919280861 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollection.cs @@ -0,0 +1,144 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Diagnostics; + +namespace Microsoft.Extensions.VectorData; + +/// +/// A vector store record collection that logs operations to an +/// +[Experimental("SKEXP0020")] +#pragma warning disable CA1711 // Identifiers should not have incorrect suffix +public class LoggingVectorStoreRecordCollection : IVectorStoreRecordCollection where TKey : notnull +#pragma warning restore CA1711 // Identifiers should not have incorrect suffix +{ + /// An instance used for all logging. + private readonly ILogger _logger; + + /// The underlying . + private readonly IVectorStoreRecordCollection _innerCollection; + + /// + /// Initializes a new instance of the class. + /// + /// The underlying . + /// An instance that will be used for all logging. + public LoggingVectorStoreRecordCollection(IVectorStoreRecordCollection innerCollection, ILogger logger) + { + Verify.NotNull(innerCollection); + Verify.NotNull(logger); + + this._innerCollection = innerCollection; + this._logger = logger; + } + + /// + public string CollectionName => this._innerCollection.CollectionName; + + /// + public Task CollectionExistsAsync(CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(CollectionExistsAsync), + () => this._innerCollection.CollectionExistsAsync(cancellationToken)); + } + + /// + public Task CreateCollectionAsync(CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(CreateCollectionAsync), + () => this._innerCollection.CreateCollectionAsync(cancellationToken)); + } + + /// + public Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(CreateCollectionIfNotExistsAsync), + () => this._innerCollection.CreateCollectionIfNotExistsAsync(cancellationToken)); + } + + /// + public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(DeleteAsync), + () => this._innerCollection.DeleteAsync(key, cancellationToken)); + } + + /// + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(DeleteBatchAsync), + () => this._innerCollection.DeleteBatchAsync(keys, cancellationToken)); + } + + /// + public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + { + return 
LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(DeleteCollectionAsync), + () => this._innerCollection.DeleteCollectionAsync(cancellationToken)); + } + + /// + public Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(GetAsync), + () => this._innerCollection.GetAsync(key, options, cancellationToken)); + } + + /// + public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(GetBatchAsync), + () => this._innerCollection.GetBatchAsync(keys, options, cancellationToken), + cancellationToken); + } + + /// + public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(UpsertAsync), + () => this._innerCollection.UpsertAsync(record, cancellationToken)); + } + + /// + public IAsyncEnumerable UpsertBatchAsync(IEnumerable records, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(UpsertBatchAsync), + () => this._innerCollection.UpsertBatchAsync(records, cancellationToken), + cancellationToken); + } + + /// + public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(VectorizedSearchAsync), + () => this._innerCollection.VectorizedSearchAsync(vector, options, cancellationToken)); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensions.cs new file mode 100644 index 000000000000..33d7d3760d32 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensions.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extensions for configuring instances. +[Experimental("SKEXP0020")] +public static class LoggingVectorStoreRecordCollectionBuilderExtensions +{ + /// Adds logging to the vector store record collection pipeline. + /// The . + /// + /// An optional used to create a logger with which logging should be performed. + /// If not supplied, a required instance will be resolved from the service provider. + /// If resolved is , it will be skipped and the inner service will be used instead. + /// + /// The . + public static VectorStoreRecordCollectionBuilder UseLogging( + this VectorStoreRecordCollectionBuilder builder, + ILoggerFactory? 
loggerFactory = null) where TKey : notnull + { + Verify.NotNull(builder); + + return builder.Use((innerCollection, services) => + { + loggerFactory ??= services.GetRequiredService(); + + if (loggerFactory == NullLoggerFactory.Instance) + { + return innerCollection; + } + + return new LoggingVectorStoreRecordCollection(innerCollection, loggerFactory.CreateLogger(typeof(LoggingVectorStoreRecordCollection))); + }); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilder.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilder.cs new file mode 100644 index 000000000000..71d98fcb276c --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilder.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// A builder for creating pipelines of . +[Experimental("SKEXP0020")] +public sealed class VectorStoreBuilder +{ + private readonly Func _innerStoreFactory; + + /// The registered store factory instances. + private List>? _storeFactories; + + /// Initializes a new instance of the class. + /// The inner that represents the underlying backend. + public VectorStoreBuilder(IVectorStore innerStore) + { + Verify.NotNull(innerStore); + + this._innerStoreFactory = _ => innerStore; + } + + /// Initializes a new instance of the class. + /// A callback that produces the inner that represents the underlying backend. + public VectorStoreBuilder(Func innerStoreFactory) + { + Verify.NotNull(innerStoreFactory); + + this._innerStoreFactory = innerStoreFactory; + } + + /// Builds an that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn. + /// + /// The that should provide services to the instances. + /// If null, an empty will be used. + /// + /// An instance of that represents the entire pipeline. + public IVectorStore Build(IServiceProvider? services = null) + { + services ??= EmptyKeyedServiceProvider.Instance; + var vectorStore = this._innerStoreFactory(services); + + // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost. + if (this._storeFactories is not null) + { + for (var i = this._storeFactories.Count - 1; i >= 0; i--) + { + vectorStore = this._storeFactories[i](vectorStore, services); + if (vectorStore is null) + { + throw new InvalidOperationException( + $"The {nameof(VectorStoreBuilder)} entry at index {i} returned null. " + + $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IVectorStore)} instances."); + } + } + } + + return vectorStore; + } + + /// Adds a factory for an intermediate vector store to the vector store pipeline. + /// The store factory function. + /// The updated instance. + public VectorStoreBuilder Use(Func storeFactory) + { + Verify.NotNull(storeFactory); + + return this.Use((innerStore, _) => storeFactory(innerStore)); + } + + /// Adds a factory for an intermediate vector store to the vector store pipeline. + /// The store factory function. + /// The updated instance. 
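At the store level the same pattern applies. A minimal sketch that wraps an assumed connector-provided IVectorStore with the logging decorator from this change and lists its collections:

using System;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.VectorData;

static async Task ListCollectionsAsync(IVectorStore connectorStore)   // assumed inner store
{
    var services = new ServiceCollection().AddLogging().BuildServiceProvider();

    IVectorStore store = new VectorStoreBuilder(connectorStore)
        .UseLogging()          // resolves ILoggerFactory from the provider passed to Build
        .Build(services);

    await foreach (string name in store.ListCollectionNamesAsync())
    {
        Console.WriteLine(name);
    }
}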
+ public VectorStoreBuilder Use(Func storeFactory) + { + Verify.NotNull(storeFactory); + + (this._storeFactories ??= []).Add(storeFactory); + return this; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderExtensions.cs new file mode 100644 index 000000000000..f5666cd2d6ac --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderExtensions.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for working with in the context of . +[Experimental("SKEXP0020")] +public static class VectorStoreBuilderExtensions +{ + /// Creates a new using as its inner store. + /// The store to use as the inner store. + /// The new instance. + /// + /// This method is equivalent to using the constructor directly, + /// specifying as the inner store. + /// + public static VectorStoreBuilder AsBuilder(this IVectorStore innerStore) + { + Verify.NotNull(innerStore); + + return new VectorStoreBuilder(innerStore); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderServiceCollectionExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderServiceCollectionExtensions.cs new file mode 100644 index 000000000000..a76d3256585b --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderServiceCollectionExtensions.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for registering with a . +[Experimental("SKEXP0020")] +public static class VectorStoreBuilderServiceCollectionExtensions +{ + /// Registers a singleton in the . + /// The to which the store should be added. + /// The inner that represents the underlying backend. + /// The service lifetime for the store. Defaults to . + /// A that can be used to build a pipeline around the inner store. + /// The store is registered as a singleton service. + public static VectorStoreBuilder AddVectorStore( + this IServiceCollection serviceCollection, + IVectorStore innerStore, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerStore); + + return AddVectorStore(serviceCollection, _ => innerStore, lifetime); + } + + /// Registers a singleton in the . + /// The to which the store should be added. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the store. Defaults to . + /// A that can be used to build a pipeline around the inner store. + /// The store is registered as a singleton service. + public static VectorStoreBuilder AddVectorStore( + this IServiceCollection serviceCollection, + Func innerStoreFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerStoreFactory); + + var builder = new VectorStoreBuilder(innerStoreFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorStore), builder.Build, lifetime)); + return builder; + } + + /// Registers a keyed singleton in the . + /// The to which the store should be added. 
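A sketch of the registration path above, assuming a connector-provided store and a Hotel record type; because LoggingVectorStore wraps GetCollection, collections obtained from the resolved store come back logging-wrapped as well:

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.VectorData;

var services = new ServiceCollection();
services.AddLogging();

services.AddVectorStore(connectorStore).UseLogging();   // connectorStore is an assumed IVectorStore

using var provider = services.BuildServiceProvider();
var store = provider.GetRequiredService<IVectorStore>();
var hotels = store.GetCollection<string, Hotel>("hotels");   // type parameters restored here as an assumption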
+ /// The key with which to associate the store. + /// The inner that represents the underlying backend. + /// The service lifetime for the store. Defaults to . + /// A that can be used to build a pipeline around the inner store. + /// The store is registered as a scoped service. + public static VectorStoreBuilder AddKeyedVectorStore( + this IServiceCollection serviceCollection, + object? serviceKey, + IVectorStore innerStore, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerStore); + + return AddKeyedVectorStore(serviceCollection, serviceKey, _ => innerStore, lifetime); + } + + /// Registers a keyed singleton in the . + /// The to which the store should be added. + /// The key with which to associate the store. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the store. Defaults to . + /// A that can be used to build a pipeline around the inner store. + /// The store is registered as a scoped service. + public static VectorStoreBuilder AddKeyedVectorStore( + this IServiceCollection serviceCollection, + object? serviceKey, + Func innerStoreFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerStoreFactory); + + var builder = new VectorStoreBuilder(innerStoreFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorStore), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); + return builder; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilder.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilder.cs new file mode 100644 index 000000000000..2ae1048ff83e --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilder.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// A builder for creating pipelines of . +[Experimental("SKEXP0020")] +public sealed class VectorStoreRecordCollectionBuilder where TKey : notnull +{ + private readonly Func> _innerCollectionFactory; + + /// The registered collection factory instances. + private List, IServiceProvider, IVectorStoreRecordCollection>>? _collectionFactories; + + /// Initializes a new instance of the class. + /// The inner that represents the underlying backend. + public VectorStoreRecordCollectionBuilder(IVectorStoreRecordCollection innerCollection) + { + Verify.NotNull(innerCollection); + + this._innerCollectionFactory = _ => innerCollection; + } + + /// Initializes a new instance of the class. + /// A callback that produces the inner that represents the underlying backend. + public VectorStoreRecordCollectionBuilder(Func> innerCollectionFactory) + { + Verify.NotNull(innerCollectionFactory); + + this._innerCollectionFactory = innerCollectionFactory; + } + + /// Builds an that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn. + /// + /// The that should provide services to the instances. + /// If null, an empty will be used. + /// + /// An instance of that represents the entire pipeline. + public IVectorStoreRecordCollection Build(IServiceProvider? 
services = null) + { + services ??= EmptyKeyedServiceProvider.Instance; + var collection = this._innerCollectionFactory(services); + + // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost. + if (this._collectionFactories is not null) + { + for (var i = this._collectionFactories.Count - 1; i >= 0; i--) + { + collection = this._collectionFactories[i](collection, services); + if (collection is null) + { + throw new InvalidOperationException( + $"The {nameof(VectorStoreRecordCollectionBuilder)} entry at index {i} returned null. " + + $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IVectorStoreRecordCollection)} instances."); + } + } + } + + return collection; + } + + /// Adds a factory for an intermediate vector store record collection to the pipeline. + /// The collection factory function. + /// The updated instance. + public VectorStoreRecordCollectionBuilder Use(Func, IVectorStoreRecordCollection> collectionFactory) + { + Verify.NotNull(collectionFactory); + + return this.Use((innerCollection, _) => collectionFactory(innerCollection)); + } + + /// Adds a factory for an intermediate vector store record collection to the pipeline. + /// The collection factory function. + /// The updated instance. + public VectorStoreRecordCollectionBuilder Use(Func, IServiceProvider, IVectorStoreRecordCollection> collectionFactory) + { + Verify.NotNull(collectionFactory); + + (this._collectionFactories ??= []).Add(collectionFactory); + return this; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderExtensions.cs new file mode 100644 index 000000000000..1e950685253c --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderExtensions.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for working with in the context of . +[Experimental("SKEXP0020")] +public static class VectorStoreRecordCollectionBuilderExtensions +{ + /// Creates a new using as its inner collection. + /// The collection to use as the inner collection. + /// The new instance. + /// + /// This method is equivalent to using the constructor directly, + /// specifying as the inner collection. + /// + public static VectorStoreRecordCollectionBuilder AsBuilder(this IVectorStoreRecordCollection innerCollection) where TKey : notnull + { + Verify.NotNull(innerCollection); + + return new VectorStoreRecordCollectionBuilder(innerCollection); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensions.cs new file mode 100644 index 000000000000..2b0af209ebd6 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensions.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for registering with a . 
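Finally, a single collection can be decorated on its own rather than through the store. A minimal sketch, assuming an IVectorStoreRecordCollection with a string key and a hypothetical Hotel record type, and using the explicit ILoggerFactory overload of UseLogging so no service provider is needed at Build time:

using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.VectorData;

static async Task UpsertAndReadBackAsync(
    IVectorStoreRecordCollection<string, Hotel> connectorCollection,  // assumed connector-provided collection
    ILoggerFactory loggerFactory)
{
    IVectorStoreRecordCollection<string, Hotel> hotels = connectorCollection
        .AsBuilder()
        .UseLogging(loggerFactory)
        .Build();

    await hotels.CreateCollectionIfNotExistsAsync();
    string key = await hotels.UpsertAsync(new Hotel());   // Hotel is a hypothetical record type
    Hotel? hotel = await hotels.GetAsync(key);
}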
+[Experimental("SKEXP0020")] +public static class VectorStoreRecordCollectionBuilderServiceCollectionExtensions +{ + /// Registers a singleton in the . + /// The to which the collection should be added. + /// The inner that represents the underlying backend. + /// The service lifetime for the collection. Defaults to . + /// A that can be used to build a pipeline around the inner collection. + /// The collection is registered as a singleton service. + public static VectorStoreRecordCollectionBuilder AddVectorStoreRecordCollection( + this IServiceCollection serviceCollection, + IVectorStoreRecordCollection innerCollection, + ServiceLifetime lifetime = ServiceLifetime.Singleton) where TKey : notnull + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerCollection); + + return AddVectorStoreRecordCollection(serviceCollection, _ => innerCollection, lifetime); + } + + /// Registers a singleton in the . + /// The to which the collection should be added. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the collection. Defaults to . + /// A that can be used to build a pipeline around the inner collection. + /// The collection is registered as a singleton service. + public static VectorStoreRecordCollectionBuilder AddVectorStoreRecordCollection( + this IServiceCollection serviceCollection, + Func> innerCollectionFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) where TKey : notnull + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerCollectionFactory); + + var builder = new VectorStoreRecordCollectionBuilder(innerCollectionFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorStoreRecordCollection), builder.Build, lifetime)); + return builder; + } + + /// Registers a keyed singleton in the . + /// The to which the collection should be added. + /// The key with which to associate the collection. + /// The inner that represents the underlying backend. + /// The service lifetime for the collection. Defaults to . + /// A that can be used to build a pipeline around the inner collection. + /// The collection is registered as a scoped service. + public static VectorStoreRecordCollectionBuilder AddKeyedVectorStoreRecordCollection( + this IServiceCollection serviceCollection, + object? serviceKey, + IVectorStoreRecordCollection innerCollection, + ServiceLifetime lifetime = ServiceLifetime.Singleton) where TKey : notnull + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerCollection); + + return AddKeyedVectorStoreRecordCollection(serviceCollection, serviceKey, _ => innerCollection, lifetime); + } + + /// Registers a keyed singleton in the . + /// The to which the collection should be added. + /// The key with which to associate the collection. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the collection. Defaults to . + /// A that can be used to build a pipeline around the inner collection. + /// The collection is registered as a scoped service. + public static VectorStoreRecordCollectionBuilder AddKeyedVectorStoreRecordCollection( + this IServiceCollection serviceCollection, + object? 
serviceKey, + Func> innerCollectionFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) where TKey : notnull + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerCollectionFactory); + + var builder = new VectorStoreRecordCollectionBuilder(innerCollectionFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorStoreRecordCollection), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); + return builder; + } +} diff --git a/dotnet/src/Connectors/VectorData/neticon.png b/dotnet/src/Connectors/VectorData/neticon.png new file mode 100644 index 0000000000000000000000000000000000000000..a0f1fdbf4d5eae0e561018cccee74f6a454cdb9c GIT binary patch literal 7006 zcmeHMXH-+`n%)#eMU;C)kZw7O2nvFLpcE@A^-u+AN(mh$UH*JD5Jjm{4}uUR zs5C(zdURn*zrcHqdVxK)P)7322TAMVbNR4HRzo3_~zdgjvf?Ot98@H{LHdy zK*)TM=g&B9f}+9IKfm=aF5e3_{PQJ$ zY4?9DHvtd+Y14o8TQs=)&+P)Wjb3|LIT@*NDqyYm#gu^q*EFSow<%yKVx`_Ka)!0 z2YAaQr%LYyQ%n$Rjx)e%JeM5_ov70FUMveJTS(J+%C4(L)~h*MQ8!wJtf_X{`Ol?k z;{27%#**2uiR&R6-eaRK1Mdgl2xHQ=uS(~VqsTVrsUnQhc zRIK5>@(05w3gHYdsI0;;sOO66pUEl)DGyD(D4>$7drUDFZ|uxx;-nWj7d|rj=u+D@ z-HU+mLOInrsXdSL1Z6nVB&D z@>f4!yq=_B+16+qw5k=4o#*tf;6Oe*F;`&L!)bT{U7Wc3YmG2;NRxb%woCt~*Yr2E zfwiUdS=7SK&5>df-aqY8lp~SEUG*ziXGvHMLp_#vgvVMQ*&{+d@(a>v4;7p_%Jte0Ga5zNbUI28WAgY5f?FX^;q`1WTw2~t|P54N&e^@=nFqDj}W#o z_-kZBWDQ%($YJH43Y7YrbjfsUrAEjla>?j0;YLdXxjK}P@xDGc%r&c)6`t?XW=*{r z%Z^p)?6*7obKU_;NZK_ejh9n&?qzO0#(}Uo+KSm|e}q1+f$wM!G8>lLvKK1UK^uz5 zDk&5(DuUnzQy{aQ8%b~*_4Ri`TOj}Dd{0OCls}^VD8=qDC%Q9tSSt5LZoxd!|ai3oGtf&cOy(`^W9zMNR;bII|OS+Pe(-9=f!m6}w zV>f(mH^BYE-=Wl=)Q2s2TF*j&tRkN0KOu3-(VN?4?-v|?W^Xj)@u4^bNB%bN+f|D= z?r1ey$UbahYv!qISaxV8>+1Mnz!M&S1o+~titx|65MA`iQMjscL!+LOGjZ?p>}x6d z4`FiZV9i-E6F8c|Fq37-TTTtJOdIZ9<*YrJU86UuQr6dipNC%AxT?lXa9U=`iq+2= zOT!CFUlJM1&INj~InR!=@x@{Z8BnvgL~_>nN)y@!r<0$uGCJ<0B-q!vZn@~#5^Ig8B}}g&dYBee=x50Wv$R^^f%aTE~g_a7&8Y(5L>! 
zkYgCl@1ZVqFSwkH(ns-EtYbOFLrarf#r6W9#x8rO<<_6h33faYV{<&_gBahO#ga9j z$|}=ea)vEm|Hb`E%L9Gn#Osxg( z&sxXz7lsse+_i@<_LUl@8$916h*m6!R?~zr_ZQU^H3F(aC1is#I$VP$GO(s!pT&Y# z85JYcwQqu6Ja6sje&x*)nOdx;bt1hNMTSwSikFeKE)+MRrW?mg=8mp^AR_kz{C%e* z32H_>c600^d$9)ob+$yzpyxHa+k0Sz7GG41I0A59bKJf?X}E6mX$pU~Wc%_?$2w1s zZEbk$svZ4U+WH;XPEb^-IqhGQX1U|z8KWp8&jVlWFPP+7Um6;oMy?>TFU`cMT5bYx z;7_~MfZ(sumPQHg++U)9PT=+=zxu+qmP==xJ&oI%XgD8=YZo%*rGq2U_J^D4d%7H`}jau-;<_^n?THcf9*rKD^J#%p%l zA8DILPr+wPY^MpxQbxGXG2f0xcjxSw;wjl53EsXe0poYHgfc(T;v5J;H$neUhElxe zrX0NdQ4e#4L4e-JmsN$%C+#BKX8TYA1YlhN`|QyqnlH{Igil*i0?NrD9qi2Fw_&~eMSk3UGyWzcay4oPaWE~nJ{R}-u+%oE z^4pk7G%~M66x6$a(@21!KD)Us1JG?!Xn4Zb;NYOn2SGc%JK!@mQv*PGMGxMb{#a4F z_#t!~GhhJR9)$w;fi20azFx86@7j4yB zpC7-bK<170rK@aOPg zDv69Iy;oMY0yq-ORy`~=Y8>ZQ_}+6m=ElBFD(BO@q9)h-K%)s9-^rh(;7T`vu={0p zCzf*G!~Iex?wWwWS?rOOYx{i!_Lh~OXJ7gYPR(bWfke`)l(GCjjtT06t7+0hHGHhh zA9y}JSM5#_xw|dqtlV?PVqZwGRm*pM)dvDj|LAzkF?4x}RLkCA#>G3V21ZLIt^gG< zQI&0O8}Rf;Def0;ZbweV+|x(R-?(Vnj5F9~eOT)4!nDr7Yq-5!y1bz1t;HjQSLn-A zt1qf%FzvKZ`+#!ufUYj;;FE!eL$>Pcse)qp0BW@>*U{2zo_CWHpgvHpnGofD&KYKY z+!}avbdRD^hZQf zU#$@f{W=^JvL7g)bcEZ<)O9tw4?Dxp&lksZ;$I_{?{l;o=>&}=tF-5MU&27^*rhJT zcd0DiLPxBSPJ<5cx}JGQAds^*(&j4-nHoTwx>dVUGJHkMM7w*nPbN5n_W)JJ zoSF~F)URWm1xS-QkhpAB(#}xq`0?;AQ=#^xj8iv{-*?l`8a;)kpuatAQXeVT+=;#A zT0rvGu`_`{>KMvxzgLkb$EeCy`RyvAx+nC!D381cssru;3nBjt{S>AGvQAs(kxLO{ zIp*xXImIAQJ>kiL&b~R(P_(nAu2z<~Dc*-_c3=C`sjCz@AZVOwgE5s@G#uy{iQNJ} z*pY1bjnx4K{yik#93ftw2}MI#Dt>w>)q5vp~-G zX7!=BUrYpB-3#04(mvmC$-Y!WY8${8gcraWB}q}i z(|PAS*SoXp)9`8tTYTuy7`=#uWFoR#J2(AVcxr-9uF+7kB$GxNkA$Vfoz}l40*Ydo zXReR;i`X4$Te~{&2?RE~^39WlS?>E>my@CS3|paiTe-zGjS$iwI*YbAHOwW*PD@wI z=Nl-L-*Y(4b+hX{-tb98arKb!Q^EK+RA0Lfp4`cv&x7o<`~ghNZ#@Z$`B6O*2R6%R z+kg>9tGG(TtYgVXWD_X)ySeq_3Tq2*GEPMlF@o;BBxfbxC%!xOuwUa+?wXac%Dce> z+d&$P_VsrSw*$bMY#z8~U%K$AIc8vOosw2D4`XdBe5NKVuc+s10x-cw)v;&2Yd`@# z6UL-Y1G;FY$G$?{@cwL6zaRL5p_lTzugeI5PB@eSk^x^LJ=N!qHsScr*=1fnx>1;L zY5eqB8dlecz6GSs<7{=#sl?FWEY66Ejk>f}1odw~P?}i0yH&4d%vKKZ@hTi7-IW8%;{(vI`&L;i z@`wN4O!SHFV&u%JzXt*g%E%4J$^z@6FOtA7Yc(*Rz2%_90Exxp+}r^Vb|pF?C;F8w zu&f+_Jsvg^Wp?I6!+uV$Bi#fzohClm^T{PdQzz%Nn}GENT0zaz{xqo+NWJ!QdLYKf zBHdX|LMnBh5jXZ;>OoAWv*rOX&O8Sbzjyl*y-%<2V2oE_*lEG(1GlpzBZ6aoOp%y8 ze&=uJp63A7*h}C9j-sY70bc4bHQr`@q#!@&!5LxUu`)c;-&WVK?$9+vP%D`7v^_`5 zrOcY7w(+sWUl!hkCI>q|qg_*OZ$os^0Fsg`di5ki_Tzr$8gh}#WNKHtX|hlAupfW6 zk_ZWVB&Hjb9ZbLk!Ie1lMyGd?qhgq8>{#iC>Kg^*taLx^YuW+VQG;}IK{6+Y@0i7& z6iRAQBlI8*LwK}P>x0;cL*en^{8^OvUg%KTXIa~~>xA%u_2)y{h_+YQ?tpDgX9rIe zOo3t5%oVK)PzXFaqN#F2^qJbgB3HzT`{nJcFO`#ATLWNBXfYU5CYHs&PnH^f*Wl6k z?<0KM*e@M?auAvtBi}A#6V#ej{yvSOE8v?4^Jb8y4~i{ zSIC{Kc9#!&HhKqJI9L>s*NbwiwWXI+w-X6TM}&3$PlPOE+G8HP8Hi(#UMtyKy= zLo(ZOb7qTQ^r{NHBg^h=C`gbboZigk0*;z5+XW@P;EzUwQZv5|SZ6W0tBbATVDt$& z4th!!{t_tBc>V9qZE^8&@=VbaMh;!ivCF~IC28PzN2Z{@`)H;y3+{?j%eQl6gP|I9 z-agi;Y>P($m>0yG48Z>=AC0W_h5((46THSuk)X||?u=A_N-{J)`M9Q^WnUMh84VTQ zIvQlFtG4Z5X~3!o0K!K+^E@{TZ;5W3XkNzy z*j?DZB4J)s(LK@K0K1T4u&xvPHDTX zs$=NfQalJo9RXF+0@j1~t~aK@*DAWgsI@Sl{8AP8%T`P`Vu~Tv_%ZmbJz^#V>NJZl-TbST^RMK5DlNOs$kegkbICLYRJk-}g{l-Wn^Vya`SL3T1tiIw^Z zm~h)cx+UimpKrqQ=$a*_BCrvMGi%5Nr5qU)hq|P1Tjp!gLgpIqRRIs`qsDGjcel*OH-c~&6W812bsUI z>umkx8_8Ottu&n?L`^t@;63h8!Nb19V4*G1v2?3e;$WrvvX7%#JaxH?R) zN@KLmgq3q$NONDrj=7c`8~kK5VTf>xS$Q2C8@T{(7ygTX1N^6hZ&3*F7Z@!5FaMz+ n@b3Qu^xx$8Uk}h2jH{d|uJ4jrSC|P(2)ca1@;v^m$K8JeR7TPQ literal 0 HcmV?d00001 diff --git a/dotnet/src/Functions/Functions.Grpc/Extensions/GrpcKernelExtensions.cs b/dotnet/src/Functions/Functions.Grpc/Extensions/GrpcKernelExtensions.cs index 20f928cb7bcb..2b2b7488db6a 100644 --- 
a/dotnet/src/Functions/Functions.Grpc/Extensions/GrpcKernelExtensions.cs +++ b/dotnet/src/Functions/Functions.Grpc/Extensions/GrpcKernelExtensions.cs @@ -88,7 +88,7 @@ public static KernelPlugin CreatePluginFromGrpcDirectory( { const string ProtoFile = "grpc.proto"; - Verify.ValidPluginName(pluginDirectoryName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginDirectoryName, kernel.Plugins); var pluginDir = Path.Combine(parentDirectory, pluginDirectoryName); Verify.DirectoryExists(pluginDir); @@ -151,7 +151,7 @@ public static KernelPlugin CreatePluginFromGrpc( string pluginName) { Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginName, kernel.Plugins); // Parse var parser = new ProtoDocumentParser(); diff --git a/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/ApiManifestKernelExtensions.cs b/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/ApiManifestKernelExtensions.cs index 0bc7dbc73fb0..b472992cd0f1 100644 --- a/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/ApiManifestKernelExtensions.cs +++ b/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/ApiManifestKernelExtensions.cs @@ -62,7 +62,7 @@ public static async Task CreatePluginFromApiManifestAsync( CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginName, kernel.Plugins); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(pluginParameters?.HttpClient ?? kernel.Services.GetService()); diff --git a/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs b/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs index fcea1ef3a387..b20b8968c90b 100644 --- a/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs +++ b/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs @@ -62,7 +62,7 @@ public static async Task CreatePluginFromCopilotAgentPluginAsync( CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginName, kernel.Plugins); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(pluginParameters?.HttpClient ?? 
kernel.Services.GetService()); diff --git a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs index 2e7fb3d2214f..93709fc09a77 100644 --- a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs @@ -117,7 +117,7 @@ public static async Task CreatePluginFromOpenApiAsync( CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginName, kernel.Plugins); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient ?? kernel.Services.GetService()); @@ -156,7 +156,7 @@ public static async Task CreatePluginFromOpenApiAsync( CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginName, kernel.Plugins); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient ?? kernel.Services.GetService()); @@ -199,7 +199,7 @@ public static async Task CreatePluginFromOpenApiAsync( CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginName, kernel.Plugins); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient ?? kernel.Services.GetService()); @@ -233,7 +233,7 @@ public static KernelPlugin CreatePluginFromOpenApi( CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginName, kernel.Plugins); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient ?? kernel.Services.GetService()); diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs b/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs index 0e69ffefcc16..d76998d0976e 100644 --- a/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs +++ b/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs @@ -36,7 +36,7 @@ public static async Task CreateFromOpenApiAsync( OpenApiFunctionExecutionParameters? 
executionParameters = null, CancellationToken cancellationToken = default) { - Verify.ValidPluginName(pluginName); + KernelVerify.ValidPluginName(pluginName); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient); @@ -73,7 +73,7 @@ public static async Task CreateFromOpenApiAsync( OpenApiFunctionExecutionParameters? executionParameters = null, CancellationToken cancellationToken = default) { - Verify.ValidPluginName(pluginName); + KernelVerify.ValidPluginName(pluginName); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient); @@ -114,7 +114,7 @@ public static async Task CreateFromOpenApiAsync( OpenApiFunctionExecutionParameters? executionParameters = null, CancellationToken cancellationToken = default) { - Verify.ValidPluginName(pluginName); + KernelVerify.ValidPluginName(pluginName); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient); @@ -143,7 +143,7 @@ public static KernelPlugin CreateFromOpenApi( RestApiSpecification specification, OpenApiFunctionExecutionParameters? executionParameters = null) { - Verify.ValidPluginName(pluginName); + KernelVerify.ValidPluginName(pluginName); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient); @@ -394,7 +394,7 @@ private static string ConvertOperationIdToValidFunctionName(string operationId, { try { - Verify.ValidFunctionName(operationId); + KernelVerify.ValidFunctionName(operationId); return operationId; } catch (ArgumentException) diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/KernelVerify.cs b/dotnet/src/InternalUtilities/src/Diagnostics/KernelVerify.cs new file mode 100644 index 000000000000..1cb1c96ae181 --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Diagnostics/KernelVerify.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; +using System.Text.RegularExpressions; + +namespace Microsoft.SemanticKernel; + +[ExcludeFromCodeCoverage] +internal static partial class KernelVerify +{ +#if NET + [GeneratedRegex("^[0-9A-Za-z_]*$")] + private static partial Regex AsciiLettersDigitsUnderscoresRegex(); +#else + private static Regex AsciiLettersDigitsUnderscoresRegex() => s_asciiLettersDigitsUnderscoresRegex; + private static readonly Regex s_asciiLettersDigitsUnderscoresRegex = new("^[0-9A-Za-z_]*$", RegexOptions.Compiled); +#endif + + internal static void ValidPluginName([NotNull] string? pluginName, IReadOnlyKernelPluginCollection? plugins = null, [CallerArgumentExpression(nameof(pluginName))] string? paramName = null) + { + Verify.NotNullOrWhiteSpace(pluginName); + if (!AsciiLettersDigitsUnderscoresRegex().IsMatch(pluginName)) + { + Verify.ThrowArgumentInvalidName("plugin name", pluginName, paramName); + } + + if (plugins is not null && plugins.Contains(pluginName)) + { + throw new ArgumentException($"A plugin with the name '{pluginName}' already exists."); + } + } + + internal static void ValidFunctionName([NotNull] string? functionName, [CallerArgumentExpression(nameof(functionName))] string? paramName = null) + { + Verify.NotNullOrWhiteSpace(functionName); + if (!AsciiLettersDigitsUnderscoresRegex().IsMatch(functionName)) + { + Verify.ThrowArgumentInvalidName("function name", functionName, paramName); + } + } + + /// + /// Make sure every function parameter name is unique + /// + /// List of parameters + internal static void ParametersUniqueness(IReadOnlyList parameters) + { + int count = parameters.Count; + if (count > 0) + { + var seen = new HashSet(StringComparer.OrdinalIgnoreCase); + for (int i = 0; i < count; i++) + { + KernelParameterMetadata p = parameters[i]; + if (string.IsNullOrWhiteSpace(p.Name)) + { + string paramName = $"{nameof(parameters)}[{i}].{p.Name}"; + if (p.Name is null) + { + Verify.ThrowArgumentNullException(paramName); + } + else + { + Verify.ThrowArgumentWhiteSpaceException(paramName); + } + } + + if (!seen.Add(p.Name)) + { + throw new ArgumentException($"The function has two or more parameters with the same name '{p.Name}'"); + } + } + } + } +} diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/LoggingExtensions.cs b/dotnet/src/InternalUtilities/src/Diagnostics/LoggingExtensions.cs new file mode 100644 index 000000000000..8fa8c4a4125c --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Diagnostics/LoggingExtensions.cs @@ -0,0 +1,137 @@ +// Copyright (c) Microsoft. All rights reserved. 
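// Behavior sketch for the KernelVerify helpers above (illustrative only; the class is
// internal, so these calls are only legal from within the same assembly, and "kernel" is
// an assumed Kernel instance). Valid names must match ^[0-9A-Za-z_]*$, and ValidPluginName
// also rejects a name that already exists in the supplied plugin collection.
internal static class KernelVerifyBehaviorSketch
{
    public static void Demonstrate(Kernel kernel)
    {
        KernelVerify.ValidFunctionName("Add_2_Numbers");   // passes: ASCII letters, digits, underscores
        KernelVerify.ValidPluginName("MathPlugin");        // passes
        // KernelVerify.ValidPluginName("math-plugin");                // would throw ArgumentException ('-' is not allowed)
        // KernelVerify.ValidPluginName("MathPlugin", kernel.Plugins); // would throw if "MathPlugin" is already registered
    }
}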
+ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; + +namespace Microsoft.SemanticKernel.Diagnostics; + +[ExcludeFromCodeCoverage] +internal static partial class LoggingExtensions +{ + internal static async Task RunWithLoggingAsync( + ILogger logger, + string operationName, + Func operation) + { + logger.LogInvoked(operationName); + + try + { + await operation().ConfigureAwait(false); + + logger.LogCompleted(operationName); + } + catch (OperationCanceledException) + { + logger.LogInvocationCanceled(operationName); + throw; + } + catch (Exception ex) + { + logger.LogInvocationFailed(operationName, ex); + throw; + } + } + + internal static async Task RunWithLoggingAsync( + ILogger logger, + string operationName, + Func> operation) + { + logger.LogInvoked(operationName); + + try + { + var result = await operation().ConfigureAwait(false); + + logger.LogCompleted(operationName); + + return result; + } + catch (OperationCanceledException) + { + logger.LogInvocationCanceled(operationName); + throw; + } + catch (Exception ex) + { + logger.LogInvocationFailed(operationName, ex); + throw; + } + } + + internal static async IAsyncEnumerable RunWithLoggingAsync( + ILogger logger, + string operationName, + Func> operation, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + logger.LogInvoked(operationName); + + IAsyncEnumerator enumerator; + + try + { + enumerator = operation().GetAsyncEnumerator(cancellationToken); + } + catch (OperationCanceledException) + { + logger.LogInvocationCanceled(operationName); + throw; + } + catch (Exception ex) + { + logger.LogInvocationFailed(operationName, ex); + throw; + } + + try + { + while (true) + { + try + { + if (!await enumerator.MoveNextAsync().ConfigureAwait(false)) + { + break; + } + } + catch (OperationCanceledException) + { + logger.LogInvocationCanceled(operationName); + throw; + } + catch (Exception ex) + { + logger.LogInvocationFailed(operationName, ex); + throw; + } + + yield return enumerator.Current; + } + + logger.LogCompleted(operationName); + } + finally + { + await enumerator.DisposeAsync().ConfigureAwait(false); + } + } + + [LoggerMessage(LogLevel.Debug, "{OperationName} invoked.")] + private static partial void LogInvoked(this ILogger logger, string operationName); + + [LoggerMessage(LogLevel.Debug, "{OperationName} completed.")] + private static partial void LogCompleted(this ILogger logger, string operationName); + + [LoggerMessage(LogLevel.Debug, "{OperationName} canceled.")] + private static partial void LogInvocationCanceled(this ILogger logger, string operationName); + + [LoggerMessage(LogLevel.Error, "{OperationName} failed.")] + private static partial void LogInvocationFailed(this ILogger logger, string operationName, Exception exception); +} diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs b/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs index f90895504ead..e5f12e73c411 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs @@ -14,15 +14,9 @@ namespace Microsoft.SemanticKernel; internal static partial class Verify { #if NET - [GeneratedRegex("^[0-9A-Za-z_]*$")] - private static partial Regex AsciiLettersDigitsUnderscoresRegex(); - [GeneratedRegex("^[^.]+\\.[^.]+$")] private static partial Regex FilenameRegex(); #else - private static Regex 
AsciiLettersDigitsUnderscoresRegex() => s_asciiLettersDigitsUnderscoresRegex; - private static readonly Regex s_asciiLettersDigitsUnderscoresRegex = new("^[0-9A-Za-z_]*$", RegexOptions.Compiled); - private static Regex FilenameRegex() => s_filenameRegex; private static readonly Regex s_filenameRegex = new("^[^.]+\\.[^.]+$", RegexOptions.Compiled); #endif @@ -74,29 +68,6 @@ public static void True(bool condition, string message, [CallerArgumentExpressio } } - internal static void ValidPluginName([NotNull] string? pluginName, IReadOnlyKernelPluginCollection? plugins = null, [CallerArgumentExpression(nameof(pluginName))] string? paramName = null) - { - NotNullOrWhiteSpace(pluginName); - if (!AsciiLettersDigitsUnderscoresRegex().IsMatch(pluginName)) - { - ThrowArgumentInvalidName("plugin name", pluginName, paramName); - } - - if (plugins is not null && plugins.Contains(pluginName)) - { - throw new ArgumentException($"A plugin with the name '{pluginName}' already exists."); - } - } - - internal static void ValidFunctionName([NotNull] string? functionName, [CallerArgumentExpression(nameof(functionName))] string? paramName = null) - { - NotNullOrWhiteSpace(functionName); - if (!AsciiLettersDigitsUnderscoresRegex().IsMatch(functionName)) - { - ThrowArgumentInvalidName("function name", functionName, paramName); - } - } - internal static void ValidFilename([NotNull] string? filename, [CallerArgumentExpression(nameof(filename))] string? paramName = null) { NotNullOrWhiteSpace(filename); @@ -145,42 +116,8 @@ internal static void DirectoryExists(string path) } } - /// - /// Make sure every function parameter name is unique - /// - /// List of parameters - internal static void ParametersUniqueness(IReadOnlyList parameters) - { - int count = parameters.Count; - if (count > 0) - { - var seen = new HashSet(StringComparer.OrdinalIgnoreCase); - for (int i = 0; i < count; i++) - { - KernelParameterMetadata p = parameters[i]; - if (string.IsNullOrWhiteSpace(p.Name)) - { - string paramName = $"{nameof(parameters)}[{i}].{p.Name}"; - if (p.Name is null) - { - ThrowArgumentNullException(paramName); - } - else - { - ThrowArgumentWhiteSpaceException(paramName); - } - } - - if (!seen.Add(p.Name)) - { - throw new ArgumentException($"The function has two or more parameters with the same name '{p.Name}'"); - } - } - } - } - [DoesNotReturn] - private static void ThrowArgumentInvalidName(string kind, string name, string? paramName) => + internal static void ThrowArgumentInvalidName(string kind, string name, string? paramName) => throw new ArgumentException($"A {kind} can contain only ASCII letters, digits, and underscores: '{name}' is not a valid name.", paramName); [DoesNotReturn] diff --git a/dotnet/src/InternalUtilities/src/System/EmptyKeyedServiceProvider.cs b/dotnet/src/InternalUtilities/src/System/EmptyKeyedServiceProvider.cs new file mode 100644 index 000000000000..c7aaf6b4fd3b --- /dev/null +++ b/dotnet/src/InternalUtilities/src/System/EmptyKeyedServiceProvider.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.Extensions.DependencyInjection; + +/// Provides an implementation of that contains no services. +internal sealed class EmptyKeyedServiceProvider : IKeyedServiceProvider +{ + /// Gets a singleton instance of . + public static EmptyKeyedServiceProvider Instance { get; } = new(); + + /// + public object? GetService(Type serviceType) => null; + + /// + public object? GetKeyedService(Type serviceType, object? 
serviceKey) => null; + + /// + public object GetRequiredKeyedService(Type serviceType, object? serviceKey) => + this.GetKeyedService(serviceType, serviceKey) ?? + throw new InvalidOperationException($"No service for type '{serviceType}' and key '{serviceKey}' has been registered."); +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs index fddac8f48282..5e9ccce63116 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs @@ -151,7 +151,7 @@ internal KernelFunction(string name, string description, IReadOnlyList parameters, KernelReturnParameterMetadata? returnParameter = null, Dictionary? executionSettings = null, ReadOnlyDictionary? additionalMetadata = null) { Verify.NotNull(name); - Verify.ParametersUniqueness(parameters); + KernelVerify.ParametersUniqueness(parameters); this.Metadata = new KernelFunctionMetadata(name) { @@ -187,7 +187,7 @@ internal KernelFunction(string name, string? pluginName, string description, IRe internal KernelFunction(string name, string? pluginName, string description, IReadOnlyList parameters, JsonSerializerOptions jsonSerializerOptions, KernelReturnParameterMetadata? returnParameter = null, Dictionary? executionSettings = null, ReadOnlyDictionary? additionalMetadata = null) { Verify.NotNull(name); - Verify.ParametersUniqueness(parameters); + KernelVerify.ParametersUniqueness(parameters); Verify.NotNull(jsonSerializerOptions); this.Metadata = new KernelFunctionMetadata(name) diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionMetadata.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionMetadata.cs index cae651f74fea..034eeb72833a 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionMetadata.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionMetadata.cs @@ -58,7 +58,7 @@ public string Name init { Verify.NotNull(value); - Verify.ValidFunctionName(value); + KernelVerify.ValidFunctionName(value); this._name = value; } } diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPlugin.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPlugin.cs index c86faaf03065..1b6aab3c87a3 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPlugin.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPlugin.cs @@ -29,7 +29,7 @@ public abstract class KernelPlugin : IEnumerable /// is an invalid plugin name. protected KernelPlugin(string name, string? description = null) { - Verify.ValidPluginName(name); + KernelVerify.ValidPluginName(name); this.Name = name; this.Description = !string.IsNullOrWhiteSpace(description) ? description! : ""; diff --git a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs index 80e9652519b6..bd61fe1697f2 100644 --- a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs +++ b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs @@ -464,7 +464,7 @@ private KernelFunctionFromMethod( ReadOnlyDictionary? 
additionalMetadata = null) : base(functionName, pluginName, description, parameters, returnParameter, additionalMetadata: additionalMetadata) { - Verify.ValidFunctionName(functionName); + KernelVerify.ValidFunctionName(functionName); this._function = implementationFunc; } @@ -480,7 +480,7 @@ private KernelFunctionFromMethod( ReadOnlyDictionary? additionalMetadata = null) : base(functionName, pluginName, description, parameters, jsonSerializerOptions, returnParameter, additionalMetadata: additionalMetadata) { - Verify.ValidFunctionName(functionName); + KernelVerify.ValidFunctionName(functionName); this._function = implementationFunc; } @@ -519,7 +519,7 @@ private static MethodDetails GetMethodDetails(string? functionName, MethodInfo m } } - Verify.ValidFunctionName(functionName); + KernelVerify.ValidFunctionName(functionName); // Build up a list of KernelParameterMetadata for the parameters we expect to be populated // from arguments. Some arguments are populated specially, not from arguments, and thus @@ -540,7 +540,7 @@ private static MethodDetails GetMethodDetails(string? functionName, MethodInfo m } // Check for param names conflict - Verify.ParametersUniqueness(argParameterViews); + KernelVerify.ParametersUniqueness(argParameterViews); // Get the return type and a marshaling func for the return value. (Type returnType, Func> returnFunc) = GetReturnValueMarshalerDelegate(method); diff --git a/dotnet/src/SemanticKernel.Core/Functions/KernelPluginFactory.cs b/dotnet/src/SemanticKernel.Core/Functions/KernelPluginFactory.cs index ce3262130ddb..d03f958c1fa0 100644 --- a/dotnet/src/SemanticKernel.Core/Functions/KernelPluginFactory.cs +++ b/dotnet/src/SemanticKernel.Core/Functions/KernelPluginFactory.cs @@ -239,7 +239,7 @@ static void AppendWithoutArity(StringBuilder builder, string name) Verify.NotNull(target); pluginName ??= CreatePluginName(target.GetType()); - Verify.ValidPluginName(pluginName); + KernelVerify.ValidPluginName(pluginName); MethodInfo[] methods = target.GetType().GetMethods(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Static); diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/FakeLogger.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/FakeLogger.cs new file mode 100644 index 000000000000..d9c44bf6a560 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/FakeLogger.cs @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using Microsoft.Extensions.Logging; + +namespace SemanticKernel.UnitTests.Utilities; + +public class FakeLogger : ILogger +{ + public List<(LogLevel Level, string Message, Exception? Exception)> Logs { get; } = new(); + + public IDisposable? BeginScope(TState state) where TState : notnull => null; + + public bool IsEnabled(LogLevel logLevel) => true; + + public void Log( + LogLevel logLevel, + EventId eventId, + TState state, + Exception? exception, + Func formatter) + { + var message = formatter(state, exception); + this.Logs.Add((logLevel, message, exception)); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/LoggingExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/LoggingExtensionsTests.cs new file mode 100644 index 000000000000..8a6e09c013c8 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/LoggingExtensionsTests.cs @@ -0,0 +1,239 @@ +// Copyright (c) Microsoft. All rights reserved. 
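// Usage sketch for the LoggingExtensions helper added above (hypothetical decorator code,
// not part of this diff; the helper is internal, and the collection and logger are assumed
// to be supplied by the caller). RunWithLoggingAsync emits "{OperationName} invoked." /
// "completed." / "canceled." at Debug and "{OperationName} failed." at Error around the
// delegate, which is what the tests below assert against using FakeLogger.
internal sealed class LoggingCollectionSketch
{
    private readonly ILogger _logger;

    public LoggingCollectionSketch(ILogger logger) => this._logger = logger;

    public Task CreateCollectionAsync(IVectorStoreRecordCollection<string, object> collection, CancellationToken cancellationToken = default)
        => LoggingExtensions.RunWithLoggingAsync(
            this._logger,
            "CreateCollection",
            () => collection.CreateCollectionAsync(cancellationToken));
}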
+ +using System; +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Diagnostics; +using Xunit; + +namespace SemanticKernel.UnitTests.Utilities; + +public class LoggingExtensionsTests +{ + [Fact] + public async Task RunWithLoggingVoidLogsSuccess() + { + // Arrange + var logger = new FakeLogger(); + static Task Operation() => Task.CompletedTask; + + // Act + await LoggingExtensions.RunWithLoggingAsync(logger, "TestOperation", Operation); + + // Assert + var logs = logger.Logs; + Assert.Equal(2, logs.Count); + Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Debug, logs[1].Level); + Assert.Equal("TestOperation completed.", logs[1].Message); + Assert.Null(logs[1].Exception); + } + + [Fact] + public async Task RunWithLoggingVoidLogsException() + { + // Arrange + var logger = new FakeLogger(); + static Task Operation() => throw new InvalidOperationException("Test error"); + + // Act & Assert + var exception = await Assert.ThrowsAsync(() => + LoggingExtensions.RunWithLoggingAsync(logger, "TestOperation", Operation)); + + Assert.Equal("Test error", exception.Message); + + var logs = logger.Logs; + Assert.Equal(2, logs.Count); + Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Error, logs[1].Level); + Assert.Equal("TestOperation failed.", logs[1].Message); + Assert.Equal("Test error", logs[1].Exception?.Message); + } + + [Fact] + public async Task RunWithLoggingVoidLogsCancellation() + { + // Arrange + var logger = new FakeLogger(); + using var cts = new CancellationTokenSource(); + Task Operation() => Task.FromCanceled(cts.Token); + cts.Cancel(); + + // Act & Assert + await Assert.ThrowsAsync(() => + LoggingExtensions.RunWithLoggingAsync(logger, "TestOperation", Operation)); + + var logs = logger.Logs; + Assert.Equal(2, logs.Count); + Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Debug, logs[1].Level); + Assert.Equal("TestOperation canceled.", logs[1].Message); + Assert.Null(logs[1].Exception); + } + + [Fact] + public async Task RunWithLoggingWithResultReturnsValue() + { + // Arrange + var logger = new FakeLogger(); + static Task Operation() => Task.FromResult(42); + + // Act + var result = await LoggingExtensions.RunWithLoggingAsync(logger, "TestOperation", Operation); + + // Assert + Assert.Equal(42, result); + + var logs = logger.Logs; + + Assert.Equal(2, logs.Count); + Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Debug, logs[1].Level); + Assert.Equal("TestOperation completed.", logs[1].Message); + Assert.Null(logs[1].Exception); + } + + [Fact] + public async Task RunWithLoggingWithResultLogsException() + { + // Arrange + var logger = new FakeLogger(); + static Task Operation() => throw new InvalidOperationException("Test error"); + + // Act & Assert + var exception = await Assert.ThrowsAsync(() => + LoggingExtensions.RunWithLoggingAsync(logger, "TestOperation", Operation)); + + Assert.Equal("Test error", exception.Message); + + var logs = logger.Logs; + Assert.Equal(2, logs.Count); + 
Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Error, logs[1].Level); + Assert.Equal("TestOperation failed.", logs[1].Message); + Assert.Equal("Test error", logs[1].Exception?.Message); + } + + [Fact] + public async Task RunWithLoggingEnumerableYieldsValues() + { + // Arrange + var logger = new FakeLogger(); + static async IAsyncEnumerable Operation() + { + yield return 1; + yield return 2; + await Task.CompletedTask; // Ensure async behavior + } + + // Act + var results = new List(); + await foreach (var item in LoggingExtensions.RunWithLoggingAsync(logger, "TestOperation", Operation, default)) + { + results.Add(item); + } + + // Assert + Assert.Equal(new[] { 1, 2 }, results); + + var logs = logger.Logs; + + Assert.Equal(2, logs.Count); + Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Debug, logs[1].Level); + Assert.Equal("TestOperation completed.", logs[1].Message); + Assert.Null(logs[1].Exception); + } + + [Fact] + public async Task RunWithLoggingEnumerableLogsException() + { + // Arrange + var logger = new FakeLogger(); + static async IAsyncEnumerable Operation() + { + yield return 1; + await Task.CompletedTask; + throw new InvalidOperationException("Test error"); + } + + // Act & Assert + var results = new List(); + var exception = await Assert.ThrowsAsync(async () => + { + await foreach (var item in LoggingExtensions.RunWithLoggingAsync(logger, "TestOperation", Operation, default)) + { + results.Add(item); + } + }); + + Assert.Equal("Test error", exception.Message); + Assert.Equal(new[] { 1 }, results); + + var logs = logger.Logs; + + Assert.Equal(2, logs.Count); + Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Error, logs[1].Level); + Assert.Equal("TestOperation failed.", logs[1].Message); + Assert.Equal("Test error", logs[1].Exception?.Message); + } + + [Fact] + public async Task RunWithLoggingEnumerableLogsCancellation() + { + // Arrange + var logger = new FakeLogger(); + using var cts = new CancellationTokenSource(); + static async IAsyncEnumerable Operation([EnumeratorCancellation] CancellationToken token) + { + yield return 1; + await Task.Delay(10, token); // Simulate async work + yield return 2; + } + cts.Cancel(); + + // Act & Assert + var results = new List(); + var exception = await Assert.ThrowsAsync(async () => + { + await foreach (var item in LoggingExtensions.RunWithLoggingAsync( + logger, + "TestOperation", + () => Operation(cts.Token), + cts.Token)) + { + results.Add(item); + } + }); + + Assert.Equal(new[] { 1 }, results); // Should yield first value before cancellation + + var logs = logger.Logs; + + Assert.Equal(2, logs.Count); + Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Debug, logs[1].Level); + Assert.Equal("TestOperation canceled.", logs[1].Message); + Assert.Null(logs[1].Exception); + } +} From ce572c871119f324c62f672bdc03e515afd860af Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Fri, 14 Mar 2025 22:50:43 +0100 Subject: [PATCH 15/63] .Net: Change Sqlite connector to accept connection string instead of DbConnection (#10972) Closes #10454 --- ...qliteVectorStoreRecordCollectionFactory.cs | 5 +- 
.../SqliteServiceCollectionExtensions.cs | 103 ++------- .../SqliteVectorStore.cs | 33 ++- ...liteVectorStoreCollectionCommandBuilder.cs | 58 +++-- .../SqliteVectorStoreRecordCollection.cs | 208 +++++++++++------- .../Fakes/FakeDBConnection.cs | 32 --- .../Fakes/FakeDbCommand.cs | 45 ---- .../Fakes/FakeDbParameterCollection.cs | 105 --------- .../SqliteServiceCollectionExtensionsTests.cs | 69 +----- ...ectorStoreCollectionCommandBuilderTests.cs | 29 +-- .../SqliteVectorStoreRecordCollectionTests.cs | 6 + .../SqliteVectorStoreTests.cs | 6 + .../SqliteServiceCollectionExtensionsTests.cs | 23 -- .../Memory/Sqlite/SqliteVectorStoreFixture.cs | 32 +-- .../SqliteVectorStoreRecordCollectionTests.cs | 7 +- .../Memory/Sqlite/SqliteVectorStoreTests.cs | 4 +- .../Support/SqliteTestStore.cs | 35 +-- 17 files changed, 256 insertions(+), 544 deletions(-) delete mode 100644 dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Fakes/FakeDBConnection.cs delete mode 100644 dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Fakes/FakeDbCommand.cs delete mode 100644 dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Fakes/FakeDbParameterCollection.cs diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/ISqliteVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/ISqliteVectorStoreRecordCollectionFactory.cs index 6310489ac118..5c75ea0ec6eb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/ISqliteVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/ISqliteVectorStoreRecordCollectionFactory.cs @@ -1,7 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Data.Common; using Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.Sqlite; @@ -17,12 +16,12 @@ public interface ISqliteVectorStoreRecordCollectionFactory /// /// The data type of the record key. /// The data model to use for adding, updating and retrieving data from storage. - /// that will be used to manage the data in SQLite. + /// The connection string for the SQLite database represented by this . /// The name of the collection to connect to. /// An optional record definition that defines the schema of the record type. If not present, attributes on will be used. /// The new instance of . IVectorStoreRecordCollection CreateVectorStoreRecordCollection( - DbConnection connection, + string connectionString, string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition) where TKey : notnull; diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteServiceCollectionExtensions.cs index 9c962c0786d5..11c7ed589ba7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteServiceCollectionExtensions.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Data; +using System; using Microsoft.Data.Sqlite; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; @@ -22,29 +22,12 @@ public static class SqliteServiceCollectionExtensions /// Optional options to further configure the . /// An optional service id to use as the service key. /// Service collection. + [Obsolete("Use AddSqliteVectorStore with connectionString instead.", error: true)] public static IServiceCollection AddSqliteVectorStore( this IServiceCollection services, SqliteVectorStoreOptions? 
options = default, string? serviceId = default) - { - services.AddKeyedTransient( - serviceId, - (sp, obj) => - { - var connection = sp.GetRequiredService(); - - if (connection.State != ConnectionState.Open) - { - connection.Open(); - } - - var selectedOptions = options ?? sp.GetService(); - - return new SqliteVectorStore(connection, options); - }); - - return services; - } + => throw new InvalidOperationException("Use AddSqliteVectorStore with connectionString instead."); /// /// Register a SQLite with the specified service ID. @@ -60,24 +43,9 @@ public static IServiceCollection AddSqliteVectorStore( string connectionString, SqliteVectorStoreOptions? options = default, string? serviceId = default) - { - services.AddKeyedTransient( + => services.AddKeyedSingleton( serviceId, - (sp, obj) => - { - var connection = new SqliteConnection(connectionString); - var extensionName = GetExtensionName(options?.VectorSearchExtensionName); - - connection.Open(); - - connection.LoadExtension(extensionName); - - var selectedOptions = options ?? sp.GetService(); - return new SqliteVectorStore(connection, options); - }); - - return services; - } + (sp, _) => new SqliteVectorStore(connectionString, options ?? sp.GetService())); /// /// Register a SQLite and with the specified service ID @@ -91,33 +59,14 @@ public static IServiceCollection AddSqliteVectorStore( /// Optional options to further configure the . /// An optional service id to use as the service key. /// Service collection. + [Obsolete("Use AddSqliteVectorStoreRecordCollection with connectionString instead.", error: true)] public static IServiceCollection AddSqliteVectorStoreRecordCollection( this IServiceCollection services, string collectionName, SqliteVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) where TKey : notnull - { - services.AddKeyedTransient>( - serviceId, - (sp, obj) => - { - var connection = sp.GetRequiredService(); - - if (connection.State != ConnectionState.Open) - { - connection.Open(); - } - - var selectedOptions = options ?? sp.GetService>(); - - return (new SqliteVectorStoreRecordCollection(connection, collectionName, selectedOptions) as IVectorStoreRecordCollection)!; - }); - - AddVectorizedSearch(services, serviceId); - - return services; - } + => throw new InvalidOperationException("Use AddSqliteVectorStore with connectionString instead."); /// /// Register a SQLite and with the specified service ID. @@ -139,21 +88,14 @@ public static IServiceCollection AddSqliteVectorStoreRecordCollection>( + services.AddKeyedSingleton>( serviceId, - (sp, obj) => - { - var connection = new SqliteConnection(connectionString); - var extensionName = GetExtensionName(options?.VectorSearchExtensionName); - - connection.Open(); - - connection.LoadExtension(extensionName); - - var selectedOptions = options ?? sp.GetService>(); - - return (new SqliteVectorStoreRecordCollection(connection, collectionName, selectedOptions) as IVectorStoreRecordCollection)!; - }); + (sp, _) => ( + new SqliteVectorStoreRecordCollection( + connectionString, + collectionName, + options ?? sp.GetService>()) + as IVectorStoreRecordCollection)!); AddVectorizedSearch(services, serviceId); @@ -169,20 +111,7 @@ public static IServiceCollection AddSqliteVectorStoreRecordCollectionThe service id that the registrations should use. private static void AddVectorizedSearch(IServiceCollection services, string? 
serviceId) where TKey : notnull - { - services.AddKeyedTransient>( + => services.AddKeyedSingleton>( serviceId, - (sp, obj) => - { - return sp.GetRequiredKeyedService>(serviceId); - }); - } - - /// - /// Returns extension name for vector search. - /// - private static string GetExtensionName(string? extensionName) - { - return !string.IsNullOrWhiteSpace(extensionName) ? extensionName! : SqliteConstants.VectorSearchExtensionName; - } + (sp, _) => sp.GetRequiredKeyedService>(serviceId)); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs index 43b1a29b52d2..f5b9615884ff 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs @@ -18,8 +18,8 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// public class SqliteVectorStore : IVectorStore { - /// that will be used to manage the data in SQLite. - private readonly DbConnection _connection; + /// The connection string for the SQLite database represented by this . + private readonly string _connectionString; /// Optional configuration options for this class. private readonly SqliteVectorStoreOptions _options; @@ -27,18 +27,27 @@ public class SqliteVectorStore : IVectorStore /// /// Initializes a new instance of the class. /// - /// that will be used to manage the data in SQLite. + /// The connection string for the SQLite database represented by this . /// Optional configuration options for this class. - public SqliteVectorStore( - DbConnection connection, - SqliteVectorStoreOptions? options = default) + public SqliteVectorStore(string connectionString, SqliteVectorStoreOptions? options = default) { - Verify.NotNull(connection); + Verify.NotNull(connectionString); - this._connection = connection; + this._connectionString = connectionString; this._options = options ?? new(); } + /// + /// Initializes a new instance of the class. + /// + /// that will be used to manage the data in SQLite. + /// Optional configuration options for this class. + [Obsolete("Use the constructor that accepts a connection string instead.", error: true)] + public SqliteVectorStore( + DbConnection connection, + SqliteVectorStoreOptions? options = default) + => throw new InvalidOperationException("Use the constructor that accepts a connection string instead."); + /// public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) where TKey : notnull @@ -47,7 +56,7 @@ public virtual IVectorStoreRecordCollection GetCollection( - this._connection, + this._connectionString, name, vectorStoreRecordDefinition); } @@ -59,7 +68,7 @@ public virtual IVectorStoreRecordCollection GetCollection( - this._connection, + this._connectionString, name, new() { @@ -77,7 +86,9 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat const string TablePropertyName = "name"; const string Query = $"SELECT {TablePropertyName} FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%';"; - using var command = this._connection.CreateCommand(); + using var connection = new SqliteConnection(this._connectionString); + await connection.OpenAsync(cancellationToken).ConfigureAwait(false); + using var command = connection.CreateCommand(); command.CommandText = Query; diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs index 837e3044ddc7..a48ae571e6a6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs @@ -15,23 +15,9 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// Command builder for queries in SQLite database. /// [SuppressMessage("Security", "CA2100:Review SQL queries for security vulnerabilities", Justification = "User input is passed using command parameters.")] -internal sealed class SqliteVectorStoreCollectionCommandBuilder +internal static class SqliteVectorStoreCollectionCommandBuilder { - /// that will be used to manage the data in SQLite. - private readonly DbConnection _connection; - - /// - /// Initializes a new instance of the class. - /// - /// that will be used to manage the data in SQLite. 
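// Usage sketch for the connection-string based SQLite API in this patch (hypothetical
// caller code; "Data Source=vectorstore.db", "hotels" and the Hotel record type are
// placeholders). Each operation now opens and disposes its own SqliteConnection rather
// than borrowing a caller-managed DbConnection.
internal static class SqliteConnectionStringUsageSketch
{
    public static IVectorStoreRecordCollection<ulong, Hotel> GetHotelCollection()
    {
        var vectorStore = new SqliteVectorStore("Data Source=vectorstore.db");
        return vectorStore.GetCollection<ulong, Hotel>("hotels");
    }

    // DI registration is now a singleton rather than a transient:
    // services.AddSqliteVectorStore("Data Source=vectorstore.db");
}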
- public SqliteVectorStoreCollectionCommandBuilder(DbConnection connection) - { - Verify.NotNull(connection); - - this._connection = connection; - } - - public DbCommand BuildTableCountCommand(string tableName) + public static DbCommand BuildTableCountCommand(SqliteConnection connection, string tableName) { Verify.NotNullOrWhiteSpace(tableName); @@ -40,7 +26,7 @@ public DbCommand BuildTableCountCommand(string tableName) var query = $"SELECT count(*) FROM {SystemTable} WHERE type='table' AND name={ParameterName};"; - var command = this._connection.CreateCommand(); + var command = connection.CreateCommand(); command.CommandText = query; @@ -49,7 +35,7 @@ public DbCommand BuildTableCountCommand(string tableName) return command; } - public DbCommand BuildCreateTableCommand(string tableName, IReadOnlyList columns, bool ifNotExists) + public static DbCommand BuildCreateTableCommand(SqliteConnection connection, string tableName, IReadOnlyList columns, bool ifNotExists) { var builder = new StringBuilder(); @@ -58,14 +44,15 @@ public DbCommand BuildCreateTableCommand(string tableName, IReadOnlyList columns, bool ifNotExists, @@ -78,25 +65,26 @@ public DbCommand BuildCreateVirtualTableCommand( builder.AppendLine(string.Join(",\n", columns.Select(GetColumnDefinition))); builder.Append(");"); - var command = this._connection.CreateCommand(); + var command = connection.CreateCommand(); command.CommandText = builder.ToString(); return command; } - public DbCommand BuildDropTableCommand(string tableName) + public static DbCommand BuildDropTableCommand(SqliteConnection connection, string tableName) { string query = $"DROP TABLE [{tableName}];"; - var command = this._connection.CreateCommand(); + var command = connection.CreateCommand(); command.CommandText = query; return command; } - public DbCommand BuildInsertCommand( + public static DbCommand BuildInsertCommand( + SqliteConnection connection, string tableName, string rowIdentifier, IReadOnlyList columnNames, @@ -104,7 +92,7 @@ public DbCommand BuildInsertCommand( bool replaceIfExists = false) { var builder = new StringBuilder(); - var command = this._connection.CreateCommand(); + var command = connection.CreateCommand(); var replacePlaceholder = replaceIfExists ? " OR REPLACE" : string.Empty; @@ -132,7 +120,8 @@ public DbCommand BuildInsertCommand( return command; } - public DbCommand BuildSelectCommand( + public static DbCommand BuildSelectCommand( + SqliteConnection connection, string tableName, IReadOnlyList columnNames, List conditions, @@ -140,7 +129,7 @@ public DbCommand BuildSelectCommand( { var builder = new StringBuilder(); - var (command, whereClause) = this.GetCommandWithWhereClause(conditions); + var (command, whereClause) = GetCommandWithWhereClause(connection, conditions); builder.AppendLine($"SELECT {string.Join(", ", columnNames)}"); builder.AppendLine($"FROM {tableName}"); @@ -153,7 +142,8 @@ public DbCommand BuildSelectCommand( return command; } - public DbCommand BuildSelectLeftJoinCommand( + public static DbCommand BuildSelectLeftJoinCommand( + SqliteConnection connection, string leftTable, string rightTable, string joinColumnName, @@ -172,7 +162,7 @@ .. leftTablePropertyNames.Select(property => $"{leftTable}.{property}"), .. 
rightTablePropertyNames.Select(property => $"{rightTable}.{property}"), ]; - var (command, whereClause) = this.GetCommandWithWhereClause(conditions, extraWhereFilter, extraParameters); + var (command, whereClause) = GetCommandWithWhereClause(connection, conditions, extraWhereFilter, extraParameters); builder.AppendLine($"SELECT {string.Join(", ", propertyNames)}"); builder.AppendLine($"FROM {leftTable} "); @@ -186,13 +176,14 @@ .. rightTablePropertyNames.Select(property => $"{rightTable}.{property}"), return command; } - public DbCommand BuildDeleteCommand( + public static DbCommand BuildDeleteCommand( + SqliteConnection connection, string tableName, List conditions) { var builder = new StringBuilder(); - var (command, whereClause) = this.GetCommandWithWhereClause(conditions); + var (command, whereClause) = GetCommandWithWhereClause(connection, conditions); builder.AppendLine($"DELETE FROM [{tableName}]"); @@ -241,14 +232,15 @@ private static string GetColumnDefinition(SqliteColumn column) return string.Join(" ", columnDefinitionParts); } - private (DbCommand Command, string WhereClause) GetCommandWithWhereClause( + private static (DbCommand Command, string WhereClause) GetCommandWithWhereClause( + SqliteConnection connection, List conditions, string? extraWhereFilter = null, Dictionary? extraParameters = null) { const string WhereClauseOperator = " AND "; - var command = this._connection.CreateCommand(); + var command = connection.CreateCommand(); var whereClauseParts = new List(); foreach (var condition in conditions) diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 22ad3b67c403..16dbd7238aca 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -7,6 +7,7 @@ using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; +using Microsoft.Data.Sqlite; using Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.Sqlite; @@ -24,8 +25,8 @@ public class SqliteVectorStoreRecordCollection : /// The name of this database for telemetry purposes. private const string DatabaseName = "SQLite"; - /// that will be used to manage the data in SQLite. - private readonly DbConnection _connection; + /// The connection string for the SQLite database represented by this . + private readonly string _connectionString; /// Optional configuration options for this class. private readonly SqliteVectorStoreRecordCollectionOptions _options; @@ -36,9 +37,6 @@ public class SqliteVectorStoreRecordCollection : /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); - /// Command builder for queries in SQLite database. - private readonly SqliteVectorStoreCollectionCommandBuilder _commandBuilder; - /// Contains helpers for reading vector store model properties and their attributes. private readonly VectorStoreRecordPropertyReader _propertyReader; @@ -63,30 +61,34 @@ public class SqliteVectorStoreRecordCollection : /// Table name in SQLite for vector properties. private readonly string _vectorTableName; + /// The sqlite_vec extension name to use. + private readonly string _vectorSearchExtensionName; + /// public string CollectionName { get; } /// /// Initializes a new instance of the class. 
/// - /// that will be used to manage the data in SQLite. + /// The connection string for the SQLite database represented by this . /// The name of the collection/table that this will access. /// Optional configuration options for this class. public SqliteVectorStoreRecordCollection( - DbConnection connection, + string connectionString, string collectionName, SqliteVectorStoreRecordCollectionOptions? options = default) { // Verify. - Verify.NotNull(connection); + Verify.NotNull(connectionString); Verify.NotNullOrWhiteSpace(collectionName); VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.DictionaryCustomMapper is not null, SqliteConstants.SupportedKeyTypes); VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); // Assign. - this._connection = connection; + this._connectionString = connectionString; this.CollectionName = collectionName; this._options = options ?? new(); + this._vectorSearchExtensionName = this._options.VectorSearchExtensionName ?? SqliteConstants.VectorSearchExtensionName; this._dataTableName = this.CollectionName; this._vectorTableName = GetVectorTableName(this._dataTableName, this._options); @@ -110,8 +112,6 @@ public SqliteVectorStoreRecordCollection( this._vectorTableStoragePropertyNames = new(() => [this._propertyReader.KeyPropertyStoragePropertyName, .. this._propertyReader.VectorPropertyStoragePropertyNames]); this._mapper = this.InitializeMapper(); - - this._commandBuilder = new SqliteVectorStoreCollectionCommandBuilder(this._connection); } /// @@ -119,7 +119,8 @@ public virtual async Task CollectionExistsAsync(CancellationToken cancella { const string OperationName = "TableCount"; - using var command = this._commandBuilder.BuildTableCountCommand(this._dataTableName); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + using var command = SqliteVectorStoreCollectionCommandBuilder.BuildTableCountCommand(connection, this._dataTableName); var result = await this .RunOperationAsync(OperationName, () => command.ExecuteScalarAsync(cancellationToken)) @@ -131,25 +132,31 @@ public virtual async Task CollectionExistsAsync(CancellationToken cancella } /// - public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public async Task CreateCollectionAsync(CancellationToken cancellationToken = default) { - return this.InternalCreateCollectionAsync(ifNotExists: false, cancellationToken); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + await this.InternalCreateCollectionAsync(connection, ifNotExists: false, cancellationToken) + .ConfigureAwait(false); } /// - public virtual Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { - return this.InternalCreateCollectionAsync(ifNotExists: true, cancellationToken); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + await this.InternalCreateCollectionAsync(connection, ifNotExists: true, cancellationToken) + .ConfigureAwait(false); } /// public virtual async Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { - await this.DropTableAsync(this._dataTableName, cancellationToken).ConfigureAwait(false); + using var connection = await 
this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + + await this.DropTableAsync(connection, this._dataTableName, cancellationToken).ConfigureAwait(false); if (this._vectorPropertiesExist) { - await this.DropTableAsync(this._vectorTableName, cancellationToken).ConfigureAwait(false); + await this.DropTableAsync(connection, this._vectorTableName, cancellationToken).ConfigureAwait(false); } } @@ -187,7 +194,7 @@ public virtual Task> VectorizedSearchAsync string? extraWhereFilter = null; Dictionary? extraParameters = null; - if (searchOptions.Filter is not null) + if (searchOptions.OldFilter is not null) { if (searchOptions.Filter is not null) { @@ -224,39 +231,52 @@ public virtual Task> VectorizedSearchAsync #region Implementation of IVectorStoreRecordCollection /// - public virtual Task GetAsync(ulong key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(ulong key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { - return this.InternalGetAsync(key, options, cancellationToken); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + return await this.InternalGetAsync(connection, key, options, cancellationToken).ConfigureAwait(false); } /// - public virtual IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { - return this.InternalGetBatchAsync(keys, options, cancellationToken); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + await foreach (var record in this.InternalGetBatchAsync(connection, keys, options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } } /// - public virtual Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { - return this.InternalUpsertAsync(record, cancellationToken); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + return await this.InternalUpsertAsync(connection, record, cancellationToken).ConfigureAwait(false); } /// - public virtual IAsyncEnumerable UpsertBatchAsync(IEnumerable records, CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { - return this.InternalUpsertBatchAsync(records, cancellationToken); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + await foreach (var record in this.InternalUpsertBatchAsync(connection, records, cancellationToken) + .ConfigureAwait(false)) + { + yield return record; + } } /// - public virtual Task DeleteAsync(ulong key, CancellationToken cancellationToken = default) + public async Task DeleteAsync(ulong key, CancellationToken cancellationToken = default) { - return this.InternalDeleteAsync(key, cancellationToken); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + await this.InternalDeleteAsync(connection, key, cancellationToken).ConfigureAwait(false); } /// - public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) 
+ public async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { - return this.InternalDeleteBatchAsync(keys, cancellationToken); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + await this.InternalDeleteBatchAsync(connection, keys, cancellationToken).ConfigureAwait(false); } #endregion @@ -264,45 +284,70 @@ public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken #region Implementation of IVectorStoreRecordCollection /// - public virtual Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { - return this.InternalGetAsync(key, options, cancellationToken); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + return await this.InternalGetAsync(connection, key, options, cancellationToken).ConfigureAwait(false); } /// - public virtual IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { - return this.InternalGetBatchAsync(keys, options, cancellationToken); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + await foreach (var record in this.InternalGetBatchAsync(connection, keys, options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } } /// - Task IVectorStoreRecordCollection.UpsertAsync(TRecord record, CancellationToken cancellationToken) + async Task IVectorStoreRecordCollection.UpsertAsync(TRecord record, CancellationToken cancellationToken) { - return this.InternalUpsertAsync(record, cancellationToken); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + return await this.InternalUpsertAsync(connection, record, cancellationToken) + .ConfigureAwait(false); } /// - IAsyncEnumerable IVectorStoreRecordCollection.UpsertBatchAsync(IEnumerable records, CancellationToken cancellationToken) + async IAsyncEnumerable IVectorStoreRecordCollection.UpsertBatchAsync( + IEnumerable records, + [EnumeratorCancellation] CancellationToken cancellationToken) { - return this.InternalUpsertBatchAsync(records, cancellationToken); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + await foreach (var record in this.InternalUpsertBatchAsync(connection, records, cancellationToken) + .ConfigureAwait(false)) + { + yield return record; + } } /// - public virtual Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public async Task DeleteAsync(string key, CancellationToken cancellationToken = default) { - return this.InternalDeleteAsync(key, cancellationToken); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + await this.InternalDeleteAsync(connection, key, cancellationToken) + .ConfigureAwait(false); } /// - public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { - return this.InternalDeleteBatchAsync(keys, cancellationToken); + using var connection = await 
this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + await this.InternalDeleteBatchAsync(connection, keys, cancellationToken).ConfigureAwait(false); } #endregion #region private + private async ValueTask GetConnectionAsync(CancellationToken cancellationToken = default) + { + var connection = new SqliteConnection(this._connectionString); + await connection.OpenAsync(cancellationToken).ConfigureAwait(false); + connection.LoadExtension(this._vectorSearchExtensionName); + return connection; + } + private async IAsyncEnumerable> EnumerateAndMapSearchResultsAsync( List conditions, string? extraWhereFilter, @@ -323,7 +368,9 @@ private async IAsyncEnumerable> EnumerateAndMapSearc properties.AddRange(this._propertyReader.VectorProperties); } - using var command = this._commandBuilder.BuildSelectLeftJoinCommand( + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + using var command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectLeftJoinCommand( + connection, this._vectorTableName, this._dataTableName, this._propertyReader.KeyPropertyStoragePropertyName, @@ -353,13 +400,14 @@ private async IAsyncEnumerable> EnumerateAndMapSearc } } - private Task InternalCreateCollectionAsync(bool ifNotExists, CancellationToken cancellationToken) + private Task InternalCreateCollectionAsync(SqliteConnection connection, bool ifNotExists, CancellationToken cancellationToken) { List dataTableColumns = SqliteVectorStoreRecordPropertyMapping.GetColumns( this._dataTableProperties.Value, this._propertyReader.StoragePropertyNamesMap); List tasks = [this.CreateTableAsync( + connection, this._dataTableName, dataTableColumns, ifNotExists, @@ -376,6 +424,7 @@ private Task InternalCreateCollectionAsync(bool ifNotExists, CancellationToken c this._propertyReader.StoragePropertyNamesMap); tasks.Add(this.CreateVirtualTableAsync( + connection, this._vectorTableName, vectorTableColumns, ifNotExists, @@ -386,34 +435,35 @@ private Task InternalCreateCollectionAsync(bool ifNotExists, CancellationToken c return Task.WhenAll(tasks); } - private Task CreateTableAsync(string tableName, List columns, bool ifNotExists, CancellationToken cancellationToken) + private Task CreateTableAsync(SqliteConnection connection, string tableName, List columns, bool ifNotExists, CancellationToken cancellationToken) { const string OperationName = "CreateTable"; - using var command = this._commandBuilder.BuildCreateTableCommand(tableName, columns, ifNotExists); + using var command = SqliteVectorStoreCollectionCommandBuilder.BuildCreateTableCommand(connection, tableName, columns, ifNotExists); return this.RunOperationAsync(OperationName, () => command.ExecuteNonQueryAsync(cancellationToken)); } - private Task CreateVirtualTableAsync(string tableName, List columns, bool ifNotExists, string extensionName, CancellationToken cancellationToken) + private Task CreateVirtualTableAsync(SqliteConnection connection, string tableName, List columns, bool ifNotExists, string extensionName, CancellationToken cancellationToken) { const string OperationName = "CreateVirtualTable"; - using var command = this._commandBuilder.BuildCreateVirtualTableCommand(tableName, columns, ifNotExists, extensionName); + using var command = SqliteVectorStoreCollectionCommandBuilder.BuildCreateVirtualTableCommand(connection, tableName, columns, ifNotExists, extensionName); return this.RunOperationAsync(OperationName, () => command.ExecuteNonQueryAsync(cancellationToken)); } - private Task DropTableAsync(string tableName, 
CancellationToken cancellationToken) + private Task DropTableAsync(SqliteConnection connection, string tableName, CancellationToken cancellationToken) { const string OperationName = "DropTable"; - using var command = this._commandBuilder.BuildDropTableCommand(tableName); + using var command = SqliteVectorStoreCollectionCommandBuilder.BuildDropTableCommand(connection, tableName); return this.RunOperationAsync(OperationName, () => command.ExecuteNonQueryAsync(cancellationToken)); } private async Task InternalGetAsync( + SqliteConnection connection, TKey key, GetRecordOptions? options, CancellationToken cancellationToken) @@ -425,12 +475,13 @@ private Task DropTableAsync(string tableName, CancellationToken cancellatio TableName = this._dataTableName }; - return await this.InternalGetBatchAsync(condition, options, cancellationToken) + return await this.InternalGetBatchAsync(connection, condition, options, cancellationToken) .FirstOrDefaultAsync(cancellationToken) .ConfigureAwait(false); } private IAsyncEnumerable InternalGetBatchAsync( + SqliteConnection connection, IEnumerable keys, GetRecordOptions? options, CancellationToken cancellationToken) @@ -446,10 +497,11 @@ private IAsyncEnumerable InternalGetBatchAsync( TableName = this._dataTableName }; - return this.InternalGetBatchAsync(condition, options, cancellationToken); + return this.InternalGetBatchAsync(connection, condition, options, cancellationToken); } private async IAsyncEnumerable InternalGetBatchAsync( + SqliteConnection connection, SqliteWhereCondition condition, GetRecordOptions? options, [EnumeratorCancellation] CancellationToken cancellationToken) @@ -463,7 +515,8 @@ private async IAsyncEnumerable InternalGetBatchAsync( if (includeVectors) { - command = this._commandBuilder.BuildSelectLeftJoinCommand( + command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectLeftJoinCommand( + connection, this._dataTableName, this._vectorTableName, this._propertyReader.KeyPropertyStoragePropertyName, @@ -475,7 +528,8 @@ private async IAsyncEnumerable InternalGetBatchAsync( } else { - command = this._commandBuilder.BuildSelectCommand( + command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectCommand( + connection, this._dataTableName, this._dataTableStoragePropertyNames.Value, [condition]); @@ -496,7 +550,7 @@ private async IAsyncEnumerable InternalGetBatchAsync( } } - private async Task InternalUpsertAsync(TRecord record, CancellationToken cancellationToken) + private async Task InternalUpsertAsync(SqliteConnection connection, TRecord record, CancellationToken cancellationToken) { const string OperationName = "Upsert"; @@ -512,14 +566,14 @@ private async Task InternalUpsertAsync(TRecord record, CancellationT var condition = new SqliteWhereEqualsCondition(this._propertyReader.KeyPropertyStoragePropertyName, key); - var upsertedRecordKey = await this.InternalUpsertBatchAsync([storageModel], condition, cancellationToken) + var upsertedRecordKey = await this.InternalUpsertBatchAsync(connection, [storageModel], condition, cancellationToken) .FirstOrDefaultAsync(cancellationToken) .ConfigureAwait(false); return upsertedRecordKey ?? 
throw new VectorStoreOperationException("Error occurred during upsert operation."); } - private IAsyncEnumerable InternalUpsertBatchAsync(IEnumerable records, CancellationToken cancellationToken) + private IAsyncEnumerable InternalUpsertBatchAsync(SqliteConnection connection, IEnumerable records, CancellationToken cancellationToken) { const string OperationName = "UpsertBatch"; @@ -533,10 +587,11 @@ private IAsyncEnumerable InternalUpsertBatchAsync(IEnumerable(storageModels, condition, cancellationToken); + return this.InternalUpsertBatchAsync(connection, storageModels, condition, cancellationToken); } private async IAsyncEnumerable InternalUpsertBatchAsync( + SqliteConnection connection, List> storageModels, SqliteWhereCondition condition, [EnumeratorCancellation] CancellationToken cancellationToken) @@ -548,13 +603,15 @@ private async IAsyncEnumerable InternalUpsertBatchAsync( { // Deleting vector records first since current version of vector search extension // doesn't support Upsert operation, only Delete/Insert. - using var vectorDeleteCommand = this._commandBuilder.BuildDeleteCommand( + using var vectorDeleteCommand = SqliteVectorStoreCollectionCommandBuilder.BuildDeleteCommand( + connection, this._vectorTableName, [condition]); await vectorDeleteCommand.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); - using var vectorInsertCommand = this._commandBuilder.BuildInsertCommand( + using var vectorInsertCommand = SqliteVectorStoreCollectionCommandBuilder.BuildInsertCommand( + connection, this._vectorTableName, this._propertyReader.KeyPropertyStoragePropertyName, this._vectorTableStoragePropertyNames.Value, @@ -563,12 +620,13 @@ private async IAsyncEnumerable InternalUpsertBatchAsync( await vectorInsertCommand.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); } - using var dataCommand = this._commandBuilder.BuildInsertCommand( - this._dataTableName, - this._propertyReader.KeyPropertyStoragePropertyName, - this._dataTableStoragePropertyNames.Value, - storageModels, - replaceIfExists: true); + using var dataCommand = SqliteVectorStoreCollectionCommandBuilder.BuildInsertCommand( + connection, + this._dataTableName, + this._propertyReader.KeyPropertyStoragePropertyName, + this._dataTableStoragePropertyNames.Value, + storageModels, + replaceIfExists: true); using var reader = await dataCommand.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); @@ -585,16 +643,16 @@ private async IAsyncEnumerable InternalUpsertBatchAsync( } } - private Task InternalDeleteAsync(TKey key, CancellationToken cancellationToken) + private Task InternalDeleteAsync(SqliteConnection connection, TKey key, CancellationToken cancellationToken) { Verify.NotNull(key); var condition = new SqliteWhereEqualsCondition(this._propertyReader.KeyPropertyStoragePropertyName, key); - return this.InternalDeleteBatchAsync(condition, cancellationToken); + return this.InternalDeleteBatchAsync(connection, condition, cancellationToken); } - private Task InternalDeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken) + private Task InternalDeleteBatchAsync(SqliteConnection connection, IEnumerable keys, CancellationToken cancellationToken) { Verify.NotNull(keys); @@ -606,10 +664,10 @@ private Task InternalDeleteBatchAsync(IEnumerable keys, Cancellation this._propertyReader.KeyPropertyStoragePropertyName, keysList); - return this.InternalDeleteBatchAsync(condition, cancellationToken); + return this.InternalDeleteBatchAsync(connection, condition, cancellationToken); } - private Task 
InternalDeleteBatchAsync(SqliteWhereCondition condition, CancellationToken cancellationToken) + private Task InternalDeleteBatchAsync(SqliteConnection connection, SqliteWhereCondition condition, CancellationToken cancellationToken) { const string OperationName = "Delete"; @@ -617,14 +675,16 @@ private Task InternalDeleteBatchAsync(SqliteWhereCondition condition, Cancellati if (this._vectorPropertiesExist) { - using var vectorCommand = this._commandBuilder.BuildDeleteCommand( + using var vectorCommand = SqliteVectorStoreCollectionCommandBuilder.BuildDeleteCommand( + connection, this._vectorTableName, [condition]); tasks.Add(this.RunOperationAsync(OperationName, () => vectorCommand.ExecuteNonQueryAsync(cancellationToken))); } - using var dataCommand = this._commandBuilder.BuildDeleteCommand( + using var dataCommand = SqliteVectorStoreCollectionCommandBuilder.BuildDeleteCommand( + connection, this._dataTableName, [condition]); diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Fakes/FakeDBConnection.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Fakes/FakeDBConnection.cs deleted file mode 100644 index 7c318e1ef413..000000000000 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Fakes/FakeDBConnection.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Data; -using System.Data.Common; - -namespace SemanticKernel.Connectors.Sqlite.UnitTests; - -#pragma warning disable CS8618, CS8765 - -internal sealed class FakeDBConnection(DbCommand command) : DbConnection -{ - public override string ConnectionString { get; set; } - - public override string Database => throw new NotImplementedException(); - - public override string DataSource => throw new NotImplementedException(); - - public override string ServerVersion => throw new NotImplementedException(); - - public override ConnectionState State => throw new NotImplementedException(); - - public override void ChangeDatabase(string databaseName) => throw new NotImplementedException(); - - public override void Close() => throw new NotImplementedException(); - - public override void Open() => throw new NotImplementedException(); - - protected override DbTransaction BeginDbTransaction(IsolationLevel isolationLevel) => throw new NotImplementedException(); - - protected override DbCommand CreateDbCommand() => command; -} diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Fakes/FakeDbCommand.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Fakes/FakeDbCommand.cs deleted file mode 100644 index df6062d9a4c1..000000000000 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Fakes/FakeDbCommand.cs +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Data; -using System.Data.Common; - -namespace SemanticKernel.Connectors.Sqlite.UnitTests; - -#pragma warning disable CS8618, CS8765 - -internal sealed class FakeDbCommand( - DbDataReader? dataReader = null, - object? scalarResult = null) : DbCommand -{ - public int ExecuteNonQueryCallCount { get; private set; } = 0; - - private readonly FakeDbParameterCollection _parameterCollection = []; - - public override string CommandText { get; set; } - public override int CommandTimeout { get; set; } - public override CommandType CommandType { get; set; } - public override bool DesignTimeVisible { get; set; } - public override UpdateRowSource UpdatedRowSource { get; set; } - protected override DbConnection? 
DbConnection { get; set; } - - protected override DbParameterCollection DbParameterCollection => this._parameterCollection; - - protected override DbTransaction? DbTransaction { get; set; } - - public override void Cancel() => throw new NotImplementedException(); - - public override int ExecuteNonQuery() - { - this.ExecuteNonQueryCallCount++; - return 0; - } - - public override object? ExecuteScalar() => scalarResult; - - public override void Prepare() => throw new NotImplementedException(); - - protected override DbParameter CreateDbParameter() => throw new NotImplementedException(); - - protected override DbDataReader ExecuteDbDataReader(CommandBehavior behavior) => dataReader ?? throw new NotImplementedException(); -} diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Fakes/FakeDbParameterCollection.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Fakes/FakeDbParameterCollection.cs deleted file mode 100644 index 246b97a3360b..000000000000 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Fakes/FakeDbParameterCollection.cs +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections; -using System.Collections.Generic; -using System.Data.Common; - -namespace SemanticKernel.Connectors.Sqlite.UnitTests; - -#pragma warning disable CA1812 - -internal sealed class FakeDbParameterCollection : DbParameterCollection -{ - private readonly List _parameters = []; - - public override int Count => this._parameters.Count; - - public override object SyncRoot => throw new NotImplementedException(); - - public override int Add(object value) - { - this._parameters.Add(value); - return default; - } - - public override void AddRange(Array values) - { - this._parameters.AddRange([.. values]); - } - - public override void Clear() - { - this._parameters.Clear(); - } - - public override bool Contains(object value) - { - return this._parameters.Contains(value); - } - - public override bool Contains(string value) - { - return this._parameters.Contains(value); - } - - public override void CopyTo(Array array, int index) - { - this._parameters.CopyTo([.. 
array], index); - } - - public override IEnumerator GetEnumerator() - { - return this._parameters.GetEnumerator(); - } - - public override int IndexOf(object value) - { - return this._parameters.IndexOf(value); - } - - public override int IndexOf(string parameterName) - { - return this._parameters.IndexOf(parameterName); - } - - public override void Insert(int index, object value) - { - this._parameters.Insert(index, value); - } - - public override void Remove(object value) - { - this._parameters.Remove(value); - } - - public override void RemoveAt(int index) - { - this._parameters.RemoveAt(index); - } - - public override void RemoveAt(string parameterName) - { - throw new NotImplementedException(); - } - - protected override DbParameter GetParameter(int index) - { - return (this._parameters[index] as DbParameter)!; - } - - protected override DbParameter GetParameter(string parameterName) - { - throw new NotImplementedException(); - } - - protected override void SetParameter(int index, DbParameter value) - { - this._parameters[index] = value; - } - - protected override void SetParameter(string parameterName, DbParameter value) - { - throw new NotImplementedException(); - } -} diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteServiceCollectionExtensionsTests.cs index 69488cf4d8d4..e7f78e388c02 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteServiceCollectionExtensionsTests.cs @@ -1,12 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Data; -using Microsoft.Data.Sqlite; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.Sqlite; -using Moq; using Xunit; namespace SemanticKernel.Connectors.Sqlite.UnitTests; @@ -18,23 +15,11 @@ public sealed class SqliteServiceCollectionExtensionsTests { private readonly IServiceCollection _serviceCollection = new ServiceCollection(); - [Theory] - [InlineData(ConnectionState.Open)] - [InlineData(ConnectionState.Closed)] - public void AddVectorStoreRegistersClass(ConnectionState connectionState) + [Fact] + public void AddVectorStoreRegistersClass() { - // Arrange - var expectedOpenCalls = connectionState == ConnectionState.Closed ? 1 : 0; - - var mockConnection = new Mock(); - - mockConnection.Setup(l => l.State).Returns(connectionState); - mockConnection.Setup(l => l.Open()); - - this._serviceCollection.AddTransient((_) => mockConnection.Object); - // Act - this._serviceCollection.AddSqliteVectorStore(); + this._serviceCollection.AddSqliteVectorStore("Data Source=:memory:"); var serviceProvider = this._serviceCollection.BuildServiceProvider(); var vectorStore = serviceProvider.GetRequiredService(); @@ -42,30 +27,13 @@ public void AddVectorStoreRegistersClass(ConnectionState connectionState) // Assert Assert.NotNull(vectorStore); Assert.IsType(vectorStore); - - mockConnection.Verify(l => l.Open(), Times.Exactly(expectedOpenCalls)); } - [Theory] - [InlineData(ConnectionState.Open)] - [InlineData(ConnectionState.Closed)] - public void AddVectorStoreRecordCollectionWithStringKeyRegistersClass(ConnectionState connectionState) + [Fact] + public void AddVectorStoreRecordCollectionWithStringKeyRegistersClass() { - // Arrange - var expectedOpenCalls = connectionState == ConnectionState.Closed ? 
1 : 0; - - var mockConnection = new Mock(); - - mockConnection.SetupSequence(l => l.State) - .Returns(connectionState) - .Returns(ConnectionState.Open); - - mockConnection.Setup(l => l.Open()); - - this._serviceCollection.AddTransient((_) => mockConnection.Object); - // Act - this._serviceCollection.AddSqliteVectorStoreRecordCollection("testcollection"); + this._serviceCollection.AddSqliteVectorStoreRecordCollection("testcollection", "Data Source=:memory:"); var serviceProvider = this._serviceCollection.BuildServiceProvider(); @@ -77,30 +45,13 @@ public void AddVectorStoreRecordCollectionWithStringKeyRegistersClass(Connection var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); - - mockConnection.Verify(l => l.Open(), Times.Exactly(expectedOpenCalls)); } - [Theory] - [InlineData(ConnectionState.Open)] - [InlineData(ConnectionState.Closed)] - public void AddVectorStoreRecordCollectionWithNumericKeyRegistersClass(ConnectionState connectionState) + [Fact] + public void AddVectorStoreRecordCollectionWithNumericKeyRegistersClass() { - // Arrange - var expectedOpenCalls = connectionState == ConnectionState.Closed ? 1 : 0; - - var mockConnection = new Mock(); - - mockConnection.SetupSequence(l => l.State) - .Returns(connectionState) - .Returns(ConnectionState.Open); - - mockConnection.Setup(l => l.Open()); - - this._serviceCollection.AddTransient((_) => mockConnection.Object); - // Act - this._serviceCollection.AddSqliteVectorStoreRecordCollection("testcollection"); + this._serviceCollection.AddSqliteVectorStoreRecordCollection("testcollection", "Data Source=:memory:"); var serviceProvider = this._serviceCollection.BuildServiceProvider(); @@ -112,8 +63,6 @@ public void AddVectorStoreRecordCollectionWithNumericKeyRegistersClass(Connectio var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); - - mockConnection.Verify(l => l.Open(), Times.Exactly(expectedOpenCalls)); } #region private diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs index 370756cb4344..5cba1e805e86 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using Microsoft.Data.Sqlite; using Microsoft.SemanticKernel.Connectors.Sqlite; using Xunit; @@ -12,15 +13,13 @@ namespace SemanticKernel.Connectors.Sqlite.UnitTests; /// public sealed class SqliteVectorStoreCollectionCommandBuilderTests : IDisposable { - private readonly FakeDbCommand _command; - private readonly FakeDBConnection _connection; - private readonly SqliteVectorStoreCollectionCommandBuilder _commandBuilder; + private readonly SqliteCommand _command; + private readonly SqliteConnection _connection; public SqliteVectorStoreCollectionCommandBuilderTests() { - this._command = new(); - this._connection = new(this._command); - this._commandBuilder = new(this._connection); + this._command = new() { Connection = this._connection }; + this._connection = new(); } [Fact] @@ -30,7 +29,7 @@ public void ItBuildsTableCountCommand() const string TableName = "TestTable"; // Act - var command = this._commandBuilder.BuildTableCountCommand(TableName); + 
var command = SqliteVectorStoreCollectionCommandBuilder.BuildTableCountCommand(this._connection, TableName); // Assert Assert.Equal("SELECT count(*) FROM sqlite_master WHERE type='table' AND name=@tableName;", command.CommandText); @@ -53,7 +52,7 @@ public void ItBuildsCreateTableCommand(bool ifNotExists) }; // Act - var command = this._commandBuilder.BuildCreateTableCommand(TableName, columns, ifNotExists); + var command = SqliteVectorStoreCollectionCommandBuilder.BuildCreateTableCommand(this._connection, TableName, columns, ifNotExists); // Assert Assert.Contains("CREATE TABLE", command.CommandText); @@ -81,7 +80,7 @@ public void ItBuildsCreateVirtualTableCommand(bool ifNotExists) }; // Act - var command = this._commandBuilder.BuildCreateVirtualTableCommand(TableName, columns, ifNotExists, ExtensionName); + var command = SqliteVectorStoreCollectionCommandBuilder.BuildCreateVirtualTableCommand(this._connection, TableName, columns, ifNotExists, ExtensionName); // Assert Assert.Contains("CREATE VIRTUAL TABLE", command.CommandText); @@ -101,7 +100,7 @@ public void ItBuildsDropTableCommand() const string TableName = "TestTable"; // Act - var command = this._commandBuilder.BuildDropTableCommand(TableName); + var command = SqliteVectorStoreCollectionCommandBuilder.BuildDropTableCommand(this._connection, TableName); // Assert Assert.Equal("DROP TABLE [TestTable];", command.CommandText); @@ -124,7 +123,8 @@ public void ItBuildsInsertCommand(bool replaceIfExists) }; // Act - var command = this._commandBuilder.BuildInsertCommand( + var command = SqliteVectorStoreCollectionCommandBuilder.BuildInsertCommand( + this._connection, TableName, RowIdentifier, columnNames, @@ -181,7 +181,7 @@ public void ItBuildsSelectCommand(string? orderByPropertyName) }; // Act - var command = this._commandBuilder.BuildSelectCommand(TableName, columnNames, conditions, orderByPropertyName); + var command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectCommand(this._connection, TableName, columnNames, conditions, orderByPropertyName); // Assert Assert.Contains("SELECT Id, Name, Age, Address", command.CommandText); @@ -226,7 +226,8 @@ public void ItBuildsSelectLeftJoinCommand(string? orderByPropertyName) }; // Act - var command = this._commandBuilder.BuildSelectLeftJoinCommand( + var command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectLeftJoinCommand( + this._connection, LeftTable, RightTable, JoinColumnName, @@ -274,7 +275,7 @@ public void ItBuildsDeleteCommand() }; // Act - var command = this._commandBuilder.BuildDeleteCommand(TableName, conditions); + var command = SqliteVectorStoreCollectionCommandBuilder.BuildDeleteCommand(this._connection, TableName, conditions); // Assert Assert.Contains("DELETE FROM [TestTable]", command.CommandText); diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordCollectionTests.cs index 631bf6cebf3d..59cc3c3401e4 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordCollectionTests.cs @@ -1,5 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. 
+// TODO: Reimplement these as integration tests, #10464 + +#if DISABLED + using System; using System.Collections.Generic; using System.Data.Common; @@ -400,3 +404,5 @@ private sealed class TestRecordWithoutVectorProperty #endregion } + +#endif diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreTests.cs index 44180405aaa3..74b27b4ef046 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreTests.cs @@ -1,5 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. +// TODO: Reimplement these as integration tests, #10464 + +#if DISABLED + using System; using System.Data.Common; using System.Linq; @@ -104,3 +108,5 @@ public async Task ListCollectionNamesReturnsCollectionNamesAsync() Assert.Contains("collection2", collections); } } + +#endif diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs index 2e3e6b32fe52..bfded601d8ec 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs @@ -69,29 +69,6 @@ public void AddVectorStoreRecordCollectionWithNumericKeyAndSqliteConnectionRegis Assert.IsType>(vectorizedSearch); } - [Fact(Skip = SkipReason)] - public void ItClosesConnectionWhenDIServiceIsDisposed() - { - // Act - using var connection = new SqliteConnection("Data Source=:memory:"); - - this._serviceCollection.AddTransient(_ => connection); - - this._serviceCollection.AddSqliteVectorStore(); - - var serviceProvider = this._serviceCollection.BuildServiceProvider(); - - using (var scope = serviceProvider.CreateScope()) - { - scope.ServiceProvider.GetRequiredService(); - - Assert.Equal(ConnectionState.Open, connection.State); - } - - // Assert - Assert.Equal(ConnectionState.Closed, connection.State); - } - #region private #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs index 6f07f20ddf67..c3a702c5a7c0 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.IO; using System.Threading.Tasks; using Microsoft.Data.Sqlite; using Microsoft.SemanticKernel.Connectors.Sqlite; @@ -8,43 +9,22 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Sqlite; -public class SqliteVectorStoreFixture : IAsyncLifetime, IDisposable +public class SqliteVectorStoreFixture : IDisposable { - /// - /// SQLite extension name for vector search. - /// More information here: . 
- /// - private const string VectorSearchExtensionName = "vec0"; + private readonly string _databasePath = Path.GetTempFileName(); - public SqliteConnection Connection { get; } - - public SqliteVectorStoreFixture() - { - this.Connection = new SqliteConnection("Data Source=:memory:"); - } + public string ConnectionString => $"Data Source={this._databasePath}"; public SqliteVectorStoreRecordCollection GetCollection( string collectionName, SqliteVectorStoreRecordCollectionOptions? options = default) { return new SqliteVectorStoreRecordCollection( - this.Connection, + this.ConnectionString, collectionName, options); } - public Task DisposeAsync() - { - return Task.CompletedTask; - } - - public async Task InitializeAsync() - { - await this.Connection.OpenAsync(); - - this.Connection.LoadExtension(VectorSearchExtensionName); - } - public void Dispose() { this.Dispose(true); @@ -55,7 +35,7 @@ protected virtual void Dispose(bool disposing) { if (disposing) { - this.Connection.Dispose(); + File.Delete(this._databasePath); } } } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs index f799fd26eaa8..76e05b71d0d9 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs @@ -4,6 +4,7 @@ using System.Linq; using System.Runtime.InteropServices; using System.Threading.Tasks; +using Microsoft.Data.Sqlite; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Sqlite; using Xunit; @@ -245,7 +246,9 @@ public async Task ItCanGetExistingRecordAsync(bool includeVectors) var record = CreateTestHotel(HotelId); - var commandData = fixture.Connection.CreateCommand(); + using var connection = new SqliteConnection(fixture.ConnectionString); + await connection.OpenAsync(); + var commandData = connection.CreateCommand(); commandData.CommandText = $"INSERT INTO {collectionName} " + @@ -262,7 +265,7 @@ public async Task ItCanGetExistingRecordAsync(bool includeVectors) if (includeVectors) { - var commandVector = fixture.Connection.CreateCommand(); + var commandVector = connection.CreateCommand(); commandVector.CommandText = $"INSERT INTO vec_{collectionName} " + diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreTests.cs index 8a173250f7fe..6eca22778b02 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreTests.cs @@ -17,7 +17,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Sqlite; [Collection("SqliteVectorStoreCollection")] [DisableVectorStoreTests(Skip = "SQLite vector search extension is required")] public sealed class SqliteVectorStoreTests(SqliteVectorStoreFixture fixture) - : BaseVectorStoreTests>(new SqliteVectorStore(fixture.Connection!)) + : BaseVectorStoreTests>(new SqliteVectorStore(fixture.ConnectionString)) { [VectorStoreFact] public async Task ItCanGetAListOfExistingCollectionNamesWhenRegisteredWithDIAsync() @@ -25,7 +25,7 @@ public async Task ItCanGetAListOfExistingCollectionNamesWhenRegisteredWithDIAsyn // Arrange var serviceCollection = new ServiceCollection(); - serviceCollection.AddSqliteVectorStore(connectionString: "Data 
Source=:memory:"); + serviceCollection.AddSqliteVectorStore(fixture.ConnectionString); var provider = serviceCollection.BuildServiceProvider(); diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs index 526eeac3b2d8..9b025c66610f 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs @@ -1,21 +1,16 @@ // Copyright (c) Microsoft. All rights reserved. -using Microsoft.Data.Sqlite; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Sqlite; using VectorDataSpecificationTests.Support; namespace SqliteIntegrationTests.Support; -#pragma warning disable CA1001 // Type owns disposable fields (_connection) but is not disposable - internal sealed class SqliteTestStore : TestStore { - public static SqliteTestStore Instance { get; } = new(); + private string? _databasePath; - private SqliteConnection? _connection; - public SqliteConnection Connection - => this._connection ?? throw new InvalidOperationException("Call InitializeAsync() first"); + public static SqliteTestStore Instance { get; } = new(); private SqliteVectorStore? _defaultVectorStore; public override IVectorStore DefaultVectorStore @@ -25,31 +20,17 @@ private SqliteTestStore() { } - protected override async Task StartAsync() + protected override Task StartAsync() { - this._connection = new SqliteConnection("Data Source=:memory:"); - - await this.Connection.OpenAsync(); - - if (!SqliteTestEnvironment.TryLoadSqliteVec(this.Connection)) - { - this.Connection.Dispose(); - - // Note that we ignore sqlite_vec loading failures; the tests are decorated with [SqliteVecRequired], which causes - // them to be skipped if sqlite_vec isn't installed (better than an exception triggering failure here) - } - - this._defaultVectorStore = new SqliteVectorStore(this.Connection); + this._databasePath = Path.GetTempFileName(); + this._defaultVectorStore = new SqliteVectorStore($"Data Source={this._databasePath}"); + return Task.CompletedTask; } -#if NET8_0_OR_GREATER - protected override async Task StopAsync() - => await this.Connection.DisposeAsync(); -#else protected override Task StopAsync() { - this.Connection.Dispose(); + File.Delete(this._databasePath!); + this._databasePath = null; return Task.CompletedTask; } -#endif } From 89aab09362ca0501d17b5b5dc40d9fba165f28c5 Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Mon, 17 Mar 2025 15:53:05 +0100 Subject: [PATCH 16/63] .Net: [TINY] Document the default thread-safety expectation for the MEVD abstractions (#10991) Closes #10454 --- .../VectorData.Abstractions/VectorStorage/IVectorStore.cs | 3 ++- .../VectorStorage/IVectorStoreRecordCollection.cs | 3 +++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs index 007dcf79da03..a3ac5466323c 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs @@ -9,7 +9,8 @@ namespace Microsoft.Extensions.VectorData; /// Defines an interface for accessing the list of collections in a vector store. 
/// /// -/// This interface can be used with collections of any schema type, but requires you to provide schema information when getting a collection. +/// This interface can be used with collections of any schema type, but requires you to provide schema information when getting a collection. +/// Unless otherwise documented, implementations of this interface can be expected to be thread-safe, and can be used concurrently from multiple threads. /// public interface IVectorStore { diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs index b8e410d4afd5..f891dcba26b0 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs @@ -11,6 +11,9 @@ namespace Microsoft.Extensions.VectorData; /// /// The data type of the record key. /// The record data model to use for adding, updating, and retrieving data from the store. +/// +/// Unless otherwise documented, implementations of this interface can be expected to be thread-safe, and can be used concurrently from multiple threads. +/// #pragma warning disable CA1711 // Identifiers should not have incorrect suffix public interface IVectorStoreRecordCollection : IVectorizedSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix From e896415b77b04435dc5287cd4a1ef242f40d9454 Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Wed, 19 Mar 2025 16:09:35 +0100 Subject: [PATCH 17/63] .Net: Obsolete the IMemoryStore where an IVectorStore implementation exists (#11040) Closes #10808 --- .../HuggingFace_TextEmbeddingCustomHttpHandler.cs | 1 + .../Concepts/Memory/SemanticTextMemory_Building.cs | 1 + .../Memory/TextMemoryPlugin_MultipleMemoryStore.cs | 1 + ...ectorStore_ConsumeFromMemoryStore_AzureAISearch.cs | 1 + .../VectorStore_ConsumeFromMemoryStore_Common.cs | 1 + .../VectorStore_ConsumeFromMemoryStore_Qdrant.cs | 1 + .../VectorStore_ConsumeFromMemoryStore_Redis.cs | 1 + .../VectorStore_MigrateFromMemoryStore_Redis.cs | 1 + .../AzureAISearchMemoryStoreTests.cs | 1 + .../AzureAISearchMemoryRecord.cs | 5 +++-- .../AzureAISearchMemoryStore.cs | 5 +++-- .../AzureCosmosDBMongoDBConfig.cs | 4 ++-- .../AzureCosmosDBMongoDBMemoryRecord.cs | 5 +++-- .../AzureCosmosDBMongoDBMemoryRecordMetadata.cs | 6 ++++-- .../AzureCosmosDBMongoDBMemoryStore.cs | 5 +++-- .../AzureCosmosDBSimilarityType.cs | 6 +++--- .../AzureCosmosDBVectorSearchType.cs | 6 +++--- .../AzureCosmosDBNoSQLMemoryStore.cs | 11 ++++++----- .../Connectors.Memory.MongoDB/MongoDBMemoryEntry.cs | 5 +++-- .../MongoDBMemoryRecordMetadata.cs | 6 ++++-- .../Connectors.Memory.MongoDB/MongoDBMemoryStore.cs | 5 +++-- .../Http/ApiSchema/ConfigureIndexRequest.cs | 4 ++-- .../Http/ApiSchema/DeleteIndexRequest.cs | 4 ++-- .../Http/ApiSchema/DeleteRequest.cs | 4 ++-- .../Http/ApiSchema/DescribeIndexRequest.cs | 4 ++-- .../Http/ApiSchema/DescribeIndexStatsRequest.cs | 4 ++-- .../Http/ApiSchema/FetchRequest.cs | 4 ++-- .../Http/ApiSchema/FetchResponse.cs | 4 ++-- .../Http/ApiSchema/ListIndexesRequest.cs | 4 ++-- .../Http/ApiSchema/QueryRequest.cs | 3 +-- .../Http/ApiSchema/QueryResponse.cs | 4 ++-- .../Http/ApiSchema/UpdateVectorRequest.cs | 3 +-- .../Http/ApiSchema/UpsertRequest.cs | 4 ++-- .../Http/ApiSchema/UpsertResponse.cs | 4 ++-- .../Connectors.Memory.Pinecone/IPineconeClient.cs | 3 +-- 
.../IPineconeMemoryStore.cs | 5 +++-- .../Model/IndexDefinition.cs | 4 ++-- .../Model/IndexMetadataConfig.cs | 4 ++-- .../Connectors.Memory.Pinecone/Model/IndexMetric.cs | 4 ++-- .../Model/IndexNamespaceStats.cs | 4 ++-- .../Connectors.Memory.Pinecone/Model/IndexState.cs | 4 ++-- .../Connectors.Memory.Pinecone/Model/IndexStats.cs | 4 ++-- .../Connectors.Memory.Pinecone/Model/IndexStatus.cs | 4 ++-- .../Connectors.Memory.Pinecone/Model/OperationType.cs | 4 ++-- .../Connectors.Memory.Pinecone/Model/PineconeIndex.cs | 4 ++-- .../Connectors.Memory.Pinecone/Model/PodType.cs | 5 ++--- .../Connectors.Memory.Pinecone/Model/Query.cs | 3 +-- .../Model/SparseVectorData.cs | 3 +-- .../Connectors.Memory.Pinecone/PineconeClient.cs | 3 +-- .../Connectors.Memory.Pinecone/PineconeDocument.cs | 3 +-- .../PineconeDocumentExtensions.cs | 5 +++-- .../PineconeMemoryBuilderExtensions.cs | 6 ++++-- .../Connectors.Memory.Pinecone/PineconeMemoryStore.cs | 5 +++-- .../Connectors.Memory.Pinecone/PineconeUtils.cs | 3 +-- .../Connectors.Memory.Postgres/IPostgresDbClient.cs | 5 +++-- .../Connectors.Memory.Postgres/PostgresDbClient.cs | 3 +-- .../PostgresMemoryBuilderExtensions.cs | 6 ++++-- .../Connectors.Memory.Postgres/PostgresMemoryEntry.cs | 3 +-- .../Connectors.Memory.Postgres/PostgresMemoryStore.cs | 5 +++-- .../Http/ApiSchema/CreateCollectionRequest.cs | 3 +-- .../Http/ApiSchema/DeleteCollectionRequest.cs | 4 ++-- .../Http/ApiSchema/DeleteVectorsRequest.cs | 4 ++-- .../Http/ApiSchema/DeleteVectorsResponse.cs | 4 ++-- .../Http/ApiSchema/GetCollectionRequest.cs | 4 ++-- .../Http/ApiSchema/GetVectorsRequest.cs | 4 ++-- .../Http/ApiSchema/GetVectorsResponse.cs | 3 +-- .../Http/ApiSchema/ListCollectionsRequest.cs | 4 ++-- .../Http/ApiSchema/ListCollectionsResponse.cs | 4 ++-- .../Http/ApiSchema/NumberToStringConverter.cs | 3 +-- .../Http/ApiSchema/QdrantResponse.cs | 4 ++-- .../Http/ApiSchema/SearchVectorsRequest.cs | 3 +-- .../Http/ApiSchema/SearchVectorsResponse.cs | 3 +-- .../Http/ApiSchema/UpsertVectorRequest.cs | 3 +-- .../Http/ApiSchema/UpsertVectorResponse.cs | 4 ++-- .../Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs | 3 +-- .../Connectors.Memory.Qdrant/QdrantDistanceType.cs | 4 ++-- .../QdrantMemoryBuilderExtensions.cs | 6 ++++-- .../Connectors.Memory.Qdrant/QdrantMemoryStore.cs | 5 +++-- .../Connectors.Memory.Qdrant/QdrantVectorDbClient.cs | 3 +-- .../Connectors.Memory.Qdrant/QdrantVectorRecord.cs | 3 +-- .../Connectors.Memory.Redis/RedisMemoryStore.cs | 5 +++-- .../RedisVectorDistanceMetric.cs | 4 ++-- .../Connectors.Memory.SqlServer/ISqlServerClient.cs | 3 +-- .../Connectors.Memory.SqlServer/SqlServerClient.cs | 2 +- .../SqlServerMemoryBuilderExtensions.cs | 6 ++++-- .../SqlServerMemoryEntry.cs | 3 +-- .../SqlServerMemoryStore.cs | 5 +++-- .../Connectors/Connectors.Memory.Sqlite/Database.cs | 6 +++--- .../Connectors.Memory.Sqlite/SqliteMemoryStore.cs | 5 +++-- .../Http/ApiSchema/BatchRequest.cs | 6 ++++-- .../Http/ApiSchema/BatchResponse.cs | 3 +-- .../Http/ApiSchema/CreateClassSchemaRequest.cs | 4 ++-- .../Http/ApiSchema/CreateClassSchemaResponse.cs | 4 ++-- .../Http/ApiSchema/CreateGraphRequest.cs | 3 +-- .../Http/ApiSchema/DeleteObjectRequest.cs | 4 ++-- .../Http/ApiSchema/DeleteSchemaRequest.cs | 4 ++-- .../Http/ApiSchema/GetClassRequest.cs | 4 ++-- .../Http/ApiSchema/GetClassResponse.cs | 4 ++-- .../Http/ApiSchema/GetObjectRequest.cs | 4 ++-- .../Http/ApiSchema/GetSchemaRequest.cs | 4 ++-- .../Http/ApiSchema/GetSchemaResponse.cs | 4 ++-- .../Http/ApiSchema/GraphResponse.cs | 4 ++-- 
.../Http/ApiSchema/ObjectResponseResult.cs | 4 ++-- .../JsonConverter/UnixSecondsDateTimeJsonConverter.cs | 3 +-- .../Connectors.Memory.Weaviate/Model/Deprecation.cs | 3 +-- .../Connectors.Memory.Weaviate/Model/GraphError.cs | 4 ++-- .../Model/GraphErrorLocationsItems.cs | 4 ++-- .../Connectors.Memory.Weaviate/Model/Property.cs | 4 ++-- .../Model/WeaviateObject.cs | 3 +-- .../WeaviateMemoryBuilderExtensions.cs | 6 ++++-- .../Connectors.Memory.Weaviate/WeaviateMemoryStore.cs | 5 +++-- .../PineconeMemoryStoreTests.cs | 1 + .../PineconeUtilsTests.cs | 1 + .../QdrantMemoryBuilderExtensionsTests.cs | 1 + .../QdrantMemoryStoreTests.cs | 1 + .../QdrantMemoryStoreTests2.cs | 1 + .../QdrantMemoryStoreTests3.cs | 1 + .../QdrantVectorDbClientTests.cs | 1 + .../RedisMemoryStoreTests.cs | 1 + .../SqliteMemoryStoreTests.cs | 1 + .../Memory/MongoDB/MongoDBMemoryStoreTests.cs | 1 + .../Memory/Postgres/PostgresMemoryStoreTests.cs | 1 + .../Memory/Sqlite/SqliteMemoryStoreTests.cs | 1 + .../WeaviateMemoryBuilderExtensionsTests.cs | 1 + .../WeaviateMemoryStoreTests.cs | 1 + .../AzureCosmosDBMongoDBMemoryStoreTests.cs | 1 + .../AzureCosmosDBMongoDBMemoryStoreTestsFixture.cs | 1 + .../AzureCosmosDBNoSQLMemoryStoreTests.cs | 1 + .../AzureCosmosDBNoSQLMemoryStoreTestsFixture.cs | 1 + .../Memory/MongoDB/MongoDBMemoryStoreTests.cs | 1 + .../Memory/MongoDB/MongoDBMemoryStoreTestsFixture.cs | 1 + .../Memory/Postgres/PostgresMemoryStoreTests.cs | 1 + .../Memory/Weaviate/WeaviateMemoryStoreTests.cs | 1 + .../SqlServerMemoryStoreTests.cs | 3 ++- .../Support/SqlServerTestEnvironment.cs | 2 +- 135 files changed, 250 insertions(+), 212 deletions(-) diff --git a/dotnet/samples/Concepts/Memory/HuggingFace_TextEmbeddingCustomHttpHandler.cs b/dotnet/samples/Concepts/Memory/HuggingFace_TextEmbeddingCustomHttpHandler.cs index 744274d4c527..d9ebbf568c3a 100644 --- a/dotnet/samples/Concepts/Memory/HuggingFace_TextEmbeddingCustomHttpHandler.cs +++ b/dotnet/samples/Concepts/Memory/HuggingFace_TextEmbeddingCustomHttpHandler.cs @@ -15,6 +15,7 @@ namespace Memory; /// For example, the cointegrated/LaBSE-en-ru model returns results as a 1 * 1 * 4 * 768 matrix, which is different from Hugging Face embedding generation service implementation. /// To address this, a custom can be used to modify the response before sending it back. /// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class HuggingFace_TextEmbeddingCustomHttpHandler(ITestOutputHelper output) : BaseTest(output) { public async Task RunInferenceApiEmbeddingCustomHttpHandlerAsync() diff --git a/dotnet/samples/Concepts/Memory/SemanticTextMemory_Building.cs b/dotnet/samples/Concepts/Memory/SemanticTextMemory_Building.cs index 72cb44af516a..f4c1ea45407b 100644 --- a/dotnet/samples/Concepts/Memory/SemanticTextMemory_Building.cs +++ b/dotnet/samples/Concepts/Memory/SemanticTextMemory_Building.cs @@ -14,6 +14,7 @@ namespace Memory; * Semantic Memory allows to store your data like traditional DBs, * adding the ability to query it using natural language. 
*/ +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class SemanticTextMemory_Building(ITestOutputHelper output) : BaseTest(output) { private const string MemoryCollectionName = "SKGitHub"; diff --git a/dotnet/samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs b/dotnet/samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs index 0c0f4da85bff..c7b9d2cb40ee 100644 --- a/dotnet/samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs +++ b/dotnet/samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs @@ -20,6 +20,7 @@ namespace Memory; +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class TextMemoryPlugin_MultipleMemoryStore(ITestOutputHelper output) : BaseTest(output) { private const string MemoryCollectionName = "aboutMe"; diff --git a/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_AzureAISearch.cs b/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_AzureAISearch.cs index da4ae9cf7a76..12ce70374a0b 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_AzureAISearch.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_AzureAISearch.cs @@ -26,6 +26,7 @@ namespace Memory; /// dotnet user-secrets set "AzureAISearch:Endpoint" "https://myazureaisearchinstance.search.windows.net" /// dotnet user-secrets set "AzureAISearch:ApiKey" "samplesecret" /// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class VectorStore_ConsumeFromMemoryStore_AzureAISearch(ITestOutputHelper output, VectorStoreQdrantContainerFixture qdrantFixture) : BaseTest(output), IClassFixture { private const int VectorSize = 1536; diff --git a/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_Common.cs b/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_Common.cs index 772327889f49..50782b075af6 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_Common.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_Common.cs @@ -19,6 +19,7 @@ namespace Memory; /// /// /// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public static class VectorStore_ConsumeFromMemoryStore_Common { public static async Task CreateCollectionAndAddSampleDataAsync(IMemoryStore memoryStore, string collectionName, ITextEmbeddingGenerationService textEmbeddingService) diff --git a/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_Qdrant.cs b/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_Qdrant.cs index 1f21c404e312..00b85bb6b494 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_Qdrant.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_Qdrant.cs @@ -23,6 +23,7 @@ namespace Memory; /// To run this sample, you need a local instance of Docker running, since the associated fixture /// will try and start a Qdrant container in the local docker instance to run against. 
/// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class VectorStore_ConsumeFromMemoryStore_Qdrant(ITestOutputHelper output, VectorStoreQdrantContainerFixture qdrantFixture) : BaseTest(output), IClassFixture { private const int VectorSize = 1536; diff --git a/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_Redis.cs b/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_Redis.cs index 91ecae46c124..669f5d2dfa7e 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_Redis.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_Redis.cs @@ -22,6 +22,7 @@ namespace Memory; /// To run this sample, you need a local instance of Docker running, since the associated fixture /// will try and start a Redis container in the local docker instance to run against. /// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class VectorStore_ConsumeFromMemoryStore_Redis(ITestOutputHelper output, VectorStoreRedisContainerFixture redisFixture) : BaseTest(output), IClassFixture { private const int VectorSize = 1536; diff --git a/dotnet/samples/Concepts/Memory/VectorStore_MigrateFromMemoryStore_Redis.cs b/dotnet/samples/Concepts/Memory/VectorStore_MigrateFromMemoryStore_Redis.cs index c5ee0d648d91..2a0aea7e47cf 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_MigrateFromMemoryStore_Redis.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_MigrateFromMemoryStore_Redis.cs @@ -29,6 +29,7 @@ namespace Memory; /// /// To run this sample, you need a local instance of Docker running, since the associated fixture will try and start a Redis container in the local docker instance to run against. /// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class VectorStore_MigrateFromMemoryStore_Redis(ITestOutputHelper output, VectorStoreRedisContainerFixture redisFixture) : BaseTest(output), IClassFixture { private const int VectorSize = 1536; diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchMemoryStoreTests.cs index 0ebda1fc706e..95b0801d23ad 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchMemoryStoreTests.cs @@ -21,6 +21,7 @@ namespace SemanticKernel.Connectors.UnitTests.Memory.AzureAISearch; /// /// Unit tests for class. /// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public sealed class AzureAISearchMemoryStoreTests { private readonly Mock _mockSearchIndexClient = new(); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryRecord.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryRecord.cs index 88a57a2ed4a0..49da4d8e1a4b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryRecord.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryRecord.cs @@ -1,18 +1,19 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Diagnostics.CodeAnalysis; using System.Text; using System.Text.Json.Serialization; using Microsoft.SemanticKernel.Memory; namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// Azure AI Search record and index definition. 
/// Note: once defined, index cannot be modified. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being phased out, use Microsoft.Extensions.VectorData and AzureAISearchVectorStore")] internal sealed class AzureAISearchMemoryRecord { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryStore.cs index 3dab67ba52b4..eae807bb53a2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryStore.cs @@ -3,7 +3,6 @@ using System; using System.Collections.Concurrent; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; @@ -21,10 +20,12 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// is a memory store implementation using Azure AI Search. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being phased out, use Microsoft.Extensions.VectorData and AzureAISearchVectorStore")] public partial class AzureAISearchMemoryStore : IMemoryStore { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBConfig.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBConfig.cs index 7cb62b601075..289eb2ff4240 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBConfig.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBConfig.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using Microsoft.SemanticKernel.Http; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; @@ -12,7 +12,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; /// /// Initialize the with default values. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being phased out, use Microsoft.Extensions.VectorData and AzureMongoDBMongoDBVectorStore")] public class AzureCosmosDBMongoDBConfig(int dimensions) { private const string DefaultIndexName = "default_index"; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryRecord.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryRecord.cs index d9e181d95e5e..4bd5ac65a802 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryRecord.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryRecord.cs @@ -1,7 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Diagnostics.CodeAnalysis; using System.Linq; using Microsoft.SemanticKernel.Memory; using MongoDB.Bson; @@ -10,10 +9,12 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// A MongoDB memory record. 
/// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being phased out, use Microsoft.Extensions.VectorData and AzureMongoDBMongoDBVectorStore")] internal sealed class AzureCosmosDBMongoDBMemoryRecord { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryRecordMetadata.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryRecordMetadata.cs index a614ad0d8c87..095d8f62f17a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryRecordMetadata.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryRecordMetadata.cs @@ -1,16 +1,18 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using Microsoft.SemanticKernel.Memory; using MongoDB.Bson.Serialization.Attributes; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// A MongoDB memory record metadata. /// #pragma warning disable CA1815 // Override equals and operator equals on value types -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being phased out, use Microsoft.Extensions.VectorData and AzureMongoDBMongoDBVectorStore")] internal struct AzureCosmosDBMongoDBMemoryRecordMetadata #pragma warning restore CA1815 // Override equals and operator equals on value types { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStore.cs index e91048e780d6..931c7fe3d792 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStore.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Linq; using System.Runtime.CompilerServices; @@ -14,11 +13,13 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// An implementation of backed by a Azure CosmosDB Mongo vCore database. /// Get more details about Azure Cosmos Mongo vCore vector search https://learn.microsoft.com/en-us/azure/cosmos-db/mongodb/vcore/vector-search /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being phased out, use Microsoft.Extensions.VectorData and AzureMongoDBMongoDBVectorStore")] public class AzureCosmosDBMongoDBMemoryStore : IMemoryStore, IDisposable { private readonly MongoClient _mongoClient; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBSimilarityType.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBSimilarityType.cs index d6ae10c7bbb8..72cf0b2774ee 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBSimilarityType.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBSimilarityType.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Diagnostics.CodeAnalysis; +using System; using System.Reflection; using MongoDB.Bson; using MongoDB.Bson.Serialization.Attributes; @@ -11,7 +11,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; /// /// Similarity metric to use with the index. Possible options are COS (cosine distance), L2 (Euclidean distance), and IP (inner product). /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being phased out, use Microsoft.Extensions.VectorData and AzureMongoDBMongoDBVectorStore")] public enum AzureCosmosDBSimilarityType { /// @@ -33,7 +33,7 @@ public enum AzureCosmosDBSimilarityType Euclidean } -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being phased out, use Microsoft.Extensions.VectorData and AzureMongoDBMongoDBVectorStore")] internal static class AzureCosmosDBSimilarityTypeExtensions { public static string GetCustomName(this AzureCosmosDBSimilarityType type) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBVectorSearchType.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBVectorSearchType.cs index 0bd827257304..a07fe1aebabc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBVectorSearchType.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBVectorSearchType.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Reflection; using MongoDB.Bson.Serialization.Attributes; @@ -10,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; /// /// Type of vector index to create. The options are vector-ivf and vector-hnsw. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being phased out, use Microsoft.Extensions.VectorData and AzureMongoDBMongoDBVectorStore")] public enum AzureCosmosDBVectorSearchType { /// @@ -26,7 +26,7 @@ public enum AzureCosmosDBVectorSearchType VectorHNSW } -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being phased out, use Microsoft.Extensions.VectorData and AzureMongoDBMongoDBVectorStore")] internal static class AzureCosmosDBVectorSearchTypeExtensions { public static string GetCustomName(this AzureCosmosDBVectorSearchType type) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStore.cs index ab898fad6c13..08a2948e8cff 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStore.cs @@ -4,7 +4,6 @@ using System.Collections.Generic; using System.Collections.ObjectModel; using System.Diagnostics; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Runtime.CompilerServices; using System.Text; @@ -18,11 +17,13 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// An implementation of backed by a Azure Cosmos DB database. 
/// Get more details about Azure Cosmos DB vector search https://learn.microsoft.com/en-us/azure/cosmos-db/ /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and AzureCosmosDBNoSQLVectorStore")] public class AzureCosmosDBNoSQLMemoryStore : IMemoryStore, IDisposable { private const string EmbeddingPath = "/embedding"; @@ -289,7 +290,7 @@ public async IAsyncEnumerable GetBatchAsync( var queryStart = $""" SELECT x.id,x.key,x.metadata,x.timestamp{(withEmbeddings ? ",x.embedding" : "")} FROM x - WHERE + WHERE """; // NOTE: Cosmos DB queries are limited to 512kB, so we'll break this into chunks // of around 500kB. We don't go all the way to 512kB so that we don't have to @@ -446,7 +447,7 @@ protected virtual void Dispose(bool disposing) /// [DebuggerDisplay("{GetDebuggerDisplay()}")] #pragma warning disable CA1812 // 'MemoryRecordWithSimilarityScore' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). (https://learn.microsoft.com/dotnet/fundamentals/code-analysis/quality-rules/ca1812) -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and AzureCosmosDBNoSQLVectorStore")] internal sealed class MemoryRecordWithSimilarityScore( #pragma warning restore CA1812 MemoryRecordMetadata metadata, @@ -468,7 +469,7 @@ private string GetDebuggerDisplay() /// /// Creates a new record that also serializes an "id" property. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and AzureCosmosDBNoSQLVectorStore")] [DebuggerDisplay("{GetDebuggerDisplay()}")] internal sealed class MemoryRecordWithId : MemoryRecord { diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryEntry.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryEntry.cs index ef2c88da699c..066424dc1d83 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryEntry.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryEntry.cs @@ -1,17 +1,18 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Diagnostics.CodeAnalysis; using Microsoft.SemanticKernel.Memory; using MongoDB.Bson; using MongoDB.Bson.Serialization.Attributes; namespace Microsoft.SemanticKernel.Connectors.MongoDB; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// A MongoDB memory entry. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and MongoDBVectorStore")] public sealed class MongoDBMemoryEntry { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryRecordMetadata.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryRecordMetadata.cs index cf7e4f7894d5..f7a3f5b55548 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryRecordMetadata.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryRecordMetadata.cs @@ -1,16 +1,18 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Diagnostics.CodeAnalysis; +using System; using Microsoft.SemanticKernel.Memory; using MongoDB.Bson.Serialization.Attributes; namespace Microsoft.SemanticKernel.Connectors.MongoDB; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// A MongoDB record metadata. /// #pragma warning disable CA1815 // Override equals and operator equals on value types -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and MongoDBVectorStore")] public struct MongoDBMemoryRecordMetadata #pragma warning restore CA1815 // Override equals and operator equals on value types { diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryStore.cs index 3e81afd0efde..72bfe4deead2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryStore.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; @@ -12,10 +11,12 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// An implementation of backed by a MongoDB database. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and MongoDBVectorStore")] public class MongoDBMemoryStore : IMemoryStore, IDisposable { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ConfigureIndexRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ConfigureIndexRequest.cs index cf6278a4154f..7a2a0c8a5f2c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ConfigureIndexRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ConfigureIndexRequest.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; using System.Text.Json.Serialization; @@ -10,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// This operation specifies the pod type and number of replicas for an index. /// See https://docs.pinecone.io/reference/configure_index /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] internal sealed class ConfigureIndexRequest { public string IndexName { get; set; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteIndexRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteIndexRequest.cs index 63746ca62f88..e01c8e77e4b4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteIndexRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteIndexRequest.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -9,7 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// Deletes an index and all its data. 
/// See https://docs.pinecone.io/reference/delete_index /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] internal sealed class DeleteIndexRequest { public static DeleteIndexRequest Create(string indexName) diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteRequest.cs index 3b45cbddbdec..82149dedd2cc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteRequest.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Net.Http; using System.Text; @@ -13,7 +13,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// DeleteRequest /// See https://docs.pinecone.io/reference/delete_post /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] internal sealed class DeleteRequest { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexRequest.cs index 9955adebb078..fa49d7c5cf8c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexRequest.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -9,7 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// Get information about an index. /// See https://docs.pinecone.io/reference/describe_index /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] internal sealed class DescribeIndexRequest { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexStatsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexStatsRequest.cs index 90481a5d8129..184c6f9914bf 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexStatsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexStatsRequest.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. 
+using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Net.Http; using System.Text.Json.Serialization; @@ -11,7 +11,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// DescribeIndexStatsRequest /// See https://docs.pinecone.io/reference/describe_index_stats_post /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] internal sealed class DescribeIndexStatsRequest { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchRequest.cs index da10d86c7d3c..caf1d1fb7c20 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchRequest.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Net.Http; using System.Text.Json.Serialization; @@ -12,7 +12,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// FetchRequest /// See https://docs.pinecone.io/reference/fetch /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] internal sealed class FetchRequest { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchResponse.cs index afa2534e65d8..f740ea341b81 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchResponse.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Text.Json.Serialization; @@ -13,7 +13,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// FetchResponse /// See https://docs.pinecone.io/reference/fetch /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] internal sealed class FetchResponse { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ListIndexesRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ListIndexesRequest.cs index 77919ddcf15a..9ed2d9bd0dc3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ListIndexesRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ListIndexesRequest.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -9,7 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// ListIndexesRequest /// See https://docs.pinecone.io/reference/list_indexes /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] internal sealed class ListIndexesRequest { public static ListIndexesRequest Create() diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryRequest.cs index a75309d2c266..cadffd29c5c0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryRequest.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Net.Http; using System.Text.Json.Serialization; @@ -12,7 +11,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// QueryRequest /// See https://docs.pinecone.io/reference/query /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] internal sealed class QueryRequest { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryResponse.cs index f7ede69bccad..5baee10108b6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryResponse.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -12,7 +12,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// QueryResponse /// See https://docs.pinecone.io/reference/query /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] internal sealed class QueryResponse { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpdateVectorRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpdateVectorRequest.cs index a8988b01f7eb..b3d2d8e124b5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpdateVectorRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpdateVectorRequest.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Net.Http; using System.Text.Json.Serialization; @@ -14,7 +13,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// If a set_metadata is included, the values of the fields specified in it will be added or overwrite the previous value. 
/// See https://docs.pinecone.io/reference/update /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] internal sealed class UpdateVectorRequest { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertRequest.cs index f4a407d0ef66..8d4ad95213c3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertRequest.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Net.Http; using System.Text.Json.Serialization; @@ -11,7 +11,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// UpsertRequest /// See https://docs.pinecone.io/reference/upsert /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] internal sealed class UpsertRequest { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertResponse.cs index 616e2746b5c8..066075b42883 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertResponse.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -11,7 +11,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// UpsertResponse /// See https://docs.pinecone.io/reference/upsert /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] internal sealed class UpsertResponse { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeClient.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeClient.cs index d9642325b51c..1375353de9f0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeClient.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; @@ -11,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Interface for a Pinecone client /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public interface IPineconeClient { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs index c23c52b68760..b17b85fec853 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs @@ -2,18 +2,19 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; using Microsoft.SemanticKernel.Memory; namespace 
Microsoft.SemanticKernel.Connectors.Pinecone; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// Interface for Pinecone memory store that extends the memory store interface /// to add support for namespaces /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public interface IPineconeMemoryStore : IMemoryStore { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexDefinition.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexDefinition.cs index b6924f0a3ea3..de0094d1b8b7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexDefinition.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexDefinition.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; using System.Text; using System.Text.Json.Serialization; @@ -11,7 +11,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// Used to create a new index. /// See https://docs.pinecone.io/reference/create_index /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public class IndexDefinition { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetadataConfig.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetadataConfig.cs index 0152df016d9f..733542974bff 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetadataConfig.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetadataConfig.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; using Microsoft.SemanticKernel.Memory; @@ -10,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Configuration for the behavior of Pinecone's internal metadata index. By default, all metadata is indexed; when metadata_config is present, only specified metadata fields are indexed. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public class MetadataIndexConfig { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetric.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetric.cs index 0cfb54e5bfc2..a9ebc98663e6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetric.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetric.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Diagnostics.CodeAnalysis; +using System; using System.Runtime.Serialization; using System.Text.Json.Serialization; @@ -10,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// The vector similarity metric of the index /// /// The vector similarity metric of the index -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] [JsonConverter(typeof(JsonStringEnumConverter))] public enum IndexMetric { diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexNamespaceStats.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexNamespaceStats.cs index 1099d27ace24..7f166ac997b1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexNamespaceStats.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexNamespaceStats.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -8,7 +8,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Index namespace parameters. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public class IndexNamespaceStats { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexState.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexState.cs index 6998fb950c0d..7d1af1db0c39 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexState.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexState.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Runtime.Serialization; using System.Text.Json.Serialization; @@ -9,7 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// The current status of a index. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] [JsonConverter(typeof(JsonStringEnumConverter))] public enum IndexState { diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStats.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStats.cs index fcecab22f888..eb3b4c53e1be 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStats.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStats.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -9,7 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Index parameters. 
/// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public class IndexStats { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStatus.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStatus.cs index 80a805118ceb..8e027aa47417 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStatus.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStatus.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -8,7 +8,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Status of the index. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public class IndexStatus { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/OperationType.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/OperationType.cs index 2ab24223baa3..be2da0ef5306 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/OperationType.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/OperationType.cs @@ -1,10 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; namespace Microsoft.SemanticKernel.Connectors.Pinecone; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] internal enum OperationType { Upsert, diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PineconeIndex.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PineconeIndex.cs index a018d6163446..451401722b70 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PineconeIndex.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PineconeIndex.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -8,7 +8,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Index entity. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public sealed class PineconeIndex { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PodType.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PodType.cs index a1ca09720942..828a23952ec5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PodType.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PodType.cs @@ -1,7 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Reflection; using System.Runtime.Serialization; @@ -13,7 +12,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Pod type of the index, see https://docs.pinecone.io/docs/indexes#pods-pod-types-and-pod-sizes. 
/// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] [JsonConverter(typeof(PodTypeJsonConverter))] public enum PodType { @@ -108,7 +107,7 @@ public enum PodType } #pragma warning disable CA1812 // Avoid uninstantiated internal classes -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] internal sealed class PodTypeJsonConverter : JsonConverter #pragma warning restore CA1812 { diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/Query.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/Query.cs index deed1cd706fe..382a12e76f62 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/Query.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/Query.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -10,7 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Query parameters for use in a query request. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public sealed class Query { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/SparseVectorData.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/SparseVectorData.cs index 93fdf06fa985..4b9f606b0714 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/SparseVectorData.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/SparseVectorData.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -10,7 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Represents a sparse vector data, which is a list of indices and a list of corresponding values, both of the same length. 
/// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public class SparseVectorData { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeClient.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeClient.cs index bc6fede851bf..0911e6d16934 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeClient.cs @@ -3,7 +3,6 @@ using System; using System.Collections.Concurrent; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Net; using System.Net.Http; @@ -20,7 +19,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// A client for the Pinecone API /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public sealed class PineconeClient : IPineconeClient { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocument.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocument.cs index 147d7d96b741..8a619813dcf5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocument.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocument.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Text.Json; using System.Text.Json.Serialization; @@ -13,7 +12,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Pinecone Document entity. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public class PineconeDocument { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocumentExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocumentExtensions.cs index 5cba6c227717..8c22facbeb50 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocumentExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocumentExtensions.cs @@ -2,17 +2,18 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Text.Json; using Microsoft.SemanticKernel.Memory; namespace Microsoft.SemanticKernel.Connectors.Pinecone; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// Extensions for class. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public static class PineconeDocumentExtensions { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryBuilderExtensions.cs index cec2c1391560..be3b1c800297 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryBuilderExtensions.cs @@ -1,16 +1,18 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; using Microsoft.SemanticKernel.Http; using Microsoft.SemanticKernel.Memory; namespace Microsoft.SemanticKernel.Connectors.Pinecone; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// Provides extension methods for the class to configure Pinecone connector. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public static class PineconeMemoryBuilderExtensions { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryStore.cs index 0d1118ded656..aacc9e1d0492 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryStore.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Runtime.CompilerServices; using System.Threading; @@ -13,6 +12,8 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// An implementation of for Pinecone Vector database. /// @@ -23,7 +24,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// For that reason, we use the term "Index" in Pinecone to refer to what is a "Collection" in IMemoryStore. So, in the case of Pinecone, /// "Collection" is synonymous with "Index" when referring to IMemoryStore. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public class PineconeMemoryStore : IPineconeMemoryStore { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeUtils.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeUtils.cs index 18d495399986..4058079097c2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeUtils.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeUtils.cs @@ -3,7 +3,6 @@ using System; using System.Collections; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.IO; using System.Text; using System.Text.Encodings.Web; @@ -16,7 +15,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Utils for Pinecone connector. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public static class PineconeUtils { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresDbClient.cs index 1d13a847ba4b..bf9301aad476 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresDbClient.cs @@ -2,17 +2,18 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; using Pgvector; namespace Microsoft.SemanticKernel.Connectors.Postgres; +#pragma warning disable SKEXP0020 + /// /// Interface for client managing postgres database operations for . 
/// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PostgresVectorStore")] public interface IPostgresDbClient { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresDbClient.cs index 20b384ec06f2..bcfd4443622c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresDbClient.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Runtime.CompilerServices; using System.Threading; @@ -17,7 +16,7 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; /// An implementation of a client for Postgres. This class is used to managing postgres database operations for . /// [System.Diagnostics.CodeAnalysis.SuppressMessage("Security", "CA2100:Review SQL queries for security vulnerabilities", Justification = "We need to build the full table name using schema and collection, it does not support parameterized passing.")] -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PostgresVectorStore")] public class PostgresDbClient : IPostgresDbClient { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryBuilderExtensions.cs index ad04abe0b7de..64d6d3070d23 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryBuilderExtensions.cs @@ -1,15 +1,17 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using Microsoft.SemanticKernel.Memory; using Npgsql; namespace Microsoft.SemanticKernel.Connectors.Postgres; +#pragma warning disable SKEXP0001 + /// /// Provides extension methods for the class to configure Postgres connector. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PostgresVectorStore")] public static class PostgresMemoryBuilderExtensions { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryEntry.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryEntry.cs index 449856653dbb..8e80f9b49ef3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryEntry.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryEntry.cs @@ -1,7 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Diagnostics.CodeAnalysis; using Pgvector; namespace Microsoft.SemanticKernel.Connectors.Postgres; @@ -9,7 +8,7 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; /// /// A postgres memory entry. 
/// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PostgresVectorStore")] public record struct PostgresMemoryEntry { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryStore.cs index f981ba926d96..f6a59ed5e463 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryStore.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; @@ -14,6 +13,8 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// An implementation of backed by a Postgres database with pgvector extension. /// @@ -21,7 +22,7 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; /// The embedded data is saved to the Postgres database specified in the constructor. /// Similarity search capability is provided through the pgvector extension. Use Postgres's "Table" to implement "Collection". /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PostgresVectorStore")] public class PostgresMemoryStore : IMemoryStore, IDisposable { internal const string DefaultSchema = "public"; diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/CreateCollectionRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/CreateCollectionRequest.cs index 6aaab2f26256..1f8e0945bc71 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/CreateCollectionRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/CreateCollectionRequest.cs @@ -1,13 +1,12 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Diagnostics.CodeAnalysis; using System.Net.Http; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Qdrant; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] internal sealed class CreateCollectionRequest { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteCollectionRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteCollectionRequest.cs index fd6df2fe945d..47ff25e408e6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteCollectionRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteCollectionRequest.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; namespace Microsoft.SemanticKernel.Connectors.Qdrant; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] internal sealed class DeleteCollectionRequest { public static DeleteCollectionRequest Create(string collectionName) diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteVectorsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteVectorsRequest.cs index 6993168b84fe..80a1002e951d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteVectorsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteVectorsRequest.cs @@ -1,13 +1,13 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Net.Http; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Qdrant; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] internal sealed class DeleteVectorsRequest { [JsonPropertyName("points")] diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteVectorsResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteVectorsResponse.cs index c227f407babf..fe0817bc78b1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteVectorsResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteVectorsResponse.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; namespace Microsoft.SemanticKernel.Connectors.Qdrant; @@ -8,6 +8,6 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// Empty qdrant response for requests that return nothing but status / error. /// #pragma warning disable CA1812 // Avoid uninstantiated internal classes. Justification: deserialized by QdrantVectorDbClient.DeleteVectorsByIdAsync & QdrantVectorDbClient.DeleteVectorByPayloadIdAsync -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] internal sealed class DeleteVectorsResponse : QdrantResponse; #pragma warning restore CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetCollectionRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetCollectionRequest.cs index 5648ae22212f..6c1b8f701414 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetCollectionRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetCollectionRequest.cs @@ -1,12 +1,12 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Qdrant; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] internal sealed class GetCollectionsRequest { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsRequest.cs index 60015e496cb7..69768e306df5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsRequest.cs @@ -1,14 +1,14 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Net.Http; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Qdrant; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] internal sealed class GetVectorsRequest { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsResponse.cs index 35c0584c73d7..20fb32e7b0db 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsResponse.cs @@ -2,13 +2,12 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Qdrant; #pragma warning disable CA1812 // Avoid uninstantiated internal classes: Used for Json Deserialization -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] internal sealed class GetVectorsResponse : QdrantResponse { internal sealed class Record diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsRequest.cs index 831f9213f2d7..f577c42a0b8f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsRequest.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; namespace Microsoft.SemanticKernel.Connectors.Qdrant; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] internal sealed class ListCollectionsRequest { public static ListCollectionsRequest Create() diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsResponse.cs index 250c2b06698e..6a5dda325887 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsResponse.cs @@ -1,13 +1,13 @@ // Copyright (c) Microsoft. All rights reserved. 
+using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Qdrant; #pragma warning disable CA1812 // Avoid uninstantiated internal classes: Used for Json Deserialization -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] internal sealed class ListCollectionsResponse : QdrantResponse { internal sealed class CollectionResult diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/NumberToStringConverter.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/NumberToStringConverter.cs index 5e1223fab1cc..5cb5502d8e7f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/NumberToStringConverter.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/NumberToStringConverter.cs @@ -1,7 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Text.Json; using System.Text.Json.Serialization; @@ -9,7 +8,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; #pragma warning disable CA1812 // Avoid uninstantiated internal classes: Used for Json Deserialization -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] internal sealed class NumberToStringConverter : JsonConverter { public override string Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/QdrantResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/QdrantResponse.cs index 16717281120d..380f152d45f0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/QdrantResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/QdrantResponse.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Qdrant; @@ -8,7 +8,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// Base class for Qdrant response schema. 
/// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] internal abstract class QdrantResponse { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsRequest.cs index 6aaf2645eb34..f7109a8c8a85 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsRequest.cs @@ -2,13 +2,12 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Net.Http; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Qdrant; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] internal sealed class SearchVectorsRequest { [JsonPropertyName("vector")] diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsResponse.cs index 81652d032caa..9a0414109aa1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsResponse.cs @@ -2,13 +2,12 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Qdrant; #pragma warning disable CA1812 // Avoid uninstantiated internal classes: Used for Json Deserialization -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] internal sealed class SearchVectorsResponse : QdrantResponse { internal sealed class ScoredPoint diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorRequest.cs index 4b03bbf047e2..6f6661c80902 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorRequest.cs @@ -2,13 +2,12 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Net.Http; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Qdrant; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] internal sealed class UpsertVectorRequest { public static UpsertVectorRequest Create(string collectionName) diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorResponse.cs index 59f61f3ae94b..3caa14c65efe 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorResponse.cs @@ -1,12 +1,12 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Diagnostics.CodeAnalysis; +using System; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Qdrant; #pragma warning disable CA1812 // Avoid uninstantiated internal classes: Used for Json Deserialization -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] internal sealed class UpsertVectorResponse : QdrantResponse { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs index aa9ad3f72190..414900d2f2b0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; @@ -11,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// Interface for a Qdrant vector database client. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] public interface IQdrantVectorDbClient { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantDistanceType.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantDistanceType.cs index 16a3f58a7daf..3c948fc84c7c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantDistanceType.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantDistanceType.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Qdrant; @@ -9,7 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// The vector distance type used by Qdrant. /// [JsonConverter(typeof(JsonStringEnumConverter))] -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] public enum QdrantDistanceType { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryBuilderExtensions.cs index f4233c47a6c0..56989090334c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryBuilderExtensions.cs @@ -1,16 +1,18 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; using Microsoft.SemanticKernel.Http; using Microsoft.SemanticKernel.Memory; namespace Microsoft.SemanticKernel.Connectors.Qdrant; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// Provides extension methods for the class to configure Qdrant connector. 
/// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] public static class QdrantMemoryBuilderExtensions { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryStore.cs index fdd4a2eaff9b..f08b8a9d4241 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryStore.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Net.Http; using System.Runtime.CompilerServices; @@ -14,13 +13,15 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// An implementation of for Qdrant Vector Database. /// /// The Embedding data is saved to a Qdrant Vector Database instance specified in the constructor by url and port. /// The embedding data persists between subsequent instances and has similarity search capability. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] public class QdrantMemoryStore : IMemoryStore { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorDbClient.cs index 9e158551bf24..ef4a7642f298 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorDbClient.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Net; using System.Net.Http; @@ -21,7 +20,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// connect, create, delete, and get embeddings data from a Qdrant Vector Database instance. /// #pragma warning disable CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] public sealed class QdrantVectorDbClient : IQdrantVectorDbClient #pragma warning restore CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. { diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorRecord.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorRecord.cs index 93ab5d24deb6..c7dc2189e4de 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorRecord.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorRecord.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Text.Json; using System.Text.Json.Serialization; @@ -11,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// A record structure used by Qdrant that contains an embedding and metadata. 
/// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and QdrantVectorStore")] public class QdrantVectorRecord { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisMemoryStore.cs index 29dfb78da922..a6a626419aa7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisMemoryStore.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; @@ -18,13 +17,15 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// An implementation of for Redis. /// /// The embedded data is saved to the Redis server database specified in the constructor. /// Similarity search capability is provided through the RediSearch module. Use RediSearch's "Index" to implement "Collection". /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and RedisVectorStore")] public class RedisMemoryStore : IMemoryStore, IDisposable { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorDistanceMetric.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorDistanceMetric.cs index 551d3e2e844d..96bcbd1bc917 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorDistanceMetric.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorDistanceMetric.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; namespace Microsoft.SemanticKernel.Connectors.Redis; @@ -8,7 +8,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// Supported distance metrics are {L2, IP, COSINE}. The default value is "COSINE". /// /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and RedisVectorStore")] public enum VectorDistanceMetric { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/ISqlServerClient.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/ISqlServerClient.cs index a457cddd3859..a351e929ab3a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/ISqlServerClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/ISqlServerClient.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; @@ -11,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.SqlServer; /// /// Interface for client managing SQL Server or Azure SQL database operations. 
/// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and SqlServerVectorStore")] internal interface ISqlServerClient { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerClient.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerClient.cs index a6d6912a4b98..0263eaf39fbc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerClient.cs @@ -17,7 +17,7 @@ namespace Microsoft.SemanticKernel.Connectors.SqlServer; /// Implementation of database client managing SQL Server or Azure SQL database operations. /// [SuppressMessage("Security", "CA2100:Review SQL queries for security vulnerabilities", Justification = "We need to build the full table name using schema and collection, it does not support parameterized passing.")] -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and SqlServerVectorStore")] internal sealed class SqlServerClient : ISqlServerClient { private readonly SqlConnection _connection; diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerMemoryBuilderExtensions.cs index dcaf6dd22734..4b9f34c1a1eb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerMemoryBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerMemoryBuilderExtensions.cs @@ -1,14 +1,16 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using Microsoft.SemanticKernel.Memory; namespace Microsoft.SemanticKernel.Connectors.SqlServer; +#pragma warning disable SKEXP0001 + /// /// Provides extension methods for the class to configure SQL Server or Azure SQL connector. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and SqlServerVectorStore")] public static class SqlServerMemoryBuilderExtensions { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerMemoryEntry.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerMemoryEntry.cs index e88c1b91e994..41778af5960f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerMemoryEntry.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerMemoryEntry.cs @@ -1,14 +1,13 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Diagnostics.CodeAnalysis; namespace Microsoft.SemanticKernel.Connectors.SqlServer; /// /// A SQL Server or Azure SQL memory entry. 
/// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and SqlServerVectorStore")] internal record struct SqlServerMemoryEntry { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerMemoryStore.cs index d5891dc2e96a..ca5a760a4005 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerMemoryStore.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; @@ -11,10 +10,12 @@ namespace Microsoft.SemanticKernel.Connectors.SqlServer; +#pragma warning disable SKEXP0001 + /// /// An implementation of backed by a SQL Server or Azure SQL database. /// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and SqlServerVectorStore")] public class SqlServerMemoryStore : IMemoryStore, IDisposable { internal const string DefaultSchema = "dbo"; diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs index c1e32e16a30d..9c3117cecd06 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Runtime.CompilerServices; using System.Threading; @@ -10,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and SqliteVectorStore")] internal struct DatabaseEntry { public string Key { get; set; } @@ -22,7 +22,7 @@ internal struct DatabaseEntry public string? Timestamp { get; set; } } -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and SqliteVectorStore")] internal sealed class Database { private const string TableName = "SKMemoryTable"; diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs index b8908818a049..7107020c0ddd 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Linq; using System.Numerics.Tensors; @@ -16,13 +15,15 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// An implementation of backed by a SQLite database. /// /// The data is saved to a database file, specified in the constructor. /// The data persists between subsequent instances. Only one instance may access the file at a time. /// The caller is responsible for deleting the file. 
-[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and SqliteVectorStore")] public class SqliteMemoryStore : IMemoryStore, IDisposable { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchRequest.cs index 24441684ceb9..ff56e3bad8f8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchRequest.cs @@ -1,13 +1,15 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Net.Http; using Microsoft.SemanticKernel.Memory; namespace Microsoft.SemanticKernel.Connectors.Weaviate; -[Experimental("SKEXP0020")] +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class BatchRequest { private readonly string _class; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchResponse.cs index af63e8c62e20..2aa03939c188 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchResponse.cs @@ -1,14 +1,13 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Weaviate; // ReSharper disable once ClassNeverInstantiated.Global #pragma warning disable CA1812 // 'BatchResponse' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class BatchResponse : WeaviateObject #pragma warning restore CA1812 // 'BatchResponse' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaRequest.cs index 93cd60dfb0ff..9efaeafb7f75 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaRequest.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; namespace Microsoft.SemanticKernel.Connectors.Weaviate; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class CreateClassSchemaRequest { private CreateClassSchemaRequest(string @class, string description) diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaResponse.cs index 181d69db467e..d7b65b1393ac 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaResponse.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'CreateClassSchemaResponse' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class CreateClassSchemaResponse #pragma warning restore CA1812 // 'CreateClassSchemaResponse' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateGraphRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateGraphRequest.cs index 301ee42170f3..c25da8ada8a0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateGraphRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateGraphRequest.cs @@ -1,7 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Linq; using System.Net.Http; @@ -9,7 +8,7 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; // ReSharper disable once ClassCannotBeInstantiated -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class CreateGraphRequest { #pragma warning disable CS8618 diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteObjectRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteObjectRequest.cs index dfbdd158f819..6580e6e9f355 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteObjectRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteObjectRequest.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; namespace Microsoft.SemanticKernel.Connectors.Weaviate; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class DeleteObjectRequest { public string? 
Class { get; set; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteSchemaRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteSchemaRequest.cs index 73d7e2fae456..950e05e7e59d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteSchemaRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteSchemaRequest.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; namespace Microsoft.SemanticKernel.Connectors.Weaviate; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class DeleteSchemaRequest { private readonly string _class; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassRequest.cs index 01669e527ced..48de160feca0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassRequest.cs @@ -1,12 +1,12 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Weaviate; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class GetClassRequest { private GetClassRequest(string @class) diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassResponse.cs index e176a1f1b619..e3aea355ba2a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassResponse.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'GetClassResponse' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class GetClassResponse #pragma warning restore CA1812 // 'GetClassResponse' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetObjectRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetObjectRequest.cs index 4c4317e8a1ab..cc608e62f557 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetObjectRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetObjectRequest.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; namespace Microsoft.SemanticKernel.Connectors.Weaviate; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class GetObjectRequest { public string? Id { get; set; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaRequest.cs index 3a4be14541eb..94d0e57c180c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaRequest.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; namespace Microsoft.SemanticKernel.Connectors.Weaviate; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class GetSchemaRequest { public static GetSchemaRequest Create() diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaResponse.cs index 76620d603b5c..0503bb11aa22 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaResponse.cs @@ -1,12 +1,12 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'GetSchemaResponse' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class GetSchemaResponse #pragma warning restore CA1812 // 'GetSchemaResponse' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GraphResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GraphResponse.cs index e31c5645c7de..cf3c1073b64d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GraphResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GraphResponse.cs @@ -1,12 +1,12 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Text.Json.Nodes; namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'GraphResponse' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). 
-[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class GraphResponse #pragma warning restore CA1812 // 'GraphResponse' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/ObjectResponseResult.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/ObjectResponseResult.cs index 3ed41a0a0187..33f1bfffe6b4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/ObjectResponseResult.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/ObjectResponseResult.cs @@ -1,13 +1,13 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Text.Json.Nodes; namespace Microsoft.SemanticKernel.Connectors.Weaviate; // ReSharper disable once ClassNeverInstantiated.Global #pragma warning disable CA1812 // 'ObjectResponseResult' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class ObjectResponseResult #pragma warning restore CA1812 // 'ObjectResponseResult' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/JsonConverter/UnixSecondsDateTimeJsonConverter.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/JsonConverter/UnixSecondsDateTimeJsonConverter.cs index 457ce2114a6d..5c3db6795970 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/JsonConverter/UnixSecondsDateTimeJsonConverter.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/JsonConverter/UnixSecondsDateTimeJsonConverter.cs @@ -1,14 +1,13 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Diagnostics.CodeAnalysis; using System.Text.Json; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'UnixSecondsDateTimeJsonConverter' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class UnixSecondsDateTimeJsonConverter : JsonConverter #pragma warning restore CA1812 // 'UnixSecondsDateTimeJsonConverter' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). 
{ diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Deprecation.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Deprecation.cs index 1eaf7dc297fb..57033407ea6e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Deprecation.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Deprecation.cs @@ -1,12 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Diagnostics.CodeAnalysis; namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'Deprecation' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class Deprecation #pragma warning restore CA1812 // 'Deprecation' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphError.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphError.cs index c44560665c6b..a2b31dcbdee0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphError.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphError.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'GraphError' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class GraphError #pragma warning restore CA1812 // 'GraphError' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphErrorLocationsItems.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphErrorLocationsItems.cs index 7fa5d0989cfe..d1fe85990bd3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphErrorLocationsItems.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphErrorLocationsItems.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'GraphErrorLocationsItems' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). 
-[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class GraphErrorLocationsItems #pragma warning restore CA1812 // 'GraphErrorLocationsItems' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Property.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Property.cs index 09f23300d6c3..b9b95302286a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Property.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Property.cs @@ -1,10 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; namespace Microsoft.SemanticKernel.Connectors.Weaviate; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal sealed class Property { public string? Name { get; set; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/WeaviateObject.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/WeaviateObject.cs index 0314fb4f2b5f..557bc290e9bb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/WeaviateObject.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/WeaviateObject.cs @@ -2,11 +2,10 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; namespace Microsoft.SemanticKernel.Connectors.Weaviate; -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] internal class WeaviateObject { public string? Id { get; set; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryBuilderExtensions.cs index 40795d21eb30..b5247d94058b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryBuilderExtensions.cs @@ -1,16 +1,18 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System; using System.Net.Http; using Microsoft.SemanticKernel.Http; using Microsoft.SemanticKernel.Memory; namespace Microsoft.SemanticKernel.Connectors.Weaviate; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// Provides extension methods for the class to configure Weaviate connector. 
/// -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] public static class WeaviateMemoryBuilderExtensions { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs index ca45d0b828f3..9382d081d481 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Linq; using System.Net; @@ -21,6 +20,8 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; +#pragma warning disable SKEXP0001 // IMemoryStore is experimental (but we're obsoleting) + /// /// An implementation of for Weaviate. /// @@ -29,7 +30,7 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; /// // ReSharper disable once ClassWithVirtualMembersNeverInherited.Global #pragma warning disable CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. -[Experimental("SKEXP0020")] +[Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and WeaviateVectorStore")] public partial class WeaviateMemoryStore : IMemoryStore #pragma warning restore CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. 
{ diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeMemoryStoreTests.cs index 05e942e857da..85755510e3ff 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeMemoryStoreTests.cs @@ -14,6 +14,7 @@ namespace SemanticKernel.Connectors.Pinecone.UnitTests; +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class PineconeMemoryStoreTests { private readonly string _id = "Id"; diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeUtilsTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeUtilsTests.cs index 9f106c91124e..9c1a181cde92 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeUtilsTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeUtilsTests.cs @@ -11,6 +11,7 @@ namespace SemanticKernel.Connectors.Pinecone.UnitTests; +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class PineconeUtilsTests { [Fact] diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryBuilderExtensionsTests.cs index 897a09087f09..f51abc6f2432 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryBuilderExtensionsTests.cs @@ -13,6 +13,7 @@ namespace SemanticKernel.Connectors.Qdrant.UnitTests; +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public sealed class QdrantMemoryBuilderExtensionsTests : IDisposable { private readonly HttpMessageHandlerStub _messageHandlerStub; diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests.cs index 6ae498561065..b0c7a448b668 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests.cs @@ -17,6 +17,7 @@ namespace SemanticKernel.Connectors.Qdrant.UnitTests; /// /// Tests for collection and upsert operations. /// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class QdrantMemoryStoreTests { private readonly string _id = "Id"; diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests2.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests2.cs index 8af2061c5d3a..8db0a7c0a839 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests2.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests2.cs @@ -16,6 +16,7 @@ namespace SemanticKernel.Connectors.Qdrant.UnitTests; /// /// Tests for Get and Remove operations. 
/// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class QdrantMemoryStoreTests2 { private readonly string _id = "Id"; diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests3.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests3.cs index ad7d54e2d5bb..7558098b2713 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests3.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests3.cs @@ -20,6 +20,7 @@ namespace SemanticKernel.Connectors.Qdrant.UnitTests; /// /// Tests for Search operations. /// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class QdrantMemoryStoreTests3 { private readonly string _id = "Id"; diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorDbClientTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorDbClientTests.cs index 41a95178a588..bf3a52f7683b 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorDbClientTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorDbClientTests.cs @@ -8,6 +8,7 @@ namespace SemanticKernel.Connectors.Qdrant.UnitTests; +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public sealed class QdrantVectorDbClientTests : IDisposable { private readonly HttpMessageHandlerStub _messageHandlerStub; diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisMemoryStoreTests.cs index 5c63e568a3a9..892c8c0d495f 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisMemoryStoreTests.cs @@ -20,6 +20,7 @@ namespace SemanticKernel.Connectors.Redis.UnitTests; /// /// Unit tests of . /// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class RedisMemoryStoreTests { private readonly Mock _mockDatabase; diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteMemoryStoreTests.cs index 5086709937ac..c231e711d6bf 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteMemoryStoreTests.cs @@ -16,6 +16,7 @@ namespace SemanticKernel.Connectors.Sqlite.UnitTests; /// Unit tests of . /// [Collection("Sequential")] +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public sealed class SqliteMemoryStoreTests : IDisposable { private const string DatabaseFile = "SqliteMemoryStoreTests.db"; diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/MongoDB/MongoDBMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/MongoDB/MongoDBMemoryStoreTests.cs index 4abfbf941498..a9e1589161c4 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/MongoDB/MongoDBMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/MongoDB/MongoDBMemoryStoreTests.cs @@ -17,6 +17,7 @@ namespace SemanticKernel.Connectors.UnitTests.MongoDB; /// /// Unit tests for class. 
/// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class MongoDBMemoryStoreTests { private const string CollectionName = "test-collection"; diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Postgres/PostgresMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Postgres/PostgresMemoryStoreTests.cs index 928a30568ae6..e1f0f17df187 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Postgres/PostgresMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Postgres/PostgresMemoryStoreTests.cs @@ -15,6 +15,7 @@ namespace SemanticKernel.Connectors.UnitTests.Postgres; /// /// Unit tests for class. /// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class PostgresMemoryStoreTests { private const string CollectionName = "fake-collection-name"; diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Sqlite/SqliteMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Sqlite/SqliteMemoryStoreTests.cs index e91a1794d2a8..0de180a013e8 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Sqlite/SqliteMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Sqlite/SqliteMemoryStoreTests.cs @@ -16,6 +16,7 @@ namespace SemanticKernel.Connectors.UnitTests.Sqlite; /// Unit tests of . /// [Collection("Sequential")] +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public sealed class SqliteMemoryStoreTests : IDisposable { private const string DatabaseFile = "SqliteMemoryStoreTests.db"; diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateMemoryBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateMemoryBuilderExtensionsTests.cs index d3c4a2a0c92f..172b84bc9196 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateMemoryBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateMemoryBuilderExtensionsTests.cs @@ -16,6 +16,7 @@ namespace SemanticKernel.Connectors.UnitTests.Weaviate; +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public sealed class WeaviateMemoryBuilderExtensionsTests : IDisposable { private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateMemoryStoreTests.cs index 97134f46818a..b1eea5a6c6b3 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateMemoryStoreTests.cs @@ -16,6 +16,7 @@ namespace SemanticKernel.Connectors.UnitTests.Weaviate; /// /// Unit tests for class. 
/// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public sealed class WeaviateMemoryStoreTests : IDisposable { private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStoreTests.cs index cc0d1238b95a..c0282c454ee8 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStoreTests.cs @@ -13,6 +13,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.AzureCosmosDBMongoDB; /// /// Integration tests of . /// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class AzureCosmosDBMongoDBMemoryStoreTests : IClassFixture { private const string? SkipReason = "Azure CosmosDB Mongo vCore cluster is required"; diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStoreTestsFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStoreTestsFixture.cs index 6854e7e7fdf8..d1fc295e3b1d 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStoreTestsFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStoreTestsFixture.cs @@ -10,6 +10,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.AzureCosmosDBMongoDB; +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class AzureCosmosDBMongoDBMemoryStoreTestsFixture : IAsyncLifetime { public AzureCosmosDBMongoDBMemoryStore MemoryStore { get; } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTests.cs index e75116e34893..fcd910889785 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTests.cs @@ -18,6 +18,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.AzureCosmosDBNoSQL; /// /// Integration tests of . /// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class AzureCosmosDBNoSQLMemoryStoreTests : IClassFixture { private const string? 
SkipReason = "Azure Cosmos DB Account with Vector indexing enabled required"; diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTestsFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTestsFixture.cs index 7e6f376a8684..e8bbecd47533 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTestsFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTestsFixture.cs @@ -9,6 +9,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.AzureCosmosDBNoSQL; +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class AzureCosmosDBNoSQLMemoryStoreTestsFixture : IAsyncLifetime { public AzureCosmosDBNoSQLMemoryStore MemoryStore { get; } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBMemoryStoreTests.cs index 6f4c834ecf7c..f744484316ef 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBMemoryStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBMemoryStoreTests.cs @@ -13,6 +13,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.MongoDB; /// /// Integration tests of . /// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class MongoDBMemoryStoreTests(MongoDBMemoryStoreTestsFixture fixture) : IClassFixture { // If null, all tests will be enabled diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBMemoryStoreTestsFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBMemoryStoreTestsFixture.cs index f96acb8fd77b..ec678690d09f 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBMemoryStoreTestsFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBMemoryStoreTestsFixture.cs @@ -12,6 +12,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.MongoDB; +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class MongoDBMemoryStoreTestsFixture : IAsyncLifetime { #pragma warning disable CA1859 // Use concrete types when possible for improved performance diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresMemoryStoreTests.cs index 71474ff0ebc6..a9fe0532b895 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresMemoryStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresMemoryStoreTests.cs @@ -16,6 +16,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Postgres; /// /// Integration tests of . /// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class PostgresMemoryStoreTests : IAsyncLifetime { // If null, all tests will be enabled diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateMemoryStoreTests.cs index b88795e9a3d6..bd366a06dfc3 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateMemoryStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateMemoryStoreTests.cs @@ -17,6 +17,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Weaviate; /// The Weaviate instance API key is set in the Docker Container as "my-secret-key". 
/// [Collection("Sequential")] +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public sealed class WeaviateMemoryStoreTests : IDisposable { // If null, all tests will be enabled diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerMemoryStoreTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerMemoryStoreTests.cs index 23e714ff60bd..f1b23dcfed79 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerMemoryStoreTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerMemoryStoreTests.cs @@ -11,6 +11,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.SqlServer; /// /// Unit tests for class. /// +[Obsolete("The IMemoryStore abstraction is being obsoleted")] public class SqlServerMemoryStoreTests : IAsyncLifetime { private const string? SkipReason = "Configure SQL Server or Azure SQL connection string and then set this to 'null'."; @@ -339,7 +340,7 @@ private async Task CleanupDatabaseAsync() await connection.OpenAsync(); cmd.CommandText = $""" DECLARE tables_cursor CURSOR FOR - SELECT table_name + SELECT table_name FROM information_schema.tables WHERE table_type = 'BASE TABLE' AND table_schema = '{SchemaName}' diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestEnvironment.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestEnvironment.cs index 043f4882e640..584a289f2902 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestEnvironment.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestEnvironment.cs @@ -17,7 +17,7 @@ internal static class SqlServerTestEnvironment .AddJsonFile(path: "testsettings.json", optional: true) .AddJsonFile(path: "testsettings.development.json", optional: true) .AddEnvironmentVariables() - .AddUserSecrets() + .AddUserSecrets() .Build(); return configuration.GetSection("SqlServer")["ConnectionString"]; From 973254377c38a60ef8c7676a7ba0b0316c5efd37 Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Thu, 20 Mar 2025 08:40:31 +0100 Subject: [PATCH 18/63] .Net: Target net462 in all MEVD connectors (#11077) Aligning with MEAI; this helps netfx users under net472 ([link](https://github.com/roji/semantic-kernel/pull/new/net462)). 
--- .../Connectors.Memory.AzureAISearch.csproj | 2 +- .../Connectors.Memory.AzureCosmosDBMongoDB.csproj | 2 +- .../Connectors.Memory.AzureCosmosDBNoSQL.csproj | 2 +- .../Connectors.Memory.Chroma.csproj | 2 +- .../Connectors.Memory.DuckDB.csproj | 2 +- .../Connectors.Memory.InMemory.csproj | 2 +- .../Connectors.Memory.Kusto/Connectors.Memory.Kusto.csproj | 2 +- .../Connectors.Memory.Milvus.csproj | 2 +- .../Connectors.Memory.MongoDB.csproj | 2 +- .../Connectors.Memory.Pinecone.csproj | 2 +- .../Connectors.Memory.Postgres.csproj | 2 +- .../PostgresVectorStoreDbClient.cs | 4 ++-- .../Connectors.Memory.Qdrant.csproj | 2 +- .../Http/ApiSchema/GetVectorsRequest.cs | 6 ++++++ .../Connectors.Memory.Redis/Connectors.Memory.Redis.csproj | 2 +- .../Connectors.Memory.Redis/RedisFilterTranslator.cs | 6 +++--- .../Connectors.Memory.SqlServer.csproj | 2 +- .../Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs | 2 +- .../Connectors.Memory.Sqlite.csproj | 2 +- .../SqliteVectorStoreCollectionCommandBuilder.cs | 2 +- .../Connectors.Memory.Weaviate.csproj | 2 +- .../Connectors.OpenAI/Services/OpenAIFileService.cs | 6 +++--- .../src/InternalUtilities/src/Diagnostics/IsExternalInit.cs | 4 ++++ .../src/Diagnostics/UnreachableException.cs | 2 +- dotnet/src/InternalUtilities/src/System/IndexRange.cs | 2 +- .../Services/EmptyServiceProvider.cs | 6 +++--- 26 files changed, 41 insertions(+), 31 deletions(-) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/Connectors.Memory.AzureAISearch.csproj b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/Connectors.Memory.AzureAISearch.csproj index 3a53cd12212f..9b4aa8a97866 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/Connectors.Memory.AzureAISearch.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/Connectors.Memory.AzureAISearch.csproj @@ -3,7 +3,7 @@ Microsoft.SemanticKernel.Connectors.AzureAISearch Microsoft.SemanticKernel.Connectors.AzureAISearch - net8.0;netstandard2.0 + net8.0;netstandard2.0;net462 preview diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/Connectors.Memory.AzureCosmosDBMongoDB.csproj b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/Connectors.Memory.AzureCosmosDBMongoDB.csproj index 3822d58d8b27..8167b955e13c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/Connectors.Memory.AzureCosmosDBMongoDB.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/Connectors.Memory.AzureCosmosDBMongoDB.csproj @@ -4,7 +4,7 @@ Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB $(AssemblyName) - net8.0;netstandard2.0 + net8.0;netstandard2.0;net462 preview diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/Connectors.Memory.AzureCosmosDBNoSQL.csproj b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/Connectors.Memory.AzureCosmosDBNoSQL.csproj index a3dbe540101c..b4d0a9ba609b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/Connectors.Memory.AzureCosmosDBNoSQL.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/Connectors.Memory.AzureCosmosDBNoSQL.csproj @@ -4,7 +4,7 @@ Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL $(AssemblyName) - net8.0;netstandard2.0 + net8.0;netstandard2.0;net462 $(NoWarn);NU5104 preview diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/Connectors.Memory.Chroma.csproj b/dotnet/src/Connectors/Connectors.Memory.Chroma/Connectors.Memory.Chroma.csproj index e89013694aae..96ed5812cfe4 100644 --- 
a/dotnet/src/Connectors/Connectors.Memory.Chroma/Connectors.Memory.Chroma.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/Connectors.Memory.Chroma.csproj @@ -4,7 +4,7 @@ Microsoft.SemanticKernel.Connectors.Chroma $(AssemblyName) - net8.0;netstandard2.0 + net8.0;netstandard2.0;net462 alpha diff --git a/dotnet/src/Connectors/Connectors.Memory.DuckDB/Connectors.Memory.DuckDB.csproj b/dotnet/src/Connectors/Connectors.Memory.DuckDB/Connectors.Memory.DuckDB.csproj index d793de68dc3a..ce8c96eb0be3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.DuckDB/Connectors.Memory.DuckDB.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.DuckDB/Connectors.Memory.DuckDB.csproj @@ -4,7 +4,7 @@ Microsoft.SemanticKernel.Connectors.DuckDB $(AssemblyName) - net8.0;netstandard2.0 + net8.0;netstandard2.0;net462 alpha diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/Connectors.Memory.InMemory.csproj b/dotnet/src/Connectors/Connectors.Memory.InMemory/Connectors.Memory.InMemory.csproj index 1815446a8f80..4a630b90b5bb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/Connectors.Memory.InMemory.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/Connectors.Memory.InMemory.csproj @@ -3,7 +3,7 @@ Microsoft.SemanticKernel.Connectors.InMemory $(AssemblyName) - net8.0;netstandard2.0 + net8.0;netstandard2.0;net462 preview diff --git a/dotnet/src/Connectors/Connectors.Memory.Kusto/Connectors.Memory.Kusto.csproj b/dotnet/src/Connectors/Connectors.Memory.Kusto/Connectors.Memory.Kusto.csproj index dddcbcd37c5f..3e4edcde8b69 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Kusto/Connectors.Memory.Kusto.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Kusto/Connectors.Memory.Kusto.csproj @@ -3,7 +3,7 @@ Microsoft.SemanticKernel.Connectors.Kusto Microsoft.SemanticKernel.Connectors.Kusto - net8.0;netstandard2.0 + net8.0;netstandard2.0;net462 alpha $(NoWarn);NU5104 diff --git a/dotnet/src/Connectors/Connectors.Memory.Milvus/Connectors.Memory.Milvus.csproj b/dotnet/src/Connectors/Connectors.Memory.Milvus/Connectors.Memory.Milvus.csproj index 9df2ba3e4db3..07b6696cea8a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Milvus/Connectors.Memory.Milvus.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Milvus/Connectors.Memory.Milvus.csproj @@ -4,7 +4,7 @@ Microsoft.SemanticKernel.Connectors.Milvus $(AssemblyName) - net8.0;netstandard2.0 + net8.0;netstandard2.0;net462 enable alpha diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/Connectors.Memory.MongoDB.csproj b/dotnet/src/Connectors/Connectors.Memory.MongoDB/Connectors.Memory.MongoDB.csproj index b091931d6e9e..3498c97b1130 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/Connectors.Memory.MongoDB.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/Connectors.Memory.MongoDB.csproj @@ -4,7 +4,7 @@ Microsoft.SemanticKernel.Connectors.MongoDB $(AssemblyName) - net8.0;netstandard2.0 + net8.0;netstandard2.0;net462 preview diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj index b2127f5131b0..d835350684d9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj @@ -4,7 +4,7 @@ Microsoft.SemanticKernel.Connectors.Pinecone $(AssemblyName) - net8.0;netstandard2.0 + net8.0;netstandard2.0;net462 preview diff --git 
a/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj b/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj index 4a97b2962a14..10c7683a8eb6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj @@ -4,7 +4,7 @@ Microsoft.SemanticKernel.Connectors.Postgres $(AssemblyName) - net8.0;netstandard2.0 + net8.0;netstandard2.0;net462 preview $(NoWarn);CS0436 diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs index 07c228540038..a167aad9cd02 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs @@ -82,7 +82,7 @@ public async Task CreateTableAsync(string tableName, IReadOnlyList Microsoft.SemanticKernel.Connectors.Qdrant $(AssemblyName) - net8.0;netstandard2.0 + net8.0;netstandard2.0;net462 preview diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsRequest.cs index 69768e306df5..455ff62c1d5b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsRequest.cs @@ -52,7 +52,13 @@ public static GetVectorsRequest Create(string collectionName) public GetVectorsRequest WithPointId(string pointId) { +#if NET462 + var points = this.PointIds.ToList(); + points.Add(pointId); + this.PointIds = points; +#else this.PointIds = this.PointIds.Append(pointId); +#endif return this; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj b/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj index 3f2fd4360fe8..61f91e1cae6d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj @@ -4,7 +4,7 @@ Microsoft.SemanticKernel.Connectors.Redis $(AssemblyName) - net8.0;netstandard2.0 + net8.0;netstandard2.0;net462 preview diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs index ec5bcd73514f..bd8d833f75f8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs @@ -110,10 +110,10 @@ bool TryProcessEqualityComparison(Expression first, Expression second) { ExpressionType.Equal when constantValue is int or long or float or double => $" == {constantValue}", ExpressionType.Equal when constantValue is string stringValue -#if NETSTANDARD2_0 - => $$""":{"{{stringValue.Replace("\"", "\"\"")}}"}""", -#else +#if NET8_0_OR_GREATER => $$""":{"{{stringValue.Replace("\"", "\\\"", StringComparison.Ordinal)}}"}""", +#else + => $$""":{"{{stringValue.Replace("\"", "\"\"")}}"}""", #endif ExpressionType.Equal when constantValue is null => throw new NotSupportedException("Null value type not supported"), // TODO diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/Connectors.Memory.SqlServer.csproj b/dotnet/src/Connectors/Connectors.Memory.SqlServer/Connectors.Memory.SqlServer.csproj index 
b188e9a2d2aa..0f2cdabb6d2d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/Connectors.Memory.SqlServer.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/Connectors.Memory.SqlServer.csproj @@ -4,7 +4,7 @@ Microsoft.SemanticKernel.Connectors.SqlServer $(AssemblyName) - netstandard2.0;net8.0 + netstandard2.0;net8.0;net462 preview diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs index 4fb9c32d4fe5..fee4686bf548 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs @@ -416,7 +416,7 @@ internal static StringBuilder AppendTableName(this StringBuilder sb, string? sch if (!string.IsNullOrEmpty(schema)) { sb.Append(schema); - sb.Replace("]", "]]", index, schema.Length); // replace the ] for schema + sb.Replace("]", "]]", index, schema!.Length); // replace the ] for schema sb.Append("].["); index = sb.Length; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj index fec218bfc49d..056f868262c5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj @@ -4,7 +4,7 @@ Microsoft.SemanticKernel.Connectors.Sqlite $(AssemblyName) - net8.0;netstandard2.0 + net8.0;netstandard2.0;net462 preview diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs index 1f4559f5848c..7f8090bd345d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs @@ -271,7 +271,7 @@ private static (DbCommand Command, string WhereClause) GetCommandWithWhereClause whereClause += extraWhereFilter; Debug.Assert(extraParameters is not null, "extraParameters must be provided when extraWhereFilter is provided."); - foreach (var p in extraParameters) + foreach (var p in extraParameters!) 
{ command.Parameters.Add(new SqliteParameter(p.Key, p.Value)); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj index 26b63c694dff..a7ab0b153735 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj @@ -4,7 +4,7 @@ Microsoft.SemanticKernel.Connectors.Weaviate $(AssemblyName) - net8.0;netstandard2.0 + net8.0;netstandard2.0;net462 preview diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIFileService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIFileService.cs index 83a544920a62..e2f691713f22 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIFileService.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIFileService.cs @@ -121,11 +121,11 @@ public async Task GetFileContentAsync(string id, CancellationToke using (stream) { using var memoryStream = new MemoryStream(); -#if NETSTANDARD2_0 +#if NET8_0_OR_GREATER + await stream.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false); +#else const int DefaultCopyBufferSize = 81920; await stream.CopyToAsync(memoryStream, DefaultCopyBufferSize, cancellationToken).ConfigureAwait(false); -#else - await stream.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false); #endif return new(memoryStream.ToArray(), mimetype) diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/IsExternalInit.cs b/dotnet/src/InternalUtilities/src/Diagnostics/IsExternalInit.cs index 7bd800e1dd6f..bf1c27afb2ab 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/IsExternalInit.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/IsExternalInit.cs @@ -1,5 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +#if !NET8_0_OR_GREATER + namespace System.Runtime.CompilerServices; /// @@ -7,3 +9,5 @@ namespace System.Runtime.CompilerServices; /// This class should not be used by developers in source code. /// internal static class IsExternalInit; + +#endif diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/UnreachableException.cs b/dotnet/src/InternalUtilities/src/Diagnostics/UnreachableException.cs index 616073f54705..1e2d9f9b0b02 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/UnreachableException.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/UnreachableException.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -#if NETSTANDARD2_0 +#if !NET8_0_OR_GREATER // Polyfill for using UnreachableException with .NET Standard 2.0 diff --git a/dotnet/src/InternalUtilities/src/System/IndexRange.cs b/dotnet/src/InternalUtilities/src/System/IndexRange.cs index 439e6e844fb6..32c6c9c12538 100644 --- a/dotnet/src/InternalUtilities/src/System/IndexRange.cs +++ b/dotnet/src/InternalUtilities/src/System/IndexRange.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
-#if NETSTANDARD2_0 +#if !NET8_0_OR_GREATER // Polyfill for using Index and Range with .NET Standard 2.0 (see https://www.meziantou.net/how-to-use-csharp-8-indices-and-ranges-in-dotnet-standard-2-0-and-dotn.htm) diff --git a/dotnet/src/SemanticKernel.Abstractions/Services/EmptyServiceProvider.cs b/dotnet/src/SemanticKernel.Abstractions/Services/EmptyServiceProvider.cs index 08305ca9df83..ff676289a399 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Services/EmptyServiceProvider.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Services/EmptyServiceProvider.cs @@ -53,9 +53,9 @@ private static Array CreateArray(Type elementType, int length) } private static bool VerifyAotCompatibility => -#if NETFRAMEWORK || NETSTANDARD2_0 - false; -#else +#if NET8_0_OR_GREATER !System.Runtime.CompilerServices.RuntimeFeature.IsDynamicCodeSupported; +#else + false; #endif } From d9d601bf249b68bcbbc1e3adec191b9bc0e8d7e1 Mon Sep 17 00:00:00 2001 From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com> Date: Thu, 20 Mar 2025 10:16:55 -0700 Subject: [PATCH 19/63] .Net: Merge telemetry branch to preb2 (#11095) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: dependabot[bot] Signed-off-by: Vincent Biret Co-authored-by: Shay Rojansky Co-authored-by: westey <164392973+westey-m@users.noreply.github.com> Co-authored-by: Evan Mattson <35585003+moonbox3@users.noreply.github.com> Co-authored-by: Chris <66376200+crickman@users.noreply.github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Tao Chen Co-authored-by: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Co-authored-by: Mark Wallace <127216156+markwallace-microsoft@users.noreply.github.com> Co-authored-by: Estefanía Tenorio <8483207+esttenorio@users.noreply.github.com> Co-authored-by: davidpene Co-authored-by: ThDuquennoy Co-authored-by: Thomas DUQUENNOY Co-authored-by: Eduard van Valkenburg Co-authored-by: Evan Mattson Co-authored-by: Rob Emanuele Co-authored-by: K. Andrew Parker Co-authored-by: SergeyMenshykh <68852919+SergeyMenshykh@users.noreply.github.com> Co-authored-by: jenfoxbot Co-authored-by: Ben Thomas Co-authored-by: Ben Thomas Co-authored-by: Adit Sheth Co-authored-by: Adit Sheth Co-authored-by: ふぁー <47295014+ymuichiro@users.noreply.github.com> Co-authored-by: Vincent Biret Co-authored-by: David A. Torres <10944960+davidatorres@users.noreply.github.com> Co-authored-by: Genevieve Warren <24882762+gewarren@users.noreply.github.com> Co-authored-by: Atiqur Rahman Foyshal <113086917+atiq-bs23@users.noreply.github.com> Co-authored-by: Md. Atiqur Rahman Foyshal Co-authored-by: Fabian Williams <92543063+fabianwilliams@users.noreply.github.com> Co-authored-by: fabian.williams@microsoft.com Co-authored-by: Ram.Type-0 <39725073+RamType0@users.noreply.github.com> Co-authored-by: Tommy Falgout Co-authored-by: Gary Tang <113477176+gtang31-te@users.noreply.github.com> Co-authored-by: Eirik Tsarpalis Co-authored-by: Tommaso Stocchi Co-authored-by: Chris Rickman Co-authored-by: Devis Lucato Co-authored-by: K. 
Andrew Parker Co-authored-by: Jose Luis Latorre Millas Co-authored-by: Carsten Lemm Co-authored-by: Stephen Toub Co-authored-by: Musale Martin Co-authored-by: Ross Smith Co-authored-by: Adam Sitnik Co-authored-by: Rodrigo Martins Racanicci <59115712+rracanicci@users.noreply.github.com> Co-authored-by: unsafecode Co-authored-by: QuocDatHoang Co-authored-by: Dat Hoang Quoc --- .github/.linkspector.yml | 12 +- .github/workflows/codeql-analysis.yml | 2 + .github/workflows/dotnet-build-and-test.yml | 4 + .github/workflows/dotnet-ci.yml | 2 + .github/workflows/dotnet-format.yml | 1 + .../workflows/dotnet-integration-tests.yml | 1 + .github/workflows/markdown-link-check.yml | 2 + .github/workflows/typos.yaml | 2 + README.md | 2 +- docs/PLUGINS.md | 4 +- docs/PROMPT_TEMPLATE_LANGUAGE.md | 4 +- ...08-support-generic-llm-request-settings.md | 2 +- docs/decisions/0065-realtime-api-clients.md | 2 +- docs/decisions/00NN-hybrid-search.md | 395 ++++++++++ dotnet/Directory.Packages.props | 6 +- dotnet/SK-dotnet.sln | 33 + .../AzureOpenAIWithData_ChatCompletion.cs | 12 +- .../ChatCompletion/ChatHistoryInFunctions.cs | 4 +- .../OpenAI_ChatCompletionWebSearch.cs | 53 ++ dotnet/samples/Concepts/Concepts.csproj | 1 + .../Concepts/Memory/VectorStore_Telemetry.cs | 180 +++++ dotnet/samples/Concepts/README.md | 23 +- dotnet/samples/Demos/AIModelRouter/README.md | 2 +- .../samples/Demos/BookingRestaurant/README.md | 4 +- .../samples/Demos/OpenAIRealtime/Program.cs | 3 +- dotnet/samples/Demos/TimePlugin/README.md | 4 +- .../GettingStartedWithAgents/README.md | 10 +- ...rockTextEmbeddingGenerationServiceTests.cs | 34 +- .../AzureOpenAIChatCompletionServiceTests.cs | 5 + .../Connectors.AzureOpenAI.csproj | 1 - .../Core/AzureClientCore.cs | 1 + .../Connectors.Memory.Pinecone.csproj | 2 +- .../PineconeFilterTranslator.cs | 267 +++++++ .../PineconeGenericDataModelMapper.cs | 21 +- .../PineconeVectorStore.cs | 17 +- ...econeVectorStoreCollectionCreateMapping.cs | 46 -- ...econeVectorStoreCollectionSearchMapping.cs | 15 +- .../PineconeVectorStoreRecordCollection.cs | 373 ++++++---- .../PineconeVectorStoreRecordFieldMapping.cs | 20 +- .../PineconeVectorStoreRecordMapper.cs | 8 +- .../ExceptionWrapper.cs | 2 +- .../SqlServerCommandBuilder.cs | 30 +- .../SqlServerConstants.cs | 3 + .../SqlServerVectorStoreRecordCollection.cs | 206 ++++-- .../Connectors.Onnx.UnitTests.csproj | 2 +- .../Connectors.Onnx/Connectors.Onnx.csproj | 1 + .../OpenAIChatCompletionServiceTests.cs | 71 ++ .../Connectors.OpenAI.csproj | 1 - .../Core/ClientCore.ChatCompletion.cs | 27 + .../Settings/OpenAIPromptExecutionSettings.cs | 34 +- .../PineconeGenericDataModelMapperTests.cs | 48 +- .../PineconeKernelBuilderExtensionsTests.cs | 4 +- ...ineconeServiceCollectionExtensionsTests.cs | 4 +- ...ineconeVectorStoreRecordCollectionTests.cs | 2 +- .../VectorData.Abstractions/PACKAGE.md | 15 +- .../VectorData.UnitTests/.editorconfig | 6 + .../VectorData.UnitTests.csproj | 41 ++ ...ywordHybridSearchBuilderExtensionsTests.cs | 24 + ...BuilderServiceCollectionExtensionsTests.cs | 96 +++ .../KeywordHybridSearchBuilderTests.cs | 87 +++ ...ywordHybridSearchBuilderExtensionsTests.cs | 64 ++ .../LoggingKeywordHybridSearchTests.cs | 60 ++ ...rizableTextSearchBuilderExtensionsTests.cs | 64 ++ .../LoggingVectorizableTextSearchTests.cs | 56 ++ ...gVectorizedSearchBuilderExtensionsTests.cs | 64 ++ .../LoggingVectorizedSearchTests.cs | 56 ++ ...rizableTextSearchBuilderExtensionsTests.cs | 24 + ...BuilderServiceCollectionExtensionsTests.cs | 96 +++ 
.../VectorizableTextSearchBuilderTests.cs | 87 +++ .../VectorizedSearchBuilderExtensionsTests.cs | 24 + ...BuilderServiceCollectionExtensionsTests.cs | 96 +++ .../VectorizedSearchBuilderTests.cs | 87 +++ ...oggingVectorStoreBuilderExtensionsTests.cs | 64 ++ ...eRecordCollectionBuilderExtensionsTests.cs | 64 ++ ...LoggingVectorStoreRecordCollectionTests.cs | 245 +++++++ .../VectorStorage/LoggingVectorStoreTests.cs | 72 ++ .../VectorStoreBuilderExtensionsTests.cs | 24 + ...BuilderServiceCollectionExtensionsTests.cs | 96 +++ .../VectorStorage/VectorStoreBuilderTests.cs | 87 +++ ...eRecordCollectionBuilderExtensionsTests.cs | 24 + ...BuilderServiceCollectionExtensionsTests.cs | 96 +++ ...VectorStoreRecordCollectionBuilderTests.cs | 87 +++ dotnet/src/Connectors/VectorData/PACKAGE.md | 40 + .../Connectors/VectorData/VectorData.csproj | 62 ++ .../KeywordHybridSearchBuilder.cs | 87 +++ .../KeywordHybridSearchBuilderExtensions.cs | 25 + ...earchBuilderServiceCollectionExtensions.cs | 89 +++ .../LoggingKeywordHybridSearch.cs | 47 ++ ...ingKeywordHybridSearchBuilderExtensions.cs | 41 ++ .../LoggingVectorizableTextSearch.cs | 46 ++ ...VectorizableTextSearchBuilderExtensions.cs | 41 ++ .../VectorSearch/LoggingVectorizedSearch.cs | 46 ++ ...oggingVectorizedSearchBuilderExtensions.cs | 41 ++ .../VectorizableTextSearchBuilder.cs | 87 +++ ...VectorizableTextSearchBuilderExtensions.cs | 25 + ...earchBuilderServiceCollectionExtensions.cs | 89 +++ .../VectorSearch/VectorizedSearchBuilder.cs | 87 +++ .../VectorizedSearchBuilderExtensions.cs | 25 + ...earchBuilderServiceCollectionExtensions.cs | 89 +++ .../VectorStorage/LoggingVectorStore.cs | 53 ++ .../LoggingVectorStoreBuilderExtensions.cs | 41 ++ .../LoggingVectorStoreRecordCollection.cs | 144 ++++ ...rStoreRecordCollectionBuilderExtensions.cs | 41 ++ .../VectorStorage/VectorStoreBuilder.cs | 87 +++ .../VectorStoreBuilderExtensions.cs | 25 + ...StoreBuilderServiceCollectionExtensions.cs | 89 +++ .../VectorStoreRecordCollectionBuilder.cs | 87 +++ ...rStoreRecordCollectionBuilderExtensions.cs | 25 + ...ctionBuilderServiceCollectionExtensions.cs | 89 +++ dotnet/src/Connectors/VectorData/neticon.png | Bin 0 -> 7006 bytes .../Process.IntegrationTestRunner.Dapr.csproj | 1 - .../Extensions/GrpcKernelExtensions.cs | 4 +- .../Extensions/ApiManifestKernelExtensions.cs | 2 +- .../CopilotAgentPluginKernelExtensions.cs | 2 +- .../Extensions/OpenApiKernelExtensions.cs | 8 +- .../OpenApiKernelPluginFactory.cs | 10 +- ...ineconeVectorStoreRecordCollectionTests.cs | 42 -- .../Memory/Pinecone/PineconeAllTypes.cs | 64 -- .../Memory/Pinecone/PineconeHotel.cs | 40 - .../Pinecone/PineconeUserSecretsExtensions.cs | 37 - .../Pinecone/PineconeVectorStoreFixture.cs | 350 --------- ...ineconeVectorStoreRecordCollectionTests.cs | 684 ------------------ .../Pinecone/PineconeVectorStoreTests.cs | 54 -- .../PineconeApiKeySetConditionAttribute.cs | 21 - .../OpenAIChatCompletion_NonStreamingTests.cs | 26 +- .../IntegrationTests/IntegrationTests.csproj | 2 - .../src/Diagnostics/KernelVerify.cs | 78 ++ .../src/Diagnostics/LoggingExtensions.cs | 137 ++++ .../src/Diagnostics/Verify.cs | 65 +- .../src/System/EmptyKeyedServiceProvider.cs | 23 + .../Functions/KernelFunction.cs | 4 +- .../Functions/KernelFunctionMetadata.cs | 2 +- .../Functions/KernelPlugin.cs | 2 +- .../Functions/KernelFunctionFromMethod.cs | 8 +- .../Functions/KernelPluginFactory.cs | 2 +- .../SemanticKernel.Core.csproj | 1 - .../Utilities/ExceptionConverterTests.cs | 2 + .../Utilities/FakeLogger.cs | 27 + 
.../Utilities/LoggingExtensionsTests.cs | 239 ++++++ .../CRUD/PineconeAllSupportedTypesTests.cs | 74 ++ .../CRUD/PineconeBatchConformanceTests.cs | 12 + ...ineconeGenericDataModelConformanceTests.cs | 12 + .../CRUD/PineconeRecordConformanceTests.cs | 12 + .../PineconeCollectionConformanceTests.cs | 12 + .../Filter/PineconeBasicFilterTests.cs | 70 ++ .../PineconeIntegrationTests.csproj | 41 ++ .../Properties/AssemblyAttributes.cs | 5 + .../Support/PineconeAllTypes.cs | 102 +++ .../Support/PineconeFixture.cs | 10 + .../PineconeGenericDataModelFixture.cs | 10 + .../Support/PineconeSimpleModelFixture.cs | 10 + .../Support/PineconeTestStore.cs | 141 ++++ ...orSearchDistanceFunctionComplianceTests.cs | 49 ++ .../CRUD/SqlServerBatchConformanceTests.cs | 62 ++ .../SqlServerCommandBuilderTests.cs | 15 +- .../CRUD/BatchConformanceTests.cs | 2 +- ...orSearchDistanceFunctionComplianceTests.cs | 56 +- python/samples/concepts/README.md | 3 - .../concepts/search/bing_plugin_examples.py | 121 ---- .../concepts/search/bing_search_plugin.py | 69 -- .../concepts/search/bing_text_search.py | 48 -- .../search/bing_text_search_as_plugin.py | 13 +- .../concepts/search/google_search_plugin.py | 92 --- .../search/google_text_search_as_plugin.py | 5 +- python/samples/demos/README.md | 2 + .../demos/copilot_studio_agent/.env.sample | 2 + .../demos/copilot_studio_agent/README.md | 50 ++ .../demos/copilot_studio_agent/image.png | Bin 0 -> 83821 bytes .../demos/copilot_studio_agent/src/chat.py | 44 ++ .../src/direct_line_agent.py | 236 ++++++ .../copilot_studio_agent/src/requirements.txt | 4 + .../demos/copilot_studio_skill/README.md | 99 +++ .../demos/copilot_studio_skill/azure.yaml | 11 + .../demos/copilot_studio_skill/image.png | Bin 0 -> 138093 bytes .../copilot_studio_skill/infra/aca.bicep | 115 +++ .../copilot_studio_skill/infra/acr.bicep | 29 + .../copilot_studio_skill/infra/appin.bicep | 46 ++ .../copilot_studio_skill/infra/bot.bicep | 37 + .../infra/fetch-container-image.bicep | 8 + .../copilot_studio_skill/infra/main.bicep | 141 ++++ .../infra/main.parameters.json | 42 ++ .../copilot_studio_skill/infra/openAI.bicep | 20 + .../copilot_studio_skill/infra/uami.bicep | 13 + .../src/api/.dockerignore | 47 ++ .../copilot_studio_skill/src/api/adapter.py | 74 ++ .../demos/copilot_studio_skill/src/api/app.py | 55 ++ .../copilot_studio_skill/src/api/auth.py | 41 ++ .../demos/copilot_studio_skill/src/api/bot.py | 79 ++ .../copilot_studio_skill/src/api/config.py | 45 ++ .../src/api/copilot-studio.manifest.json | 25 + .../copilot_studio_skill/src/api/dockerfile | 23 + .../src/api/requirements.txt | 4 + .../src/api/sk_conversation_agent.py | 10 + .../step7_azure_ai_agent_retrieval.py | 8 +- .../ai/open_ai/services/azure_realtime.py | 1 + .../functions/kernel_function.py | 6 +- .../functions/kernel_function_from_method.py | 6 +- .../functions/kernel_parameter_metadata.py | 2 +- python/tests/samples/test_concepts.py | 7 - .../test_agent_content_generation.py | 2 +- .../ai/ollama/services/test_utils.py | 240 ++++++ .../test_kernel_function_from_method.py | 22 + .../test_kernel_function_from_prompt.py | 36 + .../test_dapr_actor_registration.py | 134 ++++ 204 files changed, 8178 insertions(+), 2140 deletions(-) create mode 100644 docs/decisions/00NN-hybrid-search.md create mode 100644 dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWebSearch.cs create mode 100644 dotnet/samples/Concepts/Memory/VectorStore_Telemetry.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs 
delete mode 100644 dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionCreateMapping.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/.editorconfig create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderServiceCollectionExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensionsTests.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderTests.cs create mode 100644 dotnet/src/Connectors/VectorData/PACKAGE.md create mode 100644 dotnet/src/Connectors/VectorData/VectorData.csproj create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilder.cs create mode 
100644 dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearch.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearchBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearch.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearchBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearch.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearchBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilder.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilder.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStore.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollection.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilder.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderServiceCollectionExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilder.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData/neticon.png delete mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/CommonPineconeVectorStoreRecordCollectionTests.cs delete mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeAllTypes.cs delete mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeHotel.cs delete mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeUserSecretsExtensions.cs delete mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreFixture.cs delete mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs delete mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreTests.cs delete mode 100644 dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/Xunit/PineconeApiKeySetConditionAttribute.cs create mode 100644 dotnet/src/InternalUtilities/src/Diagnostics/KernelVerify.cs 
create mode 100644 dotnet/src/InternalUtilities/src/Diagnostics/LoggingExtensions.cs create mode 100644 dotnet/src/InternalUtilities/src/System/EmptyKeyedServiceProvider.cs create mode 100644 dotnet/src/SemanticKernel.UnitTests/Utilities/FakeLogger.cs create mode 100644 dotnet/src/SemanticKernel.UnitTests/Utilities/LoggingExtensionsTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeAllSupportedTypesTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeBatchConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeGenericDataModelConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeRecordConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Collections/PineconeCollectionConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicFilterTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/PineconeIntegrationTests.csproj create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Properties/AssemblyAttributes.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeAllTypes.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeGenericDataModelFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeSimpleModelFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeTestStore.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/VectorSearch/PineconeVectorSearchDistanceFunctionComplianceTests.cs delete mode 100644 python/samples/concepts/search/bing_plugin_examples.py delete mode 100644 python/samples/concepts/search/bing_search_plugin.py delete mode 100644 python/samples/concepts/search/bing_text_search.py delete mode 100644 python/samples/concepts/search/google_search_plugin.py create mode 100644 python/samples/demos/copilot_studio_agent/.env.sample create mode 100644 python/samples/demos/copilot_studio_agent/README.md create mode 100644 python/samples/demos/copilot_studio_agent/image.png create mode 100644 python/samples/demos/copilot_studio_agent/src/chat.py create mode 100644 python/samples/demos/copilot_studio_agent/src/direct_line_agent.py create mode 100644 python/samples/demos/copilot_studio_agent/src/requirements.txt create mode 100644 python/samples/demos/copilot_studio_skill/README.md create mode 100644 python/samples/demos/copilot_studio_skill/azure.yaml create mode 100644 python/samples/demos/copilot_studio_skill/image.png create mode 100644 python/samples/demos/copilot_studio_skill/infra/aca.bicep create mode 100644 python/samples/demos/copilot_studio_skill/infra/acr.bicep create mode 100644 python/samples/demos/copilot_studio_skill/infra/appin.bicep create mode 100644 python/samples/demos/copilot_studio_skill/infra/bot.bicep create mode 100644 python/samples/demos/copilot_studio_skill/infra/fetch-container-image.bicep create mode 100644 python/samples/demos/copilot_studio_skill/infra/main.bicep create mode 100644 
python/samples/demos/copilot_studio_skill/infra/main.parameters.json create mode 100644 python/samples/demos/copilot_studio_skill/infra/openAI.bicep create mode 100644 python/samples/demos/copilot_studio_skill/infra/uami.bicep create mode 100644 python/samples/demos/copilot_studio_skill/src/api/.dockerignore create mode 100644 python/samples/demos/copilot_studio_skill/src/api/adapter.py create mode 100644 python/samples/demos/copilot_studio_skill/src/api/app.py create mode 100644 python/samples/demos/copilot_studio_skill/src/api/auth.py create mode 100644 python/samples/demos/copilot_studio_skill/src/api/bot.py create mode 100644 python/samples/demos/copilot_studio_skill/src/api/config.py create mode 100644 python/samples/demos/copilot_studio_skill/src/api/copilot-studio.manifest.json create mode 100644 python/samples/demos/copilot_studio_skill/src/api/dockerfile create mode 100644 python/samples/demos/copilot_studio_skill/src/api/requirements.txt create mode 100644 python/samples/demos/copilot_studio_skill/src/api/sk_conversation_agent.py create mode 100644 python/tests/unit/connectors/ai/ollama/services/test_utils.py create mode 100644 python/tests/unit/processes/dapr_runtime/test_dapr_actor_registration.py diff --git a/.github/.linkspector.yml b/.github/.linkspector.yml index 89cf32cfb050..6bde302ae51a 100644 --- a/.github/.linkspector.yml +++ b/.github/.linkspector.yml @@ -7,10 +7,14 @@ ignorePatterns: - pattern: "./issues" - pattern: "./discussions" - pattern: "./pulls" - - pattern: "^http://localhost" - - pattern: "^https://localhost" - - pattern: "^https://platform.openai.com" - - pattern: "^https://outlook.office.com/bookings" + - pattern: "https:\/\/platform.openai.com" + - pattern: "https:\/\/outlook.office.com/bookings" +excludedDirs: + # Folders which include links to localhost, since it's not ignored with regular expressions + - ./python/samples/demos/telemetry + - ./python/samples/demos/process_with_dapr + - ./dotnet/samples/Demos/ProcessWithDapr + - ./dotnet/samples/Demos/CopilotAgentPlugins baseUrl: https://github.com/microsoft/semantic-kernel/ aliveStatusCodes: - 200 diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index aa95f8a37010..fa41222416ab 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -32,6 +32,8 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v4 + with: + persist-credentials: false # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL diff --git a/.github/workflows/dotnet-build-and-test.yml b/.github/workflows/dotnet-build-and-test.yml index 505538145810..55f9f2dd37d3 100644 --- a/.github/workflows/dotnet-build-and-test.yml +++ b/.github/workflows/dotnet-build-and-test.yml @@ -30,6 +30,8 @@ jobs: dotnetChanges: ${{ steps.filter.outputs.dotnet }} steps: - uses: actions/checkout@v4 + with: + persist-credentials: false - uses: dorny/paths-filter@v2 id: filter with: @@ -68,6 +70,8 @@ jobs: environment: ${{ matrix.environment }} steps: - uses: actions/checkout@v4 + with: + persist-credentials: false - name: Setup dotnet ${{ matrix.dotnet }} uses: actions/setup-dotnet@v3 with: diff --git a/.github/workflows/dotnet-ci.yml b/.github/workflows/dotnet-ci.yml index 70fdb2807bbb..4b385b1f438f 100644 --- a/.github/workflows/dotnet-ci.yml +++ b/.github/workflows/dotnet-ci.yml @@ -27,6 +27,7 @@ jobs: - uses: actions/checkout@v4 with: clean: true + persist-credentials: false - name: Find solutions shell: bash @@ -74,6 +75,7 @@ jobs: - uses: actions/checkout@v4 with: clean: true + persist-credentials: false - name: Setup .NET SDK ${{ matrix.dotnet-version }} uses: actions/setup-dotnet@v4 diff --git a/.github/workflows/dotnet-format.yml b/.github/workflows/dotnet-format.yml index 49cdd7a2827b..401d4ebd336d 100644 --- a/.github/workflows/dotnet-format.yml +++ b/.github/workflows/dotnet-format.yml @@ -37,6 +37,7 @@ jobs: uses: actions/checkout@v4 with: fetch-depth: 0 + persist-credentials: false - name: Get changed files id: changed-files diff --git a/.github/workflows/dotnet-integration-tests.yml b/.github/workflows/dotnet-integration-tests.yml index ff1c6e23b5a9..5cd6a02cc94a 100644 --- a/.github/workflows/dotnet-integration-tests.yml +++ b/.github/workflows/dotnet-integration-tests.yml @@ -26,6 +26,7 @@ jobs: if: ${{ github.event_name != 'pull_request' }} with: clean: true + persist-credentials: false - name: Setup .NET uses: actions/setup-dotnet@v4 diff --git a/.github/workflows/markdown-link-check.yml b/.github/workflows/markdown-link-check.yml index ca3dde9accfa..11c8f8db4172 100644 --- a/.github/workflows/markdown-link-check.yml +++ b/.github/workflows/markdown-link-check.yml @@ -14,6 +14,8 @@ jobs: # check out the latest version of the code steps: - uses: actions/checkout@v4 + with: + persist-credentials: false # Checks the status of hyperlinks in all files - name: Run linkspector diff --git a/.github/workflows/typos.yaml b/.github/workflows/typos.yaml index 99b81e261480..dd3362cc4081 100644 --- a/.github/workflows/typos.yaml +++ b/.github/workflows/typos.yaml @@ -21,6 +21,8 @@ jobs: steps: - name: Check out code uses: actions/checkout@v4 + with: + persist-credentials: false - name: Use custom config file uses: crate-ci/typos@master diff --git a/README.md b/README.md index 85f771ffb218..ee2694e58b53 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ - Python
[![Python package](https://img.shields.io/pypi/v/semantic-kernel)](https://pypi.org/project/semantic-kernel/) - .NET
- [![Nuget package](https://img.shields.io/nuget/vpre/Microsoft.SemanticKernel)](https://www.nuget.org/packages/Microsoft.SemanticKernel/)[![dotnet Docker](https://github.com/microsoft/semantic-kernel/actions/workflows/dotnet-ci-docker.yml/badge.svg?branch=main)](https://github.com/microsoft/semantic-kernel/actions/workflows/dotnet-ci-docker.yml)[![dotnet Windows](https://github.com/microsoft/semantic-kernel/actions/workflows/dotnet-ci-windows.yml/badge.svg?branch=main)](https://github.com/microsoft/semantic-kernel/actions/workflows/dotnet-ci-windows.yml) + [![Nuget package](https://img.shields.io/nuget/vpre/Microsoft.SemanticKernel)](https://www.nuget.org/packages/Microsoft.SemanticKernel/) ## Overview diff --git a/docs/PLUGINS.md b/docs/PLUGINS.md index 2407b01ce3e4..7926c645c1bd 100644 --- a/docs/PLUGINS.md +++ b/docs/PLUGINS.md @@ -1,5 +1,5 @@ # What are plugins? -This document has been moved to the Semantic Kernel Documentation site. You can find it by navigating to the [Using AI plugins in Semantic Kernel](https://learn.microsoft.com/en-us/semantic-kernel/ai-orchestration/plugins) page. +This document has been moved to the Semantic Kernel Documentation site. You can find it by navigating to the [What is a Plugin?](https://learn.microsoft.com/en-us/semantic-kernel/concepts/plugins) page. -To make an update on the page, file a PR on the [docs repo.](https://github.com/MicrosoftDocs/semantic-kernel-docs/blob/main/semantic-kernel/ai-orchestration/plugins.md) +To make an update on the page, file a PR on the [docs repo.](https://github.com/MicrosoftDocs/semantic-kernel-docs/blob/main/semantic-kernel/concepts/plugins/index.md) diff --git a/docs/PROMPT_TEMPLATE_LANGUAGE.md b/docs/PROMPT_TEMPLATE_LANGUAGE.md index 4a5c00784cdc..42201f7e8523 100644 --- a/docs/PROMPT_TEMPLATE_LANGUAGE.md +++ b/docs/PROMPT_TEMPLATE_LANGUAGE.md @@ -1,5 +1,5 @@ # SK Prompt Template Syntax -This document has been moved to the Semantic Kernel Documentation site. You can find it by navigating to the [Prompt template syntax](https://learn.microsoft.com/en-us/semantic-kernel/prompt-engineering/prompt-template-syntax) page. +This document has been moved to the Semantic Kernel Documentation site. You can find it by navigating to the [What are prompts?](https://learn.microsoft.com/en-us/semantic-kernel/concepts/prompts) page. 
-To make an update on the page, file a PR on the [docs repo.](https://github.com/MicrosoftDocs/semantic-kernel-docs/blob/main/semantic-kernel/prompt-engineering/prompt-template-syntax.md) +To make an update on the page, file a PR on the [docs repo.](https://github.com/MicrosoftDocs/semantic-kernel-docs/blob/main/semantic-kernel/concepts/prompts/index.md) diff --git a/docs/decisions/0008-support-generic-llm-request-settings.md b/docs/decisions/0008-support-generic-llm-request-settings.md index 6fae2fdf3ef5..f786884f5d9d 100644 --- a/docs/decisions/0008-support-generic-llm-request-settings.md +++ b/docs/decisions/0008-support-generic-llm-request-settings.md @@ -12,7 +12,7 @@ informed: ## Context and Problem Statement -The Semantic Kernel abstractions package includes a number of classes ([CompleteRequestSettings](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/CompleteRequestSettings.cs), [ChatRequestSettings](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatRequestSettings.cs) [PromptTemplateConfig.CompletionConfig](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/SemanticFunctions/PromptTemplateConfig.cs#L18C1-L82C6)) which are used to support: +The Semantic Kernel abstractions package includes a number of classes (`CompleteRequestSettings`, `ChatRequestSettings`, `PromptTemplateConfig.CompletionConfig`) which are used to support: 1. Passing LLM request settings when invoking an AI service 2. Deserialization of LLM requesting settings when loading the `config.json` associated with a Semantic Function diff --git a/docs/decisions/0065-realtime-api-clients.md b/docs/decisions/0065-realtime-api-clients.md index a27987aeaf00..3a746035641d 100644 --- a/docs/decisions/0065-realtime-api-clients.md +++ b/docs/decisions/0065-realtime-api-clients.md @@ -1767,4 +1767,4 @@ Example of events coming from a few seconds of conversation with the OpenAI Real [openai-realtime-api]: https://platform.openai.com/docs/guides/realtime -[google-gemini]: https://ai.google.dev/api/multimodal-live \ No newline at end of file +[google-gemini]: https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/multimodal-live \ No newline at end of file diff --git a/docs/decisions/00NN-hybrid-search.md b/docs/decisions/00NN-hybrid-search.md new file mode 100644 index 000000000000..486530972c1a --- /dev/null +++ b/docs/decisions/00NN-hybrid-search.md @@ -0,0 +1,395 @@ +--- +# These are optional elements. Feel free to remove any of them. +status: {proposed | rejected | accepted | deprecated | � | superseded by [ADR-0001](0001-madr-architecture-decisions.md)} +contact: westey-m +date: 2024-11-27 +deciders: {list everyone involved in the decision} +consulted: {list everyone whose opinions are sought (typically subject-matter experts); and with whom there is a two-way communication} +informed: {list everyone who is kept up-to-date on progress; and with whom there is a one-way communication} +--- + +# Support Hybrid Search in VectorStore abstractions + +## Context and Problem Statement + +In addition to simple vector search, many databases also support Hybrid search. +Hybrid search typically results in higher quality search results, and therefore the ability to do Hybrid search via VectorStore abstractions +is an important feature to add. + +The way in which Hybrid search is supported varies by database. 
The two most common ways of supporting hybrid search are: + +1. Using dense vector search and keyword/fulltext search in parallel, and then combining the results. +1. Using dense vector search and sparse vector search in parallel, and then combining the results. + +Sparse vectors are different from dense vectors in that they typically have many more dimensions, but with many of the dimensions being zero. +Sparse vectors, when used with text search, have a dimension for each word/token in a vocabulary, with the value indicating the importance of the word +in the source text. +The more common the word in a specific chunk of text, and the less common the word is in the corpus, the higher the value in the sparse vector. + +There are various mechanisms for generating sparse vectors, such as + +- [TF-IDF](https://en.wikipedia.org/wiki/Tf%E2%80%93idf) +- [SPLADE](https://www.pinecone.io/learn/splade/) +- [BGE-m3 sparse embedding model](https://huggingface.co/BAAI/bge-m3). +- [pinecone-sparse-english-v0](https://docs.pinecone.io/models/pinecone-sparse-english-v0) + +While these are supported well in Python, they are not well supported in .net today. +Adding support for generating sparse vectors is out of scope of this ADR. + +More background information: + +- [Background article from Qdrant about using sparse vectors for Hybrid Search](https://qdrant.tech/articles/sparse-vectors) +- [TF-IDF explainer for beginners](https://medium.com/@coldstart_coder/understanding-and-implementing-tf-idf-in-python-a325d1301484) + +ML.Net contains an implementation of TF-IDF that could be used to generate sparse vectors in .net. See [here](https://github.com/dotnet/machinelearning/blob/886e2ff125c0060f5a251056c7eb2a7d28738984/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ProduceWordBags.cs#L55-L105) for an example.
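
As a rough, non-normative sketch (not part of this proposal), the ML.Net `ProduceWordBags` transform from the sample linked above could be used along these lines to produce TF-IDF weighted bag-of-words vectors on the client; the class and column names here are invented for illustration.

```csharp
// Illustrative sketch only: client-side TF-IDF bag-of-words vectors via ML.Net.
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Transforms.Text;

var mlContext = new MLContext();

var documents = new[]
{
    new TextDocument { Text = "dense vectors capture semantic meaning" },
    new TextDocument { Text = "sparse vectors capture keyword importance" },
};
var data = mlContext.Data.LoadFromEnumerable(documents);

// ProduceWordBags tokenizes the text and emits one TF-IDF weighted value per vocabulary term,
// so most entries are zero for any single document.
var pipeline = mlContext.Transforms.Text.ProduceWordBags(
    outputColumnName: "SparseVector",
    inputColumnName: nameof(TextDocument.Text),
    weighting: NgramExtractingEstimator.WeightingCriteria.TfIdf);

var sparseVectors = pipeline
    .Fit(data)
    .Transform(data)
    .GetColumn<float[]>("SparseVector")
    .ToArray();

public sealed class TextDocument { public string Text { get; set; } = string.Empty; }
```

The output is a float array with mostly zero entries; it would still need to be converted to whatever indices/values representation a given database expects for sparse vectors.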
+ +### Hybrid search support in different databases + +|Feature|Azure AI Search|Weaviate|Redis|Chroma|Pinecone|PostgreSql|Qdrant|Milvus|Elasticsearch|CosmosDB NoSql|MongoDB| +|-|-|-|-|-|-|-|-|-|-|-|-| +|Hybrid search supported|Y|Y|N (No parallel execution with fusion)|N|Y|Y|Y|Y|Y|Y|Y| +|Hybrid search definition|Vector + FullText|[Vector + Keyword (BM25F)](https://weaviate.io/developers/weaviate/search/hybrid)|||[Vector + Sparse Vector for keywords](https://docs.pinecone.io/guides/get-started/key-features#hybrid-search)|[Vector + Keyword](https://jkatz05.com/post/postgres/hybrid-search-postgres-pgvector/)|[Vector + SparseVector / Keyword](https://qdrant.tech/documentation/concepts/hybrid-queries/)|[Vector + SparseVector](https://milvus.io/docs/multi-vector-search.md)|Vector + FullText|[Vector + Fulltext (BM25)](https://learn.microsoft.com/en-us/azure/cosmos-db/gen-ai/hybrid-search)|[Vector + FullText](https://www.mongodb.com/docs/atlas/atlas-search/tutorial/hybrid-search)| +|Fusion method configurable|N|Y|||?|Y|Y|Y|Y, but only one option|Y, but only one option|N| +|Fusion methods|[RRF](https://learn.microsoft.com/en-us/azure/search/hybrid-search-ranking)|Ranked/RelativeScore|||?|[Build your own](https://jkatz05.com/post/postgres/hybrid-search-postgres-pgvector/)|RRF / DBSF|[RRF / Weighted](https://milvus.io/docs/multi-vector-search.md)|[RRF](https://www.elastic.co/search-labs/tutorials/search-tutorial/vector-search/hybrid-search)|[RRF](https://learn.microsoft.com/en-us/azure/cosmos-db/nosql/query/rrf)|[RRF](https://www.mongodb.com/docs/atlas/atlas-search/tutorial/hybrid-search)| +|Hybrid Search Input Params|Vector + string|[Vector + string](https://weaviate.io/developers/weaviate/api/graphql/search-operators#hybrid)|||Vector + SparseVector|Vector + String|[Vector + SparseVector](https://qdrant.tech/documentation/concepts/hybrid-queries/)|[Vector + SparseVector](https://milvus.io/docs/multi-vector-search.md)|Vector + string|Vector + string array|Vector + string| +|Sparse Distance Function|n/a|n/a|||[dotproduct only for both dense and sparse, 1 setting for both](https://docs.pinecone.io/guides/data/understanding-hybrid-search#sparse-dense-workflow)|n/a|dotproduct|Inner Product|n/a|n/a|n/a| +|Sparse Indexing options|n/a|n/a|||no separate config to dense|n/a|ondisk / inmemory + IDF|[SPARSE_INVERTED_INDEX / SPARSE_WAND](https://milvus.io/docs/index.md?tab=sparse)|n/a|n/a|n/a| +|Sparse data model|n/a|n/a|||[indices & values arrays](https://docs.pinecone.io/guides/data/upsert-sparse-dense-vectors)|n/a|indices & values arrays|[sparse matrix / List of dict / list of tuples](https://milvus.io/docs/sparse_vector.md#Use-sparse-vectors-in-Milvus)|n/a|n/a|n/a| +|Keyword matching behavior|[Space Separated with SearchMode=any does OR, searchmode=all does AND](https://learn.microsoft.com/en-us/azure/search/search-lucene-query-architecture)|[Tokenization with split by space, affects ranking](https://weaviate.io/developers/weaviate/search/bm25)|||n/a|[Tokenization](https://www.postgresql.org/docs/current/textsearch-controls.html)|[

No FTS Index: Exact Substring match<br>FTS Index present: All words must be present
](https://qdrant.tech/documentation/concepts/filtering/#full-text-match)|n/a|[And/Or capabilities](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-match-bool-prefix-query.html)|-|[Allows multiple multi-word phrases with OR](https://www.mongodb.com/docs/atlas/atlas-search/phrase/) and [a single multi-word prhase where the words can be OR'd or AND'd](https://www.mongodb.com/docs/atlas/atlas-search/text/)| + +Glossary: + +- RRF = Reciprical Rank Fusion +- DBSF = Distribution-Based Score Fusion +- IDF = Inverse Document Frequency + +### Language required for Cosmos DB NoSQL full text search configuration + +Cosmos DB NoSQL requires a language to be specified for full text search and it requires full text search indexing for hybrid search to be enabled. +We therefore need to support a way of specifying the language when creating the index. + +Cosmos DB NoSQL is the only database from our sample that has a required setting of this type. + +|Feature|Azure AI Search|Weaviate|Redis|Chroma|Pinecone|PostgreSql|Qdrant|Milvus|Elasticsearch|CosmosDB NoSql|MongoDB| +|-|-|-|-|-|-|-|-|-|-|-|-| +|Requires FullTextSearch indexing for hybrid search|Y|Y|n/a|n/a|n/a|Y|N [optional](https://qdrant.tech/documentation/concepts/filtering/#full-text-match)|n/a|Y|Y|[Y](https://www.mongodb.com/docs/atlas/atlas-search/tutorial/hybrid-search/?msockid=04b550d92f2f619c271a45a42e066050#create-the-atlas-vector-search-and-fts-indexes)| +|Required FullTextSearch index options|None required, [many optional](https://learn.microsoft.com/en-us/rest/api/searchservice/indexes/create?view=rest-searchservice-2024-07-01&tabs=HTTP)|None required, [none optional](https://weaviate.io/developers/weaviate/concepts/indexing#collections-without-indexes)||||[language required](https://jkatz05.com/post/postgres/hybrid-search-postgres-pgvector/)|none required, [some optional](https://qdrant.tech/documentation/concepts/indexing/#full-text-index)||None required, [many optional](https://elastic.github.io/elasticsearch-net/8.16.3/api/Elastic.Clients.Elasticsearch.Mapping.TextProperty.html)|Language Required|None required, [many optional](https://www.mongodb.com/docs/atlas/atlas-search/field-types/string-type/#configure-fts-field-type-field-properties)| + +### Keyword Search interface options + +Each DB has different keyword search capabilities. Some only support a very basic interface when it comes to listing keywords for hybrid search. The following table is to list the compatibility of each DB with a specific keyword public interface we may want to support. + +|Feature|Azure AI Search|Weaviate|PostgreSql|Qdrant|Elasticsearch|CosmosDB NoSql|MongoDB| +|-|-|-|-|-|-|-|-| +|

string[] keyword<br>One word per element<br>Any matching word boosts ranking.
|Y|Y (have to join with spaces)|[Y (have to join with spaces)](https://www.postgresql.org/docs/current/textsearch-controls.html)|Y (via filter with multiple OR'd matches)|Y|Y|[Y (have to join with spaces)](https://www.mongodb.com/docs/drivers/node/current/fundamentals/crud/read-operations/text/)| +|

string[] keyword<br>One or more words per element<br>All words in a single element have to be present to boost the ranking.
|Y|N|Y|Y (via filter with multiple OR'd matches and FTS Index)|-|N|N| +|

string[] keyword<br>One or more words per element<br>Multiple words in a single element form a phrase that must match exactly to boost the ranking.
|Y|N|Y|Only via filter with multiple OR'd matches and NO Index|-|N|Y| +|

string keyword<br>Space separated words<br>Any matching word boosts ranking.
|Y|Y|Y|N (would need to split words)|-|N (would need to split words)|Y| + +### Naming Options + +|Interface Name|Method Name|Parameters|Options Class Name|Keyword Property Selector|Dense Vector Property Selector| +|-|-|-|-|-|-| +|KeywordVectorizedHybridSearch|KeywordVectorizedHybridSearch|string[] + Dense Vector|KeywordVectorizedHybridSearchOptions|FullTextPropertyName|VectorPropertyName| +|SparseVectorizedHybridSearch|SparseVectorizedHybridSearch|Sparse Vector + Dense Vector|SparseVectorizedHybridSearchOptions|SparseVectorPropertyName|VectorPropertyName| +|KeywordVectorizableTextHybridSearch|KeywordVectorizableTextHybridSearch|string[] + string|KeywordVectorizableTextHybridSearchOptions|FullTextPropertyName|VectorPropertyName| +|SparseVectorizableTextHybridSearch|SparseVectorizableTextHybridSearch|string[] + string|SparseVectorizableTextHybridSearchOptions|SparseVectorPropertyName|VectorPropertyName| + +|Interface Name|Method Name|Parameters|Options Class Name|Keyword Property Selector|Dense Vector Property Selector| +|-|-|-|-|-|-| +|KeywordVectorizedHybridSearch|HybridSearch|string[] + Dense Vector|KeywordVectorizedHybridSearchOptions|FullTextPropertyName|VectorPropertyName| +|SparseVectorizedHybridSearch|HybridSearch|Sparse Vector + Dense Vector|SparseVectorizedHybridSearchOptions|SparseVectorPropertyName|VectorPropertyName| +|KeywordVectorizableTextHybridSearch|HybridSearch|string[] + string|KeywordVectorizableTextHybridSearchOptions|FullTextPropertyName|VectorPropertyName| +|SparseVectorizableTextHybridSearch|HybridSearch|string[] + string|SparseVectorizableTextHybridSearchOptions|SparseVectorPropertyName|VectorPropertyName| + +|Interface Name|Method Name|Parameters|Options Class Name|Keyword Property Selector|Dense Vector Property Selector| +|-|-|-|-|-|-| +|HybridSearchWithKeywords|HybridSearch|string[] + Dense Vector|HybridSearchOptions|FullTextPropertyName|VectorPropertyName| +|HybridSearchWithSparseVector|HybridSearchWithSparseVector|Sparse Vector + Dense Vector|HybridSearchWithSparseVectorOptions|SparseVectorPropertyName|VectorPropertyName| +|HybridSearchWithKeywordsAndVectorizableText|HybridSearch|string[] + string|HybridSearchOptions|FullTextPropertyName|VectorPropertyName| +|HybridSearchWithVectorizableKeywordsAndText|HybridSearchWithSparseVector|string[] + string|HybridSearchWithSparseVectorOptions|SparseVectorPropertyName|VectorPropertyName| + +|Area|Type of search|Method Name| +|-|-|-| +|**Non-vector Search**||| +|Non-vector Search||Search| +|**Vector Search**||| +|Vector Search|With Vector|VectorSearch| +|Vector Search|With Vectorizable Text (string)|VectorSearchWithText| +|Vector Search|With Vectorizable Image (string/byte[]/other)|VectorSearchWithImage| +|**Hybrid Search**||| +|Hybrid Search|With DenseVector and string[] keywords|HybridSearch| +|Hybrid Search|With vectorizable string and string[] keywords|HybridSearch| +|Hybrid Search|With DenseVector and SparseVector|HybridSearchWithSparseVector| +|Hybrid Search|With vectorizable string and sparse vectorisable string[] keywords|HybridSearchWithSparseVector| + +### Keyword based hybrid search + +```csharp +interface IKeywordVectorizedHybridSearch +{ + Task> KeywordVectorizedHybridSearch( + TVector vector, + ICollection keywords, + KeywordVectorizedHybridSearchOptions options, + CancellationToken cancellationToken); +} + +class KeywordVectorizedHybridSearchOptions +{ + // The name of the property to target the vector search against. + public string? 
VectorPropertyName { get; init; } + + // The name of the property to target the text search against. + public string? FullTextPropertyName { get; init; } + + public VectorSearchFilter? Filter { get; init; } + public int Top { get; init; } = 3; + public int Skip { get; init; } = 0; + public bool IncludeVectors { get; init; } = false; + public bool IncludeTotalCount { get; init; } = false; +} +``` + +### Sparse Vector based hybrid search + +```csharp +interface ISparseVectorizedHybridSearch +{ + Task> SparseVectorizedHybridSearch( + TVector vector, + TSparseVector sparsevector, + SparseVectorizedHybridSearchOptions options, + CancellationToken cancellationToken); +} + +class SparseVectorizedHybridSearchOptions +{ + // The name of the property to target the dense vector search against. + public string? VectorPropertyName { get; init; } + // The name of the property to target the sparse vector search against. + public string? SparseVectorPropertyName { get; init; } + + public VectorSearchFilter? Filter { get; init; } + public int Top { get; init; } = 3; + public int Skip { get; init; } = 0; + public bool IncludeVectors { get; init; } = false; + public bool IncludeTotalCount { get; init; } = false; +} +``` + +### Keyword Vectorizable text based hybrid search + +```csharp +interface IKeywordVectorizableHybridSearch +{ + Task> KeywordVectorizableHybridSearch( + string searchText, + ICollection keywords, + KeywordVectorizableHybridSearchOptions options = default, + CancellationToken cancellationToken = default); +} + +class KeywordVectorizableHybridSearchOptions +{ + // The name of the property to target the dense vector search against. + public string? VectorPropertyName { get; init; } + // The name of the property to target the text search against. + public string? FullTextPropertyName { get; init; } + + public VectorSearchFilter? Filter { get; init; } + public int Top { get; init; } = 3; + public int Skip { get; init; } = 0; + public bool IncludeVectors { get; init; } = false; + public bool IncludeTotalCount { get; init; } = false; +} +``` + +### Sparse Vector based Vectorizable text hybrid search + +```csharp +interface ISparseVectorizableTextHybridSearch +{ + Task> SparseVectorizableTextHybridSearch( + string searchText, + ICollection keywords, + SparseVectorizableTextHybridSearchOptions options = default, + CancellationToken cancellationToken = default); +} + +class SparseVectorizableTextHybridSearchOptions +{ + // The name of the property to target the dense vector search against. + public string? VectorPropertyName { get; init; } + // The name of the property to target the sparse vector search against. + public string? SparseVectorPropertyName { get; init; } + + public VectorSearchFilter? Filter { get; init; } + public int Top { get; init; } = 3; + public int Skip { get; init; } = 0; + public bool IncludeVectors { get; init; } = false; + public bool IncludeTotalCount { get; init; } = false; +} +``` + +## Decision Drivers + +- Support for generating sparse vectors is required to make sparse vector based hybrid search viable. +- Multiple vectors per record scenarios need to be supported. +- No database in our evaluation set have been identified as supporting converting text to sparse vectors in the database on upsert and storing those sparse vectors in a retrievable field. Of course some of these DBs may use sparse vectors internally to implement keyword search, without exposing them to the caller. + +## Scoping Considered Options + +### 1. 
Keyword Hybrid Search Only + +Only implement KeywordVectorizedHybridSearch & KeywordVectorizableTextHybridSearch for now, until +we can add support for generating sparse vectors. + +### 2. Keyword and SparseVectorized Hybrid Search + +Implement KeywordVectorizedHybridSearch & KeywordVectorizableTextHybridSearch, plus SparseVectorizedHybridSearch but not +SparseVectorizableTextHybridSearch, since no database in our evaluation set supports generating sparse vectors in the database. +This will require us to produce code that can generate sparse vectors from text. + +### 3. All abovementioned Hybrid Search + +Create all four interfaces and provide an implementation of SparseVectorizableTextHybridSearch that +generates the sparse vector in the client code. +This will require us to produce code that can generate sparse vectors from text. + +### 4. Generalized Hybrid Search + +Some databases support a more generalized version of hybrid search, where you can take two (or sometimes more) searches of any type and combine the results of these using your chosen fusion method. +You can implement Vector + Keyword search using this more generalized search. +For databases that support only Vector + Keyword hybrid search though, it is not possible to implement the generalized hybrid search on top of those databases. + +## PropertyName Naming Considered Options + +### 1. Explicit Dense naming + +DenseVectorPropertyName +SparseVectorPropertyName + +DenseVectorPropertyName +FullTextPropertyName + +- Pros: This is more explicit, considering that there are also sparse vectors involved. +- Cons: It is inconsistent with the naming in the non-hybrid vector search. + +### 2. Implicit Dense naming + +VectorPropertyName +SparseVectorPropertyName + +VectorPropertyName +FullTextPropertyName + +- Pros: This is consistent with the naming in the non-hybrid vector search. +- Cons: It is internally inconsistent, i.e. we have sparse vector, but for dense it's just vector. + +## Keyword splitting Considered Options + +### 1. Accept Split keywords in interface + +Accept an ICollection of string where each value is a separate keyword. +A version that takes a single keyword and calls the `ICollection` version can also be provided as an extension method. + +```csharp + Task> KeywordVectorizedHybridSearch( + TVector vector, + ICollection keywords, + KeywordVectorizedHybridSearchOptions options, + CancellationToken cancellationToken); +``` + +- Pros: Easier to use in the connector if the underlying DB requires split keywords. +- Pros: Only solution broadly supported, see comparison table above. + +### 2. Accept single string in interface + +Accept a single string containing all the keywords. + +```csharp + Task> KeywordVectorizedHybridSearch( + TVector vector, + string keywords, + KeywordVectorizedHybridSearchOptions options, + CancellationToken cancellationToken); +``` + +- Pros: Easier for a user to use, since they don't need to do any keyword splitting. +- Cons: We don't have the capabilities to properly sanitise the string, e.g. splitting words appropriately for the language, and potentially removing filler words. + +### 3. Accept either in interface + +Accept either option and either combine or split the keywords in the connector as needed by the underlying db.
+ +```csharp + Task> KeywordVectorizedHybridSearch( + TVector vector, + ICollection keywords, + KeywordVectorizedHybridSearchOptions options, + CancellationToken cancellationToken); + Task> KeywordVectorizedHybridSearch( + TVector vector, + string keywords, + KeywordVectorizedHybridSearchOptions options, + CancellationToken cancellationToken); +``` + +- Pros: Easier for a user to use, since they can pick whichever suits them better +- Cons: We have to still convert to/from the internal presentation by either combining keywords or splitting them. +- Cons: We don't have the capabilities to properly sanitise the single string, e.g. splitting words appropriately for the language, and potentially removing filler words. + +### 4. Accept either in interface but throw for not supported + +Accept either option but throw for the one not supported by the underlying DB. + +- Pros: Easier for us to implement. +- Cons: Harder for users to use. + +### 5. Separate interfaces for each + +Create a separate interface for the Enumerable and single string options, and only implement the one that is supported by the underlying system for each db. + +- Pros: Easier for us to implement. +- Cons: Harder for users to use. + +## Full text search index mandatory configuration Considered Options + +Cosmos DB NoSQL requires a language to be specified when creating a full text search index. +Other DBs have optional values that can be set. + +### 1. Pass option in via collection options + +This option does the minimum by just adding a language option to the collection's options class. +This language would then be used for all full text search indexes created by the collection. + +- Pros: Simplest to implement +- Cons: Doesn't allow multiple languages to be used for different fields in one record +- Cons: Doesn't add support for all full text search options for all dbs + +### 2. Add extensions for RecordDefinition and data model Attributes + +Add a property bag to the VectorStoreRecordProperty allowing database specific metadata to be provided. +Add an abstract base attribute that can be inherited from that allows extra metadata to be added to the data model, +where each database has their own attributes to specify their settings, with a method to convert the contents to +the property bag required by VectorStoreRecordProperty. + +- Pros: Allows multiple languages to be used for different fields in one record +- Pros: Allows other DBs to add their own settings via their own attributes +- Cons: More work to implement + +## Decision Outcome + +### Scoping + +Chosen option "1. Keyword Hybrid Search Only", since enterprise support for generating sparse vectors is poor and without an end to end story, the value is low. + +### PropertyName Naming + +Chosen option "2. Implicit Dense naming", since it is consistent with the existing vector search options naming. + +### Keyword splitting + +Chosen option "1. Accept Split keywords in interface", since it is the only one with broad support amongst databases. 
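
To show how the chosen options could fit together, the following is a hypothetical, non-normative usage sketch combining keyword hybrid search, implicit dense naming and split keywords; the `Hotel` record, the generic signature and all names are placeholders rather than a final API.

```csharp
// Hypothetical sketch of the chosen options; interface shape, names and record type are placeholders.
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.VectorData;

public static class HybridSearchUsageSketch
{
    public static async Task<VectorSearchResults<Hotel>> SearchHotelsAsync(
        IKeywordVectorizedHybridSearch<Hotel> collection,
        ReadOnlyMemory<float> searchVector,
        CancellationToken cancellationToken = default)
    {
        var options = new KeywordVectorizedHybridSearchOptions
        {
            // Implicit dense naming: the dense vector property is selected via VectorPropertyName.
            VectorPropertyName = nameof(Hotel.DescriptionEmbedding),
            FullTextPropertyName = nameof(Hotel.Description),
            Top = 3,
        };

        // Split keywords: one keyword per element; any matching keyword boosts the ranking.
        return await collection.KeywordVectorizedHybridSearch(
            searchVector,
            new[] { "beachfront", "pool" },
            options,
            cancellationToken);
    }
}

public sealed class Hotel
{
    public string Description { get; set; } = string.Empty;
    public ReadOnlyMemory<float> DescriptionEmbedding { get; set; }
}
```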
diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props index b3941784aa08..290c5501bc07 100644 --- a/dotnet/Directory.Packages.props +++ b/dotnet/Directory.Packages.props @@ -15,7 +15,7 @@ - + @@ -57,7 +57,7 @@ - + @@ -65,7 +65,7 @@ - + diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index f9e83098f6ba..8ff421ca629d 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -112,12 +112,17 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Diagnostics", "Diagnostics" src\InternalUtilities\src\Diagnostics\ExceptionExtensions.cs = src\InternalUtilities\src\Diagnostics\ExceptionExtensions.cs src\InternalUtilities\src\Diagnostics\ExperimentalAttribute.cs = src\InternalUtilities\src\Diagnostics\ExperimentalAttribute.cs src\InternalUtilities\src\Diagnostics\IsExternalInit.cs = src\InternalUtilities\src\Diagnostics\IsExternalInit.cs + src\InternalUtilities\src\Diagnostics\KernelVerify.cs = src\InternalUtilities\src\Diagnostics\KernelVerify.cs src\InternalUtilities\src\Diagnostics\NullableAttributes.cs = src\InternalUtilities\src\Diagnostics\NullableAttributes.cs src\InternalUtilities\src\Diagnostics\RequiresDynamicCodeAttribute.cs = src\InternalUtilities\src\Diagnostics\RequiresDynamicCodeAttribute.cs src\InternalUtilities\src\Diagnostics\RequiresUnreferencedCodeAttribute.cs = src\InternalUtilities\src\Diagnostics\RequiresUnreferencedCodeAttribute.cs src\InternalUtilities\src\Diagnostics\UnconditionalSuppressMessageAttribute.cs = src\InternalUtilities\src\Diagnostics\UnconditionalSuppressMessageAttribute.cs src\InternalUtilities\src\Diagnostics\UnreachableException.cs = src\InternalUtilities\src\Diagnostics\UnreachableException.cs src\InternalUtilities\src\Diagnostics\Verify.cs = src\InternalUtilities\src\Diagnostics\Verify.cs + src\InternalUtilities\src\Diagnostics\ActivityExtensions.cs = src\InternalUtilities\src\Diagnostics\ActivityExtensions.cs + src\InternalUtilities\src\Diagnostics\LoggingExtensions.cs = src\InternalUtilities\src\Diagnostics\LoggingExtensions.cs + src\InternalUtilities\src\Diagnostics\UnreachableException.cs = src\InternalUtilities\src\Diagnostics\UnreachableException.cs + src\InternalUtilities\src\Diagnostics\Verify.cs = src\InternalUtilities\src\Diagnostics\Verify.cs EndProjectSection EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Linq", "Linq", "{B00AD427-0047-4850-BEF9-BA8237EA9D8B}" @@ -485,8 +490,14 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Agents.Bedrock", "src\Agent EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ModelContextProtocol", "samples\Demos\ModelContextProtocol\ModelContextProtocol.csproj", "{B16AC373-3DA8-4505-9510-110347CD635D}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "VectorData.UnitTests", "src\Connectors\VectorData.UnitTests\VectorData.UnitTests.csproj", "{89FC596F-CB81-4733-829B-4527D0FFC291}" +EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SqlServerIntegrationTests", "src\VectorDataIntegrationTests\SqlServerIntegrationTests\SqlServerIntegrationTests.csproj", "{A5E6193C-8431-4C6E-B674-682CB41EAA0C}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "VectorData", "src\Connectors\VectorData\VectorData.csproj", "{8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "PineconeIntegrationTests", "src\VectorDataIntegrationTests\PineconeIntegrationTests\PineconeIntegrationTests.csproj", "{E9A74E0C-BC02-4DDD-A487-89847EDF8026}" +EndProject Global 
GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -1328,12 +1339,30 @@ Global {B16AC373-3DA8-4505-9510-110347CD635D}.Publish|Any CPU.Build.0 = Debug|Any CPU {B16AC373-3DA8-4505-9510-110347CD635D}.Release|Any CPU.ActiveCfg = Release|Any CPU {B16AC373-3DA8-4505-9510-110347CD635D}.Release|Any CPU.Build.0 = Release|Any CPU + {89FC596F-CB81-4733-829B-4527D0FFC291}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {89FC596F-CB81-4733-829B-4527D0FFC291}.Debug|Any CPU.Build.0 = Debug|Any CPU + {89FC596F-CB81-4733-829B-4527D0FFC291}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {89FC596F-CB81-4733-829B-4527D0FFC291}.Publish|Any CPU.Build.0 = Publish|Any CPU + {89FC596F-CB81-4733-829B-4527D0FFC291}.Release|Any CPU.ActiveCfg = Release|Any CPU + {89FC596F-CB81-4733-829B-4527D0FFC291}.Release|Any CPU.Build.0 = Release|Any CPU {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Debug|Any CPU.Build.0 = Debug|Any CPU {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Publish|Any CPU.ActiveCfg = Debug|Any CPU {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Publish|Any CPU.Build.0 = Debug|Any CPU {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Release|Any CPU.ActiveCfg = Release|Any CPU {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Release|Any CPU.Build.0 = Release|Any CPU + {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Publish|Any CPU.Build.0 = Publish|Any CPU + {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Release|Any CPU.Build.0 = Release|Any CPU + {E9A74E0C-BC02-4DDD-A487-89847EDF8026}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E9A74E0C-BC02-4DDD-A487-89847EDF8026}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E9A74E0C-BC02-4DDD-A487-89847EDF8026}.Publish|Any CPU.ActiveCfg = Release|Any CPU + {E9A74E0C-BC02-4DDD-A487-89847EDF8026}.Publish|Any CPU.Build.0 = Release|Any CPU + {E9A74E0C-BC02-4DDD-A487-89847EDF8026}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E9A74E0C-BC02-4DDD-A487-89847EDF8026}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -1515,7 +1544,11 @@ Global {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4} {8C658E1E-83C8-4127-B8BF-27A638A45DDD} = {6823CD5E-2ABE-41EB-B865-F86EC13F0CF9} {B16AC373-3DA8-4505-9510-110347CD635D} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} + {89FC596F-CB81-4733-829B-4527D0FFC291} = {5A7028A7-4DDF-4E4F-84A9-37CE8F8D7E89} + {A5E6193C-8431-4C6E-B674-682CB41EAA0C} = {4F381919-F1BE-47D8-8558-3187ED04A84F} + {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3} = {24503383-A8C4-4255-9998-28D70FE8E99A} {A5E6193C-8431-4C6E-B674-682CB41EAA0C} = {4F381919-F1BE-47D8-8558-3187ED04A84F} + {E9A74E0C-BC02-4DDD-A487-89847EDF8026} = {4F381919-F1BE-47D8-8558-3187ED04A84F} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83} diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs index 5ee813e34e2b..56eea431e4f9 100644 --- a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs +++ 
b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs @@ -214,7 +214,7 @@ public async Task ExampleWithFunctionCallingAsync() // Output // Ask: Can I have their emails? - // Response: Emily's email is emily@test.com and David's email is david@test.com. + // Response: Emily's email is emily@contoso.com and David's email is david@contoso.com. Console.WriteLine($"Ask: {ask}"); Console.WriteLine($"Response: {modelResult?.Message}"); } @@ -236,7 +236,7 @@ private static AzureSearchChatDataSource GetAzureSearchDataSource() /// /// Returns a collection of . /// - private static IReadOnlyList GetCitations(ChatMessageContent chatMessageContent) + private static IList GetCitations(ChatMessageContent chatMessageContent) { var message = chatMessageContent.InnerContent as OpenAI.Chat.ChatCompletion; var messageContext = message.GetMessageContext(); @@ -247,7 +247,7 @@ private static IReadOnlyList GetCitations(ChatMessageContent chatM /// /// Returns a collection of . /// - private static IReadOnlyList? GetCitations(StreamingChatMessageContent streamingContent) + private static IList? GetCitations(StreamingChatMessageContent streamingContent) { var message = streamingContent.InnerContent as OpenAI.Chat.StreamingChatCompletionUpdate; var messageContext = message?.GetMessageContext(); @@ -258,7 +258,7 @@ private static IReadOnlyList GetCitations(ChatMessageContent chatM /// /// Outputs a collection of . /// - private void OutputCitations(IReadOnlyList? citations) + private void OutputCitations(IList? citations) { if (citations is not null) { @@ -389,8 +389,8 @@ private sealed class DataPlugin { private readonly Dictionary _emails = new() { - ["Emily"] = "emily@test.com", - ["David"] = "david@test.com", + ["Emily"] = "emily@contoso.com", + ["David"] = "david@contoso.com", }; [KernelFunction] diff --git a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryInFunctions.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryInFunctions.cs index ffa8ae41db16..19177ef80c9d 100644 --- a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryInFunctions.cs +++ b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryInFunctions.cs @@ -120,13 +120,13 @@ public async Task UsingKernelArgumentsAndFilterOption2Async() }; // Send a request. - var result = await kernel.InvokePromptAsync("Send email to test@test.com", new(executionSettings)); + var result = await kernel.InvokePromptAsync("Send email to test@contoso.com", new(executionSettings)); Console.WriteLine($"Result: {result}"); // Output: // SendEmail - Chat History Message Count: 2 - // Result: Email has been sent to test@test.com. + // Result: Email has been sent to test@contoso.com. } #region private diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWebSearch.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWebSearch.cs new file mode 100644 index 000000000000..1a75898d1719 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWebSearch.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; + +namespace ChatCompletion; + +/// +/// These examples demonstrate how to do web search with OpenAI Chat Completion +/// +/// +/// Currently, web search is only supported with the following models: +/// +/// gpt-4o-search-preview +/// gpt-4o-mini-search-preview +/// +/// +public class OpenAI_ChatCompletioWebSearch(ITestOutputHelper output) : BaseTest(output) +{ + [Fact] + public async Task UsingChatCompletionWithWebSearchEnabled() + { + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); + + // Ensure you use a supported model + var modelId = "gpt-4o-mini-search-preview"; + var settings = new OpenAIPromptExecutionSettings + { + WebSearchOptions = new ChatWebSearchOptions() + }; + + Console.WriteLine($"======== Open AI - {nameof(UsingChatCompletionWithWebSearchEnabled)} ========"); + + OpenAIChatCompletionService chatService = new(modelId, TestConfiguration.OpenAI.ApiKey); + + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var result = await chatService.GetChatMessageContentAsync("What are the top 3 trending news currently", settings); + + // To retrieve the new annotations property from the result we need to use access the OpenAI.Chat.ChatCompletion directly + var chatCompletion = result.InnerContent as OpenAI.Chat.ChatCompletion; + + for (var i = 0; i < chatCompletion!.Annotations.Count; i++) + { + var annotation = chatCompletion!.Annotations[i]; + Console.WriteLine($"--- Annotation [{i + 1}] ---"); + Console.WriteLine($"Title: {annotation.WebResourceTitle}"); + Console.WriteLine($"Uri: {annotation.WebResourceUri}"); + } + } +} diff --git a/dotnet/samples/Concepts/Concepts.csproj b/dotnet/samples/Concepts/Concepts.csproj index 0ac3dc6a4586..0a8add50d2d2 100644 --- a/dotnet/samples/Concepts/Concepts.csproj +++ b/dotnet/samples/Concepts/Concepts.csproj @@ -79,6 +79,7 @@ + diff --git a/dotnet/samples/Concepts/Memory/VectorStore_Telemetry.cs b/dotnet/samples/Concepts/Memory/VectorStore_Telemetry.cs new file mode 100644 index 000000000000..2165c96aabb2 --- /dev/null +++ b/dotnet/samples/Concepts/Memory/VectorStore_Telemetry.cs @@ -0,0 +1,180 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Azure.Identity; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.InMemory; +using Microsoft.SemanticKernel.Embeddings; + +namespace Memory; + +/// +/// A simple example showing how to ingest data into a vector store and then use vector search to find related records to a given string +/// with enabled telemetry. +/// +public class VectorStore_Telemetry(ITestOutputHelper output) : BaseTest(output) +{ + [Fact] + public async Task LoggingManualRegistrationAsync() + { + // Create an embedding generation service. + var textEmbeddingGenerationService = new AzureOpenAITextEmbeddingGenerationService( + TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, + TestConfiguration.AzureOpenAIEmbeddings.Endpoint, + new AzureCliCredential()); + + // Manually construct an InMemory vector store with enabled logging. + var vectorStore = new InMemoryVectorStore() + .AsBuilder() + .UseLogging(this.LoggerFactory) + .Build(); + + await RunExampleAsync(textEmbeddingGenerationService, vectorStore); + + // Output: + // CreateCollectionIfNotExistsAsync invoked. 
+ // CreateCollectionIfNotExistsAsync completed. + // UpsertAsync invoked. + // UpsertAsync completed. + // UpsertAsync invoked. + // UpsertAsync completed. + // UpsertAsync invoked. + // UpsertAsync completed. + // VectorizedSearchAsync invoked. + // VectorizedSearchAsync completed. + + // Search string: What is an Application Programming Interface + // Result: Application Programming Interface. A set of rules and specifications that allow software components to communicate and exchange data. + } + + [Fact] + public async Task LoggingDependencyInjectionAsync() + { + var serviceCollection = new ServiceCollection(); + + // Add an embedding generation service. + serviceCollection.AddAzureOpenAITextEmbeddingGeneration( + TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, + TestConfiguration.AzureOpenAIEmbeddings.Endpoint, + new AzureCliCredential()); + + // Add InMemory vector store + serviceCollection.AddInMemoryVectorStore(); + + // Register InMemoryVectorStore with enabled logging. + serviceCollection + .AddVectorStore(s => s.GetRequiredService()) + .UseLogging(this.LoggerFactory); + + var services = serviceCollection.BuildServiceProvider(); + + var vectorStore = services.GetRequiredService(); + var textEmbeddingGenerationService = services.GetRequiredService(); + + await RunExampleAsync(textEmbeddingGenerationService, vectorStore); + + // Output: + // CreateCollectionIfNotExistsAsync invoked. + // CreateCollectionIfNotExistsAsync completed. + // UpsertAsync invoked. + // UpsertAsync completed. + // UpsertAsync invoked. + // UpsertAsync completed. + // UpsertAsync invoked. + // UpsertAsync completed. + // VectorizedSearchAsync invoked. + // VectorizedSearchAsync completed. + + // Search string: What is an Application Programming Interface + // Result: Application Programming Interface. A set of rules and specifications that allow software components to communicate and exchange data. + } + + private async Task RunExampleAsync( + ITextEmbeddingGenerationService textEmbeddingGenerationService, + IVectorStore vectorStore) + { + // Get and create collection if it doesn't exist. + var collection = vectorStore.GetCollection("skglossary"); + await collection.CreateCollectionIfNotExistsAsync(); + + // Create glossary entries and generate embeddings for them. + var glossaryEntries = CreateGlossaryEntries().ToList(); + var tasks = glossaryEntries.Select(entry => Task.Run(async () => + { + entry.DefinitionEmbedding = await textEmbeddingGenerationService.GenerateEmbeddingAsync(entry.Definition); + })); + await Task.WhenAll(tasks); + + // Upsert the glossary entries into the collection and return their keys. + var upsertedKeysTasks = glossaryEntries.Select(x => collection.UpsertAsync(x)); + var upsertedKeys = await Task.WhenAll(upsertedKeysTasks); + + // Search the collection using a vector search. + var searchString = "What is an Application Programming Interface"; + var searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); + var searchResult = await collection.VectorizedSearchAsync(searchVector, new() { Top = 1 }); + var resultRecords = await searchResult.Results.ToListAsync(); + + Console.WriteLine("Search string: " + searchString); + Console.WriteLine("Result: " + resultRecords.First().Record.Definition); + Console.WriteLine(); + } + + /// + /// Sample model class that represents a glossary entry. + /// + /// + /// Note that each property is decorated with an attribute that specifies how the property should be treated by the vector store. 
+ /// This allows us to create a collection in the vector store and upsert and retrieve instances of this class without any further configuration. + /// + private sealed class Glossary + { + [VectorStoreRecordKey] + public ulong Key { get; set; } + + [VectorStoreRecordData(IsFilterable = true)] + public string Category { get; set; } + + [VectorStoreRecordData] + public string Term { get; set; } + + [VectorStoreRecordData] + public string Definition { get; set; } + + [VectorStoreRecordVector(1536)] + public ReadOnlyMemory DefinitionEmbedding { get; set; } + } + + /// + /// Create some sample glossary entries. + /// + /// A list of sample glossary entries. + private static IEnumerable CreateGlossaryEntries() + { + yield return new Glossary + { + Key = 1, + Category = "External Definitions", + Term = "API", + Definition = "Application Programming Interface. A set of rules and specifications that allow software components to communicate and exchange data." + }; + + yield return new Glossary + { + Key = 2, + Category = "Core Definitions", + Term = "Connectors", + Definition = "Connectors allow you to integrate with various services provide AI capabilities, including LLM, AudioToText, TextToAudio, Embedding generation, etc." + }; + + yield return new Glossary + { + Key = 3, + Category = "External Definitions", + Term = "RAG", + Definition = "Retrieval Augmented Generation - a term that refers to the process of retrieving additional data to provide as context to an LLM to use when generating a response (completion) to a user’s question (prompt)." + }; + } +} diff --git a/dotnet/samples/Concepts/README.md b/dotnet/samples/Concepts/README.md index 2c213d423790..e59ec178c9ce 100644 --- a/dotnet/samples/Concepts/README.md +++ b/dotnet/samples/Concepts/README.md @@ -7,7 +7,7 @@ Down below you can find the code snippets that demonstrate the usage of many Sem You can run those tests using the IDE or the command line. 
To run the tests using the command line run the following command from the root of Concepts project: ```text -dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=NameSpace.TestClass.TestMethod" +dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=NameSpace.TestClass.TestMethod" ``` Example for `ChatCompletion/OpenAI_ChatCompletion.cs` file, targeting the `ChatPromptSync` test: @@ -21,17 +21,8 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom ### Agents - Different ways of using [`Agents`](./Agents/README.md) - [ComplexChat_NestedShopper](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs) -- [Legacy_AgentAuthoring](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/Legacy_AgentAuthoring.cs) -- [Legacy_AgentCharts](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/Legacy_AgentCharts.cs) -- [Legacy_AgentCollaboration](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/Legacy_AgentCollaboration.cs) -- [Legacy_AgentDelegation](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/Legacy_AgentDelegation.cs) -- [Legacy_AgentTools](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/Legacy_AgentTools.cs) -- [Legacy_Agents](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/Legacy_Agents.cs) -- [Legacy_ChatCompletionAgent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/Legacy_ChatCompletionAgent.cs) - [MixedChat_Agents](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs) - [OpenAIAssistant_ChartMaker](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs) -- [OpenAIAssistant_CodeInterpreter](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs) -- [OpenAIAssistant_Retrieval](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs) ### AudioToText - Different ways of using [`AudioToText`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/AudioToText/IAudioToTextService.cs) services to extract text from audio @@ -78,13 +69,14 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom - [MistralAI_ChatPrompt](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_ChatPrompt.cs) - [MistralAI_FunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_FunctionCalling.cs) - [MistralAI_StreamingFunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_StreamingFunctionCalling.cs) -- [MultipleProviders_ChatHistoryReducer](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MuiltipleProviders_ChatHistoryReducer.cs) +- [MultipleProviders_ChatHistoryReducer](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MultipleProviders_ChatHistoryReducer.cs) - 
[Ollama_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs) - [Ollama_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs) - [Onnx_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletion.cs) - [Onnx_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletionStreaming.cs) - [OpenAI_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs) - [OpenAI_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs) +- [OpenAI_ChatCompletionWebSearch](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWebSearch.cs) - [OpenAI_ChatCompletionWithReasoning](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithReasoning.cs) - [OpenAI_ChatCompletionWithVision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithVision.cs) - [OpenAI_CustomClient](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs) @@ -106,9 +98,8 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom - [AutoFunctionInvocationFiltering](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/AutoFunctionInvocationFiltering.cs) - [FunctionInvocationFiltering](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/FunctionInvocationFiltering.cs) -- [Legacy_KernelHooks](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/Legacy_KernelHooks.cs) - [MaxTokensWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/MaxTokensWithFilters.cs) -- [PIIDetectionWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/PIIDetectionWithFilters.cs) +- [PIIDetection](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/PIIDetection.cs) - [PromptRenderFiltering](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/PromptRenderFiltering.cs) - [RetryWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/RetryWithFilters.cs) - [TelemetryWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/TelemetryWithFilters.cs) @@ -232,7 +223,7 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom ### Option 1: Use Secret Manager Concept samples will require secrets and credentials, to access OpenAI, Azure OpenAI, -Bing and other resources. +Bing and other resources. We suggest using .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets) to avoid the risk of leaking secrets into the repository, branches and pull requests. @@ -251,6 +242,7 @@ dotnet user-secrets set "OpenAI:ApiKey" "..." ``` ### Option 2: Use Configuration File + 1. 
Create a `appsettings.Development.json` file next to the `Concepts.csproj` file. This file will be ignored by git, the content will not end up in pull requests, so it's safe for personal settings. Keep the file safe. 2. Edit `appsettings.Development.json` and set the appropriate configuration for the samples you are running. @@ -271,12 +263,13 @@ For example: "ChatDeploymentName": "gpt-4", "Endpoint": "https://contoso.openai.azure.com/", "ApiKey": "...." - }, + } // etc. } ``` ### Option 3: Use Environment Variables + You may also set the settings in your environment variables. The environment variables will override the settings in the `appsettings.Development.json` file. When setting environment variables, use a double underscore (i.e. "\_\_") to delineate between parent and child properties. For example: diff --git a/dotnet/samples/Demos/AIModelRouter/README.md b/dotnet/samples/Demos/AIModelRouter/README.md index 18c556db1e15..6ee34e609bec 100644 --- a/dotnet/samples/Demos/AIModelRouter/README.md +++ b/dotnet/samples/Demos/AIModelRouter/README.md @@ -7,7 +7,7 @@ This sample demonstrates how to implement an AI Model Router using Semantic Kern ## Semantic Kernel Features Used -- [Chat Completion Service](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) - Using the Chat Completion Service [OpenAI Connector implementation](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs) to generate responses from the LLM. +- [Chat Completion Service](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) - Using the Chat Completion Service [OpenAI Connector implementation](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIChatCompletionService.cs) to generate responses from the LLM. - [Filters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs), using to capture selected service and log in the console. ## Prerequisites diff --git a/dotnet/samples/Demos/BookingRestaurant/README.md b/dotnet/samples/Demos/BookingRestaurant/README.md index 34cefed54126..a0d60175b505 100644 --- a/dotnet/samples/Demos/BookingRestaurant/README.md +++ b/dotnet/samples/Demos/BookingRestaurant/README.md @@ -5,9 +5,9 @@ This sample provides a practical demonstration of how to leverage features from ## Semantic Kernel Features Used - [Plugin](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPlugin.cs) - Creating a Plugin from a native C# Booking class to be used by the Kernel to interact with Bookings API. -- [Chat Completion Service](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) - Using the Chat Completion Service [OpenAI Connector implementation](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs) to generate responses from the LLM. 
+- [Chat Completion Service](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) - Using the Chat Completion Service [OpenAI Connector implementation](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIChatCompletionService.cs) to generate responses from the LLM. - [Chat History](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs) Using the Chat History abstraction to create, update and retrieve chat history from Chat Completion Models. -- [Auto Function Calling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/AutoFunctionCalling/OpenAI_FunctionCalling.cs) Enables the LLM to have knowledge of current importedUsing the Function Calling feature automatically call the Booking Plugin from the LLM. +- [Auto Function Calling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCalling.cs) Enables the LLM to have knowledge of current importedUsing the Function Calling feature automatically call the Booking Plugin from the LLM. ## Prerequisites diff --git a/dotnet/samples/Demos/OpenAIRealtime/Program.cs b/dotnet/samples/Demos/OpenAIRealtime/Program.cs index e03ff7c69660..fb17b4bbfd3e 100644 --- a/dotnet/samples/Demos/OpenAIRealtime/Program.cs +++ b/dotnet/samples/Demos/OpenAIRealtime/Program.cs @@ -348,9 +348,8 @@ private static IEnumerable ConvertFunctions(Kernel kernel) { var toolDefinition = metadata.ToOpenAIFunction().ToFunctionDefinition(false); - yield return new ConversationFunctionTool() + yield return new ConversationFunctionTool(name: toolDefinition.FunctionName) { - Name = toolDefinition.FunctionName, Description = toolDefinition.FunctionDescription, Parameters = toolDefinition.FunctionParameters }; diff --git a/dotnet/samples/Demos/TimePlugin/README.md b/dotnet/samples/Demos/TimePlugin/README.md index 335d2429aaea..3ed1cd6fb97d 100644 --- a/dotnet/samples/Demos/TimePlugin/README.md +++ b/dotnet/samples/Demos/TimePlugin/README.md @@ -8,9 +8,9 @@ Here we have a simple Time Plugin created in C# that can be called from the AI M ## Semantic Kernel Features Used - [Plugin](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPlugin.cs) - Creating a Plugin from a native C# Booking class to be used by the Kernel to interact with Bookings API. -- [Chat Completion Service](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) - Using the Chat Completion Service [OpenAI Connector implementation](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs) to generate responses from the LLM. +- [Chat Completion Service](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) - Using the Chat Completion Service [OpenAI Connector implementation](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIChatCompletionService.cs) to generate responses from the LLM. 
- [Chat History](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs) Using the Chat History abstraction to create, update and retrieve chat history from Chat Completion Models. -- [Auto Function Calling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/AutoFunctionCalling/OpenAI_FunctionCalling.cs) Enables the LLM to have knowledge of current importedUsing the Function Calling feature automatically call the Booking Plugin from the LLM. +- [Auto Function Calling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCalling.cs) Enables the LLM to have knowledge of current importedUsing the Function Calling feature automatically call the Booking Plugin from the LLM. ## Prerequisites diff --git a/dotnet/samples/GettingStartedWithAgents/README.md b/dotnet/samples/GettingStartedWithAgents/README.md index 6c54a26c0d90..207dfde552be 100644 --- a/dotnet/samples/GettingStartedWithAgents/README.md +++ b/dotnet/samples/GettingStartedWithAgents/README.md @@ -37,7 +37,7 @@ Example|Description [Step01_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step01_Assistant.cs)|How to create an Open AI Assistant agent. [Step02_Assistant_Plugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step02_Assistant_Plugins.cs)|How to create an Open AI Assistant agent. [Step03_Assistant_Vision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step03_Assistant_Vision.cs)|How to provide an image as input to an Open AI Assistant agent. -[Step04_AssistantTool_CodeInterpreter_](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step04_AssistantTool_CodeInterpreter_.cs)|How to use the code-interpreter tool for an Open AI Assistant agent. +[Step04_AssistantTool_CodeInterpreter_](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step04_AssistantTool_CodeInterpreter.cs)|How to use the code-interpreter tool for an Open AI Assistant agent. [Step05_AssistantTool_FileSearch](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/OpenAIAssistant/Step05_AssistantTool_FileSearch.cs)|How to use the file-search tool for an Open AI Assistant agent. ### Azure AI Agent @@ -46,10 +46,10 @@ Example|Description ---|--- [Step01_AzureAIAgent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step01_AzureAIAgent.cs)|How to create an Azure AI agent. [Step02_AzureAIAgent_Plugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Plugins.cs)|How to create an Azure AI agent. -[Step03_AzureAIAgent_Chat](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step02_AzureAIAgent_Chat.cs)|How create a conversation with Azure AI agents. -[Step04_AzureAIAgent_CodeInterpreter](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step03_AzureAIAgent_CodeInterpreter.cs)|How to use the code-interpreter tool for an Azure AI agent. 
-[Step05_AzureAIAgent_FileSearch](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_FileSearch.cs)|How to use the file-search tool for an Azure AI agent.
-[Step06_AzureAIAgent_OpenAPI](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_OpenAPI.cs)|How to use the Open API tool for an Azure AI agent.
+[Step03_AzureAIAgent_Chat](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step03_AzureAIAgent_Chat.cs)|How to create a conversation with Azure AI agents.
+[Step04_AzureAIAgent_CodeInterpreter](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step04_AzureAIAgent_CodeInterpreter.cs)|How to use the code-interpreter tool for an Azure AI agent.
+[Step05_AzureAIAgent_FileSearch](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step05_AzureAIAgent_FileSearch.cs)|How to use the file-search tool for an Azure AI agent.
+[Step06_AzureAIAgent_OpenAPI](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/AzureAIAgent/Step06_AzureAIAgent_OpenAPI.cs)|How to use the Open API tool for an Azure AI agent.
 
 ### Bedrock Agent
 
diff --git a/dotnet/src/Connectors/Connectors.Amazon.UnitTests/Services/BedrockTextEmbeddingGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.Amazon.UnitTests/Services/BedrockTextEmbeddingGenerationServiceTests.cs
index 8100633103e4..4c48ea2458e3 100644
--- a/dotnet/src/Connectors/Connectors.Amazon.UnitTests/Services/BedrockTextEmbeddingGenerationServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.Amazon.UnitTests/Services/BedrockTextEmbeddingGenerationServiceTests.cs
@@ -4,6 +4,8 @@
 using System.Collections.Generic;
 using System.Threading.Tasks;
 using Amazon.BedrockRuntime;
+using Amazon.Runtime;
+using Microsoft.Extensions.DependencyInjection;
 using Microsoft.SemanticKernel.Embeddings;
 using Microsoft.SemanticKernel.Services;
 using Moq;
@@ -72,19 +74,37 @@ public void ShouldThrowExceptionForEmptyModelId()
 /// Checks that an invalid BedrockRuntime object will throw an exception.
 ///
[Fact] - public async Task ShouldThrowExceptionForNullBedrockRuntimeAsync() + public async Task ShouldThrowExceptionForNullBedrockRuntimeWhenNotConfiguredAsync() { // Arrange string modelId = "amazon.titan-embed-text-v2:0"; List prompts = new() { "King", "Queen", "Prince" }; IAmazonBedrockRuntime? nullBedrockRuntime = null; + bool notConfigured = false; - // Act & Assert - await Assert.ThrowsAnyAsync(async () => + try { - var kernel = Kernel.CreateBuilder().AddBedrockTextEmbeddingGenerationService(modelId, nullBedrockRuntime).Build(); - var service = kernel.GetRequiredService(); - await service.GenerateEmbeddingsAsync(prompts).ConfigureAwait(true); - }).ConfigureAwait(true); + var runtime = new ServiceCollection() + .TryAddAWSService() + .BuildServiceProvider() + .GetService(); + } + catch (AmazonClientException) + { + // If cannot grab the runtime from the container then we are not configured + notConfigured = true; + } + + // Act + if (notConfigured) + { + // If No RegionEndpoint or ServiceURL is configured, the runtime will throw an exception + await Assert.ThrowsAnyAsync(async () => + { + var kernel = Kernel.CreateBuilder().AddBedrockTextEmbeddingGenerationService(modelId, nullBedrockRuntime).Build(); + var service = kernel.GetRequiredService(); + await service.GenerateEmbeddingsAsync(prompts).ConfigureAwait(true); + }).ConfigureAwait(true); + } } } diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs index bcfa9aef4ecd..7b5faf13a141 100644 --- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs @@ -1768,6 +1768,10 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallAndEmptyArgumen public static TheoryData Versions => new() { + { "V2025_03_01_preview", "2025-03-01-preview" }, + { "V2025_03_01_PREVIEW", "2025-03-01-preview" }, + { "2025_03_01_Preview", "2025-03-01-preview" }, + { "2025-03-01-preview", "2025-03-01-preview" }, { "V2025_01_01_preview", "2025-01-01-preview" }, { "V2025_01_01_PREVIEW", "2025-01-01-preview" }, { "2025_01_01_Preview", "2025-01-01-preview" }, @@ -1794,6 +1798,7 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallAndEmptyArgumen { "V2024_10_21", "2024-10-21" }, { "2024_10_21", "2024-10-21" }, { "2024-10-21", "2024-10-21" }, + { AzureOpenAIClientOptions.ServiceVersion.V2025_03_01_Preview.ToString(), null }, { AzureOpenAIClientOptions.ServiceVersion.V2025_01_01_Preview.ToString(), null }, { AzureOpenAIClientOptions.ServiceVersion.V2024_12_01_Preview.ToString(), null }, { AzureOpenAIClientOptions.ServiceVersion.V2024_10_01_Preview.ToString(), null }, diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj b/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj index 9fcbdecf530e..d5e590afabbe 100644 --- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj @@ -5,7 +5,6 @@ Microsoft.SemanticKernel.Connectors.AzureOpenAI $(AssemblyName) net8.0;netstandard2.0 - true $(NoWarn);NU5104;SKEXP0001,SKEXP0010 true diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs 
b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs index a3dbbe730057..30f2a417e0df 100644 --- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs @@ -141,6 +141,7 @@ internal static AzureOpenAIClientOptions GetAzureOpenAIClientOptions(HttpClient? "2024-10-01-PREVIEW" or "V2024_10_01_PREVIEW" or "2024_10_01_PREVIEW" => AzureOpenAIClientOptions.ServiceVersion.V2024_10_01_Preview, "2024-12-01-PREVIEW" or "V2024_12_01_PREVIEW" or "2024_12_01_PREVIEW" => AzureOpenAIClientOptions.ServiceVersion.V2024_12_01_Preview, "2025-01-01-PREVIEW" or "V2025_01_01_PREVIEW" or "2025_01_01_PREVIEW" => AzureOpenAIClientOptions.ServiceVersion.V2025_01_01_Preview, + "2025-03-01-PREVIEW" or "V2025_03_01_PREVIEW" or "2025_03_01_PREVIEW" => AzureOpenAIClientOptions.ServiceVersion.V2025_03_01_Preview, _ => throw new NotSupportedException($"The service version '{serviceVersion}' is not supported.") }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj index d835350684d9..171643fece44 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj @@ -19,7 +19,7 @@ - + diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs new file mode 100644 index 000000000000..54a7202eaa07 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs @@ -0,0 +1,267 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using System.Runtime.CompilerServices; +using Pinecone; + +namespace Microsoft.SemanticKernel.Connectors.Pinecone; + +// This class is a modification of MongoDBFilterTranslator that uses the same query language +// (https://docs.pinecone.io/guides/data/understanding-metadata#metadata-query-language), +// with the difference of representing everything as Metadata rather than BsonDocument. +// For representing collections of any kinds, we use List, +// as we sometimes need to extend the collection (with for example another condition). +internal class PineconeFilterTranslator +{ + private IReadOnlyDictionary _storagePropertyNames = null!; + private ParameterExpression _recordParameter = null!; + + internal Metadata Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + { + this._storagePropertyNames = storagePropertyNames; + + Debug.Assert(lambdaExpression.Parameters.Count == 1); + this._recordParameter = lambdaExpression.Parameters[0]; + + return this.Translate(lambdaExpression.Body); + } + + private Metadata Translate(Expression? 
node) + => node switch + { + BinaryExpression + { + NodeType: ExpressionType.Equal or ExpressionType.NotEqual + or ExpressionType.GreaterThan or ExpressionType.GreaterThanOrEqual + or ExpressionType.LessThan or ExpressionType.LessThanOrEqual + } binary + => this.TranslateEqualityComparison(binary), + + BinaryExpression { NodeType: ExpressionType.AndAlso or ExpressionType.OrElse } andOr + => this.TranslateAndOr(andOr), + UnaryExpression { NodeType: ExpressionType.Not } not + => this.TranslateNot(not), + + // MemberExpression is generally handled within e.g. TranslateEqualityComparison; this is used to translate direct bool inside filter (e.g. Filter => r => r.Bool) + MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _) + => this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(true))), + + MethodCallExpression methodCall => this.TranslateMethodCall(methodCall), + + _ => throw new NotSupportedException("The following NodeType is unsupported: " + node?.NodeType) + }; + + private Metadata TranslateEqualityComparison(BinaryExpression binary) + { + if ((this.TryTranslateFieldAccess(binary.Left, out var storagePropertyName) && TryGetConstant(binary.Right, out var value)) + || (this.TryTranslateFieldAccess(binary.Right, out storagePropertyName) && TryGetConstant(binary.Left, out value))) + { + if (value is null) + { + throw new NotSupportedException("Pincone does not support null checks in vector search pre-filters"); + } + + // Short form of equality (instead of $eq) + if (binary.NodeType is ExpressionType.Equal) + { + return new Metadata { [storagePropertyName] = ToMetadata(value) }; + } + + var filterOperator = binary.NodeType switch + { + ExpressionType.NotEqual => "$ne", + ExpressionType.GreaterThan => "$gt", + ExpressionType.GreaterThanOrEqual => "$gte", + ExpressionType.LessThan => "$lt", + ExpressionType.LessThanOrEqual => "$lte", + + _ => throw new UnreachableException() + }; + + return new Metadata { [storagePropertyName] = new Metadata { [filterOperator] = ToMetadata(value) } }; + } + + throw new NotSupportedException("Invalid equality/comparison"); + } + + private Metadata TranslateAndOr(BinaryExpression andOr) + { + var mongoOperator = andOr.NodeType switch + { + ExpressionType.AndAlso => "$and", + ExpressionType.OrElse => "$or", + _ => throw new UnreachableException() + }; + + var (left, right) = (this.Translate(andOr.Left), this.Translate(andOr.Right)); + + List? nestedLeft = GetListOrNull(left, mongoOperator); + List? nestedRight = GetListOrNull(right, mongoOperator); + + switch ((nestedLeft, nestedRight)) + { + case (not null, not null): + nestedLeft.AddRange(nestedRight); + return left; + case (not null, null): + nestedLeft.Add(right); + return left; + case (null, not null): + nestedRight.Insert(0, left); + return right; + case (null, null): + return new Metadata { [mongoOperator] = new MetadataValue(new List { left, right }) }; + } + } + + private Metadata TranslateNot(UnaryExpression not) + { + switch (not.Operand) + { + // Special handling for !(a == b) and !(a != b) + case BinaryExpression { NodeType: ExpressionType.Equal or ExpressionType.NotEqual } binary: + return this.TranslateEqualityComparison( + Expression.MakeBinary( + binary.NodeType is ExpressionType.Equal ? 
ExpressionType.NotEqual : ExpressionType.Equal, + binary.Left, + binary.Right)); + + // Not over bool field (Filter => r => !r.Bool) + case MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _): + return this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(false))); + } + + var operand = this.Translate(not.Operand); + + // Identify NOT over $in, transform to $nin (https://www.mongodb.com/docs/manual/reference/operator/query/nin/#mongodb-query-op.-nin) + if (operand.Count == 1 && operand.First() is { Key: var fieldName, Value: MetadataValue nested } && nested.Value is Metadata nestedMetadata + && GetListOrNull(nestedMetadata, "$in") is List values) + { + return new Metadata { [fieldName] = new Metadata { ["$nin"] = values } }; + } + + throw new NotSupportedException("Pinecone does not support the NOT operator in vector search pre-filters"); + } + + private Metadata TranslateMethodCall(MethodCallExpression methodCall) + => methodCall switch + { + // Enumerable.Contains() + { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains + when contains.Method.DeclaringType == typeof(Enumerable) + => this.TranslateContains(source, item), + + // List.Contains() + { + Method: + { + Name: nameof(Enumerable.Contains), + DeclaringType: { IsGenericType: true } declaringType + }, + Object: Expression source, + Arguments: [var item] + } when declaringType.GetGenericTypeDefinition() == typeof(List<>) => this.TranslateContains(source, item), + + _ => throw new NotSupportedException($"Unsupported method call: {methodCall.Method.DeclaringType?.Name}.{methodCall.Method.Name}") + }; + + private Metadata TranslateContains(Expression source, Expression item) + { + switch (source) + { + // Contains over array column (r => r.Strings.Contains("foo")) + case var _ when this.TryTranslateFieldAccess(source, out _): + throw new NotSupportedException("Pinecone does not support Contains within array fields ($elemMatch) in vector search pre-filters"); + + // Contains over inline enumerable + case NewArrayExpression newArray: + var elements = new object?[newArray.Expressions.Count]; + + for (var i = 0; i < newArray.Expressions.Count; i++) + { + if (!TryGetConstant(newArray.Expressions[i], out var elementValue)) + { + throw new NotSupportedException("Invalid element in array"); + } + + elements[i] = elementValue; + } + + return ProcessInlineEnumerable(elements, item); + + // Contains over captured enumerable (we inline) + case var _ when TryGetConstant(source, out var constantEnumerable) + && constantEnumerable is IEnumerable enumerable and not string: + return ProcessInlineEnumerable(enumerable, item); + + default: + throw new NotSupportedException("Unsupported Contains expression"); + } + + Metadata ProcessInlineEnumerable(IEnumerable elements, Expression item) + { + if (!this.TryTranslateFieldAccess(item, out var storagePropertyName)) + { + throw new NotSupportedException("Unsupported item type in Contains"); + } + + return new Metadata + { + [storagePropertyName] = new Metadata + { + ["$in"] = new MetadataValue(elements.Cast().Select(ToMetadata).ToList()) + } + }; + } + } + + private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] out string? 
storagePropertyName) + { + if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) + { + if (!this._storagePropertyNames.TryGetValue(memberExpression.Member.Name, out storagePropertyName)) + { + throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); + } + + return true; + } + + storagePropertyName = null; + return false; + } + + private static bool TryGetConstant(Expression expression, out object? constantValue) + { + switch (expression) + { + case ConstantExpression { Value: var v }: + constantValue = v; + return true; + + // This identifies compiler-generated closure types which contain captured variables. + case MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } + when constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) + && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true): + constantValue = fieldInfo.GetValue(constant.Value); + return true; + + default: + constantValue = null; + return false; + } + } + + private static MetadataValue? ToMetadata(object? value) + => value is null ? null : PineconeVectorStoreRecordFieldMapping.ConvertToMetadataValue(value); + + private static List? GetListOrNull(Metadata value, string mongoOperator) + => value.Count == 1 && value.First() is var element && element.Key == mongoOperator ? element.Value?.Value as List : null; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeGenericDataModelMapper.cs index 496a848ed394..df783a230498 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeGenericDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeGenericDataModelMapper.cs @@ -34,7 +34,7 @@ public PineconeGenericDataModelMapper( /// public Vector MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) { - var metadata = new MetadataMap(); + var metadata = new Metadata(); // Map data properties. foreach (var dataProperty in this._propertyReader.DataProperties) @@ -42,9 +42,9 @@ public Vector MapFromDataToStorageModel(VectorStoreGenericDataModel data if (dataModel.Data.TryGetValue(dataProperty.DataModelPropertyName, out var propertyValue)) { var propertyStorageName = this._propertyReader.GetStoragePropertyName(dataProperty.DataModelPropertyName); - metadata[propertyStorageName] = propertyValue == null ? - new MetadataValue() : - PineconeVectorStoreRecordFieldMapping.ConvertToMetadataValue(propertyValue); + metadata[propertyStorageName] = propertyValue is not null + ? PineconeVectorStoreRecordFieldMapping.ConvertToMetadataValue(propertyValue) + : null; } } @@ -62,8 +62,8 @@ public Vector MapFromDataToStorageModel(VectorStoreGenericDataModel data // TODO: what about sparse values? var result = new Vector { - Id = (string)dataModel.Key, - Values = values.ToArray(), + Id = dataModel.Key, + Values = values, Metadata = metadata, SparseValues = null }; @@ -80,7 +80,7 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(Vector stor // Set Vector. if (options?.IncludeVectors is true) { - dataModel.Vectors.Add(this._propertyReader.FirstVectorPropertyName!, new ReadOnlyMemory(storageModel.Values)); + dataModel.Vectors.Add(this._propertyReader.FirstVectorPropertyName!, storageModel.Values); } // Set Data. 
@@ -91,9 +91,10 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(Vector stor var propertyStorageName = this._propertyReader.GetStoragePropertyName(dataProperty.DataModelPropertyName); if (storageModel.Metadata.TryGetValue(propertyStorageName, out var propertyValue)) { - dataModel.Data[dataProperty.DataModelPropertyName] = PineconeVectorStoreRecordFieldMapping.ConvertFromMetadataValueToNativeType( - propertyValue, - dataProperty.PropertyType); + dataModel.Data[dataProperty.DataModelPropertyName] = + propertyValue is not null + ? PineconeVectorStoreRecordFieldMapping.ConvertFromMetadataValueToNativeType(propertyValue, dataProperty.PropertyType) + : null; } } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs index 4f79810e641b..a072ea6e7336 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs @@ -4,7 +4,6 @@ using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Threading; -using Grpc.Core; using Microsoft.Extensions.VectorData; using Pinecone; using Sdk = Pinecone; @@ -20,7 +19,6 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; public class PineconeVectorStore : IVectorStore { private const string DatabaseName = "Pinecone"; - private const string ListCollectionsName = "ListCollections"; private readonly Sdk.PineconeClient _pineconeClient; private readonly PineconeVectorStoreOptions _options; @@ -63,24 +61,27 @@ public virtual IVectorStoreRecordCollection GetCollection public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { - IndexDetails[] collections; + IndexList indexList; try { - collections = await this._pineconeClient.ListIndexes(cancellationToken).ConfigureAwait(false); + indexList = await this._pineconeClient.ListIndexesAsync(cancellationToken: cancellationToken).ConfigureAwait(false); } - catch (RpcException ex) + catch (PineconeApiException ex) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { VectorStoreType = DatabaseName, - OperationName = ListCollectionsName + OperationName = "ListCollections" }; } - foreach (var collection in collections) + if (indexList.Indexes is not null) { - yield return collection.Name; + foreach (var index in indexList.Indexes) + { + yield return index.Name; + } } } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionCreateMapping.cs deleted file mode 100644 index 5f8d6bf6137d..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionCreateMapping.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.Extensions.VectorData; -using Pinecone; - -namespace Microsoft.SemanticKernel.Connectors.Pinecone; - -/// -/// Contains mapping helpers to use when creating a Pinecone vector collection. -/// -internal static class PineconeVectorStoreCollectionCreateMapping -{ - /// - /// Maps information stored in to a structure used by Pinecone SDK to create a serverless index. - /// - /// The property to map. - /// The structure containing settings used to create a serverless index. 
- /// Thrown if the property is missing information or has unsupported options specified. - public static (uint Dimension, Metric Metric) MapServerlessIndex(VectorStoreRecordVectorProperty vectorProperty) - { - if (vectorProperty!.Dimensions is not > 0) - { - throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' must be set to a positive integer to create a collection."); - } - - return (Dimension: (uint)vectorProperty.Dimensions, Metric: GetSDKMetricAlgorithm(vectorProperty)); - } - - /// - /// Get the configured from the given . - /// If none is configured, the default is . - /// - /// The vector property definition. - /// The chosen . - /// Thrown if a distance function is chosen that isn't supported by Pinecone. - public static Metric GetSDKMetricAlgorithm(VectorStoreRecordVectorProperty vectorProperty) - => vectorProperty.DistanceFunction switch - { - DistanceFunction.CosineSimilarity => Metric.Cosine, - DistanceFunction.DotProductSimilarity => Metric.DotProduct, - DistanceFunction.EuclideanSquaredDistance => Metric.Euclidean, - null => Metric.Cosine, - _ => throw new InvalidOperationException($"Distance function '{vectorProperty.DistanceFunction}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' is not supported by the Pinecone VectorStore.") - }; -} diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionSearchMapping.cs index 5b3d511c6b08..8e633c76e47e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionSearchMapping.cs @@ -14,15 +14,15 @@ internal static class PineconeVectorStoreCollectionSearchMapping { #pragma warning disable CS0618 // FilterClause is obsolete /// - /// Build a Pinecone from a set of filter clauses. + /// Build a Pinecone from a set of filter clauses. /// - /// The filter clauses to build the Pinecone from. + /// The filter clauses to build the Pinecone from. /// A mapping from property name to the name under which the property would be stored. - /// The Pinecone . + /// The Pinecone . /// Thrown for invalid property names, value types or filter clause types. - public static MetadataMap BuildSearchFilter(IEnumerable? filterClauses, IReadOnlyDictionary storagePropertyNamesMap) + public static Metadata BuildSearchFilter(IEnumerable? filterClauses, IReadOnlyDictionary storagePropertyNamesMap) { - var metadataMap = new MetadataMap(); + var metadataMap = new Metadata(); if (filterClauses is null) { @@ -46,15 +46,14 @@ public static MetadataMap BuildSearchFilter(IEnumerable? 
filterCla bool boolValue => (MetadataValue)boolValue, float floatValue => (MetadataValue)floatValue, double doubleValue => (MetadataValue)doubleValue, - decimal decimalValue => (MetadataValue)decimalValue, - _ => throw new InvalidOperationException($"Unsupported filter value type '{equalToFilterClause.Value.GetType().Name}'.") + _ => throw new NotSupportedException($"Unsupported filter value type '{equalToFilterClause.Value.GetType().Name}'.") }; metadataMap.Add(storagePropertyName, metadataValue); } else { - throw new InvalidOperationException($"Unsupported filter clause type '{filterClause.GetType().Name}'."); + throw new NotSupportedException($"Unsupported filter clause type '{filterClause.GetType().Name}'."); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 6e44feda9334..3da753575141 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -3,13 +3,12 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; -using Grpc.Core; using Microsoft.Extensions.VectorData; using Pinecone; -using Pinecone.Grpc; using Sdk = Pinecone; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -23,14 +22,6 @@ public class PineconeVectorStoreRecordCollection : IVectorStoreRecordCo #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { private const string DatabaseName = "Pinecone"; - private const string CreateCollectionName = "CreateCollection"; - private const string CollectionExistsName = "CollectionExists"; - private const string DeleteCollectionName = "DeleteCollection"; - - private const string UpsertOperationName = "Upsert"; - private const string DeleteOperationName = "Delete"; - private const string GetOperationName = "Get"; - private const string QueryOperationName = "Query"; private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -38,8 +29,7 @@ public class PineconeVectorStoreRecordCollection : IVectorStoreRecordCo private readonly PineconeVectorStoreRecordCollectionOptions _options; private readonly VectorStoreRecordPropertyReader _propertyReader; private readonly IVectorStoreRecordMapper _mapper; - - private Sdk.Index? _index; + private IndexClient? _indexClient; /// public string CollectionName { get; } @@ -55,7 +45,8 @@ public class PineconeVectorStoreRecordCollection : IVectorStoreRecordCo public PineconeVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, string collectionName, PineconeVectorStoreRecordCollectionOptions? 
options = null) { Verify.NotNull(pineconeClient); - Verify.NotNullOrWhiteSpace(collectionName); + VerifyCollectionName(collectionName); + VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.VectorCustomMapper is not null, PineconeVectorStoreRecordFieldMapping.s_supportedKeyTypes); VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); @@ -90,36 +81,45 @@ public PineconeVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, st } /// - public virtual async Task CollectionExistsAsync(CancellationToken cancellationToken = default) - { - var result = await this.RunOperationAsync( - CollectionExistsName, + public virtual Task CollectionExistsAsync(CancellationToken cancellationToken = default) + => this.RunCollectionOperationAsync( + "CollectionExists", async () => { - var collections = await this._pineconeClient.ListIndexes(cancellationToken).ConfigureAwait(false); + var collections = await this._pineconeClient.ListIndexesAsync(cancellationToken: cancellationToken).ConfigureAwait(false); - return collections.Any(x => x.Name == this.CollectionName); - }).ConfigureAwait(false); - - return result; - } + return collections.Indexes?.Any(x => x.Name == this.CollectionName) is true; + }); /// - public virtual async Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) { // we already run through record property validation, so a single VectorStoreRecordVectorProperty is guaranteed. var vectorProperty = this._propertyReader.VectorProperty!; - var (dimension, metric) = PineconeVectorStoreCollectionCreateMapping.MapServerlessIndex(vectorProperty); - - await this.RunOperationAsync( - CreateCollectionName, - () => this._pineconeClient.CreateServerlessIndex( - this.CollectionName, - dimension, - metric, - this._options.ServerlessIndexCloud, - this._options.ServerlessIndexRegion, - cancellationToken)).ConfigureAwait(false); + + if (!string.IsNullOrEmpty(vectorProperty.IndexKind) && vectorProperty.IndexKind != "PGA") + { + throw new InvalidOperationException( + $"IndexKind of '{vectorProperty.IndexKind}' for property '{vectorProperty.DataModelPropertyName}' is not supported. Pinecone only supports 'PGA' (Pinecone Graph Algorithm), which is always enabled."); + } + + CreateIndexRequest request = new() + { + Name = this.CollectionName, + Dimension = vectorProperty.Dimensions ?? 
throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' must be set to a positive integer to create a collection."), + Metric = MapDistanceFunction(vectorProperty), + Spec = new ServerlessIndexSpec + { + Serverless = new ServerlessSpec + { + Cloud = MapCloud(this._options.ServerlessIndexCloud), + Region = this._options.ServerlessIndexRegion, + } + }, + }; + + return this.RunCollectionOperationAsync("CreateCollection", + () => this._pineconeClient.CreateIndexAsync(request, cancellationToken: cancellationToken)); } /// @@ -127,24 +127,66 @@ public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken can { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { - await this.CreateCollectionAsync(cancellationToken).ConfigureAwait(false); + try + { + await this.CreateCollectionAsync(cancellationToken).ConfigureAwait(false); + } + catch (VectorStoreOperationException ex) when (ex.InnerException is PineconeApiException apiEx && apiEx.InnerException is ConflictError) + { + // If the collection got created in the meantime, we should ignore the exception. + } } } /// - public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) - => this.RunOperationAsync( - DeleteCollectionName, - () => this._pineconeClient.DeleteIndex(this.CollectionName, cancellationToken)); + public virtual async Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + { + try + { + await this._pineconeClient.DeleteIndexAsync(this.CollectionName, cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch (NotFoundError) + { + // If the collection does not exist, we should ignore the exception. + } + catch (PineconeApiException other) + { + throw new VectorStoreOperationException("Call to vector store failed.", other) + { + VectorStoreType = DatabaseName, + CollectionName = this.CollectionName, + OperationName = "DeleteCollection" + }; + } + } /// public virtual async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(key); - var records = await this.GetBatchAsync([key], options, cancellationToken).ToListAsync(cancellationToken).ConfigureAwait(false); + Sdk.FetchRequest request = new() + { + Namespace = this._options.IndexNamespace, + Ids = [key] + }; - return records.FirstOrDefault(); + var response = await this.RunIndexOperationAsync( + "Get", + indexClient => indexClient.FetchAsync(request, cancellationToken: cancellationToken)).ConfigureAwait(false); + + var result = response.Vectors?.Values.FirstOrDefault(); + if (result is null) + { + return default; + } + + StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = options?.IncludeVectors is true }; + return VectorStoreErrorHandler.RunModelConversion( + DatabaseName, + this.CollectionName, + "Get", + () => this._mapper.MapFromStorageToDataModel(result, mapperOptions)); } /// @@ -155,20 +197,32 @@ public virtual async IAsyncEnumerable GetBatchAsync( { Verify.NotNull(keys); - var indexNamespace = this.GetIndexNamespace(); - var mapperOptions = new StorageToDataModelMapperOptions { IncludeVectors = options?.IncludeVectors ?? 
false }; - - var index = await this.GetIndexAsync(this.CollectionName, cancellationToken).ConfigureAwait(false); + List keysList = keys.ToList(); + if (keysList.Count == 0) + { + yield break; + } - var results = await this.RunOperationAsync( - GetOperationName, - () => index.Fetch(keys, indexNamespace, cancellationToken)).ConfigureAwait(false); + Sdk.FetchRequest request = new() + { + Namespace = this._options.IndexNamespace, + Ids = keysList + }; + + var response = await this.RunIndexOperationAsync( + "GetBatch", + indexClient => indexClient.FetchAsync(request, cancellationToken: cancellationToken)).ConfigureAwait(false); + if (response.Vectors is null || response.Vectors.Count == 0) + { + yield break; + } + StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = options?.IncludeVectors is true }; var records = VectorStoreErrorHandler.RunModelConversion( DatabaseName, this.CollectionName, - GetOperationName, - () => results.Values.Select(x => this._mapper.MapFromStorageToDataModel(x, mapperOptions))); + "GetBatch", + () => response.Vectors.Values.Select(x => this._mapper.MapFromStorageToDataModel(x, mapperOptions))); foreach (var record in records) { @@ -181,21 +235,37 @@ public virtual Task DeleteAsync(string key, CancellationToken cancellationToken { Verify.NotNullOrWhiteSpace(key); - return this.DeleteBatchAsync([key], cancellationToken); + Sdk.DeleteRequest request = new() + { + Namespace = this._options.IndexNamespace, + Ids = [key] + }; + + return this.RunIndexOperationAsync( + "Delete", + indexClient => indexClient.DeleteAsync(request, cancellationToken: cancellationToken)); } /// - public virtual async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); - var indexNamespace = this.GetIndexNamespace(); + List keysList = keys.ToList(); + if (keysList.Count == 0) + { + return Task.CompletedTask; + } - var index = await this.GetIndexAsync(this.CollectionName, cancellationToken).ConfigureAwait(false); + Sdk.DeleteRequest request = new() + { + Namespace = this._options.IndexNamespace, + Ids = keysList + }; - await this.RunOperationAsync( - DeleteOperationName, - () => index.Delete(keys, indexNamespace, cancellationToken)).ConfigureAwait(false); + return this.RunIndexOperationAsync( + "DeleteBatch", + indexClient => indexClient.DeleteAsync(request, cancellationToken: cancellationToken)); } /// @@ -203,19 +273,21 @@ public virtual async Task UpsertAsync(TRecord record, CancellationToken { Verify.NotNull(record); - var indexNamespace = this.GetIndexNamespace(); - - var index = await this.GetIndexAsync(this.CollectionName, cancellationToken).ConfigureAwait(false); - var vector = VectorStoreErrorHandler.RunModelConversion( DatabaseName, this.CollectionName, - UpsertOperationName, + "Upsert", () => this._mapper.MapFromDataToStorageModel(record)); - await this.RunOperationAsync( - UpsertOperationName, - () => index.Upsert([vector], indexNamespace, cancellationToken)).ConfigureAwait(false); + Sdk.UpsertRequest request = new() + { + Namespace = this._options.IndexNamespace, + Vectors = [vector], + }; + + await this.RunIndexOperationAsync( + "Upsert", + indexClient => indexClient.UpsertAsync(request, cancellationToken: cancellationToken)).ConfigureAwait(false); return vector.Id; } @@ -225,19 +297,26 @@ public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable 
records.Select(this._mapper.MapFromDataToStorageModel).ToList()); - await this.RunOperationAsync( - UpsertOperationName, - () => index.Upsert(vectors, indexNamespace, cancellationToken)).ConfigureAwait(false); + if (vectors.Count == 0) + { + yield break; + } + + Sdk.UpsertRequest request = new() + { + Namespace = this._options.IndexNamespace, + Vectors = vectors, + }; + + await this.RunIndexOperationAsync( + "UpsertBatch", + indexClient => indexClient.UpsertAsync(request, cancellationToken: cancellationToken)).ConfigureAwait(false); foreach (var vector in vectors) { @@ -256,70 +335,73 @@ public virtual async Task> VectorizedSearchAsync).FullName}"); } - // Resolve options and build filter clause. - var internalOptions = options ?? s_defaultVectorSearchOptions; - var mapperOptions = new StorageToDataModelMapperOptions { IncludeVectors = options?.IncludeVectors ?? false }; + options ??= s_defaultVectorSearchOptions; -#pragma warning disable CS0618 // FilterClause is obsolete - var filter = PineconeVectorStoreCollectionSearchMapping.BuildSearchFilter( - internalOptions.OldFilter?.FilterClauses, - this._propertyReader.StoragePropertyNamesMap); +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + var filter = options switch + { + { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), + { OldFilter: VectorSearchFilter legacyFilter } => PineconeVectorStoreCollectionSearchMapping.BuildSearchFilter(options.OldFilter?.FilterClauses, this._propertyReader.StoragePropertyNamesMap), + { Filter: Expression> newFilter } => new PineconeFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), + _ => null + }; #pragma warning restore CS0618 - // Get the current index. - var indexNamespace = this.GetIndexNamespace(); - var index = await this.GetIndexAsync(this.CollectionName, cancellationToken).ConfigureAwait(false); - - // Search. - var results = await this.RunOperationAsync( - QueryOperationName, - () => index.Query( - floatVector.ToArray(), - (uint)(internalOptions.Skip + internalOptions.Top), - filter, - sparseValues: null, - indexNamespace, - internalOptions.IncludeVectors, - includeMetadata: true, - cancellationToken)).ConfigureAwait(false); - - // Skip the required results for paging. - var skippedResults = results.Skip(internalOptions.Skip); - - // Map the results. + Sdk.QueryRequest request = new() + { + TopK = (uint)(options.Top + options.Skip), + Namespace = this._options.IndexNamespace, + IncludeValues = options.IncludeVectors, + IncludeMetadata = true, + Vector = floatVector, + Filter = filter, + }; + + Sdk.QueryResponse response = await this.RunIndexOperationAsync( + "Query", + indexClient => indexClient.QueryAsync(request, cancellationToken: cancellationToken)).ConfigureAwait(false); + + if (response.Matches is null) + { + return new VectorSearchResults(Array.Empty>().ToAsyncEnumerable()); + } + + // Pinecone does not provide a way to skip results, so we need to do it manually. 
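// For example (hypothetical numbers): with options.Top = 3 and options.Skip = 2, the request above
// asks Pinecone for TopK = 5 matches, and the 2 best matches are then dropped client-side below, so
// the caller still receives at most the 3 results it asked for.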
+ var skippedResults = response.Matches + .Skip(options.Skip); + + StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = options.IncludeVectors is true }; var records = VectorStoreErrorHandler.RunModelConversion( DatabaseName, this.CollectionName, - QueryOperationName, - () => + "Query", + () => skippedResults.Select(x => new VectorSearchResult(this._mapper.MapFromStorageToDataModel(new Sdk.Vector() { - // First convert to Vector objects, since the - // mapper requires these as input. - var vectorResults = skippedResults.Select(x => ( - Vector: new Vector() - { - Id = x.Id, - Values = x.Values ?? Array.Empty(), - Metadata = x.Metadata, - SparseValues = x.SparseValues - }, - x.Score)); - - return vectorResults.Select(x => new VectorSearchResult( - this._mapper.MapFromStorageToDataModel(x.Vector, mapperOptions), - x.Score)); - }); - - return new VectorSearchResults(records.ToAsyncEnumerable()); + Id = x.Id, + Values = x.Values ?? Array.Empty(), + Metadata = x.Metadata, + SparseValues = x.SparseValues + }, mapperOptions), x.Score))) + .ToAsyncEnumerable(); + + return new(records); } - private async Task RunOperationAsync(string operationName, Func> operation) + private async Task RunIndexOperationAsync(string operationName, Func> operation) { try { - return await operation.Invoke().ConfigureAwait(false); + if (this._indexClient is null) + { + // If we don't provide "host" to the Index method, it's going to perform + // a blocking call to DescribeIndexAsync!! + string hostName = (await this._pineconeClient.DescribeIndexAsync(this.CollectionName).ConfigureAwait(false)).Host; + this._indexClient = this._pineconeClient.Index(host: hostName); + } + + return await operation.Invoke(this._indexClient).ConfigureAwait(false); } - catch (RpcException ex) + catch (PineconeApiException ex) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { @@ -330,13 +412,13 @@ private async Task RunOperationAsync(string operationName, Func> o } } - private async Task RunOperationAsync(string operationName, Func operation) + private async Task RunCollectionOperationAsync(string operationName, Func> operation) { try { - await operation.Invoke().ConfigureAwait(false); + return await operation.Invoke().ConfigureAwait(false); } - catch (RpcException ex) + catch (PineconeApiException ex) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { @@ -347,13 +429,36 @@ private async Task RunOperationAsync(string operationName, Func operation) } } - private async Task> GetIndexAsync(string indexName, CancellationToken cancellationToken) + private static ServerlessSpecCloud MapCloud(string serverlessIndexCloud) + => serverlessIndexCloud switch + { + "aws" => ServerlessSpecCloud.Aws, + "azure" => ServerlessSpecCloud.Azure, + "gcp" => ServerlessSpecCloud.Gcp, + _ => throw new ArgumentException($"Invalid serverless index cloud: {serverlessIndexCloud}.", nameof(serverlessIndexCloud)) + }; + + private static CreateIndexRequestMetric MapDistanceFunction(VectorStoreRecordVectorProperty vectorProperty) + => vectorProperty.DistanceFunction switch + { + DistanceFunction.CosineSimilarity => CreateIndexRequestMetric.Cosine, + DistanceFunction.DotProductSimilarity => CreateIndexRequestMetric.Dotproduct, + DistanceFunction.EuclideanSquaredDistance => CreateIndexRequestMetric.Euclidean, + null => CreateIndexRequestMetric.Cosine, + _ => throw new NotSupportedException($"Distance function '{vectorProperty.DistanceFunction}' is not supported.") + }; + + private static void 
VerifyCollectionName(string collectionName) { - this._index ??= await this._pineconeClient.GetIndex(indexName, cancellationToken).ConfigureAwait(false); + Verify.NotNullOrWhiteSpace(collectionName); - return this._index; + // Based on https://docs.pinecone.io/troubleshooting/restrictions-on-index-names + foreach (char character in collectionName) + { + if (!((character is >= 'a' and <= 'z') || character is '-' || (character is >= '0' and <= '9'))) + { + throw new ArgumentException("Collection name must contain only ASCII lowercase letters, digits and dashes.", nameof(collectionName)); + } + } } - - private string? GetIndexNamespace() - => this._options.IndexNamespace; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordFieldMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordFieldMapping.cs index 6acbec24d72c..9573740f8580 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordFieldMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordFieldMapping.cs @@ -48,7 +48,7 @@ internal static class PineconeVectorStoreRecordFieldMapping ]; public static object? ConvertFromMetadataValueToNativeType(MetadataValue metadataValue, Type targetType) - => metadataValue.Inner switch + => metadataValue.Value switch { null => null, bool boolValue => boolValue, @@ -59,26 +59,28 @@ internal static class PineconeVectorStoreRecordFieldMapping long longValue => ConvertToNumericValue(longValue, targetType), float floatValue => ConvertToNumericValue(floatValue, targetType), double doubleValue => ConvertToNumericValue(doubleValue, targetType), - decimal decimalValue => ConvertToNumericValue(decimalValue, targetType), MetadataValue[] array => VectorStoreRecordMapping.CreateEnumerable(array.Select(x => ConvertFromMetadataValueToNativeType(x, VectorStoreRecordPropertyVerification.GetCollectionElementType(targetType))), targetType), List list => VectorStoreRecordMapping.CreateEnumerable(list.Select(x => ConvertFromMetadataValueToNativeType(x, VectorStoreRecordPropertyVerification.GetCollectionElementType(targetType))), targetType), - _ => throw new VectorStoreRecordMappingException($"Unsupported metadata type: '{metadataValue.Inner?.GetType().FullName}'."), + _ => throw new VectorStoreRecordMappingException($"Unsupported metadata type: '{metadataValue.Value?.GetType().FullName}'."), }; - // TODO: take advantage of MetadataValue.TryCreate once we upgrade the version of Pinecone.NET public static MetadataValue ConvertToMetadataValue(object? sourceValue) => sourceValue switch { bool boolValue => boolValue, + bool[] bools => bools, + List bools => bools, string stringValue => stringValue, + string[] stringArray => stringArray, + List stringList => stringList, + double doubleValue => doubleValue, + double[] doubles => doubles, + List doubles => doubles, + // Other numeric types are simply cast into double in implicit way. + // We could consider supporting arrays of these types. 
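// For example (sketch): ConvertToMetadataValue(5) and ConvertToMetadataValue(5L) go through the
// implicit MetadataValue conversions below and are stored as numbers, while decimal values are no
// longer accepted (the decimal arm is removed further down) and now surface as a
// VectorStoreRecordMappingException from the default arm.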
int intValue => intValue, long longValue => longValue, float floatValue => floatValue, - double doubleValue => doubleValue, - decimal decimalValue => decimalValue, - string[] stringArray => stringArray, - List stringList => stringList, - IEnumerable stringEnumerable => stringEnumerable.ToArray(), _ => throw new VectorStoreRecordMappingException($"Unsupported source value type '{sourceValue?.GetType().FullName}'.") }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs index 501937eaf50d..1163c1a66bea 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs @@ -40,7 +40,7 @@ public Vector MapFromDataToStorageModel(TRecord dataModel) throw new VectorStoreRecordMappingException($"Key property {this._propertyReader.KeyPropertyName} on provided record of type {typeof(TRecord).FullName} may not be null."); } - var metadata = new MetadataMap(); + var metadata = new Metadata(); foreach (var dataPropertyInfo in this._propertyReader.DataPropertiesInfo) { var propertyName = this._propertyReader.GetStoragePropertyName(dataPropertyInfo.Name); @@ -61,7 +61,7 @@ public Vector MapFromDataToStorageModel(TRecord dataModel) var result = new Vector { Id = (string)keyObject, - Values = values.ToArray(), + Values = values, Metadata = metadata, SparseValues = null }; @@ -83,7 +83,7 @@ public TRecord MapFromStorageToDataModel(Vector storageModel, StorageToDataModel { this._propertyReader.FirstVectorPropertyInfo!.SetValue( outputRecord, - new ReadOnlyMemory(storageModel.Values)); + storageModel.Values); } // Set Data. @@ -94,7 +94,7 @@ public TRecord MapFromStorageToDataModel(Vector storageModel, StorageToDataModel this._propertyReader.DataPropertiesInfo, this._propertyReader.StoragePropertyNamesMap, storageModel.Metadata, - PineconeVectorStoreRecordFieldMapping.ConvertFromMetadataValueToNativeType); + PineconeVectorStoreRecordFieldMapping.ConvertFromMetadataValueToNativeType!); } return outputRecord; diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs index 939e3530e797..6690f1d564a4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs @@ -12,7 +12,7 @@ namespace Microsoft.SemanticKernel.Connectors.SqlServer; internal static class ExceptionWrapper { - private const string VectorStoreType = "SqlServer"; + internal const string VectorStoreType = "SqlServer"; internal static async Task WrapAsync( SqlConnection connection, diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs index fee4686bf548..9e97e37c0cb3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs @@ -155,16 +155,14 @@ internal static SqlCommand MergeIntoSingle( return command; } - internal static SqlCommand? MergeIntoMany( - SqlConnection connection, + internal static bool MergeIntoMany( + SqlCommand command, string? 
schema, string tableName, VectorStoreRecordKeyProperty keyProperty, IReadOnlyList properties, IEnumerable> records) { - SqlCommand command = connection.CreateCommand(); - StringBuilder sb = new(200); // The DECLARE statement creates a table variable to store the keys of the inserted rows. sb.AppendFormat("DECLARE @InsertedKeys TABLE (KeyColumn {0});", Map(keyProperty)); @@ -190,7 +188,7 @@ internal static SqlCommand MergeIntoSingle( if (rowIndex == 0) { - return null; // there is nothing to do! + return false; // there is nothing to do! } sb.Length -= (1 + Environment.NewLine.Length); // remove the last comma and newline @@ -225,7 +223,7 @@ internal static SqlCommand MergeIntoSingle( sb.Append("SELECT KeyColumn FROM @InsertedKeys;"); command.CommandText = sb.ToString(); - return command; + return true; } internal static SqlCommand DeleteSingle( @@ -246,12 +244,10 @@ internal static SqlCommand DeleteSingle( return command; } - internal static SqlCommand? DeleteMany( - SqlConnection connection, string? schema, string tableName, + internal static bool DeleteMany( + SqlCommand command, string? schema, string tableName, VectorStoreRecordKeyProperty keyProperty, IEnumerable keys) { - SqlCommand command = connection.CreateCommand(); - StringBuilder sb = new(100); sb.Append("DELETE FROM "); sb.AppendTableName(schema, tableName); @@ -261,11 +257,11 @@ internal static SqlCommand DeleteSingle( if (emptyKeys) { - return null; // there is nothing to do! + return false; } command.CommandText = sb.ToString(); - return command; + return true; } internal static SqlCommand SelectSingle( @@ -293,15 +289,13 @@ internal static SqlCommand SelectSingle( return command; } - internal static SqlCommand? SelectMany( - SqlConnection connection, string? schema, string tableName, + internal static bool SelectMany( + SqlCommand command, string? schema, string tableName, VectorStoreRecordKeyProperty keyProperty, IReadOnlyList properties, IEnumerable keys, bool includeVectors) { - SqlCommand command = connection.CreateCommand(); - StringBuilder sb = new(200); sb.AppendFormat("SELECT "); sb.AppendColumnNames(properties, includeVectors: includeVectors); @@ -315,11 +309,11 @@ internal static SqlCommand SelectSingle( if (emptyKeys) { - return null; // there is nothing to do! + return false; // there is nothing to do! } command.CommandText = sb.ToString(); - return command; + return true; } internal static SqlCommand SelectVector( diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs index 6b81cbac1ef6..d8ce0f1354e7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs @@ -7,6 +7,9 @@ namespace Microsoft.SemanticKernel.Connectors.SqlServer; internal static class SqlServerConstants { + // The actual number is actually higher (2_100), but we want to avoid any kind of "off by one" errors. 
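// As a worked example (hypothetical column count): a record type that maps to 10 columns binds 10
// parameters per row, so UpsertBatchAsync below can merge at most 2_000 / 10 = 200 records into a
// single SqlCommand before starting a new batch, while DeleteBatchAsync and GetBatchAsync can use
// the full budget because they bind one parameter per key.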
+ internal const int MaxParameterCount = 2_000; + internal static readonly HashSet SupportedKeyTypes = [ typeof(int), // INT diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index f68318a92836..9b4ce3b29078 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -177,21 +177,65 @@ public async Task DeleteBatchAsync(IEnumerable keys, CancellationToken can Verify.NotNull(keys); using SqlConnection connection = new(this._connectionString); - using SqlCommand? command = SqlServerCommandBuilder.DeleteMany( - connection, - this._options.Schema, - this.CollectionName, - this._propertyReader.KeyProperty, - keys); + await connection.OpenAsync(cancellationToken).ConfigureAwait(false); + + using SqlTransaction transaction = connection.BeginTransaction(); + int taken = 0; - if (command is null) + try { - return; // keys is empty, there is nothing to delete + while (true) + { +#if NET + SqlCommand command = new("", connection, transaction); + await using (command.ConfigureAwait(false)) +#else + using (SqlCommand command = new("", connection, transaction)) +#endif + { + if (!SqlServerCommandBuilder.DeleteMany( + command, + this._options.Schema, + this.CollectionName, + this._propertyReader.KeyProperty, + keys.Skip(taken).Take(SqlServerConstants.MaxParameterCount))) + { + break; // keys is empty, there is nothing to delete + } + + checked + { + taken += command.Parameters.Count; + } + + await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); + } + } + + if (taken > 0) + { +#if NET + await transaction.CommitAsync(cancellationToken).ConfigureAwait(false); +#else + transaction.Commit(); +#endif + } } + catch (Exception ex) + { +#if NET + await transaction.RollbackAsync(cancellationToken).ConfigureAwait(false); +#else + transaction.Rollback(); +#endif - await ExceptionWrapper.WrapAsync(connection, command, - static (cmd, ct) => cmd.ExecuteNonQueryAsync(ct), - cancellationToken, "DeleteBatch", this.CollectionName).ConfigureAwait(false); + throw new VectorStoreOperationException(ex.Message, ex) + { + OperationName = "DeleteBatch", + VectorStoreType = ExceptionWrapper.VectorStoreType, + CollectionName = this.CollectionName + }; + } } /// @@ -235,30 +279,44 @@ public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, Get bool includeVectors = options?.IncludeVectors is true; using SqlConnection connection = new(this._connectionString); - using SqlCommand? command = SqlServerCommandBuilder.SelectMany( - connection, - this._options.Schema, - this.CollectionName, - this._propertyReader.KeyProperty, - this._propertyReader.Properties, - keys, - includeVectors); + using SqlCommand command = connection.CreateCommand(); + int taken = 0; - if (command is null) + do { - yield break; // keys is empty - } + if (command.Parameters.Count > 0) + { + command.Parameters.Clear(); // We reuse the same command for the next batch. 
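// Paging sketch for the loop below: each iteration binds up to MaxParameterCount keys, "taken"
// tracks how many keys have been consumed so far, and the do/while loop exits once a batch binds
// fewer than MaxParameterCount parameters, i.e. once the key enumerable has been exhausted.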
+ } - using SqlDataReader reader = await ExceptionWrapper.WrapAsync(connection, command, - static (cmd, ct) => cmd.ExecuteReaderAsync(ct), - cancellationToken, "GetBatch", this.CollectionName).ConfigureAwait(false); + if (!SqlServerCommandBuilder.SelectMany( + command, + this._options.Schema, + this.CollectionName, + this._propertyReader.KeyProperty, + this._propertyReader.Properties, + keys.Skip(taken).Take(SqlServerConstants.MaxParameterCount), + includeVectors)) + { + yield break; // keys is empty + } - while (await ExceptionWrapper.WrapReadAsync(reader, cancellationToken, "GetBatch", this.CollectionName).ConfigureAwait(false)) - { - yield return this._mapper.MapFromStorageToDataModel( - new SqlDataReaderDictionary(reader, this._propertyReader.VectorPropertyStoragePropertyNames), - new() { IncludeVectors = includeVectors }); - } + checked + { + taken += command.Parameters.Count; + } + + using SqlDataReader reader = await ExceptionWrapper.WrapAsync(connection, command, + static (cmd, ct) => cmd.ExecuteReaderAsync(ct), + cancellationToken, "GetBatch", this.CollectionName).ConfigureAwait(false); + + while (await ExceptionWrapper.WrapReadAsync(reader, cancellationToken, "GetBatch", this.CollectionName).ConfigureAwait(false)) + { + yield return this._mapper.MapFromStorageToDataModel( + new SqlDataReaderDictionary(reader, this._propertyReader.VectorPropertyStoragePropertyNames), + new() { IncludeVectors = includeVectors }); + } + } while (command.Parameters.Count == SqlServerConstants.MaxParameterCount); } /// @@ -291,26 +349,84 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable record Verify.NotNull(records); using SqlConnection connection = new(this._connectionString); - using SqlCommand? command = SqlServerCommandBuilder.MergeIntoMany( - connection, - this._options.Schema, - this.CollectionName, - this._propertyReader.KeyProperty, - this._propertyReader.Properties, - records.Select(record => this._mapper.MapFromDataToStorageModel(record))); + await connection.OpenAsync(cancellationToken).ConfigureAwait(false); + + using SqlTransaction transaction = connection.BeginTransaction(); + int parametersPerRecord = this._propertyReader.Properties.Count; + int taken = 0; - if (command is null) + try { - yield break; // records is empty + while (true) + { +#if NET + SqlCommand command = new("", connection, transaction); + await using (command.ConfigureAwait(false)) +#else + using (SqlCommand command = new("", connection, transaction)) +#endif + { + if (!SqlServerCommandBuilder.MergeIntoMany( + command, + this._options.Schema, + this.CollectionName, + this._propertyReader.KeyProperty, + this._propertyReader.Properties, + records.Skip(taken) + .Take(SqlServerConstants.MaxParameterCount / parametersPerRecord) + .Select(this._mapper.MapFromDataToStorageModel))) + { + break; // records is empty + } + + checked + { + taken += (command.Parameters.Count / parametersPerRecord); + } + + await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); + } + } + + if (taken > 0) + { +#if NET + await transaction.CommitAsync(cancellationToken).ConfigureAwait(false); +#else + transaction.Commit(); +#endif + } } + catch (Exception ex) + { +#if NET + await transaction.RollbackAsync(cancellationToken).ConfigureAwait(false); +#else + transaction.Rollback(); +#endif - using SqlDataReader reader = await ExceptionWrapper.WrapAsync(connection, command, - static (cmd, ct) => cmd.ExecuteReaderAsync(ct), - cancellationToken, "GetBatch", this.CollectionName).ConfigureAwait(false); + throw new 
VectorStoreOperationException(ex.Message, ex) + { + OperationName = "UpsertBatch", + VectorStoreType = ExceptionWrapper.VectorStoreType, + CollectionName = this.CollectionName + }; + } - while (await ExceptionWrapper.WrapReadAsync(reader, cancellationToken, "GetBatch", this.CollectionName).ConfigureAwait(false)) + if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) + { + foreach (var record in records) + { + yield return ((VectorStoreGenericDataModel)(object)record!).Key; + } + } + else { - yield return reader.GetFieldValue(0); + var keyProperty = this._propertyReader.KeyPropertyInfo; + foreach (var record in records) + { + yield return (TKey)keyProperty.GetValue(record)!; + } } } diff --git a/dotnet/src/Connectors/Connectors.Onnx.UnitTests/Connectors.Onnx.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Onnx.UnitTests/Connectors.Onnx.UnitTests.csproj index 6333d7dd4322..aa771a2af5df 100644 --- a/dotnet/src/Connectors/Connectors.Onnx.UnitTests/Connectors.Onnx.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.Onnx.UnitTests/Connectors.Onnx.UnitTests.csproj @@ -7,7 +7,7 @@ true enable false - $(NoWarn);SKEXP0001;SKEXP0070;CS1591;IDE1006;RCS1261;CA1031;CA1308;CA1861;CA2007;CA2234;VSTHRD111 + $(NoWarn);SKEXP0001;SKEXP0070;CS1591;IDE1006;RCS1261;CA1031;CA1308;CA1861;CA2007;CA2234;VSTHRD111;SYSLIB1222 diff --git a/dotnet/src/Connectors/Connectors.Onnx/Connectors.Onnx.csproj b/dotnet/src/Connectors/Connectors.Onnx/Connectors.Onnx.csproj index 275d4b37931a..c988ac39a7ad 100644 --- a/dotnet/src/Connectors/Connectors.Onnx/Connectors.Onnx.csproj +++ b/dotnet/src/Connectors/Connectors.Onnx/Connectors.Onnx.csproj @@ -7,6 +7,7 @@ net8.0;netstandard2.0 true alpha + SYSLIB1222 diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs index d6b83f21a391..f0f56fd5e7b0 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs @@ -2,6 +2,7 @@ using System; using System.ClientModel; +using System.ClientModel.Primitives; using System.Collections.Generic; using System.Diagnostics; using System.Globalization; @@ -1660,6 +1661,76 @@ public async Task ItSendsEmptyStringWhenAssistantMessageContentIsNull() Assert.Equal(string.Empty, assistantMessageContent); } + [Theory] + [MemberData(nameof(WebSearchOptionsData))] + public async Task ItCreatesCorrectWebSearchOptionsAsync(object webSearchOptions, string expectedJson) + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + + var settings = new OpenAIPromptExecutionSettings + { + WebSearchOptions = webSearchOptions + }; + + // Act + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, settings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.True(optionsJson.TryGetProperty("web_search_options", out var property)); + Assert.Equal(JsonValueKind.Object, property.ValueKind); + Assert.Equal(expectedJson, 
property.GetRawText()); + } + + [Theory] + [MemberData(nameof(WebSearchOptionsData))] + public async Task ItCreatesCorrectWebSearchOptionsStreamingAsync(object webSearchOptions, string expectedJson) + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + using var stream = File.OpenRead("TestData/chat_completion_streaming_test_response.txt"); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + var settings = new OpenAIPromptExecutionSettings + { + WebSearchOptions = webSearchOptions + }; + + // Act + var asyncEnumerable = chatCompletion.GetStreamingChatMessageContentsAsync(this._chatHistoryForTest, settings); + await asyncEnumerable.GetAsyncEnumerator().MoveNextAsync(); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.True(optionsJson.TryGetProperty("web_search_options", out var property)); + Assert.Equal(JsonValueKind.Object, property.ValueKind); + Assert.Equal(expectedJson, property.GetRawText()); + } + + public static TheoryData WebSearchOptionsData => new() + { + { new ChatWebSearchOptions(), "{}" }, + { JsonSerializer.Deserialize("{}"), "{}" }, + { "{}", "{}" }, + { """{"user_location":{"type":"approximate","approximate":{"country":"GB","city":"London","region":"London"}}}""", + """{"user_location":{"type":"approximate","approximate":{"country":"GB","region":"London","city":"London"}}}""" }, + { JsonSerializer.Deserialize("""{"user_location":{"type":"approximate","approximate":{"country":"GB","city":"London","region":"London"}}}"""), + """{"user_location":{"type":"approximate","approximate":{"country":"GB","region":"London","city":"London"}}}""" }, + { ModelReaderWriter.Read(BinaryData.FromString("""{"user_location":{"type":"approximate","approximate":{"country":"GB","city":"London","region":"London"}}}"""))!, + """{"user_location":{"type":"approximate","approximate":{"country":"GB","region":"London","city":"London"}}}""" }, + }; + public void Dispose() { this._httpClient.Dispose(); diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj b/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj index 0f884f0df59c..64a0e72bde6d 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj +++ b/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj @@ -5,7 +5,6 @@ Microsoft.SemanticKernel.Connectors.OpenAI $(AssemblyName) net8.0;netstandard2.0 - true $(NoWarn);NU5104;SKEXP0001,SKEXP0010 true diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs index 0c9e0280cc77..6b375e94cc62 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs @@ -2,6 +2,7 @@ using System; using System.ClientModel; +using System.ClientModel.Primitives; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.Metrics; @@ -460,6 +461,7 @@ protected virtual ChatCompletionOptions CreateChatCompletionOptions( { var options = new ChatCompletionOptions { + WebSearchOptions = GetWebSearchOptions(executionSettings), MaxOutputTokenCount = executionSettings.MaxTokens, 
Temperature = (float?)executionSettings.Temperature, TopP = (float?)executionSettings.TopP, @@ -550,6 +552,31 @@ protected virtual ChatCompletionOptions CreateChatCompletionOptions( throw new NotSupportedException($"The provided reasoning effort '{effortLevelObject.GetType()}' is not supported."); } + protected static ChatWebSearchOptions? GetWebSearchOptions(OpenAIPromptExecutionSettings executionSettings) + { + if (executionSettings.WebSearchOptions is null) + { + return null; + } + + if (executionSettings.WebSearchOptions is ChatWebSearchOptions webSearchOptions) + { + return webSearchOptions; + } + + if (executionSettings.WebSearchOptions is string webSearchOptionsString) + { + return ModelReaderWriter.Read(BinaryData.FromString(webSearchOptionsString)); + } + + if (executionSettings.WebSearchOptions is JsonElement webSearchOptionsElement) + { + return ModelReaderWriter.Read(BinaryData.FromString(webSearchOptionsElement.GetRawText())); + } + + throw new NotSupportedException($"The provided web search options '{executionSettings.WebSearchOptions.GetType()}' is not supported."); + } + /// /// Retrieves the response format based on the provided settings. /// diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs index bd3187b936d6..12b44717113a 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs @@ -364,6 +364,36 @@ public bool? Store } } + /// + /// An object to allow models to search the web for the latest information before generating a response. + /// + /// + /// Supported types are: + /// - object; + /// - , which will be used to automatically deserialize into . + /// - , which will be used to automatically deserialize into . + /// + /// Currently, you need to use one of these models to use web search in Chat Completions: + /// + /// gpt-4o-search-preview + /// gpt-4o-mini-search-preview + /// + /// + /// + [Experimental("SKEXP0010")] + [JsonPropertyName("web_search_options")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public object? WebSearchOptions + { + get => this._webSearchOptions; + + set + { + this.ThrowIfFrozen(); + this._webSearchOptions = value; + } + } + /// public override void Freeze() { @@ -456,12 +486,14 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutio TopLogprobs = this.TopLogprobs, Store = this.Store, Metadata = this.Metadata is not null ? new Dictionary(this.Metadata) : null, - ReasoningEffort = this.ReasoningEffort + ReasoningEffort = this.ReasoningEffort, + WebSearchOptions = this.WebSearchOptions, }; } #region private ================================================================================ + private object? _webSearchOptions; private object? _reasoningEffort; private double? _temperature; private double? 
_topP; diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeGenericDataModelMapperTests.cs index 9a41450d9649..0a96bb41cc3b 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeGenericDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeGenericDataModelMapperTests.cs @@ -77,20 +77,20 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes() // Assert Assert.Equal(TestKeyString, storageModel.Id); - Assert.Equal("string", (string?)storageModel.Metadata!["StringDataProp"].Inner); + Assert.Equal("string", (string?)storageModel.Metadata!["StringDataProp"]!.Value); // MetadataValue converts all numeric types to double. - Assert.Equal(1, (double?)storageModel.Metadata["IntDataProp"].Inner); - Assert.Equal(2, (double?)storageModel.Metadata["NullableIntDataProp"].Inner); - Assert.Equal(3L, (double?)storageModel.Metadata["LongDataProp"].Inner); - Assert.Equal(4L, (double?)storageModel.Metadata["NullableLongDataProp"].Inner); - Assert.Equal(5.0f, (double?)storageModel.Metadata["FloatDataProp"].Inner); - Assert.Equal(6.0f, (double?)storageModel.Metadata["NullableFloatDataProp"].Inner); - Assert.Equal(7.0, (double?)storageModel.Metadata["DoubleDataProp"].Inner); - Assert.Equal(8.0, (double?)storageModel.Metadata["NullableDoubleDataProp"].Inner); - Assert.Equal(true, (bool?)storageModel.Metadata["BoolDataProp"].Inner); - Assert.Equal(false, (bool?)storageModel.Metadata["NullableBoolDataProp"].Inner); - Assert.Equal(s_taglist, ((IEnumerable?)(storageModel.Metadata["TagListDataProp"].Inner!)) - .Select(x => x.Inner as string) + Assert.Equal(1, (double?)storageModel.Metadata["IntDataProp"]!.Value); + Assert.Equal(2, (double?)storageModel.Metadata["NullableIntDataProp"]!.Value); + Assert.Equal(3L, (double?)storageModel.Metadata["LongDataProp"]!.Value); + Assert.Equal(4L, (double?)storageModel.Metadata["NullableLongDataProp"]!.Value); + Assert.Equal(5.0f, (double?)storageModel.Metadata["FloatDataProp"]!.Value); + Assert.Equal(6.0f, (double?)storageModel.Metadata["NullableFloatDataProp"]!.Value); + Assert.Equal(7.0, (double?)storageModel.Metadata["DoubleDataProp"]!.Value); + Assert.Equal(8.0, (double?)storageModel.Metadata["NullableDoubleDataProp"]!.Value); + Assert.Equal(true, (bool?)storageModel.Metadata["BoolDataProp"]!.Value); + Assert.Equal(false, (bool?)storageModel.Metadata["NullableBoolDataProp"]!.Value); + Assert.Equal(s_taglist, ((IEnumerable?)(storageModel.Metadata["TagListDataProp"]!.Value!)) + .Select(x => x.Value as string) .ToArray()); Assert.Equal(s_vector, storageModel.Values); } @@ -136,9 +136,9 @@ public void MapFromDataToStorageModelMapsNullValues() // Assert Assert.Equal(TestKeyString, storageModel.Id); - Assert.True(storageModel.Metadata!["StringDataProp"].Inner == null); - Assert.True(storageModel.Metadata["NullableIntDataProp"].Inner == null); - Assert.True(storageModel.Metadata["NullableTagListDataProp"].Inner == null); + Assert.Null(storageModel.Metadata!["StringDataProp"]); + Assert.Null(storageModel.Metadata["NullableIntDataProp"]); + Assert.Null(storageModel.Metadata["NullableTagListDataProp"]); } [Fact] @@ -153,7 +153,7 @@ public void MapFromStorageToDataModelMapsAllSupportedTypes() var storageModel = new Vector() { Id = TestKeyString, - Metadata = new MetadataMap() + Metadata = new Metadata() { ["StringDataProp"] = (MetadataValue)"string", ["IntDataProp"] = (MetadataValue)1, @@ -168,7 +168,7 @@ public void 
MapFromStorageToDataModelMapsAllSupportedTypes() ["NullableBoolDataProp"] = (MetadataValue)false, ["TagListDataProp"] = (MetadataValue)new MetadataValue[] { "tag1", "tag2" } }, - Values = [1.0f, 2.0f, 3.0f] + Values = new float[] { 1.0f, 2.0f, 3.0f } }; // Act @@ -210,13 +210,13 @@ public void MapFromStorageToDataModelMapsNullValues() var storageModel = new Vector() { Id = TestKeyString, - Metadata = new MetadataMap() + Metadata = new Metadata() { - ["StringDataProp"] = new MetadataValue(), - ["NullableIntDataProp"] = new MetadataValue(), - ["NullableTagListDataProp"] = new MetadataValue(), + ["StringDataProp"] = null, + ["NullableIntDataProp"] = null, + ["NullableTagListDataProp"] = null, }, - Values = [1.0f, 2.0f, 3.0f] + Values = new float[] { 1.0f, 2.0f, 3.0f } }; var reader = new VectorStoreRecordPropertyReader( @@ -327,7 +327,7 @@ public void MapFromStorageToDataModelSkipsMissingProperties() var storageModel = new Vector() { Id = TestKeyString, - Values = [1.0f, 2.0f, 3.0f] + Values = new float[] { 1.0f, 2.0f, 3.0f } }; // Act diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeKernelBuilderExtensionsTests.cs index 6777470e537d..21f7b6649da5 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeKernelBuilderExtensionsTests.cs @@ -26,7 +26,7 @@ public PineconeKernelBuilderExtensionsTests() public void AddVectorStoreRegistersClass() { // Arrange. - using var client = new Sdk.PineconeClient("fake api key"); + var client = new Sdk.PineconeClient("fake api key"); this._kernelBuilder.Services.AddSingleton(client); // Act. @@ -50,7 +50,7 @@ public void AddVectorStoreWithApiKeyRegistersClass() public void AddVectorStoreRecordCollectionRegistersClass() { // Arrange. - using var client = new Sdk.PineconeClient("fake api key"); + var client = new Sdk.PineconeClient("fake api key"); this._kernelBuilder.Services.AddSingleton(client); // Act. diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeServiceCollectionExtensionsTests.cs index 084222d37bda..736cc3e3839d 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeServiceCollectionExtensionsTests.cs @@ -26,7 +26,7 @@ public PineconeServiceCollectionExtensionsTests() public void AddVectorStoreRegistersClass() { // Arrange. - using var client = new Sdk.PineconeClient("fake api key"); + var client = new Sdk.PineconeClient("fake api key"); this._serviceCollection.AddSingleton(client); // Act. @@ -49,7 +49,7 @@ public void AddVectorStoreWithApiKeyRegistersClass() public void AddVectorStoreRecordCollectionRegistersClass() { // Arrange. - using var client = new Sdk.PineconeClient("fake api key"); + var client = new Sdk.PineconeClient("fake api key"); this._serviceCollection.AddSingleton(client); // Act. 
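These Pinecone test updates track a newer Pinecone .NET SDK surface: metadata is a `Metadata` dictionary whose nullable `MetadataValue` entries expose their raw value via `.Value` (rather than `.Inner`), absent values are represented as `null` entries instead of empty `MetadataValue` instances, and the `PineconeClient` is no longer wrapped in a `using` (the updated client type is presumably not `IDisposable`). A minimal sketch of building a vector under those assumptions (the property names and values are illustrative):

```csharp
// Illustrative only; mirrors the SDK types exercised by the tests above.
using Pinecone; // assumed namespace of the Pinecone .NET SDK types (aliased as 'Sdk' in the tests)

var vector = new Vector
{
    Id = "example-key",
    Values = new float[] { 1.0f, 2.0f, 3.0f },
    Metadata = new Metadata
    {
        ["StringDataProp"] = (MetadataValue)"string",
        ["NullableIntDataProp"] = null, // a null entry, not an empty MetadataValue
    },
};
```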
diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs index 85ed14f7a468..0dc2620140f3 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs @@ -35,7 +35,7 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() new VectorStoreRecordVectorProperty("Embedding", typeof(ReadOnlyMemory)) { Dimensions = 4 }, } }; - using var pineconeClient = new Sdk.PineconeClient("fake api key"); + var pineconeClient = new Sdk.PineconeClient("fake api key"); // Act. var sut = new PineconeVectorStoreRecordCollection( diff --git a/dotnet/src/Connectors/VectorData.Abstractions/PACKAGE.md b/dotnet/src/Connectors/VectorData.Abstractions/PACKAGE.md index df87cb1b8586..da8da52c5eb5 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/PACKAGE.md +++ b/dotnet/src/Connectors/VectorData.Abstractions/PACKAGE.md @@ -4,7 +4,7 @@ Contains abstractions for accessing Vector Databases and Vector Indexes. ## Key Features -- Interfaces for Vector Database implementations which are provided in other packages including `Microsoft.SemanticKernel.Connectors.AzureAISearch`. +- Interfaces for Vector Database implementation. Vector Database implementations are provided separately in other packages, for example `Microsoft.SemanticKernel.Connectors.AzureAISearch`. ## How to Use @@ -22,14 +22,25 @@ The main types provided by this library are: ## Related Packages +Vector Database utilities: + +- `Microsoft.Extensions.VectorData` + +Vector Database implementations: + - `Microsoft.SemanticKernel.Connectors.AzureAISearch` - `Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB` - `Microsoft.SemanticKernel.Connectors.AzureCosmosNoSQL` +- `Microsoft.SemanticKernel.Connectors.InMemory` +- `Microsoft.SemanticKernel.Connectors.MongoDB` - `Microsoft.SemanticKernel.Connectors.Pinecone` +- `Microsoft.SemanticKernel.Connectors.Postgres` - `Microsoft.SemanticKernel.Connectors.Qdrant` - `Microsoft.SemanticKernel.Connectors.Redis` +- `Microsoft.SemanticKernel.Connectors.Sqlite` +- `Microsoft.SemanticKernel.Connectors.SqlServer` - `Microsoft.SemanticKernel.Connectors.Weaviate` ## Feedback & Contributing -Microsoft.Extensions.DependencyInjection.Abstractions is released as open source under the [MIT license](https://licenses.nuget.org/MIT). Bug reports and contributions are welcome at [the GitHub repository](https://github.com/microsoft/semantic-kernel). +Microsoft.Extensions.VectorData.Abstractions is released as open source under the [MIT license](https://licenses.nuget.org/MIT). Bug reports and contributions are welcome at [the GitHub repository](https://github.com/microsoft/semantic-kernel). 
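For context on the PACKAGE.md "How to Use" section above, a minimal end-to-end usage sketch of these abstractions follows. The in-memory connector as the backing implementation and the `Hotel` model are illustrative assumptions, not part of this package:

```csharp
// Minimal sketch, assuming Microsoft.SemanticKernel.Connectors.InMemory as the concrete implementation.
using System;
using Microsoft.Extensions.VectorData;
using Microsoft.SemanticKernel.Connectors.InMemory;

var vectorStore = new InMemoryVectorStore();
var collection = vectorStore.GetCollection<string, Hotel>("hotels");
await collection.CreateCollectionIfNotExistsAsync();

// Per this change, UpsertAsync and DeleteAsync no longer take UpsertRecordOptions/DeleteRecordOptions.
var key = await collection.UpsertAsync(new Hotel
{
    HotelId = "1",
    Description = "A cosy place",
    DescriptionEmbedding = new float[] { 0.1f, 0.2f, 0.3f, 0.4f },
});
var hotel = await collection.GetAsync(key, new GetRecordOptions { IncludeVectors = true });
await collection.DeleteAsync(key);

// Hypothetical record type used above.
public sealed class Hotel
{
    [VectorStoreRecordKey]
    public string HotelId { get; set; } = string.Empty;

    [VectorStoreRecordData]
    public string Description { get; set; } = string.Empty;

    [VectorStoreRecordVector(4)]
    public ReadOnlyMemory<float> DescriptionEmbedding { get; set; }
}
```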
diff --git a/dotnet/src/Connectors/VectorData.UnitTests/.editorconfig b/dotnet/src/Connectors/VectorData.UnitTests/.editorconfig new file mode 100644 index 000000000000..394eef685f21 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/.editorconfig @@ -0,0 +1,6 @@ +# Suppressing errors for Test projects under dotnet folder +[*.cs] +dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task +dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave +dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member +dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj b/dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj new file mode 100644 index 000000000000..d374de2022ba --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj @@ -0,0 +1,41 @@ + + + + VectorData.UnitTests + VectorData.UnitTests + net8.0 + true + enable + disable + false + $(NoWarn);SKEXP0001,SKEXP0020,VSTHRD111,CA2007,CS1591 + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderExtensionsTests.cs new file mode 100644 index 000000000000..ded7e8b44d28 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderExtensionsTests.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class KeywordHybridSearchBuilderExtensionsTests +{ + [Fact] + public void AsBuilderReturnsKeywordHybridSearchBuilder() + { + // Arrange + var search = new Mock>().Object; + + // Act + var builder = search.AsBuilder(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, builder.Build()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..35f776517dfc --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensionsTests.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class KeywordHybridSearchBuilderServiceCollectionExtensionsTests +{ + [Fact] + public void AddKeywordHybridSearchWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + + // Act + var builder = services.AddKeywordHybridSearch(search); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, d => d.ServiceType == typeof(IKeywordHybridSearch) && d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddKeywordHybridSearchWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + IKeywordHybridSearch Factory(IServiceProvider _) => search; + + // Act + var builder = services.AddKeywordHybridSearch(Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, d => d.ServiceType == typeof(IKeywordHybridSearch) && d.Lifetime == ServiceLifetime.Scoped); + } + + [Fact] + public void AddKeyedKeywordHybridSearchWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + var key = "testKey"; + + // Act + var builder = services.AddKeyedKeywordHybridSearch(key, search); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, + d => d.ServiceType == typeof(IKeywordHybridSearch) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddKeyedKeywordHybridSearchWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + IKeywordHybridSearch Factory(IServiceProvider _) => search; + var key = "testKey"; + + // Act + var builder = services.AddKeyedKeywordHybridSearch(key, Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, + d => d.ServiceType == typeof(IKeywordHybridSearch) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Scoped); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderTests.cs new file mode 100644 index 000000000000..90fb4560cad2 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderTests.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class KeywordHybridSearchBuilderTests +{ + [Fact] + public void ConstructorWithInstanceSetsInnerSearch() + { + // Arrange + var innerSearch = new Mock>().Object; + + // Act + var builder = new KeywordHybridSearchBuilder(innerSearch); + + // Assert + var builtSearch = builder.Build(); + Assert.Same(innerSearch, builtSearch); + } + + [Fact] + public void ConstructorWithFactoryCallsFactoryOnBuild() + { + // Arrange + var innerSearch = new Mock>().Object; + var serviceProvider = new Mock(); + IKeywordHybridSearch Factory(IServiceProvider _) => innerSearch; + + // Act + var builder = new KeywordHybridSearchBuilder(Factory); + var builtSearch = builder.Build(serviceProvider.Object); + + // Assert + Assert.Same(innerSearch, builtSearch); + } + + [Fact] + public void BuildWithMultipleFactoriesAppliesInReverseOrder() + { + // Arrange + var innerSearch = new Mock>().Object; + var mockSearch1 = new Mock>().Object; + var mockSearch2 = new Mock>().Object; + var builder = new KeywordHybridSearchBuilder(innerSearch); + + builder.Use(s => mockSearch1); + builder.Use(s => mockSearch2); + + // Act + var builtSearch = builder.Build(); + + // Assert + Assert.Same(mockSearch1, builtSearch); + } + + [Fact] + public void BuildWithNullReturningFactoryThrowsInvalidOperationException() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new KeywordHybridSearchBuilder(innerSearch); + builder.Use((s, _) => null!); + + // Act & Assert + var exception = Assert.Throws(() => builder.Build()); + Assert.Contains("returned null", exception.Message); + } + + [Fact] + public void BuildWithNullServiceProviderUsesEmptyServiceProvider() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new KeywordHybridSearchBuilder(innerSearch); + + // Act + var builtSearch = builder.Build(null); + + // Assert + Assert.Same(innerSearch, builtSearch); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchBuilderExtensionsTests.cs new file mode 100644 index 000000000000..8cf6587b3dd7 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchBuilderExtensionsTests.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingKeywordHybridSearchBuilderExtensionsTests +{ + [Fact] + public void UseLoggingWithFactoryAddsDecorator() + { + // Arrange + var innerSearch = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var builder = new KeywordHybridSearchBuilder(innerSearch); + + // Act + builder.UseLogging(loggerFactory.Object); + var result = builder.Build(); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullFactoryResolvesFromServiceProvider() + { + // Arrange + var innerSearch = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var serviceProvider = new Mock(); + serviceProvider.Setup(sp => sp.GetService(typeof(ILoggerFactory))).Returns(loggerFactory.Object); + var builder = new KeywordHybridSearchBuilder(innerSearch); + + // Act + builder.UseLogging(); + var result = builder.Build(serviceProvider.Object); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullLoggerFactoryReturnsInnerSearch() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new KeywordHybridSearchBuilder(innerSearch); + + // Act + builder.UseLogging(NullLoggerFactory.Instance); + var result = builder.Build(); + + // Assert + Assert.Same(innerSearch, result); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchTests.cs new file mode 100644 index 000000000000..85231becf613 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchTests.cs @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingKeywordHybridSearchTests +{ + [Fact] + public void ConstructorThrowsOnNullInnerSearch() + { + // Arrange + var logger = new Mock().Object; + + // Act & Assert + Assert.Throws(() => new LoggingKeywordHybridSearch(null!, logger)); + } + + [Fact] + public void ConstructorThrowsOnNullLogger() + { + // Arrange + var innerSearch = new Mock>().Object; + + // Act & Assert + Assert.Throws(() => new LoggingKeywordHybridSearch(innerSearch, null!)); + } + + [Fact] + public async Task HybridSearchDelegatesToInnerSearchAsync() + { + // Arrange + var innerSearch = new Mock>(); + var logger = new Mock().Object; + var vector = new float[] { 1.0f }; + var keywords = new List { "test" }; + var options = new HybridSearchOptions(); + var searchResults = new[] { new VectorSearchResult("result", 0.9f) }.ToAsyncEnumerable(); + var results = new VectorSearchResults(searchResults); + + innerSearch.Setup(s => s.HybridSearchAsync(vector, keywords, options, default)) + .ReturnsAsync(results); + + var decorator = new LoggingKeywordHybridSearch(innerSearch.Object, logger); + + // Act + var actualResults = await decorator.HybridSearchAsync(vector, keywords, options); + + // Assert + Assert.Same(results, actualResults); + innerSearch.Verify(s => s.HybridSearchAsync(vector, keywords, options, default), Times.Once()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchBuilderExtensionsTests.cs new file mode 100644 index 000000000000..026949453211 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchBuilderExtensionsTests.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorizableTextSearchBuilderExtensionsTests +{ + [Fact] + public void UseLoggingWithFactoryAddsDecorator() + { + // Arrange + var innerSearch = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var builder = new VectorizableTextSearchBuilder(innerSearch); + + // Act + builder.UseLogging(loggerFactory.Object); + var result = builder.Build(); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullFactoryResolvesFromServiceProvider() + { + // Arrange + var innerSearch = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var serviceProvider = new Mock(); + serviceProvider.Setup(sp => sp.GetService(typeof(ILoggerFactory))).Returns(loggerFactory.Object); + var builder = new VectorizableTextSearchBuilder(innerSearch); + + // Act + builder.UseLogging(); + var result = builder.Build(serviceProvider.Object); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullLoggerFactoryReturnsInnerSearch() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new VectorizableTextSearchBuilder(innerSearch); + + // Act + builder.UseLogging(NullLoggerFactory.Instance); + var result = builder.Build(); + + // Assert + Assert.Same(innerSearch, result); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchTests.cs new file mode 100644 index 000000000000..e7de933156b7 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchTests.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorizableTextSearchTests +{ + [Fact] + public void ConstructorThrowsOnNullInnerSearch() + { + // Arrange + var logger = new Mock().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorizableTextSearch(null!, logger)); + } + + [Fact] + public void ConstructorThrowsOnNullLogger() + { + // Arrange + var innerSearch = new Mock>().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorizableTextSearch(innerSearch, null!)); + } + + [Fact] + public async Task VectorizableTextSearchDelegatesToInnerSearchAsync() + { + // Arrange + var innerSearch = new Mock>(); + var logger = new Mock().Object; + var searchText = "test"; + var options = new VectorSearchOptions(); + var searchResults = new[] { new VectorSearchResult("result", 0.9f) }.ToAsyncEnumerable(); + var results = new VectorSearchResults(searchResults); + innerSearch.Setup(s => s.VectorizableTextSearchAsync(searchText, options, default)) + .ReturnsAsync(results); + var decorator = new LoggingVectorizableTextSearch(innerSearch.Object, logger); + + // Act + var actualResults = await decorator.VectorizableTextSearchAsync(searchText, options); + + // Assert + Assert.Same(results, actualResults); + innerSearch.Verify(s => s.VectorizableTextSearchAsync(searchText, options, default), Times.Once()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchBuilderExtensionsTests.cs new file mode 100644 index 000000000000..a6380a9c5303 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchBuilderExtensionsTests.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorizedSearchBuilderExtensionsTests +{ + [Fact] + public void UseLoggingWithFactoryAddsDecorator() + { + // Arrange + var innerSearch = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var builder = new VectorizedSearchBuilder(innerSearch); + + // Act + builder.UseLogging(loggerFactory.Object); + var result = builder.Build(); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullFactoryResolvesFromServiceProvider() + { + // Arrange + var innerSearch = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var serviceProvider = new Mock(); + serviceProvider.Setup(sp => sp.GetService(typeof(ILoggerFactory))).Returns(loggerFactory.Object); + var builder = new VectorizedSearchBuilder(innerSearch); + + // Act + builder.UseLogging(); + var result = builder.Build(serviceProvider.Object); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullLoggerFactoryReturnsInnerSearch() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new VectorizedSearchBuilder(innerSearch); + + // Act + builder.UseLogging(NullLoggerFactory.Instance); + var result = builder.Build(); + + // Assert + Assert.Same(innerSearch, result); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchTests.cs new file mode 100644 index 000000000000..ceb801060e7e --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchTests.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorizedSearchTests +{ + [Fact] + public void ConstructorThrowsOnNullInnerSearch() + { + // Arrange + var logger = new Mock().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorizedSearch(null!, logger)); + } + + [Fact] + public void ConstructorThrowsOnNullLogger() + { + // Arrange + var innerSearch = new Mock>().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorizedSearch(innerSearch, null!)); + } + + [Fact] + public async Task VectorizedSearchDelegatesToInnerSearchAsync() + { + // Arrange + var innerSearch = new Mock>(); + var logger = new Mock().Object; + var vector = new float[] { 1.0f }; + var options = new VectorSearchOptions(); + var searchResults = new[] { new VectorSearchResult("result", 0.9f) }.ToAsyncEnumerable(); + var results = new VectorSearchResults(searchResults); + innerSearch.Setup(s => s.VectorizedSearchAsync(vector, options, default)) + .ReturnsAsync(results); + var decorator = new LoggingVectorizedSearch(innerSearch.Object, logger); + + // Act + var actualResults = await decorator.VectorizedSearchAsync(vector, options); + + // Assert + Assert.Same(results, actualResults); + innerSearch.Verify(s => s.VectorizedSearchAsync(vector, options, default), Times.Once()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderExtensionsTests.cs new file mode 100644 index 000000000000..197256c39108 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderExtensionsTests.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorizableTextSearchBuilderExtensionsTests +{ + [Fact] + public void AsBuilderReturnsVectorizableTextSearchBuilder() + { + // Arrange + var search = new Mock>().Object; + + // Act + var builder = search.AsBuilder(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, builder.Build()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..42d153fcbb5b --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensionsTests.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorizableTextSearchBuilderServiceCollectionExtensionsTests +{ + [Fact] + public void AddVectorizableTextSearchWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + + // Act + var builder = services.AddVectorizableTextSearch(search); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, d => d.ServiceType == typeof(IVectorizableTextSearch) && d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddVectorizableTextSearchWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + IVectorizableTextSearch Factory(IServiceProvider _) => search; + + // Act + var builder = services.AddVectorizableTextSearch(Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, d => d.ServiceType == typeof(IVectorizableTextSearch) && d.Lifetime == ServiceLifetime.Scoped); + } + + [Fact] + public void AddKeyedVectorizableTextSearchWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorizableTextSearch(key, search); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorizableTextSearch) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddKeyedVectorizableTextSearchWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + IVectorizableTextSearch Factory(IServiceProvider _) => search; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorizableTextSearch(key, Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorizableTextSearch) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Scoped); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderTests.cs new file mode 100644 index 000000000000..1d556abe5b26 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderTests.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorizableTextSearchBuilderTests +{ + [Fact] + public void ConstructorWithInstanceSetsInnerSearch() + { + // Arrange + var innerSearch = new Mock>().Object; + + // Act + var builder = new VectorizableTextSearchBuilder(innerSearch); + + // Assert + var builtSearch = builder.Build(); + Assert.Same(innerSearch, builtSearch); + } + + [Fact] + public void ConstructorWithFactoryCallsFactoryOnBuild() + { + // Arrange + var innerSearch = new Mock>().Object; + var serviceProvider = new Mock(); + IVectorizableTextSearch Factory(IServiceProvider _) => innerSearch; + + // Act + var builder = new VectorizableTextSearchBuilder(Factory); + var builtSearch = builder.Build(serviceProvider.Object); + + // Assert + Assert.Same(innerSearch, builtSearch); + } + + [Fact] + public void BuildWithMultipleFactoriesAppliesInReverseOrder() + { + // Arrange + var innerSearch = new Mock>().Object; + var mockSearch1 = new Mock>().Object; + var mockSearch2 = new Mock>().Object; + var builder = new VectorizableTextSearchBuilder(innerSearch); + + builder.Use(s => mockSearch1); + builder.Use(s => mockSearch2); + + // Act + var builtSearch = builder.Build(); + + // Assert + Assert.Same(mockSearch1, builtSearch); + } + + [Fact] + public void BuildWithNullReturningFactoryThrowsInvalidOperationException() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new VectorizableTextSearchBuilder(innerSearch); + builder.Use((s, _) => null!); + + // Act & Assert + var exception = Assert.Throws(() => builder.Build()); + Assert.Contains("returned null", exception.Message); + } + + [Fact] + public void BuildWithNullServiceProviderUsesEmptyServiceProvider() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new VectorizableTextSearchBuilder(innerSearch); + + // Act + var builtSearch = builder.Build(null); + + // Assert + Assert.Same(innerSearch, builtSearch); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderExtensionsTests.cs new file mode 100644 index 000000000000..eb0548f2097a --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderExtensionsTests.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorizedSearchBuilderExtensionsTests +{ + [Fact] + public void AsBuilderReturnsVectorizedSearchBuilder() + { + // Arrange + var search = new Mock>().Object; + + // Act + var builder = search.AsBuilder(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, builder.Build()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..b183d1cba162 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensionsTests.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorizedSearchBuilderServiceCollectionExtensionsTests +{ + [Fact] + public void AddVectorizedSearchWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + + // Act + var builder = services.AddVectorizedSearch(search); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, d => d.ServiceType == typeof(IVectorizedSearch) && d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddVectorizedSearchWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + IVectorizedSearch Factory(IServiceProvider _) => search; + + // Act + var builder = services.AddVectorizedSearch(Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, d => d.ServiceType == typeof(IVectorizedSearch) && d.Lifetime == ServiceLifetime.Scoped); + } + + [Fact] + public void AddKeyedVectorizedSearchWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorizedSearch(key, search); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorizedSearch) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddKeyedVectorizedSearchWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var search = new Mock>().Object; + IVectorizedSearch Factory(IServiceProvider _) => search; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorizedSearch(key, Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedSearch = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(search, resolvedSearch); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorizedSearch) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Scoped); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderTests.cs new file mode 100644 index 000000000000..d883db85a33d --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderTests.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorizedSearchBuilderTests +{ + [Fact] + public void ConstructorWithInstanceSetsInnerSearch() + { + // Arrange + var innerSearch = new Mock>().Object; + + // Act + var builder = new VectorizedSearchBuilder(innerSearch); + + // Assert + var builtSearch = builder.Build(); + Assert.Same(innerSearch, builtSearch); + } + + [Fact] + public void ConstructorWithFactoryCallsFactoryOnBuild() + { + // Arrange + var innerSearch = new Mock>().Object; + var serviceProvider = new Mock(); + IVectorizedSearch Factory(IServiceProvider _) => innerSearch; + + // Act + var builder = new VectorizedSearchBuilder(Factory); + var builtSearch = builder.Build(serviceProvider.Object); + + // Assert + Assert.Same(innerSearch, builtSearch); + } + + [Fact] + public void BuildWithMultipleFactoriesAppliesInReverseOrder() + { + // Arrange + var innerSearch = new Mock>().Object; + var mockSearch1 = new Mock>().Object; + var mockSearch2 = new Mock>().Object; + var builder = new VectorizedSearchBuilder(innerSearch); + + builder.Use(s => mockSearch1); + builder.Use(s => mockSearch2); + + // Act + var builtSearch = builder.Build(); + + // Assert + Assert.Same(mockSearch1, builtSearch); + } + + [Fact] + public void BuildWithNullReturningFactoryThrowsInvalidOperationException() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new VectorizedSearchBuilder(innerSearch); + builder.Use((s, _) => null!); + + // Act & Assert + var exception = Assert.Throws(() => builder.Build()); + Assert.Contains("returned null", exception.Message); + } + + [Fact] + public void BuildWithNullServiceProviderUsesEmptyServiceProvider() + { + // Arrange + var innerSearch = new Mock>().Object; + var builder = new VectorizedSearchBuilder(innerSearch); + + // Act + var builtSearch = builder.Build(null); + + // Assert + Assert.Same(innerSearch, builtSearch); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreBuilderExtensionsTests.cs new file mode 100644 index 000000000000..8db4dbf35c2a --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreBuilderExtensionsTests.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. 
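+// Covers the VectorStoreBuilder.UseLogging extension: with a usable ILoggerFactory the built store is wrapped
+// in a LoggingVectorStore, and with NullLoggerFactory the inner store is returned unchanged. Typical usage:
+//     var store = new VectorStoreBuilder(innerStore).UseLogging(loggerFactory).Build();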
+ +using System; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorStoreBuilderExtensionsTests +{ + [Fact] + public void UseLoggingWithFactoryAddsDecorator() + { + // Arrange + var innerStore = new Mock().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var builder = new VectorStoreBuilder(innerStore); + + // Act + builder.UseLogging(loggerFactory.Object); + var result = builder.Build(); + + // Assert + Assert.IsType(result); + } + + [Fact] + public void UseLoggingWithNullFactoryResolvesFromServiceProvider() + { + // Arrange + var innerStore = new Mock().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var serviceProvider = new Mock(); + serviceProvider.Setup(sp => sp.GetService(typeof(ILoggerFactory))).Returns(loggerFactory.Object); + var builder = new VectorStoreBuilder(innerStore); + + // Act + builder.UseLogging(); + var result = builder.Build(serviceProvider.Object); + + // Assert + Assert.IsType(result); + } + + [Fact] + public void UseLoggingWithNullLoggerFactoryReturnsInnerStore() + { + // Arrange + var innerStore = new Mock().Object; + var builder = new VectorStoreBuilder(innerStore); + + // Act + builder.UseLogging(NullLoggerFactory.Instance); + var result = builder.Build(); + + // Assert + Assert.Same(innerStore, result); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensionsTests.cs new file mode 100644 index 000000000000..638e91fc17ad --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensionsTests.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorStoreRecordCollectionBuilderExtensionsTests +{ + [Fact] + public void UseLoggingWithFactoryAddsDecorator() + { + // Arrange + var innerCollection = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var builder = new VectorStoreRecordCollectionBuilder(innerCollection); + + // Act + builder.UseLogging(loggerFactory.Object); + var result = builder.Build(); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullFactoryResolvesFromServiceProvider() + { + // Arrange + var innerCollection = new Mock>().Object; + var loggerFactory = new Mock(); + loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); + var serviceProvider = new Mock(); + serviceProvider.Setup(sp => sp.GetService(typeof(ILoggerFactory))).Returns(loggerFactory.Object); + var builder = new VectorStoreRecordCollectionBuilder(innerCollection); + + // Act + builder.UseLogging(); + var result = builder.Build(serviceProvider.Object); + + // Assert + Assert.IsType>(result); + } + + [Fact] + public void UseLoggingWithNullLoggerFactoryReturnsInnerCollection() + { + // Arrange + var innerCollection = new Mock>().Object; + var builder = new VectorStoreRecordCollectionBuilder(innerCollection); + + // Act + builder.UseLogging(NullLoggerFactory.Instance); + var result = builder.Build(); + + // Assert + Assert.Same(innerCollection, result); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionTests.cs new file mode 100644 index 000000000000..294ee64555a0 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionTests.cs @@ -0,0 +1,245 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorStoreRecordCollectionTests +{ + [Fact] + public void ConstructorThrowsOnNullInnerCollection() + { + // Arrange + var logger = new Mock().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorStoreRecordCollection(null!, logger)); + } + + [Fact] + public void ConstructorThrowsOnNullLogger() + { + // Arrange + var innerCollection = new Mock>().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorStoreRecordCollection(innerCollection, null!)); + } + + [Fact] + public void CollectionNameReturnsInnerCollectionName() + { + // Arrange + var innerCollection = new Mock>(); + innerCollection.Setup(c => c.CollectionName).Returns("test"); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + var name = decorator.CollectionName; + + // Assert + Assert.Equal("test", name); + innerCollection.Verify(c => c.CollectionName, Times.Once()); + } + + [Fact] + public async Task CollectionExistsDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + innerCollection.Setup(c => c.CollectionExistsAsync(default)).ReturnsAsync(true); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + var exists = await decorator.CollectionExistsAsync(); + + // Assert + Assert.True(exists); + innerCollection.Verify(c => c.CollectionExistsAsync(default), Times.Once()); + } + + [Fact] + public async Task CreateCollectionDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + innerCollection.Setup(c => c.CreateCollectionAsync(default)).Returns(Task.CompletedTask); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + await decorator.CreateCollectionAsync(); + + // Assert + innerCollection.Verify(c => c.CreateCollectionAsync(default), Times.Once()); + } + + [Fact] + public async Task CreateCollectionIfNotExistsDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + innerCollection.Setup(c => c.CreateCollectionIfNotExistsAsync(default)).Returns(Task.CompletedTask); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + await decorator.CreateCollectionIfNotExistsAsync(); + + // Assert + innerCollection.Verify(c => c.CreateCollectionIfNotExistsAsync(default), Times.Once()); + } + + [Fact] + public async Task DeleteDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + innerCollection.Setup(c => c.DeleteAsync("key", default)).Returns(Task.CompletedTask); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + await decorator.DeleteAsync("key"); + + // Assert + innerCollection.Verify(c => c.DeleteAsync("key", default), Times.Once()); + } + + [Fact] + public async Task DeleteBatchDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + var keys = new[] { "key1", "key2" }; + innerCollection.Setup(c => c.DeleteBatchAsync(keys, default)).Returns(Task.CompletedTask); + var logger = new Mock().Object; + var decorator = 
new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + await decorator.DeleteBatchAsync(keys); + + // Assert + innerCollection.Verify(c => c.DeleteBatchAsync(keys, default), Times.Once()); + } + + [Fact] + public async Task DeleteCollectionDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + innerCollection.Setup(c => c.DeleteCollectionAsync(default)).Returns(Task.CompletedTask); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + await decorator.DeleteCollectionAsync(); + + // Assert + innerCollection.Verify(c => c.DeleteCollectionAsync(default), Times.Once()); + } + + [Fact] + public async Task GetDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + var record = new object(); + innerCollection.Setup(c => c.GetAsync("key", null, default)).ReturnsAsync(record); + + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + var result = await decorator.GetAsync("key"); + + // Assert + Assert.Same(record, result); + innerCollection.Verify(c => c.GetAsync("key", null, default), Times.Once()); + } + + [Fact] + public async Task GetBatchDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + var keys = new[] { "key1", "key2" }; + var records = new[] { new object(), new object() }; + innerCollection.Setup(c => c.GetBatchAsync(keys, null, default)).Returns(records.ToAsyncEnumerable()); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + var result = await decorator.GetBatchAsync(keys).ToListAsync(); + + // Assert + Assert.Equal(records, result); + innerCollection.Verify(c => c.GetBatchAsync(keys, null, default), Times.Once()); + } + + [Fact] + public async Task UpsertDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + var record = new object(); + innerCollection.Setup(c => c.UpsertAsync(record, default)).ReturnsAsync("key"); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + var key = await decorator.UpsertAsync(record); + + // Assert + Assert.Equal("key", key); + innerCollection.Verify(c => c.UpsertAsync(record, default), Times.Once()); + } + + [Fact] + public async Task UpsertBatchDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + var records = new[] { new object(), new object() }; + var keys = new[] { "key1", "key2" }; + innerCollection.Setup(c => c.UpsertBatchAsync(records, default)).Returns(keys.ToAsyncEnumerable()); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + var result = await decorator.UpsertBatchAsync(records).ToListAsync(); + + // Assert + Assert.Equal(keys, result); + innerCollection.Verify(c => c.UpsertBatchAsync(records, default), Times.Once()); + } + + [Fact] + public async Task VectorizedSearchDelegatesToInnerCollectionAsync() + { + // Arrange + var innerCollection = new Mock>(); + var vector = new float[] { 1.0f }; + var options = new VectorSearchOptions(); + var searchResults = new[] { new VectorSearchResult("result", 0.9f) }.ToAsyncEnumerable(); + var results = new VectorSearchResults(searchResults); + innerCollection.Setup(c => 
c.VectorizedSearchAsync(vector, options, default)).ReturnsAsync(results); + var logger = new Mock().Object; + var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); + + // Act + var actualResults = await decorator.VectorizedSearchAsync(vector, options); + + // Assert + Assert.Same(results, actualResults); + innerCollection.Verify(c => c.VectorizedSearchAsync(vector, options, default), Times.Once()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreTests.cs new file mode 100644 index 000000000000..058fc56b6ad1 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreTests.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class LoggingVectorStoreTests +{ + [Fact] + public void ConstructorThrowsOnNullInnerStore() + { + // Arrange + var logger = new Mock().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorStore(null!, logger)); + } + + [Fact] + public void ConstructorThrowsOnNullLogger() + { + // Arrange + var innerStore = new Mock().Object; + + // Act & Assert + Assert.Throws(() => new LoggingVectorStore(innerStore, null!)); + } + + [Fact] + public void GetCollectionDelegatesToInnerStore() + { + // Arrange + var innerStore = new Mock(); + var logger = new Mock().Object; + var collection = new Mock>().Object; + innerStore.Setup(s => s.GetCollection("test", null)) + .Returns(collection); + var decorator = new LoggingVectorStore(innerStore.Object, logger); + + // Act + var result = decorator.GetCollection("test"); + + // Assert + Assert.IsType>(result); + innerStore.Verify(s => s.GetCollection("test", null), Times.Once()); + } + + [Fact] + public async Task ListCollectionNamesDelegatesToInnerStoreAsync() + { + // Arrange + var innerStore = new Mock(); + var logger = new Mock().Object; + string[] names = ["col1", "col2"]; + innerStore.Setup(s => s.ListCollectionNamesAsync(default)) + .Returns(names.ToAsyncEnumerable()); + var decorator = new LoggingVectorStore(innerStore.Object, logger); + + // Act + var result = await decorator.ListCollectionNamesAsync().ToListAsync(); + + // Assert + Assert.Equal(names, result); + innerStore.Verify(s => s.ListCollectionNamesAsync(default), Times.Once()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderExtensionsTests.cs new file mode 100644 index 000000000000..cd6aa33e6af1 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderExtensionsTests.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. 
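
The delegation tests above all follow the same shape: each member of the decorated collection forwards to the inner collection, and Moq verifies the call. A minimal sketch of that shape, showing only one member and not the shipped LoggingVectorStoreRecordCollection implementation:

```csharp
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.VectorData;

// Simplified sketch of the decorator pattern the tests exercise: log, delegate, log.
internal sealed class DeleteLoggingSketch<TKey, TRecord> where TKey : notnull
{
    private readonly IVectorStoreRecordCollection<TKey, TRecord> _inner;
    private readonly ILogger _logger;

    public DeleteLoggingSketch(IVectorStoreRecordCollection<TKey, TRecord> inner, ILogger logger)
    {
        _inner = inner;
        _logger = logger;
    }

    public async Task DeleteAsync(TKey key, CancellationToken cancellationToken = default)
    {
        _logger.LogDebug("DeleteAsync invoked.");
        await _inner.DeleteAsync(key, cancellationToken).ConfigureAwait(false);
        _logger.LogDebug("DeleteAsync completed.");
    }
}
```
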
+ +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorStoreBuilderExtensionsTests +{ + [Fact] + public void AsBuilderReturnsVectorStoreBuilder() + { + // Arrange + var store = new Mock().Object; + + // Act + var builder = store.AsBuilder(); + + // Assert + Assert.IsType(builder); + Assert.Same(store, builder.Build()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..4658cbe2ea6a --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderServiceCollectionExtensionsTests.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorStoreBuilderServiceCollectionExtensionsTests +{ + [Fact] + public void AddVectorStoreWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var store = new Mock().Object; + + // Act + var builder = services.AddVectorStore(store); + var provider = services.BuildServiceProvider(); + var resolvedStore = provider.GetService(); + + // Assert + Assert.IsType(builder); + Assert.Same(store, resolvedStore); + Assert.Single(services, d => d.ServiceType == typeof(IVectorStore) && d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddVectorStoreWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var store = new Mock().Object; + IVectorStore Factory(IServiceProvider _) => store; + + // Act + var builder = services.AddVectorStore(Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedStore = provider.GetService(); + + // Assert + Assert.IsType(builder); + Assert.Same(store, resolvedStore); + Assert.Single(services, d => d.ServiceType == typeof(IVectorStore) && d.Lifetime == ServiceLifetime.Scoped); + } + + [Fact] + public void AddKeyedVectorStoreWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var store = new Mock().Object; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorStore(key, store); + var provider = services.BuildServiceProvider(); + var resolvedStore = provider.GetKeyedService(key); + + // Assert + Assert.IsType(builder); + Assert.Same(store, resolvedStore); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorStore) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddKeyedVectorStoreWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var store = new Mock().Object; + IVectorStore Factory(IServiceProvider _) => store; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorStore(key, Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedStore = provider.GetKeyedService(key); + + // Assert + Assert.IsType(builder); + Assert.Same(store, resolvedStore); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorStore) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Scoped); + } +} diff --git 
a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderTests.cs new file mode 100644 index 000000000000..88da238bef5d --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderTests.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorStoreBuilderTests +{ + [Fact] + public void ConstructorWithInstanceSetsInnerStore() + { + // Arrange + var innerStore = new Mock().Object; + + // Act + var builder = new VectorStoreBuilder(innerStore); + + // Assert + var builtStore = builder.Build(); + Assert.Same(innerStore, builtStore); + } + + [Fact] + public void ConstructorWithFactoryCallsFactoryOnBuild() + { + // Arrange + var innerStore = new Mock().Object; + var serviceProvider = new Mock(); + IVectorStore Factory(IServiceProvider _) => innerStore; + + // Act + var builder = new VectorStoreBuilder(Factory); + var builtStore = builder.Build(serviceProvider.Object); + + // Assert + Assert.Same(innerStore, builtStore); + } + + [Fact] + public void BuildWithMultipleFactoriesAppliesInReverseOrder() + { + // Arrange + var innerStore = new Mock().Object; + var mockStore1 = new Mock().Object; + var mockStore2 = new Mock().Object; + var builder = new VectorStoreBuilder(innerStore); + + builder.Use(s => mockStore1); + builder.Use(s => mockStore2); + + // Act + var builtStore = builder.Build(); + + // Assert + Assert.Same(mockStore1, builtStore); + } + + [Fact] + public void BuildWithNullReturningFactoryThrowsInvalidOperationException() + { + // Arrange + var innerStore = new Mock().Object; + var builder = new VectorStoreBuilder(innerStore); + builder.Use((s, _) => null!); + + // Act & Assert + var exception = Assert.Throws(() => builder.Build()); + Assert.Contains("returned null", exception.Message); + } + + [Fact] + public void BuildWithNullServiceProviderUsesEmptyServiceProvider() + { + // Arrange + var innerStore = new Mock().Object; + var builder = new VectorStoreBuilder(innerStore); + + // Act + var builtStore = builder.Build(null); + + // Assert + Assert.Same(innerStore, builtStore); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderExtensionsTests.cs new file mode 100644 index 000000000000..52fb53ba849f --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderExtensionsTests.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. 
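
As a worked restatement of the ordering assertion in BuildWithMultipleFactoriesAppliesInReverseOrder: factories are applied last-to-first, so the first Use call produces the outermost instance that Build returns. A hypothetical sketch, assuming Use is chainable as in the other builders in this change:

```csharp
using Microsoft.Extensions.VectorData;
using Moq;

var inner = new Mock<IVectorStore>().Object;
var first = new Mock<IVectorStore>().Object;
var second = new Mock<IVectorStore>().Object;

var built = new VectorStoreBuilder(inner)
    .Use(_ => first)    // added first  => applied last => returned by Build()
    .Use(_ => second)   // added second => applied first, replacing `inner`
    .Build();

// `built` is `first`; `second` was the intermediate result handed to the first factory.
```
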
+ +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorStoreRecordCollectionBuilderExtensionsTests +{ + [Fact] + public void AsBuilderReturnsVectorStoreRecordCollectionBuilder() + { + // Arrange + var collection = new Mock>().Object; + + // Act + var builder = collection.AsBuilder(); + + // Assert + Assert.IsType>(builder); + Assert.Same(collection, builder.Build()); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..973f2ab3baf2 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensionsTests.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorStoreRecordCollectionBuilderServiceCollectionExtensionsTests +{ + [Fact] + public void AddVectorStoreRecordCollectionWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var collection = new Mock>().Object; + + // Act + var builder = services.AddVectorStoreRecordCollection(collection); + var provider = services.BuildServiceProvider(); + var resolvedCollection = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(collection, resolvedCollection); + Assert.Single(services, d => d.ServiceType == typeof(IVectorStoreRecordCollection) && d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddVectorStoreRecordCollectionWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var collection = new Mock>().Object; + IVectorStoreRecordCollection Factory(IServiceProvider _) => collection; + + // Act + var builder = services.AddVectorStoreRecordCollection(Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedCollection = provider.GetService>(); + + // Assert + Assert.IsType>(builder); + Assert.Same(collection, resolvedCollection); + Assert.Single(services, d => d.ServiceType == typeof(IVectorStoreRecordCollection) && d.Lifetime == ServiceLifetime.Scoped); + } + + [Fact] + public void AddKeyedVectorStoreRecordCollectionWithInstanceReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var collection = new Mock>().Object; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorStoreRecordCollection(key, collection); + var provider = services.BuildServiceProvider(); + var resolvedCollection = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(collection, resolvedCollection); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorStoreRecordCollection) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Singleton); + } + + [Fact] + public void AddKeyedVectorStoreRecordCollectionWithFactoryReturnsBuilder() + { + // Arrange + var services = new ServiceCollection(); + var collection = new Mock>().Object; + IVectorStoreRecordCollection Factory(IServiceProvider _) => collection; + var key = "testKey"; + + // Act + var builder = services.AddKeyedVectorStoreRecordCollection(key, 
Factory, ServiceLifetime.Scoped); + var provider = services.BuildServiceProvider(); + var resolvedCollection = provider.GetKeyedService>(key); + + // Assert + Assert.IsType>(builder); + Assert.Same(collection, resolvedCollection); + Assert.Single(services, + d => d.ServiceType == typeof(IVectorStoreRecordCollection) && + d.ServiceKey is not null && + d.ServiceKey.Equals(key) && + d.Lifetime == ServiceLifetime.Scoped); + } +} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderTests.cs new file mode 100644 index 000000000000..8246b0fa88b0 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderTests.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.VectorData; +using Moq; +using Xunit; + +namespace VectorData.UnitTests; + +public class VectorStoreRecordCollectionBuilderTests +{ + [Fact] + public void ConstructorWithInstanceSetsInnerCollection() + { + // Arrange + var innerCollection = new Mock>().Object; + + // Act + var builder = new VectorStoreRecordCollectionBuilder(innerCollection); + + // Assert + var builtCollection = builder.Build(); + Assert.Same(innerCollection, builtCollection); + } + + [Fact] + public void ConstructorWithFactoryCallsFactoryOnBuild() + { + // Arrange + var innerCollection = new Mock>().Object; + var serviceProvider = new Mock(); + IVectorStoreRecordCollection Factory(IServiceProvider _) => innerCollection; + + // Act + var builder = new VectorStoreRecordCollectionBuilder(Factory); + var builtCollection = builder.Build(serviceProvider.Object); + + // Assert + Assert.Same(innerCollection, builtCollection); + } + + [Fact] + public void BuildWithMultipleFactoriesAppliesInReverseOrder() + { + // Arrange + var innerCollection = new Mock>().Object; + var mockCollection1 = new Mock>().Object; + var mockCollection2 = new Mock>().Object; + var builder = new VectorStoreRecordCollectionBuilder(innerCollection); + + builder.Use(c => mockCollection1); + builder.Use(c => mockCollection2); + + // Act + var builtCollection = builder.Build(); + + // Assert + Assert.Same(mockCollection1, builtCollection); + } + + [Fact] + public void BuildWithNullReturningFactoryThrowsInvalidOperationException() + { + // Arrange + var innerCollection = new Mock>().Object; + var builder = new VectorStoreRecordCollectionBuilder(innerCollection); + builder.Use((c, _) => null!); + + // Act & Assert + var exception = Assert.Throws(() => builder.Build()); + Assert.Contains("returned null", exception.Message); + } + + [Fact] + public void BuildWithNullServiceProviderUsesEmptyServiceProvider() + { + // Arrange + var innerCollection = new Mock>().Object; + var builder = new VectorStoreRecordCollectionBuilder(innerCollection); + + // Act + var builtCollection = builder.Build(null); + + // Assert + Assert.Same(innerCollection, builtCollection); + } +} diff --git a/dotnet/src/Connectors/VectorData/PACKAGE.md b/dotnet/src/Connectors/VectorData/PACKAGE.md new file mode 100644 index 000000000000..6c6c756412d9 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/PACKAGE.md @@ -0,0 +1,40 @@ +## About + +Contains utilities for accessing Vector Databases and Vector Indexes. + +## Key Features + +- Telemetry for any Vector Database implementation. 
Vector Database implementations are provided separately in other packages, for example `Microsoft.SemanticKernel.Connectors.AzureAISearch`. + +## How to Use + +This package is typically used with an implementation of the vector database abstractions such as `Microsoft.SemanticKernel.Connectors.AzureAISearch`. + +## Additional Documentation + +- [Conceptual documentation](https://learn.microsoft.com/en-us/semantic-kernel/concepts/vector-store-connectors) + +## Related Packages + +Vector Database abstractions: + +- `Microsoft.Extensions.VectorData.Abstractions` + +Vector Database implementations: + +- `Microsoft.SemanticKernel.Connectors.AzureAISearch` +- `Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB` +- `Microsoft.SemanticKernel.Connectors.AzureCosmosNoSQL` +- `Microsoft.SemanticKernel.Connectors.InMemory` +- `Microsoft.SemanticKernel.Connectors.MongoDB` +- `Microsoft.SemanticKernel.Connectors.Pinecone` +- `Microsoft.SemanticKernel.Connectors.Postgres` +- `Microsoft.SemanticKernel.Connectors.Qdrant` +- `Microsoft.SemanticKernel.Connectors.Redis` +- `Microsoft.SemanticKernel.Connectors.Sqlite` +- `Microsoft.SemanticKernel.Connectors.SqlServer` +- `Microsoft.SemanticKernel.Connectors.Weaviate` + +## Feedback & Contributing + +Microsoft.Extensions.VectorData is released as open source under the [MIT license](https://licenses.nuget.org/MIT). Bug reports and contributions are welcome at [the GitHub repository](https://github.com/microsoft/semantic-kernel). diff --git a/dotnet/src/Connectors/VectorData/VectorData.csproj b/dotnet/src/Connectors/VectorData/VectorData.csproj new file mode 100644 index 000000000000..31e84c2533ce --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorData.csproj @@ -0,0 +1,62 @@ + + + + Microsoft.Extensions.VectorData + Microsoft.Extensions.VectorData + net8.0;netstandard2.0;net462 + + + + + + + 9.0.0-preview.1.25078.1 + 9.0.0.0 + + 9.0.0-preview.1.24518.1 + Microsoft.Extensions.VectorData + $(AssemblyName) + + Utilities for vector database access. + + neticon.png + neticon.png + PACKAGE.md + + Vector, Database, SDK + $(PackageDescription) + https://dot.net/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilder.cs b/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilder.cs new file mode 100644 index 000000000000..8a0295a65e98 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilder.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// A builder for creating pipelines of . +[Experimental("SKEXP0020")] +public sealed class KeywordHybridSearchBuilder +{ + private readonly Func> _innerSearchFactory; + + /// The registered search factory instances. + private List, IServiceProvider, IKeywordHybridSearch>>? _searchFactories; + + /// Initializes a new instance of the class. + /// The inner that represents the underlying backend. + public KeywordHybridSearchBuilder(IKeywordHybridSearch innerSearch) + { + Verify.NotNull(innerSearch); + + this._innerSearchFactory = _ => innerSearch; + } + + /// Initializes a new instance of the class. + /// A callback that produces the inner that represents the underlying backend. 
+ public KeywordHybridSearchBuilder(Func> innerSearchFactory) + { + Verify.NotNull(innerSearchFactory); + + this._innerSearchFactory = innerSearchFactory; + } + + /// Builds an that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn. + /// + /// The that should provide services to the instances. + /// If null, an empty will be used. + /// + /// An instance of that represents the entire pipeline. + public IKeywordHybridSearch Build(IServiceProvider? services = null) + { + services ??= EmptyKeyedServiceProvider.Instance; + var search = this._innerSearchFactory(services); + + // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost. + if (this._searchFactories is not null) + { + for (var i = this._searchFactories.Count - 1; i >= 0; i--) + { + search = this._searchFactories[i](search, services); + if (search is null) + { + throw new InvalidOperationException( + $"The {nameof(KeywordHybridSearchBuilder)} entry at index {i} returned null. " + + $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IKeywordHybridSearch)} instances."); + } + } + } + + return search; + } + + /// Adds a factory for an intermediate keyword hybrid search to the pipeline. + /// The search factory function. + /// The updated instance. + public KeywordHybridSearchBuilder Use(Func, IKeywordHybridSearch> searchFactory) + { + Verify.NotNull(searchFactory); + + return this.Use((innerSearch, _) => searchFactory(innerSearch)); + } + + /// Adds a factory for an intermediate keyword hybrid search to the pipeline. + /// The search factory function. + /// The updated instance. + public KeywordHybridSearchBuilder Use(Func, IServiceProvider, IKeywordHybridSearch> searchFactory) + { + Verify.NotNull(searchFactory); + + (this._searchFactories ??= []).Add(searchFactory); + return this; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderExtensions.cs new file mode 100644 index 000000000000..8c8b31ec6762 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderExtensions.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for working with in the context of . +[Experimental("SKEXP0020")] +public static class KeywordHybridSearchBuilderExtensions +{ + /// Creates a new using as its inner search. + /// The search to use as the inner search. + /// The new instance. + /// + /// This method is equivalent to using the constructor directly, + /// specifying as the inner search. + /// + public static KeywordHybridSearchBuilder AsBuilder(this IKeywordHybridSearch innerSearch) + { + Verify.NotNull(innerSearch); + + return new KeywordHybridSearchBuilder(innerSearch); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensions.cs new file mode 100644 index 000000000000..6bb2209b0812 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensions.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft. All rights reserved. 
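
Putting the KeywordHybridSearchBuilder pieces together, a hypothetical DI registration might look as follows; the generic arity of AddKeywordHybridSearch and the `hybridSearch` instance are assumptions based on the patterns above:

```csharp
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.VectorData;

// Sketch: register a keyword hybrid search and add logging around it.
static ServiceProvider Register<TRecord>(IKeywordHybridSearch<TRecord> hybridSearch)
{
    var services = new ServiceCollection();
    services.AddLogging();   // required so UseLogging() can resolve ILoggerFactory

    services.AddKeywordHybridSearch(hybridSearch)
            .UseLogging();   // logger factory resolved when the pipeline is built

    return services.BuildServiceProvider();
}
```
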
+ +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for registering with a . +[Experimental("SKEXP0020")] +public static class KeywordHybridSearchBuilderServiceCollectionExtensions +{ + /// Registers a singleton in the . + /// The to which the search should be added. + /// The inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a singleton service. + public static KeywordHybridSearchBuilder AddKeywordHybridSearch( + this IServiceCollection serviceCollection, + IKeywordHybridSearch innerSearch, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearch); + + return AddKeywordHybridSearch(serviceCollection, _ => innerSearch, lifetime); + } + + /// Registers a singleton in the . + /// The to which the search should be added. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a singleton service. + public static KeywordHybridSearchBuilder AddKeywordHybridSearch( + this IServiceCollection serviceCollection, + Func> innerSearchFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearchFactory); + + var builder = new KeywordHybridSearchBuilder(innerSearchFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IKeywordHybridSearch), builder.Build, lifetime)); + return builder; + } + + /// Registers a keyed singleton in the . + /// The to which the search should be added. + /// The key with which to associate the search. + /// The inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a scoped service. + public static KeywordHybridSearchBuilder AddKeyedKeywordHybridSearch( + this IServiceCollection serviceCollection, + object? serviceKey, + IKeywordHybridSearch innerSearch, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearch); + + return AddKeyedKeywordHybridSearch(serviceCollection, serviceKey, _ => innerSearch, lifetime); + } + + /// Registers a keyed singleton in the . + /// The to which the search should be added. + /// The key with which to associate the search. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a scoped service. + public static KeywordHybridSearchBuilder AddKeyedKeywordHybridSearch( + this IServiceCollection serviceCollection, + object? 
serviceKey, + Func> innerSearchFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearchFactory); + + var builder = new KeywordHybridSearchBuilder(innerSearchFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IKeywordHybridSearch), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); + return builder; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearch.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearch.cs new file mode 100644 index 000000000000..c05ffde310cc --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearch.cs @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Diagnostics; + +namespace Microsoft.Extensions.VectorData; + +/// +/// A keyword hybrid search that logs operations to an +/// +[Experimental("SKEXP0020")] +public class LoggingKeywordHybridSearch : IKeywordHybridSearch +{ + /// An instance used for all logging. + private readonly ILogger _logger; + + /// The underlying . + private readonly IKeywordHybridSearch _innerSearch; + + /// + /// Initializes a new instance of the class. + /// + /// The underlying . + /// An instance that will be used for all logging. + public LoggingKeywordHybridSearch(IKeywordHybridSearch innerSearch, ILogger logger) + { + Verify.NotNull(innerSearch); + Verify.NotNull(logger); + + this._innerSearch = innerSearch; + this._logger = logger; + } + + /// + public Task> HybridSearchAsync(TVector vector, ICollection keywords, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(HybridSearchAsync), + () => this._innerSearch.HybridSearchAsync(vector, keywords, options, cancellationToken)); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearchBuilderExtensions.cs new file mode 100644 index 000000000000..1614b6f6a57a --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearchBuilderExtensions.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extensions for configuring instances. +[Experimental("SKEXP0020")] +public static class LoggingKeywordHybridSearchBuilderExtensions +{ + /// Adds logging to the keyword hybrid search pipeline. + /// The . + /// + /// An optional used to create a logger with which logging should be performed. + /// If not supplied, a required instance will be resolved from the service provider. + /// If resolved is , it will be skipped and the inner service will be used instead. + /// + /// The . + public static KeywordHybridSearchBuilder UseLogging( + this KeywordHybridSearchBuilder builder, + ILoggerFactory? 
loggerFactory = null) + { + Verify.NotNull(builder); + + return builder.Use((innerSearch, services) => + { + loggerFactory ??= services.GetRequiredService(); + + if (loggerFactory == NullLoggerFactory.Instance) + { + return innerSearch; + } + + return new LoggingKeywordHybridSearch(innerSearch, loggerFactory.CreateLogger(typeof(LoggingKeywordHybridSearch))); + }); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearch.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearch.cs new file mode 100644 index 000000000000..0dc81080e496 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearch.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Diagnostics; + +namespace Microsoft.Extensions.VectorData; + +/// +/// A vectorizable text search that logs operations to an +/// +[Experimental("SKEXP0020")] +public class LoggingVectorizableTextSearch : IVectorizableTextSearch +{ + /// An instance used for all logging. + private readonly ILogger _logger; + + /// The underlying . + private readonly IVectorizableTextSearch _innerSearch; + + /// + /// Initializes a new instance of the class. + /// + /// The underlying . + /// An instance used for all logging. + public LoggingVectorizableTextSearch(IVectorizableTextSearch innerSearch, ILogger logger) + { + Verify.NotNull(innerSearch); + Verify.NotNull(logger); + + this._innerSearch = innerSearch; + this._logger = logger; + } + + /// + public Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(VectorizableTextSearchAsync), + () => this._innerSearch.VectorizableTextSearchAsync(searchText, options, cancellationToken)); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearchBuilderExtensions.cs new file mode 100644 index 000000000000..0f2a1b704474 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearchBuilderExtensions.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extensions for configuring instances. +[Experimental("SKEXP0020")] +public static class LoggingVectorizableTextSearchBuilderExtensions +{ + /// Adds logging to the vectorizable text search pipeline. + /// The . + /// + /// An optional used to create a logger with which logging should be performed. + /// If not supplied, a required instance will be resolved from the service provider. + /// If resolved is , it will be skipped and the inner service will be used instead. + /// + /// The . + public static VectorizableTextSearchBuilder UseLogging( + this VectorizableTextSearchBuilder builder, + ILoggerFactory? 
loggerFactory = null) + { + Verify.NotNull(builder); + + return builder.Use((innerSearch, services) => + { + loggerFactory ??= services.GetRequiredService(); + + if (loggerFactory == NullLoggerFactory.Instance) + { + return innerSearch; + } + + return new LoggingVectorizableTextSearch(innerSearch, loggerFactory.CreateLogger(typeof(LoggingVectorizableTextSearch))); + }); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearch.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearch.cs new file mode 100644 index 000000000000..f0198534d421 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearch.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Diagnostics; + +namespace Microsoft.Extensions.VectorData; + +/// +/// A vectorized search that logs operations to an +/// +[Experimental("SKEXP0020")] +public class LoggingVectorizedSearch : IVectorizedSearch +{ + /// An instance used for all logging. + private readonly ILogger _logger; + + /// The underlying . + private readonly IVectorizedSearch _innerSearch; + + /// + /// Initializes a new instance of the class. + /// + /// The underlying . + /// An instance used for all logging. + public LoggingVectorizedSearch(IVectorizedSearch innerSearch, ILogger logger) + { + Verify.NotNull(innerSearch); + Verify.NotNull(logger); + + this._innerSearch = innerSearch; + this._logger = logger; + } + + /// + public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(VectorizedSearchAsync), + () => this._innerSearch.VectorizedSearchAsync(vector, options, cancellationToken)); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearchBuilderExtensions.cs new file mode 100644 index 000000000000..47fa7c61ebe4 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearchBuilderExtensions.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extensions for configuring instances. +[Experimental("SKEXP0020")] +public static class LoggingVectorizedSearchBuilderExtensions +{ + /// Adds logging to the vectorized search pipeline. + /// The . + /// + /// An optional used to create a logger with which logging should be performed. + /// If not supplied, a required instance will be resolved from the service provider. + /// If resolved is , it will be skipped and the inner service will be used instead. + /// + /// The . + public static VectorizedSearchBuilder UseLogging( + this VectorizedSearchBuilder builder, + ILoggerFactory? 
loggerFactory = null) + { + Verify.NotNull(builder); + + return builder.Use((innerSearch, services) => + { + loggerFactory ??= services.GetRequiredService(); + + if (loggerFactory == NullLoggerFactory.Instance) + { + return innerSearch; + } + + return new LoggingVectorizedSearch(innerSearch, loggerFactory.CreateLogger(typeof(LoggingVectorizedSearch))); + }); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilder.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilder.cs new file mode 100644 index 000000000000..a2681267d216 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilder.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// A builder for creating pipelines of . +[Experimental("SKEXP0020")] +public sealed class VectorizableTextSearchBuilder +{ + private readonly Func> _innerSearchFactory; + + /// The registered search factory instances. + private List, IServiceProvider, IVectorizableTextSearch>>? _searchFactories; + + /// Initializes a new instance of the class. + /// The inner that represents the underlying backend. + public VectorizableTextSearchBuilder(IVectorizableTextSearch innerSearch) + { + Verify.NotNull(innerSearch); + + this._innerSearchFactory = _ => innerSearch; + } + + /// Initializes a new instance of the class. + /// A callback that produces the inner that represents the underlying backend. + public VectorizableTextSearchBuilder(Func> innerSearchFactory) + { + Verify.NotNull(innerSearchFactory); + + this._innerSearchFactory = innerSearchFactory; + } + + /// Builds an that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn. + /// + /// The that should provide services to the instances. + /// If null, an empty will be used. + /// + /// An instance of that represents the entire pipeline. + public IVectorizableTextSearch Build(IServiceProvider? services = null) + { + services ??= EmptyKeyedServiceProvider.Instance; + var search = this._innerSearchFactory(services); + + // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost. + if (this._searchFactories is not null) + { + for (var i = this._searchFactories.Count - 1; i >= 0; i--) + { + search = this._searchFactories[i](search, services); + if (search is null) + { + throw new InvalidOperationException( + $"The {nameof(VectorizableTextSearchBuilder)} entry at index {i} returned null. " + + $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IVectorizableTextSearch)} instances."); + } + } + } + + return search; + } + + /// Adds a factory for an intermediate vectorizable text search to the pipeline. + /// The search factory function. + /// The updated instance. + public VectorizableTextSearchBuilder Use(Func, IVectorizableTextSearch> searchFactory) + { + Verify.NotNull(searchFactory); + + return this.Use((innerSearch, _) => searchFactory(innerSearch)); + } + + /// Adds a factory for an intermediate vectorizable text search to the pipeline. + /// The search factory function. + /// The updated instance. 
+ public VectorizableTextSearchBuilder Use(Func, IServiceProvider, IVectorizableTextSearch> searchFactory) + { + Verify.NotNull(searchFactory); + + (this._searchFactories ??= []).Add(searchFactory); + return this; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderExtensions.cs new file mode 100644 index 000000000000..7f44251ddc38 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderExtensions.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for working with in the context of . +[Experimental("SKEXP0020")] +public static class VectorizableTextSearchBuilderExtensions +{ + /// Creates a new using as its inner search. + /// The search to use as the inner search. + /// The new instance. + /// + /// This method is equivalent to using the constructor directly, + /// specifying as the inner search. + /// + public static VectorizableTextSearchBuilder AsBuilder(this IVectorizableTextSearch innerSearch) + { + Verify.NotNull(innerSearch); + + return new VectorizableTextSearchBuilder(innerSearch); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensions.cs new file mode 100644 index 000000000000..423919c561b0 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensions.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for registering with a . +[Experimental("SKEXP0020")] +public static class VectorizableTextSearchBuilderServiceCollectionExtensions +{ + /// Registers a singleton in the . + /// The to which the search should be added. + /// The inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a singleton service. + public static VectorizableTextSearchBuilder AddVectorizableTextSearch( + this IServiceCollection serviceCollection, + IVectorizableTextSearch innerSearch, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearch); + + return AddVectorizableTextSearch(serviceCollection, _ => innerSearch, lifetime); + } + + /// Registers a singleton in the . + /// The to which the search should be added. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a singleton service. 
+ public static VectorizableTextSearchBuilder AddVectorizableTextSearch( + this IServiceCollection serviceCollection, + Func> innerSearchFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearchFactory); + + var builder = new VectorizableTextSearchBuilder(innerSearchFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorizableTextSearch), builder.Build, lifetime)); + return builder; + } + + /// Registers a keyed singleton in the . + /// The to which the search should be added. + /// The key with which to associate the search. + /// The inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a scoped service. + public static VectorizableTextSearchBuilder AddKeyedVectorizableTextSearch( + this IServiceCollection serviceCollection, + object? serviceKey, + IVectorizableTextSearch innerSearch, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearch); + + return AddKeyedVectorizableTextSearch(serviceCollection, serviceKey, _ => innerSearch, lifetime); + } + + /// Registers a keyed singleton in the . + /// The to which the search should be added. + /// The key with which to associate the search. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a scoped service. + public static VectorizableTextSearchBuilder AddKeyedVectorizableTextSearch( + this IServiceCollection serviceCollection, + object? serviceKey, + Func> innerSearchFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearchFactory); + + var builder = new VectorizableTextSearchBuilder(innerSearchFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorizableTextSearch), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); + return builder; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilder.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilder.cs new file mode 100644 index 000000000000..fe02e2535482 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilder.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// A builder for creating pipelines of . +[Experimental("SKEXP0020")] +public sealed class VectorizedSearchBuilder +{ + private readonly Func> _innerSearchFactory; + + /// The registered search factory instances. + private List, IServiceProvider, IVectorizedSearch>>? _searchFactories; + + /// Initializes a new instance of the class. + /// The inner that represents the underlying backend. + public VectorizedSearchBuilder(IVectorizedSearch innerSearch) + { + Verify.NotNull(innerSearch); + + this._innerSearchFactory = _ => innerSearch; + } + + /// Initializes a new instance of the class. + /// A callback that produces the inner that represents the underlying backend. 
+ public VectorizedSearchBuilder(Func> innerSearchFactory) + { + Verify.NotNull(innerSearchFactory); + + this._innerSearchFactory = innerSearchFactory; + } + + /// Builds an that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn. + /// + /// The that should provide services to the instances. + /// If null, an empty will be used. + /// + /// An instance of that represents the entire pipeline. + public IVectorizedSearch Build(IServiceProvider? services = null) + { + services ??= EmptyKeyedServiceProvider.Instance; + var search = this._innerSearchFactory(services); + + // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost. + if (this._searchFactories is not null) + { + for (var i = this._searchFactories.Count - 1; i >= 0; i--) + { + search = this._searchFactories[i](search, services); + if (search is null) + { + throw new InvalidOperationException( + $"The {nameof(VectorizedSearchBuilder)} entry at index {i} returned null. " + + $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IVectorizedSearch)} instances."); + } + } + } + + return search; + } + + /// Adds a factory for an intermediate vectorized search to the pipeline. + /// The search factory function. + /// The updated instance. + public VectorizedSearchBuilder Use(Func, IVectorizedSearch> searchFactory) + { + Verify.NotNull(searchFactory); + + return this.Use((innerSearch, _) => searchFactory(innerSearch)); + } + + /// Adds a factory for an intermediate vectorized search to the pipeline. + /// The search factory function. + /// The updated instance. + public VectorizedSearchBuilder Use(Func, IServiceProvider, IVectorizedSearch> searchFactory) + { + Verify.NotNull(searchFactory); + + (this._searchFactories ??= []).Add(searchFactory); + return this; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderExtensions.cs new file mode 100644 index 000000000000..a7721578716f --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderExtensions.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for working with in the context of . +[Experimental("SKEXP0020")] +public static class VectorizedSearchBuilderExtensions +{ + /// Creates a new using as its inner search. + /// The search to use as the inner search. + /// The new instance. + /// + /// This method is equivalent to using the constructor directly, + /// specifying as the inner search. + /// + public static VectorizedSearchBuilder AsBuilder(this IVectorizedSearch innerSearch) + { + Verify.NotNull(innerSearch); + + return new VectorizedSearchBuilder(innerSearch); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensions.cs new file mode 100644 index 000000000000..75f4bc7b1355 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensions.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft. All rights reserved. 
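
One behavior worth calling out from the UseLogging extensions above: when the logger factory is NullLoggerFactory.Instance, decoration is skipped and the inner search is returned unchanged. A hypothetical sketch for the vectorized-search variant, assuming AsBuilder is available as shown earlier:

```csharp
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.VectorData;

// Sketch: with NullLoggerFactory, UseLogging leaves the inner search unwrapped.
static IVectorizedSearch<TRecord> NoOpLogging<TRecord>(IVectorizedSearch<TRecord> search)
{
    var built = search.AsBuilder()
                      .UseLogging(NullLoggerFactory.Instance)
                      .Build();

    // ReferenceEquals(search, built) is true: no decorator was added.
    return built;
}
```
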
+ +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for registering with a . +[Experimental("SKEXP0020")] +public static class VectorizedSearchBuilderServiceCollectionExtensions +{ + /// Registers a singleton in the . + /// The to which the search should be added. + /// The inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a singleton service. + public static VectorizedSearchBuilder AddVectorizedSearch( + this IServiceCollection serviceCollection, + IVectorizedSearch innerSearch, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearch); + + return AddVectorizedSearch(serviceCollection, _ => innerSearch, lifetime); + } + + /// Registers a singleton in the . + /// The to which the search should be added. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a singleton service. + public static VectorizedSearchBuilder AddVectorizedSearch( + this IServiceCollection serviceCollection, + Func> innerSearchFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearchFactory); + + var builder = new VectorizedSearchBuilder(innerSearchFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorizedSearch), builder.Build, lifetime)); + return builder; + } + + /// Registers a keyed singleton in the . + /// The to which the search should be added. + /// The key with which to associate the search. + /// The inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a scoped service. + public static VectorizedSearchBuilder AddKeyedVectorizedSearch( + this IServiceCollection serviceCollection, + object? serviceKey, + IVectorizedSearch innerSearch, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearch); + + return AddKeyedVectorizedSearch(serviceCollection, serviceKey, _ => innerSearch, lifetime); + } + + /// Registers a keyed singleton in the . + /// The to which the search should be added. + /// The key with which to associate the search. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the search. Defaults to . + /// A that can be used to build a pipeline around the inner search. + /// The search is registered as a scoped service. + public static VectorizedSearchBuilder AddKeyedVectorizedSearch( + this IServiceCollection serviceCollection, + object? 
serviceKey, + Func> innerSearchFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerSearchFactory); + + var builder = new VectorizedSearchBuilder(innerSearchFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorizedSearch), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); + return builder; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStore.cs b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStore.cs new file mode 100644 index 000000000000..ea2bca4ca106 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStore.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Diagnostics; + +namespace Microsoft.Extensions.VectorData; + +/// +/// A vector store that logs operations to an +/// +[Experimental("SKEXP0020")] +public class LoggingVectorStore : IVectorStore +{ + /// An instance used for all logging. + private readonly ILogger _logger; + + /// The underlying . + private readonly IVectorStore _innerStore; + + /// + /// Initializes a new instance of the class. + /// + /// The underlying . + /// An instance that will be used for all logging. + public LoggingVectorStore(IVectorStore innerStore, ILogger logger) + { + Verify.NotNull(innerStore); + Verify.NotNull(logger); + + this._innerStore = innerStore; + this._logger = logger; + } + + /// + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + => new LoggingVectorStoreRecordCollection( + this._innerStore.GetCollection(name, vectorStoreRecordDefinition), + this._logger); + + /// + public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(ListCollectionNamesAsync), + () => this._innerStore.ListCollectionNamesAsync(cancellationToken), + cancellationToken); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreBuilderExtensions.cs new file mode 100644 index 000000000000..9e8b7636a16c --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreBuilderExtensions.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extensions for configuring instances. +[Experimental("SKEXP0020")] +public static class LoggingVectorStoreBuilderExtensions +{ + /// Adds logging to the vector store pipeline. + /// The . + /// + /// An optional used to create a logger with which logging should be performed. + /// If not supplied, a required instance will be resolved from the service provider. + /// If resolved is , it will be skipped and the inner service will be used instead. + /// + /// The . + public static VectorStoreBuilder UseLogging( + this VectorStoreBuilder builder, + ILoggerFactory? 
loggerFactory = null) + { + Verify.NotNull(builder); + + return builder.Use((innerStore, services) => + { + loggerFactory ??= services.GetRequiredService(); + + if (loggerFactory == NullLoggerFactory.Instance) + { + return innerStore; + } + + return new LoggingVectorStore(innerStore, loggerFactory.CreateLogger(typeof(LoggingVectorStore))); + }); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollection.cs b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollection.cs new file mode 100644 index 000000000000..3d6919280861 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollection.cs @@ -0,0 +1,144 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Diagnostics; + +namespace Microsoft.Extensions.VectorData; + +/// +/// A vector store record collection that logs operations to an +/// +[Experimental("SKEXP0020")] +#pragma warning disable CA1711 // Identifiers should not have incorrect suffix +public class LoggingVectorStoreRecordCollection : IVectorStoreRecordCollection where TKey : notnull +#pragma warning restore CA1711 // Identifiers should not have incorrect suffix +{ + /// An instance used for all logging. + private readonly ILogger _logger; + + /// The underlying . + private readonly IVectorStoreRecordCollection _innerCollection; + + /// + /// Initializes a new instance of the class. + /// + /// The underlying . + /// An instance that will be used for all logging. + public LoggingVectorStoreRecordCollection(IVectorStoreRecordCollection innerCollection, ILogger logger) + { + Verify.NotNull(innerCollection); + Verify.NotNull(logger); + + this._innerCollection = innerCollection; + this._logger = logger; + } + + /// + public string CollectionName => this._innerCollection.CollectionName; + + /// + public Task CollectionExistsAsync(CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(CollectionExistsAsync), + () => this._innerCollection.CollectionExistsAsync(cancellationToken)); + } + + /// + public Task CreateCollectionAsync(CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(CreateCollectionAsync), + () => this._innerCollection.CreateCollectionAsync(cancellationToken)); + } + + /// + public Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(CreateCollectionIfNotExistsAsync), + () => this._innerCollection.CreateCollectionIfNotExistsAsync(cancellationToken)); + } + + /// + public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(DeleteAsync), + () => this._innerCollection.DeleteAsync(key, cancellationToken)); + } + + /// + public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(DeleteBatchAsync), + () => this._innerCollection.DeleteBatchAsync(keys, cancellationToken)); + } + + /// + public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + { + return 
LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(DeleteCollectionAsync), + () => this._innerCollection.DeleteCollectionAsync(cancellationToken)); + } + + /// + public Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(GetAsync), + () => this._innerCollection.GetAsync(key, options, cancellationToken)); + } + + /// + public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(GetBatchAsync), + () => this._innerCollection.GetBatchAsync(keys, options, cancellationToken), + cancellationToken); + } + + /// + public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(UpsertAsync), + () => this._innerCollection.UpsertAsync(record, cancellationToken)); + } + + /// + public IAsyncEnumerable UpsertBatchAsync(IEnumerable records, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(UpsertBatchAsync), + () => this._innerCollection.UpsertBatchAsync(records, cancellationToken), + cancellationToken); + } + + /// + public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + { + return LoggingExtensions.RunWithLoggingAsync( + this._logger, + nameof(VectorizedSearchAsync), + () => this._innerCollection.VectorizedSearchAsync(vector, options, cancellationToken)); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensions.cs new file mode 100644 index 000000000000..33d7d3760d32 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensions.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extensions for configuring instances. +[Experimental("SKEXP0020")] +public static class LoggingVectorStoreRecordCollectionBuilderExtensions +{ + /// Adds logging to the vector store record collection pipeline. + /// The . + /// + /// An optional used to create a logger with which logging should be performed. + /// If not supplied, a required instance will be resolved from the service provider. + /// If resolved is , it will be skipped and the inner service will be used instead. + /// + /// The . + public static VectorStoreRecordCollectionBuilder UseLogging( + this VectorStoreRecordCollectionBuilder builder, + ILoggerFactory? 
loggerFactory = null) where TKey : notnull + { + Verify.NotNull(builder); + + return builder.Use((innerCollection, services) => + { + loggerFactory ??= services.GetRequiredService(); + + if (loggerFactory == NullLoggerFactory.Instance) + { + return innerCollection; + } + + return new LoggingVectorStoreRecordCollection(innerCollection, loggerFactory.CreateLogger(typeof(LoggingVectorStoreRecordCollection))); + }); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilder.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilder.cs new file mode 100644 index 000000000000..71d98fcb276c --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilder.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// A builder for creating pipelines of . +[Experimental("SKEXP0020")] +public sealed class VectorStoreBuilder +{ + private readonly Func _innerStoreFactory; + + /// The registered store factory instances. + private List>? _storeFactories; + + /// Initializes a new instance of the class. + /// The inner that represents the underlying backend. + public VectorStoreBuilder(IVectorStore innerStore) + { + Verify.NotNull(innerStore); + + this._innerStoreFactory = _ => innerStore; + } + + /// Initializes a new instance of the class. + /// A callback that produces the inner that represents the underlying backend. + public VectorStoreBuilder(Func innerStoreFactory) + { + Verify.NotNull(innerStoreFactory); + + this._innerStoreFactory = innerStoreFactory; + } + + /// Builds an that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn. + /// + /// The that should provide services to the instances. + /// If null, an empty will be used. + /// + /// An instance of that represents the entire pipeline. + public IVectorStore Build(IServiceProvider? services = null) + { + services ??= EmptyKeyedServiceProvider.Instance; + var vectorStore = this._innerStoreFactory(services); + + // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost. + if (this._storeFactories is not null) + { + for (var i = this._storeFactories.Count - 1; i >= 0; i--) + { + vectorStore = this._storeFactories[i](vectorStore, services); + if (vectorStore is null) + { + throw new InvalidOperationException( + $"The {nameof(VectorStoreBuilder)} entry at index {i} returned null. " + + $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IVectorStore)} instances."); + } + } + } + + return vectorStore; + } + + /// Adds a factory for an intermediate vector store to the vector store pipeline. + /// The store factory function. + /// The updated instance. + public VectorStoreBuilder Use(Func storeFactory) + { + Verify.NotNull(storeFactory); + + return this.Use((innerStore, _) => storeFactory(innerStore)); + } + + /// Adds a factory for an intermediate vector store to the vector store pipeline. + /// The store factory function. + /// The updated instance. 
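[Editor's note] A minimal sketch of combining the VectorStoreBuilder above with the UseLogging extension; innerStore and loggerFactory are assumed to be supplied by the host application:

    // Wraps the store in LoggingVectorStore unless the resolved factory is NullLoggerFactory.Instance.
    IVectorStore store = new VectorStoreBuilder(innerStore)
        .UseLogging(loggerFactory)
        .Build();

    // Collections handed out by the wrapper are wrapped as well, so record-level
    // operations on them are logged too.
    var hotels = store.GetCollection<string, Hotel>("hotels");

Here Hotel stands in for any record model; it is not defined by this patch.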
+ public VectorStoreBuilder Use(Func storeFactory) + { + Verify.NotNull(storeFactory); + + (this._storeFactories ??= []).Add(storeFactory); + return this; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderExtensions.cs new file mode 100644 index 000000000000..f5666cd2d6ac --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderExtensions.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for working with in the context of . +[Experimental("SKEXP0020")] +public static class VectorStoreBuilderExtensions +{ + /// Creates a new using as its inner store. + /// The store to use as the inner store. + /// The new instance. + /// + /// This method is equivalent to using the constructor directly, + /// specifying as the inner store. + /// + public static VectorStoreBuilder AsBuilder(this IVectorStore innerStore) + { + Verify.NotNull(innerStore); + + return new VectorStoreBuilder(innerStore); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderServiceCollectionExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderServiceCollectionExtensions.cs new file mode 100644 index 000000000000..a76d3256585b --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderServiceCollectionExtensions.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for registering with a . +[Experimental("SKEXP0020")] +public static class VectorStoreBuilderServiceCollectionExtensions +{ + /// Registers a singleton in the . + /// The to which the store should be added. + /// The inner that represents the underlying backend. + /// The service lifetime for the store. Defaults to . + /// A that can be used to build a pipeline around the inner store. + /// The store is registered as a singleton service. + public static VectorStoreBuilder AddVectorStore( + this IServiceCollection serviceCollection, + IVectorStore innerStore, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerStore); + + return AddVectorStore(serviceCollection, _ => innerStore, lifetime); + } + + /// Registers a singleton in the . + /// The to which the store should be added. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the store. Defaults to . + /// A that can be used to build a pipeline around the inner store. + /// The store is registered as a singleton service. + public static VectorStoreBuilder AddVectorStore( + this IServiceCollection serviceCollection, + Func innerStoreFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerStoreFactory); + + var builder = new VectorStoreBuilder(innerStoreFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorStore), builder.Build, lifetime)); + return builder; + } + + /// Registers a keyed singleton in the . + /// The to which the store should be added. 
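[Editor's note] The AddVectorStore overloads above register the built pipeline as the IVectorStore service; a hedged sketch of how a host might use them (CreateConnectorStore is an assumed helper that produces a concrete connector's IVectorStore):

    services
        .AddVectorStore(sp => CreateConnectorStore(sp), ServiceLifetime.Scoped)
        .UseLogging();

    // With no explicit factory argument, UseLogging resolves a required ILoggerFactory
    // from the provider, so the host should also call services.AddLogging().
    using var serviceProvider = services.BuildServiceProvider();
    var store = serviceProvider.GetRequiredService<IVectorStore>();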
+ /// The key with which to associate the store. + /// The inner that represents the underlying backend. + /// The service lifetime for the store. Defaults to . + /// A that can be used to build a pipeline around the inner store. + /// The store is registered as a scoped service. + public static VectorStoreBuilder AddKeyedVectorStore( + this IServiceCollection serviceCollection, + object? serviceKey, + IVectorStore innerStore, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerStore); + + return AddKeyedVectorStore(serviceCollection, serviceKey, _ => innerStore, lifetime); + } + + /// Registers a keyed singleton in the . + /// The to which the store should be added. + /// The key with which to associate the store. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the store. Defaults to . + /// A that can be used to build a pipeline around the inner store. + /// The store is registered as a scoped service. + public static VectorStoreBuilder AddKeyedVectorStore( + this IServiceCollection serviceCollection, + object? serviceKey, + Func innerStoreFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerStoreFactory); + + var builder = new VectorStoreBuilder(innerStoreFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorStore), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); + return builder; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilder.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilder.cs new file mode 100644 index 000000000000..2ae1048ff83e --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilder.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// A builder for creating pipelines of . +[Experimental("SKEXP0020")] +public sealed class VectorStoreRecordCollectionBuilder where TKey : notnull +{ + private readonly Func> _innerCollectionFactory; + + /// The registered collection factory instances. + private List, IServiceProvider, IVectorStoreRecordCollection>>? _collectionFactories; + + /// Initializes a new instance of the class. + /// The inner that represents the underlying backend. + public VectorStoreRecordCollectionBuilder(IVectorStoreRecordCollection innerCollection) + { + Verify.NotNull(innerCollection); + + this._innerCollectionFactory = _ => innerCollection; + } + + /// Initializes a new instance of the class. + /// A callback that produces the inner that represents the underlying backend. + public VectorStoreRecordCollectionBuilder(Func> innerCollectionFactory) + { + Verify.NotNull(innerCollectionFactory); + + this._innerCollectionFactory = innerCollectionFactory; + } + + /// Builds an that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn. + /// + /// The that should provide services to the instances. + /// If null, an empty will be used. + /// + /// An instance of that represents the entire pipeline. + public IVectorStoreRecordCollection Build(IServiceProvider? 
services = null) + { + services ??= EmptyKeyedServiceProvider.Instance; + var collection = this._innerCollectionFactory(services); + + // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost. + if (this._collectionFactories is not null) + { + for (var i = this._collectionFactories.Count - 1; i >= 0; i--) + { + collection = this._collectionFactories[i](collection, services); + if (collection is null) + { + throw new InvalidOperationException( + $"The {nameof(VectorStoreRecordCollectionBuilder)} entry at index {i} returned null. " + + $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IVectorStoreRecordCollection)} instances."); + } + } + } + + return collection; + } + + /// Adds a factory for an intermediate vector store record collection to the pipeline. + /// The collection factory function. + /// The updated instance. + public VectorStoreRecordCollectionBuilder Use(Func, IVectorStoreRecordCollection> collectionFactory) + { + Verify.NotNull(collectionFactory); + + return this.Use((innerCollection, _) => collectionFactory(innerCollection)); + } + + /// Adds a factory for an intermediate vector store record collection to the pipeline. + /// The collection factory function. + /// The updated instance. + public VectorStoreRecordCollectionBuilder Use(Func, IServiceProvider, IVectorStoreRecordCollection> collectionFactory) + { + Verify.NotNull(collectionFactory); + + (this._collectionFactories ??= []).Add(collectionFactory); + return this; + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderExtensions.cs new file mode 100644 index 000000000000..1e950685253c --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderExtensions.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for working with in the context of . +[Experimental("SKEXP0020")] +public static class VectorStoreRecordCollectionBuilderExtensions +{ + /// Creates a new using as its inner collection. + /// The collection to use as the inner collection. + /// The new instance. + /// + /// This method is equivalent to using the constructor directly, + /// specifying as the inner collection. + /// + public static VectorStoreRecordCollectionBuilder AsBuilder(this IVectorStoreRecordCollection innerCollection) where TKey : notnull + { + Verify.NotNull(innerCollection); + + return new VectorStoreRecordCollectionBuilder(innerCollection); + } +} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensions.cs new file mode 100644 index 000000000000..2b0af209ebd6 --- /dev/null +++ b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensions.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.VectorData; + +/// Provides extension methods for registering with a . 
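[Editor's note] Likewise, an existing record collection can be decorated through its builder; a sketch assuming an existing connector collection and a host-provided logger factory:

    IVectorStoreRecordCollection<string, Hotel> collection = innerCollection
        .AsBuilder()
        .UseLogging(loggerFactory)
        .Build();

The resulting wrapper forwards every call (GetAsync, UpsertAsync, DeleteAsync, VectorizedSearchAsync, and so on) to the inner collection through LoggingExtensions.RunWithLoggingAsync.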
+[Experimental("SKEXP0020")] +public static class VectorStoreRecordCollectionBuilderServiceCollectionExtensions +{ + /// Registers a singleton in the . + /// The to which the collection should be added. + /// The inner that represents the underlying backend. + /// The service lifetime for the collection. Defaults to . + /// A that can be used to build a pipeline around the inner collection. + /// The collection is registered as a singleton service. + public static VectorStoreRecordCollectionBuilder AddVectorStoreRecordCollection( + this IServiceCollection serviceCollection, + IVectorStoreRecordCollection innerCollection, + ServiceLifetime lifetime = ServiceLifetime.Singleton) where TKey : notnull + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerCollection); + + return AddVectorStoreRecordCollection(serviceCollection, _ => innerCollection, lifetime); + } + + /// Registers a singleton in the . + /// The to which the collection should be added. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the collection. Defaults to . + /// A that can be used to build a pipeline around the inner collection. + /// The collection is registered as a singleton service. + public static VectorStoreRecordCollectionBuilder AddVectorStoreRecordCollection( + this IServiceCollection serviceCollection, + Func> innerCollectionFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) where TKey : notnull + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerCollectionFactory); + + var builder = new VectorStoreRecordCollectionBuilder(innerCollectionFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorStoreRecordCollection), builder.Build, lifetime)); + return builder; + } + + /// Registers a keyed singleton in the . + /// The to which the collection should be added. + /// The key with which to associate the collection. + /// The inner that represents the underlying backend. + /// The service lifetime for the collection. Defaults to . + /// A that can be used to build a pipeline around the inner collection. + /// The collection is registered as a scoped service. + public static VectorStoreRecordCollectionBuilder AddKeyedVectorStoreRecordCollection( + this IServiceCollection serviceCollection, + object? serviceKey, + IVectorStoreRecordCollection innerCollection, + ServiceLifetime lifetime = ServiceLifetime.Singleton) where TKey : notnull + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerCollection); + + return AddKeyedVectorStoreRecordCollection(serviceCollection, serviceKey, _ => innerCollection, lifetime); + } + + /// Registers a keyed singleton in the . + /// The to which the collection should be added. + /// The key with which to associate the collection. + /// A callback that produces the inner that represents the underlying backend. + /// The service lifetime for the collection. Defaults to . + /// A that can be used to build a pipeline around the inner collection. + /// The collection is registered as a scoped service. + public static VectorStoreRecordCollectionBuilder AddKeyedVectorStoreRecordCollection( + this IServiceCollection serviceCollection, + object? 
serviceKey, + Func> innerCollectionFactory, + ServiceLifetime lifetime = ServiceLifetime.Singleton) where TKey : notnull + { + Verify.NotNull(serviceCollection); + Verify.NotNull(innerCollectionFactory); + + var builder = new VectorStoreRecordCollectionBuilder(innerCollectionFactory); + serviceCollection.Add(new ServiceDescriptor(typeof(IVectorStoreRecordCollection), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); + return builder; + } +} diff --git a/dotnet/src/Connectors/VectorData/neticon.png b/dotnet/src/Connectors/VectorData/neticon.png new file mode 100644 index 0000000000000000000000000000000000000000..a0f1fdbf4d5eae0e561018cccee74f6a454cdb9c GIT binary patch literal 7006 zcmeHMXH-+`n%)#eMU;C)kZw7O2nvFLpcE@A^-u+AN(mh$UH*JD5Jjm{4}uUR zs5C(zdURn*zrcHqdVxK)P)7322TAMVbNR4HRzo3_~zdgjvf?Ot98@H{LHdy zK*)TM=g&B9f}+9IKfm=aF5e3_{PQJ$ zY4?9DHvtd+Y14o8TQs=)&+P)Wjb3|LIT@*NDqyYm#gu^q*EFSow<%yKVx`_Ka)!0 z2YAaQr%LYyQ%n$Rjx)e%JeM5_ov70FUMveJTS(J+%C4(L)~h*MQ8!wJtf_X{`Ol?k z;{27%#**2uiR&R6-eaRK1Mdgl2xHQ=uS(~VqsTVrsUnQhc zRIK5>@(05w3gHYdsI0;;sOO66pUEl)DGyD(D4>$7drUDFZ|uxx;-nWj7d|rj=u+D@ z-HU+mLOInrsXdSL1Z6nVB&D z@>f4!yq=_B+16+qw5k=4o#*tf;6Oe*F;`&L!)bT{U7Wc3YmG2;NRxb%woCt~*Yr2E zfwiUdS=7SK&5>df-aqY8lp~SEUG*ziXGvHMLp_#vgvVMQ*&{+d@(a>v4;7p_%Jte0Ga5zNbUI28WAgY5f?FX^;q`1WTw2~t|P54N&e^@=nFqDj}W#o z_-kZBWDQ%($YJH43Y7YrbjfsUrAEjla>?j0;YLdXxjK}P@xDGc%r&c)6`t?XW=*{r z%Z^p)?6*7obKU_;NZK_ejh9n&?qzO0#(}Uo+KSm|e}q1+f$wM!G8>lLvKK1UK^uz5 zDk&5(DuUnzQy{aQ8%b~*_4Ri`TOj}Dd{0OCls}^VD8=qDC%Q9tSSt5LZoxd!|ai3oGtf&cOy(`^W9zMNR;bII|OS+Pe(-9=f!m6}w zV>f(mH^BYE-=Wl=)Q2s2TF*j&tRkN0KOu3-(VN?4?-v|?W^Xj)@u4^bNB%bN+f|D= z?r1ey$UbahYv!qISaxV8>+1Mnz!M&S1o+~titx|65MA`iQMjscL!+LOGjZ?p>}x6d z4`FiZV9i-E6F8c|Fq37-TTTtJOdIZ9<*YrJU86UuQr6dipNC%AxT?lXa9U=`iq+2= zOT!CFUlJM1&INj~InR!=@x@{Z8BnvgL~_>nN)y@!r<0$uGCJ<0B-q!vZn@~#5^Ig8B}}g&dYBee=x50Wv$R^^f%aTE~g_a7&8Y(5L>! 
zkYgCl@1ZVqFSwkH(ns-EtYbOFLrarf#r6W9#x8rO<<_6h33faYV{<&_gBahO#ga9j z$|}=ea)vEm|Hb`E%L9Gn#Osxg( z&sxXz7lsse+_i@<_LUl@8$916h*m6!R?~zr_ZQU^H3F(aC1is#I$VP$GO(s!pT&Y# z85JYcwQqu6Ja6sje&x*)nOdx;bt1hNMTSwSikFeKE)+MRrW?mg=8mp^AR_kz{C%e* z32H_>c600^d$9)ob+$yzpyxHa+k0Sz7GG41I0A59bKJf?X}E6mX$pU~Wc%_?$2w1s zZEbk$svZ4U+WH;XPEb^-IqhGQX1U|z8KWp8&jVlWFPP+7Um6;oMy?>TFU`cMT5bYx z;7_~MfZ(sumPQHg++U)9PT=+=zxu+qmP==xJ&oI%XgD8=YZo%*rGq2U_J^D4d%7H`}jau-;<_^n?THcf9*rKD^J#%p%l zA8DILPr+wPY^MpxQbxGXG2f0xcjxSw;wjl53EsXe0poYHgfc(T;v5J;H$neUhElxe zrX0NdQ4e#4L4e-JmsN$%C+#BKX8TYA1YlhN`|QyqnlH{Igil*i0?NrD9qi2Fw_&~eMSk3UGyWzcay4oPaWE~nJ{R}-u+%oE z^4pk7G%~M66x6$a(@21!KD)Us1JG?!Xn4Zb;NYOn2SGc%JK!@mQv*PGMGxMb{#a4F z_#t!~GhhJR9)$w;fi20azFx86@7j4yB zpC7-bK<170rK@aOPg zDv69Iy;oMY0yq-ORy`~=Y8>ZQ_}+6m=ElBFD(BO@q9)h-K%)s9-^rh(;7T`vu={0p zCzf*G!~Iex?wWwWS?rOOYx{i!_Lh~OXJ7gYPR(bWfke`)l(GCjjtT06t7+0hHGHhh zA9y}JSM5#_xw|dqtlV?PVqZwGRm*pM)dvDj|LAzkF?4x}RLkCA#>G3V21ZLIt^gG< zQI&0O8}Rf;Def0;ZbweV+|x(R-?(Vnj5F9~eOT)4!nDr7Yq-5!y1bz1t;HjQSLn-A zt1qf%FzvKZ`+#!ufUYj;;FE!eL$>Pcse)qp0BW@>*U{2zo_CWHpgvHpnGofD&KYKY z+!}avbdRD^hZQf zU#$@f{W=^JvL7g)bcEZ<)O9tw4?Dxp&lksZ;$I_{?{l;o=>&}=tF-5MU&27^*rhJT zcd0DiLPxBSPJ<5cx}JGQAds^*(&j4-nHoTwx>dVUGJHkMM7w*nPbN5n_W)JJ zoSF~F)URWm1xS-QkhpAB(#}xq`0?;AQ=#^xj8iv{-*?l`8a;)kpuatAQXeVT+=;#A zT0rvGu`_`{>KMvxzgLkb$EeCy`RyvAx+nC!D381cssru;3nBjt{S>AGvQAs(kxLO{ zIp*xXImIAQJ>kiL&b~R(P_(nAu2z<~Dc*-_c3=C`sjCz@AZVOwgE5s@G#uy{iQNJ} z*pY1bjnx4K{yik#93ftw2}MI#Dt>w>)q5vp~-G zX7!=BUrYpB-3#04(mvmC$-Y!WY8${8gcraWB}q}i z(|PAS*SoXp)9`8tTYTuy7`=#uWFoR#J2(AVcxr-9uF+7kB$GxNkA$Vfoz}l40*Ydo zXReR;i`X4$Te~{&2?RE~^39WlS?>E>my@CS3|paiTe-zGjS$iwI*YbAHOwW*PD@wI z=Nl-L-*Y(4b+hX{-tb98arKb!Q^EK+RA0Lfp4`cv&x7o<`~ghNZ#@Z$`B6O*2R6%R z+kg>9tGG(TtYgVXWD_X)ySeq_3Tq2*GEPMlF@o;BBxfbxC%!xOuwUa+?wXac%Dce> z+d&$P_VsrSw*$bMY#z8~U%K$AIc8vOosw2D4`XdBe5NKVuc+s10x-cw)v;&2Yd`@# z6UL-Y1G;FY$G$?{@cwL6zaRL5p_lTzugeI5PB@eSk^x^LJ=N!qHsScr*=1fnx>1;L zY5eqB8dlecz6GSs<7{=#sl?FWEY66Ejk>f}1odw~P?}i0yH&4d%vKKZ@hTi7-IW8%;{(vI`&L;i z@`wN4O!SHFV&u%JzXt*g%E%4J$^z@6FOtA7Yc(*Rz2%_90Exxp+}r^Vb|pF?C;F8w zu&f+_Jsvg^Wp?I6!+uV$Bi#fzohClm^T{PdQzz%Nn}GENT0zaz{xqo+NWJ!QdLYKf zBHdX|LMnBh5jXZ;>OoAWv*rOX&O8Sbzjyl*y-%<2V2oE_*lEG(1GlpzBZ6aoOp%y8 ze&=uJp63A7*h}C9j-sY70bc4bHQr`@q#!@&!5LxUu`)c;-&WVK?$9+vP%D`7v^_`5 zrOcY7w(+sWUl!hkCI>q|qg_*OZ$os^0Fsg`di5ki_Tzr$8gh}#WNKHtX|hlAupfW6 zk_ZWVB&Hjb9ZbLk!Ie1lMyGd?qhgq8>{#iC>Kg^*taLx^YuW+VQG;}IK{6+Y@0i7& z6iRAQBlI8*LwK}P>x0;cL*en^{8^OvUg%KTXIa~~>xA%u_2)y{h_+YQ?tpDgX9rIe zOo3t5%oVK)PzXFaqN#F2^qJbgB3HzT`{nJcFO`#ATLWNBXfYU5CYHs&PnH^f*Wl6k z?<0KM*e@M?auAvtBi}A#6V#ej{yvSOE8v?4^Jb8y4~i{ zSIC{Kc9#!&HhKqJI9L>s*NbwiwWXI+w-X6TM}&3$PlPOE+G8HP8Hi(#UMtyKy= zLo(ZOb7qTQ^r{NHBg^h=C`gbboZigk0*;z5+XW@P;EzUwQZv5|SZ6W0tBbATVDt$& z4th!!{t_tBc>V9qZE^8&@=VbaMh;!ivCF~IC28PzN2Z{@`)H;y3+{?j%eQl6gP|I9 z-agi;Y>P($m>0yG48Z>=AC0W_h5((46THSuk)X||?u=A_N-{J)`M9Q^WnUMh84VTQ zIvQlFtG4Z5X~3!o0K!K+^E@{TZ;5W3XkNzy z*j?DZB4J)s(LK@K0K1T4u&xvPHDTX zs$=NfQalJo9RXF+0@j1~t~aK@*DAWgsI@Sl{8AP8%T`P`Vu~Tv_%ZmbJz^#V>NJZl-TbST^RMK5DlNOs$kegkbICLYRJk-}g{l-Wn^Vya`SL3T1tiIw^Z zm~h)cx+UimpKrqQ=$a*_BCrvMGi%5Nr5qU)hq|P1Tjp!gLgpIqRRIs`qsDGjcel*OH-c~&6W812bsUI z>umkx8_8Ottu&n?L`^t@;63h8!Nb19V4*G1v2?3e;$WrvvX7%#JaxH?R) zN@KLmgq3q$NONDrj=7c`8~kK5VTf>xS$Q2C8@T{(7ygTX1N^6hZ&3*F7Z@!5FaMz+ n@b3Qu^xx$8Uk}h2jH{d|uJ4jrSC|P(2)ca1@;v^m$K8JeR7TPQ literal 0 HcmV?d00001 diff --git a/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/Process.IntegrationTestRunner.Dapr.csproj b/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/Process.IntegrationTestRunner.Dapr.csproj index ba07a9a9dcad..2d35183b3648 100644 
--- a/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/Process.IntegrationTestRunner.Dapr.csproj +++ b/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/Process.IntegrationTestRunner.Dapr.csproj @@ -32,7 +32,6 @@ - diff --git a/dotnet/src/Functions/Functions.Grpc/Extensions/GrpcKernelExtensions.cs b/dotnet/src/Functions/Functions.Grpc/Extensions/GrpcKernelExtensions.cs index 20f928cb7bcb..2b2b7488db6a 100644 --- a/dotnet/src/Functions/Functions.Grpc/Extensions/GrpcKernelExtensions.cs +++ b/dotnet/src/Functions/Functions.Grpc/Extensions/GrpcKernelExtensions.cs @@ -88,7 +88,7 @@ public static KernelPlugin CreatePluginFromGrpcDirectory( { const string ProtoFile = "grpc.proto"; - Verify.ValidPluginName(pluginDirectoryName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginDirectoryName, kernel.Plugins); var pluginDir = Path.Combine(parentDirectory, pluginDirectoryName); Verify.DirectoryExists(pluginDir); @@ -151,7 +151,7 @@ public static KernelPlugin CreatePluginFromGrpc( string pluginName) { Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginName, kernel.Plugins); // Parse var parser = new ProtoDocumentParser(); diff --git a/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/ApiManifestKernelExtensions.cs b/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/ApiManifestKernelExtensions.cs index 0bc7dbc73fb0..b472992cd0f1 100644 --- a/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/ApiManifestKernelExtensions.cs +++ b/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/ApiManifestKernelExtensions.cs @@ -62,7 +62,7 @@ public static async Task CreatePluginFromApiManifestAsync( CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginName, kernel.Plugins); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(pluginParameters?.HttpClient ?? kernel.Services.GetService()); diff --git a/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs b/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs index fcea1ef3a387..b20b8968c90b 100644 --- a/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs +++ b/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/CopilotAgentPluginKernelExtensions.cs @@ -62,7 +62,7 @@ public static async Task CreatePluginFromCopilotAgentPluginAsync( CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginName, kernel.Plugins); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(pluginParameters?.HttpClient ?? 
kernel.Services.GetService()); diff --git a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs index 2e7fb3d2214f..93709fc09a77 100644 --- a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs @@ -117,7 +117,7 @@ public static async Task CreatePluginFromOpenApiAsync( CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginName, kernel.Plugins); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient ?? kernel.Services.GetService()); @@ -156,7 +156,7 @@ public static async Task CreatePluginFromOpenApiAsync( CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginName, kernel.Plugins); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient ?? kernel.Services.GetService()); @@ -199,7 +199,7 @@ public static async Task CreatePluginFromOpenApiAsync( CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginName, kernel.Plugins); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient ?? kernel.Services.GetService()); @@ -233,7 +233,7 @@ public static KernelPlugin CreatePluginFromOpenApi( CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); + KernelVerify.ValidPluginName(pluginName, kernel.Plugins); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient ?? kernel.Services.GetService()); diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs b/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs index 0e69ffefcc16..d76998d0976e 100644 --- a/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs +++ b/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs @@ -36,7 +36,7 @@ public static async Task CreateFromOpenApiAsync( OpenApiFunctionExecutionParameters? 
executionParameters = null, CancellationToken cancellationToken = default) { - Verify.ValidPluginName(pluginName); + KernelVerify.ValidPluginName(pluginName); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient); @@ -73,7 +73,7 @@ public static async Task CreateFromOpenApiAsync( OpenApiFunctionExecutionParameters? executionParameters = null, CancellationToken cancellationToken = default) { - Verify.ValidPluginName(pluginName); + KernelVerify.ValidPluginName(pluginName); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient); @@ -114,7 +114,7 @@ public static async Task CreateFromOpenApiAsync( OpenApiFunctionExecutionParameters? executionParameters = null, CancellationToken cancellationToken = default) { - Verify.ValidPluginName(pluginName); + KernelVerify.ValidPluginName(pluginName); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient); @@ -143,7 +143,7 @@ public static KernelPlugin CreateFromOpenApi( RestApiSpecification specification, OpenApiFunctionExecutionParameters? executionParameters = null) { - Verify.ValidPluginName(pluginName); + KernelVerify.ValidPluginName(pluginName); #pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient); @@ -394,7 +394,7 @@ private static string ConvertOperationIdToValidFunctionName(string operationId, { try { - Verify.ValidFunctionName(operationId); + KernelVerify.ValidFunctionName(operationId); return operationId; } catch (ArgumentException) diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/CommonPineconeVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/CommonPineconeVectorStoreRecordCollectionTests.cs deleted file mode 100644 index 21964a61e3d0..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/CommonPineconeVectorStoreRecordCollectionTests.cs +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Connectors.Pinecone; -using SemanticKernel.IntegrationTests.Connectors.Memory.Pinecone.Xunit; -using Xunit; - -namespace SemanticKernel.IntegrationTests.Connectors.Memory.Pinecone; - -/// -/// Inherits common integration tests that should pass for any . -/// -/// Pinecone setup and teardown. 
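[Editor's note] The Verify.ValidPluginName to KernelVerify.ValidPluginName changes above affect the plugin-import entry points shown in these files; a usage sketch of one of them (the URL is a placeholder, and the Uri-based overload of CreatePluginFromOpenApiAsync is assumed from the existing OpenAPI extensions rather than introduced by this patch):

    var kernel = Kernel.CreateBuilder().Build();

    // The plugin name passed here is what KernelVerify.ValidPluginName now validates
    // (including uniqueness against kernel.Plugins).
    var plugin = await kernel.CreatePluginFromOpenApiAsync(
        "githubApi",
        new Uri("https://example.com/openapi.json"));

    kernel.Plugins.Add(plugin);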
-[Collection("PineconeVectorStoreTests")] -[PineconeApiKeySetCondition] -public class CommonPineconeVectorStoreRecordCollectionTests(PineconeVectorStoreFixture fixture) : BaseVectorStoreRecordCollectionTests, IClassFixture -{ - protected override string Key1 => "1"; - protected override string Key2 => "2"; - protected override string Key3 => "3"; - protected override string Key4 => "4"; - - protected override int DelayAfterIndexCreateInMilliseconds => 2000; - - protected override int DelayAfterUploadInMilliseconds => 15000; - - [SuppressMessage("Globalization", "CA1308:Normalize strings to uppercase", Justification = "Pinecone collection names should be lower case.")] - protected override IVectorStoreRecordCollection GetTargetRecordCollection(string recordCollectionName, VectorStoreRecordDefinition? vectorStoreRecordDefinition) - { - return new PineconeVectorStoreRecordCollection(fixture.Client, recordCollectionName.ToLowerInvariant(), new() - { - VectorStoreRecordDefinition = vectorStoreRecordDefinition - }); - } - - protected override HashSet GetSupportedDistanceFunctions() - { - return [DistanceFunction.CosineSimilarity, DistanceFunction.DotProductSimilarity, DistanceFunction.EuclideanSquaredDistance]; - } -} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeAllTypes.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeAllTypes.cs deleted file mode 100644 index 7067781987bc..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeAllTypes.cs +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using Microsoft.Extensions.VectorData; - -namespace SemanticKernel.IntegrationTests.Connectors.Memory.Pinecone; - -#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider adding the 'required' modifier or declaring as nullable. -public record PineconeAllTypes() -{ - [VectorStoreRecordKey] - public string Id { get; init; } - - [VectorStoreRecordData] - public bool BoolProperty { get; set; } - [VectorStoreRecordData] - public bool? NullableBoolProperty { get; set; } - [VectorStoreRecordData] - public string StringProperty { get; set; } - [VectorStoreRecordData] - public string? NullableStringProperty { get; set; } - [VectorStoreRecordData] - public int IntProperty { get; set; } - [VectorStoreRecordData] - public int? NullableIntProperty { get; set; } - [VectorStoreRecordData] - public long LongProperty { get; set; } - [VectorStoreRecordData] - public long? NullableLongProperty { get; set; } - [VectorStoreRecordData] - public float FloatProperty { get; set; } - [VectorStoreRecordData] - public float? NullableFloatProperty { get; set; } - [VectorStoreRecordData] - public double DoubleProperty { get; set; } - [VectorStoreRecordData] - public double? NullableDoubleProperty { get; set; } - [VectorStoreRecordData] - public decimal DecimalProperty { get; set; } - [VectorStoreRecordData] - public decimal? NullableDecimalProperty { get; set; } - -#pragma warning disable CA1819 // Properties should not return arrays - [VectorStoreRecordData] - public string[] StringArray { get; set; } - [VectorStoreRecordData] - public string[]? NullableStringArray { get; set; } -#pragma warning restore CA1819 // Properties should not return arrays - - [VectorStoreRecordData] - public List StringList { get; set; } - [VectorStoreRecordData] - public List? 
NullableStringList { get; set; } - - [VectorStoreRecordData] - public IReadOnlyCollection Collection { get; set; } - [VectorStoreRecordData] - public IEnumerable Enumerable { get; set; } - - [VectorStoreRecordVector(Dimensions: 8, DistanceFunction: DistanceFunction.DotProductSimilarity)] - public ReadOnlyMemory? Embedding { get; set; } -} -#pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider adding the 'required' modifier or declaring as nullable. diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeHotel.cs deleted file mode 100644 index 54185830d5c0..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeHotel.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; -using Microsoft.Extensions.VectorData; - -namespace SemanticKernel.IntegrationTests.Connectors.Memory.Pinecone; - -#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider adding the 'required' modifier or declaring as nullable. -public record PineconeHotel() - -{ - [VectorStoreRecordKey] - public string HotelId { get; init; } - - [VectorStoreRecordData] - public string HotelName { get; set; } - - [JsonPropertyName("code_of_the_hotel")] - [VectorStoreRecordData] - public int HotelCode { get; set; } - - [VectorStoreRecordData] - public float HotelRating { get; set; } - - [JsonPropertyName("json_parking")] - [VectorStoreRecordData(StoragePropertyName = "parking_is_included")] - public bool ParkingIncluded { get; set; } - - [VectorStoreRecordData] - public List Tags { get; set; } = []; - - [VectorStoreRecordData] - public string Description { get; set; } - - [VectorStoreRecordVector(Dimensions: 8, DistanceFunction: DistanceFunction.DotProductSimilarity)] - public ReadOnlyMemory DescriptionEmbedding { get; set; } -} -#pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider adding the 'required' modifier or declaring as nullable. diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeUserSecretsExtensions.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeUserSecretsExtensions.cs deleted file mode 100644 index 1644b7427e99..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeUserSecretsExtensions.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.IO; -using System.Reflection; -using System.Text.Json; -using Microsoft.Extensions.Configuration.UserSecrets; - -namespace SemanticKernel.IntegrationTests.Connectors.Memory.Pinecone; -public static class PineconeUserSecretsExtensions -{ - public const string PineconeApiKeyUserSecretEntry = "PineconeApiKey"; - - public static string ReadPineconeApiKey() - => JsonSerializer.Deserialize>( - File.ReadAllText(PathHelper.GetSecretsPathFromSecretsId( - typeof(PineconeUserSecretsExtensions).Assembly.GetCustomAttribute()! 
- .UserSecretsId)))![PineconeApiKeyUserSecretEntry].Trim(); - - public static bool ContainsPineconeApiKey() - { - var userSecretsIdAttribute = typeof(PineconeUserSecretsExtensions).Assembly.GetCustomAttribute(); - if (userSecretsIdAttribute == null) - { - return false; - } - - var path = PathHelper.GetSecretsPathFromSecretsId(userSecretsIdAttribute.UserSecretsId); - if (!File.Exists(path)) - { - return false; - } - - return JsonSerializer.Deserialize>( - File.ReadAllText(path))!.ContainsKey(PineconeApiKeyUserSecretEntry); - } -} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreFixture.cs deleted file mode 100644 index c0c002f22ba0..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreFixture.cs +++ /dev/null @@ -1,350 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Net.Http; -using System.Text.RegularExpressions; -using System.Threading.Tasks; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Connectors.Pinecone; -using Pinecone.Grpc; -using Xunit; -using Sdk = Pinecone; - -namespace SemanticKernel.IntegrationTests.Connectors.Memory.Pinecone; - -public class PineconeVectorStoreFixture : IAsyncLifetime -{ - private const int MaxAttemptCount = 100; - private const int DelayInterval = 300; - - public string IndexName { get; } = "sk-index" -#pragma warning disable CA1308 // Normalize strings to uppercase - + new Regex("[^a-zA-Z0-9]", RegexOptions.None, matchTimeout: new TimeSpan(0, 0, 10)).Replace(Environment.MachineName.ToLowerInvariant(), ""); -#pragma warning restore CA1308 // Normalize strings to uppercase - - public Sdk.PineconeClient Client { get; private set; } = null!; - public PineconeVectorStore VectorStore { get; private set; } = null!; - public PineconeVectorStoreRecordCollection HotelRecordCollection { get; set; } = null!; - public PineconeVectorStoreRecordCollection AllTypesRecordCollection { get; set; } = null!; - public PineconeVectorStoreRecordCollection HotelRecordCollectionWithCustomNamespace { get; set; } = null!; - public IVectorStoreRecordCollection HotelRecordCollectionFromVectorStore { get; set; } = null!; - public IVectorStoreRecordCollection> HotelRecordCollectionWithGenericDataModel { get; set; } = null!; - - public virtual Sdk.Index Index { get; set; } = null!; - - public virtual async Task InitializeAsync() - { - this.Client = new Sdk.PineconeClient(PineconeUserSecretsExtensions.ReadPineconeApiKey()); - this.VectorStore = new PineconeVectorStore(this.Client); - - var hotelRecordDefinition = new VectorStoreRecordDefinition - { - Properties = - [ - new VectorStoreRecordKeyProperty(nameof(PineconeHotel.HotelId), typeof(string)), - new VectorStoreRecordDataProperty(nameof(PineconeHotel.HotelName), typeof(string)), - new VectorStoreRecordDataProperty(nameof(PineconeHotel.HotelCode), typeof(int)), - new VectorStoreRecordDataProperty(nameof(PineconeHotel.ParkingIncluded), typeof(bool)) { StoragePropertyName = "parking_is_included" }, - new VectorStoreRecordDataProperty(nameof(PineconeHotel.HotelRating), typeof(float)), - new VectorStoreRecordDataProperty(nameof(PineconeHotel.Tags), typeof(List)), - new VectorStoreRecordDataProperty(nameof(PineconeHotel.Description), typeof(string)), - new VectorStoreRecordVectorProperty(nameof(PineconeHotel.DescriptionEmbedding), typeof(ReadOnlyMemory)) { 
Dimensions = 8, DistanceFunction = DistanceFunction.DotProductSimilarity } - ] - }; - - var allTypesRecordDefinition = new VectorStoreRecordDefinition - { - Properties = - [ - new VectorStoreRecordKeyProperty(nameof(PineconeAllTypes.Id), typeof(string)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.BoolProperty), typeof(bool)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableBoolProperty), typeof(bool?)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.StringProperty), typeof(string)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableStringProperty), typeof(string)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.IntProperty), typeof(int)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableIntProperty), typeof(int?)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.LongProperty), typeof(long)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableLongProperty), typeof(long?)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.FloatProperty), typeof(float)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableFloatProperty), typeof(float?)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.DoubleProperty), typeof(double)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableDoubleProperty), typeof(double?)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.DecimalProperty), typeof(decimal)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableDecimalProperty), typeof(decimal?)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.StringArray), typeof(string[])), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableStringArray), typeof(string[])), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.StringList), typeof(List)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableStringList), typeof(List)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.Collection), typeof(IReadOnlyCollection)), - new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.Enumerable), typeof(IEnumerable)), - new VectorStoreRecordVectorProperty(nameof(PineconeAllTypes.Embedding), typeof(ReadOnlyMemory?)) { Dimensions = 8, DistanceFunction = DistanceFunction.DotProductSimilarity } - ] - }; - - this.HotelRecordCollection = new PineconeVectorStoreRecordCollection( - this.Client, - this.IndexName, - new PineconeVectorStoreRecordCollectionOptions - { - VectorStoreRecordDefinition = hotelRecordDefinition - }); - - this.AllTypesRecordCollection = new PineconeVectorStoreRecordCollection( - this.Client, - this.IndexName, - new PineconeVectorStoreRecordCollectionOptions - { - VectorStoreRecordDefinition = allTypesRecordDefinition - }); - - this.HotelRecordCollectionWithCustomNamespace = new PineconeVectorStoreRecordCollection( - this.Client, - this.IndexName, - new PineconeVectorStoreRecordCollectionOptions - { - VectorStoreRecordDefinition = hotelRecordDefinition, - IndexNamespace = "my-namespace" - }); - - this.HotelRecordCollectionFromVectorStore = this.VectorStore.GetCollection( - this.IndexName, - hotelRecordDefinition); - - this.HotelRecordCollectionWithGenericDataModel = this.VectorStore.GetCollection>( - this.IndexName, - hotelRecordDefinition); - - await this.ClearIndexesAsync(); - await this.CreateIndexAndWaitAsync(); - await this.AddSampleDataAsync(); - } - - private async Task CreateIndexAndWaitAsync() - { - var attemptCount = 0; - - await 
this.HotelRecordCollection.CreateCollectionAsync(); - - do - { - await Task.Delay(DelayInterval); - attemptCount++; - this.Index = await this.Client.GetIndex(this.IndexName); - } while (!this.Index.Status.IsReady && attemptCount <= MaxAttemptCount); - - if (!this.Index.Status.IsReady) - { - throw new InvalidOperationException("'Create index' operation didn't complete in time. Index name: " + this.IndexName); - } - } - - public async Task DisposeAsync() - { - if (this.Client is not null) - { - await this.ClearIndexesAsync(); - this.Client.Dispose(); - } - } - - private async Task AddSampleDataAsync() - { - var fiveSeasons = new PineconeHotel - { - HotelId = "five-seasons", - HotelName = "Five Seasons Hotel", - Description = "Great service any season.", - HotelCode = 7, - HotelRating = 4.5f, - ParkingIncluded = true, - DescriptionEmbedding = new ReadOnlyMemory([7.5f, 71.0f, 71.5f, 72.0f, 72.5f, 73.0f, 73.5f, 74.0f]), - Tags = ["wi-fi", "sauna", "gym", "pool"] - }; - - var vacationInn = new PineconeHotel - { - HotelId = "vacation-inn", - HotelName = "Vacation Inn Hotel", - Description = "On vacation? Stay with us.", - HotelCode = 11, - HotelRating = 4.3f, - ParkingIncluded = true, - DescriptionEmbedding = new ReadOnlyMemory([17.5f, 721.0f, 731.5f, 742.0f, 762.5f, 783.0f, 793.5f, 704.0f]), - Tags = ["wi-fi", "breakfast", "gym"] - }; - - var bestEastern = new PineconeHotel - { - HotelId = "best-eastern", - HotelName = "Best Eastern Hotel", - Description = "Best hotel east of New York.", - HotelCode = 42, - HotelRating = 4.7f, - ParkingIncluded = true, - DescriptionEmbedding = new ReadOnlyMemory([47.5f, 421.0f, 741.5f, 744.0f, 742.5f, 483.0f, 743.5f, 744.0f]), - Tags = ["wi-fi", "breakfast", "gym"] - }; - - var stats = await this.Index.DescribeStats(); - var vectorCountBefore = stats.TotalVectorCount; - - // use both Upsert and BatchUpsert methods and also use record collections created directly and using vector store - await this.HotelRecordCollection.UpsertAsync(fiveSeasons); - vectorCountBefore = await this.VerifyVectorCountModifiedAsync(vectorCountBefore, delta: 1); - - await this.HotelRecordCollectionFromVectorStore.UpsertBatchAsync([vacationInn, bestEastern]).ToListAsync(); - vectorCountBefore = await this.VerifyVectorCountModifiedAsync(vectorCountBefore, delta: 2); - - var allTypes1 = new PineconeAllTypes - { - Id = "all-types-1", - BoolProperty = true, - NullableBoolProperty = false, - StringProperty = "string prop 1", - NullableStringProperty = "nullable prop 1", - IntProperty = 1, - NullableIntProperty = 10, - LongProperty = 100L, - NullableLongProperty = 1000L, - FloatProperty = 10.5f, - NullableFloatProperty = 100.5f, - DoubleProperty = 23.75d, - NullableDoubleProperty = 233.75d, - DecimalProperty = 50.75m, - NullableDecimalProperty = 500.75m, - StringArray = ["one", "two"], - NullableStringArray = ["five", "six"], - StringList = ["eleven", "twelve"], - NullableStringList = ["fifteen", "sixteen"], - Collection = ["Foo", "Bar"], - Enumerable = ["another", "and another"], - Embedding = new ReadOnlyMemory([1.5f, 2.5f, 3.5f, 4.5f, 5.5f, 6.5f, 7.5f, 8.5f]) - }; - - var allTypes2 = new PineconeAllTypes - { - Id = "all-types-2", - BoolProperty = false, - NullableBoolProperty = null, - StringProperty = "string prop 2", - NullableStringProperty = null, - IntProperty = 2, - NullableIntProperty = null, - LongProperty = 200L, - NullableLongProperty = null, - FloatProperty = 20.5f, - NullableFloatProperty = null, - DoubleProperty = 43.75, - NullableDoubleProperty = null, - DecimalProperty = 
250.75M, - NullableDecimalProperty = null, - StringArray = [], - NullableStringArray = null, - StringList = [], - NullableStringList = null, - Collection = [], - Enumerable = [], - Embedding = new ReadOnlyMemory([10.5f, 20.5f, 30.5f, 40.5f, 50.5f, 60.5f, 70.5f, 80.5f]) - }; - - await this.AllTypesRecordCollection.UpsertBatchAsync([allTypes1, allTypes2]).ToListAsync(); - vectorCountBefore = await this.VerifyVectorCountModifiedAsync(vectorCountBefore, delta: 2); - - var custom = new PineconeHotel - { - HotelId = "custom-hotel", - HotelName = "Custom Hotel", - Description = "Everything customizable!", - HotelCode = 17, - HotelRating = 4.25f, - ParkingIncluded = true, - DescriptionEmbedding = new ReadOnlyMemory([147.5f, 1421.0f, 1741.5f, 1744.0f, 1742.5f, 1483.0f, 1743.5f, 1744.0f]), - }; - - await this.HotelRecordCollectionWithCustomNamespace.UpsertAsync(custom); - vectorCountBefore = await this.VerifyVectorCountModifiedAsync(vectorCountBefore, delta: 1); - } - - public async Task VerifyVectorCountModifiedAsync(uint vectorCountBefore, int delta) - { - var attemptCount = 0; - Sdk.IndexStats stats; - - do - { - await Task.Delay(DelayInterval); - attemptCount++; - stats = await this.Index.DescribeStats(); - } while (stats.TotalVectorCount != vectorCountBefore + delta && attemptCount <= MaxAttemptCount); - - if (stats.TotalVectorCount != vectorCountBefore + delta) - { - throw new InvalidOperationException("'Upsert'/'Delete' operation didn't complete in time."); - } - - return stats.TotalVectorCount; - } - - public async Task DeleteAndWaitAsync(IEnumerable ids, string? indexNamespace = null) - { - var stats = await this.Index.DescribeStats(); - var vectorCountBefore = stats.Namespaces.Single(x => x.Name == (indexNamespace ?? "")).VectorCount; - var idCount = ids.Count(); - - var attemptCount = 0; - await this.Index.Delete(ids, indexNamespace); - long vectorCount; - do - { - await Task.Delay(DelayInterval); - attemptCount++; - stats = await this.Index.DescribeStats(); - vectorCount = stats.Namespaces.Single(x => x.Name == (indexNamespace ?? "")).VectorCount; - } while (vectorCount > vectorCountBefore - idCount && attemptCount <= MaxAttemptCount); - - if (vectorCount > vectorCountBefore - idCount) - { - throw new InvalidOperationException("'Delete' operation didn't complete in time."); - } - } - - private async Task ClearIndexesAsync() - { - var indexes = await this.Client.ListIndexes(); - var deletions = indexes.Select(x => this.DeleteExistingIndexAndWaitAsync(x.Name)); - - await Task.WhenAll(deletions); - } - - private async Task DeleteExistingIndexAndWaitAsync(string indexName) - { - var exists = true; - try - { - var attemptCount = 0; - await this.Client.DeleteIndex(indexName); - - do - { - await Task.Delay(DelayInterval); - var indexes = (await this.Client.ListIndexes()).Select(x => x.Name).ToArray(); - if (indexes.Length == 0 || !indexes.Contains(indexName)) - { - exists = false; - } - } while (exists && attemptCount <= MaxAttemptCount); - } - catch (HttpRequestException ex) when (ex.Message.Contains("NOT_FOUND")) - { - // index was already deleted - exists = false; - } - - if (exists) - { - throw new InvalidOperationException("'Delete index' operation didn't complete in time. 
Index name: " + indexName); - } - } -} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs deleted file mode 100644 index 9b68eaf8d863..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs +++ /dev/null @@ -1,684 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Linq; -using System.Net; -using System.Net.Http; -using System.Threading.Tasks; -using Grpc.Core; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Connectors.Pinecone; -using Pinecone; -using SemanticKernel.IntegrationTests.Connectors.Memory.Pinecone.Xunit; -using SemanticKernel.IntegrationTests.Connectors.Memory.Xunit; -using Xunit; - -namespace SemanticKernel.IntegrationTests.Connectors.Memory.Pinecone; - -#pragma warning disable CS0618 // VectorSearchFilter is obsolete - -[Collection("PineconeVectorStoreTests")] -[PineconeApiKeySetCondition] -public class PineconeVectorStoreRecordCollectionTests(PineconeVectorStoreFixture fixture) : IClassFixture -{ - private PineconeVectorStoreFixture Fixture { get; } = fixture; - - [VectorStoreFact] - public async Task TryCreateExistingIndexIsNoopAsync() - { - await this.Fixture.HotelRecordCollection.CreateCollectionIfNotExistsAsync(); - } - - [VectorStoreFact] - public async Task CollectionExistsReturnsTrueForExistingCollectionAsync() - { - var result = await this.Fixture.HotelRecordCollection.CollectionExistsAsync(); - - Assert.True(result); - } - - [VectorStoreTheory] - [InlineData(true)] - [InlineData(false)] - public async Task BasicGetAsync(bool includeVectors) - { - var fiveSeasons = await this.Fixture.HotelRecordCollection.GetAsync("five-seasons", new GetRecordOptions { IncludeVectors = includeVectors }); - - Assert.NotNull(fiveSeasons); - Assert.Equal("five-seasons", fiveSeasons.HotelId); - Assert.Equal("Five Seasons Hotel", fiveSeasons.HotelName); - Assert.Equal("Great service any season.", fiveSeasons.Description); - Assert.Equal(7, fiveSeasons.HotelCode); - Assert.Equal(4.5f, fiveSeasons.HotelRating); - Assert.True(fiveSeasons.ParkingIncluded); - Assert.Contains("wi-fi", fiveSeasons.Tags); - Assert.Contains("sauna", fiveSeasons.Tags); - Assert.Contains("gym", fiveSeasons.Tags); - Assert.Contains("pool", fiveSeasons.Tags); - - if (includeVectors) - { - Assert.Equal(new ReadOnlyMemory([7.5f, 71.0f, 71.5f, 72.0f, 72.5f, 73.0f, 73.5f, 74.0f]), fiveSeasons.DescriptionEmbedding); - } - else - { - Assert.Equal(new ReadOnlyMemory([]), fiveSeasons.DescriptionEmbedding); - } - } - - [VectorStoreTheory] - [InlineData(true)] - [InlineData(false)] - public async Task BatchGetAsync(bool collectionFromVectorStore) - { - var hotelsCollection = collectionFromVectorStore - ? 
this.Fixture.HotelRecordCollection - : this.Fixture.HotelRecordCollectionFromVectorStore; - - var hotels = await hotelsCollection.GetBatchAsync(["five-seasons", "vacation-inn", "best-eastern"]).ToListAsync(); - - var fiveSeasons = hotels.Single(x => x.HotelId == "five-seasons"); - var vacationInn = hotels.Single(x => x.HotelId == "vacation-inn"); - var bestEastern = hotels.Single(x => x.HotelId == "best-eastern"); - - Assert.Equal("Five Seasons Hotel", fiveSeasons.HotelName); - Assert.Equal("Great service any season.", fiveSeasons.Description); - Assert.Equal(7, fiveSeasons.HotelCode); - Assert.Equal(4.5f, fiveSeasons.HotelRating); - Assert.True(fiveSeasons.ParkingIncluded); - Assert.Contains("wi-fi", fiveSeasons.Tags); - Assert.Contains("sauna", fiveSeasons.Tags); - Assert.Contains("gym", fiveSeasons.Tags); - Assert.Contains("pool", fiveSeasons.Tags); - - Assert.Equal("Vacation Inn Hotel", vacationInn.HotelName); - Assert.Equal("On vacation? Stay with us.", vacationInn.Description); - Assert.Equal(11, vacationInn.HotelCode); - Assert.Equal(4.3f, vacationInn.HotelRating); - Assert.True(vacationInn.ParkingIncluded); - Assert.Contains("wi-fi", vacationInn.Tags); - Assert.Contains("breakfast", vacationInn.Tags); - Assert.Contains("gym", vacationInn.Tags); - - Assert.Equal("Best Eastern Hotel", bestEastern.HotelName); - Assert.Equal("Best hotel east of New York.", bestEastern.Description); - Assert.Equal(42, bestEastern.HotelCode); - Assert.Equal(4.7f, bestEastern.HotelRating); - Assert.True(bestEastern.ParkingIncluded); - Assert.Contains("wi-fi", bestEastern.Tags); - Assert.Contains("breakfast", bestEastern.Tags); - Assert.Contains("gym", bestEastern.Tags); - } - - [VectorStoreTheory] - [InlineData(true)] - [InlineData(false)] - public async Task AllTypesBatchGetAsync(bool includeVectors) - { - var allTypes = await this.Fixture.AllTypesRecordCollection.GetBatchAsync(["all-types-1", "all-types-2"], new GetRecordOptions { IncludeVectors = includeVectors }).ToListAsync(); - - var allTypes1 = allTypes.Single(x => x.Id == "all-types-1"); - var allTypes2 = allTypes.Single(x => x.Id == "all-types-2"); - - Assert.True(allTypes1.BoolProperty); - Assert.Equal("string prop 1", allTypes1.StringProperty); - Assert.Equal(1, allTypes1.IntProperty); - Assert.Equal(100L, allTypes1.LongProperty); - Assert.Equal(10.5f, allTypes1.FloatProperty); - Assert.Equal(23.75d, allTypes1.DoubleProperty); - Assert.Equal(50.75m, allTypes1.DecimalProperty); - Assert.Contains("one", allTypes1.StringArray); - Assert.Contains("two", allTypes1.StringArray); - Assert.Contains("eleven", allTypes1.StringList); - Assert.Contains("twelve", allTypes1.StringList); - Assert.Contains("Foo", allTypes1.Collection); - Assert.Contains("Bar", allTypes1.Collection); - Assert.Contains("another", allTypes1.Enumerable); - Assert.Contains("and another", allTypes1.Enumerable); - - Assert.False(allTypes2.BoolProperty); - Assert.Equal("string prop 2", allTypes2.StringProperty); - Assert.Equal(2, allTypes2.IntProperty); - Assert.Equal(200L, allTypes2.LongProperty); - Assert.Equal(20.5f, allTypes2.FloatProperty); - Assert.Equal(43.75d, allTypes2.DoubleProperty); - Assert.Equal(250.75m, allTypes2.DecimalProperty); - Assert.Empty(allTypes2.StringArray); - Assert.Empty(allTypes2.StringList); - Assert.Empty(allTypes2.Collection); - Assert.Empty(allTypes2.Enumerable); - - if (includeVectors) - { - Assert.True(allTypes1.Embedding.HasValue); - Assert.Equal(new ReadOnlyMemory([1.5f, 2.5f, 3.5f, 4.5f, 5.5f, 6.5f, 7.5f, 8.5f]), allTypes1.Embedding.Value); - - 
Assert.True(allTypes2.Embedding.HasValue); - Assert.Equal(new ReadOnlyMemory([10.5f, 20.5f, 30.5f, 40.5f, 50.5f, 60.5f, 70.5f, 80.5f]), allTypes2.Embedding.Value); - } - else - { - Assert.Null(allTypes1.Embedding); - Assert.Null(allTypes2.Embedding); - } - } - - [VectorStoreFact] - public async Task BatchGetIncludingNonExistingRecordAsync() - { - var hotels = await this.Fixture.HotelRecordCollection.GetBatchAsync(["vacation-inn", "non-existing"]).ToListAsync(); - - Assert.Single(hotels); - var vacationInn = hotels.Single(x => x.HotelId == "vacation-inn"); - - Assert.Equal("Vacation Inn Hotel", vacationInn.HotelName); - Assert.Equal("On vacation? Stay with us.", vacationInn.Description); - Assert.Equal(11, vacationInn.HotelCode); - Assert.Equal(4.3f, vacationInn.HotelRating); - Assert.True(vacationInn.ParkingIncluded); - Assert.Contains("wi-fi", vacationInn.Tags); - Assert.Contains("breakfast", vacationInn.Tags); - Assert.Contains("gym", vacationInn.Tags); - } - - [VectorStoreFact] - public async Task GetNonExistingRecordAsync() - { - var result = await this.Fixture.HotelRecordCollection.GetAsync("non-existing"); - Assert.Null(result); - } - - [VectorStoreTheory] - [InlineData(true)] - [InlineData(false)] - public async Task GetFromCustomNamespaceAsync(bool includeVectors) - { - var custom = await this.Fixture.HotelRecordCollectionWithCustomNamespace.GetAsync("custom-hotel", new GetRecordOptions { IncludeVectors = includeVectors }); - - Assert.NotNull(custom); - Assert.Equal("custom-hotel", custom.HotelId); - Assert.Equal("Custom Hotel", custom.HotelName); - if (includeVectors) - { - Assert.Equal(new ReadOnlyMemory([147.5f, 1421.0f, 1741.5f, 1744.0f, 1742.5f, 1483.0f, 1743.5f, 1744.0f]), custom.DescriptionEmbedding); - } - else - { - Assert.Equal(new ReadOnlyMemory([]), custom.DescriptionEmbedding); - } - } - - [VectorStoreFact] - public async Task TryGetVectorLocatedInDefaultNamespaceButLookInCustomNamespaceAsync() - { - var badFiveSeasons = await this.Fixture.HotelRecordCollectionWithCustomNamespace.GetAsync("five-seasons"); - - Assert.Null(badFiveSeasons); - } - - [VectorStoreFact] - public async Task TryGetVectorLocatedInCustomNamespaceButLookInDefaultNamespaceAsync() - { - var badCustomHotel = await this.Fixture.HotelRecordCollection.GetAsync("custom-hotel"); - - Assert.Null(badCustomHotel); - } - - [VectorStoreFact] - public async Task DeleteNonExistingRecordAsync() - { - await this.Fixture.HotelRecordCollection.DeleteAsync("non-existing"); - } - - [VectorStoreFact] - public async Task TryDeleteExistingVectorLocatedInDefaultNamespaceButUseCustomNamespaceDoesNotDoAnythingAsync() - { - await this.Fixture.HotelRecordCollectionWithCustomNamespace.DeleteAsync("five-seasons"); - - var stillThere = await this.Fixture.HotelRecordCollection.GetAsync("five-seasons"); - Assert.NotNull(stillThere); - Assert.Equal("five-seasons", stillThere.HotelId); - } - - [VectorStoreFact] - public async Task TryDeleteExistingVectorLocatedInCustomNamespaceButUseDefaultNamespaceDoesNotDoAnythingAsync() - { - await this.Fixture.HotelRecordCollection.DeleteAsync("custom-hotel"); - - var stillThere = await this.Fixture.HotelRecordCollectionWithCustomNamespace.GetAsync("custom-hotel"); - Assert.NotNull(stillThere); - Assert.Equal("custom-hotel", stillThere.HotelId); - } - - [VectorStoreTheory] - [InlineData(true)] - [InlineData(false)] - public async Task InsertGetModifyDeleteVectorAsync(bool collectionFromVectorStore) - { - var langriSha = new PineconeHotel - { - HotelId = "langri-sha", - HotelName = "Langri-Sha 
Hotel", - Description = "Lorem ipsum", - HotelCode = 100, - HotelRating = 4.2f, - ParkingIncluded = false, - DescriptionEmbedding = new ReadOnlyMemory([1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f]) - }; - - var stats = await this.Fixture.Index.DescribeStats(); - var vectorCountBefore = stats.TotalVectorCount; - - var hotelRecordCollection = collectionFromVectorStore - ? this.Fixture.HotelRecordCollectionFromVectorStore - : this.Fixture.HotelRecordCollection; - - // insert - await hotelRecordCollection.UpsertAsync(langriSha); - - vectorCountBefore = await this.Fixture.VerifyVectorCountModifiedAsync(vectorCountBefore, delta: 1); - - var inserted = await hotelRecordCollection.GetAsync("langri-sha", new GetRecordOptions { IncludeVectors = true }); - - Assert.NotNull(inserted); - Assert.Equal(langriSha.HotelName, inserted.HotelName); - Assert.Equal(langriSha.Description, inserted.Description); - Assert.Equal(langriSha.HotelCode, inserted.HotelCode); - Assert.Equal(langriSha.HotelRating, inserted.HotelRating); - Assert.Equal(langriSha.ParkingIncluded, inserted.ParkingIncluded); - Assert.Equal(langriSha.DescriptionEmbedding, inserted.DescriptionEmbedding); - - langriSha.Description += " dolor sit amet"; - langriSha.ParkingIncluded = true; - langriSha.DescriptionEmbedding = new ReadOnlyMemory([11f, 12f, 13f, 14f, 15f, 16f, 17f, 18f]); - - // update - await hotelRecordCollection.UpsertAsync(langriSha); - - // this is not great but no vectors are added so we can't query status for number of vectors like we do for insert/delete - await Task.Delay(2000); - - var updated = await hotelRecordCollection.GetAsync("langri-sha", new GetRecordOptions { IncludeVectors = true }); - - Assert.NotNull(updated); - Assert.Equal(langriSha.HotelName, updated.HotelName); - Assert.Equal(langriSha.Description, updated.Description); - Assert.Equal(langriSha.HotelCode, updated.HotelCode); - Assert.Equal(langriSha.HotelRating, updated.HotelRating); - Assert.Equal(langriSha.ParkingIncluded, updated.ParkingIncluded); - Assert.Equal(langriSha.DescriptionEmbedding, updated.DescriptionEmbedding); - - // delete - await hotelRecordCollection.DeleteAsync("langri-sha"); - - await this.Fixture.VerifyVectorCountModifiedAsync(vectorCountBefore, delta: -1); - } - - [VectorStoreTheory] - [InlineData(true, true)] - [InlineData(true, false)] - [InlineData(false, true)] - [InlineData(false, false)] - public async Task VectorizedSearchAsync(bool collectionFromVectorStore, bool includeVectors) - { - // Arrange. - var hotelRecordCollection = collectionFromVectorStore - ? this.Fixture.HotelRecordCollectionFromVectorStore - : this.Fixture.HotelRecordCollection; - var searchVector = new ReadOnlyMemory([17.5f, 721.0f, 731.5f, 742.0f, 762.5f, 783.0f, 793.5f, 704.0f]); - - // Act. - var actual = await hotelRecordCollection.VectorizedSearchAsync(searchVector, new() { IncludeVectors = includeVectors }); - var searchResults = await actual.Results.ToListAsync(); - var searchResultRecord = searchResults.First().Record; - - Assert.Equal("Vacation Inn Hotel", searchResultRecord.HotelName); - Assert.Equal("On vacation? 
Stay with us.", searchResultRecord.Description); - Assert.Equal(11, searchResultRecord.HotelCode); - Assert.Equal(4.3f, searchResultRecord.HotelRating); - Assert.True(searchResultRecord.ParkingIncluded); - Assert.Contains("wi-fi", searchResultRecord.Tags); - Assert.Contains("breakfast", searchResultRecord.Tags); - Assert.Contains("gym", searchResultRecord.Tags); - Assert.Equal(includeVectors, searchResultRecord.DescriptionEmbedding.Length > 0); - } - - [VectorStoreTheory] - [InlineData(true)] - [InlineData(false)] - public async Task VectorizedSearchWithTopSkipAsync(bool collectionFromVectorStore) - { - // Arrange. - var hotelRecordCollection = collectionFromVectorStore - ? this.Fixture.HotelRecordCollectionFromVectorStore - : this.Fixture.HotelRecordCollection; - var searchVector = new ReadOnlyMemory([17.5f, 721.0f, 731.5f, 742.0f, 762.5f, 783.0f, 793.5f, 704.0f]); - - // Act. - var actual = await hotelRecordCollection.VectorizedSearchAsync(searchVector, new() { Skip = 1, Top = 1 }); - var searchResults = await actual.Results.ToListAsync(); - Assert.Single(searchResults); - var searchResultRecord = searchResults.First().Record; - Assert.Equal("Best Eastern Hotel", searchResultRecord.HotelName); - } - - [VectorStoreTheory] - [InlineData(true)] - [InlineData(false)] - public async Task VectorizedSearchWithFilterAsync(bool collectionFromVectorStore) - { - // Arrange. - var hotelRecordCollection = collectionFromVectorStore - ? this.Fixture.HotelRecordCollectionFromVectorStore - : this.Fixture.HotelRecordCollection; - var searchVector = new ReadOnlyMemory([17.5f, 721.0f, 731.5f, 742.0f, 762.5f, 783.0f, 793.5f, 704.0f]); - - // Act. - var filter = new VectorSearchFilter().EqualTo(nameof(PineconeHotel.HotelCode), 42); - var actual = await hotelRecordCollection.VectorizedSearchAsync(searchVector, new() { Top = 1, OldFilter = filter }); - var searchResults = await actual.Results.ToListAsync(); - Assert.Single(searchResults); - var searchResultRecord = searchResults.First().Record; - Assert.Equal("Best Eastern Hotel", searchResultRecord.HotelName); - } - - [VectorStoreFact] - public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() - { - var merryYacht = new VectorStoreGenericDataModel("merry-yacht") - { - Data = - { - ["HotelName"] = "Merry Yacht Hotel", - ["Description"] = "Stay afloat at the Merry Yacht Hotel", - ["HotelCode"] = 101, - ["HotelRating"] = 4.2f, - ["ParkingIncluded"] = true, - ["Tags"] = new[] { "wi-fi", "breakfast", "gym" } - }, - Vectors = - { - ["DescriptionEmbedding"] = new ReadOnlyMemory([1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f]) - } - }; - - var stats = await this.Fixture.Index.DescribeStats(); - var vectorCountBefore = stats.TotalVectorCount; - - var hotelRecordCollection = this.Fixture.HotelRecordCollectionWithGenericDataModel; - - // insert - await hotelRecordCollection.UpsertAsync(merryYacht); - - vectorCountBefore = await this.Fixture.VerifyVectorCountModifiedAsync(vectorCountBefore, delta: 1); - - var inserted = await hotelRecordCollection.GetAsync("merry-yacht", new GetRecordOptions { IncludeVectors = true }); - - Assert.NotNull(inserted); - Assert.Equal(merryYacht.Data["HotelName"], inserted.Data["HotelName"]); - Assert.Equal(merryYacht.Data["Description"], inserted.Data["Description"]); - Assert.Equal(merryYacht.Data["HotelCode"], inserted.Data["HotelCode"]); - Assert.Equal(merryYacht.Data["HotelRating"], inserted.Data["HotelRating"]); - Assert.Equal(merryYacht.Data["ParkingIncluded"], inserted.Data["ParkingIncluded"]); - Assert.Equal(merryYacht.Data["Tags"], 
inserted.Data["Tags"]); - Assert.Equal( - ((ReadOnlyMemory)merryYacht.Vectors["DescriptionEmbedding"]!).ToArray(), - ((ReadOnlyMemory)inserted.Vectors["DescriptionEmbedding"]!).ToArray()); - - // delete - await hotelRecordCollection.DeleteAsync("merry-yacht"); - - await this.Fixture.VerifyVectorCountModifiedAsync(vectorCountBefore, delta: -1); - } - - [VectorStoreFact] - public async Task UseCollectionExistsOnNonExistingStoreReturnsFalseAsync() - { - var incorrectRecordStore = new PineconeVectorStoreRecordCollection( - this.Fixture.Client, - "incorrect"); - - var result = await incorrectRecordStore.CollectionExistsAsync(); - - Assert.False(result); - } - - [VectorStoreFact] - public async Task UseNonExistingIndexThrowsAsync() - { - var incorrectRecordStore = new PineconeVectorStoreRecordCollection( - this.Fixture.Client, - "incorrect"); - - var statusCode = (await Assert.ThrowsAsync( - () => incorrectRecordStore.GetAsync("best-eastern"))).StatusCode; - - Assert.Equal(HttpStatusCode.NotFound, statusCode); - } - - [VectorStoreFact] - public async Task UseRecordStoreWithCustomMapperAsync() - { - var recordStore = new PineconeVectorStoreRecordCollection( - this.Fixture.Client, - this.Fixture.IndexName, - new PineconeVectorStoreRecordCollectionOptions { VectorCustomMapper = new CustomHotelRecordMapper() }); - - var vacationInn = await recordStore.GetAsync("vacation-inn", new GetRecordOptions { IncludeVectors = true }); - - Assert.NotNull(vacationInn); - Assert.Equal("Custom Vacation Inn Hotel", vacationInn.HotelName); - Assert.Equal("On vacation? Stay with us.", vacationInn.Description); - Assert.Equal(11, vacationInn.HotelCode); - Assert.Equal(4.3f, vacationInn.HotelRating); - Assert.True(vacationInn.ParkingIncluded); - Assert.Contains("wi-fi", vacationInn.Tags); - Assert.Contains("breakfast", vacationInn.Tags); - Assert.Contains("gym", vacationInn.Tags); - } - - private sealed class CustomHotelRecordMapper : IVectorStoreRecordMapper - { - public Vector MapFromDataToStorageModel(PineconeHotel dataModel) - { - var metadata = new MetadataMap - { - [nameof(PineconeHotel.HotelName)] = dataModel.HotelName, - [nameof(PineconeHotel.Description)] = dataModel.Description, - [nameof(PineconeHotel.HotelCode)] = dataModel.HotelCode, - [nameof(PineconeHotel.HotelRating)] = dataModel.HotelRating, - ["parking_is_included"] = dataModel.ParkingIncluded, - [nameof(PineconeHotel.Tags)] = dataModel.Tags.ToArray(), - }; - - return new Vector - { - Id = dataModel.HotelId, - Values = dataModel.DescriptionEmbedding.ToArray(), - Metadata = metadata, - }; - } - - public PineconeHotel MapFromStorageToDataModel(Vector storageModel, StorageToDataModelMapperOptions options) - { - if (storageModel.Metadata == null) - { - throw new InvalidOperationException("Missing metadata."); - } - - return new PineconeHotel - { - HotelId = storageModel.Id, - HotelName = "Custom " + (string)storageModel.Metadata[nameof(PineconeHotel.HotelName)].Inner!, - Description = (string)storageModel.Metadata[nameof(PineconeHotel.Description)].Inner!, - HotelCode = (int)(double)storageModel.Metadata[nameof(PineconeHotel.HotelCode)].Inner!, - HotelRating = (float)(double)storageModel.Metadata[nameof(PineconeHotel.HotelRating)].Inner!, - ParkingIncluded = (bool)storageModel.Metadata["parking_is_included"].Inner!, - Tags = ((MetadataValue[])storageModel.Metadata[nameof(PineconeHotel.Tags)].Inner!)!.Select(x => (string)x.Inner!).ToList(), - }; - } - } - - #region Negative - - [VectorStoreFact] - public void UseRecordWithNoEmbeddingThrows() - { - var 
exception = Assert.Throws( - () => new PineconeVectorStoreRecordCollection( - this.Fixture.Client, - "Whatever")); - - Assert.Equal( - $"No vector property found on type {nameof(PineconeRecordNoEmbedding)} or the provided VectorStoreRecordDefinition while at least one is required.", - exception.Message); - } - -#pragma warning disable CA1812 - private sealed record PineconeRecordNoEmbedding - { - [VectorStoreRecordKey] - public int Id { get; set; } - - [VectorStoreRecordData] - public string? Name { get; set; } - } -#pragma warning restore CA1812 - - [VectorStoreFact] - public void UseRecordWithMultipleEmbeddingsThrows() - { - var exception = Assert.Throws( - () => new PineconeVectorStoreRecordCollection( - this.Fixture.Client, - "Whatever")); - - Assert.Equal( - $"Multiple vector properties found on type {nameof(PineconeRecordMultipleEmbeddings)} or the provided VectorStoreRecordDefinition while only one is supported.", - exception.Message); - } - -#pragma warning disable CA1812 - private sealed record PineconeRecordMultipleEmbeddings - { - [VectorStoreRecordKey] - public string Id { get; set; } = null!; - - [VectorStoreRecordVector] - public ReadOnlyMemory Embedding1 { get; set; } - - [VectorStoreRecordVector] - public ReadOnlyMemory Embedding2 { get; set; } - } -#pragma warning restore CA1812 - - [VectorStoreFact] - public void UseRecordWithUnsupportedKeyTypeThrows() - { - var message = Assert.Throws( - () => new PineconeVectorStoreRecordCollection( - this.Fixture.Client, - "Whatever")).Message; - - Assert.Equal( - $"Key properties must be one of the supported types: {typeof(string).FullName}. Type of the property '{nameof(PineconeRecordUnsupportedKeyType.Id)}' is {typeof(int).FullName}.", - message); - } - -#pragma warning disable CA1812 - private sealed record PineconeRecordUnsupportedKeyType - { - [VectorStoreRecordKey] - public int Id { get; set; } - - [VectorStoreRecordData] - public string? 
Name { get; set; } - - [VectorStoreRecordVector] - public ReadOnlyMemory Embedding { get; set; } - } -#pragma warning restore CA1812 - - [VectorStoreFact] - public async Task TryAddingVectorWithUnsupportedValuesAsync() - { - var badAllTypes = new PineconeAllTypes - { - Id = "bad", - BoolProperty = true, - DecimalProperty = 1m, - DoubleProperty = 1.5d, - FloatProperty = 2.5f, - IntProperty = 1, - LongProperty = 11L, - NullableStringArray = ["foo", null!, "bar",], - Embedding = new ReadOnlyMemory([1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f]) - }; - - var exception = await Assert.ThrowsAsync( - () => this.Fixture.AllTypesRecordCollection.UpsertAsync(badAllTypes)); - - Assert.Equal("Microsoft.SemanticKernel.Connectors.Pinecone", exception.Source); - Assert.Equal("Pinecone", exception.VectorStoreType); - Assert.Equal("Upsert", exception.OperationName); - Assert.Equal(this.Fixture.IndexName, exception.CollectionName); - - var inner = exception.InnerException as RpcException; - Assert.NotNull(inner); - Assert.Equal(StatusCode.InvalidArgument, inner.StatusCode); - } - - [VectorStoreFact] - public async Task TryCreateIndexWithIncorrectDimensionFailsAsync() - { - var recordCollection = new PineconeVectorStoreRecordCollection( - this.Fixture.Client, - "negative-dimension"); - - var message = (await Assert.ThrowsAsync(() => recordCollection.CreateCollectionAsync())).Message; - - Assert.Equal("Property Dimensions on VectorStoreRecordVectorProperty 'Embedding' must be set to a positive integer to create a collection.", message); - } - -#pragma warning disable CA1812 - private sealed record PineconeRecordWithIncorrectDimension - { - [VectorStoreRecordKey] - public string Id { get; set; } = null!; - - [VectorStoreRecordData] - public string? Name { get; set; } - - [VectorStoreRecordVector(Dimensions: -7)] - public ReadOnlyMemory Embedding { get; set; } - } -#pragma warning restore CA1812 - - [VectorStoreFact] - public async Task TryCreateIndexWithUnsSupportedMetricFailsAsync() - { - var recordCollection = new PineconeVectorStoreRecordCollection( - this.Fixture.Client, - "bad-metric"); - - var message = (await Assert.ThrowsAsync(() => recordCollection.CreateCollectionAsync())).Message; - - Assert.Equal("Distance function 'just eyeball it' for VectorStoreRecordVectorProperty 'Embedding' is not supported by the Pinecone VectorStore.", message); - } - -#pragma warning disable CA1812 - private sealed record PineconeRecordWithUnsupportedMetric - { - [VectorStoreRecordKey] - public string Id { get; set; } = null!; - - [VectorStoreRecordData] - public string? Name { get; set; } - - [VectorStoreRecordVector(Dimensions: 5, DistanceFunction: "just eyeball it")] - public ReadOnlyMemory Embedding { get; set; } - } -#pragma warning restore CA1812 - - #endregion -} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreTests.cs deleted file mode 100644 index 4cd63ec6b8a9..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreTests.cs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
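For orientation, the deleted tests above exercise the simplified record-collection surface this change introduces: the per-call UpsertRecordOptions/DeleteRecordOptions parameters are gone and callers pass the CancellationToken directly. A minimal sketch of the new call shape, assuming an already-constructed collection and records (the collection, hotel, and hotels names below are hypothetical, not taken from the tests):

    // Sketch only: collection, hotel, and hotels are assumed to exist already.
    static async Task TouchRecordsAsync(
        IVectorStoreRecordCollection<string, PineconeHotel> collection,
        PineconeHotel hotel,
        IEnumerable<PineconeHotel> hotels,
        CancellationToken ct)
    {
        // Previously UpsertAsync(hotel, options: null, ct); the options parameter no longer exists.
        await collection.UpsertAsync(hotel, ct);

        // Batch upsert streams back the keys of the upserted records.
        await foreach (var key in collection.UpsertBatchAsync(hotels, ct)) { }

        // Previously DeleteAsync(key, options: null, ct).
        await collection.DeleteAsync(hotel.HotelId, ct);
        await collection.DeleteBatchAsync(new[] { "five-seasons", "vacation-inn" }, ct);
    }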
- -using System; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Connectors.Pinecone; -using SemanticKernel.IntegrationTests.Connectors.Memory.Pinecone.Xunit; -using SemanticKernel.IntegrationTests.Connectors.Memory.Xunit; -using Xunit; -using Sdk = Pinecone; - -namespace SemanticKernel.IntegrationTests.Connectors.Memory.Pinecone; - -[Collection("PineconeVectorStoreTests")] -[PineconeApiKeySetCondition] -public class PineconeVectorStoreTests(PineconeVectorStoreFixture fixture) - : BaseVectorStoreTests(new PineconeVectorStore(fixture.Client)), IClassFixture -{ - private PineconeVectorStoreFixture Fixture { get; } = fixture; - -#pragma warning disable CS0618 // IPineconeVectorStoreRecordCollectionFactory is obsolete - [VectorStoreFact] - public void CreateCollectionUsingFactory() - { - var vectorStore = new PineconeVectorStore( - this.Fixture.Client, - new PineconeVectorStoreOptions - { - VectorStoreCollectionFactory = new MyVectorStoreRecordCollectionFactory() - }); - - var factoryCollection = vectorStore.GetCollection(this.Fixture.IndexName); - - Assert.NotNull(factoryCollection); - Assert.Equal("factory" + this.Fixture.IndexName, factoryCollection.CollectionName); - } - - private sealed class MyVectorStoreRecordCollectionFactory : IPineconeVectorStoreRecordCollectionFactory - { - public IVectorStoreRecordCollection CreateVectorStoreRecordCollection( - Sdk.PineconeClient pineconeClient, - string name, - VectorStoreRecordDefinition? vectorStoreRecordDefinition) - where TKey : notnull - { - if (typeof(TKey) != typeof(string)) - { - throw new InvalidOperationException("Only string keys are supported."); - } - - return (new PineconeVectorStoreRecordCollection(pineconeClient, "factory" + name) as IVectorStoreRecordCollection)!; - } - } -#pragma warning restore CS0618 -} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/Xunit/PineconeApiKeySetConditionAttribute.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/Xunit/PineconeApiKeySetConditionAttribute.cs deleted file mode 100644 index b677c47c378f..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/Xunit/PineconeApiKeySetConditionAttribute.cs +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Threading.Tasks; -using SemanticKernel.IntegrationTests.Connectors.Memory.Xunit; - -namespace SemanticKernel.IntegrationTests.Connectors.Memory.Pinecone.Xunit; - -[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class)] -public sealed class PineconeApiKeySetConditionAttribute : Attribute, ITestCondition -{ - public ValueTask IsMetAsync() - { - var isMet = PineconeUserSecretsExtensions.ContainsPineconeApiKey(); - - return ValueTask.FromResult(isMet); - } - - public string SkipReason - => $"Pinecone API key was not specified in user secrets. 
Use the following command to set it: dotnet user-secrets set \"{PineconeUserSecretsExtensions.PineconeApiKeyUserSecretEntry}\" \"your_Pinecone_API_key\""; -} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_NonStreamingTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_NonStreamingTests.cs index e0f371252982..80dc48fbf39f 100644 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_NonStreamingTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_NonStreamingTests.cs @@ -140,19 +140,39 @@ public async Task TextGenerationShouldReturnMetadataAsync() Assert.Empty((logProbabilityInfo as IReadOnlyList)!); } + [Fact] + public async Task ChatCompletionWithWebSearchAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(modelIdOverride: "gpt-4o-mini-search-preview"); + var chatService = kernel.Services.GetRequiredService(); + var settings = new OpenAIPromptExecutionSettings + { + WebSearchOptions = new ChatWebSearchOptions() + }; + + // Act + var result = await chatService.GetChatMessageContentAsync("What are the top 3 trending news currently", settings, kernel); + + // Assert + var chatCompletion = Assert.IsType(result.InnerContent); + Assert.NotNull(chatCompletion); + Assert.NotEmpty(chatCompletion.Annotations); + } + #region internals - private Kernel CreateAndInitializeKernel() + private Kernel CreateAndInitializeKernel(string? modelIdOverride = null) { var OpenAIConfiguration = this._configuration.GetSection("OpenAI").Get(); Assert.NotNull(OpenAIConfiguration); - Assert.NotNull(OpenAIConfiguration.ChatModelId!); + Assert.NotNull(modelIdOverride ?? OpenAIConfiguration.ChatModelId!); Assert.NotNull(OpenAIConfiguration.ApiKey); var kernelBuilder = base.CreateKernelBuilder(); kernelBuilder.AddOpenAIChatCompletion( - modelId: OpenAIConfiguration.ChatModelId, + modelId: modelIdOverride ?? OpenAIConfiguration.ChatModelId!, apiKey: OpenAIConfiguration.ApiKey); return kernelBuilder.Build(); diff --git a/dotnet/src/IntegrationTests/IntegrationTests.csproj b/dotnet/src/IntegrationTests/IntegrationTests.csproj index 9f4e835df255..917e2447fea3 100644 --- a/dotnet/src/IntegrationTests/IntegrationTests.csproj +++ b/dotnet/src/IntegrationTests/IntegrationTests.csproj @@ -53,7 +53,6 @@ - @@ -70,7 +69,6 @@ - diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/KernelVerify.cs b/dotnet/src/InternalUtilities/src/Diagnostics/KernelVerify.cs new file mode 100644 index 000000000000..1cb1c96ae181 --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Diagnostics/KernelVerify.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; +using System.Text.RegularExpressions; + +namespace Microsoft.SemanticKernel; + +[ExcludeFromCodeCoverage] +internal static partial class KernelVerify +{ +#if NET + [GeneratedRegex("^[0-9A-Za-z_]*$")] + private static partial Regex AsciiLettersDigitsUnderscoresRegex(); +#else + private static Regex AsciiLettersDigitsUnderscoresRegex() => s_asciiLettersDigitsUnderscoresRegex; + private static readonly Regex s_asciiLettersDigitsUnderscoresRegex = new("^[0-9A-Za-z_]*$", RegexOptions.Compiled); +#endif + + internal static void ValidPluginName([NotNull] string? pluginName, IReadOnlyKernelPluginCollection? plugins = null, [CallerArgumentExpression(nameof(pluginName))] string? 
paramName = null) + { + Verify.NotNullOrWhiteSpace(pluginName); + if (!AsciiLettersDigitsUnderscoresRegex().IsMatch(pluginName)) + { + Verify.ThrowArgumentInvalidName("plugin name", pluginName, paramName); + } + + if (plugins is not null && plugins.Contains(pluginName)) + { + throw new ArgumentException($"A plugin with the name '{pluginName}' already exists."); + } + } + + internal static void ValidFunctionName([NotNull] string? functionName, [CallerArgumentExpression(nameof(functionName))] string? paramName = null) + { + Verify.NotNullOrWhiteSpace(functionName); + if (!AsciiLettersDigitsUnderscoresRegex().IsMatch(functionName)) + { + Verify.ThrowArgumentInvalidName("function name", functionName, paramName); + } + } + + /// + /// Make sure every function parameter name is unique + /// + /// List of parameters + internal static void ParametersUniqueness(IReadOnlyList parameters) + { + int count = parameters.Count; + if (count > 0) + { + var seen = new HashSet(StringComparer.OrdinalIgnoreCase); + for (int i = 0; i < count; i++) + { + KernelParameterMetadata p = parameters[i]; + if (string.IsNullOrWhiteSpace(p.Name)) + { + string paramName = $"{nameof(parameters)}[{i}].{p.Name}"; + if (p.Name is null) + { + Verify.ThrowArgumentNullException(paramName); + } + else + { + Verify.ThrowArgumentWhiteSpaceException(paramName); + } + } + + if (!seen.Add(p.Name)) + { + throw new ArgumentException($"The function has two or more parameters with the same name '{p.Name}'"); + } + } + } + } +} diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/LoggingExtensions.cs b/dotnet/src/InternalUtilities/src/Diagnostics/LoggingExtensions.cs new file mode 100644 index 000000000000..8fa8c4a4125c --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Diagnostics/LoggingExtensions.cs @@ -0,0 +1,137 @@ +// Copyright (c) Microsoft. All rights reserved. 
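KernelVerify, added just above, takes over the kernel-specific name and parameter checks that the later hunks delete from Verify. A minimal sketch of how the relocated helpers are invoked inside the kernel assemblies, mirroring the call sites updated further down in this patch; the kernel instance and the parameter list are illustrative assumptions:

    // Throws ArgumentException for names outside [0-9A-Za-z_] or for a duplicate plugin name.
    KernelVerify.ValidPluginName("WeatherPlugin", kernel.Plugins);
    KernelVerify.ValidFunctionName("GetForecast");

    // Rejects null/whitespace parameter names and duplicates (case-insensitive comparison).
    KernelVerify.ParametersUniqueness(new List<KernelParameterMetadata>
    {
        new("city"),
        new("days"),
    });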
+ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; + +namespace Microsoft.SemanticKernel.Diagnostics; + +[ExcludeFromCodeCoverage] +internal static partial class LoggingExtensions +{ + internal static async Task RunWithLoggingAsync( + ILogger logger, + string operationName, + Func operation) + { + logger.LogInvoked(operationName); + + try + { + await operation().ConfigureAwait(false); + + logger.LogCompleted(operationName); + } + catch (OperationCanceledException) + { + logger.LogInvocationCanceled(operationName); + throw; + } + catch (Exception ex) + { + logger.LogInvocationFailed(operationName, ex); + throw; + } + } + + internal static async Task RunWithLoggingAsync( + ILogger logger, + string operationName, + Func> operation) + { + logger.LogInvoked(operationName); + + try + { + var result = await operation().ConfigureAwait(false); + + logger.LogCompleted(operationName); + + return result; + } + catch (OperationCanceledException) + { + logger.LogInvocationCanceled(operationName); + throw; + } + catch (Exception ex) + { + logger.LogInvocationFailed(operationName, ex); + throw; + } + } + + internal static async IAsyncEnumerable RunWithLoggingAsync( + ILogger logger, + string operationName, + Func> operation, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + logger.LogInvoked(operationName); + + IAsyncEnumerator enumerator; + + try + { + enumerator = operation().GetAsyncEnumerator(cancellationToken); + } + catch (OperationCanceledException) + { + logger.LogInvocationCanceled(operationName); + throw; + } + catch (Exception ex) + { + logger.LogInvocationFailed(operationName, ex); + throw; + } + + try + { + while (true) + { + try + { + if (!await enumerator.MoveNextAsync().ConfigureAwait(false)) + { + break; + } + } + catch (OperationCanceledException) + { + logger.LogInvocationCanceled(operationName); + throw; + } + catch (Exception ex) + { + logger.LogInvocationFailed(operationName, ex); + throw; + } + + yield return enumerator.Current; + } + + logger.LogCompleted(operationName); + } + finally + { + await enumerator.DisposeAsync().ConfigureAwait(false); + } + } + + [LoggerMessage(LogLevel.Debug, "{OperationName} invoked.")] + private static partial void LogInvoked(this ILogger logger, string operationName); + + [LoggerMessage(LogLevel.Debug, "{OperationName} completed.")] + private static partial void LogCompleted(this ILogger logger, string operationName); + + [LoggerMessage(LogLevel.Debug, "{OperationName} canceled.")] + private static partial void LogInvocationCanceled(this ILogger logger, string operationName); + + [LoggerMessage(LogLevel.Error, "{OperationName} failed.")] + private static partial void LogInvocationFailed(this ILogger logger, string operationName, Exception exception); +} diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs b/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs index f90895504ead..e5f12e73c411 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs @@ -14,15 +14,9 @@ namespace Microsoft.SemanticKernel; internal static partial class Verify { #if NET - [GeneratedRegex("^[0-9A-Za-z_]*$")] - private static partial Regex AsciiLettersDigitsUnderscoresRegex(); - [GeneratedRegex("^[^.]+\\.[^.]+$")] private static partial Regex FilenameRegex(); #else - private static Regex 
AsciiLettersDigitsUnderscoresRegex() => s_asciiLettersDigitsUnderscoresRegex; - private static readonly Regex s_asciiLettersDigitsUnderscoresRegex = new("^[0-9A-Za-z_]*$", RegexOptions.Compiled); - private static Regex FilenameRegex() => s_filenameRegex; private static readonly Regex s_filenameRegex = new("^[^.]+\\.[^.]+$", RegexOptions.Compiled); #endif @@ -74,29 +68,6 @@ public static void True(bool condition, string message, [CallerArgumentExpressio } } - internal static void ValidPluginName([NotNull] string? pluginName, IReadOnlyKernelPluginCollection? plugins = null, [CallerArgumentExpression(nameof(pluginName))] string? paramName = null) - { - NotNullOrWhiteSpace(pluginName); - if (!AsciiLettersDigitsUnderscoresRegex().IsMatch(pluginName)) - { - ThrowArgumentInvalidName("plugin name", pluginName, paramName); - } - - if (plugins is not null && plugins.Contains(pluginName)) - { - throw new ArgumentException($"A plugin with the name '{pluginName}' already exists."); - } - } - - internal static void ValidFunctionName([NotNull] string? functionName, [CallerArgumentExpression(nameof(functionName))] string? paramName = null) - { - NotNullOrWhiteSpace(functionName); - if (!AsciiLettersDigitsUnderscoresRegex().IsMatch(functionName)) - { - ThrowArgumentInvalidName("function name", functionName, paramName); - } - } - internal static void ValidFilename([NotNull] string? filename, [CallerArgumentExpression(nameof(filename))] string? paramName = null) { NotNullOrWhiteSpace(filename); @@ -145,42 +116,8 @@ internal static void DirectoryExists(string path) } } - /// - /// Make sure every function parameter name is unique - /// - /// List of parameters - internal static void ParametersUniqueness(IReadOnlyList parameters) - { - int count = parameters.Count; - if (count > 0) - { - var seen = new HashSet(StringComparer.OrdinalIgnoreCase); - for (int i = 0; i < count; i++) - { - KernelParameterMetadata p = parameters[i]; - if (string.IsNullOrWhiteSpace(p.Name)) - { - string paramName = $"{nameof(parameters)}[{i}].{p.Name}"; - if (p.Name is null) - { - ThrowArgumentNullException(paramName); - } - else - { - ThrowArgumentWhiteSpaceException(paramName); - } - } - - if (!seen.Add(p.Name)) - { - throw new ArgumentException($"The function has two or more parameters with the same name '{p.Name}'"); - } - } - } - } - [DoesNotReturn] - private static void ThrowArgumentInvalidName(string kind, string name, string? paramName) => + internal static void ThrowArgumentInvalidName(string kind, string name, string? paramName) => throw new ArgumentException($"A {kind} can contain only ASCII letters, digits, and underscores: '{name}' is not a valid name.", paramName); [DoesNotReturn] diff --git a/dotnet/src/InternalUtilities/src/System/EmptyKeyedServiceProvider.cs b/dotnet/src/InternalUtilities/src/System/EmptyKeyedServiceProvider.cs new file mode 100644 index 000000000000..c7aaf6b4fd3b --- /dev/null +++ b/dotnet/src/InternalUtilities/src/System/EmptyKeyedServiceProvider.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.Extensions.DependencyInjection; + +/// Provides an implementation of that contains no services. +internal sealed class EmptyKeyedServiceProvider : IKeyedServiceProvider +{ + /// Gets a singleton instance of . + public static EmptyKeyedServiceProvider Instance { get; } = new(); + + /// + public object? GetService(Type serviceType) => null; + + /// + public object? GetKeyedService(Type serviceType, object? 
serviceKey) => null; + + /// + public object GetRequiredKeyedService(Type serviceType, object? serviceKey) => + this.GetKeyedService(serviceType, serviceKey) ?? + throw new InvalidOperationException($"No service for type '{serviceType}' and key '{serviceKey}' has been registered."); +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs index a0a425aca1ec..dc9fabf8ca45 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs @@ -151,7 +151,7 @@ internal KernelFunction(string name, string description, IReadOnlyList parameters, KernelReturnParameterMetadata? returnParameter = null, Dictionary? executionSettings = null, ReadOnlyDictionary? additionalMetadata = null) { Verify.NotNull(name); - Verify.ParametersUniqueness(parameters); + KernelVerify.ParametersUniqueness(parameters); this.Metadata = new KernelFunctionMetadata(name) { @@ -187,7 +187,7 @@ internal KernelFunction(string name, string? pluginName, string description, IRe internal KernelFunction(string name, string? pluginName, string description, IReadOnlyList parameters, JsonSerializerOptions jsonSerializerOptions, KernelReturnParameterMetadata? returnParameter = null, Dictionary? executionSettings = null, ReadOnlyDictionary? additionalMetadata = null) { Verify.NotNull(name); - Verify.ParametersUniqueness(parameters); + KernelVerify.ParametersUniqueness(parameters); Verify.NotNull(jsonSerializerOptions); this.Metadata = new KernelFunctionMetadata(name) diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionMetadata.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionMetadata.cs index cae651f74fea..034eeb72833a 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionMetadata.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionMetadata.cs @@ -58,7 +58,7 @@ public string Name init { Verify.NotNull(value); - Verify.ValidFunctionName(value); + KernelVerify.ValidFunctionName(value); this._name = value; } } diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPlugin.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPlugin.cs index c86faaf03065..1b6aab3c87a3 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPlugin.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPlugin.cs @@ -29,7 +29,7 @@ public abstract class KernelPlugin : IEnumerable /// is an invalid plugin name. protected KernelPlugin(string name, string? description = null) { - Verify.ValidPluginName(name); + KernelVerify.ValidPluginName(name); this.Name = name; this.Description = !string.IsNullOrWhiteSpace(description) ? description! : ""; diff --git a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs index 80e9652519b6..bd61fe1697f2 100644 --- a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs +++ b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs @@ -464,7 +464,7 @@ private KernelFunctionFromMethod( ReadOnlyDictionary? 
additionalMetadata = null) : base(functionName, pluginName, description, parameters, returnParameter, additionalMetadata: additionalMetadata) { - Verify.ValidFunctionName(functionName); + KernelVerify.ValidFunctionName(functionName); this._function = implementationFunc; } @@ -480,7 +480,7 @@ private KernelFunctionFromMethod( ReadOnlyDictionary? additionalMetadata = null) : base(functionName, pluginName, description, parameters, jsonSerializerOptions, returnParameter, additionalMetadata: additionalMetadata) { - Verify.ValidFunctionName(functionName); + KernelVerify.ValidFunctionName(functionName); this._function = implementationFunc; } @@ -519,7 +519,7 @@ private static MethodDetails GetMethodDetails(string? functionName, MethodInfo m } } - Verify.ValidFunctionName(functionName); + KernelVerify.ValidFunctionName(functionName); // Build up a list of KernelParameterMetadata for the parameters we expect to be populated // from arguments. Some arguments are populated specially, not from arguments, and thus @@ -540,7 +540,7 @@ private static MethodDetails GetMethodDetails(string? functionName, MethodInfo m } // Check for param names conflict - Verify.ParametersUniqueness(argParameterViews); + KernelVerify.ParametersUniqueness(argParameterViews); // Get the return type and a marshaling func for the return value. (Type returnType, Func> returnFunc) = GetReturnValueMarshalerDelegate(method); diff --git a/dotnet/src/SemanticKernel.Core/Functions/KernelPluginFactory.cs b/dotnet/src/SemanticKernel.Core/Functions/KernelPluginFactory.cs index ce3262130ddb..d03f958c1fa0 100644 --- a/dotnet/src/SemanticKernel.Core/Functions/KernelPluginFactory.cs +++ b/dotnet/src/SemanticKernel.Core/Functions/KernelPluginFactory.cs @@ -239,7 +239,7 @@ static void AppendWithoutArity(StringBuilder builder, string name) Verify.NotNull(target); pluginName ??= CreatePluginName(target.GetType()); - Verify.ValidPluginName(pluginName); + KernelVerify.ValidPluginName(pluginName); MethodInfo[] methods = target.GetType().GetMethods(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Static); diff --git a/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj b/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj index 268c2e470314..2a5d5d03d961 100644 --- a/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj +++ b/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj @@ -5,7 +5,6 @@ Microsoft.SemanticKernel.Core Microsoft.SemanticKernel net8.0;netstandard2.0 - true true $(NoWarn);SKEXP0001,SKEXP0120 true diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/ExceptionConverterTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/ExceptionConverterTests.cs index 0cfb12d22b15..26b0425ad781 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Utilities/ExceptionConverterTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/ExceptionConverterTests.cs @@ -21,7 +21,9 @@ public ExceptionJsonConverterTests() #pragma warning disable CA1031 // Do not catch general exception types try { +#pragma warning disable JSON001 // Invalid JSON pattern JsonSerializer.Deserialize("invalid_json"); +#pragma warning restore JSON001 // Invalid JSON pattern } catch (Exception ex) { diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/FakeLogger.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/FakeLogger.cs new file mode 100644 index 000000000000..d9c44bf6a560 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/FakeLogger.cs @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft. 
All rights reserved. + +using System; +using System.Collections.Generic; +using Microsoft.Extensions.Logging; + +namespace SemanticKernel.UnitTests.Utilities; + +public class FakeLogger : ILogger +{ + public List<(LogLevel Level, string Message, Exception? Exception)> Logs { get; } = new(); + + public IDisposable? BeginScope(TState state) where TState : notnull => null; + + public bool IsEnabled(LogLevel logLevel) => true; + + public void Log( + LogLevel logLevel, + EventId eventId, + TState state, + Exception? exception, + Func formatter) + { + var message = formatter(state, exception); + this.Logs.Add((logLevel, message, exception)); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/LoggingExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/LoggingExtensionsTests.cs new file mode 100644 index 000000000000..8a6e09c013c8 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/LoggingExtensionsTests.cs @@ -0,0 +1,239 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Diagnostics; +using Xunit; + +namespace SemanticKernel.UnitTests.Utilities; + +public class LoggingExtensionsTests +{ + [Fact] + public async Task RunWithLoggingVoidLogsSuccess() + { + // Arrange + var logger = new FakeLogger(); + static Task Operation() => Task.CompletedTask; + + // Act + await LoggingExtensions.RunWithLoggingAsync(logger, "TestOperation", Operation); + + // Assert + var logs = logger.Logs; + Assert.Equal(2, logs.Count); + Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Debug, logs[1].Level); + Assert.Equal("TestOperation completed.", logs[1].Message); + Assert.Null(logs[1].Exception); + } + + [Fact] + public async Task RunWithLoggingVoidLogsException() + { + // Arrange + var logger = new FakeLogger(); + static Task Operation() => throw new InvalidOperationException("Test error"); + + // Act & Assert + var exception = await Assert.ThrowsAsync(() => + LoggingExtensions.RunWithLoggingAsync(logger, "TestOperation", Operation)); + + Assert.Equal("Test error", exception.Message); + + var logs = logger.Logs; + Assert.Equal(2, logs.Count); + Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Error, logs[1].Level); + Assert.Equal("TestOperation failed.", logs[1].Message); + Assert.Equal("Test error", logs[1].Exception?.Message); + } + + [Fact] + public async Task RunWithLoggingVoidLogsCancellation() + { + // Arrange + var logger = new FakeLogger(); + using var cts = new CancellationTokenSource(); + Task Operation() => Task.FromCanceled(cts.Token); + cts.Cancel(); + + // Act & Assert + await Assert.ThrowsAsync(() => + LoggingExtensions.RunWithLoggingAsync(logger, "TestOperation", Operation)); + + var logs = logger.Logs; + Assert.Equal(2, logs.Count); + Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Debug, logs[1].Level); + Assert.Equal("TestOperation canceled.", logs[1].Message); + Assert.Null(logs[1].Exception); + } + + [Fact] + public async Task RunWithLoggingWithResultReturnsValue() + { + // Arrange + var logger = new 
FakeLogger(); + static Task Operation() => Task.FromResult(42); + + // Act + var result = await LoggingExtensions.RunWithLoggingAsync(logger, "TestOperation", Operation); + + // Assert + Assert.Equal(42, result); + + var logs = logger.Logs; + + Assert.Equal(2, logs.Count); + Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Debug, logs[1].Level); + Assert.Equal("TestOperation completed.", logs[1].Message); + Assert.Null(logs[1].Exception); + } + + [Fact] + public async Task RunWithLoggingWithResultLogsException() + { + // Arrange + var logger = new FakeLogger(); + static Task Operation() => throw new InvalidOperationException("Test error"); + + // Act & Assert + var exception = await Assert.ThrowsAsync(() => + LoggingExtensions.RunWithLoggingAsync(logger, "TestOperation", Operation)); + + Assert.Equal("Test error", exception.Message); + + var logs = logger.Logs; + Assert.Equal(2, logs.Count); + Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Error, logs[1].Level); + Assert.Equal("TestOperation failed.", logs[1].Message); + Assert.Equal("Test error", logs[1].Exception?.Message); + } + + [Fact] + public async Task RunWithLoggingEnumerableYieldsValues() + { + // Arrange + var logger = new FakeLogger(); + static async IAsyncEnumerable Operation() + { + yield return 1; + yield return 2; + await Task.CompletedTask; // Ensure async behavior + } + + // Act + var results = new List(); + await foreach (var item in LoggingExtensions.RunWithLoggingAsync(logger, "TestOperation", Operation, default)) + { + results.Add(item); + } + + // Assert + Assert.Equal(new[] { 1, 2 }, results); + + var logs = logger.Logs; + + Assert.Equal(2, logs.Count); + Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Debug, logs[1].Level); + Assert.Equal("TestOperation completed.", logs[1].Message); + Assert.Null(logs[1].Exception); + } + + [Fact] + public async Task RunWithLoggingEnumerableLogsException() + { + // Arrange + var logger = new FakeLogger(); + static async IAsyncEnumerable Operation() + { + yield return 1; + await Task.CompletedTask; + throw new InvalidOperationException("Test error"); + } + + // Act & Assert + var results = new List(); + var exception = await Assert.ThrowsAsync(async () => + { + await foreach (var item in LoggingExtensions.RunWithLoggingAsync(logger, "TestOperation", Operation, default)) + { + results.Add(item); + } + }); + + Assert.Equal("Test error", exception.Message); + Assert.Equal(new[] { 1 }, results); + + var logs = logger.Logs; + + Assert.Equal(2, logs.Count); + Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Error, logs[1].Level); + Assert.Equal("TestOperation failed.", logs[1].Message); + Assert.Equal("Test error", logs[1].Exception?.Message); + } + + [Fact] + public async Task RunWithLoggingEnumerableLogsCancellation() + { + // Arrange + var logger = new FakeLogger(); + using var cts = new CancellationTokenSource(); + static async IAsyncEnumerable Operation([EnumeratorCancellation] CancellationToken token) + { + yield return 1; + await Task.Delay(10, token); // Simulate async work + yield return 2; + } + cts.Cancel(); + + // Act & Assert + 
var results = new List(); + var exception = await Assert.ThrowsAsync(async () => + { + await foreach (var item in LoggingExtensions.RunWithLoggingAsync( + logger, + "TestOperation", + () => Operation(cts.Token), + cts.Token)) + { + results.Add(item); + } + }); + + Assert.Equal(new[] { 1 }, results); // Should yield first value before cancellation + + var logs = logger.Logs; + + Assert.Equal(2, logs.Count); + Assert.Equal(LogLevel.Debug, logs[0].Level); + Assert.Equal("TestOperation invoked.", logs[0].Message); + Assert.Null(logs[0].Exception); + Assert.Equal(LogLevel.Debug, logs[1].Level); + Assert.Equal("TestOperation canceled.", logs[1].Message); + Assert.Null(logs[1].Exception); + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeAllSupportedTypesTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeAllSupportedTypesTests.cs new file mode 100644 index 000000000000..e1dbf9efa81e --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeAllSupportedTypesTests.cs @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using PineconeIntegrationTests.Support; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace PineconeIntegrationTests.CRUD; + +public class PineconeAllSupportedTypesTests(PineconeFixture fixture) : IClassFixture +{ + [ConditionalFact] + public async Task AllTypesBatchGetAsync() + { + var collection = fixture.TestStore.DefaultVectorStore.GetCollection("all-types", PineconeAllTypes.GetRecordDefinition()); + await collection.CreateCollectionIfNotExistsAsync(); + + List records = + [ + new() + { + Id = "all-types-1", + BoolProperty = true, + NullableBoolProperty = false, + StringProperty = "string prop 1", + NullableStringProperty = "nullable prop 1", + IntProperty = 1, + NullableIntProperty = 10, + LongProperty = 100L, + NullableLongProperty = 1000L, + FloatProperty = 10.5f, + NullableFloatProperty = 100.5f, + DoubleProperty = 23.75d, + NullableDoubleProperty = 233.75d, + StringArray = ["one", "two"], + NullableStringArray = ["five", "six"], + StringList = ["eleven", "twelve"], + NullableStringList = ["fifteen", "sixteen"], + Embedding = new ReadOnlyMemory([1.5f, 2.5f, 3.5f, 4.5f, 5.5f, 6.5f, 7.5f, 8.5f]) + }, + new() + { + Id = "all-types-2", + BoolProperty = false, + NullableBoolProperty = null, + StringProperty = "string prop 2", + NullableStringProperty = null, + IntProperty = 2, + NullableIntProperty = null, + LongProperty = 200L, + NullableLongProperty = null, + FloatProperty = 20.5f, + NullableFloatProperty = null, + DoubleProperty = 43.75, + NullableDoubleProperty = null, + StringArray = [], + NullableStringArray = null, + StringList = [], + NullableStringList = null, + Embedding = new ReadOnlyMemory([10.5f, 20.5f, 30.5f, 40.5f, 50.5f, 60.5f, 70.5f, 80.5f]) + } + ]; + + await collection.UpsertBatchAsync(records).ToArrayAsync(); + + var allTypes = await collection.GetBatchAsync(records.Select(r => r.Id), new GetRecordOptions { IncludeVectors = true }).ToListAsync(); + + var allTypes1 = allTypes.Single(x => x.Id == records[0].Id); + var allTypes2 = allTypes.Single(x => x.Id == records[1].Id); + + records[0].AssertEqual(allTypes1); + records[1].AssertEqual(allTypes2); + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeBatchConformanceTests.cs new 
file mode 100644 index 000000000000..16832369b26d --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeBatchConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using PineconeIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace PineconeIntegrationTests.CRUD; + +public class PineconeBatchConformanceTests(PineconeSimpleModelFixture fixture) + : BatchConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeGenericDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeGenericDataModelConformanceTests.cs new file mode 100644 index 000000000000..d18cdb99b38f --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeGenericDataModelConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using PineconeIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace PineconeIntegrationTests.CRUD; + +public class PineconeGenericDataModelConformanceTests(PineconeGenericDataModelFixture fixture) + : GenericDataModelConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeRecordConformanceTests.cs new file mode 100644 index 000000000000..6721ea6dd359 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeRecordConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using PineconeIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace PineconeIntegrationTests.CRUD; + +public class PineconeRecordConformanceTests(PineconeSimpleModelFixture fixture) + : RecordConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Collections/PineconeCollectionConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Collections/PineconeCollectionConformanceTests.cs new file mode 100644 index 000000000000..2fbed4bfcd11 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Collections/PineconeCollectionConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using PineconeIntegrationTests.Support; +using VectorDataSpecificationTests.Collections; +using Xunit; + +namespace PineconeIntegrationTests.Collections; + +public class PineconeCollectionConformanceTests(PineconeFixture fixture) + : CollectionConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicFilterTests.cs new file mode 100644 index 000000000000..095b0d03ebd0 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicFilterTests.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using PineconeIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace PineconeIntegrationTests.Filter; + +public class PineconeBasicFilterTests(PineconeBasicFilterTests.Fixture fixture) + : BasicFilterTests(fixture), IClassFixture +{ + // Specialized Pinecone syntax for NOT over Contains ($nin) + [ConditionalFact] + public virtual Task Not_over_Contains() + => this.TestFilterAsync(r => !new[] { 8, 10 }.Contains(r.Int)); + + #region Null checking + + // Pinecone currently doesn't support null checking ({ "Foo" : null }) in vector search pre-filters + public override Task Equal_with_null_reference_type() + => Assert.ThrowsAsync(() => base.Equal_with_null_reference_type()); + + public override Task Equal_with_null_captured() + => Assert.ThrowsAsync(() => base.Equal_with_null_captured()); + + public override Task NotEqual_with_null_reference_type() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_reference_type()); + + public override Task NotEqual_with_null_captured() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_captured()); + + #endregion + + #region Not + + // Pinecone currently doesn't support NOT in vector search pre-filters + // (https://www.mongodb.com/docs/atlas/atlas-vector-search/vector-search-stage/#atlas-vector-search-pre-filter) + public override Task Not_over_And() + => Assert.ThrowsAsync(() => base.Not_over_And()); + + public override Task Not_over_Or() + => Assert.ThrowsAsync(() => base.Not_over_Or()); + + #endregion + + public override Task Contains_over_field_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_array()); + + public override Task Contains_over_field_string_List() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_List()); + + // AnyTagEqualTo not (currently) supported on Pinecone + [Obsolete("Legacy filter support")] + public override Task Legacy_AnyTagEqualTo_array() + => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_array()); + + [Obsolete("Legacy filter support")] + public override Task Legacy_AnyTagEqualTo_List() + => Assert.ThrowsAsync(() => base.Legacy_AnyTagEqualTo_List()); + + public new class Fixture : BasicFilterTests.Fixture + { + public override TestStore TestStore => PineconeTestStore.Instance; + + // https://docs.pinecone.io/troubleshooting/restrictions-on-index-names + protected override string CollectionName => "filter-tests"; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/PineconeIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/PineconeIntegrationTests.csproj new file mode 100644 index 000000000000..bc92e1816858 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/PineconeIntegrationTests.csproj @@ -0,0 +1,41 @@ + + + + + net8.0 + enable + enable + + false + true + + $(NoWarn);CA2007,SKEXP0001,SKEXP0020,VSTHRD111;CS1685 + b7762d10-e29b-4bb1-8b74-b6d69a667dd4 + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Properties/AssemblyAttributes.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Properties/AssemblyAttributes.cs new file mode 100644 index 000000000000..c94e7a708820 --- /dev/null +++ 
b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Properties/AssemblyAttributes.cs @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Xunit; + +[assembly: CollectionBehavior(DisableTestParallelization = true)] diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeAllTypes.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeAllTypes.cs new file mode 100644 index 000000000000..54d98f72c251 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeAllTypes.cs @@ -0,0 +1,102 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using Xunit; + +namespace PineconeIntegrationTests.Support; + +#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider adding the 'required' modifier or declaring as nullable. +public record PineconeAllTypes() +{ + [VectorStoreRecordKey] + public string Id { get; set; } + + [VectorStoreRecordData] + public bool BoolProperty { get; set; } + [VectorStoreRecordData] + public bool? NullableBoolProperty { get; set; } + [VectorStoreRecordData] + public string StringProperty { get; set; } + [VectorStoreRecordData] + public string? NullableStringProperty { get; set; } + [VectorStoreRecordData] + public int IntProperty { get; set; } + [VectorStoreRecordData] + public int? NullableIntProperty { get; set; } + [VectorStoreRecordData] + public long LongProperty { get; set; } + [VectorStoreRecordData] + public long? NullableLongProperty { get; set; } + [VectorStoreRecordData] + public float FloatProperty { get; set; } + [VectorStoreRecordData] + public float? NullableFloatProperty { get; set; } + [VectorStoreRecordData] + public double DoubleProperty { get; set; } + [VectorStoreRecordData] + public double? NullableDoubleProperty { get; set; } + +#pragma warning disable CA1819 // Properties should not return arrays + [VectorStoreRecordData] + public string[] StringArray { get; set; } + [VectorStoreRecordData] + public string[]? NullableStringArray { get; set; } +#pragma warning restore CA1819 // Properties should not return arrays + + [VectorStoreRecordData] + public List StringList { get; set; } + [VectorStoreRecordData] + public List? NullableStringList { get; set; } + + [VectorStoreRecordVector(Dimensions: 8, DistanceFunction: DistanceFunction.DotProductSimilarity)] + public ReadOnlyMemory? 
Embedding { get; set; } + + internal void AssertEqual(PineconeAllTypes other) + { + Assert.Equal(this.Id, other.Id); + Assert.Equal(this.BoolProperty, other.BoolProperty); + Assert.Equal(this.NullableBoolProperty, other.NullableBoolProperty); + Assert.Equal(this.StringProperty, other.StringProperty); + Assert.Equal(this.NullableStringProperty, other.NullableStringProperty); + Assert.Equal(this.IntProperty, other.IntProperty); + Assert.Equal(this.NullableIntProperty, other.NullableIntProperty); + Assert.Equal(this.LongProperty, other.LongProperty); + Assert.Equal(this.NullableLongProperty, other.NullableLongProperty); + Assert.Equal(this.FloatProperty, other.FloatProperty); + Assert.Equal(this.NullableFloatProperty, other.NullableFloatProperty); + Assert.Equal(this.DoubleProperty, other.DoubleProperty); + Assert.Equal(this.NullableDoubleProperty, other.NullableDoubleProperty); + Assert.Equal(this.StringArray, other.StringArray); + Assert.Equal(this.NullableStringArray, other.NullableStringArray); + Assert.Equal(this.StringList, other.StringList); + Assert.Equal(this.NullableStringList, other.NullableStringList); + Assert.Equal(this.Embedding!.Value.ToArray(), other.Embedding!.Value.ToArray()); + } + + internal static VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = + [ + new VectorStoreRecordKeyProperty(nameof(PineconeAllTypes.Id), typeof(string)), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.BoolProperty), typeof(bool)), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableBoolProperty), typeof(bool?)), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.StringProperty), typeof(string)), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableStringProperty), typeof(string)), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.IntProperty), typeof(int)), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableIntProperty), typeof(int?)), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.LongProperty), typeof(long)), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableLongProperty), typeof(long?)), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.FloatProperty), typeof(float)), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableFloatProperty), typeof(float?)), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.DoubleProperty), typeof(double)), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableDoubleProperty), typeof(double?)), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.StringArray), typeof(string[])), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableStringArray), typeof(string[])), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.StringList), typeof(List)), + new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableStringList), typeof(List)), + new VectorStoreRecordVectorProperty(nameof(PineconeAllTypes.Embedding), typeof(ReadOnlyMemory?)) { Dimensions = 8, DistanceFunction = Microsoft.Extensions.VectorData.DistanceFunction.DotProductSimilarity } + ] + }; +} +#pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider adding the 'required' modifier or declaring as nullable. 
diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeFixture.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeFixture.cs new file mode 100644 index 000000000000..6b3c068b597e --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeFixture.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Support; + +namespace PineconeIntegrationTests.Support; + +public class PineconeFixture : VectorStoreFixture +{ + public override TestStore TestStore => PineconeTestStore.Instance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeGenericDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeGenericDataModelFixture.cs new file mode 100644 index 000000000000..91768966c9ff --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeGenericDataModelFixture.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Support; + +namespace PineconeIntegrationTests.Support; + +public class PineconeGenericDataModelFixture : GenericDataModelFixture +{ + public override TestStore TestStore => PineconeTestStore.Instance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeSimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeSimpleModelFixture.cs new file mode 100644 index 000000000000..4835b3aa6eeb --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeSimpleModelFixture.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Support; + +namespace PineconeIntegrationTests.Support; + +public class PineconeSimpleModelFixture : SimpleModelFixture +{ + public override TestStore TestStore => PineconeTestStore.Instance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeTestStore.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeTestStore.cs new file mode 100644 index 000000000000..40d3e221e777 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeTestStore.cs @@ -0,0 +1,141 @@ +// Copyright (c) Microsoft. All rights reserved. + +#pragma warning disable IDE0005 // Using directive is unnecessary. +using System.Net.Http; +#pragma warning restore IDE0005 // Using directive is unnecessary. +using DotNet.Testcontainers.Builders; +using DotNet.Testcontainers.Containers; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.Pinecone; +using Pinecone; +using VectorDataSpecificationTests.Support; + +namespace PineconeIntegrationTests.Support; + +#pragma warning disable CA1001 // Type owns disposable fields but is not disposable +#pragma warning disable CA2000 // Dispose objects before losing scope + +internal sealed class PineconeTestStore : TestStore +{ + // Values taken from https://docs.pinecone.io/guides/operations/local-development + // v0.7.0 works with 2.1 client + // v1.0.0 works with 3.0 client + // We use hardcoded version to avoid breaking changes. 
+ private const string Image = "ghcr.io/pinecone-io/pinecone-local:v1.0.0.rc0"; + private const ushort FirstPort = 5080; + private const int IndexServiceCount = 10; + + public static PineconeTestStore Instance { get; } = new(); + + private IContainer? _container; + private Pinecone.PineconeClient? _client; + private PineconeVectorStore? _defaultVectorStore; + + public Pinecone.PineconeClient Client => this._client ?? throw new InvalidOperationException("Not initialized"); + + public override IVectorStore DefaultVectorStore => this._defaultVectorStore ?? throw new InvalidOperationException("Not initialized"); + + // Pinecone does not support distance functions other than PGA which is always enabled. + public override string DefaultIndexKind => ""; + + private PineconeTestStore() + { + } + + protected override async Task StartAsync() + { + this._container = await this.StartContainerAsync(); + + Dictionary containerToHostPort = Enumerable.Range(FirstPort, IndexServiceCount + 1) + .ToDictionary(port => port, port => (int)this._container.GetMappedPublicPort(port)); + + UriBuilder baseAddress = new() + { + Scheme = "http", + Host = this._container.Hostname, + Port = this._container.GetMappedPublicPort(FirstPort) + }; + + ClientOptions clientOptions = new() + { + BaseUrl = baseAddress.Uri.ToString(), + MaxRetries = 0, + IsTlsEnabled = false, + GrpcOptions = new() + { + HttpClient = new(new RedirectHandler(containerToHostPort), disposeHandler: true) + { + BaseAddress = baseAddress.Uri + }, + } + }; + + this._client = new Pinecone.PineconeClient( + apiKey: "ForPineconeLocalTheApiKeysAreIgnored", + clientOptions: clientOptions); + + this._defaultVectorStore = new(this._client); + } + + protected override async Task StopAsync() + { + if (this._container is not null) + { + await this._container.DisposeAsync(); + } + } + + private async Task StartContainerAsync() + { + ContainerBuilder builder = new ContainerBuilder() + .WithImage(Image) + // Pinecone Local will run on port $FirstPort. + .WithPortBinding(FirstPort, assignRandomHostPort: true) + // We are currently using the default Pinecone port (5080), but we can change it to a random port. + // In such case, we are going to need to set the PORT environment variable to the new port. + .WithEnvironment("PORT", FirstPort.ToString()); + + for (int indexService = 1; indexService <= IndexServiceCount; indexService++) + { + // And the index services on the following ports. + builder = builder.WithPortBinding(FirstPort + indexService, assignRandomHostPort: true); + } + + var container = builder.Build(); + + await container.StartAsync(); + + return container; + } + + private sealed class RedirectHandler : DelegatingHandler + { + private readonly Dictionary _containerToHostPort; + + public RedirectHandler(Dictionary portRedirections) + : base(new HttpClientHandler()) + { + this._containerToHostPort = portRedirections; + } + + protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + // When "host" argument is not provided for PineconeClient.Index, + // it will try to get the host from the Pinecone service. + // In the cloud environment it's fine, but with the local emulator + // it reports the address with the container port, not the host port. 
+ if (request.RequestUri != null && request.RequestUri.IsAbsoluteUri + && request.RequestUri.Host == "localhost" + && this._containerToHostPort.TryGetValue(request.RequestUri.Port, out int hostPort)) + { + UriBuilder builder = new(request.RequestUri) + { + Port = hostPort + }; + request.RequestUri = builder.Uri; + } + + return base.SendAsync(request, cancellationToken); + } + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/VectorSearch/PineconeVectorSearchDistanceFunctionComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/VectorSearch/PineconeVectorSearchDistanceFunctionComplianceTests.cs new file mode 100644 index 000000000000..2899e0cfbd7d --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/VectorSearch/PineconeVectorSearchDistanceFunctionComplianceTests.cs @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft. All rights reserved. + +using PineconeIntegrationTests.Support; +using VectorDataSpecificationTests.VectorSearch; +using Xunit; + +namespace PineconeIntegrationTests.VectorSearch; + +public class PineconeVectorSearchDistanceFunctionComplianceTests(PineconeFixture fixture) + : VectorSearchDistanceFunctionComplianceTests(fixture), IClassFixture +{ + public override Task CosineDistance() + => Assert.ThrowsAsync(base.CosineDistance); + + public override async Task CosineSimilarity() + { + await base.CosineSimilarity(); + await ArtificialDelayToWorkaroundEmulatorLimitations(); + } + + public override async Task DotProductSimilarity() + { + await base.DotProductSimilarity(); + await ArtificialDelayToWorkaroundEmulatorLimitations(); + } + + public override Task EuclideanDistance() + => Assert.ThrowsAsync(base.EuclideanDistance); + + public override async Task EuclideanSquaredDistance() + { + await base.EuclideanSquaredDistance(); + await ArtificialDelayToWorkaroundEmulatorLimitations(); + } + + public override Task Hamming() + => Assert.ThrowsAsync(base.Hamming); + + public override Task ManhattanDistance() + => Assert.ThrowsAsync(base.ManhattanDistance); + + public override Task NegativeDotProductSimilarity() + => Assert.ThrowsAsync(base.NegativeDotProductSimilarity); + + // The Pinecone emulator needs some extra time to spawn a new index service + // that uses a different distance function. + private static Task ArtificialDelayToWorkaroundEmulatorLimitations() + => Task.Delay(TimeSpan.FromSeconds(5)); +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs index 1914a08b74fe..c2d71d49281b 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs @@ -1,7 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. 
+using Microsoft.Extensions.VectorData; using SqlServerIntegrationTests.Support; using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Models; +using VectorDataSpecificationTests.Xunit; using Xunit; namespace SqlServerIntegrationTests.CRUD; @@ -9,4 +12,63 @@ namespace SqlServerIntegrationTests.CRUD; public class SqlServerBatchConformanceTests(SqlServerSimpleModelFixture fixture) : BatchConformanceTests(fixture), IClassFixture { + private const int SqlServerMaxParameters = 2_100; + + [ConditionalFact] + public Task CanSplitBatchToAccountForMaxParameterLimit_WithVectors() + => this.CanSplitBatchToAccountForMaxParameterLimit(includeVectors: true); + + [ConditionalFact] + public Task CanSplitBatchToAccountForMaxParameterLimit_WithoutVectors() + => this.CanSplitBatchToAccountForMaxParameterLimit(includeVectors: false); + + private async Task CanSplitBatchToAccountForMaxParameterLimit(bool includeVectors) + { + var collection = fixture.Collection; + SimpleModel[] inserted = Enumerable.Range(0, SqlServerMaxParameters + 1).Select(i => new SimpleModel() + { + Id = fixture.GenerateNextKey(), + Number = 100 + i, + Text = i.ToString(), + Floats = Enumerable.Range(0, SimpleModel.DimensionCount).Select(j => (float)(i + j)).ToArray() + }).ToArray(); + var keys = inserted.Select(record => record.Id).ToArray(); + + Assert.Empty(await collection.GetBatchAsync(keys).ToArrayAsync()); + var receivedKeys = await collection.UpsertBatchAsync(inserted).ToArrayAsync(); + Assert.Equal(keys.ToHashSet(), receivedKeys.ToHashSet()); // .ToHashSet() to ignore order + + var received = await collection.GetBatchAsync(keys, new() { IncludeVectors = includeVectors }).ToArrayAsync(); + foreach (var record in inserted) + { + record.AssertEqual(this.GetRecord(received, record.Id), includeVectors); + } + + await collection.DeleteBatchAsync(keys); + Assert.Empty(await collection.GetBatchAsync(keys).ToArrayAsync()); + } + + [ConditionalFact] + public async Task UpsertBatchIsAtomic() + { + var collection = fixture.Collection; + SimpleModel[] inserted = Enumerable.Range(0, SqlServerMaxParameters + 1).Select(i => new SimpleModel() + { + // The last Id is set to NULL, so it must not be inserted and the whole batch should fail + Id = i < SqlServerMaxParameters ? fixture.GenerateNextKey() : null!, + Number = 100 + i, + Text = i.ToString(), + Floats = Enumerable.Range(0, SimpleModel.DimensionCount).Select(j => (float)(i + j)).ToArray() + }).ToArray(); + + var keys = inserted.Select(record => record.Id).Where(key => key is not null).ToArray(); + Assert.Empty(await collection.GetBatchAsync(keys).ToArrayAsync()); + + VectorStoreOperationException ex = await Assert.ThrowsAsync(() => collection.UpsertBatchAsync(inserted).ToArrayAsync().AsTask()); + Assert.Equal("UpsertBatch", ex.OperationName); + Assert.Equal(collection.CollectionName, ex.CollectionName); + + // Make sure that no records were inserted! 
+ Assert.Empty(await collection.GetBatchAsync(keys).ToArrayAsync()); + } } diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs index e22d9f98a201..c8ed9a0cdda1 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs @@ -228,8 +228,10 @@ public void MergeIntoMany() ]; using SqlConnection connection = CreateConnection(); - using SqlCommand command = SqlServerCommandBuilder.MergeIntoMany(connection, "schema", "table", - keyProperty, properties, records)!; + using SqlCommand command = connection.CreateCommand(); + + Assert.True(SqlServerCommandBuilder.MergeIntoMany(command, "schema", "table", + keyProperty, properties, records)); string expectedCommand = """" @@ -283,9 +285,9 @@ public void DeleteMany() string[] keys = ["key1", "key2"]; VectorStoreRecordKeyProperty keyProperty = new("id", typeof(string)); using SqlConnection connection = CreateConnection(); + using SqlCommand command = connection.CreateCommand(); - using SqlCommand command = SqlServerCommandBuilder.DeleteMany(connection, - "schema", "tableName", keyProperty, keys)!; + Assert.True(SqlServerCommandBuilder.DeleteMany(command, "schema", "tableName", keyProperty, keys)); Assert.Equal("DELETE FROM [schema].[tableName] WHERE [id] IN (@id_0,@id_1)", command.CommandText); for (int i = 0; i < keys.Length; i++) @@ -338,9 +340,10 @@ public void SelectMany() ]; long[] keys = [123L, 456L, 789L]; using SqlConnection connection = CreateConnection(); + using SqlCommand command = connection.CreateCommand(); - using SqlCommand command = SqlServerCommandBuilder.SelectMany(connection, - "schema", "tableName", keyProperty, properties, keys, includeVectors: true)!; + Assert.True(SqlServerCommandBuilder.SelectMany(command, + "schema", "tableName", keyProperty, properties, keys, includeVectors: true)); AssertEqualIgnoreNewLines( """"" diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs index ee5ded61809e..b8fe0a30afe4 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs @@ -196,6 +196,6 @@ public async Task DeleteBatchAsyncDeletesTheRecords() // The order of records in the received array is not guaranteed // to match the order of keys in the requested keys array. 
- private SimpleModel GetRecord(SimpleModel[] received, TKey key) + protected SimpleModel GetRecord(SimpleModel[] received, TKey key) => received.Single(r => r.Id!.Equals(key)); } diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs index 285c93c23e92..16c6a5f46c7b 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs @@ -52,8 +52,14 @@ protected async Task SimpleSearch(string distanceFunction, double expectedExactM ReadOnlyMemory orthogonalVector = new([0f, -1f, -1f, 0f]); double[] scoreDictionary = [expectedExactMatchScore, expectedOppositeScore, expectedOrthogonalScore]; + double[] expectedScores = + [ + scoreDictionary[resultOrder[0]], + scoreDictionary[resultOrder[1]], + scoreDictionary[resultOrder[2]] + ]; - List records = + List insertedRecords = [ new() { @@ -76,10 +82,16 @@ protected async Task SimpleSearch(string distanceFunction, double expectedExactM Vector = orthogonalVector, } ]; + SearchRecord[] expectedRecords = + [ + insertedRecords[resultOrder[0]], + insertedRecords[resultOrder[1]], + insertedRecords[resultOrder[2]] + ]; // The record definition describes the distance function, // so we need a dedicated collection per test. - string uniqueCollectionName = Guid.NewGuid().ToString(); + string uniqueCollectionName = fixture.GetUniqueCollectionName(); var collection = fixture.TestStore.DefaultVectorStore.GetCollection( uniqueCollectionName, this.GetRecordDefinition(distanceFunction)); @@ -89,35 +101,55 @@ protected async Task SimpleSearch(string distanceFunction, double expectedExactM try { - await collection.UpsertBatchAsync(records).ToArrayAsync(); + await collection.UpsertBatchAsync(insertedRecords).ToArrayAsync(); var searchResult = await collection.VectorizedSearchAsync(baseVector); var results = await searchResult.Results.ToListAsync(); - VerifySearchResults(resultOrder, scoreDictionary, records, results, includeVectors: false); + VerifySearchResults(expectedRecords, expectedScores, results, includeVectors: false); searchResult = await collection.VectorizedSearchAsync(baseVector, new() { IncludeVectors = true }); results = await searchResult.Results.ToListAsync(); - VerifySearchResults(resultOrder, scoreDictionary, records, results, includeVectors: true); + VerifySearchResults(expectedRecords, expectedScores, results, includeVectors: true); + + for (int skip = 0; skip <= insertedRecords.Count; skip++) + { + for (int top = Math.Max(1, skip); top <= insertedRecords.Count; top++) + { + searchResult = await collection.VectorizedSearchAsync(baseVector, + new() + { + Skip = skip, + Top = top, + IncludeVectors = true + }); + results = await searchResult.Results.ToListAsync(); + + VerifySearchResults( + expectedRecords.Skip(skip).Take(top).ToArray(), + expectedScores.Skip(skip).Take(top).ToArray(), + results, includeVectors: true); + } + } } finally { await collection.DeleteCollectionAsync(); } - static void VerifySearchResults(int[] resultOrder, double[] scoreDictionary, List records, + static void VerifySearchResults(SearchRecord[] expectedRecords, double[] expectedScores, List> results, bool includeVectors) { - Assert.Equal(records.Count, results.Count); + 
Assert.Equal(expectedRecords.Length, results.Count); for (int i = 0; i < results.Count; i++) { - Assert.Equal(records[resultOrder[i]].Key, results[i].Record.Key); - Assert.Equal(records[resultOrder[i]].Int, results[i].Record.Int); - Assert.Equal(records[resultOrder[i]].String, results[i].Record.String); - Assert.Equal(Math.Round(scoreDictionary[resultOrder[i]], 2), Math.Round(results[i].Score!.Value, 2)); + Assert.Equal(expectedRecords[i].Key, results[i].Record.Key); + Assert.Equal(expectedRecords[i].Int, results[i].Record.Int); + Assert.Equal(expectedRecords[i].String, results[i].Record.String); + Assert.Equal(Math.Round(expectedScores[i], 2), Math.Round(results[i].Score!.Value, 2)); if (includeVectors) { - Assert.Equal(records[resultOrder[i]].Vector.ToArray(), results[i].Record.Vector.ToArray()); + Assert.Equal(expectedRecords[i].Vector.ToArray(), results[i].Record.Vector.ToArray()); } else { diff --git a/python/samples/concepts/README.md b/python/samples/concepts/README.md index 205fed6b0cdf..f31c833413cb 100644 --- a/python/samples/concepts/README.md +++ b/python/samples/concepts/README.md @@ -172,10 +172,7 @@ ### Search - Using [`Search`](https://github.com/microsoft/semantic-kernel/tree/main/python/semantic_kernel/connectors/search) services information -- [Bing Search Plugin](./search/bing_search_plugin.py) -- [Bing Text Search](./search/bing_text_search.py) - [Bing Text Search as Plugin](./search/bing_text_search_as_plugin.py) -- [Google Search Plugin](./search/google_search_plugin.py) - [Google Text Search as Plugin](./search/google_text_search_as_plugin.py) ### Service Selector - Shows how to create and use a custom service selector class diff --git a/python/samples/concepts/search/bing_plugin_examples.py b/python/samples/concepts/search/bing_plugin_examples.py deleted file mode 100644 index 6e6c201981c7..000000000000 --- a/python/samples/concepts/search/bing_plugin_examples.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -##################################################### -# This sample should be considered obsolete, as we are moving things towards the new text search model. -# Please check out the bing_text_search_as_plugin.py sample for the latest implementation. -##################################################### - -import asyncio - -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion, OpenAIChatPromptExecutionSettings -from semantic_kernel.connectors.search_engine import BingConnector -from semantic_kernel.core_plugins import WebSearchEnginePlugin -from semantic_kernel.functions import KernelArguments -from semantic_kernel.prompt_template import KernelPromptTemplate, PromptTemplateConfig - - -async def example1(kernel: Kernel, search_plugin_name: str): - print("======== Bing and Google Search Plugins ========") - - question = "What's the largest building in the world?" - function = kernel.get_function(plugin_name=search_plugin_name, function_name="search") - result = await kernel.invoke(function, query=question) - - print(question) - print(f"----{search_plugin_name}----") - print(result) - - -async def example2(kernel: Kernel, service_id: str): - print("======== Use the Search Plugin to Answer User Questions ========") - - prompt = """ - Answer questions only when you know the facts or the information is provided. - When you don't have sufficient information you reply with a list of commands to find the information needed. 
- When answering multiple questions, use a bullet point list. - Note: make sure single and double quotes are escaped using a backslash char. - - [COMMANDS AVAILABLE] - - bing.search - - [INFORMATION PROVIDED] - {{ $externalInformation }} - - [EXAMPLE 1] - Question: what's the biggest lake in Italy? - Answer: Lake Garda, also known as Lago di Garda. - - [EXAMPLE 2] - Question: what's the biggest lake in Italy? What's the smallest positive number? - Answer: - * Lake Garda, also known as Lago di Garda. - * The smallest positive number is 1. - - [EXAMPLE 3] - Question: what's Ferrari stock price? Who is the current number one female tennis player in the world? - Answer: - {{ '{{' }} bing.search ""what\\'s Ferrari stock price?"" {{ '}}' }}. - {{ '{{' }} bing.search ""Who is the current number one female tennis player in the world?"" {{ '}}' }}. - - [END OF EXAMPLES] - - [TASK] - Question: {{ $question }}. - Answer: - """ - question = "Who is the most followed person on TikTok right now? What's the exchange rate EUR:USD?" - print(question) - - oracle = kernel.add_function( - function_name="oracle", - plugin_name="OraclePlugin", - prompt=prompt, - execution_settings=OpenAIChatPromptExecutionSettings( - service_id=service_id, max_tokens=150, temperature=0, top_p=1 - ), - ) - answer = await kernel.invoke( - oracle, - question=question, - externalInformation="", - ) - - result = str(answer) - - if "bing.search" in result: - prompt_template = KernelPromptTemplate(prompt_template_config=PromptTemplateConfig(template=result)) - - print("--- Fetching information from Bing... ---") - information = await prompt_template.render(kernel, KernelArguments()) - - print("Information found:\n") - print(information) - - answer = await kernel.invoke(oracle, question=question, externalInformation=information) - print("\n---- Oracle's Answer ----:\n") - print(answer) - else: - print("AI had all of the information, there was no need to query Bing.") - - -async def main(): - kernel = Kernel() - - model = "gpt-3.5-turbo" - service_id = model - - kernel.add_service( - OpenAIChatCompletion(service_id=service_id, ai_model_id=model), - ) - - bing_connector = BingConnector() - bing = WebSearchEnginePlugin(bing_connector) - kernel.add_plugin(bing, "bing") - - await example1(kernel, "bing") - await example2(kernel, service_id) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/concepts/search/bing_search_plugin.py b/python/samples/concepts/search/bing_search_plugin.py deleted file mode 100644 index 00acd9a9f546..000000000000 --- a/python/samples/concepts/search/bing_search_plugin.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - - -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.connectors.search_engine import BingConnector -from semantic_kernel.core_plugins import WebSearchEnginePlugin -from semantic_kernel.prompt_template import PromptTemplateConfig - - -async def main(): - kernel = Kernel() - service_id = "chat-gpt" - kernel.add_service(AzureChatCompletion(service_id=service_id)) - connector = BingConnector() - web_plugin = kernel.add_plugin(WebSearchEnginePlugin(connector), "WebSearch") - - print("---------------- Question 1 -----------------\n") - - query = "Which country receives the most rain per year?" 
- search = web_plugin["search"] - result = await kernel.invoke(search, query=query) - print(f"Question: {query}\n") - print(f"Answer: {result}\n") - - print("---------------- Question 2 -----------------\n") - - prompt = """ - Answer the question using only the data that is provided in the data section. - Do not use any prior knowledge to answer the question. - Data: {{WebSearch.search "What is semantic kernel?"}} - Question: {{$question}}? - Answer: - """ - - req_settings = kernel.get_prompt_execution_settings_from_service_id(service_id=service_id) - req_settings.temperature = 0.2 - - prompt_template_config = PromptTemplateConfig( - template=prompt, - name="qna", - template_format="semantic-kernel", - execution_settings=req_settings, - ) - - question = "What is Semantic Kernel?" - qna = kernel.add_function( - function_name="qna", - plugin_name="WebSearch", - prompt_template_config=prompt_template_config, - ) - result = await qna.invoke(kernel, question=question, num_results=10, offset=0) - - print(f"Question: {question}\n") - print(f"Answer: {result}\n") - - """ - Output: - Semantic Kernel is an open-source SDK that lets you easily combine AI services like OpenAI, - Azure OpenAI, and Hugging Face with conventional programming languages like C# and Python. - By doing so, you can create AI apps that combine the best of both worlds. - Semantic Kernel is at the center of the copilot stack. - """ - - -if __name__ == "__main__": - import asyncio - - asyncio.run(main()) diff --git a/python/samples/concepts/search/bing_text_search.py b/python/samples/concepts/search/bing_text_search.py deleted file mode 100644 index d8c84529b188..000000000000 --- a/python/samples/concepts/search/bing_text_search.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - - -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion -from semantic_kernel.connectors.search.bing.bing_search import BingSearch - - -async def main(): - kernel = Kernel() - service_id = "chat-gpt" - kernel.add_service(AzureChatCompletion(service_id=service_id)) - connector = BingSearch() - query = "What is semantic kernel?" 
- print("Query: ", query) - print("\n============================\n") - results = await connector.search(query, top=2) - print("String search results: ") - if results.metadata and results.metadata.get("altered_query", None): - print(f" Altered query: {results.metadata['altered_query']}") - async for result in results.results: - print(f" result: {result}") - print("\n============================\n") - results = await connector.get_text_search_results(query, top=2) - print("Text search results: ") - if results.metadata and results.metadata.get("altered_query", None): - print(f" Altered query: {results.metadata['altered_query']}") - async for result in results.results: - print(f" name: {result.name}") - print(f" value: {result.value}") - print(f" link: {result.link}") - print("\n============================\n") - results = await connector.get_search_results(query, top=2) - print("BingWebPage results: ") - if results.metadata and results.metadata.get("altered_query", None): - print(f" Altered query: {results.metadata['altered_query']}") - async for result in results.results: - print(f" name: {result.name}") - print(f" url: {result.url}") - print(f" language: {result.language}") - print(f" snippet: {result.snippet}") - print("\n============================\n") - - -if __name__ == "__main__": - import asyncio - - asyncio.run(main()) diff --git a/python/samples/concepts/search/bing_text_search_as_plugin.py b/python/samples/concepts/search/bing_text_search_as_plugin.py index 9f9c23ac2008..53968f10ec21 100644 --- a/python/samples/concepts/search/bing_text_search_as_plugin.py +++ b/python/samples/concepts/search/bing_text_search_as_plugin.py @@ -1,19 +1,16 @@ # Copyright (c) Microsoft. All rights reserved. - -from collections.abc import Coroutine -from typing import Any +from collections.abc import Awaitable, Callable from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai import FunctionChoiceBehavior from semantic_kernel.connectors.ai.open_ai import ( OpenAIChatCompletion, OpenAIChatPromptExecutionSettings, ) from semantic_kernel.connectors.search.bing import BingSearch from semantic_kernel.contents import ChatHistory -from semantic_kernel.filters.filter_types import FilterTypes -from semantic_kernel.filters.functions.function_invocation_context import FunctionInvocationContext +from semantic_kernel.filters import FilterTypes, FunctionInvocationContext from semantic_kernel.functions import KernelArguments, KernelParameterMetadata, KernelPlugin kernel = Kernel() @@ -84,7 +81,9 @@ @kernel.filter(filter_type=FilterTypes.FUNCTION_INVOCATION) -async def log_bing_filter(context: FunctionInvocationContext, next: Coroutine[FunctionInvocationContext, Any, None]): +async def log_bing_filter( + context: FunctionInvocationContext, next: Callable[[FunctionInvocationContext], Awaitable[None]] +): if context.function.plugin_name == "bing": print("Calling Bing search with arguments:") if "query" in context.arguments: diff --git a/python/samples/concepts/search/google_search_plugin.py b/python/samples/concepts/search/google_search_plugin.py deleted file mode 100644 index 0c24f34238e1..000000000000 --- a/python/samples/concepts/search/google_search_plugin.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import os - -from dotenv import load_dotenv - -from semantic_kernel import Kernel -from semantic_kernel.connectors.ai import PromptExecutionSettings -from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion -from semantic_kernel.connectors.search_engine import GoogleConnector -from semantic_kernel.core_plugins import WebSearchEnginePlugin -from semantic_kernel.functions import KernelArguments - -load_dotenv() - - -async def main(): - kernel = Kernel() - kernel.add_service(OpenAIChatCompletion(service_id="chat-gpt", ai_model_id="gpt-3.5-turbo")) - - """ - Instantiate a Google Connector - Make sure to have the following keys in a .env file or set as environment variables - - GOOGLE_API_KEY - - GOOGLE_SEARCH_ENGINE_ID - - A Google Custom Search API has to be created in order to have an API key and a search engine ID. - To create a Google Custom Search API, follow the guide - https://developers.google.com/custom-search/v1/overview. - If you have already created the service, the credentials can be found in the Credentials tab on the page - https://console.cloud.google.com/apis/api/customsearch.googleapis.com - """ - connector = GoogleConnector( - api_key=os.getenv("GOOGLE_API_KEY"), - search_engine_id=os.getenv("GOOGLE_SEARCH_ENGINE_ID"), - ) - - # Import the WebSearchEnginePlugin and pass the Google Connector to it. - web_plugin = kernel.add_plugin(WebSearchEnginePlugin(connector), "WebSearch") - - # The search query - search = web_plugin["searchAsync"] - prompt = "Who is Leonardo DiCaprio's current girlfriend?" - - # By default, only one search result is provided - result = await search.invoke(kernel, query=prompt) - print(str(result)) - - """ - Output: - ["Celebrity Celebrity News Everything You Need to Know About Leonardo DiCaprio and Camila Morrone's - Relationship From the beginning of their romance to today, we track their relationship here. By..."] - """ - - # Following example demonstrates the use of the plugin within a semantic function - prompt = """ - Answer the question using only the data that is provided in the data section. - Do not use any prior knowledge to answer the question. - Data: {{WebSearch.SearchAsync "What is semantic kernel?"}} - Question: What is semantic kernel? - Answer: - """ - - qna = kernel.add_function( - plugin_name="qa", - function_name="qna", - prompt=prompt, - prompt_execution_settings=PromptExecutionSettings(temperature=0.2), - ) - - """ - Two context parameters can be passed to the search engine plugin. - - num_results controls the number of results returned by the web search. - - offset controls the number of results to omit. - """ - arguments = KernelArguments(num_results="10", offset="0") - - result = await qna.invoke(kernel, arguments) - print(str(result)) - - """ - Output: - Semantic Kernel is an open-source SDK that lets you easily combine AI services like OpenAI, - Azure OpenAI, and Hugging Face with conventional programming languages like C# and Python. - By doing so, you can create AI apps that combine the best of both worlds. - Semantic Kernel is at the center of the copilot stack. 
- """ - - -if __name__ == "__main__": - import asyncio - - asyncio.run(main()) diff --git a/python/samples/concepts/search/google_text_search_as_plugin.py b/python/samples/concepts/search/google_text_search_as_plugin.py index d9f2c6fb0867..f815eb0fd310 100644 --- a/python/samples/concepts/search/google_text_search_as_plugin.py +++ b/python/samples/concepts/search/google_text_search_as_plugin.py @@ -5,15 +5,14 @@ from typing import Any from semantic_kernel import Kernel -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai import FunctionChoiceBehavior from semantic_kernel.connectors.ai.open_ai import ( OpenAIChatCompletion, OpenAIChatPromptExecutionSettings, ) from semantic_kernel.connectors.search.google import GoogleSearch from semantic_kernel.contents import ChatHistory -from semantic_kernel.filters.filter_types import FilterTypes -from semantic_kernel.filters.functions.function_invocation_context import FunctionInvocationContext +from semantic_kernel.filters import FilterTypes, FunctionInvocationContext from semantic_kernel.functions import KernelArguments, KernelParameterMetadata, KernelPlugin # This sample shows how to setup Google Search as a plugin in the Semantic Kernel. diff --git a/python/samples/demos/README.md b/python/samples/demos/README.md index fa23e14b3c4d..e16894460735 100644 --- a/python/samples/demos/README.md +++ b/python/samples/demos/README.md @@ -6,6 +6,8 @@ Demonstration applications that leverage the usage of one or many SK features | ----------------- | ----------------------------------------------- | | assistants_group_chat | A sample Agent demo that shows a chat functionality with an OpenAI Assistant agent. | | booking_restaurant | A sample chat bot that leverages the Microsoft Graph and Bookings API as a Semantic Kernel plugin to make a fake booking at a restaurant. | +| copilot_studio_agent | A sample that shows how to invoke Microsoft Copilot Studio agents as first-party agents in Semantic Kernel | +| copilot_studio_skill | A sample demonstrating how to extend Microsoft Copilot Studio to invoke Semantic Kernel agents | | guided_conversations | A sample showing a framework for a pattern of use cases referred to as guided conversations. | | processes_with_dapr | A sample showing the Semantic Kernel process framework used with the Python Dapr runtime. | | telemetry_with_application_insights | A sample project that shows how a Python application can be configured to send Semantic Kernel telemetry to Application Insights. | \ No newline at end of file diff --git a/python/samples/demos/copilot_studio_agent/.env.sample b/python/samples/demos/copilot_studio_agent/.env.sample new file mode 100644 index 000000000000..652168b135e7 --- /dev/null +++ b/python/samples/demos/copilot_studio_agent/.env.sample @@ -0,0 +1,2 @@ +BOT_SECRET="copy from Copilot Studio Agent, under Settings > Security > Web Channel" +BOT_ENDPOINT="https://europe.directline.botframework.com/v3/directline" \ No newline at end of file diff --git a/python/samples/demos/copilot_studio_agent/README.md b/python/samples/demos/copilot_studio_agent/README.md new file mode 100644 index 000000000000..187396158e5b --- /dev/null +++ b/python/samples/demos/copilot_studio_agent/README.md @@ -0,0 +1,50 @@ +# Copilot Studio Agents interaction + +This is a simple example of how to interact with Copilot Studio Agents as if they were first-party agents in Semantic Kernel.
+
+![alt text](image.png)
+
+## Rationale
+
+Semantic Kernel already features many different types of agents, including `ChatCompletionAgent`, `AzureAIAgent`, `OpenAIAssistantAgent` and `AutoGenConversableAgent`. All of them, however, are code-based agents.
+
+Instead, [Microsoft Copilot Studio](https://learn.microsoft.com/en-us/microsoft-copilot-studio/fundamentals-what-is-copilot-studio) allows you to create declarative, low-code, and easy-to-maintain agents and publish them over multiple channels.
+
+This way, you can create any number of agents in Copilot Studio and use them alongside code-based agents in Semantic Kernel, getting the best of both worlds.
+
+## Implementation
+
+The implementation is quite simple: Copilot Studio can publish agents over the Direct Line API, which we can use in Semantic Kernel to define a new subclass of `Agent` named [`DirectLineAgent`](src/direct_line_agent.py) (a rough sketch of the underlying Direct Line exchange is shown after the usage steps below).
+
+Additionally, we enforce [authentication to the Direct Line API](https://learn.microsoft.com/en-us/microsoft-copilot-studio/configure-web-security).
+
+## Usage
+
+> [!NOTE]
+> Working with Copilot Studio Agents requires a [subscription](https://learn.microsoft.com/en-us/microsoft-copilot-studio/requirements-licensing-subscriptions) to Microsoft Copilot Studio.
+
+> [!TIP]
+> We suggest starting with a simple Q&A agent and supplying a PDF for it to answer questions from. A free sample is the [Microsoft Surface Pro 4 User Guide](https://download.microsoft.com/download/2/9/B/29B20383-302C-4517-A006-B0186F04BE28/surface-pro-4-user-guide-EN.pdf).
+
+1. [Create a new agent](https://learn.microsoft.com/en-us/microsoft-copilot-studio/fundamentals-get-started?tabs=web) in Copilot Studio
+2. [Publish the agent](https://learn.microsoft.com/en-us/microsoft-copilot-studio/publication-fundamentals-publish-channels?tabs=web)
+3. Turn off default authentication under the agent's Settings > Security
+4. [Set up web channel security](https://learn.microsoft.com/en-us/microsoft-copilot-studio/configure-web-security) and copy the secret value
+
+Once you're done with the above steps, you can interact with the Copilot Studio agent as follows:
+
+1. Copy the `.env.sample` file to `.env` and set the `BOT_SECRET` environment variable to the secret value
+2. Run the following commands:
+
+```bash
+python -m venv .venv
+
+# On Mac/Linux
+source .venv/bin/activate
+# On Windows
+.venv\Scripts\Activate.ps1
+
+pip install -r requirements.txt
+
+chainlit run --port 8081 chat.py
+```
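
For orientation, here is a minimal sketch of the Direct Line round trip that an agent like `DirectLineAgent` has to perform: open a conversation with the web channel secret, post the user's message as an activity, and poll the activity feed for the bot's reply. This is not the `src/direct_line_agent.py` implementation added by this patch; the helper names are illustrative, `requests` is used only to keep the example self-contained, and `BOT_SECRET`/`BOT_ENDPOINT` are assumed to be set as described in `.env.sample`.

```python
# Illustrative sketch only -- not the DirectLineAgent implementation from this patch.
# Assumes BOT_SECRET and BOT_ENDPOINT are set as described in .env.sample.
import os
import time

import requests

ENDPOINT = os.environ["BOT_ENDPOINT"]  # e.g. https://europe.directline.botframework.com/v3/directline
HEADERS = {"Authorization": f"Bearer {os.environ['BOT_SECRET']}"}


def start_conversation() -> str:
    """Open a new Direct Line conversation and return its id."""
    response = requests.post(f"{ENDPOINT}/conversations", headers=HEADERS, timeout=30)
    response.raise_for_status()
    return response.json()["conversationId"]


def send_message(conversation_id: str, text: str) -> None:
    """Post the user's message to the conversation as a Direct Line activity."""
    activity = {"type": "message", "from": {"id": "user"}, "text": text}
    response = requests.post(
        f"{ENDPOINT}/conversations/{conversation_id}/activities",
        headers=HEADERS,
        json=activity,
        timeout=30,
    )
    response.raise_for_status()


def wait_for_reply(conversation_id: str, attempts: int = 10) -> str | None:
    """Poll the activity feed until a message that did not come from us shows up."""
    watermark = None
    for _ in range(attempts):
        params = {"watermark": watermark} if watermark else {}
        response = requests.get(
            f"{ENDPOINT}/conversations/{conversation_id}/activities",
            headers=HEADERS,
            params=params,
            timeout=30,
        )
        response.raise_for_status()
        payload = response.json()
        watermark = payload.get("watermark")
        for activity in payload.get("activities", []):
            if activity.get("type") == "message" and activity.get("from", {}).get("id") != "user":
                return activity.get("text")
        time.sleep(1)
    return None


if __name__ == "__main__":
    conversation = start_conversation()
    send_message(conversation, "What accessories ship with the Surface Pro 4?")
    print(wait_for_reply(conversation))
```

The actual sample wraps this exchange behind the standard Semantic Kernel `Agent` interface, which is what lets the Copilot Studio agent be mixed with the code-based agents mentioned in the rationale above.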

diff --git a/python/samples/demos/copilot_studio_agent/image.png b/python/samples/demos/copilot_studio_agent/image.png
new file mode 100644
index 0000000000000000000000000000000000000000..2d97b73bcaa995c18b839d83ba0c2fa2d5435224
GIT binary patch
literal 83821
[binary image data omitted]
z=~Xrw;84S}e{E?pYso3aLxycS7`J6ZjGK522h($ literal 0 HcmV?d00001 diff --git a/python/samples/demos/copilot_studio_agent/src/chat.py b/python/samples/demos/copilot_studio_agent/src/chat.py new file mode 100644 index 000000000000..39c0cabd0739 --- /dev/null +++ b/python/samples/demos/copilot_studio_agent/src/chat.py @@ -0,0 +1,44 @@ +# Copyright (c) Microsoft. All rights reserved. + +import logging +import os + +import chainlit as cl +from direct_line_agent import DirectLineAgent +from dotenv import load_dotenv + +from semantic_kernel.contents.chat_history import ChatHistory + +load_dotenv(override=True) + +logging.basicConfig(level=logging.INFO) +logging.getLogger("direct_line_agent").setLevel(logging.DEBUG) +logger = logging.getLogger(__name__) + +agent = DirectLineAgent( + id="copilot_studio", + name="copilot_studio", + description="copilot_studio", + bot_secret=os.getenv("BOT_SECRET"), + bot_endpoint=os.getenv("BOT_ENDPOINT"), +) + + +@cl.on_chat_start +async def on_chat_start(): + cl.user_session.set("chat_history", ChatHistory()) + + +@cl.on_message +async def on_message(message: cl.Message): + chat_history: ChatHistory = cl.user_session.get("chat_history") + + chat_history.add_user_message(message.content) + + response = await agent.get_response(history=chat_history) + + cl.user_session.set("chat_history", chat_history) + + logger.info(f"Response: {response}") + + await cl.Message(content=response.content, author=agent.name).send() diff --git a/python/samples/demos/copilot_studio_agent/src/direct_line_agent.py b/python/samples/demos/copilot_studio_agent/src/direct_line_agent.py new file mode 100644 index 000000000000..718610492cea --- /dev/null +++ b/python/samples/demos/copilot_studio_agent/src/direct_line_agent.py @@ -0,0 +1,236 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging +import sys +from collections.abc import AsyncIterable +from typing import Any + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover +import aiohttp + +from semantic_kernel.agents import Agent +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.exceptions.agent_exceptions import AgentInvokeException +from semantic_kernel.utils.telemetry.agent_diagnostics.decorators import ( + trace_agent_get_response, + trace_agent_invocation, +) + +logger = logging.getLogger(__name__) + + +class DirectLineAgent(Agent): + """ + An Agent subclass that connects to a DirectLine Bot from Microsoft Bot Framework. + Instead of directly supplying a secret and conversation ID, the agent queries a token_endpoint + to retrieve the token and then starts a conversation. + """ + + token_endpoint: str | None = None + bot_secret: str | None = None + bot_endpoint: str + conversation_id: str | None = None + directline_token: str | None = None + session: aiohttp.ClientSession = None + + async def _ensure_session(self) -> None: + """ + Lazily initialize the aiohttp ClientSession. + """ + if self.session is None: + self.session = aiohttp.ClientSession() + + async def _fetch_token_and_conversation(self) -> None: + """ + Retrieve the DirectLine token either by using the bot_secret or by querying the token_endpoint. + If bot_secret is provided, it posts to "https://directline.botframework.com/v3/directline/tokens/generate". 
+ """ + await self._ensure_session() + try: + if self.bot_secret: + url = f"{self.bot_endpoint}/tokens/generate" + headers = {"Authorization": f"Bearer {self.bot_secret}"} + async with self.session.post(url, headers=headers) as resp: + if resp.status == 200: + data = await resp.json() + self.directline_token = data.get("token") + if not self.directline_token: + logger.error("Token generation response missing token: %s", data) + raise AgentInvokeException("No token received from token generation.") + else: + logger.error("Token generation endpoint error status: %s", resp.status) + raise AgentInvokeException("Failed to generate token using bot_secret.") + else: + async with self.session.get(self.token_endpoint) as resp: + if resp.status == 200: + data = await resp.json() + self.directline_token = data.get("token") + if not self.directline_token: + logger.error("Token endpoint returned no token: %s", data) + raise AgentInvokeException("No token received.") + else: + logger.error("Token endpoint error status: %s", resp.status) + raise AgentInvokeException("Failed to fetch token from token endpoint.") + except Exception as ex: + logger.exception("Exception fetching token: %s", ex) + raise AgentInvokeException("Exception occurred while fetching token.") from ex + + @trace_agent_get_response + @override + async def get_response( + self, + history: ChatHistory, + arguments: dict[str, Any] | None = None, + **kwargs: Any, + ) -> ChatMessageContent: + """ + Get a response from the DirectLine Bot. + """ + responses = [] + async for response in self.invoke(history, arguments, **kwargs): + responses.append(response) + + if not responses: + raise AgentInvokeException("No response from DirectLine Bot.") + + return responses[0] + + @trace_agent_invocation + @override + async def invoke( + self, + history: ChatHistory, + arguments: dict[str, Any] | None = None, + **kwargs: Any, + ) -> AsyncIterable[ChatMessageContent]: + """ + Send the latest message from the chat history to the DirectLine Bot + and yield responses. This sends the payload after ensuring that: + 1. The token is fetched. + 2. A conversation is started. + 3. The activity payload is posted. + 4. Activities are polled until an event "DynamicPlanFinished" is received. + """ + payload = self._build_payload(history, arguments, **kwargs) + response_data = await self._send_message(payload) + if response_data is None or "activities" not in response_data: + raise AgentInvokeException(f"Invalid response from DirectLine Bot.\n{response_data}") + + logger.debug("DirectLine Bot response: %s", response_data) + + # NOTE DirectLine Activities have different formats + # than ChatMessageContent. We need to convert them and + # remove unsupported activities. + for activity in response_data["activities"]: + if activity.get("type") != "message" or activity.get("from", {}).get("role") == "user": + continue + role = activity.get("from", {}).get("role", "assistant") + if role == "bot": + role = "assistant" + message = ChatMessageContent( + role=role, + content=activity.get("text", ""), + name=activity.get("from", {}).get("name", self.name), + ) + yield message + + def _build_payload( + self, + history: ChatHistory, + arguments: dict[str, Any] | None = None, + **kwargs: Any, + ) -> dict[str, Any]: + """ + Build the message payload for the DirectLine Bot. + Uses the latest message from the chat history. 
+ """ + latest_message = history.messages[-1] if history.messages else None + text = latest_message.content if latest_message else "Hello" + payload = { + "type": "message", + "from": {"id": "user"}, + "text": text, + } + # Optionally include conversationId if available. + if self.conversation_id: + payload["conversationId"] = self.conversation_id + return payload + + async def _send_message(self, payload: dict[str, Any]) -> dict[str, Any] | None: + """ + 1. Ensure the token is fetched. + 2. Start a conversation by posting to the bot_endpoint /conversations endpoint (without a payload) + 3. Post the payload to /conversations/{conversationId}/activities + 4. Poll GET /conversations/{conversationId}/activities every 1s using a watermark + to fetch only the latest messages until an activity with type="event" + and name="DynamicPlanFinished" is found. + """ + await self._ensure_session() + if not self.directline_token: + await self._fetch_token_and_conversation() + + headers = { + "Authorization": f"Bearer {self.directline_token}", + "Content-Type": "application/json", + } + + # Step 2: Start a conversation if one hasn't already been started. + if not self.conversation_id: + start_conv_url = f"{self.bot_endpoint}/conversations" + async with self.session.post(start_conv_url, headers=headers) as resp: + if resp.status not in (200, 201): + logger.error("Failed to start conversation. Status: %s", resp.status) + raise AgentInvokeException("Failed to start conversation.") + conv_data = await resp.json() + self.conversation_id = conv_data.get("conversationId") + if not self.conversation_id: + raise AgentInvokeException("Conversation ID not found in start response.") + + # Step 3: Post the message payload. + activities_url = f"{self.bot_endpoint}/conversations/{self.conversation_id}/activities" + async with self.session.post(activities_url, json=payload, headers=headers) as resp: + if resp.status != 200: + logger.error("Failed to post activity. Status: %s", resp.status) + raise AgentInvokeException("Failed to post activity.") + _ = await resp.json() # Response from posting activity is ignored. + + # Step 4: Poll for new activities using watermark until DynamicPlanFinished event is found. + finished = False + collected_data = None + watermark = None + while not finished: + url = activities_url if watermark is None else f"{activities_url}?watermark={watermark}" + async with self.session.get(url, headers=headers) as resp: + if resp.status == 200: + data = await resp.json() + watermark = data.get("watermark", watermark) + activities = data.get("activities", []) + if any( + activity.get("type") == "event" and activity.get("name") == "DynamicPlanFinished" + for activity in activities + ): + collected_data = data + finished = True + break + else: + logger.error("Error polling activities. Status: %s", resp.status) + await asyncio.sleep(0.3) + + return collected_data + + async def close(self) -> None: + """ + Clean up the aiohttp session. 
+        """
+        await self.session.close()
+
+    # NOTE: not implemented yet; possibly use websockets
+    @trace_agent_invocation
+    @override
+    async def invoke_stream(self, *args, **kwargs):
+        return super().invoke_stream(*args, **kwargs)
diff --git a/python/samples/demos/copilot_studio_agent/src/requirements.txt b/python/samples/demos/copilot_studio_agent/src/requirements.txt
new file mode 100644
index 000000000000..5b17fe631d9d
--- /dev/null
+++ b/python/samples/demos/copilot_studio_agent/src/requirements.txt
@@ -0,0 +1,4 @@
+chainlit>=2.0.1
+python-dotenv>=1.0.1
+aiohttp>=3.10.5
+semantic-kernel>=1.22.0
\ No newline at end of file
diff --git a/python/samples/demos/copilot_studio_skill/README.md b/python/samples/demos/copilot_studio_skill/README.md
new file mode 100644
index 000000000000..344179e1e4b1
--- /dev/null
+++ b/python/samples/demos/copilot_studio_skill/README.md
@@ -0,0 +1,99 @@
+# Extend Copilot Studio with Semantic Kernel
+
+This template demonstrates how to build a [Copilot Studio Skill](https://learn.microsoft.com/en-us/microsoft-copilot-studio/advanced-use-skills) that allows you to extend agent capabilities with a custom API running in Azure with the help of Semantic Kernel.
+
+![Copilot Studio using the Semantic Kernel skill within a topic](image.png)
+
+## Rationale
+
+[Microsoft Copilot Studio](https://learn.microsoft.com/en-us/microsoft-copilot-studio/fundamentals-what-is-copilot-studio) is a graphical, low-code tool for both creating an agent — including building automation with Power Automate — and extending a Microsoft 365 Copilot with your own enterprise data and scenarios.
+
+However, in some cases you may need to extend the default agent capabilities by leveraging a pro-code approach, where specific requirements apply.
+
+## Prerequisites
+
+- Azure Subscription
+- Azure CLI
+- Azure Developer CLI
+- Python 3.12 or later
+- A Microsoft 365 tenant with Copilot Studio enabled
+
+> [!NOTE]
+> You don't need the Azure subscription to be on the same tenant as the Microsoft 365 tenant where Copilot Studio is enabled.
+>
+> However, you need to have the necessary permissions to register an application in the Azure Active Directory of the tenant where Copilot Studio is enabled.
+
+## Getting Started
+
+1. Clone this repository to your local machine.
+
+```bash
+git clone https://github.com/microsoft/semantic-kernel
+cd semantic-kernel/python/samples/demos/copilot_studio_skill
+```
+
+2. Create an App Registration in Microsoft Entra ID with a client secret.
+
+```powershell
+az login --tenant <TENANT_ID>
+$appId = az ad app create --display-name "SKCopilotSkill" --query appId -o tsv
+$secret = az ad app credential reset --id $appId --append --query password -o tsv
+```
+
+3. Run `azd up` to deploy the Azure resources.
+
+```bash
+azd auth login --tenant <TENANT_ID>
+azd up
+```
+
+> [!NOTE]
+> When prompted, provide the `botAppId`, `botPassword` and `botTenantId` values from above.
+>
+> You will also need to input an existing Azure OpenAI resource name and its resource group.
+
+> [!TIP]
+> Once the deployment is complete, you can find the URL of the deployed API in the `output` section of the Azure Developer CLI. Copy this URL.
+
+4. Ensure the App Registration `homeUrl` is set to the URL of the deployed API. This is required for the bot to be able to respond to requests from Copilot Studio.
+
+5. Register the bot in Copilot Studio as a skill
+
+   - Open Copilot Studio in your Microsoft 365 tenant.
+   - Create a new agent or reuse an existing one.
+   - Go to "Settings" in the upper right corner of the agent page.
+   - Go to the "Skills" tab and click on "Add a skill".
+   - Enter `API_URL/manifest` as the skill manifest URL, where `API_URL` is the URL of the deployed API.
+   - Click on "Next" to register the skill.
+   - Once the skill is registered, you can [start using it in your agent](https://learn.microsoft.com/en-us/microsoft-copilot-studio/advanced-use-skills). Edit a topic or create a new one, and add the skill as a node to the topic flow.
+
+## Architecture
+
+The architecture features `Azure Bot Service` as the main entry point for requests. The bot service is responsible for routing requests to the appropriate backend service, which in this case is a custom API running in `Azure Container Apps` leveraging Semantic Kernel.
+
+The request flow, including the one-time call Copilot Studio makes to fetch the skill manifest directly from the SK App, is shown below:
+
+```mermaid
+flowchart LR
+    subgraph Clients
+        A[Copilot Studio]
+    end
+
+    C[Azure Bot Service]
+    D["SK App (Azure Container Apps)"]
+
+    A -- "Initiates Request" --> C
+    C -- "Forwards Request" --> D
+    D -- "Processes & Returns Response" --> C
+    C -- "Routes Response" --> A
+
+    %% One-time call to fetch the manifest directly from the SK App
+    A -- "Fetch Manifest" --> D
+```
+
+### Implementation
+
+Please refer to the original [Bot Framework documentation](https://learn.microsoft.com/en-us/azure/bot-service/skill-implement-skill?view=azure-bot-service-4.0&tabs=python) for more details on how to implement the bot service skill and the custom API.
+
+> [!NOTE]
+> As of today, the Bot Framework SDK offers only `aiohttp` support for Python.
diff --git a/python/samples/demos/copilot_studio_skill/azure.yaml b/python/samples/demos/copilot_studio_skill/azure.yaml
new file mode 100644
index 000000000000..f786ce314506
--- /dev/null
+++ b/python/samples/demos/copilot_studio_skill/azure.yaml
@@ -0,0 +1,11 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/Azure/azure-dev/main/schemas/v1.0/azure.yaml.json
+
+name: azure-bot
+services:
+  api:
+    project: src/api
+    host: containerapp
+    language: python
+    docker:
+      path: dockerfile
+      remoteBuild: true
diff --git a/python/samples/demos/copilot_studio_skill/image.png b/python/samples/demos/copilot_studio_skill/image.png
new file mode 100644
index 0000000000000000000000000000000000000000..9afed1fae832fd66097f62d3dead8260e9d7deb0
GIT binary patch
literal 138093
[binary image data omitted]
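For reference, a minimal sketch of driving the `DirectLineAgent` added earlier in this patch from a plain asyncio script rather than Chainlit. It is not part of the patch itself; it assumes it is run from `python/samples/demos/copilot_studio_agent/src` so the local module import resolves, and that the same `BOT_SECRET` and `BOT_ENDPOINT` environment variables used by `chat.py` are set.

```python
# Minimal sketch (not part of this patch): exercise DirectLineAgent outside Chainlit.
# Assumes execution from the copilot_studio_agent/src folder and that BOT_SECRET and
# BOT_ENDPOINT are set, mirroring how chat.py constructs the agent.
import asyncio
import os

from direct_line_agent import DirectLineAgent
from semantic_kernel.contents.chat_history import ChatHistory


async def main() -> None:
    agent = DirectLineAgent(
        id="copilot_studio",
        name="copilot_studio",
        description="copilot_studio",
        bot_secret=os.getenv("BOT_SECRET"),
        bot_endpoint=os.getenv("BOT_ENDPOINT"),
    )
    history = ChatHistory()
    history.add_user_message("What can you help me with?")

    # get_response() drives the token/conversation/polling flow implemented in
    # _send_message() and returns the first converted ChatMessageContent.
    response = await agent.get_response(history=history)
    print(response.content)

    await agent.close()  # dispose of the underlying aiohttp session


if __name__ == "__main__":
    asyncio.run(main())
```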
zS+M!`eulxHK7Hz|o2EkZ%<;h3xbFPGkmc8;EkZ*dspU<|q}NSS%2h_ETyo0~pZ@kB zgt$S3p*k>Q0560rx|j?Vznn;{f4XrlL{81_&@!~*N%OjWrB%2F>lV1=-G7hj4MHd= zM0c-A&wQ7-Jfq94^4MSHHaq1SZc=4%L)5=CgZa826HVpW3`R-Em03_i0NGoa=@+iEmq@j_wnGsfacNGG$Du<5ctH(C6!su8C&cAeAeC02=6W(zFw@*%>G#)L|Q7khiDnr_+ zlzKAO9$h0gt7c{&H)iRKVnGADPJ9!GEGN$k`1LoHa#MXIqEz{zf0OTh+Mj8X#HFL3 z&wdCpzg1TcI63JoW>^Jb(|ulMJ}snbGpfV6R?uiZsuQ8r;EqbO+8Vde730kvp(Y|C zlF@NjC>k4h0_S;2e*v;=+%UrJ`c2K9z|cWIT%q@#Sid$Cy6Qy*F1?H{&grwl^W+r- z)xNiMk)Pq5@|`fGF(~JSp9kyIKb>;oTQ=goIGQE2XvC`6H!Gj9MLWmlb3Awv!*0T4 zUjO{;xW~2StAg4m$IVL78vQ2>Rcp__R8QaRJhM+z-ibWkeLZ14>tAs+<2NYtXOqh7 zoJBM8pRZ&{56m8v_e)!oJ_b(;``F(8s?-x5k4m$L@9@UNu9r#WeE5iQao51$*8Z?Y z`gztP1J%6R6T>g$0_r9$aki5_lmVRAL`f`eY#>${O22@Q{_>^GdlRGG)k19copGhGBMiX zi`{X|HfxGsNO^`%_a*8~JcGXP&IX1uo7clgtbe)j(hHGk)#1Vv3oNl*1WvRkjv5X# zBt{}x^KXL9Ba1&?P%w+*>X4r;pl+G|){};^@ljDzNc9oj;NW1zdD*ym$JB+F^UclV za1&5f3|^-!O&cw)yQN# z!Za{2@T=xtj+j`qCtT9*KYpmzyGmN`ogx3$*xjd-A>y(1bD{w_a-P z1YS1wu(*DAn2!{CkkBv9gXN0Y`u?WQK}AW)CtWlUYy}IGa|@=bMe|qK$9O)+nSLKU zW^ll1e}8zXN?2n*$?a%nx2Mp_{znw`3yjmR)mx3mYrBWG+ah0CZW)fV)4@uKSIm#c zL(SWWLz&>cx7QAr9FJTC8<~^n&hOO~&h$%DYbH1!G-ErnBy6C-TE5uhZ`NI+OuAlP zKRcb;%$R>;+;2oYVb?ENq1ANjb$lPwb(@{AHs}R(u=ZwsJxDxanL8)tW*L(9lMl;Q6(rO*DrT|Q5UY7KtN0k&EngZw-=)-4!l&!_4&o!9&s&b! zqEDQ9dQd^5dACO(5KvFx6WD-n*%pvdM}SJ1|Df>Bcr4UcU_xxQ*xB3;uqjCd!+K?a zh?Lv$_Rm*@KpoM|G{2jPphz3L5ZIg^s<_cF3e` zY-|uJnJ>n5V;%q^s=Zlr0MXBIo6e9`a!yT{%j=Vq)Dmiqmq>i1PKchE^WLD%68M)pq!RSiNpRFy0{K$RMg{rSH43=C^8LJ8Puysj1ViHu=I&a46Hx$}p|Mb6c|ZoA+1OG@OnfV{?@-<-aXk zF2#C@w4*d7WWF|axy4s|Iqmr60T*tvY4zRly`9Tx3bck>6p!1|1tjHSwwm|x3g^Cw zRP1_=*OYmjkbNd9muwzTx$t%_v+7PCOw%F$Qg=exS(ScUZ_`S*kB##KV&e(d$x`w* zpmwwIjvEa$>9N;f6&EL~76ANyK}UXm6YecD8UIoza|osRA3+pEby7ss7klXf6v1Me zdrQ3)0QQ)`p4U5HKicb;o(0%fzto%DAHK6=c?JlWTs}j1SVV+22)OvmRgKG6QEbJ% zFNsr~NX^er_C@R`GbA~X_X7D22!XXM)=P(H3L3E+fhz?*mI(h9EJ`L`+2wlNp|J(( zkJ%rTflW|7Bf(h1I**v?riw9kpE}>LtvEmM()KkUcq^KUd4%qIzN%rd^vyfT@o>Y? 
zCx~?@U&R}Nwc48`B4J$H9KKZBMm1tx+*L3yukuYKxC_^$olda;HmvH*wx%j&mpi?Z zop*3k)l)&k85Q^po1u}>b}I%${bGV^ybMlzBHzLD(}VRwr(;W}X>|n?6M8BqZWI`k zo!R34+nry51>g-5+coB+rN%&+vGbkg#e~^l(ThBG$AO`t_YL3oc10%b7fC074Fw8F zwM6N~#y?{*n)^Y_N285v)&QG*n0Keotv=Th3@8rOY3j8anMnaa)J*i zNNIQ>7e2;ut#5NZ5^fJHJu<-PI63hg3p=3Q2-nd$!`F^96LzW6L=PWeul9OvdTG;u zTgt_1G?deDIOXgu$uTNNZ%}ALOrYizJVp}DUvv7aCC2o-fAUMMfgizyT!!~f_BN99 z!tX;={Z&ZjZhDp^>?77be8f51O~1d7OZ@m751Bju{jS1VmzwhK0ypYJNAC^ADlPhh zwXgghuWXTTi$G@6T8y)80oeS7-DFI2eXMlEelZS07VYWppGKSyk^S*b>93>Qarlp5 z)bV!jUpbPDeh8H`2c_nfPwK{)c)fV&rqdBV&FFfJadB98IqgFozjgCGBqGMFwo?Bl zUfGx-Z8gX@8QHssKEEK)$t@duWx{Zq8@}R+!B~9b~Q;s zJIF^4bX{9VS)gE_;IgXDeSJ-I=MZ5!>0K9z_@igD4@uP{2m>Be`{MkCmGR3OcU2 zG1eXR5&soxa(qy=$@C-6;9_qnUAV_HWVLGjV0-`<?(YMRQT(#kj9?f0EnrVC*_ZIDLJU zK&|(M%3k9~`qOk(?+pa8hp+E#Ofp`=OF;aB#2!J#xaLfg58<;W`|YQ%W7x4MMFW}y z(MUbJIHq`0@ohnN3Zs!V2Z8s^_41-(HLvPtt+OM%G--Cis;3#0#DT@#+!FMu7GnnK zT74TtdQpkB9O zoF|8($$CBUlMZbx?X^c|R%?h32AW|uE8Ug+KCCfqoHc%Z+rKi+#CVrMHQ2&?N(cTK zUm+PS{!P;%)I)MU7&R{s*%ZB^JL3Xfi#?;Oo1XWkKS~VztpbiD5Wjv_Nt2(Q`CabD zU9j#_8{=v&MEYd*r@f!U;^3riC+EWD2zc-*v)^6v`LAp`pjyN9Z{(^=3UVFKg))Qk6jYjm zoQttmpxn-@T!;>{@=~*#{;o78LS!2cvp#)zth|%xdoIB{;J`efr?$1p2lxkPHfe3ir(}O_4u=1Y^B4au>KSkU#j8%MCn0spjaRQJO8EWZLLZ zVmNr4c*yLz$+}^Q@!;(k(=sA%>oreoQl2Ehw`moY(Q?I~@~jnHcQ{z{IVHD)sUhYz z{WF5ry*a`eH3ERDGF>)qCK0v22Uxy#o%5mOsY8Mf5^X&I!lU_)2<@T#ItPtUVJLsW zgPv#@7cHkjb>`UGcCRB9+j$v-*L8<~#D4b7_49~Xxy#Yv_@#VIX*J+6M6PlC zm;zmA=*cewnj|@jNDBlcVK}!f|m3GA>E}tSqGjY{yhcD$-6^_`=`UAFe zM!BBt*4A+yhxa3}QR$QP1srmIk{%#N*o;t|Qa7e>60#dd9+v)i4WzW2)UoF7W2#}0 z>`}4TCt)$=e0as1K{WZEZdBn!s5{V{?4jH6V+e&9jAlJ{FfBwao3J!uHS4`btB_N_ zqazV4_NYMf!l^TopHw#eb<1~Dj36;i4SyO&M$wouli-Z})~t8gKs8H84a(qVRxif^)jm1*KtO>^{XSaEh^XyipGba_ z2QHVVQ`xEYX8;JG(tpBww`-}&xiNp{09nguia6GtV0wSUC2rrST_;3*!}+(x`D2!* z%xib+E)zvWBrqooVUr0vg0D=r*d1phn(H)+>x|v732Yi>6L{Tn5hkC6ZHyytC#)lg z{qoeR92ReMl4t)LC0-AG>hXQma?H`mI> zO(oyA5L5UIcF5EBrWLW_mid>i4gnT~s)(>gSAJLa0w~@EfEuSeA<*1k9 z0=_2b7f#<>rp8lZ+zXlo-`?pwW}kZB5tgcbgC$yw(}ujH>TtDt2mWJDLsZUn`AzN9 zC|=ZZAU^N>>ZfP2nBwC_s|;0ki+$Xn_<5_Ow0vGOzk$9xn9t}n z4=OhC(-`x*TKI`U`gQSgiz<_HNgg*>9i7y2^_^jga#@$QK1RWludL2bW8Ni-dJir2 z=Xxmb==WD&!?|$z>%=rgqZMb|+Q6@iXGKqX;Z}F)Xct61m>VMMQp!>j5T69zNc|}%5;3aTEoAS{SFl*Ny>z*)pV&FhPIxD z3e+E3xztyu?ryh~n>nmsFDLn^PQ3p;MQKWNPC4Lq{C=l%$<;K zbn4iTAg^P9Y|YTky-KHQ-hgU%h_ton=33l(wC(u;WXyMw$NiX{aXiEr3q7sXTvi3v zqcUL;3W<`duLi0FN7Xe}M}#yywV zRW;L(fzyD_bHCNz!FJ4on#6wYZN*AEt;K3?`TH3?yh7vAXLRb-gb7k-_sS&6Fw*p(OGg%HifijsP4OWNz;{079F?1%Y-kv4bWGkV5 zCl-Ubh^4{YY1-SMEjA+!%Kt?2+e)RDpT-83a+Hd10s_%|vcC%83up&Ga|*~qB9er? 
z0~AXwQ5Sj=0Jji& zCf7;c93@$d@v^t_@?Jx^D%$;B4vC<1HX72BE;(*vDEn7dR@P5pq8k*td9^Es)h8>o zgkW#&=7L3i4`|)VosaH9f^q%%(@f01yCFC(iUs%3_%2Ww#{b%0lB4hdYgqz7V7LZ#l9!GL94@gYv}0gX!b*_ zeao_9trJ&X#muW%=kC#!r-gH^*wvLNOV~+VX)uA&Qj_Ys|7^QuBQm0`x3KK5j6?TTJBgLsh5k=8U|&7U%@f+JJu* zYlENqeFAmMgk=Xkc;{dYs{R2}A(Z~Y1I}1K4_S67E?pGbaefSrP9)`}rIwZ9534(x zWSnwFwCdh-4)zBZ#w6$G%?~uP$7DS)*=__Uh8%Eafc{W3fd)`wV>R3;Jz3R!&y9lI z9rB(Di7j8rV&Ya~>gk$IZ-+r+kjP>H&S{eGh1c}5A(B~p3u+^QnxP5mw*dV%(ROok z#ZYOvFBL??rHl3U_NuxIn#oy6;Cf&P)CB_M{#}ePKvEy@DH7Rqsr|7usW@NoV&bS} zdM;ENDt&Xt=DABmY?hp69TD}ot=Pp2hdhAG zY60qjwm|JTAld3TH*R;+V9q znF47&j7Xwwn9u3RTuwnjd#uEOCW09Bbs%Txm*kkvEuZ|pHQl%k%xUNRz~-gQ<;6L( zLEoc#WPt6YJ=*!{;bGa^GOlAA$iX0y5HRjMTB!P>DW{2=YxUR@-Z_uyFpe`m{Cc7o zYI9sS5&oJb^_j`;OtW7Nk$h2C)R2cIl0~<3YmEAJ=cD0V*cP_A{{O_&D3pZS=+^-us?BjAW`he%Th zB!hlk^TDB^nS{$TYDj4{!*2)Il{O2~-=1lvSqF4LypneUB@1vvbck0%ElLcA3K-Pm zWPaati^RMh;%mexZ&VpMOx=F|J|-a(3~C|*y~Kw`HqGw34QK)KO*-vLI-Tzi z!#-NfwrKg$mu+>=7wY#u1X{;E=*ndFb9X@@!fywp7?eYsnw@r9LT>9P&<#O@wSk!T zXuFl%0!S*q0O3E(I|Xgm`n#eeZlGcMexWm}V#b#iIC8~L7E7pNX!aw3>>CMqA|%X% z&7FmW7AVfZkbtAd2TL9(!bJ6uq!i5#0zo)b3tVu3STxk0AmWt053o#z|luG{`44uv)cevOGpBT zq(89wrLod%j^u~^+b_tHK}YjDR40HoHv6ozlD>QFy#%NqyDdrzyAJ8PDQ*zL&g$`c ziBr}!eNN76yl_5fY+2_JvRC42?`SWyIQ1`ZO*m(o$yj9q>DKP-OY1`cG{t9r1dLKd ziXVAy4I$u0d)BVgS%7wcS_@J#A*7lOu)4V)GX4%M4XPULuFC;29%TaMt5b-Um%ov;sNNFljAp|C{=RMM?Q4n@#^$>5!(1a5aEDN8=@uXXex?xi`t{A5 z>^pcGi*}myUuiL>JGu`(Jsx-dx?ULW#C3mvo0`OCL<=Y@7zpd-88wZx@$h8?0ll(k zpuh!Tg!)S7&m54IKwH;`GLRP+v_%>~)%FSGl}+YwvpI#E*;!dzVB|v!=-!;$O?b@q zJ98x$`BL2JBjCJwzOr{v{Cj|b4!}k@S~;eiJOC_F&lhG-CdjB%+2nHE$*W=nnf;E? zjz}5`(^ZqbZ+Xzf2*Tg(l?3(gC3TW7yFBW?Ei_Cqt(%2j9n-+BEqRMp_ba{wV`I2H zBr_+c3Dm?jqm;#ZJuTobh_`ojza4)E7Uix~tZVPVx5JhvpQ}t`vCU0Wu0YJgt!DwURl~?Il7sWYTHB2hGwzO&JhbKY`zT zvC^|GHAO1deD*d)f?~#K6X-3<-^`|JC+SF{*alK12tcH;n=B~4u|E8|{Aa(@g>P~X zLYrI#U-bebAv5`|=d{4>dco;ypbO}4$&Y}Vg3v%poy&Ovm{f3FlqCic41)in#Chwh zf_ek=Y#`33KVHWc%X}P~f6AmmV)=u@ZaFmqc1WA(?%>+4T>=E*Np8ex$khmg;ng&j zh&$%5AC)%HFi>YaA8~rT!_&O#omY@2av-hopr)`H`vB-X+CqmE)a@SwU1i|Qbrm&) zHPq>-Q6hqdi5P13WXyg&C11SZp?eA?KoFt$8m{MT(3lJ4C%{+rCvQIa@G|18%Xm>{ zWwe+YG|WDMUZdk;IXCc=LZI2CV5uI7=rV&X9E%y;oAG0O51goCyM=ggQ1(aJhCj#T zmHz?>x&a8{i>2j2eu3tjx_NhBV~TT=hsH(#YM$Q&96Z`mP4FjDA&ShX%^A4$eyrxo zthBAm6Qqf3zDhUx5E$elaaj5Wlm)1V0^Qt>rD2CJW@R=|;DAgUT9awbzC5Ty-f#VC z$jHjtF|>@&zc@d2T)dH;le1@f3gpWy^-|H=jYcQPQc#SVP{o0L-hLGI%61{qK?9%o z{{5~2sy?xTP2kU-%|#|E91>s#P3*1}7@=B#Kliyq>XZ{KlLE9NBg5nD$WMvybd16x z&@P!{K0xHVk?$)Eh`wwjy=|ZKR57l zR~$9OodsZ_kn3pAYoO73Nz089oe}@u1|Ui9|FO znT(fU|G5Aw5!f@xsSO^Ozo15ywcT0``rq9w=zW8f@~hanysduHe~uy2+&ufT7j{bv zaWzNw_l8(dcV^&-|N9>O56^xDTYs5PxqN5m!g z|IGt^P>;!uUU~iVtd#%3?2yZn%n#FFJw$&M!^#`|RZZS)hf+II>G z1@s;6CgTN~4PO2Q*oq$$5aVPMHC;AUB3+65X)90DxPAD_961ap%1}B2t0MaCb7a{S zX5D+QZ}54(kQzB3FA&I9|H-;mM@2Aj{thnW9_ox%f*k%v6(O;#7Fu@7&4uPYf0mj< zwkn4MGtAmAh>r$J4YamL@mtN3DQcR7)N4=j6K;)aXcMWaV~9B_2Etd=i;t`Y#~QhP zC|9tPl7{>S2g8c0lEtcKcGASg8XW{>C-0qDFQ>EG4)=4h7~eLm$|F-yU>7`?3I)wCf+kX#up5c95G<`g6uT2u^zQESm?Ujo8jJ~APQEHq)?lr zySi=rf}v{AL4lN5vh3~Ca;;0Y%EXQQtkCx_kSKBk4&QjK+BK`%in~g7=DAYV#Ha!p zQh6-lY?5$}PZ?@%5X=?+)nSnp=2N5+-Tj)${u$Qtj`H&{K90fr3jK0{@7fowKC#@^jxdY)|aZ#xbA6QCnl&UF!x0wfh4w-H(dm+cm z30QTkSQ#<`(-Nhk$-Iz_yFzK2miN2Cz*v*JF~^eB(i*oq7mxlbD5KJ zYLADxncr6-3H#=|f=X6-hiGDm`%w3LGq@@I1EBS-QLkL&IOMQMpXRrrqx*Iq8Jfdw z!-g|=>@ZfN!*D1E4pO-37NcL2Ey1+1TA%7zd2s200_Z)8M#hIn?WP9YEDbYKC)sJw z8G80tj6xF0Ry)rgG?!!akqwE{ByD_rYFHDjW^>QKBj!_>cB=$6GViZ>^*Hq@mSQ6)!XgBUVuGP!B^!@R48FoL1f;e%Q> z_WX`Txgvbu2lz~o-Cy|*LWt^-h=T&M)v&I?3QgO$_HIpp*) z6~BTnGr||-Ja{tpy^Ix;E7Y3hsDmkSii1o?&RGPe*lpSuJb^ouWJ$fkY228$r;uf` 
ze*@)JTntODAU3U}HT?Nuinw&Vnks^41N{lL1L-titIV?ULX6^?$i{1kg%Q<6A%Cwt zaRW;*F6d?J=0z?-%T>N6(O|??k|E9rvIw;#`4mqqW7MAlom-*rz>ka!Tmas8z& zWBRUQx^Z3$YBI69H7+jC*~X>U`S={FWaEX2r64ll9d$E!l#Gmhp~`5}K~m$01FtC6 zybevL@F@9ORY_Ss8XeJR_SQ7WyQM}lpU194)FenTsL>)P^XG`#Pe8Ti+L%&0tSUd*@W=3!QF^S-Nj7xti1PG zp|OQX)1hxe_aiuXK;&r06;(bU_(HbTD=juhcwiRV;gYn`lhw+7zA@!>K0N0+_v&Qh z2}_5J+R*N%Vf2b=yZ3!@?2gKmwqi0Sxj=aP!oo1ul_MPqrS(=NZtJ=?$L%Tbxv9oy z4-VfGtjS>6HNu=?0GLOe`WfMobt9Lfs3Rc7!XEF@I@X4&{}cqU1D~u%`%~2xJ1f!> z%T`w`;9paySp`kQr4Mav%hmoFpe~p=wz(X-w|_@y8(JD}${mh`#T%5(Zq*LG(6R`2 z`!*^w^Xx$G;7Gw(@q00d?nCNzJ~dZFo_SieQrOMAK1D_SHj)4NJNKd46~kAf&VQv3 z#)UOlhw8;}ePw^LnNIeXjwp-$QCJM|l^KbNe(&lvGBga4Z)TQ_P?X|bL+~m{ILfIH z&~Q+L`xtp8g55oBtVu2bo|>H;!xFsruQ)jja_g(_e^P) zy|sUZB|HP(Jm2MC;=TXJZ_y@BZ_#k37F-eoTVepf<)%1(sq2IaAN3~&4vLgKE_nE{ z(K2szZky|QWS{xRA2CKUZ^o`nNaSRZ{1{tJmUr<+D0OZSLr*iblVm?39|2h}uR=42 z7H1<-%J;60w}ddXNE#hm(GZ*FdC7oYWXCdqe6tf)hI3s2Ufgy3JCFsL z_El&d9{NFxE|aTg^ior4F#7m)c|nnAiybJNg;e|3%g%R#%LCqIbV`qiP@qTrSz4c zC!~lniWd>K@fqN3#yY}55#%>4b1J|DHs{P=KfOQmQ>3;BovNp_1#pld5OjV|=dUob z8wYw~-Zy0C27l~W^Fqev1;{Ey1~{vQQgtk)m@I{_$@rmM@;hz*M@S~4Ia!CKd288j zR|nBV@+v?$8)Ca-A7q%+^vy5K$~~&DN|ACm5MIk5|F?h*Za0O86=m z@Ab_~29|JC?CL%1Uh`FHk3j@6h+Z=o=3>da+B;RX$IIx_K{50FHAtk%usr}(;PHlL zSr9w;2dK`)&3Rz9iIE^agLw4!sD6OHyz^cSP^iB{u{u#oY>kP8{58XA6xd?Ep|}1Y~ z!}!ZSt3ux8Mcvc%${@vGPSR16dBDoyz6j+A2Sr}<}vRdF~(R~v&vEABn zE_yOisyzL#e*C;70(bqYy86g+P7A*e%iFuN=g>l8RMj72C7CTF6j<=^rvsEJ2Y%m| zbwMyk%tx4jX7R>uwD!i+$|3R4RvYr0fYRlNRV}6=X8!o9qsklVM>d{({5SU8)3h$x z0ki7?ZC_UzTd^n<{DFDpy*zOchX2J???7CY>=|k^6ow(TA%R?vfDs>_=L=_DS@6?; zI4K~j2EI>tK0FWv3e=~u~+Yrs#kt*}RMtR>S!b?qPKr%P@N(@5M-wEO>_!DcFScbGM znI69In?BOC91R?p6@0^}|KPwC!kIj7nwJBl5Jj<2(|sZC7=4u#d$@ce?GOG8DJA}kSY+WdX)#q%x zQ2t2f%fQ3s!E8kXnK6wF2c$g9OKOo<|A@rMe~&~sTk%y!46!vXb^FT;nIu517_Sz; znu4?fY?)&jWR*Tt040Db@jcJ@oendA^|S$MVr`s7ePOAa33t(pj^Ztf((jy*c{AP! 
z($rIN=}7kpx`Yi<>asF5QztAD=Eubc1Mm-?$5g-8`YufiLydXHN$4sF>o*2z~N~y#9X~h$V>|0`m)5S0LJ- zi(vrg7`%d049wqt^Ci&UUxeA(?VWE@Sfnpy^$02ugDLnL^v=`Vv+;GjG~54H9oeyocYHjk`x;&*^2|I8C{` z`l%unB4qo$B!K6%7F6li08p(ioW4RknmIvws(Zo&p1jnNuLj^UJ3q2BgBZ+Srtsyo za_d+LBN8|K0aG2(;^dE1Jz&|lyEA|h4RHZ?0IO1B7j3m3?O&KeJ5r&DWTO0!hy<9q zk#^_jzk>4dE3isiDJFY{Q0lJ*;N8g-nZwxi{u&+;=Wh1F>mVpAc%s7-_us?WQr9*z z`tmBO4VV#*Tyd@3^OA8bsZ&>u+gSimRSRIxeOW#nF%E+XbGt1lGeR}Up%-M)5k6kV z54Wjs{sbliS}K(gvo#86(4JA`zXYkV#UEb&uO-%IunhUlX$pVF2hA_Qa7s0IJDHm5_HW+opwU zQH8fPrb7&_87k(PAL4`tnR4}58XC5vb7=OGb z22@%^!N2Pbkh9>;sBkl;p~}Oc_v!Zn$p2Un4gRl`2!~SQ-+_2U_S{l({v&P_U^cN( zf#oL7pBBd1JaJ#%xyD4#u^Veel6!Dbm2J@B`Qb*`TF>Z9zz)!vXE&CV!A6bhh=O7| z?d2Hp#4!tzo=r3Tyrha)1f@@jH(d7{h(|auFd(R^f~IBChuCPQRoxiTMG%p9l!f>0 zT}c4{vfj1e4wQ7Cw`*z*R_<)~=LUrUto8#~_2d3cH{Rk8pwn<;wh{n%$sm(A5p3`$ldnR1*MxY6JKw&3uQI*l4di;xa5I8|-5B33;7TYlA8i)Q_m5WL^)z@~EJy2oxJoB`Rw5hB_7G#T|&*R5SeuPve0wz)AMV zLn-IU!N#xGOnsb78XlZ0wWCZ3$I7cp8Dca^EE#w*)`S6!R1xQI_|!AVR_C_e?$M9h zzKOc4YgrjK$wou2JNK&~wk~YYj~fduvbvJ$6YuYfhdR+wh)j`gpTK{p${xl>Qy+5E zpIT-0zIzRqxjnK$DJc5Wp%4)*V`FP=a5y1nt2$XCR@Hq~LAH0Y45n(N^5w8JUKJLQ zTOx2$fOk!A{TlycwN0$S|Ei$k%^Q#mhwQxPOX-Ng`a?YnU#k~HDY}p4 zWdrgZxzhBR_cgSX>EA+`P&6@FZdPbtW5Ma~YLMgiS~$r&Z$B@z_2$8Q?Hv8A5&$Kx z-${cM3i4Ioqam-q?LmETA{H_DL|F9)2S?;>z!c{K5!|A4CwG z|EadCm(iE|68hpdu+tn`{?qvguAJ|8X>*kU05;ZKVIw1EQ7ZR1fBLM4bA?S`!qh z1IbhL+iI_Oe0lmO2^0teH2hnwi0MP2GRqT(>X=5vwJT2#omZ6cz2|7lI+ia?g$ur& zweNESHop6%Czt|R_10^#UNtVr+W0UyiBi8N6LLzNdr%snxk>{Z^4Fd#DG||i?C|zG z(9oE*3nBKhAygT;H(;Z76?Q$>`}`kpknlS}(gjpk)_mhOa3Hk7>6s04Z%e)^2Wt&F zJ!J2`Swr?dVQDZPfB7)D9)WNeP@yl#t3Bd2ogFg7pzvHOG|}uDD}BP4zjd?87vKI6 z7wB@epiul1${hAU6M+;@K(3zH%IJ#OZ(v>M9U8ObI@x2>@FcY*ODqRusD%m*kO`*{ zKf9{ZuHD_8RnzXhhoA_ta(}En&aOG0p;m`tw1z*nKZ6Z*JhU>@it$1BM^j0sfG(x0 zT87W*wxQz)h)>^4sV_eoAEk6Ce*CY85KUf5*<)Jck0*GB8rq6i`FQy%{ZP-i!a9j1 z*VDpL%1*C!9pEowDcK#4t=}-_al?WW2R)&kL;*THxLW7dH2IgRoGFHQJ*`3`jd@PirOL(!92G+q8zKpgrX*K`Gr2gJ;_~}lxf09 zWDP&odbIUsO36VAV!eOeI4@1rqc3k)i0ao`CzWV-p12Y)5nnU1F1S$Y7DdwwSzhIE z6nr`i^S*Za;7QYQ^V-`=`4x0e?A|t# zh`7k1c)4;X#MdqWYr<4c4h7aWX&EH(9a7m9bgz&Sr5ocwc@KLS9EaCgW&&opPfi?i zH0sDobxa1{`QCOR4Y`9GK2IzE4Scck2_vKJVqF1EGqOI(rCggiYb#`va+Q>mA*)b@|K4sM03O0RapQic)=x0Kf-+lxpxET0y z^k-$4?^;*MB-j|+-U01$Y%uQ9IObbMV?;mo&sk~|{54OZw44*0h zWRDI~_?^=$zn}t{=ue5_T!IFd0glhugAwK4hT&nHiCwxeWiJJQQ|jIYZ1f3hbBxl$ zM2-{PkDm*4WpQsV=)ewO+w4^7Z`vtA;)RVC_cnnh=nKO01{h!P=Jt#Xa}YC9+o;Qx7moWI1IOb(i3!s4k$ISOtjMXNG zt6b)RVK83XfCMaX_i%&x1t|hH5CCh-ohRaEI4TMgJgAbK5vcN}rbKFLui@l=;{;l2 zCo)juq2K2x!;|QRPl8BaIWx|ekTV#mg_=^&&xDgJ^s7N%3psWiQ?`R|Brkjg^&^0-JT^9J5J5wbYDX!xZQgTA4~knBZ%%FGNI8 z+uPeUkL|YA%*O35(~pl#FJ#Ut%!#0>Mp%K>VZg{YN6|HexG2d;Hcz{gYQ)`f`}{<;G4k{_jiQWK(FU6xf&92W`Wv+4Dh3UB; zLoKkNf*Voj>+V`r#+iAz zFiNpMV*gA|ieQyc*e(Te!x;il&u8N?5ZH#h6$(CB#i45;#s z4mmjApLT6cSJpKiAh}ZpLK(~ZTjI7FxX`A9OgNfT%_P6GT%JGHsZmT#H5eYeZ;MF$ z#+O6D9(U-myl(1{o^e+?7m5tz(~4}rQ(uZoNsDD3KsWwGM|A7meYV8znsJgbwq=|5 z%8oL~on?7;&`iRK+FEYt8y!uuv}52Yohf<(XN2lP0-;-OJUiOTVxgcJ?{rm@792&P z%a>Z;KX6{-Bk6Ty;oUoRGvuOmb=sGx=F#To=fBk4)7Mz$RiZveKdztphY|*Iqs*-^ zG^YoKQ0IfDibqHpDISHcW4Yc*Qy1SFM#_@mE&V4=se6iXMDf1fi3K)$FYt09MrJu6 z#map-UjBQf33ZUz2xQ-sIdRe(p;qqb__#LLy!n~2>vEMn&>c%s<^o51sQy+b z{2&3(*e}SVf(B`Qngt6I!&bT&txORUPRFitAShD<9C%B%yQ&|Y6`0eIaXKVEgDniO z98b)bn)5dn^p=UROt=ofocPKgOm*jXSJcLLk{M~Xp{gpK{mA-3g7@yd$zRwzmQZ;>l=ysS!D*wJIWd!5oV z2sbq>(7l_hL9;T!_eJF#IdOdmTm4=$dm3X|;Rk=jO)zYXy2!Z{|M+n;bysFXc-)v~ z0zJnoZuOO2k0^|S#l}s|wemW`d*})qDc4=hP z7ujTZ**lkqpf$}AI~#I3>gemrVuoI}GQggy7D0Yq4b!VRBD%S^2&9>a-yfj5a$LK1Kmt&N>Bz*_Vy{V*8l+AH*54+aA4#bdp?YIMtbSv;BUO8N$ 
zTQuVBu*1+WRRWA<6B>yNJ9}UM>?w&vX9*0-1KLJs@2}Gv!tLhgE;<>;TGPI^cy`)P z6DDp^SiA`E#yVa4-Q8!Cq3F)MCDvR_8}}#BH(rGo77iV z54S!;EnAtuzl>8zCb1-5;XdTQV;eZD#b>pkd>AE8zUFPA1bO&}x2D2x4o{w+P^A#C z#@$+XPs-yciC$^T_1`q~O)X2W97pdXi&_i{mkwlzBYF`CUW;c7<0SS()Ujx}6kS*+DAOE0A=6Yta|1grnj7*AWowXVaM}c3h>Pr#hSLaqr zC@8GW<4@(&8YkSH(lB4yO?a=S#>anRPa)-c!GgKgi>fU(ModrJ{&m%YK{i64kk#r< zVBK9iue1;1r}b(Iz<7mHw(o@$BSgloS6Y$^BFArzmIbN*w0>BE;d}Eyrb*sGa8W%0 zqGGo1*~^^~ab1I9MsLR8>OXG8#v$i^jQ%DEXqL9b)sNez`Rg7H%EsOi5)%)pi_&a) zEa0@0?cc6jzj>-yp)GWcdQ;VF*ehbTbb6lCp!L}VKfl|oZNbm1@5k*QC?I+SO*Ha? z8RaAJC3s(GWDtB2r4@YN@-z5xo;zqH<{usILf^~;v&hfkr4BMiV63~I3tM?|yP)CJ zbl`3%0fU4A`9LvoJg=7Y>)6s;t$e%aQjRW3JTUeVQkX;yS^amM-H4>km!$n;l+v_S zJmeLp8CTqshxuEBl2m`@T?hiG!SW)X&D$l(uVzmCnmpS{)Uw3RwnqULI%)=a` z!jA;A4ACq6cock!+&n6B*gZPw@~Zf$8>#;fVecJ}_5S~l6B%U`Az3MrLR$7H${wMz zcO}_-WfUb16p>xXNLKb`lTbo-xX9`{pSXMaRn#4zBIl;|?Dq)cnKu5pd6Rk_CNOT5PPWo2b|(NnS%#$Ccb7DIfq zsTjArI-=wPUOW%Ji;MW$)rtLjy!9=|(?2i$^>;rF4(`^hM|t?N*HzfS@O15iPhaR~ z@NW~i4b&`;0i`wq7+URb5Yyh?X*`#2{gaM?f#Kxo(@sVKJb#L4cLI4T;)4>h#lBc7 zh;91_IuSR!n;s)u9)F5^OzT>)Cu(Tby7miWQN_dk%l$Okv!$}XM+&NU#{|23=y^YN zwD_{pbUjpnRqZRpnk%^$nbr~xfLPPD<3C5G*WI+9^gcT%+1VFIF7D%rbJqO2L?@GYMUq{w$8>}y4SOvR=lkZh^QMbPPB-^q z-)&As9FbObjp%^X3O2hZRKXJvGeKFQ1?AUlW8f8TOR|Uu~{qr${DZ zLfHqMEn>NfQj6bgo+;xg5~{G$w9W4o(9iC05hzcC#n2LwxGIo0pUN$y%x8r?V_ zHFP-%5m3uPU&a$Vqt0r>A&2DFVwq9@up!_XU*{kJaxFpPTsE4wbOm&vXVirULwXz7q7G zN9_HUYM}zL`7@iWy1o*tt2_w+d~jg}kFQSIk}chL7#Ct>%qElC?)<_}{@PU5P?rBC zk?oct^Or3%^FQ}~h1!g4kZJ>D9wgt=zZ3Z(n&|fCT94T?{?C-^PZgCIJDV6KhK`a{wwoCIKw-`NkREtgd!L z`)#IyF#UEz)$UtK4GBx?mPv5#LM9Mp0%X={vnWq7yOFrOxE+8@*pxv}`sV zR?aO>L|0b!y+IUpL_c`AcRwhi?I+SHb+!50eOMTD%uwnCvQA95kni^W*&QTrhSWc_ z)I|4!kdp4#Pe4TSm^!HW)p>S+E$`0v8XzIImX|sS0*x!tY zbm^hwA@-w~PfmT^mpf!EvH$T~_@vrW0eQ1qY-MBup6yyziq*i2aQ&oW{mA7831vM_ zN#BX4O%w)M{=4A%HnGUnVz6TCBo&9s4oznqDT$w!TY>$c>FtxJNu<`_4`HM?>*S_> zSCIrp4voe=TWQQh{Ay5D`2%d;Aq4avZ47hR*;*L^BF{n>Gb8~JJw!x16HvKu2f3ZO z@uU#XC6HP&0}Y-kIJXDDjXAKiv;;Wunx0&s3UslooH!*x5J6gypm(?Nl$jRR(7atetVi4^VQ%Z-~bh zFGMJIhi7HB>s7-$q zSZT9@ULg$C0&nZ~fS}|Y@TnM(ydRRWLDUi7_4b5oMn-skZ9^9ZL}ft=sfQ)p1(7h) zJD_Ah!S`q#N5T2>c>M_{+pY?_GjX3dpec4MUJAvwrdoO3pKHIb61&9W+LK0NEA1?N z-*JG`eQ4A>M|Y#kGLkm-rY-*d+ksa0uKm#v!N_QDLK$0&xnZx(v>$&*i0`Rv|92Qf zILk~uOngv&AgwPHpUyvQ#4-9EI(kPT!dUUR(&}P=+fU|&u~y9V;?r-59p%@jObrc9 zsx^C^e{B_=DsSWFVn?A-6iFt*uYt=+mg*N>z z-G=T!-9wTWixcfU@6{-HSubAv*81s&GKgFNy|>AGr1e_l2)8xvqA#F!5quU_7ou@t zBEwSG3ewQd6~M8#GgqHDhc)isEy>E7 z=C}hslj++WH7QEgxTDKRgbOp_QO!!J^sM-qL_YUjf<^eb@=;R~N|s_DgFOj1fA(Os zws(xo7m%tYQoRCghB=^#Frl}Lxcuh9qGXwR1v0>HLh`WpL4{)#IFbat2E@}tvX~$d zC^Xr5P-b6{r-G7$sF6^9Dkgp4SFYOHFQbu9hb5O}w=euhOt@TbKhbi(1KiXFt(2lRe)m~;&PqbO-g$VK|uuH{thfU02Fas|;+pXx9Dt_PEw$PIyKgwE_EWlc#r(@-$h8*k4u0UMslUOm9K@kHdWR$1DskdkhG z)Oi3JJFG|qv@6T-84a7*FHmBV6sTAo4mS?s3OE!WeFEO>?KYL1`<3fLtO=@kwo2?j z!?L3@rg5QP-6XI(laE0_)rP-_eH>U15E0d0v&B8}{>|+>eLc zH9zF&x8+Vw&M2(c720soHTs*(bl#Y+390;9JU&CpN8~GgDwUc2OMZE05ET;EJ+;20wUv#EyA&(PYs~dZPrq2j?qTUt1pQxhHE`ix&*L z0H7|$fd2kMdIhfO23Sop5{Bw0mZn0_(W`_;j=UH{tMm3at~e^;CcRCu5k zpRxo%?s@lPMOOl$HG>}{#Mm?f532fbm4kwX!8I`ITxyJUQBnU6%sj59?O-OMZwnoA ze?zWrd9K8h!LiKtF2cZxmwq!_Oy04WfBtN@SsoMG3wx#A3MukfVcY(D2@2S$g;v7K z(~HN#sV}wfXTNH|BCJzbsZ@ev4Z}VmRNm84-&%F7n(<)JV@z{<`*k;4DH!RrC5^XG zcIgmSLFfq0VKf?u;D%hPnRKIfHk|w7ew>(rhLkskzHM(o(G9|zLGT`UGc+)LQ=o^} zVnEizuzi;fM`}fdm>!hMF5k~VQ9#Eq1|8uUl3&{rr5gfQC4Ih}yX|yQ;P4djpmWqD z7fnN-4ckA{MkSaw>O0Y#>9kp5HMlHU6VCQ?bVv(x{O_<zCF2Ci(r~_X9O-HQ+q#pdxSAW<0aCEC{-2u{`b4vy zEr}4PecWidsZoj$8IDKy4H5oqQhl4d(cul>tq*;T9k3P&jD&wX$Q5uP@v`Ij|}Pj-({oM@ierS{yQj*R#1Om3wtBF 
z^X^q&_m%Sa0_c;#K;hzhZ?7f@aI%39r6c0T=!?7WWllPWNQy^a&DrU*LND9+PJQF_vs>pSVU#eCBfnI8%*vV~*mpCcs5 zd(}zN>Ra4z-d6CzgQ_{otvKV_$I8o4Lz-%Biy#hplL2XmU8P$_Vjnh+1ohLg{R3Mg z|3KA@p8EK(Z_a6v>BLQq-(~A6(t+Btzf)nlOUfJ^h%dPoo^t4HNBLz^VSqihOI}s^ zUo$1h!)T_IBC1#~`R(P8aw%H0b^QEy>v(|X&**~W?YWv1CqV@}H##Ll92zYtpZw2q zB{dabNsBJrJg^LSML0TyCeVrgB3ivK?k_fgOEnJNlnqQ^FBv-{Y12?V-s1(UdUxQgY79!i& z2>U`nl!Oqssa}>;<9%cB2=Jj?a<}?)m04X?w;^QHOBqR$wmgO|v#PgX3Vl+VpOOI& zLhKL>wF521qr+J}_79pMBu1%GY?^ zr_b?A9(TKwy?A~P7jmF&#}9Zbt)enwZ3G~-u1pDR0*l%D<1#Goc9#gfZ%MM3p$IxF zp>mbHh4ME&y7B=h*m0!v9m_cgF=Q|$6>3X0QRs6__UPXNri)}?BYDESr#zoM6*dTP zF($4}QSW|b?t;n>um|}O+VqHjMMKjyby1$4-hxYJ-fbsr7y}8S%hUi>_vt5v{C=>_i(^fKL0|m zXtkINA0E*u=_00ar02_3fJLHid?a#Z@-JFr_lf`t2(3Ll;h`UTO1#u(EC^H)4Si%! z_?%2LsNYkU9@hf^A{@tIoJiV2vQtgo4S zydo00)UQq&CCLAVOEGF~yWuPzlfm2lHh-!tXM&aL^T9S#XBY=1liauqH-e+69jp6& z`kfZuUl1*am#$ChYo-VNxmLb{~z~ZHLVZdaq2SW>UHUtoS}cQfy;$}KDg$} zGl&jf?Hs3M6d`#K4^^Mu>ncBDW|O9Tr42WER0pTV)^P*QoGUF$_l^|Lo3iV)6Ayo^ zaetsp^bNxuKW5(V`5hyjkq!s9ZvEZ zHqr89%#OqWy5OZM?7h|te@;S&(?gHJm$W$72r($wPc=K_G-#<2CpqXoUxIAy=<;Aj z!aq5m+}V!hR-X)nC$wKa(xXgYz@&+l+z-)cl@m^(tsiNYRD=W}9B}~v;ilA24a%D^-28&D0ZK8O zfJOgXS~+HJT*919?u!nhy!;O!bzmO>1L6~3&->^tT!EG`!j^v@DXlg8^wSzd(2-I2 z93aN?CK|S?)#Jp2x1uJK4_zC)0$dWNubs{r_>#P2tZU2ISP7ohcx@T5sL@i;0c zxvGABpaN&GFuwmcm}-Q|)!k%KqIJD2@nkX9q!BIno;tDe&4+6>1URD|3gqC*cb>gM z;|7I>BmYDfo{2OIjcLNzEne}t;sO|xEABl(v?wN@DFiXEqrfZL8`Dh5nVj2j!RoY@zR#;Akn<3e8K0y~ znBMY|KYSZs`9EZg>x5k`0YjKO1Zl?d%D$33rZ;pI6S*gD;z;wb#5oASqL;g;&@ej- zI`h5nK7M4qsmlz?XL0~p_(()zdIDmfhbINxmH%NFpu0l()O2M@qInmd2*1)ULQ*vJ z>I~XZ5H5@*t98M<{F8rTIFvt9yHM)KE0xLlZ&cHqrB@NiGp!}-$kKbDW{648fJw?_ z1uiNn88@-eF;$^=HUl&V;u zl5AADW^*Th^Rh%BNbP^T)v^jmCjoTp@ENkq2FsC4#;~J@hYB z=dp_ws+EJ-ls_=NyL#Hw8#I(H=P79s^Zu;fFp%z z(vV&*|D%aH{cl;k+q(dB!n_ue$!L%azTxa+ES>RJzWq6TxC~Lz6BlM|*Q*QT0-tih zcKPL%V+Y4YZtFH@c#9so4N|G+RC^|ix)caTvUQ-2(B3<|Pd z(Oz-k*fGa|Qvpjx(YTy=&HUQxL{D}&rwSB1U4&JiEJI~==}rC8;`$5oK( zu4%}A{vG6THnf>~eXftpq%X(DVvbiUda}@_R+nG4G{(n4V8quOE}Qj3_M}@&3hm9` zZ@+YHQFW>7+*`PysH5a{X{GRTA@;u>@IhSiNLkIssJ8YdZe@f&`4px|0%AV#yv%}d>&NQ8h>>m;O$o9kO0sGAr?Uu@nXCaupDC%d!b5pjH ze+OjL(+*eS&L=u$tIH6&3w&JYcm4!h=zmm~LW4pPl;|Q<|Fpm7CVIn9+|y>#$q5&V zV-kpyYWf|a?iP^@X(jX(7hF+gqe`DzF_gFH#3Qbsk@3i5ROG=3C$LsxQieXSj@`a( zZ&W9)V=wiFr|$X8K$zHHY9lru>!fbk-xXV{?4mD89yy67p(;AfGv|PA2V^byp0I}! zmycb+K-%CRU=;0$;6J zno~pVHT!S&1I0BfJCg_S2;s*ZDVm}kpGN#op#yls7oTQ00V0p^hEV4yE*o8I*{1I* z-%vs6cGRFBGQcM*IuDRX6n!(!D#a#88}%swcFF%$ULJQ|${p?AN_NU^Qln@j>5uB! 
zdsQLowrB#QM?%AuQB>tNaJDMqR^R-_lW&6f+QZk-Re~ASTgff z6Xplyg0}|uxirh`tgNHw5Eo@ClEJ*5-V39^X~4Yw@cJFlcsr<9mM=n`Ho=x7rC@4u z!uFA~#ldc_3p0!qoIW|!%0V>(Cv6F7l#&Q)nj;*2-pS(>e^FqQ^Zk-;5siQEr*h5L z`^{(mD>46PF)CpTtxUk-Ckr$NmwNc?)YnC;`xpDK)Bb|r>#0b+hQ;N!o%0@iOk+Z% zF4?sGR4|6%X(KQLu3|l9t@I?#GZ6ahTc$?;RPwYyKzQB-}04@nxHT+>PbZY zd9Qe0ga18z4n(WAztQP8W|)g@X%3hk70 zR@8Ah6!mmP)ggTo_>Xs%39a_VG4i6VVgrH3+GMAT_$4rX;Z^)wpoo%+O|Wvbx2?5G zHvY4mX9*sBs!ksMZMg;DEh>O_Efvk+v0w?cfoo1Ks5apN@N`+@xKMC{j zqOiP@vGz;@iNpZ)7XG+6(Nlpq$JT~vbk|#Qbfn7dDRnrpx|^P!bTXf+UJgIc(T^s( z-JHtb%p*X((^45JhO7y8$`)GfiVV*SB*{x>8jpcD zvU)eZ9hO|ToURNB@K?k0!N*_|R74ku#0&)E38Mpo*P*v70r?|;y z92_OR^E~>*#d|@Y{ni$6BM%o^YMspMKb)WiyOJj9ogk2;{M9oOY_;gk`LzBTI3GEH zp~VPCh~#6C!AraSiWnD$+aoY3MCv>%1g+6X4E}k@=<*@q?!7qQA<2@0KZd}b)z%cd zIznAvJ}pqr+Iw8(TWX-XTbGhvX%>EPs68(Q!TA=ZKLOWaf1ZeUnm*tAEGGh7_^bCv zfD-yG^iT}l6bd=n>3t0s_3FSY;^4)c{Yrl4itRdBjw?rj=fdLDAO|R%9VMI1qXn(* zALU+dtl%$({Gf6{vHSy5wJS)kZLLUG!7?K;LV*fNOoCG5<0Y1^j1wW~HwTA-YKMC2 zt6;9v;W&C_>F^N|dpFoc2IW=XAHF|7O;~UpZYe(LdpymnZ~P97?g^kZ5-&@$$wwU+ zwzhwwV7{v%Hn7iB;#N(nd`xpS*X|g~`7n$M0~}ZER@3{Ux>_`gu*}L8{f!qQrEW|b zeORI}@cuXmJC`R5iR5 zHUY<%9^Xpu@@%E9&Q8;&Hx@aA6&ohM{I(A}Y%LW}w<+5%YRD&IFh_{yK$-DZ0DZ80 zCDyf$l7}?}T;52xT>BVC>V021vXQfikdmyg)GF4heO|O|C#u@;D#E&+ZkC$joAm@W zFnfFnj_~J7xZ8pDoNQV~21$Rm^niD_VUJVDW%NHd?2TNXEwNeG350BFmRw<_8Xu%Z zmjY=vg54exz9anX6BnDCPV^Nz)Xx;9=2ODX&3&Sd$zk+hVA7y%sacC8*$}8db=bnL z7yI;li-_b{Ml`1HCg^`Bg^1C08+uX4yASW=;Mo1qulwsglW~Ae-(l%(`q<6DA)Q>a zz3jz1J~P@97GQg`zAiSMBWrv#J!I!kkGcEY1M;#dw)4N4?Mmf?%ZTOKJV@C+$1hW{ zmkG;-_~?060+5)hR!xF9(%}Btu5355k>nZl)UZv|VwWwn838rOrS<8C<23c3gXlFt z>OUh3Lzf!oc?vcK#6<3EW08VyeE}jBw9peJ0~9dO%!^Yy)4!Ar-S4RV4Le zgq1vQZ|F$km-3N2B6cG9NdrTe)blBEpnQNOc*<26i)AUGg6PK|r`$%e%|!J9EeYM!FG^4FfSxnbqI zeOYXBFOCF(lZpG(i;Z6-n`?>B=GIy4%xWJ~7jK@4BqoaY9lC1Z*m%O-c3P)wxz`HZ zZ9+D;X3+uKuTNNjX~mbf6zD)wiV2Fyb|0@MFMps^>>q$koX9zz*QTe& zr<4x@e0^E^7m-Sw*xrTT-%0wbKX^dUnX3%}9*u6B@m|0y$s)ThWGwav9Tu26)qNMn zK7qX|a>;umS-KDYLs@Tc$qEqi<)Wj!ztkB?d3hWoZQXsZATR{IbP0UU-?dv^Rt?7Mzim`@p%&*7d4)GYUj=f!^l zcblC65}LygQC^I7YQB2o2E(1^O=bv$^>-vaf3;K6gXIB|wM9ytRh;A>v>$+yqk-qd z?H-qIY%8P~&<*B5R&hL#);5h|{TeC8?-_=g7+Xqh`xwUXXi3(w^!TQhN~<7M@vBr(nwb;(d@Q`02ilN&EuTK2lQ_7Vy zMAEUElRk#*TJI!oeb{*0eEtO5nZP*I`vMCJlq-M8lT!9C!xQ&!kz zqvy4=WAoz}pCJY*zMff}En-=pDLlRHUcIIR0b_D1E_;~xQju-*QF0<=O{W~e_ou?F zr2Z}TtZ9uq7YL&p7`{N(4v?bInwSV3C~V5-1rmagS|t3a_#)f(BSBa8EG@Z=ca)q* z8j6a8Dl%Y{BDFn!+#0fL&qw)?@F^lcn1mqJ#}MB#qCgJUlWO8ihh{T8d(%0r-`DK?npx5d;d!Hgg1bNHzDM71Hx4BAi{RH!wCOAK3)!%NcUB8Zj}Jqn*A9c# z-RhmTqn4lSslP@n=86wmH81gIB?zNw`{o>*XI+ zYHbd`ZMnQoO)NFxu@>(P!|?t`GbPh)agD$~Q(RqyHBipMKuvi)$-Hl4H2_`t&v601 zd*(uS@?rr@f&=aQLIPEAEm4wM2uQAaE|#7zuh!0Ri;Fwm=q|`kOQP@E9iTnAV0{Fc zXSw=fw}F4<=Gd7Yfav9d=KU}v=~}SudJdwb^5yQAEq{JGfh0o*8MlAOwcG>!R-Nd@ zi(kMA>d@(%AFGfgav}H?dhQIS=?gng_Z;Mj7XdT26`!x??zXanjSJyIfS78$r>FDR z^&+f~xKQui;~>s{B+n;qto*G`=^g3_KAWBt73-cH;;Amt^O?VF%>ixIn)vAaTen}K zB)Rv-lSQX9y`Vr*39>;tO|IVg|T+XEoZ==+ecXc!@B-gC&DZFkZz^*FX zN99G!{@k~6LG9}jtM4Wy1eMRh;xkXuwNjCh;MIhg4TPfu=MsLwi5Nwla?cxIRWOjO zO}5ruJf<>Io-YIyz~7*~U-Z<|`h&QGucp(J zyU#;?0`J_GC$)g5GgG4E_4ngS0gEOR)6>vSdI|=rcc<`alwzv9spIljVS|%Oi`uui zt2k(EZDnQwk>lE?In3p@xAc8}gS}c#6HTSdSXjiUCnqF7YGeb0g8g4;)*sLZHGy=1 zj3eeC`3@WJtTRDc$N>}PU-|87$}Ghvxn5`LCmHW-!+|ThPydiVYtwsOV!mMz;ln(p z7C8iopzJK($SR$K7&DAKZMD>;COZ4v#{G!F?>NqF`vv7leG;g80{-a>_^!PORFyRhm_#xJTpa=1Hp>E&$g6}-Fs2v`pv=`zwNC~FiO6z zeFo^TmtaPWkS9#gi^foPy;qLRw<#S77Qc4UD#C5ey6F4-pGkPZRA6uO=A6elbd>?e zqHVZW-=>5C90dYjpVq28x7rKGgjB@=kc9l1&EUflS^Xue*>!NU-_C2MfC;%ZgL!ij z*mN>mG#5Zb^}?6%R772%%&uG(L%(Wt^brYRSH7++-4VkUfFjNT%r41yhZtdl0b%Xm 
zZR|x2QCK!4c`8m6T#533d>Q}PByxPBhnHhpJi$-cP@BoaNI_4t|GN65tZ+LPOD1dB z+pd+#RK-R;lU1@qFWhMu`9krYx|%-1_W+2-v{y>z}(9fc|Rb79|$hx$yTTMQez!YJ*F0?M1HZ&(z6oIkaj^M z`7#80nEHz2i;rh@n3Tbmn#K%j6Jq zRgz3Evj_WL(^T-F1Ecg{X~R`{h+?h-Dmf~BLU&t_PM3WX0O$xHICSk|I0^?mtv0t; z>VJW2LrnvVoXL!y#ZlgQq*InjIuY2+ib9n z=hDO5x`#m~8w#U9sKdNget>b$Tw^_lc}w|nhtH$P3%~B~iMy@QO*uX=2-8p)b=LD> znc4o21JPdmomwY%*GMCpnS&`~?X{`mr%z-WYOl=BP3X3_72vMJGD-Jxny%0Wu08m<(;&z(VS}o9 ztvh=#hb9hlAvWd|A9A-xuD36QdMThj5qJy?4zNvR2YL$;8P#DwAk>I*Y zg0I-H-5lK6;_%s+5#9uvhb50t2+6ky{@Sa)+w@?9{tV0>d1ed@py7N6M)6kJkD>rlKB?`1!@MQbvZ8E5>AkKyn9`z~Zmpfz8+ z`J3v)og~a1-M34WyaLwAv}Gs4^omTy{f`qHW?h}VhjHn9eEaqX5Ji+NUdZbGbv4dW z7>Vn^$O;J!B+VaaxDzd*23V%zT>EX-UFF|oOFD2uB+3z2iaEExVJ=AVf-c4OLc-2l zFuG8O*6cA2+fz2z9%AsJyolVX%fRCgOq`=U4}ys!75b;~o|C*Vf;%OIBi}%V^B|-@ zgo`?lY~Ibm!x4sbr-DU>DNtFc3~ux2te^nA0_8os_vk_J;ntyIE%y3BbsgmChihr; zhknSjUjR612wiG-NkXjGOE4QBT5Zjpo$T(r4)cGcP@!)Xl@(x38YKf8Q7< zeMw0X?H#~opj`^JyIoUKv&`H?YCM^1!$cFxgu0L-WUerXPED&;nn&ixd*}Xbv+o5$So2z*SHVbKb z$BtOzI!@z{;kbY74*QdBlHLKX1+7Au1X!9m!bIa1nv(NVi}lpqs@II&)orQqQF%k6 z2HSeSG5PX;o7a`it1v6^ydEE)gWcirdJO*t;P?@5+3#p%*JeZe+BmIX`K(zsMdGn6 zrYOV$G6$K|fR>5d0V`R({fFV{0A0@)3LMIq}{8F|e-!eGRZ!QHbn6_?;cYt6GzN3MGA z!$CY98QcNkM@=hebn`yXxhV8ST{{3m2KtJy0ESsU2=bP25`;q)D$Y)C$JdMgv%p6i zxv@*k2#~R%;*nrFFW8tZ`|4E{?D1J#$sfcX9)z8wZ1aTF$F^Q9A1Nvr!Dq2Um3Mb8znq)y@Pu`8;C<#0QUzf`Ty}Z2uaZVFn(su}NaUO<53xcN>+g%vRGg+#d&$t0fKTR7y3t% zX7dvP`adNSudgV^S*YKIp@XlQLRgKO9tEeaBG`saUmV5@Z?;bj426lDj%_&Xv##14=!!)a`GWw~@|DDV5VJeMK?xA_l+q z{z7nQ@CAUeyJrNJKvl>m{lZzwUa7dy1K}LvVT%aQE9=JQagGI@f zDEI5xI4PPA4aFcoOuz8cx0l7ugu;P zb2&L<`FA?qDL8UE4c@0zb2YzlRtl8#uUerjBR0g!OKML>!F4Aluaf9t_l*$F?dp9@CYGH@WTET%^lNj3N;2Pwt}gkbss%dU@s z2=JQp?k_LQztkXR*wLI?Ao5w0lR{rAlfaCe<3-|=Gt5}qyCFyn8XI0WivEaJk62B{ z`k}tK6T57V5L1MiAD=v1<@7$Dmy%b>`Zlja4+9q!fvnOPbjy_3F;SnZBt5>G-gQq< zzzYn9TBpdBP0Zz9_V&!7X$rOH2?|{H$b|k8lhGycM$G|NBf>2O%EAleFkkMq@&#(n zKnwzdQrro$-UpLgJ`d)2NjmHdG}a{g)!BbZa8@cd-MsDoPdXkAdhCSa7I*<(G4 zXfCH{qY5|t8Ms^c$0j>qr{)lUIN=c6E<{j4=Uf7Plb1q??T3x!FYloRhEbjbxUviI z>3$;R@ZCI%XumKLK)*E^5#+UZ6%|J1C#BZMkE8zw42y)JJ*jITuR=H5_Wq7nna}!m z&nGPwXS=?AJ5p8@>}&VMg6~frm$=_ll@BUS3V%%csGoIVsiv;}JN42?sO^+e&bDx! 
zm4Y=r#%1MlsF9miriDGC2^grBkbxAmgIblSP85J-2wm#m4dBF-_>XHVQ?AigZ8QF` zFq91F^O3kE=G#170+1pMW(1*m!nnQF1cxBd$_LVbar{!ZIfN^t4HyDH45H@&ca@4N zXj*XvJam-(&h4t>V95f8e)qE_PYuMLV-hakAsLVuA+rA2xOR#=wq>vtML2@l~*uhE}<&0#Up72@-`tEha^`!+0G#Uq^V zF}S^SdL01kY^Qfz{JgdR{7U+dpf~blfkJ8ddfQYKc51f(mRcI{GoVq5n))AHP9>n7mOU7OzLhsl03@TZT^ASl>r1L||0@1KeW3 z|1d+R*q#$PV1YN-rScDD^S{GOit*Bep69Cqu4Z%%ubv%oXnDESYS-S(8>xuGLe**M z=H)W^4W%;gt^@ftKd5Ts*+NJP2YNOUnKI@Mx_odJKpKbo3roklZPwO9t8GLjFYAN|nu->ou=u_-kpQ#; zTZFpnXZ9qssIub#%{#vza`aOg_5% zzcIfEB=;{ZouK)4;(hxC=%Y)RWV`|_A1!Xahu%Bjc&L7_XIK7_Mr`=i$@mPHI>PY( z&f;S!iYZPlb`o7eHFf6NhmDWTc|6Bl-0{gGoXN#Gu={qbW+T<&P}1-+MPGHi&$?w( zf;0!>0wZ6ze5-C67&g@+6LOf{ngN&CeKI|@HA$X!Y1rc`S(TAEzYEdF6xTRmqC&40 z0p3W;noWj+yWO9_NBnL|1mG~MTxtEU#{8XLpL~%SJx^ZGSK-!fDhB)0f-Oy`xr-#D>I7eQp=mDbo~On(U%w} zbC%vZO3peGH~m-YngFEtxj@&YnxI4<$lQ6_e#W<=E+cg&&<2yg)SIE5U7KUU^Q%zS z?Ykur6_Jpob`XP=u*n#P+Nkd#gOz41;qZA{2k2Tk@hP|z&rAQ}V-8QHAL-dIyPO${ z8#cgBozKje=)$V0?LPn5?(^G?0!}8rx%#N5c6++4rMi>uz8OwN1j8r_0p^TLo<3|} zjtQ?UYxv8*4EW5bp9kJuGGDW0F$w!0x&9Pia@j+ay(L1awgbG5`3barIbQks7; zAH6JY8uGm~p+({%idOQ z?pK|pAY%rM+kh*!WEMHg=J6rl~>=+2+%uhU0wz#9h4e*w>QB;rvHJ^YRN%3T5`c~5Z zO2P>{71kheYfrOzW8gP)fp9|q=EBkCVH*_7dITZQqr;oBWxoVfxK%!w6_DJc<*Ogu z@_}*WJ;w*Z;i|Zt*_$ls8FVsca+cd-T)cOzSG13^NGwU3cw)CY72L`S z*>FCIFQ))8php35myVexW!aKIa}ub=)|cTc{z#LVG(!(_#A|0u`fgt$vyhc>kWY-u zmpQ|ts$dqk>)IFFmcB4`q?9>qg|a#P8Sw77Bcl{A=x?%1f^A#RiLE{Hi&c0`HSuVm zmc~l?$6X5vU@c`q1%Da{Y@kC6Wx-c`){;o7i6b4^ge58qXrmn8b`?|Ii;?I%TD(%h zW#V|?`DZ!;MJ7}N;JGiFCYM~^K;PloQ%n*aSvn|pBH$3UDOq)50%b~tr3NL^ZBA*y ztaGa=Vq*&5#xJL+AyBtO%5%o(T^`mhXcPFDwpm)b@bJH9?;0~| zLmU?Nb~Mj%qDc`9zJ!m*@uG(2#VAw`NYjxpncP23?LE{@0P+c!GPGshf(D^Q>%&G4 zgh;3iJbl>W#nTt^xdJ}Z(fGYgbC5Ud2-NF9nEgjH>}1E4f%;>zc%6Ehi~io3_XJc@ zPmxud_W`twdAYxSk zh6H)h{gPkXR>e(d`1bf`V%#jmY`CKt<)COB4-f6)!{WB2x4>phRbwz8+gJuKx_zSdHo zXq4JK68q_vYjXbXe(I~a#4+uVgMWYYBihq4PstEmSmp@v+Aq;hs3nq9ztPR2r@A!S zHN4RDQ!t~-Mb)tPIKmdMLx7P*fJ<3{aXJJOc&1QclJNO7`_Q*@~} zYY9BSH*Z>8?r3eM0V;wl&>MGBhEizBgeDb4e67srl-gS@&ZvG|#PNvcQx8>s2LaZ*W}Bcm{dojWe$qL(dt!R$?O&1b#z z2r6FTQ6?Jy=eKI~T*T>I6Mjh-VCXAqN>=J0^gSH^&d`V2ogl<(UZF|IQY9#jp4vrZ zPAobghW_6YkJJgrN#EbzcD+TGPG*?AK26cQRgk{PRcoXz5b?fw{k6Z=_E2q0Xn<%yy8E@&q;cGp1x_N$A} zEk6irel?p{d4^Hb3DH6-W&C%!2p#t_Cn(Ts?eWwD*tUYqJQ;_kZY~L5zy&hu^VOPC zhZI-xX;qzFgtfZ0uN3J^~^HqrBD@;4^bV=H<0j$ic20xjm#>D~_=KbhXV1d9h+ z1l9G}#W&tmB;XqlHt@H^S*Ka3X1p{|Eka~HiQs2r{g$F$vV*AFHN<~3-_VQ<~mJuVE-WAoiq&(*cf2zlqv|Q723K_8|AQ`wDK`4zNvFVZV7;|A zoXs34M2a)OI$<3f1HL@&g_6(*tB7}iAxcY211wgQ!`5aJYim1#erNFB=wl0Y=xy&W z#BOly=6Je@h0n1WK+1|k!L2SFY#+aCN#2ei|i z+Go^~d3HOB+k!U<`6(VdgQriQh7-fzcRWkhSdB8_QaW-cB1n9; zU~#EwU@o2?4KrBoq?{-zvux>nut~e;ko?#0{Q00?5A)tM36f9D+=v&>^I|`FwSv(v zwQ0iPoTKLhH5k%74eg@pZWTeI0Tb&#@TjVC@u)iPu39z30(U|<98Vt;UwjRn!KL!plNi{((dbZl&8%n2a=H$0rovyFb$6eg17@JCYm}C zNlMWYL}6bsu(O`4vE@{KpySP^30$2pF5tVh>O{Q$v*V+%Eq2R8H)`R_;8kz>JB~6> zshOV(dcTw?TjD=?LSaYMCXVHWLEEijub1iZJh-EW z8G&=e2P=UjS9uMe2Sl)WHGZ5HeGFga|nkkzpKL(6X^hU^?nGXqr=!x9{ zNjYBi>z7n_Nmw1nWb6Y>=#Fb*HZhG!r~rkc`zup#G{ z5WH*J9h%|?Iur79m^eo@L4u@#0EiRt-o9mD^MAQJ67Q*k$(Q;yvjn6CunG8PPYgx~ z`T49&0Hcjr(Wfc&LXTuJ@3r3naobPS9GtL19Wc2_=AiZ$Ym_c*`z99IHEs}wr?-Zz z5MOPRV2c}$2AKk?Q+zy9O)yS}l8>Tj%RRnrZJ}DS z=!y|`;+}LIAxZ%I@8C3z#vWksV7=`K^OIza8uD5rjZtXQQ1&Z6ZYxAiWr8GTh08~H;unw^yBgaZ?lew!e9Ilk^_Ku_ju1C z#OT&a9}^;#M3~hAp25EJwf3(dp6wQo>IjlLdoM~$y<3A8ae0HIcxrWhG4%4=C4c`{ zea=G1DcS4XLi-x(##FKSQCN=&$8MZ5hm_NRwV4}nwRg)(c~@SY*IgdIaRq|=ar>p* zBV8zM-)V_QL-jS6)bmGrU9!MZ1L?I`*3@6!H%+7YG`xBb5w4%e&xytll;P9ynM|*c zFrDzuQg-oHu((6h*?O~@fIlK@;bDJ<0!KP-90%}&$<0naKH#{(g5k!ut93oOEyEzk z)=HC@X;4Cdmo?hcNfvGtgV(t^OTyGXzb+`}sCCN3X% 
z#?ZEa)@rD-JQE)js~z?;bZBgZoI$9EGUJvFpL0wCoYIDId0he>+w4>^|fu|%`= z#fh{ZRDQ%&faOB+lyYJ{{~uj%9uH;vzK;`;C_O}kewpwNwye! zgpe(X?2>Klr5KHMWM2~!k!2FHjeY-~_vm?l-k&#<5qo;O6+uUj zhCNP@DWT5z3`=SC#ml}eIi)xL<$t###-a8NLy{cb-SH=&|9z+N)L|NBW+&mepdj#E zw7F;E3vc!0gByW77puV(K-2J&)$mxO8#4XEa&hc<)9bU0?=7*kH21*`Om~p1?`6Tqf4HBVnfu+_Q7}puPU!X#zDZ1hKyRwbj8}Z7&{V?p^_gT zx=re6*UVx3guC1?Iz!{9FIPgX$J&&S>=Nqfr|A-Aa1zsaf#&_3dyWN}fnb=Q_K)z3 zkGB2Q0cPSy-!s#P@L9Q6!OBka6P%hAn`@InU{BmZgZT3Q@t*;?Y6uy!fM5QoCpF5s z$f=)5C>|LKDu72pMz(CLm}<=-#`?Rj54g;!BQiDP)>Kkqp8bt@Y0g?!*}bqapY?6Z z0z5uV2F?!4&yIh{vbKfS)tkb2%r~0>_nBEGKHLX9UO3KmN(v>JB+tOw&x70n2Y1X9 zOT_F(_Bj9CTmxg}Ze*+k85KheOkjdrU&xa%pZQtZjywhotx#t|jA(*+H_j~%*cihN zYK)RgefG#4NZg~USwteGIWj4NFIkgd$6zp$b4_QF|98lxu2QIod&)7GM`EwwA^ESL za>3Sh5G>h3-N~OeA_O&V^n{eluRr?oA>zv=*?kAHEN@JAxY$Bt$qXKcNI|%(__YXP zuY`fXe+|~~LcE~*jsx1V3_*RjN-r)wftf%ubtbR?UhDvOJw7B4v8B4OwBi2tdV?KT z()T^Fymw5UcJDoS`@xaPI1}h#JkBOMOn&FI?#ix&{mQ@JDf}E|D~pUvdCF``J}e*J z>jpz$h_SNZ;ScFP+qKDPc~7|nXNu7C&C#>S{0FR}7}yC6+}f{wyO$C?>!~3VP~LCH z98+{}Y9ko2)=}qSP)kSQU`0@td;{3!uXSeYA(jZ+p?uTj?(zf@=o>V>AH_lRoBhon z`g5IGw1OF3_VnZAchj#K;a2avzkjgyKm53lwj*8r3OF*6>$>>1miUqX3TX?_(ZF@G z`VT(pu|6mnOY^#zzXbM}!LHtir26zYZzgtupFfKYMSmox$kdZs!;pq`$7x*O~tr&~a(a3B;iWqrueWveQ0df!Fi`*ZB zl*YUYur~N0gC@!Td^P_R4TQj@0mzb{4+YE)*Sba-f-g3~ zg`yI-xb>&hHI4%tzqzH;`p6n(XMTiEdE)mt57;Le}Cr+0H z{PfU^Xcqo-hS`E0>uIWFsPMcJb2Rs0)I9-_YibQ~K`SRoqckw|kma8+dCvt02v;*HjA*VoDid^gWp&g5;q+x_E*zhNuw zrBCiB(BhBbh$eg%_ z3Tn{qB)_`N_tY2O{v^SS!`tnB%llO@EA5Ra$8jv~>7tG!A@yB*i7>iBzwu{n?23_E z+R_TX@$`o-O(Vld#+fueG~eiqW=3>QzrXg$aXe8HeJiA&@s>(of+;FGq(T}yO?eIl=K@?rB`BYg@r5y4Pw9$X$(XVx_d;^V&%Rzn#H&eP|#h*;HC?KAAQ7<72pX zxmJD$O>Xhf?o%6I(~-OK2=;l^aYf~a4GsNgzFX%Ff<3I}SvY@~YU(49%#zd6YCaqS zh=XLeaMe^XvyolTA$O7dM{mIWhRq};`BhB=+s~ZQO9@L!pFZ&z8XDfScBRiNTNQ>m z(iA{ zOzB7<+T;z_$zTj0W_}1~#_=Ds;h^X^;;sgIl}UbZIZW=v;0k#^a>dL|8Dt`%7~1H1 zj+Cx(9o3Z?b)74qtFfC1|vYU15=%{VcX)DcXe6 z5M34PVShcoL#f`rRo1`2y!H)j;^LpNYABpDgjSE5P0spYRvE7+PiS>8*x?}0ttCmD zKFL!n%Vxx_n5oEcLe1uwuP4lnTsOapuyR;5Jt_apRsv4dYgaE z(5%~gr~OBV57r2`Q+|C=vB{;8?A%DUsY&0O5{Q#_+)YQfQnU*xwU+UY2jEU$-}J;y zb|a_uiASC5OM|a16KcxbU2S1@(3TuIU6{_5S9*X3oTB)5L_ zT$+g`NdOvdZJb5rBY)YE| zw=ur$ure4~emx$ZqBz)R-}1Q~8#?qgQ=7(16TSe$Tm|XevKHsw^*rd8UU=_*!l?Ch z*O(Yhh4pKey-tpURY9V5rN8D{5}9=>?M_;%TjT8+30Z&nW)zK!2XGnS#f zX0IzlIvXz?;*~9RE$LgUnY4O{TXX-FLs&AEw2^ncnV1wq>+W!0cpNVys)32p7uKgi zm$7q950V>o?snkv6w{f;8};sXteEWK{h)v|kV|SscjBBR9ZMwHdEvq(~4VGs2v?D?QJzo<};8;$puFtB6t*!nbJ z4#qC$-rjz{`Ut_4@^ZfNtsm3kZp)7hz&tT;iaR9lacFCXj2_O6*BHz)*7sG9<*PQ{ z=rnGxKV}IB3?=dK(5Cf9X>RxGLTA<_FUHq-V*;%x5K+0gu{IfFkm%^>xG2*SC*`A^ zZYP_ymrw4wDap^Y`dRqnJv54`9c4u}K$eB@$%iB)@P1l^*^h6s-*o1r-Inv+kKI~Y z9iY4_`U$eP&H7J9)}*Z_?rg(3o9|>Z2A=s#85Erpg*U#+wYg-upO<}GwEBU5i3!iv zrj&jwFPvB+yfm%(l8diNs+qjAQ@N{AU_#h9pm`t9`NE>^*I%Hp*bUtLiXr$*xVfZ+ zX(*40UnVn)a5!RkbNnZL@kFf?wyuHU@+vp;wERp_7Qamw=%W=1*PXm=t=+?x=#Jmpo7MZUNhYrK2 z@$zC(nJ+TYG<2ZCzLk#V`5~$O>gsbd>GB&)MFSo@$a(wtQE6s|r2o5v>keE|zTEFr z9#rbO_>;%UgAOgTC6inB>)`XOoX`3Lix<`@QLLuDT{$Qesseu5EL>b(XP>_e(3dz7 zKds=pHr>ARKv0x0uca0(Mmt>7X*TyuJ$lhk;wN#g`W@HMe5STD6k|{bb$aKa8DFs z*mv4ki+5fxsGV6h4;|Fsn&o>u71u4%mHQdF@O6ENtNhZ0=cXX?QUSFD83LdVGC69t z*`_~uV{OTN`5D{y1S=Np1P-|{{qC+Su6lc>IE7s~+e(C4e>R_VyA)Z<6S$Cy(bmM0 zAF?L$-rmjur}}R*$oHS>59%2imzr*g6fR=Q$)ccSemME89uc-kS@Pu!geuS#6d9hC zwH-yGmMA~dXc}GF%4`w4Fs$#}^xF>-<}UYqazd9;LbslIE>m{PY<@Fe{$nX0w6OFL z;g4xds-7|}7kTRYXpWtG zZaFXtsK-2o$*;ji<4s1HwW#u+HgW~@TdU|Tg~Z#9k@llQkyxY3g>H%Af`59Us(A7u z?q9@USGK}jy!f4!8e!cA8*-<%zC>jkT_0LhX*dCI5kL&lF!WRjWAWiP(#1E_J8Wlu zUM3DtP9yd_cV*tuI9E}*#R8sun*lkud49ec8_zZUrnnW0?sI%AGfKz}14AEs%e-Mo 
zqA9X6rgov*lVvTT*I(ae#r4ow^TxrnKqMoTm6hy-`Z3!r;l{$ZUh&~uiK9phCa&1E z)wMs!&u>ik{jY(Uh{Sc&Xq^4t&5hhoZEcUZr`d$Q!(F2P@n_D(Ves1e zz-9eEbT?D=3(QvR;*6c-%07-88x#qtu0I@5i4YiE^emz4B>4?88y9mAZ1g+Lc6)Ag z=kh*)*}1C2Tk9B@nkgsM=GxT2RoQvY18cc8@}3LUaMZ6>cFR9le&q1@@eqlxWG~-F zoxi5+1JuK<^ouf4ueoFB<)vS}sJbq+diCkjyMuiqUso?RWMxrk`kz%rWmcu5vv1|4 zK!yr!)aH~@mThf(Rmj@Lk|-zl;p1h%t)s5|`}uCN<#2t9^DokgjC!vSI9Y117uw2| zsdl$YP>I=0_#jD4>UoFdyEIeDTn4?wJ!#sUgh^RSw$-uN<~NYW({ChH<3?>+Q38@n z{hxRzwkFDFCby<}$$5@v)ChJXsaWWNPwHT?{E@191HJJBRb)2oXy&+Ga+=j`&*!?l zX^uN;;u~GeA>QfWw#cX{mfE+Yz3nIVz%Z;&N{%x)1Y#23_oP zN54L+6HI?Is=ALg(I}$@LHh@1U3whK_Q$&A+_(A1=bJ3XP;@DHz2vf8dH=3qb1j$W zH&-6*zGfK7H9FV*fM@6hJ0_ud^Rw=iv-SEVcn_&iIw(sx=gn|!4b6sbaXD!OO zBF5ay#_g`>IKISU$+X0=+QwSWnaes&iqj+^s1-(WTq0Md>9USL)MzKOoW1(9Xa|YI z-!@HtVpz_9sPo+WIt%srrNqkmi1bJO1k3hO3zQ%C{zJGi3}pQ;wik~Z`&bAwu8if* zk+WaqCj`(DQgzy7MEP#2PkglSmB{JMC9TQ?&pf7>ar!yOZ>Bv5{XxI?Td;2#~Pd zvjCI~Y#lq7FKc1fZ^`7+Y=))XALLHLw#j%&q|DHAvaQ_dJjkay$QJJ3C8BfTHkN!1 zXU)AnlGZ)H(~i%mNdVi5bIR2rG~Y9fto=DwcIYdkrZ`V2IvdJ84z9p_Ztx?o2=k&a zly;TFLpJBE+KI|FH0i0&b0&+ZE3V=K(SMj~%)P}~&N>Vxn5e}?ccnklHEt1Rns|Mr z?sRw|Yerh%5dk+4eGRnbYXC|Mi2MKvu3||>mC5#DIWH7@leyu{kzfk0vy!DdWd#_cLcbr ze@nQavu3G}GvRYQFSkLKIMxwsN>mu^h$^~0tU>Pd^&Yt_tKjO}pg4gip0qjblh`*O z(;pvXH!Eihjd_=J6E)|}50i%v$?fKR-?{epJs&c%m_qAvCvS-7qvR;|K@vo&fNBIT3?&r@lfzBrbpR0hu@Ah)!37QH$ zVrZn$yp?h?nl*Pm^cen}MuUis0z{~gJbifV<(kva!PaKu=a1G z3oAI7^A!Aq4pW~~tqFFUqPVHa(FKd|P%wzTXzLvjhYEJ)5wSp1Q0Yf z3r%>G=In=fwOzRkkslOxnV9%;hmkvr_U-Cula^#*nHbTf4!{b2qVq?7FPb^T%oyG3 zOu&m4eU*MT3XLX>jpAVDZS9fHfMdrJvjbUs*Gq(*T$ktDT<|4v9(p%VHV09ti7TAo z&&4`NQNS2w+QpvuJyyp1|2zZ8hZW_cn-}t4O==KJt=2i`XQ~=xp_czMfx2vLk2J*6 z_N6Td;pZZ=_s3ffU3wVs*8S4?98oPcoc7}Jl>V3-P@Wn@@7#y?61m0yPw+mIfzP*Z z)Wt$(5K}EirbF@(3;Wr!TKW!%DB>PqhnM}7g4%79lf*z&n%j)LK=k_8o0sh(sEP88Fv^?(h#xFK$=hYgv#1(9|#WRozsk|-dHx$OV+X)`m=NX#5$W8 z8R75Wzvny8n%z0WeGvEM`*&bQZ?&BviK*4+E7saNORNxzqB@tUJb+_ZYgU#WtIg`X z1uF06)saR$+F8>>UHpYmVGbJ~H`^{A9-_yi7#E&&-uAb3=?D!~{@lU!KScwB6b&mr z8tI@Uxr{R`B(W=@FHLF0SWte&`vh;#13P7rVOHVJ@>Z#Oy|1_NzT0*b)IbzaI~IOW z$k@VJ(}(D!L|F&=SIv^@B8Bz8BwRp=DWA>&_Tj?Gkv)kr^*N%*8u^D{R$ zY6~pHck?duPqy$S9YdpIk*Bkl?mNUJmBnE`nz^6bb9aai?-nXTbZ221o70Jkx;fb4 zn#m+tqLf+MmJg5aFIf|?C2ZdjnN_M{pC;*?y5fiBB)yanB0VzRcjnZGQc-kO?Pcg1 z;kpLo-X-8ik=d5RhYvfPsSwYZd)&~~b!)8vL8~@4Xqu#=B4lpu{d@T883{;No^4r+ zmvs(ZC#@}i9q$Z>TD@8N`}Dqke=UAJbZmC7Q53szg8Bn~%vojLue}pGV}NuBHbUe0 zJ`xXY>EJvNIsq?veRRt56#(tGyL>asFt&jWnx>7vN{-N;EnjyLg+8&pPimN5E{i1A zbfjvWN#J?0f&vkqTuB|CTwUVFQ_w^J625u=p8fRc)5Y*EuE_{JqmGAgZN!+v!JM3& zZy*1mws$QpH_k~&n8Uq3G+uD~OcH^>9UmY6?SnN}|9qMR;a9B~%P?1Wk0s^-9P5FC!$r=yI}^_}V==p3{0txB2+hh5MVf~xI)|m) z(HU-sgcB5}|8}=rA|lwOiGL!GPhO{SS1p$ly1Uy%8t^21rYWWaPC*E-mYkQz3YQ;b z8OF-GUO<|PAD^5)rIjK#69AJ4uXm#BQh^rYqiOw%6UylwG+yA#Ps++_lzZ~gvXz(S zl5~6x%v%jc^FSi^q~zVn`g?AIJ9spgcVm|4eKBq*_!u~Y1bp4)aa{lX5(Ey?#z0Kx zc3nWQ@=)nuqjqF)R{*jj*IWvMR%c$}DR#H+mF@#1jGzlumaLIF!w8NUifuCd{FaPC zc4HP2i`kz96WgMbnORv^q~jGeG+03%6N9(4T)dv%0XLhG`b%9Wzk}G(*4gO`R|ova zjhD3D!N|&6{qAn#AAR1@rq@2m3(|i4__0|3Zgq9_jYsecQ1| z%3HUxs>03bd0s-c;5Y=+^}Eay!}sQ;bjs{F#Z-Pw+7s?=C&LxqHvkjg7KNHh+hhu( zEq%Ly{ySMZM(OgDrFaYTAt_kP<{ww3AB)Ginq_5&Y4Z*Z6fd2wEvpq%(@Wul^vAce zB|;y$M!?vZ??#;DUs=qrqh~% zXpF0iUxno(H&RxhsW8_@vbs=;>*lhpu<%N@-NJKp%AQn!vu-``>+|2NLQMuU8C4~0xx?CJ4!B0Ula1YPX zY_tt0U(JeR@)1G=UdSgxOgq{_wU~oUsA6lRyE*0zl9pZ z{dHf)&H)Yfpc|WW+xLTY_jhxDaTXGXPw~{BvNMO9kifoO`%|I+Ssjn5n?m3-H%U&u z=#_0+Du?yqwOk$K$P1EXV+K0})6IBB;1rc1xk(2}797Am2pawI{_I*Xb{M{ZPOeJF zW`!-wVsOvvI%P}WUJ{)saA7_CCHk#6t_@QS+rz-P-8?j={o6bWY9v$_&$XmQh{17U zatS4R0}q>Ag5M1S=bL*dFRLmsmiE{R%53IBbIzYqg%besBPbk(a=XG}k7noHiTo`? 
zx*80ysmu+tsWJT{FGE*@^S7J>4VRHaxcT>^PgR&%e4*g57-@Me^StF~fbNh({7@;6 z19ZSH|5QVjL1!Sbc>T}a2*=(DyCIPRZnN}Vlar1Izrh084KPpdN$fv4k~vO!25H-S zwJPwc>;tq8ITApn>m@O9oHTzEEpfuKlsK$j-xD zCUX0xkK3z>#v^Fz_V7jE@{zpCnuE=F03V5eq3g`*tVKBCHP0v2X-w4?&Zh|RUf zK`w8MviqK$VMggUKWUr1j&E+4c2<`idLmKf4E#a*7xR}5wOP+PN$Jpm&=ZyFJEHP# zuf?QHEfNe0f4hCIf?@s2tWZ-FFZ*-oLoC!Lj@%0#ipV>+G-GkK|I|okgH)TeW=xjb z-K_IvI!1;km2)Y(CLt2~zUd~wG07}?)gCG_XMdC+lfjzZB^}Zc>p^rgcp~#GuxH$z z7^qvSi*uoZL7Fx_`UP_KI~;DC;0GI4E~&8e_>6{7aY65a|Jkap*KyRb#wMLMos9-i$YU^ zoJ(q7AqphHLE*=0`TO!-4|YO+e+wLm#+Am|XcNe8Ua|t;ly$HeE)=^OuVMsM+eDfL z6SXaA^L>8rL5ahGMC8o@n{uhwSpPp zJ1BuF@ycr1#g&CPYT&b6d9H9w#|3{bv?b-FO>oLcIG4^9*tZjl@9Ik?9i?U?d$L*0 zd?B5<#xo#K(6Pj(D~@va&a#|+Do$RO?$Xp6GuOI05`nRHO9vLr z$@S_;@&Ya9qb~8oa}T24ohj1)kgGy~-|1!C^OhI>s<Md(nN{ggb3TCC;z)->dG7=kkVd9l> zA}@t@xjzS7b)n!vFiMU32NFS|z46iX_!L zrY@TCj1x;+%#ovr?3vQT<#|OEd^AQK7q!`mzhsra4O%CXN}6H5;HLFn68>}Ws2^1K zdL5GS&j<3bt0{XJMzXh36Ek^PrtAD{g;DaeI_$e0UCUZWq3Oq$$VgmxWoa0xG@f&J zezZ3vbK5XF9~azWdQx>e{9@ye0(t5qb_pV`2X+Irf~rhD7tDc+9YyM-a$W8 z?;-e&OHYn(j?R?yMIjy38{YAhQ?n=^N`wBHL zZSL~Hc%`zU{_DJDe=?u>-a+|g%BWrt_|{_??~4~M8?S;*HZ!TH7RlpP2bzg}D9}~+ ze`QiK7e7MfxpW--q&ksPn+aHdWvBM!=f-w20gbnX&H0z1+%5P~ypmy7T7=7)^DGOy z-wtzc&A>+Wj;j}9KS0W!$SzRkW=Py+jt$2Vd z=WBgL@rXR{W3@^+Ar8)~@3E#YQv^}j;wr}j^A>$nPnd4BUCpgW3hhIo4gQzH%@Gvw z%s=fq*`c?{f2C_*-F5a_`)me-dSV+~uD0r#G z@FJX%E4!N|uYAFNygjV+ALt|OrcwXBGaiJnMe@2 zfTndIu>L-!Q}#j!8u>8A(PWAd{^~_qb}r&Lx1Z3?5M%%6*w%wmYKVe$v|5kfzj9{z zbMZ+aVISS!cwqv~LsS=OQNg}HUIc?su`0OlCKX3MB*{r30X$dA))w@un^6b6H*wAd z%oC;GFuMT;{G}pZEUnyMo_z;hoa!88plhP~1!Z5G+2xocP}E<~i7_TjE_&|YI$y7( zaIXKHIt$C4f~83IWqX@Me&mF$8=TGAJN;Uw(pKaxCn|)LHWzFB1v0fi&3s4jzaWZ5 z8&J)mF2QsG78LJ-f#_!X(iB!F8`rf+?exrCw$bW%H++7 z^-Dor=~_a>(Kq%43E(8J1in|?OL7&J#p!K_r+}M5xp~qmm$E3$ z`UmL=lQR&O=;}x?f(p>#ft|_Q;vo88g;~WlZ~;cEFa19XD5q~xDBID}42~JzhL?Ng zXlumOqz0EA>+XrGiyPHdRCsMLhlftdK3CdH4E*T+L(+x9;=T(KY0}T^a8lbj+>RJg z8J_6`J2}M=)RS>j(#=TKO662L;|keaHAXCMFcu-){)0?}AnVD6q0j!UQZEk@635+6 zWL+H{zAUOTU$)jpYv7!0X5uEmF+0r#u~^AgzSnAFj~Un4!12$&Zv80qIBI8-#0Tb_ z%GYNtAFa+?vCJ>^3f7|pZdh#8q7BM6-cLK>`~rYxFhMck@mYw)fQ9D*3#f0V(T*42 znvOiQ4qsmIK`7SVQYe>2_sZGJ3TLcOBU#b*!8d`h|T{O(2z&|w&9CK}_%XRmZ< zFGMw>W8aC^hUL2#5UP63wU?VB7CWZb`D&%Bo)fhDC68e#eV~9;t*Qbk*(#Q* zI8`fcOc36R%DR3}^jRaL&Wco`*R~EOh00$8j6?Y}M2-JpSE;!>U&1@oc`By7N#TRj4%m zs?sq;%7K+j6hFh2@q^G0lkRH5y zyGSV}%~>_G>8Fy%*v;^X*8;K|`>)!2ZY~&a1sKwKjh4B^FOX};2k&}v3rD6OyilaG zS#Wg%I5g6}R*o*Z5cxCiBZyNbD*0gM%N!w6el1OeqqO^&PHu}GwP*heXNSouBW7F^ zB2<1IVvTQtDsgBqtf&Sqx$QqC{bGO>mSufdRsgjT#vDsmcbh#rZH$5+BRTtp8!V&c zAFt4DO}a&~(*<8qvk@1s z6DgD+lSdFj<{N~Ue?bL+*em-8+(emBrUQGfxN(`I(ss;3)&-@~hn)6$p`|{mMrP0J zMzl4447lpnH7%YG@Zr(+@rC0`A@2K}XSdwzTIR~1Dh)?y6D}fdL@e^_g_9SOaP<9R z)hJVZUx&7Xl(|h!{$ErkWsB_?n;xVHC7e&rI4r#xFP9l_IHeK)fo9 zcHx)^(q5UlL+m?zhg#E5@P6VVQr^mF+t&S19&OYF@o)x|41mkDZ|^+`VE$2iwSdqY zB;q?2*c(~Xc>j?`r)NU5g?JdJe8rJSg}Enxyqt*(SZ~<@dgz*?=1zxw`WkaA&*<%& zt7XJa9q`dm8)s_eM2MHS{kIhn;ze;CiT=1u3k`pe4B=>jg`eZQAw#KmsUo%bkIzKio7mY-^#@+Kv{ zkU#$z+?aMF+HVTr>L7MUWu@XD^MO#*oR`Z7zDEag8qG{ihoAw-)NIWX9c9GrC}7GkI}w z_^VN2F(lJ94>hBQ=JBj=)J&^gMEl?)2(v-kl&Kt7B;BxO`Ccz;344HFkwr=PZiRB{ z&B*Nv%_UX49_{VgxYDoscyACLrb?CbbTyKqhnV^$nyoe2a-Ib-tkd+}5y2x+!nSBHNJXo*UpM4Q|P0SJoy)ftui zGnIuNrhNc!GWX(~PAcB6Rb=a%?J583R#Stf&#?QE)U_W;UFRRPjNc@b0HA^eNBUR@ zSu}R~i5s`6`w2vZMWEtvKE$V1Hkx%rB%oKD9AP5dV8aIlalipNrWzfNLPp&SvDZ93 z<^A?#DY$%9?k@DGCqCY9*}{@kQu4j?EK47M_Jbs2E1`3&ux+YUE@}J2UA=d2=k4p{ zdJ9;z>c*f0rtZl>M+m$|*G5k0;NIzvb_!z|sj4tQ%1o+U=CSLTgii~jrX15N^Jmi* z(7B0&C4@nZV_6HEoo+ntfUB47ES}q~wVE0w3|fG2b4=Rgdpf$h7=hK+xXPKq&Le97j}f(^(@ksLCZl^WKA3P 
z5EA_t5VB~nbd8g1f>XHMe;ZSFlJ6${Fho;S#H_Caxyf>(SP-Sn=Da|2(S7xtA8TIC z@A;G6g_otmaeLA=z=<_8c(BvW2PYL)7RnXxLK|-eP zfA!Qx=EzU}P4)@D9_Hz&Aeis??PLJ6vFoZ44&hP8`{JGxGLURiop}QYE?~a7BD0O*) zf9RFZ9Z&yS!Wx-(*8kV@@_Wf$y9tl0UmzFAup(Qt*#9@PTt94cOwS1yGg>;v!00p4 z2_K`Hd5PYKx^c~l?-|hV3cGbJ2alF_~PyHERLX?@N&*uGsdhPT_L)9u+8?ij& z&^vw6U7TI{9w(8oZD1p$V%lVcwyUCH4AL?DQSY%=8%6po)mwxTdn2Mz%xFpW2{Ikj zy=}S!_Iu22zjU;V_iLn5kWQ-9lfWI+P?nFBzvP$`h!46zI;^?s-21W{FgtZtR;gfA zneVjg8F;*Civx6YbhuXQs(FKipE!aGe)Z7Xsj<5vVT~?&Kp*s_Hyr2^Z2x_R9=b3v zPTw3MKTK@AcAZL{G?>#bm>TEO|^TqP0p4LXY?kFy!ng( zp6lc?4WLNO&`Y#>MY8n~HxWwj-isIA<}4(wMPJ#@d>m6Jyq*`|$ed{v^D(C9lv+pi zB~N)k$05ZiDYIfpCJWib*Ui9eN9);QeppNS9)xbG75B1^xf6#swZFax(y-DWNcL78 z|+MkaeA%YQ#44VR;Z6b z#@xNl8sqBRp(Gb}IG>@KI=Y|5a_FdpFUywz9!)n;wJ@Zh$7|Jr$p%K^=L`7dz zcluZT#@`pwKmYPI>N%_#+j1Lf+uKyTwe7qm{($)#k=l zY9h(==x}KR#dW7$N1==NU2sM%rQq?qZDE=sa)Rkq^8$TDqh%P`5pf78=onqz1H~Z= ztFhf<7IEPlB87P%EbJBvE4?n?zwEvu4COgt8pz3e!boUSZDC`d^{-BU&b?IudFq7D z(?x|lvc?E(!;(UyPAE-j#ne*c1#H`QVipp#&xv1iMjK}#zAw;So)b%CqZ<4)ybwty z)OLK{%jzV^@dsS zi#vBdL#1ch=*iQg;b&OZM;+hKRyK;hWur0K+VoKl>BZ4K-di^1E9}3Bxd&`K zH=IA+iahgZqbW$-_7CLc7w)Z-N-%Hb$I2H1vIHuV1AnM|zfk!bRSsjr4=Xpd5O-3N z$D?E2FL%fMEtH;3)KY1jCX8MLu)Xc{f?i<6&gq3?B=$c!1xeD1rtXNN3H?fS%dt)@ zFCI?nN%KVd#_|`hUZx*(f|P&e#>C~hGxK8AF*C7@6uYvWc2NzduTG=|WDC_y)GyHU zRE$BcTVJ*%PQ6hnvkAy99WI|ubu#iJPnSGbOkiPQ`RTiXHdAZXW#PNf)Xtv0TTESl zjB=~2q4_fKKU&nnf}}ZuOQ7h|6N5db8z$6dEz-}8@AtorZ@_NYa60^Y0(_5*!hriT za3n!7yY@%Ck_Gh&I@FqY{re}!ktbCQ#~neB>K}2+Jb%q7KEo=Q$pYG3EulC$_k*^! z&<+FYpd@r#6W(Y66C#s=&PL=dkLEKCdG!ry(wv#Hw}#x|1;FUdiXp-K28Sf>=wLnXCD{*VWL|)LQIe+ zrthwJpP&swZqIJGv$q`*)Kca<-vyO(B`lX=xq67QZliBbY$spwXRw&nMu&#noUdbW zCqxR3o@FbIHNPP)7>5#*8mO?~TjQ_E9QVdq$)Z;Bze(HG2fi;HfM-t8(#~mO9OrK1 zmX}LkV3~C;2(7s=5rnO>MA7``5g3j@TmKMhR$c*jdoRMwxai0)mma^1%24Gl%pU1X zQ3|({F~+C@JyVg-ag>^k|K{zvSjGjMdKBvui)RwSqzOA48=LplRCeEuwY`L2;uu9S zPELR`OYr}jy1`uY9B>;zY`*<%;JpjSuTb|9LS zkrw5WwIN>tE0H{b1c`9o_b)C6;M)lTmf9j5rZ0Y!aaQ`4f+J&4TcGFV5 zpZ@wR6O32`eB<>Dv1%8l2>`a(FJRmQP$Ee7KVo;5V*>icJSkxCwrST2mLU4?X; z6A_6+s6wv(PuW$)FOA zU%J&7BN^6gmQ>DXxwDs=)(Y9n!Fah6v&aLgk#;<7d zZDtU4%P<%}exD^P*DqYPlIB5bquyTOfL>Phk(gVT=j^uIJZ7_+wF@BC?TL1HeVYpu@N5$k<7UbYWkBAF=Oe0wOM6iC8!G&o+V35FE-}t&^Q# zqmH^nq2DSB`@S?qS~?hMG$r|)J4gp7@S_KPH`YvV_YzS6lbF|7lhT)VeHm*<$vr>; zc{e$#!mSy?WD#tGxS~G;M!GUVEnrA>R+bHH`d^3`@8FVpyBDdL5r`d0_Q1{ss-(c0 zevXp!5Y-Xc!l+Xldvnycdc^1c>zhW`!5@6K z7%}Je@Yga-Oi#D+@H{Xvp@BZMIzWv+0>R_ZW!|$pla*I?34{7!N$a zAGdqU*+MQ^cR+`t)0=0V=f4>CdF*Vu_#?%>W4P6MA>w1odEzVFp3Y7T%2+=L=J=TT zm4d7C$M4fryx#PPbDKO57oBR(&(FV+zv!jOuVkv@!r{lt6Jpx=hU?o58!7yZRtE2_ z(=|J?RB6-9FS!xo7C&WF$((o~M=zGsndJQoN!|!C>9c1=j%h*!OWv|`WNO|?wJW7V zoE!Zo&I$5~Bhe;QCQ9rCI4Ae8jbPsuBUu$2Z|?KDCtLPH^j8>2jFWOdnBEen_}GL- zP$OPp>-AY@;|bCby(kSyY^E_4Yr33oijp&<2hLRN!M8{4b6NP^ogv~C5K5Yns*#<# zCNGL*+36AQJGJSX(kAnX{G|zSSA|`V`!3Al-AbH+&6+ooxd961zy9d6cSHMv9e`AO zGe=g8PB0;_mmow@6)L@jB|1S~<;I6^2s_E=?*T{1YdADI13H0~Bkg}GymAP}LFd{% zf3d7GGKf9;#lrAT51jg!FIewBuimt=h^DcXI1#qv ziPG7L`0^0;X22Zu^Mi$gpw_dQ#@IH$2KY=7XBoBBmZUb!(KKq-5Aq*euwL2~Vg9t< zIJC(E$RgCGt_Dl@IW8}&_?>69?rrt0g}-xg#$Yf#<{sRcFE>QO-Z4fWqHD4~CD!Ez z-8AF_mYpLBX@6Pb^e^(`8DHX@aqU9cXov9Yi*9hFJYdxw_G%|5|F#o&Jn|6LMP&zA&VCJ>PTAJ$h)(50 zW>hq16*#RzO@GjnUdwco+WtGaerCNhn8qAB*nRugT&+Fj)y@VX@EaR6CK^@Z)_bW0L=#D;P&kElg+b zEkkN{*Y5}zBM)l}i7!Sm>5d!mg7Km`)2|sZ%lZ5Ycexq`Qm{WF1@p)QYQW;%Sle6> z-k>wQ!`$#DpMPLF)3%M&=r$|D>l#iis{X4Z;+T?x_90U6fi6ZX2i}!_7qqqAUvtv! 
z+GCc6VT~IuM`>YcjgFmn|9Db7N1sn4UfOpME|5hG|Hfd0dNN4zAB6GA#hj7LP`Iwp zca*$I2lapDBxaHR?4fervFzBMQv!LL0p>zpr6Rd=f5yn`3rgSVvJI#Xn~e0|b{1ks z5a6**!r#Ab_-dQ&$g0Ka%12`!3u(I&q(@fO5uQN~$XR5#x1IdF^mP^0MQ(h*g!OR6 zPC4+2`&z|Q2`eZMZ#zBAuYrXWE_@d0(*Sp#hDyyhqOcGS4T5non*{mPo>Hg zmZueSW+N!$W-Mrucv^SJsO!}m;_5P+3alnEizpMj=i*^2j0+Up(3}qG*DIJWxjd&V z`=(JnZ;W|V!47xR=05e%e+Zrfz)NehI!m$LNs-i%L8yyu2lw)#0tROS8X-91cKp`P zfTr$&DI#!d61i%3=(S>zX8xDmm`v%gdR8MK3;6IEh1JowH1gT_{W_jOauf1~zYT*K zl`tK1E$oa*g;()yYZ{ofP#L{)6|`NL&;c4h2Rf3ATTx0EXTljjnhx_NGCqHqC7Zvt z(`}xyx9!~mcj(HAiNRjxJu!$t&Wbe5%}Ni$Zr?N}AbbVP`W-{xzR7nJ`GLnuorC
PXaS#$4RQf+|1_-mx;ObW2K|j3}#b;)PrW>wfv`&V1P$S$ffrje^B;S9Vg}xlvzJ>WE$v{4IYo z=~mxl6ejQX z+%o{O-Rn6Oj669FJ%MxoPh> z<8Eqq48WFJNU6Ci7KT3cPDsmtJL#w)CdI2eP+%S{W-9Y{Gf6*WQ5PSdf~#ZT_7D*H z+|>|+|5=z6`wxmS&?H#MB;v<8<|VqREJt7lzALHQ3S>3Hn}mlKPY0-!~J1F20CMY z?DXk{XcJ^45$58&6X>sVQ?N!XSJoe1Z|Z-#eB3VUPnG(FCZzmziE>`7k%iI6V%dL6 z`o5BRC+Td6`<70{1dqRrdlE3t@u>Ap&vFG1x+&HpUxJW>+0&JH89z2>Z-eH;mQiUC z3hL^(-_cNaFVknnbIcQD@rwt51yFkXtgbeTI%LSpwo*aWO%$dz?OuBmYOiNrjco4K zpXTHPxec~Z^s5|$YF;C!k(}rSzLdc9j z71UZjyr9gTey!A7(z<%35TVM|aNXn^{t&=d;3kUG@ypp20 zTwFsBB7x^b@jauS!O2KuA;3HVmAQ@INjrYq*jzY6HN8fvw?&u;eJk2ulVWipNd54d z_#5dUbW{AUlpWh7Y~$0Ia9yV#4-VDGRj0`x_IlkM7{T3Q{+1MLRNJ-#_ZO3F`y2l^ zCIJxUfMAj&iVt#0hvdQR-dg+qi%|JE*3sqa}nb~?=1qg>|`gMH@4UjUfN8aWR14M<-Vd4q??)^&E_C{}S zFY%yN^Z9DBW6#M_h>Q)3nR$7& zMBH3kZ!a@PfsNSgi!ve2 zX(}ivO+Z9N2py78EC?b^MS6*J1f-~R0@9l_DWP`=5PAtEly|PcDfiy*8{_;qLrt=i zz4lsjKJzIfnhfppk3h?Fh7~rqTbO-)-&SyQt8$pE@b+fmgBC6L{^z`ZPfBL#HC&$r z;y_p9YJGe>c#2$f9kei~ZLiP)%$D;JonTJDcK!*|3B_Q!u=hHXy`^cLueVj#*6NZ3 z{FuOf%MrNEQ+gk&sIW4mHQf0cLWyG&5fMo=?z&35=FR2J zt7Vi>-rxTOR9>tz)&L0dF`1k#NsNDgQM~?$>3q#6A^AYgl4iQh&jfclkl~HRF~PXl zn(hzJ=3rI>?n3WuzJsR01|Je5mWZG+pKAPP^~gt~6L(K5ew%#{ba_D)d`&lMByUd; zt6>G^l@}NG>ncx}sf7NNJ>gWycZ9VNK{RJz83`qfZ`=n6KwLp(+)h8 z2o;oML5m^fUr7qbAZfWo^l*x9@ebtGhs^nGXWPhSj*P;s#v70-iPn44Q|$PDMCpGL zeH8clBYJ{YdHHYe4gr-p{oARZ#?Y7?nZ!8k@YH@BxR zUK9Z86Fah98dGlGeqZgW{byp=Lp3#VQu1hn61gX^iU(h6VQ&5iTy=t56pJA$$atXo z@WT`niBvN}L_(9_l_boshfnHpIvcUNuM0!qk(4?Ja-<7PCaTAR`>zn`(=wVeZRXVH ze(8nQg>B>z;EiMVpE@%6eDB?539aptns*QR%v_^amrynfREV57>F;|a3++(|>3e=~ zD$n{Al$(gldF__Ro!qP|?tYiF27mtinYL`4kdPoFE9(-ZFR&j47a`@@7~B$Tbq$Rm zTUixZ*==Ar@?_*DlcWtnvsr$A(Qf#$K) z-mZy)7t+2u4?PA^+yzKYC~!;_Qi+qwFtj9Sxn85$Zsef~TTGIeXJt6xGg z5`L6bgwtmH3cQ(h`|eiIE`v0)sU;0$rQq?nE^fvO41QV?^#6q4vj}93_C)AqusQbBjycfM!uFuFo+qX3CT5qAu zy>(FV_aQ4CX0t;CJ>!itl z#i+{T#~iip$E!OxH$MciolZmC17Beb=YazU7(CkVD?IiH;|{i7pNQix_1yMdrgpn< z>m$XSN4e`sH+c?%qfw!oaQlt&3zfllCO5XiO5lgP#Q|=7@X=;xJqa@%uw0NXlq%eA z9Tr<$2XU$<-3412eu1JjMC?JI19db8o@U6U?-{Du>)`Ib!)7{+t5CzL?JekfTP;W$ zX^=!!)6npQuxRG~#uDYS_W$++nJoMVkAhLIErc$Q=A$iT40)-7@s5G*muUA!pp%GH9>u(HyH*i#Uhey8*{!mIeL zM%BY>2f+#o9F?NKeq9%*c2fmb&n2_|Jr-&$@GoFEwBxrge}27GHFmwUfL__>*X2Jz z$Bd^{dn#|=zFjrPPCR?Q0@Dpz&)n8@vk^~TI2E5_FjhfK&!0WZh0xOOc@cQP;db!p zmF+pOshz9XgA0=&ySFkJuV7A*cADCl{QUAV3%5dNXYL!6cT9ALHGzc2&76|IvE^Yu ze4yLL z+3cRlWxv@FzoVzb+4G`l_c6|<+P9gU_|3A*G?O!FW5O4WmDSgoN=QUAN1@K-8FB8B zX~QwP(9b1F5zEt2 zk|D9N+iIf}w=f3&gx|kZG4n zBr}WIIfyj_1_U@p^gVuFH*6u-eOQXge77+{CS-#tQj9LoVtK%AATsas+9f( zmJfVT&}5Dd_igpC)rD+%B(DZX=*bk~lngUlCO2C_{VqzGLLI$uI~p%WnOTks&c{Er ze;bvd8Tq56hnbf%H*3X)O|o?+3okJ}uLH&I|7ZfB_lsAV*YY9taYFBRpWZeCt46E> zH+Gu6!euISH`tR>8dvRxE`xIlW3N|qH`|k)3FT8nn(qPhql1~*emLRMR+eecly_dT z^W+Hlt3g$2RY)x&(D1S?BdM_aOGwDN+DQ5QGA|wtagW|o4zd9eUI9O^6L|f#aEtbC z_0^=P>~fKg57^FrAx+_z;Drm_usq>}{Gs@J{=tf<<#3|nWj%rYa6)wY8^m8RH2fyc zARfo5gM1;eKZW6@%HZ*t2_s5S6?*Zg=rn!Tiy!Pu(;=ywxl!)dL!p>a)b-rJKnjk0 z>R1QHLuFH}Pw?{oRJp!o1hkE`Sx%?Ws`hp9~xR{{#J+*ENt8Noil>9m~HUe*eECV zq>$$f5>q-cfae2BY=!D(q$O-Oowv>lZaf!#*J8We^y-A%~YN_q08R3WGjY|*U_-Jlv8Nreu zCOtlL6dB)-kk6QlMsO^)U0Q-P=|uY#JCbuYJc`i9WEzdB!!`)!gnG;;w7{Yx!EPpY z4gw0tn%>0SuKEH03e|p^Qv)?&$+WQE_dM~5m`#D_^>_FO1lZ2Je3)VWm10o=mgT5; z;^I-v+Pq{@a`lWx#vUl1;?-1Cz}5MuP^4ehy-6qn&D)y4HVbE^+zHVt$!p|HalEmNP6qXB+) zG+D9Xvp@8bsGKuPG&v!H++|)g3H&mDWo(Po?b)_q?8?Dw);f} zB@@X@g-5MpD1D3MkSeWknQNLh$M%e)p5p6b+%}+i9Lu4P|1HvIw1Ry0l<0}yft?Q= zRaFUcKiv11^;b$q01eN;nCtd8T^%6&;jORSP^`y!XW{%#i5FpMt!x_Yx26WflpX;3 z5gOfs2dv7r1rsZb?p1IaI38#_v~zLIrY9GlLCujKWmH(C4 zxsSyL-Adl7`-_W`b5T}C z#iaHnyh;Fnm+io%2{EKClM5}0tyB;?+lD=v!`=8h!)#(EmbZ93=Cg)@KTaZhHssP% 
z`-$U=GeyP5xI+{i6NA`%=*5r|%*1rEEL|h0=|h{|{i$VDWPAMOxvRH90IH_SCG%#$ zsux7FEpQi*v_kfquAXMw$8FP^H{|R-#9X{i&aR5hn)-28g6sB3u$D5yMMK_!#x)Hj zC>*DVSwnXpa|+Qk4U46PYVQOc=#L?k1{V?B#om_}<+R*`2kGVklfI_A-CM-6T|J`X z^JdQT7TH#e{Th$i(=HydqhqO>W3j7KR&_DI*!V90JG-1y3a*#P7`*H~0IB95gi`^) zOO>oGYXI0B=e1%1_X0E+I;*4{&|pM87&Fuh179`eJem!iNE%O?er3ci^h5hbK0CNudTe7< z&WpSzbJIoYWnS-PYti_$#C?*>6szt`uIB|~%vlcX!ZC_3*Cmqs0&_-2Vuui>_V4B1 z8dxRZiG41608(X-RRUfM%=6AYT$TQ1TY)}GW*Bg_8ESTa5U9IPOaKdAX0VXIB%x}? zJT&`@KQc3I1oPY!QeVN}OGes#D|Px~$Cu^SOFvyDEmwG-1WFRT%RdoT7Rku|{%;>G zS|m(Y2((u6_z1>6K(cy&q^pTmy(LADrP5TM2b=a|n?0>(#9FB>oxbcTZY_+#3>#iL zPF1ZFBgpicU}Gr`W9s0>3z~66j=jES_UQA_8R&Sd;--tes|>`D55$WRq&`MoF4h3( zHC%4d&8^+MKp0E40o^Z zEsD^RIm$6aJI}c|b$YQU3{$AWY0KUmY!Ko3c@uEjt>?O&Oq%q3w5x)}=fG+8D)*hq zSgj>SPU;z=%~@-AM-sM_?eVAOFZi|aZMXdy31T-sODWUV~e*g zZD!M3m`@LWWV9En&M6IWClco)YKtihi$^zrH<%sYKU2$8+?kJJQAkHq(f0?7E50J- zv+ZVy&)0tUByF2`{d?cr)#M{!scSnPY4ClNn81ep8WK$-4Gh>|x3n8;+VS!+s7_E< z3ejV2Goz6so$oN)*+<$@eY5N}5kIR#t_+%M0XJj0z>B*)Bo@@Pz{dLf)^%amWMK0y@9e{!k{q>fZy>HScU!0mysV8YqrVC9UzEH(^DRN}pW(HQm z`ORZ~54U`nm*w42x(SW0uAi1P=$^VdT=i2d0-L^IBNRIRU3nDd2dT=S`SN>m&}@&q z+^+U=(t%bqqbeIqf=x|4>rfVJ=(t|Ykgg`b1nI}=2V>tH|D3<+S^5|)wV%n!HPKH) z169}nCPSGX52?~nGVVL4{tken1N{7`M_xn9uS+WpX8}~pXV7p4EXK-7y6_sQjw?mI z;~Xrm7c}AxcTEnM)jNyyS6M?TYm=mXTto0J~+c?2y=77lUT9u#g2_GU>?RY8`0l|`a0 zFx1AsD%vta8C`tbNBy+{_^5+n?HB`Py9lOT%x1zxs{mpE3=yJ*!IrCyzk$^Dg!lYX z?f_0^EzcwrQ?yoa<`x!v@A2%b75pX@IuO)3eKaO^oIt9uT-yCOi7DY$mnfIfPjXja z&U7{6)w?R&H57EoGo)7@ALZc0-sTlF;&OUO`kW`7wn4S0ETC|L6XaOi)?WnBe|o@|@%pL}kt@3F0BuLW*F{|gXk^`AelsZ>ir zLBXd!hJ$}18WXiQ0Q%+T%W$!1eMdart^2`M#ntxqD>=*U`h}G#pM)^cAIw|? z6Jf`DZs8`~(=#(&`jWK$?F$X5-NTSl0OJSa&R^?>+lei~kx*X|7FH7txWE(~p=;M& z*dM5$8P?O%0T{aZ8j=b5N6L&+^#BFkqMbF36oqjsYaK1=D=p zBG*^iY!|_6*|_o&x!3lc`AYLPN>ogV1hJ0lVE?4VI2DI9IE+`Clz_H z#t|+FX%{R5BO`^|c(XcvS@N0eATwrsajL2ZysBk1g3{coG2c{sLvCx@QYQ&EYT^x9 z_WO#M5>mLL!k!fH#bH;@oWYjw`SNQ~+GkVo;U=p!rV-G3YWnE41CV)O$8g_VGgi_C z?#PPBJvcr1cCdxaxoZ4j{#6kPN4{A{s#)>$Rcv;Zt)&CZ3(_UMD!`MtuTbD3mrpql zJC>#eK}ACpiU#q<xKbAgb zJm=&I#XVlrtpB%(JgKeC4Lj8zlASFvM&-k@x{xR*9c%w|U>G|TJf)Je;6+I?Z$tkMo7$S3l7c&T{;h=1)XYVnq~I4)3GMhw zP1(t0`~LHUD@0wbP@1oe#Dc#vZJStR1Vu5GAo|ft*|7O%X>uS#B7W8g|6ow$%+Gdn zYr_BlCe>|c>Zv%5ENgzF^uB$4-){~`^b28%BtyAybB-8dk zNi=~Twky=-9HTPNW^CTWC{k4xn_+o;o*FNkzT|!B9?J8H0KKUiv7Q<1)G1*v!lviE zZELG5 zyb28>00&5IHQffgE}B*$z@K|*ZOGZ2?eb)-ortjJ_HWN&!Jz>jvfqc7;)!@-ibkfT zHk|k9es(6giHeF&00!6f>s6~S>0g&hJ>DsQAp;KC56GvIN@-bhr(D+{a84_m1K>ML z!+aeD>TqMy29k!=K@m~W8u+dV4qu)k6fewBMn)_lMv6>~yh?KkOo@-Lv|C!U#nj-o zQKnten;EKC&9=F;@9e|Jyxa_Ej)P(mk}N3qhnthPd7hB}PCDFn-ELNLPvZrEVdo$$ zz!`8pa5&u0e&NM0h=&sQkw+&zCvM;{J|}}VnmKsPb#o1ioaU}m)lW4)d-(cjioP7G z`>T#gl9^$AV5*~#fNLHvEhlMD17VKC>-|v~cZlu_{snNycm>9;*2sR=%H9d?x7d8I zCmUEQLS)%HGQpk`@HX>(4^jA^kCxqjNVp2s!1sBnfZ%egT#!h;(Aj*x2q{eqY8Z>b zpT?{?4eMT|vBH>nOwpEeM24PX*Mjh*XJ@Q?-p-L`HQZAaHQsWyjVriv9aVUOuZRj% zw%Zqt@TZZg&%1FyPGXiwN7r8^@HJ01TQATWQL=zrw zlIup^@P805+>=K6tySgofbH1HZ43&N8&(DBsc`FKHd+SPiUw^%mXS*PCF3cbCf84BO8maT2`sI@R(tKwh9C zX>9s*Tz1&g&JJd*Kee_nv|4r8rY$c}Mz9Tlfk7rx1z!FwmG#mHBqly5!S`~5kbrc@ zU4KT%BB-wDEB84Xq|j1c3(lc(Cp=T#7KiQsN;7=l2RobP7N7ESnZMhM+3Xv2U+?#h z@lX5GJuTh4Yico8v3o{eYX^h%iqUx@qFFNQ z*;8C{+5k?|nM?pfASqOrt+9no$FwpaN`?G&&`916#a=-gE=?UU;{|$P@~z0Is4WK@ z4Bt+AN5iQ-5juNELrRr8KNo5LkibIo?ND znI|Nf^b~~}kpl$u-jkhufsu*kOn|2&W{LM%wQ)$hLKO&_KL7>Ca;)8rU$>~2sEN&c z(w1R?E$^tS>zxlo6UC7aiFV_^u}5McvP{NOO;Q8jpPfF2!TGBVsLz& zyQcb4xz6{|C$jr!fo#ITUkS5I_H(^`6ywSgXY)_rk-)iQu1I)%Aj~79h_W6@Vo=4b zC^oSXB`(6FNgp4tEOo4efN3xK^Y)eH*$oV+=N5yq5O_%RE>IovO09u*qK+v-RTdQK 
z63Z|f>p^il*(*z9EvZ*w8|#;c@9v<$HOmFgXdKx3Ca8WiUjZjgTO7374*i#)>&C@= z7iRhfaB#4u_u`s&5cD3CFLbzW3n4u;N!pl>D9)Nd=fbJEbCu{Jb~vI~&R&8vRjRfM zdVYfqnOE&&_9s7Z^aZD6q?cYHmc)zJw4`VjZPWn6m|Y{GXPGtzHphgv{8+fX_VeXh#&hG^Xx;^L^61pf zw+NE<*}6uTP{{-H6Kwl>VThqbyC_PQWQg5TkLnKf_Sf^BZ*C zc-N$>aKyAi`W4l@((}d?xys5d%qywLHQgs)H~_oN%Xu7+_xZ!K&+i18z`HSDmSe`xCc zxC8vUj)XVVH_Y9b_N*ObH%5uuLV)jm{ZM?>$NxQU)AKIPzm~=C)qW;QU)!HjB~K1w z9qY5{LBJxHkoJ;_UqHNAny?f+Uae6NDT%|jVAOI$l@RA20*=l9vM7Q<0{Bf;vhm?@ zPlc!?%aMohyVhj4m5mwR36t?CstghxDTai#?GP^|%Fw8*Q`(XPo60kfD}B;Q<)~Q$ zx>w(lNcekTR3HKaRAuSnn1yYxG)s!%98s8$byI*T-Uy3+<&0^S_-v#2XJFa6!rZ)> z+v$;o0w0qU_HN|YuUlbXj&E7%orGuP5zKC>2AMF-F+uDT-Pf12-q@UJ zilunMjK4yZ3EW?N2AYM9G$+H^1$r&5z)##E#* z^}5mw*iTTOnctzwHfI>7g78N9|AQwA^_AAO&sJ z<;PN4t)86P(NpIaH*@OI`pFoTjqPhGE&mxQrTnjv5*|a`SAVZQI_8$MZp7go0gmf@ zC&wR@$j0G<|A3lY+ULCM=J^XCrh9n9;WPQ~Jm~Y7W;U4cIn2qH8yP81g+ew8 z%sNE&G&>dO5FL84_96E(z=m=6W!A&dpf*EjdRW!RF=>y(8s{jb^Dl=kM$eUPv;Mmu zp1xRS%awvZwD&he(y3=jd}$Vsb?`stBTw$@Y1KRvrp0?dC@%nr;$99}@+J`Qh7`?L z?-nFTIgTUfnggqMrN zkowL1{FC1hi4|md8w8cp${7tuBJ&8B4hI=dlr+w)#?g?-;x%+CqjihdOwEV#W0r#`V7 zptAKtao(3(_+7Unu(yFt#?Ul)glqwK)k7AITkPr_X0-H-GA29Qnfl~yIj?=Q@ckQ` zj4HD|8RtPa$YKR^+$xn{$7U|K|E_02`)9=WL9x_sFdSq$FL={=8~^puAx#e8Z8+|# z*46n6&n#YUcOA3l{~V&xlqu&WL8I*A-Eoh_n^=P?>iT65=Ln?j*jz2m zG~|Rxt;IEm7WQ7sIyXz5G?}1p@XcDEO)Y$)ud9dUxS9+jqwja?g$k7_Ra0O^Ph%>3*prLk}V-XM+ZRd zi{6PRC?Cu_6W86KW7YywvHxCj#O#wh^d~vCEDGorsB1`NrIgJ7J-C|v+92@>7Qrrj zy2o=Okrz*X^Pi{rE<;K4)kxX>=;IxFqKTkfV=$~>N_c@v|4KaBIs4tP1E#wv97V5B@2}8>|@Dit!AwDAVVP z82gWRPtxUoSik7DqtRcgW;p(R==GK1p?8_!D->{?Nqn9welKbHv==myg)Xc;2yjPt z6OvZ|-XphJ0z~;-AL2;kgVih5YI_Z>n8n5!o>ch1McFKXS^)!&Y6i&TR={+8g$(w= zOhw(prxsPLh>Zz=>d4@+$p$_@arl$6_M33qFT=rp!LYd(r%{Q$2;@eC<6nRm6SY*5(9?rm3kqR?F_&Q{7k7&cN@H5 z0JI`CwSXmc0AZ7~-UJp=W|ZI}hL`f@af4z5-}i0{BABh18(EPvyjt`?)?hi>S`{>h z?f99J(tVyV&jeq07$+VxySuHSviajtnMtY?IVBjb9mO-~k2eN0yn`KO~+r`Cj2@HG4AvxtY*x%C_Ip2qQ#-gby}z}_VR8lL~LA13ES z?VC;?`vDzNcvk8G5sbYSFZlB5w1*vb;9_buDL#nsFz82;uv1TLMhvvfmTIygQUc{| z0u`e%+g22CLtm@B>!!nv{E+;aQvkgDBm>epgz5v=_X+?cu)-68tb7JQx>a7=7v?Pt zfAg#nc!?g%h=@W7=Ndp0p{zoAHEO9wg}nCjvH-%lR<37A1R4W+0~l#u1$Yz4SHoB! 
z0foW8gzErcrs!;HYq!X{b%^A&)|3r5|BEys7EHMhZ!zL1!zTjX2pquJXqFv7Df7cB zqV)%0u0xjrcYC~Ain+;y2!*~z`8o!WyfkKE&jS-U5%}aN_XMJ{h1<+o5G@wBq)9_M z7@wP?GAt+L63HNc(8n;7VWDV5LNkiMa)y@x8??|WVS0-KF2yDaSg5+fhRqHD!=Nt@ zz%JqgG6uWyOlPlqiGsZ2ZHd)*XCn$cM>7oy)*Ho<9VX8BCHkzmQ(iQY=dDQh+F%Owun*jX2_ z5EyfAOM(73l{HI3+nClZ7I4UT+?R)f>i_H1+e=A*i!l${*Ry28?S6q!#$xJ4#BrIgxO=h_Ytdhpq->7V8+GwU@TC5@zW9UViRJ6pzD{!y%3Ytz-_yZX)3J)nhzmz%#3~UboY=F8(yQT}52)mmeFEaxIMl1CJ z{S(ffaHD3-UY*JQh$gm`7Gq$H-q+KI^Mkw^amlT14Tu~JWL~((H{td1>HP;t-}!4k z>}Hi_Vr!@m7jrwb<>jkvW$9EOk7;8d`>B12e52{%UjBdUj(I=XHMaC|2RP4Vqu-Up z-kZK%nm}hCsfs^0h0>+DkbM?r}o1l-t<6EHkDcm+(yu6uBf_@iNg z#p^O&%@&nUhf{AFHUD&0*v&YfJ&{+1A1q?W6euZ*0|NU7DL8T1&j_%pJSCTKFu1YPNRx?+xiOyd?UAd z_U6@swPcL>?;pOcT@ZgjQ%-k>?gMVcwdExBf!+^8*Q8pjSWVY)gGWPnU+D-ejVXH4FqQ2pEFrkPlE3 z`~%qa^*h#|WG5EhJ)#(S12(+s_I7uWg6P7InXTa9wo1$13&X2EoI-tqZR9sK9hmd1 zGx(iym3#ZPpn*Ff&Dr}Y7fO%IdN-D*!+Ajx)IMeYsk93J43u*(>pf+IJHw-?qGU5g zKY_yRO^r<;t{K5Gv2a3MLZb4B+mW@h4Mw<}EMCFS8`TpgbAF!l0)5dPMkrr>ZlEgV zS!nU6j2g)NJWXGwbQeWv0}n49x;G(y*CzEhRwH<RNg!AY&XpZk}b{=Fx zQzvTpU0JO+Dvls8k*Js=n|e`pGUL;xfRXDL|4??K!a*1`PCk z;#T{WCCiaDNXd5h>m!)6i%#})Thy)EET~(9VNcI2aw}8q(JWQl!@UOna96>pk_cQ| zQkDh6o#3t%Fx$^b(ZWk$_BVmX<-k|9R186{PGRW-$y^%9IbQH;t-$tF^JKdXGv#qb z_)X8@K}K|n0huAaA>uvoP!RBx;zNu?$O#~tFgh=)?3Ea>;Q6v6VT1sF$*m)PTd;ppPw8j}Y+6)aD6FTsOBVbpY0HZds$&#a%61EHGpq-+Ld@Oay zhxpPof!$81O`R-}3OR*$ibf`EaE$P-^~HWZ@(qTVtJ6le`;6BN+z4ypbg<9V1}8QP zj8^GCyylLcMJT9w|4%`EGMYK`%`4%Y`HD2acu;5`j7Kty+=1wO3MjuLYVAni0OiGL zJlh}|8v%hi<8lr|Bfw37zMoM`VS?%sy%ym^tQ;&aV0HtVid4*0>?->iK;Y}N`zU6v z3rN!w!^DlnrCam))VZHPJ%V^9K)RINXsfF z)|9iuZxv7@WqhXi2|$Y6aBF&PawR5b$uj&~%dyOijA6MQor;)E({D&r+o7*f#%P)i zds0&BJvdv(JZ(=i5;0lhQ<~2~*M~x}Nu~E0gP7YA08cZ48lkD>r^IWm*2h_3o3o#9 zosK)$6uB0 z)&8ulleh4B$HKOd8ah}<*>ZWpV%^~a@$-35a3 zU;Z8D@(6Lx6z19sMJ@xdTJ++2>vR#cZa;%D8C(~JLG#6J1*1M6} zSL>(yzO{?Hw<2T!qN{HXN#mQ=LyGU7OAzOj`21W#xdv1@vtLcwt25sDI($g=tOybn zZeV!?qDkw{S6hsG+=TzU19`kFT*K_r%r0n?K+q(%r%YD|9Qg?B!83t>T5DyIbzuYj zod754YbuZkHxjhM-`P6Wm%&Ik)9b?{>p})zpo}b=`6S#<&XGYSdH^cX#qND%57+$Z zbM~2Hz&T;2Ur`=Xet)1TM2&4zsx>0BBZ<|B)19OC^2S|xZ|92b7i9qZtOhrU{dsKM zip&M3XA8{DX+ht;F;0)!o(vqpIK{=pU%z+yc(#L-`n!0hh}5XtUMnsg+mP`^Yn1&8*@;kQO}#C<9Bk<&6zHRn&AKKbP(L z;+1=Tr&Orri)7w8lOq!W7zKT`xs0qQH&`d#o#w2gGgm)-ex?!hP6^N=HlgW@`{}2r5%72GNoOHB z2C5uc#KMuN|6L*9pb^}^DeHqTkp3iY)CtNnoS68RXKjC4jgQwmS5V&f7%Mn9^WKlp z4KD26!3=h^rUz@0Z)M@F8ayf6V95u$OyR97Mp5`Zm5)oQ|Zq3 zlUv}hkqQ4^I09mN{pj!i9yWyMw#6mD-e${M)zQ4UBaLByWBvbk;20;T++jih(SHAM zx&M3|=iUwa0t1q^uHNvqPp_|CC{}s~_7Py8ZoBy+r^-c;Vbi;QMK8ify70e?1U{Wx zeq5!^U0hI0LHs$Y-G|zxT_zKYFydTw6Te9gpd*K2Lcjc+xNXOOQ8Ms2q0Ge$<%Q#l~-X z%CtAr$Z8T9eqK?0-B+}68#rhzot3#F_rv~15Q8q;2`q>ro>z*n zgGG1b9X{y8nQnd>JHE;ELBdnG^PnMU%@zxo*&qAje>~WP=LF^ZCo9&Nqo73ai(Yyw z@Hw?e*R;ZQlP1 zHmQYa0V1-drutm}h_nXKP8 z);ika_wQJ;Y1p+@2a-`G1j(~_yvvGqKuV3Rdv~EJZj^g4J5mgY-Bay&yV`U7=VB!M zHhVk_fr2-ZY0Qiw*Qa~eVOA%?VOn{+G?7gYfx5>|{mMA<`oABIb7Q)TY>PtIg~})b zT@q|md;;ls{L+Kq*i5ksm`*IiU<3AvYsxmP&sgH)c=Is zr)`P|_58zMrDN*k)KDD$*I41Lt%F}-j>a$Y7rJSHIiWfP;Tx%`!eidC&>{e~6$W4g z1EK(R&Ae(tD`6rXqu`MU^-5I73_HbjgZ zZfOf_QMTIF3i8me{L2l4Y#Rp#X$-N}mmSNRxLDL5`i#Tk~ zUc2;}8aOmEl00iwZ~38mWUEIDE{GAl) z^F{1gml{I*7oQ%ufg>8-zq zWzzgYx6lfS?iTgCl69_XwsOpJV%{1!+3kNJ$8(7TM<#RYn;pTzh?D3PdyaTcrts*B z_2{ZcM)vqy-s+oTz)#o?rQaIj(Ot3W)U+-atvv3^@fLrOoNmk z{!48`lgqh5Mf+R&m>-x92|D>P3xyYN;j*c&v;tQPAPF>Xsqf>j$bRp5voDw zP7ZhEaDipOI)*?91ZLo~U6{_&Gko?8=yxuZW93k0B4!#mt*t|?7yu-RfV;u7vgXK*xgY;zn7 zAJh-3>88gK8 zS1IkK(CHvSC(xCfl7ZEQlBA)vrzR7IjE%NnTMje8h5(ivr1tF9wgSmD#a9q3czJQY z*_O3|wyA+8)j#>zhqT;(48WFr$L*>=pY8%NzWXYTd#<^PO_&NjUwrEg-gAk)VihhE 
zaALWp8E>bozc2&SE{gB4wR)HOh%^t%y@B1$?>g*m1)O%oh!`6_o}ca0UgfyYnl|K# z05zhO*yQbh2x@57Mn~tNnTSWTylKw(jOh&ZEDatn)ZHMR6tA{ z89W7$Aix!Ktz-!&cuN|#PYfC?fGBhb4j;HBdkqYqKK%tz(CSm>f1nGXB;;@!2UdSb z;9@wy&Tf*wUAVEf65wy7@iWVa?-lD6g#RrBr0*HrYz}zyOslc$8X56;kDCPHv*L3qs%k_dIv(&K=^B6Jmr*`c!> z$=*g;LUJ~hY~;n^u(u5?yYwY>TOqbG@3a^N)`Hw<-VYxLppo|2x02rMav%Az!Qk`V zDx=mHHXpk9-xg?Nb||jcT+@bx)`AjR3*d)NCo7`000()aW6X&Q$?YwaGZ(l#%WNgq zd+@=#B^dwf22gv$g~L~~{ZHi3+3pz*z@MQX59U72@Cf=mBtCloFdjX9fJ!GThR;k- zH>T>9wPXTo$Ps9r9G6TtYG`Qq3g|Hg$zKUfWdk_5S`~!C@_quFK!lFs<>8*`65wSR zF2F2xW*LFFjyBn))-`;~6}XC|sT*i(Gwr(tCrH$QS~DV#Kv&-)it$iOGd>KrWwR=G zz(33{_m*QomvjNZ>QRyF{&!0WJ!D^v^W)!?4swJZ)el z6@h6?b#?X5r;=yAfneNO9U_G;3|aI50HF&lHuPdnlyLnJTa_7qWIBKmu3x_Y?htLi zr|=0?)wlX!ukTo@VI6;seg@a*K~ER|^kqq|F&vOrsVW4rb?JFuMvRb(}K)sNaw;wJ)R$npc-<0ul zXn(JgOxXr;AlxC!fa3%BUp~5hBL$m~F?5=iBHD!bUqJD~2DchhheMIM6?ms4lYiLE zn~L1LlGWgqB|b*VSr&rd>WvF;Hrsjq$^sA^^PoqJ4oeoqsijuVh`_!%!eXZhs#V0D zqL31bk%tOcs>7oW`{EW=iUp#fh_u2~02rJGim0)NeGLs%Kwy3q3IT{xe+_#^-&qQ9 zv-zRJ2N#tzg_sggUQKIxUPzHwqNbg8n7W{h4c&FTSrLjpeo#>?0>v^9yk5`~g3u>G zLXmF_vJbfYV=P9~Oi|H;*!HEdgK?L+B>T|G;*BnUPzTkWCm6S_E{Q(xg@#Mz35&S@ z>e#4@sO|$JIcV8XT?YDBYu;-R3o*%fM+M{mt9?kQx1?=#Y@|(maT5<=0on|^=I2o0 zH*b5sPt<8@!Y%3GovJ^cxM_f#-p&yqCpm!bgfwfGW;;A!ZCC_QC3u?`BGFDlfPko=O2 zYNw<^ERHL_UmSOmENJOiw){2pbvmG9adV>PhPW0f$T#o9zlZ^+qhAt(LI7I^S~NT` z-yMKJTcBJsaHM|L1jh#Cj070H26q4~YVFyL@B{zA+ao?Ffcxv?)KtLcOuV@F6Zga3 zG#!TV0v5$a+TP=aZ306!BXFhxwcX2hGDfxc+Z!!fGYN%4&{E_jX@dZrYNT?+KlrhE z%MOmjsM&l3-~AvgF@dTD85t1W?56@cz5vZ^JZUU%fqz4~Tb~H*(pwo$&I%ta9MzY5 z#h)8G4fjmypwb1F(A3Lk=r}?ow8V7(#9d&QR2uUY^w-^pj1^PbJMf2 zHf}NP;%Ht*Dn=yv^eWpOF!eAO_$4w(^&~8AoM{-rzz$4RRob@i%w;GqmI4*UXFj*F zyny`}5y_S=VtGJ>a2ANT=R{+6kqNIHKlto*~6yjFS~ zS#d1Lr=5=(~*~*E7)*Yuw2JZXS4L-f=kA3yq&Tk(2Vv*(R;69FDZ(5S>#k? z6l?i_B=A>a%sfm$*E~)&P=@dbqp+Qtai2c~!}|8|6kt-igWzh;odoT?of8RCI3B>B zJSq5{*8t-$|FNGl`6^xfj88vTf9dZn48i>3_b3l0hu+8)T7|T-U87osy~bu+*{8lQ zN7s7Kz3LkM8m_knU(gs0DjUR@+7h+p z*>65+7Wzf7Fax2HeUxY=epaTk;ag6we7U!@sn!I{^2c+T)m>86m!D+yO$ztC;Y!;l z2(mj!YF6y!o!BEsUK~YuJrHq#*b-eO;7D>rp~r!wpY4??{-dhm#%fcpZ;)M zBl*CvZ~O^d=jYX*7m^OK&R`oNd+*Xc5irt!N*v_znYoLb(71++wQ?U5BX6T`2?A5# z0a8{QS)SgP5cWkPjc!VepPQUCdCqmCk3YBkJd@I{60P57rg>bLl$hd!A5F%*n@-<$ zqCH)!ijQIZ_xY+$jKsuN8(WKQ%j3NYi(REbX(K5u<)c1l2DTn#Nb7O8FFI(o_`@JW zrxbi2i@kV~1~MpDt719k=8AnfgDog?`6b+rvCfB}n4`5)&wp&R}lk&cH_Pr?dLrQ*=$E61fMWM=luo<)!h~e z_#h2@dEA4v=|3S!oGJ;+A}0bR*Kw_+&Fxoz@>E-7hqLCbsV@@F*L^~)F*9tZ1^xk! 
zH%(5SC&`=(mxCt`%x`IxEJbQW9gcK02zG+i{MKKD7r*J5=>y;z>M8dfN-xWgNzXdR zTUH!y%D#78=lCGepo15&$s*{vU^fIF7jIH4m-l^}ddB9j zWY2`cxv$&C`TIzC9aBu@GKccNm zx7s;-v)q<5Fc`Q%GIT|MXV;q9F{7Zue40BsxyJ+?`m#W&BDkKT{1~|}D@dW;dv*k4 zTRtOJNw!o_Buo2{F7c9e3E2MqHgi&>EoGvs~6S)K563tN!K z)Frg!jfj2wT7SOS-@X)Yg~Z!w&zb}l=ZYm$FsC9YnbnJoS1?NV4DHD&yymgTJUz69 z=Ivu3a**eTGpFp({fn$h1!ILPdEnpI8ig`-X2)NTYrb1)1+%tE0Vp~Apyb%QT5V^YUABzi6V)1e=98VFaUK|=a(H+o{#SZ1Zo7n++3tyX(y28l-I&L+SF(M^t zkXtg>+sXc|q*ywO8t3xf3roG+@t#jUftB0#HmA&qUqxIxt!XBW88gDFnNQr78bz1q z2_~eRl}xhpM_FFto>aTR`=rw2H&1xxcGyq2JPoXUhu*7&6VCy!*C-zc#{G`4V%H+B z;8y43gt?8&3yvOLA7wam(I1R;tZW#zF^#glI`EcN^B2CIris_IuwVhH&SI;XW=GD- zmd1sEjz^S+j?O=_yoDZii!AE6!HBu#CCTxuHji@@^e(emv&8skfnr&G+QW_68+h*9 zG~J7un!r`S1X9#!IgfRGU}b4D#yX#!fnnA#jkRN`-M))iakRi|%qEJMz4TkLP&fxG zyida_XerIHeA3?Htr0%*qTJ=_tK)Z(bBPyy81#Pk{9_c_iZ$Wfg?(xk1H$fCS$?s2 z6jP3--+&ag-s%{*@Yhg}66(*anYDsr0*f>NFV*ENl~>}32dK|C#A#sYT7u&Z6mc4p z#kBvzkeIrd!6i-61rE)|`7!IRQc>}iWsQ!a=p@#j?gv`Cxqr2%uGT;UYm7rui-zg%r4yliB>nj@=}Q-jKgg| zibuE40EY9$#C@PJMVGdt)s*s$>g)#ngm#SuDP6&T-LHj}1zvNm(IVl|tXc{FY53QV z;IEoIf`lmFz$4c6dn(CGC7Z?G+LZeMSx{$GwB2a-BChK#tMQ^W7sxMi)-&MegUZJn zQnR;nEsx7EFktX4dwm55*9!`3F*8`VQ8b-kPXiowOQJ%W?SK^5cXIv3s9k_@O$Cx7 zd^F$lbj&M=fnT4c=3m(LcO*C&B&I`{`-0AR!ax{HeK|Gdp8%Y!DI-=3quBm_iV1wK zfS+r^{NAha1mL8k{`T)H%SsVf;@Er$x7`}799D-odsnbEuzm9$mA)pC_Kq+w+BNtt1--ZOwDmk0=w#5@!R#%FK549z$}a4 z9tl5n>!Fj6+cR%G z+oRQfR`f&k*xT>VJ6uZ{97gRTT^WXl9576wqQ49if|Qn*L%FQh1fZ$3_EtN}YfG)N zFTRhqdg(~tO@T~EA%HXn4T=w0W8l2E#`g_73`gM zY7NG-Gqd(y$+gISR)EBU;~S6>xn|8V-2o0cF;|D@P8TE zjblSbk`j~$%>HG|#&B+Qo(iMP?Y-RsmAh#X7xb1u5oL-Vn|#AT0rJcW?#(?-wWNE0 z)@dS-Pt6o7Ogw3S_r=Txw%;7G4Y7Z&l`hHgcb1w#S1XtJ#+b#!bo_19zKacz-l zZ$Fy=65=6Wb;(44NWl}^VY}RS7x36t8O@e0O6MoHfA))9awfBWUu_} zBgaK_oW~>T-0LM(23*w|mhuND`^tGI!NZD(RldCL_XBE%zMqkrVOdV(&%njQKc7`v zx=abOJV1zk(9G~xfyd@rRi_3Nb>4V3qTYVZx1sJX#}J-m=1I#2LS7j?WZNI#v7duB#u4Wp)p(N4=ADXB0$1PXg^^up z?aEJoesA{iyHmqLeTIdyBm1B&6es1Nz>lp0VIbr~e({;2EL~Gze^HQ}syut>D*5<< z*7a8FLxkCWR28M^m=KGEFkEr`8Y8oss(odQQ`Qk2qLUqv|(p#yNf_!(tXD%KejCZp-`Amxw%8pDJ_Xf3fKIH{tVf z9}O{1M#6C^Mk8C_cR@#A_7KJ;g^W+XJ!vv2YBNu)KK@zmU;Q_g17$+F zE<;fv)qwy+`rz408^=!~#Y;q>ny5ySR?{1#8Iz#V8>wyrfl*{QNiq;2{npHWfF@Db z<1AZE()qxP0pXf%H#1;XB5mUUR2(`$!J!50e4IR@Ee}XKx}C<>4RQfp!J%O!FF(7> z|F{;Qla=t6?^U}3cH^fwoh`4meORTS*IzAzvr!}_wdPm2Y&)3FDoEVG-SWFm=8J#! z$o~pTe^ruFKfn|H+kEKq=IXBe`20z8PTV9SLH*IEtr^R&fTUO_3x;=ty(%lbO$g$q`HBKG5}^+VLyKlT1Y_zXuc8nLjPNV;44TV z0RHNsy|)r+1ON<}V5H1SINT51y?+Woh!g{GuaBX`ASk-TE^y_EO4VCIn^{%V8a^~} zzgv-4V@+ECw=-n!o!nkYeQSTVF@yfGSBUREX(hpJ!Rb0wQ@$P6xfdj@r#|tPdL9Nc zxpF-C^X@=ut`p>zfmZlzV$XZcFn1vn;maVpNN0O(u2oBwgdyK`vO{-@7RHxLK~;Vt zmu&4pcw>uz%Mqkl{35~i?16T21qc&?gA9Zskq+QLa4$qTx?IRpMWiFdT@bw&U_VI< z#TDaob8|5l?&t(FRRH1-jmzIeUloCVG6)SpU6uB#HS)CYD?7?^1Giu$prIy{%6F-| zUCjGcSL1TphAYd9xc%F)&bm}1F7paHDOm*sciM?blF*-IkTUlB)&1FTANfEgqZntp zpShs>*hliP#E~QC4*eMS9QU#Er@vf^r9VTZufnU=YOmU~%llm}=>8=-Q1%WoO1dKT z`TiLPDt}MfOLxnTXIN3I)DV*zoHJZp*xt(!;#0L;`=D&o0yt< z#Fx?9FIiOY+tI%W>#*X+J!fZOg#MhJTYenyDQI#0y4=VT+NbT#1SN;$K(IZRn3Ji3 zm`^IA12`1UC#P%p8yC9uS!T1KDAv3hb${iI>V9oG(q#B6EGw&SEMc1@Eq06OCkg4r zuF4bQl&E7f#3O84FJOAyDXw=u(bCCOQ<+&Se%(TsM%5e~qkWfrV7^fnK*^d)4p?l{ zTU7H@`>>VlC@9pMCwD8WPBNP-4E>JGYWWN3Ho`NN?GcE2Cq-fq-5y5Bp^( zD2eh)-wKg(UX!})w)j|;IV>O<50{|7L^^`UjL;jLZ@;EUV4ByPjJDm{DH{})n)!y! z%(4#6x<8v1aJJXfia;0L&#&wHl5cV1qLaYv*KF$&%9I8#jFA@Ah)Oer(s+|&NE`&v zy%2WN3Cgz63lso>C6(CzRy!iKTEoax411o@ZFB4ub|MSl(lK9_5MZ`TSUc%MhZ8c? 
zppnr4Zt3+m#$#B~l{1+N=VN+D0@V^qGe&z0oJoZ2EnoH+50*L+)31N<+rH5Du@V0D zAtr7qMWT+@B((HYOQ!l~MCY)freJz~*Nw562RbW~kff2D#+lff)>;sA|QU66bj1?@Qy*ohcH z(}cX@4hXb{LIXgo(`+VE2t|6zp;)2|g!?D}R-rB8q1wAh7`irr=xl*9Q|Lp^vctf! z1ML`2sbKoo&=~x*kL?OpEzHstuD)IZH~W3ILkIKpY0r{Y``Rw`(uEQcAId$v#^9m7 zr^}D1ZX8Ln8qE|~!89)S298QJ$5ekT@JeoTJ|kp{SxJA&Yt3ykULW;xoO1a`g5gK= zMs<3!@d-7I!b2Fi{VLSnOnM@*_RCx#^w7V+*bJMcfRL+vYEE)T`TS(zkm{+D7jz9= z;~%0~n1tpg3_Z82`n+KH497-BIfY!aa>-;vQ_>Q1hX?W+7duUiIPNowzPi%j76qe7 zgHuj}f9{4W7a|R^&LHT?iA>kqe2y!gr{gs`1)2zN9;61H?qEK8n=J}JICAANzeq{( zwKHofs_2A(R1);CvxUq(SZ1v&S<>UJid_C1Dyn54%YVekg$I_4Sq@1(rFgr6vqMw7 zB_Que(`=mnG-$2Q_IhKShp7pU(&bd2Qe64zftAFON8?D#Wq|h-GJuDjC_34g!V-e! z25nuiILu+{_!61-STyF<1}cMJVjK?dO+{RW##PHU_xFx)Y8kRJF#{@P8MLikX+QW* z`ZduioUZU?Q&8nq4}B6$hBni}&&q{QqQjy3mnoAYSi3Ev1XF>+@QajU{)79)0TBfy z6=YK*yU$Ps#pg^%Mgx+os*l3o4`>79;QizKdrR-WkVlnhKEF}1G5&s#`D^q>iI7Ew z+H=B%>+7zxf9!e4CCL(F@`6?BO$~>tO0yo$-nDTbaTH&PCtI_4El8h|$J~T-wNpUD zxNTL7EF)c!l0SS-EJxPrmA@^FeOIPw7Qo%Dgr7Dk5K(=W`)+6jHkhoWo~>uCfXgj?q7()6THGAB(#nv(Lkt31+c`xU#40w;)I*rW zm<;a^JIKzw-MDH?Omk$#y64hbB60zA$$VyOceHTX&OhGGf~ioJJb?@BdXwI(z0l4; z`FaUVee-{tdSjE&J@`KUR1XSWw;mbjWaY};^BhUwU^V5r)4m8;w_GKvxwcp$Fxy_e z#(ZU=giGoq^E3#2Dr*EKw~bHRj4Dp=*ukWXo`{SQ2lJ$bC0@$0lm6dfQlOgOY&k^@ z<2gMv9N=+lExhBwueQ5CtkUU;p12L~#{Kb0FM};bQ;b`EH;920=wO$Ri>BB;Wv*cm zmYc8F7lM}19}gd#wCXcJC-kwd_++_!`vF^88J4kIy*OfhLCyBg z^S9v&{gNq1SVF6N9e|R)z>M0@b6C#IY~V8frNBqMRpzjNmY}&$HRrIakfVI24^vH( zKD7RZFDu7jLrb=LQWUpZy(dqmf!}*9TRLN58n+d3I#O(7VN}o3L7_VRL00LmWp)lu z2zf<8t%TKG&%7{P0sdvteeR1V%zl)#9ArE-UAho$Wd00lGW^ ztfKit1G@x{kO-KQi#a-SXY7+A@ygIgY*FbfO>Y&N2bW)mIO1Au9Q<{Q!U*WXLphO<$V^O}MWBV>J zv1|hSx@4p-LesqWDvz*ph4ag0*M~1x8YvojU+=#vu`D~wGG|gpWg64TYt2HQ^{GHA zymH0u#O=i)eY^Z3yEUTX&KK);dwu#`zt`JiUwt~UPhwhScbS6j=d?O@oDiY2e<64i z&zDeJ6Kx{B;ESBZ_I;ct#YRa$DmgA~19(<$aXP(jSky!vy zbTh9}i->4vE4uyn4(k=YJvN1!XQS1m(m?~1t%IUp1-q_1AxwB&2!68DzE*77(!Rm0 z?GyMywt&;2`|iSSD>wB7pCV(AVX6s6x8F5iqx-~6aOWkhNa3n8KBc3?@e+3ZWH--QwBp(;2xeV_99d9sejg>d){stKiS%OAs#O}nr8ybp%6tY*L@@Jhd)=V52~oj5Jmmf*)*XM$|o|j#Y6r6V8|N46JldYH;$2I@JrCF+kW5Zfb z&q>w!Nrfx9$dgGazrh0ihSli&?94SXR8GRCAmGPIO0=otz%XJo@QT%DtVGQL*Jtu)`Q^%5kq@b6cxG zclXWvrd!BSr=OCvo%UD0xna-1I!Tu%_RTcuqpS4CIM^JE$_2JsTvB9?O_|tMu*g2G z4<*)QFu5&~A_MR?zls{VFw}-nmYWBor*zEG>>6_)ggALz$?|pf-iW8%(=oT-)M`To*0|)*`F{Us9pKxEDOK% zyExR?`RlHD%^D0U6&%IT&Nu631vheb9=CYo~w&vUkOa|HECSal0g^juy`1W$F`d{T7 zlF@_Gr7J0{R$E@7y45&$PgYAU?^i_&<|*VknNrOQmA&T3lGTVLB!oNca|fLmR5bbk z0RTbeUAGtZ{+fzz)pDm(!cWwjk`bNDz!X9X^i1W%!TQO9gOj_unC6TlBw6lZ$8akilyD!wzdyZjgKNYfN zn{rcJmCO{>^o?)+$p5Z(bC9`?Y#+^8X`!9tcy*uMeDX(Qo+HAZU_Zn%-yhO&*UNZ- zmaxCn?no#f0QDT!@COVmReXcj(sJ4AAP_1%6iuqx_wi}PL&UQ;fY=KXHqQ?ZVam?u zl;mFJR8X8Gr@B?X#Z8WT`1DA3NwHbOtK<{-2`NZb`|O|{!Pdbc8B{XY?Cj`3=U4MR zT(KhPpfcOq+Jm|oGw8WhNqZY>iaZ@&%RTj~&qm0)j1g)O6l3z-qZvZA>lJ%!U%pX? 
zvBbI`=M=8pOue6m@ynu`i^CbR!o+M_jw4ah?ReKGlv$poAdK?;9)`kutya~S^oD|F z1no#*bx`@z0_sh`h`M^@Wc>;x3imVn8R2W6QLP$zZlVsmi3-KPT*`dVsv~D8rX;=1 z+aU?2p6;0Q;NUVOmAb!cULhnX4`LEXKu``I#HcgZ3*h2l8OasxJS$y?%~VpmWKzvp zYggQ!gg%^!B+7Ebe?<>8W$aWf8ag*$j=`zTw5C76d!t|dO%u6;sD@qQz1LE6Jo80O zjtib4_t58kqM$$t{K5`^g=)P>b)h0{YX#TglW#M{IX6;B2JLuo9RNcXEh_5#7KGog zd)&{_iZkG^mm6_Z#?RKrB=%w2eU)&nYDv8RkE2uh>*$bF{P_Lh2nJyuj5HnWIa-+_ zKbL(Iv)l^K<`X!+W(NB1hmfg6P8we0jW$HFDQ8kc`t=Ap+CV(-%lvSlTnxYdRGTI+ zQGG1LR%#$>tDP76S`h&UO+QzGAx8Yt9oE3X&wSX4LS?(yts90h*Z?n^lLnbp_{o@ zxYkiV|M}NA9rAbNNwbGf2(v|<2Pq}l&!_T8bX}LhayEud z)O?T^>eOV zb``A}-;Q&lD2ojt4?TS1u!AR$>`i3%&f`R1XW53E%X4pkM6t)4jLI-8(vDuZxVDM~ zksZi}Ld$BxV}NS`;hDSNlukG(S7m~!aguz2eQAExpP;dFixH4F%tiqLupr_jo^mJRlJ zs_P$LveDID@{$p*oz4>vP;0?xXFaLRT29rTYpas?0QnpKvm5cWjR-tdWNCxPdghO; zrSh5}4^;WNEnb(tH}doGuTIs}HkYE9hF!v_AM6`K?xr%wq^W4@cFgh8lY3wSeD0Hm z*?Uf&J9c(m9E7#%C0#yHLJmPED20O@LTksM`?cDn1&fTNCFiH+6$B*2q^Outz}H_l z^+FFPzigxhzZrW~sQNJbu#0v_Yv{SZazFf{E+lF6T5R#y*Gmm#mU&vSX=8kg_>bK} zA`%DZrZwxDHG)Hx(NuL3&I9}XO}7DhOoY*HOtox~Oldh<8kaVEiE{RDB>ia352I1P zkOW8&UBcmoDja8y`q~*Cw^}ITKs)=^v-c^y69QDcMrl%uh-qmRIDRrJn$Z$d;J7v( z++;d3C7!uPdQ~x+ttHT~ho%e(;8O=6`|S(zRdO6HS)4#jm&EZRetT<2gxiIJzV3R+l77_{K7~Tjxc|Kg5e5 zLxQP<%QSKqAL?9+%JUOKAEANI%RAM>x{S@BdbXf~IPH@$3|ZL+00sfyVE%scJo*u3 zAr>;WNZ2N(fz6TxweBY{&Y=fQkL`pSbG*yx$Lid?>JKpAOh@fnzS|=tUN(Ns?v4!F!owfPk|I9+FBQ8>4MMn z$0S>ul_?v?t3VuOk@45{fklYn=;J+qQ5*H2I6?|{I(zvNB-v#Od6IuLA?22-$F2kJ zm?1s+4w(>pmUD+EI%d>*3q(X*_6hv@3p9}j|Mg@==9~09EVb;sxHG!{skW$3!(3B- zX_{ruk@`=8qnq284n9|Q~*7(_+p^v;=s<7BGCWEdY z;V^Vwj8E5f70kXfd5-P=rd<;}@)KR5@gbl+IR?!uU(2TA1JlxuK^*mEN1DaeyJue` z*fh7Y*J?mDTkFR&-+^Prwlg-Hzxt{}NGKVD2Q8lY!Eg*FAQRV_m)!XF@f}6|5n8RI zQ6;+F8H!Oi?T#OaSwc%gJ=9p1IzTe-NhL9C`+EZU+9zf{^oZ!{bklrkQ3pOhGq_(kdI};s!r9byI^iuK`>AJ1*JUepS0no>JB3#98wuh+_dOc{?Cd#`Qr9z*n0 z5?t3m8j#7!QN8RN7CS6$(a)ZaS6i8-a#5*OIBPcCZk0Jzci3%~r+<6VAci4a-} z>GI?xn5=iGjJ`UEt}z~2(tjp7?AgE{l!9SlmJfU#WxE%HpBh{3QfSQA1<>L}ESfZe z)0M&|uauEeWb?caIn(i|bdiAjbNj$2({Fnrc7ljbtm@ceRRz!Dx6PZ#kc7p3H$ISo z*Zo9Q+;xQ#^^zBYwwFzt;zW2MBAp-InZ-o_^1Z(u)_N?h4y__Nbr7i$#Zv{{N3Ajq z`CGJ!(DVkH`nQ8~Jk1_A5twDsCP(v%%*;k}3xTqqX7+0wIN$>xEt5za!*z;(Ft0uR zeiSmE*@qnkpX`I;b4J#0c^Z37@0DsK>_KkJ5c+~i`BZc!l36?w$)RmEU1(+)Jl}kG zC6U>%6+9OdAmMb4^W4D!&hdS$(y{Fb;#i($URWr%kSGQyn>Pmxe6k{-b|R#fTERV@ z+b#!?+VU?Kjn`%>0abud3{%Z=(wJEm63hjXVTaHu%ZU7KQ%8PJ!V1a8nu7ex2;c z)>K5WNoCg0&c2>{j1Cqh_1qq3zhw7xVYKeWLL)AMpEo1jWU?2REMW|WqbC9f4*6a7 z_yR{Ul6R?r`%AldpZHJ4BXVXUrA%HVSUZU;Qn)3p8pW!82AAp7B55;OeEoPr3*bO| z*6z$lR$Mnev>-tX0-I6(4vw1@q5P8x@wI>ES$}?e-(hGb0IM+cNN%zK*@xqN66DlMnC~#4Bhq^AN>B=w@#8-7w%%1!}wJuk>K5AX9kQ+ zg4UP(ksg(@c`z*X2yr&;W8jt@F4kg^Zu0FDWIrS)J=F>)Th*&9K>t!fA3ljT*`2%#r8ySy+ym(F;B3KfHKK1} z`v5kB3d=VpSA^kU6CE%y3xOBnwB|zcwUptyQ5R`~d--aHm>yVKHN~Iq1um*H8V#l`&eEhy3-9v07Y6Nnv^2-R1a97Dq}KJ5d8I+3?Pz+!!(9gy&qR z?3#UB{a?^sc{+{7^X729pvgcy^|L ztUE6?mq94`i??QoJo~%LnS%Pr111!mrsj=|3?lNU`NA?}Yi){Qu-Q z%Fn-b-9T>7do75H`B;O5|H5W|4YzE!QMA;(C%ic>|9HoX?H}%R)^oX-v7-eurw|k+p z4ykWsCh>phH8`mIXen{r_Jywtz&_?aF8HwPw5&4C*6v_aN)_bqX?FXBW?xvVVOaBI zVjv56$}7C%&dMC&_k`kn(u`$dNREF^w6lNF?b}b15#M~5?9I%4{oA#erO|79uZdMv zQS?DjLerfS4W4d&zEm`E<+tZ|{WrN;_a1Z+;i+R>Sbi9A_<^{jvbtZNe|}Z0>Z#sa zR_7Pk&+&$~*|5=3`9Puve}Gvk%Xxyy*r@pQK6#ylIA_f3_||(}g&7ah;eKC(2)_!4 zB<-5)1DA7w4~tZePss}rr3W*GXx#QnmO4q(*9vP?%*bbQ)p~$jo7nL7$ z#8;#|pR#`Si_GBjyl(h^8G%g12%JW24N}Yz!AQF!Zs@~*z!*3WmRbK*;?q6#I#i*R z(60(uUZsbV&#?!bk>a;w5Y00un>$(M6f?6X3q<-+bTX-34fyOdib}!6ii$yc^ttya z8&Apfa*99JPoLj>3L;*=+w&}IE&D1%?FI)sVs7O{z`<^zd-IbUEx{^k({B{(+;60! 
zs;XP#!C@3_)t$62SOBu79HBDx*}|V&E+h=zI|vpJD8cEQE9` zEq_Ud`^wRB!C?isFzXo(m6I81fshceeu!sIu|5aP-Y3efn@C7IyV{E4c%93dj%zX~ zY*66~gzyM8pV%tzUW1#{BFMQ4}A+g`P(?#gA5(M}i-a8IVm! zKs(g>d9H3*16HjUb=#Zo+g?F_{Tgbh9Dm*Q^o=yTrx4&n1`+zg5W=7|qE&l4-8;uKa&wET^tUb)3)XrEl+}Lz5Y%%a zqRTE@KJvv4gVY3}45>J0eCY00D3NW8=wcTdf60;OZKLT(_2Qtpn~kCacERa@-H46mR1qU^8b(x05HPV-a+@y@Ev;C&v%@J*OPLK&ytXmAMH4Hc0z{xBe%5oOu0pD zMJ7X|>TC68e}>D?D8Ij(W!JU2!sxy|VM$ChrMYzl3qx~r^*fweseE(Qdgdv_>*(% z$(}V0t<>iFwDCGnBDnT~B{cmw)5E=`XGB76`>QYT*j=soo_7IU-+}WnvHW!%2|8Su zxo@*fJKtTf+%h^aaV_a2pPH>B5hJ&tgyfW8v5@+I>v}}m>b0%(m>Ayt@Z#+e07cw7 zT5Az0OqrhU zwt%M)FXQ!ER5-`&gmSpz1HyCxfZtdQuKxPh8CABz6(XS0%4em5GWZ(e-7M>(fY!Ob zK-z3KmysB%BDNoKIzei-Hq=kxxB~aggq{rl0_DSgvphGs!kiz-L$2+Wu~jo^miua> z6%spHwT@~(@~{la*;+Z6FBt4Cu86+(f~qI6dhs@6YZ?@mRRzo)ul#+Pecq=DT9>J^PRc*=-RN61e|{b6+qZXrNPe8}$U$P|DwAG2Z+%NQ&MMU{oFz_Yy-(Htei z5&!ra+1P393ve>htUkH{5}c8R@iA^PcNxP@OA`X7p^789+^g96yG&f<4;q)_A=ntZ zRHo6!IAdUR<=x#YF?m3s(`T7{-FPd_4IB?6RF!qFQusig9vsB+r!{iJAgB~0*7gOP5&mD zNuA}SQ5CFTq(QhFZs^E?+!GR9#VPKaJ^7x2e$u4M^|(#Tad^j0C=b2Q++=7I(f*Y6 zo^_LfjYnE?ZaICX?QvD?B{WeYuDs`1??4ZU5xtW7ad@?yB1Nu;_fD5m2##v)(F6X3 ztWnHY%vzE`?+upQY=Gf3gjj%P;a|hYF8Pi1E@^Av!_U%GRGh zefyUQxiO<(bhHV1MwT_|G_PEpM;1r7ikf~?;;W3jOey(xLF`c~9G{O$0FGb!nm2I# zD5KR5inwrnRW}({x-nA?%9k-E3pSzX)q@4PkU3~QA^+xj0vqM*Y3Qi@L6xL_X{2An z`7gf3XC2(_<&+tK;D}G+Z=dAm?=rdTYTe@!R66U5)Hc+1oj=@^)kW*?I>ox|kTatD zCDfdkXg50RZwLtsEloCa)_Dmlw>_LlYjb88CSj`aH-F?NJUE=DJnXK71Lw^5gkU8B-x2-rI@g zP99;-@P8us=KQCbrq%UwENQZh?`y?Ij8Fku^FyGMTPivgIfjq)Ij;lJuL5Hy>bzf; zam_}1q`x7tY`NpaqHZq++lizZ7imIy)PZ&tcmAoU0&_R3{DHFti~=Y1=LZ4R>g6Pk zq{^s^e0#Qiax^z`Aank;S~bY2{^$U7K9|NAW!tao=vL2x`Mf8$voS1I$8#&Bet!>7 zgS_Cf{Q27S_~q7=I*K~|_8ppc6v9DL2CWVjJMC`5ydTCVqNrLt7~|&HxYR8DHUK`P zchHhDNho|6MAZDeFyoVa@<-841seYDzK_Le$3Mt6sORlEdF+%lh6+hN`-bH+SfG;} zSBh54f=76=2g&^oYGAo74-XQ}2`dFtBwMjY^0m+r)-(SQtk}vRFJWDeZr4CqR+Bk; zHUtbu`(pj%I3V9;>aDHQqk+8&V8I~a1|HZW$U5s(u4GmvJC<9u)MJBLmsXIr9aW2O zTeB;uFeqj^w=&K0Z$M{q&ZJG*_0I#>oysc|HN)?@YA#W5-*wew%F0%Pa!7HnWwof2 zGmIXt!=?p1Hfs-l)-AV2myCXtpn({wK|%u3X$!Lqrn>Sws*!LCs|kzPcCiyOgq86 z4ebl(Z9I+J>Lh4znOQbBEH&fVeww=4<&}x=Vu>*pZNuTMh98Y$2)@gFJO7QFR61KAl44h?DzmDMIXRm-GNY{hv8V@TTR{Gya-5~(zc+&1*0l29C zC5G)y4x_CQ{+1kHIDZTW?bBMn+4cLXKKCh_6R=Z-sX$Sj}49{trUYJZWTDoo$#K?C%FUP&BJbtUswXWx! 
zp7wgYHp-)X+X)5hmA_I?`w3UMloYrmR>7+qsx) z=J0KeeYq9P?RihaH&h;#S6f!j+oQHNwR1Fvd~nrU3CJBE-1+)U`clL8n!x<(t%EBX zT!HT#E4FU52u%&SaY-W^aCv@zLvEr#XCdAE#*L7WoMI94C!5JEFoD>u-_9LfmfN$x zLsn%!d=fDnL^Xv9Vg}}T&xDL1XZokk`JSE|}CzSt|*he|B{ ztVyr|b?j>4i>2xE4P!yBlXgXrctCNlca4}^%I9v0MPzAZ>I^VaP9%d@ydK)iK)BOq zH+_j8Gm3;V^sYGD&HS=fapa${0;LBjrY}%0q8}YF2DKDT{J=I;Is-d8;|G%EWiF?~ z3$9Lpd1+1pTv|K`#i_#MDxx3WH+*_dN94GW*FOF=kn?L5fr5;P(Y1w~9JGPp(fpJQ zXS_A>M1yzA20Pj~=0PH@-!>&mNhIJn_k{6f4Y85=FvTWj1fH(OZcpCF+z~pzmn}gP z9mORl=b7eh70DK1#$<7&`Q{J)y}B$W_~5zf{XIw5VWGy)V&#*D$Zi>}ob>e=U@wNP zv0V1B%1CoD$EK~mIO41t8GTDxlj!aTK@YTZsPM9cnd21GH8I16f=z7j=;xi-c2{Og z{_59ote3aT?fO9~aH5qK2eII-{<>6F~~` zuibOh{l(c7?1v(dua@Pg@)xV>2$hVJW!0jAT$((6vnkoxpya~2m6YuD$?6fS1#G3p ziF-rZTDiE1^w`N9g0Bqi^-3*61+!BB0AhpIRYA(oR_IUt@x}Q7P90fUMzZpF z6hg1w7ldPdKbZD3ctN%n)2}fmXjOR^3EMm_iUL^zp)>gq2W%};z%Zjcw)mbtz=sQsxeE0LT<@DLf}OCq08#=FUP z`b@=w7sdlQB?g(-HgbA<=ZzTq{d~4bvmdKzhlV9f=}!1mQDimleK`!D#u%pp4;ZGy zQ7)~S?hTiZ+!7U~vaH(j>B+pe)fT*Oe8U-jpYfc|X47f-oTEhjrDII+w13Q+!Ep6$ zQeFzP_Y0I_e!C*}Ll<~IBk%6$_dFCBn`UJi%!hggW1L(Z6VnsIZ1&?RL-)u+){2vL zzM<14;>ttXOIDhm_kEdsj)-kp?huBWUp^O0?>Kw><{8-%rXx4FENx93+w0Js{z@5` zcG&(^DnYY}Yo*r1u_sz$sjf%zVP}6lcT#*dv8#4|SLxX;A}Fr+uK5erT0gb1$ak}_ zjMd`J&TRtvoGW5SOI9pbc}_+CS5Ql6hXg|b=mjcqf9_T;+}fCjko-^dx^_mZM7WYjzF$gc5GBY){irW^*FF0N2TOoXU z_if$;rAIQ}Xs%S8n}ESe8ov3Zjgw-#ZD?f~yl=1Nl3O$>yqxx)|;QyGz?kOR$+@=4- za%>Vsekj!!7IPsk&6Dkr>e3_SoJxdC_B8&yrA1l~qgME!%iqZM^U(Ye-AZtGDo4I( z85u7&T;e{bBCf`32oF*){I=6id@KkndJ8c?dRF(F=HIj|(I8%!baa}6Ny=P`O? zFSuxD@$5dS^Wd+SOZ^4C8QfOCMj;Y;vdM~WR1j@ovOc=ESuy&QKnPyLEq@)2RNwme zj!nO{nd_mjzeIZ9xc2^WC9v&8f)69BKJTzg4HhP+IpBUDzV+l~ zd0!NcD*ZRIVD-(pHL(2|(?>hCOTEo4u363gd72<_goGikWHIVo<2ctt=K3&fW&*2p z@K;){ibLE}#oP6HEDbE~8^SoJ$5opbEvj+p8QFQ#nyw;<^G4S9E1Y@B#Nhs7Os{!<-b35tRWL|Vw^wk= zsVX+^oxJFgQ9^C#%VkMgb;4LFmBN>Gk`}>QsgR*|^*-bhT-<9z71gvCapgK8hpC$b za6kBe=07ru6M5=Jsek4nr042L-u>>bw6ib;vkj3ZH8)$WyFS4}-9GFm?~wCYfxt$7 zC+{I53}NqVH$QJ8Om+k%T$xm*$NEtZ#Mv5KCRVzWmZ$zuW%T+pcLps{>EpcQI zf&TX0x(;32(^n`hbF@Pj-8TkHC(<|m%unmRZH67b`Kw2}L2h(QYeZT8-917ZC67_Z zc^skKj?~9RYIm*b;e!Xvq6oRBhSX z+36*G6rT5IH8iQ?7cmKSjJ?s<&q3z(p!4-xbYEi$>)7*g|rMVKh~ZCSm#*v`=& z64LYG@~m+Wh@-83MdadSj2j}AUzJvB0bNe^?Ww$qn;;0ThH!DN`1?5nNeF?+R=hKE z8Ca-|MT{))OwNPcMoT(_RXTKnNvt_ArnAS+^v)$Pu8uF%hOMH$QnFuXU?fwQ z0xAS~&UMn>2`6rFI#mcE<3Q{@(DR2U!gOTyzE4_51SE4+iij8u4*C@6Q%L-pU=ORS zCI}BLE!S}`KDVsw2X+<1=x9*Gt6;iEn)7tWNIj&zCM8{_-;V+KQ{Qwh53NzPm|W5V z6cy z!T|4cyc8wIyi@Orq&CRIoS6xc{mi}*rlbDCLCV4X$qg|Y?Q)~x4}l6yv$45c@k7=8 zkEcWwKKHFL+A?+6F&$9OZZ)kMc61KB+Lf9M+%6Z|c5DkAmCr|5Ci$OeNEqnfJWvCa z9-*GeqzAJo)BJL9K9P)&tH!(Xcqyl6(oIz31rSP99W~9cOSI2gI07x}CP|}|PAnpT zZ9p5sID@jZA0$o%T6ySklzA402qGs)bsb)(F!L(JBkchDfQ)UVGz7kf;1~P`LIXg6 z?#N>L7#tKA(`hTN<@DYAiy<5o^)x?jZQ<5 zu5AFn$_x7)|FZk_MglyY@bAXe^p6JU7WuP92iJolA?ChTDx&(x7Y_v+Jhu{vY`9G?1>fhGq(OOKGnk0gT60%M zO|`QgV6yWAvZTY#8z6DlxiRzRNDP&%%l#y1hq_t0slwWR5+~vSK-`x~SNVKkJDT10 z8W|zKSy?$fJ$J`afuAC7lV>}Mq8?|tbE2mRRQfW-?d14=LT6EQ+S)BPy2{;J6_@Cf zb8=s?xe7huSSU33=x$5$CrIhKUd}$B^CIdiEAw|a{BcwA+zr~xoOC+5X4)tpY94dOgI=}4C+S>&zgpFM z)&B*3%f6&Y|Ix&GaysrBzT+*TPyyUIzkihZlS1xabUt*1!Tt4`N>3T_C+VD|lDLDD z3W5GVtOwvp13vTD_UehDM`DygL>zix2eFN_??|CopmjP<2W-Sa$N0j%i4NBp;CxcQ zDUK64 z2J6aUQ23LE2|xSt+)(hn^uY!1=^uhDsjZ!Z?&3%#-ECX$h9Ktotyf%NZWb+nc~d$p zG}OEzQGBKxNK8UnDpb=Qr!y{|Kwgdhq7~%AUQAo^U;)XBQ?R}!YMK#^zw0A?N=1I} zy+wGDu|2*4!}p=vG7|wu-u%C2@zM#njLp0JQ%69;MN%ztc{kZ@=~Q=bPZyA({ljwG zC~W6O3|t)Y_^K)hA1KN5fob59c_tx~fLUh06JHow2XLl>k2>W8C0ZLV0EGWo3qCJA z_uMJCQ>Wqd2Tt2B4u_h?5@7(ZS-rjXV|u$NA?pwhM*uQ=0D|fkeW~$;Y30Stk7ML3g;uC)63F z!o_g1x2wwwlw@Jm6Vu$zlR9mF-}96emkUK$t#Mr4-`%_mrR%riHM}3Nog{ 
z)1gL(5NGnP(o}f;m%kdeU%N;-i?@*Pu97hs9D3rKTFZ*m7>l+2=Vo})3}>K#MEHoDfKygw3|WOx9-*A{!6=m2yYIgj+eJ*wk<*)~F!|!k)*vqGX zqTn8l(}9P{e#D3k;W}Rv`u6$h^Un=ed3e-;_&hd{9#2FRYt?%_TmPZ0wQ6d15Ji3# z)N2c5zaR%9E@%&5Md`Zr@gPvhEI~OTjX3A|#B3{OMXi@FU$CJ3v!!$<94bsNiZc5( z|4m5`WqQC<@Wk}H9*-7e`hxCA0`sODNhj-DM7(GdRgLuaYL+jhgrHYd_2dt68qsM_nRM6Nxa4q)yZ0$V z59>cTE_9K6I)+;;viWXF@FB0QD{GtD5ky3yc5Pd|RFXlYU1qFR2!;K6`Dq>U6_@A8 z?nSCm()CT;>XhIFgR936GDjVn%3WBshln4hN!4@cKmuksmOUKy;?-KHaKd-|NNmcM zhsmIe0z8DD$O_U)NJm!QYFRi3o3%1IFl6WWFJe-^Fr5JY`g_g+94|OdI7>%8`eplU z{hL0Bm>9NJ)y#H(c5oYDFk{jl0u_?fR~A(Y-{zlA0u%m&k_76^^Ns^srr`HeOFYZX zW<2;7F=omNO;{z`t1KO8b0LF!zIeyhaX`2VxGcI!wIiJdyG%*=hj8fWsFpvkAbpOacLm~^7O+*H&QRe-E@;w za%{DC9j{Y)!c}~4A+sTlR%XUm3EDk~v|Nlfq8lfPHzJQBb=(|Ll{tA>Hi%Y`NEM0t z?ypg+E)W+md_4$6A?}k!*MR}LIu^p^<4-$L@r+jnd-$3%4NI2Mu!?EA#QVT&@3E3A ztI7!+3R$$QwTZ-KIa?0#VS4Uvt0hDpHe8$JZ!A+HNmIU5DQf4Q)>+*K=B-O&3>PcH z!`8YFD|PM)+E3aE`nOf%$!W+cjF=8+d_jT#l7-LR3Q1L*oBnc&m7OQ2<&jm0!G|n6 z3S&12Aic2B99(Z{OPWpNRk+;F-4h=jUBBWc#TS=*Pe{`M|9n8@SD7fmZn9$UBJXm4 zI3L_QBo5QmNXO+W3SpV%l?}o|#CJQsLpq>=m1vk7cF>y&L8NhJfiM(l7gqHgeAd(v zA5SX-n8itMhZjbn_YmUkMVfE(c=1S%2&gEpJgEA@VP-uY=;s}@GbNVbB3M8%< z^waw%p-xXDrqG^>8w0Irmx?o4iiTHRDC>C2v!eTU)WgYwlOlonmQ&5T{b|L0!hy9k z!W%>5_DYuv8eF1exwf>v5jcm~VTEz6sd(u!VZ^7tN(2l|I6QnlRB^kyRfRH$gHrLP z6Oplg$-bTJdv=vDSPjd&Rr8^kcO#Rb^^PH!0Ub}bWGZr~n~k$pmam{(M(BYHG)Wk- zAM1Cl-r#*tew6^;H&O}C%K9-2fpG43^%o|$3*sG74YSkqf+eVvfE;a zx+FxRg<9Z6ijJ+((r44W@fQNAnFwLWU7C| zw$oSMVX>~<1$I-MgxC3flFN#ZLU9CQd1_6oii1lHBWg8e{P^mp$6)1SAy1Yl5XrS@ zk7$Xf2{SBJL%nifo`$*rYL7$WAV&laVNqp~>n0JW zm*AVUxebwtW${8+pxaa-F9vl7Jd9RYkcY;C0!gV%5Nv|9+^Flf6Xszy2z0-SR^TEp zJg$-`>u;!U*nU~&9#4L*q^i z$xA~IM{%h|(d9g{Jmpz2=}a&*f#zRT$8CXo$^!uGg?{@A)Vkv$>a4J?|Hae(rV7Hh zukV8+IFLB3l)V?F?>p`lf3hIlf=V``?k++jna6Y`XU0IF*9yJ=^3Xd~m5L|ZS#QAf zUVeMj2z1quopRGb1-toQS*b(Hasy0Oou2>+1zTDx@DH|ZBlxf?i__`p>5Ytye@l-n z6f&(<>-fF;`g{HBe>(GmEFar&O~=liM{LVrHezu4d8p5`b;tgLPu8r|3M5|L2WJ90 ztOIqA;cgfd@bf7Dy>#ud7SgvnofPC}i+IL5-Nh?L>Z^)|r32ABiQWeaL^-F%p364R z8Q&jKDz`A`5Yxu;YmAmvO_dPqycgC!;oDNcwgXx6@po3x3Dy z8+afs_y?Vusd`inMA5DjK63*UmfG~)_QAWb%eaaZKQp@(cK7Ap8)Ab=MHMugfxt3Omf!tuz%B8q*P=_ay>MOJwtdb1Ce|9x52e^Jn!bo@ z4Xv!WgGPpF+91P#e9t-Zc2z`Ip#EufyH}?#&EG#mGLvY`k8hAc#?h6%;KzA^*t(yI z^DGmnbVJWv`S0uuI?}z=G#wo-qJc|af?Y*tzUFFUOMt6a6Z0pCM8`X`Syg10o~TpF zS&hcyd6|i6Ddq1@sAk%gD`HAz%YBSy-_;O+nI6iTJFgy@ zD`eGxce#>DIXyT*x?8l^f4YcZ!>z58OONqk;eCYgT9AK#GIRa~hJ?3i*@uppT#`h2a`3~}Nd`DMQ}!_Bl(G!x9v=Cm+dW}h2)(TSq54G;z7 zv{0*M&HsQcje*vKIQvWuDDo+6`R!>?op$f8_eZ(@?j&mm6S%`0-kpV?3w+QM>~aGMD54l|B<;*OxPNWwkTecG|1sLXS^~9!Vw_;4Gp>>mQ%_F8v{wS#V#4 zpAPF+vQVof5dv#~?^Z^>yu$1d-*c_VocK&^ihAP}Z&+|Ln_HPBURtHaZLJqOSp@?k z02nJvoBNz4c&zAl6H9XK_$CV|%wS>D!m*pAZ~G|7b0l``ARF%+ieS{SKq7qu%S5vD zEeM+%IHF&4T?dW~VlC_cgN_;OLF|K!b5`#YI(-4YhQDALJ8<+e&`M!Px?CPJ!O9#STz^$KABDr)|%jmz${bUR`w#PlT`SZmtiG z&XY;e-D2nFb3*3ZnTuOt9^NMP8|rn+M$OVbe6h*=pgemC^K*j1l8M14d`_Ojb^6(# z%S6Tr19!Uv7{k_a2cZX@;u?ZQO^>Vm%Ts?1V=!wA6{hJ$e`rC;H1r~XE{PMqMBflm zF<432iA71DBh=H4o#$}sJav9^`x{}JEkwn#ys+BObbqo!>B?Re<(dPtOE*tlq^8_)Z!6Eb4pH5%EP=oLl%BRh#a)i*+92x0 z;|F_49WETzUuwn-?3j;^dcNu8hs@7P%3Ur*6w0)G96l2B>gPbM$+Ti-lyuL~#PRpo zcfi$v+dcas)Z~G9S(WL>9fEx?8x~xA)vLz$iqD5_YTCA+}Gs# z`KS9+C+KOq0VBh91+E1CpevsHz9@PJxdE@yi`EBIbbrTXnK1Xo>D`!z*TcJmPfpE; zixsnADorVaaiqQze~$uqcD|b^G(7#6he!PrlvX+ENMqp@^4VlOh0b@}BPd3jYXgaT`Ec9)j-(6x`SeT>ajObiEg3SNwP{Di zU9PjZ9sZQrx*uRso95LQViH&mP3D(P;=P;bs;c@>qf*0|9@+?Yd_2!j-=l@0ph!eI zeCvx1vP{22hY~2t&T&BfXic zf+cq4TivPgkfXW}7m5wMnz_CzXuI%`qL9gtO+=~lsShszws=g6+X&cN3zMsgo)ong 
zZ_3od+w&6M0GA(erqxcToeH<>9TEPbdml(sKxhRBMKJiWV7xXgVB;o{f$?x4@H|B*|Htfhr6Q5&E82FCWQ)V*?xp0thlK26qixWmx-~P=Ou`gdd_O}C2 zyJ%NItm)O8Qe~hwupv3{N0%9@MIeF@o1FYYuvX~W7I0&SU$1NKe2BZ1n4Ekb`X2a$ zV*IW&n?I`YgSM7a)r38^`KMRV7$n=@j_y|gPio4R+%=4)fyX{jKA)Em2R;V-8PY!^ zO_@up3z^Vt>e8QCg2BWqhx?F+*X3%K86mAM8lG7su=sxNOoHZ&Ft$rhUF^aaAOwUG zq`E{3%f7<55c;F8F6p8dUF{ic*o&)!I9r?Vh}Cq{y0A(WD8(V?Z`Hjv%-3ZGa8yP{ zCXq)70|tqXyJ1&^q^H~b0eGmO0Q9?@;kdE3lG_r%`gE zFRIx&e}~Il7coR0{d8@c2?1=fSP_ z1VXNu*jUzIx(yt=G@U)eqFPw6+0Jy7tjo)kzvJx>J@j`ZPOEV- zL4PVwFjlL@X+V>umru-Hn6&u^tr9dcI$7CU_V`{M$dwo`Ov)stCzXNvfNrE=LAkJg zCTTNqy;bK5fA^nz#hMb-#Nxjp$jsnI0;(sb_@@a@Y#?sLZj@GgE%ayldv* z*0X1|-wv=9@Rf94k{|!}0$#hK_G)9fnLbo+BZo8G>RI8|drSg;ccb(tG4=N_qN&~L z;|;RBXAvLu4HThgr!6Mq*NX404#wzzN5!y2Z)568;`WYRkm z#7OQhdUW_)q~YGk{B@S)3DIN*$}M91Sc3CBqO)JY{N|`0Tp0iLcvUgr1MMAu6wQ(<5?<5yZETG%n@EkI8;Mg4~-gDsE)rnH+&)Mmkm zqDiA{W^Yt8NQ=aB4(1GH63>y+p>y+DD5Nv9~BZugBT zoaYAG3vA}cB-!G7c5Yo}lN$jM6`PYsZhqKm6Ypaz(2)P1%t0!p4))t5u?1sj<^;z-BhjWYg7oc(4 z{&pdUvoDlXBlSBY?cD0kCVODUx7-482Ku_fa$0!srWY}ylK!z#5w^FzdZg#?LtJ$H z`w(w`8o5_G4neml*euJHF4lDpqkQ)Y(xR;0#67Ywz(lLk+qVuT!iOLU9=ZO|e>P4k z34z(VGHA0ZkO+SN{|yrQ_ML4cQ&RE@0idoLVoalffpMD}x9>lN8{VvN1mV|OGHCi} zS{j{6mE{$BK($X!7(3N=Nk&kmrS{_6lfzP2KHJHoDH8C;eF7Tw!_QK>gBfSBQw@(H zN-RMQAh!$WJ(?OaM4Y>*|9J+WL6!Q=DFzMCEvci(QX`V2EsEiplRJR+zC z^=>%aW#rwMO*^Cz^9WIA7DD{?=eGIvR^<56&Iq%h6aU^lfX&Up!`Ch#YqL123T7*#1C4?DfAp^Xi3zNmW6?A#LuYN6?oH_3$<| z3ClbDlL7zB<~#`bzBvS%ZfelEG@A!)sGMkrLf<4^+Oezk8j}Cnp#c6*he@)nU$9w- zs|8atVKBN>N&_wKgvbXR?*;Y760zD}wo!%~mHx`qYVhpqca4dGuNaQ2aoBu|V~V&l$< z1;+1f=JZ{RC0_lZsy!^S6S}s>@u92AzY!nVLgeN*^Q!WvkJaiI8o(xWmmxxCqRm8WEHp3t39eC&*dS z5>8n$(`%Elfy8653SXrlcKxOdoDd4e^(jvhkHYLy|5RRLo5JbpOD6;JlIYx+zh5@6 z0=8tgn!u|JkyECMFWDb~q5<+%y*+_UeT;m$Nh4g~LR_I^Tl{n&K&s(^xldn1b6$uv z?)2Td5IPLBrnfVUe9~B{oOdP$R#~w^gT}?hX3AebT{z51lk#kI@5wQkXV=ANI zuIo&IW_+IDHg#aJ=3DES`jhN%xAQlOXmQUi1ZYwIIdWVaNeaEkz^rLXfsLeF7YUK$ z7Zc`lUTfp+6WTn612SV|;hugtfuDQMa8Cb1o z1$X6Gb1F+sh#)=T}yL8a%);S=nhY5ilq$!)qlkL(-iM(nfZ2;0Mn7txX=`kTSW56lTeld!omBcgM^~Y@l(O_mnxudrJM8 z{j(O^VIg<@Mz&|B&99`V#_M%aay^-8^AicwR{RigRhBS1WF*VXiyPeS-p6S0VYlj? 
zWl!!~^vgb*#i1^B{$SEEQ8R-Z8QXy;1*A!(adiC3q2Qt*S67z+Qk( z?~M~!-~p;G0ObCEAHU{MUl>bCxST3T8NSSll%J@|VOF)@>d2nhtj&pt3l zp2S3hwbOSSBFs;ffJzyFvNhUw!AKU|`y})|=F}}cz+CVfEQ}_Y`)tcLZQ>fTzx-tu zf6)SOPx}B;+^utgkJk1)Oeint(h)Xi)9t&~An=8sp7v^aGY_orEju2efGF0P_^<}>)K>bmqj1p~uM zaMBAjDJJ}@cVlhG-mDtjRTjDlC6XM9BE|CC7xj4pPbYtx9&i(bzUu= zBpOFK7xI7B=ze*5Bm?CLwFpoL)d!mTa;PCQRRu=dmfW&D*J=+%mD1;%Gw|S8ND`W@zafb(XW<|Hh@ThyDm(7Yo{Kp z0kDv}${qQ>irYEDiPts%)Ps>VHgaww*33ZesXou(8vR366`7W?P7Yaj@pzS}0CbN{_^vHg#BSdeD@;U4?2;0F%DLs0`~A1UGEmj zf^m2mJI9!9a56N3x4Hte#J8KdZ*4bg1H}p`M`<@L9^}~e1XNZkf+Ks=HcV4@cL3TS z$tmfdmF1+?76)cvVobo5Ab=SU`^Xq7A83Wo+CP}>@_}i1$st0u*3Nsvwguw_Ft{MfwIfT zwY9aUBd6Sb6<1G#Pr|hbap9>6b6Z;_Xpe^bvM-U>zWN|69As`SEkn?N1&ws*JF-BK zzpB%~-`_u-Z1{#A3ZWd)&}x|C&}D{Kv%U8nrR(Q6c+eYN9k5&SQT5I_h89d}Jx=+@ zW+R{jMk1h%5YUzH?F!DNv9SA`oCMdNclL4TZyd@_QG}ldVErIx=Ob6&gSA7&*!Z4E zlSWzUNy7)SxQmV-(hN$vGnJ)22OM-`csO$71NZUI0gra`DQfb4-wb0|KG{`B=fDXj z2+!aHG!r^R;ozMcYe6?X?7#q6iALaUj>4E!X8D$(@xj8K-fNN_#p#xDL*c9OE-wtmimq#Hs z9iEGsd+P_yR+Pc#e*pjGp;aA_faliaxWC~?&8>lb9~TKlDDcW>$w+dTQ5j^OsQf!xxOIju57>`Pv`ZsDr$ z1y1B?Fg(y&f(q3qQ{sNgecn{}2Exr{~~z->c%a{8pU`q0CeG6XtHh zSNIZ}dV}|Y$n}9mdK&sM8i+%ijctUfT>4jo1uMX_f;;jN`j>3M*HLhS0OS8|RTDC8 zKh&1~qMSErdjj#M^#T2`MMbxp=Q3^ty+ zuH7KZ1EIAIm);*d8!@X^t%wWMs%QAN z5QjJfTJ)xKrZ^8X^BUPef9$8V*}S4`CmzbZ;%<$FcjU36;|LIlcYTClVGd;?TD^wT zy=CbQ>1bTS#cV4fh#1!FX4Dr1X00y7rB#w1HgNa_ER~=&T;a!NGSgqtI%`}iuZ*u8lS+mxN12- zynkDS`RYow=`ioUvS7MoB}0I0VJJA>Oqf8l1ki^x3K70TYMF zHY8ni>x9EzLDs$C67!s!`<#l|u?L=c+}sn^pm7Kw>3;}@A)gP1*BiGrM7IC_yoG_wahrt&8ABfZ2haN#)Ze6? z+a(QzdwnH*mpbC;9J^p$c;+^Zu`YD^sgk!usGOH9kH?>zb>U(x4#N#`b`|CSK1HUY z_j8sXn?e(y<6H3e=Oq~?9+7S3^IdH@EljH2?s+B*52gTUVwQfU$TUDdbA`X$md1xP zn~SNeKH#kP-fy=w)IFU8m~IU}o$GIP0Z4|s(p6yy5Uc3^$&Uz(H9M6`D-K@SGP&9B z{a#GziWBx(voVwEyFi#Gv-&1G_E3?%4{yygZD5+pk6PKCk!cN=~KSC zZr~X{wUBglWNaY;W|Aju`Nv`J~TUvmZ?(Bj@p@%=FT zuq>MD6CT^hrj7G_`%%=3Kik#5-wkQBvA{TIxoRMk;R8OVR)D@mpXQK3gBer~Blrwq zmP2s`P{Z_a(Tnu+I^^tPkU!D3j{@PR{x;ixAK~&2jwZ2p@=VW<%VjLp#m+OS);vpi z6kSPCaG7y`Jrsxd4A2@GPF;w3T79uN6%e|~o2c69H*_Mo^Bj;#{=J2ZzUCoV5iL#V zUxSXQKGBW4*kdT?^=%vyLCK+kEN|c0VQQp#V#t+=TjyW9`6PkhOX-b)rJDp;;9R8U z33n%3iA(r4KKFnaH;QPR15YWu3r|ZaNMJDAc0ThS^l<^^JFsGMbNM zYwa!IO@OYOIp#xXl|mgKHEt?tzZ65cnsiv&J|LfjWeB655LfJUSk(aH!qllTDaAsE zV-$eJY(zgLYiWWl^C8>+0Rm`f6khb%`CDQ?7qOnU?)#^(3CWLWif-V{;)o&1SKR1s z8ksdjY;vJ%6|o)KfVH~Q3Jt+n6*Zm9sD6uTH^Dw(QM+_$bpl&31`6Txgbo@+4qQ=n z>$r48C0{P1s=q#MpVJ%8^|=;W6Xigb3y4dZtRuVTAm1qiUo}Jtvu{p>mS<$AEo}b% zFglH!j@b4Wqz@#>t)2*w96Jt5L1oQ#3Gy+Z)7)m&(JLHC% z>y4-RBxc|zO(V>`9@;XeMz!yIfm%T?B)Z&aE=clVu$+YrwZL3Y?`mo5TE|ER1Q&yC z<+M~*59&~49@^MYrl``c8v1TD!q7;jWg4f6Qfw8EF;#JZATsaN?7@`2IdWA9#;7h# zlrcpkV=E?AznCNIZvEH6nzv~~Y9}G(hAZZFxVYURS@4cBfneP6W;`#@{-=X7+_%WB zj;aLlrX%|=tmlm1B4pa?#agJ|Spo%iV>`4+}3@7vpw6AO|S0hNE2?ljKO@b6of?>geJ7M-sY%a zE}n#vjaB<6ItY3d1cPJ!q}q$!#JXL{r^eee>5tFKp4?lIwq-80LRG%=@0l|lzJZPd zGO|5-gzy;uMXkZGS z7&sTQb!V!b1^e1^TS-6gib}zd`7)S-!2g~@ddUP{sUkdXawRxOBIY)%kK(73oJ)o(qL=ss@;z zfavr%%CL!og-qzhHis>VRA*FDph}>Ogm;SczwF9S9M+Rn(SL)s%b|UHJD@%6k;RFU zjdDxy`>?3ZFD3<`*76@DcXFCw-*G8LI-gVCh; zC*7JqK{G@18~WEN?TB!F4)%adC)5zpdQ11l_fC`q66OE*~8e*0hK7lQ#X*mv|tf(d{hr_9GxKkJ2Co`7jeg%d|<5WTFu1 z8FFxkE&7cSThyCpjX@&v$SMqFbKPF+9w^@nfy)|W2^Y#l&(ZS>RBO)i7ySlZT|W533}9=4jYA|Z!>4Bc)dDn17UuEF6M{JDEy0NlNzG} z_X*tYZBk&V=&$fgBWnX5gI*JTeWu*(M0wQexLL?)(;e?m>D<-x1Y<*S2>-c7%K$61 z`)!`k=UhVNR+gT*fF1=NBM|W^-Pb5lRp1vaY}tCUXl$69#=2^*%^=WVeYs71EHM^1iF-TkiKkjuN7)8&|ESJLM?1+u6~O1K8ST1Rd*ApL zDC`~k^zcY7^L$VKDK=(HT9_)?aC1z z-u&3EsQPDVx7FoFpmYA1yKIjo>~&ch3Z4Nj>;PNI;uanNDup--OmRC<#gMAB?v@Dx4y4maNTX2!jy5`&^1aw&cTcuw)omUFfO$`jl~Ih0-`scc$?h 
zxQnI+y+GX==ZYgTHzm#R--KEaHKO2R2UwiQvr-y+drH7 znH;$OL&$OD2B=0Kic~h~@mQz;)NLBv0KxY+Z4vy>pF+O09YLglo`2w61r6^BO$N-* zwzh%ro|U6$+0ZkKW6)*2m6_|=PR_H2!wr#|pYLo}mYGxQbf?sZh?wtLo9J2F2`b@^ z-Q=7f03sv4F9=o~MPO7U_$>3TFEXGfGb;Cyc%PP(WH(U`M^NuORpqi|^^|ECeN>M? zXz$%dyg$HKRYO}4#Zz5F?`b)+6cbp93qx-S4GnKi)eL!ba&03Pf;ajgw6Kq=;9r6w`1(aNvxEWH;(ixpwGXJ2 zQZ*#Nih^Ze%52icVv#W;jr31ctav7F(6-*mjI6H&8-=&V+mw z7E_2msx}mf&wFxz{@Eh^X&{+R4!gw@4ka2k1c%vV5k(owCi^hc02)N6q%gv?=g%>| zVUhW>3rJ3Xc9qwV$z;7qB_s}U{2ob`@;myd3w0tl6R@e?QhJsz77D)1sI3y9Op@k) z(r7@VG=&Lyvb(Pg@K!;LXO2z*%qJ)EZJ=rHMA`aO2OTB~PQhkqZxgj_?S(_X6uw=^ zZQjbVV_D&cNF82=6VgGK)wwo=@S(Cnh)F=SpEf{=hOqcB4}pRUZWub;cY$flE@A&M z+X|d@Q9#w`PC6q#^NrOGN^m-%hQP@MIr1%a%tSx}2JQ1p7xIsWcHhLeWsW{9K{ z|IzDV7uepOhJf>0q~ty@TIRyL^l#^K8_yIUe5EjA{3s71y3G51=TXV3G-~b7F?2{f z?nX8O9bIrMLk+ZvV|D*#JrRo0u5aaL-NXLF#*Rn;#p@0Z=qm(W@eGUrUR2ea%o|`v@$ztl12Z(sjKtt~sXXq7iZVlem;SEYTn*~ln_)1ZL~5y8u}y^C zj7veJbfttIB=$HL#@1>n3rSz9SSyeUoC6}F|DHrz& zvW<*J5!Vl8skD0~AwUQ4&4B1|M@YwU&&4ZSiml>q)vY1}T!`pC4)Xe5TVkDvFtkg7 z78stJf*hcvss|b`n&6v+tmRQTU2eHK@IF)|2bAQNcc$L(I--cxu zB%qI08l;f`MT}vq(lU#D$`>fTN36&F4B<~4)(9S@b>gSU&Ne+~ZuHzMa4nQe3`aah zkqpS`VrF%nWdo#VFTtUPIoGW$E>G~-hB=0JIkc69=jCfYrG5wt;@BALD%cEYe?*m2 z!cx6${{Zj0xZY-nRyLspjj~0wnoz(L;nb?-DBSogK|ViugHIzpex&5@HFBzjwO@;> ziogfiBKDgg8u)Jw;`PQIa?>@gyuoMgE z;J>s+Em`M;Mz9B|z9(eRtLxg=9La-;L9~IO%1pGbpqgk1nZ;}OY0#A7;5u!dOdUPA z|CNkK?MC-vPGPw!H2ET>3ZZMGhq`_Hr~lG=b3|l;^$B8LN8gs=W!ygXRPT1&-2d1YrPiBEc^h*%vR)2A90NiY?xofOqBhr&bV6*-mLc$?fr!Y8vi< zoRzjh;fN5%&E6>;-%Leuj@wi*8#E;)ux-CcAXB`9S9h_&S%He=2zI_mkmHX`>&P)_ z`hWx$4hCJ3|1q9n_0r&0!P3{d9V4Vi2c2ag=Z4CwF&v0_Ku_W3HWWJM;RR%hX$UU& z0Q*;b;>(X|D2xY4m0pPe*>N&6-jd!BThpHY{EYxeo=3Hx57|$ajk# zz)GniJ3huYoc%Hj{Z#$fV(=Pk?VbryBKyDor`tisU4JjE3OCv+vP0eYLD}s)u=*lV zIEiMs5*T<~P=J=yLU&Zzk2T@y_uV(3z~L03d_p-&=gYmD-L8b!mr=>%NU*Lq)QTTQ z#F%H~LRR_ZhjP|uNDH;dBP^2kL;@ai#R15y?fk#}&an10-!%{X11ZwM=_^yPw7sSoQatDr{_$b28w^M{t zed(gh?;Wpvjq1Zh(X5}Gm}!CSt?RI}+ht&PeW z0fouf=?Q|3{)?%B7!o^o?nI4ph;TEOl7WZ`2!Nqykv4R?J68oL8TwAWd2q;~vod4v zbQ$nRvRBjiM7wi{7_74}gerh=!u+gdgR2Iy5UA#3Ka&E8XRdv> ztJ{85yA$@s!i2fNxdtIm@a$1-2G$82&zq{a9GLt~Ord1&GMPWTY3IHvmKl*(Ngvt? zr*BSd`Fq-CoP^UBFv8C_g^m4MsrA;~wm=&68tXj!y$XOzvaEvvJ~=2{N1(=keHqEs z$QHBr9R|U<+1WPxQrTnei!%dhyA@!}5WvzPH6@KX2Z9eMuts4`YhotEcFmHe*0X~( zA;KnG$H?Rve_Qu*j_VH~N{dcQ+X+!(068cVvQAu#B)of+X#NSJ<#T`X9A3*B=NG-;LUuK$N3_O4xNkgimWFtd|< z_Il{)EswYE{0fo#CkWDu=2~^~0OCk9HEYCoO(i`+!H3~w6{I>ZEB7<*Klw>HHUj23 zKz8|_3bhEF6aheRwkij*%?rei$c$*mLNo>M;=^YW^3h~X^5fR}T?3_;>bk`x0qT%e zQ73l{FsVjAGkbKkUQTyzkLv$~?pK%*%N!W6rb#o*8qmW6g(r)!(Url!%u^>D&J#l? 
z7KTNmz&M6hRde6!3>`6EwzArL!Ch;V=#Q>?DEC8vMh~f?O=SErWv#-ol&U36AvB)^gj|{+ZQ-KgGfIzzKIQjtynLix}R@Kwg!r zI3O!bEzHnS7#Iy3bgbnjf{drm4C&{yzI zLjCLZO@y}Il`9Ty#lvDOBWZ5sCis&X)H-^X%x+BwK*3m~4Hnr&3`Pjp5)pQl2UD?pr7m zf@lfLEg8mL1x_e{PF3ACajN$nRVz)0g_i89B~F~ zv%%=X0ah6LrZMNkoaKif)q+9@_|cwu5TBxK?~|mvjNe>oFoSawb)_*W9a_&P-YwoX ztPy5uM0toJSNuuafX~wrhe6^tZ)OZP4vws(*Ap`-jMX-Uyoka}LohHxqb5W0-U1!Fn_=`K7S8IgdIJXzwtWM2?s zJ8~}rvElg2fP&@=kAZ?zr_fxLm}tXl$r zEUiPteewRaDA_K%CAgt$3VNh~(+BsHPH4M<5U&!1FUiCCRbW{)7|D9gtfwU5x)=3 zw!v>fvE|$$fYqmPhAN7uo4I76wGX8D4TyBY!k((CtSD>muqoULn;+3pt#iA4pwh2{ zdpp6l3Gf|1bTTJg)ps3@0~>`>;KBLApy?3Td)8N@ME6{)E1Lt}A=ls9JHp|}IWM^+ z_CWkeaH{P=C5LNx=>rgML6yJr5nM@HldtUCi2zvVt%Rl3DX%~u zpRH|k=3zbnYiJTrL7v&O)Zkp5J!kiAw^HxNx60??g%MRzYBH~!Biad)Jo=mt$HhKh zo!spYv-Mxgp|d^05XPdtz$qoD3&Fqx0kGxMVx?l~E8XP17%s*%5aF!*V&%#5m|^GF zPaX@|>IdSBmhM+EBWqPEx63YnV2QII-86Dc@R+2d$zWibu>qcBw?t_41!}khuCuTkn;Z>_g-3ZzxvYo+h{nDPFS43e!JwmWEl%f#=P609 z^qSWlk6ZzuARyI#7PQYCphD@$v(R9_hn16hyi7=~gV%4d7>%L7Hvbf@T!>Cz9_?zGmHPK2(J@2{>?O zD@F-#W-Z+9=7EdpFbOielh6&>ww-@eTP<=Tq`69Lc=V^Za~e@ydB z2wzzpi(k$;@W8!#Vsk*{b$;;<`Yo;p6^eN#HS(>kgOsd zESBn>G$!$}DNWsrjcz>c;pCv9fATf2gcWX(6F4;V-ZO605VwS}o}4UNOb$;FMYeda zawl8*ha@AMgH4vCC8!u$T`Rk*>-8X#oI#a1A-jX!N`Au?+foP%3gAMeiRb?mYkCGG za@gY!gFa95EqMp;=3MJ5-|gosX4D2y8%R25F*I1EN*iZ{JBS0BY%C4S#skEMOr2s< zf2(h`ABxoOsY&Z?jBx9V8+6dOM*K#JFoe?Ij!x(*(ROoIOqg;?O_l%YdIKhC>_l<| zjH(eo+l+D)Z}Tr*tDXhi1&~Lzzn#H9f2XrMWA=Z(`9z2c1 zSN^5~OrnI%`84+im=yLFryol-C|Z0BWo>*zXk6_oPDU9zAL@$_{u?Xqb~_FXhlVlr z>#T(f85Ig`e(v-z!v_}U4!O>*OWxV;#+~y+j(M=Gc zH5HvBY_#7&RtmLTH5T@V@{c)?_MMR6JTz?KyO}sRIKW%WJKnrW#9y#-@!|01lz*}( z9Rs1P37%>2g-ohk%aTJ+#kzB$w}UW62|y-Sspl*3;9JWRypZyg2Pi9&aBWFSNde02 z|C-N5AKLnVT6GY(TUtP3FhxN!lBE1;8+cFiUqVC?+Js)-lUc5rFvJNPExO1dQy-b*?+mIaB}u9z)=t7biMOMS(hb~oxXOg(>n05R~3 zyiPGEsqFyM&+PX=B>m?r(m-w#i?CeAip@+lte%m@C3@3Bd=j~ijdlVx`{r!Rdxg^qVx2}lC zHK2}U>ya059Gl8k*kvhbV%Cj2^8ar{2a=u~vR!l*+ z^OZaQtJyby2vl2)7w~7HeiI9tc%Ub@q*lFcM)X*SQ}m{xp?^0}$d!v?awi6r9B+3B z%nyOK5nCI7W0E9lUNu(vjwnJ^38;&;+dW#zn@&+FWHG23kLN*d>>%J8ww1|>{n_%e zBvb<^!MZ+4LU5PC^8hRPQ*JSL0W;M6KE%gwZE0y)NqM!qyo9+%=;1{9yO)U$^GiVD zl@PEp@PUF0Fa%)U;=jLk#@mN4gfmPxy)^sWraNiP*Csu*CT(c#(ntGh%cqv;F+5^X zw>7LUkbpx~3=lhn{c@?t%{rcI#LD*0GFKd~^lF^a7l9x*P~ z_-f321&GP!NqvQ%4?yYXU#aS>MEIF&&jQlE{Q4E7jJ)JbT_!PpTkx!@gVHU=!>Oq; zY*H#+Y9@D3Ltlk&Py1|0(Oh(wiN3bJ6#x%gGe9HvML7Hb*s*;7wPWcZy8D28x@i1z zGwaA*r4So36LH8sKoST(kMBOPhe$__e`svwyNJL3lkJa8l!a0>N$7y1y~LYK1_>q8 zmBSo3oss!(1aQR~#yi><~1o3l)&m}0Zft43IwvR8IDS40}jy!BER3QEu(10r{U>Y-Wz zny(i+A@?$LxktU1r;HSjFild6no#E%O+D_E|8F+8!|ZXI+y>tL`cmaWn8=*bplZBIC|HLMkfb@*H+Km@3V(5t?-pw#?rbIr=wgTc4 zQ55^B?NGzj7J%mO;1=>1`s`1+<1azGC^&-v)nj8ZKfQ5j%!gyWCO847%To*JJ(f>& za<4OpVEhPY+0h6jfs>WJ;By*&%4)LXM|}0w2yj#W+U4AJkx1xchSQSYfA1+FyJ%Ox z6OM=#47bEmxb-x>CoZMs5TGMYuDu4Hj}3= z+pL&1R&>`#RSiYg!R6SR_)~;dT&qfeZZgndL7Jlnx_5$S&7pluVUcurZ94zwLywvevg@D*#W1Q zu^^8%wfq{@?13`yqm+!|{p*@@dv32gh^7i*M?myXa{ZQ{r(r@gnQe^~jsb@?9y_?z+jM?L07cgJ z+f~Bf_t(;Tn{IqT%bf>rzfhgC8P}!15Z3Vy8m*7N67!pluox*4ahb^05O%D-ak5~_ z6a9w-zwW}5Ak$*dUE1W_wW_Mb5Ml_3S>8xYV{gN{uyUw7d{bIO8c1XHbyFv+K=_LV zX-(WJ^4;>;`iI%&EgYGyNpCToPWb!Yh%bBoTgB^fv#;6NN5yv=l_loXM6QQC{9PblT#RSSWGE!5H1ZDr$x;#X^E|C5k#u>w((6G+ExoHl z@n>D#45*{in(FmiL@q8=s17s^Ql+);{4Zi%=|%na$k)R0^(Ewf-EL=+3D39TeqDFY z+B!|xJ2iz7N*8~vuWwa52z~qxTfUPsIeV`pvL%F=K0_j9W_{(aTwj)AQW%J-ZSYfZ z_1xVWGz`lb*t+^RAuQf7)&&UOBY!F6{mJ158tC)==8tca5X??%>;7wc4HZWukOmwZ zq#kA%Q2DzcyF&Xm!~L?SQI$yJYEDk0qqQHQPyherxL8pY<&C!;SB4Q8v>GvNm0>{ob1`CEDL~ z#@Z4TufU0w2?csO>6SbP)tmpoQc`G(_MN$SjseNpA}4!yhc)R4&*jl}vv#2olKFEl zgMz&qzSn@lbfRt)9Mz97!{_N?d2VVg!8AFRBzv#=n6elyb~oLOsdNx 
[base85-encoded binary file data omitted]

diff --git a/python/samples/demos/copilot_studio_skill/infra/aca.bicep b/python/samples/demos/copilot_studio_skill/infra/aca.bicep
new file mode 100644
index 000000000000..53385861474a
--- /dev/null
+++ b/python/samples/demos/copilot_studio_skill/infra/aca.bicep
@@ -0,0 +1,115 @@
+param uniqueId string
+param prefix string
+param userAssignedIdentityResourceId string
+param userAssignedIdentityClientId string
+param openAiEndpoint string
+param openAiApiKey string
+param openAiApiVersion string = '2024-08-01-preview'
+param openAiModel string = 'gpt-4o'
+param applicationInsightsConnectionString string
+param containerRegistry 
string = '${prefix}acr${uniqueId}' +param location string = resourceGroup().location +param logAnalyticsWorkspaceName string +param apiAppExists bool +param emptyContainerImage string = 'mcr.microsoft.com/azuredocs/containerapps-helloworld:latest' +param botAppId string +@secure() +param botPassword string +param botTenantId string + +resource logAnalyticsWorkspace 'Microsoft.OperationalInsights/workspaces@2023-09-01' existing = { + name: logAnalyticsWorkspaceName +} + +// see https://azureossd.github.io/2023/01/03/Using-Managed-Identity-and-Bicep-to-pull-images-with-Azure-Container-Apps/ +resource containerAppEnv 'Microsoft.App/managedEnvironments@2023-11-02-preview' = { + name: '${prefix}-containerAppEnv-${uniqueId}' + location: location + identity: { + type: 'UserAssigned' + userAssignedIdentities: { + '${userAssignedIdentityResourceId}': {} + } + } + properties: { + appLogsConfiguration: { + destination: 'log-analytics' + logAnalyticsConfiguration: { + customerId: logAnalyticsWorkspace.properties.customerId + sharedKey: logAnalyticsWorkspace.listKeys().primarySharedKey + } + } + } +} + +// When azd passes parameters, it will tell if apps were already created +// In this case, we don't overwrite the existing image +// See https://johnnyreilly.com/using-azd-for-faster-incremental-azure-container-app-deployments-in-azure-devops#the-does-your-service-exist-parameter +module fetchLatestImageApi './fetch-container-image.bicep' = { + name: 'api-app-image' + params: { + exists: apiAppExists + name: '${prefix}-api-${uniqueId}' + } +} + +resource apiContainerApp 'Microsoft.App/containerApps@2023-11-02-preview' = { + name: '${prefix}-api-${uniqueId}' + location: location + tags: { 'azd-service-name': 'api' } + identity: { + type: 'UserAssigned' + userAssignedIdentities: { + '${userAssignedIdentityResourceId}': {} + } + } + properties: { + managedEnvironmentId: containerAppEnv.id + configuration: { + activeRevisionsMode: 'Single' + ingress: { + external: true + targetPort: 80 + transport: 'auto' + } + registries: [ + { + server: '${containerRegistry}.azurecr.io' + identity: userAssignedIdentityResourceId + } + ] + } + template: { + scale: { + minReplicas: 1 + maxReplicas: 1 + } + containers: [ + { + name: 'api' + image: apiAppExists ? 
fetchLatestImageApi.outputs.containers[0].image : emptyContainerImage + resources: { + cpu: 1 + memory: '2Gi' + } + env: [ + { name: 'AZURE_CLIENT_ID', value: userAssignedIdentityClientId } + { name: 'BOT_APP_ID', value: botAppId } + { name: 'BOT_PASSWORD', value: botPassword } + { name: 'BOT_TENANT_ID', value: botTenantId } + { name: 'APPLICATIONINSIGHTS_CONNECTIONSTRING', value: applicationInsightsConnectionString } + { name: 'APPLICATIONINSIGHTS_SERVICE_NAME', value: 'api' } + { name: 'AZURE_OPENAI_ENDPOINT', value: openAiEndpoint } + { name: 'AZURE_OPENAI_CHAT_DEPLOYMENT_NAME', value: openAiModel } + { name: 'AZURE_OPENAI_API_KEY', value: '' } + { name: 'AZURE_OPENAI_API_VERSION', value: openAiApiVersion } + ] + } + ] + } + } +} + +output messagesEndpoint string = 'https://${apiContainerApp.properties.configuration.ingress.fqdn}/api/messages' +output manifestUrl string = 'https://${apiContainerApp.properties.configuration.ingress.fqdn}/manifest' +output homeUrl string = 'https://${apiContainerApp.properties.configuration.ingress.fqdn}' diff --git a/python/samples/demos/copilot_studio_skill/infra/acr.bicep b/python/samples/demos/copilot_studio_skill/infra/acr.bicep new file mode 100644 index 000000000000..c159cc1c231a --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/infra/acr.bicep @@ -0,0 +1,29 @@ +param uniqueId string +param prefix string +param userAssignedIdentityPrincipalId string +param acrName string = '${prefix}acr${uniqueId}' +param location string = resourceGroup().location + +resource acr 'Microsoft.ContainerRegistry/registries@2021-06-01-preview' = { + name: acrName + location: location + sku: { + name: 'Standard' // Choose between Basic, Standard, and Premium based on your needs + } + properties: { + adminUserEnabled: false + } +} + +resource acrPullRoleAssignment 'Microsoft.Authorization/roleAssignments@2020-04-01-preview' = { + name: guid(acr.id, userAssignedIdentityPrincipalId, 'acrpull') + scope: acr + properties: { + roleDefinitionId: resourceId('Microsoft.Authorization/roleDefinitions', '7f951dda-4ed3-4680-a7ca-43fe172d538d') // Role definition ID for AcrPull + principalId: userAssignedIdentityPrincipalId + principalType: 'ServicePrincipal' + } +} + +output acrName string = acrName +output acrEndpoint string = acr.properties.loginServer diff --git a/python/samples/demos/copilot_studio_skill/infra/appin.bicep b/python/samples/demos/copilot_studio_skill/infra/appin.bicep new file mode 100644 index 000000000000..a0990ee0c29f --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/infra/appin.bicep @@ -0,0 +1,46 @@ +param uniqueId string +param prefix string +@secure() +param userAssignedIdentityPrincipalId string +param location string = resourceGroup().location +param appInsightsName string = '${prefix}-appin-${uniqueId}' +param logAnalyticsWorkspaceName string = '${prefix}-law-${uniqueId}' + +// Create or reference an existing Log Analytics Workspace +resource logAnalyticsWorkspace 'Microsoft.OperationalInsights/workspaces@2020-08-01' = { + name: logAnalyticsWorkspaceName + location: location + properties: { + sku: { + name: 'PerGB2018' + } + retentionInDays: 30 + } +} + +// Create Application Insights resource linked to the Log Analytics Workspace +resource applicationInsights 'Microsoft.Insights/components@2020-02-02-preview' = { + name: appInsightsName + location: location + kind: 'web' + properties: { + Application_Type: 'web' + WorkspaceResourceId: logAnalyticsWorkspace.id + } +} + +// Assign "Monitoring Metrics Publisher" role to the 
Application Insights resource +resource acrPullRoleAssignment 'Microsoft.Authorization/roleAssignments@2020-04-01-preview' = { + name: guid(applicationInsights.id, userAssignedIdentityPrincipalId, 'appinsightsPublisher') + scope: applicationInsights + properties: { + roleDefinitionId: resourceId('Microsoft.Authorization/roleDefinitions', '3913510d-42f4-4e42-8a64-420c390055eb') // Role definition ID for Monitoring Metrics Publisher + principalId: userAssignedIdentityPrincipalId + principalType: 'ServicePrincipal' + } +} + +output logAnalyticsWorkspaceId string = logAnalyticsWorkspace.id +output logAnalyticsWorkspaceName string = logAnalyticsWorkspaceName +output applicationInsightsInstrumentationKey string = applicationInsights.properties.InstrumentationKey +output applicationInsightsConnectionString string = applicationInsights.properties.ConnectionString diff --git a/python/samples/demos/copilot_studio_skill/infra/bot.bicep b/python/samples/demos/copilot_studio_skill/infra/bot.bicep new file mode 100644 index 000000000000..3df436523333 --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/infra/bot.bicep @@ -0,0 +1,37 @@ +param uniqueId string +param prefix string +param messagesEndpoint string +param botAppId string +param botTenantId string + +resource bot 'Microsoft.BotService/botServices@2023-09-15-preview' = { + name: '${prefix}bot${uniqueId}' + location: 'global' + sku: { + name: 'F0' + } + kind: 'azurebot' + properties: { + iconUrl: 'https://docs.botframework.com/static/devportal/client/images/bot-framework-default.png' + displayName: '${prefix}bot${uniqueId}' + endpoint: messagesEndpoint + description: 'Bot created by Bicep' + publicNetworkAccess: 'Enabled' + msaAppId: botAppId + msaAppTenantId: botTenantId + msaAppType: 'SingleTenant' + msaAppMSIResourceId: null + schemaTransformationVersion: '1.3' + isStreamingSupported: false + } +} + +// Connect the bot service to Microsoft Teams +resource botServiceMsTeamsChannel 'Microsoft.BotService/botServices/channels@2021-03-01' = { + parent: bot + location: 'global' + name: 'MsTeamsChannel' + properties: { + channelName: 'MsTeamsChannel' + } +} diff --git a/python/samples/demos/copilot_studio_skill/infra/fetch-container-image.bicep b/python/samples/demos/copilot_studio_skill/infra/fetch-container-image.bicep new file mode 100644 index 000000000000..c74bf1a00265 --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/infra/fetch-container-image.bicep @@ -0,0 +1,8 @@ +param exists bool +param name string + +resource existingApp 'Microsoft.App/containerApps@2023-05-01' existing = if (exists) { + name: name +} + +output containers array = exists ? 
existingApp.properties.template.containers : [] diff --git a/python/samples/demos/copilot_studio_skill/infra/main.bicep b/python/samples/demos/copilot_studio_skill/infra/main.bicep new file mode 100644 index 000000000000..ea4bb691c2db --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/infra/main.bicep @@ -0,0 +1,141 @@ +targetScope = 'subscription' + +@minLength(1) +@maxLength(64) +@description('Name of the environment that can be used as part of naming resource convention') +param environmentName string + +@description('The current user ID, to assign RBAC permissions to') +param currentUserId string + +// Main deployment parameters +param prefix string = 'copstsk' +@minLength(1) +@description('Primary location for all resources') +param location string + +@minLength(1) +@description('Name of the Azure OpenAI resource') +param openAIName string + +@minLength(1) +@description('Name of the Azure Resource Group where the OpenAI resource is located') +param openAIResourceGroupName string + +@description('Azure Bot app ID') +param botAppId string +@description('Azure Bot app password') +@secure() +param botPassword string +@description('Azure Bot tenant ID') +param botTenantId string + +param openAIModel string +param openAIApiVersion string +param apiAppExists bool = false +param runningOnGh string = '' + +var tags = { + 'azd-env-name': environmentName +} + +resource rg 'Microsoft.Resources/resourceGroups@2022-09-01' = { + name: 'rg-${environmentName}' + location: location + tags: tags +} + +var uniqueId = uniqueString(rg.id) +var principalType = empty(runningOnGh) ? 'User' : 'ServicePrincipal' + +module uami './uami.bicep' = { + name: 'uami' + scope: rg + params: { + uniqueId: uniqueId + prefix: prefix + location: location + } +} + +module appin './appin.bicep' = { + name: 'appin' + scope: rg + params: { + uniqueId: uniqueId + prefix: prefix + location: location + userAssignedIdentityPrincipalId: uami.outputs.principalId + } +} + +module acrModule './acr.bicep' = { + name: 'acr' + scope: rg + params: { + uniqueId: uniqueId + prefix: prefix + userAssignedIdentityPrincipalId: uami.outputs.principalId + location: location + } +} + +module openAI './openAI.bicep' = { + name: 'openAI' + scope: resourceGroup(openAIResourceGroupName) + params: { + openAIName: openAIName + userAssignedIdentityPrincipalId: uami.outputs.principalId + } +} + +module aca './aca.bicep' = { + name: 'aca' + scope: rg + params: { + uniqueId: uniqueId + prefix: prefix + userAssignedIdentityResourceId: uami.outputs.identityId + containerRegistry: acrModule.outputs.acrName + location: location + logAnalyticsWorkspaceName: appin.outputs.logAnalyticsWorkspaceName + applicationInsightsConnectionString: appin.outputs.applicationInsightsConnectionString + openAiApiKey: '' // Force ManId, otherwise set openAI.listKeys().key1 + openAiEndpoint: openAI.outputs.openAIEndpoint + openAiModel: openAIModel + openAiApiVersion: openAIApiVersion + userAssignedIdentityClientId: uami.outputs.clientId + apiAppExists: apiAppExists + botAppId: botAppId + botPassword: botPassword + botTenantId: botTenantId + } +} + +module bot 'bot.bicep' = { + name: 'bot' + scope: rg + params: { + uniqueId: uniqueId + prefix: prefix + botAppId: botAppId + botTenantId: botTenantId + messagesEndpoint: aca.outputs.messagesEndpoint + } +} + +// These outputs are copied by azd to .azure//.env file +// post provision script will use these values, too +output AZURE_RESOURCE_GROUP string = rg.name +output APPLICATIONINSIGHTS_CONNECTIONSTRING string = 
appin.outputs.applicationInsightsConnectionString +output AZURE_TENANT_ID string = subscription().tenantId +output AZURE_USER_ASSIGNED_IDENTITY_ID string = uami.outputs.identityId +output AZURE_CONTAINER_REGISTRY_ENDPOINT string = acrModule.outputs.acrEndpoint +output AZURE_OPENAI_MODEL string = openAIModel +output AZURE_OPENAI_ENDPOINT string = openAI.outputs.openAIEndpoint +output AZURE_OPENAI_API_VERSION string = openAIApiVersion +output ENDPOINT_URL string = aca.outputs.messagesEndpoint +output MANIFEST_URL string = aca.outputs.manifestUrl +output HOME_URL string = aca.outputs.homeUrl +output BOT_APP_ID string = botAppId +output BOT_TENANT_ID string = botTenantId diff --git a/python/samples/demos/copilot_studio_skill/infra/main.parameters.json b/python/samples/demos/copilot_studio_skill/infra/main.parameters.json new file mode 100644 index 000000000000..58f7ce97282e --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/infra/main.parameters.json @@ -0,0 +1,42 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentParameters.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "environmentName": { + "value": "${AZURE_ENV_NAME}" + }, + "currentUserId": { + "value": "${AZURE_PRINCIPAL_ID}" + }, + "runningOnGh": { + "value": "${GITHUB_ACTIONS}" + }, + "location": { + "value": "${AZURE_LOCATION}" + }, + "openAiName": { + "value": "${AZURE_OPENAI_NAME}" + }, + "openAiResourceGroupName": { + "value": "${AZURE_OPENAI_RG}" + }, + "openAIModel": { + "value": "${AZURE_OPENAI_MODEL=gpt-4o}" + }, + "openAIApiVersion": { + "value": "${AZURE_OPENAI_API_VERSION=2024-08-01-preview}" + }, + "apiAppExists": { + "value": "${SERVICE_API_RESOURCE_EXISTS=false}" + }, + "botAppId": { + "value": "${BOT_APPID}" + }, + "botPassword": { + "value": "${BOT_PASSWORD}" + }, + "botTenantId": { + "value": "${BOT_TENANT_ID}" + } + } +} diff --git a/python/samples/demos/copilot_studio_skill/infra/openAI.bicep b/python/samples/demos/copilot_studio_skill/infra/openAI.bicep new file mode 100644 index 000000000000..f6f25fe46b65 --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/infra/openAI.bicep @@ -0,0 +1,20 @@ +targetScope = 'resourceGroup' + +param openAIName string +param userAssignedIdentityPrincipalId string + +resource openAI 'Microsoft.CognitiveServices/accounts@2022-03-01' existing = { + name: openAIName +} + +resource roleAssignment 'Microsoft.Authorization/roleAssignments@2020-04-01-preview' = { + name: guid(openAI.id, userAssignedIdentityPrincipalId, 'Cognitive Services OpenAI User') + scope: openAI + properties: { + roleDefinitionId: resourceId('Microsoft.Authorization/roleDefinitions', '5e0bd9bd-7b93-4f28-af87-19fc36ad61bd') // Role definition ID for Cognitive Services OpenAI User + principalId: userAssignedIdentityPrincipalId + principalType: 'ServicePrincipal' + } +} + +output openAIEndpoint string = openAI.properties.endpoint diff --git a/python/samples/demos/copilot_studio_skill/infra/uami.bicep b/python/samples/demos/copilot_studio_skill/infra/uami.bicep new file mode 100644 index 000000000000..613252ba6527 --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/infra/uami.bicep @@ -0,0 +1,13 @@ +param uniqueId string +param prefix string +param location string = resourceGroup().location +param identityName string = '${prefix}uami${uniqueId}' + +resource userAssignedIdentity 'Microsoft.ManagedIdentity/userAssignedIdentities@2018-11-30' = { + name: identityName + location: location +} + +output identityId string = userAssignedIdentity.id +output 
clientId string = userAssignedIdentity.properties.clientId +output principalId string = userAssignedIdentity.properties.principalId diff --git a/python/samples/demos/copilot_studio_skill/src/api/.dockerignore b/python/samples/demos/copilot_studio_skill/src/api/.dockerignore new file mode 100644 index 000000000000..3018d39f305f --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/src/api/.dockerignore @@ -0,0 +1,47 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*.pyd + +# Caches of various types +.cache/ +.pip/ + +# Development environments +.env +.venv/ +venv/ +ENV/ + +# Version control +.git/ +.gitignore +.github/ + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Project backups +*.bak + +# Log files +*.log + +# OS generated files +.DS_Store +Thumbs.db + +# Editor directories and files +.idea/ +.vscode/ +*.swp +*.swo +*~ diff --git a/python/samples/demos/copilot_studio_skill/src/api/adapter.py b/python/samples/demos/copilot_studio_skill/src/api/adapter.py new file mode 100644 index 000000000000..d0e844a4cac8 --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/src/api/adapter.py @@ -0,0 +1,74 @@ +# Copyright (c) Microsoft. All rights reserved. + +import sys +import traceback + +from botbuilder.core import ( + MessageFactory, + TurnContext, +) +from botbuilder.integration.aiohttp import ( + CloudAdapter, + ConfigurationBotFrameworkAuthentication, +) +from botbuilder.schema import Activity, ActivityTypes, InputHints + + +class AdapterWithErrorHandler(CloudAdapter): + def __init__( + self, + settings: ConfigurationBotFrameworkAuthentication, + ): + super().__init__(settings) + + self.on_turn_error = self._handle_turn_error + + async def _handle_turn_error(self, turn_context: TurnContext, error: Exception): + # This check writes out errors to console log + # NOTE: In production environment, you should consider logging this to Azure + # application insights. + print(f"\n [on_turn_error] unhandled error: {error}", file=sys.stderr) + traceback.print_exc() + await self._send_error_message(turn_context, error) + await self._send_eoc_to_parent(turn_context, error) + + async def _send_error_message(self, turn_context: TurnContext, error: Exception): + try: + # Send a message to the user. + error_message_text = "The skill encountered an error or bug." + error_message = MessageFactory.text(error_message_text, error_message_text, InputHints.ignoring_input) + await turn_context.send_activity(error_message) + + error_message_text = "To continue to run this bot, please fix the bot source code." + error_message = MessageFactory.text(error_message_text, error_message_text, InputHints.ignoring_input) + await turn_context.send_activity(error_message) + + # Send a trace activity, which will be displayed in Bot Framework Emulator. + await turn_context.send_trace_activity( + label="TurnError", + name="on_turn_error Trace", + value=f"{error}", + value_type="https://www.botframework.com/schemas/error", + ) + except Exception as exception: + print( + f"\n Exception caught on _send_error_message : {exception}", + file=sys.stderr, + ) + traceback.print_exc() + + async def _send_eoc_to_parent(self, turn_context: TurnContext, error: Exception): + try: + # Send an EndOfConversation activity to the skill caller with the error to end the conversation, + # and let the caller decide what to do. 
+ end_of_conversation = Activity(type=ActivityTypes.end_of_conversation) + end_of_conversation.code = "SkillError" + end_of_conversation.text = str(error) + + await turn_context.send_activity(end_of_conversation) + except Exception as exception: + print( + f"\n Exception caught on _send_eoc_to_parent : {exception}", + file=sys.stderr, + ) + traceback.print_exc() diff --git a/python/samples/demos/copilot_studio_skill/src/api/app.py b/python/samples/demos/copilot_studio_skill/src/api/app.py new file mode 100644 index 000000000000..f1ebbfb82135 --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/src/api/app.py @@ -0,0 +1,55 @@ +# Copyright (c) Microsoft. All rights reserved. + +import logging +import os + +from aiohttp import web +from aiohttp.web import Request, Response +from bot import bot +from config import config + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +# Endpoint for processing messages +async def messages(req: Request): + """ + Endpoint for processing messages with the Skill Bot. + """ + logger.info("Received a message.") + body = await req.json() + logger.info("Request body: %s", body) + + # Process the incoming request + # NOTE in the context of Skills, we MUST return the response to the Copilot Studio as the response to the request + # In other channel (ex. Teams), this would not be required, and activities would be sent to the Bot Framework + return await bot.process(req) + + +async def copilot_manifest(req: Request): + # load manifest from file and interpolate with env vars + with open("copilot-studio.manifest.json") as f: + manifest = f.read() + + # Get container app current ingress fqdn + # See https://learn.microsoft.com/en-us/azure/container-apps/environment-variables?tabs=portal + fqdn = f"https://{os.getenv('CONTAINER_APP_NAME')}.{os.getenv('CONTAINER_APP_ENV_DNS_SUFFIX')}/api/messages" + + manifest = manifest.replace("__botEndpoint", fqdn).replace("__botAppId", config.APP_ID) + + return Response( + text=manifest, + content_type="application/json", + ) + + +APP = web.Application() +APP.router.add_post("/api/messages", messages) +APP.router.add_get("/manifest", copilot_manifest) + +if __name__ == "__main__": + try: + web.run_app(APP, host=config.HOST, port=config.PORT) + except Exception as error: + raise error diff --git a/python/samples/demos/copilot_studio_skill/src/api/auth.py b/python/samples/demos/copilot_studio_skill/src/api/auth.py new file mode 100644 index 000000000000..76479c1c025d --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/src/api/auth.py @@ -0,0 +1,41 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +# See https://github.com/microsoft/BotBuilder-Samples/blob/main/samples/python/80.skills-simple-bot-to-bot/echo-skill-bot/authentication/allowed_callers_claims_validator.py +from collections.abc import Awaitable, Callable + +from botframework.connector.auth import JwtTokenValidation, SkillValidation +from config import Config + + +class AllowedCallersClaimsValidator: + config_key = "ALLOWED_CALLERS" + + def __init__(self, config: Config): + if not config: + raise TypeError("AllowedCallersClaimsValidator: config object cannot be None.") + + # ALLOWED_CALLERS is the setting in config.py file + # that consists of the list of parent bot ids that are allowed to access the skill + # to add a new parent bot simply go to the AllowedCallers and add + # the parent bot's microsoft app id to the list + caller_list = getattr(config, self.config_key) + if caller_list is None: + raise TypeError(f'"{self.config_key}" not found in configuration.') + self._allowed_callers = frozenset(caller_list) + + @property + def claims_validator(self) -> Callable[[list[dict]], Awaitable]: + async def allow_callers_claims_validator(claims: dict[str, object]): + # if allowed_callers is None we allow all calls + if "*" not in self._allowed_callers and SkillValidation.is_skill_claim(claims): + # Check that the appId claim in the skill request is in the list of skills configured for this bot. + app_id = JwtTokenValidation.get_app_id_from_claims(claims) + if app_id not in self._allowed_callers: + raise PermissionError( + f'Received a request from a bot with an app ID of "{app_id}".' + f" To enable requests from this caller, add the app ID to your configuration file." + ) + + return + + return allow_callers_claims_validator diff --git a/python/samples/demos/copilot_studio_skill/src/api/bot.py b/python/samples/demos/copilot_studio_skill/src/api/bot.py new file mode 100644 index 000000000000..693e8c1fca38 --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/src/api/bot.py @@ -0,0 +1,79 @@ +# Copyright (c) Microsoft. All rights reserved. + +from adapter import AdapterWithErrorHandler + +# Custom classes to handle errors and claims validation +from auth import AllowedCallersClaimsValidator +from botbuilder.core import MemoryStorage, MessageFactory, TurnContext +from botbuilder.integration.aiohttp import ConfigurationBotFrameworkAuthentication +from botbuilder.schema import ( + Activity, + EndOfConversationCodes, + InputHints, +) +from botframework.connector.auth import AuthenticationConfiguration +from config import config + +# This is the SK agent that will be used to handle the conversation +from sk_conversation_agent import agent +from teams import Application, ApplicationOptions +from teams.state import TurnState + +from semantic_kernel.contents import ChatHistory + +# This is required for bot to work as Copilot Skill, +# not adding a claims validator will result in an error +claims_validator = AllowedCallersClaimsValidator(config) +auth = AuthenticationConfiguration(tenant_id=config.APP_TENANTID, claims_validator=claims_validator.claims_validator) + +# Create the bot application +# We use the Teams Application class to create the bot application, +# then we added a custom adapter for skill errors handling. 
+bot = Application[TurnState]( + ApplicationOptions( + bot_app_id=config.APP_ID, + storage=MemoryStorage(), + # CANNOT PASS A DICT HERE; MUST PASS A CLASS WITH APP_ID, APP_PASSWORD, AND APP_TENANTID ATTRIBUTES + adapter=AdapterWithErrorHandler(ConfigurationBotFrameworkAuthentication(config, auth_configuration=auth)), + ) +) + + +@bot.before_turn +async def setup_chathistory(context: TurnContext, state: TurnState): + chat_history = state.conversation.get("chat_history") or ChatHistory() + + state.conversation["chat_history"] = chat_history + + return state + + +@bot.activity("message") +async def on_message(context: TurnContext, state: TurnState): + user_message = context.activity.text + + # Get the chat_history from the conversation state + chat_history: ChatHistory = state.conversation.get("chat_history") + + # Add the new user message + chat_history.add_user_message(user_message) + + # Get the response from the semantic kernel agent (v1.22.0 and later) + sk_response = await agent.get_response(history=chat_history, user_input=user_message) + + # Store the updated chat_history back into conversation state + state.conversation["chat_history"] = chat_history + + # Send the response back to the user + # NOTE in the context of a Copilot Skill, + # the response is sent as a Response from /api/messages endpoint + await context.send_activity(MessageFactory.text(sk_response, input_hint=InputHints.ignoring_input)) + + # Skills must send an EndOfConversation activity to indicate the conversation is complete + # NOTE: this is a simple example, in a real skill you would likely want to send this + # only when the user has completed their task + end = Activity.create_end_of_conversation_activity() + end.code = EndOfConversationCodes.completed_successfully + await context.send_activity(end) + + return True diff --git a/python/samples/demos/copilot_studio_skill/src/api/config.py b/python/samples/demos/copilot_studio_skill/src/api/config.py new file mode 100644 index 000000000000..9b94f8a941c2 --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/src/api/config.py @@ -0,0 +1,45 @@ +# Copyright (c) Microsoft. All rights reserved. + +import os + +from dotenv import load_dotenv + +load_dotenv(override=True) + + +class Config: + """Bot Configuration""" + + HOST = os.getenv("HOST", "localhost") + PORT = int(os.getenv("PORT", 8080)) + + # DO NOT CHANGE THIS KEYS!! + # These keys are used to validate the bot's identity + # and must match these named as Bot configuration expects + APP_ID = os.getenv("BOT_APP_ID") + APP_PASSWORD = os.getenv("BOT_PASSWORD") + APP_TENANTID = os.getenv("BOT_TENANT_ID") + APP_TYPE = os.getenv("APP_TYPE", "singletenant") + + # Required for Copilot Skill + # Can be a list of allowed agent Ids, + # or "*" to allow any agent + ALLOWED_CALLERS = os.getenv("ALLOWED_CALLERS", ["*"]) + + # Required for Azure OpenAI + AZURE_OPENAI_CHAT_DEPLOYMENT_NAME = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME") + AZURE_OPENAI_ENDPOINT = os.getenv("AZURE_OPENAI_ENDPOINT") + AZURE_OPENAI_API_VERSION = os.getenv("AZURE_OPENAI_API_VERSION") + + def validate(self): + if not self.HOST or not self.PORT: + raise Exception("Missing required configuration. HOST and PORT must be set.") + if not self.APP_ID or not self.APP_PASSWORD or not self.APP_TENANTID: + raise Exception("Missing required configuration. APP_ID, APP_PASSWORD, and APP_TENANT_ID must be set.") + + if not self.ALLOWED_CALLERS: + raise Exception("Missing required configuration. 
ALLOWED_CALLERS must be set.") + + +config = Config() +config.validate() diff --git a/python/samples/demos/copilot_studio_skill/src/api/copilot-studio.manifest.json b/python/samples/demos/copilot_studio_skill/src/api/copilot-studio.manifest.json new file mode 100644 index 000000000000..9dae565c1f5c --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/src/api/copilot-studio.manifest.json @@ -0,0 +1,25 @@ +{ + "$schema": "https://schemas.botframework.com/schemas/skills/v2.2/skill-manifest.json", + "$id": "SKCopilotSkill", + "name": "SK Copilot Skill", + "version": "1.0", + "description": "This is a sample skill using Semantic Kernel", + "publisherName": "Microsoft", + "privacyUrl": "https://www.microsoft.com/en-us/privacy/privacystatement", + "iconUrl": "https://docs.botframework.com/static/devportal/client/images/bot-framework-default.png", + "endpoints": [ + { + "name": "default", + "protocol": "BotFrameworkV3", + "description": "Default endpoint for the bot", + "endpointUrl": "__botEndpoint", + "msAppId": "__botAppId" + } + ], + "activities": { + "message": { + "type": "message", + "description": "Invoke Semantic Kernel skill" + } + } +} diff --git a/python/samples/demos/copilot_studio_skill/src/api/dockerfile b/python/samples/demos/copilot_studio_skill/src/api/dockerfile new file mode 100644 index 000000000000..ed7b918da351 --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/src/api/dockerfile @@ -0,0 +1,23 @@ +FROM python:3.12-slim + +# Step 1 - Install dependencies +WORKDIR /app + +# Step 2 - Copy only requirements.txt +COPY requirements.txt /app + +# Step 4 - Install pip dependencies +RUN pip install --no-cache-dir -r requirements.txt + +# Step 5 - Copy the rest of the files +COPY . . +ENV PYTHONUNBUFFERED=1 + +# Expose the application port +EXPOSE 80 + +ENV HOST 0.0.0.0 +ENV PORT 80 + +# do not change the arguments +CMD ["python", "app.py"] \ No newline at end of file diff --git a/python/samples/demos/copilot_studio_skill/src/api/requirements.txt b/python/samples/demos/copilot_studio_skill/src/api/requirements.txt new file mode 100644 index 000000000000..fd7fbd63619c --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/src/api/requirements.txt @@ -0,0 +1,4 @@ +python-dotenv>=1.0.1 +botbuilder-integration-aiohttp>=4.15.0 +teams-ai>=1.4.0,<2.0.0 +semantic-kernel>=1.22.0 diff --git a/python/samples/demos/copilot_studio_skill/src/api/sk_conversation_agent.py b/python/samples/demos/copilot_studio_skill/src/api/sk_conversation_agent.py new file mode 100644 index 000000000000..f341d2318a6f --- /dev/null +++ b/python/samples/demos/copilot_studio_skill/src/api/sk_conversation_agent.py @@ -0,0 +1,10 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from semantic_kernel.agents import ChatCompletionAgent +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion + +agent = ChatCompletionAgent( + service=AzureChatCompletion(), + name="ChatAgent", + instructions="You invent jokes to have a fun conversation with the user.", +) diff --git a/python/samples/getting_started_with_agents/azure_ai_agent/step7_azure_ai_agent_retrieval.py b/python/samples/getting_started_with_agents/azure_ai_agent/step7_azure_ai_agent_retrieval.py index 7ad50f857a66..8c33b8b80c2d 100644 --- a/python/samples/getting_started_with_agents/azure_ai_agent/step7_azure_ai_agent_retrieval.py +++ b/python/samples/getting_started_with_agents/azure_ai_agent/step7_azure_ai_agent_retrieval.py @@ -25,11 +25,11 @@ async def main() -> None: DefaultAzureCredential() as creds, AzureAIAgent.create_client(credential=creds) as client, ): - # 1. Retrieve the agent definition based on the `assistant_id` - # Replace the "your-assistant-id" with the actual assistant ID + # 1. Retrieve the agent definition based on the `agent_id` + # Replace the "your-agent-id" with the actual agent ID # you want to use. agent_definition = await client.agents.get_agent( - assistant_id="your-assistant-id", + agent_id="your-agent-id", ) # 2. Create a Semantic Kernel agent for the Azure AI agent @@ -52,7 +52,7 @@ async def main() -> None: finally: # 6. Cleanup: Delete the thread and agent await client.agents.delete_thread(thread.id) - # Do not clean up the assistant so it can be used again + # Do not clean up the agent so it can be used again """ Sample Output: diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/azure_realtime.py b/python/semantic_kernel/connectors/ai/open_ai/services/azure_realtime.py index dd36eebda2d2..2e7ed1a4009b 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/azure_realtime.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/azure_realtime.py @@ -103,6 +103,7 @@ def __init__( if not azure_openai_settings.realtime_deployment_name: raise ServiceInitializationError("The OpenAI realtime model ID is required.") super().__init__( + api_key=azure_openai_settings.api_key.get_secret_value() if azure_openai_settings.api_key else None, audio_output_callback=audio_output_callback, deployment_name=azure_openai_settings.realtime_deployment_name, endpoint=azure_openai_settings.endpoint, diff --git a/python/semantic_kernel/functions/kernel_function.py b/python/semantic_kernel/functions/kernel_function.py index a75459099290..4e8e7161c6fc 100644 --- a/python/semantic_kernel/functions/kernel_function.py +++ b/python/semantic_kernel/functions/kernel_function.py @@ -92,9 +92,11 @@ class KernelFunction(KernelBaseModel): metadata: KernelFunctionMetadata - invocation_duration_histogram: metrics.Histogram = Field(default_factory=_create_function_duration_histogram) + invocation_duration_histogram: metrics.Histogram = Field( + default_factory=_create_function_duration_histogram, exclude=True + ) streaming_duration_histogram: metrics.Histogram = Field( - default_factory=_create_function_streaming_duration_histogram + default_factory=_create_function_streaming_duration_histogram, exclude=True ) @classmethod diff --git a/python/semantic_kernel/functions/kernel_function_from_method.py b/python/semantic_kernel/functions/kernel_function_from_method.py index 0daea5af0ab0..07f020d9d04b 100644 --- a/python/semantic_kernel/functions/kernel_function_from_method.py +++ b/python/semantic_kernel/functions/kernel_function_from_method.py @@ -6,7 +6,7 @@ from inspect 
import isasyncgen, isasyncgenfunction, isawaitable, iscoroutinefunction, isgenerator, isgeneratorfunction from typing import Any -from pydantic import ValidationError +from pydantic import Field, ValidationError from semantic_kernel.exceptions import FunctionExecutionException, FunctionInitializationError from semantic_kernel.filters.functions.function_invocation_context import FunctionInvocationContext @@ -21,8 +21,8 @@ class KernelFunctionFromMethod(KernelFunction): """Semantic Kernel Function from a method.""" - method: Callable[..., Any] - stream_method: Callable[..., Any] | None = None + method: Callable[..., Any] = Field(exclude=True) + stream_method: Callable[..., Any] | None = Field(default=None, exclude=True) def __init__( self, diff --git a/python/semantic_kernel/functions/kernel_parameter_metadata.py b/python/semantic_kernel/functions/kernel_parameter_metadata.py index 6eb28074879e..1a487490e282 100644 --- a/python/semantic_kernel/functions/kernel_parameter_metadata.py +++ b/python/semantic_kernel/functions/kernel_parameter_metadata.py @@ -17,7 +17,7 @@ class KernelParameterMetadata(KernelBaseModel): default_value: Any | None = None type_: str | None = Field(default="str", alias="type") is_required: bool | None = False - type_object: Any | None = None + type_object: Any | None = Field(default=None, exclude=True) schema_data: dict[str, Any] | None = None include_in_function_choices: bool = True diff --git a/python/tests/samples/test_concepts.py b/python/tests/samples/test_concepts.py index 86a1347fb157..d8f7f0d9861a 100644 --- a/python/tests/samples/test_concepts.py +++ b/python/tests/samples/test_concepts.py @@ -46,7 +46,6 @@ from samples.concepts.prompt_templates.load_yaml_prompt import main as load_yaml_prompt from samples.concepts.prompt_templates.template_language import main as template_language from samples.concepts.rag.rag_with_text_memory_plugin import main as rag_with_text_memory_plugin -from samples.concepts.search.bing_search_plugin import main as bing_search_plugin from samples.concepts.service_selector.custom_service_selector import main as custom_service_selector from samples.concepts.text_completion.text_completion import main as text_completion from samples.getting_started_with_agents.chat_completion.step1_chat_completion_agent_simple import ( @@ -249,12 +248,6 @@ marks=pytest.mark.skipif(os.getenv(MEMORY_CONCEPT_SAMPLE, None) is None, reason="Not running memory samples."), ), param(rag_with_text_memory_plugin, [], id="rag_with_text_memory_plugin"), - param( - bing_search_plugin, - [], - id="bing_search_plugin", - marks=pytest.mark.skip(reason="Flaky test due to Azure OpenAI content policy"), - ), param( custom_service_selector, [], diff --git a/python/tests/unit/agents/azure_ai_agent/test_agent_content_generation.py b/python/tests/unit/agents/azure_ai_agent/test_agent_content_generation.py index 0d23fcc7f3eb..aeb68471d68e 100644 --- a/python/tests/unit/agents/azure_ai_agent/test_agent_content_generation.py +++ b/python/tests/unit/agents/azure_ai_agent/test_agent_content_generation.py @@ -105,7 +105,7 @@ def test_generate_message_content_text_and_image(): ) thread_msg.content = [image, text] - step = RunStep(id="step_id", run_id="run_id", thread_id="thread_id", agent_id="assistant_id") + step = RunStep(id="step_id", run_id="run_id", thread_id="thread_id", agent_id="agent_id") out = generate_message_content("assistant", thread_msg, step) assert len(out.items) == 5 assert isinstance(out.items[0], FileReferenceContent) diff --git 
a/python/tests/unit/connectors/ai/ollama/services/test_utils.py b/python/tests/unit/connectors/ai/ollama/services/test_utils.py new file mode 100644 index 000000000000..a4829f4f858d --- /dev/null +++ b/python/tests/unit/connectors/ai/ollama/services/test_utils.py @@ -0,0 +1,240 @@ +# Copyright (c) Microsoft. All rights reserved. +from unittest.mock import MagicMock, patch + +import pytest + +from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType + +# The code under test +from semantic_kernel.connectors.ai.ollama.services.utils import ( + MESSAGE_CONVERTERS, + update_settings_from_function_choice_configuration, +) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.contents.image_content import ImageContent +from semantic_kernel.contents.utils.author_role import AuthorRole + + +@pytest.fixture +def mock_chat_message_content() -> ChatMessageContent: + """Fixture to create a basic ChatMessageContent object with role=USER and simple text content.""" + return ChatMessageContent( + role=AuthorRole.USER, + content="Hello, I am a user message.", # The text content + ) + + +@pytest.fixture +def mock_system_message_content() -> ChatMessageContent: + """Fixture to create a ChatMessageContent object with role=SYSTEM.""" + return ChatMessageContent(role=AuthorRole.SYSTEM, content="This is a system message.") + + +@pytest.fixture +def mock_assistant_message_content() -> ChatMessageContent: + """Fixture to create a ChatMessageContent object with role=ASSISTANT.""" + return ChatMessageContent(role=AuthorRole.ASSISTANT, content="This is an assistant message.") + + +@pytest.fixture +def mock_tool_message_content() -> ChatMessageContent: + """Fixture to create a ChatMessageContent object with role=TOOL.""" + return ChatMessageContent(role=AuthorRole.TOOL, content="This is a tool message.") + + +def test_message_converters_system(mock_system_message_content: ChatMessageContent) -> None: + """Test that passing a system message returns the correct dictionary structure for 'system' role.""" + # Act + converter = MESSAGE_CONVERTERS[AuthorRole.SYSTEM] + result = converter(mock_system_message_content) + + # Assert + assert result["role"] == "system", "Expected role to be 'system' on the returned message." + assert result["content"] == mock_system_message_content.content, ( + "Expected content to match the system message content." + ) + + +def test_message_converters_user_no_images(mock_chat_message_content: ChatMessageContent) -> None: + """Test that passing a user message without images returns correct dictionary structure for 'user' role.""" + # Act + converter = MESSAGE_CONVERTERS[AuthorRole.USER] + result = converter(mock_chat_message_content) + + # Assert + assert result["role"] == "user", "Expected role to be 'user' on the returned message." + assert result["content"] == mock_chat_message_content.content, "Expected content to match the user message content." + # Ensure that no 'images' field is added + assert "images" not in result, "No images should be present if no ImageContent is added." 
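For orientation, a minimal sketch (illustrative only, not part of the patch; the message texts are invented) of how the MESSAGE_CONVERTERS table exercised above is typically applied to a small chat history to build the plain-dict messages payload the Ollama client consumes:

from semantic_kernel.connectors.ai.ollama.services.utils import MESSAGE_CONVERTERS
from semantic_kernel.contents.chat_message_content import ChatMessageContent
from semantic_kernel.contents.utils.author_role import AuthorRole

# Each ChatMessageContent is converted, keyed by its author role, into the dict
# shape asserted in these tests: {"role": ..., "content": ...} plus optional fields.
history = [
    ChatMessageContent(role=AuthorRole.SYSTEM, content="You answer briefly."),
    ChatMessageContent(role=AuthorRole.USER, content="What does Semantic Kernel do?"),
]
messages = [MESSAGE_CONVERTERS[message.role](message) for message in history]
# messages[0] -> {"role": "system", "content": "You answer briefly."}
# messages[1] -> {"role": "user", "content": "What does Semantic Kernel do?"}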
+ + +def test_message_converters_user_with_images() -> None: + """Test user message with multiple images, verifying the 'images' field is populated.""" + # Arrange + img1 = ImageContent(data="some_base64_data") + img2 = ImageContent(data="other_base64_data") + content = ChatMessageContent(role=AuthorRole.USER, items=[img1, img2], content="User with images") + + # Act + converter = MESSAGE_CONVERTERS[AuthorRole.USER] + result = converter(content) + + # Assert + assert result["role"] == "user" + assert result["content"] == content.content + assert "images" in result, "Images field expected when ImageContent is present." + assert len(result["images"]) == 2, "Two images should be in the 'images' field." + assert result["images"] == [b"some_base64_data", b"other_base64_data"], ( + "Image data should match the content from ImageContent." + ) + + +def test_message_converters_user_with_image_missing_data() -> None: + """Test user message with image content that has missing data, expecting ValueError.""" + # Arrange + bad_image = ImageContent(data="") # empty data for image + content = ChatMessageContent(role=AuthorRole.USER, items=[bad_image]) + + # Act & Assert + converter = MESSAGE_CONVERTERS[AuthorRole.USER] + with pytest.raises(ValueError) as exc_info: + converter(content) + + assert "Image item must contain data encoded as base64." in str(exc_info.value), ( + "Should raise ValueError for missing base64 data in image." + ) + + +def test_message_converters_assistant_basic(mock_assistant_message_content: ChatMessageContent) -> None: + """Test assistant message without images or tool calls.""" + # Act + converter = MESSAGE_CONVERTERS[AuthorRole.ASSISTANT] + result = converter(mock_assistant_message_content) + + # Assert + assert result["role"] == "assistant", "Assistant role expected." + assert result["content"] == mock_assistant_message_content.content + assert "images" not in result, "No images included, so should not have an 'images' field." + assert "tool_calls" not in result, "No FunctionCallContent, so 'tool_calls' field shouldn't be present." + + +def test_message_converters_assistant_with_image() -> None: + """Test assistant message containing images. Verify 'images' field is added.""" + # Arrange + img = ImageContent(data="assistant_base64_data") + content = ChatMessageContent(role=AuthorRole.ASSISTANT, items=[img], content="Assistant image message") + + # Act + converter = MESSAGE_CONVERTERS[AuthorRole.ASSISTANT] + result = converter(content) + + # Assert + assert result["role"] == "assistant" + assert result["content"] == content.content + assert "images" in result, "Images should be included for assistant messages with ImageContent." + assert result["images"] == [b"assistant_base64_data"], "Expected matching base64 data in images." 
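Ahead of the tool-call tests that follow, a compact sketch (values invented for illustration) of how a complete tool-calling turn looks once converted: the assistant's function call and the subsequent tool result map to the two dict shapes asserted below.

from semantic_kernel.connectors.ai.ollama.services.utils import MESSAGE_CONVERTERS
from semantic_kernel.contents.chat_message_content import ChatMessageContent
from semantic_kernel.contents.function_call_content import FunctionCallContent
from semantic_kernel.contents.function_result_content import FunctionResultContent
from semantic_kernel.contents.utils.author_role import AuthorRole

# Assistant turn: the FunctionCallContent becomes an entry in "tool_calls",
# with its JSON argument string parsed into a dict.
call = FunctionCallContent(id="call_1", function_name="get_weather", arguments='{"city": "Seattle"}')
assistant_turn = MESSAGE_CONVERTERS[AuthorRole.ASSISTANT](
    ChatMessageContent(role=AuthorRole.ASSISTANT, items=[call], content="")
)
# assistant_turn["tool_calls"][0]["function"]["arguments"] == {"city": "Seattle"}

# Tool turn: the FunctionResultContent's result becomes the message content.
result = FunctionResultContent(id="call_1", function_name="get_weather", result="Rainy, 11 C")
tool_turn = MESSAGE_CONVERTERS[AuthorRole.TOOL](ChatMessageContent(role=AuthorRole.TOOL, items=[result]))
# tool_turn["role"] == "tool" and tool_turn["content"] == "Rainy, 11 C"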
+ + +def test_message_converters_assistant_with_tool_calls() -> None: + """Test assistant message with FunctionCallContent should populate 'tool_calls'.""" + # Arrange + tool_call_1 = FunctionCallContent(function_name="foo", arguments='{"key": "value"}') + tool_call_2 = FunctionCallContent(function_name="bar", arguments='{"another": "123"}') + + content = ChatMessageContent( + role=AuthorRole.ASSISTANT, items=[tool_call_1, tool_call_2], content="Assistant with tools" + ) + + # Act + converter = MESSAGE_CONVERTERS[AuthorRole.ASSISTANT] + result = converter(content) + + # Assert + assert result["role"] == "assistant" + assert result["content"] == content.content + assert "tool_calls" in result, "tool_calls field should be present for assistant messages with FunctionCallContent." + assert len(result["tool_calls"]) == 2, "Expected two tool calls in the result." + assert result["tool_calls"][0]["function"]["name"] == "foo", "First tool call function name mismatched." + assert result["tool_calls"][0]["function"]["arguments"] == {"key": "value"}, "Expected arguments to be JSON loaded." + assert result["tool_calls"][1]["function"]["name"] == "bar", "Second tool call function name mismatched." + assert result["tool_calls"][1]["function"]["arguments"] == {"another": "123"}, ( + "Expected arguments to be JSON loaded." + ) + + +def test_message_converters_tool_with_result() -> None: + """Test tool message with a FunctionResultContent, verifying the message content is set.""" + # Arrange + fr_content = FunctionResultContent(id="some_id", result="some result", function_name="test_func") + tool_message = ChatMessageContent(role=AuthorRole.TOOL, items=[fr_content]) + + # Act + converter = MESSAGE_CONVERTERS[AuthorRole.TOOL] + result = converter(tool_message) + + # Assert + assert result["role"] == "tool", "Expected role to be 'tool' for a tool message." + # The code takes the first FunctionResultContent's result as the content + assert result["content"] == fr_content.result, "Expected content to match the function result." + + +def test_message_converters_tool_missing_function_result_content(mock_tool_message_content: ChatMessageContent) -> None: + """Test that if no FunctionResultContent is present, ValueError is raised.""" + # Arrange + mock_tool_message_content.items = [] # no FunctionResultContent in items + converter = MESSAGE_CONVERTERS[AuthorRole.TOOL] + + # Act & Assert + with pytest.raises(ValueError) as exc_info: + converter(mock_tool_message_content) + assert "Tool message must have a function result content item." in str(exc_info.value) + + +@pytest.mark.parametrize("choice_type", [FunctionChoiceType.AUTO, FunctionChoiceType.NONE, FunctionChoiceType.REQUIRED]) +def test_update_settings_from_function_choice_configuration(choice_type: FunctionChoiceType) -> None: + """Test that update_settings_from_function_choice_configuration updates the settings with the correct tools.""" + # Arrange + # We'll create a mock configuration with some available functions. + mock_config = FunctionCallChoiceConfiguration() + mock_config.available_functions = [MagicMock() for _ in range(2)] + + # We also patch the kernel_function_metadata_to_function_call_format function. + # The function returns a dict object describing each function. 
+ mock_tool_description = {"type": "function", "function": {"name": "mocked_function"}} + + with patch( + "semantic_kernel.connectors.ai.ollama.services.utils.kernel_function_metadata_to_function_call_format", + return_value=mock_tool_description, + ): + settings = PromptExecutionSettings() + + # Act + update_settings_from_function_choice_configuration( + function_choice_configuration=mock_config, + settings=settings, + type=choice_type, + ) + + # Assert + # After the call, either settings.tools or settings.extension_data["tools"] should be set. + # The code tries settings.tools first and if it fails, it sets extension_data["tools"]. + # We'll check both possibilities. + possible_tools = getattr(settings, "tools", None) + + if possible_tools is not None: + # If settings.tools exists, ensure it got updated + assert len(possible_tools) == 2, "Should have exactly two tools set in the settings.tools attribute." + assert possible_tools[0]["function"]["name"] == "mocked_function", ( + "Expected mocked function name in settings.tools." + ) + else: + # Otherwise check for extension_data + assert "tools" in settings.extension_data, "Expected 'tools' in extension_data if settings.tools not present." + assert len(settings.extension_data["tools"]) == 2, "Should have exactly two tools in extension_data." + assert settings.extension_data["tools"][0]["function"]["name"] == "mocked_function", ( + "Expected mocked function name in extension_data." + ) diff --git a/python/tests/unit/functions/test_kernel_function_from_method.py b/python/tests/unit/functions/test_kernel_function_from_method.py index 7e92f702ddf6..3b868625d87c 100644 --- a/python/tests/unit/functions/test_kernel_function_from_method.py +++ b/python/tests/unit/functions/test_kernel_function_from_method.py @@ -544,3 +544,25 @@ def test_gather_function_parameters_exception_handling(get_custom_type_function_ with pytest.raises(FunctionExecutionException, match=r"Parameter param is expected to be parsed to .* but is not."): func.gather_function_parameters(context) + + +@pytest.mark.parametrize( + ("mode"), + [ + ("python"), + ("json"), + ], +) +def test_function_model_dump(get_custom_type_function_pydantic, mode): + func: KernelFunctionFromMethod = get_custom_type_function_pydantic + model_dump = func.model_dump(mode=mode) + assert isinstance(model_dump, dict) + assert "metadata" in model_dump + assert len(model_dump["metadata"]["parameters"]) == 1 + + +def test_function_model_dump_json(get_custom_type_function_pydantic): + func = get_custom_type_function_pydantic + model_dump = func.model_dump_json() + assert isinstance(model_dump, str) + assert "metadata" in model_dump diff --git a/python/tests/unit/functions/test_kernel_function_from_prompt.py b/python/tests/unit/functions/test_kernel_function_from_prompt.py index d9f8e18282ac..b33acae82248 100644 --- a/python/tests/unit/functions/test_kernel_function_from_prompt.py +++ b/python/tests/unit/functions/test_kernel_function_from_prompt.py @@ -382,3 +382,39 @@ async def prompt_rendering_filter(context: PromptRenderContext, next): context = FunctionInvocationContext(function=function, kernel=kernel, arguments=KernelArguments()) prompt_render_result = await function._render_prompt(context) assert prompt_render_result.rendered_prompt == "preface test" + + +@pytest.mark.parametrize( + ("mode"), + [ + ("python"), + ("json"), + ], +) +def test_function_model_dump(mode: str): + function = KernelFunctionFromPrompt( + function_name="test", + plugin_name="test", + prompt="test", + 
template_format="semantic-kernel", + prompt_template_config=PromptTemplateConfig( + template="test", + input_variables=[InputVariable(name="input", type="str", default="test", is_required=False)], + ), + ) + model_dump = function.model_dump(mode=mode) + assert isinstance(model_dump, dict) + assert "metadata" in model_dump + assert len(model_dump["metadata"]["parameters"]) == 1 + + +def test_function_model_dump_json(): + function = KernelFunctionFromPrompt( + function_name="test", + plugin_name="test", + prompt="test", + template_format="semantic-kernel", + ) + model_dump_json = function.model_dump_json() + assert isinstance(model_dump_json, str) + assert "test" in model_dump_json diff --git a/python/tests/unit/processes/dapr_runtime/test_dapr_actor_registration.py b/python/tests/unit/processes/dapr_runtime/test_dapr_actor_registration.py new file mode 100644 index 000000000000..ba1ccbc39f62 --- /dev/null +++ b/python/tests/unit/processes/dapr_runtime/test_dapr_actor_registration.py @@ -0,0 +1,134 @@ +# Copyright (c) Microsoft. All rights reserved. + +from unittest.mock import MagicMock + +import pytest + +from semantic_kernel.processes.dapr_runtime import ( + EventBufferActor, + ExternalEventBufferActor, + MessageBufferActor, + ProcessActor, + StepActor, +) +from semantic_kernel.processes.dapr_runtime.dapr_actor_registration import ( + create_actor_factories, + register_fastapi_dapr_actors, + register_flask_dapr_actors, +) + + +class MockActor: + """Mock actor to record register_actor calls.""" + + def __init__(self): + self.registrations = [] + + async def register_actor(self, actor_type, actor_factory=None): + # Record registration details + self.registrations.append({"actor_type": actor_type, "actor_factory": actor_factory}) + + def register_actor_sync(self, actor_type, actor_factory=None): + # Synchronous version for Flask + self.registrations.append({"actor_type": actor_type, "actor_factory": actor_factory}) + + +class MockFlaskDaprActor: + """Mock Flask actor with synchronous register_actor method.""" + + def __init__(self): + self.registrations = [] + + def register_actor(self, actor_type, actor_factory=None): + self.registrations.append({"actor_type": actor_type, "actor_factory": actor_factory}) + + +@pytest.fixture +def mock_kernel() -> MagicMock: + """Provides a mock kernel object.""" + return MagicMock(name="Kernel") + + +@pytest.fixture +def mock_factories() -> dict: + """Provides a mock factories dictionary.""" + return {"mock": lambda: "mock_factory"} + + +def test_create_actor_factories_returns_factories(mock_kernel, mock_factories): + """ + Test that create_actor_factories returns callable factory functions that construct actors + with the provided kernel and factories. 
+ """ + process_factory, step_factory = create_actor_factories(mock_kernel, mock_factories) + + # Check that the returned factories are callable + assert callable(process_factory) + assert callable(step_factory) + + # Create mock context and actor_id + mock_ctx = MagicMock() + mock_actor_id = MagicMock() + mock_actor_id.id = "actor_1" + + # Call the factories to create ProcessActor and StepActor objects + process_actor = process_factory(mock_ctx, mock_actor_id) + step_actor = step_factory(mock_ctx, mock_actor_id) + + # Check that the actors have the kernel and factories set correctly + assert hasattr(process_actor, "kernel") + assert process_actor.kernel == mock_kernel + assert hasattr(process_actor, "factories") + assert process_actor.factories == mock_factories + + assert hasattr(step_actor, "kernel") + assert step_actor.kernel == mock_kernel + assert hasattr(step_actor, "factories") + assert step_actor.factories == mock_factories + + +async def test_register_fastapi_dapr_actors(mock_kernel, mock_factories): + """ + Test that register_fastapi_dapr_actors registers all the required actors with appropriate + factories for FastAPI. + """ + mock_actor = MockActor() + + # Call the registration function + await register_fastapi_dapr_actors(mock_actor, mock_kernel, mock_factories) + + # There should be 5 registrations: ProcessActor, StepActor (with factories) and + # three registrations without factories + expected_actor_types = {ProcessActor, StepActor, EventBufferActor, MessageBufferActor, ExternalEventBufferActor} + registered_actor_types = {reg["actor_type"] for reg in mock_actor.registrations} + + assert expected_actor_types == registered_actor_types + + # Verify that ProcessActor and StepActor registrations have an actor_factory + for reg in mock_actor.registrations: + if reg["actor_type"] in {ProcessActor, StepActor}: + assert reg["actor_factory"] is not None + else: + assert reg.get("actor_factory") is None + + +def test_register_flask_dapr_actors(mock_kernel, mock_factories): + """Test that register_flask_dapr_actors registers all the required actors with appropriate factories for Flask.""" + mock_actor = MockFlaskDaprActor() + + # Call the synchronous registration function + register_flask_dapr_actors(mock_actor, mock_kernel, mock_factories) + + # There should be 5 registrations: ProcessActor, StepActor (with factories) and + # three registrations without factories + expected_actor_types = {ProcessActor, StepActor, EventBufferActor, MessageBufferActor, ExternalEventBufferActor} + registered_actor_types = {reg["actor_type"] for reg in mock_actor.registrations} + + assert expected_actor_types == registered_actor_types + + # Check that ProcessActor and StepActor registrations have non-null actor_factory + for reg in mock_actor.registrations: + if reg["actor_type"] in {ProcessActor, StepActor}: + assert reg["actor_factory"] is not None + else: + assert reg.get("actor_factory") is None From 9560b51d69129af7da53ec24a7c39285d1241c96 Mon Sep 17 00:00:00 2001 From: Adam Sitnik Date: Fri, 28 Mar 2025 10:41:26 +0100 Subject: [PATCH 20/63] .Net MEVD: Create indexes for relational DBs (#11233) fixes #11185 --- ...PostgresVectorStoreCollectionSqlBuilder.cs | 5 ++- ...PostgresVectorStoreCollectionSqlBuilder.cs | 15 +++++-- .../PostgresVectorStoreDbClient.cs | 4 +- ...ostgresVectorStoreRecordPropertyMapping.cs | 17 +++++--- .../SqlServerCommandBuilder.cs | 43 +++++++++++++++++++ .../SqlServerConstants.cs | 2 + .../Connectors.Memory.Sqlite/SqliteColumn.cs | 2 + 
...liteVectorStoreCollectionCommandBuilder.cs | 8 ++++ .../SqliteVectorStoreRecordPropertyMapping.cs | 3 +- ...resVectorStoreCollectionSqlBuilderTests.cs | 22 ++++++++-- ...esVectorStoreRecordPropertyMappingTests.cs | 20 ++++++--- ...teVectorStoreRecordPropertyMappingTests.cs | 5 ++- .../SqlServerCommandBuilderTests.cs | 4 ++ 13 files changed, 125 insertions(+), 25 deletions(-) diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs index 0175243131cd..933c6b0ca1e7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs @@ -45,12 +45,13 @@ internal interface IPostgresVectorStoreCollectionSqlBuilder /// /// The schema of the table. /// The name of the table. - /// The name of the vector column. + /// The name of the column. /// The kind of index to create. /// The distance function to use for the index. + /// Specifies whether the column is a vector column. /// Specifies whether to include IF NOT EXISTS in the command. /// The built SQL command info. - PostgresSqlCommandInfo BuildCreateVectorIndexCommand(string schema, string tableName, string vectorColumnName, string indexKind, string distanceFunction, bool ifNotExists); + PostgresSqlCommandInfo BuildCreateIndexCommand(string schema, string tableName, string columnName, string indexKind, string distanceFunction, bool isVector, bool ifNotExists); ///
/// Builds a SQL command to drop a table in the Postgres vector store. diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs index f661c09ebf44..ae8b0a1e9e21 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs @@ -124,8 +124,17 @@ public PostgresSqlCommandInfo BuildCreateTableCommand(string schema, string tabl } /// - public PostgresSqlCommandInfo BuildCreateVectorIndexCommand(string schema, string tableName, string vectorColumnName, string indexKind, string distanceFunction, bool ifNotExists) + public PostgresSqlCommandInfo BuildCreateIndexCommand(string schema, string tableName, string columnName, string indexKind, string distanceFunction, bool isVector, bool ifNotExists) { + var indexName = $"{tableName}_{columnName}_index"; + + if (!isVector) + { + return new PostgresSqlCommandInfo(commandText: + $@"CREATE INDEX {(ifNotExists ? "IF NOT EXISTS " : "")}""{indexName}"" ON {schema}.""{tableName}"" (""{columnName}"");" + ); + } + // Only support creating HNSW index creation through the connector. var indexTypeName = indexKind switch { @@ -145,11 +154,9 @@ public PostgresSqlCommandInfo BuildCreateVectorIndexCommand(string schema, strin _ => throw new NotSupportedException($"Distance function {distanceFunction} is not supported.") }; - var indexName = $"{tableName}_{vectorColumnName}_index"; - return new PostgresSqlCommandInfo( commandText: $@" - CREATE INDEX {(ifNotExists ? "IF NOT EXISTS " : "")} ""{indexName}"" ON {schema}.""{tableName}"" USING {indexTypeName} (""{vectorColumnName}"" {indexOps});" + CREATE INDEX {(ifNotExists ? "IF NOT EXISTS " : "")} ""{indexName}"" ON {schema}.""{tableName}"" USING {indexTypeName} (""{columnName}"" {indexOps});" ); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs index a167aad9cd02..cbdd55fd97a4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs @@ -72,9 +72,9 @@ public async Task CreateTableAsync(string tableName, IReadOnlyList - this._sqlBuilder.BuildCreateVectorIndexCommand(this._schema, tableName, index.column, index.kind, index.function, ifNotExists) + this._sqlBuilder.BuildCreateIndexCommand(this._schema, tableName, index.column, index.kind, index.function, index.isVector, ifNotExists) ); // Execute the commands in a transaction. diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs index 5e8509236e31..d0a76147bf4d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs @@ -199,21 +199,22 @@ public static NpgsqlParameter GetNpgsqlParameter(object? value) } /// - /// Returns information about vector indexes to create, validating that the dimensions of the vector are supported. + /// Returns information about indexes to create, validating that the dimensions of the vector are supported. 
/// /// The properties of the vector store record. - /// A list of tuples containing the column name, index kind, and distance function for each vector property. + /// A list of tuples containing the column name, index kind, and distance function for each property. /// /// The default index kind is "Flat", which prevents the creation of an index. /// - public static List<(string column, string kind, string function)> GetVectorIndexInfo(IReadOnlyList properties) + public static List<(string column, string kind, string function, bool isVector)> GetIndexInfo(IReadOnlyList properties) { - var vectorIndexesToCreate = new List<(string column, string kind, string function)>(); + var vectorIndexesToCreate = new List<(string column, string kind, string function, bool isVector)>(); foreach (var property in properties) { + var columnName = property.StoragePropertyName ?? property.DataModelPropertyName; + if (property is VectorStoreRecordVectorProperty vectorProperty) { - var vectorColumnName = vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName; var indexKind = vectorProperty.IndexKind ?? PostgresConstants.DefaultIndexKind; var distanceFunction = vectorProperty.DistanceFunction ?? PostgresConstants.DefaultDistanceFunction; @@ -231,9 +232,13 @@ public static NpgsqlParameter GetNpgsqlParameter(object? value) ); } - vectorIndexesToCreate.Add((vectorColumnName, indexKind, distanceFunction)); + vectorIndexesToCreate.Add((columnName, indexKind, distanceFunction, isVector: true)); } } + else if (property is VectorStoreRecordDataProperty { IsFilterable: true }) + { + vectorIndexesToCreate.Add((columnName, "", "", isVector: false)); + } } return vectorIndexesToCreate; } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs index 9e97e37c0cb3..aebcf8fe8787 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs @@ -50,6 +50,18 @@ internal static SqlCommand CreateTable( sb.AppendLine(); sb.AppendLine(");"); // end the table definition + foreach (var dataProperty in dataProperties) + { + if (dataProperty.IsFilterable) + { + sb.AppendFormat("CREATE INDEX "); + sb.AppendIndexName(tableName, GetColumnName(dataProperty)); + sb.AppendFormat(" ON ").AppendTableName(schema, tableName); + sb.AppendFormat("([{0}]);", GetColumnName(dataProperty)); + sb.AppendLine(); + } + } + foreach (var vectorProperty in vectorProperties) { switch (vectorProperty.IndexKind) @@ -472,6 +484,37 @@ private static StringBuilder AppendKeyParameterList(this StringBuilder sb, return sb; } + private static StringBuilder AppendIndexName(this StringBuilder sb, string tableName, string columnName) + { + int length = sb.Length; + + // "Index names must start with a letter or an underscore (_)." + sb.Append("index"); + sb.Append('_'); + AppendAllowedOnly(tableName); + sb.Append('_'); + AppendAllowedOnly(columnName); + + if (sb.Length > length + SqlServerConstants.MaxIndexNameLength) + { + sb.Length = length + SqlServerConstants.MaxIndexNameLength; + } + + return sb; + + void AppendAllowedOnly(string value) + { + foreach (char c in value) + { + // Index names can include letters, numbers, and underscores. 
+ if (char.IsLetterOrDigit(c) || c == '_') + { + sb.Append(c); + } + } + } + } + private static SqlCommand CreateCommand(this SqlConnection connection, StringBuilder sb) { SqlCommand command = connection.CreateCommand(); diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs index d8ce0f1354e7..3c8c49b663d4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs @@ -10,6 +10,8 @@ internal static class SqlServerConstants // The actual number is actually higher (2_100), but we want to avoid any kind of "off by one" errors. internal const int MaxParameterCount = 2_000; + internal const int MaxIndexNameLength = 128; + internal static readonly HashSet SupportedKeyTypes = [ typeof(int), // INT diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteColumn.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteColumn.cs index ae551cf65b2b..df9122f9c63d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteColumn.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteColumn.cs @@ -18,5 +18,7 @@ internal sealed class SqliteColumn( public bool IsPrimary { get; set; } = isPrimary; + public bool HasIndex { get; set; } + public Dictionary? Configuration { get; set; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs index 7f8090bd345d..6707bf482fed 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs @@ -44,6 +44,14 @@ public static DbCommand BuildCreateTableCommand(SqliteConnection connection, str builder.AppendLine(string.Join(",\n", columns.Select(GetColumnDefinition))); builder.Append(");"); + foreach (var column in columns) + { + if (column.HasIndex) + { + builder.AppendLine($"CREATE INDEX {(ifNotExists ? 
"IF NOT EXISTS " : string.Empty)}{tableName}_{column.Name}_index ON {tableName}({column.Name});"); + } + } + var command = connection.CreateCommand(); command.CommandText = builder.ToString(); diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs index e468d14c3e65..0cb9137394d7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs @@ -66,7 +66,8 @@ public static List GetColumns( var column = new SqliteColumn(propertyName, propertyType, isPrimary) { - Configuration = configuration + Configuration = configuration, + HasIndex = property is VectorStoreRecordDataProperty { IsFilterable: true } }; columns.Add(column); diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs index 60dd98f45e7a..8f0058a1c8ba 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs @@ -90,12 +90,12 @@ public void TestBuildCreateIndexCommand(string indexKind, string distanceFunctio if (indexKind != IndexKind.Hnsw) { - Assert.Throws(() => builder.BuildCreateVectorIndexCommand("public", "testcollection", vectorColumn, indexKind, distanceFunction, ifNotExists)); - Assert.Throws(() => builder.BuildCreateVectorIndexCommand("public", "testcollection", vectorColumn, indexKind, distanceFunction, ifNotExists)); + Assert.Throws(() => builder.BuildCreateIndexCommand("public", "testcollection", vectorColumn, indexKind, distanceFunction, true, ifNotExists)); + Assert.Throws(() => builder.BuildCreateIndexCommand("public", "testcollection", vectorColumn, indexKind, distanceFunction, true, ifNotExists)); return; } - var cmdInfo = builder.BuildCreateVectorIndexCommand("public", "1testcollection", vectorColumn, indexKind, distanceFunction, ifNotExists); + var cmdInfo = builder.BuildCreateIndexCommand("public", "1testcollection", vectorColumn, indexKind, distanceFunction, true, ifNotExists); // Check for expected properties; integration tests will validate the actual SQL. Assert.Contains("CREATE INDEX ", cmdInfo.CommandText); @@ -133,6 +133,22 @@ public void TestBuildCreateIndexCommand(string indexKind, string distanceFunctio this._output.WriteLine(cmdInfo.CommandText); } + [Theory] + [InlineData(true)] + [InlineData(false)] + public void TestBuildCreateNonVectorIndexCommand(bool ifNotExists) + { + var builder = new PostgresVectorStoreCollectionSqlBuilder(); + + var cmdInfo = builder.BuildCreateIndexCommand("schema", "tableName", "columnName", indexKind: "", distanceFunction: "", isVector: false, ifNotExists); + + var expectedCommandText = ifNotExists + ? 
"CREATE INDEX IF NOT EXISTS \"tableName_columnName_index\" ON schema.\"tableName\" (\"columnName\");" + : "CREATE INDEX \"tableName_columnName_index\" ON schema.\"tableName\" (\"columnName\");"; + + Assert.Equal(expectedCommandText, cmdInfo.CommandText); + } + [Fact] public void TestBuildDropTableCommand() { diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordPropertyMappingTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordPropertyMappingTests.cs index 0631cc2c0df4..1cccf9a5cf12 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordPropertyMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordPropertyMappingTests.cs @@ -101,32 +101,40 @@ public void GetPropertyValueReturnsCorrectNullableValue() } [Fact] - public void GetVectorIndexInfoReturnsCorrectValues() + public void GetIndexInfoReturnsCorrectValues() { // Arrange - List vectorProperties = [ + List vectorProperties = [ new VectorStoreRecordVectorProperty("vector1", typeof(ReadOnlyMemory?)) { IndexKind = IndexKind.Hnsw, Dimensions = 1000 }, new VectorStoreRecordVectorProperty("vector2", typeof(ReadOnlyMemory?)) { IndexKind = IndexKind.Flat, Dimensions = 3000 }, new VectorStoreRecordVectorProperty("vector3", typeof(ReadOnlyMemory?)) { IndexKind = IndexKind.Hnsw, Dimensions = 900, DistanceFunction = DistanceFunction.ManhattanDistance }, + new VectorStoreRecordDataProperty("data1", typeof(string)) { IsFilterable = true }, + new VectorStoreRecordDataProperty("data2", typeof(string)) { IsFilterable = false }, ]; // Act - var indexInfo = PostgresVectorStoreRecordPropertyMapping.GetVectorIndexInfo(vectorProperties); + var indexInfo = PostgresVectorStoreRecordPropertyMapping.GetIndexInfo(vectorProperties); // Assert - Assert.Equal(2, indexInfo.Count); - foreach (var (columnName, indexKind, distanceFunction) in indexInfo) + Assert.Equal(3, indexInfo.Count); + foreach (var (columnName, indexKind, distanceFunction, isVector) in indexInfo) { if (columnName == "vector1") { + Assert.True(isVector); Assert.Equal(IndexKind.Hnsw, indexKind); Assert.Equal(DistanceFunction.CosineDistance, distanceFunction); } else if (columnName == "vector3") { + Assert.True(isVector); Assert.Equal(IndexKind.Hnsw, indexKind); Assert.Equal(DistanceFunction.ManhattanDistance, distanceFunction); } + else if (columnName == "data1") + { + Assert.False(isVector); + } else { Assert.Fail("Unexpected column name"); @@ -142,6 +150,6 @@ public void GetVectorIndexInfoReturnsThrowsForInvalidDimensions(string indexKind var vectorProperty = new VectorStoreRecordVectorProperty("vector", typeof(ReadOnlyMemory?)) { IndexKind = indexKind, Dimensions = dimensions }; // Act & Assert - Assert.Throws(() => PostgresVectorStoreRecordPropertyMapping.GetVectorIndexInfo([vectorProperty])); + Assert.Throws(() => PostgresVectorStoreRecordPropertyMapping.GetIndexInfo([vectorProperty])); } } diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs index 19ec51b2f1a2..ddd47cce94de 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs @@ -58,7 +58,7 @@ public void GetColumnsReturnsCollectionOfColumns() var properties = new List() { new 
VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("Data", typeof(int)), + new VectorStoreRecordDataProperty("Data", typeof(int)) { IsFilterable = true }, new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { Dimensions = 4, DistanceFunction = DistanceFunction.ManhattanDistance }, }; @@ -77,16 +77,19 @@ public void GetColumnsReturnsCollectionOfColumns() Assert.Equal("TEXT", columns[0].Type); Assert.True(columns[0].IsPrimary); Assert.Null(columns[0].Configuration); + Assert.False(columns[0].HasIndex); Assert.Equal("my_data", columns[1].Name); Assert.Equal("INTEGER", columns[1].Type); Assert.False(columns[1].IsPrimary); Assert.Null(columns[1].Configuration); + Assert.True(columns[1].HasIndex); Assert.Equal("Vector", columns[2].Name); Assert.Equal("FLOAT[4]", columns[2].Type); Assert.False(columns[2].IsPrimary); Assert.NotNull(columns[2].Configuration); + Assert.False(columns[2].HasIndex); Assert.Equal("l1", columns[2].Configuration!["distance_metric"]); } diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs index c8ed9a0cdda1..4cdaea3c7a20 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs @@ -112,6 +112,9 @@ public void CreateTable(bool ifNotExists) [ new VectorStoreRecordDataProperty("simpleName", typeof(string)), new VectorStoreRecordDataProperty("with space", typeof(int)) + { + IsFilterable = true + } ]; VectorStoreRecordVectorProperty[] vectorProperties = [ @@ -135,6 +138,7 @@ [simpleName] NVARCHAR(MAX), [embedding] VECTOR(10), PRIMARY KEY ([id]) ); + CREATE INDEX index_table_withspace ON [schema].[table]([with space]); END; """; if (ifNotExists) From d4742a37c9bdc27c581c65bf1000386529b28428 Mon Sep 17 00:00:00 2001 From: Devis Lucato Date: Fri, 28 Mar 2025 10:18:08 -0700 Subject: [PATCH 21/63] .Net: Fix InMemory collection deletion. 
(#11256) --- .../InMemoryVectorStoreRecordCollection.cs | 1 + 1 file changed, 1 insertion(+) diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index 6fbcdf2633bf..d0bd5bd309c9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -141,6 +141,7 @@ public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellatio public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { this._internalCollections.TryRemove(this._collectionName, out _); + this._internalCollectionTypes.TryRemove(this._collectionName, out _); return Task.CompletedTask; } From 15a29314f5621ceddad29dd0807371186a68b9dd Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Fri, 28 Mar 2025 20:42:13 +0000 Subject: [PATCH 22/63] .Net: Preb2 fix obsolete warnings (#11268) ### Motivation and Context ### Description ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- .../Connectors.Pinecone.UnitTests/PineconeClientTests.cs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeClientTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeClientTests.cs index 0426ccff1765..eaf0a040a100 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeClientTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeClientTests.cs @@ -18,6 +18,7 @@ public sealed class PineconeClientTests [InlineData("//bypass.com")] [InlineData("javascript:alert(1)")] [InlineData("data:text/html,")] + [Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public void ItThrowsOnEnvironmentUrlInjectionAttempt(string maliciousEnvironment) { // Arrange & Act & Assert @@ -37,6 +38,7 @@ public void ItThrowsOnEnvironmentUrlInjectionAttempt(string maliciousEnvironment [InlineData("asia-southeast-1-pncn")] [InlineData("eu-west-1-pncn")] [InlineData("northamerica-northeast1-pncn")] + [Obsolete("The IMemoryStore abstraction is being obsoleted, use Microsoft.Extensions.VectorData and PineconeVectorStore")] public void ItAcceptsValidEnvironmentNames(string validEnvironment) { // Arrange & Act & Assert From 5ce4285eed9cd7f4723234bc7ba7783e422f1919 Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Sun, 30 Mar 2025 16:07:30 +0200 Subject: [PATCH 23/63] .Net: Start decoupling MEVD providers from SK (#11076) Part of #10855 --- .../AzureAISearchKernelBuilderExtensions.cs | 1 + .../AzureAISearchVectorStore.cs | 2 +- .../Connectors.Memory.AzureAISearch.csproj | 9 +++++- ...eCosmosDBMongoDBKernelBuilderExtensions.cs | 2 ++ ...nectors.Memory.AzureCosmosDBMongoDB.csproj | 8 +++++ ...ureCosmosDBNoSQLKernelBuilderExtensions.cs | 2 ++ ...onnectors.Memory.AzureCosmosDBNoSQL.csproj | 7 +++++ .../Connectors.Memory.InMemory.csproj | 7 +++++ 
.../InMemoryKernelBuilderExtensions.cs | 2 ++ .../Connectors.Memory.MongoDB.csproj | 7 +++++ .../Connectors.Memory.Pinecone.csproj | 7 +++++ .../PineconeKernelBuilderExtensions.cs | 2 ++ .../Connectors.Memory.Postgres.csproj | 4 +++ .../IPostgresVectorStoreDbClient.cs | 2 +- .../PostgresVectorStoreDbClient.cs | 2 +- .../Connectors.Memory.Qdrant.csproj | 9 +++++- .../QdrantKernelBuilderExtensions.cs | 2 ++ .../Connectors.Memory.Redis.csproj | 9 +++++- .../Connectors.Memory.SqlServer.csproj | 4 +++ .../Connectors.Memory.Sqlite.csproj | 4 +++ .../Connectors.Memory.Weaviate.csproj | 7 +++++ .../WeaviateConstants.cs | 3 ++ .../WeaviateKernelBuilderExtensions.cs | 2 ++ .../WeaviateVectorStore.cs | 24 +++++++++++---- .../WeaviateVectorStoreRecordCollection.cs | 30 +++++++++---------- .../VectorData.Abstractions.csproj | 10 +++---- .../SqliteServiceCollectionExtensionsTests.cs | 2 -- .../Memory/Sqlite/SqliteVectorStoreFixture.cs | 3 -- .../src/Http/HttpContentExtensions.cs | 12 -------- .../src/Http/HttpContentPolyfills.cs | 25 ++++++++++++++++ .../src/RestrictedInternalUtilities.props | 22 ++++++++++++++ 31 files changed, 182 insertions(+), 50 deletions(-) create mode 100644 dotnet/src/InternalUtilities/src/Http/HttpContentPolyfills.cs create mode 100644 dotnet/src/InternalUtilities/src/RestrictedInternalUtilities.props diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs index 5096c1486f1f..5ebd018354be 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs @@ -12,6 +12,7 @@ namespace Microsoft.SemanticKernel; /// /// Extension methods to register Azure AI Search instances on the . /// +[Obsolete("The IKernelBuilder extensions are being obsoleted, call the appropriate function on the Services property of your IKernelBuilder")] public static class AzureAISearchKernelBuilderExtensions { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs index 5329cdf3cee4..409c36d1e05a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs @@ -85,7 +85,7 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat /// /// Helper method to get the next index name from the enumerator with a try catch around the move next call to convert - /// any to , since try catch is not supported + /// any to , since try catch is not supported /// around a yield return. /// /// The enumerator to get the next result from. 
diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/Connectors.Memory.AzureAISearch.csproj b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/Connectors.Memory.AzureAISearch.csproj index 9b4aa8a97866..f4036fe33e1e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/Connectors.Memory.AzureAISearch.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/Connectors.Memory.AzureAISearch.csproj @@ -10,6 +10,7 @@ + @@ -19,15 +20,21 @@ + + + + + + + - diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBKernelBuilderExtensions.cs index af73629568ec..d4e77e583ff3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBKernelBuilderExtensions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; using MongoDB.Driver; @@ -9,6 +10,7 @@ namespace Microsoft.SemanticKernel; /// /// Extension methods to register Azure CosmosDB MongoDB instances on the . /// +[Obsolete("The IKernelBuilder extensions are being obsoleted, call the appropriate function on the Services property of your IKernelBuilder")] public static class AzureCosmosDBMongoDBKernelBuilderExtensions { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/Connectors.Memory.AzureCosmosDBMongoDB.csproj b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/Connectors.Memory.AzureCosmosDBMongoDB.csproj index 8167b955e13c..8e7eda4105ac 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/Connectors.Memory.AzureCosmosDBMongoDB.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/Connectors.Memory.AzureCosmosDBMongoDB.csproj @@ -11,6 +11,7 @@ + @@ -31,6 +32,13 @@ + + + + + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKernelBuilderExtensions.cs index 12f7c0118538..1e4905af255b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKernelBuilderExtensions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Azure.Cosmos; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; @@ -9,6 +10,7 @@ namespace Microsoft.SemanticKernel; /// /// Extension methods to register Azure CosmosDB NoSQL instances on the . 
/// +[Obsolete("The IKernelBuilder extensions are being obsoleted, call the appropriate function on the Services property of your IKernelBuilder")] public static class AzureCosmosDBNoSQLKernelBuilderExtensions { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/Connectors.Memory.AzureCosmosDBNoSQL.csproj b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/Connectors.Memory.AzureCosmosDBNoSQL.csproj index b4d0a9ba609b..606b8a2fe866 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/Connectors.Memory.AzureCosmosDBNoSQL.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/Connectors.Memory.AzureCosmosDBNoSQL.csproj @@ -12,6 +12,7 @@ + @@ -25,6 +26,12 @@ + + + + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/Connectors.Memory.InMemory.csproj b/dotnet/src/Connectors/Connectors.Memory.InMemory/Connectors.Memory.InMemory.csproj index 4a630b90b5bb..68455976f42f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/Connectors.Memory.InMemory.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/Connectors.Memory.InMemory.csproj @@ -10,6 +10,7 @@ + @@ -27,6 +28,12 @@ + + + + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryKernelBuilderExtensions.cs index 85311ceba4fb..65c754b13a3f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryKernelBuilderExtensions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.InMemory; @@ -8,6 +9,7 @@ namespace Microsoft.SemanticKernel; /// /// Extension methods to register Data services on the . /// +[Obsolete("The IKernelBuilder extensions are being obsoleted, call the appropriate function on the Services property of your IKernelBuilder")] public static class InMemoryKernelBuilderExtensions { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/Connectors.Memory.MongoDB.csproj b/dotnet/src/Connectors/Connectors.Memory.MongoDB/Connectors.Memory.MongoDB.csproj index 3498c97b1130..bc85cd441115 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/Connectors.Memory.MongoDB.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/Connectors.Memory.MongoDB.csproj @@ -11,6 +11,7 @@ + @@ -27,6 +28,12 @@ + + + + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj index 171643fece44..e085ae887d7e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj @@ -11,6 +11,7 @@ + @@ -20,6 +21,11 @@ + + + + + @@ -29,6 +35,7 @@ + diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs index 50048c8dfa6f..ed6dfc37033a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
+using System; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Pinecone; using Sdk = Pinecone; @@ -9,6 +10,7 @@ namespace Microsoft.SemanticKernel; /// /// Extension methods to register Pinecone instances on the . /// +[Obsolete("The IKernelBuilder extensions are being obsoleted, call the appropriate function on the Services property of your IKernelBuilder")] public static class PineconeKernelBuilderExtensions { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj b/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj index 10c7683a8eb6..17398d17217e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj @@ -12,6 +12,7 @@ + @@ -24,12 +25,15 @@ + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs index 020aa46dbda6..e679ef030a03 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs @@ -129,7 +129,7 @@ internal interface IPostgresVectorStoreDbClient /// The number of entries to skip. /// If true, the vectors will be returned in the entries. /// The to monitor for cancellation requests. The default is . - /// An asynchronous stream of objects that the nearest matches to the . + /// An asynchronous stream of result objects that the nearest matches to the . #pragma warning disable CS0618 // VectorSearchFilter is obsolete IAsyncEnumerable<(Dictionary Row, double Distance)> GetNearestMatchesAsync(string tableName, VectorStoreRecordPropertyReader propertyReader, VectorStoreRecordVectorProperty vectorProperty, Vector vectorValue, int limit, VectorSearchFilter? legacyFilter = default, Expression>? newFilter = default, int? skip = default, bool includeVectors = false, CancellationToken cancellationToken = default); diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs index cbdd55fd97a4..6efc7aa8f037 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs @@ -17,7 +17,7 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; /// An implementation of a client for Postgres. This class is used to managing postgres database operations. /// /// -/// Initializes a new instance of the class. +/// Initializes a new instance of the class. /// /// Postgres data source. /// Schema of collection tables. 
diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Connectors.Memory.Qdrant.csproj b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Connectors.Memory.Qdrant.csproj index adbf015199e2..499c656ad41c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Connectors.Memory.Qdrant.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Connectors.Memory.Qdrant.csproj @@ -11,6 +11,7 @@ + @@ -19,16 +20,22 @@ - + + + + + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantKernelBuilderExtensions.cs index c8dd0b6070b9..2f71d42ab074 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantKernelBuilderExtensions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Qdrant; using Qdrant.Client; @@ -9,6 +10,7 @@ namespace Microsoft.SemanticKernel; /// /// Extension methods to register Qdrant instances on the . /// +[Obsolete("The IKernelBuilder extensions are being obsoleted, call the appropriate function on the Services property of your IKernelBuilder")] public static class QdrantKernelBuilderExtensions { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj b/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj index 61f91e1cae6d..3c9e83f89056 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj @@ -11,6 +11,7 @@ + @@ -19,15 +20,21 @@ - + + + + + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/Connectors.Memory.SqlServer.csproj b/dotnet/src/Connectors/Connectors.Memory.SqlServer/Connectors.Memory.SqlServer.csproj index 0f2cdabb6d2d..5afb4f42b560 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/Connectors.Memory.SqlServer.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/Connectors.Memory.SqlServer.csproj @@ -11,6 +11,7 @@ + @@ -23,10 +24,13 @@ + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj index 056f868262c5..3e35c126dfc8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj @@ -11,6 +11,7 @@ + @@ -23,11 +24,14 @@ + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj index a7ab0b153735..1fb9a74fbe3c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj @@ -11,6 +11,7 @@ + @@ -19,10 +20,16 @@ + + + + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs index a260b4e9fc2c..3bb4b18c8991 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs @@ -4,6 +4,9 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; internal sealed class 
WeaviateConstants { + /// The name of this database for telemetry purposes. + public const string DatabaseName = "Weaviate"; + /// Reserved key property name in Weaviate. internal const string ReservedKeyPropertyName = "id"; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateKernelBuilderExtensions.cs index 45c320df959c..0d150619a82d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateKernelBuilderExtensions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Net.Http; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Weaviate; @@ -9,6 +10,7 @@ namespace Microsoft.SemanticKernel; /// /// Extension methods to register Weaviate instances on the . /// +[Obsolete("The IKernelBuilder extensions are being obsoleted, call the appropriate function on the Services property of your IKernelBuilder")] public static class WeaviateKernelBuilderExtensions { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs index 1c45d1e3ac65..6df456e872b2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs @@ -7,7 +7,6 @@ using System.Text.Json; using System.Threading; using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Http; namespace Microsoft.SemanticKernel.Connectors.Weaviate; @@ -79,14 +78,27 @@ public virtual IVectorStoreRecordCollection GetCollection ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { using var request = new WeaviateGetCollectionsRequest().Build(); + WeaviateGetCollectionsResponse collectionsResponse; - var response = await this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); - var responseContent = await response.Content.ReadAsStringWithExceptionMappingAsync(cancellationToken).ConfigureAwait(false); - var collectionResponse = JsonSerializer.Deserialize(responseContent); + try + { + var httpResponse = await this._httpClient.SendAsync(request, HttpCompletionOption.ResponseContentRead, cancellationToken).ConfigureAwait(false); + var httpResponseContent = await httpResponse.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + + collectionsResponse = JsonSerializer.Deserialize(httpResponseContent)!; + } + catch (Exception e) + { + throw new VectorStoreOperationException("Call to vector store failed.", e) + { + VectorStoreType = WeaviateConstants.DatabaseName, + OperationName = "ListCollectionNames" + }; + } - if (collectionResponse?.Collections is not null) + if (collectionsResponse?.Collections is not null) { - foreach (var collection in collectionResponse.Collections) + foreach (var collection in collectionsResponse.Collections) { yield return collection.CollectionName; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index 393b9a841cbb..42b1e56fe4ce 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ 
b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -12,7 +12,6 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Http; namespace Microsoft.SemanticKernel.Connectors.Weaviate; @@ -24,9 +23,6 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; public class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { - /// The name of this database for telemetry purposes. - private const string DatabaseName = "Weaviate"; - /// A set of types that a key on the provided model may have. private static readonly HashSet s_supportedKeyTypes = [ @@ -270,7 +266,7 @@ public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken c } return VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + WeaviateConstants.DatabaseName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(jsonObject!, new() { IncludeVectors = includeVectors })); @@ -312,7 +308,7 @@ public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable { var jsonObjects = records.Select(record => VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + WeaviateConstants.DatabaseName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record))).ToList(); @@ -409,7 +405,7 @@ private async Task> ExecuteQueryAsync(string query, { throw new VectorStoreOperationException($"Error occurred during vector search. Response: {content}") { - VectorStoreType = DatabaseName, + VectorStoreType = WeaviateConstants.DatabaseName, CollectionName = this.CollectionName, OperationName = operationName }; @@ -420,7 +416,7 @@ private async Task> ExecuteQueryAsync(string query, var (storageModel, score) = WeaviateVectorStoreCollectionSearchMapping.MapSearchResult(result!, scorePropertyName); var record = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + WeaviateConstants.DatabaseName, this.CollectionName, operationName, () => this._mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors })); @@ -440,14 +436,14 @@ private Task ExecuteRequestAsync(HttpRequestMessage request request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", this._apiKey); } - return this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken); + return this._httpClient.SendAsync(request, HttpCompletionOption.ResponseContentRead, cancellationToken); } private async Task<(TResponse?, string)> ExecuteRequestWithResponseContentAsync(HttpRequestMessage request, CancellationToken cancellationToken) { var response = await this.ExecuteRequestAsync(request, cancellationToken).ConfigureAwait(false); - var responseContent = await response.Content.ReadAsStringWithExceptionMappingAsync(cancellationToken).ConfigureAwait(false); + var responseContent = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); var responseModel = JsonSerializer.Deserialize(responseContent, s_jsonSerializerOptions); @@ -463,14 +459,16 @@ private Task ExecuteRequestAsync(HttpRequestMessage request private async Task ExecuteRequestWithNotFoundHandlingAsync(HttpRequestMessage request, CancellationToken cancellationToken) { - try - { - return await this.ExecuteRequestAsync(request, cancellationToken).ConfigureAwait(false); - } - catch (HttpOperationException ex) when (ex.StatusCode == HttpStatusCode.NotFound) + var response = 
await this.ExecuteRequestAsync(request, cancellationToken).ConfigureAwait(false); + if (response.StatusCode == HttpStatusCode.NotFound) { return default; } + + var responseContent = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + var responseModel = JsonSerializer.Deserialize(responseContent, s_jsonSerializerOptions); + + return responseModel; } private async Task RunOperationAsync(string operationName, Func> operation) @@ -483,7 +481,7 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = WeaviateConstants.DatabaseName, CollectionName = this.CollectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj b/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj index f1dc235aa5bd..873fc5d455b6 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj @@ -30,17 +30,17 @@ Microsoft.Extensions.VectorData.IVectorStoreRecordCollection<TKey, TRecord> https://dot.net/ - + + + + + - - - - diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs index bfded601d8ec..7c6badf59820 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs @@ -1,7 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Data; -using Microsoft.Data.Sqlite; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel; diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs index c3a702c5a7c0..c62d07b62041 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs @@ -2,10 +2,7 @@ using System; using System.IO; -using System.Threading.Tasks; -using Microsoft.Data.Sqlite; using Microsoft.SemanticKernel.Connectors.Sqlite; -using Xunit; namespace SemanticKernel.IntegrationTests.Connectors.Memory.Sqlite; diff --git a/dotnet/src/InternalUtilities/src/Http/HttpContentExtensions.cs b/dotnet/src/InternalUtilities/src/Http/HttpContentExtensions.cs index 51d9acf0509d..dd7ac895b984 100644 --- a/dotnet/src/InternalUtilities/src/Http/HttpContentExtensions.cs +++ b/dotnet/src/InternalUtilities/src/Http/HttpContentExtensions.cs @@ -24,11 +24,7 @@ public static async Task ReadAsStringWithExceptionMappingAsync(this Http { try { -#if NET5_0_OR_GREATER return await httpContent.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); -#else - return await httpContent.ReadAsStringAsync().ConfigureAwait(false); -#endif } catch (HttpRequestException ex) { @@ -46,11 +42,7 @@ public static async Task ReadAsStreamAndTranslateExceptionAsync(this Htt { try { -#if NET5_0_OR_GREATER return await httpContent.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); -#else - return await httpContent.ReadAsStreamAsync().ConfigureAwait(false); -#endif } catch (HttpRequestException ex) { @@ -68,11 +60,7 @@ public static async Task ReadAsByteArrayAndTranslateExceptionAsync(this { try { -#if NET5_0_OR_GREATER return await httpContent.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); -#else - return await httpContent.ReadAsByteArrayAsync().ConfigureAwait(false); -#endif } catch (HttpRequestException ex) { diff --git a/dotnet/src/InternalUtilities/src/Http/HttpContentPolyfills.cs b/dotnet/src/InternalUtilities/src/Http/HttpContentPolyfills.cs new file mode 100644 index 000000000000..aea223102298 --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Http/HttpContentPolyfills.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +#if !NET5_0_OR_GREATER + +using System.Diagnostics.CodeAnalysis; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace System.Net.Http; + +[ExcludeFromCodeCoverage] +internal static class HttpContentPolyfills +{ + internal static Task ReadAsStringAsync(this HttpContent httpContent, CancellationToken cancellationToken) + => httpContent.ReadAsStringAsync(); + + internal static Task ReadAsStreamAsync(this HttpContent httpContent, CancellationToken cancellationToken) + => httpContent.ReadAsStreamAsync(); + + internal static Task ReadAsByteArrayAsync(this HttpContent httpContent, CancellationToken cancellationToken) + => httpContent.ReadAsByteArrayAsync(); +} + +#endif diff --git a/dotnet/src/InternalUtilities/src/RestrictedInternalUtilities.props b/dotnet/src/InternalUtilities/src/RestrictedInternalUtilities.props new file mode 100644 index 000000000000..f4304f16a9be --- /dev/null +++ b/dotnet/src/InternalUtilities/src/RestrictedInternalUtilities.props @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file From 77c83c50cc1268ae453685ef597bb75d3b3d7d3c Mon Sep 17 00:00:00 2001 From: Adam Sitnik Date: Mon, 31 Mar 2025 13:58:07 +0200 Subject: [PATCH 24/63] .Net MEVD: More conformance tests coverage (#11257) contributes to https://github.com/microsoft/semantic-kernel/issues/10194 --------- Co-authored-by: Shay Rojansky --- .../AzureAISearchVectorStoreRecordCollection.cs | 8 ++++++++ .../AzureAISearchIntegrationTests.csproj | 1 + .../CRUD/AzureAISearchBatchConformanceTests.cs | 12 ++++++++++++ .../CRUD/AzureAISearchRecordConformanceTests.cs | 12 ++++++++++++ .../Support/AzureAISearchSimpleModelFixture.cs | 10 ++++++++++ .../Support/AzureAISearchTestEnvironment.cs | 2 +- .../Support/AzureAISearchTestStore.cs | 10 +++++++--- .../Support/AzureAISearchUrlRequiredAttribute.cs | 2 +- .../CRUD/CosmosMongoDBBatchConformanceTests.cs | 12 ++++++++++++ .../CRUD/CosmosMongoDBRecordConformanceTests.cs | 12 ++++++++++++ .../Support/CosmosMongoDBSimpleModelFixture.cs | 14 ++++++++++++++ .../CRUD/MongoDBBatchConformanceTests.cs | 12 ++++++++++++ .../CRUD/MongoDBRecordConformanceTests.cs | 12 ++++++++++++ .../Support/MongoDBSimpleModelFixture.cs | 10 ++++++++++ 14 files changed, 124 insertions(+), 5 deletions(-) create mode 100644 dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchBatchConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchRecordConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchSimpleModelFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBBatchConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBRecordConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBSimpleModelFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBBatchConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBRecordConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Support/MongoDBSimpleModelFixture.cs diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs 
b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index eda71258ef24..9103cd5f2a2d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -293,6 +293,10 @@ public virtual Task DeleteAsync(string key, CancellationToken cancellationToken public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); + if (!keys.Any()) + { + return Task.CompletedTask; + } // Remove records. return this.RunOperationAsync( @@ -317,6 +321,10 @@ public virtual async Task UpsertAsync(TRecord record, CancellationToken public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); + if (!records.Any()) + { + yield break; + } // Create Options var innerOptions = new IndexDocumentsOptions { ThrowOnAnyError = true }; diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/AzureAISearchIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/AzureAISearchIntegrationTests.csproj index 688796758267..1572f6821fd8 100644 --- a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/AzureAISearchIntegrationTests.csproj +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/AzureAISearchIntegrationTests.csproj @@ -17,6 +17,7 @@ all + diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchBatchConformanceTests.cs new file mode 100644 index 000000000000..4da84cc7e99a --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchBatchConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using AzureAISearchIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace AzureAISearchIntegrationTests.CRUD; + +public class AzureAISearchBatchConformanceTests(AzureAISearchSimpleModelFixture fixture) + : BatchConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchRecordConformanceTests.cs new file mode 100644 index 000000000000..49b8b88b4b5f --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchRecordConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. 
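Regarding the empty-batch guards added to `DeleteBatchAsync` and `UpsertBatchAsync` above, a compact self-contained sketch of the same pattern (hypothetical types and placeholder service calls, not the connector code): an empty input sequence short-circuits before any request is issued.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;

internal sealed class BatchGuardSketch
{
    public Task DeleteBatchAsync(IEnumerable<string> keys, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(keys);

        if (!keys.Any())
        {
            return Task.CompletedTask; // nothing to send for an empty batch
        }

        return this.SendDeleteRequestAsync(keys, cancellationToken);
    }

    public async IAsyncEnumerable<string> UpsertBatchAsync(
        IEnumerable<string> records,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(records);

        if (!records.Any())
        {
            yield break; // empty input yields an empty result stream
        }

        foreach (var record in records)
        {
            cancellationToken.ThrowIfCancellationRequested();
            await Task.Yield(); // placeholder for the real upsert call
            yield return record;
        }
    }

    private Task SendDeleteRequestAsync(IEnumerable<string> keys, CancellationToken cancellationToken)
        => Task.CompletedTask; // placeholder for the real service call
}
```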
+ +using AzureAISearchIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace AzureAISearchIntegrationTests.CRUD; + +public class AzureAISearchRecordConformanceTests(AzureAISearchSimpleModelFixture fixture) + : RecordConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchSimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchSimpleModelFixture.cs new file mode 100644 index 000000000000..fd26563901ff --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchSimpleModelFixture.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Support; + +namespace AzureAISearchIntegrationTests.Support; + +public class AzureAISearchSimpleModelFixture : SimpleModelFixture +{ + public override TestStore TestStore => AzureAISearchTestStore.Instance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestEnvironment.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestEnvironment.cs index 27e905656870..cad8632f873b 100644 --- a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestEnvironment.cs +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestEnvironment.cs @@ -10,7 +10,7 @@ internal static class AzureAISearchTestEnvironment { public static readonly string? ServiceUrl, ApiKey; - public static bool IsConnectionInfoDefined => ServiceUrl is not null && ApiKey is not null; + public static bool IsConnectionInfoDefined => ServiceUrl is not null; static AzureAISearchTestEnvironment() { diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestStore.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestStore.cs index 791005d55c9a..75f07161ba6f 100644 --- a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestStore.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using Azure; +using Azure.Identity; using Azure.Search.Documents.Indexes; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.AzureAISearch; @@ -32,12 +33,15 @@ protected override Task StartAsync() { (string? serviceUrl, string? apiKey) = (AzureAISearchTestEnvironment.ServiceUrl, AzureAISearchTestEnvironment.ApiKey); - if (string.IsNullOrWhiteSpace(serviceUrl) || string.IsNullOrWhiteSpace(apiKey)) + if (string.IsNullOrWhiteSpace(serviceUrl)) { - throw new InvalidOperationException("Service URL and API key are not configured, set AzureAISearch:ServiceUrl and AzureAISearch:ApiKey"); + throw new InvalidOperationException("Service URL is not configured, set AzureAISearch:ServiceUrl (and AzureAISearch:ApiKey if you want)"); } - this._client = new SearchIndexClient(new Uri(serviceUrl), new AzureKeyCredential(apiKey)); + this._client = string.IsNullOrWhiteSpace(apiKey) + ? 
new SearchIndexClient(new Uri(serviceUrl), new DefaultAzureCredential()) + : new SearchIndexClient(new Uri(serviceUrl), new AzureKeyCredential(apiKey)); + this._defaultVectorStore = new(this._client); return Task.CompletedTask; diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchUrlRequiredAttribute.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchUrlRequiredAttribute.cs index 1b30639bc1be..9b2fb5a9b223 100644 --- a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchUrlRequiredAttribute.cs +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchUrlRequiredAttribute.cs @@ -12,7 +12,7 @@ public sealed class AzureAISearchUrlRequiredAttribute : Attribute, ITestConditio { public ValueTask IsMetAsync() => new(AzureAISearchTestEnvironment.IsConnectionInfoDefined); - public string Skip { get; set; } = "Service URL and API key are not configured, set AzureAISearch:ServiceUrl and AzureAISearch:ApiKey."; + public string Skip { get; set; } = "Service URL is not configured, set AzureAISearch:ServiceUrl (and AzureAISearch:ApiKey if you don't use managed identity)."; public string SkipReason => this.Skip; diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBBatchConformanceTests.cs new file mode 100644 index 000000000000..bf5dbe318eac --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBBatchConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using CosmosMongoDBIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace CosmosMongoDBIntegrationTests.CRUD; + +public class CosmosMongoDBBatchConformanceTests(CosmosMongoDBSimpleModelFixture fixture) + : BatchConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBRecordConformanceTests.cs new file mode 100644 index 000000000000..a7f89cfc05c6 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBRecordConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using CosmosMongoDBIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace CosmosMongoDBIntegrationTests.CRUD; + +public class CosmosMongoDBRecordConformanceTests(CosmosMongoDBSimpleModelFixture fixture) + : RecordConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBSimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBSimpleModelFixture.cs new file mode 100644 index 000000000000..90c0e3efc659 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBSimpleModelFixture.cs @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft. All rights reserved. 
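To spell out the credential fallback introduced above for the Azure AI Search test store, a brief sketch (the factory class is hypothetical; the Azure SDK constructors are the standard ones): when no API key is configured, authentication falls back to `DefaultAzureCredential`.

```csharp
using System;
using Azure;
using Azure.Identity;
using Azure.Search.Documents.Indexes;

internal static class SearchIndexClientFactory
{
    // Key-based auth when an API key is available; otherwise Entra ID auth
    // (managed identity, Azure CLI, Visual Studio, ...) via DefaultAzureCredential.
    internal static SearchIndexClient Create(string serviceUrl, string? apiKey)
        => string.IsNullOrWhiteSpace(apiKey)
            ? new SearchIndexClient(new Uri(serviceUrl), new DefaultAzureCredential())
            : new SearchIndexClient(new Uri(serviceUrl), new AzureKeyCredential(apiKey));
}
```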
+ +using VectorDataSpecificationTests.Support; + +namespace CosmosMongoDBIntegrationTests.Support; + +public class CosmosMongoDBSimpleModelFixture : SimpleModelFixture +{ + public override TestStore TestStore => CosmosMongoDBTestStore.Instance; + + protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.IvfFlat; + + protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBBatchConformanceTests.cs new file mode 100644 index 000000000000..67ea1ee7e2c4 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBBatchConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using MongoDBIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace MongoDBIntegrationTests.CRUD; + +public class MongoDBBatchConformanceTests(MongoDBSimpleModelFixture fixture) + : BatchConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBRecordConformanceTests.cs new file mode 100644 index 000000000000..22d642b7c16a --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBRecordConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using MongoDBIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace MongoDBIntegrationTests.CRUD; + +public class MongoDBRecordConformanceTests(MongoDBSimpleModelFixture fixture) + : RecordConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Support/MongoDBSimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Support/MongoDBSimpleModelFixture.cs new file mode 100644 index 000000000000..143f5497d7cf --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Support/MongoDBSimpleModelFixture.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Support; + +namespace MongoDBIntegrationTests.Support; + +public class MongoDBSimpleModelFixture : SimpleModelFixture +{ + public override TestStore TestStore => MongoDBTestStore.Instance; +} From bfee7495f8e13ada4319e7209fc78be6417f5e0d Mon Sep 17 00:00:00 2001 From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com> Date: Tue, 1 Apr 2025 12:34:05 -0700 Subject: [PATCH 25/63] .Net: [MEVD] Temporarily remove logging (#11316) ### Motivation and Context Temporarily remove logging to add it back later when API is stable. 
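Since the built-in logging decorators are being removed, a minimal sketch of how an application can keep per-operation logging in the meantime (an illustrative extension method in user code, not part of this PR; it assumes the `UpsertAsync(record, cancellationToken)` signature introduced earlier in this series):

```csharp
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.VectorData;

public static class VectorStoreLoggingExtensions
{
    // Wraps a single upsert with "invoked"/"completed" messages, mirroring the
    // output produced by the removed LoggingVectorStoreRecordCollection decorator.
    public static async Task<TKey> UpsertWithLoggingAsync<TKey, TRecord>(
        this IVectorStoreRecordCollection<TKey, TRecord> collection,
        TRecord record,
        ILogger logger,
        CancellationToken cancellationToken = default)
        where TKey : notnull
    {
        logger.LogInformation("UpsertAsync invoked.");
        var key = await collection.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
        logger.LogInformation("UpsertAsync completed.");
        return key;
    }
}
```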
### Description ### Contribution Checklist - [x] The code builds clean without any errors or warnings - [x] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [x] All unit tests pass, and I have added new tests where possible - [x] I didn't break anyone :smile: --- dotnet/SK-dotnet.sln | 26 +- dotnet/samples/Concepts/Concepts.csproj | 1 - .../Concepts/Memory/VectorStore_Telemetry.cs | 180 ------------- .../VectorData.UnitTests/.editorconfig | 6 - .../VectorData.UnitTests.csproj | 41 --- ...ywordHybridSearchBuilderExtensionsTests.cs | 24 -- ...BuilderServiceCollectionExtensionsTests.cs | 96 ------- .../KeywordHybridSearchBuilderTests.cs | 87 ------- ...ywordHybridSearchBuilderExtensionsTests.cs | 64 ----- .../LoggingKeywordHybridSearchTests.cs | 60 ----- ...rizableTextSearchBuilderExtensionsTests.cs | 64 ----- .../LoggingVectorizableTextSearchTests.cs | 56 ---- ...gVectorizedSearchBuilderExtensionsTests.cs | 64 ----- .../LoggingVectorizedSearchTests.cs | 56 ---- ...rizableTextSearchBuilderExtensionsTests.cs | 24 -- ...BuilderServiceCollectionExtensionsTests.cs | 96 ------- .../VectorizableTextSearchBuilderTests.cs | 87 ------- .../VectorizedSearchBuilderExtensionsTests.cs | 24 -- ...BuilderServiceCollectionExtensionsTests.cs | 96 ------- .../VectorizedSearchBuilderTests.cs | 87 ------- ...oggingVectorStoreBuilderExtensionsTests.cs | 64 ----- ...eRecordCollectionBuilderExtensionsTests.cs | 64 ----- ...LoggingVectorStoreRecordCollectionTests.cs | 245 ------------------ .../VectorStorage/LoggingVectorStoreTests.cs | 72 ----- .../VectorStoreBuilderExtensionsTests.cs | 24 -- ...BuilderServiceCollectionExtensionsTests.cs | 96 ------- .../VectorStorage/VectorStoreBuilderTests.cs | 87 ------- ...eRecordCollectionBuilderExtensionsTests.cs | 24 -- ...BuilderServiceCollectionExtensionsTests.cs | 96 ------- ...VectorStoreRecordCollectionBuilderTests.cs | 87 ------- dotnet/src/Connectors/VectorData/PACKAGE.md | 40 --- .../Connectors/VectorData/VectorData.csproj | 62 ----- .../KeywordHybridSearchBuilder.cs | 87 ------- .../KeywordHybridSearchBuilderExtensions.cs | 25 -- ...earchBuilderServiceCollectionExtensions.cs | 89 ------- .../LoggingKeywordHybridSearch.cs | 47 ---- ...ingKeywordHybridSearchBuilderExtensions.cs | 41 --- .../LoggingVectorizableTextSearch.cs | 46 ---- ...VectorizableTextSearchBuilderExtensions.cs | 41 --- .../VectorSearch/LoggingVectorizedSearch.cs | 46 ---- ...oggingVectorizedSearchBuilderExtensions.cs | 41 --- .../VectorizableTextSearchBuilder.cs | 87 ------- ...VectorizableTextSearchBuilderExtensions.cs | 25 -- ...earchBuilderServiceCollectionExtensions.cs | 89 ------- .../VectorSearch/VectorizedSearchBuilder.cs | 87 ------- .../VectorizedSearchBuilderExtensions.cs | 25 -- ...earchBuilderServiceCollectionExtensions.cs | 89 ------- .../VectorStorage/LoggingVectorStore.cs | 53 ---- .../LoggingVectorStoreBuilderExtensions.cs | 41 --- .../LoggingVectorStoreRecordCollection.cs | 144 ---------- ...rStoreRecordCollectionBuilderExtensions.cs | 41 --- .../VectorStorage/VectorStoreBuilder.cs | 87 ------- .../VectorStoreBuilderExtensions.cs | 25 -- ...StoreBuilderServiceCollectionExtensions.cs | 89 ------- .../VectorStoreRecordCollectionBuilder.cs | 87 ------- ...rStoreRecordCollectionBuilderExtensions.cs | 25 -- ...ctionBuilderServiceCollectionExtensions.cs | 89 
------- dotnet/src/Connectors/VectorData/neticon.png | Bin 7006 -> 0 bytes 58 files changed, 2 insertions(+), 3744 deletions(-) delete mode 100644 dotnet/samples/Concepts/Memory/VectorStore_Telemetry.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/.editorconfig delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchBuilderExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchBuilderExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchBuilderExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreBuilderExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderServiceCollectionExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderTests.cs delete mode 100644 dotnet/src/Connectors/VectorData/PACKAGE.md delete mode 100644 dotnet/src/Connectors/VectorData/VectorData.csproj delete mode 100644 
dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilder.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderExtensions.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensions.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearch.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearchBuilderExtensions.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearch.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearchBuilderExtensions.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearch.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearchBuilderExtensions.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilder.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderExtensions.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensions.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilder.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderExtensions.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensions.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStore.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreBuilderExtensions.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollection.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensions.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilder.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderExtensions.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderServiceCollectionExtensions.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilder.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderExtensions.cs delete mode 100644 dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensions.cs delete mode 100644 dotnet/src/Connectors/VectorData/neticon.png diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index e1de8ac6e41d..8e8129257967 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -107,22 +107,20 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{958AD708-F04 EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Diagnostics", "Diagnostics", "{29E7D971-1308-4171-9872-E8E4669A1134}" ProjectSection(SolutionItems) = preProject + src\InternalUtilities\src\Diagnostics\ActivityExtensions.cs = src\InternalUtilities\src\Diagnostics\ActivityExtensions.cs src\InternalUtilities\src\Diagnostics\CompilerServicesAttributes.cs = src\InternalUtilities\src\Diagnostics\CompilerServicesAttributes.cs src\InternalUtilities\src\Diagnostics\DynamicallyAccessedMembersAttribute.cs = src\InternalUtilities\src\Diagnostics\DynamicallyAccessedMembersAttribute.cs 
src\InternalUtilities\src\Diagnostics\ExceptionExtensions.cs = src\InternalUtilities\src\Diagnostics\ExceptionExtensions.cs src\InternalUtilities\src\Diagnostics\ExperimentalAttribute.cs = src\InternalUtilities\src\Diagnostics\ExperimentalAttribute.cs src\InternalUtilities\src\Diagnostics\IsExternalInit.cs = src\InternalUtilities\src\Diagnostics\IsExternalInit.cs src\InternalUtilities\src\Diagnostics\KernelVerify.cs = src\InternalUtilities\src\Diagnostics\KernelVerify.cs + src\InternalUtilities\src\Diagnostics\LoggingExtensions.cs = src\InternalUtilities\src\Diagnostics\LoggingExtensions.cs src\InternalUtilities\src\Diagnostics\NullableAttributes.cs = src\InternalUtilities\src\Diagnostics\NullableAttributes.cs src\InternalUtilities\src\Diagnostics\RequiresDynamicCodeAttribute.cs = src\InternalUtilities\src\Diagnostics\RequiresDynamicCodeAttribute.cs src\InternalUtilities\src\Diagnostics\RequiresUnreferencedCodeAttribute.cs = src\InternalUtilities\src\Diagnostics\RequiresUnreferencedCodeAttribute.cs src\InternalUtilities\src\Diagnostics\UnconditionalSuppressMessageAttribute.cs = src\InternalUtilities\src\Diagnostics\UnconditionalSuppressMessageAttribute.cs src\InternalUtilities\src\Diagnostics\UnreachableException.cs = src\InternalUtilities\src\Diagnostics\UnreachableException.cs src\InternalUtilities\src\Diagnostics\Verify.cs = src\InternalUtilities\src\Diagnostics\Verify.cs - src\InternalUtilities\src\Diagnostics\ActivityExtensions.cs = src\InternalUtilities\src\Diagnostics\ActivityExtensions.cs - src\InternalUtilities\src\Diagnostics\LoggingExtensions.cs = src\InternalUtilities\src\Diagnostics\LoggingExtensions.cs - src\InternalUtilities\src\Diagnostics\UnreachableException.cs = src\InternalUtilities\src\Diagnostics\UnreachableException.cs - src\InternalUtilities\src\Diagnostics\Verify.cs = src\InternalUtilities\src\Diagnostics\Verify.cs EndProjectSection EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Linq", "Linq", "{B00AD427-0047-4850-BEF9-BA8237EA9D8B}" @@ -507,12 +505,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StructuredDataPlugin", "sam EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Plugins.StructuredData.EntityFramework", "src\Plugins\Plugins.StructuredData.EntityFramework\Plugins.StructuredData.EntityFramework.csproj", "{0C81C377-3CDC-46A8-BED1-4B50BDA2202E}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "VectorData.UnitTests", "src\Connectors\VectorData.UnitTests\VectorData.UnitTests.csproj", "{89FC596F-CB81-4733-829B-4527D0FFC291}" -EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SqlServerIntegrationTests", "src\VectorDataIntegrationTests\SqlServerIntegrationTests\SqlServerIntegrationTests.csproj", "{A5E6193C-8431-4C6E-B674-682CB41EAA0C}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "VectorData", "src\Connectors\VectorData\VectorData.csproj", "{8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}" -EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "PineconeIntegrationTests", "src\VectorDataIntegrationTests\PineconeIntegrationTests\PineconeIntegrationTests.csproj", "{E9A74E0C-BC02-4DDD-A487-89847EDF8026}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ModelContextProtocolPlugin", "samples\Demos\ModelContextProtocolPlugin\ModelContextProtocolPlugin.csproj", "{801C9CE4-53AF-D2DB-E0D6-9A6BB47E9654}" @@ -1364,12 +1358,6 @@ Global {8C658E1E-83C8-4127-B8BF-27A638A45DDD}.Publish|Any CPU.Build.0 = Publish|Any CPU {8C658E1E-83C8-4127-B8BF-27A638A45DDD}.Release|Any 
CPU.ActiveCfg = Release|Any CPU {8C658E1E-83C8-4127-B8BF-27A638A45DDD}.Release|Any CPU.Build.0 = Release|Any CPU - {89FC596F-CB81-4733-829B-4527D0FFC291}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {89FC596F-CB81-4733-829B-4527D0FFC291}.Debug|Any CPU.Build.0 = Debug|Any CPU - {89FC596F-CB81-4733-829B-4527D0FFC291}.Publish|Any CPU.ActiveCfg = Publish|Any CPU - {89FC596F-CB81-4733-829B-4527D0FFC291}.Publish|Any CPU.Build.0 = Publish|Any CPU - {89FC596F-CB81-4733-829B-4527D0FFC291}.Release|Any CPU.ActiveCfg = Release|Any CPU - {89FC596F-CB81-4733-829B-4527D0FFC291}.Release|Any CPU.Build.0 = Release|Any CPU {65F6D8C0-EFC9-669B-0901-EB9FA68E9D47}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {65F6D8C0-EFC9-669B-0901-EB9FA68E9D47}.Debug|Any CPU.Build.0 = Debug|Any CPU {65F6D8C0-EFC9-669B-0901-EB9FA68E9D47}.Publish|Any CPU.ActiveCfg = Release|Any CPU @@ -1418,12 +1406,6 @@ Global {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Publish|Any CPU.Build.0 = Debug|Any CPU {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Release|Any CPU.ActiveCfg = Release|Any CPU {A5E6193C-8431-4C6E-B674-682CB41EAA0C}.Release|Any CPU.Build.0 = Release|Any CPU - {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Debug|Any CPU.Build.0 = Debug|Any CPU - {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Publish|Any CPU.ActiveCfg = Publish|Any CPU - {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Publish|Any CPU.Build.0 = Publish|Any CPU - {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Release|Any CPU.ActiveCfg = Release|Any CPU - {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3}.Release|Any CPU.Build.0 = Release|Any CPU {E9A74E0C-BC02-4DDD-A487-89847EDF8026}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {E9A74E0C-BC02-4DDD-A487-89847EDF8026}.Debug|Any CPU.Build.0 = Debug|Any CPU {E9A74E0C-BC02-4DDD-A487-89847EDF8026}.Publish|Any CPU.ActiveCfg = Release|Any CPU @@ -1628,10 +1610,6 @@ Global {37381352-4F10-427F-AB8A-51FEAB265201} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4} {DAD5FC6A-8CA0-43AC-87E1-032DFBD6B02A} = {3F260A77-B6C9-97FD-1304-4B34DA936CF4} {8C658E1E-83C8-4127-B8BF-27A638A45DDD} = {6823CD5E-2ABE-41EB-B865-F86EC13F0CF9} - {89FC596F-CB81-4733-829B-4527D0FFC291} = {5A7028A7-4DDF-4E4F-84A9-37CE8F8D7E89} - {A5E6193C-8431-4C6E-B674-682CB41EAA0C} = {4F381919-F1BE-47D8-8558-3187ED04A84F} - {8A40AE00-4A7D-4ED0-A9DA-BB7A98EFABD3} = {24503383-A8C4-4255-9998-28D70FE8E99A} - {E9A74E0C-BC02-4DDD-A487-89847EDF8026} = {4F381919-F1BE-47D8-8558-3187ED04A84F} {41AC66EE-B656-4CDF-A512-C7259C2F6CF7} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} {65F6D8C0-EFC9-669B-0901-EB9FA68E9D47} = {41AC66EE-B656-4CDF-A512-C7259C2F6CF7} {0A2828B3-EA70-6F90-63DD-6E9A1020302D} = {41AC66EE-B656-4CDF-A512-C7259C2F6CF7} diff --git a/dotnet/samples/Concepts/Concepts.csproj b/dotnet/samples/Concepts/Concepts.csproj index 0a8add50d2d2..0ac3dc6a4586 100644 --- a/dotnet/samples/Concepts/Concepts.csproj +++ b/dotnet/samples/Concepts/Concepts.csproj @@ -79,7 +79,6 @@ - diff --git a/dotnet/samples/Concepts/Memory/VectorStore_Telemetry.cs b/dotnet/samples/Concepts/Memory/VectorStore_Telemetry.cs deleted file mode 100644 index 2165c96aabb2..000000000000 --- a/dotnet/samples/Concepts/Memory/VectorStore_Telemetry.cs +++ /dev/null @@ -1,180 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Azure.Identity; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AzureOpenAI; -using Microsoft.SemanticKernel.Connectors.InMemory; -using Microsoft.SemanticKernel.Embeddings; - -namespace Memory; - -/// -/// A simple example showing how to ingest data into a vector store and then use vector search to find related records to a given string -/// with enabled telemetry. -/// -public class VectorStore_Telemetry(ITestOutputHelper output) : BaseTest(output) -{ - [Fact] - public async Task LoggingManualRegistrationAsync() - { - // Create an embedding generation service. - var textEmbeddingGenerationService = new AzureOpenAITextEmbeddingGenerationService( - TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, - TestConfiguration.AzureOpenAIEmbeddings.Endpoint, - new AzureCliCredential()); - - // Manually construct an InMemory vector store with enabled logging. - var vectorStore = new InMemoryVectorStore() - .AsBuilder() - .UseLogging(this.LoggerFactory) - .Build(); - - await RunExampleAsync(textEmbeddingGenerationService, vectorStore); - - // Output: - // CreateCollectionIfNotExistsAsync invoked. - // CreateCollectionIfNotExistsAsync completed. - // UpsertAsync invoked. - // UpsertAsync completed. - // UpsertAsync invoked. - // UpsertAsync completed. - // UpsertAsync invoked. - // UpsertAsync completed. - // VectorizedSearchAsync invoked. - // VectorizedSearchAsync completed. - - // Search string: What is an Application Programming Interface - // Result: Application Programming Interface. A set of rules and specifications that allow software components to communicate and exchange data. - } - - [Fact] - public async Task LoggingDependencyInjectionAsync() - { - var serviceCollection = new ServiceCollection(); - - // Add an embedding generation service. - serviceCollection.AddAzureOpenAITextEmbeddingGeneration( - TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, - TestConfiguration.AzureOpenAIEmbeddings.Endpoint, - new AzureCliCredential()); - - // Add InMemory vector store - serviceCollection.AddInMemoryVectorStore(); - - // Register InMemoryVectorStore with enabled logging. - serviceCollection - .AddVectorStore(s => s.GetRequiredService()) - .UseLogging(this.LoggerFactory); - - var services = serviceCollection.BuildServiceProvider(); - - var vectorStore = services.GetRequiredService(); - var textEmbeddingGenerationService = services.GetRequiredService(); - - await RunExampleAsync(textEmbeddingGenerationService, vectorStore); - - // Output: - // CreateCollectionIfNotExistsAsync invoked. - // CreateCollectionIfNotExistsAsync completed. - // UpsertAsync invoked. - // UpsertAsync completed. - // UpsertAsync invoked. - // UpsertAsync completed. - // UpsertAsync invoked. - // UpsertAsync completed. - // VectorizedSearchAsync invoked. - // VectorizedSearchAsync completed. - - // Search string: What is an Application Programming Interface - // Result: Application Programming Interface. A set of rules and specifications that allow software components to communicate and exchange data. - } - - private async Task RunExampleAsync( - ITextEmbeddingGenerationService textEmbeddingGenerationService, - IVectorStore vectorStore) - { - // Get and create collection if it doesn't exist. - var collection = vectorStore.GetCollection("skglossary"); - await collection.CreateCollectionIfNotExistsAsync(); - - // Create glossary entries and generate embeddings for them. 
- var glossaryEntries = CreateGlossaryEntries().ToList(); - var tasks = glossaryEntries.Select(entry => Task.Run(async () => - { - entry.DefinitionEmbedding = await textEmbeddingGenerationService.GenerateEmbeddingAsync(entry.Definition); - })); - await Task.WhenAll(tasks); - - // Upsert the glossary entries into the collection and return their keys. - var upsertedKeysTasks = glossaryEntries.Select(x => collection.UpsertAsync(x)); - var upsertedKeys = await Task.WhenAll(upsertedKeysTasks); - - // Search the collection using a vector search. - var searchString = "What is an Application Programming Interface"; - var searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - var searchResult = await collection.VectorizedSearchAsync(searchVector, new() { Top = 1 }); - var resultRecords = await searchResult.Results.ToListAsync(); - - Console.WriteLine("Search string: " + searchString); - Console.WriteLine("Result: " + resultRecords.First().Record.Definition); - Console.WriteLine(); - } - - /// - /// Sample model class that represents a glossary entry. - /// - /// - /// Note that each property is decorated with an attribute that specifies how the property should be treated by the vector store. - /// This allows us to create a collection in the vector store and upsert and retrieve instances of this class without any further configuration. - /// - private sealed class Glossary - { - [VectorStoreRecordKey] - public ulong Key { get; set; } - - [VectorStoreRecordData(IsFilterable = true)] - public string Category { get; set; } - - [VectorStoreRecordData] - public string Term { get; set; } - - [VectorStoreRecordData] - public string Definition { get; set; } - - [VectorStoreRecordVector(1536)] - public ReadOnlyMemory DefinitionEmbedding { get; set; } - } - - /// - /// Create some sample glossary entries. - /// - /// A list of sample glossary entries. - private static IEnumerable CreateGlossaryEntries() - { - yield return new Glossary - { - Key = 1, - Category = "External Definitions", - Term = "API", - Definition = "Application Programming Interface. A set of rules and specifications that allow software components to communicate and exchange data." - }; - - yield return new Glossary - { - Key = 2, - Category = "Core Definitions", - Term = "Connectors", - Definition = "Connectors allow you to integrate with various services provide AI capabilities, including LLM, AudioToText, TextToAudio, Embedding generation, etc." - }; - - yield return new Glossary - { - Key = 3, - Category = "External Definitions", - Term = "RAG", - Definition = "Retrieval Augmented Generation - a term that refers to the process of retrieving additional data to provide as context to an LLM to use when generating a response (completion) to a user’s question (prompt)." 
- }; - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/.editorconfig b/dotnet/src/Connectors/VectorData.UnitTests/.editorconfig deleted file mode 100644 index 394eef685f21..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/.editorconfig +++ /dev/null @@ -1,6 +0,0 @@ -# Suppressing errors for Test projects under dotnet folder -[*.cs] -dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task -dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave -dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member -dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj b/dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj deleted file mode 100644 index d374de2022ba..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj +++ /dev/null @@ -1,41 +0,0 @@ - - - - VectorData.UnitTests - VectorData.UnitTests - net8.0 - true - enable - disable - false - $(NoWarn);SKEXP0001,SKEXP0020,VSTHRD111,CA2007,CS1591 - - - - - - - - - runtime; build; native; contentfiles; analyzers; buildtransitive - all - - - runtime; build; native; contentfiles; analyzers; buildtransitive - all - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderExtensionsTests.cs deleted file mode 100644 index ded7e8b44d28..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderExtensionsTests.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class KeywordHybridSearchBuilderExtensionsTests -{ - [Fact] - public void AsBuilderReturnsKeywordHybridSearchBuilder() - { - // Arrange - var search = new Mock>().Object; - - // Act - var builder = search.AsBuilder(); - - // Assert - Assert.IsType>(builder); - Assert.Same(search, builder.Build()); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensionsTests.cs deleted file mode 100644 index 35f776517dfc..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensionsTests.cs +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class KeywordHybridSearchBuilderServiceCollectionExtensionsTests -{ - [Fact] - public void AddKeywordHybridSearchWithInstanceReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var search = new Mock>().Object; - - // Act - var builder = services.AddKeywordHybridSearch(search); - var provider = services.BuildServiceProvider(); - var resolvedSearch = provider.GetService>(); - - // Assert - Assert.IsType>(builder); - Assert.Same(search, resolvedSearch); - Assert.Single(services, d => d.ServiceType == typeof(IKeywordHybridSearch) && d.Lifetime == ServiceLifetime.Singleton); - } - - [Fact] - public void AddKeywordHybridSearchWithFactoryReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var search = new Mock>().Object; - IKeywordHybridSearch Factory(IServiceProvider _) => search; - - // Act - var builder = services.AddKeywordHybridSearch(Factory, ServiceLifetime.Scoped); - var provider = services.BuildServiceProvider(); - var resolvedSearch = provider.GetService>(); - - // Assert - Assert.IsType>(builder); - Assert.Same(search, resolvedSearch); - Assert.Single(services, d => d.ServiceType == typeof(IKeywordHybridSearch) && d.Lifetime == ServiceLifetime.Scoped); - } - - [Fact] - public void AddKeyedKeywordHybridSearchWithInstanceReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var search = new Mock>().Object; - var key = "testKey"; - - // Act - var builder = services.AddKeyedKeywordHybridSearch(key, search); - var provider = services.BuildServiceProvider(); - var resolvedSearch = provider.GetKeyedService>(key); - - // Assert - Assert.IsType>(builder); - Assert.Same(search, resolvedSearch); - Assert.Single(services, - d => d.ServiceType == typeof(IKeywordHybridSearch) && - d.ServiceKey is not null && - d.ServiceKey.Equals(key) && - d.Lifetime == ServiceLifetime.Singleton); - } - - [Fact] - public void AddKeyedKeywordHybridSearchWithFactoryReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var search = new Mock>().Object; - IKeywordHybridSearch Factory(IServiceProvider _) => search; - var key = "testKey"; - - // Act - var builder = services.AddKeyedKeywordHybridSearch(key, Factory, ServiceLifetime.Scoped); - var provider = services.BuildServiceProvider(); - var resolvedSearch = provider.GetKeyedService>(key); - - // Assert - Assert.IsType>(builder); - Assert.Same(search, resolvedSearch); - Assert.Single(services, - d => d.ServiceType == typeof(IKeywordHybridSearch) && - d.ServiceKey is not null && - d.ServiceKey.Equals(key) && - d.Lifetime == ServiceLifetime.Scoped); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderTests.cs deleted file mode 100644 index 90fb4560cad2..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/KeywordHybridSearchBuilderTests.cs +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class KeywordHybridSearchBuilderTests -{ - [Fact] - public void ConstructorWithInstanceSetsInnerSearch() - { - // Arrange - var innerSearch = new Mock>().Object; - - // Act - var builder = new KeywordHybridSearchBuilder(innerSearch); - - // Assert - var builtSearch = builder.Build(); - Assert.Same(innerSearch, builtSearch); - } - - [Fact] - public void ConstructorWithFactoryCallsFactoryOnBuild() - { - // Arrange - var innerSearch = new Mock>().Object; - var serviceProvider = new Mock(); - IKeywordHybridSearch Factory(IServiceProvider _) => innerSearch; - - // Act - var builder = new KeywordHybridSearchBuilder(Factory); - var builtSearch = builder.Build(serviceProvider.Object); - - // Assert - Assert.Same(innerSearch, builtSearch); - } - - [Fact] - public void BuildWithMultipleFactoriesAppliesInReverseOrder() - { - // Arrange - var innerSearch = new Mock>().Object; - var mockSearch1 = new Mock>().Object; - var mockSearch2 = new Mock>().Object; - var builder = new KeywordHybridSearchBuilder(innerSearch); - - builder.Use(s => mockSearch1); - builder.Use(s => mockSearch2); - - // Act - var builtSearch = builder.Build(); - - // Assert - Assert.Same(mockSearch1, builtSearch); - } - - [Fact] - public void BuildWithNullReturningFactoryThrowsInvalidOperationException() - { - // Arrange - var innerSearch = new Mock>().Object; - var builder = new KeywordHybridSearchBuilder(innerSearch); - builder.Use((s, _) => null!); - - // Act & Assert - var exception = Assert.Throws(() => builder.Build()); - Assert.Contains("returned null", exception.Message); - } - - [Fact] - public void BuildWithNullServiceProviderUsesEmptyServiceProvider() - { - // Arrange - var innerSearch = new Mock>().Object; - var builder = new KeywordHybridSearchBuilder(innerSearch); - - // Act - var builtSearch = builder.Build(null); - - // Assert - Assert.Same(innerSearch, builtSearch); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchBuilderExtensionsTests.cs deleted file mode 100644 index 8cf6587b3dd7..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchBuilderExtensionsTests.cs +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class LoggingKeywordHybridSearchBuilderExtensionsTests -{ - [Fact] - public void UseLoggingWithFactoryAddsDecorator() - { - // Arrange - var innerSearch = new Mock>().Object; - var loggerFactory = new Mock(); - loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); - var builder = new KeywordHybridSearchBuilder(innerSearch); - - // Act - builder.UseLogging(loggerFactory.Object); - var result = builder.Build(); - - // Assert - Assert.IsType>(result); - } - - [Fact] - public void UseLoggingWithNullFactoryResolvesFromServiceProvider() - { - // Arrange - var innerSearch = new Mock>().Object; - var loggerFactory = new Mock(); - loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); - var serviceProvider = new Mock(); - serviceProvider.Setup(sp => sp.GetService(typeof(ILoggerFactory))).Returns(loggerFactory.Object); - var builder = new KeywordHybridSearchBuilder(innerSearch); - - // Act - builder.UseLogging(); - var result = builder.Build(serviceProvider.Object); - - // Assert - Assert.IsType>(result); - } - - [Fact] - public void UseLoggingWithNullLoggerFactoryReturnsInnerSearch() - { - // Arrange - var innerSearch = new Mock>().Object; - var builder = new KeywordHybridSearchBuilder(innerSearch); - - // Act - builder.UseLogging(NullLoggerFactory.Instance); - var result = builder.Build(); - - // Assert - Assert.Same(innerSearch, result); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchTests.cs deleted file mode 100644 index 85231becf613..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingKeywordHybridSearchTests.cs +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class LoggingKeywordHybridSearchTests -{ - [Fact] - public void ConstructorThrowsOnNullInnerSearch() - { - // Arrange - var logger = new Mock().Object; - - // Act & Assert - Assert.Throws(() => new LoggingKeywordHybridSearch(null!, logger)); - } - - [Fact] - public void ConstructorThrowsOnNullLogger() - { - // Arrange - var innerSearch = new Mock>().Object; - - // Act & Assert - Assert.Throws(() => new LoggingKeywordHybridSearch(innerSearch, null!)); - } - - [Fact] - public async Task HybridSearchDelegatesToInnerSearchAsync() - { - // Arrange - var innerSearch = new Mock>(); - var logger = new Mock().Object; - var vector = new float[] { 1.0f }; - var keywords = new List { "test" }; - var options = new HybridSearchOptions(); - var searchResults = new[] { new VectorSearchResult("result", 0.9f) }.ToAsyncEnumerable(); - var results = new VectorSearchResults(searchResults); - - innerSearch.Setup(s => s.HybridSearchAsync(vector, keywords, options, default)) - .ReturnsAsync(results); - - var decorator = new LoggingKeywordHybridSearch(innerSearch.Object, logger); - - // Act - var actualResults = await decorator.HybridSearchAsync(vector, keywords, options); - - // Assert - Assert.Same(results, actualResults); - innerSearch.Verify(s => s.HybridSearchAsync(vector, keywords, options, default), Times.Once()); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchBuilderExtensionsTests.cs deleted file mode 100644 index 026949453211..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchBuilderExtensionsTests.cs +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class LoggingVectorizableTextSearchBuilderExtensionsTests -{ - [Fact] - public void UseLoggingWithFactoryAddsDecorator() - { - // Arrange - var innerSearch = new Mock>().Object; - var loggerFactory = new Mock(); - loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); - var builder = new VectorizableTextSearchBuilder(innerSearch); - - // Act - builder.UseLogging(loggerFactory.Object); - var result = builder.Build(); - - // Assert - Assert.IsType>(result); - } - - [Fact] - public void UseLoggingWithNullFactoryResolvesFromServiceProvider() - { - // Arrange - var innerSearch = new Mock>().Object; - var loggerFactory = new Mock(); - loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); - var serviceProvider = new Mock(); - serviceProvider.Setup(sp => sp.GetService(typeof(ILoggerFactory))).Returns(loggerFactory.Object); - var builder = new VectorizableTextSearchBuilder(innerSearch); - - // Act - builder.UseLogging(); - var result = builder.Build(serviceProvider.Object); - - // Assert - Assert.IsType>(result); - } - - [Fact] - public void UseLoggingWithNullLoggerFactoryReturnsInnerSearch() - { - // Arrange - var innerSearch = new Mock>().Object; - var builder = new VectorizableTextSearchBuilder(innerSearch); - - // Act - builder.UseLogging(NullLoggerFactory.Instance); - var result = builder.Build(); - - // Assert - Assert.Same(innerSearch, result); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchTests.cs deleted file mode 100644 index e7de933156b7..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizableTextSearchTests.cs +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Linq; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class LoggingVectorizableTextSearchTests -{ - [Fact] - public void ConstructorThrowsOnNullInnerSearch() - { - // Arrange - var logger = new Mock().Object; - - // Act & Assert - Assert.Throws(() => new LoggingVectorizableTextSearch(null!, logger)); - } - - [Fact] - public void ConstructorThrowsOnNullLogger() - { - // Arrange - var innerSearch = new Mock>().Object; - - // Act & Assert - Assert.Throws(() => new LoggingVectorizableTextSearch(innerSearch, null!)); - } - - [Fact] - public async Task VectorizableTextSearchDelegatesToInnerSearchAsync() - { - // Arrange - var innerSearch = new Mock>(); - var logger = new Mock().Object; - var searchText = "test"; - var options = new VectorSearchOptions(); - var searchResults = new[] { new VectorSearchResult("result", 0.9f) }.ToAsyncEnumerable(); - var results = new VectorSearchResults(searchResults); - innerSearch.Setup(s => s.VectorizableTextSearchAsync(searchText, options, default)) - .ReturnsAsync(results); - var decorator = new LoggingVectorizableTextSearch(innerSearch.Object, logger); - - // Act - var actualResults = await decorator.VectorizableTextSearchAsync(searchText, options); - - // Assert - Assert.Same(results, actualResults); - innerSearch.Verify(s => s.VectorizableTextSearchAsync(searchText, options, default), Times.Once()); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchBuilderExtensionsTests.cs deleted file mode 100644 index a6380a9c5303..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchBuilderExtensionsTests.cs +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class LoggingVectorizedSearchBuilderExtensionsTests -{ - [Fact] - public void UseLoggingWithFactoryAddsDecorator() - { - // Arrange - var innerSearch = new Mock>().Object; - var loggerFactory = new Mock(); - loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); - var builder = new VectorizedSearchBuilder(innerSearch); - - // Act - builder.UseLogging(loggerFactory.Object); - var result = builder.Build(); - - // Assert - Assert.IsType>(result); - } - - [Fact] - public void UseLoggingWithNullFactoryResolvesFromServiceProvider() - { - // Arrange - var innerSearch = new Mock>().Object; - var loggerFactory = new Mock(); - loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); - var serviceProvider = new Mock(); - serviceProvider.Setup(sp => sp.GetService(typeof(ILoggerFactory))).Returns(loggerFactory.Object); - var builder = new VectorizedSearchBuilder(innerSearch); - - // Act - builder.UseLogging(); - var result = builder.Build(serviceProvider.Object); - - // Assert - Assert.IsType>(result); - } - - [Fact] - public void UseLoggingWithNullLoggerFactoryReturnsInnerSearch() - { - // Arrange - var innerSearch = new Mock>().Object; - var builder = new VectorizedSearchBuilder(innerSearch); - - // Act - builder.UseLogging(NullLoggerFactory.Instance); - var result = builder.Build(); - - // Assert - Assert.Same(innerSearch, result); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchTests.cs deleted file mode 100644 index ceb801060e7e..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/LoggingVectorizedSearchTests.cs +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Linq; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class LoggingVectorizedSearchTests -{ - [Fact] - public void ConstructorThrowsOnNullInnerSearch() - { - // Arrange - var logger = new Mock().Object; - - // Act & Assert - Assert.Throws(() => new LoggingVectorizedSearch(null!, logger)); - } - - [Fact] - public void ConstructorThrowsOnNullLogger() - { - // Arrange - var innerSearch = new Mock>().Object; - - // Act & Assert - Assert.Throws(() => new LoggingVectorizedSearch(innerSearch, null!)); - } - - [Fact] - public async Task VectorizedSearchDelegatesToInnerSearchAsync() - { - // Arrange - var innerSearch = new Mock>(); - var logger = new Mock().Object; - var vector = new float[] { 1.0f }; - var options = new VectorSearchOptions(); - var searchResults = new[] { new VectorSearchResult("result", 0.9f) }.ToAsyncEnumerable(); - var results = new VectorSearchResults(searchResults); - innerSearch.Setup(s => s.VectorizedSearchAsync(vector, options, default)) - .ReturnsAsync(results); - var decorator = new LoggingVectorizedSearch(innerSearch.Object, logger); - - // Act - var actualResults = await decorator.VectorizedSearchAsync(vector, options); - - // Assert - Assert.Same(results, actualResults); - innerSearch.Verify(s => s.VectorizedSearchAsync(vector, options, default), Times.Once()); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderExtensionsTests.cs deleted file mode 100644 index 197256c39108..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderExtensionsTests.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class VectorizableTextSearchBuilderExtensionsTests -{ - [Fact] - public void AsBuilderReturnsVectorizableTextSearchBuilder() - { - // Arrange - var search = new Mock>().Object; - - // Act - var builder = search.AsBuilder(); - - // Assert - Assert.IsType>(builder); - Assert.Same(search, builder.Build()); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensionsTests.cs deleted file mode 100644 index 42d153fcbb5b..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensionsTests.cs +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class VectorizableTextSearchBuilderServiceCollectionExtensionsTests -{ - [Fact] - public void AddVectorizableTextSearchWithInstanceReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var search = new Mock>().Object; - - // Act - var builder = services.AddVectorizableTextSearch(search); - var provider = services.BuildServiceProvider(); - var resolvedSearch = provider.GetService>(); - - // Assert - Assert.IsType>(builder); - Assert.Same(search, resolvedSearch); - Assert.Single(services, d => d.ServiceType == typeof(IVectorizableTextSearch) && d.Lifetime == ServiceLifetime.Singleton); - } - - [Fact] - public void AddVectorizableTextSearchWithFactoryReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var search = new Mock>().Object; - IVectorizableTextSearch Factory(IServiceProvider _) => search; - - // Act - var builder = services.AddVectorizableTextSearch(Factory, ServiceLifetime.Scoped); - var provider = services.BuildServiceProvider(); - var resolvedSearch = provider.GetService>(); - - // Assert - Assert.IsType>(builder); - Assert.Same(search, resolvedSearch); - Assert.Single(services, d => d.ServiceType == typeof(IVectorizableTextSearch) && d.Lifetime == ServiceLifetime.Scoped); - } - - [Fact] - public void AddKeyedVectorizableTextSearchWithInstanceReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var search = new Mock>().Object; - var key = "testKey"; - - // Act - var builder = services.AddKeyedVectorizableTextSearch(key, search); - var provider = services.BuildServiceProvider(); - var resolvedSearch = provider.GetKeyedService>(key); - - // Assert - Assert.IsType>(builder); - Assert.Same(search, resolvedSearch); - Assert.Single(services, - d => d.ServiceType == typeof(IVectorizableTextSearch) && - d.ServiceKey is not null && - d.ServiceKey.Equals(key) && - d.Lifetime == ServiceLifetime.Singleton); - } - - [Fact] - public void AddKeyedVectorizableTextSearchWithFactoryReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var search = new Mock>().Object; - IVectorizableTextSearch Factory(IServiceProvider _) => search; - var key = "testKey"; - - // Act - var builder = services.AddKeyedVectorizableTextSearch(key, Factory, ServiceLifetime.Scoped); - var provider = services.BuildServiceProvider(); - var resolvedSearch = provider.GetKeyedService>(key); - - // Assert - Assert.IsType>(builder); - Assert.Same(search, resolvedSearch); - Assert.Single(services, - d => d.ServiceType == typeof(IVectorizableTextSearch) && - d.ServiceKey is not null && - d.ServiceKey.Equals(key) && - d.Lifetime == ServiceLifetime.Scoped); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderTests.cs deleted file mode 100644 index 1d556abe5b26..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizableTextSearchBuilderTests.cs +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class VectorizableTextSearchBuilderTests -{ - [Fact] - public void ConstructorWithInstanceSetsInnerSearch() - { - // Arrange - var innerSearch = new Mock>().Object; - - // Act - var builder = new VectorizableTextSearchBuilder(innerSearch); - - // Assert - var builtSearch = builder.Build(); - Assert.Same(innerSearch, builtSearch); - } - - [Fact] - public void ConstructorWithFactoryCallsFactoryOnBuild() - { - // Arrange - var innerSearch = new Mock>().Object; - var serviceProvider = new Mock(); - IVectorizableTextSearch Factory(IServiceProvider _) => innerSearch; - - // Act - var builder = new VectorizableTextSearchBuilder(Factory); - var builtSearch = builder.Build(serviceProvider.Object); - - // Assert - Assert.Same(innerSearch, builtSearch); - } - - [Fact] - public void BuildWithMultipleFactoriesAppliesInReverseOrder() - { - // Arrange - var innerSearch = new Mock>().Object; - var mockSearch1 = new Mock>().Object; - var mockSearch2 = new Mock>().Object; - var builder = new VectorizableTextSearchBuilder(innerSearch); - - builder.Use(s => mockSearch1); - builder.Use(s => mockSearch2); - - // Act - var builtSearch = builder.Build(); - - // Assert - Assert.Same(mockSearch1, builtSearch); - } - - [Fact] - public void BuildWithNullReturningFactoryThrowsInvalidOperationException() - { - // Arrange - var innerSearch = new Mock>().Object; - var builder = new VectorizableTextSearchBuilder(innerSearch); - builder.Use((s, _) => null!); - - // Act & Assert - var exception = Assert.Throws(() => builder.Build()); - Assert.Contains("returned null", exception.Message); - } - - [Fact] - public void BuildWithNullServiceProviderUsesEmptyServiceProvider() - { - // Arrange - var innerSearch = new Mock>().Object; - var builder = new VectorizableTextSearchBuilder(innerSearch); - - // Act - var builtSearch = builder.Build(null); - - // Assert - Assert.Same(innerSearch, builtSearch); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderExtensionsTests.cs deleted file mode 100644 index eb0548f2097a..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderExtensionsTests.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class VectorizedSearchBuilderExtensionsTests -{ - [Fact] - public void AsBuilderReturnsVectorizedSearchBuilder() - { - // Arrange - var search = new Mock>().Object; - - // Act - var builder = search.AsBuilder(); - - // Assert - Assert.IsType>(builder); - Assert.Same(search, builder.Build()); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensionsTests.cs deleted file mode 100644 index b183d1cba162..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensionsTests.cs +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class VectorizedSearchBuilderServiceCollectionExtensionsTests -{ - [Fact] - public void AddVectorizedSearchWithInstanceReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var search = new Mock>().Object; - - // Act - var builder = services.AddVectorizedSearch(search); - var provider = services.BuildServiceProvider(); - var resolvedSearch = provider.GetService>(); - - // Assert - Assert.IsType>(builder); - Assert.Same(search, resolvedSearch); - Assert.Single(services, d => d.ServiceType == typeof(IVectorizedSearch) && d.Lifetime == ServiceLifetime.Singleton); - } - - [Fact] - public void AddVectorizedSearchWithFactoryReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var search = new Mock>().Object; - IVectorizedSearch Factory(IServiceProvider _) => search; - - // Act - var builder = services.AddVectorizedSearch(Factory, ServiceLifetime.Scoped); - var provider = services.BuildServiceProvider(); - var resolvedSearch = provider.GetService>(); - - // Assert - Assert.IsType>(builder); - Assert.Same(search, resolvedSearch); - Assert.Single(services, d => d.ServiceType == typeof(IVectorizedSearch) && d.Lifetime == ServiceLifetime.Scoped); - } - - [Fact] - public void AddKeyedVectorizedSearchWithInstanceReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var search = new Mock>().Object; - var key = "testKey"; - - // Act - var builder = services.AddKeyedVectorizedSearch(key, search); - var provider = services.BuildServiceProvider(); - var resolvedSearch = provider.GetKeyedService>(key); - - // Assert - Assert.IsType>(builder); - Assert.Same(search, resolvedSearch); - Assert.Single(services, - d => d.ServiceType == typeof(IVectorizedSearch) && - d.ServiceKey is not null && - d.ServiceKey.Equals(key) && - d.Lifetime == ServiceLifetime.Singleton); - } - - [Fact] - public void AddKeyedVectorizedSearchWithFactoryReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var search = new Mock>().Object; - IVectorizedSearch Factory(IServiceProvider _) => search; - var key = "testKey"; - - // Act - var builder = services.AddKeyedVectorizedSearch(key, Factory, ServiceLifetime.Scoped); - var provider = services.BuildServiceProvider(); - var resolvedSearch = provider.GetKeyedService>(key); - - // Assert - Assert.IsType>(builder); - Assert.Same(search, resolvedSearch); - Assert.Single(services, - d => d.ServiceType == typeof(IVectorizedSearch) && - d.ServiceKey is not null && - d.ServiceKey.Equals(key) && - d.Lifetime == ServiceLifetime.Scoped); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderTests.cs deleted file mode 100644 index d883db85a33d..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorSearch/VectorizedSearchBuilderTests.cs +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class VectorizedSearchBuilderTests -{ - [Fact] - public void ConstructorWithInstanceSetsInnerSearch() - { - // Arrange - var innerSearch = new Mock>().Object; - - // Act - var builder = new VectorizedSearchBuilder(innerSearch); - - // Assert - var builtSearch = builder.Build(); - Assert.Same(innerSearch, builtSearch); - } - - [Fact] - public void ConstructorWithFactoryCallsFactoryOnBuild() - { - // Arrange - var innerSearch = new Mock>().Object; - var serviceProvider = new Mock(); - IVectorizedSearch Factory(IServiceProvider _) => innerSearch; - - // Act - var builder = new VectorizedSearchBuilder(Factory); - var builtSearch = builder.Build(serviceProvider.Object); - - // Assert - Assert.Same(innerSearch, builtSearch); - } - - [Fact] - public void BuildWithMultipleFactoriesAppliesInReverseOrder() - { - // Arrange - var innerSearch = new Mock>().Object; - var mockSearch1 = new Mock>().Object; - var mockSearch2 = new Mock>().Object; - var builder = new VectorizedSearchBuilder(innerSearch); - - builder.Use(s => mockSearch1); - builder.Use(s => mockSearch2); - - // Act - var builtSearch = builder.Build(); - - // Assert - Assert.Same(mockSearch1, builtSearch); - } - - [Fact] - public void BuildWithNullReturningFactoryThrowsInvalidOperationException() - { - // Arrange - var innerSearch = new Mock>().Object; - var builder = new VectorizedSearchBuilder(innerSearch); - builder.Use((s, _) => null!); - - // Act & Assert - var exception = Assert.Throws(() => builder.Build()); - Assert.Contains("returned null", exception.Message); - } - - [Fact] - public void BuildWithNullServiceProviderUsesEmptyServiceProvider() - { - // Arrange - var innerSearch = new Mock>().Object; - var builder = new VectorizedSearchBuilder(innerSearch); - - // Act - var builtSearch = builder.Build(null); - - // Assert - Assert.Same(innerSearch, builtSearch); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreBuilderExtensionsTests.cs deleted file mode 100644 index 8db4dbf35c2a..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreBuilderExtensionsTests.cs +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class LoggingVectorStoreBuilderExtensionsTests -{ - [Fact] - public void UseLoggingWithFactoryAddsDecorator() - { - // Arrange - var innerStore = new Mock().Object; - var loggerFactory = new Mock(); - loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); - var builder = new VectorStoreBuilder(innerStore); - - // Act - builder.UseLogging(loggerFactory.Object); - var result = builder.Build(); - - // Assert - Assert.IsType(result); - } - - [Fact] - public void UseLoggingWithNullFactoryResolvesFromServiceProvider() - { - // Arrange - var innerStore = new Mock().Object; - var loggerFactory = new Mock(); - loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); - var serviceProvider = new Mock(); - serviceProvider.Setup(sp => sp.GetService(typeof(ILoggerFactory))).Returns(loggerFactory.Object); - var builder = new VectorStoreBuilder(innerStore); - - // Act - builder.UseLogging(); - var result = builder.Build(serviceProvider.Object); - - // Assert - Assert.IsType(result); - } - - [Fact] - public void UseLoggingWithNullLoggerFactoryReturnsInnerStore() - { - // Arrange - var innerStore = new Mock().Object; - var builder = new VectorStoreBuilder(innerStore); - - // Act - builder.UseLogging(NullLoggerFactory.Instance); - var result = builder.Build(); - - // Assert - Assert.Same(innerStore, result); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensionsTests.cs deleted file mode 100644 index 638e91fc17ad..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensionsTests.cs +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
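The deleted tests above exercise the UseLogging decorator through the vector store builder. A minimal usage sketch, not part of the patch, assuming an existing IVectorStore implementation (`innerStore`) and a console logger factory as placeholders:

// Usage sketch only, mirroring the deleted UseLogging tests above.
// Assumes usings for Microsoft.Extensions.Logging and Microsoft.Extensions.VectorData;
// `innerStore` is any concrete IVectorStore (placeholder).
ILoggerFactory loggerFactory = LoggerFactory.Create(b => b.AddConsole());

VectorStoreBuilder builder = innerStore.AsBuilder();
builder.UseLogging(loggerFactory);        // wraps the store in LoggingVectorStore
IVectorStore store = builder.Build();     // with NullLoggerFactory.Instance, Build() would return innerStore unchanged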
- -using System; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class LoggingVectorStoreRecordCollectionBuilderExtensionsTests -{ - [Fact] - public void UseLoggingWithFactoryAddsDecorator() - { - // Arrange - var innerCollection = new Mock>().Object; - var loggerFactory = new Mock(); - loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); - var builder = new VectorStoreRecordCollectionBuilder(innerCollection); - - // Act - builder.UseLogging(loggerFactory.Object); - var result = builder.Build(); - - // Assert - Assert.IsType>(result); - } - - [Fact] - public void UseLoggingWithNullFactoryResolvesFromServiceProvider() - { - // Arrange - var innerCollection = new Mock>().Object; - var loggerFactory = new Mock(); - loggerFactory.Setup(f => f.CreateLogger(It.IsAny())).Returns(new Mock().Object); - var serviceProvider = new Mock(); - serviceProvider.Setup(sp => sp.GetService(typeof(ILoggerFactory))).Returns(loggerFactory.Object); - var builder = new VectorStoreRecordCollectionBuilder(innerCollection); - - // Act - builder.UseLogging(); - var result = builder.Build(serviceProvider.Object); - - // Assert - Assert.IsType>(result); - } - - [Fact] - public void UseLoggingWithNullLoggerFactoryReturnsInnerCollection() - { - // Arrange - var innerCollection = new Mock>().Object; - var builder = new VectorStoreRecordCollectionBuilder(innerCollection); - - // Act - builder.UseLogging(NullLoggerFactory.Instance); - var result = builder.Build(); - - // Assert - Assert.Same(innerCollection, result); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionTests.cs deleted file mode 100644 index 294ee64555a0..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreRecordCollectionTests.cs +++ /dev/null @@ -1,245 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Linq; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class LoggingVectorStoreRecordCollectionTests -{ - [Fact] - public void ConstructorThrowsOnNullInnerCollection() - { - // Arrange - var logger = new Mock().Object; - - // Act & Assert - Assert.Throws(() => new LoggingVectorStoreRecordCollection(null!, logger)); - } - - [Fact] - public void ConstructorThrowsOnNullLogger() - { - // Arrange - var innerCollection = new Mock>().Object; - - // Act & Assert - Assert.Throws(() => new LoggingVectorStoreRecordCollection(innerCollection, null!)); - } - - [Fact] - public void CollectionNameReturnsInnerCollectionName() - { - // Arrange - var innerCollection = new Mock>(); - innerCollection.Setup(c => c.CollectionName).Returns("test"); - var logger = new Mock().Object; - var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); - - // Act - var name = decorator.CollectionName; - - // Assert - Assert.Equal("test", name); - innerCollection.Verify(c => c.CollectionName, Times.Once()); - } - - [Fact] - public async Task CollectionExistsDelegatesToInnerCollectionAsync() - { - // Arrange - var innerCollection = new Mock>(); - innerCollection.Setup(c => c.CollectionExistsAsync(default)).ReturnsAsync(true); - var logger = new Mock().Object; - var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); - - // Act - var exists = await decorator.CollectionExistsAsync(); - - // Assert - Assert.True(exists); - innerCollection.Verify(c => c.CollectionExistsAsync(default), Times.Once()); - } - - [Fact] - public async Task CreateCollectionDelegatesToInnerCollectionAsync() - { - // Arrange - var innerCollection = new Mock>(); - innerCollection.Setup(c => c.CreateCollectionAsync(default)).Returns(Task.CompletedTask); - var logger = new Mock().Object; - var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); - - // Act - await decorator.CreateCollectionAsync(); - - // Assert - innerCollection.Verify(c => c.CreateCollectionAsync(default), Times.Once()); - } - - [Fact] - public async Task CreateCollectionIfNotExistsDelegatesToInnerCollectionAsync() - { - // Arrange - var innerCollection = new Mock>(); - innerCollection.Setup(c => c.CreateCollectionIfNotExistsAsync(default)).Returns(Task.CompletedTask); - var logger = new Mock().Object; - var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); - - // Act - await decorator.CreateCollectionIfNotExistsAsync(); - - // Assert - innerCollection.Verify(c => c.CreateCollectionIfNotExistsAsync(default), Times.Once()); - } - - [Fact] - public async Task DeleteDelegatesToInnerCollectionAsync() - { - // Arrange - var innerCollection = new Mock>(); - innerCollection.Setup(c => c.DeleteAsync("key", default)).Returns(Task.CompletedTask); - var logger = new Mock().Object; - var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); - - // Act - await decorator.DeleteAsync("key"); - - // Assert - innerCollection.Verify(c => c.DeleteAsync("key", default), Times.Once()); - } - - [Fact] - public async Task DeleteBatchDelegatesToInnerCollectionAsync() - { - // Arrange - var innerCollection = new Mock>(); - var keys = new[] { "key1", "key2" }; - innerCollection.Setup(c => c.DeleteBatchAsync(keys, default)).Returns(Task.CompletedTask); - var logger = new Mock().Object; - var decorator = 
new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); - - // Act - await decorator.DeleteBatchAsync(keys); - - // Assert - innerCollection.Verify(c => c.DeleteBatchAsync(keys, default), Times.Once()); - } - - [Fact] - public async Task DeleteCollectionDelegatesToInnerCollectionAsync() - { - // Arrange - var innerCollection = new Mock>(); - innerCollection.Setup(c => c.DeleteCollectionAsync(default)).Returns(Task.CompletedTask); - var logger = new Mock().Object; - var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); - - // Act - await decorator.DeleteCollectionAsync(); - - // Assert - innerCollection.Verify(c => c.DeleteCollectionAsync(default), Times.Once()); - } - - [Fact] - public async Task GetDelegatesToInnerCollectionAsync() - { - // Arrange - var innerCollection = new Mock>(); - var record = new object(); - innerCollection.Setup(c => c.GetAsync("key", null, default)).ReturnsAsync(record); - - var logger = new Mock().Object; - var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); - - // Act - var result = await decorator.GetAsync("key"); - - // Assert - Assert.Same(record, result); - innerCollection.Verify(c => c.GetAsync("key", null, default), Times.Once()); - } - - [Fact] - public async Task GetBatchDelegatesToInnerCollectionAsync() - { - // Arrange - var innerCollection = new Mock>(); - var keys = new[] { "key1", "key2" }; - var records = new[] { new object(), new object() }; - innerCollection.Setup(c => c.GetBatchAsync(keys, null, default)).Returns(records.ToAsyncEnumerable()); - var logger = new Mock().Object; - var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); - - // Act - var result = await decorator.GetBatchAsync(keys).ToListAsync(); - - // Assert - Assert.Equal(records, result); - innerCollection.Verify(c => c.GetBatchAsync(keys, null, default), Times.Once()); - } - - [Fact] - public async Task UpsertDelegatesToInnerCollectionAsync() - { - // Arrange - var innerCollection = new Mock>(); - var record = new object(); - innerCollection.Setup(c => c.UpsertAsync(record, default)).ReturnsAsync("key"); - var logger = new Mock().Object; - var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); - - // Act - var key = await decorator.UpsertAsync(record); - - // Assert - Assert.Equal("key", key); - innerCollection.Verify(c => c.UpsertAsync(record, default), Times.Once()); - } - - [Fact] - public async Task UpsertBatchDelegatesToInnerCollectionAsync() - { - // Arrange - var innerCollection = new Mock>(); - var records = new[] { new object(), new object() }; - var keys = new[] { "key1", "key2" }; - innerCollection.Setup(c => c.UpsertBatchAsync(records, default)).Returns(keys.ToAsyncEnumerable()); - var logger = new Mock().Object; - var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); - - // Act - var result = await decorator.UpsertBatchAsync(records).ToListAsync(); - - // Assert - Assert.Equal(keys, result); - innerCollection.Verify(c => c.UpsertBatchAsync(records, default), Times.Once()); - } - - [Fact] - public async Task VectorizedSearchDelegatesToInnerCollectionAsync() - { - // Arrange - var innerCollection = new Mock>(); - var vector = new float[] { 1.0f }; - var options = new VectorSearchOptions(); - var searchResults = new[] { new VectorSearchResult("result", 0.9f) }.ToAsyncEnumerable(); - var results = new VectorSearchResults(searchResults); - innerCollection.Setup(c => 
c.VectorizedSearchAsync(vector, options, default)).ReturnsAsync(results); - var logger = new Mock().Object; - var decorator = new LoggingVectorStoreRecordCollection(innerCollection.Object, logger); - - // Act - var actualResults = await decorator.VectorizedSearchAsync(vector, options); - - // Assert - Assert.Same(results, actualResults); - innerCollection.Verify(c => c.VectorizedSearchAsync(vector, options, default), Times.Once()); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreTests.cs deleted file mode 100644 index 058fc56b6ad1..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/LoggingVectorStoreTests.cs +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Linq; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class LoggingVectorStoreTests -{ - [Fact] - public void ConstructorThrowsOnNullInnerStore() - { - // Arrange - var logger = new Mock().Object; - - // Act & Assert - Assert.Throws(() => new LoggingVectorStore(null!, logger)); - } - - [Fact] - public void ConstructorThrowsOnNullLogger() - { - // Arrange - var innerStore = new Mock().Object; - - // Act & Assert - Assert.Throws(() => new LoggingVectorStore(innerStore, null!)); - } - - [Fact] - public void GetCollectionDelegatesToInnerStore() - { - // Arrange - var innerStore = new Mock(); - var logger = new Mock().Object; - var collection = new Mock>().Object; - innerStore.Setup(s => s.GetCollection("test", null)) - .Returns(collection); - var decorator = new LoggingVectorStore(innerStore.Object, logger); - - // Act - var result = decorator.GetCollection("test"); - - // Assert - Assert.IsType>(result); - innerStore.Verify(s => s.GetCollection("test", null), Times.Once()); - } - - [Fact] - public async Task ListCollectionNamesDelegatesToInnerStoreAsync() - { - // Arrange - var innerStore = new Mock(); - var logger = new Mock().Object; - string[] names = ["col1", "col2"]; - innerStore.Setup(s => s.ListCollectionNamesAsync(default)) - .Returns(names.ToAsyncEnumerable()); - var decorator = new LoggingVectorStore(innerStore.Object, logger); - - // Act - var result = await decorator.ListCollectionNamesAsync().ToListAsync(); - - // Assert - Assert.Equal(names, result); - innerStore.Verify(s => s.ListCollectionNamesAsync(default), Times.Once()); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderExtensionsTests.cs deleted file mode 100644 index cd6aa33e6af1..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderExtensionsTests.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
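The LoggingVectorStoreRecordCollection and LoggingVectorStore tests above only verify delegation: every member forwards to the wrapped inner instance. A minimal illustrative decorator method, not the library's actual implementation (which routes calls through a shared logging helper shown later in this patch):

// Illustrative shape only; the real decorator uses a logging helper rather than direct LogDebug calls.
public Task DeleteAsync(string key, CancellationToken cancellationToken = default)
{
    this._logger.LogDebug("DeleteAsync invoked for key {Key}.", key);   // illustrative logging
    return this._innerCollection.DeleteAsync(key, cancellationToken);   // forward to the wrapped collection
}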
- -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class VectorStoreBuilderExtensionsTests -{ - [Fact] - public void AsBuilderReturnsVectorStoreBuilder() - { - // Arrange - var store = new Mock().Object; - - // Act - var builder = store.AsBuilder(); - - // Assert - Assert.IsType(builder); - Assert.Same(store, builder.Build()); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderServiceCollectionExtensionsTests.cs deleted file mode 100644 index 4658cbe2ea6a..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderServiceCollectionExtensionsTests.cs +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class VectorStoreBuilderServiceCollectionExtensionsTests -{ - [Fact] - public void AddVectorStoreWithInstanceReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var store = new Mock().Object; - - // Act - var builder = services.AddVectorStore(store); - var provider = services.BuildServiceProvider(); - var resolvedStore = provider.GetService(); - - // Assert - Assert.IsType(builder); - Assert.Same(store, resolvedStore); - Assert.Single(services, d => d.ServiceType == typeof(IVectorStore) && d.Lifetime == ServiceLifetime.Singleton); - } - - [Fact] - public void AddVectorStoreWithFactoryReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var store = new Mock().Object; - IVectorStore Factory(IServiceProvider _) => store; - - // Act - var builder = services.AddVectorStore(Factory, ServiceLifetime.Scoped); - var provider = services.BuildServiceProvider(); - var resolvedStore = provider.GetService(); - - // Assert - Assert.IsType(builder); - Assert.Same(store, resolvedStore); - Assert.Single(services, d => d.ServiceType == typeof(IVectorStore) && d.Lifetime == ServiceLifetime.Scoped); - } - - [Fact] - public void AddKeyedVectorStoreWithInstanceReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var store = new Mock().Object; - var key = "testKey"; - - // Act - var builder = services.AddKeyedVectorStore(key, store); - var provider = services.BuildServiceProvider(); - var resolvedStore = provider.GetKeyedService(key); - - // Assert - Assert.IsType(builder); - Assert.Same(store, resolvedStore); - Assert.Single(services, - d => d.ServiceType == typeof(IVectorStore) && - d.ServiceKey is not null && - d.ServiceKey.Equals(key) && - d.Lifetime == ServiceLifetime.Singleton); - } - - [Fact] - public void AddKeyedVectorStoreWithFactoryReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var store = new Mock().Object; - IVectorStore Factory(IServiceProvider _) => store; - var key = "testKey"; - - // Act - var builder = services.AddKeyedVectorStore(key, Factory, ServiceLifetime.Scoped); - var provider = services.BuildServiceProvider(); - var resolvedStore = provider.GetKeyedService(key); - - // Assert - Assert.IsType(builder); - Assert.Same(store, resolvedStore); - Assert.Single(services, - d => d.ServiceType == typeof(IVectorStore) && - d.ServiceKey is not null && - d.ServiceKey.Equals(key) && - d.Lifetime == ServiceLifetime.Scoped); - } -} diff --git 
a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderTests.cs deleted file mode 100644 index 88da238bef5d..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreBuilderTests.cs +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class VectorStoreBuilderTests -{ - [Fact] - public void ConstructorWithInstanceSetsInnerStore() - { - // Arrange - var innerStore = new Mock().Object; - - // Act - var builder = new VectorStoreBuilder(innerStore); - - // Assert - var builtStore = builder.Build(); - Assert.Same(innerStore, builtStore); - } - - [Fact] - public void ConstructorWithFactoryCallsFactoryOnBuild() - { - // Arrange - var innerStore = new Mock().Object; - var serviceProvider = new Mock(); - IVectorStore Factory(IServiceProvider _) => innerStore; - - // Act - var builder = new VectorStoreBuilder(Factory); - var builtStore = builder.Build(serviceProvider.Object); - - // Assert - Assert.Same(innerStore, builtStore); - } - - [Fact] - public void BuildWithMultipleFactoriesAppliesInReverseOrder() - { - // Arrange - var innerStore = new Mock().Object; - var mockStore1 = new Mock().Object; - var mockStore2 = new Mock().Object; - var builder = new VectorStoreBuilder(innerStore); - - builder.Use(s => mockStore1); - builder.Use(s => mockStore2); - - // Act - var builtStore = builder.Build(); - - // Assert - Assert.Same(mockStore1, builtStore); - } - - [Fact] - public void BuildWithNullReturningFactoryThrowsInvalidOperationException() - { - // Arrange - var innerStore = new Mock().Object; - var builder = new VectorStoreBuilder(innerStore); - builder.Use((s, _) => null!); - - // Act & Assert - var exception = Assert.Throws(() => builder.Build()); - Assert.Contains("returned null", exception.Message); - } - - [Fact] - public void BuildWithNullServiceProviderUsesEmptyServiceProvider() - { - // Arrange - var innerStore = new Mock().Object; - var builder = new VectorStoreBuilder(innerStore); - - // Act - var builtStore = builder.Build(null); - - // Assert - Assert.Same(innerStore, builtStore); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderExtensionsTests.cs deleted file mode 100644 index 52fb53ba849f..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderExtensionsTests.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
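The service collection extension tests above register a store either as an instance or via a factory, optionally under a key. A registration sketch based on those tests, where `store` is a placeholder IVectorStore instance:

// DI registration sketch based on the deleted tests above; `store` is a placeholder instance.
var services = new ServiceCollection();
services.AddVectorStore(store);                    // default lifetime: singleton
services.AddKeyedVectorStore("tenant-a", store);   // keyed registration, also singleton by default

var provider = services.BuildServiceProvider();
var resolved = provider.GetService<IVectorStore>();
var keyedResolved = provider.GetKeyedService<IVectorStore>("tenant-a");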
- -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class VectorStoreRecordCollectionBuilderExtensionsTests -{ - [Fact] - public void AsBuilderReturnsVectorStoreRecordCollectionBuilder() - { - // Arrange - var collection = new Mock>().Object; - - // Act - var builder = collection.AsBuilder(); - - // Assert - Assert.IsType>(builder); - Assert.Same(collection, builder.Build()); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensionsTests.cs deleted file mode 100644 index 973f2ab3baf2..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensionsTests.cs +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class VectorStoreRecordCollectionBuilderServiceCollectionExtensionsTests -{ - [Fact] - public void AddVectorStoreRecordCollectionWithInstanceReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var collection = new Mock>().Object; - - // Act - var builder = services.AddVectorStoreRecordCollection(collection); - var provider = services.BuildServiceProvider(); - var resolvedCollection = provider.GetService>(); - - // Assert - Assert.IsType>(builder); - Assert.Same(collection, resolvedCollection); - Assert.Single(services, d => d.ServiceType == typeof(IVectorStoreRecordCollection) && d.Lifetime == ServiceLifetime.Singleton); - } - - [Fact] - public void AddVectorStoreRecordCollectionWithFactoryReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var collection = new Mock>().Object; - IVectorStoreRecordCollection Factory(IServiceProvider _) => collection; - - // Act - var builder = services.AddVectorStoreRecordCollection(Factory, ServiceLifetime.Scoped); - var provider = services.BuildServiceProvider(); - var resolvedCollection = provider.GetService>(); - - // Assert - Assert.IsType>(builder); - Assert.Same(collection, resolvedCollection); - Assert.Single(services, d => d.ServiceType == typeof(IVectorStoreRecordCollection) && d.Lifetime == ServiceLifetime.Scoped); - } - - [Fact] - public void AddKeyedVectorStoreRecordCollectionWithInstanceReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var collection = new Mock>().Object; - var key = "testKey"; - - // Act - var builder = services.AddKeyedVectorStoreRecordCollection(key, collection); - var provider = services.BuildServiceProvider(); - var resolvedCollection = provider.GetKeyedService>(key); - - // Assert - Assert.IsType>(builder); - Assert.Same(collection, resolvedCollection); - Assert.Single(services, - d => d.ServiceType == typeof(IVectorStoreRecordCollection) && - d.ServiceKey is not null && - d.ServiceKey.Equals(key) && - d.Lifetime == ServiceLifetime.Singleton); - } - - [Fact] - public void AddKeyedVectorStoreRecordCollectionWithFactoryReturnsBuilder() - { - // Arrange - var services = new ServiceCollection(); - var collection = new Mock>().Object; - IVectorStoreRecordCollection Factory(IServiceProvider _) => collection; - var key = "testKey"; - - // Act - var builder = services.AddKeyedVectorStoreRecordCollection(key, 
Factory, ServiceLifetime.Scoped); - var provider = services.BuildServiceProvider(); - var resolvedCollection = provider.GetKeyedService>(key); - - // Assert - Assert.IsType>(builder); - Assert.Same(collection, resolvedCollection); - Assert.Single(services, - d => d.ServiceType == typeof(IVectorStoreRecordCollection) && - d.ServiceKey is not null && - d.ServiceKey.Equals(key) && - d.Lifetime == ServiceLifetime.Scoped); - } -} diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderTests.cs deleted file mode 100644 index 8246b0fa88b0..000000000000 --- a/dotnet/src/Connectors/VectorData.UnitTests/VectorStorage/VectorStoreRecordCollectionBuilderTests.cs +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.Extensions.VectorData; -using Moq; -using Xunit; - -namespace VectorData.UnitTests; - -public class VectorStoreRecordCollectionBuilderTests -{ - [Fact] - public void ConstructorWithInstanceSetsInnerCollection() - { - // Arrange - var innerCollection = new Mock>().Object; - - // Act - var builder = new VectorStoreRecordCollectionBuilder(innerCollection); - - // Assert - var builtCollection = builder.Build(); - Assert.Same(innerCollection, builtCollection); - } - - [Fact] - public void ConstructorWithFactoryCallsFactoryOnBuild() - { - // Arrange - var innerCollection = new Mock>().Object; - var serviceProvider = new Mock(); - IVectorStoreRecordCollection Factory(IServiceProvider _) => innerCollection; - - // Act - var builder = new VectorStoreRecordCollectionBuilder(Factory); - var builtCollection = builder.Build(serviceProvider.Object); - - // Assert - Assert.Same(innerCollection, builtCollection); - } - - [Fact] - public void BuildWithMultipleFactoriesAppliesInReverseOrder() - { - // Arrange - var innerCollection = new Mock>().Object; - var mockCollection1 = new Mock>().Object; - var mockCollection2 = new Mock>().Object; - var builder = new VectorStoreRecordCollectionBuilder(innerCollection); - - builder.Use(c => mockCollection1); - builder.Use(c => mockCollection2); - - // Act - var builtCollection = builder.Build(); - - // Assert - Assert.Same(mockCollection1, builtCollection); - } - - [Fact] - public void BuildWithNullReturningFactoryThrowsInvalidOperationException() - { - // Arrange - var innerCollection = new Mock>().Object; - var builder = new VectorStoreRecordCollectionBuilder(innerCollection); - builder.Use((c, _) => null!); - - // Act & Assert - var exception = Assert.Throws(() => builder.Build()); - Assert.Contains("returned null", exception.Message); - } - - [Fact] - public void BuildWithNullServiceProviderUsesEmptyServiceProvider() - { - // Arrange - var innerCollection = new Mock>().Object; - var builder = new VectorStoreRecordCollectionBuilder(innerCollection); - - // Act - var builtCollection = builder.Build(null); - - // Assert - Assert.Same(innerCollection, builtCollection); - } -} diff --git a/dotnet/src/Connectors/VectorData/PACKAGE.md b/dotnet/src/Connectors/VectorData/PACKAGE.md deleted file mode 100644 index 6c6c756412d9..000000000000 --- a/dotnet/src/Connectors/VectorData/PACKAGE.md +++ /dev/null @@ -1,40 +0,0 @@ -## About - -Contains utilities for accessing Vector Databases and Vector Indexes. - -## Key Features - -- Telemetry for any Vector Database implementation. 
Vector Database implementations are provided separately in other packages, for example `Microsoft.SemanticKernel.Connectors.AzureAISearch`. - -## How to Use - -This package is typically used with an implementation of the vector database abstractions such as `Microsoft.SemanticKernel.Connectors.AzureAISearch`. - -## Additional Documentation - -- [Conceptual documentation](https://learn.microsoft.com/en-us/semantic-kernel/concepts/vector-store-connectors) - -## Related Packages - -Vector Database abstractions: - -- `Microsoft.Extensions.VectorData.Abstractions` - -Vector Database implementations: - -- `Microsoft.SemanticKernel.Connectors.AzureAISearch` -- `Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB` -- `Microsoft.SemanticKernel.Connectors.AzureCosmosNoSQL` -- `Microsoft.SemanticKernel.Connectors.InMemory` -- `Microsoft.SemanticKernel.Connectors.MongoDB` -- `Microsoft.SemanticKernel.Connectors.Pinecone` -- `Microsoft.SemanticKernel.Connectors.Postgres` -- `Microsoft.SemanticKernel.Connectors.Qdrant` -- `Microsoft.SemanticKernel.Connectors.Redis` -- `Microsoft.SemanticKernel.Connectors.Sqlite` -- `Microsoft.SemanticKernel.Connectors.SqlServer` -- `Microsoft.SemanticKernel.Connectors.Weaviate` - -## Feedback & Contributing - -Microsoft.Extensions.VectorData is released as open source under the [MIT license](https://licenses.nuget.org/MIT). Bug reports and contributions are welcome at [the GitHub repository](https://github.com/microsoft/semantic-kernel). diff --git a/dotnet/src/Connectors/VectorData/VectorData.csproj b/dotnet/src/Connectors/VectorData/VectorData.csproj deleted file mode 100644 index 31e84c2533ce..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorData.csproj +++ /dev/null @@ -1,62 +0,0 @@ - - - - Microsoft.Extensions.VectorData - Microsoft.Extensions.VectorData - net8.0;netstandard2.0;net462 - - - - - - - 9.0.0-preview.1.25078.1 - 9.0.0.0 - - 9.0.0-preview.1.24518.1 - Microsoft.Extensions.VectorData - $(AssemblyName) - - Utilities for vector database access. - - neticon.png - neticon.png - PACKAGE.md - - Vector, Database, SDK - $(PackageDescription) - https://dot.net/ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilder.cs b/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilder.cs deleted file mode 100644 index 8a0295a65e98..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilder.cs +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// A builder for creating pipelines of . -[Experimental("SKEXP0020")] -public sealed class KeywordHybridSearchBuilder -{ - private readonly Func> _innerSearchFactory; - - /// The registered search factory instances. - private List, IServiceProvider, IKeywordHybridSearch>>? _searchFactories; - - /// Initializes a new instance of the class. - /// The inner that represents the underlying backend. - public KeywordHybridSearchBuilder(IKeywordHybridSearch innerSearch) - { - Verify.NotNull(innerSearch); - - this._innerSearchFactory = _ => innerSearch; - } - - /// Initializes a new instance of the class. - /// A callback that produces the inner that represents the underlying backend. 
- public KeywordHybridSearchBuilder(Func> innerSearchFactory) - { - Verify.NotNull(innerSearchFactory); - - this._innerSearchFactory = innerSearchFactory; - } - - /// Builds an that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn. - /// - /// The that should provide services to the instances. - /// If null, an empty will be used. - /// - /// An instance of that represents the entire pipeline. - public IKeywordHybridSearch Build(IServiceProvider? services = null) - { - services ??= EmptyKeyedServiceProvider.Instance; - var search = this._innerSearchFactory(services); - - // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost. - if (this._searchFactories is not null) - { - for (var i = this._searchFactories.Count - 1; i >= 0; i--) - { - search = this._searchFactories[i](search, services); - if (search is null) - { - throw new InvalidOperationException( - $"The {nameof(KeywordHybridSearchBuilder)} entry at index {i} returned null. " + - $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IKeywordHybridSearch)} instances."); - } - } - } - - return search; - } - - /// Adds a factory for an intermediate keyword hybrid search to the pipeline. - /// The search factory function. - /// The updated instance. - public KeywordHybridSearchBuilder Use(Func, IKeywordHybridSearch> searchFactory) - { - Verify.NotNull(searchFactory); - - return this.Use((innerSearch, _) => searchFactory(innerSearch)); - } - - /// Adds a factory for an intermediate keyword hybrid search to the pipeline. - /// The search factory function. - /// The updated instance. - public KeywordHybridSearchBuilder Use(Func, IServiceProvider, IKeywordHybridSearch> searchFactory) - { - Verify.NotNull(searchFactory); - - (this._searchFactories ??= []).Add(searchFactory); - return this; - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderExtensions.cs deleted file mode 100644 index 8c8b31ec6762..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderExtensions.cs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// Provides extension methods for working with in the context of . -[Experimental("SKEXP0020")] -public static class KeywordHybridSearchBuilderExtensions -{ - /// Creates a new using as its inner search. - /// The search to use as the inner search. - /// The new instance. - /// - /// This method is equivalent to using the constructor directly, - /// specifying as the inner search. - /// - public static KeywordHybridSearchBuilder AsBuilder(this IKeywordHybridSearch innerSearch) - { - Verify.NotNull(innerSearch); - - return new KeywordHybridSearchBuilder(innerSearch); - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensions.cs deleted file mode 100644 index 6bb2209b0812..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorSearch/KeywordHybridSearchBuilderServiceCollectionExtensions.cs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
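KeywordHybridSearchBuilder.Build applies the registered factories in reverse order, so the factory added first becomes the outermost layer of the pipeline. A composition sketch; the generic arguments and the two decorator types are assumptions, since the flattened diff above drops them:

// Composition-order sketch; MyRecord, OuterDecorator and InnerDecorator are hypothetical.
var builder = new KeywordHybridSearchBuilder<MyRecord>(innerSearch);
builder.Use(s => new OuterDecorator(s));   // added first => outermost wrapper after Build()
builder.Use(s => new InnerDecorator(s));   // added second => wraps innerSearch directly
IKeywordHybridSearch<MyRecord> pipeline = builder.Build();   // OuterDecorator(InnerDecorator(innerSearch))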
- -using System; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// Provides extension methods for registering with a . -[Experimental("SKEXP0020")] -public static class KeywordHybridSearchBuilderServiceCollectionExtensions -{ - /// Registers a singleton in the . - /// The to which the search should be added. - /// The inner that represents the underlying backend. - /// The service lifetime for the search. Defaults to . - /// A that can be used to build a pipeline around the inner search. - /// The search is registered as a singleton service. - public static KeywordHybridSearchBuilder AddKeywordHybridSearch( - this IServiceCollection serviceCollection, - IKeywordHybridSearch innerSearch, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerSearch); - - return AddKeywordHybridSearch(serviceCollection, _ => innerSearch, lifetime); - } - - /// Registers a singleton in the . - /// The to which the search should be added. - /// A callback that produces the inner that represents the underlying backend. - /// The service lifetime for the search. Defaults to . - /// A that can be used to build a pipeline around the inner search. - /// The search is registered as a singleton service. - public static KeywordHybridSearchBuilder AddKeywordHybridSearch( - this IServiceCollection serviceCollection, - Func> innerSearchFactory, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerSearchFactory); - - var builder = new KeywordHybridSearchBuilder(innerSearchFactory); - serviceCollection.Add(new ServiceDescriptor(typeof(IKeywordHybridSearch), builder.Build, lifetime)); - return builder; - } - - /// Registers a keyed singleton in the . - /// The to which the search should be added. - /// The key with which to associate the search. - /// The inner that represents the underlying backend. - /// The service lifetime for the search. Defaults to . - /// A that can be used to build a pipeline around the inner search. - /// The search is registered as a scoped service. - public static KeywordHybridSearchBuilder AddKeyedKeywordHybridSearch( - this IServiceCollection serviceCollection, - object? serviceKey, - IKeywordHybridSearch innerSearch, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerSearch); - - return AddKeyedKeywordHybridSearch(serviceCollection, serviceKey, _ => innerSearch, lifetime); - } - - /// Registers a keyed singleton in the . - /// The to which the search should be added. - /// The key with which to associate the search. - /// A callback that produces the inner that represents the underlying backend. - /// The service lifetime for the search. Defaults to . - /// A that can be used to build a pipeline around the inner search. - /// The search is registered as a scoped service. - public static KeywordHybridSearchBuilder AddKeyedKeywordHybridSearch( - this IServiceCollection serviceCollection, - object? 
serviceKey, - Func> innerSearchFactory, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerSearchFactory); - - var builder = new KeywordHybridSearchBuilder(innerSearchFactory); - serviceCollection.Add(new ServiceDescriptor(typeof(IKeywordHybridSearch), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); - return builder; - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearch.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearch.cs deleted file mode 100644 index c05ffde310cc..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearch.cs +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.Extensions.VectorData; - -/// -/// A keyword hybrid search that logs operations to an -/// -[Experimental("SKEXP0020")] -public class LoggingKeywordHybridSearch : IKeywordHybridSearch -{ - /// An instance used for all logging. - private readonly ILogger _logger; - - /// The underlying . - private readonly IKeywordHybridSearch _innerSearch; - - /// - /// Initializes a new instance of the class. - /// - /// The underlying . - /// An instance that will be used for all logging. - public LoggingKeywordHybridSearch(IKeywordHybridSearch innerSearch, ILogger logger) - { - Verify.NotNull(innerSearch); - Verify.NotNull(logger); - - this._innerSearch = innerSearch; - this._logger = logger; - } - - /// - public Task> HybridSearchAsync(TVector vector, ICollection keywords, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) - { - return LoggingExtensions.RunWithLoggingAsync( - this._logger, - nameof(HybridSearchAsync), - () => this._innerSearch.HybridSearchAsync(vector, keywords, options, cancellationToken)); - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearchBuilderExtensions.cs deleted file mode 100644 index 1614b6f6a57a..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingKeywordHybridSearchBuilderExtensions.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// Provides extensions for configuring instances. -[Experimental("SKEXP0020")] -public static class LoggingKeywordHybridSearchBuilderExtensions -{ - /// Adds logging to the keyword hybrid search pipeline. - /// The . - /// - /// An optional used to create a logger with which logging should be performed. - /// If not supplied, a required instance will be resolved from the service provider. - /// If resolved is , it will be skipped and the inner service will be used instead. - /// - /// The . - public static KeywordHybridSearchBuilder UseLogging( - this KeywordHybridSearchBuilder builder, - ILoggerFactory? 
loggerFactory = null) - { - Verify.NotNull(builder); - - return builder.Use((innerSearch, services) => - { - loggerFactory ??= services.GetRequiredService(); - - if (loggerFactory == NullLoggerFactory.Instance) - { - return innerSearch; - } - - return new LoggingKeywordHybridSearch(innerSearch, loggerFactory.CreateLogger(typeof(LoggingKeywordHybridSearch))); - }); - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearch.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearch.cs deleted file mode 100644 index 0dc81080e496..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearch.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.Extensions.VectorData; - -/// -/// A vectorizable text search that logs operations to an -/// -[Experimental("SKEXP0020")] -public class LoggingVectorizableTextSearch : IVectorizableTextSearch -{ - /// An instance used for all logging. - private readonly ILogger _logger; - - /// The underlying . - private readonly IVectorizableTextSearch _innerSearch; - - /// - /// Initializes a new instance of the class. - /// - /// The underlying . - /// An instance used for all logging. - public LoggingVectorizableTextSearch(IVectorizableTextSearch innerSearch, ILogger logger) - { - Verify.NotNull(innerSearch); - Verify.NotNull(logger); - - this._innerSearch = innerSearch; - this._logger = logger; - } - - /// - public Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) - { - return LoggingExtensions.RunWithLoggingAsync( - this._logger, - nameof(VectorizableTextSearchAsync), - () => this._innerSearch.VectorizableTextSearchAsync(searchText, options, cancellationToken)); - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearchBuilderExtensions.cs deleted file mode 100644 index 0f2a1b704474..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizableTextSearchBuilderExtensions.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// Provides extensions for configuring instances. -[Experimental("SKEXP0020")] -public static class LoggingVectorizableTextSearchBuilderExtensions -{ - /// Adds logging to the vectorizable text search pipeline. - /// The . - /// - /// An optional used to create a logger with which logging should be performed. - /// If not supplied, a required instance will be resolved from the service provider. - /// If resolved is , it will be skipped and the inner service will be used instead. - /// - /// The . - public static VectorizableTextSearchBuilder UseLogging( - this VectorizableTextSearchBuilder builder, - ILoggerFactory? 
loggerFactory = null) - { - Verify.NotNull(builder); - - return builder.Use((innerSearch, services) => - { - loggerFactory ??= services.GetRequiredService(); - - if (loggerFactory == NullLoggerFactory.Instance) - { - return innerSearch; - } - - return new LoggingVectorizableTextSearch(innerSearch, loggerFactory.CreateLogger(typeof(LoggingVectorizableTextSearch))); - }); - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearch.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearch.cs deleted file mode 100644 index f0198534d421..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearch.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.Extensions.VectorData; - -/// -/// A vectorized search that logs operations to an -/// -[Experimental("SKEXP0020")] -public class LoggingVectorizedSearch : IVectorizedSearch -{ - /// An instance used for all logging. - private readonly ILogger _logger; - - /// The underlying . - private readonly IVectorizedSearch _innerSearch; - - /// - /// Initializes a new instance of the class. - /// - /// The underlying . - /// An instance used for all logging. - public LoggingVectorizedSearch(IVectorizedSearch innerSearch, ILogger logger) - { - Verify.NotNull(innerSearch); - Verify.NotNull(logger); - - this._innerSearch = innerSearch; - this._logger = logger; - } - - /// - public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) - { - return LoggingExtensions.RunWithLoggingAsync( - this._logger, - nameof(VectorizedSearchAsync), - () => this._innerSearch.VectorizedSearchAsync(vector, options, cancellationToken)); - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearchBuilderExtensions.cs deleted file mode 100644 index 47fa7c61ebe4..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorSearch/LoggingVectorizedSearchBuilderExtensions.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// Provides extensions for configuring instances. -[Experimental("SKEXP0020")] -public static class LoggingVectorizedSearchBuilderExtensions -{ - /// Adds logging to the vectorized search pipeline. - /// The . - /// - /// An optional used to create a logger with which logging should be performed. - /// If not supplied, a required instance will be resolved from the service provider. - /// If resolved is , it will be skipped and the inner service will be used instead. - /// - /// The . - public static VectorizedSearchBuilder UseLogging( - this VectorizedSearchBuilder builder, - ILoggerFactory? 
loggerFactory = null) - { - Verify.NotNull(builder); - - return builder.Use((innerSearch, services) => - { - loggerFactory ??= services.GetRequiredService(); - - if (loggerFactory == NullLoggerFactory.Instance) - { - return innerSearch; - } - - return new LoggingVectorizedSearch(innerSearch, loggerFactory.CreateLogger(typeof(LoggingVectorizedSearch))); - }); - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilder.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilder.cs deleted file mode 100644 index a2681267d216..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilder.cs +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// A builder for creating pipelines of . -[Experimental("SKEXP0020")] -public sealed class VectorizableTextSearchBuilder -{ - private readonly Func> _innerSearchFactory; - - /// The registered search factory instances. - private List, IServiceProvider, IVectorizableTextSearch>>? _searchFactories; - - /// Initializes a new instance of the class. - /// The inner that represents the underlying backend. - public VectorizableTextSearchBuilder(IVectorizableTextSearch innerSearch) - { - Verify.NotNull(innerSearch); - - this._innerSearchFactory = _ => innerSearch; - } - - /// Initializes a new instance of the class. - /// A callback that produces the inner that represents the underlying backend. - public VectorizableTextSearchBuilder(Func> innerSearchFactory) - { - Verify.NotNull(innerSearchFactory); - - this._innerSearchFactory = innerSearchFactory; - } - - /// Builds an that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn. - /// - /// The that should provide services to the instances. - /// If null, an empty will be used. - /// - /// An instance of that represents the entire pipeline. - public IVectorizableTextSearch Build(IServiceProvider? services = null) - { - services ??= EmptyKeyedServiceProvider.Instance; - var search = this._innerSearchFactory(services); - - // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost. - if (this._searchFactories is not null) - { - for (var i = this._searchFactories.Count - 1; i >= 0; i--) - { - search = this._searchFactories[i](search, services); - if (search is null) - { - throw new InvalidOperationException( - $"The {nameof(VectorizableTextSearchBuilder)} entry at index {i} returned null. " + - $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IVectorizableTextSearch)} instances."); - } - } - } - - return search; - } - - /// Adds a factory for an intermediate vectorizable text search to the pipeline. - /// The search factory function. - /// The updated instance. - public VectorizableTextSearchBuilder Use(Func, IVectorizableTextSearch> searchFactory) - { - Verify.NotNull(searchFactory); - - return this.Use((innerSearch, _) => searchFactory(innerSearch)); - } - - /// Adds a factory for an intermediate vectorizable text search to the pipeline. - /// The search factory function. - /// The updated instance. 
- public VectorizableTextSearchBuilder Use(Func, IServiceProvider, IVectorizableTextSearch> searchFactory) - { - Verify.NotNull(searchFactory); - - (this._searchFactories ??= []).Add(searchFactory); - return this; - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderExtensions.cs deleted file mode 100644 index 7f44251ddc38..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderExtensions.cs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// Provides extension methods for working with in the context of . -[Experimental("SKEXP0020")] -public static class VectorizableTextSearchBuilderExtensions -{ - /// Creates a new using as its inner search. - /// The search to use as the inner search. - /// The new instance. - /// - /// This method is equivalent to using the constructor directly, - /// specifying as the inner search. - /// - public static VectorizableTextSearchBuilder AsBuilder(this IVectorizableTextSearch innerSearch) - { - Verify.NotNull(innerSearch); - - return new VectorizableTextSearchBuilder(innerSearch); - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensions.cs deleted file mode 100644 index 423919c561b0..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizableTextSearchBuilderServiceCollectionExtensions.cs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// Provides extension methods for registering with a . -[Experimental("SKEXP0020")] -public static class VectorizableTextSearchBuilderServiceCollectionExtensions -{ - /// Registers a singleton in the . - /// The to which the search should be added. - /// The inner that represents the underlying backend. - /// The service lifetime for the search. Defaults to . - /// A that can be used to build a pipeline around the inner search. - /// The search is registered as a singleton service. - public static VectorizableTextSearchBuilder AddVectorizableTextSearch( - this IServiceCollection serviceCollection, - IVectorizableTextSearch innerSearch, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerSearch); - - return AddVectorizableTextSearch(serviceCollection, _ => innerSearch, lifetime); - } - - /// Registers a singleton in the . - /// The to which the search should be added. - /// A callback that produces the inner that represents the underlying backend. - /// The service lifetime for the search. Defaults to . - /// A that can be used to build a pipeline around the inner search. - /// The search is registered as a singleton service. 
- public static VectorizableTextSearchBuilder AddVectorizableTextSearch( - this IServiceCollection serviceCollection, - Func> innerSearchFactory, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerSearchFactory); - - var builder = new VectorizableTextSearchBuilder(innerSearchFactory); - serviceCollection.Add(new ServiceDescriptor(typeof(IVectorizableTextSearch), builder.Build, lifetime)); - return builder; - } - - /// Registers a keyed singleton in the . - /// The to which the search should be added. - /// The key with which to associate the search. - /// The inner that represents the underlying backend. - /// The service lifetime for the search. Defaults to . - /// A that can be used to build a pipeline around the inner search. - /// The search is registered as a scoped service. - public static VectorizableTextSearchBuilder AddKeyedVectorizableTextSearch( - this IServiceCollection serviceCollection, - object? serviceKey, - IVectorizableTextSearch innerSearch, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerSearch); - - return AddKeyedVectorizableTextSearch(serviceCollection, serviceKey, _ => innerSearch, lifetime); - } - - /// Registers a keyed singleton in the . - /// The to which the search should be added. - /// The key with which to associate the search. - /// A callback that produces the inner that represents the underlying backend. - /// The service lifetime for the search. Defaults to . - /// A that can be used to build a pipeline around the inner search. - /// The search is registered as a scoped service. - public static VectorizableTextSearchBuilder AddKeyedVectorizableTextSearch( - this IServiceCollection serviceCollection, - object? serviceKey, - Func> innerSearchFactory, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerSearchFactory); - - var builder = new VectorizableTextSearchBuilder(innerSearchFactory); - serviceCollection.Add(new ServiceDescriptor(typeof(IVectorizableTextSearch), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); - return builder; - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilder.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilder.cs deleted file mode 100644 index fe02e2535482..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilder.cs +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// A builder for creating pipelines of . -[Experimental("SKEXP0020")] -public sealed class VectorizedSearchBuilder -{ - private readonly Func> _innerSearchFactory; - - /// The registered search factory instances. - private List, IServiceProvider, IVectorizedSearch>>? _searchFactories; - - /// Initializes a new instance of the class. - /// The inner that represents the underlying backend. - public VectorizedSearchBuilder(IVectorizedSearch innerSearch) - { - Verify.NotNull(innerSearch); - - this._innerSearchFactory = _ => innerSearch; - } - - /// Initializes a new instance of the class. - /// A callback that produces the inner that represents the underlying backend. 
- public VectorizedSearchBuilder(Func> innerSearchFactory) - { - Verify.NotNull(innerSearchFactory); - - this._innerSearchFactory = innerSearchFactory; - } - - /// Builds an that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn. - /// - /// The that should provide services to the instances. - /// If null, an empty will be used. - /// - /// An instance of that represents the entire pipeline. - public IVectorizedSearch Build(IServiceProvider? services = null) - { - services ??= EmptyKeyedServiceProvider.Instance; - var search = this._innerSearchFactory(services); - - // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost. - if (this._searchFactories is not null) - { - for (var i = this._searchFactories.Count - 1; i >= 0; i--) - { - search = this._searchFactories[i](search, services); - if (search is null) - { - throw new InvalidOperationException( - $"The {nameof(VectorizedSearchBuilder)} entry at index {i} returned null. " + - $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IVectorizedSearch)} instances."); - } - } - } - - return search; - } - - /// Adds a factory for an intermediate vectorized search to the pipeline. - /// The search factory function. - /// The updated instance. - public VectorizedSearchBuilder Use(Func, IVectorizedSearch> searchFactory) - { - Verify.NotNull(searchFactory); - - return this.Use((innerSearch, _) => searchFactory(innerSearch)); - } - - /// Adds a factory for an intermediate vectorized search to the pipeline. - /// The search factory function. - /// The updated instance. - public VectorizedSearchBuilder Use(Func, IServiceProvider, IVectorizedSearch> searchFactory) - { - Verify.NotNull(searchFactory); - - (this._searchFactories ??= []).Add(searchFactory); - return this; - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderExtensions.cs deleted file mode 100644 index a7721578716f..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderExtensions.cs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// Provides extension methods for working with in the context of . -[Experimental("SKEXP0020")] -public static class VectorizedSearchBuilderExtensions -{ - /// Creates a new using as its inner search. - /// The search to use as the inner search. - /// The new instance. - /// - /// This method is equivalent to using the constructor directly, - /// specifying as the inner search. - /// - public static VectorizedSearchBuilder AsBuilder(this IVectorizedSearch innerSearch) - { - Verify.NotNull(innerSearch); - - return new VectorizedSearchBuilder(innerSearch); - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensions.cs b/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensions.cs deleted file mode 100644 index 75f4bc7b1355..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorSearch/VectorizedSearchBuilderServiceCollectionExtensions.cs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// Provides extension methods for registering with a . -[Experimental("SKEXP0020")] -public static class VectorizedSearchBuilderServiceCollectionExtensions -{ - /// Registers a singleton in the . - /// The to which the search should be added. - /// The inner that represents the underlying backend. - /// The service lifetime for the search. Defaults to . - /// A that can be used to build a pipeline around the inner search. - /// The search is registered as a singleton service. - public static VectorizedSearchBuilder AddVectorizedSearch( - this IServiceCollection serviceCollection, - IVectorizedSearch innerSearch, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerSearch); - - return AddVectorizedSearch(serviceCollection, _ => innerSearch, lifetime); - } - - /// Registers a singleton in the . - /// The to which the search should be added. - /// A callback that produces the inner that represents the underlying backend. - /// The service lifetime for the search. Defaults to . - /// A that can be used to build a pipeline around the inner search. - /// The search is registered as a singleton service. - public static VectorizedSearchBuilder AddVectorizedSearch( - this IServiceCollection serviceCollection, - Func> innerSearchFactory, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerSearchFactory); - - var builder = new VectorizedSearchBuilder(innerSearchFactory); - serviceCollection.Add(new ServiceDescriptor(typeof(IVectorizedSearch), builder.Build, lifetime)); - return builder; - } - - /// Registers a keyed singleton in the . - /// The to which the search should be added. - /// The key with which to associate the search. - /// The inner that represents the underlying backend. - /// The service lifetime for the search. Defaults to . - /// A that can be used to build a pipeline around the inner search. - /// The search is registered as a scoped service. - public static VectorizedSearchBuilder AddKeyedVectorizedSearch( - this IServiceCollection serviceCollection, - object? serviceKey, - IVectorizedSearch innerSearch, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerSearch); - - return AddKeyedVectorizedSearch(serviceCollection, serviceKey, _ => innerSearch, lifetime); - } - - /// Registers a keyed singleton in the . - /// The to which the search should be added. - /// The key with which to associate the search. - /// A callback that produces the inner that represents the underlying backend. - /// The service lifetime for the search. Defaults to . - /// A that can be used to build a pipeline around the inner search. - /// The search is registered as a scoped service. - public static VectorizedSearchBuilder AddKeyedVectorizedSearch( - this IServiceCollection serviceCollection, - object? 
serviceKey, - Func> innerSearchFactory, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerSearchFactory); - - var builder = new VectorizedSearchBuilder(innerSearchFactory); - serviceCollection.Add(new ServiceDescriptor(typeof(IVectorizedSearch), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); - return builder; - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStore.cs b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStore.cs deleted file mode 100644 index ea2bca4ca106..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStore.cs +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Threading; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.Extensions.VectorData; - -/// -/// A vector store that logs operations to an -/// -[Experimental("SKEXP0020")] -public class LoggingVectorStore : IVectorStore -{ - /// An instance used for all logging. - private readonly ILogger _logger; - - /// The underlying . - private readonly IVectorStore _innerStore; - - /// - /// Initializes a new instance of the class. - /// - /// The underlying . - /// An instance that will be used for all logging. - public LoggingVectorStore(IVectorStore innerStore, ILogger logger) - { - Verify.NotNull(innerStore); - Verify.NotNull(logger); - - this._innerStore = innerStore; - this._logger = logger; - } - - /// - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull - => new LoggingVectorStoreRecordCollection( - this._innerStore.GetCollection(name, vectorStoreRecordDefinition), - this._logger); - - /// - public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) - { - return LoggingExtensions.RunWithLoggingAsync( - this._logger, - nameof(ListCollectionNamesAsync), - () => this._innerStore.ListCollectionNamesAsync(cancellationToken), - cancellationToken); - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreBuilderExtensions.cs deleted file mode 100644 index 9e8b7636a16c..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreBuilderExtensions.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// Provides extensions for configuring instances. -[Experimental("SKEXP0020")] -public static class LoggingVectorStoreBuilderExtensions -{ - /// Adds logging to the vector store pipeline. - /// The . - /// - /// An optional used to create a logger with which logging should be performed. - /// If not supplied, a required instance will be resolved from the service provider. - /// If resolved is , it will be skipped and the inner service will be used instead. - /// - /// The . - public static VectorStoreBuilder UseLogging( - this VectorStoreBuilder builder, - ILoggerFactory? 
loggerFactory = null) - { - Verify.NotNull(builder); - - return builder.Use((innerStore, services) => - { - loggerFactory ??= services.GetRequiredService(); - - if (loggerFactory == NullLoggerFactory.Instance) - { - return innerStore; - } - - return new LoggingVectorStore(innerStore, loggerFactory.CreateLogger(typeof(LoggingVectorStore))); - }); - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollection.cs b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollection.cs deleted file mode 100644 index 3d6919280861..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollection.cs +++ /dev/null @@ -1,144 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.Extensions.VectorData; - -/// -/// A vector store record collection that logs operations to an -/// -[Experimental("SKEXP0020")] -#pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class LoggingVectorStoreRecordCollection : IVectorStoreRecordCollection where TKey : notnull -#pragma warning restore CA1711 // Identifiers should not have incorrect suffix -{ - /// An instance used for all logging. - private readonly ILogger _logger; - - /// The underlying . - private readonly IVectorStoreRecordCollection _innerCollection; - - /// - /// Initializes a new instance of the class. - /// - /// The underlying . - /// An instance that will be used for all logging. - public LoggingVectorStoreRecordCollection(IVectorStoreRecordCollection innerCollection, ILogger logger) - { - Verify.NotNull(innerCollection); - Verify.NotNull(logger); - - this._innerCollection = innerCollection; - this._logger = logger; - } - - /// - public string CollectionName => this._innerCollection.CollectionName; - - /// - public Task CollectionExistsAsync(CancellationToken cancellationToken = default) - { - return LoggingExtensions.RunWithLoggingAsync( - this._logger, - nameof(CollectionExistsAsync), - () => this._innerCollection.CollectionExistsAsync(cancellationToken)); - } - - /// - public Task CreateCollectionAsync(CancellationToken cancellationToken = default) - { - return LoggingExtensions.RunWithLoggingAsync( - this._logger, - nameof(CreateCollectionAsync), - () => this._innerCollection.CreateCollectionAsync(cancellationToken)); - } - - /// - public Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) - { - return LoggingExtensions.RunWithLoggingAsync( - this._logger, - nameof(CreateCollectionIfNotExistsAsync), - () => this._innerCollection.CreateCollectionIfNotExistsAsync(cancellationToken)); - } - - /// - public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) - { - return LoggingExtensions.RunWithLoggingAsync( - this._logger, - nameof(DeleteAsync), - () => this._innerCollection.DeleteAsync(key, cancellationToken)); - } - - /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) - { - return LoggingExtensions.RunWithLoggingAsync( - this._logger, - nameof(DeleteBatchAsync), - () => this._innerCollection.DeleteBatchAsync(keys, cancellationToken)); - } - - /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) - { - return 
LoggingExtensions.RunWithLoggingAsync( - this._logger, - nameof(DeleteCollectionAsync), - () => this._innerCollection.DeleteCollectionAsync(cancellationToken)); - } - - /// - public Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) - { - return LoggingExtensions.RunWithLoggingAsync( - this._logger, - nameof(GetAsync), - () => this._innerCollection.GetAsync(key, options, cancellationToken)); - } - - /// - public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) - { - return LoggingExtensions.RunWithLoggingAsync( - this._logger, - nameof(GetBatchAsync), - () => this._innerCollection.GetBatchAsync(keys, options, cancellationToken), - cancellationToken); - } - - /// - public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) - { - return LoggingExtensions.RunWithLoggingAsync( - this._logger, - nameof(UpsertAsync), - () => this._innerCollection.UpsertAsync(record, cancellationToken)); - } - - /// - public IAsyncEnumerable UpsertBatchAsync(IEnumerable records, CancellationToken cancellationToken = default) - { - return LoggingExtensions.RunWithLoggingAsync( - this._logger, - nameof(UpsertBatchAsync), - () => this._innerCollection.UpsertBatchAsync(records, cancellationToken), - cancellationToken); - } - - /// - public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) - { - return LoggingExtensions.RunWithLoggingAsync( - this._logger, - nameof(VectorizedSearchAsync), - () => this._innerCollection.VectorizedSearchAsync(vector, options, cancellationToken)); - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensions.cs deleted file mode 100644 index 33d7d3760d32..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorStorage/LoggingVectorStoreRecordCollectionBuilderExtensions.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// Provides extensions for configuring instances. -[Experimental("SKEXP0020")] -public static class LoggingVectorStoreRecordCollectionBuilderExtensions -{ - /// Adds logging to the vector store record collection pipeline. - /// The . - /// - /// An optional used to create a logger with which logging should be performed. - /// If not supplied, a required instance will be resolved from the service provider. - /// If resolved is , it will be skipped and the inner service will be used instead. - /// - /// The . - public static VectorStoreRecordCollectionBuilder UseLogging( - this VectorStoreRecordCollectionBuilder builder, - ILoggerFactory? 
loggerFactory = null) where TKey : notnull - { - Verify.NotNull(builder); - - return builder.Use((innerCollection, services) => - { - loggerFactory ??= services.GetRequiredService(); - - if (loggerFactory == NullLoggerFactory.Instance) - { - return innerCollection; - } - - return new LoggingVectorStoreRecordCollection(innerCollection, loggerFactory.CreateLogger(typeof(LoggingVectorStoreRecordCollection))); - }); - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilder.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilder.cs deleted file mode 100644 index 71d98fcb276c..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilder.cs +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// A builder for creating pipelines of . -[Experimental("SKEXP0020")] -public sealed class VectorStoreBuilder -{ - private readonly Func _innerStoreFactory; - - /// The registered store factory instances. - private List>? _storeFactories; - - /// Initializes a new instance of the class. - /// The inner that represents the underlying backend. - public VectorStoreBuilder(IVectorStore innerStore) - { - Verify.NotNull(innerStore); - - this._innerStoreFactory = _ => innerStore; - } - - /// Initializes a new instance of the class. - /// A callback that produces the inner that represents the underlying backend. - public VectorStoreBuilder(Func innerStoreFactory) - { - Verify.NotNull(innerStoreFactory); - - this._innerStoreFactory = innerStoreFactory; - } - - /// Builds an that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn. - /// - /// The that should provide services to the instances. - /// If null, an empty will be used. - /// - /// An instance of that represents the entire pipeline. - public IVectorStore Build(IServiceProvider? services = null) - { - services ??= EmptyKeyedServiceProvider.Instance; - var vectorStore = this._innerStoreFactory(services); - - // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost. - if (this._storeFactories is not null) - { - for (var i = this._storeFactories.Count - 1; i >= 0; i--) - { - vectorStore = this._storeFactories[i](vectorStore, services); - if (vectorStore is null) - { - throw new InvalidOperationException( - $"The {nameof(VectorStoreBuilder)} entry at index {i} returned null. " + - $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IVectorStore)} instances."); - } - } - } - - return vectorStore; - } - - /// Adds a factory for an intermediate vector store to the vector store pipeline. - /// The store factory function. - /// The updated instance. - public VectorStoreBuilder Use(Func storeFactory) - { - Verify.NotNull(storeFactory); - - return this.Use((innerStore, _) => storeFactory(innerStore)); - } - - /// Adds a factory for an intermediate vector store to the vector store pipeline. - /// The store factory function. - /// The updated instance. 
- public VectorStoreBuilder Use(Func storeFactory) - { - Verify.NotNull(storeFactory); - - (this._storeFactories ??= []).Add(storeFactory); - return this; - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderExtensions.cs deleted file mode 100644 index f5666cd2d6ac..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderExtensions.cs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// Provides extension methods for working with in the context of . -[Experimental("SKEXP0020")] -public static class VectorStoreBuilderExtensions -{ - /// Creates a new using as its inner store. - /// The store to use as the inner store. - /// The new instance. - /// - /// This method is equivalent to using the constructor directly, - /// specifying as the inner store. - /// - public static VectorStoreBuilder AsBuilder(this IVectorStore innerStore) - { - Verify.NotNull(innerStore); - - return new VectorStoreBuilder(innerStore); - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderServiceCollectionExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderServiceCollectionExtensions.cs deleted file mode 100644 index a76d3256585b..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreBuilderServiceCollectionExtensions.cs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// Provides extension methods for registering with a . -[Experimental("SKEXP0020")] -public static class VectorStoreBuilderServiceCollectionExtensions -{ - /// Registers a singleton in the . - /// The to which the store should be added. - /// The inner that represents the underlying backend. - /// The service lifetime for the store. Defaults to . - /// A that can be used to build a pipeline around the inner store. - /// The store is registered as a singleton service. - public static VectorStoreBuilder AddVectorStore( - this IServiceCollection serviceCollection, - IVectorStore innerStore, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerStore); - - return AddVectorStore(serviceCollection, _ => innerStore, lifetime); - } - - /// Registers a singleton in the . - /// The to which the store should be added. - /// A callback that produces the inner that represents the underlying backend. - /// The service lifetime for the store. Defaults to . - /// A that can be used to build a pipeline around the inner store. - /// The store is registered as a singleton service. - public static VectorStoreBuilder AddVectorStore( - this IServiceCollection serviceCollection, - Func innerStoreFactory, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerStoreFactory); - - var builder = new VectorStoreBuilder(innerStoreFactory); - serviceCollection.Add(new ServiceDescriptor(typeof(IVectorStore), builder.Build, lifetime)); - return builder; - } - - /// Registers a keyed singleton in the . - /// The to which the store should be added. 
- /// The key with which to associate the store. - /// The inner that represents the underlying backend. - /// The service lifetime for the store. Defaults to . - /// A that can be used to build a pipeline around the inner store. - /// The store is registered as a scoped service. - public static VectorStoreBuilder AddKeyedVectorStore( - this IServiceCollection serviceCollection, - object? serviceKey, - IVectorStore innerStore, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerStore); - - return AddKeyedVectorStore(serviceCollection, serviceKey, _ => innerStore, lifetime); - } - - /// Registers a keyed singleton in the . - /// The to which the store should be added. - /// The key with which to associate the store. - /// A callback that produces the inner that represents the underlying backend. - /// The service lifetime for the store. Defaults to . - /// A that can be used to build a pipeline around the inner store. - /// The store is registered as a scoped service. - public static VectorStoreBuilder AddKeyedVectorStore( - this IServiceCollection serviceCollection, - object? serviceKey, - Func innerStoreFactory, - ServiceLifetime lifetime = ServiceLifetime.Singleton) - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerStoreFactory); - - var builder = new VectorStoreBuilder(innerStoreFactory); - serviceCollection.Add(new ServiceDescriptor(typeof(IVectorStore), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); - return builder; - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilder.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilder.cs deleted file mode 100644 index 2ae1048ff83e..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilder.cs +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// A builder for creating pipelines of . -[Experimental("SKEXP0020")] -public sealed class VectorStoreRecordCollectionBuilder where TKey : notnull -{ - private readonly Func> _innerCollectionFactory; - - /// The registered collection factory instances. - private List, IServiceProvider, IVectorStoreRecordCollection>>? _collectionFactories; - - /// Initializes a new instance of the class. - /// The inner that represents the underlying backend. - public VectorStoreRecordCollectionBuilder(IVectorStoreRecordCollection innerCollection) - { - Verify.NotNull(innerCollection); - - this._innerCollectionFactory = _ => innerCollection; - } - - /// Initializes a new instance of the class. - /// A callback that produces the inner that represents the underlying backend. - public VectorStoreRecordCollectionBuilder(Func> innerCollectionFactory) - { - Verify.NotNull(innerCollectionFactory); - - this._innerCollectionFactory = innerCollectionFactory; - } - - /// Builds an that represents the entire pipeline. Calls to this instance will pass through each of the pipeline stages in turn. - /// - /// The that should provide services to the instances. - /// If null, an empty will be used. - /// - /// An instance of that represents the entire pipeline. - public IVectorStoreRecordCollection Build(IServiceProvider? 
services = null) - { - services ??= EmptyKeyedServiceProvider.Instance; - var collection = this._innerCollectionFactory(services); - - // To match intuitive expectations, apply the factories in reverse order, so that the first factory added is the outermost. - if (this._collectionFactories is not null) - { - for (var i = this._collectionFactories.Count - 1; i >= 0; i--) - { - collection = this._collectionFactories[i](collection, services); - if (collection is null) - { - throw new InvalidOperationException( - $"The {nameof(VectorStoreRecordCollectionBuilder)} entry at index {i} returned null. " + - $"Ensure that the callbacks passed to {nameof(Use)} return non-null {nameof(IVectorStoreRecordCollection)} instances."); - } - } - } - - return collection; - } - - /// Adds a factory for an intermediate vector store record collection to the pipeline. - /// The collection factory function. - /// The updated instance. - public VectorStoreRecordCollectionBuilder Use(Func, IVectorStoreRecordCollection> collectionFactory) - { - Verify.NotNull(collectionFactory); - - return this.Use((innerCollection, _) => collectionFactory(innerCollection)); - } - - /// Adds a factory for an intermediate vector store record collection to the pipeline. - /// The collection factory function. - /// The updated instance. - public VectorStoreRecordCollectionBuilder Use(Func, IServiceProvider, IVectorStoreRecordCollection> collectionFactory) - { - Verify.NotNull(collectionFactory); - - (this._collectionFactories ??= []).Add(collectionFactory); - return this; - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderExtensions.cs deleted file mode 100644 index 1e950685253c..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderExtensions.cs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// Provides extension methods for working with in the context of . -[Experimental("SKEXP0020")] -public static class VectorStoreRecordCollectionBuilderExtensions -{ - /// Creates a new using as its inner collection. - /// The collection to use as the inner collection. - /// The new instance. - /// - /// This method is equivalent to using the constructor directly, - /// specifying as the inner collection. - /// - public static VectorStoreRecordCollectionBuilder AsBuilder(this IVectorStoreRecordCollection innerCollection) where TKey : notnull - { - Verify.NotNull(innerCollection); - - return new VectorStoreRecordCollectionBuilder(innerCollection); - } -} diff --git a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensions.cs b/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensions.cs deleted file mode 100644 index 2b0af209ebd6..000000000000 --- a/dotnet/src/Connectors/VectorData/VectorStorage/VectorStoreRecordCollectionBuilderServiceCollectionExtensions.cs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; - -namespace Microsoft.Extensions.VectorData; - -/// Provides extension methods for registering with a . 
-[Experimental("SKEXP0020")] -public static class VectorStoreRecordCollectionBuilderServiceCollectionExtensions -{ - /// Registers a singleton in the . - /// The to which the collection should be added. - /// The inner that represents the underlying backend. - /// The service lifetime for the collection. Defaults to . - /// A that can be used to build a pipeline around the inner collection. - /// The collection is registered as a singleton service. - public static VectorStoreRecordCollectionBuilder AddVectorStoreRecordCollection( - this IServiceCollection serviceCollection, - IVectorStoreRecordCollection innerCollection, - ServiceLifetime lifetime = ServiceLifetime.Singleton) where TKey : notnull - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerCollection); - - return AddVectorStoreRecordCollection(serviceCollection, _ => innerCollection, lifetime); - } - - /// Registers a singleton in the . - /// The to which the collection should be added. - /// A callback that produces the inner that represents the underlying backend. - /// The service lifetime for the collection. Defaults to . - /// A that can be used to build a pipeline around the inner collection. - /// The collection is registered as a singleton service. - public static VectorStoreRecordCollectionBuilder AddVectorStoreRecordCollection( - this IServiceCollection serviceCollection, - Func> innerCollectionFactory, - ServiceLifetime lifetime = ServiceLifetime.Singleton) where TKey : notnull - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerCollectionFactory); - - var builder = new VectorStoreRecordCollectionBuilder(innerCollectionFactory); - serviceCollection.Add(new ServiceDescriptor(typeof(IVectorStoreRecordCollection), builder.Build, lifetime)); - return builder; - } - - /// Registers a keyed singleton in the . - /// The to which the collection should be added. - /// The key with which to associate the collection. - /// The inner that represents the underlying backend. - /// The service lifetime for the collection. Defaults to . - /// A that can be used to build a pipeline around the inner collection. - /// The collection is registered as a scoped service. - public static VectorStoreRecordCollectionBuilder AddKeyedVectorStoreRecordCollection( - this IServiceCollection serviceCollection, - object? serviceKey, - IVectorStoreRecordCollection innerCollection, - ServiceLifetime lifetime = ServiceLifetime.Singleton) where TKey : notnull - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerCollection); - - return AddKeyedVectorStoreRecordCollection(serviceCollection, serviceKey, _ => innerCollection, lifetime); - } - - /// Registers a keyed singleton in the . - /// The to which the collection should be added. - /// The key with which to associate the collection. - /// A callback that produces the inner that represents the underlying backend. - /// The service lifetime for the collection. Defaults to . - /// A that can be used to build a pipeline around the inner collection. - /// The collection is registered as a scoped service. - public static VectorStoreRecordCollectionBuilder AddKeyedVectorStoreRecordCollection( - this IServiceCollection serviceCollection, - object? 
serviceKey, - Func> innerCollectionFactory, - ServiceLifetime lifetime = ServiceLifetime.Singleton) where TKey : notnull - { - Verify.NotNull(serviceCollection); - Verify.NotNull(innerCollectionFactory); - - var builder = new VectorStoreRecordCollectionBuilder(innerCollectionFactory); - serviceCollection.Add(new ServiceDescriptor(typeof(IVectorStoreRecordCollection), serviceKey, factory: (services, serviceKey) => builder.Build(services), lifetime)); - return builder; - } -} diff --git a/dotnet/src/Connectors/VectorData/neticon.png b/dotnet/src/Connectors/VectorData/neticon.png deleted file mode 100644 index a0f1fdbf4d5eae0e561018cccee74f6a454cdb9c..0000000000000000000000000000000000000000 GIT binary patch (7006-byte binary image data omitted)
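For context on the decorator-pipeline APIs removed from Connectors/VectorData in the diff above (VectorStoreBuilder, the AsBuilder/UseLogging extensions, and the AddVectorStore service-collection registrations), the sketch below shows the usage pattern those types supported, reconstructed from their deleted sources. It is a minimal, hedged example rather than anything taken from the patch: the innerStore and loggerFactory parameters are assumed stand-ins for whatever concrete IVectorStore and ILoggerFactory an application already has, and the APIs were marked experimental (SKEXP0020) at the time.

```csharp
// Sketch only: illustrates how the removed pipeline builders were wired up.
// Assumes an existing IVectorStore implementation and an ILoggerFactory supplied
// by the application; neither is defined in this patch.
#pragma warning disable SKEXP0020 // The builder and logging types were experimental.

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.VectorData;

public static class VectorStorePipelineSketch
{
    public static IVectorStore BuildLoggingPipeline(IVectorStore innerStore, ILoggerFactory loggerFactory)
    {
        // AsBuilder wraps the inner store in a VectorStoreBuilder; UseLogging adds the
        // LoggingVectorStore decorator; Build applies the registered factories in reverse
        // order, so the first Use(...) call becomes the outermost wrapper.
        return innerStore
            .AsBuilder()
            .UseLogging(loggerFactory)
            .Build();
    }

    public static void Register(IServiceCollection services, IVectorStore innerStore)
    {
        // The service-collection overloads registered the built pipeline as an IVectorStore
        // (singleton lifetime by default) and returned the builder for further configuration.
        // With no explicit ILoggerFactory, UseLogging resolves one from the service provider
        // at Build time and skips the decorator if it resolves to NullLoggerFactory.Instance.
        services.AddVectorStore(innerStore)
                .UseLogging();
    }
}
```

The same builder/decorator shape was repeated for IVectorStoreRecordCollection, IVectorizedSearch, IVectorizableTextSearch, and IKeywordHybridSearch in the other files deleted above.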
From da00e8d7c91f1f83ff3a6ff64fa729fe7c4bdef9 Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Thu, 3 Apr 2025 11:24:23 +0100 Subject: [PATCH 26/63] .Net: MEVD: Remove batch from batch CRUD operation names.
(#11287) ### Motivation and Context Changing this simplifies the interface to make methods more discoverable. #11276 ### Description Removing batch from the batch CRUD operation name, since they do not clash with the non batch overloads. ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- ...extEmbeddingVectorStoreRecordCollection.cs | 12 ++--- .../Memory/VectorStore_EmbeddingGeneration.cs | 2 +- .../Optimization/FrugalGPTWithFilters.cs | 2 +- .../PluginSelectionWithFilters.cs | 2 +- dotnet/samples/Demos/OnnxSimpleRAG/Program.cs | 2 +- .../Demos/VectorStoreRAG/DataLoader.cs | 2 +- .../Step4_NonStringKey_VectorStore.cs | 12 ++--- ...ISearchVectorStoreRecordCollectionTests.cs | 6 +-- ...MongoDBVectorStoreRecordCollectionTests.cs | 6 +-- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 6 +-- ...nMemoryVectorStoreRecordCollectionTests.cs | 6 +-- ...zureAISearchVectorStoreRecordCollection.cs | 6 +-- ...mosDBMongoDBVectorStoreRecordCollection.cs | 6 +-- ...osmosDBNoSQLVectorStoreRecordCollection.cs | 12 ++--- .../InMemoryVectorStoreRecordCollection.cs | 6 +-- .../MongoDBVectorStoreRecordCollection.cs | 6 +-- .../PineconeVectorStoreRecordCollection.cs | 6 +-- .../PostgresVectorStoreRecordCollection.cs | 6 +-- .../QdrantVectorStoreRecordCollection.cs | 16 +++--- ...RedisHashSetVectorStoreRecordCollection.cs | 6 +-- .../RedisJsonVectorStoreRecordCollection.cs | 6 +-- .../SqlServerVectorStoreRecordCollection.cs | 6 +-- .../SqliteVectorStoreRecordCollection.cs | 12 ++--- .../WeaviateVectorStoreRecordCollection.cs | 8 +-- ...MongoDBVectorStoreRecordCollectionTests.cs | 6 +-- .../QdrantVectorStoreRecordCollectionTests.cs | 8 +-- ...HashSetVectorStoreRecordCollectionTests.cs | 6 +-- ...disJsonVectorStoreRecordCollectionTests.cs | 6 +-- ...eaviateVectorStoreRecordCollectionTests.cs | 6 +-- .../CompatibilitySuppressions.xml | 54 ++++++++++++++++--- .../RecordOptions/GetRecordOptions.cs | 6 ++- .../IVectorStoreRecordCollection.cs | 8 +-- ...ISearchVectorStoreRecordCollectionTests.cs | 6 +-- ...MongoDBVectorStoreRecordCollectionTests.cs | 14 ++--- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 14 ++--- .../BaseVectorStoreRecordCollectionTests.cs | 2 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 14 ++--- ...ostgresVectorStoreRecordCollectionTests.cs | 14 ++--- .../QdrantVectorStoreRecordCollectionTests.cs | 4 +- ...HashSetVectorStoreRecordCollectionTests.cs | 6 +-- ...disJsonVectorStoreRecordCollectionTests.cs | 6 +-- .../SqliteVectorStoreRecordCollectionTests.cs | 22 ++++---- ...eaviateVectorStoreRecordCollectionTests.cs | 16 +++--- .../CRUD/PineconeAllSupportedTypesTests.cs | 4 +- .../CRUD/SqlServerBatchConformanceTests.cs | 16 +++--- .../SqlServerVectorStoreTests.cs | 12 ++--- .../CRUD/BatchConformanceTests.cs | 38 ++++++------- .../CRUD/GenericDataModelConformanceTests.cs | 2 +- .../CRUD/RecordConformanceTests.cs | 2 +- .../Support/VectorStoreCollectionFixture.cs | 2 +- ...orSearchDistanceFunctionComplianceTests.cs | 2 +- 51 files changed, 251 insertions(+), 205 deletions(-) diff --git 
a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs index 000cb1ebba07..618e06bc05b5 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs @@ -83,9 +83,9 @@ public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) } /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { - return this._decoratedVectorStoreRecordCollection.DeleteBatchAsync(keys, cancellationToken); + return this._decoratedVectorStoreRecordCollection.DeleteAsync(keys, cancellationToken); } /// @@ -95,9 +95,9 @@ public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellat } /// - public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { - return this._decoratedVectorStoreRecordCollection.GetBatchAsync(keys, options, cancellationToken); + return this._decoratedVectorStoreRecordCollection.GetAsync(keys, options, cancellationToken); } /// @@ -108,11 +108,11 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { var recordWithEmbeddingsTasks = records.Select(r => this.AddEmbeddingsAsync(r, cancellationToken)); var recordWithEmbeddings = await Task.WhenAll(recordWithEmbeddingsTasks).ConfigureAwait(false); - var upsertResults = this._decoratedVectorStoreRecordCollection.UpsertBatchAsync(recordWithEmbeddings, cancellationToken); + var upsertResults = this._decoratedVectorStoreRecordCollection.UpsertAsync(recordWithEmbeddings, cancellationToken); await foreach (var upsertResult in upsertResults.ConfigureAwait(false)) { yield return upsertResult; diff --git a/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs b/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs index b641443e878a..b9796e3709b9 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs @@ -45,7 +45,7 @@ public async Task UseEmbeddingGenerationViaDecoratorAsync() await collection.CreateCollectionIfNotExistsAsync(); // Create and upsert glossary entries into the collection. - await collection.UpsertBatchAsync(CreateGlossaryEntries()).ToListAsync(); + await collection.UpsertAsync(CreateGlossaryEntries()).ToListAsync(); // Search the collection using a vectorizable text search. 
var search = collection as IVectorizableTextSearch; diff --git a/dotnet/samples/Concepts/Optimization/FrugalGPTWithFilters.cs b/dotnet/samples/Concepts/Optimization/FrugalGPTWithFilters.cs index a5b9917e6ce0..4ab891833e06 100644 --- a/dotnet/samples/Concepts/Optimization/FrugalGPTWithFilters.cs +++ b/dotnet/samples/Concepts/Optimization/FrugalGPTWithFilters.cs @@ -210,7 +210,7 @@ public async Task OnPromptRenderAsync(PromptRenderContext context, Func(CollectionName); await collection.CreateCollectionIfNotExistsAsync(context.CancellationToken); - await collection.UpsertBatchAsync(exampleRecords, cancellationToken: context.CancellationToken).ToListAsync(context.CancellationToken); + await collection.UpsertAsync(exampleRecords, cancellationToken: context.CancellationToken).ToListAsync(context.CancellationToken); // Generate embedding for original request. var requestEmbedding = await textEmbeddingGenerationService.GenerateEmbeddingAsync(request, cancellationToken: context.CancellationToken); diff --git a/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs b/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs index 695ff675e17f..cdaff88edeb9 100644 --- a/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs +++ b/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs @@ -341,7 +341,7 @@ public async Task SaveAsync(string collectionName, KernelPluginCollection plugin var collection = vectorStore.GetCollection(collectionName); await collection.CreateCollectionIfNotExistsAsync(cancellationToken); - await collection.UpsertBatchAsync(functionRecords, cancellationToken: cancellationToken).ToListAsync(cancellationToken); + await collection.UpsertAsync(functionRecords, cancellationToken: cancellationToken).ToListAsync(cancellationToken); } private static List<(KernelFunction Function, string TextToVectorize)> GetFunctionsData(KernelPluginCollection plugins) diff --git a/dotnet/samples/Demos/OnnxSimpleRAG/Program.cs b/dotnet/samples/Demos/OnnxSimpleRAG/Program.cs index 0a8b76360850..83525c1a2e77 100644 --- a/dotnet/samples/Demos/OnnxSimpleRAG/Program.cs +++ b/dotnet/samples/Demos/OnnxSimpleRAG/Program.cs @@ -55,7 +55,7 @@ foreach (var factTextFile in Directory.GetFiles("Facts", "*.txt")) { var factContent = File.ReadAllText(factTextFile); - await collection.UpsertAsync(new() + await collection.UpsertAsync(new InformationItem() { Id = Guid.NewGuid().ToString(), Text = factContent, diff --git a/dotnet/samples/Demos/VectorStoreRAG/DataLoader.cs b/dotnet/samples/Demos/VectorStoreRAG/DataLoader.cs index 2cd7d43ce746..678044fd8fc9 100644 --- a/dotnet/samples/Demos/VectorStoreRAG/DataLoader.cs +++ b/dotnet/samples/Demos/VectorStoreRAG/DataLoader.cs @@ -66,7 +66,7 @@ public async Task LoadPdf(string pdfPath, int batchSize, int betweenBatchDelayIn // Upsert the records into the vector store. 
var records = await Task.WhenAll(recordTasks).ConfigureAwait(false); - var upsertedKeys = vectorStoreRecordCollection.UpsertBatchAsync(records, cancellationToken: cancellationToken); + var upsertedKeys = vectorStoreRecordCollection.UpsertAsync(records, cancellationToken: cancellationToken); await foreach (var key in upsertedKeys.ConfigureAwait(false)) { Console.WriteLine($"Upserted record '{key}' into VectorDB"); diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs b/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs index 35ca4822a824..86b6273e0e86 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs @@ -133,9 +133,9 @@ public Task DeleteAsync(TPublicKey key, CancellationToken cancellationToken = de } /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { - return this._collection.DeleteBatchAsync(keys.Select(this._publicToInternalKeyMapper), cancellationToken); + return this._collection.DeleteAsync(keys.Select(this._publicToInternalKeyMapper), cancellationToken); } /// @@ -157,9 +157,9 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { - var internalRecords = this._collection.GetBatchAsync(keys.Select(this._publicToInternalKeyMapper), options, cancellationToken); + var internalRecords = this._collection.GetAsync(keys.Select(this._publicToInternalKeyMapper), options, cancellationToken); return internalRecords.Select(this._internalToPublicRecordMapper); } @@ -172,10 +172,10 @@ public async Task UpsertAsync(TPublicRecord record, CancellationToke } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { var internalRecords = records.Select(this._publicToInternalRecordMapper); - var internalKeys = this._collection.UpsertBatchAsync(internalRecords, cancellationToken); + var internalKeys = this._collection.UpsertAsync(internalRecords, cancellationToken); await foreach (var internalKey in internalKeys.ConfigureAwait(false)) { yield return this._internalToPublicKeyMapper(internalKey); diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs index b786c8d8fa58..da69f4590c47 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -257,7 +257,7 @@ public async Task CanGetManyRecordsWithVectorsAsync(bool useDefinition) var sut = this.CreateRecordCollection(useDefinition); // Act. 
- var actual = await sut.GetBatchAsync( + var actual = await sut.GetAsync( [TestRecordKey1, TestRecordKey2], new() { IncludeVectors = true }, this._testCancellationToken).ToListAsync(); @@ -368,7 +368,7 @@ public async Task CanDeleteManyRecordsWithVectorsAsync(bool useDefinition) var sut = this.CreateRecordCollection(useDefinition); // Act. - await sut.DeleteBatchAsync( + await sut.DeleteAsync( [TestRecordKey1, TestRecordKey2], cancellationToken: this._testCancellationToken); @@ -455,7 +455,7 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition) var model2 = CreateModel(TestRecordKey2, true); // Act. - var actual = await sut.UpsertBatchAsync( + var actual = await sut.UpsertAsync( [model1, model2], cancellationToken: this._testCancellationToken).ToListAsync(); diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 60e2584bf754..30c380c0a9fa 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -264,7 +264,7 @@ public async Task DeleteBatchInvokesValidMethodsAsync() var expectedDefinition = Builders.Filter.In(document => document["_id"].AsString, recordKeys); // Act - await sut.DeleteBatchAsync(recordKeys); + await sut.DeleteAsync(recordKeys); // Assert this._mockMongoCollection.Verify(l => l.DeleteManyAsync( @@ -359,7 +359,7 @@ public async Task GetBatchReturnsValidRecordAsync() "collection"); // Act - var results = await sut.GetBatchAsync(["key1", "key2", "key3"]).ToListAsync(); + var results = await sut.GetAsync(["key1", "key2", "key3"]).ToListAsync(); // Assert Assert.NotNull(results[0]); @@ -418,7 +418,7 @@ public async Task UpsertBatchReturnsRecordKeysAsync() "collection"); // Act - var results = await sut.UpsertBatchAsync([hotel1, hotel2, hotel3]).ToListAsync(); + var results = await sut.UpsertAsync([hotel1, hotel2, hotel3]).ToListAsync(); // Assert Assert.NotNull(results); diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 6f33a19e0b28..37c72f38ace1 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -279,7 +279,7 @@ public async Task DeleteBatchInvokesValidMethodsAsync() "collection"); // Act - await sut.DeleteBatchAsync(recordKeys); + await sut.DeleteAsync(recordKeys); // Assert foreach (var key in recordKeys) @@ -389,7 +389,7 @@ public async Task GetBatchReturnsValidRecordAsync() "collection"); // Act - var results = await sut.GetBatchAsync(["key1", "key2", "key3"]).ToListAsync(); + var results = await sut.GetAsync(["key1", "key2", "key3"]).ToListAsync(); // Assert Assert.NotNull(results[0]); @@ -444,7 +444,7 @@ public async Task UpsertBatchReturnsRecordKeysAsync() "collection"); // Act - var results = await sut.UpsertBatchAsync([hotel1, hotel2, hotel3]).ToListAsync(); + var results = await sut.UpsertAsync([hotel1, hotel2, hotel3]).ToListAsync(); // Assert 
Assert.NotNull(results); diff --git a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs index b6ac78086915..8a84216757f7 100644 --- a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs @@ -137,7 +137,7 @@ public async Task CanGetManyRecordsWithVectorsAsync(bool useDefinition, TK var sut = this.CreateRecordCollection(useDefinition); // Act - var actual = await sut.GetBatchAsync( + var actual = await sut.GetAsync( [testKey1, testKey2], new() { @@ -201,7 +201,7 @@ public async Task CanDeleteManyRecordsWithVectorsAsync(bool useDefinition, var sut = this.CreateRecordCollection(useDefinition); // Act - await sut.DeleteBatchAsync( + await sut.DeleteAsync( [testKey1, testKey2], cancellationToken: this._testCancellationToken); @@ -255,7 +255,7 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition, TKey testK var sut = this.CreateRecordCollection(useDefinition); // Act - var actual = await sut.UpsertBatchAsync( + var actual = await sut.UpsertAsync( [record1, record2], cancellationToken: this._testCancellationToken).ToListAsync(); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index 9103cd5f2a2d..c6796e3627a2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -258,7 +258,7 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = } /// - public virtual async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? 
options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -290,7 +290,7 @@ public virtual Task DeleteAsync(string key, CancellationToken cancellationToken } /// - public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); if (!keys.Any()) @@ -318,7 +318,7 @@ public virtual async Task UpsertAsync(TRecord record, CancellationToken } /// - public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); if (!records.Any()) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index 13e31475447d..b45c9213a789 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -137,7 +137,7 @@ await this.RunOperationAsync("DeleteOne", () => this._mongoCollection.DeleteOneA } /// - public virtual async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -180,7 +180,7 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = } /// - public virtual async IAsyncEnumerable GetBatchAsync( + public virtual async IAsyncEnumerable GetAsync( IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -236,7 +236,7 @@ await this._mongoCollection } /// - public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index dd0e245c8004..b6e0200a6137 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -228,7 +228,7 @@ public virtual Task DeleteAsync(string key, CancellationToken cancellationToken } /// - public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { // Use record keys as partition keys var compositeKeys = keys.Select(key => new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key)); @@ -248,7 +248,7 @@ public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken } /// - public virtual async IAsyncEnumerable GetBatchAsync( + public virtual async IAsyncEnumerable GetAsync( IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -274,7 +274,7 @@ public virtual async Task UpsertAsync(TRecord record, CancellationToken } /// - public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -304,7 +304,7 @@ public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable - public virtual async IAsyncEnumerable GetBatchAsync( + public virtual async IAsyncEnumerable GetAsync( IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -325,7 +325,7 @@ public virtual Task DeleteAsync(AzureCosmosDBNoSQLCompositeKey key, Cancellation } /// - public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { return this.InternalDeleteAsync(keys, cancellationToken); } @@ -337,7 +337,7 @@ Task IVectorStoreRecordCollection - async IAsyncEnumerable IVectorStoreRecordCollection.UpsertBatchAsync( + async IAsyncEnumerable IVectorStoreRecordCollection.UpsertAsync( IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken) { diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index d0bd5bd309c9..933abb888f61 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -159,7 +159,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { foreach (var key in keys) { @@ -182,7 +182,7 @@ public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) } /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { var collectionDictionary = this.GetCollectionDictionary(); @@ -208,7 +208,7 @@ public Task UpsertAsync(TRecord record, CancellationToken cancellationToke } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { foreach (var record in records) { diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index dc2aa163a803..f9e2fa091642 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -145,7 +145,7 @@ await this.RunOperationAsync("DeleteOne", () => this._mongoCollection.DeleteOneA } /// - public virtual async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -188,7 +188,7 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = } /// - public virtual async IAsyncEnumerable GetBatchAsync( + public virtual async IAsyncEnumerable GetAsync( IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -244,7 +244,7 @@ await this._mongoCollection } /// - public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 3da753575141..080d22ed9b4f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -190,7 +190,7 @@ public virtual async Task DeleteCollectionAsync(CancellationToken cancellationTo } /// - public virtual async IAsyncEnumerable GetBatchAsync( + public virtual async IAsyncEnumerable GetAsync( IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -247,7 +247,7 @@ public virtual Task DeleteAsync(string key, CancellationToken cancellationToken } /// - public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -293,7 +293,7 @@ await this.RunIndexOperationAsync( } /// - public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index ce619398bf99..6d731a05153d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -166,7 +166,7 @@ public virtual Task UpsertAsync(TRecord record, CancellationToken cancella } /// - public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -215,7 +215,7 @@ await this.RunOperationAsync(OperationName, () => } /// - public virtual IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public virtual IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? 
options = null, CancellationToken cancellationToken = default) { const string OperationName = "GetBatch"; @@ -248,7 +248,7 @@ public virtual Task DeleteAsync(TKey key, CancellationToken cancellationToken = } /// - public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index 760aeaae24f4..d32713c4111c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -264,7 +264,7 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = { Verify.NotNull(key); - var retrievedPoints = await this.GetBatchAsync([key], options, cancellationToken).ToListAsync(cancellationToken).ConfigureAwait(false); + var retrievedPoints = await this.GetAsync([key], options, cancellationToken).ToListAsync(cancellationToken).ConfigureAwait(false); return retrievedPoints.FirstOrDefault(); } @@ -273,18 +273,18 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = { Verify.NotNull(key); - var retrievedPoints = await this.GetBatchAsync([key], options, cancellationToken).ToListAsync(cancellationToken).ConfigureAwait(false); + var retrievedPoints = await this.GetAsync([key], options, cancellationToken).ToListAsync(cancellationToken).ConfigureAwait(false); return retrievedPoints.FirstOrDefault(); } /// - public virtual IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default) + public virtual IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default) { return this.GetBatchByPointIdAsync(keys, key => new PointId { Num = key }, options, cancellationToken); } /// - public virtual IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default) + public virtual IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? 
options = default, CancellationToken cancellationToken = default) { return this.GetBatchByPointIdAsync(keys, key => new PointId { Uuid = key.ToString("D") }, options, cancellationToken); } @@ -318,7 +318,7 @@ public virtual Task DeleteAsync(Guid key, CancellationToken cancellationToken = } /// - public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -332,7 +332,7 @@ public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken } /// - public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -384,7 +384,7 @@ await this.RunOperationAsync( } /// - public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -407,7 +407,7 @@ await this.RunOperationAsync( } /// - async IAsyncEnumerable IVectorStoreRecordCollection.UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken) + async IAsyncEnumerable IVectorStoreRecordCollection.UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken) { Verify.NotNull(records); diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index a08fe1e86628..650bbce15586 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -261,7 +261,7 @@ await this.RunOperationAsync("FT.DROPINDEX", } /// - public virtual async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? 
options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -293,7 +293,7 @@ public virtual Task DeleteAsync(string key, CancellationToken cancellationToken } /// - public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -328,7 +328,7 @@ await this.RunOperationAsync( } /// - public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index af6a0a7d220f..941f18e0f7a4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -244,7 +244,7 @@ await this.RunOperationAsync("FT.DROPINDEX", } /// - public virtual async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(keys); var keysList = keys.ToList(); @@ -310,7 +310,7 @@ public virtual Task DeleteAsync(string key, CancellationToken cancellationToken } /// - public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -351,7 +351,7 @@ await this.RunOperationAsync( } /// - public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index 9b4ce3b29078..891e65410fad 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -172,7 +172,7 @@ await ExceptionWrapper.WrapAsync(connection, command, } /// - public async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -271,7 +271,7 @@ static async (cmd, ct) => } /// - public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, + public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -343,7 +343,7 @@ async static (cmd, ct) => } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 16dbd7238aca..07df93d008d5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -238,7 +238,7 @@ public virtual Task> VectorizedSearchAsync } /// - public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); await foreach (var record in this.InternalGetBatchAsync(connection, keys, options, cancellationToken).ConfigureAwait(false)) @@ -255,7 +255,7 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellat } /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); await foreach (var record in this.InternalUpsertBatchAsync(connection, records, cancellationToken) @@ -273,7 +273,7 @@ public async Task DeleteAsync(ulong key, CancellationToken cancellationToken = d } /// - public async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); await this.InternalDeleteBatchAsync(connection, keys, cancellationToken).ConfigureAwait(false); @@ -291,7 +291,7 @@ public async Task DeleteBatchAsync(IEnumerable keys, CancellationToken ca } /// - public async IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); await foreach (var record in this.InternalGetBatchAsync(connection, keys, options, cancellationToken).ConfigureAwait(false)) @@ -309,7 +309,7 @@ async Task IVectorStoreRecordCollection.UpsertAsync(TRe } /// - async IAsyncEnumerable IVectorStoreRecordCollection.UpsertBatchAsync( + async IAsyncEnumerable IVectorStoreRecordCollection.UpsertAsync( IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken) { @@ -330,7 +330,7 @@ await this.InternalDeleteAsync(connection, key, cancellationToken) } /// - public async Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); await this.InternalDeleteBatchAsync(connection, keys, cancellationToken).ConfigureAwait(false); diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index 42b1e56fe4ce..bef5d217e356 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -224,7 +224,7 @@ public virtual Task DeleteAsync(Guid key, CancellationToken cancellationToken = } /// - public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { const string OperationName = "DeleteObjectBatch"; const string ContainsAnyOperator = "ContainsAny"; @@ -274,7 +274,7 @@ public virtual Task DeleteBatchAsync(IEnumerable keys, CancellationToken c } /// - public virtual async IAsyncEnumerable GetBatchAsync( + public virtual async IAsyncEnumerable GetAsync( IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -295,13 +295,13 @@ public virtual async IAsyncEnumerable GetBatchAsync( /// public virtual async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { - return await this.UpsertBatchAsync([record], cancellationToken) + return await this.UpsertAsync([record], cancellationToken) .FirstOrDefaultAsync(cancellationToken) .ConfigureAwait(false); } /// - public virtual async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { const string OperationName = "UpsertCollectionObject"; diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs index ddf71955621e..18cc1a999c28 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs @@ -264,7 +264,7 @@ public async Task DeleteBatchInvokesValidMethodsAsync() var expectedDefinition = Builders.Filter.In(document => document["_id"].AsString, recordKeys); // Act - await sut.DeleteBatchAsync(recordKeys); + await sut.DeleteAsync(recordKeys); // Assert this._mockMongoCollection.Verify(l => l.DeleteManyAsync( @@ -359,7 +359,7 @@ public async Task GetBatchReturnsValidRecordAsync() "collection"); // Act - var results = await sut.GetBatchAsync(["key1", "key2", "key3"]).ToListAsync(); + var results = await sut.GetAsync(["key1", "key2", "key3"]).ToListAsync(); // Assert Assert.NotNull(results[0]); @@ -418,7 +418,7 @@ public async Task UpsertBatchReturnsRecordKeysAsync() "collection"); // Act - var results = await sut.UpsertBatchAsync([hotel1, hotel2, hotel3]).ToListAsync(); + var results = await sut.UpsertAsync([hotel1, hotel2, hotel3]).ToListAsync(); // Assert Assert.NotNull(results); diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs index 3d071066ae2b..e27674b8f3d7 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs @@ -226,7 +226,7 @@ public async Task CanGetManyRecordsWithVectorsAsync(bool useDefinition, bo this.SetupRetrieveMock(testRecordKeys.Select(x => CreateRetrievedPoint(hasNamedVectors, x)).ToList()); // Act. 
- var actual = await sut.GetBatchAsync( + var actual = await sut.GetAsync( testRecordKeys, new() { IncludeVectors = true }, this._testCancellationToken).ToListAsync(); @@ -369,7 +369,7 @@ public async Task CanDeleteManyUlongRecordsAsync(bool useDefinition, bool hasNam this.SetupDeleteMocks(); // Act - await sut.DeleteBatchAsync( + await sut.DeleteAsync( [UlongTestRecordKey1, UlongTestRecordKey2], cancellationToken: this._testCancellationToken); @@ -398,7 +398,7 @@ public async Task CanDeleteManyGuidRecordsAsync(bool useDefinition, bool hasName this.SetupDeleteMocks(); // Act - await sut.DeleteBatchAsync( + await sut.DeleteAsync( [s_guidTestRecordKey1, s_guidTestRecordKey2], cancellationToken: this._testCancellationToken); @@ -454,7 +454,7 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition, bool hasNa var models = testRecordKeys.Select(x => CreateModel(x, true)); // Act - var actual = await sut.UpsertBatchAsync( + var actual = await sut.UpsertAsync( models, cancellationToken: this._testCancellationToken).ToListAsync(); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs index 117d3d1fcd4b..e8ae022b6fc6 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -216,7 +216,7 @@ public async Task CanGetManyRecordsWithVectorsAsync(bool useDefinition) var sut = this.CreateRecordCollection(useDefinition); // Act - var actual = await sut.GetBatchAsync( + var actual = await sut.GetAsync( [TestRecordKey1, TestRecordKey2], new() { IncludeVectors = true }).ToListAsync(); @@ -311,7 +311,7 @@ public async Task CanDeleteManyRecordsWithVectorsAsync(bool useDefinition) var sut = this.CreateRecordCollection(useDefinition); // Act - await sut.DeleteBatchAsync([TestRecordKey1, TestRecordKey2]); + await sut.DeleteAsync([TestRecordKey1, TestRecordKey2]); // Assert this._redisDatabaseMock.Verify(x => x.KeyDeleteAsync(TestRecordKey1, CommandFlags.None), Times.Once); @@ -353,7 +353,7 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition) var model2 = CreateModel(TestRecordKey2, true); // Act - var actual = await sut.UpsertBatchAsync([model1, model2]).ToListAsync(); + var actual = await sut.UpsertAsync([model1, model2]).ToListAsync(); // Assert Assert.NotNull(actual); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs index aa47dc512b8c..3277f802cc66 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs @@ -229,7 +229,7 @@ public async Task CanGetManyRecordsWithVectorsAsync(bool useDefinition) var sut = this.CreateRecordCollection(useDefinition); // Act - var actual = await sut.GetBatchAsync( + var actual = await sut.GetAsync( [TestRecordKey1, TestRecordKey2], new() { IncludeVectors = true }).ToListAsync(); @@ -331,7 +331,7 @@ public async Task CanDeleteManyRecordsWithVectorsAsync(bool useDefinition) var sut = this.CreateRecordCollection(useDefinition); // Act - await sut.DeleteBatchAsync([TestRecordKey1, TestRecordKey2]); + await sut.DeleteAsync([TestRecordKey1, TestRecordKey2]); 
// Assert var expectedArgs1 = new object[] { TestRecordKey1 }; @@ -389,7 +389,7 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition) var model2 = CreateModel(TestRecordKey2, true); // Act - var actual = await sut.UpsertBatchAsync([model1, model2]).ToListAsync(); + var actual = await sut.UpsertAsync([model1, model2]).ToListAsync(); // Assert Assert.NotNull(actual); diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs index 0b3e39cac291..a77944379b5a 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs @@ -195,7 +195,7 @@ public async Task DeleteBatchUsesValidQueryMatchAsync() var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); // Act - await sut.DeleteBatchAsync(ids); + await sut.DeleteAsync(ids); // Assert var request = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); @@ -261,7 +261,7 @@ public async Task GetExistingBatchRecordsReturnsValidRecordsAsync() var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); // Act - var results = await sut.GetBatchAsync([id1, id2]).ToListAsync(); + var results = await sut.GetAsync([id1, id2]).ToListAsync(); // Assert Assert.NotNull(results[0]); @@ -329,7 +329,7 @@ public async Task UpsertReturnsRecordKeysAsync() var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); // Act - var results = await sut.UpsertBatchAsync([hotel1, hotel2]).ToListAsync(); + var results = await sut.UpsertAsync([hotel1, hotel2]).ToListAsync(); // Assert Assert.Contains(id1, results); diff --git a/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml index cd9bfbaa3ca7..332b2c435565 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml +++ b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml @@ -92,6 +92,13 @@ lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetBatchAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.GetRecordOptions,System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0002 M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(`1,Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) @@ -134,6 +141,13 @@ lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetBatchAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.GetRecordOptions,System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0002 M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(`1,Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) @@ -176,6 +190,13 @@ lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true + 
+ CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetBatchAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.GetRecordOptions,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0002 M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(`1,Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) @@ -213,7 +234,14 @@ CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.GetRecordOptions,System.Threading.CancellationToken) lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true @@ -227,7 +255,7 @@ CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(System.Collections.Generic.IEnumerable{`1},System.Threading.CancellationToken) lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true @@ -255,7 +283,14 @@ CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.GetRecordOptions,System.Threading.CancellationToken) lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true @@ -269,7 +304,7 @@ CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(System.Collections.Generic.IEnumerable{`1},System.Threading.CancellationToken) lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true @@ -297,7 +332,14 @@ CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + 
lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.GetRecordOptions,System.Threading.CancellationToken) lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true @@ -311,7 +353,7 @@ CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(System.Collections.Generic.IEnumerable{`1},System.Threading.CancellationToken) lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetRecordOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetRecordOptions.cs index e623cb676247..a6bf3d9f3b12 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetRecordOptions.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetRecordOptions.cs @@ -1,9 +1,13 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Threading; + namespace Microsoft.Extensions.VectorData; /// -/// Defines options for calling . +/// Defines options for calling +/// or . /// public class GetRecordOptions { diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs index f891dcba26b0..b834176834f3 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs @@ -78,7 +78,7 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch /// /// The command fails to execute for any reason. /// The mapping between the storage model and record data model fails. - IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default); + IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default); /// /// Deletes a record from the vector store. Does not guarantee that the collection exists. @@ -101,7 +101,7 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch /// If any record can't be deleted for any other reason, the operation throws. Some records might have already been deleted while others might not have, so the entire operation should be retried. /// /// The command fails to execute for any reason other than that a record does not exist. - Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default); + Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default); /// /// Upserts a record into the vector store. Does not guarantee that the collection exists. @@ -116,7 +116,7 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default); /// - /// Upserts a group of records into the vector store. Does not guarantee that the collection exists. 
+ /// Upserts a batch of records into the vector store. Does not guarantee that the collection exists. /// If the record already exists, it is updated. /// If the record does not exist, it is created. /// @@ -128,5 +128,5 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch /// /// The command fails to execute for any reason. /// The mapping between the storage model and record data model fails. - IAsyncEnumerable UpsertBatchAsync(IEnumerable records, CancellationToken cancellationToken = default); + IAsyncEnumerable UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs index 8fa45147398b..115740f99576 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -165,7 +165,7 @@ public async Task ItCanUpsertManyDocumentsToVectorStoreAsync() var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); // Act - var results = sut.UpsertBatchAsync( + var results = sut.UpsertAsync( [ await this.CreateTestHotelAsync("UpsertMany-1"), await this.CreateTestHotelAsync("UpsertMany-2"), @@ -237,7 +237,7 @@ public async Task ItCanGetManyDocumentsFromVectorStoreAsync() // Act // Also include one non-existing key to test that the operation does not fail for these and returns only the found ones. - var hotels = sut.GetBatchAsync(["BaseSet-1", "BaseSet-2", "BaseSet-3", "BaseSet-5", "BaseSet-4"], new GetRecordOptions { IncludeVectors = true }); + var hotels = sut.GetAsync(["BaseSet-1", "BaseSet-2", "BaseSet-3", "BaseSet-5", "BaseSet-4"], new GetRecordOptions { IncludeVectors = true }); // Assert Assert.NotNull(hotels); @@ -284,7 +284,7 @@ public async Task ItCanRemoveManyDocumentsFromVectorStoreAsync() // Act // Also include a non-existing key to test that the operation does not fail for these. 
- await sut.DeleteBatchAsync(["RemoveMany-1", "RemoveMany-2", "RemoveMany-3", "RemoveMany-4"]); + await sut.DeleteAsync(["RemoveMany-1", "RemoveMany-2", "RemoveMany-3", "RemoveMany-4"]); // Assert Assert.Null(await sut.GetAsync("RemoveMany-1", new GetRecordOptions { IncludeVectors = true })); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index f873991177d3..73f1799a5706 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -157,8 +157,8 @@ public async Task ItCanGetAndDeleteBatchAsync() var record2 = this.CreateTestHotel(HotelId2); var record3 = this.CreateTestHotel(HotelId3); - var upsertResults = await sut.UpsertBatchAsync([record1, record2, record3]).ToListAsync(); - var getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + var upsertResults = await sut.UpsertAsync([record1, record2, record3]).ToListAsync(); + var getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); Assert.Equal([HotelId1, HotelId2, HotelId3], upsertResults); @@ -167,9 +167,9 @@ public async Task ItCanGetAndDeleteBatchAsync() Assert.NotNull(getResults.First(l => l.HotelId == HotelId3)); // Act - await sut.DeleteBatchAsync([HotelId1, HotelId2, HotelId3]); + await sut.DeleteAsync([HotelId1, HotelId2, HotelId3]); - getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); // Assert Assert.Empty(getResults); @@ -340,7 +340,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f])); @@ -371,7 +371,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() @@ -404,7 +404,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 546b957c68ae..4bff2e354b1b 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ 
b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -212,8 +212,8 @@ public async Task ItCanGetAndDeleteBatchAsync() var record2 = this.CreateTestHotel(HotelId2); var record3 = this.CreateTestHotel(HotelId3); - var upsertResults = await sut.UpsertBatchAsync([record1, record2, record3]).ToListAsync(); - var getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + var upsertResults = await sut.UpsertAsync([record1, record2, record3]).ToListAsync(); + var getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); Assert.Equal([HotelId1, HotelId2, HotelId3], upsertResults); @@ -222,9 +222,9 @@ public async Task ItCanGetAndDeleteBatchAsync() Assert.NotNull(getResults.First(l => l.HotelId == HotelId3)); // Act - await sut.DeleteBatchAsync([HotelId1, HotelId2, HotelId3]); + await sut.DeleteAsync([HotelId1, HotelId2, HotelId3]); - getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); // Assert Assert.Empty(getResults); @@ -273,7 +273,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f])); @@ -304,7 +304,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() @@ -338,7 +338,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync(VectorSearc await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs index cf29e88625ab..2305590062e4 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs @@ -93,7 +93,7 @@ public async Task VectorSearchShouldReturnExpectedScoresAsync(string distanceFun Vector = orthogonalVector, }; - await sut.UpsertBatchAsync([baseRecord, oppositeRecord, orthogonalRecord]).ToListAsync(); + await sut.UpsertAsync([baseRecord, oppositeRecord, orthogonalRecord]).ToListAsync(); await Task.Delay(this.DelayAfterUploadInMilliseconds); // Act diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs index c8cab7cb477e..83732c19c620 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs +++ 
b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs @@ -158,8 +158,8 @@ public async Task ItCanGetAndDeleteBatchAsync() var record2 = this.CreateTestHotel(HotelId2); var record3 = this.CreateTestHotel(HotelId3); - var upsertResults = await sut.UpsertBatchAsync([record1, record2, record3]).ToListAsync(); - var getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + var upsertResults = await sut.UpsertAsync([record1, record2, record3]).ToListAsync(); + var getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); Assert.Equal([HotelId1, HotelId2, HotelId3], upsertResults); @@ -168,9 +168,9 @@ public async Task ItCanGetAndDeleteBatchAsync() Assert.NotNull(getResults.First(l => l.HotelId == HotelId3)); // Act - await sut.DeleteBatchAsync([HotelId1, HotelId2, HotelId3]); + await sut.DeleteAsync([HotelId1, HotelId2, HotelId3]); - getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); // Assert Assert.Empty(getResults); @@ -341,7 +341,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f])); @@ -372,7 +372,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() @@ -405,7 +405,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs index a80519c85a57..f322339faaed 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs @@ -192,8 +192,8 @@ public async Task ItCanGetUpsertDeleteBatchAsync() var record2 = new PostgresHotel { HotelId = HotelId2, HotelName = "Hotel 2", HotelCode = 1, ParkingIncluded = false, HotelRating = 3.5f, Tags = ["tag1", "tag3"] }; var record3 = new PostgresHotel { HotelId = HotelId3, HotelName = "Hotel 3", HotelCode = 1, ParkingIncluded = true, HotelRating = 2.5f, Tags = ["tag1", "tag4"] }; - var upsertResults = await sut.UpsertBatchAsync([record1, record2, record3]).ToListAsync(); - var getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + var upsertResults = await sut.UpsertAsync([record1, record2, record3]).ToListAsync(); + var getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); Assert.Equal([HotelId1, HotelId2, HotelId3], 
upsertResults); @@ -202,9 +202,9 @@ public async Task ItCanGetUpsertDeleteBatchAsync() Assert.NotNull(getResults.First(l => l.HotelId == HotelId3)); // Act - await sut.DeleteBatchAsync([HotelId1, HotelId2, HotelId3]); + await sut.DeleteAsync([HotelId1, HotelId2, HotelId3]); - getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); // Assert Assert.Empty(getResults); @@ -364,7 +364,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync(bool include await sut.CreateCollectionAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([0.9f, 0.1f, 0.5f, 0.8f]), new() @@ -402,7 +402,7 @@ public async Task VectorizedSearchWithEqualToFilterReturnsValidResultsAsync() await sut.CreateCollectionAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 29f, 28f, 27f]), new() @@ -435,7 +435,7 @@ public async Task VectorizedSearchWithAnyTagFilterReturnsValidResultsAsync() await sut.CreateCollectionAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 29f, 28f, 27f]), new() diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs index c34128bfdcbe..727cb3185257 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs @@ -289,7 +289,7 @@ public async Task ItCanGetManyDocumentsFromVectorStoreAsync() // Act // Also include one non-existing key to test that the operation does not fail for these and returns only the found ones. - var hotels = sut.GetBatchAsync([11, 15, 12], new GetRecordOptions { IncludeVectors = true }); + var hotels = sut.GetAsync([11, 15, 12], new GetRecordOptions { IncludeVectors = true }); // Assert Assert.NotNull(hotels); @@ -348,7 +348,7 @@ public async Task ItCanRemoveManyDocumentsFromVectorStoreAsync(bool useRecordDef // Act. // Also delete a non-existing key to test that the operation does not fail for these. - await sut.DeleteBatchAsync([20, 21]); + await sut.DeleteAsync([20, 21]); // Assert. Assert.Null(await sut.GetAsync(20)); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs index 91723c852047..af0a2382c7dd 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -168,7 +168,7 @@ public async Task ItCanUpsertManyDocumentsToVectorStoreAsync(bool useRecordDefin var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); // Act. 
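
// The vectorized-search assertions in the tests above all follow the same calling shape. A minimal
// sketch of that shape (search options are omitted here; Record and Score on the result item are
// assumed from the abstractions rather than shown in this patch, and the record type is illustrative):
using System;
using System.Threading.Tasks;
using Microsoft.Extensions.VectorData;

internal static class VectorSearchSketch
{
    public sealed class HotelDoc
    {
        public string HotelId { get; set; } = string.Empty;
    }

    public static async Task SearchAsync(IVectorStoreRecordCollection<string, HotelDoc> collection)
    {
        // The query vector must match the dimensionality of the stored vectors (4 in the tests above).
        var queryVector = new ReadOnlyMemory<float>([30f, 31f, 32f, 33f]);

        var searchResults = await collection.VectorizedSearchAsync(queryVector);

        // Results are streamed back as an async enumerable of scored records.
        await foreach (var result in searchResults.Results)
        {
            Console.WriteLine($"{result.Record.HotelId}: {result.Score}");
        }
    }
}
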
- var results = sut.UpsertBatchAsync( + var results = sut.UpsertAsync( [ CreateTestHotel("HUpsertMany-1", 1), CreateTestHotel("HUpsertMany-2", 2), @@ -238,7 +238,7 @@ public async Task ItCanGetManyDocumentsFromVectorStoreAsync() // Act // Also include one non-existing key to test that the operation does not fail for these and returns only the found ones. - var hotels = sut.GetBatchAsync(["HBaseSet-1", "HBaseSet-5", "HBaseSet-2"], new GetRecordOptions { IncludeVectors = true }); + var hotels = sut.GetAsync(["HBaseSet-1", "HBaseSet-5", "HBaseSet-2"], new GetRecordOptions { IncludeVectors = true }); // Assert Assert.NotNull(hotels); @@ -296,7 +296,7 @@ public async Task ItCanRemoveManyDocumentsFromVectorStoreAsync() // Act // Also include a non-existing key to test that the operation does not fail for these. - await sut.DeleteBatchAsync(["HRemoveMany-1", "HRemoveMany-2", "HRemoveMany-3", "HRemoveMany-4"]); + await sut.DeleteAsync(["HRemoveMany-1", "HRemoveMany-2", "HRemoveMany-3", "HRemoveMany-4"]); // Assert Assert.Null(await sut.GetAsync("HRemoveMany-1", new GetRecordOptions { IncludeVectors = true })); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs index 266948738ef6..611c74593d18 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs @@ -182,7 +182,7 @@ public async Task ItCanUpsertManyDocumentsToVectorStoreAsync(bool useRecordDefin var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); // Act. - var results = sut.UpsertBatchAsync( + var results = sut.UpsertAsync( [ CreateTestHotel("UpsertMany-1", 1), CreateTestHotel("UpsertMany-2", 2), @@ -256,7 +256,7 @@ public async Task ItCanGetManyDocumentsFromVectorStoreAsync() // Act // Also include one non-existing key to test that the operation does not fail for these and returns only the found ones. - var hotels = sut.GetBatchAsync(["BaseSet-1", "BaseSet-5", "BaseSet-2"], new GetRecordOptions { IncludeVectors = true }); + var hotels = sut.GetAsync(["BaseSet-1", "BaseSet-5", "BaseSet-2"], new GetRecordOptions { IncludeVectors = true }); // Assert Assert.NotNull(hotels); @@ -326,7 +326,7 @@ public async Task ItCanRemoveManyDocumentsFromVectorStoreAsync() // Act // Also include a non-existing key to test that the operation does not fail for these. 
- await sut.DeleteBatchAsync(["RemoveMany-1", "RemoveMany-2", "RemoveMany-3", "RemoveMany-4"]); + await sut.DeleteAsync(["RemoveMany-1", "RemoveMany-2", "RemoveMany-3", "RemoveMany-4"]); // Assert Assert.Null(await sut.GetAsync("RemoveMany-1", new GetRecordOptions { IncludeVectors = true })); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs index 76e05b71d0d9..d910cec21cf5 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs @@ -178,8 +178,8 @@ public async Task ItCanGetUpsertDeleteBatchWithNumericKeyAsync() var record2 = CreateTestHotel(HotelId2); var record3 = CreateTestHotel(HotelId3); - var upsertResults = await sut.UpsertBatchAsync([record1, record2, record3]).ToListAsync(); - var getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + var upsertResults = await sut.UpsertAsync([record1, record2, record3]).ToListAsync(); + var getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); Assert.Equal([HotelId1, HotelId2, HotelId3], upsertResults); @@ -188,9 +188,9 @@ public async Task ItCanGetUpsertDeleteBatchWithNumericKeyAsync() Assert.NotNull(getResults.First(l => l.HotelId == HotelId3)); // Act - await sut.DeleteBatchAsync([HotelId1, HotelId2, HotelId3]); + await sut.DeleteAsync([HotelId1, HotelId2, HotelId3]); - getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); // Assert Assert.Empty(getResults); @@ -212,8 +212,8 @@ public async Task ItCanGetUpsertDeleteBatchWithStringKeyAsync() var record2 = CreateTestHotel(HotelId2); var record3 = CreateTestHotel(HotelId3); - var upsertResults = await sut.UpsertBatchAsync([record1, record2, record3]).ToListAsync(); - var getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + var upsertResults = await sut.UpsertAsync([record1, record2, record3]).ToListAsync(); + var getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); Assert.Equal([HotelId1, HotelId2, HotelId3], upsertResults); @@ -222,9 +222,9 @@ public async Task ItCanGetUpsertDeleteBatchWithStringKeyAsync() Assert.NotNull(getResults.First(l => l.HotelId == HotelId3)); // Act - await sut.DeleteBatchAsync([HotelId1, HotelId2, HotelId3]); + await sut.DeleteAsync([HotelId1, HotelId2, HotelId3]); - getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); // Assert Assert.Empty(getResults); @@ -350,7 +350,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync(bool include await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() @@ -387,7 +387,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // 
Act var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() @@ -421,7 +421,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs index 494967b21fc7..64646640b1df 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs @@ -160,8 +160,8 @@ public async Task ItCanUpsertAndGetAndDeleteBatchAsync() var record2 = this.CreateTestHotel(hotelId2); var record3 = this.CreateTestHotel(hotelId3); - var upsertResults = await sut.UpsertBatchAsync([record1, record2, record3]).ToListAsync(); - var getResults = await sut.GetBatchAsync([hotelId1, hotelId2, hotelId3]).ToListAsync(); + var upsertResults = await sut.UpsertAsync([record1, record2, record3]).ToListAsync(); + var getResults = await sut.GetAsync([hotelId1, hotelId2, hotelId3]).ToListAsync(); Assert.Equal([hotelId1, hotelId2, hotelId3], upsertResults); @@ -170,9 +170,9 @@ public async Task ItCanUpsertAndGetAndDeleteBatchAsync() Assert.NotNull(getResults.First(l => l.HotelId == hotelId3)); // Act - await sut.DeleteBatchAsync([hotelId1, hotelId2, hotelId3]); + await sut.DeleteAsync([hotelId1, hotelId2, hotelId3]); - getResults = await sut.GetBatchAsync([hotelId1, hotelId2, hotelId3]).ToListAsync(); + getResults = await sut.GetAsync([hotelId1, hotelId2, hotelId3]).ToListAsync(); // Assert Assert.Empty(getResults); @@ -223,7 +223,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync(bool include await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() @@ -261,7 +261,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() @@ -295,7 +295,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync(VectorSearc await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() @@ -340,7 +340,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAndDifferentDataT await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertBatchAsync([hotel4, hotel2, hotel5, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel5, hotel3, hotel1]).ToListAsync(); // Act var actual = await 
sut.VectorizedSearchAsync(new ReadOnlyMemory([40f, 40f, 40f, 40f]), new() diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeAllSupportedTypesTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeAllSupportedTypesTests.cs index e1dbf9efa81e..10f0f5c858cf 100644 --- a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeAllSupportedTypesTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeAllSupportedTypesTests.cs @@ -61,9 +61,9 @@ public async Task AllTypesBatchGetAsync() } ]; - await collection.UpsertBatchAsync(records).ToArrayAsync(); + await collection.UpsertAsync(records).ToArrayAsync(); - var allTypes = await collection.GetBatchAsync(records.Select(r => r.Id), new GetRecordOptions { IncludeVectors = true }).ToListAsync(); + var allTypes = await collection.GetAsync(records.Select(r => r.Id), new GetRecordOptions { IncludeVectors = true }).ToListAsync(); var allTypes1 = allTypes.Single(x => x.Id == records[0].Id); var allTypes2 = allTypes.Single(x => x.Id == records[1].Id); diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs index c2d71d49281b..b9e6f54eb752 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs @@ -34,18 +34,18 @@ private async Task CanSplitBatchToAccountForMaxParameterLimit(bool includeVector }).ToArray(); var keys = inserted.Select(record => record.Id).ToArray(); - Assert.Empty(await collection.GetBatchAsync(keys).ToArrayAsync()); - var receivedKeys = await collection.UpsertBatchAsync(inserted).ToArrayAsync(); + Assert.Empty(await collection.GetAsync(keys).ToArrayAsync()); + var receivedKeys = await collection.UpsertAsync(inserted).ToArrayAsync(); Assert.Equal(keys.ToHashSet(), receivedKeys.ToHashSet()); // .ToHashSet() to ignore order - var received = await collection.GetBatchAsync(keys, new() { IncludeVectors = includeVectors }).ToArrayAsync(); + var received = await collection.GetAsync(keys, new() { IncludeVectors = includeVectors }).ToArrayAsync(); foreach (var record in inserted) { record.AssertEqual(this.GetRecord(received, record.Id), includeVectors); } - await collection.DeleteBatchAsync(keys); - Assert.Empty(await collection.GetBatchAsync(keys).ToArrayAsync()); + await collection.DeleteAsync(keys); + Assert.Empty(await collection.GetAsync(keys).ToArrayAsync()); } [ConditionalFact] @@ -62,13 +62,13 @@ public async Task UpsertBatchIsAtomic() }).ToArray(); var keys = inserted.Select(record => record.Id).Where(key => key is not null).ToArray(); - Assert.Empty(await collection.GetBatchAsync(keys).ToArrayAsync()); + Assert.Empty(await collection.GetAsync(keys).ToArrayAsync()); - VectorStoreOperationException ex = await Assert.ThrowsAsync(() => collection.UpsertBatchAsync(inserted).ToArrayAsync().AsTask()); + VectorStoreOperationException ex = await Assert.ThrowsAsync(() => collection.UpsertAsync(inserted).ToArrayAsync().AsTask()); Assert.Equal("UpsertBatch", ex.OperationName); Assert.Equal(collection.CollectionName, ex.CollectionName); // Make sure that no records were inserted! 
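
// The SQL Server conformance test above relies on a failed batch upsert surfacing as a
// VectorStoreOperationException, with OperationName still reported as "UpsertBatch" even though the
// method is now UpsertAsync, and CollectionName set to the target collection. A minimal handling
// sketch (the record type and collection are illustrative assumptions):
using System;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Extensions.VectorData;

internal static class BatchErrorHandlingSketch
{
    public sealed class Item
    {
        public string Id { get; set; } = string.Empty;
    }

    public static async Task UpsertSafelyAsync(IVectorStoreRecordCollection<string, Item> collection, Item[] items)
    {
        try
        {
            await collection.UpsertAsync(items).ToArrayAsync();
        }
        catch (VectorStoreOperationException ex)
        {
            // The exception carries metadata about the failed operation and the target collection.
            Console.WriteLine($"{ex.OperationName} failed against '{ex.CollectionName}': {ex.Message}");
        }
    }
}
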
- Assert.Empty(await collection.GetBatchAsync(keys).ToArrayAsync()); + Assert.Empty(await collection.GetAsync(keys).ToArrayAsync()); } } diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs index 084159af79ce..0c2f56a59c3e 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs @@ -220,13 +220,13 @@ public async Task BatchCRUD() Floats = Enumerable.Range(0, 10).Select(j => (float)(i + j)).ToArray() }).ToArray(); - string[] keys = await collection.UpsertBatchAsync(inserted).ToArrayAsync(); + string[] keys = await collection.UpsertAsync(inserted).ToArrayAsync(); for (int i = 0; i < inserted.Length; i++) { Assert.Equal(inserted[i].Id, keys[i]); } - TestModel[] received = await collection.GetBatchAsync(keys, new() { IncludeVectors = true }).ToArrayAsync(); + TestModel[] received = await collection.GetAsync(keys, new() { IncludeVectors = true }).ToArrayAsync(); for (int i = 0; i < inserted.Length; i++) { AssertEquality(inserted[i], received[i]); @@ -239,21 +239,21 @@ public async Task BatchCRUD() Floats = i.Floats }).ToArray(); - keys = await collection.UpsertBatchAsync(updated).ToArrayAsync(); + keys = await collection.UpsertAsync(updated).ToArrayAsync(); for (int i = 0; i < updated.Length; i++) { Assert.Equal(updated[i].Id, keys[i]); } - received = await collection.GetBatchAsync(keys, new() { IncludeVectors = true }).ToArrayAsync(); + received = await collection.GetAsync(keys, new() { IncludeVectors = true }).ToArrayAsync(); for (int i = 0; i < updated.Length; i++) { AssertEquality(updated[i], received[i]); } - await collection.DeleteBatchAsync(keys); + await collection.DeleteAsync(keys); - Assert.False(await collection.GetBatchAsync(keys).AnyAsync()); + Assert.False(await collection.GetAsync(keys).AnyAsync()); } finally { diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs index b8fe0a30afe4..849789b5e910 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs @@ -12,14 +12,14 @@ public abstract class BatchConformanceTests(SimpleModelFixture fixtu [ConditionalFact] public async Task GetBatchAsyncThrowsArgumentNullExceptionForNullKeys() { - ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.GetBatchAsync(keys: null!).ToArrayAsync().AsTask()); + ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.GetAsync(keys: null!).ToArrayAsync().AsTask()); Assert.Equal("keys", ex.ParamName); } [ConditionalFact] public async Task GetBatchAsyncDoesNotThrowForEmptyBatch() { - Assert.Empty(await fixture.Collection.GetBatchAsync([]).ToArrayAsync()); + Assert.Empty(await fixture.Collection.GetAsync([]).ToArrayAsync()); } [ConditionalFact] @@ -35,7 +35,7 @@ private async Task GetBatchAsyncReturnsInsertedRecords(bool includeVectors) var expectedRecords = fixture.TestData.Take(2); // the last two records can get deleted by other tests var ids = expectedRecords.Select(record => record.Id); - var received = await fixture.Collection.GetBatchAsync(ids, new() { IncludeVectors = 
includeVectors }).ToArrayAsync(); + var received = await fixture.Collection.GetAsync(ids, new() { IncludeVectors = includeVectors }).ToArrayAsync(); foreach (var record in expectedRecords) { @@ -46,14 +46,14 @@ private async Task GetBatchAsyncReturnsInsertedRecords(bool includeVectors) [ConditionalFact] public async Task UpsertBatchAsyncThrowsArgumentNullExceptionForNullBatch() { - ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.UpsertBatchAsync(records: null!).ToArrayAsync().AsTask()); + ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.UpsertAsync(records: null!).ToArrayAsync().AsTask()); Assert.Equal("records", ex.ParamName); } [ConditionalFact] public async Task UpsertBatchAsyncDoesNotThrowForEmptyBatch() { - Assert.Empty(await fixture.Collection.UpsertBatchAsync([]).ToArrayAsync()); + Assert.Empty(await fixture.Collection.UpsertAsync([]).ToArrayAsync()); } [ConditionalFact] @@ -76,11 +76,11 @@ private async Task UpsertBatchAsyncCanInsertNewRecords(bool includeVectors) }).ToArray(); var keys = inserted.Select(record => record.Id).ToArray(); - Assert.Empty(await collection.GetBatchAsync(keys).ToArrayAsync()); - var receivedKeys = await collection.UpsertBatchAsync(inserted).ToArrayAsync(); + Assert.Empty(await collection.GetAsync(keys).ToArrayAsync()); + var receivedKeys = await collection.UpsertAsync(inserted).ToArrayAsync(); Assert.Equal(keys.ToHashSet(), receivedKeys.ToHashSet()); // .ToHashSet() to ignore order - var received = await collection.GetBatchAsync(keys, new() { IncludeVectors = includeVectors }).ToArrayAsync(); + var received = await collection.GetAsync(keys, new() { IncludeVectors = includeVectors }).ToArrayAsync(); foreach (var record in inserted) { record.AssertEqual(this.GetRecord(received, record.Id), includeVectors); @@ -104,7 +104,7 @@ private async Task UpsertBatchAsyncCanUpdateExistingRecords(bool includeVectors) Text = i.ToString(), Floats = Enumerable.Range(0, SimpleModel.DimensionCount).Select(j => (float)(i + j)).ToArray() }).ToArray(); - await fixture.Collection.UpsertBatchAsync(inserted).ToArrayAsync(); + await fixture.Collection.UpsertAsync(inserted).ToArrayAsync(); SimpleModel[] updated = inserted.Select(i => new SimpleModel() { @@ -114,12 +114,12 @@ private async Task UpsertBatchAsyncCanUpdateExistingRecords(bool includeVectors) Floats = i.Floats }).ToArray(); - var keys = await fixture.Collection.UpsertBatchAsync(updated).ToArrayAsync(); + var keys = await fixture.Collection.UpsertAsync(updated).ToArrayAsync(); Assert.Equal( updated.Select(r => r.Id).OrderBy(id => id).ToArray(), keys.OrderBy(id => id).ToArray()); - var received = await fixture.Collection.GetBatchAsync(keys, new() { IncludeVectors = includeVectors }).ToArrayAsync(); + var received = await fixture.Collection.GetAsync(keys, new() { IncludeVectors = includeVectors }).ToArrayAsync(); foreach (var record in updated) { record.AssertEqual(this.GetRecord(received, record.Id), includeVectors); @@ -146,7 +146,7 @@ private async Task UpsertCanBothInsertAndUpdateRecordsFromTheSameBatch(bool incl // We take first half of the records and insert them. 
SimpleModel[] firstHalf = records.Take(records.Length / 2).ToArray(); - TKey[] insertedKeys = await fixture.Collection.UpsertBatchAsync(firstHalf).ToArrayAsync(); + TKey[] insertedKeys = await fixture.Collection.UpsertAsync(firstHalf).ToArrayAsync(); Assert.Equal( firstHalf.Select(r => r.Id).OrderBy(id => id).ToArray(), insertedKeys.OrderBy(id => id).ToArray()); @@ -159,12 +159,12 @@ private async Task UpsertCanBothInsertAndUpdateRecordsFromTheSameBatch(bool incl } // And now we upsert all the records (the first half is an update, the second is an insert). - TKey[] mixedKeys = await fixture.Collection.UpsertBatchAsync(records).ToArrayAsync(); + TKey[] mixedKeys = await fixture.Collection.UpsertAsync(records).ToArrayAsync(); Assert.Equal( records.Select(r => r.Id).OrderBy(id => id).ToArray(), mixedKeys.OrderBy(id => id).ToArray()); - var received = await fixture.Collection.GetBatchAsync(mixedKeys, new() { IncludeVectors = includeVectors }).ToArrayAsync(); + var received = await fixture.Collection.GetAsync(mixedKeys, new() { IncludeVectors = includeVectors }).ToArrayAsync(); foreach (var record in records) { record.AssertEqual(this.GetRecord(received, record.Id), includeVectors); @@ -174,13 +174,13 @@ private async Task UpsertCanBothInsertAndUpdateRecordsFromTheSameBatch(bool incl [ConditionalFact] public async Task DeleteBatchAsyncDoesNotThrowForEmptyBatch() { - await fixture.Collection.DeleteBatchAsync([]); + await fixture.Collection.DeleteAsync([]); } [ConditionalFact] public async Task DeleteBatchAsyncThrowsArgumentNullExceptionForNullKeys() { - ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.DeleteBatchAsync(keys: null!)); + ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.DeleteAsync(keys: null!)); Assert.Equal("keys", ex.ParamName); } @@ -189,9 +189,9 @@ public async Task DeleteBatchAsyncDeletesTheRecords() { TKey[] idsToRemove = [fixture.TestData[2].Id, fixture.TestData[3].Id]; - Assert.NotEmpty(await fixture.Collection.GetBatchAsync(idsToRemove).ToArrayAsync()); - await fixture.Collection.DeleteBatchAsync(idsToRemove); - Assert.Empty(await fixture.Collection.GetBatchAsync(idsToRemove).ToArrayAsync()); + Assert.NotEmpty(await fixture.Collection.GetAsync(idsToRemove).ToArrayAsync()); + await fixture.Collection.DeleteAsync(idsToRemove); + Assert.Empty(await fixture.Collection.GetAsync(idsToRemove).ToArrayAsync()); } // The order of records in the received array is not guaranteed diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/GenericDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/GenericDataModelConformanceTests.cs index dc905f82aea8..672d25e08aa7 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/GenericDataModelConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/GenericDataModelConformanceTests.cs @@ -12,7 +12,7 @@ public abstract class GenericDataModelConformanceTests(GenericDataModelFix [ConditionalFact] public async Task GetAsyncThrowsArgumentNullExceptionForNullKey() { - ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.GetAsync(default!)); + ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.GetAsync((TKey)default!)); Assert.Equal("key", ex.ParamName); } diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs 
b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs index 5a3d0d0081ea..c2a63b86cae8 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs @@ -12,7 +12,7 @@ public class RecordConformanceTests(SimpleModelFixture fixture) wher [ConditionalFact] public async Task GetAsyncThrowsArgumentNullExceptionForNullKey() { - ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.GetAsync(default!)); + ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.GetAsync((TKey)default!)); Assert.Equal("key", ex.ParamName); } diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs index 9ae5703056f2..6bbc59c59e42 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs @@ -45,7 +45,7 @@ public override async Task InitializeAsync() protected virtual async Task SeedAsync() { // TODO: UpsertBatchAsync returns IAsyncEnumerable (to support server-generated keys?), but this makes it quite hard to use: - await foreach (var _ in this.Collection.UpsertBatchAsync(this.TestData)) + await foreach (var _ in this.Collection.UpsertAsync(this.TestData)) { } diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs index 16c6a5f46c7b..ee23bb92dc9c 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs @@ -101,7 +101,7 @@ protected async Task SimpleSearch(string distanceFunction, double expectedExactM try { - await collection.UpsertBatchAsync(insertedRecords).ToArrayAsync(); + await collection.UpsertAsync(insertedRecords).ToArrayAsync(); var searchResult = await collection.VectorizedSearchAsync(baseVector); var results = await searchResult.Results.ToListAsync(); From a312b5946dd2a9bc5ce74ce399693751b4e4fce9 Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Thu, 3 Apr 2025 21:32:49 +0200 Subject: [PATCH 27/63] .Net: Introduce new record model (#11264) Closes #11140 --- dotnet/Directory.Packages.props | 1 + dotnet/docs/EXPERIMENTS.md | 2 + ...zureAISearchGenericDataModelMapperTests.cs | 144 ++-- ...VectorStoreCollectionCreateMappingTests.cs | 94 +- ...VectorStoreCollectionSearchMappingTests.cs | 77 -- ...ISearchVectorStoreRecordCollectionTests.cs | 8 +- .../Connectors.AzureAISearch.UnitTests.csproj | 1 + ...VectorStoreCollectionSearchMappingTests.cs | 40 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 2 +- ...osmosDBNoSQLGenericDataModelMapperTests.cs | 106 +-- ...LVectorStoreCollectionQueryBuilderTests.cs | 104 ++- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 2 +- ...smosDBNoSQLVectorStoreRecordMapperTests.cs | 23 +- ...ectors.AzureCosmosDBNoSQL.UnitTests.csproj | 1 + .../AzureAISearchConstants.cs | 51 ++ 
.../AzureAISearchFilterTranslator.cs | 10 +- .../AzureAISearchGenericDataModelMapper.cs | 131 ++- ...earchVectorStoreCollectionCreateMapping.cs | 56 +- ...earchVectorStoreCollectionSearchMapping.cs | 22 +- ...zureAISearchVectorStoreRecordCollection.cs | 193 ++--- .../AzureCosmosDBMongoDBFilterTranslator.cs | 10 +- ...ngoDBVectorStoreCollectionCreateMapping.cs | 37 +- ...ngoDBVectorStoreCollectionSearchMapping.cs | 20 +- ...mosDBMongoDBVectorStoreRecordCollection.cs | 56 +- ...zureCosmosDBNoSQLGenericDataModelMapper.cs | 122 ++- ...BNoSQLVectorStoreCollectionQueryBuilder.cs | 43 +- ...osmosDBNoSQLVectorStoreRecordCollection.cs | 234 ++--- ...ureCosmosDBNoSQLVectorStoreRecordMapper.cs | 40 +- .../AzureCosmosDBNoSqlFilterTranslator.cs | 10 +- ...ureCosmosDBNoSqlVectorStoreModelBuilder.cs | 50 ++ .../SqlFilterTranslator.cs | 13 +- .../InMemoryVectorStoreRecordCollection.cs | 146 +--- ...emoryVectorStoreRecordCollectionOptions.cs | 3 + .../MongoDBFilterTranslator.cs | 10 +- ...ngoDBVectorStoreCollectionCreateMapping.cs | 42 +- ...ngoDBVectorStoreCollectionSearchMapping.cs | 18 +- .../MongoDBVectorStoreRecordCollection.cs | 106 +-- .../PineconeFilterTranslator.cs | 10 +- .../PineconeGenericDataModelMapper.cs | 104 --- ...econeVectorStoreCollectionSearchMapping.cs | 9 +- .../PineconeVectorStoreRecordCollection.cs | 48 +- .../PineconeVectorStoreRecordFieldMapping.cs | 21 +- .../PineconeVectorStoreRecordMapper.cs | 63 +- ...PostgresVectorStoreCollectionSqlBuilder.cs | 17 +- .../IPostgresVectorStoreDbClient.cs | 17 +- .../PostgresConstants.cs | 98 ++- .../PostgresFilterTranslator.cs | 6 +- .../PostgresGenericDataModelMapper.cs | 104 --- .../PostgresVectorStore.cs | 5 - ...PostgresVectorStoreCollectionSqlBuilder.cs | 126 +-- .../PostgresVectorStoreDbClient.cs | 38 +- .../PostgresVectorStoreRecordCollection.cs | 65 +- .../PostgresVectorStoreRecordMapper.cs | 98 +-- ...ostgresVectorStoreRecordPropertyMapping.cs | 112 +-- .../QdrantFilterTranslator.cs | 10 +- .../QdrantGenericDataModelMapper.cs | 217 ----- ...drantVectorStoreCollectionCreateMapping.cs | 32 +- ...drantVectorStoreCollectionSearchMapping.cs | 11 +- .../QdrantVectorStoreRecordCollection.cs | 113 +-- .../QdrantVectorStoreRecordFieldMapping.cs | 23 +- .../QdrantVectorStoreRecordMapper.cs | 134 ++- .../RedisFilterTranslator.cs | 10 +- .../RedisHashSetGenericDataModelMapper.cs | 2 +- ...RedisHashSetVectorStoreRecordCollection.cs | 106 +-- .../RedisHashSetVectorStoreRecordMapper.cs | 106 +-- .../RedisJsonGenericDataModelMapper.cs | 95 +- .../RedisJsonVectorStoreRecordCollection.cs | 68 +- .../RedisJsonVectorStoreRecordMapper.cs | 61 +- ...RedisVectorStoreCollectionCreateMapping.cs | 156 ++-- ...RedisVectorStoreCollectionSearchMapping.cs | 53 +- .../GenericRecordMapper.cs | 93 -- .../RecordMapper.cs | 64 +- .../SqlDataReaderDictionary.cs | 35 +- .../SqlServerCommandBuilder.cs | 153 ++-- .../SqlServerConstants.cs | 17 +- .../SqlServerFilterTranslator.cs | 5 +- .../SqlServerVectorStoreRecordCollection.cs | 86 +- .../SqliteConstants.cs | 24 +- .../SqliteFilterTranslator.cs | 5 +- .../SqliteGenericDataModelMapper.cs | 152 ---- ...liteVectorStoreCollectionCommandBuilder.cs | 4 +- .../SqliteVectorStoreRecordCollection.cs | 179 ++-- .../SqliteVectorStoreRecordMapper.cs | 75 +- .../SqliteVectorStoreRecordPropertyMapping.cs | 53 +- .../WeaviateFilterTranslator.cs | 10 +- .../WeaviateGenericDataModelMapper.cs | 70 +- .../WeaviateModelBuilder.cs | 49 ++ ...viateVectorStoreCollectionCreateMapping.cs | 26 +- .../WeaviateVectorStoreRecordCollection.cs | 121 
+-- ...VectorStoreRecordCollectionQueryBuilder.cs | 58 +- .../WeaviateVectorStoreRecordMapper.cs | 67 +- .../Connectors.MongoDB.UnitTests.csproj | 1 + .../MongoDBGenericDataModelMapperTests.cs | 133 ++- ...VectorStoreCollectionSearchMappingTests.cs | 30 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 2 +- .../MongoDBVectorStoreRecordMapperTests.cs | 2 +- .../PineconeGenericDataModelMapperTests.cs | 341 -------- ...ineconeVectorStoreRecordCollectionTests.cs | 6 +- .../Connectors.Postgres.UnitTests.csproj | 1 + .../PostgresGenericDataModelMapperTests.cs | 190 ---- ...resVectorStoreCollectionSqlBuilderTests.cs | 13 +- ...ostgresVectorStoreRecordCollectionTests.cs | 2 +- .../PostgresVectorStoreRecordMapperTests.cs | 42 +- ...esVectorStoreRecordPropertyMappingTests.cs | 31 +- .../Connectors.Qdrant.UnitTests.csproj | 1 + .../QdrantGenericDataModelMapperTests.cs | 405 --------- ...VectorStoreCollectionCreateMappingTests.cs | 33 +- ...VectorStoreCollectionSearchMappingTests.cs | 24 +- .../QdrantVectorStoreRecordCollectionTests.cs | 6 +- .../QdrantVectorStoreRecordMapperTests.cs | 42 +- .../Connectors.Redis.UnitTests.csproj | 1 + ...HashSetVectorStoreRecordCollectionTests.cs | 6 +- ...edisHashSetVectorStoreRecordMapperTests.cs | 11 +- .../RedisJsonGenericDataModelMapperTests.cs | 71 +- ...disJsonVectorStoreRecordCollectionTests.cs | 26 - .../RedisJsonVectorStoreRecordMapperTests.cs | 13 +- ...VectorStoreCollectionCreateMappingTests.cs | 68 +- ...VectorStoreCollectionSearchMappingTests.cs | 72 +- .../Connectors.Sqlite.UnitTests.csproj | 1 + .../SqliteGenericDataModelMapperTests.cs | 189 ---- .../SqliteVectorStoreRecordMapperTests.cs | 28 +- ...teVectorStoreRecordPropertyMappingTests.cs | 23 +- .../Connectors.Weaviate.UnitTests.csproj | 1 + .../WeaviateGenericDataModelMapperTests.cs | 305 +++---- ...VectorStoreCollectionCreateMappingTests.cs | 123 +-- ...rStoreRecordCollectionQueryBuilderTests.cs | 62 +- ...eaviateVectorStoreRecordCollectionTests.cs | 2 +- .../WeaviateVectorStoreRecordMapperTests.cs | 45 +- dotnet/src/Connectors/Directory.Build.props | 9 + .../VectorStoreRecordDataPropertyModel.cs | 76 ++ .../VectorStoreRecordJsonModelBuilder.cs | 92 ++ .../VectorStoreRecordKeyPropertyModel.cs | 55 ++ .../VectorStoreRecordModel.cs | 241 ++++++ .../VectorStoreRecordModelBuilder.cs | 502 +++++++++++ .../VectorStoreRecordModelBuildingOptions.cs | 61 ++ .../VectorStoreRecordPropertyModel.cs | 97 +++ .../VectorStoreRecordVectorPropertyModel.cs | 87 ++ .../VectorData.Abstractions.csproj | 20 +- .../VectorStoreGenericDataModel.cs | 22 +- .../AzureCosmosDBNoSQLVectorStoreFixture.cs | 10 +- ...eaviateVectorStoreRecordCollectionTests.cs | 6 +- dotnet/src/IntegrationTests/README.md | 2 +- .../Memory/MongoDB/MongoDBConstants.cs | 7 - .../MongoDB/MongoDBGenericDataModelMapper.cs | 102 +-- .../Memory/MongoDB/MongoDBModelBuilder.cs | 42 + .../MongoDB/MongoDBVectorStoreRecordMapper.cs | 28 +- .../src/Data/VectorStoreRecordMapping.cs | 78 -- .../Data/VectorStoreRecordPropertyReader.cs | 806 ----------------- .../VectorStoreRecordPropertyReaderOptions.cs | 33 - .../VectorStoreRecordPropertyVerification.cs | 56 -- .../Diagnostics/CompilerServicesAttributes.cs | 35 + .../SemanticKernel.Core.csproj | 2 - .../Data/VectorStoreRecordMappingTests.cs | 97 --- .../VectorStoreRecordPropertyReaderTests.cs | 814 ------------------ ...torStoreRecordPropertyVerificationTests.cs | 170 ---- .../Support/AzureAISearchTestStore.cs | 15 + .../SqlServerCommandBuilderTests.cs | 120 ++- .../SqlServerIntegrationTests.csproj | 2 + 158 
files changed, 4027 insertions(+), 7524 deletions(-) delete mode 100644 dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionSearchMappingTests.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlVectorStoreModelBuilder.cs delete mode 100644 dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeGenericDataModelMapper.cs delete mode 100644 dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresGenericDataModelMapper.cs delete mode 100644 dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantGenericDataModelMapper.cs delete mode 100644 dotnet/src/Connectors/Connectors.Memory.SqlServer/GenericRecordMapper.cs delete mode 100644 dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteGenericDataModelMapper.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateModelBuilder.cs delete mode 100644 dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeGenericDataModelMapperTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresGenericDataModelMapperTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantGenericDataModelMapperTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteGenericDataModelMapperTests.cs create mode 100644 dotnet/src/Connectors/Directory.Build.props create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordDataPropertyModel.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordJsonModelBuilder.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordKeyPropertyModel.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuildingOptions.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordPropertyModel.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs create mode 100644 dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBModelBuilder.cs delete mode 100644 dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs delete mode 100644 dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReaderOptions.cs delete mode 100644 dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs delete mode 100644 dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyVerificationTests.cs diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props index 2c75d33b385e..380d18f96e24 100644 --- a/dotnet/Directory.Packages.props +++ b/dotnet/Directory.Packages.props @@ -81,6 +81,7 @@ + diff --git a/dotnet/docs/EXPERIMENTS.md b/dotnet/docs/EXPERIMENTS.md index 99fd9b56afb4..1e143695e0f0 100644 --- a/dotnet/docs/EXPERIMENTS.md +++ b/dotnet/docs/EXPERIMENTS.md @@ -25,6 +25,8 @@ You can use the following diagnostic IDs to ignore warnings or errors for a part | SKEXP0100 | Advanced Semantic Kernel features | | SKEXP0110 | Semantic Kernel Agents | | SKEXP0120 | Native-AOT | +| MEVD9000 | 
Microsoft.Extensions.VectorData experimental user-facing APIs | +| MEVD9001 | Microsoft.Extensions.VectorData experimental connector-facing APIs | ## Experimental Features Tracking diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchGenericDataModelMapperTests.cs index 8326be0dd639..d1840740da54 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchGenericDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchGenericDataModelMapperTests.cs @@ -5,6 +5,7 @@ using System.Linq; using System.Text.Json.Nodes; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.AzureAISearch; using Xunit; @@ -15,39 +16,36 @@ namespace SemanticKernel.Connectors.AzureAISearch.UnitTests; /// public class AzureAISearchGenericDataModelMapperTests { - private static readonly VectorStoreRecordDefinition s_vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), - new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), - new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), - new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), - new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), - new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), - new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), - new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), - new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), - new VectorStoreRecordDataProperty("DateTimeOffsetDataProp", typeof(DateTimeOffset)), - new VectorStoreRecordDataProperty("NullableDateTimeOffsetDataProp", typeof(DateTimeOffset?)), - new VectorStoreRecordDataProperty("TagListDataProp", typeof(string[])), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), - }, - }; - - private static readonly float[] s_vector1 = new float[] { 1.0f, 2.0f, 3.0f }; - private static readonly float[] s_vector2 = new float[] { 4.0f, 5.0f, 6.0f }; - private static readonly string[] s_taglist = new string[] { "tag1", "tag2" }; + private static readonly VectorStoreRecordModel s_model = BuildModel( + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), + new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), + new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), + new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), + new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), + new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), + new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), + new 
VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), + new VectorStoreRecordDataProperty("DateTimeOffsetDataProp", typeof(DateTimeOffset)), + new VectorStoreRecordDataProperty("NullableDateTimeOffsetDataProp", typeof(DateTimeOffset?)), + new VectorStoreRecordDataProperty("TagListDataProp", typeof(string[])), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + ]); + + private static readonly float[] s_vector1 = [1.0f, 2.0f, 3.0f]; + private static readonly float[] s_vector2 = [4.0f, 5.0f, 6.0f]; + private static readonly string[] s_taglist = ["tag1", "tag2"]; [Fact] public void MapFromDataToStorageModelMapsAllSupportedTypes() { // Arrange - var sut = new AzureAISearchGenericDataModelMapper(s_vectorStoreRecordDefinition); + var sut = new AzureAISearchGenericDataModelMapper(s_model); var dataModel = new VectorStoreGenericDataModel("key") { Data = @@ -101,16 +99,13 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes() public void MapFromDataToStorageModelMapsNullValues() { // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), - }, - }; + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + ]); var dataModel = new VectorStoreGenericDataModel("key") { @@ -125,7 +120,7 @@ public void MapFromDataToStorageModelMapsNullValues() }, }; - var sut = new AzureAISearchGenericDataModelMapper(vectorStoreRecordDefinition); + var sut = new AzureAISearchGenericDataModelMapper(model); // Act var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -140,7 +135,7 @@ public void MapFromDataToStorageModelMapsNullValues() public void MapFromStorageToDataModelMapsAllSupportedTypes() { // Arrange - var sut = new AzureAISearchGenericDataModelMapper(s_vectorStoreRecordDefinition); + var sut = new AzureAISearchGenericDataModelMapper(s_model); var storageModel = new JsonObject(); storageModel["Key"] = "key"; storageModel["StringDataProp"] = "string"; @@ -187,16 +182,13 @@ public void MapFromStorageToDataModelMapsAllSupportedTypes() public void MapFromStorageToDataModelMapsNullValues() { // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), - }, - }; + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + ]); var storageModel = new JsonObject(); storageModel["Key"] = "key"; 
@@ -204,7 +196,7 @@ public void MapFromStorageToDataModelMapsNullValues() storageModel["NullableIntDataProp"] = null; storageModel["NullableFloatVector"] = null; - var sut = new AzureAISearchGenericDataModelMapper(vectorStoreRecordDefinition); + var sut = new AzureAISearchGenericDataModelMapper(model); // Act var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); @@ -220,7 +212,15 @@ public void MapFromStorageToDataModelMapsNullValues() public void MapFromStorageToDataModelThrowsForMissingKey() { // Arrange - var sut = new AzureAISearchGenericDataModelMapper(s_vectorStoreRecordDefinition); + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + ]); + + var sut = new AzureAISearchGenericDataModelMapper(model); var storageModel = new JsonObject(); // Act @@ -234,18 +234,15 @@ public void MapFromStorageToDataModelThrowsForMissingKey() public void MapFromDataToStorageModelSkipsMissingProperties() { // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - }, - }; + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + ]); var dataModel = new VectorStoreGenericDataModel("key"); - var sut = new AzureAISearchGenericDataModelMapper(vectorStoreRecordDefinition); + var sut = new AzureAISearchGenericDataModelMapper(model); // Act var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -260,20 +257,17 @@ public void MapFromDataToStorageModelSkipsMissingProperties() public void MapFromStorageToDataModelSkipsMissingProperties() { // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - }, - }; + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + ]); var storageModel = new JsonObject(); storageModel["Key"] = "key"; - var sut = new AzureAISearchGenericDataModelMapper(vectorStoreRecordDefinition); + var sut = new AzureAISearchGenericDataModelMapper(model); // Act var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); @@ -283,4 +277,10 @@ public void MapFromStorageToDataModelSkipsMissingProperties() Assert.False(dataModel.Data.ContainsKey("StringDataProp")); Assert.False(dataModel.Vectors.ContainsKey("FloatVector")); } + + private static VectorStoreRecordModel BuildModel(List properties) + => new VectorStoreRecordJsonModelBuilder(AzureAISearchConstants.s_modelBuildingOptions) + .Build( + typeof(VectorStoreGenericDataModel), + 
new() { Properties = properties }); } diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionCreateMappingTests.cs index 24bed31f87ed..b2adb694bf58 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionCreateMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionCreateMappingTests.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using Azure.Search.Documents.Indexes.Models; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.AzureAISearch; using Xunit; @@ -18,15 +19,14 @@ public class AzureAISearchVectorStoreCollectionCreateMappingTests public void MapKeyFieldCreatesSearchableField() { // Arrange - var keyProperty = new VectorStoreRecordKeyProperty("testkey", typeof(string)); - var storagePropertyName = "test_key"; + var keyProperty = new VectorStoreRecordKeyPropertyModel("testkey", typeof(string)) { StorageName = "test_key" }; // Act - var result = AzureAISearchVectorStoreCollectionCreateMapping.MapKeyField(keyProperty, storagePropertyName); + var result = AzureAISearchVectorStoreCollectionCreateMapping.MapKeyField(keyProperty); // Assert Assert.NotNull(result); - Assert.Equal(storagePropertyName, result.Name); + Assert.Equal("test_key", result.Name); Assert.True(result.IsKey); Assert.True(result.IsFilterable); } @@ -37,16 +37,19 @@ public void MapKeyFieldCreatesSearchableField() public void MapFilterableStringDataFieldCreatesSimpleField(bool isFilterable) { // Arrange - var dataProperty = new VectorStoreRecordDataProperty("testdata", typeof(string)) { IsFilterable = isFilterable }; - var storagePropertyName = "test_data"; + var dataProperty = new VectorStoreRecordDataPropertyModel("testdata", typeof(string)) + { + IsFilterable = isFilterable, + StorageName = "test_data" + }; // Act - var result = AzureAISearchVectorStoreCollectionCreateMapping.MapDataField(dataProperty, storagePropertyName); + var result = AzureAISearchVectorStoreCollectionCreateMapping.MapDataField(dataProperty); // Assert Assert.NotNull(result); Assert.IsType(result); - Assert.Equal(storagePropertyName, result.Name); + Assert.Equal("test_data", result.Name); Assert.False(result.IsKey); Assert.Equal(isFilterable, result.IsFilterable); } @@ -57,16 +60,20 @@ public void MapFilterableStringDataFieldCreatesSimpleField(bool isFilterable) public void MapFullTextSearchableStringDataFieldCreatesSearchableField(bool isFilterable) { // Arrange - var dataProperty = new VectorStoreRecordDataProperty("testdata", typeof(string)) { IsFilterable = isFilterable, IsFullTextSearchable = true }; - var storagePropertyName = "test_data"; + var dataProperty = new VectorStoreRecordDataPropertyModel("testdata", typeof(string)) + { + IsFilterable = isFilterable, + IsFullTextSearchable = true, + StorageName = "test_data" + }; // Act - var result = AzureAISearchVectorStoreCollectionCreateMapping.MapDataField(dataProperty, storagePropertyName); + var result = AzureAISearchVectorStoreCollectionCreateMapping.MapDataField(dataProperty); // Assert Assert.NotNull(result); Assert.IsType(result); - Assert.Equal(storagePropertyName, result.Name); + Assert.Equal("test_data", result.Name); Assert.False(result.IsKey); Assert.Equal(isFilterable, result.IsFilterable); } @@ -75,11 +82,14 @@ public void 
MapFullTextSearchableStringDataFieldCreatesSearchableField(bool isFi public void MapFullTextSearchableStringDataFieldThrowsForInvalidType() { // Arrange - var dataProperty = new VectorStoreRecordDataProperty("testdata", typeof(int)) { IsFullTextSearchable = true }; - var storagePropertyName = "test_data"; + var dataProperty = new VectorStoreRecordDataPropertyModel("testdata", typeof(int)) + { + IsFullTextSearchable = true, + StorageName = "test_data" + }; // Act & Assert - Assert.Throws(() => AzureAISearchVectorStoreCollectionCreateMapping.MapDataField(dataProperty, storagePropertyName)); + Assert.Throws(() => AzureAISearchVectorStoreCollectionCreateMapping.MapDataField(dataProperty)); } [Theory] @@ -88,16 +98,19 @@ public void MapFullTextSearchableStringDataFieldThrowsForInvalidType() public void MapDataFieldCreatesSimpleField(bool isFilterable) { // Arrange - var dataProperty = new VectorStoreRecordDataProperty("testdata", typeof(int)) { IsFilterable = isFilterable }; - var storagePropertyName = "test_data"; + var dataProperty = new VectorStoreRecordDataPropertyModel("testdata", typeof(int)) + { + IsFilterable = isFilterable, + StorageName = "test_data" + }; // Act - var result = AzureAISearchVectorStoreCollectionCreateMapping.MapDataField(dataProperty, storagePropertyName); + var result = AzureAISearchVectorStoreCollectionCreateMapping.MapDataField(dataProperty); // Assert Assert.NotNull(result); Assert.IsType(result); - Assert.Equal(storagePropertyName, result.Name); + Assert.Equal("test_data", result.Name); Assert.Equal(SearchFieldDataType.Int32, result.Type); Assert.False(result.IsKey); Assert.Equal(isFilterable, result.IsFilterable); @@ -107,17 +120,22 @@ public void MapDataFieldCreatesSimpleField(bool isFilterable) public void MapVectorFieldCreatesVectorSearchField() { // Arrange - var vectorProperty = new VectorStoreRecordVectorProperty("testvector", typeof(ReadOnlyMemory)) { Dimensions = 10, IndexKind = IndexKind.Flat, DistanceFunction = DistanceFunction.DotProductSimilarity }; - var storagePropertyName = "test_vector"; + var vectorProperty = new VectorStoreRecordVectorPropertyModel("testvector", typeof(ReadOnlyMemory)) + { + Dimensions = 10, + IndexKind = IndexKind.Flat, + DistanceFunction = DistanceFunction.DotProductSimilarity, + StorageName = "test_vector" + }; // Act - var (vectorSearchField, algorithmConfiguration, vectorSearchProfile) = AzureAISearchVectorStoreCollectionCreateMapping.MapVectorField(vectorProperty, storagePropertyName); + var (vectorSearchField, algorithmConfiguration, vectorSearchProfile) = AzureAISearchVectorStoreCollectionCreateMapping.MapVectorField(vectorProperty); // Assert Assert.NotNull(vectorSearchField); Assert.NotNull(algorithmConfiguration); Assert.NotNull(vectorSearchProfile); - Assert.Equal(storagePropertyName, vectorSearchField.Name); + Assert.Equal("test_vector", vectorSearchField.Name); Assert.Equal(vectorProperty.Dimensions, vectorSearchField.VectorSearchDimensions); Assert.Equal("test_vectorAlgoConfig", algorithmConfiguration.Name); @@ -135,11 +153,16 @@ public void MapVectorFieldCreatesVectorSearchField() public void MapVectorFieldCreatesExpectedAlgoConfigTypes(string indexKind, Type algoConfigType) { // Arrange - var vectorProperty = new VectorStoreRecordVectorProperty("testvector", typeof(ReadOnlyMemory)) { Dimensions = 10, IndexKind = indexKind, DistanceFunction = DistanceFunction.DotProductSimilarity }; - var storagePropertyName = "test_vector"; + var vectorProperty = new VectorStoreRecordVectorPropertyModel("testvector", 
typeof(ReadOnlyMemory)) + { + Dimensions = 10, + IndexKind = indexKind, + DistanceFunction = DistanceFunction.DotProductSimilarity, + StorageName = "test_vector" + }; // Act - var (vectorSearchField, algorithmConfiguration, vectorSearchProfile) = AzureAISearchVectorStoreCollectionCreateMapping.MapVectorField(vectorProperty, storagePropertyName); + var (vectorSearchField, algorithmConfiguration, vectorSearchProfile) = AzureAISearchVectorStoreCollectionCreateMapping.MapVectorField(vectorProperty); // Assert Assert.Equal("test_vectorAlgoConfig", algorithmConfiguration.Name); @@ -150,11 +173,10 @@ public void MapVectorFieldCreatesExpectedAlgoConfigTypes(string indexKind, Type public void MapVectorFieldDefaultsToHsnwAndCosine() { // Arrange - var vectorProperty = new VectorStoreRecordVectorProperty("testvector", typeof(ReadOnlyMemory)) { Dimensions = 10 }; - var storagePropertyName = "test_vector"; + var vectorProperty = new VectorStoreRecordVectorPropertyModel("testvector", typeof(ReadOnlyMemory)) { Dimensions = 10 }; // Act - var (vectorSearchField, algorithmConfiguration, vectorSearchProfile) = AzureAISearchVectorStoreCollectionCreateMapping.MapVectorField(vectorProperty, storagePropertyName); + var (vectorSearchField, algorithmConfiguration, vectorSearchProfile) = AzureAISearchVectorStoreCollectionCreateMapping.MapVectorField(vectorProperty); // Assert Assert.IsType(algorithmConfiguration); @@ -166,22 +188,24 @@ public void MapVectorFieldDefaultsToHsnwAndCosine() public void MapVectorFieldThrowsForUnsupportedDistanceFunction() { // Arrange - var vectorProperty = new VectorStoreRecordVectorProperty("testvector", typeof(ReadOnlyMemory)) { Dimensions = 10, DistanceFunction = DistanceFunction.ManhattanDistance }; - var storagePropertyName = "test_vector"; + var vectorProperty = new VectorStoreRecordVectorPropertyModel("testvector", typeof(ReadOnlyMemory)) + { + Dimensions = 10, + DistanceFunction = DistanceFunction.ManhattanDistance, + }; // Act & Assert - Assert.Throws(() => AzureAISearchVectorStoreCollectionCreateMapping.MapVectorField(vectorProperty, storagePropertyName)); + Assert.Throws(() => AzureAISearchVectorStoreCollectionCreateMapping.MapVectorField(vectorProperty)); } [Fact] public void MapVectorFieldThrowsForMissingDimensionsCount() { // Arrange - var vectorProperty = new VectorStoreRecordVectorProperty("testvector", typeof(ReadOnlyMemory)); - var storagePropertyName = "test_vector"; + var vectorProperty = new VectorStoreRecordVectorPropertyModel("testvector", typeof(ReadOnlyMemory)); // Act & Assert - Assert.Throws(() => AzureAISearchVectorStoreCollectionCreateMapping.MapVectorField(vectorProperty, storagePropertyName)); + Assert.Throws(() => AzureAISearchVectorStoreCollectionCreateMapping.MapVectorField(vectorProperty)); } [Theory] diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionSearchMappingTests.cs deleted file mode 100644 index 13216b9ec8be..000000000000 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionSearchMappingTests.cs +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
-
-using System;
-using System.Collections.Generic;
-using Microsoft.Extensions.VectorData;
-using Microsoft.SemanticKernel.Connectors.AzureAISearch;
-using Xunit;
-
-namespace SemanticKernel.Connectors.AzureAISearch.UnitTests;
-
-#pragma warning disable CS0618 // VectorSearchFilter is obsolete
-
-///
-/// Contains tests for the class.
-///
-public class AzureAISearchVectorStoreCollectionSearchMappingTests
-{
-    [Theory]
-    [MemberData(nameof(DataTypeMappingOptions))]
-    public void BuildFilterStringBuildsCorrectEqualityStringForEachFilterType(string fieldName, object? fieldValue, string expected)
-    {
-        // Arrange.
-        var filter = new VectorSearchFilter().EqualTo(fieldName, fieldValue!);
-
-        // Act.
-        var actual = AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(filter, new Dictionary { { fieldName, "storage_" + fieldName } });
-
-        // Assert.
-        Assert.Equal(expected, actual);
-    }
-
-    [Fact]
-    public void BuildFilterStringBuildsCorrectTagContainsString()
-    {
-        // Arrange.
-        var filter = new VectorSearchFilter().AnyTagEqualTo("Tags", "mytag");
-
-        // Act.
-        var actual = AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(filter, new Dictionary { { "Tags", "storage_tags" } });
-
-        // Assert.
-        Assert.Equal("storage_tags/any(t: t eq 'mytag')", actual);
-    }
-
-    [Fact]
-    public void BuildFilterStringCombinesFilterOptions()
-    {
-        // Arrange.
-        var filter = new VectorSearchFilter().EqualTo("intField", 5).AnyTagEqualTo("Tags", "mytag");
-
-        // Act.
-        var actual = AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(filter, new Dictionary { { "Tags", "storage_tags" }, { "intField", "storage_intField" } });
-
-        // Assert.
-        Assert.Equal("storage_intField eq 5 and storage_tags/any(t: t eq 'mytag')", actual);
-    }
-
-    [Fact]
-    public void BuildFilterStringThrowsForUnknownPropertyName()
-    {
-        // Act and assert.
- Assert.Throws(() => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(new VectorSearchFilter().EqualTo("unknown", "value"), new Dictionary())); - Assert.Throws(() => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(new VectorSearchFilter().AnyTagEqualTo("unknown", "value"), new Dictionary())); - } - - public static IEnumerable DataTypeMappingOptions() - { - yield return new object[] { "stringField", "value", "storage_stringField eq 'value'" }; - yield return new object[] { "boolField", true, "storage_boolField eq true" }; - yield return new object[] { "intField", 5, "storage_intField eq 5" }; - yield return new object[] { "longField", 5L, "storage_longField eq 5" }; - yield return new object[] { "floatField", 5.5f, "storage_floatField eq 5.5" }; - yield return new object[] { "doubleField", 5.5d, "storage_doubleField eq 5.5" }; - yield return new object[] { "dateTimeOffSetField", new DateTimeOffset(2000, 10, 20, 5, 55, 55, TimeSpan.Zero), "storage_dateTimeOffSetField eq 2000-10-20T05:55:55.0000000Z" }; - yield return new object[] { "nullField", null!, "storage_nullField eq null" }; - } -} diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs index da69f4590c47..37545e0c67e0 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -208,7 +208,7 @@ public async Task CanGetRecordWithoutVectorsAsync(bool useDefinition, bool useCu // Arrange. var storageObject = JsonSerializer.SerializeToNode(CreateModel(TestRecordKey1, false))!.AsObject(); - var expectedSelectFields = useCustomJsonSerializerOptions ? new[] { "key", "storage_data1", "data2" } : new[] { "Key", "storage_data1", "Data2" }; + string[] expectedSelectFields = useCustomJsonSerializerOptions ? 
["key", "storage_data1", "data2"] : ["Key", "storage_data1", "Data2"]; this._searchClientMock.Setup( x => x.GetDocumentAsync( TestRecordKey1, @@ -538,9 +538,9 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() { Properties = new List { - new VectorStoreRecordKeyProperty("Id", typeof(string)), - new VectorStoreRecordDataProperty("Text", typeof(string)), - new VectorStoreRecordVectorProperty("Embedding", typeof(ReadOnlyMemory)) { Dimensions = 4 }, + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("Data1", typeof(string)), + new VectorStoreRecordVectorProperty("Vector1", typeof(ReadOnlyMemory)) { Dimensions = 4 }, } }; diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/Connectors.AzureAISearch.UnitTests.csproj b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/Connectors.AzureAISearch.UnitTests.csproj index 8583008891e7..b4d5908dbed9 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/Connectors.AzureAISearch.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/Connectors.AzureAISearch.UnitTests.csproj @@ -9,6 +9,7 @@ disable false $(NoWarn);SKEXP0001,SKEXP0020 + $(NoWarn);MEVD9001 diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs index 9dee844e61d2..0b9330c668fc 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs @@ -3,7 +3,9 @@ using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; +using Microsoft.SemanticKernel.Connectors.MongoDB; using MongoDB.Bson; using Xunit; @@ -16,11 +18,17 @@ namespace SemanticKernel.Connectors.AzureCosmosDBMongoDB.UnitTests; /// public sealed class AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests { - private readonly Dictionary _storagePropertyNames = new() - { - ["Property1"] = "property_1", - ["Property2"] = "property_2", - }; + private readonly VectorStoreRecordModel _model = new MongoDBModelBuilder() + .Build( + typeof(VectorStoreGenericDataModel), + new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("Property1", typeof(string)) { StoragePropertyName = "property_1" }, + new VectorStoreRecordDataProperty("Property2", typeof(string)) { StoragePropertyName = "property_2" } + ] + }); [Fact] public void BuildFilterWithNullVectorSearchFilterReturnsNull() @@ -29,7 +37,7 @@ public void BuildFilterWithNullVectorSearchFilterReturnsNull() VectorSearchFilter? 
vectorSearchFilter = null; // Act - var filter = AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._storagePropertyNames); + var filter = AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._model); // Assert Assert.Null(filter); @@ -42,7 +50,7 @@ public void BuildFilterWithoutFilterClausesReturnsNull() VectorSearchFilter vectorSearchFilter = new(); // Act - var filter = AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._storagePropertyNames); + var filter = AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._model); // Assert Assert.Null(filter); @@ -55,7 +63,7 @@ public void BuildFilterThrowsExceptionWithUnsupportedFilterClause() var vectorSearchFilter = new VectorSearchFilter().AnyTagEqualTo("NonExistentProperty", "TestValue"); // Act & Assert - Assert.Throws(() => AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._storagePropertyNames)); + Assert.Throws(() => AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._model)); } [Fact] @@ -65,7 +73,7 @@ public void BuildFilterThrowsExceptionWithNonExistentPropertyName() var vectorSearchFilter = new VectorSearchFilter().EqualTo("NonExistentProperty", "TestValue"); // Act & Assert - Assert.Throws(() => AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._storagePropertyNames)); + Assert.Throws(() => AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._model)); } [Fact] @@ -77,19 +85,25 @@ public void BuildFilterThrowsExceptionWithMultipleFilterClausesOfSameType() .EqualTo("Property1", "TestValue2"); // Act & Assert - Assert.Throws(() => AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._storagePropertyNames)); + Assert.Throws(() => AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._model)); } [Fact] public void BuilderFilterByDefaultReturnsValidFilter() { // Arrange - var expectedFilter = new BsonDocument() { ["property_1"] = new BsonDocument() { ["$eq"] = "TestValue1" } }; + var expectedFilter = new BsonDocument() { ["Property1"] = new BsonDocument() { ["$eq"] = "TestValue1" } }; var vectorSearchFilter = new VectorSearchFilter().EqualTo("Property1", "TestValue1"); // Act - var filter = AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._storagePropertyNames); + var filter = AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter(vectorSearchFilter, this._model); - Assert.Equal(filter.ToJson(), expectedFilter.ToJson()); + Assert.Equal(expectedFilter.ToJson(), filter.ToJson()); } + + private static VectorStoreRecordModel BuildModel(List properties) + => new MongoDBModelBuilder() + .Build( + typeof(VectorStoreGenericDataModel), + new() { Properties = properties }); } diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 30c380c0a9fa..840acf4a3c78 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ 
b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -37,7 +37,7 @@ public AzureCosmosDBMongoDBVectorStoreRecordCollectionTests() public void ConstructorForModelWithoutKeyThrowsException() { // Act & Assert - var exception = Assert.Throws(() => new AzureCosmosDBMongoDBVectorStoreRecordCollection(this._mockMongoDatabase.Object, "collection")); + var exception = Assert.Throws(() => new AzureCosmosDBMongoDBVectorStoreRecordCollection(this._mockMongoDatabase.Object, "collection")); Assert.Contains("No key property found", exception.Message); } diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLGenericDataModelMapperTests.cs index cbfc8e57f131..800fee716c53 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLGenericDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLGenericDataModelMapperTests.cs @@ -6,6 +6,7 @@ using System.Text.Json; using System.Text.Json.Nodes; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; using Xunit; @@ -18,42 +19,40 @@ public sealed class AzureCosmosDBNoSQLGenericDataModelMapperTests { private static readonly JsonSerializerOptions s_jsonSerializerOptions = JsonSerializerOptions.Default; - private static readonly VectorStoreRecordDefinition s_vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), - new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), - new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), - new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), - new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), - new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), - new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), - new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), - new VectorStoreRecordDataProperty("DateTimeOffsetDataProp", typeof(DateTimeOffset)), - new VectorStoreRecordDataProperty("NullableDateTimeOffsetDataProp", typeof(DateTimeOffset?)), - new VectorStoreRecordDataProperty("TagListDataProp", typeof(List)), -#if NET5_0_OR_GREATER - new VectorStoreRecordVectorProperty("HalfVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableHalfVector", typeof(ReadOnlyMemory?)), -#endif - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), - new VectorStoreRecordVectorProperty("ByteVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableByteVector", typeof(ReadOnlyMemory?)), - new VectorStoreRecordVectorProperty("SByteVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableSByteVector", typeof(ReadOnlyMemory?)), - }, - }; - - private static readonly Dictionary s_storagePropertyNames = - 
s_vectorStoreRecordDefinition.Properties.ToDictionary( - k => k.DataModelPropertyName, - v => v is VectorStoreRecordKeyProperty ? "id" : v.DataModelPropertyName); + private static readonly VectorStoreRecordModel s_model = new AzureCosmosDBNoSqlVectorStoreModelBuilder() + .Build( + typeof(VectorStoreGenericDataModel), + new VectorStoreRecordDefinition + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), + new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), + new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), + new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), + new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), + new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), + new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), + new VectorStoreRecordDataProperty("DateTimeOffsetDataProp", typeof(DateTimeOffset)), + new VectorStoreRecordDataProperty("NullableDateTimeOffsetDataProp", typeof(DateTimeOffset?)), + new VectorStoreRecordDataProperty("TagListDataProp", typeof(List)), + #if NET5_0_OR_GREATER + new VectorStoreRecordVectorProperty("HalfVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableHalfVector", typeof(ReadOnlyMemory?)), + #endif + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("ByteVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableByteVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("SByteVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableSByteVector", typeof(ReadOnlyMemory?)), + }, + }); #if NET5_0_OR_GREATER private static readonly Half[] s_halfVector = [(Half)1.0f, (Half)2.0f, (Half)3.0f]; @@ -67,10 +66,7 @@ public sealed class AzureCosmosDBNoSQLGenericDataModelMapperTests public void MapFromDataToStorageModelMapsAllSupportedTypes() { // Arrange - var sut = new AzureCosmosDBNoSQLGenericDataModelMapper( - s_vectorStoreRecordDefinition.Properties, - s_storagePropertyNames, - s_jsonSerializerOptions); + var sut = new AzureCosmosDBNoSQLGenericDataModelMapper(s_model, s_jsonSerializerOptions); var dataModel = new VectorStoreGenericDataModel("key") { @@ -165,10 +161,7 @@ public void MapFromDataToStorageModelMapsNullValues() }, }; - var sut = new AzureCosmosDBNoSQLGenericDataModelMapper( - s_vectorStoreRecordDefinition.Properties, - s_storagePropertyNames, - s_jsonSerializerOptions); + var sut = new AzureCosmosDBNoSQLGenericDataModelMapper(s_model, s_jsonSerializerOptions); // Act var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -183,10 +176,7 @@ public void MapFromDataToStorageModelMapsNullValues() public void MapFromStorageToDataModelMapsAllSupportedTypes() { // Arrange - var sut = new AzureCosmosDBNoSQLGenericDataModelMapper( - s_vectorStoreRecordDefinition.Properties, - s_storagePropertyNames, - s_jsonSerializerOptions); + var sut = new AzureCosmosDBNoSQLGenericDataModelMapper(s_model, s_jsonSerializerOptions); 
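// --- Illustrative sketch, not part of this patch: the shape of the arrangement change in the hunks above. ---
// Previously these tests handed the mapper a property definition list plus a separate storage-property-name
// dictionary; after this change they build a single VectorStoreRecordModel via the connector's model builder
// and pass that instead. The calls below mirror the ones visible in this hunk; the shortened property list
// and the string key type argument are assumptions.
var model = new AzureCosmosDBNoSqlVectorStoreModelBuilder()
    .Build(
        typeof(VectorStoreGenericDataModel<string>),
        new VectorStoreRecordDefinition
        {
            Properties =
            [
                new VectorStoreRecordKeyProperty("Key", typeof(string)),
                new VectorStoreRecordDataProperty("StringDataProp", typeof(string)),
                new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory<float>)),
            ],
        });

var mapper = new AzureCosmosDBNoSQLGenericDataModelMapper(model, JsonSerializerOptions.Default);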
var storageModel = new JsonObject { @@ -271,10 +261,7 @@ public void MapFromStorageToDataModelMapsNullValues() ["NullableFloatVector"] = null }; - var sut = new AzureCosmosDBNoSQLGenericDataModelMapper( - s_vectorStoreRecordDefinition.Properties, - s_storagePropertyNames, - s_jsonSerializerOptions); + var sut = new AzureCosmosDBNoSQLGenericDataModelMapper(s_model, s_jsonSerializerOptions); // Act var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); @@ -290,10 +277,7 @@ public void MapFromStorageToDataModelMapsNullValues() public void MapFromStorageToDataModelThrowsForMissingKey() { // Arrange - var sut = new AzureCosmosDBNoSQLGenericDataModelMapper( - s_vectorStoreRecordDefinition.Properties, - s_storagePropertyNames, - s_jsonSerializerOptions); + var sut = new AzureCosmosDBNoSQLGenericDataModelMapper(s_model, s_jsonSerializerOptions); var storageModel = new JsonObject(); @@ -317,10 +301,7 @@ public void MapFromDataToStorageModelSkipsMissingProperties() }; var dataModel = new VectorStoreGenericDataModel("key"); - var sut = new AzureCosmosDBNoSQLGenericDataModelMapper( - s_vectorStoreRecordDefinition.Properties, - s_storagePropertyNames, - s_jsonSerializerOptions); + var sut = new AzureCosmosDBNoSQLGenericDataModelMapper(s_model, s_jsonSerializerOptions); // Act var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -350,10 +331,7 @@ public void MapFromStorageToDataModelSkipsMissingProperties() ["id"] = "key" }; - var sut = new AzureCosmosDBNoSQLGenericDataModelMapper( - s_vectorStoreRecordDefinition.Properties, - s_storagePropertyNames, - s_jsonSerializerOptions); + var sut = new AzureCosmosDBNoSQLGenericDataModelMapper(s_model, s_jsonSerializerOptions); // Act var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs index 55d062441674..14f15a8ee264 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs @@ -2,8 +2,8 @@ using System; using System.Collections.Generic; -using System.Linq; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; using Xunit; @@ -18,12 +18,18 @@ public sealed class AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests { private const string ScorePropertyName = "TestScore"; - private readonly Dictionary _storagePropertyNames = new() - { - ["TestProperty1"] = "test_property_1", - ["TestProperty2"] = "test_property_2", - ["TestProperty3"] = "test_property_3", - }; + private readonly VectorStoreRecordModel _model = new AzureCosmosDBNoSqlVectorStoreModelBuilder().Build( + typeof(VectorStoreGenericDataModel), + new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordVectorProperty("TestProperty1", typeof(string)) { StoragePropertyName = "test_property_1" }, + new VectorStoreRecordDataProperty("TestProperty2", typeof(string)) { StoragePropertyName = "test_property_2" }, + new 
VectorStoreRecordDataProperty("TestProperty3", typeof(string)) { StoragePropertyName = "test_property_3" } + ] + }); [Fact] public void BuildSearchQueryByDefaultReturnsValidQueryDefinition() @@ -31,7 +37,6 @@ public void BuildSearchQueryByDefaultReturnsValidQueryDefinition() // Arrange var vector = new ReadOnlyMemory([1f, 2f, 3f]); var vectorPropertyName = "test_property_1"; - var fields = this._storagePropertyNames.Values.ToList(); var filter = new VectorSearchFilter() .EqualTo("TestProperty2", "test-value-2") @@ -41,23 +46,23 @@ public void BuildSearchQueryByDefaultReturnsValidQueryDefinition() var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery, DummyType>( vector, keywords: null, - fields, - this._storagePropertyNames, + this._model, vectorPropertyName, textPropertyName: null, ScorePropertyName, oldFilter: filter, filter: null, 10, - 5); + 5, + includeVectors: true); var queryText = queryDefinition.QueryText; var queryParameters = queryDefinition.GetQueryParameters(); // Assert - Assert.Contains("SELECT x.test_property_1,x.test_property_2,x.test_property_3,VectorDistance(x.test_property_1, @vector) AS TestScore", queryText); + Assert.Contains("SELECT x.id,x.TestProperty1,x.TestProperty2,x.TestProperty3,VectorDistance(x.test_property_1, @vector) AS TestScore", queryText); Assert.Contains("FROM x", queryText); - Assert.Contains("WHERE x.test_property_2 = @cv0 AND ARRAY_CONTAINS(x.test_property_3, @cv1)", queryText); + Assert.Contains("WHERE x.TestProperty2 = @cv0 AND ARRAY_CONTAINS(x.TestProperty3, @cv1)", queryText); Assert.Contains("ORDER BY VectorDistance(x.test_property_1, @vector)", queryText); Assert.Contains("OFFSET 5 LIMIT 10", queryText); @@ -77,7 +82,6 @@ public void BuildSearchQueryWithoutOffsetReturnsQueryDefinitionWithTopParameter( // Arrange var vector = new ReadOnlyMemory([1f, 2f, 3f]); var vectorPropertyName = "test_property_1"; - var fields = this._storagePropertyNames.Values.ToList(); var filter = new VectorSearchFilter() .EqualTo("TestProperty2", "test-value-2") @@ -87,23 +91,23 @@ public void BuildSearchQueryWithoutOffsetReturnsQueryDefinitionWithTopParameter( var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery, DummyType>( vector, keywords: null, - fields, - this._storagePropertyNames, + this._model, vectorPropertyName, textPropertyName: null, ScorePropertyName, oldFilter: filter, filter: null, 10, - 0); + 0, + includeVectors: true); var queryText = queryDefinition.QueryText; var queryParameters = queryDefinition.GetQueryParameters(); // Assert - Assert.Contains("SELECT TOP 10 x.test_property_1,x.test_property_2,x.test_property_3,VectorDistance(x.test_property_1, @vector) AS TestScore", queryText); + Assert.Contains("SELECT TOP 10 x.id,x.TestProperty1,x.TestProperty2,x.TestProperty3,VectorDistance(x.test_property_1, @vector) AS TestScore", queryText); Assert.Contains("FROM x", queryText); - Assert.Contains("WHERE x.test_property_2 = @cv0 AND ARRAY_CONTAINS(x.test_property_3, @cv1)", queryText); + Assert.Contains("WHERE x.TestProperty2 = @cv0 AND ARRAY_CONTAINS(x.TestProperty3, @cv1)", queryText); Assert.Contains("ORDER BY VectorDistance(x.test_property_1, @vector)", queryText); Assert.DoesNotContain("OFFSET 0 LIMIT 10", queryText); @@ -124,7 +128,6 @@ public void BuildSearchQueryWithInvalidFilterThrowsException() // Arrange var vector = new ReadOnlyMemory([1f, 2f, 3f]); var vectorPropertyName = "test_property_1"; - var fields = this._storagePropertyNames.Values.ToList(); var filter = new 
VectorSearchFilter().EqualTo("non-existent-property", "test-value-2"); @@ -133,15 +136,15 @@ public void BuildSearchQueryWithInvalidFilterThrowsException() AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery, DummyType>( vector, keywords: null, - fields, - this._storagePropertyNames, + this._model, vectorPropertyName, textPropertyName: null, ScorePropertyName, oldFilter: filter, filter: null, 10, - 5)); + 5, + includeVectors: true)); } [Fact] @@ -150,21 +153,20 @@ public void BuildSearchQueryWithoutFilterDoesNotContainWhereClause() // Arrange var vector = new ReadOnlyMemory([1f, 2f, 3f]); var vectorPropertyName = "test_property_1"; - var fields = this._storagePropertyNames.Values.ToList(); // Act var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery, DummyType>( vector, keywords: null, - fields, - this._storagePropertyNames, + this._model, vectorPropertyName, textPropertyName: null, ScorePropertyName, oldFilter: null, filter: null, 10, - 5); + 5, + includeVectors: true); var queryText = queryDefinition.QueryText; var queryParameters = queryDefinition.GetQueryParameters(); @@ -182,23 +184,34 @@ public void BuildSelectQueryByDefaultReturnsValidQueryDefinition() { // Arrange const string ExpectedQueryText = """ - SELECT x.key,x.property_1,x.property_2 + SELECT x.id,x.TestProperty1,x.TestProperty2 FROM x - WHERE (x.key_property = @rk0 AND x.partition_key_property = @pk0) + WHERE (x.id = @rk0 AND x.TestProperty1 = @pk0) """; - const string KeyStoragePropertyName = "key_property"; - const string PartitionKeyPropertyName = "partition_key_property"; - - var keys = new List { new("key", "partition_key") }; - var fields = new List { "key", "property_1", "property_2" }; + const string KeyStoragePropertyName = "id"; + const string PartitionKeyPropertyName = "TestProperty1"; + + var model = new AzureCosmosDBNoSqlVectorStoreModelBuilder().Build( + typeof(VectorStoreGenericDataModel), + new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("TestProperty1", typeof(string)), + new VectorStoreRecordDataProperty("TestProperty2", typeof(string)) + ] + }); + var keys = new List { new("id", "TestProperty1") }; // Act var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSelectQuery( + model, KeyStoragePropertyName, PartitionKeyPropertyName, keys, - fields); + includeVectors: true); var queryText = queryDefinition.QueryText; var queryParameters = queryDefinition.GetQueryParameters(); @@ -207,10 +220,10 @@ FROM x Assert.Equal(ExpectedQueryText, queryText); Assert.Equal("@rk0", queryParameters[0].Name); - Assert.Equal("key", queryParameters[0].Value); + Assert.Equal("id", queryParameters[0].Value); Assert.Equal("@pk0", queryParameters[1].Name); - Assert.Equal("partition_key", queryParameters[1].Value); + Assert.Equal("TestProperty1", queryParameters[1].Value); } [Fact] @@ -219,9 +232,8 @@ public void BuildSearchQueryWithHybridFieldsReturnsValidHybridQueryDefinition() // Arrange var vector = new ReadOnlyMemory([1f, 2f, 3f]); var keywordText = "hybrid"; - var vectorPropertyName = "test_property_1"; - var textPropertyName = "test_property_2"; - var fields = this._storagePropertyNames.Values.ToList(); + var vectorPropertyName = "TestProperty1"; + var textPropertyName = "TestProperty2"; var filter = new VectorSearchFilter() .EqualTo("TestProperty2", "test-value-2") @@ -231,24 +243,24 @@ public void BuildSearchQueryWithHybridFieldsReturnsValidHybridQueryDefinition() var 
queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery, DummyType>( vector, [keywordText], - fields, - this._storagePropertyNames, + this._model, vectorPropertyName, textPropertyName, ScorePropertyName, oldFilter: filter, filter: null, 10, - 5); + 5, + includeVectors: true); var queryText = queryDefinition.QueryText; var queryParameters = queryDefinition.GetQueryParameters(); // Assert - Assert.Contains("SELECT x.test_property_1,x.test_property_2,x.test_property_3,VectorDistance(x.test_property_1, @vector) AS TestScore", queryText); + Assert.Contains("SELECT x.id,x.TestProperty1,x.TestProperty2,x.TestProperty3,VectorDistance(x.TestProperty1, @vector) AS TestScore", queryText); Assert.Contains("FROM x", queryText); - Assert.Contains("WHERE x.test_property_2 = @cv0 AND ARRAY_CONTAINS(x.test_property_3, @cv1)", queryText); - Assert.Contains("ORDER BY RANK RRF(VectorDistance(x.test_property_1, @vector), FullTextScore(x.test_property_2, [\"hybrid\"]))", queryText); + Assert.Contains("WHERE x.TestProperty2 = @cv0 AND ARRAY_CONTAINS(x.TestProperty3, @cv1)", queryText); + Assert.Contains("ORDER BY RANK RRF(VectorDistance(x.TestProperty1, @vector), FullTextScore(x.TestProperty2, [\"hybrid\"]))", queryText); Assert.Contains("OFFSET 5 LIMIT 10", queryText); Assert.Equal("@vector", queryParameters[0].Name); diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 37c72f38ace1..ea45cc39b158 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -35,7 +35,7 @@ public AzureCosmosDBNoSQLVectorStoreRecordCollectionTests() public void ConstructorForModelWithoutKeyThrowsException() { // Act & Assert - var exception = Assert.Throws(() => new AzureCosmosDBNoSQLVectorStoreRecordCollection(this._mockDatabase.Object, "collection")); + var exception = Assert.Throws(() => new AzureCosmosDBNoSQLVectorStoreRecordCollection(this._mockDatabase.Object, "collection")); Assert.Contains("No key property found", exception.Message); } diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordMapperTests.cs index 9c2b7de29b41..3ecbfeccc4ce 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordMapperTests.cs @@ -5,6 +5,7 @@ using System.Linq; using System.Text.Json; using System.Text.Json.Nodes; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; using Xunit; @@ -15,20 +16,14 @@ namespace SemanticKernel.Connectors.AzureCosmosDBNoSQL.UnitTests; /// public sealed class AzureCosmosDBNoSQLVectorStoreRecordMapperTests { - private readonly AzureCosmosDBNoSQLVectorStoreRecordMapper _sut; - - public AzureCosmosDBNoSQLVectorStoreRecordMapperTests() - { - var storagePropertyNames = new Dictionary - { - ["HotelId"] = "HotelId", - ["HotelName"] = "HotelName", - ["Tags"] = "Tags", - ["DescriptionEmbedding"] = 
"description_embedding", - }; - - this._sut = new("HotelId", storagePropertyNames, JsonSerializerOptions.Default); - } + private readonly AzureCosmosDBNoSQLVectorStoreRecordMapper _sut + = new( + new VectorStoreRecordKeyPropertyModel("HotelId", typeof(string)) + { + StorageName = "id", + TemporaryStorageName = "HotelId" + }, + JsonSerializerOptions.Default); [Fact] public void MapFromDataToStorageModelReturnsValidObject() diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/Connectors.AzureCosmosDBNoSQL.UnitTests.csproj b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/Connectors.AzureCosmosDBNoSQL.UnitTests.csproj index ff8643740f11..3ee93b149127 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/Connectors.AzureCosmosDBNoSQL.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/Connectors.AzureCosmosDBNoSQL.UnitTests.csproj @@ -9,6 +9,7 @@ disable false $(NoWarn);SKEXP0001,SKEXP0020 + $(NoWarn);MEVD9001 diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs new file mode 100644 index 000000000000..75ceb3b8a632 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using Microsoft.Extensions.VectorData.ConnectorSupport; + +namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; + +internal static class AzureAISearchConstants +{ + /// A set of types that a key on the provided model may have. + private static readonly HashSet s_supportedKeyTypes = [typeof(string)]; + + /// A set of types that data properties on the provided model may have. + private static readonly HashSet s_supportedDataTypes = + [ + typeof(string), + typeof(int), + typeof(long), + typeof(double), + typeof(float), + typeof(bool), + typeof(DateTimeOffset) + ]; + + /// A set of types that vectors on the provided model may have. + /// + /// Azure AI Search is adding support for more types than just float32, but these are not available for use via the + /// SDK yet. We will update this list as the SDK is updated. + /// + /// + private static readonly HashSet s_supportedVectorTypes = + [ + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?) 
+ ]; + + internal static readonly VectorStoreRecordModelBuildingOptions s_modelBuildingOptions = new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + + SupportedKeyPropertyTypes = s_supportedKeyTypes, + SupportedDataPropertyTypes = s_supportedDataTypes, + SupportedEnumerableDataPropertyElementTypes = s_supportedDataTypes, + SupportedVectorPropertyTypes = s_supportedVectorTypes, + + UsesExternalSerializer = true + }; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs index 16164c2a3eca..c491d43bcb6f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs @@ -10,23 +10,24 @@ using System.Reflection; using System.Runtime.CompilerServices; using System.Text; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; internal class AzureAISearchFilterTranslator { - private IReadOnlyDictionary _storagePropertyNames = null!; + private VectorStoreRecordModel _model = null!; private ParameterExpression _recordParameter = null!; private readonly StringBuilder _filter = new(); private static readonly char[] s_searchInDefaultDelimiter = [' ', ',']; - internal string Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + internal string Translate(LambdaExpression lambdaExpression, VectorStoreRecordModel model) { Debug.Assert(this._filter.Length == 0); - this._storagePropertyNames = storagePropertyNames; + this._model = model; Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; @@ -320,11 +321,12 @@ private bool TryGetField(Expression expression, [NotNullWhen(true)] out string? { if (expression is MemberExpression member && member.Expression == this._recordParameter) { - if (!this._storagePropertyNames.TryGetValue(member.Member.Name, out field)) + if (!this._model.PropertyMap.TryGetValue(member.Member.Name, out var property)) { throw new InvalidOperationException($"Property name '{member.Member.Name}' provided as part of the filter clause is not a valid property name."); } + field = property.StorageName; return true; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchGenericDataModelMapper.cs index 502edaed2605..f63fdd32bc00 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchGenericDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchGenericDataModelMapper.cs @@ -3,32 +3,20 @@ using System; using System.Collections; using System.Collections.Generic; +using System.Diagnostics; using System.Linq; using System.Text.Json; using System.Text.Json.Nodes; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Azure AI Search. 
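// --- Illustrative sketch, not part of this patch: resolving storage names through the record model. ---
// The filter translator above now looks properties up in VectorStoreRecordModel.PropertyMap and reads
// StorageName from the resolved property, instead of consulting an IReadOnlyDictionary of storage property
// names. A minimal helper with the same shape might look like this (the method and variable names are
// assumptions; the lookup and error message mirror the TryGetField change above):
static string ResolveStorageName(VectorStoreRecordModel model, string modelPropertyName)
{
    if (!model.PropertyMap.TryGetValue(modelPropertyName, out var property))
    {
        throw new InvalidOperationException($"Property name '{modelPropertyName}' provided as part of the filter clause is not a valid property name.");
    }

    // StorageName already reflects any StoragePropertyName override supplied in the record definition.
    return property.StorageName;
}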
/// -internal class AzureAISearchGenericDataModelMapper : IVectorStoreRecordMapper, JsonObject> +internal sealed class AzureAISearchGenericDataModelMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper, JsonObject> { - /// A that defines the schema of the data in the database. - private readonly VectorStoreRecordDefinition _vectorStoreRecordDefinition; - - /// - /// Initializes a new instance of the class. - /// - /// A that defines the schema of the data in the database. - public AzureAISearchGenericDataModelMapper(VectorStoreRecordDefinition vectorStoreRecordDefinition) - { - Verify.NotNull(vectorStoreRecordDefinition); - - this._vectorStoreRecordDefinition = vectorStoreRecordDefinition; - } - /// public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) { @@ -37,30 +25,32 @@ public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel var storageJsonObject = new JsonObject(); // Loop through all known properties and map each from the data model json to the storage json. - foreach (var property in this._vectorStoreRecordDefinition.Properties) + foreach (var property in model.Properties) { - if (property is VectorStoreRecordKeyProperty keyProperty) - { - var storagePropertyName = keyProperty.StoragePropertyName ?? keyProperty.DataModelPropertyName; - storageJsonObject.Add(storagePropertyName, dataModel.Key); - } - else if (property is VectorStoreRecordDataProperty dataProperty) - { - if (dataModel.Data is not null && dataModel.Data.TryGetValue(dataProperty.DataModelPropertyName, out var dataValue)) - { - var storagePropertyName = dataProperty.StoragePropertyName ?? dataProperty.DataModelPropertyName; - var serializedJsonNode = JsonSerializer.SerializeToNode(dataValue); - storageJsonObject.Add(storagePropertyName, serializedJsonNode); - } - } - else if (property is VectorStoreRecordVectorProperty vectorProperty) + switch (property) { - if (dataModel.Vectors is not null && dataModel.Vectors.TryGetValue(vectorProperty.DataModelPropertyName, out var vectorValue)) - { - var storagePropertyName = vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName; - var serializedJsonNode = JsonSerializer.SerializeToNode(vectorValue); - storageJsonObject.Add(storagePropertyName, serializedJsonNode); - } + case VectorStoreRecordKeyPropertyModel keyProperty: + storageJsonObject.Add(keyProperty.StorageName, dataModel.Key); + continue; + + case VectorStoreRecordDataPropertyModel dataProperty: + if (dataModel.Data is not null && dataModel.Data.TryGetValue(dataProperty.ModelName, out var dataValue)) + { + var serializedJsonNode = JsonSerializer.SerializeToNode(dataValue); + storageJsonObject.Add(dataProperty.ModelName, serializedJsonNode); + } + continue; + + case VectorStoreRecordVectorPropertyModel vectorProperty: + if (dataModel.Vectors is not null && dataModel.Vectors.TryGetValue(vectorProperty.ModelName, out var vectorValue)) + { + var serializedJsonNode = JsonSerializer.SerializeToNode(vectorValue); + storageJsonObject.Add(vectorProperty.StorageName, serializedJsonNode); + } + continue; + + default: + throw new UnreachableException(); } } @@ -78,53 +68,48 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject string? key = null; // Loop through all known properties and map each from json to the data type. 
- foreach (var property in this._vectorStoreRecordDefinition.Properties) + foreach (var property in model.Properties) { - if (property is VectorStoreRecordKeyProperty keyProperty) + switch (property) { - var storagePropertyName = keyProperty.StoragePropertyName ?? keyProperty.DataModelPropertyName; - var value = storageModel[storagePropertyName]; - if (value is null) - { - throw new VectorStoreRecordMappingException($"The key property '{storagePropertyName}' is missing from the record retrieved from storage."); - } + case VectorStoreRecordKeyPropertyModel keyProperty: + key = (string?)storageModel[keyProperty.StorageName] + ?? throw new VectorStoreRecordMappingException($"The key property '{keyProperty.StorageName}' is missing from the record retrieved from storage."); - key = (string)value!; - } - else if (property is VectorStoreRecordDataProperty dataProperty) - { - var storagePropertyName = dataProperty.StoragePropertyName ?? dataProperty.DataModelPropertyName; - if (!storageModel.TryGetPropertyValue(storagePropertyName, out var value)) - { continue; - } - if (value is not null) - { - dataProperties.Add(dataProperty.DataModelPropertyName, GetDataPropertyValue(property.PropertyType, value)); - } - else - { - dataProperties.Add(dataProperty.DataModelPropertyName, null); - } - } - else if (property is VectorStoreRecordVectorProperty vectorProperty && options.IncludeVectors) - { - var storagePropertyName = vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName; - if (!storageModel.TryGetPropertyValue(storagePropertyName, out var value)) + case VectorStoreRecordDataPropertyModel dataProperty: { + if (storageModel.TryGetPropertyValue(dataProperty.StorageName, out var value)) + { + dataProperties.Add(dataProperty.ModelName, value is null ? null : GetDataPropertyValue(property.Type, value)); + } continue; } - if (value is not null) - { - ReadOnlyMemory vector = value.AsArray().Select(x => (float)x!).ToArray(); - vectorProperties.Add(vectorProperty.DataModelPropertyName, vector); - } - else + case VectorStoreRecordVectorPropertyModel vectorProperty when options.IncludeVectors: { - vectorProperties.Add(vectorProperty.DataModelPropertyName, null); + if (storageModel.TryGetPropertyValue(vectorProperty.StorageName, out var value)) + { + if (value is not null) + { + ReadOnlyMemory vector = value.AsArray().Select(x => (float)x!).ToArray(); + vectorProperties.Add(vectorProperty.ModelName, vector); + } + else + { + vectorProperties.Add(vectorProperty.ModelName, null); + } + } + + continue; } + + case VectorStoreRecordVectorPropertyModel vectorProperty when !options.IncludeVectors: + break; + + default: + throw new UnreachableException(); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs index 93f95ca69c48..b7e2b90fbdf8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs @@ -6,6 +6,7 @@ using System.Linq; using Azure.Search.Documents.Indexes.Models; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; @@ -18,53 +19,50 @@ internal static class AzureAISearchVectorStoreCollectionCreateMapping /// Map from a to an Azure AI Search . 
/// /// The key property definition. - /// The name of the property in storage. /// The for the provided property definition. - public static SearchableField MapKeyField(VectorStoreRecordKeyProperty keyProperty, string storagePropertyName) + public static SearchableField MapKeyField(VectorStoreRecordKeyPropertyModel keyProperty) { - return new SearchableField(storagePropertyName) { IsKey = true, IsFilterable = true }; + return new SearchableField(keyProperty.StorageName) { IsKey = true, IsFilterable = true }; } /// /// Map from a to an Azure AI Search . /// /// The data property definition. - /// The name of the property in storage. /// The for the provided property definition. /// Throws when the definition is missing required information. - public static SimpleField MapDataField(VectorStoreRecordDataProperty dataProperty, string storagePropertyName) + public static SimpleField MapDataField(VectorStoreRecordDataPropertyModel dataProperty) { if (dataProperty.IsFullTextSearchable) { - if (dataProperty.PropertyType != typeof(string)) + if (dataProperty.Type != typeof(string)) { - throw new InvalidOperationException($"Property {nameof(dataProperty.IsFullTextSearchable)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.DataModelPropertyName}' is set to true, but the property type is not a string. The Azure AI Search VectorStore supports {nameof(dataProperty.IsFullTextSearchable)} on string properties only."); + throw new InvalidOperationException($"Property {nameof(dataProperty.IsFullTextSearchable)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.ModelName}' is set to true, but the property type is not a string. The Azure AI Search VectorStore supports {nameof(dataProperty.IsFullTextSearchable)} on string properties only."); } - return new SearchableField(storagePropertyName) { IsFilterable = dataProperty.IsFilterable }; + return new SearchableField(dataProperty.StorageName) { IsFilterable = dataProperty.IsFilterable }; } - return new SimpleField(storagePropertyName, AzureAISearchVectorStoreCollectionCreateMapping.GetSDKFieldDataType(dataProperty.PropertyType)) { IsFilterable = dataProperty.IsFilterable }; + return new SimpleField(dataProperty.StorageName, AzureAISearchVectorStoreCollectionCreateMapping.GetSDKFieldDataType(dataProperty.Type)) { IsFilterable = dataProperty.IsFilterable }; } /// /// Map form a to an Azure AI Search and generate the required index configuration. /// /// The vector property definition. - /// The name of the property in storage. /// The and required index configuration. /// Throws when the definition is missing required information, or unsupported options are configured. 
- public static (VectorSearchField vectorSearchField, VectorSearchAlgorithmConfiguration algorithmConfiguration, VectorSearchProfile vectorSearchProfile) MapVectorField(VectorStoreRecordVectorProperty vectorProperty, string storagePropertyName) + public static (VectorSearchField vectorSearchField, VectorSearchAlgorithmConfiguration algorithmConfiguration, VectorSearchProfile vectorSearchProfile) MapVectorField(VectorStoreRecordVectorPropertyModel vectorProperty) { if (vectorProperty.Dimensions is not > 0) { - throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' must be set to a positive integer to create a collection."); + throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' must be set to a positive integer to create a collection."); } // Build a name for the profile and algorithm configuration based on the property name // since we'll just create a separate one for each vector property. - var vectorSearchProfileName = $"{storagePropertyName}Profile"; - var algorithmConfigName = $"{storagePropertyName}AlgoConfig"; + var vectorSearchProfileName = $"{vectorProperty.StorageName}Profile"; + var algorithmConfigName = $"{vectorProperty.StorageName}AlgoConfig"; // Read the vector index settings from the property definition and create the right index configuration. var indexKind = AzureAISearchVectorStoreCollectionCreateMapping.GetSKIndexKind(vectorProperty); @@ -74,11 +72,11 @@ public static (VectorSearchField vectorSearchField, VectorSearchAlgorithmConfigu { IndexKind.Hnsw => new HnswAlgorithmConfiguration(algorithmConfigName) { Parameters = new HnswParameters { Metric = algorithmMetric } }, IndexKind.Flat => new ExhaustiveKnnAlgorithmConfiguration(algorithmConfigName) { Parameters = new ExhaustiveKnnParameters { Metric = algorithmMetric } }, - _ => throw new InvalidOperationException($"Index kind '{indexKind}' on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' is not supported by the Azure AI Search VectorStore.") + _ => throw new InvalidOperationException($"Index kind '{indexKind}' on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' is not supported by the Azure AI Search VectorStore.") }; var vectorSearchProfile = new VectorSearchProfile(vectorSearchProfileName, algorithmConfigName); - return (new VectorSearchField(storagePropertyName, vectorProperty.Dimensions.Value, vectorSearchProfileName), algorithmConfiguration, vectorSearchProfile); + return (new VectorSearchField(vectorProperty.StorageName, vectorProperty.Dimensions.Value, vectorSearchProfileName), algorithmConfiguration, vectorSearchProfile); } /// @@ -87,15 +85,8 @@ public static (VectorSearchField vectorSearchField, VectorSearchAlgorithmConfigu /// /// The vector property definition. /// The configured or default . - public static string GetSKIndexKind(VectorStoreRecordVectorProperty vectorProperty) - { - if (vectorProperty.IndexKind is null) - { - return IndexKind.Hnsw; - } - - return vectorProperty.IndexKind; - } + public static string GetSKIndexKind(VectorStoreRecordVectorPropertyModel vectorProperty) + => vectorProperty.IndexKind ?? IndexKind.Hnsw; /// /// Get the configured from the given . @@ -104,21 +95,14 @@ public static string GetSKIndexKind(VectorStoreRecordVectorProperty vectorProper /// The vector property definition. /// The chosen . 
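The triple returned by MapVectorField is wired into the index definition elsewhere in the collection class. A sketch of how the pieces fit together, using the naming convention from the hunk ("{StorageName}Profile" / "{StorageName}AlgoConfig"); the index name, storage name and dimension count are illustrative.

    using Azure.Search.Documents.Indexes.Models;

    var storageName = "descriptionEmbedding";
    var profileName = $"{storageName}Profile";
    var algoConfigName = $"{storageName}AlgoConfig";

    var index = new SearchIndex("hotels") { VectorSearch = new VectorSearch() };

    // The vector field references the profile, and the profile references the algorithm configuration.
    index.Fields.Add(new VectorSearchField(storageName, 1536, profileName));
    index.VectorSearch.Algorithms.Add(new HnswAlgorithmConfiguration(algoConfigName)
    {
        Parameters = new HnswParameters { Metric = VectorSearchAlgorithmMetric.Cosine }
    });
    index.VectorSearch.Profiles.Add(new VectorSearchProfile(profileName, algoConfigName));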
/// Thrown if a distance function is chosen that isn't supported by Azure AI Search. - public static VectorSearchAlgorithmMetric GetSDKDistanceAlgorithm(VectorStoreRecordVectorProperty vectorProperty) - { - if (vectorProperty.DistanceFunction is null) + public static VectorSearchAlgorithmMetric GetSDKDistanceAlgorithm(VectorStoreRecordVectorPropertyModel vectorProperty) + => vectorProperty.DistanceFunction switch { - return VectorSearchAlgorithmMetric.Cosine; - } - - return vectorProperty.DistanceFunction switch - { - DistanceFunction.CosineSimilarity => VectorSearchAlgorithmMetric.Cosine, + DistanceFunction.CosineSimilarity or null => VectorSearchAlgorithmMetric.Cosine, DistanceFunction.DotProductSimilarity => VectorSearchAlgorithmMetric.DotProduct, DistanceFunction.EuclideanDistance => VectorSearchAlgorithmMetric.Euclidean, - _ => throw new InvalidOperationException($"Distance function '{vectorProperty.DistanceFunction}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' is not supported by the Azure AI Search VectorStore.") + _ => throw new InvalidOperationException($"Distance function '{vectorProperty.DistanceFunction}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' is not supported by the Azure AI Search VectorStore.") }; - } /// /// Maps the given property type to the corresponding . diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionSearchMapping.cs index 732b6aeae42c..4e9240f98bb1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionSearchMapping.cs @@ -1,9 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Collections.Generic; using System.Linq; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; @@ -17,10 +17,10 @@ internal static class AzureAISearchVectorStoreCollectionSearchMapping /// Build an OData filter string from the provided . /// /// The to build an OData filter string from. - /// A mapping of data model property names to the names under which they are stored. + /// The model. /// The OData filter string. /// Thrown when a provided filter value is not supported. - public static string BuildLegacyFilterString(VectorSearchFilter basicVectorSearchFilter, IReadOnlyDictionary storagePropertyNames) + public static string BuildLegacyFilterString(VectorSearchFilter basicVectorSearchFilter, VectorStoreRecordModel model) { var filterString = string.Empty; if (basicVectorSearchFilter.FilterClauses is not null) @@ -28,7 +28,7 @@ public static string BuildLegacyFilterString(VectorSearchFilter basicVectorSearc // Map Equality clauses. var filterStrings = basicVectorSearchFilter?.FilterClauses.OfType().Select(x => { - string storageFieldName = GetStoragePropertyName(storagePropertyNames, x.FieldName); + string storageFieldName = GetStoragePropertyName(model, x.FieldName); return x.Value switch { @@ -49,11 +49,7 @@ public static string BuildLegacyFilterString(VectorSearchFilter basicVectorSearc // Map tag contains clauses. 
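The "or null" arm above is what lets the previous "return the default when nothing is configured" branch collapse into the switch expression. A stand-alone illustration of that pattern, with made-up string values:

    static string MetricFor(string? distanceFunction) => distanceFunction switch
    {
        "CosineSimilarity" or null => "cosine",      // unset falls back to the default metric
        "DotProductSimilarity" => "dotProduct",
        "EuclideanDistance" => "euclidean",
        _ => throw new System.InvalidOperationException($"Unsupported distance function '{distanceFunction}'.")
    };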
var tagListContainsStrings = basicVectorSearchFilter?.FilterClauses .OfType() - .Select(x => - { - string storageFieldName = GetStoragePropertyName(storagePropertyNames, x.FieldName); - return $"{storageFieldName}/any(t: t eq '{x.Value}')"; - }); + .Select(x => $"{GetStoragePropertyName(model, x.FieldName)}/any(t: t eq '{x.Value}')"); // Combine clauses. filterString = string.Join(" and ", filterStrings!.Concat(tagListContainsStrings!)); @@ -66,17 +62,17 @@ public static string BuildLegacyFilterString(VectorSearchFilter basicVectorSearc /// /// Gets the name of the name under which the property with the given name is stored. /// - /// A mapping of data model property names to the names under which they are stored. + /// The model. /// The name of the property in the data model. /// The name that the property os stored under. /// Thrown when the property name is not found. - private static string GetStoragePropertyName(IReadOnlyDictionary storagePropertyNames, string fieldName) + private static string GetStoragePropertyName(VectorStoreRecordModel model, string fieldName) { - if (!storagePropertyNames.TryGetValue(fieldName, out var storageFieldName)) + if (!model.PropertyMap.TryGetValue(fieldName, out var property)) { throw new InvalidOperationException($"Property name '{fieldName}' provided as part of the filter clause is not a valid property name."); } - return storageFieldName; + return property.StorageName; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index c6796e3627a2..82d3bcc5fe2a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -2,10 +2,10 @@ using System; using System.Collections.Generic; +using System.Diagnostics; using System.Linq; using System.Linq.Expressions; using System.Runtime.CompilerServices; -using System.Text.Json; using System.Text.Json.Nodes; using System.Threading; using System.Threading.Tasks; @@ -15,6 +15,7 @@ using Azure.Search.Documents.Indexes.Models; using Azure.Search.Documents.Models; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; @@ -32,42 +33,6 @@ public class AzureAISearchVectorStoreRecordCollection : /// The name of this database for telemetry purposes. private const string DatabaseName = "AzureAISearch"; - /// A set of types that a key on the provided model may have. - private static readonly HashSet s_supportedKeyTypes = - [ - typeof(string) - ]; - - /// A set of types that data properties on the provided model may have. - private static readonly HashSet s_supportedDataTypes = - [ - typeof(string), - typeof(int), - typeof(long), - typeof(double), - typeof(float), - typeof(bool), - typeof(DateTimeOffset), - typeof(int?), - typeof(long?), - typeof(double?), - typeof(float?), - typeof(bool?), - typeof(DateTimeOffset?), - ]; - - /// A set of types that vectors on the provided model may have. - /// - /// Azure AI Search is adding support for more types than just float32, but these are not available for use via the - /// SDK yet. We will update this list as the SDK is updated. - /// - /// - private static readonly HashSet s_supportedVectorTypes = - [ - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?) 
- ]; - /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -89,8 +54,8 @@ public class AzureAISearchVectorStoreRecordCollection : /// A mapper to use for converting between the data model and the Azure AI Search record. private readonly IVectorStoreRecordMapper? _mapper; - /// A helper to access property information for the current data model and record definition. - private readonly VectorStoreRecordPropertyReader _propertyReader; + /// The model for this collection. + private readonly VectorStoreRecordModel _model; /// /// Initializes a new instance of the class. @@ -105,29 +70,15 @@ public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexCli // Verify. Verify.NotNull(searchIndexClient); Verify.NotNullOrWhiteSpace(collectionName); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.JsonObjectCustomMapper is not null, s_supportedKeyTypes); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); // Assign. this._searchIndexClient = searchIndexClient; this._collectionName = collectionName; this._options = options ?? new AzureAISearchVectorStoreRecordCollectionOptions(); this._searchClient = this._searchIndexClient.GetSearchClient(collectionName); - this._propertyReader = new VectorStoreRecordPropertyReader( - typeof(TRecord), - this._options.VectorStoreRecordDefinition, - new() - { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true, - JsonSerializerOptions = this._options.JsonSerializerOptions ?? JsonSerializerOptions.Default - }); - // Validate property types. - this._propertyReader.VerifyKeyProperties(s_supportedKeyTypes); - this._propertyReader.VerifyDataProperties(s_supportedDataTypes, supportEnumerable: true); - this._propertyReader.VerifyVectorProperties(s_supportedVectorTypes); + this._model = new VectorStoreRecordJsonModelBuilder(AzureAISearchConstants.s_modelBuildingOptions) + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.JsonSerializerOptions); // Resolve mapper. // First, if someone has provided a custom mapper, use that. @@ -139,7 +90,7 @@ public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexCli } else if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) { - this._mapper = new AzureAISearchGenericDataModelMapper(this._propertyReader.RecordDefinition) as IVectorStoreRecordMapper; + this._mapper = new AzureAISearchGenericDataModelMapper(this._model) as IVectorStoreRecordMapper; } } @@ -176,35 +127,29 @@ public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = var searchFields = new List(); // Loop through all properties and create the search fields. - foreach (var property in this._propertyReader.Properties) + foreach (var property in this._model.Properties) { - // Key property. - if (property is VectorStoreRecordKeyProperty keyProperty) + switch (property) { - searchFields.Add(AzureAISearchVectorStoreCollectionCreateMapping.MapKeyField( - keyProperty, - this._propertyReader.KeyPropertyJsonName)); - } + case VectorStoreRecordKeyPropertyModel p: + searchFields.Add(AzureAISearchVectorStoreCollectionCreateMapping.MapKeyField(p)); + break; - // Data property. 
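Instead of a VectorStoreRecordPropertyReader plus ad-hoc storage-name dictionaries, the connector now builds one model up front (here via VectorStoreRecordJsonModelBuilder) and reads storage names off it everywhere else. A hypothetical, simplified shape of what that model exposes to the remaining hunks; the real type is internal to Microsoft.Extensions.VectorData.ConnectorSupport and richer than this.

    using System.Collections.Generic;

    sealed class RecordModelSketch
    {
        // Every property, in declaration order.
        public required IReadOnlyList<PropertyModelSketch> Properties { get; init; }

        // Lookup keyed by the data-model (CLR) property name; the filter translators use it
        // to resolve a member access in a filter expression to its storage name.
        public required IReadOnlyDictionary<string, PropertyModelSketch> PropertyMap { get; init; }

        // Convenience views used when building projections, deletes and index definitions.
        public required PropertyModelSketch KeyProperty { get; init; }
        public required IReadOnlyList<PropertyModelSketch> DataProperties { get; init; }
        public required IReadOnlyList<PropertyModelSketch> VectorProperties { get; init; }
    }

    sealed class PropertyModelSketch
    {
        public required string ModelName { get; init; }   // name on the .NET type
        public required string StorageName { get; init; } // name in the database / index
    }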
- if (property is VectorStoreRecordDataProperty dataProperty) - { - searchFields.Add(AzureAISearchVectorStoreCollectionCreateMapping.MapDataField( - dataProperty, - this._propertyReader.GetJsonPropertyName(dataProperty.DataModelPropertyName))); - } + case VectorStoreRecordDataPropertyModel p: + searchFields.Add(AzureAISearchVectorStoreCollectionCreateMapping.MapDataField(p)); + break; - // Vector property. - if (property is VectorStoreRecordVectorProperty vectorProperty) - { - (VectorSearchField vectorSearchField, VectorSearchAlgorithmConfiguration algorithmConfiguration, VectorSearchProfile vectorSearchProfile) = AzureAISearchVectorStoreCollectionCreateMapping.MapVectorField( - vectorProperty, - this._propertyReader.GetJsonPropertyName(vectorProperty.DataModelPropertyName)); - - // Add the search field, plus its profile and algorithm configuration to the search config. - searchFields.Add(vectorSearchField); - vectorSearchConfig.Algorithms.Add(algorithmConfiguration); - vectorSearchConfig.Profiles.Add(vectorSearchProfile); + case VectorStoreRecordVectorPropertyModel p: + (VectorSearchField vectorSearchField, VectorSearchAlgorithmConfiguration algorithmConfiguration, VectorSearchProfile vectorSearchProfile) = AzureAISearchVectorStoreCollectionCreateMapping.MapVectorField(p); + + // Add the search field, plus its profile and algorithm configuration to the search config. + searchFields.Add(vectorSearchField); + vectorSearchConfig.Algorithms.Add(algorithmConfiguration); + vectorSearchConfig.Profiles.Add(vectorSearchProfile); + break; + + default: + throw new UnreachableException(); } } @@ -286,7 +231,7 @@ public virtual Task DeleteAsync(string key, CancellationToken cancellationToken // Remove record. return this.RunOperationAsync( "DeleteDocuments", - () => this._searchClient.DeleteDocumentsAsync(this._propertyReader.KeyPropertyJsonName, [key], new IndexDocumentsOptions(), cancellationToken)); + () => this._searchClient.DeleteDocumentsAsync(this._model.KeyProperty.StorageName, [key], new IndexDocumentsOptions(), cancellationToken)); } /// @@ -301,7 +246,7 @@ public virtual Task DeleteAsync(IEnumerable keys, CancellationToken canc // Remove records. return this.RunOperationAsync( "DeleteDocuments", - () => this._searchClient.DeleteDocumentsAsync(this._propertyReader.KeyPropertyJsonName, keys, new IndexDocumentsOptions(), cancellationToken)); + () => this._searchClient.DeleteDocumentsAsync(this._model.KeyProperty.StorageName, keys, new IndexDocumentsOptions(), cancellationToken)); } /// @@ -344,20 +289,21 @@ public virtual Task> VectorizedSearchAsync // Resolve options. var internalOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions); - var vectorPropertyName = this._propertyReader.GetJsonPropertyName(vectorProperty!.DataModelPropertyName); + var vectorProperty = this._model.GetVectorPropertyOrSingle(internalOptions); // Configure search settings. - var vectorQueries = new List(); - vectorQueries.Add(new VectorizedQuery(floatVector) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorPropertyName } }); + var vectorQueries = new List + { + new VectorizedQuery(floatVector) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorProperty.StorageName } } + }; #pragma warning disable CS0618 // VectorSearchFilter is obsolete // Build filter object. 
var filter = internalOptions switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), - { OldFilter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._propertyReader.JsonPropertyNamesMap), - { Filter: Expression> newFilter } => new AzureAISearchFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), + { OldFilter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._model), + { Filter: Expression> newFilter } => new AzureAISearchFilterTranslator().Translate(newFilter, this._model), _ => null }; #pragma warning restore CS0618 @@ -381,8 +327,12 @@ public virtual Task> VectorizedSearchAsync // Filter out vector fields if requested. if (!internalOptions.IncludeVectors) { - searchOptions.Select.Add(this._propertyReader.KeyPropertyJsonName); - searchOptions.Select.AddRange(this._propertyReader.DataPropertyJsonNames); + searchOptions.Select.Add(this._model.KeyProperty.StorageName); + + foreach (var dataProperty in this._model.DataProperties) + { + searchOptions.Select.Add(dataProperty.StorageName); + } } return this.SearchAndMapToDataModelAsync(null, searchOptions, internalOptions.IncludeVectors, cancellationToken); @@ -393,27 +343,28 @@ public virtual Task> VectorizableTextSearchAsync(st { Verify.NotNull(searchText); - if (this._propertyReader.FirstVectorPropertyName is null) + if (this._model.VectorProperties.Count == 0) { throw new InvalidOperationException("The collection does not have any vector fields, so vector search is not possible."); } // Resolve options. var internalOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions); - var vectorPropertyName = this._propertyReader.GetJsonPropertyName(vectorProperty!.DataModelPropertyName); + var vectorProperty = this._model.GetVectorPropertyOrSingle(internalOptions); // Configure search settings. - var vectorQueries = new List(); - vectorQueries.Add(new VectorizableTextQuery(searchText) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorPropertyName } }); + var vectorQueries = new List + { + new VectorizableTextQuery(searchText) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorProperty.StorageName } } + }; #pragma warning disable CS0618 // VectorSearchFilter is obsolete // Build filter object. var filter = internalOptions switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), - { OldFilter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._propertyReader.JsonPropertyNamesMap), - { Filter: Expression> newFilter } => new AzureAISearchFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), + { OldFilter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._model), + { Filter: Expression> newFilter } => new AzureAISearchFilterTranslator().Translate(newFilter, this._model), _ => null }; #pragma warning restore CS0618 @@ -437,8 +388,12 @@ public virtual Task> VectorizableTextSearchAsync(st // Filter out vector fields if requested. 
if (!internalOptions.IncludeVectors) { - searchOptions.Select.Add(this._propertyReader.KeyPropertyJsonName); - searchOptions.Select.AddRange(this._propertyReader.DataPropertyJsonNames); + searchOptions.Select.Add(this._model.KeyProperty.StorageName); + + foreach (var dataProperty in this._model.DataProperties) + { + searchOptions.Select.Add(dataProperty.StorageName); + } } return this.SearchAndMapToDataModelAsync(null, searchOptions, internalOptions.IncludeVectors, cancellationToken); @@ -452,22 +407,22 @@ public Task> HybridSearchAsync(TVector vec // Resolve options. var internalOptions = options ?? s_defaultKeywordVectorizedHybridSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(new() { VectorProperty = internalOptions.VectorProperty }); - var vectorPropertyName = this._propertyReader.GetJsonPropertyName(vectorProperty.DataModelPropertyName); - var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(internalOptions.AdditionalProperty); - var textDataPropertyName = this._propertyReader.GetJsonPropertyName(textDataProperty.DataModelPropertyName); + var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = internalOptions.VectorProperty }); + var textDataProperty = this._model.GetFullTextDataPropertyOrSingle(internalOptions.AdditionalProperty); // Configure search settings. - var vectorQueries = new List(); - vectorQueries.Add(new VectorizedQuery(floatVector) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorPropertyName } }); + var vectorQueries = new List + { + new VectorizedQuery(floatVector) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorProperty.StorageName } } + }; #pragma warning disable CS0618 // VectorSearchFilter is obsolete // Build filter object. var filter = internalOptions switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), - { OldFilter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._propertyReader.JsonPropertyNamesMap), - { Filter: Expression> newFilter } => new AzureAISearchFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), + { OldFilter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._model), + { Filter: Expression> newFilter } => new AzureAISearchFilterTranslator().Translate(newFilter, this._model), _ => null }; #pragma warning restore CS0618 @@ -482,13 +437,17 @@ public Task> HybridSearchAsync(TVector vec IncludeTotalCount = internalOptions.IncludeTotalCount, }; searchOptions.VectorSearch.Queries.AddRange(vectorQueries); - searchOptions.SearchFields.Add(textDataPropertyName); + searchOptions.SearchFields.Add(textDataProperty.StorageName); // Filter out vector fields if requested. if (!internalOptions.IncludeVectors) { - searchOptions.Select.Add(this._propertyReader.KeyPropertyJsonName); - searchOptions.Select.AddRange(this._propertyReader.DataPropertyJsonNames); + searchOptions.Select.Add(this._model.KeyProperty.StorageName); + + foreach (var dataProperty in this._model.DataProperties) + { + searchOptions.Select.Add(dataProperty.StorageName); + } } var keywordsCombined = string.Join(" ", keywords); @@ -554,7 +513,7 @@ private async Task> SearchAndMapToDataModelAsync( const string OperationName = "Search"; // Execute search and map using the user provided mapper. 
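The same projection pattern now appears in VectorizedSearchAsync, VectorizableTextSearchAsync, HybridSearchAsync and ConvertGetDocumentOptions: when vectors are not requested, only the key and data properties are selected by their storage names. A minimal sketch of that pattern, with illustrative inputs rather than an MEVD API:

    using System.Collections.Generic;

    static IEnumerable<string> BuildSelectList(
        string keyName, IEnumerable<string> dataNames, IEnumerable<string> vectorNames, bool includeVectors)
    {
        // Key and data storage names are always selected.
        yield return keyName;

        foreach (var name in dataNames)
        {
            yield return name;
        }

        // Vector storage names are only selected when the caller asked for vectors.
        if (includeVectors)
        {
            foreach (var name in vectorNames)
            {
                yield return name;
            }
        }
    }

    // BuildSelectList("HotelId", new[] { "Description" }, new[] { "DescriptionEmbedding" }, includeVectors: false)
    // yields ["HotelId", "Description"].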
- if (this._options.JsonObjectCustomMapper is not null) + if (this._mapper is not null) { var jsonObjectResults = await this.RunOperationAsync( OperationName, @@ -619,7 +578,7 @@ private async IAsyncEnumerable> MapSearchResultsAsyn DatabaseName, this._collectionName, operationName, - () => this._options.JsonObjectCustomMapper!.MapFromStorageToDataModel(result.Document, new() { IncludeVectors = includeVectors })); + () => this._mapper!.MapFromStorageToDataModel(result.Document, new() { IncludeVectors = includeVectors })); yield return new VectorSearchResult(document, result.Score); } } @@ -647,8 +606,12 @@ private GetDocumentOptions ConvertGetDocumentOptions(GetRecordOptions? options) var innerOptions = new GetDocumentOptions(); if (options?.IncludeVectors is not true) { - innerOptions.SelectedFields.AddRange(this._propertyReader.KeyPropertyJsonNames); - innerOptions.SelectedFields.AddRange(this._propertyReader.DataPropertyJsonNames); + innerOptions.SelectedFields.Add(this._model.KeyProperty.StorageName); + + foreach (var dataProperty in this._model.DataProperties) + { + innerOptions.SelectedFields.Add(dataProperty.StorageName); + } } return innerOptions; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs index 6c0b4e44e23b..ade730443c81 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs @@ -9,6 +9,7 @@ using System.Linq.Expressions; using System.Reflection; using System.Runtime.CompilerServices; +using Microsoft.Extensions.VectorData.ConnectorSupport; using MongoDB.Bson; namespace Microsoft.SemanticKernel.Connectors.MongoDB; @@ -17,12 +18,12 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; // Information specific to vector search pre-filter: https://www.mongodb.com/docs/atlas/atlas-vector-search/vector-search-stage/#atlas-vector-search-pre-filter internal class AzureCosmosDBMongoDBFilterTranslator { - private IReadOnlyDictionary _storagePropertyNames = null!; + private VectorStoreRecordModel _model = null!; private ParameterExpression _recordParameter = null!; - internal BsonDocument Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + internal BsonDocument Translate(LambdaExpression lambdaExpression, VectorStoreRecordModel model) { - this._storagePropertyNames = storagePropertyNames; + this._model = model; Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; @@ -223,11 +224,12 @@ private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] { if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) { - if (!this._storagePropertyNames.TryGetValue(memberExpression.Member.Name, out storagePropertyName)) + if (!this._model.PropertyMap.TryGetValue(memberExpression.Member.Name, out var property)) { throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); } + storagePropertyName = property.StorageName; return true; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionCreateMapping.cs 
b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionCreateMapping.cs index 8bd883163870..c1c8f1b5d02e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionCreateMapping.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using MongoDB.Bson; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; @@ -16,13 +17,11 @@ internal static class AzureCosmosDBMongoDBVectorStoreCollectionCreateMapping /// Returns an array of indexes to create for vector properties. /// /// Collection of vector properties for index creation. - /// A dictionary that maps from a property name to the storage name. /// Collection of unique existing indexes to avoid creating duplicates. /// Number of clusters that the inverted file (IVF) index uses to group the vector data. /// The size of the dynamic candidate list for constructing the graph. public static BsonArray GetVectorIndexes( - IReadOnlyList vectorProperties, - Dictionary storagePropertyNames, + IReadOnlyList vectorProperties, HashSet uniqueIndexes, int numLists, int efConstruction) @@ -32,9 +31,10 @@ public static BsonArray GetVectorIndexes( // Create separate index for each vector property foreach (var property in vectorProperties) { + var storageName = property.StorageName; + // Use index name same as vector property name with underscore - var vectorPropertyName = storagePropertyNames[property.DataModelPropertyName]; - var indexName = $"{vectorPropertyName}_"; + var indexName = $"{storageName}_"; // If index already exists, proceed to the next vector property if (uniqueIndexes.Contains(indexName)) @@ -45,9 +45,9 @@ public static BsonArray GetVectorIndexes( // Otherwise, create a new index var searchOptions = new BsonDocument { - { "kind", GetIndexKind(property.IndexKind, vectorPropertyName) }, + { "kind", GetIndexKind(property.IndexKind, storageName) }, { "numLists", numLists }, - { "similarity", GetDistanceFunction(property.DistanceFunction, vectorPropertyName) }, + { "similarity", GetDistanceFunction(property.DistanceFunction, storageName) }, { "dimensions", property.Dimensions }, { "efConstruction", efConstruction } }; @@ -55,7 +55,7 @@ public static BsonArray GetVectorIndexes( var indexDocument = new BsonDocument { ["name"] = indexName, - ["key"] = new BsonDocument { [vectorPropertyName] = "cosmosSearch" }, + ["key"] = new BsonDocument { [storageName] = "cosmosSearch" }, ["cosmosSearchOptions"] = searchOptions }; @@ -69,11 +69,9 @@ public static BsonArray GetVectorIndexes( /// Returns an array of indexes to create for filterable data properties. /// /// Collection of data properties for index creation. - /// A dictionary that maps from a property name to the storage name. /// Collection of unique existing indexes to avoid creating duplicates. 
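Concretely, for one HNSW vector property the builder above emits an index definition document along these lines. The storage name and parameter values are illustrative; the kind and similarity strings are the ones the GetIndexKind and GetDistanceFunction helpers map to.

    using MongoDB.Bson;

    var storageName = "descriptionEmbedding";

    var indexDocument = new BsonDocument
    {
        // Index name is the storage name with a trailing underscore.
        ["name"] = $"{storageName}_",
        ["key"] = new BsonDocument { [storageName] = "cosmosSearch" },
        ["cosmosSearchOptions"] = new BsonDocument
        {
            { "kind", "vector-hnsw" },
            { "numLists", 1 },
            { "similarity", "COS" },
            { "dimensions", 1536 },
            { "efConstruction", 64 }
        }
    };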
public static BsonArray GetFilterableDataIndexes( - IReadOnlyList dataProperties, - Dictionary storagePropertyNames, + IReadOnlyList dataProperties, HashSet uniqueIndexes) { var indexArray = new BsonArray(); @@ -84,8 +82,7 @@ public static BsonArray GetFilterableDataIndexes( if (property.IsFilterable) { // Use index name same as data property name with underscore - var dataPropertyName = storagePropertyNames[property.DataModelPropertyName]; - var indexName = $"{dataPropertyName}_"; + var indexName = $"{property.StorageName}_"; // If index already exists, proceed to the next data property if (uniqueIndexes.Contains(indexName)) @@ -97,7 +94,7 @@ public static BsonArray GetFilterableDataIndexes( var indexDocument = new BsonDocument { ["name"] = indexName, - ["key"] = new BsonDocument { [dataPropertyName] = 1 } + ["key"] = new BsonDocument { [property.StorageName] = 1 } }; indexArray.Add(indexDocument); @@ -111,30 +108,22 @@ public static BsonArray GetFilterableDataIndexes( /// More information about Azure CosmosDB for MongoDB index kinds here: . /// private static string GetIndexKind(string? indexKind, string vectorPropertyName) - { - var vectorPropertyIndexKind = AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.GetVectorPropertyIndexKind(indexKind); - - return vectorPropertyIndexKind switch + => AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.GetVectorPropertyIndexKind(indexKind) switch { IndexKind.Hnsw => "vector-hnsw", IndexKind.IvfFlat => "vector-ivf", _ => throw new InvalidOperationException($"Index kind '{indexKind}' on {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the Azure CosmosDB for MongoDB VectorStore.") }; - } /// /// More information about Azure CosmosDB for MongoDB distance functions here: . /// private static string GetDistanceFunction(string? distanceFunction, string vectorPropertyName) - { - var vectorPropertyDistanceFunction = AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.GetVectorPropertyDistanceFunction(distanceFunction); - - return vectorPropertyDistanceFunction switch + => AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.GetVectorPropertyDistanceFunction(distanceFunction) switch { DistanceFunction.CosineDistance => "COS", DistanceFunction.DotProductSimilarity => "IP", DistanceFunction.EuclideanDistance => "L2", _ => throw new InvalidOperationException($"Distance function '{distanceFunction}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the Azure CosmosDB for MongoDB VectorStore.") }; - } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.cs index 32377244112c..a78ce746f736 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.cs @@ -1,9 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. 
using System; -using System.Collections.Generic; using System.Linq; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.MongoDB; using MongoDB.Bson; @@ -25,12 +25,10 @@ internal static class AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping /// Build Azure CosmosDB MongoDB filter from the provided . /// /// The to build Azure CosmosDB MongoDB filter from. - /// A dictionary that maps from a property name to the storage name. + /// The model. /// Thrown when the provided filter type is unsupported. /// Thrown when property name specified in filter doesn't exist. - public static BsonDocument? BuildFilter( - VectorSearchFilter? vectorSearchFilter, - Dictionary storagePropertyNames) + public static BsonDocument? BuildFilter(VectorSearchFilter? vectorSearchFilter, VectorStoreRecordModel model) { const string EqualOperator = "$eq"; @@ -63,25 +61,27 @@ internal static class AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping nameof(EqualToFilterClause)])}"); } - if (!storagePropertyNames.TryGetValue(propertyName, out var storagePropertyName)) + if (!model.PropertyMap.TryGetValue(propertyName, out var property)) { throw new InvalidOperationException($"Property name '{propertyName}' provided as part of the filter clause is not a valid property name."); } - if (filter.Contains(storagePropertyName)) + var storageName = property.StorageName; + + if (filter.Contains(storageName)) { - if (filter[storagePropertyName] is BsonDocument document && document.Contains(filterOperator)) + if (filter[storageName] is BsonDocument document && document.Contains(filterOperator)) { throw new NotSupportedException( $"Filter with operator '{filterOperator}' is already added to '{propertyName}' property. " + "Multiple filters of the same type in the same property are not supported."); } - filter[storagePropertyName][filterOperator] = propertyValue; + filter[storageName][filterOperator] = propertyValue; } else { - filter[storagePropertyName] = new BsonDocument() { [filterOperator] = propertyValue }; + filter[storageName] = new BsonDocument() { [filterOperator] = propertyValue }; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index b45c9213a789..5c1d705f0f1c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -9,6 +9,7 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.MongoDB; using MongoDB.Bson; using MongoDB.Bson.Serialization.Attributes; @@ -49,14 +50,8 @@ public class AzureCosmosDBMongoDBVectorStoreRecordCollection : IVectorS /// Interface for mapping between a storage model, and the consumer record data model. private readonly IVectorStoreRecordMapper _mapper; - /// A dictionary that maps from a property name to the storage name that should be used when serializing it for data and vector properties. - private readonly Dictionary _storagePropertyNames; - - /// Collection of vector storage property names. 
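For a single EqualToFilterClause, the filter document that BuildFilter assembles above ends up shaped like the following; the property and value are illustrative, assuming a data-model property whose storage name is "hotelName".

    using MongoDB.Bson;

    // { "hotelName": { "$eq": "Palm View" } }
    var filter = new BsonDocument
    {
        ["hotelName"] = new BsonDocument { ["$eq"] = "Palm View" }
    };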
- private readonly List _vectorStoragePropertyNames; - - /// A helper to access property information for the current data model and record definition. - private readonly VectorStoreRecordPropertyReader _propertyReader; + /// The model for this collection. + private readonly VectorStoreRecordModel _model; /// public string CollectionName { get; } @@ -75,23 +70,13 @@ public AzureCosmosDBMongoDBVectorStoreRecordCollection( // Verify. Verify.NotNull(mongoDatabase); Verify.NotNullOrWhiteSpace(collectionName); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.BsonDocumentCustomMapper is not null, MongoDBConstants.SupportedKeyTypes); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); // Assign. this._mongoDatabase = mongoDatabase; this._mongoCollection = mongoDatabase.GetCollection(collectionName); this.CollectionName = collectionName; this._options = options ?? new AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions(); - this._propertyReader = new VectorStoreRecordPropertyReader(typeof(TRecord), this._options.VectorStoreRecordDefinition, new() { RequiresAtLeastOneVector = false, SupportsMultipleKeys = false, SupportsMultipleVectors = true }); - - this._storagePropertyNames = GetStoragePropertyNames(this._propertyReader.Properties, typeof(TRecord)); - - // Use Mongo reserved key property name as storage key property name - this._storagePropertyNames[this._propertyReader.KeyPropertyName] = MongoDBConstants.MongoReservedKeyPropertyName; - - this._vectorStoragePropertyNames = this._propertyReader.VectorProperties.Select(property => this._storagePropertyNames[property.DataModelPropertyName]).ToList(); - + this._model = new MongoDBModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); this._mapper = this.InitializeMapper(); } @@ -272,17 +257,14 @@ public virtual async Task> VectorizedSearchAsync throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), - { OldFilter: VectorSearchFilter legacyFilter } => AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter( - legacyFilter, - this._storagePropertyNames), - { Filter: Expression> newFilter } => new AzureCosmosDBMongoDBFilterTranslator().Translate(newFilter, this._storagePropertyNames), + { OldFilter: VectorSearchFilter legacyFilter } => AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter(legacyFilter, this._model), + { Filter: Expression> newFilter } => new AzureCosmosDBMongoDBFilterTranslator().Translate(newFilter, this._model), _ => null }; #pragma warning restore CS0618 @@ -297,17 +279,17 @@ public virtual async Task> VectorizedSearchAsync AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.GetSearchQueryForHnswIndex( vectorArray, - vectorPropertyName, + vectorProperty.StorageName, itemsAmount, this._options.EfSearch, filter), IndexKind.IvfFlat => AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.GetSearchQueryForIvfIndex( vectorArray, - vectorPropertyName, + vectorProperty.StorageName, itemsAmount, filter), _ => throw new InvalidOperationException( - $"Index kind '{vectorProperty.IndexKind}' on {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the Azure CosmosDB for MongoDB VectorStore. " + + $"Index kind '{vectorProperty.IndexKind}' on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.StorageName}' is not supported by the Azure CosmosDB for MongoDB VectorStore. 
" + $"Supported index kinds are: {string.Join(", ", [IndexKind.Hnsw, IndexKind.IvfFlat])}") }; @@ -335,15 +317,13 @@ private async Task CreateIndexesAsync(string collectionName, CancellationToken c var indexArray = new BsonArray(); indexArray.AddRange(AzureCosmosDBMongoDBVectorStoreCollectionCreateMapping.GetVectorIndexes( - this._propertyReader.VectorProperties, - this._storagePropertyNames, + this._model.VectorProperties, uniqueIndexes, this._options.NumLists, this._options.EfConstruction)); indexArray.AddRange(AzureCosmosDBMongoDBVectorStoreCollectionCreateMapping.GetFilterableDataIndexes( - this._propertyReader.DataProperties, - this._storagePropertyNames, + this._model.DataProperties, uniqueIndexes)); if (indexArray.Count > 0) @@ -365,13 +345,13 @@ private async Task> FindAsync(FilterDefinition 0) + if (!includeVectors && this._model.VectorProperties.Count > 0) { - foreach (var vectorPropertyName in this._vectorStoragePropertyNames) + foreach (var vectorProperty in this._model.VectorProperties) { projectionDefinition = projectionDefinition is not null ? - projectionDefinition.Exclude(vectorPropertyName) : - projectionBuilder.Exclude(vectorPropertyName); + projectionDefinition.Exclude(vectorProperty.StorageName) : + projectionBuilder.Exclude(vectorProperty.StorageName); } } @@ -505,10 +485,10 @@ private IVectorStoreRecordMapper InitializeMapper() if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) { - return (new MongoDBGenericDataModelMapper(this._propertyReader.RecordDefinition) as IVectorStoreRecordMapper)!; + return (new MongoDBGenericDataModelMapper(this._model) as IVectorStoreRecordMapper)!; } - return new MongoDBVectorStoreRecordMapper(this._propertyReader); + return new MongoDBVectorStoreRecordMapper(this._model); } #endregion diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLGenericDataModelMapper.cs index 2a52c2604a4d..b98177845ac2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLGenericDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLGenericDataModelMapper.cs @@ -1,16 +1,19 @@ // Copyright (c) Microsoft. All rights reserved. using System.Collections.Generic; +using System.Diagnostics; using System.Text.Json; using System.Text.Json.Nodes; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Azure CosmosDB NoSQL. /// -internal sealed class AzureCosmosDBNoSQLGenericDataModelMapper : IVectorStoreRecordMapper, JsonObject> +internal sealed class AzureCosmosDBNoSQLGenericDataModelMapper(VectorStoreRecordModel model, JsonSerializerOptions jsonSerializerOptions) + : IVectorStoreRecordMapper, JsonObject> { /// A default for serialization/deserialization of vector properties. private static readonly JsonSerializerOptions s_vectorJsonSerializerOptions = new() @@ -18,27 +21,6 @@ internal sealed class AzureCosmosDBNoSQLGenericDataModelMapper : IVectorStoreRec Converters = { new AzureCosmosDBNoSQLReadOnlyMemoryByteConverter() } }; - /// A for serialization/deserialization of data properties - private readonly JsonSerializerOptions _jsonSerializerOptions; - - /// The list of properties from the record definition. 
- private readonly IReadOnlyList _properties; - - /// A dictionary that maps from a property name to the storage name. - public readonly Dictionary _storagePropertyNames; - - public AzureCosmosDBNoSQLGenericDataModelMapper( - IReadOnlyList properties, - Dictionary storagePropertyNames, - JsonSerializerOptions jsonSerializerOptions) - { - Verify.NotNull(properties); - - this._properties = properties; - this._storagePropertyNames = storagePropertyNames; - this._jsonSerializerOptions = jsonSerializerOptions; - } - public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) { Verify.NotNull(dataModel); @@ -46,31 +28,34 @@ public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel var jsonObject = new JsonObject(); // Loop through all known properties and map each from the data model to the storage model. - foreach (var property in this._properties) + foreach (var property in model.Properties) { - var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; - - if (property is VectorStoreRecordKeyProperty keyProperty) + switch (property) { - jsonObject[AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName] = dataModel.Key; - } - else if (property is VectorStoreRecordDataProperty dataProperty) - { - if (dataModel.Data is not null && dataModel.Data.TryGetValue(dataProperty.DataModelPropertyName, out var dataValue)) - { - jsonObject[storagePropertyName] = dataValue is not null ? - JsonSerializer.SerializeToNode(dataValue, property.PropertyType, this._jsonSerializerOptions) : - null; - } - } - else if (property is VectorStoreRecordVectorProperty vectorProperty) - { - if (dataModel.Vectors is not null && dataModel.Vectors.TryGetValue(vectorProperty.DataModelPropertyName, out var vectorValue)) - { - jsonObject[storagePropertyName] = vectorValue is not null ? - JsonSerializer.SerializeToNode(vectorValue, property.PropertyType, s_vectorJsonSerializerOptions) : - null; - } + case VectorStoreRecordKeyPropertyModel keyProperty: + jsonObject[AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName] = dataModel.Key; + break; + + case VectorStoreRecordDataPropertyModel dataProperty: + if (dataModel.Data is not null && dataModel.Data.TryGetValue(dataProperty.StorageName, out var dataValue)) + { + jsonObject[dataProperty.StorageName] = dataValue is not null ? + JsonSerializer.SerializeToNode(dataValue, property.Type, jsonSerializerOptions) : + null; + } + break; + + case VectorStoreRecordVectorPropertyModel vectorProperty: + if (dataModel.Vectors is not null && dataModel.Vectors.TryGetValue(vectorProperty.StorageName, out var vectorValue)) + { + jsonObject[vectorProperty.StorageName] = vectorValue is not null ? + JsonSerializer.SerializeToNode(vectorValue, property.Type, s_vectorJsonSerializerOptions) : + null; + } + break; + + default: + throw new UnreachableException(); } } @@ -87,30 +72,33 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject var vectorProperties = new Dictionary(); // Loop through all known properties and map each from the storage model to the data model. 
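The net effect of MapFromDataToStorageModel above is a JSON document in which the key always lives under the container's reserved key property (assumed here to be Cosmos DB's "id"), while data and vector values live under their storage names. An illustrative result, with made-up property names and values:

    using System.Text.Json.Nodes;

    // Key under the reserved "id" property; data and vector values under their storage names.
    var jsonObject = new JsonObject
    {
        ["id"] = "hotel-1",
        ["description"] = "A lovely hotel.",
        ["descriptionEmbedding"] = new JsonArray(0.12f, 0.45f, 0.78f)
    };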
- foreach (var property in this._properties) + foreach (var property in model.Properties) { - var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; - - if (property is VectorStoreRecordKeyProperty keyProperty) - { - if (storageModel.TryGetPropertyValue(AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName, out var keyValue)) - { - key = keyValue?.GetValue(); - } - } - else if (property is VectorStoreRecordDataProperty dataProperty) - { - if (storageModel.TryGetPropertyValue(storagePropertyName, out var dataValue)) - { - dataProperties.Add(property.DataModelPropertyName, dataValue.Deserialize(property.PropertyType, this._jsonSerializerOptions)); - } - } - else if (property is VectorStoreRecordVectorProperty vectorProperty && options.IncludeVectors) + switch (property) { - if (storageModel.TryGetPropertyValue(storagePropertyName, out var vectorValue)) - { - vectorProperties.Add(property.DataModelPropertyName, vectorValue.Deserialize(property.PropertyType, s_vectorJsonSerializerOptions)); - } + case VectorStoreRecordKeyPropertyModel keyProperty: + if (storageModel.TryGetPropertyValue(AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName, out var keyValue)) + { + key = keyValue?.GetValue(); + } + break; + + case VectorStoreRecordDataPropertyModel dataProperty: + if (storageModel.TryGetPropertyValue(dataProperty.StorageName, out var dataValue)) + { + dataProperties.Add(property.ModelName, dataValue.Deserialize(property.Type, jsonSerializerOptions)); + } + break; + + case VectorStoreRecordVectorPropertyModel vectorProperty when options.IncludeVectors: + if (options.IncludeVectors && storageModel.TryGetPropertyValue(vectorProperty.StorageName, out var vectorValue)) + { + vectorProperties.Add(property.ModelName, vectorValue.Deserialize(property.Type, s_vectorJsonSerializerOptions)); + } + break; + + default: + throw new UnreachableException(); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs index d3ae19517db5..beea8cf3f368 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs @@ -7,6 +7,7 @@ using System.Text; using Microsoft.Azure.Cosmos; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; @@ -25,8 +26,7 @@ internal static class AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder public static QueryDefinition BuildSearchQuery( TVector vector, ICollection? keywords, - List fields, - Dictionary storagePropertyNames, + VectorStoreRecordModel model, string vectorPropertyName, string? textPropertyName, string scorePropertyName, @@ -35,7 +35,8 @@ public static QueryDefinition BuildSearchQuery( #pragma warning restore CS0618 // Type or member is obsolete Expression>? 
filter, int top, - int skip) + int skip, + bool includeVectors) { Verify.NotNull(vector); @@ -45,7 +46,12 @@ public static QueryDefinition BuildSearchQuery( var tableVariableName = AzureCosmosDBNoSQLConstants.ContainerAlias; - var fieldsArgument = fields.Select(field => $"{tableVariableName}.{field}"); + IEnumerable projectionProperties = model.Properties; + if (!includeVectors) + { + projectionProperties = projectionProperties.Where(p => p is not VectorStoreRecordVectorPropertyModel); + } + var fieldsArgument = projectionProperties.Select(p => $"{tableVariableName}.{p.StorageName}"); var vectorDistanceArgument = $"VectorDistance({tableVariableName}.{vectorPropertyName}, {VectorVariableName})"; var vectorDistanceArgumentWithAlias = $"{vectorDistanceArgument} AS {scorePropertyName}"; @@ -63,8 +69,8 @@ public static QueryDefinition BuildSearchQuery( var (whereClause, filterParameters) = (OldFilter: oldFilter, Filter: filter) switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), - { OldFilter: VectorSearchFilter legacyFilter } => BuildSearchFilter(legacyFilter, storagePropertyNames), - { Filter: Expression> newFilter } => new AzureCosmosDBNoSqlFilterTranslator().Translate(newFilter, storagePropertyNames), + { OldFilter: VectorSearchFilter legacyFilter } => BuildSearchFilter(legacyFilter, model), + { Filter: Expression> newFilter } => new AzureCosmosDBNoSqlFilterTranslator().Translate(newFilter, model), _ => (null, []) }; #pragma warning restore CS0618 // VectorSearchFilter is obsolete @@ -123,10 +129,11 @@ public static QueryDefinition BuildSearchQuery( /// Builds to get items from Azure CosmosDB NoSQL. /// public static QueryDefinition BuildSelectQuery( + VectorStoreRecordModel model, string keyStoragePropertyName, string partitionKeyStoragePropertyName, List keys, - List fields) + bool includeVectors) { Verify.True(keys.Count > 0, "At least one key should be provided.", nameof(keys)); @@ -135,8 +142,14 @@ public static QueryDefinition BuildSelectQuery( var tableVariableName = AzureCosmosDBNoSQLConstants.ContainerAlias; - var selectClauseArguments = string.Join(SelectClauseDelimiter, - fields.Select(field => $"{tableVariableName}.{field}")); + IEnumerable projectionProperties = model.Properties; + if (!includeVectors) + { + projectionProperties = projectionProperties.Where(p => p is not VectorStoreRecordVectorPropertyModel); + } + var fields = projectionProperties.Select(field => field.StorageName); + + var selectClauseArguments = string.Join(SelectClauseDelimiter, fields.Select(field => $"{tableVariableName}.{field}")); var whereClauseArguments = string.Join(OrConditionDelimiter, keys.Select((key, index) => @@ -171,7 +184,7 @@ public static QueryDefinition BuildSelectQuery( #pragma warning disable CS0618 // VectorSearchFilter is obsolete private static (string WhereClause, Dictionary Parameters) BuildSearchFilter( VectorSearchFilter filter, - Dictionary storagePropertyNames) + VectorStoreRecordModel model) { const string EqualOperator = "="; const string ArrayContainsOperator = "ARRAY_CONTAINS"; @@ -197,13 +210,13 @@ private static (string WhereClause, Dictionary Parameters) Buil if (filterClause is EqualToFilterClause equalToFilterClause) { - var propertyName = GetStoragePropertyName(equalToFilterClause.FieldName, storagePropertyNames); + var propertyName = GetStoragePropertyName(equalToFilterClause.FieldName, model); whereClauseBuilder.Append($"{tableVariableName}.{propertyName} {EqualOperator} 
{queryParameterName}"); queryParameterValue = equalToFilterClause.Value; } else if (filterClause is AnyTagEqualToFilterClause anyTagEqualToFilterClause) { - var propertyName = GetStoragePropertyName(anyTagEqualToFilterClause.FieldName, storagePropertyNames); + var propertyName = GetStoragePropertyName(anyTagEqualToFilterClause.FieldName, model); whereClauseBuilder.Append($"{ArrayContainsOperator}({tableVariableName}.{propertyName}, {queryParameterName})"); queryParameterValue = anyTagEqualToFilterClause.Value; } @@ -223,14 +236,14 @@ private static (string WhereClause, Dictionary Parameters) Buil } #pragma warning restore CS0618 // VectorSearchFilter is obsolete - private static string GetStoragePropertyName(string propertyName, Dictionary storagePropertyNames) + private static string GetStoragePropertyName(string propertyName, VectorStoreRecordModel model) { - if (!storagePropertyNames.TryGetValue(propertyName, out var storagePropertyName)) + if (!model.PropertyMap.TryGetValue(propertyName, out var property)) { throw new InvalidOperationException($"Property name '{propertyName}' provided as part of the filter clause is not a valid property name."); } - return storagePropertyName; + return property.StorageName; } #endregion diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index b6e0200a6137..c291f84c3aa4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -11,6 +11,7 @@ using System.Threading.Tasks; using Microsoft.Azure.Cosmos; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using DistanceFunction = Microsoft.Azure.Cosmos.DistanceFunction; using IndexKind = Microsoft.Extensions.VectorData.IndexKind; using SKDistanceFunction = Microsoft.Extensions.VectorData.DistanceFunction; @@ -31,44 +32,6 @@ public class AzureCosmosDBNoSQLVectorStoreRecordCollection : /// The name of this database for telemetry purposes. private const string DatabaseName = "AzureCosmosDBNoSQL"; - /// A of types that a key on the provided model may have. - private static readonly HashSet s_supportedKeyTypes = - [ - typeof(string) - ]; - - /// A of types that data properties on the provided model may have. - private static readonly HashSet s_supportedDataTypes = - [ - typeof(bool), - typeof(bool?), - typeof(string), - typeof(int), - typeof(int?), - typeof(long), - typeof(long?), - typeof(float), - typeof(float?), - typeof(double), - typeof(double?), - typeof(DateTimeOffset), - typeof(DateTimeOffset?), - ]; - - /// A of types that vector properties on the provided model may have, based on enumeration. - private static readonly HashSet s_supportedVectorTypes = - [ - // Float32 - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?), - // Uint8 - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?), - // Int8 - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?), - ]; - /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -81,23 +44,12 @@ public class AzureCosmosDBNoSQLVectorStoreRecordCollection : /// Optional configuration options for this class. 
private readonly AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions _options; - /// A helper to access property information for the current data model and record definition. - private readonly VectorStoreRecordPropertyReader _propertyReader; - - /// The storage names of all non vector fields on the current model. - private readonly List _nonVectorStoragePropertyNames = []; - - /// A dictionary that maps from a property name to the storage name that should be used when serializing it to json for data and vector properties. - private readonly Dictionary _storagePropertyNames = []; + /// The model for this collection. + private readonly VectorStoreRecordModel _model; - /// The storage name of the key field for the collections that this class is used with. - private readonly string _keyStoragePropertyName; - - /// The property name to use as partition key. - private readonly string _partitionKeyPropertyName; - - /// The storage property name to use as partition key. - private readonly string _partitionKeyStoragePropertyName; + // TODO: Refactor this into the model (Co) + /// The property to use as partition key. + private readonly VectorStoreRecordPropertyModel _partitionKeyProperty; /// The mapper to use when mapping between the consumer data model and the Azure CosmosDB NoSQL record. private readonly IVectorStoreRecordMapper _mapper; @@ -119,51 +71,38 @@ public AzureCosmosDBNoSQLVectorStoreRecordCollection( // Verify. Verify.NotNull(database); Verify.NotNullOrWhiteSpace(collectionName); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.JsonObjectCustomMapper is not null, s_supportedKeyTypes); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); // Assign. this._database = database; this.CollectionName = collectionName; this._options = options ?? new(); var jsonSerializerOptions = this._options.JsonSerializerOptions ?? JsonSerializerOptions.Default; - this._propertyReader = new VectorStoreRecordPropertyReader(typeof(TRecord), this._options.VectorStoreRecordDefinition, new() - { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true, - JsonSerializerOptions = jsonSerializerOptions - }); - - // Validate property types. - this._propertyReader.VerifyKeyProperties(s_supportedKeyTypes); - this._propertyReader.VerifyDataProperties(s_supportedDataTypes, supportEnumerable: true); - this._propertyReader.VerifyVectorProperties(s_supportedVectorTypes); - - // Get storage names and store for later use. - this._storagePropertyNames = this._propertyReader.JsonPropertyNamesMap.ToDictionary(x => x.Key, x => x.Value); + this._model = new AzureCosmosDBNoSqlVectorStoreModelBuilder() + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, jsonSerializerOptions); // Assign mapper. this._mapper = this.InitializeMapper(jsonSerializerOptions); - // Use Azure CosmosDB NoSQL reserved key property name as storage key property name. - this._storagePropertyNames[this._propertyReader.KeyPropertyName] = AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName; - this._keyStoragePropertyName = AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName; - - // If partition key is not provided, use key property as a partition key. - this._partitionKeyPropertyName = !string.IsNullOrWhiteSpace(this._options.PartitionKeyPropertyName) ? - this._options.PartitionKeyPropertyName! 
: - this._propertyReader.KeyPropertyName; - - VerifyPartitionKeyProperty(this._partitionKeyPropertyName, this._propertyReader.Properties); + // Setup partition key property + if (this._options.PartitionKeyPropertyName is not null) + { + if (!this._model.PropertyMap.TryGetValue(this._options.PartitionKeyPropertyName, out var property)) + { + throw new ArgumentException($"Partition key property '{this._options.PartitionKeyPropertyName}' is not part of the record definition."); + } - this._partitionKeyStoragePropertyName = this._storagePropertyNames[this._partitionKeyPropertyName]; + if (property.Type != typeof(string)) + { + throw new ArgumentException("Partition key property must be string."); + } - this._nonVectorStoragePropertyNames = this._propertyReader.DataProperties - .Cast() - .Concat([this._propertyReader.KeyProperty]) - .Select(x => this._storagePropertyNames[x.DataModelPropertyName]) - .ToList(); + this._partitionKeyProperty = property; + } + else + { + // If partition key is not provided, use key property as a partition key. + this._partitionKeyProperty = this._model.KeyProperty; + } } /// @@ -368,24 +307,21 @@ public virtual Task> VectorizedSearchAsync this.VerifyVectorType(vector); var searchOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions); - var vectorPropertyName = this._storagePropertyNames[vectorProperty.DataModelPropertyName]; - - var fields = new List(searchOptions.IncludeVectors ? this._storagePropertyNames.Values : this._nonVectorStoragePropertyNames); + var vectorProperty = this._model.GetVectorPropertyOrSingle(searchOptions); #pragma warning disable CS0618 // Type or member is obsolete var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( vector, null, - fields, - this._storagePropertyNames, - vectorPropertyName, + this._model, + vectorProperty.StorageName, null, ScorePropertyName, searchOptions.OldFilter, searchOptions.Filter, searchOptions.Top, - searchOptions.Skip); + searchOptions.Skip, + searchOptions.IncludeVectors); #pragma warning restore CS0618 // Type or member is obsolete var searchResults = this.GetItemsAsync(queryDefinition, cancellationToken); @@ -407,27 +343,22 @@ public Task> HybridSearchAsync(TVector vec this.VerifyVectorType(vector); var searchOptions = options ?? s_defaultKeywordVectorizedHybridSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(new() { VectorProperty = searchOptions.VectorProperty }); - var vectorPropertyName = this._storagePropertyNames[vectorProperty.DataModelPropertyName]; - - var textProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalProperty); - var textPropertyName = this._storagePropertyNames[textProperty.DataModelPropertyName]; - - var fields = new List(searchOptions.IncludeVectors ? 
this._storagePropertyNames.Values : this._nonVectorStoragePropertyNames); + var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = searchOptions.VectorProperty }); + var textProperty = this._model.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalProperty); #pragma warning disable CS0618 // Type or member is obsolete var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( vector, keywords, - fields, - this._storagePropertyNames, - vectorPropertyName, - textPropertyName, + this._model, + vectorProperty.StorageName, + textProperty.StorageName, ScorePropertyName, searchOptions.OldFilter, searchOptions.Filter, searchOptions.Top, - searchOptions.Skip); + searchOptions.Skip, + searchOptions.IncludeVectors); #pragma warning restore CS0618 // Type or member is obsolete var searchResults = this.GetItemsAsync(queryDefinition, cancellationToken); @@ -450,11 +381,11 @@ private void VerifyVectorType(TVector? vector) var vectorType = vector.GetType(); - if (!s_supportedVectorTypes.Contains(vectorType)) + if (!AzureCosmosDBNoSqlVectorStoreModelBuilder.s_supportedVectorTypes.Contains(vectorType)) { throw new NotSupportedException( $"The provided vector type {vectorType.FullName} is not supported by the Azure CosmosDB NoSQL connector. " + - $"Supported types are: {string.Join(", ", s_supportedVectorTypes.Select(l => l.FullName))}"); + $"Supported types are: {string.Join(", ", AzureCosmosDBNoSqlVectorStoreModelBuilder.s_supportedVectorTypes.Select(l => l.FullName))}"); } } @@ -475,22 +406,6 @@ private async Task RunOperationAsync(string operationName, Func> o } } - private static void VerifyPartitionKeyProperty(string partitionKeyPropertyName, IReadOnlyList properties) - { - var partitionKeyProperty = properties - .FirstOrDefault(l => l.DataModelPropertyName.Equals(partitionKeyPropertyName, StringComparison.Ordinal)); - - if (partitionKeyProperty is null) - { - throw new ArgumentException("Partition key property must be part of record definition."); - } - - if (partitionKeyProperty.PropertyType != typeof(string)) - { - throw new ArgumentException("Partition key property must be string."); - } - } - /// /// Returns instance of with applied indexing policy. /// More information here: . 
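// Illustrative note on the constructor change above (the property name here is hypothetical): passing
// options with PartitionKeyPropertyName = "Description" resolves that name through the model's
// PropertyMap, throws ArgumentException when the name is unknown or the property is not typed as string,
// and falls back to the key property when no partition key name is supplied.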
@@ -509,34 +424,32 @@ private ContainerProperties GetContainerProperties() if (this._options.IndexingMode == IndexingMode.None) { - return new ContainerProperties(this.CollectionName, partitionKeyPath: $"/{this._partitionKeyStoragePropertyName}") + return new ContainerProperties(this.CollectionName, partitionKeyPath: $"/{this._partitionKeyProperty.StorageName}") { IndexingPolicy = indexingPolicy }; } - foreach (var property in this._propertyReader.VectorProperties) + foreach (var property in this._model.VectorProperties) { - var vectorPropertyName = this._storagePropertyNames[property.DataModelPropertyName]; - if (property.Dimensions is not > 0) { - throw new VectorStoreOperationException($"Property {nameof(property.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{property.DataModelPropertyName}' must be set to a positive integer to create a collection."); + throw new VectorStoreOperationException($"Property {nameof(property.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{property.ModelName}' must be set to a positive integer to create a collection."); } - var path = $"/{vectorPropertyName}"; + var path = $"/{property.StorageName}"; var embedding = new Embedding { - DataType = GetDataType(property.PropertyType, vectorPropertyName), + DataType = GetDataType(property.Type, property.StorageName), Dimensions = (int)property.Dimensions, - DistanceFunction = GetDistanceFunction(property.DistanceFunction, vectorPropertyName), + DistanceFunction = GetDistanceFunction(property.DistanceFunction, property.StorageName), Path = path }; var vectorIndexPath = new VectorIndexPath { - Type = GetIndexKind(property.IndexKind, vectorPropertyName), + Type = GetIndexKind(property.IndexKind, property.StorageName), Path = path }; @@ -550,17 +463,17 @@ private ContainerProperties GetContainerProperties() var vectorEmbeddingPolicy = new VectorEmbeddingPolicy(embeddings); // Process Data properties. - foreach (var property in this._propertyReader.DataProperties) + foreach (var property in this._model.DataProperties) { if (property.IsFilterable || property.IsFullTextSearchable) { - indexingPolicy.IncludedPaths.Add(new IncludedPath { Path = $"/{this._storagePropertyNames[property.DataModelPropertyName]}/?" }); + indexingPolicy.IncludedPaths.Add(new IncludedPath { Path = $"/{property.StorageName}/?" }); } if (property.IsFullTextSearchable) { - indexingPolicy.FullTextIndexes.Add(new FullTextIndexPath { Path = $"/{this._storagePropertyNames[property.DataModelPropertyName]}" }); + indexingPolicy.FullTextIndexes.Add(new FullTextIndexPath { Path = $"/{property.StorageName}" }); // TODO: Switch to using language from a setting. - fullTextPolicy.FullTextPaths.Add(new FullTextPath { Path = $"/{this._storagePropertyNames[property.DataModelPropertyName]}", Language = "en-US" }); + fullTextPolicy.FullTextPaths.Add(new FullTextPath { Path = $"/{property.StorageName}", Language = "en-US" }); } } @@ -573,7 +486,7 @@ private ContainerProperties GetContainerProperties() indexingPolicy.ExcludedPaths.Add(new ExcludedPath { Path = $"{vectorIndexPath.Path}/*" }); } - return new ContainerProperties(this.CollectionName, partitionKeyPath: $"/{this._partitionKeyStoragePropertyName}") + return new ContainerProperties(this.CollectionName, partitionKeyPath: $"/{this._partitionKeyProperty.StorageName}") { VectorEmbeddingPolicy = vectorEmbeddingPolicy, IndexingPolicy = indexingPolicy, @@ -585,21 +498,13 @@ private ContainerProperties GetContainerProperties() /// More information about Azure CosmosDB NoSQL index kinds here: . 
/// private static VectorIndexType GetIndexKind(string? indexKind, string vectorPropertyName) - { - if (string.IsNullOrWhiteSpace(indexKind)) - { - // Use default index kind. - return VectorIndexType.DiskANN; - } - - return indexKind switch + => indexKind switch { + IndexKind.DiskAnn or null => VectorIndexType.DiskANN, IndexKind.Flat => VectorIndexType.Flat, IndexKind.QuantizedFlat => VectorIndexType.QuantizedFlat, - IndexKind.DiskAnn => VectorIndexType.DiskANN, _ => throw new InvalidOperationException($"Index kind '{indexKind}' on {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the Azure CosmosDB NoSQL VectorStore.") }; - } /// /// More information about Azure CosmosDB NoSQL distance functions here: . @@ -625,15 +530,13 @@ private static DistanceFunction GetDistanceFunction(string? distanceFunction, st /// Returns based on vector property type. /// private static VectorDataType GetDataType(Type vectorDataType, string vectorPropertyName) - { - return vectorDataType switch + => vectorDataType switch { Type type when type == typeof(ReadOnlyMemory) || type == typeof(ReadOnlyMemory?) => VectorDataType.Float32, Type type when type == typeof(ReadOnlyMemory) || type == typeof(ReadOnlyMemory?) => VectorDataType.Uint8, Type type when type == typeof(ReadOnlyMemory) || type == typeof(ReadOnlyMemory?) => VectorDataType.Int8, _ => throw new InvalidOperationException($"Data type '{vectorDataType}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the Azure CosmosDB NoSQL VectorStore.") }; - } private async IAsyncEnumerable InternalGetAsync( IEnumerable keys, @@ -645,12 +548,13 @@ private async IAsyncEnumerable InternalGetAsync( const string OperationName = "GetItemQueryIterator"; var includeVectors = options?.IncludeVectors ?? false; - var fields = new List(includeVectors ? this._storagePropertyNames.Values : this._nonVectorStoragePropertyNames); + var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSelectQuery( - this._keyStoragePropertyName, - this._partitionKeyStoragePropertyName, + this._model, + this._model.KeyProperty.StorageName, + this._partitionKeyProperty.StorageName, keys.ToList(), - fields); + includeVectors); await foreach (var jsonObject in this.GetItemsAsync(queryDefinition, cancellationToken).ConfigureAwait(false)) { @@ -676,17 +580,17 @@ private async Task InternalUpsertAsync( OperationName, () => this._mapper.MapFromDataToStorageModel(record)); - var keyValue = jsonObject.TryGetPropertyValue(this._keyStoragePropertyName, out var jsonKey) ? jsonKey?.ToString() : null; - var partitionKeyValue = jsonObject.TryGetPropertyValue(this._partitionKeyStoragePropertyName, out var jsonPartitionKey) ? jsonPartitionKey?.ToString() : null; + var keyValue = jsonObject.TryGetPropertyValue(this._model.KeyProperty.StorageName!, out var jsonKey) ? jsonKey?.ToString() : null; + var partitionKeyValue = jsonObject.TryGetPropertyValue(this._partitionKeyProperty.StorageName, out var jsonPartitionKey) ? 
jsonPartitionKey?.ToString() : null; if (string.IsNullOrWhiteSpace(keyValue)) { - throw new VectorStoreOperationException($"Key property {this._propertyReader.KeyPropertyName} is not initialized."); + throw new VectorStoreOperationException($"Key property {this._model.KeyProperty.ModelName} is not initialized."); } if (string.IsNullOrWhiteSpace(partitionKeyValue)) { - throw new VectorStoreOperationException($"Partition key property {this._partitionKeyPropertyName} is not initialized."); + throw new VectorStoreOperationException($"Partition key property {this._partitionKeyProperty.ModelName} is not initialized."); } await this.RunOperationAsync(OperationName, () => @@ -760,9 +664,6 @@ private async IAsyncEnumerable> MapSearchResultsAsyn } } - /// - /// Returns custom mapper, generic data model mapper or default record mapper. - /// private IVectorStoreRecordMapper InitializeMapper(JsonSerializerOptions jsonSerializerOptions) { if (this._options.JsonObjectCustomMapper is not null) @@ -772,14 +673,11 @@ private IVectorStoreRecordMapper InitializeMapper(JsonSeria if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) { - var mapper = new AzureCosmosDBNoSQLGenericDataModelMapper(this._propertyReader.Properties, this._storagePropertyNames, jsonSerializerOptions); + var mapper = new AzureCosmosDBNoSQLGenericDataModelMapper(this._model, jsonSerializerOptions); return (mapper as IVectorStoreRecordMapper)!; } - return new AzureCosmosDBNoSQLVectorStoreRecordMapper( - this._storagePropertyNames[this._propertyReader.KeyPropertyName], - this._storagePropertyNames, - jsonSerializerOptions); + return new AzureCosmosDBNoSQLVectorStoreRecordMapper(this._model.KeyProperty, this._options.JsonSerializerOptions); } #endregion diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs index 4f6da286d51b..4c1ad9e7071f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs @@ -1,9 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Collections.Generic; using System.Text.Json; using System.Text.Json.Nodes; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; @@ -11,45 +11,29 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// Class for mapping between a json node stored in Azure CosmosDB NoSQL and the consumer data model. /// /// The consumer data model to map to or from. -internal sealed class AzureCosmosDBNoSQLVectorStoreRecordMapper : IVectorStoreRecordMapper +internal sealed class AzureCosmosDBNoSQLVectorStoreRecordMapper(VectorStoreRecordKeyPropertyModel keyProperty, JsonSerializerOptions? jsonSerializerOptions) + : IVectorStoreRecordMapper { - /// The JSON serializer options to use when converting between the data model and the Azure CosmosDB NoSQL record. - private readonly JsonSerializerOptions _jsonSerializerOptions; - - /// The storage property name of the key field of consumer data model. - private readonly string _keyStoragePropertyName; - - /// A dictionary that maps from a property name to the storage name that should be used when serializing it to json for data and vector properties. 
- private readonly Dictionary _storagePropertyNames = []; - - public AzureCosmosDBNoSQLVectorStoreRecordMapper( - string keyStoragePropertyName, - Dictionary storagePropertyNames, - JsonSerializerOptions jsonSerializerOptions) - { - Verify.NotNull(jsonSerializerOptions); - - this._keyStoragePropertyName = keyStoragePropertyName; - this._storagePropertyNames = storagePropertyNames; - this._jsonSerializerOptions = jsonSerializerOptions; - } + private readonly VectorStoreRecordKeyPropertyModel _keyProperty = keyProperty; public JsonObject MapFromDataToStorageModel(TRecord dataModel) { - var jsonObject = JsonSerializer.SerializeToNode(dataModel, this._jsonSerializerOptions)!.AsObject(); + var jsonObject = JsonSerializer.SerializeToNode(dataModel, jsonSerializerOptions)!.AsObject(); - // Key property in Azure CosmosDB NoSQL has a reserved name. - RenameJsonProperty(jsonObject, this._keyStoragePropertyName, AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName); + // The key property in Azure CosmosDB NoSQL is always named 'id'. + // But the external JSON serializer used just above isn't aware of that, and will produce a JSON object with another name, taking into + // account e.g. naming policies. TemporaryStorageName gets populated in the model builder - containing that name - once VectorStoreModelBuildingOptions.ReservedKeyPropertyName is set + RenameJsonProperty(jsonObject, this._keyProperty.TemporaryStorageName!, AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName); return jsonObject; } public TRecord MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) { - // Rename key property for valid deserialization. - RenameJsonProperty(storageModel, AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName, this._keyStoragePropertyName); + // See above comment. 
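// In other words: documents are stored with the reserved 'id' key name, so before deserializing the key
// is renamed back to the serializer-visible name captured in TemporaryStorageName, letting Deserialize
// below bind the value to the data model's key property.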
+ RenameJsonProperty(storageModel, AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName, this._keyProperty.TemporaryStorageName!); - return storageModel.Deserialize(this._jsonSerializerOptions)!; + return storageModel.Deserialize(jsonSerializerOptions)!; } #region private diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs index e18f176c2ea7..b5c3ce30fbfb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs @@ -9,22 +9,23 @@ using System.Reflection; using System.Runtime.CompilerServices; using System.Text; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; internal class AzureCosmosDBNoSqlFilterTranslator { - private IReadOnlyDictionary _storagePropertyNames = null!; + private VectorStoreRecordModel _model = null!; private ParameterExpression _recordParameter = null!; private readonly Dictionary _parameters = new(); private readonly StringBuilder _sql = new(); - internal (string WhereClause, Dictionary Parameters) Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + internal (string WhereClause, Dictionary Parameters) Translate(LambdaExpression lambdaExpression, VectorStoreRecordModel model) { Debug.Assert(this._sql.Length == 0); - this._storagePropertyNames = storagePropertyNames; + this._model = model; Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; @@ -252,11 +253,12 @@ private bool TryGetPropertyAccess(Expression expression, [NotNullWhen(true)] out { if (expression is MemberExpression member && member.Expression == this._recordParameter) { - if (!this._storagePropertyNames.TryGetValue(member.Member.Name, out column)) + if (!this._model.PropertyMap.TryGetValue(member.Member.Name, out var property)) { throw new InvalidOperationException($"Property name '{member.Member.Name}' provided as part of the filter clause is not a valid property name."); } + column = property.StorageName; return true; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlVectorStoreModelBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlVectorStoreModelBuilder.cs new file mode 100644 index 000000000000..6bff6d46c5cf --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlVectorStoreModelBuilder.cs @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using Microsoft.Extensions.VectorData.ConnectorSupport; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; + +internal class AzureCosmosDBNoSqlVectorStoreModelBuilder() : VectorStoreRecordJsonModelBuilder(s_modelBuildingOptions) +{ + private static readonly VectorStoreRecordModelBuildingOptions s_modelBuildingOptions = new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + UsesExternalSerializer = true, + + // TODO: Cosmos supports other key types (int, Guid...) 
+ SupportedKeyPropertyTypes = [typeof(string)], + SupportedDataPropertyTypes = s_supportedDataTypes, + SupportedEnumerableDataPropertyElementTypes = s_supportedDataTypes, + SupportedVectorPropertyTypes = s_supportedVectorTypes, + + ReservedKeyStorageName = AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName, + }; + + private static readonly HashSet s_supportedDataTypes = + [ + typeof(bool), + typeof(string), + typeof(int), + typeof(long), + typeof(float), + typeof(double), + typeof(DateTimeOffset) + ]; + + internal static readonly HashSet s_supportedVectorTypes = + [ + // Float32 + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?), + // Uint8 + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?), + // Int8 + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?), + ]; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs index cad9bd1048c2..50dbce83aa4b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs @@ -9,22 +9,25 @@ using System.Reflection; using System.Runtime.CompilerServices; using System.Text; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors; +#pragma warning disable MEVD9001 // Microsoft.Extensions.VectorData experimental connector-facing APIs + internal abstract class SqlFilterTranslator { - private readonly IReadOnlyDictionary _storagePropertyNames; + private readonly VectorStoreRecordModel _model; private readonly LambdaExpression _lambdaExpression; private readonly ParameterExpression _recordParameter; protected readonly StringBuilder _sql; internal SqlFilterTranslator( - IReadOnlyDictionary storagePropertyNames, + VectorStoreRecordModel model, LambdaExpression lambdaExpression, StringBuilder? sql = null) { - this._storagePropertyNames = storagePropertyNames; + this._model = model; this._lambdaExpression = lambdaExpression; Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; @@ -296,11 +299,13 @@ private bool TryGetColumn(Expression expression, [NotNullWhen(true)] out string? 
{ if (expression is MemberExpression member && member.Expression == this._recordParameter) { - if (!this._storagePropertyNames.TryGetValue(member.Member.Name, out column)) + if (!this._model.PropertyMap.TryGetValue(member.Member.Name, out var property)) { throw new InvalidOperationException($"Property name '{member.Member.Name}' provided as part of the filter clause is not a valid property name."); } + column = property.StorageName; + return true; } diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index 933abb888f61..de7d1a83ad5e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -9,6 +9,7 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.InMemory; @@ -22,13 +23,6 @@ public sealed class InMemoryVectorStoreRecordCollection : IVector #pragma warning restore CA1711 // Identifiers should not have incorrect suffix where TKey : notnull { - /// A set of types that vectors on the provided model may have. - private static readonly HashSet s_supportedVectorTypes = - [ - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?), - ]; - /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -44,11 +38,8 @@ public sealed class InMemoryVectorStoreRecordCollection : IVector /// The name of the collection that this will access. private readonly string _collectionName; - /// A helper to access property information for the current data model and record definition. - private readonly VectorStoreRecordPropertyReader _propertyReader; - - /// A dictionary of vector properties on the provided model, keyed by the property name. - private readonly Dictionary _vectorProperties; + /// The model for this collection. + private readonly VectorStoreRecordModel _model; /// An function to look up vectors from the records. private readonly InMemoryVectorStoreVectorResolver _vectorResolver; @@ -56,6 +47,19 @@ public sealed class InMemoryVectorStoreRecordCollection : IVector /// An function to look up keys from the records. private readonly InMemoryVectorStoreKeyResolver _keyResolver; + private static readonly VectorStoreRecordModelBuildingOptions s_validationOptions = new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + + // Disable property type validation + SupportedKeyPropertyTypes = null, + SupportedDataPropertyTypes = null, + SupportedEnumerableDataPropertyElementTypes = null, + SupportedVectorPropertyTypes = [typeof(ReadOnlyMemory)] + }; + /// /// Initializes a new instance of the class. /// @@ -65,22 +69,40 @@ public InMemoryVectorStoreRecordCollection(string collectionName, InMemoryVector { // Verify. Verify.NotNullOrWhiteSpace(collectionName); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); // Assign. this._collectionName = collectionName; this._internalCollections = new(); this._internalCollectionTypes = new(); this._options = options ?? 
new InMemoryVectorStoreRecordCollectionOptions(); - this._propertyReader = new VectorStoreRecordPropertyReader(typeof(TRecord), this._options.VectorStoreRecordDefinition, new() { RequiresAtLeastOneVector = false, SupportsMultipleKeys = false, SupportsMultipleVectors = true }); - // Validate property types. - this._propertyReader.VerifyVectorProperties(s_supportedVectorTypes); - this._vectorProperties = this._propertyReader.VectorProperties.ToDictionary(x => x.DataModelPropertyName); + this._model = new VectorStoreRecordModelBuilder(s_validationOptions) + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); // Assign resolvers. - this._vectorResolver = CreateVectorResolver(this._options.VectorResolver, this._vectorProperties); - this._keyResolver = CreateKeyResolver(this._options.KeyResolver, this._propertyReader.KeyProperty); + // TODO: Make generic to avoid boxing +#pragma warning disable MEVD9000 // KeyResolver and VectorResolver are experimental + this._keyResolver = this._options.KeyResolver is null + ? record => (TKey)this._model.KeyProperty.GetValueAsObject(record!)! + : this._options.KeyResolver; + + this._vectorResolver = this._options.VectorResolver is not null + ? this._options.VectorResolver + : (vectorPropertyName, record) => + { + if (!this._model.PropertyMap.TryGetValue(vectorPropertyName, out var property)) + { + throw new InvalidOperationException($"The collection does not have a vector field named '{vectorPropertyName}', so vector search is not possible."); + } + + if (property is not VectorStoreRecordVectorPropertyModel vectorProperty) + { + throw new InvalidOperationException($"The property '{vectorPropertyName}' isn't a vector property."); + } + + return property.GetValueAsObject(record!); + }; +#pragma warning restore MEVD9000 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. } /// @@ -230,7 +252,7 @@ public async Task> VectorizedSearchAsync(T // Resolve options and get requested vector property or first as default. var internalOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(internalOptions); + var vectorProperty = this._model.GetVectorPropertyOrSingle(internalOptions); #pragma warning disable CS0618 // VectorSearchFilter is obsolete // Filter records using the provided filter before doing the vector comparison. @@ -247,7 +269,7 @@ public async Task> VectorizedSearchAsync(T // Compare each vector in the filtered results with the provided vector. var results = filteredRecords.Select(record => { - var vectorObject = this._vectorResolver(vectorProperty.DataModelPropertyName!, record); + var vectorObject = this._vectorResolver(vectorProperty.ModelName!, record); if (vectorObject is not ReadOnlyMemory dbVector) { return null; @@ -292,86 +314,4 @@ internal ConcurrentDictionary GetCollectionDictionary() return collectionDictionary; } - - /// - /// Pick / create a vector resolver that will read a vector from a record in the store based on the vector name. - /// 1. If an override resolver is provided, use that. - /// 2. If the record type is create a resolver that looks up the vector in its dictionary. - /// 3. Otherwise, create a resolver that assumes the vector is a property directly on the record and use the record definition to determine the name. - /// - /// The override vector resolver if one was provided. - /// A dictionary of vector properties from the record definition. - /// The . 
- private static InMemoryVectorStoreVectorResolver CreateVectorResolver(InMemoryVectorStoreVectorResolver? overrideVectorResolver, Dictionary vectorProperties) - { - // Custom resolver. - if (overrideVectorResolver is not null) - { - return overrideVectorResolver; - } - - // Generic data model resolver. - if (typeof(TRecord).IsGenericType && typeof(TRecord).GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) - { - return (vectorName, record) => - { - var genericDataModelRecord = record as VectorStoreGenericDataModel; - var vectorsDictionary = genericDataModelRecord!.Vectors; - if (vectorsDictionary != null && vectorsDictionary.TryGetValue(vectorName, out var vector)) - { - return vector; - } - - throw new InvalidOperationException($"The collection does not have a vector field named '{vectorName}', so vector search is not possible."); - }; - } - - // Default resolver. - var vectorPropertiesInfo = vectorProperties.Values - .Select(x => x.DataModelPropertyName) - .Select(x => typeof(TRecord).GetProperty(x) ?? throw new ArgumentException($"Vector property '{x}' was not found on {typeof(TRecord).Name}")) - .ToDictionary(x => x.Name); - - return (vectorName, record) => - { - if (vectorPropertiesInfo.TryGetValue(vectorName, out var vectorPropertyInfo)) - { - return vectorPropertyInfo.GetValue(record); - } - - throw new InvalidOperationException($"The collection does not have a vector field named '{vectorName}', so vector search is not possible."); - }; - } - - /// - /// Pick / create a key resolver that will read a key from a record in the store. - /// 1. If an override resolver is provided, use that. - /// 2. If the record type is create a resolver that reads the Key property from it. - /// 3. Otherwise, create a resolver that assumes the key is a property directly on the record and use the record definition to determine the name. - /// - /// The override key resolver if one was provided. - /// They key property from the record definition. - /// The . - private static InMemoryVectorStoreKeyResolver CreateKeyResolver(InMemoryVectorStoreKeyResolver? overrideKeyResolver, VectorStoreRecordKeyProperty keyProperty) - { - // Custom resolver. - if (overrideKeyResolver is not null) - { - return overrideKeyResolver; - } - - // Generic data model resolver. - if (typeof(TRecord).IsGenericType && typeof(TRecord).GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) - { - return (record) => - { - var genericDataModelRecord = record as VectorStoreGenericDataModel; - return genericDataModelRecord!.Key; - }; - } - - // Default resolver. - var keyPropertyInfo = typeof(TRecord).GetProperty(keyProperty.DataModelPropertyName) ?? throw new ArgumentException($"Key property {keyProperty.DataModelPropertyName} not found on {typeof(TRecord).Name}"); - return (record) => (TKey)keyPropertyInfo.GetValue(record)!; - } } diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollectionOptions.cs index 5e5dfc7e166a..7a98830df1bf 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollectionOptions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
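// The System.Diagnostics.CodeAnalysis import added below supports the [Experimental("MEVD9000")]
// attributes applied to the VectorResolver and KeyResolver options later in this file.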
+using System.Diagnostics.CodeAnalysis; using Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.InMemory; @@ -30,6 +31,7 @@ public sealed class InMemoryVectorStoreRecordCollectionOptions /// using reflection. This delegate can be used to provide a custom implementation if /// the vector properties are located somewhere else on the record. /// + [Experimental("MEVD9000")] public InMemoryVectorStoreVectorResolver? VectorResolver { get; init; } = null; /// @@ -40,5 +42,6 @@ public sealed class InMemoryVectorStoreRecordCollectionOptions /// using reflection. This delegate can be used to provide a custom implementation if /// the key property is located somewhere else on the record. /// + [Experimental("MEVD9000")] public InMemoryVectorStoreKeyResolver? KeyResolver { get; init; } = null; } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs index 202908de1c0b..80414a43910c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs @@ -9,6 +9,7 @@ using System.Linq.Expressions; using System.Reflection; using System.Runtime.CompilerServices; +using Microsoft.Extensions.VectorData.ConnectorSupport; using MongoDB.Bson; namespace Microsoft.SemanticKernel.Connectors.MongoDB; @@ -17,12 +18,12 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; // Information specific to vector search pre-filter: https://www.mongodb.com/docs/atlas/atlas-vector-search/vector-search-stage/#atlas-vector-search-pre-filter internal class MongoDBFilterTranslator { - private IReadOnlyDictionary _storagePropertyNames = null!; + private VectorStoreRecordModel _model = null!; private ParameterExpression _recordParameter = null!; - internal BsonDocument Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + internal BsonDocument Translate(LambdaExpression lambdaExpression, VectorStoreRecordModel model) { - this._storagePropertyNames = storagePropertyNames; + this._model = model; Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; @@ -223,11 +224,12 @@ private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] { if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) { - if (!this._storagePropertyNames.TryGetValue(memberExpression.Member.Name, out storagePropertyName)) + if (!this._model.PropertyMap.TryGetValue(memberExpression.Member.Name, out var property)) { throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); } + storagePropertyName = property.StorageName; return true; } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionCreateMapping.cs index 3d6b634a14e1..d7462cbe2b7b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionCreateMapping.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using MongoDB.Bson; namespace 
Microsoft.SemanticKernel.Connectors.MongoDB; @@ -16,25 +17,19 @@ internal static class MongoDBVectorStoreCollectionCreateMapping /// Returns an array of indexes to create for vector properties. /// /// Collection of vector properties for index creation. - /// A dictionary that maps from a property name to the storage name. - public static BsonArray GetVectorIndexFields( - IReadOnlyList vectorProperties, - Dictionary storagePropertyNames) + public static BsonArray GetVectorIndexFields(IReadOnlyList vectorProperties) { var indexArray = new BsonArray(); // Create separate index for each vector property foreach (var property in vectorProperties) { - // Use index name same as vector property name with underscore - var vectorPropertyName = storagePropertyNames[property.DataModelPropertyName]; - var indexDocument = new BsonDocument { { "type", "vector" }, { "numDimensions", property.Dimensions }, - { "path", vectorPropertyName }, - { "similarity", GetDistanceFunction(property.DistanceFunction, vectorPropertyName) }, + { "path", property.StorageName }, + { "similarity", GetDistanceFunction(property.DistanceFunction, property.ModelName) }, }; indexArray.Add(indexDocument); @@ -47,10 +42,7 @@ public static BsonArray GetVectorIndexFields( /// Returns an array of indexes to create for filterable data properties. /// /// Collection of data properties for index creation. - /// A dictionary that maps from a property name to the storage name. - public static BsonArray GetFilterableDataIndexFields( - IReadOnlyList dataProperties, - Dictionary storagePropertyNames) + public static BsonArray GetFilterableDataIndexFields(IReadOnlyList dataProperties) { var indexArray = new BsonArray(); @@ -59,13 +51,10 @@ public static BsonArray GetFilterableDataIndexFields( { if (property.IsFilterable) { - // Use index name same as data property name with underscore - var dataPropertyName = storagePropertyNames[property.DataModelPropertyName]; - var indexDocument = new BsonDocument { { "type", "filter" }, - { "path", dataPropertyName }, + { "path", property.StorageName }, }; indexArray.Add(indexDocument); @@ -79,10 +68,7 @@ public static BsonArray GetFilterableDataIndexFields( /// Returns a list of of fields to index for full text search data properties. /// /// Collection of data properties for index creation. - /// A dictionary that maps from a property name to the storage name. - public static List GetFullTextSearchableDataIndexFields( - IReadOnlyList dataProperties, - Dictionary storagePropertyNames) + public static List GetFullTextSearchableDataIndexFields(IReadOnlyList dataProperties) { var fieldElements = new List(); @@ -91,11 +77,9 @@ public static List GetFullTextSearchableDataIndexFields( { if (property.IsFullTextSearchable) { - var dataPropertyName = storagePropertyNames[property.DataModelPropertyName]; - - fieldElements.Add(new BsonElement(dataPropertyName, new BsonArray() + fieldElements.Add(new BsonElement(property.StorageName, new BsonArray() { - new BsonDocument() { { "type", "string" }, } + new BsonDocument() { { "type", "string" } } })); } } @@ -107,15 +91,11 @@ public static List GetFullTextSearchableDataIndexFields( /// More information about MongoDB distance functions here: . /// private static string GetDistanceFunction(string? 
distanceFunction, string vectorPropertyName) - { - var vectorPropertyDistanceFunction = MongoDBVectorStoreCollectionSearchMapping.GetVectorPropertyDistanceFunction(distanceFunction); - - return vectorPropertyDistanceFunction switch + => distanceFunction switch { - DistanceFunction.CosineSimilarity => "cosine", + DistanceFunction.CosineSimilarity or null => "cosine", DistanceFunction.DotProductSimilarity => "dotProduct", DistanceFunction.EuclideanDistance => "euclidean", _ => throw new InvalidOperationException($"Distance function '{distanceFunction}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the MongoDB VectorStore.") }; - } } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionSearchMapping.cs index 8e0258f0aa21..75d617155c5a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionSearchMapping.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Linq; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using MongoDB.Bson; namespace Microsoft.SemanticKernel.Connectors.MongoDB; @@ -13,20 +14,17 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// internal static class MongoDBVectorStoreCollectionSearchMapping { - /// Returns distance function specified on vector property or default. - public static string GetVectorPropertyDistanceFunction(string? distanceFunction) => !string.IsNullOrWhiteSpace(distanceFunction) ? distanceFunction! : DistanceFunction.CosineSimilarity; - #pragma warning disable CS0618 // VectorSearchFilter is obsolete /// /// Build MongoDB filter from the provided . /// /// The to build MongoDB filter from. - /// A dictionary that maps from a property name to the storage name. + /// The model. /// Thrown when the provided filter type is unsupported. /// Thrown when property name specified in filter doesn't exist. public static BsonDocument? BuildLegacyFilter( VectorSearchFilter vectorSearchFilter, - Dictionary storagePropertyNames) + VectorStoreRecordModel model) { const string EqualOperator = "$eq"; @@ -59,25 +57,25 @@ internal static class MongoDBVectorStoreCollectionSearchMapping nameof(EqualToFilterClause)])}"); } - if (!storagePropertyNames.TryGetValue(propertyName, out var storagePropertyName)) + if (!model.PropertyMap.TryGetValue(propertyName, out var property)) { throw new InvalidOperationException($"Property name '{propertyName}' provided as part of the filter clause is not a valid property name."); } - if (filter.Contains(storagePropertyName)) + if (filter.Contains(property.StorageName)) { - if (filter[storagePropertyName] is BsonDocument document && document.Contains(filterOperator)) + if (filter[property.StorageName] is BsonDocument document && document.Contains(filterOperator)) { throw new NotSupportedException( $"Filter with operator '{filterOperator}' is already added to '{propertyName}' property. 
" + "Multiple filters of the same type in the same property are not supported."); } - filter[storagePropertyName][filterOperator] = propertyValue; + filter[property.StorageName][filterOperator] = propertyValue; } else { - filter[storagePropertyName] = new BsonDocument() { [filterOperator] = propertyValue }; + filter[property.StorageName] = new BsonDocument() { [filterOperator] = propertyValue }; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index f9e2fa091642..05577cf8a7d6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -4,13 +4,12 @@ using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; -using System.Reflection; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using MongoDB.Bson; -using MongoDB.Bson.Serialization.Attributes; using MongoDB.Driver; using MEVD = Microsoft.Extensions.VectorData; @@ -51,14 +50,8 @@ public class MongoDBVectorStoreRecordCollection : IVectorStoreRecordCol /// Interface for mapping between a storage model, and the consumer record data model. private readonly IVectorStoreRecordMapper _mapper; - /// A dictionary that maps from a property name to the storage name that should be used when serializing it for data and vector properties. - private readonly Dictionary _storagePropertyNames; - - /// Collection of vector storage property names. - private readonly List _vectorStoragePropertyNames; - - /// A helper to access property information for the current data model and record definition. - private readonly VectorStoreRecordPropertyReader _propertyReader; + /// The model for this collection. + private readonly VectorStoreRecordModel _model; /// public string CollectionName { get; } @@ -77,23 +70,13 @@ public MongoDBVectorStoreRecordCollection( // Verify. Verify.NotNull(mongoDatabase); Verify.NotNullOrWhiteSpace(collectionName); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.BsonDocumentCustomMapper is not null, MongoDBConstants.SupportedKeyTypes); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); // Assign. this._mongoDatabase = mongoDatabase; this._mongoCollection = mongoDatabase.GetCollection(collectionName); this.CollectionName = collectionName; this._options = options ?? 
new MongoDBVectorStoreRecordCollectionOptions(); - this._propertyReader = new VectorStoreRecordPropertyReader(typeof(TRecord), this._options.VectorStoreRecordDefinition, new() { RequiresAtLeastOneVector = false, SupportsMultipleKeys = false, SupportsMultipleVectors = true }); - - this._storagePropertyNames = GetStoragePropertyNames(this._propertyReader.Properties, typeof(TRecord)); - - // Use Mongo reserved key property name as storage key property name - this._storagePropertyNames[this._propertyReader.KeyPropertyName] = MongoDBConstants.MongoReservedKeyPropertyName; - - this._vectorStoragePropertyNames = this._propertyReader.VectorProperties.Select(property => this._storagePropertyNames[property.DataModelPropertyName]).ToList(); - + this._model = new MongoDBModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); this._mapper = this.InitializeMapper(); } @@ -269,15 +252,14 @@ public virtual async Task> VectorizedSearchAsync throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), - { OldFilter: VectorSearchFilter legacyFilter } => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(legacyFilter, this._storagePropertyNames), - { Filter: Expression> newFilter } => new MongoDBFilterTranslator().Translate(newFilter, this._storagePropertyNames), + { OldFilter: VectorSearchFilter legacyFilter } => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(legacyFilter, this._model), + { Filter: Expression> newFilter } => new MongoDBFilterTranslator().Translate(newFilter, this._model), _ => null }; #pragma warning restore CS0618 @@ -291,7 +273,7 @@ public virtual async Task> VectorizedSearchAsync> HybridSearchAsync(TVect Array vectorArray = VerifyVectorParam(vector); var searchOptions = options ?? s_defaultKeywordVectorizedHybridSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(new() { VectorProperty = searchOptions.VectorProperty }); - var vectorPropertyName = this._storagePropertyNames[vectorProperty.DataModelPropertyName]; - var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalProperty); - var textDataPropertyName = this._storagePropertyNames[textDataProperty.DataModelPropertyName]; + var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = searchOptions.VectorProperty }); + var textDataProperty = this._model.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalProperty); #pragma warning disable CS0618 // VectorSearchFilter is obsolete var filter = searchOptions switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), - { OldFilter: VectorSearchFilter legacyFilter } => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(legacyFilter, this._storagePropertyNames), - { Filter: Expression> newFilter } => new MongoDBFilterTranslator().Translate(newFilter, this._storagePropertyNames), + { OldFilter: VectorSearchFilter legacyFilter } => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(legacyFilter, this._model), + { Filter: Expression> newFilter } => new MongoDBFilterTranslator().Translate(newFilter, this._model), _ => null }; #pragma warning restore CS0618 // Constructing a query to fetch "skip + top" total items - // to perform skip logic locally, since skip option is not part of API. + // to perform skip logic locally, since skip option is not part of API. 
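// For example, Skip = 20 and Top = 10 fetches 30 items and drops the first 20 client-side;
// numCandidates below defaults to that total multiplied by MongoDBConstants.DefaultNumCandidatesRatio
// unless options.NumCandidates is set.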
var itemsAmount = searchOptions.Skip + searchOptions.Top; var numCandidates = this._options.NumCandidates ?? itemsAmount * MongoDBConstants.DefaultNumCandidatesRatio; @@ -350,8 +330,8 @@ public async Task> HybridSearchAsync(TVect this.CollectionName, this._options.VectorIndexName, this._options.FullTextSearchIndexName, - vectorPropertyName, - textDataPropertyName, + vectorProperty.StorageName, + textDataProperty.StorageName, ScorePropertyName, DocumentPropertyName, itemsAmount, @@ -387,13 +367,8 @@ private async Task CreateIndexesAsync(string collectionName, CancellationToken c { var fieldsArray = new BsonArray(); - fieldsArray.AddRange(MongoDBVectorStoreCollectionCreateMapping.GetVectorIndexFields( - this._propertyReader.VectorProperties, - this._storagePropertyNames)); - - fieldsArray.AddRange(MongoDBVectorStoreCollectionCreateMapping.GetFilterableDataIndexFields( - this._propertyReader.DataProperties, - this._storagePropertyNames)); + fieldsArray.AddRange(MongoDBVectorStoreCollectionCreateMapping.GetVectorIndexFields(this._model.VectorProperties)); + fieldsArray.AddRange(MongoDBVectorStoreCollectionCreateMapping.GetFilterableDataIndexFields(this._model.DataProperties)); if (fieldsArray.Count > 0) { @@ -411,9 +386,7 @@ private async Task CreateIndexesAsync(string collectionName, CancellationToken c { var fieldsDocument = new BsonDocument(); - fieldsDocument.AddRange(MongoDBVectorStoreCollectionCreateMapping.GetFullTextSearchableDataIndexFields( - this._propertyReader.DataProperties, - this._storagePropertyNames)); + fieldsDocument.AddRange(MongoDBVectorStoreCollectionCreateMapping.GetFullTextSearchableDataIndexFields(this._model.DataProperties)); if (fieldsDocument.ElementCount > 0) { @@ -455,13 +428,13 @@ private async Task> FindAsync(FilterDefinition 0) + if (!includeVectors) { - foreach (var vectorPropertyName in this._vectorStoragePropertyNames) + foreach (var vectorPropertyName in this._model.VectorProperties) { projectionDefinition = projectionDefinition is not null ? - projectionDefinition.Exclude(vectorPropertyName) : - projectionBuilder.Exclude(vectorPropertyName); + projectionDefinition.Exclude(vectorPropertyName.StorageName) : + projectionBuilder.Exclude(vectorPropertyName.StorageName); } } @@ -624,37 +597,6 @@ private async Task RunOperationWithRetryAsync( throw new VectorStoreOperationException("Retry logic failed."); } - /// - /// Gets storage property names taking into account BSON serialization attributes. - /// - private static Dictionary GetStoragePropertyNames( - IReadOnlyList properties, - Type dataModel) - { - var storagePropertyNames = new Dictionary(); - - foreach (var property in properties) - { - var propertyInfo = dataModel.GetProperty(property.DataModelPropertyName); - string propertyName; - - if (propertyInfo != null) - { - var bsonElementAttribute = propertyInfo.GetCustomAttribute(); - - propertyName = bsonElementAttribute?.ElementName ?? property.DataModelPropertyName; - } - else - { - propertyName = property.DataModelPropertyName; - } - - storagePropertyNames[property.DataModelPropertyName] = propertyName; - } - - return storagePropertyNames; - } - /// /// Returns custom mapper, generic data model mapper or default record mapper. 
/// @@ -667,10 +609,10 @@ private IVectorStoreRecordMapper InitializeMapper() if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) { - return (new MongoDBGenericDataModelMapper(this._propertyReader.RecordDefinition) as IVectorStoreRecordMapper)!; + return (new MongoDBGenericDataModelMapper(this._model) as IVectorStoreRecordMapper)!; } - return new MongoDBVectorStoreRecordMapper(this._propertyReader); + return new MongoDBVectorStoreRecordMapper(this._model); } private static Array VerifyVectorParam(TVector vector) diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs index 54a7202eaa07..4044e42825fc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs @@ -9,6 +9,7 @@ using System.Linq.Expressions; using System.Reflection; using System.Runtime.CompilerServices; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Pinecone; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -20,12 +21,12 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; // as we sometimes need to extend the collection (with for example another condition). internal class PineconeFilterTranslator { - private IReadOnlyDictionary _storagePropertyNames = null!; + private VectorStoreRecordModel _model = null!; private ParameterExpression _recordParameter = null!; - internal Metadata Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + internal Metadata Translate(LambdaExpression lambdaExpression, VectorStoreRecordModel model) { - this._storagePropertyNames = storagePropertyNames; + this._model = model; Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; @@ -226,11 +227,12 @@ private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] { if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) { - if (!this._storagePropertyNames.TryGetValue(memberExpression.Member.Name, out storagePropertyName)) + if (!this._model.PropertyMap.TryGetValue(memberExpression.Member.Name, out var property)) { throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); } + storagePropertyName = property.StorageName; return true; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeGenericDataModelMapper.cs deleted file mode 100644 index df783a230498..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeGenericDataModelMapper.cs +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.Extensions.VectorData; -using Pinecone; - -namespace Microsoft.SemanticKernel.Connectors.Pinecone; - -/// -/// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Pinecone. -/// -internal sealed class PineconeGenericDataModelMapper : IVectorStoreRecordMapper, Vector> -{ - private readonly VectorStoreRecordPropertyReader _propertyReader; - - /// - /// Initializes a new instance of the class. - /// - /// A helper to access property information for the current data model and record definition. 
- public PineconeGenericDataModelMapper( - VectorStoreRecordPropertyReader propertyReader) - { - Verify.NotNull(propertyReader); - - // Validate property types. - propertyReader.VerifyKeyProperties(PineconeVectorStoreRecordFieldMapping.s_supportedKeyTypes); - propertyReader.VerifyDataProperties(PineconeVectorStoreRecordFieldMapping.s_supportedDataTypes, PineconeVectorStoreRecordFieldMapping.s_supportedEnumerableDataElementTypes); - propertyReader.VerifyVectorProperties(PineconeVectorStoreRecordFieldMapping.s_supportedVectorTypes); - - // Assign. - this._propertyReader = propertyReader; - } - - /// - public Vector MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) - { - var metadata = new Metadata(); - - // Map data properties. - foreach (var dataProperty in this._propertyReader.DataProperties) - { - if (dataModel.Data.TryGetValue(dataProperty.DataModelPropertyName, out var propertyValue)) - { - var propertyStorageName = this._propertyReader.GetStoragePropertyName(dataProperty.DataModelPropertyName); - metadata[propertyStorageName] = propertyValue is not null - ? PineconeVectorStoreRecordFieldMapping.ConvertToMetadataValue(propertyValue) - : null; - } - } - - // Map vector property. - if (dataModel.Vectors.Count != 1) - { - throw new VectorStoreRecordMappingException($"Exactly one vector is supported by the Pinecone connector, but the provided data model contains {dataModel.Vectors.Count}."); - } - - if (!dataModel.Vectors.TryGetValue(this._propertyReader.FirstVectorPropertyName!, out var valuesObject) || valuesObject is not ReadOnlyMemory values) - { - throw new VectorStoreRecordMappingException($"Vector property '{this._propertyReader.FirstVectorPropertyName}' on provided record of type {nameof(VectorStoreGenericDataModel)} must be of type ReadOnlyMemory and not null."); - } - - // TODO: what about sparse values? - var result = new Vector - { - Id = dataModel.Key, - Values = values, - Metadata = metadata, - SparseValues = null - }; - - return result; - } - - /// - public VectorStoreGenericDataModel MapFromStorageToDataModel(Vector storageModel, StorageToDataModelMapperOptions options) - { - // Construct the data model. - var dataModel = new VectorStoreGenericDataModel(storageModel.Id); - - // Set Vector. - if (options?.IncludeVectors is true) - { - dataModel.Vectors.Add(this._propertyReader.FirstVectorPropertyName!, storageModel.Values); - } - - // Set Data. - if (storageModel.Metadata != null) - { - foreach (var dataProperty in this._propertyReader.DataProperties) - { - var propertyStorageName = this._propertyReader.GetStoragePropertyName(dataProperty.DataModelPropertyName); - if (storageModel.Metadata.TryGetValue(propertyStorageName, out var propertyValue)) - { - dataModel.Data[dataProperty.DataModelPropertyName] = - propertyValue is not null - ? 
PineconeVectorStoreRecordFieldMapping.ConvertFromMetadataValueToNativeType(propertyValue, dataProperty.PropertyType) - : null; - } - } - } - - return dataModel; - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionSearchMapping.cs index 8e633c76e47e..2b819060abfb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreCollectionSearchMapping.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Pinecone; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -17,10 +18,10 @@ internal static class PineconeVectorStoreCollectionSearchMapping /// Build a Pinecone from a set of filter clauses. /// /// The filter clauses to build the Pinecone from. - /// A mapping from property name to the name under which the property would be stored. + /// The model. /// The Pinecone . /// Thrown for invalid property names, value types or filter clause types. - public static Metadata BuildSearchFilter(IEnumerable? filterClauses, IReadOnlyDictionary storagePropertyNamesMap) + public static Metadata BuildSearchFilter(IEnumerable? filterClauses, VectorStoreRecordModel model) { var metadataMap = new Metadata(); @@ -33,7 +34,7 @@ public static Metadata BuildSearchFilter(IEnumerable? filterClause { if (filterClause is EqualToFilterClause equalToFilterClause) { - if (!storagePropertyNamesMap.TryGetValue(equalToFilterClause.FieldName, out var storagePropertyName)) + if (!model.PropertyMap.TryGetValue(equalToFilterClause.FieldName, out var property)) { throw new InvalidOperationException($"Property '{equalToFilterClause.FieldName}' is not a valid property name."); } @@ -49,7 +50,7 @@ public static Metadata BuildSearchFilter(IEnumerable? filterClause _ => throw new NotSupportedException($"Unsupported filter value type '{equalToFilterClause.Value.GetType().Name}'.") }; - metadataMap.Add(storagePropertyName, metadataValue); + metadataMap.Add(property.StorageName, metadataValue); } else { diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 080d22ed9b4f..d8df1e4b9f2a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -8,6 +8,7 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Pinecone; using Sdk = Pinecone; @@ -27,7 +28,7 @@ public class PineconeVectorStoreRecordCollection : IVectorStoreRecordCo private readonly Sdk.PineconeClient _pineconeClient; private readonly PineconeVectorStoreRecordCollectionOptions _options; - private readonly VectorStoreRecordPropertyReader _propertyReader; + private readonly VectorStoreRecordModel _model; private readonly IVectorStoreRecordMapper _mapper; private IndexClient? 
_indexClient; @@ -47,37 +48,13 @@ public PineconeVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, st Verify.NotNull(pineconeClient); VerifyCollectionName(collectionName); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.VectorCustomMapper is not null, PineconeVectorStoreRecordFieldMapping.s_supportedKeyTypes); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); - this._pineconeClient = pineconeClient; this.CollectionName = collectionName; this._options = options ?? new PineconeVectorStoreRecordCollectionOptions(); - this._propertyReader = new VectorStoreRecordPropertyReader( - typeof(TRecord), - this._options.VectorStoreRecordDefinition, - new() - { - RequiresAtLeastOneVector = true, - SupportsMultipleKeys = false, - SupportsMultipleVectors = false, - }); + this._model = new VectorStoreRecordModelBuilder(PineconeVectorStoreRecordFieldMapping.ModelBuildingOptions) + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); - if (this._options.VectorCustomMapper is not null) - { - // Custom Mapper. - this._mapper = this._options.VectorCustomMapper; - } - else if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) - { - // Generic data model mapper. - this._mapper = (new PineconeGenericDataModelMapper(this._propertyReader) as IVectorStoreRecordMapper)!; - } - else - { - // Default Mapper. - this._mapper = new PineconeVectorStoreRecordMapper(this._propertyReader); - } + this._mapper = this._options.VectorCustomMapper ?? new PineconeVectorStoreRecordMapper(this._model); } /// @@ -95,18 +72,18 @@ public virtual Task CollectionExistsAsync(CancellationToken cancellationTo public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) { // we already run through record property validation, so a single VectorStoreRecordVectorProperty is guaranteed. - var vectorProperty = this._propertyReader.VectorProperty!; + var vectorProperty = this._model.VectorProperty!; if (!string.IsNullOrEmpty(vectorProperty.IndexKind) && vectorProperty.IndexKind != "PGA") { throw new InvalidOperationException( - $"IndexKind of '{vectorProperty.IndexKind}' for property '{vectorProperty.DataModelPropertyName}' is not supported. Pinecone only supports 'PGA' (Pinecone Graph Algorithm), which is always enabled."); + $"IndexKind of '{vectorProperty.IndexKind}' for property '{vectorProperty.ModelName}' is not supported. Pinecone only supports 'PGA' (Pinecone Graph Algorithm), which is always enabled."); } CreateIndexRequest request = new() { Name = this.CollectionName, - Dimension = vectorProperty.Dimensions ?? throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' must be set to a positive integer to create a collection."), + Dimension = vectorProperty.Dimensions ?? 
throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' must be set to a positive integer to create a collection."), Metric = MapDistanceFunction(vectorProperty), Spec = new ServerlessIndexSpec { @@ -341,8 +318,8 @@ public virtual async Task> VectorizedSearchAsync throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), - { OldFilter: VectorSearchFilter legacyFilter } => PineconeVectorStoreCollectionSearchMapping.BuildSearchFilter(options.OldFilter?.FilterClauses, this._propertyReader.StoragePropertyNamesMap), - { Filter: Expression> newFilter } => new PineconeFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), + { OldFilter: VectorSearchFilter legacyFilter } => PineconeVectorStoreCollectionSearchMapping.BuildSearchFilter(options.OldFilter?.FilterClauses, this._model), + { Filter: Expression> newFilter } => new PineconeFilterTranslator().Translate(newFilter, this._model), _ => null }; #pragma warning restore CS0618 @@ -438,13 +415,12 @@ private static ServerlessSpecCloud MapCloud(string serverlessIndexCloud) _ => throw new ArgumentException($"Invalid serverless index cloud: {serverlessIndexCloud}.", nameof(serverlessIndexCloud)) }; - private static CreateIndexRequestMetric MapDistanceFunction(VectorStoreRecordVectorProperty vectorProperty) + private static CreateIndexRequestMetric MapDistanceFunction(VectorStoreRecordVectorPropertyModel vectorProperty) => vectorProperty.DistanceFunction switch { - DistanceFunction.CosineSimilarity => CreateIndexRequestMetric.Cosine, + DistanceFunction.CosineSimilarity or null => CreateIndexRequestMetric.Cosine, DistanceFunction.DotProductSimilarity => CreateIndexRequestMetric.Dotproduct, DistanceFunction.EuclideanSquaredDistance => CreateIndexRequestMetric.Euclidean, - null => CreateIndexRequestMetric.Cosine, _ => throw new NotSupportedException($"Distance function '{vectorProperty.DistanceFunction}' is not supported.") }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordFieldMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordFieldMapping.cs index 9573740f8580..3643ce5aa32f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordFieldMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordFieldMapping.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Linq; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Pinecone; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -13,6 +14,18 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// internal static class PineconeVectorStoreRecordFieldMapping { + public static readonly VectorStoreRecordModelBuildingOptions ModelBuildingOptions = new() + { + RequiresAtLeastOneVector = true, + SupportsMultipleKeys = false, + SupportsMultipleVectors = false, + + SupportedKeyPropertyTypes = PineconeVectorStoreRecordFieldMapping.s_supportedKeyTypes, + SupportedDataPropertyTypes = PineconeVectorStoreRecordFieldMapping.s_supportedDataTypes, + SupportedEnumerableDataPropertyElementTypes = [typeof(string)], + SupportedVectorPropertyTypes = PineconeVectorStoreRecordFieldMapping.s_supportedVectorTypes + }; + /// A set of types that a key on the provided model may have. 
public static readonly HashSet s_supportedKeyTypes = [typeof(string)]; @@ -20,18 +33,12 @@ internal static class PineconeVectorStoreRecordFieldMapping public static readonly HashSet s_supportedDataTypes = [ typeof(bool), - typeof(bool?), typeof(string), typeof(int), - typeof(int?), typeof(long), - typeof(long?), typeof(float), - typeof(float?), typeof(double), - typeof(double?), - typeof(decimal), - typeof(decimal?), + typeof(decimal) ]; /// A set of types that enumerable data properties on the provided model may use as their element types. diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs index 1163c1a66bea..27e2017d3cf9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs @@ -2,6 +2,7 @@ using System; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Pinecone; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -10,51 +11,30 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// Mapper between a Pinecone record and the consumer data model that uses json as an intermediary to allow supporting a wide range of models. /// /// The consumer data model to map to or from. -internal sealed class PineconeVectorStoreRecordMapper : IVectorStoreRecordMapper +internal sealed class PineconeVectorStoreRecordMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper { - private readonly VectorStoreRecordPropertyReader _propertyReader; - - /// - /// Initializes a new instance of the class. - /// - /// A helper to access property information for the current data model and record definition. - public PineconeVectorStoreRecordMapper( - VectorStoreRecordPropertyReader propertyReader) - { - // Validate property types. - propertyReader.VerifyHasParameterlessConstructor(); - propertyReader.VerifyKeyProperties(PineconeVectorStoreRecordFieldMapping.s_supportedKeyTypes); - propertyReader.VerifyDataProperties(PineconeVectorStoreRecordFieldMapping.s_supportedDataTypes, PineconeVectorStoreRecordFieldMapping.s_supportedEnumerableDataElementTypes); - propertyReader.VerifyVectorProperties(PineconeVectorStoreRecordFieldMapping.s_supportedVectorTypes); - - // Assign. - this._propertyReader = propertyReader; - } - /// public Vector MapFromDataToStorageModel(TRecord dataModel) { - var keyObject = this._propertyReader.KeyPropertyInfo.GetValue(dataModel); + var keyObject = model.KeyProperty.GetValueAsObject(dataModel!); if (keyObject is null) { - throw new VectorStoreRecordMappingException($"Key property {this._propertyReader.KeyPropertyName} on provided record of type {typeof(TRecord).FullName} may not be null."); + throw new VectorStoreRecordMappingException($"Key property '{model.KeyProperty.ModelName}' on provided record of type '{typeof(TRecord).Name}' may not be null."); } var metadata = new Metadata(); - foreach (var dataPropertyInfo in this._propertyReader.DataPropertiesInfo) + foreach (var property in model.DataProperties) { - var propertyName = this._propertyReader.GetStoragePropertyName(dataPropertyInfo.Name); - var propertyValue = dataPropertyInfo.GetValue(dataModel); - if (propertyValue != null) + if (property.GetValueAsObject(dataModel!) 
is { } value) { - metadata[propertyName] = PineconeVectorStoreRecordFieldMapping.ConvertToMetadataValue(propertyValue); + metadata[property.StorageName] = PineconeVectorStoreRecordFieldMapping.ConvertToMetadataValue(value); } } - var valuesObject = this._propertyReader.FirstVectorPropertyInfo!.GetValue(dataModel); + var valuesObject = model.VectorProperty!.GetValueAsObject(dataModel!); if (valuesObject is not ReadOnlyMemory values) { - throw new VectorStoreRecordMappingException($"Vector property {this._propertyReader.FirstVectorPropertyName} on provided record of type {typeof(TRecord).FullName} may not be null."); + throw new VectorStoreRecordMappingException($"Vector property '{model.VectorProperty.ModelName}' on provided record of type '{typeof(TRecord).Name}' may not be null."); } // TODO: what about sparse values? @@ -72,29 +52,28 @@ public Vector MapFromDataToStorageModel(TRecord dataModel) /// public TRecord MapFromStorageToDataModel(Vector storageModel, StorageToDataModelMapperOptions options) { - // Construct the output record. - var outputRecord = (TRecord)this._propertyReader.ParameterLessConstructorInfo.Invoke(null); + var outputRecord = model.CreateRecord()!; - // Set Key. - this._propertyReader.KeyPropertyInfo.SetValue(outputRecord, storageModel.Id); + model.KeyProperty.SetValueAsObject(outputRecord, storageModel.Id); - // Set Vector. if (options?.IncludeVectors is true) { - this._propertyReader.FirstVectorPropertyInfo!.SetValue( + model.VectorProperty.SetValueAsObject( outputRecord, storageModel.Values); } - // Set Data. if (storageModel.Metadata != null) { - VectorStoreRecordMapping.SetValuesOnProperties( - outputRecord, - this._propertyReader.DataPropertiesInfo, - this._propertyReader.StoragePropertyNamesMap, - storageModel.Metadata, - PineconeVectorStoreRecordFieldMapping.ConvertFromMetadataValueToNativeType!); + foreach (var property in model.DataProperties) + { + if (storageModel.Metadata.TryGetValue(property.StorageName, out var metadataValue)) + { + property.SetValueAsObject( + outputRecord, + metadataValue is null ? null : PineconeVectorStoreRecordFieldMapping.ConvertFromMetadataValueToNativeType(metadataValue, property.Type)); + } + } } return outputRecord; diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs index 933c6b0ca1e7..0581c6a6c134 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreCollectionSqlBuilder.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Linq.Expressions; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Pgvector; namespace Microsoft.SemanticKernel.Connectors.Postgres; @@ -35,10 +36,10 @@ internal interface IPostgresVectorStoreCollectionSqlBuilder /// /// The schema of the table. /// The name of the table. - /// The properties of the table. + /// The collection model. /// Specifies whether to include IF NOT EXISTS in the command. /// The built SQL command info. 
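// --- Editor's illustrative sketch (C#, not part of this patch) ---------------------------
// The property models expose CLR accessors (GetValueAsObject / SetValueAsObject), which is
// what the rewritten Pinecone mapper above relies on instead of cached PropertyInfo lookups.
// A minimal, hedged example copying data fields between two instances of the same record type:

using Microsoft.Extensions.VectorData.ConnectorSupport;

internal static class PropertyAccessSketch
{
    public static void CopyDataFields(VectorStoreRecordModel model, object source, object destination)
    {
        foreach (var property in model.DataProperties)
        {
            // ModelName/StorageName are only needed when talking to the store;
            // the accessors work directly against the CLR properties.
            property.SetValueAsObject(destination, property.GetValueAsObject(source));
        }
    }
}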
- PostgresSqlCommandInfo BuildCreateTableCommand(string schema, string tableName, IReadOnlyList properties, bool ifNotExists = true); + PostgresSqlCommandInfo BuildCreateTableCommand(string schema, string tableName, VectorStoreRecordModel model, bool ifNotExists = true); /// /// Builds a SQL command to create a vector index in the Postgres vector store. @@ -86,22 +87,22 @@ internal interface IPostgresVectorStoreCollectionSqlBuilder /// /// The schema of the table. /// The name of the table. - /// The properties of the table. + /// The collection model. /// The key of the record to get. /// Specifies whether to include vectors in the record. /// The built SQL command info. - PostgresSqlCommandInfo BuildGetCommand(string schema, string tableName, IReadOnlyList properties, TKey key, bool includeVectors = false) where TKey : notnull; + PostgresSqlCommandInfo BuildGetCommand(string schema, string tableName, VectorStoreRecordModel model, TKey key, bool includeVectors = false) where TKey : notnull; /// /// Builds a SQL command to get a batch of records from the Postgres vector store. /// /// The schema of the table. /// The name of the table. - /// The properties of the table. + /// The collection model. /// The keys of the records to get. /// Specifies whether to include vectors in the records. /// The built SQL command info. - PostgresSqlCommandInfo BuildGetBatchCommand(string schema, string tableName, IReadOnlyList properties, List keys, bool includeVectors = false) where TKey : notnull; + PostgresSqlCommandInfo BuildGetBatchCommand(string schema, string tableName, VectorStoreRecordModel model, List keys, bool includeVectors = false) where TKey : notnull; /// /// Builds a SQL command to delete a record from the Postgres vector store. @@ -128,7 +129,7 @@ internal interface IPostgresVectorStoreCollectionSqlBuilder /// /// The schema of the table. /// The name of the table. - /// The property reader. + /// The collection model. /// The property which the vectors to compare are stored in. /// The vector to match. /// The filter conditions for the query. @@ -138,6 +139,6 @@ internal interface IPostgresVectorStoreCollectionSqlBuilder /// The maximum number of records to return. /// The built SQL command info. #pragma warning disable CS0618 // VectorSearchFilter is obsolete - PostgresSqlCommandInfo BuildGetNearestMatchCommand(string schema, string tableName, VectorStoreRecordPropertyReader propertyReader, VectorStoreRecordVectorProperty vectorProperty, Vector vectorValue, VectorSearchFilter? legacyFilter, Expression>? newFilter, int? skip, bool includeVectors, int limit); + PostgresSqlCommandInfo BuildGetNearestMatchCommand(string schema, string tableName, VectorStoreRecordModel model, VectorStoreRecordVectorPropertyModel vectorProperty, Vector vectorValue, VectorSearchFilter? legacyFilter, Expression>? newFilter, int? 
skip, bool includeVectors, int limit); #pragma warning restore CS0618 // VectorSearchFilter is obsolete } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs index e679ef030a03..20adedde23af 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs @@ -6,6 +6,7 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Npgsql; using Pgvector; @@ -39,11 +40,11 @@ internal interface IPostgresVectorStoreDbClient /// Create a table. Also creates an index on vector columns if the table has vector properties defined. /// /// The name assigned to a table of entries. - /// The properties of the record definition that define the table. + /// The collection model. /// Specifies whether to include IF NOT EXISTS in the command. /// The to monitor for cancellation requests. The default is . /// - Task CreateTableAsync(string tableName, IReadOnlyList properties, bool ifNotExists = true, CancellationToken cancellationToken = default); + Task CreateTableAsync(string tableName, VectorStoreRecordModel model, bool ifNotExists = true, CancellationToken cancellationToken = default); /// /// Drop a table. @@ -77,11 +78,11 @@ internal interface IPostgresVectorStoreDbClient /// /// The name assigned to a table of entries. /// The key of the entry to get. - /// The properties to include in the entry. + /// The collection model. /// If true, the vectors will be included in the entry. /// The to monitor for cancellation requests. The default is . /// The row if the key is found, otherwise null. - Task?> GetAsync(string tableName, TKey key, IReadOnlyList properties, bool includeVectors = false, CancellationToken cancellationToken = default) + Task?> GetAsync(string tableName, TKey key, VectorStoreRecordModel model, bool includeVectors = false, CancellationToken cancellationToken = default) where TKey : notnull; /// @@ -89,11 +90,11 @@ internal interface IPostgresVectorStoreDbClient /// /// The name assigned to a table of entries. /// The keys of the entries to get. - /// The properties of the table. + /// The collection model. /// If true, the vectors will be included in the entries. /// The to monitor for cancellation requests. The default is . /// The rows that match the given keys. - IAsyncEnumerable> GetBatchAsync(string tableName, IEnumerable keys, IReadOnlyList properties, bool includeVectors = false, CancellationToken cancellationToken = default) + IAsyncEnumerable> GetBatchAsync(string tableName, IEnumerable keys, VectorStoreRecordModel model, bool includeVectors = false, CancellationToken cancellationToken = default) where TKey : notnull; /// @@ -120,7 +121,7 @@ internal interface IPostgresVectorStoreDbClient /// Gets the nearest matches to the . /// /// The name assigned to a table of entries. - /// The property reader. + /// The collection model. /// The vector property. /// The to compare the table's vector with. /// The maximum number of similarity results to return. @@ -131,7 +132,7 @@ internal interface IPostgresVectorStoreDbClient /// The to monitor for cancellation requests. The default is . /// An asynchronous stream of result objects that the nearest matches to the . 
#pragma warning disable CS0618 // VectorSearchFilter is obsolete - IAsyncEnumerable<(Dictionary Row, double Distance)> GetNearestMatchesAsync(string tableName, VectorStoreRecordPropertyReader propertyReader, VectorStoreRecordVectorProperty vectorProperty, Vector vectorValue, int limit, + IAsyncEnumerable<(Dictionary Row, double Distance)> GetNearestMatchesAsync(string tableName, VectorStoreRecordModel model, VectorStoreRecordVectorPropertyModel vectorProperty, Vector vectorValue, int limit, VectorSearchFilter? legacyFilter = default, Expression>? newFilter = default, int? skip = default, bool includeVectors = false, CancellationToken cancellationToken = default); #pragma warning restore CS0618 // VectorSearchFilter is obsolete } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresConstants.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresConstants.cs index f8784890e83a..f7d490503b43 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresConstants.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Postgres; @@ -11,58 +12,55 @@ internal static class PostgresConstants /// The name of this database for telemetry purposes. public const string DatabaseName = "Postgres"; - /// A of types that a key on the provided model may have. - public static readonly HashSet SupportedKeyTypes = - [ - typeof(short), - typeof(int), - typeof(long), - typeof(string), - typeof(Guid), - ]; + /// Validation options. + public static readonly VectorStoreRecordModelBuildingOptions ModelBuildingOptions = new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, - /// A of types that data properties on the provided model may have. - public static readonly HashSet SupportedDataTypes = - [ - typeof(bool), - typeof(bool?), - typeof(short), - typeof(short?), - typeof(int), - typeof(int?), - typeof(long), - typeof(long?), - typeof(float), - typeof(float?), - typeof(double), - typeof(double?), - typeof(decimal), - typeof(decimal?), - typeof(string), - typeof(DateTime), - typeof(DateTime?), - typeof(DateTimeOffset), - typeof(DateTimeOffset?), - typeof(Guid), - typeof(Guid?), - typeof(byte[]), - ]; + SupportedKeyPropertyTypes = + [ + typeof(short), + typeof(int), + typeof(long), + typeof(string), + typeof(Guid) + ], - /// A of types that enumerable data properties on the provided model may use as their element types. 
- public static readonly HashSet SupportedEnumerableDataElementTypes = - [ - typeof(bool), - typeof(short), - typeof(int), - typeof(long), - typeof(float), - typeof(double), - typeof(decimal), - typeof(string), - typeof(DateTime), - typeof(DateTimeOffset), - typeof(Guid), - ]; + SupportedDataPropertyTypes = + [ + typeof(bool), + typeof(short), + typeof(int), + typeof(long), + typeof(float), + typeof(double), + typeof(decimal), + typeof(string), + typeof(DateTime), + typeof(DateTimeOffset), + typeof(Guid), + typeof(byte[]), + ], + + SupportedEnumerableDataPropertyElementTypes = + [ + typeof(bool), + typeof(short), + typeof(int), + typeof(long), + typeof(float), + typeof(double), + typeof(decimal), + typeof(string), + typeof(DateTime), + typeof(DateTimeOffset), + typeof(Guid), + ], + + SupportedVectorPropertyTypes = [typeof(ReadOnlyMemory)] + }; /// A of types that vector properties on the provided model may have. public static readonly HashSet SupportedVectorTypes = diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs index b4b9707c1c99..722b2be96eaa 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs @@ -2,6 +2,7 @@ using System.Collections.Generic; using System.Linq.Expressions; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Postgres; @@ -11,9 +12,10 @@ internal sealed class PostgresFilterTranslator : SqlFilterTranslator private int _parameterIndex; internal PostgresFilterTranslator( - IReadOnlyDictionary storagePropertyNames, + VectorStoreRecordModel model, LambdaExpression lambdaExpression, - int startParamIndex) : base(storagePropertyNames, lambdaExpression, sql: null) + int startParamIndex) + : base(model, lambdaExpression, sql: null) { this._parameterIndex = startParamIndex; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresGenericDataModelMapper.cs deleted file mode 100644 index efdec538c772..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresGenericDataModelMapper.cs +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using Microsoft.Extensions.VectorData; - -namespace Microsoft.SemanticKernel.Connectors.Postgres; - -internal sealed class PostgresGenericDataModelMapper : IVectorStoreRecordMapper, Dictionary> - where TKey : notnull -{ - /// with helpers for reading vector store model properties and their attributes. - private readonly VectorStoreRecordPropertyReader _propertyReader; - - /// - /// Initializes a new instance of the class. - /// /// - /// with helpers for reading vector store model properties and their attributes. - public PostgresGenericDataModelMapper(VectorStoreRecordPropertyReader propertyReader) - { - Verify.NotNull(propertyReader); - - this._propertyReader = propertyReader; - - // Validate property types. 
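// --- Editor's illustrative sketch (C#, not part of this patch) ---------------------------
// Per-connector type checks that previously went through VectorStoreRecordPropertyReader
// (VerifyKeyProperties, VerifyDataProperties, ...) are now declared once as
// VectorStoreRecordModelBuildingOptions and enforced when the model is built, as in the
// PostgresConstants.ModelBuildingOptions above. A hedged, stand-alone example; MyRecord and
// the exact supported-type lists here are hypothetical:

using System;
using Microsoft.Extensions.VectorData;
using Microsoft.Extensions.VectorData.ConnectorSupport;

internal sealed class MyRecord
{
    [VectorStoreRecordKey]
    public string Id { get; set; } = string.Empty;

    [VectorStoreRecordData]
    public string Text { get; set; } = string.Empty;

    [VectorStoreRecordVector(4)]
    public ReadOnlyMemory<float> Embedding { get; set; }
}

internal static class ModelBuildingSketch
{
    public static VectorStoreRecordModel Build() =>
        new VectorStoreRecordModelBuilder(new VectorStoreRecordModelBuildingOptions
        {
            RequiresAtLeastOneVector = false,
            SupportsMultipleKeys = false,
            SupportsMultipleVectors = true,

            SupportedKeyPropertyTypes = [typeof(string), typeof(Guid)],
            SupportedDataPropertyTypes = [typeof(string), typeof(int), typeof(bool)],
            SupportedEnumerableDataPropertyElementTypes = [typeof(string)],
            SupportedVectorPropertyTypes = [typeof(ReadOnlyMemory<float>)]
        })
        .Build(typeof(MyRecord), null); // throws if MyRecord violates the options
}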
- this._propertyReader.VerifyDataProperties(PostgresConstants.SupportedDataTypes, PostgresConstants.SupportedEnumerableDataElementTypes); - this._propertyReader.VerifyVectorProperties(PostgresConstants.SupportedVectorTypes); - } - - public Dictionary MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) - { - var properties = new Dictionary - { - // Add key property - { this._propertyReader.KeyPropertyStoragePropertyName, dataModel.Key } - }; - - // Add data properties - if (dataModel.Data is not null) - { - foreach (var property in this._propertyReader.DataProperties) - { - if (dataModel.Data.TryGetValue(property.DataModelPropertyName, out var dataValue)) - { - properties.Add(this._propertyReader.GetStoragePropertyName(property.DataModelPropertyName), dataValue); - } - } - } - - // Add vector properties - if (dataModel.Vectors is not null) - { - foreach (var property in this._propertyReader.VectorProperties) - { - if (dataModel.Vectors.TryGetValue(property.DataModelPropertyName, out var vectorValue)) - { - var result = PostgresVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vectorValue); - properties.Add(this._propertyReader.GetStoragePropertyName(property.DataModelPropertyName), result); - } - } - } - - return properties; - } - - public VectorStoreGenericDataModel MapFromStorageToDataModel(Dictionary storageModel, StorageToDataModelMapperOptions options) - { - TKey key; - var dataProperties = new Dictionary(); - var vectorProperties = new Dictionary(); - - // Process key property. - if (storageModel.TryGetValue(this._propertyReader.KeyPropertyStoragePropertyName, out var keyObject) && keyObject is not null) - { - key = (TKey)keyObject; - } - else - { - throw new VectorStoreRecordMappingException("No key property was found in the record retrieved from storage."); - } - - // Process data properties. - foreach (var property in this._propertyReader.DataProperties) - { - if (storageModel.TryGetValue(this._propertyReader.GetStoragePropertyName(property.DataModelPropertyName), out var dataValue)) - { - dataProperties.Add(property.DataModelPropertyName, dataValue); - } - } - - // Process vector properties - if (options.IncludeVectors) - { - foreach (var property in this._propertyReader.VectorProperties) - { - if (storageModel.TryGetValue(this._propertyReader.GetStoragePropertyName(property.DataModelPropertyName), out var vectorValue)) - { - vectorProperties.Add(property.DataModelPropertyName, PostgresVectorStoreRecordPropertyMapping.MapVectorForDataModel(vectorValue)); - } - } - } - - return new VectorStoreGenericDataModel(key) { Data = dataProperties, Vectors = vectorProperties }; - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs index 0f61e692ae7f..a1840384be84 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs @@ -54,11 +54,6 @@ public virtual IAsyncEnumerable ListCollectionNamesAsync(CancellationTok public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) where TKey : notnull { - if (!PostgresConstants.SupportedKeyTypes.Contains(typeof(TKey))) - { - throw new NotSupportedException($"Unsupported key type: {typeof(TKey)}"); - } - #pragma warning disable CS0618 // IPostgresVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs index ae8b0a1e9e21..139892ff8a51 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs @@ -6,6 +6,7 @@ using System.Linq.Expressions; using System.Text; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Npgsql; using NpgsqlTypes; using Pgvector; @@ -49,69 +50,34 @@ FROM information_schema.tables } /// - public PostgresSqlCommandInfo BuildCreateTableCommand(string schema, string tableName, IReadOnlyList properties, bool ifNotExists = true) + public PostgresSqlCommandInfo BuildCreateTableCommand(string schema, string tableName, VectorStoreRecordModel model, bool ifNotExists = true) { if (string.IsNullOrWhiteSpace(tableName)) { throw new ArgumentException("Table name cannot be null or whitespace", nameof(tableName)); } - VectorStoreRecordKeyProperty? keyProperty = default; - List dataProperties = new(); - List vectorProperties = new(); - - foreach (var property in properties) - { - if (property is VectorStoreRecordKeyProperty keyProp) - { - if (keyProperty != null) - { - // Should be impossible, as property reader should have already validated that - // multiple key properties are not allowed. - throw new ArgumentException("Record definition cannot have more than one key property."); - } - keyProperty = keyProp; - } - else if (property is VectorStoreRecordDataProperty dataProp) - { - dataProperties.Add(dataProp); - } - else if (property is VectorStoreRecordVectorProperty vectorProp) - { - vectorProperties.Add(vectorProp); - } - else - { - throw new NotSupportedException($"Property type {property.GetType().Name} is not supported by this store."); - } - } - - if (keyProperty == null) - { - throw new ArgumentException("Record definition must have a key property."); - } - - var keyName = keyProperty.StoragePropertyName ?? keyProperty.DataModelPropertyName; + var keyName = model.KeyProperty.StorageName; StringBuilder createTableCommand = new(); createTableCommand.AppendLine($"CREATE TABLE {(ifNotExists ? "IF NOT EXISTS " : "")}{schema}.\"{tableName}\" ("); // Add the key column - var keyPgTypeInfo = PostgresVectorStoreRecordPropertyMapping.GetPostgresTypeName(keyProperty.PropertyType); + var keyPgTypeInfo = PostgresVectorStoreRecordPropertyMapping.GetPostgresTypeName(model.KeyProperty.Type); createTableCommand.AppendLine($" \"{keyName}\" {keyPgTypeInfo.PgType} {(keyPgTypeInfo.IsNullable ? "" : "NOT NULL")},"); // Add the data columns - foreach (var dataProperty in dataProperties) + foreach (var dataProperty in model.DataProperties) { - string columnName = dataProperty.StoragePropertyName ?? 
dataProperty.DataModelPropertyName; - var dataPgTypeInfo = PostgresVectorStoreRecordPropertyMapping.GetPostgresTypeName(dataProperty.PropertyType); + string columnName = dataProperty.StorageName; + var dataPgTypeInfo = PostgresVectorStoreRecordPropertyMapping.GetPostgresTypeName(dataProperty.Type); createTableCommand.AppendLine($" \"{columnName}\" {dataPgTypeInfo.PgType} {(dataPgTypeInfo.IsNullable ? "" : "NOT NULL")},"); } // Add the vector columns - foreach (var vectorProperty in vectorProperties) + foreach (var vectorProperty in model.VectorProperties) { - string columnName = vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName; + string columnName = vectorProperty.StorageName; var vectorPgTypeInfo = PostgresVectorStoreRecordPropertyMapping.GetPgVectorTypeName(vectorProperty); createTableCommand.AppendLine($" \"{columnName}\" {vectorPgTypeInfo.PgType} {(vectorPgTypeInfo.IsNullable ? "" : "NOT NULL")},"); } @@ -239,62 +205,38 @@ ON CONFLICT ("{keyColumn}") } /// - public PostgresSqlCommandInfo BuildGetCommand(string schema, string tableName, IReadOnlyList properties, TKey key, bool includeVectors = false) + public PostgresSqlCommandInfo BuildGetCommand(string schema, string tableName, VectorStoreRecordModel model, TKey key, bool includeVectors = false) where TKey : notnull { List queryColumns = new(); - string? keyColumn = null; - foreach (var property in properties) + foreach (var property in model.Properties) { - if (property is VectorStoreRecordKeyProperty keyProperty) - { - if (keyColumn != null) - { - throw new ArgumentException("Record definition cannot have more than one key property."); - } - keyColumn = keyProperty.StoragePropertyName ?? keyProperty.DataModelPropertyName; - queryColumns.Add($"\"{keyColumn}\""); - } - else if (property is VectorStoreRecordDataProperty dataProperty) - { - string columnName = dataProperty.StoragePropertyName ?? dataProperty.DataModelPropertyName; - queryColumns.Add($"\"{columnName}\""); - } - else if (property is VectorStoreRecordVectorProperty vectorProperty && includeVectors) - { - string columnName = vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName; - queryColumns.Add($"\"{columnName}\""); - } + queryColumns.Add($"\"{property.StorageName}\""); } - Verify.NotNull(keyColumn, "Record definition must have a key property."); - var queryColumnList = string.Join(", ", queryColumns); return new PostgresSqlCommandInfo( commandText: $""" SELECT {queryColumnList} FROM {schema}."{tableName}" -WHERE "{keyColumn}" = ${1}; +WHERE "{model.KeyProperty.StorageName}" = ${1}; """, parameters: [new NpgsqlParameter() { Value = key }] ); } /// - public PostgresSqlCommandInfo BuildGetBatchCommand(string schema, string tableName, IReadOnlyList properties, List keys, bool includeVectors = false) + public PostgresSqlCommandInfo BuildGetBatchCommand(string schema, string tableName, VectorStoreRecordModel model, List keys, bool includeVectors = false) where TKey : notnull { NpgsqlDbType? keyType = PostgresVectorStoreRecordPropertyMapping.GetNpgsqlDbType(typeof(TKey)) ?? throw new ArgumentException($"Unsupported key type {typeof(TKey).Name}"); - var keyProperty = properties.OfType().FirstOrDefault() ?? throw new ArgumentException("Properties must contain a key property", nameof(properties)); - var keyColumn = keyProperty.StoragePropertyName ?? 
keyProperty.DataModelPropertyName; - // Generate the column names - var columns = properties - .Where(p => includeVectors || p is not VectorStoreRecordVectorProperty) - .Select(p => p.StoragePropertyName ?? p.DataModelPropertyName) + var columns = model.Properties + .Where(p => includeVectors || p is not VectorStoreRecordVectorPropertyModel) + .Select(p => p.StorageName) .ToList(); var columnNames = string.Join(", ", columns.Select(c => $"\"{c}\"")); @@ -304,7 +246,7 @@ public PostgresSqlCommandInfo BuildGetBatchCommand(string schema, string t var commandText = $""" SELECT {columnNames} FROM {schema}."{tableName}" -WHERE "{keyColumn}" = ANY($1); +WHERE "{model.KeyProperty.StorageName}" = ANY($1); """; return new PostgresSqlCommandInfo(commandText) @@ -352,14 +294,10 @@ DELETE FROM {schema}."{tableName}" #pragma warning disable CS0618 // VectorSearchFilter is obsolete /// public PostgresSqlCommandInfo BuildGetNearestMatchCommand( - string schema, string tableName, VectorStoreRecordPropertyReader propertyReader, VectorStoreRecordVectorProperty vectorProperty, Vector vectorValue, + string schema, string tableName, VectorStoreRecordModel model, VectorStoreRecordVectorPropertyModel vectorProperty, Vector vectorValue, VectorSearchFilter? legacyFilter, Expression>? newFilter, int? skip, bool includeVectors, int limit) { - var columns = string.Join(" ,", - propertyReader.RecordDefinition.Properties - .Select(property => property.StoragePropertyName ?? property.DataModelPropertyName) - .Select(column => $"\"{column}\"") - ); + var columns = string.Join(" ,", model.Properties.Select(property => $"\"{property.StorageName}\"")); var distanceFunction = vectorProperty.DistanceFunction ?? PostgresConstants.DefaultDistanceFunction; var distanceOp = distanceFunction switch @@ -373,15 +311,15 @@ public PostgresSqlCommandInfo BuildGetNearestMatchCommand( _ => throw new NotSupportedException($"Distance function {vectorProperty.DistanceFunction} is not supported.") }; - var vectorColumn = vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName; + var vectorColumn = vectorProperty.StorageName; // Start where clause params at 2, vector takes param 1. 
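// --- Editor's illustrative sketch (C#, not part of this patch) ---------------------------
// With every column name coming from the model's StorageName values, the nearest-match
// SELECT built above reduces to a simple projection over model.Properties. A hedged
// approximation of the shape of the generated SQL ($1 carries the query vector; filter
// parameters start at $2, as noted in the code above):

using System.Linq;
using Microsoft.Extensions.VectorData.ConnectorSupport;

internal static class NearestMatchSqlSketch
{
    public static string Build(
        string schema,
        string tableName,
        VectorStoreRecordModel model,
        VectorStoreRecordVectorPropertyModel vectorProperty,
        string distanceOp,
        int limit)
    {
        var columns = string.Join(", ", model.Properties.Select(p => $"\"{p.StorageName}\""));

        return $"""
            SELECT {columns}
            FROM {schema}."{tableName}"
            ORDER BY "{vectorProperty.StorageName}" {distanceOp} $1
            LIMIT {limit};
            """;
    }
}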
#pragma warning disable CS0618 // VectorSearchFilter is obsolete var (where, parameters) = (oldFilter: legacyFilter, newFilter) switch { (not null, not null) => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), - (not null, null) => GenerateLegacyFilterWhereClause(schema, tableName, propertyReader.RecordDefinition.Properties, legacyFilter, startParamIndex: 2), - (null, not null) => GenerateNewFilterWhereClause(propertyReader, newFilter), + (not null, null) => GenerateLegacyFilterWhereClause(schema, tableName, model, legacyFilter, startParamIndex: 2), + (null, not null) => GenerateNewFilterWhereClause(model, newFilter), _ => (Clause: string.Empty, Parameters: []) }; #pragma warning restore CS0618 // VectorSearchFilter is obsolete @@ -423,15 +361,15 @@ ORDER BY {PostgresConstants.DistanceColumnName} }; } - internal static (string Clause, List Parameters) GenerateNewFilterWhereClause(VectorStoreRecordPropertyReader propertyReader, LambdaExpression newFilter) + internal static (string Clause, List Parameters) GenerateNewFilterWhereClause(VectorStoreRecordModel model, LambdaExpression newFilter) { - PostgresFilterTranslator translator = new(propertyReader.StoragePropertyNamesMap, newFilter, startParamIndex: 2); + PostgresFilterTranslator translator = new(model, newFilter, startParamIndex: 2); translator.Translate(appendWhere: true); return (translator.Clause.ToString(), translator.ParameterValues); } #pragma warning disable CS0618 // VectorSearchFilter is obsolete - internal static (string Clause, List Parameters) GenerateLegacyFilterWhereClause(string schema, string tableName, IReadOnlyList properties, VectorSearchFilter legacyFilter, int startParamIndex) + internal static (string Clause, List Parameters) GenerateLegacyFilterWhereClause(string schema, string tableName, VectorStoreRecordModel model, VectorSearchFilter legacyFilter, int startParamIndex) { var whereClause = new StringBuilder("WHERE "); var filterClauses = new List(); @@ -443,26 +381,24 @@ internal static (string Clause, List Parameters) GenerateLegacyFilterWhe { if (filterClause is EqualToFilterClause equalTo) { - var property = properties.FirstOrDefault(p => p.DataModelPropertyName == equalTo.FieldName); + var property = model.Properties.FirstOrDefault(p => p.ModelName == equalTo.FieldName); if (property == null) { throw new ArgumentException($"Property {equalTo.FieldName} not found in record definition."); } - var columnName = property.StoragePropertyName ?? property.DataModelPropertyName; - filterClauses.Add($"\"{columnName}\" = ${paramIndex}"); + filterClauses.Add($"\"{property.StorageName}\" = ${paramIndex}"); parameters.Add(equalTo.Value); paramIndex++; } else if (filterClause is AnyTagEqualToFilterClause anyTagEqualTo) { - var property = properties.FirstOrDefault(p => p.DataModelPropertyName == anyTagEqualTo.FieldName); + var property = model.Properties.FirstOrDefault(p => p.ModelName == anyTagEqualTo.FieldName); if (property == null) { throw new ArgumentException($"Property {anyTagEqualTo.FieldName} not found in record definition."); } - if (property.PropertyType != typeof(List)) + if (property.Type != typeof(List)) { throw new ArgumentException($"Property {anyTagEqualTo.FieldName} must be of type List to use AnyTagEqualTo filter."); } - var columnName = property.StoragePropertyName ?? 
property.DataModelPropertyName; - filterClauses.Add($"\"{columnName}\" @> ARRAY[${paramIndex}::TEXT]"); + filterClauses.Add($"\"{property.StorageName}\" @> ARRAY[${paramIndex}::TEXT]"); parameters.Add(anyTagEqualTo.Value); paramIndex++; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs index 6efc7aa8f037..fc097e848881 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs @@ -8,6 +8,7 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Npgsql; using Pgvector; @@ -67,15 +68,14 @@ public async IAsyncEnumerable GetTablesAsync([EnumeratorCancellation] Ca } /// - public async Task CreateTableAsync(string tableName, IReadOnlyList properties, bool ifNotExists = true, CancellationToken cancellationToken = default) + public async Task CreateTableAsync(string tableName, VectorStoreRecordModel model, bool ifNotExists = true, CancellationToken cancellationToken = default) { // Prepare the SQL commands. - var commandInfo = this._sqlBuilder.BuildCreateTableCommand(this._schema, tableName, properties, ifNotExists); + var commandInfo = this._sqlBuilder.BuildCreateTableCommand(this._schema, tableName, model, ifNotExists); var createIndexCommands = - PostgresVectorStoreRecordPropertyMapping.GetIndexInfo(properties) + PostgresVectorStoreRecordPropertyMapping.GetIndexInfo(model.Properties) .Select(index => - this._sqlBuilder.BuildCreateIndexCommand(this._schema, tableName, index.column, index.kind, index.function, index.isVector, ifNotExists) - ); + this._sqlBuilder.BuildCreateIndexCommand(this._schema, tableName, index.column, index.kind, index.function, index.isVector, ifNotExists)); // Execute the commands in a transaction. 
NpgsqlConnection connection = await this.DataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false); @@ -130,18 +130,18 @@ public async Task UpsertBatchAsync(string tableName, IEnumerable - public async Task?> GetAsync(string tableName, TKey key, IReadOnlyList properties, bool includeVectors = false, CancellationToken cancellationToken = default) where TKey : notnull + public async Task?> GetAsync(string tableName, TKey key, VectorStoreRecordModel model, bool includeVectors = false, CancellationToken cancellationToken = default) where TKey : notnull { NpgsqlConnection connection = await this.DataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false); await using (connection) { - var commandInfo = this._sqlBuilder.BuildGetCommand(this._schema, tableName, properties, key, includeVectors); + var commandInfo = this._sqlBuilder.BuildGetCommand(this._schema, tableName, model, key, includeVectors); using NpgsqlCommand cmd = commandInfo.ToNpgsqlCommand(connection); using NpgsqlDataReader dataReader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); if (await dataReader.ReadAsync(cancellationToken).ConfigureAwait(false)) { - return this.GetRecord(dataReader, properties, includeVectors); + return this.GetRecord(dataReader, model, includeVectors); } return null; @@ -149,7 +149,7 @@ public async Task UpsertBatchAsync(string tableName, IEnumerable - public async IAsyncEnumerable> GetBatchAsync(string tableName, IEnumerable keys, IReadOnlyList properties, bool includeVectors = false, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable> GetBatchAsync(string tableName, IEnumerable keys, VectorStoreRecordModel model, bool includeVectors = false, [EnumeratorCancellation] CancellationToken cancellationToken = default) where TKey : notnull { Verify.NotNull(keys); @@ -164,12 +164,12 @@ public async Task UpsertBatchAsync(string tableName, IEnumerable(string tableName, string keyColumn, TKey key /// #pragma warning disable CS0618 // VectorSearchFilter is obsolete public async IAsyncEnumerable<(Dictionary Row, double Distance)> GetNearestMatchesAsync( - string tableName, VectorStoreRecordPropertyReader propertyReader, VectorStoreRecordVectorProperty vectorProperty, Vector vectorValue, int limit, + string tableName, VectorStoreRecordModel model, VectorStoreRecordVectorPropertyModel vectorProperty, Vector vectorValue, int limit, VectorSearchFilter? legacyFilter = default, Expression>? newFilter = default, int? 
skip = default, bool includeVectors = false, [EnumeratorCancellation] CancellationToken cancellationToken = default) #pragma warning restore CS0618 // VectorSearchFilter is obsolete { @@ -192,13 +192,13 @@ public async Task DeleteAsync(string tableName, string keyColumn, TKey key await using (connection) { - var commandInfo = this._sqlBuilder.BuildGetNearestMatchCommand(this._schema, tableName, propertyReader, vectorProperty, vectorValue, legacyFilter, newFilter, skip, includeVectors, limit); + var commandInfo = this._sqlBuilder.BuildGetNearestMatchCommand(this._schema, tableName, model, vectorProperty, vectorValue, legacyFilter, newFilter, skip, includeVectors, limit); using NpgsqlCommand cmd = commandInfo.ToNpgsqlCommand(connection); using NpgsqlDataReader dataReader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); while (await dataReader.ReadAsync(cancellationToken).ConfigureAwait(false)) { var distance = dataReader.GetDouble(dataReader.GetOrdinal(PostgresConstants.DistanceColumnName)); - yield return (Row: this.GetRecord(dataReader, propertyReader.RecordDefinition.Properties, includeVectors), Distance: distance); + yield return (Row: this.GetRecord(dataReader, model, includeVectors), Distance: distance); } } } @@ -237,17 +237,17 @@ internal void SetSqlBuilder(IPostgresVectorStoreCollectionSqlBuilder sqlBuilder) private Dictionary GetRecord( NpgsqlDataReader reader, - IEnumerable properties, + VectorStoreRecordModel model, bool includeVectors = false ) { var storageModel = new Dictionary(); - foreach (var property in properties) + foreach (var property in model.Properties) { - var isEmbedding = property is VectorStoreRecordVectorProperty; - var propertyName = property.StoragePropertyName ?? property.DataModelPropertyName; - var propertyType = property.PropertyType; + var isEmbedding = property is VectorStoreRecordVectorPropertyModel; + var propertyName = property.StorageName; + var propertyType = property.Type; var propertyValue = !isEmbedding || includeVectors ? PostgresVectorStoreRecordPropertyMapping.GetPropertyValue(reader, propertyName, propertyType) : null; storageModel.Add(propertyName, propertyValue); diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index 6d731a05153d..eb7eeb43d20a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -7,6 +7,7 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Npgsql; namespace Microsoft.SemanticKernel.Connectors.Postgres; @@ -30,8 +31,8 @@ public class PostgresVectorStoreRecordCollection : IVectorStoreRe // Optional configuration options for this class. private readonly PostgresVectorStoreRecordCollectionOptions _options; - /// A helper to access property information for the current data model and record definition. - private readonly VectorStoreRecordPropertyReader _propertyReader; + /// The model for this collection. + private readonly VectorStoreRecordModel _model; /// A mapper to use for converting between the data model and the Azure AI Search record. private readonly IVectorStoreRecordMapper> _mapper; @@ -64,44 +65,16 @@ internal PostgresVectorStoreRecordCollection(IPostgresVectorStoreDbClient client // Verify. 
Verify.NotNull(client); Verify.NotNullOrWhiteSpace(collectionName); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.DictionaryCustomMapper is not null, PostgresConstants.SupportedKeyTypes); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); // Assign. this._client = client; this.CollectionName = collectionName; this._options = options ?? new PostgresVectorStoreRecordCollectionOptions(); - this._propertyReader = new VectorStoreRecordPropertyReader( - typeof(TRecord), - this._options.VectorStoreRecordDefinition, - new() - { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true, - }); - - // Validate property types. - this._propertyReader.VerifyKeyProperties(PostgresConstants.SupportedKeyTypes); - this._propertyReader.VerifyDataProperties(PostgresConstants.SupportedDataTypes, PostgresConstants.SupportedEnumerableDataElementTypes); - this._propertyReader.VerifyVectorProperties(PostgresConstants.SupportedVectorTypes); - - // Resolve mapper. - // First, if someone has provided a custom mapper, use that. - // If they didn't provide a custom mapper, and the record type is the generic data model, use the built in mapper for that. - // Otherwise, use our own default mapper implementation for all other data models. - if (this._options.DictionaryCustomMapper is not null) - { - this._mapper = this._options.DictionaryCustomMapper; - } - else if (typeof(TRecord).IsGenericType && typeof(TRecord).GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) - { - this._mapper = (new PostgresGenericDataModelMapper(this._propertyReader) as IVectorStoreRecordMapper>)!; - } - else - { - this._mapper = new PostgresVectorStoreRecordMapper(this._propertyReader); - } + + this._model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions) + .Build(typeof(TRecord), options?.VectorStoreRecordDefinition); + + this._mapper = this._options.DictionaryCustomMapper ?? 
new PostgresVectorStoreRecordMapper(this._model); } /// @@ -153,13 +126,13 @@ public virtual Task UpsertAsync(TRecord record, CancellationToken cancella Verify.NotNull(storageModel); - var keyObj = storageModel[this._propertyReader.KeyPropertyStoragePropertyName]; + var keyObj = storageModel[this._model.KeyProperty.StorageName]; Verify.NotNull(keyObj); TKey key = (TKey)keyObj!; return this.RunOperationAsync(OperationName, async () => { - await this._client.UpsertAsync(this.CollectionName, storageModel, this._propertyReader.KeyPropertyStoragePropertyName, cancellationToken).ConfigureAwait(false); + await this._client.UpsertAsync(this.CollectionName, storageModel, this._model.KeyProperty.StorageName, cancellationToken).ConfigureAwait(false); return key; } ); @@ -183,10 +156,10 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable rec yield break; } - var keys = storageModels.Select(model => model[this._propertyReader.KeyPropertyStoragePropertyName]!).ToList(); + var keys = storageModels.Select(model => model[this._model.KeyProperty.StorageName]!).ToList(); await this.RunOperationAsync(OperationName, () => - this._client.UpsertBatchAsync(this.CollectionName, storageModels, this._propertyReader.KeyPropertyStoragePropertyName, cancellationToken) + this._client.UpsertBatchAsync(this.CollectionName, storageModels, this._model.KeyProperty.StorageName, cancellationToken) ).ConfigureAwait(false); foreach (var key in keys) { yield return (TKey)key!; } @@ -203,7 +176,7 @@ await this.RunOperationAsync(OperationName, () => return this.RunOperationAsync(OperationName, async () => { - var row = await this._client.GetAsync(this.CollectionName, key, this._propertyReader.RecordDefinition.Properties, includeVectors, cancellationToken).ConfigureAwait(false); + var row = await this._client.GetAsync(this.CollectionName, key, this._model, includeVectors, cancellationToken).ConfigureAwait(false); if (row is null) { return default; } return VectorStoreErrorHandler.RunModelConversion( @@ -224,7 +197,7 @@ public virtual IAsyncEnumerable GetAsync(IEnumerable keys, GetRec bool includeVectors = options?.IncludeVectors is true; return PostgresVectorStoreUtils.WrapAsyncEnumerableAsync( - this._client.GetBatchAsync(this.CollectionName, keys, this._propertyReader.RecordDefinition.Properties, includeVectors, cancellationToken) + this._client.GetBatchAsync(this.CollectionName, keys, this._model, includeVectors, cancellationToken) .SelectAsync(row => VectorStoreErrorHandler.RunModelConversion( PostgresConstants.DatabaseName, @@ -243,7 +216,7 @@ public virtual Task DeleteAsync(TKey key, CancellationToken cancellationToken = { const string OperationName = "Delete"; return this.RunOperationAsync(OperationName, () => - this._client.DeleteAsync(this.CollectionName, this._propertyReader.KeyPropertyStoragePropertyName, key, cancellationToken) + this._client.DeleteAsync(this.CollectionName, this._model.KeyProperty.StorageName, key, cancellationToken) ); } @@ -254,7 +227,7 @@ public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancel const string OperationName = "DeleteBatch"; return this.RunOperationAsync(OperationName, () => - this._client.DeleteBatchAsync(this.CollectionName, this._propertyReader.KeyPropertyStoragePropertyName, keys, cancellationToken) + this._client.DeleteBatchAsync(this.CollectionName, this._model.KeyProperty.StorageName, keys, cancellationToken) ); } @@ -275,7 +248,7 @@ public virtual Task> VectorizedSearchAsync } var searchOptions = options ?? 
s_defaultVectorSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions); + var vectorProperty = this._model.GetVectorPropertyOrSingle(searchOptions); var pgVector = PostgresVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); @@ -289,7 +262,7 @@ public virtual Task> VectorizedSearchAsync { var results = this._client.GetNearestMatchesAsync( this.CollectionName, - this._propertyReader, + this._model, vectorProperty, pgVector, searchOptions.Top, @@ -319,7 +292,7 @@ public virtual Task> VectorizedSearchAsync private Task InternalCreateCollectionAsync(bool ifNotExists, CancellationToken cancellationToken = default) { - return this._client.CreateTableAsync(this.CollectionName, this._propertyReader.RecordDefinition.Properties, ifNotExists, cancellationToken); + return this._client.CreateTableAsync(this.CollectionName, this._model, ifNotExists, cancellationToken); } private async Task RunOperationAsync(string operationName, Func operation) diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs index e656678413cc..06b5c88b2ab0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs @@ -3,59 +3,41 @@ using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Postgres; +#pragma warning disable SKEXP0020 + /// /// A mapper class that handles the conversion between data models and storage models for Postgres vector store. /// /// The type of the data model record. -internal sealed class PostgresVectorStoreRecordMapper : IVectorStoreRecordMapper> +internal sealed class PostgresVectorStoreRecordMapper(VectorStoreRecordModel model) + : IVectorStoreRecordMapper> { - /// with helpers for reading vector store model properties and their attributes. - private readonly VectorStoreRecordPropertyReader _propertyReader; - - /// - /// Initializes a new instance of the class. - /// - /// A that defines the schema of the data in the database. - public PostgresVectorStoreRecordMapper(VectorStoreRecordPropertyReader propertyReader) - { - Verify.NotNull(propertyReader); - - this._propertyReader = propertyReader; - - this._propertyReader.VerifyHasParameterlessConstructor(); - - // Validate property types. - this._propertyReader.VerifyDataProperties(PostgresConstants.SupportedDataTypes, PostgresConstants.SupportedEnumerableDataElementTypes); - this._propertyReader.VerifyVectorProperties(PostgresConstants.SupportedVectorTypes); - } - public Dictionary MapFromDataToStorageModel(TRecord dataModel) { + var keyProperty = model.KeyProperty; + var properties = new Dictionary { - // Add key property - { this._propertyReader.KeyPropertyStoragePropertyName, this._propertyReader.KeyPropertyInfo.GetValue(dataModel) } + { keyProperty.StorageName, keyProperty.GetValueAsObject(dataModel!) 
} }; - // Add data properties - foreach (var property in this._propertyReader.DataPropertiesInfo) + foreach (var property in model.DataProperties) { - properties.Add( - this._propertyReader.GetStoragePropertyName(property.Name), - property.GetValue(dataModel) - ); + properties.Add(property.StorageName, property.GetValueAsObject(dataModel!)); } - // Add vector properties - foreach (var property in this._propertyReader.VectorPropertiesInfo) + foreach (var property in model.VectorProperties) { - var propertyValue = property.GetValue(dataModel); - var result = PostgresVectorStoreRecordPropertyMapping.MapVectorForStorageModel(propertyValue); + var propertyValue = property.GetValueAsObject(dataModel!); - properties.Add(this._propertyReader.GetStoragePropertyName(property.Name), result); + properties.Add( + property.StorageName, + PostgresVectorStoreRecordPropertyMapping.MapVectorForStorageModel( + property.GetValueAsObject(dataModel!))); } return properties; @@ -63,36 +45,38 @@ public PostgresVectorStoreRecordMapper(VectorStoreRecordPropertyReader propertyR public TRecord MapFromStorageToDataModel(Dictionary storageModel, StorageToDataModelMapperOptions options) { - var record = (TRecord)this._propertyReader.ParameterLessConstructorInfo.Invoke(null); - - // Set key. - var keyPropertyValue = Convert.ChangeType( - storageModel[this._propertyReader.KeyPropertyStoragePropertyName], - this._propertyReader.KeyProperty.PropertyType); - - this._propertyReader.KeyPropertyInfo.SetValue(record, keyPropertyValue); + var record = model.CreateRecord()!; - // Process data properties. - var dataPropertiesInfoWithValues = VectorStoreRecordMapping.BuildPropertiesInfoWithValues( - this._propertyReader.DataPropertiesInfo, - this._propertyReader.StoragePropertyNamesMap, - storageModel); + var keyProperty = model.KeyProperty; + var keyPropertyValue = Convert.ChangeType(storageModel[keyProperty.StorageName], keyProperty.Type); + keyProperty.SetValueAsObject(record, keyPropertyValue); - VectorStoreRecordMapping.SetPropertiesOnRecord(record, dataPropertiesInfoWithValues); + foreach (var dataProperty in model.DataProperties) + { + dataProperty.SetValueAsObject(record, storageModel[dataProperty.StorageName]); + } if (options.IncludeVectors) { - // Process vector properties. - var vectorPropertiesInfoWithValues = VectorStoreRecordMapping.BuildPropertiesInfoWithValues( - this._propertyReader.VectorPropertiesInfo, - this._propertyReader.StoragePropertyNamesMap, - storageModel, - (object? vector, Type type) => + foreach (var vectorProperty in model.VectorProperties) + { + switch (storageModel[vectorProperty.StorageName]) { - return PostgresVectorStoreRecordPropertyMapping.MapVectorForDataModel(vector); - }); + case Pgvector.Vector pgVector: + vectorProperty.SetValueAsObject(record, pgVector.Memory); + continue; + + // TODO: Implement support for Half, binary, sparse embeddings (#11083) + + // TODO: We currently allow round-tripping null for the vector property; this is not supported for most (?) dedicated databases; think about it. 
+ case null: + vectorProperty.SetValueAsObject(record, null); + continue; - VectorStoreRecordMapping.SetPropertiesOnRecord(record, vectorPropertiesInfoWithValues); + case var value: + throw new InvalidOperationException($"Embedding vector read back from PostgreSQL is of type '{value.GetType().Name}' instead of the expected Pgvector.Vector type for property '{vectorProperty.ModelName}'."); + } + } } return record; diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs index d0a76147bf4d..06bd10bf4773 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs @@ -3,9 +3,11 @@ using System; using System.Collections; using System.Collections.Generic; +using System.Diagnostics; using System.Linq; using System.Runtime.InteropServices; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Npgsql; using NpgsqlTypes; using Pgvector; @@ -14,50 +16,20 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; internal static class PostgresVectorStoreRecordPropertyMapping { - internal static float[] GetOrCreateArray(ReadOnlyMemory memory) => - MemoryMarshal.TryGetArray(memory, out ArraySegment array) && - array.Count == array.Array!.Length ? - array.Array : - memory.ToArray(); - - public static Vector? MapVectorForStorageModel(TVector vector) - { - if (vector == null) + public static Vector? MapVectorForStorageModel(object? vector) + => vector switch { - return null; - } - - if (vector is ReadOnlyMemory floatMemory) - { - var vecArray = MemoryMarshal.TryGetArray(floatMemory, out ArraySegment array) && - array.Count == array.Array!.Length ? - array.Array : - floatMemory.ToArray(); - return new Vector(vecArray); - } + ReadOnlyMemory floatMemory + => new Pgvector.Vector( + MemoryMarshal.TryGetArray(floatMemory, out ArraySegment segment) && + segment.Count == segment.Array!.Length ? segment.Array : floatMemory.ToArray()), - throw new NotSupportedException($"Mapping for type {typeof(TVector).FullName} to a vector is not supported."); - } - - public static ReadOnlyMemory? MapVectorForDataModel(object? vector) - { - var pgVector = vector is Vector pgv ? pgv : null; - if (pgVector == null) { return null; } - var vecArray = pgVector.ToArray(); - return vecArray != null && vecArray.Length != 0 ? (ReadOnlyMemory)vecArray : null; - } - - public static TPropertyType? GetPropertyValue(NpgsqlDataReader reader, string propertyName) - { - int propertyIndex = reader.GetOrdinal(propertyName); + // TODO: Implement support for Half, binary, sparse embeddings (#11083) - if (reader.IsDBNull(propertyIndex)) - { - return default; - } + null => null, - return reader.GetFieldValue(propertyIndex); - } + var value => throw new NotSupportedException($"Mapping for type '{value.GetType().Name}' to a vector is not supported.") + }; public static object? GetPropertyValue(NpgsqlDataReader reader, string propertyName, Type propertyType) { @@ -164,14 +136,14 @@ public static (string PgType, bool IsNullable) GetPostgresTypeName(Type property /// /// The vector property. /// The PostgreSQL vector type name. 
- public static (string PgType, bool IsNullable) GetPgVectorTypeName(VectorStoreRecordVectorProperty vectorProperty) + public static (string PgType, bool IsNullable) GetPgVectorTypeName(VectorStoreRecordVectorPropertyModel vectorProperty) { if (vectorProperty.Dimensions <= 0) { throw new ArgumentException("Vector property must have a positive number of dimensions."); } - return ($"VECTOR({vectorProperty.Dimensions})", Nullable.GetUnderlyingType(vectorProperty.PropertyType) != null); + return ($"VECTOR({vectorProperty.Dimensions})", Nullable.GetUnderlyingType(vectorProperty.Type) != null); } public static NpgsqlParameter GetNpgsqlParameter(object? value) @@ -206,40 +178,48 @@ public static NpgsqlParameter GetNpgsqlParameter(object? value) /// /// The default index kind is "Flat", which prevents the creation of an index. /// - public static List<(string column, string kind, string function, bool isVector)> GetIndexInfo(IReadOnlyList properties) + public static List<(string column, string kind, string function, bool isVector)> GetIndexInfo(IReadOnlyList properties) { var vectorIndexesToCreate = new List<(string column, string kind, string function, bool isVector)>(); foreach (var property in properties) { - var columnName = property.StoragePropertyName ?? property.DataModelPropertyName; - - if (property is VectorStoreRecordVectorProperty vectorProperty) + switch (property) { - var indexKind = vectorProperty.IndexKind ?? PostgresConstants.DefaultIndexKind; - var distanceFunction = vectorProperty.DistanceFunction ?? PostgresConstants.DefaultDistanceFunction; - - // Index kind of "Flat" to prevent the creation of an index. This is the default behavior. - // Otherwise, the index will be created with the specified index kind and distance function, if supported. - if (indexKind != IndexKind.Flat) - { - // Ensure the dimensionality of the vector is supported for indexing. - if (PostgresConstants.IndexMaxDimensions.TryGetValue(indexKind, out int maxDimensions) && vectorProperty.Dimensions > maxDimensions) + case VectorStoreRecordVectorPropertyModel vectorProperty: + var indexKind = vectorProperty.IndexKind ?? PostgresConstants.DefaultIndexKind; + var distanceFunction = vectorProperty.DistanceFunction ?? PostgresConstants.DefaultDistanceFunction; + + // Index kind of "Flat" to prevent the creation of an index. This is the default behavior. + // Otherwise, the index will be created with the specified index kind and distance function, if supported. + if (indexKind != IndexKind.Flat) { - throw new NotSupportedException( - $"The provided vector property {vectorProperty.DataModelPropertyName} has {vectorProperty.Dimensions} dimensions, " + - $"which is not supported by the {indexKind} index. The maximum number of dimensions supported by the {indexKind} index " + - $"is {maxDimensions}. Please reduce the number of dimensions or use a different index." - ); + // Ensure the dimensionality of the vector is supported for indexing. + if (PostgresConstants.IndexMaxDimensions.TryGetValue(indexKind, out int maxDimensions) && vectorProperty.Dimensions > maxDimensions) + { + throw new NotSupportedException( + $"The provided vector property {vectorProperty.ModelName} has {vectorProperty.Dimensions} dimensions, " + + $"which is not supported by the {indexKind} index. The maximum number of dimensions supported by the {indexKind} index " + + $"is {maxDimensions}. Please reduce the number of dimensions or use a different index." 
+ ); + } + + vectorIndexesToCreate.Add((vectorProperty.StorageName, indexKind, distanceFunction, isVector: true)); } - vectorIndexesToCreate.Add((columnName, indexKind, distanceFunction, isVector: true)); - } - } - else if (property is VectorStoreRecordDataProperty { IsFilterable: true }) - { - vectorIndexesToCreate.Add((columnName, "", "", isVector: false)); + break; + + case VectorStoreRecordDataPropertyModel dataProperty: + if (dataProperty.IsFilterable) + { + vectorIndexesToCreate.Add((dataProperty.StorageName, "", "", isVector: false)); + } + break; + + default: + throw new UnreachableException(); } } + return vectorIndexesToCreate; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs index ffd0333f0867..ce18426cfe57 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs @@ -10,6 +10,7 @@ using System.Reflection; using System.Runtime.CompilerServices; using Google.Protobuf.Collections; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Qdrant.Client.Grpc; using Range = Qdrant.Client.Grpc.Range; @@ -17,12 +18,12 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; internal class QdrantFilterTranslator { - private IReadOnlyDictionary _storagePropertyNames = null!; + private VectorStoreRecordModel _model = null!; private ParameterExpression _recordParameter = null!; - internal Filter Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + internal Filter Translate(LambdaExpression lambdaExpression, VectorStoreRecordModel model) { - this._storagePropertyNames = storagePropertyNames; + this._model = model; Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; @@ -351,11 +352,12 @@ private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] { if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) { - if (!this._storagePropertyNames.TryGetValue(memberExpression.Member.Name, out storagePropertyName)) + if (!this._model.PropertyMap.TryGetValue(memberExpression.Member.Name, out var property)) { throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); } + storagePropertyName = property.StorageName; return true; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantGenericDataModelMapper.cs deleted file mode 100644 index 5cce141d0223..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantGenericDataModelMapper.cs +++ /dev/null @@ -1,217 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using Microsoft.Extensions.VectorData; -using Qdrant.Client.Grpc; - -namespace Microsoft.SemanticKernel.Connectors.Qdrant; - -/// -/// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Qdrant. -/// -internal class QdrantGenericDataModelMapper : IVectorStoreRecordMapper, PointStruct>, IVectorStoreRecordMapper, PointStruct> -{ - /// A helper to access property information for the current data model and record definition. 
- private readonly VectorStoreRecordPropertyReader _propertyReader; - - /// A value indicating whether the vectors in the store are named, or whether there is just a single unnamed vector per qdrant point. - private readonly bool _hasNamedVectors; - - /// - /// Initializes a new instance of the class. - /// - /// A helper to access property information for the current data model and record definition. - /// A value indicating whether the vectors in the store are named, or whether there is just a single unnamed vector per qdrant point. - public QdrantGenericDataModelMapper( - VectorStoreRecordPropertyReader propertyReader, - bool hasNamedVectors) - { - Verify.NotNull(propertyReader); - - // Validate property types. - propertyReader.VerifyDataProperties(QdrantVectorStoreRecordFieldMapping.s_supportedDataTypes, supportEnumerable: true); - propertyReader.VerifyVectorProperties(QdrantVectorStoreRecordFieldMapping.s_supportedVectorTypes); - - // Assign. - this._propertyReader = propertyReader; - this._hasNamedVectors = hasNamedVectors; - } - - /// - public PointStruct MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) - { - // Create point. - var pointStruct = new PointStruct - { - Id = new PointId { Num = dataModel.Key }, - Vectors = new Vectors(), - Payload = { }, - }; - - // Loop through all properties and map each from the data model to the storage model. - MapProperties( - this._propertyReader.Properties, - dataModel.Data, - dataModel.Vectors, - pointStruct, - this._hasNamedVectors); - - return pointStruct; - } - - /// - public VectorStoreGenericDataModel MapFromStorageToDataModel(PointStruct storageModel, StorageToDataModelMapperOptions options) - { - var dataModel = new VectorStoreGenericDataModel(storageModel.Id.Num); - MapProperties(this._propertyReader.Properties, storageModel, dataModel.Data, dataModel.Vectors, this._hasNamedVectors); - return dataModel; - } - - /// - PointStruct IVectorStoreRecordMapper, PointStruct>.MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) - { - // Create point. - var pointStruct = new PointStruct - { - Id = new PointId { Uuid = dataModel.Key.ToString("D") }, - Vectors = new Vectors(), - Payload = { }, - }; - - // Loop through all properties and map each from the data model to the storage model. - MapProperties( - this._propertyReader.Properties, - dataModel.Data, - dataModel.Vectors, - pointStruct, - this._hasNamedVectors); - - return pointStruct; - } - - /// - VectorStoreGenericDataModel IVectorStoreRecordMapper, PointStruct>.MapFromStorageToDataModel(PointStruct storageModel, StorageToDataModelMapperOptions options) - { - var dataModel = new VectorStoreGenericDataModel(new Guid(storageModel.Id.Uuid)); - MapProperties(this._propertyReader.Properties, storageModel, dataModel.Data, dataModel.Vectors, this._hasNamedVectors); - return dataModel; - } - - /// - /// Map the payload and vector properties from the data model to the qdrant storage model. - /// - /// The list of properties to map. - /// The payload properties on the data model. - /// The vector properties on the data model. - /// The storage model to map to. - /// A value indicating whether qdrant is using named vectors for this collection. - /// Thrown if a vector on the data model is not a supported type. 
- private static void MapProperties(IEnumerable properties, Dictionary dataProperties, Dictionary vectorProperties, PointStruct pointStruct, bool hasNamedVectors) - { - if (hasNamedVectors) - { - pointStruct.Vectors.Vectors_ = new NamedVectors(); - } - - foreach (var property in properties) - { - if (property is VectorStoreRecordDataProperty dataProperty) - { - var storagePropertyName = dataProperty.StoragePropertyName ?? dataProperty.DataModelPropertyName; - - // Just skip this property if it's not in the data model. - if (!dataProperties.TryGetValue(dataProperty.DataModelPropertyName, out var propertyValue)) - { - continue; - } - - // Map. - pointStruct.Payload.Add(storagePropertyName, QdrantVectorStoreRecordFieldMapping.ConvertToGrpcFieldValue(propertyValue)); - } - else if (property is VectorStoreRecordVectorProperty vectorProperty) - { - var storagePropertyName = vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName; - - // Just skip this property if it's not in the data model. - if (!vectorProperties.TryGetValue(vectorProperty.DataModelPropertyName, out var vector)) - { - continue; - } - - // Validate. - if (vector is not ReadOnlyMemory floatROM) - { - throw new VectorStoreRecordMappingException($"Vector property '{vectorProperty.DataModelPropertyName}' on provided record of type {nameof(VectorStoreGenericDataModel)} must be of type ReadOnlyMemory and not null."); - } - - // Map. - if (hasNamedVectors) - { - pointStruct.Vectors.Vectors_.Vectors.Add(storagePropertyName, floatROM.ToArray()); - } - else - { - pointStruct.Vectors.Vector = floatROM.ToArray(); - } - } - } - } - - /// - /// Map the payload and vector properties from the qdrant storage model to the data model. - /// - /// The list of properties to map. - /// The storage model to map from. - /// The payload properties on the data model. - /// The vector properties on the data model. - /// A value indicating whether qdrant is using named vectors for this collection. - public static void MapProperties(IEnumerable properties, PointStruct storageModel, Dictionary dataProperties, Dictionary vectorProperties, bool hasNamedVectors) - { - foreach (var property in properties) - { - if (property is VectorStoreRecordDataProperty dataProperty) - { - var storagePropertyName = dataProperty.StoragePropertyName ?? dataProperty.DataModelPropertyName; - - // Just skip this property if it's not in the storage model. - if (!storageModel.Payload.TryGetValue(storagePropertyName, out var propertyValue)) - { - continue; - } - - if (propertyValue.HasNullValue) - { - // Shortcut any null handling here so we don't have to check for it for each case. - dataProperties[dataProperty.DataModelPropertyName] = null; - } - else - { - var convertedValue = QdrantVectorStoreRecordFieldMapping.ConvertFromGrpcFieldValueToNativeType(propertyValue, dataProperty.PropertyType); - dataProperties[dataProperty.DataModelPropertyName] = convertedValue; - } - } - else if (property is VectorStoreRecordVectorProperty vectorProperty) - { - Vector? vector; - if (hasNamedVectors) - { - var storagePropertyName = vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName; - - // Just skip this property if it's not in the storage model. 
- if (!storageModel.Vectors.Vectors_.Vectors.TryGetValue(storagePropertyName, out vector)) - { - continue; - } - } - else - { - vector = storageModel.Vectors.Vector; - } - - vectorProperties[vectorProperty.DataModelPropertyName] = new ReadOnlyMemory(vector.Data.ToArray()); - } - } - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionCreateMapping.cs index ece189e61d75..1a193b0715a7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionCreateMapping.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Qdrant.Client.Grpc; namespace Microsoft.SemanticKernel.Connectors.Qdrant; @@ -55,16 +56,16 @@ internal static class QdrantVectorStoreCollectionCreateMapping /// The property to map. /// The mapped . /// Thrown if the property is missing information or has unsupported options specified. - public static VectorParams MapSingleVector(VectorStoreRecordVectorProperty vectorProperty) + public static VectorParams MapSingleVector(VectorStoreRecordVectorPropertyModel vectorProperty) { if (vectorProperty!.Dimensions is not > 0) { - throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' must be set to a positive integer to create a collection."); + throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' must be set to a positive integer to create a collection."); } if (vectorProperty!.IndexKind is not null && vectorProperty!.IndexKind != IndexKind.Hnsw) { - throw new InvalidOperationException($"Index kind '{vectorProperty!.IndexKind}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' is not supported by the Qdrant VectorStore."); + throw new InvalidOperationException($"Index kind '{vectorProperty!.IndexKind}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' is not supported by the Qdrant VectorStore."); } return new VectorParams { Size = (ulong)vectorProperty.Dimensions, Distance = QdrantVectorStoreCollectionCreateMapping.GetSDKDistanceAlgorithm(vectorProperty) }; @@ -74,21 +75,16 @@ public static VectorParams MapSingleVector(VectorStoreRecordVectorProperty vecto /// Maps a collection of to a qdrant . /// /// The properties to map. - /// The mapping of property names to storage names. /// THe mapped . /// Thrown if the property is missing information or has unsupported options specified. - public static VectorParamsMap MapNamedVectors(IEnumerable vectorProperties, IReadOnlyDictionary storagePropertyNames) + public static VectorParamsMap MapNamedVectors(IEnumerable vectorProperties) { var vectorParamsMap = new VectorParamsMap(); foreach (var vectorProperty in vectorProperties) { - var storageName = storagePropertyNames[vectorProperty.DataModelPropertyName]; - // Add each vector property to the vectors map. 
- vectorParamsMap.Map.Add( - storageName, - MapSingleVector(vectorProperty)); + vectorParamsMap.Map.Add(vectorProperty.StorageName, MapSingleVector(vectorProperty)); } return vectorParamsMap; @@ -101,20 +97,14 @@ public static VectorParamsMap MapNamedVectors(IEnumerableThe vector property definition. /// The chosen . /// Thrown if a distance function is chosen that isn't supported by qdrant. - public static Distance GetSDKDistanceAlgorithm(VectorStoreRecordVectorProperty vectorProperty) - { - if (vectorProperty.DistanceFunction is null) + public static Distance GetSDKDistanceAlgorithm(VectorStoreRecordVectorPropertyModel vectorProperty) + => vectorProperty.DistanceFunction switch { - return Distance.Cosine; - } - - return vectorProperty.DistanceFunction switch - { - DistanceFunction.CosineSimilarity => Distance.Cosine, + DistanceFunction.CosineSimilarity or null => Distance.Cosine, DistanceFunction.DotProductSimilarity => Distance.Dot, DistanceFunction.EuclideanDistance => Distance.Euclid, DistanceFunction.ManhattanDistance => Distance.Manhattan, - _ => throw new InvalidOperationException($"Distance function '{vectorProperty.DistanceFunction}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' is not supported by the Qdrant VectorStore.") + + _ => throw new InvalidOperationException($"Distance function '{vectorProperty.DistanceFunction}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' is not supported by the Qdrant VectorStore.") }; - } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs index 15a95467672f..6991ce782e58 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Qdrant.Client.Grpc; namespace Microsoft.SemanticKernel.Connectors.Qdrant; @@ -17,10 +18,10 @@ internal static class QdrantVectorStoreCollectionSearchMapping /// Build a Qdrant from the provided . /// /// The to build a Qdrant from. - /// A mapping of data model property names to the names under which they are stored. + /// The model. /// The Qdrant . /// Thrown when the provided filter contains unsupported types, values or unknown properties. - public static Filter BuildFromLegacyFilter(VectorSearchFilter basicVectorSearchFilter, IReadOnlyDictionary storagePropertyNames) + public static Filter BuildFromLegacyFilter(VectorSearchFilter basicVectorSearchFilter, VectorStoreRecordModel model) { var filter = new Filter(); @@ -47,7 +48,7 @@ public static Filter BuildFromLegacyFilter(VectorSearchFilter basicVectorSearchF } // Get the storage name for the field. 
- if (!storagePropertyNames.TryGetValue(fieldName, out var storagePropertyName)) + if (!model.PropertyMap.TryGetValue(fieldName, out var property)) { throw new InvalidOperationException($"Property name '{fieldName}' provided as part of the filter clause is not a valid property name."); } @@ -65,7 +66,7 @@ public static Filter BuildFromLegacyFilter(VectorSearchFilter basicVectorSearchF Lte = Google.Protobuf.WellKnownTypes.Timestamp.FromDateTimeOffset(dateTimeOffset), }; - filter.Must.Add(new Condition() { Field = new FieldCondition() { Key = storagePropertyName, DatetimeRange = range } }); + filter.Must.Add(new Condition() { Field = new FieldCondition() { Key = property.StorageName, DatetimeRange = range } }); continue; } @@ -79,7 +80,7 @@ public static Filter BuildFromLegacyFilter(VectorSearchFilter basicVectorSearchF _ => throw new InvalidOperationException($"Unsupported filter value type '{filterValue.GetType().Name}'.") }; - filter.Must.Add(new Condition() { Field = new FieldCondition() { Key = storagePropertyName, Match = match } }); + filter.Must.Add(new Condition() { Field = new FieldCondition() { Key = property.StorageName, Match = match } }); } return filter; diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index d32713c4111c..e2f2d2bf37b8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -9,6 +9,7 @@ using System.Threading.Tasks; using Grpc.Core; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Qdrant.Client; using Qdrant.Client.Grpc; @@ -25,13 +26,6 @@ public class QdrantVectorStoreRecordCollection : IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { - /// A set of types that a key on the provided model may have. - private static readonly HashSet s_supportedKeyTypes = - [ - typeof(ulong), - typeof(Guid) - ]; - /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -56,8 +50,8 @@ public class QdrantVectorStoreRecordCollection : /// Optional configuration options for this class. private readonly QdrantVectorStoreRecordCollectionOptions _options; - /// A helper to access property information for the current data model and record definition. - private readonly VectorStoreRecordPropertyReader _propertyReader; + /// The model for this collection. + private readonly VectorStoreRecordModel _model; /// A mapper to use for converting between qdrant point and consumer models. private readonly IVectorStoreRecordMapper _mapper; @@ -88,46 +82,16 @@ internal QdrantVectorStoreRecordCollection(MockableQdrantClient qdrantClient, st // Verify. Verify.NotNull(qdrantClient); Verify.NotNullOrWhiteSpace(collectionName); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.PointStructCustomMapper is not null, s_supportedKeyTypes); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); // Assign. this._qdrantClient = qdrantClient; this._collectionName = collectionName; this._options = options ?? 
new QdrantVectorStoreRecordCollectionOptions(); - this._propertyReader = new VectorStoreRecordPropertyReader( - typeof(TRecord), - this._options.VectorStoreRecordDefinition, - new() - { - RequiresAtLeastOneVector = !this._options.HasNamedVectors, - SupportsMultipleKeys = false, - SupportsMultipleVectors = this._options.HasNamedVectors - }); - // Validate property types. - this._propertyReader.VerifyKeyProperties(s_supportedKeyTypes); + this._model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(this._options.HasNamedVectors)) + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); - // Assign Mapper. - if (this._options.PointStructCustomMapper is not null) - { - // Custom Mapper. - this._mapper = this._options.PointStructCustomMapper; - } - else if (typeof(TRecord) == typeof(VectorStoreGenericDataModel) || typeof(TRecord) == typeof(VectorStoreGenericDataModel)) - { - // Generic data model mapper. - this._mapper = (IVectorStoreRecordMapper)new QdrantGenericDataModelMapper( - this._propertyReader, - this._options.HasNamedVectors); - } - else - { - // Default Mapper. - this._mapper = new QdrantVectorStoreRecordMapper( - this._propertyReader, - this._options.HasNamedVectors); - } + this._mapper = this._options.PointStructCustomMapper ?? new QdrantVectorStoreRecordMapper(this._model, this._options.HasNamedVectors); } /// @@ -147,7 +111,7 @@ public virtual async Task CreateCollectionAsync(CancellationToken cancellationTo if (!this._options.HasNamedVectors) { // If we are not using named vectors, we can only have one vector property. We can assume we have exactly one, since this is already verified in the constructor. - var singleVectorProperty = this._propertyReader.VectorProperty; + var singleVectorProperty = this._model.VectorProperty; // Map the single vector property to the qdrant config. var vectorParams = QdrantVectorStoreCollectionCreateMapping.MapSingleVector(singleVectorProperty!); @@ -163,10 +127,10 @@ await this.RunOperationAsync( else { // Since we are using named vectors, iterate over all vector properties. - var vectorProperties = this._propertyReader.VectorProperties; + var vectorProperties = this._model.VectorProperties; // Map the named vectors to the qdrant config. - var vectorParamsMap = QdrantVectorStoreCollectionCreateMapping.MapNamedVectors(vectorProperties, this._propertyReader.StoragePropertyNamesMap); + var vectorParamsMap = QdrantVectorStoreCollectionCreateMapping.MapNamedVectors(vectorProperties); // Create the collection with named vectors. await this.RunOperationAsync( @@ -178,50 +142,48 @@ await this.RunOperationAsync( } // Add indexes for each of the data properties that require filtering. - var dataProperties = this._propertyReader.DataProperties.Where(x => x.IsFilterable); + var dataProperties = this._model.DataProperties.Where(x => x.IsFilterable); foreach (var dataProperty in dataProperties) { - var storageFieldName = this._propertyReader.GetStoragePropertyName(dataProperty.DataModelPropertyName); - - if (QdrantVectorStoreCollectionCreateMapping.s_schemaTypeMap.TryGetValue(dataProperty.PropertyType!, out PayloadSchemaType schemaType)) + if (QdrantVectorStoreCollectionCreateMapping.s_schemaTypeMap.TryGetValue(dataProperty.Type, out PayloadSchemaType schemaType)) { // Do nothing since schemaType is already set. 
} - else if (VectorStoreRecordPropertyVerification.IsSupportedEnumerableType(dataProperty.PropertyType) && VectorStoreRecordPropertyVerification.GetCollectionElementType(dataProperty.PropertyType) == typeof(string)) + else if (VectorStoreRecordPropertyVerification.IsSupportedEnumerableType(dataProperty.Type) && VectorStoreRecordPropertyVerification.GetCollectionElementType(dataProperty.Type) == typeof(string)) { // For enumerable of strings, use keyword schema type, since this allows tag filtering. schemaType = PayloadSchemaType.Keyword; } else { - throw new InvalidOperationException($"Property {nameof(VectorStoreRecordDataProperty.IsFilterable)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.DataModelPropertyName}' is set to true, but the property type is not supported for filtering. The Qdrant VectorStore supports filtering on {string.Join(", ", QdrantVectorStoreCollectionCreateMapping.s_schemaTypeMap.Keys.Select(x => x.Name))} properties only."); + // TODO: This should move to model validation + throw new InvalidOperationException($"Property {nameof(VectorStoreRecordDataProperty.IsFilterable)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.ModelName}' is set to true, but the property type is not supported for filtering. The Qdrant VectorStore supports filtering on {string.Join(", ", QdrantVectorStoreCollectionCreateMapping.s_schemaTypeMap.Keys.Select(x => x.Name))} properties only."); } await this.RunOperationAsync( "CreatePayloadIndex", () => this._qdrantClient.CreatePayloadIndexAsync( this._collectionName, - storageFieldName, + dataProperty.StorageName, schemaType, cancellationToken: cancellationToken)).ConfigureAwait(false); } // Add indexes for each of the data properties that require full text search. - dataProperties = this._propertyReader.DataProperties.Where(x => x.IsFullTextSearchable); + dataProperties = this._model.DataProperties.Where(x => x.IsFullTextSearchable); foreach (var dataProperty in dataProperties) { - if (dataProperty.PropertyType != typeof(string)) + // TODO: This should move to model validation + if (dataProperty.Type != typeof(string)) { - throw new InvalidOperationException($"Property {nameof(dataProperty.IsFullTextSearchable)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.DataModelPropertyName}' is set to true, but the property type is not a string. The Qdrant VectorStore supports {nameof(dataProperty.IsFullTextSearchable)} on string properties only."); + throw new InvalidOperationException($"Property {nameof(dataProperty.IsFullTextSearchable)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.ModelName}' is set to true, but the property type is not a string. 
The Qdrant VectorStore supports {nameof(dataProperty.IsFullTextSearchable)} on string properties only."); } - var storageFieldName = this._propertyReader.GetStoragePropertyName(dataProperty.DataModelPropertyName); - await this.RunOperationAsync( "CreatePayloadIndex", () => this._qdrantClient.CreatePayloadIndexAsync( this._collectionName, - storageFieldName, + dataProperty.StorageName, PayloadSchemaType.Text, cancellationToken: cancellationToken)).ConfigureAwait(false); } @@ -485,26 +447,19 @@ public virtual async Task> VectorizedSearchAsync throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), - { OldFilter: VectorSearchFilter legacyFilter } => QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(legacyFilter, this._propertyReader.StoragePropertyNamesMap), - { Filter: Expression> newFilter } => new QdrantFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), + { OldFilter: VectorSearchFilter legacyFilter } => QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(legacyFilter, this._model), + { Filter: Expression> newFilter } => new QdrantFilterTranslator().Translate(newFilter, this._model), _ => new Filter() }; #pragma warning restore CS0618 // Type or member is obsolete - // Specify the vector name if named vectors are used. - string? vectorName = null; - if (this._options.HasNamedVectors) - { - vectorName = this._propertyReader.GetStoragePropertyName(vectorProperty.DataModelPropertyName); - } - // Specify whether to include vectors in the search results. var vectorsSelector = new WithVectorsSelector(); vectorsSelector.Enable = internalOptions.IncludeVectors; @@ -520,7 +475,7 @@ public virtual async Task> VectorizedSearchAsync this._qdrantClient.QueryAsync( this.CollectionName, query: query, - usingVector: vectorName, + usingVector: this._options.HasNamedVectors ? vectorProperty.StorageName : null, filter: filter, limit: (ulong)internalOptions.Top, offset: (ulong)internalOptions.Skip, @@ -546,9 +501,8 @@ public async Task> HybridSearchAsync(TVect // Resolve options. var internalOptions = options ?? s_defaultKeywordVectorizedHybridSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(new() { VectorProperty = internalOptions.VectorProperty }); - var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(internalOptions.AdditionalProperty); - var textDataPropertyName = this._propertyReader.GetStoragePropertyName(textDataProperty.DataModelPropertyName); + var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = internalOptions.VectorProperty }); + var textDataProperty = this._model.GetFullTextDataPropertyOrSingle(internalOptions.AdditionalProperty); // Build filter object. 
#pragma warning disable CS0618 // Type or member is obsolete @@ -556,19 +510,12 @@ public async Task> HybridSearchAsync(TVect var filter = internalOptions switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), - { OldFilter: VectorSearchFilter legacyFilter } => QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(legacyFilter, this._propertyReader.StoragePropertyNamesMap), - { Filter: Expression> newFilter } => new QdrantFilterTranslator().Translate(newFilter, this._propertyReader.StoragePropertyNamesMap), + { OldFilter: VectorSearchFilter legacyFilter } => QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(legacyFilter, this._model), + { Filter: Expression> newFilter } => new QdrantFilterTranslator().Translate(newFilter, this._model), _ => new Filter() }; #pragma warning restore CS0618 // Type or member is obsolete - // Specify the vector name if named vectors are used. - string? vectorName = null; - if (this._options.HasNamedVectors) - { - vectorName = this._propertyReader.GetStoragePropertyName(vectorProperty.DataModelPropertyName); - } - // Specify whether to include vectors in the search results. var vectorsSelector = new WithVectorsSelector(); vectorsSelector.Enable = internalOptions.IncludeVectors; @@ -585,7 +532,7 @@ public async Task> HybridSearchAsync(TVect if (this._options.HasNamedVectors) { - vectorQuery.Using = vectorName; + vectorQuery.Using = this._options.HasNamedVectors ? vectorProperty.StorageName : null; } // Build the keyword query. @@ -593,7 +540,7 @@ public async Task> HybridSearchAsync(TVect var keywordSubFilter = new Filter(); foreach (string keyword in keywords) { - keywordSubFilter.Should.Add(new Condition() { Field = new FieldCondition() { Key = textDataPropertyName, Match = new Match { Text = keyword } } }); + keywordSubFilter.Should.Add(new Condition() { Field = new FieldCondition() { Key = textDataProperty.StorageName, Match = new Match { Text = keyword } } }); } keywordFilter.Must.Add(new Condition() { Filter = keywordSubFilter }); var keywordQuery = new PrefetchQuery diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordFieldMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordFieldMapping.cs index ba125f52edbf..fd92f6494035 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordFieldMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordFieldMapping.cs @@ -5,6 +5,7 @@ using System.Collections.Generic; using System.Linq; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Qdrant.Client.Grpc; namespace Microsoft.SemanticKernel.Connectors.Qdrant; @@ -14,6 +15,19 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// internal static class QdrantVectorStoreRecordFieldMapping { + public static VectorStoreRecordModelBuildingOptions GetModelBuildOptions(bool hasNamedVectors) + => new() + { + RequiresAtLeastOneVector = !hasNamedVectors, + SupportsMultipleKeys = false, + SupportsMultipleVectors = hasNamedVectors, + + SupportedKeyPropertyTypes = [typeof(ulong), typeof(Guid)], + SupportedDataPropertyTypes = QdrantVectorStoreRecordFieldMapping.s_supportedDataTypes, + SupportedEnumerableDataPropertyElementTypes = QdrantVectorStoreRecordFieldMapping.s_supportedDataTypes, + SupportedVectorPropertyTypes = QdrantVectorStoreRecordFieldMapping.s_supportedVectorTypes + }; + /// A set of types 
that data properties on the provided model may have. public static readonly HashSet s_supportedDataTypes = [ @@ -24,14 +38,7 @@ internal static class QdrantVectorStoreRecordFieldMapping typeof(float), typeof(bool), typeof(DateTime), - typeof(DateTimeOffset), - typeof(int?), - typeof(long?), - typeof(double?), - typeof(float?), - typeof(bool?), - typeof(DateTime?), - typeof(DateTimeOffset?), + typeof(DateTimeOffset) ]; /// A set of types that vectors on the provided model may have. diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs index 4b2963c464d7..2929b455fd13 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs @@ -1,8 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Diagnostics; using System.Linq; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Qdrant.Client.Grpc; namespace Microsoft.SemanticKernel.Connectors.Qdrant; @@ -11,54 +13,27 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// Mapper between a Qdrant record and the consumer data model that uses json as an intermediary to allow supporting a wide range of models. /// /// The consumer data model to map to or from. -internal sealed class QdrantVectorStoreRecordMapper : IVectorStoreRecordMapper +internal sealed class QdrantVectorStoreRecordMapper(VectorStoreRecordModel model, bool hasNamedVectors) + : IVectorStoreRecordMapper { - /// A helper to access property information for the current data model and record definition. - private readonly VectorStoreRecordPropertyReader _propertyReader; - - /// A value indicating whether the vectors in the store are named, or whether there is just a single unnamed vector per qdrant point. - private readonly bool _hasNamedVectors; - - /// - /// Initializes a new instance of the class. - /// - /// A helper to access property information for the current data model and record definition. - /// A value indicating whether the vectors in the store are named, or whether there is just a single unnamed vector per qdrant point. - public QdrantVectorStoreRecordMapper( - VectorStoreRecordPropertyReader propertyReader, - bool hasNamedVectors) - { - Verify.NotNull(propertyReader); - - // Validate property types. - propertyReader.VerifyHasParameterlessConstructor(); - propertyReader.VerifyDataProperties(QdrantVectorStoreRecordFieldMapping.s_supportedDataTypes, supportEnumerable: true); - propertyReader.VerifyVectorProperties(QdrantVectorStoreRecordFieldMapping.s_supportedVectorTypes); - - // Assign. - this._propertyReader = propertyReader; - this._hasNamedVectors = hasNamedVectors; - } - /// public PointStruct MapFromDataToStorageModel(TRecord dataModel) { - PointId pointId; - var keyPropertyInfo = this._propertyReader.KeyPropertyInfo; - if (keyPropertyInfo.PropertyType == typeof(ulong)) - { - var key = keyPropertyInfo.GetValue(dataModel) as ulong? ?? throw new VectorStoreRecordMappingException($"Missing key property {keyPropertyInfo.Name} on provided record of type {typeof(TRecord).FullName}."); - pointId = new PointId { Num = key }; - } - else if (keyPropertyInfo.PropertyType == typeof(Guid)) - { - var key = keyPropertyInfo.GetValue(dataModel) as Guid? ?? 
throw new VectorStoreRecordMappingException($"Missing key property {keyPropertyInfo.Name} on provided record of type {typeof(TRecord).FullName}."); - pointId = new PointId { Uuid = key.ToString("D") }; - } - else + var keyProperty = model.KeyProperty; + + var pointId = keyProperty.Type switch { - throw new VectorStoreRecordMappingException($"Unsupported key type {keyPropertyInfo.PropertyType.FullName} for key property {keyPropertyInfo.Name} on provided record of type {typeof(TRecord).FullName}."); - } + var t when t == typeof(ulong) => new PointId + { + Num = (ulong?)keyProperty.GetValueAsObject(dataModel!) ?? throw new VectorStoreRecordMappingException($"Missing key property '{keyProperty.ModelName}' on provided record of type '{typeof(TRecord).Name}'.") + }, + + var t when t == typeof(Guid) => new PointId + { + Uuid = ((Guid?)keyProperty.GetValueAsObject(dataModel!))?.ToString("D") ?? throw new VectorStoreRecordMappingException($"Missing key property '{keyProperty.ModelName}' on provided record of type '{typeof(TRecord).Name}'.") + }, + _ => throw new VectorStoreRecordMappingException($"Unsupported key type '{keyProperty.Type.Name}' for key property '{keyProperty.ModelName}' on provided record of type '{typeof(TRecord).Name}'.") + }; // Create point. var pointStruct = new PointStruct @@ -69,25 +44,23 @@ public PointStruct MapFromDataToStorageModel(TRecord dataModel) }; // Add point payload. - foreach (var dataPropertyInfo in this._propertyReader.DataPropertiesInfo) + foreach (var property in model.DataProperties) { - var propertyName = this._propertyReader.GetStoragePropertyName(dataPropertyInfo.Name); - var propertyValue = dataPropertyInfo.GetValue(dataModel); - pointStruct.Payload.Add(propertyName, QdrantVectorStoreRecordFieldMapping.ConvertToGrpcFieldValue(propertyValue)); + var propertyValue = property.GetValueAsObject(dataModel!); + pointStruct.Payload.Add(property.StorageName, QdrantVectorStoreRecordFieldMapping.ConvertToGrpcFieldValue(propertyValue)); } // Add vectors. - if (this._hasNamedVectors) + if (hasNamedVectors) { var namedVectors = new NamedVectors(); - foreach (var vectorPropertyInfo in this._propertyReader.VectorPropertiesInfo) + foreach (var property in model.VectorProperties) { - var propertyName = this._propertyReader.GetStoragePropertyName(vectorPropertyInfo.Name); - var propertyValue = vectorPropertyInfo.GetValue(dataModel); + var propertyValue = property.GetValueAsObject(dataModel!); if (propertyValue is not null) { var castPropertyValue = (ReadOnlyMemory)propertyValue; - namedVectors.Vectors.Add(propertyName, castPropertyValue.ToArray()); + namedVectors.Vectors.Add(property.StorageName, castPropertyValue.ToArray()); } } @@ -96,14 +69,14 @@ public PointStruct MapFromDataToStorageModel(TRecord dataModel) else { // We already verified in the constructor via FindProperties that there is exactly one vector property when not using named vectors. - var vectorPropertyInfo = this._propertyReader.FirstVectorPropertyInfo!; - if (vectorPropertyInfo.GetValue(dataModel) is ReadOnlyMemory floatROM) + var property = model.VectorProperty; + if (property.GetValueAsObject(dataModel!) 
is ReadOnlyMemory floatROM) { pointStruct.Vectors.Vector = floatROM.ToArray(); } else { - throw new VectorStoreRecordMappingException($"Vector property {vectorPropertyInfo.Name} on provided record of type {typeof(TRecord).FullName} may not be null when not using named vectors."); + throw new VectorStoreRecordMappingException($"Vector property '{property.ModelName}' on provided record of type '{typeof(TRecord).Name}' may not be null when not using named vectors."); } } @@ -113,42 +86,49 @@ public PointStruct MapFromDataToStorageModel(TRecord dataModel) /// public TRecord MapFromStorageToDataModel(PointStruct storageModel, StorageToDataModelMapperOptions options) { - // Get the key property name and value. - var keyPropertyValue = storageModel.Id.HasNum ? storageModel.Id.Num as object : new Guid(storageModel.Id.Uuid) as object; + var outputRecord = model.CreateRecord()!; - // Construct the output record. - var outputRecord = (TRecord)this._propertyReader.ParameterLessConstructorInfo.Invoke(null); - - // Set Key - this._propertyReader.KeyPropertyInfo.SetValue(outputRecord, keyPropertyValue); + // TODO: Set the following generically to avoid boxing + model.KeyProperty.SetValueAsObject(outputRecord, storageModel.Id switch + { + { HasNum: true } => storageModel.Id.Num, + { HasUuid: true } => Guid.Parse(storageModel.Id.Uuid), + _ => throw new UnreachableException() + }); // Set each vector property if embeddings are included in the point. if (options?.IncludeVectors is true) { - if (this._hasNamedVectors) + if (hasNamedVectors) { - VectorStoreRecordMapping.SetValuesOnProperties( - outputRecord, - this._propertyReader.VectorPropertiesInfo, - this._propertyReader.StoragePropertyNamesMap, - storageModel.Vectors.Vectors_.Vectors, - (Vector vector, Type targetType) => new ReadOnlyMemory(vector.Data.ToArray())); + var storageVectors = storageModel.Vectors.Vectors_.Vectors; + + foreach (var vectorProperty in model.VectorProperties) + { + vectorProperty.SetValueAsObject( + outputRecord, + new ReadOnlyMemory(storageVectors[vectorProperty.StorageName].Data.ToArray())); + } } else { - this._propertyReader.FirstVectorPropertyInfo!.SetValue( + model.VectorProperty.SetValueAsObject( outputRecord, new ReadOnlyMemory(storageModel.Vectors.Vector.Data.ToArray())); } } - // Set each data property. 
- VectorStoreRecordMapping.SetValuesOnProperties( - outputRecord, - this._propertyReader.DataPropertiesInfo, - this._propertyReader.StoragePropertyNamesMap, - storageModel.Payload, - QdrantVectorStoreRecordFieldMapping.ConvertFromGrpcFieldValueToNativeType); + var payload = storageModel.Payload; + + foreach (var dataProperty in model.DataProperties) + { + if (payload.TryGetValue(dataProperty.StorageName, out var fieldValue)) + { + dataProperty.SetValueAsObject( + outputRecord, + QdrantVectorStoreRecordFieldMapping.ConvertFromGrpcFieldValueToNativeType(fieldValue, dataProperty.Type)); + } + } return outputRecord; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs index bd8d833f75f8..695c2b2b1700 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs @@ -9,20 +9,21 @@ using System.Reflection; using System.Runtime.CompilerServices; using System.Text; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Redis; internal class RedisFilterTranslator { - private IReadOnlyDictionary _storagePropertyNames = null!; + private VectorStoreRecordModel _model = null!; private ParameterExpression _recordParameter = null!; private readonly StringBuilder _filter = new(); - internal string Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + internal string Translate(LambdaExpression lambdaExpression, VectorStoreRecordModel model) { Debug.Assert(this._filter.Length == 0); - this._storagePropertyNames = storagePropertyNames; + this._model = model; Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; @@ -195,11 +196,12 @@ private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] { if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) { - if (!this._storagePropertyNames.TryGetValue(memberExpression.Member.Name, out storagePropertyName)) + if (!this._model.PropertyMap.TryGetValue(memberExpression.Member.Name, out var property)) { throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); } + storagePropertyName = property.StorageName; return true; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetGenericDataModelMapper.cs index c4676976db9d..2ed9a25ce0d2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetGenericDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetGenericDataModelMapper.cs @@ -12,7 +12,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Redis when using hash sets. /// -internal class RedisHashSetGenericDataModelMapper : IVectorStoreRecordMapper, (string Key, HashEntry[] HashEntries)> +internal sealed class RedisHashSetGenericDataModelMapper : IVectorStoreRecordMapper, (string Key, HashEntry[] HashEntries)> { /// All the properties from the record definition. 
private readonly IReadOnlyList _properties; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index 650bbce15586..74d5d0930f2a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -7,6 +7,7 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using NRedisStack.RedisStackCommands; using NRedisStack.Search; using NRedisStack.Search.Literals.Enums; @@ -25,31 +26,30 @@ public class RedisHashSetVectorStoreRecordCollection : IVectorStoreReco /// The name of this database for telemetry purposes. private const string DatabaseName = "Redis"; - /// A set of types that a key on the provided model may have. - private static readonly HashSet s_supportedKeyTypes = - [ - typeof(string) - ]; + internal static readonly VectorStoreRecordModelBuildingOptions ModelBuildingOptions = new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, - /// A set of types that data properties on the provided model may have. - private static readonly HashSet s_supportedDataTypes = - [ - typeof(string), - typeof(int), - typeof(uint), - typeof(long), - typeof(ulong), - typeof(double), - typeof(float), - typeof(bool), - typeof(int?), - typeof(uint?), - typeof(long?), - typeof(ulong?), - typeof(double?), - typeof(float?), - typeof(bool?) - ]; + SupportedKeyPropertyTypes = [typeof(string)], + + SupportedDataPropertyTypes = + [ + typeof(string), + typeof(int), + typeof(uint), + typeof(long), + typeof(ulong), + typeof(double), + typeof(float), + typeof(bool) + ], + + SupportedEnumerableDataPropertyElementTypes = [], + + SupportedVectorPropertyTypes = s_supportedVectorTypes + }; /// A set of types that vectors on the provided model may have. private static readonly HashSet s_supportedVectorTypes = @@ -72,8 +72,8 @@ public class RedisHashSetVectorStoreRecordCollection : IVectorStoreReco /// Optional configuration options for this class. private readonly RedisHashSetVectorStoreRecordCollectionOptions _options; - /// A helper to access property information for the current data model and record definition. - private readonly VectorStoreRecordPropertyReader _propertyReader; + /// The model. + private readonly VectorStoreRecordModel _model; /// An array of the names of all the data properties that are part of the Redis payload as RedisValue objects, i.e. all properties except the key and vector properties. private readonly RedisValue[] _dataStoragePropertyNameRedisValues; @@ -96,51 +96,19 @@ public RedisHashSetVectorStoreRecordCollection(IDatabase database, string collec // Verify. Verify.NotNull(database); Verify.NotNullOrWhiteSpace(collectionName); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.HashEntriesCustomMapper is not null, s_supportedKeyTypes); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); // Assign. this._database = database; this._collectionName = collectionName; this._options = options ?? 
new RedisHashSetVectorStoreRecordCollectionOptions(); - this._propertyReader = new VectorStoreRecordPropertyReader( - typeof(TRecord), - this._options.VectorStoreRecordDefinition, - new() - { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true - }); - - // Validate property types. - this._propertyReader.VerifyKeyProperties(s_supportedKeyTypes); - this._propertyReader.VerifyDataProperties(s_supportedDataTypes, supportEnumerable: false); - this._propertyReader.VerifyVectorProperties(s_supportedVectorTypes); + this._model = new VectorStoreRecordModelBuilder(ModelBuildingOptions).Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); // Lookup storage property names. - this._dataStoragePropertyNameRedisValues = this._propertyReader.DataPropertyStoragePropertyNames - .Select(RedisValue.Unbox) - .ToArray(); - - this._dataStoragePropertyNamesWithScore = [.. this._propertyReader.DataPropertyStoragePropertyNames, "vector_score"]; + this._dataStoragePropertyNameRedisValues = this._model.DataProperties.Select(p => RedisValue.Unbox(p.StorageName)).ToArray(); + this._dataStoragePropertyNamesWithScore = [.. this._model.DataProperties.Select(p => p.StorageName), "vector_score"]; // Assign Mapper. - if (this._options.HashEntriesCustomMapper is not null) - { - // Custom Mapper. - this._mapper = this._options.HashEntriesCustomMapper; - } - else if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) - { - // Generic data model mapper. - this._mapper = (IVectorStoreRecordMapper)new RedisHashSetGenericDataModelMapper(this._propertyReader.Properties); - } - else - { - // Default Mapper. - this._mapper = new RedisHashSetVectorStoreRecordMapper(this._propertyReader); - } + this._mapper = this._options.HashEntriesCustomMapper ?? new RedisHashSetVectorStoreRecordMapper(this._model); } /// @@ -173,7 +141,7 @@ public virtual async Task CollectionExistsAsync(CancellationToken cancella public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) { // Map the record definition to a schema. - var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(this._propertyReader.Properties, this._propertyReader.StoragePropertyNamesMap, useDollarPrefix: false); + var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(this._model.Properties, useDollarPrefix: false); // Create the index creation params. // Add the collection name and colon as the index prefix, which means that any record where the key is prefixed with this text will be indexed by this index @@ -350,7 +318,7 @@ public virtual async Task> VectorizedSearchAsync> VectorizedSearchAsync> VectorizedSearchAsync { - var retrievedHashEntries = this._propertyReader.DataPropertyStoragePropertyNames - .Concat(this._propertyReader.VectorPropertyStoragePropertyNames) + var retrievedHashEntries = this._model.DataProperties.Select(p => p.StorageName) + .Concat(this._model.VectorProperties.Select(p => p.StorageName)) .Select(propertyName => new HashEntry(propertyName, result[propertyName])) .ToArray(); @@ -386,7 +354,7 @@ public virtual async Task> VectorizedSearchAsync /// The consumer data model to map to or from. -internal sealed class RedisHashSetVectorStoreRecordMapper : IVectorStoreRecordMapper +internal sealed class RedisHashSetVectorStoreRecordMapper(VectorStoreRecordModel model) + : IVectorStoreRecordMapper { - /// A helper to access property information for the current data model and record definition. 
- private readonly VectorStoreRecordPropertyReader _propertyReader; - - /// - /// Initializes a new instance of the class. - /// - /// A helper to access property information for the current data model and record definition. - public RedisHashSetVectorStoreRecordMapper( - VectorStoreRecordPropertyReader propertyReader) - { - Verify.NotNull(propertyReader); - - propertyReader.VerifyHasParameterlessConstructor(); - - this._propertyReader = propertyReader; - } - /// public (string Key, HashEntry[] HashEntries) MapFromDataToStorageModel(TConsumerDataModel dataModel) { - var keyValue = this._propertyReader.KeyPropertyInfo.GetValue(dataModel) as string ?? - throw new VectorStoreRecordMappingException($"Missing key property {this._propertyReader.KeyPropertyName} on provided record of type {typeof(TConsumerDataModel).FullName}."); + var keyValue = model.KeyProperty.GetValueAsObject(dataModel!) as string ?? + throw new VectorStoreRecordMappingException($"Missing key property {model.KeyProperty.ModelName} on provided record of type '{typeof(TConsumerDataModel).Name}'."); var hashEntries = new List(); - foreach (var property in this._propertyReader.DataPropertiesInfo) + foreach (var property in model.DataProperties) { - var storageName = this._propertyReader.GetStoragePropertyName(property.Name); - var value = property.GetValue(dataModel); - hashEntries.Add(new HashEntry(storageName, RedisValue.Unbox(value))); + var value = property.GetValueAsObject(dataModel!); + hashEntries.Add(new HashEntry(property.StorageName, RedisValue.Unbox(value))); } - foreach (var property in this._propertyReader.VectorPropertiesInfo) + foreach (var property in model.VectorProperties) { - var storageName = this._propertyReader.GetStoragePropertyName(property.Name); - var value = property.GetValue(dataModel); + var value = property.GetValueAsObject(dataModel!); if (value is not null) { // Convert the vector to a byte array and store it in the hash entry. // We only support float and double vectors and we do checking in the // collection constructor to ensure that the model has no other vector types. - if (value is ReadOnlyMemory rom) - { - hashEntries.Add(new HashEntry(storageName, RedisVectorStoreRecordFieldMapping.ConvertVectorToBytes(rom))); - } - else if (value is ReadOnlyMemory rod) + switch (value) { - hashEntries.Add(new HashEntry(storageName, RedisVectorStoreRecordFieldMapping.ConvertVectorToBytes(rod))); + case ReadOnlyMemory rom: + hashEntries.Add(new HashEntry(property.StorageName, RedisVectorStoreRecordFieldMapping.ConvertVectorToBytes(rom))); + continue; + case ReadOnlyMemory rod: + hashEntries.Add(new HashEntry(property.StorageName, RedisVectorStoreRecordFieldMapping.ConvertVectorToBytes(rod))); + continue; + default: + throw new VectorStoreRecordMappingException($"Unsupported vector type '{value.GetType()}'. Only float and double vectors are supported."); } } } @@ -75,49 +61,39 @@ public TConsumerDataModel MapFromStorageToDataModel((string Key, HashEntry[] Has var hashEntriesDictionary = storageModel.HashEntries.ToDictionary(x => (string)x.Name!, x => x.Value); // Construct the output record. - var outputRecord = (TConsumerDataModel)this._propertyReader.ParameterLessConstructorInfo.Invoke(null); + var outputRecord = model.CreateRecord(); // Set Key. - this._propertyReader.KeyPropertyInfo.SetValue(outputRecord, storageModel.Key); + model.KeyProperty.SetValueAsObject(outputRecord!, storageModel.Key); // Set each vector property if embeddings should be returned. 
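Vectors in the hash-set layout are stored as raw bytes, so both directions of the mapping above reduce to reinterpreting memory. A hedged, plain-BCL sketch of that round trip (MemoryMarshal only, no connector types):

using System;
using System.Runtime.InteropServices;

internal static class RedisVectorBytesSketch
{
    // Write side: flatten the float (or double) vector into the byte payload of a hash entry.
    public static byte[] ToBytes(ReadOnlyMemory<float> vector)
        => MemoryMarshal.AsBytes(vector.Span).ToArray();

    // Read side: reinterpret the stored bytes back into the requested element type.
    public static ReadOnlyMemory<float> ToFloatVector(byte[] bytes)
        => new(MemoryMarshal.Cast<byte, float>(bytes).ToArray());

    public static ReadOnlyMemory<double> ToDoubleVector(byte[] bytes)
        => new(MemoryMarshal.Cast<byte, double>(bytes).ToArray());
}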
if (options?.IncludeVectors is true) { - VectorStoreRecordMapping.SetValuesOnProperties( - outputRecord, - this._propertyReader.VectorPropertiesInfo, - this._propertyReader.StoragePropertyNamesMap, - hashEntriesDictionary, - (RedisValue vector, Type targetType) => + foreach (var property in model.VectorProperties) + { + if (hashEntriesDictionary.TryGetValue(property.StorageName, out var vector)) { - if (targetType == typeof(ReadOnlyMemory) || targetType == typeof(ReadOnlyMemory?)) - { - var array = MemoryMarshal.Cast((byte[])vector!).ToArray(); - return new ReadOnlyMemory(array); - } - else if (targetType == typeof(ReadOnlyMemory) || targetType == typeof(ReadOnlyMemory?)) + property.SetValueAsObject(outputRecord!, property.Type switch { - var array = MemoryMarshal.Cast((byte[])vector!).ToArray(); - return new ReadOnlyMemory(array); - } - else - { - throw new VectorStoreRecordMappingException($"Unsupported vector type '{targetType}'. Only float and double vectors are supported."); - } - }); + Type t when t == typeof(ReadOnlyMemory) || t == typeof(ReadOnlyMemory?) + => new ReadOnlyMemory(MemoryMarshal.Cast((byte[])vector!).ToArray()), + Type t when t == typeof(ReadOnlyMemory) || t == typeof(ReadOnlyMemory?) + => new ReadOnlyMemory(MemoryMarshal.Cast((byte[])vector!).ToArray()), + _ => throw new VectorStoreRecordMappingException($"Unsupported vector type '{property.Type}'. Only float and double vectors are supported.") + }); + } + } } - // Set each data property. - VectorStoreRecordMapping.SetValuesOnProperties( - outputRecord, - this._propertyReader.DataPropertiesInfo, - this._propertyReader.StoragePropertyNamesMap, - hashEntriesDictionary, - (RedisValue hashValue, Type targetType) => + foreach (var property in model.DataProperties) + { + if (hashEntriesDictionary.TryGetValue(property.StorageName, out var hashValue)) { - var typeOrNullableType = Nullable.GetUnderlyingType(targetType) ?? targetType; - return Convert.ChangeType(hashValue, typeOrNullableType); - }); + var typeOrNullableType = Nullable.GetUnderlyingType(property.Type) ?? property.Type; + var value = Convert.ChangeType(hashValue, typeOrNullableType); + property.SetValueAsObject(outputRecord!, value); + } + } return outputRecord; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonGenericDataModelMapper.cs index f499b0bfb4eb..ea0ac812674a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonGenericDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonGenericDataModelMapper.cs @@ -1,77 +1,32 @@ // Copyright (c) Microsoft. All rights reserved. using System.Collections.Generic; -using System.Linq; using System.Text.Json; using System.Text.Json.Nodes; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Redis when using JSON. /// -internal class RedisJsonGenericDataModelMapper : IVectorStoreRecordMapper, (string Key, JsonNode Node)> +internal sealed class RedisJsonGenericDataModelMapper( + IReadOnlyList properties, + JsonSerializerOptions jsonSerializerOptions) + : IVectorStoreRecordMapper, (string Key, JsonNode Node)> { - /// All the properties from the record definition. 
- private readonly IReadOnlyList _properties; - - /// The JSON serializer options to use when converting between the data model and the Redis record. - private readonly JsonSerializerOptions _jsonSerializerOptions; - - /// A dictionary that maps from a property name to the storage name that should be used when serializing it to json for data and vector properties. - public readonly Dictionary _storagePropertyNames; - - /// - /// Initializes a new instance of the class. - /// - /// All the properties from the record definition. - /// The JSON serializer options to use when converting between the data model and the Redis record. - public RedisJsonGenericDataModelMapper( - IReadOnlyList properties, - JsonSerializerOptions jsonSerializerOptions) - { - Verify.NotNull(properties); - Verify.NotNull(jsonSerializerOptions); - - this._properties = properties; - this._jsonSerializerOptions = jsonSerializerOptions; - - // Create a dictionary that maps from the data model property name to the storage property name. - this._storagePropertyNames = properties.Select(x => - { - if (x.StoragePropertyName is not null) - { - return new KeyValuePair( - x.DataModelPropertyName, - x.StoragePropertyName); - } - - if (jsonSerializerOptions.PropertyNamingPolicy is not null) - { - return new KeyValuePair( - x.DataModelPropertyName, - jsonSerializerOptions.PropertyNamingPolicy.ConvertName(x.DataModelPropertyName)); - } - - return new KeyValuePair( - x.DataModelPropertyName, - x.DataModelPropertyName); - }).ToDictionary(x => x.Key, x => x.Value); - } - /// public (string Key, JsonNode Node) MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) { var jsonObject = new JsonObject(); - foreach (var property in this._properties) + foreach (var property in properties) { - var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; - var sourceDictionary = property is VectorStoreRecordDataProperty ? dataModel.Data : dataModel.Vectors; + var sourceDictionary = property is VectorStoreRecordDataPropertyModel ? dataModel.Data : dataModel.Vectors; // Only map properties across that actually exist in the input. - if (sourceDictionary is null || !sourceDictionary.TryGetValue(property.DataModelPropertyName, out var sourceValue)) + if (sourceDictionary is null || !sourceDictionary.TryGetValue(property.ModelName, out var sourceValue)) { continue; } @@ -79,11 +34,11 @@ public RedisJsonGenericDataModelMapper( // Replicate null if the property exists but is null. if (sourceValue is null) { - jsonObject.Add(storagePropertyName, null); + jsonObject.Add(property.StorageName, null); continue; } - jsonObject.Add(storagePropertyName, JsonSerializer.SerializeToNode(sourceValue, property.PropertyType)); + jsonObject.Add(property.StorageName, JsonSerializer.SerializeToNode(sourceValue, property.Type, jsonSerializerOptions)); } return (dataModel.Key, jsonObject); @@ -95,27 +50,19 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel((string Key var dataModel = new VectorStoreGenericDataModel(storageModel.Key); // The redis result can be either a single object or an array with a single object in the case where we are doing an MGET. 
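A JSON read from Redis yields a single JsonObject, while an MGET wraps that object in a one-element JsonArray; the rewritten code shown just below collapses both shapes with a C# list pattern. A self-contained sketch of the same unwrapping (InvalidOperationException stands in for the connector's mapping exception):

using System;
using System.Text.Json.Nodes;

internal static class RedisJsonUnwrapSketch
{
    public static JsonObject UnwrapSingleObject(JsonNode node, string key) => node switch
    {
        JsonObject obj => obj,                              // plain GET result
        JsonArray and [JsonObject onlyEntry] => onlyEntry,  // MGET result with exactly one entry
        _ => throw new InvalidOperationException($"Invalid data format for document with key '{key}'.")
    };
}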
- JsonObject jsonObject; - if (storageModel.Node is JsonObject topLevelJsonObject) + var jsonObject = storageModel.Node switch { - jsonObject = topLevelJsonObject; - } - else if (storageModel.Node is JsonArray jsonArray && jsonArray.Count == 1 && jsonArray[0] is JsonObject arrayEntryJsonObject) - { - jsonObject = arrayEntryJsonObject; - } - else - { - throw new VectorStoreRecordMappingException($"Invalid data format for document with key '{storageModel.Key}'"); - } + JsonObject topLevelJsonObject => topLevelJsonObject, + JsonArray jsonArray and [JsonObject arrayEntryJsonObject] => arrayEntryJsonObject, + _ => throw new VectorStoreRecordMappingException($"Invalid data format for document with key '{storageModel.Key}'"), + }; - foreach (var property in this._properties) + foreach (var property in properties) { - var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; - var targetDictionary = property is VectorStoreRecordDataProperty ? dataModel.Data : dataModel.Vectors; + var targetDictionary = property is VectorStoreRecordDataPropertyModel ? dataModel.Data : dataModel.Vectors; // Only map properties across that actually exist in the input. - if (!jsonObject.TryGetPropertyValue(storagePropertyName, out var sourceValue)) + if (!jsonObject.TryGetPropertyValue(property.StorageName, out var sourceValue)) { continue; } @@ -123,14 +70,14 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel((string Key // Replicate null if the property exists but is null. if (sourceValue is null) { - targetDictionary.Add(property.DataModelPropertyName, null); + targetDictionary.Add(property.ModelName, null); continue; } // Map data and vector values. - if (property is VectorStoreRecordDataProperty || property is VectorStoreRecordVectorProperty) + if (property is VectorStoreRecordDataPropertyModel or VectorStoreRecordVectorPropertyModel) { - targetDictionary.Add(property.DataModelPropertyName, JsonSerializer.Deserialize(sourceValue, property.PropertyType)); + targetDictionary.Add(property.ModelName, JsonSerializer.Deserialize(sourceValue, property.Type, jsonSerializerOptions)); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index 941f18e0f7a4..b267cf0bef3b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -9,6 +9,7 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using NRedisStack.Json.DataTypes; using NRedisStack.RedisStackCommands; using NRedisStack.Search; @@ -25,17 +26,21 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; public class RedisJsonVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { - /// The name of this database for telemetry purposes. - private const string DatabaseName = "Redis"; + internal static readonly VectorStoreRecordModelBuildingOptions ModelBuildingOptions = new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, - /// A set of types that a key on the provided model may have. 
- private static readonly HashSet s_supportedKeyTypes = - [ - typeof(string) - ]; + SupportedKeyPropertyTypes = [typeof(string)], + SupportedDataPropertyTypes = null, // TODO: Validate data property types + SupportedEnumerableDataPropertyElementTypes = null, + SupportedVectorPropertyTypes = s_supportedVectorTypes, + + UsesExternalSerializer = true + }; - /// A set of types that vectors on the provided model may have. - private static readonly HashSet s_supportedVectorTypes = + internal static readonly HashSet s_supportedVectorTypes = [ typeof(ReadOnlyMemory), typeof(ReadOnlyMemory), @@ -43,6 +48,9 @@ public class RedisJsonVectorStoreRecordCollection : IVectorStoreRecordC typeof(ReadOnlyMemory?) ]; + /// The name of this database for telemetry purposes. + private const string DatabaseName = "Redis"; + /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -55,8 +63,8 @@ public class RedisJsonVectorStoreRecordCollection : IVectorStoreRecordC /// Optional configuration options for this class. private readonly RedisJsonVectorStoreRecordCollectionOptions _options; - /// A helper to access property information for the current data model and record definition. - private readonly VectorStoreRecordPropertyReader _propertyReader; + /// The model. + private readonly VectorStoreRecordModel _model; /// An array of the storage names of all the data properties that are part of the Redis payload, i.e. all properties except the key and vector properties. private readonly string[] _dataStoragePropertyNames; @@ -79,31 +87,17 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectio // Verify. Verify.NotNull(database); Verify.NotNullOrWhiteSpace(collectionName); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.JsonNodeCustomMapper is not null, s_supportedKeyTypes); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); // Assign. this._database = database; this._collectionName = collectionName; this._options = options ?? new RedisJsonVectorStoreRecordCollectionOptions(); this._jsonSerializerOptions = this._options.JsonSerializerOptions ?? JsonSerializerOptions.Default; - this._propertyReader = new VectorStoreRecordPropertyReader( - typeof(TRecord), - this._options.VectorStoreRecordDefinition, - new() - { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true, - JsonSerializerOptions = this._jsonSerializerOptions - }); - - // Validate property types. - this._propertyReader.VerifyKeyProperties(s_supportedKeyTypes); - this._propertyReader.VerifyVectorProperties(s_supportedVectorTypes); + this._model = new VectorStoreRecordJsonModelBuilder(ModelBuildingOptions) + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._jsonSerializerOptions); // Lookup storage property names. - this._dataStoragePropertyNames = this._propertyReader.DataPropertyJsonNames.ToArray(); + this._dataStoragePropertyNames = this._model.DataProperties.Select(p => p.StorageName).ToArray(); // Assign Mapper. if (this._options.JsonNodeCustomMapper is not null) @@ -114,14 +108,14 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectio else if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) { // Generic data model mapper. 
- this._mapper = (IVectorStoreRecordMapper)new RedisJsonGenericDataModelMapper( - this._propertyReader.Properties, - this._jsonSerializerOptions); + this._mapper = (new RedisJsonGenericDataModelMapper( + this._model.Properties, + this._jsonSerializerOptions) as IVectorStoreRecordMapper)!; } else { // Default Mapper. - this._mapper = new RedisJsonVectorStoreRecordMapper(this._propertyReader.KeyPropertyJsonName, this._jsonSerializerOptions); + this._mapper = new RedisJsonVectorStoreRecordMapper(this._model.KeyProperty, this._jsonSerializerOptions); } } @@ -155,7 +149,7 @@ public virtual async Task CollectionExistsAsync(CancellationToken cancella public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) { // Map the record definition to a schema. - var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(this._propertyReader.Properties, this._propertyReader.JsonPropertyNamesMap, useDollarPrefix: true); + var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(this._model.Properties, useDollarPrefix: true); // Create the index creation params. // Add the collection name and colon as the index prefix, which means that any record where the key is prefixed with this text will be indexed by this index @@ -395,15 +389,15 @@ public virtual async Task> VectorizedSearchAsync> VectorizedSearchAsync /// The consumer data model to map to or from. -internal sealed class RedisJsonVectorStoreRecordMapper : IVectorStoreRecordMapper +internal sealed class RedisJsonVectorStoreRecordMapper( + VectorStoreRecordKeyPropertyModel keyProperty, + JsonSerializerOptions jsonSerializerOptions) + : IVectorStoreRecordMapper { - /// The name of the temporary json property that the key field will be serialized / parsed from. - private readonly string _keyFieldJsonPropertyName; - - /// The JSON serializer options to use when converting between the data model and the Redis record. - private readonly JsonSerializerOptions _jsonSerializerOptions; - - /// - /// Initializes a new instance of the class. - /// - /// The name of the key field on the model when serialized to json. - /// The JSON serializer options to use when converting between the data model and the Redis record. - public RedisJsonVectorStoreRecordMapper(string keyFieldJsonPropertyName, JsonSerializerOptions jsonSerializerOptions) - { - Verify.NotNullOrWhiteSpace(keyFieldJsonPropertyName); - Verify.NotNull(jsonSerializerOptions); - - this._keyFieldJsonPropertyName = keyFieldJsonPropertyName; - this._jsonSerializerOptions = jsonSerializerOptions; - } + /// The key property. + private readonly string _keyPropertyStorageName = keyProperty.StorageName; /// public (string Key, JsonNode Node) MapFromDataToStorageModel(TConsumerDataModel dataModel) @@ -38,47 +25,39 @@ public RedisJsonVectorStoreRecordMapper(string keyFieldJsonPropertyName, JsonSer // Convert the provided record into a JsonNode object and try to get the key field for it. // Since we already checked that the key field is a string in the constructor, and that it exists on the model, // the only edge case we have to be concerned about is if the key field is null. 
- var jsonNode = JsonSerializer.SerializeToNode(dataModel, this._jsonSerializerOptions); - if (jsonNode!.AsObject().TryGetPropertyValue(this._keyFieldJsonPropertyName, out var keyField) && keyField is JsonValue jsonValue) + var jsonNode = JsonSerializer.SerializeToNode(dataModel, jsonSerializerOptions); + if (jsonNode!.AsObject().TryGetPropertyValue(this._keyPropertyStorageName, out var keyField) && keyField is JsonValue jsonValue) { // Remove the key field from the JSON object since we don't want to store it in the redis payload. var keyValue = jsonValue.ToString(); - jsonNode.AsObject().Remove(this._keyFieldJsonPropertyName); + jsonNode.AsObject().Remove(this._keyPropertyStorageName); return (keyValue, jsonNode); } - throw new VectorStoreRecordMappingException($"Missing key field {this._keyFieldJsonPropertyName} on provided record of type {typeof(TConsumerDataModel).FullName}."); + throw new VectorStoreRecordMappingException($"Missing key field {this._keyPropertyStorageName} on provided record of type {typeof(TConsumerDataModel).FullName}."); } /// public TConsumerDataModel MapFromStorageToDataModel((string Key, JsonNode Node) storageModel, StorageToDataModelMapperOptions options) { - JsonObject jsonObject; - // The redis result can be either a single object or an array with a single object in the case where we are doing an MGET. - if (storageModel.Node is JsonObject topLevelJsonObject) + var jsonObject = storageModel.Node switch { - jsonObject = topLevelJsonObject; - } - else if (storageModel.Node is JsonArray jsonArray && jsonArray.Count == 1 && jsonArray[0] is JsonObject arrayEntryJsonObject) - { - jsonObject = arrayEntryJsonObject; - } - else - { - throw new VectorStoreRecordMappingException($"Invalid data format for document with key '{storageModel.Key}'"); - } + JsonObject topLevelJsonObject => topLevelJsonObject, + JsonArray and [JsonObject arrayEntryJsonObject] => arrayEntryJsonObject, + _ => throw new VectorStoreRecordMappingException($"Invalid data format for document with key '{storageModel.Key}'") + }; // Check that the key field is not already present in the redis value. - if (jsonObject.ContainsKey(this._keyFieldJsonPropertyName)) + if (jsonObject.ContainsKey(this._keyPropertyStorageName)) { - throw new VectorStoreRecordMappingException($"Invalid data format for document with key '{storageModel.Key}'. Key property '{this._keyFieldJsonPropertyName}' is already present on retrieved object."); + throw new VectorStoreRecordMappingException($"Invalid data format for document with key '{storageModel.Key}'. Key property '{this._keyPropertyStorageName}' is already present on retrieved object."); } // Since the key is not stored in the redis value, add it back in before deserializing into the data model. 
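The JSON mapper keeps the key out of the stored payload: on write it serializes the record, lifts the key property out of the JsonObject, and uses it as the Redis key; on read it adds the key back before deserializing. A self-contained sketch under the assumption that the key serializes under the JSON name "Key" (a hypothetical name; the real mapper resolves it from the model):

using System;
using System.Text.Json;
using System.Text.Json.Nodes;

internal sealed record Hotel(string Key, string Description);

internal static class RedisJsonKeySketch
{
    public static (string Key, JsonNode Node) ToStorage(Hotel record)
    {
        var node = JsonSerializer.SerializeToNode(record)!.AsObject();
        var key = node["Key"]!.GetValue<string>();
        node.Remove("Key");                       // the key becomes the Redis key, not payload
        return (key, node);
    }

    public static Hotel FromStorage(string key, JsonObject node)
    {
        node.Add("Key", key);                     // restore the key before deserializing
        return JsonSerializer.Deserialize<Hotel>(node)!;
    }
}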
- jsonObject.Add(this._keyFieldJsonPropertyName, storageModel.Key); + jsonObject.Add(this._keyPropertyStorageName, storageModel.Key); - return JsonSerializer.Deserialize(jsonObject, this._jsonSerializerOptions)!; + return JsonSerializer.Deserialize(jsonObject, jsonSerializerOptions)!; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs index 8bde159e848a..169867fb24ac 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs @@ -6,6 +6,7 @@ using System.Globalization; using System.Linq; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using NRedisStack.Search; namespace Microsoft.SemanticKernel.Connectors.Redis; @@ -47,11 +48,10 @@ internal static class RedisVectorStoreCollectionCreateMapping /// Map from the given list of items to the Redis . /// /// The property definitions to map from. - /// A dictionary that maps from a property name to the storage name that should be used when serializing it to json for data and vector properties. /// A value indicating whether to include $. prefix for field names as required in JSON mode. /// The mapped Redis . /// Thrown if there are missing required or unsupported configuration options set. - public static Schema MapToSchema(IEnumerable properties, IReadOnlyDictionary storagePropertyNames, bool useDollarPrefix) + public static Schema MapToSchema(IEnumerable properties, bool useDollarPrefix) { var schema = new Schema(); var fieldNamePrefix = useDollarPrefix ? "$." : string.Empty; @@ -59,79 +59,73 @@ public static Schema MapToSchema(IEnumerable properti // Loop through all properties and create the index fields. foreach (var property in properties) { - // Key property. - if (property is VectorStoreRecordKeyProperty keyProperty) - { - // Do nothing, since key is not stored as part of the payload and therefore doesn't have to be added to the index. - continue; - } + var storageName = property.StorageName; - // Data property. - if (property is VectorStoreRecordDataProperty dataProperty && (dataProperty.IsFilterable || dataProperty.IsFullTextSearchable)) + switch (property) { - var storageName = storagePropertyNames[dataProperty.DataModelPropertyName]; + case VectorStoreRecordKeyPropertyModel keyProperty: + // Do nothing, since key is not stored as part of the payload and therefore doesn't have to be added to the index. + continue; - if (dataProperty.IsFilterable && dataProperty.IsFullTextSearchable) - { - throw new InvalidOperationException($"Property '{dataProperty.DataModelPropertyName}' has both {nameof(VectorStoreRecordDataProperty.IsFilterable)} and {nameof(VectorStoreRecordDataProperty.IsFullTextSearchable)} set to true, and this is not supported by the Redis VectorStore."); - } - - // Add full text search field index. 
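The schema mapping walks every data property and decides which Redis index field it becomes: full-text (TEXT) is only legal for string or string-collection properties, filterable strings become TAG fields, filterable string collections become TAG fields indexed per element, and filterable numerics become NUMERIC fields. A small, library-free sketch of that decision (the numeric set and the collection check are simplified for illustration):

using System;
using System.Collections.Generic;

internal static class RedisIndexFieldSketch
{
    private static readonly HashSet<Type> s_numericTypes =
        [typeof(int), typeof(uint), typeof(long), typeof(ulong), typeof(float), typeof(double)];

    public static string ChooseFilterFieldKind(Type propertyType) => propertyType switch
    {
        var t when t == typeof(string) => "TAG",
        var t when typeof(IEnumerable<string>).IsAssignableFrom(t) => "TAG (per-element, '.*' path)",
        var t when s_numericTypes.Contains(Nullable.GetUnderlyingType(t) ?? t) => "NUMERIC",
        _ => throw new InvalidOperationException($"Type '{propertyType.Name}' cannot be marked filterable for Redis.")
    };
}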
- if (dataProperty.IsFullTextSearchable) - { - if (dataProperty.PropertyType == typeof(string) || (typeof(IEnumerable).IsAssignableFrom(dataProperty.PropertyType) && GetEnumerableType(dataProperty.PropertyType) == typeof(string))) + case VectorStoreRecordDataPropertyModel dataProperty when dataProperty.IsFilterable || dataProperty.IsFullTextSearchable: + if (dataProperty.IsFilterable && dataProperty.IsFullTextSearchable) { - schema.AddTextField(new FieldName($"{fieldNamePrefix}{storageName}", storageName)); + throw new InvalidOperationException($"Property '{dataProperty.ModelName}' has both {nameof(VectorStoreRecordDataProperty.IsFilterable)} and {nameof(VectorStoreRecordDataProperty.IsFullTextSearchable)} set to true, and this is not supported by the Redis VectorStore."); } - else - { - throw new InvalidOperationException($"Property {nameof(dataProperty.IsFullTextSearchable)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.DataModelPropertyName}' is set to true, but the property type is not a string or IEnumerable. The Redis VectorStore supports {nameof(dataProperty.IsFullTextSearchable)} on string or IEnumerable properties only."); - } - } - // Add filter field index. - if (dataProperty.IsFilterable) - { - if (dataProperty.PropertyType == typeof(string)) - { - schema.AddTagField(new FieldName($"{fieldNamePrefix}{storageName}", storageName)); - } - else if (typeof(IEnumerable).IsAssignableFrom(dataProperty.PropertyType) && GetEnumerableType(dataProperty.PropertyType) == typeof(string)) + // Add full text search field index. + if (dataProperty.IsFullTextSearchable) { - schema.AddTagField(new FieldName($"{fieldNamePrefix}{storageName}.*", storageName)); + if (dataProperty.Type == typeof(string) || (typeof(IEnumerable).IsAssignableFrom(dataProperty.Type) && GetEnumerableType(dataProperty.Type) == typeof(string))) + { + schema.AddTextField(new FieldName($"{fieldNamePrefix}{storageName}", storageName)); + } + else + { + throw new InvalidOperationException($"Property {nameof(dataProperty.IsFullTextSearchable)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.ModelName}' is set to true, but the property type is not a string or IEnumerable. The Redis VectorStore supports {nameof(dataProperty.IsFullTextSearchable)} on string or IEnumerable properties only."); + } } - else if (RedisVectorStoreCollectionCreateMapping.s_supportedFilterableNumericDataTypes.Contains(dataProperty.PropertyType)) + + // Add filter field index. + if (dataProperty.IsFilterable) { - schema.AddNumericField(new FieldName($"{fieldNamePrefix}{storageName}", storageName)); + if (dataProperty.Type == typeof(string)) + { + schema.AddTagField(new FieldName($"{fieldNamePrefix}{storageName}", storageName)); + } + else if (typeof(IEnumerable).IsAssignableFrom(dataProperty.Type) && GetEnumerableType(dataProperty.Type) == typeof(string)) + { + schema.AddTagField(new FieldName($"{fieldNamePrefix}{storageName}.*", storageName)); + } + else if (RedisVectorStoreCollectionCreateMapping.s_supportedFilterableNumericDataTypes.Contains(dataProperty.Type)) + { + schema.AddNumericField(new FieldName($"{fieldNamePrefix}{storageName}", storageName)); + } + else + { + throw new InvalidOperationException($"Property '{dataProperty.ModelName}' is marked as {nameof(VectorStoreRecordDataProperty.IsFilterable)}, but the property type '{dataProperty.Type}' is not supported. 
Only string, IEnumerable and numeric properties are supported for filtering by the Redis VectorStore."); + } } - else + + continue; + + case VectorStoreRecordVectorPropertyModel vectorProperty: + if (vectorProperty.Dimensions is not > 0) { - throw new InvalidOperationException($"Property '{dataProperty.DataModelPropertyName}' is marked as {nameof(VectorStoreRecordDataProperty.IsFilterable)}, but the property type '{dataProperty.PropertyType}' is not supported. Only string, IEnumerable and numeric properties are supported for filtering by the Redis VectorStore."); + throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' must be set to a positive integer to create a collection."); } - } - continue; - } - - // Vector property. - if (property is VectorStoreRecordVectorProperty vectorProperty) - { - if (vectorProperty.Dimensions is not > 0) - { - throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' must be set to a positive integer to create a collection."); - } - - var storageName = storagePropertyNames[vectorProperty.DataModelPropertyName]; - var indexKind = GetSDKIndexKind(vectorProperty); - var vectorType = GetSDKVectorType(vectorProperty); - var dimensions = vectorProperty.Dimensions.Value.ToString(CultureInfo.InvariantCulture); - var distanceAlgorithm = GetSDKDistanceAlgorithm(vectorProperty); - schema.AddVectorField(new FieldName($"{fieldNamePrefix}{storageName}", storageName), indexKind, new Dictionary() - { - ["TYPE"] = vectorType, - ["DIM"] = dimensions, - ["DISTANCE_METRIC"] = distanceAlgorithm - }); + var indexKind = GetSDKIndexKind(vectorProperty); + var vectorType = GetSDKVectorType(vectorProperty); + var dimensions = vectorProperty.Dimensions.Value.ToString(CultureInfo.InvariantCulture); + var distanceAlgorithm = GetSDKDistanceAlgorithm(vectorProperty); + schema.AddVectorField(new FieldName($"{fieldNamePrefix}{storageName}", storageName), indexKind, new Dictionary() + { + ["TYPE"] = vectorType, + ["DIM"] = dimensions, + ["DISTANCE_METRIC"] = distanceAlgorithm + }); + continue; } } @@ -145,20 +139,13 @@ public static Schema MapToSchema(IEnumerable properti /// The vector property definition. /// The chosen . /// Thrown if a index type was chosen that isn't supported by Redis. - public static Schema.VectorField.VectorAlgo GetSDKIndexKind(VectorStoreRecordVectorProperty vectorProperty) - { - if (vectorProperty.IndexKind is null) + public static Schema.VectorField.VectorAlgo GetSDKIndexKind(VectorStoreRecordVectorPropertyModel vectorProperty) + => vectorProperty.IndexKind switch { - return Schema.VectorField.VectorAlgo.HNSW; - } - - return vectorProperty.IndexKind switch - { - IndexKind.Hnsw => Schema.VectorField.VectorAlgo.HNSW, + IndexKind.Hnsw or null => Schema.VectorField.VectorAlgo.HNSW, IndexKind.Flat => Schema.VectorField.VectorAlgo.FLAT, - _ => throw new InvalidOperationException($"Index kind '{vectorProperty.IndexKind}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' is not supported by the Redis VectorStore.") + _ => throw new InvalidOperationException($"Index kind '{vectorProperty.IndexKind}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' is not supported by the Redis VectorStore.") }; - } /// /// Get the configured distance metric from the given . 
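The distance metric translation that follows defaults a missing DistanceFunction to cosine by folding null into the same switch arm as the explicit cosine choices. Sketched here with literal strings standing in for the DistanceFunction constants:

using System;

internal static class RedisDistanceMetricSketch
{
    public static string ToRedisMetric(string? distanceFunction) => distanceFunction switch
    {
        "CosineSimilarity" or "CosineDistance" or null => "COSINE",
        "DotProductSimilarity" => "IP",
        "EuclideanSquaredDistance" => "L2",
        _ => throw new InvalidOperationException($"Distance function '{distanceFunction}' is not supported by the Redis VectorStore.")
    };
}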
@@ -167,22 +154,15 @@ public static Schema.VectorField.VectorAlgo GetSDKIndexKind(VectorStoreRecordVec /// The vector property definition. /// The chosen distance metric. /// Thrown if a distance function is chosen that isn't supported by Redis. - public static string GetSDKDistanceAlgorithm(VectorStoreRecordVectorProperty vectorProperty) - { - if (vectorProperty.DistanceFunction is null) + public static string GetSDKDistanceAlgorithm(VectorStoreRecordVectorPropertyModel vectorProperty) + => vectorProperty.DistanceFunction switch { - return "COSINE"; - } - - return vectorProperty.DistanceFunction switch - { - DistanceFunction.CosineSimilarity => "COSINE", + DistanceFunction.CosineSimilarity or null => "COSINE", DistanceFunction.CosineDistance => "COSINE", DistanceFunction.DotProductSimilarity => "IP", DistanceFunction.EuclideanSquaredDistance => "L2", - _ => throw new InvalidOperationException($"Distance function '{vectorProperty.DistanceFunction}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' is not supported by the Redis VectorStore.") + _ => throw new InvalidOperationException($"Distance function '{vectorProperty.DistanceFunction}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' is not supported by the Redis VectorStore.") }; - } /// /// Get the vector type to pass to the SDK based on the data type of the vector property. @@ -190,17 +170,15 @@ public static string GetSDKDistanceAlgorithm(VectorStoreRecordVectorProperty vec /// The vector property definition. /// The SDK required vector type. /// Thrown if the property data type is not supported by the connector. - public static string GetSDKVectorType(VectorStoreRecordVectorProperty vectorProperty) - { - return vectorProperty.PropertyType switch + public static string GetSDKVectorType(VectorStoreRecordVectorPropertyModel vectorProperty) + => vectorProperty.Type switch { Type t when t == typeof(ReadOnlyMemory) => "FLOAT32", Type t when t == typeof(ReadOnlyMemory?) => "FLOAT32", Type t when t == typeof(ReadOnlyMemory) => "FLOAT64", Type t when t == typeof(ReadOnlyMemory?) => "FLOAT64", - _ => throw new InvalidOperationException($"Vector data type '{vectorProperty.PropertyType.FullName}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' is not supported by the Redis VectorStore.") + _ => throw new InvalidOperationException($"Vector data type '{vectorProperty.Type.Name}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' is not supported by the Redis VectorStore.") }; - } /// /// Gets the type of object stored in the given enumerable type. diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs index b9d199eb3361..8be596acaf57 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. 
using System; -using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; using System.Runtime.InteropServices; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using NRedisStack.Search; namespace Microsoft.SemanticKernel.Connectors.Redis; @@ -24,34 +24,23 @@ internal static class RedisVectorStoreCollectionSearchMapping /// The vector converted to a byte array. /// Thrown if the vector type is not supported. public static byte[] ValidateVectorAndConvertToBytes(TVector vector, string connectorTypeName) - { - byte[] vectorBytes; - if (vector is ReadOnlyMemory floatVector) - { - vectorBytes = MemoryMarshal.AsBytes(floatVector.Span).ToArray(); - } - else if (vector is ReadOnlyMemory doubleVector) - { - vectorBytes = MemoryMarshal.AsBytes(doubleVector.Span).ToArray(); - } - else + => vector switch { - throw new NotSupportedException($"The provided vector type {vector?.GetType().FullName} is not supported by the Redis {connectorTypeName} connector."); - } - - return vectorBytes; - } + ReadOnlyMemory floatVector => MemoryMarshal.AsBytes(floatVector.Span).ToArray(), + ReadOnlyMemory doubleVector => MemoryMarshal.AsBytes(doubleVector.Span).ToArray(), + _ => throw new NotSupportedException($"The provided vector type {vector?.GetType().FullName} is not supported by the Redis {connectorTypeName} connector.") + }; /// /// Build a Redis object from the given vector and options. /// /// The vector to search the database with as a byte array. /// The options to configure the behavior of the search. - /// A mapping of data model property names to the names under which they are stored. - /// The storage name of the vector property. + /// The model. + /// The vector property. /// The set of fields to limit the results to. Null for all. /// The . - public static Query BuildQuery(byte[] vectorBytes, VectorSearchOptions options, IReadOnlyDictionary storagePropertyNames, string vectorStoragePropertyName, string[]? selectFields) + public static Query BuildQuery(byte[] vectorBytes, VectorSearchOptions options, VectorStoreRecordModel model, VectorStoreRecordVectorPropertyModel vectorProperty, string[]? selectFields) { // Build search query. var redisLimit = options.Top + options.Skip; @@ -60,13 +49,13 @@ public static Query BuildQuery(byte[] vectorBytes, VectorSearchOptions< var filter = options switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), - { OldFilter: VectorSearchFilter legacyFilter } => BuildLegacyFilter(legacyFilter, storagePropertyNames), - { Filter: Expression> newFilter } => new RedisFilterTranslator().Translate(newFilter, storagePropertyNames), + { OldFilter: VectorSearchFilter legacyFilter } => BuildLegacyFilter(legacyFilter, model), + { Filter: Expression> newFilter } => new RedisFilterTranslator().Translate(newFilter, model), _ => "*" }; #pragma warning restore CS0618 // Type or member is obsolete - var query = new Query($"{filter}=>[KNN {redisLimit} @{vectorStoragePropertyName} $embedding AS vector_score]") + var query = new Query($"{filter}=>[KNN {redisLimit} @{vectorProperty.StorageName} $embedding AS vector_score]") .AddParam("embedding", vectorBytes) .SetSortBy("vector_score") .Limit(options.Skip, redisLimit) @@ -85,17 +74,17 @@ public static Query BuildQuery(byte[] vectorBytes, VectorSearchOptions< /// Build a redis filter string from the provided . /// /// The to build the Redis filter string from. 
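The legacy filter builder just below resolves each clause's data-model property name to its storage name and renders Redis query syntax, for example @field:{value} for tag clauses. A minimal sketch of that lookup-plus-format step, using a plain dictionary in place of the record model:

using System;
using System.Collections.Generic;

internal static class RedisLegacyFilterSketch
{
    public static string BuildTagClause(
        IReadOnlyDictionary<string, string> storageNamesByModelName, string fieldName, string value)
    {
        if (!storageNamesByModelName.TryGetValue(fieldName, out var storageName))
        {
            throw new InvalidOperationException($"Property name '{fieldName}' provided as part of the filter clause is not a valid property name.");
        }

        return $"@{storageName}:{{{value}}}";
    }
}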
- /// A mapping of data model property names to the names under which they are stored. + /// The model. /// The Redis filter string. /// Thrown when a provided filter value is not supported. #pragma warning disable CS0618 // Type or member is obsolete - public static string BuildLegacyFilter(VectorSearchFilter basicVectorSearchFilter, IReadOnlyDictionary storagePropertyNames) + public static string BuildLegacyFilter(VectorSearchFilter basicVectorSearchFilter, VectorStoreRecordModel model) { var filterClauses = basicVectorSearchFilter.FilterClauses.Select(clause => { if (clause is EqualToFilterClause equalityFilterClause) { - var storagePropertyName = GetStoragePropertyName(storagePropertyNames, equalityFilterClause.FieldName); + var storagePropertyName = GetStoragePropertyName(model, equalityFilterClause.FieldName); return equalityFilterClause.Value switch { @@ -109,7 +98,7 @@ public static string BuildLegacyFilter(VectorSearchFilter basicVectorSearchFilte } else if (clause is AnyTagEqualToFilterClause tagListContainsClause) { - var storagePropertyName = GetStoragePropertyName(storagePropertyNames, tagListContainsClause.FieldName); + var storagePropertyName = GetStoragePropertyName(model, tagListContainsClause.FieldName); return $"@{storagePropertyName}:{{{tagListContainsClause.Value}}}"; } else @@ -128,7 +117,7 @@ public static string BuildLegacyFilter(VectorSearchFilter basicVectorSearchFilte /// /// The vector property to be used. /// The distance function for the vector we want to search. - public static string ResolveDistanceFunction(VectorStoreRecordVectorProperty vectorProperty) + public static string ResolveDistanceFunction(VectorStoreRecordVectorPropertyModel vectorProperty) => vectorProperty.DistanceFunction ?? DistanceFunction.CosineSimilarity; /// @@ -159,17 +148,17 @@ public static string ResolveDistanceFunction(VectorStoreRecordVectorProperty vec /// /// Gets the name of the name under which the property with the given name is stored. /// - /// A mapping of data model property names to the names under which they are stored. + /// The model. /// The name of the property in the data model. /// The name that the property os stored under. /// Thrown when the property name is not found. - private static string GetStoragePropertyName(IReadOnlyDictionary storagePropertyNames, string fieldName) + private static string GetStoragePropertyName(VectorStoreRecordModel model, string fieldName) { - if (!storagePropertyNames.TryGetValue(fieldName, out var storageFieldName)) + if (!model.PropertyMap.TryGetValue(fieldName, out var property)) { throw new InvalidOperationException($"Property name '{fieldName}' provided as part of the filter clause is not a valid property name."); } - return storageFieldName; + return property.StorageName; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/GenericRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/GenericRecordMapper.cs deleted file mode 100644 index ff9c7851f4cb..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/GenericRecordMapper.cs +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using Microsoft.Extensions.VectorData; - -namespace Microsoft.SemanticKernel.Connectors.SqlServer; - -internal sealed class GenericRecordMapper : IVectorStoreRecordMapper, IDictionary> - where TKey : notnull -{ - private readonly VectorStoreRecordPropertyReader _propertyReader; - - internal GenericRecordMapper(VectorStoreRecordPropertyReader propertyReader) => this._propertyReader = propertyReader; - - public IDictionary MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) - { - Dictionary properties = new() - { - { SqlServerCommandBuilder.GetColumnName(this._propertyReader.KeyProperty), dataModel.Key } - }; - - foreach (var property in this._propertyReader.DataProperties) - { - string name = SqlServerCommandBuilder.GetColumnName(property); - if (dataModel.Data.TryGetValue(name, out var dataValue)) - { - properties.Add(name, dataValue); - } - } - - // Add vector properties - if (dataModel.Vectors is not null) - { - foreach (var property in this._propertyReader.VectorProperties) - { - string name = SqlServerCommandBuilder.GetColumnName(property); - if (dataModel.Vectors.TryGetValue(name, out var vectorValue)) - { - if (vectorValue is ReadOnlyMemory floats) - { - properties.Add(name, floats); - } - else if (vectorValue is not null) - { - throw new VectorStoreRecordMappingException($"Vector property '{name}' contained value of non supported type: '{vectorValue.GetType().FullName}'."); - } - } - } - } - - return properties; - } - - public VectorStoreGenericDataModel MapFromStorageToDataModel(IDictionary storageModel, StorageToDataModelMapperOptions options) - { - TKey key; - var dataProperties = new Dictionary(); - var vectorProperties = new Dictionary(); - - if (storageModel.TryGetValue(SqlServerCommandBuilder.GetColumnName(this._propertyReader.KeyProperty), out var keyObject) && keyObject is not null) - { - key = (TKey)keyObject; - } - else - { - throw new VectorStoreRecordMappingException("No key property was found in the record retrieved from storage."); - } - - foreach (var property in this._propertyReader.DataProperties) - { - string name = SqlServerCommandBuilder.GetColumnName(property); - if (storageModel.TryGetValue(name, out var dataValue)) - { - dataProperties.Add(name, dataValue); - } - } - - if (options.IncludeVectors) - { - foreach (var property in this._propertyReader.VectorProperties) - { - string name = SqlServerCommandBuilder.GetColumnName(property); - if (storageModel.TryGetValue(name, out var vectorValue)) - { - vectorProperties.Add(name, vectorValue); - } - } - } - - return new(key) { Data = dataProperties, Vectors = vectorProperties }; - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs index 240f2814e044..1628948668b8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs @@ -2,37 +2,28 @@ using System; using System.Collections.Generic; -using System.Reflection; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.SqlServer; -internal sealed class RecordMapper : IVectorStoreRecordMapper> +internal sealed class RecordMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper> { - private readonly VectorStoreRecordPropertyReader _propertyReader; - - internal RecordMapper(VectorStoreRecordPropertyReader propertyReader) => 
this._propertyReader = propertyReader; - public IDictionary MapFromDataToStorageModel(TRecord dataModel) { Dictionary map = new(StringComparer.Ordinal); - map[SqlServerCommandBuilder.GetColumnName(this._propertyReader.KeyProperty)] = this._propertyReader.KeyPropertyInfo.GetValue(dataModel); + map[model.KeyProperty.StorageName] = model.KeyProperty.GetValueAsObject(dataModel!); - var dataProperties = this._propertyReader.DataProperties; - var dataPropertiesInfo = this._propertyReader.DataPropertiesInfo; - for (int i = 0; i < dataProperties.Count; i++) + foreach (var property in model.DataProperties) { - object? value = dataPropertiesInfo[i].GetValue(dataModel); - map[SqlServerCommandBuilder.GetColumnName(dataProperties[i])] = value; + map[property.StorageName] = property.GetValueAsObject(dataModel!); } - var vectorProperties = this._propertyReader.VectorProperties; - var vectorPropertiesInfo = this._propertyReader.VectorPropertiesInfo; - for (int i = 0; i < vectorProperties.Count; i++) + + foreach (var property in model.VectorProperties) { // We restrict the vector properties to ReadOnlyMemory so the cast here is safe. - ReadOnlyMemory floats = (ReadOnlyMemory)vectorPropertiesInfo[i].GetValue(dataModel)!; - map[SqlServerCommandBuilder.GetColumnName(vectorProperties[i])] = floats; + map[property.StorageName] = (ReadOnlyMemory)property.GetValueAsObject(dataModel!)!; } return map; @@ -40,33 +31,32 @@ internal sealed class RecordMapper : IVectorStoreRecordMapper storageModel, StorageToDataModelMapperOptions options) { - TRecord record = Activator.CreateInstance()!; - SetValue(storageModel, record, this._propertyReader.KeyPropertyInfo, this._propertyReader.KeyProperty); - var data = this._propertyReader.DataProperties; - var dataInfo = this._propertyReader.DataPropertiesInfo; - for (int i = 0; i < data.Count; i++) + var record = model.CreateRecord()!; + + SetValue(storageModel, record, model.KeyProperty, storageModel[model.KeyProperty.StorageName]); + + foreach (var property in model.DataProperties) { - SetValue(storageModel, record, dataInfo[i], data[i]); + SetValue(storageModel, record, property, storageModel[property.StorageName]); } if (options.IncludeVectors) { - var vector = this._propertyReader.VectorProperties; - var vectorInfo = this._propertyReader.VectorPropertiesInfo; - for (int i = 0; i < vector.Count; i++) + foreach (var property in model.VectorProperties) { - object? value = storageModel[SqlServerCommandBuilder.GetColumnName(vector[i])]; + var value = storageModel[property.StorageName]; + if (value is not null) { if (value is ReadOnlyMemory floats) { - vectorInfo[i].SetValue(record, floats); + SetValue(storageModel, record, property, floats); } else { // When deserializing a string to a ReadOnlyMemory fails in SqlDataReaderDictionary, // we store the raw value so the user can handle the error in a custom mapper. - throw new VectorStoreRecordMappingException($"Failed to deserialize vector property '{vector[i].DataModelPropertyName}', it contained value '{value}'."); + throw new VectorStoreRecordMappingException($"Failed to deserialize vector property '{property.ModelName}', it contained value '{value}'."); } } } @@ -74,25 +64,15 @@ public TRecord MapFromStorageToDataModel(IDictionary storageMod return record; - static void SetValue(IDictionary storageModel, object record, PropertyInfo propertyInfo, VectorStoreRecordProperty property) + static void SetValue(IDictionary storageModel, object record, VectorStoreRecordPropertyModel property, object? 
value) { - // If we got here, there should be no column name mismatch (the query would fail). - object? value = storageModel[SqlServerCommandBuilder.GetColumnName(property)]; - - if (value is null) - { - // There is no need to call the reflection to set the null, - // as it's the default value of every .NET reference type field. - return; - } - try { - propertyInfo.SetValue(record, value); + property.SetValueAsObject(record, value); } catch (Exception ex) { - throw new VectorStoreRecordMappingException($"Failed to set value '{value}' on property '{propertyInfo.Name}' of type '{propertyInfo.PropertyType.FullName}'.", ex); + throw new VectorStoreRecordMappingException($"Failed to set value '{value}' on property '{property.ModelName}' of type '{property.Type.Name}'.", ex); } } } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlDataReaderDictionary.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlDataReaderDictionary.cs index 414ff8de4afd..81179dfc4021 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlDataReaderDictionary.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlDataReaderDictionary.cs @@ -5,6 +5,7 @@ using System.Collections.Generic; using System.Text.Json; using Microsoft.Data.SqlClient; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.SqlServer; @@ -12,20 +13,12 @@ namespace Microsoft.SemanticKernel.Connectors.SqlServer; /// This class is used to provide a dictionary-like interface to a . /// The goal is to avoid the need of allocating a new dictionary for each row read from the database. /// -internal sealed class SqlDataReaderDictionary : IDictionary +internal sealed class SqlDataReaderDictionary(SqlDataReader sqlDataReader, IReadOnlyList vectorProperties) + : IDictionary { - private readonly SqlDataReader _sqlDataReader; - private readonly IReadOnlyList _vectorPropertyStoragePropertyNames; - // This field will get instantiated lazily, only if needed by a custom mapper. private Dictionary? _dictionary; - internal SqlDataReaderDictionary(SqlDataReader sqlDataReader, IReadOnlyList vectorPropertyStoragePropertyNames) - { - this._sqlDataReader = sqlDataReader; - this._vectorPropertyStoragePropertyNames = vectorPropertyStoragePropertyNames; - } - private object? Unwrap(string storageName, object? value) { // Let's make sure our users don't need to learn what DBNull is. @@ -35,11 +28,11 @@ internal SqlDataReaderDictionary(SqlDataReader sqlDataReader, IReadOnlyList 0 && value is string text) + if (vectorProperties.Count > 0 && value is string text) { - for (int i = 0; i < this._vectorPropertyStoragePropertyNames.Count; i++) + for (int i = 0; i < vectorProperties.Count; i++) { - if (string.Equals(storageName, this._vectorPropertyStoragePropertyNames[i], StringComparison.Ordinal)) + if (string.Equals(storageName, vectorProperties[i].StorageName, StringComparison.Ordinal)) { try { @@ -71,7 +64,7 @@ internal SqlDataReaderDictionary(SqlDataReader sqlDataReader, IReadOnlyList this.Unwrap(key, this._sqlDataReader[key]); + get => this.Unwrap(key, sqlDataReader[key]); set => throw new InvalidOperationException(); } @@ -79,7 +72,7 @@ public object? 
this[string key] public ICollection Values => this.GetDictionary().Values; - public int Count => this._sqlDataReader.FieldCount; + public int Count => sqlDataReader.FieldCount; public bool IsReadOnly => true; @@ -96,7 +89,7 @@ public bool ContainsKey(string key) { try { - return this._sqlDataReader.GetOrdinal(key) >= 0; + return sqlDataReader.GetOrdinal(key) >= 0; } catch (IndexOutOfRangeException) { @@ -121,7 +114,7 @@ public bool TryGetValue(string key, out object? value) { try { - value = this.Unwrap(key, this._sqlDataReader[key]); + value = this.Unwrap(key, sqlDataReader[key]); return true; } catch (IndexOutOfRangeException) @@ -135,11 +128,11 @@ public bool TryGetValue(string key, out object? value) { if (this._dictionary is null) { - Dictionary dictionary = new(this._sqlDataReader.FieldCount, StringComparer.Ordinal); - for (int i = 0; i < this._sqlDataReader.FieldCount; i++) + Dictionary dictionary = new(sqlDataReader.FieldCount, StringComparer.Ordinal); + for (int i = 0; i < sqlDataReader.FieldCount; i++) { - string name = this._sqlDataReader.GetName(i); - dictionary.Add(name, this.Unwrap(name, this._sqlDataReader[i])); + string name = sqlDataReader.GetName(i); + dictionary.Add(name, this.Unwrap(name, sqlDataReader[i])); } this._dictionary = dictionary; } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs index aebcf8fe8787..2894828fec29 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs @@ -6,6 +6,7 @@ using System.Text.Json; using Microsoft.Data.SqlClient; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; #pragma warning disable CA2100 // Review SQL queries for security vulnerabilities @@ -18,9 +19,7 @@ internal static SqlCommand CreateTable( string? 
schema, string tableName, bool ifNotExists, - VectorStoreRecordKeyProperty keyProperty, - IReadOnlyList dataProperties, - IReadOnlyList vectorProperties) + VectorStoreRecordModel model) { StringBuilder sb = new(200); if (ifNotExists) @@ -33,47 +32,48 @@ internal static SqlCommand CreateTable( sb.Append("CREATE TABLE "); sb.AppendTableName(schema, tableName); sb.AppendLine(" ("); - string keyColumnName = GetColumnName(keyProperty); - sb.AppendFormat("[{0}] {1} NOT NULL,", keyColumnName, Map(keyProperty)); + sb.AppendFormat("[{0}] {1} NOT NULL,", model.KeyProperty.StorageName, Map(model.KeyProperty)); sb.AppendLine(); - for (int i = 0; i < dataProperties.Count; i++) + + foreach (var property in model.DataProperties) { - sb.AppendFormat("[{0}] {1},", GetColumnName(dataProperties[i]), Map(dataProperties[i])); + sb.AppendFormat("[{0}] {1},", property.StorageName, Map(property)); sb.AppendLine(); } - for (int i = 0; i < vectorProperties.Count; i++) + + foreach (var property in model.VectorProperties) { - sb.AppendFormat("[{0}] VECTOR({1}),", GetColumnName(vectorProperties[i]), vectorProperties[i].Dimensions); + sb.AppendFormat("[{0}] VECTOR({1}),", property.StorageName, property.Dimensions); sb.AppendLine(); } - sb.AppendFormat("PRIMARY KEY ([{0}])", keyColumnName); + + sb.AppendFormat("PRIMARY KEY ([{0}])", model.KeyProperty.StorageName); sb.AppendLine(); sb.AppendLine(");"); // end the table definition - foreach (var dataProperty in dataProperties) + foreach (var dataProperty in model.DataProperties) { if (dataProperty.IsFilterable) { sb.AppendFormat("CREATE INDEX "); - sb.AppendIndexName(tableName, GetColumnName(dataProperty)); + sb.AppendIndexName(tableName, dataProperty.StorageName); sb.AppendFormat(" ON ").AppendTableName(schema, tableName); - sb.AppendFormat("([{0}]);", GetColumnName(dataProperty)); + sb.AppendFormat("([{0}]);", dataProperty.StorageName); sb.AppendLine(); } } - foreach (var vectorProperty in vectorProperties) + foreach (var vectorProperty in model.VectorProperties) { switch (vectorProperty.IndexKind) { - case null: - case "": - case IndexKind.Flat: + case IndexKind.Flat or null or "": // TODO: Move to early validation break; default: throw new NotSupportedException($"Index kind {vectorProperty.IndexKind} is not supported."); } } + sb.Append("END;"); return connection.CreateCommand(sb); @@ -120,8 +120,7 @@ internal static SqlCommand MergeIntoSingle( SqlConnection connection, string? 
schema, string tableName, - VectorStoreRecordKeyProperty keyProperty, - IReadOnlyList properties, + VectorStoreRecordModel model, IDictionary record) { SqlCommand command = connection.CreateCommand(); @@ -131,23 +130,25 @@ internal static SqlCommand MergeIntoSingle( sb.AppendLine(" AS t"); sb.Append("USING (VALUES ("); int paramIndex = 0; - foreach (VectorStoreRecordProperty property in properties) + + foreach (var property in model.Properties) { sb.AppendParameterName(property, ref paramIndex, out string paramName).Append(','); - command.AddParameter(property, paramName, record[GetColumnName(property)]); + command.AddParameter(property, paramName, record[property.StorageName]); } + sb[sb.Length - 1] = ')'; // replace the last comma with a closing parenthesis sb.Append(") AS s ("); - sb.AppendColumnNames(properties); + sb.AppendColumnNames(model.Properties); sb.AppendLine(")"); - sb.AppendFormat("ON (t.[{0}] = s.[{0}])", GetColumnName(keyProperty)).AppendLine(); + sb.AppendFormat("ON (t.[{0}] = s.[{0}])", model.KeyProperty.StorageName).AppendLine(); sb.AppendLine("WHEN MATCHED THEN"); sb.Append("UPDATE SET "); - foreach (VectorStoreRecordProperty property in properties) + foreach (var property in model.Properties) { - if (property != keyProperty) // don't update the key + if (property is not VectorStoreRecordKeyPropertyModel) // don't update the key { - sb.AppendFormat("t.[{0}] = s.[{0}],", GetColumnName(property)); + sb.AppendFormat("t.[{0}] = s.[{0}],", property.StorageName); } } --sb.Length; // remove the last comma @@ -156,12 +157,12 @@ internal static SqlCommand MergeIntoSingle( sb.Append("WHEN NOT MATCHED THEN"); sb.AppendLine(); sb.Append("INSERT ("); - sb.AppendColumnNames(properties); + sb.AppendColumnNames(model.Properties); sb.AppendLine(")"); sb.Append("VALUES ("); - sb.AppendColumnNames(properties, prefix: "s."); + sb.AppendColumnNames(model.Properties, prefix: "s."); sb.AppendLine(")"); - sb.AppendFormat("OUTPUT inserted.[{0}];", GetColumnName(keyProperty)); + sb.AppendFormat("OUTPUT inserted.[{0}];", model.KeyProperty.StorageName); command.CommandText = sb.ToString(); return command; @@ -171,13 +172,12 @@ internal static bool MergeIntoMany( SqlCommand command, string? schema, string tableName, - VectorStoreRecordKeyProperty keyProperty, - IReadOnlyList properties, + VectorStoreRecordModel model, IEnumerable> records) { StringBuilder sb = new(200); // The DECLARE statement creates a table variable to store the keys of the inserted rows. - sb.AppendFormat("DECLARE @InsertedKeys TABLE (KeyColumn {0});", Map(keyProperty)); + sb.AppendFormat("DECLARE @InsertedKeys TABLE (KeyColumn {0});", Map(model.KeyProperty)); sb.AppendLine(); // The MERGE statement performs the upsert operation and outputs the keys of the inserted rows into the table variable. 
sb.Append("MERGE INTO "); @@ -188,10 +188,10 @@ internal static bool MergeIntoMany( foreach (var record in records) { sb.Append('('); - foreach (VectorStoreRecordProperty property in properties) + foreach (var property in model.Properties) { sb.AppendParameterName(property, ref paramIndex, out string paramName).Append(','); - command.AddParameter(property, paramName, record[GetColumnName(property)]); + command.AddParameter(property, paramName, record[property.StorageName]); } sb[sb.Length - 1] = ')'; // replace the last comma with a closing parenthesis sb.AppendLine(","); @@ -206,16 +206,16 @@ internal static bool MergeIntoMany( sb.Length -= (1 + Environment.NewLine.Length); // remove the last comma and newline sb.Append(") AS s ("); // s stands for source - sb.AppendColumnNames(properties); + sb.AppendColumnNames(model.Properties); sb.AppendLine(")"); - sb.AppendFormat("ON (t.[{0}] = s.[{0}])", GetColumnName(keyProperty)).AppendLine(); + sb.AppendFormat("ON (t.[{0}] = s.[{0}])", model.KeyProperty.StorageName).AppendLine(); sb.AppendLine("WHEN MATCHED THEN"); sb.Append("UPDATE SET "); - foreach (VectorStoreRecordProperty property in properties) + foreach (var property in model.Properties) { - if (property != keyProperty) // don't update the key + if (property is not VectorStoreRecordKeyPropertyModel) // don't update the key { - sb.AppendFormat("t.[{0}] = s.[{0}],", GetColumnName(property)); + sb.AppendFormat("t.[{0}] = s.[{0}],", property.StorageName); } } --sb.Length; // remove the last comma @@ -223,12 +223,12 @@ internal static bool MergeIntoMany( sb.Append("WHEN NOT MATCHED THEN"); sb.AppendLine(); sb.Append("INSERT ("); - sb.AppendColumnNames(properties); + sb.AppendColumnNames(model.Properties); sb.AppendLine(")"); sb.Append("VALUES ("); - sb.AppendColumnNames(properties, prefix: "s."); + sb.AppendColumnNames(model.Properties, prefix: "s."); sb.AppendLine(")"); - sb.AppendFormat("OUTPUT inserted.[{0}] INTO @InsertedKeys (KeyColumn);", GetColumnName(keyProperty)); + sb.AppendFormat("OUTPUT inserted.[{0}] INTO @InsertedKeys (KeyColumn);", model.KeyProperty.StorageName); sb.AppendLine(); // The SELECT statement returns the keys of the inserted rows. @@ -240,7 +240,7 @@ internal static bool MergeIntoMany( internal static SqlCommand DeleteSingle( SqlConnection connection, string? schema, string tableName, - VectorStoreRecordKeyProperty keyProperty, object key) + VectorStoreRecordKeyPropertyModel keyProperty, object key) { SqlCommand command = connection.CreateCommand(); @@ -248,7 +248,7 @@ internal static SqlCommand DeleteSingle( StringBuilder sb = new(100); sb.Append("DELETE FROM "); sb.AppendTableName(schema, tableName); - sb.AppendFormat(" WHERE [{0}] = ", GetColumnName(keyProperty)); + sb.AppendFormat(" WHERE [{0}] = ", keyProperty.StorageName); sb.AppendParameterName(keyProperty, ref paramIndex, out string keyParamName); command.AddParameter(keyProperty, keyParamName, key); @@ -258,12 +258,12 @@ internal static SqlCommand DeleteSingle( internal static bool DeleteMany( SqlCommand command, string? 
schema, string tableName, - VectorStoreRecordKeyProperty keyProperty, IEnumerable keys) + VectorStoreRecordKeyPropertyModel keyProperty, IEnumerable keys) { StringBuilder sb = new(100); sb.Append("DELETE FROM "); sb.AppendTableName(schema, tableName); - sb.AppendFormat(" WHERE [{0}] IN (", GetColumnName(keyProperty)); + sb.AppendFormat(" WHERE [{0}] IN (", keyProperty.StorageName); sb.AppendKeyParameterList(keys, command, keyProperty, out bool emptyKeys); sb.Append(')'); // close the IN clause @@ -278,8 +278,7 @@ internal static bool DeleteMany( internal static SqlCommand SelectSingle( SqlConnection sqlConnection, string? schema, string collectionName, - VectorStoreRecordKeyProperty keyProperty, - IReadOnlyList properties, + VectorStoreRecordModel model, object key, bool includeVectors) { @@ -288,14 +287,14 @@ internal static SqlCommand SelectSingle( int paramIndex = 0; StringBuilder sb = new(200); sb.AppendFormat("SELECT "); - sb.AppendColumnNames(properties, includeVectors: includeVectors); + sb.AppendColumnNames(model.Properties, includeVectors: includeVectors); sb.AppendLine(); sb.Append("FROM "); sb.AppendTableName(schema, collectionName); sb.AppendLine(); - sb.AppendFormat("WHERE [{0}] = ", GetColumnName(keyProperty)); - sb.AppendParameterName(keyProperty, ref paramIndex, out string keyParamName); - command.AddParameter(keyProperty, keyParamName, key); + sb.AppendFormat("WHERE [{0}] = ", model.KeyProperty.StorageName); + sb.AppendParameterName(model.KeyProperty, ref paramIndex, out string keyParamName); + command.AddParameter(model.KeyProperty, keyParamName, key); command.CommandText = sb.ToString(); return command; @@ -303,20 +302,19 @@ internal static SqlCommand SelectSingle( internal static bool SelectMany( SqlCommand command, string? schema, string tableName, - VectorStoreRecordKeyProperty keyProperty, - IReadOnlyList properties, + VectorStoreRecordModel model, IEnumerable keys, bool includeVectors) { StringBuilder sb = new(200); sb.AppendFormat("SELECT "); - sb.AppendColumnNames(properties, includeVectors: includeVectors); + sb.AppendColumnNames(model.Properties, includeVectors: includeVectors); sb.AppendLine(); sb.Append("FROM "); sb.AppendTableName(schema, tableName); sb.AppendLine(); - sb.AppendFormat("WHERE [{0}] IN (", GetColumnName(keyProperty)); - sb.AppendKeyParameterList(keys, command, keyProperty, out bool emptyKeys); + sb.AppendFormat("WHERE [{0}] IN (", model.KeyProperty.StorageName); + sb.AppendKeyParameterList(keys, command, model.KeyProperty, out bool emptyKeys); sb.Append(')'); // close the IN clause if (emptyKeys) @@ -330,9 +328,8 @@ internal static bool SelectMany( internal static SqlCommand SelectVector( SqlConnection connection, string? 
schema, string tableName, - VectorStoreRecordVectorProperty vectorProperty, - IReadOnlyList properties, - IReadOnlyDictionary storagePropertyNamesMap, + VectorStoreRecordVectorPropertyModel vectorProperty, + VectorStoreRecordModel model, VectorSearchOptions options, ReadOnlyMemory vector) { @@ -344,10 +341,10 @@ internal static SqlCommand SelectVector( StringBuilder sb = new(200); sb.AppendFormat("SELECT "); - sb.AppendColumnNames(properties, includeVectors: options.IncludeVectors); + sb.AppendColumnNames(model.Properties, includeVectors: options.IncludeVectors); sb.AppendLine(","); sb.AppendFormat("VECTOR_DISTANCE('{0}', {1}, CAST(@vector AS VECTOR({2}))) AS [score]", - distanceMetric, GetColumnName(vectorProperty), vector.Length); + distanceMetric, vectorProperty.StorageName, vector.Length); sb.AppendLine(); sb.Append("FROM "); sb.AppendTableName(schema, tableName); @@ -356,7 +353,7 @@ internal static SqlCommand SelectVector( { int startParamIndex = command.Parameters.Count; - SqlServerFilterTranslator translator = new(storagePropertyNamesMap, options.Filter, sb, startParamIndex: startParamIndex); + SqlServerFilterTranslator translator = new(model, options.Filter, sb, startParamIndex: startParamIndex); translator.Translate(appendWhere: true); List parameters = translator.ParameterValues; @@ -376,10 +373,7 @@ internal static SqlCommand SelectVector( return command; } - internal static string GetColumnName(VectorStoreRecordProperty property) - => property.StoragePropertyName ?? property.DataModelPropertyName; - - internal static StringBuilder AppendParameterName(this StringBuilder sb, VectorStoreRecordProperty property, ref int paramIndex, out string parameterName) + internal static StringBuilder AppendParameterName(this StringBuilder sb, VectorStoreRecordPropertyModel property, ref int paramIndex, out string parameterName) { // In SQL Server, parameter names cannot be just a number like "@1". // Parameter names must start with an alphabetic character or an underscore @@ -388,10 +382,9 @@ internal static StringBuilder AppendParameterName(this StringBuilder sb, VectorS // is valid parameter name (it can contain whitespaces, or start with a number), // we just append the ASCII letters, stop on the first non-ASCII letter // and append the index. - string columnName = GetColumnName(property); int index = sb.Length; sb.Append('@'); - foreach (char character in columnName) + foreach (char character in property.StorageName) { // We don't call APIs like char.IsWhitespace as they are expensive // as they need to handle all Unicode characters. @@ -435,14 +428,14 @@ internal static StringBuilder AppendTableName(this StringBuilder sb, string? sch } private static StringBuilder AppendColumnNames(this StringBuilder sb, - IEnumerable properties, + IEnumerable properties, string? prefix = null, bool includeVectors = true) { bool any = false; - foreach (VectorStoreRecordProperty property in properties) + foreach (var property in properties) { - if (!includeVectors && property is VectorStoreRecordVectorProperty) + if (!includeVectors && property is VectorStoreRecordVectorPropertyModel) { continue; } @@ -452,7 +445,7 @@ private static StringBuilder AppendColumnNames(this StringBuilder sb, sb.Append(prefix); } // Use square brackets to escape column names. 
- sb.AppendFormat("[{0}],", GetColumnName(property)); + sb.AppendFormat("[{0}],", property.StorageName); any = true; } @@ -465,7 +458,7 @@ private static StringBuilder AppendColumnNames(this StringBuilder sb, } private static StringBuilder AppendKeyParameterList(this StringBuilder sb, - IEnumerable keys, SqlCommand command, VectorStoreRecordKeyProperty keyProperty, out bool emptyKeys) + IEnumerable keys, SqlCommand command, VectorStoreRecordKeyPropertyModel keyProperty, out bool emptyKeys) { int keyIndex = 0; foreach (TKey key in keys) @@ -522,11 +515,11 @@ private static SqlCommand CreateCommand(this SqlConnection connection, StringBui return command; } - private static void AddParameter(this SqlCommand command, VectorStoreRecordProperty property, string name, object? value) + private static void AddParameter(this SqlCommand command, VectorStoreRecordPropertyModel property, string name, object? value) { switch (value) { - case null when property.PropertyType == typeof(byte[]): + case null when property.Type == typeof(byte[]): command.Parameters.Add(name, System.Data.SqlDbType.VarBinary).Value = DBNull.Value; break; case null: @@ -545,15 +538,15 @@ private static void AddParameter(this SqlCommand command, VectorStoreRecordPrope } } - private static string Map(VectorStoreRecordProperty property) => property.PropertyType switch + private static string Map(VectorStoreRecordPropertyModel property) => property.Type switch { Type t when t == typeof(byte) => "TINYINT", Type t when t == typeof(short) => "SMALLINT", Type t when t == typeof(int) => "INT", Type t when t == typeof(long) => "BIGINT", Type t when t == typeof(Guid) => "UNIQUEIDENTIFIER", - Type t when t == typeof(string) && property is VectorStoreRecordKeyProperty => "NVARCHAR(4000)", - Type t when t == typeof(string) && property is VectorStoreRecordDataProperty { IsFilterable: true } => "NVARCHAR(4000)", + Type t when t == typeof(string) && property is VectorStoreRecordKeyPropertyModel => "NVARCHAR(4000)", + Type t when t == typeof(string) && property is VectorStoreRecordDataPropertyModel { IsFilterable: true } => "NVARCHAR(4000)", Type t when t == typeof(string) => "NVARCHAR(MAX)", Type t when t == typeof(byte[]) => "VARBINARY(MAX)", Type t when t == typeof(bool) => "BIT", @@ -564,7 +557,7 @@ private static void AddParameter(this SqlCommand command, VectorStoreRecordPrope Type t when t == typeof(decimal) => "DECIMAL", Type t when t == typeof(double) => "FLOAT", Type t when t == typeof(float) => "REAL", - _ => throw new NotSupportedException($"Type {property.PropertyType} is not supported.") + _ => throw new NotSupportedException($"Type {property.Type} is not supported.") }; // Source: https://learn.microsoft.com/sql/t-sql/functions/vector-distance-transact-sql diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs index 3c8c49b663d4..eed61838df81 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.SqlServer; @@ -12,11 +13,23 @@ internal static class SqlServerConstants internal const int MaxIndexNameLength = 128; + public static readonly VectorStoreRecordModelBuildingOptions ModelBuildingOptions = new() + { + RequiresAtLeastOneVector = false, + 
SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + + SupportedKeyPropertyTypes = SqlServerConstants.SupportedKeyTypes, + SupportedDataPropertyTypes = SqlServerConstants.SupportedDataTypes, + SupportedEnumerableDataPropertyElementTypes = [], + SupportedVectorPropertyTypes = SqlServerConstants.SupportedVectorTypes + }; + internal static readonly HashSet SupportedKeyTypes = [ - typeof(int), // INT + typeof(int), // INT typeof(long), // BIGINT - typeof(string), // VARCHAR + typeof(string), // VARCHAR typeof(Guid), // UNIQUEIDENTIFIER typeof(DateTime), // DATETIME2 typeof(byte[]) // VARBINARY diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs index 3bd3b2f97e0b..638586d4f48b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs @@ -5,6 +5,7 @@ using System.Collections.Generic; using System.Linq.Expressions; using System.Text; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.SqlServer; @@ -14,11 +15,11 @@ internal sealed class SqlServerFilterTranslator : SqlFilterTranslator private int _parameterIndex; internal SqlServerFilterTranslator( - IReadOnlyDictionary storagePropertyNames, + VectorStoreRecordModel model, LambdaExpression lambdaExpression, StringBuilder sql, int startParamIndex) - : base(storagePropertyNames, lambdaExpression, sql) + : base(model, lambdaExpression, sql) { this._parameterIndex = startParamIndex; } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index 891e65410fad..e95ba5cb07d5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -8,6 +8,7 @@ using System.Threading.Tasks; using Microsoft.Data.SqlClient; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.SqlServer; @@ -24,7 +25,7 @@ public sealed class SqlServerVectorStoreRecordCollection private readonly string _connectionString; private readonly SqlServerVectorStoreRecordCollectionOptions _options; - private readonly VectorStoreRecordPropertyReader _propertyReader; + private readonly VectorStoreRecordModel _model; private readonly IVectorStoreRecordMapper> _mapper; /// @@ -41,53 +42,23 @@ public SqlServerVectorStoreRecordCollection( Verify.NotNullOrWhiteSpace(connectionString); Verify.NotNull(name); - VectorStoreRecordPropertyReader propertyReader = new(typeof(TRecord), - options?.RecordDefinition, - new() - { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true, - }); - - if (VectorStoreRecordPropertyVerification.IsGenericDataModel(typeof(TRecord))) - { - VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.Mapper is not null, SqlServerConstants.SupportedKeyTypes); - } - else - { - propertyReader.VerifyKeyProperties(SqlServerConstants.SupportedKeyTypes); - } - propertyReader.VerifyDataProperties(SqlServerConstants.SupportedDataTypes, supportEnumerable: false); - propertyReader.VerifyVectorProperties(SqlServerConstants.SupportedVectorTypes); + 
this._model = new VectorStoreRecordModelBuilder(SqlServerConstants.ModelBuildingOptions) + .Build(typeof(TRecord), options?.RecordDefinition); this._connectionString = connectionString; this.CollectionName = name; // We need to create a copy, so any changes made to the option bag after // the ctor call do not affect this instance. - this._options = options is null ? s_defaultOptions + this._options = options is null + ? s_defaultOptions : new() { Schema = options.Schema, Mapper = options.Mapper, RecordDefinition = options.RecordDefinition, }; - this._propertyReader = propertyReader; - if (options is not null && options.Mapper is not null) - { - this._mapper = options.Mapper; - } - else if (typeof(TRecord).IsGenericType && typeof(TRecord).GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) - { - this._mapper = (new GenericRecordMapper(propertyReader) as IVectorStoreRecordMapper>)!; - } - else - { - propertyReader.VerifyHasParameterlessConstructor(); - - this._mapper = new RecordMapper(propertyReader); - } + this._mapper = this._options.Mapper ?? new RecordMapper(this._model); } /// @@ -118,11 +89,11 @@ public Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken private async Task CreateCollectionAsync(bool ifNotExists, CancellationToken cancellationToken) { - foreach (var vectorProperty in this._propertyReader.VectorProperties) + foreach (var vectorProperty in this._model.VectorProperties) { if (vectorProperty.Dimensions is not > 0) { - throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.DataModelPropertyName}' must be set to a positive integer to create a collection."); + throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' must be set to a positive integer to create a collection."); } } @@ -132,9 +103,7 @@ private async Task CreateCollectionAsync(bool ifNotExists, CancellationToken can this._options.Schema, this.CollectionName, ifNotExists, - this._propertyReader.KeyProperty, - this._propertyReader.DataProperties, - this._propertyReader.VectorProperties); + this._model); await ExceptionWrapper.WrapAsync(connection, command, static (cmd, ct) => cmd.ExecuteNonQueryAsync(ct), @@ -163,7 +132,7 @@ public async Task DeleteAsync(TKey key, CancellationToken cancellationToken = de connection, this._options.Schema, this.CollectionName, - this._propertyReader.KeyProperty, + this._model.KeyProperty, key); await ExceptionWrapper.WrapAsync(connection, command, @@ -197,7 +166,7 @@ public async Task DeleteAsync(IEnumerable keys, CancellationToken cancella command, this._options.Schema, this.CollectionName, - this._propertyReader.KeyProperty, + this._model.KeyProperty, keys.Skip(taken).Take(SqlServerConstants.MaxParameterCount))) { break; // keys is empty, there is nothing to delete @@ -250,8 +219,7 @@ public async Task DeleteAsync(IEnumerable keys, CancellationToken cancella connection, this._options.Schema, this.CollectionName, - this._propertyReader.KeyProperty, - this._propertyReader.Properties, + this._model, key, includeVectors); @@ -265,7 +233,7 @@ static async (cmd, ct) => return reader.HasRows ? 
this._mapper.MapFromStorageToDataModel( - new SqlDataReaderDictionary(reader, this._propertyReader.VectorPropertyStoragePropertyNames), + new SqlDataReaderDictionary(reader, this._model.VectorProperties), new() { IncludeVectors = includeVectors }) : default; } @@ -293,8 +261,7 @@ public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecor command, this._options.Schema, this.CollectionName, - this._propertyReader.KeyProperty, - this._propertyReader.Properties, + this._model, keys.Skip(taken).Take(SqlServerConstants.MaxParameterCount), includeVectors)) { @@ -313,7 +280,7 @@ public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecor while (await ExceptionWrapper.WrapReadAsync(reader, cancellationToken, "GetBatch", this.CollectionName).ConfigureAwait(false)) { yield return this._mapper.MapFromStorageToDataModel( - new SqlDataReaderDictionary(reader, this._propertyReader.VectorPropertyStoragePropertyNames), + new SqlDataReaderDictionary(reader, this._model.VectorProperties), new() { IncludeVectors = includeVectors }); } } while (command.Parameters.Count == SqlServerConstants.MaxParameterCount); @@ -329,8 +296,7 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati connection, this._options.Schema, this.CollectionName, - this._propertyReader.KeyProperty, - this._propertyReader.Properties, + this._model, this._mapper.MapFromDataToStorageModel(record)); return await ExceptionWrapper.WrapAsync(connection, command, @@ -352,7 +318,7 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, await connection.OpenAsync(cancellationToken).ConfigureAwait(false); using SqlTransaction transaction = connection.BeginTransaction(); - int parametersPerRecord = this._propertyReader.Properties.Count; + int parametersPerRecord = this._model.Properties.Count; int taken = 0; try @@ -370,8 +336,7 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, command, this._options.Schema, this.CollectionName, - this._propertyReader.KeyProperty, - this._propertyReader.Properties, + this._model, records.Skip(taken) .Take(SqlServerConstants.MaxParameterCount / parametersPerRecord) .Select(this._mapper.MapFromDataToStorageModel))) @@ -422,10 +387,10 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, } else { - var keyProperty = this._propertyReader.KeyPropertyInfo; + var keyProperty = this._model.KeyProperty; foreach (var record in records) { - yield return (TKey)keyProperty.GetValue(record)!; + yield return (TKey)keyProperty.GetValueAsObject(record!)!; } } } @@ -449,7 +414,7 @@ public async Task> VectorizedSearchAsync(T } var searchOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions); + var vectorProperty = this._model.GetVectorPropertyOrSingle(searchOptions); #pragma warning disable CA2000 // Dispose objects before losing scope // This connection will be disposed by the ReadVectorSearchResultsAsync @@ -461,8 +426,7 @@ public async Task> VectorizedSearchAsync(T this._options.Schema, this.CollectionName, vectorProperty, - this._propertyReader.Properties, - this._propertyReader.StoragePropertyNamesMap, + this._model, searchOptions, allowed); @@ -483,7 +447,7 @@ private async IAsyncEnumerable> ReadVectorSearchResu try { StorageToDataModelMapperOptions options = new() { IncludeVectors = includeVectors }; - var vectorPropertyStoragePropertyNames = includeVectors ? this._propertyReader.VectorPropertyStoragePropertyNames : []; + var vectorProperties = includeVectors ? 
this._model.VectorProperties : []; using SqlDataReader reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); int scoreIndex = -1; @@ -495,7 +459,7 @@ private async IAsyncEnumerable> ReadVectorSearchResu } yield return new VectorSearchResult( - this._mapper.MapFromStorageToDataModel(new SqlDataReaderDictionary(reader, vectorPropertyStoragePropertyNames), options), + this._mapper.MapFromStorageToDataModel(new SqlDataReaderDictionary(reader, vectorProperties), options), reader.GetDouble(scoreIndex)); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs index 44e0c7a63026..5bcd273203a9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Sqlite; @@ -13,6 +14,18 @@ internal static class SqliteConstants /// public const string VectorSearchExtensionName = "vec0"; + public static readonly VectorStoreRecordModelBuildingOptions ModelBuildingOptions = new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + + SupportedKeyPropertyTypes = SqliteConstants.SupportedKeyTypes, + SupportedDataPropertyTypes = SqliteConstants.SupportedDataTypes, + SupportedEnumerableDataPropertyElementTypes = [], + SupportedVectorPropertyTypes = SqliteConstants.SupportedVectorTypes + }; + /// A of types that a key on the provided model may have. public static readonly HashSet SupportedKeyTypes = [ @@ -24,25 +37,16 @@ internal static class SqliteConstants public static readonly HashSet SupportedDataTypes = [ typeof(int), - typeof(int?), typeof(long), - typeof(long?), typeof(ulong), - typeof(ulong?), typeof(short), - typeof(short?), typeof(ushort), - typeof(ushort?), typeof(string), typeof(bool), - typeof(bool?), typeof(float), - typeof(float?), typeof(double), - typeof(double?), typeof(decimal), - typeof(decimal?), - typeof(byte[]), + typeof(byte[]) ]; /// A of types that vector properties on the provided model may have. 
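
The SqliteConstants hunk above introduces a shared VectorStoreRecordModelBuildingOptions that the collection passes to VectorStoreRecordModelBuilder instead of the old VectorStoreRecordPropertyReader checks. As a minimal sketch of how the new ConnectorSupport surface is used by the mappers in this patch (the type ModelUsageSketch, the method ToStorage, and the record/definition parameters are hypothetical illustration, not part of the change; the Build, StorageName and GetValueAsObject calls mirror the usage shown in the hunks above):

// Sketch only: builds the model for a record type and projects it to storage
// column names, the way the SQL Server / SQLite mappers in this patch do.
using System;
using System.Collections.Generic;
using Microsoft.Extensions.VectorData;
using Microsoft.Extensions.VectorData.ConnectorSupport;

internal static class ModelUsageSketch
{
    public static Dictionary<string, object?> ToStorage<TRecord>(TRecord record, VectorStoreRecordDefinition? definition)
    {
        // Built once per collection; ModelBuildingOptions declares the supported
        // key/data/vector property types instead of the old Verify* calls.
        VectorStoreRecordModel model = new VectorStoreRecordModelBuilder(SqliteConstants.ModelBuildingOptions)
            .Build(typeof(TRecord), definition);

        var map = new Dictionary<string, object?>(StringComparer.Ordinal)
        {
            // StorageName plays the role of the removed GetColumnName helper
            // (StoragePropertyName falling back to the model property name).
            [model.KeyProperty.StorageName] = model.KeyProperty.GetValueAsObject(record!)
        };

        foreach (var property in model.DataProperties)
        {
            map[property.StorageName] = property.GetValueAsObject(record!);
        }

        return map;
    }
}
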
diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs index 963c1184d274..249778162d92 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs @@ -4,6 +4,7 @@ using System.Collections; using System.Collections.Generic; using System.Linq.Expressions; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Sqlite; @@ -11,8 +12,8 @@ internal sealed class SqliteFilterTranslator : SqlFilterTranslator { private readonly Dictionary _parameters = new(); - internal SqliteFilterTranslator(IReadOnlyDictionary storagePropertyNames, - LambdaExpression lambdaExpression) : base(storagePropertyNames, lambdaExpression, sql: null) + internal SqliteFilterTranslator(VectorStoreRecordModel model, LambdaExpression lambdaExpression) + : base(model, lambdaExpression, sql: null) { } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteGenericDataModelMapper.cs deleted file mode 100644 index f6b59b2c926b..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteGenericDataModelMapper.cs +++ /dev/null @@ -1,152 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using Microsoft.Extensions.VectorData; - -namespace Microsoft.SemanticKernel.Connectors.Sqlite; - -/// -/// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within SQLite. -/// -internal sealed class SqliteGenericDataModelMapper : - IVectorStoreRecordMapper, Dictionary>, - IVectorStoreRecordMapper, Dictionary> -{ - /// with helpers for reading vector store model properties and their attributes. - private readonly VectorStoreRecordPropertyReader _propertyReader; - - /// - /// Initializes a new instance of the class. - /// - /// A that defines the schema of the data in the database. - public SqliteGenericDataModelMapper(VectorStoreRecordPropertyReader propertyReader) - { - Verify.NotNull(propertyReader); - - this._propertyReader = propertyReader; - - // Validate property types. 
- this._propertyReader.VerifyDataProperties(SqliteConstants.SupportedDataTypes, supportEnumerable: false); - this._propertyReader.VerifyVectorProperties(SqliteConstants.SupportedVectorTypes); - } - - #region Implementation of IVectorStoreRecordMapper, Dictionary> - - public Dictionary MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) - { - return this.InternalMapFromDataToStorageModel(dataModel); - } - - public VectorStoreGenericDataModel MapFromStorageToDataModel(Dictionary storageModel, StorageToDataModelMapperOptions options) - { - return this.InternalMapFromStorageToDataModel(storageModel, options); - } - - #endregion - - #region Implementation of IVectorStoreRecordMapper, Dictionary> - - public Dictionary MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) - { - return this.InternalMapFromDataToStorageModel(dataModel); - } - - VectorStoreGenericDataModel IVectorStoreRecordMapper, Dictionary>.MapFromStorageToDataModel(Dictionary storageModel, StorageToDataModelMapperOptions options) - { - return this.InternalMapFromStorageToDataModel(storageModel, options); - } - - #endregion - - #region private - - private Dictionary InternalMapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) - where TKey : notnull - { - var properties = new Dictionary - { - // Add key property - { this._propertyReader.KeyPropertyStoragePropertyName, dataModel.Key } - }; - - // Add data properties - if (dataModel.Data is not null) - { - foreach (var property in this._propertyReader.DataProperties) - { - if (dataModel.Data.TryGetValue(property.DataModelPropertyName, out var dataValue)) - { - properties.Add(this._propertyReader.GetStoragePropertyName(property.DataModelPropertyName), dataValue); - } - } - } - - // Add vector properties - if (dataModel.Vectors is not null) - { - foreach (var property in this._propertyReader.VectorProperties) - { - if (dataModel.Vectors.TryGetValue(property.DataModelPropertyName, out var vectorValue)) - { - object? result = null; - - if (vectorValue is not null) - { - var vector = (ReadOnlyMemory)vectorValue; - result = SqliteVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); - } - - properties.Add(this._propertyReader.GetStoragePropertyName(property.DataModelPropertyName), result); - } - } - } - - return properties; - } - - private VectorStoreGenericDataModel InternalMapFromStorageToDataModel(Dictionary storageModel, StorageToDataModelMapperOptions options) - where TKey : notnull - { - TKey key; - var dataProperties = new Dictionary(); - var vectorProperties = new Dictionary(); - - // Process key property. - if (storageModel.TryGetValue(this._propertyReader.KeyPropertyStoragePropertyName, out var keyObject) && keyObject is not null) - { - key = (TKey)keyObject; - } - else - { - throw new VectorStoreRecordMappingException("No key property was found in the record retrieved from storage."); - } - - // Process data properties. 
- foreach (var property in this._propertyReader.DataProperties) - { - if (storageModel.TryGetValue(this._propertyReader.GetStoragePropertyName(property.DataModelPropertyName), out var dataValue)) - { - dataProperties.Add(property.DataModelPropertyName, dataValue); - } - } - - // Process vector properties - if (options.IncludeVectors) - { - foreach (var property in this._propertyReader.VectorProperties) - { - if (storageModel.TryGetValue(this._propertyReader.GetStoragePropertyName(property.DataModelPropertyName), out var vectorValue) && - vectorValue is byte[] vectorBytes) - { - var vector = SqliteVectorStoreRecordPropertyMapping.MapVectorForDataModel(vectorBytes); - vectorProperties.Add(property.DataModelPropertyName, vector); - } - } - } - - return new VectorStoreGenericDataModel(key) { Data = dataProperties, Vectors = vectorProperties }; - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs index 6707bf482fed..01a9324a008d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs @@ -39,7 +39,7 @@ public static DbCommand BuildCreateTableCommand(SqliteConnection connection, str { var builder = new StringBuilder(); - builder.AppendLine($"CREATE TABLE {(ifNotExists ? "IF NOT EXISTS " : string.Empty)}{tableName} ("); + builder.AppendLine($"""CREATE TABLE {(ifNotExists ? "IF NOT EXISTS " : string.Empty)}"{tableName.Replace("\"", "\"\"")}" ("""); builder.AppendLine(string.Join(",\n", columns.Select(GetColumnDefinition))); builder.Append(");"); @@ -68,7 +68,7 @@ public static DbCommand BuildCreateVirtualTableCommand( { var builder = new StringBuilder(); - builder.AppendLine($"CREATE VIRTUAL TABLE {(ifNotExists ? "IF NOT EXISTS " : string.Empty)}{tableName} USING {extensionName}("); + builder.AppendLine($"CREATE VIRTUAL TABLE {(ifNotExists ? "IF NOT EXISTS " : string.Empty)}'{tableName.Replace("'", "''")}' USING {extensionName}("); builder.AppendLine(string.Join(",\n", columns.Select(GetColumnDefinition))); builder.Append(");"); diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 07df93d008d5..55ccffb60a9c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -3,12 +3,14 @@ using System; using System.Collections.Generic; using System.Data.Common; +using System.Diagnostics; using System.Linq; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; using Microsoft.Data.Sqlite; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Sqlite; @@ -37,23 +39,26 @@ public class SqliteVectorStoreRecordCollection : /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); - /// Contains helpers for reading vector store model properties and their attributes. - private readonly VectorStoreRecordPropertyReader _propertyReader; + /// The model for this collection. 
+ private readonly VectorStoreRecordModel _model; /// Flag which indicates whether vector properties exist in the consumer data model. private readonly bool _vectorPropertiesExist; + /// The storage name of the key property. + private readonly string _keyStorageName; + /// Collection of properties to operate in SQLite data table. - private readonly Lazy> _dataTableProperties; + private readonly List _dataTableProperties = []; /// Collection of properties to operate in SQLite vector table. - private readonly Lazy> _vectorTableProperties; + private readonly List _vectorTableProperties = []; /// Collection of property names to operate in SQLite data table. - private readonly Lazy> _dataTableStoragePropertyNames; + private readonly List _dataTableStoragePropertyNames = []; /// Collection of property names to operate in SQLite vector table. - private readonly Lazy> _vectorTableStoragePropertyNames; + private readonly List _vectorTableStoragePropertyNames = []; /// Table name in SQLite for data properties. private readonly string _dataTableName; @@ -81,8 +86,6 @@ public SqliteVectorStoreRecordCollection( // Verify. Verify.NotNull(connectionString); Verify.NotNullOrWhiteSpace(collectionName); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.DictionaryCustomMapper is not null, SqliteConstants.SupportedKeyTypes); - VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); // Assign. this._connectionString = connectionString; @@ -93,25 +96,41 @@ public SqliteVectorStoreRecordCollection( this._dataTableName = this.CollectionName; this._vectorTableName = GetVectorTableName(this._dataTableName, this._options); - this._propertyReader = new VectorStoreRecordPropertyReader(typeof(TRecord), this._options.VectorStoreRecordDefinition, new() - { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true - }); - - // Validate property types. - this._propertyReader.VerifyKeyProperties(SqliteConstants.SupportedKeyTypes); + this._model = new VectorStoreRecordModelBuilder(SqliteConstants.ModelBuildingOptions) + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); - this._vectorPropertiesExist = this._propertyReader.VectorProperties.Count > 0; + this._vectorPropertiesExist = this._model.VectorProperties.Count > 0; - this._dataTableProperties = new(() => [this._propertyReader.KeyProperty, .. this._propertyReader.DataProperties]); - this._vectorTableProperties = new(() => [this._propertyReader.KeyProperty, .. this._propertyReader.VectorProperties]); + // Populate some collections of properties + this._keyStorageName = this._model.KeyProperty.StorageName; - this._dataTableStoragePropertyNames = new(() => [this._propertyReader.KeyPropertyStoragePropertyName, .. this._propertyReader.DataPropertyStoragePropertyNames]); - this._vectorTableStoragePropertyNames = new(() => [this._propertyReader.KeyPropertyStoragePropertyName, .. 
this._propertyReader.VectorPropertyStoragePropertyNames]); + foreach (var property in this._model.Properties) + { + switch (property) + { + case VectorStoreRecordKeyPropertyModel keyProperty: + this._dataTableProperties.Add(keyProperty); + this._vectorTableProperties.Add(keyProperty); + this._dataTableStoragePropertyNames.Add(keyProperty.StorageName); + this._vectorTableStoragePropertyNames.Add(keyProperty.StorageName); + break; + + case VectorStoreRecordDataPropertyModel dataProperty: + this._dataTableProperties.Add(dataProperty); + this._dataTableStoragePropertyNames.Add(dataProperty.StorageName); + break; + + case VectorStoreRecordVectorPropertyModel vectorProperty: + this._vectorTableProperties.Add(vectorProperty); + this._vectorTableStoragePropertyNames.Add(vectorProperty.StorageName); + break; + + default: + throw new UnreachableException(); + } + } - this._mapper = this.InitializeMapper(); + this._mapper = this._options.DictionaryCustomMapper ?? new SqliteVectorStoreRecordMapper(this._model); } /// @@ -176,7 +195,7 @@ public virtual Task> VectorizedSearchAsync } var searchOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(searchOptions); + var vectorProperty = this._model.GetVectorPropertyOrSingle(searchOptions); var mappedArray = SqliteVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); @@ -186,7 +205,7 @@ public virtual Task> VectorizedSearchAsync var conditions = new List() { - new SqliteWhereMatchCondition(this._propertyReader.GetStoragePropertyName(vectorProperty.DataModelPropertyName), mappedArray), + new SqliteWhereMatchCondition(vectorProperty.StorageName, mappedArray), new SqliteWhereEqualsCondition(LimitPropertyName, limit) }; @@ -211,7 +230,7 @@ public virtual Task> VectorizedSearchAsync } else if (searchOptions.Filter is not null) { - SqliteFilterTranslator translator = new(this._propertyReader.StoragePropertyNamesMap, searchOptions.Filter); + SqliteFilterTranslator translator = new(this._model, searchOptions.Filter); translator.Translate(appendWhere: false); extraWhereFilter = translator.Clause.ToString(); extraParameters = translator.Parameters; @@ -360,12 +379,15 @@ private async IAsyncEnumerable> EnumerateAndMapSearc var leftTableProperties = new List { DistancePropertyName }; - List properties = [this._propertyReader.KeyProperty, .. this._propertyReader.DataProperties]; + List properties = [this._model.KeyProperty, .. 
this._model.DataProperties]; if (searchOptions.IncludeVectors) { - leftTableProperties.AddRange(this._propertyReader.VectorPropertyStoragePropertyNames); - properties.AddRange(this._propertyReader.VectorProperties); + foreach (var property in this._model.VectorProperties) + { + leftTableProperties.Add(property.StorageName); + } + properties.AddRange(this._model.VectorProperties); } using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); @@ -373,9 +395,9 @@ private async IAsyncEnumerable> EnumerateAndMapSearc connection, this._vectorTableName, this._dataTableName, - this._propertyReader.KeyPropertyStoragePropertyName, + this._keyStorageName, leftTableProperties, - this._dataTableStoragePropertyNames.Value, + this._dataTableStoragePropertyNames, conditions, extraWhereFilter, extraParameters, @@ -400,18 +422,12 @@ private async IAsyncEnumerable> EnumerateAndMapSearc } } - private Task InternalCreateCollectionAsync(SqliteConnection connection, bool ifNotExists, CancellationToken cancellationToken) + private async Task InternalCreateCollectionAsync(SqliteConnection connection, bool ifNotExists, CancellationToken cancellationToken) { - List dataTableColumns = SqliteVectorStoreRecordPropertyMapping.GetColumns( - this._dataTableProperties.Value, - this._propertyReader.StoragePropertyNamesMap); + List dataTableColumns = SqliteVectorStoreRecordPropertyMapping.GetColumns(this._dataTableProperties); - List tasks = [this.CreateTableAsync( - connection, - this._dataTableName, - dataTableColumns, - ifNotExists, - cancellationToken)]; + await this.CreateTableAsync(connection, this._dataTableName, dataTableColumns, ifNotExists, cancellationToken) + .ConfigureAwait(false); if (this._vectorPropertiesExist) { @@ -419,20 +435,11 @@ private Task InternalCreateCollectionAsync(SqliteConnection connection, bool ifN this._options.VectorSearchExtensionName : SqliteConstants.VectorSearchExtensionName; - List vectorTableColumns = SqliteVectorStoreRecordPropertyMapping.GetColumns( - this._vectorTableProperties.Value, - this._propertyReader.StoragePropertyNamesMap); + List vectorTableColumns = SqliteVectorStoreRecordPropertyMapping.GetColumns(this._vectorTableProperties); - tasks.Add(this.CreateVirtualTableAsync( - connection, - this._vectorTableName, - vectorTableColumns, - ifNotExists, - extensionName!, - cancellationToken)); + await this.CreateVirtualTableAsync(connection, this._vectorTableName, vectorTableColumns, ifNotExists, extensionName!, cancellationToken) + .ConfigureAwait(false); } - - return Task.WhenAll(tasks); } private Task CreateTableAsync(SqliteConnection connection, string tableName, List columns, bool ifNotExists, CancellationToken cancellationToken) @@ -470,7 +477,7 @@ private Task DropTableAsync(SqliteConnection connection, string tableName, { Verify.NotNull(key); - var condition = new SqliteWhereEqualsCondition(this._propertyReader.KeyPropertyStoragePropertyName, key) + var condition = new SqliteWhereEqualsCondition(this._keyStorageName, key) { TableName = this._dataTableName }; @@ -492,7 +499,7 @@ private IAsyncEnumerable InternalGetBatchAsync( Verify.True(keysList.Count > 0, "Number of provided keys should be greater than zero."); - var condition = new SqliteWhereInCondition(this._propertyReader.KeyPropertyStoragePropertyName, keysList) + var condition = new SqliteWhereInCondition(this._keyStorageName, keysList) { TableName = this._dataTableName }; @@ -511,7 +518,7 @@ private async IAsyncEnumerable InternalGetBatchAsync( bool includeVectors = 
options?.IncludeVectors is true && this._vectorPropertiesExist; DbCommand command; - List properties = [this._propertyReader.KeyProperty, .. this._propertyReader.DataProperties]; + List properties = [this._model.KeyProperty, .. this._model.DataProperties]; if (includeVectors) { @@ -519,19 +526,19 @@ private async IAsyncEnumerable InternalGetBatchAsync( connection, this._dataTableName, this._vectorTableName, - this._propertyReader.KeyPropertyStoragePropertyName, - this._dataTableStoragePropertyNames.Value, - this._propertyReader.VectorPropertyStoragePropertyNames, + this._keyStorageName, + this._dataTableStoragePropertyNames, + this._model.VectorProperties.Select(p => p.StorageName).ToList(), [condition]); - properties.AddRange(this._propertyReader.VectorProperties); + properties.AddRange(this._model.VectorProperties); } else { command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectCommand( connection, this._dataTableName, - this._dataTableStoragePropertyNames.Value, + this._dataTableStoragePropertyNames, [condition]); } @@ -560,11 +567,11 @@ private async Task InternalUpsertAsync(SqliteConnection connection, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); - var key = storageModel[this._propertyReader.KeyPropertyStoragePropertyName]; + var key = storageModel[this._keyStorageName]; Verify.NotNull(key); - var condition = new SqliteWhereEqualsCondition(this._propertyReader.KeyPropertyStoragePropertyName, key); + var condition = new SqliteWhereEqualsCondition(this._keyStorageName, key); var upsertedRecordKey = await this.InternalUpsertBatchAsync(connection, [storageModel], condition, cancellationToken) .FirstOrDefaultAsync(cancellationToken) @@ -583,9 +590,9 @@ private IAsyncEnumerable InternalUpsertBatchAsync(SqliteConnection c OperationName, () => this._mapper.MapFromDataToStorageModel(record))).ToList(); - var keys = storageModels.Select(model => model[this._propertyReader.KeyPropertyStoragePropertyName]!).ToList(); + var keys = storageModels.Select(model => model[this._keyStorageName]!).ToList(); - var condition = new SqliteWhereInCondition(this._propertyReader.KeyPropertyStoragePropertyName, keys); + var condition = new SqliteWhereInCondition(this._keyStorageName, keys); return this.InternalUpsertBatchAsync(connection, storageModels, condition, cancellationToken); } @@ -613,8 +620,8 @@ private async IAsyncEnumerable InternalUpsertBatchAsync( using var vectorInsertCommand = SqliteVectorStoreCollectionCommandBuilder.BuildInsertCommand( connection, this._vectorTableName, - this._propertyReader.KeyPropertyStoragePropertyName, - this._vectorTableStoragePropertyNames.Value, + this._keyStorageName, + this._vectorTableStoragePropertyNames, storageModels); await vectorInsertCommand.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); @@ -623,8 +630,8 @@ private async IAsyncEnumerable InternalUpsertBatchAsync( using var dataCommand = SqliteVectorStoreCollectionCommandBuilder.BuildInsertCommand( connection, this._dataTableName, - this._propertyReader.KeyPropertyStoragePropertyName, - this._dataTableStoragePropertyNames.Value, + this._keyStorageName, + this._dataTableStoragePropertyNames, storageModels, replaceIfExists: true); @@ -647,7 +654,7 @@ private Task InternalDeleteAsync(SqliteConnection connection, TKey key, Ca { Verify.NotNull(key); - var condition = new SqliteWhereEqualsCondition(this._propertyReader.KeyPropertyStoragePropertyName, key); + var condition = new SqliteWhereEqualsCondition(this._keyStorageName, key); return 
this.InternalDeleteBatchAsync(connection, condition, cancellationToken); } @@ -661,7 +668,7 @@ private Task InternalDeleteBatchAsync(SqliteConnection connection, IEnumer Verify.True(keysList.Count > 0, "Number of provided keys should be greater than zero."); var condition = new SqliteWhereInCondition( - this._propertyReader.KeyPropertyStoragePropertyName, + this._keyStorageName, keysList); return this.InternalDeleteBatchAsync(connection, condition, cancellationToken); @@ -696,18 +703,15 @@ private Task InternalDeleteBatchAsync(SqliteConnection connection, SqliteWhereCo private TRecord GetAndMapRecord( string operationName, DbDataReader reader, - List properties, + List properties, bool includeVectors) { var storageModel = new Dictionary(); foreach (var property in properties) { - var propertyName = this._propertyReader.GetStoragePropertyName(property.DataModelPropertyName); - var propertyType = property.PropertyType; - var propertyValue = SqliteVectorStoreRecordPropertyMapping.GetPropertyValue(reader, propertyName, propertyType); - - storageModel.Add(propertyName, propertyValue); + var propertyValue = SqliteVectorStoreRecordPropertyMapping.GetPropertyValue(reader, property.StorageName, property.Type); + storageModel.Add(property.StorageName, propertyValue); } return VectorStoreErrorHandler.RunModelConversion( @@ -734,23 +738,6 @@ private async Task RunOperationAsync(string operationName, Func> o } } - private IVectorStoreRecordMapper> InitializeMapper() - { - if (this._options.DictionaryCustomMapper is not null) - { - return this._options.DictionaryCustomMapper; - } - - if (typeof(TRecord) == typeof(VectorStoreGenericDataModel) || - typeof(TRecord) == typeof(VectorStoreGenericDataModel)) - { - var mapper = new SqliteGenericDataModelMapper(this._propertyReader); - return (mapper as IVectorStoreRecordMapper>)!; - } - - return new SqliteVectorStoreRecordMapper(this._propertyReader); - } - #pragma warning disable CS0618 // VectorSearchFilter is obsolete private List? GetFilterConditions(VectorSearchFilter? filter, string? 
tableName = null) { @@ -767,12 +754,12 @@ private async Task RunOperationAsync(string operationName, Func> o { if (filterClause is EqualToFilterClause equalToFilterClause) { - if (!this._propertyReader.StoragePropertyNamesMap.TryGetValue(equalToFilterClause.FieldName, out var storagePropertyName)) + if (!this._model.PropertyMap.TryGetValue(equalToFilterClause.FieldName, out var property)) { throw new InvalidOperationException($"Property name '{equalToFilterClause.FieldName}' provided as part of the filter clause is not a valid property name."); } - conditions.Add(new SqliteWhereEqualsCondition(storagePropertyName, equalToFilterClause.Value) + conditions.Add(new SqliteWhereEqualsCondition(property.StorageName, equalToFilterClause.Value) { TableName = tableName }); diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs index f08ed1992b01..d39a5d343f93 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Sqlite; @@ -10,47 +11,24 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// Class for mapping between a dictionary and the consumer data model. /// /// The consumer data model to map to or from. -internal sealed class SqliteVectorStoreRecordMapper : IVectorStoreRecordMapper> +internal sealed class SqliteVectorStoreRecordMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper> { - /// with helpers for reading vector store model properties and their attributes. - private readonly VectorStoreRecordPropertyReader _propertyReader; - - /// - /// Initializes a new instance of the class. - /// - /// A that defines the schema of the data in the database. - public SqliteVectorStoreRecordMapper(VectorStoreRecordPropertyReader propertyReader) - { - Verify.NotNull(propertyReader); - - this._propertyReader = propertyReader; - - this._propertyReader.VerifyHasParameterlessConstructor(); - - // Validate property types. - this._propertyReader.VerifyDataProperties(SqliteConstants.SupportedDataTypes, supportEnumerable: false); - this._propertyReader.VerifyVectorProperties(SqliteConstants.SupportedVectorTypes); - } - public Dictionary MapFromDataToStorageModel(TRecord dataModel) { var properties = new Dictionary { - // Add key property - { this._propertyReader.KeyPropertyStoragePropertyName, this._propertyReader.KeyPropertyInfo.GetValue(dataModel) } + { model.KeyProperty.StorageName, model.KeyProperty.GetValueAsObject(dataModel!) } }; - // Add data properties - foreach (var property in this._propertyReader.DataPropertiesInfo) + foreach (var property in model.DataProperties) { - properties.Add(this._propertyReader.GetStoragePropertyName(property.Name), property.GetValue(dataModel)); + properties.Add(property.StorageName, property.GetValueAsObject(dataModel!)); } - // Add vector properties - foreach (var property in this._propertyReader.VectorPropertiesInfo) + foreach (var property in model.VectorProperties) { object? 
result = null; - var propertyValue = property.GetValue(dataModel); + var propertyValue = property.GetValueAsObject(dataModel!); if (propertyValue is not null) { @@ -58,7 +36,7 @@ public SqliteVectorStoreRecordMapper(VectorStoreRecordPropertyReader propertyRea result = SqliteVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); } - properties.Add(this._propertyReader.GetStoragePropertyName(property.Name), result); + properties.Add(property.StorageName, result); } return properties; @@ -66,34 +44,27 @@ public SqliteVectorStoreRecordMapper(VectorStoreRecordPropertyReader propertyRea public TRecord MapFromStorageToDataModel(Dictionary storageModel, StorageToDataModelMapperOptions options) { - var record = (TRecord)this._propertyReader.ParameterLessConstructorInfo.Invoke(null); + var record = model.CreateRecord()!; - // Set key. - var keyPropertyValue = Convert.ChangeType( - storageModel[this._propertyReader.KeyPropertyStoragePropertyName], - this._propertyReader.KeyProperty.PropertyType); + var keyPropertyValue = Convert.ChangeType(storageModel[model.KeyProperty.StorageName], model.KeyProperty.Type); + model.KeyProperty.SetValueAsObject(record, keyPropertyValue); - this._propertyReader.KeyPropertyInfo.SetValue(record, keyPropertyValue); - - // Process data properties. - var dataPropertiesInfoWithValues = VectorStoreRecordMapping.BuildPropertiesInfoWithValues( - this._propertyReader.DataPropertiesInfo, - this._propertyReader.StoragePropertyNamesMap, - storageModel); - - VectorStoreRecordMapping.SetPropertiesOnRecord(record, dataPropertiesInfoWithValues); + foreach (var property in model.DataProperties) + { + property.SetValueAsObject(record, storageModel[property.StorageName]); + } if (options.IncludeVectors) { - // Process vector properties. - var vectorPropertiesInfoWithValues = VectorStoreRecordMapping.BuildPropertiesInfoWithValues( - this._propertyReader.VectorPropertiesInfo, - this._propertyReader.StoragePropertyNamesMap, - storageModel, - (object? vector, Type type) => vector is byte[] vectorBytes ? 
- SqliteVectorStoreRecordPropertyMapping.MapVectorForDataModel(vectorBytes) : null); + foreach (var property in model.VectorProperties) + { + if (storageModel[property.StorageName] is not byte[] vectorBytes) + { + throw new InvalidOperationException($"Retrieved value for vector property '{property.StorageName}' which is not a byte array ('{storageModel[property.StorageName]?.GetType().Name}')."); + } - VectorStoreRecordMapping.SetPropertiesOnRecord(record, vectorPropertiesInfoWithValues); + property.SetValueAsObject(record, SqliteVectorStoreRecordPropertyMapping.MapVectorForDataModel(vectorBytes)); + } } return record; diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs index 0cb9137394d7..0cae1286b2b8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs @@ -5,6 +5,7 @@ using System.Data.Common; using System.Runtime.InteropServices; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Sqlite; @@ -35,9 +36,7 @@ public static ReadOnlyMemory MapVectorForDataModel(byte[] byteArray) return new ReadOnlyMemory(array); } - public static List GetColumns( - List properties, - IReadOnlyDictionary storagePropertyNames) + public static List GetColumns(List properties) { const string DistanceMetricConfigurationName = "distance_metric"; @@ -45,18 +44,17 @@ public static List GetColumns( foreach (var property in properties) { - var isPrimary = property is VectorStoreRecordKeyProperty; - var propertyName = storagePropertyNames[property.DataModelPropertyName]; + var isPrimary = property is VectorStoreRecordKeyPropertyModel; string propertyType; Dictionary? configuration = null; - if (property is VectorStoreRecordVectorProperty vectorProperty) + if (property is VectorStoreRecordVectorPropertyModel vectorProperty) { propertyType = GetStorageVectorPropertyType(vectorProperty); configuration = new() { - [DistanceMetricConfigurationName] = GetDistanceMetric(vectorProperty.DistanceFunction, vectorProperty.DataModelPropertyName) + [DistanceMetricConfigurationName] = GetDistanceMetric(vectorProperty) }; } else @@ -64,10 +62,10 @@ public static List GetColumns( propertyType = GetStorageDataPropertyType(property); } - var column = new SqliteColumn(propertyName, propertyType, isPrimary) + var column = new SqliteColumn(property.StorageName, propertyType, isPrimary) { Configuration = configuration, - HasIndex = property is VectorStoreRecordDataProperty { IsFilterable: true } + HasIndex = property is VectorStoreRecordDataPropertyModel { IsFilterable: true } }; columns.Add(column); @@ -117,9 +115,8 @@ public static List GetColumns( #region private - private static string GetStorageDataPropertyType(VectorStoreRecordProperty property) - { - return property.PropertyType switch + private static string GetStorageDataPropertyType(VectorStoreRecordPropertyModel property) + => property.Type switch { // Integer types Type t when t == typeof(int) || t == typeof(int?) 
=> "INTEGER", @@ -143,34 +140,20 @@ private static string GetStorageDataPropertyType(VectorStoreRecordProperty prope Type t when t == typeof(byte[]) => "BLOB", // Default fallback for unknown types - _ => throw new NotSupportedException($"Property {property.DataModelPropertyName} has type {property.PropertyType.FullName}, which is not supported by SQLite connector.") + _ => throw new NotSupportedException($"Property '{property.ModelName}' has type '{property.Type.Name}', which is not supported by SQLite connector.") }; - } - - private static string GetDistanceMetric(string? distanceFunction, string vectorPropertyName) - { - const string Cosine = "cosine"; - const string L1 = "l1"; - const string L2 = "l2"; - - if (string.IsNullOrWhiteSpace(distanceFunction)) - { - return Cosine; - } - return distanceFunction switch + private static string GetDistanceMetric(VectorStoreRecordVectorPropertyModel vectorProperty) + => vectorProperty.DistanceFunction switch { - DistanceFunction.CosineDistance => Cosine, - DistanceFunction.ManhattanDistance => L1, - DistanceFunction.EuclideanDistance => L2, - _ => throw new NotSupportedException($"Distance function '{distanceFunction}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the SQLite connector.") + DistanceFunction.CosineDistance or null => "cosine", + DistanceFunction.ManhattanDistance => "l1", + DistanceFunction.EuclideanDistance => "l2", + _ => throw new NotSupportedException($"Distance function '{vectorProperty.DistanceFunction}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' is not supported by the SQLite connector.") }; - } - private static string GetStorageVectorPropertyType(VectorStoreRecordVectorProperty vectorProperty) - { - return $"FLOAT[{vectorProperty.Dimensions}]"; - } + private static string GetStorageVectorPropertyType(VectorStoreRecordVectorPropertyModel vectorProperty) + => $"FLOAT[{vectorProperty.Dimensions}]"; #endregion } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs index 2e4be5391159..26f2a656a189 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs @@ -10,21 +10,22 @@ using System.Runtime.CompilerServices; using System.Text; using System.Text.Json; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Weaviate; // https://weaviate.io/developers/weaviate/api/graphql/filters#filter-structure internal class WeaviateFilterTranslator { - private IReadOnlyDictionary _storagePropertyNames = null!; + private VectorStoreRecordModel _model = null!; private ParameterExpression _recordParameter = null!; private readonly StringBuilder _filter = new(); - internal string Translate(LambdaExpression lambdaExpression, IReadOnlyDictionary storagePropertyNames) + internal string Translate(LambdaExpression lambdaExpression, VectorStoreRecordModel model) { Debug.Assert(this._filter.Length == 0); - this._storagePropertyNames = storagePropertyNames; + this._model = model; Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; @@ -225,11 +226,12 @@ private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] { if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) { - if 
(!this._storagePropertyNames.TryGetValue(memberExpression.Member.Name, out storagePropertyName)) + if (!this._model.PropertyMap.TryGetValue(memberExpression.Member.Name, out var property)) { throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); } + storagePropertyName = property.StorageName; return true; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateGenericDataModelMapper.cs index 7e7640744d2d..aed7448293df 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateGenericDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateGenericDataModelMapper.cs @@ -5,6 +5,7 @@ using System.Text.Json; using System.Text.Json.Nodes; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Weaviate; @@ -16,17 +17,8 @@ internal sealed class WeaviateGenericDataModelMapper : IVectorStoreRecordMapper< /// The name of the Weaviate collection. private readonly string _collectionName; - /// A property of record definition. - private readonly VectorStoreRecordKeyProperty _keyProperty; - - /// A collection of properties of record definition. - private readonly IReadOnlyList _dataProperties; - - /// A collection of properties of record definition. - private readonly IReadOnlyList _vectorProperties; - - /// A dictionary that maps from a property name to the storage name. - private readonly IReadOnlyDictionary _storagePropertyNames; + /// The model. + private readonly VectorStoreRecordModel _model; /// A for serialization/deserialization of record properties. private readonly JsonSerializerOptions _jsonSerializerOptions; @@ -35,31 +27,15 @@ internal sealed class WeaviateGenericDataModelMapper : IVectorStoreRecordMapper< /// Initializes a new instance of the class. /// /// The name of the Weaviate collection - /// A property of record definition. - /// A collection of properties of record definition. - /// A collection of properties of record definition. - /// A dictionary that maps from a property name to the storage name. + /// The model /// A for serialization/deserialization of record properties. public WeaviateGenericDataModelMapper( string collectionName, - VectorStoreRecordKeyProperty keyProperty, - IReadOnlyList dataProperties, - IReadOnlyList vectorProperties, - IReadOnlyDictionary storagePropertyNames, + VectorStoreRecordModel model, JsonSerializerOptions jsonSerializerOptions) { - Verify.NotNullOrWhiteSpace(collectionName); - Verify.NotNull(keyProperty); - Verify.NotNull(dataProperties); - Verify.NotNull(vectorProperties); - Verify.NotNull(storagePropertyNames); - Verify.NotNull(jsonSerializerOptions); - this._collectionName = collectionName; - this._keyProperty = keyProperty; - this._dataProperties = dataProperties; - this._vectorProperties = vectorProperties; - this._storagePropertyNames = storagePropertyNames; + this._model = model; this._jsonSerializerOptions = jsonSerializerOptions; } @@ -77,27 +53,23 @@ public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel da }; // Populate data properties. 
- foreach (var property in this._dataProperties) + foreach (var property in this._model.DataProperties) { - if (dataModel.Data is not null && dataModel.Data.TryGetValue(property.DataModelPropertyName, out var dataValue)) + if (dataModel.Data is not null && dataModel.Data.TryGetValue(property.ModelName, out var dataValue)) { - var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; - - weaviateObjectModel[WeaviateConstants.ReservedDataPropertyName]![storagePropertyName] = dataValue is not null ? - JsonSerializer.SerializeToNode(dataValue, property.PropertyType, this._jsonSerializerOptions) : + weaviateObjectModel[WeaviateConstants.ReservedDataPropertyName]![property.StorageName] = dataValue is not null ? + JsonSerializer.SerializeToNode(dataValue, property.Type, this._jsonSerializerOptions) : null; } } // Populate vector properties. - foreach (var property in this._vectorProperties) + foreach (var property in this._model.VectorProperties) { - if (dataModel.Vectors is not null && dataModel.Vectors.TryGetValue(property.DataModelPropertyName, out var vectorValue)) + if (dataModel.Vectors is not null && dataModel.Vectors.TryGetValue(property.ModelName, out var vectorValue)) { - var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; - - weaviateObjectModel[WeaviateConstants.ReservedVectorPropertyName]![storagePropertyName] = vectorValue is not null ? - JsonSerializer.SerializeToNode(vectorValue, property.PropertyType, this._jsonSerializerOptions) : + weaviateObjectModel[WeaviateConstants.ReservedVectorPropertyName]![property.StorageName] = vectorValue is not null ? + JsonSerializer.SerializeToNode(vectorValue, property.Type, this._jsonSerializerOptions) : null; } } @@ -121,28 +93,26 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject st var vectorProperties = new Dictionary(); // Populate data properties. - foreach (var property in this._dataProperties) + foreach (var property in this._model.DataProperties) { - var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; var jsonObject = storageModel[WeaviateConstants.ReservedDataPropertyName] as JsonObject; - if (jsonObject is not null && jsonObject.TryGetPropertyValue(storagePropertyName, out var dataValue)) + if (jsonObject is not null && jsonObject.TryGetPropertyValue(property.StorageName, out var dataValue)) { - dataProperties.Add(property.DataModelPropertyName, dataValue.Deserialize(property.PropertyType, this._jsonSerializerOptions)); + dataProperties.Add(property.ModelName, dataValue.Deserialize(property.Type, this._jsonSerializerOptions)); } } // Populate vector properties. 
if (options.IncludeVectors) { - foreach (var property in this._vectorProperties) + foreach (var property in this._model.VectorProperties) { - var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; var jsonObject = storageModel[WeaviateConstants.ReservedVectorPropertyName] as JsonObject; - if (jsonObject is not null && jsonObject.TryGetPropertyValue(storagePropertyName, out var vectorValue)) + if (jsonObject is not null && jsonObject.TryGetPropertyValue(property.StorageName, out var vectorValue)) { - vectorProperties.Add(property.DataModelPropertyName, vectorValue.Deserialize(property.PropertyType, this._jsonSerializerOptions)); + vectorProperties.Add(property.ModelName, vectorValue.Deserialize(property.Type, this._jsonSerializerOptions)); } } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateModelBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateModelBuilder.cs new file mode 100644 index 000000000000..eaac5cc83f44 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateModelBuilder.cs @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using Microsoft.Extensions.VectorData.ConnectorSupport; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal class WeaviateModelBuilder() : VectorStoreRecordJsonModelBuilder(s_modelBuildingOptions) +{ + private static readonly VectorStoreRecordModelBuildingOptions s_modelBuildingOptions = new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + + SupportedKeyPropertyTypes = [typeof(Guid)], + SupportedDataPropertyTypes = s_supportedDataTypes, + SupportedEnumerableDataPropertyElementTypes = s_supportedDataTypes, + SupportedVectorPropertyTypes = s_supportedVectorTypes, + + UsesExternalSerializer = true, + ReservedKeyStorageName = WeaviateConstants.ReservedKeyPropertyName + }; + + private static readonly HashSet s_supportedDataTypes = + [ + typeof(string), + typeof(bool), + typeof(int), + typeof(long), + typeof(short), + typeof(byte), + typeof(float), + typeof(double), + typeof(decimal), + typeof(DateTime), + typeof(DateTimeOffset), + typeof(Guid), + ]; + + internal static readonly HashSet s_supportedVectorTypes = + [ + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?), + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?) + ]; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs index 13b944210b14..d17377c769ce 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs @@ -5,6 +5,7 @@ using System.Collections.Generic; using System.Linq; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Weaviate; @@ -18,40 +19,33 @@ internal static class WeaviateVectorStoreCollectionCreateMapping /// Maps record type properties to Weaviate collection schema for collection creation. /// /// The name of the vector store collection. - /// Collection of record data properties. - /// Collection of record vector properties. 
- /// A dictionary that maps from a property name to the storage name that should be used when serializing it to JSON for data and vector properties. + /// The model. /// Weaviate collection schema. - public static WeaviateCollectionSchema MapToSchema( - string collectionName, - IEnumerable dataProperties, - IEnumerable vectorProperties, - IReadOnlyDictionary storagePropertyNames) + public static WeaviateCollectionSchema MapToSchema(string collectionName, VectorStoreRecordModel model) { var schema = new WeaviateCollectionSchema(collectionName); // Handle data properties. - foreach (var property in dataProperties) + foreach (var property in model.DataProperties) { schema.Properties.Add(new WeaviateCollectionSchemaProperty { - Name = storagePropertyNames[property.DataModelPropertyName], - DataType = [MapType(property.PropertyType)], + Name = property.StorageName, + DataType = [MapType(property.Type)], IndexFilterable = property.IsFilterable, IndexSearchable = property.IsFullTextSearchable }); } // Handle vector properties. - foreach (var property in vectorProperties) + foreach (var property in model.VectorProperties) { - var vectorPropertyName = storagePropertyNames[property.DataModelPropertyName]; - schema.VectorConfigurations.Add(vectorPropertyName, new WeaviateCollectionSchemaVectorConfig + schema.VectorConfigurations.Add(property.StorageName, new WeaviateCollectionSchemaVectorConfig { - VectorIndexType = MapIndexKind(property.IndexKind, vectorPropertyName), + VectorIndexType = MapIndexKind(property.IndexKind, property.StorageName), VectorIndexConfig = new WeaviateCollectionSchemaVectorIndexConfig { - Distance = MapDistanceFunction(property.DistanceFunction, vectorPropertyName) + Distance = MapDistanceFunction(property.DistanceFunction, property.StorageName) } }); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index bef5d217e356..a740b1dfd414 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -12,6 +12,7 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Weaviate; @@ -23,49 +24,6 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; public class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { - /// A set of types that a key on the provided model may have. - private static readonly HashSet s_supportedKeyTypes = - [ - typeof(Guid) - ]; - - /// A set of types that vectors on the provided model may have. - private static readonly HashSet s_supportedVectorTypes = - [ - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?), - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?) - ]; - - /// A set of types that data properties on the provided model may have. 
- private static readonly HashSet s_supportedDataTypes = - [ - typeof(string), - typeof(bool), - typeof(bool?), - typeof(int), - typeof(int?), - typeof(long), - typeof(long?), - typeof(short), - typeof(short?), - typeof(byte), - typeof(byte?), - typeof(float), - typeof(float?), - typeof(double), - typeof(double?), - typeof(decimal), - typeof(decimal?), - typeof(DateTime), - typeof(DateTime?), - typeof(DateTimeOffset), - typeof(DateTimeOffset?), - typeof(Guid), - typeof(Guid?) - ]; - /// Default JSON serializer options. private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() { @@ -89,8 +47,8 @@ public class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCo /// Optional configuration options for this class. private readonly WeaviateVectorStoreRecordCollectionOptions _options; - /// A helper to access property information for the current data model and record definition. - private readonly VectorStoreRecordPropertyReader _propertyReader; + /// The model for this collection. + private readonly VectorStoreRecordModel _model; /// The mapper to use when mapping between the consumer data model and the Weaviate record. private readonly IVectorStoreRecordMapper _mapper; @@ -132,21 +90,7 @@ public WeaviateVectorStoreRecordCollection( this.CollectionName = collectionName; this._options = options ?? new(); this._apiKey = this._options.ApiKey; - this._propertyReader = new VectorStoreRecordPropertyReader( - typeof(TRecord), - this._options.VectorStoreRecordDefinition, - new() - { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true, - JsonSerializerOptions = s_jsonSerializerOptions - }); - - // Validate property types. - this._propertyReader.VerifyKeyProperties(s_supportedKeyTypes); - this._propertyReader.VerifyDataProperties(s_supportedDataTypes, supportEnumerable: true); - this._propertyReader.VerifyVectorProperties(s_supportedVectorTypes); + this._model = new WeaviateModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, s_jsonSerializerOptions); // Assign mapper. this._mapper = this.InitializeMapper(); @@ -176,11 +120,7 @@ public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = return this.RunOperationAsync(OperationName, () => { - var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema( - this.CollectionName, - this._propertyReader.DataProperties, - this._propertyReader.VectorProperties, - this._propertyReader.JsonPropertyNamesMap); + var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(this.CollectionName, this._model); var request = new WeaviateCreateCollectionSchemaRequest(schema).Build(); @@ -341,21 +281,15 @@ public virtual async Task> VectorizedSearchAsync> HybridSearchAsync(TVect VerifyVectorParam(vector); var searchOptions = options ?? 
s_defaultKeywordVectorizedHybridSearchOptions; - var vectorProperty = this._propertyReader.GetVectorPropertyOrSingle(new() { VectorProperty = searchOptions.VectorProperty }); - var textDataProperty = this._propertyReader.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalProperty); - - var vectorPropertyName = this._propertyReader.GetJsonPropertyName(vectorProperty.DataModelPropertyName); - var textDataPropertyName = this._propertyReader.GetJsonPropertyName(textDataProperty.DataModelPropertyName); - var fields = this._propertyReader.DataPropertyJsonNames; + var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = searchOptions.VectorProperty }); + var textDataProperty = this._model.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalProperty); var query = WeaviateVectorStoreRecordCollectionQueryBuilder.BuildHybridSearchQuery( vector, string.Join(" ", keywords), this.CollectionName, - vectorPropertyName, - this._propertyReader.KeyPropertyName, - textDataPropertyName, + this._model, + vectorProperty, + textDataProperty, s_jsonSerializerOptions, - searchOptions, - this._propertyReader.JsonPropertyNamesMap, - this._propertyReader.VectorPropertyJsonNames, - this._propertyReader.DataPropertyJsonNames); + searchOptions); return await this.ExecuteQueryAsync(query, searchOptions.IncludeVectors, WeaviateConstants.HybridScorePropertyName, OperationName, cancellationToken).ConfigureAwait(false); } @@ -500,24 +427,12 @@ private IVectorStoreRecordMapper InitializeMapper() if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) { - var mapper = new WeaviateGenericDataModelMapper( - this.CollectionName, - this._propertyReader.KeyProperty, - this._propertyReader.DataProperties, - this._propertyReader.VectorProperties, - this._propertyReader.JsonPropertyNamesMap, - s_jsonSerializerOptions); + var mapper = new WeaviateGenericDataModelMapper(this.CollectionName, this._model, s_jsonSerializerOptions); return (mapper as IVectorStoreRecordMapper)!; } - return new WeaviateVectorStoreRecordMapper( - this.CollectionName, - this._propertyReader.KeyProperty, - this._propertyReader.DataProperties, - this._propertyReader.VectorProperties, - this._propertyReader.JsonPropertyNamesMap, - s_jsonSerializerOptions); + return new WeaviateVectorStoreRecordMapper(this.CollectionName, this._model, s_jsonSerializerOptions); } private static void VerifyVectorParam(TVector vector) @@ -526,11 +441,11 @@ private static void VerifyVectorParam(TVector vector) var vectorType = vector.GetType(); - if (!s_supportedVectorTypes.Contains(vectorType)) + if (!WeaviateModelBuilder.s_supportedVectorTypes.Contains(vectorType)) { throw new NotSupportedException( $"The provided vector type {vectorType.FullName} is not supported by the Weaviate connector. 
" + - $"Supported types are: {string.Join(", ", s_supportedVectorTypes.Select(l => l.FullName))}"); + $"Supported types are: {string.Join(", ", WeaviateModelBuilder.s_supportedVectorTypes.Select(l => l.FullName))}"); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs index 7ef8907e4969..711cf9f3bc88 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs @@ -6,6 +6,7 @@ using System.Linq.Expressions; using System.Text.Json; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Weaviate; @@ -22,27 +23,20 @@ public static string BuildSearchQuery( TVector vector, string collectionName, string vectorPropertyName, - string keyPropertyName, JsonSerializerOptions jsonSerializerOptions, VectorSearchOptions searchOptions, - IReadOnlyDictionary storagePropertyNames, - IReadOnlyList vectorPropertyStorageNames, - IReadOnlyList dataPropertyStorageNames) + VectorStoreRecordModel model) { var vectorsQuery = searchOptions.IncludeVectors ? - $"vectors {{ {string.Join(" ", vectorPropertyStorageNames)} }}" : + $"vectors {{ {string.Join(" ", model.VectorProperties.Select(p => p.StorageName))} }}" : string.Empty; #pragma warning disable CS0618 // VectorSearchFilter is obsolete var filter = searchOptions switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), - { OldFilter: VectorSearchFilter legacyFilter } => BuildLegacyFilter( - legacyFilter, - jsonSerializerOptions, - keyPropertyName, - storagePropertyNames), - { Filter: Expression> newFilter } => new WeaviateFilterTranslator().Translate(newFilter, storagePropertyNames), + { OldFilter: VectorSearchFilter legacyFilter } => BuildLegacyFilter(legacyFilter, jsonSerializerOptions, model), + { Filter: Expression> newFilter } => new WeaviateFilterTranslator().Translate(newFilter, model), _ => null }; #pragma warning restore CS0618 @@ -61,7 +55,7 @@ public static string BuildSearchQuery( vector: {{vectorArray}} } ) { - {{string.Join(" ", dataPropertyStorageNames)}} + {{string.Join(" ", model.DataProperties.Select(p => p.StorageName))}} {{WeaviateConstants.AdditionalPropertiesPropertyName}} { {{WeaviateConstants.ReservedKeyPropertyName}} {{WeaviateConstants.ScorePropertyName}} @@ -81,29 +75,22 @@ public static string BuildHybridSearchQuery( TVector vector, string keywords, string collectionName, - string vectorPropertyName, - string keyPropertyName, - string textPropertyName, + VectorStoreRecordModel model, + VectorStoreRecordVectorPropertyModel vectorProperty, + VectorStoreRecordDataPropertyModel textProperty, JsonSerializerOptions jsonSerializerOptions, - HybridSearchOptions searchOptions, - IReadOnlyDictionary storagePropertyNames, - IReadOnlyList vectorPropertyStorageNames, - IReadOnlyList dataPropertyStorageNames) + HybridSearchOptions searchOptions) { var vectorsQuery = searchOptions.IncludeVectors ? 
- $"vectors {{ {string.Join(" ", vectorPropertyStorageNames)} }}" : + $"vectors {{ {string.Join(" ", model.VectorProperties.Select(p => p.StorageName))} }}" : string.Empty; #pragma warning disable CS0618 // VectorSearchFilter is obsolete var filter = searchOptions switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), - { OldFilter: VectorSearchFilter legacyFilter } => BuildLegacyFilter( - legacyFilter, - jsonSerializerOptions, - keyPropertyName, - storagePropertyNames), - { Filter: Expression> newFilter } => new WeaviateFilterTranslator().Translate(newFilter, storagePropertyNames), + { OldFilter: VectorSearchFilter legacyFilter } => BuildLegacyFilter(legacyFilter, jsonSerializerOptions, model), + { Filter: Expression> newFilter } => new WeaviateFilterTranslator().Translate(newFilter, model), _ => null }; #pragma warning restore CS0618 @@ -119,13 +106,13 @@ public static string BuildHybridSearchQuery( {{(filter is null ? "" : "where: " + filter)}} hybrid: { query: "{{keywords}}" - properties: ["{{textPropertyName}}"] - targetVectors: ["{{vectorPropertyName}}"] + properties: ["{{textProperty.StorageName}}"] + targetVectors: ["{{vectorProperty.StorageName}}"] vector: {{vectorArray}} fusionType: rankedFusion } ) { - {{string.Join(" ", dataPropertyStorageNames)}} + {{string.Join(" ", model.DataProperties.Select(p => p.StorageName))}} {{WeaviateConstants.AdditionalPropertiesPropertyName}} { {{WeaviateConstants.ReservedKeyPropertyName}} {{WeaviateConstants.HybridScorePropertyName}} @@ -147,8 +134,7 @@ public static string BuildHybridSearchQuery( private static string BuildLegacyFilter( VectorSearchFilter? vectorSearchFilter, JsonSerializerOptions jsonSerializerOptions, - string keyPropertyName, - IReadOnlyDictionary storagePropertyNames) + VectorStoreRecordModel model) { const string EqualOperator = "Equal"; const string ContainsAnyOperator = "ContainsAny"; @@ -192,18 +178,12 @@ private static string BuildLegacyFilter( nameof(AnyTagEqualToFilterClause)])}"); } - string? storagePropertyName; - - if (propertyName.Equals(keyPropertyName, StringComparison.Ordinal)) - { - storagePropertyName = WeaviateConstants.ReservedKeyPropertyName; - } - else if (!storagePropertyNames.TryGetValue(propertyName, out storagePropertyName)) + if (!model.PropertyMap.TryGetValue(propertyName, out var property)) { throw new InvalidOperationException($"Property name '{propertyName}' provided as part of the filter clause is not a valid property name."); } - var operand = $$"""{ path: ["{{storagePropertyName}}"], operator: {{filterOperator}}, {{filterValueType}}: {{propertyValue}} }"""; + var operand = $$"""{ path: ["{{property.StorageName}}"], operator: {{filterOperator}}, {{filterValueType}}: {{propertyValue}} }"""; operands.Add(operand); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs index cb1f94a41eae..37a762ece30c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs @@ -1,49 +1,27 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Collections.Generic; -using System.Linq; +using System.Diagnostics; using System.Text.Json; using System.Text.Json.Nodes; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Weaviate; internal sealed class WeaviateVectorStoreRecordMapper : IVectorStoreRecordMapper { private readonly string _collectionName; - - private readonly string _keyProperty; - - private readonly IReadOnlyList _dataProperties; - - private readonly IReadOnlyList _vectorProperties; - - private readonly IReadOnlyDictionary _storagePropertyNames; - + private readonly VectorStoreRecordModel _model; private readonly JsonSerializerOptions _jsonSerializerOptions; public WeaviateVectorStoreRecordMapper( string collectionName, - VectorStoreRecordKeyProperty keyProperty, - IReadOnlyList dataProperties, - IReadOnlyList vectorProperties, - IReadOnlyDictionary storagePropertyNames, + VectorStoreRecordModel model, JsonSerializerOptions jsonSerializerOptions) { - Verify.NotNullOrWhiteSpace(collectionName); - Verify.NotNull(keyProperty); - Verify.NotNull(dataProperties); - Verify.NotNull(vectorProperties); - Verify.NotNull(storagePropertyNames); - Verify.NotNull(jsonSerializerOptions); - this._collectionName = collectionName; - this._storagePropertyNames = storagePropertyNames; + this._model = model; this._jsonSerializerOptions = jsonSerializerOptions; - - this._keyProperty = this._storagePropertyNames[keyProperty.DataModelPropertyName]; - this._dataProperties = dataProperties.Select(property => this._storagePropertyNames[property.DataModelPropertyName]).ToList(); - this._vectorProperties = vectorProperties.Select(property => this._storagePropertyNames[property.DataModelPropertyName]).ToList(); } public JsonObject MapFromDataToStorageModel(TRecord dataModel) @@ -56,30 +34,33 @@ public JsonObject MapFromDataToStorageModel(TRecord dataModel) var weaviateObjectModel = new JsonObject { { WeaviateConstants.CollectionPropertyName, JsonValue.Create(this._collectionName) }, - { WeaviateConstants.ReservedKeyPropertyName, jsonNodeDataModel[this._keyProperty]!.DeepClone() }, + // The key property in Weaviate is always named 'id'. + // But the external JSON serializer used just above isn't aware of that, and will produce a JSON object with another name, taking into + // account e.g. naming policies. TemporaryStorageName gets populated in the model builder - containing that name - once VectorStoreModelBuildingOptions.ReservedKeyPropertyName is set + { WeaviateConstants.ReservedKeyPropertyName, jsonNodeDataModel[this._model.KeyProperty.TemporaryStorageName!]!.DeepClone() }, { WeaviateConstants.ReservedDataPropertyName, new JsonObject() }, { WeaviateConstants.ReservedVectorPropertyName, new JsonObject() }, }; // Populate data properties. - foreach (var property in this._dataProperties) + foreach (var property in this._model.DataProperties) { - var node = jsonNodeDataModel[property]; + var node = jsonNodeDataModel[property.StorageName]; if (node is not null) { - weaviateObjectModel[WeaviateConstants.ReservedDataPropertyName]![property] = node.DeepClone(); + weaviateObjectModel[WeaviateConstants.ReservedDataPropertyName]![property.StorageName] = node.DeepClone(); } } // Populate vector properties. 
- foreach (var property in this._vectorProperties) + foreach (var property in this._model.VectorProperties) { - var node = jsonNodeDataModel[property]; + var node = jsonNodeDataModel[property.StorageName]; if (node is not null) { - weaviateObjectModel[WeaviateConstants.ReservedVectorPropertyName]![property] = node.DeepClone(); + weaviateObjectModel[WeaviateConstants.ReservedVectorPropertyName]![property.StorageName] = node.DeepClone(); } } @@ -90,33 +71,37 @@ public TRecord MapFromStorageToDataModel(JsonObject storageModel, StorageToDataM { Verify.NotNull(storageModel); + // TemporaryStorageName gets populated in the model builder once VectorStoreModelBuildingOptions.ReservedKeyPropertyName is set + Debug.Assert(this._model.KeyProperty.TemporaryStorageName is not null); + // Transform Weaviate object model to data model. var jsonNodeDataModel = new JsonObject { - { this._keyProperty, storageModel[WeaviateConstants.ReservedKeyPropertyName]?.DeepClone() }, + // See comment above on TemporaryStorageName + { this._model.KeyProperty.TemporaryStorageName!, storageModel[WeaviateConstants.ReservedKeyPropertyName]?.DeepClone() }, }; // Populate data properties. - foreach (var property in this._dataProperties) + foreach (var property in this._model.DataProperties) { - var node = storageModel[WeaviateConstants.ReservedDataPropertyName]?[property]; + var node = storageModel[WeaviateConstants.ReservedDataPropertyName]?[property.StorageName]; if (node is not null) { - jsonNodeDataModel[property] = node.DeepClone(); + jsonNodeDataModel[property.StorageName] = node.DeepClone(); } } // Populate vector properties. if (options.IncludeVectors) { - foreach (var property in this._vectorProperties) + foreach (var property in this._model.VectorProperties) { - var node = storageModel[WeaviateConstants.ReservedVectorPropertyName]?[property]; + var node = storageModel[WeaviateConstants.ReservedVectorPropertyName]?[property.StorageName]; if (node is not null) { - jsonNodeDataModel[property] = node.DeepClone(); + jsonNodeDataModel[property.StorageName] = node.DeepClone(); } } } diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/Connectors.MongoDB.UnitTests.csproj b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/Connectors.MongoDB.UnitTests.csproj index b8969e21943e..30f8c0d5307e 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/Connectors.MongoDB.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/Connectors.MongoDB.UnitTests.csproj @@ -9,6 +9,7 @@ disable false $(NoWarn);SKEXP0001,SKEXP0020,VSTHRD111,CA2007,CS1591 + $(NoWarn);MEVD9001 diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBGenericDataModelMapperTests.cs index 1e19af61a2f4..3d29c9140124 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBGenericDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBGenericDataModelMapperTests.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Linq; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.MongoDB; using MongoDB.Bson; using Xunit; @@ -15,33 +16,30 @@ namespace SemanticKernel.Connectors.MongoDB.UnitTests; /// public sealed class MongoDBGenericDataModelMapperTests { - private static readonly VectorStoreRecordDefinition s_vectorStoreRecordDefinition = new() - { - Properties = new List - { - new 
VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), - new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), - new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), - new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), - new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), - new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), - new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), - new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), - new VectorStoreRecordDataProperty("DecimalDataProp", typeof(decimal)), - new VectorStoreRecordDataProperty("NullableDecimalDataProp", typeof(decimal?)), - new VectorStoreRecordDataProperty("DateTimeDataProp", typeof(DateTime)), - new VectorStoreRecordDataProperty("NullableDateTimeDataProp", typeof(DateTime?)), - new VectorStoreRecordDataProperty("TagListDataProp", typeof(List)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), - new VectorStoreRecordVectorProperty("DoubleVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableDoubleVector", typeof(ReadOnlyMemory?)), - }, - }; + private static readonly VectorStoreRecordModel s_model = BuildModel( + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), + new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), + new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), + new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), + new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), + new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), + new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), + new VectorStoreRecordDataProperty("DecimalDataProp", typeof(decimal)), + new VectorStoreRecordDataProperty("NullableDecimalDataProp", typeof(decimal?)), + new VectorStoreRecordDataProperty("DateTimeDataProp", typeof(DateTime)), + new VectorStoreRecordDataProperty("NullableDateTimeDataProp", typeof(DateTime?)), + new VectorStoreRecordDataProperty("TagListDataProp", typeof(List)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("DoubleVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableDoubleVector", typeof(ReadOnlyMemory?)) + ]); private static readonly float[] s_floatVector = [1.0f, 2.0f, 3.0f]; private static readonly double[] s_doubleVector = [1.0f, 2.0f, 3.0f]; @@ -51,7 +49,7 @@ public sealed class MongoDBGenericDataModelMapperTests public void MapFromDataToStorageModelMapsAllSupportedTypes() { // Arrange - var sut = new 
MongoDBGenericDataModelMapper(s_vectorStoreRecordDefinition); + var sut = new MongoDBGenericDataModelMapper(s_model); var dataModel = new VectorStoreGenericDataModel("key") { Data = @@ -113,16 +111,13 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes() public void MapFromDataToStorageModelMapsNullValues() { // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), - }, - }; + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)) + ]); var dataModel = new VectorStoreGenericDataModel("key") { @@ -137,7 +132,7 @@ public void MapFromDataToStorageModelMapsNullValues() }, }; - var sut = new MongoDBGenericDataModelMapper(vectorStoreRecordDefinition); + var sut = new MongoDBGenericDataModelMapper(model); // Act var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -152,7 +147,7 @@ public void MapFromDataToStorageModelMapsNullValues() public void MapFromStorageToDataModelMapsAllSupportedTypes() { // Arrange - var sut = new MongoDBGenericDataModelMapper(s_vectorStoreRecordDefinition); + var sut = new MongoDBGenericDataModelMapper(s_model); var storageModel = new BsonDocument { ["_id"] = "key", @@ -209,16 +204,13 @@ public void MapFromStorageToDataModelMapsAllSupportedTypes() public void MapFromStorageToDataModelMapsNullValues() { // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), - }, - }; + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)) + ]); var storageModel = new BsonDocument { @@ -228,7 +220,7 @@ public void MapFromStorageToDataModelMapsNullValues() ["NullableFloatVector"] = BsonNull.Value }; - var sut = new MongoDBGenericDataModelMapper(vectorStoreRecordDefinition); + var sut = new MongoDBGenericDataModelMapper(model); // Act var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); @@ -244,7 +236,7 @@ public void MapFromStorageToDataModelMapsNullValues() public void MapFromStorageToDataModelThrowsForMissingKey() { // Arrange - var sut = new MongoDBGenericDataModelMapper(s_vectorStoreRecordDefinition); + var sut = new MongoDBGenericDataModelMapper(s_model); var storageModel = new BsonDocument(); // Act & Assert @@ -256,18 +248,15 @@ public void MapFromStorageToDataModelThrowsForMissingKey() public void MapFromDataToStorageModelSkipsMissingProperties() { // Arrange - VectorStoreRecordDefinition 
vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - }, - }; + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + ]); var dataModel = new VectorStoreGenericDataModel("key"); - var sut = new MongoDBGenericDataModelMapper(vectorStoreRecordDefinition); + var sut = new MongoDBGenericDataModelMapper(model); // Act var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -282,22 +271,19 @@ public void MapFromDataToStorageModelSkipsMissingProperties() public void MapFromStorageToDataModelSkipsMissingProperties() { // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - }, - }; + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + ]); var storageModel = new BsonDocument { ["_id"] = "key" }; - var sut = new MongoDBGenericDataModelMapper(vectorStoreRecordDefinition); + var sut = new MongoDBGenericDataModelMapper(model); // Act var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); @@ -307,4 +293,7 @@ public void MapFromStorageToDataModelSkipsMissingProperties() Assert.False(dataModel.Data.ContainsKey("StringDataProp")); Assert.False(dataModel.Vectors.ContainsKey("FloatVector")); } + + private static VectorStoreRecordModel BuildModel(IReadOnlyList properties) + => new MongoDBModelBuilder().Build(typeof(VectorStoreGenericDataModel), new() { Properties = properties }); } diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs index cea02dee086c..139844d2673d 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs @@ -1,8 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. 
using System; -using System.Collections.Generic; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.MongoDB; using MongoDB.Bson; using Xunit; @@ -16,11 +16,17 @@ namespace SemanticKernel.Connectors.MongoDB.UnitTests; /// public sealed class MongoDBVectorStoreCollectionSearchMappingTests { - private readonly Dictionary _storagePropertyNames = new() - { - ["Property1"] = "property_1", - ["Property2"] = "property_2", - }; + private readonly VectorStoreRecordModel _model = new MongoDBModelBuilder() + .Build( + typeof(VectorStoreGenericDataModel), + new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("Property1", typeof(string)) { StoragePropertyName = "property_1" }, + new VectorStoreRecordDataProperty("Property2", typeof(string)) { StoragePropertyName = "property_2" }, + ] + }); [Fact] public void BuildFilterThrowsExceptionWithUnsupportedFilterClause() @@ -29,7 +35,7 @@ public void BuildFilterThrowsExceptionWithUnsupportedFilterClause() var vectorSearchFilter = new VectorSearchFilter().AnyTagEqualTo("NonExistentProperty", "TestValue"); // Act & Assert - Assert.Throws(() => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(vectorSearchFilter, this._storagePropertyNames)); + Assert.Throws(() => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(vectorSearchFilter, this._model)); } [Fact] @@ -39,7 +45,7 @@ public void BuildFilterThrowsExceptionWithNonExistentPropertyName() var vectorSearchFilter = new VectorSearchFilter().EqualTo("NonExistentProperty", "TestValue"); // Act & Assert - Assert.Throws(() => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(vectorSearchFilter, this._storagePropertyNames)); + Assert.Throws(() => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(vectorSearchFilter, this._model)); } [Fact] @@ -51,19 +57,19 @@ public void BuildFilterThrowsExceptionWithMultipleFilterClausesOfSameType() .EqualTo("Property1", "TestValue2"); // Act & Assert - Assert.Throws(() => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(vectorSearchFilter, this._storagePropertyNames)); + Assert.Throws(() => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(vectorSearchFilter, this._model)); } [Fact] public void BuilderFilterByDefaultReturnsValidFilter() { // Arrange - var expectedFilter = new BsonDocument() { ["property_1"] = new BsonDocument() { ["$eq"] = "TestValue1" } }; + var expectedFilter = new BsonDocument() { ["Property1"] = new BsonDocument() { ["$eq"] = "TestValue1" } }; var vectorSearchFilter = new VectorSearchFilter().EqualTo("Property1", "TestValue1"); // Act - var filter = MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(vectorSearchFilter, this._storagePropertyNames); + var filter = MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(vectorSearchFilter, this._model); - Assert.Equal(filter.ToJson(), expectedFilter.ToJson()); + Assert.Equal(expectedFilter.ToJson(), filter.ToJson()); } } diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs index 18cc1a999c28..5b5efb539d91 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs @@ -37,7 +37,7 @@ public MongoDBVectorStoreRecordCollectionTests() public void 
ConstructorForModelWithoutKeyThrowsException()
     {
         // Act & Assert
-        var exception = Assert.Throws(() => new MongoDBVectorStoreRecordCollection(this._mockMongoDatabase.Object, "collection"));
+        var exception = Assert.Throws(() => new MongoDBVectorStoreRecordCollection(this._mockMongoDatabase.Object, "collection"));

         Assert.Contains("No key property found", exception.Message);
     }
diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordMapperTests.cs
index 65ccefcc6eee..de4099553b9d 100644
--- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordMapperTests.cs
+++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordMapperTests.cs
@@ -32,7 +32,7 @@ public MongoDBVectorStoreRecordMapperTests()
             ]
         };

-        this._sut = new(new VectorStoreRecordPropertyReader(typeof(MongoDBHotelModel), definition, null));
+        this._sut = new(new MongoDBModelBuilder().Build(typeof(MongoDBHotelModel), definition));
     }

     [Fact]
diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeGenericDataModelMapperTests.cs
deleted file mode 100644
index 0a96bb41cc3b..000000000000
--- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeGenericDataModelMapperTests.cs
+++ /dev/null
@@ -1,341 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using Microsoft.Extensions.VectorData;
-using Microsoft.SemanticKernel.Connectors.Pinecone;
-using Pinecone;
-using Xunit;
-
-namespace SemanticKernel.Connectors.Pinecone.UnitTests;
-
-/// 
-/// Contains tests for the class.
-/// 
-public class PineconeGenericDataModelMapperTests
-{
-    private static readonly VectorStoreRecordDefinition s_singleVectorStoreRecordDefinition = new()
-    {
-        Properties = new List
-        {
-            new VectorStoreRecordKeyProperty("Key", typeof(string)),
-            new VectorStoreRecordDataProperty("StringDataProp", typeof(string)),
-            new VectorStoreRecordDataProperty("IntDataProp", typeof(int)),
-            new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)),
-            new VectorStoreRecordDataProperty("LongDataProp", typeof(long)),
-            new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)),
-            new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)),
-            new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)),
-            new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)),
-            new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)),
-            new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)),
-            new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)),
-            new VectorStoreRecordDataProperty("TagListDataProp", typeof(string[])),
-            new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)),
-        },
-    };
-
-    private static readonly float[] s_vector = new float[] { 1.0f, 2.0f, 3.0f };
-    private static readonly string[] s_taglist = new string[] { "tag1", "tag2" };
-    private const string TestKeyString = "testKey";
-
-    [Fact]
-    public void MapFromDataToStorageModelMapsAllSupportedTypes()
-    {
-        // Arrange.
- var reader = new VectorStoreRecordPropertyReader( - typeof(VectorStoreGenericDataModel), - s_singleVectorStoreRecordDefinition, - new() { RequiresAtLeastOneVector = true, SupportsMultipleKeys = false, SupportsMultipleVectors = false }); - var sut = new PineconeGenericDataModelMapper(reader); - var dataModel = new VectorStoreGenericDataModel(TestKeyString) - { - Data = - { - ["StringDataProp"] = "string", - ["IntDataProp"] = 1, - ["NullableIntDataProp"] = 2, - ["LongDataProp"] = 3L, - ["NullableLongDataProp"] = 4L, - ["FloatDataProp"] = 5.0f, - ["NullableFloatDataProp"] = 6.0f, - ["DoubleDataProp"] = 7.0, - ["NullableDoubleDataProp"] = 8.0, - ["BoolDataProp"] = true, - ["NullableBoolDataProp"] = false, - ["TagListDataProp"] = s_taglist, - }, - Vectors = - { - ["FloatVector"] = new ReadOnlyMemory(s_vector), - }, - }; - - // Act. - var storageModel = sut.MapFromDataToStorageModel(dataModel); - - // Assert - Assert.Equal(TestKeyString, storageModel.Id); - Assert.Equal("string", (string?)storageModel.Metadata!["StringDataProp"]!.Value); - // MetadataValue converts all numeric types to double. - Assert.Equal(1, (double?)storageModel.Metadata["IntDataProp"]!.Value); - Assert.Equal(2, (double?)storageModel.Metadata["NullableIntDataProp"]!.Value); - Assert.Equal(3L, (double?)storageModel.Metadata["LongDataProp"]!.Value); - Assert.Equal(4L, (double?)storageModel.Metadata["NullableLongDataProp"]!.Value); - Assert.Equal(5.0f, (double?)storageModel.Metadata["FloatDataProp"]!.Value); - Assert.Equal(6.0f, (double?)storageModel.Metadata["NullableFloatDataProp"]!.Value); - Assert.Equal(7.0, (double?)storageModel.Metadata["DoubleDataProp"]!.Value); - Assert.Equal(8.0, (double?)storageModel.Metadata["NullableDoubleDataProp"]!.Value); - Assert.Equal(true, (bool?)storageModel.Metadata["BoolDataProp"]!.Value); - Assert.Equal(false, (bool?)storageModel.Metadata["NullableBoolDataProp"]!.Value); - Assert.Equal(s_taglist, ((IEnumerable?)(storageModel.Metadata["TagListDataProp"]!.Value!)) - .Select(x => x.Value as string) - .ToArray()); - Assert.Equal(s_vector, storageModel.Values); - } - - [Fact] - public void MapFromDataToStorageModelMapsNullValues() - { - // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordDataProperty("NullableTagListDataProp", typeof(string[])), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - }, - }; - - var dataModel = new VectorStoreGenericDataModel(TestKeyString) - { - Data = - { - ["StringDataProp"] = null, - ["NullableIntDataProp"] = null, - ["NullableTagListDataProp"] = null, - }, - Vectors = - { - ["FloatVector"] = new ReadOnlyMemory(s_vector), - }, - }; - - var reader = new VectorStoreRecordPropertyReader( - typeof(VectorStoreGenericDataModel), - vectorStoreRecordDefinition, - new() { RequiresAtLeastOneVector = true, SupportsMultipleKeys = false, SupportsMultipleVectors = false }); - var sut = new PineconeGenericDataModelMapper(reader); - - // Act - var storageModel = sut.MapFromDataToStorageModel(dataModel); - - // Assert - Assert.Equal(TestKeyString, storageModel.Id); - Assert.Null(storageModel.Metadata!["StringDataProp"]); - Assert.Null(storageModel.Metadata["NullableIntDataProp"]); - Assert.Null(storageModel.Metadata["NullableTagListDataProp"]); - } - - [Fact] - public 
void MapFromStorageToDataModelMapsAllSupportedTypes() - { - // Arrange - var reader = new VectorStoreRecordPropertyReader( - typeof(VectorStoreGenericDataModel), - s_singleVectorStoreRecordDefinition, - new() { RequiresAtLeastOneVector = true, SupportsMultipleKeys = false, SupportsMultipleVectors = false }); - var sut = new PineconeGenericDataModelMapper(reader); - var storageModel = new Vector() - { - Id = TestKeyString, - Metadata = new Metadata() - { - ["StringDataProp"] = (MetadataValue)"string", - ["IntDataProp"] = (MetadataValue)1, - ["NullableIntDataProp"] = (MetadataValue)2, - ["LongDataProp"] = (MetadataValue)3L, - ["NullableLongDataProp"] = (MetadataValue)4L, - ["FloatDataProp"] = (MetadataValue)5.0f, - ["NullableFloatDataProp"] = (MetadataValue)6.0f, - ["DoubleDataProp"] = (MetadataValue)7.0, - ["NullableDoubleDataProp"] = (MetadataValue)8.0, - ["BoolDataProp"] = (MetadataValue)true, - ["NullableBoolDataProp"] = (MetadataValue)false, - ["TagListDataProp"] = (MetadataValue)new MetadataValue[] { "tag1", "tag2" } - }, - Values = new float[] { 1.0f, 2.0f, 3.0f } - }; - - // Act - var dataModel = sut.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = true }); - - // Assert - Assert.Equal(TestKeyString, dataModel.Key); - Assert.Equal("string", (string?)dataModel.Data["StringDataProp"]); - Assert.Equal(1, (int?)dataModel.Data["IntDataProp"]); - Assert.Equal(2, (int?)dataModel.Data["NullableIntDataProp"]); - Assert.Equal(3L, (long?)dataModel.Data["LongDataProp"]); - Assert.Equal(4L, (long?)dataModel.Data["NullableLongDataProp"]); - Assert.Equal(5.0f, (float?)dataModel.Data["FloatDataProp"]); - Assert.Equal(6.0f, (float?)dataModel.Data["NullableFloatDataProp"]); - Assert.Equal(7.0, (double?)dataModel.Data["DoubleDataProp"]); - Assert.Equal(8.0, (double?)dataModel.Data["NullableDoubleDataProp"]); - Assert.Equal(true, (bool?)dataModel.Data["BoolDataProp"]); - Assert.Equal(false, (bool?)dataModel.Data["NullableBoolDataProp"]); - Assert.Equal(s_taglist, (string[]?)dataModel.Data["TagListDataProp"]); - Assert.Equal(s_vector, ((ReadOnlyMemory?)dataModel.Vectors["FloatVector"])!.Value.ToArray()); - } - - [Fact] - public void MapFromStorageToDataModelMapsNullValues() - { - // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordDataProperty("NullableTagListDataProp", typeof(string[])), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - }, - }; - - var storageModel = new Vector() - { - Id = TestKeyString, - Metadata = new Metadata() - { - ["StringDataProp"] = null, - ["NullableIntDataProp"] = null, - ["NullableTagListDataProp"] = null, - }, - Values = new float[] { 1.0f, 2.0f, 3.0f } - }; - - var reader = new VectorStoreRecordPropertyReader( - typeof(VectorStoreGenericDataModel), - vectorStoreRecordDefinition, - new() { RequiresAtLeastOneVector = true, SupportsMultipleKeys = false, SupportsMultipleVectors = false }); - var sut = new PineconeGenericDataModelMapper(reader); - - // Act - var dataModel = sut.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = true }); - - // Assert - Assert.Equal(TestKeyString, dataModel.Key); - Assert.Null(dataModel.Data["StringDataProp"]); - Assert.Null(dataModel.Data["NullableIntDataProp"]); - 
Assert.Null(dataModel.Data["NullableTagListDataProp"]); - Assert.Equal(s_vector, ((ReadOnlyMemory?)dataModel.Vectors["FloatVector"])!.Value.ToArray()); - } - - [Fact] - public void MapFromDataToStorageModelThrowsForInvalidVectorType() - { - // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - }, - }; - - var reader = new VectorStoreRecordPropertyReader( - typeof(VectorStoreGenericDataModel), - vectorStoreRecordDefinition, - new() { RequiresAtLeastOneVector = true, SupportsMultipleKeys = false, SupportsMultipleVectors = false }); - var sut = new PineconeGenericDataModelMapper(reader); - - var dataModel = new VectorStoreGenericDataModel(TestKeyString) - { - Vectors = - { - ["FloatVector"] = "not a vector", - }, - }; - - // Act - var exception = Assert.Throws(() => sut.MapFromDataToStorageModel(dataModel)); - - // Assert - Assert.Equal("Vector property 'FloatVector' on provided record of type VectorStoreGenericDataModel must be of type ReadOnlyMemory and not null.", exception.Message); - } - - [Fact] - public void MapFromDataToStorageModelSkipsMissingProperties() - { - // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - }, - }; - - var reader = new VectorStoreRecordPropertyReader( - typeof(VectorStoreGenericDataModel), - vectorStoreRecordDefinition, - new() { RequiresAtLeastOneVector = true, SupportsMultipleKeys = false, SupportsMultipleVectors = false }); - var sut = new PineconeGenericDataModelMapper(reader); - - var dataModel = new VectorStoreGenericDataModel(TestKeyString) - { - Vectors = { ["FloatVector"] = new ReadOnlyMemory(s_vector) }, - }; - - // Act - var storageModel = sut.MapFromDataToStorageModel(dataModel); - - // Assert - Assert.Equal(TestKeyString, storageModel.Id); - Assert.False(storageModel.Metadata!.ContainsKey("StringDataProp")); - Assert.Equal(s_vector, storageModel.Values); - } - - [Fact] - public void MapFromStorageToDataModelSkipsMissingProperties() - { - // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - }, - }; - - var reader = new VectorStoreRecordPropertyReader( - typeof(VectorStoreGenericDataModel), - vectorStoreRecordDefinition, - new() { RequiresAtLeastOneVector = true, SupportsMultipleKeys = false, SupportsMultipleVectors = false }); - var sut = new PineconeGenericDataModelMapper(reader); - - var storageModel = new Vector() - { - Id = TestKeyString, - Values = new float[] { 1.0f, 2.0f, 3.0f } - }; - - // Act - var dataModel = sut.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = true }); - - // Assert - Assert.Equal(TestKeyString, dataModel.Key); - Assert.False(dataModel.Data.ContainsKey("StringDataProp")); - Assert.Equal(s_vector, ((ReadOnlyMemory?)dataModel.Vectors["FloatVector"])!.Value.ToArray()); - } -} diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs 
b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs index 0dc2620140f3..e9b3a8e3cf17 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs @@ -30,9 +30,9 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() { Properties = new List { - new VectorStoreRecordKeyProperty("Id", typeof(string)), - new VectorStoreRecordDataProperty("Text", typeof(string)), - new VectorStoreRecordVectorProperty("Embedding", typeof(ReadOnlyMemory)) { Dimensions = 4 }, + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("OriginalNameData", typeof(string)), + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory?)) { Dimensions = 4 }, } }; var pineconeClient = new Sdk.PineconeClient("fake api key"); diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/Connectors.Postgres.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/Connectors.Postgres.UnitTests.csproj index 5698a909022e..040d35b2de69 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/Connectors.Postgres.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/Connectors.Postgres.UnitTests.csproj @@ -9,6 +9,7 @@ disable false $(NoWarn);SKEXP0001,SKEXP0020,VSTHRD111,CA2007,CS1591 + $(NoWarn);MEVD9001 diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresGenericDataModelMapperTests.cs deleted file mode 100644 index d9e97fc6b855..000000000000 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresGenericDataModelMapperTests.cs +++ /dev/null @@ -1,190 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Connectors.Postgres; -using Pgvector; -using Xunit; - -namespace SemanticKernel.Connectors.Postgres.UnitTests; - -/// -/// Unit tests for class. 
-/// -public sealed class PostgresGenericDataModelMapperTests -{ - [Fact] - public void MapFromDataToStorageModelWithStringKeyReturnsValidStorageModel() - { - // Arrange - var definition = GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); - var dataModel = GetGenericDataModel("key"); - - var mapper = new PostgresGenericDataModelMapper(propertyReader); - - // Act - var result = mapper.MapFromDataToStorageModel(dataModel); - - // Assert - Assert.Equal("key", result["Key"]); - Assert.Equal("Value1", result["StringProperty"]); - Assert.Equal(5, result["IntProperty"]); - - var vector = result["FloatVector"] as Vector; - - Assert.NotNull(vector); - Assert.True(vector.ToArray().Length > 0); - } - - [Fact] - public void MapFromDataToStorageModelWithNumericKeyReturnsValidStorageModel() - { - // Arrange - var definition = GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); - var dataModel = GetGenericDataModel(1); - - var mapper = new PostgresGenericDataModelMapper(propertyReader); - - // Act - var result = mapper.MapFromDataToStorageModel(dataModel); - - // Assert - Assert.Equal(1, result["Key"]); - Assert.Equal("Value1", result["StringProperty"]); - Assert.Equal(5, result["IntProperty"]); - - var vector = result["FloatVector"] as Vector; - - Assert.NotNull(vector); - Assert.True(vector.ToArray().Length > 0); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void MapFromStorageToDataModelWithStringKeyReturnsValidGenericModel(bool includeVectors) - { - // Arrange - var vector = new ReadOnlyMemory([1.1f, 2.2f, 3.3f, 4.4f]); - var storageVector = PostgresVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); - - var storageModel = new Dictionary - { - ["Key"] = "key", - ["StringProperty"] = "Value1", - ["IntProperty"] = 5, - ["FloatVector"] = storageVector - }; - - var definition = GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); - - var mapper = new PostgresGenericDataModelMapper(propertyReader); - - // Act - var result = mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors }); - - // Assert - Assert.Equal("key", result.Key); - Assert.Equal("Value1", result.Data["StringProperty"]); - Assert.Equal(5, result.Data["IntProperty"]); - - if (includeVectors) - { - Assert.NotNull(result.Vectors["FloatVector"]); - Assert.Equal(vector.ToArray(), ((ReadOnlyMemory)result.Vectors["FloatVector"]!).ToArray()); - } - else - { - Assert.False(result.Vectors.ContainsKey("FloatVector")); - } - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void MapFromStorageToDataModelWithNumericKeyReturnsValidGenericModel(bool includeVectors) - { - // Arrange - var vector = new ReadOnlyMemory([1.1f, 2.2f, 3.3f, 4.4f]); - var storageVector = PostgresVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); - - var storageModel = new Dictionary - { - ["Key"] = 1, - ["StringProperty"] = "Value1", - ["IntProperty"] = 5, - ["FloatVector"] = storageVector - }; - - var definition = GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); - - var mapper = new PostgresGenericDataModelMapper(propertyReader); - - // Act - var result = mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors }); - - // Assert - Assert.Equal(1, result.Key); - Assert.Equal("Value1", result.Data["StringProperty"]); - Assert.Equal(5, result.Data["IntProperty"]); - - if (includeVectors) - { - Assert.NotNull(result.Vectors["FloatVector"]); - 
Assert.Equal(vector.ToArray(), ((ReadOnlyMemory)result.Vectors["FloatVector"]!).ToArray()); - } - else - { - Assert.False(result.Vectors.ContainsKey("FloatVector")); - } - } - - #region private - - private static VectorStoreRecordDefinition GetRecordDefinition() - { - return new VectorStoreRecordDefinition - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(TKey)), - new VectorStoreRecordDataProperty("StringProperty", typeof(string)), - new VectorStoreRecordDataProperty("IntProperty", typeof(int)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - } - }; - } - - private static VectorStoreGenericDataModel GetGenericDataModel(TKey key) - { - return new VectorStoreGenericDataModel(key) - { - Data = new() - { - ["StringProperty"] = "Value1", - ["IntProperty"] = 5 - }, - Vectors = new() - { - ["FloatVector"] = new ReadOnlyMemory([1.1f, 2.2f, 3.3f, 4.4f]) - } - }; - } - - private static VectorStoreRecordPropertyReader GetPropertyReader(VectorStoreRecordDefinition definition) - { - return new VectorStoreRecordPropertyReader(typeof(TRecord), definition, new() - { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true - }); - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs index 8f0058a1c8ba..2d9d82fa5637 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Linq; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.Postgres; using Pgvector; using Xunit; @@ -51,7 +52,9 @@ public void TestBuildCreateTableCommand(bool ifNotExists) ] }; - var cmdInfo = builder.BuildCreateTableCommand("public", "testcollection", recordDefinition.Properties, ifNotExists: ifNotExists); + var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(VectorStoreGenericDataModel), recordDefinition); + + var cmdInfo = builder.BuildCreateTableCommand("public", "testcollection", model, ifNotExists: ifNotExists); // Check for expected properties; integration tests will validate the actual SQL. 
Assert.Contains("public.\"testcollection\" (", cmdInfo.CommandText); @@ -292,10 +295,12 @@ public void TestBuildGetCommand() ] }; + var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(VectorStoreGenericDataModel), recordDefinition); + var key = 123; // Act - var cmdInfo = builder.BuildGetCommand("public", "testcollection", recordDefinition.Properties, key, includeVectors: true); + var cmdInfo = builder.BuildGetCommand("public", "testcollection", model, key, includeVectors: true); // Assert Assert.Contains("SELECT", cmdInfo.CommandText); @@ -339,8 +344,10 @@ public void TestBuildGetBatchCommand() var keys = new List { 123, 124 }; + var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(VectorStoreGenericDataModel), recordDefinition); + // Act - var cmdInfo = builder.BuildGetBatchCommand("public", "testcollection", recordDefinition.Properties, keys, includeVectors: true); + var cmdInfo = builder.BuildGetBatchCommand("public", "testcollection", model, keys, includeVectors: true); // Assert Assert.Contains("SELECT", cmdInfo.CommandText); diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs index 0533ab28c3f3..a931bbea0b67 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs @@ -72,7 +72,7 @@ public void ThrowsForUnsupportedType() }; // Act & Assert - Assert.Throws(() => new PostgresVectorStoreRecordCollection>(this._postgresClientMock.Object, TestCollectionName, options)); + Assert.Throws(() => new PostgresVectorStoreRecordCollection>(this._postgresClientMock.Object, TestCollectionName, options)); } [Fact] diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs index 11dfd2ecd564..164b4034d5ed 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.Postgres; using Pgvector; using Xunit; @@ -19,10 +20,10 @@ public void MapFromDataToStorageModelWithStringKeyReturnsValidStorageModel() { // Arrange var definition = GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); - var dataModel = GetDataModel("key"); + var model = GetModel>(definition); + var dataModel = GetRecord("key"); - var mapper = new PostgresVectorStoreRecordMapper>(propertyReader); + var mapper = new PostgresVectorStoreRecordMapper>(model); // Act var result = mapper.MapFromDataToStorageModel(dataModel); @@ -43,17 +44,17 @@ public void MapFromDataToStorageModelWithStringKeyReturnsValidStorageModel() public void MapFromDataToStorageModelWithNumericKeyReturnsValidStorageModel() { // Arrange - var definition = GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); - var dataModel = GetDataModel(1); + var definition = GetRecordDefinition(); + var propertyReader = GetModel>(definition); + var dataModel = 
GetRecord(1); - var mapper = new PostgresVectorStoreRecordMapper>(propertyReader); + var mapper = new PostgresVectorStoreRecordMapper>(propertyReader); // Act var result = mapper.MapFromDataToStorageModel(dataModel); // Assert - Assert.Equal((ulong)1, result["Key"]); + Assert.Equal(1L, result["Key"]); Assert.Equal("Value1", result["StringProperty"]); Assert.Equal(5, result["IntProperty"]); Assert.Equal(new List { "Value2", "Value3" }, result["StringArray"]); @@ -83,7 +84,7 @@ public void MapFromStorageToDataModelWithStringKeyReturnsValidGenericModel(bool }; var definition = GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); + var propertyReader = GetModel>(definition); var mapper = new PostgresVectorStoreRecordMapper>(propertyReader); @@ -118,23 +119,23 @@ public void MapFromStorageToDataModelWithNumericKeyReturnsValidGenericModel(bool var storageModel = new Dictionary { - ["Key"] = (ulong)1, + ["Key"] = 1L, ["StringProperty"] = "Value1", ["IntProperty"] = 5, ["StringArray"] = new List { "Value2", "Value3" }, ["FloatVector"] = storageVector }; - var definition = GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); + var definition = GetRecordDefinition(); + var propertyReader = GetModel>(definition); - var mapper = new PostgresVectorStoreRecordMapper>(propertyReader); + var mapper = new PostgresVectorStoreRecordMapper>(propertyReader); // Act var result = mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors }); // Assert - Assert.Equal((ulong)1, result.Key); + Assert.Equal(1L, result.Key); Assert.Equal("Value1", result.StringProperty); Assert.Equal(5, result.IntProperty); Assert.Equal(new List { "Value2", "Value3" }, result.StringArray); @@ -167,7 +168,7 @@ private static VectorStoreRecordDefinition GetRecordDefinition() }; } - private static TestRecord GetDataModel(TKey key) + private static TestRecord GetRecord(TKey key) { return new TestRecord { @@ -179,15 +180,8 @@ private static TestRecord GetDataModel(TKey key) }; } - private static VectorStoreRecordPropertyReader GetPropertyReader(VectorStoreRecordDefinition definition) - { - return new VectorStoreRecordPropertyReader(typeof(TRecord), definition, new() - { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true - }); - } + private static VectorStoreRecordModel GetModel(VectorStoreRecordDefinition definition) + => new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(TRecord), definition); #pragma warning disable CA1812 private sealed class TestRecord diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordPropertyMappingTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordPropertyMappingTests.cs index 1cccf9a5cf12..a53d4d2c19f8 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordPropertyMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordPropertyMappingTests.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.Postgres; using Pgvector; using Xunit; @@ -38,21 +39,6 @@ public void MapVectorForStorageModelReturnsVector() Assert.True(storageModelVector.ToArray().Length > 0); } - [Fact] - public void MapVectorForDataModelReturnsReadOnlyMemory() - { - // Arrange - var vector = 
new ReadOnlyMemory([1.1f, 2.2f, 3.3f, 4.4f]); - var pgVector = PostgresVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); - - // Act - var dataModelVector = PostgresVectorStoreRecordPropertyMapping.MapVectorForDataModel(pgVector); - - // Assert - Assert.NotNull(dataModelVector); - Assert.Equal(vector.ToArray(), dataModelVector!.Value.ToArray()); - } - [Fact] public void GetPropertyValueReturnsCorrectValuesForLists() { @@ -104,12 +90,13 @@ public void GetPropertyValueReturnsCorrectNullableValue() public void GetIndexInfoReturnsCorrectValues() { // Arrange - List vectorProperties = [ - new VectorStoreRecordVectorProperty("vector1", typeof(ReadOnlyMemory?)) { IndexKind = IndexKind.Hnsw, Dimensions = 1000 }, - new VectorStoreRecordVectorProperty("vector2", typeof(ReadOnlyMemory?)) { IndexKind = IndexKind.Flat, Dimensions = 3000 }, - new VectorStoreRecordVectorProperty("vector3", typeof(ReadOnlyMemory?)) { IndexKind = IndexKind.Hnsw, Dimensions = 900, DistanceFunction = DistanceFunction.ManhattanDistance }, - new VectorStoreRecordDataProperty("data1", typeof(string)) { IsFilterable = true }, - new VectorStoreRecordDataProperty("data2", typeof(string)) { IsFilterable = false }, + List vectorProperties = + [ + new VectorStoreRecordVectorPropertyModel("vector1", typeof(ReadOnlyMemory?)) { IndexKind = IndexKind.Hnsw, Dimensions = 1000 }, + new VectorStoreRecordVectorPropertyModel("vector2", typeof(ReadOnlyMemory?)) { IndexKind = IndexKind.Flat, Dimensions = 3000 }, + new VectorStoreRecordVectorPropertyModel("vector3", typeof(ReadOnlyMemory?)) { IndexKind = IndexKind.Hnsw, Dimensions = 900, DistanceFunction = DistanceFunction.ManhattanDistance }, + new VectorStoreRecordDataPropertyModel("data1", typeof(string)) { IsFilterable = true }, + new VectorStoreRecordDataPropertyModel("data2", typeof(string)) { IsFilterable = false } ]; // Act @@ -147,7 +134,7 @@ public void GetIndexInfoReturnsCorrectValues() public void GetVectorIndexInfoReturnsThrowsForInvalidDimensions(string indexKind, int dimensions) { // Arrange - var vectorProperty = new VectorStoreRecordVectorProperty("vector", typeof(ReadOnlyMemory?)) { IndexKind = indexKind, Dimensions = dimensions }; + var vectorProperty = new VectorStoreRecordVectorPropertyModel("vector", typeof(ReadOnlyMemory?)) { IndexKind = indexKind, Dimensions = dimensions }; // Act & Assert Assert.Throws(() => PostgresVectorStoreRecordPropertyMapping.GetIndexInfo([vectorProperty])); diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/Connectors.Qdrant.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/Connectors.Qdrant.UnitTests.csproj index 87782f3d2e8f..3f2ce94b7986 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/Connectors.Qdrant.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/Connectors.Qdrant.UnitTests.csproj @@ -9,6 +9,7 @@ disable false $(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0050 + $(NoWarn);MEVD9001 diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantGenericDataModelMapperTests.cs deleted file mode 100644 index 9710bb3b0640..000000000000 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantGenericDataModelMapperTests.cs +++ /dev/null @@ -1,405 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Linq; -using Microsoft.Extensions.VectorData; -using Qdrant.Client.Grpc; -using Xunit; - -namespace Microsoft.SemanticKernel.Connectors.Qdrant.UnitTests; - -/// -/// Contains tests for the class. -/// -public class QdrantGenericDataModelMapperTests -{ - private static readonly VectorStoreRecordDefinition s_singleVectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), - new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), - new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), - new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), - new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), - new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), - new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), - new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), - new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), - new VectorStoreRecordDataProperty("TagListDataProp", typeof(string[])), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - }, - }; - - private static readonly VectorStoreRecordDefinition s_multiVectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), - new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), - new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), - new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), - new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), - new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), - new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), - new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), - new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), - new VectorStoreRecordDataProperty("TagListDataProp", typeof(string[])), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), - }, - }; - - private static readonly float[] s_vector1 = new float[] { 1.0f, 2.0f, 3.0f }; - private static readonly float[] s_vector2 = new float[] { 4.0f, 5.0f, 6.0f }; - private static readonly string[] s_taglist = new string[] { "tag1", "tag2" }; - private const string TestGuidKeyString = "11111111-1111-1111-1111-111111111111"; - private static readonly Guid s_testGuidKey = Guid.Parse(TestGuidKeyString); - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void MapFromDataToStorageModelMapsAllSupportedTypes(bool hasNamedVectors) - { - // Arrange. - var reader = new VectorStoreRecordPropertyReader(typeof(VectorStoreGenericDataModel), hasNamedVectors ? 
s_multiVectorStoreRecordDefinition : s_singleVectorStoreRecordDefinition, null); - var sut = new QdrantGenericDataModelMapper(reader, hasNamedVectors); - var dataModel = new VectorStoreGenericDataModel(1ul) - { - Data = - { - ["StringDataProp"] = "string", - ["IntDataProp"] = 1, - ["NullableIntDataProp"] = 2, - ["LongDataProp"] = 3L, - ["NullableLongDataProp"] = 4L, - ["FloatDataProp"] = 5.0f, - ["NullableFloatDataProp"] = 6.0f, - ["DoubleDataProp"] = 7.0, - ["NullableDoubleDataProp"] = 8.0, - ["BoolDataProp"] = true, - ["NullableBoolDataProp"] = false, - ["TagListDataProp"] = s_taglist, - }, - Vectors = - { - ["FloatVector"] = new ReadOnlyMemory(s_vector1), - }, - }; - - if (hasNamedVectors) - { - dataModel.Vectors.Add("NullableFloatVector", new ReadOnlyMemory(s_vector2)); - } - - // Act. - var storageModel = sut.MapFromDataToStorageModel(dataModel); - - // Assert - Assert.Equal(1ul, storageModel.Id.Num); - Assert.Equal("string", (string?)storageModel.Payload["StringDataProp"].StringValue); - Assert.Equal(1, (int?)storageModel.Payload["IntDataProp"].IntegerValue); - Assert.Equal(2, (int?)storageModel.Payload["NullableIntDataProp"].IntegerValue); - Assert.Equal(3L, (long?)storageModel.Payload["LongDataProp"].IntegerValue); - Assert.Equal(4L, (long?)storageModel.Payload["NullableLongDataProp"].IntegerValue); - Assert.Equal(5.0f, (float?)storageModel.Payload["FloatDataProp"].DoubleValue); - Assert.Equal(6.0f, (float?)storageModel.Payload["NullableFloatDataProp"].DoubleValue); - Assert.Equal(7.0, (double?)storageModel.Payload["DoubleDataProp"].DoubleValue); - Assert.Equal(8.0, (double?)storageModel.Payload["NullableDoubleDataProp"].DoubleValue); - Assert.Equal(true, (bool?)storageModel.Payload["BoolDataProp"].BoolValue); - Assert.Equal(false, (bool?)storageModel.Payload["NullableBoolDataProp"].BoolValue); - Assert.Equal(s_taglist, storageModel.Payload["TagListDataProp"].ListValue.Values.Select(x => x.StringValue).ToArray()); - - if (hasNamedVectors) - { - Assert.Equal(s_vector1, storageModel.Vectors.Vectors_.Vectors["FloatVector"].Data.ToArray()); - Assert.Equal(s_vector2, storageModel.Vectors.Vectors_.Vectors["NullableFloatVector"].Data.ToArray()); - } - else - { - Assert.Equal(s_vector1, storageModel.Vectors.Vector.Data.ToArray()); - } - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void MapFromDataToStorageModelMapsNullValues(bool hasNamedVectors) - { - // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(Guid)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordDataProperty("NullableTagListDataProp", typeof(string[])), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - }, - }; - - var dataModel = new VectorStoreGenericDataModel(s_testGuidKey) - { - Data = - { - ["StringDataProp"] = null, - ["NullableIntDataProp"] = null, - ["NullableTagListDataProp"] = null, - }, - Vectors = - { - ["FloatVector"] = new ReadOnlyMemory(s_vector1), - }, - }; - - var reader = new VectorStoreRecordPropertyReader(typeof(VectorStoreGenericDataModel), vectorStoreRecordDefinition, null); - var sut = (IVectorStoreRecordMapper, PointStruct>)new QdrantGenericDataModelMapper(reader, hasNamedVectors); - - // Act - var storageModel = sut.MapFromDataToStorageModel(dataModel); - - // Assert - Assert.Equal(TestGuidKeyString, storageModel.Id.Uuid); - 
Assert.True(storageModel.Payload["StringDataProp"].HasNullValue); - Assert.True(storageModel.Payload["NullableIntDataProp"].HasNullValue); - Assert.True(storageModel.Payload["NullableTagListDataProp"].HasNullValue); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void MapFromStorageToDataModelMapsAllSupportedTypes(bool hasNamedVectors) - { - // Arrange - var reader = new VectorStoreRecordPropertyReader(typeof(VectorStoreGenericDataModel), hasNamedVectors ? s_multiVectorStoreRecordDefinition : s_singleVectorStoreRecordDefinition, null); - var sut = new QdrantGenericDataModelMapper(reader, hasNamedVectors); - var storageModel = new PointStruct() - { - Id = new PointId() { Num = 1 }, - Payload = - { - ["StringDataProp"] = new Value() { StringValue = "string" }, - ["IntDataProp"] = new Value() { IntegerValue = 1 }, - ["NullableIntDataProp"] = new Value() { IntegerValue = 2 }, - ["LongDataProp"] = new Value() { IntegerValue = 3 }, - ["NullableLongDataProp"] = new Value() { IntegerValue = 4 }, - ["FloatDataProp"] = new Value() { DoubleValue = 5.0 }, - ["NullableFloatDataProp"] = new Value() { DoubleValue = 6.0 }, - ["DoubleDataProp"] = new Value() { DoubleValue = 7.0 }, - ["NullableDoubleDataProp"] = new Value() { DoubleValue = 8.0 }, - ["BoolDataProp"] = new Value() { BoolValue = true }, - ["NullableBoolDataProp"] = new Value() { BoolValue = false }, - ["TagListDataProp"] = new Value() - { - ListValue = new ListValue() - { - Values = - { - new Value() { StringValue = "tag1" }, - new Value() { StringValue = "tag2" }, - }, - }, - }, - }, - Vectors = new Vectors() - }; - - if (hasNamedVectors) - { - storageModel.Vectors.Vectors_ = new NamedVectors(); - storageModel.Vectors.Vectors_.Vectors.Add("FloatVector", new Vector() { Data = { 1.0f, 2.0f, 3.0f } }); - storageModel.Vectors.Vectors_.Vectors.Add("NullableFloatVector", new Vector() { Data = { 4.0f, 5.0f, 6.0f } }); - } - else - { - storageModel.Vectors.Vector = new Vector() { Data = { 1.0f, 2.0f, 3.0f } }; - } - - // Act - var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions()); - - // Assert - Assert.Equal(1ul, dataModel.Key); - Assert.Equal("string", (string?)dataModel.Data["StringDataProp"]); - Assert.Equal(1, (int?)dataModel.Data["IntDataProp"]); - Assert.Equal(2, (int?)dataModel.Data["NullableIntDataProp"]); - Assert.Equal(3L, (long?)dataModel.Data["LongDataProp"]); - Assert.Equal(4L, (long?)dataModel.Data["NullableLongDataProp"]); - Assert.Equal(5.0f, (float?)dataModel.Data["FloatDataProp"]); - Assert.Equal(6.0f, (float?)dataModel.Data["NullableFloatDataProp"]); - Assert.Equal(7.0, (double?)dataModel.Data["DoubleDataProp"]); - Assert.Equal(8.0, (double?)dataModel.Data["NullableDoubleDataProp"]); - Assert.Equal(true, (bool?)dataModel.Data["BoolDataProp"]); - Assert.Equal(false, (bool?)dataModel.Data["NullableBoolDataProp"]); - Assert.Equal(s_taglist, (string[]?)dataModel.Data["TagListDataProp"]); - - if (hasNamedVectors) - { - Assert.Equal(s_vector1, ((ReadOnlyMemory?)dataModel.Vectors["FloatVector"])!.Value.ToArray()); - Assert.Equal(s_vector2, ((ReadOnlyMemory?)dataModel.Vectors["NullableFloatVector"])!.Value.ToArray()); - } - else - { - Assert.Equal(s_vector1, ((ReadOnlyMemory?)dataModel.Vectors["FloatVector"])!.Value.ToArray()); - } - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void MapFromStorageToDataModelMapsNullValues(bool hasNamedVectors) - { - // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List 
- { - new VectorStoreRecordKeyProperty("Key", typeof(Guid)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordDataProperty("NullableTagListDataProp", typeof(string[])), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - }, - }; - - var storageModel = new PointStruct() - { - Id = new PointId() { Uuid = TestGuidKeyString }, - Payload = - { - ["StringDataProp"] = new Value() { NullValue = new NullValue() }, - ["NullableIntDataProp"] = new Value() { NullValue = new NullValue() }, - ["NullableTagListDataProp"] = new Value() { NullValue = new NullValue() }, - }, - Vectors = new Vectors() - }; - - if (hasNamedVectors) - { - storageModel.Vectors.Vectors_ = new NamedVectors(); - storageModel.Vectors.Vectors_.Vectors.Add("FloatVector", new Vector() { Data = { 1.0f, 2.0f, 3.0f } }); - } - else - { - storageModel.Vectors.Vector = new Vector() { Data = { 1.0f, 2.0f, 3.0f } }; - } - - var reader = new VectorStoreRecordPropertyReader(typeof(VectorStoreGenericDataModel), vectorStoreRecordDefinition, null); - var sut = (IVectorStoreRecordMapper, PointStruct>)new QdrantGenericDataModelMapper(reader, hasNamedVectors); - - // Act - var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions()); - - // Assert - Assert.Equal(s_testGuidKey, dataModel.Key); - Assert.Null(dataModel.Data["StringDataProp"]); - Assert.Null(dataModel.Data["NullableIntDataProp"]); - Assert.Null(dataModel.Data["NullableTagListDataProp"]); - Assert.Equal(s_vector1, ((ReadOnlyMemory?)dataModel.Vectors["FloatVector"])!.Value.ToArray()); - } - - [Fact] - public void MapFromDataToStorageModelThrowsForInvalidVectorType() - { - // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(ulong)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - }, - }; - - var reader = new VectorStoreRecordPropertyReader(typeof(VectorStoreGenericDataModel), vectorStoreRecordDefinition, null); - var sut = new QdrantGenericDataModelMapper(reader, false); - - var dataModel = new VectorStoreGenericDataModel(1ul) - { - Vectors = - { - ["FloatVector"] = "not a vector", - }, - }; - - // Act - var exception = Assert.Throws(() => sut.MapFromDataToStorageModel(dataModel)); - - // Assert - Assert.Equal("Vector property 'FloatVector' on provided record of type VectorStoreGenericDataModel must be of type ReadOnlyMemory and not null.", exception.Message); - } - - [Fact] - public void MapFromDataToStorageModelSkipsMissingProperties() - { - // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(ulong)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - }, - }; - - var reader = new VectorStoreRecordPropertyReader(typeof(VectorStoreGenericDataModel), vectorStoreRecordDefinition, null); - var sut = new QdrantGenericDataModelMapper(reader, false); - - var dataModel = new VectorStoreGenericDataModel(1ul) - { - Vectors = { ["FloatVector"] = new ReadOnlyMemory(s_vector1) }, - }; - - // Act - var storageModel = sut.MapFromDataToStorageModel(dataModel); - - // Assert - Assert.Equal(1ul, storageModel.Id.Num); - Assert.False(storageModel.Payload.ContainsKey("StringDataProp")); - 
Assert.Equal(s_vector1, storageModel.Vectors.Vector.Data.ToArray()); - } - - [Fact] - public void MapFromStorageToDataModelSkipsMissingProperties() - { - // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(ulong)), - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - }, - }; - - var reader = new VectorStoreRecordPropertyReader(typeof(VectorStoreGenericDataModel), vectorStoreRecordDefinition, null); - var sut = new QdrantGenericDataModelMapper(reader, false); - - var storageModel = new PointStruct() - { - Id = new PointId() { Num = 1 }, - Vectors = new Vectors() - { - Vector = new Vector() { Data = { 1.0f, 2.0f, 3.0f } } - }, - }; - - // Act - var dataModel = sut.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = true }); - - // Assert - Assert.Equal(1ul, dataModel.Key); - Assert.False(dataModel.Data.ContainsKey("StringDataProp")); - Assert.Equal(s_vector1, ((ReadOnlyMemory?)dataModel.Vectors["FloatVector"])!.Value.ToArray()); - } -} diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionCreateMappingTests.cs index 9dbcec1c88b3..79c93a86fb71 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionCreateMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionCreateMappingTests.cs @@ -1,8 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Collections.Generic; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Qdrant.Client.Grpc; using Xunit; @@ -17,7 +17,7 @@ public class QdrantVectorStoreCollectionCreateMappingTests public void MapSingleVectorCreatesVectorParams() { // Arrange. - var vectorProperty = new VectorStoreRecordVectorProperty("testvector", typeof(ReadOnlyMemory)) { Dimensions = 4, DistanceFunction = DistanceFunction.DotProductSimilarity }; + var vectorProperty = new VectorStoreRecordVectorPropertyModel("testvector", typeof(ReadOnlyMemory)) { Dimensions = 4, DistanceFunction = DistanceFunction.DotProductSimilarity }; // Act. var actual = QdrantVectorStoreCollectionCreateMapping.MapSingleVector(vectorProperty); @@ -32,7 +32,7 @@ public void MapSingleVectorCreatesVectorParams() public void MapSingleVectorDefaultsToCosine() { // Arrange. - var vectorProperty = new VectorStoreRecordVectorProperty("testvector", typeof(ReadOnlyMemory)) { Dimensions = 4 }; + var vectorProperty = new VectorStoreRecordVectorPropertyModel("testvector", typeof(ReadOnlyMemory)) { Dimensions = 4 }; // Act. var actual = QdrantVectorStoreCollectionCreateMapping.MapSingleVector(vectorProperty); @@ -45,7 +45,7 @@ public void MapSingleVectorDefaultsToCosine() public void MapSingleVectorThrowsForUnsupportedDistanceFunction() { // Arrange. - var vectorProperty = new VectorStoreRecordVectorProperty("testvector", typeof(ReadOnlyMemory)) { Dimensions = 4, DistanceFunction = DistanceFunction.CosineDistance }; + var vectorProperty = new VectorStoreRecordVectorPropertyModel("testvector", typeof(ReadOnlyMemory)) { Dimensions = 4, DistanceFunction = DistanceFunction.CosineDistance }; // Act and assert. 
Assert.Throws(() => QdrantVectorStoreCollectionCreateMapping.MapSingleVector(vectorProperty)); @@ -57,7 +57,7 @@ public void MapSingleVectorThrowsForUnsupportedDistanceFunction() public void MapSingleVectorThrowsIfDimensionsIsInvalid(int? dimensions) { // Arrange. - var vectorProperty = new VectorStoreRecordVectorProperty("testvector", typeof(ReadOnlyMemory)) { Dimensions = dimensions }; + var vectorProperty = new VectorStoreRecordVectorPropertyModel("testvector", typeof(ReadOnlyMemory)) { Dimensions = dimensions }; // Act and assert. Assert.Throws(() => QdrantVectorStoreCollectionCreateMapping.MapSingleVector(vectorProperty)); @@ -67,20 +67,23 @@ public void MapSingleVectorThrowsIfDimensionsIsInvalid(int? dimensions) public void MapNamedVectorsCreatesVectorParamsMap() { // Arrange. - var vectorProperties = new VectorStoreRecordVectorProperty[] + var vectorProperties = new VectorStoreRecordVectorPropertyModel[] { - new("testvector1", typeof(ReadOnlyMemory)) { Dimensions = 10, DistanceFunction = DistanceFunction.EuclideanDistance }, - new("testvector2", typeof(ReadOnlyMemory)) { Dimensions = 20 } - }; - - var storagePropertyNames = new Dictionary - { - { "testvector1", "storage_testvector1" }, - { "testvector2", "storage_testvector2" } + new("testvector1", typeof(ReadOnlyMemory)) + { + Dimensions = 10, + DistanceFunction = DistanceFunction.EuclideanDistance, + StorageName = "storage_testvector1" + }, + new("testvector2", typeof(ReadOnlyMemory)) + { + Dimensions = 20, + StorageName = "storage_testvector2" + } }; // Act. - var actual = QdrantVectorStoreCollectionCreateMapping.MapNamedVectors(vectorProperties, storagePropertyNames); + var actual = QdrantVectorStoreCollectionCreateMapping.MapNamedVectors(vectorProperties); // Assert. Assert.NotNull(actual); diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs index afd5e545030a..3180ff043606 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs @@ -1,9 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Collections.Generic; using System.Linq; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Moq; using Qdrant.Client.Grpc; using Xunit; @@ -17,6 +17,20 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant.UnitTests; /// public class QdrantVectorStoreCollectionSearchMappingTests { + private readonly VectorStoreRecordModel _model = + new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors: false)) + .Build( + typeof(VectorStoreGenericDataModel), + new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(Guid)) { StoragePropertyName = "storage_key" }, + new VectorStoreRecordDataProperty("FieldName", typeof(string)) { StoragePropertyName = "storage_FieldName" }, + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { StoragePropertyName = "storage_vector" }, + ] + }); + [Theory] [InlineData("string")] [InlineData("int")] @@ -37,7 +51,7 @@ public void BuildFilterMapsEqualityClause(string type) var filter = new VectorSearchFilter().EqualTo("FieldName", expected); // Act. 
- var actual = QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(filter, new Dictionary() { { "FieldName", "storage_FieldName" } }); + var actual = QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(filter, this._model); // Assert. Assert.Single(actual.Must); @@ -71,7 +85,7 @@ public void BuildFilterMapsTagContainsClause() var filter = new VectorSearchFilter().AnyTagEqualTo("FieldName", "Value"); // Act. - var actual = QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(filter, new Dictionary() { { "FieldName", "storage_FieldName" } }); + var actual = QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(filter, this._model); // Assert. Assert.Single(actual.Must); @@ -83,10 +97,10 @@ public void BuildFilterMapsTagContainsClause() public void BuildFilterThrowsForUnknownFieldName() { // Arrange. - var filter = new VectorSearchFilter().EqualTo("FieldName", "Value"); + var filter = new VectorSearchFilter().EqualTo("UnknownFieldName", "Value"); // Act and Assert. - Assert.Throws(() => QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(filter, new Dictionary())); + Assert.Throws(() => QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(filter, this._model)); } [Fact] diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs index e27674b8f3d7..06e97c33cc43 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs @@ -532,9 +532,9 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() { Properties = new List { - new VectorStoreRecordKeyProperty("Id", typeof(ulong)), - new VectorStoreRecordDataProperty("Text", typeof(string)), - new VectorStoreRecordVectorProperty("Embedding", typeof(ReadOnlyMemory)) { Dimensions = 4 }, + new VectorStoreRecordKeyProperty(nameof(SinglePropsModel.Key), typeof(ulong)), + new VectorStoreRecordDataProperty(nameof(SinglePropsModel.OriginalNameData), typeof(string)), + new VectorStoreRecordVectorProperty(nameof(SinglePropsModel.Vector), typeof(ReadOnlyMemory?)) { Dimensions = 4 }, } }; diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs index ad225ba7be09..95d6c8de3b1d 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs @@ -5,6 +5,7 @@ using System.Linq; using System.Text.Json.Serialization; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.Qdrant; using Qdrant.Client.Grpc; using Xunit; @@ -23,8 +24,9 @@ public void MapsSinglePropsFromDataToStorageModelWithUlong(bool hasNamedVectors) { // Arrange. 
var definition = CreateSinglePropsVectorStoreRecordDefinition(typeof(ulong)); - var reader = new VectorStoreRecordPropertyReader(typeof(SinglePropsModel), definition, null); - var sut = new QdrantVectorStoreRecordMapper>(reader, hasNamedVectors); + var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors)) + .Build(typeof(SinglePropsModel), definition); + var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors); // Act. var actual = sut.MapFromDataToStorageModel(CreateSinglePropsModel(5ul)); @@ -52,8 +54,9 @@ public void MapsSinglePropsFromDataToStorageModelWithGuid(bool hasNamedVectors) { // Arrange. var definition = CreateSinglePropsVectorStoreRecordDefinition(typeof(Guid)); - var reader = new VectorStoreRecordPropertyReader(typeof(SinglePropsModel), definition, null); - var sut = new QdrantVectorStoreRecordMapper>(reader, hasNamedVectors); + var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors)) + .Build(typeof(SinglePropsModel), definition); + var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors); // Act. var actual = sut.MapFromDataToStorageModel(CreateSinglePropsModel(Guid.Parse("11111111-1111-1111-1111-111111111111"))); @@ -74,8 +77,9 @@ public void MapsSinglePropsFromStorageToDataModelWithUlong(bool hasNamedVectors, { // Arrange. var definition = CreateSinglePropsVectorStoreRecordDefinition(typeof(ulong)); - var reader = new VectorStoreRecordPropertyReader(typeof(SinglePropsModel), definition, null); - var sut = new QdrantVectorStoreRecordMapper>(reader, hasNamedVectors); + var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors)) + .Build(typeof(SinglePropsModel), definition); + var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors); // Act. var actual = sut.MapFromStorageToDataModel(CreateSinglePropsPointStruct(5, hasNamedVectors), new() { IncludeVectors = includeVectors }); @@ -104,8 +108,9 @@ public void MapsSinglePropsFromStorageToDataModelWithGuid(bool hasNamedVectors, { // Arrange. var definition = CreateSinglePropsVectorStoreRecordDefinition(typeof(Guid)); - var reader = new VectorStoreRecordPropertyReader(typeof(SinglePropsModel), definition, null); - var sut = new QdrantVectorStoreRecordMapper>(reader, hasNamedVectors); + var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors)) + .Build(typeof(SinglePropsModel), definition); + var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors); // Act. var actual = sut.MapFromStorageToDataModel(CreateSinglePropsPointStruct(Guid.Parse("11111111-1111-1111-1111-111111111111"), hasNamedVectors), new() { IncludeVectors = includeVectors }); @@ -130,8 +135,10 @@ public void MapsMultiPropsFromDataToStorageModelWithUlong() { // Arrange. var definition = CreateMultiPropsVectorStoreRecordDefinition(typeof(ulong)); - var reader = new VectorStoreRecordPropertyReader(typeof(MultiPropsModel), definition, null); - var sut = new QdrantVectorStoreRecordMapper>(reader, true); + var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors: true)) + .Build(typeof(MultiPropsModel), definition); + + var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors: true); // Act. 
var actual = sut.MapFromDataToStorageModel(CreateMultiPropsModel(5ul));
@@ -158,8 +165,9 @@ public void MapsMultiPropsFromDataToStorageModelWithGuid()
{
// Arrange.
var definition = CreateMultiPropsVectorStoreRecordDefinition(typeof(Guid));
- var reader = new VectorStoreRecordPropertyReader(typeof(MultiPropsModel), definition, null);
- var sut = new QdrantVectorStoreRecordMapper>(reader, true);
+ var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors: true)) .Build(typeof(MultiPropsModel), definition);
+ var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors: true);
// Act.
var actual = sut.MapFromDataToStorageModel(CreateMultiPropsModel(Guid.Parse("11111111-1111-1111-1111-111111111111")));
@@ -188,8 +196,9 @@ public void MapsMultiPropsFromStorageToDataModelWithUlong(bool includeVectors)
{
// Arrange.
var definition = CreateMultiPropsVectorStoreRecordDefinition(typeof(ulong));
- var reader = new VectorStoreRecordPropertyReader(typeof(MultiPropsModel), definition, null);
- var sut = new QdrantVectorStoreRecordMapper>(reader, true);
+ var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors: true)) .Build(typeof(MultiPropsModel), definition);
+ var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors: true);
// Act.
var actual = sut.MapFromStorageToDataModel(CreateMultiPropsPointStruct(5), new() { IncludeVectors = includeVectors });
@@ -226,8 +235,9 @@ public void MapsMultiPropsFromStorageToDataModelWithGuid(bool includeVectors)
{
// Arrange.
var definition = CreateMultiPropsVectorStoreRecordDefinition(typeof(Guid));
- var reader = new VectorStoreRecordPropertyReader(typeof(MultiPropsModel), definition, null);
- var sut = new QdrantVectorStoreRecordMapper>(reader, true);
+ var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors: true)) .Build(typeof(MultiPropsModel), definition);
+ var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors: true);
// Act.
var actual = sut.MapFromStorageToDataModel(CreateMultiPropsPointStruct(Guid.Parse("11111111-1111-1111-1111-111111111111")), new() { IncludeVectors = includeVectors });
diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/Connectors.Redis.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Redis.UnitTests/Connectors.Redis.UnitTests.csproj
index c54e1a3b5136..a9d5ff1e1bd9 100644
--- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/Connectors.Redis.UnitTests.csproj
+++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/Connectors.Redis.UnitTests.csproj
@@ -9,6 +9,7 @@
disable
false
$(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0050
+ $(NoWarn);MEVD9001
diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs
index e8ae022b6fc6..6dd99624bf42 100644
--- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs
+++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs
@@ -524,9 +524,9 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType()
{
Properties = new List
{
- new VectorStoreRecordKeyProperty("Id", typeof(string)),
- new VectorStoreRecordDataProperty("Text", typeof(string)),
- new VectorStoreRecordVectorProperty("Embedding", typeof(ReadOnlyMemory)) { Dimensions = 4 },
+ new VectorStoreRecordKeyProperty(nameof(SinglePropsModel.Key), typeof(string)),
+ new VectorStoreRecordDataProperty(nameof(SinglePropsModel.OriginalNameData), typeof(string)),
+ new VectorStoreRecordVectorProperty(nameof(SinglePropsModel.Vector), typeof(ReadOnlyMemory?)) { Dimensions = 4 },
}
};
diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs
index 8eb570f15329..d96f0e31dcda 100644
--- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs
+++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs
@@ -2,6 +2,7 @@
using System;
using Microsoft.Extensions.VectorData;
+using Microsoft.Extensions.VectorData.ConnectorSupport;
using Microsoft.SemanticKernel.Connectors.Redis;
using Microsoft.SemanticKernel.Connectors.Redis.UnitTests;
using Xunit;
@@ -13,12 +14,15 @@ namespace SemanticKernel.Connectors.Redis.UnitTests;
///
public sealed class RedisHashSetVectorStoreRecordMapperTests
{
+ private static readonly VectorStoreRecordModel s_model
+ = new VectorStoreRecordModelBuilder(RedisHashSetVectorStoreRecordCollection.ModelBuildingOptions)
+ .Build(typeof(AllTypesModel), RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition);
+
[Fact]
public void MapsAllFieldsFromDataToStorageModel()
{
// Arrange.
- var reader = new VectorStoreRecordPropertyReader(typeof(AllTypesModel), RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition, null);
- var sut = new RedisHashSetVectorStoreRecordMapper(reader);
+ var sut = new RedisHashSetVectorStoreRecordMapper(s_model);
// Act.
var actual = sut.MapFromDataToStorageModel(CreateModel("test key"));
@@ -33,8 +37,7 @@ public void MapsAllFieldsFromDataToStorageModel()
public void MapsAllFieldsFromStorageToDataModel()
{
// Arrange.
- var reader = new VectorStoreRecordPropertyReader(typeof(AllTypesModel), RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition, null);
- var sut = new RedisHashSetVectorStoreRecordMapper(reader);
+ var sut = new RedisHashSetVectorStoreRecordMapper(s_model);
// Act.
var actual = sut.MapFromStorageToDataModel(("test key", RedisHashSetVectorStoreMappingTestHelpers.CreateHashSet()), new() { IncludeVectors = true });
diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonGenericDataModelMapperTests.cs
index 779dddaffa94..68d5b3921853 100644
--- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonGenericDataModelMapperTests.cs
+++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonGenericDataModelMapperTests.cs
@@ -6,6 +6,7 @@
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.Extensions.VectorData;
+using Microsoft.Extensions.VectorData.ConnectorSupport;
using Xunit;
namespace Microsoft.SemanticKernel.Connectors.Redis.UnitTests;
@@ -17,24 +18,28 @@ public class RedisJsonGenericDataModelMapperTests
{
private static readonly float[] s_floatVector = new float[] { 1.0f, 2.0f, 3.0f, 4.0f };
- private static readonly VectorStoreRecordDefinition s_vectorStoreRecordDefinition = new()
- {
- Properties = new List()
- {
- new VectorStoreRecordKeyProperty("Key", typeof(string)),
- new VectorStoreRecordDataProperty("StringData", typeof(string)) { StoragePropertyName = "storage_string_data" },
- new VectorStoreRecordDataProperty("IntData", typeof(int)),
- new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)),
- new VectorStoreRecordDataProperty("ComplexObjectData", typeof(ComplexObject)),
- new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)),
- }
- };
+ private static readonly VectorStoreRecordModel s_model
+ = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection>.ModelBuildingOptions)
+ .Build(
+ typeof(VectorStoreGenericDataModel),
+ new()
+ {
+ Properties =
+ [
+ new VectorStoreRecordKeyProperty("Key", typeof(string)),
+ new VectorStoreRecordDataProperty("StringData", typeof(string)),
+ new VectorStoreRecordDataProperty("IntData", typeof(int)),
+ new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)),
+ new VectorStoreRecordDataProperty("ComplexObjectData", typeof(ComplexObject)),
+ new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)),
+ ]
+ });
[Fact]
public void MapFromDataToStorageModelMapsAllSupportedTypes()
{
// Arrange.
- var sut = new RedisJsonGenericDataModelMapper(s_vectorStoreRecordDefinition.Properties, JsonSerializerOptions.Default);
+ var sut = new RedisJsonGenericDataModelMapper(s_model.Properties, JsonSerializerOptions.Default);
var dataModel = new VectorStoreGenericDataModel("key")
{
Data =
@@ -55,7 +60,7 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes()
// Assert
Assert.Equal("key", storageModel.Key);
- Assert.Equal("data 1", (string)storageModel.Node["storage_string_data"]!);
+ Assert.Equal("data 1", (string)storageModel.Node["StringData"]!);
Assert.Equal(1, (int)storageModel.Node["IntData"]!);
Assert.Equal(2, (int?)storageModel.Node["NullableIntData"]!);
Assert.Equal("prop 1", (string)storageModel.Node["ComplexObjectData"]!.AsObject()["Prop1"]!);
@@ -66,7 +71,7 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes()
public void MapFromDataToStorageModelMapsNullValues()
{
// Arrange.
- var sut = new RedisJsonGenericDataModelMapper(s_vectorStoreRecordDefinition.Properties, JsonSerializerOptions.Default);
+ var sut = new RedisJsonGenericDataModelMapper(s_model.Properties, JsonSerializerOptions.Default);
var dataModel = new VectorStoreGenericDataModel("key")
{
Data =
@@ -98,13 +103,15 @@ public void MapFromDataToStorageModelMapsNullValues()
public void MapFromStorageToDataModelMapsAllSupportedTypes()
{
// Arrange.
- var sut = new RedisJsonGenericDataModelMapper(s_vectorStoreRecordDefinition.Properties, JsonSerializerOptions.Default);
- var storageModel = new JsonObject();
- storageModel.Add("storage_string_data", "data 1");
- storageModel.Add("IntData", 1);
- storageModel.Add("NullableIntData", 2);
- storageModel.Add("ComplexObjectData", new JsonObject(new KeyValuePair[] { new("Prop1", JsonValue.Create("prop 1")), new("Prop2", JsonValue.Create("prop 2")) }));
- storageModel.Add("FloatVector", new JsonArray(new[] { 1, 2, 3, 4 }.Select(x => JsonValue.Create(x)).ToArray()));
+ var sut = new RedisJsonGenericDataModelMapper(s_model.Properties, JsonSerializerOptions.Default);
+ var storageModel = new JsonObject
+ {
+ { "StringData", "data 1" },
+ { "IntData", 1 },
+ { "NullableIntData", 2 },
+ { "ComplexObjectData", new JsonObject(new KeyValuePair[] { new("Prop1", JsonValue.Create("prop 1")), new("Prop2", JsonValue.Create("prop 2")) }) },
+ { "FloatVector", new JsonArray(new[] { 1, 2, 3, 4 }.Select(x => JsonValue.Create(x)).ToArray()) }
+ };
// Act.
var dataModel = sut.MapFromStorageToDataModel(("key", storageModel), new() { IncludeVectors = true });
@@ -122,13 +129,15 @@ public void MapFromStorageToDataModelMapsAllSupportedTypes()
public void MapFromStorageToDataModelMapsNullValues()
{
// Arrange.
- var sut = new RedisJsonGenericDataModelMapper(s_vectorStoreRecordDefinition.Properties, JsonSerializerOptions.Default);
- var storageModel = new JsonObject();
- storageModel.Add("storage_string_data", null);
- storageModel.Add("IntData", null);
- storageModel.Add("NullableIntData", null);
- storageModel.Add("ComplexObjectData", null);
- storageModel.Add("FloatVector", null);
+ var sut = new RedisJsonGenericDataModelMapper(s_model.Properties, JsonSerializerOptions.Default);
+ var storageModel = new JsonObject
+ {
+ { "StringData", null },
+ { "IntData", null },
+ { "NullableIntData", null },
+ { "ComplexObjectData", null },
+ { "FloatVector", null }
+ };
// Act.
var dataModel = sut.MapFromStorageToDataModel(("key", storageModel), new() { IncludeVectors = true });
@@ -146,7 +155,7 @@ public void MapFromStorageToDataModelMapsNullValues()
public void MapFromDataToStorageModelSkipsMissingProperties()
{
// Arrange.
- var sut = new RedisJsonGenericDataModelMapper(s_vectorStoreRecordDefinition.Properties, JsonSerializerOptions.Default);
+ var sut = new RedisJsonGenericDataModelMapper(s_model.Properties, JsonSerializerOptions.Default);
var dataModel = new VectorStoreGenericDataModel("key")
{
Data = { },
@@ -167,7 +176,7 @@ public void MapFromStorageToDataModelSkipsMissingProperties()
// Arrange.
var storageModel = new JsonObject();
- var sut = new RedisJsonGenericDataModelMapper(s_vectorStoreRecordDefinition.Properties, JsonSerializerOptions.Default);
+ var sut = new RedisJsonGenericDataModelMapper(s_model.Properties, JsonSerializerOptions.Default);
// Act.
var dataModel = sut.MapFromStorageToDataModel(("key", storageModel), new() { IncludeVectors = true });
diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs
index 3277f802cc66..5ef5bb1d319b 100644
--- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs
+++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs
@@ -512,32 +512,6 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition)
Assert.Equal(new float[] { 1, 2, 3, 4 }, results.First().Record.Vector2!.Value.ToArray());
}
- ///
- /// Tests that the collection can be created even if the definition and the type do not match.
- /// In this case, the expectation is that a custom mapper will be provided to map between the
- /// schema as defined by the definition and the different data model.
- ///
- [Fact]
- public void CanCreateCollectionWithMismatchedDefinitionAndType()
- {
- // Arrange.
- var definition = new VectorStoreRecordDefinition()
- {
- Properties = new List
- {
- new VectorStoreRecordKeyProperty("Id", typeof(string)),
- new VectorStoreRecordDataProperty("Text", typeof(string)),
- new VectorStoreRecordVectorProperty("Embedding", typeof(ReadOnlyMemory)) { Dimensions = 4 },
- }
- };
-
- // Act.
- var sut = new RedisJsonVectorStoreRecordCollection(
- this._redisDatabaseMock.Object,
- TestCollectionName,
- new() { VectorStoreRecordDefinition = definition, JsonNodeCustomMapper = Mock.Of>() });
- }
-
private RedisJsonVectorStoreRecordCollection CreateRecordCollection(bool useDefinition, bool useCustomJsonSerializerOptions = false)
{
return new RedisJsonVectorStoreRecordCollection(
diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs
index fef62c68a530..c3e8eb077b39 100644
--- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs
+++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs
@@ -5,6 +5,7 @@
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.Extensions.VectorData;
+using Microsoft.Extensions.VectorData.ConnectorSupport;
using Microsoft.SemanticKernel.Connectors.Redis;
using Xunit;
@@ -19,7 +20,8 @@ public sealed class RedisJsonVectorStoreRecordMapperTests
public void MapsAllFieldsFromDataToStorageModel()
{
// Arrange.
- var sut = new RedisJsonVectorStoreRecordMapper("Key", JsonSerializerOptions.Default);
+ var keyProperty = new VectorStoreRecordKeyPropertyModel("Key", typeof(string));
+ var sut = new RedisJsonVectorStoreRecordMapper(keyProperty, JsonSerializerOptions.Default);
// Act.
var actual = sut.MapFromDataToStorageModel(CreateModel("test key"));
@@ -38,7 +40,8 @@ public void MapsAllFieldsFromDataToStorageModel()
public void MapsAllFieldsFromDataToStorageModelWithCustomSerializerOptions()
{
// Arrange.
- var sut = new RedisJsonVectorStoreRecordMapper("key", new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase });
+ var keyProperty = new VectorStoreRecordKeyPropertyModel("Key", typeof(string)) { StorageName = "key" };
+ var sut = new RedisJsonVectorStoreRecordMapper(keyProperty, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase });
// Act.
var actual = sut.MapFromDataToStorageModel(CreateModel("test key"));
@@ -57,7 +60,8 @@ public void MapsAllFieldsFromDataToStorageModelWithCustomSerializerOptions()
public void MapsAllFieldsFromStorageToDataModel()
{
// Arrange.
- var sut = new RedisJsonVectorStoreRecordMapper("Key", JsonSerializerOptions.Default);
+ var keyProperty = new VectorStoreRecordKeyPropertyModel("Key", typeof(string));
+ var sut = new RedisJsonVectorStoreRecordMapper(keyProperty, JsonSerializerOptions.Default);
// Act.
var jsonObject = new JsonObject();
@@ -80,7 +84,8 @@ public void MapsAllFieldsFromStorageToDataModel()
public void MapsAllFieldsFromStorageToDataModelWithCustomSerializerOptions()
{
// Arrange.
- var sut = new RedisJsonVectorStoreRecordMapper("key", new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase });
+ var keyProperty = new VectorStoreRecordKeyPropertyModel("Key", typeof(string)) { StorageName = "key" };
+ var sut = new RedisJsonVectorStoreRecordMapper(keyProperty, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase });
// Act.
var jsonObject = new JsonObject();
diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs
index ef3ba3447bad..370443e6c50c 100644
--- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs
+++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs
@@ -3,6 +3,7 @@
using System;
using System.Collections.Generic;
using Microsoft.Extensions.VectorData;
+using Microsoft.Extensions.VectorData.ConnectorSupport;
using NRedisStack.Search;
using Xunit;
using static NRedisStack.Search.Schema;
@@ -20,39 +21,32 @@ public class RedisVectorStoreCollectionCreateMappingTests
public void MapToSchemaCreatesSchema(bool useDollarPrefix)
{
// Arrange.
- var properties = new VectorStoreRecordProperty[]
- {
- new VectorStoreRecordKeyProperty("Key", typeof(string)),
-
- new VectorStoreRecordDataProperty("FilterableString", typeof(string)) { IsFilterable = true },
- new VectorStoreRecordDataProperty("FullTextSearchableString", typeof(string)) { IsFullTextSearchable = true },
- new VectorStoreRecordDataProperty("FilterableStringEnumerable", typeof(string[])) { IsFilterable = true },
- new VectorStoreRecordDataProperty("FullTextSearchableStringEnumerable", typeof(string[])) { IsFullTextSearchable = true },
-
- new VectorStoreRecordDataProperty("FilterableInt", typeof(int)) { IsFilterable = true },
- new VectorStoreRecordDataProperty("FilterableNullableInt", typeof(int)) { IsFilterable = true },
-
- new VectorStoreRecordDataProperty("NonFilterableString", typeof(string)),
-
- new VectorStoreRecordVectorProperty("VectorDefaultIndexingOptions", typeof(ReadOnlyMemory)) { Dimensions = 10 },
- new VectorStoreRecordVectorProperty("VectorSpecificIndexingOptions", typeof(ReadOnlyMemory)) { Dimensions = 20, IndexKind = IndexKind.Flat, DistanceFunction = DistanceFunction.EuclideanSquaredDistance },
- };
-
- var storagePropertyNames = new Dictionary()
- {
- { "FilterableString", "FilterableString" },
- { "FullTextSearchableString", "FullTextSearchableString" },
- { "FilterableStringEnumerable", "FilterableStringEnumerable" },
- { "FullTextSearchableStringEnumerable", "FullTextSearchableStringEnumerable" },
- { "FilterableInt", "FilterableInt" },
- { "FilterableNullableInt", "FilterableNullableInt" },
- { "NonFilterableString", "NonFilterableString" },
- { "VectorDefaultIndexingOptions", "VectorDefaultIndexingOptions" },
- { "VectorSpecificIndexingOptions", "vector_specific_indexing_options" },
- };
+ VectorStoreRecordPropertyModel[] properties =
+ [
+ new VectorStoreRecordKeyPropertyModel("Key", typeof(string)),
+
+ new VectorStoreRecordDataPropertyModel("FilterableString", typeof(string)) { IsFilterable = true },
+ new VectorStoreRecordDataPropertyModel("FullTextSearchableString", typeof(string)) { IsFullTextSearchable = true },
+ new VectorStoreRecordDataPropertyModel("FilterableStringEnumerable", typeof(string[])) { IsFilterable = true },
+ new VectorStoreRecordDataPropertyModel("FullTextSearchableStringEnumerable", typeof(string[])) { IsFullTextSearchable = true },
+
+ new VectorStoreRecordDataPropertyModel("FilterableInt", typeof(int)) { IsFilterable = true },
+ new VectorStoreRecordDataPropertyModel("FilterableNullableInt", typeof(int)) { IsFilterable = true },
+
+ new VectorStoreRecordDataPropertyModel("NonFilterableString", typeof(string)),
+
+ new VectorStoreRecordVectorPropertyModel("VectorDefaultIndexingOptions", typeof(ReadOnlyMemory)) { Dimensions = 10 },
+ new VectorStoreRecordVectorPropertyModel("VectorSpecificIndexingOptions", typeof(ReadOnlyMemory))
+ {
+ Dimensions = 20,
+ IndexKind = IndexKind.Flat,
+ DistanceFunction = DistanceFunction.EuclideanSquaredDistance,
+ StorageName = "vector_specific_indexing_options"
+ }
+ ];
// Act.
- var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(properties, storagePropertyNames, useDollarPrefix);
+ var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(properties, useDollarPrefix);
// Assert.
Assert.NotNull(schema);
@@ -109,18 +103,20 @@ public void MapToSchemaCreatesSchema(bool useDollarPrefix)
public void MapToSchemaThrowsOnInvalidVectorDimensions(int? dimensions)
{
// Arrange.
- var properties = new VectorStoreRecordProperty[] { new VectorStoreRecordVectorProperty("VectorProperty", typeof(ReadOnlyMemory)) { Dimensions = dimensions } };
- var storagePropertyNames = new Dictionary() { { "VectorProperty", "VectorProperty" } };
+ VectorStoreRecordPropertyModel[] properties =
+ [
+ new VectorStoreRecordVectorPropertyModel("VectorProperty", typeof(ReadOnlyMemory)) { Dimensions = dimensions }
+ ];
// Act and assert.
- Assert.Throws(() => RedisVectorStoreCollectionCreateMapping.MapToSchema(properties, storagePropertyNames, true));
+ Assert.Throws(() => RedisVectorStoreCollectionCreateMapping.MapToSchema(properties, useDollarPrefix: true));
}
[Fact]
public void GetSDKIndexKindThrowsOnUnsupportedIndexKind()
{
// Arrange.
- var vectorProperty = new VectorStoreRecordVectorProperty("VectorProperty", typeof(ReadOnlyMemory)) { IndexKind = "Unsupported" };
+ var vectorProperty = new VectorStoreRecordVectorPropertyModel("VectorProperty", typeof(ReadOnlyMemory)) { IndexKind = "Unsupported" };
// Act and assert.
Assert.Throws(() => RedisVectorStoreCollectionCreateMapping.GetSDKIndexKind(vectorProperty));
@@ -130,7 +126,7 @@ public void GetSDKIndexKindThrowsOnUnsupportedIndexKind()
public void GetSDKDistanceAlgorithmThrowsOnUnsupportedDistanceFunction()
{
// Arrange.
- var vectorProperty = new VectorStoreRecordVectorProperty("VectorProperty", typeof(ReadOnlyMemory)) { DistanceFunction = "Unsupported" };
+ var vectorProperty = new VectorStoreRecordVectorPropertyModel("VectorProperty", typeof(ReadOnlyMemory)) { DistanceFunction = "Unsupported" };
// Act and assert.
Assert.Throws(() => RedisVectorStoreCollectionCreateMapping.GetSDKDistanceAlgorithm(vectorProperty));
diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs
index 087b707a4b7c..e93c182dbb81 100644
--- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs
+++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs
@@ -2,9 +2,9 @@
using System;
using System.Collections.Generic;
-using System.Linq;
using System.Runtime.InteropServices;
using Microsoft.Extensions.VectorData;
+using Microsoft.Extensions.VectorData.ConnectorSupport;
using Xunit;
namespace Microsoft.SemanticKernel.Connectors.Redis.UnitTests;
@@ -66,17 +66,18 @@ public void BuildQueryBuildsRedisQueryWithDefaults()
// Arrange.
var floatVector = new ReadOnlyMemory(new float[] { 1.0f, 2.0f, 3.0f });
var byteArray = MemoryMarshal.AsBytes(floatVector.Span).ToArray();
- var storagePropertyNames = new Dictionary()
- {
- { "Vector", "storage_Vector" },
- };
+ var model = BuildModel(
+ [
+ new VectorStoreRecordKeyProperty("Key", typeof(string)),
+ new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory))
+ ]);
// Act.
- var query = RedisVectorStoreCollectionSearchMapping.BuildQuery(byteArray, new VectorSearchOptions(), storagePropertyNames, storagePropertyNames.Values.Single(), null);
+ var query = RedisVectorStoreCollectionSearchMapping.BuildQuery(byteArray, new VectorSearchOptions(), model, model.VectorProperty, null);
// Assert.
Assert.NotNull(query);
- Assert.Equal("*=>[KNN 3 @storage_Vector $embedding AS vector_score]", query.QueryString);
+ Assert.Equal("*=>[KNN 3 @Vector $embedding AS vector_score]", query.QueryString);
Assert.Equal("vector_score", query.SortBy);
Assert.True(query.WithScores);
Assert.Equal(2, query.dialect);
@@ -89,14 +90,15 @@ public void BuildQueryBuildsRedisQueryWithCustomVectorName()
var floatVector = new ReadOnlyMemory(new float[] { 1.0f, 2.0f, 3.0f });
var byteArray = MemoryMarshal.AsBytes(floatVector.Span).ToArray();
var vectorSearchOptions = new VectorSearchOptions { Top = 5, Skip = 3 };
- var storagePropertyNames = new Dictionary()
- {
- { "Vector", "storage_Vector" },
- };
+ var model = BuildModel(
+ [
+ new VectorStoreRecordKeyProperty("Key", typeof(string)),
+ new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { StoragePropertyName = "storage_Vector" }
+ ]);
var selectFields = new string[] { "storage_Field1", "storage_Field2" };
// Act.
- var query = RedisVectorStoreCollectionSearchMapping.BuildQuery(byteArray, vectorSearchOptions, storagePropertyNames, storagePropertyNames.Values.Single(), selectFields);
+ var query = RedisVectorStoreCollectionSearchMapping.BuildQuery(byteArray, vectorSearchOptions, model, model.VectorProperty, selectFields);
// Assert.
Assert.NotNull(query);
@@ -124,13 +126,15 @@ public void BuildFilterBuildsEqualityFilter(string filterType)
_ => throw new InvalidOperationException(),
};
- var storagePropertyNames = new Dictionary()
- {
- { "Data1", "storage_Data1" },
- };
+ var model = BuildModel(
+ [
+ new VectorStoreRecordKeyProperty("Key", typeof(string)),
+ new VectorStoreRecordDataProperty("Data1", typeof(string)) { StoragePropertyName = "storage_Data1" },
+ new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory))
+ ]);
// Act.
- var filter = RedisVectorStoreCollectionSearchMapping.BuildLegacyFilter(basicVectorSearchFilter, storagePropertyNames);
+ var filter = RedisVectorStoreCollectionSearchMapping.BuildLegacyFilter(basicVectorSearchFilter, model);
// Assert.
switch (filterType)
@@ -157,15 +161,17 @@ public void BuildFilterThrowsForInvalidValueType()
{
// Arrange.
var basicVectorSearchFilter = new VectorSearchFilter().EqualTo("Data1", true);
- var storagePropertyNames = new Dictionary()
- {
- { "Data1", "storage_Data1" },
- };
+ var model = BuildModel(
+ [
+ new VectorStoreRecordKeyProperty("Key", typeof(string)),
+ new VectorStoreRecordDataProperty("Data1", typeof(string)) { StoragePropertyName = "storage_Data1" },
+ new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory))
+ ]);
// Act & Assert.
Assert.Throws(() =>
{
- var filter = RedisVectorStoreCollectionSearchMapping.BuildLegacyFilter(basicVectorSearchFilter, storagePropertyNames);
+ var filter = RedisVectorStoreCollectionSearchMapping.BuildLegacyFilter(basicVectorSearchFilter, model);
});
}
@@ -174,22 +180,24 @@ public void BuildFilterThrowsForUnknownFieldName()
{
// Arrange.
var basicVectorSearchFilter = new VectorSearchFilter().EqualTo("UnknownData", "value");
- var storagePropertyNames = new Dictionary()
- {
- { "Data1", "storage_Data1" },
- };
+ var model = BuildModel(
+ [
+ new VectorStoreRecordKeyProperty("Key", typeof(string)),
+ new VectorStoreRecordDataProperty("Data1", typeof(string)) { StoragePropertyName = "storage_Data1" },
+ new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory))
+ ]);
// Act & Assert.
Assert.Throws(() =>
{
- var filter = RedisVectorStoreCollectionSearchMapping.BuildLegacyFilter(basicVectorSearchFilter, storagePropertyNames);
+ var filter = RedisVectorStoreCollectionSearchMapping.BuildLegacyFilter(basicVectorSearchFilter, model);
});
}
[Fact]
public void ResolveDistanceFunctionReturnsCosineSimilarityIfNoDistanceFunctionSpecified()
{
- var property = new VectorStoreRecordVectorProperty("Prop", typeof(ReadOnlyMemory));
+ var property = new VectorStoreRecordVectorPropertyModel("Prop", typeof(ReadOnlyMemory));
// Act.
var resolvedDistanceFunction = RedisVectorStoreCollectionSearchMapping.ResolveDistanceFunction(property);
@@ -201,7 +209,7 @@ public void ResolveDistanceFunctionReturnsCosineSimilarityIfNoDistanceFunctionSp
[Fact]
public void ResolveDistanceFunctionReturnsDistanceFunctionFromProvidedProperty()
{
- var property = new VectorStoreRecordVectorProperty("Prop", typeof(ReadOnlyMemory)) { DistanceFunction = DistanceFunction.DotProductSimilarity };
+ var property = new VectorStoreRecordVectorPropertyModel("Prop", typeof(ReadOnlyMemory)) { DistanceFunction = DistanceFunction.DotProductSimilarity };
// Act.
var resolvedDistanceFunction = RedisVectorStoreCollectionSearchMapping.ResolveDistanceFunction(property);
@@ -232,4 +240,10 @@ public void GetOutputScoreFromRedisScoreLeavesNonConsineSimilarityUntouched(stri
#pragma warning disable CA1812 // An internal class that is apparently never instantiated. If so, remove the code from the assembly.
private sealed class DummyType;
#pragma warning restore CA1812
+
+ private static VectorStoreRecordModel BuildModel(List properties)
+ => new VectorStoreRecordModelBuilder(RedisHashSetVectorStoreRecordCollection.ModelBuildingOptions)
+ .Build(
+ typeof(VectorStoreGenericDataModel),
+ new() { Properties = properties });
}
diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Connectors.Sqlite.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Connectors.Sqlite.UnitTests.csproj
index 015df8f6e56d..128ccff8175f 100644
--- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Connectors.Sqlite.UnitTests.csproj
+++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Connectors.Sqlite.UnitTests.csproj
@@ -9,6 +9,7 @@
disable
false
$(NoWarn);SKEXP0001,SKEXP0020,VSTHRD111,CA2007,CS1591
+ $(NoWarn);MEVD9001
diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteGenericDataModelMapperTests.cs
deleted file mode 100644
index 3985672bd60e..000000000000
--- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteGenericDataModelMapperTests.cs
+++ /dev/null
@@ -1,189 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System;
-using System.Collections.Generic;
-using Microsoft.Extensions.VectorData;
-using Microsoft.SemanticKernel.Connectors.Sqlite;
-using Xunit;
-
-namespace SemanticKernel.Connectors.Sqlite.UnitTests;
-
-///
-/// Unit tests for class.
-/// -public sealed class SqliteGenericDataModelMapperTests -{ - [Fact] - public void MapFromDataToStorageModelWithStringKeyReturnsValidStorageModel() - { - // Arrange - var definition = GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); - var dataModel = GetGenericDataModel("key"); - - var mapper = new SqliteGenericDataModelMapper(propertyReader); - - // Act - var result = mapper.MapFromDataToStorageModel(dataModel); - - // Assert - Assert.Equal("key", result["Key"]); - Assert.Equal("Value1", result["StringProperty"]); - Assert.Equal(5, result["IntProperty"]); - - var vectorBytes = result["FloatVector"] as byte[]; - - Assert.NotNull(vectorBytes); - Assert.True(vectorBytes.Length > 0); - } - - [Fact] - public void MapFromDataToStorageModelWithNumericKeyReturnsValidStorageModel() - { - // Arrange - var definition = GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); - var dataModel = GetGenericDataModel(1); - - var mapper = new SqliteGenericDataModelMapper(propertyReader); - - // Act - var result = mapper.MapFromDataToStorageModel(dataModel); - - // Assert - Assert.Equal((ulong)1, result["Key"]); - Assert.Equal("Value1", result["StringProperty"]); - Assert.Equal(5, result["IntProperty"]); - - var vectorBytes = result["FloatVector"] as byte[]; - - Assert.NotNull(vectorBytes); - Assert.True(vectorBytes.Length > 0); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void MapFromStorageToDataModelWithStringKeyReturnsValidGenericModel(bool includeVectors) - { - // Arrange - var vector = new ReadOnlyMemory([1.1f, 2.2f, 3.3f, 4.4f]); - var storageVector = SqliteVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); - - var storageModel = new Dictionary - { - ["Key"] = "key", - ["StringProperty"] = "Value1", - ["IntProperty"] = 5, - ["FloatVector"] = storageVector - }; - - var definition = GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); - - var mapper = new SqliteGenericDataModelMapper(propertyReader); - - // Act - var result = mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors }); - - // Assert - Assert.Equal("key", result.Key); - Assert.Equal("Value1", result.Data["StringProperty"]); - Assert.Equal(5, result.Data["IntProperty"]); - - if (includeVectors) - { - Assert.NotNull(result.Vectors["FloatVector"]); - Assert.Equal(vector.ToArray(), ((ReadOnlyMemory)result.Vectors["FloatVector"]!).ToArray()); - } - else - { - Assert.False(result.Vectors.ContainsKey("FloatVector")); - } - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void MapFromStorageToDataModelWithNumericKeyReturnsValidGenericModel(bool includeVectors) - { - // Arrange - var vector = new ReadOnlyMemory([1.1f, 2.2f, 3.3f, 4.4f]); - var storageVector = SqliteVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); - - var storageModel = new Dictionary - { - ["Key"] = (ulong)1, - ["StringProperty"] = "Value1", - ["IntProperty"] = 5, - ["FloatVector"] = storageVector - }; - - var definition = GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); - - IVectorStoreRecordMapper, Dictionary> mapper = new SqliteGenericDataModelMapper(propertyReader); - - // Act - var result = mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors }); - - // Assert - Assert.Equal((ulong)1, result.Key); - Assert.Equal("Value1", result.Data["StringProperty"]); - Assert.Equal(5, result.Data["IntProperty"]); - - if (includeVectors) - { - 
Assert.NotNull(result.Vectors["FloatVector"]); - Assert.Equal(vector.ToArray(), ((ReadOnlyMemory)result.Vectors["FloatVector"]!).ToArray()); - } - else - { - Assert.False(result.Vectors.ContainsKey("FloatVector")); - } - } - - #region private - - private static VectorStoreRecordDefinition GetRecordDefinition() - { - return new VectorStoreRecordDefinition - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(TKey)), - new VectorStoreRecordDataProperty("StringProperty", typeof(string)), - new VectorStoreRecordDataProperty("IntProperty", typeof(int)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - } - }; - } - - private static VectorStoreGenericDataModel GetGenericDataModel(TKey key) - { - return new VectorStoreGenericDataModel(key) - { - Data = new() - { - ["StringProperty"] = "Value1", - ["IntProperty"] = 5 - }, - Vectors = new() - { - ["FloatVector"] = new ReadOnlyMemory([1.1f, 2.2f, 3.3f, 4.4f]) - } - }; - } - - private static VectorStoreRecordPropertyReader GetPropertyReader(VectorStoreRecordDefinition definition) - { - return new VectorStoreRecordPropertyReader(typeof(TRecord), definition, new() - { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true - }); - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs index 705b5caa7204..72b91d8dea88 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.Sqlite; using Xunit; @@ -18,10 +19,10 @@ public void MapFromDataToStorageModelWithStringKeyReturnsValidStorageModel() { // Arrange var definition = GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); + var model = BuildModel(typeof(TestRecord), definition); var dataModel = GetDataModel("key"); - var mapper = new SqliteVectorStoreRecordMapper>(propertyReader); + var mapper = new SqliteVectorStoreRecordMapper>(model); // Act var result = mapper.MapFromDataToStorageModel(dataModel); @@ -42,10 +43,10 @@ public void MapFromDataToStorageModelWithNumericKeyReturnsValidStorageModel() { // Arrange var definition = GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); + var model = BuildModel(typeof(TestRecord), definition); var dataModel = GetDataModel(1); - var mapper = new SqliteVectorStoreRecordMapper>(propertyReader); + var mapper = new SqliteVectorStoreRecordMapper>(model); // Act var result = mapper.MapFromDataToStorageModel(dataModel); @@ -79,9 +80,9 @@ public void MapFromStorageToDataModelWithStringKeyReturnsValidGenericModel(bool }; var definition = GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); + var model = BuildModel(typeof(TestRecord), definition); - var mapper = new SqliteVectorStoreRecordMapper>(propertyReader); + var mapper = new SqliteVectorStoreRecordMapper>(model); // Act var result = mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors }); @@ -120,9 +121,9 @@ public void MapFromStorageToDataModelWithNumericKeyReturnsValidGenericModel(bool }; var definition = 
GetRecordDefinition(); - var propertyReader = GetPropertyReader>(definition); + var model = BuildModel(typeof(TestRecord), definition); - var mapper = new SqliteVectorStoreRecordMapper>(propertyReader); + var mapper = new SqliteVectorStoreRecordMapper>(model); // Act var result = mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors }); @@ -170,15 +171,8 @@ private static TestRecord GetDataModel(TKey key) }; } - private static VectorStoreRecordPropertyReader GetPropertyReader(VectorStoreRecordDefinition definition) - { - return new VectorStoreRecordPropertyReader(typeof(TRecord), definition, new() - { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true - }); - } + private static VectorStoreRecordModel BuildModel(Type type, VectorStoreRecordDefinition definition) + => new VectorStoreRecordModelBuilder(SqliteConstants.ModelBuildingOptions).Build(type, definition); #pragma warning disable CA1812 private sealed class TestRecord diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs index ddd47cce94de..a36e9f25a4c3 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.Sqlite; using Xunit; @@ -55,22 +56,20 @@ public void MapVectorForDataModelReturnsReadOnlyMemory() public void GetColumnsReturnsCollectionOfColumns() { // Arrange - var properties = new List() + var properties = new List() { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("Data", typeof(int)) { IsFilterable = true }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { Dimensions = 4, DistanceFunction = DistanceFunction.ManhattanDistance }, - }; - - var storagePropertyNames = new Dictionary - { - ["Key"] = "Key", - ["Data"] = "my_data", - ["Vector"] = "Vector" + new VectorStoreRecordKeyPropertyModel("Key", typeof(string)) { StorageName = "Key" }, + new VectorStoreRecordDataPropertyModel("Data", typeof(int)) { StorageName = "my_data", IsFilterable = true }, + new VectorStoreRecordVectorPropertyModel("Vector", typeof(ReadOnlyMemory)) + { + Dimensions = 4, + DistanceFunction = DistanceFunction.ManhattanDistance, + StorageName = "Vector" + } }; // Act - var columns = SqliteVectorStoreRecordPropertyMapping.GetColumns(properties, storagePropertyNames); + var columns = SqliteVectorStoreRecordPropertyMapping.GetColumns(properties); // Assert Assert.Equal("Key", columns[0].Name); diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/Connectors.Weaviate.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/Connectors.Weaviate.UnitTests.csproj index ca442f3b3233..55e4e0850587 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/Connectors.Weaviate.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/Connectors.Weaviate.UnitTests.csproj @@ -9,6 +9,7 @@ disable false $(NoWarn);SKEXP0001,SKEXP0020,VSTHRD111,CA2007 + $(NoWarn);MEVD9001 diff --git 
a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateGenericDataModelMapperTests.cs index 4eca8d8bf77f..15193f410b4f 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateGenericDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateGenericDataModelMapperTests.cs @@ -7,6 +7,7 @@ using System.Text.Json.Nodes; using System.Text.Json.Serialization; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.Weaviate; using Xunit; @@ -28,49 +29,47 @@ public sealed class WeaviateGenericDataModelMapperTests } }; - private static readonly VectorStoreRecordKeyProperty s_keyProperty = new("Key", typeof(Guid)); - - private static readonly List s_dataProperties = new() - { - new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), - new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), - new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), - new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), - new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), - new VectorStoreRecordDataProperty("ShortDataProp", typeof(short)), - new VectorStoreRecordDataProperty("NullableShortDataProp", typeof(short?)), - new VectorStoreRecordDataProperty("ByteDataProp", typeof(byte)), - new VectorStoreRecordDataProperty("NullableByteDataProp", typeof(byte?)), - new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), - new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), - new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), - new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), - new VectorStoreRecordDataProperty("DecimalDataProp", typeof(decimal)), - new VectorStoreRecordDataProperty("NullableDecimalDataProp", typeof(decimal?)), - new VectorStoreRecordDataProperty("DateTimeDataProp", typeof(DateTime)), - new VectorStoreRecordDataProperty("NullableDateTimeDataProp", typeof(DateTime?)), - new VectorStoreRecordDataProperty("DateTimeOffsetDataProp", typeof(DateTimeOffset)), - new VectorStoreRecordDataProperty("NullableDateTimeOffsetDataProp", typeof(DateTimeOffset?)), - new VectorStoreRecordDataProperty("GuidDataProp", typeof(Guid)), - new VectorStoreRecordDataProperty("NullableGuidDataProp", typeof(Guid?)), - new VectorStoreRecordDataProperty("TagListDataProp", typeof(List)), - }; - - private static readonly List s_vectorProperties = new() - { - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), - new VectorStoreRecordVectorProperty("DoubleVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableDoubleVector", typeof(ReadOnlyMemory?)), - }; - - private static readonly Dictionary s_storagePropertyNames = s_dataProperties - .Select(l => l.DataModelPropertyName) - .Concat(s_vectorProperties.Select(l => l.DataModelPropertyName)) - .Concat([s_keyProperty.DataModelPropertyName]) - .ToDictionary(k => k, v => v); + private static readonly VectorStoreRecordModel s_model = new WeaviateModelBuilder() + .Build( + typeof(VectorStoreGenericDataModel), + new VectorStoreRecordDefinition + { + 
Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(Guid)), + + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), + new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), + new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), + new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), + new VectorStoreRecordDataProperty("ShortDataProp", typeof(short)), + new VectorStoreRecordDataProperty("NullableShortDataProp", typeof(short?)), + new VectorStoreRecordDataProperty("ByteDataProp", typeof(byte)), + new VectorStoreRecordDataProperty("NullableByteDataProp", typeof(byte?)), + new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), + new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), + new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), + new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), + new VectorStoreRecordDataProperty("DecimalDataProp", typeof(decimal)), + new VectorStoreRecordDataProperty("NullableDecimalDataProp", typeof(decimal?)), + new VectorStoreRecordDataProperty("DateTimeDataProp", typeof(DateTime)), + new VectorStoreRecordDataProperty("NullableDateTimeDataProp", typeof(DateTime?)), + new VectorStoreRecordDataProperty("DateTimeOffsetDataProp", typeof(DateTimeOffset)), + new VectorStoreRecordDataProperty("NullableDateTimeOffsetDataProp", typeof(DateTimeOffset?)), + new VectorStoreRecordDataProperty("GuidDataProp", typeof(Guid)), + new VectorStoreRecordDataProperty("NullableGuidDataProp", typeof(Guid?)), + new VectorStoreRecordDataProperty("TagListDataProp", typeof(List)), + + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("DoubleVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableDoubleVector", typeof(ReadOnlyMemory?)) + ] + }, + s_jsonSerializerOptions); private static readonly float[] s_floatVector = [1.0f, 2.0f, 3.0f]; private static readonly double[] s_doubleVector = [1.0f, 2.0f, 3.0f]; @@ -81,13 +80,7 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes() { // Arrange var key = new Guid("55555555-5555-5555-5555-555555555555"); - var sut = new WeaviateGenericDataModelMapper( - "Collection", - s_keyProperty, - s_dataProperties, - s_vectorProperties, - s_storagePropertyNames, - s_jsonSerializerOptions); + var sut = new WeaviateGenericDataModelMapper("Collection", s_model, s_jsonSerializerOptions); var dataModel = new VectorStoreGenericDataModel(key) { @@ -133,34 +126,34 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes() // Assert Assert.Equal(key, (Guid?)storageModel["id"]); Assert.Equal("Collection", (string?)storageModel["class"]); - Assert.Equal("string", (string?)storageModel["properties"]?["StringDataProp"]); - Assert.Equal(true, (bool?)storageModel["properties"]?["BoolDataProp"]); - Assert.Equal(false, (bool?)storageModel["properties"]?["NullableBoolDataProp"]); - Assert.Equal(1, (int?)storageModel["properties"]?["IntDataProp"]); - Assert.Equal(2, (int?)storageModel["properties"]?["NullableIntDataProp"]); - Assert.Equal(3L, (long?)storageModel["properties"]?["LongDataProp"]); - Assert.Equal(4L, 
(long?)storageModel["properties"]?["NullableLongDataProp"]); - Assert.Equal((short)5, (short?)storageModel["properties"]?["ShortDataProp"]); - Assert.Equal((short)6, (short?)storageModel["properties"]?["NullableShortDataProp"]); - Assert.Equal((byte)7, (byte?)storageModel["properties"]?["ByteDataProp"]); - Assert.Equal((byte)8, (byte?)storageModel["properties"]?["NullableByteDataProp"]); - Assert.Equal(9.0f, (float?)storageModel["properties"]?["FloatDataProp"]); - Assert.Equal(10.0f, (float?)storageModel["properties"]?["NullableFloatDataProp"]); - Assert.Equal(11.0, (double?)storageModel["properties"]?["DoubleDataProp"]); - Assert.Equal(12.0, (double?)storageModel["properties"]?["NullableDoubleDataProp"]); - Assert.Equal(13.99m, (decimal?)storageModel["properties"]?["DecimalDataProp"]); - Assert.Equal(14.00m, (decimal?)storageModel["properties"]?["NullableDecimalDataProp"]); - Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0), (DateTime?)storageModel["properties"]?["DateTimeDataProp"]); - Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0), (DateTime?)storageModel["properties"]?["NullableDateTimeDataProp"]); - Assert.Equal(new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), (DateTimeOffset?)storageModel["properties"]?["DateTimeOffsetDataProp"]); - Assert.Equal(new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), (DateTimeOffset?)storageModel["properties"]?["NullableDateTimeOffsetDataProp"]); - Assert.Equal(new Guid("11111111-1111-1111-1111-111111111111"), (Guid?)storageModel["properties"]?["GuidDataProp"]); - Assert.Equal(new Guid("22222222-2222-2222-2222-222222222222"), (Guid?)storageModel["properties"]?["NullableGuidDataProp"]); - Assert.Equal(s_taglist, storageModel["properties"]?["TagListDataProp"]!.AsArray().GetValues().ToArray()); - Assert.Equal(s_floatVector, storageModel["vectors"]?["FloatVector"]!.AsArray().GetValues().ToArray()); - Assert.Equal(s_floatVector, storageModel["vectors"]?["NullableFloatVector"]!.AsArray().GetValues().ToArray()); - Assert.Equal(s_doubleVector, storageModel["vectors"]?["DoubleVector"]!.AsArray().GetValues().ToArray()); - Assert.Equal(s_doubleVector, storageModel["vectors"]?["NullableDoubleVector"]!.AsArray().GetValues().ToArray()); + Assert.Equal("string", (string?)storageModel["properties"]?["stringDataProp"]); + Assert.Equal(true, (bool?)storageModel["properties"]?["boolDataProp"]); + Assert.Equal(false, (bool?)storageModel["properties"]?["nullableBoolDataProp"]); + Assert.Equal(1, (int?)storageModel["properties"]?["intDataProp"]); + Assert.Equal(2, (int?)storageModel["properties"]?["nullableIntDataProp"]); + Assert.Equal(3L, (long?)storageModel["properties"]?["longDataProp"]); + Assert.Equal(4L, (long?)storageModel["properties"]?["nullableLongDataProp"]); + Assert.Equal((short)5, (short?)storageModel["properties"]?["shortDataProp"]); + Assert.Equal((short)6, (short?)storageModel["properties"]?["nullableShortDataProp"]); + Assert.Equal((byte)7, (byte?)storageModel["properties"]?["byteDataProp"]); + Assert.Equal((byte)8, (byte?)storageModel["properties"]?["nullableByteDataProp"]); + Assert.Equal(9.0f, (float?)storageModel["properties"]?["floatDataProp"]); + Assert.Equal(10.0f, (float?)storageModel["properties"]?["nullableFloatDataProp"]); + Assert.Equal(11.0, (double?)storageModel["properties"]?["doubleDataProp"]); + Assert.Equal(12.0, (double?)storageModel["properties"]?["nullableDoubleDataProp"]); + Assert.Equal(13.99m, (decimal?)storageModel["properties"]?["decimalDataProp"]); + Assert.Equal(14.00m, 
(decimal?)storageModel["properties"]?["nullableDecimalDataProp"]); + Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0), (DateTime?)storageModel["properties"]?["dateTimeDataProp"]); + Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0), (DateTime?)storageModel["properties"]?["nullableDateTimeDataProp"]); + Assert.Equal(new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), (DateTimeOffset?)storageModel["properties"]?["dateTimeOffsetDataProp"]); + Assert.Equal(new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), (DateTimeOffset?)storageModel["properties"]?["nullableDateTimeOffsetDataProp"]); + Assert.Equal(new Guid("11111111-1111-1111-1111-111111111111"), (Guid?)storageModel["properties"]?["guidDataProp"]); + Assert.Equal(new Guid("22222222-2222-2222-2222-222222222222"), (Guid?)storageModel["properties"]?["nullableGuidDataProp"]); + Assert.Equal(s_taglist, storageModel["properties"]?["tagListDataProp"]!.AsArray().GetValues().ToArray()); + Assert.Equal(s_floatVector, storageModel["vectors"]?["floatVector"]!.AsArray().GetValues().ToArray()); + Assert.Equal(s_floatVector, storageModel["vectors"]?["nullableFloatVector"]!.AsArray().GetValues().ToArray()); + Assert.Equal(s_doubleVector, storageModel["vectors"]?["doubleVector"]!.AsArray().GetValues().ToArray()); + Assert.Equal(s_doubleVector, storageModel["vectors"]?["nullableDoubleVector"]!.AsArray().GetValues().ToArray()); } [Fact] @@ -194,13 +187,7 @@ public void MapFromDataToStorageModelMapsNullValues() }, }; - var sut = new WeaviateGenericDataModelMapper( - "Collection", - keyProperty, - dataProperties, - vectorProperties, - s_storagePropertyNames, - s_jsonSerializerOptions); + var sut = new WeaviateGenericDataModelMapper("Collection", s_model, s_jsonSerializerOptions); // Act var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -216,50 +203,44 @@ public void MapFromStorageToDataModelMapsAllSupportedTypes() { // Arrange var key = new Guid("55555555-5555-5555-5555-555555555555"); - var sut = new WeaviateGenericDataModelMapper( - "Collection", - s_keyProperty, - s_dataProperties, - s_vectorProperties, - s_storagePropertyNames, - s_jsonSerializerOptions); + var sut = new WeaviateGenericDataModelMapper("Collection", s_model, s_jsonSerializerOptions); var storageModel = new JsonObject { ["id"] = key, ["properties"] = new JsonObject { - ["StringDataProp"] = "string", - ["BoolDataProp"] = true, - ["NullableBoolDataProp"] = false, - ["IntDataProp"] = 1, - ["NullableIntDataProp"] = 2, - ["LongDataProp"] = 3L, - ["NullableLongDataProp"] = 4L, - ["ShortDataProp"] = (short)5, - ["NullableShortDataProp"] = (short)6, - ["ByteDataProp"] = (byte)7, - ["NullableByteDataProp"] = (byte)8, - ["FloatDataProp"] = 9.0f, - ["NullableFloatDataProp"] = 10.0f, - ["DoubleDataProp"] = 11.0, - ["NullableDoubleDataProp"] = 12.0, - ["DecimalDataProp"] = 13.99m, - ["NullableDecimalDataProp"] = 14.00m, - ["DateTimeDataProp"] = new DateTime(2021, 1, 1), - ["NullableDateTimeDataProp"] = new DateTime(2021, 1, 1), - ["DateTimeOffsetDataProp"] = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), - ["NullableDateTimeOffsetDataProp"] = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), - ["GuidDataProp"] = new Guid("11111111-1111-1111-1111-111111111111"), - ["NullableGuidDataProp"] = new Guid("22222222-2222-2222-2222-222222222222"), - ["TagListDataProp"] = new JsonArray(s_taglist.Select(l => (JsonValue)l).ToArray()) + ["stringDataProp"] = "string", + ["boolDataProp"] = true, + ["nullableBoolDataProp"] = false, + ["intDataProp"] = 1, + ["nullableIntDataProp"] = 2, + 
["longDataProp"] = 3L, + ["nullableLongDataProp"] = 4L, + ["shortDataProp"] = (short)5, + ["nullableShortDataProp"] = (short)6, + ["byteDataProp"] = (byte)7, + ["nullableByteDataProp"] = (byte)8, + ["floatDataProp"] = 9.0f, + ["nullableFloatDataProp"] = 10.0f, + ["doubleDataProp"] = 11.0, + ["nullableDoubleDataProp"] = 12.0, + ["decimalDataProp"] = 13.99m, + ["nullableDecimalDataProp"] = 14.00m, + ["dateTimeDataProp"] = new DateTime(2021, 1, 1), + ["nullableDateTimeDataProp"] = new DateTime(2021, 1, 1), + ["dateTimeOffsetDataProp"] = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["nullableDateTimeOffsetDataProp"] = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["guidDataProp"] = new Guid("11111111-1111-1111-1111-111111111111"), + ["nullableGuidDataProp"] = new Guid("22222222-2222-2222-2222-222222222222"), + ["tagListDataProp"] = new JsonArray(s_taglist.Select(l => (JsonValue)l).ToArray()) }, ["vectors"] = new JsonObject { - ["FloatVector"] = new JsonArray(s_floatVector.Select(l => (JsonValue)l).ToArray()), - ["NullableFloatVector"] = new JsonArray(s_floatVector.Select(l => (JsonValue)l).ToArray()), - ["DoubleVector"] = new JsonArray(s_doubleVector.Select(l => (JsonValue)l).ToArray()), - ["NullableDoubleVector"] = new JsonArray(s_doubleVector.Select(l => (JsonValue)l).ToArray()), + ["floatVector"] = new JsonArray(s_floatVector.Select(l => (JsonValue)l).ToArray()), + ["nullableFloatVector"] = new JsonArray(s_floatVector.Select(l => (JsonValue)l).ToArray()), + ["doubleVector"] = new JsonArray(s_doubleVector.Select(l => (JsonValue)l).ToArray()), + ["nullableDoubleVector"] = new JsonArray(s_doubleVector.Select(l => (JsonValue)l).ToArray()), } }; @@ -321,22 +302,16 @@ public void MapFromStorageToDataModelMapsNullValues() ["id"] = key, ["properties"] = new JsonObject { - ["StringDataProp"] = null, - ["NullableIntDataProp"] = null, + ["stringDataProp"] = null, + ["nullableIntDataProp"] = null, }, ["vectors"] = new JsonObject { - ["NullableFloatVector"] = null + ["nullableFloatVector"] = null } }; - var sut = new WeaviateGenericDataModelMapper( - "Collection", - s_keyProperty, - s_dataProperties, - s_vectorProperties, - s_storagePropertyNames, - s_jsonSerializerOptions); + var sut = new WeaviateGenericDataModelMapper("Collection", s_model, s_jsonSerializerOptions); // Act var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); @@ -352,13 +327,7 @@ public void MapFromStorageToDataModelMapsNullValues() public void MapFromStorageToDataModelThrowsForMissingKey() { // Arrange - var sut = new WeaviateGenericDataModelMapper( - "Collection", - s_keyProperty, - s_dataProperties, - s_vectorProperties, - s_storagePropertyNames, - s_jsonSerializerOptions); + var sut = new WeaviateGenericDataModelMapper("Collection", s_model, s_jsonSerializerOptions); var storageModel = new JsonObject(); @@ -371,31 +340,26 @@ public void MapFromStorageToDataModelThrowsForMissingKey() public void MapFromDataToStorageModelSkipsMissingProperties() { // Arrange - var key = new Guid("55555555-5555-5555-5555-555555555555"); - var keyProperty = new VectorStoreRecordKeyProperty("Key", typeof(Guid)); - - var dataProperties = new List + var recordDefinition = new VectorStoreRecordDefinition { - new("StringDataProp", typeof(string)), - new("NullableIntDataProp", typeof(int?)), + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(Guid)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new 
VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)) + ] }; - var vectorProperties = new List - { - new("FloatVector", typeof(ReadOnlyMemory)) - }; + var model = new WeaviateModelBuilder().Build(typeof(VectorStoreGenericDataModel), recordDefinition, s_jsonSerializerOptions); - var dataModel = new VectorStoreGenericDataModel(key); - var sut = new WeaviateGenericDataModelMapper( - "Collection", - keyProperty, - dataProperties, - vectorProperties, - s_storagePropertyNames, - s_jsonSerializerOptions); + var key = new Guid("55555555-5555-5555-5555-555555555555"); + + var record = new VectorStoreGenericDataModel(key); + var sut = new WeaviateGenericDataModelMapper("Collection", model, s_jsonSerializerOptions); // Act - var storageModel = sut.MapFromDataToStorageModel(dataModel); + var storageModel = sut.MapFromDataToStorageModel(record); // Assert Assert.Equal(key, (Guid?)storageModel["id"]); @@ -407,27 +371,22 @@ public void MapFromDataToStorageModelSkipsMissingProperties() public void MapFromStorageToDataModelSkipsMissingProperties() { // Arrange - var key = new Guid("55555555-5555-5555-5555-555555555555"); - var keyProperty = new VectorStoreRecordKeyProperty("Key", typeof(Guid)); - - var dataProperties = new List + var recordDefinition = new VectorStoreRecordDefinition { - new("StringDataProp", typeof(string)), - new("NullableIntDataProp", typeof(int?)), + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(Guid)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)) + ] }; - var vectorProperties = new List - { - new("FloatVector", typeof(ReadOnlyMemory)) - }; + var model = new WeaviateModelBuilder().Build(typeof(VectorStoreGenericDataModel), recordDefinition, s_jsonSerializerOptions); - var sut = new WeaviateGenericDataModelMapper( - "Collection", - keyProperty, - dataProperties, - vectorProperties, - s_storagePropertyNames, - s_jsonSerializerOptions); + var key = new Guid("55555555-5555-5555-5555-555555555555"); + + var sut = new WeaviateGenericDataModelMapper("Collection", model, s_jsonSerializerOptions); var storageModel = new JsonObject { diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs index 30de049b9ec8..0203e726c145 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using System.Text.Json; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Weaviate; using Xunit; @@ -17,19 +18,20 @@ public sealed class WeaviateVectorStoreCollectionCreateMappingTests public void ItThrowsExceptionWithInvalidIndexKind() { // Arrange - var vectorProperties = new List - { - new("PropertyName", typeof(ReadOnlyMemory)) { IndexKind = "non-existent-index-kind" } - }; - - var storagePropertyNames = new Dictionary { ["PropertyName"] = "propertyName" }; + var model = new WeaviateModelBuilder() + .Build( + typeof(VectorStoreGenericDataModel), + new VectorStoreRecordDefinition + { + Properties = + [ + new 
VectorStoreRecordKeyProperty("Key", typeof(Guid)), + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { IndexKind = "non-existent-index-kind" } + ] + }); // Act & Assert - Assert.Throws(() => WeaviateVectorStoreCollectionCreateMapping.MapToSchema( - collectionName: "CollectionName", - dataProperties: [], - vectorProperties: vectorProperties, - storagePropertyNames: storagePropertyNames)); + Assert.Throws(() => WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", model)); } [Theory] @@ -39,21 +41,21 @@ public void ItThrowsExceptionWithInvalidIndexKind() public void ItReturnsCorrectSchemaWithValidIndexKind(string indexKind, string expectedIndexKind) { // Arrange - var vectorProperties = new List - { - new("PropertyName", typeof(ReadOnlyMemory)) { IndexKind = indexKind } - }; - - var storagePropertyNames = new Dictionary { ["PropertyName"] = "propertyName" }; + var model = new WeaviateModelBuilder() + .Build( + typeof(VectorStoreGenericDataModel), + new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(Guid)), + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { IndexKind = indexKind } + ] + }); // Act - var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema( - collectionName: "CollectionName", - dataProperties: [], - vectorProperties: vectorProperties, - storagePropertyNames: storagePropertyNames); - - var actualIndexKind = schema.VectorConfigurations["propertyName"].VectorIndexType; + var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", model); + var actualIndexKind = schema.VectorConfigurations["Vector"].VectorIndexType; // Assert Assert.Equal(expectedIndexKind, actualIndexKind); @@ -63,19 +65,20 @@ public void ItReturnsCorrectSchemaWithValidIndexKind(string indexKind, string ex public void ItThrowsExceptionWithInvalidDistanceFunction() { // Arrange - var vectorProperties = new List - { - new("PropertyName", typeof(ReadOnlyMemory)) { DistanceFunction = "non-existent-distance-function" } - }; - - var storagePropertyNames = new Dictionary { ["PropertyName"] = "propertyName" }; + var model = new WeaviateModelBuilder() + .Build( + typeof(VectorStoreGenericDataModel), + new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(Guid)), + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { DistanceFunction = "non-existent-distance-function" } + ] + }); // Act & Assert - Assert.Throws(() => WeaviateVectorStoreCollectionCreateMapping.MapToSchema( - collectionName: "CollectionName", - dataProperties: [], - vectorProperties: vectorProperties, - storagePropertyNames: storagePropertyNames)); + Assert.Throws(() => WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", model)); } [Theory] @@ -87,21 +90,22 @@ public void ItThrowsExceptionWithInvalidDistanceFunction() public void ItReturnsCorrectSchemaWithValidDistanceFunction(string distanceFunction, string expectedDistanceFunction) { // Arrange - var vectorProperties = new List - { - new("PropertyName", typeof(ReadOnlyMemory)) { DistanceFunction = distanceFunction } - }; - - var storagePropertyNames = new Dictionary { ["PropertyName"] = "propertyName" }; + var model = new WeaviateModelBuilder() + .Build( + typeof(VectorStoreGenericDataModel), + new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(Guid)), + new 
VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { DistanceFunction = distanceFunction } + ] + }); // Act - var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema( - collectionName: "CollectionName", - dataProperties: [], - vectorProperties: vectorProperties, - storagePropertyNames: storagePropertyNames); + var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", model); - var actualDistanceFunction = schema.VectorConfigurations["propertyName"].VectorIndexConfig?.Distance; + var actualDistanceFunction = schema.VectorConfigurations["Vector"].VectorIndexConfig?.Distance; // Assert Assert.Equal(expectedDistanceFunction, actualDistanceFunction); @@ -159,19 +163,22 @@ public void ItReturnsCorrectSchemaWithValidDistanceFunction(string distanceFunct public void ItMapsPropertyCorrectly(Type propertyType, string expectedPropertyType) { // Arrange - var dataProperties = new List - { - new("PropertyName", propertyType) { IsFilterable = true, IsFullTextSearchable = true } - }; - - var storagePropertyNames = new Dictionary { ["PropertyName"] = "propertyName" }; + var model = new WeaviateModelBuilder() + .Build( + typeof(VectorStoreGenericDataModel), + new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(Guid)), + new VectorStoreRecordDataProperty("PropertyName", propertyType) { IsFilterable = true, IsFullTextSearchable = true }, + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) + ] + }, + new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }); // Act - var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema( - collectionName: "CollectionName", - dataProperties: dataProperties, - vectorProperties: [], - storagePropertyNames: storagePropertyNames); + var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", model); var property = schema.Properties[0]; diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs index 5a009649ab1b..2c971daed669 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs @@ -1,10 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. 
using System; -using System.Collections.Generic; using System.Text.Json; using System.Text.Json.Serialization; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.Weaviate; using Xunit; @@ -19,7 +19,6 @@ public sealed class WeaviateVectorStoreRecordCollectionQueryBuilderTests { private const string CollectionName = "Collection"; private const string VectorPropertyName = "descriptionEmbedding"; - private const string KeyPropertyName = "HotelId"; private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() { @@ -32,21 +31,23 @@ public sealed class WeaviateVectorStoreRecordCollectionQueryBuilderTests } }; - private readonly Dictionary _storagePropertyNames = new() - { - ["HotelId"] = "hotelId", - ["HotelName"] = "hotelName", - ["HotelCode"] = "hotelCode", - ["Tags"] = "tags", - ["DescriptionEmbedding"] = "descriptionEmbedding" - }; + private readonly VectorStoreRecordModel _model = new WeaviateModelBuilder() + .Build( + typeof(VectorStoreGenericDataModel), + new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("HotelId", typeof(Guid)) { StoragePropertyName = "hotelId" }, + new VectorStoreRecordDataProperty("HotelName", typeof(string)) { StoragePropertyName = "hotelName" }, + new VectorStoreRecordDataProperty("HotelCode", typeof(string)) { StoragePropertyName = "hotelCode" }, + new VectorStoreRecordDataProperty("Tags", typeof(string[])) { StoragePropertyName = "tags" }, + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory)) { StoragePropertyName = "descriptionEmbeddding" }, + ] + }); private readonly ReadOnlyMemory _vector = new([31f, 32f, 33f, 34f]); - private readonly List _vectorPropertyStorageNames = ["descriptionEmbedding"]; - - private readonly List _dataPropertyStorageNames = ["hotelName", "hotelCode"]; - [Fact] public void BuildSearchQueryByDefaultReturnsValidQuery() { @@ -63,7 +64,7 @@ public void BuildSearchQueryByDefaultReturnsValidQuery() vector: [31,32,33,34] } ) { - hotelName hotelCode + HotelName HotelCode Tags _additional { id distance @@ -85,12 +86,9 @@ hotelName hotelCode this._vector, CollectionName, VectorPropertyName, - KeyPropertyName, s_jsonSerializerOptions, searchOptions, - this._storagePropertyNames, - this._vectorPropertyStorageNames, - this._dataPropertyStorageNames); + this._model); // Assert Assert.Equal(expectedQuery, query); @@ -115,23 +113,20 @@ public void BuildSearchQueryWithIncludedVectorsReturnsValidQuery() this._vector, CollectionName, VectorPropertyName, - KeyPropertyName, s_jsonSerializerOptions, searchOptions, - this._storagePropertyNames, - this._vectorPropertyStorageNames, - this._dataPropertyStorageNames); + this._model); // Assert - Assert.Contains("vectors { descriptionEmbedding }", query); + Assert.Contains("vectors { DescriptionEmbedding }", query); } [Fact] public void BuildSearchQueryWithFilterReturnsValidQuery() { // Arrange - const string ExpectedFirstSubquery = """{ path: ["hotelName"], operator: Equal, valueText: "Test Name" }"""; - const string ExpectedSecondSubquery = """{ path: ["tags"], operator: ContainsAny, valueText: ["t1"] }"""; + const string ExpectedFirstSubquery = """{ path: ["HotelName"], operator: Equal, valueText: "Test Name" }"""; + const string ExpectedSecondSubquery = """{ path: ["Tags"], operator: ContainsAny, valueText: ["t1"] }"""; var searchOptions = new VectorSearchOptions { @@ -147,12 +142,9 @@ public void BuildSearchQueryWithFilterReturnsValidQuery() this._vector, 
CollectionName, VectorPropertyName, - KeyPropertyName, s_jsonSerializerOptions, searchOptions, - this._storagePropertyNames, - this._vectorPropertyStorageNames, - this._dataPropertyStorageNames); + this._model); // Assert Assert.Contains(ExpectedFirstSubquery, query); @@ -175,12 +167,9 @@ public void BuildSearchQueryWithInvalidFilterValueThrowsException() this._vector, CollectionName, VectorPropertyName, - KeyPropertyName, s_jsonSerializerOptions, searchOptions, - this._storagePropertyNames, - this._vectorPropertyStorageNames, - this._dataPropertyStorageNames)); + this._model)); } [Fact] @@ -199,12 +188,9 @@ public void BuildSearchQueryWithNonExistentPropertyInFilterThrowsException() this._vector, CollectionName, VectorPropertyName, - KeyPropertyName, s_jsonSerializerOptions, searchOptions, - this._storagePropertyNames, - this._vectorPropertyStorageNames, - this._dataPropertyStorageNames)); + this._model)); } #region private diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs index a77944379b5a..373b7f836c00 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs @@ -32,7 +32,7 @@ public WeaviateVectorStoreRecordCollectionTests() public void ConstructorForModelWithoutKeyThrowsException() { // Act & Assert - var exception = Assert.Throws(() => new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection")); + var exception = Assert.Throws(() => new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection")); Assert.Contains("No key property found", exception.Message); } diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs index 5f79925c2c48..f624599f5478 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs @@ -28,37 +28,24 @@ public sealed class WeaviateVectorStoreRecordMapperTests } }; - private readonly WeaviateVectorStoreRecordMapper _sut; - - public WeaviateVectorStoreRecordMapperTests() - { - var storagePropertyNames = new Dictionary - { - ["HotelId"] = "hotelId", - ["HotelName"] = "hotelName", - ["Tags"] = "tags", - ["DescriptionEmbedding"] = "descriptionEmbedding", - }; - - var dataProperties = new List - { - new("HotelName", typeof(string)), - new("Tags", typeof(List)) - }; - - var vectorProperties = new List - { - new("DescriptionEmbedding", typeof(ReadOnlyMemory)) - }; - - this._sut = new WeaviateVectorStoreRecordMapper( + private readonly WeaviateVectorStoreRecordMapper _sut = + new( "CollectionName", - new VectorStoreRecordKeyProperty("HotelId", typeof(Guid)), - dataProperties, - vectorProperties, - storagePropertyNames, + new WeaviateModelBuilder() + .Build( + typeof(VectorStoreGenericDataModel), + new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty("HotelId", typeof(Guid)), + new VectorStoreRecordDataProperty("HotelName", typeof(string)), + new VectorStoreRecordDataProperty("Tags", typeof(List)), + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory)) + ] + }, + s_jsonSerializerOptions), 
s_jsonSerializerOptions); - } [Fact] public void MapFromDataToStorageModelReturnsValidObject() diff --git a/dotnet/src/Connectors/Directory.Build.props b/dotnet/src/Connectors/Directory.Build.props new file mode 100644 index 000000000000..46c46d509f83 --- /dev/null +++ b/dotnet/src/Connectors/Directory.Build.props @@ -0,0 +1,9 @@ + + + + + + $(NoWarn);MEVD9001 + + + \ No newline at end of file diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordDataPropertyModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordDataPropertyModel.cs new file mode 100644 index 000000000000..4dce8d215f0b --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordDataPropertyModel.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.Extensions.VectorData.ConnectorSupport; + +/// +/// Represents a data property on a vector store record. +/// This is an internal support type meant for use by connectors only, and not for use by applications. +/// +[Experimental("MEVD9001")] +public class VectorStoreRecordDataPropertyModel(string modelName, Type type) : VectorStoreRecordPropertyModel(modelName, type) +{ + /// + /// Gets or sets a value indicating whether this data property is filterable. + /// + /// + /// The default is . + /// + public bool IsFilterable { get; set; } + + /// + /// Gets or sets a value indicating whether this data property is full text searchable. + /// + /// + /// The default is . + /// + public bool IsFullTextSearchable { get; set; } + + /// + // TODO: Temporary, remove once we move to Dictionary as the dynamic representation + public override object? GetValueAsObject(object record) + { + if (this.PropertyInfo is null) + { + var type = record.GetType(); + + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) + { + var dataProperty = type.GetProperty("Data")!; + var dictionary = (Dictionary)dataProperty.GetValue(record)!; + return dictionary.TryGetValue(this.ModelName, out var value) + ? value + : null; + } + } + + return base.GetValueAsObject(record); + } + + /// + // TODO: Temporary, remove once we move to Dictionary as the dynamic representation + public override void SetValueAsObject(object record, object? value) + { + if (this.PropertyInfo is null) + { + var type = record.GetType(); + + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) + { + var dataProperty = type.GetProperty("Data")!; + var dictionary = (Dictionary)dataProperty.GetValue(record)!; + dictionary[this.ModelName] = value; + return; + } + } + + base.SetValueAsObject(record, value); + } + + /// + public override string ToString() + => $"{this.ModelName} (Data, {this.Type.Name})"; +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordJsonModelBuilder.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordJsonModelBuilder.cs new file mode 100644 index 000000000000..5f283aad8e76 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordJsonModelBuilder.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft. All rights reserved. 
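// Illustrative usage (hypothetical 'Hotel' type; the Weaviate unit tests in this patch call the same
// overload): a System.Text.Json-based connector builds its model through a derived builder, e.g.
//
//     var model = new WeaviateModelBuilder().Build(typeof(Hotel), recordDefinition, jsonSerializerOptions);
//
// so that the storage names recorded in the model match the names the external serializer will emit.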
+ +using System; +using System.Diagnostics.CodeAnalysis; +using System.Reflection; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace Microsoft.Extensions.VectorData.ConnectorSupport; + +/// +/// A model builder that performs logic specific to connectors which use System.Text.Json for serialization. +/// This is an internal support type meant for use by connectors only, and not for use by applications. +/// +[Experimental("MEVD9001")] +public class VectorStoreRecordJsonModelBuilder : VectorStoreRecordModelBuilder +{ + private JsonSerializerOptions _jsonSerializerOptions = JsonSerializerOptions.Default; + + /// + /// Constructs a new . + /// + public VectorStoreRecordJsonModelBuilder(VectorStoreRecordModelBuildingOptions options) + : base(options) + { + if (!options.UsesExternalSerializer) + { + throw new ArgumentNullException(nameof(options), $"{nameof(options.UsesExternalSerializer)} must be set when using this model builder."); + } + } + + /// + /// Builds and returns an from the given and . + /// + public virtual VectorStoreRecordModel Build(Type type, VectorStoreRecordDefinition? vectorStoreRecordDefinition, JsonSerializerOptions? jsonSerializerOptions) + { + if (jsonSerializerOptions is not null) + { + this._jsonSerializerOptions = jsonSerializerOptions; + } + + return this.Build(type, vectorStoreRecordDefinition); + } + + /// + protected override void Customize() + { + // This mimics the naming behavior of the System.Text.Json serializer, which we use for serialization/deserialization. + // The property storage names in the model must in sync with the serializer configuration, since the model is used e.g. for filtering + // even if serialization/deserialization doesn't use the model. + var namingPolicy = this._jsonSerializerOptions.PropertyNamingPolicy; + + foreach (var property in this.Properties) + { + var keyPropertyWithReservedName = this.Options.ReservedKeyStorageName is not null && property is VectorStoreRecordKeyPropertyModel; + string storageName; + + if (property.PropertyInfo?.GetCustomAttribute() is { } jsonPropertyNameAttribute) + { + if (keyPropertyWithReservedName && jsonPropertyNameAttribute.Name != this.Options.ReservedKeyStorageName) + { + throw new InvalidOperationException($"The key property for your connector must always have the reserved name '{this.Options.ReservedKeyStorageName}' and cannot be changed."); + } + + storageName = jsonPropertyNameAttribute.Name; + } + else if (namingPolicy is not null) + { + storageName = namingPolicy.ConvertName(property.ModelName); + } + else + { + storageName = property.ModelName; + } + + if (keyPropertyWithReservedName) + { + // Somewhat hacky: + // Some providers (Weaviate, Cosmos NoSQL) have a fixed, reserved storage name for keys (id), and at the same time use an external + // JSON serializer to serialize the entire user POCO. Since the serializer is unaware of the reserved storage name, it will produce + // a storage name as usual, based on the .NET property's name, possibly with a naming policy applied to it. The connector then needs + // to look that up and replace with the reserved name. + // So we store the policy-transformed name, as StorageName contains the reserved name. 
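+            // For example (illustrative values only): with ReservedKeyStorageName = "id" and a camel-case naming
+            // policy, a key property named "HotelId" keeps StorageName = "id", while TemporaryStorageName holds
+            // "hotelId", the name the external JSON serializer will actually write for that property.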
+ property.TemporaryStorageName = storageName; + } + else + { + property.StorageName = storageName; + } + } + } +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordKeyPropertyModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordKeyPropertyModel.cs new file mode 100644 index 000000000000..3311786c1e6a --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordKeyPropertyModel.cs @@ -0,0 +1,55 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.Extensions.VectorData.ConnectorSupport; + +/// +/// Represents a key property on a vector store record. +/// This is an internal support type meant for use by connectors only, and not for use by applications. +/// +[Experimental("MEVD9001")] +public class VectorStoreRecordKeyPropertyModel(string modelName, Type type) : VectorStoreRecordPropertyModel(modelName, type) +{ + /// + // TODO: Temporary, remove once we move to Dictionary as the dynamic representation + public override object? GetValueAsObject(object record) + { + if (this.PropertyInfo is null) + { + var type = record.GetType(); + + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) + { + var keyProperty = type.GetProperty("Key")!; + return keyProperty.GetValue(record); + } + } + + return base.GetValueAsObject(record); + } + + /// + // TODO: Temporary, remove once we move to Dictionary as the dynamic representation + public override void SetValueAsObject(object record, object? value) + { + if (this.PropertyInfo is null) + { + var type = record.GetType(); + + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) + { + var keyProperty = type.GetProperty("Key")!; + keyProperty.SetValue(record, value); + return; + } + } + + base.SetValueAsObject(record, value); + } + + /// + public override string ToString() + => $"{this.ModelName} (Key, {this.Type.Name})"; +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs new file mode 100644 index 000000000000..6f6292eda944 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs @@ -0,0 +1,241 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Linq.Expressions; +using System.Reflection; +using System.Runtime.CompilerServices; + +namespace Microsoft.Extensions.VectorData.ConnectorSupport; + +/// +/// A model representing a record in a vector store collection. +/// This is an internal support type meant for use by connectors only, and not for use by applications. +/// +[Experimental("MEVD9001")] +public sealed class VectorStoreRecordModel +{ + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)] + private readonly Type _recordType; + + private VectorStoreRecordKeyPropertyModel? _singleKeyProperty; + private VectorStoreRecordVectorPropertyModel? _singleVectorProperty; + private VectorStoreRecordDataPropertyModel? _singleFullTextSearchProperty; + + /// + /// The key properties of the record. 
+ /// + public IReadOnlyList KeyProperties { get; } + + /// + /// The dataproperties of the record. + /// + public IReadOnlyList DataProperties { get; } + + /// + /// The vector properties of the record. + /// + public IReadOnlyList VectorProperties { get; } + + /// + /// All properties of the record, of all types. + /// + public IReadOnlyList Properties { get; } + + /// + /// All properties of the record, of all types, indexed by their model name. + /// + public IReadOnlyDictionary PropertyMap { get; } + + internal VectorStoreRecordModel( + Type recordType, + IReadOnlyList keyProperties, + IReadOnlyList dataProperties, + IReadOnlyList vectorProperties, + IReadOnlyDictionary propertyMap) + { + this._recordType = recordType; + this.KeyProperties = keyProperties; + this.DataProperties = dataProperties; + this.VectorProperties = vectorProperties; + this.PropertyMap = propertyMap; + this.Properties = propertyMap.Values.ToList(); + } + + /// + /// Returns the single key property in the model, and throws if there are multiple key properties. + /// Suitable for connectors where validation is in place for single keys only (). + /// + public VectorStoreRecordKeyPropertyModel KeyProperty => this._singleKeyProperty ??= this.KeyProperties.Single(); + + /// + /// Returns the single vector property in the model, and throws if there are multiple vector properties. + /// Suitable for connectors where validation is in place for single vectors only (). + /// + public VectorStoreRecordVectorPropertyModel VectorProperty => this._singleVectorProperty ??= this.VectorProperties.Single(); + + /// + /// Instantiates a new record of the specified type. + /// + // TODO: the pattern of first instantiating via parameterless constructor and then populating the properties isn't compatible + // with read-only types, where properties have no setters. Supporting those would be problematic given the that different + // connectors have completely different representations of the data coming back from the database, and which needs to be + // populated. + public TRecord CreateRecord() + { + Debug.Assert(typeof(TRecord) == this._recordType, "Type mismatch between record type and model type."); + + return Activator.CreateInstance() ?? throw new InvalidOperationException($"Failed to instantiate record of type '{typeof(TRecord).Name}'."); + } + + /// + /// Get the vector property with the provided name if a name is provided, and fall back + /// to a vector property in the schema if not. If no name is provided and there is more + /// than one vector property, an exception will be thrown. + /// + /// The search options. + /// Thrown if the provided property name is not a valid vector property name. + public VectorStoreRecordVectorPropertyModel GetVectorPropertyOrSingle(VectorSearchOptions? searchOptions) + { + if (searchOptions is not null) + { +#pragma warning disable CS0618 // Type or member is obsolete + string? vectorPropertyName = searchOptions.VectorPropertyName; +#pragma warning restore CS0618 // Type or member is obsolete + + // If vector property name is provided, try to find it in schema or throw an exception. + if (!string.IsNullOrWhiteSpace(vectorPropertyName)) + { + // Check vector properties by data model property name. + return this.VectorProperties.FirstOrDefault(p => p.ModelName == vectorPropertyName) + ?? 
throw new InvalidOperationException($"The {this._recordType.FullName} type does not have a vector property named '{vectorPropertyName}'."); + } + else if (searchOptions.VectorProperty is Expression> expression) + { + return this.GetMatchingProperty(expression); + } + } + + // If vector property name is not provided, check if there is a single vector property, or throw if there are no vectors or more than one. + return this._singleVectorProperty ??= this.VectorProperties switch + { + [var singleProperty] => singleProperty, + { Count: 0 } => throw new InvalidOperationException($"The '{this._recordType.Name}' type does not have any vector properties."), + _ => throw new InvalidOperationException($"The '{this._recordType.Name}' type has multiple vector properties, please specify your chosen property via options.") + }; + } + + /// + /// Get the text data property, that has full text search indexing enabled, with the provided name if a name is provided, and fall back + /// to a text data property in the schema if not. If no name is provided and there is more than one text data property with + /// full text search indexing enabled, an exception will be thrown. + /// + /// The full text search property selector. + /// Thrown if the provided property name is not a valid text data property name. + public VectorStoreRecordDataPropertyModel GetFullTextDataPropertyOrSingle(Expression>? expression) + { + if (expression is not null) + { + var property = this.GetMatchingProperty(expression); + + return property.IsFullTextSearchable + ? property + : throw new InvalidOperationException($"The property '{property.ModelName}' on '{this._recordType.Name}' must have full text search enabled."); + } + + if (this._singleFullTextSearchProperty is null) + { + // If text data property name is not provided, check if a single full text searchable text property exists or throw otherwise. + var fullTextStringProperties = this.DataProperties + .Where(l => l.Type == typeof(string) && l.IsFullTextSearchable) + .ToList(); + + // If text data property name is not provided, check if a single full text searchable text property exists or throw otherwise. + this._singleFullTextSearchProperty = fullTextStringProperties switch + { + [var singleProperty] => singleProperty, + { Count: 0 } => throw new InvalidOperationException($"The '{this._recordType.Name}' type does not have any text data properties that have full text search enabled."), + _ => throw new InvalidOperationException($"The '{this._recordType.Name}' type has multiple text data properties that have full text search enabled, please specify your chosen property via options.") + }; + } + + return this._singleFullTextSearchProperty; + } + + private TProperty GetMatchingProperty(Expression> expression) + where TProperty : VectorStoreRecordPropertyModel + { + bool data = typeof(TProperty) == typeof(VectorStoreRecordDataProperty); + string expectedGenericModelPropertyName = data + ? nameof(VectorStoreGenericDataModel.Data) + : nameof(VectorStoreGenericDataModel.Vectors); + + MemberExpression? member = expression.Body as MemberExpression; + // (TRecord r) => r.PropertyName is translated into + // (TRecord r) => (object)r.PropertyName for properties that return struct like ReadOnlyMemory. 
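+        // For instance (hypothetical record type): a selector such as (Hotel r) => r.DescriptionEmbedding,
+        // where DescriptionEmbedding is a ReadOnlyMemory<float>, arrives as Convert(r.DescriptionEmbedding, object),
+        // so the MemberExpression is unwrapped from the UnaryExpression below before matching it to a PropertyInfo.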
+ if (member is null && expression.Body is UnaryExpression unary + && unary.Operand.NodeType == ExpressionType.MemberAccess) + { + member = unary.Operand as MemberExpression; + } + + if (member is { Member: PropertyInfo clrProperty } + && expression.Parameters.Count == 1 + && member.Expression == expression.Parameters[0]) + { + foreach (var property in this.Properties) + { + if (property.PropertyInfo == clrProperty) + { + // TODO: Property error checking if the wrong property type is selected. + return (TProperty)property; + } + } + + throw new InvalidOperationException($"The property {clrProperty.Name} of {typeof(TRecord).FullName} is not a {(data ? "Data" : "Vector")} property."); + } + // (VectorStoreGenericDataModel r) => r.Vectors["PropertyName"] + else if (expression.Body is MethodCallExpression methodCall + // It's a Func, object> + && expression.Type.IsGenericType + && expression.Type.GenericTypeArguments.Length == 2 + && expression.Type.GenericTypeArguments[0].IsGenericType + && expression.Type.GenericTypeArguments[0].GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>) + // It's accessing VectorStoreGenericDataModel.Vectors (or Data) + && methodCall.Object is MemberExpression memberAccess + && memberAccess.Member.Name == expectedGenericModelPropertyName + // and has a single argument + && methodCall.Arguments.Count == 1) + { + string name = methodCall.Arguments[0] switch + { + ConstantExpression constant when constant.Value is string text => text, + MemberExpression field when TryGetCapturedValue(field, out object? capturedValue) && capturedValue is string text => text, + _ => throw new InvalidOperationException($"The value of the provided {(data ? "Additional" : "Vector")}Property option is not a valid expression.") + }; + + // TODO: Property error checking if the wrong property type is selected. + return (TProperty)(this.Properties.FirstOrDefault(p => p.ModelName == name) + ?? throw new InvalidOperationException($"The {typeof(TRecord).FullName} type does not have a vector property named '{name}'.")); + } + + throw new InvalidOperationException($"The value of the provided {(data ? "Additional" : "Vector")}Property option is not a valid expression."); + + static bool TryGetCapturedValue(Expression expression, out object? capturedValue) + { + if (expression is MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } + && constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) + && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true)) + { + capturedValue = fieldInfo.GetValue(constant.Value); + return true; + } + + capturedValue = null; + return false; + } + } +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs new file mode 100644 index 000000000000..ef7eab63a713 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs @@ -0,0 +1,502 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Reflection; + +namespace Microsoft.Extensions.VectorData.ConnectorSupport; + +/// +/// Represents a builder for a . +/// This is an internal support type meant for use by connectors only, and not for use by applications. 
+/// +/// Note that this class is single-use only, and not thread-safe. +[Experimental("MEVD9001")] +public class VectorStoreRecordModelBuilder +{ + /// + /// Options for building the model. + /// + protected VectorStoreRecordModelBuildingOptions Options { get; } + + /// + /// The key properties of the record. + /// + protected List KeyProperties { get; } = []; + + /// + /// The data properties of the record. + /// + protected List DataProperties { get; } = []; + + /// + /// The vector properties of the record. + /// + protected List VectorProperties { get; } = []; + + /// + /// All properties of the record, of all types. + /// + protected IEnumerable Properties => this.PropertyMap.Values; + + /// + /// All properties of the record, of all types, indexed by their model name. + /// + protected Dictionary PropertyMap { get; } = new(); + + /// + /// Constructs a new . + /// + public VectorStoreRecordModelBuilder(VectorStoreRecordModelBuildingOptions options) + { + if (options.SupportsMultipleKeys && options.ReservedKeyStorageName is not null) + { + throw new ArgumentException($"{nameof(VectorStoreRecordModelBuildingOptions.ReservedKeyStorageName)} cannot be set when {nameof(VectorStoreRecordModelBuildingOptions.SupportsMultipleKeys)} is set."); + } + + this.Options = options; + } + + /// + /// Builds and returns an from the given and . + /// + [RequiresDynamicCode("Currently not compatible with NativeAOT code")] + [RequiresUnreferencedCode("Currently not compatible with trimming")] // TODO + public virtual VectorStoreRecordModel Build(Type type, VectorStoreRecordDefinition? vectorStoreRecordDefinition) + { + var dynamicMapping = type.IsGenericType && type.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>); + + if (!dynamicMapping) + { + this.ProcessTypeProperties(type, vectorStoreRecordDefinition); + } + + if (vectorStoreRecordDefinition is null) + { + if (dynamicMapping) + { + throw new ArgumentException("Vector store record definition must be provided for dynamic mapping."); + } + } + else + { + this.ProcessRecordDefinition(vectorStoreRecordDefinition, dynamicMapping ? null : type); + } + + this.Customize(); + this.Validate(type); + + return new(type, this.KeyProperties, this.DataProperties, this.VectorProperties, this.PropertyMap); + } + + /// + /// As part of building the model, this method processes the properties of the given , + /// detecting and reading attributes that affect the model. Not called for dynamic mapping scenarios. + /// + // TODO: This traverses the CLR type's properties, making it incompatible with trimming (and NativeAOT). + // TODO: We could put [DynamicallyAccessedMembers] to preserve all properties, but that approach wouldn't + // TODO: work with hierarchical data models (#10957). + [RequiresUnreferencedCode("Traverses the CLR type's properties with reflection, so not compatible with trimming")] + protected virtual void ProcessTypeProperties(Type type, VectorStoreRecordDefinition? vectorStoreRecordDefinition) + { + // We want to allow the user-provided record definition to override anything configured via attributes + // (allowing the same CLR type + attributes to be used with different record definitions). + foreach (var clrProperty in type.GetProperties()) + { + VectorStoreRecordPropertyModel? property = null; + string? 
storageName = null; + + if (clrProperty.GetCustomAttribute() is { } keyAttribute) + { + var keyProperty = new VectorStoreRecordKeyPropertyModel(clrProperty.Name, clrProperty.PropertyType); + this.KeyProperties.Add(keyProperty); + storageName = keyAttribute.StoragePropertyName; + property = keyProperty; + } + + if (clrProperty.GetCustomAttribute() is { } dataAttribute) + { + if (property is not null) + { + // TODO: Test + throw new InvalidOperationException($"Property '{type.Name}.{clrProperty.Name}' has multiple of {nameof(VectorStoreRecordKeyAttribute)}, {nameof(VectorStoreRecordDataAttribute)} or {nameof(VectorStoreRecordVectorAttribute)}. Only one of these attributes can be specified on a property."); + } + + var dataProperty = new VectorStoreRecordDataPropertyModel(clrProperty.Name, clrProperty.PropertyType) + { + IsFilterable = dataAttribute.IsFilterable, + IsFullTextSearchable = dataAttribute.IsFullTextSearchable, + }; + + this.DataProperties.Add(dataProperty); + storageName = dataAttribute.StoragePropertyName; + property = dataProperty; + } + + if (clrProperty.GetCustomAttribute() is { } vectorAttribute) + { + if (property is not null) + { + throw new InvalidOperationException($"Only one of {nameof(VectorStoreRecordKeyAttribute)}, {nameof(VectorStoreRecordDataAttribute)} and {nameof(VectorStoreRecordVectorAttribute)} can be applied to a property."); + } + + var vectorProperty = new VectorStoreRecordVectorPropertyModel(clrProperty.Name, clrProperty.PropertyType) + { + Dimensions = vectorAttribute.Dimensions, + IndexKind = vectorAttribute.IndexKind, + DistanceFunction = vectorAttribute.DistanceFunction + }; + + this.VectorProperties.Add(vectorProperty); + storageName = vectorAttribute.StoragePropertyName; + property = vectorProperty; + } + + if (property is null) + { + // No mapping attribute was found, ignore this property. + continue; + } + + this.SetPropertyStorageName(property, storageName); + + property.PropertyInfo = clrProperty; + this.PropertyMap.Add(clrProperty.Name, property); + } + } + + /// + /// As part of building the model, this method processes the given . + /// + protected virtual void ProcessRecordDefinition( + VectorStoreRecordDefinition vectorStoreRecordDefinition, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)] Type? type) + { + foreach (VectorStoreRecordProperty definitionProperty in vectorStoreRecordDefinition.Properties) + { + if (!this.PropertyMap.TryGetValue(definitionProperty.DataModelPropertyName, out var property)) + { + // Property wasn't found attribute-annotated on the CLR type, so we need to add it. + + // TODO: Make the property CLR type optional - no need to specify it when using a CLR type. 
+ switch (definitionProperty) + { + case VectorStoreRecordKeyProperty definitionKeyProperty: + var keyProperty = new VectorStoreRecordKeyPropertyModel(definitionKeyProperty.DataModelPropertyName, definitionKeyProperty.PropertyType); + this.KeyProperties.Add(keyProperty); + this.PropertyMap.Add(definitionKeyProperty.DataModelPropertyName, keyProperty); + property = keyProperty; + break; + case VectorStoreRecordDataProperty definitionDataProperty: + var dataProperty = new VectorStoreRecordDataPropertyModel(definitionDataProperty.DataModelPropertyName, definitionDataProperty.PropertyType); + this.DataProperties.Add(dataProperty); + this.PropertyMap.Add(definitionDataProperty.DataModelPropertyName, dataProperty); + property = dataProperty; + break; + case VectorStoreRecordVectorProperty definitionVectorProperty: + var vectorProperty = new VectorStoreRecordVectorPropertyModel(definitionVectorProperty.DataModelPropertyName, definitionVectorProperty.PropertyType); + this.VectorProperties.Add(vectorProperty); + this.PropertyMap.Add(definitionVectorProperty.DataModelPropertyName, vectorProperty); + property = vectorProperty; + break; + default: + throw new ArgumentException($"Unknown type '{definitionProperty.GetType().FullName}' in vector store record definition."); + } + + if (type is not null) + { + // If we have a CLR type (POCO, not dynamic mapping), get the .NET property's type and make sure it matches the definition. + property.PropertyInfo = type.GetProperty(property.ModelName) + ?? throw new InvalidOperationException($"Property '{property.ModelName}' not found on CLR type '{type.FullName}'."); + + if (property.PropertyInfo.PropertyType != property.Type) + { + throw new InvalidOperationException($"Property '{property.ModelName}' has a different CLR type in the record definition and on the CLR type."); + } + } + } + + this.SetPropertyStorageName(property, definitionProperty.StoragePropertyName); + + switch (definitionProperty) + { + case VectorStoreRecordKeyProperty definitionKeyProperty: + if (property is not VectorStoreRecordKeyPropertyModel keyPropertyModel) + { + throw new InvalidOperationException( + $"Property '{property.ModelName}' is present in the {nameof(VectorStoreRecordDefinition)} as a key property, but the .NET property on type '{type?.Name}' has an incompatible attribute."); + } + + break; + + case VectorStoreRecordDataProperty definitionDataProperty: + if (property is not VectorStoreRecordDataPropertyModel dataProperty) + { + throw new InvalidOperationException( + $"Property '{property.ModelName}' is present in the {nameof(VectorStoreRecordDefinition)} as a data property, but the .NET property on type '{type?.Name}' has an incompatible attribute."); + } + + dataProperty.IsFilterable = definitionDataProperty.IsFilterable; + dataProperty.IsFullTextSearchable = definitionDataProperty.IsFullTextSearchable; + + break; + + case VectorStoreRecordVectorProperty definitionVectorProperty: + if (property is not VectorStoreRecordVectorPropertyModel vectorProperty) + { + throw new InvalidOperationException( + $"Property '{property.ModelName}' is present in the {nameof(VectorStoreRecordDefinition)} as a vector property, but the .NET property on type '{type?.Name}' has an incompatible attribute."); + } + + if (definitionVectorProperty.Dimensions is not null) + { + vectorProperty.Dimensions = definitionVectorProperty.Dimensions; + } + + if (definitionVectorProperty.IndexKind is not null) + { + vectorProperty.IndexKind = definitionVectorProperty.IndexKind; + } + + if 
(definitionVectorProperty.DistanceFunction is not null) + { + vectorProperty.DistanceFunction = definitionVectorProperty.DistanceFunction; + } + + break; + + default: + throw new ArgumentException($"Unknown type '{definitionProperty.GetType().FullName}' in vector store record definition."); + } + } + } + + private void SetPropertyStorageName(VectorStoreRecordPropertyModel property, string? storageName) + { + if (property is VectorStoreRecordKeyPropertyModel && this.Options.ReservedKeyStorageName is not null) + { + // If we have ReservedKeyStorageName, there can only be a single key property (validated in the constructor) + property.StorageName = this.Options.ReservedKeyStorageName; + return; + } + + if (storageName is null) + { + return; + } + + // If a custom serializer is used (e.g. JsonSerializer), it would ignore our own attributes/config, and + // our model needs to be in sync with the serializer's behavior (for e.g. storage names in filters). + // So we ignore the config here as well. + // TODO: Consider throwing here instead of ignoring + if (this.Options.UsesExternalSerializer) + { + return; + } + + property.StorageName = storageName; + } + + /// + /// Extension hook for connectors to be able to customize the model. + /// + protected virtual void Customize() + { + } + + /// + /// Validates the model after all properties have been processed. + /// + protected virtual void Validate(Type type) + { + if (!this.Options.UsesExternalSerializer && type.GetConstructor(Type.EmptyTypes) is null) + { + throw new NotSupportedException($"Type '{type.Name}' must have a parameterless constructor."); + } + + if (!this.Options.SupportsMultipleKeys && this.KeyProperties.Count > 1) + { + throw new NotSupportedException($"Multiple key properties found on type '{type.Name}' or the provided {nameof(VectorStoreRecordDefinition)} while only one is supported."); + } + + if (this.KeyProperties.Count == 0) + { + throw new NotSupportedException($"No key property found on type '{type.Name}' or the provided {nameof(VectorStoreRecordDefinition)} while at least one is required."); + } + + if (this.Options.RequiresAtLeastOneVector && this.VectorProperties.Count == 0) + { + throw new NotSupportedException($"No vector property found on type '{type.Name}' or the provided {nameof(VectorStoreRecordDefinition)} while at least one is required."); + } + + if (!this.Options.SupportsMultipleVectors && this.VectorProperties.Count > 1) + { + throw new NotSupportedException($"Multiple vector properties found on type '{type.Name}' or the provided {nameof(VectorStoreRecordDefinition)} while only one is supported."); + } + + var storageNameMap = new Dictionary(); + + foreach (var property in this.PropertyMap.Values) + { + this.ValidateProperty(property); + + if (storageNameMap.TryGetValue(property.StorageName, out var otherproperty)) + { + throw new InvalidOperationException($"Property '{property.ModelName}' is being mapped to storage name '{property.StorageName}', but property '{otherproperty.ModelName}' is already mapped to the same storage name."); + } + + storageNameMap[property.StorageName] = property; + } + } + + /// + /// Validates a single property, performing validation on it. 
+ /// + protected virtual void ValidateProperty(VectorStoreRecordPropertyModel propertyModel) + { + var type = propertyModel.Type; + + if (type.IsGenericType && Nullable.GetUnderlyingType(type) is Type underlyingType) + { + type = underlyingType; + } + + switch (propertyModel) + { + case VectorStoreRecordKeyPropertyModel keyProperty: + if (this.Options.SupportedKeyPropertyTypes is not null) + { + ValidatePropertyType(propertyModel.ModelName, type, "Key", this.Options.SupportedKeyPropertyTypes); + } + break; + + case VectorStoreRecordDataPropertyModel dataProperty: + if (this.Options.SupportedDataPropertyTypes is not null) + { + ValidatePropertyType(propertyModel.ModelName, type, "Data", this.Options.SupportedDataPropertyTypes, this.Options.SupportedEnumerableDataPropertyElementTypes); + } + break; + + case VectorStoreRecordVectorPropertyModel vectorProperty: + if (this.Options.SupportedVectorPropertyTypes is not null) + { + ValidatePropertyType(propertyModel.ModelName, type, "Vector", this.Options.SupportedVectorPropertyTypes); + } + + if (vectorProperty.Dimensions <= 0) + { + throw new InvalidOperationException($"Vector property '{propertyModel.ModelName}' must have a positive number of dimensions."); + } + + break; + + default: + throw new UnreachableException(); + } + } + + private static void ValidatePropertyType(string propertyName, Type propertyType, string propertyCategoryDescription, HashSet supportedTypes, HashSet? supportedEnumerableElementTypes = null) + { + // Add shortcut before testing all the more expensive scenarios. + if (supportedTypes.Contains(propertyType)) + { + return; + } + + // Check all collection scenarios and get stored type. + if (supportedEnumerableElementTypes?.Count > 0 && IsSupportedEnumerableType(propertyType)) + { + var typeToCheck = GetCollectionElementType(propertyType); + + if (!supportedEnumerableElementTypes.Contains(typeToCheck)) + { + var supportedEnumerableElementTypesString = string.Join(", ", supportedEnumerableElementTypes!.Select(t => t.FullName)); + throw new NotSupportedException($"Enumerable {propertyCategoryDescription} properties must have one of the supported element types: {supportedEnumerableElementTypesString}. Element type of the property '{propertyName}' is {typeToCheck.FullName}."); + } + } + else + { + // if we got here, we know the type is not supported + var supportedTypesString = string.Join(", ", supportedTypes.Select(t => t.FullName)); + var supportedEnumerableTypesString = supportedEnumerableElementTypes is { Count: > 0 } ? string.Join(", ", supportedEnumerableElementTypes.Select(t => t.FullName)) : null; + throw new NotSupportedException($""" + Property '{propertyName}' has unsupported type '{propertyType.Name}'. + {propertyCategoryDescription} properties must be one of the supported types: {supportedTypesString}{(supportedEnumerableElementTypes is null ? "" : ", or a collection type over: " + supportedEnumerableElementTypes)}. 
+ """); + } + } + + private static bool IsSupportedEnumerableType(Type type) + { + if (type.IsArray || type == typeof(IEnumerable)) + { + return true; + } + +#if NET6_0_OR_GREATER + if (typeof(IList).IsAssignableFrom(type) && type.GetMemberWithSameMetadataDefinitionAs(s_objectGetDefaultConstructorInfo) != null) +#else + if (typeof(IList).IsAssignableFrom(type) && type.GetConstructor(Type.EmptyTypes) != null) +#endif + { + return true; + } + + if (type.IsGenericType) + { + var genericTypeDefinition = type.GetGenericTypeDefinition(); + if (genericTypeDefinition == typeof(ICollection<>) || + genericTypeDefinition == typeof(IEnumerable<>) || + genericTypeDefinition == typeof(IList<>) || + genericTypeDefinition == typeof(IReadOnlyCollection<>) || + genericTypeDefinition == typeof(IReadOnlyList<>)) + { + return true; + } + } + + return false; + } + + private static Type GetCollectionElementType(Type collectionType) + { + return collectionType switch + { + IEnumerable => typeof(object), + var enumerableType when GetGenericEnumerableInterface(enumerableType) is Type enumerableInterface => enumerableInterface.GetGenericArguments()[0], + var arrayType when arrayType.IsArray => arrayType.GetElementType()!, + _ => collectionType + }; + } + + [UnconditionalSuppressMessage("ReflectionAnalysis", "IL2070:UnrecognizedReflectionPattern", + Justification = "The 'IEnumerable<>' Type must exist and so trimmer kept it. In which case " + + "It also kept it on any type which implements it. The below call to GetInterfaces " + + "may return fewer results when trimmed but it will return 'IEnumerable<>' " + + "if the type implemented it, even after trimming.")] + private static Type? GetGenericEnumerableInterface(Type type) + { + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(IEnumerable<>)) + { + return type; + } + + foreach (Type typeToCheck in type.GetInterfaces()) + { + if (typeToCheck.IsGenericType && typeToCheck.GetGenericTypeDefinition() == typeof(IEnumerable<>)) + { + return typeToCheck; + } + } + + return null; + } + +#if NET6_0_OR_GREATER + private static readonly ConstructorInfo s_objectGetDefaultConstructorInfo = typeof(object).GetConstructor(Type.EmptyTypes)!; +#endif +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuildingOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuildingOptions.cs new file mode 100644 index 000000000000..8ef6e5779ce9 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuildingOptions.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.Extensions.VectorData.ConnectorSupport; + +/// +/// Contains options affecting model building; passed to . +/// This is an internal support type meant for use by connectors only, and not for use by applications. +/// +[Experimental("MEVD9001")] +public sealed class VectorStoreRecordModelBuildingOptions +{ + /// + /// Whether multiple key properties are supported. + /// + public required bool SupportsMultipleKeys { get; init; } + + /// + /// Whether multiple vector properties are supported. + /// + public required bool SupportsMultipleVectors { get; init; } + + /// + /// Whether at least one vector property is required. 
+ /// + public required bool RequiresAtLeastOneVector { get; init; } + + /// + /// The set of types that are supported as key properties. + /// + public required HashSet? SupportedKeyPropertyTypes { get; init; } + + /// + /// The set of types that are supported as data properties. + /// + public required HashSet? SupportedDataPropertyTypes { get; init; } + + /// + /// The set of element types that are supported within collection types in data properties. + /// + public required HashSet? SupportedEnumerableDataPropertyElementTypes { get; init; } + + /// + /// The set of types that are supported as vector properties. + /// + public required HashSet? SupportedVectorPropertyTypes { get; init; } + + /// + /// Indicates that an external serializer will be used (e.g. System.Text.Json). + /// + public bool UsesExternalSerializer { get; init; } + + /// + /// Indicates that the database requires the key property to have a special, reserved name. + /// When set, the model builder will manage the key storage name, and users may not customize it. + /// + public string? ReservedKeyStorageName { get; init; } +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordPropertyModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordPropertyModel.cs new file mode 100644 index 000000000000..7787cda907bd --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordPropertyModel.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Reflection; + +namespace Microsoft.Extensions.VectorData.ConnectorSupport; + +/// +/// Represents a property on a vector store record. +/// This is an internal support type meant for use by connectors only, and not for use by applications. +/// +[Experimental("MEVD9001")] +public abstract class VectorStoreRecordPropertyModel(string modelName, Type type) +{ + private string? _storageName; + + /// + /// The model name of the property. If the property corresponds to a .NET property, this name is the name of that property. + /// + public string ModelName { get; set; } = modelName; + + /// + /// The storage name of the property. This is the name to which the property is mapped in the vector store. + /// + public string StorageName + { + get => this._storageName ?? this.ModelName; + set => this._storageName = value; + } + + // See comment in VectorStoreJsonModelBuilder + // TODO: Spend more time thinking about this, there may be a less hacky way to handle it. + + /// + /// A temporary storage name for the property, for use during the serialization process by certain connectors. + /// + [Experimental("MEVD9001")] + public string? TemporaryStorageName { get; set; } + + /// + /// The CLR type of the property. + /// + public Type Type { get; set; } = type; + + /// + /// The reflection for the .NET property. + /// when using dynamic mapping. + /// + public PropertyInfo? PropertyInfo { get; set; } + + /// + /// Reads the property from the given , returning the value as an . + /// + // TODO: Temporary, remove virtual once we move to Dictionary as the dynamic representation + public virtual object? 
GetValueAsObject(object record) + { + if (this.PropertyInfo is not null) + { + // We have a .NET property (non-dynamic POCO mapping) + + // TODO: Implement compiled delegates for better performance, #11122 + // TODO: Implement source-generated accessors for NativeAOT, #10256 + + return this.PropertyInfo.GetValue(record); + } + + throw new UnreachableException("Must be overridden by derived class (for now)."); + } + + /// + /// Writes the property from the given , accepting the value to write as an . + /// s + public virtual void SetValueAsObject(object record, object? value) + { + if (this.PropertyInfo is not null) + { + // We have a .NET property (non-dynamic POCO mapping) + + // TODO: Implement compiled delegates for better performance, #11122 + // TODO: Implement source-generated accessors for NativeAOT, #10256 + + // If the value is null, no need to set the property (it's the CLR default) + if (value is not null) + { + this.PropertyInfo.SetValue(record, value); + } + + return; + } + + throw new UnreachableException("Must be overridden by derived class (for now)."); + } + + // TODO: implement the generic accessors to avoid boxing, and make use of them in connectors +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs new file mode 100644 index 000000000000..333003655646 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.Extensions.VectorData.ConnectorSupport; + +/// +/// Represents a vector property on a vector store record. +/// This is an internal support type meant for use by connectors only, and not for use by applications. +/// +[Experimental("MEVD9001")] +public class VectorStoreRecordVectorPropertyModel(string modelName, Type type) : VectorStoreRecordPropertyModel(modelName, type) +{ + /// + /// The number of dimensions that the vector has. + /// + /// + /// This property is required when creating collections, but can be omitted if not using that functionality. + /// If not provided when trying to create a collection, create will fail. + /// + public int? Dimensions { get; set; } + + /// + /// The kind of index to use. + /// + /// + /// The default varies by database type. See the documentation of your chosen database connector for more information. + /// + /// + public string? IndexKind { get; set; } + + /// + /// The distance function to use when comparing vectors. + /// + /// + /// The default varies by database type. See the documentation of your chosen database connector for more information. + /// + /// + public string? DistanceFunction { get; set; } + + /// + // TODO: Temporary, remove once we move to Dictionary as the dynamic representation + public override object? GetValueAsObject(object record) + { + if (this.PropertyInfo is null) + { + var type = record.GetType(); + + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) + { + var vectorProperty = type.GetProperty("Vectors")!; + var dictionary = (Dictionary)vectorProperty.GetValue(record)!; + return dictionary.TryGetValue(this.ModelName, out var value) + ? 
value + : null; + } + } + + return base.GetValueAsObject(record); + } + + /// + // TODO: Temporary, remove once we move to Dictionary as the dynamic representation + public override void SetValueAsObject(object record, object? value) + { + if (this.PropertyInfo is null) + { + var type = record.GetType(); + + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) + { + var vectorProperty = type.GetProperty("Vectors")!; + var dictionary = (Dictionary)vectorProperty.GetValue(record)!; + dictionary[this.ModelName] = value; + return; + } + } + + base.SetValueAsObject(record, value); + } + + /// + public override string ToString() + => $"{this.ModelName} (Vector, {this.Type.Name})"; +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj b/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj index 873fc5d455b6..5b8a57c7ed74 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj @@ -30,12 +30,13 @@ Microsoft.Extensions.VectorData.IVectorStoreRecordCollection<TKey, TRecord> https://dot.net/ - + + - - + + @@ -43,4 +44,17 @@ Microsoft.Extensions.VectorData.IVectorStoreRecordCollection<TKey, TRecord> + + + + + + + + + + + + + diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs index 6ab9ee119e55..4e52243d6997 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs @@ -8,13 +8,29 @@ namespace Microsoft.Extensions.VectorData; /// Represents a generic data model that can be used to store and retrieve any data from a vector store. /// /// The data type of the record key. -/// The key of the record. -public sealed class VectorStoreGenericDataModel(TKey key) +public sealed class VectorStoreGenericDataModel { + /// + /// Constructs a new . + /// +#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider adding the 'required' modifier or declaring as nullable. + public VectorStoreGenericDataModel() +#pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider adding the 'required' modifier or declaring as nullable. + { + } + + /// + /// Constructs a new . + /// + public VectorStoreGenericDataModel(TKey key) + { + this.Key = key; + } + /// /// Gets or sets the key of the record. /// - public TKey Key { get; set; } = key; + public TKey Key { get; set; } /// /// Gets or sets a dictionary of data items stored in the record. diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreFixture.cs index 1af8bbbe6863..a5b2fddc729e 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreFixture.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. 
using System; +using System.Net.Http; using System.Text.Json; using System.Threading.Tasks; using Microsoft.Azure.Cosmos; @@ -27,7 +28,14 @@ public AzureCosmosDBNoSQLVectorStoreFixture() throw new ArgumentNullException($"{connectionString} string is not configured"); } - var options = new CosmosClientOptions { UseSystemTextJsonSerializerWithOptions = JsonSerializerOptions.Default }; + var options = new CosmosClientOptions + { + UseSystemTextJsonSerializerWithOptions = JsonSerializerOptions.Default, + ConnectionMode = ConnectionMode.Gateway, +#pragma warning disable CA5400 // HttpClient may be created without enabling CheckCertificateRevocationList + HttpClientFactory = () => new HttpClient(new HttpClientHandler { ServerCertificateCustomValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator }) +#pragma warning restore CA5400 // HttpClient may be created without enabling CheckCertificateRevocationList + }; this._cosmosClient = new CosmosClient(connectionString, options); } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs index 64646640b1df..d384089d0ad7 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs @@ -380,7 +380,7 @@ public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() { "HotelName", "Generic Mapper Hotel" }, { "Description", "This is a generic mapper hotel" }, { "Tags", new List { "generic" } }, - { "parking_is_included", false }, + { "ParkingIncluded", false }, { "Timestamp", new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero) }, { "HotelRating", 3.6f } }, @@ -399,7 +399,7 @@ public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); Assert.Equal(new List { "generic" }, localGetResult.Data["Tags"]); - Assert.False((bool?)localGetResult.Data["parking_is_included"]); + Assert.False((bool?)localGetResult.Data["ParkingIncluded"]); Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), localGetResult.Data["Timestamp"]); Assert.Equal(3.6f, localGetResult.Data["HotelRating"]); Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); @@ -474,7 +474,7 @@ private VectorStoreRecordDefinition GetTestHotelRecordDefinition() new VectorStoreRecordKeyProperty("HotelId", typeof(Guid)), new VectorStoreRecordDataProperty("HotelName", typeof(string)), new VectorStoreRecordDataProperty("HotelCode", typeof(int)), - new VectorStoreRecordDataProperty("parking_is_included", typeof(bool)), + new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool)), new VectorStoreRecordDataProperty("HotelRating", typeof(float)), new VectorStoreRecordDataProperty("Tags", typeof(List)), new VectorStoreRecordDataProperty("Description", typeof(string)), diff --git a/dotnet/src/IntegrationTests/README.md b/dotnet/src/IntegrationTests/README.md index 90a65b9d7531..cc84d8609e57 100644 --- a/dotnet/src/IntegrationTests/README.md +++ b/dotnet/src/IntegrationTests/README.md @@ -4,7 +4,7 @@ 1. 
**Azure OpenAI**: go to the [Azure OpenAI Quickstart](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/quickstart) 1. Deploy the following models: - 1. `dall-e-3` DALL-E 3 generates images and is used in Text to Image tests. + 1. `dall-e-3` DALL-E 3 generates images and is used in Text to Image tests. 1. `tts` TTS is a model that converts text to natural sounding speech and is used in Text to Audio tests. 1. `whisper` The Whisper models are trained for speech recognition and translation tasks and is used in Audio to Text tests. 1. `text-embedding-ada-002` Text Embedding Ada 002 is used in Text Embedding tests. diff --git a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBConstants.cs b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBConstants.cs index 7acd839dd0e3..456ab0b44e1a 100644 --- a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBConstants.cs +++ b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBConstants.cs @@ -44,20 +44,13 @@ internal static class MongoDBConstants internal static readonly HashSet SupportedDataTypes = [ typeof(bool), - typeof(bool?), typeof(string), typeof(int), - typeof(int?), typeof(long), - typeof(long?), typeof(float), - typeof(float?), typeof(double), - typeof(double?), typeof(decimal), - typeof(decimal?), typeof(DateTime), - typeof(DateTime?), ]; /// A containing the supported vector types. diff --git a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBGenericDataModelMapper.cs b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBGenericDataModelMapper.cs index 8ec0dffb935c..ea48950a8a9a 100644 --- a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBGenericDataModelMapper.cs +++ b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBGenericDataModelMapper.cs @@ -3,9 +3,11 @@ using System; using System.Collections; using System.Collections.Generic; +using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Linq; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using MongoDB.Bson; namespace Microsoft.SemanticKernel.Connectors.MongoDB; @@ -14,22 +16,8 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within MongoDB. /// [ExcludeFromCodeCoverage] -internal sealed class MongoDBGenericDataModelMapper : IVectorStoreRecordMapper, BsonDocument> +internal sealed class MongoDBGenericDataModelMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper, BsonDocument> { - /// A that defines the schema of the data in the database. - private readonly VectorStoreRecordDefinition _vectorStoreRecordDefinition; - - /// - /// Initializes a new instance of the class. - /// - /// A that defines the schema of the data in the database. - public MongoDBGenericDataModelMapper(VectorStoreRecordDefinition vectorStoreRecordDefinition) - { - Verify.NotNull(vectorStoreRecordDefinition); - - this._vectorStoreRecordDefinition = vectorStoreRecordDefinition; - } - /// public BsonDocument MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) { @@ -38,27 +26,30 @@ public BsonDocument MapFromDataToStorageModel(VectorStoreGenericDataModel MapFromStorageToDataModel(BsonDocumen var vectorProperties = new Dictionary(); // Loop through all known properties and map each from the storage model to the data model. 
- foreach (var property in this._vectorStoreRecordDefinition.Properties) + foreach (var property in model.Properties) { - var storagePropertyName = property.StoragePropertyName ?? property.DataModelPropertyName; - - if (property is VectorStoreRecordKeyProperty keyProperty) + switch (property) { - if (storageModel.TryGetValue(MongoDBConstants.MongoReservedKeyPropertyName, out var keyValue)) - { - key = keyValue.AsString; - } - } - else if (property is VectorStoreRecordDataProperty dataProperty) - { - if (!storageModel.TryGetValue(storagePropertyName, out var dataValue)) - { + case VectorStoreRecordKeyPropertyModel keyProperty: + if (storageModel.TryGetValue(MongoDBConstants.MongoReservedKeyPropertyName, out var keyValue)) + { + key = keyValue.AsString; + } continue; - } - dataProperties.Add(dataProperty.DataModelPropertyName, GetDataPropertyValue(property.DataModelPropertyName, property.PropertyType, dataValue)); - } - else if (property is VectorStoreRecordVectorProperty vectorProperty && options.IncludeVectors) - { - if (!storageModel.TryGetValue(storagePropertyName, out var vectorValue)) - { + case VectorStoreRecordDataPropertyModel dataProperty: + if (storageModel.TryGetValue(dataProperty.StorageName, out var dataValue)) + { + dataProperties.Add(dataProperty.ModelName, GetDataPropertyValue(property.ModelName, property.Type, dataValue)); + } + continue; + + case VectorStoreRecordVectorPropertyModel vectorProperty: + if (storageModel.TryGetValue(vectorProperty.StorageName, out var vectorValue)) + { + vectorProperties.Add(vectorProperty.ModelName, GetVectorPropertyValue(property.ModelName, property.Type, vectorValue)); + } continue; - } - vectorProperties.Add(vectorProperty.DataModelPropertyName, GetVectorPropertyValue(property.DataModelPropertyName, property.PropertyType, vectorValue)); + default: + throw new UnreachableException(); } } diff --git a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBModelBuilder.cs b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBModelBuilder.cs new file mode 100644 index 000000000000..447b0d0ee939 --- /dev/null +++ b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBModelBuilder.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Reflection; +using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; +using MongoDB.Bson.Serialization.Attributes; + +namespace Microsoft.SemanticKernel.Connectors.MongoDB; + +/// +/// Customized MongoDB model builder that adds specialized configuration of property storage names +/// (Mongo's reserve key property name and [BsonElement]). +/// +internal class MongoDBModelBuilder() : VectorStoreRecordModelBuilder(s_validationOptions) +{ + private static readonly VectorStoreRecordModelBuildingOptions s_validationOptions = new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + UsesExternalSerializer = true, + + SupportedKeyPropertyTypes = MongoDBConstants.SupportedKeyTypes, + SupportedDataPropertyTypes = MongoDBConstants.SupportedDataTypes, + SupportedEnumerableDataPropertyElementTypes = MongoDBConstants.SupportedDataTypes, + SupportedVectorPropertyTypes = MongoDBConstants.SupportedVectorTypes + }; + + protected override void ProcessTypeProperties(Type type, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition) + { + base.ProcessTypeProperties(type, vectorStoreRecordDefinition); + + foreach (var property in this.Properties) + { + if (property.PropertyInfo?.GetCustomAttribute() is { } bsonElementAttribute) + { + property.StorageName = bsonElementAttribute.ElementName; + } + } + } +} diff --git a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs index 2ddb4f594fd7..e2d52adfea61 100644 --- a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs +++ b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs @@ -4,6 +4,7 @@ using System.Diagnostics.CodeAnalysis; using System.Reflection; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using MongoDB.Bson; using MongoDB.Bson.Serialization; using MongoDB.Bson.Serialization.Attributes; @@ -15,23 +16,20 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; internal sealed class MongoDBVectorStoreRecordMapper : IVectorStoreRecordMapper { /// A key property info of the data model. - private readonly PropertyInfo _keyProperty; + private readonly PropertyInfo? _keyClrProperty; /// A key property name of the data model. - private readonly string _keyPropertyName; + private readonly string _keyPropertyModelName; /// /// Initializes a new instance of the class. /// - /// A helper to access property information for the current data model and record definition. - public MongoDBVectorStoreRecordMapper(VectorStoreRecordPropertyReader propertyReader) + /// The model. + public MongoDBVectorStoreRecordMapper(VectorStoreRecordModel model) { - propertyReader.VerifyKeyProperties(MongoDBConstants.SupportedKeyTypes); - propertyReader.VerifyDataProperties(MongoDBConstants.SupportedDataTypes, supportEnumerable: true); - propertyReader.VerifyVectorProperties(MongoDBConstants.SupportedVectorTypes); - - this._keyPropertyName = propertyReader.KeyPropertyName; - this._keyProperty = propertyReader.KeyPropertyInfo; + var keyProperty = model.KeyProperty; + this._keyPropertyModelName = keyProperty.ModelName; + this._keyClrProperty = keyProperty.PropertyInfo; var conventionPack = new ConventionPack { @@ -51,9 +49,9 @@ public BsonDocument MapFromDataToStorageModel(TRecord dataModel) // Handle key property mapping due to reserved key name in Mongo. if (!document.Contains(MongoDBConstants.MongoReservedKeyPropertyName)) { - var value = document[this._keyPropertyName]; + var value = document[this._keyPropertyModelName]; - document.Remove(this._keyPropertyName); + document.Remove(this._keyPropertyModelName); document[MongoDBConstants.MongoReservedKeyPropertyName] = value; } @@ -64,14 +62,14 @@ public BsonDocument MapFromDataToStorageModel(TRecord dataModel) public TRecord MapFromStorageToDataModel(BsonDocument storageModel, StorageToDataModelMapperOptions options) { // Handle key property mapping due to reserved key name in Mongo. 
- if (!this._keyPropertyName.Equals(MongoDBConstants.DataModelReservedKeyPropertyName, StringComparison.OrdinalIgnoreCase) && - this._keyProperty.GetCustomAttribute() is null) + if (!this._keyPropertyModelName.Equals(MongoDBConstants.DataModelReservedKeyPropertyName, StringComparison.OrdinalIgnoreCase) && + this._keyClrProperty?.GetCustomAttribute() is null) { var value = storageModel[MongoDBConstants.MongoReservedKeyPropertyName]; storageModel.Remove(MongoDBConstants.MongoReservedKeyPropertyName); - storageModel[this._keyPropertyName] = value; + storageModel[this._keyPropertyModelName] = value; } return BsonSerializer.Deserialize(storageModel); diff --git a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordMapping.cs b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordMapping.cs index f5b39e396171..cd78fddb4be4 100644 --- a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordMapping.cs +++ b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordMapping.cs @@ -5,7 +5,6 @@ using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Linq; -using System.Reflection; namespace Microsoft.Extensions.VectorData; @@ -15,83 +14,6 @@ namespace Microsoft.Extensions.VectorData; [ExcludeFromCodeCoverage] internal static class VectorStoreRecordMapping { - /// - /// Loop through the list of objects and for each one look up the storage name - /// in the and check if the value exists in the . - /// If so, set the value on the record object. - /// - /// The type of the storage properties. - /// The type of the target object. - /// The target object to set the property values on. - /// objects listing the properties on the data model to get values for. - /// Storage property names keyed by data property names. - /// A dictionary of storage values by storage property name. - /// An optional function to convert the storage property values to data property values. - public static void SetValuesOnProperties( - TRecord record, - IEnumerable dataModelPropertiesInfo, - IReadOnlyDictionary dataModelToStorageNameMapping, - IReadOnlyDictionary storageValues, - Func? storageValueConverter = null) - { - var propertiesInfoWithValues = BuildPropertiesInfoWithValues( - dataModelPropertiesInfo, - dataModelToStorageNameMapping, - storageValues, - storageValueConverter); - - SetPropertiesOnRecord(record, propertiesInfoWithValues); - } - - /// - /// Build a list of properties with their values from the given data model properties and storage values. - /// - /// The type of the storage properties. - /// objects listing the properties on the data model to get values for. - /// Storage property names keyed by data property names. - /// A dictionary of storage values by storage property name. - /// An optional function to convert the storage property values to data property values. - /// The list of data property objects and their values. - public static IEnumerable> BuildPropertiesInfoWithValues( - IEnumerable dataModelPropertiesInfo, - IReadOnlyDictionary dataModelToStorageNameMapping, - IReadOnlyDictionary storageValues, - Func? 
storageValueConverter = null) - { - foreach (var propertyInfo in dataModelPropertiesInfo) - { - if (dataModelToStorageNameMapping.TryGetValue(propertyInfo.Name, out var storageName) && - storageValues.TryGetValue(storageName, out var storageValue)) - { - if (storageValueConverter is not null) - { - var convertedStorageValue = storageValueConverter(storageValue, propertyInfo.PropertyType); - yield return new KeyValuePair(propertyInfo, convertedStorageValue); - } - else - { - yield return new KeyValuePair(propertyInfo, (object?)storageValue); - } - } - } - } - - /// - /// Set the given list of properties with their values on the given object. - /// - /// The type of the target object. - /// The target object to set the property values on. - /// A list of properties and their values to set. - public static void SetPropertiesOnRecord( - TRecord record, - IEnumerable> propertiesInfoWithValues) - { - foreach (var propertyInfoWithValue in propertiesInfoWithValues) - { - propertyInfoWithValue.Key.SetValue(record, propertyInfoWithValue.Value); - } - } - /// /// Create an enumerable of the required type from the input enumerable. /// diff --git a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs deleted file mode 100644 index d259a1ac0f4f..000000000000 --- a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs +++ /dev/null @@ -1,806 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using System.Linq.Expressions; -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace Microsoft.Extensions.VectorData; - -/// -/// Contains helpers for reading vector store model properties and their attributes. -/// -[ExcludeFromCodeCoverage] -#pragma warning disable CA1812 // Used in some projects but not all, so need to suppress to avoid warnings in those it's not used in. -internal sealed class VectorStoreRecordPropertyReader -#pragma warning restore CA1812 -{ - /// The of the data model. - [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicConstructors)] - private readonly Type _dataModelType; - - /// A definition of the current storage model. - private readonly VectorStoreRecordDefinition _vectorStoreRecordDefinition; - - /// Options for configuring the behavior of this class. - private readonly VectorStoreRecordPropertyReaderOptions _options; - - /// The key properties from the definition. - private readonly List _keyProperties; - - /// The data properties from the definition. - private readonly List _dataProperties; - - /// The vector properties from the definition. - private readonly List _vectorProperties; - - /// The of the parameterless constructor from the data model if one exists. - private readonly Lazy _parameterlessConstructorInfo; - - /// The key objects from the data model. - private List? _keyPropertiesInfo; - - /// The data objects from the data model. - private List? _dataPropertiesInfo; - - /// The vector objects from the data model. - private List? _vectorPropertiesInfo; - - /// A lazy initialized map of data model property names to the names under which they are stored in the data store. - private readonly Lazy> _storagePropertyNamesMap; - - /// A lazy initialized list of storage names of key properties. 
- private readonly Lazy> _keyPropertyStoragePropertyNames; - - /// A lazy initialized list of storage names of data properties. - private readonly Lazy> _dataPropertyStoragePropertyNames; - - /// A lazy initialized list of storage names of vector properties. - private readonly Lazy> _vectorPropertyStoragePropertyNames; - - /// A lazy initialized map of data model property names to the names they will have if serialized to JSON. - private readonly Lazy> _jsonPropertyNamesMap; - - /// A lazy initialized list of json names of key properties. - private readonly Lazy> _keyPropertyJsonNames; - - /// A lazy initialized list of json names of data properties. - private readonly Lazy> _dataPropertyJsonNames; - - /// A lazy initialized list of json names of vector properties. - private readonly Lazy> _vectorPropertyJsonNames; - - public VectorStoreRecordPropertyReader( - [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicConstructors)] Type dataModelType, - VectorStoreRecordDefinition? vectorStoreRecordDefinition, - VectorStoreRecordPropertyReaderOptions? options) - { - this._dataModelType = dataModelType; - this._options = options ?? new VectorStoreRecordPropertyReaderOptions(); - - // If a definition is provided, use it. Otherwise, create one from the type. - if (vectorStoreRecordDefinition is not null) - { - // Here we received a definition, which gives us all of the information we need. - // Some mappers though need to set properties on the data model using reflection - // so we may still need to find the PropertyInfo objects on the data model later if required. - this._vectorStoreRecordDefinition = vectorStoreRecordDefinition; - } - else - { - // Here we didn't receive a definition, so we need to derive the information from - // the data model. Since we may need the PropertyInfo objects later to read or write - // property values on the data model, we save them for later in case we need them. - var propertiesInfo = FindPropertiesInfo(dataModelType); - this._vectorStoreRecordDefinition = CreateVectorStoreRecordDefinitionFromType(propertiesInfo); - - this._keyPropertiesInfo = propertiesInfo.KeyProperties; - this._dataPropertiesInfo = propertiesInfo.DataProperties; - this._vectorPropertiesInfo = propertiesInfo.VectorProperties; - } - - // Verify the definition to make sure it does not have too many or too few of each property type. - (this._keyProperties, this._dataProperties, this._vectorProperties) = SplitDefinitionAndVerify( - dataModelType.Name, - this._vectorStoreRecordDefinition, - this._options.SupportsMultipleKeys, - this._options.SupportsMultipleVectors, - this._options.RequiresAtLeastOneVector); - - // Setup lazy initializers. 
- this._storagePropertyNamesMap = new Lazy>(() => - { - return BuildPropertyNameToStorageNameMap((this._keyProperties, this._dataProperties, this._vectorProperties)); - }); - - this._parameterlessConstructorInfo = new Lazy(() => - { - var constructor = dataModelType.GetConstructor(Type.EmptyTypes); - if (constructor == null) - { - throw new ArgumentException($"Type {dataModelType.FullName} must have a parameterless constructor."); - } - - return constructor; - }); - - this._keyPropertyStoragePropertyNames = new Lazy>(() => - { - var storagePropertyNames = this._storagePropertyNamesMap.Value; - return this._keyProperties.Select(x => storagePropertyNames[x.DataModelPropertyName]).ToList(); - }); - - this._dataPropertyStoragePropertyNames = new Lazy>(() => - { - var storagePropertyNames = this._storagePropertyNamesMap.Value; - return this._dataProperties.Select(x => storagePropertyNames[x.DataModelPropertyName]).ToList(); - }); - - this._vectorPropertyStoragePropertyNames = new Lazy>(() => - { - var storagePropertyNames = this._storagePropertyNamesMap.Value; - return this._vectorProperties.Select(x => storagePropertyNames[x.DataModelPropertyName]).ToList(); - }); - - this._jsonPropertyNamesMap = new Lazy>(() => - { - return BuildPropertyNameToJsonPropertyNameMap( - (this._keyProperties, this._dataProperties, this._vectorProperties), - dataModelType, - this._options?.JsonSerializerOptions); - }); - - this._keyPropertyJsonNames = new Lazy>(() => - { - var jsonPropertyNamesMap = this._jsonPropertyNamesMap.Value; - return this._keyProperties.Select(x => jsonPropertyNamesMap[x.DataModelPropertyName]).ToList(); - }); - - this._dataPropertyJsonNames = new Lazy>(() => - { - var jsonPropertyNamesMap = this._jsonPropertyNamesMap.Value; - return this._dataProperties.Select(x => jsonPropertyNamesMap[x.DataModelPropertyName]).ToList(); - }); - - this._vectorPropertyJsonNames = new Lazy>(() => - { - var jsonPropertyNamesMap = this._jsonPropertyNamesMap.Value; - return this._vectorProperties.Select(x => jsonPropertyNamesMap[x.DataModelPropertyName]).ToList(); - }); - } - - /// Gets the record definition of the current storage model. - public VectorStoreRecordDefinition RecordDefinition => this._vectorStoreRecordDefinition; - - /// Gets the list of properties from the record definition. - public IReadOnlyList Properties => this._vectorStoreRecordDefinition.Properties; - - /// Gets the first object from the record definition that was provided or that was generated from the data model. - public VectorStoreRecordKeyProperty KeyProperty => this._keyProperties[0]; - - /// Gets all objects from the record definition that was provided or that was generated from the data model. - public IReadOnlyList KeyProperties => this._keyProperties; - - /// Gets all objects from the record definition that was provided or that was generated from the data model. - public IReadOnlyList DataProperties => this._dataProperties; - - /// Gets the first objects from the record definition that was provided or that was generated from the data model. - public VectorStoreRecordVectorProperty? VectorProperty => this._vectorProperties.Count > 0 ? this._vectorProperties[0] : null; - - /// Gets all objects from the record definition that was provided or that was generated from the data model. - public IReadOnlyList VectorProperties => this._vectorProperties; - - /// Gets the parameterless constructor if one exists, throws otherwise. 
- public ConstructorInfo ParameterLessConstructorInfo => this._parameterlessConstructorInfo.Value; - - /// Gets the first key property info object. - public PropertyInfo KeyPropertyInfo - { - get - { - this.LoadPropertyInfoIfNeeded(); - return this._keyPropertiesInfo![0]; - } - } - - /// Gets the key property info objects. - public IReadOnlyList KeyPropertiesInfo - { - get - { - this.LoadPropertyInfoIfNeeded(); - return this._keyPropertiesInfo!; - } - } - - /// Gets the data property info objects. - public IReadOnlyList DataPropertiesInfo - { - get - { - this.LoadPropertyInfoIfNeeded(); - return this._dataPropertiesInfo!; - } - } - - /// Gets the vector property info objects. - public IReadOnlyList VectorPropertiesInfo - { - get - { - this.LoadPropertyInfoIfNeeded(); - return this._vectorPropertiesInfo!; - } - } - - /// Gets the name of the first vector property in the definition or null if there are no vectors. - public string? FirstVectorPropertyName => this._vectorProperties.FirstOrDefault()?.DataModelPropertyName; - - /// Gets the first vector PropertyInfo object in the data model or null if there are no vectors. - public PropertyInfo? FirstVectorPropertyInfo => this.VectorPropertiesInfo.Count > 0 ? this.VectorPropertiesInfo[0] : null; - - /// Gets the property name of the first key property in the definition. - public string KeyPropertyName => this._keyProperties[0].DataModelPropertyName; - - /// Gets the storage name of the first key property in the definition. - public string KeyPropertyStoragePropertyName => this._keyPropertyStoragePropertyNames.Value[0]; - - /// Gets the storage names of all the properties in the definition. - public IReadOnlyDictionary StoragePropertyNamesMap => this._storagePropertyNamesMap.Value; - - /// Gets the storage names of the key properties in the definition. - public IReadOnlyList KeyPropertyStoragePropertyNames => this._keyPropertyStoragePropertyNames.Value; - - /// Gets the storage names of the data properties in the definition. - public IReadOnlyList DataPropertyStoragePropertyNames => this._dataPropertyStoragePropertyNames.Value; - - /// Gets the storage name of the first vector property in the definition or null if there are no vectors. - public string? FirstVectorPropertyStoragePropertyName => this.FirstVectorPropertyName == null ? null : this.StoragePropertyNamesMap[this.FirstVectorPropertyName]; - - /// Gets the storage names of the vector properties in the definition. - public IReadOnlyList VectorPropertyStoragePropertyNames => this._vectorPropertyStoragePropertyNames.Value; - - /// Gets the json name of the first key property in the definition. - public string KeyPropertyJsonName => this.KeyPropertyJsonNames[0]; - - /// Gets the json names of the key properties in the definition. - public IReadOnlyList KeyPropertyJsonNames => this._keyPropertyJsonNames.Value; - - /// Gets the json names of the data properties in the definition. - public IReadOnlyList DataPropertyJsonNames => this._dataPropertyJsonNames.Value; - - /// Gets the json name of the first vector property in the definition or null if there are no vectors. - public string? FirstVectorPropertyJsonName => this.FirstVectorPropertyName == null ? null : this.JsonPropertyNamesMap[this.FirstVectorPropertyName]; - - /// Gets the json names of the vector properties in the definition. - public IReadOnlyList VectorPropertyJsonNames => this._vectorPropertyJsonNames.Value; - - /// A map of data model property names to the names they will have if serialized to JSON. 
- public IReadOnlyDictionary JsonPropertyNamesMap => this._jsonPropertyNamesMap.Value; - - /// Verify that the data model has a parameterless constructor. - public void VerifyHasParameterlessConstructor() - { - var constructorInfo = this._parameterlessConstructorInfo.Value; - } - - /// Verify that the types of the key properties fall within the provided set. - /// The list of supported types. - public void VerifyKeyProperties(HashSet supportedTypes) - { - VectorStoreRecordPropertyVerification.VerifyPropertyTypes(this._keyProperties, supportedTypes, "Key"); - } - - /// Verify that the types of the data properties fall within the provided set. - /// The list of supported types. - /// A value indicating whether enumerable types are supported where the element type is one of the supported types. - public void VerifyDataProperties(HashSet supportedTypes, bool supportEnumerable) - { - VectorStoreRecordPropertyVerification.VerifyPropertyTypes(this._dataProperties, supportedTypes, "Data", supportEnumerable); - } - - /// Verify that the types of the data properties fall within the provided set. - /// The list of supported types. - /// A value indicating whether enumerable types are supported where the element type is one of the supported types. - public void VerifyDataProperties(HashSet supportedTypes, HashSet supportedEnumerableElementTypes) - { - VectorStoreRecordPropertyVerification.VerifyPropertyTypes(this._dataProperties, supportedTypes, supportedEnumerableElementTypes, "Data"); - } - - /// Verify that the types of the vector properties fall within the provided set. - /// The list of supported types. - public void VerifyVectorProperties(HashSet supportedTypes) - { - VectorStoreRecordPropertyVerification.VerifyPropertyTypes(this._vectorProperties, supportedTypes, "Vector"); - } - - /// - /// Get the storage property name for the given data model property name. - /// - /// The data model property name for which to get the storage property name. - /// The storage property name. - public string GetStoragePropertyName(string dataModelPropertyName) - { - return this._storagePropertyNamesMap.Value[dataModelPropertyName]; - } - - /// - /// Get the name under which a property will be stored if serialized to JSON - /// - /// The data model property name for which to get the JSON name. - /// The JSON name. - public string GetJsonPropertyName(string dataModelPropertyName) - { - return this._jsonPropertyNamesMap.Value[dataModelPropertyName]; - } - - /// - /// Get the vector property with the provided name if a name is provided, and fall back - /// to a vector property in the schema if not. If no name is provided and there is more - /// than one vector property, an exception will be thrown. - /// - /// The search options. - /// Thrown if the provided property name is not a valid vector property name. - public VectorStoreRecordVectorProperty GetVectorPropertyOrSingle(VectorSearchOptions? searchOptions) - { - if (searchOptions is not null) - { -#pragma warning disable CS0618 // Type or member is obsolete - string? vectorPropertyName = searchOptions.VectorPropertyName; -#pragma warning restore CS0618 // Type or member is obsolete - - // If vector property name is provided, try to find it in schema or throw an exception. - if (!string.IsNullOrWhiteSpace(vectorPropertyName)) - { - // Check vector properties by data model property name. - return this.VectorProperties.FirstOrDefault(l => l.DataModelPropertyName.Equals(vectorPropertyName, StringComparison.Ordinal)) - ?? 
throw new InvalidOperationException($"The {this._dataModelType.FullName} type does not have a vector property named '{vectorPropertyName}'."); - } - else if (searchOptions.VectorProperty is Expression> expression) - { - // VectorPropertiesInfo is not available for VectorStoreGenericDataModel. - IReadOnlyList infos = typeof(TRecord).IsGenericType && typeof(TRecord).GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>) - ? [] : this.VectorPropertiesInfo; - - return GetMatchingProperty(expression, infos, this.VectorProperties); - } - } - - // If vector property name is not provided, check if there is a single vector property, or throw if there are no vectors or more than one. - if (this.VectorProperty is null) - { - throw new InvalidOperationException($"The {this._dataModelType.FullName} type does not have any vector properties."); - } - - if (this.VectorProperties.Count > 1) - { - throw new InvalidOperationException($"The {this._dataModelType.FullName} type has multiple vector properties, please specify your chosen property via options."); - } - - return this.VectorProperty; - } - - /// - /// Get the text data property, that has full text search indexing enabled, with the provided name if a name is provided, and fall back - /// to a text data property in the schema if not. If no name is provided and there is more than one text data property with - /// full text search indexing enabled, an exception will be thrown. - /// - /// The full text search property selector. - /// Thrown if the provided property name is not a valid text data property name. - public VectorStoreRecordDataProperty GetFullTextDataPropertyOrSingle(Expression>? expression) - { - if (expression is not null) - { - // DataPropertiesInfo is not available for VectorStoreGenericDataModel. - IReadOnlyList infos = typeof(TRecord).IsGenericType && typeof(TRecord).GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>) - ? [] : this.DataPropertiesInfo; - - var dataProperty = GetMatchingProperty(expression, this.DataPropertiesInfo, this.DataProperties); - return dataProperty.IsFullTextSearchable - ? dataProperty - : throw new InvalidOperationException($"The text data property named '{dataProperty.DataModelPropertyName}' on the {this._dataModelType.FullName} type must have full text search enabled."); - } - - // If text data property name is not provided, check if a single full text searchable text property exists or throw otherwise. - var fullTextStringProperties = this.DataProperties - .Where(l => l.PropertyType == typeof(string) && l.IsFullTextSearchable) - .ToList(); - - if (fullTextStringProperties.Count == 0) - { - throw new InvalidOperationException($"The {this._dataModelType.FullName} type does not have any text data properties that have full text search enabled."); - } - - if (fullTextStringProperties.Count > 1) - { - throw new InvalidOperationException($"The {this._dataModelType.FullName} type has multiple text data properties that have full text search enabled, please specify your chosen property via options."); - } - - return fullTextStringProperties[0]; - } - - private static TProperty GetMatchingProperty(Expression> expression, - IReadOnlyList propertyInfos, IReadOnlyList properties) - where TProperty : VectorStoreRecordProperty - { - bool data = typeof(TProperty) == typeof(VectorStoreRecordDataProperty); - string expectedGenericModelPropertyName = data - ? nameof(VectorStoreGenericDataModel.Data) - : nameof(VectorStoreGenericDataModel.Vectors); - - MemberExpression? 
member = expression.Body as MemberExpression; - // (TRecord r) => r.PropertyName is translated into - // (TRecord r) => (object)r.PropertyName for properties that return struct like ReadOnlyMemory. - if (member is null && expression.Body is UnaryExpression unary - && unary.Operand.NodeType == ExpressionType.MemberAccess) - { - member = unary.Operand as MemberExpression; - } - - if (member is not null - && expression.Parameters.Count == 1 - && member.Expression == expression.Parameters[0] - && member.Member is PropertyInfo property) - { - for (int i = 0; i < propertyInfos.Count; i++) - { - if (propertyInfos[i] == property) - { - return properties[i]; - } - } - - throw new InvalidOperationException($"The property {property.Name} of {typeof(TRecord).FullName} is not a {(data ? "Data" : "Vector")} property."); - } - // (VectorStoreGenericDataModel r) => r.Vectors["PropertyName"] - else if (expression.Body is MethodCallExpression methodCall - // It's a Func, object> - && expression.Type.IsGenericType - && expression.Type.GenericTypeArguments.Length == 2 - && expression.Type.GenericTypeArguments[0].IsGenericType - && expression.Type.GenericTypeArguments[0].GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>) - // It's accessing VectorStoreGenericDataModel.Vectors (or Data) - && methodCall.Object is MemberExpression memberAccess - && memberAccess.Member.Name == expectedGenericModelPropertyName - // and has a single argument - && methodCall.Arguments.Count == 1) - { - string name = methodCall.Arguments[0] switch - { - ConstantExpression constant when constant.Value is string text => text, - MemberExpression field when TryGetCapturedValue(field, out object? capturedValue) && capturedValue is string text => text, - _ => throw new InvalidOperationException($"The value of the provided {(data ? "Additional" : "Vector")}Property option is not a valid expression.") - }; - - return properties.FirstOrDefault(l => l.DataModelPropertyName.Equals(name, StringComparison.Ordinal)) - ?? throw new InvalidOperationException($"The {typeof(TRecord).FullName} type does not have a vector property named '{name}'."); - } - - throw new InvalidOperationException($"The value of the provided {(data ? "Additional" : "Vector")}Property option is not a valid expression."); - - static bool TryGetCapturedValue(Expression expression, out object? capturedValue) - { - if (expression is MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } - && constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) - && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true)) - { - capturedValue = fieldInfo.GetValue(constant.Value); - return true; - } - - capturedValue = null; - return false; - } - } - - /// - /// Check if we have previously loaded the objects from the data model and if not, load them. - /// - private void LoadPropertyInfoIfNeeded() - { - if (this._keyPropertiesInfo != null) - { - return; - } - - // If we previously built the definition from the data model, the PropertyInfo objects - // from the data model would already be saved. If we didn't though, there could be a mismatch - // between what is defined in the definition and what is in the data model. Therefore, this - // method will throw if any property in the definition is not on the data model. 
- var propertiesInfo = FindPropertiesInfo(this._dataModelType, this._vectorStoreRecordDefinition); - - this._keyPropertiesInfo = propertiesInfo.KeyProperties; - this._dataPropertiesInfo = propertiesInfo.DataProperties; - this._vectorPropertiesInfo = propertiesInfo.VectorProperties; - } - - /// - /// Split the given into key, data and vector properties and verify that we have the expected numbers of each type. - /// - /// The name of the type that the definition relates to. - /// The to split. - /// A value indicating whether multiple key properties are supported. - /// A value indicating whether multiple vectors are supported. - /// A value indicating whether we need at least one vector. - /// The properties on the split into key, data and vector groupings. - /// Thrown if there are any validation failures with the provided . - private static (List KeyProperties, List DataProperties, List VectorProperties) SplitDefinitionAndVerify( - string typeName, - VectorStoreRecordDefinition definition, - bool supportsMultipleKeys, - bool supportsMultipleVectors, - bool requiresAtLeastOneVector) - { - var keyProperties = definition.Properties.OfType().ToList(); - var dataProperties = definition.Properties.OfType().ToList(); - var vectorProperties = definition.Properties.OfType().ToList(); - - if (keyProperties.Count > 1 && !supportsMultipleKeys) - { - throw new ArgumentException($"Multiple key properties found on type {typeName} or the provided {nameof(VectorStoreRecordDefinition)}."); - } - - if (keyProperties.Count == 0) - { - throw new ArgumentException($"No key property found on type {typeName} or the provided {nameof(VectorStoreRecordDefinition)}."); - } - - if (requiresAtLeastOneVector && vectorProperties.Count == 0) - { - throw new ArgumentException($"No vector property found on type {typeName} or the provided {nameof(VectorStoreRecordDefinition)} while at least one is required."); - } - - if (!supportsMultipleVectors && vectorProperties.Count > 1) - { - throw new ArgumentException($"Multiple vector properties found on type {typeName} or the provided {nameof(VectorStoreRecordDefinition)} while only one is supported."); - } - - return (keyProperties, dataProperties, vectorProperties); - } - - /// - /// Find the properties with , and attributes - /// and verify that they exist and that we have the expected numbers of each type. - /// Return those properties in separate categories. - /// - /// The data model to find the properties on. - /// The categorized properties. - private static (List KeyProperties, List DataProperties, List VectorProperties) FindPropertiesInfo([DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)] Type type) - { - List keyProperties = new(); - List dataProperties = new(); - List vectorProperties = new(); - - foreach (var property in type.GetProperties()) - { - // Get Key property. - if (property.GetCustomAttribute() is not null) - { - keyProperties.Add(property); - } - - // Get data properties. - if (property.GetCustomAttribute() is not null) - { - dataProperties.Add(property); - } - - // Get Vector properties. - if (property.GetCustomAttribute() is not null) - { - vectorProperties.Add(property); - } - } - - return (keyProperties, dataProperties, vectorProperties); - } - - /// - /// Find the properties listed in the on the and verify - /// that they exist. - /// Return those properties in separate categories. - /// - /// The data model to find the properties on. - /// The property configuration. - /// The categorized properties. 
- public static (List KeyProperties, List DataProperties, List VectorProperties) FindPropertiesInfo([DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)] Type type, VectorStoreRecordDefinition vectorStoreRecordDefinition) - { - List keyProperties = new(); - List dataProperties = new(); - List vectorProperties = new(); - - foreach (VectorStoreRecordProperty property in vectorStoreRecordDefinition.Properties) - { - // Key. - if (property is VectorStoreRecordKeyProperty keyPropertyInfo) - { - var keyProperty = type.GetProperty(keyPropertyInfo.DataModelPropertyName); - if (keyProperty == null) - { - throw new ArgumentException($"Key property '{keyPropertyInfo.DataModelPropertyName}' not found on type {type.FullName}."); - } - - keyProperties.Add(keyProperty); - } - // Data. - else if (property is VectorStoreRecordDataProperty dataPropertyInfo) - { - var dataProperty = type.GetProperty(dataPropertyInfo.DataModelPropertyName); - if (dataProperty == null) - { - throw new ArgumentException($"Data property '{dataPropertyInfo.DataModelPropertyName}' not found on type {type.FullName}."); - } - - dataProperties.Add(dataProperty); - } - // Vector. - else if (property is VectorStoreRecordVectorProperty vectorPropertyInfo) - { - var vectorProperty = type.GetProperty(vectorPropertyInfo.DataModelPropertyName); - if (vectorProperty == null) - { - throw new ArgumentException($"Vector property '{vectorPropertyInfo.DataModelPropertyName}' not found on type {type.FullName}."); - } - - vectorProperties.Add(vectorProperty); - } - else - { - throw new ArgumentException($"Unknown property type '{property.GetType().FullName}' in vector store record definition."); - } - } - - return (keyProperties, dataProperties, vectorProperties); - } - - /// - /// Create a by reading the attributes on the provided objects. - /// - /// objects to build a from. - /// The based on the given objects. - private static VectorStoreRecordDefinition CreateVectorStoreRecordDefinitionFromType((List KeyProperties, List DataProperties, List VectorProperties) propertiesInfo) - { - var definitionProperties = new List(); - - // Key properties. - foreach (var keyProperty in propertiesInfo.KeyProperties) - { - var keyAttribute = keyProperty.GetCustomAttribute(); - if (keyAttribute is not null) - { - definitionProperties.Add(new VectorStoreRecordKeyProperty(keyProperty.Name, keyProperty.PropertyType) - { - StoragePropertyName = keyAttribute.StoragePropertyName - }); - } - } - - // Data properties. - foreach (var dataProperty in propertiesInfo.DataProperties) - { - var dataAttribute = dataProperty.GetCustomAttribute(); - if (dataAttribute is not null) - { - definitionProperties.Add(new VectorStoreRecordDataProperty(dataProperty.Name, dataProperty.PropertyType) - { - IsFilterable = dataAttribute.IsFilterable, - IsFullTextSearchable = dataAttribute.IsFullTextSearchable, - StoragePropertyName = dataAttribute.StoragePropertyName - }); - } - } - - // Vector properties. 
- foreach (var vectorProperty in propertiesInfo.VectorProperties) - { - var vectorAttribute = vectorProperty.GetCustomAttribute(); - if (vectorAttribute is not null) - { - definitionProperties.Add(new VectorStoreRecordVectorProperty(vectorProperty.Name, vectorProperty.PropertyType) - { - Dimensions = vectorAttribute.Dimensions, - IndexKind = vectorAttribute.IndexKind, - DistanceFunction = vectorAttribute.DistanceFunction, - StoragePropertyName = vectorAttribute.StoragePropertyName - }); - } - } - - return new VectorStoreRecordDefinition { Properties = definitionProperties }; - } - - /// - /// Build a map of property names to the names under which they should be saved in storage, for the given properties. - /// - /// The properties to build the map for. - /// The map from property names to the names under which they should be saved in storage. - private static Dictionary BuildPropertyNameToStorageNameMap((List keyProperties, List dataProperties, List vectorProperties) properties) - { - var storagePropertyNameMap = new Dictionary(); - - foreach (var keyProperty in properties.keyProperties) - { - storagePropertyNameMap.Add(keyProperty.DataModelPropertyName, keyProperty.StoragePropertyName ?? keyProperty.DataModelPropertyName); - } - - foreach (var dataProperty in properties.dataProperties) - { - storagePropertyNameMap.Add(dataProperty.DataModelPropertyName, dataProperty.StoragePropertyName ?? dataProperty.DataModelPropertyName); - } - - foreach (var vectorProperty in properties.vectorProperties) - { - storagePropertyNameMap.Add(vectorProperty.DataModelPropertyName, vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName); - } - - return storagePropertyNameMap; - } - - /// - /// Build a map of property names to the names that they would have if serialized to JSON. - /// - /// The properties to build the map for. - /// The data model type that the property belongs to. - /// The options used for JSON serialization. - /// The map from property names to the names that they would have if serialized to JSON. - private static Dictionary BuildPropertyNameToJsonPropertyNameMap( - (List keyProperties, List dataProperties, List vectorProperties) properties, - [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)] Type dataModel, - JsonSerializerOptions? options) - { - var jsonPropertyNameMap = new Dictionary(); - - foreach (var keyProperty in properties.keyProperties) - { - jsonPropertyNameMap.Add(keyProperty.DataModelPropertyName, GetJsonPropertyName(keyProperty, dataModel, options)); - } - - foreach (var dataProperty in properties.dataProperties) - { - jsonPropertyNameMap.Add(dataProperty.DataModelPropertyName, GetJsonPropertyName(dataProperty, dataModel, options)); - } - - foreach (var vectorProperty in properties.vectorProperties) - { - jsonPropertyNameMap.Add(vectorProperty.DataModelPropertyName, GetJsonPropertyName(vectorProperty, dataModel, options)); - } - - return jsonPropertyNameMap; - } - - /// - /// Get the JSON property name of a property by using the if available, otherwise - /// using the if available, otherwise falling back to the property name. - /// The provided may not actually contain the property, e.g. when the user has a data model that - /// doesn't resemble the stored data and where they are using a custom mapper. - /// - /// The property to retrieve a JSON name for. - /// The data model type that the property belongs to. - /// The options used for JSON serialization. - /// The JSON property name. 
- private static string GetJsonPropertyName(VectorStoreRecordProperty property, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)] Type dataModel, JsonSerializerOptions? options) - { - var propertyInfo = dataModel.GetProperty(property.DataModelPropertyName); - - if (propertyInfo != null) - { - var jsonPropertyNameAttribute = propertyInfo.GetCustomAttribute(); - if (jsonPropertyNameAttribute is not null) - { - return jsonPropertyNameAttribute.Name; - } - } - - if (options?.PropertyNamingPolicy is not null) - { - return options.PropertyNamingPolicy.ConvertName(property.DataModelPropertyName); - } - - return property.DataModelPropertyName; - } -} diff --git a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReaderOptions.cs b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReaderOptions.cs deleted file mode 100644 index 7404106d1a27..000000000000 --- a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReaderOptions.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using System.Text.Json; - -namespace Microsoft.Extensions.VectorData; - -/// -/// Contains options for . -/// -[ExcludeFromCodeCoverage] -internal sealed class VectorStoreRecordPropertyReaderOptions -{ - /// - /// Gets or sets a value indicating whether the connector/db supports multiple key properties. - /// - public bool SupportsMultipleKeys { get; set; } = false; - - /// - /// Gets or sets a value indicating whether the connector/db supports multiple vector properties. - /// - public bool SupportsMultipleVectors { get; set; } = true; - - /// - /// Gets or sets a value indicating whether the connector/db requires at least one vector property. - /// - public bool RequiresAtLeastOneVector { get; set; } = false; - - /// - /// Gets or sets the json serializer options that the connector might be using for storage serialization. - /// - public JsonSerializerOptions? JsonSerializerOptions { get; set; } -} diff --git a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyVerification.cs b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyVerification.cs index 08337bd0f138..58fe5d75d7ac 100644 --- a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyVerification.cs +++ b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyVerification.cs @@ -194,62 +194,6 @@ var enumerableType when GetGenericEnumerableInterface(enumerableType) is Type en internal static bool IsGenericDataModel(Type recordType) => recordType.IsGenericType && recordType.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>); - /// - /// Checks that if the provided is a that the key type is supported by the default mappers. - /// If not supported, a custom mapper must be supplied, otherwise an exception is thrown. - /// - /// The type of the record data model used by the connector. - /// A value indicating whether a custom mapper was supplied to the connector - /// The list of key types supported by the default mappers. - /// Thrown if the key type of the is not supported by the default mappers and a custom mapper was not supplied. - public static void VerifyGenericDataModelKeyType(Type recordType, bool customMapperSupplied, IEnumerable allowedKeyTypes) - { - // If we are not dealing with a generic data model, no need to check anything else. - if (!IsGenericDataModel(recordType)) - { - return; - } - - // If the key type is supported, we are good. 
- var keyType = recordType.GetGenericArguments()[0]; - if (allowedKeyTypes.Contains(keyType)) - { - return; - } - - // If the key type is not supported out of the box, but a custom mapper was supplied, we are good. - if (customMapperSupplied) - { - return; - } - - throw new ArgumentException($"The key type '{keyType.FullName}' of data model '{nameof(VectorStoreGenericDataModel)}' is not supported by the default mappers. " + - $"Only the following key types are supported: {string.Join(", ", allowedKeyTypes)}. Please provide your own mapper to map to your chosen key type."); - } - - /// - /// Checks that if the provided is a that a is also provided. - /// - /// The type of the record data model used by the connector. - /// A value indicating whether a record definition was supplied to the connector. - /// Thrown if a is not provided when using . - public static void VerifyGenericDataModelDefinitionSupplied(Type recordType, bool recordDefinitionSupplied) - { - // If we are not dealing with a generic data model, no need to check anything else. - if (!recordType.IsGenericType || recordType.GetGenericTypeDefinition() != typeof(VectorStoreGenericDataModel<>)) - { - return; - } - - // If we are dealing with a generic data model, and a record definition was supplied, we are good. - if (recordDefinitionSupplied) - { - return; - } - - throw new ArgumentException($"A {nameof(VectorStoreRecordDefinition)} must be provided when using '{nameof(VectorStoreGenericDataModel)}'."); - } - #if NET6_0_OR_GREATER private static readonly ConstructorInfo s_objectGetDefaultConstructorInfo = typeof(object).GetConstructor(Type.EmptyTypes)!; #endif diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/CompilerServicesAttributes.cs b/dotnet/src/InternalUtilities/src/Diagnostics/CompilerServicesAttributes.cs index bba0ffc78584..f72f48d1c65f 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/CompilerServicesAttributes.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/CompilerServicesAttributes.cs @@ -7,6 +7,7 @@ #if !NETCOREAPP #pragma warning disable IDE0005 // Using directive is unnecessary. +using System.ComponentModel; using System.Diagnostics.CodeAnalysis; namespace System.Runtime.CompilerServices; @@ -23,4 +24,38 @@ public CallerArgumentExpressionAttribute(string parameterName) public string ParameterName { get; } } +/// Specifies that a type has required members or that a member is required. +[AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct | AttributeTargets.Field | AttributeTargets.Property, AllowMultiple = false, Inherited = false)] +[EditorBrowsable(EditorBrowsableState.Never)] +internal sealed class RequiredMemberAttribute : Attribute; + +[AttributeUsage(AttributeTargets.All, AllowMultiple = true, Inherited = false)] +internal sealed class CompilerFeatureRequiredAttribute : Attribute +{ + public CompilerFeatureRequiredAttribute(string featureName) + { + this.FeatureName = featureName; + } + + /// + /// The name of the compiler feature. + /// + public string FeatureName { get; } + + /// + /// If true, the compiler can choose to allow access to the location where this attribute is applied if it does not understand . + /// + public bool IsOptional { get; init; } + + /// + /// The used for the ref structs C# feature. + /// + public const string RefStructs = nameof(RefStructs); + + /// + /// The used for the required members C# feature. 
+ /// + public const string RequiredMembers = nameof(RequiredMembers); +} + #endif diff --git a/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj b/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj index 2a5d5d03d961..094d79e6052f 100644 --- a/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj +++ b/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj @@ -48,8 +48,6 @@ - - diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordMappingTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordMappingTests.cs index 121d7ac38d07..68e5caa11643 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordMappingTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordMappingTests.cs @@ -3,8 +3,6 @@ using System; using System.Collections; using System.Collections.Generic; -using System.Linq; -using System.Reflection; using Microsoft.Extensions.VectorData; using Xunit; @@ -12,95 +10,6 @@ namespace SemanticKernel.UnitTests.Data; public class VectorStoreRecordMappingTests { - [Fact] - public void BuildPropertiesInfoWithValuesShouldBuildPropertiesInfo() - { - // Arrange. - var dataModelPropertiesInfo = new[] - { - typeof(DataModel).GetProperty(nameof(DataModel.Key))!, - typeof(DataModel).GetProperty(nameof(DataModel.Data))! - }; - var dataModelToStorageNameMapping = new Dictionary - { - { nameof(DataModel.Key), "key" }, - { nameof(DataModel.Data), "data" }, - }; - var storageValues = new Dictionary - { - { "key", "key value" }, - { "data", "data value" }, - }; - - // Act. - var propertiesInfoWithValues = VectorStoreRecordMapping.BuildPropertiesInfoWithValues( - dataModelPropertiesInfo, - dataModelToStorageNameMapping, - storageValues); - - // Assert. - var propertiesInfoWithValuesArray = propertiesInfoWithValues.ToArray(); - Assert.Equal(2, propertiesInfoWithValuesArray.Length); - Assert.Equal(dataModelPropertiesInfo[0], propertiesInfoWithValuesArray[0].Key); - Assert.Equal("key value", propertiesInfoWithValuesArray[0].Value); - Assert.Equal(dataModelPropertiesInfo[1], propertiesInfoWithValuesArray[1].Key); - Assert.Equal("data value", propertiesInfoWithValuesArray[1].Value); - } - - [Fact] - public void BuildPropertiesInfoWithValuesShouldUseValueMapperIfProvided() - { - // Arrange. - var dataModelPropertiesInfo = new[] - { - typeof(DataModel).GetProperty(nameof(DataModel.Key))!, - typeof(DataModel).GetProperty(nameof(DataModel.Data))! - }; - var dataModelToStorageNameMapping = new Dictionary - { - { nameof(DataModel.Key), "key" }, - { nameof(DataModel.Data), "data" }, - }; - var storageValues = new Dictionary - { - { "key", 10 }, - { "data", 20 }, - }; - - // Act. - var propertiesInfoWithValues = VectorStoreRecordMapping.BuildPropertiesInfoWithValues( - dataModelPropertiesInfo, - dataModelToStorageNameMapping, - storageValues, - (int value, Type type) => value.ToString()); - - // Assert. - var propertiesInfoWithValuesArray = propertiesInfoWithValues.ToArray(); - Assert.Equal(2, propertiesInfoWithValuesArray.Length); - Assert.Equal(dataModelPropertiesInfo[0], propertiesInfoWithValuesArray[0].Key); - Assert.Equal("10", propertiesInfoWithValuesArray[0].Value); - Assert.Equal(dataModelPropertiesInfo[1], propertiesInfoWithValuesArray[1].Key); - Assert.Equal("20", propertiesInfoWithValuesArray[1].Value); - } - - [Fact] - public void SetPropertiesOnRecordShouldSetProperties() - { - // Arrange. - var record = new DataModel(); - - // Act. 
- VectorStoreRecordMapping.SetPropertiesOnRecord(record, new[] - { - new KeyValuePair(typeof(DataModel).GetProperty(nameof(DataModel.Key))!, "key value"), - new KeyValuePair(typeof(DataModel).GetProperty(nameof(DataModel.Data))!, "data value"), - }); - - // Assert. - Assert.Equal("key value", record.Key); - Assert.Equal("data value", record.Data); - } - [Theory] [InlineData(typeof(List))] [InlineData(typeof(ICollection))] @@ -160,10 +69,4 @@ public void CreateEnumerableThrowsForUnsupportedType(Type expectedType) // Act & Assert. Assert.Throws(() => VectorStoreRecordMapping.CreateEnumerable(input, expectedType)); } - - private sealed class DataModel - { - public string Key { get; set; } = string.Empty; - public string Data { get; set; } = string.Empty; - } } diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs deleted file mode 100644 index bbaabdd3d844..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs +++ /dev/null @@ -1,814 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Text.Json; -using System.Text.Json.Serialization; -using Microsoft.Extensions.VectorData; -using Xunit; - -namespace SemanticKernel.UnitTests.Data; - -public class VectorStoreRecordPropertyReaderTests -{ - [Theory] - [MemberData(nameof(NoKeyTypeAndDefinitionCombos))] - public void ConstructorFailsForNoKey(Type type, VectorStoreRecordDefinition? definition) - { - // Act & Assert. - var exception = Assert.Throws(() => new VectorStoreRecordPropertyReader(type, definition, null)); - Assert.Equal("No key property found on type NoKeyModel or the provided VectorStoreRecordDefinition.", exception.Message); - } - - [Theory] - [MemberData(nameof(MultiKeysTypeAndDefinitionCombos))] - public void ConstructorSucceedsForSupportedMultiKeys(Type type, VectorStoreRecordDefinition? definition) - { - // Act & Assert. - var sut = new VectorStoreRecordPropertyReader(type, definition, new VectorStoreRecordPropertyReaderOptions { SupportsMultipleKeys = true }); - } - - [Theory] - [MemberData(nameof(MultiKeysTypeAndDefinitionCombos))] - public void ConstructorFailsForUnsupportedMultiKeys(Type type, VectorStoreRecordDefinition? definition) - { - // Act & Assert. - var exception = Assert.Throws(() => new VectorStoreRecordPropertyReader(type, definition, new VectorStoreRecordPropertyReaderOptions { SupportsMultipleKeys = false })); - Assert.Equal("Multiple key properties found on type MultiKeysModel or the provided VectorStoreRecordDefinition.", exception.Message); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void ConstructorSucceedsForSupportedMultiVectors(Type type, VectorStoreRecordDefinition? definition) - { - // Act & Assert. - var sut = new VectorStoreRecordPropertyReader(type, definition, new VectorStoreRecordPropertyReaderOptions { SupportsMultipleVectors = true }); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void ConstructorFailsForUnsupportedMultiVectors(Type type, VectorStoreRecordDefinition? definition) - { - // Act & Assert. 
- var exception = Assert.Throws(() => new VectorStoreRecordPropertyReader(type, definition, new VectorStoreRecordPropertyReaderOptions { SupportsMultipleVectors = false })); - Assert.Equal("Multiple vector properties found on type MultiPropsModel or the provided VectorStoreRecordDefinition while only one is supported.", exception.Message); - } - - [Theory] - [MemberData(nameof(NoVectorsTypeAndDefinitionCombos))] - public void ConstructorFailsForUnsupportedNoVectors(Type type, VectorStoreRecordDefinition? definition) - { - // Act & Assert. - var exception = Assert.Throws(() => new VectorStoreRecordPropertyReader(type, definition, new VectorStoreRecordPropertyReaderOptions { RequiresAtLeastOneVector = true })); - Assert.Equal("No vector property found on type NoVectorModel or the provided VectorStoreRecordDefinition while at least one is required.", exception.Message); - } - - [Theory] - [MemberData(nameof(TypeAndDefinitionCombos))] - public void CanGetDefinition(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. - var actual = sut.RecordDefinition; - - // Assert. - Assert.NotNull(actual); - } - - [Theory] - [MemberData(nameof(TypeAndDefinitionCombos))] - public void CanGetKeyPropertyInfo(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. - var actual = sut.KeyPropertyInfo; - - // Assert. - Assert.NotNull(actual); - Assert.Equal("Key", actual.Name); - Assert.Equal(typeof(string), actual.PropertyType); - } - - [Theory] - [MemberData(nameof(TypeAndDefinitionCombos))] - public void CanGetKeyPropertiesInfo(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. - var actual = sut.KeyPropertiesInfo; - - // Assert. - Assert.NotNull(actual); - Assert.Single(actual); - Assert.Equal("Key", actual[0].Name); - Assert.Equal(typeof(string), actual[0].PropertyType); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void CanGetDataPropertiesInfo(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. - var actual = sut.DataPropertiesInfo; - - // Assert. - Assert.NotNull(actual); - Assert.Equal(2, actual.Count); - Assert.Equal("Data1", actual[0].Name); - Assert.Equal(typeof(string), actual[0].PropertyType); - Assert.Equal("Data2", actual[1].Name); - Assert.Equal(typeof(string), actual[1].PropertyType); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void CanGetVectorPropertiesInfo(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. - var actual = sut.VectorPropertiesInfo; - - // Assert. - Assert.NotNull(actual); - Assert.Equal(2, actual.Count); - Assert.Equal("Vector1", actual[0].Name); - Assert.Equal(typeof(ReadOnlyMemory), actual[0].PropertyType); - Assert.Equal("Vector2", actual[1].Name); - Assert.Equal(typeof(ReadOnlyMemory), actual[1].PropertyType); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void CanGetFirstVectorPropertyName(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. 
- var actual = sut.FirstVectorPropertyName; - - // Assert. - Assert.Equal("Vector1", actual); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void CanGetFirstVectorPropertyInfo(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. - var actual = sut.FirstVectorPropertyInfo; - - // Assert. - Assert.NotNull(actual); - Assert.Equal("Vector1", actual.Name); - Assert.Equal(typeof(ReadOnlyMemory), actual.PropertyType); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void CanGetKeyPropertyName(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. - var actual = sut.KeyPropertyName; - - // Assert. - Assert.Equal("Key", actual); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void CanGetKeyPropertyStoragePropertyNameWithoutOverride(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. - var actual = sut.KeyPropertyStoragePropertyName; - - // Assert. - Assert.Equal("Key", actual); - } - - [Theory] - [MemberData(nameof(StorageNamesPropsTypeAndDefinitionCombos))] - public void CanGetKeyPropertyStoragePropertyNameWithOverride(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. - var actual = sut.KeyPropertyStoragePropertyName; - - // Assert. - Assert.Equal("storage_key", actual); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void CanGetDataPropertyStoragePropertyNameWithOverrideMix(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. - var actual = sut.DataPropertyStoragePropertyNames; - - // Assert. - Assert.Equal("Data1", actual[0]); - Assert.Equal("storage_data2", actual[1]); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void CanGetVectorPropertyStoragePropertyNameWithOverrideMix(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. - var actual = sut.VectorPropertyStoragePropertyNames; - - // Assert. - Assert.Equal("Vector1", actual[0]); - Assert.Equal("storage_vector2", actual[1]); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void CanGetKeyPropertyJsonNameWithoutOverride(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. - var actual = sut.KeyPropertyJsonName; - - // Assert. - Assert.Equal("Key", actual); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void CanGetKeyPropertyJsonNameWithSerializerSettings(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, new() - { - JsonSerializerOptions = new JsonSerializerOptions() - { - PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseUpper - } - }); - - // Act. - var actual = sut.KeyPropertyJsonName; - - // Assert. 
- Assert.Equal("KEY", actual); - } - - [Theory] - [MemberData(nameof(StorageNamesPropsTypeAndDefinitionCombos))] - public void CanGetKeyPropertyJsonNameWithOverride(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. - var actual = sut.KeyPropertyJsonName; - - // Assert. - Assert.Equal("json_key", actual); - } - - [Theory] - [MemberData(nameof(StorageNamesPropsTypeAndDefinitionCombos))] - public void CanGetDataPropertyJsonNameWithOverride(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. - var actual = sut.DataPropertyJsonNames; - - // Assert. - Assert.NotNull(actual); - Assert.Equal(2, actual.Count); - Assert.Equal("json_data1", actual[0]); - Assert.Equal("json_data2", actual[1]); - } - - [Theory] - [MemberData(nameof(StorageNamesPropsTypeAndDefinitionCombos))] - public void CanGetVectorPropertyJsonNameWithOverride(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act. - var actual = sut.VectorPropertyJsonNames; - - // Assert. - Assert.NotNull(actual); - Assert.Single(actual); - Assert.Equal("json_vector", actual[0]); - } - - [Theory] - [MemberData(nameof(TypeAndDefinitionCombos))] - public void VerifyKeyPropertiesPassesForAllowedTypes(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - var allowedTypes = new HashSet { typeof(string), typeof(int) }; - - // Act. - sut.VerifyKeyProperties(allowedTypes); - } - - [Theory] - [MemberData(nameof(TypeAndDefinitionCombos))] - public void VerifyKeyPropertiesFailsForDisallowedTypes(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - var allowedTypes = new HashSet { typeof(long) }; - - // Act. - var exception = Assert.Throws(() => sut.VerifyKeyProperties(allowedTypes)); - Assert.Equal("Key properties must be one of the supported types: System.Int64. Type of the property 'Key' is System.String.", exception.Message); - } - - [Theory] - [MemberData(nameof(EnumerablePropsTypeAndDefinitionCombos))] - public void VerifyDataPropertiesPassesForAllowedEnumerableTypes(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - var allowedTypes = new HashSet { typeof(string), typeof(int) }; - - // Act. - sut.VerifyDataProperties(allowedTypes, true); - } - - [Theory] - [MemberData(nameof(EnumerablePropsTypeAndDefinitionCombos))] - public void VerifyDataPropertiesFailsForDisallowedEnumerableTypes(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - var allowedTypes = new HashSet { typeof(string), typeof(int) }; - - // Act. - var exception = Assert.Throws(() => sut.VerifyDataProperties(allowedTypes, false)); - Assert.Equal("Data properties must be one of the supported types: System.String, System.Int32. 
Type of the property 'EnumerableData' is System.Collections.Generic.IEnumerable`1[[System.String, System.Private.CoreLib, Version=8.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e]].", exception.Message); - } - - [Theory] - [MemberData(nameof(EnumerablePropsTypeAndDefinitionCombos))] - public void VerifyVectorPropertiesPassesForAllowedEnumerableTypes(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - var allowedTypes = new HashSet { typeof(ReadOnlyMemory) }; - - // Act. - sut.VerifyVectorProperties(allowedTypes); - } - - [Theory] - [MemberData(nameof(EnumerablePropsTypeAndDefinitionCombos))] - public void VerifyVectorPropertiesFailsForDisallowedEnumerableTypes(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - var allowedTypes = new HashSet { typeof(ReadOnlyMemory) }; - - // Act. - var exception = Assert.Throws(() => sut.VerifyVectorProperties(allowedTypes)); - Assert.Equal("Vector properties must be one of the supported types: System.ReadOnlyMemory`1[[System.Double, System.Private.CoreLib, Version=8.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e]]. Type of the property 'Vector' is System.ReadOnlyMemory`1[[System.Single, System.Private.CoreLib, Version=8.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e]].", exception.Message); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void GetStoragePropertyNameReturnsStorageNameWithFallback(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act & Assert. - Assert.Equal("Data1", sut.GetStoragePropertyName("Data1")); - Assert.Equal("storage_data2", sut.GetStoragePropertyName("Data2")); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void GetJsonPropertyNameReturnsJsonWithFallback(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act & Assert. - Assert.Equal("Data1", sut.GetJsonPropertyName("Data1")); - Assert.Equal("json_data2", sut.GetJsonPropertyName("Data2")); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void GetVectorPropertyOrSingleReturnsRequestedVectorAndThrowsForInvalidVector(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - var validVector = new VectorSearchOptions() { VectorProperty = r => r.Vector2 }; - var invalidVector = new VectorSearchOptions() { VectorProperty = r => r.Data2 }; - - // Act & Assert. - Assert.Equal("Vector2", sut.GetVectorPropertyOrSingle(validVector).DataModelPropertyName); - Assert.Throws(() => sut.GetVectorPropertyOrSingle(invalidVector)); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void GetVectorPropertyOrSingleThrowsForMultipleVectors(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act & Assert. - Assert.Throws(() => sut.GetVectorPropertyOrSingle(null)); - } - - [Theory] - [MemberData(nameof(NoVectorsTypeAndDefinitionCombos))] - public void GetVectorPropertyOrSingleThrowsForNoVectors(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. 
- var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act & Assert. - Assert.Throws(() => sut.GetVectorPropertyOrSingle(null)); - } - - [Fact] - public void GetVectorPropertyOrSingleReturnsRequestedGenericDataModelVectorWhenUsingConst() - { - const string TheConst = "FloatVector"; - VectorStoreRecordPropertyReader sut = CreateReaderForGenericModel(TheConst); - VectorSearchOptions> expectedConst = new() - { - VectorProperty = r => r.Vectors[TheConst] - }; - VectorSearchOptions> wrongConst = new() - { - VectorProperty = r => r.Vectors["Different"] - }; - - Assert.Equal(TheConst, sut.GetVectorPropertyOrSingle(expectedConst).DataModelPropertyName); - Assert.Throws(() => sut.GetVectorPropertyOrSingle(wrongConst)); - } - - [Fact] - public void GetVectorPropertyOrSingleReturnsRequestedGenericDataModelVectorWhenUsingVariable() - { - string theVariable = "FloatVector"; - string theWrongVariable = "Different"; - VectorStoreRecordPropertyReader sut = CreateReaderForGenericModel(theVariable); - VectorSearchOptions> expectedVariable = new() - { - VectorProperty = r => r.Vectors[theVariable] - }; - VectorSearchOptions> wrongVariable = new() - { - VectorProperty = r => r.Vectors[theWrongVariable] - }; - - Assert.Equal(theVariable, sut.GetVectorPropertyOrSingle(expectedVariable).DataModelPropertyName); - Assert.Throws(() => sut.GetVectorPropertyOrSingle(wrongVariable)); - } - - [Theory] - [InlineData("FloatVector", "Different")] - // it's a Theory just for the need of testing a method expected being captured by the lambda property selector - public void GetVectorPropertyOrSingleReturnsRequestedGenericDataModelVectorWhenUsingArgument(string expected, string wrong) - { - VectorStoreRecordPropertyReader sut = CreateReaderForGenericModel(expected); - VectorSearchOptions> expectedArgument = new() - { - VectorProperty = r => r.Vectors[expected] - }; - VectorSearchOptions> wrongArgument = new() - { - VectorProperty = r => r.Vectors[wrong] - }; - - Assert.Equal("FloatVector", sut.GetVectorPropertyOrSingle(expectedArgument).DataModelPropertyName); - Assert.Throws(() => sut.GetVectorPropertyOrSingle(wrongArgument)); - } - - private static VectorStoreRecordPropertyReader CreateReaderForGenericModel(string vectorPropertyName) - { - VectorStoreGenericDataModel genericRecord = new("key") - { - Data = - { - ["Text"] = "data" - }, - Vectors = - { - [vectorPropertyName] = new ReadOnlyMemory([-1, -1, -1, -1]) - } - }; - VectorStoreRecordDefinition definition = new() - { - Properties = - [ - new VectorStoreRecordKeyProperty("Key", typeof(Guid)), - new VectorStoreRecordDataProperty("Text", typeof(string)), - new VectorStoreRecordVectorProperty(vectorPropertyName, typeof(ReadOnlyMemory)), - ] - }; - - return new(genericRecord.GetType(), definition, null); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void GetFullTextDataPropertyOrOnlyReturnsRequestedPropOrOnlyTextDataPropAndThrowsForInvalidProp(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act & Assert. 
- Assert.Equal("Data1", sut.GetFullTextDataPropertyOrSingle(r => r.Data1).DataModelPropertyName); - Assert.Equal("Data1", sut.GetFullTextDataPropertyOrSingle(null).DataModelPropertyName); - Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle(r => r.Vector1)); - Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle(r => "DoesNotExist")); - } - - [Theory] - [MemberData(nameof(NoVectorsTypeAndDefinitionCombos))] - public void GetFullTextDataPropertyOrOnlyThrowsForNoTextDataProps(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act & Assert. - Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle(null)); - } - - [Theory] - [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] - public void GetFullTextDataPropertyOrOnlyThrowsForNonFullTextSearchProp(Type type, VectorStoreRecordDefinition? definition) - { - // Arrange. - var sut = new VectorStoreRecordPropertyReader(type, definition, null); - - // Act & Assert. - Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle(r => r.Data2)); - } - - [Fact] - public void GetFullTextDataPropertyOrOnlyThrowsForMultipleMatchingProps() - { - // Arrange. - var properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("Data1", typeof(string)) { IsFullTextSearchable = true }, - new VectorStoreRecordDataProperty("Data2", typeof(string)) { IsFullTextSearchable = true } - }; - var definition = new VectorStoreRecordDefinition - { - Properties = properties - }; - var sut = new VectorStoreRecordPropertyReader(typeof(object), definition, null); - - // Act & Assert. - Assert.Throws(() => sut.GetFullTextDataPropertyOrSingle(null)); - } - - public static IEnumerable NoKeyTypeAndDefinitionCombos() - { - yield return new object?[] { typeof(NoKeyModel), s_noKeyDefinition }; - yield return new object?[] { typeof(NoKeyModel), null }; - } - - public static IEnumerable NoVectorsTypeAndDefinitionCombos() - { - yield return new object?[] { typeof(NoVectorModel), s_noVectorDefinition }; - yield return new object?[] { typeof(NoVectorModel), null }; - } - - public static IEnumerable MultiKeysTypeAndDefinitionCombos() - { - yield return new object?[] { typeof(MultiKeysModel), s_multiKeysDefinition }; - yield return new object?[] { typeof(MultiKeysModel), null }; - } - - public static IEnumerable TypeAndDefinitionCombos() - { - yield return new object?[] { typeof(SinglePropsModel), s_singlePropsDefinition }; - yield return new object?[] { typeof(SinglePropsModel), null }; - yield return new object?[] { typeof(MultiPropsModel), s_multiPropsDefinition }; - yield return new object?[] { typeof(MultiPropsModel), null }; - yield return new object?[] { typeof(EnumerablePropsModel), s_enumerablePropsDefinition }; - yield return new object?[] { typeof(EnumerablePropsModel), null }; - } - - public static IEnumerable MultiPropsTypeAndDefinitionCombos() - { - yield return new object?[] { typeof(MultiPropsModel), s_multiPropsDefinition }; - yield return new object?[] { typeof(MultiPropsModel), null }; - } - - public static IEnumerable StorageNamesPropsTypeAndDefinitionCombos() - { - yield return new object?[] { typeof(StorageNamesPropsModel), s_storageNamesPropsDefinition }; - yield return new object?[] { typeof(StorageNamesPropsModel), null }; - } - - public static IEnumerable EnumerablePropsTypeAndDefinitionCombos() - { - yield return new object?[] { typeof(EnumerablePropsModel), s_enumerablePropsDefinition }; - yield return 
new object?[] { typeof(EnumerablePropsModel), null }; - } - -#pragma warning disable CA1812 // Invalid unused classes error, since I am using these for testing purposes above. - - private sealed class NoKeyModel - { - } - - private static readonly VectorStoreRecordDefinition s_noKeyDefinition = new(); - - private sealed class NoVectorModel - { - [VectorStoreRecordKey] - public string Key { get; set; } = string.Empty; - } - - private static readonly VectorStoreRecordDefinition s_noVectorDefinition = new() - { - Properties = - [ - new VectorStoreRecordKeyProperty("Key", typeof(string)) - ] - }; - - private sealed class MultiKeysModel - { - [VectorStoreRecordKey] - public string Key1 { get; set; } = string.Empty; - - [VectorStoreRecordKey] - public string Key2 { get; set; } = string.Empty; - } - - private static readonly VectorStoreRecordDefinition s_multiKeysDefinition = new() - { - Properties = - [ - new VectorStoreRecordKeyProperty("Key1", typeof(string)), - new VectorStoreRecordKeyProperty("Key2", typeof(string)) - ] - }; - - private sealed class SinglePropsModel - { - [VectorStoreRecordKey] - public string Key { get; set; } = string.Empty; - - [VectorStoreRecordData] - public string Data { get; set; } = string.Empty; - - [VectorStoreRecordVector] - public ReadOnlyMemory Vector { get; set; } - - public string NotAnnotated { get; set; } = string.Empty; - } - - private static readonly VectorStoreRecordDefinition s_singlePropsDefinition = new() - { - Properties = - [ - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("Data", typeof(string)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) - ] - }; - - private sealed class MultiPropsModel - { - [VectorStoreRecordKey] - public string Key { get; set; } = string.Empty; - - [VectorStoreRecordData(IsFilterable = true, IsFullTextSearchable = true)] - public string Data1 { get; set; } = string.Empty; - - [VectorStoreRecordData(StoragePropertyName = "storage_data2")] - [JsonPropertyName("json_data2")] - public string Data2 { get; set; } = string.Empty; - - [VectorStoreRecordVector(4, DistanceFunction.DotProductSimilarity, IndexKind.Flat)] - public ReadOnlyMemory Vector1 { get; set; } - - [VectorStoreRecordVector(StoragePropertyName = "storage_vector2")] - [JsonPropertyName("json_vector2")] - public ReadOnlyMemory Vector2 { get; set; } - - public string NotAnnotated { get; set; } = string.Empty; - } - - private static readonly VectorStoreRecordDefinition s_multiPropsDefinition = new() - { - Properties = - [ - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("Data1", typeof(string)) { IsFilterable = true, IsFullTextSearchable = true }, - new VectorStoreRecordDataProperty("Data2", typeof(string)) { StoragePropertyName = "storage_data2" }, - new VectorStoreRecordVectorProperty("Vector1", typeof(ReadOnlyMemory)) { Dimensions = 4, IndexKind = IndexKind.Flat, DistanceFunction = DistanceFunction.DotProductSimilarity }, - new VectorStoreRecordVectorProperty("Vector2", typeof(ReadOnlyMemory)) { StoragePropertyName = "storage_vector2" } - ] - }; - - private sealed class EnumerablePropsModel - { - [VectorStoreRecordKey] - public string Key { get; set; } = string.Empty; - - [VectorStoreRecordData] - public IEnumerable EnumerableData { get; set; } = new List(); - - [VectorStoreRecordData] - public string[] ArrayData { get; set; } = Array.Empty(); - - [VectorStoreRecordData] - public List ListData { get; set; } = new List(); - - [VectorStoreRecordVector] 
- public ReadOnlyMemory Vector { get; set; } - - public string NotAnnotated { get; set; } = string.Empty; - } - - private static readonly VectorStoreRecordDefinition s_enumerablePropsDefinition = new() - { - Properties = - [ - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("EnumerableData", typeof(IEnumerable)), - new VectorStoreRecordDataProperty("ArrayData", typeof(string[])), - new VectorStoreRecordDataProperty("ListData", typeof(List)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) - ] - }; - - private sealed class StorageNamesPropsModel - { - [VectorStoreRecordKey(StoragePropertyName = "storage_key")] - [JsonPropertyName("json_key")] - public string Key { get; set; } = string.Empty; - - [VectorStoreRecordData(StoragePropertyName = "storage_data1")] - [JsonPropertyName("json_data1")] - public string Data1 { get; set; } = string.Empty; - - [VectorStoreRecordData(StoragePropertyName = "storage_data2")] - [JsonPropertyName("json_data2")] - public string Data2 { get; set; } = string.Empty; - - [VectorStoreRecordVector(StoragePropertyName = "storage_vector")] - [JsonPropertyName("json_vector")] - public ReadOnlyMemory Vector { get; set; } - } - - private static readonly VectorStoreRecordDefinition s_storageNamesPropsDefinition = new() - { - Properties = - [ - new VectorStoreRecordKeyProperty("Key", typeof(string)) { StoragePropertyName = "storage_key" }, - new VectorStoreRecordDataProperty("Data1", typeof(string)) { StoragePropertyName = "storage_data1" }, - new VectorStoreRecordDataProperty("Data2", typeof(string)) { StoragePropertyName = "storage_data2" }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { StoragePropertyName = "storage_vector" } - ] - }; - -#pragma warning restore CA1812 -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyVerificationTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyVerificationTests.cs deleted file mode 100644 index 9e18965d8015..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyVerificationTests.cs +++ /dev/null @@ -1,170 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections; -using System.Collections.Generic; -using System.Linq; -using Microsoft.Extensions.VectorData; -using Xunit; - -namespace SemanticKernel.UnitTests.Data; - -public class VectorStoreRecordPropertyVerificationTests -{ - [Fact] - public void VerifyPropertyTypesPassForAllowedTypes() - { - // Arrange. - var reader = new VectorStoreRecordPropertyReader(typeof(SinglePropsModel), null, null); - - // Act. - VectorStoreRecordPropertyVerification.VerifyPropertyTypes(reader.DataProperties, [typeof(string)], "Data"); - VectorStoreRecordPropertyVerification.VerifyPropertyTypes(this._singlePropsDefinition.Properties.OfType(), [typeof(string)], "Data"); - } - - [Fact] - public void VerifyPropertyTypesPassForAllowedEnumerableTypes() - { - // Arrange. - var reader = new VectorStoreRecordPropertyReader(typeof(EnumerablePropsModel), null, null); - - // Act. - VectorStoreRecordPropertyVerification.VerifyPropertyTypes(reader.DataProperties, [typeof(string)], "Data", supportEnumerable: true); - VectorStoreRecordPropertyVerification.VerifyPropertyTypes(this._enumerablePropsDefinition.Properties.OfType(), [typeof(string)], "Data", supportEnumerable: true); - } - - [Fact] - public void VerifyPropertyTypesFailsForDisallowedTypes() - { - // Arrange. 
- var reader = new VectorStoreRecordPropertyReader(typeof(SinglePropsModel), null, null); - - // Act. - var ex1 = Assert.Throws(() => VectorStoreRecordPropertyVerification.VerifyPropertyTypes(reader.DataProperties, [typeof(int), typeof(float)], "Data")); - var ex2 = Assert.Throws(() => VectorStoreRecordPropertyVerification.VerifyPropertyTypes(this._singlePropsDefinition.Properties.OfType(), [typeof(int), typeof(float)], "Data")); - - // Assert. - Assert.Equal("Data properties must be one of the supported types: System.Int32, System.Single. Type of the property 'Data' is System.String.", ex1.Message); - Assert.Equal("Data properties must be one of the supported types: System.Int32, System.Single. Type of the property 'Data' is System.String.", ex2.Message); - } - - [Theory] - [InlineData(typeof(SinglePropsModel), false, new Type[] { typeof(string) }, false)] - [InlineData(typeof(VectorStoreGenericDataModel), false, new Type[] { typeof(string), typeof(ulong) }, false)] - [InlineData(typeof(VectorStoreGenericDataModel), true, new Type[] { typeof(string), typeof(ulong) }, false)] - [InlineData(typeof(VectorStoreGenericDataModel), false, new Type[] { typeof(string), typeof(ulong) }, true)] - public void VerifyGenericDataModelKeyTypeThrowsOnlyForUnsupportedKeyTypeWithoutCustomMapper(Type recordType, bool customMapperSupplied, IEnumerable allowedKeyTypes, bool shouldThrow) - { - if (shouldThrow) - { - var ex = Assert.Throws(() => VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(recordType, customMapperSupplied, allowedKeyTypes)); - Assert.Equal("The key type 'System.Int32' of data model 'VectorStoreGenericDataModel' is not supported by the default mappers. Only the following key types are supported: System.String, System.UInt64. Please provide your own mapper to map to your chosen key type.", ex.Message); - } - else - { - VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(recordType, customMapperSupplied, allowedKeyTypes); - } - } - - [Theory] - [InlineData(typeof(SinglePropsModel), false, false)] - [InlineData(typeof(VectorStoreGenericDataModel), true, false)] - [InlineData(typeof(VectorStoreGenericDataModel), false, true)] - public void VerifyGenericDataModelDefinitionSuppliedThrowsOnlyForMissingDefinition(Type recordType, bool definitionSupplied, bool shouldThrow) - { - if (shouldThrow) - { - var ex = Assert.Throws(() => VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(recordType, definitionSupplied)); - Assert.Equal("A VectorStoreRecordDefinition must be provided when using 'VectorStoreGenericDataModel'.", ex.Message); - } - else - { - VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(recordType, definitionSupplied); - } - } - - [Theory] - [InlineData(typeof(List), true)] - [InlineData(typeof(ICollection), true)] - [InlineData(typeof(IEnumerable), true)] - [InlineData(typeof(IList), true)] - [InlineData(typeof(IReadOnlyCollection), true)] - [InlineData(typeof(IReadOnlyList), true)] - [InlineData(typeof(string[]), true)] - [InlineData(typeof(IEnumerable), true)] - [InlineData(typeof(ArrayList), true)] - [InlineData(typeof(string), false)] - [InlineData(typeof(HashSet), false)] - [InlineData(typeof(ISet), false)] - [InlineData(typeof(Dictionary), false)] - [InlineData(typeof(Stack), false)] - [InlineData(typeof(Queue), false)] - public void IsSupportedEnumerableTypeReturnsCorrectAnswerForEachType(Type type, bool expected) - { - // Act. 
- var actual = VectorStoreRecordPropertyVerification.IsSupportedEnumerableType(type); - - // Assert. - Assert.Equal(expected, actual); - } - -#pragma warning disable CA1812 // Invalid unused classes error, since I am using these for testing purposes above. - - private sealed class SinglePropsModel - { - [VectorStoreRecordKey] - public string Key { get; set; } = string.Empty; - - [VectorStoreRecordData] - public string Data { get; set; } = string.Empty; - - [VectorStoreRecordVector] - public ReadOnlyMemory Vector { get; set; } - - public string NotAnnotated { get; set; } = string.Empty; - } - - private readonly VectorStoreRecordDefinition _singlePropsDefinition = new() - { - Properties = - [ - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("Data", typeof(string)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) - ] - }; - - private sealed class EnumerablePropsModel - { - [VectorStoreRecordKey] - public string Key { get; set; } = string.Empty; - - [VectorStoreRecordData] - public IEnumerable EnumerableData { get; set; } = new List(); - - [VectorStoreRecordData] - public string[] ArrayData { get; set; } = Array.Empty(); - - [VectorStoreRecordData] - public List ListData { get; set; } = new List(); - - [VectorStoreRecordVector] - public ReadOnlyMemory Vector { get; set; } - - public string NotAnnotated { get; set; } = string.Empty; - } - - private readonly VectorStoreRecordDefinition _enumerablePropsDefinition = new() - { - Properties = - [ - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("EnumerableData", typeof(IEnumerable)), - new VectorStoreRecordDataProperty("ArrayData", typeof(string[])), - new VectorStoreRecordDataProperty("ListData", typeof(List)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) - ] - }; - -#pragma warning restore CA1812 -} diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestStore.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestStore.cs index 75f07161ba6f..4d7a34fec866 100644 --- a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Support/AzureAISearchTestStore.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Linq.Expressions; using Azure; using Azure.Identity; using Azure.Search.Documents.Indexes; @@ -46,4 +47,18 @@ protected override Task StartAsync() return Task.CompletedTask; } + + public override async Task WaitForDataAsync( + IVectorStoreRecordCollection collection, + int recordCount, + Expression>? filter = null, + int vectorSize = 3) + { + await base.WaitForDataAsync(collection, recordCount, filter, vectorSize); + + // There seems to be some asynchronicity/race condition specific to Azure AI Search which isn't taken care + // of by the generic retry loop in the base implementation. 
+ // TODO: Investigate this and remove + await Task.Delay(TimeSpan.FromMilliseconds(1000)); + } } diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs index 4cdaea3c7a20..6b981da2b12a 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs @@ -3,6 +3,7 @@ using System.Text; using Microsoft.Data.SqlClient; using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.SqlServer; using Xunit; @@ -34,7 +35,7 @@ public void AppendParameterName(string propertyName, string expectedPrefix) { StringBuilder builder = new(); StringBuilder expectedBuilder = new(); - VectorStoreRecordKeyProperty keyProperty = new(propertyName, typeof(string)); + VectorStoreRecordKeyPropertyModel keyProperty = new(propertyName, typeof(string)); int paramIndex = 0; // we need a dedicated variable to ensure that AppendParameterName increments the index for (int i = 0; i < 10; i++) @@ -107,26 +108,17 @@ FROM INFORMATION_SCHEMA.TABLES [InlineData(false)] public void CreateTable(bool ifNotExists) { - VectorStoreRecordKeyProperty keyProperty = new("id", typeof(long)); - VectorStoreRecordDataProperty[] dataProperties = - [ - new VectorStoreRecordDataProperty("simpleName", typeof(string)), - new VectorStoreRecordDataProperty("with space", typeof(int)) - { - IsFilterable = true - } - ]; - VectorStoreRecordVectorProperty[] vectorProperties = - [ - new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) - { - Dimensions = 10 - } - ]; + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("id", typeof(long)), + new VectorStoreRecordDataProperty("simpleName", typeof(string)), + new VectorStoreRecordDataProperty("with space", typeof(int)) { IsFilterable = true }, + new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) { Dimensions = 10 } + ]); + using SqlConnection connection = CreateConnection(); - using SqlCommand command = SqlServerCommandBuilder.CreateTable(connection, "schema", "table", - ifNotExists, keyProperty, dataProperties, vectorProperties); + using SqlCommand command = SqlServerCommandBuilder.CreateTable(connection, "schema", "table", ifNotExists, model); string expectedCommand = """ @@ -152,21 +144,16 @@ PRIMARY KEY ([id]) [Fact] public void MergeIntoSingle() { - VectorStoreRecordKeyProperty keyProperty = new("id", typeof(long)); - VectorStoreRecordProperty[] properties = - [ - keyProperty, - new VectorStoreRecordDataProperty("simpleString", typeof(string)), - new VectorStoreRecordDataProperty("simpleInt", typeof(int)), - new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) - { - Dimensions = 10 - } - ]; + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("id", typeof(long)), + new VectorStoreRecordDataProperty("simpleString", typeof(string)), + new VectorStoreRecordDataProperty("simpleInt", typeof(int)), + new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) { Dimensions = 10 } + ]); using SqlConnection connection = CreateConnection(); - using SqlCommand command = SqlServerCommandBuilder.MergeIntoSingle(connection, "schema", "table", - keyProperty, properties, + using SqlCommand command = SqlServerCommandBuilder.MergeIntoSingle(connection, 
"schema", "table", model, new Dictionary { { "id", null }, @@ -202,17 +189,14 @@ WHEN NOT MATCHED THEN [Fact] public void MergeIntoMany() { - VectorStoreRecordKeyProperty keyProperty = new("id", typeof(long)); - VectorStoreRecordProperty[] properties = - [ - keyProperty, - new VectorStoreRecordDataProperty("simpleString", typeof(string)), - new VectorStoreRecordDataProperty("simpleInt", typeof(int)), - new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) - { - Dimensions = 10 - } - ]; + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("id", typeof(long)), + new VectorStoreRecordDataProperty("simpleString", typeof(string)), + new VectorStoreRecordDataProperty("simpleInt", typeof(int)), + new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) { Dimensions = 10 } + ]); + Dictionary[] records = [ new Dictionary @@ -234,8 +218,7 @@ public void MergeIntoMany() using SqlConnection connection = CreateConnection(); using SqlCommand command = connection.CreateCommand(); - Assert.True(SqlServerCommandBuilder.MergeIntoMany(command, "schema", "table", - keyProperty, properties, records)); + Assert.True(SqlServerCommandBuilder.MergeIntoMany(command, "schema", "table", model, records)); string expectedCommand = """" @@ -272,7 +255,7 @@ WHEN NOT MATCHED THEN [Fact] public void DeleteSingle() { - VectorStoreRecordKeyProperty keyProperty = new("id", typeof(long)); + VectorStoreRecordKeyPropertyModel keyProperty = new("id", typeof(long)); using SqlConnection connection = CreateConnection(); using SqlCommand command = SqlServerCommandBuilder.DeleteSingle(connection, @@ -287,7 +270,7 @@ public void DeleteSingle() public void DeleteMany() { string[] keys = ["key1", "key2"]; - VectorStoreRecordKeyProperty keyProperty = new("id", typeof(string)); + VectorStoreRecordKeyPropertyModel keyProperty = new("id", typeof(string)); using SqlConnection connection = CreateConnection(); using SqlCommand command = connection.CreateCommand(); @@ -304,20 +287,17 @@ public void DeleteMany() [Fact] public void SelectSingle() { - VectorStoreRecordKeyProperty keyProperty = new("id", typeof(long)); - VectorStoreRecordProperty[] properties = [ - keyProperty, - new VectorStoreRecordDataProperty("name", typeof(string)), - new VectorStoreRecordDataProperty("age", typeof(int)), - new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) - { - Dimensions = 10 - } - ]; + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("id", typeof(long)), + new VectorStoreRecordDataProperty("name", typeof(string)), + new VectorStoreRecordDataProperty("age", typeof(int)), + new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) { Dimensions = 10 } + ]); + using SqlConnection connection = CreateConnection(); - using SqlCommand command = SqlServerCommandBuilder.SelectSingle(connection, - "schema", "tableName", keyProperty, properties, 123L, includeVectors: true); + using SqlCommand command = SqlServerCommandBuilder.SelectSingle(connection, "schema", "tableName", model, 123L, includeVectors: true); AssertEqualIgnoreNewLines( """"" @@ -332,22 +312,20 @@ FROM [schema].[tableName] [Fact] public void SelectMany() { - VectorStoreRecordKeyProperty keyProperty = new("id", typeof(long)); - VectorStoreRecordProperty[] properties = [ - keyProperty, + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("id", typeof(long)), new VectorStoreRecordDataProperty("name", typeof(string)), new VectorStoreRecordDataProperty("age", typeof(int)), - new 
VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) - { - Dimensions = 10 - } - ]; + new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) { Dimensions = 10 } + ]); + long[] keys = [123L, 456L, 789L]; using SqlConnection connection = CreateConnection(); using SqlCommand command = connection.CreateCommand(); Assert.True(SqlServerCommandBuilder.SelectMany(command, - "schema", "tableName", keyProperty, properties, keys, includeVectors: true)); + "schema", "tableName", model, keys, includeVectors: true)); AssertEqualIgnoreNewLines( """"" @@ -371,4 +349,10 @@ private static void AssertEqualIgnoreNewLines(string expected, string actual) // We create a connection using a fake connection string just to be able to create the SqlCommand. private static SqlConnection CreateConnection() => new("Server=localhost;Database=master;Integrated Security=True;"); + + private static VectorStoreRecordModel BuildModel(List properties) + => new VectorStoreRecordModelBuilder(SqlServerConstants.ModelBuildingOptions) + .Build( + typeof(VectorStoreGenericDataModel), + new() { Properties = properties }); } diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj index 4752d82818dc..ba0e332abc73 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj @@ -10,6 +10,8 @@ $(NoWarn);CA2007,SKEXP0001,SKEXP0020,VSTHRD111;CS1685 b7762d10-e29b-4bb1-8b74-b6d69a667dd4 + + $(NoWarn);MEVD9001 From 0a1b83c7f93f42ace5151e97a9b4e21cfa89e468 Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Fri, 4 Apr 2025 11:17:26 +0200 Subject: [PATCH 28/63] .Net: Obsolete custom mappers (#11366) This obsoletes the public-facing parts of custom mappers, and suppresses internal warnings around them. After we release the next preview, we'll be removing these. 
First step of #11129 --- .../VectorStoreLangchainInterop/AzureAISearchFactory.cs | 2 ++ .../Memory/VectorStoreLangchainInterop/QdrantFactory.cs | 2 ++ .../Memory/VectorStore_DataIngestion_CustomMapper.cs | 2 ++ .../Step6_Use_CustomMapper.cs | 2 ++ .../AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs | 2 ++ .../AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs | 2 ++ .../AzureAISearchGenericDataModelMapper.cs | 2 ++ .../AzureAISearchVectorStoreRecordCollection.cs | 4 ++++ .../AzureAISearchVectorStoreRecordCollectionOptions.cs | 2 ++ .../AzureCosmosDBMongoDBVectorStoreRecordCollection.cs | 4 ++++ ...zureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs | 2 ++ .../AzureCosmosDBNoSQLGenericDataModelMapper.cs | 2 ++ .../AzureCosmosDBNoSQLVectorStoreRecordCollection.cs | 4 ++++ .../AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs | 2 ++ .../AzureCosmosDBNoSQLVectorStoreRecordMapper.cs | 2 ++ .../MongoDBVectorStoreRecordCollection.cs | 4 ++++ .../MongoDBVectorStoreRecordCollectionOptions.cs | 2 ++ .../PineconeVectorStoreRecordCollection.cs | 4 ++++ .../PineconeVectorStoreRecordCollectionOptions.cs | 2 ++ .../PineconeVectorStoreRecordMapper.cs | 2 ++ .../PostgresVectorStoreRecordCollection.cs | 4 ++++ .../PostgresVectorStoreRecordCollectionOptions.cs | 2 ++ .../PostgresVectorStoreRecordMapper.cs | 2 ++ .../QdrantVectorStoreCollectionSearchMapping.cs | 2 ++ .../QdrantVectorStoreRecordCollection.cs | 4 ++++ .../QdrantVectorStoreRecordCollectionOptions.cs | 2 ++ .../QdrantVectorStoreRecordMapper.cs | 2 ++ .../RedisHashSetGenericDataModelMapper.cs | 2 ++ .../RedisHashSetVectorStoreRecordCollection.cs | 4 ++++ .../RedisHashSetVectorStoreRecordCollectionOptions.cs | 2 ++ .../RedisHashSetVectorStoreRecordMapper.cs | 2 ++ .../RedisJsonGenericDataModelMapper.cs | 2 ++ .../RedisJsonVectorStoreRecordCollection.cs | 4 ++++ .../RedisJsonVectorStoreRecordCollectionOptions.cs | 2 ++ .../RedisJsonVectorStoreRecordMapper.cs | 2 ++ .../Connectors/Connectors.Memory.SqlServer/RecordMapper.cs | 2 ++ .../SqlServerVectorStoreRecordCollection.cs | 4 ++++ .../SqlServerVectorStoreRecordCollectionOptions.cs | 2 ++ .../SqliteVectorStoreRecordCollection.cs | 4 ++++ .../SqliteVectorStoreRecordCollectionOptions.cs | 2 ++ .../SqliteVectorStoreRecordMapper.cs | 2 ++ .../WeaviateGenericDataModelMapper.cs | 2 ++ .../WeaviateVectorStoreRecordCollection.cs | 4 ++++ .../WeaviateVectorStoreRecordCollectionOptions.cs | 1 + .../WeaviateVectorStoreRecordMapper.cs | 2 ++ .../MongoDBVectorStoreRecordCollectionTests.cs | 2 ++ .../PineconeVectorStoreRecordCollectionTests.cs | 2 ++ .../QdrantVectorStoreRecordCollectionTests.cs | 4 ++++ .../RedisHashSetVectorStoreRecordCollectionTests.cs | 6 ++++++ .../WeaviateVectorStoreRecordCollectionTests.cs | 2 ++ .../VectorStorage/IVectorStoreRecordMapper.cs | 3 +++ .../Memory/MongoDB/MongoDBGenericDataModelMapper.cs | 2 ++ .../Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs | 2 ++ .../SqlServerIntegrationTests/SqlServerVectorStoreTests.cs | 4 ++++ 54 files changed, 140 insertions(+) diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs index 2bf0cb763a7a..f509373233d9 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs @@ -9,6 +9,8 @@ namespace Memory.VectorStoreLangchainInterop; +#pragma warning disable CS0618 // 
IVectorStoreRecordMapper is obsolete + /// /// Contains a factory method that can be used to create an Azure AI Search vector store that is compatible with datasets ingested using Langchain. /// diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs index 53f0b399af82..79d149c24973 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs @@ -7,6 +7,8 @@ namespace Memory.VectorStoreLangchainInterop; +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete + /// /// Contains a factory method that can be used to create a Qdrant vector store that is compatible with datasets ingested using Langchain. /// diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs index 3f86c763acbb..54928cddfb23 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs @@ -12,6 +12,8 @@ namespace Memory; +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete + /// /// An example showing how to ingest data into a vector store using with a custom mapper. /// In this example, the storage model differs significantly from the data model, so a custom mapper is used to map between the two. diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs b/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs index cc86a773b0c0..383baeaae4fa 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs @@ -10,6 +10,8 @@ namespace GettingStartedWithVectorStores; +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete + /// /// Example that shows how you can use custom mappers if you wish the data model and storage schema to differ. 
/// diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 840acf4a3c78..17d4cbb1939e 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -489,6 +489,7 @@ await this.TestUpsertWithModelAsync( expectedPropertyName: "bson_hotel_name"); } +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete [Fact] public async Task UpsertWithCustomMapperWorksCorrectlyAsync() { @@ -564,6 +565,7 @@ public async Task GetWithCustomMapperWorksCorrectlyAsync() Assert.Equal(RecordKey, result.HotelId); Assert.Equal("Name from mapper", result.HotelName); } +#pragma warning restore CS0618 [Theory] [MemberData(nameof(VectorizedSearchVectorTypeData))] diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index ea45cc39b158..af141aac095c 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -455,6 +455,7 @@ public async Task UpsertBatchReturnsRecordKeysAsync() Assert.Equal("key3", results[2]); } +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete [Fact] public async Task UpsertWithCustomMapperWorksCorrectlyAsync() { @@ -537,6 +538,7 @@ public async Task GetWithCustomMapperWorksCorrectlyAsync() Assert.Equal(RecordKey, result.HotelId); Assert.Equal("Name from mapper", result.HotelName); } +#pragma warning restore CS0618 [Fact] public async Task VectorizedSearchReturnsValidRecordAsync() diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchGenericDataModelMapper.cs index f63fdd32bc00..3ca40ca84d15 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchGenericDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchGenericDataModelMapper.cs @@ -15,7 +15,9 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Azure AI Search. 
/// +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class AzureAISearchGenericDataModelMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper, JsonObject> +#pragma warning restore CS0618 { /// public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index 82d3bcc5fe2a..54bcc1ed33d3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -52,7 +52,9 @@ public class AzureAISearchVectorStoreRecordCollection : private readonly AzureAISearchVectorStoreRecordCollectionOptions _options; /// A mapper to use for converting between the data model and the Azure AI Search record. +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete private readonly IVectorStoreRecordMapper? _mapper; +#pragma warning restore CS0618 /// The model for this collection. private readonly VectorStoreRecordModel _model; @@ -80,6 +82,7 @@ public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexCli this._model = new VectorStoreRecordJsonModelBuilder(AzureAISearchConstants.s_modelBuildingOptions) .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.JsonSerializerOptions); +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete // Resolve mapper. // First, if someone has provided a custom mapper, use that. // If they didn't provide a custom mapper, and the record type is the generic data model, use the built in mapper for that. @@ -92,6 +95,7 @@ public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexCli { this._mapper = new AzureAISearchGenericDataModelMapper(this._model) as IVectorStoreRecordMapper; } +#pragma warning restore CS0618 } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs index 5d2ec9c9bb23..5da63d55ffb8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Text.Json; using System.Text.Json.Nodes; using Azure.Search.Documents.Indexes; @@ -18,6 +19,7 @@ public sealed class AzureAISearchVectorStoreRecordCollectionOptions /// /// If not set, the default mapper that is provided by the Azure AI Search client SDK will be used. /// + [Obsolete("Custom mappers are being obsoleted.")] public IVectorStoreRecordMapper? 
JsonObjectCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index 5c1d705f0f1c..99ea67d05b07 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -48,7 +48,9 @@ public class AzureCosmosDBMongoDBVectorStoreRecordCollection : IVectorS private readonly AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions _options; /// Interface for mapping between a storage model, and the consumer record data model. +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete private readonly IVectorStoreRecordMapper _mapper; +#pragma warning restore CS0618 /// The model for this collection. private readonly VectorStoreRecordModel _model; @@ -473,6 +475,7 @@ private static Dictionary GetStoragePropertyNames( return storagePropertyNames; } +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete /// /// Returns custom mapper, generic data model mapper or default record mapper. /// @@ -490,6 +493,7 @@ private IVectorStoreRecordMapper InitializeMapper() return new MongoDBVectorStoreRecordMapper(this._model); } +#pragma warning restore CS0618 #endregion } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs index d94c6c0956f1..7eb381a1095f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Extensions.VectorData; using MongoDB.Bson; @@ -13,6 +14,7 @@ public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions /// Gets or sets an optional custom mapper to use when converting between the data model and the Azure CosmosDB MongoDB BSON object. /// + [Obsolete("Custom mappers are being obsoleted.")] public IVectorStoreRecordMapper? BsonDocumentCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLGenericDataModelMapper.cs index b98177845ac2..6811764bf920 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLGenericDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLGenericDataModelMapper.cs @@ -12,8 +12,10 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Azure CosmosDB NoSQL. 
/// +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class AzureCosmosDBNoSQLGenericDataModelMapper(VectorStoreRecordModel model, JsonSerializerOptions jsonSerializerOptions) : IVectorStoreRecordMapper, JsonObject> +#pragma warning restore CS0618 { /// A default for serialization/deserialization of vector properties. private static readonly JsonSerializerOptions s_vectorJsonSerializerOptions = new() diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index c291f84c3aa4..1bce4d37df73 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -52,7 +52,9 @@ public class AzureCosmosDBNoSQLVectorStoreRecordCollection : private readonly VectorStoreRecordPropertyModel _partitionKeyProperty; /// The mapper to use when mapping between the consumer data model and the Azure CosmosDB NoSQL record. +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete private readonly IVectorStoreRecordMapper _mapper; +#pragma warning restore CS0618 /// public string CollectionName { get; } @@ -664,6 +666,7 @@ private async IAsyncEnumerable> MapSearchResultsAsyn } } +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete private IVectorStoreRecordMapper InitializeMapper(JsonSerializerOptions jsonSerializerOptions) { if (this._options.JsonObjectCustomMapper is not null) @@ -679,6 +682,7 @@ private IVectorStoreRecordMapper InitializeMapper(JsonSeria return new AzureCosmosDBNoSQLVectorStoreRecordMapper(this._model.KeyProperty, this._options.JsonSerializerOptions); } +#pragma warning restore CS0618 #endregion } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs index 047cd2b56b6c..ff77da73dd19 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Text.Json; using System.Text.Json.Nodes; using Microsoft.Azure.Cosmos; @@ -18,6 +19,7 @@ public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions /// If not set, the default mapper that is provided by the Azure CosmosDB NoSQL client SDK will be used. /// + [Obsolete("Custom mappers are being obsoleted.")] public IVectorStoreRecordMapper? 
JsonObjectCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs index 4c1ad9e7071f..4667d3956b8e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs @@ -11,8 +11,10 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// Class for mapping between a json node stored in Azure CosmosDB NoSQL and the consumer data model. /// /// The consumer data model to map to or from. +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class AzureCosmosDBNoSQLVectorStoreRecordMapper(VectorStoreRecordKeyPropertyModel keyProperty, JsonSerializerOptions? jsonSerializerOptions) : IVectorStoreRecordMapper +#pragma warning restore CS0618 { private readonly VectorStoreRecordKeyPropertyModel _keyProperty = keyProperty; diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index 05577cf8a7d6..65788581b57d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -48,7 +48,9 @@ public class MongoDBVectorStoreRecordCollection : IVectorStoreRecordCol private readonly MongoDBVectorStoreRecordCollectionOptions _options; /// Interface for mapping between a storage model, and the consumer record data model. +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete private readonly IVectorStoreRecordMapper _mapper; +#pragma warning restore CS0618 /// The model for this collection. private readonly VectorStoreRecordModel _model; @@ -597,6 +599,7 @@ private async Task RunOperationWithRetryAsync( throw new VectorStoreOperationException("Retry logic failed."); } +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete /// /// Returns custom mapper, generic data model mapper or default record mapper. /// @@ -614,6 +617,7 @@ private IVectorStoreRecordMapper InitializeMapper() return new MongoDBVectorStoreRecordMapper(this._model); } +#pragma warning restore CS0618 private static Array VerifyVectorParam(TVector vector) { diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs index bc591f87cdc0..64ead70bdd47 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Extensions.VectorData; using MongoDB.Bson; @@ -13,6 +14,7 @@ public sealed class MongoDBVectorStoreRecordCollectionOptions /// /// Gets or sets an optional custom mapper to use when converting between the data model and the MongoDB BSON object. /// + [Obsolete("Custom mappers are being obsoleted.")] public IVectorStoreRecordMapper? 
BsonDocumentCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index d8df1e4b9f2a..32bcfe852b09 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -29,7 +29,9 @@ public class PineconeVectorStoreRecordCollection : IVectorStoreRecordCo private readonly Sdk.PineconeClient _pineconeClient; private readonly PineconeVectorStoreRecordCollectionOptions _options; private readonly VectorStoreRecordModel _model; +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete private readonly IVectorStoreRecordMapper _mapper; +#pragma warning restore CS0618 private IndexClient? _indexClient; /// @@ -54,7 +56,9 @@ public PineconeVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, st this._model = new VectorStoreRecordModelBuilder(PineconeVectorStoreRecordFieldMapping.ModelBuildingOptions) .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete this._mapper = this._options.VectorCustomMapper ?? new PineconeVectorStoreRecordMapper(this._model); +#pragma warning restore CS0618 } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs index feb147a75763..64c6b31b478a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Extensions.VectorData; using Pinecone; @@ -13,6 +14,7 @@ public sealed class PineconeVectorStoreRecordCollectionOptions /// /// Gets or sets an optional custom mapper to use when converting between the data model and the Pinecone vector. /// + [Obsolete("Custom mappers are being obsoleted.")] public IVectorStoreRecordMapper? VectorCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs index 27e2017d3cf9..a1e8db6014a5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs @@ -11,7 +11,9 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// Mapper between a Pinecone record and the consumer data model that uses json as an intermediary to allow supporting a wide range of models. /// /// The consumer data model to map to or from. 
+#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class PineconeVectorStoreRecordMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper +#pragma warning restore CS0618 { /// public Vector MapFromDataToStorageModel(TRecord dataModel) diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index eb7eeb43d20a..94393aab6221 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -35,7 +35,9 @@ public class PostgresVectorStoreRecordCollection : IVectorStoreRe private readonly VectorStoreRecordModel _model; /// A mapper to use for converting between the data model and the Azure AI Search record. +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete private readonly IVectorStoreRecordMapper> _mapper; +#pragma warning restore CS0618 /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -74,7 +76,9 @@ internal PostgresVectorStoreRecordCollection(IPostgresVectorStoreDbClient client this._model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions) .Build(typeof(TRecord), options?.VectorStoreRecordDefinition); +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete this._mapper = this._options.DictionaryCustomMapper ?? new PostgresVectorStoreRecordMapper(this._model); +#pragma warning restore CS0618 } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollectionOptions.cs index 753713d21b3f..00a9a5624380 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollectionOptions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; @@ -21,6 +22,7 @@ public sealed class PostgresVectorStoreRecordCollectionOptions /// /// If not set, the default mapper will be used. /// + [Obsolete("Custom mappers are being obsoleted.")] public IVectorStoreRecordMapper>? DictionaryCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs index 06b5c88b2ab0..13b73ce87489 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs @@ -13,8 +13,10 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; /// A mapper class that handles the conversion between data models and storage models for Postgres vector store. /// /// The type of the data model record. 
+#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class PostgresVectorStoreRecordMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper> +#pragma warning restore CS0618 { public Dictionary MapFromDataToStorageModel(TRecord dataModel) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs index 6991ce782e58..e1f4ed9411f0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs @@ -98,7 +98,9 @@ public static Filter BuildFromLegacyFilter(VectorSearchFilter basicVectorSearchF /// The name of the collection the operation is being run on. /// The type of database operation being run. /// The mapped . +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete public static VectorSearchResult MapScoredPointToVectorSearchResult(ScoredPoint point, IVectorStoreRecordMapper mapper, bool includeVectors, string databaseSystemName, string collectionName, string operationName) +#pragma warning restore CS0618 { // Since the mapper doesn't know about scored points, we need to convert the scored point to a point struct first. var pointStruct = new PointStruct diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index e2f2d2bf37b8..5ff856fa4daa 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -54,7 +54,9 @@ public class QdrantVectorStoreRecordCollection : private readonly VectorStoreRecordModel _model; /// A mapper to use for converting between qdrant point and consumer models. +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete private readonly IVectorStoreRecordMapper _mapper; +#pragma warning restore CS0618 /// /// Initializes a new instance of the class. @@ -91,7 +93,9 @@ internal QdrantVectorStoreRecordCollection(MockableQdrantClient qdrantClient, st this._model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(this._options.HasNamedVectors)) .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete this._mapper = this._options.PointStructCustomMapper ?? new QdrantVectorStoreRecordMapper(this._model, this._options.HasNamedVectors); +#pragma warning restore CS0618 } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs index bdb1a8658e59..1b1289c799b6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Extensions.VectorData; using Qdrant.Client.Grpc; @@ -22,6 +23,7 @@ public sealed class QdrantVectorStoreRecordCollectionOptions /// /// If not set, a default mapper that uses json as an intermediary to allow automatic mapping to a wide variety of types will be used. 
/// + [Obsolete("Custom mappers are being obsoleted.")] public IVectorStoreRecordMapper? PointStructCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs index 2929b455fd13..da07c10b3481 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs @@ -13,8 +13,10 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// Mapper between a Qdrant record and the consumer data model that uses json as an intermediary to allow supporting a wide range of models. /// /// The consumer data model to map to or from. +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class QdrantVectorStoreRecordMapper(VectorStoreRecordModel model, bool hasNamedVectors) : IVectorStoreRecordMapper +#pragma warning restore CS0618 { /// public PointStruct MapFromDataToStorageModel(TRecord dataModel) diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetGenericDataModelMapper.cs index 2ed9a25ce0d2..ef3c42722845 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetGenericDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetGenericDataModelMapper.cs @@ -12,7 +12,9 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Redis when using hash sets. /// +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class RedisHashSetGenericDataModelMapper : IVectorStoreRecordMapper, (string Key, HashEntry[] HashEntries)> +#pragma warning restore CS0618 { /// All the properties from the record definition. private readonly IReadOnlyList _properties; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index 74d5d0930f2a..36916c550c05 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -82,7 +82,9 @@ public class RedisHashSetVectorStoreRecordCollection : IVectorStoreReco private readonly string[] _dataStoragePropertyNamesWithScore; /// The mapper to use when mapping between the consumer data model and the Redis record. +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete private readonly IVectorStoreRecordMapper _mapper; +#pragma warning restore CS0618 /// /// Initializes a new instance of the class. @@ -108,7 +110,9 @@ public RedisHashSetVectorStoreRecordCollection(IDatabase database, string collec this._dataStoragePropertyNamesWithScore = [.. this._model.DataProperties.Select(p => p.StorageName), "vector_score"]; // Assign Mapper. +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete this._mapper = this._options.HashEntriesCustomMapper ?? 
new RedisHashSetVectorStoreRecordMapper(this._model); +#pragma warning restore CS0618 } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs index 8d61c1fb74ea..121262f92c5c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using Microsoft.Extensions.VectorData; using StackExchange.Redis; @@ -23,6 +24,7 @@ public sealed class RedisHashSetVectorStoreRecordCollectionOptions /// /// Gets or sets an optional custom mapper to use when converting between the data model and the Redis record. /// + [Obsolete("Custom mappers are being obsoleted.")] public IVectorStoreRecordMapper? HashEntriesCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs index 1223e025718b..520869d35c43 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs @@ -14,8 +14,10 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// Class for mapping between a hashset stored in redis, and the consumer data model. /// /// The consumer data model to map to or from. +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class RedisHashSetVectorStoreRecordMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper +#pragma warning restore CS0618 { /// public (string Key, HashEntry[] HashEntries) MapFromDataToStorageModel(TConsumerDataModel dataModel) diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonGenericDataModelMapper.cs index ea0ac812674a..41a4efb63575 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonGenericDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonGenericDataModelMapper.cs @@ -14,7 +14,9 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; internal sealed class RedisJsonGenericDataModelMapper( IReadOnlyList properties, JsonSerializerOptions jsonSerializerOptions) +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete : IVectorStoreRecordMapper, (string Key, JsonNode Node)> +#pragma warning restore CS0618 { /// public (string Key, JsonNode Node) MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index b267cf0bef3b..ef6f48e33857 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -70,7 +70,9 @@ public class RedisJsonVectorStoreRecordCollection : IVectorStoreRecordC private readonly string[] _dataStoragePropertyNames; /// The mapper to use when mapping between the consumer data model and the Redis record. 
+#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete private readonly IVectorStoreRecordMapper _mapper; +#pragma warning restore CS0618 /// The JSON serializer options to use when converting between the data model and the Redis record. private readonly JsonSerializerOptions _jsonSerializerOptions; @@ -99,6 +101,7 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectio // Lookup storage property names. this._dataStoragePropertyNames = this._model.DataProperties.Select(p => p.StorageName).ToArray(); +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete // Assign Mapper. if (this._options.JsonNodeCustomMapper is not null) { @@ -117,6 +120,7 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectio // Default Mapper. this._mapper = new RedisJsonVectorStoreRecordMapper(this._model.KeyProperty, this._jsonSerializerOptions); } +#pragma warning restore CS0618 } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs index d5f8696fc30d..24a3f342a755 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Text.Json; using System.Text.Json.Nodes; using Microsoft.Extensions.VectorData; @@ -27,6 +28,7 @@ public sealed class RedisJsonVectorStoreRecordCollectionOptions /// /// If not set, the default built in mapper will be used, which uses record attrigutes or the provided to map the record. /// + [Obsolete("Custom mappers are being obsoleted.")] public IVectorStoreRecordMapper? JsonNodeCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs index 3efa31400415..b128be837bfb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs @@ -14,7 +14,9 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; internal sealed class RedisJsonVectorStoreRecordMapper( VectorStoreRecordKeyPropertyModel keyProperty, JsonSerializerOptions jsonSerializerOptions) +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete : IVectorStoreRecordMapper +#pragma warning restore CS0618 { /// The key property. 
private readonly string _keyPropertyStorageName = keyProperty.StorageName; diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs index 1628948668b8..963d5f266128 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs @@ -7,7 +7,9 @@ namespace Microsoft.SemanticKernel.Connectors.SqlServer; +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class RecordMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper> +#pragma warning restore CS0618 { public IDictionary MapFromDataToStorageModel(TRecord dataModel) { diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index e95ba5cb07d5..19be43247293 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -26,7 +26,9 @@ public sealed class SqlServerVectorStoreRecordCollection private readonly string _connectionString; private readonly SqlServerVectorStoreRecordCollectionOptions _options; private readonly VectorStoreRecordModel _model; +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete private readonly IVectorStoreRecordMapper> _mapper; +#pragma warning restore CS0618 /// /// Initializes a new instance of the class. @@ -49,6 +51,7 @@ public SqlServerVectorStoreRecordCollection( this.CollectionName = name; // We need to create a copy, so any changes made to the option bag after // the ctor call do not affect this instance. +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete this._options = options is null ? s_defaultOptions : new() @@ -59,6 +62,7 @@ public SqlServerVectorStoreRecordCollection( }; this._mapper = this._options.Mapper ?? new RecordMapper(this._model); +#pragma warning restore CS0618 } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs index 6b21a5e35842..d8bfd40a2217 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; @@ -21,6 +22,7 @@ public sealed class SqlServerVectorStoreRecordCollectionOptions /// /// If not set, the default mapper will be used. /// + [Obsolete("Custom mappers are being obsoleted.")] public IVectorStoreRecordMapper>? 
Mapper { get; init; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 55ccffb60a9c..8f6e87c32242 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -34,7 +34,9 @@ public class SqliteVectorStoreRecordCollection : private readonly SqliteVectorStoreRecordCollectionOptions _options; /// The mapper to use when mapping between the consumer data model and the SQLite record. +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete private readonly IVectorStoreRecordMapper> _mapper; +#pragma warning restore CS0618 /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -130,7 +132,9 @@ public SqliteVectorStoreRecordCollection( } } +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete this._mapper = this._options.DictionaryCustomMapper ?? new SqliteVectorStoreRecordMapper(this._model); +#pragma warning restore CS0618 } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs index 90c06511826b..a6cc642af863 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; using Microsoft.Extensions.VectorData; @@ -13,6 +14,7 @@ public sealed class SqliteVectorStoreRecordCollectionOptions /// /// Gets or sets an optional custom mapper to use when converting between the data model and the SQLite record. /// + [Obsolete("Custom mappers are being obsoleted.")] public IVectorStoreRecordMapper>? DictionaryCustomMapper { get; set; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs index d39a5d343f93..8acd276cdb74 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs @@ -11,7 +11,9 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// Class for mapping between a dictionary and the consumer data model. /// /// The consumer data model to map to or from. 
+#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class SqliteVectorStoreRecordMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper> +#pragma warning restore CS0618 { public Dictionary MapFromDataToStorageModel(TRecord dataModel) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateGenericDataModelMapper.cs index aed7448293df..9b981631384a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateGenericDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateGenericDataModelMapper.cs @@ -12,7 +12,9 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; /// /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Weaviate. /// +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class WeaviateGenericDataModelMapper : IVectorStoreRecordMapper, JsonObject> +#pragma warning restore CS0618 { /// The name of the Weaviate collection. private readonly string _collectionName; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index a740b1dfd414..aa3e934153ec 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -51,7 +51,9 @@ public class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCo private readonly VectorStoreRecordModel _model; /// The mapper to use when mapping between the consumer data model and the Weaviate record. +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete private readonly IVectorStoreRecordMapper _mapper; +#pragma warning restore CS0618 /// Weaviate endpoint. private readonly Uri _endpoint; @@ -418,6 +420,7 @@ private async Task RunOperationAsync(string operationName, Func> o /// /// Returns custom mapper, generic data model mapper or default record mapper. /// +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete private IVectorStoreRecordMapper InitializeMapper() { if (this._options.JsonObjectCustomMapper is not null) @@ -434,6 +437,7 @@ private IVectorStoreRecordMapper InitializeMapper() return new WeaviateVectorStoreRecordMapper(this.CollectionName, this._model, s_jsonSerializerOptions); } +#pragma warning restore CS0618 private static void VerifyVectorParam(TVector vector) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs index 9f812e489dcf..7dff47f28d0e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs @@ -14,6 +14,7 @@ public sealed class WeaviateVectorStoreRecordCollectionOptions /// /// Gets or sets an optional custom mapper to use when converting between the data model and Weaviate record. /// + [Obsolete("Custom mappers are being obsoleted.")] public IVectorStoreRecordMapper? 
JsonObjectCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs index 37a762ece30c..93edfdaadd96 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs @@ -8,7 +8,9 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class WeaviateVectorStoreRecordMapper : IVectorStoreRecordMapper +#pragma warning restore CS0618 { private readonly string _collectionName; private readonly VectorStoreRecordModel _model; diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs index 5b5efb539d91..93ecc470d179 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs @@ -489,6 +489,7 @@ await this.TestUpsertWithModelAsync( expectedPropertyName: "bson_hotel_name"); } +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete [Fact] public async Task UpsertWithCustomMapperWorksCorrectlyAsync() { @@ -564,6 +565,7 @@ public async Task GetWithCustomMapperWorksCorrectlyAsync() Assert.Equal(RecordKey, result.HotelId); Assert.Equal("Name from mapper", result.HotelName); } +#pragma warning restore CS0618 [Theory] [MemberData(nameof(VectorizedSearchVectorTypeData))] diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs index e9b3a8e3cf17..f6aa343fbaef 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs @@ -17,6 +17,7 @@ public class PineconeVectorStoreRecordCollectionTests { private const string TestCollectionName = "testcollection"; +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete /// /// Tests that the collection can be created even if the definition and the type do not match. 
/// In this case, the expectation is that a custom mapper will be provided to map between the @@ -43,6 +44,7 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() TestCollectionName, new() { VectorStoreRecordDefinition = definition, VectorCustomMapper = Mock.Of>() }); } +#pragma warning restore CS0618 public sealed class SinglePropsModel { diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs index 06e97c33cc43..d917a20c505a 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs @@ -253,6 +253,7 @@ public async Task CanGetManyRecordsWithVectorsAsync(bool useDefinition, bo Assert.Equal(testRecordKeys[1], actual[1].Key); } +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete [Fact] public async Task CanGetRecordWithCustomMapperAsync() { @@ -298,6 +299,7 @@ public async Task CanGetRecordWithCustomMapperAsync() It.Is(x => x.IncludeVectors)), Times.Once); } +#pragma warning restore CS0618 [Theory] [InlineData(true, true)] @@ -479,6 +481,7 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition, bool hasNa Times.Once); } +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete [Fact] public async Task CanUpsertRecordWithCustomMapperAsync() { @@ -544,6 +547,7 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() TestCollectionName, new() { VectorStoreRecordDefinition = definition, PointStructCustomMapper = Mock.Of, PointStruct>>() }); } +#pragma warning restore CS0618 #pragma warning disable CS0618 // VectorSearchFilter is obsolete [Theory] diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs index 6dd99624bf42..a2996b31b2c0 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -236,6 +236,7 @@ public async Task CanGetManyRecordsWithVectorsAsync(bool useDefinition) Assert.Equal(new float[] { 5, 6, 7, 8 }, actual[1].Vector!.Value.ToArray()); } +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete [Fact] public async Task CanGetRecordWithCustomMapperAsync() { @@ -284,6 +285,7 @@ public async Task CanGetRecordWithCustomMapperAsync() It.Is(x => x.IncludeVectors)), Times.Once); } +#pragma warning restore CS0618 [Theory] [InlineData(true)] @@ -375,6 +377,7 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition) Times.Once); } +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete [Fact] public async Task CanUpsertRecordWithCustomMapperAsync() { @@ -414,6 +417,7 @@ public async Task CanUpsertRecordWithCustomMapperAsync() x => x.MapFromDataToStorageModel(It.Is(x => x == model)), Times.Once); } +#pragma warning restore CS0618 #pragma warning disable CS0618 // VectorSearchFilter is obsolete [Theory] @@ -511,6 +515,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, bool inc } #pragma warning restore CS0618 // VectorSearchFilter is obsolete +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete /// /// Tests that the collection can be created even if 
the definition and the type do not match. /// In this case, the expectation is that a custom mapper will be provided to map between the @@ -536,6 +541,7 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() TestCollectionName, new() { VectorStoreRecordDefinition = definition, HashEntriesCustomMapper = Mock.Of>() }); } +#pragma warning restore CS0618 private RedisHashSetVectorStoreRecordCollection CreateRecordCollection(bool useDefinition) { diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs index 373b7f836c00..5c456296ff89 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs @@ -349,6 +349,7 @@ public async Task UpsertReturnsRecordKeysAsync() Assert.Equal("Test Name 2", jsonObject2["properties"]?["hotelName"]?.GetValue()); } +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete [Fact] public async Task UpsertWithCustomMapperWorksCorrectlyAsync() { @@ -427,6 +428,7 @@ public async Task GetWithCustomMapperWorksCorrectlyAsync() Assert.Equal(id, result.HotelId); Assert.Equal("Test Name from mapper", result.HotelName); } +#pragma warning restore CS0618 [Theory] [InlineData(true, "http://test-endpoint/schema", "Bearer fake-key")] diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs index 3bac47a89121..3af4f5315871 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs @@ -1,5 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; + namespace Microsoft.Extensions.VectorData; /// @@ -7,6 +9,7 @@ namespace Microsoft.Extensions.VectorData; /// /// The consumer record data model to map to or from. /// The storage model to map to or from. +[Obsolete("Custom mappers are being obsoleted.")] public interface IVectorStoreRecordMapper { /// diff --git a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBGenericDataModelMapper.cs b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBGenericDataModelMapper.cs index ea48950a8a9a..4ac5b274c819 100644 --- a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBGenericDataModelMapper.cs +++ b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBGenericDataModelMapper.cs @@ -16,7 +16,9 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within MongoDB. 
/// [ExcludeFromCodeCoverage] +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class MongoDBGenericDataModelMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper, BsonDocument> +#pragma warning restore CS0618 { /// public BsonDocument MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) diff --git a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs index e2d52adfea61..78dd7c931419 100644 --- a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs +++ b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs @@ -13,7 +13,9 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; [ExcludeFromCodeCoverage] +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class MongoDBVectorStoreRecordMapper : IVectorStoreRecordMapper +#pragma warning restore CS0618 { /// A key property info of the data model. private readonly PropertyInfo? _keyClrProperty; diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs index 0c2f56a59c3e..41ae4910bfb4 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs @@ -150,6 +150,7 @@ public async Task WrongModels() } } +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete [ConditionalFact] public async Task CustomMapper() { @@ -201,6 +202,7 @@ public async Task CustomMapper() await collection.DeleteCollectionAsync(); } } +#pragma warning restore CS0618 [ConditionalFact] public async Task BatchCRUD() @@ -467,6 +469,7 @@ public sealed class FancyTestModel public ReadOnlyMemory Floats { get; set; } } +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete private sealed class TestModelMapper : IVectorStoreRecordMapper> { internal bool MapFromDataToStorageModel_WasCalled { get; set; } @@ -499,4 +502,5 @@ public TestModel MapFromStorageToDataModel(IDictionary storageM }; } } +#pragma warning restore CS0618 } From f703f696259b184f9a4b4befe81906757da9954d Mon Sep 17 00:00:00 2001 From: Adam Sitnik Date: Fri, 4 Apr 2025 16:07:19 +0200 Subject: [PATCH 29/63] .Net MEVD: Make Top mandatory argument (#11376) fixes #10193 --- .../Caching/SemanticCachingWithFilters.cs | 2 +- ...extEmbeddingVectorStoreRecordCollection.cs | 10 ++++----- .../Memory/VectorStore_EmbeddingGeneration.cs | 2 +- ...Store_HybridSearch_Simple_AzureAISearch.cs | 6 +++--- .../Memory/VectorStore_Langchain_Interop.cs | 2 +- ...torStore_VectorSearch_MultiStore_Common.cs | 6 +++--- .../VectorStore_VectorSearch_MultiVector.cs | 5 ++--- .../Memory/VectorStore_VectorSearch_Paging.cs | 2 +- .../Memory/VectorStore_VectorSearch_Simple.cs | 6 +++--- .../Memory/VolatileVectorStore_LoadData.cs | 4 ++-- .../Optimization/FrugalGPTWithFilters.cs | 2 +- .../PluginSelectionWithFilters.cs | 2 +- .../Concepts/Search/VectorStore_TextSearch.cs | 4 ++-- .../Step2_Vector_Search.cs | 7 ++----- .../Step5_Use_GenericDataModel.cs | 5 +---- .../Step6_Use_CustomMapper.cs | 5 +---- ...ISearchVectorStoreRecordCollectionTests.cs | 4 ++-- ...MongoDBVectorStoreRecordCollectionTests.cs | 11 +++++----- 
...DBNoSQLVectorStoreRecordCollectionTests.cs | 6 +++--- ...nMemoryVectorStoreRecordCollectionTests.cs | 7 ++++++- ...zureAISearchVectorStoreRecordCollection.cs | 21 +++++++++++-------- ...mosDBMongoDBVectorStoreRecordCollection.cs | 4 +++- ...osmosDBNoSQLVectorStoreRecordCollection.cs | 9 +++++--- .../InMemoryVectorStoreRecordCollection.cs | 5 +++-- .../MongoDBVectorStoreRecordCollection.cs | 9 +++++--- .../PineconeVectorStoreRecordCollection.cs | 5 +++-- .../PostgresVectorStoreRecordCollection.cs | 7 ++++--- .../QdrantVectorStoreRecordCollection.cs | 10 +++++---- ...RedisHashSetVectorStoreRecordCollection.cs | 4 +++- .../RedisJsonVectorStoreRecordCollection.cs | 4 +++- ...RedisVectorStoreCollectionSearchMapping.cs | 5 +++-- .../SqlServerCommandBuilder.cs | 3 ++- .../SqlServerVectorStoreRecordCollection.cs | 4 +++- .../SqliteVectorStoreRecordCollection.cs | 5 +++-- .../WeaviateVectorStoreRecordCollection.cs | 7 ++++++- ...VectorStoreRecordCollectionQueryBuilder.cs | 6 ++++-- ...MongoDBVectorStoreRecordCollectionTests.cs | 11 +++++----- .../QdrantVectorStoreRecordCollectionTests.cs | 3 ++- ...HashSetVectorStoreRecordCollectionTests.cs | 2 +- ...disJsonVectorStoreRecordCollectionTests.cs | 2 +- ...VectorStoreCollectionSearchMappingTests.cs | 6 +++--- ...rStoreRecordCollectionQueryBuilderTests.cs | 10 ++++----- ...eaviateVectorStoreRecordCollectionTests.cs | 5 +++-- .../CompatibilitySuppressions.xml | 12 +++++------ .../VectorSearch/HybridSearchOptions.cs | 5 ----- .../VectorSearch/IKeywordHybridSearch.cs | 2 ++ .../VectorSearch/IVectorizableTextSearch.cs | 2 ++ .../VectorSearch/IVectorizedSearch.cs | 2 ++ .../VectorSearch/VectorSearchOptions.cs | 20 +----------------- ...ISearchVectorStoreRecordCollectionTests.cs | 3 +++ ...MongoDBVectorStoreRecordCollectionTests.cs | 7 +++---- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 8 +++---- .../BaseVectorStoreRecordCollectionTests.cs | 2 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 7 +++---- ...ostgresVectorStoreRecordCollectionTests.cs | 8 +++---- .../QdrantVectorStoreRecordCollectionTests.cs | 2 ++ ...HashSetVectorStoreRecordCollectionTests.cs | 6 ++++-- ...disJsonVectorStoreRecordCollectionTests.cs | 6 ++++-- .../SqliteVectorStoreRecordCollectionTests.cs | 7 +++---- ...eaviateVectorStoreRecordCollectionTests.cs | 11 ++++------ .../Data/BaseVectorStoreTextSearchTests.cs | 4 ++-- .../src/Diagnostics/Verify.cs | 8 +++++++ .../Search/MockVectorizableTextSearch.cs | 2 +- .../Data/TextSearch/VectorStoreTextSearch.cs | 5 ++--- .../Data/VectorStoreTextSearchTestBase.cs | 4 ++-- .../SqlServerVectorStoreTests.cs | 4 ++-- .../Filter/BasicFilterTests.cs | 8 +++---- ...rdVectorizedHybridSearchComplianceTests.cs | 14 ++++++------- .../Support/TestStore.cs | 2 +- ...orSearchDistanceFunctionComplianceTests.cs | 6 +++--- 70 files changed, 214 insertions(+), 198 deletions(-) diff --git a/dotnet/samples/Concepts/Caching/SemanticCachingWithFilters.cs b/dotnet/samples/Concepts/Caching/SemanticCachingWithFilters.cs index 78c54df49434..ce57d908597c 100644 --- a/dotnet/samples/Concepts/Caching/SemanticCachingWithFilters.cs +++ b/dotnet/samples/Concepts/Caching/SemanticCachingWithFilters.cs @@ -199,7 +199,7 @@ public async Task OnPromptRenderAsync(PromptRenderContext context, Func -/// Decorator for a that generates embeddings for records on upsert and when using . +/// Decorator for a that generates embeddings for records on upsert and when using . /// /// /// This class is part of the sample. 
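To make the calling-convention change in this patch concrete before the individual hunks: a minimal sketch of a caller updated for the new signature, assuming the same collection, searchVector, and glossary record used in the surrounding samples (the sketch itself is illustrative, not part of the patch):

    // Before this patch, the result count was supplied via VectorSearchOptions.Top:
    // var searchResult = await collection.VectorizedSearchAsync(searchVector, new() { Top = 3 });

    // After this patch, top is a mandatory argument (validated to be at least 1);
    // the remaining settings such as Skip, Filter, and IncludeVectors stay on VectorSearchOptions<TRecord>.
    var searchResult = await collection.VectorizedSearchAsync(
        searchVector,
        top: 3,
        new() { Filter = g => g.Category == "External Definitions" });
    var resultRecords = await searchResult.Results.ToListAsync();

The same pattern applies to VectorizableTextSearchAsync and HybridSearchAsync, as the hunks below show.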
@@ -120,16 +120,16 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, [E } /// - public Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { - return this._decoratedVectorStoreRecordCollection.VectorizedSearchAsync(vector, options, cancellationToken); + return this._decoratedVectorStoreRecordCollection.VectorizedSearchAsync(vector, top, options, cancellationToken); } /// - public async Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { var embeddingValue = await this._textEmbeddingGenerationService.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); - return await this.VectorizedSearchAsync(embeddingValue, options, cancellationToken).ConfigureAwait(false); + return await this.VectorizedSearchAsync(embeddingValue, top, options, cancellationToken).ConfigureAwait(false); } /// diff --git a/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs b/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs index b9796e3709b9..6c994109fa7a 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs @@ -50,7 +50,7 @@ public async Task UseEmbeddingGenerationViaDecoratorAsync() // Search the collection using a vectorizable text search. var search = collection as IVectorizableTextSearch; var searchString = "What is an Application Programming Interface"; - var searchResult = await search!.VectorizableTextSearchAsync(searchString, new() { Top = 1 }); + var searchResult = await search!.VectorizableTextSearchAsync(searchString, top: 1); var resultRecords = await searchResult.Results.ToListAsync(); Console.WriteLine("Search string: " + searchString); diff --git a/dotnet/samples/Concepts/Memory/VectorStore_HybridSearch_Simple_AzureAISearch.cs b/dotnet/samples/Concepts/Memory/VectorStore_HybridSearch_Simple_AzureAISearch.cs index 521b8f03434a..508c3c9b68c5 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_HybridSearch_Simple_AzureAISearch.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_HybridSearch_Simple_AzureAISearch.cs @@ -56,7 +56,7 @@ public async Task IngestDataAndUseHybridSearch() // Search the collection using a vector search. var searchString = "What is an Application Programming Interface"; var searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - var searchResult = await hybridSearchCollection.HybridSearchAsync(searchVector, ["Application", "Programming", "Interface"], new() { Top = 1 }); + var searchResult = await hybridSearchCollection.HybridSearchAsync(searchVector, ["Application", "Programming", "Interface"], top: 1); var resultRecords = await searchResult.Results.ToListAsync(); Console.WriteLine("Search string: " + searchString); @@ -66,7 +66,7 @@ public async Task IngestDataAndUseHybridSearch() // Search the collection using a vector search. 
searchString = "What is Retrieval Augmented Generation"; searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - searchResult = await hybridSearchCollection.HybridSearchAsync(searchVector, ["Retrieval", "Augmented", "Generation"], new() { Top = 1 }); + searchResult = await hybridSearchCollection.HybridSearchAsync(searchVector, ["Retrieval", "Augmented", "Generation"], top: 1); resultRecords = await searchResult.Results.ToListAsync(); Console.WriteLine("Search string: " + searchString); @@ -76,7 +76,7 @@ public async Task IngestDataAndUseHybridSearch() // Search the collection using a vector search with pre-filtering. searchString = "What is Retrieval Augmented Generation"; searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - searchResult = await hybridSearchCollection.HybridSearchAsync(searchVector, ["Retrieval", "Augmented", "Generation"], new() { Top = 3, Filter = g => g.Category == "External Definitions" }); + searchResult = await hybridSearchCollection.HybridSearchAsync(searchVector, ["Retrieval", "Augmented", "Generation"], top: 3, new() { Filter = g => g.Category == "External Definitions" }); resultRecords = await searchResult.Results.ToListAsync(); Console.WriteLine("Search string: " + searchString); diff --git a/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs b/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs index 5466e7fd30af..e48c3700beff 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs @@ -96,7 +96,7 @@ private async Task ReadDataFromCollectionAsync(IVectorStore vectorStore, string // Search the data set. var searchString = "I'm looking for an animal that is loyal and will make a great companion"; var searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - var searchResult = await collection.VectorizedSearchAsync(searchVector, new() { Top = 1 }); + var searchResult = await collection.VectorizedSearchAsync(searchVector, top: 1); var resultRecords = await searchResult.Results.ToListAsync(); this.Output.WriteLine("Search string: " + searchString); diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs index 10d7c05e7df1..64604da51a12 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs @@ -51,7 +51,7 @@ public async Task IngestDataAndSearchAsync(string collectionName, Func(string collectionName, Func(string collectionName, Func g.Category == "External Definitions" }); + searchResult = await collection.VectorizedSearchAsync(searchVector, top: 3, new() { Filter = g => g.Category == "External Definitions" }); resultRecords = await searchResult.Results.ToListAsync(); output.WriteLine("Search string: " + searchString); diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs index 645a1040c115..44048302f051 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs @@ -55,9 +55,8 @@ public async Task VectorSearchWithMultiVectorRecordAsync() var searchString = "I am looking for a reasonably priced coffee maker"; var 
searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); var searchResult = await collection.VectorizedSearchAsync( - searchVector, new() + searchVector, top: 1, new() { - Top = 1, VectorProperty = r => r.DescriptionEmbedding }); var resultRecords = await searchResult.Results.ToListAsync(); @@ -72,9 +71,9 @@ public async Task VectorSearchWithMultiVectorRecordAsync() searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); searchResult = await collection.VectorizedSearchAsync( searchVector, + top: 1, new() { - Top = 1, VectorProperty = r => r.FeatureListEmbedding }); resultRecords = await searchResult.Results.ToListAsync(); diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Paging.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Paging.cs index c8b136f72542..8c825df3e59a 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Paging.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Paging.cs @@ -49,9 +49,9 @@ public async Task VectorSearchWithPagingAsync() // Get the next page of results by asking for 10 results, and using 'Skip' to skip the results from the previous pages. var currentPageResults = await collection.VectorizedSearchAsync( searchVector, + top: 10, new() { - Top = 10, Skip = page * 10 }); diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs index 9a43c01aeb43..8ce21fec5656 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs @@ -50,7 +50,7 @@ public async Task ExampleAsync() // Search the collection using a vector search. var searchString = "What is an Application Programming Interface"; var searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - var searchResult = await collection.VectorizedSearchAsync(searchVector, new() { Top = 1 }); + var searchResult = await collection.VectorizedSearchAsync(searchVector, top: 1); var resultRecords = await searchResult.Results.ToListAsync(); Console.WriteLine("Search string: " + searchString); @@ -60,7 +60,7 @@ public async Task ExampleAsync() // Search the collection using a vector search. searchString = "What is Retrieval Augmented Generation"; searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - searchResult = await collection.VectorizedSearchAsync(searchVector, new() { Top = 1 }); + searchResult = await collection.VectorizedSearchAsync(searchVector, top: 1); resultRecords = await searchResult.Results.ToListAsync(); Console.WriteLine("Search string: " + searchString); @@ -70,7 +70,7 @@ public async Task ExampleAsync() // Search the collection using a vector search with pre-filtering. 
searchString = "What is Retrieval Augmented Generation"; searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - searchResult = await collection.VectorizedSearchAsync(searchVector, new() { Top = 3, Filter = g => g.Category == "External Definitions" }); + searchResult = await collection.VectorizedSearchAsync(searchVector, top: 3, new() { Filter = g => g.Category == "External Definitions" }); resultRecords = await searchResult.Results.ToListAsync(); Console.WriteLine("Search string: " + searchString); diff --git a/dotnet/samples/Concepts/Memory/VolatileVectorStore_LoadData.cs b/dotnet/samples/Concepts/Memory/VolatileVectorStore_LoadData.cs index 9e70c987aed3..fd5134c245f1 100644 --- a/dotnet/samples/Concepts/Memory/VolatileVectorStore_LoadData.cs +++ b/dotnet/samples/Concepts/Memory/VolatileVectorStore_LoadData.cs @@ -71,7 +71,7 @@ static DataModel CreateRecord(string text, ReadOnlyMemory embedding) // Search the collection using a vector search. var searchString = "What is the Semantic Kernel?"; var searchVector = await embeddingGenerationService.GenerateEmbeddingAsync(searchString); - var searchResult = await vectorSearch!.VectorizedSearchAsync(searchVector, new() { Top = 1 }); + var searchResult = await vectorSearch!.VectorizedSearchAsync(searchVector, top: 1); var resultRecords = await searchResult.Results.ToListAsync(); Console.WriteLine("Search string: " + searchString); @@ -116,7 +116,7 @@ static DataModel CreateRecord(TextSearchResult searchResult, ReadOnlyMemory l.Record).ToList(); // Override arguments to use only top N examples, which will be sent to LLM. diff --git a/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs b/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs index cdaff88edeb9..278ceeccb25c 100644 --- a/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs +++ b/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs @@ -298,7 +298,7 @@ public async Task> GetBestFunctionsAsync( await collection.CreateCollectionIfNotExistsAsync(cancellationToken); // Find best functions to call for original request. - var searchResults = await collection.VectorizedSearchAsync(requestEmbedding, new() { Top = numberOfBestFunctions }, cancellationToken); + var searchResults = await collection.VectorizedSearchAsync(requestEmbedding, top: numberOfBestFunctions, cancellationToken: cancellationToken); var recordKeys = (await searchResults.Results.ToListAsync(cancellationToken)).Select(l => l.Record.Id); return plugins diff --git a/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs b/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs index f6a3d4ab6356..f5d39c702790 100644 --- a/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs +++ b/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs @@ -144,11 +144,11 @@ internal static async Task> CreateCo private sealed class VectorizedSearchWrapper(IVectorizedSearch vectorizedSearch, ITextEmbeddingGenerationService textEmbeddingGeneration) : IVectorizableTextSearch { /// - public async Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { var vectorizedQuery = await textEmbeddingGeneration!.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); - return await vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, options, cancellationToken); + return await vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, top, options, cancellationToken); } } diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs b/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs index 2eda86863a60..80a0a44c2365 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs @@ -45,10 +45,7 @@ internal static async Task> SearchVectorStoreAsync( // Search the store and get the single most relevant result. var searchResult = await collection.VectorizedSearchAsync( searchVector, - new() - { - Top = 1 - }); + top: 1); var searchResultItems = await searchResult.Results.ToListAsync(); return searchResultItems.First(); } @@ -68,9 +65,9 @@ public async Task SearchAnInMemoryVectorStoreWithFilteringAsync() // Search the store with a filter and get the single most relevant result. var searchResult = await collection.VectorizedSearchAsync( searchVector, + top: 1, new() { - Top = 1, Filter = g => g.Category == "AI" }); var searchResultItems = await searchResult.Results.ToListAsync(); diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_GenericDataModel.cs b/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_GenericDataModel.cs index 449daf1c19b1..ae045e539b3b 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_GenericDataModel.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_GenericDataModel.cs @@ -58,10 +58,7 @@ public async Task SearchAVectorStoreWithGenericDataModelAsync() // Search the generic data model collection and get the single most relevant result. var searchResult = await genericDataModelCollection.VectorizedSearchAsync( searchVector, - new() - { - Top = 1, - }); + top: 1); var searchResultItems = await searchResult.Results.ToListAsync(); // Write the search result with its score to the console. diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs b/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs index 383baeaae4fa..7c113164114c 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs @@ -80,10 +80,7 @@ await collection.UpsertAsync(new ComplexGlossary // Search the vector store. var searchResult = await collection.VectorizedSearchAsync( searchVector, - new() - { - Top = 1 - }); + top: 1); var searchResultItem = await searchResult.Results.FirstAsync(); // Write the search result with its score to the console. diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs index 37545e0c67e0..e9be1d92180c 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -570,9 +570,9 @@ public async Task CanSearchWithVectorAndFilterAsync() // Act. 
var searchResults = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[4]), + top: 5, new() { - Top = 5, Skip = 3, OldFilter = filter, VectorProperty = record => record.Vector1 @@ -612,9 +612,9 @@ public async Task CanSearchWithTextAndFilterAsync() // Act. var searchResults = await sut.VectorizableTextSearchAsync( "search string", + top: 5, new() { - Top = 5, Skip = 3, OldFilter = filter, VectorProperty = record => record.Vector1 diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 17d4cbb1939e..b4930e4247d2 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -581,11 +581,11 @@ public async Task VectorizedSearchThrowsExceptionWithInvalidVectorTypeAsync(obje // Act & Assert if (exceptionExpected) { - await Assert.ThrowsAsync(async () => await sut.VectorizedSearchAsync(vector)); + await Assert.ThrowsAsync(async () => await sut.VectorizedSearchAsync(vector, top: 3)); } else { - var actual = await sut.VectorizedSearchAsync(vector); + var actual = await sut.VectorizedSearchAsync(vector, top: 3); Assert.NotNull(actual); } @@ -646,10 +646,9 @@ public async Task VectorizedSearchUsesValidQueryAsync( }; // Act - var actual = await sut.VectorizedSearchAsync(vector, new() + var actual = await sut.VectorizedSearchAsync(vector, top: actualTop, new() { VectorProperty = vectorSelector, - Top = actualTop, }); // Assert @@ -675,7 +674,7 @@ public async Task VectorizedSearchThrowsExceptionWithNonExistentVectorPropertyNa var options = new MEVD.VectorSearchOptions { VectorProperty = r => "non-existent-property" }; // Act & Assert - await Assert.ThrowsAsync(async () => await (await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), options)).Results.FirstOrDefaultAsync()); + await Assert.ThrowsAsync(async () => await (await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3, options)).Results.FirstOrDefaultAsync()); } [Fact] @@ -689,7 +688,7 @@ public async Task VectorizedSearchReturnsRecordWithScoreAsync() "collection"); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f])); + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3); // Assert var result = await actual.Results.FirstOrDefaultAsync(); diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index af141aac095c..596e5fc6deda 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -581,7 +581,7 @@ public async Task VectorizedSearchReturnsValidRecordAsync() "collection"); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f])); + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3); var results = await actual.Results.ToListAsync(); var result = results[0]; @@ -603,7 +603,7 @@ public async Task 
VectorizedSearchWithUnsupportedVectorTypeThrowsExceptionAsync( // Act & Assert await Assert.ThrowsAsync(async () => - await (await sut.VectorizedSearchAsync(new List([1, 2, 3]))).Results.ToListAsync()); + await (await sut.VectorizedSearchAsync(new List([1, 2, 3]), top: 3)).Results.ToListAsync()); } [Fact] @@ -618,7 +618,7 @@ public async Task VectorizedSearchWithNonExistentVectorPropertyNameThrowsExcepti // Act & Assert await Assert.ThrowsAsync(async () => - await (await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), searchOptions)).Results.ToListAsync()); + await (await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3, searchOptions)).Results.ToListAsync()); } public static TheoryData, string, bool> CollectionExistsData => new() diff --git a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs index 8a84216757f7..34c5d1f569d9 100644 --- a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs @@ -293,6 +293,7 @@ public async Task CanSearchWithVectorAsync(bool useDefinition, TKey testKe // Act var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), + top: 3, new() { IncludeVectors = true }, this._testCancellationToken); @@ -338,6 +339,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, TK var filter = filterType == "Equality" ? new VectorSearchFilter().EqualTo("Data", $"data {testKey2}") : new VectorSearchFilter().AnyTagEqualTo("Tags", $"tag {testKey2}"); var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), + top: 3, new() { IncludeVectors = true, OldFilter = filter, IncludeTotalCount = true }, this._testCancellationToken); @@ -391,6 +393,7 @@ public async Task CanSearchWithDifferentDistanceFunctionsAsync(string distanceFu // Act var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), + top: 3, new() { IncludeVectors = true }, this._testCancellationToken); @@ -432,7 +435,8 @@ public async Task CanSearchManyRecordsAsync(bool useDefinition) // Act var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - new() { IncludeVectors = true, Top = 10, Skip = 10, IncludeTotalCount = true }, + top: 10, + new() { IncludeVectors = true, Skip = 10, IncludeTotalCount = true }, this._testCancellationToken); // Assert @@ -508,6 +512,7 @@ public async Task ItCanSearchUsingTheGenericDataModelAsync(TKey testKey1, // Act var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory([1, 1, 1, 1]), + top: 3, new() { IncludeVectors = true, VectorProperty = r => r.Vectors["Vector"] }, this._testCancellationToken); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index 54bcc1ed33d3..5d9b8ddc9269 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -287,9 +287,10 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable r } /// - public virtual Task> VectorizedSearchAsync(TVector vector, 
VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { var floatVector = VerifyVectorParam(vector); + Verify.NotLessThan(top, 1); // Resolve options. var internalOptions = options ?? s_defaultVectorSearchOptions; @@ -298,7 +299,7 @@ public virtual Task> VectorizedSearchAsync // Configure search settings. var vectorQueries = new List { - new VectorizedQuery(floatVector) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorProperty.StorageName } } + new VectorizedQuery(floatVector) { KNearestNeighborsCount = top, Fields = { vectorProperty.StorageName } } }; #pragma warning disable CS0618 // VectorSearchFilter is obsolete @@ -316,7 +317,7 @@ public virtual Task> VectorizedSearchAsync var searchOptions = new SearchOptions { VectorSearch = new(), - Size = internalOptions.Top, + Size = top, Skip = internalOptions.Skip, IncludeTotalCount = internalOptions.IncludeTotalCount, }; @@ -343,9 +344,10 @@ public virtual Task> VectorizedSearchAsync } /// - public virtual Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(searchText); + Verify.NotLessThan(top, 1); if (this._model.VectorProperties.Count == 0) { @@ -359,7 +361,7 @@ public virtual Task> VectorizableTextSearchAsync(st // Configure search settings. var vectorQueries = new List { - new VectorizableTextQuery(searchText) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorProperty.StorageName } } + new VectorizableTextQuery(searchText) { KNearestNeighborsCount = top, Fields = { vectorProperty.StorageName } } }; #pragma warning disable CS0618 // VectorSearchFilter is obsolete @@ -377,7 +379,7 @@ public virtual Task> VectorizableTextSearchAsync(st var searchOptions = new SearchOptions { VectorSearch = new(), - Size = internalOptions.Top, + Size = top, Skip = internalOptions.Skip, IncludeTotalCount = internalOptions.IncludeTotalCount, }; @@ -404,10 +406,11 @@ public virtual Task> VectorizableTextSearchAsync(st } /// - public Task> HybridSearchAsync(TVector vector, ICollection keywords, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + public Task> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(keywords); var floatVector = VerifyVectorParam(vector); + Verify.NotLessThan(top, 1); // Resolve options. var internalOptions = options ?? s_defaultKeywordVectorizedHybridSearchOptions; @@ -417,7 +420,7 @@ public Task> HybridSearchAsync(TVector vec // Configure search settings. 
var vectorQueries = new List { - new VectorizedQuery(floatVector) { KNearestNeighborsCount = internalOptions.Top, Fields = { vectorProperty.StorageName } } + new VectorizedQuery(floatVector) { KNearestNeighborsCount = top, Fields = { vectorProperty.StorageName } } }; #pragma warning disable CS0618 // VectorSearchFilter is obsolete @@ -435,7 +438,7 @@ public Task> HybridSearchAsync(TVector vec var searchOptions = new SearchOptions { VectorSearch = new(), - Size = internalOptions.Top, + Size = top, Skip = internalOptions.Skip, Filter = filter, IncludeTotalCount = internalOptions.IncludeTotalCount, diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index 99ea67d05b07..1bf8c8b9110b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -242,10 +242,12 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable r /// public virtual async Task> VectorizedSearchAsync( TVector vector, + int top, MEVD.VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); + Verify.NotLessThan(top, 1); Array vectorArray = vector switch { @@ -273,7 +275,7 @@ public virtual async Task> VectorizedSearchAsync IVectorStoreRecordCollect /// public virtual Task> VectorizedSearchAsync( TVector vector, + int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { @@ -307,6 +308,7 @@ public virtual Task> VectorizedSearchAsync const string ScorePropertyName = "SimilarityScore"; this.VerifyVectorType(vector); + Verify.NotLessThan(top, 1); var searchOptions = options ?? s_defaultVectorSearchOptions; var vectorProperty = this._model.GetVectorPropertyOrSingle(searchOptions); @@ -321,7 +323,7 @@ public virtual Task> VectorizedSearchAsync ScorePropertyName, searchOptions.OldFilter, searchOptions.Filter, - searchOptions.Top, + top, searchOptions.Skip, searchOptions.IncludeVectors); #pragma warning restore CS0618 // Type or member is obsolete @@ -337,12 +339,13 @@ public virtual Task> VectorizedSearchAsync } /// - public Task> HybridSearchAsync(TVector vector, ICollection keywords, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + public Task> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) { const string OperationName = "VectorizedSearch"; const string ScorePropertyName = "SimilarityScore"; this.VerifyVectorType(vector); + Verify.NotLessThan(top, 1); var searchOptions = options ?? 
s_defaultKeywordVectorizedHybridSearchOptions; var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = searchOptions.VectorProperty }); @@ -358,7 +361,7 @@ public Task> HybridSearchAsync(TVector vec ScorePropertyName, searchOptions.OldFilter, searchOptions.Filter, - searchOptions.Top, + top, searchOptions.Skip, searchOptions.IncludeVectors); #pragma warning restore CS0618 // Type or member is obsolete diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index de7d1a83ad5e..a59501d662b1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -240,10 +240,11 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, [E /// #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously - Need to satisfy the interface which returns IAsyncEnumerable - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) #pragma warning restore CS1998 { Verify.NotNull(vector); + Verify.NotLessThan(top, 1); if (vector is not ReadOnlyMemory floatVector) { @@ -294,7 +295,7 @@ public async Task> VectorizedSearchAsync(T var sortedScoredResults = InMemoryVectorStoreCollectionSearchMapping.ShouldSortDescending(vectorProperty.DistanceFunction) ? nonNullResults.OrderByDescending(x => x.score) : nonNullResults.OrderBy(x => x.score); - var resultsPage = sortedScoredResults.Skip(internalOptions.Skip).Take(internalOptions.Top); + var resultsPage = sortedScoredResults.Skip(internalOptions.Skip).Take(top); // Build the response. var vectorSearchResultList = resultsPage.Select(x => new VectorSearchResult((TRecord)x.record, x.score)).ToAsyncEnumerable(); diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index 65788581b57d..48f629f8aa64 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -248,10 +248,12 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable r /// public virtual async Task> VectorizedSearchAsync( TVector vector, + int top, MEVD.VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Array vectorArray = VerifyVectorParam(vector); + Verify.NotLessThan(top, 1); var searchOptions = options ?? s_defaultVectorSearchOptions; var vectorProperty = this._model.GetVectorPropertyOrSingle(searchOptions); @@ -268,7 +270,7 @@ public virtual async Task> VectorizedSearchAsync> VectorizedSearchAsync - public async Task> HybridSearchAsync(TVector vector, ICollection keywords, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) { Array vectorArray = VerifyVectorParam(vector); + Verify.NotLessThan(top, 1); var searchOptions = options ?? 
s_defaultKeywordVectorizedHybridSearchOptions; var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = searchOptions.VectorProperty }); @@ -322,7 +325,7 @@ public async Task> HybridSearchAsync(TVect // Constructing a query to fetch "skip + top" total items // to perform skip logic locally, since skip option is not part of API. - var itemsAmount = searchOptions.Skip + searchOptions.Top; + var itemsAmount = searchOptions.Skip + top; var numCandidates = this._options.NumCandidates ?? itemsAmount * MongoDBConstants.DefaultNumCandidatesRatio; diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 32bcfe852b09..43c744d1d47d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -306,9 +306,10 @@ await this.RunIndexOperationAsync( } /// - public virtual async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); + Verify.NotLessThan(top, 1); if (vector is not ReadOnlyMemory floatVector) { @@ -330,7 +331,7 @@ public virtual async Task> VectorizedSearchAsync keys, CancellationToken cancel } /// - public virtual Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { const string OperationName = "VectorizedSearch"; Verify.NotNull(vector); + Verify.NotLessThan(top, 1); var vectorType = vector.GetType(); @@ -260,7 +261,7 @@ public virtual Task> VectorizedSearchAsync // Simulating skip/offset logic locally, since OFFSET can work only with LIMIT in combination // and LIMIT is not supported in vector search extension, instead of LIMIT - "k" parameter is used. - var limit = searchOptions.Top + searchOptions.Skip; + var limit = top + searchOptions.Skip; return this.RunOperationAsync(OperationName, () => { @@ -269,7 +270,7 @@ public virtual Task> VectorizedSearchAsync this._model, vectorProperty, pgVector, - searchOptions.Top, + top, #pragma warning disable CS0618 // VectorSearchFilter is obsolete searchOptions.OldFilter, #pragma warning restore CS0618 // VectorSearchFilter is obsolete diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index 5ff856fa4daa..df33b1e8e54a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -445,9 +445,10 @@ private async IAsyncEnumerable GetBatchByPointIdAsync( } /// - public virtual async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { var floatVector = VerifyVectorParam(vector); + Verify.NotLessThan(top, 1); // Resolve options. var internalOptions = options ?? s_defaultVectorSearchOptions; @@ -481,7 +482,7 @@ public virtual async Task> VectorizedSearchAsync> VectorizedSearchAsync - public async Task> HybridSearchAsync(TVector vector, ICollection keywords, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) { var floatVector = VerifyVectorParam(vector); + Verify.NotLessThan(top, 1); // Resolve options. var internalOptions = options ?? s_defaultKeywordVectorizedHybridSearchOptions; @@ -565,7 +567,7 @@ public async Task> HybridSearchAsync(TVect this.CollectionName, prefetch: new List() { vectorQuery, keywordQuery }, query: fusionQuery, - limit: (ulong)internalOptions.Top, + limit: (ulong)top, offset: (ulong)internalOptions.Skip, vectorsSelector: vectorsSelector, cancellationToken: cancellationToken)).ConfigureAwait(false); diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index 36916c550c05..656f490c1640 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -317,9 +317,10 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable r } /// - public virtual async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); + Verify.NotLessThan(top, 1); var internalOptions = options ?? s_defaultVectorSearchOptions; var vectorProperty = this._model.GetVectorPropertyOrSingle(internalOptions); @@ -329,6 +330,7 @@ public virtual async Task> VectorizedSearchAsync - public virtual async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); + Verify.NotLessThan(top, 1); var internalOptions = options ?? s_defaultVectorSearchOptions; var vectorProperty = this._model.GetVectorPropertyOrSingle(internalOptions); @@ -399,6 +400,7 @@ public virtual async Task> VectorizedSearchAsync(TVector vector, st /// Build a Redis object from the given vector and options. /// /// The vector to search the database with as a byte array. + /// The maximum number of elements to return. /// The options to configure the behavior of the search. /// The model. /// The vector property. /// The set of fields to limit the results to. Null for all. /// The . - public static Query BuildQuery(byte[] vectorBytes, VectorSearchOptions options, VectorStoreRecordModel model, VectorStoreRecordVectorPropertyModel vectorProperty, string[]? 
selectFields) + public static Query BuildQuery(byte[] vectorBytes, int top, VectorSearchOptions options, VectorStoreRecordModel model, VectorStoreRecordVectorPropertyModel vectorProperty, string[]? selectFields) { // Build search query. - var redisLimit = options.Top + options.Skip; + var redisLimit = top + options.Skip; #pragma warning disable CS0618 // Type or member is obsolete var filter = options switch diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs index 2894828fec29..73b7a4b172de 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs @@ -330,6 +330,7 @@ internal static SqlCommand SelectVector( SqlConnection connection, string? schema, string tableName, VectorStoreRecordVectorPropertyModel vectorProperty, VectorStoreRecordModel model, + int top, VectorSearchOptions options, ReadOnlyMemory vector) { @@ -367,7 +368,7 @@ internal static SqlCommand SelectVector( sb.AppendLine(); // Negative Skip and Top values are rejected by the VectorSearchOptions property setters. // 0 is a legal value for OFFSET. - sb.AppendFormat("OFFSET {0} ROWS FETCH NEXT {1} ROWS ONLY;", options.Skip, options.Top); + sb.AppendFormat("OFFSET {0} ROWS FETCH NEXT {1} ROWS ONLY;", options.Skip, top); command.CommandText = sb.ToString(); return command; diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index 19be43247293..991020d0647d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -400,9 +400,10 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, } /// - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); + Verify.NotLessThan(top, 1); if (vector is not ReadOnlyMemory allowed) { @@ -431,6 +432,7 @@ public async Task> VectorizedSearchAsync(T this.CollectionName, vectorProperty, this._model, + top, searchOptions, allowed); diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 8f6e87c32242..25cd0c07da7a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -184,11 +184,12 @@ public virtual async Task DeleteCollectionAsync(CancellationToken cancellationTo } /// - public virtual Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public virtual Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { const string LimitPropertyName = "k"; Verify.NotNull(vector); + Verify.NotLessThan(top, 1); var vectorType = vector.GetType(); if (!SqliteConstants.SupportedVectorTypes.Contains(vectorType)) @@ -205,7 +206,7 @@ public virtual Task> VectorizedSearchAsync // Simulating skip/offset logic locally, since OFFSET can work only with LIMIT in combination // and LIMIT is not supported in vector search extension, instead of LIMIT - "k" parameter is used. - var limit = searchOptions.Top + searchOptions.Skip; + var limit = top + searchOptions.Skip; var conditions = new List() { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index aa3e934153ec..f50c25d8d573 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -275,12 +275,14 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable rec /// public virtual async Task> VectorizedSearchAsync( TVector vector, + int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { const string OperationName = "VectorSearch"; VerifyVectorParam(vector); + Verify.NotLessThan(top, 1); var searchOptions = options ?? s_defaultVectorSearchOptions; var vectorProperty = this._model.GetVectorPropertyOrSingle(searchOptions); @@ -290,6 +292,7 @@ public virtual async Task> VectorizedSearchAsync> VectorizedSearchAsync - public async Task> HybridSearchAsync(TVector vector, ICollection keywords, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) { const string OperationName = "HybridSearch"; VerifyVectorParam(vector); + Verify.NotLessThan(top, 1); var searchOptions = options ?? s_defaultKeywordVectorizedHybridSearchOptions; var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = searchOptions.VectorProperty }); @@ -309,6 +313,7 @@ public async Task> HybridSearchAsync(TVect var query = WeaviateVectorStoreRecordCollectionQueryBuilder.BuildHybridSearchQuery( vector, + top, string.Join(" ", keywords), this.CollectionName, this._model, diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs index 711cf9f3bc88..3d71dd7255eb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs @@ -24,6 +24,7 @@ public static string BuildSearchQuery( string collectionName, string vectorPropertyName, JsonSerializerOptions jsonSerializerOptions, + int top, VectorSearchOptions searchOptions, VectorStoreRecordModel model) { @@ -47,7 +48,7 @@ public static string BuildSearchQuery( { Get { {{collectionName}} ( - limit: {{searchOptions.Top}} + limit: {{top}} offset: {{searchOptions.Skip}} {{(filter is null ? 
"" : "where: " + filter)}} nearVector: { @@ -73,6 +74,7 @@ public static string BuildSearchQuery( /// public static string BuildHybridSearchQuery( TVector vector, + int top, string keywords, string collectionName, VectorStoreRecordModel model, @@ -101,7 +103,7 @@ public static string BuildHybridSearchQuery( { Get { {{collectionName}} ( - limit: {{searchOptions.Top}} + limit: {{top}} offset: {{searchOptions.Skip}} {{(filter is null ? "" : "where: " + filter)}} hybrid: { diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs index 93ecc470d179..93728bd17067 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs @@ -581,11 +581,11 @@ public async Task VectorizedSearchThrowsExceptionWithInvalidVectorTypeAsync(obje // Act & Assert if (exceptionExpected) { - await Assert.ThrowsAsync(async () => await sut.VectorizedSearchAsync(vector)); + await Assert.ThrowsAsync(async () => await sut.VectorizedSearchAsync(vector, top: 3)); } else { - var actual = await sut.VectorizedSearchAsync(vector); + var actual = await sut.VectorizedSearchAsync(vector, top: 3); Assert.NotNull(actual); } @@ -642,10 +642,9 @@ public async Task VectorizedSearchUsesValidQueryAsync( }; // Act - var actual = await sut.VectorizedSearchAsync(vector, new() + var actual = await sut.VectorizedSearchAsync(vector, top: actualTop, new() { VectorProperty = vectorSelector, - Top = actualTop, }); // Assert @@ -671,7 +670,7 @@ public async Task VectorizedSearchThrowsExceptionWithNonExistentVectorPropertyNa var options = new MEVD.VectorSearchOptions { VectorProperty = r => "non-existent-property" }; // Act & Assert - await Assert.ThrowsAsync(async () => await (await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), options)).Results.FirstOrDefaultAsync()); + await Assert.ThrowsAsync(async () => await (await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3, options)).Results.FirstOrDefaultAsync()); } [Fact] @@ -685,7 +684,7 @@ public async Task VectorizedSearchReturnsRecordWithScoreAsync() "collection"); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f])); + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3); // Assert var result = await actual.Results.FirstOrDefaultAsync(); diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs index d917a20c505a..c8c8a0f79430 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs @@ -565,7 +565,8 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, bo // Act. var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new[] { 1f, 2f, 3f, 4f }), - new() { IncludeVectors = true, OldFilter = filter, Top = 5, Skip = 2 }, + top: 5, + new() { IncludeVectors = true, OldFilter = filter, Skip = 2 }, this._testCancellationToken); // Assert. 
diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs index a2996b31b2c0..8b4324a6a98d 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -452,11 +452,11 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, bool inc // Act. var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new[] { 1f, 2f, 3f, 4f }), + top: 5, new() { IncludeVectors = includeVectors, OldFilter = filter, - Top = 5, Skip = 2 }); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs index 5ef5bb1d319b..5a58000d3b48 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs @@ -468,12 +468,12 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition) // Act. var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new[] { 1f, 2f, 3f, 4f }), + top: 5, new() { IncludeVectors = true, OldFilter = filter, VectorProperty = r => r.Vector1, - Top = 5, Skip = 2 }); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs index e93c182dbb81..eb7be9f4e1fb 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs @@ -73,7 +73,7 @@ public void BuildQueryBuildsRedisQueryWithDefaults() ]); // Act. - var query = RedisVectorStoreCollectionSearchMapping.BuildQuery(byteArray, new VectorSearchOptions(), model, model.VectorProperty, null); + var query = RedisVectorStoreCollectionSearchMapping.BuildQuery(byteArray, top: 3, new VectorSearchOptions(), model, model.VectorProperty, null); // Assert. Assert.NotNull(query); @@ -89,7 +89,7 @@ public void BuildQueryBuildsRedisQueryWithCustomVectorName() // Arrange. var floatVector = new ReadOnlyMemory(new float[] { 1.0f, 2.0f, 3.0f }); var byteArray = MemoryMarshal.AsBytes(floatVector.Span).ToArray(); - var vectorSearchOptions = new VectorSearchOptions { Top = 5, Skip = 3 }; + var vectorSearchOptions = new VectorSearchOptions { Skip = 3 }; var model = BuildModel( [ new VectorStoreRecordKeyProperty("Key", typeof(string)), @@ -98,7 +98,7 @@ public void BuildQueryBuildsRedisQueryWithCustomVectorName() var selectFields = new string[] { "storage_Field1", "storage_Field2" }; // Act. - var query = RedisVectorStoreCollectionSearchMapping.BuildQuery(byteArray, vectorSearchOptions, model, model.VectorProperty, selectFields); + var query = RedisVectorStoreCollectionSearchMapping.BuildQuery(byteArray, top: 5, vectorSearchOptions, model, model.VectorProperty, selectFields); // Assert. 
Assert.NotNull(query); diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs index 2c971daed669..27cf9dc4d82c 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs @@ -78,7 +78,6 @@ HotelName HotelCode Tags var searchOptions = new VectorSearchOptions { Skip = 2, - Top = 3, }; // Act @@ -87,6 +86,7 @@ HotelName HotelCode Tags CollectionName, VectorPropertyName, s_jsonSerializerOptions, + top: 3, searchOptions, this._model); @@ -104,7 +104,6 @@ public void BuildSearchQueryWithIncludedVectorsReturnsValidQuery() var searchOptions = new VectorSearchOptions { Skip = 2, - Top = 3, IncludeVectors = true }; @@ -114,6 +113,7 @@ public void BuildSearchQueryWithIncludedVectorsReturnsValidQuery() CollectionName, VectorPropertyName, s_jsonSerializerOptions, + top: 3, searchOptions, this._model); @@ -131,7 +131,6 @@ public void BuildSearchQueryWithFilterReturnsValidQuery() var searchOptions = new VectorSearchOptions { Skip = 2, - Top = 3, OldFilter = new VectorSearchFilter() .EqualTo("HotelName", "Test Name") .AnyTagEqualTo("Tags", "t1") @@ -143,6 +142,7 @@ public void BuildSearchQueryWithFilterReturnsValidQuery() CollectionName, VectorPropertyName, s_jsonSerializerOptions, + top: 3, searchOptions, this._model); @@ -158,7 +158,6 @@ public void BuildSearchQueryWithInvalidFilterValueThrowsException() var searchOptions = new VectorSearchOptions { Skip = 2, - Top = 3, OldFilter = new VectorSearchFilter().EqualTo("HotelName", new TestFilterValue()) }; @@ -168,6 +167,7 @@ public void BuildSearchQueryWithInvalidFilterValueThrowsException() CollectionName, VectorPropertyName, s_jsonSerializerOptions, + top: 3, searchOptions, this._model)); } @@ -179,7 +179,6 @@ public void BuildSearchQueryWithNonExistentPropertyInFilterThrowsException() var searchOptions = new VectorSearchOptions { Skip = 2, - Top = 3, OldFilter = new VectorSearchFilter().EqualTo("NonExistentProperty", "value") }; @@ -189,6 +188,7 @@ public void BuildSearchQueryWithNonExistentPropertyInFilterThrowsException() CollectionName, VectorPropertyName, s_jsonSerializerOptions, + top: 3, searchOptions, this._model)); } diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs index 5c456296ff89..0016cde2b950 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs @@ -505,7 +505,7 @@ public async Task VectorizedSearchReturnsValidRecordAsync(bool includeVectors) var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); // Act - var actual = await sut.VectorizedSearchAsync(vector, new() + var actual = await sut.VectorizedSearchAsync(vector, top: 3, new() { IncludeVectors = includeVectors }); @@ -547,7 +547,7 @@ public async Task VectorizedSearchWithUnsupportedVectorTypeThrowsExceptionAsync( // Act & Assert await Assert.ThrowsAsync(async () => - await (await sut.VectorizedSearchAsync(new List([1, 2, 3]))).Results.ToListAsync()); + await (await sut.VectorizedSearchAsync(new 
List([1, 2, 3]), top: 3)).Results.ToListAsync()); } [Fact] @@ -560,6 +560,7 @@ public async Task VectorizedSearchWithNonExistentVectorPropertyNameThrowsExcepti await Assert.ThrowsAsync(async () => await (await sut.VectorizedSearchAsync( new ReadOnlyMemory([1f, 2f, 3f]), + top: 3, new() { VectorProperty = r => "non-existent-property" })) .Results.ToListAsync()); } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml index 332b2c435565..d4b4c39149eb 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml +++ b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml @@ -213,14 +213,14 @@ CP0006 - M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,System.Int32,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true CP0006 - M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,System.Int32,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true @@ -262,14 +262,14 @@ CP0006 - M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,System.Int32,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0006 - M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,System.Int32,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true @@ -311,14 +311,14 @@ CP0006 - M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,System.Int32,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0006 - 
M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,System.Int32,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs index 0711cd0aba43..34d5b03d7f78 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs @@ -41,11 +41,6 @@ public class HybridSearchOptions /// public Expression>? AdditionalProperty { get; init; } - /// - /// Gets or sets the maximum number of results to return. - /// - public int Top { get; init; } = 3; - /// /// Gets or sets the number of results to skip before returning results, i.e. the index of the first result to return. /// diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs index 53d2e062fcda..7e089fae04c1 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs @@ -18,12 +18,14 @@ public interface IKeywordHybridSearch /// The type of the vector. /// The vector to search the store with. /// A collection of keywords to search the store with. + /// The maximum number of results to return. /// The options that control the behavior of the search. /// The to monitor for cancellation requests. The default is . /// The records found by the hybrid search, including their result scores. Task> HybridSearchAsync( TVector vector, ICollection keywords, + int top, HybridSearchOptions? options = default, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs index 5368c5301828..f300bed658c3 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs @@ -15,11 +15,13 @@ public interface IVectorizableTextSearch /// Searches the vector store for records that match the given text and filter. The text string will be vectorized downstream and used for the vector search. /// /// The text to search the store with. + /// The maximum number of results to return. /// The options that control the behavior of the search. /// The to monitor for cancellation requests. The default is . /// The records found by the vector search, including their result scores. Task> VectorizableTextSearchAsync( string searchText, + int top, VectorSearchOptions? 
options = default, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs index b2a5a54194a6..b7a5de5c19d2 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs @@ -16,11 +16,13 @@ public interface IVectorizedSearch /// /// The type of the vector. /// The vector to search the store with. + /// The maximum number of results to return. /// The options that control the behavior of the search. /// The to monitor for cancellation requests. The default is . /// The records found by the vector search, including their result scores. Task> VectorizedSearchAsync( TVector vector, + int top, VectorSearchOptions? options = default, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs index 7f6cc16f5dfa..221c5c790606 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs @@ -10,7 +10,7 @@ namespace Microsoft.Extensions.VectorData; /// public class VectorSearchOptions { - private int _top = 3, _skip = 0; + private int _skip = 0; /// /// Gets or sets a search filter to use before doing the vector search. @@ -44,24 +44,6 @@ public class VectorSearchOptions /// public Expression>? VectorProperty { get; init; } - /// - /// Gets or sets the maximum number of results to return. - /// - /// Thrown when the value is less than 1. - public int Top - { - get => this._top; - init - { - if (value < 1) - { - throw new ArgumentOutOfRangeException(nameof(value), "Top must be greater than or equal to 1."); - } - - this._top = value; - } - } - /// /// Gets or sets the number of results to skip before returning results, that is, the index of the first result to return. /// diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs index 115740f99576..552bc508e4af 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -65,6 +65,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe var embedding = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"); var actual = await sut.VectorizedSearchAsync( embedding, + top: 3, new() { IncludeVectors = true, @@ -347,6 +348,7 @@ public async Task ItCanSearchWithVectorAndFiltersAsync(string option, bool inclu var filter = option == "equality" ? 
new VectorSearchFilter().EqualTo("HotelName", "Hotel 3") : new VectorSearchFilter().AnyTagEqualTo("Tags", "bar"); var actual = await sut.VectorizedSearchAsync( await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"), + top: 3, new() { IncludeVectors = includeVectors, @@ -387,6 +389,7 @@ public async Task ItCanSearchWithVectorizableTextAndFiltersAsync() var filter = new VectorSearchFilter().EqualTo("HotelName", "Hotel 3"); var actual = await sut.VectorizableTextSearchAsync( "A hotel with great views.", + top: 3, new() { VectorProperty = r => r.DescriptionEmbedding, diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 73f1799a5706..7466797a78d6 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -343,7 +343,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f])); + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3); // Assert var searchResults = await actual.Results.ToListAsync(); @@ -374,9 +374,8 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() { - Top = 2, Skip = 2 }); @@ -407,7 +406,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() { OldFilter = new VectorSearchFilter().EqualTo(nameof(AzureCosmosDBMongoDBHotel.HotelName), "My Hotel key2") }); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 4bff2e354b1b..c66afc788e59 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -276,7 +276,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f])); + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3); // Assert var searchResults = await actual.Results.ToListAsync(); @@ -307,9 +307,8 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, 
hotel1]).ToListAsync(); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() { - Top = 2, Skip = 2 }); @@ -341,10 +340,9 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync(VectorSearc await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 4, new() { OldFilter = filter, - Top = 4, }); // Assert diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs index 2305590062e4..1b406a5bc61e 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs @@ -97,7 +97,7 @@ public async Task VectorSearchShouldReturnExpectedScoresAsync(string distanceFun await Task.Delay(this.DelayAfterUploadInMilliseconds); // Act - var searchResult = await sut.VectorizedSearchAsync(baseVector); + var searchResult = await sut.VectorizedSearchAsync(baseVector, top: 3); // Assert var results = await searchResult.Results.ToListAsync(); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs index 83732c19c620..8dc1698f1efd 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs @@ -344,7 +344,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f])); + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3); // Assert var searchResults = await actual.Results.ToListAsync(); @@ -375,9 +375,8 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() { - Top = 2, Skip = 2 }); @@ -408,7 +407,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() { OldFilter = new VectorSearchFilter().EqualTo(nameof(MongoDBHotel.HotelName), "My Hotel key2") }); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs index f322339faaed..6077f6605be0 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs 
+++ b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs @@ -367,7 +367,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync(bool include await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([0.9f, 0.1f, 0.5f, 0.8f]), new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([0.9f, 0.1f, 0.5f, 0.8f]), top: 3, new() { IncludeVectors = includeVectors }); @@ -405,10 +405,9 @@ public async Task VectorizedSearchWithEqualToFilterReturnsValidResultsAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 29f, 28f, 27f]), new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 29f, 28f, 27f]), top: 5, new() { IncludeVectors = false, - Top = 5, OldFilter = new([ new EqualToFilterClause("HotelRating", 2.5f) ]) @@ -438,10 +437,9 @@ public async Task VectorizedSearchWithAnyTagFilterReturnsValidResultsAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 29f, 28f, 27f]), new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 29f, 28f, 27f]), top: 5, new() { IncludeVectors = false, - Top = 5, OldFilter = new([ new AnyTagEqualToFilterClause("Tags", "tag2") ]) diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs index 727cb3185257..6b98bf10eb03 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs @@ -68,6 +68,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool hasNamedVec var vector = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"); var actual = await sut.VectorizedSearchAsync( vector, + top: 3, new() { OldFilter = new VectorSearchFilter().EqualTo("HotelCode", 30).AnyTagEqualTo("Tags", "t2") }); // Assert @@ -400,6 +401,7 @@ public async Task ItCanSearchWithFilterAsync(bool useRecordDefinition, string co var filter = filterType == "equality" ? 
new VectorSearchFilter().EqualTo("HotelName", "My Hotel 13").EqualTo("LastRenovationDate", new DateTimeOffset(2020, 02, 01, 0, 0, 0, TimeSpan.Zero)) : new VectorSearchFilter().AnyTagEqualTo("Tags", "t13.2"); var actual = await sut.VectorizedSearchAsync( vector, + top: 3, new() { OldFilter = filter diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs index af0a2382c7dd..2beb8c7f92ba 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -67,6 +67,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe var actual = await sut .VectorizedSearchAsync( new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }), + top: 3, new() { OldFilter = new VectorSearchFilter().EqualTo("HotelCode", 1), IncludeVectors = true }); // Assert @@ -318,6 +319,7 @@ public async Task ItCanSearchWithFloat32VectorAndFilterAsync(string filterType, // Act var actual = await sut.VectorizedSearchAsync( vector, + top: 3, new() { IncludeVectors = includeVectors, @@ -362,9 +364,9 @@ public async Task ItCanSearchWithFloat32VectorAndTopSkipAsync() // Act var actual = await sut.VectorizedSearchAsync( vector, + top: 3, new() { - Top = 3, Skip = 2 }); @@ -392,10 +394,10 @@ public async Task ItCanSearchWithFloat64VectorAsync(bool includeVectors) // Act var actual = await sut.VectorizedSearchAsync( vector, + top: 1, new() { IncludeVectors = includeVectors, - Top = 1 }); // Assert diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs index 611c74593d18..0939866d218c 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs @@ -66,6 +66,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe var getResult = await sut.GetAsync("Upsert-10", new GetRecordOptions { IncludeVectors = true }); var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }), + top: 3, new() { OldFilter = new VectorSearchFilter().EqualTo("HotelCode", 10) }); // Assert @@ -348,6 +349,7 @@ public async Task ItCanSearchWithFloat32VectorAndFilterAsync(string filterType) // Act var actual = await sut.VectorizedSearchAsync( vector, + top: 3, new() { IncludeVectors = true, OldFilter = filter }); // Assert @@ -386,9 +388,9 @@ public async Task ItCanSearchWithFloat32VectorAndTopSkipAsync() // Act var actual = await sut.VectorizedSearchAsync( vector, + top: 3, new() { - Top = 3, Skip = 2 }); @@ -416,10 +418,10 @@ public async Task ItCanSearchWithFloat64VectorAsync(bool includeVectors) // Act var actual = await sut.VectorizedSearchAsync( vector, + top: 1, new() { IncludeVectors = includeVectors, - Top = 1 }); // Assert diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs index d910cec21cf5..688c2967fd93 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs +++ 
b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs @@ -353,7 +353,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync(bool include await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() { IncludeVectors = includeVectors }); @@ -390,9 +390,8 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() { - Top = 2, Skip = 2 }); @@ -424,7 +423,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() { OldFilter = new VectorSearchFilter().EqualTo(nameof(SqliteHotel.HotelName), "My Hotel key2") }); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs index d384089d0ad7..4e10d17566f6 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs @@ -226,7 +226,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync(bool include await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() { IncludeVectors = includeVectors }); @@ -264,9 +264,8 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() { - Top = 2, Skip = 2 }); @@ -298,10 +297,9 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync(VectorSearc await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), new() + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 4, new() { OldFilter = filter, - Top = 4, }); // Assert @@ -343,10 +341,9 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAndDifferentDataT await sut.UpsertAsync([hotel4, hotel2, hotel5, hotel3, hotel1]).ToListAsync(); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([40f, 40f, 40f, 40f]), new() + var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([40f, 40f, 40f, 40f]), top: 4, new() { OldFilter = filter, - Top = 4, }); // Assert diff --git 
a/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs b/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs index 143c61f69e5f..4831e1720c9c 100644 --- a/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs +++ b/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs @@ -102,11 +102,11 @@ public Task>> GenerateEmbeddingsAsync(IList protected sealed class VectorizedSearchWrapper(IVectorizedSearch vectorizedSearch, ITextEmbeddingGenerationService textEmbeddingGeneration) : IVectorizableTextSearch { /// - public async Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { var vectorizedQuery = await textEmbeddingGeneration!.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); - return await vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, options, cancellationToken); + return await vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, top, options, cancellationToken); } } diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs b/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs index 73a2eefc0fc0..688bc7d76541 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs @@ -175,4 +175,12 @@ internal static void ValidHostnameSegment(string hostNameSegment, [CallerArgumen throw new ArgumentException($"The location '{hostNameSegment}' is not valid. Location must start and end with alphanumeric characters and can contain hyphens and underscores.", paramName); } } + + internal static void NotLessThan(int value, int limit, [CallerArgumentExpression(nameof(value))] string? paramName = null) + { + if (value < limit) + { + throw new ArgumentOutOfRangeException(paramName, $"{paramName} must be greater than or equal to {limit}."); + } + } } diff --git a/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs b/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs index b39976adbebf..b6f824a16b0f 100644 --- a/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs +++ b/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs @@ -13,7 +13,7 @@ public MockVectorizableTextSearch(IEnumerable> searc this._searchResults = ToAsyncEnumerable(searchResults); } - public Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { return Task.FromResult(new VectorSearchResults(this._searchResults)); } diff --git a/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs b/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs index 68ae09c883d5..a0103059d72c 100644 --- a/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs +++ b/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs @@ -203,17 +203,16 @@ private async Task> ExecuteVectorSearchAsync(string OldFilter = searchOptions.Filter?.FilterClauses is not null ? 
new VectorSearchFilter(searchOptions.Filter.FilterClauses) : null, #pragma warning restore CS0618 // VectorSearchFilter is obsolete Skip = searchOptions.Skip, - Top = searchOptions.Top, }; if (this._vectorizedSearch is not null) { var vectorizedQuery = await this._textEmbeddingGeneration!.GenerateEmbeddingAsync(query, cancellationToken: cancellationToken).ConfigureAwait(false); - return await this._vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, vectorSearchOptions, cancellationToken).ConfigureAwait(false); + return await this._vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, searchOptions.Top, vectorSearchOptions, cancellationToken).ConfigureAwait(false); } - return await this._vectorizableTextSearch!.VectorizableTextSearchAsync(query, vectorSearchOptions, cancellationToken).ConfigureAwait(false); + return await this._vectorizableTextSearch!.VectorizableTextSearchAsync(query, searchOptions.Top, vectorSearchOptions, cancellationToken).ConfigureAwait(false); } /// diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs index c01fe06eddf4..968ff4fdac7f 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs @@ -126,10 +126,10 @@ public Task>> GenerateEmbeddingsAsync(IList public sealed class VectorizedSearchWrapper(IVectorizedSearch vectorizedSearch, ITextEmbeddingGenerationService textEmbeddingGeneration) : IVectorizableTextSearch { /// - public async Task> VectorizableTextSearchAsync(string searchText, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { var vectorizedQuery = await textEmbeddingGeneration!.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); - return await vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, options, cancellationToken); + return await vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, top, options, cancellationToken); } } diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs index 41ae4910bfb4..d6b987571a69 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs @@ -82,14 +82,14 @@ public async Task RecordCRUD() received = await collection.GetAsync(updated.Id, new() { IncludeVectors = true }); AssertEquality(updated, received); - VectorSearchResult vectorSearchResult = await (await collection.VectorizedSearchAsync(inserted.Floats, new() + VectorSearchResult vectorSearchResult = await (await collection.VectorizedSearchAsync(inserted.Floats, top: 3, new() { VectorProperty = r => r.Floats, IncludeVectors = true })).Results.SingleAsync(); AssertEquality(updated, vectorSearchResult.Record); - vectorSearchResult = await (await collection.VectorizedSearchAsync(inserted.Floats, new() + vectorSearchResult = await (await collection.VectorizedSearchAsync(inserted.Floats, top: 3, new() { VectorProperty = r => r.Floats, IncludeVectors = false diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs index dd03c1b1bda7..18c28e089c48 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs @@ -245,10 +245,10 @@ protected virtual async Task TestFilterAsync( var results = await fixture.Collection.VectorizedSearchAsync( new ReadOnlyMemory([1, 2, 3]), + top: fixture.TestData.Count, new() { - Filter = filter, - Top = fixture.TestData.Count + Filter = filter }); var actual = await results.Results.Select(r => r.Record).OrderBy(r => r.Key).ToListAsync(); @@ -280,10 +280,10 @@ protected virtual async Task TestLegacyFilterAsync( var results = await fixture.Collection.VectorizedSearchAsync( new ReadOnlyMemory([1, 2, 3]), + top: fixture.TestData.Count, new() { - OldFilter = legacyFilter, - Top = fixture.TestData.Count + OldFilter = legacyFilter }); var actual = await results.Results.Select(r => r.Record).OrderBy(r => r.Key).ToListAsync(); diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs index c25bb065ba74..38e4bdf85c35 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs @@ -29,7 +29,7 @@ public async Task SearchShouldReturnExpectedResultsAsync() // Act // All records have the same vector, but the third contains Grapes, 
so searching for // Grapes should return the third record first. - var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["Grapes"]); + var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["Grapes"], top: 3); // Assert var results = await searchResult.Results.ToListAsync(); @@ -55,7 +55,7 @@ public async Task SearchWithFilterShouldReturnExpectedResultsAsync() OldFilter = new VectorSearchFilter().EqualTo("Code", 1) }; #pragma warning restore CS0618 // Type or member is obsolete - var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], options); + var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], top: 3, options); // Assert var results = await searchResult.Results.ToListAsync(); @@ -75,7 +75,7 @@ public async Task SearchWithTopShouldReturnExpectedResultsAsync() // Act // All records have the same vector, but the second contains Oranges, so the // second should be returned first. - var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], new() { Top = 1 }); + var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], top: 1); // Assert var results = await searchResult.Results.ToListAsync(); @@ -95,7 +95,7 @@ public async Task SearchWithSkipShouldReturnExpectedResultsAsync() // Act // All records have the same vector, but the first and third contain healthy, // so when skipping the first two results, we should get the second record. - var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["healthy"], new() { Skip = 2 }); + var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["healthy"], top: 3, new() { Skip = 2 }); // Assert var results = await searchResult.Results.ToListAsync(); @@ -113,7 +113,7 @@ public async Task SearchWithMultipleKeywordsShouldRankMatchedKeywordsHigherAsync var vector = new ReadOnlyMemory([1, 0, 0, 0]); // Act - var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["tangy", "nourishing"]); + var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["tangy", "nourishing"], top: 3); // Assert var results = await searchResult.Results.ToListAsync(); @@ -133,8 +133,8 @@ public async Task SearchWithMultiTextRecordSearchesRequestedFieldAsync() var vector = new ReadOnlyMemory([1, 0, 0, 0]); // Act - var searchResult1 = await hybridSearch!.HybridSearchAsync(vector, ["Apples"], new() { AdditionalProperty = r => r.Text2 }); - var searchResult2 = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], new() { AdditionalProperty = r => r.Text2 }); + var searchResult1 = await hybridSearch!.HybridSearchAsync(vector, ["Apples"], top: 3, new() { AdditionalProperty = r => r.Text2 }); + var searchResult2 = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], top: 3, new() { AdditionalProperty = r => r.Text2 }); // Assert var results1 = await searchResult1.Results.ToListAsync(); diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs index cd92e070abf8..20ce25343299 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs @@ -86,9 +86,9 @@ public virtual async Task WaitForDataAsync( { var results = await collection.VectorizedSearchAsync( new ReadOnlyMemory(vector), + top: recordCount, new() { - Top = recordCount, // In some databases (Azure AI Search), the data shows 
up but the filtering index isn't yet updated, // so filtered searches show empty results. Add a filter to the seed data check below. Filter = filter diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs index ee23bb92dc9c..e811372d0735 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs @@ -103,11 +103,11 @@ protected async Task SimpleSearch(string distanceFunction, double expectedExactM { await collection.UpsertAsync(insertedRecords).ToArrayAsync(); - var searchResult = await collection.VectorizedSearchAsync(baseVector); + var searchResult = await collection.VectorizedSearchAsync(baseVector, top: 3); var results = await searchResult.Results.ToListAsync(); VerifySearchResults(expectedRecords, expectedScores, results, includeVectors: false); - searchResult = await collection.VectorizedSearchAsync(baseVector, new() { IncludeVectors = true }); + searchResult = await collection.VectorizedSearchAsync(baseVector, top: 3, new() { IncludeVectors = true }); results = await searchResult.Results.ToListAsync(); VerifySearchResults(expectedRecords, expectedScores, results, includeVectors: true); @@ -116,10 +116,10 @@ protected async Task SimpleSearch(string distanceFunction, double expectedExactM for (int top = Math.Max(1, skip); top <= insertedRecords.Count; top++) { searchResult = await collection.VectorizedSearchAsync(baseVector, + top: top, new() { Skip = skip, - Top = top, IncludeVectors = true }); results = await searchResult.Results.ToListAsync(); From 802a14dfa1945891006dc15a5d7aed34998ef45b Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Fri, 4 Apr 2025 16:45:55 +0100 Subject: [PATCH 30/63] .Net: Add skip validation for hybrid search options (#11380) ### Motivation and Context #11270 ### Description Add skip validation for hybrid search options Not adding for top since we have a parallel pr to remove top ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- .../VectorSearch/HybridSearchOptions.cs | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs index 34d5b03d7f78..65a8a025207b 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs @@ -10,6 +10,8 @@ namespace Microsoft.Extensions.VectorData; /// public class HybridSearchOptions { + private int _skip = 0; + /// /// Gets or sets a search filter to use before doing the hybrid search. 
/// @@ -42,9 +44,22 @@ public class HybridSearchOptions public Expression>? AdditionalProperty { get; init; } /// - /// Gets or sets the number of results to skip before returning results, i.e. the index of the first result to return. + /// Gets or sets the number of results to skip before returning results, that is, the index of the first result to return. /// - public int Skip { get; init; } = 0; + /// Thrown when the value is less than 0. + public int Skip + { + get => this._skip; + init + { + if (value < 0) + { + throw new ArgumentOutOfRangeException(nameof(value), "Skip must be greater than or equal to 0."); + } + + this._skip = value; + } + } /// /// Gets or sets a value indicating whether to include vectors in the retrieval result. From f77e266c4651f4f3c288d03acab148ff9e8e161d Mon Sep 17 00:00:00 2001 From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com> Date: Mon, 7 Apr 2025 07:27:01 -0700 Subject: [PATCH 31/63] .Net: [MEVD] Added GetService and Metadata for vector store abstractions (#11055) ### Motivation and Context Related: https://github.com/microsoft/semantic-kernel/issues/11013 This PR marks `IVectorStoreRecordCollection.CollectionName` property as obsolete in favor of `Metadata` classes and `GetService` method (the approach which is also used in MEAI). `GetService` method is added to each vector store interface and each implementation is updated to return `Metadata` classes or underlying services used for breaking-glass scenarios. (**Note: this is a breaking change for already existing implementations, since new `GetService` method should be implemented**). The changes allow to: - Extend vector store related metadata by exposing not only `CollectionName`, but other properties like `VectorStoreName`, `DatabaseName` and any other potential properties in the future. These properties are going to be used in telemetry decorators, which will be implemented in separate PR. - Get access to underlying services used by vector store implementations for breaking-glass scenarios (when functionality from abstraction is not enough to handle a specific use case). 
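
For context, a minimal usage sketch of the new surface (illustrative only, not part of this patch; it assumes an existing `IVectorStore` instance backed by the Azure AI Search connector shown in the diff below, and the `vectorStore` variable name is hypothetical):

```csharp
using Azure.Search.Documents.Indexes;      // SearchIndexClient
using Microsoft.Extensions.VectorData;     // IVectorStore, VectorStoreMetadata

// Assumes 'vectorStore' was created elsewhere, e.g. from an AzureAISearchVectorStore.
IVectorStore vectorStore = GetVectorStoreSomehow();

// Store-level metadata, e.g. for telemetry decorators.
var metadata = vectorStore.GetService(typeof(VectorStoreMetadata)) as VectorStoreMetadata;
Console.WriteLine($"{metadata?.VectorStoreSystemName}: {metadata?.VectorStoreName}");

// Breaking-glass scenario: reach the underlying client when the abstraction
// does not cover a specific use case (returns null for other store types).
var searchIndexClient = vectorStore.GetService(typeof(SearchIndexClient)) as SearchIndexClient;
```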
@westey-m @roji ### Contribution Checklist - [x] The code builds clean without any errors or warnings - [x] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [x] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- dotnet/samples/Concepts/Concepts.csproj | 2 +- .../TextEmbeddingVectorStore.cs | 10 + ...extEmbeddingVectorStoreRecordCollection.cs | 10 + .../Concepts/Search/VectorStore_TextSearch.cs | 10 + ...ISearchVectorStoreRecordCollectionTests.cs | 1 + .../AzureAISearchVectorStoreTests.cs | 1 + .../AzureAISearchConstants.cs | 2 + .../AzureAISearchVectorStore.cs | 30 ++- ...zureAISearchVectorStoreRecordCollection.cs | 38 ++- .../AzureCosmosDBMongoDBConstants.cs | 8 + .../AzureCosmosDBMongoDBVectorStore.cs | 22 ++ ...mosDBMongoDBVectorStoreRecordCollection.cs | 39 ++- .../AzureCosmosDBNoSQLConstants.cs | 2 + .../AzureCosmosDBNoSQLVectorStore.cs | 22 ++ ...osmosDBNoSQLVectorStoreRecordCollection.cs | 32 ++- .../InMemoryConstants.cs | 8 + .../InMemoryVectorStore.cs | 26 ++ .../InMemoryVectorStoreRecordCollection.cs | 24 +- .../MongoDBVectorStore.cs | 22 ++ .../MongoDBVectorStoreRecordCollection.cs | 43 +++- .../PineconeConstants.cs | 8 + .../PineconeVectorStore.cs | 25 +- .../PineconeVectorStoreRecordCollection.cs | 40 +++- .../IPostgresVectorStoreDbClient.cs | 5 + .../PostgresConstants.cs | 4 +- .../PostgresVectorStore.cs | 31 ++- .../PostgresVectorStoreDbClient.cs | 4 + .../PostgresVectorStoreRecordCollection.cs | 37 ++- .../PostgresVectorStoreUtils.cs | 12 +- .../QdrantConstants.cs | 8 + .../QdrantVectorStore.cs | 24 +- .../QdrantVectorStoreRecordCollection.cs | 43 +++- .../Connectors.Memory.Redis/RedisConstants.cs | 8 + ...RedisHashSetVectorStoreRecordCollection.cs | 36 ++- .../RedisJsonVectorStoreRecordCollection.cs | 42 +++- .../RedisVectorStore.cs | 25 +- .../ExceptionWrapper.cs | 6 +- .../SqlServerConstants.cs | 2 + .../SqlServerVectorStore.cs | 24 ++ .../SqlServerVectorStoreRecordCollection.cs | 29 ++- .../SqliteConstants.cs | 2 + .../SqliteVectorStore.cs | 23 ++ .../SqliteVectorStoreRecordCollection.cs | 34 ++- .../WeaviateConstants.cs | 4 +- .../WeaviateVectorStore.cs | 23 +- .../WeaviateVectorStoreRecordCollection.cs | 32 ++- ...ostgresVectorStoreRecordCollectionTests.cs | 1 + .../PostgresVectorStoreTests.cs | 4 + ...HashSetVectorStoreRecordCollectionTests.cs | 1 + ...disJsonVectorStoreRecordCollectionTests.cs | 1 + .../RedisVectorStoreTests.cs | 1 + dotnet/src/Connectors/Directory.Build.props | 2 +- .../CompatibilitySuppressions.xml | 223 +++++++++++------- .../VectorData.Abstractions.csproj | 2 +- .../VectorSearch/IKeywordHybridSearch.cs | 15 ++ .../VectorSearch/IVectorizableTextSearch.cs | 15 ++ .../VectorSearch/IVectorizedSearch.cs | 15 ++ .../VectorStorage/IVectorStore.cs | 15 ++ .../VectorStorage/VectorStoreMetadata.cs | 23 ++ .../VectorStoreRecordCollectionMetadata.cs | 28 +++ .../Data/BaseVectorStoreTextSearchTests.cs | 10 + .../IntegrationTests/IntegrationTests.csproj | 2 +- .../Memory/MongoDB/MongoDBConstants.cs | 2 + .../Search/MockVectorizableTextSearch.cs | 10 + .../Data/VectorStoreTextSearchTestBase.cs | 10 + .../SemanticKernel.UnitTests.csproj | 2 +- .../CRUD/SqlServerBatchConformanceTests.cs | 6 +- .../SqlServerIntegrationTests.csproj | 2 +- .../Collections/CollectionConformanceTests.cs | 36 
++- 69 files changed, 1092 insertions(+), 217 deletions(-) create mode 100644 dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBConstants.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryConstants.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeConstants.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantConstants.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.Redis/RedisConstants.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreMetadata.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreRecordCollectionMetadata.cs diff --git a/dotnet/samples/Concepts/Concepts.csproj b/dotnet/samples/Concepts/Concepts.csproj index 0ac3dc6a4586..808841ad2d37 100644 --- a/dotnet/samples/Concepts/Concepts.csproj +++ b/dotnet/samples/Concepts/Concepts.csproj @@ -8,7 +8,7 @@ false true - $(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001,CA1724 + $(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001,CA1724,MEVD9000 Library 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 diff --git a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs index 6848b38af48f..4fd62592adf3 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs @@ -40,6 +40,16 @@ public IVectorStoreRecordCollection GetCollection( return embeddingStore; } + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + ArgumentNullException.ThrowIfNull(serviceType); + + return + serviceKey is null && serviceType.IsInstanceOfType(this) ? this : + this._decoratedVectorStore.GetService(serviceType, serviceKey); + } + /// public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) { diff --git a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs index 9702e938b2d9..8e53b42711af 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs @@ -132,6 +132,16 @@ public async Task> VectorizableTextSearchAsync(stri return await this.VectorizedSearchAsync(embeddingValue, top, options, cancellationToken).ConfigureAwait(false); } + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + ArgumentNullException.ThrowIfNull(serviceType); + + return + serviceKey is null && serviceType.IsInstanceOfType(this) ? this : + this._decoratedVectorStoreRecordCollection.GetService(serviceType, serviceKey); + } + /// /// Generate and add embeddings for each embedding field that has a on the provided record. 
/// diff --git a/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs b/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs index f5d39c702790..490a379c5cd8 100644 --- a/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs +++ b/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs @@ -150,6 +150,16 @@ public async Task> VectorizableTextSearchAsync(stri return await vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, top, options, cancellationToken); } + + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + ArgumentNullException.ThrowIfNull(serviceType); + + return + serviceKey is null && serviceType.IsInstanceOfType(this) ? this : + vectorizedSearch.GetService(serviceType, serviceKey); + } } /// diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs index e9be1d92180c..9cdc26661a0c 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -41,6 +41,7 @@ public AzureAISearchVectorStoreRecordCollectionTests() this._searchClientMock = new Mock(MockBehavior.Strict); this._searchIndexClientMock = new Mock(MockBehavior.Strict); this._searchIndexClientMock.Setup(x => x.GetSearchClient(TestCollectionName)).Returns(this._searchClientMock.Object); + this._searchIndexClientMock.Setup(x => x.ServiceName).Returns("TestService"); } [Theory] diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreTests.cs index b79b048a5f38..f4eed15172fb 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreTests.cs @@ -32,6 +32,7 @@ public AzureAISearchVectorStoreTests() this._searchClientMock = new Mock(MockBehavior.Strict); this._searchIndexClientMock = new Mock(MockBehavior.Strict); this._searchIndexClientMock.Setup(x => x.GetSearchClient(TestCollectionName)).Returns(this._searchClientMock.Object); + this._searchIndexClientMock.Setup(x => x.ServiceName).Returns("TestService"); } [Fact] diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs index 75ceb3b8a632..0ae13cf923ab 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs @@ -8,6 +8,8 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; internal static class AzureAISearchConstants { + internal const string VectorStoreSystemName = "azure.aisearch"; + /// A set of types that a key on the provided model may have. 
private static readonly HashSet s_supportedKeyTypes = [typeof(string)]; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs index 409c36d1e05a..b270d699b6ec 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs @@ -19,8 +19,8 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// public class AzureAISearchVectorStore : IVectorStore { - /// The name of this database for telemetry purposes. - private const string DatabaseName = "AzureAISearch"; + /// Metadata about vector store. + private readonly VectorStoreMetadata _metadata; /// Azure AI Search client that can be used to manage the list of indices in an Azure AI Search Service. private readonly SearchIndexClient _searchIndexClient; @@ -39,6 +39,12 @@ public AzureAISearchVectorStore(SearchIndexClient searchIndexClient, AzureAISear this._searchIndexClient = searchIndexClient; this._options = options ?? new AzureAISearchVectorStoreOptions(); + + this._metadata = new() + { + VectorStoreSystemName = AzureAISearchConstants.VectorStoreSystemName, + VectorStoreName = searchIndexClient.ServiceName + }; } /// @@ -83,6 +89,19 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat } } + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreMetadata) ? this._metadata : + serviceType == typeof(SearchIndexClient) ? this._searchIndexClient : + serviceType.IsInstanceOfType(this) ? this : + null; + } + /// /// Helper method to get the next index name from the enumerator with a try catch around the move next call to convert /// any to , since try catch is not supported @@ -90,7 +109,8 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat /// /// The enumerator to get the next result from. /// A value indicating whether there are more results and the current string if true. 
- private static async Task<(string name, bool more)> GetNextIndexNameAsync(ConfiguredCancelableAsyncEnumerable.Enumerator enumerator) + private static async Task<(string name, bool more)> GetNextIndexNameAsync( + ConfiguredCancelableAsyncEnumerable.Enumerator enumerator) { const string OperationName = "GetIndexNames"; @@ -103,7 +123,7 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = AzureAISearchConstants.VectorStoreSystemName, OperationName = OperationName }; } @@ -111,7 +131,7 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = AzureAISearchConstants.VectorStoreSystemName, OperationName = OperationName }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index 5d9b8ddc9269..daed4951624a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -30,8 +30,8 @@ public class AzureAISearchVectorStoreRecordCollection : IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { - /// The name of this database for telemetry purposes. - private const string DatabaseName = "AzureAISearch"; + /// Metadata about vector store record collection. + private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -95,7 +95,13 @@ public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexCli { this._mapper = new AzureAISearchGenericDataModelMapper(this._model) as IVectorStoreRecordMapper; } -#pragma warning restore CS0618 + + this._collectionMetadata = new() + { + VectorStoreSystemName = AzureAISearchConstants.VectorStoreSystemName, + VectorStoreName = searchIndexClient.ServiceName, + CollectionName = collectionName + }; } /// @@ -117,7 +123,7 @@ public virtual async Task CollectionExistsAsync(CancellationToken cancella { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = AzureAISearchConstants.VectorStoreSystemName, CollectionName = this._collectionName, OperationName = "GetIndex" }; @@ -462,6 +468,20 @@ public Task> HybridSearchAsync(TVector vec return this.SearchAndMapToDataModelAsync(keywordsCombined, searchOptions, internalOptions.IncludeVectors, cancellationToken); } + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreRecordCollectionMetadata) ? this._collectionMetadata : + serviceType == typeof(SearchIndexClient) ? this._searchIndexClient : + serviceType == typeof(SearchClient) ? this._searchClient : + serviceType.IsInstanceOfType(this) ? this : + null; + } + /// /// Get the document with the given key and map it to the data model using the configured mapper type. 
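[Editor's note, not part of the patch] The hunks above add a `GetService(Type serviceType, object? serviceKey = null)` resolution method to the vector store abstractions and have each connector answer it with a new `VectorStoreRecordCollectionMetadata` (system name, store name, collection name) plus the underlying SDK client. A minimal sketch of how a caller might probe that surface is below; the helper name and record/key type parameters are placeholders, and the namespace is assumed to be `Microsoft.Extensions.VectorData` from the VectorData.Abstractions project.

```csharp
using System;
using Microsoft.Extensions.VectorData;

static class CollectionMetadataProbe
{
    // Illustrative only: inspect a collection's telemetry metadata via GetService.
    public static void LogCollectionInfo<TKey, TRecord>(IVectorStoreRecordCollection<TKey, TRecord> collection)
        where TKey : notnull
    {
        // Connectors updated in this patch answer a null-serviceKey request for
        // VectorStoreRecordCollectionMetadata with their metadata instance.
        var metadata = collection.GetService(typeof(VectorStoreRecordCollectionMetadata))
            as VectorStoreRecordCollectionMetadata;

        if (metadata is not null)
        {
            Console.WriteLine($"{metadata.VectorStoreSystemName} / {metadata.VectorStoreName} / {metadata.CollectionName}");
        }

        // Unrecognized service types (or a non-null serviceKey) yield null rather than throwing.
        Console.WriteLine(collection.GetService(typeof(Uri)) is null); // prints True
    }
}
```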
/// @@ -491,7 +511,7 @@ public Task> HybridSearchAsync(TVector vec } return VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + AzureAISearchConstants.VectorStoreSystemName, this._collectionName, OperationName, () => this._mapper!.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); @@ -554,7 +574,7 @@ private Task> MapToStorageModelAndUploadDocumentA if (this._mapper is not null) { var jsonObjects = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + AzureAISearchConstants.VectorStoreSystemName!, this._collectionName, OperationName, () => records.Select(this._mapper!.MapFromDataToStorageModel)); @@ -582,7 +602,7 @@ private async IAsyncEnumerable> MapSearchResultsAsyn await foreach (var result in results.ConfigureAwait(false)) { var document = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + AzureAISearchConstants.VectorStoreSystemName, this._collectionName, operationName, () => this._mapper!.MapFromStorageToDataModel(result.Document, new() { IncludeVectors = includeVectors })); @@ -666,7 +686,7 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = AzureAISearchConstants.VectorStoreSystemName, CollectionName = this._collectionName, OperationName = operationName }; @@ -675,7 +695,7 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = AzureAISearchConstants.VectorStoreSystemName, CollectionName = this._collectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBConstants.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBConstants.cs new file mode 100644 index 000000000000..b6a003c3e548 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBConstants.cs @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; + +internal static class AzureCosmosDBMongoDBConstants +{ + public const string VectorStoreSystemName = "azure.cosmosdbmongodb"; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs index 76dc9e8500a4..7a94c003a28c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs @@ -17,6 +17,9 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; /// public class AzureCosmosDBMongoDBVectorStore : IVectorStore { + /// Metadata about vector store. + private readonly VectorStoreMetadata _metadata; + /// that can be used to manage the collections in Azure CosmosDB MongoDB. private readonly IMongoDatabase _mongoDatabase; @@ -34,6 +37,12 @@ public AzureCosmosDBMongoDBVectorStore(IMongoDatabase mongoDatabase, AzureCosmos this._mongoDatabase = mongoDatabase; this._options = options ?? 
new(); + + this._metadata = new() + { + VectorStoreSystemName = AzureCosmosDBMongoDBConstants.VectorStoreSystemName, + VectorStoreName = mongoDatabase.DatabaseNamespace?.DatabaseName + }; } /// @@ -75,4 +84,17 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat } } } + + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreMetadata) ? this._metadata : + serviceType == typeof(IMongoDatabase) ? this._mongoDatabase : + serviceType.IsInstanceOfType(this) ? this : + null; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index 1bf8c8b9110b..2a48e74ecea3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -26,8 +26,8 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; public class AzureCosmosDBMongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { - /// The name of this database for telemetry purposes. - private const string DatabaseName = "AzureCosmosDBMongoDB"; + /// Metadata about vector store record collection. + private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; /// Property name to be used for search similarity score value. private const string ScorePropertyName = "similarityScore"; @@ -80,6 +80,13 @@ public AzureCosmosDBMongoDBVectorStoreRecordCollection( this._options = options ?? 
new AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions(); this._model = new MongoDBModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); this._mapper = this.InitializeMapper(); + + this._collectionMetadata = new() + { + VectorStoreSystemName = AzureCosmosDBMongoDBConstants.VectorStoreSystemName, + VectorStoreName = mongoDatabase.DatabaseNamespace?.DatabaseName, + CollectionName = collectionName + }; } /// @@ -95,7 +102,7 @@ public virtual async Task CreateCollectionAsync(CancellationToken cancellationTo { throw new VectorStoreOperationException("Collection already exists.") { - VectorStoreType = DatabaseName, + VectorStoreType = AzureCosmosDBMongoDBConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = "CreateCollection" }; @@ -160,7 +167,7 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = } return VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + AzureCosmosDBMongoDBConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(record, new() { IncludeVectors = includeVectors })); @@ -187,7 +194,7 @@ public virtual async IAsyncEnumerable GetAsync( if (record is not null) { yield return VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + AzureCosmosDBMongoDBConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(record, new())); @@ -205,7 +212,7 @@ public virtual Task UpsertAsync(TRecord record, CancellationToken cancel var replaceOptions = new ReplaceOptions { IsUpsert = true }; var storageModel = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + AzureCosmosDBMongoDBConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -310,6 +317,20 @@ public virtual async Task> VectorizedSearchAsync(this.EnumerateAndMapSearchResultsAsync(cursor, searchOptions, cancellationToken)); } + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreRecordCollectionMetadata) ? this._collectionMetadata : + serviceType == typeof(IMongoDatabase) ? this._mongoDatabase : + serviceType == typeof(IMongoCollection) ? this._mongoCollection : + serviceType.IsInstanceOfType(this) ? 
this : + null; + } + #region private private async Task CreateIndexesAsync(string collectionName, CancellationToken cancellationToken) @@ -383,7 +404,7 @@ private async IAsyncEnumerable> EnumerateAndMapSearc { var score = response[ScorePropertyName].AsDouble; var record = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + AzureCosmosDBMongoDBConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(response[DocumentPropertyName].AsBsonDocument, new())); @@ -422,7 +443,7 @@ private async Task RunOperationAsync(string operationName, Func operation) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = AzureCosmosDBMongoDBConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = operationName }; @@ -439,7 +460,7 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = AzureCosmosDBMongoDBConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLConstants.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLConstants.cs index 6dbb0d440b45..f667488e7fd9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLConstants.cs @@ -4,6 +4,8 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; internal static class AzureCosmosDBNoSQLConstants { + internal const string VectorStoreSystemName = "azure.cosmosdbnosql"; + /// /// Reserved key property name in Azure CosmosDB NoSQL. /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs index 39320e0a8ae2..0f1cc01c4b29 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs @@ -17,6 +17,9 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// public class AzureCosmosDBNoSQLVectorStore : IVectorStore { + /// Metadata about vector store. + private readonly VectorStoreMetadata _metadata; + /// that can be used to manage the collections in Azure CosmosDB NoSQL. private readonly Database _database; @@ -34,6 +37,12 @@ public AzureCosmosDBNoSQLVectorStore(Database database, AzureCosmosDBNoSQLVector this._database = database; this._options = options ?? new(); + + this._metadata = new() + { + VectorStoreSystemName = AzureCosmosDBNoSQLConstants.VectorStoreSystemName, + VectorStoreName = database.Id + }; } /// @@ -84,4 +93,17 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat } } } + + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreMetadata) ? this._metadata : + serviceType == typeof(Database) ? this._database : + serviceType.IsInstanceOfType(this) ? 
this : + null; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index 9a490a032060..d1941dc52129 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -29,8 +29,8 @@ public class AzureCosmosDBNoSQLVectorStoreRecordCollection : IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect { - /// The name of this database for telemetry purposes. - private const string DatabaseName = "AzureCosmosDBNoSQL"; + /// Metadata about vector store record collection. + private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -105,6 +105,13 @@ public AzureCosmosDBNoSQLVectorStoreRecordCollection( // If partition key is not provided, use key property as a partition key. this._partitionKeyProperty = this._model.KeyProperty; } + + this._collectionMetadata = new() + { + VectorStoreSystemName = AzureCosmosDBNoSQLConstants.VectorStoreSystemName, + VectorStoreName = database.Id, + CollectionName = collectionName + }; } /// @@ -378,6 +385,19 @@ public Task> HybridSearchAsync(TVector vec #endregion + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreRecordCollectionMetadata) ? this._collectionMetadata : + serviceType == typeof(Database) ? this._database : + serviceType.IsInstanceOfType(this) ? this : + null; + } + #region private private void VerifyVectorType(TVector? 
vector) @@ -404,7 +424,7 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = AzureCosmosDBNoSQLConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = operationName }; @@ -564,7 +584,7 @@ private async IAsyncEnumerable InternalGetAsync( await foreach (var jsonObject in this.GetItemsAsync(queryDefinition, cancellationToken).ConfigureAwait(false)) { yield return VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + AzureCosmosDBNoSQLConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); @@ -580,7 +600,7 @@ private async Task InternalUpsertAsync( const string OperationName = "UpsertItem"; var jsonObject = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + AzureCosmosDBNoSQLConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -660,7 +680,7 @@ private async IAsyncEnumerable> MapSearchResultsAsyn jsonObject.Remove(scorePropertyName); var record = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + AzureCosmosDBNoSQLConstants.VectorStoreSystemName, this.CollectionName, operationName, () => this._mapper.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryConstants.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryConstants.cs new file mode 100644 index 000000000000..af318ef12622 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryConstants.cs @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Connectors.InMemory; + +internal static class InMemoryConstants +{ + internal const string VectorStoreSystemName = "inmemory"; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs index 2db7013b0d27..3710cfea1dcd 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs @@ -14,6 +14,9 @@ namespace Microsoft.SemanticKernel.Connectors.InMemory; /// public sealed class InMemoryVectorStore : IVectorStore { + /// Metadata about vector store. + private readonly VectorStoreMetadata _metadata; + /// Internal storage for the record collection. private readonly ConcurrentDictionary> _internalCollection; @@ -26,6 +29,11 @@ public sealed class InMemoryVectorStore : IVectorStore public InMemoryVectorStore() { this._internalCollection = new(); + + this._metadata = new() + { + VectorStoreSystemName = InMemoryConstants.VectorStoreSystemName, + }; } /// @@ -35,6 +43,11 @@ public InMemoryVectorStore() internal InMemoryVectorStore(ConcurrentDictionary> internalCollection) { this._internalCollection = internalCollection; + + this._metadata = new() + { + VectorStoreSystemName = InMemoryConstants.VectorStoreSystemName + }; } /// @@ -59,4 +72,17 @@ public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cance { return this._internalCollection.Keys.ToAsyncEnumerable(); } + + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? 
null : + serviceType == typeof(VectorStoreMetadata) ? this._metadata : + serviceType == typeof(ConcurrentDictionary>) ? this._internalCollection : + serviceType.IsInstanceOfType(this) ? this : + null; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index a59501d662b1..0608ae865587 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -23,6 +23,9 @@ public sealed class InMemoryVectorStoreRecordCollection : IVector #pragma warning restore CA1711 // Identifiers should not have incorrect suffix where TKey : notnull { + /// Metadata about vector store record collection. + private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; + /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -103,6 +106,12 @@ public InMemoryVectorStoreRecordCollection(string collectionName, InMemoryVector return property.GetValueAsObject(record!); }; #pragma warning restore MEVD9000 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + + this._collectionMetadata = new() + { + VectorStoreSystemName = InMemoryConstants.VectorStoreSystemName, + CollectionName = collectionName + }; } /// @@ -144,7 +153,7 @@ public Task CreateCollectionAsync(CancellationToken cancellationToken = default) return Task.FromException(new VectorStoreOperationException("Collection already exists.") { - VectorStoreType = "InMemory", + VectorStoreType = InMemoryConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = "CreateCollection" }); @@ -302,6 +311,19 @@ public async Task> VectorizedSearchAsync(T return new VectorSearchResults(vectorSearchResultList) { TotalCount = count }; } + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreRecordCollectionMetadata) ? this._collectionMetadata : + serviceType == typeof(ConcurrentDictionary>) ? this._internalCollections : + serviceType.IsInstanceOfType(this) ? this : + null; + } + /// /// Get the collection dictionary from the internal storage, throws if it does not exist. /// diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs index 27169e3e9557..47f79724b382 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs @@ -17,6 +17,9 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// public class MongoDBVectorStore : IVectorStore { + /// Metadata about vector store. + private readonly VectorStoreMetadata _metadata; + /// that can be used to manage the collections in MongoDB. private readonly IMongoDatabase _mongoDatabase; @@ -34,6 +37,12 @@ public MongoDBVectorStore(IMongoDatabase mongoDatabase, MongoDBVectorStoreOption this._mongoDatabase = mongoDatabase; this._options = options ?? 
new(); + + this._metadata = new() + { + VectorStoreSystemName = MongoDBConstants.VectorStoreSystemName, + VectorStoreName = mongoDatabase.DatabaseNamespace?.DatabaseName + }; } /// @@ -75,4 +84,17 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat } } } + + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreMetadata) ? this._metadata : + serviceType == typeof(IMongoDatabase) ? this._mongoDatabase : + serviceType.IsInstanceOfType(this) ? this : + null; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index 48f629f8aa64..8652fb70ebfc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -23,8 +23,8 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; public class MongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { - /// The name of this database for telemetry purposes. - private const string DatabaseName = "MongoDB"; + /// Metadata about vector store record collection. + private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; /// Property name to be used for search similarity score value. private const string ScorePropertyName = "similarityScore"; @@ -80,6 +80,13 @@ public MongoDBVectorStoreRecordCollection( this._options = options ?? new MongoDBVectorStoreRecordCollectionOptions(); this._model = new MongoDBModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); this._mapper = this.InitializeMapper(); + + this._collectionMetadata = new() + { + VectorStoreSystemName = MongoDBConstants.VectorStoreSystemName, + VectorStoreName = mongoDatabase.DatabaseNamespace?.DatabaseName, + CollectionName = collectionName + }; } /// @@ -95,7 +102,7 @@ public virtual async Task CreateCollectionAsync(CancellationToken cancellationTo { throw new VectorStoreOperationException("Collection already exists.") { - VectorStoreType = DatabaseName, + VectorStoreType = MongoDBConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = "CreateCollection" }; @@ -166,7 +173,7 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = } return VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + MongoDBConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(record, new() { IncludeVectors = includeVectors })); @@ -193,7 +200,7 @@ public virtual async IAsyncEnumerable GetAsync( if (record is not null) { yield return VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + MongoDBConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(record, new())); @@ -211,7 +218,7 @@ public virtual Task UpsertAsync(TRecord record, CancellationToken cancel var replaceOptions = new ReplaceOptions { IsUpsert = true }; var storageModel = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + MongoDBConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -358,6 +365,20 @@ public async 
Task> HybridSearchAsync(TVect cancellationToken).ConfigureAwait(false); } + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreRecordCollectionMetadata) ? this._collectionMetadata : + serviceType == typeof(IMongoDatabase) ? this._mongoDatabase : + serviceType == typeof(IMongoCollection) ? this._mongoCollection : + serviceType.IsInstanceOfType(this) ? this : + null; + } + #region private private async Task CreateIndexesAsync(string collectionName, CancellationToken cancellationToken) @@ -468,7 +489,7 @@ private async IAsyncEnumerable> EnumerateAndMapSearc { var score = response[ScorePropertyName].AsDouble; var record = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + MongoDBConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(response[DocumentPropertyName].AsBsonDocument, new() { IncludeVectors = includeVectors })); @@ -507,7 +528,7 @@ private async Task RunOperationAsync(string operationName, Func operation) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = MongoDBConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = operationName }; @@ -524,7 +545,7 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = MongoDBConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = operationName }; @@ -555,7 +576,7 @@ private async Task RunOperationWithRetryAsync( { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = MongoDBConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = operationName }; @@ -589,7 +610,7 @@ private async Task RunOperationWithRetryAsync( { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = MongoDBConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeConstants.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeConstants.cs new file mode 100644 index 000000000000..a8b4cfadfed2 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeConstants.cs @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Connectors.Pinecone; + +internal static class PineconeConstants +{ + internal const string VectorStoreSystemName = "pinecone"; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs index a072ea6e7336..7e8da0194443 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs @@ -18,11 +18,12 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// public class PineconeVectorStore : IVectorStore { - private const string DatabaseName = "Pinecone"; - private readonly Sdk.PineconeClient _pineconeClient; private readonly PineconeVectorStoreOptions _options; + /// Metadata about vector store. 
+ private readonly VectorStoreMetadata _metadata; + /// /// Initializes a new instance of the class. /// @@ -34,6 +35,11 @@ public PineconeVectorStore(Sdk.PineconeClient pineconeClient, PineconeVectorStor this._pineconeClient = pineconeClient; this._options = options ?? new PineconeVectorStoreOptions(); + + this._metadata = new() + { + VectorStoreSystemName = PineconeConstants.VectorStoreSystemName + }; } /// @@ -71,7 +77,7 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = PineconeConstants.VectorStoreSystemName, OperationName = "ListCollections" }; } @@ -84,4 +90,17 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat } } } + + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreMetadata) ? this._metadata : + serviceType == typeof(Sdk.PineconeClient) ? this._pineconeClient : + serviceType.IsInstanceOfType(this) ? this : + null; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 43c744d1d47d..201bf69cb3fe 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -22,10 +22,11 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; public class PineconeVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { - private const string DatabaseName = "Pinecone"; - private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); + /// Metadata about vector store record collection. + private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; + private readonly Sdk.PineconeClient _pineconeClient; private readonly PineconeVectorStoreRecordCollectionOptions _options; private readonly VectorStoreRecordModel _model; @@ -59,6 +60,12 @@ public PineconeVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, st #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete this._mapper = this._options.VectorCustomMapper ?? 
new PineconeVectorStoreRecordMapper(this._model); #pragma warning restore CS0618 + + this._collectionMetadata = new() + { + VectorStoreSystemName = PineconeConstants.VectorStoreSystemName, + CollectionName = collectionName + }; } /// @@ -134,7 +141,7 @@ public virtual async Task DeleteCollectionAsync(CancellationToken cancellationTo { throw new VectorStoreOperationException("Call to vector store failed.", other) { - VectorStoreType = DatabaseName, + VectorStoreType = PineconeConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = "DeleteCollection" }; @@ -164,7 +171,7 @@ public virtual async Task DeleteCollectionAsync(CancellationToken cancellationTo StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = options?.IncludeVectors is true }; return VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + PineconeConstants.VectorStoreSystemName, this.CollectionName, "Get", () => this._mapper.MapFromStorageToDataModel(result, mapperOptions)); @@ -200,7 +207,7 @@ public virtual async IAsyncEnumerable GetAsync( StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = options?.IncludeVectors is true }; var records = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + PineconeConstants.VectorStoreSystemName, this.CollectionName, "GetBatch", () => response.Vectors.Values.Select(x => this._mapper.MapFromStorageToDataModel(x, mapperOptions))); @@ -255,7 +262,7 @@ public virtual async Task UpsertAsync(TRecord record, CancellationToken Verify.NotNull(record); var vector = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + PineconeConstants.VectorStoreSystemName, this.CollectionName, "Upsert", () => this._mapper.MapFromDataToStorageModel(record)); @@ -279,7 +286,7 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable r Verify.NotNull(records); var vectors = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + PineconeConstants.VectorStoreSystemName, this.CollectionName, "UpsertBatch", () => records.Select(this._mapper.MapFromDataToStorageModel).ToList()); @@ -354,7 +361,7 @@ public virtual async Task> VectorizedSearchAsync skippedResults.Select(x => new VectorSearchResult(this._mapper.MapFromStorageToDataModel(new Sdk.Vector() @@ -369,6 +376,19 @@ public virtual async Task> VectorizedSearchAsync + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreRecordCollectionMetadata) ? this._collectionMetadata : + serviceType == typeof(Sdk.PineconeClient) ? this._pineconeClient : + serviceType.IsInstanceOfType(this) ? 
this : + null; + } + private async Task RunIndexOperationAsync(string operationName, Func> operation) { try @@ -387,7 +407,7 @@ private async Task RunIndexOperationAsync(string operationName, Func RunCollectionOperationAsync(string operationName, Func< { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = PineconeConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs index 20adedde23af..9b3ff9273182 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs @@ -22,6 +22,11 @@ internal interface IPostgresVectorStoreDbClient /// NpgsqlDataSource DataSource { get; } + /// + /// The name of the database. + /// + string? DatabaseName { get; } + /// /// Check if a table exists. /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresConstants.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresConstants.cs index f7d490503b43..21e0f003d4eb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresConstants.cs @@ -9,8 +9,8 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; internal static class PostgresConstants { - /// The name of this database for telemetry purposes. - public const string DatabaseName = "Postgres"; + /// The name of this vector store for telemetry purposes. + public const string VectorStoreSystemName = "postgresql"; /// Validation options. public static readonly VectorStoreRecordModelBuildingOptions ModelBuildingOptions = new() diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs index a1840384be84..933f61173ebe 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs @@ -17,6 +17,9 @@ public class PostgresVectorStore : IVectorStore private readonly NpgsqlDataSource? _dataSource; private readonly PostgresVectorStoreOptions _options; + /// Metadata about vector store. + private readonly VectorStoreMetadata _metadata; + /// /// Initializes a new instance of the class. /// @@ -27,6 +30,12 @@ public PostgresVectorStore(NpgsqlDataSource dataSource, PostgresVectorStoreOptio this._dataSource = dataSource; this._options = options ?? new PostgresVectorStoreOptions(); this._postgresClient = new PostgresVectorStoreDbClient(this._dataSource, this._options.Schema); + + this._metadata = new() + { + VectorStoreSystemName = PostgresConstants.VectorStoreSystemName, + VectorStoreName = this._postgresClient.DatabaseName + }; } /// @@ -38,15 +47,20 @@ internal PostgresVectorStore(IPostgresVectorStoreDbClient postgresDbClient, Post { this._postgresClient = postgresDbClient; this._options = options ?? 
new PostgresVectorStoreOptions(); + + this._metadata = new() + { + VectorStoreSystemName = PostgresConstants.VectorStoreSystemName, + VectorStoreName = this._postgresClient.DatabaseName + }; } /// public virtual IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) { - const string OperationName = "ListCollectionNames"; return PostgresVectorStoreUtils.WrapAsyncEnumerableAsync( this._postgresClient.GetTablesAsync(cancellationToken), - OperationName + "ListCollectionNames" ); } @@ -69,4 +83,17 @@ public virtual IVectorStoreRecordCollection GetCollection ?? throw new InvalidOperationException("Failed to cast record collection."); } + + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreMetadata) ? this._metadata : + serviceType == typeof(NpgsqlDataSource) ? this._dataSource : + serviceType.IsInstanceOfType(this) ? this : + null; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs index fc097e848881..178f60589503 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs @@ -27,10 +27,14 @@ internal class PostgresVectorStoreDbClient(NpgsqlDataSource dataSource, string s { private readonly string _schema = schema; + private readonly NpgsqlConnectionStringBuilder _connectionStringBuilder = new(dataSource.ConnectionString); + private IPostgresVectorStoreCollectionSqlBuilder _sqlBuilder = new PostgresVectorStoreCollectionSqlBuilder(); public NpgsqlDataSource DataSource { get; } = dataSource; + public string? DatabaseName => this._connectionStringBuilder.Database; + /// public async Task DoesTableExistsAsync(string tableName, CancellationToken cancellationToken = default) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index eb9eae628b67..e9b5be465ae8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -25,6 +25,9 @@ public class PostgresVectorStoreRecordCollection : IVectorStoreRe /// public string CollectionName { get; } + /// Metadata about vector store record collection. + private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; + /// Postgres client that is used to interact with the database. private readonly IPostgresVectorStoreDbClient _client; @@ -79,6 +82,13 @@ internal PostgresVectorStoreRecordCollection(IPostgresVectorStoreDbClient client #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete this._mapper = this._options.DictionaryCustomMapper ?? 
new PostgresVectorStoreRecordMapper(this._model); #pragma warning restore CS0618 + + this._collectionMetadata = new() + { + VectorStoreSystemName = PostgresConstants.VectorStoreSystemName, + VectorStoreName = this._client.DatabaseName, + CollectionName = collectionName + }; } /// @@ -123,7 +133,7 @@ public virtual Task UpsertAsync(TRecord record, CancellationToken cancella const string OperationName = "Upsert"; var storageModel = VectorStoreErrorHandler.RunModelConversion( - PostgresConstants.DatabaseName, + PostgresConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -150,7 +160,7 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable rec const string OperationName = "UpsertBatch"; var storageModels = records.Select(record => VectorStoreErrorHandler.RunModelConversion( - PostgresConstants.DatabaseName, + PostgresConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record))).ToList(); @@ -184,7 +194,7 @@ await this.RunOperationAsync(OperationName, () => if (row is null) { return default; } return VectorStoreErrorHandler.RunModelConversion( - PostgresConstants.DatabaseName, + PostgresConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(row, new() { IncludeVectors = includeVectors })); @@ -204,7 +214,7 @@ public virtual IAsyncEnumerable GetAsync(IEnumerable keys, GetRec this._client.GetBatchAsync(this.CollectionName, keys, this._model, includeVectors, cancellationToken) .SelectAsync(row => VectorStoreErrorHandler.RunModelConversion( - PostgresConstants.DatabaseName, + PostgresConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(row, new() { IncludeVectors = includeVectors })), @@ -281,7 +291,7 @@ public virtual Task> VectorizedSearchAsync .SelectAsync(result => { var record = VectorStoreErrorHandler.RunModelConversion( - PostgresConstants.DatabaseName, + PostgresConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel( @@ -295,6 +305,19 @@ public virtual Task> VectorizedSearchAsync }); } + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreRecordCollectionMetadata) ? this._collectionMetadata : + serviceType == typeof(NpgsqlDataSource) ? this._client.DataSource : + serviceType.IsInstanceOfType(this) ? 
this : + null; + } + private Task InternalCreateCollectionAsync(bool ifNotExists, CancellationToken cancellationToken = default) { return this._client.CreateTableAsync(this.CollectionName, this._model, ifNotExists, cancellationToken); @@ -310,7 +333,7 @@ private async Task RunOperationAsync(string operationName, Func operation) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = PostgresConstants.DatabaseName, + VectorStoreType = PostgresConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = operationName }; @@ -327,7 +350,7 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = PostgresConstants.DatabaseName, + VectorStoreType = PostgresConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreUtils.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreUtils.cs index 27fa7181bdc5..c97280f7f929 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreUtils.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreUtils.cs @@ -19,7 +19,10 @@ internal static class PostgresVectorStoreUtils /// The name of the operation being performed. /// The name of the collection being operated on. /// An async enumerable that will throw a if an exception is thrown while iterating over the original enumerator. - public static async IAsyncEnumerable WrapAsyncEnumerableAsync(IAsyncEnumerable asyncEnumerable, string operationName, string? collectionName = null) + public static async IAsyncEnumerable WrapAsyncEnumerableAsync( + IAsyncEnumerable asyncEnumerable, + string operationName, + string? collectionName = null) { var enumerator = asyncEnumerable.ConfigureAwait(false).GetAsyncEnumerator(); @@ -39,7 +42,10 @@ public static async IAsyncEnumerable WrapAsyncEnumerableAsync(IAsyncEnumer /// The name of the operation being performed. /// The name of the collection being operated on. /// A value indicating whether there are more results and the current string if true. - public static async Task<(T item, bool more)> GetNextAsync(ConfiguredCancelableAsyncEnumerable.Enumerator enumerator, string operationName, string? collectionName = null) + public static async Task<(T item, bool more)> GetNextAsync( + ConfiguredCancelableAsyncEnumerable.Enumerator enumerator, + string operationName, + string? collectionName = null) { try { @@ -50,7 +56,7 @@ public static async IAsyncEnumerable WrapAsyncEnumerableAsync(IAsyncEnumer { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = PostgresConstants.DatabaseName, + VectorStoreType = PostgresConstants.VectorStoreSystemName, CollectionName = collectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantConstants.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantConstants.cs new file mode 100644 index 000000000000..6e983cb76806 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantConstants.cs @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +namespace Microsoft.SemanticKernel.Connectors.Qdrant; + +internal static class QdrantConstants +{ + internal const string VectorStoreSystemName = "qdrant"; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs index bfac788a7cfd..ee2f4f0ec35f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs @@ -18,8 +18,8 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// public class QdrantVectorStore : IVectorStore { - /// The name of this database for telemetry purposes. - private const string DatabaseName = "Qdrant"; + /// Metadata about vector store. + private readonly VectorStoreMetadata _metadata; /// Qdrant client that can be used to manage the collections and points in a Qdrant store. private readonly MockableQdrantClient _qdrantClient; @@ -48,6 +48,11 @@ internal QdrantVectorStore(MockableQdrantClient qdrantClient, QdrantVectorStoreO this._qdrantClient = qdrantClient; this._options = options ?? new QdrantVectorStoreOptions(); + + this._metadata = new() + { + VectorStoreSystemName = QdrantConstants.VectorStoreSystemName + }; } /// @@ -88,7 +93,7 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = QdrantConstants.VectorStoreSystemName, OperationName = "ListCollections" }; } @@ -98,4 +103,17 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat yield return collection; } } + + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreMetadata) ? this._metadata : + serviceType == typeof(QdrantClient) ? this._qdrantClient.QdrantClient : + serviceType.IsInstanceOfType(this) ? this : + null; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index df33b1e8e54a..d2a3a45b1c82 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -26,15 +26,15 @@ public class QdrantVectorStoreRecordCollection : IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { + /// Metadata about vector store record collection. + private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; + /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); /// The default options for hybrid vector search. private static readonly HybridSearchOptions s_defaultKeywordVectorizedHybridSearchOptions = new(); - /// The name of this database for telemetry purposes. - private const string DatabaseName = "Qdrant"; - /// The name of the upsert operation for telemetry purposes. private const string UpsertName = "Upsert"; @@ -96,6 +96,12 @@ internal QdrantVectorStoreRecordCollection(MockableQdrantClient qdrantClient, st #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete this._mapper = this._options.PointStructCustomMapper ?? 
new QdrantVectorStoreRecordMapper(this._model, this._options.HasNamedVectors); #pragma warning restore CS0618 + + this._collectionMetadata = new() + { + VectorStoreSystemName = QdrantConstants.VectorStoreSystemName, + CollectionName = collectionName + }; } /// @@ -318,7 +324,7 @@ public virtual async Task UpsertAsync(TRecord record, CancellationToken c // Create point from record. var pointStruct = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + QdrantConstants.VectorStoreSystemName, this._collectionName, UpsertName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -337,7 +343,7 @@ async Task IVectorStoreRecordCollection.UpsertAsync(TRecord // Create point from record. var pointStruct = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + QdrantConstants.VectorStoreSystemName, this._collectionName, UpsertName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -356,7 +362,7 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable re // Create points from records. var pointStructs = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + QdrantConstants.VectorStoreSystemName, this._collectionName, UpsertName, () => records.Select(this._mapper.MapFromDataToStorageModel).ToList()); @@ -379,7 +385,7 @@ async IAsyncEnumerable IVectorStoreRecordCollection.UpsertA // Create points from records. var pointStructs = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + QdrantConstants.VectorStoreSystemName, this._collectionName, UpsertName, () => records.Select(this._mapper.MapFromDataToStorageModel).ToList()); @@ -437,7 +443,7 @@ private async IAsyncEnumerable GetBatchByPointIdAsync( } yield return VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + QdrantConstants.VectorStoreSystemName, this._collectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(pointStruct, new() { IncludeVectors = includeVectors })); @@ -492,7 +498,7 @@ public virtual async Task> VectorizedSearchAsync> HybridSearchAsync(TVect point, this._mapper, internalOptions.IncludeVectors, - DatabaseName, + QdrantConstants.VectorStoreSystemName, this._collectionName, "Query")); return new VectorSearchResults(mappedResults.ToAsyncEnumerable()); } + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreRecordCollectionMetadata) ? this._collectionMetadata : + serviceType == typeof(QdrantClient) ? this._qdrantClient.QdrantClient : + serviceType.IsInstanceOfType(this) ? 
this : + null; + } + /// /// Run the given operation and wrap any with ."/> /// @@ -600,7 +619,7 @@ private async Task RunOperationAsync(string operationName, Func operation) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = QdrantConstants.VectorStoreSystemName, CollectionName = this._collectionName, OperationName = operationName }; @@ -624,7 +643,7 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = QdrantConstants.VectorStoreSystemName, CollectionName = this._collectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisConstants.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisConstants.cs new file mode 100644 index 000000000000..8d3e442ff671 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisConstants.cs @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Connectors.Redis; + +internal static class RedisConstants +{ + internal const string VectorStoreSystemName = "redis"; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index 656f490c1640..4929c4bb052f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -23,8 +23,8 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; public class RedisHashSetVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { - /// The name of this database for telemetry purposes. - private const string DatabaseName = "Redis"; + /// Metadata about vector store record collection. + private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; internal static readonly VectorStoreRecordModelBuildingOptions ModelBuildingOptions = new() { @@ -113,6 +113,13 @@ public RedisHashSetVectorStoreRecordCollection(IDatabase database, string collec #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete this._mapper = this._options.HashEntriesCustomMapper ?? new RedisHashSetVectorStoreRecordMapper(this._model); #pragma warning restore CS0618 + + this._collectionMetadata = new() + { + VectorStoreSystemName = RedisConstants.VectorStoreSystemName, + VectorStoreName = database.Database.ToString(), + CollectionName = collectionName + }; } /// @@ -134,7 +141,7 @@ public virtual async Task CollectionExistsAsync(CancellationToken cancella { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = RedisConstants.VectorStoreSystemName, CollectionName = this._collectionName, OperationName = "FT.INFO" }; @@ -223,7 +230,7 @@ await this.RunOperationAsync("FT.DROPINDEX", // Convert to the caller's data model. return VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + RedisConstants.VectorStoreSystemName, this._collectionName, operationName, () => @@ -281,7 +288,7 @@ public virtual async Task UpsertAsync(TRecord record, CancellationToken // Map. 
var redisHashSetRecord = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + RedisConstants.VectorStoreSystemName, this._collectionName, "HSET", () => this._mapper.MapFromDataToStorageModel(record)); @@ -351,7 +358,7 @@ public virtual async Task> VectorizedSearchAsync @@ -370,6 +377,19 @@ public virtual async Task> VectorizedSearchAsync(mappedResults.ToAsyncEnumerable()); } + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreRecordCollectionMetadata) ? this._collectionMetadata : + serviceType == typeof(IDatabase) ? this._database : + serviceType.IsInstanceOfType(this) ? this : + null; + } + /// /// Prefix the key with the collection name if the option is set. /// @@ -419,7 +439,7 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = RedisConstants.VectorStoreSystemName, CollectionName = this._collectionName, OperationName = operationName }; @@ -442,7 +462,7 @@ private async Task RunOperationAsync(string operationName, Func operation) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = RedisConstants.VectorStoreSystemName, CollectionName = this._collectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index c1a1d283985f..7c3631aab808 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -26,6 +26,9 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; public class RedisJsonVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { + /// Metadata about vector store record collection. + private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; + internal static readonly VectorStoreRecordModelBuildingOptions ModelBuildingOptions = new() { RequiresAtLeastOneVector = false, @@ -48,9 +51,6 @@ public class RedisJsonVectorStoreRecordCollection : IVectorStoreRecordC typeof(ReadOnlyMemory?) ]; - /// The name of this database for telemetry purposes. - private const string DatabaseName = "Redis"; - /// The default options for vector search. 
private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -121,6 +121,13 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectio this._mapper = new RedisJsonVectorStoreRecordMapper(this._model.KeyProperty, this._jsonSerializerOptions); } #pragma warning restore CS0618 + + this._collectionMetadata = new() + { + VectorStoreSystemName = RedisConstants.VectorStoreSystemName, + VectorStoreName = database.Database.ToString(), + CollectionName = collectionName + }; } /// @@ -142,7 +149,7 @@ public virtual async Task CollectionExistsAsync(CancellationToken cancella { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = RedisConstants.VectorStoreSystemName, CollectionName = this._collectionName, OperationName = "FT.INFO" }; @@ -231,7 +238,7 @@ await this.RunOperationAsync("FT.DROPINDEX", // Convert to the caller's data model. return VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + RedisConstants.VectorStoreSystemName, this._collectionName, "GET", () => @@ -280,7 +287,7 @@ public virtual async IAsyncEnumerable GetAsync(IEnumerable keys // Convert to the caller's data model. yield return VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + RedisConstants.VectorStoreSystemName, this._collectionName, "MGET", () => @@ -324,7 +331,7 @@ public virtual async Task UpsertAsync(TRecord record, CancellationToken // Map. var redisJsonRecord = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + RedisConstants.VectorStoreSystemName, this._collectionName, "SET", () => @@ -358,7 +365,7 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable r foreach (var record in records) { var redisJsonRecord = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + RedisConstants.VectorStoreSystemName, this._collectionName, "MSET", () => @@ -416,7 +423,7 @@ public virtual async Task> VectorizedSearchAsync @@ -438,6 +445,19 @@ public virtual async Task> VectorizedSearchAsync(mappedResults.ToAsyncEnumerable()); } + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreRecordCollectionMetadata) ? this._collectionMetadata : + serviceType == typeof(IDatabase) ? this._database : + serviceType.IsInstanceOfType(this) ? this : + null; + } + /// /// Prefix the key with the collection name if the option is set. 
/// @@ -486,7 +506,7 @@ private async Task RunOperationAsync(string operationName, Func operation) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = RedisConstants.VectorStoreSystemName, CollectionName = this._collectionName, OperationName = operationName }; @@ -510,7 +530,7 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = RedisConstants.VectorStoreSystemName, CollectionName = this._collectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs index 4966917d3990..7deae3f65867 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs @@ -18,8 +18,8 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// public class RedisVectorStore : IVectorStore { - /// The name of this database for telemetry purposes. - private const string DatabaseName = "Redis"; + /// Metadata about vector store. + private readonly VectorStoreMetadata _metadata; /// The redis database to read/write indices from. private readonly IDatabase _database; @@ -38,6 +38,12 @@ public RedisVectorStore(IDatabase database, RedisVectorStoreOptions? options = d this._database = database; this._options = options ?? new RedisVectorStoreOptions(); + + this._metadata = new() + { + VectorStoreSystemName = RedisConstants.VectorStoreSystemName, + VectorStoreName = database.Database.ToString() + }; } /// @@ -82,7 +88,7 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = RedisConstants.VectorStoreSystemName, OperationName = OperationName }; } @@ -96,4 +102,17 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat } } } + + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreMetadata) ? this._metadata : + serviceType == typeof(IDatabase) ? this._database : + serviceType.IsInstanceOfType(this) ?
this : + null; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs index 6690f1d564a4..f24b4a350993 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs @@ -12,8 +12,6 @@ namespace Microsoft.SemanticKernel.Connectors.SqlServer; internal static class ExceptionWrapper { - internal const string VectorStoreType = "SqlServer"; - internal static async Task WrapAsync( SqlConnection connection, SqlCommand command, @@ -42,7 +40,7 @@ internal static async Task WrapAsync( throw new VectorStoreOperationException(ex.Message, ex) { OperationName = operationName, - VectorStoreType = VectorStoreType, + VectorStoreType = SqlServerConstants.VectorStoreSystemName, CollectionName = collectionName }; } @@ -63,7 +61,7 @@ internal static async Task WrapReadAsync( throw new VectorStoreOperationException(ex.Message, ex) { OperationName = operationName, - VectorStoreType = VectorStoreType, + VectorStoreType = SqlServerConstants.VectorStoreSystemName, CollectionName = collectionName }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs index eed61838df81..f74de4a8fcb5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs @@ -8,6 +8,8 @@ namespace Microsoft.SemanticKernel.Connectors.SqlServer; internal static class SqlServerConstants { + internal const string VectorStoreSystemName = "microsoft.sql_server"; + // The actual number is actually higher (2_100), but we want to avoid any kind of "off by one" errors. internal const int MaxParameterCount = 2_000; diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs index d9481ffc467d..b8517a49baba 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Threading; @@ -16,6 +17,9 @@ public sealed class SqlServerVectorStore : IVectorStore private readonly string _connectionString; private readonly SqlServerVectorStoreOptions _options; + /// Metadata about vector store. + private readonly VectorStoreMetadata _metadata; + /// /// Initializes a new instance of the class. /// @@ -31,6 +35,14 @@ public SqlServerVectorStore(string connectionString, SqlServerVectorStoreOptions this._options = options is not null ? new() { Schema = options.Schema } : SqlServerVectorStoreOptions.Defaults; + + var connectionStringBuilder = new SqlConnectionStringBuilder(connectionString); + + this._metadata = new() + { + VectorStoreSystemName = SqlServerConstants.VectorStoreSystemName, + VectorStoreName = connectionStringBuilder.InitialCatalog + }; } /// @@ -63,4 +75,16 @@ public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancel yield return reader.GetString(reader.GetOrdinal("table_name")); } } + + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? 
null : + serviceType == typeof(VectorStoreMetadata) ? this._metadata : + serviceType.IsInstanceOfType(this) ? this : + null; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index 991020d0647d..adb2a6b8b834 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -20,6 +20,9 @@ public sealed class SqlServerVectorStoreRecordCollection #pragma warning restore CA1711 : IVectorStoreRecordCollection where TKey : notnull { + /// Metadata about vector store record collection. + private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; + private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); private static readonly SqlServerVectorStoreRecordCollectionOptions s_defaultOptions = new(); @@ -60,9 +63,17 @@ public SqlServerVectorStoreRecordCollection( Mapper = options.Mapper, RecordDefinition = options.RecordDefinition, }; - this._mapper = this._options.Mapper ?? new RecordMapper(this._model); #pragma warning restore CS0618 + + var connectionStringBuilder = new SqlConnectionStringBuilder(connectionString); + + this._collectionMetadata = new() + { + VectorStoreSystemName = SqlServerConstants.VectorStoreSystemName, + VectorStoreName = connectionStringBuilder.InitialCatalog, + CollectionName = name + }; } /// @@ -205,7 +216,7 @@ public async Task DeleteAsync(IEnumerable keys, CancellationToken cancella throw new VectorStoreOperationException(ex.Message, ex) { OperationName = "DeleteBatch", - VectorStoreType = ExceptionWrapper.VectorStoreType, + VectorStoreType = SqlServerConstants.VectorStoreSystemName, CollectionName = this.CollectionName }; } @@ -377,7 +388,7 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, throw new VectorStoreOperationException(ex.Message, ex) { OperationName = "UpsertBatch", - VectorStoreType = ExceptionWrapper.VectorStoreType, + VectorStoreType = SqlServerConstants.VectorStoreSystemName, CollectionName = this.CollectionName }; } @@ -444,6 +455,18 @@ public async Task> VectorizedSearchAsync(T }, cancellationToken, "VectorizedSearch", this.CollectionName).ConfigureAwait(false); } + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreRecordCollectionMetadata) ? this._collectionMetadata : + serviceType.IsInstanceOfType(this) ? this : + null; + } + private async IAsyncEnumerable> ReadVectorSearchResultsAsync( SqlConnection connection, SqlCommand command, diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs index 5bcd273203a9..9c40b2668062 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs @@ -8,6 +8,8 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; internal static class SqliteConstants { + internal const string VectorStoreSystemName = "sqlite"; + /// /// SQLite extension name for vector search. /// More information here: . 
diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs index f5b9615884ff..6e1e5d7bc9ea 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs @@ -18,6 +18,9 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// public class SqliteVectorStore : IVectorStore { + /// Metadata about vector store. + private readonly VectorStoreMetadata _metadata; + /// The connection string for the SQLite database represented by this . private readonly string _connectionString; @@ -35,6 +38,14 @@ public SqliteVectorStore(string connectionString, SqliteVectorStoreOptions? opti this._connectionString = connectionString; this._options = options ?? new(); + + var connectionStringBuilder = new SqliteConnectionStringBuilder(connectionString); + + this._metadata = new() + { + VectorStoreSystemName = SqliteConstants.VectorStoreSystemName, + VectorStoreName = connectionStringBuilder.DataSource + }; } /// @@ -100,4 +111,16 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat yield return reader.GetString(ordinal); } } + + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreMetadata) ? this._metadata : + serviceType.IsInstanceOfType(this) ? this : + null; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 25cd0c07da7a..28e88f7febe0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -24,8 +24,8 @@ public class SqliteVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect { - /// The name of this database for telemetry purposes. - private const string DatabaseName = "SQLite"; + /// Metadata about vector store record collection. + private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; /// The connection string for the SQLite database represented by this . private readonly string _connectionString; @@ -131,10 +131,18 @@ public SqliteVectorStoreRecordCollection( throw new UnreachableException(); } } - #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete this._mapper = this._options.DictionaryCustomMapper ?? new SqliteVectorStoreRecordMapper(this._model); #pragma warning restore CS0618 + + var connectionStringBuilder = new SqliteConnectionStringBuilder(connectionString); + + this._collectionMetadata = new() + { + VectorStoreSystemName = SqliteConstants.VectorStoreSystemName, + VectorStoreName = connectionStringBuilder.DataSource, + CollectionName = collectionName + }; } /// @@ -362,6 +370,18 @@ public async Task DeleteAsync(IEnumerable keys, CancellationToken cancel #endregion + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreRecordCollectionMetadata) ? this._collectionMetadata : + serviceType.IsInstanceOfType(this) ? 
this : + null; + } + #region private private async ValueTask GetConnectionAsync(CancellationToken cancellationToken = default) @@ -567,7 +587,7 @@ private async Task InternalUpsertAsync(SqliteConnection connection, const string OperationName = "Upsert"; var storageModel = VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + SqliteConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -590,7 +610,7 @@ private IAsyncEnumerable InternalUpsertBatchAsync(SqliteConnection c const string OperationName = "UpsertBatch"; var storageModels = records.Select(record => VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + SqliteConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record))).ToList(); @@ -720,7 +740,7 @@ private TRecord GetAndMapRecord( } return VectorStoreErrorHandler.RunModelConversion( - DatabaseName, + SqliteConstants.VectorStoreSystemName, this.CollectionName, operationName, () => this._mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors })); @@ -736,7 +756,7 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = DatabaseName, + VectorStoreType = SqliteConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs index 3bb4b18c8991..f98b4ec35fde 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs @@ -4,8 +4,8 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; internal sealed class WeaviateConstants { - /// The name of this database for telemetry purposes. - public const string DatabaseName = "Weaviate"; + /// The name of this vector store for telemetry purposes. + public const string VectorStoreSystemName = "weaviate"; /// Reserved key property name in Weaviate. internal const string ReservedKeyPropertyName = "id"; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs index 6df456e872b2..91a2944d7658 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs @@ -18,6 +18,9 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; /// public class WeaviateVectorStore : IVectorStore { + /// Metadata about vector store. + private readonly VectorStoreMetadata _metadata; + /// that is used to interact with Weaviate API. private readonly HttpClient _httpClient; @@ -39,6 +42,11 @@ public WeaviateVectorStore(HttpClient httpClient, WeaviateVectorStoreOptions? op this._httpClient = httpClient; this._options = options ?? 
new(); + + this._metadata = new() + { + VectorStoreSystemName = WeaviateConstants.VectorStoreSystemName + }; } /// @@ -91,7 +99,7 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat { throw new VectorStoreOperationException("Call to vector store failed.", e) { - VectorStoreType = WeaviateConstants.DatabaseName, + VectorStoreType = WeaviateConstants.VectorStoreSystemName, OperationName = "ListCollectionNames" }; } @@ -104,4 +112,17 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat } } } + + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreMetadata) ? this._metadata : + serviceType == typeof(HttpClient) ? this._httpClient : + serviceType.IsInstanceOfType(this) ? this : + null; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index f50c25d8d573..e3914c5e2f13 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -24,6 +24,9 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; public class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { + /// Metadata about vector store record collection. + private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; + /// Default JSON serializer options. private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() { @@ -96,6 +99,12 @@ public WeaviateVectorStoreRecordCollection( // Assign mapper. this._mapper = this.InitializeMapper(); + + this._collectionMetadata = new() + { + VectorStoreSystemName = WeaviateConstants.VectorStoreSystemName, + CollectionName = collectionName + }; } /// @@ -208,7 +217,7 @@ public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancel } return VectorStoreErrorHandler.RunModelConversion( - WeaviateConstants.DatabaseName, + WeaviateConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(jsonObject!, new() { IncludeVectors = includeVectors })); @@ -250,7 +259,7 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable rec var responses = await this.RunOperationAsync(OperationName, async () => { var jsonObjects = records.Select(record => VectorStoreErrorHandler.RunModelConversion( - WeaviateConstants.DatabaseName, + WeaviateConstants.VectorStoreSystemName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record))).ToList(); @@ -325,6 +334,19 @@ public async Task> HybridSearchAsync(TVect return await this.ExecuteQueryAsync(query, searchOptions.IncludeVectors, WeaviateConstants.HybridScorePropertyName, OperationName, cancellationToken).ConfigureAwait(false); } + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + Verify.NotNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(VectorStoreRecordCollectionMetadata) ? this._collectionMetadata : + serviceType == typeof(HttpClient) ? this._httpClient : + serviceType.IsInstanceOfType(this) ? 
this : + null; + } + #region private private async Task> ExecuteQueryAsync(string query, bool includeVectors, string scorePropertyName, string operationName, CancellationToken cancellationToken) @@ -339,7 +361,7 @@ private async Task> ExecuteQueryAsync(string query, { throw new VectorStoreOperationException($"Error occurred during vector search. Response: {content}") { - VectorStoreType = WeaviateConstants.DatabaseName, + VectorStoreType = WeaviateConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = operationName }; @@ -350,7 +372,7 @@ private async Task> ExecuteQueryAsync(string query, var (storageModel, score) = WeaviateVectorStoreCollectionSearchMapping.MapSearchResult(result!, scorePropertyName); var record = VectorStoreErrorHandler.RunModelConversion( - WeaviateConstants.DatabaseName, + WeaviateConstants.VectorStoreSystemName, this.CollectionName, operationName, () => this._mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors })); @@ -415,7 +437,7 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = WeaviateConstants.DatabaseName, + VectorStoreType = WeaviateConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs index a931bbea0b67..74545d03cda3 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs @@ -22,6 +22,7 @@ public class PostgresVectorStoreRecordCollectionTests public PostgresVectorStoreRecordCollectionTests() { this._postgresClientMock = new Mock(MockBehavior.Strict); + this._postgresClientMock.Setup(l => l.DatabaseName).Returns("TestDatabase"); } [Fact] diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreTests.cs index 33cfc005a7bc..8a89582fc0f8 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreTests.cs @@ -26,6 +26,7 @@ public class PostgresVectorStoreTests public PostgresVectorStoreTests() { this._postgresClientMock = new Mock(MockBehavior.Strict); + this._postgresClientMock.Setup(l => l.DatabaseName).Returns("TestDatabase"); } [Fact] @@ -60,7 +61,10 @@ public void GetCollectionCallsFactoryIfProvided() var factoryMock = new Mock(MockBehavior.Strict); var collectionMock = new Mock>>(MockBehavior.Strict); var clientMock = new Mock(MockBehavior.Strict); + clientMock.Setup(x => x.DataSource).Returns(null); + clientMock.Setup(x => x.DatabaseName).Returns("TestDatabase"); + factoryMock .Setup(x => x.CreateVectorStoreRecordCollection>(It.IsAny(), TestCollectionName, null)) .Returns(collectionMock.Object); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs index 8b4324a6a98d..e95a83321b05 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs +++ 
b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -28,6 +28,7 @@ public class RedisHashSetVectorStoreRecordCollectionTests public RedisHashSetVectorStoreRecordCollectionTests() { this._redisDatabaseMock = new Mock(MockBehavior.Strict); + this._redisDatabaseMock.Setup(l => l.Database).Returns(0); var batchMock = new Mock(); this._redisDatabaseMock.Setup(x => x.CreateBatch(It.IsAny())).Returns(batchMock.Object); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs index 5a58000d3b48..50755b9624f0 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs @@ -32,6 +32,7 @@ public class RedisJsonVectorStoreRecordCollectionTests public RedisJsonVectorStoreRecordCollectionTests() { this._redisDatabaseMock = new Mock(MockBehavior.Strict); + this._redisDatabaseMock.Setup(l => l.Database).Returns(0); var batchMock = new Mock(); this._redisDatabaseMock.Setup(x => x.CreateBatch(It.IsAny())).Returns(batchMock.Object); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreTests.cs index baf2564c81a2..9280051fd266 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreTests.cs @@ -22,6 +22,7 @@ public class RedisVectorStoreTests public RedisVectorStoreTests() { this._redisDatabaseMock = new Mock(MockBehavior.Strict); + this._redisDatabaseMock.Setup(l => l.Database).Returns(0); var batchMock = new Mock(); this._redisDatabaseMock.Setup(x => x.CreateBatch(It.IsAny())).Returns(batchMock.Object); diff --git a/dotnet/src/Connectors/Directory.Build.props b/dotnet/src/Connectors/Directory.Build.props index 46c46d509f83..dd75c63ffd61 100644 --- a/dotnet/src/Connectors/Directory.Build.props +++ b/dotnet/src/Connectors/Directory.Build.props @@ -3,7 +3,7 @@ - $(NoWarn);MEVD9001 + $(NoWarn);MEVD9000,MEVD9001 \ No newline at end of file diff --git a/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml index d4b4c39149eb..9e79fa44b1bb 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml +++ b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml @@ -2,141 +2,113 @@ - CP0001 - T:Microsoft.Extensions.VectorData.DeleteRecordOptions + CP0002 + M:Microsoft.Extensions.VectorData.HybridSearchOptions`1.get_Top lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true - CP0001 - T:Microsoft.Extensions.VectorData.UpsertRecordOptions + CP0002 + M:Microsoft.Extensions.VectorData.HybridSearchOptions`1.set_Top(System.Int32) lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true - CP0001 - T:Microsoft.Extensions.VectorData.VectorSearchOptions + CP0002 + M:Microsoft.Extensions.VectorData.IKeywordHybridSearch`1.HybridSearchAsync``1(``0,System.Collections.Generic.ICollection{System.String},Microsoft.Extensions.VectorData.HybridSearchOptions{`0},System.Threading.CancellationToken) 
lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true - CP0001 - T:Microsoft.Extensions.VectorData.DeleteRecordOptions - lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll - lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll - true - - - CP0001 - T:Microsoft.Extensions.VectorData.UpsertRecordOptions - lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll - lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll - true - - - CP0001 - T:Microsoft.Extensions.VectorData.VectorSearchOptions - lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll - lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll - true - - - CP0001 - T:Microsoft.Extensions.VectorData.DeleteRecordOptions - lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll - lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll - true - - - CP0001 - T:Microsoft.Extensions.VectorData.UpsertRecordOptions - lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll - lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + CP0002 + M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true - CP0001 - T:Microsoft.Extensions.VectorData.VectorSearchOptions - lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll - lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + CP0002 + M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetBatchAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.GetRecordOptions,System.Threading.CancellationToken) lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},System.Threading.CancellationToken) lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - 
M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.VectorSearchOptions`1.get_Top lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetBatchAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.GetRecordOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.VectorSearchOptions`1.set_Top(System.Int32) lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(`1,Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) - lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll - lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + M:Microsoft.Extensions.VectorData.HybridSearchOptions`1.get_Top + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) - lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll - lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + M:Microsoft.Extensions.VectorData.HybridSearchOptions`1.set_Top(System.Int32) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IKeywordHybridSearch`1.HybridSearchAsync``1(``0,System.Collections.Generic.ICollection{System.String},Microsoft.Extensions.VectorData.HybridSearchOptions{`0},System.Threading.CancellationToken) lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - 
M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true @@ -150,42 +122,63 @@ CP0002 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(`1,Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.VectorSearchOptions`1.get_Top lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.VectorSearchOptions`1.set_Top(System.Int32) lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.HybridSearchOptions`1.get_Top lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.HybridSearchOptions`1.set_Top(System.Int32) lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IKeywordHybridSearch`1.HybridSearchAsync``1(``0,System.Collections.Generic.ICollection{System.String},Microsoft.Extensions.VectorData.HybridSearchOptions{`0},System.Threading.CancellationToken) lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.DeleteRecordOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + 
CP0002 + M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteBatchAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true @@ -199,18 +192,46 @@ CP0002 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(`1,Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},System.Threading.CancellationToken) lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0002 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertBatchAsync(System.Collections.Generic.IEnumerable{`1},Microsoft.Extensions.VectorData.UpsertRecordOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.VectorSearchOptions`1.get_Top + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.VectorSearchOptions`1.set_Top(System.Int32) lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0006 + M:Microsoft.Extensions.VectorData.IKeywordHybridSearch`1.GetService(System.Type,System.Object) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IKeywordHybridSearch`1.HybridSearchAsync``1(``0,System.Collections.Generic.ICollection{System.String},System.Int32,Microsoft.Extensions.VectorData.HybridSearchOptions{`0},System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.GetService(System.Type,System.Object) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0006 M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,System.Int32,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) @@ -220,35 +241,35 @@ CP0006 - M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,System.Int32,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.GetService(System.Type,System.Object) lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,System.Threading.CancellationToken) + 
M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,System.Int32,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStore.GetService(System.Type,System.Object) lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.GetRecordOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(`1,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.GetRecordOptions,System.Threading.CancellationToken) lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true @@ -260,6 +281,27 @@ lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0006 + M:Microsoft.Extensions.VectorData.IKeywordHybridSearch`1.GetService(System.Type,System.Object) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IKeywordHybridSearch`1.HybridSearchAsync``1(``0,System.Collections.Generic.ICollection{System.String},System.Int32,Microsoft.Extensions.VectorData.HybridSearchOptions{`0},System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.GetService(System.Type,System.Object) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0006 M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,System.Int32,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) @@ -269,35 +311,35 @@ CP0006 - M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,System.Int32,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.GetService(System.Type,System.Object) lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,System.Threading.CancellationToken) + 
M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,System.Int32,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStore.GetService(System.Type,System.Object) lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.GetRecordOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(`1,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.GetRecordOptions,System.Threading.CancellationToken) lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true @@ -309,6 +351,27 @@ lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0006 + M:Microsoft.Extensions.VectorData.IKeywordHybridSearch`1.GetService(System.Type,System.Object) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IKeywordHybridSearch`1.HybridSearchAsync``1(``0,System.Collections.Generic.ICollection{System.String},System.Int32,Microsoft.Extensions.VectorData.HybridSearchOptions{`0},System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.GetService(System.Type,System.Object) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0006 M:Microsoft.Extensions.VectorData.IVectorizableTextSearch`1.VectorizableTextSearchAsync(System.String,System.Int32,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) @@ -318,35 +381,35 @@ CP0006 - M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,System.Int32,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.GetService(System.Type,System.Object) lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(`0,System.Threading.CancellationToken) + 
M:Microsoft.Extensions.VectorData.IVectorizedSearch`1.VectorizedSearchAsync``1(``0,System.Int32,Microsoft.Extensions.VectorData.VectorSearchOptions{`0},System.Threading.CancellationToken) lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStore.GetService(System.Type,System.Object) lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.GetRecordOptions,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.DeleteAsync(System.Collections.Generic.IEnumerable{`0},System.Threading.CancellationToken) lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true CP0006 - M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(`1,System.Threading.CancellationToken) + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetAsync(System.Collections.Generic.IEnumerable{`0},Microsoft.Extensions.VectorData.GetRecordOptions,System.Threading.CancellationToken) lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj b/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj index 5b8a57c7ed74..c70073466ff8 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj @@ -13,7 +13,7 @@ 9.0.0-preview.1.25161.1 9.0.0.0 - 9.0.0-preview.1.25078.1 + 9.0.0-preview.1.25161.1 Microsoft.Extensions.VectorData.Abstractions $(AssemblyName) Abstractions for vector database access. diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs index 7e089fae04c1..671664bf3a74 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs @@ -1,6 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; @@ -28,4 +30,17 @@ Task> HybridSearchAsync( int top, HybridSearchOptions? options = default, CancellationToken cancellationToken = default); + + /// Asks the for an object of the specified type . + /// The type of object being requested. + /// An optional key that can be used to help identify the target service. + /// The found object, otherwise . + /// is . + /// + /// The purpose of this method is to allow for the retrieval of strongly-typed services that might be provided by the , + /// including itself or any services it might be wrapping. For example, to access the for the instance, + /// may be used to request it. 
+ /// + [Experimental("MEVD9000")] + object? GetService(Type serviceType, object? serviceKey = null); } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs index f300bed658c3..3198e518e9ce 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs @@ -1,5 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; +using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; @@ -24,4 +26,17 @@ Task> VectorizableTextSearchAsync( int top, VectorSearchOptions? options = default, CancellationToken cancellationToken = default); + + /// Asks the for an object of the specified type . + /// The type of object being requested. + /// An optional key that can be used to help identify the target service. + /// The found object, otherwise . + /// is . + /// + /// The purpose of this method is to allow for the retrieval of strongly-typed services that might be provided by the , + /// including itself or any services it might be wrapping. For example, to access the for the instance, + /// may be used to request it. + /// + [Experimental("MEVD9000")] + object? GetService(Type serviceType, object? serviceKey = null); } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs index b7a5de5c19d2..e4ff7dec7238 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs @@ -1,5 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using System; +using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; @@ -25,4 +27,17 @@ Task> VectorizedSearchAsync( int top, VectorSearchOptions? options = default, CancellationToken cancellationToken = default); + + /// Asks the for an object of the specified type . + /// The type of object being requested. + /// An optional key that can be used to help identify the target service. + /// The found object, otherwise . + /// is . + /// + /// The purpose of this method is to allow for the retrieval of strongly-typed services that might be provided by the , + /// including itself or any services it might be wrapping. For example, to access the for the instance, + /// may be used to request it. + /// + [Experimental("MEVD9000")] + object? GetService(Type serviceType, object? serviceKey = null); } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs index a3ac5466323c..6810e99722be 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs @@ -1,6 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Threading; namespace Microsoft.Extensions.VectorData; @@ -38,4 +40,17 @@ IVectorStoreRecordCollection GetCollection(string /// The to monitor for cancellation requests. The default is . /// The list of names of all the collections in the vector store. 
IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default); + + /// Asks the for an object of the specified type . + /// The type of object being requested. + /// An optional key that can be used to help identify the target service. + /// The found object, otherwise . + /// is . + /// + /// The purpose of this method is to allow for the retrieval of strongly-typed services that might be provided by the , + /// including itself or any services it might be wrapping. For example, to access the for the instance, + /// may be used to request it. + /// + [Experimental("MEVD9000")] + object? GetService(Type serviceType, object? serviceKey = null); } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreMetadata.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreMetadata.cs new file mode 100644 index 000000000000..f89884736cb6 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreMetadata.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.Extensions.VectorData; + +/// Provides metadata about an . +[Experimental("MEVD9000")] +public class VectorStoreMetadata +{ + /// The name of the vector store system. + /// + /// Where possible, this maps to the "db.system.name" attribute defined in the + /// OpenTelemetry Semantic Conventions for database calls and systems, see . + /// Example: redis, sqlite, mysql. + /// + public string? VectorStoreSystemName { get; init; } + + /// + /// The name of the vector store (database). + /// + public string? VectorStoreName { get; init; } +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreRecordCollectionMetadata.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreRecordCollectionMetadata.cs new file mode 100644 index 000000000000..b2ea092878c3 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreRecordCollectionMetadata.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.Extensions.VectorData; + +/// Provides metadata about an . +[Experimental("MEVD9000")] +public class VectorStoreRecordCollectionMetadata +{ + /// The name of the vector store system. + /// + /// Where possible, this maps to the "db.system.name" attribute defined in the + /// OpenTelemetry Semantic Conventions for database calls and systems, see . + /// Example: redis, sqlite, mysql. + /// + public string? VectorStoreSystemName { get; init; } + + /// + /// The name of the vector store (database). + /// + public string? VectorStoreName { get; init; } + + /// + /// The name of a collection (table, container) within the vector store (database). + /// + public string? CollectionName { get; init; } +} diff --git a/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs b/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs index 4831e1720c9c..427f0a123b61 100644 --- a/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs +++ b/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs @@ -108,6 +108,16 @@ public async Task> VectorizableTextSearchAsync(stri return await vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, top, options, cancellationToken); } + + /// + public object? GetService(Type serviceType, object? 
serviceKey = null) + { + ArgumentNullException.ThrowIfNull(serviceType); + + return + serviceKey is null && serviceType.IsInstanceOfType(this) ? this : + vectorizedSearch.GetService(serviceType, serviceKey); + } } /// diff --git a/dotnet/src/IntegrationTests/IntegrationTests.csproj b/dotnet/src/IntegrationTests/IntegrationTests.csproj index cb21d20b7f4a..d80c297f4d26 100644 --- a/dotnet/src/IntegrationTests/IntegrationTests.csproj +++ b/dotnet/src/IntegrationTests/IntegrationTests.csproj @@ -5,7 +5,7 @@ net8.0 true false - $(NoWarn);CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0110,OPENAI001 + $(NoWarn);CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0110,OPENAI001,MEVD9000 b7762d10-e29b-4bb1-8b74-b6d69a667dd4 diff --git a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBConstants.cs b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBConstants.cs index 456ab0b44e1a..279da36b7895 100644 --- a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBConstants.cs +++ b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBConstants.cs @@ -13,6 +13,8 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; [ExcludeFromCodeCoverage] internal static class MongoDBConstants { + internal const string VectorStoreSystemName = "mongodb"; + /// Default ratio of number of nearest neighbors to number of documents to return. internal const int DefaultNumCandidatesRatio = 10; diff --git a/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs b/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs index b6f824a16b0f..eb9ed46823e3 100644 --- a/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs +++ b/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs @@ -18,6 +18,16 @@ public Task> VectorizableTextSearchAsync(string sea return Task.FromResult(new VectorSearchResults(this._searchResults)); } + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + ArgumentNullException.ThrowIfNull(serviceType); + + return + serviceKey is null && serviceType.IsInstanceOfType(this) ? this : + null; + } + private static async IAsyncEnumerable> ToAsyncEnumerable(IEnumerable> searchResults) { foreach (var result in searchResults) diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs index 968ff4fdac7f..7882846ab2a8 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs @@ -131,6 +131,16 @@ public async Task> VectorizableTextSearchAsync(stri var vectorizedQuery = await textEmbeddingGeneration!.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); return await vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, top, options, cancellationToken); } + + /// + public object? GetService(Type serviceType, object? serviceKey = null) + { + ArgumentNullException.ThrowIfNull(serviceType); + + return + serviceKey is null && serviceType.IsInstanceOfType(this) ? 
this : + vectorizedSearch.GetService(serviceType, serviceKey); + } } /// diff --git a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj index 8580c9a173ab..3311fb3b3553 100644 --- a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj +++ b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj @@ -6,7 +6,7 @@ net8.0 true false - $(NoWarn);CA2007,CA1861,IDE1006,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0050,SKEXP0110,SKEXP0120 + $(NoWarn);CA2007,CA1861,IDE1006,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0050,SKEXP0110,SKEXP0120,MEVD9000 diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs index b9e6f54eb752..1e7273aa9a7c 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs @@ -66,7 +66,11 @@ public async Task UpsertBatchIsAtomic() VectorStoreOperationException ex = await Assert.ThrowsAsync(() => collection.UpsertAsync(inserted).ToArrayAsync().AsTask()); Assert.Equal("UpsertBatch", ex.OperationName); - Assert.Equal(collection.CollectionName, ex.CollectionName); + + var metadata = collection.GetService(typeof(VectorStoreRecordCollectionMetadata)) as VectorStoreRecordCollectionMetadata; + + Assert.NotNull(metadata?.CollectionName); + Assert.Equal(metadata.CollectionName, ex.CollectionName); // Make sure that no records were inserted! Assert.Empty(await collection.GetAsync(keys).ToArrayAsync()); diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj index ba0e332abc73..d15fc2fc0058 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj @@ -11,7 +11,7 @@ $(NoWarn);CA2007,SKEXP0001,SKEXP0020,VSTHRD111;CS1685 b7762d10-e29b-4bb1-8b74-b6d69a667dd4 - $(NoWarn);MEVD9001 + $(NoWarn);MEVD9000,MEVD9001 diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs index 16f8679df842..be3becc603cc 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs @@ -86,7 +86,17 @@ private async Task CreateCollection() try { Assert.True(await collection.CollectionExistsAsync()); - Assert.True(await fixture.TestStore.DefaultVectorStore.ListCollectionNamesAsync().ContainsAsync(collection.CollectionName)); + +#pragma warning disable MEVD9000 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
+ var collectionMetadata = collection.GetService(typeof(VectorStoreRecordCollectionMetadata)) as VectorStoreRecordCollectionMetadata; +#pragma warning restore MEVD9000 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + + Assert.NotNull(collectionMetadata); + Assert.NotNull(collectionMetadata.VectorStoreSystemName); + Assert.NotNull(collectionMetadata.VectorStoreName); + Assert.NotNull(collectionMetadata.CollectionName); + + Assert.True(await fixture.TestStore.DefaultVectorStore.ListCollectionNamesAsync().ContainsAsync(collectionMetadata.CollectionName)); } finally { @@ -103,7 +113,17 @@ private async Task CreateCollectionIfNotExistsMoreThanOnce() try { Assert.True(await collection.CollectionExistsAsync()); - Assert.True(await fixture.TestStore.DefaultVectorStore.ListCollectionNamesAsync().ContainsAsync(collection.CollectionName)); + +#pragma warning disable MEVD9000 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + var collectionMetadata = collection.GetService(typeof(VectorStoreRecordCollectionMetadata)) as VectorStoreRecordCollectionMetadata; +#pragma warning restore MEVD9000 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + + Assert.NotNull(collectionMetadata); + Assert.NotNull(collectionMetadata.VectorStoreSystemName); + Assert.NotNull(collectionMetadata.VectorStoreName); + Assert.NotNull(collectionMetadata.CollectionName); + + Assert.True(await fixture.TestStore.DefaultVectorStore.ListCollectionNamesAsync().ContainsAsync(collectionMetadata.CollectionName)); await collection.CreateCollectionIfNotExistsAsync(); } @@ -122,7 +142,17 @@ private async Task CreateCollectionMoreThanOnce() try { Assert.True(await collection.CollectionExistsAsync()); - Assert.True(await fixture.TestStore.DefaultVectorStore.ListCollectionNamesAsync().ContainsAsync(collection.CollectionName)); + +#pragma warning disable MEVD9000 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + var collectionMetadata = collection.GetService(typeof(VectorStoreRecordCollectionMetadata)) as VectorStoreRecordCollectionMetadata; +#pragma warning restore MEVD9000 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + + Assert.NotNull(collectionMetadata); + Assert.NotNull(collectionMetadata.VectorStoreSystemName); + Assert.NotNull(collectionMetadata.VectorStoreName); + Assert.NotNull(collectionMetadata.CollectionName); + + Assert.True(await fixture.TestStore.DefaultVectorStore.ListCollectionNamesAsync().ContainsAsync(collectionMetadata.CollectionName)); await collection.CreateCollectionIfNotExistsAsync(); From b19fe4cf0b5465a74923484ba550bff7c41f1985 Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Mon, 7 Apr 2025 16:58:13 +0200 Subject: [PATCH 32/63] .Net: Add integration tests for no-vector models and fix bugs (#11383) ### Motivation and Context Some databases support crud operations without a vector. 
#11274 ### Description Add integration tests for no-vector models and fix bugs ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- ...ostgresVectorStoreRecordPropertyMapping.cs | 4 + .../RedisJsonVectorStoreRecordCollection.cs | 2 +- .../RedisJsonVectorStoreRecordMapper.cs | 16 +- .../RedisJsonVectorStoreRecordMapperTests.cs | 22 ++- .../AzureAISearchNoVectorConformanceTests.cs | 17 ++ .../CosmosMongoDBNoVectorConformanceTests.cs | 17 ++ .../CosmosMongoDBIntegrationTests.csproj | 2 + .../CosmosMongoDBSimpleModelFixture.cs | 4 - .../Support/CosmosMongoDBTestEnvironment.cs | 1 + .../Support/CosmosMongoDBTestStore.cs | 4 + .../CosmosNoSQLNoVectorConformanceTests.cs | 17 ++ .../CRUD/InMemoryNoVectorConformanceTests.cs | 17 ++ .../CRUD/MongoDBNoVectorConformanceTests.cs | 17 ++ .../CRUD/PostgresNoVectorConformanceTests.cs | 17 ++ .../RedisHashSetNoVectorConformanceTests.cs | 17 ++ .../CRUD/RedisJsonNoVectorConformanceTests.cs | 17 ++ .../CRUD/SqlServerBatchConformanceTests.cs | 8 +- .../CRUD/SqlServerNoVectorConformanceTests.cs | 17 ++ .../CRUD/SqliteNoVectorConformanceTests.cs | 17 ++ .../CRUD/BatchConformanceTests.cs | 18 +- .../CRUD/NoVectorConformanceTests.cs | 172 ++++++++++++++++++ .../CRUD/RecordConformanceTests.cs | 8 +- .../Collections/CollectionConformanceTests.cs | 8 +- .../{SimpleModel.cs => SimpleRecord.cs} | 4 +- .../Support/SimpleModelFixture.cs | 22 +-- .../CRUD/WeaviateNoVectorConformanceTests.cs | 22 +++ 26 files changed, 436 insertions(+), 51 deletions(-) create mode 100644 dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoVectorConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBNoVectorConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoVectorConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryNoVectorConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBNoVectorConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresNoVectorConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetNoVectorConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisJsonNoVectorConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerNoVectorConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteNoVectorConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs rename dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Models/{SimpleModel.cs => SimpleRecord.cs} (91%) create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs diff --git 
a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs index 06bd10bf4773..d16383c567ba 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs @@ -185,6 +185,10 @@ public static NpgsqlParameter GetNpgsqlParameter(object? value) { switch (property) { + case VectorStoreRecordKeyPropertyModel: + // There is no need to create a separate index for the key property. + break; + case VectorStoreRecordVectorPropertyModel vectorProperty: var indexKind = vectorProperty.IndexKind ?? PostgresConstants.DefaultIndexKind; var distanceFunction = vectorProperty.DistanceFunction ?? PostgresConstants.DefaultDistanceFunction; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index 7c3631aab808..97c5fd75bd1d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -118,7 +118,7 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectio else { // Default Mapper. - this._mapper = new RedisJsonVectorStoreRecordMapper(this._model.KeyProperty, this._jsonSerializerOptions); + this._mapper = new RedisJsonVectorStoreRecordMapper(this._model, this._jsonSerializerOptions); } #pragma warning restore CS0618 diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs index b128be837bfb..64a0ce9e5b76 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Linq; using System.Text.Json; using System.Text.Json.Nodes; using Microsoft.Extensions.VectorData; @@ -12,14 +13,14 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// The consumer data model to map to or from. internal sealed class RedisJsonVectorStoreRecordMapper( - VectorStoreRecordKeyPropertyModel keyProperty, + VectorStoreRecordModel model, JsonSerializerOptions jsonSerializerOptions) #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete : IVectorStoreRecordMapper #pragma warning restore CS0618 { /// The key property. 
- private readonly string _keyPropertyStorageName = keyProperty.StorageName; + private readonly string _keyPropertyStorageName = model.KeyProperty.StorageName; /// public (string Key, JsonNode Node) MapFromDataToStorageModel(TConsumerDataModel dataModel) @@ -37,17 +38,24 @@ internal sealed class RedisJsonVectorStoreRecordMapper( return (keyValue, jsonNode); } - throw new VectorStoreRecordMappingException($"Missing key field {this._keyPropertyStorageName} on provided record of type {typeof(TConsumerDataModel).FullName}."); + throw new VectorStoreRecordMappingException($"Missing key field '{this._keyPropertyStorageName}' on provided record of type {typeof(TConsumerDataModel).FullName}."); } /// public TConsumerDataModel MapFromStorageToDataModel((string Key, JsonNode Node) storageModel, StorageToDataModelMapperOptions options) { - // The redis result can be either a single object or an array with a single object in the case where we are doing an MGET. + // The redis result can have one of three different formats: + // 1. a single object + // 2. an array with a single object in the case where we are doing an MGET + // 3. a single value (string, number, etc.) in the case where there is only one property being requested because the model has only one property apart from the key var jsonObject = storageModel.Node switch { JsonObject topLevelJsonObject => topLevelJsonObject, JsonArray and [JsonObject arrayEntryJsonObject] => arrayEntryJsonObject, + JsonValue when model.DataProperties.Count + model.VectorProperties.Count == 1 => new JsonObject + { + [model.DataProperties.Concat(model.VectorProperties).First().StorageName] = storageModel.Node + }, _ => throw new VectorStoreRecordMappingException($"Invalid data format for document with key '{storageModel.Key}'") }; diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs index c3e8eb077b39..bb1b0889bd6e 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs @@ -20,8 +20,9 @@ public sealed class RedisJsonVectorStoreRecordMapperTests public void MapsAllFieldsFromDataToStorageModel() { // Arrange. - var keyProperty = new VectorStoreRecordKeyPropertyModel("Key", typeof(string)); - var sut = new RedisJsonVectorStoreRecordMapper(keyProperty, JsonSerializerOptions.Default); + var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) + .Build(typeof(MultiPropsModel), null, JsonSerializerOptions.Default); + var sut = new RedisJsonVectorStoreRecordMapper(model, JsonSerializerOptions.Default); // Act. var actual = sut.MapFromDataToStorageModel(CreateModel("test key")); @@ -40,8 +41,10 @@ public void MapsAllFieldsFromDataToStorageModel() public void MapsAllFieldsFromDataToStorageModelWithCustomSerializerOptions() { // Arrange. 
- var keyProperty = new VectorStoreRecordKeyPropertyModel("Key", typeof(string)) { StorageName = "key" }; - var sut = new RedisJsonVectorStoreRecordMapper(keyProperty, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }); + var jsonSerializerOptions = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; + var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) + .Build(typeof(MultiPropsModel), null, jsonSerializerOptions); + var sut = new RedisJsonVectorStoreRecordMapper(model, jsonSerializerOptions); // Act. var actual = sut.MapFromDataToStorageModel(CreateModel("test key")); @@ -60,8 +63,9 @@ public void MapsAllFieldsFromDataToStorageModelWithCustomSerializerOptions() public void MapsAllFieldsFromStorageToDataModel() { // Arrange. - var keyProperty = new VectorStoreRecordKeyPropertyModel("Key", typeof(string)); - var sut = new RedisJsonVectorStoreRecordMapper(keyProperty, JsonSerializerOptions.Default); + var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) + .Build(typeof(MultiPropsModel), null, JsonSerializerOptions.Default); + var sut = new RedisJsonVectorStoreRecordMapper(model, JsonSerializerOptions.Default); // Act. var jsonObject = new JsonObject(); @@ -84,8 +88,10 @@ public void MapsAllFieldsFromStorageToDataModel() public void MapsAllFieldsFromStorageToDataModelWithCustomSerializerOptions() { // Arrange. - var keyProperty = new VectorStoreRecordKeyPropertyModel("Key", typeof(string)) { StorageName = "key" }; - var sut = new RedisJsonVectorStoreRecordMapper(keyProperty, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }); + var jsonSerializerOptions = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; + var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) + .Build(typeof(MultiPropsModel), null, jsonSerializerOptions); + var sut = new RedisJsonVectorStoreRecordMapper(model, jsonSerializerOptions); // Act. var jsonObject = new JsonObject(); diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoVectorConformanceTests.cs new file mode 100644 index 000000000000..9d16e29ed86c --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoVectorConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using AzureAISearchIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace AzureAISearchIntegrationTests.CRUD; + +public class AzureAISearchNoVectorConformanceTests(AzureAISearchNoVectorConformanceTests.Fixture fixture) + : NoVectorConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoVectorConformanceTests.Fixture + { + public override TestStore TestStore => AzureAISearchTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBNoVectorConformanceTests.cs new file mode 100644 index 000000000000..134707f35575 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBNoVectorConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using CosmosMongoDBIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace CosmosMongoDBIntegrationTests.CRUD; + +public class CosmosMongoDBNoVectorConformanceTests(CosmosMongoDBNoVectorConformanceTests.Fixture fixture) + : NoVectorConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoVectorConformanceTests.Fixture + { + public override TestStore TestStore => CosmosMongoDBTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CosmosMongoDBIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CosmosMongoDBIntegrationTests.csproj index 59a720d7dddd..0e4200084d4e 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CosmosMongoDBIntegrationTests.csproj +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CosmosMongoDBIntegrationTests.csproj @@ -7,6 +7,7 @@ true false CosmosMongoDBIntegrationTests + b7762d10-e29b-4bb1-8b74-b6d69a667dd4 @@ -19,6 +20,7 @@ + diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBSimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBSimpleModelFixture.cs index 90c0e3efc659..42d6a8dbf3a9 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBSimpleModelFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBSimpleModelFixture.cs @@ -7,8 +7,4 @@ namespace CosmosMongoDBIntegrationTests.Support; public class CosmosMongoDBSimpleModelFixture : SimpleModelFixture { public override TestStore TestStore => CosmosMongoDBTestStore.Instance; - - protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.IvfFlat; - - protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; } diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestEnvironment.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestEnvironment.cs index df6550d05237..faf467122f2f 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestEnvironment.cs +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestEnvironment.cs @@ 
-18,6 +18,7 @@ static CosmosMongoDBTestEnvironment() .AddJsonFile(path: "testsettings.json", optional: true) .AddJsonFile(path: "testsettings.development.json", optional: true) .AddEnvironmentVariables() + .AddUserSecrets() .Build(); ConnectionString = configuration["AzureCosmosDBMongoDB:ConnectionString"]; diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestStore.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestStore.cs index fba1d18c8a7f..f7727ee324fb 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Support/CosmosMongoDBTestStore.cs @@ -21,6 +21,10 @@ public sealed class CosmosMongoDBTestStore : TestStore public override IVectorStore DefaultVectorStore => this._defaultVectorStore ?? throw new InvalidOperationException("Call InitializeAsync() first"); + public override string DefaultIndexKind => Microsoft.Extensions.VectorData.IndexKind.IvfFlat; + + public override string DefaultDistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; + public AzureCosmosDBMongoDBVectorStore GetVectorStore(AzureCosmosDBMongoDBVectorStoreOptions options) => new(this.Database, options); diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoVectorConformanceTests.cs new file mode 100644 index 000000000000..29f2b80ac866 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoVectorConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using CosmosNoSQLIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace CosmosNoSQLIntegrationTests.CRUD; + +public class CosmosNoSQLNoVectorConformanceTests(CosmosNoSQLNoVectorConformanceTests.Fixture fixture) + : NoVectorConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoVectorConformanceTests.Fixture + { + public override TestStore TestStore => CosmosNoSqlTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryNoVectorConformanceTests.cs new file mode 100644 index 000000000000..7c8c759c8d3c --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryNoVectorConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using InMemoryIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace InMemoryIntegrationTests.CRUD; + +public class InMemoryNoVectorConformanceTests(InMemoryNoVectorConformanceTests.Fixture fixture) + : NoVectorConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoVectorConformanceTests.Fixture + { + public override TestStore TestStore => InMemoryTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBNoVectorConformanceTests.cs new file mode 100644 index 000000000000..f4a597f6bcaa --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBNoVectorConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using MongoDBIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace MongoDBIntegrationTests.CRUD; + +public class MongoDBNoVectorConformanceTests(MongoDBNoVectorConformanceTests.Fixture fixture) + : NoVectorConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoVectorConformanceTests.Fixture + { + public override TestStore TestStore => MongoDBTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresNoVectorConformanceTests.cs new file mode 100644 index 000000000000..93f6a0fff133 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresNoVectorConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using PostgresIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace PostgresIntegrationTests.CRUD; + +public class PostgresNoVectorConformanceTests(PostgresNoVectorConformanceTests.Fixture fixture) + : NoVectorConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoVectorConformanceTests.Fixture + { + public override TestStore TestStore => PostgresTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetNoVectorConformanceTests.cs new file mode 100644 index 000000000000..dddd33af25e9 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetNoVectorConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using RedisIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace RedisIntegrationTests.CRUD; + +public class RedisHashSetNoVectorConformanceTests(RedisHashSetNoVectorConformanceTests.Fixture fixture) + : NoVectorConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoVectorConformanceTests.Fixture + { + public override TestStore TestStore => RedisTestStore.HashSetInstance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisJsonNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisJsonNoVectorConformanceTests.cs new file mode 100644 index 000000000000..3d3828244bd9 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisJsonNoVectorConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using RedisIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace RedisIntegrationTests.CRUD; + +public class RedisJsonNoVectorConformanceTests(RedisJsonNoVectorConformanceTests.Fixture fixture) + : NoVectorConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoVectorConformanceTests.Fixture + { + public override TestStore TestStore => RedisTestStore.JsonInstance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs index 1e7273aa9a7c..3ae677ae03f9 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs @@ -25,12 +25,12 @@ public Task CanSplitBatchToAccountForMaxParameterLimit_WithoutVectors() private async Task CanSplitBatchToAccountForMaxParameterLimit(bool includeVectors) { var collection = fixture.Collection; - SimpleModel[] inserted = Enumerable.Range(0, SqlServerMaxParameters + 1).Select(i => new SimpleModel() + SimpleRecord[] inserted = Enumerable.Range(0, SqlServerMaxParameters + 1).Select(i => new SimpleRecord() { Id = fixture.GenerateNextKey(), Number = 100 + i, Text = i.ToString(), - Floats = Enumerable.Range(0, SimpleModel.DimensionCount).Select(j => (float)(i + j)).ToArray() + Floats = Enumerable.Range(0, SimpleRecord.DimensionCount).Select(j => (float)(i + j)).ToArray() }).ToArray(); var keys = inserted.Select(record => record.Id).ToArray(); @@ -52,13 +52,13 @@ private async Task CanSplitBatchToAccountForMaxParameterLimit(bool includeVector public async Task UpsertBatchIsAtomic() { var collection = fixture.Collection; - SimpleModel[] inserted = Enumerable.Range(0, SqlServerMaxParameters + 1).Select(i => new SimpleModel() + SimpleRecord[] inserted = Enumerable.Range(0, SqlServerMaxParameters + 1).Select(i => new SimpleRecord() { // The last Id is set to NULL, so it must not be inserted and the whole batch should fail Id = i < SqlServerMaxParameters ? 
fixture.GenerateNextKey() : null!, Number = 100 + i, Text = i.ToString(), - Floats = Enumerable.Range(0, SimpleModel.DimensionCount).Select(j => (float)(i + j)).ToArray() + Floats = Enumerable.Range(0, SimpleRecord.DimensionCount).Select(j => (float)(i + j)).ToArray() }).ToArray(); var keys = inserted.Select(record => record.Id).Where(key => key is not null).ToArray(); diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerNoVectorConformanceTests.cs new file mode 100644 index 000000000000..7b70e75d7b70 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerNoVectorConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using SqlServerIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace SqlServerIntegrationTests.CRUD; + +public class SqlServerNoVectorConformanceTests(SqlServerNoVectorConformanceTests.Fixture fixture) + : NoVectorConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoVectorConformanceTests.Fixture + { + public override TestStore TestStore => SqlServerTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteNoVectorConformanceTests.cs new file mode 100644 index 000000000000..1b0e800e8ca7 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteNoVectorConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using SqliteIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace SqliteIntegrationTests.CRUD; + +public class SqliteNoVectorConformanceTests(SqliteNoVectorConformanceTests.Fixture fixture) + : NoVectorConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoVectorConformanceTests.Fixture + { + public override TestStore TestStore => SqliteTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs index 849789b5e910..ac50caf47040 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs @@ -67,12 +67,12 @@ public Task UpsertBatchAsyncCanInsertNewRecord_WithoutVectors() private async Task UpsertBatchAsyncCanInsertNewRecords(bool includeVectors) { var collection = fixture.Collection; - SimpleModel[] inserted = Enumerable.Range(0, 10).Select(i => new SimpleModel() + SimpleRecord[] inserted = Enumerable.Range(0, 10).Select(i => new SimpleRecord() { Id = fixture.GenerateNextKey(), Number = 100 + i, Text = i.ToString(), - Floats = Enumerable.Range(0, SimpleModel.DimensionCount).Select(j => (float)(i + j)).ToArray() + Floats = Enumerable.Range(0, SimpleRecord.DimensionCount).Select(j => (float)(i + j)).ToArray() }).ToArray(); var keys = inserted.Select(record => record.Id).ToArray(); @@ -97,16 +97,16 @@ public Task UpsertBatchAsyncCanUpdateExistingRecords_WithoutVectors() private async Task UpsertBatchAsyncCanUpdateExistingRecords(bool includeVectors) { - SimpleModel[] inserted = Enumerable.Range(0, 10).Select(i => new SimpleModel() + SimpleRecord[] inserted = Enumerable.Range(0, 10).Select(i => new SimpleRecord() { Id = fixture.GenerateNextKey(), Number = 100 + i, Text = i.ToString(), - Floats = Enumerable.Range(0, SimpleModel.DimensionCount).Select(j => (float)(i + j)).ToArray() + Floats = Enumerable.Range(0, SimpleRecord.DimensionCount).Select(j => (float)(i + j)).ToArray() }).ToArray(); await fixture.Collection.UpsertAsync(inserted).ToArrayAsync(); - SimpleModel[] updated = inserted.Select(i => new SimpleModel() + SimpleRecord[] updated = inserted.Select(i => new SimpleRecord() { Id = i.Id, Text = i.Text + "updated", @@ -136,16 +136,16 @@ public Task UpsertCanBothInsertAndUpdateRecordsFromTheSameBatch_WithoutVectors() private async Task UpsertCanBothInsertAndUpdateRecordsFromTheSameBatch(bool includeVectors) { - SimpleModel[] records = Enumerable.Range(0, 10).Select(i => new SimpleModel() + SimpleRecord[] records = Enumerable.Range(0, 10).Select(i => new SimpleRecord() { Id = fixture.GenerateNextKey(), Number = 100 + i, Text = i.ToString(), - Floats = Enumerable.Range(0, SimpleModel.DimensionCount).Select(j => (float)(i + j)).ToArray() + Floats = Enumerable.Range(0, SimpleRecord.DimensionCount).Select(j => (float)(i + j)).ToArray() }).ToArray(); // We take first half of the records and insert them. 
- SimpleModel[] firstHalf = records.Take(records.Length / 2).ToArray(); + SimpleRecord[] firstHalf = records.Take(records.Length / 2).ToArray(); TKey[] insertedKeys = await fixture.Collection.UpsertAsync(firstHalf).ToArrayAsync(); Assert.Equal( firstHalf.Select(r => r.Id).OrderBy(id => id).ToArray(), @@ -196,6 +196,6 @@ public async Task DeleteBatchAsyncDeletesTheRecords() // The order of records in the received array is not guaranteed // to match the order of keys in the requested keys array. - protected SimpleModel GetRecord(SimpleModel[] received, TKey key) + protected SimpleRecord GetRecord(SimpleRecord[] received, TKey key) => received.Single(r => r.Id!.Equals(key)); } diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs new file mode 100644 index 000000000000..d5c468f07cb9 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs @@ -0,0 +1,172 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using VectorDataSpecificationTests.Support; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace VectorDataSpecificationTests.CRUD; + +/// +/// Tests CRUD operations using a model without a vector. +/// This is only supported by a subset of databases so only extend if applicable for your database. +/// +public class NoVectorConformanceTests(NoVectorConformanceTests.Fixture fixture) where TKey : notnull +{ + [ConditionalFact] + public Task GetAsyncReturnsInsertedRecord_WithVectors() + => this.GetAsyncReturnsInsertedRecord(includeVectors: true); + + [ConditionalFact] + public Task GetAsyncReturnsInsertedRecord_WithoutVectors() + => this.GetAsyncReturnsInsertedRecord(includeVectors: false); + + private async Task GetAsyncReturnsInsertedRecord(bool includeVectors) + { + var expectedRecord = fixture.TestData[0]; + + var received = await fixture.Collection.GetAsync(expectedRecord.Id, new() { IncludeVectors = includeVectors }); + + expectedRecord.AssertEqual(received); + } + + [ConditionalFact] + public Task UpsertAsyncCanInsertNewRecord_WithVectors() + => this.UpsertAsyncCanInsertNewRecord(includeVectors: true); + + [ConditionalFact] + public Task UpsertAsyncCanInsertNewRecord_WithoutVectors() + => this.UpsertAsyncCanInsertNewRecord(includeVectors: false); + + private async Task UpsertAsyncCanInsertNewRecord(bool includeVectors) + { + var collection = fixture.Collection; + TKey expectedKey = fixture.GenerateNextKey(); + NoVectorRecord inserted = new() + { + Id = expectedKey, + Text = "some" + }; + + Assert.Null(await collection.GetAsync(expectedKey)); + TKey key = await collection.UpsertAsync(inserted); + Assert.Equal(expectedKey, key); + + var received = await collection.GetAsync(expectedKey, new() { IncludeVectors = includeVectors }); + inserted.AssertEqual(received); + } + + [ConditionalFact] + public Task UpsertAsyncCanUpdateExistingRecord_WithVectors() + => this.UpsertAsyncCanUpdateExistingRecord(includeVectors: true); + + [ConditionalFact] + public Task UpsertAsyncCanUpdateExistingRecord__WithoutVectors() + => this.UpsertAsyncCanUpdateExistingRecord(includeVectors: false); + + private async Task UpsertAsyncCanUpdateExistingRecord(bool includeVectors) + { + var collection = fixture.Collection; + var existingRecord = fixture.TestData[1]; + NoVectorRecord updated = new() + { + Id = existingRecord.Id, + Text = "updated" + 
}; + + Assert.NotNull(await collection.GetAsync(existingRecord.Id)); + TKey key = await collection.UpsertAsync(updated); + Assert.Equal(existingRecord.Id, key); + + var received = await collection.GetAsync(existingRecord.Id, new() { IncludeVectors = includeVectors }); + updated.AssertEqual(received); + } + + [ConditionalFact] + public async Task DeleteAsyncDeletesTheRecord() + { + var recordToRemove = fixture.TestData[2]; + + Assert.NotNull(await fixture.Collection.GetAsync(recordToRemove.Id)); + await fixture.Collection.DeleteAsync(recordToRemove.Id); + Assert.Null(await fixture.Collection.GetAsync(recordToRemove.Id)); + } + + /// + /// This class is for testing databases that support having no vector. + /// Not all DBs support this. + /// + public sealed class NoVectorRecord + { + public const int DimensionCount = 3; + + [VectorStoreRecordKey(StoragePropertyName = "key")] + public TKey Id { get; set; } = default!; + + [VectorStoreRecordData(StoragePropertyName = "text")] + public string? Text { get; set; } + + public void AssertEqual(NoVectorRecord? other) + { + Assert.NotNull(other); + Assert.Equal(this.Id, other.Id); + Assert.Equal(this.Text, other.Text); + } + } + + /// + /// Provides data and configuration for a model without a vector, which is supported by some connectors. + /// + public abstract class Fixture : VectorStoreCollectionFixture + { + protected override List BuildTestData() => + [ + new() + { + Id = this.GenerateNextKey(), + Text = "UsedByGetTests", + }, + new() + { + Id = this.GenerateNextKey(), + Text = "UsedByUpdateTests", + }, + new() + { + Id = this.GenerateNextKey(), + Text = "UsedByDeleteTests", + }, + new() + { + Id = this.GenerateNextKey(), + Text = "UsedByDeleteBatchTests", + } + ]; + + protected override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = + [ + new VectorStoreRecordKeyProperty(nameof(NoVectorRecord.Id), typeof(TKey)), + new VectorStoreRecordDataProperty(nameof(NoVectorRecord.Text), typeof(string)) { IsFilterable = true }, + ] + }; + + protected override async Task WaitForDataAsync() + { + for (var i = 0; i < 20; i++) + { + var results = await this.Collection.GetAsync([this.TestData[0].Id, this.TestData[1].Id, this.TestData[2].Id, this.TestData[3].Id]).ToArrayAsync(); + if (results.Length == 4 && results.All(r => r != null)) + { + return; + } + + await Task.Delay(TimeSpan.FromMilliseconds(100)); + } + + throw new InvalidOperationException("Data did not appear in the collection within the expected time."); + } + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs index c2a63b86cae8..a13f19696d55 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs @@ -53,12 +53,12 @@ private async Task UpsertAsyncCanInsertNewRecord(bool includeVectors) { var collection = fixture.Collection; TKey expectedKey = fixture.GenerateNextKey(); - SimpleModel inserted = new() + SimpleRecord inserted = new() { Id = expectedKey, Text = "some", Number = 123, - Floats = new ReadOnlyMemory(Enumerable.Repeat(0.1f, SimpleModel.DimensionCount).ToArray()) + Floats = new ReadOnlyMemory(Enumerable.Repeat(0.1f, SimpleRecord.DimensionCount).ToArray()) }; Assert.Null(await collection.GetAsync(expectedKey)); @@ -81,12 +81,12 @@ private async Task 
UpsertAsyncCanUpdateExistingRecord(bool includeVectors) { var collection = fixture.Collection; var existingRecord = fixture.TestData[1]; - SimpleModel updated = new() + SimpleRecord updated = new() { Id = existingRecord.Id, Text = "updated", Number = 456, - Floats = new ReadOnlyMemory(Enumerable.Repeat(0.2f, SimpleModel.DimensionCount).ToArray()) + Floats = new ReadOnlyMemory(Enumerable.Repeat(0.2f, SimpleRecord.DimensionCount).ToArray()) }; Assert.NotNull(await collection.GetAsync(existingRecord.Id)); diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs index be3becc603cc..3af9eae17e58 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs @@ -12,7 +12,7 @@ public abstract class CollectionConformanceTests(VectorStoreFixture fixtur { [ConditionalFact] public Task DeleteCollectionDoesNotThrowForNonExistingCollection() - => this.DeleteNonExistingCollection>(); + => this.DeleteNonExistingCollection>(); [ConditionalFact] public Task DeleteCollectionDoesNotThrowForNonExistingCollection_GenericDataModel() @@ -20,7 +20,7 @@ public Task DeleteCollectionDoesNotThrowForNonExistingCollection_GenericDataMode [ConditionalFact] public Task CreateCollectionCreatesTheCollection() - => this.CreateCollection>(); + => this.CreateCollection>(); [ConditionalFact] public Task CreateCollectionCreatesTheCollection_GenericDataModel() @@ -28,7 +28,7 @@ public Task CreateCollectionCreatesTheCollection_GenericDataModel() [ConditionalFact] public Task CreateCollectionIfNotExistsCalledMoreThanOnceDoesNotThrow() - => this.CreateCollectionIfNotExistsMoreThanOnce>(); + => this.CreateCollectionIfNotExistsMoreThanOnce>(); [ConditionalFact] public Task CreateCollectionIfNotExistsCalledMoreThanOnceDoesNotThrow_GenericDataModel() @@ -36,7 +36,7 @@ public Task CreateCollectionIfNotExistsCalledMoreThanOnceDoesNotThrow_GenericDat [ConditionalFact] public Task CreateCollectionCalledMoreThanOnceThrowsVectorStoreOperationException() - => this.CreateCollectionMoreThanOnce>(); + => this.CreateCollectionMoreThanOnce>(); [ConditionalFact] public Task CreateCollectionCalledMoreThanOnceThrowsVectorStoreOperationException_GenericDataModel() diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Models/SimpleModel.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Models/SimpleRecord.cs similarity index 91% rename from dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Models/SimpleModel.cs rename to dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Models/SimpleRecord.cs index 13a47e386516..1d2222204045 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Models/SimpleModel.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Models/SimpleRecord.cs @@ -10,7 +10,7 @@ namespace VectorDataSpecificationTests.Models; /// a key, int, string and an embedding. /// /// TKey is a generic parameter because different connectors support different key types. 
-public sealed class SimpleModel +public sealed class SimpleRecord { public const int DimensionCount = 3; @@ -26,7 +26,7 @@ public sealed class SimpleModel [VectorStoreRecordVector(Dimensions: DimensionCount, StoragePropertyName = "embedding")] public ReadOnlyMemory Floats { get; set; } - public void AssertEqual(SimpleModel? other, bool includeVectors) + public void AssertEqual(SimpleRecord? other, bool includeVectors) { Assert.NotNull(other); Assert.Equal(this.Id, other.Id); diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs index b5c688c01835..370a77fae976 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs @@ -5,38 +5,38 @@ namespace VectorDataSpecificationTests.Support; -public abstract class SimpleModelFixture : VectorStoreCollectionFixture> +public abstract class SimpleModelFixture : VectorStoreCollectionFixture> where TKey : notnull { - protected override List> BuildTestData() => + protected override List> BuildTestData() => [ new() { Id = this.GenerateNextKey(), Number = 1, Text = "UsedByGetTests", - Floats = Enumerable.Repeat(0.1f, SimpleModel.DimensionCount).ToArray() + Floats = Enumerable.Repeat(0.1f, SimpleRecord.DimensionCount).ToArray() }, new() { Id = this.GenerateNextKey(), Number = 2, Text = "UsedByUpdateTests", - Floats = Enumerable.Repeat(0.2f, SimpleModel.DimensionCount).ToArray() + Floats = Enumerable.Repeat(0.2f, SimpleRecord.DimensionCount).ToArray() }, new() { Id = this.GenerateNextKey(), Number = 3, Text = "UsedByDeleteTests", - Floats = Enumerable.Repeat(0.3f, SimpleModel.DimensionCount).ToArray() + Floats = Enumerable.Repeat(0.3f, SimpleRecord.DimensionCount).ToArray() }, new() { Id = this.GenerateNextKey(), Number = 4, Text = "UsedByDeleteBatchTests", - Floats = Enumerable.Repeat(0.4f, SimpleModel.DimensionCount).ToArray() + Floats = Enumerable.Repeat(0.4f, SimpleRecord.DimensionCount).ToArray() } ]; @@ -45,16 +45,16 @@ protected override VectorStoreRecordDefinition GetRecordDefinition() { Properties = [ - new VectorStoreRecordKeyProperty(nameof(SimpleModel.Id), typeof(TKey)), - new VectorStoreRecordVectorProperty(nameof(SimpleModel.Floats), typeof(ReadOnlyMemory?)) + new VectorStoreRecordKeyProperty(nameof(SimpleRecord.Id), typeof(TKey)), + new VectorStoreRecordVectorProperty(nameof(SimpleRecord.Floats), typeof(ReadOnlyMemory?)) { - Dimensions = SimpleModel.DimensionCount, + Dimensions = SimpleRecord.DimensionCount, DistanceFunction = this.DistanceFunction, IndexKind = this.IndexKind }, - new VectorStoreRecordDataProperty(nameof(SimpleModel.Number), typeof(int)) { IsFilterable = true }, - new VectorStoreRecordDataProperty(nameof(SimpleModel.Text), typeof(string)) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(SimpleRecord.Number), typeof(int)) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(SimpleRecord.Text), typeof(string)) { IsFilterable = true }, ] }; } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs new file mode 100644 index 000000000000..016ab64870e6 --- /dev/null +++ 
b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using WeaviateIntegrationTests.Support; +using Xunit; + +namespace WeaviateIntegrationTests.CRUD; + +public class WeaviateNoVectorConformanceTests(WeaviateNoVectorConformanceTests.Fixture fixture) + : NoVectorConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoVectorConformanceTests.Fixture + { + public override TestStore TestStore => WeaviateTestStore.Instance; + + /// + /// Weaviate collections must start with an uppercase letter. + /// + protected override string CollectionName => "NoVectorCollection"; + } +} From 1efa055b9f8178d5c67d7a51195f7de93fc1035e Mon Sep 17 00:00:00 2001 From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com> Date: Tue, 8 Apr 2025 08:13:51 -0700 Subject: [PATCH 33/63] .Net: [MEVD] Updated VectorStoreException. Made implementations sealed (#11414) ### Motivation and Context Resolves: https://github.com/microsoft/semantic-kernel/issues/11384 Resolves: https://github.com/microsoft/semantic-kernel/issues/11385 - Made vector store implementations `sealed`. - Updated `VectorStoreException` with new `VectorStoreName` property and renamed `VectorStoreType` to `VectorStoreSystemName`. ### Contribution Checklist - [x] The code builds clean without any errors or warnings - [x] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [x] All unit tests pass, and I have added new tests where possible - [x] I didn't break anyone :smile: --- .../AzureAISearchFactory.cs | 15 +++-- .../PineconeFactory.cs | 15 +++-- .../QdrantFactory.cs | 15 +++-- .../RedisFactory.cs | 15 +++-- .../VectorStore_DataIngestion_CustomMapper.cs | 17 +++-- .../AzureAISearchVectorStore.cs | 18 ++--- ...zureAISearchVectorStoreRecordCollection.cs | 40 +++++++----- .../AzureCosmosDBMongoDBVectorStore.cs | 6 +- ...mosDBMongoDBVectorStoreRecordCollection.cs | 37 ++++++----- .../AzureCosmosDBNoSQLVectorStore.cs | 6 +- ...osmosDBNoSQLVectorStoreRecordCollection.cs | 38 ++++++----- .../InMemoryVectorStoreRecordCollection.cs | 2 +- .../MongoDBVectorStore.cs | 6 +- .../MongoDBVectorStoreRecordCollection.cs | 43 +++++++----- .../PineconeVectorStore.cs | 9 +-- .../PineconeVectorStoreRecordCollection.cs | 38 ++++++----- .../PostgresVectorStore.cs | 9 +-- .../PostgresVectorStoreRecordCollection.cs | 36 ++++++---- .../PostgresVectorStoreUtils.cs | 11 +++- .../QdrantVectorStore.cs | 9 +-- ...drantVectorStoreCollectionSearchMapping.cs | 15 ++++- .../QdrantVectorStoreRecordCollection.cs | 45 ++++++++----- ...RedisHashSetVectorStoreRecordCollection.cs | 36 +++++----- .../RedisJsonVectorStoreRecordCollection.cs | 38 ++++++----- .../RedisVectorStore.cs | 9 +-- .../ExceptionWrapper.cs | 24 ++++--- .../SqlServerVectorStore.cs | 14 +++- .../SqlServerVectorStoreRecordCollection.cs | 65 ++++++++++++++----- .../SqliteVectorStore.cs | 6 +- .../SqliteVectorStoreRecordCollection.cs | 14 ++-- .../WeaviateVectorStore.cs | 9 +-- .../WeaviateVectorStoreRecordCollection.cs | 33 ++++++---- ...VectorStoreCollectionSearchMappingTests.cs | 2 +- .../CompatibilitySuppressions.xml | 42 ++++++++++++ 
.../VectorStorage/VectorStoreException.cs | 12 +++- .../src/Data/VectorStoreErrorHandler.cs | 13 +++- 36 files changed, 499 insertions(+), 263 deletions(-) diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs index f509373233d9..a837e38d0ad8 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs @@ -39,14 +39,17 @@ public static class AzureAISearchFactory /// Azure AI Search client that can be used to manage the list of indices in an Azure AI Search Service. /// The . public static IVectorStore CreateQdrantLangchainInteropVectorStore(SearchIndexClient searchIndexClient) - => new AzureAISearchLangchainInteropVectorStore(searchIndexClient); + => new AzureAISearchLangchainInteropVectorStore(new AzureAISearchVectorStore(searchIndexClient), searchIndexClient); - private sealed class AzureAISearchLangchainInteropVectorStore(SearchIndexClient searchIndexClient, AzureAISearchVectorStoreOptions? options = default) - : AzureAISearchVectorStore(searchIndexClient, options) + private sealed class AzureAISearchLangchainInteropVectorStore( + IVectorStore innerStore, + SearchIndexClient searchIndexClient) + : IVectorStore { private readonly SearchIndexClient _searchIndexClient = searchIndexClient; - public override IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + where TKey : notnull { if (typeof(TKey) != typeof(string) || typeof(TRecord) != typeof(LangchainDocument)) { @@ -68,6 +71,10 @@ public override IVectorStoreRecordCollection GetCollection }) as IVectorStoreRecordCollection)!; } + + public object? GetService(Type serviceType, object? serviceKey = null) => innerStore.GetService(serviceType, serviceKey); + + public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) => innerStore.ListCollectionNamesAsync(cancellationToken); } /// diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs index 2f878199b62a..76d671debcbe 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs @@ -34,14 +34,17 @@ public static class PineconeFactory /// Pinecone client that can be used to manage the collections and points in a Pinecone store. /// The . public static IVectorStore CreatePineconeLangchainInteropVectorStore(Sdk.PineconeClient pineconeClient) - => new PineconeLangchainInteropVectorStore(pineconeClient); + => new PineconeLangchainInteropVectorStore(new PineconeVectorStore(pineconeClient), pineconeClient); - private sealed class PineconeLangchainInteropVectorStore(Sdk.PineconeClient pineconeClient) - : PineconeVectorStore(pineconeClient) + private sealed class PineconeLangchainInteropVectorStore( + IVectorStore innerStore, + Sdk.PineconeClient pineconeClient) + : IVectorStore { private readonly Sdk.PineconeClient _pineconeClient = pineconeClient; - public override IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + where TKey : notnull { if (typeof(TKey) != typeof(string) || typeof(TRecord) != typeof(LangchainDocument)) { @@ -59,5 +62,9 @@ public override IVectorStoreRecordCollection GetCollection)!; } + + public object? GetService(Type serviceType, object? serviceKey = null) => innerStore.GetService(serviceType, serviceKey); + + public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) => innerStore.ListCollectionNamesAsync(cancellationToken); } } diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs index 79d149c24973..6932774c785d 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs @@ -37,14 +37,17 @@ public static class QdrantFactory /// Qdrant client that can be used to manage the collections and points in a Qdrant store. /// The . public static IVectorStore CreateQdrantLangchainInteropVectorStore(QdrantClient qdrantClient) - => new QdrantLangchainInteropVectorStore(qdrantClient); + => new QdrantLangchainInteropVectorStore(new QdrantVectorStore(qdrantClient), qdrantClient); - private sealed class QdrantLangchainInteropVectorStore(QdrantClient qdrantClient) - : QdrantVectorStore(qdrantClient) + private sealed class QdrantLangchainInteropVectorStore( + IVectorStore innerStore, + QdrantClient qdrantClient) + : IVectorStore { private readonly QdrantClient _qdrantClient = qdrantClient; - public override IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + where TKey : notnull { // Create a Qdrant collection. To be compatible with Langchain // we need to use a custom record definition that matches the @@ -90,6 +93,10 @@ public override IVectorStoreRecordCollection GetCollection record types or string keys and LangchainDocument record types"); } + + public object? GetService(Type serviceType, object? serviceKey = null) => innerStore.GetService(serviceType, serviceKey); + + public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) => innerStore.ListCollectionNamesAsync(cancellationToken); } /// diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs index 23fd026401b4..83a642bbe444 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs @@ -34,14 +34,17 @@ public static class RedisFactory /// The redis database to read/write from. /// The . 
public static IVectorStore CreateRedisLangchainInteropVectorStore(IDatabase database) - => new RedisLangchainInteropVectorStore(database); + => new RedisLangchainInteropVectorStore(new RedisVectorStore(database), database); - private sealed class RedisLangchainInteropVectorStore(IDatabase database) - : RedisVectorStore(database) + private sealed class RedisLangchainInteropVectorStore( + IVectorStore innerStore, + IDatabase database) + : IVectorStore { private readonly IDatabase _database = database; - public override IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + where TKey : notnull { if (typeof(TKey) != typeof(string) || typeof(TRecord) != typeof(LangchainDocument)) { @@ -60,5 +63,9 @@ public override IVectorStoreRecordCollection GetCollection)!; } + + public object? GetService(Type serviceType, object? serviceKey = null) => innerStore.GetService(serviceType, serviceKey); + + public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) => innerStore.ListCollectionNamesAsync(cancellationToken); } } diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs index 54928cddfb23..139412bdfd79 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs @@ -66,7 +66,9 @@ public async Task ExampleAsync() // Initiate the docker container and construct the vector store using the custom factory for creating collections. await redisFixture.ManualInitializeAsync(); ConnectionMultiplexer redis = ConnectionMultiplexer.Connect("localhost:6379"); - var vectorStore = new CustomRedisVectorStore(redis.GetDatabase()); + + var database = redis.GetDatabase(); + var vectorStore = new CustomRedisVectorStore(new RedisVectorStore(database), database); // Get and create collection if it doesn't exist, using the record definition containing the storage model. var collection = vectorStore.GetCollection("skglossary", s_glossaryDefinition); @@ -131,12 +133,15 @@ public GenericDataModel MapFromStorageToDataModel((string Key, JsonNode Node) st } } - private sealed class CustomRedisVectorStore(IDatabase database, RedisVectorStoreOptions? options = default) - : RedisVectorStore(database, options) + private sealed class CustomRedisVectorStore( + IVectorStore innerStore, + IDatabase database) + : IVectorStore { private readonly IDatabase _database = database; - public override IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + where TKey : notnull { // If the record definition is the glossary definition and the record type is the generic data model, inject the custom mapper into the collection options. if (vectorStoreRecordDefinition == s_glossaryDefinition && typeof(TRecord) == typeof(GenericDataModel)) @@ -149,6 +154,10 @@ public override IVectorStoreRecordCollection GetCollection(_database, name, new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; return collection!; } + + public object? GetService(Type serviceType, object? 
serviceKey = null) => innerStore.GetService(serviceType, serviceKey); + + public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) => innerStore.ListCollectionNamesAsync(cancellationToken); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs index b270d699b6ec..d5830cba605b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs @@ -17,7 +17,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public class AzureAISearchVectorStore : IVectorStore +public sealed class AzureAISearchVectorStore : IVectorStore { /// Metadata about vector store. private readonly VectorStoreMetadata _metadata; @@ -48,7 +48,7 @@ public AzureAISearchVectorStore(SearchIndexClient searchIndexClient, AzureAISear } /// - public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull { #pragma warning disable CS0618 // IAzureAISearchVectorStoreRecordCollectionFactor is obsolete @@ -76,16 +76,16 @@ public virtual IVectorStoreRecordCollection GetCollection - public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { var indexNamesEnumerable = this._searchIndexClient.GetIndexNamesAsync(cancellationToken).ConfigureAwait(false); var indexNamesEnumerator = indexNamesEnumerable.GetAsyncEnumerator(); - var nextResult = await GetNextIndexNameAsync(indexNamesEnumerator).ConfigureAwait(false); + var nextResult = await this.GetNextIndexNameAsync(indexNamesEnumerator).ConfigureAwait(false); while (nextResult.more) { yield return nextResult.name; - nextResult = await GetNextIndexNameAsync(indexNamesEnumerator).ConfigureAwait(false); + nextResult = await this.GetNextIndexNameAsync(indexNamesEnumerator).ConfigureAwait(false); } } @@ -109,7 +109,7 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat /// /// The enumerator to get the next result from. /// A value indicating whether there are more results and the current string if true. 
- private static async Task<(string name, bool more)> GetNextIndexNameAsync( + private async Task<(string name, bool more)> GetNextIndexNameAsync( ConfiguredCancelableAsyncEnumerable.Enumerator enumerator) { const string OperationName = "GetIndexNames"; @@ -123,7 +123,8 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = AzureAISearchConstants.VectorStoreSystemName, + VectorStoreSystemName = AzureAISearchConstants.VectorStoreSystemName, + VectorStoreName = this._metadata.VectorStoreName, OperationName = OperationName }; } @@ -131,7 +132,8 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = AzureAISearchConstants.VectorStoreSystemName, + VectorStoreSystemName = AzureAISearchConstants.VectorStoreSystemName, + VectorStoreName = this._metadata.VectorStoreName, OperationName = OperationName }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index daed4951624a..9573128089e1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -24,7 +24,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class AzureAISearchVectorStoreRecordCollection : +public sealed class AzureAISearchVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorizableTextSearch, IKeywordHybridSearch @@ -108,7 +108,7 @@ public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexCli public string CollectionName => this._collectionName; /// - public virtual async Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public async Task CollectionExistsAsync(CancellationToken cancellationToken = default) { try { @@ -123,7 +123,8 @@ public virtual async Task CollectionExistsAsync(CancellationToken cancella { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = AzureAISearchConstants.VectorStoreSystemName, + VectorStoreSystemName = AzureAISearchConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this._collectionName, OperationName = "GetIndex" }; @@ -131,7 +132,7 @@ public virtual async Task CollectionExistsAsync(CancellationToken cancella } /// - public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public Task CreateCollectionAsync(CancellationToken cancellationToken = default) { var vectorSearchConfig = new VectorSearch(); var searchFields = new List(); @@ -173,7 +174,7 @@ public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = } /// - public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -182,7 +183,7 @@ public virtual async Task 
CreateCollectionIfNotExistsAsync(CancellationToken can } /// - public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { return this.RunOperationAsync( "DeleteIndex", @@ -200,7 +201,7 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = } /// - public virtual Task GetAsync(string key, GetRecordOptions? options = default, CancellationToken cancellationToken = default) + public Task GetAsync(string key, GetRecordOptions? options = default, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -213,7 +214,7 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = } /// - public virtual async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -234,7 +235,7 @@ public virtual async IAsyncEnumerable GetAsync(IEnumerable keys } /// - public virtual Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -245,7 +246,7 @@ public virtual Task DeleteAsync(string key, CancellationToken cancellationToken } /// - public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); if (!keys.Any()) @@ -260,7 +261,7 @@ public virtual Task DeleteAsync(IEnumerable keys, CancellationToken canc } /// - public virtual async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -273,7 +274,7 @@ public virtual async Task UpsertAsync(TRecord record, CancellationToken } /// - public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); if (!records.Any()) @@ -293,7 +294,7 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable r } /// - public virtual Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { var floatVector = VerifyVectorParam(vector); Verify.NotLessThan(top, 1); @@ -350,7 +351,7 @@ public virtual Task> VectorizedSearchAsync } /// - public virtual Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { Verify.NotNull(searchText); Verify.NotLessThan(top, 1); @@ -512,6 +513,7 @@ public Task> HybridSearchAsync(TVector vec return VectorStoreErrorHandler.RunModelConversion( AzureAISearchConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, OperationName, () => this._mapper!.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); @@ -574,7 +576,8 @@ private Task> MapToStorageModelAndUploadDocumentA if (this._mapper is not null) { var jsonObjects = VectorStoreErrorHandler.RunModelConversion( - AzureAISearchConstants.VectorStoreSystemName!, + AzureAISearchConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, OperationName, () => records.Select(this._mapper!.MapFromDataToStorageModel)); @@ -603,6 +606,7 @@ private async IAsyncEnumerable> MapSearchResultsAsyn { var document = VectorStoreErrorHandler.RunModelConversion( AzureAISearchConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, operationName, () => this._mapper!.MapFromStorageToDataModel(result.Document, new() { IncludeVectors = includeVectors })); @@ -686,7 +690,8 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = AzureAISearchConstants.VectorStoreSystemName, + VectorStoreSystemName = AzureAISearchConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this._collectionName, OperationName = operationName }; @@ -695,7 +700,8 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = AzureAISearchConstants.VectorStoreSystemName, + VectorStoreSystemName = AzureAISearchConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this._collectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs index 7a94c003a28c..469eb60354fe 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs @@ -15,7 +15,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public class AzureCosmosDBMongoDBVectorStore : IVectorStore +public sealed class AzureCosmosDBMongoDBVectorStore : IVectorStore { /// Metadata about vector store. private readonly VectorStoreMetadata _metadata; @@ -46,7 +46,7 @@ public AzureCosmosDBMongoDBVectorStore(IMongoDatabase mongoDatabase, AzureCosmos } /// - public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) where TKey : notnull { #pragma warning disable CS0618 // IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory is obsolete @@ -70,7 +70,7 @@ public virtual IVectorStoreRecordCollection GetCollection - public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { using var cursor = await this._mongoDatabase .ListCollectionNamesAsync(cancellationToken: cancellationToken) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index 2a48e74ecea3..1c3d80a78622 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -23,7 +23,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class AzureCosmosDBMongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection +public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// Metadata about vector store record collection. @@ -90,11 +90,11 @@ public AzureCosmosDBMongoDBVectorStoreRecordCollection( } /// - public virtual Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public Task CollectionExistsAsync(CancellationToken cancellationToken = default) => this.RunOperationAsync("ListCollectionNames", () => this.InternalCollectionExistsAsync(cancellationToken)); /// - public virtual async Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public async Task CreateCollectionAsync(CancellationToken cancellationToken = default) { // The IMongoDatabase.CreateCollectionAsync "Creates a new collection if not already available". // To make sure that all the connectors are consistent, we throw when the collection exists. 
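For callers, the renamed exception metadata in this patch surfaces roughly as follows; this is a minimal sketch, where `collection` stands for any IVectorStoreRecordCollection instance and the record type is illustrative, not part of the change itself:

try
{
    // Connectors throw VectorStoreOperationException when the collection already
    // exists, keeping CreateCollectionAsync behaviour consistent across stores.
    await collection.CreateCollectionAsync();
}
catch (VectorStoreOperationException ex)
{
    // VectorStoreType was renamed to VectorStoreSystemName, and VectorStoreName
    // is newly populated alongside the existing CollectionName and OperationName.
    Console.WriteLine(
        $"{ex.VectorStoreSystemName}/{ex.VectorStoreName}: " +
        $"{ex.OperationName} on '{ex.CollectionName}' failed.");
}
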
@@ -102,7 +102,8 @@ public virtual async Task CreateCollectionAsync(CancellationToken cancellationTo { throw new VectorStoreOperationException("Collection already exists.") { - VectorStoreType = AzureCosmosDBMongoDBConstants.VectorStoreSystemName, + VectorStoreSystemName = AzureCosmosDBMongoDBConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = "CreateCollection" }; @@ -112,7 +113,7 @@ public virtual async Task CreateCollectionAsync(CancellationToken cancellationTo } /// - public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { await this.RunOperationAsync("CreateCollection", () => this._mongoDatabase.CreateCollectionAsync(this.CollectionName, cancellationToken: cancellationToken)).ConfigureAwait(false); @@ -122,7 +123,7 @@ await this.RunOperationAsync("CreateIndexes", } /// - public virtual async Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public async Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -131,7 +132,7 @@ await this.RunOperationAsync("DeleteOne", () => this._mongoCollection.DeleteOneA } /// - public virtual async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -140,11 +141,11 @@ await this.RunOperationAsync("DeleteMany", () => this._mongoCollection.DeleteMan } /// - public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) => this.RunOperationAsync("DropCollection", () => this._mongoDatabase.DropCollectionAsync(this.CollectionName, cancellationToken)); /// - public virtual async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -168,13 +169,14 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = return VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBMongoDBConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(record, new() { IncludeVectors = includeVectors })); } /// - public virtual async IAsyncEnumerable GetAsync( + public async IAsyncEnumerable GetAsync( IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -195,6 +197,7 @@ public virtual async IAsyncEnumerable GetAsync( { yield return VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBMongoDBConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(record, new())); @@ -204,7 +207,7 @@ public virtual async IAsyncEnumerable GetAsync( } /// - public virtual Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -213,6 +216,7 @@ public virtual Task UpsertAsync(TRecord record, CancellationToken cancel var replaceOptions = new ReplaceOptions { IsUpsert = true }; var storageModel = VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBMongoDBConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -230,7 +234,7 @@ await this._mongoCollection } /// - public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -247,7 +251,7 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable r } /// - public virtual async Task> VectorizedSearchAsync( + public async Task> VectorizedSearchAsync( TVector vector, int top, MEVD.VectorSearchOptions? options = null, @@ -405,6 +409,7 @@ private async IAsyncEnumerable> EnumerateAndMapSearc var score = response[ScorePropertyName].AsDouble; var record = VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBMongoDBConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(response[DocumentPropertyName].AsBsonDocument, new())); @@ -443,7 +448,8 @@ private async Task RunOperationAsync(string operationName, Func operation) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = AzureCosmosDBMongoDBConstants.VectorStoreSystemName, + VectorStoreSystemName = AzureCosmosDBMongoDBConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = operationName }; @@ -460,7 +466,8 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = AzureCosmosDBMongoDBConstants.VectorStoreSystemName, + VectorStoreSystemName = AzureCosmosDBMongoDBConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs index 0f1cc01c4b29..680dd52a4b83 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs @@ -15,7 +15,7 @@ namespace 
Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public class AzureCosmosDBNoSQLVectorStore : IVectorStore +public sealed class AzureCosmosDBNoSQLVectorStore : IVectorStore { /// Metadata about vector store. private readonly VectorStoreMetadata _metadata; @@ -46,7 +46,7 @@ public AzureCosmosDBNoSQLVectorStore(Database database, AzureCosmosDBNoSQLVector } /// - public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull { #pragma warning disable CS0618 // IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory is obsolete @@ -77,7 +77,7 @@ public virtual IVectorStoreRecordCollection GetCollection - public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { const string Query = "SELECT VALUE(c.id) FROM c"; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index d1941dc52129..6de899c1e100 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -23,7 +23,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// /// The data model to use for adding, updating and retrieving data from storage. 
#pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class AzureCosmosDBNoSQLVectorStoreRecordCollection : +public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorStoreRecordCollection, IKeywordHybridSearch @@ -115,7 +115,7 @@ public AzureCosmosDBNoSQLVectorStoreRecordCollection( } /// - public virtual Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public Task CollectionExistsAsync(CancellationToken cancellationToken = default) { return this.RunOperationAsync("GetContainerQueryIterator", async () => { @@ -140,14 +140,14 @@ public virtual Task CollectionExistsAsync(CancellationToken cancellationTo } /// - public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public Task CreateCollectionAsync(CancellationToken cancellationToken = default) { return this.RunOperationAsync("CreateContainer", () => this._database.CreateContainerAsync(this.GetContainerProperties(), cancellationToken: cancellationToken)); } /// - public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -156,7 +156,7 @@ public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken can } /// - public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { return this.RunOperationAsync("DeleteContainer", () => this._database @@ -167,7 +167,7 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = #region Implementation of IVectorStoreRecordCollection /// - public virtual Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public Task DeleteAsync(string key, CancellationToken cancellationToken = default) { // Use record key as partition key var compositeKey = new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key); @@ -176,7 +176,7 @@ public virtual Task DeleteAsync(string key, CancellationToken cancellationToken } /// - public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { // Use record keys as partition keys var compositeKeys = keys.Select(key => new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key)); @@ -185,7 +185,7 @@ public virtual Task DeleteAsync(IEnumerable keys, CancellationToken canc } /// - public virtual async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { // Use record key as partition key var compositeKey = new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key); @@ -196,7 +196,7 @@ public virtual Task DeleteAsync(IEnumerable keys, CancellationToken canc } /// - public virtual async IAsyncEnumerable GetAsync( + public async IAsyncEnumerable GetAsync( IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -214,7 +214,7 @@ public virtual async IAsyncEnumerable GetAsync( } /// - public virtual async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { var key = await this.InternalUpsertAsync(record, cancellationToken).ConfigureAwait(false); @@ -222,7 +222,7 @@ public virtual async Task UpsertAsync(TRecord record, CancellationToken } /// - public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -244,7 +244,7 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable r #region Implementation of IVectorStoreRecordCollection /// - public virtual async Task GetAsync(AzureCosmosDBNoSQLCompositeKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(AzureCosmosDBNoSQLCompositeKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { return await this.InternalGetAsync([key], options, cancellationToken) .FirstOrDefaultAsync(cancellationToken) @@ -252,7 +252,7 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable r } /// - public virtual async IAsyncEnumerable GetAsync( + public async IAsyncEnumerable GetAsync( IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -267,13 +267,13 @@ public virtual async IAsyncEnumerable GetAsync( } /// - public virtual Task DeleteAsync(AzureCosmosDBNoSQLCompositeKey key, CancellationToken cancellationToken = default) + public Task DeleteAsync(AzureCosmosDBNoSQLCompositeKey key, CancellationToken cancellationToken = default) { return this.InternalDeleteAsync([key], cancellationToken); } /// - public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { return this.InternalDeleteAsync(keys, cancellationToken); } @@ -305,7 +305,7 @@ async IAsyncEnumerable IVectorStoreRecordCollect } /// - public virtual Task> VectorizedSearchAsync( + public Task> VectorizedSearchAsync( TVector vector, int top, VectorSearchOptions? 
options = null, @@ -424,7 +424,8 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = AzureCosmosDBNoSQLConstants.VectorStoreSystemName, + VectorStoreSystemName = AzureCosmosDBNoSQLConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = operationName }; @@ -585,6 +586,7 @@ private async IAsyncEnumerable InternalGetAsync( { yield return VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBNoSQLConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); @@ -601,6 +603,7 @@ private async Task InternalUpsertAsync( var jsonObject = VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBNoSQLConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -681,6 +684,7 @@ private async IAsyncEnumerable> MapSearchResultsAsyn var record = VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBNoSQLConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, operationName, () => this._mapper.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index 0608ae865587..a6c95587fcc5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -153,7 +153,7 @@ public Task CreateCollectionAsync(CancellationToken cancellationToken = default) return Task.FromException(new VectorStoreOperationException("Collection already exists.") { - VectorStoreType = InMemoryConstants.VectorStoreSystemName, + VectorStoreSystemName = InMemoryConstants.VectorStoreSystemName, CollectionName = this.CollectionName, OperationName = "CreateCollection" }); diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs index 47f79724b382..180ef381462d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs @@ -15,7 +15,7 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public class MongoDBVectorStore : IVectorStore +public sealed class MongoDBVectorStore : IVectorStore { /// Metadata about vector store. private readonly VectorStoreMetadata _metadata; @@ -46,7 +46,7 @@ public MongoDBVectorStore(IMongoDatabase mongoDatabase, MongoDBVectorStoreOption } /// - public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) where TKey : notnull { #pragma warning disable CS0618 // IMongoDBVectorStoreRecordCollectionFactoryß is obsolete @@ -70,7 +70,7 @@ public virtual IVectorStoreRecordCollection GetCollection - public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { using var cursor = await this._mongoDatabase .ListCollectionNamesAsync(cancellationToken: cancellationToken) diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index 8652fb70ebfc..5c6431ee33cc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -20,7 +20,7 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class MongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch +public sealed class MongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// Metadata about vector store record collection. @@ -90,11 +90,11 @@ public MongoDBVectorStoreRecordCollection( } /// - public virtual Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public Task CollectionExistsAsync(CancellationToken cancellationToken = default) => this.RunOperationAsync("ListCollectionNames", () => this.InternalCollectionExistsAsync(cancellationToken)); /// - public virtual async Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public async Task CreateCollectionAsync(CancellationToken cancellationToken = default) { // The IMongoDatabase.CreateCollectionAsync "Creates a new collection if not already available". // To make sure that all the connectors are consistent, we throw when the collection exists. @@ -102,7 +102,8 @@ public virtual async Task CreateCollectionAsync(CancellationToken cancellationTo { throw new VectorStoreOperationException("Collection already exists.") { - VectorStoreType = MongoDBConstants.VectorStoreSystemName, + VectorStoreSystemName = MongoDBConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = "CreateCollection" }; @@ -112,7 +113,7 @@ public virtual async Task CreateCollectionAsync(CancellationToken cancellationTo } /// - public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { // The IMongoDatabase.CreateCollectionAsync "Creates a new collection if not already available". // So for CreateCollectionIfNotExistsAsync, we don't perform an additional check. 
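The two creation paths differ as sketched below; `collection` is assumed to be a MongoDB record collection instance and the catch filter is only an illustration of the OperationName value set by this connector:

// Safe to call repeatedly: IMongoDatabase.CreateCollectionAsync already
// "creates a new collection if not already available", so no extra
// existence check is performed here.
await collection.CreateCollectionIfNotExistsAsync();

// By contrast, CreateCollectionAsync throws when the collection exists,
// to stay consistent with the other connectors.
try
{
    await collection.CreateCollectionAsync();
}
catch (VectorStoreOperationException ex) when (ex.OperationName == "CreateCollection")
{
    // The collection was already there.
}
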
@@ -128,7 +129,7 @@ await this.RunOperationWithRetryAsync( } /// - public virtual async Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public async Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -137,7 +138,7 @@ await this.RunOperationAsync("DeleteOne", () => this._mongoCollection.DeleteOneA } /// - public virtual async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -146,11 +147,11 @@ await this.RunOperationAsync("DeleteMany", () => this._mongoCollection.DeleteMan } /// - public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) => this.RunOperationAsync("DropCollection", () => this._mongoDatabase.DropCollectionAsync(this.CollectionName, cancellationToken)); /// - public virtual async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -174,13 +175,14 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = return VectorStoreErrorHandler.RunModelConversion( MongoDBConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(record, new() { IncludeVectors = includeVectors })); } /// - public virtual async IAsyncEnumerable GetAsync( + public async IAsyncEnumerable GetAsync( IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -201,6 +203,7 @@ public virtual async IAsyncEnumerable GetAsync( { yield return VectorStoreErrorHandler.RunModelConversion( MongoDBConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(record, new())); @@ -210,7 +213,7 @@ public virtual async IAsyncEnumerable GetAsync( } /// - public virtual Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -219,6 +222,7 @@ public virtual Task UpsertAsync(TRecord record, CancellationToken cancel var replaceOptions = new ReplaceOptions { IsUpsert = true }; var storageModel = VectorStoreErrorHandler.RunModelConversion( MongoDBConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -236,7 +240,7 @@ await this._mongoCollection } /// - public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -253,7 +257,7 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable r } /// - public virtual async Task> VectorizedSearchAsync( + public async Task> VectorizedSearchAsync( TVector vector, int top, MEVD.VectorSearchOptions? 
options = null, @@ -490,6 +494,7 @@ private async IAsyncEnumerable> EnumerateAndMapSearc var score = response[ScorePropertyName].AsDouble; var record = VectorStoreErrorHandler.RunModelConversion( MongoDBConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(response[DocumentPropertyName].AsBsonDocument, new() { IncludeVectors = includeVectors })); @@ -528,7 +533,8 @@ private async Task RunOperationAsync(string operationName, Func operation) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = MongoDBConstants.VectorStoreSystemName, + VectorStoreSystemName = MongoDBConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = operationName }; @@ -545,7 +551,8 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = MongoDBConstants.VectorStoreSystemName, + VectorStoreSystemName = MongoDBConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = operationName }; @@ -576,7 +583,8 @@ private async Task RunOperationWithRetryAsync( { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = MongoDBConstants.VectorStoreSystemName, + VectorStoreSystemName = MongoDBConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = operationName }; @@ -610,7 +618,8 @@ private async Task RunOperationWithRetryAsync( { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = MongoDBConstants.VectorStoreSystemName, + VectorStoreSystemName = MongoDBConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs index 7e8da0194443..9aa4b066d210 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs @@ -16,7 +16,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public class PineconeVectorStore : IVectorStore +public sealed class PineconeVectorStore : IVectorStore { private readonly Sdk.PineconeClient _pineconeClient; private readonly PineconeVectorStoreOptions _options; @@ -43,7 +43,7 @@ public PineconeVectorStore(Sdk.PineconeClient pineconeClient, PineconeVectorStor } /// - public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) where TKey : notnull { #pragma warning disable CS0618 // IPineconeVectorStoreRecordCollectionFactory is obsolete @@ -65,7 +65,7 @@ public virtual IVectorStoreRecordCollection GetCollection - public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { IndexList indexList; @@ -77,7 +77,8 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = PineconeConstants.VectorStoreSystemName, + VectorStoreSystemName = PineconeConstants.VectorStoreSystemName, + VectorStoreName = this._metadata.VectorStoreName, OperationName = "ListCollections" }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 201bf69cb3fe..030b0bbf8adc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -19,7 +19,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class PineconeVectorStoreRecordCollection : IVectorStoreRecordCollection +public sealed class PineconeVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -69,7 +69,7 @@ public PineconeVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, st } /// - public virtual Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public Task CollectionExistsAsync(CancellationToken cancellationToken = default) => this.RunCollectionOperationAsync( "CollectionExists", async () => @@ -80,7 +80,7 @@ public virtual Task CollectionExistsAsync(CancellationToken cancellationTo }); /// - public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public Task CreateCollectionAsync(CancellationToken cancellationToken = default) { // we already run through record property validation, so a single VectorStoreRecordVectorProperty is guaranteed. 
var vectorProperty = this._model.VectorProperty!; @@ -111,7 +111,7 @@ public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = } /// - public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -127,7 +127,7 @@ public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken can } /// - public virtual async Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public async Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { try { @@ -141,7 +141,8 @@ public virtual async Task DeleteCollectionAsync(CancellationToken cancellationTo { throw new VectorStoreOperationException("Call to vector store failed.", other) { - VectorStoreType = PineconeConstants.VectorStoreSystemName, + VectorStoreSystemName = PineconeConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = "DeleteCollection" }; @@ -149,7 +150,7 @@ public virtual async Task DeleteCollectionAsync(CancellationToken cancellationTo } /// - public virtual async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(key); @@ -172,13 +173,14 @@ public virtual async Task DeleteCollectionAsync(CancellationToken cancellationTo StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = options?.IncludeVectors is true }; return VectorStoreErrorHandler.RunModelConversion( PineconeConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, "Get", () => this._mapper.MapFromStorageToDataModel(result, mapperOptions)); } /// - public virtual async IAsyncEnumerable GetAsync( + public async IAsyncEnumerable GetAsync( IEnumerable keys, GetRecordOptions? 
options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -208,6 +210,7 @@ public virtual async IAsyncEnumerable GetAsync( StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = options?.IncludeVectors is true }; var records = VectorStoreErrorHandler.RunModelConversion( PineconeConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, "GetBatch", () => response.Vectors.Values.Select(x => this._mapper.MapFromStorageToDataModel(x, mapperOptions))); @@ -219,7 +222,7 @@ public virtual async IAsyncEnumerable GetAsync( } /// - public virtual Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -235,7 +238,7 @@ public virtual Task DeleteAsync(string key, CancellationToken cancellationToken } /// - public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -257,12 +260,13 @@ public virtual Task DeleteAsync(IEnumerable keys, CancellationToken canc } /// - public virtual async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); var vector = VectorStoreErrorHandler.RunModelConversion( PineconeConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, "Upsert", () => this._mapper.MapFromDataToStorageModel(record)); @@ -281,12 +285,13 @@ await this.RunIndexOperationAsync( } /// - public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); var vectors = VectorStoreErrorHandler.RunModelConversion( PineconeConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, "UpsertBatch", () => records.Select(this._mapper.MapFromDataToStorageModel).ToList()); @@ -313,7 +318,7 @@ await this.RunIndexOperationAsync( } /// - public virtual async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); Verify.NotLessThan(top, 1); @@ -362,6 +367,7 @@ public virtual async Task> VectorizedSearchAsync skippedResults.Select(x => new VectorSearchResult(this._mapper.MapFromStorageToDataModel(new Sdk.Vector() @@ -407,7 +413,8 @@ private async Task RunIndexOperationAsync(string operationName, Func RunCollectionOperationAsync(string operationName, Func< { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = PineconeConstants.VectorStoreSystemName, + VectorStoreSystemName = PineconeConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs index 933f61173ebe..1d51a5765363 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs @@ -11,7 +11,7 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; /// /// Represents a vector store implementation using PostgreSQL. /// -public class PostgresVectorStore : IVectorStore +public sealed class PostgresVectorStore : IVectorStore { private readonly IPostgresVectorStoreDbClient _postgresClient; private readonly NpgsqlDataSource? _dataSource; @@ -56,16 +56,17 @@ internal PostgresVectorStore(IPostgresVectorStoreDbClient postgresDbClient, Post } /// - public virtual IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) + public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) { return PostgresVectorStoreUtils.WrapAsyncEnumerableAsync( this._postgresClient.GetTablesAsync(cancellationToken), - "ListCollectionNames" + "ListCollectionNames", + this._metadata.VectorStoreName ); } /// - public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull { #pragma warning disable CS0618 // IPostgresVectorStoreRecordCollectionFactory is obsolete diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index e9b5be465ae8..995edf67dbe8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -18,7 +18,7 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; /// The type of the key. /// The type of the record. 
#pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class PostgresVectorStoreRecordCollection : IVectorStoreRecordCollection +public sealed class PostgresVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix where TKey : notnull { @@ -92,7 +92,7 @@ internal PostgresVectorStoreRecordCollection(IPostgresVectorStoreDbClient client } /// - public virtual Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public Task CollectionExistsAsync(CancellationToken cancellationToken = default) { const string OperationName = "DoesTableExists"; return this.RunOperationAsync(OperationName, () => @@ -101,7 +101,7 @@ public virtual Task CollectionExistsAsync(CancellationToken cancellationTo } /// - public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public Task CreateCollectionAsync(CancellationToken cancellationToken = default) { const string OperationName = "CreateCollection"; return this.RunOperationAsync(OperationName, () => @@ -110,7 +110,7 @@ public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = } /// - public virtual Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { const string OperationName = "CreateCollectionIfNotExists"; return this.RunOperationAsync(OperationName, () => @@ -119,7 +119,7 @@ public virtual Task CreateCollectionIfNotExistsAsync(CancellationToken cancellat } /// - public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { const string OperationName = "DeleteCollection"; return this.RunOperationAsync(OperationName, () => @@ -128,12 +128,13 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = } /// - public virtual Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { const string OperationName = "Upsert"; var storageModel = VectorStoreErrorHandler.RunModelConversion( PostgresConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -153,7 +154,7 @@ public virtual Task UpsertAsync(TRecord record, CancellationToken cancella } /// - public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -161,6 +162,7 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable rec var storageModels = records.Select(record => VectorStoreErrorHandler.RunModelConversion( PostgresConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record))).ToList(); @@ -180,7 +182,7 @@ await this.RunOperationAsync(OperationName, () => } /// - public virtual Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task GetAsync(TKey key, GetRecordOptions? 
options = null, CancellationToken cancellationToken = default) { const string OperationName = "Get"; @@ -195,6 +197,7 @@ await this.RunOperationAsync(OperationName, () => if (row is null) { return default; } return VectorStoreErrorHandler.RunModelConversion( PostgresConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(row, new() { IncludeVectors = includeVectors })); @@ -202,7 +205,7 @@ await this.RunOperationAsync(OperationName, () => } /// - public virtual IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { const string OperationName = "GetBatch"; @@ -215,18 +218,20 @@ public virtual IAsyncEnumerable GetAsync(IEnumerable keys, GetRec .SelectAsync(row => VectorStoreErrorHandler.RunModelConversion( PostgresConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(row, new() { IncludeVectors = includeVectors })), cancellationToken ), OperationName, + this._collectionMetadata.VectorStoreName, this.CollectionName ); } /// - public virtual Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) + public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { const string OperationName = "Delete"; return this.RunOperationAsync(OperationName, () => @@ -235,7 +240,7 @@ public virtual Task DeleteAsync(TKey key, CancellationToken cancellationToken = } /// - public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -246,7 +251,7 @@ public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancel } /// - public virtual Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { const string OperationName = "VectorizedSearch"; @@ -292,6 +297,7 @@ public virtual Task> VectorizedSearchAsync { var record = VectorStoreErrorHandler.RunModelConversion( PostgresConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel( @@ -333,7 +339,8 @@ private async Task RunOperationAsync(string operationName, Func operation) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = PostgresConstants.VectorStoreSystemName, + VectorStoreSystemName = PostgresConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = operationName }; @@ -350,7 +357,8 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = PostgresConstants.VectorStoreSystemName, + VectorStoreSystemName = PostgresConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreUtils.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreUtils.cs index c97280f7f929..20d19717fee9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreUtils.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreUtils.cs @@ -17,20 +17,22 @@ internal static class PostgresVectorStoreUtils /// The type of the items in the async enumerable. /// The async enumerable to wrap. /// The name of the operation being performed. + /// The name of the vector store. /// The name of the collection being operated on. /// An async enumerable that will throw a if an exception is thrown while iterating over the original enumerator. public static async IAsyncEnumerable WrapAsyncEnumerableAsync( IAsyncEnumerable asyncEnumerable, string operationName, + string? vectorStoreName = null, string? collectionName = null) { var enumerator = asyncEnumerable.ConfigureAwait(false).GetAsyncEnumerator(); - var nextResult = await GetNextAsync(enumerator, operationName, collectionName).ConfigureAwait(false); + var nextResult = await GetNextAsync(enumerator, operationName, vectorStoreName, collectionName).ConfigureAwait(false); while (nextResult.more) { yield return nextResult.item; - nextResult = await GetNextAsync(enumerator, operationName, collectionName).ConfigureAwait(false); + nextResult = await GetNextAsync(enumerator, operationName, vectorStoreName, collectionName).ConfigureAwait(false); } } @@ -40,11 +42,13 @@ public static async IAsyncEnumerable WrapAsyncEnumerableAsync( /// /// The enumerator to get the next result from. /// The name of the operation being performed. + /// The name of the vector store. /// The name of the collection being operated on. /// A value indicating whether there are more results and the current string if true. public static async Task<(T item, bool more)> GetNextAsync( ConfiguredCancelableAsyncEnumerable.Enumerator enumerator, string operationName, + string? vectorStoreName = null, string? 
collectionName = null) { try @@ -56,7 +60,8 @@ public static async IAsyncEnumerable WrapAsyncEnumerableAsync( { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = PostgresConstants.VectorStoreSystemName, + VectorStoreSystemName = PostgresConstants.VectorStoreSystemName, + VectorStoreName = vectorStoreName, CollectionName = collectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs index ee2f4f0ec35f..6167b8c9ef30 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs @@ -16,7 +16,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public class QdrantVectorStore : IVectorStore +public sealed class QdrantVectorStore : IVectorStore { /// Metadata about vector store. private readonly VectorStoreMetadata _metadata; @@ -56,7 +56,7 @@ internal QdrantVectorStore(MockableQdrantClient qdrantClient, QdrantVectorStoreO } /// - public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull { #pragma warning disable CS0618 // IQdrantVectorStoreRecordCollectionFactory is obsolete @@ -81,7 +81,7 @@ public virtual IVectorStoreRecordCollection GetCollection - public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { IReadOnlyList collections; @@ -93,7 +93,8 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = QdrantConstants.VectorStoreSystemName, + VectorStoreSystemName = QdrantConstants.VectorStoreSystemName, + VectorStoreName = this._metadata.VectorStoreName, OperationName = "ListCollections" }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs index e1f4ed9411f0..a7de27756fd8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs @@ -94,12 +94,20 @@ public static Filter BuildFromLegacyFilter(VectorSearchFilter basicVectorSearchF /// The point to map to a . /// The mapper to perform the main mapping operation with. /// A value indicating whether to include vectors in the mapped result. - /// The name of the database system the operation is being run on. + /// The name of the vector store system the operation is being run on. + /// The name of the vector store the operation is being run on. /// The name of the collection the operation is being run on. /// The type of database operation being run. /// The mapped . 
#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - public static VectorSearchResult MapScoredPointToVectorSearchResult(ScoredPoint point, IVectorStoreRecordMapper mapper, bool includeVectors, string databaseSystemName, string collectionName, string operationName) + public static VectorSearchResult MapScoredPointToVectorSearchResult( + ScoredPoint point, + IVectorStoreRecordMapper mapper, + bool includeVectors, + string vectorStoreSystemName, + string? vectorStoreName, + string collectionName, + string operationName) #pragma warning restore CS0618 { // Since the mapper doesn't know about scored points, we need to convert the scored point to a point struct first. @@ -118,7 +126,8 @@ public static VectorSearchResult MapScoredPointToVectorSearchResult( VectorStoreErrorHandler.RunModelConversion( - databaseSystemName, + vectorStoreSystemName, + vectorStoreName, collectionName, operationName, () => mapper.MapFromStorageToDataModel(pointStruct, new() { IncludeVectors = includeVectors })), diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index d2a3a45b1c82..f18d09b09cf6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -20,7 +20,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class QdrantVectorStoreRecordCollection : +public sealed class QdrantVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorStoreRecordCollection, IKeywordHybridSearch @@ -108,7 +108,7 @@ internal QdrantVectorStoreRecordCollection(MockableQdrantClient qdrantClient, st public string CollectionName => this._collectionName; /// - public virtual Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public Task CollectionExistsAsync(CancellationToken cancellationToken = default) { return this.RunOperationAsync( "CollectionExists", @@ -116,7 +116,7 @@ public virtual Task CollectionExistsAsync(CancellationToken cancellationTo } /// - public virtual async Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public async Task CreateCollectionAsync(CancellationToken cancellationToken = default) { if (!this._options.HasNamedVectors) { @@ -200,7 +200,7 @@ await this.RunOperationAsync( } /// - public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -209,7 +209,7 @@ public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken can } /// - public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) => this.RunOperationAsync("DeleteCollection", async () => { @@ -232,7 +232,7 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = }); /// - public virtual async Task GetAsync(ulong key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(ulong key, GetRecordOptions? 
options = null, CancellationToken cancellationToken = default) { Verify.NotNull(key); @@ -241,7 +241,7 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = } /// - public virtual async Task GetAsync(Guid key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(Guid key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(key); @@ -250,19 +250,19 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = } /// - public virtual IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default) + public IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default) { return this.GetBatchByPointIdAsync(keys, key => new PointId { Num = key }, options, cancellationToken); } /// - public virtual IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default) + public IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default) { return this.GetBatchByPointIdAsync(keys, key => new PointId { Uuid = key.ToString("D") }, options, cancellationToken); } /// - public virtual Task DeleteAsync(ulong key, CancellationToken cancellationToken = default) + public Task DeleteAsync(ulong key, CancellationToken cancellationToken = default) { Verify.NotNull(key); @@ -276,7 +276,7 @@ public virtual Task DeleteAsync(ulong key, CancellationToken cancellationToken = } /// - public virtual Task DeleteAsync(Guid key, CancellationToken cancellationToken = default) + public Task DeleteAsync(Guid key, CancellationToken cancellationToken = default) { Verify.NotNull(key); @@ -290,7 +290,7 @@ public virtual Task DeleteAsync(Guid key, CancellationToken cancellationToken = } /// - public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -304,7 +304,7 @@ public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cance } /// - public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -318,13 +318,14 @@ public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancel } /// - public virtual async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); // Create point from record. var pointStruct = VectorStoreErrorHandler.RunModelConversion( QdrantConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, UpsertName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -344,6 +345,7 @@ async Task IVectorStoreRecordCollection.UpsertAsync(TRecord // Create point from record. 
var pointStruct = VectorStoreErrorHandler.RunModelConversion( QdrantConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, UpsertName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -356,13 +358,14 @@ await this.RunOperationAsync( } /// - public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); // Create points from records. var pointStructs = VectorStoreErrorHandler.RunModelConversion( QdrantConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, UpsertName, () => records.Select(this._mapper.MapFromDataToStorageModel).ToList()); @@ -386,6 +389,7 @@ async IAsyncEnumerable IVectorStoreRecordCollection.UpsertA // Create points from records. var pointStructs = VectorStoreErrorHandler.RunModelConversion( QdrantConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, UpsertName, () => records.Select(this._mapper.MapFromDataToStorageModel).ToList()); @@ -444,6 +448,7 @@ private async IAsyncEnumerable GetBatchByPointIdAsync( yield return VectorStoreErrorHandler.RunModelConversion( QdrantConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(pointStruct, new() { IncludeVectors = includeVectors })); @@ -451,7 +456,7 @@ private async IAsyncEnumerable GetBatchByPointIdAsync( } /// - public virtual async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { var floatVector = VerifyVectorParam(vector); Verify.NotLessThan(top, 1); @@ -499,6 +504,7 @@ public virtual async Task> VectorizedSearchAsync> HybridSearchAsync(TVect this._mapper, internalOptions.IncludeVectors, QdrantConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, "Query")); @@ -619,7 +626,8 @@ private async Task RunOperationAsync(string operationName, Func operation) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = QdrantConstants.VectorStoreSystemName, + VectorStoreSystemName = QdrantConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this._collectionName, OperationName = operationName }; @@ -643,7 +651,8 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = QdrantConstants.VectorStoreSystemName, + VectorStoreSystemName = QdrantConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this._collectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index 4929c4bb052f..427c80c3b683 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -20,7 +20,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class RedisHashSetVectorStoreRecordCollection : IVectorStoreRecordCollection +public sealed class RedisHashSetVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// Metadata about vector store record collection. @@ -126,7 +126,7 @@ public RedisHashSetVectorStoreRecordCollection(IDatabase database, string collec public string CollectionName => this._collectionName; /// - public virtual async Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public async Task CollectionExistsAsync(CancellationToken cancellationToken = default) { try { @@ -141,7 +141,8 @@ public virtual async Task CollectionExistsAsync(CancellationToken cancella { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = RedisConstants.VectorStoreSystemName, + VectorStoreSystemName = RedisConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this._collectionName, OperationName = "FT.INFO" }; @@ -149,7 +150,7 @@ public virtual async Task CollectionExistsAsync(CancellationToken cancella } /// - public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public Task CreateCollectionAsync(CancellationToken cancellationToken = default) { // Map the record definition to a schema. 
var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(this._model.Properties, useDollarPrefix: false); @@ -165,7 +166,7 @@ public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = } /// - public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -174,7 +175,7 @@ public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken can } /// - public virtual async Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public async Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { try { @@ -196,7 +197,7 @@ await this.RunOperationAsync("FT.DROPINDEX", } /// - public virtual async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -231,6 +232,7 @@ await this.RunOperationAsync("FT.DROPINDEX", // Convert to the caller's data model. return VectorStoreErrorHandler.RunModelConversion( RedisConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, operationName, () => @@ -240,7 +242,7 @@ await this.RunOperationAsync("FT.DROPINDEX", } /// - public virtual async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -257,7 +259,7 @@ public virtual async IAsyncEnumerable GetAsync(IEnumerable keys } /// - public virtual Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -272,7 +274,7 @@ public virtual Task DeleteAsync(string key, CancellationToken cancellationToken } /// - public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -282,13 +284,14 @@ public virtual Task DeleteAsync(IEnumerable keys, CancellationToken canc } /// - public virtual async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); // Map. 
var redisHashSetRecord = VectorStoreErrorHandler.RunModelConversion( RedisConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, "HSET", () => this._mapper.MapFromDataToStorageModel(record)); @@ -307,7 +310,7 @@ await this.RunOperationAsync( } /// - public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -324,7 +327,7 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable r } /// - public virtual async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); Verify.NotLessThan(top, 1); @@ -359,6 +362,7 @@ public virtual async Task> VectorizedSearchAsync @@ -439,7 +443,8 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = RedisConstants.VectorStoreSystemName, + VectorStoreSystemName = RedisConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this._collectionName, OperationName = operationName }; @@ -462,7 +467,8 @@ private async Task RunOperationAsync(string operationName, Func operation) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = RedisConstants.VectorStoreSystemName, + VectorStoreSystemName = RedisConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this._collectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index 97c5fd75bd1d..0098de90bd6d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -23,7 +23,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class RedisJsonVectorStoreRecordCollection : IVectorStoreRecordCollection +public sealed class RedisJsonVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// Metadata about vector store record collection. 
@@ -134,7 +134,7 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectio public string CollectionName => this._collectionName; /// - public virtual async Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public async Task CollectionExistsAsync(CancellationToken cancellationToken = default) { try { @@ -149,7 +149,8 @@ public virtual async Task CollectionExistsAsync(CancellationToken cancella { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = RedisConstants.VectorStoreSystemName, + VectorStoreSystemName = RedisConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this._collectionName, OperationName = "FT.INFO" }; @@ -157,7 +158,7 @@ public virtual async Task CollectionExistsAsync(CancellationToken cancella } /// - public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public Task CreateCollectionAsync(CancellationToken cancellationToken = default) { // Map the record definition to a schema. var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(this._model.Properties, useDollarPrefix: true); @@ -173,7 +174,7 @@ public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = } /// - public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -182,7 +183,7 @@ public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken can } /// - public virtual async Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public async Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { try { @@ -204,7 +205,7 @@ await this.RunOperationAsync("FT.DROPINDEX", } /// - public virtual async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -239,6 +240,7 @@ await this.RunOperationAsync("FT.DROPINDEX", // Convert to the caller's data model. return VectorStoreErrorHandler.RunModelConversion( RedisConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, "GET", () => @@ -249,7 +251,7 @@ await this.RunOperationAsync("FT.DROPINDEX", } /// - public virtual async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(keys); var keysList = keys.ToList(); @@ -288,6 +290,7 @@ public virtual async IAsyncEnumerable GetAsync(IEnumerable keys // Convert to the caller's data model. 
yield return VectorStoreErrorHandler.RunModelConversion( RedisConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, "MGET", () => @@ -299,7 +302,7 @@ public virtual async IAsyncEnumerable GetAsync(IEnumerable keys } /// - public virtual Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public Task DeleteAsync(string key, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(key); @@ -315,7 +318,7 @@ public virtual Task DeleteAsync(string key, CancellationToken cancellationToken } /// - public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -325,13 +328,14 @@ public virtual Task DeleteAsync(IEnumerable keys, CancellationToken canc } /// - public virtual async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); // Map. var redisJsonRecord = VectorStoreErrorHandler.RunModelConversion( RedisConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, "SET", () => @@ -356,7 +360,7 @@ await this.RunOperationAsync( } /// - public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -366,6 +370,7 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable r { var redisJsonRecord = VectorStoreErrorHandler.RunModelConversion( RedisConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, "MSET", () => @@ -395,7 +400,7 @@ await this.RunOperationAsync( } /// - public virtual async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); Verify.NotLessThan(top, 1); @@ -424,6 +429,7 @@ public virtual async Task> VectorizedSearchAsync @@ -506,7 +512,8 @@ private async Task RunOperationAsync(string operationName, Func operation) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = RedisConstants.VectorStoreSystemName, + VectorStoreSystemName = RedisConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this._collectionName, OperationName = operationName }; @@ -530,7 +537,8 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = RedisConstants.VectorStoreSystemName, + VectorStoreSystemName = RedisConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this._collectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs index 7deae3f65867..66449d428e8d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs @@ -16,7 +16,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public class RedisVectorStore : IVectorStore +public sealed class RedisVectorStore : IVectorStore { /// Metadata about vector store. private readonly VectorStoreMetadata _metadata; @@ -47,7 +47,7 @@ public RedisVectorStore(IDatabase database, RedisVectorStoreOptions? options = d } /// - public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) where TKey : notnull { #pragma warning disable CS0618 // IRedisVectorStoreRecordCollectionFactory is obsolete @@ -75,7 +75,7 @@ public virtual IVectorStoreRecordCollection GetCollection - public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { const string OperationName = ""; RedisResult[] listResult; @@ -88,7 +88,8 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = RedisConstants.VectorStoreSystemName, + VectorStoreSystemName = RedisConstants.VectorStoreSystemName, + VectorStoreName = this._metadata.VectorStoreName, OperationName = OperationName }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs index f24b4a350993..43a58f791223 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/ExceptionWrapper.cs @@ -16,9 +16,10 @@ internal static async Task WrapAsync( SqlConnection connection, SqlCommand command, Func> func, - CancellationToken cancellationToken, string operationName, - string? collectionName = null) + string? vectorStoreName = null, + string? collectionName = null, + CancellationToken cancellationToken = default) { if (connection.State != System.Data.ConnectionState.Open) { @@ -39,18 +40,20 @@ internal static async Task WrapAsync( throw new VectorStoreOperationException(ex.Message, ex) { - OperationName = operationName, - VectorStoreType = SqlServerConstants.VectorStoreSystemName, - CollectionName = collectionName + VectorStoreSystemName = SqlServerConstants.VectorStoreSystemName, + VectorStoreName = vectorStoreName, + CollectionName = collectionName, + OperationName = operationName }; } } internal static async Task WrapReadAsync( SqlDataReader reader, - CancellationToken cancellationToken, string operationName, - string? collectionName = null) + string? vectorStoreName = null, + string? 
collectionName = null, + CancellationToken cancellationToken = default) { try { @@ -60,9 +63,10 @@ internal static async Task WrapReadAsync( { throw new VectorStoreOperationException(ex.Message, ex) { - OperationName = operationName, - VectorStoreType = SqlServerConstants.VectorStoreSystemName, - CollectionName = collectionName + VectorStoreSystemName = SqlServerConstants.VectorStoreSystemName, + VectorStoreName = vectorStoreName, + CollectionName = collectionName, + OperationName = operationName }; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs index b8517a49baba..2dc42dee74bf 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs @@ -66,11 +66,19 @@ public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancel using SqlConnection connection = new(this._connectionString); using SqlCommand command = SqlServerCommandBuilder.SelectTableNames(connection, this._options.Schema); - using SqlDataReader reader = await ExceptionWrapper.WrapAsync(connection, command, + using SqlDataReader reader = await ExceptionWrapper.WrapAsync( + connection, + command, static (cmd, ct) => cmd.ExecuteReaderAsync(ct), - cancellationToken, "ListCollection").ConfigureAwait(false); + operationName: "ListCollectionNames", + vectorStoreName: this._metadata.VectorStoreName, + cancellationToken: cancellationToken).ConfigureAwait(false); - while (await ExceptionWrapper.WrapReadAsync(reader, cancellationToken, "ListCollection").ConfigureAwait(false)) + while (await ExceptionWrapper.WrapReadAsync( + reader, + operationName: "ListCollectionNames", + vectorStoreName: this._metadata.VectorStoreName, + cancellationToken: cancellationToken).ConfigureAwait(false)) { yield return reader.GetString(reader.GetOrdinal("table_name")); } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index adb2a6b8b834..605a1239f9ba 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -91,7 +91,11 @@ static async (cmd, ct) => { using SqlDataReader reader = await cmd.ExecuteReaderAsync(ct).ConfigureAwait(false); return await reader.ReadAsync(ct).ConfigureAwait(false); - }, cancellationToken, "CollectionExists", this.CollectionName).ConfigureAwait(false); + }, + "CollectionExists", + this._collectionMetadata.VectorStoreName, + this.CollectionName, + cancellationToken).ConfigureAwait(false); } /// @@ -122,7 +126,10 @@ private async Task CreateCollectionAsync(bool ifNotExists, CancellationToken can await ExceptionWrapper.WrapAsync(connection, command, static (cmd, ct) => cmd.ExecuteNonQueryAsync(ct), - cancellationToken, "CreateCollection", this.CollectionName).ConfigureAwait(false); + "CreateCollection", + this._collectionMetadata.VectorStoreName, + this.CollectionName, + cancellationToken).ConfigureAwait(false); } /// @@ -134,7 +141,10 @@ public async Task DeleteCollectionAsync(CancellationToken cancellationToken = de await ExceptionWrapper.WrapAsync(connection, command, static (cmd, ct) => cmd.ExecuteNonQueryAsync(ct), - cancellationToken, "DeleteCollection", this.CollectionName).ConfigureAwait(false); + "DeleteCollection", + 
this._collectionMetadata.VectorStoreName, + this.CollectionName, + cancellationToken).ConfigureAwait(false); } /// @@ -152,7 +162,10 @@ public async Task DeleteAsync(TKey key, CancellationToken cancellationToken = de await ExceptionWrapper.WrapAsync(connection, command, static (cmd, ct) => cmd.ExecuteNonQueryAsync(ct), - cancellationToken, "Delete", this.CollectionName).ConfigureAwait(false); + "Delete", + this._collectionMetadata.VectorStoreName, + this.CollectionName, + cancellationToken).ConfigureAwait(false); } /// @@ -215,9 +228,10 @@ public async Task DeleteAsync(IEnumerable keys, CancellationToken cancella throw new VectorStoreOperationException(ex.Message, ex) { - OperationName = "DeleteBatch", - VectorStoreType = SqlServerConstants.VectorStoreSystemName, - CollectionName = this.CollectionName + VectorStoreSystemName = SqlServerConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, + CollectionName = this.CollectionName, + OperationName = "DeleteBatch" }; } } @@ -244,7 +258,11 @@ static async (cmd, ct) => SqlDataReader reader = await cmd.ExecuteReaderAsync(ct).ConfigureAwait(false); await reader.ReadAsync(ct).ConfigureAwait(false); return reader; - }, cancellationToken, "Get", this.CollectionName).ConfigureAwait(false); + }, + "Get", + this._collectionMetadata.VectorStoreName, + this.CollectionName, + cancellationToken).ConfigureAwait(false); return reader.HasRows ? this._mapper.MapFromStorageToDataModel( @@ -290,9 +308,17 @@ public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecor using SqlDataReader reader = await ExceptionWrapper.WrapAsync(connection, command, static (cmd, ct) => cmd.ExecuteReaderAsync(ct), - cancellationToken, "GetBatch", this.CollectionName).ConfigureAwait(false); + "GetBatch", + this._collectionMetadata.VectorStoreName, + this.CollectionName, + cancellationToken).ConfigureAwait(false); - while (await ExceptionWrapper.WrapReadAsync(reader, cancellationToken, "GetBatch", this.CollectionName).ConfigureAwait(false)) + while (await ExceptionWrapper.WrapReadAsync( + reader, + "GetBatch", + this._collectionMetadata.VectorStoreName, + this.CollectionName, + cancellationToken).ConfigureAwait(false)) { yield return this._mapper.MapFromStorageToDataModel( new SqlDataReaderDictionary(reader, this._model.VectorProperties), @@ -320,7 +346,11 @@ async static (cmd, ct) => using SqlDataReader reader = await cmd.ExecuteReaderAsync(ct).ConfigureAwait(false); await reader.ReadAsync(ct).ConfigureAwait(false); return reader.GetFieldValue(0); - }, cancellationToken, "Upsert", this.CollectionName).ConfigureAwait(false); + }, + "Upsert", + this._collectionMetadata.VectorStoreName, + this.CollectionName, + cancellationToken).ConfigureAwait(false); } /// @@ -387,9 +417,10 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, throw new VectorStoreOperationException(ex.Message, ex) { - OperationName = "UpsertBatch", - VectorStoreType = SqlServerConstants.VectorStoreSystemName, - CollectionName = this.CollectionName + VectorStoreSystemName = SqlServerConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, + CollectionName = this.CollectionName, + OperationName = "UpsertBatch" }; } @@ -452,7 +483,11 @@ public async Task> VectorizedSearchAsync(T { var results = this.ReadVectorSearchResultsAsync(connection, cmd, searchOptions.IncludeVectors, ct); return Task.FromResult(new VectorSearchResults(results)); - }, cancellationToken, "VectorizedSearch", this.CollectionName).ConfigureAwait(false); + 
}, + "VectorizedSearch", + this._collectionMetadata.VectorStoreName, + this.CollectionName, + cancellationToken).ConfigureAwait(false); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs index 6e1e5d7bc9ea..afdab01e938d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs @@ -16,7 +16,7 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. /// -public class SqliteVectorStore : IVectorStore +public sealed class SqliteVectorStore : IVectorStore { /// Metadata about vector store. private readonly VectorStoreMetadata _metadata; @@ -60,7 +60,7 @@ public SqliteVectorStore( => throw new InvalidOperationException("Use the constructor that accepts a connection string instead."); /// - public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull { #pragma warning disable CS0618 // ISqliteVectorStoreRecordCollectionFactory is obsolete @@ -92,7 +92,7 @@ public virtual IVectorStoreRecordCollection GetCollection - public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { const string TablePropertyName = "name"; const string Query = $"SELECT {TablePropertyName} FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%';"; diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 28e88f7febe0..f33c24f252ad 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -19,7 +19,7 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// /// The data model to use for adding, updating and retrieving data from storage. 
#pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class SqliteVectorStoreRecordCollection : +public sealed class SqliteVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect @@ -146,7 +146,7 @@ public SqliteVectorStoreRecordCollection( } /// - public virtual async Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public async Task CollectionExistsAsync(CancellationToken cancellationToken = default) { const string OperationName = "TableCount"; @@ -179,7 +179,7 @@ await this.InternalCreateCollectionAsync(connection, ifNotExists: true, cancella } /// - public virtual async Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public async Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); @@ -192,7 +192,7 @@ public virtual async Task DeleteCollectionAsync(CancellationToken cancellationTo } /// - public virtual Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { const string LimitPropertyName = "k"; @@ -588,6 +588,7 @@ private async Task InternalUpsertAsync(SqliteConnection connection, var storageModel = VectorStoreErrorHandler.RunModelConversion( SqliteConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -611,6 +612,7 @@ private IAsyncEnumerable InternalUpsertBatchAsync(SqliteConnection c var storageModels = records.Select(record => VectorStoreErrorHandler.RunModelConversion( SqliteConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record))).ToList(); @@ -741,6 +743,7 @@ private TRecord GetAndMapRecord( return VectorStoreErrorHandler.RunModelConversion( SqliteConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, operationName, () => this._mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors })); @@ -756,7 +759,8 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = SqliteConstants.VectorStoreSystemName, + VectorStoreSystemName = SqliteConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs index 91a2944d7658..4d9f0236beab 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs @@ -16,7 +16,7 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; /// /// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. 
/// -public class WeaviateVectorStore : IVectorStore +public sealed class WeaviateVectorStore : IVectorStore { /// Metadata about vector store. private readonly VectorStoreMetadata _metadata; @@ -51,7 +51,7 @@ public WeaviateVectorStore(HttpClient httpClient, WeaviateVectorStoreOptions? op /// /// The collection name must start with a capital letter and contain only ASCII letters and digits. - public virtual IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull { #pragma warning disable CS0618 // IWeaviateVectorStoreRecordCollectionFactory is obsolete @@ -83,7 +83,7 @@ public virtual IVectorStoreRecordCollection GetCollection - public virtual async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { using var request = new WeaviateGetCollectionsRequest().Build(); WeaviateGetCollectionsResponse collectionsResponse; @@ -99,7 +99,8 @@ public virtual async IAsyncEnumerable ListCollectionNamesAsync([Enumerat { throw new VectorStoreOperationException("Call to vector store failed.", e) { - VectorStoreType = WeaviateConstants.VectorStoreSystemName, + VectorStoreSystemName = WeaviateConstants.VectorStoreSystemName, + VectorStoreName = this._metadata.VectorStoreName, OperationName = "ListCollectionNames" }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index e3914c5e2f13..400c0d9f721b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -21,7 +21,7 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch +public sealed class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// Metadata about vector store record collection. 
@@ -108,7 +108,7 @@ public WeaviateVectorStoreRecordCollection( } /// - public virtual Task CollectionExistsAsync(CancellationToken cancellationToken = default) + public Task CollectionExistsAsync(CancellationToken cancellationToken = default) { const string OperationName = "GetCollectionSchema"; @@ -125,7 +125,7 @@ public virtual Task CollectionExistsAsync(CancellationToken cancellationTo } /// - public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = default) + public Task CreateCollectionAsync(CancellationToken cancellationToken = default) { const string OperationName = "CreateCollectionSchema"; @@ -140,7 +140,7 @@ public virtual Task CreateCollectionAsync(CancellationToken cancellationToken = } /// - public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) { @@ -149,7 +149,7 @@ public virtual async Task CreateCollectionIfNotExistsAsync(CancellationToken can } /// - public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { const string OperationName = "DeleteCollectionSchema"; @@ -162,7 +162,7 @@ public virtual Task DeleteCollectionAsync(CancellationToken cancellationToken = } /// - public virtual Task DeleteAsync(Guid key, CancellationToken cancellationToken = default) + public Task DeleteAsync(Guid key, CancellationToken cancellationToken = default) { const string OperationName = "DeleteObject"; @@ -175,7 +175,7 @@ public virtual Task DeleteAsync(Guid key, CancellationToken cancellationToken = } /// - public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { const string OperationName = "DeleteObjectBatch"; const string ContainsAnyOperator = "ContainsAny"; @@ -200,7 +200,7 @@ public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancel } /// - public virtual Task GetAsync(Guid key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task GetAsync(Guid key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { const string OperationName = "GetCollectionObject"; @@ -218,6 +218,7 @@ public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancel return VectorStoreErrorHandler.RunModelConversion( WeaviateConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromStorageToDataModel(jsonObject!, new() { IncludeVectors = includeVectors })); @@ -225,7 +226,7 @@ public virtual Task DeleteAsync(IEnumerable keys, CancellationToken cancel } /// - public virtual async IAsyncEnumerable GetAsync( + public async IAsyncEnumerable GetAsync( IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -244,7 +245,7 @@ public virtual async IAsyncEnumerable GetAsync( } /// - public virtual async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { return await this.UpsertAsync([record], cancellationToken) .FirstOrDefaultAsync(cancellationToken) @@ -252,7 +253,7 @@ public virtual async Task UpsertAsync(TRecord record, CancellationToken ca } /// - public virtual async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { const string OperationName = "UpsertCollectionObject"; @@ -260,6 +261,7 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable rec { var jsonObjects = records.Select(record => VectorStoreErrorHandler.RunModelConversion( WeaviateConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, () => this._mapper.MapFromDataToStorageModel(record))).ToList(); @@ -282,7 +284,7 @@ public virtual async IAsyncEnumerable UpsertAsync(IEnumerable rec } /// - public virtual async Task> VectorizedSearchAsync( + public async Task> VectorizedSearchAsync( TVector vector, int top, VectorSearchOptions? options = null, @@ -361,7 +363,8 @@ private async Task> ExecuteQueryAsync(string query, { throw new VectorStoreOperationException($"Error occurred during vector search. Response: {content}") { - VectorStoreType = WeaviateConstants.VectorStoreSystemName, + VectorStoreSystemName = WeaviateConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = operationName }; @@ -373,6 +376,7 @@ private async Task> ExecuteQueryAsync(string query, var record = VectorStoreErrorHandler.RunModelConversion( WeaviateConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this.CollectionName, operationName, () => this._mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors })); @@ -437,7 +441,8 @@ private async Task RunOperationAsync(string operationName, Func> o { throw new VectorStoreOperationException("Call to vector store failed.", ex) { - VectorStoreType = WeaviateConstants.VectorStoreSystemName, + VectorStoreSystemName = WeaviateConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, CollectionName = this.CollectionName, OperationName = operationName }; diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs index 3180ff043606..e16da205a646 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs @@ -119,7 +119,7 @@ public void MapScoredPointToVectorSearchResultMapsResults() mapperMock.Setup(x => x.MapFromStorageToDataModel(It.IsAny(), It.IsAny())).Returns(new DataModel { Id = 1, DataField = "data 1", Embedding = new float[] { 1, 2, 3 } }); // Act. 
- var actual = QdrantVectorStoreCollectionSearchMapping.MapScoredPointToVectorSearchResult(scoredPoint, mapperMock.Object, true, "Qdrant", "mycollection", "query"); + var actual = QdrantVectorStoreCollectionSearchMapping.MapScoredPointToVectorSearchResult(scoredPoint, mapperMock.Object, true, "Qdrant", "myvectorstore", "mycollection", "query"); // Assert. Assert.Equal(1ul, actual.Record.Id); diff --git a/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml index 9e79fa44b1bb..486b1be80c27 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml +++ b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml @@ -71,6 +71,20 @@ lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0002 + M:Microsoft.Extensions.VectorData.VectorStoreException.get_VectorStoreType + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.VectorStoreException.set_VectorStoreType(System.String) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0002 M:Microsoft.Extensions.VectorData.HybridSearchOptions`1.get_Top @@ -141,6 +155,20 @@ lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0002 + M:Microsoft.Extensions.VectorData.VectorStoreException.get_VectorStoreType + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.VectorStoreException.set_VectorStoreType(System.String) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0002 M:Microsoft.Extensions.VectorData.HybridSearchOptions`1.get_Top @@ -211,6 +239,20 @@ lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0002 + M:Microsoft.Extensions.VectorData.VectorStoreException.get_VectorStoreType + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0002 + M:Microsoft.Extensions.VectorData.VectorStoreException.set_VectorStoreType(System.String) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0006 M:Microsoft.Extensions.VectorData.IKeywordHybridSearch`1.GetService(System.Type,System.Object) diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreException.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreException.cs index dc0f5bd1d1b5..42e18181fe1b 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreException.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreException.cs @@ -33,10 +33,18 @@ protected VectorStoreException(string? message, Exception? innerException) : bas { } + /// The name of the vector store system. + /// + /// Where possible, this maps to the "db.system.name" attribute defined in the + /// OpenTelemetry Semantic Conventions for database calls and systems, see . + /// Example: redis, sqlite, mysql. + /// + public string? 
VectorStoreSystemName { get; init; } + /// - /// Gets or sets the type of vector store that the failing operation was performed on. + /// The name of the vector store (database). /// - public string? VectorStoreType { get; init; } + public string? VectorStoreName { get; init; } /// /// Gets or sets the name of the vector store collection that the failing operation was performed on. diff --git a/dotnet/src/InternalUtilities/src/Data/VectorStoreErrorHandler.cs b/dotnet/src/InternalUtilities/src/Data/VectorStoreErrorHandler.cs index 714befadc810..2a57108caead 100644 --- a/dotnet/src/InternalUtilities/src/Data/VectorStoreErrorHandler.cs +++ b/dotnet/src/InternalUtilities/src/Data/VectorStoreErrorHandler.cs @@ -16,13 +16,19 @@ internal static class VectorStoreErrorHandler /// Run the given model conversion and wrap any exceptions with . /// /// The response type of the operation. - /// The name of the database system the operation is being run on. + /// The name of the vector store system the operation is being run on. + /// The name of the vector store the operation is being run on. /// The name of the collection the operation is being run on. /// The type of database operation being run. /// The operation to run. /// The result of the operation. [MethodImpl(MethodImplOptions.AggressiveInlining)] - public static T RunModelConversion(string databaseSystemName, string collectionName, string operationName, Func operation) + public static T RunModelConversion( + string vectorStoreSystemName, + string? vectorStoreName, + string collectionName, + string operationName, + Func operation) { try { @@ -32,7 +38,8 @@ public static T RunModelConversion(string databaseSystemName, string collecti { throw new VectorStoreRecordMappingException("Failed to convert vector store record.", ex) { - VectorStoreType = databaseSystemName, + VectorStoreSystemName = vectorStoreSystemName, + VectorStoreName = vectorStoreName, CollectionName = collectionName, OperationName = operationName }; From 89de29ceb6eb7c2e8bb2eed2c4915b659824e741 Mon Sep 17 00:00:00 2001 From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com> Date: Tue, 8 Apr 2025 10:29:09 -0700 Subject: [PATCH 34/63] .Net: [MEVD] Improved Cosmos NoSQL initialization logic (#11446) ### Motivation and Context Resolves: https://github.com/microsoft/semantic-kernel/issues/10351 Updated Cosmos NoSQL initialization logic with exception message about using `CosmosClientOptions.UseSystemTextJsonSerializerWithOptions` property. 
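For reference, a minimal sketch of how a caller can satisfy the new constructor check (the connection string, database name, collection name, and `MyRecord` data model below are placeholders, not part of this change):

```csharp
using System.Text.Json;
using Microsoft.Azure.Cosmos;
using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL;

// Configure the CosmosClient with the System.Text.Json serializer,
// which the collection constructor now requires.
var clientOptions = new CosmosClientOptions
{
    UseSystemTextJsonSerializerWithOptions = JsonSerializerOptions.Default
};

var client = new CosmosClient("<connection-string>", clientOptions);
var database = client.GetDatabase("<database-name>");

// Succeeds with the options above; without UseSystemTextJsonSerializerWithOptions
// the constructor now throws an ArgumentException explaining what to configure.
var collection = new AzureCosmosDBNoSQLVectorStoreRecordCollection<MyRecord>(database, "<collection-name>");
```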
### Contribution Checklist - [x] The code builds clean without any errors or warnings - [x] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [x] All unit tests pass, and I have added new tests where possible - [x] I didn't break anyone :smile: --- ...smosDBNoSQLKernelBuilderExtensionsTests.cs | 17 +++++++++++-- ...DBNoSQLServiceCollectionExtensionsTests.cs | 17 +++++++++++-- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 25 +++++++++++++++++-- .../AzureCosmosDBNoSQLVectorStoreTests.cs | 12 +++++++++ ...osmosDBNoSQLVectorStoreRecordCollection.cs | 7 ++++++ 5 files changed, 72 insertions(+), 6 deletions(-) diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLKernelBuilderExtensionsTests.cs index 1ad9fc3ea68a..8f9f7cd72ab3 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLKernelBuilderExtensionsTests.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System.Reflection; +using System.Text.Json; using Microsoft.Azure.Cosmos; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; @@ -18,12 +19,24 @@ namespace SemanticKernel.Connectors.AzureCosmosDBNoSQL.UnitTests; public sealed class AzureCosmosDBNoSQLKernelBuilderExtensionsTests { private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); + private readonly Mock _mockDatabase = new(); + + public AzureCosmosDBNoSQLKernelBuilderExtensionsTests() + { + var mockClient = new Mock(); + + mockClient.Setup(l => l.ClientOptions).Returns(new CosmosClientOptions() { UseSystemTextJsonSerializerWithOptions = JsonSerializerOptions.Default }); + + this._mockDatabase + .Setup(l => l.Client) + .Returns(mockClient.Object); + } [Fact] public void AddVectorStoreRegistersClass() { // Arrange - this._kernelBuilder.Services.AddSingleton(Mock.Of()); + this._kernelBuilder.Services.AddSingleton(this._mockDatabase.Object); // Act this._kernelBuilder.AddAzureCosmosDBNoSQLVectorStore(); @@ -55,7 +68,7 @@ public void AddVectorStoreWithConnectionStringRegistersClass() public void AddVectorStoreRecordCollectionRegistersClass() { // Arrange - this._kernelBuilder.Services.AddSingleton(Mock.Of()); + this._kernelBuilder.Services.AddSingleton(this._mockDatabase.Object); // Act this._kernelBuilder.AddAzureCosmosDBNoSQLVectorStoreRecordCollection("testcollection"); diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLServiceCollectionExtensionsTests.cs index 4574415ecb2e..3ade14ddaf34 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLServiceCollectionExtensionsTests.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. 
using System.Reflection; +using System.Text.Json; using Microsoft.Azure.Cosmos; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; @@ -18,12 +19,24 @@ namespace SemanticKernel.Connectors.AzureCosmosDBNoSQL.UnitTests; public sealed class AzureCosmosDBNoSQLServiceCollectionExtensionsTests { private readonly IServiceCollection _serviceCollection = new ServiceCollection(); + private readonly Mock _mockDatabase = new(); + + public AzureCosmosDBNoSQLServiceCollectionExtensionsTests() + { + var mockClient = new Mock(); + + mockClient.Setup(l => l.ClientOptions).Returns(new CosmosClientOptions() { UseSystemTextJsonSerializerWithOptions = JsonSerializerOptions.Default }); + + this._mockDatabase + .Setup(l => l.Client) + .Returns(mockClient.Object); + } [Fact] public void AddVectorStoreRegistersClass() { // Arrange - this._serviceCollection.AddSingleton(Mock.Of()); + this._serviceCollection.AddSingleton(this._mockDatabase.Object); // Act this._serviceCollection.AddAzureCosmosDBNoSQLVectorStore(); @@ -56,7 +69,7 @@ public void AddVectorStoreWithConnectionStringRegistersClass() public void AddVectorStoreRecordCollectionRegistersClass() { // Arrange - this._serviceCollection.AddSingleton(Mock.Of()); + this._serviceCollection.AddSingleton(this._mockDatabase.Object); // Act this._serviceCollection.AddAzureCosmosDBNoSQLVectorStoreRecordCollection("testcollection"); diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 596e5fc6deda..1bea4be23ede 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Text.Json; using System.Text.Json.Nodes; using System.Threading; using System.Threading.Tasks; @@ -26,9 +27,15 @@ public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollectionTests public AzureCosmosDBNoSQLVectorStoreRecordCollectionTests() { + this._mockDatabase.Setup(l => l.GetContainer(It.IsAny())).Returns(this._mockContainer.Object); + + var mockClient = new Mock(); + + mockClient.Setup(l => l.ClientOptions).Returns(new CosmosClientOptions() { UseSystemTextJsonSerializerWithOptions = JsonSerializerOptions.Default }); + this._mockDatabase - .Setup(l => l.GetContainer(It.IsAny())) - .Returns(this._mockContainer.Object); + .Setup(l => l.Client) + .Returns(mockClient.Object); } [Fact] @@ -39,6 +46,20 @@ public void ConstructorForModelWithoutKeyThrowsException() Assert.Contains("No key property found", exception.Message); } + [Fact] + public void ConstructorWithoutSystemTextJsonSerializerOptionsThrowsArgumentException() + { + // Arrange + var mockDatabase = new Mock(); + var mockClient = new Mock(); + + mockDatabase.Setup(l => l.Client).Returns(mockClient.Object); + + // Act & Assert + var exception = Assert.Throws(() => new AzureCosmosDBNoSQLVectorStoreRecordCollection(mockDatabase.Object, "collection")); + Assert.Contains(nameof(CosmosClientOptions.UseSystemTextJsonSerializerWithOptions), exception.Message); + } + [Fact] public void ConstructorWithDeclarativeModelInitializesCollection() { diff --git 
a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreTests.cs index 84ad3b36f4a6..aa1e14a4a771 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreTests.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Text.Json; using System.Threading; using System.Threading.Tasks; using Microsoft.Azure.Cosmos; @@ -20,6 +21,17 @@ public sealed class AzureCosmosDBNoSQLVectorStoreTests { private readonly Mock _mockDatabase = new(); + public AzureCosmosDBNoSQLVectorStoreTests() + { + var mockClient = new Mock(); + + mockClient.Setup(l => l.ClientOptions).Returns(new CosmosClientOptions() { UseSystemTextJsonSerializerWithOptions = JsonSerializerOptions.Default }); + + this._mockDatabase + .Setup(l => l.Client) + .Returns(mockClient.Object); + } + [Fact] public void GetCollectionWithNotSupportedKeyThrowsException() { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index 6de899c1e100..86eb0d8baef7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -74,6 +74,13 @@ public AzureCosmosDBNoSQLVectorStoreRecordCollection( Verify.NotNull(database); Verify.NotNullOrWhiteSpace(collectionName); + if (database.Client?.ClientOptions?.UseSystemTextJsonSerializerWithOptions is null) + { + throw new ArgumentException( + $"Property {nameof(CosmosClientOptions.UseSystemTextJsonSerializerWithOptions)} in CosmosClient.ClientOptions " + + $"is required to be configured for {nameof(AzureCosmosDBNoSQLVectorStoreRecordCollection)}."); + } + // Assign. 
this._database = database; this.CollectionName = collectionName; From 747531cb9b6928ad6ee70df585c1b531a7b70d57 Mon Sep 17 00:00:00 2001 From: Adam Sitnik Date: Wed, 9 Apr 2025 11:31:53 +0200 Subject: [PATCH 35/63] .Net MEVD: Filter-only search API (without vector similarity) (#11112) Co-authored-by: westey <164392973+westey-m@users.noreply.github.com> --- ...extEmbeddingVectorStoreRecordCollection.cs | 5 + ...earchVectorStoreCollectionCreateMapping.cs | 15 +- ...zureAISearchVectorStoreRecordCollection.cs | 50 ++++++ ...mosDBMongoDBVectorStoreRecordCollection.cs | 56 ++++++ ...BNoSQLVectorStoreCollectionQueryBuilder.cs | 58 +++++++ ...osmosDBNoSQLVectorStoreRecordCollection.cs | 34 ++++ .../InMemoryVectorStoreRecordCollection.cs | 37 ++++ .../MongoDBVectorStoreRecordCollection.cs | 58 +++++++ .../PineconeVectorStoreRecordCollection.cs | 59 ++++++- .../IPostgresVectorStoreDbClient.cs | 3 + .../PostgresFilterTranslator.cs | 5 +- ...ionSqlBuilder.cs => PostgresSqlBuilder.cs} | 77 ++++++-- .../PostgresVectorStoreDbClient.cs | 61 ++++--- .../PostgresVectorStoreRecordCollection.cs | 29 ++++ .../MockableQdrantClient.cs | 19 ++ ...drantVectorStoreCollectionSearchMapping.cs | 32 ++++ .../QdrantVectorStoreRecordCollection.cs | 56 ++++++ ...RedisHashSetVectorStoreRecordCollection.cs | 36 ++++ .../RedisJsonVectorStoreRecordCollection.cs | 34 ++++ ...RedisVectorStoreCollectionSearchMapping.cs | 23 +++ .../SqlServerCommandBuilder.cs | 64 ++++++- .../SqlServerVectorStoreRecordCollection.cs | 32 ++++ ...liteVectorStoreCollectionCommandBuilder.cs | 55 +++++- .../SqliteVectorStoreRecordCollection.cs | 40 ++++- .../WeaviateVectorStoreRecordCollection.cs | 24 +++ ...VectorStoreRecordCollectionQueryBuilder.cs | 45 +++++ ...resVectorStoreCollectionSqlBuilderTests.cs | 44 ++--- .../CompatibilitySuppressions.xml | 21 +++ .../VectorStoreRecordModel.cs | 17 +- .../RecordOptions/GetFilteredRecordOptions.cs | 113 ++++++++++++ .../IVectorStoreRecordCollection.cs | 14 ++ .../AzureAISearchIntegrationTests.csproj | 2 +- .../Filter/AzureAISearchBasicQueryTests.cs | 24 +++ .../Filter/CosmosMongoBasicQueryTests.cs | 53 ++++++ .../Filter/CosmosNoSQLBasicQueryTests.cs | 35 ++++ .../Filter/InMemoryBasicQueryTests.cs | 17 ++ .../Filter/MongoDBBasicQueryTests.cs | 50 ++++++ .../Filter/PineconeBasicQueryTests.cs | 59 +++++++ .../Filter/PostgresBasicQueryTests.cs | 36 ++++ .../Filter/QdrantBasicQueryTests.cs | 35 ++++ .../Filter/RedisBasicQueryTests.cs | 164 ++++++++++++++++++ .../Filter/SqlServerBasicQueryTests.cs | 54 ++++++ .../Filter/SqliteBasicQueryTests.cs | 54 ++++++ .../Filter/BasicFilterTests.cs | 42 +++-- .../Filter/BasicQueryTests.cs | 53 ++++++ .../Filter/WeaviateBasicQueryTests.cs | 72 ++++++++ 46 files changed, 1855 insertions(+), 111 deletions(-) rename dotnet/src/Connectors/Connectors.Memory.Postgres/{PostgresVectorStoreCollectionSqlBuilder.cs => PostgresSqlBuilder.cs} (81%) create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetFilteredRecordOptions.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicQueryTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicQueryTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicQueryTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicQueryTests.cs create mode 100644 
dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicQueryTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicQueryTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicQueryTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicQueryTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicQueryTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicQueryTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicQueryTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs diff --git a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs index 8e53b42711af..faa506d71cb7 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Linq.Expressions; using System.Reflection; using System.Runtime.CompilerServices; using Microsoft.Extensions.VectorData; @@ -125,6 +126,10 @@ public Task> VectorizedSearchAsync(TVector return this._decoratedVectorStoreRecordCollection.VectorizedSearchAsync(vector, top, options, cancellationToken); } + /// + public IAsyncEnumerable GetAsync(Expression> filter, int top, GetFilteredRecordOptions? options = null, CancellationToken cancellationToken = default) + => this._decoratedVectorStoreRecordCollection.GetAsync(filter, top, options, cancellationToken); + /// public async Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs index b7e2b90fbdf8..33c73ba68c3a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs @@ -40,10 +40,21 @@ public static SimpleField MapDataField(VectorStoreRecordDataPropertyModel dataPr throw new InvalidOperationException($"Property {nameof(dataProperty.IsFullTextSearchable)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.ModelName}' is set to true, but the property type is not a string. The Azure AI Search VectorStore supports {nameof(dataProperty.IsFullTextSearchable)} on string properties only."); } - return new SearchableField(dataProperty.StorageName) { IsFilterable = dataProperty.IsFilterable }; + return new SearchableField(dataProperty.StorageName) + { + IsFilterable = dataProperty.IsFilterable, + // Sometimes the users ask to also OrderBy given filterable property, so we make it sortable. 
+ IsSortable = dataProperty.IsFilterable + }; } - return new SimpleField(dataProperty.StorageName, AzureAISearchVectorStoreCollectionCreateMapping.GetSDKFieldDataType(dataProperty.Type)) { IsFilterable = dataProperty.IsFilterable }; + var fieldType = AzureAISearchVectorStoreCollectionCreateMapping.GetSDKFieldDataType(dataProperty.Type); + return new SimpleField(dataProperty.StorageName, fieldType) + { + IsFilterable = dataProperty.IsFilterable, + // Sometimes the users ask to also OrderBy given filterable property, so we make it sortable. + IsSortable = dataProperty.IsFilterable && !fieldType.IsCollection + }; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index 9573128089e1..8831467ada89 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -350,6 +350,56 @@ public Task> VectorizedSearchAsync(TVector return this.SearchAndMapToDataModelAsync(null, searchOptions, internalOptions.IncludeVectors, cancellationToken); } + /// + public async IAsyncEnumerable GetAsync(Expression> filter, int top, + GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(filter); + Verify.NotLessThan(top, 1); + + options ??= new(); + + SearchOptions searchOptions = new() + { + VectorSearch = new(), + Size = top, + Skip = options.Skip, + Filter = new AzureAISearchFilterTranslator().Translate(filter, this._model), + }; + + // Filter out vector fields if requested. + if (!options.IncludeVectors) + { + searchOptions.Select.Add(this._model.KeyProperty.StorageName); + + foreach (var dataProperty in this._model.DataProperties) + { + searchOptions.Select.Add(dataProperty.StorageName); + } + } + + foreach (var pair in options.OrderBy.Values) + { + VectorStoreRecordPropertyModel property = this._model.GetDataOrKeyProperty(pair.PropertySelector); + string name = property.StorageName; + // From https://learn.microsoft.com/dotnet/api/azure.search.documents.searchoptions.orderby: + // "Each expression can be followed by asc to indicate ascending, or desc to indicate descending". + // "The default is ascending order." + if (!pair.Ascending) + { + name += " desc"; + } + + searchOptions.OrderBy.Add(name); + } + + VectorSearchResults vectorSearchResults = await this.SearchAndMapToDataModelAsync(null, searchOptions, options.IncludeVectors, cancellationToken).ConfigureAwait(false); + await foreach (var result in vectorSearchResults.Results.ConfigureAwait(false)) + { + yield return result.Record; + } + } + /// public Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index 1c3d80a78622..57f5e48140ae 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -335,6 +335,62 @@ public async Task> VectorizedSearchAsync( null; } + /// + public async IAsyncEnumerable GetAsync(Expression> filter, int top, + GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(filter); + Verify.NotLessThan(top, 1); + + options ??= new(); + + // Translate the filter now, so if it fails, we throw immediately. + var translatedFilter = new AzureCosmosDBMongoDBFilterTranslator().Translate(filter, this._model); + + SortDefinition? sortDefinition = null; + if (options.OrderBy.Values.Count > 0) + { + sortDefinition = Builders.Sort.Combine( + options.OrderBy.Values.Select(pair => + { + var storageName = this._model.GetDataOrKeyProperty(pair.PropertySelector).StorageName; + + return pair.Ascending + ? Builders.Sort.Ascending(storageName) + : Builders.Sort.Descending(storageName); + })); + } + + using IAsyncCursor cursor = await this.RunOperationAsync( + "GetAsync", + async () => + { + return await this._mongoCollection.FindAsync(translatedFilter, + new() + { + Limit = top, + Skip = options.Skip, + Sort = sortDefinition + }, + cancellationToken: cancellationToken).ConfigureAwait(false); + }).ConfigureAwait(false); + + while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false)) + { + foreach (var response in cursor.Current) + { + var record = VectorStoreErrorHandler.RunModelConversion( + AzureCosmosDBMongoDBConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, + this.CollectionName, + "GetAsync", + () => this._mapper.MapFromStorageToDataModel(response, new() { IncludeVectors = options.IncludeVectors })); + + yield return record; + } + } + } + #region private private async Task CreateIndexesAsync(string collectionName, CancellationToken cancellationToken) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs index beea8cf3f368..240c40bf9db0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs @@ -125,6 +125,64 @@ public static QueryDefinition BuildSearchQuery( return queryDefinition; } + internal static QueryDefinition BuildSearchQuery( + VectorStoreRecordModel model, + string whereClause, Dictionary filterParameters, + GetFilteredRecordOptions filterOptions, + int top) + { + var tableVariableName = AzureCosmosDBNoSQLConstants.ContainerAlias; + + IEnumerable projectionProperties = model.Properties; + if (!filterOptions.IncludeVectors) + { + projectionProperties = projectionProperties.Where(p => p is not VectorStoreRecordVectorPropertyModel); + } + + var fieldsArgument = projectionProperties.Select(field => 
$"{tableVariableName}.{field}"); + + var selectClauseArguments = string.Join(SelectClauseDelimiter, [.. fieldsArgument]); + + // If Offset is not configured, use Top parameter instead of Limit/Offset + // since it's more optimized. + var topArgument = filterOptions.Skip == 0 ? $"TOP {top} " : string.Empty; + + var builder = new StringBuilder(); + + builder.AppendLine($"SELECT {topArgument}{selectClauseArguments}"); + builder.AppendLine($"FROM {tableVariableName}"); + builder.Append("WHERE ").AppendLine(whereClause); + + if (filterOptions.OrderBy.Values.Count > 0) + { + builder.Append("ORDER BY "); + + foreach (var sortInfo in filterOptions.OrderBy.Values) + { + builder.AppendFormat("{0}.{1} {2},", tableVariableName, + model.GetDataOrKeyProperty(sortInfo.PropertySelector).StorageName, + sortInfo.Ascending ? "ASC" : "DESC"); + } + + builder.Length--; // remove the last comma + builder.AppendLine(); + } + + if (string.IsNullOrEmpty(topArgument)) + { + builder.AppendLine($"OFFSET {filterOptions.Skip} LIMIT {top}"); + } + + var queryDefinition = new QueryDefinition(builder.ToString()); + + foreach (var queryParameter in filterParameters) + { + queryDefinition.WithParameter(queryParameter.Key, queryParameter.Value); + } + + return queryDefinition; + } + /// /// Builds to get items from Azure CosmosDB NoSQL. /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index 86eb0d8baef7..2ef839f61aec 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Collections.ObjectModel; using System.Linq; +using System.Linq.Expressions; using System.Runtime.CompilerServices; using System.Text.Json; using System.Text.Json.Nodes; @@ -352,6 +353,39 @@ public Task> VectorizedSearchAsync( return Task.FromResult(new VectorSearchResults(mappedResults)); } + /// + public async IAsyncEnumerable GetAsync(Expression> filter, int top, + GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(filter); + Verify.NotLessThan(top, 1); + + options ??= new(); + + var (whereClause, filterParameters) = new AzureCosmosDBNoSqlFilterTranslator().Translate(filter, this._model); + + var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( + this._model, + whereClause, + filterParameters, + options, + top); + + var searchResults = this.GetItemsAsync(queryDefinition, cancellationToken); + + await foreach (var jsonObject in searchResults.ConfigureAwait(false)) + { + var record = VectorStoreErrorHandler.RunModelConversion( + AzureCosmosDBNoSQLConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, + this.CollectionName, + "GetAsync", + () => this._mapper.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = options.IncludeVectors })); + + yield return record; + } + } + /// public Task> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? 
options = null, CancellationToken cancellationToken = default) { diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index a6c95587fcc5..769635a99101 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -324,6 +324,43 @@ public async Task> VectorizedSearchAsync(T null; } + /// + public IAsyncEnumerable GetAsync(Expression> filter, int top, + GetFilteredRecordOptions? options = null, CancellationToken cancellationToken = default) + { + Verify.NotNull(filter); + Verify.NotLessThan(top, 1); + + options ??= new(); + + var records = this.GetCollectionDictionary().Values.Cast() + .AsQueryable() + .Where(filter); + + if (options.OrderBy.Values.Count > 0) + { + var first = options.OrderBy.Values[0]; + var sorted = first.Ascending + ? records.OrderBy(first.PropertySelector) + : records.OrderByDescending(first.PropertySelector); + + for (int i = 1; i < options.OrderBy.Values.Count; i++) + { + var next = options.OrderBy.Values[i]; + sorted = next.Ascending + ? sorted.ThenBy(next.PropertySelector) + : sorted.ThenByDescending(next.PropertySelector); + } + + records = sorted; + } + + return records + .Skip(options.Skip) + .Take(top) + .ToAsyncEnumerable(); + } + /// /// Get the collection dictionary from the internal storage, throws if it does not exist. /// diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index 5c6431ee33cc..f94b47f6ad13 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -314,6 +314,64 @@ public async Task> VectorizedSearchAsync( cancellationToken).ConfigureAwait(false); } + /// + public async IAsyncEnumerable GetAsync(Expression> filter, int top, + GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(filter); + Verify.NotLessThan(top, 1); + + options ??= new(); + + // Translate the filter now, so if it fails, we throw immediately. + var translatedFilter = new MongoDBFilterTranslator().Translate(filter, this._model); + SortDefinition? sortDefinition = null; + if (options.OrderBy.Values.Count > 0) + { + sortDefinition = Builders.Sort.Combine( + options.OrderBy.Values.Select(pair => + { + var storageName = this._model.GetDataOrKeyProperty(pair.PropertySelector).StorageName; + + return pair.Ascending + ? 
Builders.Sort.Ascending(storageName) + : Builders.Sort.Descending(storageName); + })); + } + + using IAsyncCursor cursor = await this.RunOperationWithRetryAsync( + "GetAsync", + this._options.MaxRetries, + this._options.DelayInMilliseconds, + async () => + { + return await this._mongoCollection.FindAsync(translatedFilter, + new() + { + Limit = top, + Skip = options.Skip, + Sort = sortDefinition + }, + cancellationToken: cancellationToken).ConfigureAwait(false); + }, + cancellationToken).ConfigureAwait(false); + + while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false)) + { + foreach (var response in cursor.Current) + { + var record = VectorStoreErrorHandler.RunModelConversion( + MongoDBConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, + this.CollectionName, + "GetAsync", + () => this._mapper.MapFromStorageToDataModel(response, new() { IncludeVectors = options.IncludeVectors })); + + yield return record; + } + } + } + /// public async Task> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 030b0bbf8adc..613383381458 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -352,7 +352,7 @@ public async Task> VectorizedSearchAsync(T }; Sdk.QueryResponse response = await this.RunIndexOperationAsync( - "Query", + "VectorizedSearch", indexClient => indexClient.QueryAsync(request, cancellationToken: cancellationToken)).ConfigureAwait(false); if (response.Matches is null) @@ -369,7 +369,7 @@ public async Task> VectorizedSearchAsync(T PineconeConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this.CollectionName, - "Query", + "VectorizedSearch", () => skippedResults.Select(x => new VectorSearchResult(this._mapper.MapFromStorageToDataModel(new Sdk.Vector() { Id = x.Id, @@ -382,6 +382,61 @@ public async Task> VectorizedSearchAsync(T return new(records); } + /// + public async IAsyncEnumerable GetAsync(Expression> filter, int top, GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(filter); + Verify.NotLessThan(top, 1); + + if (options?.OrderBy.Values.Count > 0) + { + throw new NotSupportedException("Pinecone does not support ordering."); + } + + options ??= new(); + + Sdk.QueryRequest request = new() + { + TopK = (uint)(top + options.Skip), + Namespace = this._options.IndexNamespace, + IncludeValues = options.IncludeVectors, + IncludeMetadata = true, + // "Either 'vector' or 'ID' must be provided" + // Since we are doing a query, we don't have a vector to provide, so we fake one. + // When https://github.com/pinecone-io/pinecone-dotnet-client/issues/43 gets implemented, we need to switch. 
+ Vector = new ReadOnlyMemory(new float[this._model.VectorProperty.Dimensions!.Value]), + Filter = new PineconeFilterTranslator().Translate(filter, this._model), + }; + + Sdk.QueryResponse response = await this.RunIndexOperationAsync( + "Get", + indexClient => indexClient.QueryAsync(request, cancellationToken: cancellationToken)).ConfigureAwait(false); + + if (response.Matches is null) + { + yield break; + } + + StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = options.IncludeVectors is true }; + var records = VectorStoreErrorHandler.RunModelConversion( + PineconeConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, + this.CollectionName, + "Query", + () => response.Matches.Skip(options.Skip).Select(x => this._mapper.MapFromStorageToDataModel(new Sdk.Vector() + { + Id = x.Id, + Values = x.Values ?? Array.Empty(), + Metadata = x.Metadata, + SparseValues = x.SparseValues + }, mapperOptions))); + + foreach (var record in records) + { + yield return record; + } + } + /// public object? GetService(Type serviceType, object? serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs index 9b3ff9273182..69309e2761b6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs @@ -140,4 +140,7 @@ internal interface IPostgresVectorStoreDbClient IAsyncEnumerable<(Dictionary Row, double Distance)> GetNearestMatchesAsync(string tableName, VectorStoreRecordModel model, VectorStoreRecordVectorPropertyModel vectorProperty, Vector vectorValue, int limit, VectorSearchFilter? legacyFilter = default, Expression>? newFilter = default, int? skip = default, bool includeVectors = false, CancellationToken cancellationToken = default); #pragma warning restore CS0618 // VectorSearchFilter is obsolete + + IAsyncEnumerable> GetMatchingRecordsAsync(string tableName, VectorStoreRecordModel model, + Expression> filter, int top, GetFilteredRecordOptions options, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs index 722b2be96eaa..60382990aecf 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs @@ -2,6 +2,7 @@ using System.Collections.Generic; using System.Linq.Expressions; +using System.Text; using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Postgres; @@ -14,8 +15,8 @@ internal sealed class PostgresFilterTranslator : SqlFilterTranslator internal PostgresFilterTranslator( VectorStoreRecordModel model, LambdaExpression lambdaExpression, - int startParamIndex) - : base(model, lambdaExpression, sql: null) + int startParamIndex, + StringBuilder? 
sql = null) : base(model, lambdaExpression, sql) { this._parameterIndex = startParamIndex; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresSqlBuilder.cs similarity index 81% rename from dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs rename to dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresSqlBuilder.cs index 139892ff8a51..e744d88efce3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreCollectionSqlBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresSqlBuilder.cs @@ -16,10 +16,10 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; /// /// Provides methods to build SQL commands for managing vector store collections in PostgreSQL. /// -internal class PostgresVectorStoreCollectionSqlBuilder : IPostgresVectorStoreCollectionSqlBuilder +internal static class PostgresSqlBuilder { /// - public PostgresSqlCommandInfo BuildDoesTableExistCommand(string schema, string tableName) + internal static PostgresSqlCommandInfo BuildDoesTableExistCommand(string schema, string tableName) { return new PostgresSqlCommandInfo( commandText: """ @@ -37,7 +37,7 @@ FROM information_schema.tables } /// - public PostgresSqlCommandInfo BuildGetTablesCommand(string schema) + internal static PostgresSqlCommandInfo BuildGetTablesCommand(string schema) { return new PostgresSqlCommandInfo( commandText: """ @@ -50,7 +50,7 @@ FROM information_schema.tables } /// - public PostgresSqlCommandInfo BuildCreateTableCommand(string schema, string tableName, VectorStoreRecordModel model, bool ifNotExists = true) + internal static PostgresSqlCommandInfo BuildCreateTableCommand(string schema, string tableName, VectorStoreRecordModel model, bool ifNotExists = true) { if (string.IsNullOrWhiteSpace(tableName)) { @@ -90,7 +90,7 @@ public PostgresSqlCommandInfo BuildCreateTableCommand(string schema, string tabl } /// - public PostgresSqlCommandInfo BuildCreateIndexCommand(string schema, string tableName, string columnName, string indexKind, string distanceFunction, bool isVector, bool ifNotExists) + internal static PostgresSqlCommandInfo BuildCreateIndexCommand(string schema, string tableName, string columnName, string indexKind, string distanceFunction, bool isVector, bool ifNotExists) { var indexName = $"{tableName}_{columnName}_index"; @@ -127,7 +127,7 @@ public PostgresSqlCommandInfo BuildCreateIndexCommand(string schema, string tabl } /// - public PostgresSqlCommandInfo BuildDropTableCommand(string schema, string tableName) + internal static PostgresSqlCommandInfo BuildDropTableCommand(string schema, string tableName) { return new PostgresSqlCommandInfo( commandText: $@"DROP TABLE IF EXISTS {schema}.""{tableName}""" @@ -135,7 +135,7 @@ public PostgresSqlCommandInfo BuildDropTableCommand(string schema, string tableN } /// - public PostgresSqlCommandInfo BuildUpsertCommand(string schema, string tableName, string keyColumn, Dictionary row) + internal static PostgresSqlCommandInfo BuildUpsertCommand(string schema, string tableName, string keyColumn, Dictionary row) { var columns = row.Keys.ToList(); var columnNames = string.Join(", ", columns.Select(k => $"\"{k}\"")); @@ -158,7 +158,7 @@ ON CONFLICT ("{keyColumn}") } /// - public PostgresSqlCommandInfo BuildUpsertBatchCommand(string schema, string tableName, string keyColumn, List> rows) + internal static PostgresSqlCommandInfo BuildUpsertBatchCommand(string schema, 
string tableName, string keyColumn, List> rows) { if (rows == null || rows.Count == 0) { @@ -205,7 +205,7 @@ ON CONFLICT ("{keyColumn}") } /// - public PostgresSqlCommandInfo BuildGetCommand(string schema, string tableName, VectorStoreRecordModel model, TKey key, bool includeVectors = false) + internal static PostgresSqlCommandInfo BuildGetCommand(string schema, string tableName, VectorStoreRecordModel model, TKey key, bool includeVectors = false) where TKey : notnull { List queryColumns = new(); @@ -228,7 +228,7 @@ public PostgresSqlCommandInfo BuildGetCommand(string schema, string tableN } /// - public PostgresSqlCommandInfo BuildGetBatchCommand(string schema, string tableName, VectorStoreRecordModel model, List keys, bool includeVectors = false) + internal static PostgresSqlCommandInfo BuildGetBatchCommand(string schema, string tableName, VectorStoreRecordModel model, List keys, bool includeVectors = false) where TKey : notnull { NpgsqlDbType? keyType = PostgresVectorStoreRecordPropertyMapping.GetNpgsqlDbType(typeof(TKey)) ?? throw new ArgumentException($"Unsupported key type {typeof(TKey).Name}"); @@ -256,7 +256,7 @@ public PostgresSqlCommandInfo BuildGetBatchCommand(string schema, string t } /// - public PostgresSqlCommandInfo BuildDeleteCommand(string schema, string tableName, string keyColumn, TKey key) + internal static PostgresSqlCommandInfo BuildDeleteCommand(string schema, string tableName, string keyColumn, TKey key) { return new PostgresSqlCommandInfo( commandText: $""" @@ -268,7 +268,7 @@ DELETE FROM {schema}."{tableName}" } /// - public PostgresSqlCommandInfo BuildDeleteBatchCommand(string schema, string tableName, string keyColumn, List keys) + internal static PostgresSqlCommandInfo BuildDeleteBatchCommand(string schema, string tableName, string keyColumn, List keys) { NpgsqlDbType? keyType = PostgresVectorStoreRecordPropertyMapping.GetNpgsqlDbType(typeof(TKey)) ?? throw new ArgumentException($"Unsupported key type {typeof(TKey).Name}"); @@ -293,7 +293,7 @@ DELETE FROM {schema}."{tableName}" #pragma warning disable CS0618 // VectorSearchFilter is obsolete /// - public PostgresSqlCommandInfo BuildGetNearestMatchCommand( + internal static PostgresSqlCommandInfo BuildGetNearestMatchCommand( string schema, string tableName, VectorStoreRecordModel model, VectorStoreRecordVectorPropertyModel vectorProperty, Vector vectorValue, VectorSearchFilter? legacyFilter, Expression>? newFilter, int? 
skip, bool includeVectors, int limit) { @@ -319,7 +319,7 @@ public PostgresSqlCommandInfo BuildGetNearestMatchCommand( { (not null, not null) => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), (not null, null) => GenerateLegacyFilterWhereClause(schema, tableName, model, legacyFilter, startParamIndex: 2), - (null, not null) => GenerateNewFilterWhereClause(model, newFilter), + (null, not null) => GenerateNewFilterWhereClause(model, newFilter, startParamIndex: 2), _ => (Clause: string.Empty, Parameters: []) }; #pragma warning restore CS0618 // VectorSearchFilter is obsolete @@ -361,9 +361,54 @@ ORDER BY {PostgresConstants.DistanceColumnName} }; } - internal static (string Clause, List Parameters) GenerateNewFilterWhereClause(VectorStoreRecordModel model, LambdaExpression newFilter) + internal static PostgresSqlCommandInfo BuildSelectWhereCommand( + string schema, string tableName, VectorStoreRecordModel model, + Expression> filter, int top, GetFilteredRecordOptions options) { - PostgresFilterTranslator translator = new(model, newFilter, startParamIndex: 2); + StringBuilder query = new(200); + query.Append("SELECT "); + foreach (var property in model.Properties) + { + if (options.IncludeVectors || property is not VectorStoreRecordVectorPropertyModel) + { + query.AppendFormat("\"{0}\",", property.StorageName); + } + } + query.Length--; // Remove trailing comma + query.AppendLine(); + query.AppendFormat("FROM {0}.\"{1}\"", schema, tableName).AppendLine(); + + PostgresFilterTranslator translator = new(model, filter, startParamIndex: 1, query); + translator.Translate(appendWhere: true); + query.AppendLine(); + + if (options.OrderBy.Values.Count > 0) + { + query.Append("ORDER BY "); + + foreach (var sortInfo in options.OrderBy.Values) + { + query.AppendFormat("\"{0}\" {1},", + model.GetDataOrKeyProperty(sortInfo.PropertySelector).StorageName, + sortInfo.Ascending ? "ASC" : "DESC"); + } + + query.Length--; // remove the last comma + query.AppendLine(); + } + + query.AppendFormat("OFFSET {0}", options.Skip).AppendLine(); + query.AppendFormat("LIMIT {0}", top).AppendLine(); + + return new PostgresSqlCommandInfo(query.ToString()) + { + Parameters = translator.ParameterValues.Select(p => new NpgsqlParameter { Value = p }).ToList() + }; + } + + internal static (string Clause, List Parameters) GenerateNewFilterWhereClause(VectorStoreRecordModel model, LambdaExpression newFilter, int startParamIndex) + { + PostgresFilterTranslator translator = new(model, newFilter, startParamIndex); translator.Translate(appendWhere: true); return (translator.Clause.ToString(), translator.ParameterValues); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs index 178f60589503..27a2b55110a8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs @@ -29,8 +29,6 @@ internal class PostgresVectorStoreDbClient(NpgsqlDataSource dataSource, string s private readonly NpgsqlConnectionStringBuilder _connectionStringBuilder = new(dataSource.ConnectionString); - private IPostgresVectorStoreCollectionSqlBuilder _sqlBuilder = new PostgresVectorStoreCollectionSqlBuilder(); - public NpgsqlDataSource DataSource { get; } = dataSource; public string? 
DatabaseName => this._connectionStringBuilder.Database; @@ -42,7 +40,7 @@ public async Task DoesTableExistsAsync(string tableName, CancellationToken await using (connection) { - var commandInfo = this._sqlBuilder.BuildDoesTableExistCommand(this._schema, tableName); + var commandInfo = PostgresSqlBuilder.BuildDoesTableExistCommand(this._schema, tableName); using NpgsqlCommand cmd = commandInfo.ToNpgsqlCommand(connection); using NpgsqlDataReader dataReader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); if (await dataReader.ReadAsync(cancellationToken).ConfigureAwait(false)) @@ -61,7 +59,7 @@ public async IAsyncEnumerable GetTablesAsync([EnumeratorCancellation] Ca await using (connection) { - var commandInfo = this._sqlBuilder.BuildGetTablesCommand(this._schema); + var commandInfo = PostgresSqlBuilder.BuildGetTablesCommand(this._schema); using NpgsqlCommand cmd = commandInfo.ToNpgsqlCommand(connection); using NpgsqlDataReader dataReader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); while (await dataReader.ReadAsync(cancellationToken).ConfigureAwait(false)) @@ -75,11 +73,11 @@ public async IAsyncEnumerable GetTablesAsync([EnumeratorCancellation] Ca public async Task CreateTableAsync(string tableName, VectorStoreRecordModel model, bool ifNotExists = true, CancellationToken cancellationToken = default) { // Prepare the SQL commands. - var commandInfo = this._sqlBuilder.BuildCreateTableCommand(this._schema, tableName, model, ifNotExists); + var commandInfo = PostgresSqlBuilder.BuildCreateTableCommand(this._schema, tableName, model, ifNotExists); var createIndexCommands = PostgresVectorStoreRecordPropertyMapping.GetIndexInfo(model.Properties) .Select(index => - this._sqlBuilder.BuildCreateIndexCommand(this._schema, tableName, index.column, index.kind, index.function, index.isVector, ifNotExists)); + PostgresSqlBuilder.BuildCreateIndexCommand(this._schema, tableName, index.column, index.kind, index.function, index.isVector, ifNotExists)); // Execute the commands in a transaction. 
NpgsqlConnection connection = await this.DataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false); @@ -115,21 +113,21 @@ public async Task CreateTableAsync(string tableName, VectorStoreRecordModel mode /// public async Task DeleteTableAsync(string tableName, CancellationToken cancellationToken = default) { - var commandInfo = this._sqlBuilder.BuildDropTableCommand(this._schema, tableName); + var commandInfo = PostgresSqlBuilder.BuildDropTableCommand(this._schema, tableName); await this.ExecuteNonQueryAsync(commandInfo, cancellationToken).ConfigureAwait(false); } /// public async Task UpsertAsync(string tableName, Dictionary row, string keyColumn, CancellationToken cancellationToken = default) { - var commandInfo = this._sqlBuilder.BuildUpsertCommand(this._schema, tableName, keyColumn, row); + var commandInfo = PostgresSqlBuilder.BuildUpsertCommand(this._schema, tableName, keyColumn, row); await this.ExecuteNonQueryAsync(commandInfo, cancellationToken).ConfigureAwait(false); } /// public async Task UpsertBatchAsync(string tableName, IEnumerable> rows, string keyColumn, CancellationToken cancellationToken = default) { - var commandInfo = this._sqlBuilder.BuildUpsertBatchCommand(this._schema, tableName, keyColumn, rows.ToList()); + var commandInfo = PostgresSqlBuilder.BuildUpsertBatchCommand(this._schema, tableName, keyColumn, rows.ToList()); await this.ExecuteNonQueryAsync(commandInfo, cancellationToken).ConfigureAwait(false); } @@ -140,7 +138,7 @@ public async Task UpsertBatchAsync(string tableName, IEnumerable public async Task DeleteAsync(string tableName, string keyColumn, TKey key, CancellationToken cancellationToken = default) { - var commandInfo = this._sqlBuilder.BuildDeleteCommand(this._schema, tableName, keyColumn, key); + var commandInfo = PostgresSqlBuilder.BuildDeleteCommand(this._schema, tableName, keyColumn, key); await this.ExecuteNonQueryAsync(commandInfo, cancellationToken).ConfigureAwait(false); } @@ -196,7 +194,7 @@ public async Task DeleteAsync(string tableName, string keyColumn, TKey key await using (connection) { - var commandInfo = this._sqlBuilder.BuildGetNearestMatchCommand(this._schema, tableName, model, vectorProperty, vectorValue, legacyFilter, newFilter, skip, includeVectors, limit); + var commandInfo = PostgresSqlBuilder.BuildGetNearestMatchCommand(this._schema, tableName, model, vectorProperty, vectorValue, legacyFilter, newFilter, skip, includeVectors, limit); using NpgsqlCommand cmd = commandInfo.ToNpgsqlCommand(connection); using NpgsqlDataReader dataReader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); while (await dataReader.ReadAsync(cancellationToken).ConfigureAwait(false)) @@ -207,6 +205,24 @@ public async Task DeleteAsync(string tableName, string keyColumn, TKey key } } + public async IAsyncEnumerable> GetMatchingRecordsAsync(string tableName, VectorStoreRecordModel model, + Expression> filter, int top, GetFilteredRecordOptions options, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + NpgsqlConnection connection = await this.DataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false); + + await using (connection) + { + var commandInfo = PostgresSqlBuilder.BuildSelectWhereCommand(this._schema, tableName, model, filter, top, options); + using NpgsqlCommand cmd = commandInfo.ToNpgsqlCommand(connection); + using NpgsqlDataReader dataReader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + + while (await 
dataReader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + yield return this.GetRecord(dataReader, model, options.IncludeVectors); + } + } + } + /// public async Task DeleteBatchAsync(string tableName, string keyColumn, IEnumerable keys, CancellationToken cancellationToken = default) { @@ -216,27 +232,10 @@ public async Task DeleteBatchAsync(string tableName, string keyColumn, IEn return; } - var commandInfo = this._sqlBuilder.BuildDeleteBatchCommand(this._schema, tableName, keyColumn, listOfKeys); + var commandInfo = PostgresSqlBuilder.BuildDeleteBatchCommand(this._schema, tableName, keyColumn, listOfKeys); await this.ExecuteNonQueryAsync(commandInfo, cancellationToken).ConfigureAwait(false); } - #region internal =============================================================================== - - /// - /// Sets the SQL builder for the client. - /// - /// - /// - /// This method is used for other Semnatic Kernel connectors that may need to override the default SQL - /// used by this client. - /// - internal void SetSqlBuilder(IPostgresVectorStoreCollectionSqlBuilder sqlBuilder) - { - this._sqlBuilder = sqlBuilder; - } - - #endregion - #region private ================================================================================ private Dictionary GetRecord( diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index 995edf67dbe8..21d61be689a1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; @@ -311,6 +312,34 @@ public Task> VectorizedSearchAsync(TVector }); } + /// + public async IAsyncEnumerable GetAsync(Expression> filter, int top, + GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(filter); + Verify.NotLessThan(top, 1); + + options ??= new(); + + StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = options.IncludeVectors }; + + await foreach (var dictionary in this._client.GetMatchingRecordsAsync( + this.CollectionName, + this._model, + filter, + top, + options, + cancellationToken).ConfigureAwait(false)) + { + yield return VectorStoreErrorHandler.RunModelConversion( + PostgresConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, + this.CollectionName, + "Get", + () => this._mapper.MapFromStorageToDataModel(dictionary, mapperOptions)); + } + } + /// public object? GetService(Type serviceType, object? serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/MockableQdrantClient.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/MockableQdrantClient.cs index 8575bc3bef7d..7461b574184e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/MockableQdrantClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/MockableQdrantClient.cs @@ -312,4 +312,23 @@ public virtual Task> QueryAsync( lookupFrom, timeout, cancellationToken); + + public virtual Task ScrollAsync( + string collectionName, + Filter filter, + WithVectorsSelector vectorsSelector, + uint limit = 10, + OrderBy? 
orderBy = null, + CancellationToken cancellationToken = default) + => this._qdrantClient.ScrollAsync( + collectionName, + filter, + limit, + offset: null, + payloadSelector: null, + vectorsSelector, + readConsistency: null, + shardKeySelector: null, + orderBy, + cancellationToken); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs index a7de27756fd8..eccb2d4e89fd 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs @@ -133,4 +133,36 @@ public static VectorSearchResult MapScoredPointToVectorSearchResult mapper.MapFromStorageToDataModel(pointStruct, new() { IncludeVectors = includeVectors })), point.Score); } + +#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete + internal static TRecord MapRetrievedPointToVectorSearchResult(RetrievedPoint point, + IVectorStoreRecordMapper mapper, + bool includeVectors, + string vectorStoreSystemName, + string? vectorStoreName, + string collectionName, + string operationName) +#pragma warning restore CS0618 + { + // Since the mapper doesn't know about scored points, we need to convert the scored point to a point struct first. + var pointStruct = new PointStruct + { + Id = point.Id, + Vectors = point.Vectors, + Payload = { } + }; + + foreach (KeyValuePair payloadEntry in point.Payload) + { + pointStruct.Payload.Add(payloadEntry.Key, payloadEntry.Value); + } + + // Do the mapping with error handling. + return VectorStoreErrorHandler.RunModelConversion( + vectorStoreSystemName, + vectorStoreName, + collectionName, + operationName, + () => mapper.MapFromStorageToDataModel(pointStruct, new() { IncludeVectors = includeVectors })); + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index f18d09b09cf6..563bcef22824 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -511,6 +511,62 @@ public async Task> VectorizedSearchAsync(T return new VectorSearchResults(mappedResults.ToAsyncEnumerable()); } + /// + public async IAsyncEnumerable GetAsync(Expression> filter, int top, + GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(filter); + Verify.NotLessThan(top, 1); + + options ??= new(); + + var translatedFilter = new QdrantFilterTranslator().Translate(filter, this._model); + + // Specify whether to include vectors in the search results. + WithVectorsSelector vectorsSelector = new() { Enable = options.IncludeVectors }; + + var sortInfo = options.OrderBy.Values.Count switch + { + 0 => null, + 1 => options.OrderBy.Values[0], + _ => throw new NotSupportedException("Qdrant does not support ordering by more than one property.") + }; + + OrderBy? orderBy = null; + if (sortInfo is not null) + { + var orderByName = this._model.GetDataOrKeyProperty(sortInfo.PropertySelector).StorageName; + orderBy = new(orderByName) + { + Direction = sortInfo.Ascending ? 
global::Qdrant.Client.Grpc.Direction.Asc : global::Qdrant.Client.Grpc.Direction.Desc + }; + } + + var scrollResponse = await this.RunOperationAsync( + "Scroll", + () => this._qdrantClient.ScrollAsync( + this.CollectionName, + translatedFilter, + vectorsSelector, + limit: (uint)(top + options.Skip), + orderBy, + cancellationToken: cancellationToken)).ConfigureAwait(false); + + var mappedResults = scrollResponse.Result.Skip(options.Skip).Select(point => QdrantVectorStoreCollectionSearchMapping.MapRetrievedPointToVectorSearchResult( + point, + this._mapper, + options.IncludeVectors, + QdrantConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, + this._collectionName, + "Scroll")); + + foreach (var mappedResult in mappedResults) + { + yield return mappedResult; + } + } + /// public async Task> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index 427c80c3b683..31fc2d16d95d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; @@ -381,6 +382,41 @@ public async Task> VectorizedSearchAsync(T return new VectorSearchResults(mappedResults.ToAsyncEnumerable()); } + /// + public async IAsyncEnumerable GetAsync(Expression> filter, int top, + GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(filter); + Verify.NotLessThan(top, 1); + + Query query = RedisVectorStoreCollectionSearchMapping.BuildQuery(filter, top, options ??= new(), this._model); + + var results = await this.RunOperationAsync( + "FT.SEARCH", + () => this._database + .FT() + .SearchAsync(this._collectionName, query)).ConfigureAwait(false); + + foreach (var document in results.Documents) + { + var retrievedHashEntries = this._model.DataProperties.Select(p => p.StorageName) + .Concat(this._model.VectorProperties.Select(p => p.StorageName)) + .Select(propertyName => new HashEntry(propertyName, document[propertyName])) + .ToArray(); + + // Convert to the caller's data model. + yield return VectorStoreErrorHandler.RunModelConversion( + RedisConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, + this._collectionName, + "FT.SEARCH", + () => + { + return this._mapper.MapFromStorageToDataModel((this.RemoveKeyPrefixIfNeeded(document.Id), retrievedHashEntries), new() { IncludeVectors = options.IncludeVectors }); + }); + } + } + /// public object? GetService(Type serviceType, object? 
serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index 0098de90bd6d..85ccc13f8667 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Runtime.CompilerServices; using System.Text.Json; using System.Text.Json.Nodes; @@ -451,6 +452,39 @@ public async Task> VectorizedSearchAsync(T return new VectorSearchResults(mappedResults.ToAsyncEnumerable()); } + /// + public async IAsyncEnumerable GetAsync(Expression> filter, int top, + GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(filter); + Verify.NotLessThan(top, 1); + + Query query = RedisVectorStoreCollectionSearchMapping.BuildQuery(filter, top, options ??= new(), this._model); + + var results = await this.RunOperationAsync( + "FT.SEARCH", + () => this._database + .FT() + .SearchAsync(this._collectionName, query)).ConfigureAwait(false); + + foreach (var document in results.Documents) + { + var redisResultString = document["json"].ToString(); + yield return VectorStoreErrorHandler.RunModelConversion( + RedisConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, + this._collectionName, + "FT.SEARCH", + () => + { + var node = JsonSerializer.Deserialize(redisResultString, this._jsonSerializerOptions)!; + return this._mapper.MapFromStorageToDataModel( + (this.RemoveKeyPrefixIfNeeded(document.Id), node), + new() { IncludeVectors = options.IncludeVectors }); + }); + } + } + /// public object? GetService(Type serviceType, object? serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs index adbd37da289c..2140f3b24c48 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionSearchMapping.cs @@ -71,6 +71,29 @@ public static Query BuildQuery(byte[] vectorBytes, int top, VectorSearc return query; } + internal static Query BuildQuery(Expression> filter, int top, GetFilteredRecordOptions options, VectorStoreRecordModel model) + { + var translatedFilter = new RedisFilterTranslator().Translate(filter, model); + Query query = new Query(translatedFilter) + .Limit(options.Skip, top) + .Dialect(2); + + var sortInfo = options.OrderBy.Values.Count switch + { + 0 => null, + 1 => options.OrderBy.Values[0], + _ => throw new NotSupportedException("Redis does not support ordering by more than one property.") + }; + + if (sortInfo is not null) + { + string storageName = model.GetDataOrKeyProperty(sortInfo.PropertySelector).StorageName; + query = query.SetSortBy(field: storageName, ascending: sortInfo.Ascending); + } + + return query; + } + /// /// Build a redis filter string from the provided . 
/// diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs index 73b7a4b172de..eb67f4897dc0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using System.Linq.Expressions; using System.Text; using System.Text.Json; using Microsoft.Data.SqlClient; @@ -374,6 +375,65 @@ internal static SqlCommand SelectVector( return command; } + internal static SqlCommand SelectWhere( + Expression> filter, + int top, + GetFilteredRecordOptions options, + SqlConnection connection, string? schema, string tableName, + VectorStoreRecordModel model) + { + SqlCommand command = connection.CreateCommand(); + + StringBuilder sb = new(200); + sb.AppendFormat("SELECT "); + sb.AppendColumnNames(model.Properties, includeVectors: options.IncludeVectors); + sb.AppendLine(); + sb.Append("FROM "); + sb.AppendTableName(schema, tableName); + sb.AppendLine(); + if (filter is not null) + { + int startParamIndex = command.Parameters.Count; + + SqlServerFilterTranslator translator = new(model, filter, sb, startParamIndex: startParamIndex); + translator.Translate(appendWhere: true); + List parameters = translator.ParameterValues; + + foreach (object parameter in parameters) + { + command.AddParameter(property: null, $"@_{startParamIndex++}", parameter); + } + sb.AppendLine(); + } + + if (options.OrderBy.Values.Count > 0) + { + sb.Append("ORDER BY "); + + foreach (var sortInfo in options.OrderBy.Values) + { + sb.AppendFormat("[{0}] {1},", + model.GetDataOrKeyProperty(sortInfo.PropertySelector).StorageName, + sortInfo.Ascending ? "ASC" : "DESC"); + } + + sb.Length--; // remove the last comma + sb.AppendLine(); + } + else + { + // no order by properties, but we need to add something for OFFSET and NEXT to work + sb.AppendLine("ORDER BY (SELECT 1)"); + } + + // Negative Skip and Top values are rejected by the GetFilteredRecordOptions property setters. + // 0 is a legal value for OFFSET. + sb.AppendFormat("OFFSET {0} ROWS FETCH NEXT {1} ROWS ONLY;", options.Skip, top); + + command.CommandText = sb.ToString(); + return command; + } + internal static StringBuilder AppendParameterName(this StringBuilder sb, VectorStoreRecordPropertyModel property, ref int paramIndex, out string parameterName) { // In SQL Server, parameter names cannot be just a number like "@1". @@ -516,11 +576,11 @@ private static SqlCommand CreateCommand(this SqlConnection connection, StringBui return command; } - private static void AddParameter(this SqlCommand command, VectorStoreRecordPropertyModel property, string name, object? value) + private static void AddParameter(this SqlCommand command, VectorStoreRecordPropertyModel? property, string name, object? 
value) { switch (value) { - case null when property.Type == typeof(byte[]): + case null when property?.Type == typeof(byte[]): command.Parameters.Add(name, System.Data.SqlDbType.VarBinary).Value = DBNull.Value; break; case null: diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index 605a1239f9ba..5076a3813277 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; @@ -532,4 +533,35 @@ private async IAsyncEnumerable> ReadVectorSearchResu connection.Dispose(); } } + + /// + public async IAsyncEnumerable GetAsync(Expression> filter, int top, + GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(filter); + Verify.NotLessThan(top, 1); + + options ??= new(); + + using SqlConnection connection = new(this._connectionString); + using SqlCommand command = SqlServerCommandBuilder.SelectWhere( + filter, + top, + options, + connection, + this._options.Schema, + this.CollectionName, + this._model); + + using SqlDataReader reader = await ExceptionWrapper.WrapAsync(connection, command, + static (cmd, ct) => cmd.ExecuteReaderAsync(ct), + "GetAsync", this._collectionMetadata.VectorStoreName, this.CollectionName, cancellationToken).ConfigureAwait(false); + + var vectorProperties = options.IncludeVectors ? this._model.VectorProperties : []; + StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = options.IncludeVectors }; + while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + yield return this._mapper.MapFromStorageToDataModel(new SqlDataReaderDictionary(reader, vectorProperties), mapperOptions); + } + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs index 01a9324a008d..caa33cba46cc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs @@ -8,6 +8,8 @@ using System.Linq; using System.Text; using Microsoft.Data.Sqlite; +using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Sqlite; @@ -184,6 +186,57 @@ .. 
rightTablePropertyNames.Select(property => $"{rightTable}.{property}"), return command; } + internal static DbCommand BuildSelectWhereCommand( + VectorStoreRecordModel model, + SqliteConnection connection, + int top, + GetFilteredRecordOptions options, + string table, + IReadOnlyList properties, + string whereFilter, + Dictionary whereParameters) + { + StringBuilder builder = new(200); + + var (command, whereClause) = GetCommandWithWhereClause(connection, Array.Empty(), whereFilter, whereParameters); + + builder.Append("SELECT "); + foreach (var property in properties) + { + if (options.IncludeVectors || property is not VectorStoreRecordVectorPropertyModel) + { + builder.AppendFormat("\"{0}\",", property.StorageName); + } + } + builder.Length--; // Remove the trailing comma + builder.AppendLine(); + + builder.AppendFormat("FROM {0}", table).AppendLine(); + builder.AppendFormat("WHERE {0}", whereClause).AppendLine(); + + if (options.OrderBy.Values.Count > 0) + { + builder.Append("ORDER BY "); + + foreach (var sortInfo in options.OrderBy.Values) + { + builder.AppendFormat("[{0}] {1},", + model.GetDataOrKeyProperty(sortInfo.PropertySelector).StorageName, + sortInfo.Ascending ? "ASC" : "DESC"); + } + + builder.Length--; // remove the last comma + builder.AppendLine(); + } + + builder.AppendFormat("LIMIT {0}", top).AppendLine(); + builder.AppendFormat("OFFSET {0}", options.Skip).AppendLine(); + + command.CommandText = builder.ToString(); + + return command; + } + public static DbCommand BuildDeleteCommand( SqliteConnection connection, string tableName, @@ -242,7 +295,7 @@ private static string GetColumnDefinition(SqliteColumn column) private static (DbCommand Command, string WhereClause) GetCommandWithWhereClause( SqliteConnection connection, - List conditions, + IReadOnlyList conditions, string? extraWhereFilter = null, Dictionary? extraParameters = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index f33c24f252ad..abc435cf3139 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -5,6 +5,7 @@ using System.Data.Common; using System.Diagnostics; using System.Linq; +using System.Linq.Expressions; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; @@ -260,6 +261,43 @@ public Task> VectorizedSearchAsync(TVector return Task.FromResult(vectorSearchResults); } + /// + public async IAsyncEnumerable GetAsync(Expression> filter, int top, GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(filter); + Verify.NotLessThan(top, 1); + + options ??= new(); + + SqliteFilterTranslator translator = new(this._model, filter); + translator.Translate(appendWhere: false); + + IReadOnlyList properties = options.IncludeVectors + ? this._model.Properties + : [this._model.KeyProperty, .. 
this._model.DataProperties]; + + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + using var command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectWhereCommand( + this._model, + connection, + top, + options, + this._dataTableName, + this._model.Properties, + translator.Clause.ToString(), + translator.Parameters); + + using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + yield return this.GetAndMapRecord( + "Get", + reader, + properties, + options.IncludeVectors); + } + } + #region Implementation of IVectorStoreRecordCollection /// @@ -730,7 +768,7 @@ private Task InternalDeleteBatchAsync(SqliteConnection connection, SqliteWhereCo private TRecord GetAndMapRecord( string operationName, DbDataReader reader, - List properties, + IReadOnlyList properties, bool includeVectors) { var storageModel = new Dictionary(); diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index 400c0d9f721b..fd6de088260d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Linq.Expressions; using System.Net; using System.Net.Http; using System.Net.Http.Headers; @@ -310,6 +311,29 @@ public async Task> VectorizedSearchAsync( return await this.ExecuteQueryAsync(query, searchOptions.IncludeVectors, WeaviateConstants.ScorePropertyName, OperationName, cancellationToken).ConfigureAwait(false); } + /// + public async IAsyncEnumerable GetAsync(Expression> filter, int top, + GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(filter); + Verify.NotLessThan(top, 1); + + options ??= new(); + + var query = WeaviateVectorStoreRecordCollectionQueryBuilder.BuildQuery( + filter, + top, + options, + this.CollectionName, + this._model); + + var results = await this.ExecuteQueryAsync(query, options.IncludeVectors, WeaviateConstants.ScorePropertyName, "GetAsync", cancellationToken).ConfigureAwait(false); + await foreach (var record in results.Results.ConfigureAwait(false)) + { + yield return record.Record; + } + } + /// public async Task> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs index 3d71dd7255eb..5ed91595af5f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs @@ -68,6 +68,51 @@ public static string BuildSearchQuery( """; } + /// + /// Builds Weaviate search query. + /// More information here: . + /// + public static string BuildQuery( + Expression> filter, + int top, + GetFilteredRecordOptions queryOptions, + string collectionName, + VectorStoreRecordModel model) + { + var vectorsQuery = queryOptions.IncludeVectors ? 
+ $"vectors {{ {string.Join(" ", model.VectorProperties.Select(p => p.StorageName))} }}" : + string.Empty; + + var sortPaths = string.Join(",", queryOptions.OrderBy.Values.Select(sortInfo => + { + string sortPath = model.GetDataOrKeyProperty(sortInfo.PropertySelector).StorageName; + + return $$"""{ path: ["{{sortPath}}"], order: {{(sortInfo.Ascending ? "asc" : "desc")}} }"""; + })); + + var translatedFilter = new WeaviateFilterTranslator().Translate(filter, model); + + return $$""" + { + Get { + {{collectionName}} ( + limit: {{top}} + offset: {{queryOptions.Skip}} + where: {{translatedFilter}} + sort: [ {{sortPaths}} ] + ) { + {{string.Join(" ", model.DataProperties.Select(p => p.StorageName))}} + {{WeaviateConstants.AdditionalPropertiesPropertyName}} { + {{WeaviateConstants.ReservedKeyPropertyName}} + {{WeaviateConstants.ScorePropertyName}} + {{vectorsQuery}} + } + } + } + } + """; + } + /// /// Builds Weaviate hybrid search query. /// More information here: . diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs index 2d9d82fa5637..5fd2f62e4282 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs @@ -27,8 +27,6 @@ public PostgresVectorStoreCollectionSqlBuilderTests(ITestOutputHelper output) [InlineData(false)] public void TestBuildCreateTableCommand(bool ifNotExists) { - var builder = new PostgresVectorStoreCollectionSqlBuilder(); - var recordDefinition = new VectorStoreRecordDefinition() { Properties = [ @@ -54,7 +52,7 @@ public void TestBuildCreateTableCommand(bool ifNotExists) var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(VectorStoreGenericDataModel), recordDefinition); - var cmdInfo = builder.BuildCreateTableCommand("public", "testcollection", model, ifNotExists: ifNotExists); + var cmdInfo = PostgresSqlBuilder.BuildCreateTableCommand("public", "testcollection", model, ifNotExists: ifNotExists); // Check for expected properties; integration tests will validate the actual SQL. 
Assert.Contains("public.\"testcollection\" (", cmdInfo.CommandText); @@ -87,18 +85,16 @@ public void TestBuildCreateTableCommand(bool ifNotExists) [InlineData(IndexKind.Hnsw, DistanceFunction.CosineDistance, false)] public void TestBuildCreateIndexCommand(string indexKind, string distanceFunction, bool ifNotExists) { - var builder = new PostgresVectorStoreCollectionSqlBuilder(); - var vectorColumn = "embedding1"; if (indexKind != IndexKind.Hnsw) { - Assert.Throws(() => builder.BuildCreateIndexCommand("public", "testcollection", vectorColumn, indexKind, distanceFunction, true, ifNotExists)); - Assert.Throws(() => builder.BuildCreateIndexCommand("public", "testcollection", vectorColumn, indexKind, distanceFunction, true, ifNotExists)); + Assert.Throws(() => PostgresSqlBuilder.BuildCreateIndexCommand("public", "testcollection", vectorColumn, indexKind, distanceFunction, true, ifNotExists)); + Assert.Throws(() => PostgresSqlBuilder.BuildCreateIndexCommand("public", "testcollection", vectorColumn, indexKind, distanceFunction, true, ifNotExists)); return; } - var cmdInfo = builder.BuildCreateIndexCommand("public", "1testcollection", vectorColumn, indexKind, distanceFunction, true, ifNotExists); + var cmdInfo = PostgresSqlBuilder.BuildCreateIndexCommand("public", "1testcollection", vectorColumn, indexKind, distanceFunction, true, ifNotExists); // Check for expected properties; integration tests will validate the actual SQL. Assert.Contains("CREATE INDEX ", cmdInfo.CommandText); @@ -141,9 +137,7 @@ public void TestBuildCreateIndexCommand(string indexKind, string distanceFunctio [InlineData(false)] public void TestBuildCreateNonVectorIndexCommand(bool ifNotExists) { - var builder = new PostgresVectorStoreCollectionSqlBuilder(); - - var cmdInfo = builder.BuildCreateIndexCommand("schema", "tableName", "columnName", indexKind: "", distanceFunction: "", isVector: false, ifNotExists); + var cmdInfo = PostgresSqlBuilder.BuildCreateIndexCommand("schema", "tableName", "columnName", indexKind: "", distanceFunction: "", isVector: false, ifNotExists); var expectedCommandText = ifNotExists ? "CREATE INDEX IF NOT EXISTS \"tableName_columnName_index\" ON schema.\"tableName\" (\"columnName\");" @@ -155,9 +149,7 @@ public void TestBuildCreateNonVectorIndexCommand(bool ifNotExists) [Fact] public void TestBuildDropTableCommand() { - var builder = new PostgresVectorStoreCollectionSqlBuilder(); - - var cmdInfo = builder.BuildDropTableCommand("public", "testcollection"); + var cmdInfo = PostgresSqlBuilder.BuildDropTableCommand("public", "testcollection"); // Check for expected properties; integration tests will validate the actual SQL. Assert.Contains("DROP TABLE IF EXISTS public.\"testcollection\"", cmdInfo.CommandText); @@ -169,8 +161,6 @@ public void TestBuildDropTableCommand() [Fact] public void TestBuildUpsertCommand() { - var builder = new PostgresVectorStoreCollectionSqlBuilder(); - var row = new Dictionary() { ["id"] = 123, @@ -185,7 +175,7 @@ public void TestBuildUpsertCommand() var keyColumn = "id"; - var cmdInfo = builder.BuildUpsertCommand("public", "testcollection", keyColumn, row); + var cmdInfo = PostgresSqlBuilder.BuildUpsertCommand("public", "testcollection", keyColumn, row); // Check for expected properties; integration tests will validate the actual SQL. 
Assert.Contains("INSERT INTO public.\"testcollection\" (", cmdInfo.CommandText); @@ -210,8 +200,6 @@ public void TestBuildUpsertCommand() [Fact] public void TestBuildUpsertBatchCommand() { - var builder = new PostgresVectorStoreCollectionSqlBuilder(); - var rows = new List>() { new() @@ -241,7 +229,7 @@ public void TestBuildUpsertBatchCommand() var keyColumn = "id"; var columnCount = rows.First().Count; - var cmdInfo = builder.BuildUpsertBatchCommand("public", "testcollection", keyColumn, rows); + var cmdInfo = PostgresSqlBuilder.BuildUpsertBatchCommand("public", "testcollection", keyColumn, rows); // Check for expected properties; integration tests will validate the actual SQL. Assert.Contains("INSERT INTO public.\"testcollection\" (", cmdInfo.CommandText); @@ -270,8 +258,6 @@ public void TestBuildUpsertBatchCommand() public void TestBuildGetCommand() { // Arrange - var builder = new PostgresVectorStoreCollectionSqlBuilder(); - var recordDefinition = new VectorStoreRecordDefinition() { Properties = [ @@ -300,7 +286,7 @@ public void TestBuildGetCommand() var key = 123; // Act - var cmdInfo = builder.BuildGetCommand("public", "testcollection", model, key, includeVectors: true); + var cmdInfo = PostgresSqlBuilder.BuildGetCommand("public", "testcollection", model, key, includeVectors: true); // Assert Assert.Contains("SELECT", cmdInfo.CommandText); @@ -317,8 +303,6 @@ public void TestBuildGetCommand() public void TestBuildGetBatchCommand() { // Arrange - var builder = new PostgresVectorStoreCollectionSqlBuilder(); - var recordDefinition = new VectorStoreRecordDefinition() { Properties = [ @@ -347,7 +331,7 @@ public void TestBuildGetBatchCommand() var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(VectorStoreGenericDataModel), recordDefinition); // Act - var cmdInfo = builder.BuildGetBatchCommand("public", "testcollection", model, keys, includeVectors: true); + var cmdInfo = PostgresSqlBuilder.BuildGetBatchCommand("public", "testcollection", model, keys, includeVectors: true); // Assert Assert.Contains("SELECT", cmdInfo.CommandText); @@ -367,12 +351,10 @@ public void TestBuildGetBatchCommand() public void TestBuildDeleteCommand() { // Arrange - var builder = new PostgresVectorStoreCollectionSqlBuilder(); - var key = 123; // Act - var cmdInfo = builder.BuildDeleteCommand("public", "testcollection", "id", key); + var cmdInfo = PostgresSqlBuilder.BuildDeleteCommand("public", "testcollection", "id", key); // Assert Assert.Contains("DELETE", cmdInfo.CommandText); @@ -387,12 +369,10 @@ public void TestBuildDeleteCommand() public void TestBuildDeleteBatchCommand() { // Arrange - var builder = new PostgresVectorStoreCollectionSqlBuilder(); - var keys = new List { 123, 124 }; // Act - var cmdInfo = builder.BuildDeleteBatchCommand("public", "testcollection", "id", keys); + var cmdInfo = PostgresSqlBuilder.BuildDeleteBatchCommand("public", "testcollection", "id", keys); // Assert Assert.Contains("DELETE", cmdInfo.CommandText); diff --git a/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml index 486b1be80c27..78a93ffa29c0 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml +++ b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml @@ -316,6 +316,13 @@ lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0006 + 
M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetAsync(System.Linq.Expressions.Expression{System.Func{`1,System.Boolean}},System.Int32,Microsoft.Extensions.VectorData.GetFilteredRecordOptions{`1},System.Threading.CancellationToken) + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0006 M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(System.Collections.Generic.IEnumerable{`1},System.Threading.CancellationToken) @@ -386,6 +393,13 @@ lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetAsync(System.Linq.Expressions.Expression{System.Func{`1,System.Boolean}},System.Int32,Microsoft.Extensions.VectorData.GetFilteredRecordOptions{`1},System.Threading.CancellationToken) + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0006 M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(System.Collections.Generic.IEnumerable{`1},System.Threading.CancellationToken) @@ -456,6 +470,13 @@ lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0006 + M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.GetAsync(System.Linq.Expressions.Expression{System.Func{`1,System.Boolean}},System.Int32,Microsoft.Extensions.VectorData.GetFilteredRecordOptions{`1},System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + CP0006 M:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2.UpsertAsync(System.Collections.Generic.IEnumerable{`1},System.Threading.CancellationToken) diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs index 6f6292eda944..cea74a78218b 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs @@ -31,7 +31,7 @@ public sealed class VectorStoreRecordModel public IReadOnlyList KeyProperties { get; } /// - /// The dataproperties of the record. + /// The data properties of the record. /// public IReadOnlyList DataProperties { get; } @@ -115,7 +115,7 @@ public VectorStoreRecordVectorPropertyModel GetVectorPropertyOrSingle(V } else if (searchOptions.VectorProperty is Expression> expression) { - return this.GetMatchingProperty(expression); + return this.GetMatchingProperty(expression, data: false); } } @@ -139,7 +139,7 @@ public VectorStoreRecordDataPropertyModel GetFullTextDataPropertyOrSingle(expression); + var property = this.GetMatchingProperty(expression, data: true); return property.IsFullTextSearchable ? property @@ -165,10 +165,17 @@ public VectorStoreRecordDataPropertyModel GetFullTextDataPropertyOrSingle(Expression> expression) + /// + /// Get the data or key property selected by provided expression. + /// + /// The property selector. + /// Thrown if the provided property name is not a valid data or key property name. 
+ public VectorStoreRecordPropertyModel GetDataOrKeyProperty(Expression> expression) + => this.GetMatchingProperty(expression, data: true); + + private TProperty GetMatchingProperty(Expression> expression, bool data) where TProperty : VectorStoreRecordPropertyModel { - bool data = typeof(TProperty) == typeof(VectorStoreRecordDataProperty); string expectedGenericModelPropertyName = data ? nameof(VectorStoreGenericDataModel.Data) : nameof(VectorStoreGenericDataModel.Vectors); diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetFilteredRecordOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetFilteredRecordOptions.cs new file mode 100644 index 000000000000..6843061e369a --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordOptions/GetFilteredRecordOptions.cs @@ -0,0 +1,113 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq.Expressions; + +namespace Microsoft.Extensions.VectorData; + +/// +/// Defines options for filter search. +/// +/// Type of the record. +public sealed class GetFilteredRecordOptions +{ + private int _skip = 0; + + /// + /// Gets or sets the number of results to skip before returning results, that is, the index of the first result to return. + /// + /// Thrown when the value is less than 0. + public int Skip + { + get => this._skip; + init + { + if (value < 0) + { + throw new ArgumentOutOfRangeException(nameof(value), "Skip must be greater than or equal to 0."); + } + + this._skip = value; + } + } + + /// + /// Gets or sets the data property to order by. + /// + /// + /// If not provided, the order of returned results is non-deterministic. + /// + public OrderByDefinition OrderBy { get; } = new(); + + /// + /// Gets or sets a value indicating whether to include vectors in the retrieval result. + /// + public bool IncludeVectors { get; init; } = false; + + /// + /// A builder for sorting. + /// + // This type does not derive any collection in order to avoid Intellisense suggesting LINQ methods. + public sealed class OrderByDefinition + { + private readonly List _values = new(); + + /// + /// Gets the expressions to sort by. + /// + /// This property is intended to be consumed by the connectors to retrieve the configuration. + public IReadOnlyList Values => this._values; + + /// + /// Creates an ascending sort. + /// + public OrderByDefinition Ascending(Expression> propertySelector) + { + if (propertySelector is null) + { + throw new ArgumentNullException(nameof(propertySelector)); + } + + this._values.Add(new(propertySelector, true)); + return this; + } + + /// + /// Creates a descending sort. + /// + public OrderByDefinition Descending(Expression> propertySelector) + { + if (propertySelector is null) + { + throw new ArgumentNullException(nameof(propertySelector)); + } + + this._values.Add(new(propertySelector, false)); + return this; + } + + /// + /// Provides a way to define property ordering. + /// + /// This class is intended to be consumed by the connectors to retrieve the configuration. + public sealed class SortInfo + { + internal SortInfo(Expression> propertySelector, bool isAscending) + { + this.PropertySelector = propertySelector; + this.Ascending = isAscending; + } + + /// + /// The expression to select the property to sort by. + /// + public Expression> PropertySelector { get; } + + /// + /// True if the sort is ascending; otherwise, false. 
+ /// + public bool Ascending { get; } + } + } +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs index b834176834f3..572945ccd782 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs @@ -1,6 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; +using System.Linq.Expressions; using System.Threading; using System.Threading.Tasks; @@ -129,4 +131,16 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch /// The command fails to execute for any reason. /// The mapping between the storage model and record data model fails. IAsyncEnumerable UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default); + + /// + /// Gets matching records from the vector store. Does not guarantee that the collection exists. + /// + /// The predicate to filter the records. + /// The maximum number of results to return. + /// Options for retrieving the records. + /// The to monitor for cancellation requests. The default is . + /// The records matching given predicate. + /// The command fails to execute for any reason. + /// The mapping between the storage model and record data model fails. + IAsyncEnumerable GetAsync(Expression> filter, int top, GetFilteredRecordOptions? options = null, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/AzureAISearchIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/AzureAISearchIntegrationTests.csproj index 1572f6821fd8..0aa90f87ee3e 100644 --- a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/AzureAISearchIntegrationTests.csproj +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/AzureAISearchIntegrationTests.csproj @@ -16,8 +16,8 @@ runtime; build; native; contentfiles; analyzers; buildtransitive all - + diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicQueryTests.cs new file mode 100644 index 000000000000..2a38ca59dacc --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicQueryTests.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. + +using AzureAISearchIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace AzureAISearchIntegrationTests.Filter; + +public class AzureAISearchBasicQueryTests(AzureAISearchBasicQueryTests.Fixture fixture) + : BasicQueryTests(fixture), IClassFixture +{ + // Azure AI Search only supports search.in() over strings + public override Task Contains_over_inline_int_array() + => Assert.ThrowsAsync(() => base.Contains_over_inline_int_array()); + + public new class Fixture : BasicQueryTests.QueryFixture + { + public override TestStore TestStore => AzureAISearchTestStore.Instance; + + // Azure AI search only supports lowercase letters, digits or dashes. 
+ protected override string CollectionName => "query-tests"; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicQueryTests.cs new file mode 100644 index 000000000000..8a5bc6f70be5 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicQueryTests.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft. All rights reserved. + +using CosmosMongoDBIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace CosmosMongoDBIntegrationTests.Filter; + +public class CosmosMongoBasicQueryTests(CosmosMongoBasicQueryTests.Fixture fixture) + : BasicQueryTests(fixture), IClassFixture +{ + // Specialized MongoDB syntax for NOT over Contains ($nin) + [ConditionalFact] + public virtual Task Not_over_Contains() + => this.TestFilterAsync(r => !new[] { 8, 10 }.Contains(r.Int)); + + // MongoDB currently doesn't support null checking ({ "Foo" : null }) in vector search pre-filters + public override Task Equal_with_null_reference_type() + => Assert.ThrowsAsync(() => base.Equal_with_null_reference_type()); + + public override Task Equal_with_null_captured() + => Assert.ThrowsAsync(() => base.Equal_with_null_captured()); + + public override Task NotEqual_with_null_reference_type() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_reference_type()); + + public override Task NotEqual_with_null_captured() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_captured()); + + // MongoDB currently doesn't support NOT in vector search pre-filters + // (https://www.mongodb.com/docs/atlas/atlas-vector-search/vector-search-stage/#atlas-vector-search-pre-filter) + public override Task Not_over_And() + => Assert.ThrowsAsync(() => base.Not_over_And()); + + public override Task Not_over_Or() + => Assert.ThrowsAsync(() => base.Not_over_Or()); + + public override Task Contains_over_field_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_array()); + + public override Task Contains_over_field_string_List() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_List()); + + public new class Fixture : BasicQueryTests.QueryFixture + { + public override TestStore TestStore => CosmosMongoDBTestStore.Instance; + + protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.IvfFlat; + protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicQueryTests.cs new file mode 100644 index 000000000000..5d2d776c4757 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicQueryTests.cs @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Linq.Expressions; +using CosmosNoSQLIntegrationTests.Support; +using Microsoft.Extensions.VectorData; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace CosmosNoSQLIntegrationTests.Filter; + +public class CosmosNoSQLBasicQueryTests(CosmosNoSQLBasicQueryTests.Fixture fixture) + : BasicQueryTests(fixture), IClassFixture +{ + // CosmosDB supports ordering by multiple fields only when a composite index is created up-front: + // https://learn.microsoft.com/en-us/azure/cosmos-db/index-policy#composite-indexes + // The index requires the order to be also provided up front (ASC or DESC), + // we don't expose API for such customization, so for now we just order by one field. + protected override List GetOrderedRecords(IQueryable filtered) + => filtered.OrderBy(r => r.Int2).ToList(); + + protected override async Task> GetResults(IVectorStoreRecordCollection collection, Expression> filter, int top) + { + GetFilteredRecordOptions options = new(); + + options.OrderBy.Ascending(r => r.Int2); + + return await collection.GetAsync(filter, top, options).ToListAsync(); + } + + public new class Fixture : BasicQueryTests.QueryFixture + { + public override TestStore TestStore => CosmosNoSqlTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicQueryTests.cs new file mode 100644 index 000000000000..c7db1f873dca --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicQueryTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using InMemoryIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace PostgresIntegrationTests.Filter; + +public class InMemoryBasicQueryTests(InMemoryBasicQueryTests.Fixture fixture) + : BasicQueryTests(fixture), IClassFixture +{ + public new class Fixture : BasicQueryTests.QueryFixture + { + public override TestStore TestStore => InMemoryTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicQueryTests.cs new file mode 100644 index 000000000000..c3893dee6a15 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicQueryTests.cs @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using MongoDBIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace MongoDBIntegrationTests.Filter; + +public class MongoDBBasicQueryTests(MongoDBBasicQueryTests.Fixture fixture) + : BasicQueryTests(fixture), IClassFixture +{ + // Specialized MongoDB syntax for NOT over Contains ($nin) + [ConditionalFact] + public virtual Task Not_over_Contains() + => this.TestFilterAsync(r => !new[] { 8, 10 }.Contains(r.Int)); + + // MongoDB currently doesn't support null checking ({ "Foo" : null }) in vector search pre-filters + public override Task Equal_with_null_reference_type() + => Assert.ThrowsAsync(() => base.Equal_with_null_reference_type()); + + public override Task Equal_with_null_captured() + => Assert.ThrowsAsync(() => base.Equal_with_null_captured()); + + public override Task NotEqual_with_null_reference_type() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_reference_type()); + + public override Task NotEqual_with_null_captured() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_captured()); + + // MongoDB currently doesn't support NOT in vector search pre-filters + // (https://www.mongodb.com/docs/atlas/atlas-vector-search/vector-search-stage/#atlas-vector-search-pre-filter) + public override Task Not_over_And() + => Assert.ThrowsAsync(() => base.Not_over_And()); + + public override Task Not_over_Or() + => Assert.ThrowsAsync(() => base.Not_over_Or()); + + public override Task Contains_over_field_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_array()); + + public override Task Contains_over_field_string_List() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_List()); + + public new class Fixture : BasicQueryTests.QueryFixture + { + public override TestStore TestStore => MongoDBTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicQueryTests.cs new file mode 100644 index 000000000000..019435ce7ab5 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicQueryTests.cs @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Linq.Expressions; +using Microsoft.Extensions.VectorData; +using PineconeIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace PineconeIntegrationTests.Filter; + +public class PineconeBasicQueryTests(PineconeBasicQueryTests.Fixture fixture) + : BasicQueryTests(fixture), IClassFixture +{ + protected override async Task> GetResults(IVectorStoreRecordCollection collection, Expression> filter, int top) + // Pinecone doesn't support OrderBy in GetAsync, so we have to sort the results manually + => (await collection.GetAsync(filter, top).ToListAsync()).OrderBy(r => r.Int).ThenByDescending(r => r.String).ToList(); + + // Specialized Pinecone syntax for NOT over Contains ($nin) + [ConditionalFact] + public virtual Task Not_over_Contains() + => this.TestFilterAsync(r => !new[] { 8, 10 }.Contains(r.Int)); + + // Pinecone currently doesn't support null checking ({ "Foo" : null }) in vector search pre-filters + public override Task Equal_with_null_reference_type() + => Assert.ThrowsAsync(() => base.Equal_with_null_reference_type()); + + public override Task Equal_with_null_captured() + => Assert.ThrowsAsync(() => base.Equal_with_null_captured()); + + public override Task NotEqual_with_null_reference_type() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_reference_type()); + + public override Task NotEqual_with_null_captured() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_captured()); + + // Pinecone currently doesn't support NOT in vector search pre-filters + // (https://www.mongodb.com/docs/atlas/atlas-vector-search/vector-search-stage/#atlas-vector-search-pre-filter) + public override Task Not_over_And() + => Assert.ThrowsAsync(() => base.Not_over_And()); + + public override Task Not_over_Or() + => Assert.ThrowsAsync(() => base.Not_over_Or()); + + public override Task Contains_over_field_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_array()); + + public override Task Contains_over_field_string_List() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_List()); + + public new class Fixture : BasicQueryTests.QueryFixture + { + public override TestStore TestStore => PineconeTestStore.Instance; + + // https://docs.pinecone.io/troubleshooting/restrictions-on-index-names + protected override string CollectionName => "query-tests"; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicQueryTests.cs new file mode 100644 index 000000000000..13d6e8d09579 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicQueryTests.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft. All rights reserved. + +using PostgresIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using Xunit; +using Xunit.Sdk; + +namespace PostgresIntegrationTests.Filter; + +public class PostgresBasicQueryTests(PostgresBasicQueryTests.Fixture fixture) + : BasicQueryTests(fixture), IClassFixture +{ + public override async Task Not_over_Or() + { + // Test sends: WHERE (NOT (("Int" = 8) OR ("String" = 'foo'))) + // There's a NULL string in the database, and relational null semantics in conjunction with negation makes the default implementation fail. 
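    // (Illustrative note, not part of the upstream test:) under SQL's three-valued logic,
    // a row where "String" IS NULL makes ("String" = 'foo') evaluate to UNKNOWN, and
    // NOT UNKNOWN is still UNKNOWN, so the WHERE clause drops that row. The LINQ-to-objects
    // baseline keeps it, because there null != "foo" is simply true - hence the mismatch
    // that the compensating null check below works around.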
+ await Assert.ThrowsAsync(() => base.Not_over_Or()); + + // Compensate by adding a null check: + await this.TestFilterAsync(r => r.String != null && !(r.Int == 8 || r.String == "foo")); + } + + public override async Task NotEqual_with_string() + { + // As above, null semantics + negation + await Assert.ThrowsAsync(() => base.NotEqual_with_string()); + + await this.TestFilterAsync(r => r.String != null && r.String != "foo"); + } + + public new class Fixture : BasicQueryTests.QueryFixture + { + public override TestStore TestStore => PostgresTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicQueryTests.cs new file mode 100644 index 000000000000..f500a33abe92 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicQueryTests.cs @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Linq.Expressions; +using Microsoft.Extensions.VectorData; +using QdrantIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace QdrantIntegrationTests.Filter; + +public class QdrantBasicQueryTests(QdrantBasicQueryTests.Fixture fixture) + : BasicQueryTests(fixture), IClassFixture +{ + // Qdrant does not support ordering by multiple fields, so we order by only one field. + protected override List GetOrderedRecords(IQueryable filtered) + => filtered.OrderBy(r => r.Int2).ToList(); + + protected override async Task> GetResults(IVectorStoreRecordCollection collection, Expression> filter, int top) + { + GetFilteredRecordOptions options = new(); + + options.OrderBy.Ascending(r => r.Int2); + + return await collection.GetAsync(filter, top, options).ToListAsync(); + } + + public new class Fixture : BasicQueryTests.QueryFixture + { + public override TestStore TestStore => QdrantTestStore.NamedVectorsInstance; + + // Qdrant doesn't support the default Flat index kind + protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.Hnsw; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicQueryTests.cs new file mode 100644 index 000000000000..94d859233faa --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicQueryTests.cs @@ -0,0 +1,164 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Linq.Expressions; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel.Connectors.Redis; +using RedisIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using Xunit; +using Xunit.Sdk; + +namespace RedisIntegrationTests.Filter; + +public abstract class RedisBasicQueryTests(BasicQueryTests.QueryFixture fixture) + : BasicQueryTests(fixture) +{ + // Redis does not support ordering by multiple fields, so we order by only one field. 
+ protected override List GetOrderedRecords(IQueryable filtered) + => filtered.OrderBy(r => r.Int2).ToList(); + + protected override async Task> GetResults(IVectorStoreRecordCollection collection, Expression> filter, int top) + { + GetFilteredRecordOptions options = new(); + + options.OrderBy.Ascending(r => r.Int2); + + return await collection.GetAsync(filter, top, options).ToListAsync(); + } + + #region Equality with null + + public override Task Equal_with_null_reference_type() + => Assert.ThrowsAsync(() => base.Equal_with_null_reference_type()); + + public override Task Equal_with_null_captured() + => Assert.ThrowsAsync(() => base.Equal_with_null_captured()); + + public override Task NotEqual_with_null_reference_type() + => Assert.ThrowsAsync(() => base.Equal_with_null_reference_type()); + + public override Task NotEqual_with_null_captured() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_captured()); + + #endregion + + #region Bool + + public override Task Bool() + => Assert.ThrowsAsync(() => base.Bool()); + + public override Task Not_over_bool() + => Assert.ThrowsAsync(() => base.Not_over_bool()); + + public override Task Bool_And_Bool() + => Assert.ThrowsAsync(() => base.Bool_And_Bool()); + + public override Task Bool_Or_Not_Bool() + => Assert.ThrowsAsync(() => base.Bool_Or_Not_Bool()); + + public override Task Not_over_bool_And_Comparison() + => Assert.ThrowsAsync(() => base.Not_over_bool_And_Comparison()); + + #endregion + + #region Contains + + public override Task Contains_over_inline_int_array() + => Assert.ThrowsAsync(() => base.Contains_over_inline_int_array()); + + public override Task Contains_over_inline_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_inline_string_array()); + + public override Task Contains_over_inline_string_array_with_weird_chars() + => Assert.ThrowsAsync(() => base.Contains_over_inline_string_array_with_weird_chars()); + + public override Task Contains_over_captured_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_captured_string_array()); + + #endregion +} + +public class RedisJsonCollectionBasicQueryTests(RedisJsonCollectionBasicQueryTests.Fixture fixture) + : RedisBasicQueryTests(fixture), IClassFixture +{ + public new class Fixture : BasicQueryTests.QueryFixture + { + public override TestStore TestStore => RedisTestStore.JsonInstance; + + protected override string CollectionName => "JsonCollectionQueryTests"; + + // Override to remove the bool property, which isn't (currently) supported on Redis/JSON + protected override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(bool)).ToList() + }; + + protected override IVectorStoreRecordCollection CreateCollection() + => new RedisJsonVectorStoreRecordCollection( + RedisTestStore.JsonInstance.Database, + this.CollectionName, + new() { VectorStoreRecordDefinition = this.GetRecordDefinition() }); + } +} + +public class RedisHashSetCollectionBasicQueryTests(RedisHashSetCollectionBasicQueryTests.Fixture fixture) + : RedisBasicQueryTests(fixture), IClassFixture +{ + // Null values are not supported in Redis HashSet + public override Task Equal_with_null_reference_type() + => Assert.ThrowsAsync(() => base.Equal_with_null_reference_type()); + + public override Task Equal_with_null_captured() + => Assert.ThrowsAsync(() => base.Equal_with_null_captured()); + + public override Task NotEqual_with_null_reference_type() + => Assert.ThrowsAsync(() => 
base.NotEqual_with_null_reference_type()); + + public override Task NotEqual_with_null_captured() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_captured()); + + // Array fields not supported on Redis HashSet + public override Task Contains_over_field_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_array()); + + public override Task Contains_over_field_string_List() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_List()); + + public new class Fixture : BasicQueryTests.QueryFixture + { + public override TestStore TestStore => RedisTestStore.HashSetInstance; + + protected override string CollectionName => "HashSetCollectionQueryTests"; + + // Override to remove the bool property, which isn't (currently) supported on Redis + protected override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = base.GetRecordDefinition().Properties.Where(p => + p.PropertyType != typeof(bool) && + p.PropertyType != typeof(string[]) && + p.PropertyType != typeof(List)).ToList() + }; + + protected override IVectorStoreRecordCollection CreateCollection() + => new RedisHashSetVectorStoreRecordCollection( + RedisTestStore.HashSetInstance.Database, + this.CollectionName, + new() { VectorStoreRecordDefinition = this.GetRecordDefinition() }); + + protected override List BuildTestData() + { + var testData = base.BuildTestData(); + + foreach (var record in testData) + { + // Null values are not supported in Redis hashsets + record.String ??= string.Empty; + } + + return testData; + } + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicQueryTests.cs new file mode 100644 index 000000000000..101e5f3b57cf --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicQueryTests.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using SqlServerIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using Xunit; +using Xunit.Sdk; + +namespace SqlServerIntegrationTests.Filter; + +public class SqlServerBasicQueryTests(SqlServerBasicQueryTests.Fixture fixture) + : BasicQueryTests(fixture), IClassFixture +{ + public override async Task Not_over_Or() + { + // Test sends: WHERE (NOT (("Int" = 8) OR ("String" = 'foo'))) + // There's a NULL string in the database, and relational null semantics in conjunction with negation makes the default implementation fail. 
+ await Assert.ThrowsAsync(() => base.Not_over_Or()); + + // Compensate by adding a null check: + await this.TestFilterAsync(r => r.String != null && !(r.Int == 8 || r.String == "foo")); + } + + public override async Task NotEqual_with_string() + { + // As above, null semantics + negation + await Assert.ThrowsAsync(() => base.NotEqual_with_string()); + + await this.TestFilterAsync(r => r.String != null && r.String != "foo"); + } + + public override Task Contains_over_field_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_array()); + + public override Task Contains_over_field_string_List() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_List()); + + public new class Fixture : BasicQueryTests.QueryFixture + { + private static readonly string s_uniqueName = Guid.NewGuid().ToString(); + + public override TestStore TestStore => SqlServerTestStore.Instance; + + protected override string CollectionName => s_uniqueName; + + // Override to remove the string collection properties, which aren't (currently) supported on SqlServer + protected override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(string[]) && p.PropertyType != typeof(List)).ToList() + }; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicQueryTests.cs new file mode 100644 index 000000000000..42f6215ab696 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicQueryTests.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using SqliteIntegrationTests.Support; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using Xunit; +using Xunit.Sdk; + +namespace SqliteIntegrationTests.Filter; + +public class SqliteBasicQueryTests(SqliteBasicQueryTests.Fixture fixture) + : BasicQueryTests(fixture), IClassFixture +{ + public override async Task Not_over_Or() + { + // Test sends: WHERE (NOT (("Int" = 8) OR ("String" = 'foo'))) + // There's a NULL string in the database, and relational null semantics in conjunction with negation makes the default implementation fail. 
+ await Assert.ThrowsAsync(() => base.Not_over_Or()); + + // Compensate by adding a null check: + await this.TestFilterAsync(r => r.String != null && !(r.Int == 8 || r.String == "foo")); + } + + public override async Task NotEqual_with_string() + { + // As above, null semantics + negation + await Assert.ThrowsAsync(() => base.NotEqual_with_string()); + + await this.TestFilterAsync(r => r.String != null && r.String != "foo"); + } + + // Array fields not (currently) supported on SQLite (see #10343) + public override Task Contains_over_field_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_array()); + + // List fields not (currently) supported on SQLite (see #10343) + public override Task Contains_over_field_string_List() + => Assert.ThrowsAsync(() => base.Contains_over_field_string_List()); + + public new class Fixture : BasicQueryTests.QueryFixture + { + public override TestStore TestStore => SqliteTestStore.Instance; + + protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; + + // Override to remove the string array property, which isn't (currently) supported on SQLite + protected override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(string[]) && p.PropertyType != typeof(List)).ToList() + }; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs index 18c28e089c48..a59bb04f7d8d 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs @@ -226,12 +226,15 @@ public virtual Task Legacy_AnyTagEqualTo_List() #endregion Legacy filter support + protected virtual List GetOrderedRecords(IQueryable filtered) + => filtered.OrderBy(r => r.Key).ToList(); + protected virtual async Task TestFilterAsync( Expression> filter, bool expectZeroResults = false, bool expectAllResults = false) { - var expected = fixture.TestData.AsQueryable().Where(filter).OrderBy(r => r.Key).ToList(); + var expected = this.GetOrderedRecords(fixture.TestData.AsQueryable().Where(filter)); if (expected.Count == 0 && !expectZeroResults) { @@ -243,7 +246,17 @@ protected virtual async Task TestFilterAsync( Assert.Fail("The test returns all results, and so is unreliable"); } - var results = await fixture.Collection.VectorizedSearchAsync( + var actual = await this.GetResults(fixture.Collection, filter, fixture.TestData.Count); + + Assert.Equal(expected, actual, (e, a) => + e.Int == a.Int && + e.String == a.String && + e.Int2 == a.Int2); + } + + protected virtual async Task> GetResults(IVectorStoreRecordCollection collection, Expression> filter, int top) + { + var results = await collection.VectorizedSearchAsync( new ReadOnlyMemory([1, 2, 3]), top: fixture.TestData.Count, new() @@ -251,12 +264,7 @@ protected virtual async Task TestFilterAsync( Filter = filter }); - var actual = await results.Results.Select(r => r.Record).OrderBy(r => r.Key).ToListAsync(); - - Assert.Equal(expected, actual, (e, a) => - e.Int == a.Int && - e.String == a.String && - e.Int2 == a.Int2); + return await results.Results.Select(r => r.Record).OrderBy(r => r.Key).ToListAsync(); } [Obsolete("Legacy filter support")] @@ -315,6 +323,11 @@ public abstract class Fixture : 
VectorStoreCollectionFixture { protected override string CollectionName => "FilterTests"; + protected virtual ReadOnlyMemory GetVector(int count) + // All records have the same vector - this fixture is about testing criteria filtering only + // Derived types may override this to provide different vectors for different records. + => new(Enumerable.Range(1, count).Select(i => (float)i).ToArray()); + protected override VectorStoreRecordDefinition GetRecordDefinition() => new() { @@ -339,9 +352,6 @@ protected override VectorStoreRecordDefinition GetRecordDefinition() protected override List BuildTestData() { - // All records have the same vector - this fixture is about testing criteria filtering only - var vector = new ReadOnlyMemory([1, 2, 3]); - return [ new() @@ -353,7 +363,7 @@ protected override List BuildTestData() Int2 = 80, StringArray = ["x", "y"], StringList = ["x", "y"], - Vector = vector + Vector = this.GetVector(3) }, new() { @@ -364,7 +374,7 @@ protected override List BuildTestData() Int2 = 90, StringArray = ["a", "b"], StringList = ["a", "b"], - Vector = vector + Vector = this.GetVector(3) }, new() { @@ -375,7 +385,7 @@ protected override List BuildTestData() Int2 = 9, StringArray = ["x"], StringList = ["x"], - Vector = vector + Vector = this.GetVector(3) }, new() { @@ -386,7 +396,7 @@ protected override List BuildTestData() Int2 = 100, StringArray = ["x", "y", "z"], StringList = ["x", "y", "z"], - Vector = vector + Vector = this.GetVector(3) }, new() { @@ -397,7 +407,7 @@ protected override List BuildTestData() Int2 = 101, StringArray = ["y", "z"], StringList = ["y", "z"], - Vector = vector + Vector = this.GetVector(3) } ]; } diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs new file mode 100644 index 000000000000..31f21ae2a66b --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Linq.Expressions; +using Microsoft.Extensions.VectorData; + +namespace VectorDataSpecificationTests.Filter; + +public abstract class BasicQueryTests(BasicQueryTests.QueryFixture fixture) + : BasicFilterTests(fixture) where TKey : notnull +{ + // Not all of the connectors allow to sort by the Key, so we sort by the Int. 
+ protected override List GetOrderedRecords(IQueryable filtered) + => filtered.OrderBy(r => r.Int).ThenByDescending(r => r.String).ToList(); + + protected override async Task> GetResults(IVectorStoreRecordCollection collection, + Expression> filter, int top) + { + GetFilteredRecordOptions options = new(); + + options.OrderBy + .Ascending(r => r.Int) + .Descending(r => r.String); + + return await collection.GetAsync(filter, top, options).ToListAsync(); + } + + [Obsolete("Not used by derived types")] + public sealed override Task Legacy_And() => Task.CompletedTask; + + [Obsolete("Not used by derived types")] + public sealed override Task Legacy_equality() => Task.CompletedTask; + + [Obsolete("Not used by derived types")] + public sealed override Task Legacy_AnyTagEqualTo_array() => Task.CompletedTask; + + [Obsolete("Not used by derived types")] + public sealed override Task Legacy_AnyTagEqualTo_List() => Task.CompletedTask; + + public abstract class QueryFixture : BasicFilterTests.Fixture + { + private static readonly Random s_random = new(); + + protected override string CollectionName => "QueryTests"; + + /// + /// Use random vectors to make sure that the values don't matter for GetAsync. + /// + protected override ReadOnlyMemory GetVector(int count) +#pragma warning disable CA5394 // Do not use insecure randomness + => new(Enumerable.Range(0, count).Select(_ => (float)s_random.NextDouble()).ToArray()); +#pragma warning restore CA5394 // Do not use insecure randomness + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs new file mode 100644 index 000000000000..1f79a826a56a --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using VectorDataSpecificationTests.Filter; +using VectorDataSpecificationTests.Support; +using WeaviateIntegrationTests.Support; +using Xunit; +using Xunit.Sdk; + +namespace WeaviateIntegrationTests.Filter; + +public class WeaviateBasicQueryTests(WeaviateBasicQueryTests.Fixture fixture) + : BasicQueryTests(fixture), IClassFixture +{ + #region Filter by null + + // Null-state indexing needs to be set up, but that's not supported yet (#10358). + // We could interact with Weaviate directly (not via the abstraction) to do this. 
+ + public override Task Equal_with_null_reference_type() + => Assert.ThrowsAsync(() => base.Equal_with_null_reference_type()); + + public override Task Equal_with_null_captured() + => Assert.ThrowsAsync(() => base.Equal_with_null_captured()); + + public override Task NotEqual_with_null_captured() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_captured()); + + public override Task NotEqual_with_null_reference_type() + => Assert.ThrowsAsync(() => base.NotEqual_with_null_reference_type()); + + #endregion + + #region Not + + // Weaviate currently doesn't support NOT (https://github.com/weaviate/weaviate/issues/3683) + public override Task Not_over_And() + => Assert.ThrowsAsync(() => base.Not_over_And()); + + public override Task Not_over_Or() + => Assert.ThrowsAsync(() => base.Not_over_Or()); + + #endregion + + #region Unsupported Contains scenarios + + public override Task Contains_over_captured_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_captured_string_array()); + + public override Task Contains_over_inline_int_array() + => Assert.ThrowsAsync(() => base.Contains_over_inline_int_array()); + + public override Task Contains_over_inline_string_array() + => Assert.ThrowsAsync(() => base.Contains_over_inline_int_array()); + + public override Task Contains_over_inline_string_array_with_weird_chars() + => Assert.ThrowsAsync(() => base.Contains_over_inline_string_array_with_weird_chars()); + + #endregion + + // In Weaviate, string equality on multi-word textual properties depends on tokenization + // (https://weaviate.io/developers/weaviate/api/graphql/filters#multi-word-queries-in-equal-filters) + public override Task Equal_with_string_is_not_Contains() + => Assert.ThrowsAsync(() => base.Equal_with_string_is_not_Contains()); + + public new class Fixture : BasicQueryTests.QueryFixture + { + public override TestStore TestStore => WeaviateTestStore.Instance; + + protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; + } +} From 8a132c081e71d1d841f622155af078daff611fcb Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Wed, 9 Apr 2025 10:57:55 +0100 Subject: [PATCH 36/63] .Net: Rename IsFilterable to IsIndexed and IsFullTextSearchable to IsFullTextIndexed (#11441) ### Motivation and Context #11130 ### Description Rename IsFilterable to IsIndexed and IsFullTextSearchable to IsFullTextIndexed ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- .../Memory/VectorStore_EmbeddingGeneration.cs | 2 +- ...Store_HybridSearch_Simple_AzureAISearch.cs | 4 +-- ...torStore_VectorSearch_MultiStore_Common.cs | 2 +- .../Memory/VectorStore_VectorSearch_Simple.cs | 2 +- .../InMemoryVectorStoreFixture.cs | 2 +- .../Glossary.cs | 2 +- .../Step4_NonStringKey_VectorStore.cs | 2 +- ...VectorStoreCollectionCreateMappingTests.cs | 10 +++--- ...ISearchVectorStoreRecordCollectionTests.cs | 2 +- .../AzureCosmosDBMongoDBHotelModel.cs | 2 +- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 4 +-- ...nMemoryVectorStoreRecordCollectionTests.cs | 8 ++--- ...earchVectorStoreCollectionCreateMapping.cs | 
12 +++---- ...ngoDBVectorStoreCollectionCreateMapping.cs | 2 +- ...osmosDBNoSQLVectorStoreRecordCollection.cs | 4 +-- ...ngoDBVectorStoreCollectionCreateMapping.cs | 4 +-- ...ostgresVectorStoreRecordPropertyMapping.cs | 2 +- .../QdrantVectorStoreRecordCollection.cs | 8 ++--- ...RedisVectorStoreCollectionCreateMapping.cs | 14 ++++---- .../SqlServerCommandBuilder.cs | 4 +-- .../SqliteVectorStoreRecordPropertyMapping.cs | 2 +- ...viateVectorStoreCollectionCreateMapping.cs | 4 +-- .../MongoDBHotelModel.cs | 2 +- ...ostgresVectorStoreRecordCollectionTests.cs | 10 +++--- ...esVectorStoreRecordPropertyMappingTests.cs | 4 +-- .../QdrantVectorStoreRecordCollectionTests.cs | 8 ++--- ...HashSetVectorStoreRecordCollectionTests.cs | 4 +-- ...disJsonVectorStoreRecordCollectionTests.cs | 8 ++--- ...VectorStoreCollectionCreateMappingTests.cs | 12 +++---- .../SqliteHotel.cs | 2 +- ...teVectorStoreRecordPropertyMappingTests.cs | 2 +- .../WeaviateHotel.cs | 4 +-- ...VectorStoreCollectionCreateMappingTests.cs | 2 +- .../VectorStoreRecordDataPropertyModel.cs | 8 ++--- .../VectorStoreRecordModel.cs | 14 ++++---- .../VectorStoreRecordModelBuilder.cs | 8 ++--- .../VectorStoreRecordDataAttribute.cs | 20 +++++++++++- .../VectorStoreRecordDataProperty.cs | 22 +++++++++++-- .../VectorSearch/HybridSearchOptions.cs | 4 +-- .../AzureAISearch/AzureAISearchHotel.cs | 8 ++--- .../AzureAISearchVectorStoreFixture.cs | 8 ++--- .../AzureCosmosDBMongoDBHotel.cs | 2 +- .../AzureCosmosDBNoSQLHotel.cs | 4 +-- .../Connectors/Memory/MongoDB/MongoDBHotel.cs | 2 +- .../Memory/Qdrant/QdrantVectorStoreFixture.cs | 32 +++++++++---------- .../Connectors/Memory/Redis/RedisHotel.cs | 16 +++++----- .../Memory/Redis/RedisVectorStoreFixture.cs | 16 +++++----- .../Connectors/Memory/Sqlite/SqliteHotel.cs | 2 +- .../Memory/Weaviate/WeaviateHotel.cs | 14 ++++---- .../Data/BaseVectorStoreTextSearchTests.cs | 2 +- .../Data/VectorStoreTextSearchTestBase.cs | 2 +- .../SqlServerCommandBuilderTests.cs | 2 +- .../CRUD/NoVectorConformanceTests.cs | 2 +- .../Filter/BasicFilterTests.cs | 12 +++---- ...rdVectorizedHybridSearchComplianceTests.cs | 10 +++--- .../Support/SimpleModelFixture.cs | 4 +-- ...orSearchDistanceFunctionComplianceTests.cs | 4 +-- 57 files changed, 205 insertions(+), 169 deletions(-) diff --git a/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs b/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs index 6c994109fa7a..a803b18d328c 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs @@ -73,7 +73,7 @@ private sealed class Glossary [VectorStoreRecordKey] public ulong Key { get; set; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string Category { get; set; } [VectorStoreRecordData] diff --git a/dotnet/samples/Concepts/Memory/VectorStore_HybridSearch_Simple_AzureAISearch.cs b/dotnet/samples/Concepts/Memory/VectorStore_HybridSearch_Simple_AzureAISearch.cs index 508c3c9b68c5..94236168e06c 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_HybridSearch_Simple_AzureAISearch.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_HybridSearch_Simple_AzureAISearch.cs @@ -99,13 +99,13 @@ private sealed class Glossary [VectorStoreRecordKey] public string Key { get; set; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string Category { get; set; } [VectorStoreRecordData] public string Term { get; set; } - 
[VectorStoreRecordData(IsFullTextSearchable = true)] + [VectorStoreRecordData(IsFullTextIndexed = true)] public string Definition { get; set; } [VectorStoreRecordVector(1536)] diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs index 64604da51a12..7ff84ecd94e5 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs @@ -128,7 +128,7 @@ private sealed class Glossary [VectorStoreRecordKey] public TKey Key { get; set; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string Category { get; set; } [VectorStoreRecordData] diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs index 8ce21fec5656..ae02ebde075b 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs @@ -93,7 +93,7 @@ private sealed class Glossary [VectorStoreRecordKey] public ulong Key { get; set; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string Category { get; set; } [VectorStoreRecordData] diff --git a/dotnet/samples/GettingStartedWithTextSearch/InMemoryVectorStoreFixture.cs b/dotnet/samples/GettingStartedWithTextSearch/InMemoryVectorStoreFixture.cs index 2af880f4bdc2..8e8279072e70 100644 --- a/dotnet/samples/GettingStartedWithTextSearch/InMemoryVectorStoreFixture.cs +++ b/dotnet/samples/GettingStartedWithTextSearch/InMemoryVectorStoreFixture.cs @@ -150,7 +150,7 @@ public sealed class DataModel [TextSearchResultLink] public string Link { get; init; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public required string Tag { get; init; } [VectorStoreRecordVector(1536)] diff --git a/dotnet/samples/GettingStartedWithVectorStores/Glossary.cs b/dotnet/samples/GettingStartedWithVectorStores/Glossary.cs index 8fc0ee87b4ad..58491513dcbd 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Glossary.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Glossary.cs @@ -16,7 +16,7 @@ internal sealed class Glossary [VectorStoreRecordKey] public string Key { get; set; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string Category { get; set; } [VectorStoreRecordData] diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs b/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs index 86b6273e0e86..2798cd4d9e0c 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs @@ -65,7 +65,7 @@ private sealed class UlongGlossary [VectorStoreRecordKey] public ulong Key { get; set; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string Category { get; set; } [VectorStoreRecordData] diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionCreateMappingTests.cs index b2adb694bf58..966307ed7d2f 100644 --- 
a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionCreateMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionCreateMappingTests.cs @@ -39,7 +39,7 @@ public void MapFilterableStringDataFieldCreatesSimpleField(bool isFilterable) // Arrange var dataProperty = new VectorStoreRecordDataPropertyModel("testdata", typeof(string)) { - IsFilterable = isFilterable, + IsIndexed = isFilterable, StorageName = "test_data" }; @@ -62,8 +62,8 @@ public void MapFullTextSearchableStringDataFieldCreatesSearchableField(bool isFi // Arrange var dataProperty = new VectorStoreRecordDataPropertyModel("testdata", typeof(string)) { - IsFilterable = isFilterable, - IsFullTextSearchable = true, + IsIndexed = isFilterable, + IsFullTextIndexed = true, StorageName = "test_data" }; @@ -84,7 +84,7 @@ public void MapFullTextSearchableStringDataFieldThrowsForInvalidType() // Arrange var dataProperty = new VectorStoreRecordDataPropertyModel("testdata", typeof(int)) { - IsFullTextSearchable = true, + IsFullTextIndexed = true, StorageName = "test_data" }; @@ -100,7 +100,7 @@ public void MapDataFieldCreatesSimpleField(bool isFilterable) // Arrange var dataProperty = new VectorStoreRecordDataPropertyModel("testdata", typeof(int)) { - IsFilterable = isFilterable, + IsIndexed = isFilterable, StorageName = "test_data" }; diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs index 9cdc26661a0c..a39a019b3efd 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -685,7 +685,7 @@ public sealed class MultiPropsModel public string Key { get; set; } = string.Empty; [JsonPropertyName("storage_data1")] - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string Data1 { get; set; } = string.Empty; [VectorStoreRecordData] diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBHotelModel.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBHotelModel.cs index 7fe5e3875fb8..e376a90efcb5 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBHotelModel.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBHotelModel.cs @@ -14,7 +14,7 @@ public class AzureCosmosDBMongoDBHotelModel(string hotelId) public string HotelId { get; init; } = hotelId; /// A string metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string? HotelName { get; set; } /// An int metadata field. 
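The pattern applied mechanically across the files in this commit is the same everywhere; a small illustration with a hypothetical property (not copied from the patch):

// before
[VectorStoreRecordData(IsFilterable = true, IsFullTextSearchable = true)]
public string? Description { get; set; }

// after
[VectorStoreRecordData(IsIndexed = true, IsFullTextIndexed = true)]
public string? Description { get; set; }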
diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 1bea4be23ede..b1e59b5a1613 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -727,10 +727,10 @@ private sealed class TestIndexingModel [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.EuclideanDistance, IndexKind: IndexKind.DiskAnn)] public ReadOnlyMemory? DescriptionEmbedding4 { get; set; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string? IndexableData1 { get; set; } - [VectorStoreRecordData(IsFullTextSearchable = true)] + [VectorStoreRecordData(IsFullTextIndexed = true)] public string? IndexableData2 { get; set; } [VectorStoreRecordData] diff --git a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs index 34c5d1f569d9..5fa4ea476b74 100644 --- a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs @@ -558,8 +558,8 @@ private InMemoryVectorStoreRecordCollection> Create Properties = [ new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("Tags", typeof(List)) { IsFilterable = true }, - new VectorStoreRecordDataProperty("Data", typeof(string)) { IsFilterable = true }, + new VectorStoreRecordDataProperty("Tags", typeof(List)) { IsIndexed = true }, + new VectorStoreRecordDataProperty("Data", typeof(string)) { IsIndexed = true }, new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) ] }; @@ -569,10 +569,10 @@ public sealed class SinglePropsModel [VectorStoreRecordKey] public TKey? Key { get; set; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public List Tags { get; set; } = new List(); - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string Data { get; set; } = string.Empty; [VectorStoreRecordVector] diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs index 33c73ba68c3a..8373c3b362e8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs @@ -33,27 +33,27 @@ public static SearchableField MapKeyField(VectorStoreRecordKeyPropertyModel keyP /// Throws when the definition is missing required information. 
public static SimpleField MapDataField(VectorStoreRecordDataPropertyModel dataProperty) { - if (dataProperty.IsFullTextSearchable) + if (dataProperty.IsFullTextIndexed) { if (dataProperty.Type != typeof(string)) { - throw new InvalidOperationException($"Property {nameof(dataProperty.IsFullTextSearchable)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.ModelName}' is set to true, but the property type is not a string. The Azure AI Search VectorStore supports {nameof(dataProperty.IsFullTextSearchable)} on string properties only."); + throw new InvalidOperationException($"Property {nameof(dataProperty.IsFullTextIndexed)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.ModelName}' is set to true, but the property type is not a string. The Azure AI Search VectorStore supports {nameof(dataProperty.IsFullTextIndexed)} on string properties only."); } return new SearchableField(dataProperty.StorageName) { - IsFilterable = dataProperty.IsFilterable, + IsFilterable = dataProperty.IsIndexed, // Sometimes the users ask to also OrderBy given filterable property, so we make it sortable. - IsSortable = dataProperty.IsFilterable + IsSortable = dataProperty.IsIndexed }; } var fieldType = AzureAISearchVectorStoreCollectionCreateMapping.GetSDKFieldDataType(dataProperty.Type); return new SimpleField(dataProperty.StorageName, fieldType) { - IsFilterable = dataProperty.IsFilterable, + IsFilterable = dataProperty.IsIndexed, // Sometimes the users ask to also OrderBy given filterable property, so we make it sortable. - IsSortable = dataProperty.IsFilterable && !fieldType.IsCollection + IsSortable = dataProperty.IsIndexed && !fieldType.IsCollection }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionCreateMapping.cs index c1c8f1b5d02e..0263af542c2e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionCreateMapping.cs @@ -79,7 +79,7 @@ public static BsonArray GetFilterableDataIndexes( // Create separate index for each data property foreach (var property in dataProperties) { - if (property.IsFilterable) + if (property.IsIndexed) { // Use index name same as data property name with underscore var indexName = $"{property.StorageName}_"; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index 2ef839f61aec..48cb900956ca 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -532,11 +532,11 @@ private ContainerProperties GetContainerProperties() // Process Data properties. foreach (var property in this._model.DataProperties) { - if (property.IsFilterable || property.IsFullTextSearchable) + if (property.IsIndexed || property.IsFullTextIndexed) { indexingPolicy.IncludedPaths.Add(new IncludedPath { Path = $"/{property.StorageName}/?" 
}); } - if (property.IsFullTextSearchable) + if (property.IsFullTextIndexed) { indexingPolicy.FullTextIndexes.Add(new FullTextIndexPath { Path = $"/{property.StorageName}" }); // TODO: Switch to using language from a setting. diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionCreateMapping.cs index d7462cbe2b7b..5eebe8a91001 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreCollectionCreateMapping.cs @@ -49,7 +49,7 @@ public static BsonArray GetFilterableDataIndexFields(IReadOnlyList GetFullTextSearchableDataIndexFields(IReadOnlyLi // Create separate index for each data property foreach (var property in dataProperties) { - if (property.IsFullTextSearchable) + if (property.IsFullTextIndexed) { fieldElements.Add(new BsonElement(property.StorageName, new BsonArray() { diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs index d16383c567ba..2229ec00d29c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs @@ -213,7 +213,7 @@ public static NpgsqlParameter GetNpgsqlParameter(object? value) break; case VectorStoreRecordDataPropertyModel dataProperty: - if (dataProperty.IsFilterable) + if (dataProperty.IsIndexed) { vectorIndexesToCreate.Add((dataProperty.StorageName, "", "", isVector: false)); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index 563bcef22824..08620a5bd5be 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -152,7 +152,7 @@ await this.RunOperationAsync( } // Add indexes for each of the data properties that require filtering. - var dataProperties = this._model.DataProperties.Where(x => x.IsFilterable); + var dataProperties = this._model.DataProperties.Where(x => x.IsIndexed); foreach (var dataProperty in dataProperties) { if (QdrantVectorStoreCollectionCreateMapping.s_schemaTypeMap.TryGetValue(dataProperty.Type, out PayloadSchemaType schemaType)) @@ -167,7 +167,7 @@ await this.RunOperationAsync( else { // TODO: This should move to model validation - throw new InvalidOperationException($"Property {nameof(VectorStoreRecordDataProperty.IsFilterable)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.ModelName}' is set to true, but the property type is not supported for filtering. The Qdrant VectorStore supports filtering on {string.Join(", ", QdrantVectorStoreCollectionCreateMapping.s_schemaTypeMap.Keys.Select(x => x.Name))} properties only."); + throw new InvalidOperationException($"Property {nameof(VectorStoreRecordDataProperty.IsIndexed)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.ModelName}' is set to true, but the property type is not supported for filtering. 
The Qdrant VectorStore supports filtering on {string.Join(", ", QdrantVectorStoreCollectionCreateMapping.s_schemaTypeMap.Keys.Select(x => x.Name))} properties only."); } await this.RunOperationAsync( @@ -180,13 +180,13 @@ await this.RunOperationAsync( } // Add indexes for each of the data properties that require full text search. - dataProperties = this._model.DataProperties.Where(x => x.IsFullTextSearchable); + dataProperties = this._model.DataProperties.Where(x => x.IsFullTextIndexed); foreach (var dataProperty in dataProperties) { // TODO: This should move to model validation if (dataProperty.Type != typeof(string)) { - throw new InvalidOperationException($"Property {nameof(dataProperty.IsFullTextSearchable)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.ModelName}' is set to true, but the property type is not a string. The Qdrant VectorStore supports {nameof(dataProperty.IsFullTextSearchable)} on string properties only."); + throw new InvalidOperationException($"Property {nameof(dataProperty.IsFullTextIndexed)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.ModelName}' is set to true, but the property type is not a string. The Qdrant VectorStore supports {nameof(dataProperty.IsFullTextIndexed)} on string properties only."); } await this.RunOperationAsync( diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs index 169867fb24ac..bd392f6adbd6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs @@ -67,14 +67,14 @@ public static Schema MapToSchema(IEnumerable pro // Do nothing, since key is not stored as part of the payload and therefore doesn't have to be added to the index. continue; - case VectorStoreRecordDataPropertyModel dataProperty when dataProperty.IsFilterable || dataProperty.IsFullTextSearchable: - if (dataProperty.IsFilterable && dataProperty.IsFullTextSearchable) + case VectorStoreRecordDataPropertyModel dataProperty when dataProperty.IsIndexed || dataProperty.IsFullTextIndexed: + if (dataProperty.IsIndexed && dataProperty.IsFullTextIndexed) { - throw new InvalidOperationException($"Property '{dataProperty.ModelName}' has both {nameof(VectorStoreRecordDataProperty.IsFilterable)} and {nameof(VectorStoreRecordDataProperty.IsFullTextSearchable)} set to true, and this is not supported by the Redis VectorStore."); + throw new InvalidOperationException($"Property '{dataProperty.ModelName}' has both {nameof(VectorStoreRecordDataProperty.IsIndexed)} and {nameof(VectorStoreRecordDataProperty.IsFullTextIndexed)} set to true, and this is not supported by the Redis VectorStore."); } // Add full text search field index. - if (dataProperty.IsFullTextSearchable) + if (dataProperty.IsFullTextIndexed) { if (dataProperty.Type == typeof(string) || (typeof(IEnumerable).IsAssignableFrom(dataProperty.Type) && GetEnumerableType(dataProperty.Type) == typeof(string))) { @@ -82,12 +82,12 @@ public static Schema MapToSchema(IEnumerable pro } else { - throw new InvalidOperationException($"Property {nameof(dataProperty.IsFullTextSearchable)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.ModelName}' is set to true, but the property type is not a string or IEnumerable. 
The Redis VectorStore supports {nameof(dataProperty.IsFullTextSearchable)} on string or IEnumerable properties only."); + throw new InvalidOperationException($"Property {nameof(dataProperty.IsFullTextIndexed)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.ModelName}' is set to true, but the property type is not a string or IEnumerable. The Redis VectorStore supports {nameof(dataProperty.IsFullTextIndexed)} on string or IEnumerable properties only."); } } // Add filter field index. - if (dataProperty.IsFilterable) + if (dataProperty.IsIndexed) { if (dataProperty.Type == typeof(string)) { @@ -103,7 +103,7 @@ public static Schema MapToSchema(IEnumerable pro } else { - throw new InvalidOperationException($"Property '{dataProperty.ModelName}' is marked as {nameof(VectorStoreRecordDataProperty.IsFilterable)}, but the property type '{dataProperty.Type}' is not supported. Only string, IEnumerable and numeric properties are supported for filtering by the Redis VectorStore."); + throw new InvalidOperationException($"Property '{dataProperty.ModelName}' is marked as {nameof(VectorStoreRecordDataProperty.IsIndexed)}, but the property type '{dataProperty.Type}' is not supported. Only string, IEnumerable and numeric properties are supported for filtering by the Redis VectorStore."); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs index eb67f4897dc0..cb56e9f318ca 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerCommandBuilder.cs @@ -54,7 +54,7 @@ internal static SqlCommand CreateTable( foreach (var dataProperty in model.DataProperties) { - if (dataProperty.IsFilterable) + if (dataProperty.IsIndexed) { sb.AppendFormat("CREATE INDEX "); sb.AppendIndexName(tableName, dataProperty.StorageName); @@ -607,7 +607,7 @@ private static void AddParameter(this SqlCommand command, VectorStoreRecordPrope Type t when t == typeof(long) => "BIGINT", Type t when t == typeof(Guid) => "UNIQUEIDENTIFIER", Type t when t == typeof(string) && property is VectorStoreRecordKeyPropertyModel => "NVARCHAR(4000)", - Type t when t == typeof(string) && property is VectorStoreRecordDataPropertyModel { IsFilterable: true } => "NVARCHAR(4000)", + Type t when t == typeof(string) && property is VectorStoreRecordDataPropertyModel { IsIndexed: true } => "NVARCHAR(4000)", Type t when t == typeof(string) => "NVARCHAR(MAX)", Type t when t == typeof(byte[]) => "VARBINARY(MAX)", Type t when t == typeof(bool) => "BIT", diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs index 0cae1286b2b8..5081f8fc8f02 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs @@ -65,7 +65,7 @@ public static List GetColumns(List var column = new SqliteColumn(property.StorageName, propertyType, isPrimary) { Configuration = configuration, - HasIndex = property is VectorStoreRecordDataPropertyModel { IsFilterable: true } + HasIndex = property is VectorStoreRecordDataPropertyModel { IsIndexed: true } }; columns.Add(column); diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs 
b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs index d17377c769ce..2448ceea8f18 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs @@ -32,8 +32,8 @@ public static WeaviateCollectionSchema MapToSchema(string collectionName, Vector { Name = property.StorageName, DataType = [MapType(property.Type)], - IndexFilterable = property.IsFilterable, - IndexSearchable = property.IsFullTextSearchable + IndexFilterable = property.IsIndexed, + IndexSearchable = property.IsFullTextIndexed }); } diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBHotelModel.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBHotelModel.cs index 46374a5cc408..a7dfe4dea140 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBHotelModel.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBHotelModel.cs @@ -14,7 +14,7 @@ public class MongoDBHotelModel(string hotelId) public string HotelId { get; init; } = hotelId; /// A string metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string? HotelName { get; set; } /// An int metadata field. diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs index 74545d03cda3..69f716fecac8 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs @@ -33,10 +33,10 @@ public async Task CreatesCollectionForGenericModelAsync() { Properties = [ new VectorStoreRecordKeyProperty("HotelId", typeof(int)), - new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsFilterable = true, IsFullTextSearchable = true }, - new VectorStoreRecordDataProperty("HotelCode", typeof(int)) { IsFilterable = true }, - new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool)) { IsFilterable = true, StoragePropertyName = "parking_is_included" }, - new VectorStoreRecordDataProperty("HotelRating", typeof(float)) { IsFilterable = true }, + new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsIndexed = true, IsFullTextIndexed = true }, + new VectorStoreRecordDataProperty("HotelCode", typeof(int)) { IsIndexed = true }, + new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool)) { IsIndexed = true, StoragePropertyName = "parking_is_included" }, + new VectorStoreRecordDataProperty("HotelRating", typeof(float)) { IsIndexed = true }, new VectorStoreRecordDataProperty("Tags", typeof(List)), new VectorStoreRecordDataProperty("Description", typeof(string)), new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 100, DistanceFunction = DistanceFunction.ManhattanDistance } @@ -64,7 +64,7 @@ public void ThrowsForUnsupportedType() { Properties = [ new VectorStoreRecordKeyProperty("HotelId", typeof(ulong)), - new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsFilterable = true, IsFullTextSearchable = true }, + new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsIndexed = true, IsFullTextIndexed = true }, ] }; var options = new PostgresVectorStoreRecordCollectionOptions>() diff --git 
a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordPropertyMappingTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordPropertyMappingTests.cs index a53d4d2c19f8..14c73e8b42de 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordPropertyMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordPropertyMappingTests.cs @@ -95,8 +95,8 @@ public void GetIndexInfoReturnsCorrectValues() new VectorStoreRecordVectorPropertyModel("vector1", typeof(ReadOnlyMemory?)) { IndexKind = IndexKind.Hnsw, Dimensions = 1000 }, new VectorStoreRecordVectorPropertyModel("vector2", typeof(ReadOnlyMemory?)) { IndexKind = IndexKind.Flat, Dimensions = 3000 }, new VectorStoreRecordVectorPropertyModel("vector3", typeof(ReadOnlyMemory?)) { IndexKind = IndexKind.Hnsw, Dimensions = 900, DistanceFunction = DistanceFunction.ManhattanDistance }, - new VectorStoreRecordDataPropertyModel("data1", typeof(string)) { IsFilterable = true }, - new VectorStoreRecordDataPropertyModel("data2", typeof(string)) { IsFilterable = false } + new VectorStoreRecordDataPropertyModel("data1", typeof(string)) { IsIndexed = true }, + new VectorStoreRecordDataPropertyModel("data2", typeof(string)) { IsIndexed = false } ]; // Act diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs index c8c8a0f79430..c91abee5481e 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs @@ -799,8 +799,8 @@ private static VectorStoreRecordDefinition CreateSinglePropsDefinition(Type keyT Properties = [ new VectorStoreRecordKeyProperty("Key", keyType), - new VectorStoreRecordDataProperty("OriginalNameData", typeof(string)) { IsFilterable = true, IsFullTextSearchable = true }, - new VectorStoreRecordDataProperty("Data", typeof(string)) { IsFilterable = true, StoragePropertyName = "data_storage_name" }, + new VectorStoreRecordDataProperty("OriginalNameData", typeof(string)) { IsIndexed = true, IsFullTextIndexed = true }, + new VectorStoreRecordDataProperty("Data", typeof(string)) { IsIndexed = true, StoragePropertyName = "data_storage_name" }, new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { StoragePropertyName = "vector_storage_name" } ] }; @@ -811,11 +811,11 @@ public sealed class SinglePropsModel [VectorStoreRecordKey] public required T Key { get; set; } - [VectorStoreRecordData(IsFilterable = true, IsFullTextSearchable = true)] + [VectorStoreRecordData(IsIndexed = true, IsFullTextIndexed = true)] public string OriginalNameData { get; set; } = string.Empty; [JsonPropertyName("ignored_data_json_name")] - [VectorStoreRecordData(IsFilterable = true, StoragePropertyName = "data_storage_name")] + [VectorStoreRecordData(IsIndexed = true, StoragePropertyName = "data_storage_name")] public string Data { get; set; } = string.Empty; [JsonPropertyName("ignored_vector_json_name")] diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs index e95a83321b05..9d46a7c643b2 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs +++ 
b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -634,11 +634,11 @@ public sealed class SinglePropsModel [VectorStoreRecordKey] public string Key { get; set; } = string.Empty; - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string OriginalNameData { get; set; } = string.Empty; [JsonPropertyName("ignored_data_json_name")] - [VectorStoreRecordData(IsFilterable = true, StoragePropertyName = "data_storage_name")] + [VectorStoreRecordData(IsIndexed = true, StoragePropertyName = "data_storage_name")] public string Data { get; set; } = string.Empty; [JsonPropertyName("ignored_vector_json_name")] diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs index 50755b9624f0..d57066cca612 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs @@ -599,8 +599,8 @@ private static MultiPropsModel CreateModel(string key, bool withVectors) Properties = [ new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("Data1", typeof(string)) { IsFilterable = true, StoragePropertyName = "ignored_data1_storage_name" }, - new VectorStoreRecordDataProperty("Data2", typeof(string)) { IsFilterable = true }, + new VectorStoreRecordDataProperty("Data1", typeof(string)) { IsIndexed = true, StoragePropertyName = "ignored_data1_storage_name" }, + new VectorStoreRecordDataProperty("Data2", typeof(string)) { IsIndexed = true }, new VectorStoreRecordVectorProperty("Vector1", typeof(ReadOnlyMemory)) { Dimensions = 4, DistanceFunction = DistanceFunction.CosineDistance, StoragePropertyName = "ignored_vector1_storage_name" }, new VectorStoreRecordVectorProperty("Vector2", typeof(ReadOnlyMemory)) { Dimensions = 4 } ] @@ -612,10 +612,10 @@ public sealed class MultiPropsModel public string Key { get; set; } = string.Empty; [JsonPropertyName("data1_json_name")] - [VectorStoreRecordData(IsFilterable = true, StoragePropertyName = "ignored_data1_storage_name")] + [VectorStoreRecordData(IsIndexed = true, StoragePropertyName = "ignored_data1_storage_name")] public string Data1 { get; set; } = string.Empty; - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string Data2 { get; set; } = string.Empty; [JsonPropertyName("vector1_json_name")] diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs index 370443e6c50c..c191cd322fa6 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs @@ -25,13 +25,13 @@ public void MapToSchemaCreatesSchema(bool useDollarPrefix) [ new VectorStoreRecordKeyPropertyModel("Key", typeof(string)), - new VectorStoreRecordDataPropertyModel("FilterableString", typeof(string)) { IsFilterable = true }, - new VectorStoreRecordDataPropertyModel("FullTextSearchableString", typeof(string)) { IsFullTextSearchable = true }, - new VectorStoreRecordDataPropertyModel("FilterableStringEnumerable", typeof(string[])) { IsFilterable = true }, - new 
VectorStoreRecordDataPropertyModel("FullTextSearchableStringEnumerable", typeof(string[])) { IsFullTextSearchable = true }, + new VectorStoreRecordDataPropertyModel("FilterableString", typeof(string)) { IsIndexed = true }, + new VectorStoreRecordDataPropertyModel("FullTextSearchableString", typeof(string)) { IsFullTextIndexed = true }, + new VectorStoreRecordDataPropertyModel("FilterableStringEnumerable", typeof(string[])) { IsIndexed = true }, + new VectorStoreRecordDataPropertyModel("FullTextSearchableStringEnumerable", typeof(string[])) { IsFullTextIndexed = true }, - new VectorStoreRecordDataPropertyModel("FilterableInt", typeof(int)) { IsFilterable = true }, - new VectorStoreRecordDataPropertyModel("FilterableNullableInt", typeof(int)) { IsFilterable = true }, + new VectorStoreRecordDataPropertyModel("FilterableInt", typeof(int)) { IsIndexed = true }, + new VectorStoreRecordDataPropertyModel("FilterableNullableInt", typeof(int)) { IsIndexed = true }, new VectorStoreRecordDataPropertyModel("NonFilterableString", typeof(string)), diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteHotel.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteHotel.cs index 8adb64a8bc88..30afc4c24a81 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteHotel.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteHotel.cs @@ -12,7 +12,7 @@ public class SqliteHotel() public TKey? HotelId { get; init; } /// A string metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string? HotelName { get; set; } /// An int metadata field. diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs index a36e9f25a4c3..f3891f326ee9 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs @@ -59,7 +59,7 @@ public void GetColumnsReturnsCollectionOfColumns() var properties = new List() { new VectorStoreRecordKeyPropertyModel("Key", typeof(string)) { StorageName = "Key" }, - new VectorStoreRecordDataPropertyModel("Data", typeof(int)) { StorageName = "my_data", IsFilterable = true }, + new VectorStoreRecordDataPropertyModel("Data", typeof(int)) { StorageName = "my_data", IsIndexed = true }, new VectorStoreRecordVectorPropertyModel("Vector", typeof(ReadOnlyMemory)) { Dimensions = 4, diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateHotel.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateHotel.cs index 9ce781b39b8b..5456ef05e907 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateHotel.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateHotel.cs @@ -16,7 +16,7 @@ public sealed record WeaviateHotel public Guid HotelId { get; init; } /// A string metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string? HotelName { get; set; } /// An int metadata field. @@ -37,7 +37,7 @@ public sealed record WeaviateHotel public List Tags { get; set; } = []; /// A data field. 
- [VectorStoreRecordData(IsFullTextSearchable = true)] + [VectorStoreRecordData(IsFullTextIndexed = true)] public string Description { get; set; } [VectorStoreRecordData] diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs index 0203e726c145..62ab99c428d1 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs @@ -171,7 +171,7 @@ public void ItMapsPropertyCorrectly(Type propertyType, string expectedPropertyTy Properties = [ new VectorStoreRecordKeyProperty("Key", typeof(Guid)), - new VectorStoreRecordDataProperty("PropertyName", propertyType) { IsFilterable = true, IsFullTextSearchable = true }, + new VectorStoreRecordDataProperty("PropertyName", propertyType) { IsIndexed = true, IsFullTextIndexed = true }, new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) ] }, diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordDataPropertyModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordDataPropertyModel.cs index 4dce8d215f0b..dc23d2e7c122 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordDataPropertyModel.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordDataPropertyModel.cs @@ -14,20 +14,20 @@ namespace Microsoft.Extensions.VectorData.ConnectorSupport; public class VectorStoreRecordDataPropertyModel(string modelName, Type type) : VectorStoreRecordPropertyModel(modelName, type) { /// - /// Gets or sets a value indicating whether this data property is filterable. + /// Gets or sets a value indicating whether this data property is indexed. /// /// /// The default is . /// - public bool IsFilterable { get; set; } + public bool IsIndexed { get; set; } /// - /// Gets or sets a value indicating whether this data property is full text searchable. + /// Gets or sets a value indicating whether this data property is indexed for full-text search. /// /// /// The default is . /// - public bool IsFullTextSearchable { get; set; } + public bool IsFullTextIndexed { get; set; } /// // TODO: Temporary, remove once we move to Dictionary as the dynamic representation diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs index cea74a78218b..622cfe07a772 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs @@ -141,24 +141,24 @@ public VectorStoreRecordDataPropertyModel GetFullTextDataPropertyOrSingle(expression, data: true); - return property.IsFullTextSearchable + return property.IsFullTextIndexed ? 
property - : throw new InvalidOperationException($"The property '{property.ModelName}' on '{this._recordType.Name}' must have full text search enabled."); + : throw new InvalidOperationException($"The property '{property.ModelName}' on '{this._recordType.Name}' must have full text search indexing enabled."); } if (this._singleFullTextSearchProperty is null) { - // If text data property name is not provided, check if a single full text searchable text property exists or throw otherwise. + // If text data property name is not provided, check if a single full text indexed text property exists or throw otherwise. var fullTextStringProperties = this.DataProperties - .Where(l => l.Type == typeof(string) && l.IsFullTextSearchable) + .Where(l => l.Type == typeof(string) && l.IsFullTextIndexed) .ToList(); - // If text data property name is not provided, check if a single full text searchable text property exists or throw otherwise. + // If text data property name is not provided, check if a single full text indexed text property exists or throw otherwise. this._singleFullTextSearchProperty = fullTextStringProperties switch { [var singleProperty] => singleProperty, - { Count: 0 } => throw new InvalidOperationException($"The '{this._recordType.Name}' type does not have any text data properties that have full text search enabled."), - _ => throw new InvalidOperationException($"The '{this._recordType.Name}' type has multiple text data properties that have full text search enabled, please specify your chosen property via options.") + { Count: 0 } => throw new InvalidOperationException($"The '{this._recordType.Name}' type does not have any text data properties that have full text indexing enabled."), + _ => throw new InvalidOperationException($"The '{this._recordType.Name}' type has multiple text data properties that have full text indexing enabled, please specify your chosen property via options.") }; } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs index ef7eab63a713..eda35d595ede 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs @@ -128,8 +128,8 @@ protected virtual void ProcessTypeProperties(Type type, VectorStoreRecordDefinit var dataProperty = new VectorStoreRecordDataPropertyModel(clrProperty.Name, clrProperty.PropertyType) { - IsFilterable = dataAttribute.IsFilterable, - IsFullTextSearchable = dataAttribute.IsFullTextSearchable, + IsIndexed = dataAttribute.IsIndexed, + IsFullTextIndexed = dataAttribute.IsFullTextIndexed, }; this.DataProperties.Add(dataProperty); @@ -240,8 +240,8 @@ protected virtual void ProcessRecordDefinition( $"Property '{property.ModelName}' is present in the {nameof(VectorStoreRecordDefinition)} as a data property, but the .NET property on type '{type?.Name}' has an incompatible attribute."); } - dataProperty.IsFilterable = definitionDataProperty.IsFilterable; - dataProperty.IsFullTextSearchable = definitionDataProperty.IsFullTextSearchable; + dataProperty.IsIndexed = definitionDataProperty.IsIndexed; + dataProperty.IsFullTextIndexed = definitionDataProperty.IsFullTextIndexed; break; diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs 
b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs index 38302c7fecc8..c64ababda3a8 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs @@ -21,16 +21,34 @@ public sealed class VectorStoreRecordDataAttribute : Attribute /// /// The default is . /// + [Obsolete("This property is now obsolete and will have no effect if used. Please use IsIndexed instead", error: true)] public bool IsFilterable { get; init; } /// - /// Gets or sets a value indicating whether this data property is full-text searchable. + /// Gets or sets a value indicating whether this data property is full text searchable. /// /// /// The default is . /// + [Obsolete("This property is now obsolete and will have no effect if used. Please use IsFullTextIndexed instead", error: true)] public bool IsFullTextSearchable { get; init; } + /// + /// Gets or sets a value indicating whether this data property is indexed. + /// + /// + /// The default is . + /// + public bool IsIndexed { get; init; } + + /// + /// Gets or sets a value indicating whether this data property is indexed for full-text search. + /// + /// + /// The default is . + /// + public bool IsFullTextIndexed { get; init; } + /// /// Gets or sets an optional name to use for the property in storage, if different from the property name. /// diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDataProperty.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDataProperty.cs index e3e5c22296b5..5cc543f73474 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDataProperty.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDataProperty.cs @@ -29,8 +29,8 @@ public VectorStoreRecordDataProperty(string propertyName, Type propertyType) public VectorStoreRecordDataProperty(VectorStoreRecordDataProperty source) : base(source) { - this.IsFilterable = source.IsFilterable; - this.IsFullTextSearchable = source.IsFullTextSearchable; + this.IsIndexed = source.IsIndexed; + this.IsFullTextIndexed = source.IsFullTextIndexed; } /// @@ -39,6 +39,7 @@ public VectorStoreRecordDataProperty(VectorStoreRecordDataProperty source) /// /// The default is . /// + [Obsolete("This property is now obsolete and will have no effect if used. Please use IsIndexed instead", error: true)] public bool IsFilterable { get; init; } /// @@ -47,5 +48,22 @@ public VectorStoreRecordDataProperty(VectorStoreRecordDataProperty source) /// /// The default is . /// + [Obsolete("This property is now obsolete and will have no effect if used. Please use IsFullTextIndexed instead", error: true)] public bool IsFullTextSearchable { get; init; } + + /// + /// Gets or sets a value indicating whether this data property is indexed. + /// + /// + /// The default is . + /// + public bool IsIndexed { get; init; } + + /// + /// Gets or sets a value indicating whether this data property is indexed for full-text search. + /// + /// + /// The default is .
+ /// + public bool IsFullTextIndexed { get; init; } } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs index 65a8a025207b..96c251a086f0 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/HybridSearchOptions.cs @@ -37,8 +37,8 @@ public class HybridSearchOptions /// /// Gets or sets the additional target property to do the text/keyword search on. - /// The property must have full text search enabled. - /// If not provided will look if there is a text property with full text search enabled, and + /// The property must have full text indexing enabled. + /// If not provided will look if there is a text property with full text indexing enabled, and /// will throw if either none or multiple exist. /// public Expression>? AdditionalProperty { get; init; } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchHotel.cs index 3f979fe2b828..430838d7e6ed 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchHotel.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchHotel.cs @@ -17,7 +17,7 @@ public class AzureAISearchHotel public string HotelId { get; set; } [SearchableField(IsFilterable = true, IsSortable = true)] - [VectorStoreRecordData(IsFilterable = true, IsFullTextSearchable = true)] + [VectorStoreRecordData(IsIndexed = true, IsFullTextIndexed = true)] public string HotelName { get; set; } [SearchableField(AnalyzerName = LexicalAnalyzerName.Values.EnLucene)] @@ -28,18 +28,18 @@ public class AzureAISearchHotel public ReadOnlyMemory? DescriptionEmbedding { get; set; } [SearchableField(IsFilterable = true, IsFacetable = true)] - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] #pragma warning disable CA1819 // Properties should not return arrays public string[] Tags { get; set; } #pragma warning restore CA1819 // Properties should not return arrays [JsonPropertyName("parking_is_included")] [SimpleField(IsFilterable = true, IsSortable = true, IsFacetable = true)] - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public bool? ParkingIncluded { get; set; } [SimpleField(IsFilterable = true, IsSortable = true, IsFacetable = true)] - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public DateTimeOffset? 
LastRenovationDate { get; set; } [SimpleField(IsFilterable = true, IsSortable = true, IsFacetable = true)] diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs index 6a973333ca3f..b5fc160325a9 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs @@ -70,12 +70,12 @@ public AzureAISearchVectorStoreFixture() Properties = new List { new VectorStoreRecordKeyProperty("HotelId", typeof(string)), - new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsFilterable = true, IsFullTextSearchable = true }, + new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsIndexed = true, IsFullTextIndexed = true }, new VectorStoreRecordDataProperty("Description", typeof(string)), new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 1536 }, - new VectorStoreRecordDataProperty("Tags", typeof(string[])) { IsFilterable = true }, - new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool?)) { IsFilterable = true, StoragePropertyName = "parking_is_included" }, - new VectorStoreRecordDataProperty("LastRenovationDate", typeof(DateTimeOffset?)) { IsFilterable = true }, + new VectorStoreRecordDataProperty("Tags", typeof(string[])) { IsIndexed = true }, + new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool?)) { IsIndexed = true, StoragePropertyName = "parking_is_included" }, + new VectorStoreRecordDataProperty("LastRenovationDate", typeof(DateTimeOffset?)) { IsIndexed = true }, new VectorStoreRecordDataProperty("Rating", typeof(double?)) } }; diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBHotel.cs index 7a8830ea2842..0127bb5405d2 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBHotel.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBHotel.cs @@ -15,7 +15,7 @@ public class AzureCosmosDBMongoDBHotel public string HotelId { get; init; } /// A string metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string? HotelName { get; set; } /// An int metadata field. diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLHotel.cs index e7d353486504..2bf730a792f9 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLHotel.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLHotel.cs @@ -16,11 +16,11 @@ public record AzureCosmosDBNoSQLHotel() public string HotelId { get; init; } /// A string metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string? HotelName { get; set; } /// An int metadata field. - [VectorStoreRecordData(IsFullTextSearchable = true)] + [VectorStoreRecordData(IsFullTextIndexed = true)] public int HotelCode { get; set; } /// A float metadata field. 
diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBHotel.cs index b3adb2e723a1..b0bd04f2f400 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBHotel.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBHotel.cs @@ -15,7 +15,7 @@ public class MongoDBHotel public string HotelId { get; init; } /// A string metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string? HotelName { get; set; } /// An int metadata field. diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreFixture.cs index 638bf1f5602f..4a10a8915e1e 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreFixture.cs @@ -53,13 +53,13 @@ public QdrantVectorStoreFixture() Properties = new List { new VectorStoreRecordKeyProperty("HotelId", typeof(ulong)), - new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsFilterable = true, IsFullTextSearchable = true }, - new VectorStoreRecordDataProperty("HotelCode", typeof(int)) { IsFilterable = true }, - new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool)) { IsFilterable = true, StoragePropertyName = "parking_is_included" }, - new VectorStoreRecordDataProperty("HotelRating", typeof(float)) { IsFilterable = true }, - new VectorStoreRecordDataProperty("LastRenovationDate", typeof(DateTime)) { IsFilterable = true }, - new VectorStoreRecordDataProperty("OpeningDate", typeof(DateTimeOffset)) { IsFilterable = true }, - new VectorStoreRecordDataProperty("Tags", typeof(List)) { IsFilterable = true }, + new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsIndexed = true, IsFullTextIndexed = true }, + new VectorStoreRecordDataProperty("HotelCode", typeof(int)) { IsIndexed = true }, + new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool)) { IsIndexed = true, StoragePropertyName = "parking_is_included" }, + new VectorStoreRecordDataProperty("HotelRating", typeof(float)) { IsIndexed = true }, + new VectorStoreRecordDataProperty("LastRenovationDate", typeof(DateTime)) { IsIndexed = true }, + new VectorStoreRecordDataProperty("OpeningDate", typeof(DateTimeOffset)) { IsIndexed = true }, + new VectorStoreRecordDataProperty("Tags", typeof(List)) { IsIndexed = true }, new VectorStoreRecordDataProperty("Description", typeof(string)), new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = VectorDimensions, DistanceFunction = DistanceFunction.ManhattanDistance } } @@ -69,7 +69,7 @@ public QdrantVectorStoreFixture() Properties = new List { new VectorStoreRecordKeyProperty("HotelId", typeof(Guid)), - new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsFilterable = true, IsFullTextSearchable = true }, + new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsIndexed = true, IsFullTextIndexed = true }, new VectorStoreRecordDataProperty("Description", typeof(string)), new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = VectorDimensions, DistanceFunction = DistanceFunction.ManhattanDistance } } @@ -320,30 +320,30 @@ public record HotelInfo() public ulong HotelId { get; init; } /// A string metadata field. 
- [VectorStoreRecordData(IsFilterable = true, IsFullTextSearchable = true)] + [VectorStoreRecordData(IsIndexed = true, IsFullTextIndexed = true)] public string? HotelName { get; set; } /// An int metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public int HotelCode { get; set; } /// A float metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public float? HotelRating { get; set; } /// A bool metadata field. - [VectorStoreRecordData(IsFilterable = true, StoragePropertyName = "parking_is_included")] + [VectorStoreRecordData(IsIndexed = true, StoragePropertyName = "parking_is_included")] public bool ParkingIncluded { get; set; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public List Tags { get; set; } = new List(); /// A datetime metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public DateTime? LastRenovationDate { get; set; } /// A datetimeoffset metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public DateTimeOffset? OpeningDate { get; set; } /// A data field. @@ -366,7 +366,7 @@ public record HotelInfoWithGuidId() public Guid HotelId { get; init; } /// A string metadata field. - [VectorStoreRecordData(IsFilterable = true, IsFullTextSearchable = true)] + [VectorStoreRecordData(IsIndexed = true, IsFullTextIndexed = true)] public string? HotelName { get; set; } /// A data field. diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHotel.cs index 87dc5c2fb89b..5a0dbb64459f 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHotel.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHotel.cs @@ -16,23 +16,23 @@ public class RedisHotel [VectorStoreRecordKey] public string HotelId { get; init; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string HotelName { get; init; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public int HotelCode { get; init; } - [VectorStoreRecordData(IsFullTextSearchable = true)] + [VectorStoreRecordData(IsFullTextIndexed = true)] public string Description { get; init; } [VectorStoreRecordVector(4)] public ReadOnlyMemory? 
DescriptionEmbedding { get; init; } #pragma warning disable CA1819 // Properties should not return arrays - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string[] Tags { get; init; } - [VectorStoreRecordData(IsFullTextSearchable = true)] + [VectorStoreRecordData(IsFullTextIndexed = true)] public string[] FTSTags { get; init; } #pragma warning restore CA1819 // Properties should not return arrays @@ -67,13 +67,13 @@ public class RedisBasicHotel [VectorStoreRecordKey] public string HotelId { get; init; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string HotelName { get; init; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public int HotelCode { get; init; } - [VectorStoreRecordData(IsFullTextSearchable = true)] + [VectorStoreRecordData(IsFullTextIndexed = true)] public string Description { get; init; } [VectorStoreRecordVector(4)] diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs index bec643a13d5b..c60669b842ee 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs @@ -40,12 +40,12 @@ public RedisVectorStoreFixture() Properties = new List { new VectorStoreRecordKeyProperty("HotelId", typeof(string)), - new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsFilterable = true }, - new VectorStoreRecordDataProperty("HotelCode", typeof(int)) { IsFilterable = true }, - new VectorStoreRecordDataProperty("Description", typeof(string)) { IsFullTextSearchable = true }, + new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsIndexed = true }, + new VectorStoreRecordDataProperty("HotelCode", typeof(int)) { IsIndexed = true }, + new VectorStoreRecordDataProperty("Description", typeof(string)) { IsFullTextIndexed = true }, new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 4 }, - new VectorStoreRecordDataProperty("Tags", typeof(string[])) { IsFilterable = true }, - new VectorStoreRecordDataProperty("FTSTags", typeof(string[])) { IsFullTextSearchable = true }, + new VectorStoreRecordDataProperty("Tags", typeof(string[])) { IsIndexed = true }, + new VectorStoreRecordDataProperty("FTSTags", typeof(string[])) { IsFullTextIndexed = true }, new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool)) { StoragePropertyName = "parking_is_included" }, new VectorStoreRecordDataProperty("LastRenovationDate", typeof(DateTimeOffset)), new VectorStoreRecordDataProperty("Rating", typeof(double)), @@ -57,9 +57,9 @@ public RedisVectorStoreFixture() Properties = new List { new VectorStoreRecordKeyProperty("HotelId", typeof(string)), - new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsFilterable = true }, - new VectorStoreRecordDataProperty("HotelCode", typeof(int)) { IsFilterable = true }, - new VectorStoreRecordDataProperty("Description", typeof(string)) { IsFullTextSearchable = true }, + new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsIndexed = true }, + new VectorStoreRecordDataProperty("HotelCode", typeof(int)) { IsIndexed = true }, + new VectorStoreRecordDataProperty("Description", typeof(string)) { IsFullTextIndexed = true }, new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { 
Dimensions = 4 }, new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool)) { StoragePropertyName = "parking_is_included" }, new VectorStoreRecordDataProperty("Rating", typeof(double)), diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteHotel.cs index 761b0ce9631f..784736e9dd05 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteHotel.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteHotel.cs @@ -12,7 +12,7 @@ public record SqliteHotel() public TKey? HotelId { get; init; } /// A string metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string? HotelName { get; set; } /// An int metadata field. diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateHotel.cs index bfcd78c9a51c..e2442e49057e 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateHotel.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateHotel.cs @@ -16,31 +16,31 @@ public sealed record WeaviateHotel public Guid HotelId { get; init; } /// A string metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public string? HotelName { get; set; } /// An int metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public int HotelCode { get; set; } /// A float metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public float? HotelRating { get; set; } /// A bool metadata field. [JsonPropertyName("parking_is_included")] - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public bool ParkingIncluded { get; set; } /// An array metadata field. - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public List Tags { get; set; } = []; /// A data field. - [VectorStoreRecordData(IsFullTextSearchable = true, IsFilterable = true)] + [VectorStoreRecordData(IsFullTextIndexed = true, IsIndexed = true)] public string Description { get; set; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public DateTimeOffset Timestamp { get; set; } /// A vector field. 
diff --git a/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs b/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs index 427f0a123b61..df857b5f26e7 100644 --- a/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs +++ b/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs @@ -140,7 +140,7 @@ protected sealed class DataModel [VectorStoreRecordData] public required string Link { get; init; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public required string Tag { get; init; } [VectorStoreRecordVector(1536)] diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs index 7882846ab2a8..8f0fea275978 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs @@ -160,7 +160,7 @@ public sealed class DataModel [VectorStoreRecordData] public required string Text { get; init; } - [VectorStoreRecordData(IsFilterable = true)] + [VectorStoreRecordData(IsIndexed = true)] public required string Tag { get; init; } [VectorStoreRecordVector(1536)] diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs index 6b981da2b12a..aaf281c550e5 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs @@ -112,7 +112,7 @@ public void CreateTable(bool ifNotExists) [ new VectorStoreRecordKeyProperty("id", typeof(long)), new VectorStoreRecordDataProperty("simpleName", typeof(string)), - new VectorStoreRecordDataProperty("with space", typeof(int)) { IsFilterable = true }, + new VectorStoreRecordDataProperty("with space", typeof(int)) { IsIndexed = true }, new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) { Dimensions = 10 } ]); diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs index d5c468f07cb9..f8cb930a8746 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs @@ -149,7 +149,7 @@ protected override VectorStoreRecordDefinition GetRecordDefinition() Properties = [ new VectorStoreRecordKeyProperty(nameof(NoVectorRecord.Id), typeof(TKey)), - new VectorStoreRecordDataProperty(nameof(NoVectorRecord.Text), typeof(string)) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(NoVectorRecord.Text), typeof(string)) { IsIndexed = true }, ] }; diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs index a59bb04f7d8d..56e3905116aa 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs @@ -341,12 +341,12 @@ protected override VectorStoreRecordDefinition GetRecordDefinition() 
IndexKind = this.IndexKind }, - new VectorStoreRecordDataProperty(nameof(FilterRecord.Int), typeof(int)) { IsFilterable = true }, - new VectorStoreRecordDataProperty(nameof(FilterRecord.String), typeof(string)) { IsFilterable = true }, - new VectorStoreRecordDataProperty(nameof(FilterRecord.Bool), typeof(bool)) { IsFilterable = true }, - new VectorStoreRecordDataProperty(nameof(FilterRecord.Int2), typeof(int)) { IsFilterable = true }, - new VectorStoreRecordDataProperty(nameof(FilterRecord.StringArray), typeof(string[])) { IsFilterable = true }, - new VectorStoreRecordDataProperty(nameof(FilterRecord.StringList), typeof(List)) { IsFilterable = true } + new VectorStoreRecordDataProperty(nameof(FilterRecord.Int), typeof(int)) { IsIndexed = true }, + new VectorStoreRecordDataProperty(nameof(FilterRecord.String), typeof(string)) { IsIndexed = true }, + new VectorStoreRecordDataProperty(nameof(FilterRecord.Bool), typeof(bool)) { IsIndexed = true }, + new VectorStoreRecordDataProperty(nameof(FilterRecord.Int2), typeof(int)) { IsIndexed = true }, + new VectorStoreRecordDataProperty(nameof(FilterRecord.StringArray), typeof(string[])) { IsIndexed = true }, + new VectorStoreRecordDataProperty(nameof(FilterRecord.StringList), typeof(List)) { IsIndexed = true } ] }; diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs index 38e4bdf85c35..224e8eeb7320 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs @@ -184,8 +184,8 @@ protected override VectorStoreRecordDefinition GetRecordDefinition() Properties = new List() { new VectorStoreRecordKeyProperty("Key", typeof(TKey)), - new VectorStoreRecordDataProperty("Text", typeof(string)) { IsFullTextSearchable = true }, - new VectorStoreRecordDataProperty("Code", typeof(int)) { IsFilterable = true }, + new VectorStoreRecordDataProperty("Text", typeof(string)) { IsFullTextIndexed = true }, + new VectorStoreRecordDataProperty("Code", typeof(int)) { IsIndexed = true }, new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { Dimensions = 4, IndexKind = this.IndexKind }, } }; @@ -237,9 +237,9 @@ protected override VectorStoreRecordDefinition GetRecordDefinition() Properties = new List() { new VectorStoreRecordKeyProperty("Key", typeof(TKey)), - new VectorStoreRecordDataProperty("Text1", typeof(string)) { IsFullTextSearchable = true }, - new VectorStoreRecordDataProperty("Text2", typeof(string)) { IsFullTextSearchable = true }, - new VectorStoreRecordDataProperty("Code", typeof(int)) { IsFilterable = true }, + new VectorStoreRecordDataProperty("Text1", typeof(string)) { IsFullTextIndexed = true }, + new VectorStoreRecordDataProperty("Text2", typeof(string)) { IsFullTextIndexed = true }, + new VectorStoreRecordDataProperty("Code", typeof(int)) { IsIndexed = true }, new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { Dimensions = 4, IndexKind = this.IndexKind }, } }; diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs index 370a77fae976..e7c8fb6857ca 100644 
--- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs @@ -53,8 +53,8 @@ protected override VectorStoreRecordDefinition GetRecordDefinition() IndexKind = this.IndexKind }, - new VectorStoreRecordDataProperty(nameof(SimpleRecord.Number), typeof(int)) { IsFilterable = true }, - new VectorStoreRecordDataProperty(nameof(SimpleRecord.Text), typeof(string)) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(SimpleRecord.Number), typeof(int)) { IsIndexed = true }, + new VectorStoreRecordDataProperty(nameof(SimpleRecord.Text), typeof(string)) { IsIndexed = true }, ] }; } diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs index e811372d0735..12cb73f25861 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs @@ -171,8 +171,8 @@ private VectorStoreRecordDefinition GetRecordDefinition(string distanceFunction) DistanceFunction = distanceFunction, IndexKind = this.IndexKind }, - new VectorStoreRecordDataProperty(nameof(SearchRecord.Int), typeof(int)) { IsFilterable = true }, - new VectorStoreRecordDataProperty(nameof(SearchRecord.String), typeof(string)) { IsFilterable = true }, + new VectorStoreRecordDataProperty(nameof(SearchRecord.Int), typeof(int)) { IsIndexed = true }, + new VectorStoreRecordDataProperty(nameof(SearchRecord.String), typeof(string)) { IsIndexed = true }, ] }; From 137e284ba51b028bb7679951daaf0d717f444bb4 Mon Sep 17 00:00:00 2001 From: Adam Sitnik Date: Wed, 9 Apr 2025 18:26:27 +0200 Subject: [PATCH 37/63] .Net: enforce TRecord : notnull (#11467) fixes #11131 --- .../TextEmbeddingVectorStore.cs | 1 + .../TextEmbeddingVectorStoreExtensions.cs | 1 + ...extEmbeddingVectorStoreRecordCollection.cs | 1 + .../Concepts/Memory/VectorStoreExtensions.cs | 2 + .../AzureAISearchFactory.cs | 1 + .../PineconeFactory.cs | 1 + .../QdrantFactory.cs | 1 + .../RedisFactory.cs | 1 + .../VectorStore_DataIngestion_CustomMapper.cs | 1 + .../Concepts/Search/VectorStore_TextSearch.cs | 1 + .../Extensions/VectorStoreExtensions.cs | 1 + .../MCPServer/Program.cs | 2 +- .../InMemoryVectorStoreFixture.cs | 1 + .../AzureAISearchKernelBuilderExtensions.cs | 3 ++ ...zureAISearchServiceCollectionExtensions.cs | 5 ++- .../AzureAISearchVectorStore.cs | 1 + ...zureAISearchVectorStoreRecordCollection.cs | 1 + ...earchVectorStoreRecordCollectionFactory.cs | 3 +- ...eCosmosDBMongoDBKernelBuilderExtensions.cs | 2 + ...mosDBMongoDBServiceCollectionExtensions.cs | 4 +- .../AzureCosmosDBMongoDBVectorStore.cs | 1 + ...mosDBMongoDBVectorStoreRecordCollection.cs | 1 + ...ngoDBVectorStoreRecordCollectionFactory.cs | 3 +- ...ureCosmosDBNoSQLKernelBuilderExtensions.cs | 2 + ...osmosDBNoSQLServiceCollectionExtensions.cs | 4 +- .../AzureCosmosDBNoSQLVectorStore.cs | 1 + ...osmosDBNoSQLVectorStoreRecordCollection.cs | 1 + ...NoSQLVectorStoreRecordCollectionFactory.cs | 3 +- .../InMemoryKernelBuilderExtensions.cs | 1 + .../InMemoryServiceCollectionExtensions.cs | 1 + .../InMemoryVectorStore.cs | 1 + .../InMemoryVectorStoreExtensions.cs | 2 + 
.../InMemoryVectorStoreRecordCollection.cs | 5 ++- ...ngoDBVectorStoreRecordCollectionFactory.cs | 3 +- .../MongoDBServiceCollectionExtensions.cs | 4 +- .../MongoDBVectorStore.cs | 1 + .../MongoDBVectorStoreRecordCollection.cs | 1 + ...econeVectorStoreRecordCollectionFactory.cs | 3 +- .../PineconeKernelBuilderExtensions.cs | 2 + .../PineconeServiceCollectionExtensions.cs | 4 +- .../PineconeVectorStore.cs | 1 + .../PineconeVectorStoreRecordCollection.cs | 1 + ...tgresVectorStoreRecordCollectionFactory.cs | 3 +- .../PostgresServiceCollectionExtensions.cs | 3 ++ .../PostgresVectorStore.cs | 1 + .../PostgresVectorStoreRecordCollection.cs | 1 + ...drantVectorStoreRecordCollectionFactory.cs | 3 +- .../QdrantKernelBuilderExtensions.cs | 2 + .../QdrantServiceCollectionExtensions.cs | 3 ++ .../QdrantVectorStore.cs | 1 + .../QdrantVectorStoreRecordCollection.cs | 1 + ...RedisVectorStoreRecordCollectionFactory.cs | 3 +- ...RedisHashSetVectorStoreRecordCollection.cs | 2 +- .../RedisJsonVectorStoreRecordCollection.cs | 1 + .../RedisKernelBuilderExtensions.cs | 4 ++ .../RedisServiceCollectionExtensions.cs | 6 ++- .../RedisVectorStore.cs | 1 + .../SqlServerVectorStore.cs | 4 +- .../SqlServerVectorStoreRecordCollection.cs | 8 ++-- ...qliteVectorStoreRecordCollectionFactory.cs | 3 +- .../SqliteServiceCollectionExtensions.cs | 3 +- .../SqliteVectorStore.cs | 1 + .../SqliteVectorStoreRecordCollection.cs | 1 + ...viateVectorStoreRecordCollectionFactory.cs | 3 +- .../WeaviateKernelBuilderExtensions.cs | 1 + .../WeaviateServiceCollectionExtensions.cs | 3 +- .../WeaviateVectorStore.cs | 1 + .../WeaviateVectorStoreRecordCollection.cs | 1 + .../RedisKernelBuilderExtensionsTests.cs | 4 +- .../RedisServiceCollectionExtensionsTests.cs | 4 +- .../CompatibilitySuppressions.xml | 42 +++++++++++++++++++ .../VectorData.Abstractions.csproj | 3 +- .../VectorStorage/IVectorStore.cs | 3 +- .../IVectorStoreRecordCollection.cs | 1 + .../BaseVectorStoreRecordCollectionTests.cs | 2 +- .../Connectors/Memory/BaseVectorStoreTests.cs | 1 + .../Memory/Sqlite/SqliteVectorStoreFixture.cs | 2 +- .../Data/BaseVectorStoreTextSearchTests.cs | 1 + .../Data/VectorStoreExtensions.cs | 2 + .../Collections/CollectionConformanceTests.cs | 10 ++--- .../Support/TestStore.cs | 1 + .../Support/VectorStoreCollectionFixture.cs | 1 + 82 files changed, 182 insertions(+), 39 deletions(-) diff --git a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs index 4fd62592adf3..47ca0933d53f 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs @@ -34,6 +34,7 @@ public TextEmbeddingVectorStore(IVectorStore decoratedVectorStore, ITextEmbeddin /// public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { var collection = this._decoratedVectorStore.GetCollection(name, vectorStoreRecordDefinition); var embeddingStore = new TextEmbeddingVectorStoreRecordCollection(collection, this._textEmbeddingGenerationService); diff --git a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreExtensions.cs b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreExtensions.cs index e1b6c779fdb8..edda917b99cd 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreExtensions.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreExtensions.cs @@ -34,6 +34,7 @@ public static IVectorStore UseTextEmbeddingGeneration(this IVectorStore vectorSt /// The with text embedding added. public static IVectorStoreRecordCollection UseTextEmbeddingGeneration(this IVectorStoreRecordCollection vectorStoreRecordCollection, ITextEmbeddingGenerationService textEmbeddingGenerationService) where TKey : notnull + where TRecord : notnull { return new TextEmbeddingVectorStoreRecordCollection(vectorStoreRecordCollection, textEmbeddingGenerationService); } diff --git a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs index faa506d71cb7..14b7352b62d3 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs @@ -20,6 +20,7 @@ namespace Memory.VectorStoreEmbeddingGeneration; public class TextEmbeddingVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorizableTextSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix where TKey : notnull + where TRecord : notnull { /// The decorated . private readonly IVectorStoreRecordCollection _decoratedVectorStoreRecordCollection; diff --git a/dotnet/samples/Concepts/Memory/VectorStoreExtensions.cs b/dotnet/samples/Concepts/Memory/VectorStoreExtensions.cs index 3d54787aee79..3a2183ba34ee 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreExtensions.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreExtensions.cs @@ -45,6 +45,7 @@ internal static async Task> CreateCo ITextEmbeddingGenerationService embeddingGenerationService, CreateRecordFromString createRecord) where TKey : notnull + where TRecord : notnull { // Get and create collection if it doesn't exist. var collection = vectorStore.GetCollection(collectionName); @@ -80,6 +81,7 @@ internal static async Task> CreateCo ITextEmbeddingGenerationService embeddingGenerationService, CreateRecordFromTextSearchResult createRecord) where TKey : notnull + where TRecord : notnull { // Get and create collection if it doesn't exist. 
var collection = vectorStore.GetCollection(collectionName); diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs index a837e38d0ad8..28c6d6350313 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs @@ -50,6 +50,7 @@ private sealed class AzureAISearchLangchainInteropVectorStore( public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { if (typeof(TKey) != typeof(string) || typeof(TRecord) != typeof(LangchainDocument)) { diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs index 76d671debcbe..d66a245df759 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs @@ -45,6 +45,7 @@ private sealed class PineconeLangchainInteropVectorStore( public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { if (typeof(TKey) != typeof(string) || typeof(TRecord) != typeof(LangchainDocument)) { diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs index 6932774c785d..5219963a29b1 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs @@ -48,6 +48,7 @@ private sealed class QdrantLangchainInteropVectorStore( public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { // Create a Qdrant collection. To be compatible with Langchain // we need to use a custom record definition that matches the diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs index 83a642bbe444..6bfe7ce90d87 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs @@ -45,6 +45,7 @@ private sealed class RedisLangchainInteropVectorStore( public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { if (typeof(TKey) != typeof(string) || typeof(TRecord) != typeof(LangchainDocument)) { diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs index 139412bdfd79..ab9fa55daa4c 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs @@ -142,6 +142,7 @@ private sealed class CustomRedisVectorStore( public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { // If the record definition is the glossary definition and the record type is the generic data model, inject the custom mapper into the collection options. if (vectorStoreRecordDefinition == s_glossaryDefinition && typeof(TRecord) == typeof(GenericDataModel)) diff --git a/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs b/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs index 490a379c5cd8..4015bd1bb209 100644 --- a/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs +++ b/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs @@ -122,6 +122,7 @@ internal static async Task> CreateCo ITextEmbeddingGenerationService embeddingGenerationService, CreateRecord createRecord) where TKey : notnull + where TRecord : notnull { // Get and create collection if it doesn't exist. var collection = vectorStore.GetCollection(collectionName); diff --git a/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Extensions/VectorStoreExtensions.cs b/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Extensions/VectorStoreExtensions.cs index dacb15ff410a..8d06423301e0 100644 --- a/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Extensions/VectorStoreExtensions.cs +++ b/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Extensions/VectorStoreExtensions.cs @@ -35,6 +35,7 @@ public static async Task> CreateColl ITextEmbeddingGenerationService embeddingGenerationService, CreateRecordFromString createRecord) where TKey : notnull + where TRecord : notnull { // Get and create collection if it doesn't exist. var collection = vectorStore.GetCollection(collectionName); diff --git a/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Program.cs b/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Program.cs index be56fd48db98..c5928d02b7b3 100644 --- a/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Program.cs +++ b/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Program.cs @@ -119,7 +119,7 @@ static TextDataModel CreateRecord(string text, ReadOnlyMemory embedding) ReadOnlyMemory promptEmbedding = await embeddingGenerationService.GenerateEmbeddingAsync(prompt, cancellationToken: cancellationToken); // Retrieve top three matching records from the vector store - VectorSearchResults result = await vsCollection.VectorizedSearchAsync(promptEmbedding, new() { Top = 3 }, cancellationToken); + VectorSearchResults result = await vsCollection.VectorizedSearchAsync(promptEmbedding, top: 3, cancellationToken: cancellationToken); // Return the records as resource contents List contents = []; diff --git a/dotnet/samples/GettingStartedWithTextSearch/InMemoryVectorStoreFixture.cs b/dotnet/samples/GettingStartedWithTextSearch/InMemoryVectorStoreFixture.cs index 8e8279072e70..23da3fff00ea 100644 --- a/dotnet/samples/GettingStartedWithTextSearch/InMemoryVectorStoreFixture.cs +++ b/dotnet/samples/GettingStartedWithTextSearch/InMemoryVectorStoreFixture.cs @@ -113,6 +113,7 @@ private async Task> CreateCollection string[] entries, CreateRecord createRecord) where TKey : notnull + where TRecord : notnull { // Get and create collection if it doesn't exist. 
var collection = this.InMemoryVectorStore.GetCollection(this.CollectionName); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs index 5ebd018354be..fb93ed16d0d3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs @@ -73,6 +73,7 @@ public static IKernelBuilder AddAzureAISearchVectorStoreRecordCollection? options = default, string? serviceId = default) + where TRecord : notnull { builder.Services.AddAzureAISearchVectorStoreRecordCollection(collectionName, options, serviceId); return builder; @@ -97,6 +98,7 @@ public static IKernelBuilder AddAzureAISearchVectorStoreRecordCollection? options = default, string? serviceId = default) + where TRecord : notnull { builder.Services.AddAzureAISearchVectorStoreRecordCollection(collectionName, endpoint, tokenCredential, options, serviceId); return builder; @@ -121,6 +123,7 @@ public static IKernelBuilder AddAzureAISearchVectorStoreRecordCollection? options = default, string? serviceId = default) + where TRecord : notnull { builder.Services.AddAzureAISearchVectorStoreRecordCollection(collectionName, endpoint, credential, options, serviceId); return builder; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs index 0daa73595cbd..3bed2d72bae4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs @@ -122,6 +122,7 @@ public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection? options = default, string? serviceId = default) + where TRecord : notnull { // If we are not constructing the SearchIndexClient, add the IVectorStore as transient, since we // cannot make assumptions about how SearchIndexClient is being managed. @@ -162,6 +163,7 @@ public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection? options = default, string? serviceId = default) + where TRecord : notnull { Verify.NotNull(endpoint); Verify.NotNull(tokenCredential); @@ -205,6 +207,7 @@ public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection? options = default, string? serviceId = default) + where TRecord : notnull { Verify.NotNull(endpoint); Verify.NotNull(credential); @@ -235,7 +238,7 @@ public static IServiceCollection AddAzureAISearchVectorStoreRecordCollectionThe type of the data model that the collection should contain. /// The service collection to register on. /// The service id that the registrations should use. - private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) + private static void AddVectorizedSearch(IServiceCollection services, string? 
serviceId) where TRecord : notnull { services.AddKeyedTransient>( serviceId, diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs index d5830cba605b..fd756592f099 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs @@ -50,6 +50,7 @@ public AzureAISearchVectorStore(SearchIndexClient searchIndexClient, AzureAISear /// public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { #pragma warning disable CS0618 // IAzureAISearchVectorStoreRecordCollectionFactor is obsolete if (this._options.VectorStoreCollectionFactory is not null) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index 8831467ada89..c5c8b433eea7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -29,6 +29,7 @@ public sealed class AzureAISearchVectorStoreRecordCollection : IVectorizableTextSearch, IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix + where TRecord : notnull { /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/IAzureAISearchVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/IAzureAISearchVectorStoreRecordCollectionFactory.cs index 2c9def54ae18..14f094d659d9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/IAzureAISearchVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/IAzureAISearchVectorStoreRecordCollectionFactory.cs @@ -22,5 +22,6 @@ public interface IAzureAISearchVectorStoreRecordCollectionFactory /// An optional record definition that defines the schema of the record type. If not present, attributes on will be used. /// The new instance of . IVectorStoreRecordCollection CreateVectorStoreRecordCollection(SearchIndexClient searchIndexClient, string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition) - where TKey : notnull; + where TKey : notnull + where TRecord : notnull; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBKernelBuilderExtensions.cs index d4e77e583ff3..12c47576f243 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBKernelBuilderExtensions.cs @@ -66,6 +66,7 @@ public static IKernelBuilder AddAzureCosmosDBMongoDBVectorStoreRecordCollection< string collectionName, AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions? options = default, string? 
serviceId = default) + where TRecord : notnull { builder.Services.AddAzureCosmosDBMongoDBVectorStoreRecordCollection(collectionName, options, serviceId); return builder; @@ -90,6 +91,7 @@ public static IKernelBuilder AddAzureCosmosDBMongoDBVectorStoreRecordCollection< string databaseName, AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) + where TRecord : notnull { builder.Services.AddAzureCosmosDBMongoDBVectorStoreRecordCollection(collectionName, connectionString, databaseName, options, serviceId); return builder; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBServiceCollectionExtensions.cs index f4f77082a271..105c15c1414c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBServiceCollectionExtensions.cs @@ -93,6 +93,7 @@ public static IServiceCollection AddAzureCosmosDBMongoDBVectorStoreRecordCollect string collectionName, AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) + where TRecord : notnull { services.AddKeyedTransient>( serviceId, @@ -128,6 +129,7 @@ public static IServiceCollection AddAzureCosmosDBMongoDBVectorStoreRecordCollect string databaseName, AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) + where TRecord : notnull { services.AddKeyedSingleton>( serviceId, @@ -155,7 +157,7 @@ public static IServiceCollection AddAzureCosmosDBMongoDBVectorStoreRecordCollect /// The type of the data model that the collection should contain. /// The service collection to register on. /// The service id that the registrations should use. - private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) + private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) where TRecord : notnull { services.AddKeyedTransient>( serviceId, diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs index 469eb60354fe..3f7d66e5c05f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs @@ -48,6 +48,7 @@ public AzureCosmosDBMongoDBVectorStore(IMongoDatabase mongoDatabase, AzureCosmos /// public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { #pragma warning disable CS0618 // IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index 57f5e48140ae..58deb0d28749 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -25,6 +25,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; #pragma warning disable CA1711 // Identifiers should not have incorrect suffix public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix + where TRecord : notnull { /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory.cs index 5aeec3f3f4ff..e1ca5eebcbe9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory.cs @@ -22,5 +22,6 @@ public interface IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory /// An optional record definition that defines the schema of the record type. If not present, attributes on will be used. /// The new instance of . IVectorStoreRecordCollection CreateVectorStoreRecordCollection(IMongoDatabase mongoDatabase, string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition) - where TKey : notnull; + where TKey : notnull + where TRecord : notnull; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKernelBuilderExtensions.cs index 1e4905af255b..f3fa50210626 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKernelBuilderExtensions.cs @@ -71,6 +71,7 @@ public static IKernelBuilder AddAzureCosmosDBNoSQLVectorStoreRecordCollection? options = default, string? serviceId = default) + where TRecord : notnull { builder.Services.AddAzureCosmosDBNoSQLVectorStoreRecordCollection(collectionName, options, serviceId); return builder; @@ -95,6 +96,7 @@ public static IKernelBuilder AddAzureCosmosDBNoSQLVectorStoreRecordCollection? options = default, string? 
serviceId = default) + where TRecord : notnull { builder.Services.AddAzureCosmosDBNoSQLVectorStoreRecordCollection(collectionName, connectionString, databaseName, options, serviceId); return builder; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLServiceCollectionExtensions.cs index 1c70d360ee62..ad0645feb098 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLServiceCollectionExtensions.cs @@ -94,6 +94,7 @@ public static IServiceCollection AddAzureCosmosDBNoSQLVectorStoreRecordCollectio string collectionName, AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) + where TRecord : notnull { services.AddKeyedTransient>( serviceId, @@ -129,6 +130,7 @@ public static IServiceCollection AddAzureCosmosDBNoSQLVectorStoreRecordCollectio string databaseName, AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) + where TRecord : notnull { services.AddKeyedSingleton>( serviceId, @@ -157,7 +159,7 @@ public static IServiceCollection AddAzureCosmosDBNoSQLVectorStoreRecordCollectio /// The type of the data model that the collection should contain. /// The service collection to register on. /// The service id that the registrations should use. - private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) + private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) where TRecord : notnull { services.AddKeyedTransient>( serviceId, diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs index 680dd52a4b83..d8d30c4cc113 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs @@ -48,6 +48,7 @@ public AzureCosmosDBNoSQLVectorStore(Database database, AzureCosmosDBNoSQLVector /// public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { #pragma warning disable CS0618 // IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index 48cb900956ca..1196a6758031 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -29,6 +29,7 @@ public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect + where TRecord : notnull { /// Metadata about vector store record collection. 
private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory.cs index 8d51dbb555b0..626677bccc40 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory.cs @@ -25,5 +25,6 @@ IVectorStoreRecordCollection CreateVectorStoreRecordCollection? options = default, string? serviceId = default) where TKey : notnull + where TRecord : notnull { builder.Services.AddInMemoryVectorStoreRecordCollection(collectionName, options, serviceId); return builder; diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryServiceCollectionExtensions.cs index b541aad65b98..f7d0aadf111b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryServiceCollectionExtensions.cs @@ -42,6 +42,7 @@ public static IServiceCollection AddInMemoryVectorStoreRecordCollection? options = default, string? serviceId = default) where TKey : notnull + where TRecord : notnull { services.AddKeyedSingleton>( serviceId, diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs index 3710cfea1dcd..f2cf7def2360 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs @@ -53,6 +53,7 @@ internal InMemoryVectorStore(ConcurrentDictionary public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { if (this._internalCollectionTypes.TryGetValue(name, out var existingCollectionDataType) && existingCollectionDataType != typeof(TRecord)) { diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreExtensions.cs index b81f3e16d062..7975f783d5d3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreExtensions.cs @@ -32,6 +32,7 @@ public static async Task SerializeCollectionAsJsonAsync( Stream stream, JsonSerializerOptions? jsonSerializerOptions = null) where TKey : notnull + where TRecord : notnull { // Get collection and verify that it exists. var collection = vectorStore.GetCollection(collectionName); @@ -59,6 +60,7 @@ public static async Task SerializeCollectionAsJsonAsync( this InMemoryVectorStore vectorStore, Stream stream) where TKey : notnull + where TRecord : notnull { IVectorStoreRecordCollection? 
collection = null; diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index 769635a99101..b2814654b940 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -22,6 +22,7 @@ namespace Microsoft.SemanticKernel.Connectors.InMemory; public sealed class InMemoryVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix where TKey : notnull + where TRecord : notnull { /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; @@ -86,7 +87,7 @@ public InMemoryVectorStoreRecordCollection(string collectionName, InMemoryVector // TODO: Make generic to avoid boxing #pragma warning disable MEVD9000 // KeyResolver and VectorResolver are experimental this._keyResolver = this._options.KeyResolver is null - ? record => (TKey)this._model.KeyProperty.GetValueAsObject(record!)! + ? record => (TKey)this._model.KeyProperty.GetValueAsObject(record)! : this._options.KeyResolver; this._vectorResolver = this._options.VectorResolver is not null @@ -103,7 +104,7 @@ public InMemoryVectorStoreRecordCollection(string collectionName, InMemoryVector throw new InvalidOperationException($"The property '{vectorPropertyName}' isn't a vector property."); } - return property.GetValueAsObject(record!); + return property.GetValueAsObject(record); }; #pragma warning restore MEVD9000 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/IMongoDBVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/IMongoDBVectorStoreRecordCollectionFactory.cs index 0726870eb56c..d395b17b30e6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/IMongoDBVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/IMongoDBVectorStoreRecordCollectionFactory.cs @@ -22,5 +22,6 @@ public interface IMongoDBVectorStoreRecordCollectionFactory /// An optional record definition that defines the schema of the record type. If not present, attributes on will be used. /// The new instance of . IVectorStoreRecordCollection CreateVectorStoreRecordCollection(IMongoDatabase mongoDatabase, string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition) - where TKey : notnull; + where TKey : notnull + where TRecord : notnull; } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBServiceCollectionExtensions.cs index b8e89aab82da..4df82f2bb4c1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBServiceCollectionExtensions.cs @@ -93,6 +93,7 @@ public static IServiceCollection AddMongoDBVectorStoreRecordCollection( string collectionName, MongoDBVectorStoreRecordCollectionOptions? options = default, string? 
serviceId = default) + where TRecord : notnull { services.AddKeyedTransient>( serviceId, @@ -128,6 +129,7 @@ public static IServiceCollection AddMongoDBVectorStoreRecordCollection( string databaseName, MongoDBVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) + where TRecord : notnull { services.AddKeyedSingleton>( serviceId, @@ -155,7 +157,7 @@ public static IServiceCollection AddMongoDBVectorStoreRecordCollection( /// The type of the data model that the collection should contain. /// The service collection to register on. /// The service id that the registrations should use. - private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) + private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) where TRecord : notnull { services.AddKeyedTransient>( serviceId, diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs index 180ef381462d..c9923703dea1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs @@ -48,6 +48,7 @@ public MongoDBVectorStore(IMongoDatabase mongoDatabase, MongoDBVectorStoreOption /// public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { #pragma warning disable CS0618 // IMongoDBVectorStoreRecordCollectionFactoryß is obsolete if (this._options.VectorStoreCollectionFactory is not null) diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index f94b47f6ad13..280fbed8c7a9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -22,6 +22,7 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; #pragma warning disable CA1711 // Identifiers should not have incorrect suffix public sealed class MongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix + where TRecord : notnull { /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeVectorStoreRecordCollectionFactory.cs index 25b6efae42de..347412b3e861 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeVectorStoreRecordCollectionFactory.cs @@ -22,5 +22,6 @@ public interface IPineconeVectorStoreRecordCollectionFactory /// An optional record definition that defines the schema of the record type. If not present, attributes on will be used. /// The new instance of . IVectorStoreRecordCollection CreateVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition) - where TKey : notnull; + where TKey : notnull + where TRecord : notnull; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs index ed6dfc37033a..d27bd325e7a4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs @@ -55,6 +55,7 @@ public static IKernelBuilder AddPineconeVectorStoreRecordCollection( string collectionName, PineconeVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) + where TRecord : notnull { builder.Services.AddPineconeVectorStoreRecordCollection(collectionName, options, serviceId); return builder; @@ -77,6 +78,7 @@ public static IKernelBuilder AddPineconeVectorStoreRecordCollection( string apiKey, PineconeVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) + where TRecord : notnull { builder.Services.AddPineconeVectorStoreRecordCollection(collectionName, apiKey, options, serviceId); return builder; diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeServiceCollectionExtensions.cs index 5e7658eb923f..9b5c8e135355 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeServiceCollectionExtensions.cs @@ -78,6 +78,7 @@ public static IServiceCollection AddPineconeVectorStoreRecordCollection string collectionName, PineconeVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) + where TRecord : notnull { // If we are not constructing the PineconeClient, add the IVectorStore as transient, since we // cannot make assumptions about how PineconeClient is being managed. @@ -116,6 +117,7 @@ public static IServiceCollection AddPineconeVectorStoreRecordCollection string apiKey, PineconeVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) + where TRecord : notnull { services.AddKeyedSingleton>( serviceId, @@ -141,7 +143,7 @@ public static IServiceCollection AddPineconeVectorStoreRecordCollection /// The type of the data model that the collection should contain. /// The service collection to register on. /// The service id that the registrations should use. - private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) + private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) where TRecord : notnull { services.AddKeyedTransient>( serviceId, diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs index 9aa4b066d210..81a6b70b5ed8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs @@ -45,6 +45,7 @@ public PineconeVectorStore(Sdk.PineconeClient pineconeClient, PineconeVectorStor /// public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { #pragma warning disable CS0618 // IPineconeVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 613383381458..bde6cdeb5a29 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -21,6 +21,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; #pragma warning disable CA1711 // Identifiers should not have incorrect suffix public sealed class PineconeVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix + where TRecord : notnull { private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreRecordCollectionFactory.cs index 58384ba767ac..58af348ac87d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreRecordCollectionFactory.cs @@ -22,5 +22,6 @@ public interface IPostgresVectorStoreRecordCollectionFactory /// An optional record definition that defines the schema of the record type. If not present, attributes on will be used. /// The new instance of . IVectorStoreRecordCollection CreateVectorStoreRecordCollection(NpgsqlDataSource dataSource, string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition) - where TKey : notnull; + where TKey : notnull + where TRecord : notnull; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresServiceCollectionExtensions.cs index 983b8e7db443..23f8d131bd4b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresServiceCollectionExtensions.cs @@ -91,6 +91,7 @@ public static IServiceCollection AddPostgresVectorStoreRecordCollection? options = default, string? serviceId = default) where TKey : notnull + where TRecord : notnull { services.AddKeyedTransient>( serviceId, @@ -126,6 +127,7 @@ public static IServiceCollection AddPostgresVectorStoreRecordCollection? options = default, string? serviceId = default) where TKey : notnull + where TRecord : notnull { string? npgsqlServiceId = serviceId == null ? default : $"{serviceId}_NpgsqlDataSource"; // Register NpgsqlDataSource to ensure proper disposal. @@ -161,6 +163,7 @@ public static IServiceCollection AddPostgresVectorStoreRecordCollectionThe service id that the registrations should use. private static void AddVectorizedSearch(IServiceCollection services, string? 
serviceId) where TKey : notnull + where TRecord : notnull { services.AddKeyedTransient>( serviceId, diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs index 1d51a5765363..3c034e135c37 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs @@ -68,6 +68,7 @@ public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cance /// public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { #pragma warning disable CS0618 // IPostgresVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index 21d61be689a1..24821c7f18d8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -22,6 +22,7 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; public sealed class PostgresVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix where TKey : notnull + where TRecord : notnull { /// public string CollectionName { get; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorStoreRecordCollectionFactory.cs index 32dd7ed47d91..994e3629e81b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorStoreRecordCollectionFactory.cs @@ -22,5 +22,6 @@ public interface IQdrantVectorStoreRecordCollectionFactory /// An optional record definition that defines the schema of the record type. If not present, attributes on will be used. /// The new instance of . IVectorStoreRecordCollection CreateVectorStoreRecordCollection(QdrantClient qdrantClient, string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition) - where TKey : notnull; + where TKey : notnull + where TRecord : notnull; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantKernelBuilderExtensions.cs index 2f71d42ab074..8a97f8536920 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantKernelBuilderExtensions.cs @@ -59,6 +59,7 @@ public static IKernelBuilder AddQdrantVectorStoreRecordCollection QdrantVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) where TKey : notnull + where TRecord : notnull { builder.Services.AddQdrantVectorStoreRecordCollection(collectionName, options, serviceId); return builder; @@ -89,6 +90,7 @@ public static IKernelBuilder AddQdrantVectorStoreRecordCollection QdrantVectorStoreRecordCollectionOptions? options = default, string? 
serviceId = default) where TKey : notnull + where TRecord : notnull { builder.Services.AddQdrantVectorStoreRecordCollection(collectionName, host, port, https, apiKey, options, serviceId); return builder; diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantServiceCollectionExtensions.cs index 693d8d94fc3b..cf68dcff7708 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantServiceCollectionExtensions.cs @@ -82,6 +82,7 @@ public static IServiceCollection AddQdrantVectorStoreRecordCollection? options = default, string? serviceId = default) where TKey : notnull + where TRecord : notnull { services.AddKeyedTransient>( serviceId, @@ -123,6 +124,7 @@ public static IServiceCollection AddQdrantVectorStoreRecordCollection? options = default, string? serviceId = default) where TKey : notnull + where TRecord : notnull { services.AddKeyedSingleton>( serviceId, @@ -148,6 +150,7 @@ public static IServiceCollection AddQdrantVectorStoreRecordCollectionThe service id that the registrations should use. private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) where TKey : notnull + where TRecord : notnull { services.AddKeyedTransient>( serviceId, diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs index 6167b8c9ef30..a2618f3ae1a4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs @@ -58,6 +58,7 @@ internal QdrantVectorStore(MockableQdrantClient qdrantClient, QdrantVectorStoreO /// public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { #pragma warning disable CS0618 // IQdrantVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index 08620a5bd5be..17ce98485767 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -25,6 +25,7 @@ public sealed class QdrantVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix + where TRecord : notnull { /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/IRedisVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/IRedisVectorStoreRecordCollectionFactory.cs index ea98a9a6308d..519ef2151eb2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/IRedisVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/IRedisVectorStoreRecordCollectionFactory.cs @@ -22,5 +22,6 @@ public interface IRedisVectorStoreRecordCollectionFactory /// An optional record definition that defines the schema of the record type. If not present, attributes on will be used. /// The new instance of . 
IVectorStoreRecordCollection CreateVectorStoreRecordCollection(IDatabase database, string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition) - where TKey : notnull; + where TKey : notnull + where TRecord : notnull; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index 31fc2d16d95d..3fadb85ba4e9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -21,7 +21,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class RedisHashSetVectorStoreRecordCollection : IVectorStoreRecordCollection +public sealed class RedisHashSetVectorStoreRecordCollection : IVectorStoreRecordCollection where TRecord : notnull #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// Metadata about vector store record collection. diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index 85ccc13f8667..f00af56f7d43 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -26,6 +26,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; #pragma warning disable CA1711 // Identifiers should not have incorrect suffix public sealed class RedisJsonVectorStoreRecordCollection : IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect suffix + where TRecord : notnull { /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisKernelBuilderExtensions.cs index 1f7ed194856f..6421c68928ae 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisKernelBuilderExtensions.cs @@ -53,6 +53,7 @@ public static IKernelBuilder AddRedisHashSetVectorStoreRecordCollection string collectionName, RedisHashSetVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) + where TRecord : notnull { builder.Services.AddRedisHashSetVectorStoreRecordCollection(collectionName, options, serviceId); return builder; @@ -75,6 +76,7 @@ public static IKernelBuilder AddRedisHashSetVectorStoreRecordCollection string redisConnectionConfiguration, RedisHashSetVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) + where TRecord : notnull { builder.Services.AddRedisHashSetVectorStoreRecordCollection(collectionName, redisConnectionConfiguration, options, serviceId); return builder; @@ -95,6 +97,7 @@ public static IKernelBuilder AddRedisJsonVectorStoreRecordCollection( string collectionName, RedisJsonVectorStoreRecordCollectionOptions? options = default, string? 
serviceId = default) + where TRecord : notnull { builder.Services.AddRedisJsonVectorStoreRecordCollection(collectionName, options, serviceId); return builder; @@ -117,6 +120,7 @@ public static IKernelBuilder AddRedisJsonVectorStoreRecordCollection( string redisConnectionConfiguration, RedisJsonVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) + where TRecord : notnull { builder.Services.AddRedisJsonVectorStoreRecordCollection(collectionName, redisConnectionConfiguration, options, serviceId); return builder; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisServiceCollectionExtensions.cs index 778c1e75a88a..feaa7fe541f6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisServiceCollectionExtensions.cs @@ -80,6 +80,7 @@ public static IServiceCollection AddRedisHashSetVectorStoreRecordCollection? options = default, string? serviceId = default) + where TRecord : notnull { services.AddKeyedTransient>( serviceId, @@ -113,6 +114,7 @@ public static IServiceCollection AddRedisHashSetVectorStoreRecordCollection? options = default, string? serviceId = default) + where TRecord : notnull { services.AddKeyedSingleton>( serviceId, @@ -144,6 +146,7 @@ public static IServiceCollection AddRedisJsonVectorStoreRecordCollection? options = default, string? serviceId = default) + where TRecord : notnull { services.AddKeyedTransient>( serviceId, @@ -177,6 +180,7 @@ public static IServiceCollection AddRedisJsonVectorStoreRecordCollection? options = default, string? serviceId = default) + where TRecord : notnull { services.AddKeyedSingleton>( serviceId, @@ -199,7 +203,7 @@ public static IServiceCollection AddRedisJsonVectorStoreRecordCollectionThe type of the data model that the collection should contain. /// The service collection to register on. /// The service id that the registrations should use. - private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) + private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) where TRecord : notnull { services.AddKeyedTransient>( serviceId, diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs index 66449d428e8d..6b481ad57ac0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs @@ -49,6 +49,7 @@ public RedisVectorStore(IDatabase database, RedisVectorStoreOptions? options = d /// public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { #pragma warning disable CS0618 // IRedisVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs index 2dc42dee74bf..38dfcffec6cc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs @@ -46,7 +46,9 @@ public SqlServerVectorStore(string connectionString, SqlServerVectorStoreOptions } /// - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + where TKey : notnull + where TRecord : notnull { Verify.NotNull(name); diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index 5076a3813277..79059665b7ca 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -19,7 +19,9 @@ namespace Microsoft.SemanticKernel.Connectors.SqlServer; #pragma warning disable CA1711 // Identifiers should not have incorrect suffix (Collection) public sealed class SqlServerVectorStoreRecordCollection #pragma warning restore CA1711 - : IVectorStoreRecordCollection where TKey : notnull + : IVectorStoreRecordCollection + where TKey : notnull + where TRecord : notnull { /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; @@ -429,7 +431,7 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, { foreach (var record in records) { - yield return ((VectorStoreGenericDataModel)(object)record!).Key; + yield return ((VectorStoreGenericDataModel)(object)record).Key; } } else @@ -437,7 +439,7 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, var keyProperty = this._model.KeyProperty; foreach (var record in records) { - yield return (TKey)keyProperty.GetValueAsObject(record!)!; + yield return (TKey)keyProperty.GetValueAsObject(record)!; } } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/ISqliteVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/ISqliteVectorStoreRecordCollectionFactory.cs index 5c75ea0ec6eb..b8fa2ba0cc53 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/ISqliteVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/ISqliteVectorStoreRecordCollectionFactory.cs @@ -24,5 +24,6 @@ IVectorStoreRecordCollection CreateVectorStoreRecordCollection? options = default, string? serviceId = default) where TKey : notnull + where TRecord : notnull { services.AddKeyedSingleton>( serviceId, @@ -109,7 +110,7 @@ public static IServiceCollection AddSqliteVectorStoreRecordCollectionThe type of the data model that the collection should contain. /// The service collection to register on. /// The service id that the registrations should use. - private static void AddVectorizedSearch(IServiceCollection services, string? 
serviceId) + private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) where TRecord : notnull where TKey : notnull => services.AddKeyedSingleton>( serviceId, diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs index afdab01e938d..c4c09c92d77e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs @@ -62,6 +62,7 @@ public SqliteVectorStore( /// public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { #pragma warning disable CS0618 // ISqliteVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index abc435cf3139..e59b8e720892 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -24,6 +24,7 @@ public sealed class SqliteVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorStoreRecordCollection #pragma warning restore CA1711 // Identifiers should not have incorrect + where TRecord : notnull { /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateVectorStoreRecordCollectionFactory.cs index 10210eb8fb82..36e4daf5075d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateVectorStoreRecordCollectionFactory.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateVectorStoreRecordCollectionFactory.cs @@ -25,5 +25,6 @@ IVectorStoreRecordCollection CreateVectorStoreRecordCollection( HttpClient? httpClient = default, WeaviateVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) + where TRecord : notnull { builder.Services.AddWeaviateVectorStoreRecordCollection(collectionName, httpClient, options, serviceId); return builder; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateServiceCollectionExtensions.cs index 7f6dfe48a404..68dc9d930a2d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateServiceCollectionExtensions.cs @@ -64,6 +64,7 @@ public static IServiceCollection AddWeaviateVectorStoreRecordCollection HttpClient? httpClient = default, WeaviateVectorStoreRecordCollectionOptions? options = default, string? serviceId = default) + where TRecord : notnull { services.AddKeyedTransient>( serviceId, @@ -86,7 +87,7 @@ public static IServiceCollection AddWeaviateVectorStoreRecordCollection /// The type of the data model that the collection should contain. /// The service collection to register on. /// The service id that the registrations should use. - private static void AddVectorizedSearch(IServiceCollection services, string? 
serviceId) + private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) where TRecord : notnull { services.AddKeyedTransient>( serviceId, diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs index 4d9f0236beab..92f569b3875d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs @@ -53,6 +53,7 @@ public WeaviateVectorStore(HttpClient httpClient, WeaviateVectorStoreOptions? op /// The collection name must start with a capital letter and contain only ASCII letters and digits. public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull + where TRecord : notnull { #pragma warning disable CS0618 // IWeaviateVectorStoreRecordCollectionFactory is obsolete if (this._options.VectorStoreCollectionFactory is not null) diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index fd6de088260d..dbb6e0c039cf 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -24,6 +24,7 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1711 // Identifiers should not have incorrect suffix public sealed class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix + where TRecord : notnull { /// Metadata about vector store record collection. 
private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisKernelBuilderExtensionsTests.cs index d83ab4ca403b..02e11b785a99 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisKernelBuilderExtensionsTests.cs @@ -69,7 +69,7 @@ private void AssertVectorStoreCreated() Assert.IsType(vectorStore); } - private void AssertHashSetVectorStoreRecordCollectionCreated() + private void AssertHashSetVectorStoreRecordCollectionCreated() where TRecord : notnull { var kernel = this._kernelBuilder.Build(); var collection = kernel.Services.GetRequiredService>(); @@ -77,7 +77,7 @@ private void AssertHashSetVectorStoreRecordCollectionCreated() Assert.IsType>(collection); } - private void AssertJsonVectorStoreRecordCollectionCreated() + private void AssertJsonVectorStoreRecordCollectionCreated() where TRecord : notnull { var kernel = this._kernelBuilder.Build(); var collection = kernel.Services.GetRequiredService>(); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisServiceCollectionExtensionsTests.cs index 8c6455b4a226..0f7322d0f7ce 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisServiceCollectionExtensionsTests.cs @@ -69,7 +69,7 @@ private void AssertVectorStoreCreated() Assert.IsType(vectorStore); } - private void AssertHashSetVectorStoreRecordCollectionCreated() + private void AssertHashSetVectorStoreRecordCollectionCreated() where TRecord : notnull { var serviceProvider = this._serviceCollection.BuildServiceProvider(); var collection = serviceProvider.GetRequiredService>(); @@ -77,7 +77,7 @@ private void AssertHashSetVectorStoreRecordCollectionCreated() Assert.IsType>(collection); } - private void AssertJsonVectorStoreRecordCollectionCreated() + private void AssertJsonVectorStoreRecordCollectionCreated() where TRecord : notnull { var serviceProvider = this._serviceCollection.BuildServiceProvider(); var collection = serviceProvider.GetRequiredService>(); diff --git a/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml index 78a93ffa29c0..66bc881859c6 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml +++ b/dotnet/src/Connectors/VectorData.Abstractions/CompatibilitySuppressions.xml @@ -484,4 +484,46 @@ lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll true + + CP0021 + M:Microsoft.Extensions.VectorData.IVectorStore.GetCollection``2(System.String,Microsoft.Extensions.VectorData.VectorStoreRecordDefinition)``1:notnull + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0021 + T:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2``1:notnull + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net462/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0021 + M:Microsoft.Extensions.VectorData.IVectorStore.GetCollection``2(System.String,Microsoft.Extensions.VectorData.VectorStoreRecordDefinition)``1:notnull + 
lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0021 + T:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2``1:notnull + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/net8.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0021 + M:Microsoft.Extensions.VectorData.IVectorStore.GetCollection``2(System.String,Microsoft.Extensions.VectorData.VectorStoreRecordDefinition)``1:notnull + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + + + CP0021 + T:Microsoft.Extensions.VectorData.IVectorStoreRecordCollection`2``1:notnull + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + lib/netstandard2.0/Microsoft.Extensions.VectorData.Abstractions.dll + true + \ No newline at end of file diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj b/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj index c70073466ff8..d6014cedc379 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj @@ -4,7 +4,8 @@ Microsoft.Extensions.VectorData.Abstractions Microsoft.Extensions.VectorData net8.0;netstandard2.0;net462 - true + + false diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs index 6810e99722be..21c47ec9c238 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs @@ -32,7 +32,8 @@ public interface IVectorStore /// /// IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) - where TKey : notnull; + where TKey : notnull + where TRecord : notnull; /// /// Retrieves the names of all the collections in the vector store. diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs index 572945ccd782..c9b0faf3409d 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs @@ -20,6 +20,7 @@ namespace Microsoft.Extensions.VectorData; public interface IVectorStoreRecordCollection : IVectorizedSearch #pragma warning restore CA1711 // Identifiers should not have incorrect suffix where TKey : notnull + where TRecord : notnull { /// /// Gets the name of the collection. diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs index 1b406a5bc61e..afcf32cad843 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs @@ -24,7 +24,7 @@ public abstract class BaseVectorStoreRecordCollectionTests protected abstract HashSet GetSupportedDistanceFunctions(); - protected abstract IVectorStoreRecordCollection GetTargetRecordCollection(string recordCollectionName, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition); + protected abstract IVectorStoreRecordCollection GetTargetRecordCollection(string recordCollectionName, VectorStoreRecordDefinition? vectorStoreRecordDefinition) where TRecord : notnull; protected virtual int DelayAfterIndexCreateInMilliseconds { get; } = 0; diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreTests.cs index 3a9aff375be3..607a847e9028 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreTests.cs @@ -14,6 +14,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory; /// public abstract class BaseVectorStoreTests(IVectorStore vectorStore) where TKey : notnull + where TRecord : notnull { protected virtual IEnumerable CollectionNames => ["listcollectionnames1", "listcollectionnames2", "listcollectionnames3"]; diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs index c62d07b62041..02dfae8cce1a 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs @@ -14,7 +14,7 @@ public class SqliteVectorStoreFixture : IDisposable public SqliteVectorStoreRecordCollection GetCollection( string collectionName, - SqliteVectorStoreRecordCollectionOptions? options = default) + SqliteVectorStoreRecordCollectionOptions? options = default) where TRecord : notnull { return new SqliteVectorStoreRecordCollection( this.ConnectionString, diff --git a/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs b/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs index df857b5f26e7..9d9d072e0b07 100644 --- a/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs +++ b/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs @@ -41,6 +41,7 @@ public static async Task> AddRecords ITextEmbeddingGenerationService embeddingGenerationService, CreateRecordFromString createRecord) where TKey : notnull + where TRecord : notnull { var lines = await File.ReadAllLinesAsync("./TestData/semantic-kernel-info.txt"); diff --git a/dotnet/src/IntegrationTests/Data/VectorStoreExtensions.cs b/dotnet/src/IntegrationTests/Data/VectorStoreExtensions.cs index 9f730ce5d259..dea981c16d9f 100644 --- a/dotnet/src/IntegrationTests/Data/VectorStoreExtensions.cs +++ b/dotnet/src/IntegrationTests/Data/VectorStoreExtensions.cs @@ -48,6 +48,7 @@ internal static async Task> CreateCo ITextEmbeddingGenerationService embeddingGenerationService, CreateRecordFromString createRecord) where TKey : notnull + where TRecord : notnull { // Get and create collection if it doesn't exist. var collection = vectorStore.GetCollection(collectionName); @@ -83,6 +84,7 @@ internal static async Task> CreateCo ITextEmbeddingGenerationService embeddingGenerationService, CreateRecordFromTextSearchResult createRecord) where TKey : notnull + where TRecord : notnull { // Get and create collection if it doesn't exist. 
var collection = vectorStore.GetCollection(collectionName); diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs index 3af9eae17e58..a323e0a09cd4 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs @@ -42,7 +42,7 @@ public Task CreateCollectionCalledMoreThanOnceThrowsVectorStoreOperationExceptio public Task CreateCollectionCalledMoreThanOnceThrowsVectorStoreOperationException_GenericDataModel() => this.CreateCollectionMoreThanOnce>(); - private async Task> GetNonExistingCollectionAsync() + private async Task> GetNonExistingCollectionAsync() where TRecord : notnull { var collectionName = fixture.GetUniqueCollectionName(); VectorStoreRecordDefinition? definition = null; @@ -70,14 +70,14 @@ private async Task> GetNonExistingCo return collection; } - private async Task DeleteNonExistingCollection() + private async Task DeleteNonExistingCollection() where TRecord : notnull { var collection = await this.GetNonExistingCollectionAsync(); await collection.DeleteCollectionAsync(); } - private async Task CreateCollection() + private async Task CreateCollection() where TRecord : notnull { var collection = await this.GetNonExistingCollectionAsync(); @@ -104,7 +104,7 @@ private async Task CreateCollection() } } - private async Task CreateCollectionIfNotExistsMoreThanOnce() + private async Task CreateCollectionIfNotExistsMoreThanOnce() where TRecord : notnull { var collection = await this.GetNonExistingCollectionAsync(); @@ -133,7 +133,7 @@ private async Task CreateCollectionIfNotExistsMoreThanOnce() } } - private async Task CreateCollectionMoreThanOnce() + private async Task CreateCollectionMoreThanOnce() where TRecord : notnull { var collection = await this.GetNonExistingCollectionAsync(); diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs index 20ce25343299..25ab73a4f8ab 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs @@ -75,6 +75,7 @@ public virtual async Task WaitForDataAsync( Expression>? filter = null, int vectorSize = 3) where TKey : notnull + where TRecord : notnull { var vector = new float[vectorSize]; for (var i = 0; i < vectorSize; i++) diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs index 6bbc59c59e42..3facc4a3f84f 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs @@ -10,6 +10,7 @@ namespace VectorDataSpecificationTests.Support; /// public abstract class VectorStoreCollectionFixture : VectorStoreFixture where TKey : notnull + where TRecord : notnull { private List? 
_testData; From 3f30faed61f5c0effb44a6246f443f03d0a153ad Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Thu, 10 Apr 2025 15:31:44 +0200 Subject: [PATCH 38/63] .Net: Replace VectorStoreGenericDataModel with Dictionary (#11392) Closes #10802 Closes #10468 Closes #10736 --- .../AzureAISearchFactory.cs | 2 +- .../PineconeFactory.cs | 2 +- .../QdrantFactory.cs | 2 +- .../RedisFactory.cs | 2 +- .../VectorStore_DataIngestion_CustomMapper.cs | 4 +- ...> VectorStore_DynamicDataModel_Interop.cs} | 68 ++-- dotnet/samples/Concepts/README.md | 2 +- ...Model.cs => Step5_Use_DynamicDataModel.cs} | 26 +- .../Step6_Use_CustomMapper.cs | 2 +- ...ureAISearchDynamicDataModelMapperTests.cs} | 128 +++---- ...ureAISearchKernelBuilderExtensionsTests.cs | 4 +- ...ISearchServiceCollectionExtensionsTests.cs | 4 +- ...ISearchVectorStoreRecordCollectionTests.cs | 18 +- .../AzureAISearchVectorStoreTests.cs | 2 +- ...osDBMongoDBKernelBuilderExtensionsTests.cs | 4 +- ...MongoDBServiceCollectionExtensionsTests.cs | 4 +- ...VectorStoreCollectionSearchMappingTests.cs | 4 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 42 +-- ...smosDBNoSQLDynamicDataModelMapperTests.cs} | 152 ++++---- ...smosDBNoSQLKernelBuilderExtensionsTests.cs | 4 +- ...DBNoSQLServiceCollectionExtensionsTests.cs | 4 +- ...LVectorStoreCollectionQueryBuilderTests.cs | 4 +- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 50 +-- ...nMemoryVectorStoreRecordCollectionTests.cs | 44 +-- ...=> AzureAISearchDynamicDataModelMapper.cs} | 40 +- .../AzureAISearchFilterTranslator.cs | 73 +++- .../AzureAISearchKernelBuilderExtensions.cs | 12 +- ...zureAISearchServiceCollectionExtensions.cs | 18 +- .../AzureAISearchVectorStore.cs | 7 +- .../AzureAISearchVectorStoreOptions.cs | 2 +- ...zureAISearchVectorStoreRecordCollection.cs | 73 ++-- ...earchVectorStoreRecordCollectionOptions.cs | 2 +- .../AzureCosmosDBMongoDBFilterTranslator.cs | 113 +++--- ...mosDBMongoDBServiceCollectionExtensions.cs | 4 +- .../AzureCosmosDBMongoDBVectorStore.cs | 7 +- .../AzureCosmosDBMongoDBVectorStoreOptions.cs | 2 +- ...mosDBMongoDBVectorStoreRecordCollection.cs | 65 ++-- ...ngoDBVectorStoreRecordCollectionOptions.cs | 2 +- ...ureCosmosDBNoSQLDynamicDataModelMapper.cs} | 48 ++- ...osmosDBNoSQLServiceCollectionExtensions.cs | 4 +- .../AzureCosmosDBNoSQLVectorStore.cs | 7 +- .../AzureCosmosDBNoSQLVectorStoreOptions.cs | 2 +- ...osmosDBNoSQLVectorStoreRecordCollection.cs | 273 +++++--------- ...NoSQLVectorStoreRecordCollectionOptions.cs | 2 +- .../AzureCosmosDBNoSqlFilterTranslator.cs | 64 +++- .../SqlFilterTranslator.cs | 115 ++++-- .../InMemoryVectorStoreRecordCollection.cs | 6 + .../MongoDBFilterTranslator.cs | 113 +++--- .../MongoDBServiceCollectionExtensions.cs | 4 +- .../MongoDBVectorStore.cs | 7 +- .../MongoDBVectorStoreOptions.cs | 2 +- .../MongoDBVectorStoreRecordCollection.cs | 66 ++-- ...ngoDBVectorStoreRecordCollectionOptions.cs | 2 +- .../PineconeFilterTranslator.cs | 111 +++--- .../PineconeKernelBuilderExtensions.cs | 8 +- .../PineconeServiceCollectionExtensions.cs | 12 +- .../PineconeVectorStore.cs | 7 +- .../PineconeVectorStoreOptions.cs | 2 +- .../PineconeVectorStoreRecordCollection.cs | 70 ++-- ...econeVectorStoreRecordCollectionOptions.cs | 2 +- .../PineconeVectorStoreRecordMapper.cs | 11 +- .../PostgresFilterTranslator.cs | 12 +- .../QdrantFilterTranslator.cs | 174 +++++---- .../QdrantServiceCollectionExtensions.cs | 4 +- .../QdrantVectorStore.cs | 7 +- .../QdrantVectorStoreOptions.cs | 2 +- .../QdrantVectorStoreRecordCollection.cs | 343 ++++++++++-------- 
...drantVectorStoreRecordCollectionOptions.cs | 2 +- .../RedisFilterTranslator.cs | 57 ++- .../RedisHashSetGenericDataModelMapper.cs | 135 ------- ...RedisHashSetVectorStoreRecordCollection.cs | 52 ++- .../RedisHashSetVectorStoreRecordMapper.cs | 16 +- ....cs => RedisJsonDynamicDataModelMapper.cs} | 54 ++- .../RedisJsonVectorStoreRecordCollection.cs | 72 ++-- ...sJsonVectorStoreRecordCollectionOptions.cs | 2 +- .../RedisServiceCollectionExtensions.cs | 8 +- .../RedisVectorStore.cs | 9 +- .../RedisVectorStoreOptions.cs | 2 +- .../SqlServerFilterTranslator.cs | 21 +- .../SqlServerVectorStoreRecordCollection.cs | 17 +- .../SqliteFilterTranslator.cs | 6 +- .../SqliteServiceCollectionExtensions.cs | 2 +- .../SqliteVectorStore.cs | 7 +- .../SqliteVectorStoreOptions.cs | 2 +- .../SqliteVectorStoreRecordCollection.cs | 261 +++++-------- ...qliteVectorStoreRecordCollectionOptions.cs | 2 +- ...r.cs => WeaviateDynamicDataModelMapper.cs} | 55 +-- .../WeaviateFilterTranslator.cs | 177 +++++---- .../WeaviateServiceCollectionExtensions.cs | 2 +- .../WeaviateVectorStore.cs | 9 +- .../WeaviateVectorStoreOptions.cs | 2 +- .../WeaviateVectorStoreRecordCollection.cs | 92 +++-- ...viateVectorStoreRecordCollectionOptions.cs | 2 +- ... => MongoDBDynamicDataModelMapperTests.cs} | 144 ++++---- ...MongoDBServiceCollectionExtensionsTests.cs | 4 +- ...VectorStoreCollectionSearchMappingTests.cs | 3 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 42 +-- .../PineconeKernelBuilderExtensionsTests.cs | 4 +- ...ineconeServiceCollectionExtensionsTests.cs | 4 +- ...ineconeVectorStoreRecordCollectionTests.cs | 4 +- ...resVectorStoreCollectionSqlBuilderTests.cs | 6 +- ...ostgresVectorStoreRecordCollectionTests.cs | 8 +- .../PostgresVectorStoreRecordMapperTests.cs | 4 +- .../QdrantKernelBuilderExtensionsTests.cs | 4 +- .../QdrantServiceCollectionExtensionsTests.cs | 4 +- ...VectorStoreCollectionSearchMappingTests.cs | 3 +- .../QdrantVectorStoreRecordCollectionTests.cs | 16 +- .../QdrantVectorStoreTests.cs | 2 +- ...RedisHashSetDynamicDataModelMapperTests.cs | 212 +++++++++++ ...RedisHashSetGenericDataModelMapperTests.cs | 206 ----------- ...HashSetVectorStoreRecordCollectionTests.cs | 16 +- ...edisHashSetVectorStoreRecordMapperTests.cs | 2 +- ...> RedisJsonDynamicDataModelMapperTests.cs} | 93 ++--- ...disJsonVectorStoreRecordCollectionTests.cs | 12 +- .../RedisJsonVectorStoreRecordMapperTests.cs | 8 +- .../RedisKernelBuilderExtensionsTests.cs | 4 +- .../RedisServiceCollectionExtensionsTests.cs | 4 +- ...VectorStoreCollectionSearchMappingTests.cs | 4 +- .../RedisVectorStoreTests.cs | 4 +- .../SqliteServiceCollectionExtensionsTests.cs | 8 +- .../SqliteVectorStoreRecordMapperTests.cs | 4 +- ...=> WeaviateDynamicDataModelMapperTests.cs} | 185 +++++----- .../WeaviateKernelBuilderExtensionsTests.cs | 4 +- ...eaviateServiceCollectionExtensionsTests.cs | 4 +- ...VectorStoreCollectionCreateMappingTests.cs | 10 +- ...rStoreRecordCollectionQueryBuilderTests.cs | 3 +- ...eaviateVectorStoreRecordCollectionTests.cs | 42 +-- .../WeaviateVectorStoreRecordMapperTests.cs | 2 +- .../WeaviateVectorStoreTests.cs | 2 +- .../VectorStoreRecordDataPropertyModel.cs | 42 --- .../VectorStoreRecordKeyPropertyModel.cs | 37 -- .../VectorStoreRecordModel.cs | 71 ++-- .../VectorStoreRecordModelBuilder.cs | 2 +- .../VectorStoreRecordPropertyModel.cs | 50 ++- .../VectorStoreRecordVectorPropertyModel.cs | 42 --- .../IVectorStoreRecordCollection.cs | 4 +- .../VectorStoreGenericDataModel.cs | 2 + ...ISearchVectorStoreRecordCollectionTests.cs | 98 +++-- 
...MongoDBVectorStoreRecordCollectionTests.cs | 79 ++-- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 70 ++-- ...MongoDBVectorStoreRecordCollectionTests.cs | 79 ++-- ...ostgresVectorStoreRecordCollectionTests.cs | 62 ++-- ...nQdrantVectorStoreRecordCollectionTests.cs | 2 +- .../Memory/Qdrant/QdrantTextSearchTests.cs | 2 +- .../QdrantVectorStoreRecordCollectionTests.cs | 96 +++-- .../Memory/Qdrant/QdrantVectorStoreTests.cs | 2 +- ...HashsetVectorStoreRecordCollectionTests.cs | 2 +- ...disJsonVectorStoreRecordCollectionTests.cs | 2 +- ...HashSetVectorStoreRecordCollectionTests.cs | 92 +++-- ...disJsonVectorStoreRecordCollectionTests.cs | 123 +++---- .../Memory/Redis/RedisVectorStoreFixture.cs | 16 +- .../SqliteServiceCollectionExtensionsTests.cs | 8 +- .../Memory/Sqlite/SqliteVectorStoreFixture.cs | 8 +- .../SqliteVectorStoreRecordCollectionTests.cs | 106 +++--- ...eaviateVectorStoreRecordCollectionTests.cs | 2 +- ...eaviateVectorStoreRecordCollectionTests.cs | 69 ++-- ...er.cs => MongoDBDynamicDataModelMapper.cs} | 46 +-- .../VectorStoreRecordPropertyVerification.cs | 3 - .../Filter/CosmosMongoBasicFilterTests.cs | 4 +- .../Filter/CosmosMongoBasicQueryTests.cs | 4 +- .../Filter/CosmosNoSQLBasicQueryTests.cs | 18 - .../CRUD/InMemoryBatchConformanceTests.cs | 25 ++ .../InMemoryDynamicRecordConformanceTests.cs | 24 ++ .../CRUD/InMemoryRecordConformanceTests.cs | 20 + .../Filter/InMemoryBasicFilterTests.cs | 10 +- .../Filter/InMemoryBasicQueryTests.cs | 8 + .../InMemoryIntegrationTests.csproj | 8 +- .../InMemoryDynamicDataModelFixture.cs | 10 + .../Support/InMemorySimpleModelFixture.cs | 10 + .../Filter/MongoDBBasicFilterTests.cs | 4 +- .../Filter/MongoDBBasicQueryTests.cs | 4 +- ...ineconeDynamicDataModelConformanceTests.cs | 12 + ...ineconeGenericDataModelConformanceTests.cs | 12 - .../Filter/PineconeBasicFilterTests.cs | 6 +- .../Filter/PineconeBasicQueryTests.cs | 12 +- ....cs => PineconeDynamicDataModelFixture.cs} | 2 +- ...ostgresDynamicDataModelConformanceTests.cs | 12 + ...ostgresGenericDataModelConformanceTests.cs | 12 - .../Filter/PostgresBasicFilterTests.cs | 14 +- .../Filter/PostgresBasicQueryTests.cs | 14 +- ....cs => PostgresDynamicDataModelFixture.cs} | 2 +- .../Filter/QdrantBasicQueryTests.cs | 15 - .../RedisGenericDataModelConformanceTests.cs | 12 - ...HashSetDynamicDataModelConformanceTests.cs | 12 + .../RedisHashSetRecordConformanceTests.cs | 12 + ...disJsonDynamicDataModelConformanceTests.cs | 12 + ....cs => RedisJsonRecordConformanceTests.cs} | 4 +- ...RedisHashSetCollectionConformanceTests.cs} | 2 +- ...=> RedisJsonCollectionConformanceTests.cs} | 2 +- .../Filter/RedisBasicFilterTests.cs | 8 +- .../Filter/RedisBasicQueryTests.cs | 22 +- .../RedisHashSetDynamicDataModelFixture.cs | 10 + .../Support/RedisHashSetSimpleModelFixture.cs | 10 + ...cs => RedisJsonDynamicDataModelFixture.cs} | 2 +- ...ture.cs => RedisJsonSimpleModelFixture.cs} | 2 +- ...lServerDynamicDataModelConformanceTests.cs | 12 + ...lServerGenericDataModelConformanceTests.cs | 12 - .../Filter/SqlServerBasicFilterTests.cs | 14 +- .../Filter/SqlServerBasicQueryTests.cs | 14 +- .../SqlServerCommandBuilderTests.cs | 22 +- ...cs => SqlServerDynamicDataModelFixture.cs} | 2 +- .../Filter/SqliteBasicFilterTests.cs | 14 +- .../Filter/SqliteBasicQueryTests.cs | 14 +- .../CRUD/BatchConformanceTests.cs | 57 +-- .../CRUD/DynamicDataModelConformanceTests.cs | 131 +++++++ .../CRUD/GenericDataModelConformanceTests.cs | 148 -------- .../CRUD/RecordConformanceTests.cs | 44 +-- .../Collections/CollectionConformanceTests.cs | 
216 +++++------ .../Filter/BasicFilterTests.cs | 281 +++++++++++--- .../Filter/BasicQueryTests.cs | 19 - .../Support/DynamicDataModelFixture.cs | 61 ++++ .../Support/GenericDataModelFixture.cs | 82 ----- .../Support/VectorStoreCollectionFixture.cs | 6 +- .../VectorDataIntegrationTests.csproj | 1 + .../Xunit/ConditionalTheoryAttribute.cs | 2 +- .../Xunit/ConditionalTheoryDiscoverer.cs | 39 ++ .../Xunit/ConditionalTheoryTestCase.cs | 38 ++ .../CRUD/WeaviateBatchConformanceTests.cs | 12 + .../WeaviateDynamicRecordConformanceTests.cs | 12 + .../CRUD/WeaviateRecordConformanceTests.cs | 12 + .../Filter/WeaviateBasicFilterTests.cs | 4 +- .../Filter/WeaviateBasicQueryTests.cs | 2 +- .../WeaviateDynamicDataModelFixture.cs | 14 + .../Support/WeaviateSimpleModelFixture.cs | 14 + .../Support/WeaviateTestStore.cs | 2 + .../WeaviateIntegrationTests.csproj | 10 +- 226 files changed, 3897 insertions(+), 3628 deletions(-) rename dotnet/samples/Concepts/Memory/{VectorStore_GenericDataModel_Interop.cs => VectorStore_DynamicDataModel_Interop.cs} (71%) rename dotnet/samples/GettingStartedWithVectorStores/{Step5_Use_GenericDataModel.cs => Step5_Use_DynamicDataModel.cs} (74%) rename dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/{AzureAISearchGenericDataModelMapperTests.cs => AzureAISearchDynamicDataModelMapperTests.cs} (73%) rename dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/{AzureCosmosDBNoSQLGenericDataModelMapperTests.cs => AzureCosmosDBNoSQLDynamicDataModelMapperTests.cs} (74%) rename dotnet/src/Connectors/Connectors.Memory.AzureAISearch/{AzureAISearchGenericDataModelMapper.cs => AzureAISearchDynamicDataModelMapper.cs} (72%) rename dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/{AzureCosmosDBNoSQLGenericDataModelMapper.cs => AzureCosmosDBNoSQLDynamicDataModelMapper.cs} (64%) delete mode 100644 dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetGenericDataModelMapper.cs rename dotnet/src/Connectors/Connectors.Memory.Redis/{RedisJsonGenericDataModelMapper.cs => RedisJsonDynamicDataModelMapper.cs} (50%) rename dotnet/src/Connectors/Connectors.Memory.Weaviate/{WeaviateGenericDataModelMapper.cs => WeaviateDynamicDataModelMapper.cs} (61%) rename dotnet/src/Connectors/Connectors.MongoDB.UnitTests/{MongoDBGenericDataModelMapperTests.cs => MongoDBDynamicDataModelMapperTests.cs} (71%) create mode 100644 dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetDynamicDataModelMapperTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetGenericDataModelMapperTests.cs rename dotnet/src/Connectors/Connectors.Redis.UnitTests/{RedisJsonGenericDataModelMapperTests.cs => RedisJsonDynamicDataModelMapperTests.cs} (63%) rename dotnet/src/Connectors/Connectors.Weaviate.UnitTests/{WeaviateGenericDataModelMapperTests.cs => WeaviateDynamicDataModelMapperTests.cs} (74%) rename dotnet/src/InternalUtilities/connectors/Memory/MongoDB/{MongoDBGenericDataModelMapper.cs => MongoDBDynamicDataModelMapper.cs} (72%) create mode 100644 dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryBatchConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryDynamicRecordConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryRecordConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemoryDynamicDataModelFixture.cs create mode 100644 
dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemorySimpleModelFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeDynamicDataModelConformanceTests.cs delete mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeGenericDataModelConformanceTests.cs rename dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/{PineconeGenericDataModelFixture.cs => PineconeDynamicDataModelFixture.cs} (78%) create mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresDynamicDataModelConformanceTests.cs delete mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresGenericDataModelConformanceTests.cs rename dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/{PostgresGenericDataModelFixture.cs => PostgresDynamicDataModelFixture.cs} (78%) delete mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisGenericDataModelConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetDynamicDataModelConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetRecordConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisJsonDynamicDataModelConformanceTests.cs rename dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/{RedisRecordConformanceTests.cs => RedisJsonRecordConformanceTests.cs} (70%) rename dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Collections/{RedisCollectionConformanceTests_HashSet.cs => RedisHashSetCollectionConformanceTests.cs} (77%) rename dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Collections/{RedisCollectionConformanceTests_Json.cs => RedisJsonCollectionConformanceTests.cs} (78%) create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisHashSetDynamicDataModelFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisHashSetSimpleModelFixture.cs rename dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/{RedisGenericDataModelFixture.cs => RedisJsonDynamicDataModelFixture.cs} (72%) rename dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/{RedisSimpleModelFixture.cs => RedisJsonSimpleModelFixture.cs} (75%) create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerDynamicDataModelConformanceTests.cs delete mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerGenericDataModelConformanceTests.cs rename dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/{SqlServerGenericDataModelFixture.cs => SqlServerDynamicDataModelFixture.cs} (78%) create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/DynamicDataModelConformanceTests.cs delete mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/GenericDataModelConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/DynamicDataModelFixture.cs delete mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/GenericDataModelFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryDiscoverer.cs create mode 100644 
dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryTestCase.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateBatchConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateDynamicRecordConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateRecordConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateDynamicDataModelFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateSimpleModelFixture.cs diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs index 28c6d6350313..da5c18354818 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs @@ -63,7 +63,7 @@ public IVectorStoreRecordCollection GetCollection( // since the Langchain schema includes a metadata field that is // a JSON string containing the source property. Parsing this // string and extracting the source is not supported by the default mapper. - return (new AzureAISearchVectorStoreRecordCollection( + return (new AzureAISearchVectorStoreRecordCollection( _searchIndexClient, name, new() diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs index d66a245df759..a645ac53997b 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs @@ -55,7 +55,7 @@ public IVectorStoreRecordCollection GetCollection( // Create a Pinecone collection and pass in our custom record definition that matches // the schema used by Langchain so that the default mapper can use the storage names // in it, to map to the storage scheme. - return (new PineconeVectorStoreRecordCollection( + return (new PineconeVectorStoreRecordCollection( _pineconeClient, name, new() diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs index 5219963a29b1..2edcaa555462 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs @@ -57,7 +57,7 @@ public IVectorStoreRecordCollection GetCollection( // a struct and this isn't supported by the default mapper. // Since langchain creates collections without named vector support // we should set HasNamedVectors to false. 
- var collection = new QdrantVectorStoreRecordCollection>( + var collection = new QdrantVectorStoreRecordCollection>( _qdrantClient, name, new() diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs index 6bfe7ce90d87..2a1e01ac4583 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs @@ -56,7 +56,7 @@ public IVectorStoreRecordCollection GetCollection( // Also pass in our custom record definition that matches the schema used by Langchain // so that the default mapper can use the storage names in it, to map to the storage // scheme. - return (new RedisHashSetVectorStoreRecordCollection( + return (new RedisHashSetVectorStoreRecordCollection( _database, name, new() diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs index ab9fa55daa4c..7ae548016c6f 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs @@ -147,12 +147,12 @@ public IVectorStoreRecordCollection GetCollection( // If the record definition is the glossary definition and the record type is the generic data model, inject the custom mapper into the collection options. if (vectorStoreRecordDefinition == s_glossaryDefinition && typeof(TRecord) == typeof(GenericDataModel)) { - var customCollection = new RedisJsonVectorStoreRecordCollection(_database, name, new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition, JsonNodeCustomMapper = new Mapper() }) as IVectorStoreRecordCollection; + var customCollection = new RedisJsonVectorStoreRecordCollection(_database, name, new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition, JsonNodeCustomMapper = new Mapper() }) as IVectorStoreRecordCollection; return customCollection!; } // Otherwise, just create a standard collection with the default mapper. - var collection = new RedisJsonVectorStoreRecordCollection(_database, name, new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; + var collection = new RedisJsonVectorStoreRecordCollection(_database, name, new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; return collection!; } diff --git a/dotnet/samples/Concepts/Memory/VectorStore_GenericDataModel_Interop.cs b/dotnet/samples/Concepts/Memory/VectorStore_DynamicDataModel_Interop.cs similarity index 71% rename from dotnet/samples/Concepts/Memory/VectorStore_GenericDataModel_Interop.cs rename to dotnet/samples/Concepts/Memory/VectorStore_DynamicDataModel_Interop.cs index 50c99dfcd03c..74e90490d37a 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_GenericDataModel_Interop.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_DynamicDataModel_Interop.cs @@ -12,15 +12,15 @@ namespace Memory; /// -/// Semantic Kernel provides a generic data model for vector stores that can be used with any +/// Semantic Kernel supports dynamic data modeling for vector stores that can be used with any /// schema. The schema still has to be provided in the form of a record definition, but no -/// custom data model is required. +/// custom .NET data model is required; a simple dictionary can be used. /// /// The sample shows how to -/// 1.
Upsert data using the generic data model and retrieve it from the vector store using a custom data model. -/// 2. Upsert data using a custom data model and retrieve it from the vector store using the generic data model. +/// 1. Upsert data using dynamic data modeling and retrieve it from the vector store using a custom data model. +/// 2. Upsert data using a custom data model and retrieve it from the vector store using dynamic data modeling. /// -public class VectorStore_GenericDataModel_Interop(ITestOutputHelper output, VectorStoreQdrantContainerFixture qdrantFixture) : BaseTest(output), IClassFixture +public class VectorStore_DynamicDataModel_Interop(ITestOutputHelper output, VectorStoreQdrantContainerFixture qdrantFixture) : BaseTest(output), IClassFixture { private static readonly JsonSerializerOptions s_indentedSerializerOptions = new() { WriteIndented = true }; @@ -36,7 +36,7 @@ public class VectorStore_GenericDataModel_Interop(ITestOutputHelper output, Vect }; [Fact] - public async Task UpsertWithGenericRetrieveWithCustomAsync() + public async Task UpsertWithDynamicRetrieveWithCustomAsync() { // Create an embedding generation service. var textEmbeddingGenerationService = new AzureOpenAITextEmbeddingGenerationService( @@ -48,27 +48,27 @@ public async Task UpsertWithGenericRetrieveWithCustomAsync() { await qdrantFixture.ManualInitializeAsync(); var vectorStore = new QdrantVectorStore(new QdrantClient("localhost")); - // Get and create collection if it doesn't exist using the generic data model and record definition that defines the schema. - var genericDataModelCollection = vectorStore.GetCollection>("skglossary", s_vectorStoreRecordDefinition); - await genericDataModelCollection.CreateCollectionIfNotExistsAsync(); + // Get and create collection if it doesn't exist using the dynamic data model and record definition that defines the schema. + var dynamicDataModelCollection = vectorStore.GetCollection>("skglossary", s_vectorStoreRecordDefinition); + await dynamicDataModelCollection.CreateCollectionIfNotExistsAsync(); // Create glossary entries and generate embeddings for them. - var glossaryEntries = CreateGenericGlossaryEntries().ToList(); + var glossaryEntries = CreateDynamicGlossaryEntries().ToList(); var tasks = glossaryEntries.Select(entry => Task.Run(async () => { - entry.Vectors["DefinitionEmbedding"] = await textEmbeddingGenerationService.GenerateEmbeddingAsync((string)entry.Data["Definition"]!); + entry["DefinitionEmbedding"] = await textEmbeddingGenerationService.GenerateEmbeddingAsync((string)entry["Definition"]!); })); await Task.WhenAll(tasks); // Upsert the glossary entries into the collection and return their keys. - var upsertedKeysTasks = glossaryEntries.Select(x => genericDataModelCollection.UpsertAsync(x)); + var upsertedKeysTasks = glossaryEntries.Select(x => dynamicDataModelCollection.UpsertAsync(x)); var upsertedKeys = await Task.WhenAll(upsertedKeysTasks); // Get the collection using the custom data model. var customDataModelCollection = vectorStore.GetCollection("skglossary"); // Retrieve one of the upserted records from the collection. - var upsertedRecord = await customDataModelCollection.GetAsync(upsertedKeys.First(), new() { IncludeVectors = true }); + var upsertedRecord = await customDataModelCollection.GetAsync((ulong)upsertedKeys.First(), new() { IncludeVectors = true }); // Write upserted keys and one of the upserted records to the console.
Console.WriteLine($"Upserted keys: {string.Join(", ", upsertedKeys)}"); @@ -76,7 +76,7 @@ public async Task UpsertWithGenericRetrieveWithCustomAsync() } [Fact] - public async Task UpsertWithCustomRetrieveWithGenericAsync() + public async Task UpsertWithCustomRetrieveWithDynamicAsync() { // Create an embedding generation service. var textEmbeddingGenerationService = new AzureOpenAITextEmbeddingGenerationService( @@ -104,11 +104,11 @@ public async Task UpsertWithCustomRetrieveWithGenericAsync() var upsertedKeysTasks = glossaryEntries.Select(x => customDataModelCollection.UpsertAsync(x)); var upsertedKeys = await Task.WhenAll(upsertedKeysTasks); - // Get the collection using the generic data model. - var genericDataModelCollection = vectorStore.GetCollection>("skglossary", s_vectorStoreRecordDefinition); + // Get the collection using the dynamic data model. + var dynamicDataModelCollection = vectorStore.GetCollection>("skglossary", s_vectorStoreRecordDefinition); // Retrieve one of the upserted records from the collection. - var upsertedRecord = await genericDataModelCollection.GetAsync(upsertedKeys.First(), new() { IncludeVectors = true }); + var upsertedRecord = await dynamicDataModelCollection.GetAsync(upsertedKeys.First(), new() { IncludeVectors = true }); // Write upserted keys and one of the upserted records to the console. Console.WriteLine($"Upserted keys: {string.Join(", ", upsertedKeys)}"); @@ -166,36 +166,30 @@ private static IEnumerable CreateCustomGlossaryEntries() } /// - /// Create some sample glossary entries using the generic data model. + /// Create some sample glossary entries using dynamic data modeling. /// /// A list of sample glossary entries. - private static IEnumerable> CreateGenericGlossaryEntries() + private static IEnumerable> CreateDynamicGlossaryEntries() { - yield return new VectorStoreGenericDataModel(1) + yield return new Dictionary { - Data = new Dictionary - { - ["Term"] = "API", - ["Definition"] = "Application Programming Interface. A set of rules and specifications that allow software components to communicate and exchange data.", - } + ["Key"] = 1, + ["Term"] = "API", + ["Definition"] = "Application Programming Interface. A set of rules and specifications that allow software components to communicate and exchange data." }; - yield return new VectorStoreGenericDataModel(2) + yield return new Dictionary { - Data = new Dictionary - { - ["Term"] = "Connectors", - ["Definition"] = "Connectors allow you to integrate with various services provide AI capabilities, including LLM, AudioToText, TextToAudio, Embedding generation, etc.", - } + ["Key"] = 2, + ["Term"] = "Connectors", + ["Definition"] = "Connectors allow you to integrate with various services provide AI capabilities, including LLM, AudioToText, TextToAudio, Embedding generation, etc." }; - yield return new VectorStoreGenericDataModel(3) + yield return new Dictionary { - Data = new Dictionary - { - ["Term"] = "RAG", - ["Definition"] = "Retrieval Augmented Generation - a term that refers to the process of retrieving additional data to provide as context to an LLM to use when generating a response (completion) to a user’s question (prompt).", - } + ["Key"] = 3, + ["Term"] = "RAG", + ["Definition"] = "Retrieval Augmented Generation - a term that refers to the process of retrieving additional data to provide as context to an LLM to use when generating a response (completion) to a user’s question (prompt)." 
}; } } diff --git a/dotnet/samples/Concepts/README.md b/dotnet/samples/Concepts/README.md index 627c502ed36f..1c54a6ef891e 100644 --- a/dotnet/samples/Concepts/README.md +++ b/dotnet/samples/Concepts/README.md @@ -144,7 +144,7 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom - [VectorStore_VectorSearch_MultiVector: An example showing how to pick a target vector when doing vector search on a record that contains multiple vectors.](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs) - [VectorStore_VectorSearch_MultiStore_Common: An example showing how to write vector database agnostic code with different vector databases.](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs) - [VectorStore_HybridSearch_Simple_AzureAISearch: An example showing how to do hybrid search using AzureAISearch.](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/VectorStore_HybridSearch_Simple_AzureAISearch.cs) -- [VectorStore_GenericDataModel_Interop: An example that shows how you can use the built-in, generic data model from Semantic Kernel to read and write to a Vector Store.](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/VectorStore_GenericDataModel_Interop.cs) +- [VectorStore_DynamicDataModel_Interop: An example that shows how you can use dynamic data modeling from Semantic Kernel to read and write to a Vector Store.](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/VectorStore_DynamicDataModel_Interop.cs) - [VectorStore_ConsumeFromMemoryStore_AzureAISearch: An example that shows how you can use the AzureAISearchVectorStore to consume data that was ingested using the AzureAISearchMemoryStore.](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_AzureAISearch.cs) - [VectorStore_ConsumeFromMemoryStore_Qdrant: An example that shows how you can use the QdrantVectorStore to consume data that was ingested using the QdrantMemoryStore.](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_Qdrant.cs) - [VectorStore_ConsumeFromMemoryStore_Redis: An example that shows how you can use the RedisVectorStore to consume data that was ingested using the RedisMemoryStore.](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/VectorStore_ConsumeFromMemoryStore_Redis.cs) diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_GenericDataModel.cs b/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs similarity index 74% rename from dotnet/samples/GettingStartedWithVectorStores/Step5_Use_GenericDataModel.cs rename to dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs index ae045e539b3b..b5b0f6c7cd72 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_GenericDataModel.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs @@ -8,13 +8,13 @@ namespace GettingStartedWithVectorStores; /// -/// Example that shows that you can use the generic data model to interact with a vector database. -/// This makes it possible to use the vector store abstractions without having to create your own data model. 
+/// Example that shows that you can use the dynamic data modeling to interact with a vector database. +/// This makes it possible to use the vector store abstractions without having to create your own strongly-typed data model. /// -public class Step5_Use_GenericDataModel(ITestOutputHelper output, VectorStoresFixture fixture) : BaseTest(output), IClassFixture +public class Step5_Use_DynamicDataModel(ITestOutputHelper output, VectorStoresFixture fixture) : BaseTest(output), IClassFixture { /// - /// Example showing how to query a vector store that uses the generic data model. + /// Example showing how to query a vector store that uses dynamic data modeling. /// /// This example requires a Redis server running on localhost:6379. To run a Redis server in a Docker container, use the following command: /// docker run -d --name redis-stack -p 6379:6379 -p 8001:8001 redis/redis-stack:latest @@ -32,7 +32,7 @@ public async Task SearchAVectorStoreWithGenericDataModelAsync() var customDataModelCollection = vectorStore.GetCollection("skglossary"); await Step1_Ingest_Data.IngestDataIntoVectorStoreAsync(customDataModelCollection, fixture.TextEmbeddingGenerationService); - // To use the generic data model, we still have to describe the storage schema to the vector store + // To use dynamic data modeling, we still have to describe the storage schema to the vector store // using a record definition. The benefit over a custom data model is that this definition // does not have to be known at compile time. // E.g. it can be read from a configuration or retrieved from a service. @@ -48,27 +48,27 @@ public async Task SearchAVectorStoreWithGenericDataModelAsync() } }; - // Now, let's create a collection that uses the generic data model. - var genericDataModelCollection = vectorStore.GetCollection>("skglossary", recordDefinition); + // Now, let's create a collection that uses a dynamic data model. + var dynamicDataModelCollection = vectorStore.GetCollection>("skglossary", recordDefinition); // Generate an embedding from the search string. var searchString = "How do I provide additional context to an LLM?"; var searchVector = await fixture.TextEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); // Search the generic data model collection and get the single most relevant result. - var searchResult = await genericDataModelCollection.VectorizedSearchAsync( + var searchResult = await dynamicDataModelCollection.VectorizedSearchAsync( searchVector, top: 1); var searchResultItems = await searchResult.Results.ToListAsync(); // Write the search result with its score to the console. - // Note that here we can loop through all the data properties - // without knowing the schema, since the data properties are + // Note that here we can loop through all the properties + // without knowing the schema, since the properties are // stored as a dictionary of string keys and object values - // when using the generic data model. - foreach (var dataProperty in searchResultItems.First().Record.Data) + // when using the dynamic data model. 
+ foreach (var property in searchResultItems.First().Record) { - Console.WriteLine($"{dataProperty.Key}: {dataProperty.Value}"); + Console.WriteLine($"{property.Key}: {property.Value}"); } Console.WriteLine(searchResultItems.First().Score); } diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs b/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs index 7c113164114c..2171f59ef711 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs @@ -43,7 +43,7 @@ public async Task UseCustomMapperAsync() // Construct an Azure AI Search vector store collection and // pass in the custom mapper and record definition. - var collection = new AzureAISearchVectorStoreRecordCollection( + var collection = new AzureAISearchVectorStoreRecordCollection( new SearchIndexClient( new Uri(TestConfiguration.AzureAISearch.Endpoint), new AzureKeyCredential(TestConfiguration.AzureAISearch.ApiKey)), diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchDynamicDataModelMapperTests.cs similarity index 73% rename from dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchGenericDataModelMapperTests.cs rename to dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchDynamicDataModelMapperTests.cs index d1840740da54..338fabab03f3 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchGenericDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchDynamicDataModelMapperTests.cs @@ -12,9 +12,9 @@ namespace SemanticKernel.Connectors.AzureAISearch.UnitTests; /// -/// Tests for the class. +/// Tests for the class. 
/// -public class AzureAISearchGenericDataModelMapperTests +public class AzureAISearchDynamicDataModelMapperTests { private static readonly VectorStoreRecordModel s_model = BuildModel( [ @@ -45,31 +45,28 @@ public class AzureAISearchGenericDataModelMapperTests public void MapFromDataToStorageModelMapsAllSupportedTypes() { // Arrange - var sut = new AzureAISearchGenericDataModelMapper(s_model); - var dataModel = new VectorStoreGenericDataModel("key") + var sut = new AzureAISearchDynamicDataModelMapper(s_model); + var dataModel = new Dictionary { - Data = - { - ["StringDataProp"] = "string", - ["IntDataProp"] = 1, - ["NullableIntDataProp"] = 2, - ["LongDataProp"] = 3L, - ["NullableLongDataProp"] = 4L, - ["FloatDataProp"] = 5.0f, - ["NullableFloatDataProp"] = 6.0f, - ["DoubleDataProp"] = 7.0, - ["NullableDoubleDataProp"] = 8.0, - ["BoolDataProp"] = true, - ["NullableBoolDataProp"] = false, - ["DateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), - ["NullableDateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), - ["TagListDataProp"] = s_taglist, - }, - Vectors = - { - ["FloatVector"] = new ReadOnlyMemory(s_vector1), - ["NullableFloatVector"] = new ReadOnlyMemory(s_vector2), - }, + ["Key"] = "key", + + ["StringDataProp"] = "string", + ["IntDataProp"] = 1, + ["NullableIntDataProp"] = 2, + ["LongDataProp"] = 3L, + ["NullableLongDataProp"] = 4L, + ["FloatDataProp"] = 5.0f, + ["NullableFloatDataProp"] = 6.0f, + ["DoubleDataProp"] = 7.0, + ["NullableDoubleDataProp"] = 8.0, + ["BoolDataProp"] = true, + ["NullableBoolDataProp"] = false, + ["DateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["NullableDateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["TagListDataProp"] = s_taglist, + + ["FloatVector"] = new ReadOnlyMemory(s_vector1), + ["NullableFloatVector"] = new ReadOnlyMemory(s_vector2) }; // Act @@ -107,20 +104,15 @@ public void MapFromDataToStorageModelMapsNullValues() new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), ]); - var dataModel = new VectorStoreGenericDataModel("key") + var dataModel = new Dictionary { - Data = - { - ["StringDataProp"] = null, - ["NullableIntDataProp"] = null, - }, - Vectors = - { - ["NullableFloatVector"] = null, - }, + ["Key"] = "key", + ["StringDataProp"] = null, + ["NullableIntDataProp"] = null, + ["NullableFloatVector"] = null }; - var sut = new AzureAISearchGenericDataModelMapper(model); + var sut = new AzureAISearchDynamicDataModelMapper(model); // Act var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -135,7 +127,7 @@ public void MapFromDataToStorageModelMapsNullValues() public void MapFromStorageToDataModelMapsAllSupportedTypes() { // Arrange - var sut = new AzureAISearchGenericDataModelMapper(s_model); + var sut = new AzureAISearchDynamicDataModelMapper(s_model); var storageModel = new JsonObject(); storageModel["Key"] = "key"; storageModel["StringDataProp"] = "string"; @@ -159,23 +151,23 @@ public void MapFromStorageToDataModelMapsAllSupportedTypes() var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); // Assert - Assert.Equal("key", dataModel.Key); - Assert.Equal("string", dataModel.Data["StringDataProp"]); - Assert.Equal(1, dataModel.Data["IntDataProp"]); - Assert.Equal(2, dataModel.Data["NullableIntDataProp"]); - Assert.Equal(3L, dataModel.Data["LongDataProp"]); - Assert.Equal(4L, 
dataModel.Data["NullableLongDataProp"]); - Assert.Equal(5.0f, dataModel.Data["FloatDataProp"]); - Assert.Equal(6.0f, dataModel.Data["NullableFloatDataProp"]); - Assert.Equal(7.0, dataModel.Data["DoubleDataProp"]); - Assert.Equal(8.0, dataModel.Data["NullableDoubleDataProp"]); - Assert.Equal(true, dataModel.Data["BoolDataProp"]); - Assert.Equal(false, dataModel.Data["NullableBoolDataProp"]); - Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel.Data["DateTimeOffsetDataProp"]); - Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel.Data["NullableDateTimeOffsetDataProp"]); - Assert.Equal(s_taglist, dataModel.Data["TagListDataProp"]); - Assert.Equal(s_vector1, ((ReadOnlyMemory)dataModel.Vectors["FloatVector"]!).ToArray()); - Assert.Equal(s_vector2, ((ReadOnlyMemory)dataModel.Vectors["NullableFloatVector"]!)!.ToArray()); + Assert.Equal("key", dataModel["Key"]); + Assert.Equal("string", dataModel["StringDataProp"]); + Assert.Equal(1, dataModel["IntDataProp"]); + Assert.Equal(2, dataModel["NullableIntDataProp"]); + Assert.Equal(3L, dataModel["LongDataProp"]); + Assert.Equal(4L, dataModel["NullableLongDataProp"]); + Assert.Equal(5.0f, dataModel["FloatDataProp"]); + Assert.Equal(6.0f, dataModel["NullableFloatDataProp"]); + Assert.Equal(7.0, dataModel["DoubleDataProp"]); + Assert.Equal(8.0, dataModel["NullableDoubleDataProp"]); + Assert.Equal(true, dataModel["BoolDataProp"]); + Assert.Equal(false, dataModel["NullableBoolDataProp"]); + Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel["DateTimeOffsetDataProp"]); + Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel["NullableDateTimeOffsetDataProp"]); + Assert.Equal(s_taglist, dataModel["TagListDataProp"]); + Assert.Equal(s_vector1, ((ReadOnlyMemory)dataModel["FloatVector"]!).ToArray()); + Assert.Equal(s_vector2, ((ReadOnlyMemory)dataModel["NullableFloatVector"]!)!.ToArray()); } [Fact] @@ -196,16 +188,16 @@ public void MapFromStorageToDataModelMapsNullValues() storageModel["NullableIntDataProp"] = null; storageModel["NullableFloatVector"] = null; - var sut = new AzureAISearchGenericDataModelMapper(model); + var sut = new AzureAISearchDynamicDataModelMapper(model); // Act var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); // Assert - Assert.Equal("key", dataModel.Key); - Assert.Null(dataModel.Data["StringDataProp"]); - Assert.Null(dataModel.Data["NullableIntDataProp"]); - Assert.Null(dataModel.Vectors["NullableFloatVector"]); + Assert.Equal("key", dataModel["Key"]); + Assert.Null(dataModel["StringDataProp"]); + Assert.Null(dataModel["NullableIntDataProp"]); + Assert.Null(dataModel["NullableFloatVector"]); } [Fact] @@ -220,7 +212,7 @@ public void MapFromStorageToDataModelThrowsForMissingKey() new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), ]); - var sut = new AzureAISearchGenericDataModelMapper(model); + var sut = new AzureAISearchDynamicDataModelMapper(model); var storageModel = new JsonObject(); // Act @@ -241,8 +233,8 @@ public void MapFromDataToStorageModelSkipsMissingProperties() new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), ]); - var dataModel = new VectorStoreGenericDataModel("key"); - var sut = new AzureAISearchGenericDataModelMapper(model); + var dataModel = new Dictionary { ["Key"] = "key" }; + var sut = new AzureAISearchDynamicDataModelMapper(model); // Act var storageModel = 
sut.MapFromDataToStorageModel(dataModel); @@ -267,20 +259,20 @@ public void MapFromStorageToDataModelSkipsMissingProperties() var storageModel = new JsonObject(); storageModel["Key"] = "key"; - var sut = new AzureAISearchGenericDataModelMapper(model); + var sut = new AzureAISearchDynamicDataModelMapper(model); // Act var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); // Assert - Assert.Equal("key", dataModel.Key); - Assert.False(dataModel.Data.ContainsKey("StringDataProp")); - Assert.False(dataModel.Vectors.ContainsKey("FloatVector")); + Assert.Equal("key", dataModel["Key"]); + Assert.False(dataModel.ContainsKey("StringDataProp")); + Assert.False(dataModel.ContainsKey("FloatVector")); } private static VectorStoreRecordModel BuildModel(List properties) => new VectorStoreRecordJsonModelBuilder(AzureAISearchConstants.s_modelBuildingOptions) .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new() { Properties = properties }); } diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchKernelBuilderExtensionsTests.cs index e7c567b7895c..40686e1c3a7c 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchKernelBuilderExtensionsTests.cs @@ -105,11 +105,11 @@ private void AssertVectorStoreRecordCollectionCreated() var collection = kernel.Services.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = kernel.Services.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchServiceCollectionExtensionsTests.cs index 0310aa2ca4a2..ab6edca0f940 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchServiceCollectionExtensionsTests.cs @@ -105,11 +105,11 @@ private void AssertVectorStoreRecordCollectionCreated() var collection = serviceProvider.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs index a39a019b3efd..17f60b78cd9a 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -23,7 +23,7 @@ namespace SemanticKernel.Connectors.AzureAISearch.UnitTests; #pragma warning disable CS0618 // VectorSearchFilter is 
obsolete /// -/// Contains tests for the class. +/// Contains tests for the class. /// public class AzureAISearchVectorStoreRecordCollectionTests { @@ -65,7 +65,7 @@ public async Task CollectionExistsReturnsCollectionStateAsync(string collectionN .ThrowsAsync(new RequestFailedException(404, "Index not found")); } - var sut = new AzureAISearchVectorStoreRecordCollection(this._searchIndexClientMock.Object, collectionName); + var sut = new AzureAISearchVectorStoreRecordCollection(this._searchIndexClientMock.Object, collectionName); // Act. var actual = await sut.CollectionExistsAsync(this._testCancellationToken); @@ -293,7 +293,7 @@ public async Task CanGetRecordWithCustomMapperAsync() .Returns(CreateModel(TestRecordKey1, true)); // Arrange target with custom mapper. - var sut = new AzureAISearchVectorStoreRecordCollection( + var sut = new AzureAISearchVectorStoreRecordCollection( this._searchIndexClientMock.Object, TestCollectionName, new() @@ -508,7 +508,7 @@ public async Task CanUpsertRecordWithCustomMapperAsync() .Returns(storageObject); // Arrange target with custom mapper. - var sut = new AzureAISearchVectorStoreRecordCollection( + var sut = new AzureAISearchVectorStoreRecordCollection( this._searchIndexClientMock.Object, TestCollectionName, new() @@ -546,7 +546,7 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() }; // Act. - var sut = new AzureAISearchVectorStoreRecordCollection( + var sut = new AzureAISearchVectorStoreRecordCollection( this._searchIndexClientMock.Object, TestCollectionName, new() { VectorStoreRecordDefinition = definition, JsonObjectCustomMapper = Mock.Of>() }); @@ -563,7 +563,7 @@ public async Task CanSearchWithVectorAndFilterAsync() .Setup(x => x.SearchAsync(null, It.IsAny(), It.IsAny())) .ReturnsAsync(Response.FromValue(searchResultsMock, Mock.Of())); - var sut = new AzureAISearchVectorStoreRecordCollection( + var sut = new AzureAISearchVectorStoreRecordCollection( this._searchIndexClientMock.Object, TestCollectionName); var filter = new VectorSearchFilter().EqualTo(nameof(MultiPropsModel.Data1), "Data1FilterValue"); @@ -605,7 +605,7 @@ public async Task CanSearchWithTextAndFilterAsync() .Setup(x => x.SearchAsync(null, It.IsAny(), It.IsAny())) .ReturnsAsync(Response.FromValue(searchResultsMock, Mock.Of())); - var sut = new AzureAISearchVectorStoreRecordCollection( + var sut = new AzureAISearchVectorStoreRecordCollection( this._searchIndexClientMock.Object, TestCollectionName); var filter = new VectorSearchFilter().EqualTo(nameof(MultiPropsModel.Data1), "Data1FilterValue"); @@ -637,9 +637,9 @@ public async Task CanSearchWithTextAndFilterAsync() Times.Once); } - private AzureAISearchVectorStoreRecordCollection CreateRecordCollection(bool useDefinition, bool useCustomJsonSerializerOptions = false) + private AzureAISearchVectorStoreRecordCollection CreateRecordCollection(bool useDefinition, bool useCustomJsonSerializerOptions = false) { - return new AzureAISearchVectorStoreRecordCollection( + return new AzureAISearchVectorStoreRecordCollection( this._searchIndexClientMock.Object, TestCollectionName, new() diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreTests.cs index f4eed15172fb..17e9dff36e5a 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreTests.cs @@ -46,7 +46,7 @@ public 
void GetCollectionReturnsCollection() // Assert. Assert.NotNull(actual); - Assert.IsType>(actual); + Assert.IsType>(actual); } #pragma warning disable CS0618 // IAzureAISearchVectorStoreRecordCollectionFactory is obsolete diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBKernelBuilderExtensionsTests.cs index 41151c77eba0..60928e06f48b 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBKernelBuilderExtensionsTests.cs @@ -82,11 +82,11 @@ private void AssertVectorStoreRecordCollectionCreated() var collection = kernel.Services.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = kernel.Services.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBServiceCollectionExtensionsTests.cs index 9484be5ba373..994d283190a2 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBServiceCollectionExtensionsTests.cs @@ -82,11 +82,11 @@ private void AssertVectorStoreRecordCollectionCreated() var collection = serviceProvider.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs index 0b9330c668fc..98973fdcba19 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs @@ -20,7 +20,7 @@ public sealed class AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests { private readonly VectorStoreRecordModel _model = new MongoDBModelBuilder() .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new() { Properties = @@ -104,6 +104,6 @@ public void BuilderFilterByDefaultReturnsValidFilter() private static VectorStoreRecordModel BuildModel(List properties) => new MongoDBModelBuilder() .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new() { Properties = properties }); } diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs 
b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index b4930e4247d2..09b59d505f63 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -19,7 +19,7 @@ namespace SemanticKernel.Connectors.AzureCosmosDBMongoDB.UnitTests; /// -/// Unit tests for class. +/// Unit tests for class. /// public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollectionTests { @@ -37,7 +37,7 @@ public AzureCosmosDBMongoDBVectorStoreRecordCollectionTests() public void ConstructorForModelWithoutKeyThrowsException() { // Act & Assert - var exception = Assert.Throws(() => new AzureCosmosDBMongoDBVectorStoreRecordCollection(this._mockMongoDatabase.Object, "collection")); + var exception = Assert.Throws(() => new AzureCosmosDBMongoDBVectorStoreRecordCollection(this._mockMongoDatabase.Object, "collection")); Assert.Contains("No key property found", exception.Message); } @@ -45,7 +45,7 @@ public void ConstructorForModelWithoutKeyThrowsException() public void ConstructorWithDeclarativeModelInitializesCollection() { // Act & Assert - var collection = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var collection = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -62,7 +62,7 @@ public void ConstructorWithImperativeModelInitializesCollection() }; // Act - var collection = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var collection = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection", new() { VectorStoreRecordDefinition = definition }); @@ -90,7 +90,7 @@ public async Task CollectionExistsReturnsValidResultAsync(List collectio .Setup(l => l.ListCollectionNamesAsync(It.IsAny(), It.IsAny())) .ReturnsAsync(mockCursor.Object); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, collectionName); @@ -144,7 +144,7 @@ public async Task CreateCollectionInvokesValidMethodsAsync(bool indexExists, int .Setup(l => l.ListCollectionNamesAsync(It.IsAny(), It.IsAny())) .ReturnsAsync(mockCursor.Object); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(this._mockMongoDatabase.Object, CollectionName); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(this._mockMongoDatabase.Object, CollectionName); // Act await sut.CreateCollectionAsync(); @@ -207,7 +207,7 @@ public async Task CreateCollectionIfNotExistsInvokesValidMethodsAsync() .Setup(l => l.Indexes) .Returns(mockMongoIndexManager.Object); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, CollectionName); @@ -231,7 +231,7 @@ public async Task DeleteInvokesValidMethodsAsync() // Arrange const string RecordKey = "key"; - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -255,7 +255,7 @@ public async Task DeleteBatchInvokesValidMethodsAsync() // Arrange List recordKeys = ["key1", "key2"]; - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( 
this._mockMongoDatabase.Object, "collection"); @@ -279,7 +279,7 @@ public async Task DeleteCollectionInvokesValidMethodsAsync() // Arrange const string CollectionName = "collection"; - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, CollectionName); @@ -316,7 +316,7 @@ public async Task GetReturnsValidRecordAsync() It.IsAny())) .ReturnsAsync(mockCursor.Object); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -354,7 +354,7 @@ public async Task GetBatchReturnsValidRecordAsync() It.IsAny())) .ReturnsAsync(mockCursor.Object); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -385,7 +385,7 @@ public async Task UpsertReturnsRecordKeyAsync() var documentSerializer = serializerRegistry.GetSerializer(); var expectedDefinition = Builders.Filter.Eq(document => document["_id"], "key"); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -413,7 +413,7 @@ public async Task UpsertBatchReturnsRecordKeysAsync() var hotel2 = new AzureCosmosDBMongoDBHotelModel("key2") { HotelName = "Test Name 2" }; var hotel3 = new AzureCosmosDBMongoDBHotelModel("key3") { HotelName = "Test Name 3" }; - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -502,7 +502,7 @@ public async Task UpsertWithCustomMapperWorksCorrectlyAsync() .Setup(l => l.MapFromDataToStorageModel(It.IsAny())) .Returns(new BsonDocument { ["_id"] = "key", ["my_name"] = "Test Name" }); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection", new() { BsonDocumentCustomMapper = mockMapper.Object }); @@ -552,7 +552,7 @@ public async Task GetWithCustomMapperWorksCorrectlyAsync() .Setup(l => l.MapFromStorageToDataModel(It.IsAny(), It.IsAny())) .Returns(new AzureCosmosDBMongoDBHotelModel(RecordKey) { HotelName = "Name from mapper" }); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection", new() { BsonDocumentCustomMapper = mockMapper.Object }); @@ -574,7 +574,7 @@ public async Task VectorizedSearchThrowsExceptionWithInvalidVectorTypeAsync(obje // Arrange this.MockCollectionForSearch(); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -634,7 +634,7 @@ public async Task VectorizedSearchUsesValidQueryAsync( this.MockCollectionForSearch(); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -667,7 +667,7 @@ public async Task VectorizedSearchThrowsExceptionWithNonExistentVectorPropertyNa // Arrange this.MockCollectionForSearch(); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( 
this._mockMongoDatabase.Object, "collection"); @@ -683,7 +683,7 @@ public async Task VectorizedSearchReturnsRecordWithScoreAsync() // Arrange this.MockCollectionForSearch(); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -773,7 +773,7 @@ private async Task TestUpsertWithModelAsync( new() { VectorStoreRecordDefinition = definition } : null; - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection", options); diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLDynamicDataModelMapperTests.cs similarity index 74% rename from dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLGenericDataModelMapperTests.cs rename to dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLDynamicDataModelMapperTests.cs index 800fee716c53..4fad5c339f78 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLGenericDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLDynamicDataModelMapperTests.cs @@ -13,15 +13,15 @@ namespace SemanticKernel.Connectors.AzureCosmosDBNoSQL.UnitTests; /// -/// Unit tests for class. +/// Unit tests for class. /// -public sealed class AzureCosmosDBNoSQLGenericDataModelMapperTests +public sealed class AzureCosmosDBNoSQLDynamicDataModelMapperTests { private static readonly JsonSerializerOptions s_jsonSerializerOptions = JsonSerializerOptions.Default; private static readonly VectorStoreRecordModel s_model = new AzureCosmosDBNoSqlVectorStoreModelBuilder() .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new VectorStoreRecordDefinition { Properties = new List @@ -66,40 +66,37 @@ public sealed class AzureCosmosDBNoSQLGenericDataModelMapperTests public void MapFromDataToStorageModelMapsAllSupportedTypes() { // Arrange - var sut = new AzureCosmosDBNoSQLGenericDataModelMapper(s_model, s_jsonSerializerOptions); + var sut = new AzureCosmosDBNoSQLDynamicDataModelMapper(s_model, s_jsonSerializerOptions); - var dataModel = new VectorStoreGenericDataModel("key") + var dataModel = new Dictionary { - Data = - { - ["BoolDataProp"] = true, - ["NullableBoolDataProp"] = false, - ["StringDataProp"] = "string", - ["IntDataProp"] = 1, - ["NullableIntDataProp"] = 2, - ["LongDataProp"] = 3L, - ["NullableLongDataProp"] = 4L, - ["FloatDataProp"] = 5.0f, - ["NullableFloatDataProp"] = 6.0f, - ["DoubleDataProp"] = 7.0, - ["NullableDoubleDataProp"] = 8.0, - ["DateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), - ["NullableDateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), - ["TagListDataProp"] = s_taglist, - }, - Vectors = - { + ["Key"] = "key", + + ["BoolDataProp"] = true, + ["NullableBoolDataProp"] = false, + ["StringDataProp"] = "string", + ["IntDataProp"] = 1, + ["NullableIntDataProp"] = 2, + ["LongDataProp"] = 3L, + ["NullableLongDataProp"] = 4L, + ["FloatDataProp"] = 5.0f, + ["NullableFloatDataProp"] = 6.0f, + ["DoubleDataProp"] = 7.0, + ["NullableDoubleDataProp"] = 8.0, + ["DateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), + 
["NullableDateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["TagListDataProp"] = s_taglist, + #if NET5_0_OR_GREATER - ["HalfVector"] = new ReadOnlyMemory(s_halfVector), - ["NullableHalfVector"] = new ReadOnlyMemory(s_halfVector), + ["HalfVector"] = new ReadOnlyMemory(s_halfVector), + ["NullableHalfVector"] = new ReadOnlyMemory(s_halfVector), #endif - ["FloatVector"] = new ReadOnlyMemory(s_floatVector), - ["NullableFloatVector"] = new ReadOnlyMemory(s_floatVector), - ["ByteVector"] = new ReadOnlyMemory(s_byteVector), - ["NullableByteVector"] = new ReadOnlyMemory(s_byteVector), - ["SByteVector"] = new ReadOnlyMemory(s_sbyteVector), - ["NullableSByteVector"] = new ReadOnlyMemory(s_sbyteVector) - }, + ["FloatVector"] = new ReadOnlyMemory(s_floatVector), + ["NullableFloatVector"] = new ReadOnlyMemory(s_floatVector), + ["ByteVector"] = new ReadOnlyMemory(s_byteVector), + ["NullableByteVector"] = new ReadOnlyMemory(s_byteVector), + ["SByteVector"] = new ReadOnlyMemory(s_sbyteVector), + ["NullableSByteVector"] = new ReadOnlyMemory(s_sbyteVector) }; // Act @@ -148,20 +145,15 @@ public void MapFromDataToStorageModelMapsNullValues() }, }; - var dataModel = new VectorStoreGenericDataModel("key") + var dataModel = new Dictionary { - Data = - { - ["StringDataProp"] = null, - ["NullableIntDataProp"] = null, - }, - Vectors = - { - ["NullableFloatVector"] = null, - }, + ["Key"] = "key", + ["StringDataProp"] = null, + ["NullableIntDataProp"] = null, + ["NullableFloatVector"] = null }; - var sut = new AzureCosmosDBNoSQLGenericDataModelMapper(s_model, s_jsonSerializerOptions); + var sut = new AzureCosmosDBNoSQLDynamicDataModelMapper(s_model, s_jsonSerializerOptions); // Act var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -176,7 +168,7 @@ public void MapFromDataToStorageModelMapsNullValues() public void MapFromStorageToDataModelMapsAllSupportedTypes() { // Arrange - var sut = new AzureCosmosDBNoSQLGenericDataModelMapper(s_model, s_jsonSerializerOptions); + var sut = new AzureCosmosDBNoSQLDynamicDataModelMapper(s_model, s_jsonSerializerOptions); var storageModel = new JsonObject { @@ -211,31 +203,31 @@ public void MapFromStorageToDataModelMapsAllSupportedTypes() var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); // Assert - Assert.Equal("key", dataModel.Key); - Assert.Equal(true, dataModel.Data["BoolDataProp"]); - Assert.Equal(false, dataModel.Data["NullableBoolDataProp"]); - Assert.Equal("string", dataModel.Data["StringDataProp"]); - Assert.Equal(1, dataModel.Data["IntDataProp"]); - Assert.Equal(2, dataModel.Data["NullableIntDataProp"]); - Assert.Equal(3L, dataModel.Data["LongDataProp"]); - Assert.Equal(4L, dataModel.Data["NullableLongDataProp"]); - Assert.Equal(5.0f, dataModel.Data["FloatDataProp"]); - Assert.Equal(6.0f, dataModel.Data["NullableFloatDataProp"]); - Assert.Equal(7.0, dataModel.Data["DoubleDataProp"]); - Assert.Equal(8.0, dataModel.Data["NullableDoubleDataProp"]); - Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel.Data["DateTimeOffsetDataProp"]); - Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel.Data["NullableDateTimeOffsetDataProp"]); - Assert.Equal(s_taglist, dataModel.Data["TagListDataProp"]); + Assert.Equal("key", dataModel["Key"]); + Assert.Equal(true, dataModel["BoolDataProp"]); + Assert.Equal(false, dataModel["NullableBoolDataProp"]); + Assert.Equal("string", dataModel["StringDataProp"]); + Assert.Equal(1, 
dataModel["IntDataProp"]); + Assert.Equal(2, dataModel["NullableIntDataProp"]); + Assert.Equal(3L, dataModel["LongDataProp"]); + Assert.Equal(4L, dataModel["NullableLongDataProp"]); + Assert.Equal(5.0f, dataModel["FloatDataProp"]); + Assert.Equal(6.0f, dataModel["NullableFloatDataProp"]); + Assert.Equal(7.0, dataModel["DoubleDataProp"]); + Assert.Equal(8.0, dataModel["NullableDoubleDataProp"]); + Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel["DateTimeOffsetDataProp"]); + Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel["NullableDateTimeOffsetDataProp"]); + Assert.Equal(s_taglist, dataModel["TagListDataProp"]); #if NET5_0_OR_GREATER - Assert.Equal(s_halfVector, ((ReadOnlyMemory)dataModel.Vectors["HalfVector"]!).ToArray()); - Assert.Equal(s_halfVector, ((ReadOnlyMemory)dataModel.Vectors["NullableHalfVector"]!)!.ToArray()); + Assert.Equal(s_halfVector, ((ReadOnlyMemory)dataModel["HalfVector"]!).ToArray()); + Assert.Equal(s_halfVector, ((ReadOnlyMemory)dataModel["NullableHalfVector"]!)!.ToArray()); #endif - Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel.Vectors["FloatVector"]!).ToArray()); - Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel.Vectors["NullableFloatVector"]!)!.ToArray()); - Assert.Equal(s_byteVector, ((ReadOnlyMemory)dataModel.Vectors["ByteVector"]!).ToArray()); - Assert.Equal(s_byteVector, ((ReadOnlyMemory)dataModel.Vectors["NullableByteVector"]!)!.ToArray()); - Assert.Equal(s_sbyteVector, ((ReadOnlyMemory)dataModel.Vectors["SByteVector"]!).ToArray()); - Assert.Equal(s_sbyteVector, ((ReadOnlyMemory)dataModel.Vectors["NullableSByteVector"]!)!.ToArray()); + Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel["FloatVector"]!).ToArray()); + Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel["NullableFloatVector"]!)!.ToArray()); + Assert.Equal(s_byteVector, ((ReadOnlyMemory)dataModel["ByteVector"]!).ToArray()); + Assert.Equal(s_byteVector, ((ReadOnlyMemory)dataModel["NullableByteVector"]!)!.ToArray()); + Assert.Equal(s_sbyteVector, ((ReadOnlyMemory)dataModel["SByteVector"]!).ToArray()); + Assert.Equal(s_sbyteVector, ((ReadOnlyMemory)dataModel["NullableSByteVector"]!)!.ToArray()); } [Fact] @@ -261,23 +253,23 @@ public void MapFromStorageToDataModelMapsNullValues() ["NullableFloatVector"] = null }; - var sut = new AzureCosmosDBNoSQLGenericDataModelMapper(s_model, s_jsonSerializerOptions); + var sut = new AzureCosmosDBNoSQLDynamicDataModelMapper(s_model, s_jsonSerializerOptions); // Act var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); // Assert - Assert.Equal("key", dataModel.Key); - Assert.Null(dataModel.Data["StringDataProp"]); - Assert.Null(dataModel.Data["NullableIntDataProp"]); - Assert.Null(dataModel.Vectors["NullableFloatVector"]); + Assert.Equal("key", dataModel["Key"]); + Assert.Null(dataModel["StringDataProp"]); + Assert.Null(dataModel["NullableIntDataProp"]); + Assert.Null(dataModel["NullableFloatVector"]); } [Fact] public void MapFromStorageToDataModelThrowsForMissingKey() { // Arrange - var sut = new AzureCosmosDBNoSQLGenericDataModelMapper(s_model, s_jsonSerializerOptions); + var sut = new AzureCosmosDBNoSQLDynamicDataModelMapper(s_model, s_jsonSerializerOptions); var storageModel = new JsonObject(); @@ -300,8 +292,8 @@ public void MapFromDataToStorageModelSkipsMissingProperties() }, }; - var dataModel = new VectorStoreGenericDataModel("key"); - var sut = new AzureCosmosDBNoSQLGenericDataModelMapper(s_model, 
s_jsonSerializerOptions); + var dataModel = new Dictionary { ["Key"] = "key" }; + var sut = new AzureCosmosDBNoSQLDynamicDataModelMapper(s_model, s_jsonSerializerOptions); // Act var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -331,14 +323,14 @@ public void MapFromStorageToDataModelSkipsMissingProperties() ["id"] = "key" }; - var sut = new AzureCosmosDBNoSQLGenericDataModelMapper(s_model, s_jsonSerializerOptions); + var sut = new AzureCosmosDBNoSQLDynamicDataModelMapper(s_model, s_jsonSerializerOptions); // Act var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); // Assert - Assert.Equal("key", dataModel.Key); - Assert.False(dataModel.Data.ContainsKey("StringDataProp")); - Assert.False(dataModel.Vectors.ContainsKey("FloatVector")); + Assert.Equal("key", dataModel["Key"]); + Assert.False(dataModel.ContainsKey("StringDataProp")); + Assert.False(dataModel.ContainsKey("FloatVector")); } } diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLKernelBuilderExtensionsTests.cs index 8f9f7cd72ab3..2b8a83824c01 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLKernelBuilderExtensionsTests.cs @@ -93,11 +93,11 @@ private void AssertVectorStoreRecordCollectionCreated() var collection = kernel.Services.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = kernel.Services.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLServiceCollectionExtensionsTests.cs index 3ade14ddaf34..9a76d9e0d119 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLServiceCollectionExtensionsTests.cs @@ -94,11 +94,11 @@ private void AssertVectorStoreRecordCollectionCreated() var collection = serviceProvider.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs index 14f15a8ee264..a1797cbea98d 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs @@ -19,7 +19,7 @@ public sealed class 
AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests private const string ScorePropertyName = "TestScore"; private readonly VectorStoreRecordModel _model = new AzureCosmosDBNoSqlVectorStoreModelBuilder().Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new() { Properties = @@ -193,7 +193,7 @@ FROM x const string PartitionKeyPropertyName = "TestProperty1"; var model = new AzureCosmosDBNoSqlVectorStoreModelBuilder().Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new() { Properties = diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index b1e59b5a1613..a4136536e1d9 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -18,7 +18,7 @@ namespace SemanticKernel.Connectors.AzureCosmosDBNoSQL.UnitTests; /// -/// Unit tests for class. +/// Unit tests for class. /// public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollectionTests { @@ -42,7 +42,7 @@ public AzureCosmosDBNoSQLVectorStoreRecordCollectionTests() public void ConstructorForModelWithoutKeyThrowsException() { // Act & Assert - var exception = Assert.Throws(() => new AzureCosmosDBNoSQLVectorStoreRecordCollection(this._mockDatabase.Object, "collection")); + var exception = Assert.Throws(() => new AzureCosmosDBNoSQLVectorStoreRecordCollection(this._mockDatabase.Object, "collection")); Assert.Contains("No key property found", exception.Message); } @@ -56,7 +56,7 @@ public void ConstructorWithoutSystemTextJsonSerializerOptionsThrowsArgumentExcep mockDatabase.Setup(l => l.Client).Returns(mockClient.Object); // Act & Assert - var exception = Assert.Throws(() => new AzureCosmosDBNoSQLVectorStoreRecordCollection(mockDatabase.Object, "collection")); + var exception = Assert.Throws(() => new AzureCosmosDBNoSQLVectorStoreRecordCollection(mockDatabase.Object, "collection")); Assert.Contains(nameof(CosmosClientOptions.UseSystemTextJsonSerializerWithOptions), exception.Message); } @@ -64,7 +64,7 @@ public void ConstructorWithoutSystemTextJsonSerializerOptionsThrowsArgumentExcep public void ConstructorWithDeclarativeModelInitializesCollection() { // Act & Assert - var collection = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var collection = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, "collection"); @@ -81,7 +81,7 @@ public void ConstructorWithImperativeModelInitializesCollection() }; // Act - var collection = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var collection = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, "collection", new() { VectorStoreRecordDefinition = definition }); @@ -117,7 +117,7 @@ public async Task CollectionExistsReturnsValidResultAsync(List collectio It.IsAny())) .Returns(mockFeedIterator.Object); - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, collectionName); @@ -137,7 +137,7 @@ public async Task CreateCollectionUsesValidContainerPropertiesAsync(IndexingMode // Arrange const string CollectionName = "collection"; - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var sut = new 
AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, CollectionName, new() { IndexingMode = indexingMode, Automatic = indexingMode != IndexingMode.None }); @@ -237,7 +237,7 @@ public async Task CreateCollectionIfNotExistsInvokesValidMethodsAsync(List())) .Returns(mockFeedIterator.Object); - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, CollectionName); @@ -264,18 +264,22 @@ public async Task DeleteInvokesValidMethodsAsync( const string RecordKey = "recordKey"; const string PartitionKey = "partitionKey"; - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( - this._mockDatabase.Object, - "collection"); - // Act if (useCompositeKeyCollection) { + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + "collection"); + await ((IVectorStoreRecordCollection)sut).DeleteAsync( new AzureCosmosDBNoSQLCompositeKey(RecordKey, PartitionKey)); } else { + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + "collection"); + await ((IVectorStoreRecordCollection)sut).DeleteAsync( RecordKey); } @@ -295,7 +299,7 @@ public async Task DeleteBatchInvokesValidMethodsAsync() // Arrange List recordKeys = ["key1", "key2"]; - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, "collection"); @@ -318,7 +322,7 @@ public async Task DeleteBatchInvokesValidMethodsAsync() public async Task DeleteCollectionInvokesValidMethodsAsync() { // Arrange - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, "collection"); @@ -362,7 +366,7 @@ public async Task GetReturnsValidRecordAsync() It.IsAny())) .Returns(mockFeedIterator.Object); - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, "collection"); @@ -405,7 +409,7 @@ public async Task GetBatchReturnsValidRecordAsync() It.IsAny())) .Returns(mockFeedIterator.Object); - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, "collection"); @@ -432,7 +436,7 @@ public async Task UpsertReturnsRecordKeyAsync() // Arrange var hotel = new AzureCosmosDBNoSQLHotel("key") { HotelName = "Test Name" }; - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, "collection"); @@ -460,7 +464,7 @@ public async Task UpsertBatchReturnsRecordKeysAsync() var hotel2 = new AzureCosmosDBNoSQLHotel("key2") { HotelName = "Test Name 2" }; var hotel3 = new AzureCosmosDBNoSQLHotel("key3") { HotelName = "Test Name 3" }; - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, "collection"); @@ -489,7 +493,7 @@ public async Task UpsertWithCustomMapperWorksCorrectlyAsync() .Setup(l => l.MapFromDataToStorageModel(It.IsAny())) .Returns(new JsonObject { ["id"] = "key", ["my_name"] = "Test Name" }); - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, "collection", new() { JsonObjectCustomMapper = mockMapper.Object }); @@ -546,7 +550,7 
@@ public async Task GetWithCustomMapperWorksCorrectlyAsync() .Setup(l => l.MapFromStorageToDataModel(It.IsAny(), It.IsAny())) .Returns(new AzureCosmosDBNoSQLHotel(RecordKey) { HotelName = "Name from mapper" }); - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, "collection", new() { JsonObjectCustomMapper = mockMapper.Object }); @@ -597,7 +601,7 @@ public async Task VectorizedSearchReturnsValidRecordAsync() It.IsAny())) .Returns(mockFeedIterator.Object); - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, "collection"); @@ -618,7 +622,7 @@ public async Task VectorizedSearchReturnsValidRecordAsync() public async Task VectorizedSearchWithUnsupportedVectorTypeThrowsExceptionAsync() { // Arrange - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, "collection"); @@ -631,7 +635,7 @@ await Assert.ThrowsAsync(async () => public async Task VectorizedSearchWithNonExistentVectorPropertyNameThrowsExceptionAsync() { // Arrange - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._mockDatabase.Object, "collection"); diff --git a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs index 5fa4ea476b74..ab8e797a0eb2 100644 --- a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs @@ -465,33 +465,23 @@ public async Task CanSearchManyRecordsAsync(bool useDefinition) [Theory] [InlineData(TestRecordKey1, TestRecordKey2)] [InlineData(TestRecordIntKey1, TestRecordIntKey2)] - public async Task ItCanSearchUsingTheGenericDataModelAsync(TKey testKey1, TKey testKey2) + public async Task ItCanSearchUsingTheDynamicDataModelAsync(TKey testKey1, TKey testKey2) where TKey : notnull { // Arrange - var record1 = new VectorStoreGenericDataModel(testKey1) + var record1 = new Dictionary { - Data = new Dictionary - { - ["Data"] = $"data {testKey1}", - ["Tags"] = new List { "default tag", "tag " + testKey1 } - }, - Vectors = new Dictionary - { - ["Vector"] = new ReadOnlyMemory([1, 1, 1, 1]) - } + ["Key"] = testKey1, + ["Data"] = $"data {testKey1}", + ["Tags"] = new List { "default tag", "tag " + testKey1 }, + ["Vector"] = new ReadOnlyMemory([1, 1, 1, 1]) }; - var record2 = new VectorStoreGenericDataModel(testKey2) + var record2 = new Dictionary { - Data = new Dictionary - { - ["Data"] = $"data {testKey2}", - ["Tags"] = new List { "default tag", "tag " + testKey2 } - }, - Vectors = new Dictionary - { - ["Vector"] = new ReadOnlyMemory([-1, -1, -1, -1]) - } + ["Key"] = testKey2, + ["Data"] = $"data {testKey2}", + ["Tags"] = new List { "default tag", "tag " + testKey2 }, + ["Vector"] = new ReadOnlyMemory([-1, -1, -1, -1]) }; var collection = new ConcurrentDictionary(); @@ -500,7 +490,7 @@ public async Task ItCanSearchUsingTheGenericDataModelAsync(TKey testKey1, this._collectionStore.TryAdd(TestCollectionName, collection); - var sut = new InMemoryVectorStoreRecordCollection>( + var sut = new InMemoryVectorStoreRecordCollection>( this._collectionStore, 
this._collectionStoreTypes, TestCollectionName, @@ -513,18 +503,18 @@ public async Task ItCanSearchUsingTheGenericDataModelAsync(TKey testKey1, var actual = await sut.VectorizedSearchAsync( new ReadOnlyMemory([1, 1, 1, 1]), top: 3, - new() { IncludeVectors = true, VectorProperty = r => r.Vectors["Vector"] }, + new() { IncludeVectors = true, VectorProperty = r => r["Vector"] }, this._testCancellationToken); // Assert Assert.NotNull(actual); var actualResults = await actual.Results.ToListAsync(); Assert.Equal(2, actualResults.Count); - Assert.Equal(testKey1, actualResults[0].Record.Key); - Assert.Equal($"data {testKey1}", actualResults[0].Record.Data["Data"]); + Assert.Equal(testKey1, actualResults[0].Record["Key"]); + Assert.Equal($"data {testKey1}", actualResults[0].Record["Data"]); Assert.Equal(1, actualResults[0].Score); - Assert.Equal(testKey2, actualResults[1].Record.Key); - Assert.Equal($"data {testKey2}", actualResults[1].Record.Data["Data"]); + Assert.Equal(testKey2, actualResults[1].Record["Key"]); + Assert.Equal($"data {testKey2}", actualResults[1].Record["Data"]); Assert.Equal(-1, actualResults[1].Score); } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchDynamicDataModelMapper.cs similarity index 72% rename from dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchGenericDataModelMapper.cs rename to dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchDynamicDataModelMapper.cs index 3ca40ca84d15..20a9eb00243e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchGenericDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchDynamicDataModelMapper.cs @@ -16,11 +16,11 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Azure AI Search. 
/// #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete -internal sealed class AzureAISearchGenericDataModelMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper, JsonObject> +internal sealed class AzureAISearchDynamicDataModelMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper, JsonObject> #pragma warning restore CS0618 { /// - public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) + public JsonObject MapFromDataToStorageModel(Dictionary dataModel) { Verify.NotNull(dataModel); @@ -32,22 +32,15 @@ public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel switch (property) { case VectorStoreRecordKeyPropertyModel keyProperty: - storageJsonObject.Add(keyProperty.StorageName, dataModel.Key); + storageJsonObject.Add(keyProperty.StorageName, (string)model.KeyProperty.GetValueAsObject(dataModel)!); continue; case VectorStoreRecordDataPropertyModel dataProperty: - if (dataModel.Data is not null && dataModel.Data.TryGetValue(dataProperty.ModelName, out var dataValue)) - { - var serializedJsonNode = JsonSerializer.SerializeToNode(dataValue); - storageJsonObject.Add(dataProperty.ModelName, serializedJsonNode); - } - continue; - case VectorStoreRecordVectorPropertyModel vectorProperty: - if (dataModel.Vectors is not null && dataModel.Vectors.TryGetValue(vectorProperty.ModelName, out var vectorValue)) + if (dataModel.TryGetValue(property.ModelName, out var dataValue)) { - var serializedJsonNode = JsonSerializer.SerializeToNode(vectorValue); - storageJsonObject.Add(vectorProperty.StorageName, serializedJsonNode); + var serializedJsonNode = JsonSerializer.SerializeToNode(dataValue); + storageJsonObject.Add(property.ModelName, serializedJsonNode); } continue; @@ -60,14 +53,12 @@ public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel } /// - public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) + public Dictionary MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) { Verify.NotNull(storageModel); // Create variables to store the response properties. - var dataProperties = new Dictionary(); - var vectorProperties = new Dictionary(); - string? key = null; + var result = new Dictionary(); // Loop through all known properties and map each from json to the data type. foreach (var property in model.Properties) @@ -75,7 +66,7 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject switch (property) { case VectorStoreRecordKeyPropertyModel keyProperty: - key = (string?)storageModel[keyProperty.StorageName] + result[keyProperty.ModelName] = (string?)storageModel[keyProperty.StorageName] ?? throw new VectorStoreRecordMappingException($"The key property '{keyProperty.StorageName}' is missing from the record retrieved from storage."); continue; @@ -84,7 +75,7 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject { if (storageModel.TryGetPropertyValue(dataProperty.StorageName, out var value)) { - dataProperties.Add(dataProperty.ModelName, value is null ? null : GetDataPropertyValue(property.Type, value)); + result.Add(dataProperty.ModelName, value is null ? 
null : GetDataPropertyValue(property.Type, value)); } continue; } @@ -96,11 +87,11 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject if (value is not null) { ReadOnlyMemory vector = value.AsArray().Select(x => (float)x!).ToArray(); - vectorProperties.Add(vectorProperty.ModelName, vector); + result.Add(vectorProperty.ModelName, vector); } else { - vectorProperties.Add(vectorProperty.ModelName, null); + result.Add(vectorProperty.ModelName, null); } } @@ -115,12 +106,7 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject } } - if (key is null) - { - throw new VectorStoreRecordMappingException("No key property was found in the record retrieved from storage."); - } - - return new VectorStoreGenericDataModel(key) { Data = dataProperties, Vectors = vectorProperties }; + return result; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs index c491d43bcb6f..a3b6768ec43d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs @@ -141,8 +141,8 @@ private void TranslateMember(MemberExpression memberExpression) { switch (memberExpression) { - case var _ when this.TryGetField(memberExpression, out var column): - this._filter.Append(column); // TODO: Escape + case var _ when this.TryBindProperty(memberExpression, out var property): + this._filter.Append(property.StorageName); // TODO: Escape return; // Identify captured lambda variables, inline them as constants @@ -159,6 +159,11 @@ private void TranslateMethodCall(MethodCallExpression methodCall) { switch (methodCall) { + // Dictionary access for dynamic mapping (r => r["SomeString"] == "foo") + case MethodCallExpression when this.TryBindProperty(methodCall, out var property): + this._filter.Append(property.StorageName); // TODO: Escape + return; + // Enumerable.Contains() case { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains when contains.Method.DeclaringType == typeof(Enumerable): @@ -189,7 +194,7 @@ private void TranslateContains(Expression source, Expression item) switch (source) { // Contains over array field (r => r.Strings.Contains("foo")) - case var _ when this.TryGetField(source, out _): + case var _ when this.TryBindProperty(source, out _): this.Translate(source); this._filter.Append("/any(t: t eq "); this.Translate(item); @@ -207,6 +212,11 @@ private void TranslateContains(Expression source, Expression item) throw new NotSupportedException("Invalid element in array"); } + if (elementValue is not string) + { + throw new NotSupportedException("Contains over non-string arrays is not supported"); + } + elements[i] = elementValue; } @@ -225,11 +235,6 @@ private void TranslateContains(Expression source, Expression item) void ProcessInlineEnumerable(IEnumerable elements, Expression item) { - if (item.Type != typeof(string)) - { - throw new NotSupportedException("Contains over non-string arrays is not supported"); - } - this._filter.Append("search.in("); this.Translate(item); this._filter.Append(", '"); @@ -312,26 +317,60 @@ private void TranslateUnary(UnaryExpression unary) this._filter.Append(')'); return; + // Handle convert over member access, for dynamic dictionary access (r => (int)r["SomeInt"] == 8) + case ExpressionType.Convert when this.TryBindProperty(unary.Operand, out var 
property) && unary.Type == property.Type: + this._filter.Append(property.StorageName); // TODO: Escape + return; + default: throw new NotSupportedException("Unsupported unary expression node type: " + unary.NodeType); } } - private bool TryGetField(Expression expression, [NotNullWhen(true)] out string? field) + private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out VectorStoreRecordPropertyModel? property) { - if (expression is MemberExpression member && member.Expression == this._recordParameter) + Type? convertedClrType = null; + + if (expression is UnaryExpression { NodeType: ExpressionType.Convert } unary) + { + expression = unary.Operand; + convertedClrType = unary.Type; + } + + var modelName = expression switch { - if (!this._model.PropertyMap.TryGetValue(member.Member.Name, out var property)) + // Regular member access for strongly-typed POCO binding (e.g. r => r.SomeInt == 8) + MemberExpression memberExpression when memberExpression.Expression == this._recordParameter + => memberExpression.Member.Name, + + // Dictionary lookup for weakly-typed dynamic binding (e.g. r => r["SomeInt"] == 8) + MethodCallExpression { - throw new InvalidOperationException($"Property name '{member.Member.Name}' provided as part of the filter clause is not a valid property name."); - } + Method: { Name: "get_Item", DeclaringType: var declaringType }, + Arguments: [ConstantExpression { Value: string keyName }] + } methodCall when methodCall.Object == this._recordParameter && declaringType == typeof(Dictionary) + => keyName, - field = property.StorageName; - return true; + _ => null + }; + + if (modelName is null) + { + property = null; + return false; } - field = null; - return false; + if (!this._model.PropertyMap.TryGetValue(modelName, out property)) + { + throw new InvalidOperationException($"Property name '{modelName}' provided as part of the filter clause is not a valid property name."); + } + + if (convertedClrType is not null && convertedClrType != property.Type) + { + throw new InvalidCastException($"Property '{property.ModelName}' is being cast to type '{convertedClrType.Name}', but its configured type is '{property.Type.Name}'."); + } + + return true; } private static bool TryGetCapturedValue(Expression expression, out object? capturedValue) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs index fb93ed16d0d3..03b9b2bf03f2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs @@ -64,8 +64,8 @@ public static IKernelBuilder AddAzureAISearchVectorStore(this IKernelBuilder bui /// /// The type of the data model that the collection should contain. /// The builder to register the on. - /// The name of the collection that this will access. - /// Optional configuration options to pass to the . + /// The name of the collection that this will access. + /// Optional configuration options to pass to the . /// An optional service id to use as the service key. /// The kernel builder. public static IKernelBuilder AddAzureAISearchVectorStoreRecordCollection( @@ -85,10 +85,10 @@ public static IKernelBuilder AddAzureAISearchVectorStoreRecordCollection /// The type of the data model that the collection should contain. /// The builder to register the on. 
- /// The name of the collection that this will access. + /// The name of the collection that this will access. /// The service endpoint for Azure AI Search. /// The credential to authenticate to Azure AI Search with. - /// Optional configuration options to pass to the . + /// Optional configuration options to pass to the . /// An optional service id to use as the service key. /// The kernel builder. public static IKernelBuilder AddAzureAISearchVectorStoreRecordCollection( @@ -110,10 +110,10 @@ public static IKernelBuilder AddAzureAISearchVectorStoreRecordCollection /// The type of the data model that the collection should contain. /// The builder to register the on. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// The service endpoint for Azure AI Search. /// The credential to authenticate to Azure AI Search with. - /// Optional configuration options to pass to the . + /// Optional configuration options to pass to the . /// An optional service id to use as the service key. /// The kernel builder. public static IKernelBuilder AddAzureAISearchVectorStoreRecordCollection( diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs index 3bed2d72bae4..661eed32ece9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs @@ -113,8 +113,8 @@ public static IServiceCollection AddAzureAISearchVectorStore(this IServiceCollec /// /// The type of the data model that the collection should contain. /// The to register the on. - /// The name of the collection that this will access. - /// Optional configuration options to pass to the . + /// The name of the collection that this will access. + /// Optional configuration options to pass to the . /// An optional service id to use as the service key. /// The service collection. public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection( @@ -133,7 +133,7 @@ public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection(); var selectedOptions = options ?? sp.GetService>(); - return new AzureAISearchVectorStoreRecordCollection( + return new AzureAISearchVectorStoreRecordCollection( searchIndexClient, collectionName, selectedOptions); @@ -150,10 +150,10 @@ public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection /// The type of the data model that the collection should contain. /// The to register the on. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// The service endpoint for Azure AI Search. /// The credential to authenticate to Azure AI Search with. - /// Optional configuration options to pass to the . + /// Optional configuration options to pass to the . /// An optional service id to use as the service key. /// The service collection. 
public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection( @@ -177,7 +177,7 @@ public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection( + return new AzureAISearchVectorStoreRecordCollection( searchIndexClient, collectionName, selectedOptions); @@ -194,10 +194,10 @@ public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection /// The type of the data model that the collection should contain. /// The to register the on. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// The service endpoint for Azure AI Search. /// The credential to authenticate to Azure AI Search with. - /// Optional configuration options to pass to the . + /// Optional configuration options to pass to the . /// An optional service id to use as the service key. /// The service collection. public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection( @@ -221,7 +221,7 @@ public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection( + return new AzureAISearchVectorStoreRecordCollection( searchIndexClient, collectionName, selectedOptions); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs index fd756592f099..afacf870d602 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs @@ -59,12 +59,7 @@ public IVectorStoreRecordCollection GetCollection( } #pragma warning restore CS0618 - if (typeof(TKey) != typeof(string)) - { - throw new NotSupportedException("Only string keys are supported."); - } - - var recordCollection = new AzureAISearchVectorStoreRecordCollection( + var recordCollection = new AzureAISearchVectorStoreRecordCollection( this._searchIndexClient, name, new AzureAISearchVectorStoreRecordCollectionOptions() diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs index 4c17ed4195e6..c502f4828b99 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs @@ -12,7 +12,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; public sealed class AzureAISearchVectorStoreOptions { /// - /// An optional factory to use for constructing instances, if a custom record collection is required. + /// An optional factory to use for constructing instances, if a custom record collection is required. /// [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IAzureAISearchVectorStoreRecordCollectionFactory? 
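For orientation, the registration overloads above can be exercised roughly as follows. This is a minimal sketch and not part of the patch: the Hotel model, the endpoint URL, and the exact parameter names are illustrative assumptions, and the extension methods are assumed to live in the connector's usual Microsoft.SemanticKernel namespace.

using System;
using Azure.Identity;
using Azure.Search.Documents.Indexes;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.VectorData;
using Microsoft.SemanticKernel; // assumed namespace of the registration extensions

var services = new ServiceCollection();

// Overload that resolves SearchIndexClient from the container.
services.AddSingleton(new SearchIndexClient(new Uri("https://example.search.windows.net"), new DefaultAzureCredential()));
services.AddAzureAISearchVectorStoreRecordCollection<Hotel>("hotels");

// Overload that constructs its own client from an endpoint and credential.
services.AddAzureAISearchVectorStoreRecordCollection<Hotel>(
    "hotels",
    new Uri("https://example.search.windows.net"),
    new DefaultAzureCredential());

// Hypothetical record type reused by the sketches that follow.
public sealed class Hotel
{
    [VectorStoreRecordKey]
    public string HotelId { get; set; } = string.Empty;

    [VectorStoreRecordData]
    public string Description { get; set; } = string.Empty;

    [VectorStoreRecordVector(4)]
    public ReadOnlyMemory<float> DescriptionEmbedding { get; set; }
}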
VectorStoreCollectionFactory { get; init; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index c5c8b433eea7..8351c43e35e7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -22,14 +22,16 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// /// Service for storing and retrieving vector records, that uses Azure AI Search as the underlying storage. /// +/// The data type of the record key. Can be either , or for dynamic mapping. /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class AzureAISearchVectorStoreRecordCollection : - IVectorStoreRecordCollection, +public sealed class AzureAISearchVectorStoreRecordCollection : + IVectorStoreRecordCollection, IVectorizableTextSearch, IKeywordHybridSearch -#pragma warning restore CA1711 // Identifiers should not have incorrect suffix + where TKey : notnull where TRecord : notnull +#pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; @@ -46,7 +48,7 @@ public sealed class AzureAISearchVectorStoreRecordCollection : /// Azure AI Search client that can be used to manage data in an Azure AI Search Service index. private readonly SearchClient _searchClient; - /// The name of the collection that this will access. + /// The name of the collection that this will access. private readonly string _collectionName; /// Optional configuration options for this class. @@ -61,10 +63,10 @@ public sealed class AzureAISearchVectorStoreRecordCollection : private readonly VectorStoreRecordModel _model; /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// Azure AI Search client that can be used to manage the list of indices in an Azure AI Search Service. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. /// Thrown when is null. /// Thrown when options are misconfigured. @@ -74,6 +76,11 @@ public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexCli Verify.NotNull(searchIndexClient); Verify.NotNullOrWhiteSpace(collectionName); + if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(object)) + { + throw new NotSupportedException("Only string keys are supported (and object for dynamic mapping)"); + } + // Assign. 
this._searchIndexClient = searchIndexClient; this._collectionName = collectionName; @@ -92,9 +99,9 @@ public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexCli { this._mapper = this._options.JsonObjectCustomMapper; } - else if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) + else if (typeof(TRecord) == typeof(Dictionary)) { - this._mapper = new AzureAISearchGenericDataModelMapper(this._model) as IVectorStoreRecordMapper; + this._mapper = new AzureAISearchDynamicDataModelMapper(this._model) as IVectorStoreRecordMapper; } this._collectionMetadata = new() @@ -202,10 +209,8 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public Task GetAsync(string key, GetRecordOptions? options = default, CancellationToken cancellationToken = default) + public Task GetAsync(TKey key, GetRecordOptions? options = default, CancellationToken cancellationToken = default) { - Verify.NotNullOrWhiteSpace(key); - // Create Options. var innerOptions = this.ConvertGetDocumentOptions(options); var includeVectors = options?.IncludeVectors ?? false; @@ -215,7 +220,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -236,18 +241,18 @@ public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRec } /// - public Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { - Verify.NotNullOrWhiteSpace(key); + var stringKey = this.GetStringKey(key); // Remove record. return this.RunOperationAsync( "DeleteDocuments", - () => this._searchClient.DeleteDocumentsAsync(this._model.KeyProperty.StorageName, [key], new IndexDocumentsOptions(), cancellationToken)); + () => this._searchClient.DeleteDocumentsAsync(this._model.KeyProperty.StorageName, [stringKey], new IndexDocumentsOptions(), cancellationToken)); } /// - public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); if (!keys.Any()) @@ -255,14 +260,16 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellation return Task.CompletedTask; } + var stringKeys = keys is IEnumerable k ? k : keys.Cast(); + // Remove records. return this.RunOperationAsync( "DeleteDocuments", - () => this._searchClient.DeleteDocumentsAsync(this._model.KeyProperty.StorageName, keys, new IndexDocumentsOptions(), cancellationToken)); + () => this._searchClient.DeleteDocumentsAsync(this._model.KeyProperty.StorageName, stringKeys, new IndexDocumentsOptions(), cancellationToken)); } /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -271,11 +278,12 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancella // Upsert record. 
var results = await this.MapToStorageModelAndUploadDocumentAsync([record], innerOptions, cancellationToken).ConfigureAwait(false); - return results.Value.Results[0].Key; + + return (TKey)(object)results.Value.Results[0].Key; } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); if (!records.Any()) @@ -290,8 +298,10 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, var results = await this.MapToStorageModelAndUploadDocumentAsync(records, innerOptions, cancellationToken).ConfigureAwait(false); // Get results - var resultKeys = results.Value.Results.Select(x => x.Key).ToList(); - foreach (var resultKey in resultKeys) { yield return resultKey; } + foreach (var key in results.Value.Results.Select(x => x.Key)) + { + yield return (TKey)(object)key; + } } /// @@ -543,19 +553,21 @@ public Task> HybridSearchAsync(TVector vec /// The to monitor for cancellation requests. The default is . /// The retrieved document, mapped to the consumer data model. private async Task GetDocumentAndMapToDataModelAsync( - string key, + TKey key, bool includeVectors, GetDocumentOptions innerOptions, CancellationToken cancellationToken) { const string OperationName = "GetDocument"; + var stringKey = this.GetStringKey(key); + // Use the user provided mapper. if (this._mapper is not null) { var jsonObject = await this.RunOperationAsync( OperationName, - () => GetDocumentWithNotFoundHandlingAsync(this._searchClient, key, innerOptions, cancellationToken)).ConfigureAwait(false); + () => GetDocumentWithNotFoundHandlingAsync(this._searchClient, stringKey, innerOptions, cancellationToken)).ConfigureAwait(false); if (jsonObject is null) { @@ -573,7 +585,7 @@ public Task> HybridSearchAsync(TVector vec // Use the built in Azure AI Search mapper. return await this.RunOperationAsync( OperationName, - () => GetDocumentWithNotFoundHandlingAsync(this._searchClient, key, innerOptions, cancellationToken)).ConfigureAwait(false); + () => GetDocumentWithNotFoundHandlingAsync(this._searchClient, stringKey, innerOptions, cancellationToken)).ConfigureAwait(false); } /// @@ -770,4 +782,15 @@ private static ReadOnlyMemory VerifyVectorParam(TVector vector) return floatVector; } + + private string GetStringKey(TKey key) + { + Verify.NotNull(key); + + var stringKey = key as string ?? throw new UnreachableException("string key should have been validated during model building"); + + Verify.NotNullOrWhiteSpace(stringKey, nameof(key)); + + return stringKey; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs index 5da63d55ffb8..be7c668b264b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs @@ -9,7 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// -/// Options when creating a . +/// Options when creating a . 
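As a rough usage sketch of the reshaped collection above (reusing the hypothetical Hotel model and an assumed existing searchIndexClient; not taken from the patch), the key type is now spelled explicitly alongside the record type:

// TKey must be string for this connector, or object when using dynamic mapping.
var hotels = new AzureAISearchVectorStoreRecordCollection<string, Hotel>(searchIndexClient, "hotels");

string key = await hotels.UpsertAsync(new Hotel
{
    HotelId = "hotel-1",
    Description = "Sample hotel",
    DescriptionEmbedding = new float[] { 0.1f, 0.2f, 0.3f, 0.4f },
});

Hotel? fetched = await hotels.GetAsync(key, new GetRecordOptions { IncludeVectors = true });
await hotels.DeleteAsync(key);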
/// public sealed class AzureAISearchVectorStoreRecordCollectionOptions { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs index ade730443c81..e54a378993c2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs @@ -47,9 +47,9 @@ or ExpressionType.LessThan or ExpressionType.LessThanOrEqual UnaryExpression { NodeType: ExpressionType.Not } not => this.TranslateNot(not), - // MemberExpression is generally handled within e.g. TranslateEqualityComparison; this is used to translate direct bool inside filter (e.g. Filter => r => r.Bool) - MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _) - => this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(true))), + // Special handling for bool constant as the filter expression (r => r.Bool) + Expression when node.Type == typeof(bool) && this.TryBindProperty(node, out var property) + => this.GenerateEqualityComparison(property, value: true, ExpressionType.Equal), MethodCallExpression methodCall => this.TranslateMethodCall(methodCall), @@ -57,36 +57,36 @@ or ExpressionType.LessThan or ExpressionType.LessThanOrEqual }; private BsonDocument TranslateEqualityComparison(BinaryExpression binary) + => (this.TryBindProperty(binary.Left, out var property) && TryGetConstant(binary.Right, out var value)) + || (this.TryBindProperty(binary.Right, out property) && TryGetConstant(binary.Left, out value)) + ? this.GenerateEqualityComparison(property, value, binary.NodeType) + : throw new NotSupportedException("Invalid equality/comparison"); + + private BsonDocument GenerateEqualityComparison(VectorStoreRecordPropertyModel property, object? 
value, ExpressionType nodeType) { - if ((this.TryTranslateFieldAccess(binary.Left, out var storagePropertyName) && TryGetConstant(binary.Right, out var value)) - || (this.TryTranslateFieldAccess(binary.Right, out storagePropertyName) && TryGetConstant(binary.Left, out value))) + if (value is null) { - if (value is null) - { - throw new NotSupportedException("MongogDB does not support null checks in vector search pre-filters"); - } - - // Short form of equality (instead of $eq) - if (binary.NodeType is ExpressionType.Equal) - { - return new BsonDocument { [storagePropertyName] = BsonValue.Create(value) }; - } + throw new NotSupportedException("MongogDB does not support null checks in vector search pre-filters"); + } - var filterOperator = binary.NodeType switch - { - ExpressionType.NotEqual => "$ne", - ExpressionType.GreaterThan => "$gt", - ExpressionType.GreaterThanOrEqual => "$gte", - ExpressionType.LessThan => "$lt", - ExpressionType.LessThanOrEqual => "$lte", + // Short form of equality (instead of $eq) + if (nodeType is ExpressionType.Equal) + { + return new BsonDocument { [property.StorageName] = BsonValue.Create(value) }; + } - _ => throw new UnreachableException() - }; + var filterOperator = nodeType switch + { + ExpressionType.NotEqual => "$ne", + ExpressionType.GreaterThan => "$gt", + ExpressionType.GreaterThanOrEqual => "$gte", + ExpressionType.LessThan => "$lt", + ExpressionType.LessThanOrEqual => "$lte", - return new BsonDocument { [storagePropertyName] = new BsonDocument { [filterOperator] = BsonValue.Create(value) } }; - } + _ => throw new UnreachableException() + }; - throw new NotSupportedException("Invalid equality/comparison"); + return new BsonDocument { [property.StorageName] = new BsonDocument { [filterOperator] = BsonValue.Create(value) } }; } private BsonDocument TranslateAndOr(BinaryExpression andOr) @@ -131,9 +131,9 @@ private BsonDocument TranslateNot(UnaryExpression not) binary.Left, binary.Right)); - // Not over bool field (Filter => r => !r.Bool) - case MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _): - return this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(false))); + // Not over bool field (r => !r.Bool) + case var negated when negated.Type == typeof(bool) && this.TryBindProperty(negated, out var property): + return this.GenerateEqualityComparison(property, false, ExpressionType.Equal); } var operand = this.Translate(not.Operand); @@ -175,7 +175,7 @@ private BsonDocument TranslateContains(Expression source, Expression item) switch (source) { // Contains over array column (r => r.Strings.Contains("foo")) - case var _ when this.TryTranslateFieldAccess(source, out _): + case var _ when this.TryBindProperty(source, out _): throw new NotSupportedException("MongoDB does not support Contains within array fields ($elemMatch) in vector search pre-filters"); // Contains over inline enumerable @@ -205,14 +205,14 @@ private BsonDocument TranslateContains(Expression source, Expression item) BsonDocument ProcessInlineEnumerable(IEnumerable elements, Expression item) { - if (!this.TryTranslateFieldAccess(item, out var storagePropertyName)) + if (!this.TryBindProperty(item, out var property)) { throw new NotSupportedException("Unsupported item type in Contains"); } return new BsonDocument { - [storagePropertyName] = new BsonDocument + [property.StorageName] = new BsonDocument { ["$in"] = new BsonArray(from object? 
element in elements select BsonValue.Create(element)) } @@ -220,21 +220,50 @@ BsonDocument ProcessInlineEnumerable(IEnumerable elements, Expression item) } } - private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] out string? storagePropertyName) + private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out VectorStoreRecordPropertyModel? property) { - if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) + Type? convertedClrType = null; + + if (expression is UnaryExpression { NodeType: ExpressionType.Convert } unary) + { + expression = unary.Operand; + convertedClrType = unary.Type; + } + + var modelName = expression switch { - if (!this._model.PropertyMap.TryGetValue(memberExpression.Member.Name, out var property)) + // Regular member access for strongly-typed POCO binding (e.g. r => r.SomeInt == 8) + MemberExpression memberExpression when memberExpression.Expression == this._recordParameter + => memberExpression.Member.Name, + + // Dictionary lookup for weakly-typed dynamic binding (e.g. r => r["SomeInt"] == 8) + MethodCallExpression { - throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); - } + Method: { Name: "get_Item", DeclaringType: var declaringType }, + Arguments: [ConstantExpression { Value: string keyName }] + } methodCall when methodCall.Object == this._recordParameter && declaringType == typeof(Dictionary) + => keyName, + + _ => null + }; - storagePropertyName = property.StorageName; - return true; + if (modelName is null) + { + property = null; + return false; + } + + if (!this._model.PropertyMap.TryGetValue(modelName, out property)) + { + throw new InvalidOperationException($"Property name '{modelName}' provided as part of the filter clause is not a valid property name."); + } + + if (convertedClrType is not null && convertedClrType != property.Type) + { + throw new InvalidCastException($"Property '{property.ModelName}' is being cast to type '{convertedClrType.Name}', but its configured type is '{property.Type.Name}'."); } - storagePropertyName = null; - return false; + return true; } private static bool TryGetConstant(Expression expression, out object? constantValue) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBServiceCollectionExtensions.cs index 105c15c1414c..671646ab2f5e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBServiceCollectionExtensions.cs @@ -102,7 +102,7 @@ public static IServiceCollection AddAzureCosmosDBMongoDBVectorStoreRecordCollect var database = sp.GetRequiredService(); var selectedOptions = options ?? sp.GetService>(); - return new AzureCosmosDBMongoDBVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new AzureCosmosDBMongoDBVectorStoreRecordCollection(database, collectionName, selectedOptions); }); AddVectorizedSearch(services, serviceId); @@ -143,7 +143,7 @@ public static IServiceCollection AddAzureCosmosDBMongoDBVectorStoreRecordCollect var selectedOptions = options ?? 
sp.GetService>(); - return new AzureCosmosDBMongoDBVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new AzureCosmosDBMongoDBVectorStoreRecordCollection(database, collectionName, selectedOptions); }); AddVectorizedSearch(services, serviceId); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs index 3f7d66e5c05f..dc75467c17f1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs @@ -57,12 +57,7 @@ public IVectorStoreRecordCollection GetCollection( } #pragma warning restore CS0618 - if (typeof(TKey) != typeof(string)) - { - throw new NotSupportedException("Only string keys are supported."); - } - - var recordCollection = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var recordCollection = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mongoDatabase, name, new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreOptions.cs index 8e9b2cccbc6e..69e3b94233ae 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreOptions.cs @@ -10,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; public sealed class AzureCosmosDBMongoDBVectorStoreOptions { /// - /// An optional factory to use for constructing instances, if a custom record collection is required. + /// An optional factory to use for constructing instances, if a custom record collection is required. /// [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index 58deb0d28749..c4a031fce000 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using System.Diagnostics; using System.Linq; using System.Linq.Expressions; using System.Reflection; @@ -21,11 +22,13 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; /// /// Service for storing and retrieving vector records, that uses Azure CosmosDB MongoDB as the underlying storage. /// +/// The data type of the record key. Can be either , or for dynamic mapping. /// The data model to use for adding, updating and retrieving data from storage. 
#pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection -#pragma warning restore CA1711 // Identifiers should not have incorrect suffix +public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection + where TKey : notnull where TRecord : notnull +#pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; @@ -60,10 +63,10 @@ public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollection : I public string CollectionName { get; } /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// that can be used to manage the collections in Azure CosmosDB MongoDB. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. public AzureCosmosDBMongoDBVectorStoreRecordCollection( IMongoDatabase mongoDatabase, @@ -74,6 +77,11 @@ public AzureCosmosDBMongoDBVectorStoreRecordCollection( Verify.NotNull(mongoDatabase); Verify.NotNullOrWhiteSpace(collectionName); + if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(object)) + { + throw new NotSupportedException("Only string keys are supported (and object for dynamic mapping)"); + } + // Assign. this._mongoDatabase = mongoDatabase; this._mongoCollection = mongoDatabase.GetCollection(collectionName); @@ -124,20 +132,22 @@ await this.RunOperationAsync("CreateIndexes", } /// - public async Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public async Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { - Verify.NotNullOrWhiteSpace(key); + var stringKey = this.GetStringKey(key); - await this.RunOperationAsync("DeleteOne", () => this._mongoCollection.DeleteOneAsync(this.GetFilterById(key), cancellationToken)) + await this.RunOperationAsync("DeleteOne", () => this._mongoCollection.DeleteOneAsync(this.GetFilterById(stringKey), cancellationToken)) .ConfigureAwait(false); } /// - public async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); - await this.RunOperationAsync("DeleteMany", () => this._mongoCollection.DeleteManyAsync(this.GetFilterByIds(keys), cancellationToken)) + var stringKeys = keys is IEnumerable k ? k : keys.Cast(); + + await this.RunOperationAsync("DeleteMany", () => this._mongoCollection.DeleteManyAsync(this.GetFilterByIds(stringKeys), cancellationToken)) .ConfigureAwait(false); } @@ -146,18 +156,18 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) => this.RunOperationAsync("DropCollection", () => this._mongoDatabase.DropCollectionAsync(this.CollectionName, cancellationToken)); /// - public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { - Verify.NotNullOrWhiteSpace(key); - const string OperationName = "Find"; + var stringKey = this.GetStringKey(key); + var includeVectors = options?.IncludeVectors ?? 
false; var record = await this.RunOperationAsync(OperationName, async () => { using var cursor = await this - .FindAsync(this.GetFilterById(key), options, cancellationToken) + .FindAsync(this.GetFilterById(stringKey), options, cancellationToken) .ConfigureAwait(false); return await cursor.SingleOrDefaultAsync(cancellationToken).ConfigureAwait(false); @@ -178,7 +188,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) /// public async IAsyncEnumerable GetAsync( - IEnumerable keys, + IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { @@ -186,8 +196,10 @@ public async IAsyncEnumerable GetAsync( const string OperationName = "Find"; + var stringKeys = keys is IEnumerable k ? k : keys.Cast(); + using var cursor = await this - .FindAsync(this.GetFilterByIds(keys), options, cancellationToken) + .FindAsync(this.GetFilterByIds(stringKeys), options, cancellationToken) .ConfigureAwait(false); while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false)) @@ -208,7 +220,7 @@ public async IAsyncEnumerable GetAsync( } /// - public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -230,12 +242,12 @@ await this._mongoCollection .ReplaceOneAsync(this.GetFilterById(key), storageModel, replaceOptions, cancellationToken) .ConfigureAwait(false); - return key; + return (TKey)(object)key; }); } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -246,7 +258,7 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, { if (result is not null) { - yield return result; + yield return (TKey)(object)result; } } } @@ -573,14 +585,25 @@ private IVectorStoreRecordMapper InitializeMapper() return this._options.BsonDocumentCustomMapper; } - if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) + if (typeof(TRecord) == typeof(Dictionary)) { - return (new MongoDBGenericDataModelMapper(this._model) as IVectorStoreRecordMapper)!; + return (new MongoDBDynamicDataModelMapper(this._model) as IVectorStoreRecordMapper)!; } return new MongoDBVectorStoreRecordMapper(this._model); } #pragma warning restore CS0618 + private string GetStringKey(TKey key) + { + Verify.NotNull(key); + + var stringKey = key as string ?? throw new UnreachableException("string key should have been validated during model building"); + + Verify.NotNullOrWhiteSpace(stringKey, nameof(key)); + + return stringKey; + } + #endregion } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs index 7eb381a1095f..5a274559ebc9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs @@ -7,7 +7,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; /// -/// Options when creating a . +/// Options when creating a . 
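The Dictionary branch above enables the weakly-typed ("dynamic") mapping path, where TRecord is Dictionary<string, object?> and TKey is object. A minimal sketch, assuming an existing IMongoDatabase named mongoDatabase and a record definition built elsewhere (hotelDefinition is a placeholder name):

var dynamicHotels = new AzureCosmosDBMongoDBVectorStoreRecordCollection<object, Dictionary<string, object?>>(
    mongoDatabase,
    "hotels",
    new() { VectorStoreRecordDefinition = hotelDefinition });

object key = await dynamicHotels.UpsertAsync(new Dictionary<string, object?>
{
    ["HotelId"] = "hotel-1",
    ["Description"] = "Sample hotel",
    ["DescriptionEmbedding"] = new ReadOnlyMemory<float>(new[] { 0.1f, 0.2f, 0.3f, 0.4f }),
});

var fetched = await dynamicHotels.GetAsync(key, new GetRecordOptions { IncludeVectors = true });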
/// public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLDynamicDataModelMapper.cs similarity index 64% rename from dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLGenericDataModelMapper.cs rename to dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLDynamicDataModelMapper.cs index 6811764bf920..813899e6845c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLGenericDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLDynamicDataModelMapper.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; using System.Diagnostics; using System.Text.Json; @@ -13,8 +14,8 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Azure CosmosDB NoSQL. /// #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete -internal sealed class AzureCosmosDBNoSQLGenericDataModelMapper(VectorStoreRecordModel model, JsonSerializerOptions jsonSerializerOptions) - : IVectorStoreRecordMapper, JsonObject> +internal sealed class AzureCosmosDBNoSQLDynamicDataModelMapper(VectorStoreRecordModel model, JsonSerializerOptions jsonSerializerOptions) + : IVectorStoreRecordMapper, JsonObject> #pragma warning restore CS0618 { /// A default for serialization/deserialization of vector properties. @@ -23,7 +24,7 @@ internal sealed class AzureCosmosDBNoSQLGenericDataModelMapper(VectorStoreRecord Converters = { new AzureCosmosDBNoSQLReadOnlyMemoryByteConverter() } }; - public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) + public JsonObject MapFromDataToStorageModel(Dictionary dataModel) { Verify.NotNull(dataModel); @@ -35,11 +36,13 @@ public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel switch (property) { case VectorStoreRecordKeyPropertyModel keyProperty: - jsonObject[AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName] = dataModel.Key; + jsonObject[AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName] = (string)(dataModel[keyProperty.ModelName] + ?? throw new InvalidOperationException($"Key property '{keyProperty.ModelName}' is null.")); + break; case VectorStoreRecordDataPropertyModel dataProperty: - if (dataModel.Data is not null && dataModel.Data.TryGetValue(dataProperty.StorageName, out var dataValue)) + if (dataModel.TryGetValue(dataProperty.StorageName, out var dataValue)) { jsonObject[dataProperty.StorageName] = dataValue is not null ? JsonSerializer.SerializeToNode(dataValue, property.Type, jsonSerializerOptions) : @@ -48,7 +51,7 @@ public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel break; case VectorStoreRecordVectorPropertyModel vectorProperty: - if (dataModel.Vectors is not null && dataModel.Vectors.TryGetValue(vectorProperty.StorageName, out var vectorValue)) + if (dataModel.TryGetValue(vectorProperty.StorageName, out var vectorValue)) { jsonObject[vectorProperty.StorageName] = vectorValue is not null ? 
JsonSerializer.SerializeToNode(vectorValue, property.Type, s_vectorJsonSerializerOptions) : @@ -64,14 +67,11 @@ public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel return jsonObject; } - public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) + public Dictionary MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) { Verify.NotNull(storageModel); - // Create variables to store the response properties. - string? key = null; - var dataProperties = new Dictionary(); - var vectorProperties = new Dictionary(); + var result = new Dictionary(); // Loop through all known properties and map each from the storage model to the data model. foreach (var property in model.Properties) @@ -79,36 +79,30 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject switch (property) { case VectorStoreRecordKeyPropertyModel keyProperty: - if (storageModel.TryGetPropertyValue(AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName, out var keyValue)) - { - key = keyValue?.GetValue(); - } - break; + result[keyProperty.ModelName] = storageModel.TryGetPropertyValue(AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName, out var keyValue) + ? keyValue?.GetValue() + : throw new VectorStoreRecordMappingException("No key property was found in the record retrieved from storage."); + continue; case VectorStoreRecordDataPropertyModel dataProperty: if (storageModel.TryGetPropertyValue(dataProperty.StorageName, out var dataValue)) { - dataProperties.Add(property.ModelName, dataValue.Deserialize(property.Type, jsonSerializerOptions)); + result.Add(property.ModelName, dataValue.Deserialize(property.Type, jsonSerializerOptions)); } - break; + continue; - case VectorStoreRecordVectorPropertyModel vectorProperty when options.IncludeVectors: + case VectorStoreRecordVectorPropertyModel vectorProperty: if (options.IncludeVectors && storageModel.TryGetPropertyValue(vectorProperty.StorageName, out var vectorValue)) { - vectorProperties.Add(property.ModelName, vectorValue.Deserialize(property.Type, s_vectorJsonSerializerOptions)); + result.Add(property.ModelName, vectorValue.Deserialize(property.Type, s_vectorJsonSerializerOptions)); } - break; + continue; default: throw new UnreachableException(); } } - if (key is null) - { - throw new VectorStoreRecordMappingException("No key property was found in the record retrieved from storage."); - } - - return new VectorStoreGenericDataModel(key) { Data = dataProperties, Vectors = vectorProperties }; + return result; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLServiceCollectionExtensions.cs index ad0645feb098..2410e7f5826e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLServiceCollectionExtensions.cs @@ -103,7 +103,7 @@ public static IServiceCollection AddAzureCosmosDBNoSQLVectorStoreRecordCollectio var database = sp.GetRequiredService(); var selectedOptions = options ?? 
sp.GetService>(); - return new AzureCosmosDBNoSQLVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new AzureCosmosDBNoSQLVectorStoreRecordCollection(database, collectionName, selectedOptions); }); AddVectorizedSearch(services, serviceId); @@ -145,7 +145,7 @@ public static IServiceCollection AddAzureCosmosDBNoSQLVectorStoreRecordCollectio var database = cosmosClient.GetDatabase(databaseName); var selectedOptions = options ?? sp.GetService>(); - return new AzureCosmosDBNoSQLVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new AzureCosmosDBNoSQLVectorStoreRecordCollection(database, collectionName, selectedOptions); }); AddVectorizedSearch(services, serviceId); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs index d8d30c4cc113..f93cf0ad1517 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs @@ -60,12 +60,7 @@ public IVectorStoreRecordCollection GetCollection( } #pragma warning restore CS0618 - if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(AzureCosmosDBNoSQLCompositeKey)) - { - throw new NotSupportedException($"Only {nameof(String)} and {nameof(AzureCosmosDBNoSQLCompositeKey)} keys are supported."); - } - - var recordCollection = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var recordCollection = new AzureCosmosDBNoSQLVectorStoreRecordCollection( this._database, name, new() diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreOptions.cs index edbfe436f136..2439b1362367 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreOptions.cs @@ -11,7 +11,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; public sealed class AzureCosmosDBNoSQLVectorStoreOptions { /// - /// An optional factory to use for constructing instances, if a custom record collection is required. + /// An optional factory to use for constructing instances, if a custom record collection is required. /// [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory? 
VectorStoreCollectionFactory { get; init; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index 1196a6758031..83a34b0d4090 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Collections.ObjectModel; +using System.Diagnostics; using System.Linq; using System.Linq.Expressions; using System.Runtime.CompilerServices; @@ -22,14 +23,13 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// /// Service for storing and retrieving vector records, that uses Azure CosmosDB NoSQL as the underlying storage. /// +/// The data type of the record key. Can be either , or for dynamic mapping. /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollection : - IVectorStoreRecordCollection, - IVectorStoreRecordCollection, - IKeywordHybridSearch -#pragma warning restore CA1711 // Identifiers should not have incorrect +public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch + where TKey : notnull where TRecord : notnull +#pragma warning restore CA1711 // Identifiers should not have incorrect { /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; @@ -62,10 +62,10 @@ public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollection : public string CollectionName { get; } /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// that can be used to manage the collections in Azure CosmosDB NoSQL. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. public AzureCosmosDBNoSQLVectorStoreRecordCollection( Database database, @@ -76,11 +76,16 @@ public AzureCosmosDBNoSQLVectorStoreRecordCollection( Verify.NotNull(database); Verify.NotNullOrWhiteSpace(collectionName); + if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(AzureCosmosDBNoSQLCompositeKey) && typeof(TKey) != typeof(object)) + { + throw new NotSupportedException($"Only {nameof(String)} and {nameof(AzureCosmosDBNoSQLCompositeKey)} keys are supported (and object for dynamic mapping)."); + } + if (database.Client?.ClientOptions?.UseSystemTextJsonSerializerWithOptions is null) { throw new ArgumentException( $"Property {nameof(CosmosClientOptions.UseSystemTextJsonSerializerWithOptions)} in CosmosClient.ClientOptions " + - $"is required to be configured for {nameof(AzureCosmosDBNoSQLVectorStoreRecordCollection)}."); + $"is required to be configured for {nameof(AzureCosmosDBNoSQLVectorStoreRecordCollection)}."); } // Assign. 
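The key handling introduced above accepts string, AzureCosmosDBNoSQLCompositeKey, or object keys; when only a string is supplied, the record key doubles as the partition key (see GetCompositeKeys further down). A small sketch, assuming an existing Database configured with UseSystemTextJsonSerializerWithOptions and the hypothetical Hotel model from earlier:

// String keys: the partition key is implied to equal the record key.
var byString = new AzureCosmosDBNoSQLVectorStoreRecordCollection<string, Hotel>(database, "hotels");
await byString.DeleteAsync("hotel-1");

// Composite keys: record key and partition key are supplied separately.
var byComposite = new AzureCosmosDBNoSQLVectorStoreRecordCollection<AzureCosmosDBNoSQLCompositeKey, Hotel>(database, "hotels");
await byComposite.DeleteAsync(new AzureCosmosDBNoSQLCompositeKey(recordKey: "hotel-1", partitionKey: "emea"));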
@@ -173,48 +178,65 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) .DeleteContainerAsync(cancellationToken: cancellationToken)); } - #region Implementation of IVectorStoreRecordCollection + /// + public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) + => this.DeleteAsync([key], cancellationToken); /// - public Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { - // Use record key as partition key - var compositeKey = new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key); + Verify.NotNull(keys); - return this.InternalDeleteAsync([compositeKey], cancellationToken); - } + var tasks = GetCompositeKeys(keys).Select(key => + { + Verify.NotNullOrWhiteSpace(key.RecordKey); + Verify.NotNullOrWhiteSpace(key.PartitionKey); - /// - public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) - { - // Use record keys as partition keys - var compositeKeys = keys.Select(key => new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key)); + return this.RunOperationAsync("DeleteItem", () => + this._database + .GetContainer(this.CollectionName) + .DeleteItemAsync(key.RecordKey, new PartitionKey(key.PartitionKey), cancellationToken: cancellationToken)); + }); - return this.InternalDeleteAsync(compositeKeys, cancellationToken); + await Task.WhenAll(tasks).ConfigureAwait(false); } /// - public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { - // Use record key as partition key - var compositeKey = new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key); - - return await this.InternalGetAsync([compositeKey], options, cancellationToken) + return await this.GetAsync([key], options, cancellationToken) .FirstOrDefaultAsync(cancellationToken) .ConfigureAwait(false); } /// public async IAsyncEnumerable GetAsync( - IEnumerable keys, + IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { - // Use record keys as partition keys - var compositeKeys = keys.Select(key => new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key)); + Verify.NotNull(keys); + + const string OperationName = "GetItemQueryIterator"; - await foreach (var record in this.InternalGetAsync(compositeKeys, options, cancellationToken).ConfigureAwait(false)) + var includeVectors = options?.IncludeVectors ?? 
false; + + var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSelectQuery( + this._model, + this._model.KeyProperty.StorageName, + this._partitionKeyProperty.StorageName, + GetCompositeKeys(keys).ToList(), + includeVectors); + + await foreach (var jsonObject in this.GetItemsAsync(queryDefinition, cancellationToken).ConfigureAwait(false)) { + var record = VectorStoreErrorHandler.RunModelConversion( + AzureCosmosDBNoSQLConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, + this.CollectionName, + OperationName, + () => this._mapper.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); + if (record is not null) { yield return record; @@ -223,84 +245,53 @@ public async IAsyncEnumerable GetAsync( } /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { - var key = await this.InternalUpsertAsync(record, cancellationToken).ConfigureAwait(false); - - return key.RecordKey; - } + Verify.NotNull(record); - /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - Verify.NotNull(records); + const string OperationName = "UpsertItem"; - var tasks = records.Select(record => this.InternalUpsertAsync(record, cancellationToken)); + var jsonObject = VectorStoreErrorHandler.RunModelConversion( + AzureCosmosDBNoSQLConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, + this.CollectionName, + OperationName, + () => this._mapper.MapFromDataToStorageModel(record)); - var keys = await Task.WhenAll(tasks).ConfigureAwait(false); + var keyValue = jsonObject.TryGetPropertyValue(this._model.KeyProperty.StorageName!, out var jsonKey) ? jsonKey?.ToString() : null; + var partitionKeyValue = jsonObject.TryGetPropertyValue(this._partitionKeyProperty.StorageName, out var jsonPartitionKey) ? jsonPartitionKey?.ToString() : null; - foreach (var key in keys) + if (string.IsNullOrWhiteSpace(keyValue)) { - if (key is not null) - { - yield return key.RecordKey; - } + throw new VectorStoreOperationException($"Key property {this._model.KeyProperty.ModelName} is not initialized."); } - } - #endregion - - #region Implementation of IVectorStoreRecordCollection - - /// - public async Task GetAsync(AzureCosmosDBNoSQLCompositeKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) - { - return await this.InternalGetAsync([key], options, cancellationToken) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); - } - - /// - public async IAsyncEnumerable GetAsync( - IEnumerable keys, - GetRecordOptions? 
options = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - await foreach (var record in this.InternalGetAsync(keys, options, cancellationToken).ConfigureAwait(false)) + if (string.IsNullOrWhiteSpace(partitionKeyValue)) { - if (record is not null) - { - yield return record; - } + throw new VectorStoreOperationException($"Partition key property {this._partitionKeyProperty.ModelName} is not initialized."); } - } - /// - public Task DeleteAsync(AzureCosmosDBNoSQLCompositeKey key, CancellationToken cancellationToken = default) - { - return this.InternalDeleteAsync([key], cancellationToken); - } - - /// - public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) - { - return this.InternalDeleteAsync(keys, cancellationToken); - } + await this.RunOperationAsync(OperationName, () => + this._database + .GetContainer(this.CollectionName) + .UpsertItemAsync(jsonObject, new PartitionKey(partitionKeyValue), cancellationToken: cancellationToken)) + .ConfigureAwait(false); - /// - Task IVectorStoreRecordCollection.UpsertAsync(TRecord record, CancellationToken cancellationToken) - { - return this.InternalUpsertAsync(record, cancellationToken); + return typeof(TKey) switch + { + var t when t == typeof(AzureCosmosDBNoSQLCompositeKey) || t == typeof(object) => (TKey)(object)new AzureCosmosDBNoSQLCompositeKey(keyValue!, partitionKeyValue!), + var t when t == typeof(string) => (TKey)(object)keyValue!, + _ => throw new UnreachableException() + }; } /// - async IAsyncEnumerable IVectorStoreRecordCollection.UpsertAsync( - IEnumerable records, - [EnumeratorCancellation] CancellationToken cancellationToken) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); - var tasks = records.Select(record => this.InternalUpsertAsync(record, cancellationToken)); + // TODO: Do proper bulk upsert rather than parallel single inserts, #11350 + var tasks = records.Select(record => this.UpsertAsync(record, cancellationToken)); var keys = await Task.WhenAll(tasks).ConfigureAwait(false); @@ -425,8 +416,6 @@ public Task> HybridSearchAsync(TVector vec return Task.FromResult(new VectorSearchResults(mappedResults)); } - #endregion - /// public object? GetService(Type serviceType, object? serviceKey = null) { @@ -606,90 +595,6 @@ private static VectorDataType GetDataType(Type vectorDataType, string vectorProp _ => throw new InvalidOperationException($"Data type '{vectorDataType}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the Azure CosmosDB NoSQL VectorStore.") }; - private async IAsyncEnumerable InternalGetAsync( - IEnumerable keys, - GetRecordOptions? options = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - Verify.NotNull(keys); - - const string OperationName = "GetItemQueryIterator"; - - var includeVectors = options?.IncludeVectors ?? 
false; - - var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSelectQuery( - this._model, - this._model.KeyProperty.StorageName, - this._partitionKeyProperty.StorageName, - keys.ToList(), - includeVectors); - - await foreach (var jsonObject in this.GetItemsAsync(queryDefinition, cancellationToken).ConfigureAwait(false)) - { - yield return VectorStoreErrorHandler.RunModelConversion( - AzureCosmosDBNoSQLConstants.VectorStoreSystemName, - this._collectionMetadata.VectorStoreName, - this.CollectionName, - OperationName, - () => this._mapper.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); - } - } - - private async Task InternalUpsertAsync( - TRecord record, - CancellationToken cancellationToken) - { - Verify.NotNull(record); - - const string OperationName = "UpsertItem"; - - var jsonObject = VectorStoreErrorHandler.RunModelConversion( - AzureCosmosDBNoSQLConstants.VectorStoreSystemName, - this._collectionMetadata.VectorStoreName, - this.CollectionName, - OperationName, - () => this._mapper.MapFromDataToStorageModel(record)); - - var keyValue = jsonObject.TryGetPropertyValue(this._model.KeyProperty.StorageName!, out var jsonKey) ? jsonKey?.ToString() : null; - var partitionKeyValue = jsonObject.TryGetPropertyValue(this._partitionKeyProperty.StorageName, out var jsonPartitionKey) ? jsonPartitionKey?.ToString() : null; - - if (string.IsNullOrWhiteSpace(keyValue)) - { - throw new VectorStoreOperationException($"Key property {this._model.KeyProperty.ModelName} is not initialized."); - } - - if (string.IsNullOrWhiteSpace(partitionKeyValue)) - { - throw new VectorStoreOperationException($"Partition key property {this._partitionKeyProperty.ModelName} is not initialized."); - } - - await this.RunOperationAsync(OperationName, () => - this._database - .GetContainer(this.CollectionName) - .UpsertItemAsync(jsonObject, new PartitionKey(partitionKeyValue), cancellationToken: cancellationToken)) - .ConfigureAwait(false); - - return new AzureCosmosDBNoSQLCompositeKey(keyValue!, partitionKeyValue!); - } - - private async Task InternalDeleteAsync(IEnumerable keys, CancellationToken cancellationToken) - { - Verify.NotNull(keys); - - var tasks = keys.Select(key => - { - Verify.NotNullOrWhiteSpace(key.RecordKey); - Verify.NotNullOrWhiteSpace(key.PartitionKey); - - return this.RunOperationAsync("DeleteItem", () => - this._database - .GetContainer(this.CollectionName) - .DeleteItemAsync(key.RecordKey, new PartitionKey(key.PartitionKey), cancellationToken: cancellationToken)); - }); - - await Task.WhenAll(tasks).ConfigureAwait(false); - } - private async IAsyncEnumerable GetItemsAsync(QueryDefinition queryDefinition, [EnumeratorCancellation] CancellationToken cancellationToken) { var iterator = this._database @@ -743,9 +648,9 @@ private IVectorStoreRecordMapper InitializeMapper(JsonSeria return this._options.JsonObjectCustomMapper; } - if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) + if (typeof(TRecord) == typeof(Dictionary)) { - var mapper = new AzureCosmosDBNoSQLGenericDataModelMapper(this._model, jsonSerializerOptions); + var mapper = new AzureCosmosDBNoSQLDynamicDataModelMapper(this._model, jsonSerializerOptions); return (mapper as IVectorStoreRecordMapper)!; } @@ -753,5 +658,19 @@ private IVectorStoreRecordMapper InitializeMapper(JsonSeria } #pragma warning restore CS0618 + private static IEnumerable GetCompositeKeys(IEnumerable keys) + => keys switch + { + IEnumerable k => k, + IEnumerable k => k.Select(key => new 
AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key)), + IEnumerable k => k.Select(key => key switch + { + string s => new AzureCosmosDBNoSQLCompositeKey(recordKey: s, partitionKey: s), + AzureCosmosDBNoSQLCompositeKey ck => ck, + _ => throw new ArgumentException($"Invalid key type '{key.GetType().Name}'.") + }), + _ => throw new UnreachableException() + }; + #endregion } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs index ff77da73dd19..e04867e68afc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs @@ -9,7 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// -/// Options when creating a . +/// Options when creating a . /// public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs index b5c3ce30fbfb..da70fa354bc7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs @@ -140,8 +140,8 @@ private void TranslateMember(MemberExpression memberExpression) { switch (memberExpression) { - case var _ when this.TryGetPropertyAccess(memberExpression, out var column): - this._sql.Append(AzureCosmosDBNoSQLConstants.ContainerAlias).Append("[\"").Append(column).Append("\"]"); + case var _ when this.TryBindProperty(memberExpression, out var property): + this.GeneratePropertyAccess(property); return; // Identify captured lambda variables, translate to Cosmos parameters (@foo, @bar...) @@ -189,6 +189,11 @@ private void TranslateMethodCall(MethodCallExpression methodCall) { switch (methodCall) { + // Dictionary access for dynamic mapping (r => r["SomeString"] == "foo") + case MethodCallExpression when this.TryBindProperty(methodCall, out var property): + this.GeneratePropertyAccess(property); + return; + // Enumerable.Contains() case { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains when contains.Method.DeclaringType == typeof(Enumerable): @@ -244,26 +249,63 @@ private void TranslateUnary(UnaryExpression unary) this._sql.Append(')'); return; + // Handle convert over member access, for dynamic dictionary access (r => (int)r["SomeInt"] == 8) + case ExpressionType.Convert when this.TryBindProperty(unary.Operand, out var property) && unary.Type == property.Type: + this.GeneratePropertyAccess(property); + return; + default: throw new NotSupportedException("Unsupported unary expression node type: " + unary.NodeType); } } - private bool TryGetPropertyAccess(Expression expression, [NotNullWhen(true)] out string? column) + protected virtual void GeneratePropertyAccess(VectorStoreRecordPropertyModel property) + => this._sql.Append(AzureCosmosDBNoSQLConstants.ContainerAlias).Append("[\"").Append(property.StorageName).Append("\"]"); + + private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out VectorStoreRecordPropertyModel? 
property) { - if (expression is MemberExpression member && member.Expression == this._recordParameter) + Type? convertedClrType = null; + + if (expression is UnaryExpression { NodeType: ExpressionType.Convert } unary) { - if (!this._model.PropertyMap.TryGetValue(member.Member.Name, out var property)) + expression = unary.Operand; + convertedClrType = unary.Type; + } + + var modelName = expression switch + { + // Regular member access for strongly-typed POCO binding (e.g. r => r.SomeInt == 8) + MemberExpression memberExpression when memberExpression.Expression == this._recordParameter + => memberExpression.Member.Name, + + // Dictionary lookup for weakly-typed dynamic binding (e.g. r => r["SomeInt"] == 8) + MethodCallExpression { - throw new InvalidOperationException($"Property name '{member.Member.Name}' provided as part of the filter clause is not a valid property name."); - } + Method: { Name: "get_Item", DeclaringType: var declaringType }, + Arguments: [ConstantExpression { Value: string keyName }] + } methodCall when methodCall.Object == this._recordParameter && declaringType == typeof(Dictionary) + => keyName, - column = property.StorageName; - return true; + _ => null + }; + + if (modelName is null) + { + property = null; + return false; } - column = null; - return false; + if (!this._model.PropertyMap.TryGetValue(modelName, out property)) + { + throw new InvalidOperationException($"Property name '{modelName}' provided as part of the filter clause is not a valid property name."); + } + + if (convertedClrType is not null && convertedClrType != property.Type) + { + throw new InvalidCastException($"Property '{property.ModelName}' is being cast to type '{convertedClrType.Name}', but its configured type is '{property.Type.Name}'."); + } + + return true; } private static bool TryGetCapturedValue(Expression expression, [NotNullWhen(true)] out string? name, out object? value) diff --git a/dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs index 50dbce83aa4b..0d73e6e4ef9f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs @@ -43,10 +43,10 @@ internal void Translate(bool appendWhere) this._sql.Append("WHERE "); } - this.Translate(this._lambdaExpression.Body, null); + this.Translate(this._lambdaExpression.Body, isSearchCondition: true); } - protected void Translate(Expression? node, Expression? parent) + protected void Translate(Expression? node, bool isSearchCondition = false) { switch (node) { @@ -59,15 +59,15 @@ protected void Translate(Expression? node, Expression? 
parent) return; case MemberExpression member: - this.TranslateMember(member, parent); + this.TranslateMember(member, isSearchCondition); return; case MethodCallExpression methodCall: - this.TranslateMethodCall(methodCall); + this.TranslateMethodCall(methodCall, isSearchCondition); return; case UnaryExpression unary: - this.TranslateUnary(unary); + this.TranslateUnary(unary, isSearchCondition); return; default: @@ -82,29 +82,29 @@ protected void TranslateBinary(BinaryExpression binary) { case ExpressionType.Equal when IsNull(binary.Right): this._sql.Append('('); - this.Translate(binary.Left, binary); + this.Translate(binary.Left); this._sql.Append(" IS NULL)"); return; case ExpressionType.NotEqual when IsNull(binary.Right): this._sql.Append('('); - this.Translate(binary.Left, binary); + this.Translate(binary.Left); this._sql.Append(" IS NOT NULL)"); return; case ExpressionType.Equal when IsNull(binary.Left): this._sql.Append('('); - this.Translate(binary.Right, binary); + this.Translate(binary.Right); this._sql.Append(" IS NULL)"); return; case ExpressionType.NotEqual when IsNull(binary.Left): this._sql.Append('('); - this.Translate(binary.Right, binary); + this.Translate(binary.Right); this._sql.Append(" IS NOT NULL)"); return; } this._sql.Append('('); - this.Translate(binary.Left, binary); + this.Translate(binary.Left, isSearchCondition: binary.NodeType is ExpressionType.AndAlso or ExpressionType.OrElse); this._sql.Append(binary.NodeType switch { @@ -122,7 +122,8 @@ protected void TranslateBinary(BinaryExpression binary) _ => throw new NotSupportedException("Unsupported binary expression node type: " + binary.NodeType) }); - this.Translate(binary.Right, binary); + this.Translate(binary.Right, isSearchCondition: binary.NodeType is ExpressionType.AndAlso or ExpressionType.OrElse); + this._sql.Append(')'); static bool IsNull(Expression expression) @@ -172,12 +173,12 @@ protected virtual void TranslateConstant(object? value) } } - private void TranslateMember(MemberExpression memberExpression, Expression? parent) + private void TranslateMember(MemberExpression memberExpression, bool isSearchCondition) { switch (memberExpression) { - case var _ when this.TryGetColumn(memberExpression, out var column): - this.TranslateColumn(column, memberExpression, parent); + case var _ when this.TryBindProperty(memberExpression, out var property): + this.GenerateColumn(property.StorageName, isSearchCondition); return; case var _ when TryGetCapturedValue(memberExpression, out var name, out var value): @@ -189,19 +190,24 @@ private void TranslateMember(MemberExpression memberExpression, Expression? pare } } - protected virtual void TranslateColumn(string column, MemberExpression memberExpression, Expression? parent) - => this._sql.Append('"').Append(column).Append('"'); + protected virtual void GenerateColumn(string column, bool isSearchCondition = false) + => this._sql.Append('"').Append(column.Replace("\"", "\"\"")).Append('"'); protected abstract void TranslateCapturedVariable(string name, object? 
capturedValue); - private void TranslateMethodCall(MethodCallExpression methodCall) + private void TranslateMethodCall(MethodCallExpression methodCall, bool isSearchCondition = false) { switch (methodCall) { + // Dictionary access for dynamic mapping (r => r["SomeString"] == "foo") + case MethodCallExpression when this.TryBindProperty(methodCall, out var property): + this.GenerateColumn(property.StorageName, isSearchCondition); + return; + // Enumerable.Contains() case { Method.Name: nameof(Enumerable.Contains), Arguments: [var source, var item] } contains when contains.Method.DeclaringType == typeof(Enumerable): - this.TranslateContains(source, item, methodCall); + this.TranslateContains(source, item); return; // List.Contains() @@ -215,7 +221,7 @@ private void TranslateMethodCall(MethodCallExpression methodCall) Object: Expression source, Arguments: [var item] } when declaringType.GetGenericTypeDefinition() == typeof(List<>): - this.TranslateContains(source, item, methodCall); + this.TranslateContains(source, item); return; default: @@ -223,18 +229,18 @@ private void TranslateMethodCall(MethodCallExpression methodCall) } } - private void TranslateContains(Expression source, Expression item, MethodCallExpression parent) + private void TranslateContains(Expression source, Expression item) { switch (source) { // Contains over array column (r => r.Strings.Contains("foo")) - case var _ when this.TryGetColumn(source, out _): - this.TranslateContainsOverArrayColumn(source, item, parent); + case var _ when this.TryBindProperty(source, out _): + this.TranslateContainsOverArrayColumn(source, item); return; // Contains over inline array (r => new[] { "foo", "bar" }.Contains(r.String)) case NewArrayExpression newArray: - this.Translate(item, parent); + this.Translate(item); this._sql.Append(" IN ("); var isFirst = true; @@ -249,7 +255,7 @@ private void TranslateContains(Expression source, Expression item, MethodCallExp this._sql.Append(", "); } - this.Translate(element, parent); + this.Translate(element); } this._sql.Append(')'); @@ -257,7 +263,7 @@ private void TranslateContains(Expression source, Expression item, MethodCallExp // Contains over captured array (r => arrayLocalVariable.Contains(r.String)) case var _ when TryGetCapturedValue(source, out _, out var value): - this.TranslateContainsOverCapturedArray(source, item, parent, value); + this.TranslateContainsOverCapturedArray(source, item, value); return; default: @@ -265,11 +271,11 @@ private void TranslateContains(Expression source, Expression item, MethodCallExp } } - protected abstract void TranslateContainsOverArrayColumn(Expression source, Expression item, MethodCallExpression parent); + protected abstract void TranslateContainsOverArrayColumn(Expression source, Expression item); - protected abstract void TranslateContainsOverCapturedArray(Expression source, Expression item, MethodCallExpression parent, object? value); + protected abstract void TranslateContainsOverCapturedArray(Expression source, Expression item, object? 
value); - private void TranslateUnary(UnaryExpression unary) + private void TranslateUnary(UnaryExpression unary, bool isSearchCondition) { switch (unary.NodeType) { @@ -286,31 +292,64 @@ private void TranslateUnary(UnaryExpression unary) } this._sql.Append("(NOT "); - this.Translate(unary.Operand, unary); + this.Translate(unary.Operand, isSearchCondition); this._sql.Append(')'); return; + // Handle convert over member access, for dynamic dictionary access (r => (int)r["SomeInt"] == 8) + case ExpressionType.Convert when this.TryBindProperty(unary.Operand, out var property) && unary.Type == property.Type: + this.GenerateColumn(property.StorageName, isSearchCondition); + return; + default: throw new NotSupportedException("Unsupported unary expression node type: " + unary.NodeType); } } - private bool TryGetColumn(Expression expression, [NotNullWhen(true)] out string? column) + private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out VectorStoreRecordPropertyModel? property) { - if (expression is MemberExpression member && member.Expression == this._recordParameter) + Type? convertedClrType = null; + + if (expression is UnaryExpression { NodeType: ExpressionType.Convert } unary) { - if (!this._model.PropertyMap.TryGetValue(member.Member.Name, out var property)) + expression = unary.Operand; + convertedClrType = unary.Type; + } + + var modelName = expression switch + { + // Regular member access for strongly-typed POCO binding (e.g. r => r.SomeInt == 8) + MemberExpression memberExpression when memberExpression.Expression == this._recordParameter + => memberExpression.Member.Name, + + // Dictionary lookup for weakly-typed dynamic binding (e.g. r => r["SomeInt"] == 8) + MethodCallExpression { - throw new InvalidOperationException($"Property name '{member.Member.Name}' provided as part of the filter clause is not a valid property name."); - } + Method: { Name: "get_Item", DeclaringType: var declaringType }, + Arguments: [ConstantExpression { Value: string keyName }] + } methodCall when methodCall.Object == this._recordParameter && declaringType == typeof(Dictionary) + => keyName, - column = property.StorageName; + _ => null + }; - return true; + if (modelName is null) + { + property = null; + return false; } - column = null; - return false; + if (!this._model.PropertyMap.TryGetValue(modelName, out property)) + { + throw new InvalidOperationException($"Property name '{modelName}' provided as part of the filter clause is not a valid property name."); + } + + if (convertedClrType is not null && convertedClrType != property.Type) + { + throw new InvalidCastException($"Property '{property.ModelName}' is being cast to type '{convertedClrType.Name}', but its configured type is '{property.Type.Name}'."); + } + + return true; } private static bool TryGetCapturedValue(Expression expression, [NotNullWhen(true)] out string? name, out object? value) diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index b2814654b940..1d004674e254 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -193,6 +193,8 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) /// public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { + Verify.NotNull(keys); + foreach (var key in keys) { var record = await this.GetAsync(key, options, cancellationToken).ConfigureAwait(false); @@ -216,6 +218,8 @@ public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) /// public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { + Verify.NotNull(keys); + var collectionDictionary = this.GetCollectionDictionary(); foreach (var key in keys) @@ -242,6 +246,8 @@ public Task UpsertAsync(TRecord record, CancellationToken cancellationToke /// public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { + Verify.NotNull(records); + foreach (var record in records) { yield return await this.UpsertAsync(record, cancellationToken).ConfigureAwait(false); diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs index 80414a43910c..61457cd4cb10 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs @@ -47,9 +47,9 @@ or ExpressionType.LessThan or ExpressionType.LessThanOrEqual UnaryExpression { NodeType: ExpressionType.Not } not => this.TranslateNot(not), - // MemberExpression is generally handled within e.g. TranslateEqualityComparison; this is used to translate direct bool inside filter (e.g. Filter => r => r.Bool) - MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _) - => this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(true))), + // Special handling for bool constant as the filter expression (r => r.Bool) + Expression when node.Type == typeof(bool) && this.TryBindProperty(node, out var property) + => this.GenerateEqualityComparison(property, value: true, ExpressionType.Equal), MethodCallExpression methodCall => this.TranslateMethodCall(methodCall), @@ -57,36 +57,36 @@ or ExpressionType.LessThan or ExpressionType.LessThanOrEqual }; private BsonDocument TranslateEqualityComparison(BinaryExpression binary) + => (this.TryBindProperty(binary.Left, out var property) && TryGetConstant(binary.Right, out var value)) + || (this.TryBindProperty(binary.Right, out property) && TryGetConstant(binary.Left, out value)) + ? this.GenerateEqualityComparison(property, value, binary.NodeType) + : throw new NotSupportedException("Invalid equality/comparison"); + + private BsonDocument GenerateEqualityComparison(VectorStoreRecordPropertyModel property, object? 
value, ExpressionType nodeType)
     {
-        if ((this.TryTranslateFieldAccess(binary.Left, out var storagePropertyName) && TryGetConstant(binary.Right, out var value))
-            || (this.TryTranslateFieldAccess(binary.Right, out storagePropertyName) && TryGetConstant(binary.Left, out value)))
+        if (value is null)
         {
-            if (value is null)
-            {
-                throw new NotSupportedException("MongogDB does not support null checks in vector search pre-filters");
-            }
-
-            // Short form of equality (instead of $eq)
-            if (binary.NodeType is ExpressionType.Equal)
-            {
-                return new BsonDocument { [storagePropertyName] = BsonValue.Create(value) };
-            }
+            throw new NotSupportedException("MongoDB does not support null checks in vector search pre-filters");
+        }
 
-            var filterOperator = binary.NodeType switch
-            {
-                ExpressionType.NotEqual => "$ne",
-                ExpressionType.GreaterThan => "$gt",
-                ExpressionType.GreaterThanOrEqual => "$gte",
-                ExpressionType.LessThan => "$lt",
-                ExpressionType.LessThanOrEqual => "$lte",
+        // Short form of equality (instead of $eq)
+        if (nodeType is ExpressionType.Equal)
+        {
+            return new BsonDocument { [property.StorageName] = BsonValue.Create(value) };
+        }
 
-                _ => throw new UnreachableException()
-            };
+        var filterOperator = nodeType switch
+        {
+            ExpressionType.NotEqual => "$ne",
+            ExpressionType.GreaterThan => "$gt",
+            ExpressionType.GreaterThanOrEqual => "$gte",
+            ExpressionType.LessThan => "$lt",
+            ExpressionType.LessThanOrEqual => "$lte",
 
-            return new BsonDocument { [storagePropertyName] = new BsonDocument { [filterOperator] = BsonValue.Create(value) } };
-        }
+            _ => throw new UnreachableException()
+        };
 
-        throw new NotSupportedException("Invalid equality/comparison");
+        return new BsonDocument { [property.StorageName] = new BsonDocument { [filterOperator] = BsonValue.Create(value) } };
     }
 
     private BsonDocument TranslateAndOr(BinaryExpression andOr)
@@ -131,9 +131,9 @@ private BsonDocument TranslateNot(UnaryExpression not)
                         binary.Left,
                         binary.Right));
 
-            // Not over bool field (Filter => r => !r.Bool)
-            case MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _):
-                return this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(false)));
+            // Not over bool field (r => !r.Bool)
+            case var negated when negated.Type == typeof(bool) && this.TryBindProperty(negated, out var property):
+                return this.GenerateEqualityComparison(property, false, ExpressionType.Equal);
         }
 
         var operand = this.Translate(not.Operand);
@@ -175,7 +175,7 @@ private BsonDocument TranslateContains(Expression source, Expression item)
         switch (source)
         {
             // Contains over array column (r => r.Strings.Contains("foo"))
-            case var _ when this.TryTranslateFieldAccess(source, out _):
+            case var _ when this.TryBindProperty(source, out _):
                 throw new NotSupportedException("MongoDB does not support Contains within array fields ($elemMatch) in vector search pre-filters");
 
             // Contains over inline enumerable
@@ -205,14 +205,14 @@ private BsonDocument TranslateContains(Expression source, Expression item)
 
         BsonDocument ProcessInlineEnumerable(IEnumerable elements, Expression item)
         {
-            if (!this.TryTranslateFieldAccess(item, out var storagePropertyName))
+            if (!this.TryBindProperty(item, out var property))
             {
                 throw new NotSupportedException("Unsupported item type in Contains");
             }
 
             return new BsonDocument
             {
-                [storagePropertyName] = new BsonDocument
+                [property.StorageName] = new BsonDocument
                 {
                     ["$in"] = new BsonArray(from object? 
element in elements select BsonValue.Create(element)) } @@ -220,21 +220,50 @@ BsonDocument ProcessInlineEnumerable(IEnumerable elements, Expression item) } } - private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] out string? storagePropertyName) + private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out VectorStoreRecordPropertyModel? property) { - if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) + Type? convertedClrType = null; + + if (expression is UnaryExpression { NodeType: ExpressionType.Convert } unary) + { + expression = unary.Operand; + convertedClrType = unary.Type; + } + + var modelName = expression switch { - if (!this._model.PropertyMap.TryGetValue(memberExpression.Member.Name, out var property)) + // Regular member access for strongly-typed POCO binding (e.g. r => r.SomeInt == 8) + MemberExpression memberExpression when memberExpression.Expression == this._recordParameter + => memberExpression.Member.Name, + + // Dictionary lookup for weakly-typed dynamic binding (e.g. r => r["SomeInt"] == 8) + MethodCallExpression { - throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); - } + Method: { Name: "get_Item", DeclaringType: var declaringType }, + Arguments: [ConstantExpression { Value: string keyName }] + } methodCall when methodCall.Object == this._recordParameter && declaringType == typeof(Dictionary) + => keyName, + + _ => null + }; - storagePropertyName = property.StorageName; - return true; + if (modelName is null) + { + property = null; + return false; + } + + if (!this._model.PropertyMap.TryGetValue(modelName, out property)) + { + throw new InvalidOperationException($"Property name '{modelName}' provided as part of the filter clause is not a valid property name."); + } + + if (convertedClrType is not null && convertedClrType != property.Type) + { + throw new InvalidCastException($"Property '{property.ModelName}' is being cast to type '{convertedClrType.Name}', but its configured type is '{property.Type.Name}'."); } - storagePropertyName = null; - return false; + return true; } private static bool TryGetConstant(Expression expression, out object? constantValue) diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBServiceCollectionExtensions.cs index 4df82f2bb4c1..784dcf8220df 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBServiceCollectionExtensions.cs @@ -102,7 +102,7 @@ public static IServiceCollection AddMongoDBVectorStoreRecordCollection( var database = sp.GetRequiredService(); var selectedOptions = options ?? sp.GetService>(); - return new MongoDBVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new MongoDBVectorStoreRecordCollection(database, collectionName, selectedOptions); }); AddVectorizedSearch(services, serviceId); @@ -143,7 +143,7 @@ public static IServiceCollection AddMongoDBVectorStoreRecordCollection( var selectedOptions = options ?? 
sp.GetService>(); - return new MongoDBVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new MongoDBVectorStoreRecordCollection(database, collectionName, selectedOptions); }); AddVectorizedSearch(services, serviceId); diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs index c9923703dea1..bbe3ac92f988 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs @@ -57,12 +57,7 @@ public IVectorStoreRecordCollection GetCollection( } #pragma warning restore CS0618 - if (typeof(TKey) != typeof(string)) - { - throw new NotSupportedException("Only string keys are supported."); - } - - var recordCollection = new MongoDBVectorStoreRecordCollection( + var recordCollection = new MongoDBVectorStoreRecordCollection( this._mongoDatabase, name, new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreOptions.cs index 3382019ea1f6..52b58b6f8994 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreOptions.cs @@ -10,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; public sealed class MongoDBVectorStoreOptions { /// - /// An optional factory to use for constructing instances, if a custom record collection is required. + /// An optional factory to use for constructing instances, if a custom record collection is required. /// [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IMongoDBVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index 280fbed8c7a9..7b7cfbb20447 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using System.Diagnostics; using System.Linq; using System.Linq.Expressions; using System.Runtime.CompilerServices; @@ -18,11 +19,13 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// /// Service for storing and retrieving vector records, that uses MongoDB as the underlying storage. /// +/// The data type of the record key. Can be either , or for dynamic mapping. /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class MongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch -#pragma warning restore CA1711 // Identifiers should not have incorrect suffix +public sealed class MongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch + where TKey : notnull where TRecord : notnull +#pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// Metadata about vector store record collection. 
private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; @@ -60,10 +63,10 @@ public sealed class MongoDBVectorStoreRecordCollection : IVectorStoreRe public string CollectionName { get; } /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// that can be used to manage the collections in MongoDB. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. public MongoDBVectorStoreRecordCollection( IMongoDatabase mongoDatabase, @@ -74,6 +77,11 @@ public MongoDBVectorStoreRecordCollection( Verify.NotNull(mongoDatabase); Verify.NotNullOrWhiteSpace(collectionName); + if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(object)) + { + throw new NotSupportedException("Only string keys are supported (and object for dynamic mapping)"); + } + // Assign. this._mongoDatabase = mongoDatabase; this._mongoCollection = mongoDatabase.GetCollection(collectionName); @@ -130,20 +138,22 @@ await this.RunOperationWithRetryAsync( } /// - public async Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public async Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { - Verify.NotNullOrWhiteSpace(key); + var stringKey = this.GetStringKey(key); - await this.RunOperationAsync("DeleteOne", () => this._mongoCollection.DeleteOneAsync(this.GetFilterById(key), cancellationToken)) + await this.RunOperationAsync("DeleteOne", () => this._mongoCollection.DeleteOneAsync(this.GetFilterById(stringKey), cancellationToken)) .ConfigureAwait(false); } /// - public async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); - await this.RunOperationAsync("DeleteMany", () => this._mongoCollection.DeleteManyAsync(this.GetFilterByIds(keys), cancellationToken)) + var stringKeys = keys is IEnumerable k ? k : keys.Cast(); + + await this.RunOperationAsync("DeleteMany", () => this._mongoCollection.DeleteManyAsync(this.GetFilterByIds(stringKeys), cancellationToken)) .ConfigureAwait(false); } @@ -152,18 +162,18 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) => this.RunOperationAsync("DropCollection", () => this._mongoDatabase.DropCollectionAsync(this.CollectionName, cancellationToken)); /// - public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { - Verify.NotNullOrWhiteSpace(key); - const string OperationName = "Find"; + var stringKey = this.GetStringKey(key); + var includeVectors = options?.IncludeVectors ?? false; var record = await this.RunOperationAsync(OperationName, async () => { using var cursor = await this - .FindAsync(this.GetFilterById(key), options, cancellationToken) + .FindAsync(this.GetFilterById(stringKey), options, cancellationToken) .ConfigureAwait(false); return await cursor.SingleOrDefaultAsync(cancellationToken).ConfigureAwait(false); @@ -184,7 +194,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) /// public async IAsyncEnumerable GetAsync( - IEnumerable keys, + IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { @@ -192,8 +202,10 @@ public async IAsyncEnumerable GetAsync( const string OperationName = "Find"; + var stringKeys = keys is IEnumerable k ? k : keys.Cast(); + using var cursor = await this - .FindAsync(this.GetFilterByIds(keys), options, cancellationToken) + .FindAsync(this.GetFilterByIds(stringKeys), options, cancellationToken) .ConfigureAwait(false); while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false)) @@ -214,7 +226,7 @@ public async IAsyncEnumerable GetAsync( } /// - public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -236,12 +248,12 @@ await this._mongoCollection .ReplaceOneAsync(this.GetFilterById(key), storageModel, replaceOptions, cancellationToken) .ConfigureAwait(false); - return key; + return (TKey)(object)key; }); } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -252,7 +264,7 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, { if (result is not null) { - yield return result; + yield return (TKey)(object)result; } } } @@ -702,9 +714,9 @@ private IVectorStoreRecordMapper InitializeMapper() return this._options.BsonDocumentCustomMapper; } - if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) + if (typeof(TRecord) == typeof(Dictionary)) { - return (new MongoDBGenericDataModelMapper(this._model) as IVectorStoreRecordMapper)!; + return (new MongoDBDynamicDataModelMapper(this._model) as IVectorStoreRecordMapper)!; } return new MongoDBVectorStoreRecordMapper(this._model); @@ -726,5 +738,17 @@ private static Array VerifyVectorParam(TVector vector) typeof(ReadOnlyMemory).FullName])}") }; } + + private string GetStringKey(TKey key) + { + Verify.NotNull(key); + + var stringKey = key as string ?? throw new UnreachableException("string key should have been validated during model building"); + + Verify.NotNullOrWhiteSpace(stringKey, nameof(key)); + + return stringKey; + } + #endregion } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs index 64ead70bdd47..5b22d07b1557 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs @@ -7,7 +7,7 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// -/// Options when creating a . +/// Options when creating a . 
/// public sealed class MongoDBVectorStoreRecordCollectionOptions { diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs index 4044e42825fc..a54346b918d4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs @@ -50,9 +50,9 @@ or ExpressionType.LessThan or ExpressionType.LessThanOrEqual UnaryExpression { NodeType: ExpressionType.Not } not => this.TranslateNot(not), - // MemberExpression is generally handled within e.g. TranslateEqualityComparison; this is used to translate direct bool inside filter (e.g. Filter => r => r.Bool) - MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _) - => this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(true))), + // Special handling for bool constant as the filter expression (r => r.Bool) + Expression when node.Type == typeof(bool) && this.TryBindProperty(node, out var property) + => this.GenerateEqualityComparison(property, true, ExpressionType.Equal), MethodCallExpression methodCall => this.TranslateMethodCall(methodCall), @@ -60,36 +60,36 @@ or ExpressionType.LessThan or ExpressionType.LessThanOrEqual }; private Metadata TranslateEqualityComparison(BinaryExpression binary) + => (this.TryBindProperty(binary.Left, out var property) && TryGetConstant(binary.Right, out var value)) + || (this.TryBindProperty(binary.Right, out property) && TryGetConstant(binary.Left, out value)) + ? this.GenerateEqualityComparison(property, value, binary.NodeType) + : throw new NotSupportedException("Invalid equality/comparison"); + + private Metadata GenerateEqualityComparison(VectorStoreRecordPropertyModel property, object? 
value, ExpressionType nodeType)
     {
-        if ((this.TryTranslateFieldAccess(binary.Left, out var storagePropertyName) && TryGetConstant(binary.Right, out var value))
-            || (this.TryTranslateFieldAccess(binary.Right, out storagePropertyName) && TryGetConstant(binary.Left, out value)))
+        if (value is null)
         {
-            if (value is null)
-            {
-                throw new NotSupportedException("Pincone does not support null checks in vector search pre-filters");
-            }
-
-            // Short form of equality (instead of $eq)
-            if (binary.NodeType is ExpressionType.Equal)
-            {
-                return new Metadata { [storagePropertyName] = ToMetadata(value) };
-            }
+            throw new NotSupportedException("Pinecone does not support null checks in vector search pre-filters");
+        }
 
-            var filterOperator = binary.NodeType switch
-            {
-                ExpressionType.NotEqual => "$ne",
-                ExpressionType.GreaterThan => "$gt",
-                ExpressionType.GreaterThanOrEqual => "$gte",
-                ExpressionType.LessThan => "$lt",
-                ExpressionType.LessThanOrEqual => "$lte",
+        // Short form of equality (instead of $eq)
+        if (nodeType is ExpressionType.Equal)
+        {
+            return new Metadata { [property.StorageName] = ToMetadata(value) };
+        }
 
-                _ => throw new UnreachableException()
-            };
+        var filterOperator = nodeType switch
+        {
+            ExpressionType.NotEqual => "$ne",
+            ExpressionType.GreaterThan => "$gt",
+            ExpressionType.GreaterThanOrEqual => "$gte",
+            ExpressionType.LessThan => "$lt",
+            ExpressionType.LessThanOrEqual => "$lte",
 
-            return new Metadata { [storagePropertyName] = new Metadata { [filterOperator] = ToMetadata(value) } };
-        }
+            _ => throw new UnreachableException()
+        };
 
-        throw new NotSupportedException("Invalid equality/comparison");
+        return new Metadata { [property.StorageName] = new Metadata { [filterOperator] = ToMetadata(value) } };
     }
 
     private Metadata TranslateAndOr(BinaryExpression andOr)
@@ -135,8 +135,8 @@ private Metadata TranslateNot(UnaryExpression not)
                         binary.Right));
 
             // Not over bool field (Filter => r => !r.Bool)
-            case MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _):
-                return this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(false)));
+            case Expression when not.Operand.Type == typeof(bool) && this.TryBindProperty(not.Operand, out var property):
+                return this.GenerateEqualityComparison(property, false, ExpressionType.Equal);
         }
 
         var operand = this.Translate(not.Operand);
@@ -178,7 +178,7 @@ private Metadata TranslateContains(Expression source, Expression item)
         switch (source)
         {
             // Contains over array column (r => r.Strings.Contains("foo"))
-            case var _ when this.TryTranslateFieldAccess(source, out _):
+            case var _ when this.TryBindProperty(source, out _):
                 throw new NotSupportedException("Pinecone does not support Contains within array fields ($elemMatch) in vector search pre-filters");
 
             // Contains over inline enumerable
@@ -208,14 +208,14 @@ private Metadata TranslateContains(Expression source, Expression item)
 
         Metadata ProcessInlineEnumerable(IEnumerable elements, Expression item)
         {
-            if (!this.TryTranslateFieldAccess(item, out var storagePropertyName))
+            if (!this.TryBindProperty(item, out var property))
             {
                 throw new NotSupportedException("Unsupported item type in Contains");
            }
 
             return new Metadata
             {
-                [storagePropertyName] = new Metadata
+                [property.StorageName] = new Metadata
                 {
                     ["$in"] = new MetadataValue(elements.Cast().Select(ToMetadata).ToList())
                 }
@@ -223,21 +223,50 @@ Metadata ProcessInlineEnumerable(IEnumerable elements, Expression item)
         }
     }
 
-    private bool TryTranslateFieldAccess(Expression expression, 
[NotNullWhen(true)] out string? storagePropertyName) + private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out VectorStoreRecordPropertyModel? property) { - if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) + Type? convertedClrType = null; + + if (expression is UnaryExpression { NodeType: ExpressionType.Convert } unary) + { + expression = unary.Operand; + convertedClrType = unary.Type; + } + + var modelName = expression switch { - if (!this._model.PropertyMap.TryGetValue(memberExpression.Member.Name, out var property)) + // Regular member access for strongly-typed POCO binding (e.g. r => r.SomeInt == 8) + MemberExpression memberExpression when memberExpression.Expression == this._recordParameter + => memberExpression.Member.Name, + + // Dictionary lookup for weakly-typed dynamic binding (e.g. r => r["SomeInt"] == 8) + MethodCallExpression { - throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); - } + Method: { Name: "get_Item", DeclaringType: var declaringType }, + Arguments: [ConstantExpression { Value: string keyName }] + } methodCall when methodCall.Object == this._recordParameter && declaringType == typeof(Dictionary) + => keyName, + + _ => null + }; - storagePropertyName = property.StorageName; - return true; + if (modelName is null) + { + property = null; + return false; + } + + if (!this._model.PropertyMap.TryGetValue(modelName, out property)) + { + throw new InvalidOperationException($"Property name '{modelName}' provided as part of the filter clause is not a valid property name."); + } + + if (convertedClrType is not null && convertedClrType != property.Type) + { + throw new InvalidCastException($"Property '{property.ModelName}' is being cast to type '{convertedClrType.Name}', but its configured type is '{property.Type.Name}'."); } - storagePropertyName = null; - return false; + return true; } private static bool TryGetConstant(Expression expression, out object? constantValue) diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs index d27bd325e7a4..236582017474 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs @@ -46,8 +46,8 @@ public static IKernelBuilder AddPineconeVectorStore(this IKernelBuilder builder, /// /// The type of the data model that the collection should contain. /// The builder to register the on. - /// The name of the collection that this will access. - /// Optional configuration options to pass to the . + /// The name of the collection that this will access. + /// Optional configuration options to pass to the . /// An optional service id to use as the service key. /// The kernel builder. public static IKernelBuilder AddPineconeVectorStoreRecordCollection( @@ -67,9 +67,9 @@ public static IKernelBuilder AddPineconeVectorStoreRecordCollection( /// /// The type of the data model that the collection should contain. /// The builder to register the on. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// The api key for Pinecone. - /// Optional configuration options to pass to the . + /// Optional configuration options to pass to the . 
/// An optional service id to use as the service key. /// The kernel builder. public static IKernelBuilder AddPineconeVectorStoreRecordCollection( diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeServiceCollectionExtensions.cs index 9b5c8e135355..a7c3ad3d9a43 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeServiceCollectionExtensions.cs @@ -69,8 +69,8 @@ public static IServiceCollection AddPineconeVectorStore(this IServiceCollection /// /// The type of the data model that the collection should contain. /// The to register the on. - /// The name of the collection that this will access. - /// Optional configuration options to pass to the . + /// The name of the collection that this will access. + /// Optional configuration options to pass to the . /// An optional service id to use as the service key. /// The service collection. public static IServiceCollection AddPineconeVectorStoreRecordCollection( @@ -89,7 +89,7 @@ public static IServiceCollection AddPineconeVectorStoreRecordCollection var pineconeClient = sp.GetRequiredService(); var selectedOptions = options ?? sp.GetService>(); - return new PineconeVectorStoreRecordCollection( + return new PineconeVectorStoreRecordCollection( pineconeClient, collectionName, selectedOptions); @@ -106,9 +106,9 @@ public static IServiceCollection AddPineconeVectorStoreRecordCollection /// /// The type of the data model that the collection should contain. /// The to register the on. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// The api key for Pinecone. - /// Optional configuration options to pass to the . + /// Optional configuration options to pass to the . /// An optional service id to use as the service key. /// The service collection. public static IServiceCollection AddPineconeVectorStoreRecordCollection( @@ -126,7 +126,7 @@ public static IServiceCollection AddPineconeVectorStoreRecordCollection var pineconeClient = new Sdk.PineconeClient(apiKey); var selectedOptions = options ?? 
sp.GetService>(); - return new PineconeVectorStoreRecordCollection( + return new PineconeVectorStoreRecordCollection( pineconeClient, collectionName, selectedOptions); diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs index 81a6b70b5ed8..f0a837d660b8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs @@ -54,12 +54,7 @@ public IVectorStoreRecordCollection GetCollection( } #pragma warning restore CS0618 - if (typeof(TKey) != typeof(string)) - { - throw new NotSupportedException("Only string keys are supported."); - } - - return (new PineconeVectorStoreRecordCollection( + return (new PineconeVectorStoreRecordCollection( this._pineconeClient, name, new PineconeVectorStoreRecordCollectionOptions() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection)!; diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs index 310cce39d533..bf0034fac1d9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs @@ -10,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; public sealed class PineconeVectorStoreOptions { /// - /// An optional factory to use for constructing instances, if a custom record collection is required. + /// An optional factory to use for constructing instances, if a custom record collection is required. /// [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IPineconeVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index bde6cdeb5a29..d997edf836d2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using System.Diagnostics; using System.Linq; using System.Linq.Expressions; using System.Runtime.CompilerServices; @@ -17,11 +18,13 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Service for storing and retrieving vector records, that uses Pinecone as the underlying storage. /// +/// The data type of the record key. Can be either , or for dynamic mapping. /// The data model to use for adding, updating and retrieving data from storage. 
#pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class PineconeVectorStoreRecordCollection : IVectorStoreRecordCollection -#pragma warning restore CA1711 // Identifiers should not have incorrect suffix +public sealed class PineconeVectorStoreRecordCollection : IVectorStoreRecordCollection + where TKey : notnull where TRecord : notnull +#pragma warning restore CA1711 // Identifiers should not have incorrect suffix { private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -40,24 +43,28 @@ public sealed class PineconeVectorStoreRecordCollection : IVectorStoreR public string CollectionName { get; } /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// Pinecone client that can be used to manage the collections and vectors in a Pinecone store. /// Optional configuration options for this class. /// Thrown if the is null. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Thrown for any misconfigured options. public PineconeVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, string collectionName, PineconeVectorStoreRecordCollectionOptions? options = null) { Verify.NotNull(pineconeClient); VerifyCollectionName(collectionName); + if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(object)) + { + throw new NotSupportedException("Only string keys are supported (and object for dynamic mapping)"); + } + this._pineconeClient = pineconeClient; this.CollectionName = collectionName; this._options = options ?? new PineconeVectorStoreRecordCollectionOptions(); this._model = new VectorStoreRecordModelBuilder(PineconeVectorStoreRecordFieldMapping.ModelBuildingOptions) .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); - #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete this._mapper = this._options.VectorCustomMapper ?? new PineconeVectorStoreRecordMapper(this._model); #pragma warning restore CS0618 @@ -151,14 +158,12 @@ public async Task DeleteCollectionAsync(CancellationToken cancellationToken = de } /// - public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { - Verify.NotNull(key); - Sdk.FetchRequest request = new() { Namespace = this._options.IndexNamespace, - Ids = [key] + Ids = [this.GetStringKey(key)] }; var response = await this.RunIndexOperationAsync( @@ -182,13 +187,21 @@ public async Task DeleteCollectionAsync(CancellationToken cancellationToken = de /// public async IAsyncEnumerable GetAsync( - IEnumerable keys, + IEnumerable keys, GetRecordOptions? 
options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(keys); - List keysList = keys.ToList(); +#pragma warning disable CA1851 // Bogus: Possible multiple enumerations of 'IEnumerable' collection + var keysList = keys switch + { + IEnumerable k => k.ToList(), + IEnumerable k => k.Cast().ToList(), + _ => throw new UnreachableException("string key should have been validated during model building") + }; +#pragma warning restore CA1851 + if (keysList.Count == 0) { yield break; @@ -223,14 +236,12 @@ public async IAsyncEnumerable GetAsync( } /// - public Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { - Verify.NotNullOrWhiteSpace(key); - Sdk.DeleteRequest request = new() { Namespace = this._options.IndexNamespace, - Ids = [key] + Ids = [this.GetStringKey(key)] }; return this.RunIndexOperationAsync( @@ -239,11 +250,17 @@ public Task DeleteAsync(string key, CancellationToken cancellationToken = defaul } /// - public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); - List keysList = keys.ToList(); + var keysList = keys switch + { + IEnumerable k => k.ToList(), + IEnumerable k => k.Cast().ToList(), + _ => throw new UnreachableException("string key should have been validated during model building") + }; + if (keysList.Count == 0) { return Task.CompletedTask; @@ -261,7 +278,7 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellation } /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -282,11 +299,11 @@ await this.RunIndexOperationAsync( "Upsert", indexClient => indexClient.UpsertAsync(request, cancellationToken: cancellationToken)).ConfigureAwait(false); - return vector.Id; + return (TKey)(object)vector.Id; } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -314,7 +331,7 @@ await this.RunIndexOperationAsync( foreach (var vector in vectors) { - yield return vector.Id; + yield return (TKey)(object)vector.Id; } } @@ -526,4 +543,15 @@ private static void VerifyCollectionName(string collectionName) } } } + + private string GetStringKey(TKey key) + { + Verify.NotNull(key); + + var stringKey = key as string ?? throw new UnreachableException("string key should have been validated during model building"); + + Verify.NotNullOrWhiteSpace(stringKey, nameof(key)); + + return stringKey; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs index 64c6b31b478a..6c30de4c31a6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs @@ -7,7 +7,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// -/// Options when creating a . +/// Options when creating a . 
/// public sealed class PineconeVectorStoreRecordCollectionOptions { diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs index a1e8db6014a5..ba0ce35e2490 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs @@ -69,12 +69,11 @@ public TRecord MapFromStorageToDataModel(Vector storageModel, StorageToDataModel { foreach (var property in model.DataProperties) { - if (storageModel.Metadata.TryGetValue(property.StorageName, out var metadataValue)) - { - property.SetValueAsObject( - outputRecord, - metadataValue is null ? null : PineconeVectorStoreRecordFieldMapping.ConvertFromMetadataValueToNativeType(metadataValue, property.Type)); - } + property.SetValueAsObject( + outputRecord, + storageModel.Metadata.TryGetValue(property.StorageName, out var metadataValue) && metadataValue is not null + ? PineconeVectorStoreRecordFieldMapping.ConvertFromMetadataValueToNativeType(metadataValue, property.Type) + : null); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs index 60382990aecf..500bd7b9b18b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs @@ -23,19 +23,19 @@ internal PostgresFilterTranslator( internal List ParameterValues => this._parameterValues; - protected override void TranslateContainsOverArrayColumn(Expression source, Expression item, MethodCallExpression parent) + protected override void TranslateContainsOverArrayColumn(Expression source, Expression item) { - this.Translate(source, parent); + this.Translate(source); this._sql.Append(" @> ARRAY["); - this.Translate(item, parent); + this.Translate(item); this._sql.Append(']'); } - protected override void TranslateContainsOverCapturedArray(Expression source, Expression item, MethodCallExpression parent, object? value) + protected override void TranslateContainsOverCapturedArray(Expression source, Expression item, object? value) { - this.Translate(item, parent); + this.Translate(item); this._sql.Append(" = ANY ("); - this.Translate(source, parent); + this.Translate(source); this._sql.Append(')'); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs index ce18426cfe57..3ced37909283 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs @@ -47,9 +47,9 @@ private Filter Translate(Expression? node) BinaryExpression { NodeType: ExpressionType.OrElse } orElse => this.TranslateOrElse(orElse.Left, orElse.Right), UnaryExpression { NodeType: ExpressionType.Not } not => this.TranslateNot(not.Operand), - // MemberExpression is generally handled within e.g. TranslateEqual; this is used to translate direct bool inside filter (e.g. 
Filter => r => r.Bool) - MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _) - => this.TranslateEqual(member, Expression.Constant(true)), + // Special handling for bool constant as the filter expression (r => r.Bool) + Expression when node.Type == typeof(bool) && this.TryBindProperty(node, out var property) + => this.GenerateEqual(property.StorageName, value: true), MethodCallExpression methodCall => this.TranslateMethodCall(methodCall), @@ -57,53 +57,44 @@ private Filter Translate(Expression? node) }; private Filter TranslateEqual(Expression left, Expression right, bool negated = false) - { - return TryProcessEqual(left, right, out var result) - ? result - : TryProcessEqual(right, left, out result) - ? result + => (this.TryBindProperty(left, out var property) && TryGetConstant(right, out var constantValue)) + || (this.TryBindProperty(right, out property) && TryGetConstant(left, out constantValue)) + ? this.GenerateEqual(property.StorageName, constantValue, negated) : throw new NotSupportedException("Equality expression not supported by Qdrant"); - bool TryProcessEqual(Expression first, Expression second, [NotNullWhen(true)] out Filter? result) - { - // TODO: Nullable - if (this.TryTranslateFieldAccess(first, out var storagePropertyName) - && TryGetConstant(second, out var constantValue)) + private Filter GenerateEqual(string propertyStorageName, object? value, bool negated = false) + { + var condition = value is null + ? new Condition { IsNull = new() { Key = propertyStorageName } } + : new Condition { - var condition = constantValue is null - ? new Condition { IsNull = new() { Key = storagePropertyName } } - : new Condition + Field = new FieldCondition + { + Key = propertyStorageName, + Match = value switch { - Field = new FieldCondition - { - Key = storagePropertyName, - Match = constantValue switch - { - string stringValue => new Match { Keyword = stringValue }, - int intValue => new Match { Integer = intValue }, - long longValue => new Match { Integer = longValue }, - bool boolValue => new Match { Boolean = boolValue }, - - _ => throw new InvalidOperationException($"Unsupported filter value type '{constantValue.GetType().Name}'.") - } - } - }; + string stringValue => new Match { Keyword = stringValue }, + int intValue => new Match { Integer = intValue }, + long longValue => new Match { Integer = longValue }, + bool boolValue => new Match { Boolean = boolValue }, - result = new Filter(); - if (negated) - { - result.MustNot.Add(condition); - } - else - { - result.Must.Add(condition); + _ => throw new InvalidOperationException($"Unsupported filter value type '{value.GetType().Name}'.") + } } - return true; - } + }; - result = null; - return false; + var result = new Filter(); + + if (negated) + { + result.MustNot.Add(condition); + } + else + { + result.Must.Add(condition); } + + return result; } private Filter TranslateComparison(BinaryExpression comparison) @@ -117,7 +108,7 @@ private Filter TranslateComparison(BinaryExpression comparison) bool TryProcessComparison(Expression first, Expression second, [NotNullWhen(true)] out Filter? 
result) { // TODO: Nullable - if (this.TryTranslateFieldAccess(first, out var storagePropertyName) + if (this.TryBindProperty(first, out var property) && TryGetConstant(second, out var constantValue)) { double doubleConstantValue = constantValue switch @@ -133,7 +124,7 @@ bool TryProcessComparison(Expression first, Expression second, [NotNullWhen(true { Field = new FieldCondition { - Key = storagePropertyName, + Key = property.StorageName, Range = comparison.NodeType switch { ExpressionType.GreaterThan => new Range { Gt = doubleConstantValue }, @@ -280,7 +271,7 @@ private Filter TranslateContains(Expression source, Expression item) switch (source) { // Contains over field enumerable - case var _ when this.TryTranslateFieldAccess(source, out _): + case var _ when this.TryBindProperty(source, out _): // Oddly, in Qdrant, tag list contains is handled using a Match condition, just like equality. return this.TranslateEqual(source, item); @@ -311,58 +302,87 @@ private Filter TranslateContains(Expression source, Expression item) Filter ProcessInlineEnumerable(IEnumerable elements, Expression item) { - if (!this.TryTranslateFieldAccess(item, out var storagePropertyName)) + if (!this.TryBindProperty(item, out var property)) { throw new NotSupportedException("Unsupported item type in Contains"); } - if (item.Type == typeof(string)) + switch (property.Type) { - var strings = new RepeatedStrings(); + case var t when t == typeof(string): + var strings = new RepeatedStrings(); - foreach (var value in elements) - { - strings.Strings.Add(value is string or null - ? (string?)value - : throw new ArgumentException("Non-string element in string Contains array")); - } + foreach (var value in elements) + { + strings.Strings.Add(value is string or null + ? (string?)value + : throw new ArgumentException("Non-string element in string Contains array")); + } - return new Filter { Must = { new Condition { Field = new FieldCondition { Key = storagePropertyName, Match = new Match { Keywords = strings } } } } }; - } + return new Filter { Must = { new Condition { Field = new FieldCondition { Key = property.StorageName, Match = new Match { Keywords = strings } } } } }; - if (item.Type == typeof(int)) - { - var ints = new RepeatedIntegers(); + case var t when t == typeof(int): + var ints = new RepeatedIntegers(); - foreach (var value in elements) - { - ints.Integers.Add(value is int intValue - ? intValue - : throw new ArgumentException("Non-int element in string Contains array")); - } + foreach (var value in elements) + { + ints.Integers.Add(value is int intValue + ? intValue + : throw new ArgumentException("Non-int element in string Contains array")); + } - return new Filter { Must = { new Condition { Field = new FieldCondition { Key = storagePropertyName, Match = new Match { Integers = ints } } } } }; - } + return new Filter { Must = { new Condition { Field = new FieldCondition { Key = property.StorageName, Match = new Match { Integers = ints } } } } }; - throw new NotSupportedException("Contains only supported over array of ints or strings"); + default: + throw new NotSupportedException("Contains only supported over array of ints or strings"); + } } } - private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] out string? storagePropertyName) + private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out VectorStoreRecordPropertyModel? property) { - if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) + Type? 
convertedClrType = null; + + if (expression is UnaryExpression { NodeType: ExpressionType.Convert } unary) { - if (!this._model.PropertyMap.TryGetValue(memberExpression.Member.Name, out var property)) + expression = unary.Operand; + convertedClrType = unary.Type; + } + + var modelName = expression switch + { + // Regular member access for strongly-typed POCO binding (e.g. r => r.SomeInt == 8) + MemberExpression memberExpression when memberExpression.Expression == this._recordParameter + => memberExpression.Member.Name, + + // Dictionary lookup for weakly-typed dynamic binding (e.g. r => r["SomeInt"] == 8) + MethodCallExpression { - throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); - } + Method: { Name: "get_Item", DeclaringType: var declaringType }, + Arguments: [ConstantExpression { Value: string keyName }] + } methodCall when methodCall.Object == this._recordParameter && declaringType == typeof(Dictionary) + => keyName, - storagePropertyName = property.StorageName; - return true; + _ => null + }; + + if (modelName is null) + { + property = null; + return false; + } + + if (!this._model.PropertyMap.TryGetValue(modelName, out property)) + { + throw new InvalidOperationException($"Property name '{modelName}' provided as part of the filter clause is not a valid property name."); + } + + if (convertedClrType is not null && convertedClrType != property.Type) + { + throw new InvalidCastException($"Property '{property.ModelName}' is being cast to type '{convertedClrType.Name}', but its configured type is '{property.Type.Name}'."); } - storagePropertyName = null; - return false; + return true; } private static bool TryGetConstant(Expression expression, out object? constantValue) diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantServiceCollectionExtensions.cs index cf68dcff7708..c61eab57376a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantServiceCollectionExtensions.cs @@ -91,7 +91,7 @@ public static IServiceCollection AddQdrantVectorStoreRecordCollection(); var selectedOptions = options ?? 
sp.GetService>(); - return (new QdrantVectorStoreRecordCollection(qdrantClient, collectionName, selectedOptions) as IVectorStoreRecordCollection)!; + return (new QdrantVectorStoreRecordCollection(qdrantClient, collectionName, selectedOptions) as IVectorStoreRecordCollection)!; }); AddVectorizedSearch(services, serviceId); @@ -133,7 +133,7 @@ public static IServiceCollection AddQdrantVectorStoreRecordCollection>(); - return (new QdrantVectorStoreRecordCollection(qdrantClient, collectionName, selectedOptions) as IVectorStoreRecordCollection)!; + return (new QdrantVectorStoreRecordCollection(qdrantClient, collectionName, selectedOptions) as IVectorStoreRecordCollection)!; }); AddVectorizedSearch(services, serviceId); diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs index a2618f3ae1a4..79c46770ba9e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs @@ -67,12 +67,7 @@ public IVectorStoreRecordCollection GetCollection( } #pragma warning restore CS0618 - if (typeof(TKey) != typeof(ulong) && typeof(TKey) != typeof(Guid)) - { - throw new NotSupportedException("Only ulong and Guid keys are supported."); - } - - var recordCollection = new QdrantVectorStoreRecordCollection(this._qdrantClient, name, new QdrantVectorStoreRecordCollectionOptions() + var recordCollection = new QdrantVectorStoreRecordCollection(this._qdrantClient, name, new QdrantVectorStoreRecordCollectionOptions() { HasNamedVectors = this._options.HasNamedVectors, VectorStoreRecordDefinition = vectorStoreRecordDefinition diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs index e7ce3f053970..5ad87a389833 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs @@ -16,7 +16,7 @@ public sealed class QdrantVectorStoreOptions public bool HasNamedVectors { get; set; } = false; /// - /// An optional factory to use for constructing instances, if a custom record collection is required. + /// An optional factory to use for constructing instances, if a custom record collection is required. /// [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IQdrantVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index 17ce98485767..e99cfa04a61b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -1,7 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Collections; using System.Collections.Generic; +using System.Diagnostics; using System.Linq; using System.Linq.Expressions; using System.Runtime.CompilerServices; @@ -18,14 +20,13 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// Service for storing and retrieving vector records, that uses Qdrant as the underlying storage. /// +/// The data type of the record key. Can be either or , or for dynamic mapping. 
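// ----------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the patch): with the Qdrant collection now generic over TKey,
// one class serves both ulong- and Guid-keyed records, and Upsert/Get/Delete are called without
// the removed UpsertRecordOptions/DeleteRecordOptions parameters. "Hotel", its field names and
// the collection name are hypothetical; only the constructor and method shapes come from this patch.
using System;
using Microsoft.Extensions.VectorData;
using Microsoft.SemanticKernel.Connectors.Qdrant;
using Qdrant.Client;

var qdrantClient = new QdrantClient("localhost");
var collection = new QdrantVectorStoreRecordCollection<Guid, Hotel>(qdrantClient, "skhotels");

Guid key = await collection.UpsertAsync(new Hotel { HotelId = Guid.NewGuid(), Description = "Sample" });
Hotel? hotel = await collection.GetAsync(key, new GetRecordOptions { IncludeVectors = true });
await collection.DeleteAsync(key);

// Hypothetical record type used only for this sketch.
public sealed class Hotel
{
    [VectorStoreRecordKey] public Guid HotelId { get; set; }
    [VectorStoreRecordData] public string? Description { get; set; }
    [VectorStoreRecordVector(4)] public ReadOnlyMemory<float>? DescriptionEmbedding { get; set; }
}
// ----------------------------------------------------------------------------------------------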
/// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class QdrantVectorStoreRecordCollection : - IVectorStoreRecordCollection, - IVectorStoreRecordCollection, - IKeywordHybridSearch -#pragma warning restore CA1711 // Identifiers should not have incorrect suffix +public sealed class QdrantVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch + where TKey : notnull where TRecord : notnull +#pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; @@ -45,7 +46,7 @@ public sealed class QdrantVectorStoreRecordCollection : /// Qdrant client that can be used to manage the collections and points in a Qdrant store. private readonly MockableQdrantClient _qdrantClient; - /// The name of the collection that this will access. + /// The name of the collection that this will access. private readonly string _collectionName; /// Optional configuration options for this class. @@ -60,10 +61,10 @@ public sealed class QdrantVectorStoreRecordCollection : #pragma warning restore CS0618 /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// Qdrant client that can be used to manage the collections and points in a Qdrant store. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. /// Thrown if the is null. /// Thrown for any misconfigured options. @@ -73,10 +74,10 @@ public QdrantVectorStoreRecordCollection(QdrantClient qdrantClient, string colle } /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// Qdrant client that can be used to manage the collections and points in a Qdrant store. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. /// Thrown if the is null. /// Thrown for any misconfigured options. @@ -86,6 +87,11 @@ internal QdrantVectorStoreRecordCollection(MockableQdrantClient qdrantClient, st Verify.NotNull(qdrantClient); Verify.NotNullOrWhiteSpace(collectionName); + if (typeof(TKey) != typeof(ulong) && typeof(TKey) != typeof(Guid) && typeof(TKey) != typeof(object)) + { + throw new NotSupportedException("Only ulong and Guid keys are supported (and object for dynamic mapping)."); + } + // Assign. this._qdrantClient = qdrantClient; this._collectionName = collectionName; @@ -233,7 +239,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) }); /// - public async Task GetAsync(ulong key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(key); @@ -242,104 +248,187 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public async Task GetAsync(Guid key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async IAsyncEnumerable GetAsync( + IEnumerable keys, + GetRecordOptions? 
options = default, + [EnumeratorCancellation] CancellationToken cancellationToken = default) { - Verify.NotNull(key); + const string OperationName = "Retrieve"; - var retrievedPoints = await this.GetAsync([key], options, cancellationToken).ToListAsync(cancellationToken).ConfigureAwait(false); - return retrievedPoints.FirstOrDefault(); - } + Verify.NotNull(keys); - /// - public IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default) - { - return this.GetBatchByPointIdAsync(keys, key => new PointId { Num = key }, options, cancellationToken); - } + // Create options. + var pointsIds = new List(); - /// - public IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, CancellationToken cancellationToken = default) - { - return this.GetBatchByPointIdAsync(keys, key => new PointId { Uuid = key.ToString("D") }, options, cancellationToken); - } + Type? keyType = null; - /// - public Task DeleteAsync(ulong key, CancellationToken cancellationToken = default) - { - Verify.NotNull(key); + foreach (var key in keys) + { + switch (key) + { + case ulong id: + if (keyType == typeof(Guid)) + { + throw new NotSupportedException("Mixing ulong and Guid keys is not supported"); + } - return this.RunOperationAsync( - DeleteName, - () => this._qdrantClient.DeleteAsync( + keyType = typeof(ulong); + pointsIds.Add(new PointId { Num = id }); + break; + + case Guid id: + if (keyType == typeof(ulong)) + { + throw new NotSupportedException("Mixing ulong and Guid keys is not supported"); + } + + pointsIds.Add(new PointId { Uuid = id.ToString("D") }); + keyType = typeof(Guid); + break; + + default: + throw new NotSupportedException($"The provided key type '{key.GetType().Name}' is not supported by Qdrant."); + } + } + + var includeVectors = options?.IncludeVectors ?? false; + + // Retrieve data points. + var retrievedPoints = await this.RunOperationAsync( + OperationName, + () => this._qdrantClient.RetrieveAsync(this._collectionName, pointsIds, true, includeVectors, cancellationToken: cancellationToken)).ConfigureAwait(false); + + // Convert the retrieved points to the target data model. 
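// Editor's note (not part of the patch): the batch overloads above require a homogeneous key type;
// a keys collection that mixes ulong and Guid values now fails with
// NotSupportedException("Mixing ulong and Guid keys is not supported").
// Hypothetical example of an input that would be rejected:
//     object[] mixedKeys = { 1UL, Guid.NewGuid() };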
+ foreach (var retrievedPoint in retrievedPoints) + { + var pointStruct = new PointStruct + { + Id = retrievedPoint.Id, + Vectors = retrievedPoint.Vectors, + Payload = { } + }; + + foreach (KeyValuePair payloadEntry in retrievedPoint.Payload) + { + pointStruct.Payload.Add(payloadEntry.Key, payloadEntry.Value); + } + + yield return VectorStoreErrorHandler.RunModelConversion( + QdrantConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, this._collectionName, - key, - wait: true, - cancellationToken: cancellationToken)); + OperationName, + () => this._mapper.MapFromStorageToDataModel(pointStruct, new() { IncludeVectors = includeVectors })); + } } /// - public Task DeleteAsync(Guid key, CancellationToken cancellationToken = default) + public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { Verify.NotNull(key); return this.RunOperationAsync( DeleteName, - () => this._qdrantClient.DeleteAsync( - this._collectionName, - key, - wait: true, - cancellationToken: cancellationToken)); + () => key switch + { + ulong id => this._qdrantClient.DeleteAsync(this._collectionName, id, wait: true, cancellationToken: cancellationToken), + Guid id => this._qdrantClient.DeleteAsync(this._collectionName, id, wait: true, cancellationToken: cancellationToken), + _ => throw new NotSupportedException($"The provided key type '{key.GetType().Name}' is not supported by Qdrant.") + }); } /// - public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); - return this.RunOperationAsync( - DeleteName, - () => this._qdrantClient.DeleteAsync( - this._collectionName, - keys.ToList(), - wait: true, - cancellationToken: cancellationToken)); - } + IList? keyList = null; - /// - public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) - { - Verify.NotNull(keys); + switch (keys) + { + case IEnumerable k: + keyList = k.ToList(); + break; + + case IEnumerable k: + keyList = k.ToList(); + break; + + case IEnumerable objectKeys: + { + // We need to cast the keys to a list of the same type as the first element. + List? guidKeys = null; + List? 
ulongKeys = null; + + var isFirst = true; + foreach (var key in objectKeys) + { + if (isFirst) + { + switch (key) + { + case ulong l: + ulongKeys = new List { l }; + keyList = ulongKeys; + break; + + case Guid g: + guidKeys = new List { g }; + keyList = guidKeys; + break; + + default: + throw new NotSupportedException($"The provided key type '{key.GetType().Name}' is not supported by Qdrant."); + } + + isFirst = false; + continue; + } + + switch (key) + { + case ulong u when ulongKeys is not null: + ulongKeys.Add(u); + continue; + + case Guid g when guidKeys is not null: + guidKeys.Add(g); + continue; + + case Guid or ulong: + throw new NotSupportedException("Mixing ulong and Guid keys is not supported"); + + default: + throw new NotSupportedException($"The provided key type '{key.GetType().Name}' is not supported by Qdrant."); + } + } + + break; + } + } return this.RunOperationAsync( DeleteName, - () => this._qdrantClient.DeleteAsync( - this._collectionName, - keys.ToList(), - wait: true, - cancellationToken: cancellationToken)); - } - - /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) - { - Verify.NotNull(record); + () => keyList switch + { + List keysList => this._qdrantClient.DeleteAsync( + this._collectionName, + keysList, + wait: true, + cancellationToken: cancellationToken), - // Create point from record. - var pointStruct = VectorStoreErrorHandler.RunModelConversion( - QdrantConstants.VectorStoreSystemName, - this._collectionMetadata.VectorStoreName, - this._collectionName, - UpsertName, - () => this._mapper.MapFromDataToStorageModel(record)); + List keysList => this._qdrantClient.DeleteAsync( + this._collectionName, + keysList, + wait: true, + cancellationToken: cancellationToken), - // Upsert. - await this.RunOperationAsync( - UpsertName, - () => this._qdrantClient.UpsertAsync(this._collectionName, [pointStruct], true, cancellationToken: cancellationToken)).ConfigureAwait(false); - return pointStruct.Id.Num; + _ => throw new UnreachableException() + }); } /// - async Task IVectorStoreRecordCollection.UpsertAsync(TRecord record, CancellationToken cancellationToken) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -355,35 +444,17 @@ async Task IVectorStoreRecordCollection.UpsertAsync(TRecord await this.RunOperationAsync( UpsertName, () => this._qdrantClient.UpsertAsync(this._collectionName, [pointStruct], true, cancellationToken: cancellationToken)).ConfigureAwait(false); - return Guid.Parse(pointStruct.Id.Uuid); - } - - /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - Verify.NotNull(records); - - // Create points from records. - var pointStructs = VectorStoreErrorHandler.RunModelConversion( - QdrantConstants.VectorStoreSystemName, - this._collectionMetadata.VectorStoreName, - this._collectionName, - UpsertName, - () => records.Select(this._mapper.MapFromDataToStorageModel).ToList()); - - // Upsert. 
- await this.RunOperationAsync( - UpsertName, - () => this._qdrantClient.UpsertAsync(this._collectionName, pointStructs, true, cancellationToken: cancellationToken)).ConfigureAwait(false); - foreach (var pointStruct in pointStructs) + return pointStruct.Id switch { - yield return pointStruct.Id.Num; - } + { HasNum: true } => (TKey)(object)pointStruct.Id.Num, + { HasUuid: true } => (TKey)(object)Guid.Parse(pointStruct.Id.Uuid), + _ => throw new UnreachableException("The Qdrant point ID is neither a number nor a UUID.") + }; } /// - async IAsyncEnumerable IVectorStoreRecordCollection.UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -400,59 +471,27 @@ await this.RunOperationAsync( UpsertName, () => this._qdrantClient.UpsertAsync(this._collectionName, pointStructs, true, cancellationToken: cancellationToken)).ConfigureAwait(false); - foreach (var pointStruct in pointStructs) + if (pointStructs.Count > 0) { - yield return Guid.Parse(pointStruct.Id.Uuid); - } - } - - /// - /// Get the requested records from the Qdrant store using the provided keys. - /// - /// The keys of the points to retrieve. - /// Function to convert the provided keys to point ids. - /// The retrieval options. - /// The to monitor for cancellation requests. The default is . - /// The retrieved points. - private async IAsyncEnumerable GetBatchByPointIdAsync( - IEnumerable keys, - Func keyConverter, - GetRecordOptions? options, - [EnumeratorCancellation] CancellationToken cancellationToken) - { - const string OperationName = "Retrieve"; - Verify.NotNull(keys); - - // Create options. - var pointsIds = keys.Select(key => keyConverter(key)).ToArray(); - var includeVectors = options?.IncludeVectors ?? false; - - // Retrieve data points. - var retrievedPoints = await this.RunOperationAsync( - OperationName, - () => this._qdrantClient.RetrieveAsync(this._collectionName, pointsIds, true, includeVectors, cancellationToken: cancellationToken)).ConfigureAwait(false); - - // Convert the retrieved points to the target data model. 
- foreach (var retrievedPoint in retrievedPoints) - { - var pointStruct = new PointStruct + switch (pointStructs[0].Id) { - Id = retrievedPoint.Id, - Vectors = retrievedPoint.Vectors, - Payload = { } - }; + case { HasNum: true }: + foreach (var pointStruct in pointStructs) + { + yield return (TKey)(object)pointStruct.Id.Num; + } + break; - foreach (KeyValuePair payloadEntry in retrievedPoint.Payload) - { - pointStruct.Payload.Add(payloadEntry.Key, payloadEntry.Value); - } + case { HasUuid: true }: + foreach (var pointStruct in pointStructs) + { + yield return (TKey)(object)Guid.Parse(pointStruct.Id.Uuid); + } + break; - yield return VectorStoreErrorHandler.RunModelConversion( - QdrantConstants.VectorStoreSystemName, - this._collectionMetadata.VectorStoreName, - this._collectionName, - OperationName, - () => this._mapper.MapFromStorageToDataModel(pointStruct, new() { IncludeVectors = includeVectors })); + default: + throw new UnreachableException("The Qdrant point ID is neither a number nor a UUID."); + } } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs index 1b1289c799b6..6daeafc364de 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs @@ -7,7 +7,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// -/// Options when creating a . +/// Options when creating a . /// public sealed class QdrantVectorStoreRecordCollectionOptions { diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs index 695c2b2b1700..799c9c84d657 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs @@ -68,7 +68,7 @@ or ExpressionType.LessThan or ExpressionType.LessThanOrEqual return; // MemberExpression is generally handled within e.g. TranslateEqual; this is used to translate direct bool inside filter (e.g. 
Filter => r => r.Bool) - case MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _): + case MemberExpression member when member.Type == typeof(bool) && this.TryBindProperty(member, out _): { this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(true))); return; @@ -93,7 +93,7 @@ private void TranslateEqualityComparison(BinaryExpression binary) bool TryProcessEqualityComparison(Expression first, Expression second) { // TODO: Nullable - if (this.TryTranslateFieldAccess(first, out var storagePropertyName) + if (this.TryBindProperty(first, out var property) && TryGetConstant(second, out var constantValue)) { // Numeric negation has a special syntax (!=), for the rest we nest in a NOT @@ -104,7 +104,7 @@ bool TryProcessEqualityComparison(Expression first, Expression second) } // https://redis.io/docs/latest/develop/interact/search-and-query/query/exact-match - this._filter.Append('@').Append(storagePropertyName); + this._filter.Append('@').Append(property.StorageName); this._filter.Append( binary.NodeType switch @@ -176,13 +176,13 @@ private void TranslateMethodCall(MethodCallExpression methodCall) private void TranslateContains(Expression source, Expression item) { // Contains over tag field - if (this.TryTranslateFieldAccess(source, out var storagePropertyName) + if (this.TryBindProperty(source, out var property) && TryGetConstant(item, out var itemConstant) && itemConstant is string stringConstant) { this._filter .Append('@') - .Append(storagePropertyName) + .Append(property.StorageName) .Append(":{") .Append(stringConstant) .Append('}'); @@ -192,21 +192,50 @@ private void TranslateContains(Expression source, Expression item) throw new NotSupportedException("Contains supported only over tag field"); } - private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] out string? storagePropertyName) + private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out VectorStoreRecordPropertyModel? property) { - if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) + Type? convertedClrType = null; + + if (expression is UnaryExpression { NodeType: ExpressionType.Convert } unary) + { + expression = unary.Operand; + convertedClrType = unary.Type; + } + + var modelName = expression switch { - if (!this._model.PropertyMap.TryGetValue(memberExpression.Member.Name, out var property)) + // Regular member access for strongly-typed POCO binding (e.g. r => r.SomeInt == 8) + MemberExpression memberExpression when memberExpression.Expression == this._recordParameter + => memberExpression.Member.Name, + + // Dictionary lookup for weakly-typed dynamic binding (e.g. 
r => r["SomeInt"] == 8) + MethodCallExpression { - throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); - } + Method: { Name: "get_Item", DeclaringType: var declaringType }, + Arguments: [ConstantExpression { Value: string keyName }] + } methodCall when methodCall.Object == this._recordParameter && declaringType == typeof(Dictionary) + => keyName, - storagePropertyName = property.StorageName; - return true; + _ => null + }; + + if (modelName is null) + { + property = null; + return false; + } + + if (!this._model.PropertyMap.TryGetValue(modelName, out property)) + { + throw new InvalidOperationException($"Property name '{modelName}' provided as part of the filter clause is not a valid property name."); + } + + if (convertedClrType is not null && convertedClrType != property.Type) + { + throw new InvalidCastException($"Property '{property.ModelName}' is being cast to type '{convertedClrType.Name}', but its configured type is '{property.Type.Name}'."); } - storagePropertyName = null; - return false; + return true; } private static bool TryGetConstant(Expression expression, out object? constantValue) diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetGenericDataModelMapper.cs deleted file mode 100644 index ef3c42722845..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetGenericDataModelMapper.cs +++ /dev/null @@ -1,135 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.InteropServices; -using Microsoft.Extensions.VectorData; -using StackExchange.Redis; - -namespace Microsoft.SemanticKernel.Connectors.Redis; - -/// -/// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Redis when using hash sets. -/// -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete -internal sealed class RedisHashSetGenericDataModelMapper : IVectorStoreRecordMapper, (string Key, HashEntry[] HashEntries)> -#pragma warning restore CS0618 -{ - /// All the properties from the record definition. - private readonly IReadOnlyList _properties; - - /// - /// Initializes a new instance of the class. - /// - /// All the properties from the record definition. - public RedisHashSetGenericDataModelMapper(IReadOnlyList properties) - { - Verify.NotNull(properties); - this._properties = properties; - } - - /// - public (string Key, HashEntry[] HashEntries) MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) - { - var hashEntries = new List(); - - foreach (var property in this._properties) - { - var storagePropertyName = property.StoragePropertyName ?? property.DataModelPropertyName; - var sourceDictionary = property is VectorStoreRecordDataProperty ? dataModel.Data : dataModel.Vectors; - - // Only map properties across that actually exist in the input. - if (sourceDictionary is null || !sourceDictionary.TryGetValue(property.DataModelPropertyName, out var sourceValue)) - { - continue; - } - - // Replicate null if the property exists but is null. 
- if (sourceValue is null) - { - hashEntries.Add(new HashEntry(storagePropertyName, RedisValue.Null)); - continue; - } - - // Map data Properties - if (property is VectorStoreRecordDataProperty dataProperty) - { - hashEntries.Add(new HashEntry(storagePropertyName, RedisValue.Unbox(sourceValue))); - } - // Map vector properties - else if (property is VectorStoreRecordVectorProperty vectorProperty) - { - if (sourceValue is ReadOnlyMemory rom) - { - hashEntries.Add(new HashEntry(storagePropertyName, RedisVectorStoreRecordFieldMapping.ConvertVectorToBytes(rom))); - } - else if (sourceValue is ReadOnlyMemory rod) - { - hashEntries.Add(new HashEntry(storagePropertyName, RedisVectorStoreRecordFieldMapping.ConvertVectorToBytes(rod))); - } - else - { - throw new VectorStoreRecordMappingException($"Unsupported vector type {sourceValue.GetType().Name} found on property ${vectorProperty.DataModelPropertyName}. Only float and double vectors are supported."); - } - } - } - - return (dataModel.Key, hashEntries.ToArray()); - } - - /// - public VectorStoreGenericDataModel MapFromStorageToDataModel((string Key, HashEntry[] HashEntries) storageModel, StorageToDataModelMapperOptions options) - { - var dataModel = new VectorStoreGenericDataModel(storageModel.Key); - - foreach (var property in this._properties) - { - var storagePropertyName = property.StoragePropertyName ?? property.DataModelPropertyName; - var targetDictionary = property is VectorStoreRecordDataProperty ? dataModel.Data : dataModel.Vectors; - var hashEntry = storageModel.HashEntries.FirstOrDefault(x => x.Name == storagePropertyName); - - // Only map properties across that actually exist in the input. - if (!hashEntry.Name.HasValue) - { - continue; - } - - // Replicate null if the property exists but is null. - if (hashEntry.Value.IsNull) - { - targetDictionary.Add(property.DataModelPropertyName, null); - continue; - } - - // Map data Properties - if (property is VectorStoreRecordDataProperty dataProperty) - { - var typeOrNullableType = Nullable.GetUnderlyingType(property.PropertyType) ?? property.PropertyType; - var convertedValue = Convert.ChangeType(hashEntry.Value, typeOrNullableType); - dataModel.Data.Add(dataProperty.DataModelPropertyName, convertedValue); - } - - // Map vector properties - else if (property is VectorStoreRecordVectorProperty vectorProperty) - { - if (property.PropertyType == typeof(ReadOnlyMemory) || property.PropertyType == typeof(ReadOnlyMemory?)) - { - var array = MemoryMarshal.Cast((byte[])hashEntry.Value!).ToArray(); - dataModel.Vectors.Add(vectorProperty.DataModelPropertyName, new ReadOnlyMemory(array)); - } - else if (property.PropertyType == typeof(ReadOnlyMemory) || property.PropertyType == typeof(ReadOnlyMemory?)) - { - var array = MemoryMarshal.Cast((byte[])hashEntry.Value!).ToArray(); - dataModel.Vectors.Add(vectorProperty.DataModelPropertyName, new ReadOnlyMemory(array)); - } - else - { - throw new VectorStoreRecordMappingException($"Unsupported vector type '{property.PropertyType.Name}' found on property '{property.DataModelPropertyName}'. 
Only float and double vectors are supported."); - } - } - } - - return dataModel; - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index 3fadb85ba4e9..358ed3f80b25 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using System.Diagnostics; using System.Linq; using System.Linq.Expressions; using System.Runtime.CompilerServices; @@ -19,9 +20,12 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// Service for storing and retrieving vector records, that uses Redis HashSets as the underlying storage. /// +/// The data type of the record key. Can be either , or for dynamic mapping. /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class RedisHashSetVectorStoreRecordCollection : IVectorStoreRecordCollection where TRecord : notnull +public sealed class RedisHashSetVectorStoreRecordCollection : IVectorStoreRecordCollection + where TKey : notnull + where TRecord : notnull #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// Metadata about vector store record collection. @@ -67,7 +71,7 @@ public sealed class RedisHashSetVectorStoreRecordCollection : IVectorSt /// The Redis database to read/write records from. private readonly IDatabase _database; - /// The name of the collection that this will access. + /// The name of the collection that this will access. private readonly string _collectionName; /// Optional configuration options for this class. @@ -88,7 +92,7 @@ public sealed class RedisHashSetVectorStoreRecordCollection : IVectorSt #pragma warning restore CS0618 /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// The Redis database to read/write records from. /// The name of the collection that this will access. @@ -100,6 +104,11 @@ public RedisHashSetVectorStoreRecordCollection(IDatabase database, string collec Verify.NotNull(database); Verify.NotNullOrWhiteSpace(collectionName); + if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(object)) + { + throw new NotSupportedException("Only string keys are supported (and object for dynamic mapping)."); + } + // Assign. this._database = database; this._collectionName = collectionName; @@ -198,12 +207,12 @@ await this.RunOperationAsync("FT.DROPINDEX", } /// - public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { - Verify.NotNullOrWhiteSpace(key); + var stringKey = this.GetStringKey(key); // Create Options - var maybePrefixedKey = this.PrefixKeyIfNeeded(key); + var maybePrefixedKey = this.PrefixKeyIfNeeded(stringKey); var includeVectors = options?.IncludeVectors ?? false; var operationName = includeVectors ? 
"HGETALL" : "HMGET"; @@ -238,12 +247,12 @@ await this.RunOperationAsync("FT.DROPINDEX", operationName, () => { - return this._mapper.MapFromStorageToDataModel((key, retrievedHashEntries), new() { IncludeVectors = includeVectors }); + return this._mapper.MapFromStorageToDataModel((stringKey, retrievedHashEntries), new() { IncludeVectors = includeVectors }); }); } /// - public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -260,12 +269,12 @@ public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRec } /// - public Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { - Verify.NotNullOrWhiteSpace(key); + var stringKey = this.GetStringKey(key); // Create Options - var maybePrefixedKey = this.PrefixKeyIfNeeded(key); + var maybePrefixedKey = this.PrefixKeyIfNeeded(stringKey); // Remove. return this.RunOperationAsync( @@ -275,7 +284,7 @@ public Task DeleteAsync(string key, CancellationToken cancellationToken = defaul } /// - public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -285,7 +294,7 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellation } /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -307,11 +316,11 @@ await this.RunOperationAsync( maybePrefixedKey, redisHashSetRecord.HashEntries)).ConfigureAwait(false); - return redisHashSetRecord.Key; + return (TKey)(object)redisHashSetRecord.Key; } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -322,7 +331,7 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, { if (result is not null) { - yield return result; + yield return (TKey)(object)result; } } } @@ -510,4 +519,15 @@ private async Task RunOperationAsync(string operationName, Func operation) }; } } + + private string GetStringKey(TKey key) + { + Verify.NotNull(key); + + var stringKey = key as string ?? 
throw new UnreachableException("string key should have been validated during model building"); + + Verify.NotNullOrWhiteSpace(stringKey, nameof(key)); + + return stringKey; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs index 520869d35c43..88f85b64365c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs @@ -63,10 +63,10 @@ public TConsumerDataModel MapFromStorageToDataModel((string Key, HashEntry[] Has var hashEntriesDictionary = storageModel.HashEntries.ToDictionary(x => (string)x.Name!, x => x.Value); // Construct the output record. - var outputRecord = model.CreateRecord(); + var outputRecord = model.CreateRecord()!; // Set Key. - model.KeyProperty.SetValueAsObject(outputRecord!, storageModel.Key); + model.KeyProperty.SetValueAsObject(outputRecord, storageModel.Key); // Set each vector property if embeddings should be returned. if (options?.IncludeVectors is true) @@ -75,6 +75,12 @@ public TConsumerDataModel MapFromStorageToDataModel((string Key, HashEntry[] Has { if (hashEntriesDictionary.TryGetValue(property.StorageName, out var vector)) { + if (vector.IsNull) + { + property.SetValueAsObject(outputRecord!, null); + continue; + } + property.SetValueAsObject(outputRecord!, property.Type switch { Type t when t == typeof(ReadOnlyMemory) || t == typeof(ReadOnlyMemory?) @@ -91,6 +97,12 @@ public TConsumerDataModel MapFromStorageToDataModel((string Key, HashEntry[] Has { if (hashEntriesDictionary.TryGetValue(property.StorageName, out var hashValue)) { + if (hashValue.IsNull) + { + property.SetValueAsObject(outputRecord!, null); + continue; + } + var typeOrNullableType = Nullable.GetUnderlyingType(property.Type) ?? property.Type; var value = Convert.ChangeType(hashValue, typeOrNullableType); property.SetValueAsObject(outputRecord!, value); diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonDynamicDataModelMapper.cs similarity index 50% rename from dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonGenericDataModelMapper.cs rename to dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonDynamicDataModelMapper.cs index 41a4efb63575..2d567df2059c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonGenericDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonDynamicDataModelMapper.cs @@ -11,45 +11,46 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Redis when using JSON. 
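// ----------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the patch): the renamed RedisJsonDynamicDataModelMapper works
// over plain Dictionary<string, object?> records instead of VectorStoreGenericDataModel<string>,
// with the key travelling inside the dictionary under the key property's model name. The property
// names below are hypothetical; such records are presumably used with collections instantiated
// with TKey = object and a VectorStoreRecordDefinition supplied via the collection options.
using System;
using System.Collections.Generic;

var record = new Dictionary<string, object?>
{
    ["HotelId"] = "hotel-1",                                                                   // key property
    ["Description"] = "A sample hotel",                                                        // data property
    ["DescriptionEmbedding"] = new ReadOnlyMemory<float>(new[] { 0.1f, 0.2f, 0.3f, 0.4f }),    // vector property
};

Console.WriteLine($"Dynamic record with key '{record["HotelId"]}' and {record.Count} fields.");
// ----------------------------------------------------------------------------------------------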
/// -internal sealed class RedisJsonGenericDataModelMapper( - IReadOnlyList properties, - JsonSerializerOptions jsonSerializerOptions) +internal class RedisJsonDynamicDataModelMapper(VectorStoreRecordModel model, JsonSerializerOptions jsonSerializerOptions) #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - : IVectorStoreRecordMapper, (string Key, JsonNode Node)> + : IVectorStoreRecordMapper, (string Key, JsonNode Node)> #pragma warning restore CS0618 { /// - public (string Key, JsonNode Node) MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) + public (string Key, JsonNode Node) MapFromDataToStorageModel(Dictionary dataModel) { var jsonObject = new JsonObject(); - foreach (var property in properties) + foreach (var property in model.Properties) { - var sourceDictionary = property is VectorStoreRecordDataPropertyModel ? dataModel.Data : dataModel.Vectors; - - // Only map properties across that actually exist in the input. - if (sourceDictionary is null || !sourceDictionary.TryGetValue(property.ModelName, out var sourceValue)) + // Key handled below, outside of the JsonNode + if (property is VectorStoreRecordKeyPropertyModel) { continue; } - // Replicate null if the property exists but is null. - if (sourceValue is null) + // Only map properties across that actually exist in the input. + if (!dataModel.TryGetValue(property.ModelName, out var sourceValue)) { - jsonObject.Add(property.StorageName, null); continue; } - jsonObject.Add(property.StorageName, JsonSerializer.SerializeToNode(sourceValue, property.Type, jsonSerializerOptions)); + // Replicate null if the property exists but is null. + jsonObject.Add(property.StorageName, sourceValue is null + ? null + : JsonSerializer.SerializeToNode(sourceValue, property.Type, jsonSerializerOptions)); } - return (dataModel.Key, jsonObject); + return ((string)dataModel[model.KeyProperty.ModelName]!, jsonObject); } /// - public VectorStoreGenericDataModel MapFromStorageToDataModel((string Key, JsonNode Node) storageModel, StorageToDataModelMapperOptions options) + public Dictionary MapFromStorageToDataModel((string Key, JsonNode Node) storageModel, StorageToDataModelMapperOptions options) { - var dataModel = new VectorStoreGenericDataModel(storageModel.Key); + var dataModel = new Dictionary + { + [model.KeyProperty.ModelName] = storageModel.Key, + }; // The redis result can be either a single object or an array with a single object in the case where we are doing an MGET. var jsonObject = storageModel.Node switch @@ -59,28 +60,23 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel((string Key _ => throw new VectorStoreRecordMappingException($"Invalid data format for document with key '{storageModel.Key}'"), }; - foreach (var property in properties) + foreach (var property in model.Properties) { - var targetDictionary = property is VectorStoreRecordDataPropertyModel ? dataModel.Data : dataModel.Vectors; - - // Only map properties across that actually exist in the input. - if (!jsonObject.TryGetPropertyValue(property.StorageName, out var sourceValue)) + // Key handled above + if (property is VectorStoreRecordKeyPropertyModel) { continue; } // Replicate null if the property exists but is null. - if (sourceValue is null) + if (!jsonObject.TryGetPropertyValue(property.StorageName, out var sourceValue)) { - targetDictionary.Add(property.ModelName, null); continue; } - // Map data and vector values. 
- if (property is VectorStoreRecordDataPropertyModel or VectorStoreRecordVectorPropertyModel) - { - targetDictionary.Add(property.ModelName, JsonSerializer.Deserialize(sourceValue, property.Type, jsonSerializerOptions)); - } + dataModel.Add(property.ModelName, sourceValue is null + ? null + : JsonSerializer.Deserialize(sourceValue, property.Type, jsonSerializerOptions)); } return dataModel; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index f00af56f7d43..a5d355fd605d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using System.Diagnostics; using System.Linq; using System.Linq.Expressions; using System.Runtime.CompilerServices; @@ -22,11 +23,13 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// Service for storing and retrieving vector records, that uses Redis JSON as the underlying storage. /// +/// The data type of the record key. Can be either , or for dynamic mapping. /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class RedisJsonVectorStoreRecordCollection : IVectorStoreRecordCollection -#pragma warning restore CA1711 // Identifiers should not have incorrect suffix +public sealed class RedisJsonVectorStoreRecordCollection : IVectorStoreRecordCollection + where TKey : notnull where TRecord : notnull +#pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; @@ -59,7 +62,7 @@ public sealed class RedisJsonVectorStoreRecordCollection : IVectorStore /// The Redis database to read/write records from. private readonly IDatabase _database; - /// The name of the collection that this will access. + /// The name of the collection that this will access. private readonly string _collectionName; /// Optional configuration options for this class. @@ -80,10 +83,10 @@ public sealed class RedisJsonVectorStoreRecordCollection : IVectorStore private readonly JsonSerializerOptions _jsonSerializerOptions; /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// The Redis database to read/write records from. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. /// Throw when parameters are invalid. public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectionName, RedisJsonVectorStoreRecordCollectionOptions? options = null) @@ -92,6 +95,11 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectio Verify.NotNull(database); Verify.NotNullOrWhiteSpace(collectionName); + if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(object)) + { + throw new NotSupportedException("Only string keys are supported (and object for dynamic mapping)."); + } + // Assign. this._database = database; this._collectionName = collectionName; @@ -110,12 +118,9 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectio // Custom Mapper. 
this._mapper = this._options.JsonNodeCustomMapper; } - else if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) + else if (typeof(TRecord) == typeof(Dictionary)) { - // Generic data model mapper. - this._mapper = (new RedisJsonGenericDataModelMapper( - this._model.Properties, - this._jsonSerializerOptions) as IVectorStoreRecordMapper)!; + this._mapper = (IVectorStoreRecordMapper)new RedisJsonDynamicDataModelMapper(this._model, this._jsonSerializerOptions); } else { @@ -207,12 +212,12 @@ await this.RunOperationAsync("FT.DROPINDEX", } /// - public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { - Verify.NotNullOrWhiteSpace(key); + var stringKey = this.GetStringKey(key); // Create Options - var maybePrefixedKey = this.PrefixKeyIfNeeded(key); + var maybePrefixedKey = this.PrefixKeyIfNeeded(stringKey); var includeVectors = options?.IncludeVectors ?? false; // Get the Redis value. @@ -248,15 +253,23 @@ await this.RunOperationAsync("FT.DROPINDEX", () => { var node = JsonSerializer.Deserialize(redisResultString, this._jsonSerializerOptions)!; - return this._mapper.MapFromStorageToDataModel((key, node), new() { IncludeVectors = includeVectors }); + return this._mapper.MapFromStorageToDataModel((stringKey, node), new() { IncludeVectors = includeVectors }); }); } /// - public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(keys); - var keysList = keys.ToList(); + +#pragma warning disable CA1851 // Possible multiple enumerations of 'IEnumerable' collection + var keysList = keys switch + { + IEnumerable k => k.ToList(), + IEnumerable k => k.Cast().ToList(), + _ => throw new UnreachableException() + }; +#pragma warning restore CA1851 // Possible multiple enumerations of 'IEnumerable' collection // Create Options var maybePrefixedKeys = keysList.Select(key => this.PrefixKeyIfNeeded(key)); @@ -304,12 +317,12 @@ public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRec } /// - public Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { - Verify.NotNullOrWhiteSpace(key); + var stringKey = this.GetStringKey(key); // Create Options - var maybePrefixedKey = this.PrefixKeyIfNeeded(key); + var maybePrefixedKey = this.PrefixKeyIfNeeded(stringKey); // Remove. 
return this.RunOperationAsync( @@ -320,7 +333,7 @@ public Task DeleteAsync(string key, CancellationToken cancellationToken = defaul } /// - public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); @@ -330,7 +343,7 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellation } /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -358,11 +371,11 @@ await this.RunOperationAsync( "$", redisJsonRecord.SerializedRecord)).ConfigureAwait(false); - return redisJsonRecord.Key; + return (TKey)(object)redisJsonRecord.Key; } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -397,7 +410,7 @@ await this.RunOperationAsync( // Return keys of upserted records. foreach (var record in redisRecords) { - yield return record.originalKey; + yield return (TKey)(object)record.originalKey; } } @@ -579,4 +592,15 @@ private async Task RunOperationAsync(string operationName, Func> o }; } } + + private string GetStringKey(TKey key) + { + Verify.NotNull(key); + + var stringKey = key as string ?? throw new UnreachableException("string key should have been validated during model building"); + + Verify.NotNullOrWhiteSpace(stringKey, nameof(key)); + + return stringKey; + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs index 24a3f342a755..5363edb4851e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs @@ -8,7 +8,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// -/// Options when creating a . +/// Options when creating a . /// public sealed class RedisJsonVectorStoreRecordCollectionOptions { diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisServiceCollectionExtensions.cs index feaa7fe541f6..8c60f9e2a4fc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisServiceCollectionExtensions.cs @@ -89,7 +89,7 @@ public static IServiceCollection AddRedisHashSetVectorStoreRecordCollection(); var selectedOptions = options ?? 
sp.GetService>(); - return new RedisHashSetVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new RedisHashSetVectorStoreRecordCollection(database, collectionName, selectedOptions); }); AddVectorizedSearch(services, serviceId); @@ -123,7 +123,7 @@ public static IServiceCollection AddRedisHashSetVectorStoreRecordCollection>(); - return new RedisHashSetVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new RedisHashSetVectorStoreRecordCollection(database, collectionName, selectedOptions); }); AddVectorizedSearch(services, serviceId); @@ -155,7 +155,7 @@ public static IServiceCollection AddRedisJsonVectorStoreRecordCollection(); var selectedOptions = options ?? sp.GetService>(); - return new RedisJsonVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new RedisJsonVectorStoreRecordCollection(database, collectionName, selectedOptions); }); AddVectorizedSearch(services, serviceId); @@ -189,7 +189,7 @@ public static IServiceCollection AddRedisJsonVectorStoreRecordCollection>(); - return new RedisJsonVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new RedisJsonVectorStoreRecordCollection(database, collectionName, selectedOptions); }); AddVectorizedSearch(services, serviceId); diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs index 6b481ad57ac0..582a90f3c476 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs @@ -58,19 +58,14 @@ public IVectorStoreRecordCollection GetCollection( } #pragma warning restore CS0618 - if (typeof(TKey) != typeof(string)) - { - throw new NotSupportedException("Only string keys are supported."); - } - if (this._options.StorageType == RedisStorageType.HashSet) { - var recordCollection = new RedisHashSetVectorStoreRecordCollection(this._database, name, new RedisHashSetVectorStoreRecordCollectionOptions() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; + var recordCollection = new RedisHashSetVectorStoreRecordCollection(this._database, name, new RedisHashSetVectorStoreRecordCollectionOptions() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; return recordCollection!; } else { - var recordCollection = new RedisJsonVectorStoreRecordCollection(this._database, name, new RedisJsonVectorStoreRecordCollectionOptions() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; + var recordCollection = new RedisJsonVectorStoreRecordCollection(this._database, name, new RedisJsonVectorStoreRecordCollectionOptions() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; return recordCollection!; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs index c9af8554c231..660bfec012c3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs @@ -10,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; public sealed class RedisVectorStoreOptions { /// - /// An optional factory to use for constructing instances, if a custom record collection is required. 
+ /// An optional factory to use for constructing instances, if a custom record collection is required. /// [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IRedisVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs index 638586d4f48b..2ee8935dc3f0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs @@ -45,34 +45,29 @@ protected override void TranslateConstant(object? value) } } - protected override void TranslateColumn(string column, MemberExpression memberExpression, Expression? parent) + protected override void GenerateColumn(string column, bool isSearchCondition = false) { + this._sql.Append('[').Append(column).Append(']'); + // "SELECT * FROM MyTable WHERE BooleanColumn;" is not supported. // "SELECT * FROM MyTable WHERE BooleanColumn = 1;" is supported. - if (memberExpression.Type == typeof(bool) - && (parent is null // Where(x => x.Bool) - || parent is UnaryExpression { NodeType: ExpressionType.Not } // Where(x => !x.Bool) - || parent is BinaryExpression { NodeType: ExpressionType.AndAlso or ExpressionType.OrElse })) // Where(x => x.Bool && other) - { - this.TranslateBinary(Expression.Equal(memberExpression, Expression.Constant(true))); - } - else + if (isSearchCondition) { - this._sql.Append('[').Append(column).Append(']'); + this._sql.Append(" = 1"); } } - protected override void TranslateContainsOverArrayColumn(Expression source, Expression item, MethodCallExpression parent) + protected override void TranslateContainsOverArrayColumn(Expression source, Expression item) => throw new NotSupportedException("Unsupported Contains expression"); - protected override void TranslateContainsOverCapturedArray(Expression source, Expression item, MethodCallExpression parent, object? value) + protected override void TranslateContainsOverCapturedArray(Expression source, Expression item, object? 
value) { if (value is not IEnumerable elements) { throw new NotSupportedException("Unsupported Contains expression"); } - this.Translate(item, parent); + this.Translate(item); this._sql.Append(" IN ("); var isFirst = true; diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index 79059665b7ca..d7e3d986223c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -427,20 +427,11 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, }; } - if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) - { - foreach (var record in records) - { - yield return ((VectorStoreGenericDataModel)(object)record).Key; - } - } - else + var keyProperty = this._model.KeyProperty; + + foreach (var record in records) { - var keyProperty = this._model.KeyProperty; - foreach (var record in records) - { - yield return (TKey)keyProperty.GetValueAsObject(record)!; - } + yield return (TKey)keyProperty.GetValueAsObject(record!)!; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs index 249778162d92..60c3c94bf688 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs @@ -20,17 +20,17 @@ internal SqliteFilterTranslator(VectorStoreRecordModel model, LambdaExpression l internal Dictionary Parameters => this._parameters; // TODO: support Contains over array fields (#10343) - protected override void TranslateContainsOverArrayColumn(Expression source, Expression item, MethodCallExpression parent) + protected override void TranslateContainsOverArrayColumn(Expression source, Expression item) => throw new NotSupportedException("Unsupported Contains expression"); - protected override void TranslateContainsOverCapturedArray(Expression source, Expression item, MethodCallExpression parent, object? value) + protected override void TranslateContainsOverCapturedArray(Expression source, Expression item, object? value) { if (value is not IEnumerable elements) { throw new NotSupportedException("Unsupported Contains expression"); } - this.Translate(item, parent); + this.Translate(item); this._sql.Append(" IN ("); var isFirst = true; diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteServiceCollectionExtensions.cs index d72b238b3729..6ec7f34a0e29 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteServiceCollectionExtensions.cs @@ -92,7 +92,7 @@ public static IServiceCollection AddSqliteVectorStoreRecordCollection>( serviceId, (sp, _) => ( - new SqliteVectorStoreRecordCollection( + new SqliteVectorStoreRecordCollection( connectionString, collectionName, options ?? 
sp.GetService>()) diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs index c4c09c92d77e..bbab9a3beeeb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs @@ -74,12 +74,7 @@ public IVectorStoreRecordCollection GetCollection( } #pragma warning restore CS0618 - if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(ulong)) - { - throw new NotSupportedException($"Only {nameof(String)} and {nameof(UInt64)} keys are supported."); - } - - var recordCollection = new SqliteVectorStoreRecordCollection( + var recordCollection = new SqliteVectorStoreRecordCollection( this._connectionString, name, new() diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreOptions.cs index cac514677f07..722e756e5faa 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreOptions.cs @@ -10,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; public sealed class SqliteVectorStoreOptions { /// - /// An optional factory to use for constructing instances, if a custom record collection is required. + /// An optional factory to use for constructing instances, if a custom record collection is required. /// [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public ISqliteVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index e59b8e720892..99d7259ab986 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -18,13 +18,13 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// /// Service for storing and retrieving vector records, that uses SQLite as the underlying storage. /// +/// The data type of the record key. Can be or , or for dynamic mapping. /// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class SqliteVectorStoreRecordCollection : - IVectorStoreRecordCollection, - IVectorStoreRecordCollection -#pragma warning restore CA1711 // Identifiers should not have incorrect +public sealed class SqliteVectorStoreRecordCollection : IVectorStoreRecordCollection + where TKey : notnull where TRecord : notnull +#pragma warning restore CA1711 // Identifiers should not have incorrect { /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; @@ -77,10 +77,10 @@ public sealed class SqliteVectorStoreRecordCollection : public string CollectionName { get; } /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// The connection string for the SQLite database represented by this . - /// The name of the collection/table that this will access. + /// The name of the collection/table that this will access. /// Optional configuration options for this class. 
public SqliteVectorStoreRecordCollection( string connectionString, @@ -91,6 +91,11 @@ public SqliteVectorStoreRecordCollection( Verify.NotNull(connectionString); Verify.NotNullOrWhiteSpace(collectionName); + if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(ulong) && typeof(TKey) != typeof(object)) + { + throw new NotSupportedException($"Only {nameof(String)} and {nameof(UInt64)} keys are supported (and object for dynamic mapping)."); + } + // Assign. this._connectionString = connectionString; this.CollectionName = collectionName; @@ -299,115 +304,125 @@ public async IAsyncEnumerable GetAsync(Expression> } } - #region Implementation of IVectorStoreRecordCollection - /// - public async Task GetAsync(ulong key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { - using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - return await this.InternalGetAsync(connection, key, options, cancellationToken).ConfigureAwait(false); - } + Verify.NotNull(key); - /// - public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - await foreach (var record in this.InternalGetBatchAsync(connection, keys, options, cancellationToken).ConfigureAwait(false)) + + var condition = new SqliteWhereEqualsCondition(this._keyStorageName, key) { - yield return record; - } - } + TableName = this._dataTableName + }; - /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) - { - using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - return await this.InternalUpsertAsync(connection, record, cancellationToken).ConfigureAwait(false); + return await this.InternalGetBatchAsync(connection, condition, options, cancellationToken) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - await foreach (var record in this.InternalUpsertBatchAsync(connection, records, cancellationToken) - .ConfigureAwait(false)) + + Verify.NotNull(keys); + + var keysList = keys.Cast().ToList(); + + Verify.True(keysList.Count > 0, "Number of provided keys should be greater than zero."); + + var condition = new SqliteWhereInCondition(this._keyStorageName, keysList) + { + TableName = this._dataTableName + }; + + await foreach (var record in this.InternalGetBatchAsync(connection, condition, options, cancellationToken).ConfigureAwait(false)) { yield return record; } } /// - public async Task DeleteAsync(ulong key, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { - using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - await this.InternalDeleteAsync(connection, key, cancellationToken).ConfigureAwait(false); - } + const string OperationName = "Upsert"; - /// - public async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) - { using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - await this.InternalDeleteBatchAsync(connection, keys, cancellationToken).ConfigureAwait(false); - } - #endregion + var storageModel = VectorStoreErrorHandler.RunModelConversion( + SqliteConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, + this.CollectionName, + OperationName, + () => this._mapper.MapFromDataToStorageModel(record)); - #region Implementation of IVectorStoreRecordCollection + var key = storageModel[this._keyStorageName]; - /// - public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) - { - using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - return await this.InternalGetAsync(connection, key, options, cancellationToken).ConfigureAwait(false); - } + Verify.NotNull(key); - /// - public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - await foreach (var record in this.InternalGetBatchAsync(connection, keys, options, cancellationToken).ConfigureAwait(false)) - { - yield return record; - } - } + var condition = new SqliteWhereEqualsCondition(this._keyStorageName, key); - /// - async Task IVectorStoreRecordCollection.UpsertAsync(TRecord record, CancellationToken cancellationToken) - { - using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - return await this.InternalUpsertAsync(connection, record, cancellationToken) + var upsertedRecordKey = await this.InternalUpsertBatchAsync(connection, [storageModel], condition, cancellationToken) + .FirstOrDefaultAsync(cancellationToken) .ConfigureAwait(false); + + return upsertedRecordKey ?? 
throw new VectorStoreOperationException("Error occurred during upsert operation."); } /// - async IAsyncEnumerable IVectorStoreRecordCollection.UpsertAsync( - IEnumerable records, - [EnumeratorCancellation] CancellationToken cancellationToken) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { + const string OperationName = "UpsertBatch"; + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - await foreach (var record in this.InternalUpsertBatchAsync(connection, records, cancellationToken) - .ConfigureAwait(false)) + + var storageModels = records.Select(record => VectorStoreErrorHandler.RunModelConversion( + SqliteConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, + this.CollectionName, + OperationName, + () => this._mapper.MapFromDataToStorageModel(record))).ToList(); + + var keys = storageModels.Select(model => model[this._keyStorageName]!).ToList(); + + var condition = new SqliteWhereInCondition(this._keyStorageName, keys); + + await foreach (var record in this.InternalUpsertBatchAsync(connection, storageModels, condition, cancellationToken).ConfigureAwait(false)) { yield return record; } } /// - public async Task DeleteAsync(string key, CancellationToken cancellationToken = default) + public async Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { + Verify.NotNull(key); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - await this.InternalDeleteAsync(connection, key, cancellationToken) - .ConfigureAwait(false); + + var condition = new SqliteWhereEqualsCondition(this._keyStorageName, key); + + await this.InternalDeleteBatchAsync(connection, condition, cancellationToken).ConfigureAwait(false); } /// - public async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { + Verify.NotNull(keys); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - await this.InternalDeleteBatchAsync(connection, keys, cancellationToken).ConfigureAwait(false); - } - #endregion + var keysList = keys.Cast().ToList(); + + Verify.True(keysList.Count > 0, "Number of provided keys should be greater than zero."); + + var condition = new SqliteWhereInCondition( + this._keyStorageName, + keysList); + + await this.InternalDeleteBatchAsync(connection, condition, cancellationToken).ConfigureAwait(false); + } /// public object? GetService(Type serviceType, object? serviceKey = null) @@ -533,44 +548,6 @@ private Task DropTableAsync(SqliteConnection connection, string tableName, return this.RunOperationAsync(OperationName, () => command.ExecuteNonQueryAsync(cancellationToken)); } - private async Task InternalGetAsync( - SqliteConnection connection, - TKey key, - GetRecordOptions? options, - CancellationToken cancellationToken) - { - Verify.NotNull(key); - - var condition = new SqliteWhereEqualsCondition(this._keyStorageName, key) - { - TableName = this._dataTableName - }; - - return await this.InternalGetBatchAsync(connection, condition, options, cancellationToken) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); - } - - private IAsyncEnumerable InternalGetBatchAsync( - SqliteConnection connection, - IEnumerable keys, - GetRecordOptions? 
options, - CancellationToken cancellationToken) - { - Verify.NotNull(keys); - - var keysList = keys.Cast().ToList(); - - Verify.True(keysList.Count > 0, "Number of provided keys should be greater than zero."); - - var condition = new SqliteWhereInCondition(this._keyStorageName, keysList) - { - TableName = this._dataTableName - }; - - return this.InternalGetBatchAsync(connection, condition, options, cancellationToken); - } - private async IAsyncEnumerable InternalGetBatchAsync( SqliteConnection connection, SqliteWhereCondition condition, @@ -621,49 +598,7 @@ private async IAsyncEnumerable InternalGetBatchAsync( } } - private async Task InternalUpsertAsync(SqliteConnection connection, TRecord record, CancellationToken cancellationToken) - { - const string OperationName = "Upsert"; - - var storageModel = VectorStoreErrorHandler.RunModelConversion( - SqliteConstants.VectorStoreSystemName, - this._collectionMetadata.VectorStoreName, - this.CollectionName, - OperationName, - () => this._mapper.MapFromDataToStorageModel(record)); - - var key = storageModel[this._keyStorageName]; - - Verify.NotNull(key); - - var condition = new SqliteWhereEqualsCondition(this._keyStorageName, key); - - var upsertedRecordKey = await this.InternalUpsertBatchAsync(connection, [storageModel], condition, cancellationToken) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); - - return upsertedRecordKey ?? throw new VectorStoreOperationException("Error occurred during upsert operation."); - } - - private IAsyncEnumerable InternalUpsertBatchAsync(SqliteConnection connection, IEnumerable records, CancellationToken cancellationToken) - { - const string OperationName = "UpsertBatch"; - - var storageModels = records.Select(record => VectorStoreErrorHandler.RunModelConversion( - SqliteConstants.VectorStoreSystemName, - this._collectionMetadata.VectorStoreName, - this.CollectionName, - OperationName, - () => this._mapper.MapFromDataToStorageModel(record))).ToList(); - - var keys = storageModels.Select(model => model[this._keyStorageName]!).ToList(); - - var condition = new SqliteWhereInCondition(this._keyStorageName, keys); - - return this.InternalUpsertBatchAsync(connection, storageModels, condition, cancellationToken); - } - - private async IAsyncEnumerable InternalUpsertBatchAsync( + private async IAsyncEnumerable InternalUpsertBatchAsync( SqliteConnection connection, List> storageModels, SqliteWhereCondition condition, @@ -716,30 +651,6 @@ private async IAsyncEnumerable InternalUpsertBatchAsync( } } - private Task InternalDeleteAsync(SqliteConnection connection, TKey key, CancellationToken cancellationToken) - { - Verify.NotNull(key); - - var condition = new SqliteWhereEqualsCondition(this._keyStorageName, key); - - return this.InternalDeleteBatchAsync(connection, condition, cancellationToken); - } - - private Task InternalDeleteBatchAsync(SqliteConnection connection, IEnumerable keys, CancellationToken cancellationToken) - { - Verify.NotNull(keys); - - var keysList = keys.Cast().ToList(); - - Verify.True(keysList.Count > 0, "Number of provided keys should be greater than zero."); - - var condition = new SqliteWhereInCondition( - this._keyStorageName, - keysList); - - return this.InternalDeleteBatchAsync(connection, condition, cancellationToken); - } - private Task InternalDeleteBatchAsync(SqliteConnection connection, SqliteWhereCondition condition, CancellationToken cancellationToken) { const string OperationName = "Delete"; diff --git 
a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs index a6cc642af863..d71c44334051 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs @@ -7,7 +7,7 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// -/// Options when creating a . +/// Options when creating a . /// public sealed class SqliteVectorStoreRecordCollectionOptions { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateDynamicDataModelMapper.cs similarity index 61% rename from dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateGenericDataModelMapper.cs rename to dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateDynamicDataModelMapper.cs index 9b981631384a..233d2e0f31c7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateGenericDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateDynamicDataModelMapper.cs @@ -13,7 +13,7 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Weaviate. /// #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete -internal sealed class WeaviateGenericDataModelMapper : IVectorStoreRecordMapper, JsonObject> +internal sealed class WeaviateDynamicDataModelMapper : IVectorStoreRecordMapper, JsonObject> #pragma warning restore CS0618 { /// The name of the Weaviate collection. @@ -26,12 +26,12 @@ internal sealed class WeaviateGenericDataModelMapper : IVectorStoreRecordMapper< private readonly JsonSerializerOptions _jsonSerializerOptions; /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// The name of the Weaviate collection /// The model /// A for serialization/deserialization of record properties. - public WeaviateGenericDataModelMapper( + public WeaviateDynamicDataModelMapper( string collectionName, VectorStoreRecordModel model, JsonSerializerOptions jsonSerializerOptions) @@ -41,48 +41,52 @@ public WeaviateGenericDataModelMapper( this._jsonSerializerOptions = jsonSerializerOptions; } - public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) + public JsonObject MapFromDataToStorageModel(Dictionary dataModel) { Verify.NotNull(dataModel); // Transform generic data model to Weaviate object model. - var weaviateObjectModel = new JsonObject - { - { WeaviateConstants.CollectionPropertyName, JsonValue.Create(this._collectionName) }, - { WeaviateConstants.ReservedKeyPropertyName, dataModel.Key }, - { WeaviateConstants.ReservedDataPropertyName, new JsonObject() }, - { WeaviateConstants.ReservedVectorPropertyName, new JsonObject() }, - }; + var keyObject = JsonSerializer.SerializeToNode(dataModel[this._model.KeyProperty.ModelName]); // Populate data properties. + var dataObject = new JsonObject(); foreach (var property in this._model.DataProperties) { - if (dataModel.Data is not null && dataModel.Data.TryGetValue(property.ModelName, out var dataValue)) + if (dataModel.TryGetValue(property.ModelName, out var dataValue)) { - weaviateObjectModel[WeaviateConstants.ReservedDataPropertyName]![property.StorageName] = dataValue is not null ? 
- JsonSerializer.SerializeToNode(dataValue, property.Type, this._jsonSerializerOptions) : - null; + dataObject[property.StorageName] = dataValue is null + ? null + : JsonSerializer.SerializeToNode(dataValue, property.Type, this._jsonSerializerOptions); } } // Populate vector properties. + var vectorObject = new JsonObject(); foreach (var property in this._model.VectorProperties) { - if (dataModel.Vectors is not null && dataModel.Vectors.TryGetValue(property.ModelName, out var vectorValue)) + if (dataModel.TryGetValue(property.ModelName, out var vectorValue)) { - weaviateObjectModel[WeaviateConstants.ReservedVectorPropertyName]![property.StorageName] = vectorValue is not null ? - JsonSerializer.SerializeToNode(vectorValue, property.Type, this._jsonSerializerOptions) : - null; + vectorObject[property.StorageName] = vectorValue is null + ? null + : JsonSerializer.SerializeToNode(vectorValue, property.Type, this._jsonSerializerOptions); } } - return weaviateObjectModel; + return new JsonObject + { + { WeaviateConstants.CollectionPropertyName, JsonValue.Create(this._collectionName) }, + { WeaviateConstants.ReservedKeyPropertyName, keyObject }, + { WeaviateConstants.ReservedDataPropertyName, dataObject }, + { WeaviateConstants.ReservedVectorPropertyName, vectorObject }, + }; } - public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) + public Dictionary MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) { Verify.NotNull(storageModel); + var result = new Dictionary(); + // Create variables to store the response properties. var key = storageModel[WeaviateConstants.ReservedKeyPropertyName]?.GetValue(); @@ -91,8 +95,7 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject st throw new VectorStoreRecordMappingException("No key property was found in the record retrieved from storage."); } - var dataProperties = new Dictionary(); - var vectorProperties = new Dictionary(); + result[this._model.KeyProperty.ModelName] = key.Value; // Populate data properties. 
foreach (var property in this._model.DataProperties) @@ -101,7 +104,7 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject st if (jsonObject is not null && jsonObject.TryGetPropertyValue(property.StorageName, out var dataValue)) { - dataProperties.Add(property.ModelName, dataValue.Deserialize(property.Type, this._jsonSerializerOptions)); + result.Add(property.ModelName, dataValue.Deserialize(property.Type, this._jsonSerializerOptions)); } } @@ -114,11 +117,11 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject st if (jsonObject is not null && jsonObject.TryGetPropertyValue(property.StorageName, out var vectorValue)) { - vectorProperties.Add(property.ModelName, vectorValue.Deserialize(property.Type, this._jsonSerializerOptions)); + result.Add(property.ModelName, vectorValue.Deserialize(property.Type, this._jsonSerializerOptions)); } } } - return new VectorStoreGenericDataModel(key.Value) { Data = dataProperties, Vectors = vectorProperties }; + return result; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs index 26f2a656a189..6e864ade70f6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs @@ -67,7 +67,7 @@ or ExpressionType.LessThan or ExpressionType.LessThanOrEqual { switch (not.Operand) { - // Special handling for !(a == b) and !(a != b) + // Special handling for !(a == b) and !(a != b), transforming to a != b and a == b respectively. case BinaryExpression { NodeType: ExpressionType.Equal or ExpressionType.NotEqual } binary: this.TranslateEqualityComparison( Expression.MakeBinary( @@ -76,9 +76,9 @@ or ExpressionType.LessThan or ExpressionType.LessThanOrEqual binary.Right)); return; - // Not over bool field (Filter => r => !r.Bool) - case MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _): - this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(false))); + // Not over bool field (r => !r.Bool) + case var negated when negated.Type == typeof(bool) && this.TryBindProperty(negated, out var property): + this.GenerateEqualityComparison(property.StorageName, false, ExpressionType.Equal); return; default: @@ -86,9 +86,9 @@ or ExpressionType.LessThan or ExpressionType.LessThanOrEqual } } - // MemberExpression is generally handled within e.g. TranslateEqual; this is used to translate direct bool inside filter (e.g. 
Filter => r => r.Bool) - case MemberExpression member when member.Type == typeof(bool) && this.TryTranslateFieldAccess(member, out _): - this.TranslateEqualityComparison(Expression.Equal(member, Expression.Constant(true))); + // Special handling for bool constant as the filter expression (r => r.Bool) + case Expression when node.Type == typeof(bool) && this.TryBindProperty(node, out var property): + this.GenerateEqualityComparison(property.StorageName, true, ExpressionType.Equal); return; case MethodCallExpression methodCall: @@ -102,75 +102,79 @@ or ExpressionType.LessThan or ExpressionType.LessThanOrEqual private void TranslateEqualityComparison(BinaryExpression binary) { - if ((this.TryTranslateFieldAccess(binary.Left, out var storagePropertyName) && TryGetConstant(binary.Right, out var value)) - || (this.TryTranslateFieldAccess(binary.Right, out storagePropertyName) && TryGetConstant(binary.Left, out value))) + if ((this.TryBindProperty(binary.Left, out var property) && TryGetConstant(binary.Right, out var value)) + || (this.TryBindProperty(binary.Right, out property) && TryGetConstant(binary.Left, out value))) { - // { path: ["intPropName"], operator: Equal, ValueInt: 8 } - this._filter - .Append("{ path: [\"") - .Append(JsonEncodedText.Encode(storagePropertyName)) - .Append("\"], operator: "); - - // Special handling for null comparisons - if (value is null) - { - if (binary.NodeType is ExpressionType.Equal or ExpressionType.NotEqual) - { - this._filter - .Append("IsNull, valueBoolean: ") - .Append(binary.NodeType is ExpressionType.Equal ? "true" : "false") - .Append(" }"); - return; - } + this.GenerateEqualityComparison(property.StorageName, value, binary.NodeType); + return; + } - throw new NotSupportedException("null value supported only with equality/inequality checks"); - } + throw new NotSupportedException("Invalid equality/comparison"); + } - // Operator - this._filter.Append(binary.NodeType switch + private void GenerateEqualityComparison(string propertyStorageName, object? value, ExpressionType nodeType) + { + // { path: ["intPropName"], operator: Equal, ValueInt: 8 } + this._filter + .Append("{ path: [\"") + .Append(JsonEncodedText.Encode(propertyStorageName)) + .Append("\"], operator: "); + + // Special handling for null comparisons + if (value is null) + { + if (nodeType is ExpressionType.Equal or ExpressionType.NotEqual) { - ExpressionType.Equal => "Equal", - ExpressionType.NotEqual => "NotEqual", + this._filter + .Append("IsNull, valueBoolean: ") + .Append(nodeType is ExpressionType.Equal ? 
"true" : "false") + .Append(" }"); + return; + } - ExpressionType.GreaterThan => "GreaterThan", - ExpressionType.GreaterThanOrEqual => "GreaterThanEqual", - ExpressionType.LessThan => "LessThan", - ExpressionType.LessThanOrEqual => "LessThanEqual", + throw new NotSupportedException("null value supported only with equality/inequality checks"); + } - _ => throw new UnreachableException() - }); + // Operator + this._filter.Append(nodeType switch + { + ExpressionType.Equal => "Equal", + ExpressionType.NotEqual => "NotEqual", - this._filter.Append(", "); + ExpressionType.GreaterThan => "GreaterThan", + ExpressionType.GreaterThanOrEqual => "GreaterThanEqual", + ExpressionType.LessThan => "LessThan", + ExpressionType.LessThanOrEqual => "LessThanEqual", - // FieldType - var type = value.GetType(); - if (Nullable.GetUnderlyingType(type) is Type underlying) - { - type = underlying; - } + _ => throw new UnreachableException() + }); - this._filter.Append(value.GetType() switch - { - Type t when t == typeof(int) || t == typeof(long) || t == typeof(short) || t == typeof(byte) => "valueInt", - Type t when t == typeof(bool) => "valueBoolean", - Type t when t == typeof(string) || t == typeof(Guid) => "valueText", - Type t when t == typeof(float) || t == typeof(double) || t == typeof(decimal) => "valueNumber", - Type t when t == typeof(DateTimeOffset) => "valueDate", + this._filter.Append(", "); - _ => throw new NotSupportedException($"Unsupported value type {type.FullName} in filter.") - }); + // FieldType + var type = value.GetType(); + if (Nullable.GetUnderlyingType(type) is Type underlying) + { + type = underlying; + } - this._filter.Append(": "); + this._filter.Append(value.GetType() switch + { + Type t when t == typeof(int) || t == typeof(long) || t == typeof(short) || t == typeof(byte) => "valueInt", + Type t when t == typeof(bool) => "valueBoolean", + Type t when t == typeof(string) || t == typeof(Guid) => "valueText", + Type t when t == typeof(float) || t == typeof(double) || t == typeof(decimal) => "valueNumber", + Type t when t == typeof(DateTimeOffset) => "valueDate", - // Value - this._filter.Append(JsonSerializer.Serialize(value)); + _ => throw new NotSupportedException($"Unsupported value type {type.FullName} in filter.") + }); - this._filter.Append('}'); + this._filter.Append(": "); - return; - } + // Value + this._filter.Append(JsonSerializer.Serialize(value)); - throw new NotSupportedException("Invalid equality/comparison"); + this._filter.Append('}'); } private void TranslateMethodCall(MethodCallExpression methodCall) @@ -206,13 +210,13 @@ private void TranslateContains(Expression source, Expression item) { // Contains over array // { path: ["stringArrayPropName"], operator: ContainsAny, valueText: ["foo"] } - if (this.TryTranslateFieldAccess(source, out var storagePropertyName) + if (this.TryBindProperty(source, out var property) && TryGetConstant(item, out var itemConstant) && itemConstant is string stringConstant) { this._filter .Append("{ path: [\"") - .Append(JsonEncodedText.Encode(storagePropertyName)) + .Append(JsonEncodedText.Encode(property.StorageName)) .Append("\"], operator: ContainsAny, valueText: [") .Append(JsonEncodedText.Encode(stringConstant)) .Append("]}"); @@ -222,21 +226,50 @@ private void TranslateContains(Expression source, Expression item) throw new NotSupportedException("Contains supported only over tag field"); } - private bool TryTranslateFieldAccess(Expression expression, [NotNullWhen(true)] out string? 
storagePropertyName) + private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out VectorStoreRecordPropertyModel? property) { - if (expression is MemberExpression memberExpression && memberExpression.Expression == this._recordParameter) + Type? convertedClrType = null; + + if (expression is UnaryExpression { NodeType: ExpressionType.Convert } unary) + { + expression = unary.Operand; + convertedClrType = unary.Type; + } + + var modelName = expression switch { - if (!this._model.PropertyMap.TryGetValue(memberExpression.Member.Name, out var property)) + // Regular member access for strongly-typed POCO binding (e.g. r => r.SomeInt == 8) + MemberExpression memberExpression when memberExpression.Expression == this._recordParameter + => memberExpression.Member.Name, + + // Dictionary lookup for weakly-typed dynamic binding (e.g. r => r["SomeInt"] == 8) + MethodCallExpression { - throw new InvalidOperationException($"Property name '{memberExpression.Member.Name}' provided as part of the filter clause is not a valid property name."); - } + Method: { Name: "get_Item", DeclaringType: var declaringType }, + Arguments: [ConstantExpression { Value: string keyName }] + } methodCall when methodCall.Object == this._recordParameter && declaringType == typeof(Dictionary) + => keyName, + + _ => null + }; + + if (modelName is null) + { + property = null; + return false; + } - storagePropertyName = property.StorageName; - return true; + if (!this._model.PropertyMap.TryGetValue(modelName, out property)) + { + throw new InvalidOperationException($"Property name '{modelName}' provided as part of the filter clause is not a valid property name."); + } + + if (convertedClrType is not null && convertedClrType != property.Type) + { + throw new InvalidCastException($"Property '{property.ModelName}' is being cast to type '{convertedClrType.Name}', but its configured type is '{property.Type.Name}'."); } - storagePropertyName = null; - return false; + return true; } private static bool TryGetConstant(Expression expression, out object? constantValue) diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateServiceCollectionExtensions.cs index 68dc9d930a2d..b0c60fba5088 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateServiceCollectionExtensions.cs @@ -73,7 +73,7 @@ public static IServiceCollection AddWeaviateVectorStoreRecordCollection var selectedHttpClient = HttpClientProvider.GetHttpClient(httpClient, sp); var selectedOptions = options ?? 
sp.GetService>(); - return new WeaviateVectorStoreRecordCollection(selectedHttpClient, collectionName, selectedOptions); + return new WeaviateVectorStoreRecordCollection(selectedHttpClient, collectionName, selectedOptions); }); AddVectorizedSearch(services, serviceId); diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs index 92f569b3875d..8b3ce214fa18 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs @@ -65,12 +65,7 @@ public IVectorStoreRecordCollection GetCollection( } #pragma warning restore CS0618 - if (typeof(TKey) != typeof(Guid)) - { - throw new NotSupportedException($"Only {nameof(Guid)} key is supported."); - } - - var recordCollection = new WeaviateVectorStoreRecordCollection( + var recordCollection = new WeaviateVectorStoreRecordCollection( this._httpClient, name, new() @@ -80,7 +75,7 @@ public IVectorStoreRecordCollection GetCollection( ApiKey = this._options.ApiKey }) as IVectorStoreRecordCollection; - return recordCollection!; + return recordCollection; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs index ae73e7989d82..fb9cdc208e57 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs @@ -10,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; public sealed class WeaviateVectorStoreOptions { /// - /// An optional factory to use for constructing instances, if a custom record collection is required. + /// An optional factory to use for constructing instances, if a custom record collection is required. /// [Obsolete("To control how collections are instantiated, extend your provider's IVectorStore implementation and override GetCollection()")] public IWeaviateVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index dbb6e0c039cf..845745225443 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using System.Diagnostics; using System.Linq; using System.Linq.Expressions; using System.Net; @@ -20,11 +21,13 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; /// /// Service for storing and retrieving vector records, that uses Weaviate as the underlying storage. /// +/// The data type of the record key. Can be either , or for dynamic mapping. /// The data model to use for adding, updating and retrieving data from storage. 
#pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public sealed class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch -#pragma warning restore CA1711 // Identifiers should not have incorrect suffix +public sealed class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCollection, IKeywordHybridSearch + where TKey : notnull where TRecord : notnull +#pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; @@ -70,14 +73,14 @@ public sealed class WeaviateVectorStoreRecordCollection : IVectorStoreR public string CollectionName { get; } /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// /// that is used to interact with Weaviate API. /// should point to remote or local cluster and API key can be configured via . /// It's also possible to provide these parameters via . /// - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. /// The collection name must start with a capital letter and contain only ASCII letters and digits. public WeaviateVectorStoreRecordCollection( @@ -89,6 +92,11 @@ public WeaviateVectorStoreRecordCollection( Verify.NotNull(httpClient); VerifyCollectionName(collectionName); + if (typeof(TKey) != typeof(Guid) && typeof(TKey) != typeof(object)) + { + throw new NotSupportedException($"Only {nameof(Guid)} key is supported (and object for dynamic mapping)."); + } + var endpoint = (options?.Endpoint ?? httpClient.BaseAddress) ?? throw new ArgumentException($"Weaviate endpoint should be provided via HttpClient.BaseAddress property or {nameof(WeaviateVectorStoreRecordCollectionOptions)} options parameter."); // Assign. @@ -164,24 +172,40 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public Task DeleteAsync(Guid key, CancellationToken cancellationToken = default) + public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { const string OperationName = "DeleteObject"; return this.RunOperationAsync(OperationName, () => { - var request = new WeaviateDeleteObjectRequest(this.CollectionName, key).Build(); + var guid = key switch + { + Guid g => g, + object o => (Guid)o, + _ => throw new UnreachableException("Guid key should have been validated during model building") + }; + + var request = new WeaviateDeleteObjectRequest(this.CollectionName, guid).Build(); return this.ExecuteRequestAsync(request, cancellationToken); }); } /// - public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) + public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { const string OperationName = "DeleteObjectBatch"; const string ContainsAnyOperator = "ContainsAny"; + Verify.NotNull(keys); + + var stringKeys = keys.Select(key => key.ToString()).ToList(); + + if (stringKeys.Count == 0) + { + return Task.CompletedTask; + } + return this.RunOperationAsync(OperationName, () => { var match = new WeaviateQueryMatch @@ -191,7 +215,7 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationTo { Operator = ContainsAnyOperator, Path = [WeaviateConstants.ReservedKeyPropertyName], - Values = keys.Select(key => key.ToString()).ToList() + Values = stringKeys! 
} }; @@ -202,14 +226,21 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationTo } /// - public Task GetAsync(Guid key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + public Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { const string OperationName = "GetCollectionObject"; return this.RunOperationAsync(OperationName, async () => { + var guid = key switch + { + Guid g => g, + object o => (Guid)o, + _ => throw new UnreachableException("Guid key should have been validated during model building") + }; + var includeVectors = options?.IncludeVectors is true; - var request = new WeaviateGetCollectionObjectRequest(this.CollectionName, key, includeVectors).Build(); + var request = new WeaviateGetCollectionObjectRequest(this.CollectionName, guid, includeVectors).Build(); var jsonObject = await this.ExecuteRequestWithNotFoundHandlingAsync(request, cancellationToken).ConfigureAwait(false); @@ -229,10 +260,12 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationTo /// public async IAsyncEnumerable GetAsync( - IEnumerable keys, + IEnumerable keys, GetRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { + Verify.NotNull(keys); + var tasks = keys.Select(key => this.GetAsync(key, options, cancellationToken)); var records = await Task.WhenAll(tasks).ConfigureAwait(false); @@ -247,27 +280,32 @@ public async IAsyncEnumerable GetAsync( } /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) - { - return await this.UpsertAsync([record], cancellationToken) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + => (await this.UpsertAsync([record], cancellationToken) .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); - } + .ConfigureAwait(false))!; /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) { const string OperationName = "UpsertCollectionObject"; - var responses = await this.RunOperationAsync(OperationName, async () => + Verify.NotNull(records); + + var jsonObjects = records.Select(record => VectorStoreErrorHandler.RunModelConversion( + WeaviateConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, + this.CollectionName, + OperationName, + () => this._mapper.MapFromDataToStorageModel(record))).ToList(); + + if (jsonObjects.Count == 0) { - var jsonObjects = records.Select(record => VectorStoreErrorHandler.RunModelConversion( - WeaviateConstants.VectorStoreSystemName, - this._collectionMetadata.VectorStoreName, - this.CollectionName, - OperationName, - () => this._mapper.MapFromDataToStorageModel(record))).ToList(); + yield break; + } + var responses = await this.RunOperationAsync(OperationName, async () => + { var request = new WeaviateUpsertCollectionObjectBatchRequest(jsonObjects).Build(); return await this.ExecuteRequestAsync>(request, cancellationToken).ConfigureAwait(false); @@ -279,7 +317,7 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, [E { if (response?.Result?.IsSuccess is true) { - yield return response.Id; + yield return (TKey)(object)response.Id; } } } @@ -485,9 +523,9 @@ private IVectorStoreRecordMapper InitializeMapper() return 
this._options.JsonObjectCustomMapper; } - if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) + if (typeof(TRecord) == typeof(Dictionary)) { - var mapper = new WeaviateGenericDataModelMapper(this.CollectionName, this._model, s_jsonSerializerOptions); + var mapper = new WeaviateDynamicDataModelMapper(this.CollectionName, this._model, s_jsonSerializerOptions); return (mapper as IVectorStoreRecordMapper)!; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs index 7dff47f28d0e..6c913120c95b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs @@ -7,7 +7,7 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; /// -/// Options when creating a . +/// Options when creating a . /// public sealed class WeaviateVectorStoreRecordCollectionOptions { diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBDynamicDataModelMapperTests.cs similarity index 71% rename from dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBGenericDataModelMapperTests.cs rename to dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBDynamicDataModelMapperTests.cs index 3d29c9140124..874d9b9d6796 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBGenericDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBDynamicDataModelMapperTests.cs @@ -12,9 +12,9 @@ namespace SemanticKernel.Connectors.MongoDB.UnitTests; /// -/// Unit tests for class. +/// Unit tests for class. 
/// -public sealed class MongoDBGenericDataModelMapperTests +public sealed class MongoDBDynamicDataModelMapperTests { private static readonly VectorStoreRecordModel s_model = BuildModel( [ @@ -49,35 +49,32 @@ public sealed class MongoDBGenericDataModelMapperTests public void MapFromDataToStorageModelMapsAllSupportedTypes() { // Arrange - var sut = new MongoDBGenericDataModelMapper(s_model); - var dataModel = new VectorStoreGenericDataModel("key") + var sut = new MongoDBDynamicDataModelMapper(s_model); + var dataModel = new Dictionary { - Data = - { - ["BoolDataProp"] = true, - ["NullableBoolDataProp"] = false, - ["StringDataProp"] = "string", - ["IntDataProp"] = 1, - ["NullableIntDataProp"] = 2, - ["LongDataProp"] = 3L, - ["NullableLongDataProp"] = 4L, - ["FloatDataProp"] = 5.0f, - ["NullableFloatDataProp"] = 6.0f, - ["DoubleDataProp"] = 7.0, - ["NullableDoubleDataProp"] = 8.0, - ["DecimalDataProp"] = 9.0m, - ["NullableDecimalDataProp"] = 10.0m, - ["DateTimeDataProp"] = new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), - ["NullableDateTimeDataProp"] = new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), - ["TagListDataProp"] = s_taglist, - }, - Vectors = - { - ["FloatVector"] = new ReadOnlyMemory(s_floatVector), - ["NullableFloatVector"] = new ReadOnlyMemory(s_floatVector), - ["DoubleVector"] = new ReadOnlyMemory(s_doubleVector), - ["NullableDoubleVector"] = new ReadOnlyMemory(s_doubleVector), - }, + ["Key"] = "key", + + ["BoolDataProp"] = true, + ["NullableBoolDataProp"] = false, + ["StringDataProp"] = "string", + ["IntDataProp"] = 1, + ["NullableIntDataProp"] = 2, + ["LongDataProp"] = 3L, + ["NullableLongDataProp"] = 4L, + ["FloatDataProp"] = 5.0f, + ["NullableFloatDataProp"] = 6.0f, + ["DoubleDataProp"] = 7.0, + ["NullableDoubleDataProp"] = 8.0, + ["DecimalDataProp"] = 9.0m, + ["NullableDecimalDataProp"] = 10.0m, + ["DateTimeDataProp"] = new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), + ["NullableDateTimeDataProp"] = new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), + ["TagListDataProp"] = s_taglist, + + ["FloatVector"] = new ReadOnlyMemory(s_floatVector), + ["NullableFloatVector"] = new ReadOnlyMemory(s_floatVector), + ["DoubleVector"] = new ReadOnlyMemory(s_doubleVector), + ["NullableDoubleVector"] = new ReadOnlyMemory(s_doubleVector), }; // Act @@ -119,20 +116,15 @@ public void MapFromDataToStorageModelMapsNullValues() new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)) ]); - var dataModel = new VectorStoreGenericDataModel("key") + var dataModel = new Dictionary { - Data = - { - ["StringDataProp"] = null, - ["NullableIntDataProp"] = null, - }, - Vectors = - { - ["NullableFloatVector"] = null, - }, + ["Key"] = "key", + ["StringDataProp"] = null, + ["NullableIntDataProp"] = null, + ["NullableFloatVector"] = null }; - var sut = new MongoDBGenericDataModelMapper(model); + var sut = new MongoDBDynamicDataModelMapper(model); // Act var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -147,7 +139,7 @@ public void MapFromDataToStorageModelMapsNullValues() public void MapFromStorageToDataModelMapsAllSupportedTypes() { // Arrange - var sut = new MongoDBGenericDataModelMapper(s_model); + var sut = new MongoDBDynamicDataModelMapper(s_model); var storageModel = new BsonDocument { ["_id"] = "key", @@ -177,27 +169,27 @@ public void MapFromStorageToDataModelMapsAllSupportedTypes() var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); // Assert - Assert.Equal("key", 
dataModel.Key); - Assert.Equal(true, dataModel.Data["BoolDataProp"]); - Assert.Equal(false, dataModel.Data["NullableBoolDataProp"]); - Assert.Equal("string", dataModel.Data["StringDataProp"]); - Assert.Equal(1, dataModel.Data["IntDataProp"]); - Assert.Equal(2, dataModel.Data["NullableIntDataProp"]); - Assert.Equal(3L, dataModel.Data["LongDataProp"]); - Assert.Equal(4L, dataModel.Data["NullableLongDataProp"]); - Assert.Equal(5.0f, dataModel.Data["FloatDataProp"]); - Assert.Equal(6.0f, dataModel.Data["NullableFloatDataProp"]); - Assert.Equal(7.0, dataModel.Data["DoubleDataProp"]); - Assert.Equal(8.0, dataModel.Data["NullableDoubleDataProp"]); - Assert.Equal(9.0m, dataModel.Data["DecimalDataProp"]); - Assert.Equal(10.0m, dataModel.Data["NullableDecimalDataProp"]); - Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), dataModel.Data["DateTimeDataProp"]); - Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), dataModel.Data["NullableDateTimeDataProp"]); - Assert.Equal(s_taglist, dataModel.Data["TagListDataProp"]); - Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel.Vectors["FloatVector"]!).ToArray()); - Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel.Vectors["NullableFloatVector"]!)!.ToArray()); - Assert.Equal(s_doubleVector, ((ReadOnlyMemory)dataModel.Vectors["DoubleVector"]!).ToArray()); - Assert.Equal(s_doubleVector, ((ReadOnlyMemory)dataModel.Vectors["NullableDoubleVector"]!)!.ToArray()); + Assert.Equal("key", dataModel["Key"]); + Assert.Equal(true, dataModel["BoolDataProp"]); + Assert.Equal(false, dataModel["NullableBoolDataProp"]); + Assert.Equal("string", dataModel["StringDataProp"]); + Assert.Equal(1, dataModel["IntDataProp"]); + Assert.Equal(2, dataModel["NullableIntDataProp"]); + Assert.Equal(3L, dataModel["LongDataProp"]); + Assert.Equal(4L, dataModel["NullableLongDataProp"]); + Assert.Equal(5.0f, dataModel["FloatDataProp"]); + Assert.Equal(6.0f, dataModel["NullableFloatDataProp"]); + Assert.Equal(7.0, dataModel["DoubleDataProp"]); + Assert.Equal(8.0, dataModel["NullableDoubleDataProp"]); + Assert.Equal(9.0m, dataModel["DecimalDataProp"]); + Assert.Equal(10.0m, dataModel["NullableDecimalDataProp"]); + Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), dataModel["DateTimeDataProp"]); + Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), dataModel["NullableDateTimeDataProp"]); + Assert.Equal(s_taglist, dataModel["TagListDataProp"]); + Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel["FloatVector"]!).ToArray()); + Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel["NullableFloatVector"]!)!.ToArray()); + Assert.Equal(s_doubleVector, ((ReadOnlyMemory)dataModel["DoubleVector"]!).ToArray()); + Assert.Equal(s_doubleVector, ((ReadOnlyMemory)dataModel["NullableDoubleVector"]!)!.ToArray()); } [Fact] @@ -220,23 +212,23 @@ public void MapFromStorageToDataModelMapsNullValues() ["NullableFloatVector"] = BsonNull.Value }; - var sut = new MongoDBGenericDataModelMapper(model); + var sut = new MongoDBDynamicDataModelMapper(model); // Act var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); // Assert - Assert.Equal("key", dataModel.Key); - Assert.Null(dataModel.Data["StringDataProp"]); - Assert.Null(dataModel.Data["NullableIntDataProp"]); - Assert.Null(dataModel.Vectors["NullableFloatVector"]); + Assert.Equal("key", dataModel["Key"]); + Assert.Null(dataModel["StringDataProp"]); + Assert.Null(dataModel["NullableIntDataProp"]); + 
Assert.Null(dataModel["NullableFloatVector"]); } [Fact] public void MapFromStorageToDataModelThrowsForMissingKey() { // Arrange - var sut = new MongoDBGenericDataModelMapper(s_model); + var sut = new MongoDBDynamicDataModelMapper(s_model); var storageModel = new BsonDocument(); // Act & Assert @@ -255,8 +247,8 @@ public void MapFromDataToStorageModelSkipsMissingProperties() new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), ]); - var dataModel = new VectorStoreGenericDataModel("key"); - var sut = new MongoDBGenericDataModelMapper(model); + var dataModel = new Dictionary { ["Key"] = "key" }; + var sut = new MongoDBDynamicDataModelMapper(model); // Act var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -283,17 +275,17 @@ public void MapFromStorageToDataModelSkipsMissingProperties() ["_id"] = "key" }; - var sut = new MongoDBGenericDataModelMapper(model); + var sut = new MongoDBDynamicDataModelMapper(model); // Act var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); // Assert - Assert.Equal("key", dataModel.Key); - Assert.False(dataModel.Data.ContainsKey("StringDataProp")); - Assert.False(dataModel.Vectors.ContainsKey("FloatVector")); + Assert.Equal("key", dataModel["Key"]); + Assert.False(dataModel.ContainsKey("StringDataProp")); + Assert.False(dataModel.ContainsKey("FloatVector")); } private static VectorStoreRecordModel BuildModel(IReadOnlyList properties) - => new MongoDBModelBuilder().Build(typeof(VectorStoreGenericDataModel), new() { Properties = properties }); + => new MongoDBModelBuilder().Build(typeof(Dictionary), new() { Properties = properties }); } diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBServiceCollectionExtensionsTests.cs index ac6f401583ac..2a68b57ec293 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBServiceCollectionExtensionsTests.cs @@ -82,11 +82,11 @@ private void AssertVectorStoreRecordCollectionCreated() var collection = serviceProvider.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs index 139844d2673d..2cd0e0ff815c 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. 
using System; +using System.Collections.Generic; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.MongoDB; @@ -18,7 +19,7 @@ public sealed class MongoDBVectorStoreCollectionSearchMappingTests { private readonly VectorStoreRecordModel _model = new MongoDBModelBuilder() .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new() { Properties = diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs index 93728bd17067..c68a93b7edd8 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs @@ -19,7 +19,7 @@ namespace SemanticKernel.Connectors.MongoDB.UnitTests; /// -/// Unit tests for class. +/// Unit tests for class. /// public sealed class MongoDBVectorStoreRecordCollectionTests { @@ -37,7 +37,7 @@ public MongoDBVectorStoreRecordCollectionTests() public void ConstructorForModelWithoutKeyThrowsException() { // Act & Assert - var exception = Assert.Throws(() => new MongoDBVectorStoreRecordCollection(this._mockMongoDatabase.Object, "collection")); + var exception = Assert.Throws(() => new MongoDBVectorStoreRecordCollection(this._mockMongoDatabase.Object, "collection")); Assert.Contains("No key property found", exception.Message); } @@ -45,7 +45,7 @@ public void ConstructorForModelWithoutKeyThrowsException() public void ConstructorWithDeclarativeModelInitializesCollection() { // Act & Assert - var collection = new MongoDBVectorStoreRecordCollection( + var collection = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -62,7 +62,7 @@ public void ConstructorWithImperativeModelInitializesCollection() }; // Act - var collection = new MongoDBVectorStoreRecordCollection( + var collection = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection", new() { VectorStoreRecordDefinition = definition }); @@ -90,7 +90,7 @@ public async Task CollectionExistsReturnsValidResultAsync(List collectio .Setup(l => l.ListCollectionNamesAsync(It.IsAny(), It.IsAny())) .ReturnsAsync(mockCursor.Object); - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, collectionName); @@ -144,7 +144,7 @@ public async Task CreateCollectionInvokesValidMethodsAsync(bool indexExists, int .Setup(l => l.ListCollectionNamesAsync(It.IsAny(), It.IsAny())) .ReturnsAsync(mockCursor.Object); - var sut = new MongoDBVectorStoreRecordCollection(this._mockMongoDatabase.Object, CollectionName); + var sut = new MongoDBVectorStoreRecordCollection(this._mockMongoDatabase.Object, CollectionName); // Act await sut.CreateCollectionAsync(); @@ -207,7 +207,7 @@ public async Task CreateCollectionIfNotExistsInvokesValidMethodsAsync() .Setup(l => l.Indexes) .Returns(mockMongoIndexManager.Object); - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, CollectionName); @@ -231,7 +231,7 @@ public async Task DeleteInvokesValidMethodsAsync() // Arrange const string RecordKey = "key"; - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -255,7 +255,7 
@@ public async Task DeleteBatchInvokesValidMethodsAsync() // Arrange List recordKeys = ["key1", "key2"]; - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -279,7 +279,7 @@ public async Task DeleteCollectionInvokesValidMethodsAsync() // Arrange const string CollectionName = "collection"; - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, CollectionName); @@ -316,7 +316,7 @@ public async Task GetReturnsValidRecordAsync() It.IsAny())) .ReturnsAsync(mockCursor.Object); - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -354,7 +354,7 @@ public async Task GetBatchReturnsValidRecordAsync() It.IsAny())) .ReturnsAsync(mockCursor.Object); - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -385,7 +385,7 @@ public async Task UpsertReturnsRecordKeyAsync() var documentSerializer = serializerRegistry.GetSerializer(); var expectedDefinition = Builders.Filter.Eq(document => document["_id"], "key"); - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -413,7 +413,7 @@ public async Task UpsertBatchReturnsRecordKeysAsync() var hotel2 = new MongoDBHotelModel("key2") { HotelName = "Test Name 2" }; var hotel3 = new MongoDBHotelModel("key3") { HotelName = "Test Name 3" }; - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -502,7 +502,7 @@ public async Task UpsertWithCustomMapperWorksCorrectlyAsync() .Setup(l => l.MapFromDataToStorageModel(It.IsAny())) .Returns(new BsonDocument { ["_id"] = "key", ["my_name"] = "Test Name" }); - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection", new() { BsonDocumentCustomMapper = mockMapper.Object }); @@ -552,7 +552,7 @@ public async Task GetWithCustomMapperWorksCorrectlyAsync() .Setup(l => l.MapFromStorageToDataModel(It.IsAny(), It.IsAny())) .Returns(new MongoDBHotelModel(RecordKey) { HotelName = "Name from mapper" }); - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection", new() { BsonDocumentCustomMapper = mockMapper.Object }); @@ -574,7 +574,7 @@ public async Task VectorizedSearchThrowsExceptionWithInvalidVectorTypeAsync(obje // Arrange this.MockCollectionForSearch(); - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -630,7 +630,7 @@ public async Task VectorizedSearchUsesValidQueryAsync( this.MockCollectionForSearch(); - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -663,7 +663,7 @@ public async Task VectorizedSearchThrowsExceptionWithNonExistentVectorPropertyNa // Arrange this.MockCollectionForSearch(); - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -679,7 +679,7 @@ public async Task 
VectorizedSearchReturnsRecordWithScoreAsync() // Arrange this.MockCollectionForSearch(); - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection"); @@ -769,7 +769,7 @@ private async Task TestUpsertWithModelAsync( new() { VectorStoreRecordDefinition = definition } : null; - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( this._mockMongoDatabase.Object, "collection", options); diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeKernelBuilderExtensionsTests.cs index 21f7b6649da5..be8db092665b 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeKernelBuilderExtensionsTests.cs @@ -84,11 +84,11 @@ private void AssertVectorStoreRecordCollectionCreated() var collection = kernel.Services.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = kernel.Services.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeServiceCollectionExtensionsTests.cs index 736cc3e3839d..b8ef24099c0c 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeServiceCollectionExtensionsTests.cs @@ -83,11 +83,11 @@ private void AssertVectorStoreRecordCollectionCreated() var collection = serviceProvider.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs index f6aa343fbaef..fd8d00dd9fb2 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs @@ -11,7 +11,7 @@ namespace SemanticKernel.Connectors.Pinecone.UnitTests; /// -/// Contains tests for the class. +/// Contains tests for the class. /// public class PineconeVectorStoreRecordCollectionTests { @@ -39,7 +39,7 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() var pineconeClient = new Sdk.PineconeClient("fake api key"); // Act. 
- var sut = new PineconeVectorStoreRecordCollection( + var sut = new PineconeVectorStoreRecordCollection( pineconeClient, TestCollectionName, new() { VectorStoreRecordDefinition = definition, VectorCustomMapper = Mock.Of>() }); diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs index 5fd2f62e4282..2b4a9eda992a 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs @@ -50,7 +50,7 @@ public void TestBuildCreateTableCommand(bool ifNotExists) ] }; - var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(VectorStoreGenericDataModel), recordDefinition); + var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(Dictionary), recordDefinition); var cmdInfo = PostgresSqlBuilder.BuildCreateTableCommand("public", "testcollection", model, ifNotExists: ifNotExists); @@ -281,7 +281,7 @@ public void TestBuildGetCommand() ] }; - var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(VectorStoreGenericDataModel), recordDefinition); + var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(Dictionary), recordDefinition); var key = 123; @@ -328,7 +328,7 @@ public void TestBuildGetBatchCommand() var keys = new List { 123, 124 }; - var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(VectorStoreGenericDataModel), recordDefinition); + var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(Dictionary), recordDefinition); // Act var cmdInfo = PostgresSqlBuilder.BuildGetBatchCommand("public", "testcollection", model, keys, includeVectors: true); diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs index 69f716fecac8..fec054b178e1 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs @@ -42,11 +42,11 @@ public async Task CreatesCollectionForGenericModelAsync() new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 100, DistanceFunction = DistanceFunction.ManhattanDistance } ] }; - var options = new PostgresVectorStoreRecordCollectionOptions>() + var options = new PostgresVectorStoreRecordCollectionOptions>() { VectorStoreRecordDefinition = recordDefinition }; - var sut = new PostgresVectorStoreRecordCollection>(this._postgresClientMock.Object, TestCollectionName, options); + var sut = new PostgresVectorStoreRecordCollection>(this._postgresClientMock.Object, TestCollectionName, options); this._postgresClientMock.Setup(x => x.DoesTableExistsAsync(TestCollectionName, this._testCancellationToken)).ReturnsAsync(false); // Act @@ -67,13 +67,13 @@ public void ThrowsForUnsupportedType() new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsIndexed = true, IsFullTextIndexed = true }, ] }; - var options = new PostgresVectorStoreRecordCollectionOptions>() + var options = new 
PostgresVectorStoreRecordCollectionOptions>() { VectorStoreRecordDefinition = recordDefinition }; // Act & Assert - Assert.Throws(() => new PostgresVectorStoreRecordCollection>(this._postgresClientMock.Object, TestCollectionName, options)); + Assert.Throws(() => new PostgresVectorStoreRecordCollection>(this._postgresClientMock.Object, TestCollectionName, options)); } [Fact] diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs index 164b4034d5ed..785d2ef28317 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs @@ -68,7 +68,7 @@ public void MapFromDataToStorageModelWithNumericKeyReturnsValidStorageModel() [Theory] [InlineData(true)] [InlineData(false)] - public void MapFromStorageToDataModelWithStringKeyReturnsValidGenericModel(bool includeVectors) + public void MapFromStorageToDataModelWithStringKeyReturnsValidDynamicModel(bool includeVectors) { // Arrange var vector = new ReadOnlyMemory([1.1f, 2.2f, 3.3f, 4.4f]); @@ -111,7 +111,7 @@ public void MapFromStorageToDataModelWithStringKeyReturnsValidGenericModel(bool [Theory] [InlineData(true)] [InlineData(false)] - public void MapFromStorageToDataModelWithNumericKeyReturnsValidGenericModel(bool includeVectors) + public void MapFromStorageToDataModelWithNumericKeyReturnsValidDynamicModel(bool includeVectors) { // Arrange var vector = new ReadOnlyMemory([1.1f, 2.2f, 3.3f, 4.4f]); diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantKernelBuilderExtensionsTests.cs index aa1d89f7b3f4..d0f253b08bd8 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantKernelBuilderExtensionsTests.cs @@ -104,11 +104,11 @@ private void AssertVectorStoreRecordCollectionCreated() var collection = kernel.Services.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = kernel.Services.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantServiceCollectionExtensionsTests.cs index 96985961aa60..8452a2d2c65d 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantServiceCollectionExtensionsTests.cs @@ -104,11 +104,11 @@ private void AssertVectorStoreRecordCollectionCreated() var collection = serviceProvider.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs 
b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs index e16da205a646..8e6d17f7bedf 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Collections.Generic; using System.Linq; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; @@ -20,7 +21,7 @@ public class QdrantVectorStoreCollectionSearchMappingTests private readonly VectorStoreRecordModel _model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors: false)) .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new() { Properties = diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs index c91abee5481e..5119874f96de 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs @@ -14,7 +14,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant.UnitTests; /// -/// Contains tests for the class. +/// Contains tests for the class. /// public class QdrantVectorStoreRecordCollectionTests { @@ -39,7 +39,7 @@ public QdrantVectorStoreRecordCollectionTests() public async Task CollectionExistsReturnsCollectionStateAsync(string collectionName, bool expectedExists) { // Arrange. - var sut = new QdrantVectorStoreRecordCollection>(this._qdrantClientMock.Object, collectionName); + var sut = new QdrantVectorStoreRecordCollection>(this._qdrantClientMock.Object, collectionName); this._qdrantClientMock .Setup(x => x.CollectionExistsAsync( @@ -58,7 +58,7 @@ public async Task CollectionExistsReturnsCollectionStateAsync(string collectionN public async Task CanCreateCollectionAsync() { // Arrange. - var sut = new QdrantVectorStoreRecordCollection>(this._qdrantClientMock.Object, TestCollectionName); + var sut = new QdrantVectorStoreRecordCollection>(this._qdrantClientMock.Object, TestCollectionName); this._qdrantClientMock .Setup(x => x.CreateCollectionAsync( @@ -119,7 +119,7 @@ public async Task CanCreateCollectionAsync() public async Task CanDeleteCollectionAsync() { // Arrange. - var sut = new QdrantVectorStoreRecordCollection>(this._qdrantClientMock.Object, TestCollectionName); + var sut = new QdrantVectorStoreRecordCollection>(this._qdrantClientMock.Object, TestCollectionName); this._qdrantClientMock .Setup(x => x.DeleteCollectionAsync( @@ -270,7 +270,7 @@ public async Task CanGetRecordWithCustomMapperAsync() .Returns(CreateModel(UlongTestRecordKey1, true)); // Arrange target with custom mapper. - var sut = new QdrantVectorStoreRecordCollection>( + var sut = new QdrantVectorStoreRecordCollection>( this._qdrantClientMock.Object, TestCollectionName, new() @@ -501,7 +501,7 @@ public async Task CanUpsertRecordWithCustomMapperAsync() .Returns(pointStruct); // Arrange target with custom mapper. - var sut = new QdrantVectorStoreRecordCollection>( + var sut = new QdrantVectorStoreRecordCollection>( this._qdrantClientMock.Object, TestCollectionName, new() @@ -542,7 +542,7 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() }; // Act. 
- var sut = new QdrantVectorStoreRecordCollection>( + var sut = new QdrantVectorStoreRecordCollection>( this._qdrantClientMock.Object, TestCollectionName, new() { VectorStoreRecordDefinition = definition, PointStructCustomMapper = Mock.Of, PointStruct>>() }); @@ -769,7 +769,7 @@ private static ScoredPoint CreateScoredPoint(bool hasNamedVectors, TKey re private IVectorStoreRecordCollection> CreateRecordCollection(bool useDefinition, bool hasNamedVectors) where T : notnull { - var store = new QdrantVectorStoreRecordCollection>( + var store = new QdrantVectorStoreRecordCollection>( this._qdrantClientMock.Object, TestCollectionName, new() diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreTests.cs index 9230b5f31fe0..19bc64df4334 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreTests.cs @@ -38,7 +38,7 @@ public void GetCollectionReturnsCollection() // Assert. Assert.NotNull(actual); - Assert.IsType>>(actual); + Assert.IsType>>(actual); } #pragma warning disable CS0618 // IQdrantVectorStoreRecordCollectionFactory is obsolete diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetDynamicDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetDynamicDataModelMapperTests.cs new file mode 100644 index 000000000000..2a51b5a9d4eb --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetDynamicDataModelMapperTests.cs @@ -0,0 +1,212 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; +using StackExchange.Redis; +using Xunit; + +namespace Microsoft.SemanticKernel.Connectors.Redis.UnitTests; + +/// +/// Contains dynamic mapping tests for the class. +/// +public class RedisHashSetDynamicDataModelMapperTests +{ + private static readonly VectorStoreRecordModel s_model = BuildModel(RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition); + + private static readonly float[] s_floatVector = new float[] { 1.0f, 2.0f, 3.0f, 4.0f }; + private static readonly double[] s_doubleVector = new double[] { 5.0d, 6.0d, 7.0d, 8.0d }; + + [Fact] + public void MapFromDataToStorageModelMapsAllSupportedTypes() + { + // Arrange. + var sut = new RedisHashSetVectorStoreRecordMapper>(s_model); + var dataModel = new Dictionary + { + ["Key"] = "key", + + ["StringData"] = "data 1", + ["IntData"] = 1, + ["UIntData"] = 2u, + ["LongData"] = 3L, + ["ULongData"] = 4ul, + ["DoubleData"] = 5.5d, + ["FloatData"] = 6.6f, + ["BoolData"] = true, + ["NullableIntData"] = 7, + ["NullableUIntData"] = 8u, + ["NullableLongData"] = 9L, + ["NullableULongData"] = 10ul, + ["NullableDoubleData"] = 11.1d, + ["NullableFloatData"] = 12.2f, + ["NullableBoolData"] = false, + + ["FloatVector"] = new ReadOnlyMemory(s_floatVector), + ["DoubleVector"] = new ReadOnlyMemory(s_doubleVector), + }; + + // Act. 
+ var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal("key", storageModel.Key); + RedisHashSetVectorStoreMappingTestHelpers.VerifyHashSet(storageModel.HashEntries); + } + + [Fact] + public void MapFromDataToStorageModelMapsNullValues() + { + // Arrange + VectorStoreRecordModel model = BuildModel(new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringData", typeof(string)) { StoragePropertyName = "storage_string_data" }, + new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory?)), + }, + }); + + var dataModel = new Dictionary + { + ["Key"] = "key", + ["StringData"] = null, + ["NullableIntData"] = null, + ["FloatVector"] = null, + }; + + var sut = new RedisHashSetVectorStoreRecordMapper>(model); + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal("key", storageModel.Key); + + Assert.Equal("storage_string_data", storageModel.HashEntries[0].Name.ToString()); + Assert.True(storageModel.HashEntries[0].Value.IsNull); + + Assert.Equal("NullableIntData", storageModel.HashEntries[1].Name.ToString()); + Assert.True(storageModel.HashEntries[1].Value.IsNull); + } + + [Fact] + public void MapFromStorageToDataModelMapsAllSupportedTypes() + { + // Arrange. + var hashSet = RedisHashSetVectorStoreMappingTestHelpers.CreateHashSet(); + var sut = new RedisHashSetVectorStoreRecordMapper>(s_model); + + // Act. + var dataModel = sut.MapFromStorageToDataModel(("key", hashSet), new() { IncludeVectors = true }); + + // Assert. + Assert.Equal("key", dataModel["Key"]); + Assert.Equal("data 1", dataModel["StringData"]); + Assert.Equal(1, dataModel["IntData"]); + Assert.Equal(2u, dataModel["UIntData"]); + Assert.Equal(3L, dataModel["LongData"]); + Assert.Equal(4ul, dataModel["ULongData"]); + Assert.Equal(5.5d, dataModel["DoubleData"]); + Assert.Equal(6.6f, dataModel["FloatData"]); + Assert.True((bool)dataModel["BoolData"]!); + Assert.Equal(7, dataModel["NullableIntData"]); + Assert.Equal(8u, dataModel["NullableUIntData"]); + Assert.Equal(9L, dataModel["NullableLongData"]); + Assert.Equal(10ul, dataModel["NullableULongData"]); + Assert.Equal(11.1d, dataModel["NullableDoubleData"]); + Assert.Equal(12.2f, dataModel["NullableFloatData"]); + Assert.False((bool)dataModel["NullableBoolData"]!); + Assert.Equal(new float[] { 1, 2, 3, 4 }, ((ReadOnlyMemory)dataModel["FloatVector"]!).ToArray()); + Assert.Equal(new double[] { 5, 6, 7, 8 }, ((ReadOnlyMemory)dataModel["DoubleVector"]!).ToArray()); + } + + [Fact] + public void MapFromStorageToDataModelMapsNullValues() + { + // Arrange + var model = BuildModel(new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringData", typeof(string)) { StoragePropertyName = "storage_string_data" }, + new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory?)), + } + }); + + var hashSet = new HashEntry[] + { + new("storage_string_data", RedisValue.Null), + new("NullableIntData", RedisValue.Null), + new("FloatVector", RedisValue.Null), + }; + + var sut = new RedisHashSetVectorStoreRecordMapper>(model); + + // Act + var dataModel = sut.MapFromStorageToDataModel(("key", hashSet), new() { IncludeVectors = true }); + + // Assert + Assert.Equal("key", dataModel["Key"]); + 
Assert.Null(dataModel["StringData"]); + Assert.Null(dataModel["NullableIntData"]); + Assert.Null(dataModel["FloatVector"]); + } + + [Fact] + public void MapFromDataToStorageModelSkipsMissingProperties() + { + // Arrange. + var model = BuildModel(new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringData", typeof(string)) { StoragePropertyName = "storage_string_data" }, + new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory?)), + } + }); + + var sut = new RedisHashSetVectorStoreRecordMapper>(model); + var dataModel = new Dictionary { ["Key"] = "key" }; + + // Act. + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal("key", storageModel.Key); + + Assert.Equal("storage_string_data", storageModel.HashEntries[0].Name.ToString()); + Assert.True(storageModel.HashEntries[0].Value.IsNull); + + Assert.Equal("NullableIntData", storageModel.HashEntries[1].Name.ToString()); + Assert.True(storageModel.HashEntries[1].Value.IsNull); + } + + [Fact] + public void MapFromStorageToDataModelSkipsMissingProperties() + { + // Arrange. + var hashSet = Array.Empty(); + + var sut = new RedisHashSetVectorStoreRecordMapper>(s_model); + + // Act. + var dataModel = sut.MapFromStorageToDataModel(("key", hashSet), new() { IncludeVectors = true }); + + // Assert. + Assert.Single(dataModel); + Assert.Equal("key", dataModel["Key"]); + } + + private static VectorStoreRecordModel BuildModel(VectorStoreRecordDefinition definition) + => new VectorStoreRecordModelBuilder(RedisHashSetVectorStoreRecordCollection>.ModelBuildingOptions) + .Build(typeof(Dictionary), definition); +} diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetGenericDataModelMapperTests.cs deleted file mode 100644 index ce0d0c9767d0..000000000000 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetGenericDataModelMapperTests.cs +++ /dev/null @@ -1,206 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using Microsoft.Extensions.VectorData; -using StackExchange.Redis; -using Xunit; - -namespace Microsoft.SemanticKernel.Connectors.Redis.UnitTests; - -/// -/// Contains tests for the class. -/// -public class RedisHashSetGenericDataModelMapperTests -{ - private static readonly float[] s_floatVector = new float[] { 1.0f, 2.0f, 3.0f, 4.0f }; - private static readonly double[] s_doubleVector = new double[] { 5.0d, 6.0d, 7.0d, 8.0d }; - - [Fact] - public void MapFromDataToStorageModelMapsAllSupportedTypes() - { - // Arrange. 
- var sut = new RedisHashSetGenericDataModelMapper(RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition.Properties); - var dataModel = new VectorStoreGenericDataModel("key") - { - Data = - { - ["StringData"] = "data 1", - ["IntData"] = 1, - ["UIntData"] = 2u, - ["LongData"] = 3L, - ["ULongData"] = 4ul, - ["DoubleData"] = 5.5d, - ["FloatData"] = 6.6f, - ["BoolData"] = true, - ["NullableIntData"] = 7, - ["NullableUIntData"] = 8u, - ["NullableLongData"] = 9L, - ["NullableULongData"] = 10ul, - ["NullableDoubleData"] = 11.1d, - ["NullableFloatData"] = 12.2f, - ["NullableBoolData"] = false, - }, - Vectors = - { - ["FloatVector"] = new ReadOnlyMemory(s_floatVector), - ["DoubleVector"] = new ReadOnlyMemory(s_doubleVector), - }, - }; - - // Act. - var storageModel = sut.MapFromDataToStorageModel(dataModel); - - // Assert - Assert.Equal("key", storageModel.Key); - RedisHashSetVectorStoreMappingTestHelpers.VerifyHashSet(storageModel.HashEntries); - } - - [Fact] - public void MapFromDataToStorageModelMapsNullValues() - { - // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringData", typeof(string)), - new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory?)), - }, - }; - - var dataModel = new VectorStoreGenericDataModel("key") - { - Data = - { - ["StringData"] = null, - ["NullableIntData"] = null, - }, - Vectors = - { - ["FloatVector"] = null, - }, - }; - - var sut = new RedisHashSetGenericDataModelMapper(RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition.Properties); - - // Act - var storageModel = sut.MapFromDataToStorageModel(dataModel); - - // Assert - Assert.Equal("key", storageModel.Key); - - Assert.Equal("storage_string_data", storageModel.HashEntries[0].Name.ToString()); - Assert.True(storageModel.HashEntries[0].Value.IsNull); - - Assert.Equal("NullableIntData", storageModel.HashEntries[1].Name.ToString()); - Assert.True(storageModel.HashEntries[1].Value.IsNull); - - Assert.Equal("FloatVector", storageModel.HashEntries[2].Name.ToString()); - Assert.True(storageModel.HashEntries[2].Value.IsNull); - } - - [Fact] - public void MapFromStorageToDataModelMapsAllSupportedTypes() - { - // Arrange. - var hashSet = RedisHashSetVectorStoreMappingTestHelpers.CreateHashSet(); - - var sut = new RedisHashSetGenericDataModelMapper(RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition.Properties); - - // Act. - var dataModel = sut.MapFromStorageToDataModel(("key", hashSet), new() { IncludeVectors = true }); - - // Assert. 
- Assert.Equal("key", dataModel.Key); - Assert.Equal("data 1", dataModel.Data["StringData"]); - Assert.Equal(1, dataModel.Data["IntData"]); - Assert.Equal(2u, dataModel.Data["UIntData"]); - Assert.Equal(3L, dataModel.Data["LongData"]); - Assert.Equal(4ul, dataModel.Data["ULongData"]); - Assert.Equal(5.5d, dataModel.Data["DoubleData"]); - Assert.Equal(6.6f, dataModel.Data["FloatData"]); - Assert.True((bool)dataModel.Data["BoolData"]!); - Assert.Equal(7, dataModel.Data["NullableIntData"]); - Assert.Equal(8u, dataModel.Data["NullableUIntData"]); - Assert.Equal(9L, dataModel.Data["NullableLongData"]); - Assert.Equal(10ul, dataModel.Data["NullableULongData"]); - Assert.Equal(11.1d, dataModel.Data["NullableDoubleData"]); - Assert.Equal(12.2f, dataModel.Data["NullableFloatData"]); - Assert.False((bool)dataModel.Data["NullableBoolData"]!); - Assert.Equal(new float[] { 1, 2, 3, 4 }, ((ReadOnlyMemory)dataModel.Vectors["FloatVector"]!).ToArray()); - Assert.Equal(new double[] { 5, 6, 7, 8 }, ((ReadOnlyMemory)dataModel.Vectors["DoubleVector"]!).ToArray()); - } - - [Fact] - public void MapFromStorageToDataModelMapsNullValues() - { - // Arrange - VectorStoreRecordDefinition vectorStoreRecordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringData", typeof(string)), - new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory?)), - }, - }; - - var hashSet = new HashEntry[] - { - new("storage_string_data", RedisValue.Null), - new("NullableIntData", RedisValue.Null), - new("FloatVector", RedisValue.Null), - }; - - var sut = new RedisHashSetGenericDataModelMapper(RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition.Properties); - - // Act - var dataModel = sut.MapFromStorageToDataModel(("key", hashSet), new() { IncludeVectors = true }); - - // Assert - Assert.Equal("key", dataModel.Key); - Assert.Null(dataModel.Data["StringData"]); - Assert.Null(dataModel.Data["NullableIntData"]); - Assert.Null(dataModel.Vectors["FloatVector"]); - } - - [Fact] - public void MapFromDataToStorageModelSkipsMissingProperties() - { - // Arrange. - var sut = new RedisHashSetGenericDataModelMapper(RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition.Properties); - var dataModel = new VectorStoreGenericDataModel("key") - { - Data = { }, - Vectors = { }, - }; - - // Act. - var storageModel = sut.MapFromDataToStorageModel(dataModel); - - // Assert - Assert.Equal("key", storageModel.Key); - Assert.Empty(storageModel.HashEntries); - } - - [Fact] - public void MapFromStorageToDataModelSkipsMissingProperties() - { - // Arrange. - var hashSet = Array.Empty(); - - var sut = new RedisHashSetGenericDataModelMapper(RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition.Properties); - - // Act. - var dataModel = sut.MapFromStorageToDataModel(("key", hashSet), new() { IncludeVectors = true }); - - // Assert. 
- Assert.Equal("key", dataModel.Key); - Assert.Empty(dataModel.Data); - Assert.Empty(dataModel.Vectors); - } -} diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs index 9d46a7c643b2..09d34ab30b5b 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -15,7 +15,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis.UnitTests; /// -/// Contains tests for the class. +/// Contains tests for the class. /// public class RedisHashSetVectorStoreRecordCollectionTests { @@ -48,7 +48,7 @@ public async Task CollectionExistsReturnsCollectionStateAsync(string collectionN { SetupExecuteMock(this._redisDatabaseMock, new RedisServerException("Unknown index name")); } - var sut = new RedisHashSetVectorStoreRecordCollection( + var sut = new RedisHashSetVectorStoreRecordCollection( this._redisDatabaseMock.Object, collectionName); @@ -71,7 +71,7 @@ public async Task CanCreateCollectionAsync() { // Arrange. SetupExecuteMock(this._redisDatabaseMock, string.Empty); - var sut = new RedisHashSetVectorStoreRecordCollection(this._redisDatabaseMock.Object, TestCollectionName); + var sut = new RedisHashSetVectorStoreRecordCollection(this._redisDatabaseMock.Object, TestCollectionName); // Act. await sut.CreateCollectionAsync(); @@ -259,7 +259,7 @@ public async Task CanGetRecordWithCustomMapperAsync() .Returns(CreateModel(TestRecordKey1, true)); // Arrange target with custom mapper. - var sut = new RedisHashSetVectorStoreRecordCollection( + var sut = new RedisHashSetVectorStoreRecordCollection( this._redisDatabaseMock.Object, TestCollectionName, new() @@ -399,7 +399,7 @@ public async Task CanUpsertRecordWithCustomMapperAsync() .Returns((TestRecordKey1, hashEntries)); // Arrange target with custom mapper. - var sut = new RedisHashSetVectorStoreRecordCollection( + var sut = new RedisHashSetVectorStoreRecordCollection( this._redisDatabaseMock.Object, TestCollectionName, new() @@ -537,16 +537,16 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() }; // Act. 
- var sut = new RedisHashSetVectorStoreRecordCollection( + var sut = new RedisHashSetVectorStoreRecordCollection( this._redisDatabaseMock.Object, TestCollectionName, new() { VectorStoreRecordDefinition = definition, HashEntriesCustomMapper = Mock.Of>() }); } #pragma warning restore CS0618 - private RedisHashSetVectorStoreRecordCollection CreateRecordCollection(bool useDefinition) + private RedisHashSetVectorStoreRecordCollection CreateRecordCollection(bool useDefinition) { - return new RedisHashSetVectorStoreRecordCollection( + return new RedisHashSetVectorStoreRecordCollection( this._redisDatabaseMock.Object, TestCollectionName, new() diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs index d96f0e31dcda..67e756cdda1f 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs @@ -15,7 +15,7 @@ namespace SemanticKernel.Connectors.Redis.UnitTests; public sealed class RedisHashSetVectorStoreRecordMapperTests { private static readonly VectorStoreRecordModel s_model - = new VectorStoreRecordModelBuilder(RedisHashSetVectorStoreRecordCollection.ModelBuildingOptions) + = new VectorStoreRecordModelBuilder(RedisHashSetVectorStoreRecordCollection.ModelBuildingOptions) .Build(typeof(AllTypesModel), RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition); [Fact] diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonDynamicDataModelMapperTests.cs similarity index 63% rename from dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonGenericDataModelMapperTests.cs rename to dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonDynamicDataModelMapperTests.cs index 68d5b3921853..5c6babf610ee 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonGenericDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonDynamicDataModelMapperTests.cs @@ -12,16 +12,16 @@ namespace Microsoft.SemanticKernel.Connectors.Redis.UnitTests; /// -/// Contains tests for the class. +/// Contains tests for the class. /// -public class RedisJsonGenericDataModelMapperTests +public class RedisJsonDynamicDataModelMapperTests { private static readonly float[] s_floatVector = new float[] { 1.0f, 2.0f, 3.0f, 4.0f }; private static readonly VectorStoreRecordModel s_model - = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection>.ModelBuildingOptions) + = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection>.ModelBuildingOptions) .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new() { Properties = @@ -39,20 +39,15 @@ private static readonly VectorStoreRecordModel s_model public void MapFromDataToStorageModelMapsAllSupportedTypes() { // Arrange. 
- var sut = new RedisJsonGenericDataModelMapper(s_model.Properties, JsonSerializerOptions.Default); - var dataModel = new VectorStoreGenericDataModel("key") + var sut = new RedisJsonDynamicDataModelMapper(s_model, JsonSerializerOptions.Default); + var dataModel = new Dictionary { - Data = - { - ["StringData"] = "data 1", - ["IntData"] = 1, - ["NullableIntData"] = 2, - ["ComplexObjectData"] = new ComplexObject { Prop1 = "prop 1", Prop2 = "prop 2" }, - }, - Vectors = - { - ["FloatVector"] = new ReadOnlyMemory(s_floatVector), - }, + ["Key"] = "key", + ["StringData"] = "data 1", + ["IntData"] = 1, + ["NullableIntData"] = 2, + ["ComplexObjectData"] = new ComplexObject { Prop1 = "prop 1", Prop2 = "prop 2" }, + ["FloatVector"] = new ReadOnlyMemory(s_floatVector) }; // Act. @@ -71,20 +66,15 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes() public void MapFromDataToStorageModelMapsNullValues() { // Arrange. - var sut = new RedisJsonGenericDataModelMapper(s_model.Properties, JsonSerializerOptions.Default); - var dataModel = new VectorStoreGenericDataModel("key") + var sut = new RedisJsonDynamicDataModelMapper(s_model, JsonSerializerOptions.Default); + var dataModel = new Dictionary { - Data = - { - ["StringData"] = null, - ["IntData"] = null, - ["NullableIntData"] = null, - ["ComplexObjectData"] = null, - }, - Vectors = - { - ["FloatVector"] = null, - }, + ["Key"] = "key", + ["StringData"] = null, + ["IntData"] = null, + ["NullableIntData"] = null, + ["ComplexObjectData"] = null, + ["FloatVector"] = null, }; // Act. @@ -103,7 +93,7 @@ public void MapFromDataToStorageModelMapsNullValues() public void MapFromStorageToDataModelMapsAllSupportedTypes() { // Arrange. - var sut = new RedisJsonGenericDataModelMapper(s_model.Properties, JsonSerializerOptions.Default); + var sut = new RedisJsonDynamicDataModelMapper(s_model, JsonSerializerOptions.Default); var storageModel = new JsonObject { { "StringData", "data 1" }, @@ -117,19 +107,19 @@ public void MapFromStorageToDataModelMapsAllSupportedTypes() var dataModel = sut.MapFromStorageToDataModel(("key", storageModel), new() { IncludeVectors = true }); // Assert. - Assert.Equal("key", dataModel.Key); - Assert.Equal("data 1", dataModel.Data["StringData"]); - Assert.Equal(1, dataModel.Data["IntData"]); - Assert.Equal(2, dataModel.Data["NullableIntData"]); - Assert.Equal("prop 1", ((ComplexObject)dataModel.Data["ComplexObjectData"]!).Prop1); - Assert.Equal(new float[] { 1, 2, 3, 4 }, ((ReadOnlyMemory)dataModel.Vectors["FloatVector"]!).ToArray()); + Assert.Equal("key", dataModel["Key"]); + Assert.Equal("data 1", dataModel["StringData"]); + Assert.Equal(1, dataModel["IntData"]); + Assert.Equal(2, dataModel["NullableIntData"]); + Assert.Equal("prop 1", ((ComplexObject)dataModel["ComplexObjectData"]!).Prop1); + Assert.Equal(new float[] { 1, 2, 3, 4 }, ((ReadOnlyMemory)dataModel["FloatVector"]!).ToArray()); } [Fact] public void MapFromStorageToDataModelMapsNullValues() { // Arrange. - var sut = new RedisJsonGenericDataModelMapper(s_model.Properties, JsonSerializerOptions.Default); + var sut = new RedisJsonDynamicDataModelMapper(s_model, JsonSerializerOptions.Default); var storageModel = new JsonObject { { "StringData", null }, @@ -143,24 +133,20 @@ public void MapFromStorageToDataModelMapsNullValues() var dataModel = sut.MapFromStorageToDataModel(("key", storageModel), new() { IncludeVectors = true }); // Assert. 
- Assert.Equal("key", dataModel.Key); - Assert.Null(dataModel.Data["StringData"]); - Assert.Null(dataModel.Data["IntData"]); - Assert.Null(dataModel.Data["NullableIntData"]); - Assert.Null(dataModel.Data["ComplexObjectData"]); - Assert.Null(dataModel.Vectors["FloatVector"]); + Assert.Equal("key", dataModel["Key"]); + Assert.Null(dataModel["StringData"]); + Assert.Null(dataModel["IntData"]); + Assert.Null(dataModel["NullableIntData"]); + Assert.Null(dataModel["ComplexObjectData"]); + Assert.Null(dataModel["FloatVector"]); } [Fact] public void MapFromDataToStorageModelSkipsMissingProperties() { // Arrange. - var sut = new RedisJsonGenericDataModelMapper(s_model.Properties, JsonSerializerOptions.Default); - var dataModel = new VectorStoreGenericDataModel("key") - { - Data = { }, - Vectors = { }, - }; + var sut = new RedisJsonDynamicDataModelMapper(s_model, JsonSerializerOptions.Default); + var dataModel = new Dictionary { ["Key"] = "key" }; // Act. var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -176,15 +162,14 @@ public void MapFromStorageToDataModelSkipsMissingProperties() // Arrange. var storageModel = new JsonObject(); - var sut = new RedisJsonGenericDataModelMapper(s_model.Properties, JsonSerializerOptions.Default); + var sut = new RedisJsonDynamicDataModelMapper(s_model, JsonSerializerOptions.Default); // Act. var dataModel = sut.MapFromStorageToDataModel(("key", storageModel), new() { IncludeVectors = true }); // Assert. - Assert.Equal("key", dataModel.Key); - Assert.Empty(dataModel.Data); - Assert.Empty(dataModel.Vectors); + Assert.Equal("key", dataModel["Key"]); + Assert.Single(dataModel); } private sealed class ComplexObject diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs index d57066cca612..69284fe66f90 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs @@ -19,7 +19,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis.UnitTests; #pragma warning disable CS0618 // VectorSearchFilter is obsolete /// -/// Contains tests for the class. +/// Contains tests for the class. /// public class RedisJsonVectorStoreRecordCollectionTests { @@ -52,7 +52,7 @@ public async Task CollectionExistsReturnsCollectionStateAsync(string collectionN { SetupExecuteMock(this._redisDatabaseMock, new RedisServerException("Unknown index name")); } - var sut = new RedisJsonVectorStoreRecordCollection( + var sut = new RedisJsonVectorStoreRecordCollection( this._redisDatabaseMock.Object, collectionName); @@ -271,7 +271,7 @@ public async Task CanGetRecordWithCustomMapperAsync() .Returns(CreateModel(TestRecordKey1, true)); // Arrange target with custom mapper. - var sut = new RedisJsonVectorStoreRecordCollection( + var sut = new RedisJsonVectorStoreRecordCollection( this._redisDatabaseMock.Object, TestCollectionName, new() @@ -422,7 +422,7 @@ public async Task CanUpsertRecordWithCustomMapperAsync() .Returns((TestRecordKey1, JsonNode.Parse(jsonNode)!)); // Arrange target with custom mapper. 
- var sut = new RedisJsonVectorStoreRecordCollection( + var sut = new RedisJsonVectorStoreRecordCollection( this._redisDatabaseMock.Object, TestCollectionName, new() @@ -513,9 +513,9 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition) Assert.Equal(new float[] { 1, 2, 3, 4 }, results.First().Record.Vector2!.Value.ToArray()); } - private RedisJsonVectorStoreRecordCollection CreateRecordCollection(bool useDefinition, bool useCustomJsonSerializerOptions = false) + private RedisJsonVectorStoreRecordCollection CreateRecordCollection(bool useDefinition, bool useCustomJsonSerializerOptions = false) { - return new RedisJsonVectorStoreRecordCollection( + return new RedisJsonVectorStoreRecordCollection( this._redisDatabaseMock.Object, TestCollectionName, new() diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs index bb1b0889bd6e..eb296da0dfbe 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs @@ -20,7 +20,7 @@ public sealed class RedisJsonVectorStoreRecordMapperTests public void MapsAllFieldsFromDataToStorageModel() { // Arrange. - var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) + var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) .Build(typeof(MultiPropsModel), null, JsonSerializerOptions.Default); var sut = new RedisJsonVectorStoreRecordMapper(model, JsonSerializerOptions.Default); @@ -42,7 +42,7 @@ public void MapsAllFieldsFromDataToStorageModelWithCustomSerializerOptions() { // Arrange. var jsonSerializerOptions = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; - var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) + var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) .Build(typeof(MultiPropsModel), null, jsonSerializerOptions); var sut = new RedisJsonVectorStoreRecordMapper(model, jsonSerializerOptions); @@ -63,7 +63,7 @@ public void MapsAllFieldsFromDataToStorageModelWithCustomSerializerOptions() public void MapsAllFieldsFromStorageToDataModel() { // Arrange. - var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) + var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) .Build(typeof(MultiPropsModel), null, JsonSerializerOptions.Default); var sut = new RedisJsonVectorStoreRecordMapper(model, JsonSerializerOptions.Default); @@ -89,7 +89,7 @@ public void MapsAllFieldsFromStorageToDataModelWithCustomSerializerOptions() { // Arrange. 
var jsonSerializerOptions = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; - var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) + var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) .Build(typeof(MultiPropsModel), null, jsonSerializerOptions); var sut = new RedisJsonVectorStoreRecordMapper(model, jsonSerializerOptions); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisKernelBuilderExtensionsTests.cs index 02e11b785a99..a66beeb7183a 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisKernelBuilderExtensionsTests.cs @@ -74,7 +74,7 @@ private void AssertHashSetVectorStoreRecordCollectionCreated() where TR var kernel = this._kernelBuilder.Build(); var collection = kernel.Services.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); } private void AssertJsonVectorStoreRecordCollectionCreated() where TRecord : notnull @@ -82,7 +82,7 @@ private void AssertJsonVectorStoreRecordCollectionCreated() where TReco var kernel = this._kernelBuilder.Build(); var collection = kernel.Services.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); } #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisServiceCollectionExtensionsTests.cs index 0f7322d0f7ce..c4cc03b79d68 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisServiceCollectionExtensionsTests.cs @@ -74,7 +74,7 @@ private void AssertHashSetVectorStoreRecordCollectionCreated() where TR var serviceProvider = this._serviceCollection.BuildServiceProvider(); var collection = serviceProvider.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); } private void AssertJsonVectorStoreRecordCollectionCreated() where TRecord : notnull @@ -82,7 +82,7 @@ private void AssertJsonVectorStoreRecordCollectionCreated() where TReco var serviceProvider = this._serviceCollection.BuildServiceProvider(); var collection = serviceProvider.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); } #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs index eb7be9f4e1fb..315f8632e932 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs @@ -242,8 +242,8 @@ private sealed class DummyType; #pragma warning restore CA1812 private static VectorStoreRecordModel BuildModel(List properties) - => new VectorStoreRecordModelBuilder(RedisHashSetVectorStoreRecordCollection.ModelBuildingOptions) + => new 
VectorStoreRecordModelBuilder(RedisHashSetVectorStoreRecordCollection.ModelBuildingOptions) .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new() { Properties = properties }); } diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreTests.cs index 9280051fd266..e2347c4ea989 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreTests.cs @@ -39,7 +39,7 @@ public void GetCollectionReturnsJsonCollection() // Assert. Assert.NotNull(actual); - Assert.IsType>>(actual); + Assert.IsType>>(actual); } [Fact] @@ -53,7 +53,7 @@ public void GetCollectionReturnsHashSetCollection() // Assert. Assert.NotNull(actual); - Assert.IsType>>(actual); + Assert.IsType>>(actual); } #pragma warning disable CS0618 // IRedisVectorStoreRecordCollectionFactory is obsolete diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteServiceCollectionExtensionsTests.cs index e7f78e388c02..a3594df56cea 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteServiceCollectionExtensionsTests.cs @@ -40,11 +40,11 @@ public void AddVectorStoreRecordCollectionWithStringKeyRegistersClass() // Assert var collection = serviceProvider.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } [Fact] @@ -58,11 +58,11 @@ public void AddVectorStoreRecordCollectionWithNumericKeyRegistersClass() // Assert var collection = serviceProvider.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } #region private diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs index 72b91d8dea88..d77103500b8d 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs @@ -65,7 +65,7 @@ public void MapFromDataToStorageModelWithNumericKeyReturnsValidStorageModel() [Theory] [InlineData(true)] [InlineData(false)] - public void MapFromStorageToDataModelWithStringKeyReturnsValidGenericModel(bool includeVectors) + public void MapFromStorageToDataModelWithStringKeyReturnsValidDynamicModel(bool includeVectors) { // Arrange var vector = new ReadOnlyMemory([1.1f, 2.2f, 3.3f, 4.4f]); @@ -106,7 +106,7 @@ public void MapFromStorageToDataModelWithStringKeyReturnsValidGenericModel(bool [Theory] [InlineData(true)] [InlineData(false)] - public void MapFromStorageToDataModelWithNumericKeyReturnsValidGenericModel(bool includeVectors) + public void MapFromStorageToDataModelWithNumericKeyReturnsValidDynamicModel(bool includeVectors) { // Arrange var vector = new ReadOnlyMemory([1.1f, 2.2f, 3.3f, 4.4f]); diff --git 
a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateDynamicDataModelMapperTests.cs similarity index 74% rename from dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateGenericDataModelMapperTests.cs rename to dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateDynamicDataModelMapperTests.cs index 15193f410b4f..f80b186a0a7a 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateGenericDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateDynamicDataModelMapperTests.cs @@ -14,9 +14,9 @@ namespace SemanticKernel.Connectors.Weaviate.UnitTests; /// -/// Unit tests for class. +/// Unit tests for class. /// -public sealed class WeaviateGenericDataModelMapperTests +public sealed class WeaviateDynamicDataModelMapperTests { private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() { @@ -31,7 +31,7 @@ public sealed class WeaviateGenericDataModelMapperTests private static readonly VectorStoreRecordModel s_model = new WeaviateModelBuilder() .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new VectorStoreRecordDefinition { Properties = @@ -80,45 +80,43 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes() { // Arrange var key = new Guid("55555555-5555-5555-5555-555555555555"); - var sut = new WeaviateGenericDataModelMapper("Collection", s_model, s_jsonSerializerOptions); + var sut = new WeaviateDynamicDataModelMapper("Collection", s_model, s_jsonSerializerOptions); - var dataModel = new VectorStoreGenericDataModel(key) + var dataModel = new Dictionary { - Data = - { - ["StringDataProp"] = "string", - ["BoolDataProp"] = true, - ["NullableBoolDataProp"] = false, - ["IntDataProp"] = 1, - ["NullableIntDataProp"] = 2, - ["LongDataProp"] = 3L, - ["NullableLongDataProp"] = 4L, - ["ShortDataProp"] = (short)5, - ["NullableShortDataProp"] = (short)6, - ["ByteDataProp"] = (byte)7, - ["NullableByteDataProp"] = (byte)8, - ["FloatDataProp"] = 9.0f, - ["NullableFloatDataProp"] = 10.0f, - ["DoubleDataProp"] = 11.0, - ["NullableDoubleDataProp"] = 12.0, - ["DecimalDataProp"] = 13.99m, - ["NullableDecimalDataProp"] = 14.00m, - ["DateTimeDataProp"] = new DateTime(2021, 1, 1), - ["NullableDateTimeDataProp"] = new DateTime(2021, 1, 1), - ["DateTimeOffsetDataProp"] = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), - ["NullableDateTimeOffsetDataProp"] = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), - ["GuidDataProp"] = new Guid("11111111-1111-1111-1111-111111111111"), - ["NullableGuidDataProp"] = new Guid("22222222-2222-2222-2222-222222222222"), - ["TagListDataProp"] = s_taglist - }, - Vectors = - { - ["FloatVector"] = new ReadOnlyMemory(s_floatVector), - ["NullableFloatVector"] = new ReadOnlyMemory(s_floatVector), - ["DoubleVector"] = new ReadOnlyMemory(s_doubleVector), - ["NullableDoubleVector"] = new ReadOnlyMemory(s_doubleVector), - } - }; + ["Key"] = key, + + ["StringDataProp"] = "string", + ["BoolDataProp"] = true, + ["NullableBoolDataProp"] = false, + ["IntDataProp"] = 1, + ["NullableIntDataProp"] = 2, + ["LongDataProp"] = 3L, + ["NullableLongDataProp"] = 4L, + ["ShortDataProp"] = (short)5, + ["NullableShortDataProp"] = (short)6, + ["ByteDataProp"] = (byte)7, + ["NullableByteDataProp"] = (byte)8, + ["FloatDataProp"] = 9.0f, + ["NullableFloatDataProp"] = 10.0f, + ["DoubleDataProp"] = 11.0, + ["NullableDoubleDataProp"] = 12.0, + ["DecimalDataProp"] = 13.99m, + 
["NullableDecimalDataProp"] = 14.00m, + ["DateTimeDataProp"] = new DateTime(2021, 1, 1), + ["NullableDateTimeDataProp"] = new DateTime(2021, 1, 1), + ["DateTimeOffsetDataProp"] = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["NullableDateTimeOffsetDataProp"] = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["GuidDataProp"] = new Guid("11111111-1111-1111-1111-111111111111"), + ["NullableGuidDataProp"] = new Guid("22222222-2222-2222-2222-222222222222"), + ["TagListDataProp"] = s_taglist, + + ["FloatVector"] = new ReadOnlyMemory(s_floatVector), + ["NullableFloatVector"] = new ReadOnlyMemory(s_floatVector), + ["DoubleVector"] = new ReadOnlyMemory(s_doubleVector), + ["NullableDoubleVector"] = new ReadOnlyMemory(s_doubleVector), + } + ; // Act var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -174,20 +172,17 @@ public void MapFromDataToStorageModelMapsNullValues() new("NullableFloatVector", typeof(ReadOnlyMemory?)) }; - var dataModel = new VectorStoreGenericDataModel(key) + var dataModel = new Dictionary { - Data = - { - ["StringDataProp"] = null, - ["NullableIntDataProp"] = null, - }, - Vectors = - { - ["NullableFloatVector"] = null, - }, + ["Key"] = key, + + ["StringDataProp"] = null, + ["NullableIntDataProp"] = null, + + ["NullableFloatVector"] = null }; - var sut = new WeaviateGenericDataModelMapper("Collection", s_model, s_jsonSerializerOptions); + var sut = new WeaviateDynamicDataModelMapper("Collection", s_model, s_jsonSerializerOptions); // Act var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -203,7 +198,7 @@ public void MapFromStorageToDataModelMapsAllSupportedTypes() { // Arrange var key = new Guid("55555555-5555-5555-5555-555555555555"); - var sut = new WeaviateGenericDataModelMapper("Collection", s_model, s_jsonSerializerOptions); + var sut = new WeaviateDynamicDataModelMapper("Collection", s_model, s_jsonSerializerOptions); var storageModel = new JsonObject { @@ -248,35 +243,35 @@ public void MapFromStorageToDataModelMapsAllSupportedTypes() var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); // Assert - Assert.Equal(key, dataModel.Key); - Assert.Equal("string", dataModel.Data["StringDataProp"]); - Assert.Equal(true, dataModel.Data["BoolDataProp"]); - Assert.Equal(false, dataModel.Data["NullableBoolDataProp"]); - Assert.Equal(1, dataModel.Data["IntDataProp"]); - Assert.Equal(2, dataModel.Data["NullableIntDataProp"]); - Assert.Equal(3L, dataModel.Data["LongDataProp"]); - Assert.Equal(4L, dataModel.Data["NullableLongDataProp"]); - Assert.Equal((short)5, dataModel.Data["ShortDataProp"]); - Assert.Equal((short)6, dataModel.Data["NullableShortDataProp"]); - Assert.Equal((byte)7, dataModel.Data["ByteDataProp"]); - Assert.Equal((byte)8, dataModel.Data["NullableByteDataProp"]); - Assert.Equal(9.0f, dataModel.Data["FloatDataProp"]); - Assert.Equal(10.0f, dataModel.Data["NullableFloatDataProp"]); - Assert.Equal(11.0, dataModel.Data["DoubleDataProp"]); - Assert.Equal(12.0, dataModel.Data["NullableDoubleDataProp"]); - Assert.Equal(13.99m, dataModel.Data["DecimalDataProp"]); - Assert.Equal(14.00m, dataModel.Data["NullableDecimalDataProp"]); - Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0), dataModel.Data["DateTimeDataProp"]); - Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0), dataModel.Data["NullableDateTimeDataProp"]); - Assert.Equal(new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel.Data["DateTimeOffsetDataProp"]); - Assert.Equal(new DateTimeOffset(2022, 1, 
1, 0, 0, 0, TimeSpan.Zero), dataModel.Data["NullableDateTimeOffsetDataProp"]); - Assert.Equal(new Guid("11111111-1111-1111-1111-111111111111"), dataModel.Data["GuidDataProp"]); - Assert.Equal(new Guid("22222222-2222-2222-2222-222222222222"), dataModel.Data["NullableGuidDataProp"]); - Assert.Equal(s_taglist, dataModel.Data["TagListDataProp"]); - Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel.Vectors["FloatVector"]!).ToArray()); - Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel.Vectors["NullableFloatVector"]!)!.ToArray()); - Assert.Equal(s_doubleVector, ((ReadOnlyMemory)dataModel.Vectors["DoubleVector"]!).ToArray()); - Assert.Equal(s_doubleVector, ((ReadOnlyMemory)dataModel.Vectors["NullableDoubleVector"]!)!.ToArray()); + Assert.Equal(key, dataModel["Key"]); + Assert.Equal("string", dataModel["StringDataProp"]); + Assert.Equal(true, dataModel["BoolDataProp"]); + Assert.Equal(false, dataModel["NullableBoolDataProp"]); + Assert.Equal(1, dataModel["IntDataProp"]); + Assert.Equal(2, dataModel["NullableIntDataProp"]); + Assert.Equal(3L, dataModel["LongDataProp"]); + Assert.Equal(4L, dataModel["NullableLongDataProp"]); + Assert.Equal((short)5, dataModel["ShortDataProp"]); + Assert.Equal((short)6, dataModel["NullableShortDataProp"]); + Assert.Equal((byte)7, dataModel["ByteDataProp"]); + Assert.Equal((byte)8, dataModel["NullableByteDataProp"]); + Assert.Equal(9.0f, dataModel["FloatDataProp"]); + Assert.Equal(10.0f, dataModel["NullableFloatDataProp"]); + Assert.Equal(11.0, dataModel["DoubleDataProp"]); + Assert.Equal(12.0, dataModel["NullableDoubleDataProp"]); + Assert.Equal(13.99m, dataModel["DecimalDataProp"]); + Assert.Equal(14.00m, dataModel["NullableDecimalDataProp"]); + Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0), dataModel["DateTimeDataProp"]); + Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0), dataModel["NullableDateTimeDataProp"]); + Assert.Equal(new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel["DateTimeOffsetDataProp"]); + Assert.Equal(new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel["NullableDateTimeOffsetDataProp"]); + Assert.Equal(new Guid("11111111-1111-1111-1111-111111111111"), dataModel["GuidDataProp"]); + Assert.Equal(new Guid("22222222-2222-2222-2222-222222222222"), dataModel["NullableGuidDataProp"]); + Assert.Equal(s_taglist, dataModel["TagListDataProp"]); + Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel["FloatVector"]!).ToArray()); + Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel["NullableFloatVector"]!)!.ToArray()); + Assert.Equal(s_doubleVector, ((ReadOnlyMemory)dataModel["DoubleVector"]!).ToArray()); + Assert.Equal(s_doubleVector, ((ReadOnlyMemory)dataModel["NullableDoubleVector"]!)!.ToArray()); } [Fact] @@ -311,23 +306,23 @@ public void MapFromStorageToDataModelMapsNullValues() } }; - var sut = new WeaviateGenericDataModelMapper("Collection", s_model, s_jsonSerializerOptions); + var sut = new WeaviateDynamicDataModelMapper("Collection", s_model, s_jsonSerializerOptions); // Act var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); // Assert - Assert.Equal(key, dataModel.Key); - Assert.Null(dataModel.Data["StringDataProp"]); - Assert.Null(dataModel.Data["NullableIntDataProp"]); - Assert.Null(dataModel.Vectors["NullableFloatVector"]); + Assert.Equal(key, dataModel["Key"]); + Assert.Null(dataModel["StringDataProp"]); + Assert.Null(dataModel["NullableIntDataProp"]); + Assert.Null(dataModel["NullableFloatVector"]); } [Fact] public void 
MapFromStorageToDataModelThrowsForMissingKey() { // Arrange - var sut = new WeaviateGenericDataModelMapper("Collection", s_model, s_jsonSerializerOptions); + var sut = new WeaviateDynamicDataModelMapper("Collection", s_model, s_jsonSerializerOptions); var storageModel = new JsonObject(); @@ -351,12 +346,12 @@ public void MapFromDataToStorageModelSkipsMissingProperties() ] }; - var model = new WeaviateModelBuilder().Build(typeof(VectorStoreGenericDataModel), recordDefinition, s_jsonSerializerOptions); + var model = new WeaviateModelBuilder().Build(typeof(Dictionary), recordDefinition, s_jsonSerializerOptions); var key = new Guid("55555555-5555-5555-5555-555555555555"); - var record = new VectorStoreGenericDataModel(key); - var sut = new WeaviateGenericDataModelMapper("Collection", model, s_jsonSerializerOptions); + var record = new Dictionary { ["Key"] = key }; + var sut = new WeaviateDynamicDataModelMapper("Collection", model, s_jsonSerializerOptions); // Act var storageModel = sut.MapFromDataToStorageModel(record); @@ -382,11 +377,11 @@ public void MapFromStorageToDataModelSkipsMissingProperties() ] }; - var model = new WeaviateModelBuilder().Build(typeof(VectorStoreGenericDataModel), recordDefinition, s_jsonSerializerOptions); + var model = new WeaviateModelBuilder().Build(typeof(Dictionary), recordDefinition, s_jsonSerializerOptions); var key = new Guid("55555555-5555-5555-5555-555555555555"); - var sut = new WeaviateGenericDataModelMapper("Collection", model, s_jsonSerializerOptions); + var sut = new WeaviateDynamicDataModelMapper("Collection", model, s_jsonSerializerOptions); var storageModel = new JsonObject { @@ -397,8 +392,8 @@ public void MapFromStorageToDataModelSkipsMissingProperties() var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); // Assert - Assert.Equal(key, dataModel.Key); - Assert.False(dataModel.Data.ContainsKey("StringDataProp")); - Assert.False(dataModel.Vectors.ContainsKey("FloatVector")); + Assert.Equal(key, dataModel["Key"]); + Assert.False(dataModel.ContainsKey("StringDataProp")); + Assert.False(dataModel.ContainsKey("FloatVector")); } } diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateKernelBuilderExtensionsTests.cs index 23b34cdbc2ba..468cc3462a05 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateKernelBuilderExtensionsTests.cs @@ -50,11 +50,11 @@ public void AddWeaviateVectorStoreRecordCollectionRegistersClass() // Assert var collection = kernel.Services.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = kernel.Services.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateServiceCollectionExtensionsTests.cs index e33f735ebc4f..8c6afcd3ea4e 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateServiceCollectionExtensionsTests.cs @@ -55,11 
+55,11 @@ private void AssertVectorStoreRecordCollectionCreated() var collection = serviceProvider.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } #pragma warning disable CA1812 // Avoid uninstantiated internal classes diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs index 62ab99c428d1..e294a332ec0c 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs @@ -20,7 +20,7 @@ public void ItThrowsExceptionWithInvalidIndexKind() // Arrange var model = new WeaviateModelBuilder() .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new VectorStoreRecordDefinition { Properties = @@ -43,7 +43,7 @@ public void ItReturnsCorrectSchemaWithValidIndexKind(string indexKind, string ex // Arrange var model = new WeaviateModelBuilder() .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new VectorStoreRecordDefinition { Properties = @@ -67,7 +67,7 @@ public void ItThrowsExceptionWithInvalidDistanceFunction() // Arrange var model = new WeaviateModelBuilder() .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new VectorStoreRecordDefinition { Properties = @@ -92,7 +92,7 @@ public void ItReturnsCorrectSchemaWithValidDistanceFunction(string distanceFunct // Arrange var model = new WeaviateModelBuilder() .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new VectorStoreRecordDefinition { Properties = @@ -165,7 +165,7 @@ public void ItMapsPropertyCorrectly(Type propertyType, string expectedPropertyTy // Arrange var model = new WeaviateModelBuilder() .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new VectorStoreRecordDefinition { Properties = diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs index 27cf9dc4d82c..7083c3d8cd36 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. 
using System; +using System.Collections.Generic; using System.Text.Json; using System.Text.Json.Serialization; using Microsoft.Extensions.VectorData; @@ -33,7 +34,7 @@ public sealed class WeaviateVectorStoreRecordCollectionQueryBuilderTests private readonly VectorStoreRecordModel _model = new WeaviateModelBuilder() .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new() { Properties = diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs index 0016cde2b950..071000145fff 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs @@ -16,7 +16,7 @@ namespace SemanticKernel.Connectors.Weaviate.UnitTests; /// -/// Unit tests for class. +/// Unit tests for class. /// public sealed class WeaviateVectorStoreRecordCollectionTests : IDisposable { @@ -32,7 +32,7 @@ public WeaviateVectorStoreRecordCollectionTests() public void ConstructorForModelWithoutKeyThrowsException() { // Act & Assert - var exception = Assert.Throws(() => new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection")); + var exception = Assert.Throws(() => new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection")); Assert.Contains("No key property found", exception.Message); } @@ -43,7 +43,7 @@ public void ConstructorWithoutEndpointThrowsException() using var httpClient = new HttpClient(); // Act & Assert - var exception = Assert.Throws(() => new WeaviateVectorStoreRecordCollection(httpClient, "Collection")); + var exception = Assert.Throws(() => new WeaviateVectorStoreRecordCollection(httpClient, "Collection")); Assert.Contains("Weaviate endpoint should be provided", exception.Message); } @@ -51,7 +51,7 @@ public void ConstructorWithoutEndpointThrowsException() public void ConstructorWithDeclarativeModelInitializesCollection() { // Act & Assert - var collection = new WeaviateVectorStoreRecordCollection( + var collection = new WeaviateVectorStoreRecordCollection( this._mockHttpClient, "Collection"); @@ -68,7 +68,7 @@ public void ConstructorWithImperativeModelInitializesCollection() }; // Act - var collection = new WeaviateVectorStoreRecordCollection( + var collection = new WeaviateVectorStoreRecordCollection( this._mockHttpClient, "Collection", new() { VectorStoreRecordDefinition = definition }); @@ -84,7 +84,7 @@ public async Task CollectionExistsReturnsValidResultAsync(HttpResponseMessage re // Arrange this._messageHandlerStub.ResponseToReturn = responseMessage; - var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); // Act var actualResult = await sut.CollectionExistsAsync(); @@ -113,7 +113,7 @@ public async Task CollectionExistsReturnsValidResultAsync(HttpResponseMessage re [InlineData("containsNonAsciią")] public void CollectionCtorRejectsInvalidNames(string collectionName) { - ArgumentException argumentException = Assert.Throws(() => new WeaviateVectorStoreRecordCollection(this._mockHttpClient, collectionName)); + ArgumentException argumentException = Assert.Throws(() => new WeaviateVectorStoreRecordCollection(this._mockHttpClient, collectionName)); Assert.Equal("collectionName", argumentException.ParamName); } @@ -122,7 +122,7 @@ public async Task 
CreateCollectionUsesValidCollectionSchemaAsync() { // Arrange const string CollectionName = "Collection"; - var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); // Act await sut.CreateCollectionAsync(); @@ -158,7 +158,7 @@ public async Task DeleteCollectionSendsValidRequestAsync() { // Arrange const string CollectionName = "Collection"; - var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); // Act await sut.DeleteCollectionAsync(); @@ -175,7 +175,7 @@ public async Task DeleteSendsValidRequestAsync() const string CollectionName = "Collection"; var id = new Guid("55555555-5555-5555-5555-555555555555"); - var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); // Act await sut.DeleteAsync(id); @@ -192,7 +192,7 @@ public async Task DeleteBatchUsesValidQueryMatchAsync() const string CollectionName = "Collection"; List ids = [new Guid("11111111-1111-1111-1111-111111111111"), new Guid("22222222-2222-2222-2222-222222222222")]; - var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); // Act await sut.DeleteAsync(ids); @@ -228,7 +228,7 @@ public async Task GetExistingRecordReturnsValidRecordAsync() Content = new StringContent(JsonSerializer.Serialize(jsonObject)) }; - var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); // Act var result = await sut.GetAsync(id); @@ -258,7 +258,7 @@ public async Task GetExistingBatchRecordsReturnsValidRecordsAsync() this._messageHandlerStub.ResponseQueue.Enqueue(response1); this._messageHandlerStub.ResponseQueue.Enqueue(response2); - var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); // Act var results = await sut.GetAsync([id1, id2]).ToListAsync(); @@ -287,7 +287,7 @@ public async Task UpsertReturnsRecordKeyAsync() Content = new StringContent(JsonSerializer.Serialize(batchResponse)), }; - var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); // Act var result = await sut.UpsertAsync(hotel); @@ -326,7 +326,7 @@ public async Task UpsertReturnsRecordKeysAsync() Content = new StringContent(JsonSerializer.Serialize(batchResponse)), }; - var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); // Act var results = await sut.UpsertAsync([hotel1, hotel2]).ToListAsync(); @@ -374,7 +374,7 @@ public async Task UpsertWithCustomMapperWorksCorrectlyAsync() Content = new StringContent(JsonSerializer.Serialize(batchResponse)), }; - var sut = new WeaviateVectorStoreRecordCollection( + var sut = new WeaviateVectorStoreRecordCollection( this._mockHttpClient, "Collection", new() { JsonObjectCustomMapper = mockMapper.Object }); @@ -415,7 +415,7 @@ public async Task GetWithCustomMapperWorksCorrectlyAsync() 
.Setup(l => l.MapFromStorageToDataModel(It.IsAny(), It.IsAny())) .Returns(new WeaviateHotel { HotelId = id, HotelName = "Test Name from mapper" }); - var sut = new WeaviateVectorStoreRecordCollection( + var sut = new WeaviateVectorStoreRecordCollection( this._mockHttpClient, "Collection", new() { JsonObjectCustomMapper = mockMapper.Object }); @@ -442,7 +442,7 @@ public async Task ItUsesHttpClientParametersAsync(bool initializeOptions, string new WeaviateVectorStoreRecordCollectionOptions() { Endpoint = new Uri("http://test-endpoint"), ApiKey = "fake-key" } : null; - var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName, options); + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName, options); // Act await sut.CreateCollectionAsync(); @@ -502,7 +502,7 @@ public async Task VectorizedSearchReturnsValidRecordAsync(bool includeVectors) Content = new StringContent(JsonSerializer.Serialize(jsonObject)) }; - var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); // Act var actual = await sut.VectorizedSearchAsync(vector, top: 3, new() @@ -543,7 +543,7 @@ public async Task VectorizedSearchReturnsValidRecordAsync(bool includeVectors) public async Task VectorizedSearchWithUnsupportedVectorTypeThrowsExceptionAsync() { // Arrange - var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); // Act & Assert await Assert.ThrowsAsync(async () => @@ -554,7 +554,7 @@ await Assert.ThrowsAsync(async () => public async Task VectorizedSearchWithNonExistentVectorPropertyNameThrowsExceptionAsync() { // Arrange - var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); // Act & Assert await Assert.ThrowsAsync(async () => diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs index f624599f5478..610d2df40bb5 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs @@ -33,7 +33,7 @@ public sealed class WeaviateVectorStoreRecordMapperTests "CollectionName", new WeaviateModelBuilder() .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new VectorStoreRecordDefinition { Properties = diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreTests.cs index 5a99f4c1ee20..26655edeb74f 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreTests.cs @@ -34,7 +34,7 @@ public void GetCollectionWithNotSupportedKeyThrowsException() var sut = new WeaviateVectorStore(this._mockHttpClient); // Act & Assert - Assert.Throws(() => sut.GetCollection("collection")); + Assert.Throws(() => sut.GetCollection("Collection")); } [Fact] diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordDataPropertyModel.cs 
b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordDataPropertyModel.cs index dc23d2e7c122..6296bad857e9 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordDataPropertyModel.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordDataPropertyModel.cs @@ -1,7 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; namespace Microsoft.Extensions.VectorData.ConnectorSupport; @@ -29,47 +28,6 @@ public class VectorStoreRecordDataPropertyModel(string modelName, Type type) : V /// public bool IsFullTextIndexed { get; set; } - /// - // TODO: Temporary, remove once we move to Dictionary as the dynamic representation - public override object? GetValueAsObject(object record) - { - if (this.PropertyInfo is null) - { - var type = record.GetType(); - - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) - { - var dataProperty = type.GetProperty("Data")!; - var dictionary = (Dictionary)dataProperty.GetValue(record)!; - return dictionary.TryGetValue(this.ModelName, out var value) - ? value - : null; - } - } - - return base.GetValueAsObject(record); - } - - /// - // TODO: Temporary, remove once we move to Dictionary as the dynamic representation - public override void SetValueAsObject(object record, object? value) - { - if (this.PropertyInfo is null) - { - var type = record.GetType(); - - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) - { - var dataProperty = type.GetProperty("Data")!; - var dictionary = (Dictionary)dataProperty.GetValue(record)!; - dictionary[this.ModelName] = value; - return; - } - } - - base.SetValueAsObject(record, value); - } - /// public override string ToString() => $"{this.ModelName} (Data, {this.Type.Name})"; diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordKeyPropertyModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordKeyPropertyModel.cs index 3311786c1e6a..b791ac9d21e8 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordKeyPropertyModel.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordKeyPropertyModel.cs @@ -12,43 +12,6 @@ namespace Microsoft.Extensions.VectorData.ConnectorSupport; [Experimental("MEVD9001")] public class VectorStoreRecordKeyPropertyModel(string modelName, Type type) : VectorStoreRecordPropertyModel(modelName, type) { - /// - // TODO: Temporary, remove once we move to Dictionary as the dynamic representation - public override object? GetValueAsObject(object record) - { - if (this.PropertyInfo is null) - { - var type = record.GetType(); - - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) - { - var keyProperty = type.GetProperty("Key")!; - return keyProperty.GetValue(record); - } - } - - return base.GetValueAsObject(record); - } - - /// - // TODO: Temporary, remove once we move to Dictionary as the dynamic representation - public override void SetValueAsObject(object record, object? 
value) - { - if (this.PropertyInfo is null) - { - var type = record.GetType(); - - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) - { - var keyProperty = type.GetProperty("Key")!; - keyProperty.SetValue(record, value); - return; - } - } - - base.SetValueAsObject(record, value); - } - /// public override string ToString() => $"{this.ModelName} (Key, {this.Type.Name})"; diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs index 622cfe07a772..d2b53b7f46a2 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs @@ -176,60 +176,41 @@ public VectorStoreRecordPropertyModel GetDataOrKeyProperty(Expression(Expression> expression, bool data) where TProperty : VectorStoreRecordPropertyModel { - string expectedGenericModelPropertyName = data - ? nameof(VectorStoreGenericDataModel.Data) - : nameof(VectorStoreGenericDataModel.Vectors); - - MemberExpression? member = expression.Body as MemberExpression; - // (TRecord r) => r.PropertyName is translated into - // (TRecord r) => (object)r.PropertyName for properties that return struct like ReadOnlyMemory. - if (member is null && expression.Body is UnaryExpression unary - && unary.Operand.NodeType == ExpressionType.MemberAccess) + var node = expression.Body; + + // First, unwrap any object convert node: r => (object)r.PropertyName becomes r => r.PropertyName + if (expression.Body is UnaryExpression { NodeType: ExpressionType.Convert } convert + && convert.Type == typeof(object)) { - member = unary.Operand as MemberExpression; + node = convert.Operand; } - if (member is { Member: PropertyInfo clrProperty } - && expression.Parameters.Count == 1 - && member.Expression == expression.Parameters[0]) + var propertyName = node switch { - foreach (var property in this.Properties) - { - if (property.PropertyInfo == clrProperty) + // Simple member expression over the lambda parameter (r => r.PropertyName) + MemberExpression { Member: PropertyInfo clrProperty } member when member.Expression == expression.Parameters[0] + => clrProperty.Name, + + // Dictionary access over the lambda parameter, in dynamic mapping (r => r["PropertyName"]) + MethodCallExpression { Method.Name: "get_Item", Arguments: [var keyExpression] } methodCall + => keyExpression switch { - // TODO: Property error checking if the wrong property type is selected. - return (TProperty)property; - } - } + ConstantExpression { Value: string text } => text, + MemberExpression field when TryGetCapturedValue(field, out object? capturedValue) && capturedValue is string text => text, + _ => throw new InvalidOperationException("Invalid dictionary key expression") + }, - throw new InvalidOperationException($"The property {clrProperty.Name} of {typeof(TRecord).FullName} is not a {(data ? 
"Data" : "Vector")} property."); - } - // (VectorStoreGenericDataModel r) => r.Vectors["PropertyName"] - else if (expression.Body is MethodCallExpression methodCall - // It's a Func, object> - && expression.Type.IsGenericType - && expression.Type.GenericTypeArguments.Length == 2 - && expression.Type.GenericTypeArguments[0].IsGenericType - && expression.Type.GenericTypeArguments[0].GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>) - // It's accessing VectorStoreGenericDataModel.Vectors (or Data) - && methodCall.Object is MemberExpression memberAccess - && memberAccess.Member.Name == expectedGenericModelPropertyName - // and has a single argument - && methodCall.Arguments.Count == 1) - { - string name = methodCall.Arguments[0] switch - { - ConstantExpression constant when constant.Value is string text => text, - MemberExpression field when TryGetCapturedValue(field, out object? capturedValue) && capturedValue is string text => text, - _ => throw new InvalidOperationException($"The value of the provided {(data ? "Additional" : "Vector")}Property option is not a valid expression.") - }; + _ => throw new InvalidOperationException("Property selector lambda is invalid") + }; - // TODO: Property error checking if the wrong property type is selected. - return (TProperty)(this.Properties.FirstOrDefault(p => p.ModelName == name) - ?? throw new InvalidOperationException($"The {typeof(TRecord).FullName} type does not have a vector property named '{name}'.")); + if (!this.PropertyMap.TryGetValue(propertyName, out var property)) + { + throw new InvalidOperationException($"Property '{propertyName}' could not be found."); } - throw new InvalidOperationException($"The value of the provided {(data ? "Additional" : "Vector")}Property option is not a valid expression."); + return property is TProperty typedProperty + ? typedProperty + : throw new InvalidOperationException($"Property '{propertyName}' isn't of type '{typeof(TProperty).Name}'."); static bool TryGetCapturedValue(Expression expression, out object? capturedValue) { diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs index eda35d595ede..60ef5a74d030 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs @@ -68,7 +68,7 @@ public VectorStoreRecordModelBuilder(VectorStoreRecordModelBuildingOptions optio [RequiresUnreferencedCode("Currently not compatible with trimming")] // TODO public virtual VectorStoreRecordModel Build(Type type, VectorStoreRecordDefinition? vectorStoreRecordDefinition) { - var dynamicMapping = type.IsGenericType && type.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>); + var dynamicMapping = type == typeof(Dictionary); if (!dynamicMapping) { diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordPropertyModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordPropertyModel.cs index 7787cda907bd..4d9534c0d979 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordPropertyModel.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordPropertyModel.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. 
using System; +using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Reflection; @@ -53,20 +54,26 @@ public string StorageName /// /// Reads the property from the given , returning the value as an . /// - // TODO: Temporary, remove virtual once we move to Dictionary as the dynamic representation public virtual object? GetValueAsObject(object record) { - if (this.PropertyInfo is not null) + if (this.PropertyInfo is null) { - // We have a .NET property (non-dynamic POCO mapping) - - // TODO: Implement compiled delegates for better performance, #11122 - // TODO: Implement source-generated accessors for NativeAOT, #10256 + if (record is Dictionary dictionary) + { + return dictionary.TryGetValue(this.ModelName, out var value) + ? value + : null; + } - return this.PropertyInfo.GetValue(record); + throw new UnreachableException("Non-dynamic mapping but PropertyInfo is null."); } - throw new UnreachableException("Must be overridden by derived class (for now)."); + // We have a CLR property (non-dynamic POCO mapping) + + // TODO: Implement compiled delegates for better performance, #11122 + // TODO: Implement source-generated accessors for NativeAOT, #10256 + + return this.PropertyInfo.GetValue(record); } /// @@ -74,23 +81,28 @@ public string StorageName /// s public virtual void SetValueAsObject(object record, object? value) { - if (this.PropertyInfo is not null) + if (this.PropertyInfo is null) { - // We have a .NET property (non-dynamic POCO mapping) - - // TODO: Implement compiled delegates for better performance, #11122 - // TODO: Implement source-generated accessors for NativeAOT, #10256 - - // If the value is null, no need to set the property (it's the CLR default) - if (value is not null) + if (record.GetType() == typeof(Dictionary)) { - this.PropertyInfo.SetValue(record, value); + var dictionary = (Dictionary)record; + dictionary[this.ModelName] = value; + return; } - return; + throw new UnreachableException("Non-dynamic mapping but ClrProperty is null."); } - throw new UnreachableException("Must be overridden by derived class (for now)."); + // We have a CLR property (non-dynamic POCO mapping) + + // TODO: Implement compiled delegates for better performance, #11122 + // TODO: Implement source-generated accessors for NativeAOT, #10256 + + // If the value is null, no need to set the property (it's the CLR default) + if (value is not null) + { + this.PropertyInfo.SetValue(record, value); + } } // TODO: implement the generic accessors to avoid boxing, and make use of them in connectors diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs index 333003655646..33eb88ce6802 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs @@ -1,7 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; namespace Microsoft.Extensions.VectorData.ConnectorSupport; @@ -40,47 +39,6 @@ public class VectorStoreRecordVectorPropertyModel(string modelName, Type type) : /// public string? DistanceFunction { get; set; } - /// - // TODO: Temporary, remove once we move to Dictionary as the dynamic representation - public override object? 
GetValueAsObject(object record) - { - if (this.PropertyInfo is null) - { - var type = record.GetType(); - - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) - { - var vectorProperty = type.GetProperty("Vectors")!; - var dictionary = (Dictionary)vectorProperty.GetValue(record)!; - return dictionary.TryGetValue(this.ModelName, out var value) - ? value - : null; - } - } - - return base.GetValueAsObject(record); - } - - /// - // TODO: Temporary, remove once we move to Dictionary as the dynamic representation - public override void SetValueAsObject(object record, object? value) - { - if (this.PropertyInfo is null) - { - var type = record.GetType(); - - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>)) - { - var vectorProperty = type.GetProperty("Vectors")!; - var dictionary = (Dictionary)vectorProperty.GetValue(record)!; - dictionary[this.ModelName] = value; - return; - } - } - - base.SetValueAsObject(record, value); - } - /// public override string ToString() => $"{this.ModelName} (Vector, {this.Type.Name})"; diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs index c9b0faf3409d..63484c33f4de 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs @@ -16,9 +16,9 @@ namespace Microsoft.Extensions.VectorData; /// /// Unless otherwise documented, implementations of this interface can be expected to be thread-safe, and can be used concurrently from multiple threads. /// -#pragma warning disable CA1711 // Identifiers should not have incorrect suffix +#pragma warning disable CA1711 // Identifiers should not have incorrect suffix (Collection) public interface IVectorStoreRecordCollection : IVectorizedSearch -#pragma warning restore CA1711 // Identifiers should not have incorrect suffix +#pragma warning restore CA1711 where TKey : notnull where TRecord : notnull { diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs index 4e52243d6997..04570c6a816a 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStoreGenericDataModel.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; namespace Microsoft.Extensions.VectorData; @@ -8,6 +9,7 @@ namespace Microsoft.Extensions.VectorData; /// Represents a generic data model that can be used to store and retrieve any data from a vector store. /// /// The data type of the record key. 
+[Obsolete($"{nameof(VectorStoreGenericDataModel)} has been replaced by Dictionary", error: true)] public sealed class VectorStoreGenericDataModel { /// diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs index 552bc508e4af..1184f229b69b 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Collections.Generic; using System.Linq; using System.Text.Json.Nodes; using System.Threading.Tasks; @@ -17,7 +18,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureAISearch; #pragma warning disable CS0618 // VectorSearchFilter is obsolete /// -/// Integration tests for class. +/// Integration tests for class. /// Tests work with an Azure AI Search Instance. /// [Collection("AzureAISearchVectorStoreCollection")] @@ -33,7 +34,7 @@ public async Task CollectionExistsReturnsCollectionStateAsync(bool expectedExist { // Arrange. var collectionName = expectedExists ? fixture.TestIndexName : "nonexistentcollection"; - var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, collectionName); + var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, collectionName); // Act. var actual = await sut.CollectionExistsAsync(); @@ -54,7 +55,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe { VectorStoreRecordDefinition = useRecordDefinition ? fixture.VectorStoreRecordDefinition : null }; - var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, testCollectionName, options); + var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, testCollectionName, options); await sut.DeleteCollectionAsync(); @@ -114,7 +115,7 @@ public async Task ItCanDeleteCollectionAsync() // Arrange var tempCollectionName = fixture.TestIndexName + "-delete"; await AzureAISearchVectorStoreFixture.CreateIndexAsync(tempCollectionName, fixture.SearchIndexClient); - var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, tempCollectionName); + var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, tempCollectionName); // Act await sut.DeleteCollectionAsync(); @@ -133,7 +134,7 @@ public async Task ItCanUpsertDocumentToVectorStoreAsync(bool useRecordDefinition { VectorStoreRecordDefinition = useRecordDefinition ? 
fixture.VectorStoreRecordDefinition : null }; - var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName, options); + var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName, options); // Act var hotel = await this.CreateTestHotelAsync("Upsert-1"); @@ -163,7 +164,7 @@ public async Task ItCanUpsertDocumentToVectorStoreAsync(bool useRecordDefinition public async Task ItCanUpsertManyDocumentsToVectorStoreAsync() { // Arrange - var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); + var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); // Act var results = sut.UpsertAsync( @@ -201,7 +202,7 @@ public async Task ItCanGetDocumentFromVectorStoreAsync(bool includeVectors, bool { VectorStoreRecordDefinition = useRecordDefinition ? fixture.VectorStoreRecordDefinition : null }; - var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName, options); + var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName, options); // Act var getResult = await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = includeVectors }); @@ -234,7 +235,7 @@ public async Task ItCanGetDocumentFromVectorStoreAsync(bool includeVectors, bool public async Task ItCanGetManyDocumentsFromVectorStoreAsync() { // Arrange - var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); + var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); // Act // Also include one non-existing key to test that the operation does not fail for these and returns only the found ones. @@ -262,7 +263,7 @@ public async Task ItCanRemoveDocumentFromVectorStoreAsync(bool useRecordDefiniti { VectorStoreRecordDefinition = useRecordDefinition ? 
fixture.VectorStoreRecordDefinition : null }; - var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); + var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); await sut.UpsertAsync(await this.CreateTestHotelAsync("Remove-1")); // Act @@ -278,7 +279,7 @@ public async Task ItCanRemoveDocumentFromVectorStoreAsync(bool useRecordDefiniti public async Task ItCanRemoveManyDocumentsFromVectorStoreAsync() { // Arrange - var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); + var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); await sut.UpsertAsync(await this.CreateTestHotelAsync("RemoveMany-1")); await sut.UpsertAsync(await this.CreateTestHotelAsync("RemoveMany-2")); await sut.UpsertAsync(await this.CreateTestHotelAsync("RemoveMany-3")); @@ -297,7 +298,7 @@ public async Task ItCanRemoveManyDocumentsFromVectorStoreAsync() public async Task ItReturnsNullWhenGettingNonExistentRecordAsync() { // Arrange - var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); + var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); // Act & Assert Assert.Null(await sut.GetAsync("BaseSet-5", new GetRecordOptions { IncludeVectors = true })); @@ -308,7 +309,7 @@ public async Task ItThrowsOperationExceptionForFailedConnectionAsync() { // Arrange var searchIndexClient = new SearchIndexClient(new Uri("https://localhost:12345"), new AzureKeyCredential("12345")); - var sut = new AzureAISearchVectorStoreRecordCollection(searchIndexClient, fixture.TestIndexName); + var sut = new AzureAISearchVectorStoreRecordCollection(searchIndexClient, fixture.TestIndexName); // Act & Assert await Assert.ThrowsAsync(async () => await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true })); @@ -319,7 +320,7 @@ public async Task ItThrowsOperationExceptionForFailedAuthenticationAsync() { // Arrange var searchIndexClient = new SearchIndexClient(new Uri(fixture.Config.ServiceUrl), new AzureKeyCredential("12345")); - var sut = new AzureAISearchVectorStoreRecordCollection(searchIndexClient, fixture.TestIndexName); + var sut = new AzureAISearchVectorStoreRecordCollection(searchIndexClient, fixture.TestIndexName); // Act & Assert await Assert.ThrowsAsync(async () => await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true })); @@ -330,7 +331,7 @@ public async Task ItThrowsMappingExceptionForFailedMapperAsync() { // Arrange var options = new AzureAISearchVectorStoreRecordCollectionOptions { JsonObjectCustomMapper = new FailingMapper() }; - var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName, options); + var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName, options); // Act & Assert await Assert.ThrowsAsync(async () => await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true })); @@ -342,7 +343,7 @@ public async Task ItThrowsMappingExceptionForFailedMapperAsync() public async Task ItCanSearchWithVectorAndFiltersAsync(string option, bool includeVectors) { // Arrange. - var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); + var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); // Act. 
var filter = option == "equality" ? new VectorSearchFilter().EqualTo("HotelName", "Hotel 3") : new VectorSearchFilter().AnyTagEqualTo("Tags", "bar"); @@ -383,7 +384,7 @@ await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"), public async Task ItCanSearchWithVectorizableTextAndFiltersAsync() { // Arrange. - var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); + var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); // Act. var filter = new VectorSearchFilter().EqualTo("HotelName", "Hotel 3"); @@ -402,58 +403,55 @@ public async Task ItCanSearchWithVectorizableTextAndFiltersAsync() } [Fact(Skip = SkipReason)] - public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() { // Arrange - var options = new AzureAISearchVectorStoreRecordCollectionOptions> + var options = new AzureAISearchVectorStoreRecordCollectionOptions> { VectorStoreRecordDefinition = fixture.VectorStoreRecordDefinition }; - var sut = new AzureAISearchVectorStoreRecordCollection>(fixture.SearchIndexClient, fixture.TestIndexName, options); + var sut = new AzureAISearchVectorStoreRecordCollection>(fixture.SearchIndexClient, fixture.TestIndexName, options); // Act var baseSetGetResult = await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true }); var baseSetEmbedding = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("This is a great hotel"); - var genericMapperEmbedding = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("This is a generic mapper hotel"); - var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel("GenericMapper-1") + var dynamicMapperEmbedding = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("This is a dynamic mapper hotel"); + var upsertResult = await sut.UpsertAsync(new Dictionary { - Data = - { - { "HotelName", "Generic Mapper Hotel" }, - { "Description", "This is a generic mapper hotel" }, - { "Tags", new string[] { "generic" } }, - { "ParkingIncluded", false }, - { "LastRenovationDate", new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero) }, - { "Rating", 3.6d } - }, - Vectors = - { - { "DescriptionEmbedding", genericMapperEmbedding } - } + ["HotelId"] = "DynamicMapper-1", + + ["HotelName"] = "Dynamic Mapper Hotel", + ["Description"] = "This is a dynamic mapper hotel", + ["Tags"] = new string[] { "dynamic" }, + ["ParkingIncluded"] = false, + ["LastRenovationDate"] = new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), + ["Rating"] = 3.6d, + + ["DescriptionEmbedding"] = dynamicMapperEmbedding }); - var localGetResult = await sut.GetAsync("GenericMapper-1", new GetRecordOptions { IncludeVectors = true }); + var localGetResult = await sut.GetAsync("DynamicMapper-1", new GetRecordOptions { IncludeVectors = true }); // Assert Assert.NotNull(baseSetGetResult); - Assert.Equal("Hotel 1", baseSetGetResult.Data["HotelName"]); - Assert.Equal("This is a great hotel", baseSetGetResult.Data["Description"]); - Assert.Equal(new[] { "pool", "air conditioning", "concierge" }, baseSetGetResult.Data["Tags"]); - Assert.False((bool?)baseSetGetResult.Data["ParkingIncluded"]); - Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), baseSetGetResult.Data["LastRenovationDate"]); - Assert.Equal(3.6d, baseSetGetResult.Data["Rating"]); - Assert.Equal(baseSetEmbedding, (ReadOnlyMemory)baseSetGetResult.Vectors["DescriptionEmbedding"]!); + Assert.Equal("Hotel 
1", baseSetGetResult["HotelName"]); + Assert.Equal("This is a great hotel", baseSetGetResult["Description"]); + Assert.Equal(new[] { "pool", "air conditioning", "concierge" }, baseSetGetResult["Tags"]); + Assert.False((bool?)baseSetGetResult["ParkingIncluded"]); + Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), baseSetGetResult["LastRenovationDate"]); + Assert.Equal(3.6d, baseSetGetResult["Rating"]); + Assert.Equal(baseSetEmbedding, (ReadOnlyMemory)baseSetGetResult["DescriptionEmbedding"]!); Assert.NotNull(upsertResult); - Assert.Equal("GenericMapper-1", upsertResult); + Assert.Equal("DynamicMapper-1", upsertResult); Assert.NotNull(localGetResult); - Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); - Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); - Assert.Equal(new[] { "generic" }, localGetResult.Data["Tags"]); - Assert.False((bool?)localGetResult.Data["ParkingIncluded"]); - Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), localGetResult.Data["LastRenovationDate"]); - Assert.Equal(3.6d, localGetResult.Data["Rating"]); - Assert.Equal(genericMapperEmbedding, (ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!); + Assert.Equal("Dynamic Mapper Hotel", localGetResult["HotelName"]); + Assert.Equal("This is a dynamic mapper hotel", localGetResult["Description"]); + Assert.Equal(new[] { "dynamic" }, localGetResult["Tags"]); + Assert.False((bool?)localGetResult["ParkingIncluded"]); + Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), localGetResult["LastRenovationDate"]); + Assert.Equal(3.6d, localGetResult["Rating"]); + Assert.Equal(dynamicMapperEmbedding, (ReadOnlyMemory)localGetResult["DescriptionEmbedding"]!); } private async Task CreateTestHotelAsync(string hotelId) => new() diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 7466797a78d6..26a01f7bd38e 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -25,7 +25,7 @@ public class AzureCosmosDBMongoDBVectorStoreRecordCollectionTests(AzureCosmosDBM public async Task CollectionExistsReturnsCollectionStateAsync(string collectionName, bool expectedExists) { // Arrange - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, collectionName); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, collectionName); // Act var actual = await sut.CollectionExistsAsync(); @@ -38,7 +38,7 @@ public async Task CollectionExistsReturnsCollectionStateAsync(string collectionN public async Task ItCanCreateCollectionAsync() { // Arrange - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); // Act await sut.CreateCollectionAsync(); @@ -65,7 +65,7 @@ public async Task ItCanCreateCollectionUpsertAndGetAsync(bool includeVectors, bo VectorStoreRecordDefinition = useRecordDefinition ? 
fixture.HotelVectorStoreRecordDefinition : null }; - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, collectionName); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, collectionName); var record = this.CreateTestHotel(HotelId); @@ -108,7 +108,7 @@ public async Task ItCanDeleteCollectionAsync() const string TempCollectionName = "temp-test"; await fixture.MongoDatabase.CreateCollectionAsync(TempCollectionName); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, TempCollectionName); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, TempCollectionName); Assert.True(await sut.CollectionExistsAsync()); @@ -124,7 +124,7 @@ public async Task ItCanGetAndDeleteRecordAsync() { // Arrange const string HotelId = "55555555-5555-5555-5555-555555555555"; - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); var record = this.CreateTestHotel(HotelId); @@ -151,7 +151,7 @@ public async Task ItCanGetAndDeleteBatchAsync() const string HotelId2 = "22222222-2222-2222-2222-222222222222"; const string HotelId3 = "33333333-3333-3333-3333-333333333333"; - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); var record1 = this.CreateTestHotel(HotelId1); var record2 = this.CreateTestHotel(HotelId2); @@ -180,7 +180,7 @@ public async Task ItCanUpsertRecordAsync() { // Arrange const string HotelId = "55555555-5555-5555-5555-555555555555"; - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); var record = this.CreateTestHotel(HotelId); @@ -218,7 +218,7 @@ public async Task UpsertWithModelWorksCorrectlyAsync() var model = new TestModel { Id = "key", HotelName = "Test Name" }; - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( fixture.MongoDatabase, fixture.TestCollection, new() { VectorStoreRecordDefinition = definition }); @@ -241,7 +241,7 @@ public async Task UpsertWithVectorStoreModelWorksCorrectlyAsync() // Arrange var model = new VectorStoreTestModel { HotelId = "key", HotelName = "Test Name" }; - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); // Act var upsertResult = await sut.UpsertAsync(model); @@ -270,7 +270,7 @@ public async Task UpsertWithBsonModelWorksCorrectlyAsync() var model = new BsonTestModel { Id = "key", HotelName = "Test Name" }; - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( fixture.MongoDatabase, fixture.TestCollection, new() { VectorStoreRecordDefinition = definition }); @@ -293,7 +293,7 @@ public async Task UpsertWithBsonVectorStoreModelWorksCorrectlyAsync() // Arrange var model = new BsonVectorStoreTestModel { HotelId = "key", HotelName = "Test Name" }; - var sut = new 
AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); // Act var upsertResult = await sut.UpsertAsync(model); @@ -313,7 +313,7 @@ public async Task UpsertWithBsonVectorStoreWithNameModelWorksCorrectlyAsync() // Arrange var model = new BsonVectorStoreWithNameTestModel { Id = "key", HotelName = "Test Name" }; - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); // Act var upsertResult = await sut.UpsertAsync(model); @@ -336,7 +336,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() var hotel3 = this.CreateTestHotel(hotelId: "key3", embedding: new[] { 20f, 20f, 20f, 20f }); var hotel4 = this.CreateTestHotel(hotelId: "key4", embedding: new[] { -1000f, -1000f, -1000f, -1000f }); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, "TestVectorizedSearch"); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, "TestVectorizedSearch"); await sut.CreateCollectionIfNotExistsAsync(); @@ -367,7 +367,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() var hotel3 = this.CreateTestHotel(hotelId: "key3", embedding: new[] { 20f, 20f, 20f, 20f }); var hotel4 = this.CreateTestHotel(hotelId: "key4", embedding: new[] { -1000f, -1000f, -1000f, -1000f }); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, "TestVectorizedSearchWithOffset"); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, "TestVectorizedSearchWithOffset"); await sut.CreateCollectionIfNotExistsAsync(); @@ -399,7 +399,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() var hotel3 = this.CreateTestHotel(hotelId: "key3", embedding: new[] { 20f, 20f, 20f, 20f }); var hotel4 = this.CreateTestHotel(hotelId: "key4", embedding: new[] { -1000f, -1000f, -1000f, -1000f }); - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, "TestVectorizedSearchWithOffset"); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, "TestVectorizedSearchWithOffset"); await sut.CreateCollectionIfNotExistsAsync(); @@ -423,48 +423,45 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() } [Fact(Skip = SkipReason)] - public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() { // Arrange - var options = new AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions> + var options = new AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions> { VectorStoreRecordDefinition = fixture.HotelVectorStoreRecordDefinition }; - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection>(fixture.MongoDatabase, fixture.TestCollection, options); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection>(fixture.MongoDatabase, fixture.TestCollection, options); // Act - var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel("GenericMapper-1") + var upsertResult = await sut.UpsertAsync(new Dictionary { - Data = - { - { "HotelName", "Generic Mapper Hotel" }, - { "Description", "This is a generic mapper hotel" }, - { "Tags", new string[] { "generic" } }, - { "ParkingIncluded", false }, - 
{ "Timestamp", new DateTime(1970, 1, 18, 0, 0, 0).ToUniversalTime() }, - { "HotelRating", 3.6f } - }, - Vectors = - { - { "DescriptionEmbedding", new ReadOnlyMemory([30f, 31f, 32f, 33f]) } - } + ["HotelId"] = "DynamicMapper-1", + + ["HotelName"] = "Dynamic Mapper Hotel", + ["Description"] = "This is a dynamic mapper hotel", + ["Tags"] = new string[] { "dynamic" }, + ["ParkingIncluded"] = false, + ["Timestamp"] = new DateTime(1970, 1, 18, 0, 0, 0).ToUniversalTime(), + ["HotelRating"] = 3.6f, + + ["DescriptionEmbedding"] = new ReadOnlyMemory([30f, 31f, 32f, 33f]) }); - var localGetResult = await sut.GetAsync("GenericMapper-1", new GetRecordOptions { IncludeVectors = true }); + var localGetResult = await sut.GetAsync("DynamicMapper-1", new GetRecordOptions { IncludeVectors = true }); // Assert Assert.NotNull(upsertResult); - Assert.Equal("GenericMapper-1", upsertResult); + Assert.Equal("DynamicMapper-1", upsertResult); Assert.NotNull(localGetResult); - Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); - Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); - Assert.Equal(new[] { "generic" }, localGetResult.Data["Tags"]); - Assert.False((bool?)localGetResult.Data["ParkingIncluded"]); - Assert.Equal(new DateTime(1970, 1, 18, 0, 0, 0).ToUniversalTime(), localGetResult.Data["Timestamp"]); - Assert.Equal(3.6f, localGetResult.Data["HotelRating"]); - Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + Assert.Equal("Dynamic Mapper Hotel", localGetResult["HotelName"]); + Assert.Equal("This is a dynamic mapper hotel", localGetResult["Description"]); + Assert.Equal(new[] { "dynamic" }, localGetResult["Tags"]); + Assert.False((bool?)localGetResult["ParkingIncluded"]); + Assert.Equal(new DateTime(1970, 1, 18, 0, 0, 0).ToUniversalTime(), localGetResult["Timestamp"]); + Assert.Equal(3.6f, localGetResult["HotelRating"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult["DescriptionEmbedding"]!).ToArray()); } #region private diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index c66afc788e59..91d3daf3cc4c 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -13,10 +13,11 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureCosmosDBNoSQL; +#pragma warning disable CA1859 // Use concrete types when possible for improved performance #pragma warning disable CS0618 // VectorSearchFilter is obsolete /// -/// Integration tests for class. +/// Integration tests for class. 
/// [Collection("AzureCosmosDBNoSQLVectorStoreCollection")] public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollectionTests(AzureCosmosDBNoSQLVectorStoreFixture fixture) @@ -27,7 +28,7 @@ public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollectionTests(AzureCosm public async Task ItCanCreateCollectionAsync() { // Arrange - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, "test-create-collection"); + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, "test-create-collection"); // Act await sut.CreateCollectionAsync(); @@ -42,7 +43,7 @@ public async Task ItCanCreateCollectionAsync() public async Task CollectionExistsReturnsCollectionStateAsync(string collectionName, bool expectedExists) { // Arrange - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, collectionName); + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, collectionName); if (expectedExists) { @@ -75,7 +76,7 @@ public async Task ItCanCreateCollectionUpsertAndGetAsync(bool includeVectors, bo VectorStoreRecordDefinition = useRecordDefinition ? this.GetTestHotelRecordDefinition() : null }; - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, collectionName); + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, collectionName); var record = this.CreateTestHotel(HotelId); @@ -118,7 +119,7 @@ public async Task ItCanDeleteCollectionAsync() const string TempCollectionName = "test-delete-collection"; await fixture.Database!.CreateContainerAsync(new ContainerProperties(TempCollectionName, "/id")); - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, TempCollectionName); + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, TempCollectionName); Assert.True(await sut.CollectionExistsAsync()); @@ -137,7 +138,7 @@ public async Task ItCanGetAndDeleteRecordAsync(string collectionName, IndexingMo { // Arrange const string HotelId = "55555555-5555-5555-5555-555555555555"; - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( fixture.Database!, collectionName, new() { IndexingMode = indexingMode, Automatic = indexingMode != IndexingMode.None }); @@ -169,7 +170,7 @@ public async Task ItCanGetAndDeleteRecordWithPartitionKeyAsync() const string HotelName = "Test Hotel Name"; IVectorStoreRecordCollection sut = - new AzureCosmosDBNoSQLVectorStoreRecordCollection( + new AzureCosmosDBNoSQLVectorStoreRecordCollection( fixture.Database!, "delete-with-partition-key", new() { PartitionKeyPropertyName = "HotelName" }); @@ -204,7 +205,7 @@ public async Task ItCanGetAndDeleteBatchAsync() const string HotelId2 = "22222222-2222-2222-2222-222222222222"; const string HotelId3 = "33333333-3333-3333-3333-333333333333"; - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, "get-and-delete-batch"); + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, "get-and-delete-batch"); await sut.CreateCollectionAsync(); @@ -235,7 +236,7 @@ public async Task ItCanUpsertRecordAsync() { // Arrange const string HotelId = "55555555-5555-5555-5555-555555555555"; - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, "upsert-record"); + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, "upsert-record"); await sut.CreateCollectionAsync(); @@ -269,7 +270,7 @@ public async 
Task VectorizedSearchReturnsValidResultsByDefaultAsync() var hotel3 = this.CreateTestHotel(hotelId: "key3", embedding: new[] { 20f, 20f, 20f, 20f }); var hotel4 = this.CreateTestHotel(hotelId: "key4", embedding: new[] { -1000f, -1000f, -1000f, -1000f }); - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, "vector-search-default"); + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, "vector-search-default"); await sut.CreateCollectionIfNotExistsAsync(); @@ -300,7 +301,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() var hotel3 = this.CreateTestHotel(hotelId: "key3", embedding: new[] { 20f, 20f, 20f, 20f }); var hotel4 = this.CreateTestHotel(hotelId: "key4", embedding: new[] { -1000f, -1000f, -1000f, -1000f }); - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, "vector-search-with-offset"); + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, "vector-search-with-offset"); await sut.CreateCollectionIfNotExistsAsync(); @@ -333,7 +334,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync(VectorSearc var hotel3 = this.CreateTestHotel(hotelId: "key3", embedding: new[] { 20f, 20f, 20f, 20f }); var hotel4 = this.CreateTestHotel(hotelId: "key4", embedding: new[] { -1000f, -1000f, -1000f, -1000f }); - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, "vector-search-with-filter"); + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, "vector-search-with-filter"); await sut.CreateCollectionIfNotExistsAsync(); @@ -353,35 +354,32 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync(VectorSearc } [Fact(Skip = SkipReason)] - public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() { // Arrange const string HotelId = "55555555-5555-5555-5555-555555555555"; - var options = new AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions> + var options = new AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions> { VectorStoreRecordDefinition = this.GetTestHotelRecordDefinition() }; - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection>(fixture.Database!, "generic-mapper", options); + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection>(fixture.Database!, "dynamic-mapper", options); await sut.CreateCollectionAsync(); // Act - var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel(HotelId) + var upsertResult = await sut.UpsertAsync(new Dictionary { - Data = - { - { "HotelName", "Generic Mapper Hotel" }, - { "Description", "This is a generic mapper hotel" }, - { "Tags", new List { "generic" } }, - { "parking_is_included", false }, - { "Timestamp", new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero) }, - { "HotelRating", 3.6f } - }, - Vectors = - { - { "DescriptionEmbedding", new ReadOnlyMemory([30f, 31f, 32f, 33f]) } - } + ["HotelId"] = HotelId, + + ["HotelName"] = "Dynamic Mapper Hotel", + ["Description"] = "This is a dynamic mapper hotel", + ["Tags"] = new List { "dynamic" }, + ["parking_is_included"] = false, + ["Timestamp"] = new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), + ["HotelRating"] = 3.6f, + + ["DescriptionEmbedding"] = new ReadOnlyMemory([30f, 31f, 32f, 33f]) }); var localGetResult = await sut.GetAsync(HotelId, new GetRecordOptions { IncludeVectors = true }); @@ -391,13 +389,13 @@ public async Task 
ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() Assert.Equal(HotelId, upsertResult); Assert.NotNull(localGetResult); - Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); - Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); - Assert.Equal(new List { "generic" }, localGetResult.Data["Tags"]); - Assert.False((bool?)localGetResult.Data["parking_is_included"]); - Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), localGetResult.Data["Timestamp"]); - Assert.Equal(3.6f, localGetResult.Data["HotelRating"]); - Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + Assert.Equal("Dynamic Mapper Hotel", localGetResult["HotelName"]); + Assert.Equal("This is a dynamic mapper hotel", localGetResult["Description"]); + Assert.Equal(new List { "dynamic" }, localGetResult["Tags"]); + Assert.False((bool?)localGetResult["parking_is_included"]); + Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), localGetResult["Timestamp"]); + Assert.Equal(3.6f, localGetResult["HotelRating"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult["DescriptionEmbedding"]!).ToArray()); } public static TheoryData> VectorizedSearchWithFilterData => new() diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs index 8dc1698f1efd..9b2e339ac4fb 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs @@ -26,7 +26,7 @@ public class MongoDBVectorStoreRecordCollectionTests(MongoDBVectorStoreFixture f public async Task CollectionExistsReturnsCollectionStateAsync(string collectionName, bool expectedExists) { // Arrange - var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, collectionName); + var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, collectionName); // Act var actual = await sut.CollectionExistsAsync(); @@ -39,7 +39,7 @@ public async Task CollectionExistsReturnsCollectionStateAsync(string collectionN public async Task ItCanCreateCollectionAsync() { // Arrange - var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); // Act await sut.CreateCollectionAsync(); @@ -66,7 +66,7 @@ public async Task ItCanCreateCollectionUpsertAndGetAsync(bool includeVectors, bo VectorStoreRecordDefinition = useRecordDefinition ? 
fixture.HotelVectorStoreRecordDefinition : null }; - var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, collectionName); + var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, collectionName); var record = this.CreateTestHotel(HotelId); @@ -109,7 +109,7 @@ public async Task ItCanDeleteCollectionAsync() const string TempCollectionName = "temp-test"; await fixture.MongoDatabase.CreateCollectionAsync(TempCollectionName); - var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, TempCollectionName); + var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, TempCollectionName); Assert.True(await sut.CollectionExistsAsync()); @@ -125,7 +125,7 @@ public async Task ItCanGetAndDeleteRecordAsync() { // Arrange const string HotelId = "55555555-5555-5555-5555-555555555555"; - var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); var record = this.CreateTestHotel(HotelId); @@ -152,7 +152,7 @@ public async Task ItCanGetAndDeleteBatchAsync() const string HotelId2 = "22222222-2222-2222-2222-222222222222"; const string HotelId3 = "33333333-3333-3333-3333-333333333333"; - var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); var record1 = this.CreateTestHotel(HotelId1); var record2 = this.CreateTestHotel(HotelId2); @@ -181,7 +181,7 @@ public async Task ItCanUpsertRecordAsync() { // Arrange const string HotelId = "55555555-5555-5555-5555-555555555555"; - var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); var record = this.CreateTestHotel(HotelId); @@ -219,7 +219,7 @@ public async Task UpsertWithModelWorksCorrectlyAsync() var model = new TestModel { Id = "key", HotelName = "Test Name" }; - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( fixture.MongoDatabase, fixture.TestCollection, new() { VectorStoreRecordDefinition = definition }); @@ -242,7 +242,7 @@ public async Task UpsertWithVectorStoreModelWorksCorrectlyAsync() // Arrange var model = new VectorStoreTestModel { HotelId = "key", HotelName = "Test Name" }; - var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); // Act var upsertResult = await sut.UpsertAsync(model); @@ -271,7 +271,7 @@ public async Task UpsertWithBsonModelWorksCorrectlyAsync() var model = new BsonTestModel { Id = "key", HotelName = "Test Name" }; - var sut = new MongoDBVectorStoreRecordCollection( + var sut = new MongoDBVectorStoreRecordCollection( fixture.MongoDatabase, fixture.TestCollection, new() { VectorStoreRecordDefinition = definition }); @@ -294,7 +294,7 @@ public async Task UpsertWithBsonVectorStoreModelWorksCorrectlyAsync() // Arrange var model = new BsonVectorStoreTestModel { HotelId = "key", HotelName = "Test Name" }; - var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); // Act var upsertResult = await sut.UpsertAsync(model); @@ -314,7 +314,7 @@ 
public async Task UpsertWithBsonVectorStoreWithNameModelWorksCorrectlyAsync() // Arrange var model = new BsonVectorStoreWithNameTestModel { Id = "key", HotelName = "Test Name" }; - var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); // Act var upsertResult = await sut.UpsertAsync(model); @@ -337,7 +337,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() var hotel3 = this.CreateTestHotel(hotelId: "key3", embedding: new[] { 20f, 20f, 20f, 20f }); var hotel4 = this.CreateTestHotel(hotelId: "key4", embedding: new[] { -1000f, -1000f, -1000f, -1000f }); - var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, "TestVectorizedSearch"); + var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, "TestVectorizedSearch"); await sut.CreateCollectionIfNotExistsAsync(); @@ -368,7 +368,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() var hotel3 = this.CreateTestHotel(hotelId: "key3", embedding: new[] { 20f, 20f, 20f, 20f }); var hotel4 = this.CreateTestHotel(hotelId: "key4", embedding: new[] { -1000f, -1000f, -1000f, -1000f }); - var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, "TestVectorizedSearchWithOffset"); + var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, "TestVectorizedSearchWithOffset"); await sut.CreateCollectionIfNotExistsAsync(); @@ -400,7 +400,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() var hotel3 = this.CreateTestHotel(hotelId: "key3", embedding: new[] { 20f, 20f, 20f, 20f }); var hotel4 = this.CreateTestHotel(hotelId: "key4", embedding: new[] { -1000f, -1000f, -1000f, -1000f }); - var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, "TestVectorizedSearchWithOffset"); + var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, "TestVectorizedSearchWithOffset"); await sut.CreateCollectionIfNotExistsAsync(); @@ -424,48 +424,45 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() } [Fact(Skip = SkipReason)] - public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() { // Arrange - var options = new MongoDBVectorStoreRecordCollectionOptions> + var options = new MongoDBVectorStoreRecordCollectionOptions> { VectorStoreRecordDefinition = fixture.HotelVectorStoreRecordDefinition }; - var sut = new MongoDBVectorStoreRecordCollection>(fixture.MongoDatabase, fixture.TestCollection, options); + var sut = new MongoDBVectorStoreRecordCollection>(fixture.MongoDatabase, fixture.TestCollection, options); // Act - var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel("GenericMapper-1") + var upsertResult = await sut.UpsertAsync(new Dictionary { - Data = - { - { "HotelName", "Generic Mapper Hotel" }, - { "Description", "This is a generic mapper hotel" }, - { "Tags", new string[] { "generic" } }, - { "ParkingIncluded", false }, - { "Timestamp", new DateTime(1970, 1, 18, 0, 0, 0).ToUniversalTime() }, - { "HotelRating", 3.6f } - }, - Vectors = - { - { "DescriptionEmbedding", new ReadOnlyMemory([30f, 31f, 32f, 33f]) } - } + ["HotelId"] = "DynamicMapper-1", + + ["HotelName"] = "Dynamic Mapper Hotel", + ["Description"] = "This is a dynamic mapper hotel", + ["Tags"] = new string[] { "dynamic" }, + ["ParkingIncluded"] = false, + ["Timestamp"] = new DateTime(1970, 1, 
18, 0, 0, 0).ToUniversalTime(), + ["HotelRating"] = 3.6f, + + ["DescriptionEmbedding"] = new ReadOnlyMemory([30f, 31f, 32f, 33f]) }); - var localGetResult = await sut.GetAsync("GenericMapper-1", new GetRecordOptions { IncludeVectors = true }); + var localGetResult = await sut.GetAsync("DynamicMapper-1", new GetRecordOptions { IncludeVectors = true }); // Assert Assert.NotNull(upsertResult); - Assert.Equal("GenericMapper-1", upsertResult); + Assert.Equal("DynamicMapper-1", upsertResult); Assert.NotNull(localGetResult); - Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); - Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); - Assert.Equal(new[] { "generic" }, localGetResult.Data["Tags"]); - Assert.False((bool?)localGetResult.Data["ParkingIncluded"]); - Assert.Equal(new DateTime(1970, 1, 18, 0, 0, 0).ToUniversalTime(), localGetResult.Data["Timestamp"]); - Assert.Equal(3.6f, localGetResult.Data["HotelRating"]); - Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + Assert.Equal("Dynamic Mapper Hotel", localGetResult["HotelName"]); + Assert.Equal("This is a dynamic mapper hotel", localGetResult["Description"]); + Assert.Equal(new[] { "dynamic" }, localGetResult["Tags"]); + Assert.False((bool?)localGetResult["ParkingIncluded"]); + Assert.Equal(new DateTime(1970, 1, 18, 0, 0, 0).ToUniversalTime(), localGetResult["Timestamp"]); + Assert.Equal(3.6f, localGetResult["HotelRating"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult["DescriptionEmbedding"]!).ToArray()); } #region private diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs index 6077f6605be0..233e39b22a23 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs @@ -281,31 +281,28 @@ public async Task ItCanReadManuallyInsertedRecordAsync() } [Fact] - public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() { const int HotelId = 5; - var sut = fixture.GetCollection>("GenericMapperWithNumericKey", GetVectorStoreRecordDefinition()); + var sut = fixture.GetCollection>("DynamicMapperWithNumericKey", GetVectorStoreRecordDefinition()); await sut.CreateCollectionAsync(); var record = new PostgresHotel { HotelId = (int)HotelId, HotelName = "Hotel 1", HotelCode = 1, ParkingIncluded = true, HotelRating = 4.5f, Tags = ["tag1", "tag2"] }; // Act - var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel(HotelId) + var upsertResult = await sut.UpsertAsync(new Dictionary { - Data = - { - { "HotelName", "Generic Mapper Hotel" }, - { "Description", "This is a generic mapper hotel" }, - { "HotelCode", 1 }, - { "ParkingIncluded", true }, - { "HotelRating", 3.6f } - }, - Vectors = - { - { "DescriptionEmbedding", new ReadOnlyMemory([30f, 31f, 32f, 33f]) } - } + ["HotelId"] = HotelId, + + ["HotelName"] = "Dynamic Mapper Hotel", + ["Description"] = "This is a dynamic mapper hotel", + ["HotelCode"] = 1, + ["ParkingIncluded"] = true, + ["HotelRating"] = 3.6f, + + ["DescriptionEmbedding"] = new ReadOnlyMemory([30f, 31f, 32f, 33f]) }); var localGetResult = await sut.GetAsync(HotelId, new GetRecordOptions {
IncludeVectors = true }); @@ -314,35 +311,32 @@ public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() Assert.Equal(HotelId, upsertResult); Assert.NotNull(localGetResult); - Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); - Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); - Assert.True((bool?)localGetResult.Data["ParkingIncluded"]); - Assert.Equal(3.6f, localGetResult.Data["HotelRating"]); - Assert.Equal([30f, 31f, 32f, 33f], ((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + Assert.Equal("Dynamic Mapper Hotel", localGetResult["HotelName"]); + Assert.Equal("This is a dynamic mapper hotel", localGetResult["Description"]); + Assert.True((bool?)localGetResult["ParkingIncluded"]); + Assert.Equal(3.6f, localGetResult["HotelRating"]); + Assert.Equal([30f, 31f, 32f, 33f], ((ReadOnlyMemory)localGetResult["DescriptionEmbedding"]!).ToArray()); // Act - update with null embeddings // Act - var upsertResult2 = await sut.UpsertAsync(new VectorStoreGenericDataModel(HotelId) + var upsertResult2 = await sut.UpsertAsync(new Dictionary { - Data = - { - { "HotelName", "Generic Mapper Hotel" }, - { "Description", "This is a generic mapper hotel" }, - { "HotelCode", 1 }, - { "ParkingIncluded", true }, - { "HotelRating", 3.6f } - }, - Vectors = - { - { "DescriptionEmbedding", null } - } + ["HotelId"] = HotelId, + + ["HotelName"] = "Dynamic Mapper Hotel", + ["Description"] = "This is a dynamic mapper hotel", + ["HotelCode"] = 1, + ["ParkingIncluded"] = true, + ["HotelRating"] = 3.6f, + + ["DescriptionEmbedding"] = null }); var localGetResult2 = await sut.GetAsync(HotelId, new GetRecordOptions { IncludeVectors = true }); // Assert Assert.NotNull(localGetResult2); - Assert.Null(localGetResult2.Vectors["DescriptionEmbedding"]); + Assert.Null(localGetResult2["DescriptionEmbedding"]); } [Theory] diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/CommonQdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/CommonQdrantVectorStoreRecordCollectionTests.cs index 3bafffc6a3bb..ebdeb14f4d02 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/CommonQdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/CommonQdrantVectorStoreRecordCollectionTests.cs @@ -21,7 +21,7 @@ public class CommonQdrantVectorStoreRecordCollectionTests(QdrantVectorStoreFixtu protected override IVectorStoreRecordCollection GetTargetRecordCollection(string recordCollectionName, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition) { - return new QdrantVectorStoreRecordCollection(fixture.QdrantClient, recordCollectionName, new() + return new QdrantVectorStoreRecordCollection(fixture.QdrantClient, recordCollectionName, new() { HasNamedVectors = true, VectorStoreRecordDefinition = vectorStoreRecordDefinition diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantTextSearchTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantTextSearchTests.cs index fcf164bfc449..716432ba63f1 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantTextSearchTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantTextSearchTests.cs @@ -30,7 +30,7 @@ public override Task CreateTextSearchAsync() HasNamedVectors = true, VectorStoreRecordDefinition = fixture.HotelVectorStoreRecordDefinition, }; - var vectorSearch = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, "namedVectorsHotels", options); + var vectorSearch = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, "namedVectorsHotels", options); var stringMapper = new HotelInfoTextSearchStringMapper(); var resultMapper = new HotelInfoTextSearchResultMapper(); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs index 6b98bf10eb03..d805c48f530f 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs @@ -15,10 +15,11 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Qdrant; +#pragma warning disable CA1859 // Use concrete types when possible for improved performance #pragma warning disable CS0618 // VectorSearchFilter is obsolete /// -/// Contains tests for the class. +/// Contains tests for the class. /// /// Used for logging. /// Qdrant setup and teardown. @@ -31,7 +32,7 @@ public sealed class QdrantVectorStoreRecordCollectionTests(ITestOutputHelper out public async Task CollectionExistsReturnsCollectionStateAsync(string collectionName, bool expectedExists) { // Arrange. - var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, collectionName); + var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, collectionName); // Act. var actual = await sut.CollectionExistsAsync(); @@ -57,7 +58,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool hasNamedVec HasNamedVectors = hasNamedVectors, VectorStoreRecordDefinition = useRecordDefinition ? fixture.HotelVectorStoreRecordDefinition : null }; - var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, testCollectionName, options); + var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, testCollectionName, options); var record = await this.CreateTestHotelAsync(30, fixture.EmbeddingGenerator); @@ -115,7 +116,7 @@ await fixture.QdrantClient.CreateCollectionAsync( tempCollectionName, new VectorParams { Size = 4, Distance = Distance.Cosine }); - var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, tempCollectionName); + var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, tempCollectionName); // Act await sut.DeleteCollectionAsync(); @@ -137,7 +138,7 @@ public async Task ItCanUpsertDocumentToVectorStoreAsync(bool useRecordDefinition HasNamedVectors = hasNamedVectors, VectorStoreRecordDefinition = useRecordDefinition ? 
fixture.HotelVectorStoreRecordDefinition : null }; - var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, collectionName, options); + var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, collectionName, options); var record = await this.CreateTestHotelAsync(20, fixture.EmbeddingGenerator); @@ -168,7 +169,7 @@ public async Task ItCanUpsertAndRemoveDocumentWithGuidIdToVectorStoreAsync() { // Arrange. var options = new QdrantVectorStoreRecordCollectionOptions { HasNamedVectors = false }; - IVectorStoreRecordCollection sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, "singleVectorGuidIdHotels", options); + IVectorStoreRecordCollection sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, "singleVectorGuidIdHotels", options); var record = new HotelInfoWithGuidId { @@ -216,7 +217,7 @@ public async Task ItCanGetDocumentFromVectorStoreAsync(bool useRecordDefinition, HasNamedVectors = hasNamedVectors, VectorStoreRecordDefinition = useRecordDefinition ? fixture.HotelVectorStoreRecordDefinition : null }; - var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, collectionName, options); + var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, collectionName, options); // Act. var getResult = await sut.GetAsync(11, new GetRecordOptions { IncludeVectors = withEmbeddings }); @@ -259,7 +260,7 @@ public async Task ItCanGetDocumentWithGuidIdFromVectorStoreAsync(bool useRecordD HasNamedVectors = false, VectorStoreRecordDefinition = useRecordDefinition ? fixture.HotelWithGuidIdVectorStoreRecordDefinition : null }; - var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, "singleVectorGuidIdHotels", options); + var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, "singleVectorGuidIdHotels", options); // Act. var getResult = await sut.GetAsync(Guid.Parse("11111111-1111-1111-1111-111111111111"), new GetRecordOptions { IncludeVectors = withEmbeddings }); @@ -286,7 +287,7 @@ public async Task ItCanGetManyDocumentsFromVectorStoreAsync() { // Arrange var options = new QdrantVectorStoreRecordCollectionOptions { HasNamedVectors = true }; - var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, "namedVectorsHotels", options); + var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, "namedVectorsHotels", options); // Act // Also include one non-existing key to test that the operation does not fail for these and returns only the found ones. @@ -317,7 +318,7 @@ public async Task ItCanRemoveDocumentFromVectorStoreAsync(bool useRecordDefiniti HasNamedVectors = hasNamedVectors, VectorStoreRecordDefinition = useRecordDefinition ? fixture.HotelVectorStoreRecordDefinition : null }; - var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, collectionName, options); + var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, collectionName, options); await sut.UpsertAsync(await this.CreateTestHotelAsync(20, fixture.EmbeddingGenerator)); @@ -343,7 +344,7 @@ public async Task ItCanRemoveManyDocumentsFromVectorStoreAsync(bool useRecordDef HasNamedVectors = hasNamedVectors, VectorStoreRecordDefinition = useRecordDefinition ? 
fixture.HotelVectorStoreRecordDefinition : null }; - var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, collectionName, options); + var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, collectionName, options); await sut.UpsertAsync(await this.CreateTestHotelAsync(20, fixture.EmbeddingGenerator)); @@ -360,7 +361,7 @@ public async Task ItReturnsNullWhenGettingNonExistentRecordAsync() { // Arrange var options = new QdrantVectorStoreRecordCollectionOptions { HasNamedVectors = false }; - var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, "singleVectorHotels", options); + var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, "singleVectorHotels", options); // Act & Assert Assert.Null(await sut.GetAsync(15, new GetRecordOptions { IncludeVectors = true })); @@ -371,7 +372,7 @@ public async Task ItThrowsMappingExceptionForFailedMapperAsync() { // Arrange var options = new QdrantVectorStoreRecordCollectionOptions { PointStructCustomMapper = new FailingMapper() }; - var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, "singleVectorHotels", options); + var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, "singleVectorHotels", options); // Act & Assert await Assert.ThrowsAsync(async () => await sut.GetAsync(11, new GetRecordOptions { IncludeVectors = true })); @@ -394,7 +395,7 @@ public async Task ItCanSearchWithFilterAsync(bool useRecordDefinition, string co HasNamedVectors = hasNamedVectors, VectorStoreRecordDefinition = useRecordDefinition ? fixture.HotelVectorStoreRecordDefinition : null }; - var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, collectionName, options); + var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, collectionName, options); // Act. 
var vector = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"); @@ -421,59 +422,56 @@ public async Task ItCanSearchWithFilterAsync(bool useRecordDefinition, string co } [Fact] - public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() { // Arrange - var options = new QdrantVectorStoreRecordCollectionOptions> + var options = new QdrantVectorStoreRecordCollectionOptions> { VectorStoreRecordDefinition = fixture.HotelVectorStoreRecordDefinition }; - var sut = new QdrantVectorStoreRecordCollection>(fixture.QdrantClient, "singleVectorHotels", options); + var sut = new QdrantVectorStoreRecordCollection>(fixture.QdrantClient, "singleVectorHotels", options); // Act var baseSetGetResult = await sut.GetAsync(11, new GetRecordOptions { IncludeVectors = true }); - var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel(40) + var upsertResult = await sut.UpsertAsync(new Dictionary { - Data = - { - { "HotelName", "Generic Mapper Hotel" }, - { "HotelCode", 40 }, - { "ParkingIncluded", false }, - { "HotelRating", 3.6d }, - { "Tags", new string[] { "generic" } }, - { "Description", "This is a generic mapper hotel" }, - }, - Vectors = - { - { "DescriptionEmbedding", await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("This is a generic mapper hotel") } - } + ["HotelId"] = 40, + + ["HotelName"] = "Dynamic Mapper Hotel", + ["HotelCode"] = 40, + ["ParkingIncluded"] = false, + ["HotelRating"] = 3.6d, + ["Tags"] = new string[] { "dynamic" }, + ["Description"] = "This is a dynamic mapper hotel", + + ["DescriptionEmbedding"] = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("This is a dynamic mapper hotel") }); var localGetResult = await sut.GetAsync(40, new GetRecordOptions { IncludeVectors = true }); // Assert Assert.NotNull(baseSetGetResult); - Assert.Equal(11ul, baseSetGetResult.Key); - Assert.Equal("My Hotel 11", baseSetGetResult.Data["HotelName"]); - Assert.Equal(11, baseSetGetResult.Data["HotelCode"]); - Assert.True((bool)baseSetGetResult.Data["ParkingIncluded"]!); - Assert.Equal(4.5f, baseSetGetResult.Data["HotelRating"]); - Assert.Equal(new[] { "t11.1", "t11.2" }, ((List)baseSetGetResult.Data["Tags"]!).ToArray()); - Assert.Equal("This is a great hotel.", baseSetGetResult.Data["Description"]); - Assert.NotNull(baseSetGetResult.Vectors["DescriptionEmbedding"]); - Assert.IsType>(baseSetGetResult.Vectors["DescriptionEmbedding"]); + Assert.Equal(11ul, baseSetGetResult["HotelId"]); + Assert.Equal("My Hotel 11", baseSetGetResult["HotelName"]); + Assert.Equal(11, baseSetGetResult["HotelCode"]); + Assert.True((bool)baseSetGetResult["ParkingIncluded"]!); + Assert.Equal(4.5f, baseSetGetResult["HotelRating"]); + Assert.Equal(new[] { "t11.1", "t11.2" }, ((List)baseSetGetResult["Tags"]!).ToArray()); + Assert.Equal("This is a great hotel.", baseSetGetResult["Description"]); + Assert.NotNull(baseSetGetResult["DescriptionEmbedding"]); + Assert.IsType>(baseSetGetResult["DescriptionEmbedding"]); Assert.Equal(40ul, upsertResult); Assert.NotNull(localGetResult); - Assert.Equal(40ul, localGetResult.Key); - Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); - Assert.Equal(40, localGetResult.Data["HotelCode"]); - Assert.False((bool)localGetResult.Data["ParkingIncluded"]!); - Assert.Equal(3.6f, localGetResult.Data["HotelRating"]); - Assert.Equal(new[] { "generic" }, ((List)localGetResult.Data["Tags"]!).ToArray()); - Assert.Equal("This is a generic mapper hotel", 
localGetResult.Data["Description"]); - Assert.NotNull(localGetResult.Vectors["DescriptionEmbedding"]); - Assert.IsType>(localGetResult.Vectors["DescriptionEmbedding"]); + Assert.Equal(40ul, localGetResult["HotelId"]); + Assert.Equal("Dynamic Mapper Hotel", localGetResult["HotelName"]); + Assert.Equal(40, localGetResult["HotelCode"]); + Assert.False((bool)localGetResult["ParkingIncluded"]!); + Assert.Equal(3.6f, localGetResult["HotelRating"]); + Assert.Equal(new[] { "dynamic" }, ((List)localGetResult["Tags"]!).ToArray()); + Assert.Equal("This is a dynamic mapper hotel", localGetResult["Description"]); + Assert.NotNull(localGetResult["DescriptionEmbedding"]); + Assert.IsType>(localGetResult["DescriptionEmbedding"]); } private async Task CreateTestHotelAsync(uint hotelId, ITextEmbeddingGenerationService embeddingGenerator) diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreTests.cs index 39551054e4bb..a66f1d563b40 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreTests.cs @@ -20,7 +20,7 @@ public async Task ItPassesSettingsFromVectorStoreToCollectionAsync() var collectionFromVS = sut.GetCollection("SettingsPassedCollection"); await collectionFromVS.CreateCollectionIfNotExistsAsync(); - var directCollection = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, "SettingsPassedCollection", new() { HasNamedVectors = true }); + var directCollection = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, "SettingsPassedCollection", new() { HasNamedVectors = true }); await directCollection.UpsertAsync(new QdrantVectorStoreFixture.HotelInfo { HotelId = 1ul, diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/CommonRedisHashsetVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/CommonRedisHashsetVectorStoreRecordCollectionTests.cs index bfdabddb041a..d16c63998b36 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/CommonRedisHashsetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/CommonRedisHashsetVectorStoreRecordCollectionTests.cs @@ -23,7 +23,7 @@ public class CommonRedisHashsetVectorStoreRecordCollectionTests(RedisVectorStore protected override IVectorStoreRecordCollection GetTargetRecordCollection(string recordCollectionName, VectorStoreRecordDefinition?
vectorStoreRecordDefinition) { - return new RedisHashSetVectorStoreRecordCollection(fixture.Database, recordCollectionName + "hashset", new() + return new RedisHashSetVectorStoreRecordCollection(fixture.Database, recordCollectionName + "hashset", new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/CommonRedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/CommonRedisJsonVectorStoreRecordCollectionTests.cs index ba32545c8373..2f79dcce7ef9 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/CommonRedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/CommonRedisJsonVectorStoreRecordCollectionTests.cs @@ -23,7 +23,7 @@ public class CommonRedisJsonVectorStoreRecordCollectionTests(RedisVectorStoreFix protected override IVectorStoreRecordCollection GetTargetRecordCollection(string recordCollectionName, VectorStoreRecordDefinition? vectorStoreRecordDefinition) { - return new RedisJsonVectorStoreRecordCollection(fixture.Database, recordCollectionName + "json", new() + return new RedisJsonVectorStoreRecordCollection(fixture.Database, recordCollectionName + "json", new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs index 2beb8c7f92ba..3464afa6fe58 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; @@ -16,7 +17,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Redis; #pragma warning disable CS0618 // VectorSearchFilter is obsolete /// -/// Contains tests for the class. +/// Contains tests for the class. /// /// Used for logging. /// Redis setup and teardown. @@ -34,7 +35,7 @@ public sealed class RedisHashSetVectorStoreRecordCollectionTests(ITestOutputHelp public async Task CollectionExistsReturnsCollectionStateAsync(string collectionName, bool expectedExists) { // Arrange. - var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, collectionName); + var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, collectionName); // Act. var actual = await sut.CollectionExistsAsync(); @@ -58,7 +59,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe PrefixCollectionNameToKeyNames = true, VectorStoreRecordDefinition = useRecordDefinition ? 
fixture.BasicVectorStoreRecordDefinition : null }; - var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, testCollectionName, options); + var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, testCollectionName, options); // Act await sut.CreateCollectionAsync(); @@ -113,7 +114,7 @@ public async Task ItCanDeleteCollectionAsync() createParams.AddPrefix(tempCollectionName); await fixture.Database.FT().CreateAsync(tempCollectionName, createParams, schema); - var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, tempCollectionName); + var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, tempCollectionName); // Act await sut.DeleteCollectionAsync(); @@ -133,7 +134,7 @@ public async Task ItCanUpsertDocumentToVectorStoreAsync(bool useRecordDefinition PrefixCollectionNameToKeyNames = true, VectorStoreRecordDefinition = useRecordDefinition ? fixture.BasicVectorStoreRecordDefinition : null }; - var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); var record = CreateTestHotel("HUpsert-2", 2); // Act. @@ -166,7 +167,7 @@ public async Task ItCanUpsertManyDocumentsToVectorStoreAsync(bool useRecordDefin PrefixCollectionNameToKeyNames = true, VectorStoreRecordDefinition = useRecordDefinition ? fixture.BasicVectorStoreRecordDefinition : null }; - var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); // Act. var results = sut.UpsertAsync( @@ -205,7 +206,7 @@ public async Task ItCanGetDocumentFromVectorStoreAsync(bool includeVectors, bool PrefixCollectionNameToKeyNames = true, VectorStoreRecordDefinition = useRecordDefinition ? fixture.BasicVectorStoreRecordDefinition : null }; - var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); // Act. var getResult = await sut.GetAsync("HBaseSet-1", new GetRecordOptions { IncludeVectors = includeVectors }); @@ -235,7 +236,7 @@ public async Task ItCanGetManyDocumentsFromVectorStoreAsync() { // Arrange var options = new RedisHashSetVectorStoreRecordCollectionOptions { PrefixCollectionNameToKeyNames = true }; - var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); // Act // Also include one non-existing key to test that the operation does not fail for these and returns only the found ones. @@ -264,7 +265,7 @@ public async Task ItCanRemoveDocumentFromVectorStoreAsync(bool useRecordDefiniti PrefixCollectionNameToKeyNames = true, VectorStoreRecordDefinition = useRecordDefinition ? 
fixture.BasicVectorStoreRecordDefinition : null }; - var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); var record = new RedisBasicFloat32Hotel { HotelId = "HRemove-1", @@ -290,7 +291,7 @@ public async Task ItCanRemoveManyDocumentsFromVectorStoreAsync() { // Arrange var options = new RedisHashSetVectorStoreRecordCollectionOptions { PrefixCollectionNameToKeyNames = true }; - var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); await sut.UpsertAsync(CreateTestHotel("HRemoveMany-1", 1)); await sut.UpsertAsync(CreateTestHotel("HRemoveMany-2", 2)); await sut.UpsertAsync(CreateTestHotel("HRemoveMany-3", 3)); @@ -312,7 +313,7 @@ public async Task ItCanSearchWithFloat32VectorAndFilterAsync(string filterType, { // Arrange var options = new RedisHashSetVectorStoreRecordCollectionOptions { PrefixCollectionNameToKeyNames = true }; - var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); var vector = new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }); var filter = filterType == "equality" ? new VectorSearchFilter().EqualTo("HotelCode", 1) : new VectorSearchFilter().EqualTo("HotelName", "My Hotel 1"); @@ -352,7 +353,7 @@ public async Task ItCanSearchWithFloat32VectorAndTopSkipAsync() { // Arrange var options = new RedisHashSetVectorStoreRecordCollectionOptions { PrefixCollectionNameToKeyNames = true }; - var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName + "TopSkip", options); + var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName + "TopSkip", options); await sut.CreateCollectionIfNotExistsAsync(); await sut.UpsertAsync(new RedisBasicFloat32Hotel { HotelId = "HTopSkip_1", HotelName = "1", Description = "Nice hotel", DescriptionEmbedding = new ReadOnlyMemory([1.0f, 1.0f, 1.0f, 1.0f]) }); await sut.UpsertAsync(new RedisBasicFloat32Hotel { HotelId = "HTopSkip_2", HotelName = "2", Description = "Nice hotel", DescriptionEmbedding = new ReadOnlyMemory([1.0f, 1.0f, 1.0f, 2.0f]) }); @@ -383,7 +384,7 @@ public async Task ItCanSearchWithFloat64VectorAsync(bool includeVectors) { // Arrange var options = new RedisHashSetVectorStoreRecordCollectionOptions { PrefixCollectionNameToKeyNames = true }; - var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName + "Float64", options); + var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName + "Float64", options); await sut.CreateCollectionIfNotExistsAsync(); await sut.UpsertAsync(new RedisBasicFloat64Hotel { HotelId = "HFloat64_1", HotelName = "1", Description = "Nice hotel", DescriptionEmbedding = new ReadOnlyMemory([1.0d, 1.1d, 1.2d, 1.3d]) }); await sut.UpsertAsync(new RedisBasicFloat64Hotel { HotelId = "HFloat64_2", HotelName = "2", Description = "Nice hotel", DescriptionEmbedding = new ReadOnlyMemory([2.0d, 2.1d, 2.2d, 2.3d]) }); @@ -422,7 +423,7 @@ public async Task ItReturnsNullWhenGettingNonExistentRecordAsync() { // Arrange var options = new RedisHashSetVectorStoreRecordCollectionOptions { PrefixCollectionNameToKeyNames = true }; - var sut = new 
RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); // Act & Assert Assert.Null(await sut.GetAsync("HBaseSet-5", new GetRecordOptions { IncludeVectors = true })); @@ -437,63 +438,60 @@ public async Task ItThrowsMappingExceptionForFailedMapperAsync() PrefixCollectionNameToKeyNames = true, HashEntriesCustomMapper = new FailingMapper() }; - var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); // Act & Assert await Assert.ThrowsAsync(async () => await sut.GetAsync("HBaseSet-1", new GetRecordOptions { IncludeVectors = true })); } [Fact(Skip = SkipReason)] - public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() { // Arrange - var options = new RedisHashSetVectorStoreRecordCollectionOptions> + var options = new RedisHashSetVectorStoreRecordCollectionOptions> { PrefixCollectionNameToKeyNames = true, VectorStoreRecordDefinition = fixture.BasicVectorStoreRecordDefinition }; - var sut = new RedisHashSetVectorStoreRecordCollection>(fixture.Database, TestCollectionName, options); + var sut = new RedisHashSetVectorStoreRecordCollection>(fixture.Database, TestCollectionName, options); // Act var baseSetGetResult = await sut.GetAsync("HBaseSet-1", new GetRecordOptions { IncludeVectors = true }); - var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel("HGenericMapper-1") + var upsertResult = await sut.UpsertAsync(new Dictionary { - Data = - { - { "HotelName", "Generic Mapper Hotel" }, - { "HotelCode", 40 }, - { "ParkingIncluded", true }, - { "Rating", 3.6d }, - { "Description", "This is a generic mapper hotel" }, - }, - Vectors = - { - { "DescriptionEmbedding", new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }) } - } + ["HotelId"] = "HDynamicMapper-1", + + ["HotelName"] = "Dynamic Mapper Hotel", + ["HotelCode"] = 40, + ["ParkingIncluded"] = true, + ["Rating"] = 3.6d, + ["Description"] = "This is a dynamic mapper hotel", + + ["DescriptionEmbedding"] = new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }) }); - var localGetResult = await sut.GetAsync("HGenericMapper-1", new GetRecordOptions { IncludeVectors = true }); + var localGetResult = await sut.GetAsync("HDynamicMapper-1", new GetRecordOptions { IncludeVectors = true }); // Assert Assert.NotNull(baseSetGetResult); - Assert.Equal("HBaseSet-1", baseSetGetResult.Key); - Assert.Equal("My Hotel 1", baseSetGetResult.Data["HotelName"]); - Assert.Equal(1, baseSetGetResult.Data["HotelCode"]); - Assert.True((bool)baseSetGetResult.Data["ParkingIncluded"]!); - Assert.Equal(3.6d, baseSetGetResult.Data["Rating"]); - Assert.Equal("This is a great hotel.", baseSetGetResult.Data["Description"]); - Assert.NotNull(baseSetGetResult.Vectors["DescriptionEmbedding"]); - Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)baseSetGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + Assert.Equal("HBaseSet-1", baseSetGetResult["HotelId"]); + Assert.Equal("My Hotel 1", baseSetGetResult["HotelName"]); + Assert.Equal(1, baseSetGetResult["HotelCode"]); + Assert.True((bool)baseSetGetResult["ParkingIncluded"]!); + Assert.Equal(3.6d, baseSetGetResult["Rating"]); + Assert.Equal("This is a great hotel.", baseSetGetResult["Description"]); + 
Assert.NotNull(baseSetGetResult["DescriptionEmbedding"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)baseSetGetResult["DescriptionEmbedding"]!).ToArray()); Assert.Equal("HGenericMapper-1", upsertResult); Assert.NotNull(localGetResult); - Assert.Equal("HGenericMapper-1", localGetResult.Key); - Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); - Assert.Equal(40, localGetResult.Data["HotelCode"]); - Assert.True((bool)localGetResult.Data["ParkingIncluded"]!); - Assert.Equal(3.6d, localGetResult.Data["Rating"]); - Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); - Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + Assert.Equal("HDynamicMapper-1", localGetResult["HotelId"]); + Assert.Equal("Dynamic Mapper Hotel", localGetResult["HotelName"]); + Assert.Equal(40, localGetResult["HotelCode"]); + Assert.True((bool)localGetResult["ParkingIncluded"]!); + Assert.Equal(3.6d, localGetResult["Rating"]); + Assert.Equal("This is a dynamic mapper hotel", localGetResult["Description"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult["DescriptionEmbedding"]!).ToArray()); } private static RedisBasicFloat32Hotel CreateTestHotel(string hotelId, int hotelCode) diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs index 0939866d218c..0c63d5aac82c 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Collections.Generic; using System.Linq; using System.Text.Json.Nodes; using System.Threading.Tasks; @@ -16,7 +17,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Redis; #pragma warning disable CS0618 // VectorSearchFilter is obsolete /// -/// Contains tests for the class. +/// Contains tests for the class. /// /// Used for logging. /// Redis setup and teardown. @@ -24,7 +25,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Redis; public sealed class RedisJsonVectorStoreRecordCollectionTests(ITestOutputHelper output, RedisVectorStoreFixture fixture) { // If null, all tests will be enabled - private const string SkipReason = "Redis tests fail intermittently on build server"; + private const string SkipReason = null; private const string TestCollectionName = "jsonhotels"; @@ -34,7 +35,7 @@ public sealed class RedisJsonVectorStoreRecordCollectionTests(ITestOutputHelper public async Task CollectionExistsReturnsCollectionStateAsync(string collectionName, bool expectedExists) { // Arrange. - var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, collectionName); + var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, collectionName); // Act. var actual = await sut.CollectionExistsAsync(); @@ -58,7 +59,7 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe PrefixCollectionNameToKeyNames = true, VectorStoreRecordDefinition = useRecordDefinition ? 
fixture.VectorStoreRecordDefinition : null }; - var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, testCollectionName, options); + var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, testCollectionName, options); // Act await sut.CreateCollectionAsync(); @@ -122,7 +123,7 @@ public async Task ItCanDeleteCollectionAsync() createParams.AddPrefix(tempCollectionName); await fixture.Database.FT().CreateAsync(tempCollectionName, createParams, schema); - var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, tempCollectionName); + var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, tempCollectionName); // Act await sut.DeleteCollectionAsync(); @@ -142,7 +143,7 @@ public async Task ItCanUpsertDocumentToVectorStoreAsync(bool useRecordDefinition PrefixCollectionNameToKeyNames = true, VectorStoreRecordDefinition = useRecordDefinition ? fixture.VectorStoreRecordDefinition : null }; - var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); RedisHotel record = CreateTestHotel("Upsert-2", 2); // Act. @@ -180,7 +181,7 @@ public async Task ItCanUpsertManyDocumentsToVectorStoreAsync(bool useRecordDefin PrefixCollectionNameToKeyNames = true, VectorStoreRecordDefinition = useRecordDefinition ? fixture.VectorStoreRecordDefinition : null }; - var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); // Act. var results = sut.UpsertAsync( @@ -219,7 +220,7 @@ public async Task ItCanGetDocumentFromVectorStoreAsync(bool includeVectors, bool PrefixCollectionNameToKeyNames = true, VectorStoreRecordDefinition = useRecordDefinition ? fixture.VectorStoreRecordDefinition : null }; - var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); // Act. var getResult = await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = includeVectors }); @@ -253,7 +254,7 @@ public async Task ItCanGetManyDocumentsFromVectorStoreAsync() { // Arrange var options = new RedisJsonVectorStoreRecordCollectionOptions { PrefixCollectionNameToKeyNames = true }; - var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); // Act // Also include one non-existing key to test that the operation does not fail for these and returns only the found ones. @@ -276,7 +277,7 @@ public async Task ItFailsToGetDocumentsWithInvalidSchemaAsync() { // Arrange. var options = new RedisJsonVectorStoreRecordCollectionOptions { PrefixCollectionNameToKeyNames = true }; - var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); // Act & Assert. await Assert.ThrowsAsync(async () => await sut.GetAsync("BaseSet-4-Invalid", new GetRecordOptions { IncludeVectors = true })); @@ -293,7 +294,7 @@ public async Task ItCanRemoveDocumentFromVectorStoreAsync(bool useRecordDefiniti PrefixCollectionNameToKeyNames = true, VectorStoreRecordDefinition = useRecordDefinition ? 
fixture.VectorStoreRecordDefinition : null }; - var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); var address = new RedisHotelAddress { City = "Seattle", Country = "USA" }; var record = new RedisHotel { @@ -320,7 +321,7 @@ public async Task ItCanRemoveManyDocumentsFromVectorStoreAsync() { // Arrange var options = new RedisJsonVectorStoreRecordCollectionOptions { PrefixCollectionNameToKeyNames = true }; - var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); await sut.UpsertAsync(CreateTestHotel("RemoveMany-1", 1)); await sut.UpsertAsync(CreateTestHotel("RemoveMany-2", 2)); await sut.UpsertAsync(CreateTestHotel("RemoveMany-3", 3)); @@ -342,7 +343,7 @@ public async Task ItCanSearchWithFloat32VectorAndFilterAsync(string filterType) { // Arrange var options = new RedisJsonVectorStoreRecordCollectionOptions { PrefixCollectionNameToKeyNames = true }; - var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); var vector = new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }); var filter = filterType == "equality" ? new VectorSearchFilter().EqualTo("HotelCode", 1) : new VectorSearchFilter().AnyTagEqualTo("Tags", "pool"); @@ -376,7 +377,7 @@ public async Task ItCanSearchWithFloat32VectorAndTopSkipAsync() { // Arrange var options = new RedisJsonVectorStoreRecordCollectionOptions { PrefixCollectionNameToKeyNames = true }; - var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName + "TopSkip", options); + var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName + "TopSkip", options); await sut.CreateCollectionIfNotExistsAsync(); await sut.UpsertAsync(new RedisBasicFloat32Hotel { HotelId = "TopSkip_1", HotelName = "1", Description = "Nice hotel", DescriptionEmbedding = new ReadOnlyMemory([1.0f, 1.0f, 1.0f, 1.0f]) }); await sut.UpsertAsync(new RedisBasicFloat32Hotel { HotelId = "TopSkip_2", HotelName = "2", Description = "Nice hotel", DescriptionEmbedding = new ReadOnlyMemory([1.0f, 1.0f, 1.0f, 2.0f]) }); @@ -407,7 +408,7 @@ public async Task ItCanSearchWithFloat64VectorAsync(bool includeVectors) { // Arrange var options = new RedisJsonVectorStoreRecordCollectionOptions { PrefixCollectionNameToKeyNames = true }; - var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName + "Float64", options); + var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName + "Float64", options); await sut.CreateCollectionIfNotExistsAsync(); await sut.UpsertAsync(new RedisBasicFloat64Hotel { HotelId = "Float64_1", HotelName = "1", Description = "Nice hotel", DescriptionEmbedding = new ReadOnlyMemory([1.0d, 1.1d, 1.2d, 1.3d]) }); await sut.UpsertAsync(new RedisBasicFloat64Hotel { HotelId = "Float64_2", HotelName = "2", Description = "Nice hotel", DescriptionEmbedding = new ReadOnlyMemory([2.0d, 2.1d, 2.2d, 2.3d]) }); @@ -442,7 +443,7 @@ public async Task ItReturnsNullWhenGettingNonExistentRecordAsync() { // Arrange var options = new RedisJsonVectorStoreRecordCollectionOptions { PrefixCollectionNameToKeyNames = true }; - var sut = new 
RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); // Act & Assert Assert.Null(await sut.GetAsync("BaseSet-5", new GetRecordOptions { IncludeVectors = true })); @@ -457,75 +458,71 @@ public async Task ItThrowsMappingExceptionForFailedMapperAsync() PrefixCollectionNameToKeyNames = true, JsonNodeCustomMapper = new FailingMapper() }; - var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); + var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); // Act & Assert await Assert.ThrowsAsync(async () => await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true })); } [Fact(Skip = SkipReason)] - public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() { // Arrange - var options = new RedisJsonVectorStoreRecordCollectionOptions> + var options = new RedisJsonVectorStoreRecordCollectionOptions> { PrefixCollectionNameToKeyNames = true, VectorStoreRecordDefinition = fixture.VectorStoreRecordDefinition }; - var sut = new RedisJsonVectorStoreRecordCollection>(fixture.Database, TestCollectionName, options); + var sut = new RedisJsonVectorStoreRecordCollection>(fixture.Database, TestCollectionName, options); // Act var baseSetGetResult = await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true }); - var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel("GenericMapper-1") + var upsertResult = await sut.UpsertAsync(new Dictionary { - Data = - { - { "HotelName", "Generic Mapper Hotel" }, - { "HotelCode", 1 }, - { "Tags", new[] { "generic 1", "generic 2" } }, - { "FTSTags", new[] { "generic 1", "generic 2" } }, - { "ParkingIncluded", true }, - { "LastRenovationDate", new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero) }, - { "Rating", 3.6 }, - { "Address", new RedisHotelAddress { City = "Seattle", Country = "USA" } }, - { "Description", "This is a generic mapper hotel" }, - { "DescriptionEmbedding", new[] { 30f, 31f, 32f, 33f } } - }, - Vectors = - { - { "DescriptionEmbedding", new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }) } - } + ["HotelId"] = "DynamicMapper-1", + + ["HotelName"] = "Dynamic Mapper Hotel", + ["HotelCode"] = 1, + ["Tags"] = new[] { "dynamic 1", "dynamic 2" }, + ["FTSTags"] = new[] { "dynamic 1", "dynamic 2" }, + ["ParkingIncluded"] = true, + ["LastRenovationDate"] = new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), + ["Rating"] = 3.6, + ["Address"] = new RedisHotelAddress { City = "Seattle", Country = "USA" }, + ["Description"] = "This is a dynamic mapper hotel", + + ["DescriptionEmbedding"] = new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }) }); - var localGetResult = await sut.GetAsync("GenericMapper-1", new GetRecordOptions { IncludeVectors = true }); + var localGetResult = await sut.GetAsync("DynamicMapper-1", new GetRecordOptions { IncludeVectors = true }); // Assert Assert.NotNull(baseSetGetResult); - Assert.Equal("BaseSet-1", baseSetGetResult.Key); - Assert.Equal("My Hotel 1", baseSetGetResult.Data["HotelName"]); - Assert.Equal(1, baseSetGetResult.Data["HotelCode"]); - Assert.Equal(new[] { "pool", "air conditioning", "concierge" }, baseSetGetResult.Data["Tags"]); - Assert.Equal(new[] { "pool", "air conditioning", "concierge" }, baseSetGetResult.Data["FTSTags"]); - 
Assert.True((bool)baseSetGetResult.Data["ParkingIncluded"]!); - Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), baseSetGetResult.Data["LastRenovationDate"]); - Assert.Equal(3.6, baseSetGetResult.Data["Rating"]); - Assert.Equal("Seattle", ((RedisHotelAddress)baseSetGetResult.Data["Address"]!).City); - Assert.Equal("This is a great hotel.", baseSetGetResult.Data["Description"]); - Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)baseSetGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); - - Assert.Equal("GenericMapper-1", upsertResult); + Assert.Equal("BaseSet-1", baseSetGetResult["HotelId"]); + Assert.Equal("My Hotel 1", baseSetGetResult["HotelName"]); + Assert.Equal(1, baseSetGetResult["HotelCode"]); + Assert.Equal(new[] { "pool", "air conditioning", "concierge" }, baseSetGetResult["Tags"]); + Assert.Equal(new[] { "pool", "air conditioning", "concierge" }, baseSetGetResult["FTSTags"]); + Assert.True((bool)baseSetGetResult["ParkingIncluded"]!); + Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), baseSetGetResult["LastRenovationDate"]); + Assert.Equal(3.6, baseSetGetResult["Rating"]); + Assert.Equal("Seattle", ((RedisHotelAddress)baseSetGetResult["Address"]!).City); + Assert.Equal("This is a great hotel.", baseSetGetResult["Description"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)baseSetGetResult["DescriptionEmbedding"]!).ToArray()); + + Assert.Equal("DynamicMapper-1", upsertResult); Assert.NotNull(localGetResult); - Assert.Equal("GenericMapper-1", localGetResult.Key); - Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); - Assert.Equal(1, localGetResult.Data["HotelCode"]); - Assert.Equal(new[] { "generic 1", "generic 2" }, localGetResult.Data["Tags"]); - Assert.Equal(new[] { "generic 1", "generic 2" }, localGetResult.Data["FTSTags"]); - Assert.True((bool)localGetResult.Data["ParkingIncluded"]!); - Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), localGetResult.Data["LastRenovationDate"]); - Assert.Equal(3.6d, localGetResult.Data["Rating"]); - Assert.Equal("Seattle", ((RedisHotelAddress)localGetResult.Data["Address"]!).City); - Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); - Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + Assert.Equal("DynamicMapper-1", localGetResult["HotelId"]); + Assert.Equal("Dynamic Mapper Hotel", localGetResult["HotelName"]); + Assert.Equal(1, localGetResult["HotelCode"]); + Assert.Equal(new[] { "dynamic 1", "dynamic 2" }, localGetResult["Tags"]); + Assert.Equal(new[] { "dynamic 1", "dynamic 2" }, localGetResult["FTSTags"]); + Assert.True((bool)localGetResult["ParkingIncluded"]!); + Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), localGetResult["LastRenovationDate"]); + Assert.Equal(3.6d, localGetResult["Rating"]); + Assert.Equal("Seattle", ((RedisHotelAddress)localGetResult["Address"]!).City); + Assert.Equal("This is a dynamic mapper hotel", localGetResult["Description"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult["DescriptionEmbedding"]!).ToArray()); } private static RedisHotel CreateTestHotel(string hotelId, int hotelCode) diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs index c60669b842ee..20ace0af55f8 100644 --- 
a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs @@ -132,14 +132,14 @@ public async Task InitializeAsync() DescriptionEmbedding = embedding, Tags = new[] { "pool", "air conditioning", "concierge" }, FTSTags = new[] { "pool", "air conditioning", "concierge" }, - parking_is_included = true, + ParkingIncluded = true, LastRenovationDate = new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), Rating = 3.6, Address = address }); - await this.Database.JSON().SetAsync("jsonhotels:BaseSet-2", "$", new { HotelName = "My Hotel 2", HotelCode = 2, Description = "This is a great hotel.", DescriptionEmbedding = embedding, parking_is_included = false }); - await this.Database.JSON().SetAsync("jsonhotels:BaseSet-3", "$", new { HotelName = "My Hotel 3", HotelCode = 3, Description = "This is a great hotel.", DescriptionEmbedding = embedding, parking_is_included = false }); - await this.Database.JSON().SetAsync("jsonhotels:BaseSet-4-Invalid", "$", new { HotelId = "AnotherId", HotelName = "My Invalid Hotel", HotelCode = 4, Description = "This is an invalid hotel.", DescriptionEmbedding = embedding, parking_is_included = false }); + await this.Database.JSON().SetAsync("jsonhotels:BaseSet-2", "$", new { HotelName = "My Hotel 2", HotelCode = 2, Description = "This is a great hotel.", DescriptionEmbedding = embedding, ParkingIncluded = false }); + await this.Database.JSON().SetAsync("jsonhotels:BaseSet-3", "$", new { HotelName = "My Hotel 3", HotelCode = 3, Description = "This is a great hotel.", DescriptionEmbedding = embedding, ParkingIncluded = false }); + await this.Database.JSON().SetAsync("jsonhotels:BaseSet-4-Invalid", "$", new { HotelId = "AnotherId", HotelName = "My Invalid Hotel", HotelCode = 4, Description = "This is an invalid hotel.", DescriptionEmbedding = embedding, ParkingIncluded = false }); // Add hashset test data. 
await this.Database.HashSetAsync("hashhotels:HBaseSet-1", new HashEntry[] @@ -148,7 +148,7 @@ public async Task InitializeAsync() new("HotelCode", 1), new("Description", "This is a great hotel."), new("DescriptionEmbedding", MemoryMarshal.AsBytes(new ReadOnlySpan(embedding)).ToArray()), - new("parking_is_included", true), + new("ParkingIncluded", true), new("Rating", 3.6) }); await this.Database.HashSetAsync("hashhotels:HBaseSet-2", new HashEntry[] @@ -157,7 +157,7 @@ public async Task InitializeAsync() new("HotelCode", 2), new("Description", "This is a great hotel."), new("DescriptionEmbedding", MemoryMarshal.AsBytes(new ReadOnlySpan(embedding)).ToArray()), - new("parking_is_included", false), + new("ParkingIncluded", false), }); await this.Database.HashSetAsync("hashhotels:HBaseSet-3", new HashEntry[] { @@ -165,7 +165,7 @@ public async Task InitializeAsync() new("HotelCode", 3), new("Description", "This is a great hotel."), new("DescriptionEmbedding", MemoryMarshal.AsBytes(new ReadOnlySpan(embedding)).ToArray()), - new("parking_is_included", false), + new("ParkingIncluded", false), }); await this.Database.HashSetAsync("hashhotels:HBaseSet-4-Invalid", new HashEntry[] { @@ -174,7 +174,7 @@ public async Task InitializeAsync() new("HotelCode", 4), new("Description", "This is an invalid hotel."), new("DescriptionEmbedding", MemoryMarshal.AsBytes(new ReadOnlySpan(embedding)).ToArray()), - new("parking_is_included", false), + new("ParkingIncluded", false), }); } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs index 7c6badf59820..f64676f3fe10 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs @@ -42,11 +42,11 @@ public void AddVectorStoreRecordCollectionWithStringKeyAndSqliteConnectionRegist // Assert var collection = serviceProvider.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } [Fact(Skip = SkipReason)] @@ -60,11 +60,11 @@ public void AddVectorStoreRecordCollectionWithNumericKeyAndSqliteConnectionRegis // Assert var collection = serviceProvider.GetRequiredService>(); Assert.NotNull(collection); - Assert.IsType>(collection); + Assert.IsType>(collection); var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); - Assert.IsType>(vectorizedSearch); + Assert.IsType>(vectorizedSearch); } #region private diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs index 02dfae8cce1a..1a8128a8cf03 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreFixture.cs @@ -12,11 +12,13 @@ public class SqliteVectorStoreFixture : IDisposable public string ConnectionString => $"Data Source={this._databasePath}"; - public SqliteVectorStoreRecordCollection GetCollection( + public SqliteVectorStoreRecordCollection GetCollection( string collectionName, - SqliteVectorStoreRecordCollectionOptions? 
options = default) where TRecord : notnull + SqliteVectorStoreRecordCollectionOptions? options = default) + where TKey : notnull + where TRecord : notnull { - return new SqliteVectorStoreRecordCollection( + return new SqliteVectorStoreRecordCollection( this.ConnectionString, collectionName, options); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs index 688c2967fd93..45686fe125f8 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Collections.Generic; using System.Linq; using System.Runtime.InteropServices; using System.Threading.Tasks; @@ -11,10 +12,11 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.Sqlite; +#pragma warning disable CA1859 // Use concrete types when possible for improved performance #pragma warning disable CS0618 // VectorSearchFilter is obsolete /// -/// Integration tests for class. +/// Integration tests for class. /// [Collection("SqliteVectorStoreCollection")] public sealed class SqliteVectorStoreRecordCollectionTests(SqliteVectorStoreFixture fixture) @@ -27,7 +29,7 @@ public sealed class SqliteVectorStoreRecordCollectionTests(SqliteVectorStoreFixt public async Task CollectionExistsReturnsCollectionStateAsync(bool createCollection) { // Arrange - var sut = fixture.GetCollection>("CollectionExists"); + var sut = fixture.GetCollection>("CollectionExists"); if (createCollection) { @@ -45,7 +47,7 @@ public async Task CollectionExistsReturnsCollectionStateAsync(bool createCollect public async Task ItCanCreateCollectionAsync() { // Arrange - var sut = fixture.GetCollection>("CreateCollection"); + var sut = fixture.GetCollection>("CreateCollection"); // Act await sut.CreateCollectionAsync(); @@ -58,7 +60,7 @@ public async Task ItCanCreateCollectionAsync() public async Task ItCanCreateCollectionForSupportedDistanceFunctionsAsync() { // Arrange - var sut = fixture.GetCollection("CreateCollectionForSupportedDistanceFunctions"); + var sut = fixture.GetCollection("CreateCollectionForSupportedDistanceFunctions"); // Act await sut.CreateCollectionAsync(); @@ -71,7 +73,7 @@ public async Task ItCanCreateCollectionForSupportedDistanceFunctionsAsync() public async Task ItCanDeleteCollectionAsync() { // Arrange - var sut = fixture.GetCollection>("DeleteCollection"); + var sut = fixture.GetCollection>("DeleteCollection"); await sut.CreateCollectionAsync(); @@ -102,7 +104,7 @@ public async Task ItCanCreateCollectionUpsertAndGetAsync(bool includeVectors, bo VectorStoreRecordDefinition = useRecordDefinition ? 
GetVectorStoreRecordDefinition() : null }; - var sut = fixture.GetCollection>("DeleteCollection", options); + var sut = fixture.GetCollection>("DeleteCollection", options); var record = CreateTestHotel(HotelId); @@ -141,7 +143,7 @@ public async Task ItCanGetAndDeleteRecordAsync() { // Arrange const ulong HotelId = 5; - var sut = fixture.GetCollection>("DeleteRecord"); + var sut = fixture.GetCollection>("DeleteRecord"); await sut.CreateCollectionAsync(); @@ -170,7 +172,7 @@ public async Task ItCanGetUpsertDeleteBatchWithNumericKeyAsync() const ulong HotelId2 = 2; const ulong HotelId3 = 3; - var sut = fixture.GetCollection>("GetUpsertDeleteBatchWithNumericKey"); + var sut = fixture.GetCollection>("GetUpsertDeleteBatchWithNumericKey"); await sut.CreateCollectionAsync(); @@ -204,7 +206,7 @@ public async Task ItCanGetUpsertDeleteBatchWithStringKeyAsync() const string HotelId2 = "22222222-2222-2222-2222-222222222222"; const string HotelId3 = "33333333-3333-3333-3333-333333333333"; - var sut = fixture.GetCollection>("GetUpsertDeleteBatchWithStringKey") as IVectorStoreRecordCollection>; + var sut = fixture.GetCollection>("GetUpsertDeleteBatchWithStringKey") as IVectorStoreRecordCollection>; await sut.CreateCollectionAsync(); @@ -240,7 +242,7 @@ public async Task ItCanGetExistingRecordAsync(bool includeVectors) var collectionName = $"Collection{collectionNamePostfix}"; const ulong HotelId = 5; - var sut = fixture.GetCollection>(collectionName); + var sut = fixture.GetCollection>(collectionName); await sut.CreateCollectionAsync(); @@ -306,7 +308,7 @@ public async Task ItCanUpsertExistingRecordAsync() { // Arrange const ulong HotelId = 5; - var sut = fixture.GetCollection>("UpsertRecord"); + var sut = fixture.GetCollection>("UpsertRecord"); await sut.CreateCollectionAsync(); @@ -346,7 +348,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync(bool include var hotel3 = CreateTestHotel(hotelId: "key3", embedding: new[] { 20f, 20f, 20f, 20f }); var hotel4 = CreateTestHotel(hotelId: "key4", embedding: new[] { -1000f, -1000f, -1000f, -1000f }); - var sut = fixture.GetCollection>("VectorizedSearch"); + var sut = fixture.GetCollection>("VectorizedSearch"); await sut.CreateCollectionIfNotExistsAsync(); @@ -383,7 +385,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() var hotel3 = CreateTestHotel(hotelId: "key3", embedding: new[] { 20f, 20f, 20f, 20f }); var hotel4 = CreateTestHotel(hotelId: "key4", embedding: new[] { -1000f, -1000f, -1000f, -1000f }); - var sut = fixture.GetCollection>("VectorizedSearchWithOffset"); + var sut = fixture.GetCollection>("VectorizedSearchWithOffset"); await sut.CreateCollectionIfNotExistsAsync(); @@ -416,7 +418,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() var hotel3 = CreateTestHotel(hotelId: "key3", embedding: new[] { 20f, 20f, 20f, 20f }); var hotel4 = CreateTestHotel(hotelId: "key4", embedding: new[] { -1000f, -1000f, -1000f, -1000f }); - var sut = fixture.GetCollection>("VectorizedSearchWithFilter"); + var sut = fixture.GetCollection>("VectorizedSearchWithFilter"); await sut.CreateCollectionIfNotExistsAsync(); @@ -441,35 +443,32 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() } [Fact(Skip = SkipReason)] - public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperWithNumericKeyAsync() + public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperWithNumericKeyAsync() { const ulong HotelId = 5; - var options = new SqliteVectorStoreRecordCollectionOptions> + var options = 
new SqliteVectorStoreRecordCollectionOptions> { VectorStoreRecordDefinition = GetVectorStoreRecordDefinition() }; - var sut = fixture.GetCollection>("GenericMapperWithNumericKey", options); + var sut = fixture.GetCollection>("DynamicMapperWithNumericKey", options); await sut.CreateCollectionAsync(); var record = CreateTestHotel(HotelId); // Act - var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel(HotelId) + var upsertResult = await sut.UpsertAsync(new Dictionary { - Data = - { - { "HotelName", "Generic Mapper Hotel" }, - { "Description", "This is a generic mapper hotel" }, - { "ParkingIncluded", true }, - { "HotelRating", 3.6f } - }, - Vectors = - { - { "DescriptionEmbedding", new ReadOnlyMemory([30f, 31f, 32f, 33f]) } - } + ["HotelId"] = HotelId, + + ["HotelName"] = "Dynamic Mapper Hotel", + ["Description"] = "This is a dynamic mapper hotel", + ["ParkingIncluded"] = true, + ["HotelRating"] = 3.6f, + + ["DescriptionEmbedding"] = new ReadOnlyMemory([30f, 31f, 32f, 33f]) }); var localGetResult = await sut.GetAsync(HotelId, new GetRecordOptions { IncludeVectors = true }); @@ -478,44 +477,41 @@ public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperWithNumericKeyAsync Assert.Equal(HotelId, upsertResult); Assert.NotNull(localGetResult); - Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); - Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); - Assert.True((bool?)localGetResult.Data["ParkingIncluded"]); - Assert.Equal(3.6f, localGetResult.Data["HotelRating"]); - Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + Assert.Equal("Dynamic Mapper Hotel", localGetResult["HotelName"]); + Assert.Equal("This is a dynamic mapper hotel", localGetResult["Description"]); + Assert.True((bool?)localGetResult["ParkingIncluded"]); + Assert.Equal(3.6f, localGetResult["HotelRating"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult["DescriptionEmbedding"]!).ToArray()); } [Fact(Skip = SkipReason)] - public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperWithStringKeyAsync() + public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperWithStringKeyAsync() { const string HotelId = "key"; - var options = new SqliteVectorStoreRecordCollectionOptions> + var options = new SqliteVectorStoreRecordCollectionOptions> { VectorStoreRecordDefinition = GetVectorStoreRecordDefinition() }; - var sut = fixture.GetCollection>("GenericMapperWithStringKey", options) - as IVectorStoreRecordCollection>; + var sut = fixture.GetCollection>("DynamicMapperWithStringKey", options) + as IVectorStoreRecordCollection>; await sut.CreateCollectionAsync(); var record = CreateTestHotel(HotelId); // Act - var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel(HotelId) + var upsertResult = await sut.UpsertAsync(new Dictionary { - Data = - { - { "HotelName", "Generic Mapper Hotel" }, - { "Description", "This is a generic mapper hotel" }, - { "ParkingIncluded", true }, - { "HotelRating", 3.6f } - }, - Vectors = - { - { "DescriptionEmbedding", new ReadOnlyMemory([30f, 31f, 32f, 33f]) } - } + ["HotelId"] = HotelId, + + ["HotelName"] = "Dynamic Mapper Hotel", + ["Description"] = "This is a dynamic mapper hotel", + ["ParkingIncluded"] = true, + ["HotelRating"] = 3.6f, + + ["DescriptionEmbedding"] = new ReadOnlyMemory([30f, 31f, 32f, 33f]) }); var localGetResult = await sut.GetAsync(HotelId, new GetRecordOptions { IncludeVectors = true }); @@ -524,11 
+520,11 @@ public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperWithStringKeyAsync( Assert.Equal(HotelId, upsertResult); Assert.NotNull(localGetResult); - Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); - Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); - Assert.True((bool?)localGetResult.Data["ParkingIncluded"]); - Assert.Equal(3.6f, localGetResult.Data["HotelRating"]); - Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + Assert.Equal("Dynamic Mapper Hotel", localGetResult["HotelName"]); + Assert.Equal("This is a dynamic mapper hotel", localGetResult["Description"]); + Assert.True((bool?)localGetResult["ParkingIncluded"]); + Assert.Equal(3.6f, localGetResult["HotelRating"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult["DescriptionEmbedding"]!).ToArray()); } #region diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/CommonWeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/CommonWeaviateVectorStoreRecordCollectionTests.cs index 1398f7d48f27..be63bac6ea58 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/CommonWeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/CommonWeaviateVectorStoreRecordCollectionTests.cs @@ -28,7 +28,7 @@ protected override IVectorStoreRecordCollection GetTargetRecordCo var recordCollectionNameChars = recordCollectionName.ToCharArray(); recordCollectionNameChars[0] = char.ToUpperInvariant(recordCollectionNameChars[0]); - return new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, new string(recordCollectionNameChars), new() + return new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, new string(recordCollectionNameChars), new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs index 4e10d17566f6..8323ca93dedb 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs @@ -19,7 +19,7 @@ public sealed class WeaviateVectorStoreRecordCollectionTests(WeaviateVectorStore public async Task ItCanCreateCollectionAsync() { // Arrange - var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "TestCreateCollection"); + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "TestCreateCollection"); // Act await sut.CreateCollectionAsync(); @@ -34,7 +34,7 @@ public async Task ItCanCreateCollectionAsync() public async Task ItCanCheckIfCollectionExistsAsync(string collectionName, bool collectionExists) { // Arrange - var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, collectionName); + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, collectionName); if (collectionExists) { @@ -63,7 +63,7 @@ public async Task ItCanUpsertAndGetRecordAsync(string collectionName, bool inclu VectorStoreRecordDefinition = useRecordDefinition ? 
this.GetTestHotelRecordDefinition() : null }; - var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, collectionName, options); + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, collectionName, options); var record = this.CreateTestHotel(hotelId); @@ -104,7 +104,7 @@ public async Task ItCanDeleteCollectionAsync() // Arrange const string CollectionName = "TestDeleteCollection"; - var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, CollectionName); + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, CollectionName); await sut.CreateCollectionAsync(); @@ -123,7 +123,7 @@ public async Task ItCanDeleteRecordAsync() // Arrange var hotelId = new Guid("55555555-5555-5555-5555-555555555555"); - var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "TestDeleteRecord"); + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "TestDeleteRecord"); await sut.CreateCollectionAsync(); @@ -152,7 +152,7 @@ public async Task ItCanUpsertAndGetAndDeleteBatchAsync() var hotelId2 = new Guid("22222222-2222-2222-2222-222222222222"); var hotelId3 = new Guid("33333333-3333-3333-3333-333333333333"); - var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "TestBatch"); + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "TestBatch"); await sut.CreateCollectionAsync(); @@ -183,7 +183,7 @@ public async Task ItCanUpsertRecordAsync() { // Arrange var hotelId = new Guid("55555555-5555-5555-5555-555555555555"); - var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "TestUpsert"); + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "TestUpsert"); await sut.CreateCollectionAsync(); @@ -219,7 +219,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync(bool include var hotel3 = this.CreateTestHotel(hotelId: new Guid("33333333-3333-3333-3333-333333333333"), embedding: new[] { 20f, 20f, 20f, 20f }); var hotel4 = this.CreateTestHotel(hotelId: new Guid("44444444-4444-4444-4444-444444444444"), embedding: new[] { -1000f, -1000f, -1000f, -1000f }); - var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "VectorSearchDefault"); + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "VectorSearchDefault"); await sut.CreateCollectionIfNotExistsAsync(); @@ -257,7 +257,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() var hotel3 = this.CreateTestHotel(hotelId: new Guid("33333333-3333-3333-3333-333333333333"), embedding: new[] { 20f, 20f, 20f, 20f }); var hotel4 = this.CreateTestHotel(hotelId: new Guid("44444444-4444-4444-4444-444444444444"), embedding: new[] { -1000f, -1000f, -1000f, -1000f }); - var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "VectorSearchWithOffset"); + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "VectorSearchWithOffset"); await sut.CreateCollectionIfNotExistsAsync(); @@ -290,7 +290,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync(VectorSearc var hotel3 = this.CreateTestHotel(hotelId: new Guid("33333333-3333-3333-3333-333333333333"), embedding: new[] { 20f, 20f, 20f, 20f }); var hotel4 = this.CreateTestHotel(hotelId: new Guid("44444444-4444-4444-4444-444444444444"), embedding: new[] { -1000f, -1000f, -1000f, -1000f }); - var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "VectorSearchWithFilter"); + var sut = new 
WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "VectorSearchWithFilter"); await sut.CreateCollectionIfNotExistsAsync(); @@ -334,7 +334,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAndDifferentDataT Timestamp = new DateTime(2024, 9, 22, 15, 59, 42) }; - var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "VectorSearchWithFilterAndDataTypes"); + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "VectorSearchWithFilterAndDataTypes"); await sut.CreateCollectionIfNotExistsAsync(); @@ -356,35 +356,32 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAndDifferentDataT } [Fact] - public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + public async Task ItCanUpsertAndRetrieveUsingDynamicMappingAsync() { // Arrange var hotelId = new Guid("55555555-5555-5555-5555-555555555555"); - var options = new WeaviateVectorStoreRecordCollectionOptions> + var options = new WeaviateVectorStoreRecordCollectionOptions> { VectorStoreRecordDefinition = this.GetTestHotelRecordDefinition() }; - var sut = new WeaviateVectorStoreRecordCollection>(fixture.HttpClient!, "TestGenericMapper", options); + var sut = new WeaviateVectorStoreRecordCollection>(fixture.HttpClient!, "TestDynamicMapper", options); await sut.CreateCollectionAsync(); // Act - var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel(hotelId) + var upsertResult = await sut.UpsertAsync(new Dictionary { - Data = - { - { "HotelName", "Generic Mapper Hotel" }, - { "Description", "This is a generic mapper hotel" }, - { "Tags", new List { "generic" } }, - { "ParkingIncluded", false }, - { "Timestamp", new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero) }, - { "HotelRating", 3.6f } - }, - Vectors = - { - { "DescriptionEmbedding", new ReadOnlyMemory([30f, 31f, 32f, 33f]) } - } + ["HotelId"] = hotelId, + + ["HotelName"] = "Dynamic Mapper Hotel", + ["Description"] = "This is a dynamic mapper hotel", + ["Tags"] = new List { "dynamic" }, + ["ParkingIncluded"] = false, + ["Timestamp"] = new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), + ["HotelRating"] = 3.6f, + + ["DescriptionEmbedding"] = new ReadOnlyMemory([30f, 31f, 32f, 33f]) }); var localGetResult = await sut.GetAsync(hotelId, new GetRecordOptions { IncludeVectors = true }); @@ -393,13 +390,13 @@ public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() Assert.Equal(hotelId, upsertResult); Assert.NotNull(localGetResult); - Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); - Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); - Assert.Equal(new List { "generic" }, localGetResult.Data["Tags"]); - Assert.False((bool?)localGetResult.Data["ParkingIncluded"]); - Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), localGetResult.Data["Timestamp"]); - Assert.Equal(3.6f, localGetResult.Data["HotelRating"]); - Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + Assert.Equal("Dynamic Mapper Hotel", localGetResult["HotelName"]); + Assert.Equal("This is a dynamic mapper hotel", localGetResult["Description"]); + Assert.Equal(new List { "dynamic" }, localGetResult["Tags"]); + Assert.False((bool?)localGetResult["ParkingIncluded"]); + Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), localGetResult["Timestamp"]); + Assert.Equal(3.6f, localGetResult["HotelRating"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, 
((ReadOnlyMemory)localGetResult["DescriptionEmbedding"]!).ToArray()); } public static TheoryData> VectorizedSearchWithFilterData => new() @@ -471,7 +468,7 @@ private VectorStoreRecordDefinition GetTestHotelRecordDefinition() new VectorStoreRecordKeyProperty("HotelId", typeof(Guid)), new VectorStoreRecordDataProperty("HotelName", typeof(string)), new VectorStoreRecordDataProperty("HotelCode", typeof(int)), - new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool)), + new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool)) { StoragePropertyName = "parking_is_included" }, new VectorStoreRecordDataProperty("HotelRating", typeof(float)), new VectorStoreRecordDataProperty("Tags", typeof(List)), new VectorStoreRecordDataProperty("Description", typeof(string)), diff --git a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBGenericDataModelMapper.cs b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBDynamicDataModelMapper.cs similarity index 72% rename from dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBGenericDataModelMapper.cs rename to dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBDynamicDataModelMapper.cs index 4ac5b274c819..131d7603d2ad 100644 --- a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBGenericDataModelMapper.cs +++ b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBDynamicDataModelMapper.cs @@ -13,15 +13,15 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// -/// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within MongoDB. +/// A mapper that maps between the dynamic data model and the model that the data is stored under, within MongoDB. /// [ExcludeFromCodeCoverage] #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete -internal sealed class MongoDBGenericDataModelMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper, BsonDocument> +internal sealed class MongoDBDynamicDataModelMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper, BsonDocument> #pragma warning restore CS0618 { /// - public BsonDocument MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) + public BsonDocument MapFromDataToStorageModel(Dictionary dataModel) { Verify.NotNull(dataModel); @@ -33,20 +33,21 @@ public BsonDocument MapFromDataToStorageModel(VectorStoreGenericDataModel - public VectorStoreGenericDataModel MapFromStorageToDataModel(BsonDocument storageModel, StorageToDataModelMapperOptions options) + public Dictionary MapFromStorageToDataModel(BsonDocument storageModel, StorageToDataModelMapperOptions options) { Verify.NotNull(storageModel); - // Create variables to store the response properties. - string? key = null; - var dataProperties = new Dictionary(); - var vectorProperties = new Dictionary(); + var result = new Dictionary(); // Loop through all known properties and map each from the storage model to the data model. foreach (var property in model.Properties) @@ -74,23 +72,22 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(BsonDocumen switch (property) { case VectorStoreRecordKeyPropertyModel keyProperty: - if (storageModel.TryGetValue(MongoDBConstants.MongoReservedKeyPropertyName, out var keyValue)) - { - key = keyValue.AsString; - } + result[keyProperty.ModelName] = storageModel.TryGetValue(MongoDBConstants.MongoReservedKeyPropertyName, out var keyValue) + ? 
keyValue.AsString + : throw new VectorStoreRecordMappingException("No key property was found in the record retrieved from storage."); continue; case VectorStoreRecordDataPropertyModel dataProperty: if (storageModel.TryGetValue(dataProperty.StorageName, out var dataValue)) { - dataProperties.Add(dataProperty.ModelName, GetDataPropertyValue(property.ModelName, property.Type, dataValue)); + result.Add(dataProperty.ModelName, GetDataPropertyValue(property.ModelName, property.Type, dataValue)); } continue; case VectorStoreRecordVectorPropertyModel vectorProperty: if (storageModel.TryGetValue(vectorProperty.StorageName, out var vectorValue)) { - vectorProperties.Add(vectorProperty.ModelName, GetVectorPropertyValue(property.ModelName, property.Type, vectorValue)); + result.Add(vectorProperty.ModelName, GetVectorPropertyValue(property.ModelName, property.Type, vectorValue)); } continue; @@ -99,12 +96,7 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(BsonDocumen } } - if (key is null) - { - throw new VectorStoreRecordMappingException("No key property was found in the record retrieved from storage."); - } - - return new VectorStoreGenericDataModel(key) { Data = dataProperties, Vectors = vectorProperties }; + return result; } #region private @@ -135,7 +127,7 @@ public VectorStoreGenericDataModel MapFromStorageToDataModel(BsonDocumen Type t when t == typeof(DateTime?) => value.ToNullableUniversalTime(), Type t when typeof(IEnumerable).IsAssignableFrom(t) => value.AsBsonArray.Select( item => GetDataPropertyValue(propertyName, VectorStoreRecordPropertyVerification.GetCollectionElementType(t), item)), - _ => throw new NotSupportedException($"Mapping for property {propertyName} with type {propertyType.FullName} is not supported in generic data model.") + _ => throw new NotSupportedException($"Mapping for property {propertyName} with type {propertyType.FullName} is not supported in dynamic data model.") }; } @@ -152,7 +144,7 @@ Type t when typeof(IEnumerable).IsAssignableFrom(t) => value.AsBsonArray.Select( new ReadOnlyMemory(value.AsBsonArray.Select(item => (float)item.AsDouble).ToArray()), Type t when t == typeof(ReadOnlyMemory) || t == typeof(ReadOnlyMemory?) => new ReadOnlyMemory(value.AsBsonArray.Select(item => item.AsDouble).ToArray()), - _ => throw new NotSupportedException($"Mapping for property {propertyName} with type {propertyType.FullName} is not supported in generic data model.") + _ => throw new NotSupportedException($"Mapping for property {propertyName} with type {propertyType.FullName} is not supported in dynamic data model.") }; } @@ -167,7 +159,7 @@ private static object GetVectorArray(object? 
vector) { ReadOnlyMemory memoryFloat => memoryFloat.ToArray(), ReadOnlyMemory memoryDouble => memoryDouble.ToArray(), - _ => throw new NotSupportedException($"Mapping for type {vector.GetType().FullName} is not supported in generic data model.") + _ => throw new NotSupportedException($"Mapping for type {vector.GetType().FullName} is not supported in dynamic data model.") }; } diff --git a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyVerification.cs b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyVerification.cs index 58fe5d75d7ac..719b5f88fcf6 100644 --- a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyVerification.cs +++ b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyVerification.cs @@ -191,9 +191,6 @@ var enumerableType when GetGenericEnumerableInterface(enumerableType) is Type en return null; } - internal static bool IsGenericDataModel(Type recordType) - => recordType.IsGenericType && recordType.GetGenericTypeDefinition() == typeof(VectorStoreGenericDataModel<>); - #if NET6_0_OR_GREATER private static readonly ConstructorInfo s_objectGetDefaultConstructorInfo = typeof(object).GetConstructor(Type.EmptyTypes)!; #endif diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicFilterTests.cs index 50dfc677ad00..cce45cffd2ec 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicFilterTests.cs @@ -14,7 +14,9 @@ public class CosmosMongoBasicFilterTests(CosmosMongoBasicFilterTests.Fixture fix // Specialized MongoDB syntax for NOT over Contains ($nin) [ConditionalFact] public virtual Task Not_over_Contains() - => this.TestFilterAsync(r => !new[] { 8, 10 }.Contains(r.Int)); + => this.TestFilterAsync( + r => !new[] { 8, 10 }.Contains(r.Int), + r => !new[] { 8, 10 }.Contains((int)r["Int"]!)); #region Null checking diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicQueryTests.cs index 8a5bc6f70be5..6852f8efa085 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/Filter/CosmosMongoBasicQueryTests.cs @@ -14,7 +14,9 @@ public class CosmosMongoBasicQueryTests(CosmosMongoBasicQueryTests.Fixture fixtu // Specialized MongoDB syntax for NOT over Contains ($nin) [ConditionalFact] public virtual Task Not_over_Contains() - => this.TestFilterAsync(r => !new[] { 8, 10 }.Contains(r.Int)); + => this.TestFilterAsync( + r => !new[] { 8, 10 }.Contains(r.Int), + r => !new[] { 8, 10 }.Contains((int)r["Int"]!)); // MongoDB currently doesn't support null checking ({ "Foo" : null }) in vector search pre-filters public override Task Equal_with_null_reference_type() diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicQueryTests.cs index 5d2d776c4757..65f1bb2101bd 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicQueryTests.cs +++ 
b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicQueryTests.cs @@ -1,8 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Linq.Expressions; using CosmosNoSQLIntegrationTests.Support; -using Microsoft.Extensions.VectorData; using VectorDataSpecificationTests.Filter; using VectorDataSpecificationTests.Support; using Xunit; @@ -12,22 +10,6 @@ namespace CosmosNoSQLIntegrationTests.Filter; public class CosmosNoSQLBasicQueryTests(CosmosNoSQLBasicQueryTests.Fixture fixture) : BasicQueryTests(fixture), IClassFixture { - // CosmosDB supports ordering by multiple fields only when a composite index is created up-front: - // https://learn.microsoft.com/en-us/azure/cosmos-db/index-policy#composite-indexes - // The index requires the order to be also provided up front (ASC or DESC), - // we don't expose API for such customization, so for now we just order by one field. - protected override List GetOrderedRecords(IQueryable filtered) - => filtered.OrderBy(r => r.Int2).ToList(); - - protected override async Task> GetResults(IVectorStoreRecordCollection collection, Expression> filter, int top) - { - GetFilteredRecordOptions options = new(); - - options.OrderBy.Ascending(r => r.Int2); - - return await collection.GetAsync(filter, top, options).ToListAsync(); - } - public new class Fixture : BasicQueryTests.QueryFixture { public override TestStore TestStore => CosmosNoSqlTestStore.Instance; diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryBatchConformanceTests.cs new file mode 100644 index 000000000000..2b46a4f8a947 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryBatchConformanceTests.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. + +using InMemoryIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace InMemoryIntegrationTests.CRUD; + +public class InMemoryBatchConformanceTests(InMemorySimpleModelFixture fixture) + : BatchConformanceTests(fixture), IClassFixture +{ + // InMemory always returns the vectors (IncludeVectors = false isn't respected) + public override async Task GetBatchAsync_WithoutVectors() + { + var expectedRecords = fixture.TestData.Take(2); // the last two records can get deleted by other tests + var ids = expectedRecords.Select(record => record.Id); + + var received = await fixture.Collection.GetAsync(ids, new() { IncludeVectors = false }).ToArrayAsync(); + + foreach (var record in expectedRecords) + { + record.AssertEqual(this.GetRecord(received, record.Id), includeVectors: true); + } + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryDynamicRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryDynamicRecordConformanceTests.cs new file mode 100644 index 000000000000..1250c2b29531 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryDynamicRecordConformanceTests.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using InMemoryIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace InMemoryIntegrationTests.CRUD; + +public class InMemoryDynamicRecordConformanceTests(InMemoryDynamicDataModelFixture fixture) + : DynamicDataModelConformanceTests(fixture), IClassFixture +{ + // InMemory always returns the vectors (IncludeVectors = false isn't respected) + public override async Task GetAsync_WithoutVectors() + { + var expectedRecord = fixture.TestData[0]; + + var received = await fixture.Collection.GetAsync( + (int)expectedRecord[DynamicDataModelFixture.KeyPropertyName]!, + new() { IncludeVectors = false }); + + AssertEquivalent(expectedRecord, received, includeVectors: true); + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryRecordConformanceTests.cs new file mode 100644 index 000000000000..de7575c6f9e9 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryRecordConformanceTests.cs @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft. All rights reserved. + +using InMemoryIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace InMemoryIntegrationTests.CRUD; + +public class InMemoryRecordConformanceTests(InMemorySimpleModelFixture fixture) + : RecordConformanceTests(fixture), IClassFixture +{ + // InMemory always returns the vectors (IncludeVectors = false isn't respected) + public override async Task GetAsync_WithoutVectors() + { + var expectedRecord = fixture.TestData[0]; + var received = await fixture.Collection.GetAsync(expectedRecord.Id, new() { IncludeVectors = false }); + + expectedRecord.AssertEqual(received, includeVectors: true); + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicFilterTests.cs index 198178aae1a1..b103836840df 100644 --- a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicFilterTests.cs @@ -5,7 +5,7 @@ using VectorDataSpecificationTests.Support; using Xunit; -namespace PostgresIntegrationTests.Filter; +namespace InMemoryIntegrationTests.Filter; public class InMemoryBasicFilterTests(InMemoryBasicFilterTests.Fixture fixture) : BasicFilterTests(fixture), IClassFixture @@ -13,5 +13,13 @@ public class InMemoryBasicFilterTests(InMemoryBasicFilterTests.Fixture fixture) public new class Fixture : BasicFilterTests.Fixture { public override TestStore TestStore => InMemoryTestStore.Instance; + + // BaseFilterTests attempts to create two InMemoryVectorStoreRecordCollection with different .NET types: + // 1. One for strongly-typed mapping (TRecord=FilterRecord) + // 2. One for dynamic mapping (TRecord=Dictionary) + // Unfortunately, InMemoryVectorStore does not allow mapping the same collection name to different types; + // at the same time, it simply evaluates all filtering via .NET AsQueryable(), so actual test coverage + // isn't very important here. So we disable the dynamic tests. 
+ public override bool TestDynamic => false; } } diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicQueryTests.cs index c7db1f873dca..8481cc14183d 100644 --- a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicQueryTests.cs @@ -13,5 +13,13 @@ public class InMemoryBasicQueryTests(InMemoryBasicQueryTests.Fixture fixture) public new class Fixture : BasicQueryTests.QueryFixture { public override TestStore TestStore => InMemoryTestStore.Instance; + + // BaseFilterTests attempts to create two InMemoryVectorStoreRecordCollection with different .NET types: + // 1. One for strongly-typed mapping (TRecord=FilterRecord) + // 2. One for dynamic mapping (TRecord=Dictionary) + // Unfortunately, InMemoryVectorStore does not allow mapping the same collection name to different types; + // at the same time, it simply evaluates all filtering via .NET AsQueryable(), so actual test coverage + // isn't very important here. So we disable the dynamic tests. + public override bool TestDynamic => false; } } diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/InMemoryIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/InMemoryIntegrationTests.csproj index f77fff8de939..1f5b8383e120 100644 --- a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/InMemoryIntegrationTests.csproj +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/InMemoryIntegrationTests.csproj @@ -10,17 +10,17 @@ - + runtime; build; native; contentfiles; analyzers; buildtransitive all - + - - + + diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemoryDynamicDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemoryDynamicDataModelFixture.cs new file mode 100644 index 000000000000..07a05aad98f8 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemoryDynamicDataModelFixture.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Support; + +namespace InMemoryIntegrationTests.Support; + +public class InMemoryDynamicDataModelFixture : DynamicDataModelFixture +{ + public override TestStore TestStore => InMemoryTestStore.Instance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemorySimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemorySimpleModelFixture.cs new file mode 100644 index 000000000000..8c32c4cc1306 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemorySimpleModelFixture.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using VectorDataSpecificationTests.Support; + +namespace InMemoryIntegrationTests.Support; + +public class InMemorySimpleModelFixture : SimpleModelFixture +{ + public override TestStore TestStore => InMemoryTestStore.Instance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicFilterTests.cs index 885c1503f5f7..da5ed5f46b8c 100644 --- a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicFilterTests.cs @@ -14,7 +14,9 @@ public class MongoDBBasicFilterTests(MongoDBBasicFilterTests.Fixture fixture) // Specialized MongoDB syntax for NOT over Contains ($nin) [ConditionalFact] public virtual Task Not_over_Contains() - => this.TestFilterAsync(r => !new[] { 8, 10 }.Contains(r.Int)); + => this.TestFilterAsync( + r => !new[] { 8, 10 }.Contains(r.Int), + r => !new[] { 8, 10 }.Contains((int)r["Int"]!)); #region Null checking diff --git a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicQueryTests.cs index c3893dee6a15..5e5d138a35b6 100644 --- a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/Filter/MongoDBBasicQueryTests.cs @@ -14,7 +14,9 @@ public class MongoDBBasicQueryTests(MongoDBBasicQueryTests.Fixture fixture) // Specialized MongoDB syntax for NOT over Contains ($nin) [ConditionalFact] public virtual Task Not_over_Contains() - => this.TestFilterAsync(r => !new[] { 8, 10 }.Contains(r.Int)); + => this.TestFilterAsync( + r => !new[] { 8, 10 }.Contains(r.Int), + r => !new[] { 8, 10 }.Contains((int)r["Int"]!)); // MongoDB currently doesn't support null checking ({ "Foo" : null }) in vector search pre-filters public override Task Equal_with_null_reference_type() diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeDynamicDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeDynamicDataModelConformanceTests.cs new file mode 100644 index 000000000000..029a18b0f4c5 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeDynamicDataModelConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using PineconeIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace PineconeIntegrationTests.CRUD; + +public class PineconeDynamicDataModelConformanceTests(PineconeDynamicDataModelFixture fixture) + : DynamicDataModelConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeGenericDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeGenericDataModelConformanceTests.cs deleted file mode 100644 index d18cdb99b38f..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeGenericDataModelConformanceTests.cs +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using PineconeIntegrationTests.Support; -using VectorDataSpecificationTests.CRUD; -using Xunit; - -namespace PineconeIntegrationTests.CRUD; - -public class PineconeGenericDataModelConformanceTests(PineconeGenericDataModelFixture fixture) - : GenericDataModelConformanceTests(fixture), IClassFixture -{ -} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicFilterTests.cs index 095b0d03ebd0..3aa0fa1006a4 100644 --- a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicFilterTests.cs @@ -8,13 +8,17 @@ namespace PineconeIntegrationTests.Filter; +#pragma warning disable CS8605 // Unboxing a possibly null value. + public class PineconeBasicFilterTests(PineconeBasicFilterTests.Fixture fixture) : BasicFilterTests(fixture), IClassFixture { // Specialized Pinecone syntax for NOT over Contains ($nin) [ConditionalFact] public virtual Task Not_over_Contains() - => this.TestFilterAsync(r => !new[] { 8, 10 }.Contains(r.Int)); + => this.TestFilterAsync( + r => !new[] { 8, 10 }.Contains(r.Int), + r => !new[] { 8, 10 }.Contains((int)r["Int"])); #region Null checking diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicQueryTests.cs index 019435ce7ab5..d8dea0526b75 100644 --- a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicQueryTests.cs @@ -1,7 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Linq.Expressions; -using Microsoft.Extensions.VectorData; using PineconeIntegrationTests.Support; using VectorDataSpecificationTests.Filter; using VectorDataSpecificationTests.Support; @@ -10,17 +8,17 @@ namespace PineconeIntegrationTests.Filter; +#pragma warning disable CS8605 // Unboxing a possibly null value. 
+ public class PineconeBasicQueryTests(PineconeBasicQueryTests.Fixture fixture) : BasicQueryTests(fixture), IClassFixture { - protected override async Task> GetResults(IVectorStoreRecordCollection collection, Expression> filter, int top) - // Pinecone doesn't support OrderBy in GetAsync, so we have to sort the results manually - => (await collection.GetAsync(filter, top).ToListAsync()).OrderBy(r => r.Int).ThenByDescending(r => r.String).ToList(); - // Specialized Pinecone syntax for NOT over Contains ($nin) [ConditionalFact] public virtual Task Not_over_Contains() - => this.TestFilterAsync(r => !new[] { 8, 10 }.Contains(r.Int)); + => this.TestFilterAsync( + r => !new[] { 8, 10 }.Contains(r.Int), + r => !new[] { 8, 10 }.Contains((int)r["Int"])); // Pinecone currently doesn't support null checking ({ "Foo" : null }) in vector search pre-filters public override Task Equal_with_null_reference_type() diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeGenericDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeDynamicDataModelFixture.cs similarity index 78% rename from dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeGenericDataModelFixture.cs rename to dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeDynamicDataModelFixture.cs index 91768966c9ff..5d3f5577fb56 100644 --- a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeGenericDataModelFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeDynamicDataModelFixture.cs @@ -4,7 +4,7 @@ namespace PineconeIntegrationTests.Support; -public class PineconeGenericDataModelFixture : GenericDataModelFixture +public class PineconeDynamicDataModelFixture : DynamicDataModelFixture { public override TestStore TestStore => PineconeTestStore.Instance; } diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresDynamicDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresDynamicDataModelConformanceTests.cs new file mode 100644 index 000000000000..d23adfeb48cf --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresDynamicDataModelConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using PostgresIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace PostgresIntegrationTests.CRUD; + +public class PostgresDynamicDataModelConformanceTests(PostgresDynamicDataModelFixture fixture) + : DynamicDataModelConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresGenericDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresGenericDataModelConformanceTests.cs deleted file mode 100644 index 98451084af94..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresGenericDataModelConformanceTests.cs +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using PostgresIntegrationTests.Support; -using VectorDataSpecificationTests.CRUD; -using Xunit; - -namespace PostgresIntegrationTests.CRUD; - -public class PostgresGenericDataModelConformanceTests(PostgresGenericDataModelFixture fixture) - : GenericDataModelConformanceTests(fixture), IClassFixture -{ -} diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicFilterTests.cs index 955d920cbde6..223fd51b6ffe 100644 --- a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicFilterTests.cs @@ -8,6 +8,8 @@ namespace PostgresIntegrationTests.Filter; +#pragma warning disable CS0252 // Possible unintended reference comparison; left hand side needs cast + public class PostgresBasicFilterTests(PostgresBasicFilterTests.Fixture fixture) : BasicFilterTests(fixture), IClassFixture { @@ -15,18 +17,22 @@ public override async Task Not_over_Or() { // Test sends: WHERE (NOT (("Int" = 8) OR ("String" = 'foo'))) // There's a NULL string in the database, and relational null semantics in conjunction with negation makes the default implementation fail. - await Assert.ThrowsAsync(() => base.Not_over_Or()); + await Assert.ThrowsAsync(() => base.Not_over_Or()); // Compensate by adding a null check: - await this.TestFilterAsync(r => r.String != null && !(r.Int == 8 || r.String == "foo")); + await this.TestFilterAsync( + r => r.String != null && !(r.Int == 8 || r.String == "foo"), + r => r["String"] != null && !((int)r["Int"]! == 8 || r["String"] == "foo")); } public override async Task NotEqual_with_string() { // As above, null semantics + negation - await Assert.ThrowsAsync(() => base.NotEqual_with_string()); + await Assert.ThrowsAsync(() => base.NotEqual_with_string()); - await this.TestFilterAsync(r => r.String != null && r.String != "foo"); + await this.TestFilterAsync( + r => r.String != null && r.String != "foo", + r => r["String"] != null && r["String"] != "foo"); } [Obsolete("Legacy filter support")] diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicQueryTests.cs index 13d6e8d09579..a3dcaf2295ca 100644 --- a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Filter/PostgresBasicQueryTests.cs @@ -8,6 +8,8 @@ namespace PostgresIntegrationTests.Filter; +#pragma warning disable CS0252 // Possible unintended reference comparison; left hand side needs cast + public class PostgresBasicQueryTests(PostgresBasicQueryTests.Fixture fixture) : BasicQueryTests(fixture), IClassFixture { @@ -15,18 +17,22 @@ public override async Task Not_over_Or() { // Test sends: WHERE (NOT (("Int" = 8) OR ("String" = 'foo'))) // There's a NULL string in the database, and relational null semantics in conjunction with negation makes the default implementation fail. 
- await Assert.ThrowsAsync(() => base.Not_over_Or()); + await Assert.ThrowsAsync(() => base.Not_over_Or()); // Compensate by adding a null check: - await this.TestFilterAsync(r => r.String != null && !(r.Int == 8 || r.String == "foo")); + await this.TestFilterAsync( + r => r.String != null && !(r.Int == 8 || r.String == "foo"), + r => r["String"] != null && !((int)r["Int"]! == 8 || r["String"] == "foo")); } public override async Task NotEqual_with_string() { // As above, null semantics + negation - await Assert.ThrowsAsync(() => base.NotEqual_with_string()); + await Assert.ThrowsAsync(() => base.NotEqual_with_string()); - await this.TestFilterAsync(r => r.String != null && r.String != "foo"); + await this.TestFilterAsync( + r => r.String != null && r.String != "foo", + r => r["String"] != null && r["String"] != "foo"); } public new class Fixture : BasicQueryTests.QueryFixture diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresGenericDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresDynamicDataModelFixture.cs similarity index 78% rename from dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresGenericDataModelFixture.cs rename to dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresDynamicDataModelFixture.cs index c5b9a96b405f..dc1698a280e1 100644 --- a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresGenericDataModelFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresDynamicDataModelFixture.cs @@ -4,7 +4,7 @@ namespace PostgresIntegrationTests.Support; -public class PostgresGenericDataModelFixture : GenericDataModelFixture +public class PostgresDynamicDataModelFixture : DynamicDataModelFixture { public override TestStore TestStore => PostgresTestStore.Instance; } diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicQueryTests.cs index f500a33abe92..ad7d6116cc21 100644 --- a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicQueryTests.cs @@ -1,7 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Linq.Expressions; -using Microsoft.Extensions.VectorData; using QdrantIntegrationTests.Support; using VectorDataSpecificationTests.Filter; using VectorDataSpecificationTests.Support; @@ -12,19 +10,6 @@ namespace QdrantIntegrationTests.Filter; public class QdrantBasicQueryTests(QdrantBasicQueryTests.Fixture fixture) : BasicQueryTests(fixture), IClassFixture { - // Qdrant does not support ordering by multiple fields, so we order by only one field. 
- protected override List GetOrderedRecords(IQueryable filtered) - => filtered.OrderBy(r => r.Int2).ToList(); - - protected override async Task> GetResults(IVectorStoreRecordCollection collection, Expression> filter, int top) - { - GetFilteredRecordOptions options = new(); - - options.OrderBy.Ascending(r => r.Int2); - - return await collection.GetAsync(filter, top, options).ToListAsync(); - } - public new class Fixture : BasicQueryTests.QueryFixture { public override TestStore TestStore => QdrantTestStore.NamedVectorsInstance; diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisGenericDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisGenericDataModelConformanceTests.cs deleted file mode 100644 index 8806430fb9a0..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisGenericDataModelConformanceTests.cs +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using RedisIntegrationTests.Support; -using VectorDataSpecificationTests.CRUD; -using Xunit; - -namespace RedisIntegrationTests.CRUD; - -public class RedisGenericDataModelConformanceTests(RedisGenericDataModelFixture fixture) - : GenericDataModelConformanceTests(fixture), IClassFixture -{ -} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetDynamicDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetDynamicDataModelConformanceTests.cs new file mode 100644 index 000000000000..b323b02624b0 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetDynamicDataModelConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using RedisIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace RedisIntegrationTests.CRUD; + +public class RedisHashSetDynamicDataModelConformanceTests(RedisHashSetDynamicDataModelFixture fixture) + : DynamicDataModelConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetRecordConformanceTests.cs new file mode 100644 index 000000000000..157af94969c1 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetRecordConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using RedisIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace RedisIntegrationTests.CRUD; + +public class RedisHashSetRecordConformanceTests(RedisHashSetSimpleModelFixture fixture) + : RecordConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisJsonDynamicDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisJsonDynamicDataModelConformanceTests.cs new file mode 100644 index 000000000000..e91579a4e053 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisJsonDynamicDataModelConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using RedisIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace RedisIntegrationTests.CRUD; + +public class RedisJsonDynamicDataModelConformanceTests(RedisJsonDynamicDataModelFixture fixture) + : DynamicDataModelConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisJsonRecordConformanceTests.cs similarity index 70% rename from dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisRecordConformanceTests.cs rename to dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisJsonRecordConformanceTests.cs index cab8188524fd..138cec84071d 100644 --- a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisRecordConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisJsonRecordConformanceTests.cs @@ -6,7 +6,7 @@ namespace RedisIntegrationTests.CRUD; -public class RedisRecordConformanceTests(RedisSimpleModelFixture fixture) - : RecordConformanceTests(fixture), IClassFixture +public class RedisJsonRecordConformanceTests(RedisJsonSimpleModelFixture fixture) + : RecordConformanceTests(fixture), IClassFixture { } diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Collections/RedisCollectionConformanceTests_HashSet.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Collections/RedisHashSetCollectionConformanceTests.cs similarity index 77% rename from dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Collections/RedisCollectionConformanceTests_HashSet.cs rename to dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Collections/RedisHashSetCollectionConformanceTests.cs index a3b7c411d8e4..0ebda7416993 100644 --- a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Collections/RedisCollectionConformanceTests_HashSet.cs +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Collections/RedisHashSetCollectionConformanceTests.cs @@ -6,7 +6,7 @@ namespace RedisIntegrationTests.Collections; -public class RedisCollectionConformanceTests_HashSet(RedisHashSetFixture fixture) +public class RedisHashSetCollectionConformanceTests(RedisHashSetFixture fixture) : CollectionConformanceTests(fixture), IClassFixture { } diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Collections/RedisCollectionConformanceTests_Json.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Collections/RedisJsonCollectionConformanceTests.cs similarity index 78% rename from dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Collections/RedisCollectionConformanceTests_Json.cs rename to dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Collections/RedisJsonCollectionConformanceTests.cs index 97d28ef6d17e..ed3fdc36db0b 100644 --- a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Collections/RedisCollectionConformanceTests_Json.cs +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Collections/RedisJsonCollectionConformanceTests.cs @@ -6,7 +6,7 @@ namespace RedisIntegrationTests.Collections; -public class RedisCollectionConformanceTests_Json(RedisJsonFixture fixture) +public class RedisJsonCollectionConformanceTests(RedisJsonFixture fixture) : CollectionConformanceTests(fixture), IClassFixture { } diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs 
b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs index 7ec4f834a5f0..70137ab607cf 100644 --- a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs @@ -81,8 +81,8 @@ protected override VectorStoreRecordDefinition GetRecordDefinition() Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(bool)).ToList() }; - protected override IVectorStoreRecordCollection CreateCollection() - => new RedisJsonVectorStoreRecordCollection( + protected override IVectorStoreRecordCollection GetCollection() + => new RedisJsonVectorStoreRecordCollection( RedisTestStore.JsonInstance.Database, this.CollectionName, new() { VectorStoreRecordDefinition = this.GetRecordDefinition() }); @@ -136,8 +136,8 @@ protected override VectorStoreRecordDefinition GetRecordDefinition() p.PropertyType != typeof(List)).ToList() }; - protected override IVectorStoreRecordCollection CreateCollection() - => new RedisHashSetVectorStoreRecordCollection( + protected override IVectorStoreRecordCollection GetCollection() + => new RedisHashSetVectorStoreRecordCollection( RedisTestStore.HashSetInstance.Database, this.CollectionName, new() { VectorStoreRecordDefinition = this.GetRecordDefinition() }); diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicQueryTests.cs index 94d859233faa..f9ab03c7cae9 100644 --- a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicQueryTests.cs @@ -1,6 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Linq.Expressions; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Redis; using RedisIntegrationTests.Support; @@ -14,19 +13,6 @@ namespace RedisIntegrationTests.Filter; public abstract class RedisBasicQueryTests(BasicQueryTests.QueryFixture fixture) : BasicQueryTests(fixture) { - // Redis does not support ordering by multiple fields, so we order by only one field. 
- protected override List GetOrderedRecords(IQueryable filtered) - => filtered.OrderBy(r => r.Int2).ToList(); - - protected override async Task> GetResults(IVectorStoreRecordCollection collection, Expression> filter, int top) - { - GetFilteredRecordOptions options = new(); - - options.OrderBy.Ascending(r => r.Int2); - - return await collection.GetAsync(filter, top, options).ToListAsync(); - } - #region Equality with null public override Task Equal_with_null_reference_type() @@ -95,8 +81,8 @@ protected override VectorStoreRecordDefinition GetRecordDefinition() Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(bool)).ToList() }; - protected override IVectorStoreRecordCollection CreateCollection() - => new RedisJsonVectorStoreRecordCollection( + protected override IVectorStoreRecordCollection GetCollection() + => new RedisJsonVectorStoreRecordCollection( RedisTestStore.JsonInstance.Database, this.CollectionName, new() { VectorStoreRecordDefinition = this.GetRecordDefinition() }); @@ -142,8 +128,8 @@ protected override VectorStoreRecordDefinition GetRecordDefinition() p.PropertyType != typeof(List)).ToList() }; - protected override IVectorStoreRecordCollection CreateCollection() - => new RedisHashSetVectorStoreRecordCollection( + protected override IVectorStoreRecordCollection GetCollection() + => new RedisHashSetVectorStoreRecordCollection( RedisTestStore.HashSetInstance.Database, this.CollectionName, new() { VectorStoreRecordDefinition = this.GetRecordDefinition() }); diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisHashSetDynamicDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisHashSetDynamicDataModelFixture.cs new file mode 100644 index 000000000000..c495c111f122 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisHashSetDynamicDataModelFixture.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Support; + +namespace RedisIntegrationTests.Support; + +public class RedisHashSetDynamicDataModelFixture : DynamicDataModelFixture +{ + public override TestStore TestStore => RedisTestStore.HashSetInstance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisHashSetSimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisHashSetSimpleModelFixture.cs new file mode 100644 index 000000000000..b8bcf29ead8e --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisHashSetSimpleModelFixture.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using VectorDataSpecificationTests.Support; + +namespace RedisIntegrationTests.Support; + +public class RedisHashSetSimpleModelFixture : SimpleModelFixture +{ + public override TestStore TestStore => RedisTestStore.HashSetInstance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisGenericDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisJsonDynamicDataModelFixture.cs similarity index 72% rename from dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisGenericDataModelFixture.cs rename to dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisJsonDynamicDataModelFixture.cs index 3a63d1d77f76..1023cab93ce0 100644 --- a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisGenericDataModelFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisJsonDynamicDataModelFixture.cs @@ -4,7 +4,7 @@ namespace RedisIntegrationTests.Support; -public class RedisGenericDataModelFixture : GenericDataModelFixture +public class RedisJsonDynamicDataModelFixture : DynamicDataModelFixture { public override TestStore TestStore => RedisTestStore.JsonInstance; } diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisSimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisJsonSimpleModelFixture.cs similarity index 75% rename from dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisSimpleModelFixture.cs rename to dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisJsonSimpleModelFixture.cs index f91aefd9055c..480e00aad6df 100644 --- a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisSimpleModelFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisJsonSimpleModelFixture.cs @@ -4,7 +4,7 @@ namespace RedisIntegrationTests.Support; -public class RedisSimpleModelFixture : SimpleModelFixture +public class RedisJsonSimpleModelFixture : SimpleModelFixture { public override TestStore TestStore => RedisTestStore.JsonInstance; } diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerDynamicDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerDynamicDataModelConformanceTests.cs new file mode 100644 index 000000000000..3a9e9c3e0672 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerDynamicDataModelConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using SqlServerIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace SqlServerIntegrationTests.CRUD; + +public class SqlServerDynamicDataModelConformanceTests(SqlServerDynamicDataModelFixture fixture) + : DynamicDataModelConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerGenericDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerGenericDataModelConformanceTests.cs deleted file mode 100644 index d3f67389e764..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerGenericDataModelConformanceTests.cs +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using SqlServerIntegrationTests.Support; -using VectorDataSpecificationTests.CRUD; -using Xunit; - -namespace SqlServerIntegrationTests.CRUD; - -public class SqlServerGenericDataModelConformanceTests(SqlServerGenericDataModelFixture fixture) - : GenericDataModelConformanceTests(fixture), IClassFixture -{ -} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicFilterTests.cs index 3bae6cc48552..dd15438bfcf3 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicFilterTests.cs @@ -9,6 +9,8 @@ namespace SqlServerIntegrationTests.Filter; +#pragma warning disable CS0252 // Possible unintended reference comparison; left hand side needs cast + public class SqlServerBasicFilterTests(SqlServerBasicFilterTests.Fixture fixture) : BasicFilterTests(fixture), IClassFixture { @@ -16,18 +18,22 @@ public override async Task Not_over_Or() { // Test sends: WHERE (NOT (("Int" = 8) OR ("String" = 'foo'))) // There's a NULL string in the database, and relational null semantics in conjunction with negation makes the default implementation fail. - await Assert.ThrowsAsync(() => base.Not_over_Or()); + await Assert.ThrowsAsync(() => base.Not_over_Or()); // Compensate by adding a null check: - await this.TestFilterAsync(r => r.String != null && !(r.Int == 8 || r.String == "foo")); + await this.TestFilterAsync( + r => r.String != null && !(r.Int == 8 || r.String == "foo"), + r => r["String"] != null && !((int)r["Int"]! == 8 || r["String"] == "foo")); } public override async Task NotEqual_with_string() { // As above, null semantics + negation - await Assert.ThrowsAsync(() => base.NotEqual_with_string()); + await Assert.ThrowsAsync(() => base.NotEqual_with_string()); - await this.TestFilterAsync(r => r.String != null && r.String != "foo"); + await this.TestFilterAsync( + r => r.String != null && r.String != "foo", + r => r["String"] != null && r["String"] != "foo"); } public override Task Contains_over_field_string_array() diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicQueryTests.cs index 101e5f3b57cf..1e10af0cb5b9 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicQueryTests.cs @@ -9,6 +9,8 @@ namespace SqlServerIntegrationTests.Filter; +#pragma warning disable CS0252 // Possible unintended reference comparison; left hand side needs cast + public class SqlServerBasicQueryTests(SqlServerBasicQueryTests.Fixture fixture) : BasicQueryTests(fixture), IClassFixture { @@ -16,18 +18,22 @@ public override async Task Not_over_Or() { // Test sends: WHERE (NOT (("Int" = 8) OR ("String" = 'foo'))) // There's a NULL string in the database, and relational null semantics in conjunction with negation makes the default implementation fail. 
- await Assert.ThrowsAsync(() => base.Not_over_Or()); + await Assert.ThrowsAsync(() => base.Not_over_Or()); // Compensate by adding a null check: - await this.TestFilterAsync(r => r.String != null && !(r.Int == 8 || r.String == "foo")); + await this.TestFilterAsync( + r => r.String != null && !(r.Int == 8 || r.String == "foo"), + r => r["String"] != null && !((int)r["Int"]! == 8 || r["String"] == "foo")); } public override async Task NotEqual_with_string() { // As above, null semantics + negation - await Assert.ThrowsAsync(() => base.NotEqual_with_string()); + await Assert.ThrowsAsync(() => base.NotEqual_with_string()); - await this.TestFilterAsync(r => r.String != null && r.String != "foo"); + await this.TestFilterAsync( + r => r.String != null && r.String != "foo", + r => r["String"] != null && r["String"] != "foo"); } public override Task Contains_over_field_string_array() diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs index aaf281c550e5..8d8bd68a6844 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs @@ -138,7 +138,7 @@ PRIMARY KEY ([id]) expectedCommand = "IF OBJECT_ID(N'[schema].[table]', N'U') IS NULL" + Environment.NewLine + expectedCommand; } - AssertEqualIgnoreNewLines(expectedCommand, command.CommandText); + Assert.Equal(expectedCommand, command.CommandText, ignoreLineEndingDifferences: true); } [Fact] @@ -175,7 +175,7 @@ WHEN NOT MATCHED THEN OUTPUT inserted.[id]; """"; - AssertEqualIgnoreNewLines(expectedCommand, command.CommandText); + Assert.Equal(expectedCommand, command.CommandText, ignoreLineEndingDifferences: true); Assert.Equal("@id_0", command.Parameters[0].ParameterName); Assert.Equal(DBNull.Value, command.Parameters[0].Value); Assert.Equal("@simpleString_1", command.Parameters[1].ParameterName); @@ -237,7 +237,7 @@ WHEN NOT MATCHED THEN SELECT KeyColumn FROM @InsertedKeys; """"; - AssertEqualIgnoreNewLines(expectedCommand, command.CommandText); + Assert.Equal(expectedCommand, command.CommandText, ignoreLineEndingDifferences: true); for (int i = 0; i < records.Length; i++) { @@ -299,12 +299,12 @@ public void SelectSingle() using SqlCommand command = SqlServerCommandBuilder.SelectSingle(connection, "schema", "tableName", model, 123L, includeVectors: true); - AssertEqualIgnoreNewLines( + Assert.Equal( """"" SELECT [id],[name],[age],[embedding] FROM [schema].[tableName] WHERE [id] = @id_0 - """"", command.CommandText); + """"", command.CommandText, ignoreLineEndingDifferences: true); Assert.Equal(123L, command.Parameters[0].Value); Assert.Equal("@id_0", command.Parameters[0].ParameterName); } @@ -327,12 +327,12 @@ public void SelectMany() Assert.True(SqlServerCommandBuilder.SelectMany(command, "schema", "tableName", model, keys, includeVectors: true)); - AssertEqualIgnoreNewLines( + Assert.Equal( """"" SELECT [id],[name],[age],[embedding] FROM [schema].[tableName] WHERE [id] IN (@id_0,@id_1,@id_2) - """"", command.CommandText); + """"", command.CommandText, ignoreLineEndingDifferences: true); for (int i = 0; i < keys.Length; i++) { Assert.Equal(keys[i], command.Parameters[i].Value); @@ -340,12 +340,6 @@ WHERE [id] IN (@id_0,@id_1,@id_2) } } - // This repo is configured with eol=lf, so the expected string should always use \n - // as long given IDE does not 
use \r\n. - // The actual string may use \r\n, so we just normalize both. - private static void AssertEqualIgnoreNewLines(string expected, string actual) - => Assert.Equal(expected.Replace("\r\n", "\n"), actual.Replace("\r\n", "\n")); - // We create a connection using a fake connection string just to be able to create the SqlCommand. private static SqlConnection CreateConnection() => new("Server=localhost;Database=master;Integrated Security=True;"); @@ -353,6 +347,6 @@ private static SqlConnection CreateConnection() private static VectorStoreRecordModel BuildModel(List properties) => new VectorStoreRecordModelBuilder(SqlServerConstants.ModelBuildingOptions) .Build( - typeof(VectorStoreGenericDataModel), + typeof(Dictionary), new() { Properties = properties }); } diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerGenericDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerDynamicDataModelFixture.cs similarity index 78% rename from dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerGenericDataModelFixture.cs rename to dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerDynamicDataModelFixture.cs index d3be9dbe419d..0ff725729c49 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerGenericDataModelFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerDynamicDataModelFixture.cs @@ -4,7 +4,7 @@ namespace SqlServerIntegrationTests.Support; -public class SqlServerGenericDataModelFixture : GenericDataModelFixture +public class SqlServerDynamicDataModelFixture : DynamicDataModelFixture { public override TestStore TestStore => SqlServerTestStore.Instance; } diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs index 10570cc109c5..faf9369f8d78 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs @@ -9,6 +9,8 @@ namespace SqliteIntegrationTests.Filter; +#pragma warning disable CS0252 // Possible unintended reference comparison; left hand side needs cast + public class SqliteBasicFilterTests(SqliteBasicFilterTests.Fixture fixture) : BasicFilterTests(fixture), IClassFixture { @@ -16,18 +18,22 @@ public override async Task Not_over_Or() { // Test sends: WHERE (NOT (("Int" = 8) OR ("String" = 'foo'))) // There's a NULL string in the database, and relational null semantics in conjunction with negation makes the default implementation fail. - await Assert.ThrowsAsync(() => base.Not_over_Or()); + await Assert.ThrowsAsync(() => base.Not_over_Or()); // Compensate by adding a null check: - await this.TestFilterAsync(r => r.String != null && !(r.Int == 8 || r.String == "foo")); + await this.TestFilterAsync( + r => r.String != null && !(r.Int == 8 || r.String == "foo"), + r => r["String"] != null && !((int)r["Int"]! 
== 8 || r["String"] == "foo")); } public override async Task NotEqual_with_string() { // As above, null semantics + negation - await Assert.ThrowsAsync(() => base.NotEqual_with_string()); + await Assert.ThrowsAsync(() => base.NotEqual_with_string()); - await this.TestFilterAsync(r => r.String != null && r.String != "foo"); + await this.TestFilterAsync( + r => r.String != null && r.String != "foo", + r => r["String"] != null && r["String"] != "foo"); } // Array fields not (currently) supported on SQLite (see #10343) diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicQueryTests.cs index 42f6215ab696..e11b81f0ef8f 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicQueryTests.cs @@ -9,6 +9,8 @@ namespace SqliteIntegrationTests.Filter; +#pragma warning disable CS0252 // Possible unintended reference comparison; left hand side needs cast + public class SqliteBasicQueryTests(SqliteBasicQueryTests.Fixture fixture) : BasicQueryTests(fixture), IClassFixture { @@ -16,18 +18,22 @@ public override async Task Not_over_Or() { // Test sends: WHERE (NOT (("Int" = 8) OR ("String" = 'foo'))) // There's a NULL string in the database, and relational null semantics in conjunction with negation makes the default implementation fail. - await Assert.ThrowsAsync(() => base.Not_over_Or()); + await Assert.ThrowsAsync(() => base.Not_over_Or()); // Compensate by adding a null check: - await this.TestFilterAsync(r => r.String != null && !(r.Int == 8 || r.String == "foo")); + await this.TestFilterAsync( + r => r.String != null && !(r.Int == 8 || r.String == "foo"), + r => r["String"] != null && !((int)r["Int"]! 
== 8 || r["String"] == "foo")); } public override async Task NotEqual_with_string() { // As above, null semantics + negation - await Assert.ThrowsAsync(() => base.NotEqual_with_string()); + await Assert.ThrowsAsync(() => base.NotEqual_with_string()); - await this.TestFilterAsync(r => r.String != null && r.String != "foo"); + await this.TestFilterAsync( + r => r.String != null && r.String != "foo", + r => r["String"] != null && r["String"] != "foo"); } // Array fields not (currently) supported on SQLite (see #10343) diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs index ac50caf47040..1c7ec863acba 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs @@ -10,24 +10,24 @@ namespace VectorDataSpecificationTests.CRUD; public abstract class BatchConformanceTests(SimpleModelFixture fixture) where TKey : notnull { [ConditionalFact] - public async Task GetBatchAsyncThrowsArgumentNullExceptionForNullKeys() + public virtual async Task GetBatchAsyncThrowsArgumentNullExceptionForNullKeys() { ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.GetAsync(keys: null!).ToArrayAsync().AsTask()); Assert.Equal("keys", ex.ParamName); } [ConditionalFact] - public async Task GetBatchAsyncDoesNotThrowForEmptyBatch() + public virtual async Task GetBatchAsyncDoesNotThrowForEmptyBatch() { Assert.Empty(await fixture.Collection.GetAsync([]).ToArrayAsync()); } [ConditionalFact] - public Task GetBatchAsyncReturnsInsertedRecords_WithVectors() + public virtual Task GetBatchAsync_WithVectors() => this.GetBatchAsyncReturnsInsertedRecords(includeVectors: true); [ConditionalFact] - public Task GetBatchAsyncReturnsInsertedRecords_WithoutVectors() + public virtual Task GetBatchAsync_WithoutVectors() => this.GetBatchAsyncReturnsInsertedRecords(includeVectors: false); private async Task GetBatchAsyncReturnsInsertedRecords(bool includeVectors) @@ -44,27 +44,20 @@ private async Task GetBatchAsyncReturnsInsertedRecords(bool includeVectors) } [ConditionalFact] - public async Task UpsertBatchAsyncThrowsArgumentNullExceptionForNullBatch() + public virtual async Task UpsertBatchAsyncThrowsArgumentNullExceptionForNullBatch() { ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.UpsertAsync(records: null!).ToArrayAsync().AsTask()); Assert.Equal("records", ex.ParamName); } [ConditionalFact] - public async Task UpsertBatchAsyncDoesNotThrowForEmptyBatch() + public virtual async Task UpsertBatchAsyncDoesNotThrowForEmptyBatch() { Assert.Empty(await fixture.Collection.UpsertAsync([]).ToArrayAsync()); } [ConditionalFact] - public Task UpsertBatchAsyncCanInsertNewRecord_WithVectors() - => this.UpsertBatchAsyncCanInsertNewRecords(includeVectors: true); - - [ConditionalFact] - public Task UpsertBatchAsyncCanInsertNewRecord_WithoutVectors() - => this.UpsertBatchAsyncCanInsertNewRecords(includeVectors: false); - - private async Task UpsertBatchAsyncCanInsertNewRecords(bool includeVectors) + public virtual async Task UpsertBatchAsyncCanInsertNewRecord() { var collection = fixture.Collection; SimpleRecord[] inserted = Enumerable.Range(0, 10).Select(i => new SimpleRecord() @@ -80,22 +73,15 @@ private async Task UpsertBatchAsyncCanInsertNewRecords(bool includeVectors) var receivedKeys = await 
collection.UpsertAsync(inserted).ToArrayAsync(); Assert.Equal(keys.ToHashSet(), receivedKeys.ToHashSet()); // .ToHashSet() to ignore order - var received = await collection.GetAsync(keys, new() { IncludeVectors = includeVectors }).ToArrayAsync(); + var received = await collection.GetAsync(keys, new() { IncludeVectors = true }).ToArrayAsync(); foreach (var record in inserted) { - record.AssertEqual(this.GetRecord(received, record.Id), includeVectors); + record.AssertEqual(this.GetRecord(received, record.Id), includeVectors: true); } } [ConditionalFact] - public Task UpsertBatchAsyncCanUpdateExistingRecords_WithVectors() - => this.UpsertBatchAsyncCanUpdateExistingRecords(includeVectors: true); - - [ConditionalFact] - public Task UpsertBatchAsyncCanUpdateExistingRecords_WithoutVectors() - => this.UpsertBatchAsyncCanUpdateExistingRecords(includeVectors: false); - - private async Task UpsertBatchAsyncCanUpdateExistingRecords(bool includeVectors) + public virtual async Task UpsertBatchAsyncCanUpdateExistingRecords() { SimpleRecord[] inserted = Enumerable.Range(0, 10).Select(i => new SimpleRecord() { @@ -119,22 +105,15 @@ private async Task UpsertBatchAsyncCanUpdateExistingRecords(bool includeVectors) updated.Select(r => r.Id).OrderBy(id => id).ToArray(), keys.OrderBy(id => id).ToArray()); - var received = await fixture.Collection.GetAsync(keys, new() { IncludeVectors = includeVectors }).ToArrayAsync(); + var received = await fixture.Collection.GetAsync(keys, new() { IncludeVectors = true }).ToArrayAsync(); foreach (var record in updated) { - record.AssertEqual(this.GetRecord(received, record.Id), includeVectors); + record.AssertEqual(this.GetRecord(received, record.Id), includeVectors: true); } } [ConditionalFact] - public Task UpsertCanBothInsertAndUpdateRecordsFromTheSameBatch_WithVectors() - => this.UpsertCanBothInsertAndUpdateRecordsFromTheSameBatch(includeVectors: true); - - [ConditionalFact] - public Task UpsertCanBothInsertAndUpdateRecordsFromTheSameBatch_WithoutVectors() - => this.UpsertCanBothInsertAndUpdateRecordsFromTheSameBatch(includeVectors: false); - - private async Task UpsertCanBothInsertAndUpdateRecordsFromTheSameBatch(bool includeVectors) + public virtual async Task UpsertCanBothInsertAndUpdateRecordsFromTheSameBatch() { SimpleRecord[] records = Enumerable.Range(0, 10).Select(i => new SimpleRecord() { @@ -164,28 +143,28 @@ private async Task UpsertCanBothInsertAndUpdateRecordsFromTheSameBatch(bool incl records.Select(r => r.Id).OrderBy(id => id).ToArray(), mixedKeys.OrderBy(id => id).ToArray()); - var received = await fixture.Collection.GetAsync(mixedKeys, new() { IncludeVectors = includeVectors }).ToArrayAsync(); + var received = await fixture.Collection.GetAsync(mixedKeys, new() { IncludeVectors = true }).ToArrayAsync(); foreach (var record in records) { - record.AssertEqual(this.GetRecord(received, record.Id), includeVectors); + record.AssertEqual(this.GetRecord(received, record.Id), includeVectors: true); } } [ConditionalFact] - public async Task DeleteBatchAsyncDoesNotThrowForEmptyBatch() + public virtual async Task DeleteBatchAsyncDoesNotThrowForEmptyBatch() { await fixture.Collection.DeleteAsync([]); } [ConditionalFact] - public async Task DeleteBatchAsyncThrowsArgumentNullExceptionForNullKeys() + public virtual async Task DeleteBatchAsyncThrowsArgumentNullExceptionForNullKeys() { ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.DeleteAsync(keys: null!)); Assert.Equal("keys", ex.ParamName); } [ConditionalFact] - public async Task 
DeleteBatchAsyncDeletesTheRecords() + public virtual async Task DeleteBatchAsyncDeletesTheRecords() { TKey[] idsToRemove = [fixture.TestData[2].Id, fixture.TestData[3].Id]; diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/DynamicDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/DynamicDataModelConformanceTests.cs new file mode 100644 index 000000000000..c1be244f0366 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/DynamicDataModelConformanceTests.cs @@ -0,0 +1,131 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Support; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace VectorDataSpecificationTests.CRUD; + +public abstract class DynamicDataModelConformanceTests(DynamicDataModelFixture fixture) + where TKey : notnull +{ + [ConditionalFact] + public virtual async Task GetAsyncThrowsArgumentNullExceptionForNullKey() + { + // Skip this test for value type keys + if (default(TKey) is not null) + { + return; + } + + ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.GetAsync((TKey)default!)); + Assert.Equal("key", ex.ParamName); + } + + [ConditionalFact] + public virtual async Task GetAsyncReturnsNullForNonExistingKey() + { + TKey key = fixture.GenerateNextKey(); + + Assert.Null(await fixture.Collection.GetAsync(key)); + } + + [ConditionalFact] + public virtual Task GetAsync_WithVectors() + => this.GetAsyncReturnsInsertedRecord(includeVectors: true); + + [ConditionalFact] + public virtual Task GetAsync_WithoutVectors() + => this.GetAsyncReturnsInsertedRecord(includeVectors: false); + + private async Task GetAsyncReturnsInsertedRecord(bool includeVectors) + { + var expectedRecord = fixture.TestData[0]; + + var received = await fixture.Collection.GetAsync( + (TKey)expectedRecord[DynamicDataModelFixture.KeyPropertyName]!, + new() { IncludeVectors = includeVectors }); + + AssertEquivalent(expectedRecord, received, includeVectors); + } + + [ConditionalFact] + public virtual async Task UpsertAsyncCanInsertNewRecord() + { + var collection = fixture.Collection; + TKey expectedKey = fixture.GenerateNextKey(); + var inserted = new Dictionary + { + [DynamicDataModelFixture.KeyPropertyName] = expectedKey, + [DynamicDataModelFixture.StringPropertyName] = "some", + [DynamicDataModelFixture.IntegerPropertyName] = 123, + [DynamicDataModelFixture.EmbeddingPropertyName] = new ReadOnlyMemory(Enumerable.Repeat(0.1f, DynamicDataModelFixture.DimensionCount).ToArray()) + }; + + Assert.Null(await collection.GetAsync(expectedKey)); + var key = await collection.UpsertAsync(inserted); + Assert.Equal(expectedKey, key); + + var received = await collection.GetAsync(expectedKey, new() { IncludeVectors = true }); + AssertEquivalent(inserted, received, includeVectors: true); + } + + [ConditionalFact] + public virtual async Task UpsertAsyncCanUpdateExistingRecord() + { + var collection = fixture.Collection; + var existingRecord = fixture.TestData[1]; + var updated = new Dictionary + { + [DynamicDataModelFixture.KeyPropertyName] = existingRecord[DynamicDataModelFixture.KeyPropertyName], + [DynamicDataModelFixture.StringPropertyName] = "different", + [DynamicDataModelFixture.IntegerPropertyName] = 456, + [DynamicDataModelFixture.EmbeddingPropertyName] = new ReadOnlyMemory(Enumerable.Repeat(0.7f, DynamicDataModelFixture.DimensionCount).ToArray()) + }; + + Assert.NotNull(await 
collection.GetAsync((TKey)existingRecord[DynamicDataModelFixture.KeyPropertyName]!)); + var key = await collection.UpsertAsync(updated); + Assert.Equal(existingRecord[DynamicDataModelFixture.KeyPropertyName], key); + + var received = await collection.GetAsync((TKey)existingRecord[DynamicDataModelFixture.KeyPropertyName]!, new() { IncludeVectors = true }); + AssertEquivalent(updated, received, includeVectors: true); + } + + [ConditionalFact] + public virtual async Task DeleteAsyncDoesNotThrowForNonExistingKey() + { + TKey key = fixture.GenerateNextKey(); + + await fixture.Collection.DeleteAsync(key); + } + + [ConditionalFact] + public async Task DeleteAsyncDeletesTheRecord() + { + var recordToRemove = fixture.TestData[2]; + + Assert.NotNull(await fixture.Collection.GetAsync((TKey)recordToRemove[DynamicDataModelFixture.KeyPropertyName]!)); + await fixture.Collection.DeleteAsync((TKey)recordToRemove[DynamicDataModelFixture.KeyPropertyName]!); + Assert.Null(await fixture.Collection.GetAsync((TKey)recordToRemove[DynamicDataModelFixture.KeyPropertyName]!)); + } + + protected static void AssertEquivalent(Dictionary expected, Dictionary? actual, bool includeVectors) + { + Assert.NotNull(actual); + Assert.Equal(expected[DynamicDataModelFixture.KeyPropertyName], actual[DynamicDataModelFixture.KeyPropertyName]); + + Assert.Equal(expected[DynamicDataModelFixture.StringPropertyName], actual[DynamicDataModelFixture.StringPropertyName]); + Assert.Equal(expected[DynamicDataModelFixture.IntegerPropertyName], actual[DynamicDataModelFixture.IntegerPropertyName]); + + if (includeVectors) + { + Assert.Equal( + ((ReadOnlyMemory)expected[DynamicDataModelFixture.EmbeddingPropertyName]!).ToArray(), + ((ReadOnlyMemory)actual[DynamicDataModelFixture.EmbeddingPropertyName]!).ToArray()); + } + else + { + Assert.False(actual.ContainsKey(DynamicDataModelFixture.EmbeddingPropertyName)); + } + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/GenericDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/GenericDataModelConformanceTests.cs deleted file mode 100644 index 672d25e08aa7..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/GenericDataModelConformanceTests.cs +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.Extensions.VectorData; -using VectorDataSpecificationTests.Support; -using VectorDataSpecificationTests.Xunit; -using Xunit; - -namespace VectorDataSpecificationTests.CRUD; - -public abstract class GenericDataModelConformanceTests(GenericDataModelFixture fixture) where TKey : notnull -{ - [ConditionalFact] - public async Task GetAsyncThrowsArgumentNullExceptionForNullKey() - { - ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.GetAsync((TKey)default!)); - Assert.Equal("key", ex.ParamName); - } - - [ConditionalFact] - public async Task GetAsyncReturnsNullForNonExistingKey() - { - TKey key = fixture.GenerateNextKey(); - - Assert.Null(await fixture.Collection.GetAsync(key)); - } - - [ConditionalFact] - public Task GetAsyncReturnsInsertedRecord_WithVectors() - => this.GetAsyncReturnsInsertedRecord(includeVectors: true); - - [ConditionalFact] - public Task GetAsyncReturnsInsertedRecord_WithoutVectors() - => this.GetAsyncReturnsInsertedRecord(includeVectors: false); - - private async Task GetAsyncReturnsInsertedRecord(bool includeVectors) - { - var expectedRecord = fixture.TestData[0]; - - var received = await fixture.Collection.GetAsync(expectedRecord.Key, new() { IncludeVectors = includeVectors }); - - AssertEqual(expectedRecord, received, includeVectors); - } - - [ConditionalFact] - public Task UpsertAsyncCanInsertNewRecord_WithVectors() - => this.UpsertAsyncCanInsertNewRecord(includeVectors: true); - - [ConditionalFact] - public Task UpsertAsyncCanInsertNewRecord_WithoutVectors() - => this.UpsertAsyncCanInsertNewRecord(includeVectors: false); - - private async Task UpsertAsyncCanInsertNewRecord(bool includeVectors) - { - var collection = fixture.Collection; - TKey expectedKey = fixture.GenerateNextKey(); - VectorStoreGenericDataModel inserted = new(expectedKey) - { - Data = - { - [GenericDataModelFixture.StringPropertyName] = "some", - [GenericDataModelFixture.IntegerPropertyName] = 123 - }, - Vectors = - { - [GenericDataModelFixture.EmbeddingPropertyName] = new ReadOnlyMemory(Enumerable.Repeat(0.1f, GenericDataModelFixture.DimensionCount).ToArray()) - } - }; - - Assert.Null(await collection.GetAsync(expectedKey)); - TKey key = await collection.UpsertAsync(inserted); - Assert.Equal(expectedKey, key); - - var received = await collection.GetAsync(expectedKey, new() { IncludeVectors = includeVectors }); - AssertEqual(inserted, received, includeVectors); - } - - [ConditionalFact] - public Task UpsertAsyncCanUpdateExistingRecord_WithVectors() - => this.UpsertAsyncCanUpdateExistingRecord(includeVectors: true); - - [ConditionalFact] - public Task UpsertAsyncCanUpdateExistingRecord__WithoutVectors() - => this.UpsertAsyncCanUpdateExistingRecord(includeVectors: false); - - private async Task UpsertAsyncCanUpdateExistingRecord(bool includeVectors) - { - var collection = fixture.Collection; - var existingRecord = fixture.TestData[1]; - VectorStoreGenericDataModel updated = new(existingRecord.Key) - { - Data = - { - [GenericDataModelFixture.StringPropertyName] = "different", - [GenericDataModelFixture.IntegerPropertyName] = 456 - }, - Vectors = - { - [GenericDataModelFixture.EmbeddingPropertyName] = new ReadOnlyMemory(Enumerable.Repeat(0.7f, GenericDataModelFixture.DimensionCount).ToArray()) - } - }; - - Assert.NotNull(await collection.GetAsync(existingRecord.Key)); - TKey key = await collection.UpsertAsync(updated); - Assert.Equal(existingRecord.Key, key); - - var received = await collection.GetAsync(existingRecord.Key, new() { IncludeVectors = 
includeVectors }); - AssertEqual(updated, received, includeVectors); - } - - [ConditionalFact] - public async Task DeleteAsyncDoesNotThrowForNonExistingKey() - { - TKey key = fixture.GenerateNextKey(); - - await fixture.Collection.DeleteAsync(key); - } - - [ConditionalFact] - public async Task DeleteAsyncDeletesTheRecord() - { - var recordToRemove = fixture.TestData[2]; - - Assert.NotNull(await fixture.Collection.GetAsync(recordToRemove.Key)); - await fixture.Collection.DeleteAsync(recordToRemove.Key); - Assert.Null(await fixture.Collection.GetAsync(recordToRemove.Key)); - } - - private static void AssertEqual(VectorStoreGenericDataModel expected, VectorStoreGenericDataModel? actual, bool includeVectors) - { - Assert.NotNull(actual); - Assert.Equal(expected.Key, actual.Key); - foreach (var pair in expected.Data) - { - Assert.Equal(pair.Value, actual.Data[pair.Key]); - } - - if (includeVectors) - { - Assert.Equal( - ((ReadOnlyMemory)expected.Vectors[GenericDataModelFixture.EmbeddingPropertyName]!).ToArray(), - ((ReadOnlyMemory)actual.Vectors[GenericDataModelFixture.EmbeddingPropertyName]!).ToArray()); - } - else - { - Assert.Empty(actual.Vectors); - } - } -} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs index a13f19696d55..092e16e2528c 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs @@ -10,14 +10,20 @@ namespace VectorDataSpecificationTests.CRUD; public class RecordConformanceTests(SimpleModelFixture fixture) where TKey : notnull { [ConditionalFact] - public async Task GetAsyncThrowsArgumentNullExceptionForNullKey() + public virtual async Task GetAsyncThrowsArgumentNullExceptionForNullKey() { + // Skip this test for value type keys + if (default(TKey) is not null) + { + return; + } + ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.GetAsync((TKey)default!)); Assert.Equal("key", ex.ParamName); } [ConditionalFact] - public async Task GetAsyncReturnsNullForNonExistingKey() + public virtual async Task GetAsyncReturnsNullForNonExistingKey() { TKey key = fixture.GenerateNextKey(); @@ -25,11 +31,11 @@ public async Task GetAsyncReturnsNullForNonExistingKey() } [ConditionalFact] - public Task GetAsyncReturnsInsertedRecord_WithVectors() + public virtual Task GetAsync_WithVectors() => this.GetAsyncReturnsInsertedRecord(includeVectors: true); [ConditionalFact] - public Task GetAsyncReturnsInsertedRecord_WithoutVectors() + public virtual Task GetAsync_WithoutVectors() => this.GetAsyncReturnsInsertedRecord(includeVectors: false); private async Task GetAsyncReturnsInsertedRecord(bool includeVectors) @@ -42,14 +48,7 @@ private async Task GetAsyncReturnsInsertedRecord(bool includeVectors) } [ConditionalFact] - public Task UpsertAsyncCanInsertNewRecord_WithVectors() - => this.UpsertAsyncCanInsertNewRecord(includeVectors: true); - - [ConditionalFact] - public Task UpsertAsyncCanInsertNewRecord_WithoutVectors() - => this.UpsertAsyncCanInsertNewRecord(includeVectors: false); - - private async Task UpsertAsyncCanInsertNewRecord(bool includeVectors) + public virtual async Task UpsertAsyncCanInsertNewRecord() { var collection = fixture.Collection; TKey expectedKey = fixture.GenerateNextKey(); @@ -65,19 +64,12 @@ private async Task UpsertAsyncCanInsertNewRecord(bool 
includeVectors) TKey key = await collection.UpsertAsync(inserted); Assert.Equal(expectedKey, key); - var received = await collection.GetAsync(expectedKey, new() { IncludeVectors = includeVectors }); - inserted.AssertEqual(received, includeVectors); + var received = await collection.GetAsync(expectedKey, new() { IncludeVectors = true }); + inserted.AssertEqual(received, includeVectors: true); } [ConditionalFact] - public Task UpsertAsyncCanUpdateExistingRecord_WithVectors() - => this.UpsertAsyncCanUpdateExistingRecord(includeVectors: true); - - [ConditionalFact] - public Task UpsertAsyncCanUpdateExistingRecord__WithoutVectors() - => this.UpsertAsyncCanUpdateExistingRecord(includeVectors: false); - - private async Task UpsertAsyncCanUpdateExistingRecord(bool includeVectors) + public virtual async Task UpsertAsyncCanUpdateExistingRecord() { var collection = fixture.Collection; var existingRecord = fixture.TestData[1]; @@ -93,12 +85,12 @@ private async Task UpsertAsyncCanUpdateExistingRecord(bool includeVectors) TKey key = await collection.UpsertAsync(updated); Assert.Equal(existingRecord.Id, key); - var received = await collection.GetAsync(existingRecord.Id, new() { IncludeVectors = includeVectors }); - updated.AssertEqual(received, includeVectors); + var received = await collection.GetAsync(existingRecord.Id, new() { IncludeVectors = true }); + updated.AssertEqual(received, includeVectors: true); } [ConditionalFact] - public async Task DeleteAsyncDoesNotThrowForNonExistingKey() + public virtual async Task DeleteAsyncDoesNotThrowForNonExistingKey() { TKey key = fixture.GenerateNextKey(); @@ -106,7 +98,7 @@ public async Task DeleteAsyncDoesNotThrowForNonExistingKey() } [ConditionalFact] - public async Task DeleteAsyncDeletesTheRecord() + public virtual async Task DeleteAsyncDeletesTheRecord() { var recordToRemove = fixture.TestData[2]; diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs index a323e0a09cd4..611094aecb65 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs @@ -8,159 +8,139 @@ namespace VectorDataSpecificationTests.Collections; -public abstract class CollectionConformanceTests(VectorStoreFixture fixture) where TKey : notnull +public abstract class CollectionConformanceTests(VectorStoreFixture fixture) + where TKey : notnull { - [ConditionalFact] - public Task DeleteCollectionDoesNotThrowForNonExistingCollection() - => this.DeleteNonExistingCollection>(); - - [ConditionalFact] - public Task DeleteCollectionDoesNotThrowForNonExistingCollection_GenericDataModel() - => this.DeleteNonExistingCollection>(); - - [ConditionalFact] - public Task CreateCollectionCreatesTheCollection() - => this.CreateCollection>(); - - [ConditionalFact] - public Task CreateCollectionCreatesTheCollection_GenericDataModel() - => this.CreateCollection>(); - - [ConditionalFact] - public Task CreateCollectionIfNotExistsCalledMoreThanOnceDoesNotThrow() - => this.CreateCollectionIfNotExistsMoreThanOnce>(); - - [ConditionalFact] - public Task CreateCollectionIfNotExistsCalledMoreThanOnceDoesNotThrow_GenericDataModel() - => this.CreateCollectionIfNotExistsMoreThanOnce>(); - - [ConditionalFact] - public Task 
CreateCollectionCalledMoreThanOnceThrowsVectorStoreOperationException() - => this.CreateCollectionMoreThanOnce>(); - - [ConditionalFact] - public Task CreateCollectionCalledMoreThanOnceThrowsVectorStoreOperationException_GenericDataModel() - => this.CreateCollectionMoreThanOnce>(); - - private async Task> GetNonExistingCollectionAsync() where TRecord : notnull + [ConditionalTheory] + [MemberData(nameof(UseDynamicMappingData))] + public Task DeleteCollectionDoesNotThrowForNonExistingCollection(bool useDynamicMapping) { - var collectionName = fixture.GetUniqueCollectionName(); - VectorStoreRecordDefinition? definition = null; - if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) - { - definition = new() - { - Properties = - [ - new VectorStoreRecordKeyProperty(nameof(VectorStoreGenericDataModel.Key), typeof(TKey)), - new VectorStoreRecordDataProperty("string", typeof(string)), - new VectorStoreRecordDataProperty("integer", typeof(int)), - new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) - { - Dimensions = 10 - } - ] - }; - } - - var collection = fixture.TestStore.DefaultVectorStore.GetCollection(collectionName, definition); + return useDynamicMapping ? Core>() : Core>(); - Assert.False(await collection.CollectionExistsAsync()); + async Task Core() where TRecord : notnull + { + var collection = await this.GetNonExistingCollectionAsync(); - return collection; + await collection.DeleteCollectionAsync(); + } } - private async Task DeleteNonExistingCollection() where TRecord : notnull + [ConditionalTheory] + [MemberData(nameof(UseDynamicMappingData))] + public Task CreateCollectionCreatesTheCollection(bool useDynamicMapping) { - var collection = await this.GetNonExistingCollectionAsync(); + return useDynamicMapping ? Core>() : Core>(); - await collection.DeleteCollectionAsync(); - } - - private async Task CreateCollection() where TRecord : notnull - { - var collection = await this.GetNonExistingCollectionAsync(); + async Task Core() where TRecord : notnull + { + var collection = await this.GetNonExistingCollectionAsync(); - await collection.CreateCollectionAsync(); + await collection.CreateCollectionAsync(); - try - { - Assert.True(await collection.CollectionExistsAsync()); + try + { + Assert.True(await collection.CollectionExistsAsync()); -#pragma warning disable MEVD9000 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. - var collectionMetadata = collection.GetService(typeof(VectorStoreRecordCollectionMetadata)) as VectorStoreRecordCollectionMetadata; -#pragma warning restore MEVD9000 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
+ var collectionMetadata = collection.GetService(typeof(VectorStoreRecordCollectionMetadata)) as VectorStoreRecordCollectionMetadata; - Assert.NotNull(collectionMetadata); - Assert.NotNull(collectionMetadata.VectorStoreSystemName); - Assert.NotNull(collectionMetadata.VectorStoreName); - Assert.NotNull(collectionMetadata.CollectionName); + Assert.NotNull(collectionMetadata); + Assert.NotNull(collectionMetadata.VectorStoreSystemName); + Assert.NotNull(collectionMetadata.CollectionName); - Assert.True(await fixture.TestStore.DefaultVectorStore.ListCollectionNamesAsync().ContainsAsync(collectionMetadata.CollectionName)); - } - finally - { - await collection.DeleteCollectionAsync(); + Assert.True(await fixture.TestStore.DefaultVectorStore.ListCollectionNamesAsync().ContainsAsync(collectionMetadata.CollectionName)); + } + finally + { + await collection.DeleteCollectionAsync(); + } } } - private async Task CreateCollectionIfNotExistsMoreThanOnce() where TRecord : notnull + [ConditionalTheory] + [MemberData(nameof(UseDynamicMappingData))] + public Task CreateCollectionIfNotExistsCalledMoreThanOnceDoesNotThrow(bool useDynamicMapping) { - var collection = await this.GetNonExistingCollectionAsync(); - - await collection.CreateCollectionIfNotExistsAsync(); + return useDynamicMapping ? Core>() : Core>(); - try + async Task Core() where TRecord : notnull { - Assert.True(await collection.CollectionExistsAsync()); + var collection = await this.GetNonExistingCollectionAsync(); -#pragma warning disable MEVD9000 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. - var collectionMetadata = collection.GetService(typeof(VectorStoreRecordCollectionMetadata)) as VectorStoreRecordCollectionMetadata; -#pragma warning restore MEVD9000 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + await collection.CreateCollectionIfNotExistsAsync(); - Assert.NotNull(collectionMetadata); - Assert.NotNull(collectionMetadata.VectorStoreSystemName); - Assert.NotNull(collectionMetadata.VectorStoreName); - Assert.NotNull(collectionMetadata.CollectionName); + try + { + Assert.True(await collection.CollectionExistsAsync()); - Assert.True(await fixture.TestStore.DefaultVectorStore.ListCollectionNamesAsync().ContainsAsync(collectionMetadata.CollectionName)); + var collectionMetadata = collection.GetService(typeof(VectorStoreRecordCollectionMetadata)) as VectorStoreRecordCollectionMetadata; - await collection.CreateCollectionIfNotExistsAsync(); - } - finally - { - await collection.DeleteCollectionAsync(); + Assert.NotNull(collectionMetadata); + Assert.NotNull(collectionMetadata.VectorStoreSystemName); + Assert.NotNull(collectionMetadata.CollectionName); + + Assert.True(await fixture.TestStore.DefaultVectorStore.ListCollectionNamesAsync().ContainsAsync(collectionMetadata.CollectionName)); + + await collection.CreateCollectionIfNotExistsAsync(); + } + finally + { + await collection.DeleteCollectionAsync(); + } } } - private async Task CreateCollectionMoreThanOnce() where TRecord : notnull + [ConditionalTheory] + [MemberData(nameof(UseDynamicMappingData))] + public Task CreateCollectionCalledMoreThanOnceThrowsVectorStoreOperationException(bool useDynamicMapping) { - var collection = await this.GetNonExistingCollectionAsync(); - - await collection.CreateCollectionAsync(); + return useDynamicMapping ? 
Core>() : Core>(); - try + async Task Core() where TRecord : notnull { - Assert.True(await collection.CollectionExistsAsync()); + var collection = await this.GetNonExistingCollectionAsync(); -#pragma warning disable MEVD9000 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. - var collectionMetadata = collection.GetService(typeof(VectorStoreRecordCollectionMetadata)) as VectorStoreRecordCollectionMetadata; -#pragma warning restore MEVD9000 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + await collection.CreateCollectionAsync(); - Assert.NotNull(collectionMetadata); - Assert.NotNull(collectionMetadata.VectorStoreSystemName); - Assert.NotNull(collectionMetadata.VectorStoreName); - Assert.NotNull(collectionMetadata.CollectionName); + try + { + Assert.True(await collection.CollectionExistsAsync()); - Assert.True(await fixture.TestStore.DefaultVectorStore.ListCollectionNamesAsync().ContainsAsync(collectionMetadata.CollectionName)); + var collectionMetadata = collection.GetService(typeof(VectorStoreRecordCollectionMetadata)) as VectorStoreRecordCollectionMetadata; - await collection.CreateCollectionIfNotExistsAsync(); + Assert.NotNull(collectionMetadata); + Assert.NotNull(collectionMetadata.VectorStoreSystemName); + Assert.NotNull(collectionMetadata.CollectionName); + + Assert.True(await fixture.TestStore.DefaultVectorStore.ListCollectionNamesAsync().ContainsAsync(collectionMetadata.CollectionName)); + + await collection.CreateCollectionIfNotExistsAsync(); - await Assert.ThrowsAsync(() => collection.CreateCollectionAsync()); + await Assert.ThrowsAsync(() => collection.CreateCollectionAsync()); + } + finally + { + await collection.DeleteCollectionAsync(); + } } - finally + } + + protected virtual async Task> GetNonExistingCollectionAsync() where TRecord : notnull + { + var definition = new VectorStoreRecordDefinition() { - await collection.DeleteCollectionAsync(); - } + Properties = + [ + new VectorStoreRecordKeyProperty(nameof(SimpleRecord.Id), typeof(TKey)) { StoragePropertyName = "key" }, + new VectorStoreRecordDataProperty(nameof(SimpleRecord.Text), typeof(string)) { StoragePropertyName = "text" }, + new VectorStoreRecordDataProperty(nameof(SimpleRecord.Number), typeof(int)) { StoragePropertyName = "number" }, + new VectorStoreRecordVectorProperty(nameof(SimpleRecord.Floats), typeof(ReadOnlyMemory)) { Dimensions = 10 } + ] + }; + + var collection = fixture.TestStore.DefaultVectorStore.GetCollection(fixture.GetUniqueCollectionName(), definition); + await collection.DeleteCollectionAsync(); + return collection; } + + public static readonly IEnumerable UseDynamicMappingData = [[false], [true]]; } diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs index 56e3905116aa..085890078236 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs @@ -6,6 +6,10 @@ using VectorDataSpecificationTests.Xunit; using Xunit; +#pragma warning disable CS8605 // Unboxing a possibly null value. 
+#pragma warning disable CS0252 // Possible unintended reference comparison; left hand side needs cast +#pragma warning disable RCS1098 // Constant values should be placed on right side of comparisons + namespace VectorDataSpecificationTests.Filter; public abstract class BasicFilterTests(BasicFilterTests.Fixture fixture) @@ -15,71 +19,103 @@ public abstract class BasicFilterTests(BasicFilterTests.Fixture fixt [ConditionalFact] public virtual Task Equal_with_int() - => this.TestFilterAsync(r => r.Int == 8); + => this.TestFilterAsync( + r => r.Int == 8, + r => (int)r["Int"] == 8); [ConditionalFact] public virtual Task Equal_with_string() - => this.TestFilterAsync(r => r.String == "foo"); + => this.TestFilterAsync( + r => r.String == "foo", + r => r["String"] == "foo"); [ConditionalFact] public virtual Task Equal_with_string_containing_special_characters() - => this.TestFilterAsync(r => r.String == """with some special"characters'and\stuff"""); + => this.TestFilterAsync( + r => r.String == """with some special"characters'and\stuff""", + r => r["String"] == """with some special"characters'and\stuff"""); [ConditionalFact] public virtual Task Equal_with_string_is_not_Contains() - => this.TestFilterAsync(r => r.String == "some", expectZeroResults: true); + => this.TestFilterAsync( + r => r.String == "some", + r => r["String"] == "some", + expectZeroResults: true); [ConditionalFact] public virtual Task Equal_reversed() - => this.TestFilterAsync(r => 8 == r.Int); + => this.TestFilterAsync( + r => 8 == r.Int, + r => 8 == (int)r["Int"]); [ConditionalFact] public virtual Task Equal_with_null_reference_type() - => this.TestFilterAsync(r => r.String == null); + => this.TestFilterAsync( + r => r.String == null, + r => r["String"] == null); [ConditionalFact] public virtual Task Equal_with_null_captured() { string? s = null; - return this.TestFilterAsync(r => r.String == s); + return this.TestFilterAsync( + r => r.String == s, + r => r["String"] == s); } [ConditionalFact] public virtual Task NotEqual_with_int() - => this.TestFilterAsync(r => r.Int != 8); + => this.TestFilterAsync( + r => r.Int != 8, + r => (int)r["Int"] != 8); [ConditionalFact] public virtual Task NotEqual_with_string() - => this.TestFilterAsync(r => r.String != "foo"); + => this.TestFilterAsync( + r => r.String != "foo", + r => r["String"] != "foo"); [ConditionalFact] public virtual Task NotEqual_reversed() - => this.TestFilterAsync(r => r.Int != 8); + => this.TestFilterAsync( + r => r.Int != 8, + r => (int)r["Int"] != 8); [ConditionalFact] public virtual Task NotEqual_with_null_reference_type() - => this.TestFilterAsync(r => r.String != null); + => this.TestFilterAsync( + r => r.String != null, + r => r["String"] != null); [ConditionalFact] public virtual Task NotEqual_with_null_captured() { string? 
s = null; - return this.TestFilterAsync(r => r.String != s); + return this.TestFilterAsync( + r => r.String != s, + r => r["String"] != s); } [ConditionalFact] public virtual Task Bool() - => this.TestFilterAsync(r => r.Bool); + => this.TestFilterAsync( + r => r.Bool, + r => (bool)r["Bool"]); [ConditionalFact] public virtual Task Bool_And_Bool() - => this.TestFilterAsync(r => r.Bool && r.Bool); + => this.TestFilterAsync( + r => r.Bool && r.Bool, + r => (bool)r["Bool"] && (bool)r["Bool"]); [ConditionalFact] public virtual Task Bool_Or_Not_Bool() - => this.TestFilterAsync(r => r.Bool || !r.Bool, expectAllResults: true); + => this.TestFilterAsync( + r => r.Bool || !r.Bool, + r => (bool)r["Bool"] || !(bool)r["Bool"], + expectAllResults: true); #endregion Equality @@ -87,19 +123,27 @@ public virtual Task Bool_Or_Not_Bool() [ConditionalFact] public virtual Task GreaterThan_with_int() - => this.TestFilterAsync(r => r.Int > 9); + => this.TestFilterAsync( + r => r.Int > 9, + r => (int)r["Int"] > 9); [ConditionalFact] public virtual Task GreaterThanOrEqual_with_int() - => this.TestFilterAsync(r => r.Int >= 9); + => this.TestFilterAsync( + r => r.Int >= 9, + r => (int)r["Int"] >= 9); [ConditionalFact] public virtual Task LessThan_with_int() - => this.TestFilterAsync(r => r.Int < 10); + => this.TestFilterAsync( + r => r.Int < 10, + r => (int)r["Int"] < 10); [ConditionalFact] public virtual Task LessThanOrEqual_with_int() - => this.TestFilterAsync(r => r.Int <= 10); + => this.TestFilterAsync( + r => r.Int <= 10, + r => (int)r["Int"] <= 10); #endregion Comparison @@ -107,49 +151,71 @@ public virtual Task LessThanOrEqual_with_int() [ConditionalFact] public virtual Task And() - => this.TestFilterAsync(r => r.Int == 8 && r.String == "foo"); + => this.TestFilterAsync( + r => r.Int == 8 && r.String == "foo", + r => (int)r["Int"] == 8 && r["String"] == "foo"); [ConditionalFact] public virtual Task Or() - => this.TestFilterAsync(r => r.Int == 8 || r.String == "foo"); + => this.TestFilterAsync( + r => r.Int == 8 || r.String == "foo", + r => (int)r["Int"] == 8 || r["String"] == "foo"); [ConditionalFact] public virtual Task And_within_And() - => this.TestFilterAsync(r => (r.Int == 8 && r.String == "foo") && r.Int2 == 80); + => this.TestFilterAsync( + r => (r.Int == 8 && r.String == "foo") && r.Int2 == 80, + r => ((int)r["Int"] == 8 && r["String"] == "foo") && (int)r["Int2"] == 80); [ConditionalFact] public virtual Task And_within_Or() - => this.TestFilterAsync(r => (r.Int == 8 && r.String == "foo") || r.Int2 == 100); + => this.TestFilterAsync( + r => (r.Int == 8 && r.String == "foo") || r.Int2 == 100, + r => ((int)r["Int"] == 8 && r["String"] == "foo") || (int)r["Int2"] == 100); [ConditionalFact] public virtual Task Or_within_And() - => this.TestFilterAsync(r => (r.Int == 8 || r.Int == 9) && r.String == "foo"); + => this.TestFilterAsync( + r => (r.Int == 8 || r.Int == 9) && r.String == "foo", + r => ((int)r["Int"] == 8 || (int)r["Int"] == 9) && r["String"] == "foo"); [ConditionalFact] public virtual Task Not_over_Equal() // ReSharper disable once NegativeEqualityExpression - => this.TestFilterAsync(r => !(r.Int == 8)); + => this.TestFilterAsync( + r => !(r.Int == 8), + r => !((int)r["Int"] == 8)); [ConditionalFact] public virtual Task Not_over_NotEqual() // ReSharper disable once NegativeEqualityExpression - => this.TestFilterAsync(r => !(r.Int != 8)); + => this.TestFilterAsync( + r => !(r.Int != 8), + r => !((int)r["Int"] != 8)); [ConditionalFact] public virtual Task Not_over_And() - => this.TestFilterAsync(r => 
!(r.Int == 8 && r.String == "foo")); + => this.TestFilterAsync( + r => !(r.Int == 8 && r.String == "foo"), + r => !((int)r["Int"] == 8 && r["String"] == "foo")); [ConditionalFact] public virtual Task Not_over_Or() - => this.TestFilterAsync(r => !(r.Int == 8 || r.String == "foo")); + => this.TestFilterAsync( + r => !(r.Int == 8 || r.String == "foo"), + r => !((int)r["Int"] == 8 || r["String"] == "foo")); [ConditionalFact] public virtual Task Not_over_bool() - => this.TestFilterAsync(r => !r.Bool); + => this.TestFilterAsync( + r => !r.Bool, + r => !(bool)r["Bool"]); [ConditionalFact] public virtual Task Not_over_bool_And_Comparison() - => this.TestFilterAsync(r => !r.Bool && r.Int != int.MaxValue); + => this.TestFilterAsync( + r => !r.Bool && r.Int != int.MaxValue, + r => !(bool)r["Bool"] && (int)r["Int"] != int.MaxValue); #endregion Logical operators @@ -157,30 +223,42 @@ public virtual Task Not_over_bool_And_Comparison() [ConditionalFact] public virtual Task Contains_over_field_string_array() - => this.TestFilterAsync(r => r.StringArray.Contains("x")); + => this.TestFilterAsync( + r => r.StringArray.Contains("x"), + r => ((string[])r["StringArray"]!).Contains("x")); [ConditionalFact] public virtual Task Contains_over_field_string_List() - => this.TestFilterAsync(r => r.StringList.Contains("x")); + => this.TestFilterAsync( + r => r.StringList.Contains("x"), + r => ((List)r["StringList"]!).Contains("x")); [ConditionalFact] public virtual Task Contains_over_inline_int_array() - => this.TestFilterAsync(r => new[] { 8, 10 }.Contains(r.Int)); + => this.TestFilterAsync( + r => new[] { 8, 10 }.Contains(r.Int), + r => new[] { 8, 10 }.Contains((int)r["Int"])); [ConditionalFact] public virtual Task Contains_over_inline_string_array() - => this.TestFilterAsync(r => new[] { "foo", "baz", "unknown" }.Contains(r.String)); + => this.TestFilterAsync( + r => new[] { "foo", "baz", "unknown" }.Contains(r.String), + r => new[] { "foo", "baz", "unknown" }.Contains(r["String"])); [ConditionalFact] public virtual Task Contains_over_inline_string_array_with_weird_chars() - => this.TestFilterAsync(r => new[] { "foo", "baz", "un , ' \"" }.Contains(r.String)); + => this.TestFilterAsync( + r => new[] { "foo", "baz", "un , ' \"" }.Contains(r.String), + r => new[] { "foo", "baz", "un , ' \"" }.Contains(r["String"])); [ConditionalFact] public virtual Task Contains_over_captured_string_array() { var array = new[] { "foo", "baz", "unknown" }; - return this.TestFilterAsync(r => array.Contains(r.String)); + return this.TestFilterAsync( + r => array.Contains(r.String), + r => array.Contains(r["String"])); } #endregion Contains @@ -191,7 +269,9 @@ public virtual Task Captured_variable() // ReSharper disable once ConvertToConstant.Local var i = 8; - return this.TestFilterAsync(r => r.Int == i); + return this.TestFilterAsync( + r => r.Int == i, + r => (int)r["Int"] == i); } #region Legacy filter support @@ -226,15 +306,13 @@ public virtual Task Legacy_AnyTagEqualTo_List() #endregion Legacy filter support - protected virtual List GetOrderedRecords(IQueryable filtered) - => filtered.OrderBy(r => r.Key).ToList(); - protected virtual async Task TestFilterAsync( Expression> filter, + Expression, bool>> dynamicFilter, bool expectZeroResults = false, bool expectAllResults = false) { - var expected = this.GetOrderedRecords(fixture.TestData.AsQueryable().Where(filter)); + var expected = fixture.TestData.AsQueryable().Where(filter).OrderBy(r => r.Key).ToList(); if (expected.Count == 0 && !expectZeroResults) { @@ -246,25 +324,44 @@ 
protected virtual async Task TestFilterAsync( Assert.Fail("The test returns all results, and so is unreliable"); } - var actual = await this.GetResults(fixture.Collection, filter, fixture.TestData.Count); - - Assert.Equal(expected, actual, (e, a) => - e.Int == a.Int && - e.String == a.String && - e.Int2 == a.Int2); - } - - protected virtual async Task> GetResults(IVectorStoreRecordCollection collection, Expression> filter, int top) - { - var results = await collection.VectorizedSearchAsync( + // Execute the query against the vector store, once using the strongly typed filter + // and once using the dynamic filter + var results = await fixture.Collection.VectorizedSearchAsync( new ReadOnlyMemory([1, 2, 3]), top: fixture.TestData.Count, - new() + new() { Filter = filter }); + + var actual = await results.Results.Select(r => r.Record).OrderBy(r => r.Key).ToListAsync(); + + if (actual.Count != expected.Count) + { + Assert.Fail($"Expected {expected.Count} results, but got {actual.Count}"); + } + + foreach (var (e, a) in expected.Zip(actual, (e, a) => (e, a))) + { + fixture.AssertEqualFilterRecord(e, a); + } + + if (fixture.TestDynamic) + { + var dynamicResults = await fixture.DynamicCollection.VectorizedSearchAsync( + new ReadOnlyMemory([1, 2, 3]), + top: fixture.TestData.Count, + new() { Filter = dynamicFilter }); + + var dynamicActual = await dynamicResults.Results.Select(r => r.Record).OrderBy(r => r[nameof(FilterRecord.Key)]).ToListAsync(); + + if (dynamicActual.Count != expected.Count) { - Filter = filter - }); + Assert.Fail($"Expected {expected.Count} results, but got {actual.Count}"); + } - return await results.Results.Select(r => r.Record).OrderBy(r => r.Key).ToListAsync(); + foreach (var (e, a) in expected.Zip(dynamicActual, (e, a) => (e, a))) + { + fixture.AssertEqualDynamic(e, a); + } + } } [Obsolete("Legacy filter support")] @@ -296,10 +393,10 @@ protected virtual async Task TestLegacyFilterAsync( var actual = await results.Results.Select(r => r.Record).OrderBy(r => r.Key).ToListAsync(); - Assert.Equal(expected, actual, (e, a) => - e.Int == a.Int && - e.String == a.String && - e.Int2 == a.Int2); + foreach (var (e, a) in expected.Zip(actual, (e, a) => (e, a))) + { + fixture.AssertEqualFilterRecord(e, a); + } } #pragma warning disable CS1819 // Properties should not return arrays @@ -328,6 +425,20 @@ protected virtual ReadOnlyMemory GetVector(int count) // Derived types may override this to provide different vectors for different records. 
=> new(Enumerable.Range(1, count).Select(i => (float)i).ToArray()); + public virtual IVectorStoreRecordCollection> DynamicCollection { get; protected set; } = null!; + + public virtual bool TestDynamic => true; + + public override async Task InitializeAsync() + { + await base.InitializeAsync(); + + if (this.TestDynamic) + { + this.DynamicCollection = this.TestStore.DefaultVectorStore.GetCollection>(this.CollectionName, this.GetRecordDefinition()); + } + } + protected override VectorStoreRecordDefinition GetRecordDefinition() => new() { @@ -412,6 +523,56 @@ protected override List BuildTestData() ]; } + public virtual void AssertEqualFilterRecord(FilterRecord x, FilterRecord y) + { + var definitionProperties = this.GetRecordDefinition().Properties; + + Assert.Equal(x.Key, y.Key); + Assert.Equal(x.Int, y.Int); + Assert.Equal(x.String, y.String); + Assert.Equal(x.Int2, y.Int2); + + if (definitionProperties.Any(p => p.DataModelPropertyName == nameof(FilterRecord.Bool))) + { + Assert.Equal(x.Bool, y.Bool); + } + + if (definitionProperties.Any(p => p.DataModelPropertyName == nameof(FilterRecord.StringArray))) + { + Assert.Equivalent(x.StringArray, y.StringArray); + } + + if (definitionProperties.Any(p => p.DataModelPropertyName == nameof(FilterRecord.StringList))) + { + Assert.Equivalent(x.StringList, y.StringList); + } + } + + public virtual void AssertEqualDynamic(FilterRecord x, Dictionary y) + { + var definitionProperties = this.GetRecordDefinition().Properties; + + Assert.Equal(x.Key, y["Key"]); + Assert.Equal(x.Int, y["Int"]); + Assert.Equal(x.String, y["String"]); + Assert.Equal(x.Int2, y["Int2"]); + + if (definitionProperties.Any(p => p.DataModelPropertyName == nameof(FilterRecord.Bool))) + { + Assert.Equal(x.Bool, y["Bool"]); + } + + if (definitionProperties.Any(p => p.DataModelPropertyName == nameof(FilterRecord.StringArray))) + { + Assert.Equivalent(x.StringArray, y["StringArray"]); + } + + if (definitionProperties.Any(p => p.DataModelPropertyName == nameof(FilterRecord.StringList))) + { + Assert.Equivalent(x.StringList, y["StringList"]); + } + } + // In some databases (Azure AI Search), the data shows up but the filtering index isn't yet updated, // so filtered searches show empty results. Add a filter to the seed data check below. protected override Task WaitForDataAsync() diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs index 31f21ae2a66b..67cea2645f11 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs @@ -1,29 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Linq.Expressions; -using Microsoft.Extensions.VectorData; - namespace VectorDataSpecificationTests.Filter; public abstract class BasicQueryTests(BasicQueryTests.QueryFixture fixture) : BasicFilterTests(fixture) where TKey : notnull { - // Not all of the connectors allow to sort by the Key, so we sort by the Int. 
- protected override List GetOrderedRecords(IQueryable filtered) - => filtered.OrderBy(r => r.Int).ThenByDescending(r => r.String).ToList(); - - protected override async Task> GetResults(IVectorStoreRecordCollection collection, - Expression> filter, int top) - { - GetFilteredRecordOptions options = new(); - - options.OrderBy - .Ascending(r => r.Int) - .Descending(r => r.String); - - return await collection.GetAsync(filter, top, options).ToListAsync(); - } - [Obsolete("Not used by derived types")] public sealed override Task Legacy_And() => Task.CompletedTask; diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/DynamicDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/DynamicDataModelFixture.cs new file mode 100644 index 000000000000..1fd5f6b700be --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/DynamicDataModelFixture.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; + +namespace VectorDataSpecificationTests.Support; + +public abstract class DynamicDataModelFixture : VectorStoreCollectionFixture> +{ + public const string KeyPropertyName = "key"; + public const string StringPropertyName = "text"; + public const string IntegerPropertyName = "integer"; + public const string EmbeddingPropertyName = "embedding"; + public const int DimensionCount = 3; + + protected override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = + [ + new VectorStoreRecordKeyProperty(KeyPropertyName, typeof(TKey)), + new VectorStoreRecordDataProperty(StringPropertyName, typeof(string)), + new VectorStoreRecordDataProperty(IntegerPropertyName, typeof(int)), + new VectorStoreRecordVectorProperty(EmbeddingPropertyName, typeof(ReadOnlyMemory)) + { + Dimensions = DimensionCount + } + ] + }; + + protected override List> BuildTestData() => + [ + new() + { + [KeyPropertyName] = this.GenerateNextKey(), + [StringPropertyName] = "first", + [IntegerPropertyName] = 1, + [EmbeddingPropertyName] = new ReadOnlyMemory(Enumerable.Repeat(0.1f, DimensionCount).ToArray()) + }, + new() + { + [KeyPropertyName] = this.GenerateNextKey(), + [StringPropertyName] = "second", + [IntegerPropertyName] = 2, + [EmbeddingPropertyName] = new ReadOnlyMemory(Enumerable.Repeat(0.2f, DimensionCount).ToArray()) + }, + new() + { + [KeyPropertyName] = this.GenerateNextKey(), + [StringPropertyName] = "third", + [IntegerPropertyName] = 3, + [EmbeddingPropertyName] = new ReadOnlyMemory(Enumerable.Repeat(0.3f, DimensionCount).ToArray()) + }, + new() + { + [KeyPropertyName] = this.GenerateNextKey(), + [StringPropertyName] = "fourth", + [IntegerPropertyName] = 4, + [EmbeddingPropertyName] = new ReadOnlyMemory(Enumerable.Repeat(0.4f, DimensionCount).ToArray()) + } + ]; +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/GenericDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/GenericDataModelFixture.cs deleted file mode 100644 index 333ec1cdfea8..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/GenericDataModelFixture.cs +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.Extensions.VectorData; - -namespace VectorDataSpecificationTests.Support; - -public abstract class GenericDataModelFixture : VectorStoreCollectionFixture> - where TKey : notnull -{ - public const string KeyPropertyName = "key"; - public const string StringPropertyName = "text"; - public const string IntegerPropertyName = "integer"; - public const string EmbeddingPropertyName = "embedding"; - public const int DimensionCount = 3; - - protected override VectorStoreRecordDefinition GetRecordDefinition() - => new() - { - Properties = - [ - new VectorStoreRecordKeyProperty(KeyPropertyName, typeof(TKey)), - new VectorStoreRecordDataProperty(StringPropertyName, typeof(string)), - new VectorStoreRecordDataProperty(IntegerPropertyName, typeof(int)), - new VectorStoreRecordVectorProperty(EmbeddingPropertyName, typeof(ReadOnlyMemory)) - { - Dimensions = DimensionCount - } - ] - }; - - protected override List> BuildTestData() => - [ - new(this.GenerateNextKey()) - { - Data = - { - [StringPropertyName] = "first", - [IntegerPropertyName] = 1 - }, - Vectors = - { - [EmbeddingPropertyName] = new ReadOnlyMemory(Enumerable.Repeat(0.1f, DimensionCount).ToArray()) - } - }, - new(this.GenerateNextKey()) - { - Data = - { - [StringPropertyName] = "second", - [IntegerPropertyName] = 2 - }, - Vectors = - { - [EmbeddingPropertyName] = new ReadOnlyMemory(Enumerable.Repeat(0.2f, DimensionCount).ToArray()) - } - }, - new(this.GenerateNextKey()) - { - Data = - { - [StringPropertyName] = "third", - [IntegerPropertyName] = 3 - }, - Vectors = - { - [EmbeddingPropertyName] = new ReadOnlyMemory(Enumerable.Repeat(0.3f, DimensionCount).ToArray()) - } - }, - new(this.GenerateNextKey()) - { - Data = - { - [StringPropertyName] = "fourth", - [IntegerPropertyName] = 4 - }, - Vectors = - { - [EmbeddingPropertyName] = new ReadOnlyMemory(Enumerable.Repeat(0.4f, DimensionCount).ToArray()) - } - } - ]; -} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs index 3facc4a3f84f..329457c486bd 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs @@ -4,6 +4,8 @@ namespace VectorDataSpecificationTests.Support; +#pragma warning disable CA1721 // Property names should not match get methods + /// /// A test fixture that sets up a single collection in the test vector store, with a specific record definition /// and test data. 
@@ -21,14 +23,14 @@ public abstract class VectorStoreCollectionFixture : VectorStoreF protected virtual string DistanceFunction => this.TestStore.DefaultDistanceFunction; protected virtual string IndexKind => this.TestStore.DefaultIndexKind; - protected virtual IVectorStoreRecordCollection CreateCollection() + protected virtual IVectorStoreRecordCollection GetCollection() => this.TestStore.DefaultVectorStore.GetCollection(this.CollectionName, this.GetRecordDefinition()); public override async Task InitializeAsync() { await base.InitializeAsync(); - this.Collection = this.CreateCollection(); + this.Collection = this.GetCollection(); if (await this.Collection.CollectionExistsAsync()) { diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorDataIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorDataIntegrationTests.csproj index 77fc8e90dbb2..5b14dc1e41c1 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorDataIntegrationTests.csproj +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorDataIntegrationTests.csproj @@ -6,6 +6,7 @@ enable false VectorDataSpecificationTests + $(NoWarn);MEVD9000 diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryAttribute.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryAttribute.cs index 529f42ef1310..6e14179e7e99 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryAttribute.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryAttribute.cs @@ -6,5 +6,5 @@ namespace VectorDataSpecificationTests.Xunit; [AttributeUsage(AttributeTargets.Method)] -[XunitTestCaseDiscoverer("VectorDataSpecificationTests.Xunit.VectorStoreFactDiscoverer", "VectorDataIntegrationTests")] +[XunitTestCaseDiscoverer("VectorDataSpecificationTests.Xunit.ConditionalTheoryDiscoverer", "VectorDataIntegrationTests")] public sealed class ConditionalTheoryAttribute : TheoryAttribute; diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryDiscoverer.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryDiscoverer.cs new file mode 100644 index 000000000000..ade08a828148 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryDiscoverer.cs @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Xunit.Abstractions; +using Xunit.Sdk; + +namespace VectorDataSpecificationTests.Xunit; + +/// +/// Used dynamically from . +/// Make sure to update that class if you move this type. 
+/// +public class ConditionalTheoryDiscoverer(IMessageSink messageSink) : TheoryDiscoverer(messageSink) +{ + protected override IEnumerable CreateTestCasesForTheory( + ITestFrameworkDiscoveryOptions discoveryOptions, + ITestMethod testMethod, + IAttributeInfo theoryAttribute) + { + yield return new ConditionalTheoryTestCase( + this.DiagnosticMessageSink, + discoveryOptions.MethodDisplayOrDefault(), + discoveryOptions.MethodDisplayOptionsOrDefault(), + testMethod); + } + + protected override IEnumerable CreateTestCasesForDataRow( + ITestFrameworkDiscoveryOptions discoveryOptions, + ITestMethod testMethod, + IAttributeInfo theoryAttribute, + object[] dataRow) + { + yield return new ConditionalFactTestCase( + this.DiagnosticMessageSink, + discoveryOptions.MethodDisplayOrDefault(), + discoveryOptions.MethodDisplayOptionsOrDefault(), + testMethod, + dataRow); + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryTestCase.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryTestCase.cs new file mode 100644 index 000000000000..f96ec3d9d691 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Xunit/ConditionalTheoryTestCase.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Xunit.Abstractions; +using Xunit.Sdk; + +namespace VectorDataSpecificationTests.Xunit; + +public sealed class ConditionalTheoryTestCase : XunitTheoryTestCase +{ + [Obsolete("Called by the de-serializer; should only be called by deriving classes for de-serialization purposes")] + public ConditionalTheoryTestCase() + { + } + + public ConditionalTheoryTestCase( + IMessageSink diagnosticMessageSink, + TestMethodDisplay defaultMethodDisplay, + TestMethodDisplayOptions defaultMethodDisplayOptions, + ITestMethod testMethod) + : base(diagnosticMessageSink, defaultMethodDisplay, defaultMethodDisplayOptions, testMethod) + { + } + + public override async Task RunAsync( + IMessageSink diagnosticMessageSink, + IMessageBus messageBus, + object[] constructorArguments, + ExceptionAggregator aggregator, + CancellationTokenSource cancellationTokenSource) + => await XunitTestCaseExtensions.TrySkipAsync(this, messageBus) + ? new RunSummary { Total = 1, Skipped = 1 } + : await base.RunAsync( + diagnosticMessageSink, + messageBus, + constructorArguments, + aggregator, + cancellationTokenSource); +} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateBatchConformanceTests.cs new file mode 100644 index 000000000000..9d4b065d6c4c --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateBatchConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using VectorDataSpecificationTests.CRUD; +using WeaviateIntegrationTests.Support; +using Xunit; + +namespace WeaviateIntegrationTests.CRUD; + +public class WeaviateBatchConformanceTests(WeaviateSimpleModelFixture fixture) + : BatchConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateDynamicRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateDynamicRecordConformanceTests.cs new file mode 100644 index 000000000000..7e024a7a50bb --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateDynamicRecordConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.CRUD; +using WeaviateIntegrationTests.Support; +using Xunit; + +namespace WeaviateIntegrationTests.CRUD; + +public class WeaviateDynamicRecordConformanceTests(WeaviateDynamicDataModelFixture fixture) + : DynamicDataModelConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateRecordConformanceTests.cs new file mode 100644 index 000000000000..3beb7b6e70e5 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateRecordConformanceTests.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.CRUD; +using WeaviateIntegrationTests.Support; +using Xunit; + +namespace WeaviateIntegrationTests.CRUD; + +public class WeaviateRecordConformanceTests(WeaviateSimpleModelFixture fixture) + : RecordConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs index 6238ca6d9b6a..32082232591d 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs @@ -61,12 +61,10 @@ public override Task Contains_over_inline_string_array_with_weird_chars() // In Weaviate, string equality on multi-word textual properties depends on tokenization // (https://weaviate.io/developers/weaviate/api/graphql/filters#multi-word-queries-in-equal-filters) public override Task Equal_with_string_is_not_Contains() - => Assert.ThrowsAsync(() => base.Equal_with_string_is_not_Contains()); + => Assert.ThrowsAsync(() => base.Equal_with_string_is_not_Contains()); public new class Fixture : BasicFilterTests.Fixture { public override TestStore TestStore => WeaviateTestStore.Instance; - - protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; } } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs index 1f79a826a56a..312ac21b8372 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs @@ -61,7 +61,7 @@ public override Task Contains_over_inline_string_array_with_weird_chars() // In 
Weaviate, string equality on multi-word textual properties depends on tokenization // (https://weaviate.io/developers/weaviate/api/graphql/filters#multi-word-queries-in-equal-filters) public override Task Equal_with_string_is_not_Contains() - => Assert.ThrowsAsync(() => base.Equal_with_string_is_not_Contains()); + => Assert.ThrowsAsync(() => base.Equal_with_string_is_not_Contains()); public new class Fixture : BasicQueryTests.QueryFixture { diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateDynamicDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateDynamicDataModelFixture.cs new file mode 100644 index 000000000000..cd69271c5a12 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateDynamicDataModelFixture.cs @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Support; + +namespace WeaviateIntegrationTests.Support; + +public class WeaviateDynamicDataModelFixture : DynamicDataModelFixture +{ + public override TestStore TestStore => WeaviateTestStore.Instance; + + // Weaviate requires the name to start with a capital letter and not contain any chars other than a-Z and 0-9. + // Source: https://weaviate.io/developers/weaviate/starter-guides/managing-collections#collection--property-names + protected override string CollectionName => $"A{Guid.NewGuid():N}"; +} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateSimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateSimpleModelFixture.cs new file mode 100644 index 000000000000..0fe7c713e46b --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateSimpleModelFixture.cs @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Support; + +namespace WeaviateIntegrationTests.Support; + +public class WeaviateSimpleModelFixture : SimpleModelFixture +{ + public override TestStore TestStore => WeaviateTestStore.Instance; + + // Weaviate requires the name to start with a capital letter and not contain any chars other than a-Z and 0-9. + // Source: https://weaviate.io/developers/weaviate/starter-guides/managing-collections#collection--property-names + protected override string CollectionName => $"A{Guid.NewGuid():N}"; +} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs index d112a2abfe49..a7700149dad3 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs @@ -22,6 +22,8 @@ public sealed class WeaviateTestStore : TestStore public override IVectorStore DefaultVectorStore => this._defaultVectorStore ?? 
throw new InvalidOperationException("Not initialized"); + public override string DefaultDistanceFunction => DistanceFunction.CosineDistance; + public WeaviateVectorStore GetVectorStore(WeaviateVectorStoreOptions options) => new(this.Client, options); diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/WeaviateIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/WeaviateIntegrationTests.csproj index eb98407f35ee..486583668bb8 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/WeaviateIntegrationTests.csproj +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/WeaviateIntegrationTests.csproj @@ -10,18 +10,18 @@ - + runtime; build; native; contentfiles; analyzers; buildtransitive all - - + + - - + + From 4fc76f2d2e20fba5a06050c3060813ca3cbc77bc Mon Sep 17 00:00:00 2001 From: Damien Guard Date: Thu, 10 Apr 2025 15:24:19 +0100 Subject: [PATCH 39/63] .Net MEVD: Enable & fix integration tests on MongoDB (#11405) Co-authored-by: westey <164392973+westey-m@users.noreply.github.com> --- .../Memory/MongoDB/MongoDBVectorStoreFixture.cs | 2 +- .../MongoDB/MongoDBVectorStoreRecordCollectionTests.cs | 9 ++++++--- .../Connectors/Memory/MongoDB/MongoDBVectorStoreTests.cs | 2 -- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreFixture.cs index 3d975dffbdf3..9d099d0c2f91 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreFixture.cs @@ -54,7 +54,7 @@ public MongoDBVectorStoreFixture() new VectorStoreRecordDataProperty("Tags", typeof(List)), new VectorStoreRecordDataProperty("Timestamp", typeof(DateTime)), new VectorStoreRecordDataProperty("Description", typeof(string)), - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 4, IndexKind = IndexKind.IvfFlat, DistanceFunction = DistanceFunction.CosineDistance } + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 4, IndexKind = IndexKind.IvfFlat, DistanceFunction = DistanceFunction.CosineSimilarity } ] }; } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs index 9b2e339ac4fb..47d0a6943a79 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs @@ -18,7 +18,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.MongoDB; public class MongoDBVectorStoreRecordCollectionTests(MongoDBVectorStoreFixture fixture) { // If null, all tests will be enabled - private const string? SkipReason = "The tests are for manual verification."; + private const string? 
SkipReason = null; [Theory(Skip = SkipReason)] [InlineData("sk-test-hotels", true)] @@ -39,13 +39,15 @@ public async Task CollectionExistsReturnsCollectionStateAsync(string collectionN public async Task ItCanCreateCollectionAsync() { // Arrange - var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var newCollection = Guid.NewGuid().ToString(); + var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, newCollection); // Act await sut.CreateCollectionAsync(); // Assert Assert.True(await sut.CollectionExistsAsync()); + await sut.DeleteCollectionAsync(); } [Theory(Skip = SkipReason)] @@ -59,6 +61,7 @@ public async Task ItCanCreateCollectionUpsertAndGetAsync(bool includeVectors, bo const string HotelId = "55555555-5555-5555-5555-555555555555"; var collectionNamePostfix = useRecordDefinition ? "with-definition" : "with-type"; + collectionNamePostfix += includeVectors ? "-with-vectors" : "-without-vectors"; var collectionName = $"collection-{collectionNamePostfix}"; var options = new MongoDBVectorStoreRecordCollectionOptions @@ -66,7 +69,7 @@ public async Task ItCanCreateCollectionUpsertAndGetAsync(bool includeVectors, bo VectorStoreRecordDefinition = useRecordDefinition ? fixture.HotelVectorStoreRecordDefinition : null }; - var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, collectionName); + var sut = new MongoDBVectorStoreRecordCollection(fixture.MongoDatabase, collectionName, options); var record = this.CreateTestHotel(HotelId); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreTests.cs index fd6cd229091d..7673800714d1 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreTests.cs @@ -2,13 +2,11 @@ using Microsoft.SemanticKernel.Connectors.MongoDB; using SemanticKernel.IntegrationTests.Connectors.Memory; -using SemanticKernel.IntegrationTests.Connectors.Memory.Xunit; using Xunit; namespace SemanticKernel.IntegrationTests.Connectors.MongoDB; [Collection("MongoDBVectorStoreCollection")] -[DisableVectorStoreTests(Skip = "The tests are for manual verification.")] public class MongoDBVectorStoreTests(MongoDBVectorStoreFixture fixture) : BaseVectorStoreTests(new MongoDBVectorStore(fixture.MongoDatabase)) { From ae0fa6261187b8d33e665d6035a0ab9269baf470 Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Thu, 10 Apr 2025 22:18:08 +0200 Subject: [PATCH 40/63] .Net: Change UpsertAsync return type to IReadOnlyList (#11402) Closes #10692 --- ...extEmbeddingVectorStoreRecordCollection.cs | 9 ++---- .../Memory/VectorStore_EmbeddingGeneration.cs | 2 +- .../Optimization/FrugalGPTWithFilters.cs | 2 +- .../PluginSelectionWithFilters.cs | 2 +- .../Demos/VectorStoreRAG/DataLoader.cs | 4 +-- ...ISearchVectorStoreRecordCollectionTests.cs | 2 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 2 +- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 2 +- ...nMemoryVectorStoreRecordCollectionTests.cs | 2 +- ...zureAISearchVectorStoreRecordCollection.cs | 10 ++----- ...mosDBMongoDBVectorStoreRecordCollection.cs | 11 ++----- ...osmosDBNoSQLVectorStoreRecordCollection.cs | 12 ++------ .../InMemoryVectorStoreRecordCollection.cs | 30 +++++++++---------- .../MongoDBVectorStoreRecordCollection.cs | 11 ++----- .../PineconeVectorStoreRecordCollection.cs | 9 ++---- .../PostgresVectorStoreRecordCollection.cs | 6 ++-- 
.../QdrantVectorStoreRecordCollection.cs | 30 +++++-------------- ...RedisHashSetVectorStoreRecordCollection.cs | 10 ++----- .../RedisJsonVectorStoreRecordCollection.cs | 8 ++--- .../SqlServerVectorStoreRecordCollection.cs | 8 ++--- .../SqliteVectorStoreRecordCollection.cs | 21 +++++++------ .../WeaviateVectorStoreRecordCollection.cs | 18 +++++++---- ...MongoDBVectorStoreRecordCollectionTests.cs | 2 +- .../QdrantVectorStoreRecordCollectionTests.cs | 2 +- ...HashSetVectorStoreRecordCollectionTests.cs | 2 +- ...disJsonVectorStoreRecordCollectionTests.cs | 2 +- ...eaviateVectorStoreRecordCollectionTests.cs | 2 +- .../IVectorStoreRecordCollection.cs | 17 ++++++++--- ...ISearchVectorStoreRecordCollectionTests.cs | 13 ++++---- ...MongoDBVectorStoreRecordCollectionTests.cs | 8 ++--- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 8 ++--- .../BaseVectorStoreRecordCollectionTests.cs | 2 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 8 ++--- ...ostgresVectorStoreRecordCollectionTests.cs | 8 ++--- ...HashSetVectorStoreRecordCollectionTests.cs | 2 +- ...disJsonVectorStoreRecordCollectionTests.cs | 2 +- .../SqliteVectorStoreRecordCollectionTests.cs | 10 +++---- ...eaviateVectorStoreRecordCollectionTests.cs | 10 +++---- .../CRUD/PineconeAllSupportedTypesTests.cs | 2 +- .../CRUD/SqlServerBatchConformanceTests.cs | 4 +-- .../SqlServerVectorStoreTests.cs | 4 +-- .../CRUD/BatchConformanceTests.cs | 14 ++++----- .../Support/VectorStoreCollectionFixture.cs | 6 +--- ...orSearchDistanceFunctionComplianceTests.cs | 2 +- 44 files changed, 143 insertions(+), 198 deletions(-) diff --git a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs index 14b7352b62d3..1b6401954bc2 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs @@ -2,7 +2,6 @@ using System.Linq.Expressions; using System.Reflection; -using System.Runtime.CompilerServices; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Embeddings; @@ -110,15 +109,11 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) { var recordWithEmbeddingsTasks = records.Select(r => this.AddEmbeddingsAsync(r, cancellationToken)); var recordWithEmbeddings = await Task.WhenAll(recordWithEmbeddingsTasks).ConfigureAwait(false); - var upsertResults = this._decoratedVectorStoreRecordCollection.UpsertAsync(recordWithEmbeddings, cancellationToken); - await foreach (var upsertResult in upsertResults.ConfigureAwait(false)) - { - yield return upsertResult; - } + return await this._decoratedVectorStoreRecordCollection.UpsertAsync(recordWithEmbeddings, cancellationToken); } /// diff --git a/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs b/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs index a803b18d328c..b79b41e2f777 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs @@ -45,7 +45,7 @@ public async Task 
UseEmbeddingGenerationViaDecoratorAsync() await collection.CreateCollectionIfNotExistsAsync(); // Create and upsert glossary entries into the collection. - await collection.UpsertAsync(CreateGlossaryEntries()).ToListAsync(); + await collection.UpsertAsync(CreateGlossaryEntries()); // Search the collection using a vectorizable text search. var search = collection as IVectorizableTextSearch; diff --git a/dotnet/samples/Concepts/Optimization/FrugalGPTWithFilters.cs b/dotnet/samples/Concepts/Optimization/FrugalGPTWithFilters.cs index 150f0b187aec..a889559af875 100644 --- a/dotnet/samples/Concepts/Optimization/FrugalGPTWithFilters.cs +++ b/dotnet/samples/Concepts/Optimization/FrugalGPTWithFilters.cs @@ -210,7 +210,7 @@ public async Task OnPromptRenderAsync(PromptRenderContext context, Func(CollectionName); await collection.CreateCollectionIfNotExistsAsync(context.CancellationToken); - await collection.UpsertAsync(exampleRecords, cancellationToken: context.CancellationToken).ToListAsync(context.CancellationToken); + await collection.UpsertAsync(exampleRecords, cancellationToken: context.CancellationToken); // Generate embedding for original request. var requestEmbedding = await textEmbeddingGenerationService.GenerateEmbeddingAsync(request, cancellationToken: context.CancellationToken); diff --git a/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs b/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs index 278ceeccb25c..b4d702cc9fa1 100644 --- a/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs +++ b/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs @@ -341,7 +341,7 @@ public async Task SaveAsync(string collectionName, KernelPluginCollection plugin var collection = vectorStore.GetCollection(collectionName); await collection.CreateCollectionIfNotExistsAsync(cancellationToken); - await collection.UpsertAsync(functionRecords, cancellationToken: cancellationToken).ToListAsync(cancellationToken); + await collection.UpsertAsync(functionRecords, cancellationToken: cancellationToken); } private static List<(KernelFunction Function, string TextToVectorize)> GetFunctionsData(KernelPluginCollection plugins) diff --git a/dotnet/samples/Demos/VectorStoreRAG/DataLoader.cs b/dotnet/samples/Demos/VectorStoreRAG/DataLoader.cs index 678044fd8fc9..c23f00ee9a56 100644 --- a/dotnet/samples/Demos/VectorStoreRAG/DataLoader.cs +++ b/dotnet/samples/Demos/VectorStoreRAG/DataLoader.cs @@ -66,8 +66,8 @@ public async Task LoadPdf(string pdfPath, int batchSize, int betweenBatchDelayIn // Upsert the records into the vector store. 
var records = await Task.WhenAll(recordTasks).ConfigureAwait(false); - var upsertedKeys = vectorStoreRecordCollection.UpsertAsync(records, cancellationToken: cancellationToken); - await foreach (var key in upsertedKeys.ConfigureAwait(false)) + var upsertedKeys = await vectorStoreRecordCollection.UpsertAsync(records, cancellationToken: cancellationToken).ConfigureAwait(false); + foreach (var key in upsertedKeys) { Console.WriteLine($"Upserted record '{key}' into VectorDB"); } diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs index 17f60b78cd9a..fa82dd316d51 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -458,7 +458,7 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition) // Act. var actual = await sut.UpsertAsync( [model1, model2], - cancellationToken: this._testCancellationToken).ToListAsync(); + cancellationToken: this._testCancellationToken); // Assert. Assert.NotNull(actual); diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 09b59d505f63..11c795ea727b 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -418,7 +418,7 @@ public async Task UpsertBatchReturnsRecordKeysAsync() "collection"); // Act - var results = await sut.UpsertAsync([hotel1, hotel2, hotel3]).ToListAsync(); + var results = await sut.UpsertAsync([hotel1, hotel2, hotel3]); // Assert Assert.NotNull(results); diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index a4136536e1d9..27008c3973bb 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -469,7 +469,7 @@ public async Task UpsertBatchReturnsRecordKeysAsync() "collection"); // Act - var results = await sut.UpsertAsync([hotel1, hotel2, hotel3]).ToListAsync(); + var results = await sut.UpsertAsync([hotel1, hotel2, hotel3]); // Assert Assert.NotNull(results); diff --git a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs index ab8e797a0eb2..8c18d13321ca 100644 --- a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs @@ -257,7 +257,7 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition, TKey testK // Act var actual = await sut.UpsertAsync( [record1, record2], - cancellationToken: 
this._testCancellationToken).ToListAsync(); + cancellationToken: this._testCancellationToken); // Assert Assert.NotNull(actual); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index 8351c43e35e7..c1383d5b9d02 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -283,12 +283,12 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) { Verify.NotNull(records); if (!records.Any()) { - yield break; + return []; } // Create Options @@ -297,11 +297,7 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, [E // Upsert records var results = await this.MapToStorageModelAndUploadDocumentAsync(records, innerOptions, cancellationToken).ConfigureAwait(false); - // Get results - foreach (var key in results.Value.Results.Select(x => x.Key)) - { - yield return (TKey)(object)key; - } + return results.Value.Results.Select(x => (TKey)(object)x.Key).ToList(); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index c4a031fce000..35e6b9f0dea0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -247,20 +247,13 @@ await this._mongoCollection } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) { Verify.NotNull(records); var tasks = records.Select(record => this.UpsertAsync(record, cancellationToken)); var results = await Task.WhenAll(tasks).ConfigureAwait(false); - - foreach (var result in results) - { - if (result is not null) - { - yield return (TKey)(object)result; - } - } + return results.Where(r => r is not null).ToList(); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index 83a34b0d4090..3f1e2fbaddd8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -286,22 +286,14 @@ await this.RunOperationAsync(OperationName, () => } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) { Verify.NotNull(records); // TODO: Do proper bulk upsert rather than parallel single inserts, #11350 var tasks = 
records.Select(record => this.UpsertAsync(record, cancellationToken)); - var keys = await Task.WhenAll(tasks).ConfigureAwait(false); - - foreach (var key in keys) - { - if (key is not null) - { - yield return key; - } - } + return keys.Where(k => k is not null).ToList(); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index 1d004674e254..2b35f7d0b3cd 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -232,6 +232,17 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationTo /// public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + => Task.FromResult(this.Upsert(record)); + + /// + public Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) + { + Verify.NotNull(records); + + return Task.FromResult>(records.Select(this.Upsert).ToList()); + } + + private TKey Upsert(TRecord record) { Verify.NotNull(record); @@ -240,24 +251,11 @@ public Task UpsertAsync(TRecord record, CancellationToken cancellationToke var key = (TKey)this._keyResolver(record)!; collectionDictionary.AddOrUpdate(key!, record, (key, currentValue) => record); - return Task.FromResult(key!); - } - - /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - Verify.NotNull(records); - - foreach (var record in records) - { - yield return await this.UpsertAsync(record, cancellationToken).ConfigureAwait(false); - } + return key!; } /// -#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously - Need to satisfy the interface which returns IAsyncEnumerable - public async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) -#pragma warning restore CS1998 + public Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); Verify.NotLessThan(top, 1); @@ -315,7 +313,7 @@ public async Task> VectorizedSearchAsync(T // Build the response. 
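    // The change above follows a general pattern rather than anything InMemory-specific: when a
    // Task-returning method contains no awaits, returning Task.FromResult(...) avoids both the async
    // state machine and the CS1998 pragma that was previously required. A minimal sketch of the same
    // pattern, with illustrative names that are not part of this patch:
    public static Task<int> CountItemsAsync(IReadOnlyCollection<int> items)
        => Task.FromResult(items.Count);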
var vectorSearchResultList = resultsPage.Select(x => new VectorSearchResult((TRecord)x.record, x.score)).ToAsyncEnumerable(); - return new VectorSearchResults(vectorSearchResultList) { TotalCount = count }; + return Task.FromResult(new VectorSearchResults(vectorSearchResultList) { TotalCount = count }); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index 7b7cfbb20447..48d28c49dfe9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -253,20 +253,13 @@ await this._mongoCollection } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) { Verify.NotNull(records); var tasks = records.Select(record => this.UpsertAsync(record, cancellationToken)); var results = await Task.WhenAll(tasks).ConfigureAwait(false); - - foreach (var result in results) - { - if (result is not null) - { - yield return (TKey)(object)result; - } - } + return results.Where(r => r is not null).ToList(); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index d997edf836d2..f817881333fd 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -303,7 +303,7 @@ await this.RunIndexOperationAsync( } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -316,7 +316,7 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, [E if (vectors.Count == 0) { - yield break; + return []; } Sdk.UpsertRequest request = new() @@ -329,10 +329,7 @@ await this.RunIndexOperationAsync( "UpsertBatch", indexClient => indexClient.UpsertAsync(request, cancellationToken: cancellationToken)).ConfigureAwait(false); - foreach (var vector in vectors) - { - yield return (TKey)(object)vector.Id; - } + return vectors.Select(x => (TKey)(object)x.Id).ToList(); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index 24821c7f18d8..8bbe1fe548eb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -156,7 +156,7 @@ public Task UpsertAsync(TRecord record, CancellationToken cancellationToke } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -171,7 +171,7 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, [E if (storageModels.Count == 0) { - yield break; + return []; } var keys 
= storageModels.Select(model => model[this._model.KeyProperty.StorageName]!).ToList(); @@ -180,7 +180,7 @@ await this.RunOperationAsync(OperationName, () => this._client.UpsertBatchAsync(this.CollectionName, storageModels, this._model.KeyProperty.StorageName, cancellationToken) ).ConfigureAwait(false); - foreach (var key in keys) { yield return (TKey)key!; } + return keys.Select(key => (TKey)key!).ToList(); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index e99cfa04a61b..3b0f5e2c6a78 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -454,7 +454,7 @@ await this.RunOperationAsync( } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -471,28 +471,14 @@ await this.RunOperationAsync( UpsertName, () => this._qdrantClient.UpsertAsync(this._collectionName, pointStructs, true, cancellationToken: cancellationToken)).ConfigureAwait(false); - if (pointStructs.Count > 0) - { - switch (pointStructs[0].Id) + return pointStructs.Count == 0 + ? [] + : pointStructs[0].Id switch { - case { HasNum: true }: - foreach (var pointStruct in pointStructs) - { - yield return (TKey)(object)pointStruct.Id.Num; - } - break; - - case { HasUuid: true }: - foreach (var pointStruct in pointStructs) - { - yield return (TKey)(object)Guid.Parse(pointStruct.Id.Uuid); - } - break; - - default: - throw new UnreachableException("The Qdrant point ID is neither a number nor a UUID."); - } - } + { HasNum: true } => pointStructs.Select(pointStruct => (TKey)(object)pointStruct.Id.Num).ToList(), + { HasUuid: true } => pointStructs.Select(pointStruct => (TKey)(object)Guid.Parse(pointStruct.Id.Uuid)).ToList(), + _ => throw new UnreachableException("The Qdrant point ID is neither a number nor a UUID.") + }; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index 358ed3f80b25..adbd3fe50d46 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -320,20 +320,14 @@ await this.RunOperationAsync( } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) { Verify.NotNull(records); // Upsert records in parallel. 
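    // Several connectors in this patch (Azure CosmosDB for MongoDB, MongoDB, and this Redis hash-set
    // collection) now share the same fan-out shape for batch upserts: run the single-record upserts in
    // parallel and keep the non-null keys. A generic sketch of that shape, assuming the usual
    // System.Linq and System.Threading.Tasks usings; the names are illustrative only:
    public static async Task<IReadOnlyList<TKey>> UpsertInParallelAsync<TRecord, TKey>(
        IEnumerable<TRecord> records,
        Func<TRecord, Task<TKey?>> upsertSingleAsync)
        where TKey : class
    {
        TKey?[] keys = await Task.WhenAll(records.Select(upsertSingleAsync)).ConfigureAwait(false);
        return keys.Where(key => key is not null).Select(key => key!).ToList();
    }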
var tasks = records.Select(x => this.UpsertAsync(x, cancellationToken)); var results = await Task.WhenAll(tasks).ConfigureAwait(false); - foreach (var result in results) - { - if (result is not null) - { - yield return (TKey)(object)result; - } - } + return results.Where(r => r is not null).ToList(); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index a5d355fd605d..8298770b9e54 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -375,7 +375,7 @@ await this.RunOperationAsync( } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -407,11 +407,7 @@ await this.RunOperationAsync( .JSON() .MSetAsync(keyPathValues)).ConfigureAwait(false); - // Return keys of upserted records. - foreach (var record in redisRecords) - { - yield return (TKey)(object)record.originalKey; - } + return redisRecords.Select(x => (TKey)(object)x.originalKey).ToList(); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index d7e3d986223c..1ed1bee0cd4c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -357,8 +357,7 @@ async static (cmd, ct) => } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, - [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) { Verify.NotNull(records); @@ -429,10 +428,7 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, var keyProperty = this._model.KeyProperty; - foreach (var record in records) - { - yield return (TKey)keyProperty.GetValueAsObject(record!)!; - } + return records.Select(r => (TKey)keyProperty.GetValueAsObject(r)!).ToList(); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 99d7259ab986..b633aac6558e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -363,15 +363,14 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati var condition = new SqliteWhereEqualsCondition(this._keyStorageName, key); - var upsertedRecordKey = await this.InternalUpsertBatchAsync(connection, [storageModel], condition, cancellationToken) - .FirstOrDefaultAsync(cancellationToken) + var upsertedRecordKeys = await this.InternalUpsertBatchAsync(connection, [storageModel], condition, cancellationToken) .ConfigureAwait(false); - return upsertedRecordKey ?? throw new VectorStoreOperationException("Error occurred during upsert operation."); + return upsertedRecordKeys.Single() ?? 
throw new VectorStoreOperationException("Error occurred during upsert operation."); } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) { const string OperationName = "UpsertBatch"; @@ -388,10 +387,7 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, [E var condition = new SqliteWhereInCondition(this._keyStorageName, keys); - await foreach (var record in this.InternalUpsertBatchAsync(connection, storageModels, condition, cancellationToken).ConfigureAwait(false)) - { - yield return record; - } + return await this.InternalUpsertBatchAsync(connection, storageModels, condition, cancellationToken).ConfigureAwait(false); } /// @@ -598,11 +594,11 @@ private async IAsyncEnumerable InternalGetBatchAsync( } } - private async IAsyncEnumerable InternalUpsertBatchAsync( + private async Task> InternalUpsertBatchAsync( SqliteConnection connection, List> storageModels, SqliteWhereCondition condition, - [EnumeratorCancellation] CancellationToken cancellationToken) + CancellationToken cancellationToken) { Verify.NotNull(storageModels); Verify.True(storageModels.Count > 0, "Number of provided records should be greater than zero."); @@ -637,6 +633,7 @@ private async IAsyncEnumerable InternalUpsertBatchAsync( replaceIfExists: true); using var reader = await dataCommand.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + var keys = new List(); while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) { @@ -644,11 +641,13 @@ private async IAsyncEnumerable InternalUpsertBatchAsync( if (key is not null) { - yield return key; + keys.Add(key); } await reader.NextResultAsync(cancellationToken).ConfigureAwait(false); } + + return keys; } private Task InternalDeleteBatchAsync(SqliteConnection connection, SqliteWhereCondition condition, CancellationToken cancellationToken) diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index 845745225443..e9fda5c38484 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -281,12 +281,14 @@ public async IAsyncEnumerable GetAsync( /// public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) - => (await this.UpsertAsync([record], cancellationToken) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false))!; + { + var keys = await this.UpsertAsync([record], cancellationToken).ConfigureAwait(false); + + return keys.Single(); + } /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) { const string OperationName = "UpsertCollectionObject"; @@ -301,7 +303,7 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, [E if (jsonObjects.Count == 0) { - yield break; + return []; } var responses = await this.RunOperationAsync(OperationName, async () => @@ -311,16 +313,20 @@ public async IAsyncEnumerable UpsertAsync(IEnumerable records, [E return await this.ExecuteRequestAsync>(request, cancellationToken).ConfigureAwait(false); }).ConfigureAwait(false); 
+ var keys = new List(jsonObjects.Count); + if (responses is not null) { foreach (var response in responses) { if (response?.Result?.IsSuccess is true) { - yield return (TKey)(object)response.Id; + keys.Add((TKey)(object)response.Id); } } } + + return keys; } /// diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs index c68a93b7edd8..99bc60b9104b 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs @@ -418,7 +418,7 @@ public async Task UpsertBatchReturnsRecordKeysAsync() "collection"); // Act - var results = await sut.UpsertAsync([hotel1, hotel2, hotel3]).ToListAsync(); + var results = await sut.UpsertAsync([hotel1, hotel2, hotel3]); // Assert Assert.NotNull(results); diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs index 5119874f96de..a8f0c8e2a20f 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs @@ -458,7 +458,7 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition, bool hasNa // Act var actual = await sut.UpsertAsync( models, - cancellationToken: this._testCancellationToken).ToListAsync(); + cancellationToken: this._testCancellationToken); // Assert Assert.NotNull(actual); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs index 09d34ab30b5b..a4844133ac31 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -356,7 +356,7 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition) var model2 = CreateModel(TestRecordKey2, true); // Act - var actual = await sut.UpsertAsync([model1, model2]).ToListAsync(); + var actual = await sut.UpsertAsync([model1, model2]); // Assert Assert.NotNull(actual); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs index 69284fe66f90..d3e5c449cd41 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs @@ -390,7 +390,7 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition) var model2 = CreateModel(TestRecordKey2, true); // Act - var actual = await sut.UpsertAsync([model1, model2]).ToListAsync(); + var actual = await sut.UpsertAsync([model1, model2]); // Assert Assert.NotNull(actual); diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs index 071000145fff..f78cb1c8e500 100644 --- 
a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs @@ -329,7 +329,7 @@ public async Task UpsertReturnsRecordKeysAsync() var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); // Act - var results = await sut.UpsertAsync([hotel1, hotel2]).ToListAsync(); + var results = await sut.UpsertAsync([hotel1, hotel2]); // Assert Assert.Contains(id1, results); diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs index 63484c33f4de..97e898fd7b56 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs @@ -113,7 +113,7 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch /// /// The record to upsert. /// The to monitor for cancellation requests. The default is . - /// The unique identifier for the record. + /// The key for the records, to be used when keys are generated in the database. /// The command fails to execute for any reason. /// The mapping between the storage model and record data model fails. Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default); @@ -125,13 +125,22 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch /// /// The records to upsert. /// The to monitor for cancellation requests. The default is . - /// The unique identifiers for the records. + /// The keys for the records, to be used when keys are generated in the database. /// - /// Upserts are made in a single request or in a single parallel batch depending on the available store functionality. + /// + /// The exact method of upserting the batch is implementation-specific and can vary based on database support; some databases support batch upserts via a single, efficient + /// request, while in other cases the implementation might send multiple upserts in parallel. + /// + /// + /// Similarly, the error behavior can vary across databases: where possible, the batch will be upserted atomically, so that any errors cause the entire batch to be rolled + /// back. Where not supported, some records may be upserted while others are not. If key properties are set by the user, then the entire upsert operation is idempotent, + /// and can simply be retried again if an error occurs. However, if store-generated keys are in use, the upsert operation is no longer idempotent; in that case, if the + /// database doesn't guarantee atomicity, retrying could cause duplicate records to be created. + /// /// /// The command fails to execute for any reason. /// The mapping between the storage model and record data model fails. - IAsyncEnumerable UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default); + Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default); /// /// Gets matching records from the vector store. Does not guarantee that the collection exists. 
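The interface change above is what drives the connector edits in this patch: batch upsert now returns every key in one result instead of streaming keys back. A minimal caller-side sketch of the new shape, written as a generic helper with illustrative names (the concrete collection is whichever connector is in use):

    using System.Collections.Generic;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Extensions.VectorData;

    public static class BatchUpsertExample
    {
        // Before this change the keys were streamed back as IAsyncEnumerable<TKey> and callers had to
        // enumerate them (typically with ToListAsync); now the whole key list is produced by one await.
        public static async Task<IReadOnlyList<TKey>> UpsertAllAsync<TKey, TRecord>(
            IVectorStoreRecordCollection<TKey, TRecord> collection,
            IEnumerable<TRecord> records,
            CancellationToken cancellationToken = default)
            where TKey : notnull
        {
            IReadOnlyList<TKey> keys = await collection.UpsertAsync(records, cancellationToken);
            return keys;
        }
    }

As the new remarks spell out, whether that single call is atomic, and whether a failed batch can safely be retried, still depends on the underlying database, particularly when keys are generated by the store.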
diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs index 1184f229b69b..2f86994e73e4 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -167,7 +167,7 @@ public async Task ItCanUpsertManyDocumentsToVectorStoreAsync() var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); // Act - var results = sut.UpsertAsync( + var results = await sut.UpsertAsync( [ await this.CreateTestHotelAsync("UpsertMany-1"), await this.CreateTestHotelAsync("UpsertMany-2"), @@ -176,15 +176,14 @@ await this.CreateTestHotelAsync("UpsertMany-3"), // Assert Assert.NotNull(results); - var resultsList = await results.ToListAsync(); - Assert.Equal(3, resultsList.Count); - Assert.Contains("UpsertMany-1", resultsList); - Assert.Contains("UpsertMany-2", resultsList); - Assert.Contains("UpsertMany-3", resultsList); + Assert.Equal(3, results.Count); + Assert.Contains("UpsertMany-1", results); + Assert.Contains("UpsertMany-2", results); + Assert.Contains("UpsertMany-3", results); // Output - foreach (var result in resultsList) + foreach (var result in results) { output.WriteLine(result); } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 26a01f7bd38e..d33dc2c0e1b0 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -157,7 +157,7 @@ public async Task ItCanGetAndDeleteBatchAsync() var record2 = this.CreateTestHotel(HotelId2); var record3 = this.CreateTestHotel(HotelId3); - var upsertResults = await sut.UpsertAsync([record1, record2, record3]).ToListAsync(); + var upsertResults = await sut.UpsertAsync([record1, record2, record3]); var getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); Assert.Equal([HotelId1, HotelId2, HotelId3], upsertResults); @@ -340,7 +340,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3); @@ -371,7 +371,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() @@ -403,7 +403,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var actual = await 
sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 91d3daf3cc4c..a3e3e7c64091 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -213,7 +213,7 @@ public async Task ItCanGetAndDeleteBatchAsync() var record2 = this.CreateTestHotel(HotelId2); var record3 = this.CreateTestHotel(HotelId3); - var upsertResults = await sut.UpsertAsync([record1, record2, record3]).ToListAsync(); + var upsertResults = await sut.UpsertAsync([record1, record2, record3]); var getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); Assert.Equal([HotelId1, HotelId2, HotelId3], upsertResults); @@ -274,7 +274,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3); @@ -305,7 +305,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() @@ -338,7 +338,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync(VectorSearc await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 4, new() diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs index afcf32cad843..75db689d61fc 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs @@ -93,7 +93,7 @@ public async Task VectorSearchShouldReturnExpectedScoresAsync(string distanceFun Vector = orthogonalVector, }; - await sut.UpsertAsync([baseRecord, oppositeRecord, orthogonalRecord]).ToListAsync(); + await sut.UpsertAsync([baseRecord, oppositeRecord, orthogonalRecord]); await Task.Delay(this.DelayAfterUploadInMilliseconds); // Act diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs index 47d0a6943a79..0545491d5f2b 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs @@ -161,7 +161,7 @@ public async Task ItCanGetAndDeleteBatchAsync() var record2 = 
this.CreateTestHotel(HotelId2); var record3 = this.CreateTestHotel(HotelId3); - var upsertResults = await sut.UpsertAsync([record1, record2, record3]).ToListAsync(); + var upsertResults = await sut.UpsertAsync([record1, record2, record3]); var getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); Assert.Equal([HotelId1, HotelId2, HotelId3], upsertResults); @@ -344,7 +344,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3); @@ -375,7 +375,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() @@ -407,7 +407,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs index 233e39b22a23..09ce1a902933 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs @@ -192,7 +192,7 @@ public async Task ItCanGetUpsertDeleteBatchAsync() var record2 = new PostgresHotel { HotelId = HotelId2, HotelName = "Hotel 2", HotelCode = 1, ParkingIncluded = false, HotelRating = 3.5f, Tags = ["tag1", "tag3"] }; var record3 = new PostgresHotel { HotelId = HotelId3, HotelName = "Hotel 3", HotelCode = 1, ParkingIncluded = true, HotelRating = 2.5f, Tags = ["tag1", "tag4"] }; - var upsertResults = await sut.UpsertAsync([record1, record2, record3]).ToListAsync(); + var upsertResults = await sut.UpsertAsync([record1, record2, record3]); var getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); Assert.Equal([HotelId1, HotelId2, HotelId3], upsertResults); @@ -358,7 +358,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync(bool include await sut.CreateCollectionAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([0.9f, 0.1f, 0.5f, 0.8f]), top: 3, new() @@ -396,7 +396,7 @@ public async Task VectorizedSearchWithEqualToFilterReturnsValidResultsAsync() await sut.CreateCollectionAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 29f, 28f, 27f]), top: 5, new() @@ -428,7 +428,7 @@ public async Task VectorizedSearchWithAnyTagFilterReturnsValidResultsAsync() await 
sut.CreateCollectionAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 29f, 28f, 27f]), top: 5, new() diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs index 3464afa6fe58..01a102f26ebb 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -179,7 +179,7 @@ public async Task ItCanUpsertManyDocumentsToVectorStoreAsync(bool useRecordDefin // Assert. Assert.NotNull(results); - var resultsList = await results.ToListAsync(); + var resultsList = await results; Assert.Equal(3, resultsList.Count); Assert.Contains("HUpsertMany-1", resultsList); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs index 0c63d5aac82c..4b0972a97437 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs @@ -193,7 +193,7 @@ public async Task ItCanUpsertManyDocumentsToVectorStoreAsync(bool useRecordDefin // Assert. Assert.NotNull(results); - var resultsList = await results.ToListAsync(); + var resultsList = await results; Assert.Equal(3, resultsList.Count); Assert.Contains("UpsertMany-1", resultsList); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs index 45686fe125f8..bbe3c6a87431 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs @@ -180,7 +180,7 @@ public async Task ItCanGetUpsertDeleteBatchWithNumericKeyAsync() var record2 = CreateTestHotel(HotelId2); var record3 = CreateTestHotel(HotelId3); - var upsertResults = await sut.UpsertAsync([record1, record2, record3]).ToListAsync(); + var upsertResults = await sut.UpsertAsync([record1, record2, record3]); var getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); Assert.Equal([HotelId1, HotelId2, HotelId3], upsertResults); @@ -214,7 +214,7 @@ public async Task ItCanGetUpsertDeleteBatchWithStringKeyAsync() var record2 = CreateTestHotel(HotelId2); var record3 = CreateTestHotel(HotelId3); - var upsertResults = await sut.UpsertAsync([record1, record2, record3]).ToListAsync(); + var upsertResults = await sut.UpsertAsync([record1, record2, record3]); var getResults = await sut.GetAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); Assert.Equal([HotelId1, HotelId2, HotelId3], upsertResults); @@ -352,7 +352,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync(bool include await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var searchResults = await sut.VectorizedSearchAsync(new 
ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() @@ -389,7 +389,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() @@ -422,7 +422,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs index 8323ca93dedb..27e63ac000b4 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs @@ -160,7 +160,7 @@ public async Task ItCanUpsertAndGetAndDeleteBatchAsync() var record2 = this.CreateTestHotel(hotelId2); var record3 = this.CreateTestHotel(hotelId3); - var upsertResults = await sut.UpsertAsync([record1, record2, record3]).ToListAsync(); + var upsertResults = await sut.UpsertAsync([record1, record2, record3]); var getResults = await sut.GetAsync([hotelId1, hotelId2, hotelId3]).ToListAsync(); Assert.Equal([hotelId1, hotelId2, hotelId3], upsertResults); @@ -223,7 +223,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync(bool include await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() @@ -261,7 +261,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() @@ -294,7 +294,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync(VectorSearc await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 4, new() @@ -338,7 +338,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAndDifferentDataT await sut.CreateCollectionIfNotExistsAsync(); - await sut.UpsertAsync([hotel4, hotel2, hotel5, hotel3, hotel1]).ToListAsync(); + await sut.UpsertAsync([hotel4, hotel2, hotel5, hotel3, hotel1]); // Act var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([40f, 40f, 40f, 40f]), top: 4, new() diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeAllSupportedTypesTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeAllSupportedTypesTests.cs index 
10f0f5c858cf..ddb702b22104 100644 --- a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeAllSupportedTypesTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeAllSupportedTypesTests.cs @@ -61,7 +61,7 @@ public async Task AllTypesBatchGetAsync() } ]; - await collection.UpsertAsync(records).ToArrayAsync(); + await collection.UpsertAsync(records); var allTypes = await collection.GetAsync(records.Select(r => r.Id), new GetRecordOptions { IncludeVectors = true }).ToListAsync(); diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs index 3ae677ae03f9..19dc832aa3d0 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs @@ -35,7 +35,7 @@ private async Task CanSplitBatchToAccountForMaxParameterLimit(bool includeVector var keys = inserted.Select(record => record.Id).ToArray(); Assert.Empty(await collection.GetAsync(keys).ToArrayAsync()); - var receivedKeys = await collection.UpsertAsync(inserted).ToArrayAsync(); + var receivedKeys = await collection.UpsertAsync(inserted); Assert.Equal(keys.ToHashSet(), receivedKeys.ToHashSet()); // .ToHashSet() to ignore order var received = await collection.GetAsync(keys, new() { IncludeVectors = includeVectors }).ToArrayAsync(); @@ -64,7 +64,7 @@ public async Task UpsertBatchIsAtomic() var keys = inserted.Select(record => record.Id).Where(key => key is not null).ToArray(); Assert.Empty(await collection.GetAsync(keys).ToArrayAsync()); - VectorStoreOperationException ex = await Assert.ThrowsAsync(() => collection.UpsertAsync(inserted).ToArrayAsync().AsTask()); + VectorStoreOperationException ex = await Assert.ThrowsAsync(() => collection.UpsertAsync(inserted)); Assert.Equal("UpsertBatch", ex.OperationName); var metadata = collection.GetService(typeof(VectorStoreRecordCollectionMetadata)) as VectorStoreRecordCollectionMetadata; diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs index d6b987571a69..7675c412f293 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs @@ -222,7 +222,7 @@ public async Task BatchCRUD() Floats = Enumerable.Range(0, 10).Select(j => (float)(i + j)).ToArray() }).ToArray(); - string[] keys = await collection.UpsertAsync(inserted).ToArrayAsync(); + var keys = await collection.UpsertAsync(inserted); for (int i = 0; i < inserted.Length; i++) { Assert.Equal(inserted[i].Id, keys[i]); @@ -241,7 +241,7 @@ public async Task BatchCRUD() Floats = i.Floats }).ToArray(); - keys = await collection.UpsertAsync(updated).ToArrayAsync(); + keys = await collection.UpsertAsync(updated); for (int i = 0; i < updated.Length; i++) { Assert.Equal(updated[i].Id, keys[i]); diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs index 1c7ec863acba..e51f2619444e 100644 --- 
a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs @@ -46,14 +46,14 @@ private async Task GetBatchAsyncReturnsInsertedRecords(bool includeVectors) [ConditionalFact] public virtual async Task UpsertBatchAsyncThrowsArgumentNullExceptionForNullBatch() { - ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.UpsertAsync(records: null!).ToArrayAsync().AsTask()); + ArgumentNullException ex = await Assert.ThrowsAsync(() => fixture.Collection.UpsertAsync(records: null!)); Assert.Equal("records", ex.ParamName); } [ConditionalFact] public virtual async Task UpsertBatchAsyncDoesNotThrowForEmptyBatch() { - Assert.Empty(await fixture.Collection.UpsertAsync([]).ToArrayAsync()); + Assert.Empty(await fixture.Collection.UpsertAsync([])); } [ConditionalFact] @@ -70,7 +70,7 @@ public virtual async Task UpsertBatchAsyncCanInsertNewRecord() var keys = inserted.Select(record => record.Id).ToArray(); Assert.Empty(await collection.GetAsync(keys).ToArrayAsync()); - var receivedKeys = await collection.UpsertAsync(inserted).ToArrayAsync(); + var receivedKeys = await collection.UpsertAsync(inserted); Assert.Equal(keys.ToHashSet(), receivedKeys.ToHashSet()); // .ToHashSet() to ignore order var received = await collection.GetAsync(keys, new() { IncludeVectors = true }).ToArrayAsync(); @@ -90,7 +90,7 @@ public virtual async Task UpsertBatchAsyncCanUpdateExistingRecords() Text = i.ToString(), Floats = Enumerable.Range(0, SimpleRecord.DimensionCount).Select(j => (float)(i + j)).ToArray() }).ToArray(); - await fixture.Collection.UpsertAsync(inserted).ToArrayAsync(); + await fixture.Collection.UpsertAsync(inserted); SimpleRecord[] updated = inserted.Select(i => new SimpleRecord() { @@ -100,7 +100,7 @@ public virtual async Task UpsertBatchAsyncCanUpdateExistingRecords() Floats = i.Floats }).ToArray(); - var keys = await fixture.Collection.UpsertAsync(updated).ToArrayAsync(); + var keys = await fixture.Collection.UpsertAsync(updated); Assert.Equal( updated.Select(r => r.Id).OrderBy(id => id).ToArray(), keys.OrderBy(id => id).ToArray()); @@ -125,7 +125,7 @@ public virtual async Task UpsertCanBothInsertAndUpdateRecordsFromTheSameBatch() // We take first half of the records and insert them. SimpleRecord[] firstHalf = records.Take(records.Length / 2).ToArray(); - TKey[] insertedKeys = await fixture.Collection.UpsertAsync(firstHalf).ToArrayAsync(); + var insertedKeys = await fixture.Collection.UpsertAsync(firstHalf); Assert.Equal( firstHalf.Select(r => r.Id).OrderBy(id => id).ToArray(), insertedKeys.OrderBy(id => id).ToArray()); @@ -138,7 +138,7 @@ public virtual async Task UpsertCanBothInsertAndUpdateRecordsFromTheSameBatch() } // And now we upsert all the records (the first half is an update, the second is an insert). 
- TKey[] mixedKeys = await fixture.Collection.UpsertAsync(records).ToArrayAsync(); + var mixedKeys = await fixture.Collection.UpsertAsync(records); Assert.Equal( records.Select(r => r.Id).OrderBy(id => id).ToArray(), mixedKeys.OrderBy(id => id).ToArray()); diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs index 329457c486bd..9c3c71fabea3 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs @@ -47,11 +47,7 @@ public override async Task InitializeAsync() protected virtual async Task SeedAsync() { - // TODO: UpsertBatchAsync returns IAsyncEnumerable (to support server-generated keys?), but this makes it quite hard to use: - await foreach (var _ in this.Collection.UpsertAsync(this.TestData)) - { - } - + await this.Collection.UpsertAsync(this.TestData); await this.WaitForDataAsync(); } diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs index 12cb73f25861..1f4c3537b3bd 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs @@ -101,7 +101,7 @@ protected async Task SimpleSearch(string distanceFunction, double expectedExactM try { - await collection.UpsertAsync(insertedRecords).ToArrayAsync(); + await collection.UpsertAsync(insertedRecords); var searchResult = await collection.VectorizedSearchAsync(baseVector, top: 3); var results = await searchResult.Results.ToListAsync(); From fe185c14031d3c5f5519cb6d8838d54682aec0e5 Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Fri, 11 Apr 2025 09:37:04 +0100 Subject: [PATCH 41/63] .Net: Refactor MEVD Vector Attribute and make Dimensions mandatory (#11490) ### Motivation and Context #11365 #11318 ### Description - Make Dimensions property mandatory for both the Attribute and Definition Property - Remove unnecessary constructor overloads for Vector attribute in favor of property setters. 
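For illustration only (not part of this patch's diff), here is a minimal sketch of the usage change described above. The `GlossaryEntry` and `GlossaryEntryDefinition` names and the 1536-dimension value are hypothetical placeholders, and the `Microsoft.Extensions.VectorData` namespace is assumed for the MEVD abstractions:

```csharp
using System;
using Microsoft.Extensions.VectorData;

// Attribute-based model: Dimensions is now a mandatory constructor argument,
// and the removed constructor overloads are replaced by property setters.
public sealed class GlossaryEntry
{
    [VectorStoreRecordKey]
    public string Key { get; set; } = string.Empty;

    [VectorStoreRecordData]
    public string Definition { get; set; } = string.Empty;

    // Before this change, options could also be passed as constructor arguments, e.g.:
    // [VectorStoreRecordVector(Dimensions: 1536, DistanceFunction: DistanceFunction.CosineSimilarity)]
    [VectorStoreRecordVector(1536, DistanceFunction = DistanceFunction.CosineSimilarity)]
    public ReadOnlyMemory<float> DefinitionEmbedding { get; set; }
}

// Definition-based model: Dimensions likewise moves into the
// VectorStoreRecordVectorProperty constructor instead of an optional setter.
public static class GlossaryEntryDefinition
{
    public static VectorStoreRecordDefinition Create() => new()
    {
        Properties =
        [
            new VectorStoreRecordKeyProperty("Key", typeof(string)),
            new VectorStoreRecordDataProperty("Definition", typeof(string)),
            new VectorStoreRecordVectorProperty("DefinitionEmbedding", typeof(ReadOnlyMemory<float>), 1536)
            {
                DistanceFunction = DistanceFunction.CosineSimilarity
            }
        ]
    };
}
```

With the dimension count supplied up front on the attribute or property, the per-connector "Dimensions must be a positive integer" checks (Azure AI Search, Cosmos DB NoSQL, Pinecone, Postgres, Qdrant, Redis, SQL Server) are removed in the diffs below.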
### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- .../AzureAISearchFactory.cs | 2 +- .../PineconeFactory.cs | 2 +- .../QdrantFactory.cs | 2 +- .../RedisFactory.cs | 2 +- .../VectorStore_DataIngestion_CustomMapper.cs | 2 +- .../VectorStore_DynamicDataModel_Interop.cs | 2 +- .../Optimization/FrugalGPTWithFilters.cs | 2 +- .../PluginSelectionWithFilters.cs | 2 +- .../Step5_Use_DynamicDataModel.cs | 2 +- .../Step6_Use_CustomMapper.cs | 2 +- ...zureAISearchDynamicDataModelMapperTests.cs | 14 ++++----- ...VectorStoreCollectionCreateMappingTests.cs | 10 ------- ...ISearchVectorStoreRecordCollectionTests.cs | 6 ++-- .../AzureCosmosDBMongoDBHotelModel.cs | 2 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 4 +-- ...osmosDBNoSQLDynamicDataModelMapperTests.cs | 24 +++++++-------- .../AzureCosmosDBNoSQLHotel.cs | 2 +- ...LVectorStoreCollectionQueryBuilderTests.cs | 2 +- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 6 ++-- ...nMemoryVectorStoreRecordCollectionTests.cs | 15 +++++----- ...earchVectorStoreCollectionCreateMapping.cs | 7 +---- ...osmosDBNoSQLVectorStoreRecordCollection.cs | 5 ---- .../PineconeVectorStoreRecordCollection.cs | 4 +-- ...ostgresVectorStoreRecordPropertyMapping.cs | 5 ---- ...drantVectorStoreCollectionCreateMapping.cs | 5 ---- ...RedisVectorStoreCollectionCreateMapping.cs | 7 +---- .../SqlServerVectorStoreRecordCollection.cs | 8 ----- .../MongoDBDynamicDataModelMapperTests.cs | 16 +++++----- .../MongoDBHotelModel.cs | 2 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 4 +-- .../MongoDBVectorStoreRecordMapperTests.cs | 2 +- ...ineconeVectorStoreRecordCollectionTests.cs | 2 +- .../PostgresHotel.cs | 2 +- ...resVectorStoreCollectionSqlBuilderTests.cs | 16 ++++------ ...ostgresVectorStoreRecordCollectionTests.cs | 4 +-- .../PostgresVectorStoreRecordMapperTests.cs | 4 +-- ...VectorStoreCollectionCreateMappingTests.cs | 12 -------- ...VectorStoreCollectionSearchMappingTests.cs | 2 +- .../QdrantVectorStoreRecordCollectionTests.cs | 4 +-- .../QdrantVectorStoreRecordMapperTests.cs | 12 ++++---- ...RedisHashSetDynamicDataModelMapperTests.cs | 6 ++-- ...disHashSetVectorStoreMappingTestHelpers.cs | 4 +-- ...HashSetVectorStoreRecordCollectionTests.cs | 6 ++-- ...edisHashSetVectorStoreRecordMapperTests.cs | 4 +-- .../RedisJsonDynamicDataModelMapperTests.cs | 2 +- ...disJsonVectorStoreRecordCollectionTests.cs | 6 ++-- .../RedisJsonVectorStoreRecordMapperTests.cs | 4 +-- ...VectorStoreCollectionCreateMappingTests.cs | 15 ---------- ...VectorStoreCollectionSearchMappingTests.cs | 10 +++---- .../SqliteHotel.cs | 2 +- .../SqliteVectorStoreRecordMapperTests.cs | 4 +-- .../WeaviateDynamicDataModelMapperTests.cs | 16 +++++----- .../WeaviateHotel.cs | 2 +- ...VectorStoreCollectionCreateMappingTests.cs | 10 +++---- ...rStoreRecordCollectionQueryBuilderTests.cs | 2 +- .../WeaviateVectorStoreRecordMapperTests.cs | 2 +- .../VectorStoreRecordModelBuilder.cs | 5 +--- .../VectorStoreRecordVectorPropertyModel.cs | 17 ++++++++++- .../VectorStoreRecordDataAttribute.cs | 2 +- .../VectorStoreRecordKeyAttribute.cs | 2 +- .../VectorStoreRecordVectorAttribute.cs | 20 ++++++++++--- 
.../VectorStoreRecordVectorProperty.cs | 30 ++++++++++++++++++- .../AzureAISearchVectorStoreFixture.cs | 2 +- .../AzureCosmosDBMongoDBHotel.cs | 2 +- .../AzureCosmosDBMongoDBVectorStoreFixture.cs | 2 +- .../AzureCosmosDBNoSQLHotel.cs | 2 +- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 2 +- .../BaseVectorStoreRecordCollectionTests.cs | 2 +- .../Connectors/Memory/MongoDB/MongoDBHotel.cs | 2 +- .../MongoDB/MongoDBVectorStoreFixture.cs | 2 +- .../Memory/Postgres/PostgresHotel.cs | 2 +- ...ostgresVectorStoreRecordCollectionTests.cs | 4 +-- .../Memory/Qdrant/QdrantVectorStoreFixture.cs | 8 ++--- .../Memory/Redis/RedisVectorStoreFixture.cs | 4 +-- .../Connectors/Memory/Sqlite/SqliteHotel.cs | 2 +- .../SqliteVectorStoreRecordCollectionTests.cs | 8 ++--- .../Memory/Weaviate/WeaviateHotel.cs | 2 +- ...eaviateVectorStoreRecordCollectionTests.cs | 2 +- .../Support/PineconeAllTypes.cs | 4 +-- .../SqlServerCommandBuilderTests.cs | 10 +++---- .../Collections/CollectionConformanceTests.cs | 2 +- .../Filter/BasicFilterTests.cs | 3 +- ...rdVectorizedHybridSearchComplianceTests.cs | 4 +-- .../Support/DynamicDataModelFixture.cs | 5 +--- .../Support/SimpleModelFixture.cs | 3 +- ...orSearchDistanceFunctionComplianceTests.cs | 3 +- 86 files changed, 226 insertions(+), 255 deletions(-) diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs index da5c18354818..b76b3ae188cb 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs @@ -29,7 +29,7 @@ public static class AzureAISearchFactory new VectorStoreRecordKeyProperty("id", typeof(string)), new VectorStoreRecordDataProperty("content", typeof(string)), new VectorStoreRecordDataProperty("metadata", typeof(string)), - new VectorStoreRecordVectorProperty("content_vector", typeof(ReadOnlyMemory)) { Dimensions = 1536 } + new VectorStoreRecordVectorProperty("content_vector", typeof(ReadOnlyMemory), 1536) } }; diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs index a645ac53997b..5c10b7b0d860 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs @@ -24,7 +24,7 @@ public static class PineconeFactory new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("Content", typeof(string)) { StoragePropertyName = "text" }, new VectorStoreRecordDataProperty("Source", typeof(string)) { StoragePropertyName = "source" }, - new VectorStoreRecordVectorProperty("Embedding", typeof(ReadOnlyMemory)) { StoragePropertyName = "embedding", Dimensions = 1536 } + new VectorStoreRecordVectorProperty("Embedding", typeof(ReadOnlyMemory), 1536) { StoragePropertyName = "embedding" } } }; diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs index 2edcaa555462..9a56141ff9c9 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs @@ -27,7 +27,7 @@ public static class QdrantFactory Properties = new List { new VectorStoreRecordKeyProperty("Key", 
typeof(Guid)), - new VectorStoreRecordVectorProperty("Embedding", typeof(ReadOnlyMemory)) { StoragePropertyName = "embedding", Dimensions = 1536 } + new VectorStoreRecordVectorProperty("Embedding", typeof(ReadOnlyMemory), 1536) { StoragePropertyName = "embedding" } } }; diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs index 2a1e01ac4583..fc543c71c1a0 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs @@ -24,7 +24,7 @@ public static class RedisFactory new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("Content", typeof(string)) { StoragePropertyName = "text" }, new VectorStoreRecordDataProperty("Source", typeof(string)) { StoragePropertyName = "source" }, - new VectorStoreRecordVectorProperty("Embedding", typeof(ReadOnlyMemory)) { StoragePropertyName = "embedding", Dimensions = 1536 } + new VectorStoreRecordVectorProperty("Embedding", typeof(ReadOnlyMemory), 1536) { StoragePropertyName = "embedding" } } }; diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs index 7ae548016c6f..ca652ff7d6e9 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs @@ -50,7 +50,7 @@ public class VectorStore_DataIngestion_CustomMapper(ITestOutputHelper output, Ve new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("Term", typeof(string)), new VectorStoreRecordDataProperty("Definition", typeof(string)), - new VectorStoreRecordVectorProperty("DefinitionEmbedding", typeof(ReadOnlyMemory)) { Dimensions = 1536, DistanceFunction = DistanceFunction.DotProductSimilarity } + new VectorStoreRecordVectorProperty("DefinitionEmbedding", typeof(ReadOnlyMemory), 1536) { DistanceFunction = DistanceFunction.DotProductSimilarity } } }; diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DynamicDataModel_Interop.cs b/dotnet/samples/Concepts/Memory/VectorStore_DynamicDataModel_Interop.cs index 74e90490d37a..d7bb667284f4 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_DynamicDataModel_Interop.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_DynamicDataModel_Interop.cs @@ -31,7 +31,7 @@ public class VectorStore_DynamicDataModel_Interop(ITestOutputHelper output, Vect new VectorStoreRecordKeyProperty("Key", typeof(ulong)), new VectorStoreRecordDataProperty("Term", typeof(string)), new VectorStoreRecordDataProperty("Definition", typeof(string)), - new VectorStoreRecordVectorProperty("DefinitionEmbedding", typeof(ReadOnlyMemory)) { Dimensions = 1536 } + new VectorStoreRecordVectorProperty("DefinitionEmbedding", typeof(ReadOnlyMemory), 1536) } }; diff --git a/dotnet/samples/Concepts/Optimization/FrugalGPTWithFilters.cs b/dotnet/samples/Concepts/Optimization/FrugalGPTWithFilters.cs index a889559af875..0b4dad80997a 100644 --- a/dotnet/samples/Concepts/Optimization/FrugalGPTWithFilters.cs +++ b/dotnet/samples/Concepts/Optimization/FrugalGPTWithFilters.cs @@ -323,7 +323,7 @@ private sealed class ExampleRecord [VectorStoreRecordData] public string Example { get; set; } - [VectorStoreRecordVector] + [VectorStoreRecordVector(1536)] public ReadOnlyMemory ExampleEmbedding { get; set; } } } diff --git 
a/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs b/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs index b4d702cc9fa1..0b83daced50b 100644 --- a/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs +++ b/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs @@ -422,7 +422,7 @@ private sealed class FunctionRecord [VectorStoreRecordData] public string FunctionInfo { get; set; } - [VectorStoreRecordVector] + [VectorStoreRecordVector(1536)] public ReadOnlyMemory FunctionInfoEmbedding { get; set; } } diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs b/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs index b5b0f6c7cd72..0b97a03dfd32 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs @@ -44,7 +44,7 @@ public async Task SearchAVectorStoreWithGenericDataModelAsync() new VectorStoreRecordDataProperty("Category", typeof(string)), new VectorStoreRecordDataProperty("Term", typeof(string)), new VectorStoreRecordDataProperty("Definition", typeof(string)), - new VectorStoreRecordVectorProperty("DefinitionEmbedding", typeof(ReadOnlyMemory)) { Dimensions = 1536 }, + new VectorStoreRecordVectorProperty("DefinitionEmbedding", typeof(ReadOnlyMemory), 1536), } }; diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs b/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs index 2171f59ef711..7df2189ccd48 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs @@ -37,7 +37,7 @@ public async Task UseCustomMapperAsync() new VectorStoreRecordDataProperty("Category", typeof(string)), new VectorStoreRecordDataProperty("Term", typeof(string)), new VectorStoreRecordDataProperty("Definition", typeof(string)), - new VectorStoreRecordVectorProperty("DefinitionEmbedding", typeof(ReadOnlyMemory)) { Dimensions = 1536 }, + new VectorStoreRecordVectorProperty("DefinitionEmbedding", typeof(ReadOnlyMemory), 1536) } }; diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchDynamicDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchDynamicDataModelMapperTests.cs index 338fabab03f3..75e1860772fe 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchDynamicDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchDynamicDataModelMapperTests.cs @@ -33,8 +33,8 @@ public class AzureAISearchDynamicDataModelMapperTests new VectorStoreRecordDataProperty("DateTimeOffsetDataProp", typeof(DateTimeOffset)), new VectorStoreRecordDataProperty("NullableDateTimeOffsetDataProp", typeof(DateTimeOffset?)), new VectorStoreRecordDataProperty("TagListDataProp", typeof(string[])), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?), 10), ]); private static readonly float[] s_vector1 = [1.0f, 2.0f, 3.0f]; @@ -101,7 +101,7 @@ public void MapFromDataToStorageModelMapsNullValues() new VectorStoreRecordKeyProperty("Key", typeof(string)), new 
VectorStoreRecordDataProperty("StringDataProp", typeof(string)), new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?), 10), ]); var dataModel = new Dictionary @@ -179,7 +179,7 @@ public void MapFromStorageToDataModelMapsNullValues() new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?), 10), ]); var storageModel = new JsonObject(); @@ -209,7 +209,7 @@ public void MapFromStorageToDataModelThrowsForMissingKey() new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?), 10), ]); var sut = new AzureAISearchDynamicDataModelMapper(model); @@ -230,7 +230,7 @@ public void MapFromDataToStorageModelSkipsMissingProperties() [ new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), ]); var dataModel = new Dictionary { ["Key"] = "key" }; @@ -253,7 +253,7 @@ public void MapFromStorageToDataModelSkipsMissingProperties() [ new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), ]); var storageModel = new JsonObject(); diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionCreateMappingTests.cs index 966307ed7d2f..8c4aca17be0a 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionCreateMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreCollectionCreateMappingTests.cs @@ -198,16 +198,6 @@ public void MapVectorFieldThrowsForUnsupportedDistanceFunction() Assert.Throws(() => AzureAISearchVectorStoreCollectionCreateMapping.MapVectorField(vectorProperty)); } - [Fact] - public void MapVectorFieldThrowsForMissingDimensionsCount() - { - // Arrange - var vectorProperty = new VectorStoreRecordVectorPropertyModel("testvector", typeof(ReadOnlyMemory)); - - // Act & Assert - Assert.Throws(() => AzureAISearchVectorStoreCollectionCreateMapping.MapVectorField(vectorProperty)); - } - [Theory] [MemberData(nameof(DataTypeMappingOptions))] public void GetSDKFieldDataTypeMapsTypesCorrectly(Type propertyType, SearchFieldDataType searchFieldDataType) diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs 
b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs index fa82dd316d51..e21da5dc4372 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -541,7 +541,7 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() { new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("Data1", typeof(string)), - new VectorStoreRecordVectorProperty("Vector1", typeof(ReadOnlyMemory)) { Dimensions = 4 }, + new VectorStoreRecordVectorProperty("Vector1", typeof(ReadOnlyMemory), 4), } }; @@ -674,8 +674,8 @@ private static MultiPropsModel CreateModel(string key, bool withVectors) new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("Data1", typeof(string)), new VectorStoreRecordDataProperty("Data2", typeof(string)), - new VectorStoreRecordVectorProperty("Vector1", typeof(ReadOnlyMemory)) { Dimensions = 4 }, - new VectorStoreRecordVectorProperty("Vector2", typeof(ReadOnlyMemory)) { Dimensions = 4 } + new VectorStoreRecordVectorProperty("Vector1", typeof(ReadOnlyMemory), 4), + new VectorStoreRecordVectorProperty("Vector2", typeof(ReadOnlyMemory), 4) ] }; diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBHotelModel.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBHotelModel.cs index e376a90efcb5..f8c541aa20ca 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBHotelModel.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBHotelModel.cs @@ -39,6 +39,6 @@ public class AzureCosmosDBMongoDBHotelModel(string hotelId) public string? Description { get; set; } /// A vector field. - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineDistance, IndexKind: IndexKind.IvfFlat)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineDistance, IndexKind = IndexKind.IvfFlat)] public ReadOnlyMemory? DescriptionEmbedding { get; set; } } diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 11c795ea727b..0b466bafb47c 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -862,11 +862,11 @@ private sealed class VectorSearchModel [VectorStoreRecordData] public string? 
HotelName { get; set; } - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineDistance, IndexKind: IndexKind.IvfFlat, StoragePropertyName = "test_embedding_1")] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineDistance, IndexKind = IndexKind.IvfFlat, StoragePropertyName = "test_embedding_1")] public ReadOnlyMemory TestEmbedding1 { get; set; } [BsonElement("test_embedding_2")] - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineDistance, IndexKind: IndexKind.IvfFlat)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineDistance, IndexKind = IndexKind.IvfFlat)] public ReadOnlyMemory TestEmbedding2 { get; set; } } #pragma warning restore CA1812 diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLDynamicDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLDynamicDataModelMapperTests.cs index 4fad5c339f78..70a896148799 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLDynamicDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLDynamicDataModelMapperTests.cs @@ -42,15 +42,15 @@ public sealed class AzureCosmosDBNoSQLDynamicDataModelMapperTests new VectorStoreRecordDataProperty("NullableDateTimeOffsetDataProp", typeof(DateTimeOffset?)), new VectorStoreRecordDataProperty("TagListDataProp", typeof(List)), #if NET5_0_OR_GREATER - new VectorStoreRecordVectorProperty("HalfVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableHalfVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("HalfVector", typeof(ReadOnlyMemory), 10), + new VectorStoreRecordVectorProperty("NullableHalfVector", typeof(ReadOnlyMemory?), 10), #endif - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), - new VectorStoreRecordVectorProperty("ByteVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableByteVector", typeof(ReadOnlyMemory?)), - new VectorStoreRecordVectorProperty("SByteVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableSByteVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?), 10), + new VectorStoreRecordVectorProperty("ByteVector", typeof(ReadOnlyMemory), 10), + new VectorStoreRecordVectorProperty("NullableByteVector", typeof(ReadOnlyMemory?), 10), + new VectorStoreRecordVectorProperty("SByteVector", typeof(ReadOnlyMemory), 10), + new VectorStoreRecordVectorProperty("NullableSByteVector", typeof(ReadOnlyMemory?), 10), }, }); @@ -141,7 +141,7 @@ public void MapFromDataToStorageModelMapsNullValues() new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?), 10), }, }; @@ -241,7 +241,7 @@ public void MapFromStorageToDataModelMapsNullValues() new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("StringDataProp", 
typeof(string)), new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?), 10), }, }; @@ -288,7 +288,7 @@ public void MapFromDataToStorageModelSkipsMissingProperties() { new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), }, }; @@ -314,7 +314,7 @@ public void MapFromStorageToDataModelSkipsMissingProperties() { new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), }, }; diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLHotel.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLHotel.cs index df06e97d3846..331758b02202 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLHotel.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLHotel.cs @@ -39,6 +39,6 @@ public class AzureCosmosDBNoSQLHotel(string hotelId) /// A vector field. [JsonPropertyName("description_embedding")] - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineSimilarity, IndexKind: IndexKind.Flat)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineSimilarity, IndexKind = IndexKind.Flat)] public ReadOnlyMemory? 
DescriptionEmbedding { get; set; } } diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs index a1797cbea98d..93264fd7384e 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs @@ -25,7 +25,7 @@ public sealed class AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests Properties = [ new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordVectorProperty("TestProperty1", typeof(string)) { StoragePropertyName = "test_property_1" }, + new VectorStoreRecordVectorProperty("TestProperty1", typeof(string), 10) { StoragePropertyName = "test_property_1" }, new VectorStoreRecordDataProperty("TestProperty2", typeof(string)) { StoragePropertyName = "test_property_2" }, new VectorStoreRecordDataProperty("TestProperty3", typeof(string)) { StoragePropertyName = "test_property_3" } ] diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 27008c3973bb..9cc4a635189c 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -722,13 +722,13 @@ private sealed class TestIndexingModel [VectorStoreRecordKey] public string? Id { get; set; } - [VectorStoreRecordVector(Dimensions: 2, DistanceFunction: DistanceFunction.CosineSimilarity, IndexKind: IndexKind.Flat)] + [VectorStoreRecordVector(Dimensions: 2, DistanceFunction = DistanceFunction.CosineSimilarity, IndexKind = IndexKind.Flat)] public ReadOnlyMemory? DescriptionEmbedding2 { get; set; } - [VectorStoreRecordVector(Dimensions: 3, DistanceFunction: DistanceFunction.DotProductSimilarity, IndexKind: IndexKind.QuantizedFlat)] + [VectorStoreRecordVector(Dimensions: 3, DistanceFunction = DistanceFunction.DotProductSimilarity, IndexKind = IndexKind.QuantizedFlat)] public ReadOnlyMemory? DescriptionEmbedding3 { get; set; } - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.EuclideanDistance, IndexKind: IndexKind.DiskAnn)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.EuclideanDistance, IndexKind = IndexKind.DiskAnn)] public ReadOnlyMemory? 
DescriptionEmbedding4 { get; set; } [VectorStoreRecordData(IsIndexed = true)] diff --git a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs index 8c18d13321ca..b30ec699d192 100644 --- a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs @@ -373,12 +373,11 @@ public async Task CanSearchWithDifferentDistanceFunctionsAsync(string distanceFu VectorStoreRecordDefinition singlePropsDefinition = new() { - Properties = - [ - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("Data", typeof(string)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { DistanceFunction = distanceFunction } - ] + Properties = this._singlePropsDefinition.Properties.Select(x => x switch + { + VectorStoreRecordVectorProperty vectorProperty => new VectorStoreRecordVectorProperty(vectorProperty) { DistanceFunction = distanceFunction }, + _ => x + }).ToList() }; var sut = new InMemoryVectorStoreRecordCollection>( @@ -550,7 +549,7 @@ private InMemoryVectorStoreRecordCollection> Create new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("Tags", typeof(List)) { IsIndexed = true }, new VectorStoreRecordDataProperty("Data", typeof(string)) { IsIndexed = true }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) ] }; @@ -565,7 +564,7 @@ public sealed class SinglePropsModel [VectorStoreRecordData(IsIndexed = true)] public string Data { get; set; } = string.Empty; - [VectorStoreRecordVector] + [VectorStoreRecordVector(10)] public ReadOnlyMemory? Vector { get; set; } public string? NotAnnotated { get; set; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs index 8373c3b362e8..14ef7f40376d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreCollectionCreateMapping.cs @@ -65,11 +65,6 @@ public static SimpleField MapDataField(VectorStoreRecordDataPropertyModel dataPr /// Throws when the definition is missing required information, or unsupported options are configured. public static (VectorSearchField vectorSearchField, VectorSearchAlgorithmConfiguration algorithmConfiguration, VectorSearchProfile vectorSearchProfile) MapVectorField(VectorStoreRecordVectorPropertyModel vectorProperty) { - if (vectorProperty.Dimensions is not > 0) - { - throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' must be set to a positive integer to create a collection."); - } - // Build a name for the profile and algorithm configuration based on the property name // since we'll just create a separate one for each vector property. 
var vectorSearchProfileName = $"{vectorProperty.StorageName}Profile"; @@ -87,7 +82,7 @@ public static (VectorSearchField vectorSearchField, VectorSearchAlgorithmConfigu }; var vectorSearchProfile = new VectorSearchProfile(vectorSearchProfileName, algorithmConfigName); - return (new VectorSearchField(vectorProperty.StorageName, vectorProperty.Dimensions.Value, vectorSearchProfileName), algorithmConfiguration, vectorSearchProfile); + return (new VectorSearchField(vectorProperty.StorageName, vectorProperty.Dimensions, vectorSearchProfileName), algorithmConfiguration, vectorSearchProfile); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index 3f1e2fbaddd8..393c85605e47 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -481,11 +481,6 @@ private ContainerProperties GetContainerProperties() foreach (var property in this._model.VectorProperties) { - if (property.Dimensions is not > 0) - { - throw new VectorStoreOperationException($"Property {nameof(property.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{property.ModelName}' must be set to a positive integer to create a collection."); - } - var path = $"/{property.StorageName}"; var embedding = new Embedding diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index f817881333fd..528370ca695c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -102,7 +102,7 @@ public Task CreateCollectionAsync(CancellationToken cancellationToken = default) CreateIndexRequest request = new() { Name = this.CollectionName, - Dimension = vectorProperty.Dimensions ?? throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' must be set to a positive integer to create a collection."), + Dimension = vectorProperty.Dimensions, Metric = MapDistanceFunction(vectorProperty), Spec = new ServerlessIndexSpec { @@ -419,7 +419,7 @@ public async IAsyncEnumerable GetAsync(Expression> // "Either 'vector' or 'ID' must be provided" // Since we are doing a query, we don't have a vector to provide, so we fake one. // When https://github.com/pinecone-io/pinecone-dotnet-client/issues/43 gets implemented, we need to switch. 
- Vector = new ReadOnlyMemory(new float[this._model.VectorProperty.Dimensions!.Value]), + Vector = new ReadOnlyMemory(new float[this._model.VectorProperty.Dimensions]), Filter = new PineconeFilterTranslator().Translate(filter, this._model), }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs index 2229ec00d29c..f7ad49259c34 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs @@ -138,11 +138,6 @@ public static (string PgType, bool IsNullable) GetPostgresTypeName(Type property /// The PostgreSQL vector type name. public static (string PgType, bool IsNullable) GetPgVectorTypeName(VectorStoreRecordVectorPropertyModel vectorProperty) { - if (vectorProperty.Dimensions <= 0) - { - throw new ArgumentException("Vector property must have a positive number of dimensions."); - } - return ($"VECTOR({vectorProperty.Dimensions})", Nullable.GetUnderlyingType(vectorProperty.Type) != null); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionCreateMapping.cs index 1a193b0715a7..a2505b4d1775 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionCreateMapping.cs @@ -58,11 +58,6 @@ internal static class QdrantVectorStoreCollectionCreateMapping /// Thrown if the property is missing information or has unsupported options specified. 
public static VectorParams MapSingleVector(VectorStoreRecordVectorPropertyModel vectorProperty) { - if (vectorProperty!.Dimensions is not > 0) - { - throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' must be set to a positive integer to create a collection."); - } - if (vectorProperty!.IndexKind is not null && vectorProperty!.IndexKind != IndexKind.Hnsw) { throw new InvalidOperationException($"Index kind '{vectorProperty!.IndexKind}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' is not supported by the Qdrant VectorStore."); diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs index bd392f6adbd6..a6c3545bcea2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs @@ -110,14 +110,9 @@ public static Schema MapToSchema(IEnumerable pro continue; case VectorStoreRecordVectorPropertyModel vectorProperty: - if (vectorProperty.Dimensions is not > 0) - { - throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' must be set to a positive integer to create a collection."); - } - var indexKind = GetSDKIndexKind(vectorProperty); var vectorType = GetSDKVectorType(vectorProperty); - var dimensions = vectorProperty.Dimensions.Value.ToString(CultureInfo.InvariantCulture); + var dimensions = vectorProperty.Dimensions.ToString(CultureInfo.InvariantCulture); var distanceAlgorithm = GetSDKDistanceAlgorithm(vectorProperty); schema.AddVectorField(new FieldName($"{fieldNamePrefix}{storageName}", storageName), indexKind, new Dictionary() { diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index 1ed1bee0cd4c..1e4acc69afb4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -111,14 +111,6 @@ public Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken private async Task CreateCollectionAsync(bool ifNotExists, CancellationToken cancellationToken) { - foreach (var vectorProperty in this._model.VectorProperties) - { - if (vectorProperty.Dimensions is not > 0) - { - throw new InvalidOperationException($"Property {nameof(vectorProperty.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' must be set to a positive integer to create a collection."); - } - } - using SqlConnection connection = new(this._connectionString); using SqlCommand command = SqlServerCommandBuilder.CreateTable( connection, diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBDynamicDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBDynamicDataModelMapperTests.cs index 874d9b9d6796..82ebe2cec560 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBDynamicDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBDynamicDataModelMapperTests.cs @@ -35,10 +35,10 @@ public sealed class 
MongoDBDynamicDataModelMapperTests new VectorStoreRecordDataProperty("DateTimeDataProp", typeof(DateTime)), new VectorStoreRecordDataProperty("NullableDateTimeDataProp", typeof(DateTime?)), new VectorStoreRecordDataProperty("TagListDataProp", typeof(List)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), - new VectorStoreRecordVectorProperty("DoubleVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableDoubleVector", typeof(ReadOnlyMemory?)) + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?), 10), + new VectorStoreRecordVectorProperty("DoubleVector", typeof(ReadOnlyMemory), 10), + new VectorStoreRecordVectorProperty("NullableDoubleVector", typeof(ReadOnlyMemory?), 10) ]); private static readonly float[] s_floatVector = [1.0f, 2.0f, 3.0f]; @@ -113,7 +113,7 @@ public void MapFromDataToStorageModelMapsNullValues() new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)) + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?), 10) ]); var dataModel = new Dictionary @@ -201,7 +201,7 @@ public void MapFromStorageToDataModelMapsNullValues() new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)) + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?), 10) ]); var storageModel = new BsonDocument @@ -244,7 +244,7 @@ public void MapFromDataToStorageModelSkipsMissingProperties() [ new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), ]); var dataModel = new Dictionary { ["Key"] = "key" }; @@ -267,7 +267,7 @@ public void MapFromStorageToDataModelSkipsMissingProperties() [ new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), ]); var storageModel = new BsonDocument diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBHotelModel.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBHotelModel.cs index a7dfe4dea140..6313fa3cc0dd 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBHotelModel.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBHotelModel.cs @@ -39,6 +39,6 @@ public class MongoDBHotelModel(string hotelId) public string? Description { get; set; } /// A vector field. - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineSimilarity)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineSimilarity)] public ReadOnlyMemory? 
DescriptionEmbedding { get; set; } } diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs index 99bc60b9104b..77a26fe52c22 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs @@ -858,11 +858,11 @@ private sealed class VectorSearchModel [VectorStoreRecordData] public string? HotelName { get; set; } - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineDistance, IndexKind: IndexKind.IvfFlat, StoragePropertyName = "test_embedding_1")] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineDistance, IndexKind = IndexKind.IvfFlat, StoragePropertyName = "test_embedding_1")] public ReadOnlyMemory TestEmbedding1 { get; set; } [BsonElement("test_embedding_2")] - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineDistance, IndexKind: IndexKind.IvfFlat)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineDistance, IndexKind = IndexKind.IvfFlat)] public ReadOnlyMemory TestEmbedding2 { get; set; } } #pragma warning restore CA1812 diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordMapperTests.cs index de4099553b9d..d7a1b3607ad1 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordMapperTests.cs @@ -28,7 +28,7 @@ public MongoDBVectorStoreRecordMapperTests() new VectorStoreRecordDataProperty("HotelName", typeof(string)), new VectorStoreRecordDataProperty("Tags", typeof(List)), new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool)), - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), 10) ] }; diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs index fd8d00dd9fb2..4c8bc30ade07 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs @@ -33,7 +33,7 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() { new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("OriginalNameData", typeof(string)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory?)) { Dimensions = 4 }, + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory?), 4), } }; var pineconeClient = new Sdk.PineconeClient("fake api key"); diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresHotel.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresHotel.cs index e8e84badf292..c50fd11567a9 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresHotel.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresHotel.cs @@ -45,7 +45,7 @@ public record PostgresHotel() public DateTimeOffset UpdatedAt { get; set; 
} = DateTimeOffset.UtcNow; /// A vector field. - [VectorStoreRecordVector(4, IndexKind.Hnsw, DistanceFunction.ManhattanDistance)] + [VectorStoreRecordVector(4, DistanceFunction = DistanceFunction.ManhattanDistance, IndexKind = IndexKind.Hnsw)] public ReadOnlyMemory? DescriptionEmbedding { get; set; } } #pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable. diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs index 2b4a9eda992a..c99110dfbd19 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs @@ -37,12 +37,12 @@ public void TestBuildCreateTableCommand(bool ifNotExists) new VectorStoreRecordDataProperty("description", typeof(string)), new VectorStoreRecordDataProperty("parking_is_included", typeof(bool)) { StoragePropertyName = "free_parking" }, new VectorStoreRecordDataProperty("tags", typeof(List)), - new VectorStoreRecordVectorProperty("embedding1", typeof(ReadOnlyMemory)) + new VectorStoreRecordVectorProperty("embedding1", typeof(ReadOnlyMemory), 10) { Dimensions = 10, IndexKind = "hnsw", }, - new VectorStoreRecordVectorProperty("embedding2", typeof(ReadOnlyMemory?)) + new VectorStoreRecordVectorProperty("embedding2", typeof(ReadOnlyMemory?), 10) { Dimensions = 10, IndexKind = "hnsw", @@ -268,14 +268,12 @@ public void TestBuildGetCommand() new VectorStoreRecordDataProperty("description", typeof(string)), new VectorStoreRecordDataProperty("parking_is_included", typeof(bool)) { StoragePropertyName = "free_parking" }, new VectorStoreRecordDataProperty("tags", typeof(List)), - new VectorStoreRecordVectorProperty("embedding1", typeof(ReadOnlyMemory)) + new VectorStoreRecordVectorProperty("embedding1", typeof(ReadOnlyMemory), 10) { - Dimensions = 10, IndexKind = "hnsw", }, - new VectorStoreRecordVectorProperty("embedding2", typeof(ReadOnlyMemory?)) + new VectorStoreRecordVectorProperty("embedding2", typeof(ReadOnlyMemory?), 10) { - Dimensions = 10, IndexKind = "hnsw", } ] @@ -313,14 +311,12 @@ public void TestBuildGetBatchCommand() new VectorStoreRecordDataProperty("description", typeof(string)), new VectorStoreRecordDataProperty("parking_is_included", typeof(bool)) { StoragePropertyName = "free_parking" }, new VectorStoreRecordDataProperty("tags", typeof(List)), - new VectorStoreRecordVectorProperty("embedding1", typeof(ReadOnlyMemory)) + new VectorStoreRecordVectorProperty("embedding1", typeof(ReadOnlyMemory), 10) { - Dimensions = 10, IndexKind = "hnsw", }, - new VectorStoreRecordVectorProperty("embedding2", typeof(ReadOnlyMemory?)) + new VectorStoreRecordVectorProperty("embedding2", typeof(ReadOnlyMemory?), 10) { - Dimensions = 10, IndexKind = "hnsw", } ] diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs index fec054b178e1..6e071966facd 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordCollectionTests.cs @@ -39,7 +39,7 @@ public async Task CreatesCollectionForGenericModelAsync() new
VectorStoreRecordDataProperty("HotelRating", typeof(float)) { IsIndexed = true }, new VectorStoreRecordDataProperty("Tags", typeof(List)), new VectorStoreRecordDataProperty("Description", typeof(string)), - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 100, DistanceFunction = DistanceFunction.ManhattanDistance } + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), 100) { DistanceFunction = DistanceFunction.ManhattanDistance } ] }; var options = new PostgresVectorStoreRecordCollectionOptions>() @@ -190,7 +190,7 @@ private sealed class TestRecord [VectorStoreRecordData] public string? Data { get; set; } - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineDistance)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineDistance)] public ReadOnlyMemory? Vector { get; set; } } diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs index 785d2ef28317..9a9198b7669c 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs @@ -163,7 +163,7 @@ private static VectorStoreRecordDefinition GetRecordDefinition() new VectorStoreRecordDataProperty("StringProperty", typeof(string)), new VectorStoreRecordDataProperty("IntProperty", typeof(int)), new VectorStoreRecordDataProperty("StringArray", typeof(IEnumerable)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), } }; } @@ -198,7 +198,7 @@ private sealed class TestRecord [VectorStoreRecordData] public IEnumerable? StringArray { get; set; } - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineDistance)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineDistance)] public ReadOnlyMemory? FloatVector { get; set; } } #pragma warning restore CA1812 diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionCreateMappingTests.cs index 79c93a86fb71..f5ec56fff9a7 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionCreateMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionCreateMappingTests.cs @@ -51,18 +51,6 @@ public void MapSingleVectorThrowsForUnsupportedDistanceFunction() Assert.Throws(() => QdrantVectorStoreCollectionCreateMapping.MapSingleVector(vectorProperty)); } - [Theory] - [InlineData(null)] - [InlineData(0)] - public void MapSingleVectorThrowsIfDimensionsIsInvalid(int? dimensions) - { - // Arrange. - var vectorProperty = new VectorStoreRecordVectorPropertyModel("testvector", typeof(ReadOnlyMemory)) { Dimensions = dimensions }; - - // Act and assert. 
- Assert.Throws(() => QdrantVectorStoreCollectionCreateMapping.MapSingleVector(vectorProperty)); - } - [Fact] public void MapNamedVectorsCreatesVectorParamsMap() { diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs index 8e6d17f7bedf..c2a50e978fab 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs @@ -28,7 +28,7 @@ public class QdrantVectorStoreCollectionSearchMappingTests [ new VectorStoreRecordKeyProperty("Key", typeof(Guid)) { StoragePropertyName = "storage_key" }, new VectorStoreRecordDataProperty("FieldName", typeof(string)) { StoragePropertyName = "storage_FieldName" }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { StoragePropertyName = "storage_vector" }, + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) { StoragePropertyName = "storage_vector" }, ] }); diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs index a8f0c8e2a20f..ae30bd5432cb 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs @@ -537,7 +537,7 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() { new VectorStoreRecordKeyProperty(nameof(SinglePropsModel.Key), typeof(ulong)), new VectorStoreRecordDataProperty(nameof(SinglePropsModel.OriginalNameData), typeof(string)), - new VectorStoreRecordVectorProperty(nameof(SinglePropsModel.Vector), typeof(ReadOnlyMemory?)) { Dimensions = 4 }, + new VectorStoreRecordVectorProperty(nameof(SinglePropsModel.Vector), typeof(ReadOnlyMemory?), 4), } }; @@ -801,7 +801,7 @@ private static VectorStoreRecordDefinition CreateSinglePropsDefinition(Type keyT new VectorStoreRecordKeyProperty("Key", keyType), new VectorStoreRecordDataProperty("OriginalNameData", typeof(string)) { IsIndexed = true, IsFullTextIndexed = true }, new VectorStoreRecordDataProperty("Data", typeof(string)) { IsIndexed = true, StoragePropertyName = "data_storage_name" }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { StoragePropertyName = "vector_storage_name" } + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 4) { StoragePropertyName = "vector_storage_name" } ] }; } diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs index 95d6c8de3b1d..56a7e6c22b64 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs @@ -376,7 +376,7 @@ private static void AddDataToMultiPropsPointStruct(PointStruct pointStruct) { new VectorStoreRecordKeyProperty("Key", keyType) { StoragePropertyName = "key" }, new VectorStoreRecordDataProperty("Data", typeof(string)) { StoragePropertyName = "data" }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { StoragePropertyName = "vector" }, + new 
VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) { StoragePropertyName = "vector" }, }, }; @@ -388,7 +388,7 @@ private sealed class SinglePropsModel [VectorStoreRecordData(StoragePropertyName = "data")] public string Data { get; set; } = string.Empty; - [VectorStoreRecordVector(StoragePropertyName = "vector")] + [VectorStoreRecordVector(10, StoragePropertyName = "vector")] public ReadOnlyMemory? Vector { get; set; } public string NotAnnotated { get; set; } = string.Empty; @@ -408,8 +408,8 @@ private sealed class SinglePropsModel new VectorStoreRecordDataProperty("DataDateTime", typeof(DateTime)) { StoragePropertyName = "dataDateTime" }, new VectorStoreRecordDataProperty("DataDateTimeOffset", typeof(DateTimeOffset)) { StoragePropertyName = "dataDateTimeOffset" }, new VectorStoreRecordDataProperty("DataArrayInt", typeof(List)) { StoragePropertyName = "dataArrayInt" }, - new VectorStoreRecordVectorProperty("Vector1", typeof(ReadOnlyMemory)) { StoragePropertyName = "vector1" }, - new VectorStoreRecordVectorProperty("Vector2", typeof(ReadOnlyMemory)) { StoragePropertyName = "vector2" }, + new VectorStoreRecordVectorProperty("Vector1", typeof(ReadOnlyMemory), 10) { StoragePropertyName = "vector1" }, + new VectorStoreRecordVectorProperty("Vector2", typeof(ReadOnlyMemory), 10) { StoragePropertyName = "vector2" }, }, }; @@ -446,10 +446,10 @@ private sealed class MultiPropsModel [VectorStoreRecordData(StoragePropertyName = "dataArrayInt")] public List? DataArrayInt { get; set; } - [VectorStoreRecordVector(StoragePropertyName = "vector1")] + [VectorStoreRecordVector(10, StoragePropertyName = "vector1")] public ReadOnlyMemory? Vector1 { get; set; } - [VectorStoreRecordVector(StoragePropertyName = "vector2")] + [VectorStoreRecordVector(10, StoragePropertyName = "vector2")] public ReadOnlyMemory? 
Vector2 { get; set; } public string NotAnnotated { get; set; } = string.Empty; diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetDynamicDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetDynamicDataModelMapperTests.cs index 2a51b5a9d4eb..9392906c876c 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetDynamicDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetDynamicDataModelMapperTests.cs @@ -67,7 +67,7 @@ public void MapFromDataToStorageModelMapsNullValues() new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("StringData", typeof(string)) { StoragePropertyName = "storage_string_data" }, new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory?), 10), }, }); @@ -136,7 +136,7 @@ public void MapFromStorageToDataModelMapsNullValues() new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("StringData", typeof(string)) { StoragePropertyName = "storage_string_data" }, new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory?), 10), } }); @@ -170,7 +170,7 @@ public void MapFromDataToStorageModelSkipsMissingProperties() new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("StringData", typeof(string)) { StoragePropertyName = "storage_string_data" }, new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory?), 10), } }); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreMappingTestHelpers.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreMappingTestHelpers.cs index 8b46f69b844b..5ed25e96dcc5 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreMappingTestHelpers.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreMappingTestHelpers.cs @@ -35,8 +35,8 @@ internal static class RedisHashSetVectorStoreMappingTestHelpers new VectorStoreRecordDataProperty("NullableDoubleData", typeof(double?)), new VectorStoreRecordDataProperty("NullableFloatData", typeof(float?)), new VectorStoreRecordDataProperty("NullableBoolData", typeof(bool?)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("DoubleVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), + new VectorStoreRecordVectorProperty("DoubleVector", typeof(ReadOnlyMemory), 10), } }; diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs index a4844133ac31..8cf0cbf20af5 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -532,7 +532,7 @@ public void 
CanCreateCollectionWithMismatchedDefinitionAndType() { new VectorStoreRecordKeyProperty(nameof(SinglePropsModel.Key), typeof(string)), new VectorStoreRecordDataProperty(nameof(SinglePropsModel.OriginalNameData), typeof(string)), - new VectorStoreRecordVectorProperty(nameof(SinglePropsModel.Vector), typeof(ReadOnlyMemory?)) { Dimensions = 4 }, + new VectorStoreRecordVectorProperty(nameof(SinglePropsModel.Vector), typeof(ReadOnlyMemory?), 4), } }; @@ -625,7 +625,7 @@ private static SinglePropsModel CreateModel(string key, bool withVectors) new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("OriginalNameData", typeof(string)), new VectorStoreRecordDataProperty("Data", typeof(string)) { StoragePropertyName = "data_storage_name" }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { StoragePropertyName = "vector_storage_name", DistanceFunction = DistanceFunction.CosineDistance } + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) { StoragePropertyName = "vector_storage_name", DistanceFunction = DistanceFunction.CosineDistance } ] }; @@ -642,7 +642,7 @@ public sealed class SinglePropsModel public string Data { get; set; } = string.Empty; [JsonPropertyName("ignored_vector_json_name")] - [VectorStoreRecordVector(4, DistanceFunction.CosineDistance, StoragePropertyName = "vector_storage_name")] + [VectorStoreRecordVector(4, DistanceFunction = DistanceFunction.CosineDistance, StoragePropertyName = "vector_storage_name")] public ReadOnlyMemory? Vector { get; set; } public string? NotAnnotated { get; set; } diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs index 67e756cdda1f..042b44b27bcf 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs @@ -141,10 +141,10 @@ private sealed class AllTypesModel [VectorStoreRecordData] public bool? NullableBoolData { get; set; } - [VectorStoreRecordVector] + [VectorStoreRecordVector(10)] public ReadOnlyMemory? FloatVector { get; set; } - [VectorStoreRecordVector] + [VectorStoreRecordVector(10)] public ReadOnlyMemory? 
DoubleVector { get; set; } public string NotAnnotated { get; set; } = string.Empty; diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonDynamicDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonDynamicDataModelMapperTests.cs index 5c6babf610ee..9c0c65373138 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonDynamicDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonDynamicDataModelMapperTests.cs @@ -31,7 +31,7 @@ private static readonly VectorStoreRecordModel s_model new VectorStoreRecordDataProperty("IntData", typeof(int)), new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)), new VectorStoreRecordDataProperty("ComplexObjectData", typeof(ComplexObject)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), ] }); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs index d3e5c449cd41..f8c5d8f4b226 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs @@ -601,8 +601,8 @@ private static MultiPropsModel CreateModel(string key, bool withVectors) new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("Data1", typeof(string)) { IsIndexed = true, StoragePropertyName = "ignored_data1_storage_name" }, new VectorStoreRecordDataProperty("Data2", typeof(string)) { IsIndexed = true }, - new VectorStoreRecordVectorProperty("Vector1", typeof(ReadOnlyMemory)) { Dimensions = 4, DistanceFunction = DistanceFunction.CosineDistance, StoragePropertyName = "ignored_vector1_storage_name" }, - new VectorStoreRecordVectorProperty("Vector2", typeof(ReadOnlyMemory)) { Dimensions = 4 } + new VectorStoreRecordVectorProperty("Vector1", typeof(ReadOnlyMemory), 4) { DistanceFunction = DistanceFunction.CosineDistance, StoragePropertyName = "ignored_vector1_storage_name" }, + new VectorStoreRecordVectorProperty("Vector2", typeof(ReadOnlyMemory), 4) ] }; @@ -619,7 +619,7 @@ public sealed class MultiPropsModel public string Data2 { get; set; } = string.Empty; [JsonPropertyName("vector1_json_name")] - [VectorStoreRecordVector(4, DistanceFunction.CosineDistance, StoragePropertyName = "ignored_vector1_storage_name")] + [VectorStoreRecordVector(4, DistanceFunction = DistanceFunction.CosineDistance, StoragePropertyName = "ignored_vector1_storage_name")] public ReadOnlyMemory? Vector1 { get; set; } [VectorStoreRecordVector(4)] diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs index eb296da0dfbe..37692a7eced5 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs @@ -134,10 +134,10 @@ private sealed class MultiPropsModel [VectorStoreRecordData] public string Data2 { get; set; } = string.Empty; - [VectorStoreRecordVector] + [VectorStoreRecordVector(10)] public ReadOnlyMemory? Vector1 { get; set; } - [VectorStoreRecordVector] + [VectorStoreRecordVector(10)] public ReadOnlyMemory? 
Vector2 { get; set; } public string NotAnnotated { get; set; } = string.Empty; diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs index c191cd322fa6..47013e1bb361 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs @@ -97,21 +97,6 @@ public void MapToSchemaCreatesSchema(bool useDollarPrefix) Assert.Equal("L2", ((VectorField)schema.Fields[7]).Attributes!["DISTANCE_METRIC"]); } - [Theory] - [InlineData(null)] - [InlineData(0)] - public void MapToSchemaThrowsOnInvalidVectorDimensions(int? dimensions) - { - // Arrange. - VectorStoreRecordPropertyModel[] properties = - [ - new VectorStoreRecordVectorPropertyModel("VectorProperty", typeof(ReadOnlyMemory)) { Dimensions = dimensions } - ]; - - // Act and assert. - Assert.Throws(() => RedisVectorStoreCollectionCreateMapping.MapToSchema(properties, useDollarPrefix: true)); - } - [Fact] public void GetSDKIndexKindThrowsOnUnsupportedIndexKind() { diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs index 315f8632e932..7895d4b09b65 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs @@ -69,7 +69,7 @@ public void BuildQueryBuildsRedisQueryWithDefaults() var model = BuildModel( [ new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) ]); // Act. @@ -93,7 +93,7 @@ public void BuildQueryBuildsRedisQueryWithCustomVectorName() var model = BuildModel( [ new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { StoragePropertyName = "storage_Vector" } + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) { StoragePropertyName = "storage_Vector" } ]); var selectFields = new string[] { "storage_Field1", "storage_Field2" }; @@ -130,7 +130,7 @@ public void BuildFilterBuildsEqualityFilter(string filterType) [ new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("Data1", typeof(string)) { StoragePropertyName = "storage_Data1" }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) ]); // Act. @@ -165,7 +165,7 @@ public void BuildFilterThrowsForInvalidValueType() [ new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("Data1", typeof(string)) { StoragePropertyName = "storage_Data1" }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) ]); // Act & Assert. 
@@ -184,7 +184,7 @@ public void BuildFilterThrowsForUnknownFieldName() [ new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("Data1", typeof(string)) { StoragePropertyName = "storage_Data1" }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) ]); // Act & Assert. diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteHotel.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteHotel.cs index 30afc4c24a81..0deb6f8ade9e 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteHotel.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteHotel.cs @@ -32,6 +32,6 @@ public class SqliteHotel() public string? Description { get; set; } /// A vector field. - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.EuclideanDistance)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.EuclideanDistance)] public ReadOnlyMemory? DescriptionEmbedding { get; set; } } diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs index d77103500b8d..3f1f3a225001 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs @@ -155,7 +155,7 @@ private static VectorStoreRecordDefinition GetRecordDefinition() new VectorStoreRecordKeyProperty("Key", typeof(TKey)), new VectorStoreRecordDataProperty("StringProperty", typeof(string)), new VectorStoreRecordDataProperty("IntProperty", typeof(int)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), } }; } @@ -186,7 +186,7 @@ private sealed class TestRecord [VectorStoreRecordData] public int? IntProperty { get; set; } - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineDistance)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineDistance)] public ReadOnlyMemory? 
FloatVector { get; set; } } #pragma warning restore CA1812 diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateDynamicDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateDynamicDataModelMapperTests.cs index f80b186a0a7a..e36990bc3630 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateDynamicDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateDynamicDataModelMapperTests.cs @@ -63,10 +63,10 @@ public sealed class WeaviateDynamicDataModelMapperTests new VectorStoreRecordDataProperty("NullableGuidDataProp", typeof(Guid?)), new VectorStoreRecordDataProperty("TagListDataProp", typeof(List)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), - new VectorStoreRecordVectorProperty("DoubleVector", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("NullableDoubleVector", typeof(ReadOnlyMemory?)) + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?), 10), + new VectorStoreRecordVectorProperty("DoubleVector", typeof(ReadOnlyMemory), 10), + new VectorStoreRecordVectorProperty("NullableDoubleVector", typeof(ReadOnlyMemory?), 10) ] }, s_jsonSerializerOptions); @@ -169,7 +169,7 @@ public void MapFromDataToStorageModelMapsNullValues() var vectorProperties = new List { - new("NullableFloatVector", typeof(ReadOnlyMemory?)) + new("NullableFloatVector", typeof(ReadOnlyMemory?), 10) }; var dataModel = new Dictionary @@ -289,7 +289,7 @@ public void MapFromStorageToDataModelMapsNullValues() var vectorProperties = new List { - new("NullableFloatVector", typeof(ReadOnlyMemory?)) + new("NullableFloatVector", typeof(ReadOnlyMemory?), 10) }; var storageModel = new JsonObject @@ -342,7 +342,7 @@ public void MapFromDataToStorageModelSkipsMissingProperties() new VectorStoreRecordKeyProperty("Key", typeof(Guid)), new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)) + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10) ] }; @@ -373,7 +373,7 @@ public void MapFromStorageToDataModelSkipsMissingProperties() new VectorStoreRecordKeyProperty("Key", typeof(Guid)), new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)) + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10) ] }; diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateHotel.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateHotel.cs index 5456ef05e907..d57084ad5100 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateHotel.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateHotel.cs @@ -44,6 +44,6 @@ public sealed record WeaviateHotel public DateTimeOffset Timestamp { get; set; } /// A vector field. - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineDistance, IndexKind: IndexKind.Hnsw)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineDistance, IndexKind = IndexKind.Hnsw)] public ReadOnlyMemory? 
DescriptionEmbedding { get; set; } } diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs index e294a332ec0c..23570ac0bf5b 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs @@ -26,7 +26,7 @@ public void ItThrowsExceptionWithInvalidIndexKind() Properties = [ new VectorStoreRecordKeyProperty("Key", typeof(Guid)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { IndexKind = "non-existent-index-kind" } + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) { IndexKind = "non-existent-index-kind" } ] }); @@ -49,7 +49,7 @@ public void ItReturnsCorrectSchemaWithValidIndexKind(string indexKind, string ex Properties = [ new VectorStoreRecordKeyProperty("Key", typeof(Guid)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { IndexKind = indexKind } + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) { IndexKind = indexKind } ] }); @@ -73,7 +73,7 @@ public void ItThrowsExceptionWithInvalidDistanceFunction() Properties = [ new VectorStoreRecordKeyProperty("Key", typeof(Guid)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { DistanceFunction = "non-existent-distance-function" } + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) { DistanceFunction = "non-existent-distance-function" } ] }); @@ -98,7 +98,7 @@ public void ItReturnsCorrectSchemaWithValidDistanceFunction(string distanceFunct Properties = [ new VectorStoreRecordKeyProperty("Key", typeof(Guid)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { DistanceFunction = distanceFunction } + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) { DistanceFunction = distanceFunction } ] }); @@ -172,7 +172,7 @@ public void ItMapsPropertyCorrectly(Type propertyType, string expectedPropertyTy [ new VectorStoreRecordKeyProperty("Key", typeof(Guid)), new VectorStoreRecordDataProperty("PropertyName", propertyType) { IsIndexed = true, IsFullTextIndexed = true }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) ] }, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }); diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs index 7083c3d8cd36..b76aacfb281e 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs @@ -43,7 +43,7 @@ public sealed class WeaviateVectorStoreRecordCollectionQueryBuilderTests new VectorStoreRecordDataProperty("HotelName", typeof(string)) { StoragePropertyName = "hotelName" }, new VectorStoreRecordDataProperty("HotelCode", typeof(string)) { StoragePropertyName = "hotelCode" }, new VectorStoreRecordDataProperty("Tags", typeof(string[])) { StoragePropertyName = "tags" }, - new VectorStoreRecordVectorProperty("DescriptionEmbedding", 
typeof(ReadOnlyMemory)) { StoragePropertyName = "descriptionEmbeddding" }, + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory), 10) { StoragePropertyName = "descriptionEmbeddding" }, ] }); diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs index 610d2df40bb5..78763fc0a59a 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs @@ -41,7 +41,7 @@ public sealed class WeaviateVectorStoreRecordMapperTests new VectorStoreRecordKeyProperty("HotelId", typeof(Guid)), new VectorStoreRecordDataProperty("HotelName", typeof(string)), new VectorStoreRecordDataProperty("Tags", typeof(List)), - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory)) + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory), 10) ] }, s_jsonSerializerOptions), diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs index 60ef5a74d030..1cac1ace9576 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs @@ -252,10 +252,7 @@ protected virtual void ProcessRecordDefinition( $"Property '{property.ModelName}' is present in the {nameof(VectorStoreRecordDefinition)} as a vector property, but the .NET property on type '{type?.Name}' has an incompatible attribute."); } - if (definitionVectorProperty.Dimensions is not null) - { - vectorProperty.Dimensions = definitionVectorProperty.Dimensions; - } + vectorProperty.Dimensions = definitionVectorProperty.Dimensions; if (definitionVectorProperty.IndexKind is not null) { diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs index 33eb88ce6802..68160987b3ec 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs @@ -12,6 +12,8 @@ namespace Microsoft.Extensions.VectorData.ConnectorSupport; [Experimental("MEVD9001")] public class VectorStoreRecordVectorPropertyModel(string modelName, Type type) : VectorStoreRecordPropertyModel(modelName, type) { + private int _dimensions; + /// /// The number of dimensions that the vector has. /// @@ -19,7 +21,20 @@ public class VectorStoreRecordVectorPropertyModel(string modelName, Type type) : /// This property is required when creating collections, but can be omitted if not using that functionality. /// If not provided when trying to create a collection, create will fail. /// - public int? Dimensions { get; set; } + public int Dimensions + { + get => this._dimensions; + + set + { + if (value <= 0) + { + throw new ArgumentOutOfRangeException(nameof(value), "Dimensions must be greater than zero."); + } + + this._dimensions = value; + } + } /// /// The kind of index to use. 
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs index c64ababda3a8..8239bb55bf51 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordDataAttribute.cs @@ -55,5 +55,5 @@ public sealed class VectorStoreRecordDataAttribute : Attribute /// /// For example, the property name might be "MyProperty" and the storage name might be "my_property". /// - public string? StoragePropertyName { get; set; } + public string? StoragePropertyName { get; init; } } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordKeyAttribute.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordKeyAttribute.cs index 318521355f1b..769c09802f15 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordKeyAttribute.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordKeyAttribute.cs @@ -19,5 +19,5 @@ public sealed class VectorStoreRecordKeyAttribute : Attribute /// /// For example, the property name might be "MyProperty" and the storage name might be "my_property". /// - public string? StoragePropertyName { get; set; } + public string? StoragePropertyName { get; init; } } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordVectorAttribute.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordVectorAttribute.cs index a69e50bd7029..229127df1ca5 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordVectorAttribute.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordAttributes/VectorStoreRecordVectorAttribute.cs @@ -16,6 +16,7 @@ public sealed class VectorStoreRecordVectorAttribute : Attribute /// /// Initializes a new instance of the class. /// + [Obsolete("This constructor is obsolete, since Dimensions is now a required parameter.", error: true)] public VectorStoreRecordVectorAttribute() { } @@ -26,6 +27,11 @@ public VectorStoreRecordVectorAttribute() /// The number of dimensions that the vector has. public VectorStoreRecordVectorAttribute(int Dimensions) { + if (Dimensions <= 0) + { + throw new ArgumentOutOfRangeException(nameof(Dimensions), "Dimensions must be greater than zero."); + } + this.Dimensions = Dimensions; } @@ -34,6 +40,7 @@ public VectorStoreRecordVectorAttribute(int Dimensions) /// /// The number of dimensions that the vector has. /// The distance function to use when comparing vectors. + [Obsolete("This constructor is obsolete. Use the constructor that takes Dimensions as a parameter and set the DistanceFunction property directly, e.g. [[VectorStoreRecordVector(Dimensions: 1536, DistanceFunction = DistanceFunction.CosineSimilarity)]]", error: true)] public VectorStoreRecordVectorAttribute(int Dimensions, string? DistanceFunction) { this.Dimensions = Dimensions; @@ -46,6 +53,7 @@ public VectorStoreRecordVectorAttribute(int Dimensions, string? DistanceFunction /// The number of dimensions that the vector has. /// The distance function to use when comparing vectors. /// The kind of index to use. + [Obsolete("This constructor is obsolete. 
Use the constructor that takes Dimensions as a parameter and set the DistanceFunction and IndexKind properties directly, e.g. [[VectorStoreRecordVector(Dimensions: 1536, DistanceFunction = DistanceFunction.CosineSimilarity, IndexKind = IndexKind.Flat)]]", error: true)] public VectorStoreRecordVectorAttribute(int Dimensions, string? DistanceFunction, string? IndexKind) { this.Dimensions = Dimensions; @@ -60,7 +68,7 @@ public VectorStoreRecordVectorAttribute(int Dimensions, string? DistanceFunction /// This property is required when creating collections, but can be omitted if not using that functionality. /// If not provided when trying to create a collection, create will fail. /// - public int? Dimensions { get; private set; } + public int Dimensions { get; private set; } /// /// Gets the kind of index to use. @@ -69,7 +77,9 @@ public VectorStoreRecordVectorAttribute(int Dimensions, string? DistanceFunction /// The default value varies by database type. See the documentation of your chosen database connector for more information. /// /// - public string? IndexKind { get; private set; } +#pragma warning disable CA1019 // Define accessors for attribute arguments: The constructor overload that contains this property is obsolete. + public string? IndexKind { get; init; } +#pragma warning restore CA1019 /// /// Gets the distance function to use when comparing vectors. @@ -78,7 +88,9 @@ public VectorStoreRecordVectorAttribute(int Dimensions, string? DistanceFunction /// The default value varies by database type. See the documentation of your chosen database connector for more information. /// /// - public string? DistanceFunction { get; private set; } +#pragma warning disable CA1019 // Define accessors for attribute arguments: The constructor overload that contains this property is obsolete. + public string? DistanceFunction { get; init; } +#pragma warning restore CA1019 /// /// Gets or sets an optional name to use for the property in storage, if different from the property name. @@ -86,5 +98,5 @@ public VectorStoreRecordVectorAttribute(int Dimensions, string? DistanceFunction /// /// For example, the property name might be "MyProperty" and the storage name might be "my_property". /// - public string? StoragePropertyName { get; set; } + public string? StoragePropertyName { get; init; } } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs index 1d1791ed555f..bc8a18034966 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs @@ -12,16 +12,31 @@ namespace Microsoft.Extensions.VectorData; /// public sealed class VectorStoreRecordVectorProperty : VectorStoreRecordProperty { + private int _dimensions; + /// /// Initializes a new instance of the class. /// /// The name of the property. /// The type of the property. + [Obsolete("This constructor is obsolete, since dimensions is now a required parameter.", error: true)] public VectorStoreRecordVectorProperty(string propertyName, Type propertyType) : base(propertyName, propertyType) { } + /// + /// Initializes a new instance of the class. + /// + /// The name of the property. + /// The type of the property. + /// The number of dimensions that the vector has. 
+ public VectorStoreRecordVectorProperty(string propertyName, Type propertyType, int dimensions) + : base(propertyName, propertyType) + { + this.Dimensions = dimensions; + } + /// /// Initializes a new instance of the class by cloning the given source. /// @@ -41,7 +56,20 @@ public VectorStoreRecordVectorProperty(VectorStoreRecordVectorProperty source) /// This property is required when creating collections, but can be omitted if not using that functionality. /// If not provided when trying to create a collection, create will fail. /// - public int? Dimensions { get; init; } + public int Dimensions + { + get => this._dimensions; + + init + { + if (value <= 0) + { + throw new ArgumentOutOfRangeException(nameof(value), "Dimensions must be greater than zero."); + } + + this._dimensions = value; + } + } /// /// Gets or sets the kind of index to use. diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs index b5fc160325a9..440ea94bb3e6 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs @@ -72,7 +72,7 @@ public AzureAISearchVectorStoreFixture() new VectorStoreRecordKeyProperty("HotelId", typeof(string)), new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsIndexed = true, IsFullTextIndexed = true }, new VectorStoreRecordDataProperty("Description", typeof(string)), - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 1536 }, + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), 1536), new VectorStoreRecordDataProperty("Tags", typeof(string[])) { IsIndexed = true }, new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool?)) { IsIndexed = true, StoragePropertyName = "parking_is_included" }, new VectorStoreRecordDataProperty("LastRenovationDate", typeof(DateTimeOffset?)) { IsIndexed = true }, diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBHotel.cs index 0127bb5405d2..bf933707041c 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBHotel.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBHotel.cs @@ -43,6 +43,6 @@ public class AzureCosmosDBMongoDBHotel public DateTime Timestamp { get; set; } /// A vector field. - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineDistance, IndexKind: IndexKind.IvfFlat)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineDistance, IndexKind = IndexKind.IvfFlat)] public ReadOnlyMemory? 
DescriptionEmbedding { get; set; } } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreFixture.cs index a56f8b41399c..9cac223fa88e 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreFixture.cs @@ -55,7 +55,7 @@ public AzureCosmosDBMongoDBVectorStoreFixture() new VectorStoreRecordDataProperty("Tags", typeof(List)), new VectorStoreRecordDataProperty("Timestamp", typeof(DateTime)), new VectorStoreRecordDataProperty("Description", typeof(string)), - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 4, IndexKind = IndexKind.IvfFlat, DistanceFunction = DistanceFunction.CosineDistance } + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), 4) { IndexKind = IndexKind.IvfFlat, DistanceFunction = DistanceFunction.CosineDistance } ] }; } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLHotel.cs index 2bf730a792f9..49b1ac8da6b2 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLHotel.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLHotel.cs @@ -45,6 +45,6 @@ public record AzureCosmosDBNoSQLHotel() public DateTimeOffset Timestamp { get; set; } /// A vector field. - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineSimilarity, IndexKind: IndexKind.Flat)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineSimilarity, IndexKind = IndexKind.Flat)] public ReadOnlyMemory? 
DescriptionEmbedding { get; set; } } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index a3e3e7c64091..7e2c0f53e522 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -459,7 +459,7 @@ private VectorStoreRecordDefinition GetTestHotelRecordDefinition() new VectorStoreRecordDataProperty("Tags", typeof(List)), new VectorStoreRecordDataProperty("Description", typeof(string)), new VectorStoreRecordDataProperty("Timestamp", typeof(DateTimeOffset)), - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 4, IndexKind = IndexKind.Flat, DistanceFunction = DistanceFunction.CosineSimilarity } + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), 4) { IndexKind = IndexKind.Flat, DistanceFunction = DistanceFunction.CosineSimilarity } ] }; } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs index 75db689d61fc..4214a74d7708 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs @@ -123,7 +123,7 @@ private static VectorStoreRecordDefinition CreateKeyWithVectorRecordDefinition(i Properties = [ new VectorStoreRecordKeyProperty("Key", typeof(TKey)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { Dimensions = vectorDimensions, DistanceFunction = distanceFunction }, + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), vectorDimensions) { DistanceFunction = distanceFunction }, ], }; diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBHotel.cs index b0bd04f2f400..a5b3fd3a09e9 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBHotel.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBHotel.cs @@ -43,6 +43,6 @@ public class MongoDBHotel public DateTime Timestamp { get; set; } /// A vector field. - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineSimilarity, IndexKind: IndexKind.IvfFlat)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineSimilarity, IndexKind = IndexKind.IvfFlat)] public ReadOnlyMemory? 
DescriptionEmbedding { get; set; } } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreFixture.cs index 9d099d0c2f91..5b6606d34652 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreFixture.cs @@ -54,7 +54,7 @@ public MongoDBVectorStoreFixture() new VectorStoreRecordDataProperty("Tags", typeof(List)), new VectorStoreRecordDataProperty("Timestamp", typeof(DateTime)), new VectorStoreRecordDataProperty("Description", typeof(string)), - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 4, IndexKind = IndexKind.IvfFlat, DistanceFunction = DistanceFunction.CosineSimilarity } + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), 4) { IndexKind = IndexKind.IvfFlat, DistanceFunction = DistanceFunction.CosineSimilarity } ] }; } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresHotel.cs index 48a8f5f36a41..1ab2619aa869 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresHotel.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresHotel.cs @@ -44,7 +44,7 @@ public record PostgresHotel() public string Description { get; set; } /// A vector field. - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.EuclideanDistance, IndexKind: IndexKind.Hnsw)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.EuclideanDistance, IndexKind = IndexKind.Hnsw)] public ReadOnlyMemory? DescriptionEmbedding { get; set; } public DateTime CreatedAt { get; set; } = DateTime.UtcNow; diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs index 09ce1a902933..ffa795f7db1f 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs @@ -515,7 +515,7 @@ public async Task ItCanUpsertAndGetEnumerableTypesAsync() new VectorStoreRecordDataProperty("Tags", typeof(List)), new VectorStoreRecordDataProperty("ListInts", typeof(List)), new VectorStoreRecordDataProperty("Description", typeof(string)), - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 4, IndexKind = IndexKind.Hnsw, DistanceFunction = distanceFunction } + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), 4) { IndexKind = IndexKind.Hnsw, DistanceFunction = distanceFunction } ] }; @@ -553,7 +553,7 @@ private sealed class RecordWithEnumerables [VectorStoreRecordKey] public int Id { get; set; } - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineDistance)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineDistance)] public ReadOnlyMemory? 
Embedding { get; set; } [VectorStoreRecordData] diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreFixture.cs index 4a10a8915e1e..839e87335f67 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreFixture.cs @@ -61,7 +61,7 @@ public QdrantVectorStoreFixture() new VectorStoreRecordDataProperty("OpeningDate", typeof(DateTimeOffset)) { IsIndexed = true }, new VectorStoreRecordDataProperty("Tags", typeof(List)) { IsIndexed = true }, new VectorStoreRecordDataProperty("Description", typeof(string)), - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = VectorDimensions, DistanceFunction = DistanceFunction.ManhattanDistance } + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), VectorDimensions) { DistanceFunction = DistanceFunction.ManhattanDistance } } }; this.HotelWithGuidIdVectorStoreRecordDefinition = new VectorStoreRecordDefinition @@ -71,7 +71,7 @@ public QdrantVectorStoreFixture() new VectorStoreRecordKeyProperty("HotelId", typeof(Guid)), new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsIndexed = true, IsFullTextIndexed = true }, new VectorStoreRecordDataProperty("Description", typeof(string)), - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = VectorDimensions, DistanceFunction = DistanceFunction.ManhattanDistance } + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), VectorDimensions) { DistanceFunction = DistanceFunction.ManhattanDistance } } }; AzureOpenAIConfiguration? embeddingsConfig = s_configuration.GetSection("AzureOpenAIEmbeddings").Get(); @@ -351,7 +351,7 @@ public record HotelInfo() public string Description { get; set; } /// A vector field. - [VectorStoreRecordVector(VectorDimensions, DistanceFunction.ManhattanDistance, IndexKind.Hnsw)] + [VectorStoreRecordVector(VectorDimensions, DistanceFunction = DistanceFunction.ManhattanDistance, IndexKind = IndexKind.Hnsw)] public ReadOnlyMemory? DescriptionEmbedding { get; set; } } @@ -374,7 +374,7 @@ public record HotelInfoWithGuidId() public string Description { get; set; } /// A vector field. - [VectorStoreRecordVector(VectorDimensions, DistanceFunction.ManhattanDistance, IndexKind.Hnsw)] + [VectorStoreRecordVector(VectorDimensions, DistanceFunction = DistanceFunction.ManhattanDistance, IndexKind = IndexKind.Hnsw)] public ReadOnlyMemory? 
DescriptionEmbedding { get; set; } } } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs index 20ace0af55f8..f34627086e8c 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs @@ -43,7 +43,7 @@ public RedisVectorStoreFixture() new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsIndexed = true }, new VectorStoreRecordDataProperty("HotelCode", typeof(int)) { IsIndexed = true }, new VectorStoreRecordDataProperty("Description", typeof(string)) { IsFullTextIndexed = true }, - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 4 }, + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), 4), new VectorStoreRecordDataProperty("Tags", typeof(string[])) { IsIndexed = true }, new VectorStoreRecordDataProperty("FTSTags", typeof(string[])) { IsFullTextIndexed = true }, new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool)) { StoragePropertyName = "parking_is_included" }, @@ -60,7 +60,7 @@ public RedisVectorStoreFixture() new VectorStoreRecordDataProperty("HotelName", typeof(string)) { IsIndexed = true }, new VectorStoreRecordDataProperty("HotelCode", typeof(int)) { IsIndexed = true }, new VectorStoreRecordDataProperty("Description", typeof(string)) { IsFullTextIndexed = true }, - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 4 }, + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), 4), new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool)) { StoragePropertyName = "parking_is_included" }, new VectorStoreRecordDataProperty("Rating", typeof(double)), } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteHotel.cs index 784736e9dd05..d7db1e61a9d7 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteHotel.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteHotel.cs @@ -32,6 +32,6 @@ public record SqliteHotel() public string? Description { get; set; } /// A vector field. - [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.EuclideanDistance)] + [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.EuclideanDistance)] public ReadOnlyMemory? 
DescriptionEmbedding { get; set; }
 }

diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs
index bbe3c6a87431..33189c600b0f 100644
--- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs
@@ -539,7 +539,7 @@ public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperWithStringKeyAsync(
             new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool)) { StoragePropertyName = "parking_is_included" },
             new VectorStoreRecordDataProperty("HotelRating", typeof(float)),
             new VectorStoreRecordDataProperty("Description", typeof(string)),
-            new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory<float>?)) { Dimensions = 4, IndexKind = IndexKind.IvfFlat, DistanceFunction = DistanceFunction.CosineDistance }
+            new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory<float>?), 4) { IndexKind = IndexKind.IvfFlat, DistanceFunction = DistanceFunction.CosineDistance }
         ]
     };

@@ -574,13 +574,13 @@ private sealed class RecordWithSupportedDistanceFunctions
         [VectorStoreRecordKey]
         public ulong Id { get; set; }

-        [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineDistance)]
+        [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineDistance)]
         public ReadOnlyMemory<float>? Embedding1 { get; set; }

-        [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.EuclideanDistance)]
+        [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.EuclideanDistance)]
         public ReadOnlyMemory<float>? Embedding2 { get; set; }

-        [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.ManhattanDistance)]
+        [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.ManhattanDistance)]
         public ReadOnlyMemory<float>? Embedding3 { get; set; }
     }
 #pragma warning restore CA1812
diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateHotel.cs
index e2442e49057e..1338eceef6fe 100644
--- a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateHotel.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateHotel.cs
@@ -44,6 +44,6 @@ public sealed record WeaviateHotel
     public DateTimeOffset Timestamp { get; set; }

     /// <summary>A vector field.</summary>
-    [VectorStoreRecordVector(Dimensions: 4, DistanceFunction: DistanceFunction.CosineDistance, IndexKind: IndexKind.Hnsw)]
+    [VectorStoreRecordVector(Dimensions: 4, DistanceFunction = DistanceFunction.CosineDistance, IndexKind = IndexKind.Hnsw)]
     public ReadOnlyMemory<float>?
DescriptionEmbedding { get; set; } } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs index 27e63ac000b4..ae13b799fa04 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs @@ -473,7 +473,7 @@ private VectorStoreRecordDefinition GetTestHotelRecordDefinition() new VectorStoreRecordDataProperty("Tags", typeof(List)), new VectorStoreRecordDataProperty("Description", typeof(string)), new VectorStoreRecordDataProperty("Timestamp", typeof(DateTimeOffset)), - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 4, IndexKind = IndexKind.Hnsw, DistanceFunction = DistanceFunction.CosineDistance } + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), 4) {IndexKind = IndexKind.Hnsw, DistanceFunction = DistanceFunction.CosineDistance } ] }; } diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeAllTypes.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeAllTypes.cs index 54d98f72c251..be73146024a4 100644 --- a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeAllTypes.cs +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeAllTypes.cs @@ -48,7 +48,7 @@ public record PineconeAllTypes() [VectorStoreRecordData] public List? NullableStringList { get; set; } - [VectorStoreRecordVector(Dimensions: 8, DistanceFunction: DistanceFunction.DotProductSimilarity)] + [VectorStoreRecordVector(Dimensions: 8, DistanceFunction = DistanceFunction.DotProductSimilarity)] public ReadOnlyMemory? 
Embedding { get; set; } internal void AssertEqual(PineconeAllTypes other) @@ -95,7 +95,7 @@ internal static VectorStoreRecordDefinition GetRecordDefinition() new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableStringArray), typeof(string[])), new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.StringList), typeof(List)), new VectorStoreRecordDataProperty(nameof(PineconeAllTypes.NullableStringList), typeof(List)), - new VectorStoreRecordVectorProperty(nameof(PineconeAllTypes.Embedding), typeof(ReadOnlyMemory?)) { Dimensions = 8, DistanceFunction = Microsoft.Extensions.VectorData.DistanceFunction.DotProductSimilarity } + new VectorStoreRecordVectorProperty(nameof(PineconeAllTypes.Embedding), typeof(ReadOnlyMemory?), 8) { DistanceFunction = Microsoft.Extensions.VectorData.DistanceFunction.DotProductSimilarity } ] }; } diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs index 8d8bd68a6844..47324b0672e1 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs @@ -113,7 +113,7 @@ public void CreateTable(bool ifNotExists) new VectorStoreRecordKeyProperty("id", typeof(long)), new VectorStoreRecordDataProperty("simpleName", typeof(string)), new VectorStoreRecordDataProperty("with space", typeof(int)) { IsIndexed = true }, - new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) { Dimensions = 10 } + new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory), 10) ]); using SqlConnection connection = CreateConnection(); @@ -149,7 +149,7 @@ public void MergeIntoSingle() new VectorStoreRecordKeyProperty("id", typeof(long)), new VectorStoreRecordDataProperty("simpleString", typeof(string)), new VectorStoreRecordDataProperty("simpleInt", typeof(int)), - new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) { Dimensions = 10 } + new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory), 10) ]); using SqlConnection connection = CreateConnection(); @@ -194,7 +194,7 @@ public void MergeIntoMany() new VectorStoreRecordKeyProperty("id", typeof(long)), new VectorStoreRecordDataProperty("simpleString", typeof(string)), new VectorStoreRecordDataProperty("simpleInt", typeof(int)), - new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) { Dimensions = 10 } + new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory), 10) ]); Dictionary[] records = @@ -292,7 +292,7 @@ public void SelectSingle() new VectorStoreRecordKeyProperty("id", typeof(long)), new VectorStoreRecordDataProperty("name", typeof(string)), new VectorStoreRecordDataProperty("age", typeof(int)), - new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) { Dimensions = 10 } + new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory), 10) ]); using SqlConnection connection = CreateConnection(); @@ -317,7 +317,7 @@ public void SelectMany() new VectorStoreRecordKeyProperty("id", typeof(long)), new VectorStoreRecordDataProperty("name", typeof(string)), new VectorStoreRecordDataProperty("age", typeof(int)), - new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory)) { Dimensions = 10 } + new VectorStoreRecordVectorProperty("embedding", typeof(ReadOnlyMemory), 10) ]); long[] keys = [123L, 456L, 
789L]; diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs index 611094aecb65..ff61fecdea42 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs @@ -133,7 +133,7 @@ protected virtual async Task> GetNon new VectorStoreRecordKeyProperty(nameof(SimpleRecord.Id), typeof(TKey)) { StoragePropertyName = "key" }, new VectorStoreRecordDataProperty(nameof(SimpleRecord.Text), typeof(string)) { StoragePropertyName = "text" }, new VectorStoreRecordDataProperty(nameof(SimpleRecord.Number), typeof(int)) { StoragePropertyName = "number" }, - new VectorStoreRecordVectorProperty(nameof(SimpleRecord.Floats), typeof(ReadOnlyMemory)) { Dimensions = 10 } + new VectorStoreRecordVectorProperty(nameof(SimpleRecord.Floats), typeof(ReadOnlyMemory), 10) ] }; diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs index 085890078236..ec4b0d9b7ef1 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs @@ -445,9 +445,8 @@ protected override VectorStoreRecordDefinition GetRecordDefinition() Properties = [ new VectorStoreRecordKeyProperty(nameof(FilterRecord.Key), typeof(TKey)), - new VectorStoreRecordVectorProperty(nameof(FilterRecord.Vector), typeof(ReadOnlyMemory?)) + new VectorStoreRecordVectorProperty(nameof(FilterRecord.Vector), typeof(ReadOnlyMemory?), 3) { - Dimensions = 3, DistanceFunction = this.DistanceFunction, IndexKind = this.IndexKind }, diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs index 224e8eeb7320..7ff11de3c7dd 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs @@ -186,7 +186,7 @@ protected override VectorStoreRecordDefinition GetRecordDefinition() new VectorStoreRecordKeyProperty("Key", typeof(TKey)), new VectorStoreRecordDataProperty("Text", typeof(string)) { IsFullTextIndexed = true }, new VectorStoreRecordDataProperty("Code", typeof(int)) { IsIndexed = true }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { Dimensions = 4, IndexKind = this.IndexKind }, + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 4) { IndexKind = this.IndexKind }, } }; @@ -240,7 +240,7 @@ protected override VectorStoreRecordDefinition GetRecordDefinition() new VectorStoreRecordDataProperty("Text1", typeof(string)) { IsFullTextIndexed = true }, new VectorStoreRecordDataProperty("Text2", typeof(string)) { IsFullTextIndexed = true }, new VectorStoreRecordDataProperty("Code", typeof(int)) { IsIndexed = true }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { Dimensions = 4, IndexKind = 
this.IndexKind },
+                new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory<float>), 4) { IndexKind = this.IndexKind },
             }
         };

diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/DynamicDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/DynamicDataModelFixture.cs
index 1fd5f6b700be..7e99e4580649 100644
--- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/DynamicDataModelFixture.cs
+++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/DynamicDataModelFixture.cs
@@ -20,10 +20,7 @@ protected override VectorStoreRecordDefinition GetRecordDefinition()
             new VectorStoreRecordKeyProperty(KeyPropertyName, typeof(TKey)),
             new VectorStoreRecordDataProperty(StringPropertyName, typeof(string)),
             new VectorStoreRecordDataProperty(IntegerPropertyName, typeof(int)),
-            new VectorStoreRecordVectorProperty(EmbeddingPropertyName, typeof(ReadOnlyMemory<float>))
-            {
-                Dimensions = DimensionCount
-            }
+            new VectorStoreRecordVectorProperty(EmbeddingPropertyName, typeof(ReadOnlyMemory<float>), DimensionCount)
         ]
     };

diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs
index e7c8fb6857ca..21ace375aae9 100644
--- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs
+++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs
@@ -46,9 +46,8 @@ protected override VectorStoreRecordDefinition GetRecordDefinition()
         Properties =
         [
             new VectorStoreRecordKeyProperty(nameof(SimpleRecord.Id), typeof(TKey)),
-            new VectorStoreRecordVectorProperty(nameof(SimpleRecord.Floats), typeof(ReadOnlyMemory<float>?))
+            new VectorStoreRecordVectorProperty(nameof(SimpleRecord.Floats), typeof(ReadOnlyMemory<float>?), SimpleRecord.DimensionCount)
             {
-                Dimensions = SimpleRecord.DimensionCount,
                 DistanceFunction = this.DistanceFunction,
                 IndexKind = this.IndexKind
             },
diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs
index 1f4c3537b3bd..9724256d40ee 100644
--- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs
+++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs
@@ -165,9 +165,8 @@ private VectorStoreRecordDefinition GetRecordDefinition(string distanceFunction)
         Properties =
         [
             new VectorStoreRecordKeyProperty(nameof(SearchRecord.Key), typeof(TKey)),
-            new VectorStoreRecordVectorProperty(nameof(SearchRecord.Vector), typeof(ReadOnlyMemory<float>))
+            new VectorStoreRecordVectorProperty(nameof(SearchRecord.Vector), typeof(ReadOnlyMemory<float>), 4)
             {
-                Dimensions = 4,
                 DistanceFunction = distanceFunction,
                 IndexKind = this.IndexKind
             },

From 4f80c122f04b614e97407e67ba9c0145651ede2b Mon Sep 17 00:00:00 2001
From: Adam Sitnik
Date: Sat, 12 Apr 2025 15:59:54 +0200
Subject: [PATCH 42/63] .Net MEVD Replace VectorSearchResults with IAsyncEnumerable (#11486)

fixes #11048
---
 .../Caching/SemanticCachingWithFilters.cs     |  4 +-
 ...extEmbeddingVectorStoreRecordCollection.cs | 10 ++-
 .../Memory/VectorStore_EmbeddingGeneration.cs |  3 +-
 ...Store_HybridSearch_Simple_AzureAISearch.cs |  9 +--
 .../Memory/VectorStore_Langchain_Interop.cs   |  3 +-
 ...torStore_VectorSearch_MultiStore_Common.cs |  9 +--
 .../VectorStore_VectorSearch_MultiVector.cs   | 10 ++-
 .../Memory/VectorStore_VectorSearch_Paging.cs |  4 +-
 .../Memory/VectorStore_VectorSearch_Simple.cs |  9 +--
 .../Memory/VolatileVectorStore_LoadData.cs    |  6 +-
 .../Optimization/FrugalGPTWithFilters.cs      |  4 +-
 .../PluginSelectionWithFilters.cs             |  4 +-
 .../Concepts/Search/VectorStore_TextSearch.cs |  8 ++-
 .../MCPServer/Program.cs                      |  4 +-
 .../Step2_Vector_Search.cs                    | 10 ++-
 .../Step5_Use_DynamicDataModel.cs             |  5 +-
 .../Step6_Use_CustomMapper.cs                 |  5 +-
 ...ISearchVectorStoreRecordCollectionTests.cs |  4 +-
 ...MongoDBVectorStoreRecordCollectionTests.cs | 15 ++--
 ...DBNoSQLVectorStoreRecordCollectionTests.cs |  8 +--
 ...nMemoryVectorStoreRecordCollectionTests.cs | 39 ++++------
 ...zureAISearchVectorStoreRecordCollection.cs | 39 +++++------
 ...mosDBMongoDBVectorStoreRecordCollection.cs |  9 ++-
 ...osmosDBNoSQLVectorStoreRecordCollection.cs | 10 ++-
 .../InMemoryVectorStoreRecordCollection.cs    | 12 +---
 .../MongoDBVectorStoreRecordCollection.cs     | 24 +++++--
 .../PineconeVectorStoreRecordCollection.cs    | 12 ++--
 .../IPostgresVectorStoreDbClient.cs           |  9 +--
 .../PostgresVectorStoreDbClient.cs            | 12 ++--
 .../PostgresVectorStoreRecordCollection.cs    | 70 ++++++++-----------
 .../PostgresVectorStoreUtils.cs               |  2 +-
 .../QdrantVectorStoreRecordCollection.cs      | 14 ++--
 ...RedisHashSetVectorStoreRecordCollection.cs |  7 +-
 .../RedisJsonVectorStoreRecordCollection.cs   |  7 +-
 .../SqlServerVectorStoreRecordCollection.cs   | 35 +++++-----
 .../SqliteVectorStoreRecordCollection.cs      |  8 +--
 .../WeaviateVectorStoreRecordCollection.cs    | 46 ++++++------
 ...MongoDBVectorStoreRecordCollectionTests.cs | 15 ++--
 .../QdrantVectorStoreRecordCollectionTests.cs |  5 +-
 ...HashSetVectorStoreRecordCollectionTests.cs |  5 +-
 ...disJsonVectorStoreRecordCollectionTests.cs |  5 +-
 .../SqliteVectorStoreRecordCollectionTests.cs |  3 +-
 ...eaviateVectorStoreRecordCollectionTests.cs | 13 ++--
 .../VectorSearch/IKeywordHybridSearch.cs      |  3 +-
 .../VectorSearch/IVectorizableTextSearch.cs   |  4 +-
 .../VectorSearch/IVectorizedSearch.cs         |  4 +-
 .../VectorSearch/VectorSearchOptions.cs       |  1 +
 .../VectorSearch/VectorSearchResults.cs       | 32 ---------
 ...ISearchVectorStoreRecordCollectionTests.cs | 15 ++--
 ...MongoDBVectorStoreRecordCollectionTests.cs | 13 ++--
 ...DBNoSQLVectorStoreRecordCollectionTests.cs | 13 ++--
 .../BaseVectorStoreRecordCollectionTests.cs   |  3 +-
 ...MongoDBVectorStoreRecordCollectionTests.cs | 13 ++--
 ...ostgresVectorStoreRecordCollectionTests.cs | 18 ++---
 .../QdrantVectorStoreRecordCollectionTests.cs | 10 ++-
 ...HashSetVectorStoreRecordCollectionTests.cs | 20 +++---
 ...disJsonVectorStoreRecordCollectionTests.cs | 20 +++---
 .../SqliteVectorStoreRecordCollectionTests.cs | 18 ++---
 ...eaviateVectorStoreRecordCollectionTests.cs | 21 +++---
 .../Data/BaseVectorStoreTextSearchTests.cs    |  8 ++-
 .../Search/MockVectorizableTextSearch.cs      | 14 +---
 .../Data/TextSearch/VectorStoreTextSearch.cs  | 32 +++++----
 .../Data/VectorStoreTextSearchTestBase.cs     | 10 ++-
 .../SqlServerVectorStoreTests.cs              |  8 +--
 .../Filter/BasicFilterTests.cs                | 39 +++++------
 ...rdVectorizedHybridSearchComplianceTests.cs | 22 ++----
 .../Support/TestStore.cs                      |  4 +-
 ...orSearchDistanceFunctionComplianceTests.cs | 12 ++--
 68 files changed, 389 insertions(+), 503 deletions(-)
 delete mode 100644 dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResults.cs

diff --git a/dotnet/samples/Concepts/Caching/SemanticCachingWithFilters.cs
b/dotnet/samples/Concepts/Caching/SemanticCachingWithFilters.cs index ce57d908597c..5629f6cfb8a7 100644 --- a/dotnet/samples/Concepts/Caching/SemanticCachingWithFilters.cs +++ b/dotnet/samples/Concepts/Caching/SemanticCachingWithFilters.cs @@ -199,8 +199,8 @@ public async Task OnPromptRenderAsync(PromptRenderContext context, Func> UpsertAsync(IEnumerable records, } /// - public Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { return this._decoratedVectorStoreRecordCollection.VectorizedSearchAsync(vector, top, options, cancellationToken); } @@ -127,10 +128,13 @@ public IAsyncEnumerable GetAsync(Expression> filter => this._decoratedVectorStoreRecordCollection.GetAsync(filter, top, options, cancellationToken); /// - public async Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async IAsyncEnumerable> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { var embeddingValue = await this._textEmbeddingGenerationService.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); - return await this.VectorizedSearchAsync(embeddingValue, top, options, cancellationToken).ConfigureAwait(false); + await foreach (var result in this.VectorizedSearchAsync(embeddingValue, top, options, cancellationToken)) + { + yield return result; + } } /// diff --git a/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs b/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs index b79b41e2f777..03527556a2d0 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs @@ -50,8 +50,7 @@ public async Task UseEmbeddingGenerationViaDecoratorAsync() // Search the collection using a vectorizable text search. var search = collection as IVectorizableTextSearch; var searchString = "What is an Application Programming Interface"; - var searchResult = await search!.VectorizableTextSearchAsync(searchString, top: 1); - var resultRecords = await searchResult.Results.ToListAsync(); + var resultRecords = await search!.VectorizableTextSearchAsync(searchString, top: 1).ToListAsync(); Console.WriteLine("Search string: " + searchString); Console.WriteLine("Result: " + resultRecords.First().Record.Definition); diff --git a/dotnet/samples/Concepts/Memory/VectorStore_HybridSearch_Simple_AzureAISearch.cs b/dotnet/samples/Concepts/Memory/VectorStore_HybridSearch_Simple_AzureAISearch.cs index 94236168e06c..1ce7b2e87be0 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_HybridSearch_Simple_AzureAISearch.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_HybridSearch_Simple_AzureAISearch.cs @@ -56,8 +56,7 @@ public async Task IngestDataAndUseHybridSearch() // Search the collection using a vector search. 
var searchString = "What is an Application Programming Interface"; var searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - var searchResult = await hybridSearchCollection.HybridSearchAsync(searchVector, ["Application", "Programming", "Interface"], top: 1); - var resultRecords = await searchResult.Results.ToListAsync(); + var resultRecords = await hybridSearchCollection.HybridSearchAsync(searchVector, ["Application", "Programming", "Interface"], top: 1).ToListAsync(); Console.WriteLine("Search string: " + searchString); Console.WriteLine("Result: " + resultRecords.First().Record.Definition); @@ -66,8 +65,7 @@ public async Task IngestDataAndUseHybridSearch() // Search the collection using a vector search. searchString = "What is Retrieval Augmented Generation"; searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - searchResult = await hybridSearchCollection.HybridSearchAsync(searchVector, ["Retrieval", "Augmented", "Generation"], top: 1); - resultRecords = await searchResult.Results.ToListAsync(); + resultRecords = await hybridSearchCollection.HybridSearchAsync(searchVector, ["Retrieval", "Augmented", "Generation"], top: 1).ToListAsync(); Console.WriteLine("Search string: " + searchString); Console.WriteLine("Result: " + resultRecords.First().Record.Definition); @@ -76,8 +74,7 @@ public async Task IngestDataAndUseHybridSearch() // Search the collection using a vector search with pre-filtering. searchString = "What is Retrieval Augmented Generation"; searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - searchResult = await hybridSearchCollection.HybridSearchAsync(searchVector, ["Retrieval", "Augmented", "Generation"], top: 3, new() { Filter = g => g.Category == "External Definitions" }); - resultRecords = await searchResult.Results.ToListAsync(); + resultRecords = await hybridSearchCollection.HybridSearchAsync(searchVector, ["Retrieval", "Augmented", "Generation"], top: 3, new() { Filter = g => g.Category == "External Definitions" }).ToListAsync(); Console.WriteLine("Search string: " + searchString); Console.WriteLine("Number of results: " + resultRecords.Count); diff --git a/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs b/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs index e48c3700beff..43bfbd4cca07 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs @@ -96,8 +96,7 @@ private async Task ReadDataFromCollectionAsync(IVectorStore vectorStore, string // Search the data set. 
var searchString = "I'm looking for an animal that is loyal and will make a great companion"; var searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - var searchResult = await collection.VectorizedSearchAsync(searchVector, top: 1); - var resultRecords = await searchResult.Results.ToListAsync(); + var resultRecords = await collection.VectorizedSearchAsync(searchVector, top: 1).ToListAsync(); this.Output.WriteLine("Search string: " + searchString); this.Output.WriteLine("Source: " + resultRecords.First().Record.Source); diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs index 7ff84ecd94e5..f5abe719105c 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs @@ -51,8 +51,7 @@ public async Task IngestDataAndSearchAsync(string collectionName, Func(string collectionName, Func(string collectionName, Func g.Category == "External Definitions" }); - resultRecords = await searchResult.Results.ToListAsync(); + resultRecords = await collection.VectorizedSearchAsync(searchVector, top: 3, new() { Filter = g => g.Category == "External Definitions" }).ToListAsync(); output.WriteLine("Search string: " + searchString); output.WriteLine("Number of results: " + resultRecords.Count); diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs index 44048302f051..c0ffcdab3a9a 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs @@ -54,12 +54,11 @@ public async Task VectorSearchWithMultiVectorRecordAsync() // Search the store using the description embedding. var searchString = "I am looking for a reasonably priced coffee maker"; var searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - var searchResult = await collection.VectorizedSearchAsync( + var resultRecords = await collection.VectorizedSearchAsync( searchVector, top: 1, new() { VectorProperty = r => r.DescriptionEmbedding - }); - var resultRecords = await searchResult.Results.ToListAsync(); + }).ToListAsync(); WriteLine("Search string: " + searchString); WriteLine("Result: " + resultRecords.First().Record.Description); @@ -69,14 +68,13 @@ public async Task VectorSearchWithMultiVectorRecordAsync() // Search the store using the feature list embedding. 
searchString = "I am looking for a handheld vacuum cleaner that will remove pet hair"; searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - searchResult = await collection.VectorizedSearchAsync( + resultRecords = await collection.VectorizedSearchAsync( searchVector, top: 1, new() { VectorProperty = r => r.FeatureListEmbedding - }); - resultRecords = await searchResult.Results.ToListAsync(); + }).ToListAsync(); WriteLine("Search string: " + searchString); WriteLine("Result: " + resultRecords.First().Record.Description); diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Paging.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Paging.cs index 8c825df3e59a..50315134a965 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Paging.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Paging.cs @@ -47,7 +47,7 @@ public async Task VectorSearchWithPagingAsync() while (moreResults) { // Get the next page of results by asking for 10 results, and using 'Skip' to skip the results from the previous pages. - var currentPageResults = await collection.VectorizedSearchAsync( + var currentPageResults = collection.VectorizedSearchAsync( searchVector, top: 10, new() @@ -57,7 +57,7 @@ public async Task VectorSearchWithPagingAsync() // Print the results. var pageCount = 0; - await foreach (var result in currentPageResults.Results) + await foreach (var result in currentPageResults) { Console.WriteLine($"Key: {result.Record.Key}, Text: {result.Record.Text}"); pageCount++; diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs index ae02ebde075b..650026e4e9c9 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs @@ -50,8 +50,7 @@ public async Task ExampleAsync() // Search the collection using a vector search. var searchString = "What is an Application Programming Interface"; var searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - var searchResult = await collection.VectorizedSearchAsync(searchVector, top: 1); - var resultRecords = await searchResult.Results.ToListAsync(); + var resultRecords = await collection.VectorizedSearchAsync(searchVector, top: 1).ToListAsync(); Console.WriteLine("Search string: " + searchString); Console.WriteLine("Result: " + resultRecords.First().Record.Definition); @@ -60,8 +59,7 @@ public async Task ExampleAsync() // Search the collection using a vector search. searchString = "What is Retrieval Augmented Generation"; searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - searchResult = await collection.VectorizedSearchAsync(searchVector, top: 1); - resultRecords = await searchResult.Results.ToListAsync(); + resultRecords = await collection.VectorizedSearchAsync(searchVector, top: 1).ToListAsync(); Console.WriteLine("Search string: " + searchString); Console.WriteLine("Result: " + resultRecords.First().Record.Definition); @@ -70,8 +68,7 @@ public async Task ExampleAsync() // Search the collection using a vector search with pre-filtering. 
searchString = "What is Retrieval Augmented Generation"; searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - searchResult = await collection.VectorizedSearchAsync(searchVector, top: 3, new() { Filter = g => g.Category == "External Definitions" }); - resultRecords = await searchResult.Results.ToListAsync(); + resultRecords = await collection.VectorizedSearchAsync(searchVector, top: 3, new() { Filter = g => g.Category == "External Definitions" }).ToListAsync(); Console.WriteLine("Search string: " + searchString); Console.WriteLine("Number of results: " + resultRecords.Count); diff --git a/dotnet/samples/Concepts/Memory/VolatileVectorStore_LoadData.cs b/dotnet/samples/Concepts/Memory/VolatileVectorStore_LoadData.cs index fd5134c245f1..ac7f71b37c2a 100644 --- a/dotnet/samples/Concepts/Memory/VolatileVectorStore_LoadData.cs +++ b/dotnet/samples/Concepts/Memory/VolatileVectorStore_LoadData.cs @@ -71,8 +71,7 @@ static DataModel CreateRecord(string text, ReadOnlyMemory embedding) // Search the collection using a vector search. var searchString = "What is the Semantic Kernel?"; var searchVector = await embeddingGenerationService.GenerateEmbeddingAsync(searchString); - var searchResult = await vectorSearch!.VectorizedSearchAsync(searchVector, top: 1); - var resultRecords = await searchResult.Results.ToListAsync(); + var resultRecords = await vectorSearch!.VectorizedSearchAsync(searchVector, top: 1).ToListAsync(); Console.WriteLine("Search string: " + searchString); Console.WriteLine("Result: " + resultRecords.First().Record.Text); @@ -116,8 +115,7 @@ static DataModel CreateRecord(TextSearchResult searchResult, ReadOnlyMemory l.Record).ToList(); + var topNExamples = (await collection.VectorizedSearchAsync(requestEmbedding, top: TopN, cancellationToken: context.CancellationToken) + .ToListAsync(context.CancellationToken)).Select(l => l.Record).ToList(); // Override arguments to use only top N examples, which will be sent to LLM. context.Arguments["Examples"] = topNExamples.Select(l => l.Example); diff --git a/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs b/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs index 0b83daced50b..81d75ae649e9 100644 --- a/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs +++ b/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs @@ -298,8 +298,8 @@ public async Task> GetBestFunctionsAsync( await collection.CreateCollectionIfNotExistsAsync(cancellationToken); // Find best functions to call for original request. - var searchResults = await collection.VectorizedSearchAsync(requestEmbedding, top: numberOfBestFunctions, cancellationToken: cancellationToken); - var recordKeys = (await searchResults.Results.ToListAsync(cancellationToken)).Select(l => l.Record.Id); + var recordKeys = (await collection.VectorizedSearchAsync(requestEmbedding, top: numberOfBestFunctions, cancellationToken: cancellationToken) + .ToListAsync(cancellationToken)).Select(l => l.Record.Id); return plugins .SelectMany(plugin => plugin) diff --git a/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs b/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs index 4015bd1bb209..fc0e6e2bf0ac 100644 --- a/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs +++ b/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
+using System.Runtime.CompilerServices; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.InMemory; using Microsoft.SemanticKernel.Connectors.OpenAI; @@ -145,11 +146,14 @@ internal static async Task> CreateCo private sealed class VectorizedSearchWrapper(IVectorizedSearch vectorizedSearch, ITextEmbeddingGenerationService textEmbeddingGeneration) : IVectorizableTextSearch { /// - public async Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async IAsyncEnumerable> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { var vectorizedQuery = await textEmbeddingGeneration!.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); - return await vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, top, options, cancellationToken); + await foreach (var result in vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, top, options, cancellationToken)) + { + yield return result; + } } /// diff --git a/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Program.cs b/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Program.cs index c5928d02b7b3..0c999b1b6bea 100644 --- a/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Program.cs +++ b/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Program.cs @@ -119,12 +119,12 @@ static TextDataModel CreateRecord(string text, ReadOnlyMemory embedding) ReadOnlyMemory promptEmbedding = await embeddingGenerationService.GenerateEmbeddingAsync(prompt, cancellationToken: cancellationToken); // Retrieve top three matching records from the vector store - VectorSearchResults result = await vsCollection.VectorizedSearchAsync(promptEmbedding, top: 3, cancellationToken: cancellationToken); + var result = vsCollection.VectorizedSearchAsync(promptEmbedding, top: 3, cancellationToken: cancellationToken); // Return the records as resource contents List contents = []; - await foreach (var record in result.Results) + await foreach (var record in result) { contents.Add(new TextResourceContents() { diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs b/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs index 80a0a44c2365..5a89940fef5c 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs @@ -43,10 +43,9 @@ internal static async Task> SearchVectorStoreAsync( var searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); // Search the store and get the single most relevant result. - var searchResult = await collection.VectorizedSearchAsync( + var searchResultItems = await collection.VectorizedSearchAsync( searchVector, - top: 1); - var searchResultItems = await searchResult.Results.ToListAsync(); + top: 1).ToListAsync(); return searchResultItems.First(); } @@ -63,14 +62,13 @@ public async Task SearchAnInMemoryVectorStoreWithFilteringAsync() var searchVector = await fixture.TextEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); // Search the store with a filter and get the single most relevant result. 
- var searchResult = await collection.VectorizedSearchAsync( + var searchResultItems = await collection.VectorizedSearchAsync( searchVector, top: 1, new() { Filter = g => g.Category == "AI" - }); - var searchResultItems = await searchResult.Results.ToListAsync(); + }).ToListAsync(); // Write the search result with its score to the console. Console.WriteLine(searchResultItems.First().Record.Definition); diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs b/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs index 0b97a03dfd32..d2f01e07b6e5 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs @@ -56,10 +56,9 @@ public async Task SearchAVectorStoreWithGenericDataModelAsync() var searchVector = await fixture.TextEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); // Search the generic data model collection and get the single most relevant result. - var searchResult = await dynamicDataModelCollection.VectorizedSearchAsync( + var searchResultItems = await dynamicDataModelCollection.VectorizedSearchAsync( searchVector, - top: 1); - var searchResultItems = await searchResult.Results.ToListAsync(); + top: 1).ToListAsync(); // Write the search result with its score to the console. // Note that here we can loop through all the properties diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs b/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs index 7df2189ccd48..9600ef839bb1 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs @@ -78,10 +78,9 @@ await collection.UpsertAsync(new ComplexGlossary var searchVector = await fixture.TextEmbeddingGenerationService.GenerateEmbeddingAsync("How do two software applications interact with another?"); // Search the vector store. - var searchResult = await collection.VectorizedSearchAsync( + var searchResultItem = await collection.VectorizedSearchAsync( searchVector, - top: 1); - var searchResultItem = await searchResult.Results.FirstAsync(); + top: 1).FirstAsync(); // Write the search result with its score to the console. Console.WriteLine(searchResultItem.Record.Metadata.Term); diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs index e21da5dc4372..e6eea1cffd7b 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -578,7 +578,7 @@ public async Task CanSearchWithVectorAndFilterAsync() OldFilter = filter, VectorProperty = record => record.Vector1 }, - this._testCancellationToken); + this._testCancellationToken).ToListAsync(); // Assert. this._searchClientMock.Verify( @@ -620,7 +620,7 @@ public async Task CanSearchWithTextAndFilterAsync() OldFilter = filter, VectorProperty = record => record.Vector1 }, - this._testCancellationToken); + this._testCancellationToken).ToListAsync(); // Assert. 
this._searchClientMock.Verify( diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 0b466bafb47c..3741730ca726 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -581,13 +581,11 @@ public async Task VectorizedSearchThrowsExceptionWithInvalidVectorTypeAsync(obje // Act & Assert if (exceptionExpected) { - await Assert.ThrowsAsync(async () => await sut.VectorizedSearchAsync(vector, top: 3)); + await Assert.ThrowsAsync(async () => await sut.VectorizedSearchAsync(vector, top: 3).ToListAsync()); } else { - var actual = await sut.VectorizedSearchAsync(vector, top: 3); - - Assert.NotNull(actual); + Assert.NotNull(await sut.VectorizedSearchAsync(vector, top: 3).FirstOrDefaultAsync()); } } @@ -649,10 +647,10 @@ public async Task VectorizedSearchUsesValidQueryAsync( var actual = await sut.VectorizedSearchAsync(vector, top: actualTop, new() { VectorProperty = vectorSelector, - }); + }).FirstOrDefaultAsync(); // Assert - Assert.NotNull(await actual.Results.FirstOrDefaultAsync()); + Assert.NotNull(actual); this._mockMongoCollection.Verify(l => l.AggregateAsync( It.Is>(pipeline => @@ -674,7 +672,7 @@ public async Task VectorizedSearchThrowsExceptionWithNonExistentVectorPropertyNa var options = new MEVD.VectorSearchOptions { VectorProperty = r => "non-existent-property" }; // Act & Assert - await Assert.ThrowsAsync(async () => await (await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3, options)).Results.FirstOrDefaultAsync()); + await Assert.ThrowsAsync(async () => await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3, options).FirstOrDefaultAsync()); } [Fact] @@ -688,10 +686,9 @@ public async Task VectorizedSearchReturnsRecordWithScoreAsync() "collection"); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3); + var result = await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3).FirstOrDefaultAsync(); // Assert - var result = await actual.Results.FirstOrDefaultAsync(); Assert.NotNull(result); Assert.Equal("key", result.Record.HotelId); Assert.Equal("Test Name", result.Record.HotelName); diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 9cc4a635189c..34f4dcd429d4 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -606,9 +606,7 @@ public async Task VectorizedSearchReturnsValidRecordAsync() "collection"); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3); - - var results = await actual.Results.ToListAsync(); + var results = await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3).ToListAsync(); var result = results[0]; // Assert @@ -628,7 +626,7 @@ public async Task VectorizedSearchWithUnsupportedVectorTypeThrowsExceptionAsync( // Act & 
Assert await Assert.ThrowsAsync(async () => - await (await sut.VectorizedSearchAsync(new List([1, 2, 3]), top: 3)).Results.ToListAsync()); + await sut.VectorizedSearchAsync(new List([1, 2, 3]), top: 3).ToListAsync()); } [Fact] @@ -643,7 +641,7 @@ public async Task VectorizedSearchWithNonExistentVectorPropertyNameThrowsExcepti // Act & Assert await Assert.ThrowsAsync(async () => - await (await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3, searchOptions)).Results.ToListAsync()); + await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3, searchOptions).ToListAsync()); } public static TheoryData, string, bool> CollectionExistsData => new() diff --git a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs index b30ec699d192..545db4feb005 100644 --- a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs @@ -291,16 +291,13 @@ public async Task CanSearchWithVectorAsync(bool useDefinition, TKey testKe var sut = this.CreateRecordCollection(useDefinition); // Act - var actual = await sut.VectorizedSearchAsync( + var actualResults = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), top: 3, new() { IncludeVectors = true }, - this._testCancellationToken); + this._testCancellationToken).ToListAsync(); // Assert - Assert.NotNull(actual); - Assert.Null(actual.TotalCount); - var actualResults = await actual.Results.ToListAsync(); Assert.Equal(2, actualResults.Count); Assert.Equal(testKey1, actualResults[0].Record.Key); Assert.Equal($"data {testKey1}", actualResults[0].Record.Data); @@ -337,16 +334,13 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, TK // Act var filter = filterType == "Equality" ? 
new VectorSearchFilter().EqualTo("Data", $"data {testKey2}") : new VectorSearchFilter().AnyTagEqualTo("Tags", $"tag {testKey2}"); - var actual = await sut.VectorizedSearchAsync( + var actualResults = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), top: 3, - new() { IncludeVectors = true, OldFilter = filter, IncludeTotalCount = true }, - this._testCancellationToken); + new() { IncludeVectors = true, OldFilter = filter }, + this._testCancellationToken).ToListAsync(); // Assert - Assert.NotNull(actual); - Assert.Equal(1, actual.TotalCount); - var actualResults = await actual.Results.ToListAsync(); Assert.Single(actualResults); Assert.Equal(testKey2, actualResults[0].Record.Key); Assert.Equal($"data {testKey2}", actualResults[0].Record.Data); @@ -390,15 +384,13 @@ public async Task CanSearchWithDifferentDistanceFunctionsAsync(string distanceFu }); // Act - var actual = await sut.VectorizedSearchAsync( + var actualResults = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), top: 3, new() { IncludeVectors = true }, - this._testCancellationToken); + this._testCancellationToken).ToListAsync(); // Assert - Assert.NotNull(actual); - var actualResults = await actual.Results.ToListAsync(); Assert.Equal(2, actualResults.Count); Assert.Equal(TestRecordKey1, actualResults[0].Record.Key); Assert.Equal($"data {TestRecordKey1}", actualResults[0].Record.Data); @@ -432,18 +424,13 @@ public async Task CanSearchManyRecordsAsync(bool useDefinition) var sut = this.CreateRecordCollection(useDefinition); // Act - var actual = await sut.VectorizedSearchAsync( + var actualResults = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), top: 10, - new() { IncludeVectors = true, Skip = 10, IncludeTotalCount = true }, - this._testCancellationToken); - - // Assert - Assert.NotNull(actual); - Assert.Equal(1000, actual.TotalCount); + new() { IncludeVectors = true, Skip = 10 }, + this._testCancellationToken).ToListAsync(); // Assert that top was respected - var actualResults = await actual.Results.ToListAsync(); Assert.Equal(10, actualResults.Count); var actualIds = actualResults.Select(r => r.Record.Key).ToList(); for (int i = 0; i < 10; i++) @@ -499,15 +486,13 @@ public async Task ItCanSearchUsingTheDynamicDataModelAsync(TKey testKey1, }); // Act - var actual = await sut.VectorizedSearchAsync( + var actualResults = await sut.VectorizedSearchAsync( new ReadOnlyMemory([1, 1, 1, 1]), top: 3, new() { IncludeVectors = true, VectorProperty = r => r["Vector"] }, - this._testCancellationToken); + this._testCancellationToken).ToListAsync(); // Assert - Assert.NotNull(actual); - var actualResults = await actual.Results.ToListAsync(); Assert.Equal(2, actualResults.Count); Assert.Equal(testKey1, actualResults[0].Record["Key"]); Assert.Equal($"data {testKey1}", actualResults[0].Record["Data"]); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index c1383d5b9d02..df9fc6e49ecf 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -301,7 +301,7 @@ public async Task> UpsertAsync(IEnumerable records, } /// - public Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { var floatVector = VerifyVectorParam(vector); Verify.NotLessThan(top, 1); @@ -333,7 +333,6 @@ public Task> VectorizedSearchAsync(TVector VectorSearch = new(), Size = top, Skip = internalOptions.Skip, - IncludeTotalCount = internalOptions.IncludeTotalCount, }; if (filter is not null) @@ -358,8 +357,8 @@ public Task> VectorizedSearchAsync(TVector } /// - public async IAsyncEnumerable GetAsync(Expression> filter, int top, - GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public IAsyncEnumerable GetAsync(Expression> filter, int top, + GetFilteredRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(filter); Verify.NotLessThan(top, 1); @@ -400,15 +399,12 @@ public async IAsyncEnumerable GetAsync(Expression> searchOptions.OrderBy.Add(name); } - VectorSearchResults vectorSearchResults = await this.SearchAndMapToDataModelAsync(null, searchOptions, options.IncludeVectors, cancellationToken).ConfigureAwait(false); - await foreach (var result in vectorSearchResults.Results.ConfigureAwait(false)) - { - yield return result.Record; - } + return this.SearchAndMapToDataModelAsync(null, searchOptions, options.IncludeVectors, cancellationToken) + .SelectAsync(result => result.Record, cancellationToken); } /// - public Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(searchText); Verify.NotLessThan(top, 1); @@ -445,7 +441,6 @@ public Task> VectorizableTextSearchAsync(string sea VectorSearch = new(), Size = top, Skip = internalOptions.Skip, - IncludeTotalCount = internalOptions.IncludeTotalCount, }; if (filter is not null) @@ -470,7 +465,7 @@ public Task> VectorizableTextSearchAsync(string sea } /// - public Task> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(keywords); var floatVector = VerifyVectorParam(vector); @@ -592,11 +587,11 @@ public Task> HybridSearchAsync(TVector vec /// A value indicating whether to include vectors in the result or not. /// The to monitor for cancellation requests. The default is . /// The mapped search results. - private async Task> SearchAndMapToDataModelAsync( + private async IAsyncEnumerable> SearchAndMapToDataModelAsync( string? 
searchText, SearchOptions searchOptions, bool includeVectors, - CancellationToken cancellationToken) + [EnumeratorCancellation] CancellationToken cancellationToken) { const string OperationName = "Search"; @@ -607,14 +602,18 @@ private async Task> SearchAndMapToDataModelAsync( OperationName, () => this._searchClient.SearchAsync(searchText, searchOptions, cancellationToken)).ConfigureAwait(false); - var mappedJsonObjectResults = this.MapSearchResultsAsync(jsonObjectResults.Value.GetResultsAsync(), OperationName, includeVectors); - return new VectorSearchResults(mappedJsonObjectResults) { TotalCount = jsonObjectResults.Value.TotalCount }; + await foreach (var result in this.MapSearchResultsAsync(jsonObjectResults.Value.GetResultsAsync(), OperationName, includeVectors).ConfigureAwait(false)) + { + yield return result; + } } // Execute search and map using the built in Azure AI Search mapper. Response> results = await this.RunOperationAsync(OperationName, () => this._searchClient.SearchAsync(searchText, searchOptions, cancellationToken)).ConfigureAwait(false); - var mappedResults = this.MapSearchResultsAsync(results.Value.GetResultsAsync()); - return new VectorSearchResults(mappedResults) { TotalCount = results.Value.TotalCount }; + await foreach (var result in this.MapSearchResultsAsync(results.Value.GetResultsAsync()).ConfigureAwait(false)) + { + yield return result; + } } /// @@ -653,7 +652,7 @@ private Task> MapToStorageModelAndUploadDocumentA } /// - /// Map the search results from to objects using the configured mapper type. + /// Map the search results from to objects using the configured mapper type. /// /// The search results to map. /// The name of the current operation for telemetry purposes. @@ -674,7 +673,7 @@ private async IAsyncEnumerable> MapSearchResultsAsyn } /// - /// Map the search results from to objects. + /// Map the search results from to objects. /// /// The search results to map. /// The mapped results. diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index 35e6b9f0dea0..11743cec19cb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -257,11 +257,11 @@ public async Task> UpsertAsync(IEnumerable records, } /// - public async Task> VectorizedSearchAsync( + public async IAsyncEnumerable> VectorizedSearchAsync( TVector vector, int top, MEVD.VectorSearchOptions? 
options = null, - CancellationToken cancellationToken = default) + [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(vector); Verify.NotLessThan(top, 1); @@ -324,7 +324,10 @@ public async Task> VectorizedSearchAsync( .AggregateAsync(pipeline, cancellationToken: cancellationToken) .ConfigureAwait(false); - return new VectorSearchResults(this.EnumerateAndMapSearchResultsAsync(cursor, searchOptions, cancellationToken)); + await foreach (var result in this.EnumerateAndMapSearchResultsAsync(cursor, searchOptions, cancellationToken).ConfigureAwait(false)) + { + yield return result; + } } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index 393c85605e47..526d7edb13e2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -297,7 +297,7 @@ public async Task> UpsertAsync(IEnumerable records, } /// - public Task> VectorizedSearchAsync( + public IAsyncEnumerable> VectorizedSearchAsync( TVector vector, int top, VectorSearchOptions? options = null, @@ -328,13 +328,12 @@ public Task> VectorizedSearchAsync( #pragma warning restore CS0618 // Type or member is obsolete var searchResults = this.GetItemsAsync(queryDefinition, cancellationToken); - var mappedResults = this.MapSearchResultsAsync( + return this.MapSearchResultsAsync( searchResults, ScorePropertyName, OperationName, searchOptions.IncludeVectors, cancellationToken); - return Task.FromResult(new VectorSearchResults(mappedResults)); } /// @@ -371,7 +370,7 @@ public async IAsyncEnumerable GetAsync(Expression> } /// - public Task> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) { const string OperationName = "VectorizedSearch"; const string ScorePropertyName = "SimilarityScore"; @@ -399,13 +398,12 @@ public Task> HybridSearchAsync(TVector vec #pragma warning restore CS0618 // Type or member is obsolete var searchResults = this.GetItemsAsync(queryDefinition, cancellationToken); - var mappedResults = this.MapSearchResultsAsync( + return this.MapSearchResultsAsync( searchResults, ScorePropertyName, OperationName, searchOptions.IncludeVectors, cancellationToken); - return Task.FromResult(new VectorSearchResults(mappedResults)); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index 2b35f7d0b3cd..419996e7bc00 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -255,7 +255,7 @@ private TKey Upsert(TRecord record) } /// - public Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); Verify.NotLessThan(top, 1); @@ -298,13 +298,6 @@ public Task> VectorizedSearchAsync(TVector // Get the non-null results since any record with a null vector results in a null result. var nonNullResults = results.Where(x => x.HasValue).Select(x => x!.Value); - // Calculate the total results count if requested. - long? count = null; - if (internalOptions.IncludeTotalCount) - { - count = nonNullResults.Count(); - } - // Sort the results appropriately for the selected distance function and get the right page of results . var sortedScoredResults = InMemoryVectorStoreCollectionSearchMapping.ShouldSortDescending(vectorProperty.DistanceFunction) ? nonNullResults.OrderByDescending(x => x.score) : @@ -312,8 +305,7 @@ public Task> VectorizedSearchAsync(TVector var resultsPage = sortedScoredResults.Skip(internalOptions.Skip).Take(top); // Build the response. - var vectorSearchResultList = resultsPage.Select(x => new VectorSearchResult((TRecord)x.record, x.score)).ToAsyncEnumerable(); - return Task.FromResult(new VectorSearchResults(vectorSearchResultList) { TotalCount = count }); + return resultsPage.Select(x => new VectorSearchResult((TRecord)x.record, x.score)).ToAsyncEnumerable(); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index 48d28c49dfe9..aba4d40220ae 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -263,11 +263,11 @@ public async Task> UpsertAsync(IEnumerable records, } /// - public async Task> VectorizedSearchAsync( + public async IAsyncEnumerable> VectorizedSearchAsync( TVector vector, int top, MEVD.VectorSearchOptions? options = null, - CancellationToken cancellationToken = default) + [EnumeratorCancellation] CancellationToken cancellationToken = default) { Array vectorArray = VerifyVectorParam(vector); Verify.NotLessThan(top, 1); @@ -305,7 +305,7 @@ public async Task> VectorizedSearchAsync( BsonDocument[] pipeline = [searchQuery, projectionQuery]; - return await this.RunOperationWithRetryAsync( + var results = await this.RunOperationWithRetryAsync( "VectorizedSearch", this._options.MaxRetries, this._options.DelayInMilliseconds, @@ -315,9 +315,14 @@ public async Task> VectorizedSearchAsync( .AggregateAsync(pipeline, cancellationToken: cancellationToken) .ConfigureAwait(false); - return new VectorSearchResults(this.EnumerateAndMapSearchResultsAsync(cursor, searchOptions.Skip, searchOptions.IncludeVectors, cancellationToken)); + return this.EnumerateAndMapSearchResultsAsync(cursor, searchOptions.Skip, searchOptions.IncludeVectors, cancellationToken); }, cancellationToken).ConfigureAwait(false); + + await foreach (var result in results.ConfigureAwait(false)) + { + yield return result; + } } /// @@ -379,7 +384,7 @@ public async IAsyncEnumerable GetAsync(Expression> } /// - public async Task> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + public async IAsyncEnumerable> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Array vectorArray = VerifyVectorParam(vector); Verify.NotLessThan(top, 1); @@ -418,7 +423,7 @@ public async Task> HybridSearchAsync(TVect numCandidates, filter); - return await this.RunOperationWithRetryAsync( + var results = await this.RunOperationWithRetryAsync( "KeywordVectorizedHybridSearch", this._options.MaxRetries, this._options.DelayInMilliseconds, @@ -428,9 +433,14 @@ public async Task> HybridSearchAsync(TVect .AggregateAsync(pipeline, cancellationToken: cancellationToken) .ConfigureAwait(false); - return new VectorSearchResults(this.EnumerateAndMapSearchResultsAsync(cursor, searchOptions.Skip, searchOptions.IncludeVectors, cancellationToken)); + return this.EnumerateAndMapSearchResultsAsync(cursor, searchOptions.Skip, searchOptions.IncludeVectors, cancellationToken); }, cancellationToken).ConfigureAwait(false); + + await foreach (var result in results.ConfigureAwait(false)) + { + yield return result; + } } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 528370ca695c..f95bd14e452b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -333,7 +333,7 @@ await this.RunIndexOperationAsync( } /// - public async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(vector); Verify.NotLessThan(top, 1); @@ -372,7 +372,7 @@ public async Task> VectorizedSearchAsync(T if (response.Matches is null) { - return new VectorSearchResults(Array.Empty>().ToAsyncEnumerable()); + yield break; } // Pinecone does not provide a way to skip results, so we need to do it manually. @@ -391,10 +391,12 @@ public async Task> VectorizedSearchAsync(T Values = x.Values ?? Array.Empty(), Metadata = x.Metadata, SparseValues = x.SparseValues - }, mapperOptions), x.Score))) - .ToAsyncEnumerable(); + }, mapperOptions), x.Score))); - return new(records); + foreach (var record in records) + { + yield return record; + } } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs index 69309e2761b6..157c4aacce61 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresVectorStoreDbClient.cs @@ -130,16 +130,11 @@ internal interface IPostgresVectorStoreDbClient /// The vector property. /// The to compare the table's vector with. /// The maximum number of similarity results to return. - /// Optional conditions to filter the results. - /// Optional conditions to filter the results. - /// The number of entries to skip. - /// If true, the vectors will be returned in the entries. + /// The options that control the behavior of the search. /// The to monitor for cancellation requests. The default is . /// An asynchronous stream of result objects that the nearest matches to the . 
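    // Illustrative sketch only (not taken from this patch; `client`, `model`, `vectorProperty`, `pgVector`
    // and the Hotel record type are assumed names): after this change the skip, include-vectors and filter
    // arguments travel on a single VectorSearchOptions<TRecord> instance instead of separate parameters,
    // so a call site looks roughly like:
    //
    //     VectorSearchOptions<Hotel> options = new() { IncludeVectors = false, Skip = 2 };
    //     await foreach (var (row, distance) in client.GetNearestMatchesAsync(
    //         "Hotels", model, vectorProperty, pgVector, limit: 5, options, cancellationToken))
    //     {
    //         // row is the raw storage dictionary for the record, distance is the pgvector distance.
    //     }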
-#pragma warning disable CS0618 // VectorSearchFilter is obsolete IAsyncEnumerable<(Dictionary Row, double Distance)> GetNearestMatchesAsync(string tableName, VectorStoreRecordModel model, VectorStoreRecordVectorPropertyModel vectorProperty, Vector vectorValue, int limit, - VectorSearchFilter? legacyFilter = default, Expression>? newFilter = default, int? skip = default, bool includeVectors = false, CancellationToken cancellationToken = default); -#pragma warning restore CS0618 // VectorSearchFilter is obsolete + VectorSearchOptions options, CancellationToken cancellationToken = default); IAsyncEnumerable> GetMatchingRecordsAsync(string tableName, VectorStoreRecordModel model, Expression> filter, int top, GetFilteredRecordOptions options, CancellationToken cancellationToken = default); diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs index 27a2b55110a8..4031f913233a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreDbClient.cs @@ -184,23 +184,25 @@ public async Task DeleteAsync(string tableName, string keyColumn, TKey key } /// -#pragma warning disable CS0618 // VectorSearchFilter is obsolete public async IAsyncEnumerable<(Dictionary Row, double Distance)> GetNearestMatchesAsync( string tableName, VectorStoreRecordModel model, VectorStoreRecordVectorPropertyModel vectorProperty, Vector vectorValue, int limit, - VectorSearchFilter? legacyFilter = default, Expression>? newFilter = default, int? skip = default, bool includeVectors = false, [EnumeratorCancellation] CancellationToken cancellationToken = default) -#pragma warning restore CS0618 // VectorSearchFilter is obsolete + VectorSearchOptions options, [EnumeratorCancellation] CancellationToken cancellationToken = default) { NpgsqlConnection connection = await this.DataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false); await using (connection) { - var commandInfo = PostgresSqlBuilder.BuildGetNearestMatchCommand(this._schema, tableName, model, vectorProperty, vectorValue, legacyFilter, newFilter, skip, includeVectors, limit); + var commandInfo = PostgresSqlBuilder.BuildGetNearestMatchCommand(this._schema, tableName, model, vectorProperty, vectorValue, +#pragma warning disable CS0618 // VectorSearchFilter is obsolete + options.OldFilter, +#pragma warning restore CS0618 // VectorSearchFilter is obsolete + options.Filter, options.Skip, options.IncludeVectors, limit); using NpgsqlCommand cmd = commandInfo.ToNpgsqlCommand(connection); using NpgsqlDataReader dataReader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); while (await dataReader.ReadAsync(cancellationToken).ConfigureAwait(false)) { var distance = dataReader.GetDouble(dataReader.GetOrdinal(PostgresConstants.DistanceColumnName)); - yield return (Row: this.GetRecord(dataReader, model, includeVectors), Distance: distance); + yield return (Row: this.GetRecord(dataReader, model, options.IncludeVectors), Distance: distance); } } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index 8bbe1fe548eb..44fcb40fa3d1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ 
b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -4,7 +4,6 @@ using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; -using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; @@ -253,7 +252,7 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationTo } /// - public Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { const string OperationName = "VectorizedSearch"; @@ -280,42 +279,30 @@ public Task> VectorizedSearchAsync(TVector // and LIMIT is not supported in vector search extension, instead of LIMIT - "k" parameter is used. var limit = top + searchOptions.Skip; - return this.RunOperationAsync(OperationName, () => - { - var results = this._client.GetNearestMatchesAsync( - this.CollectionName, - this._model, - vectorProperty, - pgVector, - top, -#pragma warning disable CS0618 // VectorSearchFilter is obsolete - searchOptions.OldFilter, -#pragma warning restore CS0618 // VectorSearchFilter is obsolete - searchOptions.Filter, - searchOptions.Skip, - searchOptions.IncludeVectors, - cancellationToken) - .SelectAsync(result => + StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = searchOptions.IncludeVectors }; + + return PostgresVectorStoreUtils.WrapAsyncEnumerableAsync( + this._client.GetNearestMatchesAsync(this.CollectionName, this._model, vectorProperty, pgVector, top, searchOptions, cancellationToken) + .SelectAsync(result => { var record = VectorStoreErrorHandler.RunModelConversion( PostgresConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this.CollectionName, OperationName, - () => this._mapper.MapFromStorageToDataModel( - result.Row, new StorageToDataModelMapperOptions() { IncludeVectors = searchOptions.IncludeVectors }) - ); + () => this._mapper.MapFromStorageToDataModel(result.Row, mapperOptions)); return new VectorSearchResult(record, result.Distance); - }, cancellationToken); - - return Task.FromResult(new VectorSearchResults(results)); - }); + }, cancellationToken), + OperationName, + this._collectionMetadata.VectorStoreName, + this.CollectionName + ); } /// - public async IAsyncEnumerable GetAsync(Expression> filter, int top, - GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public IAsyncEnumerable GetAsync(Expression> filter, int top, + GetFilteredRecordOptions? 
options = null, CancellationToken cancellationToken = default) { Verify.NotNull(filter); Verify.NotLessThan(top, 1); @@ -324,21 +311,20 @@ public async IAsyncEnumerable GetAsync(Expression> StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = options.IncludeVectors }; - await foreach (var dictionary in this._client.GetMatchingRecordsAsync( - this.CollectionName, - this._model, - filter, - top, - options, - cancellationToken).ConfigureAwait(false)) - { - yield return VectorStoreErrorHandler.RunModelConversion( - PostgresConstants.VectorStoreSystemName, - this._collectionMetadata.VectorStoreName, - this.CollectionName, - "Get", - () => this._mapper.MapFromStorageToDataModel(dictionary, mapperOptions)); - } + return PostgresVectorStoreUtils.WrapAsyncEnumerableAsync( + this._client.GetMatchingRecordsAsync(this.CollectionName, this._model, filter, top, options, cancellationToken) + .SelectAsync(dictionary => + { + return VectorStoreErrorHandler.RunModelConversion( + PostgresConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, + this.CollectionName, + "Get", + () => this._mapper.MapFromStorageToDataModel(dictionary, mapperOptions)); + }, cancellationToken), + "Get", + this._collectionMetadata.VectorStoreName, + this.CollectionName); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreUtils.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreUtils.cs index 20d19717fee9..87efc8e547b3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreUtils.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreUtils.cs @@ -56,7 +56,7 @@ public static async IAsyncEnumerable WrapAsyncEnumerableAsync( var more = await enumerator.MoveNextAsync(); return (enumerator.Current, more); } - catch (Exception ex) + catch (Exception ex) when (ex is not (NotSupportedException or ArgumentException)) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index 3b0f5e2c6a78..f0ab07038231 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -482,7 +482,7 @@ await this.RunOperationAsync( } /// - public async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { var floatVector = VerifyVectorParam(vector); Verify.NotLessThan(top, 1); @@ -534,7 +534,10 @@ public async Task> VectorizedSearchAsync(T this._collectionName, "Query")); - return new VectorSearchResults(mappedResults.ToAsyncEnumerable()); + foreach (var result in mappedResults) + { + yield return result; + } } /// @@ -594,7 +597,7 @@ public async IAsyncEnumerable GetAsync(Expression> } /// - public async Task> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + public async IAsyncEnumerable> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { var floatVector = VerifyVectorParam(vector); Verify.NotLessThan(top, 1); @@ -676,7 +679,10 @@ public async Task> HybridSearchAsync(TVect this._collectionName, "Query")); - return new VectorSearchResults(mappedResults.ToAsyncEnumerable()); + foreach (var result in mappedResults) + { + yield return result; + } } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index adbd3fe50d46..6d2138d3fb83 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -331,7 +331,7 @@ public async Task> UpsertAsync(IEnumerable records, } /// - public async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(vector); Verify.NotLessThan(top, 1); @@ -382,7 +382,10 @@ public async Task> VectorizedSearchAsync(T return new VectorSearchResult(dataModel, score); }); - return new VectorSearchResults(mappedResults.ToAsyncEnumerable()); + foreach (var result in mappedResults) + { + yield return result; + } } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index 8298770b9e54..04689522f7cb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -411,7 +411,7 @@ await this.RunOperationAsync( } /// - public async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(vector); Verify.NotLessThan(top, 1); @@ -459,7 +459,10 @@ public async Task> VectorizedSearchAsync(T return new VectorSearchResult(mappedRecord, score); }); - return new VectorSearchResults(mappedResults.ToAsyncEnumerable()); + foreach (var result in mappedResults) + { + yield return result; + } } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index 1e4acc69afb4..222ccf6083c7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -424,7 +424,7 @@ public async Task> UpsertAsync(IEnumerable records, } /// - public async Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { Verify.NotNull(vector); Verify.NotLessThan(top, 1); @@ -446,11 +446,10 @@ public async Task> VectorizedSearchAsync(T var vectorProperty = this._model.GetVectorPropertyOrSingle(searchOptions); #pragma warning disable CA2000 // Dispose objects before losing scope - // This connection will be disposed by the ReadVectorSearchResultsAsync + // Connection and command are going to be disposed by the ReadVectorSearchResultsAsync, // when the user is done with the results. SqlConnection connection = new(this._connectionString); -#pragma warning restore CA2000 // Dispose objects before losing scope - using SqlCommand command = SqlServerCommandBuilder.SelectVector( + SqlCommand command = SqlServerCommandBuilder.SelectVector( connection, this._options.Schema, this.CollectionName, @@ -459,17 +458,9 @@ public async Task> VectorizedSearchAsync(T top, searchOptions, allowed); +#pragma warning restore CA2000 // Dispose objects before losing scope - return await ExceptionWrapper.WrapAsync(connection, command, - (cmd, ct) => - { - var results = this.ReadVectorSearchResultsAsync(connection, cmd, searchOptions.IncludeVectors, ct); - return Task.FromResult(new VectorSearchResults(results)); - }, - "VectorizedSearch", - this._collectionMetadata.VectorStoreName, - this.CollectionName, - cancellationToken).ConfigureAwait(false); + return this.ReadVectorSearchResultsAsync(connection, command, searchOptions.IncludeVectors, cancellationToken); } /// @@ -494,10 +485,21 @@ private async IAsyncEnumerable> ReadVectorSearchResu { StorageToDataModelMapperOptions options = new() { IncludeVectors = includeVectors }; var vectorProperties = includeVectors ? this._model.VectorProperties : []; - using SqlDataReader reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + + using SqlDataReader reader = await ExceptionWrapper.WrapAsync(connection, command, + static (cmd, ct) => cmd.ExecuteReaderAsync(ct), + "VectorizedSearch", + this._collectionMetadata.VectorStoreName, + this.CollectionName, + cancellationToken).ConfigureAwait(false); int scoreIndex = -1; - while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) + while (await ExceptionWrapper.WrapReadAsync( + reader, + "VectorizedSearch", + this._collectionMetadata.VectorStoreName, + this.CollectionName, + cancellationToken).ConfigureAwait(false)) { if (scoreIndex < 0) { @@ -511,6 +513,7 @@ private async IAsyncEnumerable> ReadVectorSearchResu } finally { + command.Dispose(); connection.Dispose(); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index b633aac6558e..1b2ed2ba6171 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -199,7 +199,7 @@ public async Task DeleteCollectionAsync(CancellationToken cancellationToken = de } /// - public Task> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) { const string LimitPropertyName = "k"; @@ -257,14 +257,12 @@ public Task> VectorizedSearchAsync(TVector } #pragma warning restore CS0618 // VectorSearchFilter is obsolete - var vectorSearchResults = new VectorSearchResults(this.EnumerateAndMapSearchResultsAsync( + return this.EnumerateAndMapSearchResultsAsync( conditions, extraWhereFilter, extraParameters, searchOptions, - cancellationToken)); - - return Task.FromResult(vectorSearchResults); + cancellationToken); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index e9fda5c38484..e3fdf3ac2fcf 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -330,7 +330,7 @@ public async Task> UpsertAsync(IEnumerable records, } /// - public async Task> VectorizedSearchAsync( + public IAsyncEnumerable> VectorizedSearchAsync( TVector vector, int top, VectorSearchOptions? options = null, @@ -353,12 +353,12 @@ public async Task> VectorizedSearchAsync( searchOptions, this._model); - return await this.ExecuteQueryAsync(query, searchOptions.IncludeVectors, WeaviateConstants.ScorePropertyName, OperationName, cancellationToken).ConfigureAwait(false); + return this.ExecuteQueryAsync(query, searchOptions.IncludeVectors, WeaviateConstants.ScorePropertyName, OperationName, cancellationToken); } /// - public async IAsyncEnumerable GetAsync(Expression> filter, int top, - GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public IAsyncEnumerable GetAsync(Expression> filter, int top, + GetFilteredRecordOptions? options = null, CancellationToken cancellationToken = default) { Verify.NotNull(filter); Verify.NotLessThan(top, 1); @@ -372,15 +372,12 @@ public async IAsyncEnumerable GetAsync(Expression> this.CollectionName, this._model); - var results = await this.ExecuteQueryAsync(query, options.IncludeVectors, WeaviateConstants.ScorePropertyName, "GetAsync", cancellationToken).ConfigureAwait(false); - await foreach (var record in results.Results.ConfigureAwait(false)) - { - yield return record.Record; - } + return this.ExecuteQueryAsync(query, options.IncludeVectors, WeaviateConstants.ScorePropertyName, "GetAsync", cancellationToken) + .SelectAsync(result => result.Record, cancellationToken: cancellationToken); } /// - public async Task> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? 
options = null, CancellationToken cancellationToken = default) { const string OperationName = "HybridSearch"; @@ -402,7 +399,7 @@ public async Task> HybridSearchAsync(TVect s_jsonSerializerOptions, searchOptions); - return await this.ExecuteQueryAsync(query, searchOptions.IncludeVectors, WeaviateConstants.HybridScorePropertyName, OperationName, cancellationToken).ConfigureAwait(false); + return this.ExecuteQueryAsync(query, searchOptions.IncludeVectors, WeaviateConstants.HybridScorePropertyName, OperationName, cancellationToken); } /// @@ -420,7 +417,7 @@ public async Task> HybridSearchAsync(TVect #region private - private async Task> ExecuteQueryAsync(string query, bool includeVectors, string scorePropertyName, string operationName, CancellationToken cancellationToken) + private async IAsyncEnumerable> ExecuteQueryAsync(string query, bool includeVectors, string scorePropertyName, string operationName, [EnumeratorCancellation] CancellationToken cancellationToken) { using var request = new WeaviateVectorSearchRequest(query).Build(); @@ -439,21 +436,22 @@ private async Task> ExecuteQueryAsync(string query, }; } - var mappedResults = collectionResults.Where(x => x is not null).Select(result => + foreach (var result in collectionResults) { - var (storageModel, score) = WeaviateVectorStoreCollectionSearchMapping.MapSearchResult(result!, scorePropertyName); - - var record = VectorStoreErrorHandler.RunModelConversion( - WeaviateConstants.VectorStoreSystemName, - this._collectionMetadata.VectorStoreName, - this.CollectionName, - operationName, - () => this._mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors })); + if (result is not null) + { + var (storageModel, score) = WeaviateVectorStoreCollectionSearchMapping.MapSearchResult(result, scorePropertyName); - return new VectorSearchResult(record, score); - }); + var record = VectorStoreErrorHandler.RunModelConversion( + WeaviateConstants.VectorStoreSystemName, + this._collectionMetadata.VectorStoreName, + this.CollectionName, + operationName, + () => this._mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors })); - return new VectorSearchResults(mappedResults.ToAsyncEnumerable()); + yield return new VectorSearchResult(record, score); + } + } } private Task ExecuteRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken) diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs index 77a26fe52c22..3b641e03046f 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs @@ -581,13 +581,11 @@ public async Task VectorizedSearchThrowsExceptionWithInvalidVectorTypeAsync(obje // Act & Assert if (exceptionExpected) { - await Assert.ThrowsAsync(async () => await sut.VectorizedSearchAsync(vector, top: 3)); + await Assert.ThrowsAsync(async () => await sut.VectorizedSearchAsync(vector, top: 3).ToListAsync()); } else { - var actual = await sut.VectorizedSearchAsync(vector, top: 3); - - Assert.NotNull(actual); + Assert.NotNull(await sut.VectorizedSearchAsync(vector, top: 3).FirstOrDefaultAsync()); } } @@ -645,10 +643,10 @@ public async Task VectorizedSearchUsesValidQueryAsync( var actual = await sut.VectorizedSearchAsync(vector, top: actualTop, new() { VectorProperty = vectorSelector, - 
}); + }).FirstOrDefaultAsync(); // Assert - Assert.NotNull(await actual.Results.FirstOrDefaultAsync()); + Assert.NotNull(actual); this._mockMongoCollection.Verify(l => l.AggregateAsync( It.Is>(pipeline => @@ -670,7 +668,7 @@ public async Task VectorizedSearchThrowsExceptionWithNonExistentVectorPropertyNa var options = new MEVD.VectorSearchOptions { VectorProperty = r => "non-existent-property" }; // Act & Assert - await Assert.ThrowsAsync(async () => await (await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3, options)).Results.FirstOrDefaultAsync()); + await Assert.ThrowsAsync(async () => await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3, options).FirstOrDefaultAsync()); } [Fact] @@ -684,10 +682,9 @@ public async Task VectorizedSearchReturnsRecordWithScoreAsync() "collection"); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3); + var result = await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3).FirstOrDefaultAsync(); // Assert - var result = await actual.Results.FirstOrDefaultAsync(); Assert.NotNull(result); Assert.Equal("key", result.Record.HotelId); Assert.Equal("Test Name", result.Record.HotelName); diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs index ae30bd5432cb..c223a57142e5 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs @@ -563,11 +563,11 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, bo var filter = new VectorSearchFilter().EqualTo(nameof(SinglePropsModel.Data), "data 1"); // Act. - var actual = await sut.VectorizedSearchAsync( + var results = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new[] { 1f, 2f, 3f, 4f }), top: 5, new() { IncludeVectors = true, OldFilter = filter, Skip = 2 }, - this._testCancellationToken); + this._testCancellationToken).ToListAsync(); // Assert. this._qdrantClientMock @@ -591,7 +591,6 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, bo this._testCancellationToken), Times.Once); - var results = await actual.Results.ToListAsync(); Assert.Single(results); Assert.Equal(testRecordKey, results.First().Record.Key); Assert.Equal("data 1", results.First().Record.OriginalNameData); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs index 8cf0cbf20af5..fa7ce7a04235 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -451,7 +451,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, bool inc var filter = new VectorSearchFilter().EqualTo(nameof(SinglePropsModel.Data), "data 1"); // Act. - var actual = await sut.VectorizedSearchAsync( + var results = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new[] { 1f, 2f, 3f, 4f }), top: 5, new() @@ -459,7 +459,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, bool inc IncludeVectors = includeVectors, OldFilter = filter, Skip = 2 - }); + }).ToListAsync(); // Assert. 
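    // Illustrative note (wording mine; the pattern comes from the updated tests in this change): callers now
    // materialize results with System.Linq.Async operators directly on the returned
    // IAsyncEnumerable<VectorSearchResult<TRecord>>, e.g.
    //
    //     var results = await sut.VectorizedSearchAsync(vector, top: 5, options).ToListAsync();
    //
    // instead of the former two-step pattern
    //
    //     var actual = await sut.VectorizedSearchAsync(vector, top: 5, options);
    //     var results = await actual.Results.ToListAsync();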
var expectedArgsPart1 = new object[] @@ -499,7 +499,6 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, bool inc It.Is(x => x.Where(y => !(y is byte[])).SequenceEqual(expectedArgs.Where(y => !(y is byte[]))))), Times.Once); - var results = await actual.Results.ToListAsync(); Assert.Single(results); Assert.Equal(TestRecordKey1, results.First().Record.Key); Assert.Equal(0.25d, results.First().Score); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs index f8c5d8f4b226..b82841a159c9 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs @@ -467,7 +467,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition) var filter = new VectorSearchFilter().EqualTo(nameof(MultiPropsModel.Data1), "data 1"); // Act. - var actual = await sut.VectorizedSearchAsync( + var results = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new[] { 1f, 2f, 3f, 4f }), top: 5, new() @@ -476,7 +476,7 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition) OldFilter = filter, VectorProperty = r => r.Vector1, Skip = 2 - }); + }).ToListAsync(); // Assert. var expectedArgs = new object[] @@ -503,7 +503,6 @@ public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition) It.Is(x => x.Where(y => !(y is byte[])).SequenceEqual(expectedArgs.Where(y => !(y is byte[]))))), Times.Once); - var results = await actual.Results.ToListAsync(); Assert.Single(results); Assert.Equal(TestRecordKey1, results.First().Record.Key); Assert.Equal(0.25d, results.First().Score); diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordCollectionTests.cs index 59cc3c3401e4..3c93b99e6dba 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordCollectionTests.cs @@ -120,8 +120,7 @@ public async Task VectorizedSearchReturnsRecordAsync(bool includeVectors) var sut = new SqliteVectorStoreRecordCollection>(fakeConnection, "VectorizedSearch"); // Act - var results = await sut.VectorizedSearchAsync(expectedRecord.Vector, new() { IncludeVectors = includeVectors }); - var result = await results.Results.FirstOrDefaultAsync(); + var result = await sut.VectorizedSearchAsync(expectedRecord.Vector, new() { IncludeVectors = includeVectors }).FirstOrDefaultAsync(); // Assert Assert.NotNull(result); diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs index f78cb1c8e500..f9f02c2ff716 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs @@ -505,13 +505,12 @@ public async Task VectorizedSearchReturnsValidRecordAsync(bool includeVectors) var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); // Act - var actual = await sut.VectorizedSearchAsync(vector, top: 3, new() + var results = await 
sut.VectorizedSearchAsync(vector, top: 3, new() { IncludeVectors = includeVectors - }); + }).ToListAsync(); // Assert - var results = await actual.Results.ToListAsync(); Assert.Single(results); var score = results[0].Score; @@ -547,7 +546,7 @@ public async Task VectorizedSearchWithUnsupportedVectorTypeThrowsExceptionAsync( // Act & Assert await Assert.ThrowsAsync(async () => - await (await sut.VectorizedSearchAsync(new List([1, 2, 3]), top: 3)).Results.ToListAsync()); + await sut.VectorizedSearchAsync(new List([1, 2, 3]), top: 3).ToListAsync()); } [Fact] @@ -558,11 +557,11 @@ public async Task VectorizedSearchWithNonExistentVectorPropertyNameThrowsExcepti // Act & Assert await Assert.ThrowsAsync(async () => - await (await sut.VectorizedSearchAsync( + await sut.VectorizedSearchAsync( new ReadOnlyMemory([1f, 2f, 3f]), top: 3, - new() { VectorProperty = r => "non-existent-property" })) - .Results.ToListAsync()); + new() { VectorProperty = r => "non-existent-property" }) + .ToListAsync()); } public void Dispose() diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs index 671664bf3a74..26f0f5f5a81c 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IKeywordHybridSearch.cs @@ -4,7 +4,6 @@ using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Threading; -using System.Threading.Tasks; namespace Microsoft.Extensions.VectorData; @@ -24,7 +23,7 @@ public interface IKeywordHybridSearch /// The options that control the behavior of the search. /// The to monitor for cancellation requests. The default is . /// The records found by the hybrid search, including their result scores. - Task> HybridSearchAsync( + IAsyncEnumerable> HybridSearchAsync( TVector vector, ICollection keywords, int top, diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs index 3198e518e9ce..29b49f86ee21 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs @@ -1,9 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Threading; -using System.Threading.Tasks; namespace Microsoft.Extensions.VectorData; @@ -21,7 +21,7 @@ public interface IVectorizableTextSearch /// The options that control the behavior of the search. /// The to monitor for cancellation requests. The default is . /// The records found by the vector search, including their result scores. - Task> VectorizableTextSearchAsync( + IAsyncEnumerable> VectorizableTextSearchAsync( string searchText, int top, VectorSearchOptions? options = default, diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs index e4ff7dec7238..7958679b43d3 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs @@ -1,9 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. 
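// Illustrative sketch (the `search`, `embedding` and `cancellationToken` variables and the Hotel model are
// assumed names, not part of this patch): with VectorizedSearchAsync now returning
// IAsyncEnumerable<VectorSearchResult<TRecord>>, results are streamed directly from the interface:
//
//     await foreach (var match in search.VectorizedSearchAsync(embedding, top: 3,
//         new() { IncludeVectors = false }, cancellationToken))
//     {
//         Console.WriteLine($"{match.Record.HotelName}: {match.Score}");
//     }
//
// Buffering helpers such as ToListAsync() or FirstOrDefaultAsync() (System.Linq.Async) remain available,
// as the updated unit and integration tests in this change show.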
using System; +using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Threading; -using System.Threading.Tasks; namespace Microsoft.Extensions.VectorData; @@ -22,7 +22,7 @@ public interface IVectorizedSearch /// The options that control the behavior of the search. /// The to monitor for cancellation requests. The default is . /// The records found by the vector search, including their result scores. - Task> VectorizedSearchAsync( + IAsyncEnumerable> VectorizedSearchAsync( TVector vector, int top, VectorSearchOptions? options = default, diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs index 221c5c790606..533ede18348d 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchOptions.cs @@ -77,5 +77,6 @@ public int Skip /// Not all vector search implementations support this option, in which case the total /// count will be null even if requested via this option. /// + [Obsolete("Total count is no longer included in the results.", error: true)] public bool IncludeTotalCount { get; init; } = false; } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResults.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResults.cs deleted file mode 100644 index 293315ee554a..000000000000 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchResults.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; - -namespace Microsoft.Extensions.VectorData; - -/// -/// Contains the full list of search results for a vector search operation with metadata. -/// -/// The record data model to use for retrieving data from the store. -/// The list of records returned by the search operation. -public class VectorSearchResults(IAsyncEnumerable> results) -{ - /// - /// Gets or sets the total count of results found by the search operation, or null - /// if the count was not requested or cannot be computed. - /// - /// - /// This value represents the total number of results that are available for the current query and not the number of results being returned. - /// - public long? TotalCount { get; init; } - - /// - /// Gets or sets the metadata associated with the content. - /// - public IReadOnlyDictionary? Metadata { get; init; } - - /// - /// Gets the search results. 
- /// - public IAsyncEnumerable> Results { get; } = results; -} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs index 2f86994e73e4..b8c0aca87be4 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -64,14 +64,14 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe var upsertResult = await sut.UpsertAsync(hotel); var getResult = await sut.GetAsync("Upsert-1"); var embedding = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"); - var actual = await sut.VectorizedSearchAsync( + var searchResults = await sut.VectorizedSearchAsync( embedding, top: 3, new() { IncludeVectors = true, OldFilter = new VectorSearchFilter().EqualTo("HotelName", "MyHotel Upsert-1") - }); + }).ToListAsync(); // Assert var collectionExistResult = await sut.CollectionExistsAsync(); @@ -91,7 +91,6 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe Assert.Equal(hotel.LastRenovationDate, getResult.LastRenovationDate); Assert.Equal(hotel.Rating, getResult.Rating); - var searchResults = await actual.Results.ToListAsync(); Assert.Single(searchResults); var searchResultRecord = searchResults.First().Record; Assert.Equal(hotel.HotelName, searchResultRecord.HotelName); @@ -346,7 +345,7 @@ public async Task ItCanSearchWithVectorAndFiltersAsync(string option, bool inclu // Act. var filter = option == "equality" ? new VectorSearchFilter().EqualTo("HotelName", "Hotel 3") : new VectorSearchFilter().AnyTagEqualTo("Tags", "bar"); - var actual = await sut.VectorizedSearchAsync( + var searchResults = await sut.VectorizedSearchAsync( await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"), top: 3, new() @@ -354,10 +353,9 @@ await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"), IncludeVectors = includeVectors, VectorProperty = r => r.DescriptionEmbedding, OldFilter = filter, - }); + }).ToListAsync(); // Assert. - var searchResults = await actual.Results.ToListAsync(); Assert.Single(searchResults); var searchResult = searchResults.First(); Assert.Equal("BaseSet-3", searchResult.Record.HotelId); @@ -387,17 +385,16 @@ public async Task ItCanSearchWithVectorizableTextAndFiltersAsync() // Act. var filter = new VectorSearchFilter().EqualTo("HotelName", "Hotel 3"); - var actual = await sut.VectorizableTextSearchAsync( + var searchResults = await sut.VectorizableTextSearchAsync( "A hotel with great views.", top: 3, new() { VectorProperty = r => r.DescriptionEmbedding, OldFilter = filter, - }); + }).ToListAsync(); // Assert. 
- var searchResults = await actual.Results.ToListAsync(); Assert.Single(searchResults); } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index d33dc2c0e1b0..15d7349fb076 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -343,10 +343,9 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3); + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); var ids = searchResults.Select(l => l.Record.HotelId).ToList(); Assert.Equal("key1", ids[0]); @@ -374,13 +373,12 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() { Skip = 2 - }); + }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); var ids = searchResults.Select(l => l.Record.HotelId).ToList(); Assert.Equal("key3", ids[0]); @@ -406,13 +404,12 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() { OldFilter = new VectorSearchFilter().EqualTo(nameof(AzureCosmosDBMongoDBHotel.HotelName), "My Hotel key2") - }); + }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); var ids = searchResults.Select(l => l.Record.HotelId).ToList(); Assert.Equal("key2", ids[0]); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 7e2c0f53e522..4569c75514b3 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -277,10 +277,9 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3); + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); var ids = searchResults.Select(l => l.Record.HotelId).ToList(); Assert.Equal("key1", ids[0]); @@ -308,13 +307,12 @@ public async Task 
VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() { Skip = 2 - }); + }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); var ids = searchResults.Select(l => l.Record.HotelId).ToList(); Assert.Equal("key3", ids[0]); @@ -341,13 +339,12 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync(VectorSearc await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 4, new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 4, new() { OldFilter = filter, - }); + }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); var actualIds = searchResults.Select(l => l.Record.HotelId).ToList(); Assert.Equal(expectedIds, actualIds); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs index 4214a74d7708..d033c68bde54 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs @@ -97,10 +97,9 @@ public async Task VectorSearchShouldReturnExpectedScoresAsync(string distanceFun await Task.Delay(this.DelayAfterUploadInMilliseconds); // Act - var searchResult = await sut.VectorizedSearchAsync(baseVector, top: 3); + var results = await sut.VectorizedSearchAsync(baseVector, top: 3).ToListAsync(); // Assert - var results = await searchResult.Results.ToListAsync(); Assert.Equal(3, results.Count); Assert.Equal(keyDictionary[resultOrder[0]], results[0].Record.Key); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs index 0545491d5f2b..654ae7cedb75 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs @@ -347,10 +347,9 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3); + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); var ids = searchResults.Select(l => l.Record.HotelId).ToList(); Assert.Equal("key1", ids[0]); @@ -378,13 +377,12 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() { Skip = 2 - }); + }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); var ids = searchResults.Select(l => 
l.Record.HotelId).ToList(); Assert.Equal("key3", ids[0]); @@ -410,13 +408,12 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() { OldFilter = new VectorSearchFilter().EqualTo(nameof(MongoDBHotel.HotelName), "My Hotel key2") - }); + }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); var ids = searchResults.Select(l => l.Record.HotelId).ToList(); Assert.Equal("key2", ids[0]); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs index ffa795f7db1f..8a2200ae6bc3 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresVectorStoreRecordCollectionTests.cs @@ -361,12 +361,10 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync(bool include await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([0.9f, 0.1f, 0.5f, 0.8f]), top: 3, new() + var results = await sut.VectorizedSearchAsync(new ReadOnlyMemory([0.9f, 0.1f, 0.5f, 0.8f]), top: 3, new() { IncludeVectors = includeVectors - }); - - var results = await searchResults.Results.ToListAsync(); + }).ToListAsync(); // Assert var ids = results.Select(l => l.Record.HotelId).ToList(); @@ -399,15 +397,13 @@ public async Task VectorizedSearchWithEqualToFilterReturnsValidResultsAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 29f, 28f, 27f]), top: 5, new() + var results = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 29f, 28f, 27f]), top: 5, new() { IncludeVectors = false, OldFilter = new([ new EqualToFilterClause("HotelRating", 2.5f) ]) - }); - - var results = await searchResults.Results.ToListAsync(); + }).ToListAsync(); // Assert var ids = results.Select(l => l.Record.HotelId).ToList(); @@ -431,15 +427,13 @@ public async Task VectorizedSearchWithAnyTagFilterReturnsValidResultsAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 29f, 28f, 27f]), top: 5, new() + var results = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 29f, 28f, 27f]), top: 5, new() { IncludeVectors = false, OldFilter = new([ new AnyTagEqualToFilterClause("Tags", "tag2") ]) - }); - - var results = await searchResults.Results.ToListAsync(); + }).ToListAsync(); // Assert var ids = results.Select(l => l.Record.HotelId).ToList(); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs index d805c48f530f..3eb7bc1f8752 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs @@ -67,10 +67,10 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool hasNamedVec var 
upsertResult = await sut.UpsertAsync(record); var getResult = await sut.GetAsync(30, new GetRecordOptions { IncludeVectors = true }); var vector = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"); - var actual = await sut.VectorizedSearchAsync( + var searchResults = await sut.VectorizedSearchAsync( vector, top: 3, - new() { OldFilter = new VectorSearchFilter().EqualTo("HotelCode", 30).AnyTagEqualTo("Tags", "t2") }); + new() { OldFilter = new VectorSearchFilter().EqualTo("HotelCode", 30).AnyTagEqualTo("Tags", "t2") }).ToListAsync(); // Assert var collectionExistResult = await sut.CollectionExistsAsync(); @@ -88,7 +88,6 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool hasNamedVec Assert.Equal(record.Tags.ToArray(), getResult?.Tags.ToArray()); Assert.Equal(record.Description, getResult?.Description); - var searchResults = await actual.Results.ToListAsync(); Assert.Single(searchResults); var searchResultRecord = searchResults.First().Record; Assert.Equal(record.HotelId, searchResultRecord?.HotelId); @@ -400,16 +399,15 @@ public async Task ItCanSearchWithFilterAsync(bool useRecordDefinition, string co // Act. var vector = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"); var filter = filterType == "equality" ? new VectorSearchFilter().EqualTo("HotelName", "My Hotel 13").EqualTo("LastRenovationDate", new DateTimeOffset(2020, 02, 01, 0, 0, 0, TimeSpan.Zero)) : new VectorSearchFilter().AnyTagEqualTo("Tags", "t13.2"); - var actual = await sut.VectorizedSearchAsync( + var searchResults = await sut.VectorizedSearchAsync( vector, top: 3, new() { OldFilter = filter - }); + }).ToListAsync(); // Assert. - var searchResults = await actual.Results.ToListAsync(); Assert.Single(searchResults); var searchResultRecord = searchResults.First().Record; diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs index 01a102f26ebb..a96e4bf1b9f1 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -65,11 +65,11 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe await sut.CreateCollectionAsync(); var upsertResult = await sut.UpsertAsync(record); var getResult = await sut.GetAsync("HUpsert-1", new GetRecordOptions { IncludeVectors = true }); - var actual = await sut + var searchResults = await sut .VectorizedSearchAsync( new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }), top: 3, - new() { OldFilter = new VectorSearchFilter().EqualTo("HotelCode", 1), IncludeVectors = true }); + new() { OldFilter = new VectorSearchFilter().EqualTo("HotelCode", 1), IncludeVectors = true }).ToListAsync(); // Assert var collectionExistResult = await sut.CollectionExistsAsync(); @@ -85,7 +85,6 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe Assert.Equal(record.Description, getResult?.Description); Assert.Equal(record.DescriptionEmbedding?.ToArray(), getResult?.DescriptionEmbedding?.ToArray()); - var searchResults = await actual.Results.ToListAsync(); Assert.Single(searchResults); Assert.Equal(1, searchResults.First().Score); var searchResultRecord = searchResults.First().Record; @@ -318,17 +317,16 @@ public async Task ItCanSearchWithFloat32VectorAndFilterAsync(string filterType, var 
filter = filterType == "equality" ? new VectorSearchFilter().EqualTo("HotelCode", 1) : new VectorSearchFilter().EqualTo("HotelName", "My Hotel 1"); // Act - var actual = await sut.VectorizedSearchAsync( + var searchResults = await sut.VectorizedSearchAsync( vector, top: 3, new() { IncludeVectors = includeVectors, OldFilter = filter - }); + }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); Assert.Single(searchResults); Assert.Equal(1, searchResults.First().Score); var searchResult = searchResults.First().Record; @@ -363,16 +361,15 @@ public async Task ItCanSearchWithFloat32VectorAndTopSkipAsync() var vector = new ReadOnlyMemory([1.0f, 1.0f, 1.0f, 1.0f]); // Act - var actual = await sut.VectorizedSearchAsync( + var searchResults = await sut.VectorizedSearchAsync( vector, top: 3, new() { Skip = 2 - }); + }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); Assert.Equal(3, searchResults.Count); Assert.True(searchResults.Select(x => x.Record.HotelId).SequenceEqual(["HTopSkip_3", "HTopSkip_4", "HTopSkip_5"])); } @@ -393,16 +390,15 @@ public async Task ItCanSearchWithFloat64VectorAsync(bool includeVectors) var vector = new ReadOnlyMemory([2.0d, 2.1d, 2.2d, 2.3d]); // Act - var actual = await sut.VectorizedSearchAsync( + var searchResults = await sut.VectorizedSearchAsync( vector, top: 1, new() { IncludeVectors = includeVectors, - }); + }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); Assert.Single(searchResults); var searchResult = searchResults.First().Record; Assert.Equal("HFloat64_2", searchResult?.HotelId); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs index 4b0972a97437..4add283ba03f 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs @@ -65,10 +65,10 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe await sut.CreateCollectionAsync(); var upsertResult = await sut.UpsertAsync(record); var getResult = await sut.GetAsync("Upsert-10", new GetRecordOptions { IncludeVectors = true }); - var actual = await sut.VectorizedSearchAsync( + var searchResults = await sut.VectorizedSearchAsync( new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }), top: 3, - new() { OldFilter = new VectorSearchFilter().EqualTo("HotelCode", 10) }); + new() { OldFilter = new VectorSearchFilter().EqualTo("HotelCode", 10) }).ToListAsync(); // Assert var collectionExistResult = await sut.CollectionExistsAsync(); @@ -89,7 +89,6 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe Assert.Equal(record.Description, getResult?.Description); Assert.Equal(record.DescriptionEmbedding?.ToArray(), getResult?.DescriptionEmbedding?.ToArray()); - var searchResults = await actual.Results.ToListAsync(); Assert.Single(searchResults); Assert.Equal(1, searchResults.First().Score); var searchResultRecord = searchResults.First().Record; @@ -348,13 +347,12 @@ public async Task ItCanSearchWithFloat32VectorAndFilterAsync(string filterType) var filter = filterType == "equality" ? 
new VectorSearchFilter().EqualTo("HotelCode", 1) : new VectorSearchFilter().AnyTagEqualTo("Tags", "pool"); // Act - var actual = await sut.VectorizedSearchAsync( + var searchResults = await sut.VectorizedSearchAsync( vector, top: 3, - new() { IncludeVectors = true, OldFilter = filter }); + new() { IncludeVectors = true, OldFilter = filter }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); Assert.Single(searchResults); Assert.Equal(1, searchResults.First().Score); var searchResult = searchResults.First().Record; @@ -387,16 +385,15 @@ public async Task ItCanSearchWithFloat32VectorAndTopSkipAsync() var vector = new ReadOnlyMemory([1.0f, 1.0f, 1.0f, 1.0f]); // Act - var actual = await sut.VectorizedSearchAsync( + var searchResults = await sut.VectorizedSearchAsync( vector, top: 3, new() { Skip = 2 - }); + }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); Assert.Equal(3, searchResults.Count); Assert.True(searchResults.Select(x => x.Record.HotelId).SequenceEqual(["TopSkip_3", "TopSkip_4", "TopSkip_5"])); } @@ -417,16 +414,15 @@ public async Task ItCanSearchWithFloat64VectorAsync(bool includeVectors) var vector = new ReadOnlyMemory([2.0d, 2.1d, 2.2d, 2.3d]); // Act - var actual = await sut.VectorizedSearchAsync( + var searchResults = await sut.VectorizedSearchAsync( vector, top: 1, new() { IncludeVectors = includeVectors, - }); + }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); Assert.Single(searchResults); var searchResult = searchResults.First().Record; Assert.Equal("Float64_2", searchResult?.HotelId); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs index 33189c600b0f..bd70dd934b4b 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs @@ -355,12 +355,10 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync(bool include await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() + var results = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() { IncludeVectors = includeVectors - }); - - var results = await searchResults.Results.ToListAsync(); + }).ToListAsync(); // Assert var ids = results.Select(l => l.Record.HotelId).ToList(); @@ -392,12 +390,10 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() + var results = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() { Skip = 2 - }); - - var results = await searchResults.Results.ToListAsync(); + }).ToListAsync(); // Assert var ids = results.Select(l => l.Record.HotelId).ToList(); @@ -425,12 +421,10 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() + var results = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, 
new() { OldFilter = new VectorSearchFilter().EqualTo(nameof(SqliteHotel.HotelName), "My Hotel key2") - }); - - var results = await searchResults.Results.ToListAsync(); + }).ToListAsync(); // Assert var ids = results.Select(l => l.Record.HotelId).ToList(); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs index ae13b799fa04..747e1ede5fc9 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs @@ -226,13 +226,12 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync(bool include await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 3, new() { IncludeVectors = includeVectors - }); + }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); var ids = searchResults.Select(l => l.Record.HotelId.ToString()).ToList(); Assert.Equal("11111111-1111-1111-1111-111111111111", ids[0]); @@ -264,13 +263,12 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 2, new() { Skip = 2 - }); + }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); var ids = searchResults.Select(l => l.Record.HotelId.ToString()).ToList(); Assert.Equal("33333333-3333-3333-3333-333333333333", ids[0]); @@ -297,13 +295,12 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync(VectorSearc await sut.UpsertAsync([hotel4, hotel2, hotel3, hotel1]); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 4, new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([30f, 31f, 32f, 33f]), top: 4, new() { OldFilter = filter, - }); + }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); var actualIds = searchResults.Select(l => l.Record.HotelId.ToString()).ToList(); Assert.Equal(expectedIds, actualIds); @@ -341,13 +338,12 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAndDifferentDataT await sut.UpsertAsync([hotel4, hotel2, hotel5, hotel3, hotel1]); // Act - var actual = await sut.VectorizedSearchAsync(new ReadOnlyMemory([40f, 40f, 40f, 40f]), top: 4, new() + var searchResults = await sut.VectorizedSearchAsync(new ReadOnlyMemory([40f, 40f, 40f, 40f]), top: 4, new() { OldFilter = filter, - }); + }).ToListAsync(); // Assert - var searchResults = await actual.Results.ToListAsync(); var actualIds = searchResults.Select(l => l.Record.HotelId.ToString()).ToList(); Assert.Single(actualIds); @@ -474,6 +470,7 @@ private VectorStoreRecordDefinition GetTestHotelRecordDefinition() new VectorStoreRecordDataProperty("Description", typeof(string)), new VectorStoreRecordDataProperty("Timestamp", typeof(DateTimeOffset)), new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), 4) {IndexKind = IndexKind.Hnsw, 
DistanceFunction = DistanceFunction.CosineDistance } + ] }; } diff --git a/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs b/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs index 9d9d072e0b07..98d84a301c6e 100644 --- a/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs +++ b/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Collections.ObjectModel; using System.IO; +using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.Configuration; @@ -103,11 +104,14 @@ public Task>> GenerateEmbeddingsAsync(IList protected sealed class VectorizedSearchWrapper(IVectorizedSearch vectorizedSearch, ITextEmbeddingGenerationService textEmbeddingGeneration) : IVectorizableTextSearch { /// - public async Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async IAsyncEnumerable> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { var vectorizedQuery = await textEmbeddingGeneration!.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); - return await vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, top, options, cancellationToken); + await foreach (var result in vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, top, options, cancellationToken)) + { + yield return result; + } } /// diff --git a/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs b/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs index eb9ed46823e3..d8e33919c2e3 100644 --- a/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs +++ b/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs @@ -10,12 +10,12 @@ internal sealed class MockVectorizableTextSearch : IVectorizableTextSea public MockVectorizableTextSearch(IEnumerable> searchResults) { - this._searchResults = ToAsyncEnumerable(searchResults); + this._searchResults = searchResults.ToAsyncEnumerable(); } - public Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) { - return Task.FromResult(new VectorSearchResults(this._searchResults)); + return this._searchResults; } /// @@ -27,12 +27,4 @@ public Task> VectorizableTextSearchAsync(string sea serviceKey is null && serviceType.IsInstanceOfType(this) ? this : null; } - - private static async IAsyncEnumerable> ToAsyncEnumerable(IEnumerable> searchResults) - { - foreach (var result in searchResults) - { - yield return result; - } - } } diff --git a/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs b/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs index a0103059d72c..dea90327d1ba 100644 --- a/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs +++ b/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs @@ -116,27 +116,27 @@ public VectorStoreTextSearch( } /// - public async Task> SearchAsync(string query, TextSearchOptions? 
searchOptions = null, CancellationToken cancellationToken = default) + public Task> SearchAsync(string query, TextSearchOptions? searchOptions = null, CancellationToken cancellationToken = default) { - VectorSearchResults searchResponse = await this.ExecuteVectorSearchAsync(query, searchOptions, cancellationToken).ConfigureAwait(false); + var searchResponse = this.ExecuteVectorSearchAsync(query, searchOptions, cancellationToken); - return new KernelSearchResults(this.GetResultsAsStringAsync(searchResponse.Results, cancellationToken), searchResponse.TotalCount, searchResponse.Metadata); + return Task.FromResult(new KernelSearchResults(this.GetResultsAsStringAsync(searchResponse, cancellationToken))); } /// - public async Task> GetTextSearchResultsAsync(string query, TextSearchOptions? searchOptions = null, CancellationToken cancellationToken = default) + public Task> GetTextSearchResultsAsync(string query, TextSearchOptions? searchOptions = null, CancellationToken cancellationToken = default) { - VectorSearchResults searchResponse = await this.ExecuteVectorSearchAsync(query, searchOptions, cancellationToken).ConfigureAwait(false); + var searchResponse = this.ExecuteVectorSearchAsync(query, searchOptions, cancellationToken); - return new KernelSearchResults(this.GetResultsAsTextSearchResultAsync(searchResponse.Results, cancellationToken), searchResponse.TotalCount, searchResponse.Metadata); + return Task.FromResult(new KernelSearchResults(this.GetResultsAsTextSearchResultAsync(searchResponse, cancellationToken))); } /// - public async Task> GetSearchResultsAsync(string query, TextSearchOptions? searchOptions = null, CancellationToken cancellationToken = default) + public Task> GetSearchResultsAsync(string query, TextSearchOptions? searchOptions = null, CancellationToken cancellationToken = default) { - VectorSearchResults searchResponse = await this.ExecuteVectorSearchAsync(query, searchOptions, cancellationToken).ConfigureAwait(false); + var searchResponse = this.ExecuteVectorSearchAsync(query, searchOptions, cancellationToken); - return new KernelSearchResults(this.GetResultsAsRecordAsync(searchResponse.Results, cancellationToken), searchResponse.TotalCount, searchResponse.Metadata); + return Task.FromResult(new KernelSearchResults(this.GetResultsAsRecordAsync(searchResponse, cancellationToken))); } #region private @@ -194,7 +194,7 @@ private TextSearchStringMapper CreateTextSearchStringMapper() /// What to search for. /// Search options. /// The to monitor for cancellation requests. The default is . - private async Task> ExecuteVectorSearchAsync(string query, TextSearchOptions? searchOptions, CancellationToken cancellationToken) + private async IAsyncEnumerable> ExecuteVectorSearchAsync(string query, TextSearchOptions? 
searchOptions, [EnumeratorCancellation] CancellationToken cancellationToken) { searchOptions ??= new TextSearchOptions(); var vectorSearchOptions = new VectorSearchOptions @@ -209,10 +209,18 @@ private async Task> ExecuteVectorSearchAsync(string { var vectorizedQuery = await this._textEmbeddingGeneration!.GenerateEmbeddingAsync(query, cancellationToken: cancellationToken).ConfigureAwait(false); - return await this._vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, searchOptions.Top, vectorSearchOptions, cancellationToken).ConfigureAwait(false); + await foreach (var result in this._vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, searchOptions.Top, vectorSearchOptions, cancellationToken).ConfigureAwait(false)) + { + yield return result; + } + + yield break; } - return await this._vectorizableTextSearch!.VectorizableTextSearchAsync(query, searchOptions.Top, vectorSearchOptions, cancellationToken).ConfigureAwait(false); + await foreach (var result in this._vectorizableTextSearch!.VectorizableTextSearchAsync(query, searchOptions.Top, vectorSearchOptions, cancellationToken).ConfigureAwait(false)) + { + yield return result; + } } /// diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs index 8f0fea275978..ccb606ce9465 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Collections.ObjectModel; +using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; @@ -126,10 +127,13 @@ public Task>> GenerateEmbeddingsAsync(IList public sealed class VectorizedSearchWrapper(IVectorizedSearch vectorizedSearch, ITextEmbeddingGenerationService textEmbeddingGeneration) : IVectorizableTextSearch { /// - public async Task> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async IAsyncEnumerable> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { - var vectorizedQuery = await textEmbeddingGeneration!.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); - return await vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, top, options, cancellationToken); + var vectorizedQuery = await textEmbeddingGeneration.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); + await foreach (var result in vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, top, options, cancellationToken)) + { + yield return result; + } } /// diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs index 7675c412f293..bda0fbac24a2 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs @@ -82,18 +82,18 @@ public async Task RecordCRUD() received = await collection.GetAsync(updated.Id, new() { IncludeVectors = true }); AssertEquality(updated, received); - VectorSearchResult vectorSearchResult = await (await collection.VectorizedSearchAsync(inserted.Floats, top: 3, new() + VectorSearchResult vectorSearchResult = await (collection.VectorizedSearchAsync(inserted.Floats, top: 3, new() { VectorProperty = r => r.Floats, IncludeVectors = true - })).Results.SingleAsync(); + })).SingleAsync(); AssertEquality(updated, vectorSearchResult.Record); - vectorSearchResult = await (await collection.VectorizedSearchAsync(inserted.Floats, top: 3, new() + vectorSearchResult = await (collection.VectorizedSearchAsync(inserted.Floats, top: 3, new() { VectorProperty = r => r.Floats, IncludeVectors = false - })).Results.SingleAsync(); + })).SingleAsync(); // Make sure the vectors are not included in the result. 
Assert.Equal(0, vectorSearchResult.Record.Floats.Length); diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs index ec4b0d9b7ef1..39fcf6ddc1c6 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs @@ -326,12 +326,11 @@ protected virtual async Task TestFilterAsync( // Execute the query against the vector store, once using the strongly typed filter // and once using the dynamic filter - var results = await fixture.Collection.VectorizedSearchAsync( - new ReadOnlyMemory([1, 2, 3]), - top: fixture.TestData.Count, - new() { Filter = filter }); - - var actual = await results.Results.Select(r => r.Record).OrderBy(r => r.Key).ToListAsync(); + var actual = await fixture.Collection.VectorizedSearchAsync( + new ReadOnlyMemory([1, 2, 3]), + top: fixture.TestData.Count, + new() { Filter = filter }) + .Select(r => r.Record).OrderBy(r => r.Key).ToListAsync(); if (actual.Count != expected.Count) { @@ -345,12 +344,11 @@ protected virtual async Task TestFilterAsync( if (fixture.TestDynamic) { - var dynamicResults = await fixture.DynamicCollection.VectorizedSearchAsync( - new ReadOnlyMemory([1, 2, 3]), - top: fixture.TestData.Count, - new() { Filter = dynamicFilter }); - - var dynamicActual = await dynamicResults.Results.Select(r => r.Record).OrderBy(r => r[nameof(FilterRecord.Key)]).ToListAsync(); + var dynamicActual = await fixture.DynamicCollection.VectorizedSearchAsync( + new ReadOnlyMemory([1, 2, 3]), + top: fixture.TestData.Count, + new() { Filter = dynamicFilter }) + .Select(r => r.Record).OrderBy(r => r[nameof(FilterRecord.Key)]).ToListAsync(); if (dynamicActual.Count != expected.Count) { @@ -383,15 +381,14 @@ protected virtual async Task TestLegacyFilterAsync( Assert.Fail("The test returns all results, and so is unreliable"); } - var results = await fixture.Collection.VectorizedSearchAsync( - new ReadOnlyMemory([1, 2, 3]), - top: fixture.TestData.Count, - new() - { - OldFilter = legacyFilter - }); - - var actual = await results.Results.Select(r => r.Record).OrderBy(r => r.Key).ToListAsync(); + var actual = await fixture.Collection.VectorizedSearchAsync( + new ReadOnlyMemory([1, 2, 3]), + top: fixture.TestData.Count, + new() + { + OldFilter = legacyFilter + }) + .Select(r => r.Record).OrderBy(r => r.Key).ToListAsync(); foreach (var (e, a) in expected.Zip(actual, (e, a) => (e, a))) { diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs index 7ff11de3c7dd..df30a4c0abb9 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs @@ -29,10 +29,8 @@ public async Task SearchShouldReturnExpectedResultsAsync() // Act // All records have the same vector, but the third contains Grapes, so searching for // Grapes should return the third record first. 
- var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["Grapes"], top: 3); - + var results = await hybridSearch!.HybridSearchAsync(vector, ["Grapes"], top: 3).ToListAsync(); // Assert - var results = await searchResult.Results.ToListAsync(); Assert.Equal(3, results.Count); Assert.Equal(3, results[0].Record.Code); @@ -55,10 +53,9 @@ public async Task SearchWithFilterShouldReturnExpectedResultsAsync() OldFilter = new VectorSearchFilter().EqualTo("Code", 1) }; #pragma warning restore CS0618 // Type or member is obsolete - var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], top: 3, options); + var results = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], top: 3, options).ToListAsync(); // Assert - var results = await searchResult.Results.ToListAsync(); Assert.Single(results); Assert.Equal(1, results[0].Record.Code); @@ -75,10 +72,9 @@ public async Task SearchWithTopShouldReturnExpectedResultsAsync() // Act // All records have the same vector, but the second contains Oranges, so the // second should be returned first. - var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], top: 1); + var results = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], top: 1).ToListAsync(); // Assert - var results = await searchResult.Results.ToListAsync(); Assert.Single(results); Assert.Equal(2, results[0].Record.Code); @@ -95,10 +91,9 @@ public async Task SearchWithSkipShouldReturnExpectedResultsAsync() // Act // All records have the same vector, but the first and third contain healthy, // so when skipping the first two results, we should get the second record. - var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["healthy"], top: 3, new() { Skip = 2 }); + var results = await hybridSearch!.HybridSearchAsync(vector, ["healthy"], top: 3, new() { Skip = 2 }).ToListAsync(); // Assert - var results = await searchResult.Results.ToListAsync(); Assert.Single(results); Assert.Equal(2, results[0].Record.Code); @@ -113,10 +108,9 @@ public async Task SearchWithMultipleKeywordsShouldRankMatchedKeywordsHigherAsync var vector = new ReadOnlyMemory([1, 0, 0, 0]); // Act - var searchResult = await hybridSearch!.HybridSearchAsync(vector, ["tangy", "nourishing"], top: 3); + var results = await hybridSearch!.HybridSearchAsync(vector, ["tangy", "nourishing"], top: 3).ToListAsync(); // Assert - var results = await searchResult.Results.ToListAsync(); Assert.Equal(3, results.Count); Assert.True(results[0].Record.Code.Equals(1) || results[0].Record.Code.Equals(2)); @@ -133,17 +127,15 @@ public async Task SearchWithMultiTextRecordSearchesRequestedFieldAsync() var vector = new ReadOnlyMemory([1, 0, 0, 0]); // Act - var searchResult1 = await hybridSearch!.HybridSearchAsync(vector, ["Apples"], top: 3, new() { AdditionalProperty = r => r.Text2 }); - var searchResult2 = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], top: 3, new() { AdditionalProperty = r => r.Text2 }); + var results1 = await hybridSearch!.HybridSearchAsync(vector, ["Apples"], top: 3, new() { AdditionalProperty = r => r.Text2 }).ToListAsync(); + var results2 = await hybridSearch!.HybridSearchAsync(vector, ["Oranges"], top: 3, new() { AdditionalProperty = r => r.Text2 }).ToListAsync(); // Assert - var results1 = await searchResult1.Results.ToListAsync(); Assert.Equal(2, results1.Count); Assert.Equal(2, results1[0].Record.Code); Assert.Equal(1, results1[1].Record.Code); - var results2 = await searchResult2.Results.ToListAsync(); Assert.Equal(2, results2.Count); 
Assert.Equal(1, results2[0].Record.Code); diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs index 25ab73a4f8ab..f6b8b60b77c7 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs @@ -85,7 +85,7 @@ public virtual async Task WaitForDataAsync( for (var i = 0; i < 20; i++) { - var results = await collection.VectorizedSearchAsync( + var results = collection.VectorizedSearchAsync( new ReadOnlyMemory(vector), top: recordCount, new() @@ -94,7 +94,7 @@ public virtual async Task WaitForDataAsync( // so filtered searches show empty results. Add a filter to the seed data check below. Filter = filter }); - var count = await results.Results.CountAsync(); + var count = await results.CountAsync(); if (count == recordCount) { return; diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs index 9724256d40ee..4d4b430a1698 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs @@ -103,26 +103,26 @@ protected async Task SimpleSearch(string distanceFunction, double expectedExactM { await collection.UpsertAsync(insertedRecords); - var searchResult = await collection.VectorizedSearchAsync(baseVector, top: 3); - var results = await searchResult.Results.ToListAsync(); + var searchResult = collection.VectorizedSearchAsync(baseVector, top: 3); + var results = await searchResult.ToListAsync(); VerifySearchResults(expectedRecords, expectedScores, results, includeVectors: false); - searchResult = await collection.VectorizedSearchAsync(baseVector, top: 3, new() { IncludeVectors = true }); - results = await searchResult.Results.ToListAsync(); + searchResult = collection.VectorizedSearchAsync(baseVector, top: 3, new() { IncludeVectors = true }); + results = await searchResult.ToListAsync(); VerifySearchResults(expectedRecords, expectedScores, results, includeVectors: true); for (int skip = 0; skip <= insertedRecords.Count; skip++) { for (int top = Math.Max(1, skip); top <= insertedRecords.Count; top++) { - searchResult = await collection.VectorizedSearchAsync(baseVector, + searchResult = collection.VectorizedSearchAsync(baseVector, top: top, new() { Skip = skip, IncludeVectors = true }); - results = await searchResult.Results.ToListAsync(); + results = await searchResult.ToListAsync(); VerifySearchResults( expectedRecords.Skip(skip).Take(top).ToArray(), From d1a8b510e637b04d88b562e6372f7422a7c0d65d Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Mon, 14 Apr 2025 12:43:05 +0200 Subject: [PATCH 43/63] .Net: Remove custom mappers (#11530) We obsoleted custom mappers in #11366. However, while work on IEmbeddingGenerator integration (#10492),I realized that for proper embedding generation, we need to generate all embeddings for an UpsertAsync batch in one call, rather than one-by-one (which would be many roundtrips). 
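To illustrate the point, here is a minimal sketch of what batching the embedding calls during an upsert could look like (the Hotel record type and the service/collection instances are assumed purely for this example; this is not code added by this PR, and the exact API shapes may differ):

    // Illustrative sketch only; Hotel and the passed-in instances are assumptions for the example.
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Extensions.VectorData;
    using Microsoft.SemanticKernel.Embeddings;

    public sealed class Hotel
    {
        [VectorStoreRecordKey]
        public string HotelId { get; set; } = string.Empty;

        [VectorStoreRecordData]
        public string Description { get; set; } = string.Empty;

        [VectorStoreRecordVector(4)]
        public ReadOnlyMemory<float> DescriptionEmbedding { get; set; }
    }

    internal static class BatchedEmbeddingUpsertExample
    {
        public static async Task UpsertWithBatchedEmbeddingsAsync(
            IVectorStoreRecordCollection<string, Hotel> collection,
            ITextEmbeddingGenerationService embeddingService,
            IReadOnlyList<Hotel> records,
            CancellationToken cancellationToken = default)
        {
            // One embedding-service round trip for the whole batch,
            // instead of one GenerateEmbeddingAsync call per record.
            IList<ReadOnlyMemory<float>> embeddings = await embeddingService.GenerateEmbeddingsAsync(
                records.Select(r => r.Description).ToList(),
                cancellationToken: cancellationToken);

            // Attach each embedding to its record before upserting.
            for (var i = 0; i < records.Count; i++)
            {
                records[i].DescriptionEmbedding = embeddings[i];
            }

            // Upsert the whole batch in a single call.
            await collection.UpsertAsync(records, cancellationToken);
        }
    }
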
This means that mapping can't happen record by record, but must work on a batch, breaking the current design of IVectorStoreRecordMapper. As IVectorStoreRecordMapper is getting in the way, and custom mappers is a feature we don't think many people are using, this PR removes support for it (making it obsolete-with-error). --- .../AzureAISearchFactory.cs | 138 ----------- .../QdrantFactory.cs | 143 ------------ .../VectorStore_DataIngestion_CustomMapper.cs | 215 ------------------ .../Memory/VectorStore_Langchain_Interop.cs | 29 --- .../Step6_Use_CustomMapper.cs | 147 ------------ ...ISearchVectorStoreRecordCollectionTests.cs | 98 +------- ...MongoDBVectorStoreRecordCollectionTests.cs | 78 ------- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 85 ------- .../AzureAISearchDynamicDataModelMapper.cs | 2 +- ...zureAISearchVectorStoreRecordCollection.cs | 32 ++- ...earchVectorStoreRecordCollectionOptions.cs | 2 +- ...mosDBMongoDBVectorStoreRecordCollection.cs | 28 +-- ...ngoDBVectorStoreRecordCollectionOptions.cs | 2 +- ...zureCosmosDBNoSQLDynamicDataModelMapper.cs | 4 +- ...osmosDBNoSQLVectorStoreRecordCollection.cs | 26 +-- ...NoSQLVectorStoreRecordCollectionOptions.cs | 2 +- ...ureCosmosDBNoSQLVectorStoreRecordMapper.cs | 4 +- .../ICosmosNoSQLMapper.cs | 24 ++ .../MongoDBVectorStoreRecordCollection.cs | 28 +-- ...ngoDBVectorStoreRecordCollectionOptions.cs | 2 +- .../PineconeVectorStoreRecordCollection.cs | 8 +- ...econeVectorStoreRecordCollectionOptions.cs | 2 +- .../PineconeVectorStoreRecordMapper.cs | 4 +- .../PostgresVectorStoreRecordCollection.cs | 8 +- ...tgresVectorStoreRecordCollectionOptions.cs | 2 +- .../PostgresVectorStoreRecordMapper.cs | 3 - ...drantVectorStoreCollectionSearchMapping.cs | 6 +- .../QdrantVectorStoreRecordCollection.cs | 8 +- ...drantVectorStoreRecordCollectionOptions.cs | 2 +- .../QdrantVectorStoreRecordMapper.cs | 3 - .../IRedisJsonMapper.cs | 24 ++ ...RedisHashSetVectorStoreRecordCollection.cs | 8 +- ...shSetVectorStoreRecordCollectionOptions.cs | 2 +- .../RedisHashSetVectorStoreRecordMapper.cs | 3 - .../RedisJsonDynamicDataModelMapper.cs | 5 +- .../RedisJsonVectorStoreRecordCollection.cs | 23 +- ...sJsonVectorStoreRecordCollectionOptions.cs | 2 +- .../RedisJsonVectorStoreRecordMapper.cs | 4 +- .../RecordMapper.cs | 4 +- .../SqlServerVectorStoreRecordCollection.cs | 9 +- ...erverVectorStoreRecordCollectionOptions.cs | 2 +- .../SqliteVectorStoreRecordCollection.cs | 8 +- ...qliteVectorStoreRecordCollectionOptions.cs | 2 +- .../SqliteVectorStoreRecordMapper.cs | 4 +- .../IWeaviateMapper.cs | 24 ++ .../WeaviateDynamicDataModelMapper.cs | 4 +- .../WeaviateVectorStoreRecordCollection.cs | 30 +-- ...viateVectorStoreRecordCollectionOptions.cs | 2 +- .../WeaviateVectorStoreRecordMapper.cs | 2 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 78 ------- ...ineconeVectorStoreRecordCollectionTests.cs | 5 +- ...VectorStoreCollectionSearchMappingTests.cs | 19 +- .../QdrantVectorStoreRecordCollectionTests.cs | 92 +------- ...HashSetVectorStoreRecordCollectionTests.cs | 95 +------- ...disJsonVectorStoreRecordCollectionTests.cs | 80 ------- ...eaviateVectorStoreRecordCollectionTests.cs | 82 ------- .../VectorStorage/IVectorStoreRecordMapper.cs | 2 +- ...ISearchVectorStoreRecordCollectionTests.cs | 25 -- .../QdrantVectorStoreRecordCollectionTests.cs | 24 -- ...HashSetVectorStoreRecordCollectionTests.cs | 29 --- ...disJsonVectorStoreRecordCollectionTests.cs | 29 --- .../Memory/MongoDB/IMongoDBMapper.cs | 22 ++ .../MongoDB/MongoDBDynamicDataModelMapper.cs | 2 +- 
.../MongoDB/MongoDBVectorStoreRecordMapper.cs | 2 +- .../SqlServerVectorStoreTests.cs | 90 -------- 65 files changed, 187 insertions(+), 1786 deletions(-) delete mode 100644 dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs delete mode 100644 dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs delete mode 100644 dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs delete mode 100644 dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/ICosmosNoSQLMapper.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.Redis/IRedisJsonMapper.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateMapper.cs create mode 100644 dotnet/src/InternalUtilities/connectors/Memory/MongoDB/IMongoDBMapper.cs diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs deleted file mode 100644 index b76b3ae188cb..000000000000 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/AzureAISearchFactory.cs +++ /dev/null @@ -1,138 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Text.Json; -using System.Text.Json.Nodes; -using System.Text.Json.Serialization; -using Azure.Search.Documents.Indexes; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Connectors.AzureAISearch; - -namespace Memory.VectorStoreLangchainInterop; - -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - -/// -/// Contains a factory method that can be used to create an Azure AI Search vector store that is compatible with datasets ingested using Langchain. -/// -/// -/// This class is used with the sample. -/// -public static class AzureAISearchFactory -{ - /// - /// Record definition that matches the storage format used by Langchain for Azure AI Search. - /// - private static readonly VectorStoreRecordDefinition s_recordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("id", typeof(string)), - new VectorStoreRecordDataProperty("content", typeof(string)), - new VectorStoreRecordDataProperty("metadata", typeof(string)), - new VectorStoreRecordVectorProperty("content_vector", typeof(ReadOnlyMemory), 1536) - } - }; - - /// - /// Create a new Azure AI Search-backed that can be used to read data that was ingested using Langchain. - /// - /// Azure AI Search client that can be used to manage the list of indices in an Azure AI Search Service. - /// The . - public static IVectorStore CreateQdrantLangchainInteropVectorStore(SearchIndexClient searchIndexClient) - => new AzureAISearchLangchainInteropVectorStore(new AzureAISearchVectorStore(searchIndexClient), searchIndexClient); - - private sealed class AzureAISearchLangchainInteropVectorStore( - IVectorStore innerStore, - SearchIndexClient searchIndexClient) - : IVectorStore - { - private readonly SearchIndexClient _searchIndexClient = searchIndexClient; - - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) - where TKey : notnull - where TRecord : notnull - { - if (typeof(TKey) != typeof(string) || typeof(TRecord) != typeof(LangchainDocument)) - { - throw new NotSupportedException("This VectorStore is only usable with string keys and LangchainDocument record types"); - } - - // Create an Azure AI Search collection. To be compatible with Langchain - // we need to use a custom record definition that matches the - // schema used by Langchain. We also need to use a custom mapper - // since the Langchain schema includes a metadata field that is - // a JSON string containing the source property. Parsing this - // string and extracting the source is not supported by the default mapper. - return (new AzureAISearchVectorStoreRecordCollection( - _searchIndexClient, - name, - new() - { - VectorStoreRecordDefinition = s_recordDefinition, - JsonObjectCustomMapper = new LangchainInteropMapper() as IVectorStoreRecordMapper - }) as IVectorStoreRecordCollection)!; - } - - public object? GetService(Type serviceType, object? serviceKey = null) => innerStore.GetService(serviceType, serviceKey); - - public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) => innerStore.ListCollectionNamesAsync(cancellationToken); - } - - /// - /// Custom mapper to map the metadata string field, since it contains JSON as a string and this is not supported - /// automatically by the built in mapper. - /// - private sealed class LangchainInteropMapper : IVectorStoreRecordMapper, JsonObject> - { - public JsonObject MapFromDataToStorageModel(LangchainDocument dataModel) - { - var storageDocument = new AzureAISearchLangchainDocument() - { - Key = dataModel.Key, - Content = dataModel.Content, - Metadata = $"{{\"source\": \"{dataModel.Source}\"}}", - Embedding = dataModel.Embedding - }; - - return JsonSerializer.SerializeToNode(storageDocument)!.AsObject(); - } - - public LangchainDocument MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) - { - var storageDocument = JsonSerializer.Deserialize(storageModel)!; - var metadataDocument = JsonSerializer.Deserialize(storageDocument.Metadata); - var source = metadataDocument?["source"]?.AsValue()?.ToString(); - - return new LangchainDocument() - { - Key = storageDocument.Key, - Content = storageDocument.Content, - Source = source!, - Embedding = storageDocument.Embedding - }; - } - } - - /// - /// Model class that matches the storage format used by Langchain for Azure AI Search. - /// - private sealed class AzureAISearchLangchainDocument - { - [JsonPropertyName("id")] - public string Key { get; set; } - - [JsonPropertyName("content")] - public string Content { get; set; } - - /// - /// The storage format used by Langchain stores the source information - /// in the metadata field as a JSON string. - /// E.g. {"source": "my-doc"} - /// - [JsonPropertyName("metadata")] - public string Metadata { get; set; } - - [JsonPropertyName("content_vector")] - public ReadOnlyMemory Embedding { get; set; } - } -} diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs deleted file mode 100644 index 9a56141ff9c9..000000000000 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/QdrantFactory.cs +++ /dev/null @@ -1,143 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Connectors.Qdrant; -using Qdrant.Client; -using Qdrant.Client.Grpc; - -namespace Memory.VectorStoreLangchainInterop; - -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - -/// -/// Contains a factory method that can be used to create a Qdrant vector store that is compatible with datasets ingested using Langchain. -/// -/// -/// This class is used with the sample. -/// -public static class QdrantFactory -{ - /// - /// Record definition that matches the storage format used by Langchain for Qdrant. - /// There is no need to list the data fields, since they have no indexing requirements and Qdrant - /// doesn't require individual fields to be defined on index creation. - /// - private static readonly VectorStoreRecordDefinition s_recordDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(Guid)), - new VectorStoreRecordVectorProperty("Embedding", typeof(ReadOnlyMemory), 1536) { StoragePropertyName = "embedding" } - } - }; - - /// - /// Create a new Qdrant-backed that can be used to read data that was ingested using Langchain. - /// - /// Qdrant client that can be used to manage the collections and points in a Qdrant store. - /// The . - public static IVectorStore CreateQdrantLangchainInteropVectorStore(QdrantClient qdrantClient) - => new QdrantLangchainInteropVectorStore(new QdrantVectorStore(qdrantClient), qdrantClient); - - private sealed class QdrantLangchainInteropVectorStore( - IVectorStore innerStore, - QdrantClient qdrantClient) - : IVectorStore - { - private readonly QdrantClient _qdrantClient = qdrantClient; - - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) - where TKey : notnull - where TRecord : notnull - { - // Create a Qdrant collection. To be compatible with Langchain - // we need to use a custom record definition that matches the - // schema used by Langchain. We also need to use a custom mapper - // since the Langchain schema includes a metadata field that is - // a struct and this isn't supported by the default mapper. - // Since langchain creates collections without named vector support - // we should set HasNamedVectors to false. - var collection = new QdrantVectorStoreRecordCollection>( - _qdrantClient, - name, - new() - { - HasNamedVectors = false, - VectorStoreRecordDefinition = s_recordDefinition, - PointStructCustomMapper = new LangchainInteropMapper() - }); - - // If the user asked for a guid key, we can return the collection as is. - if (typeof(TKey) == typeof(Guid) && typeof(TRecord) == typeof(LangchainDocument)) - { - return (collection as IVectorStoreRecordCollection)!; - } - -#if DISABLED_FOR_NOW // TODO: See note on MappingVectorStoreRecordCollection - // If the user asked for a string key, we can add a decorator which converts back and forth between string and guid. - // The string that the user provides will still need to contain a valid guid, since the Langchain created collection - // uses guid keys. - // Supporting string keys like this is useful since it means you can work with the collection in the same way as with - // collections from other vector stores that support string keys. 
- if (typeof(TKey) == typeof(string) && typeof(TRecord) == typeof(LangchainDocument)) - { - var stringKeyCollection = new MappingVectorStoreRecordCollection, LangchainDocument>( - collection, - p => Guid.Parse(p), - i => i.ToString("D"), - p => new LangchainDocument { Key = Guid.Parse(p.Key), Content = p.Content, Source = p.Source, Embedding = p.Embedding }, - i => new LangchainDocument { Key = i.Key.ToString("D"), Content = i.Content, Source = i.Source, Embedding = i.Embedding }); - - return (stringKeyCollection as IVectorStoreRecordCollection)!; - } -#endif - - throw new NotSupportedException("This VectorStore is only usable with Guid keys and LangchainDocument record types or string keys and LangchainDocument record types"); - } - - public object? GetService(Type serviceType, object? serviceKey = null) => innerStore.GetService(serviceType, serviceKey); - - public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) => innerStore.ListCollectionNamesAsync(cancellationToken); - } - - /// - /// A custom mapper that is required to map the metadata struct. While the other - /// fields in the record can be mapped by the default Qdrant mapper, the default - /// mapper doesn't support complex types like metadata, which is a Qdrant struct - /// containing a source field. - /// - private sealed class LangchainInteropMapper : IVectorStoreRecordMapper, PointStruct> - { - public PointStruct MapFromDataToStorageModel(LangchainDocument dataModel) - { - var metadataStruct = new Struct() - { - Fields = { ["source"] = dataModel.Source } - }; - - var pointStruct = new PointStruct() - { - Id = new PointId() { Uuid = dataModel.Key.ToString("D") }, - Vectors = new Vectors() { Vector = dataModel.Embedding.ToArray() }, - Payload = - { - ["page_content"] = dataModel.Content, - ["metadata"] = new Value() { StructValue = metadataStruct } - }, - }; - - return pointStruct; - } - - public LangchainDocument MapFromStorageToDataModel(PointStruct storageModel, StorageToDataModelMapperOptions options) - { - return new LangchainDocument() - { - Key = new Guid(storageModel.Id.Uuid), - Content = storageModel.Payload["page_content"].StringValue, - Source = storageModel.Payload["metadata"].StructValue.Fields["source"].StringValue, - Embedding = options.IncludeVectors ? storageModel.Vectors.Vector.Data.ToArray() : null - }; - } - } -} diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs deleted file mode 100644 index ca652ff7d6e9..000000000000 --- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs +++ /dev/null @@ -1,215 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Text.Json; -using System.Text.Json.Nodes; -using Azure.Identity; -using Memory.VectorStoreFixtures; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Connectors.AzureOpenAI; -using Microsoft.SemanticKernel.Connectors.Redis; -using Microsoft.SemanticKernel.Embeddings; -using StackExchange.Redis; - -namespace Memory; - -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - -/// -/// An example showing how to ingest data into a vector store using with a custom mapper. -/// In this example, the storage model differs significantly from the data model, so a custom mapper is used to map between the two. 
-/// A is used to define the schema of the storage model, and this means that the connector -/// will not try and infer the schema from the data model. -/// In storage the data is stored as a JSON object that looks similar to this: -/// -/// { -/// "Term": "API", -/// "Definition": "Application Programming Interface. A set of rules and specifications that allow software components to communicate and exchange data.", -/// "DefinitionEmbedding": [ ... ] -/// } -/// -/// However, the data model is a class with a property for key and two dictionaries for the data (Term and Definition) and vector (DefinitionEmbedding). -/// -/// The example shows the following steps: -/// 1. Create an embedding generator. -/// 2. Create a Redis Vector Store using a custom factory for creating collections. -/// When constructing a collection, the factory injects a custom mapper that maps between the data model and the storage model if required. -/// 3. Ingest some data into the vector store. -/// 4. Read the data back from the vector store. -/// -/// You need a local instance of Docker running, since the associated fixture will try and start a Redis container in the local docker instance to run against. -/// -public class VectorStore_DataIngestion_CustomMapper(ITestOutputHelper output, VectorStoreRedisContainerFixture redisFixture) : BaseTest(output), IClassFixture -{ - /// - /// A record definition for the glossary entries that defines the storage schema of the record. - /// - private static readonly VectorStoreRecordDefinition s_glossaryDefinition = new() - { - Properties = new List - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("Term", typeof(string)), - new VectorStoreRecordDataProperty("Definition", typeof(string)), - new VectorStoreRecordVectorProperty("DefinitionEmbedding", typeof(ReadOnlyMemory), 1536) { DistanceFunction = DistanceFunction.DotProductSimilarity } - } - }; - - [Fact] - public async Task ExampleAsync() - { - // Create an embedding generation service. - var textEmbeddingGenerationService = new AzureOpenAITextEmbeddingGenerationService( - TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, - TestConfiguration.AzureOpenAIEmbeddings.Endpoint, - new AzureCliCredential()); - - // Initiate the docker container and construct the vector store using the custom factory for creating collections. - await redisFixture.ManualInitializeAsync(); - ConnectionMultiplexer redis = ConnectionMultiplexer.Connect("localhost:6379"); - - var database = redis.GetDatabase(); - var vectorStore = new CustomRedisVectorStore(new RedisVectorStore(database), database); - - // Get and create collection if it doesn't exist, using the record definition containing the storage model. - var collection = vectorStore.GetCollection("skglossary", s_glossaryDefinition); - await collection.CreateCollectionIfNotExistsAsync(); - - // Create glossary entries and generate embeddings for them. - var glossaryEntries = CreateGlossaryEntries().ToList(); - var tasks = glossaryEntries.Select(entry => Task.Run(async () => - { - entry.Vectors["DefinitionEmbedding"] = await textEmbeddingGenerationService.GenerateEmbeddingAsync((string)entry.Data["Definition"]); - })); - await Task.WhenAll(tasks); - - // Upsert the glossary entries into the collection and return their keys. - var upsertedKeysTasks = glossaryEntries.Select(x => collection.UpsertAsync(x)); - var upsertedKeys = await Task.WhenAll(upsertedKeysTasks); - - // Retrieve one of the upserted records from the collection. 
- var upsertedRecord = await collection.GetAsync(upsertedKeys.First(), new() { IncludeVectors = true }); - - // Write upserted keys and one of the upserted records to the console. - Console.WriteLine($"Upserted keys: {string.Join(", ", upsertedKeys)}"); - Console.WriteLine($"Upserted record: {JsonSerializer.Serialize(upsertedRecord)}"); - } - - /// - /// A custom mapper that maps between the data model and the storage model. - /// - private sealed class Mapper : IVectorStoreRecordMapper - { - public (string Key, JsonNode Node) MapFromDataToStorageModel(GenericDataModel dataModel) - { - var jsonObject = new JsonObject(); - - jsonObject.Add("Term", dataModel.Data["Term"].ToString()); - jsonObject.Add("Definition", dataModel.Data["Definition"].ToString()); - - var vector = (ReadOnlyMemory)dataModel.Vectors["DefinitionEmbedding"]; - var jsonArray = new JsonArray(vector.ToArray().Select(x => JsonValue.Create(x)).ToArray()); - jsonObject.Add("DefinitionEmbedding", jsonArray); - - return (dataModel.Key, jsonObject); - } - - public GenericDataModel MapFromStorageToDataModel((string Key, JsonNode Node) storageModel, StorageToDataModelMapperOptions options) - { - var dataModel = new GenericDataModel - { - Key = storageModel.Key, - Data = new Dictionary - { - { "Term", (string)storageModel.Node["Term"]! }, - { "Definition", (string)storageModel.Node["Definition"]! } - }, - Vectors = new Dictionary - { - { "DefinitionEmbedding", new ReadOnlyMemory(storageModel.Node["DefinitionEmbedding"]!.AsArray().Select(x => (float)x!).ToArray()) } - } - }; - - return dataModel; - } - } - - private sealed class CustomRedisVectorStore( - IVectorStore innerStore, - IDatabase database) - : IVectorStore - { - private readonly IDatabase _database = database; - - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) - where TKey : notnull - where TRecord : notnull - { - // If the record definition is the glossary definition and the record type is the generic data model, inject the custom mapper into the collection options. - if (vectorStoreRecordDefinition == s_glossaryDefinition && typeof(TRecord) == typeof(GenericDataModel)) - { - var customCollection = new RedisJsonVectorStoreRecordCollection(_database, name, new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition, JsonNodeCustomMapper = new Mapper() }) as IVectorStoreRecordCollection; - return customCollection!; - } - - // Otherwise, just create a standard collection with the default mapper. - var collection = new RedisJsonVectorStoreRecordCollection(_database, name, new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; - return collection!; - } - - public object? GetService(Type serviceType, object? serviceKey = null) => innerStore.GetService(serviceType, serviceKey); - - public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) => innerStore.ListCollectionNamesAsync(cancellationToken); - } - - /// - /// Sample generic data model class that can store any data. - /// - private sealed class GenericDataModel - { - public string Key { get; set; } - - public Dictionary Data { get; set; } - - public Dictionary Vectors { get; set; } - } - - /// - /// Create some sample glossary entries using the generic data model. - /// - /// A list of sample glossary entries. 
-    private static IEnumerable CreateGlossaryEntries()
-    {
-        yield return new GenericDataModel
-        {
-            Key = "1",
-            Data = new()
-            {
-                { "Term", "API" },
-                { "Definition", "Application Programming Interface. A set of rules and specifications that allow software components to communicate and exchange data." }
-            },
-            Vectors = new()
-        };
-
-        yield return new GenericDataModel
-        {
-            Key = "2",
-            Data = new()
-            {
-                { "Term", "Connectors" },
-                { "Definition", "Connectors allow you to integrate with various services that provide AI capabilities, including LLM, AudioToText, TextToAudio, Embedding generation, etc." }
-            },
-            Vectors = new()
-        };
-
-        yield return new GenericDataModel
-        {
-            Key = "3",
-            Data = new()
-            {
-                { "Term", "RAG" },
-                { "Definition", "Retrieval Augmented Generation - a term that refers to the process of retrieving additional data to provide as context to an LLM to use when generating a response (completion) to a user’s question (prompt)." }
-            },
-            Vectors = new()
-        };
-    }
-}
diff --git a/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs b/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs
index 43bfbd4cca07..e72dd1d52a39 100644
--- a/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs
+++ b/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs
@@ -1,13 +1,10 @@
 // Copyright (c) Microsoft. All rights reserved.
 
-using Azure;
 using Azure.Identity;
-using Azure.Search.Documents.Indexes;
 using Memory.VectorStoreLangchainInterop;
 using Microsoft.Extensions.VectorData;
 using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
 using Microsoft.SemanticKernel.Embeddings;
-using Qdrant.Client;
 using StackExchange.Redis;
 using Sdk = Pinecone;
 
@@ -28,32 +25,6 @@ namespace Memory;
 ///
 public class VectorStore_Langchain_Interop(ITestOutputHelper output) : BaseTest(output)
 {
-    ///
-    /// Shows how to read data from an Azure AI Search collection that was created and ingested using Langchain.
-    ///
-    [Fact]
-    public async Task ReadDataFromLangchainAzureAISearchAsync()
-    {
-        var searchIndexClient = new SearchIndexClient(
-            new Uri(TestConfiguration.AzureAISearch.Endpoint),
-            new AzureKeyCredential(TestConfiguration.AzureAISearch.ApiKey));
-        var vectorStore = AzureAISearchFactory.CreateQdrantLangchainInteropVectorStore(searchIndexClient);
-        await this.ReadDataFromCollectionAsync(vectorStore, "pets");
-    }
-
-    ///
-    /// Shows how to read data from a Qdrant collection that was created and ingested using Langchain.
-    /// Also adds a converter to expose keys as strings containing GUIDs instead of objects,
-    /// to match the document schema of the other vector stores.
-    ///
-    [Fact]
-    public async Task ReadDataFromLangchainQdrantAsync()
-    {
-        var qdrantClient = new QdrantClient("localhost");
-        var vectorStore = QdrantFactory.CreateQdrantLangchainInteropVectorStore(qdrantClient);
-        await this.ReadDataFromCollectionAsync(vectorStore, "pets");
-    }
-
     ///
     /// Shows how to read data from a Pinecone collection that was created and ingested using Langchain.
     ///
diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs b/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs
deleted file mode 100644
index 9600ef839bb1..000000000000
--- a/dotnet/samples/GettingStartedWithVectorStores/Step6_Use_CustomMapper.cs
+++ /dev/null
@@ -1,147 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.Text.Json;
-using System.Text.Json.Nodes;
-using Azure;
-using Azure.Search.Documents.Indexes;
-using Microsoft.Extensions.VectorData;
-using Microsoft.SemanticKernel.Connectors.AzureAISearch;
-using Microsoft.SemanticKernel.Embeddings;
-
-namespace GettingStartedWithVectorStores;
-
-#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete
-
-///
-/// Example that shows how you can use custom mappers if you wish the data model and storage schema to differ.
-///
-public class Step6_Use_CustomMapper(ITestOutputHelper output, VectorStoresFixture fixture) : BaseTest(output), IClassFixture
-{
-    ///
-    /// Example showing how to upsert and query records when using a custom mapper if you wish
-    /// the data model and storage schema to differ.
-    ///
-    /// This example requires an Azure AI Search service to be available.
-    ///
-    [Fact]
-    public async Task UseCustomMapperAsync()
-    {
-        // When using a custom mapper, we still have to describe the storage schema to the vector store
-        // using a record definition. Since the storage schema does not match the data model,
-        // it won't make sense for the vector store to infer the schema from the data model.
-        var recordDefinition = new VectorStoreRecordDefinition
-        {
-            Properties = new List
-            {
-                new VectorStoreRecordKeyProperty("Key", typeof(string)),
-                new VectorStoreRecordDataProperty("Category", typeof(string)),
-                new VectorStoreRecordDataProperty("Term", typeof(string)),
-                new VectorStoreRecordDataProperty("Definition", typeof(string)),
-                new VectorStoreRecordVectorProperty("DefinitionEmbedding", typeof(ReadOnlyMemory), 1536)
-            }
-        };
-
-        // Construct an Azure AI Search vector store collection and
-        // pass in the custom mapper and record definition.
-        var collection = new AzureAISearchVectorStoreRecordCollection(
-            new SearchIndexClient(
-                new Uri(TestConfiguration.AzureAISearch.Endpoint),
-                new AzureKeyCredential(TestConfiguration.AzureAISearch.ApiKey)),
-            "skglossary",
-            new()
-            {
-                JsonObjectCustomMapper = new CustomMapper(),
-                VectorStoreRecordDefinition = recordDefinition
-            });
-
-        // Create the collection if it doesn't exist.
-        // This call will use the schema defined by the record definition
-        // above for creating the collection.
-        await collection.CreateCollectionIfNotExistsAsync();
-
-        // Now we can upsert a record using
-        // the data model, even though it doesn't match the storage schema.
-        var definition = "A set of rules and protocols that allows one software application to interact with another.";
-        await collection.UpsertAsync(new ComplexGlossary
-        {
-            Key = "1",
-            Metadata = new Metadata
-            {
-                Category = "API",
-                Term = "Application Programming Interface"
-            },
-            Definition = definition,
-            DefinitionEmbedding = await fixture.TextEmbeddingGenerationService.GenerateEmbeddingAsync(definition)
-        });
-
-        // Generate an embedding from the search string.
-        var searchVector = await fixture.TextEmbeddingGenerationService.GenerateEmbeddingAsync("How do two software applications interact with another?");
-
-        // Search the vector store.
-        var searchResultItem = await collection.VectorizedSearchAsync(
-            searchVector,
-            top: 1).FirstAsync();
-
-        // Write the search result with its score to the console.
-        Console.WriteLine(searchResultItem.Record.Metadata.Term);
-        Console.WriteLine(searchResultItem.Record.Definition);
-        Console.WriteLine(searchResultItem.Score);
-    }
-
-    ///
-    /// Sample mapper class that maps between the custom data model
-    /// and the JsonObject that should match the storage schema.
- /// - private sealed class CustomMapper : IVectorStoreRecordMapper - { - public JsonObject MapFromDataToStorageModel(ComplexGlossary dataModel) - { - return new JsonObject - { - ["Key"] = dataModel.Key, - ["Category"] = dataModel.Metadata.Category, - ["Term"] = dataModel.Metadata.Term, - ["Definition"] = dataModel.Definition, - ["DefinitionEmbedding"] = JsonSerializer.SerializeToNode(dataModel.DefinitionEmbedding.ToArray()) - }; - } - - public ComplexGlossary MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) - { - return new ComplexGlossary - { - Key = storageModel["Key"]!.ToString(), - Metadata = new Metadata - { - Category = storageModel["Category"]!.ToString(), - Term = storageModel["Term"]!.ToString() - }, - Definition = storageModel["Definition"]!.ToString(), - DefinitionEmbedding = JsonSerializer.Deserialize>(storageModel["DefinitionEmbedding"]) - }; - } - } - - /// - /// Sample model class that represents a glossary entry. - /// This model differs from the model used in previous steps by having a complex property - /// that contains the category and term. - /// - private sealed class ComplexGlossary - { - public string Key { get; set; } - - public Metadata Metadata { get; set; } - - public string Definition { get; set; } - - public ReadOnlyMemory DefinitionEmbedding { get; set; } - } - - private sealed class Metadata - { - public string Category { get; set; } - - public string Term { get; set; } - } -} diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs index e6eea1cffd7b..d5fb1dbd585c 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -4,7 +4,6 @@ using System.Collections.Generic; using System.Linq; using System.Text.Json; -using System.Text.Json.Nodes; using System.Text.Json.Serialization; using System.Threading; using System.Threading.Tasks; @@ -270,49 +269,6 @@ public async Task CanGetManyRecordsWithVectorsAsync(bool useDefinition) Assert.Equal(TestRecordKey2, actual[1].Key); } - [Fact] - public async Task CanGetRecordWithCustomMapperAsync() - { - // Arrange. - var storageObject = JsonSerializer.SerializeToNode(CreateModel(TestRecordKey1, true))!.AsObject(); - - // Arrange GetDocumentAsync mock returning JsonObject. - this._searchClientMock.Setup( - x => x.GetDocumentAsync( - TestRecordKey1, - It.Is(x => !x.SelectedFields.Any()), - this._testCancellationToken)) - .ReturnsAsync(Response.FromValue(storageObject, Mock.Of())); - - // Arrange mapper mock from JsonObject to data model. - var mapperMock = new Mock>(MockBehavior.Strict); - mapperMock.Setup( - x => x.MapFromStorageToDataModel( - storageObject, - It.Is(x => x.IncludeVectors))) - .Returns(CreateModel(TestRecordKey1, true)); - - // Arrange target with custom mapper. - var sut = new AzureAISearchVectorStoreRecordCollection( - this._searchIndexClientMock.Object, - TestCollectionName, - new() - { - JsonObjectCustomMapper = mapperMock.Object - }); - - // Act. - var actual = await sut.GetAsync(TestRecordKey1, new() { IncludeVectors = true }, this._testCancellationToken); - - // Assert. 
- Assert.NotNull(actual); - Assert.Equal(TestRecordKey1, actual.Key); - Assert.Equal("data 1", actual.Data1); - Assert.Equal("data 2", actual.Data2); - Assert.Equal(new float[] { 1, 2, 3, 4 }, actual.Vector1!.Value.ToArray()); - Assert.Equal(new float[] { 1, 2, 3, 4 }, actual.Vector2!.Value.ToArray()); - } - [Theory] [InlineData(true)] [InlineData(false)] @@ -474,58 +430,6 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition) Times.Once); } - [Fact] - public async Task CanUpsertRecordWithCustomMapperAsync() - { - // Arrange. -#pragma warning disable Moq1002 // Moq: No matching constructor - var indexingResult = new Mock(MockBehavior.Strict, TestRecordKey1, true, 200); - var indexingResults = new List(); - indexingResults.Add(indexingResult.Object); - var indexDocumentsResultMock = new Mock(MockBehavior.Strict, indexingResults); -#pragma warning restore Moq1002 // Moq: No matching constructor - - var model = CreateModel(TestRecordKey1, true); - var storageObject = JsonSerializer.SerializeToNode(model)!.AsObject(); - - // Arrange UploadDocumentsAsync mock returning upsert result. - this._searchClientMock.Setup( - x => x.UploadDocumentsAsync( - It.IsAny>(), - It.IsAny(), - this._testCancellationToken)) - .ReturnsAsync((IEnumerable documents, IndexDocumentsOptions options, CancellationToken cancellationToken) => - { - // Need to force a materialization of the documents enumerable here, otherwise the mapper (and therefore its mock) doesn't get invoked. - var materializedDocuments = documents.ToList(); - return Response.FromValue(indexDocumentsResultMock.Object, Mock.Of()); - }); - - // Arrange mapper mock from data model to JsonObject. - var mapperMock = new Mock>(MockBehavior.Strict); - mapperMock - .Setup(x => x.MapFromDataToStorageModel(It.IsAny())) - .Returns(storageObject); - - // Arrange target with custom mapper. - var sut = new AzureAISearchVectorStoreRecordCollection( - this._searchIndexClientMock.Object, - TestCollectionName, - new() - { - JsonObjectCustomMapper = mapperMock.Object - }); - - // Act. - await sut.UpsertAsync(model, this._testCancellationToken); - - // Assert. - mapperMock - .Verify( - x => x.MapFromDataToStorageModel(It.Is(x => x.Key == TestRecordKey1)), - Times.Once); - } - /// /// Tests that the collection can be created even if the definition and the type do not match. 
/// In this case, the expectation is that a custom mapper will be provided to map between the @@ -549,7 +453,7 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() var sut = new AzureAISearchVectorStoreRecordCollection( this._searchIndexClientMock.Object, TestCollectionName, - new() { VectorStoreRecordDefinition = definition, JsonObjectCustomMapper = Mock.Of>() }); + new() { VectorStoreRecordDefinition = definition }); } [Fact] diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 3741730ca726..4ad78d8a86f2 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -489,84 +489,6 @@ await this.TestUpsertWithModelAsync( expectedPropertyName: "bson_hotel_name"); } -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - [Fact] - public async Task UpsertWithCustomMapperWorksCorrectlyAsync() - { - // Arrange - var hotel = new AzureCosmosDBMongoDBHotelModel("key") { HotelName = "Test Name" }; - - var mockMapper = new Mock>(); - - mockMapper - .Setup(l => l.MapFromDataToStorageModel(It.IsAny())) - .Returns(new BsonDocument { ["_id"] = "key", ["my_name"] = "Test Name" }); - - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( - this._mockMongoDatabase.Object, - "collection", - new() { BsonDocumentCustomMapper = mockMapper.Object }); - - // Act - var result = await sut.UpsertAsync(hotel); - - // Assert - Assert.Equal("key", result); - - this._mockMongoCollection.Verify(l => l.ReplaceOneAsync( - It.IsAny>(), - It.Is(document => - document["_id"] == "key" && - document["my_name"] == "Test Name"), - It.IsAny(), - It.IsAny()), Times.Once()); - } - - [Fact] - public async Task GetWithCustomMapperWorksCorrectlyAsync() - { - // Arrange - const string RecordKey = "key"; - - var document = new BsonDocument { ["_id"] = RecordKey, ["my_name"] = "Test Name" }; - - var mockCursor = new Mock>(); - mockCursor - .Setup(l => l.MoveNextAsync(It.IsAny())) - .ReturnsAsync(true); - - mockCursor - .Setup(l => l.Current) - .Returns([document]); - - this._mockMongoCollection - .Setup(l => l.FindAsync( - It.IsAny>(), - It.IsAny>(), - It.IsAny())) - .ReturnsAsync(mockCursor.Object); - - var mockMapper = new Mock>(); - - mockMapper - .Setup(l => l.MapFromStorageToDataModel(It.IsAny(), It.IsAny())) - .Returns(new AzureCosmosDBMongoDBHotelModel(RecordKey) { HotelName = "Name from mapper" }); - - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( - this._mockMongoDatabase.Object, - "collection", - new() { BsonDocumentCustomMapper = mockMapper.Object }); - - // Act - var result = await sut.GetAsync(RecordKey); - - // Assert - Assert.NotNull(result); - Assert.Equal(RecordKey, result.HotelId); - Assert.Equal("Name from mapper", result.HotelName); - } -#pragma warning restore CS0618 - [Theory] [MemberData(nameof(VectorizedSearchVectorTypeData))] public async Task VectorizedSearchThrowsExceptionWithInvalidVectorTypeAsync(object vector, bool exceptionExpected) diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs 
b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 34f4dcd429d4..48cc293f8da1 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -480,91 +480,6 @@ public async Task UpsertBatchReturnsRecordKeysAsync() Assert.Equal("key3", results[2]); } -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - [Fact] - public async Task UpsertWithCustomMapperWorksCorrectlyAsync() - { - // Arrange - var hotel = new AzureCosmosDBNoSQLHotel("key") { HotelName = "Test Name" }; - - var mockMapper = new Mock>(); - - mockMapper - .Setup(l => l.MapFromDataToStorageModel(It.IsAny())) - .Returns(new JsonObject { ["id"] = "key", ["my_name"] = "Test Name" }); - - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( - this._mockDatabase.Object, - "collection", - new() { JsonObjectCustomMapper = mockMapper.Object }); - - // Act - var result = await sut.UpsertAsync(hotel); - - // Assert - Assert.Equal("key", result); - - this._mockContainer.Verify(l => l.UpsertItemAsync( - It.Is(node => - node["id"]!.ToString() == "key" && - node["my_name"]!.ToString() == "Test Name"), - new PartitionKey("key"), - It.IsAny(), - It.IsAny()), - Times.Once()); - } - - [Fact] - public async Task GetWithCustomMapperWorksCorrectlyAsync() - { - // Arrange - const string RecordKey = "key"; - - var jsonObject = new JsonObject { ["id"] = RecordKey, ["HotelName"] = "Test Name" }; - - var mockFeedResponse = new Mock>(); - mockFeedResponse - .Setup(l => l.Resource) - .Returns([jsonObject]); - - var mockFeedIterator = new Mock>(); - mockFeedIterator - .SetupSequence(l => l.HasMoreResults) - .Returns(true) - .Returns(false); - - mockFeedIterator - .Setup(l => l.ReadNextAsync(It.IsAny())) - .ReturnsAsync(mockFeedResponse.Object); - - this._mockContainer - .Setup(l => l.GetItemQueryIterator( - It.IsAny(), - It.IsAny(), - It.IsAny())) - .Returns(mockFeedIterator.Object); - - var mockMapper = new Mock>(); - - mockMapper - .Setup(l => l.MapFromStorageToDataModel(It.IsAny(), It.IsAny())) - .Returns(new AzureCosmosDBNoSQLHotel(RecordKey) { HotelName = "Name from mapper" }); - - var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( - this._mockDatabase.Object, - "collection", - new() { JsonObjectCustomMapper = mockMapper.Object }); - - // Act - var result = await sut.GetAsync(RecordKey); - - // Assert - Assert.NotNull(result); - Assert.Equal(RecordKey, result.HotelId); - Assert.Equal("Name from mapper", result.HotelName); - } -#pragma warning restore CS0618 - [Fact] public async Task VectorizedSearchReturnsValidRecordAsync() { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchDynamicDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchDynamicDataModelMapper.cs index 20a9eb00243e..dec24ef30c0c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchDynamicDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchDynamicDataModelMapper.cs @@ -16,7 +16,7 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Azure AI Search. 
/// #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete -internal sealed class AzureAISearchDynamicDataModelMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper, JsonObject> +internal sealed class AzureAISearchDynamicDataModelMapper(VectorStoreRecordModel model) #pragma warning restore CS0618 { /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index df9fc6e49ecf..60a5e26e320f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -55,9 +55,7 @@ public sealed class AzureAISearchVectorStoreRecordCollection : private readonly AzureAISearchVectorStoreRecordCollectionOptions _options; /// A mapper to use for converting between the data model and the Azure AI Search record. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - private readonly IVectorStoreRecordMapper? _mapper; -#pragma warning restore CS0618 + private readonly AzureAISearchDynamicDataModelMapper? _dynamicMapper; /// The model for this collection. private readonly VectorStoreRecordModel _model; @@ -90,18 +88,12 @@ public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexCli this._model = new VectorStoreRecordJsonModelBuilder(AzureAISearchConstants.s_modelBuildingOptions) .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.JsonSerializerOptions); -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete // Resolve mapper. - // First, if someone has provided a custom mapper, use that. // If they didn't provide a custom mapper, and the record type is the generic data model, use the built in mapper for that. // Otherwise, don't set the mapper, and we'll default to just using Azure AI Search's built in json serialization and deserialization. - if (this._options.JsonObjectCustomMapper is not null) - { - this._mapper = this._options.JsonObjectCustomMapper; - } - else if (typeof(TRecord) == typeof(Dictionary)) + if (typeof(TRecord) == typeof(Dictionary)) { - this._mapper = new AzureAISearchDynamicDataModelMapper(this._model) as IVectorStoreRecordMapper; + this._dynamicMapper = new AzureAISearchDynamicDataModelMapper(this._model); } this._collectionMetadata = new() @@ -554,8 +546,10 @@ public IAsyncEnumerable> HybridSearchAsync( var stringKey = this.GetStringKey(key); // Use the user provided mapper. - if (this._mapper is not null) + if (this._dynamicMapper is not null) { + Debug.Assert(typeof(TRecord) == typeof(Dictionary)); + var jsonObject = await this.RunOperationAsync( OperationName, () => GetDocumentWithNotFoundHandlingAsync(this._searchClient, stringKey, innerOptions, cancellationToken)).ConfigureAwait(false); @@ -570,7 +564,7 @@ public IAsyncEnumerable> HybridSearchAsync( this._collectionMetadata.VectorStoreName, this._collectionName, OperationName, - () => this._mapper!.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); + () => (TRecord)(object)this._dynamicMapper!.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); } // Use the built in Azure AI Search mapper. @@ -596,8 +590,10 @@ private async IAsyncEnumerable> SearchAndMapToDataMo const string OperationName = "Search"; // Execute search and map using the user provided mapper. 
- if (this._mapper is not null) + if (this._dynamicMapper is not null) { + Debug.Assert(typeof(TRecord) == typeof(Dictionary)); + var jsonObjectResults = await this.RunOperationAsync( OperationName, () => this._searchClient.SearchAsync(searchText, searchOptions, cancellationToken)).ConfigureAwait(false); @@ -631,14 +627,16 @@ private Task> MapToStorageModelAndUploadDocumentA const string OperationName = "UploadDocuments"; // Use the user provided mapper. - if (this._mapper is not null) + if (this._dynamicMapper is not null) { + Debug.Assert(typeof(TRecord) == typeof(Dictionary)); + var jsonObjects = VectorStoreErrorHandler.RunModelConversion( AzureAISearchConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this._collectionName, OperationName, - () => records.Select(this._mapper!.MapFromDataToStorageModel)); + () => records.Select(r => this._dynamicMapper!.MapFromDataToStorageModel((Dictionary)(object)r))); return this.RunOperationAsync( OperationName, @@ -667,7 +665,7 @@ private async IAsyncEnumerable> MapSearchResultsAsyn this._collectionMetadata.VectorStoreName, this._collectionName, operationName, - () => this._mapper!.MapFromStorageToDataModel(result.Document, new() { IncludeVectors = includeVectors })); + () => (TRecord)(object)this._dynamicMapper!.MapFromStorageToDataModel(result.Document, new() { IncludeVectors = includeVectors })); yield return new VectorSearchResult(document, result.Score); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs index be7c668b264b..d396f77c37d2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs @@ -19,7 +19,7 @@ public sealed class AzureAISearchVectorStoreRecordCollectionOptions /// /// If not set, the default mapper that is provided by the Azure AI Search client SDK will be used. /// - [Obsolete("Custom mappers are being obsoleted.")] + [Obsolete("Custom mappers are no longer supported.", error: true)] public IVectorStoreRecordMapper? JsonObjectCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index 11743cec19cb..c9cdc5a9c7d5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -52,9 +52,7 @@ public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollection _options; /// Interface for mapping between a storage model, and the consumer record data model. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - private readonly IVectorStoreRecordMapper _mapper; -#pragma warning restore CS0618 + private readonly IMongoDBMapper _mapper; /// The model for this collection. private readonly VectorStoreRecordModel _model; @@ -88,7 +86,9 @@ public AzureCosmosDBMongoDBVectorStoreRecordCollection( this.CollectionName = collectionName; this._options = options ?? 
new AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions(); this._model = new MongoDBModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); - this._mapper = this.InitializeMapper(); + this._mapper = typeof(TRecord) == typeof(Dictionary) + ? (new MongoDBDynamicDataModelMapper(this._model) as IMongoDBMapper)! + : new MongoDBVectorStoreRecordMapper(this._model); this._collectionMetadata = new() { @@ -570,26 +570,6 @@ private static Dictionary GetStoragePropertyNames( return storagePropertyNames; } -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - /// - /// Returns custom mapper, generic data model mapper or default record mapper. - /// - private IVectorStoreRecordMapper InitializeMapper() - { - if (this._options.BsonDocumentCustomMapper is not null) - { - return this._options.BsonDocumentCustomMapper; - } - - if (typeof(TRecord) == typeof(Dictionary)) - { - return (new MongoDBDynamicDataModelMapper(this._model) as IVectorStoreRecordMapper)!; - } - - return new MongoDBVectorStoreRecordMapper(this._model); - } -#pragma warning restore CS0618 - private string GetStringKey(TKey key) { Verify.NotNull(key); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs index 5a274559ebc9..084bbfd0a72b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs @@ -14,7 +14,7 @@ public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions /// Gets or sets an optional custom mapper to use when converting between the data model and the Azure CosmosDB MongoDB BSON object. /// - [Obsolete("Custom mappers are being obsoleted.")] + [Obsolete("Custom mappers are no longer supported.", error: true)] public IVectorStoreRecordMapper? BsonDocumentCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLDynamicDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLDynamicDataModelMapper.cs index 813899e6845c..8bf1f9a6c5c8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLDynamicDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLDynamicDataModelMapper.cs @@ -13,10 +13,8 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Azure CosmosDB NoSQL. /// -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class AzureCosmosDBNoSQLDynamicDataModelMapper(VectorStoreRecordModel model, JsonSerializerOptions jsonSerializerOptions) - : IVectorStoreRecordMapper, JsonObject> -#pragma warning restore CS0618 + : ICosmosNoSQLMapper> { /// A default for serialization/deserialization of vector properties. 
private static readonly JsonSerializerOptions s_vectorJsonSerializerOptions = new() diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index 526d7edb13e2..fff3d2fd5a56 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -54,9 +54,7 @@ public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollection private readonly VectorStoreRecordPropertyModel _partitionKeyProperty; /// The mapper to use when mapping between the consumer data model and the Azure CosmosDB NoSQL record. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - private readonly IVectorStoreRecordMapper _mapper; -#pragma warning restore CS0618 + private readonly ICosmosNoSQLMapper _mapper; /// public string CollectionName { get; } @@ -97,7 +95,9 @@ public AzureCosmosDBNoSQLVectorStoreRecordCollection( .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, jsonSerializerOptions); // Assign mapper. - this._mapper = this.InitializeMapper(jsonSerializerOptions); + this._mapper = typeof(TRecord) == typeof(Dictionary) + ? (new AzureCosmosDBNoSQLDynamicDataModelMapper(this._model, jsonSerializerOptions) as ICosmosNoSQLMapper)! + : new AzureCosmosDBNoSQLVectorStoreRecordMapper(this._model.KeyProperty, this._options.JsonSerializerOptions); // Setup partition key property if (this._options.PartitionKeyPropertyName is not null) @@ -625,24 +625,6 @@ private async IAsyncEnumerable> MapSearchResultsAsyn } } -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - private IVectorStoreRecordMapper InitializeMapper(JsonSerializerOptions jsonSerializerOptions) - { - if (this._options.JsonObjectCustomMapper is not null) - { - return this._options.JsonObjectCustomMapper; - } - - if (typeof(TRecord) == typeof(Dictionary)) - { - var mapper = new AzureCosmosDBNoSQLDynamicDataModelMapper(this._model, jsonSerializerOptions); - return (mapper as IVectorStoreRecordMapper)!; - } - - return new AzureCosmosDBNoSQLVectorStoreRecordMapper(this._model.KeyProperty, this._options.JsonSerializerOptions); - } -#pragma warning restore CS0618 - private static IEnumerable GetCompositeKeys(IEnumerable keys) => keys switch { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs index e04867e68afc..bd0371b3c835 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs @@ -19,7 +19,7 @@ public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions /// If not set, the default mapper that is provided by the Azure CosmosDB NoSQL client SDK will be used. /// - [Obsolete("Custom mappers are being obsoleted.")] + [Obsolete("Custom mappers are no longer supported.", error: true)] public IVectorStoreRecordMapper? 
JsonObjectCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs index 4667d3956b8e..200853d43fea 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs @@ -11,10 +11,8 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// Class for mapping between a json node stored in Azure CosmosDB NoSQL and the consumer data model. /// /// The consumer data model to map to or from. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class AzureCosmosDBNoSQLVectorStoreRecordMapper(VectorStoreRecordKeyPropertyModel keyProperty, JsonSerializerOptions? jsonSerializerOptions) - : IVectorStoreRecordMapper -#pragma warning restore CS0618 + : ICosmosNoSQLMapper { private readonly VectorStoreRecordKeyPropertyModel _keyProperty = keyProperty; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/ICosmosNoSQLMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/ICosmosNoSQLMapper.cs new file mode 100644 index 000000000000..3e8c0d8ce7a8 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/ICosmosNoSQLMapper.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Nodes; +using Microsoft.Extensions.VectorData; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; + +internal interface ICosmosNoSQLMapper +{ + /// + /// Maps from the consumer record data model to the storage model. + /// + /// The consumer record data model record to map. + /// The mapped result. + JsonObject MapFromDataToStorageModel(TRecord dataModel); + + /// + /// Maps from the storage model to the consumer record data model. + /// + /// The storage data model record to map. + /// Options to control the mapping behavior. + /// The mapped result. + TRecord MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options); +} diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index aba4d40220ae..38244c4292a1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -52,9 +52,7 @@ public sealed class MongoDBVectorStoreRecordCollection : IVectorS private readonly MongoDBVectorStoreRecordCollectionOptions _options; /// Interface for mapping between a storage model, and the consumer record data model. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - private readonly IVectorStoreRecordMapper _mapper; -#pragma warning restore CS0618 + private readonly IMongoDBMapper _mapper; /// The model for this collection. private readonly VectorStoreRecordModel _model; @@ -88,7 +86,9 @@ public MongoDBVectorStoreRecordCollection( this.CollectionName = collectionName; this._options = options ?? 
new MongoDBVectorStoreRecordCollectionOptions(); this._model = new MongoDBModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); - this._mapper = this.InitializeMapper(); + this._mapper = typeof(TRecord) == typeof(Dictionary) + ? (new MongoDBDynamicDataModelMapper(this._model) as IMongoDBMapper)! + : new MongoDBVectorStoreRecordMapper(this._model); this._collectionMetadata = new() { @@ -706,26 +706,6 @@ private async Task RunOperationWithRetryAsync( throw new VectorStoreOperationException("Retry logic failed."); } -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - /// - /// Returns custom mapper, generic data model mapper or default record mapper. - /// - private IVectorStoreRecordMapper InitializeMapper() - { - if (this._options.BsonDocumentCustomMapper is not null) - { - return this._options.BsonDocumentCustomMapper; - } - - if (typeof(TRecord) == typeof(Dictionary)) - { - return (new MongoDBDynamicDataModelMapper(this._model) as IVectorStoreRecordMapper)!; - } - - return new MongoDBVectorStoreRecordMapper(this._model); - } -#pragma warning restore CS0618 - private static Array VerifyVectorParam(TVector vector) { Verify.NotNull(vector); diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs index 5b22d07b1557..d4356fc3bc52 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs @@ -14,7 +14,7 @@ public sealed class MongoDBVectorStoreRecordCollectionOptions /// /// Gets or sets an optional custom mapper to use when converting between the data model and the MongoDB BSON object. /// - [Obsolete("Custom mappers are being obsoleted.")] + [Obsolete("Custom mappers are no longer supported.", error: true)] public IVectorStoreRecordMapper? BsonDocumentCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index f95bd14e452b..220b1d0110e9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -34,9 +34,7 @@ public sealed class PineconeVectorStoreRecordCollection : IVector private readonly Sdk.PineconeClient _pineconeClient; private readonly PineconeVectorStoreRecordCollectionOptions _options; private readonly VectorStoreRecordModel _model; -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - private readonly IVectorStoreRecordMapper _mapper; -#pragma warning restore CS0618 + private readonly PineconeVectorStoreRecordMapper _mapper; private IndexClient? _indexClient; /// @@ -65,9 +63,7 @@ public PineconeVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, st this._options = options ?? new PineconeVectorStoreRecordCollectionOptions(); this._model = new VectorStoreRecordModelBuilder(PineconeVectorStoreRecordFieldMapping.ModelBuildingOptions) .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - this._mapper = this._options.VectorCustomMapper ?? 
new PineconeVectorStoreRecordMapper(this._model); -#pragma warning restore CS0618 + this._mapper = new PineconeVectorStoreRecordMapper(this._model); this._collectionMetadata = new() { diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs index 6c30de4c31a6..fbec059a0064 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs @@ -14,7 +14,7 @@ public sealed class PineconeVectorStoreRecordCollectionOptions /// /// Gets or sets an optional custom mapper to use when converting between the data model and the Pinecone vector. /// - [Obsolete("Custom mappers are being obsoleted.")] + [Obsolete("Custom mappers are no longer supported.", error: true)] public IVectorStoreRecordMapper? VectorCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs index ba0ce35e2490..b3e6717ace66 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs @@ -11,9 +11,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// Mapper between a Pinecone record and the consumer data model that uses json as an intermediary to allow supporting a wide range of models. /// /// The consumer data model to map to or from. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete -internal sealed class PineconeVectorStoreRecordMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper -#pragma warning restore CS0618 +internal sealed class PineconeVectorStoreRecordMapper(VectorStoreRecordModel model) { /// public Vector MapFromDataToStorageModel(TRecord dataModel) diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index 44fcb40fa3d1..aab25cfe6d1f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -39,9 +39,7 @@ public sealed class PostgresVectorStoreRecordCollection : IVector private readonly VectorStoreRecordModel _model; /// A mapper to use for converting between the data model and the Azure AI Search record. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - private readonly IVectorStoreRecordMapper> _mapper; -#pragma warning restore CS0618 + private readonly PostgresVectorStoreRecordMapper _mapper; /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -80,9 +78,7 @@ internal PostgresVectorStoreRecordCollection(IPostgresVectorStoreDbClient client this._model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions) .Build(typeof(TRecord), options?.VectorStoreRecordDefinition); -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - this._mapper = this._options.DictionaryCustomMapper ?? 
new PostgresVectorStoreRecordMapper(this._model); -#pragma warning restore CS0618 + this._mapper = new PostgresVectorStoreRecordMapper(this._model); this._collectionMetadata = new() { diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollectionOptions.cs index 00a9a5624380..0f2595e76c02 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollectionOptions.cs @@ -22,7 +22,7 @@ public sealed class PostgresVectorStoreRecordCollectionOptions /// /// If not set, the default mapper will be used. /// - [Obsolete("Custom mappers are being obsoleted.")] + [Obsolete("Custom mappers are no longer supported.", error: true)] public IVectorStoreRecordMapper>? DictionaryCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs index 13b73ce87489..78e2c2d48c75 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs @@ -13,10 +13,7 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; /// A mapper class that handles the conversion between data models and storage models for Postgres vector store. /// /// The type of the data model record. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class PostgresVectorStoreRecordMapper(VectorStoreRecordModel model) - : IVectorStoreRecordMapper> -#pragma warning restore CS0618 { public Dictionary MapFromDataToStorageModel(TRecord dataModel) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs index eccb2d4e89fd..a3f67fb93a5f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs @@ -99,16 +99,14 @@ public static Filter BuildFromLegacyFilter(VectorSearchFilter basicVectorSearchF /// The name of the collection the operation is being run on. /// The type of database operation being run. /// The mapped . -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete public static VectorSearchResult MapScoredPointToVectorSearchResult( ScoredPoint point, - IVectorStoreRecordMapper mapper, + QdrantVectorStoreRecordMapper mapper, bool includeVectors, string vectorStoreSystemName, string? vectorStoreName, string collectionName, string operationName) -#pragma warning restore CS0618 { // Since the mapper doesn't know about scored points, we need to convert the scored point to a point struct first. var pointStruct = new PointStruct @@ -136,7 +134,7 @@ public static VectorSearchResult MapScoredPointToVectorSearchResult(RetrievedPoint point, - IVectorStoreRecordMapper mapper, + QdrantVectorStoreRecordMapper mapper, bool includeVectors, string vectorStoreSystemName, string? 
vectorStoreName, diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index f0ab07038231..91aeb02b170d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -56,9 +56,7 @@ public sealed class QdrantVectorStoreRecordCollection : IVectorSt private readonly VectorStoreRecordModel _model; /// A mapper to use for converting between qdrant point and consumer models. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - private readonly IVectorStoreRecordMapper _mapper; -#pragma warning restore CS0618 + private readonly QdrantVectorStoreRecordMapper _mapper; /// /// Initializes a new instance of the class. @@ -100,9 +98,7 @@ internal QdrantVectorStoreRecordCollection(MockableQdrantClient qdrantClient, st this._model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(this._options.HasNamedVectors)) .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - this._mapper = this._options.PointStructCustomMapper ?? new QdrantVectorStoreRecordMapper(this._model, this._options.HasNamedVectors); -#pragma warning restore CS0618 + this._mapper = new QdrantVectorStoreRecordMapper(this._model, this._options.HasNamedVectors); this._collectionMetadata = new() { diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs index 6daeafc364de..c5c1df41e64c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs @@ -23,7 +23,7 @@ public sealed class QdrantVectorStoreRecordCollectionOptions /// /// If not set, a default mapper that uses json as an intermediary to allow automatic mapping to a wide variety of types will be used. /// - [Obsolete("Custom mappers are being obsoleted.")] + [Obsolete("Custom mappers are no longer supported.", error: true)] public IVectorStoreRecordMapper? PointStructCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs index da07c10b3481..322b4d869c93 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs @@ -13,10 +13,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// Mapper between a Qdrant record and the consumer data model that uses json as an intermediary to allow supporting a wide range of models. /// /// The consumer data model to map to or from. 
-#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class QdrantVectorStoreRecordMapper(VectorStoreRecordModel model, bool hasNamedVectors) - : IVectorStoreRecordMapper -#pragma warning restore CS0618 { /// public PointStruct MapFromDataToStorageModel(TRecord dataModel) diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/IRedisJsonMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/IRedisJsonMapper.cs new file mode 100644 index 000000000000..d597fadd652d --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/IRedisJsonMapper.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Nodes; +using Microsoft.Extensions.VectorData; + +namespace Microsoft.SemanticKernel.Connectors.Redis; + +internal interface IRedisJsonMapper +{ + /// + /// Maps from the consumer record data model to the storage model. + /// + /// The consumer record data model record to map. + /// The mapped result. + (string Key, JsonNode Node) MapFromDataToStorageModel(TRecord dataModel); + + /// + /// Maps from the storage model to the consumer record data model. + /// + /// The storage data model record to map. + /// Options to control the mapping behavior. + /// The mapped result. + TRecord MapFromStorageToDataModel((string Key, JsonNode Node) storageModel, StorageToDataModelMapperOptions options); +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index 6d2138d3fb83..219a865cc12c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -87,9 +87,7 @@ public sealed class RedisHashSetVectorStoreRecordCollection : IVe private readonly string[] _dataStoragePropertyNamesWithScore; /// The mapper to use when mapping between the consumer data model and the Redis record. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - private readonly IVectorStoreRecordMapper _mapper; -#pragma warning restore CS0618 + private readonly RedisHashSetVectorStoreRecordMapper _mapper; /// /// Initializes a new instance of the class. @@ -120,9 +118,7 @@ public RedisHashSetVectorStoreRecordCollection(IDatabase database, string collec this._dataStoragePropertyNamesWithScore = [.. this._model.DataProperties.Select(p => p.StorageName), "vector_score"]; // Assign Mapper. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - this._mapper = this._options.HashEntriesCustomMapper ?? new RedisHashSetVectorStoreRecordMapper(this._model); -#pragma warning restore CS0618 + this._mapper = new RedisHashSetVectorStoreRecordMapper(this._model); this._collectionMetadata = new() { diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs index 121262f92c5c..f88e959d2ea7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs @@ -24,7 +24,7 @@ public sealed class RedisHashSetVectorStoreRecordCollectionOptions /// /// Gets or sets an optional custom mapper to use when converting between the data model and the Redis record. 
/// - [Obsolete("Custom mappers are being obsoleted.")] + [Obsolete("Custom mappers are no longer supported.", error: true)] public IVectorStoreRecordMapper? HashEntriesCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs index 88f85b64365c..4413b296dc6a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs @@ -14,10 +14,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// Class for mapping between a hashset stored in redis, and the consumer data model. /// /// The consumer data model to map to or from. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class RedisHashSetVectorStoreRecordMapper(VectorStoreRecordModel model) - : IVectorStoreRecordMapper -#pragma warning restore CS0618 { /// public (string Key, HashEntry[] HashEntries) MapFromDataToStorageModel(TConsumerDataModel dataModel) diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonDynamicDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonDynamicDataModelMapper.cs index 2d567df2059c..58cff019fa28 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonDynamicDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonDynamicDataModelMapper.cs @@ -11,10 +11,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Redis when using JSON. /// -internal class RedisJsonDynamicDataModelMapper(VectorStoreRecordModel model, JsonSerializerOptions jsonSerializerOptions) -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - : IVectorStoreRecordMapper, (string Key, JsonNode Node)> -#pragma warning restore CS0618 +internal class RedisJsonDynamicDataModelMapper(VectorStoreRecordModel model, JsonSerializerOptions jsonSerializerOptions) : IRedisJsonMapper> { /// public (string Key, JsonNode Node) MapFromDataToStorageModel(Dictionary dataModel) diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index 04689522f7cb..097095953866 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -75,9 +75,7 @@ public sealed class RedisJsonVectorStoreRecordCollection : IVecto private readonly string[] _dataStoragePropertyNames; /// The mapper to use when mapping between the consumer data model and the Redis record. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - private readonly IVectorStoreRecordMapper _mapper; -#pragma warning restore CS0618 + private readonly IRedisJsonMapper _mapper; /// The JSON serializer options to use when converting between the data model and the Redis record. private readonly JsonSerializerOptions _jsonSerializerOptions; @@ -111,23 +109,10 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectio // Lookup storage property names. 
         this._dataStoragePropertyNames = this._model.DataProperties.Select(p => p.StorageName).ToArray();
 
-#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete
         // Assign Mapper.
-        if (this._options.JsonNodeCustomMapper is not null)
-        {
-            // Custom Mapper.
-            this._mapper = this._options.JsonNodeCustomMapper;
-        }
-        else if (typeof(TRecord) == typeof(Dictionary))
-        {
-            this._mapper = (IVectorStoreRecordMapper)new RedisJsonDynamicDataModelMapper(this._model, this._jsonSerializerOptions);
-        }
-        else
-        {
-            // Default Mapper.
-            this._mapper = new RedisJsonVectorStoreRecordMapper(this._model, this._jsonSerializerOptions);
-        }
-#pragma warning restore CS0618
+        this._mapper = typeof(TRecord) == typeof(Dictionary)
+            ? (IRedisJsonMapper)new RedisJsonDynamicDataModelMapper(this._model, this._jsonSerializerOptions)
+            : new RedisJsonVectorStoreRecordMapper(this._model, this._jsonSerializerOptions);
 
         this._collectionMetadata = new()
         {
diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs
index 5363edb4851e..eea8cc025988 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs
@@ -28,7 +28,7 @@ public sealed class RedisJsonVectorStoreRecordCollectionOptions
     ///
     /// If not set, the default built-in mapper will be used, which uses record attributes or the provided JsonSerializerOptions to map the record.
     ///
-    [Obsolete("Custom mappers are being obsoleted.")]
+    [Obsolete("Custom mappers are no longer supported.", error: true)]
     public IVectorStoreRecordMapper? JsonNodeCustomMapper { get; init; } = null;
 
     ///
diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs
index 64a0ce9e5b76..d0bde9db3fc1 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs
@@ -15,9 +15,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis;
 internal sealed class RedisJsonVectorStoreRecordMapper(
     VectorStoreRecordModel model,
     JsonSerializerOptions jsonSerializerOptions)
-#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete
-    : IVectorStoreRecordMapper
-#pragma warning restore CS0618
+    : IRedisJsonMapper
 {
     /// The key property.
private readonly string _keyPropertyStorageName = model.KeyProperty.StorageName; diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs index 963d5f266128..d964681e18ab 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs @@ -7,9 +7,7 @@ namespace Microsoft.SemanticKernel.Connectors.SqlServer; -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete -internal sealed class RecordMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper> -#pragma warning restore CS0618 +internal sealed class RecordMapper(VectorStoreRecordModel model) { public IDictionary MapFromDataToStorageModel(TRecord dataModel) { diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index 222ccf6083c7..9036cddd2575 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -32,9 +32,7 @@ public sealed class SqlServerVectorStoreRecordCollection private readonly string _connectionString; private readonly SqlServerVectorStoreRecordCollectionOptions _options; private readonly VectorStoreRecordModel _model; -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - private readonly IVectorStoreRecordMapper> _mapper; -#pragma warning restore CS0618 + private readonly RecordMapper _mapper; /// /// Initializes a new instance of the class. @@ -57,17 +55,14 @@ public SqlServerVectorStoreRecordCollection( this.CollectionName = name; // We need to create a copy, so any changes made to the option bag after // the ctor call do not affect this instance. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete this._options = options is null ? s_defaultOptions : new() { Schema = options.Schema, - Mapper = options.Mapper, RecordDefinition = options.RecordDefinition, }; - this._mapper = this._options.Mapper ?? new RecordMapper(this._model); -#pragma warning restore CS0618 + this._mapper = new RecordMapper(this._model); var connectionStringBuilder = new SqlConnectionStringBuilder(connectionString); diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs index d8bfd40a2217..26a41f86d9de 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs @@ -22,7 +22,7 @@ public sealed class SqlServerVectorStoreRecordCollectionOptions /// /// If not set, the default mapper will be used. /// - [Obsolete("Custom mappers are being obsoleted.")] + [Obsolete("Custom mappers are no longer supported.", error: true)] public IVectorStoreRecordMapper>? 
Mapper { get; init; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 1b2ed2ba6171..4006d5c21006 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -36,9 +36,7 @@ public sealed class SqliteVectorStoreRecordCollection : IVectorSt private readonly SqliteVectorStoreRecordCollectionOptions _options; /// The mapper to use when mapping between the consumer data model and the SQLite record. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - private readonly IVectorStoreRecordMapper> _mapper; -#pragma warning restore CS0618 + private readonly SqliteVectorStoreRecordMapper _mapper; /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -138,9 +136,7 @@ public SqliteVectorStoreRecordCollection( throw new UnreachableException(); } } -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - this._mapper = this._options.DictionaryCustomMapper ?? new SqliteVectorStoreRecordMapper(this._model); -#pragma warning restore CS0618 + this._mapper = new SqliteVectorStoreRecordMapper(this._model); var connectionStringBuilder = new SqliteConnectionStringBuilder(connectionString); diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs index d71c44334051..4735e119f292 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs @@ -14,7 +14,7 @@ public sealed class SqliteVectorStoreRecordCollectionOptions /// /// Gets or sets an optional custom mapper to use when converting between the data model and the SQLite record. /// - [Obsolete("Custom mappers are being obsoleted.")] + [Obsolete("Custom mappers are no longer supported.", error: true)] public IVectorStoreRecordMapper>? DictionaryCustomMapper { get; set; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs index 8acd276cdb74..a84276ee8c95 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs @@ -11,9 +11,7 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// Class for mapping between a dictionary and the consumer data model. /// /// The consumer data model to map to or from. 
-#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete -internal sealed class SqliteVectorStoreRecordMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper> -#pragma warning restore CS0618 +internal sealed class SqliteVectorStoreRecordMapper(VectorStoreRecordModel model) { public Dictionary MapFromDataToStorageModel(TRecord dataModel) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateMapper.cs new file mode 100644 index 000000000000..f70ee026dcd8 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateMapper.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Nodes; +using Microsoft.Extensions.VectorData; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal interface IWeaviateMapper +{ + /// + /// Maps from the consumer record data model to the storage model. + /// + /// The consumer record data model record to map. + /// The mapped result. + JsonObject MapFromDataToStorageModel(TRecord dataModel); + + /// + /// Maps from the storage model to the consumer record data model. + /// + /// The storage data model record to map. + /// Options to control the mapping behavior. + /// The mapped result. + TRecord MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options); +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateDynamicDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateDynamicDataModelMapper.cs index 233d2e0f31c7..fe28c5c62e66 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateDynamicDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateDynamicDataModelMapper.cs @@ -12,9 +12,7 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; /// /// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Weaviate. /// -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete -internal sealed class WeaviateDynamicDataModelMapper : IVectorStoreRecordMapper, JsonObject> -#pragma warning restore CS0618 +internal sealed class WeaviateDynamicDataModelMapper : IWeaviateMapper> { /// The name of the Weaviate collection. private readonly string _collectionName; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index e3fdf3ac2fcf..49a07cad043e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -59,9 +59,7 @@ public sealed class WeaviateVectorStoreRecordCollection : IVector private readonly VectorStoreRecordModel _model; /// The mapper to use when mapping between the consumer data model and the Weaviate record. -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - private readonly IVectorStoreRecordMapper _mapper; -#pragma warning restore CS0618 + private readonly IWeaviateMapper _mapper; /// Weaviate endpoint. private readonly Uri _endpoint; @@ -108,7 +106,9 @@ public WeaviateVectorStoreRecordCollection( this._model = new WeaviateModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, s_jsonSerializerOptions); // Assign mapper. 
- this._mapper = this.InitializeMapper(); + this._mapper = typeof(TRecord) == typeof(Dictionary) + ? (new WeaviateDynamicDataModelMapper(this.CollectionName, this._model, s_jsonSerializerOptions) as IWeaviateMapper)! + : new WeaviateVectorStoreRecordMapper(this.CollectionName, this._model, s_jsonSerializerOptions); this._collectionMetadata = new() { @@ -516,28 +516,6 @@ private async Task RunOperationAsync(string operationName, Func> o } } - /// - /// Returns custom mapper, generic data model mapper or default record mapper. - /// -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - private IVectorStoreRecordMapper InitializeMapper() - { - if (this._options.JsonObjectCustomMapper is not null) - { - return this._options.JsonObjectCustomMapper; - } - - if (typeof(TRecord) == typeof(Dictionary)) - { - var mapper = new WeaviateDynamicDataModelMapper(this.CollectionName, this._model, s_jsonSerializerOptions); - - return (mapper as IVectorStoreRecordMapper)!; - } - - return new WeaviateVectorStoreRecordMapper(this.CollectionName, this._model, s_jsonSerializerOptions); - } -#pragma warning restore CS0618 - private static void VerifyVectorParam(TVector vector) { Verify.NotNull(vector); diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs index 6c913120c95b..f758d77499a2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs @@ -14,7 +14,7 @@ public sealed class WeaviateVectorStoreRecordCollectionOptions /// /// Gets or sets an optional custom mapper to use when converting between the data model and Weaviate record. /// - [Obsolete("Custom mappers are being obsoleted.")] + [Obsolete("Custom mappers are no longer supported.", error: true)] public IVectorStoreRecordMapper? 
JsonObjectCustomMapper { get; init; } = null; /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs index 93edfdaadd96..ce5a38ebc0e2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs @@ -9,7 +9,7 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete -internal sealed class WeaviateVectorStoreRecordMapper : IVectorStoreRecordMapper +internal sealed class WeaviateVectorStoreRecordMapper : IWeaviateMapper #pragma warning restore CS0618 { private readonly string _collectionName; diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs index 3b641e03046f..5daa6af97e23 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs @@ -489,84 +489,6 @@ await this.TestUpsertWithModelAsync( expectedPropertyName: "bson_hotel_name"); } -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - [Fact] - public async Task UpsertWithCustomMapperWorksCorrectlyAsync() - { - // Arrange - var hotel = new MongoDBHotelModel("key") { HotelName = "Test Name" }; - - var mockMapper = new Mock>(); - - mockMapper - .Setup(l => l.MapFromDataToStorageModel(It.IsAny())) - .Returns(new BsonDocument { ["_id"] = "key", ["my_name"] = "Test Name" }); - - var sut = new MongoDBVectorStoreRecordCollection( - this._mockMongoDatabase.Object, - "collection", - new() { BsonDocumentCustomMapper = mockMapper.Object }); - - // Act - var result = await sut.UpsertAsync(hotel); - - // Assert - Assert.Equal("key", result); - - this._mockMongoCollection.Verify(l => l.ReplaceOneAsync( - It.IsAny>(), - It.Is(document => - document["_id"] == "key" && - document["my_name"] == "Test Name"), - It.IsAny(), - It.IsAny()), Times.Once()); - } - - [Fact] - public async Task GetWithCustomMapperWorksCorrectlyAsync() - { - // Arrange - const string RecordKey = "key"; - - var document = new BsonDocument { ["_id"] = RecordKey, ["my_name"] = "Test Name" }; - - var mockCursor = new Mock>(); - mockCursor - .Setup(l => l.MoveNextAsync(It.IsAny())) - .ReturnsAsync(true); - - mockCursor - .Setup(l => l.Current) - .Returns([document]); - - this._mockMongoCollection - .Setup(l => l.FindAsync( - It.IsAny>(), - It.IsAny>(), - It.IsAny())) - .ReturnsAsync(mockCursor.Object); - - var mockMapper = new Mock>(); - - mockMapper - .Setup(l => l.MapFromStorageToDataModel(It.IsAny(), It.IsAny())) - .Returns(new MongoDBHotelModel(RecordKey) { HotelName = "Name from mapper" }); - - var sut = new MongoDBVectorStoreRecordCollection( - this._mockMongoDatabase.Object, - "collection", - new() { BsonDocumentCustomMapper = mockMapper.Object }); - - // Act - var result = await sut.GetAsync(RecordKey); - - // Assert - Assert.NotNull(result); - Assert.Equal(RecordKey, result.HotelId); - Assert.Equal("Name from mapper", result.HotelName); - } -#pragma warning restore CS0618 - [Theory] [MemberData(nameof(VectorizedSearchVectorTypeData))] public async Task VectorizedSearchThrowsExceptionWithInvalidVectorTypeAsync(object vector, bool 
exceptionExpected) diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs index 4c8bc30ade07..4def919f657b 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeVectorStoreRecordCollectionTests.cs @@ -4,7 +4,6 @@ using System.Collections.Generic; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Pinecone; -using Moq; using Xunit; using Sdk = Pinecone; @@ -17,7 +16,6 @@ public class PineconeVectorStoreRecordCollectionTests { private const string TestCollectionName = "testcollection"; -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete /// /// Tests that the collection can be created even if the definition and the type do not match. /// In this case, the expectation is that a custom mapper will be provided to map between the @@ -42,9 +40,8 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() var sut = new PineconeVectorStoreRecordCollection( pineconeClient, TestCollectionName, - new() { VectorStoreRecordDefinition = definition, VectorCustomMapper = Mock.Of>() }); + new() { VectorStoreRecordDefinition = definition }); } -#pragma warning restore CS0618 public sealed class SinglePropsModel { diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs index c2a50e978fab..e60c2d31bb1b 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs @@ -5,7 +5,6 @@ using System.Linq; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; -using Moq; using Qdrant.Client.Grpc; using Xunit; @@ -116,11 +115,23 @@ public void MapScoredPointToVectorSearchResultMapsResults() Score = 0.5f }; - var mapperMock = new Mock>(MockBehavior.Strict); - mapperMock.Setup(x => x.MapFromStorageToDataModel(It.IsAny(), It.IsAny())).Returns(new DataModel { Id = 1, DataField = "data 1", Embedding = new float[] { 1, 2, 3 } }); + var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors: false)) + .Build( + typeof(DataModel), + new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("Id", typeof(ulong)), + new VectorStoreRecordDataProperty("DataField", typeof(string)) { StoragePropertyName = "storage_DataField" }, + new VectorStoreRecordVectorProperty("Embedding", typeof(ReadOnlyMemory), 10), + ] + }); + + var mapper = new QdrantVectorStoreRecordMapper(model, hasNamedVectors: false); // Act. - var actual = QdrantVectorStoreCollectionSearchMapping.MapScoredPointToVectorSearchResult(scoredPoint, mapperMock.Object, true, "Qdrant", "myvectorstore", "mycollection", "query"); + var actual = QdrantVectorStoreCollectionSearchMapping.MapScoredPointToVectorSearchResult(scoredPoint, mapper, true, "Qdrant", "myvectorstore", "mycollection", "query"); // Assert. 
Assert.Equal(1ul, actual.Record.Id); diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs index c223a57142e5..e92a7696495c 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs @@ -253,54 +253,6 @@ public async Task CanGetManyRecordsWithVectorsAsync(bool useDefinition, bo Assert.Equal(testRecordKeys[1], actual[1].Key); } -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - [Fact] - public async Task CanGetRecordWithCustomMapperAsync() - { - // Arrange. - var retrievedPoint = CreateRetrievedPoint(true, UlongTestRecordKey1); - this.SetupRetrieveMock([retrievedPoint]); - - // Arrange mapper mock from PointStruct to data model. - var mapperMock = new Mock, PointStruct>>(MockBehavior.Strict); - mapperMock.Setup( - x => x.MapFromStorageToDataModel( - It.IsAny(), - It.IsAny())) - .Returns(CreateModel(UlongTestRecordKey1, true)); - - // Arrange target with custom mapper. - var sut = new QdrantVectorStoreRecordCollection>( - this._qdrantClientMock.Object, - TestCollectionName, - new() - { - HasNamedVectors = true, - PointStructCustomMapper = mapperMock.Object - }); - - // Act - var actual = await sut.GetAsync( - UlongTestRecordKey1, - new() { IncludeVectors = true }, - this._testCancellationToken); - - // Assert - Assert.NotNull(actual); - Assert.Equal(UlongTestRecordKey1, actual.Key); - Assert.Equal("data 1", actual.OriginalNameData); - Assert.Equal("data 1", actual.Data); - Assert.Equal(new float[] { 1, 2, 3, 4 }, actual.Vector!.Value.ToArray()); - - mapperMock - .Verify( - x => x.MapFromStorageToDataModel( - It.Is(x => x.Id.Num == UlongTestRecordKey1), - It.Is(x => x.IncludeVectors)), - Times.Once); - } -#pragma warning restore CS0618 - [Theory] [InlineData(true, true)] [InlineData(true, false)] @@ -481,47 +433,6 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition, bool hasNa Times.Once); } -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - [Fact] - public async Task CanUpsertRecordWithCustomMapperAsync() - { - // Arrange. - this.SetupUpsertMock(); - var pointStruct = new PointStruct - { - Id = new() { Num = UlongTestRecordKey1 }, - Payload = { ["OriginalNameData"] = "data 1", ["data_storage_name"] = "data 1" }, - Vectors = new[] { 1f, 2f, 3f, 4f } - }; - - // Arrange mapper mock from data model to PointStruct. - var mapperMock = new Mock, PointStruct>>(MockBehavior.Strict); - mapperMock - .Setup(x => x.MapFromDataToStorageModel(It.IsAny>())) - .Returns(pointStruct); - - // Arrange target with custom mapper. - var sut = new QdrantVectorStoreRecordCollection>( - this._qdrantClientMock.Object, - TestCollectionName, - new() - { - HasNamedVectors = false, - PointStructCustomMapper = mapperMock.Object - }); - - var model = CreateModel(UlongTestRecordKey1, true); - - // Act - await sut.UpsertAsync(model, this._testCancellationToken); - - // Assert - mapperMock - .Verify( - x => x.MapFromDataToStorageModel(It.Is>(x => x == model)), - Times.Once); - } - /// /// Tests that the collection can be created even if the definition and the type do not match. 
/// In this case, the expectation is that a custom mapper will be provided to map between the @@ -545,9 +456,8 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() var sut = new QdrantVectorStoreRecordCollection>( this._qdrantClientMock.Object, TestCollectionName, - new() { VectorStoreRecordDefinition = definition, PointStructCustomMapper = Mock.Of, PointStruct>>() }); + new() { VectorStoreRecordDefinition = definition }); } -#pragma warning restore CS0618 #pragma warning disable CS0618 // VectorSearchFilter is obsolete [Theory] diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs index fa7ce7a04235..928c65480143 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -237,57 +237,6 @@ public async Task CanGetManyRecordsWithVectorsAsync(bool useDefinition) Assert.Equal(new float[] { 5, 6, 7, 8 }, actual[1].Vector!.Value.ToArray()); } -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - [Fact] - public async Task CanGetRecordWithCustomMapperAsync() - { - // Arrange. - var hashEntries = new HashEntry[] - { - new("OriginalNameData", "data 1"), - new("data_storage_name", "data 1"), - new("vector_storage_name", MemoryMarshal.AsBytes(new ReadOnlySpan(new float[] { 1, 2, 3, 4 })).ToArray()) - }; - this._redisDatabaseMock.Setup(x => x.HashGetAllAsync(It.IsAny(), CommandFlags.None)).ReturnsAsync(hashEntries); - - // Arrange mapper mock from JsonNode to data model. - var mapperMock = new Mock>(MockBehavior.Strict); - mapperMock.Setup( - x => x.MapFromStorageToDataModel( - It.IsAny<(string key, HashEntry[] hashEntries)>(), - It.IsAny())) - .Returns(CreateModel(TestRecordKey1, true)); - - // Arrange target with custom mapper. - var sut = new RedisHashSetVectorStoreRecordCollection( - this._redisDatabaseMock.Object, - TestCollectionName, - new() - { - HashEntriesCustomMapper = mapperMock.Object - }); - - // Act - var actual = await sut.GetAsync( - TestRecordKey1, - new() { IncludeVectors = true }); - - // Assert - Assert.NotNull(actual); - Assert.Equal(TestRecordKey1, actual.Key); - Assert.Equal("data 1", actual.OriginalNameData); - Assert.Equal("data 1", actual.Data); - Assert.Equal(new float[] { 1, 2, 3, 4 }, actual.Vector!.Value.ToArray()); - - mapperMock - .Verify( - x => x.MapFromStorageToDataModel( - It.Is<(string key, HashEntry[] hashEntries)>(x => x.key == TestRecordKey1), - It.Is(x => x.IncludeVectors)), - Times.Once); - } -#pragma warning restore CS0618 - [Theory] [InlineData(true)] [InlineData(false)] @@ -378,48 +327,6 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition) Times.Once); } -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - [Fact] - public async Task CanUpsertRecordWithCustomMapperAsync() - { - // Arrange. - this._redisDatabaseMock.Setup(x => x.HashSetAsync(It.IsAny(), It.IsAny(), CommandFlags.None)).Returns(Task.CompletedTask); - - // Arrange mapper mock from data model to JsonNode. 
- var mapperMock = new Mock>(MockBehavior.Strict); - var hashEntries = new HashEntry[] - { - new("OriginalNameData", "data 1"), - new("data_storage_name", "data 1"), - new("vector_storage_name", "[1,2,3,4]"), - new("NotAnnotated", RedisValue.Null) - }; - mapperMock - .Setup(x => x.MapFromDataToStorageModel(It.IsAny())) - .Returns((TestRecordKey1, hashEntries)); - - // Arrange target with custom mapper. - var sut = new RedisHashSetVectorStoreRecordCollection( - this._redisDatabaseMock.Object, - TestCollectionName, - new() - { - HashEntriesCustomMapper = mapperMock.Object - }); - - var model = CreateModel(TestRecordKey1, true); - - // Act - await sut.UpsertAsync(model); - - // Assert - mapperMock - .Verify( - x => x.MapFromDataToStorageModel(It.Is(x => x == model)), - Times.Once); - } -#pragma warning restore CS0618 - #pragma warning disable CS0618 // VectorSearchFilter is obsolete [Theory] [InlineData(true, true)] @@ -539,7 +446,7 @@ public void CanCreateCollectionWithMismatchedDefinitionAndType() var sut = new RedisHashSetVectorStoreRecordCollection( this._redisDatabaseMock.Object, TestCollectionName, - new() { VectorStoreRecordDefinition = definition, HashEntriesCustomMapper = Mock.Of>() }); + new() { VectorStoreRecordDefinition = definition }); } #pragma warning restore CS0618 diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs index b82841a159c9..9b83f5d13cc1 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordCollectionTests.cs @@ -5,7 +5,6 @@ using System.Linq; using System.Runtime.InteropServices; using System.Text.Json; -using System.Text.Json.Nodes; using System.Text.Json.Serialization; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; @@ -255,51 +254,6 @@ public async Task CanGetManyRecordsWithVectorsAsync(bool useDefinition) Assert.Equal(new float[] { 5, 6, 7, 8 }, actual[1].Vector1!.Value.ToArray()); } - [Fact] - public async Task CanGetRecordWithCustomMapperAsync() - { - // Arrange. - var redisResultString = """{ "data1_json_name": "data 1", "Data2": "data 2", "vector1_json_name": [1, 2, 3, 4], "Vector2": [1, 2, 3, 4] }"""; - SetupExecuteMock(this._redisDatabaseMock, redisResultString); - - // Arrange mapper mock from JsonNode to data model. - var mapperMock = new Mock>(MockBehavior.Strict); - mapperMock.Setup( - x => x.MapFromStorageToDataModel( - It.IsAny<(string key, JsonNode node)>(), - It.IsAny())) - .Returns(CreateModel(TestRecordKey1, true)); - - // Arrange target with custom mapper. 
- var sut = new RedisJsonVectorStoreRecordCollection( - this._redisDatabaseMock.Object, - TestCollectionName, - new() - { - JsonNodeCustomMapper = mapperMock.Object - }); - - // Act - var actual = await sut.GetAsync( - TestRecordKey1, - new() { IncludeVectors = true }); - - // Assert - Assert.NotNull(actual); - Assert.Equal(TestRecordKey1, actual.Key); - Assert.Equal("data 1", actual.Data1); - Assert.Equal("data 2", actual.Data2); - Assert.Equal(new float[] { 1, 2, 3, 4 }, actual.Vector1!.Value.ToArray()); - Assert.Equal(new float[] { 1, 2, 3, 4 }, actual.Vector2!.Value.ToArray()); - - mapperMock - .Verify( - x => x.MapFromStorageToDataModel( - It.Is<(string key, JsonNode node)>(x => x.key == TestRecordKey1), - It.Is(x => x.IncludeVectors)), - Times.Once); - } - [Theory] [InlineData(true)] [InlineData(false)] @@ -408,40 +362,6 @@ public async Task CanUpsertManyRecordsAsync(bool useDefinition) Times.Once); } - [Fact] - public async Task CanUpsertRecordWithCustomMapperAsync() - { - // Arrange. - SetupExecuteMock(this._redisDatabaseMock, "OK"); - - // Arrange mapper mock from data model to JsonNode. - var mapperMock = new Mock>(MockBehavior.Strict); - var jsonNode = """{"data1_json_name":"data 1","Data2": "data 2","vector1_json_name":[1,2,3,4],"Vector2":[1,2,3,4],"NotAnnotated":null}"""; - mapperMock - .Setup(x => x.MapFromDataToStorageModel(It.IsAny())) - .Returns((TestRecordKey1, JsonNode.Parse(jsonNode)!)); - - // Arrange target with custom mapper. - var sut = new RedisJsonVectorStoreRecordCollection( - this._redisDatabaseMock.Object, - TestCollectionName, - new() - { - JsonNodeCustomMapper = mapperMock.Object - }); - - var model = CreateModel(TestRecordKey1, true); - - // Act - await sut.UpsertAsync(model); - - // Assert - mapperMock - .Verify( - x => x.MapFromDataToStorageModel(It.Is(x => x == model)), - Times.Once); - } - [Theory] [InlineData(true)] [InlineData(false)] diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs index f9f02c2ff716..4dc94227b174 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs @@ -10,7 +10,6 @@ using System.Threading.Tasks; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Weaviate; -using Moq; using Xunit; namespace SemanticKernel.Connectors.Weaviate.UnitTests; @@ -349,87 +348,6 @@ public async Task UpsertReturnsRecordKeysAsync() Assert.Equal("Test Name 2", jsonObject2["properties"]?["hotelName"]?.GetValue()); } -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - [Fact] - public async Task UpsertWithCustomMapperWorksCorrectlyAsync() - { - // Arrange - var id = new Guid("11111111-1111-1111-1111-111111111111"); - var hotel = new WeaviateHotel { HotelId = id, HotelName = "Test Name" }; - - var jsonObject = new JsonObject { ["id"] = id.ToString(), ["properties"] = new JsonObject() }; - - jsonObject["properties"]!["hotel_name"] = "Test Name from Mapper"; - - var mockMapper = new Mock>(); - - mockMapper - .Setup(l => l.MapFromDataToStorageModel(It.IsAny())) - .Returns(jsonObject); - - var batchResponse = new List { new() { Id = id, Result = new() { Status = "Success" } } }; - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new 
StringContent(JsonSerializer.Serialize(batchResponse)), - }; - - var sut = new WeaviateVectorStoreRecordCollection( - this._mockHttpClient, - "Collection", - new() { JsonObjectCustomMapper = mockMapper.Object }); - - // Act - var result = await sut.UpsertAsync(hotel); - - // Assert - Assert.Equal(id, result); - - var request = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); - - Assert.NotNull(request?.CollectionObjects); - - var requestObject = request.CollectionObjects[0]; - - Assert.Equal("11111111-1111-1111-1111-111111111111", requestObject["id"]?.GetValue()); - Assert.Equal("Test Name from Mapper", requestObject["properties"]?["hotel_name"]?.GetValue()); - } - - [Fact] - public async Task GetWithCustomMapperWorksCorrectlyAsync() - { - // Arrange - var id = new Guid("11111111-1111-1111-1111-111111111111"); - var jsonObject = new JsonObject { ["id"] = id.ToString(), ["properties"] = new JsonObject() }; - - jsonObject["properties"]!["hotelName"] = "Test Name"; - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(JsonSerializer.Serialize(jsonObject)) - }; - - var mockMapper = new Mock>(); - - mockMapper - .Setup(l => l.MapFromStorageToDataModel(It.IsAny(), It.IsAny())) - .Returns(new WeaviateHotel { HotelId = id, HotelName = "Test Name from mapper" }); - - var sut = new WeaviateVectorStoreRecordCollection( - this._mockHttpClient, - "Collection", - new() { JsonObjectCustomMapper = mockMapper.Object }); - - // Act - var result = await sut.GetAsync(id); - - // Assert - Assert.NotNull(result); - Assert.Equal(id, result.HotelId); - Assert.Equal("Test Name from mapper", result.HotelName); - } -#pragma warning restore CS0618 - [Theory] [InlineData(true, "http://test-endpoint/schema", "Bearer fake-key")] [InlineData(false, "http://default-endpoint/schema", null)] diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs index 3af4f5315871..e61cfe4be48a 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordMapper.cs @@ -9,7 +9,7 @@ namespace Microsoft.Extensions.VectorData; /// /// The consumer record data model to map to or from. /// The storage model to map to or from. 
-[Obsolete("Custom mappers are being obsoleted.")] +[Obsolete("Custom mappers are no longer supported.", error: true)] public interface IVectorStoreRecordMapper { /// diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs index b8c0aca87be4..dc4c7659fe2e 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -3,7 +3,6 @@ using System; using System.Collections.Generic; using System.Linq; -using System.Text.Json.Nodes; using System.Threading.Tasks; using Azure; using Azure.Search.Documents.Indexes; @@ -324,17 +323,6 @@ public async Task ItThrowsOperationExceptionForFailedAuthenticationAsync() await Assert.ThrowsAsync(async () => await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true })); } - [Fact(Skip = SkipReason)] - public async Task ItThrowsMappingExceptionForFailedMapperAsync() - { - // Arrange - var options = new AzureAISearchVectorStoreRecordCollectionOptions { JsonObjectCustomMapper = new FailingMapper() }; - var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName, options); - - // Act & Assert - await Assert.ThrowsAsync(async () => await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true })); - } - [Theory(Skip = SkipReason)] [InlineData("equality", true)] [InlineData("tagContains", false)] @@ -461,17 +449,4 @@ public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() LastRenovationDate = new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), Rating = 3.6 }; - - private sealed class FailingMapper : IVectorStoreRecordMapper - { - public JsonObject MapFromDataToStorageModel(AzureAISearchHotel dataModel) - { - throw new NotImplementedException(); - } - - public AzureAISearchHotel MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) - { - throw new NotImplementedException(); - } - } } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs index 3eb7bc1f8752..ccc3df83c9ab 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs @@ -366,17 +366,6 @@ public async Task ItReturnsNullWhenGettingNonExistentRecordAsync() Assert.Null(await sut.GetAsync(15, new GetRecordOptions { IncludeVectors = true })); } - [Fact] - public async Task ItThrowsMappingExceptionForFailedMapperAsync() - { - // Arrange - var options = new QdrantVectorStoreRecordCollectionOptions { PointStructCustomMapper = new FailingMapper() }; - var sut = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, "singleVectorHotels", options); - - // Act & Assert - await Assert.ThrowsAsync(async () => await sut.GetAsync(11, new GetRecordOptions { IncludeVectors = true })); - } - [Theory] [InlineData(true, "singleVectorHotels", false, "equality")] [InlineData(false, "singleVectorHotels", false, "equality")] @@ -488,17 +477,4 @@ private async Task CreateTestHotelAsync(uint hotelId, ITextEmbeddingG DescriptionEmbedding = await 
embeddingGenerator.GenerateEmbeddingAsync("This is a great hotel."), }; } - - private sealed class FailingMapper : IVectorStoreRecordMapper - { - public PointStruct MapFromDataToStorageModel(HotelInfo dataModel) - { - throw new NotImplementedException(); - } - - public HotelInfo MapFromStorageToDataModel(PointStruct storageModel, StorageToDataModelMapperOptions options) - { - throw new NotImplementedException(); - } - } } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs index a96e4bf1b9f1..8b64c881b786 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -8,7 +8,6 @@ using Microsoft.SemanticKernel.Connectors.Redis; using NRedisStack.RedisStackCommands; using NRedisStack.Search; -using StackExchange.Redis; using Xunit; using Xunit.Abstractions; @@ -425,21 +424,6 @@ public async Task ItReturnsNullWhenGettingNonExistentRecordAsync() Assert.Null(await sut.GetAsync("HBaseSet-5", new GetRecordOptions { IncludeVectors = true })); } - [Fact(Skip = SkipReason)] - public async Task ItThrowsMappingExceptionForFailedMapperAsync() - { - // Arrange - var options = new RedisHashSetVectorStoreRecordCollectionOptions - { - PrefixCollectionNameToKeyNames = true, - HashEntriesCustomMapper = new FailingMapper() - }; - var sut = new RedisHashSetVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); - - // Act & Assert - await Assert.ThrowsAsync(async () => await sut.GetAsync("HBaseSet-1", new GetRecordOptions { IncludeVectors = true })); - } - [Fact(Skip = SkipReason)] public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() { @@ -504,17 +488,4 @@ private static RedisBasicFloat32Hotel CreateTestHotel(string hotelId, int hotelC }; return record; } - - private sealed class FailingMapper : IVectorStoreRecordMapper - { - public (string Key, HashEntry[] HashEntries) MapFromDataToStorageModel(RedisBasicFloat32Hotel dataModel) - { - throw new NotImplementedException(); - } - - public RedisBasicFloat32Hotel MapFromStorageToDataModel((string Key, HashEntry[] HashEntries) storageModel, StorageToDataModelMapperOptions options) - { - throw new NotImplementedException(); - } - } } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs index 4add283ba03f..2bb8bce7c1de 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs @@ -3,7 +3,6 @@ using System; using System.Collections.Generic; using System.Linq; -using System.Text.Json.Nodes; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Redis; @@ -445,21 +444,6 @@ public async Task ItReturnsNullWhenGettingNonExistentRecordAsync() Assert.Null(await sut.GetAsync("BaseSet-5", new GetRecordOptions { IncludeVectors = true })); } - [Fact(Skip = SkipReason)] - public async Task ItThrowsMappingExceptionForFailedMapperAsync() - { - // Arrange - var options = new RedisJsonVectorStoreRecordCollectionOptions - { - PrefixCollectionNameToKeyNames = true, - 
JsonNodeCustomMapper = new FailingMapper() - }; - var sut = new RedisJsonVectorStoreRecordCollection(fixture.Database, TestCollectionName, options); - - // Act & Assert - await Assert.ThrowsAsync(async () => await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true })); - } - [Fact(Skip = SkipReason)] public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() { @@ -540,17 +524,4 @@ private static RedisHotel CreateTestHotel(string hotelId, int hotelCode) }; return record; } - - private sealed class FailingMapper : IVectorStoreRecordMapper - { - public (string Key, JsonNode Node) MapFromDataToStorageModel(RedisHotel dataModel) - { - throw new NotImplementedException(); - } - - public RedisHotel MapFromStorageToDataModel((string Key, JsonNode Node) storageModel, StorageToDataModelMapperOptions options) - { - throw new NotImplementedException(); - } - } } diff --git a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/IMongoDBMapper.cs b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/IMongoDBMapper.cs new file mode 100644 index 000000000000..8f92ed9aaa7d --- /dev/null +++ b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/IMongoDBMapper.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using MongoDB.Bson; + +internal interface IMongoDBMapper +{ + /// + /// Maps from the consumer record data model to the storage model. + /// + /// The consumer record data model record to map. + /// The mapped result. + BsonDocument MapFromDataToStorageModel(TRecord dataModel); + + /// + /// Maps from the storage model to the consumer record data model. + /// + /// The storage data model record to map. + /// Options to control the mapping behavior. + /// The mapped result. 
+ TRecord MapFromStorageToDataModel(BsonDocument storageModel, StorageToDataModelMapperOptions options); +} diff --git a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBDynamicDataModelMapper.cs b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBDynamicDataModelMapper.cs index 131d7603d2ad..979c1ce9fbda 100644 --- a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBDynamicDataModelMapper.cs +++ b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBDynamicDataModelMapper.cs @@ -17,7 +17,7 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// [ExcludeFromCodeCoverage] #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete -internal sealed class MongoDBDynamicDataModelMapper(VectorStoreRecordModel model) : IVectorStoreRecordMapper, BsonDocument> +internal sealed class MongoDBDynamicDataModelMapper(VectorStoreRecordModel model) : IMongoDBMapper> #pragma warning restore CS0618 { /// diff --git a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs index 78dd7c931419..0dbf0235e1e7 100644 --- a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs +++ b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs @@ -14,7 +14,7 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; [ExcludeFromCodeCoverage] #pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete -internal sealed class MongoDBVectorStoreRecordMapper : IVectorStoreRecordMapper +internal sealed class MongoDBVectorStoreRecordMapper : IMongoDBMapper #pragma warning restore CS0618 { /// A key property info of the data model. diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs index bda0fbac24a2..7928588bb13e 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs @@ -2,7 +2,6 @@ using Microsoft.Data.SqlClient; using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Connectors.SqlServer; using SqlServerIntegrationTests.Support; using VectorDataSpecificationTests.Xunit; using Xunit; @@ -150,60 +149,6 @@ public async Task WrongModels() } } -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - [ConditionalFact] - public async Task CustomMapper() - { - string collectionName = GetUniqueCollectionName(); - TestModelMapper mapper = new(); - SqlServerVectorStoreRecordCollectionOptions options = new() - { - Mapper = mapper - }; - SqlServerVectorStoreRecordCollection collection = new(SqlServerTestEnvironment.ConnectionString!, collectionName, options); - - try - { - await collection.CreateCollectionIfNotExistsAsync(); - - TestModel inserted = new() - { - Id = "MyId", - Number = 100, - Floats = Enumerable.Range(0, 10).Select(i => (float)i).ToArray() - }; - string key = await collection.UpsertAsync(inserted); - Assert.Equal(inserted.Id, key); - Assert.True(mapper.MapFromDataToStorageModel_WasCalled); - Assert.False(mapper.MapFromStorageToDataModel_WasCalled); - - TestModel? 
received = await collection.GetAsync(inserted.Id, new() { IncludeVectors = true }); - AssertEquality(inserted, received); - Assert.True(mapper.MapFromStorageToDataModel_WasCalled); - - TestModel updated = new() - { - Id = inserted.Id, - Number = inserted.Number + 200, // change one property - Floats = inserted.Floats - }; - key = await collection.UpsertAsync(updated); - Assert.Equal(inserted.Id, key); - - received = await collection.GetAsync(updated.Id, new() { IncludeVectors = true }); - AssertEquality(updated, received); - - await collection.DeleteAsync(inserted.Id); - - Assert.Null(await collection.GetAsync(inserted.Id)); - } - finally - { - await collection.DeleteCollectionAsync(); - } - } -#pragma warning restore CS0618 - [ConditionalFact] public async Task BatchCRUD() { @@ -468,39 +413,4 @@ public sealed class FancyTestModel [VectorStoreRecordVector(Dimensions: 10, StoragePropertyName = "embedding")] public ReadOnlyMemory Floats { get; set; } } - -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - private sealed class TestModelMapper : IVectorStoreRecordMapper> - { - internal bool MapFromDataToStorageModel_WasCalled { get; set; } - internal bool MapFromStorageToDataModel_WasCalled { get; set; } - - public IDictionary MapFromDataToStorageModel(TestModel dataModel) - { - this.MapFromDataToStorageModel_WasCalled = true; - - return new Dictionary() - { - { "key", dataModel.Id }, - { "text", dataModel.Text }, - { "column", dataModel.Number }, - // Please note that we are not dealing with JSON directly here. - { "embedding", dataModel.Floats } - }; - } - - public TestModel MapFromStorageToDataModel(IDictionary storageModel, StorageToDataModelMapperOptions options) - { - this.MapFromStorageToDataModel_WasCalled = true; - - return new() - { - Id = (string)storageModel["key"]!, - Text = (string?)storageModel["text"], - Number = (int)storageModel["column"]!, - Floats = (ReadOnlyMemory)storageModel["embedding"]! - }; - } - } -#pragma warning restore CS0618 } From 71da48563a23db5df6a6baff75547ea26bab123c Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Wed, 16 Apr 2025 09:28:15 +0100 Subject: [PATCH 44/63] .Net: Merge fixes after updating to latest version of qdrant. (#11572) ### Motivation and Context - The latest version of the qdrant SDK has breaking changes. - Since all mappers are custom, there's an opportunity the simplify the mapping logic. ### Description - Fixing issues introduced by merging SDK updates. 
- Removing mapping to a common point before mapping from data to storage model ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- ...drantVectorStoreCollectionSearchMapping.cs | 55 ++----------------- .../QdrantVectorStoreRecordCollection.cs | 34 +----------- .../QdrantVectorStoreRecordMapper.cs | 15 +++-- .../QdrantVectorStoreRecordMapperTests.cs | 53 ++++++++++-------- .../QdrantVectorStoreRecordCollectionTests.cs | 8 +-- 5 files changed, 48 insertions(+), 117 deletions(-) diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs index bf42c870d309..3646127798e9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionSearchMapping.cs @@ -1,8 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Collections.Generic; -using System.Linq; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; using Qdrant.Client.Grpc; @@ -105,37 +103,6 @@ public static VectorSearchResult MapScoredPointToVectorSearchResult payloadEntry in point.Payload) - { - pointStruct.Payload.Add(payloadEntry.Key, payloadEntry.Value); - } - // Do the mapping with error handling. return new VectorSearchResult( VectorStoreErrorHandler.RunModelConversion( @@ -143,39 +110,25 @@ public static VectorSearchResult MapScoredPointToVectorSearchResult mapper.MapFromStorageToDataModel(pointStruct, new() { IncludeVectors = includeVectors })), + () => mapper.MapFromStorageToDataModel(point.Id, point.Payload, point.Vectors, new() { IncludeVectors = includeVectors })), point.Score); } -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete - internal static TRecord MapRetrievedPointToVectorSearchResult(RetrievedPoint point, + internal static TRecord MapRetrievedPointToRecord( + RetrievedPoint point, QdrantVectorStoreRecordMapper mapper, bool includeVectors, string vectorStoreSystemName, string? vectorStoreName, string collectionName, string operationName) -#pragma warning restore CS0618 { - // Since the mapper doesn't know about scored points, we need to convert the scored point to a point struct first. - var pointStruct = new PointStruct - { - Id = point.Id, - Vectors = point.Vectors, - Payload = { } - }; - - foreach (KeyValuePair payloadEntry in point.Payload) - { - pointStruct.Payload.Add(payloadEntry.Key, payloadEntry.Value); - } - // Do the mapping with error handling. 
return VectorStoreErrorHandler.RunModelConversion( vectorStoreSystemName, vectorStoreName, collectionName, operationName, - () => mapper.MapFromStorageToDataModel(pointStruct, new() { IncludeVectors = includeVectors })); + () => mapper.MapFromStorageToDataModel(point.Id, point.Payload, point.Vectors, new() { IncludeVectors = includeVectors })); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index 9dfc8fd52cd8..99f6eb4403f4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -297,42 +297,12 @@ public async IAsyncEnumerable GetAsync( // Convert the retrieved points to the target data model. foreach (var retrievedPoint in retrievedPoints) { - var pointStruct = new PointStruct - { - Id = retrievedPoint.Id, - Payload = { } - }; - - if (includeVectors) - { - pointStruct.Vectors = new(); - switch (retrievedPoint.Vectors.VectorsOptionsCase) - { - case VectorsOutput.VectorsOptionsOneofCase.Vector: - pointStruct.Vectors.Vector = retrievedPoint.Vectors.Vector.Data.ToArray(); - break; - case VectorsOutput.VectorsOptionsOneofCase.Vectors: - pointStruct.Vectors.Vectors_ = new(); - foreach (var v in retrievedPoint.Vectors.Vectors.Vectors) - { - // TODO: Refactor mapper to not require pre-mapping to pointstruct to avoid this ToArray conversion. - pointStruct.Vectors.Vectors_.Vectors.Add(v.Key, v.Value.Data.ToArray()); - } - break; - } - } - - foreach (KeyValuePair payloadEntry in retrievedPoint.Payload) - { - pointStruct.Payload.Add(payloadEntry.Key, payloadEntry.Value); - } - yield return VectorStoreErrorHandler.RunModelConversion( QdrantConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this._collectionName, OperationName, - () => this._mapper.MapFromStorageToDataModel(pointStruct, new() { IncludeVectors = includeVectors })); + () => this._mapper.MapFromStorageToDataModel(retrievedPoint.Id, retrievedPoint.Payload, retrievedPoint.Vectors, new() { IncludeVectors = includeVectors })); } } @@ -595,7 +565,7 @@ public async IAsyncEnumerable GetAsync(Expression> orderBy, cancellationToken: cancellationToken)).ConfigureAwait(false); - var mappedResults = scrollResponse.Result.Skip(options.Skip).Select(point => QdrantVectorStoreCollectionSearchMapping.MapRetrievedPointToVectorSearchResult( + var mappedResults = scrollResponse.Result.Skip(options.Skip).Select(point => QdrantVectorStoreCollectionSearchMapping.MapRetrievedPointToRecord( point, this._mapper, options.IncludeVectors, diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs index 322b4d869c93..0fe15b3ea781 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs @@ -3,6 +3,7 @@ using System; using System.Diagnostics; using System.Linq; +using Google.Protobuf.Collections; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; using Qdrant.Client.Grpc; @@ -83,15 +84,15 @@ public PointStruct MapFromDataToStorageModel(TRecord dataModel) } /// - public TRecord MapFromStorageToDataModel(PointStruct storageModel, StorageToDataModelMapperOptions options) + public TRecord 
MapFromStorageToDataModel(PointId pointId, MapField payload, VectorsOutput vectorsOutput, StorageToDataModelMapperOptions options) { var outputRecord = model.CreateRecord()!; // TODO: Set the following generically to avoid boxing - model.KeyProperty.SetValueAsObject(outputRecord, storageModel.Id switch + model.KeyProperty.SetValueAsObject(outputRecord, pointId switch { - { HasNum: true } => storageModel.Id.Num, - { HasUuid: true } => Guid.Parse(storageModel.Id.Uuid), + { HasNum: true } => pointId.Num, + { HasUuid: true } => Guid.Parse(pointId.Uuid), _ => throw new UnreachableException() }); @@ -100,7 +101,7 @@ public TRecord MapFromStorageToDataModel(PointStruct storageModel, StorageToData { if (hasNamedVectors) { - var storageVectors = storageModel.Vectors.Vectors_.Vectors; + var storageVectors = vectorsOutput.Vectors.Vectors; foreach (var vectorProperty in model.VectorProperties) { @@ -113,12 +114,10 @@ public TRecord MapFromStorageToDataModel(PointStruct storageModel, StorageToData { model.VectorProperty.SetValueAsObject( outputRecord, - new ReadOnlyMemory(storageModel.Vectors.Vector.Data.ToArray())); + new ReadOnlyMemory(vectorsOutput.Vector.Data.ToArray())); } } - var payload = storageModel.Payload; - foreach (var dataProperty in model.DataProperties) { if (payload.TryGetValue(dataProperty.StorageName, out var fieldValue)) diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs index d60965924877..af1f43b6a321 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs @@ -82,7 +82,8 @@ public void MapsSinglePropsFromStorageToDataModelWithUlong(bool hasNamedVectors, var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors); // Act. - var actual = sut.MapFromStorageToDataModel(CreateSinglePropsPointStruct(5, hasNamedVectors), new() { IncludeVectors = includeVectors }); + var point = CreateSinglePropsPointStruct(5, hasNamedVectors); + var actual = sut.MapFromStorageToDataModel(point.Id, point.Payload, point.Vectors, new() { IncludeVectors = includeVectors }); // Assert. Assert.NotNull(actual); @@ -113,7 +114,8 @@ public void MapsSinglePropsFromStorageToDataModelWithGuid(bool hasNamedVectors, var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors); // Act. - var actual = sut.MapFromStorageToDataModel(CreateSinglePropsPointStruct(Guid.Parse("11111111-1111-1111-1111-111111111111"), hasNamedVectors), new() { IncludeVectors = includeVectors }); + var point = CreateSinglePropsPointStruct(Guid.Parse("11111111-1111-1111-1111-111111111111"), hasNamedVectors); + var actual = sut.MapFromStorageToDataModel(point.Id, point.Payload, point.Vectors, new() { IncludeVectors = includeVectors }); // Assert. Assert.NotNull(actual); @@ -199,7 +201,8 @@ public void MapsMultiPropsFromStorageToDataModelWithUlong(bool includeVectors) var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors: true); // Act. - var actual = sut.MapFromStorageToDataModel(CreateMultiPropsPointStruct(5), new() { IncludeVectors = includeVectors }); + var point = CreateMultiPropsPointStruct(5); + var actual = sut.MapFromStorageToDataModel(point.Id, point.Payload, point.Vectors, new() { IncludeVectors = includeVectors }); // Assert. 
Assert.NotNull(actual); @@ -237,7 +240,8 @@ public void MapsMultiPropsFromStorageToDataModelWithGuid(bool includeVectors) var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors: true); // Act. - var actual = sut.MapFromStorageToDataModel(CreateMultiPropsPointStruct(Guid.Parse("11111111-1111-1111-1111-111111111111")), new() { IncludeVectors = includeVectors }); + var point = CreateMultiPropsPointStruct(Guid.Parse("11111111-1111-1111-1111-111111111111")); + var actual = sut.MapFromStorageToDataModel(point.Id, point.Payload, point.Vectors, new() { IncludeVectors = includeVectors }); // Assert. Assert.NotNull(actual); @@ -293,55 +297,57 @@ private static MultiPropsModel CreateMultiPropsModel(TKey key) }; } - private static PointStruct CreateSinglePropsPointStruct(ulong id, bool hasNamedVectors) + private static RetrievedPoint CreateSinglePropsPointStruct(ulong id, bool hasNamedVectors) { - var pointStruct = new PointStruct(); + var pointStruct = new RetrievedPoint(); pointStruct.Id = new PointId() { Num = id }; AddDataToSinglePropsPointStruct(pointStruct, hasNamedVectors); return pointStruct; } - private static PointStruct CreateSinglePropsPointStruct(Guid id, bool hasNamedVectors) + private static RetrievedPoint CreateSinglePropsPointStruct(Guid id, bool hasNamedVectors) { - var pointStruct = new PointStruct(); + var pointStruct = new RetrievedPoint(); pointStruct.Id = new PointId() { Uuid = id.ToString() }; AddDataToSinglePropsPointStruct(pointStruct, hasNamedVectors); return pointStruct; } - private static void AddDataToSinglePropsPointStruct(PointStruct pointStruct, bool hasNamedVectors) + private static void AddDataToSinglePropsPointStruct(RetrievedPoint pointStruct, bool hasNamedVectors) { + var responseVector = VectorOutput.Parser.ParseJson("{ \"data\": [1, 2, 3, 4] }"); + pointStruct.Payload.Add("data", "data value"); if (hasNamedVectors) { - var namedVectors = new NamedVectors(); - namedVectors.Vectors.Add("vector", new[] { 1f, 2f, 3f, 4f }); - pointStruct.Vectors = new Vectors() { Vectors_ = namedVectors }; + var namedVectors = new NamedVectorsOutput(); + namedVectors.Vectors.Add("vector", responseVector); + pointStruct.Vectors = new VectorsOutput() { Vectors = namedVectors }; } else { - pointStruct.Vectors = new[] { 1f, 2f, 3f, 4f }; + pointStruct.Vectors = new VectorsOutput() { Vector = responseVector }; } } - private static PointStruct CreateMultiPropsPointStruct(ulong id) + private static RetrievedPoint CreateMultiPropsPointStruct(ulong id) { - var pointStruct = new PointStruct(); + var pointStruct = new RetrievedPoint(); pointStruct.Id = new PointId() { Num = id }; AddDataToMultiPropsPointStruct(pointStruct); return pointStruct; } - private static PointStruct CreateMultiPropsPointStruct(Guid id) + private static RetrievedPoint CreateMultiPropsPointStruct(Guid id) { - var pointStruct = new PointStruct(); + var pointStruct = new RetrievedPoint(); pointStruct.Id = new PointId() { Uuid = id.ToString() }; AddDataToMultiPropsPointStruct(pointStruct); return pointStruct; } - private static void AddDataToMultiPropsPointStruct(PointStruct pointStruct) + private static void AddDataToMultiPropsPointStruct(RetrievedPoint pointStruct) { pointStruct.Payload.Add("dataString", "data 1"); pointStruct.Payload.Add("dataInt", 5); @@ -358,10 +364,13 @@ private static void AddDataToMultiPropsPointStruct(PointStruct pointStruct) dataIntArray.Values.Add(4); pointStruct.Payload.Add("dataArrayInt", new Value { ListValue = dataIntArray }); - var namedVectors = new NamedVectors(); - 
namedVectors.Vectors.Add("vector1", new[] { 1f, 2f, 3f, 4f }); - namedVectors.Vectors.Add("vector2", new[] { 5f, 6f, 7f, 8f }); - pointStruct.Vectors = new Vectors() { Vectors_ = namedVectors }; + var responseVector1 = VectorOutput.Parser.ParseJson("{ \"data\": [1, 2, 3, 4] }"); + var responseVector2 = VectorOutput.Parser.ParseJson("{ \"data\": [5, 6, 7, 8] }"); + + var namedVectors = new NamedVectorsOutput(); + namedVectors.Vectors.Add("vector1", responseVector1); + namedVectors.Vectors.Add("vector2", responseVector2); + pointStruct.Vectors = new VectorsOutput() { Vectors = namedVectors }; } private static VectorStoreRecordDefinition CreateSinglePropsVectorStoreRecordDefinition(Type keyType) => new() diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs index dcaa7fbeb851..30e8d5bfcd01 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs @@ -416,10 +416,10 @@ public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() var sut = new QdrantVectorStoreRecordCollection>(fixture.QdrantClient, "singleVectorHotels", options); // Act - var baseSetGetResult = await sut.GetAsync(11, new GetRecordOptions { IncludeVectors = true }); + var baseSetGetResult = await sut.GetAsync(11ul, new GetRecordOptions { IncludeVectors = true }); var upsertResult = await sut.UpsertAsync(new Dictionary { - ["HotelId"] = 40, + ["HotelId"] = 40ul, ["HotelName"] = "Dynamic Mapper Hotel", ["HotelCode"] = 40, @@ -430,7 +430,7 @@ public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() ["DescriptionEmbedding"] = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("This is a dynamic mapper hotel") }); - var localGetResult = await sut.GetAsync(40, new GetRecordOptions { IncludeVectors = true }); + var localGetResult = await sut.GetAsync(40ul, new GetRecordOptions { IncludeVectors = true }); // Assert Assert.NotNull(baseSetGetResult); @@ -448,7 +448,7 @@ public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() Assert.NotNull(localGetResult); Assert.Equal(40ul, localGetResult["HotelId"]); - Assert.Equal("Ddynamic Mapper Hotel", localGetResult["HotelName"]); + Assert.Equal("Dynamic Mapper Hotel", localGetResult["HotelName"]); Assert.Equal(40, localGetResult["HotelCode"]); Assert.False((bool)localGetResult["ParkingIncluded"]!); Assert.Equal(3.6f, localGetResult["HotelRating"]); From 59be52f9aa8600fd97fa45228584243072287d7d Mon Sep 17 00:00:00 2001 From: Adam Sitnik Date: Wed, 16 Apr 2025 13:40:05 +0200 Subject: [PATCH 45/63] .Net MEVD: fix two bugs (#11585) --- ...zureAISearchVectorStoreRecordCollection.cs | 2 ++ .../Filter/BasicFilterTests.cs | 28 ++++++++++++------- .../Filter/BasicQueryTests.cs | 8 ++++++ 3 files changed, 28 insertions(+), 10 deletions(-) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index 60a5e26e320f..4b8ada82e60b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -602,6 +602,8 @@ private async IAsyncEnumerable> SearchAndMapToDataMo { yield 
return result; } + + yield break; } // Execute search and map using the built in Azure AI Search mapper. diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs index 39fcf6ddc1c6..925fecbb607d 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs @@ -306,6 +306,22 @@ public virtual Task Legacy_AnyTagEqualTo_List() #endregion Legacy filter support + protected virtual async Task> GetRecords( + Expression> filter, int top, ReadOnlyMemory vector) + => await fixture.Collection.VectorizedSearchAsync( + vector, + top: top, + new() { Filter = filter }) + .Select(r => r.Record).OrderBy(r => r.Key).ToListAsync(); + + protected virtual async Task>> GetDynamicRecords( + Expression, bool>> dynamicFilter, int top, ReadOnlyMemory vector) + => await fixture.DynamicCollection.VectorizedSearchAsync( + vector, + top: top, + new() { Filter = dynamicFilter }) + .Select(r => r.Record).OrderBy(r => r[nameof(FilterRecord.Key)]).ToListAsync(); + protected virtual async Task TestFilterAsync( Expression> filter, Expression, bool>> dynamicFilter, @@ -326,11 +342,7 @@ protected virtual async Task TestFilterAsync( // Execute the query against the vector store, once using the strongly typed filter // and once using the dynamic filter - var actual = await fixture.Collection.VectorizedSearchAsync( - new ReadOnlyMemory([1, 2, 3]), - top: fixture.TestData.Count, - new() { Filter = filter }) - .Select(r => r.Record).OrderBy(r => r.Key).ToListAsync(); + var actual = await this.GetRecords(filter, fixture.TestData.Count, new ReadOnlyMemory([1, 2, 3])); if (actual.Count != expected.Count) { @@ -344,11 +356,7 @@ protected virtual async Task TestFilterAsync( if (fixture.TestDynamic) { - var dynamicActual = await fixture.DynamicCollection.VectorizedSearchAsync( - new ReadOnlyMemory([1, 2, 3]), - top: fixture.TestData.Count, - new() { Filter = dynamicFilter }) - .Select(r => r.Record).OrderBy(r => r[nameof(FilterRecord.Key)]).ToListAsync(); + var dynamicActual = await this.GetDynamicRecords(dynamicFilter, fixture.TestData.Count, new ReadOnlyMemory([1, 2, 3])); if (dynamicActual.Count != expected.Count) { diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs index 67cea2645f11..b1f942dc076a 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs @@ -1,10 +1,18 @@ // Copyright (c) Microsoft. All rights reserved. 
+using System.Linq.Expressions; + namespace VectorDataSpecificationTests.Filter; public abstract class BasicQueryTests(BasicQueryTests.QueryFixture fixture) : BasicFilterTests(fixture) where TKey : notnull { + protected override async Task> GetRecords(Expression> filter, int top, ReadOnlyMemory vector) + => (await fixture.Collection.GetAsync(filter, top).ToListAsync()).OrderBy(r => r.Key).ToList(); + + protected override async Task>> GetDynamicRecords(Expression, bool>> dynamicFilter, int top, ReadOnlyMemory vector) + => (await fixture.DynamicCollection.GetAsync(dynamicFilter, top).ToListAsync()).OrderBy(r => r[nameof(FilterRecord.Key)]!).ToList(); + [Obsolete("Not used by derived types")] public sealed override Task Legacy_And() => Task.CompletedTask; From 4d7c0458a08beb55c4f1fe726ec05f5cc9d350d1 Mon Sep 17 00:00:00 2001 From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com> Date: Wed, 16 Apr 2025 07:21:08 -0700 Subject: [PATCH 46/63] .Net: [MEVD] Added GetRequiredService and renamed CollectionName (#11573) ### Motivation and Context Resolves: https://github.com/microsoft/semantic-kernel/issues/11408 In this PR: - Renamed `IVectorStoreRecordCollection.CollectionName` property to `Name`. - Added `GetRequiredService` extension method for vector store interfaces. ### Contribution Checklist - [x] The code builds clean without any errors or warnings - [x] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [x] All unit tests pass, and I have added new tests where possible - [x] I didn't break anyone :smile: --- ...extEmbeddingVectorStoreRecordCollection.cs | 2 +- ...zureAISearchVectorStoreRecordCollection.cs | 14 ++--- ...mosDBMongoDBVectorStoreRecordCollection.cs | 38 ++++++------ ...osmosDBNoSQLVectorStoreRecordCollection.cs | 36 +++++------ .../InMemoryVectorStoreRecordCollection.cs | 14 ++--- .../MongoDBVectorStoreRecordCollection.cs | 44 ++++++------- .../PineconeVectorStoreRecordCollection.cs | 38 ++++++------ .../PostgresVectorStoreRecordCollection.cs | 62 +++++++++---------- .../QdrantVectorStoreRecordCollection.cs | 24 +++---- ...RedisHashSetVectorStoreRecordCollection.cs | 12 ++-- .../RedisJsonVectorStoreRecordCollection.cs | 12 ++-- .../SqlServerVectorStoreRecordCollection.cs | 52 ++++++++-------- .../SqliteVectorStoreRecordCollection.cs | 22 +++---- .../WeaviateVectorStoreRecordCollection.cs | 46 +++++++------- .../VectorData.Abstractions/Throw.cs | 14 +++++ .../KeywordHybridSearchExtensions.cs | 33 ++++++++++ .../VectorizableTextSearchExtensions.cs | 33 ++++++++++ .../VectorizedSearchExtensions.cs | 33 ++++++++++ .../IVectorStoreRecordCollection.cs | 2 +- .../VectorStorage/VectorStoreExtensions.cs | 33 ++++++++++ 20 files changed, 355 insertions(+), 209 deletions(-) create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/Throw.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/KeywordHybridSearchExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorizableTextSearchExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorizedSearchExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreExtensions.cs diff --git 
a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs index d3ae50ec7130..eaf346e90020 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs @@ -49,7 +49,7 @@ public TextEmbeddingVectorStoreRecordCollection(IVectorStoreRecordCollection - public string CollectionName => this._decoratedVectorStoreRecordCollection.CollectionName; + public string Name => this._decoratedVectorStoreRecordCollection.Name; /// public Task CollectionExistsAsync(CancellationToken cancellationToken = default) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index 4b8ada82e60b..d1c7eb6728f9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -64,15 +64,15 @@ public sealed class AzureAISearchVectorStoreRecordCollection : /// Initializes a new instance of the class. /// /// Azure AI Search client that can be used to manage the list of indices in an Azure AI Search Service. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. /// Thrown when is null. /// Thrown when options are misconfigured. - public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexClient, string collectionName, AzureAISearchVectorStoreRecordCollectionOptions? options = default) + public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexClient, string name, AzureAISearchVectorStoreRecordCollectionOptions? options = default) { // Verify. Verify.NotNull(searchIndexClient); - Verify.NotNullOrWhiteSpace(collectionName); + Verify.NotNullOrWhiteSpace(name); if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(object)) { @@ -81,9 +81,9 @@ public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexCli // Assign. this._searchIndexClient = searchIndexClient; - this._collectionName = collectionName; + this._collectionName = name; this._options = options ?? 
new AzureAISearchVectorStoreRecordCollectionOptions(); - this._searchClient = this._searchIndexClient.GetSearchClient(collectionName); + this._searchClient = this._searchIndexClient.GetSearchClient(name); this._model = new VectorStoreRecordJsonModelBuilder(AzureAISearchConstants.s_modelBuildingOptions) .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.JsonSerializerOptions); @@ -100,12 +100,12 @@ public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexCli { VectorStoreSystemName = AzureAISearchConstants.VectorStoreSystemName, VectorStoreName = searchIndexClient.ServiceName, - CollectionName = collectionName + CollectionName = name }; } /// - public string CollectionName => this._collectionName; + public string Name => this._collectionName; /// public async Task CollectionExistsAsync(CancellationToken cancellationToken = default) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index c9cdc5a9c7d5..77ab871771c9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -58,22 +58,22 @@ public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollection - public string CollectionName { get; } + public string Name { get; } /// /// Initializes a new instance of the class. /// /// that can be used to manage the collections in Azure CosmosDB MongoDB. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. public AzureCosmosDBMongoDBVectorStoreRecordCollection( IMongoDatabase mongoDatabase, - string collectionName, + string name, AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions? options = default) { // Verify. Verify.NotNull(mongoDatabase); - Verify.NotNullOrWhiteSpace(collectionName); + Verify.NotNullOrWhiteSpace(name); if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(object)) { @@ -82,8 +82,8 @@ public AzureCosmosDBMongoDBVectorStoreRecordCollection( // Assign. this._mongoDatabase = mongoDatabase; - this._mongoCollection = mongoDatabase.GetCollection(collectionName); - this.CollectionName = collectionName; + this._mongoCollection = mongoDatabase.GetCollection(name); + this.Name = name; this._options = options ?? 
new AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions(); this._model = new MongoDBModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); this._mapper = typeof(TRecord) == typeof(Dictionary) @@ -94,7 +94,7 @@ public AzureCosmosDBMongoDBVectorStoreRecordCollection( { VectorStoreSystemName = AzureCosmosDBMongoDBConstants.VectorStoreSystemName, VectorStoreName = mongoDatabase.DatabaseNamespace?.DatabaseName, - CollectionName = collectionName + CollectionName = name }; } @@ -113,7 +113,7 @@ public async Task CreateCollectionAsync(CancellationToken cancellationToken = de { VectorStoreSystemName = AzureCosmosDBMongoDBConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = "CreateCollection" }; } @@ -125,10 +125,10 @@ public async Task CreateCollectionAsync(CancellationToken cancellationToken = de public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) { await this.RunOperationAsync("CreateCollection", - () => this._mongoDatabase.CreateCollectionAsync(this.CollectionName, cancellationToken: cancellationToken)).ConfigureAwait(false); + () => this._mongoDatabase.CreateCollectionAsync(this.Name, cancellationToken: cancellationToken)).ConfigureAwait(false); await this.RunOperationAsync("CreateIndexes", - () => this.CreateIndexesAsync(this.CollectionName, cancellationToken: cancellationToken)).ConfigureAwait(false); + () => this.CreateIndexesAsync(this.Name, cancellationToken: cancellationToken)).ConfigureAwait(false); } /// @@ -153,7 +153,7 @@ await this.RunOperationAsync("DeleteMany", () => this._mongoCollection.DeleteMan /// public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) - => this.RunOperationAsync("DropCollection", () => this._mongoDatabase.DropCollectionAsync(this.CollectionName, cancellationToken)); + => this.RunOperationAsync("DropCollection", () => this._mongoDatabase.DropCollectionAsync(this.Name, cancellationToken)); /// public async Task GetAsync(TKey key, GetRecordOptions? 
options = null, CancellationToken cancellationToken = default) @@ -181,7 +181,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) return VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBMongoDBConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromStorageToDataModel(record, new() { IncludeVectors = includeVectors })); } @@ -211,7 +211,7 @@ public async IAsyncEnumerable GetAsync( yield return VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBMongoDBConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromStorageToDataModel(record, new())); } @@ -230,7 +230,7 @@ public Task UpsertAsync(TRecord record, CancellationToken cancellationToke var storageModel = VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBMongoDBConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -391,7 +391,7 @@ public async IAsyncEnumerable GetAsync(Expression> var record = VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBMongoDBConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, "GetAsync", () => this._mapper.MapFromStorageToDataModel(response, new() { IncludeVectors = options.IncludeVectors })); @@ -475,7 +475,7 @@ private async IAsyncEnumerable> EnumerateAndMapSearc var record = VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBMongoDBConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromStorageToDataModel(response[DocumentPropertyName].AsBsonDocument, new())); @@ -495,7 +495,7 @@ private FilterDefinition GetFilterByIds(IEnumerable ids) private async Task InternalCollectionExistsAsync(CancellationToken cancellationToken) { - var filter = new BsonDocument("name", this.CollectionName); + var filter = new BsonDocument("name", this.Name); var options = new ListCollectionNamesOptions { Filter = filter }; using var cursor = await this._mongoDatabase.ListCollectionNamesAsync(options, cancellationToken: cancellationToken).ConfigureAwait(false); @@ -515,7 +515,7 @@ private async Task RunOperationAsync(string operationName, Func operation) { VectorStoreSystemName = AzureCosmosDBMongoDBConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = operationName }; } @@ -533,7 +533,7 @@ private async Task RunOperationAsync(string operationName, Func> o { VectorStoreSystemName = AzureCosmosDBMongoDBConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = operationName }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index fff3d2fd5a56..b61112ff5439 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ 
b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -57,22 +57,22 @@ public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollection private readonly ICosmosNoSQLMapper _mapper; /// - public string CollectionName { get; } + public string Name { get; } /// /// Initializes a new instance of the class. /// /// that can be used to manage the collections in Azure CosmosDB NoSQL. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. public AzureCosmosDBNoSQLVectorStoreRecordCollection( Database database, - string collectionName, + string name, AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions? options = default) { // Verify. Verify.NotNull(database); - Verify.NotNullOrWhiteSpace(collectionName); + Verify.NotNullOrWhiteSpace(name); if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(AzureCosmosDBNoSQLCompositeKey) && typeof(TKey) != typeof(object)) { @@ -88,7 +88,7 @@ public AzureCosmosDBNoSQLVectorStoreRecordCollection( // Assign. this._database = database; - this.CollectionName = collectionName; + this.Name = name; this._options = options ?? new(); var jsonSerializerOptions = this._options.JsonSerializerOptions ?? JsonSerializerOptions.Default; this._model = new AzureCosmosDBNoSqlVectorStoreModelBuilder() @@ -124,7 +124,7 @@ public AzureCosmosDBNoSQLVectorStoreRecordCollection( { VectorStoreSystemName = AzureCosmosDBNoSQLConstants.VectorStoreSystemName, VectorStoreName = database.Id, - CollectionName = collectionName + CollectionName = name }; } @@ -135,7 +135,7 @@ public Task CollectionExistsAsync(CancellationToken cancellationToken = de { const string Query = "SELECT VALUE(c.id) FROM c WHERE c.id = @collectionName"; - var queryDefinition = new QueryDefinition(Query).WithParameter("@collectionName", this.CollectionName); + var queryDefinition = new QueryDefinition(Query).WithParameter("@collectionName", this.Name); using var feedIterator = this._database.GetContainerQueryIterator(queryDefinition); @@ -174,7 +174,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { return this.RunOperationAsync("DeleteContainer", () => this._database - .GetContainer(this.CollectionName) + .GetContainer(this.Name) .DeleteContainerAsync(cancellationToken: cancellationToken)); } @@ -194,7 +194,7 @@ public async Task DeleteAsync(IEnumerable keys, CancellationToken cancella return this.RunOperationAsync("DeleteItem", () => this._database - .GetContainer(this.CollectionName) + .GetContainer(this.Name) .DeleteItemAsync(key.RecordKey, new PartitionKey(key.PartitionKey), cancellationToken: cancellationToken)); }); @@ -233,7 +233,7 @@ public async IAsyncEnumerable GetAsync( var record = VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBNoSQLConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); @@ -254,7 +254,7 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati var jsonObject = VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBNoSQLConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -273,7 +273,7 @@ public async Task UpsertAsync(TRecord record, CancellationToken 
cancellati await this.RunOperationAsync(OperationName, () => this._database - .GetContainer(this.CollectionName) + .GetContainer(this.Name) .UpsertItemAsync(jsonObject, new PartitionKey(partitionKeyValue), cancellationToken: cancellationToken)) .ConfigureAwait(false); @@ -361,7 +361,7 @@ public async IAsyncEnumerable GetAsync(Expression> var record = VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBNoSQLConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, "GetAsync", () => this._mapper.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = options.IncludeVectors })); @@ -447,7 +447,7 @@ private async Task RunOperationAsync(string operationName, Func> o { VectorStoreSystemName = AzureCosmosDBNoSQLConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = operationName }; } @@ -471,7 +471,7 @@ private ContainerProperties GetContainerProperties() if (this._options.IndexingMode == IndexingMode.None) { - return new ContainerProperties(this.CollectionName, partitionKeyPath: $"/{this._partitionKeyProperty.StorageName}") + return new ContainerProperties(this.Name, partitionKeyPath: $"/{this._partitionKeyProperty.StorageName}") { IndexingPolicy = indexingPolicy }; @@ -528,7 +528,7 @@ private ContainerProperties GetContainerProperties() indexingPolicy.ExcludedPaths.Add(new ExcludedPath { Path = $"{vectorIndexPath.Path}/*" }); } - return new ContainerProperties(this.CollectionName, partitionKeyPath: $"/{this._partitionKeyProperty.StorageName}") + return new ContainerProperties(this.Name, partitionKeyPath: $"/{this._partitionKeyProperty.StorageName}") { VectorEmbeddingPolicy = vectorEmbeddingPolicy, IndexingPolicy = indexingPolicy, @@ -583,7 +583,7 @@ private static VectorDataType GetDataType(Type vectorDataType, string vectorProp private async IAsyncEnumerable GetItemsAsync(QueryDefinition queryDefinition, [EnumeratorCancellation] CancellationToken cancellationToken) { var iterator = this._database - .GetContainer(this.CollectionName) + .GetContainer(this.Name) .GetItemQueryIterator(queryDefinition); while (iterator.HasMoreResults) @@ -617,7 +617,7 @@ private async IAsyncEnumerable> MapSearchResultsAsyn var record = VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBNoSQLConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, operationName, () => this._mapper.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index 419996e7bc00..810f9932245b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -67,15 +67,15 @@ public sealed class InMemoryVectorStoreRecordCollection : IVector /// /// Initializes a new instance of the class. /// - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. - public InMemoryVectorStoreRecordCollection(string collectionName, InMemoryVectorStoreRecordCollectionOptions? 
options = default) + public InMemoryVectorStoreRecordCollection(string name, InMemoryVectorStoreRecordCollectionOptions? options = default) { // Verify. - Verify.NotNullOrWhiteSpace(collectionName); + Verify.NotNullOrWhiteSpace(name); // Assign. - this._collectionName = collectionName; + this._collectionName = name; this._internalCollections = new(); this._internalCollectionTypes = new(); this._options = options ?? new InMemoryVectorStoreRecordCollectionOptions(); @@ -111,7 +111,7 @@ public InMemoryVectorStoreRecordCollection(string collectionName, InMemoryVector this._collectionMetadata = new() { VectorStoreSystemName = InMemoryConstants.VectorStoreSystemName, - CollectionName = collectionName + CollectionName = name }; } @@ -134,7 +134,7 @@ internal InMemoryVectorStoreRecordCollection( } /// - public string CollectionName => this._collectionName; + public string Name => this._collectionName; /// public Task CollectionExistsAsync(CancellationToken cancellationToken = default) @@ -155,7 +155,7 @@ public Task CreateCollectionAsync(CancellationToken cancellationToken = default) return Task.FromException(new VectorStoreOperationException("Collection already exists.") { VectorStoreSystemName = InMemoryConstants.VectorStoreSystemName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = "CreateCollection" }); } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index 38244c4292a1..7ed0a3cc0b45 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -58,22 +58,22 @@ public sealed class MongoDBVectorStoreRecordCollection : IVectorS private readonly VectorStoreRecordModel _model; /// - public string CollectionName { get; } + public string Name { get; } /// /// Initializes a new instance of the class. /// /// that can be used to manage the collections in MongoDB. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. public MongoDBVectorStoreRecordCollection( IMongoDatabase mongoDatabase, - string collectionName, + string name, MongoDBVectorStoreRecordCollectionOptions? options = default) { // Verify. Verify.NotNull(mongoDatabase); - Verify.NotNullOrWhiteSpace(collectionName); + Verify.NotNullOrWhiteSpace(name); if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(object)) { @@ -82,8 +82,8 @@ public MongoDBVectorStoreRecordCollection( // Assign. this._mongoDatabase = mongoDatabase; - this._mongoCollection = mongoDatabase.GetCollection(collectionName); - this.CollectionName = collectionName; + this._mongoCollection = mongoDatabase.GetCollection(name); + this.Name = name; this._options = options ?? 
new MongoDBVectorStoreRecordCollectionOptions(); this._model = new MongoDBModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); this._mapper = typeof(TRecord) == typeof(Dictionary) @@ -94,7 +94,7 @@ public MongoDBVectorStoreRecordCollection( { VectorStoreSystemName = MongoDBConstants.VectorStoreSystemName, VectorStoreName = mongoDatabase.DatabaseNamespace?.DatabaseName, - CollectionName = collectionName + CollectionName = name }; } @@ -113,7 +113,7 @@ public async Task CreateCollectionAsync(CancellationToken cancellationToken = de { VectorStoreSystemName = MongoDBConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = "CreateCollection" }; } @@ -127,13 +127,13 @@ public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellatio // The IMongoDatabase.CreateCollectionAsync "Creates a new collection if not already available". // So for CreateCollectionIfNotExistsAsync, we don't perform an additional check. await this.RunOperationAsync("CreateCollection", - () => this._mongoDatabase.CreateCollectionAsync(this.CollectionName, cancellationToken: cancellationToken)).ConfigureAwait(false); + () => this._mongoDatabase.CreateCollectionAsync(this.Name, cancellationToken: cancellationToken)).ConfigureAwait(false); await this.RunOperationWithRetryAsync( "CreateIndexes", this._options.MaxRetries, this._options.DelayInMilliseconds, - () => this.CreateIndexesAsync(this.CollectionName, cancellationToken), + () => this.CreateIndexesAsync(this.Name, cancellationToken), cancellationToken).ConfigureAwait(false); } @@ -159,7 +159,7 @@ await this.RunOperationAsync("DeleteMany", () => this._mongoCollection.DeleteMan /// public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) - => this.RunOperationAsync("DropCollection", () => this._mongoDatabase.DropCollectionAsync(this.CollectionName, cancellationToken)); + => this.RunOperationAsync("DropCollection", () => this._mongoDatabase.DropCollectionAsync(this.Name, cancellationToken)); /// public async Task GetAsync(TKey key, GetRecordOptions? 
options = null, CancellationToken cancellationToken = default) @@ -187,7 +187,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) return VectorStoreErrorHandler.RunModelConversion( MongoDBConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromStorageToDataModel(record, new() { IncludeVectors = includeVectors })); } @@ -217,7 +217,7 @@ public async IAsyncEnumerable GetAsync( yield return VectorStoreErrorHandler.RunModelConversion( MongoDBConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromStorageToDataModel(record, new())); } @@ -236,7 +236,7 @@ public Task UpsertAsync(TRecord record, CancellationToken cancellationToke var storageModel = VectorStoreErrorHandler.RunModelConversion( MongoDBConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -374,7 +374,7 @@ public async IAsyncEnumerable GetAsync(Expression> var record = VectorStoreErrorHandler.RunModelConversion( MongoDBConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, "GetAsync", () => this._mapper.MapFromStorageToDataModel(response, new() { IncludeVectors = options.IncludeVectors })); @@ -412,7 +412,7 @@ public async IAsyncEnumerable> HybridSearchAsync> EnumerateAndMapSearc var record = VectorStoreErrorHandler.RunModelConversion( MongoDBConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromStorageToDataModel(response[DocumentPropertyName].AsBsonDocument, new() { IncludeVectors = includeVectors })); @@ -589,7 +589,7 @@ private FilterDefinition GetFilterByIds(IEnumerable ids) private async Task InternalCollectionExistsAsync(CancellationToken cancellationToken) { - var filter = new BsonDocument("name", this.CollectionName); + var filter = new BsonDocument("name", this.Name); var options = new ListCollectionNamesOptions { Filter = filter }; using var cursor = await this._mongoDatabase.ListCollectionNamesAsync(options, cancellationToken: cancellationToken).ConfigureAwait(false); @@ -609,7 +609,7 @@ private async Task RunOperationAsync(string operationName, Func operation) { VectorStoreSystemName = MongoDBConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = operationName }; } @@ -627,7 +627,7 @@ private async Task RunOperationAsync(string operationName, Func> o { VectorStoreSystemName = MongoDBConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = operationName }; } @@ -659,7 +659,7 @@ private async Task RunOperationWithRetryAsync( { VectorStoreSystemName = MongoDBConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = operationName }; } @@ -694,7 +694,7 @@ private async Task RunOperationWithRetryAsync( { VectorStoreSystemName = MongoDBConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + 
CollectionName = this.Name, OperationName = operationName }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 220b1d0110e9..92b150310696 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -38,7 +38,7 @@ public sealed class PineconeVectorStoreRecordCollection : IVector private IndexClient? _indexClient; /// - public string CollectionName { get; } + public string Name { get; } /// /// Initializes a new instance of the class. @@ -46,12 +46,12 @@ public sealed class PineconeVectorStoreRecordCollection : IVector /// Pinecone client that can be used to manage the collections and vectors in a Pinecone store. /// Optional configuration options for this class. /// Thrown if the is null. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Thrown for any misconfigured options. - public PineconeVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, string collectionName, PineconeVectorStoreRecordCollectionOptions? options = null) + public PineconeVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, string name, PineconeVectorStoreRecordCollectionOptions? options = null) { Verify.NotNull(pineconeClient); - VerifyCollectionName(collectionName); + VerifyCollectionName(name); if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(object)) { @@ -59,7 +59,7 @@ public PineconeVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, st } this._pineconeClient = pineconeClient; - this.CollectionName = collectionName; + this.Name = name; this._options = options ?? 
new PineconeVectorStoreRecordCollectionOptions(); this._model = new VectorStoreRecordModelBuilder(PineconeVectorStoreRecordFieldMapping.ModelBuildingOptions) .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); @@ -68,7 +68,7 @@ public PineconeVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, st this._collectionMetadata = new() { VectorStoreSystemName = PineconeConstants.VectorStoreSystemName, - CollectionName = collectionName + CollectionName = name }; } @@ -80,7 +80,7 @@ public Task CollectionExistsAsync(CancellationToken cancellationToken = de { var collections = await this._pineconeClient.ListIndexesAsync(cancellationToken: cancellationToken).ConfigureAwait(false); - return collections.Indexes?.Any(x => x.Name == this.CollectionName) is true; + return collections.Indexes?.Any(x => x.Name == this.Name) is true; }); /// @@ -97,7 +97,7 @@ public Task CreateCollectionAsync(CancellationToken cancellationToken = default) CreateIndexRequest request = new() { - Name = this.CollectionName, + Name = this.Name, Dimension = vectorProperty.Dimensions, Metric = MapDistanceFunction(vectorProperty), Spec = new ServerlessIndexSpec @@ -135,7 +135,7 @@ public async Task DeleteCollectionAsync(CancellationToken cancellationToken = de { try { - await this._pineconeClient.DeleteIndexAsync(this.CollectionName, cancellationToken: cancellationToken).ConfigureAwait(false); + await this._pineconeClient.DeleteIndexAsync(this.Name, cancellationToken: cancellationToken).ConfigureAwait(false); } catch (NotFoundError) { @@ -147,7 +147,7 @@ public async Task DeleteCollectionAsync(CancellationToken cancellationToken = de { VectorStoreSystemName = PineconeConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = "DeleteCollection" }; } @@ -176,7 +176,7 @@ public async Task DeleteCollectionAsync(CancellationToken cancellationToken = de return VectorStoreErrorHandler.RunModelConversion( PineconeConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, "Get", () => this._mapper.MapFromStorageToDataModel(result, mapperOptions)); } @@ -221,7 +221,7 @@ public async IAsyncEnumerable GetAsync( var records = VectorStoreErrorHandler.RunModelConversion( PineconeConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, "GetBatch", () => response.Vectors.Values.Select(x => this._mapper.MapFromStorageToDataModel(x, mapperOptions))); @@ -281,7 +281,7 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati var vector = VectorStoreErrorHandler.RunModelConversion( PineconeConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, "Upsert", () => this._mapper.MapFromDataToStorageModel(record)); @@ -306,7 +306,7 @@ public async Task> UpsertAsync(IEnumerable records, var vectors = VectorStoreErrorHandler.RunModelConversion( PineconeConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, "UpsertBatch", () => records.Select(this._mapper.MapFromDataToStorageModel).ToList()); @@ -379,7 +379,7 @@ public async IAsyncEnumerable> VectorizedSearchAsync var records = VectorStoreErrorHandler.RunModelConversion( PineconeConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, "VectorizedSearch", () => 
skippedResults.Select(x => new VectorSearchResult(this._mapper.MapFromStorageToDataModel(new Sdk.Vector() { @@ -434,7 +434,7 @@ public async IAsyncEnumerable GetAsync(Expression> var records = VectorStoreErrorHandler.RunModelConversion( PineconeConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, "Query", () => response.Matches.Skip(options.Skip).Select(x => this._mapper.MapFromStorageToDataModel(new Sdk.Vector() { @@ -471,7 +471,7 @@ private async Task RunIndexOperationAsync(string operationName, Func RunIndexOperationAsync(string operationName, Func RunCollectionOperationAsync(string operationName, Func< { VectorStoreSystemName = PineconeConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = operationName }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index aab25cfe6d1f..3932bebb2ac8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -24,7 +24,7 @@ public sealed class PostgresVectorStoreRecordCollection : IVector where TRecord : notnull { /// - public string CollectionName { get; } + public string Name { get; } /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; @@ -48,10 +48,10 @@ public sealed class PostgresVectorStoreRecordCollection : IVector /// Initializes a new instance of the class. /// /// The data source to use for connecting to the database. - /// The name of the collection. + /// The name of the collection. /// Optional configuration options for this class. - public PostgresVectorStoreRecordCollection(NpgsqlDataSource dataSource, string collectionName, PostgresVectorStoreRecordCollectionOptions? options = default) - : this(new PostgresVectorStoreDbClient(dataSource), collectionName, options) + public PostgresVectorStoreRecordCollection(NpgsqlDataSource dataSource, string name, PostgresVectorStoreRecordCollectionOptions? options = default) + : this(new PostgresVectorStoreDbClient(dataSource), name, options) { } @@ -59,20 +59,20 @@ public PostgresVectorStoreRecordCollection(NpgsqlDataSource dataSource, string c /// Initializes a new instance of the class. /// /// The client to use for interacting with the database. - /// The name of the collection. + /// The name of the collection. /// Optional configuration options for this class. /// /// This constructor is internal. It allows internal code to create an instance of this class with a custom client. /// - internal PostgresVectorStoreRecordCollection(IPostgresVectorStoreDbClient client, string collectionName, PostgresVectorStoreRecordCollectionOptions? options = default) + internal PostgresVectorStoreRecordCollection(IPostgresVectorStoreDbClient client, string name, PostgresVectorStoreRecordCollectionOptions? options = default) { // Verify. Verify.NotNull(client); - Verify.NotNullOrWhiteSpace(collectionName); + Verify.NotNullOrWhiteSpace(name); // Assign. this._client = client; - this.CollectionName = collectionName; + this.Name = name; this._options = options ?? 
new PostgresVectorStoreRecordCollectionOptions(); this._model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions) @@ -84,7 +84,7 @@ internal PostgresVectorStoreRecordCollection(IPostgresVectorStoreDbClient client { VectorStoreSystemName = PostgresConstants.VectorStoreSystemName, VectorStoreName = this._client.DatabaseName, - CollectionName = collectionName + CollectionName = name }; } @@ -93,7 +93,7 @@ public Task CollectionExistsAsync(CancellationToken cancellationToken = de { const string OperationName = "DoesTableExists"; return this.RunOperationAsync(OperationName, () => - this._client.DoesTableExistsAsync(this.CollectionName, cancellationToken) + this._client.DoesTableExistsAsync(this.Name, cancellationToken) ); } @@ -120,7 +120,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { const string OperationName = "DeleteCollection"; return this.RunOperationAsync(OperationName, () => - this._client.DeleteTableAsync(this.CollectionName, cancellationToken) + this._client.DeleteTableAsync(this.Name, cancellationToken) ); } @@ -132,7 +132,7 @@ public Task UpsertAsync(TRecord record, CancellationToken cancellationToke var storageModel = VectorStoreErrorHandler.RunModelConversion( PostgresConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -144,7 +144,7 @@ public Task UpsertAsync(TRecord record, CancellationToken cancellationToke return this.RunOperationAsync(OperationName, async () => { - await this._client.UpsertAsync(this.CollectionName, storageModel, this._model.KeyProperty.StorageName, cancellationToken).ConfigureAwait(false); + await this._client.UpsertAsync(this.Name, storageModel, this._model.KeyProperty.StorageName, cancellationToken).ConfigureAwait(false); return key; } ); @@ -160,7 +160,7 @@ public async Task> UpsertAsync(IEnumerable records, var storageModels = records.Select(record => VectorStoreErrorHandler.RunModelConversion( PostgresConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromDataToStorageModel(record))).ToList(); @@ -172,7 +172,7 @@ public async Task> UpsertAsync(IEnumerable records, var keys = storageModels.Select(model => model[this._model.KeyProperty.StorageName]!).ToList(); await this.RunOperationAsync(OperationName, () => - this._client.UpsertBatchAsync(this.CollectionName, storageModels, this._model.KeyProperty.StorageName, cancellationToken) + this._client.UpsertBatchAsync(this.Name, storageModels, this._model.KeyProperty.StorageName, cancellationToken) ).ConfigureAwait(false); return keys.Select(key => (TKey)key!).ToList(); @@ -189,13 +189,13 @@ await this.RunOperationAsync(OperationName, () => return this.RunOperationAsync(OperationName, async () => { - var row = await this._client.GetAsync(this.CollectionName, key, this._model, includeVectors, cancellationToken).ConfigureAwait(false); + var row = await this._client.GetAsync(this.Name, key, this._model, includeVectors, cancellationToken).ConfigureAwait(false); if (row is null) { return default; } return VectorStoreErrorHandler.RunModelConversion( PostgresConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromStorageToDataModel(row, new() { IncludeVectors = includeVectors })); }); @@ -211,19 +211,19 @@ public IAsyncEnumerable 
GetAsync(IEnumerable keys, GetRecordOptio bool includeVectors = options?.IncludeVectors is true; return PostgresVectorStoreUtils.WrapAsyncEnumerableAsync( - this._client.GetBatchAsync(this.CollectionName, keys, this._model, includeVectors, cancellationToken) + this._client.GetBatchAsync(this.Name, keys, this._model, includeVectors, cancellationToken) .SelectAsync(row => VectorStoreErrorHandler.RunModelConversion( PostgresConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromStorageToDataModel(row, new() { IncludeVectors = includeVectors })), cancellationToken ), OperationName, this._collectionMetadata.VectorStoreName, - this.CollectionName + this.Name ); } @@ -232,7 +232,7 @@ public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) { const string OperationName = "Delete"; return this.RunOperationAsync(OperationName, () => - this._client.DeleteAsync(this.CollectionName, this._model.KeyProperty.StorageName, key, cancellationToken) + this._client.DeleteAsync(this.Name, this._model.KeyProperty.StorageName, key, cancellationToken) ); } @@ -243,7 +243,7 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationTo const string OperationName = "DeleteBatch"; return this.RunOperationAsync(OperationName, () => - this._client.DeleteBatchAsync(this.CollectionName, this._model.KeyProperty.StorageName, keys, cancellationToken) + this._client.DeleteBatchAsync(this.Name, this._model.KeyProperty.StorageName, keys, cancellationToken) ); } @@ -278,13 +278,13 @@ public IAsyncEnumerable> VectorizedSearchAsync { var record = VectorStoreErrorHandler.RunModelConversion( PostgresConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromStorageToDataModel(result.Row, mapperOptions)); @@ -292,7 +292,7 @@ public IAsyncEnumerable> VectorizedSearchAsync GetAsync(Expression> filter StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = options.IncludeVectors }; return PostgresVectorStoreUtils.WrapAsyncEnumerableAsync( - this._client.GetMatchingRecordsAsync(this.CollectionName, this._model, filter, top, options, cancellationToken) + this._client.GetMatchingRecordsAsync(this.Name, this._model, filter, top, options, cancellationToken) .SelectAsync(dictionary => { return VectorStoreErrorHandler.RunModelConversion( PostgresConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, "Get", () => this._mapper.MapFromStorageToDataModel(dictionary, mapperOptions)); }, cancellationToken), "Get", this._collectionMetadata.VectorStoreName, - this.CollectionName); + this.Name); } /// @@ -338,7 +338,7 @@ public IAsyncEnumerable GetAsync(Expression> filter private Task InternalCreateCollectionAsync(bool ifNotExists, CancellationToken cancellationToken = default) { - return this._client.CreateTableAsync(this.CollectionName, this._model, ifNotExists, cancellationToken); + return this._client.CreateTableAsync(this.Name, this._model, ifNotExists, cancellationToken); } private async Task RunOperationAsync(string operationName, Func operation) @@ -353,7 +353,7 @@ private async Task RunOperationAsync(string operationName, Func operation) { VectorStoreSystemName = PostgresConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = 
operationName }; } @@ -371,7 +371,7 @@ private async Task RunOperationAsync(string operationName, Func> o { VectorStoreSystemName = PostgresConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = operationName }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index 99f6eb4403f4..c4a5bea72d7f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -62,12 +62,12 @@ public sealed class QdrantVectorStoreRecordCollection : IVectorSt /// Initializes a new instance of the class. /// /// Qdrant client that can be used to manage the collections and points in a Qdrant store. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. /// Thrown if the is null. /// Thrown for any misconfigured options. - public QdrantVectorStoreRecordCollection(QdrantClient qdrantClient, string collectionName, QdrantVectorStoreRecordCollectionOptions? options = null) - : this(new MockableQdrantClient(qdrantClient), collectionName, options) + public QdrantVectorStoreRecordCollection(QdrantClient qdrantClient, string name, QdrantVectorStoreRecordCollectionOptions? options = null) + : this(new MockableQdrantClient(qdrantClient), name, options) { } @@ -75,15 +75,15 @@ public QdrantVectorStoreRecordCollection(QdrantClient qdrantClient, string colle /// Initializes a new instance of the class. /// /// Qdrant client that can be used to manage the collections and points in a Qdrant store. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. /// Thrown if the is null. /// Thrown for any misconfigured options. - internal QdrantVectorStoreRecordCollection(MockableQdrantClient qdrantClient, string collectionName, QdrantVectorStoreRecordCollectionOptions? options = null) + internal QdrantVectorStoreRecordCollection(MockableQdrantClient qdrantClient, string name, QdrantVectorStoreRecordCollectionOptions? options = null) { // Verify. Verify.NotNull(qdrantClient); - Verify.NotNullOrWhiteSpace(collectionName); + Verify.NotNullOrWhiteSpace(name); if (typeof(TKey) != typeof(ulong) && typeof(TKey) != typeof(Guid) && typeof(TKey) != typeof(object)) { @@ -92,7 +92,7 @@ internal QdrantVectorStoreRecordCollection(MockableQdrantClient qdrantClient, st // Assign. this._qdrantClient = qdrantClient; - this._collectionName = collectionName; + this._collectionName = name; this._options = options ?? 
new QdrantVectorStoreRecordCollectionOptions(); this._model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(this._options.HasNamedVectors)) @@ -103,12 +103,12 @@ internal QdrantVectorStoreRecordCollection(MockableQdrantClient qdrantClient, st this._collectionMetadata = new() { VectorStoreSystemName = QdrantConstants.VectorStoreSystemName, - CollectionName = collectionName + CollectionName = name }; } /// - public string CollectionName => this._collectionName; + public string Name => this._collectionName; /// public Task CollectionExistsAsync(CancellationToken cancellationToken = default) @@ -499,7 +499,7 @@ public async IAsyncEnumerable> VectorizedSearchAsync var points = await this.RunOperationAsync( "Query", () => this._qdrantClient.QueryAsync( - this.CollectionName, + this.Name, query: query, usingVector: this._options.HasNamedVectors ? vectorProperty.StorageName : null, filter: filter, @@ -558,7 +558,7 @@ public async IAsyncEnumerable GetAsync(Expression> var scrollResponse = await this.RunOperationAsync( "Scroll", () => this._qdrantClient.ScrollAsync( - this.CollectionName, + this.Name, translatedFilter, vectorsSelector, limit: (uint)(top + options.Skip), @@ -645,7 +645,7 @@ public async IAsyncEnumerable> HybridSearchAsync this._qdrantClient.QueryAsync( - this.CollectionName, + this.Name, prefetch: new List() { vectorQuery, keywordQuery }, query: fusionQuery, limit: (ulong)top, diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index 219a865cc12c..53000a1dcbb1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -93,14 +93,14 @@ public sealed class RedisHashSetVectorStoreRecordCollection : IVe /// Initializes a new instance of the class. /// /// The Redis database to read/write records from. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. /// Throw when parameters are invalid. - public RedisHashSetVectorStoreRecordCollection(IDatabase database, string collectionName, RedisHashSetVectorStoreRecordCollectionOptions? options = null) + public RedisHashSetVectorStoreRecordCollection(IDatabase database, string name, RedisHashSetVectorStoreRecordCollectionOptions? options = null) { // Verify. Verify.NotNull(database); - Verify.NotNullOrWhiteSpace(collectionName); + Verify.NotNullOrWhiteSpace(name); if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(object)) { @@ -109,7 +109,7 @@ public RedisHashSetVectorStoreRecordCollection(IDatabase database, string collec // Assign. this._database = database; - this._collectionName = collectionName; + this._collectionName = name; this._options = options ?? 
new RedisHashSetVectorStoreRecordCollectionOptions(); this._model = new VectorStoreRecordModelBuilder(ModelBuildingOptions).Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); @@ -124,12 +124,12 @@ public RedisHashSetVectorStoreRecordCollection(IDatabase database, string collec { VectorStoreSystemName = RedisConstants.VectorStoreSystemName, VectorStoreName = database.Database.ToString(), - CollectionName = collectionName + CollectionName = name }; } /// - public string CollectionName => this._collectionName; + public string Name => this._collectionName; /// public async Task CollectionExistsAsync(CancellationToken cancellationToken = default) diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index 097095953866..0c79dd5d65e9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -84,14 +84,14 @@ public sealed class RedisJsonVectorStoreRecordCollection : IVecto /// Initializes a new instance of the class. /// /// The Redis database to read/write records from. - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. /// Throw when parameters are invalid. - public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectionName, RedisJsonVectorStoreRecordCollectionOptions? options = null) + public RedisJsonVectorStoreRecordCollection(IDatabase database, string name, RedisJsonVectorStoreRecordCollectionOptions? options = null) { // Verify. Verify.NotNull(database); - Verify.NotNullOrWhiteSpace(collectionName); + Verify.NotNullOrWhiteSpace(name); if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(object)) { @@ -100,7 +100,7 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectio // Assign. this._database = database; - this._collectionName = collectionName; + this._collectionName = name; this._options = options ?? new RedisJsonVectorStoreRecordCollectionOptions(); this._jsonSerializerOptions = this._options.JsonSerializerOptions ?? 
JsonSerializerOptions.Default; this._model = new VectorStoreRecordJsonModelBuilder(ModelBuildingOptions) @@ -118,12 +118,12 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectio { VectorStoreSystemName = RedisConstants.VectorStoreSystemName, VectorStoreName = database.Database.ToString(), - CollectionName = collectionName + CollectionName = name }; } /// - public string CollectionName => this._collectionName; + public string Name => this._collectionName; /// public async Task CollectionExistsAsync(CancellationToken cancellationToken = default) diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index 9036cddd2575..6d20ef10d89c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -52,7 +52,7 @@ public SqlServerVectorStoreRecordCollection( .Build(typeof(TRecord), options?.RecordDefinition); this._connectionString = connectionString; - this.CollectionName = name; + this.Name = name; // We need to create a copy, so any changes made to the option bag after // the ctor call do not affect this instance. this._options = options is null @@ -75,14 +75,14 @@ public SqlServerVectorStoreRecordCollection( } /// - public string CollectionName { get; } + public string Name { get; } /// public async Task CollectionExistsAsync(CancellationToken cancellationToken = default) { using SqlConnection connection = new(this._connectionString); using SqlCommand command = SqlServerCommandBuilder.SelectTableName( - connection, this._options.Schema, this.CollectionName); + connection, this._options.Schema, this.Name); return await ExceptionWrapper.WrapAsync(connection, command, static async (cmd, ct) => @@ -92,7 +92,7 @@ static async (cmd, ct) => }, "CollectionExists", this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, cancellationToken).ConfigureAwait(false); } @@ -110,7 +110,7 @@ private async Task CreateCollectionAsync(bool ifNotExists, CancellationToken can using SqlCommand command = SqlServerCommandBuilder.CreateTable( connection, this._options.Schema, - this.CollectionName, + this.Name, ifNotExists, this._model); @@ -118,7 +118,7 @@ await ExceptionWrapper.WrapAsync(connection, command, static (cmd, ct) => cmd.ExecuteNonQueryAsync(ct), "CreateCollection", this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, cancellationToken).ConfigureAwait(false); } @@ -127,13 +127,13 @@ public async Task DeleteCollectionAsync(CancellationToken cancellationToken = de { using SqlConnection connection = new(this._connectionString); using SqlCommand command = SqlServerCommandBuilder.DropTableIfExists( - connection, this._options.Schema, this.CollectionName); + connection, this._options.Schema, this.Name); await ExceptionWrapper.WrapAsync(connection, command, static (cmd, ct) => cmd.ExecuteNonQueryAsync(ct), "DeleteCollection", this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, cancellationToken).ConfigureAwait(false); } @@ -146,7 +146,7 @@ public async Task DeleteAsync(TKey key, CancellationToken cancellationToken = de using SqlCommand command = SqlServerCommandBuilder.DeleteSingle( connection, this._options.Schema, - this.CollectionName, + this.Name, this._model.KeyProperty, key); @@ -154,7 +154,7 @@ await 
ExceptionWrapper.WrapAsync(connection, command, static (cmd, ct) => cmd.ExecuteNonQueryAsync(ct), "Delete", this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, cancellationToken).ConfigureAwait(false); } @@ -183,7 +183,7 @@ public async Task DeleteAsync(IEnumerable keys, CancellationToken cancella if (!SqlServerCommandBuilder.DeleteMany( command, this._options.Schema, - this.CollectionName, + this.Name, this._model.KeyProperty, keys.Skip(taken).Take(SqlServerConstants.MaxParameterCount))) { @@ -220,7 +220,7 @@ public async Task DeleteAsync(IEnumerable keys, CancellationToken cancella { VectorStoreSystemName = SqlServerConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = "DeleteBatch" }; } @@ -237,7 +237,7 @@ public async Task DeleteAsync(IEnumerable keys, CancellationToken cancella using SqlCommand command = SqlServerCommandBuilder.SelectSingle( connection, this._options.Schema, - this.CollectionName, + this.Name, this._model, key, includeVectors); @@ -251,7 +251,7 @@ static async (cmd, ct) => }, "Get", this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, cancellationToken).ConfigureAwait(false); return reader.HasRows @@ -283,7 +283,7 @@ public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecor if (!SqlServerCommandBuilder.SelectMany( command, this._options.Schema, - this.CollectionName, + this.Name, this._model, keys.Skip(taken).Take(SqlServerConstants.MaxParameterCount), includeVectors)) @@ -300,14 +300,14 @@ public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecor static (cmd, ct) => cmd.ExecuteReaderAsync(ct), "GetBatch", this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, cancellationToken).ConfigureAwait(false); while (await ExceptionWrapper.WrapReadAsync( reader, "GetBatch", this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, cancellationToken).ConfigureAwait(false)) { yield return this._mapper.MapFromStorageToDataModel( @@ -326,7 +326,7 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati using SqlCommand command = SqlServerCommandBuilder.MergeIntoSingle( connection, this._options.Schema, - this.CollectionName, + this.Name, this._model, this._mapper.MapFromDataToStorageModel(record)); @@ -339,7 +339,7 @@ async static (cmd, ct) => }, "Upsert", this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, cancellationToken).ConfigureAwait(false); } @@ -369,7 +369,7 @@ public async Task> UpsertAsync(IEnumerable records, if (!SqlServerCommandBuilder.MergeIntoMany( command, this._options.Schema, - this.CollectionName, + this.Name, this._model, records.Skip(taken) .Take(SqlServerConstants.MaxParameterCount / parametersPerRecord) @@ -408,7 +408,7 @@ public async Task> UpsertAsync(IEnumerable records, { VectorStoreSystemName = SqlServerConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = "UpsertBatch" }; } @@ -447,7 +447,7 @@ public IAsyncEnumerable> VectorizedSearchAsync> ReadVectorSearchResu static (cmd, ct) => cmd.ExecuteReaderAsync(ct), "VectorizedSearch", this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, cancellationToken).ConfigureAwait(false); int scoreIndex = -1; @@ -493,7 +493,7 @@ private async IAsyncEnumerable> ReadVectorSearchResu reader, 
"VectorizedSearch", this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, cancellationToken).ConfigureAwait(false)) { if (scoreIndex < 0) @@ -529,12 +529,12 @@ public async IAsyncEnumerable GetAsync(Expression> options, connection, this._options.Schema, - this.CollectionName, + this.Name, this._model); using SqlDataReader reader = await ExceptionWrapper.WrapAsync(connection, command, static (cmd, ct) => cmd.ExecuteReaderAsync(ct), - "GetAsync", this._collectionMetadata.VectorStoreName, this.CollectionName, cancellationToken).ConfigureAwait(false); + "GetAsync", this._collectionMetadata.VectorStoreName, this.Name, cancellationToken).ConfigureAwait(false); var vectorProperties = options.IncludeVectors ? this._model.VectorProperties : []; StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = options.IncludeVectors }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 4006d5c21006..1d1a044f6eb5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -72,22 +72,22 @@ public sealed class SqliteVectorStoreRecordCollection : IVectorSt private readonly string _vectorSearchExtensionName; /// - public string CollectionName { get; } + public string Name { get; } /// /// Initializes a new instance of the class. /// /// The connection string for the SQLite database represented by this . - /// The name of the collection/table that this will access. + /// The name of the collection/table that this will access. /// Optional configuration options for this class. public SqliteVectorStoreRecordCollection( string connectionString, - string collectionName, + string name, SqliteVectorStoreRecordCollectionOptions? options = default) { // Verify. Verify.NotNull(connectionString); - Verify.NotNullOrWhiteSpace(collectionName); + Verify.NotNullOrWhiteSpace(name); if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(ulong) && typeof(TKey) != typeof(object)) { @@ -96,11 +96,11 @@ public SqliteVectorStoreRecordCollection( // Assign. this._connectionString = connectionString; - this.CollectionName = collectionName; + this.Name = name; this._options = options ?? new(); this._vectorSearchExtensionName = this._options.VectorSearchExtensionName ?? 
SqliteConstants.VectorSearchExtensionName; - this._dataTableName = this.CollectionName; + this._dataTableName = this.Name; this._vectorTableName = GetVectorTableName(this._dataTableName, this._options); this._model = new VectorStoreRecordModelBuilder(SqliteConstants.ModelBuildingOptions) @@ -144,7 +144,7 @@ public SqliteVectorStoreRecordCollection( { VectorStoreSystemName = SqliteConstants.VectorStoreSystemName, VectorStoreName = connectionStringBuilder.DataSource, - CollectionName = collectionName + CollectionName = name }; } @@ -347,7 +347,7 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati var storageModel = VectorStoreErrorHandler.RunModelConversion( SqliteConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromDataToStorageModel(record)); @@ -373,7 +373,7 @@ public async Task> UpsertAsync(IEnumerable records, var storageModels = records.Select(record => VectorStoreErrorHandler.RunModelConversion( SqliteConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromDataToStorageModel(record))).ToList(); @@ -687,7 +687,7 @@ private TRecord GetAndMapRecord( return VectorStoreErrorHandler.RunModelConversion( SqliteConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, operationName, () => this._mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors })); } @@ -704,7 +704,7 @@ private async Task RunOperationAsync(string operationName, Func> o { VectorStoreSystemName = SqliteConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = operationName }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index 49a07cad043e..7275b1b5e08b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -68,7 +68,7 @@ public sealed class WeaviateVectorStoreRecordCollection : IVector private readonly string? _apiKey; /// - public string CollectionName { get; } + public string Name { get; } /// /// Initializes a new instance of the class. @@ -78,17 +78,17 @@ public sealed class WeaviateVectorStoreRecordCollection : IVector /// should point to remote or local cluster and API key can be configured via . /// It's also possible to provide these parameters via . /// - /// The name of the collection that this will access. + /// The name of the collection that this will access. /// Optional configuration options for this class. /// The collection name must start with a capital letter and contain only ASCII letters and digits. public WeaviateVectorStoreRecordCollection( HttpClient httpClient, - string collectionName, + string name, WeaviateVectorStoreRecordCollectionOptions? options = default) { // Verify. Verify.NotNull(httpClient); - VerifyCollectionName(collectionName); + VerifyCollectionName(name); if (typeof(TKey) != typeof(Guid) && typeof(TKey) != typeof(object)) { @@ -100,20 +100,20 @@ public WeaviateVectorStoreRecordCollection( // Assign. 
this._httpClient = httpClient; this._endpoint = endpoint; - this.CollectionName = collectionName; + this.Name = name; this._options = options ?? new(); this._apiKey = this._options.ApiKey; this._model = new WeaviateModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, s_jsonSerializerOptions); // Assign mapper. this._mapper = typeof(TRecord) == typeof(Dictionary) - ? (new WeaviateDynamicDataModelMapper(this.CollectionName, this._model, s_jsonSerializerOptions) as IWeaviateMapper)! - : new WeaviateVectorStoreRecordMapper(this.CollectionName, this._model, s_jsonSerializerOptions); + ? (new WeaviateDynamicDataModelMapper(this.Name, this._model, s_jsonSerializerOptions) as IWeaviateMapper)! + : new WeaviateVectorStoreRecordMapper(this.Name, this._model, s_jsonSerializerOptions); this._collectionMetadata = new() { VectorStoreSystemName = WeaviateConstants.VectorStoreSystemName, - CollectionName = collectionName + CollectionName = name }; } @@ -124,7 +124,7 @@ public Task CollectionExistsAsync(CancellationToken cancellationToken = de return this.RunOperationAsync(OperationName, async () => { - var request = new WeaviateGetCollectionSchemaRequest(this.CollectionName).Build(); + var request = new WeaviateGetCollectionSchemaRequest(this.Name).Build(); var response = await this .ExecuteRequestWithNotFoundHandlingAsync(request, cancellationToken) @@ -141,7 +141,7 @@ public Task CreateCollectionAsync(CancellationToken cancellationToken = default) return this.RunOperationAsync(OperationName, () => { - var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(this.CollectionName, this._model); + var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(this.Name, this._model); var request = new WeaviateCreateCollectionSchemaRequest(schema).Build(); @@ -165,7 +165,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) return this.RunOperationAsync(OperationName, () => { - var request = new WeaviateDeleteCollectionSchemaRequest(this.CollectionName).Build(); + var request = new WeaviateDeleteCollectionSchemaRequest(this.Name).Build(); return this.ExecuteRequestAsync(request, cancellationToken); }); @@ -185,7 +185,7 @@ public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) _ => throw new UnreachableException("Guid key should have been validated during model building") }; - var request = new WeaviateDeleteObjectRequest(this.CollectionName, guid).Build(); + var request = new WeaviateDeleteObjectRequest(this.Name, guid).Build(); return this.ExecuteRequestAsync(request, cancellationToken); }); @@ -210,7 +210,7 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationTo { var match = new WeaviateQueryMatch { - CollectionName = this.CollectionName, + CollectionName = this.Name, WhereClause = new WeaviateQueryMatchWhereClause { Operator = ContainsAnyOperator, @@ -240,7 +240,7 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationTo }; var includeVectors = options?.IncludeVectors is true; - var request = new WeaviateGetCollectionObjectRequest(this.CollectionName, guid, includeVectors).Build(); + var request = new WeaviateGetCollectionObjectRequest(this.Name, guid, includeVectors).Build(); var jsonObject = await this.ExecuteRequestWithNotFoundHandlingAsync(request, cancellationToken).ConfigureAwait(false); @@ -252,7 +252,7 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationTo return VectorStoreErrorHandler.RunModelConversion( 
WeaviateConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromStorageToDataModel(jsonObject!, new() { IncludeVectors = includeVectors })); }); @@ -297,7 +297,7 @@ public async Task> UpsertAsync(IEnumerable records, var jsonObjects = records.Select(record => VectorStoreErrorHandler.RunModelConversion( WeaviateConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, OperationName, () => this._mapper.MapFromDataToStorageModel(record))).ToList(); @@ -346,7 +346,7 @@ public IAsyncEnumerable> VectorizedSearchAsync GetAsync(Expression> filter filter, top, options, - this.CollectionName, + this.Name, this._model); return this.ExecuteQueryAsync(query, options.IncludeVectors, WeaviateConstants.ScorePropertyName, "GetAsync", cancellationToken) @@ -392,7 +392,7 @@ public IAsyncEnumerable> HybridSearchAsync( vector, top, string.Join(" ", keywords), - this.CollectionName, + this.Name, this._model, vectorProperty, textDataProperty, @@ -423,7 +423,7 @@ private async IAsyncEnumerable> ExecuteQueryAsync(st var (responseModel, content) = await this.ExecuteRequestWithResponseContentAsync(request, cancellationToken).ConfigureAwait(false); - var collectionResults = responseModel?.Data?.GetOperation?[this.CollectionName]; + var collectionResults = responseModel?.Data?.GetOperation?[this.Name]; if (collectionResults is null) { @@ -431,7 +431,7 @@ private async IAsyncEnumerable> ExecuteQueryAsync(st { VectorStoreSystemName = WeaviateConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = operationName }; } @@ -445,7 +445,7 @@ private async IAsyncEnumerable> ExecuteQueryAsync(st var record = VectorStoreErrorHandler.RunModelConversion( WeaviateConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, - this.CollectionName, + this.Name, operationName, () => this._mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors })); @@ -510,7 +510,7 @@ private async Task RunOperationAsync(string operationName, Func> o { VectorStoreSystemName = WeaviateConstants.VectorStoreSystemName, VectorStoreName = this._collectionMetadata.VectorStoreName, - CollectionName = this.CollectionName, + CollectionName = this.Name, OperationName = operationName }; } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/Throw.cs b/dotnet/src/Connectors/VectorData.Abstractions/Throw.cs new file mode 100644 index 000000000000..42682c708155 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/Throw.cs @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.Extensions.VectorData; + +internal static class Throw +{ + /// Throws an exception indicating that a required service is not available. + public static InvalidOperationException CreateMissingServiceException(Type serviceType, object? serviceKey) => + new(serviceKey is null ? + $"No service of type '{serviceType}' is available." 
: + $"No service of type '{serviceType}' for the key '{serviceKey}' is available."); +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/KeywordHybridSearchExtensions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/KeywordHybridSearchExtensions.cs new file mode 100644 index 000000000000..0e8435ae25c2 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/KeywordHybridSearchExtensions.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.Extensions.VectorData; + +/// Provides a collection of static methods for extending instances. +[Experimental("MEVD9000")] +public static class KeywordHybridSearchExtensions +{ + /// + /// Asks the for an object of the specified type + /// and throw an exception if one isn't available. + /// + /// The record data model to use for retrieving data from the store. + /// The keyword hybrid search. + /// The type of object being requested. + /// An optional key that can be used to help identify the target service. + /// The found object. + /// is . + /// is . + /// No service of the requested type for the specified key is available. + public static object GetRequiredService(this IKeywordHybridSearch keywordHybridSearch, Type serviceType, object? serviceKey = null) + { + if (keywordHybridSearch is null) { throw new ArgumentNullException(nameof(keywordHybridSearch)); } + if (serviceType is null) { throw new ArgumentNullException(nameof(serviceType)); } + + return + keywordHybridSearch.GetService(serviceType, serviceKey) ?? + throw Throw.CreateMissingServiceException(serviceType, serviceKey); + } +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorizableTextSearchExtensions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorizableTextSearchExtensions.cs new file mode 100644 index 000000000000..442155bdcccc --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorizableTextSearchExtensions.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.Extensions.VectorData; + +/// Provides a collection of static methods for extending instances. +[Experimental("MEVD9000")] +public static class VectorizableTextSearchExtensions +{ + /// + /// Asks the for an object of the specified type + /// and throw an exception if one isn't available. + /// + /// The record data model to use for retrieving data from the store. + /// The vectorizable text search. + /// The type of object being requested. + /// An optional key that can be used to help identify the target service. + /// The found object. + /// is . + /// is . + /// No service of the requested type for the specified key is available. + public static object GetRequiredService(this IVectorizableTextSearch vectorizableTextSearch, Type serviceType, object? serviceKey = null) + { + if (vectorizableTextSearch is null) { throw new ArgumentNullException(nameof(vectorizableTextSearch)); } + if (serviceType is null) { throw new ArgumentNullException(nameof(serviceType)); } + + return + vectorizableTextSearch.GetService(serviceType, serviceKey) ?? 
+ throw Throw.CreateMissingServiceException(serviceType, serviceKey); + } +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorizedSearchExtensions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorizedSearchExtensions.cs new file mode 100644 index 000000000000..07b40aa39b21 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorizedSearchExtensions.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.Extensions.VectorData; + +/// Provides a collection of static methods for extending instances. +[Experimental("MEVD9000")] +public static class VectorizedSearchExtensions +{ + /// + /// Asks the for an object of the specified type + /// and throw an exception if one isn't available. + /// + /// The record data model to use for retrieving data from the store. + /// The vectorized search. + /// The type of object being requested. + /// An optional key that can be used to help identify the target service. + /// The found object. + /// is . + /// is . + /// No service of the requested type for the specified key is available. + public static object GetRequiredService(this IVectorizedSearch vectorizedSearch, Type serviceType, object? serviceKey = null) + { + if (vectorizedSearch is null) { throw new ArgumentNullException(nameof(vectorizedSearch)); } + if (serviceType is null) { throw new ArgumentNullException(nameof(serviceType)); } + + return + vectorizedSearch.GetService(serviceType, serviceKey) ?? + throw Throw.CreateMissingServiceException(serviceType, serviceKey); + } +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs index 97e898fd7b56..84acae5bb6d2 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs @@ -25,7 +25,7 @@ public interface IVectorStoreRecordCollection : IVectorizedSearch /// /// Gets the name of the collection. /// - string CollectionName { get; } + string Name { get; } /// /// Checks if the collection exists in the vector store. diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreExtensions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreExtensions.cs new file mode 100644 index 000000000000..9d50678cc118 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/VectorStoreExtensions.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.Extensions.VectorData; + +/// Provides a collection of static methods for extending instances. +[Experimental("MEVD9000")] +public static class VectorStoreExtensions +{ + /// + /// Asks the for an object of the specified type + /// and throw an exception if one isn't available. + /// + /// The record data model to use for retrieving data from the store. + /// The vector store. + /// The type of object being requested. + /// An optional key that can be used to help identify the target service. + /// The found object. + /// is . + /// is . + /// No service of the requested type for the specified key is available. 
+ public static object GetRequiredService(this IVectorStore vectorStore, Type serviceType, object? serviceKey = null) + { + if (vectorStore is null) { throw new ArgumentNullException(nameof(vectorStore)); } + if (serviceType is null) { throw new ArgumentNullException(nameof(serviceType)); } + + return + vectorStore.GetService(serviceType, serviceKey) ?? + throw Throw.CreateMissingServiceException(serviceType, serviceKey); + } +} From 473a736de7c3e2c1d8d93e25123cfd2685d12a5c Mon Sep 17 00:00:00 2001 From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com> Date: Wed, 16 Apr 2025 07:46:55 -0700 Subject: [PATCH 47/63] .Net: [MEVD] Added support for unnamed vectors in Weaviate connector (#11454) ### Motivation and Context Resolves: https://github.com/microsoft/semantic-kernel/issues/9470 1. Added support for unnamed vectors 2. Minor fixes in HTTP communication logic 3. Fixes in unit and integration tests ### Contribution Checklist - [x] The code builds clean without any errors or warnings - [x] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [x] All unit tests pass, and I have added new tests where possible - [x] I didn't break anyone :smile: --- .../HttpV2/WeaviateGetCollectionsRequest.cs | 2 +- .../ModelV2/WeaviateCollectionSchema.cs | 9 ++ .../WeaviateCollectionSchemaVectorConfig.cs | 4 +- .../WeaviateConstants.cs | 6 ++ .../WeaviateDynamicDataModelMapper.cs | 69 ++++++++++--- .../WeaviateModelBuilder.cs | 38 ++++--- .../WeaviateVectorStore.cs | 6 +- ...viateVectorStoreCollectionCreateMapping.cs | 31 ++++-- ...viateVectorStoreCollectionSearchMapping.cs | 13 ++- .../WeaviateVectorStoreOptions.cs | 7 ++ .../WeaviateVectorStoreRecordCollection.cs | 56 +++++++---- ...viateVectorStoreRecordCollectionOptions.cs | 7 ++ ...VectorStoreRecordCollectionQueryBuilder.cs | 42 +++++--- .../WeaviateVectorStoreRecordMapper.cs | 49 +++++++-- .../WeaviateDynamicDataModelMapperTests.cs | 99 +++++++++++++++++-- ...VectorStoreCollectionCreateMappingTests.cs | 72 +++++++++++--- ...VectorStoreCollectionSearchMappingTests.cs | 33 +++++-- ...rStoreRecordCollectionQueryBuilderTests.cs | 35 ++++--- .../WeaviateVectorStoreRecordMapperTests.cs | 79 ++++++++++----- .../CRUD/WeaviateBatchConformanceTests.cs | 9 +- .../WeaviateDynamicRecordConformanceTests.cs | 9 +- .../CRUD/WeaviateNoVectorConformanceTests.cs | 2 +- .../CRUD/WeaviateRecordConformanceTests.cs | 9 +- .../WeaviateCollectionConformanceTests.cs | 9 +- .../Filter/WeaviateBasicFilterTests.cs | 4 +- .../Filter/WeaviateBasicQueryTests.cs | 2 +- ...viateKeywordVectorizedHybridSearchTests.cs | 40 ++++++-- .../Support/TestContainer/WeaviateBuilder.cs | 2 +- .../WeaviateDynamicDataModelFixture.cs | 16 ++- .../Support/WeaviateFixture.cs | 14 --- .../Support/WeaviateSimpleModelFixture.cs | 18 +++- .../Support/WeaviateTestStore.cs | 15 +-- ...orSearchDistanceFunctionComplianceTests.cs | 42 ++++++++ 33 files changed, 647 insertions(+), 201 deletions(-) delete mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/VectorSearch/WeaviateVectorSearchDistanceFunctionComplianceTests.cs diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionsRequest.cs 
b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionsRequest.cs index f31017ca8685..40012278a076 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionsRequest.cs @@ -10,6 +10,6 @@ internal sealed class WeaviateGetCollectionsRequest public HttpRequestMessage Build() { - return HttpRequest.CreateGetRequest(ApiRoute, this); + return HttpRequest.CreateGetRequest(ApiRoute); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchema.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchema.cs index e0f403ddb0e8..c6122eea8967 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchema.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchema.cs @@ -21,4 +21,13 @@ public WeaviateCollectionSchema(string collectionName) [JsonPropertyName("properties")] public List Properties { get; set; } = []; + + [JsonPropertyName("vectorizer")] + public string Vectorizer { get; set; } = WeaviateConstants.DefaultVectorizer; + + [JsonPropertyName("vectorIndexType")] + public string? VectorIndexType { get; set; } + + [JsonPropertyName("vectorIndexConfig")] + public WeaviateCollectionSchemaVectorIndexConfig? VectorIndexConfig { get; set; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchemaVectorConfig.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchemaVectorConfig.cs index 75bd33471eb7..77830facd893 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchemaVectorConfig.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchemaVectorConfig.cs @@ -7,10 +7,8 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; internal sealed class WeaviateCollectionSchemaVectorConfig { - private const string DefaultVectorizer = "none"; - [JsonPropertyName("vectorizer")] - public Dictionary Vectorizer { get; set; } = new() { [DefaultVectorizer] = null }; + public Dictionary Vectorizer { get; set; } = new() { [WeaviateConstants.DefaultVectorizer] = null }; [JsonPropertyName("vectorIndexType")] public string? VectorIndexType { get; set; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs index f98b4ec35fde..f98d4a6304cd 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs @@ -16,6 +16,9 @@ internal sealed class WeaviateConstants /// Reserved vector property name in Weaviate. internal const string ReservedVectorPropertyName = "vectors"; + /// Reserved single vector property name in Weaviate. + internal const string ReservedSingleVectorPropertyName = "vector"; + /// Collection property name in Weaviate. internal const string CollectionPropertyName = "class"; @@ -27,4 +30,7 @@ internal sealed class WeaviateConstants /// Additional properties property name in Weaviate. internal const string AdditionalPropertiesPropertyName = "_additional"; + + /// Default vectorizer for vector properties in Weaviate. 
+ internal const string DefaultVectorizer = "none"; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateDynamicDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateDynamicDataModelMapper.cs index fe28c5c62e66..e80d34becf67 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateDynamicDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateDynamicDataModelMapper.cs @@ -23,20 +23,33 @@ internal sealed class WeaviateDynamicDataModelMapper : IWeaviateMapperA for serialization/deserialization of record properties. private readonly JsonSerializerOptions _jsonSerializerOptions; + /// Gets a value indicating whether the vectors in the store are named and multiple vectors are supported, or whether there is just a single unnamed vector in Weaviate collection. + private readonly bool _hasNamedVectors; + + /// Gets a vector property named used in Weaviate collection. + private readonly string _vectorPropertyName; + /// /// Initializes a new instance of the class. /// /// The name of the Weaviate collection + /// Gets or sets a value indicating whether the vectors in the store are named and multiple vectors are supported, or whether there is just a single unnamed vector in Weaviate collection /// The model /// A for serialization/deserialization of record properties. public WeaviateDynamicDataModelMapper( string collectionName, + bool hasNamedVectors, VectorStoreRecordModel model, JsonSerializerOptions jsonSerializerOptions) { this._collectionName = collectionName; + this._hasNamedVectors = hasNamedVectors; this._model = model; this._jsonSerializerOptions = jsonSerializerOptions; + + this._vectorPropertyName = hasNamedVectors ? + WeaviateConstants.ReservedVectorPropertyName : + WeaviateConstants.ReservedSingleVectorPropertyName; } public JsonObject MapFromDataToStorageModel(Dictionary dataModel) @@ -44,38 +57,52 @@ public JsonObject MapFromDataToStorageModel(Dictionary dataMode Verify.NotNull(dataModel); // Transform generic data model to Weaviate object model. - var keyObject = JsonSerializer.SerializeToNode(dataModel[this._model.KeyProperty.ModelName]); + var keyNode = JsonSerializer.SerializeToNode(dataModel[this._model.KeyProperty.ModelName]); // Populate data properties. - var dataObject = new JsonObject(); + var dataNode = new JsonObject(); foreach (var property in this._model.DataProperties) { if (dataModel.TryGetValue(property.ModelName, out var dataValue)) { - dataObject[property.StorageName] = dataValue is null + dataNode[property.StorageName] = dataValue is null ? null : JsonSerializer.SerializeToNode(dataValue, property.Type, this._jsonSerializerOptions); } } // Populate vector properties. - var vectorObject = new JsonObject(); - foreach (var property in this._model.VectorProperties) + JsonNode? vectorNode = null; + + if (this._hasNamedVectors) { - if (dataModel.TryGetValue(property.ModelName, out var vectorValue)) + vectorNode = new JsonObject(); + foreach (var property in this._model.VectorProperties) { - vectorObject[property.StorageName] = vectorValue is null + if (dataModel.TryGetValue(property.ModelName, out var vectorValue)) + { + vectorNode[property.StorageName] = vectorValue is null + ? null + : JsonSerializer.SerializeToNode(vectorValue, property.Type, this._jsonSerializerOptions); + } + } + } + else + { + if (dataModel.TryGetValue(this._model.VectorProperty.ModelName, out var vectorValue)) + { + vectorNode = vectorValue is null ? 
null - : JsonSerializer.SerializeToNode(vectorValue, property.Type, this._jsonSerializerOptions); + : JsonSerializer.SerializeToNode(vectorValue, this._model.VectorProperty.Type, this._jsonSerializerOptions); } } return new JsonObject { { WeaviateConstants.CollectionPropertyName, JsonValue.Create(this._collectionName) }, - { WeaviateConstants.ReservedKeyPropertyName, keyObject }, - { WeaviateConstants.ReservedDataPropertyName, dataObject }, - { WeaviateConstants.ReservedVectorPropertyName, vectorObject }, + { WeaviateConstants.ReservedKeyPropertyName, keyNode }, + { WeaviateConstants.ReservedDataPropertyName, dataNode }, + { this._vectorPropertyName, vectorNode }, }; } @@ -109,13 +136,25 @@ public JsonObject MapFromDataToStorageModel(Dictionary dataMode // Populate vector properties. if (options.IncludeVectors) { - foreach (var property in this._model.VectorProperties) + if (this._hasNamedVectors) + { + foreach (var property in this._model.VectorProperties) + { + var jsonObject = storageModel[WeaviateConstants.ReservedVectorPropertyName] as JsonObject; + + if (jsonObject is not null && jsonObject.TryGetPropertyValue(property.StorageName, out var vectorValue)) + { + result.Add(property.ModelName, vectorValue.Deserialize(property.Type, this._jsonSerializerOptions)); + } + } + } + else { - var jsonObject = storageModel[WeaviateConstants.ReservedVectorPropertyName] as JsonObject; + var jsonNode = storageModel[WeaviateConstants.ReservedSingleVectorPropertyName]; - if (jsonObject is not null && jsonObject.TryGetPropertyValue(property.StorageName, out var vectorValue)) + if (jsonNode is not null) { - result.Add(property.ModelName, vectorValue.Deserialize(property.Type, this._jsonSerializerOptions)); + result.Add(this._model.VectorProperty.ModelName, jsonNode.Deserialize(this._model.VectorProperty.Type, this._jsonSerializerOptions)); } } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateModelBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateModelBuilder.cs index eaac5cc83f44..680d9ad3d40c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateModelBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateModelBuilder.cs @@ -6,37 +6,51 @@ namespace Microsoft.SemanticKernel.Connectors.Weaviate; -internal class WeaviateModelBuilder() : VectorStoreRecordJsonModelBuilder(s_modelBuildingOptions) +internal class WeaviateModelBuilder(bool hasNamedVectors) : VectorStoreRecordJsonModelBuilder(GetModelBuildingOptions(hasNamedVectors)) { - private static readonly VectorStoreRecordModelBuildingOptions s_modelBuildingOptions = new() + private static VectorStoreRecordModelBuildingOptions GetModelBuildingOptions(bool hasNamedVectors) { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true, + return new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = hasNamedVectors, - SupportedKeyPropertyTypes = [typeof(Guid)], - SupportedDataPropertyTypes = s_supportedDataTypes, - SupportedEnumerableDataPropertyElementTypes = s_supportedDataTypes, - SupportedVectorPropertyTypes = s_supportedVectorTypes, + SupportedKeyPropertyTypes = [typeof(Guid)], + SupportedDataPropertyTypes = s_supportedDataTypes, + SupportedEnumerableDataPropertyElementTypes = s_supportedDataTypes, + SupportedVectorPropertyTypes = s_supportedVectorTypes, - UsesExternalSerializer = true, - ReservedKeyStorageName = WeaviateConstants.ReservedKeyPropertyName - }; + UsesExternalSerializer = 
true, + ReservedKeyStorageName = WeaviateConstants.ReservedKeyPropertyName + }; + } private static readonly HashSet s_supportedDataTypes = [ typeof(string), typeof(bool), + typeof(bool?), typeof(int), + typeof(int?), typeof(long), + typeof(long?), typeof(short), + typeof(short?), typeof(byte), + typeof(byte?), typeof(float), + typeof(float?), typeof(double), + typeof(double?), typeof(decimal), + typeof(decimal?), typeof(DateTime), + typeof(DateTime?), typeof(DateTimeOffset), + typeof(DateTimeOffset?), typeof(Guid), + typeof(Guid?) ]; internal static readonly HashSet s_supportedVectorTypes = diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs index 8b3ce214fa18..1e5a29bd541b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs @@ -72,7 +72,8 @@ public IVectorStoreRecordCollection GetCollection( { VectorStoreRecordDefinition = vectorStoreRecordDefinition, Endpoint = this._options.Endpoint, - ApiKey = this._options.ApiKey + ApiKey = this._options.ApiKey, + HasNamedVectors = this._options.HasNamedVectors }) as IVectorStoreRecordCollection; return recordCollection; @@ -87,6 +88,9 @@ public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancel try { var httpResponse = await this._httpClient.SendAsync(request, HttpCompletionOption.ResponseContentRead, cancellationToken).ConfigureAwait(false); + + httpResponse.EnsureSuccessStatusCode(); + var httpResponseContent = await httpResponse.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); collectionsResponse = JsonSerializer.Deserialize(httpResponseContent)!; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs index 2448ceea8f18..852339436432 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs @@ -19,9 +19,10 @@ internal static class WeaviateVectorStoreCollectionCreateMapping /// Maps record type properties to Weaviate collection schema for collection creation. /// /// The name of the vector store collection. + /// Gets a value indicating whether the vectors in the store are named and multiple vectors are supported, or whether there is just a single unnamed vector in Weaviate collection. /// The model. /// Weaviate collection schema. - public static WeaviateCollectionSchema MapToSchema(string collectionName, VectorStoreRecordModel model) + public static WeaviateCollectionSchema MapToSchema(string collectionName, bool hasNamedVectors, VectorStoreRecordModel model) { var schema = new WeaviateCollectionSchema(collectionName); @@ -38,16 +39,28 @@ public static WeaviateCollectionSchema MapToSchema(string collectionName, Vector } // Handle vector properties. 
- foreach (var property in model.VectorProperties) + if (hasNamedVectors) { - schema.VectorConfigurations.Add(property.StorageName, new WeaviateCollectionSchemaVectorConfig + foreach (var property in model.VectorProperties) { - VectorIndexType = MapIndexKind(property.IndexKind, property.StorageName), - VectorIndexConfig = new WeaviateCollectionSchemaVectorIndexConfig + schema.VectorConfigurations.Add(property.StorageName, new WeaviateCollectionSchemaVectorConfig { - Distance = MapDistanceFunction(property.DistanceFunction, property.StorageName) - } - }); + VectorIndexType = MapIndexKind(property.IndexKind, property.StorageName), + VectorIndexConfig = new WeaviateCollectionSchemaVectorIndexConfig + { + Distance = MapDistanceFunction(property.DistanceFunction, property.StorageName) + } + }); + } + } + else + { + var vectorProperty = model.VectorProperty; + schema.VectorIndexType = MapIndexKind(vectorProperty.IndexKind, vectorProperty.StorageName); + schema.VectorIndexConfig = new WeaviateCollectionSchemaVectorIndexConfig + { + Distance = MapDistanceFunction(vectorProperty.DistanceFunction, vectorProperty.StorageName) + }; } return schema; @@ -110,7 +123,7 @@ private static string MapDistanceFunction(string? distanceFunction, string vecto DistanceFunction.EuclideanSquaredDistance => EuclideanSquared, DistanceFunction.Hamming => Hamming, DistanceFunction.ManhattanDistance => Manhattan, - _ => throw new InvalidOperationException( + _ => throw new NotSupportedException( $"Distance function '{distanceFunction}' on {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the Weaviate VectorStore. " + $"Supported distance functions: {string.Join(", ", DistanceFunction.CosineDistance, diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionSearchMapping.cs index 3842a3aded97..02ec36be81a1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionSearchMapping.cs @@ -13,7 +13,10 @@ internal static class WeaviateVectorStoreCollectionSearchMapping /// /// Maps vector search result to the format, which is processable by . /// - public static (JsonObject StorageModel, double? Score) MapSearchResult(JsonNode result, string scorePropertyName) + public static (JsonObject StorageModel, double? Score) MapSearchResult( + JsonNode result, + string scorePropertyName, + bool hasNamedVectors) { var additionalProperties = result[WeaviateConstants.AdditionalPropertiesPropertyName]; @@ -25,14 +28,18 @@ public static (JsonObject StorageModel, double? Score) MapSearchResult(JsonNode _ => null }; + var vectorPropertyName = hasNamedVectors ? 
+ WeaviateConstants.ReservedVectorPropertyName : + WeaviateConstants.ReservedSingleVectorPropertyName; + var id = additionalProperties?[WeaviateConstants.ReservedKeyPropertyName]; - var vectors = additionalProperties?[WeaviateConstants.ReservedVectorPropertyName]; + var vectors = additionalProperties?[vectorPropertyName]; var storageModel = new JsonObject { { WeaviateConstants.ReservedKeyPropertyName, id?.DeepClone() }, { WeaviateConstants.ReservedDataPropertyName, result?.DeepClone() }, - { WeaviateConstants.ReservedVectorPropertyName, vectors?.DeepClone() }, + { vectorPropertyName, vectors?.DeepClone() }, }; return (storageModel, score); diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs index fb9cdc208e57..9d955f8d48a6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs @@ -27,4 +27,11 @@ public sealed class WeaviateVectorStoreOptions /// This parameter is optional because authentication may be disabled in local clusters for testing purposes. /// public string? ApiKey { get; set; } = null; + + /// + /// Gets or sets a value indicating whether the vectors in the store are named and multiple vectors are supported, or whether there is just a single unnamed vector in Weaviate collection. + /// Defaults to multiple named vectors. + /// . + /// + public bool HasNamedVectors { get; set; } = true; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index 7275b1b5e08b..997189a8731d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -103,12 +103,13 @@ public WeaviateVectorStoreRecordCollection( this.Name = name; this._options = options ?? new(); this._apiKey = this._options.ApiKey; - this._model = new WeaviateModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, s_jsonSerializerOptions); + this._model = new WeaviateModelBuilder(this._options.HasNamedVectors) + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, s_jsonSerializerOptions); // Assign mapper. this._mapper = typeof(TRecord) == typeof(Dictionary) - ? (new WeaviateDynamicDataModelMapper(this.Name, this._model, s_jsonSerializerOptions) as IWeaviateMapper)! - : new WeaviateVectorStoreRecordMapper(this.Name, this._model, s_jsonSerializerOptions); + ? (new WeaviateDynamicDataModelMapper(this.Name, this._options.HasNamedVectors, this._model, s_jsonSerializerOptions) as IWeaviateMapper)! 
+ : new WeaviateVectorStoreRecordMapper(this.Name, this._options.HasNamedVectors, this._model, s_jsonSerializerOptions); this._collectionMetadata = new() { @@ -139,13 +140,16 @@ public Task CreateCollectionAsync(CancellationToken cancellationToken = default) { const string OperationName = "CreateCollectionSchema"; + var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema( + this.Name, + this._options.HasNamedVectors, + this._model); + return this.RunOperationAsync(OperationName, () => { - var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(this.Name, this._model); - var request = new WeaviateCreateCollectionSchemaRequest(schema).Build(); - return this.ExecuteRequestAsync(request, cancellationToken); + return this.ExecuteRequestAsync(request, cancellationToken: cancellationToken); }); } @@ -167,7 +171,7 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { var request = new WeaviateDeleteCollectionSchemaRequest(this.Name).Build(); - return this.ExecuteRequestAsync(request, cancellationToken); + return this.ExecuteRequestAsync(request, cancellationToken: cancellationToken); }); } @@ -187,7 +191,7 @@ public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) var request = new WeaviateDeleteObjectRequest(this.Name, guid).Build(); - return this.ExecuteRequestAsync(request, cancellationToken); + return this.ExecuteRequestAsync(request, cancellationToken: cancellationToken); }); } @@ -221,7 +225,7 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationTo var request = new WeaviateDeleteObjectBatchRequest(match).Build(); - return this.ExecuteRequestAsync(request, cancellationToken); + return this.ExecuteRequestAsync(request, cancellationToken: cancellationToken); }); } @@ -351,7 +355,8 @@ public IAsyncEnumerable> VectorizedSearchAsync GetAsync(Expression> filter top, options, this.Name, - this._model); + this._model, + this._options.HasNamedVectors); return this.ExecuteQueryAsync(query, options.IncludeVectors, WeaviateConstants.ScorePropertyName, "GetAsync", cancellationToken) .SelectAsync(result => result.Record, cancellationToken: cancellationToken); @@ -397,7 +403,8 @@ public IAsyncEnumerable> HybridSearchAsync( vectorProperty, textDataProperty, s_jsonSerializerOptions, - searchOptions); + searchOptions, + this._options.HasNamedVectors); return this.ExecuteQueryAsync(query, searchOptions.IncludeVectors, WeaviateConstants.HybridScorePropertyName, OperationName, cancellationToken); } @@ -440,7 +447,7 @@ private async IAsyncEnumerable> ExecuteQueryAsync(st { if (result is not null) { - var (storageModel, score) = WeaviateVectorStoreCollectionSearchMapping.MapSearchResult(result, scorePropertyName); + var (storageModel, score) = WeaviateVectorStoreCollectionSearchMapping.MapSearchResult(result, scorePropertyName, this._options.HasNamedVectors); var record = VectorStoreErrorHandler.RunModelConversion( WeaviateConstants.VectorStoreSystemName, @@ -454,7 +461,10 @@ private async IAsyncEnumerable> ExecuteQueryAsync(st } } - private Task ExecuteRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken) + private async Task ExecuteRequestAsync( + HttpRequestMessage request, + bool ensureSuccessStatusCode = true, + CancellationToken cancellationToken = default) { request.RequestUri = new Uri(this._endpoint, request.RequestUri!); @@ -463,12 +473,21 @@ private Task ExecuteRequestAsync(HttpRequestMessage request request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", 
this._apiKey); } - return this._httpClient.SendAsync(request, HttpCompletionOption.ResponseContentRead, cancellationToken); + var response = await this._httpClient + .SendAsync(request, HttpCompletionOption.ResponseContentRead, cancellationToken) + .ConfigureAwait(false); + + if (ensureSuccessStatusCode) + { + response.EnsureSuccessStatusCode(); + } + + return response; } private async Task<(TResponse?, string)> ExecuteRequestWithResponseContentAsync(HttpRequestMessage request, CancellationToken cancellationToken) { - var response = await this.ExecuteRequestAsync(request, cancellationToken).ConfigureAwait(false); + var response = await this.ExecuteRequestAsync(request, cancellationToken: cancellationToken).ConfigureAwait(false); var responseContent = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); @@ -486,12 +505,15 @@ private Task ExecuteRequestAsync(HttpRequestMessage request private async Task ExecuteRequestWithNotFoundHandlingAsync(HttpRequestMessage request, CancellationToken cancellationToken) { - var response = await this.ExecuteRequestAsync(request, cancellationToken).ConfigureAwait(false); + var response = await this.ExecuteRequestAsync(request, ensureSuccessStatusCode: false, cancellationToken: cancellationToken).ConfigureAwait(false); + if (response.StatusCode == HttpStatusCode.NotFound) { return default; } + response.EnsureSuccessStatusCode(); + var responseContent = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); var responseModel = JsonSerializer.Deserialize(responseContent, s_jsonSerializerOptions); diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs index f758d77499a2..60becffbaad1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs @@ -39,4 +39,11 @@ public sealed class WeaviateVectorStoreRecordCollectionOptions /// This parameter is optional because authentication may be disabled in local clusters for testing purposes. /// public string? ApiKey { get; set; } = null; + + /// + /// Gets or sets a value indicating whether the vectors in the store are named and multiple vectors are supported, or whether there is just a single unnamed vector in Weaviate collection. + /// Defaults to multiple named vectors. + /// . + /// + public bool HasNamedVectors { get; set; } = true; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs index 5ed91595af5f..c0dfd17fe124 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionQueryBuilder.cs @@ -26,11 +26,10 @@ public static string BuildSearchQuery( JsonSerializerOptions jsonSerializerOptions, int top, VectorSearchOptions searchOptions, - VectorStoreRecordModel model) + VectorStoreRecordModel model, + bool hasNamedVectors) { - var vectorsQuery = searchOptions.IncludeVectors ? 
- $"vectors {{ {string.Join(" ", model.VectorProperties.Select(p => p.StorageName))} }}" : - string.Empty; + var vectorsQuery = GetVectorsPropertyQuery(searchOptions.IncludeVectors, hasNamedVectors, model); #pragma warning disable CS0618 // VectorSearchFilter is obsolete var filter = searchOptions switch @@ -52,7 +51,7 @@ public static string BuildSearchQuery( offset: {{searchOptions.Skip}} {{(filter is null ? "" : "where: " + filter)}} nearVector: { - targetVectors: ["{{vectorPropertyName}}"] + {{GetTargetVectorsQuery(hasNamedVectors, vectorPropertyName)}} vector: {{vectorArray}} } ) { @@ -77,11 +76,10 @@ public static string BuildQuery( int top, GetFilteredRecordOptions queryOptions, string collectionName, - VectorStoreRecordModel model) + VectorStoreRecordModel model, + bool hasNamedVectors) { - var vectorsQuery = queryOptions.IncludeVectors ? - $"vectors {{ {string.Join(" ", model.VectorProperties.Select(p => p.StorageName))} }}" : - string.Empty; + var vectorsQuery = GetVectorsPropertyQuery(queryOptions.IncludeVectors, hasNamedVectors, model); var sortPaths = string.Join(",", queryOptions.OrderBy.Values.Select(sortInfo => { @@ -126,11 +124,10 @@ public static string BuildHybridSearchQuery( VectorStoreRecordVectorPropertyModel vectorProperty, VectorStoreRecordDataPropertyModel textProperty, JsonSerializerOptions jsonSerializerOptions, - HybridSearchOptions searchOptions) + HybridSearchOptions searchOptions, + bool hasNamedVectors) { - var vectorsQuery = searchOptions.IncludeVectors ? - $"vectors {{ {string.Join(" ", model.VectorProperties.Select(p => p.StorageName))} }}" : - string.Empty; + var vectorsQuery = GetVectorsPropertyQuery(searchOptions.IncludeVectors, hasNamedVectors, model); #pragma warning disable CS0618 // VectorSearchFilter is obsolete var filter = searchOptions switch @@ -154,7 +151,7 @@ public static string BuildHybridSearchQuery( hybrid: { query: "{{keywords}}" properties: ["{{textProperty.StorageName}}"] - targetVectors: ["{{vectorProperty.StorageName}}"] + {{GetTargetVectorsQuery(hasNamedVectors, vectorProperty.StorageName)}} vector: {{vectorArray}} fusionType: rankedFusion } @@ -173,6 +170,23 @@ public static string BuildHybridSearchQuery( #region private + private static string GetTargetVectorsQuery(bool hasNamedVectors, string vectorPropertyName) + { + return hasNamedVectors ? $"targetVectors: [\"{vectorPropertyName}\"]" : string.Empty; + } + + private static string GetVectorsPropertyQuery( + bool includeVectors, + bool hasNamedVectors, + VectorStoreRecordModel model) + { + return includeVectors + ? hasNamedVectors + ? $"vectors {{ {string.Join(" ", model.VectorProperties.Select(p => p.StorageName))} }}" + : WeaviateConstants.ReservedSingleVectorPropertyName + : string.Empty; + } + #pragma warning disable CS0618 // Type or member is obsolete /// /// Builds filter for Weaviate search query. 
diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs index ce5a38ebc0e2..1ecf80ad651d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs @@ -13,17 +13,26 @@ internal sealed class WeaviateVectorStoreRecordMapper : IWeaviateMapper #pragma warning restore CS0618 { private readonly string _collectionName; + private readonly bool _hasNamedVectors; private readonly VectorStoreRecordModel _model; private readonly JsonSerializerOptions _jsonSerializerOptions; + private readonly string _vectorPropertyName; + public WeaviateVectorStoreRecordMapper( string collectionName, + bool hasNamedVectors, VectorStoreRecordModel model, JsonSerializerOptions jsonSerializerOptions) { this._collectionName = collectionName; + this._hasNamedVectors = hasNamedVectors; this._model = model; this._jsonSerializerOptions = jsonSerializerOptions; + + this._vectorPropertyName = hasNamedVectors ? + WeaviateConstants.ReservedVectorPropertyName : + WeaviateConstants.ReservedSingleVectorPropertyName; } public JsonObject MapFromDataToStorageModel(TRecord dataModel) @@ -41,7 +50,7 @@ public JsonObject MapFromDataToStorageModel(TRecord dataModel) // account e.g. naming policies. TemporaryStorageName gets populated in the model builder - containing that name - once VectorStoreModelBuildingOptions.ReservedKeyPropertyName is set { WeaviateConstants.ReservedKeyPropertyName, jsonNodeDataModel[this._model.KeyProperty.TemporaryStorageName!]!.DeepClone() }, { WeaviateConstants.ReservedDataPropertyName, new JsonObject() }, - { WeaviateConstants.ReservedVectorPropertyName, new JsonObject() }, + { this._vectorPropertyName, new JsonObject() }, }; // Populate data properties. @@ -56,13 +65,26 @@ public JsonObject MapFromDataToStorageModel(TRecord dataModel) } // Populate vector properties. - foreach (var property in this._model.VectorProperties) + if (this._hasNamedVectors) { - var node = jsonNodeDataModel[property.StorageName]; + foreach (var property in this._model.VectorProperties) + { + var node = jsonNodeDataModel[property.StorageName]; + + if (node is not null) + { + weaviateObjectModel[this._vectorPropertyName]![property.StorageName] = node.DeepClone(); + } + } + } + else + { + var vectorProperty = this._model.VectorProperty; + var node = jsonNodeDataModel[vectorProperty.StorageName]; if (node is not null) { - weaviateObjectModel[WeaviateConstants.ReservedVectorPropertyName]![property.StorageName] = node.DeepClone(); + weaviateObjectModel[this._vectorPropertyName] = node.DeepClone(); } } @@ -97,13 +119,26 @@ public TRecord MapFromStorageToDataModel(JsonObject storageModel, StorageToDataM // Populate vector properties. 
if (options.IncludeVectors) { - foreach (var property in this._model.VectorProperties) + if (this._hasNamedVectors) + { + foreach (var property in this._model.VectorProperties) + { + var node = storageModel[this._vectorPropertyName]?[property.StorageName]; + + if (node is not null) + { + jsonNodeDataModel[property.StorageName] = node.DeepClone(); + } + } + } + else { - var node = storageModel[WeaviateConstants.ReservedVectorPropertyName]?[property.StorageName]; + var vectorProperty = this._model.VectorProperty; + var node = storageModel[this._vectorPropertyName]; if (node is not null) { - jsonNodeDataModel[property.StorageName] = node.DeepClone(); + jsonNodeDataModel[vectorProperty.StorageName] = node.DeepClone(); } } } diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateDynamicDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateDynamicDataModelMapperTests.cs index e36990bc3630..e15b354cc4ee 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateDynamicDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateDynamicDataModelMapperTests.cs @@ -18,6 +18,8 @@ namespace SemanticKernel.Connectors.Weaviate.UnitTests; /// public sealed class WeaviateDynamicDataModelMapperTests { + private const bool HasNamedVectors = true; + private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase, @@ -29,7 +31,7 @@ public sealed class WeaviateDynamicDataModelMapperTests } }; - private static readonly VectorStoreRecordModel s_model = new WeaviateModelBuilder() + private static readonly VectorStoreRecordModel s_model = new WeaviateModelBuilder(HasNamedVectors) .Build( typeof(Dictionary), new VectorStoreRecordDefinition @@ -80,7 +82,7 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes() { // Arrange var key = new Guid("55555555-5555-5555-5555-555555555555"); - var sut = new WeaviateDynamicDataModelMapper("Collection", s_model, s_jsonSerializerOptions); + var sut = new WeaviateDynamicDataModelMapper("Collection", HasNamedVectors, s_model, s_jsonSerializerOptions); var dataModel = new Dictionary { @@ -182,7 +184,7 @@ public void MapFromDataToStorageModelMapsNullValues() ["NullableFloatVector"] = null }; - var sut = new WeaviateDynamicDataModelMapper("Collection", s_model, s_jsonSerializerOptions); + var sut = new WeaviateDynamicDataModelMapper("Collection", HasNamedVectors, s_model, s_jsonSerializerOptions); // Act var storageModel = sut.MapFromDataToStorageModel(dataModel); @@ -198,7 +200,7 @@ public void MapFromStorageToDataModelMapsAllSupportedTypes() { // Arrange var key = new Guid("55555555-5555-5555-5555-555555555555"); - var sut = new WeaviateDynamicDataModelMapper("Collection", s_model, s_jsonSerializerOptions); + var sut = new WeaviateDynamicDataModelMapper("Collection", HasNamedVectors, s_model, s_jsonSerializerOptions); var storageModel = new JsonObject { @@ -306,7 +308,7 @@ public void MapFromStorageToDataModelMapsNullValues() } }; - var sut = new WeaviateDynamicDataModelMapper("Collection", s_model, s_jsonSerializerOptions); + var sut = new WeaviateDynamicDataModelMapper("Collection", HasNamedVectors, s_model, s_jsonSerializerOptions); // Act var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); @@ -322,7 +324,7 @@ public void MapFromStorageToDataModelMapsNullValues() public void MapFromStorageToDataModelThrowsForMissingKey() { // Arrange - var sut 
= new WeaviateDynamicDataModelMapper("Collection", s_model, s_jsonSerializerOptions); + var sut = new WeaviateDynamicDataModelMapper("Collection", HasNamedVectors, s_model, s_jsonSerializerOptions); var storageModel = new JsonObject(); @@ -346,12 +348,12 @@ public void MapFromDataToStorageModelSkipsMissingProperties() ] }; - var model = new WeaviateModelBuilder().Build(typeof(Dictionary), recordDefinition, s_jsonSerializerOptions); + var model = new WeaviateModelBuilder(HasNamedVectors).Build(typeof(Dictionary), recordDefinition, s_jsonSerializerOptions); var key = new Guid("55555555-5555-5555-5555-555555555555"); var record = new Dictionary { ["Key"] = key }; - var sut = new WeaviateDynamicDataModelMapper("Collection", model, s_jsonSerializerOptions); + var sut = new WeaviateDynamicDataModelMapper("Collection", HasNamedVectors, model, s_jsonSerializerOptions); // Act var storageModel = sut.MapFromDataToStorageModel(record); @@ -377,11 +379,11 @@ public void MapFromStorageToDataModelSkipsMissingProperties() ] }; - var model = new WeaviateModelBuilder().Build(typeof(Dictionary), recordDefinition, s_jsonSerializerOptions); + var model = new WeaviateModelBuilder(HasNamedVectors).Build(typeof(Dictionary), recordDefinition, s_jsonSerializerOptions); var key = new Guid("55555555-5555-5555-5555-555555555555"); - var sut = new WeaviateDynamicDataModelMapper("Collection", model, s_jsonSerializerOptions); + var sut = new WeaviateDynamicDataModelMapper("Collection", HasNamedVectors, model, s_jsonSerializerOptions); var storageModel = new JsonObject { @@ -396,4 +398,81 @@ public void MapFromStorageToDataModelSkipsMissingProperties() Assert.False(dataModel.ContainsKey("StringDataProp")); Assert.False(dataModel.ContainsKey("FloatVector")); } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void MapFromDataToStorageModelMapsNamedVectorsCorrectly(bool hasNamedVectors) + { + // Arrange + var recordDefinition = new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(Guid)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 4) + ] + }; + + var model = new WeaviateModelBuilder(hasNamedVectors).Build(typeof(Dictionary), recordDefinition, s_jsonSerializerOptions); + + var key = new Guid("55555555-5555-5555-5555-555555555555"); + + var record = new Dictionary { ["Key"] = key, ["FloatVector"] = new ReadOnlyMemory(s_floatVector) }; + var sut = new WeaviateDynamicDataModelMapper("Collection", hasNamedVectors, model, s_jsonSerializerOptions); + + // Act + var storageModel = sut.MapFromDataToStorageModel(record); + + // Assert + var vectorProperty = hasNamedVectors ? 
storageModel["vectors"]!["floatVector"] : storageModel["vector"]; + + Assert.Equal(key, (Guid?)storageModel["id"]); + Assert.Equal(s_floatVector, vectorProperty!.AsArray().GetValues().ToArray()); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void MapFromStorageToDataModelMapsNamedVectorsCorrectly(bool hasNamedVectors) + { + // Arrange + var recordDefinition = new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(Guid)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 4) + ] + }; + + var model = new WeaviateModelBuilder(hasNamedVectors).Build(typeof(Dictionary), recordDefinition, s_jsonSerializerOptions); + + var key = new Guid("55555555-5555-5555-5555-555555555555"); + + var sut = new WeaviateDynamicDataModelMapper("Collection", hasNamedVectors, model, s_jsonSerializerOptions); + + var storageModel = new JsonObject { ["id"] = key }; + + var vector = new JsonArray(s_floatVector.Select(l => (JsonValue)l).ToArray()); + + if (hasNamedVectors) + { + storageModel["vectors"] = new JsonObject + { + ["floatVector"] = vector + }; + } + else + { + storageModel["vector"] = vector; + } + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); + + // Assert + Assert.Equal(key, dataModel["Key"]); + Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel["FloatVector"]!).ToArray()); + } } diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs index 23570ac0bf5b..20f0d560c8ef 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs @@ -14,11 +14,13 @@ namespace SemanticKernel.Connectors.Weaviate.UnitTests; /// public sealed class WeaviateVectorStoreCollectionCreateMappingTests { + private const bool HasNamedVectors = true; + [Fact] public void ItThrowsExceptionWithInvalidIndexKind() { // Arrange - var model = new WeaviateModelBuilder() + var model = new WeaviateModelBuilder(HasNamedVectors) .Build( typeof(Dictionary), new VectorStoreRecordDefinition @@ -31,7 +33,7 @@ public void ItThrowsExceptionWithInvalidIndexKind() }); // Act & Assert - Assert.Throws(() => WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", model)); + Assert.Throws(() => WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", HasNamedVectors, model)); } [Theory] @@ -41,7 +43,7 @@ public void ItThrowsExceptionWithInvalidIndexKind() public void ItReturnsCorrectSchemaWithValidIndexKind(string indexKind, string expectedIndexKind) { // Arrange - var model = new WeaviateModelBuilder() + var model = new WeaviateModelBuilder(HasNamedVectors) .Build( typeof(Dictionary), new VectorStoreRecordDefinition @@ -54,7 +56,7 @@ public void ItReturnsCorrectSchemaWithValidIndexKind(string indexKind, string ex }); // Act - var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", model); + var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", HasNamedVectors, model); var actualIndexKind = schema.VectorConfigurations["Vector"].VectorIndexType; // Assert @@ -62,10 +64,10 @@ public void 
ItReturnsCorrectSchemaWithValidIndexKind(string indexKind, string ex } [Fact] - public void ItThrowsExceptionWithInvalidDistanceFunction() + public void ItThrowsExceptionWithUnsupportedDistanceFunction() { // Arrange - var model = new WeaviateModelBuilder() + var model = new WeaviateModelBuilder(HasNamedVectors) .Build( typeof(Dictionary), new VectorStoreRecordDefinition @@ -73,12 +75,12 @@ public void ItThrowsExceptionWithInvalidDistanceFunction() Properties = [ new VectorStoreRecordKeyProperty("Key", typeof(Guid)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) { DistanceFunction = "non-existent-distance-function" } + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) { DistanceFunction = "unsupported-distance-function" } ] }); // Act & Assert - Assert.Throws(() => WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", model)); + Assert.Throws(() => WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", HasNamedVectors, model)); } [Theory] @@ -90,7 +92,7 @@ public void ItThrowsExceptionWithInvalidDistanceFunction() public void ItReturnsCorrectSchemaWithValidDistanceFunction(string distanceFunction, string expectedDistanceFunction) { // Arrange - var model = new WeaviateModelBuilder() + var model = new WeaviateModelBuilder(HasNamedVectors) .Build( typeof(Dictionary), new VectorStoreRecordDefinition @@ -103,7 +105,7 @@ public void ItReturnsCorrectSchemaWithValidDistanceFunction(string distanceFunct }); // Act - var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", model); + var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", HasNamedVectors, model); var actualDistanceFunction = schema.VectorConfigurations["Vector"].VectorIndexConfig?.Distance; @@ -158,12 +160,10 @@ public void ItReturnsCorrectSchemaWithValidDistanceFunction(string distanceFunct [InlineData(typeof(bool?), "boolean")] [InlineData(typeof(List), "boolean[]")] [InlineData(typeof(List), "boolean[]")] - [InlineData(typeof(object), "object")] - [InlineData(typeof(List), "object[]")] public void ItMapsPropertyCorrectly(Type propertyType, string expectedPropertyType) { // Arrange - var model = new WeaviateModelBuilder() + var model = new WeaviateModelBuilder(HasNamedVectors) .Build( typeof(Dictionary), new VectorStoreRecordDefinition @@ -178,7 +178,7 @@ public void ItMapsPropertyCorrectly(Type propertyType, string expectedPropertyTy new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }); // Act - var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", model); + var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", HasNamedVectors, model); var property = schema.Properties[0]; @@ -188,4 +188,48 @@ public void ItMapsPropertyCorrectly(Type propertyType, string expectedPropertyTy Assert.True(property.IndexSearchable); Assert.True(property.IndexFilterable); } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ItReturnsCorrectSchemaWithValidVectorConfiguration(bool hasNamedVectors) + { + // Arrange + var model = new WeaviateModelBuilder(hasNamedVectors) + .Build( + typeof(Dictionary), + new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(Guid)), + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 4) + { + 
DistanceFunction = DistanceFunction.CosineDistance, + IndexKind = IndexKind.Hnsw + } + ] + }); + + // Act + var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", hasNamedVectors, model); + + // Assert + if (hasNamedVectors) + { + Assert.Null(schema.VectorIndexConfig?.Distance); + Assert.Null(schema.VectorIndexType); + Assert.True(schema.VectorConfigurations.ContainsKey("Vector")); + + Assert.Equal("cosine", schema.VectorConfigurations["Vector"].VectorIndexConfig?.Distance); + Assert.Equal("hnsw", schema.VectorConfigurations["Vector"].VectorIndexType); + } + else + { + Assert.False(schema.VectorConfigurations.ContainsKey("Vector")); + + Assert.Equal("cosine", schema.VectorIndexConfig?.Distance); + Assert.Equal("hnsw", schema.VectorIndexType); + } + } } diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionSearchMappingTests.cs index 35a00c0376fc..3016b0083e2c 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionSearchMappingTests.cs @@ -13,8 +13,10 @@ namespace SemanticKernel.Connectors.Weaviate.UnitTests; /// public sealed class WeaviateVectorStoreCollectionSearchMappingTests { - [Fact] - public void MapSearchResultByDefaultReturnsValidResult() + [Theory] + [InlineData(true)] + [InlineData(false)] + public void MapSearchResultByDefaultReturnsValidResult(bool hasNamedVectors) { // Arrange var jsonObject = new JsonObject @@ -22,11 +24,7 @@ public void MapSearchResultByDefaultReturnsValidResult() ["_additional"] = new JsonObject { ["distance"] = 0.5, - ["id"] = "55555555-5555-5555-5555-555555555555", - ["vectors"] = new JsonObject - { - ["descriptionEmbedding"] = new JsonArray(new List { 30, 31, 32, 33 }.Select(l => (JsonNode)l).ToArray()) - } + ["id"] = "55555555-5555-5555-5555-555555555555" }, ["description"] = "This is a great hotel.", ["hotelCode"] = 42, @@ -37,14 +35,27 @@ public void MapSearchResultByDefaultReturnsValidResult() ["timestamp"] = "2024-08-28T10:11:12-07:00" }; + var vector = new JsonArray(new List { 30, 31, 32, 33 }.Select(l => (JsonNode)l).ToArray()); + + if (hasNamedVectors) + { + jsonObject["_additional"]!["vectors"] = new JsonObject + { + ["descriptionEmbedding"] = vector + }; + } + else + { + jsonObject["_additional"]!["vector"] = vector; + } + // Act - var (storageModel, score) = WeaviateVectorStoreCollectionSearchMapping.MapSearchResult(jsonObject, "distance"); + var (storageModel, score) = WeaviateVectorStoreCollectionSearchMapping.MapSearchResult(jsonObject, "distance", hasNamedVectors); // Assert Assert.Equal(0.5, score); Assert.Equal("55555555-5555-5555-5555-555555555555", storageModel["id"]!.GetValue()); - Assert.Equal([30f, 31f, 32f, 33f], storageModel["vectors"]!["descriptionEmbedding"]!.AsArray().Select(l => l!.GetValue())); Assert.Equal("This is a great hotel.", storageModel["properties"]!["description"]!.GetValue()); Assert.Equal(42, storageModel["properties"]!["hotelCode"]!.GetValue()); Assert.Equal(4.5, storageModel["properties"]!["hotelRating"]!.GetValue()); @@ -52,5 +63,9 @@ public void MapSearchResultByDefaultReturnsValidResult() Assert.True(storageModel["properties"]!["parking_is_included"]!.GetValue()); Assert.Equal(["t1", "t2"], storageModel["properties"]!["tags"]!.AsArray().Select(l => l!.GetValue())); 
Assert.Equal("2024-08-28T10:11:12-07:00", storageModel["properties"]!["timestamp"]!.GetValue()); + + var vectorProperty = hasNamedVectors ? storageModel["vectors"]!["descriptionEmbedding"] : storageModel["vector"]; + + Assert.Equal([30f, 31f, 32f, 33f], vectorProperty!.AsArray().Select(l => l!.GetValue())); } } diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs index b76aacfb281e..8bbdf51404e3 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs @@ -32,7 +32,7 @@ public sealed class WeaviateVectorStoreRecordCollectionQueryBuilderTests } }; - private readonly VectorStoreRecordModel _model = new WeaviateModelBuilder() + private readonly VectorStoreRecordModel _model = new WeaviateModelBuilder(hasNamedVectors: true) .Build( typeof(Dictionary), new() @@ -49,8 +49,10 @@ public sealed class WeaviateVectorStoreRecordCollectionQueryBuilderTests private readonly ReadOnlyMemory _vector = new([31f, 32f, 33f, 34f]); - [Fact] - public void BuildSearchQueryByDefaultReturnsValidQuery() + [Theory] + [InlineData(true)] + [InlineData(false)] + public void BuildSearchQueryByDefaultReturnsValidQuery(bool hasNamedVectors) { // Arrange var expectedQuery = $$""" @@ -61,7 +63,7 @@ public void BuildSearchQueryByDefaultReturnsValidQuery() offset: 2 {{string.Empty}} nearVector: { - targetVectors: ["descriptionEmbedding"] + {{(hasNamedVectors ? "targetVectors: [\"descriptionEmbedding\"]" : string.Empty)}} vector: [31,32,33,34] } ) { @@ -89,7 +91,8 @@ HotelName HotelCode Tags s_jsonSerializerOptions, top: 3, searchOptions, - this._model); + this._model, + hasNamedVectors); // Assert Assert.Equal(expectedQuery, query); @@ -98,8 +101,10 @@ HotelName HotelCode Tags Assert.DoesNotContain("where", query); } - [Fact] - public void BuildSearchQueryWithIncludedVectorsReturnsValidQuery() + [Theory] + [InlineData(true)] + [InlineData(false)] + public void BuildSearchQueryWithIncludedVectorsReturnsValidQuery(bool hasNamedVectors) { // Arrange var searchOptions = new VectorSearchOptions @@ -116,10 +121,13 @@ public void BuildSearchQueryWithIncludedVectorsReturnsValidQuery() s_jsonSerializerOptions, top: 3, searchOptions, - this._model); + this._model, + hasNamedVectors); // Assert - Assert.Contains("vectors { DescriptionEmbedding }", query); + var vectorQuery = hasNamedVectors ? 
"vectors { DescriptionEmbedding }" : "vector"; + + Assert.Contains(vectorQuery, query); } [Fact] @@ -145,7 +153,8 @@ public void BuildSearchQueryWithFilterReturnsValidQuery() s_jsonSerializerOptions, top: 3, searchOptions, - this._model); + this._model, + hasNamedVectors: true); // Assert Assert.Contains(ExpectedFirstSubquery, query); @@ -170,7 +179,8 @@ public void BuildSearchQueryWithInvalidFilterValueThrowsException() s_jsonSerializerOptions, top: 3, searchOptions, - this._model)); + this._model, + hasNamedVectors: true)); } [Fact] @@ -191,7 +201,8 @@ public void BuildSearchQueryWithNonExistentPropertyInFilterThrowsException() s_jsonSerializerOptions, top: 3, searchOptions, - this._model)); + this._model, + hasNamedVectors: true)); } #region private diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs index 78763fc0a59a..8524c03ae574 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs @@ -28,27 +28,10 @@ public sealed class WeaviateVectorStoreRecordMapperTests } }; - private readonly WeaviateVectorStoreRecordMapper _sut = - new( - "CollectionName", - new WeaviateModelBuilder() - .Build( - typeof(Dictionary), - new VectorStoreRecordDefinition - { - Properties = - [ - new VectorStoreRecordKeyProperty("HotelId", typeof(Guid)), - new VectorStoreRecordDataProperty("HotelName", typeof(string)), - new VectorStoreRecordDataProperty("Tags", typeof(List)), - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory), 10) - ] - }, - s_jsonSerializerOptions), - s_jsonSerializerOptions); - - [Fact] - public void MapFromDataToStorageModelReturnsValidObject() + [Theory] + [InlineData(true)] + [InlineData(false)] + public void MapFromDataToStorageModelReturnsValidObject(bool hasNamedVectors) { // Arrange var hotel = new WeaviateHotel @@ -59,8 +42,10 @@ public void MapFromDataToStorageModelReturnsValidObject() DescriptionEmbedding = new ReadOnlyMemory([1f, 2f, 3f]) }; + var sut = GetMapper(hasNamedVectors); + // Act - var document = this._sut.MapFromDataToStorageModel(hotel); + var document = sut.MapFromDataToStorageModel(hotel); // Assert Assert.NotNull(document); @@ -68,11 +53,16 @@ public void MapFromDataToStorageModelReturnsValidObject() Assert.Equal("55555555-5555-5555-5555-555555555555", document["id"]!.GetValue()); Assert.Equal("Test Name", document["properties"]!["hotelName"]!.GetValue()); Assert.Equal(["tag1", "tag2"], document["properties"]!["tags"]!.AsArray().Select(l => l!.GetValue())); - Assert.Equal([1f, 2f, 3f], document["vectors"]!["descriptionEmbedding"]!.AsArray().Select(l => l!.GetValue())); + + var vectorNode = hasNamedVectors ? 
document["vectors"]!["descriptionEmbedding"] : document["vector"]; + + Assert.Equal([1f, 2f, 3f], vectorNode!.AsArray().Select(l => l!.GetValue())); } - [Fact] - public void MapFromStorageToDataModelReturnsValidObject() + [Theory] + [InlineData(true)] + [InlineData(false)] + public void MapFromStorageToDataModelReturnsValidObject(bool hasNamedVectors) { // Arrange var document = new JsonObject @@ -84,10 +74,22 @@ public void MapFromStorageToDataModelReturnsValidObject() document["properties"]!["hotelName"] = "Test Name"; document["properties"]!["tags"] = new JsonArray(new List { "tag1", "tag2" }.Select(l => JsonValue.Create(l)).ToArray()); - document["vectors"]!["descriptionEmbedding"] = new JsonArray(new List { 1f, 2f, 3f }.Select(l => JsonValue.Create(l)).ToArray()); + + var vectorNode = new JsonArray(new List { 1f, 2f, 3f }.Select(l => JsonValue.Create(l)).ToArray()); + + if (hasNamedVectors) + { + document["vectors"]!["descriptionEmbedding"] = vectorNode; + } + else + { + document["vector"] = vectorNode; + } + + var sut = GetMapper(hasNamedVectors); // Act - var hotel = this._sut.MapFromStorageToDataModel(document, new() { IncludeVectors = true }); + var hotel = sut.MapFromStorageToDataModel(document, new() { IncludeVectors = true }); // Assert Assert.NotNull(hotel); @@ -97,4 +99,27 @@ public void MapFromStorageToDataModelReturnsValidObject() Assert.Equal(["tag1", "tag2"], hotel.Tags); Assert.True(new ReadOnlyMemory([1f, 2f, 3f]).Span.SequenceEqual(hotel.DescriptionEmbedding!.Value.Span)); } + + #region private + + private static WeaviateVectorStoreRecordMapper GetMapper(bool hasNamedVectors) => new( + "CollectionName", + hasNamedVectors, + new WeaviateModelBuilder(hasNamedVectors) + .Build( + typeof(Dictionary), + new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty("HotelId", typeof(Guid)), + new VectorStoreRecordDataProperty("HotelName", typeof(string)), + new VectorStoreRecordDataProperty("Tags", typeof(List)), + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory), 10) + ] + }, + s_jsonSerializerOptions), + s_jsonSerializerOptions); + + #endregion } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateBatchConformanceTests.cs index 9d4b065d6c4c..4222608ab7c0 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateBatchConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateBatchConformanceTests.cs @@ -6,7 +6,12 @@ namespace WeaviateIntegrationTests.CRUD; -public class WeaviateBatchConformanceTests(WeaviateSimpleModelFixture fixture) - : BatchConformanceTests(fixture), IClassFixture +public class WeaviateBatchConformanceTests_NamedVectors(WeaviateSimpleModelNamedVectorsFixture fixture) + : BatchConformanceTests(fixture), IClassFixture +{ +} + +public class WeaviateBatchConformanceTests_UnnamedVector(WeaviateSimpleModelUnnamedVectorFixture fixture) + : BatchConformanceTests(fixture), IClassFixture { } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateDynamicRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateDynamicRecordConformanceTests.cs index 7e024a7a50bb..62825fac4ab1 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateDynamicRecordConformanceTests.cs +++ 
b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateDynamicRecordConformanceTests.cs @@ -6,7 +6,12 @@ namespace WeaviateIntegrationTests.CRUD; -public class WeaviateDynamicRecordConformanceTests(WeaviateDynamicDataModelFixture fixture) - : DynamicDataModelConformanceTests(fixture), IClassFixture +public class WeaviateDynamicRecordConformanceTests_NamedVectors(WeaviateDynamicDataModelNamedVectorsFixture fixture) + : DynamicDataModelConformanceTests(fixture), IClassFixture +{ +} + +public class WeaviateDynamicRecordConformanceTests_UnnamedVector(WeaviateDynamicDataModelUnnamedVectorFixture fixture) + : DynamicDataModelConformanceTests(fixture), IClassFixture { } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs index 016ab64870e6..20d5888ad851 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs @@ -12,7 +12,7 @@ public class WeaviateNoVectorConformanceTests(WeaviateNoVectorConformanceTests.F { public new class Fixture : NoVectorConformanceTests.Fixture { - public override TestStore TestStore => WeaviateTestStore.Instance; + public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; /// /// Weaviate collections must start with an uppercase letter. diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateRecordConformanceTests.cs index 3beb7b6e70e5..c2ad732eb59d 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateRecordConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateRecordConformanceTests.cs @@ -6,7 +6,12 @@ namespace WeaviateIntegrationTests.CRUD; -public class WeaviateRecordConformanceTests(WeaviateSimpleModelFixture fixture) - : RecordConformanceTests(fixture), IClassFixture +public class WeaviateRecordConformanceTests_NamedVectors(WeaviateSimpleModelNamedVectorsFixture fixture) + : RecordConformanceTests(fixture), IClassFixture +{ +} + +public class WeaviateRecordConformanceTests_UnnamedVector(WeaviateSimpleModelUnnamedVectorFixture fixture) + : RecordConformanceTests(fixture), IClassFixture { } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Collections/WeaviateCollectionConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Collections/WeaviateCollectionConformanceTests.cs index e839b02ad942..3c817890ac16 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Collections/WeaviateCollectionConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Collections/WeaviateCollectionConformanceTests.cs @@ -6,7 +6,12 @@ namespace WeaviateIntegrationTests.Collections; -public class WeaviateCollectionConformanceTests(WeaviateFixture fixture) - : CollectionConformanceTests(fixture), IClassFixture +public class WeaviateCollectionConformanceTests_NamedVectors(WeaviateSimpleModelNamedVectorsFixture fixture) + : CollectionConformanceTests(fixture), IClassFixture +{ +} + +public class WeaviateCollectionConformanceTests_UnnamedVector(WeaviateSimpleModelUnnamedVectorFixture fixture) + : 
CollectionConformanceTests(fixture), IClassFixture { } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs index 32082232591d..f8f76dd27943 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs @@ -65,6 +65,8 @@ public override Task Equal_with_string_is_not_Contains() public new class Fixture : BasicFilterTests.Fixture { - public override TestStore TestStore => WeaviateTestStore.Instance; + public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; + + protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; } } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs index 312ac21b8372..c03a8fd8076b 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs @@ -65,7 +65,7 @@ public override Task Equal_with_string_is_not_Contains() public new class Fixture : BasicQueryTests.QueryFixture { - public override TestStore TestStore => WeaviateTestStore.Instance; + public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/HybridSearch/WeaviateKeywordVectorizedHybridSearchTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/HybridSearch/WeaviateKeywordVectorizedHybridSearchTests.cs index 30d6bc0516f5..1b386273b6d6 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/HybridSearch/WeaviateKeywordVectorizedHybridSearchTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/HybridSearch/WeaviateKeywordVectorizedHybridSearchTests.cs @@ -7,16 +7,16 @@ namespace WeaviateIntegrationTests.HybridSearch; -public class WeaviateKeywordVectorizedHybridSearchTests( - WeaviateKeywordVectorizedHybridSearchTests.VectorAndStringFixture vectorAndStringFixture, - WeaviateKeywordVectorizedHybridSearchTests.MultiTextFixture multiTextFixture) +public class WeaviateKeywordVectorizedHybridSearchTests_NamedVectors( + WeaviateKeywordVectorizedHybridSearchTests_NamedVectors.VectorAndStringFixture vectorAndStringFixture, + WeaviateKeywordVectorizedHybridSearchTests_NamedVectors.MultiTextFixture multiTextFixture) : KeywordVectorizedHybridSearchComplianceTests(vectorAndStringFixture, multiTextFixture), - IClassFixture, - IClassFixture + IClassFixture, + IClassFixture { public new class VectorAndStringFixture : KeywordVectorizedHybridSearchComplianceTests.VectorAndStringFixture { - public override TestStore TestStore => WeaviateTestStore.Instance; + public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; @@ -25,7 +25,33 @@ public class WeaviateKeywordVectorizedHybridSearchTests( public new class MultiTextFixture : 
KeywordVectorizedHybridSearchComplianceTests.MultiTextFixture { - public override TestStore TestStore => WeaviateTestStore.Instance; + public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; + + protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; + + protected override string CollectionName => "MultiTextHybridSearch"; + } +} + +public class WeaviateKeywordVectorizedHybridSearchTests_UnnamedVector( + WeaviateKeywordVectorizedHybridSearchTests_UnnamedVector.VectorAndStringFixture vectorAndStringFixture, + WeaviateKeywordVectorizedHybridSearchTests_UnnamedVector.MultiTextFixture multiTextFixture) + : KeywordVectorizedHybridSearchComplianceTests(vectorAndStringFixture, multiTextFixture), + IClassFixture, + IClassFixture +{ + public new class VectorAndStringFixture : KeywordVectorizedHybridSearchComplianceTests.VectorAndStringFixture + { + public override TestStore TestStore => WeaviateTestStore.UnnamedVectorInstance; + + protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; + + protected override string CollectionName => "VectorAndStringHybridSearch"; + } + + public new class MultiTextFixture : KeywordVectorizedHybridSearchComplianceTests.MultiTextFixture + { + public override TestStore TestStore => WeaviateTestStore.UnnamedVectorInstance; protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateBuilder.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateBuilder.cs index 1745a902a348..831f05734d6b 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateBuilder.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/TestContainer/WeaviateBuilder.cs @@ -8,7 +8,7 @@ namespace WeaviateIntegrationTests.Support.TestContainer; public sealed class WeaviateBuilder : ContainerBuilder { - public const string WeaviateImage = "semitechnologies/weaviate:1.26.4"; + public const string WeaviateImage = "semitechnologies/weaviate:1.28.12"; public const ushort WeaviateHttpPort = 8080; public const ushort WeaviateGrpcPort = 50051; diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateDynamicDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateDynamicDataModelFixture.cs index cd69271c5a12..038de8fa5fd2 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateDynamicDataModelFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateDynamicDataModelFixture.cs @@ -6,9 +6,21 @@ namespace WeaviateIntegrationTests.Support; public class WeaviateDynamicDataModelFixture : DynamicDataModelFixture { - public override TestStore TestStore => WeaviateTestStore.Instance; + public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; // Weaviate requires the name to start with a capital letter and not contain any chars other than a-Z and 0-9. 
// Source: https://weaviate.io/developers/weaviate/starter-guides/managing-collections#collection--property-names - protected override string CollectionName => $"A{Guid.NewGuid():N}"; + protected override string CollectionName => this.GetUniqueCollectionName(); + + public override string GetUniqueCollectionName() => $"A{Guid.NewGuid():N}"; +} + +public class WeaviateDynamicDataModelNamedVectorsFixture : WeaviateDynamicDataModelFixture +{ + public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; +} + +public class WeaviateDynamicDataModelUnnamedVectorFixture : WeaviateDynamicDataModelFixture +{ + public override TestStore TestStore => WeaviateTestStore.UnnamedVectorInstance; } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateFixture.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateFixture.cs deleted file mode 100644 index ac3b64f89006..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateFixture.cs +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using VectorDataSpecificationTests.Support; - -namespace WeaviateIntegrationTests.Support; - -public class WeaviateFixture : VectorStoreFixture -{ - public override TestStore TestStore => WeaviateTestStore.Instance; - - // Weaviate requires the name to start with a capital letter and not contain any chars other than a-Z and 0-9. - // Source: https://weaviate.io/developers/weaviate/starter-guides/managing-collections#collection--property-names - public override string GetUniqueCollectionName() => $"A{Guid.NewGuid():N}"; -} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateSimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateSimpleModelFixture.cs index 0fe7c713e46b..a10525cbb906 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateSimpleModelFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateSimpleModelFixture.cs @@ -6,9 +6,23 @@ namespace WeaviateIntegrationTests.Support; public class WeaviateSimpleModelFixture : SimpleModelFixture { - public override TestStore TestStore => WeaviateTestStore.Instance; + public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; + + protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; // Weaviate requires the name to start with a capital letter and not contain any chars other than a-Z and 0-9. 
// Source: https://weaviate.io/developers/weaviate/starter-guides/managing-collections#collection--property-names - protected override string CollectionName => $"A{Guid.NewGuid():N}"; + protected override string CollectionName => this.GetUniqueCollectionName(); + + public override string GetUniqueCollectionName() => $"A{Guid.NewGuid():N}"; +} + +public class WeaviateSimpleModelNamedVectorsFixture : WeaviateSimpleModelFixture +{ + public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; +} + +public class WeaviateSimpleModelUnnamedVectorFixture : WeaviateSimpleModelFixture +{ + public override TestStore TestStore => WeaviateTestStore.UnnamedVectorInstance; } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs index a7700149dad3..76aa72077a3a 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs @@ -12,9 +12,11 @@ namespace WeaviateIntegrationTests.Support; public sealed class WeaviateTestStore : TestStore { - public static WeaviateTestStore Instance { get; } = new(); + public static WeaviateTestStore NamedVectorsInstance { get; } = new(hasNamedVectors: true); + public static WeaviateTestStore UnnamedVectorInstance { get; } = new(hasNamedVectors: false); private readonly WeaviateContainer _container = new WeaviateBuilder().Build(); + private readonly bool _hasNamedVectors; public HttpClient? _httpClient { get; private set; } private WeaviateVectorStore? _defaultVectorStore; @@ -22,20 +24,13 @@ public sealed class WeaviateTestStore : TestStore public override IVectorStore DefaultVectorStore => this._defaultVectorStore ?? throw new InvalidOperationException("Not initialized"); - public override string DefaultDistanceFunction => DistanceFunction.CosineDistance; - - public WeaviateVectorStore GetVectorStore(WeaviateVectorStoreOptions options) - => new(this.Client, options); - - private WeaviateTestStore() - { - } + private WeaviateTestStore(bool hasNamedVectors) => this._hasNamedVectors = hasNamedVectors; protected override async Task StartAsync() { await this._container.StartAsync(); this._httpClient = new HttpClient { BaseAddress = new Uri($"http://localhost:{this._container.GetMappedPublicPort(WeaviateBuilder.WeaviateHttpPort)}/v1/") }; - this._defaultVectorStore = new(this._httpClient); + this._defaultVectorStore = new(this._httpClient, new() { HasNamedVectors = this._hasNamedVectors }); } protected override Task StopAsync() diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/VectorSearch/WeaviateVectorSearchDistanceFunctionComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/VectorSearch/WeaviateVectorSearchDistanceFunctionComplianceTests.cs new file mode 100644 index 000000000000..0c6a5aadd390 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/VectorSearch/WeaviateVectorSearchDistanceFunctionComplianceTests.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.Extensions.VectorData; +using VectorDataSpecificationTests.VectorSearch; +using WeaviateIntegrationTests.Support; +using Xunit; + +namespace WeaviateIntegrationTests.VectorSearch; + +public class WeaviateVectorSearchDistanceFunctionComplianceTests_NamedVectors(WeaviateSimpleModelNamedVectorsFixture fixture) + : VectorSearchDistanceFunctionComplianceTests(fixture), IClassFixture +{ + public override Task CosineSimilarity() => Assert.ThrowsAsync(base.CosineSimilarity); + + public override Task DotProductSimilarity() => Assert.ThrowsAsync(base.DotProductSimilarity); + + public override Task EuclideanDistance() => Assert.ThrowsAsync(base.EuclideanDistance); + + /// + /// Tests vector search using , computing -(u · v) as a distance metric per Weaviate's convention. + /// Expects scores of -1 (exact match), 1 (opposite), and 0 (orthogonal), sorted ascending ([0, 2, 1]), with lower scores indicating closer matches. + /// . + /// + public override Task NegativeDotProductSimilarity() => this.SimpleSearch(DistanceFunction.NegativeDotProductSimilarity, -1, 1, 0, [0, 2, 1]); +} + +public class WeaviateVectorSearchDistanceFunctionComplianceTests_UnnamedVector(WeaviateDynamicDataModelNamedVectorsFixture fixture) + : VectorSearchDistanceFunctionComplianceTests(fixture), IClassFixture +{ + public override Task CosineSimilarity() => Assert.ThrowsAsync(base.CosineSimilarity); + + public override Task DotProductSimilarity() => Assert.ThrowsAsync(base.DotProductSimilarity); + + public override Task EuclideanDistance() => Assert.ThrowsAsync(base.EuclideanDistance); + + /// + /// Tests vector search using , computing -(u · v) as a distance metric per Weaviate's convention. + /// Expects scores of -1 (exact match), 1 (opposite), and 0 (orthogonal), sorted ascending ([0, 2, 1]), with lower scores indicating closer matches. + /// . + /// + public override Task NegativeDotProductSimilarity() => this.SimpleSearch(DistanceFunction.NegativeDotProductSimilarity, -1, 1, 0, [0, 2, 1]); +} From 4d75e8a0bcc9c5243294748edff793a216888453 Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Thu, 17 Apr 2025 11:06:08 +0100 Subject: [PATCH 48/63] .Net: Add delete and exists methods for IVectorStore (#11596) ### Motivation and Context #10881 ### Description - Add CollectionExists and DeleteCollection methods to VectorStore - Add implementations for all vector stores that proxy to the collection - Add integration tests - Make some small fixes for some integration tests. 
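A rough usage sketch of the new store-level methods (illustrative only: the store instance and collection name are placeholders, and `CollectionExistsAsync` is assumed to return `Task<bool>`, matching the connector implementations below):

```csharp
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.VectorData;

internal static class VectorStoreMaintenance
{
    // Drops a collection if it exists, using the new store-level methods added in this PR.
    public static async Task ResetCollectionAsync(
        IVectorStore vectorStore,
        string collectionName,
        CancellationToken cancellationToken = default)
    {
        // CollectionExistsAsync proxies to the underlying collection's CollectionExistsAsync.
        if (await vectorStore.CollectionExistsAsync(collectionName, cancellationToken))
        {
            // DeleteCollectionAsync likewise proxies to the collection's DeleteCollectionAsync.
            await vectorStore.DeleteCollectionAsync(collectionName, cancellationToken);
        }
    }
}
```
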
### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- .../TextEmbeddingVectorStore.cs | 6 +++ .../PineconeFactory.cs | 4 ++ .../RedisFactory.cs | 4 ++ .../AzureAISearchVectorStore.cs | 17 +++++++ .../AzureCosmosDBMongoDBVectorStore.cs | 18 +++++++ .../AzureCosmosDBNoSQLVectorStore.cs | 18 +++++++ ...osmosDBNoSQLVectorStoreRecordCollection.cs | 26 ++++++++-- .../InMemoryVectorStore.cs | 27 ++++++++--- .../MongoDBVectorStore.cs | 18 +++++++ .../PineconeVectorStore.cs | 18 +++++++ .../PostgresVectorStore.cs | 18 +++++++ .../QdrantVectorStore.cs | 18 +++++++ .../RedisVectorStore.cs | 18 +++++++ .../SqlServerVectorStore.cs | 18 +++++++ .../SqliteVectorStore.cs | 18 +++++++ .../WeaviateVectorStore.cs | 18 +++++++ .../VectorStorage/IVectorStore.cs | 17 +++++++ .../Support/CosmosNoSQLTestStore.cs | 2 + .../Collections/CollectionConformanceTests.cs | 48 ++++++++++++++++++- 19 files changed, 319 insertions(+), 12 deletions(-) diff --git a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs index 47ca0933d53f..545a4f9254c0 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs @@ -41,6 +41,12 @@ public IVectorStoreRecordCollection GetCollection( return embeddingStore; } + /// + public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) => _decoratedVectorStore.CollectionExistsAsync(name, cancellationToken); + + /// + public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) => _decoratedVectorStore.DeleteCollectionAsync(name, cancellationToken); + /// public object? GetService(Type serviceType, object? serviceKey = null) { diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs index 5c10b7b0d860..6e391fffc16a 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/PineconeFactory.cs @@ -67,5 +67,9 @@ public IVectorStoreRecordCollection GetCollection( public object? GetService(Type serviceType, object? 
serviceKey = null) => innerStore.GetService(serviceType, serviceKey); public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) => innerStore.ListCollectionNamesAsync(cancellationToken); + + public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) => innerStore.CollectionExistsAsync(name, cancellationToken); + + public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) => innerStore.DeleteCollectionAsync(name, cancellationToken); } } diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs index fc543c71c1a0..86e54937bdf6 100644 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs +++ b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/RedisFactory.cs @@ -68,5 +68,9 @@ public IVectorStoreRecordCollection GetCollection( public object? GetService(Type serviceType, object? serviceKey = null) => innerStore.GetService(serviceType, serviceKey); public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) => innerStore.ListCollectionNamesAsync(cancellationToken); + + public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) => innerStore.CollectionExistsAsync(name, cancellationToken); + + public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) => innerStore.DeleteCollectionAsync(name, cancellationToken); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs index afacf870d602..114055526a96 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs @@ -28,6 +28,9 @@ public sealed class AzureAISearchVectorStore : IVectorStore /// Optional configuration options for this class. private readonly AzureAISearchVectorStoreOptions _options; + /// A general purpose definition that can be used to construct a collection when needing to proxy schema agnostic operations. + private static readonly VectorStoreRecordDefinition s_generalPurposeDefinition = new() { Properties = [new VectorStoreRecordKeyProperty("Key", typeof(string))] }; + /// /// Initializes a new instance of the class. /// @@ -85,6 +88,20 @@ public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancel } } + /// + public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.CollectionExistsAsync(cancellationToken); + } + + /// + public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.DeleteCollectionAsync(cancellationToken); + } + /// public object? GetService(Type serviceType, object? 
serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs index dc75467c17f1..7e63cd7a50ae 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Threading; +using System.Threading.Tasks; using Microsoft.Extensions.VectorData; using MongoDB.Driver; @@ -26,6 +27,9 @@ public sealed class AzureCosmosDBMongoDBVectorStore : IVectorStore /// Optional configuration options for this class. private readonly AzureCosmosDBMongoDBVectorStoreOptions _options; + /// A general purpose definition that can be used to construct a collection when needing to proxy schema agnostic operations. + private static readonly VectorStoreRecordDefinition s_generalPurposeDefinition = new() { Properties = [new VectorStoreRecordKeyProperty("Key", typeof(string))] }; + /// /// Initializes a new instance of the class. /// @@ -81,6 +85,20 @@ public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancel } } + /// + public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.CollectionExistsAsync(cancellationToken); + } + + /// + public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.DeleteCollectionAsync(cancellationToken); + } + /// public object? GetService(Type serviceType, object? serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs index f93cf0ad1517..e6aeacc85f24 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Threading; +using System.Threading.Tasks; using Microsoft.Azure.Cosmos; using Microsoft.Extensions.VectorData; @@ -26,6 +27,9 @@ public sealed class AzureCosmosDBNoSQLVectorStore : IVectorStore /// Optional configuration options for this class. private readonly AzureCosmosDBNoSQLVectorStoreOptions _options; + /// A general purpose definition that can be used to construct a collection when needing to proxy schema agnostic operations. + private static readonly VectorStoreRecordDefinition s_generalPurposeDefinition = new() { Properties = [new VectorStoreRecordKeyProperty("Key", typeof(string))] }; + /// /// Initializes a new instance of the class. 
/// @@ -90,6 +94,20 @@ public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancel } } + /// + public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.CollectionExistsAsync(cancellationToken); + } + + /// + public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.DeleteCollectionAsync(cancellationToken); + } + /// public object? GetService(Type serviceType, object? serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index b61112ff5439..993c3c3debb0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -170,12 +170,28 @@ public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellatio } /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + public async Task DeleteCollectionAsync(CancellationToken cancellationToken = default) { - return this.RunOperationAsync("DeleteContainer", () => - this._database + try + { + await this._database .GetContainer(this.Name) - .DeleteContainerAsync(cancellationToken: cancellationToken)); + .DeleteContainerAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch (CosmosException ex) when (ex.StatusCode == System.Net.HttpStatusCode.NotFound) + { + // Do nothing, since the container is already deleted. + } + catch (CosmosException ex) + { + throw new VectorStoreOperationException("Call to vector store failed.", ex) + { + VectorStoreSystemName = AzureCosmosDBNoSQLConstants.VectorStoreSystemName, + VectorStoreName = this._collectionMetadata.VectorStoreName, + CollectionName = this.Name, + OperationName = "DeleteContainer" + }; + } } /// @@ -441,7 +457,7 @@ private async Task RunOperationAsync(string operationName, Func> o { return await operation.Invoke().ConfigureAwait(false); } - catch (Exception ex) + catch (CosmosException ex) { throw new VectorStoreOperationException("Call to vector store failed.", ex) { diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs index f2cf7def2360..f5b495341775 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs @@ -5,6 +5,7 @@ using System.Collections.Generic; using System.Linq; using System.Threading; +using System.Threading.Tasks; using Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.InMemory; @@ -18,7 +19,7 @@ public sealed class InMemoryVectorStore : IVectorStore private readonly VectorStoreMetadata _metadata; /// Internal storage for the record collection. - private readonly ConcurrentDictionary> _internalCollection; + private readonly ConcurrentDictionary> _internalCollections; /// The data type of each collection, to enforce a single type per collection. 
private readonly ConcurrentDictionary _internalCollectionTypes = new(); @@ -28,7 +29,7 @@ public sealed class InMemoryVectorStore : IVectorStore /// public InMemoryVectorStore() { - this._internalCollection = new(); + this._internalCollections = new(); this._metadata = new() { @@ -42,7 +43,7 @@ public InMemoryVectorStore() /// Allows passing in the dictionary used for storage, for testing purposes. internal InMemoryVectorStore(ConcurrentDictionary> internalCollection) { - this._internalCollection = internalCollection; + this._internalCollections = internalCollection; this._metadata = new() { @@ -61,7 +62,7 @@ public IVectorStoreRecordCollection GetCollection( } var collection = new InMemoryVectorStoreRecordCollection( - this._internalCollection, + this._internalCollections, this._internalCollectionTypes, name, new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; @@ -71,7 +72,21 @@ public IVectorStoreRecordCollection GetCollection( /// public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) { - return this._internalCollection.Keys.ToAsyncEnumerable(); + return this._internalCollections.Keys.ToAsyncEnumerable(); + } + + /// + public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) + { + return this._internalCollections.ContainsKey(name) ? Task.FromResult(true) : Task.FromResult(false); + } + + /// + public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) + { + this._internalCollections.TryRemove(name, out _); + this._internalCollectionTypes.TryRemove(name, out _); + return Task.CompletedTask; } /// @@ -82,7 +97,7 @@ public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cance return serviceKey is not null ? null : serviceType == typeof(VectorStoreMetadata) ? this._metadata : - serviceType == typeof(ConcurrentDictionary>) ? this._internalCollection : + serviceType == typeof(ConcurrentDictionary>) ? this._internalCollections : serviceType.IsInstanceOfType(this) ? this : null; } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs index bbe3ac92f988..5c5dd3e2072a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Threading; +using System.Threading.Tasks; using Microsoft.Extensions.VectorData; using MongoDB.Driver; @@ -26,6 +27,9 @@ public sealed class MongoDBVectorStore : IVectorStore /// Optional configuration options for this class. private readonly MongoDBVectorStoreOptions _options; + /// A general purpose definition that can be used to construct a collection when needing to proxy schema agnostic operations. + private static readonly VectorStoreRecordDefinition s_generalPurposeDefinition = new() { Properties = [new VectorStoreRecordKeyProperty("Key", typeof(string))] }; + /// /// Initializes a new instance of the class. 
/// @@ -81,6 +85,20 @@ public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancel } } + /// + public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.CollectionExistsAsync(cancellationToken); + } + + /// + public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.DeleteCollectionAsync(cancellationToken); + } + /// public object? GetService(Type serviceType, object? serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs index f0a837d660b8..ab31038cbd10 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Threading; +using System.Threading.Tasks; using Microsoft.Extensions.VectorData; using Pinecone; using Sdk = Pinecone; @@ -24,6 +25,9 @@ public sealed class PineconeVectorStore : IVectorStore /// Metadata about vector store. private readonly VectorStoreMetadata _metadata; + /// A general purpose definition that can be used to construct a collection when needing to proxy schema agnostic operations. + private static readonly VectorStoreRecordDefinition s_generalPurposeDefinition = new() { Properties = [new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 1)] }; + /// /// Initializes a new instance of the class. /// @@ -88,6 +92,20 @@ public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancel } } + /// + public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.CollectionExistsAsync(cancellationToken); + } + + /// + public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.DeleteCollectionAsync(cancellationToken); + } + /// public object? GetService(Type serviceType, object? serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs index 3c034e135c37..5bf1a7950a2b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Threading; +using System.Threading.Tasks; using Microsoft.Extensions.VectorData; using Npgsql; @@ -20,6 +21,9 @@ public sealed class PostgresVectorStore : IVectorStore /// Metadata about vector store. private readonly VectorStoreMetadata _metadata; + /// A general purpose definition that can be used to construct a collection when needing to proxy schema agnostic operations. + private static readonly VectorStoreRecordDefinition s_generalPurposeDefinition = new() { Properties = [new VectorStoreRecordKeyProperty("Key", typeof(string))] }; + /// /// Initializes a new instance of the class. 
/// @@ -86,6 +90,20 @@ public IVectorStoreRecordCollection GetCollection( return recordCollection as IVectorStoreRecordCollection ?? throw new InvalidOperationException("Failed to cast record collection."); } + /// + public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.CollectionExistsAsync(cancellationToken); + } + + /// + public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.DeleteCollectionAsync(cancellationToken); + } + /// public object? GetService(Type serviceType, object? serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs index 79c46770ba9e..ff9bba5e36a2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Threading; +using System.Threading.Tasks; using Grpc.Core; using Microsoft.Extensions.VectorData; using Qdrant.Client; @@ -27,6 +28,9 @@ public sealed class QdrantVectorStore : IVectorStore /// Optional configuration options for this class. private readonly QdrantVectorStoreOptions _options; + /// A general purpose definition that can be used to construct a collection when needing to proxy schema agnostic operations. + private static readonly VectorStoreRecordDefinition s_generalPurposeDefinition = new() { Properties = [new VectorStoreRecordKeyProperty("Key", typeof(ulong)), new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 1)] }; + /// /// Initializes a new instance of the class. /// @@ -101,6 +105,20 @@ public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancel } } + /// + public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.CollectionExistsAsync(cancellationToken); + } + + /// + public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.DeleteCollectionAsync(cancellationToken); + } + /// public object? GetService(Type serviceType, object? serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs index 582a90f3c476..3df29e9758b0 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Threading; +using System.Threading.Tasks; using Microsoft.Extensions.VectorData; using NRedisStack.RedisStackCommands; using StackExchange.Redis; @@ -27,6 +28,9 @@ public sealed class RedisVectorStore : IVectorStore /// Optional configuration options for this class. private readonly RedisVectorStoreOptions _options; + /// A general purpose definition that can be used to construct a collection when needing to proxy schema agnostic operations. 
+ private static readonly VectorStoreRecordDefinition s_generalPurposeDefinition = new() { Properties = [new VectorStoreRecordKeyProperty("Key", typeof(string))] }; + /// /// Initializes a new instance of the class. /// @@ -100,6 +104,20 @@ public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancel } } + /// + public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.CollectionExistsAsync(cancellationToken); + } + + /// + public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.DeleteCollectionAsync(cancellationToken); + } + /// public object? GetService(Type serviceType, object? serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs index 38dfcffec6cc..d1e76def38ec 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Threading; +using System.Threading.Tasks; using Microsoft.Data.SqlClient; using Microsoft.Extensions.VectorData; @@ -20,6 +21,9 @@ public sealed class SqlServerVectorStore : IVectorStore /// Metadata about vector store. private readonly VectorStoreMetadata _metadata; + /// A general purpose definition that can be used to construct a collection when needing to proxy schema agnostic operations. + private static readonly VectorStoreRecordDefinition s_generalPurposeDefinition = new() { Properties = [new VectorStoreRecordKeyProperty("Key", typeof(string))] }; + /// /// Initializes a new instance of the class. /// @@ -86,6 +90,20 @@ public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancel } } + /// + public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.CollectionExistsAsync(cancellationToken); + } + + /// + public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.DeleteCollectionAsync(cancellationToken); + } + /// public object? GetService(Type serviceType, object? serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs index bbab9a3beeeb..b06d243572d8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs @@ -5,6 +5,7 @@ using System.Data.Common; using System.Runtime.CompilerServices; using System.Threading; +using System.Threading.Tasks; using Microsoft.Data.Sqlite; using Microsoft.Extensions.VectorData; @@ -27,6 +28,9 @@ public sealed class SqliteVectorStore : IVectorStore /// Optional configuration options for this class. private readonly SqliteVectorStoreOptions _options; + /// A general purpose definition that can be used to construct a collection when needing to proxy schema agnostic operations. 
+ private static readonly VectorStoreRecordDefinition s_generalPurposeDefinition = new() { Properties = [new VectorStoreRecordKeyProperty("Key", typeof(string))] }; + /// /// Initializes a new instance of the class. /// @@ -108,6 +112,20 @@ public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancel } } + /// + public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.CollectionExistsAsync(cancellationToken); + } + + /// + public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.DeleteCollectionAsync(cancellationToken); + } + /// public object? GetService(Type serviceType, object? serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs index 1e5a29bd541b..83873f1eca00 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs @@ -6,6 +6,7 @@ using System.Runtime.CompilerServices; using System.Text.Json; using System.Threading; +using System.Threading.Tasks; using Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.Weaviate; @@ -27,6 +28,9 @@ public sealed class WeaviateVectorStore : IVectorStore /// Optional configuration options for this class. private readonly WeaviateVectorStoreOptions _options; + /// A general purpose definition that can be used to construct a collection when needing to proxy schema agnostic operations. + private static readonly VectorStoreRecordDefinition s_generalPurposeDefinition = new() { Properties = [new VectorStoreRecordKeyProperty("Key", typeof(Guid))] }; + /// /// Initializes a new instance of the class. /// @@ -114,6 +118,20 @@ public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancel } } + /// + public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.CollectionExistsAsync(cancellationToken); + } + + /// + public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) + { + var collection = this.GetCollection>(name, s_generalPurposeDefinition); + return collection.DeleteCollectionAsync(cancellationToken); + } + /// public object? GetService(Type serviceType, object? serviceKey = null) { diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs index 21c47ec9c238..df4372e3ecbf 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStore.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Threading; +using System.Threading.Tasks; namespace Microsoft.Extensions.VectorData; @@ -42,6 +43,22 @@ IVectorStoreRecordCollection GetCollection(string /// The list of names of all the collections in the vector store. IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default); + /// + /// Checks if the collection exists in the vector store. 
+ /// + /// The name of the collection. + /// The to monitor for cancellation requests. The default is . + /// if the collection exists, otherwise. + Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default); + + /// + /// Deletes the collection from the vector store. + /// + /// The name of the collection. + /// The to monitor for cancellation requests. The default is . + /// A that completes when the collection has been deleted. + Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default); + /// Asks the for an object of the specified type . /// The type of object being requested. /// An optional key that can be used to help identify the target service. diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestStore.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestStore.cs index 7e3269ba2a27..c45bac264fbd 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestStore.cs @@ -21,6 +21,8 @@ internal sealed class CosmosNoSqlTestStore : TestStore private Database? _database; private AzureCosmosDBNoSQLVectorStore? _defaultVectorStore; + public override string DefaultIndexKind => Microsoft.Extensions.VectorData.IndexKind.Flat; + public CosmosClient Client => this._client ?? throw new InvalidOperationException("Call InitializeAsync() first"); diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs index ff61fecdea42..c031add9a1d4 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Collections/CollectionConformanceTests.cs @@ -11,6 +11,52 @@ namespace VectorDataSpecificationTests.Collections; public abstract class CollectionConformanceTests(VectorStoreFixture fixture) where TKey : notnull { + [ConditionalFact] + public async Task VectorStoreDeleteCollectionDeletesExistingCollection() + { + // Arrange. + var collection = await this.GetNonExistingCollectionAsync>(); + await collection.CreateCollectionAsync(); + Assert.True(await collection.CollectionExistsAsync()); + + // Act. + await fixture.TestStore.DefaultVectorStore.DeleteCollectionAsync(collection.Name); + + // Assert. + Assert.False(await collection.CollectionExistsAsync()); + } + + [ConditionalFact] + public async Task VectorStoreDeleteCollectionDoesNotThrowForNonExistingCollection() + { + await fixture.TestStore.DefaultVectorStore.DeleteCollectionAsync(fixture.GetUniqueCollectionName()); + } + + [ConditionalFact] + public async Task VectorStoreCollectionExistsReturnsTrueForExistingCollection() + { + // Arrange. + var collection = await this.GetNonExistingCollectionAsync>(); + + try + { + await collection.CreateCollectionAsync(); + + // Act & Assert. 
+ Assert.True(await fixture.TestStore.DefaultVectorStore.CollectionExistsAsync(collection.Name)); + } + finally + { + await collection.DeleteCollectionAsync(); + } + } + + [ConditionalFact] + public async Task VectorStoreCollectionExistsReturnsFalseForNonExistingCollection() + { + Assert.False(await fixture.TestStore.DefaultVectorStore.CollectionExistsAsync(fixture.GetUniqueCollectionName())); + } + [ConditionalTheory] [MemberData(nameof(UseDynamicMappingData))] public Task DeleteCollectionDoesNotThrowForNonExistingCollection(bool useDynamicMapping) @@ -133,7 +179,7 @@ protected virtual async Task> GetNon new VectorStoreRecordKeyProperty(nameof(SimpleRecord.Id), typeof(TKey)) { StoragePropertyName = "key" }, new VectorStoreRecordDataProperty(nameof(SimpleRecord.Text), typeof(string)) { StoragePropertyName = "text" }, new VectorStoreRecordDataProperty(nameof(SimpleRecord.Number), typeof(int)) { StoragePropertyName = "number" }, - new VectorStoreRecordVectorProperty(nameof(SimpleRecord.Floats), typeof(ReadOnlyMemory), 10) + new VectorStoreRecordVectorProperty(nameof(SimpleRecord.Floats), typeof(ReadOnlyMemory), 10) { IndexKind = fixture.TestStore.DefaultIndexKind } ] }; From cd0f9854a22facd2a01f1f3964a2806e2af4e9cb Mon Sep 17 00:00:00 2001 From: Adam Sitnik Date: Fri, 18 Apr 2025 21:18:30 +0200 Subject: [PATCH 49/63] .Net MEVD Sqlite: Escaping (#11534) Escaping and quoting everything and everywhere for Sqlite connector: - escape the table names when we compute them for the first time, then always use escaped values - escape the storage names when we compute them, so all usages of .StorageName are safe - quote table and column names everywhere - conformance tests + fixes Contributes to #11154 --- .../Conditions/SqliteWhereCondition.cs | 4 +- .../SqliteConstants.cs | 4 +- ...liteVectorStoreCollectionCommandBuilder.cs | 227 +++++++++++------- .../SqliteVectorStoreRecordCollection.cs | 214 ++++++++--------- .../SqliteVectorStoreRecordPropertyMapping.cs | 23 +- .../SqliteConditionsTests.cs | 18 +- ...ectorStoreCollectionCommandBuilderTests.cs | 100 +++++--- ...teVectorStoreRecordPropertyMappingTests.cs | 38 +-- .../VectorStoreRecordModelBuilder.cs | 4 +- .../VectorStoreRecordModelBuildingOptions.cs | 5 + .../CRUD/SqliteBatchConformanceTests.cs | 17 ++ .../CRUD/SqliteRecordConformanceTests.cs | 17 ++ .../Support/SqliteSimpleModelFixture.cs | 11 + .../Support/SqliteTestStore.cs | 2 + 14 files changed, 414 insertions(+), 270 deletions(-) create mode 100644 dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteBatchConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteRecordConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteSimpleModelFixture.cs diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Conditions/SqliteWhereCondition.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Conditions/SqliteWhereCondition.cs index ea3f702a42b8..a31b7c5a5050 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Conditions/SqliteWhereCondition.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Conditions/SqliteWhereCondition.cs @@ -15,6 +15,6 @@ internal abstract class SqliteWhereCondition(string operand, List values public abstract string BuildQuery(List parameterNames); protected string GetOperand() => !string.IsNullOrWhiteSpace(this.TableName) ? 
- $"{this.TableName}.{this.Operand}" : - this.Operand; + $"\"{this.TableName}\".\"{this.Operand}\"" : + $"\"{this.Operand}\""; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs index 9c40b2668062..4fdb0d420c0e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs @@ -25,7 +25,9 @@ internal static class SqliteConstants SupportedKeyPropertyTypes = SqliteConstants.SupportedKeyTypes, SupportedDataPropertyTypes = SqliteConstants.SupportedDataTypes, SupportedEnumerableDataPropertyElementTypes = [], - SupportedVectorPropertyTypes = SqliteConstants.SupportedVectorTypes + SupportedVectorPropertyTypes = SqliteConstants.SupportedVectorTypes, + + EscapeIdentifier = SqliteVectorStoreCollectionCommandBuilder.EscapeIdentifier }; /// A of types that a key on the provided model may have. diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs index caa33cba46cc..8844e9494357 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreCollectionCommandBuilder.cs @@ -19,6 +19,10 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; [SuppressMessage("Security", "CA2100:Review SQL queries for security vulnerabilities", Justification = "User input is passed using command parameters.")] internal static class SqliteVectorStoreCollectionCommandBuilder { + internal const string DistancePropertyName = "distance"; + + internal static string EscapeIdentifier(this string value) => value.Replace("'", "''").Replace("\"", "\"\""); + public static DbCommand BuildTableCountCommand(SqliteConnection connection, string tableName) { Verify.NotNullOrWhiteSpace(tableName); @@ -41,16 +45,16 @@ public static DbCommand BuildCreateTableCommand(SqliteConnection connection, str { var builder = new StringBuilder(); - builder.AppendLine($"""CREATE TABLE {(ifNotExists ? "IF NOT EXISTS " : string.Empty)}"{tableName.Replace("\"", "\"\"")}" ("""); + builder.AppendLine($"CREATE TABLE {(ifNotExists ? "IF NOT EXISTS " : string.Empty)}\"{tableName}\" ("); - builder.AppendLine(string.Join(",\n", columns.Select(GetColumnDefinition))); - builder.Append(");"); + builder.AppendLine(string.Join(",\n", columns.Select(column => GetColumnDefinition(column, quote: true)))); + builder.AppendLine(");"); foreach (var column in columns) { if (column.HasIndex) { - builder.AppendLine($"CREATE INDEX {(ifNotExists ? "IF NOT EXISTS " : string.Empty)}{tableName}_{column.Name}_index ON {tableName}({column.Name});"); + builder.AppendLine($"CREATE INDEX {(ifNotExists ? "IF NOT EXISTS " : string.Empty)}\"{tableName}_{column.Name}_index\" ON \"{tableName}\"(\"{column.Name}\");"); } } @@ -70,9 +74,10 @@ public static DbCommand BuildCreateVirtualTableCommand( { var builder = new StringBuilder(); - builder.AppendLine($"CREATE VIRTUAL TABLE {(ifNotExists ? "IF NOT EXISTS " : string.Empty)}'{tableName.Replace("'", "''")}' USING {extensionName}("); + builder.AppendLine($"CREATE VIRTUAL TABLE {(ifNotExists ? 
"IF NOT EXISTS " : string.Empty)}\"{tableName}\" USING {extensionName}("); - builder.AppendLine(string.Join(",\n", columns.Select(GetColumnDefinition))); + // The vector extension is currently uncapable of handling quoted identifiers. + builder.AppendLine(string.Join(",\n", columns.Select(column => GetColumnDefinition(column, quote: false)))); builder.Append(");"); var command = connection.CreateCommand(); @@ -84,7 +89,7 @@ public static DbCommand BuildCreateVirtualTableCommand( public static DbCommand BuildDropTableCommand(SqliteConnection connection, string tableName) { - string query = $"DROP TABLE IF EXISTS [{tableName}];"; + string query = $"DROP TABLE IF EXISTS \"{tableName}\";"; var command = connection.CreateCommand(); @@ -97,8 +102,9 @@ public static DbCommand BuildInsertCommand( SqliteConnection connection, string tableName, string rowIdentifier, - IReadOnlyList columnNames, + IReadOnlyList properties, IReadOnlyList> records, + bool data, bool replaceIfExists = false) { var builder = new StringBuilder(); @@ -111,11 +117,12 @@ public static DbCommand BuildInsertCommand( var rowIdentifierParameterName = GetParameterName(rowIdentifier, recordIndex); var (columns, parameters, values) = GetQueryParts( - columnNames, + properties, records[recordIndex], - recordIndex); + recordIndex, + data); - builder.AppendLine($"INSERT{replacePlaceholder} INTO {tableName} ({string.Join(", ", columns)})"); + builder.AppendLine($"INSERT{replacePlaceholder} INTO \"{tableName}\" ({string.Join(", ", columns)})"); builder.AppendLine($"VALUES ({string.Join(", ", parameters)})"); builder.AppendLine($"RETURNING {rowIdentifier};"); @@ -130,154 +137,187 @@ public static DbCommand BuildInsertCommand( return command; } - public static DbCommand BuildSelectCommand( + public static DbCommand BuildSelectDataCommand( SqliteConnection connection, string tableName, - IReadOnlyList columnNames, + VectorStoreRecordModel model, List conditions, - string? orderByPropertyName = null) + GetFilteredRecordOptions? filterOptions = null, + string? extraWhereFilter = null, + Dictionary? extraParameters = null, + int top = 0, + int skip = 0) { var builder = new StringBuilder(); - var (command, whereClause) = GetCommandWithWhereClause(connection, conditions); + var (command, whereClause) = GetCommandWithWhereClause(connection, conditions, extraWhereFilter, extraParameters); + + builder.Append("SELECT "); + builder.AppendColumnNames(includeVectors: false, model.Properties); + builder.AppendLine($"FROM \"{tableName}\""); + builder.AppendWhereClause(whereClause); - builder.AppendLine($"SELECT {string.Join(", ", columnNames)}"); - builder.AppendLine($"FROM {tableName}"); + if (filterOptions is not null) + { + builder.AppendOrderBy(model, filterOptions); + } - AppendWhereClauseIfExists(builder, whereClause); - AppendOrderByIfExists(builder, orderByPropertyName); + builder.AppendLimits(top, skip); command.CommandText = builder.ToString(); return command; } - public static DbCommand BuildSelectLeftJoinCommand( + public static DbCommand BuildSelectLeftJoinCommand( SqliteConnection connection, - string leftTable, - string rightTable, + string vectorTableName, + string dataTableName, string joinColumnName, - IReadOnlyList leftTablePropertyNames, - IReadOnlyList rightTablePropertyNames, - List conditions, + VectorStoreRecordModel model, + IReadOnlyList conditions, + bool includeDistance, + GetFilteredRecordOptions? filterOptions = null, string? extraWhereFilter = null, Dictionary? extraParameters = null, - string? 
orderByPropertyName = null) + int top = 0, + int skip = 0) { var builder = new StringBuilder(); - List propertyNames = - [ - .. leftTablePropertyNames.Select(property => $"{leftTable}.{property}"), - .. rightTablePropertyNames.Select(property => $"{rightTable}.{property}"), - ]; - var (command, whereClause) = GetCommandWithWhereClause(connection, conditions, extraWhereFilter, extraParameters); - builder.AppendLine($"SELECT {string.Join(", ", propertyNames)}"); - builder.AppendLine($"FROM {leftTable} "); - builder.AppendLine($"LEFT JOIN {rightTable} ON {leftTable}.{joinColumnName} = {rightTable}.{joinColumnName}"); + builder.Append("SELECT "); + builder.AppendColumnNames(includeVectors: true, model.Properties, vectorTableName, dataTableName); + if (includeDistance) + { + builder.AppendLine($", \"{vectorTableName}\".\"{DistancePropertyName}\""); + } + builder.AppendLine($"FROM \"{vectorTableName}\""); + builder.AppendLine($"LEFT JOIN \"{dataTableName}\" ON \"{vectorTableName}\".\"{joinColumnName}\" = \"{dataTableName}\".\"{joinColumnName}\""); + builder.AppendWhereClause(whereClause); + + if (filterOptions is not null) + { + builder.AppendOrderBy(model, filterOptions, dataTableName); + } + else if (includeDistance) + { + builder.AppendLine($"ORDER BY \"{vectorTableName}\".\"{DistancePropertyName}\""); + } - AppendWhereClauseIfExists(builder, whereClause); - AppendOrderByIfExists(builder, orderByPropertyName); + builder.AppendLimits(top, skip); command.CommandText = builder.ToString(); return command; } - internal static DbCommand BuildSelectWhereCommand( - VectorStoreRecordModel model, + public static DbCommand BuildDeleteCommand( SqliteConnection connection, - int top, - GetFilteredRecordOptions options, - string table, - IReadOnlyList properties, - string whereFilter, - Dictionary whereParameters) + string tableName, + IReadOnlyList conditions) { - StringBuilder builder = new(200); + var builder = new StringBuilder(); - var (command, whereClause) = GetCommandWithWhereClause(connection, Array.Empty(), whereFilter, whereParameters); + var (command, whereClause) = GetCommandWithWhereClause(connection, conditions); - builder.Append("SELECT "); + builder.AppendLine($"DELETE FROM \"{tableName}\""); + builder.AppendWhereClause(whereClause); + + command.CommandText = builder.ToString(); + + return command; + } + + #region private + + private static StringBuilder AppendColumnNames(this StringBuilder builder, bool includeVectors, IReadOnlyList properties, + string? escapedVectorTableName = null, string? escapedDataTableName = null) + { foreach (var property in properties) { - if (options.IncludeVectors || property is not VectorStoreRecordVectorPropertyModel) + string? tableName = escapedDataTableName; + if (property is VectorStoreRecordVectorPropertyModel) + { + if (!includeVectors) + { + continue; + } + tableName = escapedVectorTableName; + } + + if (tableName is not null) + { + builder.AppendFormat("\"{0}\".\"{1}\",", tableName, property.StorageName); + } + else { builder.AppendFormat("\"{0}\",", property.StorageName); } } + builder.Length--; // Remove the trailing comma builder.AppendLine(); + return builder; + } - builder.AppendFormat("FROM {0}", table).AppendLine(); - builder.AppendFormat("WHERE {0}", whereClause).AppendLine(); - + private static StringBuilder AppendOrderBy(this StringBuilder builder, VectorStoreRecordModel model, + GetFilteredRecordOptions options, string? 
tableName = null) + { if (options.OrderBy.Values.Count > 0) { builder.Append("ORDER BY "); foreach (var sortInfo in options.OrderBy.Values) { - builder.AppendFormat("[{0}] {1},", - model.GetDataOrKeyProperty(sortInfo.PropertySelector).StorageName, - sortInfo.Ascending ? "ASC" : "DESC"); + var storageName = model.GetDataOrKeyProperty(sortInfo.PropertySelector).StorageName; + + if (tableName is not null) + { + builder.AppendFormat("\"{0}\".", tableName); + } + + builder.AppendFormat("\"{0}\" {1},", storageName, sortInfo.Ascending ? "ASC" : "DESC"); } builder.Length--; // remove the last comma builder.AppendLine(); } - builder.AppendFormat("LIMIT {0}", top).AppendLine(); - builder.AppendFormat("OFFSET {0}", options.Skip).AppendLine(); - - command.CommandText = builder.ToString(); - - return command; + return builder; } - public static DbCommand BuildDeleteCommand( - SqliteConnection connection, - string tableName, - List conditions) + private static StringBuilder AppendLimits(this StringBuilder builder, int top, int skip) { - var builder = new StringBuilder(); - - var (command, whereClause) = GetCommandWithWhereClause(connection, conditions); - - builder.AppendLine($"DELETE FROM [{tableName}]"); - - AppendWhereClauseIfExists(builder, whereClause); + if (top > 0) + { + builder.AppendFormat("LIMIT {0}", top).AppendLine(); + } - command.CommandText = builder.ToString(); + if (skip > 0) + { + builder.AppendFormat("OFFSET {0}", skip).AppendLine(); + } - return command; + return builder; } - #region private - - private static void AppendWhereClauseIfExists(StringBuilder builder, string? whereClause) + private static StringBuilder AppendWhereClause(this StringBuilder builder, string? whereClause) { if (!string.IsNullOrWhiteSpace(whereClause)) { builder.AppendLine($"WHERE {whereClause}"); } - } - private static void AppendOrderByIfExists(StringBuilder builder, string? propertyName) - { - if (!string.IsNullOrWhiteSpace(propertyName)) - { - builder.AppendLine($"ORDER BY {propertyName}"); - } + return builder; } - private static string GetColumnDefinition(SqliteColumn column) + private static string GetColumnDefinition(SqliteColumn column, bool quote) { const string PrimaryKeyIdentifier = "PRIMARY KEY"; - List columnDefinitionParts = [column.Name, column.Type]; + List columnDefinitionParts = [quote ? $"\"{column.Name}\"" : column.Name, column.Type]; if (column.IsPrimary) { @@ -342,19 +382,24 @@ private static (DbCommand Command, string WhereClause) GetCommandWithWhereClause } private static (List Columns, List ParameterNames, List ParameterValues) GetQueryParts( - IReadOnlyList propertyNames, + IReadOnlyList properties, Dictionary record, - int index) + int index, + bool data) { var columns = new List(); var parameterNames = new List(); var parameterValues = new List(); - foreach (var propertyName in propertyNames) + foreach (var property in properties) { - if (record.TryGetValue(propertyName, out var value)) + bool include = property is VectorStoreRecordKeyPropertyModel // The Key column is included in both Vector and Data tables. + || (data == property is VectorStoreRecordDataPropertyModel); // The Data column is included only in the Data table. + + string propertyName = property.StorageName; + if (include && record.TryGetValue(propertyName, out var value)) { - columns.Add(propertyName); + columns.Add($"\"{propertyName}\""); parameterNames.Add(GetParameterName(propertyName, index)); parameterValues.Add(value ?? 
DBNull.Value); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 1d1a044f6eb5..69d30e3ca18e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -3,7 +3,6 @@ using System; using System.Collections.Generic; using System.Data.Common; -using System.Diagnostics; using System.Linq; using System.Linq.Expressions; using System.Runtime.CompilerServices; @@ -50,18 +49,6 @@ public sealed class SqliteVectorStoreRecordCollection : IVectorSt /// The storage name of the key property. private readonly string _keyStorageName; - /// Collection of properties to operate in SQLite data table. - private readonly List _dataTableProperties = []; - - /// Collection of properties to operate in SQLite vector table. - private readonly List _vectorTableProperties = []; - - /// Collection of property names to operate in SQLite data table. - private readonly List _dataTableStoragePropertyNames = []; - - /// Collection of property names to operate in SQLite vector table. - private readonly List _vectorTableStoragePropertyNames = []; - /// Table name in SQLite for data properties. private readonly string _dataTableName; @@ -100,8 +87,9 @@ public SqliteVectorStoreRecordCollection( this._options = options ?? new(); this._vectorSearchExtensionName = this._options.VectorSearchExtensionName ?? SqliteConstants.VectorSearchExtensionName; - this._dataTableName = this.Name; - this._vectorTableName = GetVectorTableName(this._dataTableName, this._options); + // Escape both table names before exposing them to anything that may build SQL commands. + this._dataTableName = name.EscapeIdentifier(); + this._vectorTableName = GetVectorTableName(name, this._options).EscapeIdentifier(); this._model = new VectorStoreRecordModelBuilder(SqliteConstants.ModelBuildingOptions) .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); @@ -110,32 +98,6 @@ public SqliteVectorStoreRecordCollection( // Populate some collections of properties this._keyStorageName = this._model.KeyProperty.StorageName; - - foreach (var property in this._model.Properties) - { - switch (property) - { - case VectorStoreRecordKeyPropertyModel keyProperty: - this._dataTableProperties.Add(keyProperty); - this._vectorTableProperties.Add(keyProperty); - this._dataTableStoragePropertyNames.Add(keyProperty.StorageName); - this._vectorTableStoragePropertyNames.Add(keyProperty.StorageName); - break; - - case VectorStoreRecordDataPropertyModel dataProperty: - this._dataTableProperties.Add(dataProperty); - this._dataTableStoragePropertyNames.Add(dataProperty.StorageName); - break; - - case VectorStoreRecordVectorPropertyModel vectorProperty: - this._vectorTableProperties.Add(vectorProperty); - this._vectorTableStoragePropertyNames.Add(vectorProperty.StorageName); - break; - - default: - throw new UnreachableException(); - } - } this._mapper = new SqliteVectorStoreRecordMapper(this._model); var connectionStringBuilder = new SqliteConnectionStringBuilder(connectionString); @@ -272,29 +234,51 @@ public async IAsyncEnumerable GetAsync(Expression> SqliteFilterTranslator translator = new(this._model, filter); translator.Translate(appendWhere: false); - IReadOnlyList properties = options.IncludeVectors - ? this._model.Properties - : [this._model.KeyProperty, .. 
this._model.DataProperties]; - using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - using var command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectWhereCommand( - this._model, - connection, - top, - options, - this._dataTableName, - this._model.Properties, - translator.Clause.ToString(), - translator.Parameters); + DbCommand? command = null; - using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); - while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) + if (options.IncludeVectors) + { + command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectLeftJoinCommand( + connection, + this._vectorTableName, + this._dataTableName, + this._keyStorageName, + this._model, + conditions: [], + includeDistance: false, + filterOptions: options, + translator.Clause.ToString(), + translator.Parameters, + top: top, + skip: options.Skip); + } + else { - yield return this.GetAndMapRecord( - "Get", - reader, - properties, - options.IncludeVectors); + command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectDataCommand( + connection, + this._dataTableName, + this._model, + conditions: [], + filterOptions: options, + translator.Clause.ToString(), + translator.Parameters, + top: top, + skip: options.Skip); + } + + using (command) + { + StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = options.IncludeVectors }; + using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + yield return this.GetAndMapRecord( + "Get", + reader, + this._model.Properties, + mapperOptions); + } } } @@ -318,13 +302,14 @@ public async IAsyncEnumerable GetAsync(Expression> /// public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { - using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - Verify.NotNull(keys); - var keysList = keys.Cast().ToList(); + if (keysList.Count == 0) + { + yield break; + } - Verify.True(keysList.Count > 0, "Number of provided keys should be greater than zero."); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); var condition = new SqliteWhereInCondition(this._keyStorageName, keysList) { @@ -340,6 +325,8 @@ public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecor /// public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { + Verify.NotNull(record); + const string OperationName = "Upsert"; using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); @@ -366,9 +353,9 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati /// public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) { - const string OperationName = "UpsertBatch"; + Verify.NotNull(records); - using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); + const string OperationName = "UpsertBatch"; var storageModels = records.Select(record => VectorStoreErrorHandler.RunModelConversion( SqliteConstants.VectorStoreSystemName, @@ -377,8 +364,14 @@ public async Task> UpsertAsync(IEnumerable records, OperationName, () => this._mapper.MapFromDataToStorageModel(record))).ToList(); + if (storageModels.Count == 0) + { + return []; + } + var keys = storageModels.Select(model => model[this._keyStorageName]!).ToList(); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); var condition = new SqliteWhereInCondition(this._keyStorageName, keys); return await this.InternalUpsertBatchAsync(connection, storageModels, condition, cancellationToken).ConfigureAwait(false); @@ -400,12 +393,13 @@ public async Task DeleteAsync(TKey key, CancellationToken cancellationToken = de public async Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) { Verify.NotNull(keys); - - using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - var keysList = keys.Cast().ToList(); + if (keysList.Count == 0) + { + return; + } - Verify.True(keysList.Count > 0, "Number of provided keys should be greater than zero."); + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); var condition = new SqliteWhereInCondition( this._keyStorageName, @@ -444,47 +438,33 @@ private async IAsyncEnumerable> EnumerateAndMapSearc [EnumeratorCancellation] CancellationToken cancellationToken) { const string OperationName = "VectorizedSearch"; - const string DistancePropertyName = "distance"; - - var leftTableProperties = new List { DistancePropertyName }; - - List properties = [this._model.KeyProperty, .. 
this._model.DataProperties]; - - if (searchOptions.IncludeVectors) - { - foreach (var property in this._model.VectorProperties) - { - leftTableProperties.Add(property.StorageName); - } - properties.AddRange(this._model.VectorProperties); - } using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); - using var command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectLeftJoinCommand( + using var command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectLeftJoinCommand( connection, this._vectorTableName, this._dataTableName, this._keyStorageName, - leftTableProperties, - this._dataTableStoragePropertyNames, + this._model, conditions, - extraWhereFilter, - extraParameters, - DistancePropertyName); + includeDistance: true, + extraWhereFilter: extraWhereFilter, + extraParameters: extraParameters); using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = searchOptions.IncludeVectors }; for (var recordCounter = 0; await reader.ReadAsync(cancellationToken).ConfigureAwait(false); recordCounter++) { if (recordCounter >= searchOptions.Skip) { - var score = SqliteVectorStoreRecordPropertyMapping.GetPropertyValue(reader, DistancePropertyName); + var score = SqliteVectorStoreRecordPropertyMapping.GetPropertyValue(reader, SqliteVectorStoreCollectionCommandBuilder.DistancePropertyName); var record = this.GetAndMapRecord( OperationName, reader, - properties, - searchOptions.IncludeVectors); + this._model.Properties, + mapperOptions); yield return new VectorSearchResult(record, score); } @@ -493,7 +473,7 @@ private async IAsyncEnumerable> EnumerateAndMapSearc private async Task InternalCreateCollectionAsync(SqliteConnection connection, bool ifNotExists, CancellationToken cancellationToken) { - List dataTableColumns = SqliteVectorStoreRecordPropertyMapping.GetColumns(this._dataTableProperties); + List dataTableColumns = SqliteVectorStoreRecordPropertyMapping.GetColumns(this._model.Properties, data: true); await this.CreateTableAsync(connection, this._dataTableName, dataTableColumns, ifNotExists, cancellationToken) .ConfigureAwait(false); @@ -504,7 +484,7 @@ await this.CreateTableAsync(connection, this._dataTableName, dataTableColumns, i this._options.VectorSearchExtensionName : SqliteConstants.VectorSearchExtensionName; - List vectorTableColumns = SqliteVectorStoreRecordPropertyMapping.GetColumns(this._vectorTableProperties); + List vectorTableColumns = SqliteVectorStoreRecordPropertyMapping.GetColumns(this._model.Properties, data: false); await this.CreateVirtualTableAsync(connection, this._vectorTableName, vectorTableColumns, ifNotExists, extensionName!, cancellationToken) .ConfigureAwait(false); @@ -549,32 +529,31 @@ private async IAsyncEnumerable InternalGetBatchAsync( bool includeVectors = options?.IncludeVectors is true && this._vectorPropertiesExist; DbCommand command; - List properties = [this._model.KeyProperty, .. 
this._model.DataProperties]; if (includeVectors) { - command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectLeftJoinCommand( + command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectLeftJoinCommand( connection, - this._dataTableName, this._vectorTableName, + this._dataTableName, this._keyStorageName, - this._dataTableStoragePropertyNames, - this._model.VectorProperties.Select(p => p.StorageName).ToList(), - [condition]); - - properties.AddRange(this._model.VectorProperties); + this._model, + [condition], + includeDistance: false); } else { - command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectCommand( + command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectDataCommand( connection, this._dataTableName, - this._dataTableStoragePropertyNames, + this._model, [condition]); } using (command) { + StorageToDataModelMapperOptions mapperOptions = new() { IncludeVectors = includeVectors }; + using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) @@ -582,8 +561,8 @@ private async IAsyncEnumerable InternalGetBatchAsync( yield return this.GetAndMapRecord( OperationName, reader, - properties, - includeVectors); + this._model.Properties, + mapperOptions); } } } @@ -612,8 +591,9 @@ private async Task> InternalUpsertBatchAsync( connection, this._vectorTableName, this._keyStorageName, - this._vectorTableStoragePropertyNames, - storageModels); + this._model.Properties, + storageModels, + data: false); await vectorInsertCommand.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); } @@ -622,8 +602,9 @@ private async Task> InternalUpsertBatchAsync( connection, this._dataTableName, this._keyStorageName, - this._dataTableStoragePropertyNames, + this._model.Properties, storageModels, + data: true, replaceIfExists: true); using var reader = await dataCommand.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); @@ -674,14 +655,17 @@ private TRecord GetAndMapRecord( string operationName, DbDataReader reader, IReadOnlyList properties, - bool includeVectors) + StorageToDataModelMapperOptions options) { var storageModel = new Dictionary(); foreach (var property in properties) { - var propertyValue = SqliteVectorStoreRecordPropertyMapping.GetPropertyValue(reader, property.StorageName, property.Type); - storageModel.Add(property.StorageName, propertyValue); + if (options.IncludeVectors || property is not VectorStoreRecordVectorPropertyModel) + { + var propertyValue = SqliteVectorStoreRecordPropertyMapping.GetPropertyValue(reader, property.StorageName, property.Type); + storageModel.Add(property.StorageName, propertyValue); + } } return VectorStoreErrorHandler.RunModelConversion( @@ -689,7 +673,7 @@ private TRecord GetAndMapRecord( this._collectionMetadata.VectorStoreName, this.Name, operationName, - () => this._mapper.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = includeVectors })); + () => this._mapper.MapFromStorageToDataModel(storageModel, options)); } private async Task RunOperationAsync(string operationName, Func> operation) diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs index 5081f8fc8f02..2dc0c7369a5e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs +++ 
b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordPropertyMapping.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Data.Common; +using System.Diagnostics; using System.Runtime.InteropServices; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; @@ -36,7 +37,7 @@ public static ReadOnlyMemory MapVectorForDataModel(byte[] byteArray) return new ReadOnlyMemory(array); } - public static List GetColumns(List properties) + public static List GetColumns(IReadOnlyList properties, bool data) { const string DistanceMetricConfigurationName = "distance_metric"; @@ -44,22 +45,40 @@ public static List GetColumns(List foreach (var property in properties) { - var isPrimary = property is VectorStoreRecordKeyPropertyModel; + var isPrimary = false; string propertyType; Dictionary? configuration = null; if (property is VectorStoreRecordVectorPropertyModel vectorProperty) { + if (data) + { + continue; + } + propertyType = GetStorageVectorPropertyType(vectorProperty); configuration = new() { [DistanceMetricConfigurationName] = GetDistanceMetric(vectorProperty) }; } + else if (property is VectorStoreRecordDataPropertyModel dataProperty) + { + if (!data) + { + continue; + } + + propertyType = GetStorageDataPropertyType(property); + } else { + // The Key column is included in both the Vector and Data tables. + Debug.Assert(property is VectorStoreRecordKeyPropertyModel, "property is VectorStoreRecordKeyPropertyModel"); + propertyType = GetStorageDataPropertyType(property); + isPrimary = true; } var column = new SqliteColumn(property.StorageName, propertyType, isPrimary) diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteConditionsTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteConditionsTests.cs index 7f02575e9b88..aab78c2150f4 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteConditionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteConditionsTests.cs @@ -22,9 +22,9 @@ public void SqliteWhereEqualsConditionWithoutParameterNamesThrowsException() } [Theory] - [InlineData(null, "Name = @Name0")] - [InlineData("", "Name = @Name0")] - [InlineData("TableName", "TableName.Name = @Name0")] + [InlineData(null, "\"Name\" = @Name0")] + [InlineData("", "\"Name\" = @Name0")] + [InlineData("TableName", "\"TableName\".\"Name\" = @Name0")] public void SqliteWhereEqualsConditionBuildsValidQuery(string? tableName, string expectedQuery) { // Arrange @@ -48,9 +48,9 @@ public void SqliteWhereInConditionWithoutParameterNamesThrowsException() } [Theory] - [InlineData(null, "Name IN (@Name0, @Name1)")] - [InlineData("", "Name IN (@Name0, @Name1)")] - [InlineData("TableName", "TableName.Name IN (@Name0, @Name1)")] + [InlineData(null, "\"Name\" IN (@Name0, @Name1)")] + [InlineData("", "\"Name\" IN (@Name0, @Name1)")] + [InlineData("TableName", "\"TableName\".\"Name\" IN (@Name0, @Name1)")] public void SqliteWhereInConditionBuildsValidQuery(string? tableName, string expectedQuery) { // Arrange @@ -74,9 +74,9 @@ public void SqliteWhereMatchConditionWithoutParameterNamesThrowsException() } [Theory] - [InlineData(null, "Name MATCH @Name0")] - [InlineData("", "Name MATCH @Name0")] - [InlineData("TableName", "TableName.Name MATCH @Name0")] + [InlineData(null, "\"Name\" MATCH @Name0")] + [InlineData("", "\"Name\" MATCH @Name0")] + [InlineData("TableName", "\"TableName\".\"Name\" MATCH @Name0")] public void SqliteWhereMatchConditionBuildsValidQuery(string?
tableName, string expectedQuery) { // Arrange diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs index 7ad790f91089..8227a85aa778 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs @@ -3,6 +3,8 @@ using System; using System.Collections.Generic; using Microsoft.Data.Sqlite; +using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; using Microsoft.SemanticKernel.Connectors.Sqlite; using Xunit; @@ -60,8 +62,8 @@ public void ItBuildsCreateTableCommand(bool ifNotExists) Assert.Equal(ifNotExists, command.CommandText.Contains("IF NOT EXISTS")); - Assert.Contains("Column1 Type1 PRIMARY KEY", command.CommandText); - Assert.Contains("Column2 Type2 distance_metric=l2", command.CommandText); + Assert.Contains("\"Column1\" Type1 PRIMARY KEY", command.CommandText); + Assert.Contains("\"Column2\" Type2 distance_metric=l2", command.CommandText); } [Theory] @@ -103,7 +105,7 @@ public void ItBuildsDropTableCommand() var command = SqliteVectorStoreCollectionCommandBuilder.BuildDropTableCommand(this._connection, TableName); // Assert - Assert.Equal("DROP TABLE IF EXISTS [TestTable];", command.CommandText); + Assert.Equal("DROP TABLE IF EXISTS \"TestTable\";", command.CommandText); } [Theory] @@ -115,7 +117,13 @@ public void ItBuildsInsertCommand(bool replaceIfExists) const string TableName = "TestTable"; const string RowIdentifier = "Id"; - var columnNames = new List { "Id", "Name", "Age", "Address" }; + VectorStoreRecordPropertyModel[] properties = + [ + new VectorStoreRecordKeyPropertyModel("Id", typeof(string)), + new VectorStoreRecordDataPropertyModel("Name", typeof(string)), + new VectorStoreRecordDataPropertyModel("Age", typeof(int)), + new VectorStoreRecordDataPropertyModel("Address", typeof(string)), + ]; var records = new List> { new() { ["Id"] = "IdValue1", ["Name"] = "NameValue1", ["Age"] = "AgeValue1", ["Address"] = "AddressValue1" }, @@ -127,14 +135,15 @@ public void ItBuildsInsertCommand(bool replaceIfExists) this._connection, TableName, RowIdentifier, - columnNames, + properties, records, + data: true, replaceIfExists); // Assert Assert.Equal(replaceIfExists, command.CommandText.Contains("OR REPLACE")); - Assert.Contains($"INTO {TableName} (Id, Name, Age, Address)", command.CommandText); + Assert.Contains($"INTO \"{TableName}\" (\"Id\", \"Name\", \"Age\", \"Address\")", command.CommandText); Assert.Contains("VALUES (@Id0, @Name0, @Age0, @Address0)", command.CommandText); Assert.Contains("VALUES (@Id1, @Name1, @Age1, @Address1)", command.CommandText); Assert.Contains("RETURNING Id", command.CommandText); @@ -173,24 +182,35 @@ public void ItBuildsSelectCommand(string? 
orderByPropertyName) // Arrange const string TableName = "TestTable"; - var columnNames = new List { "Id", "Name", "Age", "Address" }; + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("Id", typeof(string)), + new VectorStoreRecordDataProperty("Name", typeof(string)), + new VectorStoreRecordDataProperty("Age", typeof(string)), + new VectorStoreRecordDataProperty("Address", typeof(string)), + ]); var conditions = new List { new SqliteWhereEqualsCondition("Name", "NameValue"), new SqliteWhereInCondition("Age", [10, 20, 30]), }; + GetFilteredRecordOptions> filterOptions = new(); + if (!string.IsNullOrWhiteSpace(orderByPropertyName)) + { + filterOptions.OrderBy.Ascending(record => record[orderByPropertyName]); + } // Act - var command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectCommand(this._connection, TableName, columnNames, conditions, orderByPropertyName); + var command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectDataCommand>(this._connection, TableName, model, conditions, filterOptions); // Assert - Assert.Contains("SELECT Id, Name, Age, Address", command.CommandText); - Assert.Contains($"FROM {TableName}", command.CommandText); + Assert.Contains("SELECT \"Id\",\"Name\",\"Age\",\"Address\"", command.CommandText); + Assert.Contains($"FROM \"{TableName}\"", command.CommandText); - Assert.Contains("Name = @Name0", command.CommandText); - Assert.Contains("Age IN (@Age0, @Age1, @Age2)", command.CommandText); + Assert.Contains("\"Name\" = @Name0", command.CommandText); + Assert.Contains("\"Age\" IN (@Age0, @Age1, @Age2)", command.CommandText); - Assert.Equal(!string.IsNullOrWhiteSpace(orderByPropertyName), command.CommandText.Contains($"ORDER BY {orderByPropertyName}")); + Assert.Equal(!string.IsNullOrWhiteSpace(orderByPropertyName), command.CommandText.Contains($"ORDER BY \"{orderByPropertyName}\"")); Assert.Equal("@Name0", command.Parameters[0].ParameterName); Assert.Equal("NameValue", command.Parameters[0].Value); @@ -212,42 +232,50 @@ public void ItBuildsSelectCommand(string? orderByPropertyName) public void ItBuildsSelectLeftJoinCommand(string? 
orderByPropertyName) { // Arrange - const string LeftTable = "LeftTable"; - const string RightTable = "RightTable"; + const string DataTable = "DataTable"; + const string VectorTable = "VectorTable"; const string JoinColumnName = "Id"; - var leftTablePropertyNames = new List { "Id", "Name" }; - var rightTablePropertyNames = new List { "Age", "Address" }; + var model = BuildModel( + [ + new VectorStoreRecordKeyProperty("Id", typeof(string)), + new VectorStoreRecordDataProperty("Name", typeof(string)), + new VectorStoreRecordVectorProperty("Age", typeof(ReadOnlyMemory), 10), + new VectorStoreRecordVectorProperty("Address", typeof(ReadOnlyMemory), 10), + ]); var conditions = new List { new SqliteWhereEqualsCondition("Name", "NameValue"), new SqliteWhereInCondition("Age", [10, 20, 30]), }; + GetFilteredRecordOptions> filterOptions = new(); + if (!string.IsNullOrWhiteSpace(orderByPropertyName)) + { + filterOptions.OrderBy.Ascending(record => record[orderByPropertyName]); + } // Act var command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectLeftJoinCommand( this._connection, - LeftTable, - RightTable, + VectorTable, + DataTable, JoinColumnName, - leftTablePropertyNames, - rightTablePropertyNames, + model, conditions, - extraWhereFilter: null, - extraParameters: null, - orderByPropertyName); + true, + filterOptions); // Assert - Assert.Contains("SELECT LeftTable.Id, LeftTable.Name, RightTable.Age, RightTable.Address", command.CommandText); - Assert.Contains("FROM LeftTable", command.CommandText); + Assert.Contains("SELECT \"DataTable\".\"Id\",\"DataTable\".\"Name\",\"VectorTable\".\"Age\",\"VectorTable\".\"Address\"", command.CommandText); + Assert.Contains("FROM \"VectorTable\"", command.CommandText); - Assert.Contains("LEFT JOIN RightTable ON LeftTable.Id = RightTable.Id", command.CommandText); + Assert.Contains("LEFT JOIN \"DataTable\" ON \"VectorTable\".\"Id\" = \"DataTable\".\"Id\"", command.CommandText); - Assert.Contains("Name = @Name0", command.CommandText); - Assert.Contains("Age IN (@Age0, @Age1, @Age2)", command.CommandText); + Assert.Contains("\"Name\" = @Name0", command.CommandText); + Assert.Contains("\"Age\" IN (@Age0, @Age1, @Age2)", command.CommandText); - Assert.Equal(!string.IsNullOrWhiteSpace(orderByPropertyName), command.CommandText.Contains($"ORDER BY {orderByPropertyName}")); + Assert.Equal(!string.IsNullOrWhiteSpace(orderByPropertyName), command.CommandText.Contains($"ORDER BY \"DataTable\".\"{orderByPropertyName}\"")); Assert.Equal("@Name0", command.Parameters[0].ParameterName); Assert.Equal("NameValue", command.Parameters[0].Value); @@ -278,10 +306,10 @@ public void ItBuildsDeleteCommand() var command = SqliteVectorStoreCollectionCommandBuilder.BuildDeleteCommand(this._connection, TableName, conditions); // Assert - Assert.Contains("DELETE FROM [TestTable]", command.CommandText); + Assert.Contains("DELETE FROM \"TestTable\"", command.CommandText); - Assert.Contains("Name = @Name0", command.CommandText); - Assert.Contains("Age IN (@Age0, @Age1, @Age2)", command.CommandText); + Assert.Contains("\"Name\" = @Name0", command.CommandText); + Assert.Contains("\"Age\" IN (@Age0, @Age1, @Age2)", command.CommandText); Assert.Equal("@Name0", command.Parameters[0].ParameterName); Assert.Equal("NameValue", command.Parameters[0].Value); @@ -301,4 +329,10 @@ public void Dispose() this._command.Dispose(); this._connection.Dispose(); } + + private static VectorStoreRecordModel BuildModel(List properties) + => new VectorStoreRecordModelBuilder(SqliteConstants.ModelBuildingOptions) 
+ .Build( + typeof(Dictionary), + new() { Properties = properties }); } diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs index f3891f326ee9..b2036be6fcc7 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordPropertyMappingTests.cs @@ -52,8 +52,10 @@ public void MapVectorForDataModelReturnsReadOnlyMemory() Assert.Equal(vector.Span.ToArray(), dataModelVector.Span.ToArray()); } - [Fact] - public void GetColumnsReturnsCollectionOfColumns() + [Theory] + [InlineData(true)] + [InlineData(false)] + public void GetColumnsReturnsCollectionOfColumns(bool data) { // Arrange var properties = new List() @@ -69,7 +71,7 @@ public void GetColumnsReturnsCollectionOfColumns() }; // Act - var columns = SqliteVectorStoreRecordPropertyMapping.GetColumns(properties); + var columns = SqliteVectorStoreRecordPropertyMapping.GetColumns(properties, data: data); // Assert Assert.Equal("Key", columns[0].Name); @@ -78,18 +80,22 @@ public void GetColumnsReturnsCollectionOfColumns() Assert.Null(columns[0].Configuration); Assert.False(columns[0].HasIndex); - Assert.Equal("my_data", columns[1].Name); - Assert.Equal("INTEGER", columns[1].Type); - Assert.False(columns[1].IsPrimary); - Assert.Null(columns[1].Configuration); - Assert.True(columns[1].HasIndex); - - Assert.Equal("Vector", columns[2].Name); - Assert.Equal("FLOAT[4]", columns[2].Type); - Assert.False(columns[2].IsPrimary); - Assert.NotNull(columns[2].Configuration); - Assert.False(columns[2].HasIndex); - - Assert.Equal("l1", columns[2].Configuration!["distance_metric"]); + if (data) + { + Assert.Equal("my_data", columns[1].Name); + Assert.Equal("INTEGER", columns[1].Type); + Assert.False(columns[1].IsPrimary); + Assert.Null(columns[1].Configuration); + Assert.True(columns[1].HasIndex); + } + else + { + Assert.Equal("Vector", columns[1].Name); + Assert.Equal("FLOAT[4]", columns[1].Type); + Assert.False(columns[1].IsPrimary); + Assert.NotNull(columns[1].Configuration); + Assert.False(columns[1].HasIndex); + Assert.Equal("l1", columns[1].Configuration!["distance_metric"]); + } } } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs index 1cac1ace9576..6eae33cc6ddc 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs @@ -295,7 +295,9 @@ private void SetPropertyStorageName(VectorStoreRecordPropertyModel property, str return; } - property.StorageName = storageName; + property.StorageName = this.Options.EscapeIdentifier is not null + ? 
this.Options.EscapeIdentifier(storageName) + : storageName; } /// diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuildingOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuildingOptions.cs index 8ef6e5779ce9..1e5fd8b230ca 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuildingOptions.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuildingOptions.cs @@ -58,4 +58,9 @@ public sealed class VectorStoreRecordModelBuildingOptions /// When set, the model builder will manage the key storage name, and users may not customize it. /// public string? ReservedKeyStorageName { get; init; } + + /// + /// A method for escaping storage names. + /// + public Func? EscapeIdentifier { get; init; } } diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteBatchConformanceTests.cs new file mode 100644 index 000000000000..21893736060e --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteBatchConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using SqliteIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace SqliteIntegrationTests.CRUD; + +public class SqliteBatchConformanceTests_string(SqliteSimpleModelFixture fixture) + : BatchConformanceTests(fixture), IClassFixture> +{ +} + +public class SqliteBatchConformanceTests_ulong(SqliteSimpleModelFixture fixture) + : BatchConformanceTests(fixture), IClassFixture> +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteRecordConformanceTests.cs new file mode 100644 index 000000000000..2d5f95f8592c --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteRecordConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using SqliteIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace SqliteIntegrationTests.CRUD; + +public class SqliteRecordConformanceTests_string(SqliteSimpleModelFixture fixture) + : RecordConformanceTests(fixture), IClassFixture> +{ +} + +public class SqliteRecordConformanceTests_ulong(SqliteSimpleModelFixture fixture) + : RecordConformanceTests(fixture), IClassFixture> +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteSimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteSimpleModelFixture.cs new file mode 100644 index 000000000000..70550525aa74 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteSimpleModelFixture.cs @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using VectorDataSpecificationTests.Support; + +namespace SqliteIntegrationTests.Support; + +public class SqliteSimpleModelFixture : SimpleModelFixture + where TKey : notnull +{ + public override TestStore TestStore => SqliteTestStore.Instance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs index 9b025c66610f..3ea3b05d69d7 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs @@ -16,6 +16,8 @@ internal sealed class SqliteTestStore : TestStore public override IVectorStore DefaultVectorStore => this._defaultVectorStore ?? throw new InvalidOperationException("Call InitializeAsync() first"); + public override string DefaultDistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; + private SqliteTestStore() { } From 4a92af9249bcba334310e7cf7e38118c69109535 Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Tue, 22 Apr 2025 11:20:13 +0100 Subject: [PATCH 50/63] .Net: Add tests for CRUD with no data fields and bug/test fixes (#11641) ### Motivation and Context Adding tests to check for the failure reported in: #11274 The code for this area has been changed, and the issue is no longer reproducible. ### Description - Added tests to verify CRUD operations when no data fields are defined on the schema - Add missing CRUD test for Qdrant - Fix bugs - Fix tests that were broken ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- .../QdrantVectorStoreRecordCollection.cs | 10 + .../WeaviateModelBuilder.cs | 2 +- .../WeaviateVectorStore.cs | 2 +- .../AzureAISearchNoDataConformanceTests.cs | 24 +++ .../AzureAISearchNoVectorConformanceTests.cs | 7 + .../CosmosMongoDBNoDataConformanceTests.cs | 17 ++ .../CRUD/CosmosNoSQLNoDataConformanceTests.cs | 17 ++ .../CRUD/InMemoryBatchConformanceTests.cs | 2 +- .../InMemoryDynamicRecordConformanceTests.cs | 2 +- .../CRUD/InMemoryNoDataConformanceTests.cs | 17 ++ .../CRUD/InMemoryRecordConformanceTests.cs | 2 +- .../CRUD/MongoDBNoDataConformanceTests.cs | 17 ++ .../CRUD/PineconeNoDataConformanceTests.cs | 17 ++ .../CRUD/PostgresNoDataConformanceTests.cs | 17 ++ .../CRUD/QdrantBatchConformanceTests.cs | 17 ++ .../QdrantDynamicDataModelConformanceTests.cs | 17 ++ .../CRUD/QdrantNoDataConformanceTests.cs | 26 +++ .../CRUD/QdrantRecordConformanceTests.cs | 17 ++ ...cs => QdrantCollectionConformanceTests.cs} | 5 + ...CollectionConformanceTests_NamedVectors.cs | 12 -- .../Filter/QdrantBasicFilterTests.cs | 3 - .../Filter/QdrantBasicQueryTests.cs | 3 - .../QdrantNamedDynamicDataModelFixture.cs | 10 + .../Support/QdrantNamedSimpleModelFixture.cs | 10 + .../Support/QdrantTestStore.cs | 10 + .../QdrantUnnamedDynamicDataModelFixture.cs | 10 + .../QdrantUnnamedSimpleModelFixture.cs | 10 + .../RedisHashSetNoDataConformanceTests.cs | 40 ++++ .../CRUD/RedisJsonNoDataConformanceTests.cs | 17 ++
.../Support/RedisTestStore.cs | 2 + .../CRUD/SqlServerBatchConformanceTests.cs | 2 +- .../CRUD/SqlServerNoDataConformanceTests.cs | 17 ++ .../CRUD/SqliteNoDataConformanceTests.cs | 17 ++ .../CRUD/BatchConformanceTests.cs | 8 +- .../CRUD/DynamicDataModelConformanceTests.cs | 19 +- .../CRUD/NoDataConformanceTests.cs | 184 ++++++++++++++++++ .../CRUD/NoVectorConformanceTests.cs | 4 +- .../CRUD/RecordConformanceTests.cs | 8 +- .../Models/SimpleRecord.cs | 9 +- .../Support/TestStore.cs | 5 + .../CRUD/WeaviateNoDataConformanceTests.cs | 36 ++++ .../CRUD/WeaviateNoVectorConformanceTests.cs | 6 +- 42 files changed, 632 insertions(+), 45 deletions(-) create mode 100644 dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoDataConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBNoDataConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoDataConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryNoDataConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBNoDataConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeNoDataConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresNoDataConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantBatchConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantDynamicDataModelConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantNoDataConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantRecordConformanceTests.cs rename dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Collections/{QdrantCollectionConformanceTests_UnnamedVector.cs => QdrantCollectionConformanceTests.cs} (67%) delete mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Collections/QdrantCollectionConformanceTests_NamedVectors.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantNamedDynamicDataModelFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantNamedSimpleModelFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantUnnamedDynamicDataModelFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantUnnamedSimpleModelFixture.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetNoDataConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisJsonNoDataConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerNoDataConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteNoDataConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoDataConformanceTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoDataConformanceTests.cs diff --git 
a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index c4a5bea72d7f..f61f73069170 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -391,6 +391,11 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationTo } } + if (keyList is { Count: 0 }) + { + return Task.CompletedTask; + } + return this.RunOperationAsync( DeleteName, () => keyList switch @@ -450,6 +455,11 @@ public async Task> UpsertAsync(IEnumerable records, UpsertName, () => records.Select(this._mapper.MapFromDataToStorageModel).ToList()); + if (pointStructs is { Count: 0 }) + { + return Array.Empty(); + } + // Upsert. await this.RunOperationAsync( UpsertName, diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateModelBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateModelBuilder.cs index 680d9ad3d40c..fcc645826465 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateModelBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateModelBuilder.cs @@ -12,7 +12,7 @@ private static VectorStoreRecordModelBuildingOptions GetModelBuildingOptions(boo { return new() { - RequiresAtLeastOneVector = false, + RequiresAtLeastOneVector = !hasNamedVectors, SupportsMultipleKeys = false, SupportsMultipleVectors = hasNamedVectors, diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs index 83873f1eca00..9daedf433330 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs @@ -29,7 +29,7 @@ public sealed class WeaviateVectorStore : IVectorStore private readonly WeaviateVectorStoreOptions _options; /// A general purpose definition that can be used to construct a collection when needing to proxy schema agnostic operations. - private static readonly VectorStoreRecordDefinition s_generalPurposeDefinition = new() { Properties = [new VectorStoreRecordKeyProperty("Key", typeof(Guid))] }; + private static readonly VectorStoreRecordDefinition s_generalPurposeDefinition = new() { Properties = [new VectorStoreRecordKeyProperty("Key", typeof(Guid)), new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 1)] }; /// /// Initializes a new instance of the class. diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoDataConformanceTests.cs new file mode 100644 index 000000000000..1751a4fd02fd --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoDataConformanceTests.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Text.RegularExpressions; +using AzureAISearchIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace AzureAISearchIntegrationTests.CRUD; + +public class AzureAISearchNoDataConformanceTests(AzureAISearchNoDataConformanceTests.Fixture fixture) + : NoDataConformanceTests(fixture), IClassFixture +{ +#pragma warning disable CA1308 // Normalize strings to uppercase + private static readonly string _testIndexPostfix = new Regex("[^a-zA-Z0-9]").Replace(Environment.MachineName.ToLowerInvariant(), ""); +#pragma warning restore CA1308 // Normalize strings to uppercase + + public new class Fixture : NoDataConformanceTests.Fixture + { + protected override string CollectionName => "nodata-" + _testIndexPostfix; + + public override TestStore TestStore => AzureAISearchTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoVectorConformanceTests.cs index 9d16e29ed86c..c14d6d622665 100644 --- a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoVectorConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoVectorConformanceTests.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Text.RegularExpressions; using AzureAISearchIntegrationTests.Support; using VectorDataSpecificationTests.CRUD; using VectorDataSpecificationTests.Support; @@ -10,8 +11,14 @@ namespace AzureAISearchIntegrationTests.CRUD; public class AzureAISearchNoVectorConformanceTests(AzureAISearchNoVectorConformanceTests.Fixture fixture) : NoVectorConformanceTests(fixture), IClassFixture { +#pragma warning disable CA1308 // Normalize strings to uppercase + private static readonly string _testIndexPostfix = new Regex("[^a-zA-Z0-9]").Replace(Environment.MachineName.ToLowerInvariant(), ""); +#pragma warning restore CA1308 // Normalize strings to uppercase + public new class Fixture : NoVectorConformanceTests.Fixture { + protected override string CollectionName => "novector-" + _testIndexPostfix; + public override TestStore TestStore => AzureAISearchTestStore.Instance; } } diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBNoDataConformanceTests.cs new file mode 100644 index 000000000000..1cdb1ed6d4cc --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CRUD/CosmosMongoDBNoDataConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using CosmosMongoDBIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace CosmosMongoDBIntegrationTests.CRUD; + +public class CosmosMongoDBNoDataConformanceTests(CosmosMongoDBNoDataConformanceTests.Fixture fixture) + : NoDataConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoDataConformanceTests.Fixture + { + public override TestStore TestStore => CosmosMongoDBTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoDataConformanceTests.cs new file mode 100644 index 000000000000..459b2f6344c7 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoDataConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using CosmosNoSQLIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace CosmosNoSQLIntegrationTests.CRUD; + +public class CosmosNoSQLNoDataConformanceTests(CosmosNoSQLNoDataConformanceTests.Fixture fixture) + : NoDataConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoDataConformanceTests.Fixture + { + public override TestStore TestStore => CosmosNoSqlTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryBatchConformanceTests.cs index 2b46a4f8a947..f778fbb7154d 100644 --- a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryBatchConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryBatchConformanceTests.cs @@ -19,7 +19,7 @@ public override async Task GetBatchAsync_WithoutVectors() foreach (var record in expectedRecords) { - record.AssertEqual(this.GetRecord(received, record.Id), includeVectors: true); + record.AssertEqual(this.GetRecord(received, record.Id), includeVectors: true, fixture.TestStore.VectorsComparable); } } } diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryDynamicRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryDynamicRecordConformanceTests.cs index 1250c2b29531..940d8686e8ea 100644 --- a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryDynamicRecordConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryDynamicRecordConformanceTests.cs @@ -19,6 +19,6 @@ public override async Task GetAsync_WithoutVectors() (int)expectedRecord[DynamicDataModelFixture.KeyPropertyName]!, new() { IncludeVectors = false }); - AssertEquivalent(expectedRecord, received, includeVectors: true); + AssertEquivalent(expectedRecord, received, includeVectors: true, fixture.TestStore.VectorsComparable); } } diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryNoDataConformanceTests.cs new file mode 100644 index 000000000000..cbd60656cecd --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryNoDataConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) 
Microsoft. All rights reserved. + +using InMemoryIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace InMemoryIntegrationTests.CRUD; + +public class InMemoryNoDataConformanceTests(InMemoryNoDataConformanceTests.Fixture fixture) + : NoDataConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoDataConformanceTests.Fixture + { + public override TestStore TestStore => InMemoryTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryRecordConformanceTests.cs index de7575c6f9e9..02534141ec76 100644 --- a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryRecordConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/CRUD/InMemoryRecordConformanceTests.cs @@ -15,6 +15,6 @@ public override async Task GetAsync_WithoutVectors() var expectedRecord = fixture.TestData[0]; var received = await fixture.Collection.GetAsync(expectedRecord.Id, new() { IncludeVectors = false }); - expectedRecord.AssertEqual(received, includeVectors: true); + expectedRecord.AssertEqual(received, includeVectors: true, fixture.TestStore.VectorsComparable); } } diff --git a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBNoDataConformanceTests.cs new file mode 100644 index 000000000000..97a11c6b4624 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/CRUD/MongoDBNoDataConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using MongoDBIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace MongoDBIntegrationTests.CRUD; + +public class MongoDBNoDataConformanceTests(MongoDBNoDataConformanceTests.Fixture fixture) + : NoDataConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoDataConformanceTests.Fixture + { + public override TestStore TestStore => MongoDBTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeNoDataConformanceTests.cs new file mode 100644 index 000000000000..d987ebc26907 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/CRUD/PineconeNoDataConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using PineconeIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace PineconeIntegrationTests.CRUD; + +public class PineconeNoDataConformanceTests(PineconeNoDataConformanceTests.Fixture fixture) + : NoDataConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoDataConformanceTests.Fixture + { + public override TestStore TestStore => PineconeTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresNoDataConformanceTests.cs new file mode 100644 index 000000000000..ad136ca85d23 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/CRUD/PostgresNoDataConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using PostgresIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace PostgresIntegrationTests.CRUD; + +public class PostgresNoDataConformanceTests(PostgresNoDataConformanceTests.Fixture fixture) + : NoDataConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoDataConformanceTests.Fixture + { + public override TestStore TestStore => PostgresTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantBatchConformanceTests.cs new file mode 100644 index 000000000000..ccabbb8697f4 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantBatchConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using QdrantIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace QdrantIntegrationTests.CRUD; + +public class QdrantBatchConformanceTests_NamedVectors(QdrantNamedSimpleModelFixture fixture) + : BatchConformanceTests(fixture), IClassFixture +{ +} + +public class QdrantBatchConformanceTests_UnnamedVector(QdrantUnnamedSimpleModelFixture fixture) + : BatchConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantDynamicDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantDynamicDataModelConformanceTests.cs new file mode 100644 index 000000000000..37db1c56e0cc --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantDynamicDataModelConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using QdrantIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace QdrantIntegrationTests.CRUD; + +public class QdrantDynamicDataModelConformanceTests_NamedVectors(QdrantNamedDynamicDataModelFixture fixture) + : DynamicDataModelConformanceTests(fixture), IClassFixture +{ +} + +public class QdrantDynamicDataModelConformanceTests_UnnamedVector(QdrantUnnamedDynamicDataModelFixture fixture) + : DynamicDataModelConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantNoDataConformanceTests.cs new file mode 100644 index 000000000000..61437f987b83 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantNoDataConformanceTests.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft. All rights reserved. + +using QdrantIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace QdrantIntegrationTests.CRUD; + +public class QdrantNoDataConformanceTests_NamedVectors(QdrantNoDataConformanceTests_NamedVectors.Fixture fixture) + : NoDataConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoDataConformanceTests.Fixture + { + public override TestStore TestStore => QdrantTestStore.NamedVectorsInstance; + } +} + +public class QdrantNoDataConformanceTests_UnnamedVectors(QdrantNoDataConformanceTests_UnnamedVectors.Fixture fixture) + : NoDataConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoDataConformanceTests.Fixture + { + public override TestStore TestStore => QdrantTestStore.UnnamedVectorInstance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantRecordConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantRecordConformanceTests.cs new file mode 100644 index 000000000000..210b980fc6b7 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/CRUD/QdrantRecordConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using QdrantIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using Xunit; + +namespace QdrantIntegrationTests.CRUD; + +public class QdrantRecordConformanceTests_NamedVectors(QdrantNamedSimpleModelFixture fixture) + : RecordConformanceTests(fixture), IClassFixture +{ +} + +public class QdrantRecordConformanceTests_UnnamedVectors(QdrantUnnamedSimpleModelFixture fixture) + : RecordConformanceTests(fixture), IClassFixture +{ +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Collections/QdrantCollectionConformanceTests_UnnamedVector.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Collections/QdrantCollectionConformanceTests.cs similarity index 67% rename from dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Collections/QdrantCollectionConformanceTests_UnnamedVector.cs rename to dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Collections/QdrantCollectionConformanceTests.cs index 5471d83c8996..518331a721f2 100644 --- a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Collections/QdrantCollectionConformanceTests_UnnamedVector.cs +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Collections/QdrantCollectionConformanceTests.cs @@ -6,6 +6,11 @@ namespace QdrantIntegrationTests.Collections; +public class QdrantCollectionConformanceTests_NamedVectors(QdrantNamedVectorsFixture fixture) + : CollectionConformanceTests(fixture), IClassFixture +{ +} + public class QdrantCollectionConformanceTests_UnnamedVector(QdrantUnnamedVectorFixture fixture) : CollectionConformanceTests(fixture), IClassFixture { diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Collections/QdrantCollectionConformanceTests_NamedVectors.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Collections/QdrantCollectionConformanceTests_NamedVectors.cs deleted file mode 100644 index 7f4d0f138907..000000000000 --- a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Collections/QdrantCollectionConformanceTests_NamedVectors.cs +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using QdrantIntegrationTests.Support; -using VectorDataSpecificationTests.Collections; -using Xunit; - -namespace QdrantIntegrationTests.Collections; - -public class QdrantCollectionConformanceTests_NamedVectors(QdrantNamedVectorsFixture fixture) - : CollectionConformanceTests(fixture), IClassFixture -{ -} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicFilterTests.cs index 2ba3b454231b..bc2e23af9688 100644 --- a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicFilterTests.cs @@ -13,8 +13,5 @@ public class QdrantBasicFilterTests(QdrantBasicFilterTests.Fixture fixture) public new class Fixture : BasicFilterTests.Fixture { public override TestStore TestStore => QdrantTestStore.NamedVectorsInstance; - - // Qdrant doesn't support the default Flat index kind - protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.Hnsw; } } diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicQueryTests.cs index ad7d6116cc21..bb6d77864f31 100644 --- a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Filter/QdrantBasicQueryTests.cs @@ -13,8 +13,5 @@ public class QdrantBasicQueryTests(QdrantBasicQueryTests.Fixture fixture) public new class Fixture : BasicQueryTests.QueryFixture { public override TestStore TestStore => QdrantTestStore.NamedVectorsInstance; - - // Qdrant doesn't support the default Flat index kind - protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.Hnsw; } } diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantNamedDynamicDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantNamedDynamicDataModelFixture.cs new file mode 100644 index 000000000000..851f76450d1b --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantNamedDynamicDataModelFixture.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Support; + +namespace QdrantIntegrationTests.Support; + +public class QdrantNamedDynamicDataModelFixture : DynamicDataModelFixture +{ + public override TestStore TestStore => QdrantTestStore.NamedVectorsInstance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantNamedSimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantNamedSimpleModelFixture.cs new file mode 100644 index 000000000000..77a241f94b4e --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantNamedSimpleModelFixture.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using VectorDataSpecificationTests.Support; + +namespace QdrantIntegrationTests.Support; + +public class QdrantNamedSimpleModelFixture : SimpleModelFixture +{ + public override TestStore TestStore => QdrantTestStore.NamedVectorsInstance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantTestStore.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantTestStore.cs index c7148c291de4..c0d4b67768a6 100644 --- a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantTestStore.cs @@ -29,6 +29,16 @@ public QdrantVectorStore GetVectorStore(QdrantVectorStoreOptions options) private QdrantTestStore(bool hasNamedVectors) => this._hasNamedVectors = hasNamedVectors; + public override string DefaultIndexKind => IndexKind.Hnsw; + + /// + /// Qdrant normalizes vectors on upsert, so we cannot compare + /// what we upserted and what we retrieve, we can only check + /// that a vector was returned. + /// https://github.com/qdrant/qdrant-client/discussions/727 + /// + public override bool VectorsComparable => false; + protected override async Task StartAsync() { await this._container.StartAsync(); diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantUnnamedDynamicDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantUnnamedDynamicDataModelFixture.cs new file mode 100644 index 000000000000..6771c223356e --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantUnnamedDynamicDataModelFixture.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Support; + +namespace QdrantIntegrationTests.Support; + +public class QdrantUnnamedDynamicDataModelFixture : DynamicDataModelFixture +{ + public override TestStore TestStore => QdrantTestStore.UnnamedVectorInstance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantUnnamedSimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantUnnamedSimpleModelFixture.cs new file mode 100644 index 000000000000..64159ea93901 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantUnnamedSimpleModelFixture.cs @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.Support; + +namespace QdrantIntegrationTests.Support; + +public class QdrantUnnamedSimpleModelFixture : SimpleModelFixture +{ + public override TestStore TestStore => QdrantTestStore.UnnamedVectorInstance; +} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetNoDataConformanceTests.cs new file mode 100644 index 000000000000..35be99c1e93f --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisHashSetNoDataConformanceTests.cs @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using RedisIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace RedisIntegrationTests.CRUD; + +public class RedisHashSetNoDataConformanceTests(RedisHashSetNoDataConformanceTests.Fixture fixture) + : NoDataConformanceTests(fixture), IClassFixture +{ + [ConditionalFact] + public override async Task GetAsyncReturnsInsertedRecord_WithoutVectors() + { + var expectedRecord = fixture.TestData[0]; + + // When using HashSets there is no way to distinguish between no fields being returned and + // the record not existing. + Assert.Null(await fixture.Collection.GetAsync(expectedRecord.Id, new() { IncludeVectors = false })); + } + + [ConditionalFact(Skip = "When using HashSets there is no way to distinguish between no fields being returned and the record not existing so this test isn't useful.")] + public override Task UpsertAsyncCanInsertNewRecord_WithoutVectors() + { + return base.UpsertAsyncCanInsertNewRecord_WithoutVectors(); + } + + [ConditionalFact(Skip = "When using HashSets there is no way to distinguish between no fields being returned and the record not existing so this test isn't useful.")] + public override Task UpsertAsyncCanUpdateExistingRecord_WithoutVectors() + { + return base.UpsertAsyncCanUpdateExistingRecord_WithoutVectors(); + } + + public new class Fixture : NoDataConformanceTests.Fixture + { + public override TestStore TestStore => RedisTestStore.HashSetInstance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisJsonNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisJsonNoDataConformanceTests.cs new file mode 100644 index 000000000000..255e093efad1 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/CRUD/RedisJsonNoDataConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using RedisIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace RedisIntegrationTests.CRUD; + +public class RedisJsonNoDataConformanceTests(RedisJsonNoDataConformanceTests.Fixture fixture) + : NoDataConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoDataConformanceTests.Fixture + { + public override TestStore TestStore => RedisTestStore.JsonInstance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisTestStore.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisTestStore.cs index 6ee6f058da46..5744dd6e53c4 100644 --- a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Support/RedisTestStore.cs @@ -15,6 +15,8 @@ internal sealed class RedisTestStore : TestStore private readonly RedisContainer _container = new RedisBuilder() .WithImage("redis/redis-stack") + .WithPortBinding(6379, assignRandomHostPort: true) + .WithPortBinding(8001, assignRandomHostPort: true) .Build(); private readonly RedisStorageType _storageType; diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs index 19dc832aa3d0..bb2536fafd5e 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerBatchConformanceTests.cs @@ -41,7 +41,7 @@ private async Task CanSplitBatchToAccountForMaxParameterLimit(bool includeVector var received = await collection.GetAsync(keys, new() { IncludeVectors = includeVectors }).ToArrayAsync(); foreach (var record in inserted) { - record.AssertEqual(this.GetRecord(received, record.Id), includeVectors); + record.AssertEqual(this.GetRecord(received, record.Id), includeVectors, fixture.TestStore.VectorsComparable); } await collection.DeleteAsync(keys); diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerNoDataConformanceTests.cs new file mode 100644 index 000000000000..e3303861968a --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/CRUD/SqlServerNoDataConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using SqlServerIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace SqlServerIntegrationTests.CRUD; + +public class SqlServerNoDataConformanceTests(SqlServerNoDataConformanceTests.Fixture fixture) + : NoDataConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoDataConformanceTests.Fixture + { + public override TestStore TestStore => SqlServerTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteNoDataConformanceTests.cs new file mode 100644 index 000000000000..934cbd8a4032 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/CRUD/SqliteNoDataConformanceTests.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. 
All rights reserved. + +using SqliteIntegrationTests.Support; +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace SqliteIntegrationTests.CRUD; + +public class SqliteNoDataConformanceTests(SqliteNoDataConformanceTests.Fixture fixture) + : NoDataConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoDataConformanceTests.Fixture + { + public override TestStore TestStore => SqliteTestStore.Instance; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs index e51f2619444e..4b642de36488 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/BatchConformanceTests.cs @@ -39,7 +39,7 @@ private async Task GetBatchAsyncReturnsInsertedRecords(bool includeVectors) foreach (var record in expectedRecords) { - record.AssertEqual(this.GetRecord(received, record.Id), includeVectors); + record.AssertEqual(this.GetRecord(received, record.Id), includeVectors, fixture.TestStore.VectorsComparable); } } @@ -76,7 +76,7 @@ public virtual async Task UpsertBatchAsyncCanInsertNewRecord() var received = await collection.GetAsync(keys, new() { IncludeVectors = true }).ToArrayAsync(); foreach (var record in inserted) { - record.AssertEqual(this.GetRecord(received, record.Id), includeVectors: true); + record.AssertEqual(this.GetRecord(received, record.Id), includeVectors: true, fixture.TestStore.VectorsComparable); } } @@ -108,7 +108,7 @@ public virtual async Task UpsertBatchAsyncCanUpdateExistingRecords() var received = await fixture.Collection.GetAsync(keys, new() { IncludeVectors = true }).ToArrayAsync(); foreach (var record in updated) { - record.AssertEqual(this.GetRecord(received, record.Id), includeVectors: true); + record.AssertEqual(this.GetRecord(received, record.Id), includeVectors: true, fixture.TestStore.VectorsComparable); } } @@ -146,7 +146,7 @@ public virtual async Task UpsertCanBothInsertAndUpdateRecordsFromTheSameBatch() var received = await fixture.Collection.GetAsync(mixedKeys, new() { IncludeVectors = true }).ToArrayAsync(); foreach (var record in records) { - record.AssertEqual(this.GetRecord(received, record.Id), includeVectors: true); + record.AssertEqual(this.GetRecord(received, record.Id), includeVectors: true, fixture.TestStore.VectorsComparable); } } diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/DynamicDataModelConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/DynamicDataModelConformanceTests.cs index c1be244f0366..2fddb4dd699c 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/DynamicDataModelConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/DynamicDataModelConformanceTests.cs @@ -46,7 +46,7 @@ private async Task GetAsyncReturnsInsertedRecord(bool includeVectors) (TKey)expectedRecord[DynamicDataModelFixture.KeyPropertyName]!, new() { IncludeVectors = includeVectors }); - AssertEquivalent(expectedRecord, received, includeVectors); + AssertEquivalent(expectedRecord, received, includeVectors, fixture.TestStore.VectorsComparable); } [ConditionalFact] @@ -67,7 +67,7 @@ public virtual async Task UpsertAsyncCanInsertNewRecord() Assert.Equal(expectedKey, key); var received = await 
collection.GetAsync(expectedKey, new() { IncludeVectors = true }); - AssertEquivalent(inserted, received, includeVectors: true); + AssertEquivalent(inserted, received, includeVectors: true, fixture.TestStore.VectorsComparable); } [ConditionalFact] @@ -88,7 +88,7 @@ public virtual async Task UpsertAsyncCanUpdateExistingRecord() Assert.Equal(existingRecord[DynamicDataModelFixture.KeyPropertyName], key); var received = await collection.GetAsync((TKey)existingRecord[DynamicDataModelFixture.KeyPropertyName]!, new() { IncludeVectors = true }); - AssertEquivalent(updated, received, includeVectors: true); + AssertEquivalent(updated, received, includeVectors: true, fixture.TestStore.VectorsComparable); } [ConditionalFact] @@ -109,7 +109,7 @@ public async Task DeleteAsyncDeletesTheRecord() Assert.Null(await fixture.Collection.GetAsync((TKey)recordToRemove[DynamicDataModelFixture.KeyPropertyName]!)); } - protected static void AssertEquivalent(Dictionary expected, Dictionary? actual, bool includeVectors) + protected static void AssertEquivalent(Dictionary expected, Dictionary? actual, bool includeVectors, bool compareVectors) { Assert.NotNull(actual); Assert.Equal(expected[DynamicDataModelFixture.KeyPropertyName], actual[DynamicDataModelFixture.KeyPropertyName]); @@ -120,8 +120,15 @@ protected static void AssertEquivalent(Dictionary expected, Dic if (includeVectors) { Assert.Equal( - ((ReadOnlyMemory)expected[DynamicDataModelFixture.EmbeddingPropertyName]!).ToArray(), - ((ReadOnlyMemory)actual[DynamicDataModelFixture.EmbeddingPropertyName]!).ToArray()); + ((ReadOnlyMemory)expected[DynamicDataModelFixture.EmbeddingPropertyName]!).Length, + ((ReadOnlyMemory)actual[DynamicDataModelFixture.EmbeddingPropertyName]!).Length); + + if (compareVectors) + { + Assert.Equal( + ((ReadOnlyMemory)expected[DynamicDataModelFixture.EmbeddingPropertyName]!).ToArray(), + ((ReadOnlyMemory)actual[DynamicDataModelFixture.EmbeddingPropertyName]!).ToArray()); + } } else { diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoDataConformanceTests.cs new file mode 100644 index 000000000000..7dc6da6b29fd --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoDataConformanceTests.cs @@ -0,0 +1,184 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.VectorData; +using VectorDataSpecificationTests.Support; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace VectorDataSpecificationTests.CRUD; + +/// +/// Tests CRUD operations using a model without data fields. 
+/// +public class NoDataConformanceTests(NoDataConformanceTests.Fixture fixture) where TKey : notnull +{ + [ConditionalFact] + public virtual Task GetAsyncReturnsInsertedRecord_WithVectors() + => this.GetAsyncReturnsInsertedRecord(includeVectors: true); + + [ConditionalFact] + public virtual Task GetAsyncReturnsInsertedRecord_WithoutVectors() + => this.GetAsyncReturnsInsertedRecord(includeVectors: false); + + private async Task GetAsyncReturnsInsertedRecord(bool includeVectors) + { + var expectedRecord = fixture.TestData[0]; + + var received = await fixture.Collection.GetAsync(expectedRecord.Id, new() { IncludeVectors = includeVectors }); + + expectedRecord.AssertEqual(received, includeVectors, fixture.TestStore.VectorsComparable); + } + + [ConditionalFact] + public virtual Task UpsertAsyncCanInsertNewRecord_WithVectors() + => this.UpsertAsyncCanInsertNewRecord(includeVectors: true); + + [ConditionalFact] + public virtual Task UpsertAsyncCanInsertNewRecord_WithoutVectors() + => this.UpsertAsyncCanInsertNewRecord(includeVectors: false); + + private async Task UpsertAsyncCanInsertNewRecord(bool includeVectors) + { + var collection = fixture.Collection; + TKey expectedKey = fixture.GenerateNextKey(); + NoDataRecord inserted = new() + { + Id = expectedKey, + Floats = new ReadOnlyMemory(Enumerable.Repeat(0.1f, NoDataRecord.DimensionCount).ToArray()) + }; + + Assert.Null(await collection.GetAsync(expectedKey)); + TKey key = await collection.UpsertAsync(inserted); + Assert.Equal(expectedKey, key); + + var received = await collection.GetAsync(expectedKey, new() { IncludeVectors = includeVectors }); + inserted.AssertEqual(received, includeVectors, fixture.TestStore.VectorsComparable); + } + + [ConditionalFact] + public virtual Task UpsertAsyncCanUpdateExistingRecord_WithVectors() + => this.UpsertAsyncCanUpdateExistingRecord(includeVectors: true); + + [ConditionalFact] + public virtual Task UpsertAsyncCanUpdateExistingRecord_WithoutVectors() + => this.UpsertAsyncCanUpdateExistingRecord(includeVectors: false); + + private async Task UpsertAsyncCanUpdateExistingRecord(bool includeVectors) + { + var collection = fixture.Collection; + var existingRecord = fixture.TestData[1]; + NoDataRecord updated = new() + { + Id = existingRecord.Id, + Floats = new ReadOnlyMemory(Enumerable.Repeat(0.25f, NoDataRecord.DimensionCount).ToArray()) + }; + + Assert.NotNull(await collection.GetAsync(existingRecord.Id, new() { IncludeVectors = true })); + TKey key = await collection.UpsertAsync(updated); + Assert.Equal(existingRecord.Id, key); + + var received = await collection.GetAsync(existingRecord.Id, new() { IncludeVectors = includeVectors }); + updated.AssertEqual(received, includeVectors, fixture.TestStore.VectorsComparable); + } + + [ConditionalFact] + public virtual async Task DeleteAsyncDeletesTheRecord() + { + var recordToRemove = fixture.TestData[2]; + + Assert.NotNull(await fixture.Collection.GetAsync(recordToRemove.Id, new() { IncludeVectors = true })); + await fixture.Collection.DeleteAsync(recordToRemove.Id); + Assert.Null(await fixture.Collection.GetAsync(recordToRemove.Id)); + } + + /// + /// This class is for testing databases that support having no data fields. 
+ /// + public sealed class NoDataRecord + { + public const int DimensionCount = 3; + + [VectorStoreRecordKey(StoragePropertyName = "key")] + public TKey Id { get; set; } = default!; + + [VectorStoreRecordVector(DimensionCount, StoragePropertyName = "embedding")] + public ReadOnlyMemory Floats { get; set; } + + public void AssertEqual(NoDataRecord? other, bool includeVectors, bool compareVectors) + { + Assert.NotNull(other); + Assert.Equal(this.Id, other.Id); + + if (includeVectors) + { + Assert.Equal(this.Floats.Span.Length, other.Floats.Span.Length); + + if (compareVectors) + { + Assert.True(this.Floats.Span.SequenceEqual(other.Floats.Span)); + } + } + } + } + + /// + /// Provides data and configuration for a model without data fields. + /// + public abstract class Fixture : VectorStoreCollectionFixture + { + protected override List BuildTestData() => + [ + new() + { + Id = this.GenerateNextKey(), + Floats = new ReadOnlyMemory(Enumerable.Repeat(0.1f, NoDataRecord.DimensionCount).ToArray()) + }, + new() + { + Id = this.GenerateNextKey(), + Floats = new ReadOnlyMemory(Enumerable.Repeat(0.2f, NoDataRecord.DimensionCount).ToArray()) + }, + new() + { + Id = this.GenerateNextKey(), + Floats = new ReadOnlyMemory(Enumerable.Repeat(0.3f, NoDataRecord.DimensionCount).ToArray()) + }, + new() + { + Id = this.GenerateNextKey(), + Floats = new ReadOnlyMemory(Enumerable.Repeat(0.4f, NoDataRecord.DimensionCount).ToArray()) + } + ]; + + protected override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = + [ + new VectorStoreRecordKeyProperty(nameof(NoDataRecord.Id), typeof(TKey)) { StoragePropertyName = "key" }, + new VectorStoreRecordVectorProperty(nameof(NoDataRecord.Floats), typeof(ReadOnlyMemory), NoDataRecord.DimensionCount) + { + StoragePropertyName = "embedding", + IndexKind = this.IndexKind, + } + ] + }; + + protected override async Task WaitForDataAsync() + { + for (var i = 0; i < 20; i++) + { + var getOptions = new GetRecordOptions { IncludeVectors = true }; + var results = await this.Collection.GetAsync([this.TestData[0].Id, this.TestData[1].Id, this.TestData[2].Id, this.TestData[3].Id], getOptions).ToArrayAsync(); + if (results.Length == 4 && results.All(r => r != null)) + { + return; + } + + await Task.Delay(TimeSpan.FromMilliseconds(100)); + } + + throw new InvalidOperationException("Data did not appear in the collection within the expected time."); + } + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs index f8cb930a8746..3e879f46837f 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs @@ -148,8 +148,8 @@ protected override VectorStoreRecordDefinition GetRecordDefinition() { Properties = [ - new VectorStoreRecordKeyProperty(nameof(NoVectorRecord.Id), typeof(TKey)), - new VectorStoreRecordDataProperty(nameof(NoVectorRecord.Text), typeof(string)) { IsIndexed = true }, + new VectorStoreRecordKeyProperty(nameof(NoVectorRecord.Id), typeof(TKey)) { StoragePropertyName = "key" }, + new VectorStoreRecordDataProperty(nameof(NoVectorRecord.Text), typeof(string)) { IsIndexed = true, StoragePropertyName = "text" }, ] }; diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs 
b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs index 092e16e2528c..a1354caa4efe 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/RecordConformanceTests.cs @@ -44,7 +44,7 @@ private async Task GetAsyncReturnsInsertedRecord(bool includeVectors) var received = await fixture.Collection.GetAsync(expectedRecord.Id, new() { IncludeVectors = includeVectors }); - expectedRecord.AssertEqual(received, includeVectors); + expectedRecord.AssertEqual(received, includeVectors, fixture.TestStore.VectorsComparable); } [ConditionalFact] @@ -65,7 +65,7 @@ public virtual async Task UpsertAsyncCanInsertNewRecord() Assert.Equal(expectedKey, key); var received = await collection.GetAsync(expectedKey, new() { IncludeVectors = true }); - inserted.AssertEqual(received, includeVectors: true); + inserted.AssertEqual(received, includeVectors: true, fixture.TestStore.VectorsComparable); } [ConditionalFact] @@ -78,7 +78,7 @@ public virtual async Task UpsertAsyncCanUpdateExistingRecord() Id = existingRecord.Id, Text = "updated", Number = 456, - Floats = new ReadOnlyMemory(Enumerable.Repeat(0.2f, SimpleRecord.DimensionCount).ToArray()) + Floats = new ReadOnlyMemory(Enumerable.Repeat(0.25f, SimpleRecord.DimensionCount).ToArray()) }; Assert.NotNull(await collection.GetAsync(existingRecord.Id)); @@ -86,7 +86,7 @@ public virtual async Task UpsertAsyncCanUpdateExistingRecord() Assert.Equal(existingRecord.Id, key); var received = await collection.GetAsync(existingRecord.Id, new() { IncludeVectors = true }); - updated.AssertEqual(received, includeVectors: true); + updated.AssertEqual(received, includeVectors: true, fixture.TestStore.VectorsComparable); } [ConditionalFact] diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Models/SimpleRecord.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Models/SimpleRecord.cs index 1d2222204045..57f15a00e9a4 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Models/SimpleRecord.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Models/SimpleRecord.cs @@ -26,7 +26,7 @@ public sealed class SimpleRecord [VectorStoreRecordVector(Dimensions: DimensionCount, StoragePropertyName = "embedding")] public ReadOnlyMemory Floats { get; set; } - public void AssertEqual(SimpleRecord? other, bool includeVectors) + public void AssertEqual(SimpleRecord? other, bool includeVectors, bool compareVectors) { Assert.NotNull(other); Assert.Equal(this.Id, other.Id); @@ -35,7 +35,12 @@ public void AssertEqual(SimpleRecord? 
other, bool includeVectors) if (includeVectors) { - Assert.Equal(this.Floats.ToArray(), other.Floats.ToArray()); + Assert.Equal(this.Floats.Span.Length, other.Floats.Span.Length); + + if (compareVectors) + { + Assert.Equal(this.Floats.ToArray(), other.Floats.ToArray()); + } } else { diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs index f6b8b60b77c7..6d3d2f935e54 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs @@ -13,6 +13,11 @@ public abstract class TestStore private readonly SemaphoreSlim _lock = new(1, 1); private int _referenceCount; + /// + /// Some databases modify vectors on upsert, e.g. normalizing them, so vectors + /// returned cannot be compared with the original ones. + /// + public virtual bool VectorsComparable => true; public virtual string DefaultDistanceFunction => DistanceFunction.CosineSimilarity; public virtual string DefaultIndexKind => IndexKind.Flat; diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoDataConformanceTests.cs new file mode 100644 index 000000000000..fcadf45f574a --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoDataConformanceTests.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft. All rights reserved. + +using VectorDataSpecificationTests.CRUD; +using VectorDataSpecificationTests.Support; +using WeaviateIntegrationTests.Support; +using Xunit; + +namespace WeaviateIntegrationTests.CRUD; + +public class WeaviateNoDataConformanceTests_NamedVectors(WeaviateNoDataConformanceTests_NamedVectors.Fixture fixture) + : NoDataConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoDataConformanceTests.Fixture + { + public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; + + /// + /// Weaviate collections must start with an uppercase letter. + /// + protected override string CollectionName => "NoDataNamedCollection"; + } +} + +public class WeaviateNoDataConformanceTests_UnnamedVector(WeaviateNoDataConformanceTests_UnnamedVector.Fixture fixture) + : NoDataConformanceTests(fixture), IClassFixture +{ + public new class Fixture : NoDataConformanceTests.Fixture + { + public override TestStore TestStore => WeaviateTestStore.UnnamedVectorInstance; + + /// + /// Weaviate collections must start with an uppercase letter. 
+ /// + protected override string CollectionName => "NoDataUnnamedCollection"; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs index 20d5888ad851..c8a04bf1abb3 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs @@ -7,8 +7,8 @@ namespace WeaviateIntegrationTests.CRUD; -public class WeaviateNoVectorConformanceTests(WeaviateNoVectorConformanceTests.Fixture fixture) - : NoVectorConformanceTests(fixture), IClassFixture +public class WeaviateNoVectorConformanceTests_NamedVectors(WeaviateNoVectorConformanceTests_NamedVectors.Fixture fixture) + : NoVectorConformanceTests(fixture), IClassFixture { public new class Fixture : NoVectorConformanceTests.Fixture { @@ -17,6 +17,6 @@ public class WeaviateNoVectorConformanceTests(WeaviateNoVectorConformanceTests.F /// /// Weaviate collections must start with an uppercase letter. /// - protected override string CollectionName => "NoVectorCollection"; + protected override string CollectionName => "NoVectorNamedCollection"; } } From b811688c3878dc65d7a35b48093abe8efd11e458 Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Tue, 22 Apr 2025 12:08:32 +0100 Subject: [PATCH 51/63] Fix merge issue. --- dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs b/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs index d0bb468eecb2..da5bf57dd103 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs @@ -15,9 +15,15 @@ namespace Microsoft.SemanticKernel; internal static partial class Verify { #if NET + [GeneratedRegex("^[0-9A-Za-z_]*$")] + private static partial Regex AsciiLettersDigitsUnderscoresRegex(); + [GeneratedRegex("^[^.]+\\.[^.]+$")] private static partial Regex FilenameRegex(); #else + private static Regex AsciiLettersDigitsUnderscoresRegex() => s_asciiLettersDigitsUnderscoresRegex; + private static readonly Regex s_asciiLettersDigitsUnderscoresRegex = new("^[0-9A-Za-z_]*$", RegexOptions.Compiled); + private static Regex FilenameRegex() => s_filenameRegex; private static readonly Regex s_filenameRegex = new("^[^.]+\\.[^.]+$", RegexOptions.Compiled); #endif From 17ce3f06aa4453a9a3d424db7a41daa04b14e6ac Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Tue, 22 Apr 2025 12:17:42 +0100 Subject: [PATCH 52/63] Fix merge issues. 
--- .../src/Agents/Runtime/Abstractions/Runtime.Abstractions.csproj | 1 + dotnet/src/Agents/Runtime/Core/Runtime.Core.csproj | 1 + dotnet/src/Agents/Runtime/InProcess/Runtime.InProcess.csproj | 1 + 3 files changed, 3 insertions(+) diff --git a/dotnet/src/Agents/Runtime/Abstractions/Runtime.Abstractions.csproj b/dotnet/src/Agents/Runtime/Abstractions/Runtime.Abstractions.csproj index 9f687750928e..2e32ccad3c7a 100644 --- a/dotnet/src/Agents/Runtime/Abstractions/Runtime.Abstractions.csproj +++ b/dotnet/src/Agents/Runtime/Abstractions/Runtime.Abstractions.csproj @@ -15,6 +15,7 @@ + diff --git a/dotnet/src/Agents/Runtime/Core/Runtime.Core.csproj b/dotnet/src/Agents/Runtime/Core/Runtime.Core.csproj index 2b996f882698..5607805fa2d3 100644 --- a/dotnet/src/Agents/Runtime/Core/Runtime.Core.csproj +++ b/dotnet/src/Agents/Runtime/Core/Runtime.Core.csproj @@ -14,6 +14,7 @@ + diff --git a/dotnet/src/Agents/Runtime/InProcess/Runtime.InProcess.csproj b/dotnet/src/Agents/Runtime/InProcess/Runtime.InProcess.csproj index dc585e3b2ef9..fe2326664ca5 100644 --- a/dotnet/src/Agents/Runtime/InProcess/Runtime.InProcess.csproj +++ b/dotnet/src/Agents/Runtime/InProcess/Runtime.InProcess.csproj @@ -14,6 +14,7 @@ + From 8b8ccd6b83d01706997cbcfad95a0b6b796ca0ba Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Sat, 26 Apr 2025 16:07:24 +0200 Subject: [PATCH 53/63] .Net: Introduce a LINQ filter preprocessor in MEVD (#11702) This is a refactor-only change, with no functional changes. It extracts all logic for identifying and processing captured variables ("parameters") into a new FilterTranslationPreprocessor connector support type in MEVD. * Most databases don't support parameterization; for these we simply inline the captured variables. Connector translators see these as constants without ever having to know about captured variables. * SQL databases do support parameterization. For these, the preprocessor transforms the captured variable (which is a complex tree construct) into a simple QueryParameterExpression, which can easily be pattern-matched in connector translator code. This is preparatory work for #11673 (we need a centralized place to do this kind of thing).
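To illustrate the two modes described above, here is a minimal usage sketch. It is not part of this patch: the Hotel type, its Category property and the captured local variable are hypothetical, invented purely for the example, and only the FilterTranslationPreprocessor API added further down in this patch is assumed.

using System;
using System.Linq.Expressions;
using Microsoft.Extensions.VectorData.ConnectorSupport.Filter;

// Hypothetical record type, used only for this illustration.
internal sealed class Hotel
{
    public string Category { get; set; } = string.Empty;
}

internal static class FilterPreprocessorSketch
{
    public static void Main()
    {
        // The local variable is captured by the lambda below inside a compiler-generated closure type.
        var category = "budget";
        Expression<Func<Hotel, bool>> filter = r => r.Category == category;

        // Connectors for databases without parameter support inline the captured value,
        // so their translators only ever see ConstantExpression nodes.
        var inliningPreprocessor = new FilterTranslationPreprocessor { InlineCapturedVariables = true };
        var inlined = inliningPreprocessor.Visit(filter.Body);

        // SQL-based connectors instead get a simple QueryParameterExpression, which is easy to
        // pattern-match and bind to an out-of-band parameter placeholder.
        var parameterizingPreprocessor = new FilterTranslationPreprocessor { TransformCapturedVariablesToQueryParameterExpressions = true };
        var parameterized = parameterizingPreprocessor.Visit(filter.Body);

        Console.WriteLine(inlined);      // prints something like (r.Category == "budget")
        Console.WriteLine(parameterized);
    }
}

In both modes the connector translators no longer need their own TryGetConstant/TryGetCapturedValue helpers to recognize accesses to compiler-generated closure fields; that duplicated pattern matching is what the diffs below delete.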
--- .../AzureAISearchFilterTranslator.cs | 64 ++++--------------- .../AzureCosmosDBMongoDBFilterTranslator.cs | 41 ++++-------- .../AzureCosmosDBNoSqlFilterTranslator.cs | 5 ++ .../SqlFilterTranslator.cs | 55 ++++++---------- .../MongoDBFilterTranslator.cs | 42 ++++-------- .../PineconeFilterTranslator.cs | 42 ++++-------- .../PostgresFilterTranslator.cs | 8 +-- .../QdrantFilterTranslator.cs | 49 ++++---------- .../RedisFilterTranslator.cs | 36 ++--------- .../SqlServerFilterTranslator.cs | 8 +-- .../SqliteFilterTranslator.cs | 8 +-- .../WeaviateFilterTranslator.cs | 44 ++++--------- .../Filter/FilterTranslationPreprocessor.cs | 54 ++++++++++++++++ .../Filter/QueryParameterExpression.cs | 33 ++++++++++ 14 files changed, 204 insertions(+), 285 deletions(-) create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/Filter/FilterTranslationPreprocessor.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/Filter/QueryParameterExpression.cs diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs index a3b6768ec43d..8c9d172ca863 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchFilterTranslator.cs @@ -7,10 +7,9 @@ using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Linq.Expressions; -using System.Reflection; -using System.Runtime.CompilerServices; using System.Text; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.ConnectorSupport.Filter; namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; @@ -32,7 +31,11 @@ internal string Translate(LambdaExpression lambdaExpression, VectorStoreRecordMo Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; - this.Translate(lambdaExpression.Body); + var preprocessor = new FilterTranslationPreprocessor { InlineCapturedVariables = true }; + var preprocessedExpression = preprocessor.Visit(lambdaExpression.Body); + + this.Translate(preprocessedExpression); + return this._filter.ToString(); } @@ -139,20 +142,13 @@ private void GenerateLiteral(object? 
value) private void TranslateMember(MemberExpression memberExpression) { - switch (memberExpression) + if (this.TryBindProperty(memberExpression, out var property)) { - case var _ when this.TryBindProperty(memberExpression, out var property): - this._filter.Append(property.StorageName); // TODO: Escape - return; - - // Identify captured lambda variables, inline them as constants - case var _ when TryGetCapturedValue(memberExpression, out var capturedValue): - this.GenerateLiteral(capturedValue); - return; - - default: - throw new NotSupportedException($"Member access for '{memberExpression.Member.Name}' is unsupported - only member access over the filter parameter are supported"); + this._filter.Append(property.StorageName); // TODO: Escape + return; } + + throw new NotSupportedException($"Member access for '{memberExpression.Member.Name}' is unsupported - only member access over the filter parameter are supported"); } private void TranslateMethodCall(MethodCallExpression methodCall) @@ -207,7 +203,7 @@ private void TranslateContains(Expression source, Expression item) for (var i = 0; i < newArray.Expressions.Count; i++) { - if (!TryGetConstant(newArray.Expressions[i], out var elementValue)) + if (newArray.Expressions[i] is not ConstantExpression { Value: var elementValue }) { throw new NotSupportedException("Invalid element in array"); } @@ -223,9 +219,7 @@ private void TranslateContains(Expression source, Expression item) ProcessInlineEnumerable(elements, item); return; - // Contains over captured enumerable (we inline) - case var _ when TryGetConstant(source, out var constantEnumerable) - && constantEnumerable is IEnumerable enumerable and not string: + case ConstantExpression { Value: IEnumerable enumerable and not string }: ProcessInlineEnumerable(enumerable, item); return; @@ -372,36 +366,4 @@ private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out Vect return true; } - - private static bool TryGetCapturedValue(Expression expression, out object? capturedValue) - { - if (expression is MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } - && constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) - && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true)) - { - capturedValue = fieldInfo.GetValue(constant.Value); - return true; - } - - capturedValue = null; - return false; - } - - private static bool TryGetConstant(Expression expression, out object? 
constantValue) - { - switch (expression) - { - case ConstantExpression { Value: var v }: - constantValue = v; - return true; - - case var _ when TryGetCapturedValue(expression, out var capturedValue): - constantValue = capturedValue; - return true; - - default: - constantValue = null; - return false; - } - } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs index e54a378993c2..94fbb845fc20 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBFilterTranslator.cs @@ -7,9 +7,8 @@ using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Linq.Expressions; -using System.Reflection; -using System.Runtime.CompilerServices; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.ConnectorSupport.Filter; using MongoDB.Bson; namespace Microsoft.SemanticKernel.Connectors.MongoDB; @@ -28,7 +27,10 @@ internal BsonDocument Translate(LambdaExpression lambdaExpression, VectorStoreRe Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; - return this.Translate(lambdaExpression.Body); + var preprocessor = new FilterTranslationPreprocessor { InlineCapturedVariables = true }; + var preprocessedExpression = preprocessor.Visit(lambdaExpression.Body); + + return this.Translate(preprocessedExpression); } private BsonDocument Translate(Expression? node) @@ -57,9 +59,10 @@ or ExpressionType.LessThan or ExpressionType.LessThanOrEqual }; private BsonDocument TranslateEqualityComparison(BinaryExpression binary) - => (this.TryBindProperty(binary.Left, out var property) && TryGetConstant(binary.Right, out var value)) - || (this.TryBindProperty(binary.Right, out property) && TryGetConstant(binary.Left, out value)) - ? this.GenerateEqualityComparison(property, value, binary.NodeType) + => this.TryBindProperty(binary.Left, out var property) && binary.Right is ConstantExpression { Value: var rightConstant } + ? this.GenerateEqualityComparison(property, rightConstant, binary.NodeType) + : this.TryBindProperty(binary.Right, out property) && binary.Left is ConstantExpression { Value: var leftConstant } + ? this.GenerateEqualityComparison(property, leftConstant, binary.NodeType) : throw new NotSupportedException("Invalid equality/comparison"); private BsonDocument GenerateEqualityComparison(VectorStoreRecordPropertyModel property, object? 
value, ExpressionType nodeType) @@ -184,7 +187,7 @@ private BsonDocument TranslateContains(Expression source, Expression item) for (var i = 0; i < newArray.Expressions.Count; i++) { - if (!TryGetConstant(newArray.Expressions[i], out var elementValue)) + if (newArray.Expressions[i] is not ConstantExpression { Value: var elementValue }) { throw new NotSupportedException("Invalid element in array"); } @@ -195,8 +198,7 @@ private BsonDocument TranslateContains(Expression source, Expression item) return ProcessInlineEnumerable(elements, item); // Contains over captured enumerable (we inline) - case var _ when TryGetConstant(source, out var constantEnumerable) - && constantEnumerable is IEnumerable enumerable and not string: + case ConstantExpression { Value: IEnumerable enumerable and not string }: return ProcessInlineEnumerable(enumerable, item); default: @@ -265,25 +267,4 @@ private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out Vect return true; } - - private static bool TryGetConstant(Expression expression, out object? constantValue) - { - switch (expression) - { - case ConstantExpression { Value: var v }: - constantValue = v; - return true; - - // This identifies compiler-generated closure types which contain captured variables. - case MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } - when constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) - && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true): - constantValue = fieldInfo.GetValue(constant.Value); - return true; - - default: - constantValue = null; - return false; - } - } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs index da70fa354bc7..84587a1a9cdf 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlFilterTranslator.cs @@ -10,6 +10,7 @@ using System.Runtime.CompilerServices; using System.Text; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.ConnectorSupport.Filter; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; @@ -30,7 +31,11 @@ internal class AzureCosmosDBNoSqlFilterTranslator Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; + var preprocessor = new FilterTranslationPreprocessor { TransformCapturedVariablesToQueryParameterExpressions = true }; + var preprocessedExpression = preprocessor.Visit(lambdaExpression.Body); + this.Translate(preprocessedExpression); + return (this._sql.ToString(), this._parameters); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs index 0d73e6e4ef9f..7bc56615cb0c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Common/SqlFilterTranslator.cs @@ -6,10 +6,9 @@ using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Linq.Expressions; -using System.Reflection; -using System.Runtime.CompilerServices; using System.Text; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.ConnectorSupport.Filter; namespace Microsoft.SemanticKernel.Connectors; @@ -43,7 +42,10 @@ internal void
Translate(bool appendWhere) this._sql.Append("WHERE "); } - this.Translate(this._lambdaExpression.Body, isSearchCondition: true); + var preprocessor = new FilterTranslationPreprocessor { TransformCapturedVariablesToQueryParameterExpressions = true }; + var preprocessedExpression = preprocessor.Visit(this._lambdaExpression.Body); + + this.Translate(preprocessedExpression, isSearchCondition: true); } protected void Translate(Expression? node, bool isSearchCondition = false) @@ -58,6 +60,10 @@ protected void Translate(Expression? node, bool isSearchCondition = false) this.TranslateConstant(constant.Value); return; + case QueryParameterExpression { Name: var name, Value: var value }: + this.TranslateQueryParameter(name, value); + return; + case MemberExpression member: this.TranslateMember(member, isSearchCondition); return; @@ -127,8 +133,7 @@ protected void TranslateBinary(BinaryExpression binary) this._sql.Append(')'); static bool IsNull(Expression expression) - => expression is ConstantExpression { Value: null } - || (TryGetCapturedValue(expression, out _, out var capturedValue) && capturedValue is null); + => expression is ConstantExpression { Value: null } or QueryParameterExpression { Value: null }; } protected virtual void TranslateConstant(object? value) @@ -175,25 +180,19 @@ protected virtual void TranslateConstant(object? value) private void TranslateMember(MemberExpression memberExpression, bool isSearchCondition) { - switch (memberExpression) + if (this.TryBindProperty(memberExpression, out var property)) { - case var _ when this.TryBindProperty(memberExpression, out var property): - this.GenerateColumn(property.StorageName, isSearchCondition); - return; - - case var _ when TryGetCapturedValue(memberExpression, out var name, out var value): - this.TranslateCapturedVariable(name, value); - return; - - default: - throw new NotSupportedException($"Member access for '{memberExpression.Member.Name}' is unsupported - only member access over the filter parameter are supported"); + this.GenerateColumn(property.StorageName, isSearchCondition); + return; } + + throw new NotSupportedException($"Member access for '{memberExpression.Member.Name}' is unsupported - only member access over the filter parameter are supported"); } protected virtual void GenerateColumn(string column, bool isSearchCondition = false) => this._sql.Append('"').Append(column.Replace("\"", "\"\"")).Append('"'); - protected abstract void TranslateCapturedVariable(string name, object? capturedValue); + protected abstract void TranslateQueryParameter(string name, object? value); private void TranslateMethodCall(MethodCallExpression methodCall, bool isSearchCondition = false) { @@ -262,8 +261,8 @@ private void TranslateContains(Expression source, Expression item) return; // Contains over captured array (r => arrayLocalVariable.Contains(r.String)) - case var _ when TryGetCapturedValue(source, out _, out var value): - this.TranslateContainsOverCapturedArray(source, item, value); + case QueryParameterExpression { Value: var value }: + this.TranslateContainsOverParameterizedArray(source, item, value); return; default: @@ -273,7 +272,7 @@ private void TranslateContains(Expression source, Expression item) protected abstract void TranslateContainsOverArrayColumn(Expression source, Expression item); - protected abstract void TranslateContainsOverCapturedArray(Expression source, Expression item, object? value); + protected abstract void TranslateContainsOverParameterizedArray(Expression source, Expression item, object? 
value); private void TranslateUnary(UnaryExpression unary, bool isSearchCondition) { @@ -351,20 +350,4 @@ private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out Vect return true; } - - private static bool TryGetCapturedValue(Expression expression, [NotNullWhen(true)] out string? name, out object? value) - { - if (expression is MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } - && constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) - && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true)) - { - name = fieldInfo.Name; - value = fieldInfo.GetValue(constant.Value); - return true; - } - - name = null; - value = null; - return false; - } } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs index 61457cd4cb10..0280aee116c3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBFilterTranslator.cs @@ -7,9 +7,8 @@ using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Linq.Expressions; -using System.Reflection; -using System.Runtime.CompilerServices; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.ConnectorSupport.Filter; using MongoDB.Bson; namespace Microsoft.SemanticKernel.Connectors.MongoDB; @@ -28,7 +27,10 @@ internal BsonDocument Translate(LambdaExpression lambdaExpression, VectorStoreRe Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; - return this.Translate(lambdaExpression.Body); + var preprocessor = new FilterTranslationPreprocessor { InlineCapturedVariables = true }; + var preprocessedExpression = preprocessor.Visit(lambdaExpression.Body); + + return this.Translate(preprocessedExpression); } private BsonDocument Translate(Expression? node) @@ -57,9 +59,10 @@ or ExpressionType.LessThan or ExpressionType.LessThanOrEqual }; private BsonDocument TranslateEqualityComparison(BinaryExpression binary) - => (this.TryBindProperty(binary.Left, out var property) && TryGetConstant(binary.Right, out var value)) - || (this.TryBindProperty(binary.Right, out property) && TryGetConstant(binary.Left, out value)) - ? this.GenerateEqualityComparison(property, value, binary.NodeType) + => this.TryBindProperty(binary.Left, out var property) && binary.Right is ConstantExpression { Value: var rightConstant } + ? this.GenerateEqualityComparison(property, rightConstant, binary.NodeType) + : this.TryBindProperty(binary.Right, out property) && binary.Left is ConstantExpression { Value: var leftConstant } + ? this.GenerateEqualityComparison(property, leftConstant, binary.NodeType) : throw new NotSupportedException("Invalid equality/comparison"); private BsonDocument GenerateEqualityComparison(VectorStoreRecordPropertyModel property, object? 
value, ExpressionType nodeType) @@ -184,7 +187,7 @@ private BsonDocument TranslateContains(Expression source, Expression item) for (var i = 0; i < newArray.Expressions.Count; i++) { - if (!TryGetConstant(newArray.Expressions[i], out var elementValue)) + if (newArray.Expressions[i] is not ConstantExpression { Value: var elementValue }) { throw new NotSupportedException("Invalid element in array"); } @@ -194,9 +197,7 @@ private BsonDocument TranslateContains(Expression source, Expression item) return ProcessInlineEnumerable(elements, item); - // Contains over captured enumerable (we inline) - case var _ when TryGetConstant(source, out var constantEnumerable) - && constantEnumerable is IEnumerable enumerable and not string: + case ConstantExpression { Value: IEnumerable enumerable and not string }: return ProcessInlineEnumerable(enumerable, item); default: @@ -265,25 +266,4 @@ private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out Vect return true; } - - private static bool TryGetConstant(Expression expression, out object? constantValue) - { - switch (expression) - { - case ConstantExpression { Value: var v }: - constantValue = v; - return true; - - // This identifies compiler-generated closure types which contain captured variables. - case MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } - when constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) - && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true): - constantValue = fieldInfo.GetValue(constant.Value); - return true; - - default: - constantValue = null; - return false; - } - } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs index a54346b918d4..7725e76ed633 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeFilterTranslator.cs @@ -7,9 +7,8 @@ using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Linq.Expressions; -using System.Reflection; -using System.Runtime.CompilerServices; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.ConnectorSupport.Filter; using Pinecone; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -31,7 +30,10 @@ internal Metadata Translate(LambdaExpression lambdaExpression, VectorStoreRecord Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; - return this.Translate(lambdaExpression.Body); + var preprocessor = new FilterTranslationPreprocessor { InlineCapturedVariables = true }; + var preprocessedExpression = preprocessor.Visit(lambdaExpression.Body); + + return this.Translate(preprocessedExpression); } private Metadata Translate(Expression? node) @@ -60,9 +62,10 @@ or ExpressionType.LessThan or ExpressionType.LessThanOrEqual }; private Metadata TranslateEqualityComparison(BinaryExpression binary) - => (this.TryBindProperty(binary.Left, out var property) && TryGetConstant(binary.Right, out var value)) - || (this.TryBindProperty(binary.Right, out property) && TryGetConstant(binary.Left, out value)) - ? this.GenerateEqualityComparison(property, value, binary.NodeType) + => this.TryBindProperty(binary.Left, out var property) && binary.Right is ConstantExpression { Value: var rightConstant } + ? 
this.GenerateEqualityComparison(property, rightConstant, binary.NodeType) + : this.TryBindProperty(binary.Right, out property) && binary.Left is ConstantExpression { Value: var leftConstant } + ? this.GenerateEqualityComparison(property, leftConstant, binary.NodeType) : throw new NotSupportedException("Invalid equality/comparison"); private Metadata GenerateEqualityComparison(VectorStoreRecordPropertyModel property, object? value, ExpressionType nodeType) @@ -187,7 +190,7 @@ private Metadata TranslateContains(Expression source, Expression item) for (var i = 0; i < newArray.Expressions.Count; i++) { - if (!TryGetConstant(newArray.Expressions[i], out var elementValue)) + if (newArray.Expressions[i] is not ConstantExpression { Value: var elementValue }) { throw new NotSupportedException("Invalid element in array"); } @@ -197,9 +200,7 @@ private Metadata TranslateContains(Expression source, Expression item) return ProcessInlineEnumerable(elements, item); - // Contains over captured enumerable (we inline) - case var _ when TryGetConstant(source, out var constantEnumerable) - && constantEnumerable is IEnumerable enumerable and not string: + case ConstantExpression { Value: IEnumerable enumerable and not string }: return ProcessInlineEnumerable(enumerable, item); default: @@ -269,27 +270,6 @@ private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out Vect return true; } - private static bool TryGetConstant(Expression expression, out object? constantValue) - { - switch (expression) - { - case ConstantExpression { Value: var v }: - constantValue = v; - return true; - - // This identifies compiler-generated closure types which contain captured variables. - case MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } - when constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) - && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true): - constantValue = fieldInfo.GetValue(constant.Value); - return true; - - default: - constantValue = null; - return false; - } - } - private static MetadataValue? ToMetadata(object? value) => value is null ? null : PineconeVectorStoreRecordFieldMapping.ConvertToMetadataValue(value); diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs index 500bd7b9b18b..52a230865065 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresFilterTranslator.cs @@ -31,7 +31,7 @@ protected override void TranslateContainsOverArrayColumn(Expression source, Expr this._sql.Append(']'); } - protected override void TranslateContainsOverCapturedArray(Expression source, Expression item, object? value) + protected override void TranslateContainsOverParameterizedArray(Expression source, Expression item, object? value) { this.Translate(item); this._sql.Append(" = ANY ("); @@ -39,17 +39,17 @@ protected override void TranslateContainsOverCapturedArray(Expression source, Ex this._sql.Append(')'); } - protected override void TranslateCapturedVariable(string name, object? capturedValue) + protected override void TranslateQueryParameter(string name, object? 
value) { // For null values, simply inline rather than parameterize; parameterized NULLs require setting NpgsqlDbType which is a bit more complicated, // plus in any case equality with NULL requires different SQL (x IS NULL rather than x = y) - if (capturedValue is null) + if (value is null) { this._sql.Append("NULL"); } else { - this._parameterValues.Add(capturedValue); + this._parameterValues.Add(value); this._sql.Append('$').Append(this._parameterIndex++); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs index 3ced37909283..bf88e98b5bc9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantFilterTranslator.cs @@ -7,10 +7,9 @@ using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Linq.Expressions; -using System.Reflection; -using System.Runtime.CompilerServices; using Google.Protobuf.Collections; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.ConnectorSupport.Filter; using Qdrant.Client.Grpc; using Range = Qdrant.Client.Grpc.Range; @@ -28,7 +27,10 @@ internal Filter Translate(LambdaExpression lambdaExpression, VectorStoreRecordMo Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; - return this.Translate(lambdaExpression.Body); + var preprocessor = new FilterTranslationPreprocessor { InlineCapturedVariables = true }; + var preprocessedExpression = preprocessor.Visit(lambdaExpression.Body); + + return this.Translate(preprocessedExpression); } private Filter Translate(Expression? node) @@ -57,10 +59,11 @@ private Filter Translate(Expression? node) }; private Filter TranslateEqual(Expression left, Expression right, bool negated = false) - => (this.TryBindProperty(left, out var property) && TryGetConstant(right, out var constantValue)) - || (this.TryBindProperty(right, out property) && TryGetConstant(left, out constantValue)) - ? this.GenerateEqual(property.StorageName, constantValue, negated) - : throw new NotSupportedException("Equality expression not supported by Qdrant"); + => this.TryBindProperty(left, out var property) && right is ConstantExpression { Value: var rightConstant } + ? this.GenerateEqual(property.StorageName, rightConstant, negated) + : this.TryBindProperty(right, out property) && left is ConstantExpression { Value: var leftConstant } + ? this.GenerateEqual(property.StorageName, leftConstant, negated) + : throw new NotSupportedException("Invalid equality/comparison"); private Filter GenerateEqual(string propertyStorageName, object? value, bool negated = false) { @@ -108,8 +111,7 @@ private Filter TranslateComparison(BinaryExpression comparison) bool TryProcessComparison(Expression first, Expression second, [NotNullWhen(true)] out Filter? 
result) { // TODO: Nullable - if (this.TryBindProperty(first, out var property) - && TryGetConstant(second, out var constantValue)) + if (this.TryBindProperty(first, out var property) && second is ConstantExpression { Value: var constantValue }) { double doubleConstantValue = constantValue switch { @@ -281,9 +283,9 @@ private Filter TranslateContains(Expression source, Expression item) for (var i = 0; i < newArray.Expressions.Count; i++) { - if (!TryGetConstant(newArray.Expressions[i], out var elementValue)) + if (newArray.Expressions[i] is not ConstantExpression { Value: var elementValue }) { - throw new NotSupportedException("Invalid element in array"); + throw new NotSupportedException("Inline array elements must be constants"); } elements[i] = elementValue; @@ -291,9 +293,7 @@ private Filter TranslateContains(Expression source, Expression item) return ProcessInlineEnumerable(elements, item); - // Contains over captured enumerable (we inline) - case var _ when TryGetConstant(source, out var constantEnumerable) - && constantEnumerable is IEnumerable enumerable and not string: + case ConstantExpression { Value: IEnumerable enumerable and not string }: return ProcessInlineEnumerable(enumerable, item); default: @@ -384,25 +384,4 @@ private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out Vect return true; } - - private static bool TryGetConstant(Expression expression, out object? constantValue) - { - switch (expression) - { - case ConstantExpression { Value: var v }: - constantValue = v; - return true; - - // This identifies compiler-generated closure types which contain captured variables. - case MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } - when constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) - && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true): - constantValue = fieldInfo.GetValue(constant.Value); - return true; - - default: - constantValue = null; - return false; - } - } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs index 799c9c84d657..92ecdd3fd798 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisFilterTranslator.cs @@ -6,10 +6,9 @@ using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Linq.Expressions; -using System.Reflection; -using System.Runtime.CompilerServices; using System.Text; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.ConnectorSupport.Filter; namespace Microsoft.SemanticKernel.Connectors.Redis; @@ -28,7 +27,10 @@ internal string Translate(LambdaExpression lambdaExpression, VectorStoreRecordMo Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; - this.Translate(lambdaExpression.Body); + var preprocessor = new FilterTranslationPreprocessor { InlineCapturedVariables = true }; + var preprocessedExpression = preprocessor.Visit(lambdaExpression.Body); + + this.Translate(preprocessedExpression); return this._filter.ToString(); } @@ -93,8 +95,7 @@ private void TranslateEqualityComparison(BinaryExpression binary) bool TryProcessEqualityComparison(Expression first, Expression second) { // TODO: Nullable - if (this.TryBindProperty(first, out var property) - && TryGetConstant(second, out var constantValue)) + if (this.TryBindProperty(first, out 
var property) && second is ConstantExpression { Value: var constantValue }) { // Numeric negation has a special syntax (!=), for the rest we nest in a NOT if (binary.NodeType is ExpressionType.NotEqual && constantValue is not int or long or float or double) @@ -176,9 +177,7 @@ private void TranslateMethodCall(MethodCallExpression methodCall) private void TranslateContains(Expression source, Expression item) { // Contains over tag field - if (this.TryBindProperty(source, out var property) - && TryGetConstant(item, out var itemConstant) - && itemConstant is string stringConstant) + if (this.TryBindProperty(source, out var property) && item is ConstantExpression { Value: string stringConstant }) { this._filter .Append('@') @@ -237,25 +236,4 @@ private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out Vect return true; } - - private static bool TryGetConstant(Expression expression, out object? constantValue) - { - switch (expression) - { - case ConstantExpression { Value: var v }: - constantValue = v; - return true; - - // This identifies compiler-generated closure types which contain captured variables. - case MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } - when constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) - && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true): - constantValue = fieldInfo.GetValue(constant.Value); - return true; - - default: - constantValue = null; - return false; - } - } } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs index 2ee8935dc3f0..ec819362072c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerFilterTranslator.cs @@ -60,7 +60,7 @@ protected override void GenerateColumn(string column, bool isSearchCondition = f protected override void TranslateContainsOverArrayColumn(Expression source, Expression item) => throw new NotSupportedException("Unsupported Contains expression"); - protected override void TranslateContainsOverCapturedArray(Expression source, Expression item, object? value) + protected override void TranslateContainsOverParameterizedArray(Expression source, Expression item, object? value) { if (value is not IEnumerable elements) { @@ -88,17 +88,17 @@ protected override void TranslateContainsOverCapturedArray(Expression source, Ex this._sql.Append(')'); } - protected override void TranslateCapturedVariable(string name, object? capturedValue) + protected override void TranslateQueryParameter(string name, object? value) { // For null values, simply inline rather than parameterize; parameterized NULLs require setting NpgsqlDbType which is a bit more complicated, // plus in any case equality with NULL requires different SQL (x IS NULL rather than x = y) - if (capturedValue is null) + if (value is null) { this._sql.Append("NULL"); } else { - this._parameterValues.Add(capturedValue); + this._parameterValues.Add(value); // SQL Server parameters can't start with a digit (but underscore is OK). 
this._sql.Append("@_").Append(this._parameterIndex++); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs index 60c3c94bf688..a602b0542373 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteFilterTranslator.cs @@ -23,7 +23,7 @@ internal SqliteFilterTranslator(VectorStoreRecordModel model, LambdaExpression l protected override void TranslateContainsOverArrayColumn(Expression source, Expression item) => throw new NotSupportedException("Unsupported Contains expression"); - protected override void TranslateContainsOverCapturedArray(Expression source, Expression item, object? value) + protected override void TranslateContainsOverParameterizedArray(Expression source, Expression item, object? value) { if (value is not IEnumerable elements) { @@ -51,11 +51,11 @@ protected override void TranslateContainsOverCapturedArray(Expression source, Ex this._sql.Append(')'); } - protected override void TranslateCapturedVariable(string name, object? capturedValue) + protected override void TranslateQueryParameter(string name, object? value) { // For null values, simply inline rather than parameterize; parameterized NULLs require setting NpgsqlDbType which is a bit more complicated, // plus in any case equality with NULL requires different SQL (x IS NULL rather than x = y) - if (capturedValue is null) + if (value is null) { this._sql.Append("NULL"); } @@ -73,7 +73,7 @@ protected override void TranslateCapturedVariable(string name, object? capturedV } while (this._parameters.ContainsKey(name)); } - this._parameters.Add(name, capturedValue); + this._parameters.Add(name, value); this._sql.Append('@').Append(name); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs index 6e864ade70f6..87aa773617f3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateFilterTranslator.cs @@ -6,11 +6,10 @@ using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Linq.Expressions; -using System.Reflection; -using System.Runtime.CompilerServices; using System.Text; using System.Text.Json; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.ConnectorSupport.Filter; namespace Microsoft.SemanticKernel.Connectors.Weaviate; @@ -30,7 +29,10 @@ internal string Translate(LambdaExpression lambdaExpression, VectorStoreRecordMo Debug.Assert(lambdaExpression.Parameters.Count == 1); this._recordParameter = lambdaExpression.Parameters[0]; - this.Translate(lambdaExpression.Body); + var preprocessor = new FilterTranslationPreprocessor { InlineCapturedVariables = true }; + var preprocessedExpression = preprocessor.Visit(lambdaExpression.Body); + + this.Translate(preprocessedExpression); return this._filter.ToString(); } @@ -102,10 +104,15 @@ or ExpressionType.LessThan or ExpressionType.LessThanOrEqual private void TranslateEqualityComparison(BinaryExpression binary) { - if ((this.TryBindProperty(binary.Left, out var property) && TryGetConstant(binary.Right, out var value)) - || (this.TryBindProperty(binary.Right, out property) && TryGetConstant(binary.Left, out value))) + if (this.TryBindProperty(binary.Left, out var property) && binary.Right is ConstantExpression { Value: var 
rightConstant }) { - this.GenerateEqualityComparison(property.StorageName, value, binary.NodeType); + this.GenerateEqualityComparison(property.StorageName, rightConstant, binary.NodeType); + return; + } + + if (this.TryBindProperty(binary.Right, out property) && binary.Left is ConstantExpression { Value: var leftConstant }) + { + this.GenerateEqualityComparison(property.StorageName, leftConstant, binary.NodeType); return; } @@ -210,9 +217,7 @@ private void TranslateContains(Expression source, Expression item) { // Contains over array // { path: ["stringArrayPropName"], operator: ContainsAny, valueText: ["foo"] } - if (this.TryBindProperty(source, out var property) - && TryGetConstant(item, out var itemConstant) - && itemConstant is string stringConstant) + if (this.TryBindProperty(source, out var property) && item is ConstantExpression { Value: string stringConstant }) { this._filter .Append("{ path: [\"") @@ -271,25 +276,4 @@ private bool TryBindProperty(Expression expression, [NotNullWhen(true)] out Vect return true; } - - private static bool TryGetConstant(Expression expression, out object? constantValue) - { - switch (expression) - { - case ConstantExpression { Value: var v }: - constantValue = v; - return true; - - // This identifies compiler-generated closure types which contain captured variables. - case MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo } - when constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate) - && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true): - constantValue = fieldInfo.GetValue(constant.Value); - return true; - - default: - constantValue = null; - return false; - } - } } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/Filter/FilterTranslationPreprocessor.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/Filter/FilterTranslationPreprocessor.cs new file mode 100644 index 000000000000..23fa6d776ff4 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/Filter/FilterTranslationPreprocessor.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Linq.Expressions; +using System.Reflection; +using System.Runtime.CompilerServices; + +namespace Microsoft.Extensions.VectorData.ConnectorSupport.Filter; + +/// +/// A processor for user-provided filter expressions which performs various common transformations before actual translation takes place. +/// This is an internal support type meant for use by connectors only, and not for use by applications. +/// +[Experimental("MEVD9001")] +public class FilterTranslationPreprocessor : ExpressionVisitor +{ + /// + /// Whether to inline captured variables in the filter expression (when the database doesn't support parameters). + /// + public bool InlineCapturedVariables { get; init; } + + /// + /// Whether to transform captured variables in the filter expression to (when the database supports parameters). + /// + public bool TransformCapturedVariablesToQueryParameterExpressions { get; init; } + + /// + protected override Expression VisitMember(MemberExpression node) + { + // This identifies compiler-generated closure types which contain captured variables. + // Some databases - mostly relational ones - support out-of-band parameters which can be referenced via placeholders + // from the query itself. 
For those databases, we transform the captured variable to QueryParameterExpression (this simplifies things for those
+        // connectors, and centralizes the pattern matching in a single place).
+        // For databases which don't support parameters, we simply inline the captured variable as a constant in the tree, so that translators don't
+        // even need to be aware of the captured variable.
+        if (node is MemberExpression { Expression: ConstantExpression constant, Member: FieldInfo fieldInfo }
+            && constant.Type.Attributes.HasFlag(TypeAttributes.NestedPrivate)
+            && Attribute.IsDefined(constant.Type, typeof(CompilerGeneratedAttribute), inherit: true))
+        {
+            return (this.InlineCapturedVariables, this.TransformCapturedVariablesToQueryParameterExpressions) switch
+            {
+                (true, false) => Expression.Constant(fieldInfo.GetValue(constant.Value), node.Type),
+                (false, true) => new QueryParameterExpression(fieldInfo.Name, fieldInfo.GetValue(constant.Value), node.Type),
+
+                (true, true) => throw new InvalidOperationException("InlineCapturedVariables and TransformCapturedVariablesToQueryParameterExpressions cannot both be true."),
+                (false, false) => base.VisitMember(node)
+            };
+        }
+
+        return base.VisitMember(node);
+    }
+}
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/Filter/QueryParameterExpression.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/Filter/QueryParameterExpression.cs
new file mode 100644
index 000000000000..caa86b665b77
--- /dev/null
+++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/Filter/QueryParameterExpression.cs
@@ -0,0 +1,33 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Diagnostics.CodeAnalysis;
+using System.Linq.Expressions;
+
+namespace Microsoft.Extensions.VectorData.ConnectorSupport.Filter;
+
+/// 
+/// An expression representing a query parameter (captured variable) in the filter expression.
+/// 
+[Experimental("MEVD9001")]
+public class QueryParameterExpression(string name, object? value, Type type) : Expression
+{
+    /// 
+    /// The name of the parameter.
+    /// 
+    public string Name { get; } = name;
+
+    /// 
+    /// The value of the parameter.
+    /// 
+    public object?
Value { get; } = value; + + /// + public override ExpressionType NodeType => ExpressionType.Extension; + + /// + public override Type Type => type; + + /// + protected override Expression VisitChildren(ExpressionVisitor visitor) => this; +} From aada9a24c5860af553e874aedae184fe5fd34101 Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Sun, 27 Apr 2025 22:27:31 +0200 Subject: [PATCH 54/63] .Net: Fix filtering GetAsync for Cosmos NoSQL (#11754) --- .../AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs index 240c40bf9db0..258e3a4755ac 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.cs @@ -139,7 +139,7 @@ internal static QueryDefinition BuildSearchQuery( projectionProperties = projectionProperties.Where(p => p is not VectorStoreRecordVectorPropertyModel); } - var fieldsArgument = projectionProperties.Select(field => $"{tableVariableName}.{field}"); + var fieldsArgument = projectionProperties.Select(field => $"{tableVariableName}.{field.StorageName}"); var selectClauseArguments = string.Join(SelectClauseDelimiter, [.. fieldsArgument]); From 39254cec35d0a6673b65203a4950cb1f9330725d Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Mon, 28 Apr 2025 16:51:15 +0200 Subject: [PATCH 55/63] .Net: Integrate IEmbeddingGenerator (#11682) Adds support for using IEmbeddingGenerator to generate embeddings from arbitrary user property types. 
Closes #10492 Closes #11527 --- dotnet/SK-dotnet.sln | 9 + .../Caching/SemanticCachingWithFilters.cs | 2 +- .../GenerateTextEmbeddingAttribute.cs | 39 -- .../TextEmbeddingVectorStore.cs | 65 -- .../TextEmbeddingVectorStoreExtensions.cs | 41 -- ...extEmbeddingVectorStoreRecordCollection.cs | 208 ------- .../Memory/VectorStore_EmbeddingGeneration.cs | 119 ---- .../Memory/VectorStore_Langchain_Interop.cs | 2 +- ...torStore_VectorSearch_MultiStore_Common.cs | 6 +- .../VectorStore_VectorSearch_MultiVector.cs | 4 +- .../Memory/VectorStore_VectorSearch_Paging.cs | 2 +- .../Memory/VectorStore_VectorSearch_Simple.cs | 6 +- .../Memory/VolatileVectorStore_LoadData.cs | 4 +- .../Optimization/FrugalGPTWithFilters.cs | 2 +- .../PluginSelectionWithFilters.cs | 2 +- .../Concepts/Search/VectorStore_TextSearch.cs | 8 +- .../MCPServer/Program.cs | 2 +- dotnet/samples/Demos/OnnxSimpleRAG/Program.cs | 3 + .../Step4_Search_With_VectorStore.cs | 29 +- .../Step2_Vector_Search.cs | 4 +- .../Step5_Use_DynamicDataModel.cs | 4 +- ...zureAISearchDynamicDataModelMapperTests.cs | 5 +- ...ureAISearchKernelBuilderExtensionsTests.cs | 2 +- ...ISearchServiceCollectionExtensionsTests.cs | 2 +- ...osDBMongoDBKernelBuilderExtensionsTests.cs | 2 +- ...MongoDBServiceCollectionExtensionsTests.cs | 2 +- ...VectorStoreCollectionSearchMappingTests.cs | 6 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 22 +- ...osmosDBNoSQLDynamicDataModelMapperTests.cs | 34 +- ...smosDBNoSQLKernelBuilderExtensionsTests.cs | 2 +- ...DBNoSQLServiceCollectionExtensionsTests.cs | 2 +- ...LVectorStoreCollectionQueryBuilderTests.cs | 12 +- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 6 +- ...smosDBNoSQLVectorStoreRecordMapperTests.cs | 23 +- .../InMemoryKernelBuilderExtensionsTests.cs | 2 +- ...nMemoryServiceCollectionExtensionsTests.cs | 2 +- ...nMemoryVectorStoreRecordCollectionTests.cs | 557 ------------------ .../InMemoryVectorStoreTests.cs | 19 - .../AzureAISearchConstants.cs | 45 -- .../AzureAISearchKernelBuilderExtensions.cs | 6 +- .../AzureAISearchModelBuilder.cs | 64 ++ ...zureAISearchServiceCollectionExtensions.cs | 76 +-- .../AzureAISearchVectorStore.cs | 3 +- .../AzureAISearchVectorStoreOptions.cs | 6 + ...zureAISearchVectorStoreRecordCollection.cs | 81 ++- ...earchVectorStoreRecordCollectionOptions.cs | 6 + ...eCosmosDBMongoDBKernelBuilderExtensions.cs | 4 +- ...mosDBMongoDBServiceCollectionExtensions.cs | 33 +- .../AzureCosmosDBMongoDBVectorStore.cs | 6 +- .../AzureCosmosDBMongoDBVectorStoreOptions.cs | 6 + ...mosDBMongoDBVectorStoreRecordCollection.cs | 140 ++++- ...ngoDBVectorStoreRecordCollectionOptions.cs | 6 + ...zureCosmosDBNoSQLDynamicDataModelMapper.cs | 70 ++- ...ureCosmosDBNoSQLKernelBuilderExtensions.cs | 4 +- ...osmosDBNoSQLServiceCollectionExtensions.cs | 33 +- .../AzureCosmosDBNoSQLVectorStore.cs | 3 +- .../AzureCosmosDBNoSQLVectorStoreOptions.cs | 6 + ...osmosDBNoSQLVectorStoreRecordCollection.cs | 181 +++++- ...NoSQLVectorStoreRecordCollectionOptions.cs | 6 + ...ureCosmosDBNoSQLVectorStoreRecordMapper.cs | 31 +- ...ureCosmosDBNoSqlVectorStoreModelBuilder.cs | 36 +- .../ICosmosNoSQLMapper.cs | 8 +- .../InMemoryKernelBuilderExtensions.cs | 4 +- .../InMemoryModelBuilder.cs | 22 + .../InMemoryServiceCollectionExtensions.cs | 27 +- .../InMemoryVectorRecordWrapper.cs | 12 + .../InMemoryVectorStore.cs | 26 +- ...emoryVectorStoreCollectionSearchMapping.cs | 14 +- .../InMemoryVectorStoreOptions.cs | 16 + .../InMemoryVectorStoreRecordCollection.cs | 250 ++++++-- ...emoryVectorStoreRecordCollectionOptions.cs | 6 + 
.../MongoDBServiceCollectionExtensions.cs | 33 +- .../MongoDBVectorStore.cs | 6 +- .../MongoDBVectorStoreOptions.cs | 6 + .../MongoDBVectorStoreRecordCollection.cs | 152 ++++- ...ngoDBVectorStoreRecordCollectionOptions.cs | 6 + .../PineconeKernelBuilderExtensions.cs | 4 +- .../PineconeServiceCollectionExtensions.cs | 47 +- .../PineconeVectorStore.cs | 6 +- .../PineconeVectorStoreOptions.cs | 6 + .../PineconeVectorStoreRecordCollection.cs | 138 ++++- ...econeVectorStoreRecordCollectionOptions.cs | 6 + .../PineconeVectorStoreRecordFieldMapping.cs | 51 +- .../PineconeVectorStoreRecordMapper.cs | 12 +- .../PostgresConstants.cs | 6 +- .../PostgresServiceCollectionExtensions.cs | 39 +- .../PostgresVectorStore.cs | 7 +- .../PostgresVectorStoreOptions.cs | 6 + .../PostgresVectorStoreRecordCollection.cs | 181 +++++- ...tgresVectorStoreRecordCollectionOptions.cs | 6 + .../PostgresVectorStoreRecordMapper.cs | 23 +- ...ostgresVectorStoreRecordPropertyMapping.cs | 2 +- .../QdrantKernelBuilderExtensions.cs | 4 +- .../QdrantServiceCollectionExtensions.cs | 41 +- .../QdrantVectorStore.cs | 3 +- .../QdrantVectorStoreOptions.cs | 6 + .../QdrantVectorStoreRecordCollection.cs | 168 ++++-- ...drantVectorStoreRecordCollectionOptions.cs | 6 + .../QdrantVectorStoreRecordMapper.cs | 46 +- .../IRedisJsonMapper.cs | 9 +- ...RedisHashSetVectorStoreRecordCollection.cs | 155 ++++- ...shSetVectorStoreRecordCollectionOptions.cs | 6 + .../RedisHashSetVectorStoreRecordMapper.cs | 18 +- .../RedisJsonDynamicDataModelMapper.cs | 80 ++- .../RedisJsonVectorStoreRecordCollection.cs | 137 ++++- ...sJsonVectorStoreRecordCollectionOptions.cs | 6 + .../RedisJsonVectorStoreRecordMapper.cs | 54 +- .../RedisServiceCollectionExtensions.cs | 51 +- .../RedisVectorStore.cs | 12 +- ...RedisVectorStoreCollectionCreateMapping.cs | 4 +- .../RedisVectorStoreOptions.cs | 6 + .../RedisVectorStoreRecordFieldMapping.cs | 63 ++ .../RecordMapper.cs | 18 +- .../SqlServerConstants.cs | 75 ++- .../SqlServerVectorStore.cs | 5 +- .../SqlServerVectorStoreOptions.cs | 7 + .../SqlServerVectorStoreRecordCollection.cs | 167 +++++- ...erverVectorStoreRecordCollectionOptions.cs | 6 + .../SqliteConstants.cs | 62 +- .../SqliteServiceCollectionExtensions.cs | 16 +- .../SqliteVectorStore.cs | 3 +- .../SqliteVectorStoreOptions.cs | 6 + .../SqliteVectorStoreRecordCollection.cs | 175 +++++- ...qliteVectorStoreRecordCollectionOptions.cs | 6 + .../SqliteVectorStoreRecordMapper.cs | 24 +- .../IWeaviateMapper.cs | 9 +- .../WeaviateDynamicDataModelMapper.cs | 38 +- .../WeaviateKernelBuilderExtensions.cs | 2 +- .../WeaviateServiceCollectionExtensions.cs | 19 +- .../WeaviateVectorStore.cs | 3 +- .../WeaviateVectorStoreOptions.cs | 6 + .../WeaviateVectorStoreRecordCollection.cs | 171 +++++- ...viateVectorStoreRecordCollectionOptions.cs | 6 + .../WeaviateVectorStoreRecordMapper.cs | 50 +- .../MongoDBDynamicDataModelMapperTests.cs | 8 +- ...MongoDBServiceCollectionExtensionsTests.cs | 2 +- ...VectorStoreCollectionSearchMappingTests.cs | 3 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 22 +- .../MongoDBVectorStoreRecordMapperTests.cs | 4 +- .../PineconeKernelBuilderExtensionsTests.cs | 2 +- ...ineconeServiceCollectionExtensionsTests.cs | 2 +- ...ostgresServiceCollectionExtensionsTests.cs | 2 +- ...resVectorStoreCollectionSqlBuilderTests.cs | 6 +- .../PostgresVectorStoreRecordMapperTests.cs | 6 +- .../QdrantKernelBuilderExtensionsTests.cs | 2 +- .../QdrantServiceCollectionExtensionsTests.cs | 2 +- ...VectorStoreCollectionSearchMappingTests.cs | 6 +- 
.../QdrantVectorStoreRecordMapperTests.cs | 24 +- ...RedisHashSetDynamicDataModelMapperTests.cs | 8 +- ...edisHashSetVectorStoreRecordMapperTests.cs | 4 +- .../RedisJsonDynamicDataModelMapperTests.cs | 9 +- .../RedisJsonVectorStoreRecordMapperTests.cs | 12 +- ...VectorStoreCollectionCreateMappingTests.cs | 5 +- ...VectorStoreCollectionSearchMappingTests.cs | 3 +- .../SqliteServiceCollectionExtensionsTests.cs | 4 +- ...ectorStoreCollectionCommandBuilderTests.cs | 3 +- .../SqliteVectorStoreRecordMapperTests.cs | 6 +- .../WeaviateDynamicDataModelMapperTests.cs | 17 +- .../WeaviateKernelBuilderExtensionsTests.cs | 2 +- ...eaviateServiceCollectionExtensionsTests.cs | 2 +- ...VectorStoreCollectionCreateMappingTests.cs | 16 +- ...rStoreRecordCollectionQueryBuilderTests.cs | 3 +- ...eaviateVectorStoreRecordCollectionTests.cs | 14 +- .../WeaviateVectorStoreRecordMapperTests.cs | 3 +- dotnet/src/Connectors/Directory.Build.props | 1 + .../VectorData.Abstractions/.editorconfig | 3 + .../VectorData.Abstractions/AssemblyInfo.cs | 1 - .../VectorStoreRecordJsonModelBuilder.cs | 9 +- .../VectorStoreRecordModel.cs | 27 +- .../VectorStoreRecordModelBuilder.cs | 119 +++- .../VectorStoreRecordModelBuildingOptions.cs | 2 +- .../VectorStoreRecordVectorPropertyModel.cs | 147 +++++ ...rStoreRecordVectorPropertyModel{TInput}.cs | 83 +++ .../Properties/AssemblyInfo.cs | 3 + .../Properties/VectorDataStrings.Designer.cs | 121 ++++ .../Properties/VectorDataStrings.resx | 45 ++ .../VectorStoreRecordDefinition.cs | 6 + .../VectorStoreRecordVectorProperty.cs | 30 +- ...VectorStoreRecordVectorProperty{TInput}.cs | 42 ++ .../VectorData.Abstractions.csproj | 20 + .../VectorSearch/IVectorSearch.cs | 71 +++ .../VectorSearch/IVectorizableTextSearch.cs | 1 + .../VectorSearch/IVectorizedSearch.cs | 19 +- ...xtensions.cs => VectorSearchExtensions.cs} | 14 +- .../VectorizableTextSearchExtensions.cs | 33 -- .../IVectorStoreRecordCollection.cs | 4 +- .../VectorData.UnitTests.csproj | 45 ++ .../VectorStoreRecordModelBuilderTests.cs | 335 +++++++++++ .../AzureAISearchTextSearchTests.cs | 4 + .../BaseVectorStoreRecordCollectionTests.cs | 2 +- .../InMemoryVectorStoreTextSearchTests.cs | 3 + .../Memory/Qdrant/QdrantTextSearchTests.cs | 4 + .../SqliteServiceCollectionExtensionsTests.cs | 4 +- .../Data/BaseVectorStoreTextSearchTests.cs | 28 - .../Memory/MongoDB/IMongoDBMapper.cs | 8 +- .../MongoDB/MongoDBDynamicDataModelMapper.cs | 65 +- .../MongoDB/MongoDBVectorStoreRecordMapper.cs | 43 +- .../Search/MockVectorizableTextSearch.cs | 19 +- .../Search/VectorStoreTextSearchTests.cs | 2 +- .../CompatibilitySuppressions.xml | 60 ++ .../TextSearchServiceCollectionExtensions.cs | 47 +- .../Data/TextSearch/VectorStoreTextSearch.cs | 43 +- ...tSearchServiceCollectionExtensionsTests.cs | 127 ++-- .../Data/VectorStoreTextSearchTestBase.cs | 114 ++-- .../Data/VectorStoreTextSearchTests.cs | 45 +- .../AzureAISearchNoDataConformanceTests.cs | 2 +- .../AzureAISearchNoVectorConformanceTests.cs | 2 +- .../Filter/AzureAISearchBasicFilterTests.cs | 2 +- .../Filter/AzureAISearchBasicQueryTests.cs | 2 +- ...earchKeywordVectorizedHybridSearchTests.cs | 4 +- .../CosmosMongoEmbeddingGenerationTests.cs | 38 ++ .../CRUD/CosmosNoSQLNoDataConformanceTests.cs | 2 +- .../CosmosNoSQLNoVectorConformanceTests.cs | 2 +- .../CosmosNoSQLEmbeddingGenerationTests.cs | 38 ++ .../Filter/CosmosNoSQLBasicFilterTests.cs | 2 +- .../Filter/CosmosNoSQLBasicQueryTests.cs | 2 +- ...NoSQLKeywordVectorizedHybridSearchTests.cs | 4 +- .../Support/CosmosNoSQLFixture.cs | 2 +- 
.../Support/CosmosNoSQLTestStore.cs | 6 +- .../Directory.Build.props | 2 + .../Filter/InMemoryBasicQueryTests.cs | 2 +- .../InMemoryEmbeddingGenerationTests.cs | 53 ++ .../Support/InMemoryTestStore.cs | 9 +- .../MongoDBEmbeddingGenerationTests.cs | 38 ++ .../Filter/PineconeBasicFilterTests.cs | 2 +- .../Filter/PineconeBasicQueryTests.cs | 2 +- .../PineconeEmbeddingGenerationTests.cs | 54 ++ .../Support/PineconeTestStore.cs | 3 + .../PostgresEmbeddingGenerationTests.cs | 38 ++ .../PostgresIntegrationTests.csproj | 3 + ...ectorizedHybridSearchTests_NamedVectors.cs | 6 - ...torizedHybridSearchTests_UnnamedVectors.cs | 6 - .../QdrantEmbeddingGenerationTests.cs | 38 ++ .../Support/QdrantTestStore.cs | 5 +- .../Filter/RedisBasicFilterTests.cs | 8 +- .../Filter/RedisBasicQueryTests.cs | 8 +- .../RedisHashSetEmbeddingGenerationTests.cs | 41 ++ .../RedisJsonEmbeddingGenerationTests.cs | 38 ++ .../Filter/SqlServerBasicFilterTests.cs | 4 +- .../Filter/SqlServerBasicQueryTests.cs | 4 +- .../SqlServerCommandBuilderTests.cs | 3 +- .../SqlServerEmbeddingGenerationTests.cs | 33 ++ .../SqlServerVectorStoreTests.cs | 4 +- .../Support/SqlServerTestStore.cs | 16 +- .../Filter/SqliteBasicFilterTests.cs | 4 +- .../Filter/SqliteBasicQueryTests.cs | 4 +- .../SqliteEmbeddingGenerationTests.cs | 34 ++ .../Support/SqliteTestStore.cs | 9 +- .../CRUD/NoDataConformanceTests.cs | 2 +- .../CRUD/NoVectorConformanceTests.cs | 2 +- .../EmbeddingGenerationTests.cs | 516 ++++++++++++++++ .../Filter/BasicFilterTests.cs | 8 +- .../Filter/BasicQueryTests.cs | 2 +- ...rdVectorizedHybridSearchComplianceTests.cs | 8 +- .../Support/DynamicDataModelFixture.cs | 2 +- .../Support/SimpleModelFixture.cs | 2 +- .../Support/TestStore.cs | 15 +- .../Support/VectorStoreCollectionFixture.cs | 4 +- .../VectorDataIntegrationTests.csproj | 2 + ...orSearchDistanceFunctionComplianceTests.cs | 6 +- .../CRUD/WeaviateNoDataConformanceTests.cs | 4 +- .../CRUD/WeaviateNoVectorConformanceTests.cs | 2 +- .../Filter/WeaviateBasicFilterTests.cs | 2 - .../Filter/WeaviateBasicQueryTests.cs | 2 - ...viateKeywordVectorizedHybridSearchTests.cs | 16 +- .../WeaviateDynamicDataModelFixture.cs | 2 +- .../Support/WeaviateSimpleModelFixture.cs | 4 +- .../Support/WeaviateTestStore.cs | 5 + .../WeaviateEmbeddingGenerationTests.cs | 38 ++ 269 files changed, 5729 insertions(+), 2485 deletions(-) delete mode 100644 dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/GenerateTextEmbeddingAttribute.cs delete mode 100644 dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs delete mode 100644 dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreExtensions.cs delete mode 100644 dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs delete mode 100644 dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs delete mode 100644 dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchModelBuilder.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryModelBuilder.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorRecordWrapper.cs create mode 100644 dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreOptions.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/.editorconfig delete mode 100644 
dotnet/src/Connectors/VectorData.Abstractions/AssemblyInfo.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel{TInput}.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/Properties/AssemblyInfo.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/Properties/VectorDataStrings.Designer.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/Properties/VectorDataStrings.resx create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty{TInput}.cs create mode 100644 dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorSearch.cs rename dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/{VectorizedSearchExtensions.cs => VectorSearchExtensions.cs} (70%) delete mode 100644 dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorizableTextSearchExtensions.cs create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj create mode 100644 dotnet/src/Connectors/VectorData.UnitTests/VectorStoreRecordModelBuilderTests.cs create mode 100644 dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CosmosMongoEmbeddingGenerationTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CosmosNoSQLEmbeddingGenerationTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/InMemoryEmbeddingGenerationTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/MongoDBEmbeddingGenerationTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/PineconeEmbeddingGenerationTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/PostgresEmbeddingGenerationTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/QdrantEmbeddingGenerationTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/RedisHashSetEmbeddingGenerationTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/RedisJsonEmbeddingGenerationTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerEmbeddingGenerationTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/SqliteEmbeddingGenerationTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/EmbeddingGenerationTests.cs create mode 100644 dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/WeaviateEmbeddingGenerationTests.cs diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index 09c1822ae672..6517eeca2b29 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -550,6 +550,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Runtime.InProcess", "src\Ag EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Runtime.InProcess.Tests", "src\Agents\Runtime\InProcess.Tests\Runtime.InProcess.Tests.csproj", "{DA6B4ED4-ED0B-D25C-889C-9F940E714891}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "VectorData.UnitTests", "src\Connectors\VectorData.UnitTests\VectorData.UnitTests.csproj", "{AAC7B5E8-CC4E-49D0-AF6A-2B4F7B43BD84}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -1511,6 +1513,12 @@ Global 
{DA6B4ED4-ED0B-D25C-889C-9F940E714891}.Publish|Any CPU.Build.0 = Release|Any CPU {DA6B4ED4-ED0B-D25C-889C-9F940E714891}.Release|Any CPU.ActiveCfg = Release|Any CPU {DA6B4ED4-ED0B-D25C-889C-9F940E714891}.Release|Any CPU.Build.0 = Release|Any CPU + {AAC7B5E8-CC4E-49D0-AF6A-2B4F7B43BD84}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {AAC7B5E8-CC4E-49D0-AF6A-2B4F7B43BD84}.Debug|Any CPU.Build.0 = Debug|Any CPU + {AAC7B5E8-CC4E-49D0-AF6A-2B4F7B43BD84}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {AAC7B5E8-CC4E-49D0-AF6A-2B4F7B43BD84}.Publish|Any CPU.Build.0 = Debug|Any CPU + {AAC7B5E8-CC4E-49D0-AF6A-2B4F7B43BD84}.Release|Any CPU.ActiveCfg = Release|Any CPU + {AAC7B5E8-CC4E-49D0-AF6A-2B4F7B43BD84}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -1716,6 +1724,7 @@ Global {A4F05541-7D23-A5A9-033D-382F1E13D0FE} = {A70ED5A7-F8E1-4A57-9455-3C05989542DA} {CCC909E4-5269-A31E-0BFD-4863B4B29BBB} = {A70ED5A7-F8E1-4A57-9455-3C05989542DA} {DA6B4ED4-ED0B-D25C-889C-9F940E714891} = {A70ED5A7-F8E1-4A57-9455-3C05989542DA} + {AAC7B5E8-CC4E-49D0-AF6A-2B4F7B43BD84} = {5A7028A7-4DDF-4E4F-84A9-37CE8F8D7E89} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83} diff --git a/dotnet/samples/Concepts/Caching/SemanticCachingWithFilters.cs b/dotnet/samples/Concepts/Caching/SemanticCachingWithFilters.cs index 5629f6cfb8a7..4d9eb15d36a2 100644 --- a/dotnet/samples/Concepts/Caching/SemanticCachingWithFilters.cs +++ b/dotnet/samples/Concepts/Caching/SemanticCachingWithFilters.cs @@ -199,7 +199,7 @@ public async Task OnPromptRenderAsync(PromptRenderContext context, Func -/// An attribute that can be used for an embedding property to indicate that it should -/// be generated from one or more text properties located on the same class. -/// -/// -/// This class is part of the sample. -/// -[AttributeUsage(AttributeTargets.Property, AllowMultiple = false, Inherited = true)] -public sealed class GenerateTextEmbeddingAttribute : Attribute -{ - /// - /// Initializes a new instance of the class. - /// - /// The name of the property that the embedding should be generated from. -#pragma warning disable CA1019 // Define accessors for attribute arguments - public GenerateTextEmbeddingAttribute(string sourcePropertyName) -#pragma warning restore CA1019 // Define accessors for attribute arguments - { - this.SourcePropertyNames = [sourcePropertyName]; - } - - /// - /// Initializes a new instance of the class. - /// - /// The names of the properties that the embedding should be generated from. - public GenerateTextEmbeddingAttribute(string[] sourcePropertyNames) - { - this.SourcePropertyNames = sourcePropertyNames; - } - - /// - /// Gets the name of the property to use as the source for generating the embedding. - /// - public string[] SourcePropertyNames { get; } -} diff --git a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs deleted file mode 100644 index 545a4f9254c0..000000000000 --- a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStore.cs +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Embeddings; - -namespace Memory.VectorStoreEmbeddingGeneration; - -/// -/// Decorator for a that generates embeddings for records on upsert. -/// -/// -/// This class is part of the sample. -/// -public class TextEmbeddingVectorStore : IVectorStore -{ - /// The decorated . - private readonly IVectorStore _decoratedVectorStore; - - /// The service to use for generating the embeddings. - private readonly ITextEmbeddingGenerationService _textEmbeddingGenerationService; - - /// - /// Initializes a new instance of the class. - /// - /// The decorated . - /// The service to use for generating the embeddings. - public TextEmbeddingVectorStore(IVectorStore decoratedVectorStore, ITextEmbeddingGenerationService textEmbeddingGenerationService) - { - // Verify & Assign. - this._decoratedVectorStore = decoratedVectorStore ?? throw new ArgumentNullException(nameof(decoratedVectorStore)); - this._textEmbeddingGenerationService = textEmbeddingGenerationService ?? throw new ArgumentNullException(nameof(textEmbeddingGenerationService)); - } - - /// - public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) - where TKey : notnull - where TRecord : notnull - { - var collection = this._decoratedVectorStore.GetCollection(name, vectorStoreRecordDefinition); - var embeddingStore = new TextEmbeddingVectorStoreRecordCollection(collection, this._textEmbeddingGenerationService); - return embeddingStore; - } - - /// - public Task CollectionExistsAsync(string name, CancellationToken cancellationToken = default) => _decoratedVectorStore.CollectionExistsAsync(name, cancellationToken); - - /// - public Task DeleteCollectionAsync(string name, CancellationToken cancellationToken = default) => _decoratedVectorStore.DeleteCollectionAsync(name, cancellationToken); - - /// - public object? GetService(Type serviceType, object? serviceKey = null) - { - ArgumentNullException.ThrowIfNull(serviceType); - - return - serviceKey is null && serviceType.IsInstanceOfType(this) ? this : - this._decoratedVectorStore.GetService(serviceType, serviceKey); - } - - /// - public IAsyncEnumerable ListCollectionNamesAsync(CancellationToken cancellationToken = default) - { - return this._decoratedVectorStore.ListCollectionNamesAsync(cancellationToken); - } -} diff --git a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreExtensions.cs b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreExtensions.cs deleted file mode 100644 index edda917b99cd..000000000000 --- a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreExtensions.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Embeddings; - -namespace Memory.VectorStoreEmbeddingGeneration; - -/// -/// Contains extension methods to help add text embedding generation to a or -/// -/// -/// This class is part of the sample. -/// -public static class TextEmbeddingVectorStoreExtensions -{ - /// - /// Add text embedding generation to a . - /// - /// The to add text embedding generation to. - /// The service to use for generating text embeddings. - /// The with text embedding added. 
- public static IVectorStore UseTextEmbeddingGeneration(this IVectorStore vectorStore, ITextEmbeddingGenerationService textEmbeddingGenerationService) - { - return new TextEmbeddingVectorStore(vectorStore, textEmbeddingGenerationService); - } - - /// - /// Add text embedding generation to a . - /// - /// The to add text embedding generation to. - /// The service to use for generating text embeddings. - /// The data type of the record key. - /// The record data model to use for adding, updating and retrieving data from the store. - /// The with text embedding added. - public static IVectorStoreRecordCollection UseTextEmbeddingGeneration(this IVectorStoreRecordCollection vectorStoreRecordCollection, ITextEmbeddingGenerationService textEmbeddingGenerationService) - where TKey : notnull - where TRecord : notnull - { - return new TextEmbeddingVectorStoreRecordCollection(vectorStoreRecordCollection, textEmbeddingGenerationService); - } -} diff --git a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs b/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs deleted file mode 100644 index eaf346e90020..000000000000 --- a/dotnet/samples/Concepts/Memory/VectorStoreEmbeddingGeneration/TextEmbeddingVectorStoreRecordCollection.cs +++ /dev/null @@ -1,208 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Linq.Expressions; -using System.Reflection; -using System.Runtime.CompilerServices; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Embeddings; - -namespace Memory.VectorStoreEmbeddingGeneration; - -/// -/// Decorator for a that generates embeddings for records on upsert and when using . -/// -/// -/// This class is part of the sample. -/// -/// The data type of the record key. -/// The record data model to use for adding, updating and retrieving data from the store. -#pragma warning disable CA1711 // Identifiers should not have incorrect suffix -public class TextEmbeddingVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorizableTextSearch -#pragma warning restore CA1711 // Identifiers should not have incorrect suffix - where TKey : notnull - where TRecord : notnull -{ - /// The decorated . - private readonly IVectorStoreRecordCollection _decoratedVectorStoreRecordCollection; - - /// The service to use for generating the embeddings. - private readonly ITextEmbeddingGenerationService _textEmbeddingGenerationService; - - /// Optional configuration options for this class. - private readonly IEnumerable<(PropertyInfo EmbeddingPropertyInfo, IList SourcePropertiesInfo)> _embeddingPropertiesWithSourceProperties; - - /// - /// Initializes a new instance of the class. - /// - /// The decorated . - /// The service to use for generating the embeddings. - /// Thrown when embedding properties are referencing data source properties that do not exist. - /// Thrown when required parameters are null. - public TextEmbeddingVectorStoreRecordCollection(IVectorStoreRecordCollection decoratedVectorStoreRecordCollection, ITextEmbeddingGenerationService textEmbeddingGenerationService) - { - // Assign. - this._decoratedVectorStoreRecordCollection = decoratedVectorStoreRecordCollection ?? throw new ArgumentNullException(nameof(decoratedVectorStoreRecordCollection)); - this._textEmbeddingGenerationService = textEmbeddingGenerationService ?? 
throw new ArgumentNullException(nameof(textEmbeddingGenerationService)); - - // Find all the embedding properties to generate embeddings for. - this._embeddingPropertiesWithSourceProperties = FindDataPropertiesWithEmbeddingProperties(typeof(TRecord)); - } - - /// - public string Name => this._decoratedVectorStoreRecordCollection.Name; - - /// - public Task CollectionExistsAsync(CancellationToken cancellationToken = default) - { - return this._decoratedVectorStoreRecordCollection.CollectionExistsAsync(cancellationToken); - } - - /// - public Task CreateCollectionAsync(CancellationToken cancellationToken = default) - { - return this._decoratedVectorStoreRecordCollection.CreateCollectionAsync(cancellationToken); - } - - /// - public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) - { - if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) - { - await this.CreateCollectionAsync(cancellationToken).ConfigureAwait(false); - } - } - - /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) - { - return this._decoratedVectorStoreRecordCollection.DeleteCollectionAsync(cancellationToken); - } - - /// - public Task DeleteAsync(TKey key, CancellationToken cancellationToken = default) - { - return this._decoratedVectorStoreRecordCollection.DeleteAsync(key, cancellationToken); - } - - /// - public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) - { - return this._decoratedVectorStoreRecordCollection.DeleteAsync(keys, cancellationToken); - } - - /// - public Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) - { - return this._decoratedVectorStoreRecordCollection.GetAsync(key, options, cancellationToken); - } - - /// - public IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) - { - return this._decoratedVectorStoreRecordCollection.GetAsync(keys, options, cancellationToken); - } - - /// - public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) - { - var recordWithEmbeddings = await this.AddEmbeddingsAsync(record, cancellationToken).ConfigureAwait(false); - return await this._decoratedVectorStoreRecordCollection.UpsertAsync(recordWithEmbeddings, cancellationToken).ConfigureAwait(false); - } - - /// - public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) - { - var recordWithEmbeddingsTasks = records.Select(r => this.AddEmbeddingsAsync(r, cancellationToken)); - var recordWithEmbeddings = await Task.WhenAll(recordWithEmbeddingsTasks).ConfigureAwait(false); - return await this._decoratedVectorStoreRecordCollection.UpsertAsync(recordWithEmbeddings, cancellationToken); - } - - /// - public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) - { - return this._decoratedVectorStoreRecordCollection.VectorizedSearchAsync(vector, top, options, cancellationToken); - } - - /// - public IAsyncEnumerable GetAsync(Expression> filter, int top, GetFilteredRecordOptions? options = null, CancellationToken cancellationToken = default) - => this._decoratedVectorStoreRecordCollection.GetAsync(filter, top, options, cancellationToken); - - /// - public async IAsyncEnumerable> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var embeddingValue = await this._textEmbeddingGenerationService.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); - await foreach (var result in this.VectorizedSearchAsync(embeddingValue, top, options, cancellationToken)) - { - yield return result; - } - } - - /// - public object? GetService(Type serviceType, object? serviceKey = null) - { - ArgumentNullException.ThrowIfNull(serviceType); - - return - serviceKey is null && serviceType.IsInstanceOfType(this) ? this : - this._decoratedVectorStoreRecordCollection.GetService(serviceType, serviceKey); - } - - /// - /// Generate and add embeddings for each embedding field that has a on the provided record. - /// - /// The record to generate embeddings for. - /// The to monitor for cancellation requests. - /// The record with embeddings added. - private async Task AddEmbeddingsAsync(TRecord record, CancellationToken cancellationToken) - { - foreach (var (embeddingPropertyInfo, sourcePropertiesInfo) in this._embeddingPropertiesWithSourceProperties) - { - var sourceValues = sourcePropertiesInfo.Select(x => x.GetValue(record)).Cast().Where(x => !string.IsNullOrWhiteSpace(x)); - var sourceString = string.Join("\n", sourceValues); - - var embeddingValue = await this._textEmbeddingGenerationService.GenerateEmbeddingAsync(sourceString, cancellationToken: cancellationToken).ConfigureAwait(false); - embeddingPropertyInfo.SetValue(record, embeddingValue); - } - - return record; - } - - /// - /// Get the list of properties with from the data model. - /// - /// The type of the data model to find - /// The list of properties with with the properties from which the embedding can be generated. - private static IEnumerable<(PropertyInfo EmbeddingPropertyInfo, IList SourcePropertiesInfo)> FindDataPropertiesWithEmbeddingProperties(Type dataModelType) - { - var allProperties = dataModelType.GetProperties(); - var propertiesDictionary = allProperties.ToDictionary(p => p.Name); - - // Loop through all the properties to find the ones that have the GenerateTextEmbeddingAttribute. - foreach (var property in allProperties) - { - var attribute = property.GetCustomAttribute(); - if (attribute is not null) - { - // Find the source properties that the embedding should be generated from. - var sourcePropertiesInfo = new List(); - foreach (var sourcePropertyName in attribute.SourcePropertyNames) - { - if (!propertiesDictionary.TryGetValue(sourcePropertyName, out var sourcePropertyInfo)) - { - throw new ArgumentException($"The source property '{sourcePropertyName}' as referenced by embedding property '{property.Name}' does not exist in the record model."); - } - else if (sourcePropertyInfo.PropertyType != typeof(string)) - { - throw new ArgumentException($"The source property '{sourcePropertyName}' as referenced by embedding property '{property.Name}' has type {sourcePropertyInfo.PropertyType} but must be a string."); - } - else - { - sourcePropertiesInfo.Add(sourcePropertyInfo); - } - } - - yield return (property, sourcePropertiesInfo); - } - } - } -} diff --git a/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs b/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs deleted file mode 100644 index 03527556a2d0..000000000000 --- a/dotnet/samples/Concepts/Memory/VectorStore_EmbeddingGeneration.cs +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Azure.Identity; -using Memory.VectorStoreEmbeddingGeneration; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Connectors.AzureOpenAI; -using Microsoft.SemanticKernel.Connectors.InMemory; - -namespace Memory; - -/// -/// This sample shows how to abstract embedding generation away from usage by -/// using the decorator pattern. -/// -/// In the sample we create an and then using -/// an extension method -/// we wrap the with a that will automatically generate embeddings for properties -/// that have the attribute. -/// -/// The decorated vector store also adds the additional interface to the collection -/// which allows us to search the collection using a text string without having to manually generate the embeddings. -/// -/// Note that the demonstrated here are part of this sample and not part of the Semantic Kernel libraries. -/// To use it, you will need to copy it to your own project. -/// -public class VectorStore_EmbeddingGeneration(ITestOutputHelper output) : BaseTest(output) -{ - [Fact] - public async Task UseEmbeddingGenerationViaDecoratorAsync() - { - // Create an embedding generation service. - var textEmbeddingGenerationService = new AzureOpenAITextEmbeddingGenerationService( - TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, - TestConfiguration.AzureOpenAIEmbeddings.Endpoint, - new AzureCliCredential()); - - // Construct an InMemory vector store with embedding generation. - // The UseTextEmbeddingGeneration method adds an embedding generation - // decorator class to the vector store that will automatically generate - // embeddings for properties that are decorated with the GenerateTextEmbeddingAttribute. - var vectorStore = new InMemoryVectorStore().UseTextEmbeddingGeneration(textEmbeddingGenerationService); - - // Get and create collection if it doesn't exist. - var collection = vectorStore.GetCollection("skglossary"); - await collection.CreateCollectionIfNotExistsAsync(); - - // Create and upsert glossary entries into the collection. - await collection.UpsertAsync(CreateGlossaryEntries()); - - // Search the collection using a vectorizable text search. - var search = collection as IVectorizableTextSearch; - var searchString = "What is an Application Programming Interface"; - var resultRecords = await search!.VectorizableTextSearchAsync(searchString, top: 1).ToListAsync(); - - Console.WriteLine("Search string: " + searchString); - Console.WriteLine("Result: " + resultRecords.First().Record.Definition); - Console.WriteLine(); - } - - /// - /// Sample model class that represents a glossary entry. - /// - /// - /// Note that each property is decorated with an attribute that specifies how the property should be treated by the vector store. - /// This allows us to create a collection in the vector store and upsert and retrieve instances of this class without any further configuration. - /// - /// The property is also decorated with the attribute which - /// allows the vector store to automatically generate an embedding for the property when the record is upserted. 
- /// - private sealed class Glossary - { - [VectorStoreRecordKey] - public ulong Key { get; set; } - - [VectorStoreRecordData(IsIndexed = true)] - public string Category { get; set; } - - [VectorStoreRecordData] - public string Term { get; set; } - - [VectorStoreRecordData] - public string Definition { get; set; } - - [GenerateTextEmbedding(nameof(Definition))] - [VectorStoreRecordVector(1536)] - public ReadOnlyMemory DefinitionEmbedding { get; set; } - } - - /// - /// Create some sample glossary entries. - /// - /// A list of sample glossary entries. - private static IEnumerable CreateGlossaryEntries() - { - yield return new Glossary - { - Key = 1, - Category = "External Definitions", - Term = "API", - Definition = "Application Programming Interface. A set of rules and specifications that allow software components to communicate and exchange data." - }; - - yield return new Glossary - { - Key = 2, - Category = "Core Definitions", - Term = "Connectors", - Definition = "Connectors allow you to integrate with various services provide AI capabilities, including LLM, AudioToText, TextToAudio, Embedding generation, etc." - }; - - yield return new Glossary - { - Key = 3, - Category = "External Definitions", - Term = "RAG", - Definition = "Retrieval Augmented Generation - a term that refers to the process of retrieving additional data to provide as context to an LLM to use when generating a response (completion) to a user’s question (prompt)." - }; - } -} diff --git a/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs b/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs index e72dd1d52a39..ca10dbe496ee 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_Langchain_Interop.cs @@ -67,7 +67,7 @@ private async Task ReadDataFromCollectionAsync(IVectorStore vectorStore, string // Search the data set. 
var searchString = "I'm looking for an animal that is loyal and will make a great companion"; var searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - var resultRecords = await collection.VectorizedSearchAsync(searchVector, top: 1).ToListAsync(); + var resultRecords = await collection.SearchEmbeddingAsync(searchVector, top: 1).ToListAsync(); this.Output.WriteLine("Search string: " + searchString); this.Output.WriteLine("Source: " + resultRecords.First().Record.Source); diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs index f5abe719105c..9f50d8b56b28 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Common.cs @@ -51,7 +51,7 @@ public async Task IngestDataAndSearchAsync(string collectionName, Func(string collectionName, Func(string collectionName, Func g.Category == "External Definitions" }).ToListAsync(); + resultRecords = await collection.SearchEmbeddingAsync(searchVector, top: 3, new() { Filter = g => g.Category == "External Definitions" }).ToListAsync(); output.WriteLine("Search string: " + searchString); output.WriteLine("Number of results: " + resultRecords.Count); diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs index c0ffcdab3a9a..2cd98b672944 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs @@ -54,7 +54,7 @@ public async Task VectorSearchWithMultiVectorRecordAsync() // Search the store using the description embedding. var searchString = "I am looking for a reasonably priced coffee maker"; var searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - var resultRecords = await collection.VectorizedSearchAsync( + var resultRecords = await collection.SearchEmbeddingAsync( searchVector, top: 1, new() { VectorProperty = r => r.DescriptionEmbedding @@ -68,7 +68,7 @@ public async Task VectorSearchWithMultiVectorRecordAsync() // Search the store using the feature list embedding. searchString = "I am looking for a handheld vacuum cleaner that will remove pet hair"; searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - resultRecords = await collection.VectorizedSearchAsync( + resultRecords = await collection.SearchEmbeddingAsync( searchVector, top: 1, new() diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Paging.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Paging.cs index 50315134a965..ad8881ea7d30 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Paging.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Paging.cs @@ -47,7 +47,7 @@ public async Task VectorSearchWithPagingAsync() while (moreResults) { // Get the next page of results by asking for 10 results, and using 'Skip' to skip the results from the previous pages. 
- var currentPageResults = collection.VectorizedSearchAsync( + var currentPageResults = collection.SearchEmbeddingAsync( searchVector, top: 10, new() diff --git a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs index 650026e4e9c9..9f2e7f1315db 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs @@ -50,7 +50,7 @@ public async Task ExampleAsync() // Search the collection using a vector search. var searchString = "What is an Application Programming Interface"; var searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - var resultRecords = await collection.VectorizedSearchAsync(searchVector, top: 1).ToListAsync(); + var resultRecords = await collection.SearchEmbeddingAsync(searchVector, top: 1).ToListAsync(); Console.WriteLine("Search string: " + searchString); Console.WriteLine("Result: " + resultRecords.First().Record.Definition); @@ -59,7 +59,7 @@ public async Task ExampleAsync() // Search the collection using a vector search. searchString = "What is Retrieval Augmented Generation"; searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - resultRecords = await collection.VectorizedSearchAsync(searchVector, top: 1).ToListAsync(); + resultRecords = await collection.SearchEmbeddingAsync(searchVector, top: 1).ToListAsync(); Console.WriteLine("Search string: " + searchString); Console.WriteLine("Result: " + resultRecords.First().Record.Definition); @@ -68,7 +68,7 @@ public async Task ExampleAsync() // Search the collection using a vector search with pre-filtering. searchString = "What is Retrieval Augmented Generation"; searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); - resultRecords = await collection.VectorizedSearchAsync(searchVector, top: 3, new() { Filter = g => g.Category == "External Definitions" }).ToListAsync(); + resultRecords = await collection.SearchEmbeddingAsync(searchVector, top: 3, new() { Filter = g => g.Category == "External Definitions" }).ToListAsync(); Console.WriteLine("Search string: " + searchString); Console.WriteLine("Number of results: " + resultRecords.Count); diff --git a/dotnet/samples/Concepts/Memory/VolatileVectorStore_LoadData.cs b/dotnet/samples/Concepts/Memory/VolatileVectorStore_LoadData.cs index ac7f71b37c2a..e3a2c2dc0e64 100644 --- a/dotnet/samples/Concepts/Memory/VolatileVectorStore_LoadData.cs +++ b/dotnet/samples/Concepts/Memory/VolatileVectorStore_LoadData.cs @@ -71,7 +71,7 @@ static DataModel CreateRecord(string text, ReadOnlyMemory embedding) // Search the collection using a vector search. var searchString = "What is the Semantic Kernel?"; var searchVector = await embeddingGenerationService.GenerateEmbeddingAsync(searchString); - var resultRecords = await vectorSearch!.VectorizedSearchAsync(searchVector, top: 1).ToListAsync(); + var resultRecords = await vectorSearch!.SearchEmbeddingAsync(searchVector, top: 1).ToListAsync(); Console.WriteLine("Search string: " + searchString); Console.WriteLine("Result: " + resultRecords.First().Record.Text); @@ -115,7 +115,7 @@ static DataModel CreateRecord(TextSearchResult searchResult, ReadOnlyMemory l.Record).ToList(); // Override arguments to use only top N examples, which will be sent to LLM. 
diff --git a/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs b/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs index 81d75ae649e9..73a4c8fd0815 100644 --- a/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs +++ b/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs @@ -298,7 +298,7 @@ public async Task> GetBestFunctionsAsync( await collection.CreateCollectionIfNotExistsAsync(cancellationToken); // Find best functions to call for original request. - var recordKeys = (await collection.VectorizedSearchAsync(requestEmbedding, top: numberOfBestFunctions, cancellationToken: cancellationToken) + var recordKeys = (await collection.SearchEmbeddingAsync(requestEmbedding, top: numberOfBestFunctions, cancellationToken: cancellationToken) .ToListAsync(cancellationToken)).Select(l => l.Record.Id); return plugins diff --git a/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs b/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs index fc0e6e2bf0ac..f6f7a4adfdbe 100644 --- a/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs +++ b/dotnet/samples/Concepts/Search/VectorStore_TextSearch.cs @@ -1,5 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +#if DISABLED + using System.Runtime.CompilerServices; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.InMemory; @@ -141,9 +143,9 @@ internal static async Task> CreateCo } /// - /// Decorator for a that generates embeddings for text search queries. + /// Decorator for a that generates embeddings for text search queries. /// - private sealed class VectorizedSearchWrapper(IVectorizedSearch vectorizedSearch, ITextEmbeddingGenerationService textEmbeddingGeneration) : IVectorizableTextSearch + private sealed class VectorizedSearchWrapper(IVectorSearch vectorizedSearch, ITextEmbeddingGenerationService textEmbeddingGeneration) : IVectorizableTextSearch { /// public async IAsyncEnumerable> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -188,3 +190,5 @@ private sealed class DataModel public ReadOnlyMemory Embedding { get; init; } } } + +#endif diff --git a/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Program.cs b/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Program.cs index 096ad8d1b2e3..aef6893f40d0 100644 --- a/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Program.cs +++ b/dotnet/samples/Demos/ModelContextProtocolClientServer/MCPServer/Program.cs @@ -126,7 +126,7 @@ static TextDataModel CreateRecord(string text, ReadOnlyMemory embedding) ReadOnlyMemory promptEmbedding = await embeddingGenerationService.GenerateEmbeddingAsync(prompt, cancellationToken: cancellationToken); // Retrieve top three matching records from the vector store - var result = vsCollection.VectorizedSearchAsync(promptEmbedding, top: 3, cancellationToken: cancellationToken); + var result = vsCollection.SearchEmbeddingAsync(promptEmbedding, top: 3, cancellationToken: cancellationToken); // Return the records as resource contents List contents = []; diff --git a/dotnet/samples/Demos/OnnxSimpleRAG/Program.cs b/dotnet/samples/Demos/OnnxSimpleRAG/Program.cs index 83525c1a2e77..e500c241febe 100644 --- a/dotnet/samples/Demos/OnnxSimpleRAG/Program.cs +++ b/dotnet/samples/Demos/OnnxSimpleRAG/Program.cs @@ -64,7 +64,10 @@ await collection.UpsertAsync(new InformationItem() } // Add a plugin to search the database with. 
+// TODO: Once OpenAITextEmbeddingGenerationService implements MEAI's IEmbeddingGenerator (#10811), configure it with the InMemoryVectorStore above instead of passing it here. +#pragma warning disable CS0618 // VectorStoreTextSearch with ITextEmbeddingGenerationService is obsolete var vectorStoreTextSearch = new VectorStoreTextSearch(collection, embeddingService); +#pragma warning restore CS0618 kernel.Plugins.Add(vectorStoreTextSearch.CreateWithSearch("SearchPlugin")); // Start the conversation diff --git a/dotnet/samples/GettingStartedWithTextSearch/Step4_Search_With_VectorStore.cs b/dotnet/samples/GettingStartedWithTextSearch/Step4_Search_With_VectorStore.cs index 9c48c3a6880b..a2f950fb3804 100644 --- a/dotnet/samples/GettingStartedWithTextSearch/Step4_Search_With_VectorStore.cs +++ b/dotnet/samples/GettingStartedWithTextSearch/Step4_Search_With_VectorStore.cs @@ -24,10 +24,13 @@ public async Task UsingInMemoryVectorStoreRecordTextSearchAsync() { // Use embedding generation service and record collection for the fixture. var textEmbeddingGeneration = fixture.TextEmbeddingGenerationService; - var vectorizedSearch = fixture.VectorStoreRecordCollection; + var collection = fixture.VectorStoreRecordCollection; // Create a text search instance using the InMemory vector store. - var textSearch = new VectorStoreTextSearch(vectorizedSearch, textEmbeddingGeneration); + // TODO: Once OpenAITextEmbeddingGenerationService implements MEAI's IEmbeddingGenerator (#10811), configure it with the collection +#pragma warning disable CS0618 // VectorStoreTextSearch with ITextEmbeddingGenerationService is obsolete + var textSearch = new VectorStoreTextSearch(collection, textEmbeddingGeneration); +#pragma warning restore CS0618 // Search and return results as TextSearchResult items var query = "What is the Semantic Kernel?"; @@ -57,10 +60,13 @@ public async Task RagWithInMemoryVectorStoreTextSearchAsync() // Use embedding generation service and record collection for the fixture. var textEmbeddingGeneration = fixture.TextEmbeddingGenerationService; - var vectorizedSearch = fixture.VectorStoreRecordCollection; + var collection = fixture.VectorStoreRecordCollection; // Create a text search instance using the InMemory vector store. - var textSearch = new VectorStoreTextSearch(vectorizedSearch, textEmbeddingGeneration); + // TODO: Once OpenAITextEmbeddingGenerationService implements MEAI's IEmbeddingGenerator (#10811), configure it with the collection +#pragma warning disable CS0618 // VectorStoreTextSearch with ITextEmbeddingGenerationService is obsolete + var textSearch = new VectorStoreTextSearch(collection, textEmbeddingGeneration); +#pragma warning restore CS0618 // Build a text search plugin with vector store search and add to the kernel var searchPlugin = textSearch.CreateWithGetTextSearchResults("SearchPlugin"); @@ -69,14 +75,14 @@ public async Task RagWithInMemoryVectorStoreTextSearchAsync() // Invoke prompt and use text search plugin to provide grounding information var query = "What is the Semantic Kernel?"; string promptTemplate = """ - {{#with (SearchPlugin-GetTextSearchResults query)}} - {{#each this}} + {{#with (SearchPlugin-GetTextSearchResults query)}} + {{#each this}} Name: {{Name}} Value: {{Value}} Link: {{Link}} ----------------- - {{/each}} - {{/with}} + {{/each}} + {{/with}} {{query}} @@ -108,10 +114,13 @@ public async Task FunctionCallingWithInMemoryVectorStoreTextSearchAsync() // Use embedding generation service and record collection for the fixture. 
var textEmbeddingGeneration = fixture.TextEmbeddingGenerationService; - var vectorizedSearch = fixture.VectorStoreRecordCollection; + var collection = fixture.VectorStoreRecordCollection; // Create a text search instance using the InMemory vector store. - var textSearch = new VectorStoreTextSearch(vectorizedSearch, textEmbeddingGeneration); + // TODO: Once OpenAITextEmbeddingGenerationService implements MEAI's IEmbeddingGenerator (#10811), configure it with the collection +#pragma warning disable CS0618 // VectorStoreTextSearch with ITextEmbeddingGenerationService is obsolete + var textSearch = new VectorStoreTextSearch(collection, textEmbeddingGeneration); +#pragma warning restore CS0618 // Build a text search plugin with vector store search and add to the kernel var searchPlugin = textSearch.CreateWithGetTextSearchResults("SearchPlugin"); diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs b/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs index 5a89940fef5c..195d638573f7 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step2_Vector_Search.cs @@ -43,7 +43,7 @@ internal static async Task> SearchVectorStoreAsync( var searchVector = await textEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); // Search the store and get the single most relevant result. - var searchResultItems = await collection.VectorizedSearchAsync( + var searchResultItems = await collection.SearchEmbeddingAsync( searchVector, top: 1).ToListAsync(); return searchResultItems.First(); @@ -62,7 +62,7 @@ public async Task SearchAnInMemoryVectorStoreWithFilteringAsync() var searchVector = await fixture.TextEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); // Search the store with a filter and get the single most relevant result. - var searchResultItems = await collection.VectorizedSearchAsync( + var searchResultItems = await collection.SearchEmbeddingAsync( searchVector, top: 1, new() diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs b/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs index d2f01e07b6e5..cdea73134d21 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs @@ -20,7 +20,7 @@ public class Step5_Use_DynamicDataModel(ITestOutputHelper output, VectorStoresFi /// docker run -d --name redis-stack -p 6379:6379 -p 8001:8001 redis/redis-stack:latest /// [Fact] - public async Task SearchAVectorStoreWithGenericDataModelAsync() + public async Task SearchAVectorStoreWithDynamicMappingAsync() { // Construct a redis vector store. var vectorStore = new RedisVectorStore(ConnectionMultiplexer.Connect("localhost:6379").GetDatabase()); @@ -56,7 +56,7 @@ public async Task SearchAVectorStoreWithGenericDataModelAsync() var searchVector = await fixture.TextEmbeddingGenerationService.GenerateEmbeddingAsync(searchString); // Search the generic data model collection and get the single most relevant result. 
- var searchResultItems = await dynamicDataModelCollection.VectorizedSearchAsync( + var searchResultItems = await dynamicDataModelCollection.SearchEmbeddingAsync( searchVector, top: 1).ToListAsync(); diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchDynamicDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchDynamicDataModelMapperTests.cs index 75e1860772fe..1afd89e1897b 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchDynamicDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchDynamicDataModelMapperTests.cs @@ -271,8 +271,9 @@ public void MapFromStorageToDataModelSkipsMissingProperties() } private static VectorStoreRecordModel BuildModel(List properties) - => new VectorStoreRecordJsonModelBuilder(AzureAISearchConstants.s_modelBuildingOptions) + => new VectorStoreRecordJsonModelBuilder(AzureAISearchModelBuilder.s_modelBuildingOptions) .Build( typeof(Dictionary), - new() { Properties = properties }); + new() { Properties = properties }, + defaultEmbeddingGenerator: null); } diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchKernelBuilderExtensionsTests.cs index 40686e1c3a7c..f2f95374128c 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchKernelBuilderExtensionsTests.cs @@ -107,7 +107,7 @@ private void AssertVectorStoreRecordCollectionCreated() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = kernel.Services.GetRequiredService>(); + var vectorizedSearch = kernel.Services.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchServiceCollectionExtensionsTests.cs index ab6edca0f940..d1b24704c3f2 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchServiceCollectionExtensionsTests.cs @@ -107,7 +107,7 @@ private void AssertVectorStoreRecordCollectionCreated() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = serviceProvider.GetRequiredService>(); + var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBKernelBuilderExtensionsTests.cs index 60928e06f48b..46bc932fe707 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBKernelBuilderExtensionsTests.cs @@ -84,7 +84,7 @@ private void AssertVectorStoreRecordCollectionCreated() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = kernel.Services.GetRequiredService>(); + var vectorizedSearch = 
kernel.Services.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBServiceCollectionExtensionsTests.cs index 994d283190a2..996add717588 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBServiceCollectionExtensionsTests.cs @@ -84,7 +84,7 @@ private void AssertVectorStoreRecordCollectionCreated() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = serviceProvider.GetRequiredService>(); + var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs index 98973fdcba19..5e039902ec32 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs @@ -28,7 +28,8 @@ public sealed class AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests new VectorStoreRecordKeyProperty("Property1", typeof(string)) { StoragePropertyName = "property_1" }, new VectorStoreRecordDataProperty("Property2", typeof(string)) { StoragePropertyName = "property_2" } ] - }); + }, + defaultEmbeddingGenerator: null); [Fact] public void BuildFilterWithNullVectorSearchFilterReturnsNull() @@ -105,5 +106,6 @@ private static VectorStoreRecordModel BuildModel(List => new MongoDBModelBuilder() .Build( typeof(Dictionary), - new() { Properties = properties }); + new() { Properties = properties }, + defaultEmbeddingGenerator: null); } diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 4ad78d8a86f2..4e1fe0fd1e35 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -490,8 +490,8 @@ await this.TestUpsertWithModelAsync( } [Theory] - [MemberData(nameof(VectorizedSearchVectorTypeData))] - public async Task VectorizedSearchThrowsExceptionWithInvalidVectorTypeAsync(object vector, bool exceptionExpected) + [MemberData(nameof(SearchEmbeddingVectorTypeData))] + public async Task SearchEmbeddingThrowsExceptionWithInvalidVectorTypeAsync(object vector, bool exceptionExpected) { // Arrange this.MockCollectionForSearch(); @@ -503,18 +503,18 @@ public async Task VectorizedSearchThrowsExceptionWithInvalidVectorTypeAsync(obje // Act & Assert if (exceptionExpected) { - await Assert.ThrowsAsync(async () => await sut.VectorizedSearchAsync(vector, top: 3).ToListAsync()); + await Assert.ThrowsAsync(async () => await sut.SearchEmbeddingAsync(vector, 
top: 3).ToListAsync()); } else { - Assert.NotNull(await sut.VectorizedSearchAsync(vector, top: 3).FirstOrDefaultAsync()); + Assert.NotNull(await sut.SearchEmbeddingAsync(vector, top: 3).FirstOrDefaultAsync()); } } [Theory] [InlineData("TestEmbedding1", "TestEmbedding1", 3, 3)] [InlineData("TestEmbedding2", "test_embedding_2", 4, 4)] - public async Task VectorizedSearchUsesValidQueryAsync( + public async Task SearchEmbeddingUsesValidQueryAsync( string? vectorPropertyName, string expectedVectorPropertyName, int actualTop, @@ -566,7 +566,7 @@ public async Task VectorizedSearchUsesValidQueryAsync( }; // Act - var actual = await sut.VectorizedSearchAsync(vector, top: actualTop, new() + var actual = await sut.SearchEmbeddingAsync(vector, top: actualTop, new() { VectorProperty = vectorSelector, }).FirstOrDefaultAsync(); @@ -582,7 +582,7 @@ public async Task VectorizedSearchUsesValidQueryAsync( } [Fact] - public async Task VectorizedSearchThrowsExceptionWithNonExistentVectorPropertyNameAsync() + public async Task SearchEmbeddingThrowsExceptionWithNonExistentVectorPropertyNameAsync() { // Arrange this.MockCollectionForSearch(); @@ -594,11 +594,11 @@ public async Task VectorizedSearchThrowsExceptionWithNonExistentVectorPropertyNa var options = new MEVD.VectorSearchOptions { VectorProperty = r => "non-existent-property" }; // Act & Assert - await Assert.ThrowsAsync(async () => await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3, options).FirstOrDefaultAsync()); + await Assert.ThrowsAsync(async () => await sut.SearchEmbeddingAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3, options).FirstOrDefaultAsync()); } [Fact] - public async Task VectorizedSearchReturnsRecordWithScoreAsync() + public async Task SearchEmbeddingReturnsRecordWithScoreAsync() { // Arrange this.MockCollectionForSearch(); @@ -608,7 +608,7 @@ public async Task VectorizedSearchReturnsRecordWithScoreAsync() "collection"); // Act - var result = await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3).FirstOrDefaultAsync(); + var result = await sut.SearchEmbeddingAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3).FirstOrDefaultAsync(); // Assert Assert.NotNull(result); @@ -629,7 +629,7 @@ public async Task VectorizedSearchReturnsRecordWithScoreAsync() { [], 1 } }; - public static TheoryData VectorizedSearchVectorTypeData => new() + public static TheoryData SearchEmbeddingVectorTypeData => new() { { new ReadOnlyMemory([1f, 2f, 3f]), false }, { new ReadOnlyMemory([1f, 2f, 3f]), false }, diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLDynamicDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLDynamicDataModelMapperTests.cs index 70a896148799..3b3ef3f9eb94 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLDynamicDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLDynamicDataModelMapperTests.cs @@ -19,7 +19,7 @@ public sealed class AzureCosmosDBNoSQLDynamicDataModelMapperTests { private static readonly JsonSerializerOptions s_jsonSerializerOptions = JsonSerializerOptions.Default; - private static readonly VectorStoreRecordModel s_model = new AzureCosmosDBNoSqlVectorStoreModelBuilder() + private static readonly VectorStoreRecordModel s_model = new AzureCosmosDBNoSQLVectorStoreModelBuilder() .Build( typeof(Dictionary), new VectorStoreRecordDefinition @@ -41,10 +41,6 @@ public sealed class 
AzureCosmosDBNoSQLDynamicDataModelMapperTests new VectorStoreRecordDataProperty("DateTimeOffsetDataProp", typeof(DateTimeOffset)), new VectorStoreRecordDataProperty("NullableDateTimeOffsetDataProp", typeof(DateTimeOffset?)), new VectorStoreRecordDataProperty("TagListDataProp", typeof(List)), - #if NET5_0_OR_GREATER - new VectorStoreRecordVectorProperty("HalfVector", typeof(ReadOnlyMemory), 10), - new VectorStoreRecordVectorProperty("NullableHalfVector", typeof(ReadOnlyMemory?), 10), - #endif new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?), 10), new VectorStoreRecordVectorProperty("ByteVector", typeof(ReadOnlyMemory), 10), @@ -52,11 +48,9 @@ public sealed class AzureCosmosDBNoSQLDynamicDataModelMapperTests new VectorStoreRecordVectorProperty("SByteVector", typeof(ReadOnlyMemory), 10), new VectorStoreRecordVectorProperty("NullableSByteVector", typeof(ReadOnlyMemory?), 10), }, - }); + }, + defaultEmbeddingGenerator: null); -#if NET5_0_OR_GREATER - private static readonly Half[] s_halfVector = [(Half)1.0f, (Half)2.0f, (Half)3.0f]; -#endif private static readonly float[] s_floatVector = [1.0f, 2.0f, 3.0f]; private static readonly byte[] s_byteVector = [1, 2, 3]; private static readonly sbyte[] s_sbyteVector = [1, 2, 3]; @@ -87,10 +81,6 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes() ["NullableDateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), ["TagListDataProp"] = s_taglist, -#if NET5_0_OR_GREATER - ["HalfVector"] = new ReadOnlyMemory(s_halfVector), - ["NullableHalfVector"] = new ReadOnlyMemory(s_halfVector), -#endif ["FloatVector"] = new ReadOnlyMemory(s_floatVector), ["NullableFloatVector"] = new ReadOnlyMemory(s_floatVector), ["ByteVector"] = new ReadOnlyMemory(s_byteVector), @@ -100,7 +90,7 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes() }; // Act - var storageModel = sut.MapFromDataToStorageModel(dataModel); + var storageModel = sut.MapFromDataToStorageModel(dataModel, generatedEmbeddings: null); // Assert Assert.Equal("key", (string?)storageModel["id"]); @@ -118,10 +108,6 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes() Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), (DateTimeOffset?)storageModel["DateTimeOffsetDataProp"]); Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), (DateTimeOffset?)storageModel["NullableDateTimeOffsetDataProp"]); Assert.Equal(s_taglist, storageModel["TagListDataProp"]!.AsArray().GetValues().ToArray()); -#if NET5_0_OR_GREATER - Assert.Equal(s_halfVector, storageModel["HalfVector"]!.AsArray().Select(l => (Half)(float)l!).ToArray()); - Assert.Equal(s_halfVector, storageModel["NullableHalfVector"]!.AsArray().Select(l => (Half)(float)l!).ToArray()); -#endif Assert.Equal(s_floatVector, storageModel["FloatVector"]!.AsArray().GetValues().ToArray()); Assert.Equal(s_floatVector, storageModel["NullableFloatVector"]!.AsArray().GetValues().ToArray()); Assert.Equal(s_byteVector, storageModel["ByteVector"]!.AsArray().GetValues().ToArray()); @@ -156,7 +142,7 @@ public void MapFromDataToStorageModelMapsNullValues() var sut = new AzureCosmosDBNoSQLDynamicDataModelMapper(s_model, s_jsonSerializerOptions); // Act - var storageModel = sut.MapFromDataToStorageModel(dataModel); + var storageModel = sut.MapFromDataToStorageModel(dataModel, generatedEmbeddings: null); // Assert Assert.Null(storageModel["StringDataProp"]); @@ -187,10 +173,6 @@ public void 
MapFromStorageToDataModelMapsAllSupportedTypes() ["DateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), ["NullableDateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), ["TagListDataProp"] = new JsonArray(s_taglist.Select(l => (JsonValue)l).ToArray()), -#if NET5_0_OR_GREATER - ["HalfVector"] = new JsonArray(s_halfVector.Select(l => (JsonValue)(float)l).ToArray()), - ["NullableHalfVector"] = new JsonArray(s_halfVector.Select(l => (JsonValue)(float)l).ToArray()), -#endif ["FloatVector"] = new JsonArray(s_floatVector.Select(l => (JsonValue)l).ToArray()), ["NullableFloatVector"] = new JsonArray(s_floatVector.Select(l => (JsonValue)l).ToArray()), ["ByteVector"] = new JsonArray(s_byteVector.Select(l => (JsonValue)l).ToArray()), @@ -218,10 +200,6 @@ public void MapFromStorageToDataModelMapsAllSupportedTypes() Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel["DateTimeOffsetDataProp"]); Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel["NullableDateTimeOffsetDataProp"]); Assert.Equal(s_taglist, dataModel["TagListDataProp"]); -#if NET5_0_OR_GREATER - Assert.Equal(s_halfVector, ((ReadOnlyMemory)dataModel["HalfVector"]!).ToArray()); - Assert.Equal(s_halfVector, ((ReadOnlyMemory)dataModel["NullableHalfVector"]!)!.ToArray()); -#endif Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel["FloatVector"]!).ToArray()); Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel["NullableFloatVector"]!)!.ToArray()); Assert.Equal(s_byteVector, ((ReadOnlyMemory)dataModel["ByteVector"]!).ToArray()); @@ -296,7 +274,7 @@ public void MapFromDataToStorageModelSkipsMissingProperties() var sut = new AzureCosmosDBNoSQLDynamicDataModelMapper(s_model, s_jsonSerializerOptions); // Act - var storageModel = sut.MapFromDataToStorageModel(dataModel); + var storageModel = sut.MapFromDataToStorageModel(dataModel, generatedEmbeddings: null); // Assert Assert.Equal("key", (string?)storageModel["id"]); diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLKernelBuilderExtensionsTests.cs index 2b8a83824c01..59a697c2e869 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLKernelBuilderExtensionsTests.cs @@ -95,7 +95,7 @@ private void AssertVectorStoreRecordCollectionCreated() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = kernel.Services.GetRequiredService>(); + var vectorizedSearch = kernel.Services.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLServiceCollectionExtensionsTests.cs index 9a76d9e0d119..07900cb0dfc0 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLServiceCollectionExtensionsTests.cs @@ -96,7 +96,7 @@ private void AssertVectorStoreRecordCollectionCreated() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = 
serviceProvider.GetRequiredService>(); + var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs index 93264fd7384e..c3a9bcb78c3a 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests.cs @@ -18,18 +18,19 @@ public sealed class AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilderTests { private const string ScorePropertyName = "TestScore"; - private readonly VectorStoreRecordModel _model = new AzureCosmosDBNoSqlVectorStoreModelBuilder().Build( + private readonly VectorStoreRecordModel _model = new AzureCosmosDBNoSQLVectorStoreModelBuilder().Build( typeof(Dictionary), new() { Properties = [ new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordVectorProperty("TestProperty1", typeof(string), 10) { StoragePropertyName = "test_property_1" }, + new VectorStoreRecordVectorProperty("TestProperty1", typeof(ReadOnlyMemory), 10) { StoragePropertyName = "test_property_1" }, new VectorStoreRecordDataProperty("TestProperty2", typeof(string)) { StoragePropertyName = "test_property_2" }, new VectorStoreRecordDataProperty("TestProperty3", typeof(string)) { StoragePropertyName = "test_property_3" } ] - }); + }, + defaultEmbeddingGenerator: null); [Fact] public void BuildSearchQueryByDefaultReturnsValidQueryDefinition() @@ -192,7 +193,7 @@ FROM x const string KeyStoragePropertyName = "id"; const string PartitionKeyPropertyName = "TestProperty1"; - var model = new AzureCosmosDBNoSqlVectorStoreModelBuilder().Build( + var model = new AzureCosmosDBNoSQLVectorStoreModelBuilder().Build( typeof(Dictionary), new() { @@ -202,7 +203,8 @@ FROM x new VectorStoreRecordDataProperty("TestProperty1", typeof(string)), new VectorStoreRecordDataProperty("TestProperty2", typeof(string)) ] - }); + }, + defaultEmbeddingGenerator: null); var keys = new List { new("id", "TestProperty1") }; // Act diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 48cc293f8da1..073120889d47 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -521,7 +521,7 @@ public async Task VectorizedSearchReturnsValidRecordAsync() "collection"); // Act - var results = await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3).ToListAsync(); + var results = await sut.SearchEmbeddingAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3).ToListAsync(); var result = results[0]; // Assert @@ -541,7 +541,7 @@ public async Task VectorizedSearchWithUnsupportedVectorTypeThrowsExceptionAsync( // Act & Assert await Assert.ThrowsAsync(async () => - await sut.VectorizedSearchAsync(new List([1, 2, 3]), top: 3).ToListAsync()); + await sut.SearchEmbeddingAsync(new List([1, 2, 3]), top: 3).ToListAsync()); } [Fact] @@ 
-556,7 +556,7 @@ public async Task VectorizedSearchWithNonExistentVectorPropertyNameThrowsExcepti // Act & Assert await Assert.ThrowsAsync(async () => - await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3, searchOptions).ToListAsync()); + await sut.SearchEmbeddingAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3, searchOptions).ToListAsync()); } public static TheoryData, string, bool> CollectionExistsData => new() diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordMapperTests.cs index 3ecbfeccc4ce..9446a05a5045 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordMapperTests.cs @@ -5,7 +5,7 @@ using System.Linq; using System.Text.Json; using System.Text.Json.Nodes; -using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; using Xunit; @@ -18,11 +18,20 @@ public sealed class AzureCosmosDBNoSQLVectorStoreRecordMapperTests { private readonly AzureCosmosDBNoSQLVectorStoreRecordMapper _sut = new( - new VectorStoreRecordKeyPropertyModel("HotelId", typeof(string)) - { - StorageName = "id", - TemporaryStorageName = "HotelId" - }, + new AzureCosmosDBNoSQLVectorStoreModelBuilder().Build( + typeof(Dictionary), + new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("HotelId", typeof(string)), + new VectorStoreRecordVectorProperty("TestProperty1", typeof(ReadOnlyMemory), 10) { StoragePropertyName = "test_property_1" }, + new VectorStoreRecordDataProperty("TestProperty2", typeof(string)) { StoragePropertyName = "test_property_2" }, + new VectorStoreRecordDataProperty("TestProperty3", typeof(string)) { StoragePropertyName = "test_property_3" } + ] + }, + defaultEmbeddingGenerator: null, + JsonSerializerOptions.Default), JsonSerializerOptions.Default); [Fact] @@ -37,7 +46,7 @@ public void MapFromDataToStorageModelReturnsValidObject() }; // Act - var document = this._sut.MapFromDataToStorageModel(hotel); + var document = this._sut.MapFromDataToStorageModel(hotel, generatedEmbeddings: null); // Assert Assert.NotNull(document); diff --git a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryKernelBuilderExtensionsTests.cs index b3ce5286c9d6..aa6dceb543dd 100644 --- a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryKernelBuilderExtensionsTests.cs @@ -52,7 +52,7 @@ private void AssertVectorStoreRecordCollectionCreated() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = kernel.Services.GetRequiredService>(); + var vectorizedSearch = kernel.Services.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryServiceCollectionExtensionsTests.cs index f195f9267711..99d64b820d2c 100644 --- a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryServiceCollectionExtensionsTests.cs +++ 
b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryServiceCollectionExtensionsTests.cs @@ -52,7 +52,7 @@ private void AssertVectorStoreRecordCollectionCreated() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = serviceProvider.GetRequiredService>(); + var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs deleted file mode 100644 index 545db4feb005..000000000000 --- a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreRecordCollectionTests.cs +++ /dev/null @@ -1,557 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Connectors.InMemory; -using Xunit; - -namespace SemanticKernel.Connectors.InMemory.UnitTests; - -/// -/// Contains tests for the class. -/// -public class InMemoryVectorStoreRecordCollectionTests -{ - private const string TestCollectionName = "testcollection"; - private const string TestRecordKey1 = "testid1"; - private const string TestRecordKey2 = "testid2"; - private const int TestRecordIntKey1 = 1; - private const int TestRecordIntKey2 = 2; - - private readonly CancellationToken _testCancellationToken = new(false); - - private readonly ConcurrentDictionary> _collectionStore; - private readonly ConcurrentDictionary _collectionStoreTypes; - - public InMemoryVectorStoreRecordCollectionTests() - { - this._collectionStore = new(); - this._collectionStoreTypes = new(); - } - - [Theory] - [InlineData(TestCollectionName, true)] - [InlineData("nonexistentcollection", false)] - public async Task CollectionExistsReturnsCollectionStateAsync(string collectionName, bool expectedExists) - { - // Arrange - var collection = new ConcurrentDictionary(); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = new InMemoryVectorStoreRecordCollection>( - this._collectionStore, - this._collectionStoreTypes, - collectionName); - - // Act - var actual = await sut.CollectionExistsAsync(this._testCancellationToken); - - // Assert - Assert.Equal(expectedExists, actual); - } - - [Fact] - public async Task CanCreateCollectionAsync() - { - // Arrange - var sut = this.CreateRecordCollection(false); - - // Act - await sut.CreateCollectionAsync(this._testCancellationToken); - - // Assert - Assert.True(this._collectionStore.ContainsKey(TestCollectionName)); - } - - [Fact] - public async Task DeleteCollectionRemovesCollectionFromDictionaryAsync() - { - // Arrange - var collection = new ConcurrentDictionary(); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(false); - - // Act - await sut.DeleteCollectionAsync(this._testCancellationToken); - - // Assert - Assert.Empty(this._collectionStore); - } - - [Theory] - [InlineData(true, TestRecordKey1)] - [InlineData(true, TestRecordIntKey1)] - [InlineData(false, TestRecordKey1)] - [InlineData(false, TestRecordIntKey1)] - public async Task CanGetRecordWithVectorsAsync(bool useDefinition, TKey testKey) - where TKey : notnull - { - // Arrange - var record = CreateModel(testKey, withVectors: true); - var collection 
= new ConcurrentDictionary(); - collection.TryAdd(testKey!, record); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var actual = await sut.GetAsync( - testKey, - new() - { - IncludeVectors = true - }, - this._testCancellationToken); - - // Assert - var expectedArgs = new object[] { TestRecordKey1 }; - - Assert.NotNull(actual); - Assert.Equal(testKey, actual.Key); - Assert.Equal($"data {testKey}", actual.Data); - Assert.Equal(new float[] { 1, 2, 3, 4 }, actual.Vector!.Value.ToArray()); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2)] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] - [InlineData(false, TestRecordKey1, TestRecordKey2)] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] - public async Task CanGetManyRecordsWithVectorsAsync(bool useDefinition, TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true); - var record2 = CreateModel(testKey2, withVectors: true); - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1!, record1); - collection.TryAdd(testKey2!, record2); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var actual = await sut.GetAsync( - [testKey1, testKey2], - new() - { - IncludeVectors = true - }, - this._testCancellationToken).ToListAsync(); - - // Assert - Assert.NotNull(actual); - Assert.Equal(2, actual.Count); - Assert.Equal(testKey1, actual[0].Key); - Assert.Equal($"data {testKey1}", actual[0].Data); - Assert.Equal(testKey2, actual[1].Key); - Assert.Equal($"data {testKey2}", actual[1].Data); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2)] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] - [InlineData(false, TestRecordKey1, TestRecordKey2)] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] - public async Task CanDeleteRecordAsync(bool useDefinition, TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true); - var record2 = CreateModel(testKey2, withVectors: true); - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1, record1); - collection.TryAdd(testKey2, record2); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - await sut.DeleteAsync( - testKey1, - cancellationToken: this._testCancellationToken); - - // Assert - Assert.False(collection.ContainsKey(testKey1)); - Assert.True(collection.ContainsKey(testKey2)); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2)] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] - [InlineData(false, TestRecordKey1, TestRecordKey2)] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] - public async Task CanDeleteManyRecordsWithVectorsAsync(bool useDefinition, TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true); - var record2 = CreateModel(testKey2, withVectors: true); - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1, record1); - collection.TryAdd(testKey2, record2); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - await sut.DeleteAsync( - [testKey1, testKey2], - cancellationToken: 
this._testCancellationToken); - - // Assert - Assert.False(collection.ContainsKey(testKey1)); - Assert.False(collection.ContainsKey(testKey2)); - } - - [Theory] - [InlineData(true, TestRecordKey1)] - [InlineData(true, TestRecordIntKey1)] - [InlineData(false, TestRecordKey1)] - [InlineData(false, TestRecordIntKey1)] - public async Task CanUpsertRecordAsync(bool useDefinition, TKey testKey1) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true); - var collection = new ConcurrentDictionary(); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var upsertResult = await sut.UpsertAsync( - record1, - cancellationToken: this._testCancellationToken); - - // Assert - Assert.Equal(testKey1, upsertResult); - Assert.True(collection.ContainsKey(testKey1)); - Assert.IsType>(collection[testKey1]); - Assert.Equal($"data {testKey1}", (collection[testKey1] as SinglePropsModel)!.Data); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2)] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] - [InlineData(false, TestRecordKey1, TestRecordKey2)] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] - public async Task CanUpsertManyRecordsAsync(bool useDefinition, TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true); - var record2 = CreateModel(testKey2, withVectors: true); - - var collection = new ConcurrentDictionary(); - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var actual = await sut.UpsertAsync( - [record1, record2], - cancellationToken: this._testCancellationToken); - - // Assert - Assert.NotNull(actual); - Assert.Equal(2, actual.Count); - Assert.Equal(testKey1, actual[0]); - Assert.Equal(testKey2, actual[1]); - - Assert.True(collection.ContainsKey(testKey1)); - Assert.IsType>(collection[testKey1]); - Assert.Equal($"data {testKey1}", (collection[testKey1] as SinglePropsModel)!.Data); - } - - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2)] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2)] - [InlineData(false, TestRecordKey1, TestRecordKey2)] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2)] - public async Task CanSearchWithVectorAsync(bool useDefinition, TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true, new float[] { 1, 1, 1, 1 }); - var record2 = CreateModel(testKey2, withVectors: true, new float[] { -1, -1, -1, -1 }); - - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1, record1); - collection.TryAdd(testKey2, record2); - - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var actualResults = await sut.VectorizedSearchAsync( - new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - top: 3, - new() { IncludeVectors = true }, - this._testCancellationToken).ToListAsync(); - - // Assert - Assert.Equal(2, actualResults.Count); - Assert.Equal(testKey1, actualResults[0].Record.Key); - Assert.Equal($"data {testKey1}", actualResults[0].Record.Data); - Assert.Equal(1, actualResults[0].Score); - Assert.Equal(testKey2, actualResults[1].Record.Key); - Assert.Equal($"data {testKey2}", actualResults[1].Record.Data); - Assert.Equal(-1, actualResults[1].Score); - } - -#pragma warning disable CS0618 
// VectorSearchFilter is obsolete - [Theory] - [InlineData(true, TestRecordKey1, TestRecordKey2, "Equality")] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2, "Equality")] - [InlineData(false, TestRecordKey1, TestRecordKey2, "Equality")] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2, "Equality")] - [InlineData(true, TestRecordKey1, TestRecordKey2, "TagListContains")] - [InlineData(true, TestRecordIntKey1, TestRecordIntKey2, "TagListContains")] - [InlineData(false, TestRecordKey1, TestRecordKey2, "TagListContains")] - [InlineData(false, TestRecordIntKey1, TestRecordIntKey2, "TagListContains")] - public async Task CanSearchWithVectorAndFilterAsync(bool useDefinition, TKey testKey1, TKey testKey2, string filterType) - where TKey : notnull - { - // Arrange - var record1 = CreateModel(testKey1, withVectors: true, new float[] { 1, 1, 1, 1 }); - var record2 = CreateModel(testKey2, withVectors: true, new float[] { -1, -1, -1, -1 }); - - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1, record1); - collection.TryAdd(testKey2, record2); - - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var filter = filterType == "Equality" ? new VectorSearchFilter().EqualTo("Data", $"data {testKey2}") : new VectorSearchFilter().AnyTagEqualTo("Tags", $"tag {testKey2}"); - var actualResults = await sut.VectorizedSearchAsync( - new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - top: 3, - new() { IncludeVectors = true, OldFilter = filter }, - this._testCancellationToken).ToListAsync(); - - // Assert - Assert.Single(actualResults); - Assert.Equal(testKey2, actualResults[0].Record.Key); - Assert.Equal($"data {testKey2}", actualResults[0].Record.Data); - Assert.Equal(-1, actualResults[0].Score); - } -#pragma warning restore CS0618 // Type or member is obsolete - - [Theory] - [InlineData(DistanceFunction.CosineSimilarity, 1, -1)] - [InlineData(DistanceFunction.CosineDistance, 0, 2)] - [InlineData(DistanceFunction.DotProductSimilarity, 4, -4)] - [InlineData(DistanceFunction.EuclideanDistance, 0, 4)] - public async Task CanSearchWithDifferentDistanceFunctionsAsync(string distanceFunction, double expectedScoreResult1, double expectedScoreResult2) - { - // Arrange - var record1 = CreateModel(TestRecordKey1, withVectors: true, new float[] { 1, 1, 1, 1 }); - var record2 = CreateModel(TestRecordKey2, withVectors: true, new float[] { -1, -1, -1, -1 }); - - var collection = new ConcurrentDictionary(); - collection.TryAdd(TestRecordKey1, record1); - collection.TryAdd(TestRecordKey2, record2); - - this._collectionStore.TryAdd(TestCollectionName, collection); - - VectorStoreRecordDefinition singlePropsDefinition = new() - { - Properties = this._singlePropsDefinition.Properties.Select(x => x switch - { - VectorStoreRecordVectorProperty vectorProperty => new VectorStoreRecordVectorProperty(vectorProperty) { DistanceFunction = distanceFunction }, - _ => x - }).ToList() - }; - - var sut = new InMemoryVectorStoreRecordCollection>( - this._collectionStore, - this._collectionStoreTypes, - TestCollectionName, - new() - { - VectorStoreRecordDefinition = singlePropsDefinition - }); - - // Act - var actualResults = await sut.VectorizedSearchAsync( - new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - top: 3, - new() { IncludeVectors = true }, - this._testCancellationToken).ToListAsync(); - - // Assert - Assert.Equal(2, actualResults.Count); - Assert.Equal(TestRecordKey1, actualResults[0].Record.Key); - 
Assert.Equal($"data {TestRecordKey1}", actualResults[0].Record.Data); - Assert.Equal(expectedScoreResult1, actualResults[0].Score); - Assert.Equal(TestRecordKey2, actualResults[1].Record.Key); - Assert.Equal($"data {TestRecordKey2}", actualResults[1].Record.Data); - Assert.Equal(expectedScoreResult2, actualResults[1].Score); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public async Task CanSearchManyRecordsAsync(bool useDefinition) - { - // Arrange - var collection = new ConcurrentDictionary(); - for (int i = 0; i < 1000; i++) - { - if (i <= 14) - { - collection.TryAdd(i, CreateModel(i, withVectors: true, new float[] { 1, 1, 1, 1 })); - } - else - { - collection.TryAdd(i, CreateModel(i, withVectors: true, new float[] { -1, -1, -1, -1 })); - } - } - - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = this.CreateRecordCollection(useDefinition); - - // Act - var actualResults = await sut.VectorizedSearchAsync( - new ReadOnlyMemory(new float[] { 1, 1, 1, 1 }), - top: 10, - new() { IncludeVectors = true, Skip = 10 }, - this._testCancellationToken).ToListAsync(); - - // Assert that top was respected - Assert.Equal(10, actualResults.Count); - var actualIds = actualResults.Select(r => r.Record.Key).ToList(); - for (int i = 0; i < 10; i++) - { - // Assert that skip was respected - Assert.Contains(i + 10, actualIds); - if (i <= 4) - { - Assert.Equal(1, actualResults[i].Score); - } - else - { - Assert.Equal(-1, actualResults[i].Score); - } - } - } - - [Theory] - [InlineData(TestRecordKey1, TestRecordKey2)] - [InlineData(TestRecordIntKey1, TestRecordIntKey2)] - public async Task ItCanSearchUsingTheDynamicDataModelAsync(TKey testKey1, TKey testKey2) - where TKey : notnull - { - // Arrange - var record1 = new Dictionary - { - ["Key"] = testKey1, - ["Data"] = $"data {testKey1}", - ["Tags"] = new List { "default tag", "tag " + testKey1 }, - ["Vector"] = new ReadOnlyMemory([1, 1, 1, 1]) - }; - var record2 = new Dictionary - { - ["Key"] = testKey2, - ["Data"] = $"data {testKey2}", - ["Tags"] = new List { "default tag", "tag " + testKey2 }, - ["Vector"] = new ReadOnlyMemory([-1, -1, -1, -1]) - }; - - var collection = new ConcurrentDictionary(); - collection.TryAdd(testKey1, record1); - collection.TryAdd(testKey2, record2); - - this._collectionStore.TryAdd(TestCollectionName, collection); - - var sut = new InMemoryVectorStoreRecordCollection>( - this._collectionStore, - this._collectionStoreTypes, - TestCollectionName, - new() - { - VectorStoreRecordDefinition = this._singlePropsDefinition - }); - - // Act - var actualResults = await sut.VectorizedSearchAsync( - new ReadOnlyMemory([1, 1, 1, 1]), - top: 3, - new() { IncludeVectors = true, VectorProperty = r => r["Vector"] }, - this._testCancellationToken).ToListAsync(); - - // Assert - Assert.Equal(2, actualResults.Count); - Assert.Equal(testKey1, actualResults[0].Record["Key"]); - Assert.Equal($"data {testKey1}", actualResults[0].Record["Data"]); - Assert.Equal(1, actualResults[0].Score); - Assert.Equal(testKey2, actualResults[1].Record["Key"]); - Assert.Equal($"data {testKey2}", actualResults[1].Record["Data"]); - Assert.Equal(-1, actualResults[1].Score); - } - - private static SinglePropsModel CreateModel(TKey key, bool withVectors, float[]? vector = null) - { - return new SinglePropsModel - { - Key = key, - Data = "data " + key, - Tags = new List { "default tag", "tag " + key }, - Vector = vector ?? (withVectors ? 
new float[] { 1, 2, 3, 4 } : null), - NotAnnotated = null, - }; - } - - private InMemoryVectorStoreRecordCollection> CreateRecordCollection(bool useDefinition) - where TKey : notnull - { - return new InMemoryVectorStoreRecordCollection>( - this._collectionStore, - this._collectionStoreTypes, - TestCollectionName, - new() - { - VectorStoreRecordDefinition = useDefinition ? this._singlePropsDefinition : null - }); - } - - private readonly VectorStoreRecordDefinition _singlePropsDefinition = new() - { - Properties = - [ - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("Tags", typeof(List)) { IsIndexed = true }, - new VectorStoreRecordDataProperty("Data", typeof(string)) { IsIndexed = true }, - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) - ] - }; - - public sealed class SinglePropsModel - { - [VectorStoreRecordKey] - public TKey? Key { get; set; } - - [VectorStoreRecordData(IsIndexed = true)] - public List Tags { get; set; } = new List(); - - [VectorStoreRecordData(IsIndexed = true)] - public string Data { get; set; } = string.Empty; - - [VectorStoreRecordVector(10)] - public ReadOnlyMemory? Vector { get; set; } - - public string? NotAnnotated { get; set; } - } -} diff --git a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreTests.cs index 14d54969d8c3..fe9717c80c70 100644 --- a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/InMemoryVectorStoreTests.cs @@ -1,8 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Collections.Concurrent; -using System.Linq; using System.Threading.Tasks; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.InMemory; @@ -45,23 +43,6 @@ public void GetCollectionReturnsCollectionWithNonStringKey() Assert.IsType>>(actual); } - [Fact] - public async Task ListCollectionNamesReadsDictionaryAsync() - { - // Arrange. - var collectionStore = new ConcurrentDictionary>(); - collectionStore.TryAdd("collection1", new ConcurrentDictionary()); - collectionStore.TryAdd("collection2", new ConcurrentDictionary()); - var sut = new InMemoryVectorStore(collectionStore); - - // Act. - var collectionNames = sut.ListCollectionNamesAsync(); - - // Assert. - var collectionNamesList = await collectionNames.ToListAsync(); - Assert.Equal(new[] { "collection1", "collection2" }, collectionNamesList); - } - [Fact] public async Task GetCollectionDoesNotAllowADifferentDataTypeThanPreviouslyUsedAsync() { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs index 0ae13cf923ab..fccfa847a0a3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs @@ -1,53 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. -using System; -using System.Collections.Generic; -using Microsoft.Extensions.VectorData.ConnectorSupport; - namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; internal static class AzureAISearchConstants { internal const string VectorStoreSystemName = "azure.aisearch"; - - /// A set of types that a key on the provided model may have. 
- private static readonly HashSet s_supportedKeyTypes = [typeof(string)]; - - /// A set of types that data properties on the provided model may have. - private static readonly HashSet s_supportedDataTypes = - [ - typeof(string), - typeof(int), - typeof(long), - typeof(double), - typeof(float), - typeof(bool), - typeof(DateTimeOffset) - ]; - - /// A set of types that vectors on the provided model may have. - /// - /// Azure AI Search is adding support for more types than just float32, but these are not available for use via the - /// SDK yet. We will update this list as the SDK is updated. - /// - /// - private static readonly HashSet s_supportedVectorTypes = - [ - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?) - ]; - - internal static readonly VectorStoreRecordModelBuildingOptions s_modelBuildingOptions = new() - { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true, - - SupportedKeyPropertyTypes = s_supportedKeyTypes, - SupportedDataPropertyTypes = s_supportedDataTypes, - SupportedEnumerableDataPropertyElementTypes = s_supportedDataTypes, - SupportedVectorPropertyTypes = s_supportedVectorTypes, - - UsesExternalSerializer = true - }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs index 03b9b2bf03f2..76c54f8cb412 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchKernelBuilderExtensions.cs @@ -59,7 +59,7 @@ public static IKernelBuilder AddAzureAISearchVectorStore(this IKernelBuilder bui } /// - /// Register an Azure AI Search , and with the + /// Register an Azure AI Search , and with the /// specified service ID and where is retrieved from the dependency injection container. /// /// The type of the data model that the collection should contain. @@ -80,7 +80,7 @@ public static IKernelBuilder AddAzureAISearchVectorStoreRecordCollection - /// Register an Azure AI Search , and with the + /// Register an Azure AI Search , and with the /// provided and and the specified service ID. /// /// The type of the data model that the collection should contain. @@ -105,7 +105,7 @@ public static IKernelBuilder AddAzureAISearchVectorStoreRecordCollection - /// Register an Azure AI Search , and with the + /// Register an Azure AI Search , and with the /// provided and and the specified service ID. /// /// The type of the data model that the collection should contain. diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchModelBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchModelBuilder.cs new file mode 100644 index 000000000000..668115b92c29 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchModelBuilder.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.Extensions.VectorData.ConnectorSupport; + +namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; + +internal class AzureAISearchModelBuilder() : VectorStoreRecordJsonModelBuilder(s_modelBuildingOptions) +{ + /// A set of types that a key on the provided model may have. + private static readonly HashSet s_supportedKeyTypes = [typeof(string)]; + + /// A set of types that data properties on the provided model may have. 
+ private static readonly HashSet s_supportedDataTypes = + [ + typeof(string), + typeof(int), + typeof(long), + typeof(double), + typeof(float), + typeof(bool), + typeof(DateTimeOffset) + ]; + + /// A set of types that vectors on the provided model may have. + /// + /// Azure AI Search is adding support for more types than just float32, but these are not available for use via the + /// SDK yet. We will update this list as the SDK is updated. + /// + /// + private static readonly HashSet s_supportedVectorTypes = + [ + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?) + ]; + + internal static readonly VectorStoreRecordModelBuildingOptions s_modelBuildingOptions = new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + + SupportedKeyPropertyTypes = s_supportedKeyTypes, + SupportedDataPropertyTypes = s_supportedDataTypes, + SupportedEnumerableDataPropertyElementTypes = s_supportedDataTypes, + SupportedVectorPropertyTypes = s_supportedVectorTypes, + + UsesExternalSerializer = true + }; + + protected override void Validate(Type type) + { + base.Validate(type); + + if (this.VectorProperties.FirstOrDefault(p => p.EmbeddingGenerator is not null) is VectorStoreRecordPropertyModel property) + { + throw new NotSupportedException( + $"The Azure AI Search connector does not currently support a custom embedding generator (configured for property '{property.ModelName}' on type '{type.Name}'). " + + "However, you can configure embedding generation in Azure AI Search itself, without requiring a .NET IEmbeddingGenerator."); + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs index 661eed32ece9..7ce1d876bcc9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs @@ -7,6 +7,7 @@ using Azure.Core.Serialization; using Azure.Search.Documents; using Azure.Search.Documents.Indexes; +using Microsoft.Extensions.AI; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.AzureAISearch; @@ -35,11 +36,12 @@ public static IServiceCollection AddAzureAISearchVectorStore(this IServiceCollec (sp, obj) => { var searchIndexClient = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new AzureAISearchVectorStore( - searchIndexClient, - selectedOptions); + return new AzureAISearchVectorStore(searchIndexClient, options); }); return services; @@ -63,14 +65,15 @@ public static IServiceCollection AddAzureAISearchVectorStore(this IServiceCollec serviceId, (sp, obj) => { - var selectedOptions = options ?? sp.GetService(); - var searchClientOptions = BuildSearchClientOptions(selectedOptions?.JsonSerializerOptions); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; + var searchClientOptions = BuildSearchClientOptions(options?.JsonSerializerOptions); var searchIndexClient = new SearchIndexClient(endpoint, tokenCredential, searchClientOptions); // Construct the vector store. 
- return new AzureAISearchVectorStore( - searchIndexClient, - selectedOptions); + return new AzureAISearchVectorStore(searchIndexClient, options); }); return services; @@ -94,21 +97,22 @@ public static IServiceCollection AddAzureAISearchVectorStore(this IServiceCollec serviceId, (sp, obj) => { - var selectedOptions = options ?? sp.GetService(); - var searchClientOptions = BuildSearchClientOptions(selectedOptions?.JsonSerializerOptions); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; + var searchClientOptions = BuildSearchClientOptions(options?.JsonSerializerOptions); var searchIndexClient = new SearchIndexClient(endpoint, credential, searchClientOptions); // Construct the vector store. - return new AzureAISearchVectorStore( - searchIndexClient, - selectedOptions); + return new AzureAISearchVectorStore(searchIndexClient, options); }); return services; } /// - /// Register an Azure AI Search , and with the + /// Register an Azure AI Search , and with the /// specified service ID and where is retrieved from the dependency injection container. /// /// The type of the data model that the collection should contain. @@ -131,12 +135,12 @@ public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection { var searchIndexClient = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new AzureAISearchVectorStoreRecordCollection( - searchIndexClient, - collectionName, - selectedOptions); + return new AzureAISearchVectorStoreRecordCollection(searchIndexClient, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -145,7 +149,7 @@ public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection - /// Register an Azure AI Search , and with the + /// Register an Azure AI Search , and with the /// provided and and the specified service ID. /// /// The type of the data model that the collection should contain. @@ -172,15 +176,15 @@ public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection { - var selectedOptions = options ?? sp.GetService>(); - var searchClientOptions = BuildSearchClientOptions(selectedOptions?.JsonSerializerOptions); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; + var searchClientOptions = BuildSearchClientOptions(options?.JsonSerializerOptions); var searchIndexClient = new SearchIndexClient(endpoint, tokenCredential, searchClientOptions); // Construct the vector store. - return new AzureAISearchVectorStoreRecordCollection( - searchIndexClient, - collectionName, - selectedOptions); + return new AzureAISearchVectorStoreRecordCollection(searchIndexClient, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -189,7 +193,7 @@ public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection - /// Register an Azure AI Search , and with the + /// Register an Azure AI Search , and with the /// provided and and the specified service ID. /// /// The type of the data model that the collection should contain. @@ -216,15 +220,15 @@ public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection { - var selectedOptions = options ?? sp.GetService>(); - var searchClientOptions = BuildSearchClientOptions(selectedOptions?.JsonSerializerOptions); + options ??= sp.GetService>() ?? 
new() + { + EmbeddingGenerator = sp.GetService() + }; + var searchClientOptions = BuildSearchClientOptions(options?.JsonSerializerOptions); var searchIndexClient = new SearchIndexClient(endpoint, credential, searchClientOptions); // Construct the vector store. - return new AzureAISearchVectorStoreRecordCollection( - searchIndexClient, - collectionName, - selectedOptions); + return new AzureAISearchVectorStoreRecordCollection(searchIndexClient, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -233,14 +237,14 @@ public static IServiceCollection AddAzureAISearchVectorStoreRecordCollection - /// Also register the with the given as a . + /// Also register the with the given as a . /// /// The type of the data model that the collection should contain. /// The service collection to register on. /// The service id that the registrations should use. private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) where TRecord : notnull { - services.AddKeyedTransient>( + services.AddKeyedTransient>( serviceId, (sp, obj) => { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs index 114055526a96..eb866c2998c8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs @@ -68,7 +68,8 @@ public IVectorStoreRecordCollection GetCollection( new AzureAISearchVectorStoreRecordCollectionOptions() { JsonSerializerOptions = this._options.JsonSerializerOptions, - VectorStoreRecordDefinition = vectorStoreRecordDefinition + VectorStoreRecordDefinition = vectorStoreRecordDefinition, + EmbeddingGenerator = this._options.EmbeddingGenerator }) as IVectorStoreRecordCollection; return recordCollection!; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs index c502f4828b99..c18a882042e6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs @@ -3,6 +3,7 @@ using System; using System.Text.Json; using Azure.Search.Documents.Indexes; +using Microsoft.Extensions.AI; namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; @@ -23,4 +24,9 @@ public sealed class AzureAISearchVectorStoreOptions /// to provide the same set of both here and when constructing the . /// public JsonSerializerOptions? JsonSerializerOptions { get; init; } = null; + + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index d1c7eb6728f9..21650c41b192 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -25,12 +25,14 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// The data type of the record key. Can be either , or for dynamic mapping. 
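// A minimal sketch, assuming hypothetical instances, of the resolution order the registrations above
// implement: an explicitly passed options argument wins, then options registered in the container,
// and finally a fresh options instance that falls back to any IEmbeddingGenerator registered in
// dependency injection.
private static void RegisterWithContainerEmbeddingGenerator(
    IServiceCollection services, SearchIndexClient searchIndexClient, IEmbeddingGenerator generator)
{
    services.AddSingleton<SearchIndexClient>(searchIndexClient);
    services.AddSingleton<IEmbeddingGenerator>(generator);

    // No options argument and no registered options service, so the vector store is constructed
    // with EmbeddingGenerator = sp.GetService<IEmbeddingGenerator>() from the container.
    services.AddAzureAISearchVectorStore();
}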
/// The data model to use for adding, updating and retrieving data from storage. #pragma warning disable CA1711 // Identifiers should not have incorrect suffix +#pragma warning disable CS0618 // IVectorizableTextSearch is obsolete public sealed class AzureAISearchVectorStoreRecordCollection : IVectorStoreRecordCollection, IVectorizableTextSearch, IKeywordHybridSearch where TKey : notnull where TRecord : notnull +#pragma warning restore CS0618 // IVectorizableTextSearch is obsolete #pragma warning restore CA1711 // Identifiers should not have incorrect suffix { /// Metadata about vector store record collection. @@ -85,8 +87,8 @@ public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexCli this._options = options ?? new AzureAISearchVectorStoreRecordCollectionOptions(); this._searchClient = this._searchIndexClient.GetSearchClient(name); - this._model = new VectorStoreRecordJsonModelBuilder(AzureAISearchConstants.s_modelBuildingOptions) - .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.JsonSerializerOptions); + this._model = new AzureAISearchModelBuilder() + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator, this._options.JsonSerializerOptions); // Resolve mapper. // If they didn't provide a custom mapper, and the record type is the generic data model, use the built in mapper for that. @@ -293,15 +295,19 @@ public async Task> UpsertAsync(IEnumerable records, } /// - public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable> SearchEmbeddingAsync( + TVector vector, + int top, + VectorSearchOptions? options = null, + CancellationToken cancellationToken = default) + where TVector : notnull { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + var floatVector = VerifyVectorParam(vector); Verify.NotLessThan(top, 1); - // Resolve options. - var internalOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._model.GetVectorPropertyOrSingle(internalOptions); - // Configure search settings. var vectorQueries = new List { @@ -310,7 +316,7 @@ public IAsyncEnumerable> VectorizedSearchAsync throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), { OldFilter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._model), @@ -324,7 +330,7 @@ public IAsyncEnumerable> VectorizedSearchAsync> VectorizedSearchAsync> VectorizedSearchAsync + [Obsolete("Use either SearchEmbeddingAsync to search directly on embeddings, or SearchAsync to handle embedding generation internally as part of the call.")] + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + where TVector : notnull + => this.SearchEmbeddingAsync(vector, top, options, cancellationToken); + /// public IAsyncEnumerable GetAsync(Expression> filter, int top, GetFilteredRecordOptions? options = null, CancellationToken cancellationToken = default) @@ -396,8 +408,20 @@ public IAsyncEnumerable GetAsync(Expression> filter } /// - public IAsyncEnumerable> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable> SearchAsync( + TInput value, + int top, + VectorSearchOptions? options = default, + CancellationToken cancellationToken = default) + where TInput : notnull { + var searchText = value switch + { + string s => s, + null => throw new ArgumentNullException(nameof(value)), + _ => throw new ArgumentException($"The provided search type '{value?.GetType().Name}' is not supported by the Azure AI Search connector, pass a string.") + }; + Verify.NotNull(searchText); Verify.NotLessThan(top, 1); @@ -407,8 +431,8 @@ public IAsyncEnumerable> VectorizableTextSearchAsync } // Resolve options. - var internalOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._model.GetVectorPropertyOrSingle(internalOptions); + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); // Configure search settings. var vectorQueries = new List @@ -418,7 +442,7 @@ public IAsyncEnumerable> VectorizableTextSearchAsync #pragma warning disable CS0618 // VectorSearchFilter is obsolete // Build filter object. - var filter = internalOptions switch + var filter = options switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), { OldFilter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._model), @@ -432,7 +456,7 @@ public IAsyncEnumerable> VectorizableTextSearchAsync { VectorSearch = new(), Size = top, - Skip = internalOptions.Skip, + Skip = options.Skip, }; if (filter is not null) @@ -443,7 +467,7 @@ public IAsyncEnumerable> VectorizableTextSearchAsync searchOptions.VectorSearch.Queries.AddRange(vectorQueries); // Filter out vector fields if requested. - if (!internalOptions.IncludeVectors) + if (!options.IncludeVectors) { searchOptions.Select.Add(this._model.KeyProperty.StorageName); @@ -453,9 +477,14 @@ public IAsyncEnumerable> VectorizableTextSearchAsync } } - return this.SearchAndMapToDataModelAsync(null, searchOptions, internalOptions.IncludeVectors, cancellationToken); + return this.SearchAndMapToDataModelAsync(null, searchOptions, options.IncludeVectors, cancellationToken); } + /// + [Obsolete("Use SearchAsync")] + public IAsyncEnumerable> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + => this.SearchAsync(searchText, top, options, cancellationToken); + /// public IAsyncEnumerable> HybridSearchAsync(TVector vector, ICollection keywords, int top, HybridSearchOptions? options = null, CancellationToken cancellationToken = default) { @@ -464,9 +493,9 @@ public IAsyncEnumerable> HybridSearchAsync( Verify.NotLessThan(top, 1); // Resolve options. - var internalOptions = options ?? s_defaultKeywordVectorizedHybridSearchOptions; - var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = internalOptions.VectorProperty }); - var textDataProperty = this._model.GetFullTextDataPropertyOrSingle(internalOptions.AdditionalProperty); + options ??= s_defaultKeywordVectorizedHybridSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = options.VectorProperty }); + var textDataProperty = this._model.GetFullTextDataPropertyOrSingle(options.AdditionalProperty); // Configure search settings. 
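// A minimal sketch, assuming an existing collection over a hypothetical Hotel record and a
// pre-computed query embedding, of the search surface introduced in this file: SearchEmbeddingAsync
// takes a ready-made vector, SearchAsync takes raw text that Azure AI Search vectorizes service-side,
// and the previous VectorizedSearchAsync / VectorizableTextSearchAsync methods remain only as
// obsolete forwarders to these two.
private static async Task SearchBothWaysAsync(
    AzureAISearchVectorStoreRecordCollection<string, Hotel> collection, ReadOnlyMemory<float> queryEmbedding)
{
    // Search with a pre-computed embedding.
    await foreach (var result in collection.SearchEmbeddingAsync(queryEmbedding, top: 3))
    {
        Console.WriteLine($"{result.Record.HotelId}: {result.Score}");
    }

    // Search with raw text; the service generates the embedding via its configured vectorizer.
    await foreach (var result in collection.SearchAsync("family friendly hotel near the beach", top: 3))
    {
        Console.WriteLine($"{result.Record.HotelId}: {result.Score}");
    }
}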
var vectorQueries = new List @@ -476,7 +505,7 @@ public IAsyncEnumerable> HybridSearchAsync( #pragma warning disable CS0618 // VectorSearchFilter is obsolete // Build filter object. - var filter = internalOptions switch + var filter = options switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), { OldFilter: VectorSearchFilter legacyFilter } => AzureAISearchVectorStoreCollectionSearchMapping.BuildLegacyFilterString(legacyFilter, this._model), @@ -490,15 +519,15 @@ public IAsyncEnumerable> HybridSearchAsync( { VectorSearch = new(), Size = top, - Skip = internalOptions.Skip, + Skip = options.Skip, Filter = filter, - IncludeTotalCount = internalOptions.IncludeTotalCount, + IncludeTotalCount = options.IncludeTotalCount, }; searchOptions.VectorSearch.Queries.AddRange(vectorQueries); searchOptions.SearchFields.Add(textDataProperty.StorageName); // Filter out vector fields if requested. - if (!internalOptions.IncludeVectors) + if (!options.IncludeVectors) { searchOptions.Select.Add(this._model.KeyProperty.StorageName); @@ -510,7 +539,7 @@ public IAsyncEnumerable> HybridSearchAsync( var keywordsCombined = string.Join(" ", keywords); - return this.SearchAndMapToDataModelAsync(keywordsCombined, searchOptions, internalOptions.IncludeVectors, cancellationToken); + return this.SearchAndMapToDataModelAsync(keywordsCombined, searchOptions, options.IncludeVectors, cancellationToken); } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs index d396f77c37d2..0b405f4710bd 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs @@ -4,6 +4,7 @@ using System.Text.Json; using System.Text.Json.Nodes; using Azure.Search.Documents.Indexes; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; @@ -38,4 +39,9 @@ public sealed class AzureAISearchVectorStoreRecordCollectionOptions /// to provide the same set of both here and when constructing the . /// public JsonSerializerOptions? JsonSerializerOptions { get; init; } = null; + + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBKernelBuilderExtensions.cs index 12c47576f243..9c3f0ea120e5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBKernelBuilderExtensions.cs @@ -52,7 +52,7 @@ public static IKernelBuilder AddAzureCosmosDBMongoDBVectorStore( } /// - /// Register an Azure CosmosDB MongoDB and with the specified service ID + /// Register an Azure CosmosDB MongoDB and with the specified service ID /// and where the Azure CosmosDB MongoDB is retrieved from the dependency injection container. /// /// The type of the record. 
@@ -73,7 +73,7 @@ public static IKernelBuilder AddAzureCosmosDBMongoDBVectorStoreRecordCollection< } /// - /// Register an Azure CosmosDB MongoDB and with the specified service ID + /// Register an Azure CosmosDB MongoDB and with the specified service ID /// and where the Azure CosmosDB MongoDB is constructed using the provided and . /// /// The type of the record. diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBServiceCollectionExtensions.cs index 671646ab2f5e..58a55be23741 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBServiceCollectionExtensions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using Microsoft.Extensions.AI; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; @@ -33,7 +34,10 @@ public static IServiceCollection AddAzureCosmosDBMongoDBVectorStore( (sp, obj) => { var database = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; return new AzureCosmosDBMongoDBVectorStore(database, options); }); @@ -70,7 +74,10 @@ public static IServiceCollection AddAzureCosmosDBMongoDBVectorStore( var mongoClient = new MongoClient(settings); var database = mongoClient.GetDatabase(databaseName); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; return new AzureCosmosDBMongoDBVectorStore(database, options); }); @@ -79,7 +86,7 @@ public static IServiceCollection AddAzureCosmosDBMongoDBVectorStore( } /// - /// Register an Azure CosmosDB MongoDB and with the specified service ID + /// Register an Azure CosmosDB MongoDB and with the specified service ID /// and where the Azure CosmosDB MongoDB is retrieved from the dependency injection container. /// /// The type of the record. @@ -100,9 +107,12 @@ public static IServiceCollection AddAzureCosmosDBMongoDBVectorStoreRecordCollect (sp, obj) => { var database = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new AzureCosmosDBMongoDBVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new AzureCosmosDBMongoDBVectorStoreRecordCollection(database, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -111,7 +121,7 @@ public static IServiceCollection AddAzureCosmosDBMongoDBVectorStoreRecordCollect } /// - /// Register an Azure CosmosDB MongoDB and with the specified service ID + /// Register an Azure CosmosDB MongoDB and with the specified service ID /// and where the Azure CosmosDB MongoDB is constructed using the provided and . /// /// The type of the record. @@ -141,9 +151,12 @@ public static IServiceCollection AddAzureCosmosDBMongoDBVectorStoreRecordCollect var mongoClient = new MongoClient(settings); var database = mongoClient.GetDatabase(databaseName); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? 
new() + { + EmbeddingGenerator = sp.GetService() + }; - return new AzureCosmosDBMongoDBVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new AzureCosmosDBMongoDBVectorStoreRecordCollection(database, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -152,14 +165,14 @@ public static IServiceCollection AddAzureCosmosDBMongoDBVectorStoreRecordCollect } /// - /// Also register the with the given as a . + /// Also register the with the given as a . /// /// The type of the data model that the collection should contain. /// The service collection to register on. /// The service id that the registrations should use. private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) where TRecord : notnull { - services.AddKeyedTransient>( + services.AddKeyedTransient>( serviceId, (sp, obj) => { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs index 7e63cd7a50ae..50d2295284ba 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs @@ -64,7 +64,11 @@ public IVectorStoreRecordCollection GetCollection( var recordCollection = new AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mongoDatabase, name, - new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; + new() + { + VectorStoreRecordDefinition = vectorStoreRecordDefinition, + EmbeddingGenerator = this._options.EmbeddingGenerator + }) as IVectorStoreRecordCollection; return recordCollection!; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreOptions.cs index 69e3b94233ae..45e6363fa3ca 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreOptions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using Microsoft.Extensions.AI; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; @@ -9,6 +10,11 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; /// public sealed class AzureCosmosDBMongoDBVectorStoreOptions { + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } + /// /// An optional factory to use for constructing instances, if a custom record collection is required. 
/// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs index 77ab871771c9..225d990fee08 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -9,8 +9,10 @@ using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.Properties; using Microsoft.SemanticKernel.Connectors.MongoDB; using MongoDB.Bson; using MongoDB.Bson.Serialization.Attributes; @@ -85,7 +87,7 @@ public AzureCosmosDBMongoDBVectorStoreRecordCollection( this._mongoCollection = mongoDatabase.GetCollection(name); this.Name = name; this._options = options ?? new AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions(); - this._model = new MongoDBModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); + this._model = new MongoDBModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator); this._mapper = typeof(TRecord) == typeof(Dictionary) ? (new MongoDBDynamicDataModelMapper(this._model) as IMongoDBMapper)! : new MongoDBVectorStoreRecordMapper(this._model); @@ -163,6 +165,10 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) var stringKey = this.GetStringKey(key); var includeVectors = options?.IncludeVectors ?? false; + if (includeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } var record = await this.RunOperationAsync(OperationName, async () => { @@ -196,6 +202,11 @@ public async IAsyncEnumerable GetAsync( const string OperationName = "Find"; + if (options?.IncludeVectors == true && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + var stringKeys = keys is IEnumerable k ? k : keys.Cast(); using var cursor = await this @@ -220,30 +231,61 @@ public async IAsyncEnumerable GetAsync( } /// - public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); const string OperationName = "ReplaceOne"; + Embedding?[]? generatedEmbeddings = null; + + var vectorPropertyCount = this._model.VectorProperties.Count; + for (var i = 0; i < vectorPropertyCount; i++) + { + var vectorProperty = this._model.VectorProperties[i]; + + if (vectorProperty.EmbeddingGenerator is null) + { + continue; + } + + // TODO: Ideally we'd group together vector properties using the same generator (and with the same input and output properties), + // and generate embeddings for them in a single batch. That's some more complexity though. 
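// One possible shape of the batching described in the TODO above, sketched as comments only and
// using an assumed input-selection helper; this patch keeps the simpler per-property path below.
//
//   foreach (var group in this._model.VectorProperties
//       .Where(p => p.EmbeddingGenerator is IEmbeddingGenerator<string, Embedding<float>>)
//       .GroupBy(p => p.EmbeddingGenerator))
//   {
//       var generator = (IEmbeddingGenerator<string, Embedding<float>>)group.Key!;
//       var inputs = group.Select(p => GetEmbeddingInput(record, p)).ToList();   // assumed helper
//       var embeddings = await generator.GenerateAsync(inputs, cancellationToken: cancellationToken);
//       // assign each embeddings[j] into generatedEmbeddings at the matching property index
//   }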
+ if (vectorProperty.TryGenerateEmbedding, ReadOnlyMemory>(record, cancellationToken, out var floatTask)) + { + generatedEmbeddings ??= new Embedding?[vectorPropertyCount]; + generatedEmbeddings[i] = await floatTask.ConfigureAwait(false); + } + else if (vectorProperty.TryGenerateEmbedding, ReadOnlyMemory>(record, cancellationToken, out var doubleTask)) + { + generatedEmbeddings ??= new Embedding?[vectorPropertyCount]; + generatedEmbeddings[i] = await doubleTask.ConfigureAwait(false); + } + else + { + throw new InvalidOperationException( + $"The embedding generator configured on property '{vectorProperty.ModelName}' cannot produce an embedding of type '{typeof(Embedding).Name}' for the given input type."); + } + } + var replaceOptions = new ReplaceOptions { IsUpsert = true }; var storageModel = VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBMongoDBConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this.Name, OperationName, - () => this._mapper.MapFromDataToStorageModel(record)); + () => this._mapper.MapFromDataToStorageModel(record, generatedEmbeddings)); var key = storageModel[MongoDBConstants.MongoReservedKeyPropertyName].AsString; - return this.RunOperationAsync(OperationName, async () => + return await this.RunOperationAsync(OperationName, async () => { await this._mongoCollection .ReplaceOneAsync(this.GetFilterById(key), storageModel, replaceOptions, cancellationToken) .ConfigureAwait(false); return (TKey)(object)key; - }); + }).ConfigureAwait(false); } /// @@ -256,12 +298,78 @@ public async Task> UpsertAsync(IEnumerable records, return results.Where(r => r is not null).ToList(); } + #region Search + /// - public async IAsyncEnumerable> VectorizedSearchAsync( + public async IAsyncEnumerable> SearchAsync( + TInput value, + int top, + MEVD.VectorSearchOptions? options = default, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TInput : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + switch (vectorProperty.EmbeddingGenerator) + { + case IEmbeddingGenerator> generator: + { + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + } + + case IEmbeddingGenerator> generator: + { + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + } + + case null: + throw new InvalidOperationException(VectorDataStrings.NoEmbeddingGeneratorWasConfiguredForSearch); + + default: + throw new InvalidOperationException( + MongoDBConstants.SupportedVectorTypes.Contains(typeof(TInput)) + ? string.Format(VectorDataStrings.EmbeddingTypePassedToSearchAsync) + : string.Format(VectorDataStrings.IncompatibleEmbeddingGeneratorWasConfiguredForInputType, typeof(TInput).Name, vectorProperty.EmbeddingGenerator.GetType().Name)); + } + } + + /// + public IAsyncEnumerable> SearchEmbeddingAsync( TVector vector, int top, MEVD.VectorSearchOptions? 
options = null, + CancellationToken cancellationToken = default) + where TVector : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + return this.SearchCoreAsync(vector, top, vectorProperty, operationName: "SearchEmbedding", options, cancellationToken); + } + + private async IAsyncEnumerable> SearchCoreAsync( + TVector vector, + int top, + VectorStoreRecordVectorPropertyModel vectorProperty, + string operationName, + MEVD.VectorSearchOptions options, [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TVector : notnull { Verify.NotNull(vector); Verify.NotLessThan(top, 1); @@ -277,11 +385,13 @@ public async IAsyncEnumerable> VectorizedSearchAsync typeof(ReadOnlyMemory).FullName])}") }; - var searchOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._model.GetVectorPropertyOrSingle(searchOptions); + if (options.IncludeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } #pragma warning disable CS0618 // VectorSearchFilter is obsolete - var filter = searchOptions switch + var filter = options switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), { OldFilter: VectorSearchFilter legacyFilter } => AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.BuildFilter(legacyFilter, this._model), @@ -292,7 +402,7 @@ public async IAsyncEnumerable> VectorizedSearchAsync // Constructing a query to fetch "skip + top" total items // to perform skip logic locally, since skip option is not part of API. - var itemsAmount = searchOptions.Skip + top; + var itemsAmount = options.Skip + top; var vectorPropertyIndexKind = AzureCosmosDBMongoDBVectorStoreCollectionSearchMapping.GetVectorPropertyIndexKind(vectorProperty.IndexKind); @@ -324,12 +434,20 @@ public async IAsyncEnumerable> VectorizedSearchAsync .AggregateAsync(pipeline, cancellationToken: cancellationToken) .ConfigureAwait(false); - await foreach (var result in this.EnumerateAndMapSearchResultsAsync(cursor, searchOptions, cancellationToken).ConfigureAwait(false)) + await foreach (var result in this.EnumerateAndMapSearchResultsAsync(cursor, options, cancellationToken).ConfigureAwait(false)) { yield return result; } } + /// + [Obsolete("Use either SearchEmbeddingAsync to search directly on embeddings, or SearchAsync to handle embedding generation internally as part of the call.")] + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, MEVD.VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + where TVector : notnull + => this.SearchEmbeddingAsync(vector, top, options, cancellationToken); + + #endregion Search + /// public object? GetService(Type serviceType, object? 
serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs index 084bbfd0a72b..0eee8b280525 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using MongoDB.Bson; @@ -27,6 +28,11 @@ public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions public VectorStoreRecordDefinition? VectorStoreRecordDefinition { get; init; } = null; + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } + /// /// This integer is the number of clusters that the inverted file (IVF) index uses to group the vector data. Default is 1. /// We recommend that numLists is set to documentCount/1000 for up to 1 million documents and to sqrt(documentCount) diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLDynamicDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLDynamicDataModelMapper.cs index 8bf1f9a6c5c8..0d9fec10f740 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLDynamicDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLDynamicDataModelMapper.cs @@ -7,6 +7,7 @@ using System.Text.Json.Nodes; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; +using MEAI = Microsoft.Extensions.AI; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; @@ -22,43 +23,58 @@ internal sealed class AzureCosmosDBNoSQLDynamicDataModelMapper(VectorStoreRecord Converters = { new AzureCosmosDBNoSQLReadOnlyMemoryByteConverter() } }; - public JsonObject MapFromDataToStorageModel(Dictionary dataModel) + public JsonObject MapFromDataToStorageModel(Dictionary dataModel, MEAI.Embedding?[]? generatedEmbeddings) { Verify.NotNull(dataModel); var jsonObject = new JsonObject(); - // Loop through all known properties and map each from the data model to the storage model. - foreach (var property in model.Properties) - { - switch (property) + jsonObject[AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName] = !dataModel.TryGetValue(model.KeyProperty.ModelName, out var keyValue) + ? throw new InvalidOperationException($"Missing value for key property '{model.KeyProperty.ModelName}") + : keyValue switch { - case VectorStoreRecordKeyPropertyModel keyProperty: - jsonObject[AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName] = (string)(dataModel[keyProperty.ModelName] - ?? 
throw new InvalidOperationException($"Key property '{keyProperty.ModelName}' is null.")); + string s => s, + null => throw new InvalidOperationException($"Key property '{model.KeyProperty.ModelName}' is null."), + _ => throw new InvalidCastException($"Key property '{model.KeyProperty.ModelName}' must be a string.") + }; - break; + foreach (var dataProperty in model.DataProperties) + { + if (dataModel.TryGetValue(dataProperty.StorageName, out var dataValue)) + { + jsonObject[dataProperty.StorageName] = dataValue is not null ? + JsonSerializer.SerializeToNode(dataValue, dataProperty.Type, jsonSerializerOptions) : + null; + } + } - case VectorStoreRecordDataPropertyModel dataProperty: - if (dataModel.TryGetValue(dataProperty.StorageName, out var dataValue)) - { - jsonObject[dataProperty.StorageName] = dataValue is not null ? - JsonSerializer.SerializeToNode(dataValue, property.Type, jsonSerializerOptions) : - null; - } - break; + for (var i = 0; i < model.VectorProperties.Count; i++) + { + var property = model.VectorProperties[i]; - case VectorStoreRecordVectorPropertyModel vectorProperty: - if (dataModel.TryGetValue(vectorProperty.StorageName, out var vectorValue)) + if (generatedEmbeddings?[i] is null) + { + // No generated embedding, read the vector directly from the data model + if (dataModel.TryGetValue(property.ModelName, out var sourceValue)) + { + jsonObject.Add(property.StorageName, sourceValue is null + ? null + : JsonSerializer.SerializeToNode(sourceValue, property.Type, s_vectorJsonSerializerOptions)); + } + } + else + { + Debug.Assert(property.EmbeddingGenerator is not null); + var embedding = generatedEmbeddings[i]; + jsonObject.Add( + property.StorageName, + embedding switch { - jsonObject[vectorProperty.StorageName] = vectorValue is not null ? - JsonSerializer.SerializeToNode(vectorValue, property.Type, s_vectorJsonSerializerOptions) : - null; - } - break; - - default: - throw new UnreachableException(); + MEAI.Embedding e => JsonSerializer.SerializeToNode(e.Vector, s_vectorJsonSerializerOptions), + MEAI.Embedding e => JsonSerializer.SerializeToNode(e.Vector, s_vectorJsonSerializerOptions), + MEAI.Embedding e => JsonSerializer.SerializeToNode(e.Vector, s_vectorJsonSerializerOptions), + _ => throw new UnreachableException() + }); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKernelBuilderExtensions.cs index f3fa50210626..f2b914078f14 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKernelBuilderExtensions.cs @@ -57,7 +57,7 @@ public static IKernelBuilder AddAzureCosmosDBNoSQLVectorStore( } /// - /// Register an Azure CosmosDB NoSQL and with the specified service ID + /// Register an Azure CosmosDB NoSQL and with the specified service ID /// and where the Azure CosmosDB NoSQL is retrieved from the dependency injection container. /// /// The type of the record. @@ -78,7 +78,7 @@ public static IKernelBuilder AddAzureCosmosDBNoSQLVectorStoreRecordCollection - /// Register an Azure CosmosDB NoSQL and with the specified service ID + /// Register an Azure CosmosDB NoSQL and with the specified service ID /// and where the Azure CosmosDB NoSQL is constructed using the provided and . /// /// The type of the record. 
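// A minimal sketch, assuming a dynamic collection over Dictionary<string, object?> records whose key
// property is named "HotelId", of the key handling the rewritten mapper above enforces: the key value
// must be present, non-null and a string before the document is sent to Azure Cosmos DB NoSQL.
private static Dictionary<string, object?> BuildDynamicRecord() => new()
{
    ["HotelId"] = "hotel-1",                                   // a non-string value here throws InvalidCastException
    ["Description"] = "Budget hotel near the pier",
    ["DescriptionEmbedding"] = new ReadOnlyMemory<float>(new float[1536]),
};
// Omitting "HotelId", or setting it to null, throws InvalidOperationException in
// MapFromDataToStorageModel above.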
diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLServiceCollectionExtensions.cs index 2410e7f5826e..de7910ba078d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLServiceCollectionExtensions.cs @@ -2,6 +2,7 @@ using System.Text.Json; using Microsoft.Azure.Cosmos; +using Microsoft.Extensions.AI; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; @@ -34,7 +35,10 @@ public static IServiceCollection AddAzureCosmosDBNoSQLVectorStore( (sp, obj) => { var database = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; return new AzureCosmosDBNoSQLVectorStore(database, options); }); @@ -71,7 +75,10 @@ public static IServiceCollection AddAzureCosmosDBNoSQLVectorStore( }); var database = cosmosClient.GetDatabase(databaseName); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; return new AzureCosmosDBNoSQLVectorStore(database, options); }); @@ -80,7 +87,7 @@ public static IServiceCollection AddAzureCosmosDBNoSQLVectorStore( } /// - /// Register an Azure CosmosDB NoSQL and with the specified service ID + /// Register an Azure CosmosDB NoSQL and with the specified service ID /// and where the Azure CosmosDB NoSQL is retrieved from the dependency injection container. /// /// The type of the record. @@ -101,9 +108,12 @@ public static IServiceCollection AddAzureCosmosDBNoSQLVectorStoreRecordCollectio (sp, obj) => { var database = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new AzureCosmosDBNoSQLVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new AzureCosmosDBNoSQLVectorStoreRecordCollection(database, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -112,7 +122,7 @@ public static IServiceCollection AddAzureCosmosDBNoSQLVectorStoreRecordCollectio } /// - /// Register an Azure CosmosDB NoSQL and with the specified service ID + /// Register an Azure CosmosDB NoSQL and with the specified service ID /// and where the Azure CosmosDB NoSQL is constructed using the provided and . /// /// The type of the record. @@ -143,9 +153,12 @@ public static IServiceCollection AddAzureCosmosDBNoSQLVectorStoreRecordCollectio }); var database = cosmosClient.GetDatabase(databaseName); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new AzureCosmosDBNoSQLVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new AzureCosmosDBNoSQLVectorStoreRecordCollection(database, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -154,14 +167,14 @@ public static IServiceCollection AddAzureCosmosDBNoSQLVectorStoreRecordCollectio } /// - /// Also register the with the given as a . + /// Also register the with the given as a . /// /// The type of the data model that the collection should contain. 
/// The service collection to register on. /// The service id that the registrations should use. private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) where TRecord : notnull { - services.AddKeyedTransient>( + services.AddKeyedTransient>( serviceId, (sp, obj) => { diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs index e6aeacc85f24..d74c4df364fc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs @@ -70,7 +70,8 @@ public IVectorStoreRecordCollection GetCollection( new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition, - JsonSerializerOptions = this._options.JsonSerializerOptions + JsonSerializerOptions = this._options.JsonSerializerOptions, + EmbeddingGenerator = this._options.EmbeddingGenerator }) as IVectorStoreRecordCollection; return recordCollection!; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreOptions.cs index 2439b1362367..6120a3c26630 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreOptions.cs @@ -2,6 +2,7 @@ using System; using System.Text.Json; +using Microsoft.Extensions.AI; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; @@ -20,4 +21,9 @@ public sealed class AzureCosmosDBNoSQLVectorStoreOptions /// Gets or sets the JSON serializer options to use when converting between the data model and the Azure CosmosDB NoSQL record. /// public JsonSerializerOptions? JsonSerializerOptions { get; init; } + + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs index 993c3c3debb0..9c566ee111cf 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -12,10 +12,13 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Azure.Cosmos; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.Properties; using DistanceFunction = Microsoft.Azure.Cosmos.DistanceFunction; using IndexKind = Microsoft.Extensions.VectorData.IndexKind; +using MEAI = Microsoft.Extensions.AI; using SKDistanceFunction = Microsoft.Extensions.VectorData.DistanceFunction; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; @@ -91,13 +94,13 @@ public AzureCosmosDBNoSQLVectorStoreRecordCollection( this.Name = name; this._options = options ?? new(); var jsonSerializerOptions = this._options.JsonSerializerOptions ?? 
JsonSerializerOptions.Default; - this._model = new AzureCosmosDBNoSqlVectorStoreModelBuilder() - .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, jsonSerializerOptions); + this._model = new AzureCosmosDBNoSQLVectorStoreModelBuilder() + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator, jsonSerializerOptions); // Assign mapper. this._mapper = typeof(TRecord) == typeof(Dictionary) ? (new AzureCosmosDBNoSQLDynamicDataModelMapper(this._model, jsonSerializerOptions) as ICosmosNoSQLMapper)! - : new AzureCosmosDBNoSQLVectorStoreRecordMapper(this._model.KeyProperty, this._options.JsonSerializerOptions); + : new AzureCosmosDBNoSQLVectorStoreRecordMapper(this._model, this._options.JsonSerializerOptions); // Setup partition key property if (this._options.PartitionKeyPropertyName is not null) @@ -236,6 +239,10 @@ public async IAsyncEnumerable GetAsync( const string OperationName = "GetItemQueryIterator"; var includeVectors = options?.IncludeVectors ?? false; + if (includeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSelectQuery( this._model, @@ -267,12 +274,48 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati const string OperationName = "UpsertItem"; + MEAI.Embedding?[]? generatedEmbeddings = null; + + var vectorPropertyCount = this._model.VectorProperties.Count; + for (var i = 0; i < vectorPropertyCount; i++) + { + var vectorProperty = this._model.VectorProperties[i]; + + if (vectorProperty.EmbeddingGenerator is null) + { + continue; + } + + // TODO: Ideally we'd group together vector properties using the same generator (and with the same input and output properties), + // and generate embeddings for them in a single batch. That's some more complexity though. + if (vectorProperty.TryGenerateEmbedding, ReadOnlyMemory>(record, cancellationToken, out var floatTask)) + { + generatedEmbeddings ??= new MEAI.Embedding?[vectorPropertyCount]; + generatedEmbeddings[i] = await floatTask.ConfigureAwait(false); + } + else if (vectorProperty.TryGenerateEmbedding, ReadOnlyMemory>(record, cancellationToken, out var byteTask)) + { + generatedEmbeddings ??= new MEAI.Embedding?[vectorPropertyCount]; + generatedEmbeddings[i] = await byteTask.ConfigureAwait(false); + } + else if (vectorProperty.TryGenerateEmbedding, ReadOnlyMemory>(record, cancellationToken, out var sbyteTask)) + { + generatedEmbeddings ??= new MEAI.Embedding?[vectorPropertyCount]; + generatedEmbeddings[i] = await sbyteTask.ConfigureAwait(false); + } + else + { + throw new InvalidOperationException( + $"The embedding generator configured on property '{vectorProperty.ModelName}' cannot produce an embedding of types '{typeof(Embedding).Name}', '{typeof(Embedding).Name}' or '{typeof(Embedding).Name}' for the given input type."); + } + } + var jsonObject = VectorStoreErrorHandler.RunModelConversion( AzureCosmosDBNoSQLConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this.Name, OperationName, - () => this._mapper.MapFromDataToStorageModel(record)); + () => this._mapper.MapFromDataToStorageModel(record, generatedEmbeddings)); var keyValue = jsonObject.TryGetPropertyValue(this._model.KeyProperty.StorageName!, out var jsonKey) ? 
jsonKey?.ToString() : null; var partitionKeyValue = jsonObject.TryGetPropertyValue(this._partitionKeyProperty.StorageName, out var jsonPartitionKey) ? jsonPartitionKey?.ToString() : null; @@ -312,12 +355,90 @@ public async Task> UpsertAsync(IEnumerable records, return keys.Where(k => k is not null).ToList(); } + #region Search + + /// + public async IAsyncEnumerable> SearchAsync( + TInput value, + int top, + VectorSearchOptions? options = default, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TInput : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + switch (vectorProperty.EmbeddingGenerator) + { + case IEmbeddingGenerator> generator: + { + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + } + + case IEmbeddingGenerator> generator: + { + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + } + + case IEmbeddingGenerator> generator: + { + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + } + + case null: + throw new InvalidOperationException(VectorDataStrings.NoEmbeddingGeneratorWasConfiguredForSearch); + + default: + throw new InvalidOperationException( + AzureCosmosDBNoSQLVectorStoreModelBuilder.s_supportedVectorTypes.Contains(typeof(TInput)) + ? string.Format(VectorDataStrings.EmbeddingTypePassedToSearchAsync) + : string.Format(VectorDataStrings.IncompatibleEmbeddingGeneratorWasConfiguredForInputType, typeof(TInput).Name, vectorProperty.EmbeddingGenerator.GetType().Name)); + } + } + /// - public IAsyncEnumerable> VectorizedSearchAsync( + public IAsyncEnumerable> SearchEmbeddingAsync( TVector vector, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) + where TVector : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + return this.SearchCoreAsync(vector, top, vectorProperty, operationName: "SearchEmbedding", options, cancellationToken); + } + + private IAsyncEnumerable> SearchCoreAsync( + TVector vector, + int top, + VectorStoreRecordVectorPropertyModel vectorProperty, + string operationName, + VectorSearchOptions options, + CancellationToken cancellationToken = default) + where TVector : notnull { const string OperationName = "VectorizedSearch"; const string ScorePropertyName = "SimilarityScore"; @@ -325,8 +446,10 @@ public IAsyncEnumerable> VectorizedSearchAsync p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } #pragma warning disable CS0618 // Type or member is obsolete var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( @@ -336,11 +459,11 @@ public IAsyncEnumerable> VectorizedSearchAsync(queryDefinition, cancellationToken); @@ -348,10 +471,18 @@ public IAsyncEnumerable> VectorizedSearchAsync + [Obsolete("Use either SearchEmbeddingAsync to search directly on embeddings, or SearchAsync to handle embedding generation internally as part of the call.")] + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + where TVector : notnull + => this.SearchEmbeddingAsync(vector, top, options, cancellationToken); + + #endregion Search + /// public async IAsyncEnumerable GetAsync(Expression> filter, int top, GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -394,9 +525,9 @@ public IAsyncEnumerable> HybridSearchAsync( this.VerifyVectorType(vector); Verify.NotLessThan(top, 1); - var searchOptions = options ?? s_defaultKeywordVectorizedHybridSearchOptions; - var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = searchOptions.VectorProperty }); - var textProperty = this._model.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalProperty); + options ??= s_defaultKeywordVectorizedHybridSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = options.VectorProperty }); + var textProperty = this._model.GetFullTextDataPropertyOrSingle(options.AdditionalProperty); #pragma warning disable CS0618 // Type or member is obsolete var queryDefinition = AzureCosmosDBNoSQLVectorStoreCollectionQueryBuilder.BuildSearchQuery( @@ -406,11 +537,11 @@ public IAsyncEnumerable> HybridSearchAsync( vectorProperty.StorageName, textProperty.StorageName, ScorePropertyName, - searchOptions.OldFilter, - searchOptions.Filter, + options.OldFilter, + options.Filter, top, - searchOptions.Skip, - searchOptions.IncludeVectors); + options.Skip, + options.IncludeVectors); #pragma warning restore CS0618 // Type or member is obsolete var searchResults = this.GetItemsAsync(queryDefinition, cancellationToken); @@ -418,7 +549,7 @@ public IAsyncEnumerable> HybridSearchAsync( searchResults, ScorePropertyName, OperationName, - searchOptions.IncludeVectors, + options.IncludeVectors, cancellationToken); } @@ -443,11 +574,11 @@ private void VerifyVectorType(TVector? 
vector) var vectorType = vector.GetType(); - if (!AzureCosmosDBNoSqlVectorStoreModelBuilder.s_supportedVectorTypes.Contains(vectorType)) + if (!AzureCosmosDBNoSQLVectorStoreModelBuilder.s_supportedVectorTypes.Contains(vectorType)) { throw new NotSupportedException( $"The provided vector type {vectorType.FullName} is not supported by the Azure CosmosDB NoSQL connector. " + - $"Supported types are: {string.Join(", ", AzureCosmosDBNoSqlVectorStoreModelBuilder.s_supportedVectorTypes.Select(l => l.FullName))}"); + $"Supported types are: {string.Join(", ", AzureCosmosDBNoSQLVectorStoreModelBuilder.s_supportedVectorTypes.Select(l => l.FullName))}"); } } @@ -476,7 +607,7 @@ private async Task RunOperationAsync(string operationName, Func> o private ContainerProperties GetContainerProperties() { // Process Vector properties. - var embeddings = new Collection(); + var embeddings = new Collection(); var vectorIndexPaths = new Collection(); var indexingPolicy = new IndexingPolicy @@ -497,9 +628,9 @@ private ContainerProperties GetContainerProperties() { var path = $"/{property.StorageName}"; - var embedding = new Embedding + var embedding = new Azure.Cosmos.Embedding { - DataType = GetDataType(property.Type, property.StorageName), + DataType = GetDataType(property.EmbeddingType, property.StorageName), Dimensions = (int)property.Dimensions, DistanceFunction = GetDistanceFunction(property.DistanceFunction, property.StorageName), Path = path diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs index bd0371b3c835..55c5ddf9aad6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs @@ -4,6 +4,7 @@ using System.Text.Json; using System.Text.Json.Nodes; using Microsoft.Azure.Cosmos; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; @@ -58,4 +59,9 @@ public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions. /// public bool Automatic { get; init; } = true; + + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs index 200853d43fea..4a2bede9820e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs @@ -1,9 +1,12 @@ // Copyright (c) Microsoft. All rights reserved. 
+using System.Diagnostics; using System.Text.Json; using System.Text.Json.Nodes; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; +using MEAI = Microsoft.Extensions.AI; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; @@ -11,12 +14,12 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; /// Class for mapping between a json node stored in Azure CosmosDB NoSQL and the consumer data model. /// /// The consumer data model to map to or from. -internal sealed class AzureCosmosDBNoSQLVectorStoreRecordMapper(VectorStoreRecordKeyPropertyModel keyProperty, JsonSerializerOptions? jsonSerializerOptions) +internal sealed class AzureCosmosDBNoSQLVectorStoreRecordMapper(VectorStoreRecordModel model, JsonSerializerOptions? jsonSerializerOptions) : ICosmosNoSQLMapper { - private readonly VectorStoreRecordKeyPropertyModel _keyProperty = keyProperty; + private readonly VectorStoreRecordKeyPropertyModel _keyProperty = model.KeyProperty; - public JsonObject MapFromDataToStorageModel(TRecord dataModel) + public JsonObject MapFromDataToStorageModel(TRecord dataModel, MEAI.Embedding?[]? generatedEmbeddings) { var jsonObject = JsonSerializer.SerializeToNode(dataModel, jsonSerializerOptions)!.AsObject(); @@ -25,6 +28,28 @@ public JsonObject MapFromDataToStorageModel(TRecord dataModel) // account e.g. naming policies. TemporaryStorageName gets populated in the model builder - containing that name - once VectorStoreModelBuildingOptions.ReservedKeyPropertyName is set RenameJsonProperty(jsonObject, this._keyProperty.TemporaryStorageName!, AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName); + // Go over the vector properties; those which have an embedding generator configured on them will have embedding generators, overwrite + // the value in the JSON object with that. 
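+ // generatedEmbeddings is indexed in parallel with model.VectorProperties: a non-null entry at position i holds
+ // the embedding generated for the i-th vector property, and its value replaces that property's serialized vector.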
+ if (generatedEmbeddings is not null) + { + for (var i = 0; i < model.VectorProperties.Count; i++) + { + if (generatedEmbeddings[i] is not null) + { + var property = model.VectorProperties[i]; + Debug.Assert(property.EmbeddingGenerator is not null); + var embedding = generatedEmbeddings[i]; + jsonObject[property.StorageName] = embedding switch + { + Embedding e => JsonSerializer.SerializeToNode(e.Vector, jsonSerializerOptions), + Embedding e => JsonSerializer.SerializeToNode(e.Vector, jsonSerializerOptions), + Embedding e => JsonSerializer.SerializeToNode(e.Vector, jsonSerializerOptions), + _ => throw new UnreachableException() + }; + } + } + } + return jsonObject; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlVectorStoreModelBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlVectorStoreModelBuilder.cs index 6bff6d46c5cf..66b5a6fd6970 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlVectorStoreModelBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSqlVectorStoreModelBuilder.cs @@ -6,24 +6,8 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; -internal class AzureCosmosDBNoSqlVectorStoreModelBuilder() : VectorStoreRecordJsonModelBuilder(s_modelBuildingOptions) +internal class AzureCosmosDBNoSQLVectorStoreModelBuilder() : VectorStoreRecordJsonModelBuilder(s_modelBuildingOptions) { - private static readonly VectorStoreRecordModelBuildingOptions s_modelBuildingOptions = new() - { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true, - UsesExternalSerializer = true, - - // TODO: Cosmos supports other key types (int, Guid...) - SupportedKeyPropertyTypes = [typeof(string)], - SupportedDataPropertyTypes = s_supportedDataTypes, - SupportedEnumerableDataPropertyElementTypes = s_supportedDataTypes, - SupportedVectorPropertyTypes = s_supportedVectorTypes, - - ReservedKeyStorageName = AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName, - }; - private static readonly HashSet s_supportedDataTypes = [ typeof(bool), @@ -40,11 +24,29 @@ internal class AzureCosmosDBNoSqlVectorStoreModelBuilder() : VectorStoreRecordJs // Float32 typeof(ReadOnlyMemory), typeof(ReadOnlyMemory?), + // Uint8 typeof(ReadOnlyMemory), typeof(ReadOnlyMemory?), + // Int8 typeof(ReadOnlyMemory), typeof(ReadOnlyMemory?), ]; + + private static readonly VectorStoreRecordModelBuildingOptions s_modelBuildingOptions = new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + UsesExternalSerializer = true, + + // TODO: Cosmos supports other key types (int, Guid...) 
+ SupportedKeyPropertyTypes = [typeof(string)], + SupportedDataPropertyTypes = s_supportedDataTypes, + SupportedEnumerableDataPropertyElementTypes = s_supportedDataTypes, + SupportedVectorPropertyTypes = s_supportedVectorTypes, + + ReservedKeyStorageName = AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName, + }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/ICosmosNoSQLMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/ICosmosNoSQLMapper.cs index 3e8c0d8ce7a8..e286b304cf11 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/ICosmosNoSQLMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/ICosmosNoSQLMapper.cs @@ -2,6 +2,7 @@ using System.Text.Json.Nodes; using Microsoft.Extensions.VectorData; +using MEAI = Microsoft.Extensions.AI; namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; @@ -10,15 +11,10 @@ internal interface ICosmosNoSQLMapper /// /// Maps from the consumer record data model to the storage model. /// - /// The consumer record data model record to map. - /// The mapped result. - JsonObject MapFromDataToStorageModel(TRecord dataModel); + JsonObject MapFromDataToStorageModel(TRecord dataModel, MEAI.Embedding?[]? generatedEmbeddings); /// /// Maps from the storage model to the consumer record data model. /// - /// The storage data model record to map. - /// Options to control the mapping behavior. - /// The mapped result. TRecord MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options); } diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryKernelBuilderExtensions.cs index a719b1bb764e..ade5a79b5861 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryKernelBuilderExtensions.cs @@ -20,12 +20,12 @@ public static class InMemoryKernelBuilderExtensions /// The kernel builder. public static IKernelBuilder AddInMemoryVectorStore(this IKernelBuilder builder, string? serviceId = default) { - builder.Services.AddInMemoryVectorStore(serviceId); + builder.Services.AddInMemoryVectorStore(serviceId: serviceId); return builder; } /// - /// Register an InMemory and with the specified service ID. + /// Register an InMemory and with the specified service ID. /// /// The type of the key. /// The type of the record. diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryModelBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryModelBuilder.cs new file mode 100644 index 000000000000..096ba01d467a --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryModelBuilder.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft. All rights reserved. 
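+// Centralizes the in-memory connector's model-building options (previously defined inline in InMemoryVectorStoreRecordCollection):
+// property type validation is disabled and only ReadOnlyMemory<float> vectors (including the nullable form) are supported.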
+ +using System; +using Microsoft.Extensions.VectorData.ConnectorSupport; + +namespace Microsoft.SemanticKernel.Connectors.InMemory; + +internal class InMemoryModelBuilder() : VectorStoreRecordModelBuilder(ValidationOptions) +{ + internal static readonly VectorStoreRecordModelBuildingOptions ValidationOptions = new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + + // Disable property type validation + SupportedKeyPropertyTypes = null, + SupportedDataPropertyTypes = null, + SupportedEnumerableDataPropertyElementTypes = null, + SupportedVectorPropertyTypes = [typeof(ReadOnlyMemory), typeof(ReadOnlyMemory?)] + }; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryServiceCollectionExtensions.cs index f7d0aadf111b..56f1c8769bfe 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryServiceCollectionExtensions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.AI; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.InMemory; @@ -17,17 +18,30 @@ public static class InMemoryServiceCollectionExtensions /// Register an InMemory with the specified service ID. /// /// The to register the on. + /// Optional options to further configure the . /// An optional service id to use as the service key. /// The service collection. - public static IServiceCollection AddInMemoryVectorStore(this IServiceCollection services, string? serviceId = default) + public static IServiceCollection AddInMemoryVectorStore(this IServiceCollection services, InMemoryVectorStoreOptions? options = default, string? serviceId = default) { + services.AddKeyedTransient( + serviceId, + (sp, obj) => + { + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; + + return new InMemoryVectorStore(options); + }); + services.AddKeyedSingleton(serviceId); services.AddKeyedSingleton(serviceId, (sp, obj) => sp.GetRequiredKeyedService(serviceId)); return services; } /// - /// Register an InMemory and with the specified service ID. + /// Register an InMemory and with the specified service ID. /// /// The type of the key. /// The type of the record. @@ -48,11 +62,14 @@ public static IServiceCollection AddInMemoryVectorStoreRecordCollection { - var selectedOptions = options ?? sp.GetService>(); - return (new InMemoryVectorStoreRecordCollection(collectionName, selectedOptions) as IVectorStoreRecordCollection)!; + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; + return (new InMemoryVectorStoreRecordCollection(collectionName, options) as IVectorStoreRecordCollection)!; }); - services.AddKeyedSingleton>( + services.AddKeyedSingleton>( serviceId, (sp, obj) => { diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorRecordWrapper.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorRecordWrapper.cs new file mode 100644 index 000000000000..856b1c1a2d9b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorRecordWrapper.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. 
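+// Pairs a user record with the vectors generated for it at upsert time, keyed by vector property model name, so that
+// search can read the generated embedding for a property whose values are produced by an embedding generator.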
+ +using System; +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel.Connectors.InMemory; + +internal readonly struct InMemoryVectorRecordWrapper(TRecord record) +{ + public TRecord Record { get; } = record; + public Dictionary> EmbeddingGeneratedVectors { get; } = new(); +} diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs index f5b495341775..85b623551980 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStore.cs @@ -15,6 +15,8 @@ namespace Microsoft.SemanticKernel.Connectors.InMemory; /// public sealed class InMemoryVectorStore : IVectorStore { + private readonly InMemoryVectorStoreOptions _options; + /// Metadata about vector store. private readonly VectorStoreMetadata _metadata; @@ -27,8 +29,10 @@ public sealed class InMemoryVectorStore : IVectorStore /// /// Initializes a new instance of the class. /// - public InMemoryVectorStore() + /// Optional configuration options for this class + public InMemoryVectorStore(InMemoryVectorStoreOptions? options = default) { + this._options = options ?? new InMemoryVectorStoreOptions(); this._internalCollections = new(); this._metadata = new() @@ -37,20 +41,6 @@ public InMemoryVectorStore() }; } - /// - /// Initializes a new instance of the class. - /// - /// Allows passing in the dictionary used for storage, for testing purposes. - internal InMemoryVectorStore(ConcurrentDictionary> internalCollection) - { - this._internalCollections = internalCollection; - - this._metadata = new() - { - VectorStoreSystemName = InMemoryConstants.VectorStoreSystemName - }; - } - /// public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) where TKey : notnull @@ -65,7 +55,11 @@ public IVectorStoreRecordCollection GetCollection( this._internalCollections, this._internalCollectionTypes, name, - new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; + new() + { + VectorStoreRecordDefinition = vectorStoreRecordDefinition, + EmbeddingGenerator = this._options.EmbeddingGenerator + }) as IVectorStoreRecordCollection; return collection!; } diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreCollectionSearchMapping.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreCollectionSearchMapping.cs index 6b33671cef9f..33e0afda04b7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreCollectionSearchMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreCollectionSearchMapping.cs @@ -92,15 +92,15 @@ public static float ConvertScore(float score, string? distanceFunction) /// /// Filter the provided records using the provided filter definition. /// - /// The filter definition to filter the with. - /// The records to filter. + /// The filter definition to filter the with. + /// The records to filter. /// The filtered records. /// Thrown when an unsupported filter clause is encountered. 
- public static IEnumerable FilterRecords(VectorSearchFilter filter, IEnumerable records) + public static IEnumerable> FilterRecords(VectorSearchFilter filter, IEnumerable> recordWrappers) { - return records.Where(record => + return recordWrappers.Where(wrapper => { - if (record is null) + if (wrapper.Record is null) { return false; } @@ -114,7 +114,7 @@ public static IEnumerable FilterRecords(VectorSearchFilter fil { if (clause is EqualToFilterClause equalToFilter) { - result = result && CheckEqualTo(record, equalToFilter); + result = result && CheckEqualTo(wrapper.Record, equalToFilter); if (result == false) { @@ -123,7 +123,7 @@ public static IEnumerable FilterRecords(VectorSearchFilter fil } else if (clause is AnyTagEqualToFilterClause anyTagEqualToFilter) { - result = result && CheckAnyTagEqualTo(record, anyTagEqualToFilter); + result = result && CheckAnyTagEqualTo(wrapper.Record, anyTagEqualToFilter); if (result == false) { diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreOptions.cs new file mode 100644 index 000000000000..e3fb530e1821 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreOptions.cs @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.AI; + +namespace Microsoft.SemanticKernel.Connectors.InMemory; + +/// +/// Options when creating a . +/// +public sealed class InMemoryVectorStoreOptions +{ + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index 810f9932245b..2d1d6685860e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -3,13 +3,16 @@ using System; using System.Collections.Concurrent; using System.Collections.Generic; +using System.Diagnostics; using System.Linq; using System.Linq.Expressions; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.Properties; namespace Microsoft.SemanticKernel.Connectors.InMemory; @@ -51,19 +54,6 @@ public sealed class InMemoryVectorStoreRecordCollection : IVector /// An function to look up keys from the records. private readonly InMemoryVectorStoreKeyResolver _keyResolver; - private static readonly VectorStoreRecordModelBuildingOptions s_validationOptions = new() - { - RequiresAtLeastOneVector = false, - SupportsMultipleKeys = false, - SupportsMultipleVectors = true, - - // Disable property type validation - SupportedKeyPropertyTypes = null, - SupportedDataPropertyTypes = null, - SupportedEnumerableDataPropertyElementTypes = null, - SupportedVectorPropertyTypes = [typeof(ReadOnlyMemory)] - }; - /// /// Initializes a new instance of the class. /// @@ -80,8 +70,8 @@ public InMemoryVectorStoreRecordCollection(string name, InMemoryVectorStoreRecor this._internalCollectionTypes = new(); this._options = options ?? 
new InMemoryVectorStoreRecordCollectionOptions(); - this._model = new VectorStoreRecordModelBuilder(s_validationOptions) - .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); + this._model = new InMemoryModelBuilder() + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator); // Assign resolvers. // TODO: Make generic to avoid boxing @@ -180,11 +170,16 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) /// public Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { + if (options?.IncludeVectors == true && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + var collectionDictionary = this.GetCollectionDictionary(); if (collectionDictionary.TryGetValue(key, out var record)) { - return Task.FromResult((TRecord?)record); + return Task.FromResult(((InMemoryVectorRecordWrapper)record).Record); } return Task.FromResult(default); @@ -195,6 +190,11 @@ public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecor { Verify.NotNull(keys); + if (options?.IncludeVectors == true && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + foreach (var key in keys) { var record = await this.GetAsync(key, options, cancellationToken).ConfigureAwait(false); @@ -231,31 +231,157 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationTo } /// - public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) - => Task.FromResult(this.Upsert(record)); + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + { + var keys = await this.UpsertAsync([record], cancellationToken).ConfigureAwait(false); + + return keys.Single(); + } /// - public Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) + public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) { Verify.NotNull(records); - return Task.FromResult>(records.Select(this.Upsert).ToList()); + IReadOnlyList? recordsList = null; + + // If an embedding generator is defined, invoke it once per property for all records. + IReadOnlyList?[]? generatedEmbeddings = null; + + var vectorPropertyCount = this._model.VectorProperties.Count; + for (var i = 0; i < vectorPropertyCount; i++) + { + var vectorProperty = this._model.VectorProperties[i]; + + if (vectorProperty.EmbeddingGenerator is null) + { + continue; + } + + // We have a property with embedding generation; materialize the records' enumerable if needed, to + // prevent multiple enumeration. + if (recordsList is null) + { + recordsList = records is IReadOnlyList r ? r : records.ToList(); + + if (recordsList.Count == 0) + { + return []; + } + + records = recordsList; + } + + // TODO: Ideally we'd group together vector properties using the same generator (and with the same input and output properties), + // and generate embeddings for them in a single batch. That's some more complexity though. 
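+ // The in-memory connector only stores ReadOnlyMemory<float> vectors, so a single Embedding<float> generation attempt
+ // per vector property is sufficient; a generator that cannot produce that type is rejected below.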
+ if (vectorProperty.TryGenerateEmbeddings, ReadOnlyMemory>(records, cancellationToken, out var floatTask)) + { + generatedEmbeddings ??= new IReadOnlyList?[vectorPropertyCount]; + generatedEmbeddings[i] = (IReadOnlyList>)await floatTask.ConfigureAwait(false); + } + else + { + throw new InvalidOperationException( + $"The embedding generator configured on property '{vectorProperty.ModelName}' cannot produce an embedding of type '{typeof(Embedding).Name}' for the given input type."); + } + } + + var keys = new List(); + var collectionDictionary = this.GetCollectionDictionary(); + + var recordIndex = 0; + foreach (var record in records) + { + var key = (TKey)this._keyResolver(record)!; + var wrappedRecord = new InMemoryVectorRecordWrapper(record); + + if (generatedEmbeddings is not null) + { + for (var i = 0; i < this._model.VectorProperties.Count; i++) + { + if (generatedEmbeddings![i] is IReadOnlyList propertyEmbeddings) + { + var property = this._model.VectorProperties[i]; + + wrappedRecord.EmbeddingGeneratedVectors[property.ModelName] = propertyEmbeddings[recordIndex] switch + { + Embedding e => e.Vector, + _ => throw new UnreachableException() + }; + } + } + } + + collectionDictionary.AddOrUpdate(key!, wrappedRecord, (key, currentValue) => wrappedRecord); + + keys.Add(key); + + recordIndex++; + } + + return keys; } - private TKey Upsert(TRecord record) + #region Search + + /// + public async IAsyncEnumerable> SearchAsync( + TInput value, + int top, + VectorSearchOptions? options = default, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TInput : notnull { - Verify.NotNull(record); + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); - var collectionDictionary = this.GetCollectionDictionary(); + switch (vectorProperty.EmbeddingGenerator) + { + case IEmbeddingGenerator> generator: + { + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); - var key = (TKey)this._keyResolver(record)!; - collectionDictionary.AddOrUpdate(key!, record, (key, currentValue) => record); + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } - return key!; + yield break; + } + + case null: + throw new InvalidOperationException(VectorDataStrings.NoEmbeddingGeneratorWasConfiguredForSearch); + + default: + throw new InvalidOperationException( + InMemoryModelBuilder.ValidationOptions.SupportedVectorPropertyTypes.Contains(typeof(TInput)) + ? string.Format(VectorDataStrings.EmbeddingTypePassedToSearchAsync) + : string.Format(VectorDataStrings.IncompatibleEmbeddingGeneratorWasConfiguredForInputType, typeof(TInput).Name, vectorProperty.EmbeddingGenerator.GetType().Name)); + } } /// - public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable> SearchEmbeddingAsync( + TVector vector, + int top, + VectorSearchOptions? 
options = null, + CancellationToken cancellationToken = default) + where TVector : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + return this.SearchCoreAsync(vector, top, vectorProperty, operationName: "SearchEmbedding", options, cancellationToken); + } + + private IAsyncEnumerable> SearchCoreAsync( + TVector vector, + int top, + VectorStoreRecordVectorPropertyModel vectorProperty, + string operationName, + VectorSearchOptions options, + CancellationToken cancellationToken = default) + where TVector : notnull { Verify.NotNull(vector); Verify.NotLessThan(top, 1); @@ -265,34 +391,45 @@ public IAsyncEnumerable> VectorizedSearchAsync p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } #pragma warning disable CS0618 // VectorSearchFilter is obsolete // Filter records using the provided filter before doing the vector comparison. - var allValues = this.GetCollectionDictionary().Values.Cast(); - var filteredRecords = internalOptions switch + var allValues = this.GetCollectionDictionary().Values.Cast>(); + var filteredRecords = options switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), { OldFilter: VectorSearchFilter legacyFilter } => InMemoryVectorStoreCollectionSearchMapping.FilterRecords(legacyFilter, allValues), - { Filter: Expression> newFilter } => allValues.AsQueryable().Where(newFilter), + { Filter: Expression> newFilter } => allValues.AsQueryable().Where(this.ConvertFilter(newFilter)), _ => allValues }; #pragma warning restore CS0618 // VectorSearchFilter is obsolete // Compare each vector in the filtered results with the provided vector. - var results = filteredRecords.Select(record => + var results = filteredRecords.Select, (TRecord record, float score)?>(wrapper => { - var vectorObject = this._vectorResolver(vectorProperty.ModelName!, record); - if (vectorObject is not ReadOnlyMemory dbVector) + ReadOnlyMemory vector; + + if (vectorProperty.EmbeddingGenerator is null) + { + var vectorObject = this._vectorResolver(vectorProperty.ModelName!, wrapper.Record); + if (vectorObject is not ReadOnlyMemory dbVector) + { + return null; + } + vector = dbVector; + } + else { - return null; + vector = wrapper.EmbeddingGeneratedVectors[vectorProperty.ModelName]; } - var score = InMemoryVectorStoreCollectionSearchMapping.CompareVectors(floatVector.Span, dbVector.Span, vectorProperty.DistanceFunction); + var score = InMemoryVectorStoreCollectionSearchMapping.CompareVectors(floatVector.Span, vector.Span, vectorProperty.DistanceFunction); var convertedscore = InMemoryVectorStoreCollectionSearchMapping.ConvertScore(score, vectorProperty.DistanceFunction); - return (record, convertedscore); + return (wrapper.Record, convertedscore); }); // Get the non-null results since any record with a null vector results in a null result. @@ -302,12 +439,20 @@ public IAsyncEnumerable> VectorizedSearchAsync x.score) : nonNullResults.OrderBy(x => x.score); - var resultsPage = sortedScoredResults.Skip(internalOptions.Skip).Take(top); + var resultsPage = sortedScoredResults.Skip(options.Skip).Take(top); // Build the response. 
return resultsPage.Select(x => new VectorSearchResult((TRecord)x.record, x.score)).ToAsyncEnumerable(); } + /// + [Obsolete("Use either SearchEmbeddingAsync to search directly on embeddings, or SearchAsync to handle embedding generation internally as part of the call.")] + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + where TVector : notnull + => this.SearchEmbeddingAsync(vector, top, options, cancellationToken); + + #endregion Search + /// public object? GetService(Type serviceType, object? serviceKey = null) { @@ -330,6 +475,11 @@ public IAsyncEnumerable GetAsync(Expression> filter options ??= new(); + if (options.IncludeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + var records = this.GetCollectionDictionary().Values.Cast() .AsQueryable() .Where(filter); @@ -371,4 +521,24 @@ internal ConcurrentDictionary GetCollectionDictionary() return collectionDictionary; } + + /// + /// The user provides a filter expression accepting a Record, but we internally store it wrapped in an InMemoryVectorRecordWrapper. + /// This method converts a filter expression accepting a Record to one accepting an InMemoryVectorRecordWrapper. + /// + private Expression, bool>> ConvertFilter(Expression> recordFilter) + { + var wrapperParameter = Expression.Parameter(typeof(InMemoryVectorRecordWrapper), "w"); + var replacement = Expression.Property(wrapperParameter, nameof(InMemoryVectorRecordWrapper.Record)); + + return Expression.Lambda, bool>>( + new ParameterReplacer(recordFilter.Parameters.Single(), replacement).Visit(recordFilter.Body), + wrapperParameter); + } + + private sealed class ParameterReplacer(ParameterExpression originalRecordParameter, Expression replacementExpression) : ExpressionVisitor + { + protected override Expression VisitParameter(ParameterExpression node) + => node == originalRecordParameter ? replacementExpression : base.VisitParameter(node); + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollectionOptions.cs index 7a98830df1bf..b93a3caf66de 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollectionOptions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.InMemory; @@ -23,6 +24,11 @@ public sealed class InMemoryVectorStoreRecordCollectionOptions /// public VectorStoreRecordDefinition? VectorStoreRecordDefinition { get; init; } = null; + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } + /// /// An optional function that can be used to look up vectors from a record. 
/// diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBServiceCollectionExtensions.cs index 784dcf8220df..43c28c93974f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBServiceCollectionExtensions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using Microsoft.Extensions.AI; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.MongoDB; @@ -33,7 +34,10 @@ public static IServiceCollection AddMongoDBVectorStore( (sp, obj) => { var database = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; return new MongoDBVectorStore(database, options); }); @@ -70,7 +74,10 @@ public static IServiceCollection AddMongoDBVectorStore( var mongoClient = new MongoClient(settings); var database = mongoClient.GetDatabase(databaseName); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; return new MongoDBVectorStore(database, options); }); @@ -79,7 +86,7 @@ public static IServiceCollection AddMongoDBVectorStore( } /// - /// Register a MongoDB and with the specified service ID + /// Register a MongoDB and with the specified service ID /// and where the MongoDB is retrieved from the dependency injection container. /// /// The type of the record. @@ -100,9 +107,12 @@ public static IServiceCollection AddMongoDBVectorStoreRecordCollection( (sp, obj) => { var database = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new MongoDBVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new MongoDBVectorStoreRecordCollection(database, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -111,7 +121,7 @@ public static IServiceCollection AddMongoDBVectorStoreRecordCollection( } /// - /// Register a MongoDB and with the specified service ID + /// Register a MongoDB and with the specified service ID /// and where the MongoDB is constructed using the provided and . /// /// The type of the record. @@ -141,9 +151,12 @@ public static IServiceCollection AddMongoDBVectorStoreRecordCollection( var mongoClient = new MongoClient(settings); var database = mongoClient.GetDatabase(databaseName); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new MongoDBVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new MongoDBVectorStoreRecordCollection(database, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -152,14 +165,14 @@ public static IServiceCollection AddMongoDBVectorStoreRecordCollection( } /// - /// Also register the with the given as a . + /// Also register the with the given as a . /// /// The type of the data model that the collection should contain. /// The service collection to register on. /// The service id that the registrations should use. private static void AddVectorizedSearch(IServiceCollection services, string? 
serviceId) where TRecord : notnull { - services.AddKeyedTransient>( + services.AddKeyedTransient>( serviceId, (sp, obj) => { diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs index 5c5dd3e2072a..56429596cd03 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStore.cs @@ -64,7 +64,11 @@ public IVectorStoreRecordCollection GetCollection( var recordCollection = new MongoDBVectorStoreRecordCollection( this._mongoDatabase, name, - new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; + new() + { + VectorStoreRecordDefinition = vectorStoreRecordDefinition, + EmbeddingGenerator = this._options.EmbeddingGenerator + }) as IVectorStoreRecordCollection; return recordCollection!; } diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreOptions.cs index 52b58b6f8994..f53bc2fb6b92 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreOptions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using Microsoft.Extensions.AI; namespace Microsoft.SemanticKernel.Connectors.MongoDB; @@ -9,6 +10,11 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; /// public sealed class MongoDBVectorStoreOptions { + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } + /// /// An optional factory to use for constructing instances, if a custom record collection is required. /// diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs index 7ed0a3cc0b45..062c22d936e8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollection.cs @@ -8,8 +8,10 @@ using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.Properties; using MongoDB.Bson; using MongoDB.Driver; using MEVD = Microsoft.Extensions.VectorData; @@ -85,7 +87,7 @@ public MongoDBVectorStoreRecordCollection( this._mongoCollection = mongoDatabase.GetCollection(name); this.Name = name; this._options = options ?? new MongoDBVectorStoreRecordCollectionOptions(); - this._model = new MongoDBModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); + this._model = new MongoDBModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator); this._mapper = typeof(TRecord) == typeof(Dictionary) ? (new MongoDBDynamicDataModelMapper(this._model) as IMongoDBMapper)! : new MongoDBVectorStoreRecordMapper(this._model); @@ -169,6 +171,10 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) var stringKey = this.GetStringKey(key); var includeVectors = options?.IncludeVectors ?? 
false; + if (includeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } var record = await this.RunOperationAsync(OperationName, async () => { @@ -202,6 +208,11 @@ public async IAsyncEnumerable GetAsync( const string OperationName = "Find"; + if (options?.IncludeVectors == true && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + var stringKeys = keys is IEnumerable k ? k : keys.Cast(); using var cursor = await this @@ -226,30 +237,61 @@ public async IAsyncEnumerable GetAsync( } /// - public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { Verify.NotNull(record); const string OperationName = "ReplaceOne"; + Embedding?[]? generatedEmbeddings = null; + + var vectorPropertyCount = this._model.VectorProperties.Count; + for (var i = 0; i < vectorPropertyCount; i++) + { + var vectorProperty = this._model.VectorProperties[i]; + + if (vectorProperty.EmbeddingGenerator is null) + { + continue; + } + + // TODO: Ideally we'd group together vector properties using the same generator (and with the same input and output properties), + // and generate embeddings for them in a single batch. That's some more complexity though. + if (vectorProperty.TryGenerateEmbedding, ReadOnlyMemory>(record, cancellationToken, out var floatTask)) + { + generatedEmbeddings ??= new Embedding?[vectorPropertyCount]; + generatedEmbeddings[i] = await floatTask.ConfigureAwait(false); + } + else if (vectorProperty.TryGenerateEmbedding, ReadOnlyMemory>(record, cancellationToken, out var doubleTask)) + { + generatedEmbeddings ??= new Embedding?[vectorPropertyCount]; + generatedEmbeddings[i] = await doubleTask.ConfigureAwait(false); + } + else + { + throw new InvalidOperationException( + $"The embedding generator configured on property '{vectorProperty.ModelName}' cannot produce an embedding of type '{typeof(Embedding).Name}' for the given input type."); + } + } + var replaceOptions = new ReplaceOptions { IsUpsert = true }; var storageModel = VectorStoreErrorHandler.RunModelConversion( MongoDBConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this.Name, OperationName, - () => this._mapper.MapFromDataToStorageModel(record)); + () => this._mapper.MapFromDataToStorageModel(record, generatedEmbeddings)); var key = storageModel[MongoDBConstants.MongoReservedKeyPropertyName].AsString; - return this.RunOperationAsync(OperationName, async () => + return await this.RunOperationAsync(OperationName, async () => { await this._mongoCollection .ReplaceOneAsync(this.GetFilterById(key), storageModel, replaceOptions, cancellationToken) .ConfigureAwait(false); return (TKey)(object)key; - }); + }).ConfigureAwait(false); } /// @@ -262,21 +304,89 @@ public async Task> UpsertAsync(IEnumerable records, return results.Where(r => r is not null).ToList(); } + #region Search + /// - public async IAsyncEnumerable> VectorizedSearchAsync( + public async IAsyncEnumerable> SearchAsync( + TInput value, + int top, + MEVD.VectorSearchOptions? 
options = default, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TInput : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + switch (vectorProperty.EmbeddingGenerator) + { + case IEmbeddingGenerator> generator: + { + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + } + + case IEmbeddingGenerator> generator: + { + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + } + + case null: + throw new InvalidOperationException(VectorDataStrings.NoEmbeddingGeneratorWasConfiguredForSearch); + + default: + throw new InvalidOperationException( + MongoDBConstants.SupportedVectorTypes.Contains(typeof(TInput)) + ? string.Format(VectorDataStrings.EmbeddingTypePassedToSearchAsync) + : string.Format(VectorDataStrings.IncompatibleEmbeddingGeneratorWasConfiguredForInputType, typeof(TInput).Name, vectorProperty.EmbeddingGenerator.GetType().Name)); + } + } + + /// + public IAsyncEnumerable> SearchEmbeddingAsync( TVector vector, int top, MEVD.VectorSearchOptions? options = null, + CancellationToken cancellationToken = default) + where TVector : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + return this.SearchCoreAsync(vector, top, vectorProperty, operationName: "SearchEmbedding", options, cancellationToken); + } + + private async IAsyncEnumerable> SearchCoreAsync( + TVector vector, + int top, + VectorStoreRecordVectorPropertyModel vectorProperty, + string operationName, + MEVD.VectorSearchOptions options, [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TVector : notnull { Array vectorArray = VerifyVectorParam(vector); Verify.NotLessThan(top, 1); - var searchOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._model.GetVectorPropertyOrSingle(searchOptions); + if (options.IncludeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } #pragma warning disable CS0618 // VectorSearchFilter is obsolete - var filter = searchOptions switch + var filter = options switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), { OldFilter: VectorSearchFilter legacyFilter } => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(legacyFilter, this._model), @@ -287,7 +397,7 @@ public async IAsyncEnumerable> VectorizedSearchAsync // Constructing a query to fetch "skip + top" total items // to perform skip logic locally, since skip option is not part of API. - var itemsAmount = searchOptions.Skip + top; + var itemsAmount = options.Skip + top; var numCandidates = this._options.NumCandidates ?? 
itemsAmount * MongoDBConstants.DefaultNumCandidatesRatio; @@ -315,7 +425,7 @@ public async IAsyncEnumerable> VectorizedSearchAsync .AggregateAsync(pipeline, cancellationToken: cancellationToken) .ConfigureAwait(false); - return this.EnumerateAndMapSearchResultsAsync(cursor, searchOptions.Skip, searchOptions.IncludeVectors, cancellationToken); + return this.EnumerateAndMapSearchResultsAsync(cursor, options.Skip, options.IncludeVectors, cancellationToken); }, cancellationToken).ConfigureAwait(false); @@ -325,6 +435,14 @@ public async IAsyncEnumerable> VectorizedSearchAsync } } + /// + [Obsolete("Use either SearchEmbeddingAsync to search directly on embeddings, or SearchAsync to handle embedding generation internally as part of the call.")] + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, MEVD.VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + where TVector : notnull + => this.SearchEmbeddingAsync(vector, top, options, cancellationToken); + + #endregion Search + /// public async IAsyncEnumerable GetAsync(Expression> filter, int top, GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -389,12 +507,12 @@ public async IAsyncEnumerable> HybridSearchAsync(new() { VectorProperty = searchOptions.VectorProperty }); - var textDataProperty = this._model.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalProperty); + options ??= s_defaultKeywordVectorizedHybridSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = options.VectorProperty }); + var textDataProperty = this._model.GetFullTextDataPropertyOrSingle(options.AdditionalProperty); #pragma warning disable CS0618 // VectorSearchFilter is obsolete - var filter = searchOptions switch + var filter = options switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), { OldFilter: VectorSearchFilter legacyFilter } => MongoDBVectorStoreCollectionSearchMapping.BuildLegacyFilter(legacyFilter, this._model), @@ -405,7 +523,7 @@ public async IAsyncEnumerable> HybridSearchAsync> HybridSearchAsync(pipeline, cancellationToken: cancellationToken) .ConfigureAwait(false); - return this.EnumerateAndMapSearchResultsAsync(cursor, searchOptions.Skip, searchOptions.IncludeVectors, cancellationToken); + return this.EnumerateAndMapSearchResultsAsync(cursor, options.Skip, options.IncludeVectors, cancellationToken); }, cancellationToken).ConfigureAwait(false); diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs index d4356fc3bc52..f2d9dee36f97 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBVectorStoreRecordCollectionOptions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using MongoDB.Bson; @@ -27,6 +28,11 @@ public sealed class MongoDBVectorStoreRecordCollectionOptions /// public VectorStoreRecordDefinition? VectorStoreRecordDefinition { get; init; } = null; + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? 
EmbeddingGenerator { get; init; } + /// /// Vector index name to use. If null, the default "vector_index" name will be used. /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs index 236582017474..694e5327ba9b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs @@ -41,7 +41,7 @@ public static IKernelBuilder AddPineconeVectorStore(this IKernelBuilder builder, } /// - /// Register a Pinecone and with the + /// Register a Pinecone and with the /// specified service ID and where is retrieved from the dependency injection container. /// /// The type of the data model that the collection should contain. @@ -62,7 +62,7 @@ public static IKernelBuilder AddPineconeVectorStoreRecordCollection( } /// - /// Register a Pinecone and with the + /// Register a Pinecone and with the /// provided and the specified service ID. /// /// The type of the data model that the collection should contain. diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeServiceCollectionExtensions.cs index a7c3ad3d9a43..9b0e3a67a3d8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeServiceCollectionExtensions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using Microsoft.Extensions.AI; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Pinecone; @@ -28,11 +29,12 @@ public static IServiceCollection AddPineconeVectorStore(this IServiceCollection (sp, obj) => { var pineconeClient = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new PineconeVectorStore( - pineconeClient, - selectedOptions); + return new PineconeVectorStore(pineconeClient, options); }); return services; @@ -53,18 +55,19 @@ public static IServiceCollection AddPineconeVectorStore(this IServiceCollection (sp, obj) => { var pineconeClient = new Sdk.PineconeClient(apiKey); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new PineconeVectorStore( - pineconeClient, - selectedOptions); + return new PineconeVectorStore(pineconeClient, options); }); return services; } /// - /// Register a Pinecone and with the + /// Register a Pinecone and with the /// specified service ID and where is retrieved from the dependency injection container. /// /// The type of the data model that the collection should contain. @@ -87,12 +90,12 @@ public static IServiceCollection AddPineconeVectorStoreRecordCollection (sp, obj) => { var pineconeClient = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? 
new() + { + EmbeddingGenerator = sp.GetService() + }; - return new PineconeVectorStoreRecordCollection( - pineconeClient, - collectionName, - selectedOptions); + return new PineconeVectorStoreRecordCollection(pineconeClient, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -101,7 +104,7 @@ public static IServiceCollection AddPineconeVectorStoreRecordCollection } /// - /// Register a Pinecone and with the + /// Register a Pinecone and with the /// provided and the specified service ID. /// /// The type of the data model that the collection should contain. @@ -124,12 +127,12 @@ public static IServiceCollection AddPineconeVectorStoreRecordCollection (sp, obj) => { var pineconeClient = new Sdk.PineconeClient(apiKey); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new PineconeVectorStoreRecordCollection( - pineconeClient, - collectionName, - selectedOptions); + return new PineconeVectorStoreRecordCollection(pineconeClient, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -138,14 +141,14 @@ public static IServiceCollection AddPineconeVectorStoreRecordCollection } /// - /// Also register the with the given as a . + /// Also register the with the given as a . /// /// The type of the data model that the collection should contain. /// The service collection to register on. /// The service id that the registrations should use. private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) where TRecord : notnull { - services.AddKeyedTransient>( + services.AddKeyedTransient>( serviceId, (sp, obj) => { diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs index ab31038cbd10..03763a26601a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStore.cs @@ -61,7 +61,11 @@ public IVectorStoreRecordCollection GetCollection( return (new PineconeVectorStoreRecordCollection( this._pineconeClient, name, - new PineconeVectorStoreRecordCollectionOptions() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection)!; + new PineconeVectorStoreRecordCollectionOptions() + { + VectorStoreRecordDefinition = vectorStoreRecordDefinition, + EmbeddingGenerator = this._options.EmbeddingGenerator + }) as IVectorStoreRecordCollection)!; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs index bf0034fac1d9..f27b8a54239f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using Microsoft.Extensions.AI; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -9,6 +10,11 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// public sealed class PineconeVectorStoreOptions { + /// + /// Gets or sets the default embedding generator for vector properties in this collection. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } + /// /// An optional factory to use for constructing instances, if a custom record collection is required. 
/// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 92b150310696..a4aab608c8ec 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -8,8 +8,10 @@ using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.Properties; using Pinecone; using Sdk = Pinecone; @@ -62,7 +64,7 @@ public PineconeVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, st this.Name = name; this._options = options ?? new PineconeVectorStoreRecordCollectionOptions(); this._model = new VectorStoreRecordModelBuilder(PineconeVectorStoreRecordFieldMapping.ModelBuildingOptions) - .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator); this._mapper = new PineconeVectorStoreRecordMapper(this._model); this._collectionMetadata = new() @@ -156,6 +158,11 @@ public async Task DeleteCollectionAsync(CancellationToken cancellationToken = de /// public async Task GetAsync(TKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) { + if (options?.IncludeVectors is true && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + Sdk.FetchRequest request = new() { Namespace = this._options.IndexNamespace, @@ -189,6 +196,11 @@ public async IAsyncEnumerable GetAsync( { Verify.NotNull(keys); + if (options?.IncludeVectors is true && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + #pragma warning disable CA1851 // Bogus: Possible multiple enumerations of 'IEnumerable' collection var keysList = keys switch { @@ -278,12 +290,29 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati { Verify.NotNull(record); + // If an embedding generator is defined, invoke it once for all records. + Embedding? 
generatedEmbedding = null; + + Debug.Assert(this._model.VectorProperties.Count <= 1); + if (this._model.VectorProperties is [{ EmbeddingGenerator: not null } vectorProperty]) + { + if (vectorProperty.TryGenerateEmbedding, ReadOnlyMemory>(record, cancellationToken, out var task)) + { + generatedEmbedding = await task.ConfigureAwait(false); + } + else + { + throw new InvalidOperationException( + $"The embedding generator configured on property '{vectorProperty.ModelName}' cannot produce an embedding of type '{typeof(Embedding).Name}' for the given input type."); + } + } + var vector = VectorStoreErrorHandler.RunModelConversion( PineconeConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this.Name, "Upsert", - () => this._mapper.MapFromDataToStorageModel(record)); + () => this._mapper.MapFromDataToStorageModel(record, generatedEmbedding)); Sdk.UpsertRequest request = new() { @@ -303,12 +332,39 @@ public async Task> UpsertAsync(IEnumerable records, { Verify.NotNull(records); + // If an embedding generator is defined, invoke it once for all records. + GeneratedEmbeddings>? generatedEmbeddings = null; + + if (this._model.VectorProperties is [{ EmbeddingGenerator: not null } vectorProperty]) + { + var recordsList = records is IReadOnlyList r ? r : records.ToList(); + + if (recordsList.Count == 0) + { + return []; + } + + records = recordsList; + + if (vectorProperty.TryGenerateEmbeddings, ReadOnlyMemory>(records, cancellationToken, out var task)) + { + generatedEmbeddings = await task.ConfigureAwait(false); + + Debug.Assert(generatedEmbeddings.Count == recordsList.Count); + } + else + { + throw new InvalidOperationException( + $"The embedding generator configured on property '{vectorProperty.ModelName}' cannot produce an embedding of type '{typeof(Embedding).Name}' for the given input type."); + } + } + var vectors = VectorStoreErrorHandler.RunModelConversion( PineconeConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this.Name, "UpsertBatch", - () => records.Select(this._mapper.MapFromDataToStorageModel).ToList()); + () => records.Select((r, i) => this._mapper.MapFromDataToStorageModel(r, generatedEmbeddings?[i])).ToList()); if (vectors.Count == 0) { @@ -328,8 +384,64 @@ await this.RunIndexOperationAsync( return vectors.Select(x => (TKey)(object)x.Id).ToList(); } + #region Search + /// - public async IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable> SearchAsync( + TInput value, + int top, + VectorSearchOptions? 
options = default, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TInput : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + switch (vectorProperty.EmbeddingGenerator) + { + case IEmbeddingGenerator> generator: + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + + case null: + throw new InvalidOperationException(VectorDataStrings.NoEmbeddingGeneratorWasConfiguredForSearch); + + default: + throw new InvalidOperationException( + PineconeVectorStoreRecordFieldMapping.s_supportedVectorTypes.Contains(typeof(TInput)) + ? string.Format(VectorDataStrings.EmbeddingTypePassedToSearchAsync) + : string.Format(VectorDataStrings.IncompatibleEmbeddingGeneratorWasConfiguredForInputType, typeof(TInput).Name, vectorProperty.EmbeddingGenerator.GetType().Name)); + } + } + + /// + public IAsyncEnumerable> SearchEmbeddingAsync( + TVector vector, + int top, + VectorSearchOptions? options = null, + CancellationToken cancellationToken = default) + where TVector : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + return this.SearchCoreAsync(vector, top, vectorProperty, operationName: "SearchEmbedding", options, cancellationToken); + } + + private async IAsyncEnumerable> SearchCoreAsync( + TVector vector, + int top, + VectorStoreRecordVectorPropertyModel vectorProperty, + string operationName, + VectorSearchOptions options, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TVector : notnull { Verify.NotNull(vector); Verify.NotLessThan(top, 1); @@ -340,7 +452,10 @@ public async IAsyncEnumerable> VectorizedSearchAsync $"Supported types are: {typeof(ReadOnlyMemory).FullName}"); } - options ??= s_defaultVectorSearchOptions; + if (options.IncludeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } #pragma warning disable CS0618 // VectorSearchFilter is obsolete var filter = options switch @@ -395,6 +510,14 @@ public async IAsyncEnumerable> VectorizedSearchAsync } } + /// + [Obsolete("Use either SearchEmbeddingAsync to search directly on embeddings, or SearchAsync to handle embedding generation internally as part of the call.")] + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + where TVector : notnull + => this.SearchEmbeddingAsync(vector, top, options, cancellationToken); + + #endregion Search + /// public async IAsyncEnumerable GetAsync(Expression> filter, int top, GetFilteredRecordOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { @@ -408,6 +531,11 @@ public async IAsyncEnumerable GetAsync(Expression> options ??= new(); + if (options.IncludeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + Sdk.QueryRequest request = new() { TopK = (uint)(top + options.Skip), diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs index fbec059a0064..4be187f80b16 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollectionOptions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Pinecone; @@ -47,4 +48,9 @@ public sealed class PineconeVectorStoreRecordCollectionOptions /// This option is only used when creating a new Pinecone index. Default value is 'us-east-1'. /// public string ServerlessIndexRegion { get; init; } = "us-east-1"; + + /// + /// Gets or sets the default embedding generator for vector properties in this collection. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordFieldMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordFieldMapping.cs index 3643ce5aa32f..0210ea23d467 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordFieldMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordFieldMapping.cs @@ -14,45 +14,36 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// internal static class PineconeVectorStoreRecordFieldMapping { + /// A set of types that vectors on the provided model may have. + public static readonly HashSet s_supportedVectorTypes = + [ + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?), + ]; + public static readonly VectorStoreRecordModelBuildingOptions ModelBuildingOptions = new() { RequiresAtLeastOneVector = true, SupportsMultipleKeys = false, SupportsMultipleVectors = false, - SupportedKeyPropertyTypes = PineconeVectorStoreRecordFieldMapping.s_supportedKeyTypes, - SupportedDataPropertyTypes = PineconeVectorStoreRecordFieldMapping.s_supportedDataTypes, - SupportedEnumerableDataPropertyElementTypes = [typeof(string)], - SupportedVectorPropertyTypes = PineconeVectorStoreRecordFieldMapping.s_supportedVectorTypes - }; + SupportedKeyPropertyTypes = [typeof(string)], - /// A set of types that a key on the provided model may have. - public static readonly HashSet s_supportedKeyTypes = [typeof(string)]; - - /// A set of types that data properties on the provided model may have. - public static readonly HashSet s_supportedDataTypes = - [ - typeof(bool), - typeof(string), - typeof(int), - typeof(long), - typeof(float), - typeof(double), - typeof(decimal) - ]; + SupportedDataPropertyTypes = + [ + typeof(bool), + typeof(string), + typeof(int), + typeof(long), + typeof(float), + typeof(double), + typeof(decimal) + ], - /// A set of types that enumerable data properties on the provided model may use as their element types. 
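
[Editorial note] A pattern repeated across the Pinecone Get and Search paths above: when any vector property has an embedding generator, requests with IncludeVectors = true are rejected, because the embedding stored by the database was produced by the generator at write time and is not mapped back onto the record. A condensed sketch of the guard is below; the model and property types are simplified stand-ins, not the real Microsoft.Extensions.VectorData.ConnectorSupport shapes.

    using System;
    using System.Collections.Generic;
    using System.Linq;

    // Stand-in shapes for illustration only.
    sealed record VectorPropertyModel(string Name, object? EmbeddingGenerator);
    sealed record RecordModel(IReadOnlyList<VectorPropertyModel> VectorProperties);

    static class IncludeVectorsGuard
    {
        public static void ThrowIfNotSupported(RecordModel model, bool includeVectors)
        {
            // Generated embeddings are write-only from the record's point of view, so asking
            // the store to return them cannot be honored.
            if (includeVectors && model.VectorProperties.Any(p => p.EmbeddingGenerator is not null))
            {
                throw new NotSupportedException(
                    "IncludeVectors is not supported when an embedding generator is configured.");
            }
        }
    }
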
- public static readonly HashSet s_supportedEnumerableDataElementTypes = - [ - typeof(string) - ]; + SupportedEnumerableDataPropertyElementTypes = [typeof(string)], - /// A set of types that vectors on the provided model may have. - public static readonly HashSet s_supportedVectorTypes = - [ - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?), - ]; + SupportedVectorPropertyTypes = s_supportedVectorTypes + }; public static object? ConvertFromMetadataValueToNativeType(MetadataValue metadataValue, Type targetType) => metadataValue.Value switch diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs index b3e6717ace66..08abd5947201 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; using Pinecone; @@ -14,7 +15,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; internal sealed class PineconeVectorStoreRecordMapper(VectorStoreRecordModel model) { /// - public Vector MapFromDataToStorageModel(TRecord dataModel) + public Vector MapFromDataToStorageModel(TRecord dataModel, Embedding? generatedEmbedding) { var keyObject = model.KeyProperty.GetValueAsObject(dataModel!); if (keyObject is null) @@ -31,11 +32,12 @@ public Vector MapFromDataToStorageModel(TRecord dataModel) } } - var valuesObject = model.VectorProperty!.GetValueAsObject(dataModel!); - if (valuesObject is not ReadOnlyMemory values) + var values = (generatedEmbedding?.Vector ?? model.VectorProperty!.GetValueAsObject(dataModel!)) switch { - throw new VectorStoreRecordMappingException($"Vector property '{model.VectorProperty.ModelName}' on provided record of type '{typeof(TRecord).Name}' may not be null."); - } + ReadOnlyMemory floats => floats, + null => throw new VectorStoreRecordMappingException($"Vector property '{model.VectorProperty.ModelName}' on provided record of type '{typeof(TRecord).Name}' may not be null."), + _ => throw new VectorStoreRecordMappingException($"Unsupported vector type '{model.VectorProperty.Type.Name}' for vector property '{model.VectorProperty.ModelName}' on provided record of type '{typeof(TRecord).Name}'.") + }; // TODO: what about sparse values? var result = new Vector diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresConstants.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresConstants.cs index 21e0f003d4eb..1ab2fc44211e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresConstants.cs @@ -59,7 +59,11 @@ internal static class PostgresConstants typeof(Guid), ], - SupportedVectorPropertyTypes = [typeof(ReadOnlyMemory)] + SupportedVectorPropertyTypes = + [ + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?) + ] }; /// A of types that vector properties on the provided model may have. 
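
[Editorial note] The Pinecone mapper change just above threads an optional pre-generated Embedding<float> into MapFromDataToStorageModel and prefers its Vector over whatever the record carries in its vector property. Roughly, and with simplified parameters (the real mapper works against VectorStoreRecordModel):

    using System;
    using Microsoft.Extensions.AI;

    static class PineconeMappingSketch
    {
        // Prefer the embedding produced by the generator; otherwise expect the record itself
        // to carry a ReadOnlyMemory<float> in its vector property (the pre-existing behavior).
        public static ReadOnlyMemory<float> ResolveVector(object? recordVectorValue, Embedding<float>? generatedEmbedding)
            => (generatedEmbedding?.Vector ?? recordVectorValue) switch
            {
                ReadOnlyMemory<float> floats => floats,
                null => throw new InvalidOperationException("The vector property may not be null."),
                _ => throw new InvalidOperationException("Unsupported vector type on the record."),
            };
    }
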
diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresServiceCollectionExtensions.cs index 23f8d131bd4b..bae79e63c4c4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresServiceCollectionExtensions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using Microsoft.Extensions.AI; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Postgres; @@ -28,11 +29,12 @@ public static IServiceCollection AddPostgresVectorStore(this IServiceCollection (sp, obj) => { var dataSource = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new PostgresVectorStore( - dataSource, - selectedOptions); + return new PostgresVectorStore(dataSource, options); }); return services; @@ -64,18 +66,19 @@ public static IServiceCollection AddPostgresVectorStore(this IServiceCollection (sp, obj) => { var dataSource = sp.GetRequiredKeyedService(npgsqlServiceId); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new PostgresVectorStore( - dataSource, - selectedOptions); + return new PostgresVectorStore(dataSource, options); }); return services; } /// - /// Register a Postgres and with the specified service ID + /// Register a Postgres and with the specified service ID /// and where the NpgsqlDataSource is retrieved from the dependency injection container. /// /// The type of the key. @@ -98,9 +101,12 @@ public static IServiceCollection AddPostgresVectorStoreRecordCollection { var dataSource = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return (new PostgresVectorStoreRecordCollection(dataSource, collectionName, selectedOptions) as IVectorStoreRecordCollection)!; + return (new PostgresVectorStoreRecordCollection(dataSource, collectionName, options) as IVectorStoreRecordCollection)!; }); AddVectorizedSearch(services, serviceId); @@ -109,7 +115,7 @@ public static IServiceCollection AddPostgresVectorStoreRecordCollection - /// Register a Postgres and with the specified service ID + /// Register a Postgres and with the specified service ID /// and where the NpgsqlDataSource is constructed using the provided parameters. /// /// The type of the key. @@ -146,6 +152,11 @@ public static IServiceCollection AddPostgresVectorStoreRecordCollection(npgsqlServiceId); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; + return (new PostgresVectorStoreRecordCollection(dataSource, collectionName, options) as IVectorStoreRecordCollection)!; }); @@ -155,7 +166,7 @@ public static IServiceCollection AddPostgresVectorStoreRecordCollection - /// Also register the with the given as a . + /// Also register the with the given as a . /// /// The type of the key. /// The type of the data model that the collection should contain. 
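
[Editorial note] The AddPostgresVectorStore and AddPostgresVectorStoreRecordCollection overloads in this file (and their Pinecone and Qdrant counterparts) all resolve options the same way: options passed by the caller win, then options registered in the container, and only then a fresh instance whose EmbeddingGenerator is pulled from DI. Note that the new code assigns back to the captured options parameter (options ??= ...), so the first resolution is reused by later resolutions of the same registration. A stripped-down sketch of the precedence, using a hypothetical StoreOptions type in place of the real options classes:

    using System;
    using Microsoft.Extensions.AI;
    using Microsoft.Extensions.DependencyInjection;

    // Hypothetical options type standing in for PostgresVectorStoreOptions and friends.
    sealed class StoreOptions
    {
        public IEmbeddingGenerator? EmbeddingGenerator { get; init; }
    }

    static class OptionsResolutionSketch
    {
        public static StoreOptions Resolve(IServiceProvider sp, StoreOptions? explicitOptions)
            => explicitOptions                          // 1. options passed by the caller win
               ?? sp.GetService<StoreOptions>()         // 2. then options registered in the container
               ?? new StoreOptions                      // 3. finally a default whose generator comes from DI
               {
                   EmbeddingGenerator = sp.GetService<IEmbeddingGenerator>()
               };
    }
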
@@ -165,7 +176,7 @@ private static void AddVectorizedSearch(IServiceCollection servic where TKey : notnull where TRecord : notnull { - services.AddKeyedTransient>( + services.AddKeyedTransient>( serviceId, (sp, obj) => { diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs index 5bf1a7950a2b..18b72eb89376 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStore.cs @@ -84,7 +84,12 @@ public IVectorStoreRecordCollection GetCollection( var recordCollection = new PostgresVectorStoreRecordCollection( this._postgresClient, name, - new PostgresVectorStoreRecordCollectionOptions() { Schema = this._options.Schema, VectorStoreRecordDefinition = vectorStoreRecordDefinition } + new PostgresVectorStoreRecordCollectionOptions() + { + Schema = this._options.Schema, + VectorStoreRecordDefinition = vectorStoreRecordDefinition, + EmbeddingGenerator = this._options.EmbeddingGenerator, + } ); return recordCollection as IVectorStoreRecordCollection ?? throw new InvalidOperationException("Failed to cast record collection."); diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreOptions.cs index 5add40eed8ee..f96926143126 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreOptions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using Microsoft.Extensions.AI; namespace Microsoft.SemanticKernel.Connectors.Postgres; @@ -14,6 +15,11 @@ public sealed class PostgresVectorStoreOptions /// public string Schema { get; init; } = "public"; + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } + /// /// An optional factory to use for constructing instances, if a custom record collection is required. /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs index 3932bebb2ac8..2c3bda1627d6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollection.cs @@ -4,10 +4,13 @@ using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; +using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.Properties; using Npgsql; namespace Microsoft.SemanticKernel.Connectors.Postgres; @@ -76,7 +79,7 @@ internal PostgresVectorStoreRecordCollection(IPostgresVectorStoreDbClient client this._options = options ?? 
new PostgresVectorStoreRecordCollectionOptions(); this._model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions) - .Build(typeof(TRecord), options?.VectorStoreRecordDefinition); + .Build(typeof(TRecord), options?.VectorStoreRecordDefinition, options?.EmbeddingGenerator); this._mapper = new PostgresVectorStoreRecordMapper(this._model); @@ -125,16 +128,42 @@ public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) } /// - public Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) { const string OperationName = "Upsert"; + IReadOnlyList?[]? generatedEmbeddings = null; + + var vectorPropertyCount = this._model.VectorProperties.Count; + for (var i = 0; i < vectorPropertyCount; i++) + { + var vectorProperty = this._model.VectorProperties[i]; + + if (vectorProperty.EmbeddingGenerator is null) + { + continue; + } + + // TODO: Ideally we'd group together vector properties using the same generator (and with the same input and output properties), + // and generate embeddings for them in a single batch. That's some more complexity though. + if (vectorProperty.TryGenerateEmbedding, ReadOnlyMemory>(record, cancellationToken, out var floatTask)) + { + generatedEmbeddings ??= new IReadOnlyList?[vectorPropertyCount]; + generatedEmbeddings[i] = [await floatTask.ConfigureAwait(false)]; + } + else + { + throw new InvalidOperationException( + $"The embedding generator configured on property '{vectorProperty.ModelName}' cannot produce an embedding of type '{typeof(Embedding).Name}' for the given input type."); + } + } + var storageModel = VectorStoreErrorHandler.RunModelConversion( PostgresConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this.Name, OperationName, - () => this._mapper.MapFromDataToStorageModel(record)); + () => this._mapper.MapFromDataToStorageModel(record, recordIndex: 0, generatedEmbeddings)); Verify.NotNull(storageModel); @@ -142,12 +171,11 @@ public Task UpsertAsync(TRecord record, CancellationToken cancellationToke Verify.NotNull(keyObj); TKey key = (TKey)keyObj!; - return this.RunOperationAsync(OperationName, async () => + return await this.RunOperationAsync(OperationName, async () => { await this._client.UpsertAsync(this.Name, storageModel, this._model.KeyProperty.StorageName, cancellationToken).ConfigureAwait(false); return key; - } - ); + }).ConfigureAwait(false); } /// @@ -157,12 +185,55 @@ public async Task> UpsertAsync(IEnumerable records, const string OperationName = "UpsertBatch"; - var storageModels = records.Select(record => VectorStoreErrorHandler.RunModelConversion( + IReadOnlyList? recordsList = null; + + // If an embedding generator is defined, invoke it once per property for all records. + IReadOnlyList?[]? generatedEmbeddings = null; + + var vectorPropertyCount = this._model.VectorProperties.Count; + for (var i = 0; i < vectorPropertyCount; i++) + { + var vectorProperty = this._model.VectorProperties[i]; + + if (vectorProperty.EmbeddingGenerator is null) + { + continue; + } + + // We have a property with embedding generation; materialize the records' enumerable if needed, to + // prevent multiple enumeration. + if (recordsList is null) + { + recordsList = records is IReadOnlyList r ? 
r : records.ToList(); + + if (recordsList.Count == 0) + { + return []; + } + + records = recordsList; + } + + // TODO: Ideally we'd group together vector properties using the same generator (and with the same input and output properties), + // and generate embeddings for them in a single batch. That's some more complexity though. + if (vectorProperty.TryGenerateEmbeddings, ReadOnlyMemory>(records, cancellationToken, out var floatTask)) + { + generatedEmbeddings ??= new IReadOnlyList?[vectorPropertyCount]; + generatedEmbeddings[i] = (IReadOnlyList>)await floatTask.ConfigureAwait(false); + } + else + { + throw new InvalidOperationException( + $"The embedding generator configured on property '{vectorProperty.ModelName}' cannot produce an embedding of type '{typeof(Embedding).Name}' for the given input type."); + } + } + + var storageModels = VectorStoreErrorHandler.RunModelConversion( PostgresConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this.Name, OperationName, - () => this._mapper.MapFromDataToStorageModel(record))).ToList(); + () => records.Select((r, i) => this._mapper.MapFromDataToStorageModel(r, i, generatedEmbeddings)).ToList()); if (storageModels.Count == 0) { @@ -187,6 +258,11 @@ await this.RunOperationAsync(OperationName, () => bool includeVectors = options?.IncludeVectors is true; + if (includeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + return this.RunOperationAsync(OperationName, async () => { var row = await this._client.GetAsync(this.Name, key, this._model, includeVectors, cancellationToken).ConfigureAwait(false); @@ -210,6 +286,11 @@ public IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptio bool includeVectors = options?.IncludeVectors is true; + if (includeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + return PostgresVectorStoreUtils.WrapAsyncEnumerableAsync( this._client.GetBatchAsync(this.Name, keys, this._model, includeVectors, cancellationToken) .SelectAsync(row => @@ -247,11 +328,67 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationTo ); } + #region Search + /// - public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async IAsyncEnumerable> SearchAsync( + TInput value, + int top, + VectorSearchOptions? 
options = default, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TInput : notnull { - const string OperationName = "VectorizedSearch"; + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + switch (vectorProperty.EmbeddingGenerator) + { + case IEmbeddingGenerator> generator: + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + + // TODO: Implement support for Half, binary, sparse embeddings (#11083) + + case null: + throw new InvalidOperationException(VectorDataStrings.NoEmbeddingGeneratorWasConfiguredForSearch); + + default: + throw new InvalidOperationException( + PostgresConstants.SupportedVectorTypes.Contains(typeof(TInput)) + ? string.Format(VectorDataStrings.EmbeddingTypePassedToSearchAsync) + : string.Format(VectorDataStrings.IncompatibleEmbeddingGeneratorWasConfiguredForInputType, typeof(TInput).Name, vectorProperty.EmbeddingGenerator.GetType().Name)); + } + } + + /// + public IAsyncEnumerable> SearchEmbeddingAsync( + TVector vector, + int top, + VectorSearchOptions? options = null, + CancellationToken cancellationToken = default) + where TVector : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + return this.SearchCoreAsync(vector, top, vectorProperty, operationName: "SearchEmbedding", options, cancellationToken); + } + + private IAsyncEnumerable> SearchCoreAsync( + TVector vector, + int top, + VectorStoreRecordVectorPropertyModel vectorProperty, + string operationName, + VectorSearchOptions options, + CancellationToken cancellationToken = default) + where TVector : notnull + { Verify.NotNull(vector); Verify.NotLessThan(top, 1); @@ -264,8 +401,10 @@ public IAsyncEnumerable> VectorizedSearchAsync l.FullName))}"); } - var searchOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._model.GetVectorPropertyOrSingle(searchOptions); + if (options.IncludeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } var pgVector = PostgresVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); @@ -273,29 +412,37 @@ public IAsyncEnumerable> VectorizedSearchAsync { var record = VectorStoreErrorHandler.RunModelConversion( PostgresConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this.Name, - OperationName, + operationName, () => this._mapper.MapFromStorageToDataModel(result.Row, mapperOptions)); return new VectorSearchResult(record, result.Distance); }, cancellationToken), - OperationName, + operationName, this._collectionMetadata.VectorStoreName, this.Name ); } + /// + [Obsolete("Use either SearchEmbeddingAsync to search directly on embeddings, or SearchAsync to handle embedding generation internally as part of the call.")] + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) + where TVector : notnull + => this.SearchEmbeddingAsync(vector, top, options, cancellationToken); + + #endregion Search + /// public IAsyncEnumerable GetAsync(Expression> filter, int top, GetFilteredRecordOptions? options = null, CancellationToken cancellationToken = default) diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollectionOptions.cs index 0f2595e76c02..1e440a53878d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordCollectionOptions.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.Postgres; @@ -34,4 +35,9 @@ public sealed class PostgresVectorStoreRecordCollectionOptions /// See , and . /// public VectorStoreRecordDefinition? VectorStoreRecordDefinition { get; init; } = null; + + /// + /// Gets or sets the default embedding generator for vector properties in this collection. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs index 78e2c2d48c75..47ec60218022 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs @@ -2,6 +2,8 @@ using System; using System.Collections.Generic; +using System.Diagnostics; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; @@ -14,29 +16,36 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; /// /// The type of the data model record. internal sealed class PostgresVectorStoreRecordMapper(VectorStoreRecordModel model) + where TRecord : notnull { - public Dictionary MapFromDataToStorageModel(TRecord dataModel) + public Dictionary MapFromDataToStorageModel(TRecord dataModel, int recordIndex, IReadOnlyList?[]? generatedEmbeddings) { var keyProperty = model.KeyProperty; var properties = new Dictionary { - { keyProperty.StorageName, keyProperty.GetValueAsObject(dataModel!) } + { keyProperty.StorageName, keyProperty.GetValueAsObject(dataModel) } }; foreach (var property in model.DataProperties) { - properties.Add(property.StorageName, property.GetValueAsObject(dataModel!)); + properties.Add(property.StorageName, property.GetValueAsObject(dataModel)); } - foreach (var property in model.VectorProperties) + for (var i = 0; i < model.VectorProperties.Count; i++) { - var propertyValue = property.GetValueAsObject(dataModel!); + var property = model.VectorProperties[i]; properties.Add( property.StorageName, - PostgresVectorStoreRecordPropertyMapping.MapVectorForStorageModel( - property.GetValueAsObject(dataModel!))); + PostgresVectorStoreRecordPropertyMapping.MapVectorForStorageModel( + generatedEmbeddings?[i] is IReadOnlyList e + ? 
e[recordIndex] switch + { + Embedding fe => fe.Vector, + _ => throw new UnreachableException() + } + : (ReadOnlyMemory)property.GetValueAsObject(dataModel!)!)); } return properties; diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs index f7ad49259c34..761c1ea8f21b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordPropertyMapping.cs @@ -138,7 +138,7 @@ public static (string PgType, bool IsNullable) GetPostgresTypeName(Type property /// The PostgreSQL vector type name. public static (string PgType, bool IsNullable) GetPgVectorTypeName(VectorStoreRecordVectorPropertyModel vectorProperty) { - return ($"VECTOR({vectorProperty.Dimensions})", Nullable.GetUnderlyingType(vectorProperty.Type) != null); + return ($"VECTOR({vectorProperty.Dimensions})", Nullable.GetUnderlyingType(vectorProperty.EmbeddingType) != null); } public static NpgsqlParameter GetNpgsqlParameter(object? value) diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantKernelBuilderExtensions.cs index 8a97f8536920..ae78eac52689 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantKernelBuilderExtensions.cs @@ -43,7 +43,7 @@ public static IKernelBuilder AddQdrantVectorStore(this IKernelBuilder builder, s } /// - /// Register a Qdrant and with the specified service ID + /// Register a Qdrant and with the specified service ID /// and where the Qdrant is retrieved from the dependency injection container. /// /// The type of the key. @@ -66,7 +66,7 @@ public static IKernelBuilder AddQdrantVectorStoreRecordCollection } /// - /// Register a Qdrant and with the specified service ID + /// Register a Qdrant and with the specified service ID /// and where the Qdrant is constructed using the provided parameters. /// /// The type of the key. diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantServiceCollectionExtensions.cs index c61eab57376a..1abd7708030b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantServiceCollectionExtensions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using Microsoft.Extensions.AI; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Qdrant; @@ -28,11 +29,12 @@ public static IServiceCollection AddQdrantVectorStore(this IServiceCollection se (sp, obj) => { var qdrantClient = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new QdrantVectorStore( - qdrantClient, - selectedOptions); + return new QdrantVectorStore(qdrantClient, options); }); return services; @@ -55,18 +57,19 @@ public static IServiceCollection AddQdrantVectorStore(this IServiceCollection se (sp, obj) => { var qdrantClient = new QdrantClient(host, port, https, apiKey); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? 
new() + { + EmbeddingGenerator = sp.GetService() + }; - return new QdrantVectorStore( - qdrantClient, - selectedOptions); + return new QdrantVectorStore(qdrantClient, options); }); return services; } /// - /// Register a Qdrant and with the specified service ID + /// Register a Qdrant and with the specified service ID /// and where the Qdrant is retrieved from the dependency injection container. /// /// The type of the key. @@ -89,9 +92,12 @@ public static IServiceCollection AddQdrantVectorStoreRecordCollection { var qdrantClient = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return (new QdrantVectorStoreRecordCollection(qdrantClient, collectionName, selectedOptions) as IVectorStoreRecordCollection)!; + return (new QdrantVectorStoreRecordCollection(qdrantClient, collectionName, options) as IVectorStoreRecordCollection)!; }); AddVectorizedSearch(services, serviceId); @@ -100,7 +106,7 @@ public static IServiceCollection AddQdrantVectorStoreRecordCollection - /// Register a Qdrant and with the specified service ID + /// Register a Qdrant and with the specified service ID /// and where the Qdrant is constructed using the provided parameters. /// /// The type of the key. @@ -131,9 +137,12 @@ public static IServiceCollection AddQdrantVectorStoreRecordCollection { var qdrantClient = new QdrantClient(host, port, https, apiKey); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return (new QdrantVectorStoreRecordCollection(qdrantClient, collectionName, selectedOptions) as IVectorStoreRecordCollection)!; + return (new QdrantVectorStoreRecordCollection(qdrantClient, collectionName, options) as IVectorStoreRecordCollection)!; }); AddVectorizedSearch(services, serviceId); @@ -142,7 +151,7 @@ public static IServiceCollection AddQdrantVectorStoreRecordCollection - /// Also register the with the given as a . + /// Also register the with the given as a . /// /// The type of the key. /// The type of the data model that the collection should contain. 
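
[Editorial note] The Postgres batch upsert above and the Qdrant one a little further down share one flow: when a vector property has a generator, the incoming IEnumerable<TRecord> is materialized once to avoid multiple enumeration, embeddings are generated for all records in a single call per property, and the mapper later indexes into the result by record position. A simplified sketch of that flow follows, calling the Microsoft.Extensions.AI batch API directly rather than the connector-internal TryGenerateEmbeddings helper; the Doc record type is an assumption for illustration.

    using System.Collections.Generic;
    using System.Linq;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Extensions.AI;

    // Hypothetical record type: the embedding input is the Description text.
    sealed record Doc(string Id, string Description);

    static class BatchEmbeddingSketch
    {
        public static async Task<(IReadOnlyList<Doc> Records, GeneratedEmbeddings<Embedding<float>>? Embeddings)> PrepareBatchAsync(
            IEnumerable<Doc> records,
            IEmbeddingGenerator<string, Embedding<float>>? generator,
            CancellationToken cancellationToken)
        {
            // Materialize once so the source enumerable is not enumerated again by the mapper.
            var list = records as IReadOnlyList<Doc> ?? records.ToList();

            if (generator is null || list.Count == 0)
            {
                return (list, null);
            }

            // One call generates embeddings for every record; element i belongs to list[i].
            var embeddings = await generator.GenerateAsync(
                list.Select(r => r.Description), cancellationToken: cancellationToken).ConfigureAwait(false);

            return (list, embeddings);
        }
    }
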
@@ -152,7 +161,7 @@ private static void AddVectorizedSearch(IServiceCollection servic where TKey : notnull where TRecord : notnull { - services.AddKeyedTransient>( + services.AddKeyedTransient>( serviceId, (sp, obj) => { diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs index ff9bba5e36a2..dd55b3da663f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs @@ -74,7 +74,8 @@ public IVectorStoreRecordCollection GetCollection( var recordCollection = new QdrantVectorStoreRecordCollection(this._qdrantClient, name, new QdrantVectorStoreRecordCollectionOptions() { HasNamedVectors = this._options.HasNamedVectors, - VectorStoreRecordDefinition = vectorStoreRecordDefinition + VectorStoreRecordDefinition = vectorStoreRecordDefinition, + EmbeddingGenerator = this._options.EmbeddingGenerator }); var castRecordCollection = recordCollection as IVectorStoreRecordCollection; return castRecordCollection!; diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs index 5ad87a389833..97f59d5149a4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using Microsoft.Extensions.AI; namespace Microsoft.SemanticKernel.Connectors.Qdrant; @@ -15,6 +16,11 @@ public sealed class QdrantVectorStoreOptions /// public bool HasNamedVectors { get; set; } = false; + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } + /// /// An optional factory to use for constructing instances, if a custom record collection is required. /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index f61f73069170..7673933d99af 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -10,8 +10,10 @@ using System.Threading; using System.Threading.Tasks; using Grpc.Core; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.Properties; using Qdrant.Client; using Qdrant.Client.Grpc; @@ -96,7 +98,7 @@ internal QdrantVectorStoreRecordCollection(MockableQdrantClient qdrantClient, st this._options = options ?? new QdrantVectorStoreRecordCollectionOptions(); this._model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(this._options.HasNamedVectors)) - .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, options?.EmbeddingGenerator); this._mapper = new QdrantVectorStoreRecordMapper(this._model, this._options.HasNamedVectors); @@ -288,6 +290,10 @@ public async IAsyncEnumerable GetAsync( } var includeVectors = options?.IncludeVectors ?? 
false; + if (includeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } // Retrieve data points. var retrievedPoints = await this.RunOperationAsync( @@ -421,25 +427,9 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati { Verify.NotNull(record); - // Create point from record. - var pointStruct = VectorStoreErrorHandler.RunModelConversion( - QdrantConstants.VectorStoreSystemName, - this._collectionMetadata.VectorStoreName, - this._collectionName, - UpsertName, - () => this._mapper.MapFromDataToStorageModel(record)); - - // Upsert. - await this.RunOperationAsync( - UpsertName, - () => this._qdrantClient.UpsertAsync(this._collectionName, [pointStruct], true, cancellationToken: cancellationToken)).ConfigureAwait(false); + var keys = await this.UpsertAsync([record], cancellationToken).ConfigureAwait(false); - return pointStruct.Id switch - { - { HasNum: true } => (TKey)(object)pointStruct.Id.Num, - { HasUuid: true } => (TKey)(object)Guid.Parse(pointStruct.Id.Uuid), - _ => throw new UnreachableException("The Qdrant point ID is neither a number nor a UUID.") - }; + return keys.Single(); } /// @@ -447,13 +437,54 @@ public async Task> UpsertAsync(IEnumerable records, { Verify.NotNull(records); + IReadOnlyList? recordsList = null; + + // If an embedding generator is defined, invoke it once per property for all records. + GeneratedEmbeddings>?[]? generatedEmbeddings = null; + + var vectorPropertyCount = this._model.VectorProperties.Count; + for (var i = 0; i < vectorPropertyCount; i++) + { + var vectorProperty = this._model.VectorProperties[i]; + + if (vectorProperty.EmbeddingGenerator is null) + { + continue; + } + + if (recordsList is null) + { + recordsList = records is IReadOnlyList r ? r : records.ToList(); + + if (recordsList.Count == 0) + { + return []; + } + + records = recordsList; + } + + // TODO: Ideally we'd group together vector properties using the same generator (and with the same input and output properties), + // and generate embeddings for them in a single batch. That's some more complexity though. + if (vectorProperty.TryGenerateEmbeddings, ReadOnlyMemory>(records, cancellationToken, out var task)) + { + generatedEmbeddings ??= new GeneratedEmbeddings>?[vectorPropertyCount]; + generatedEmbeddings[i] = await task.ConfigureAwait(false); + } + else + { + throw new InvalidOperationException( + $"The embedding generator configured on property '{vectorProperty.ModelName}' cannot produce an embedding of type '{typeof(Embedding).Name}' for the given input type."); + } + } + // Create points from records. var pointStructs = VectorStoreErrorHandler.RunModelConversion( QdrantConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this._collectionName, UpsertName, - () => records.Select(this._mapper.MapFromDataToStorageModel).ToList()); + () => records.Select((r, i) => this._mapper.MapFromDataToStorageModel(r, i, generatedEmbeddings)).ToList()); if (pointStructs is { Count: 0 }) { @@ -475,19 +506,76 @@ await this.RunOperationAsync( }; } + #region Search + /// - public async IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable> SearchAsync( + TInput value, + int top, + VectorSearchOptions? 
options = default, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TInput : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + switch (vectorProperty.EmbeddingGenerator) + { + case IEmbeddingGenerator> generator: + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + + case null: + throw new InvalidOperationException(VectorDataStrings.NoEmbeddingGeneratorWasConfiguredForSearch); + + default: + throw new InvalidOperationException( + QdrantVectorStoreRecordFieldMapping.s_supportedVectorTypes.Contains(typeof(TInput)) + ? string.Format(VectorDataStrings.EmbeddingTypePassedToSearchAsync) + : string.Format(VectorDataStrings.IncompatibleEmbeddingGeneratorWasConfiguredForInputType, typeof(TInput).Name, vectorProperty.EmbeddingGenerator.GetType().Name)); + } + } + + /// + public IAsyncEnumerable> SearchEmbeddingAsync( + TVector vector, + int top, + VectorSearchOptions? options = null, + CancellationToken cancellationToken = default) + where TVector : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + return this.SearchCoreAsync(vector, top, vectorProperty, operationName: "SearchEmbedding", options, cancellationToken); + } + + private async IAsyncEnumerable> SearchCoreAsync( + TVector vector, + int top, + VectorStoreRecordVectorPropertyModel vectorProperty, + string operationName, + VectorSearchOptions options, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TVector : notnull { var floatVector = VerifyVectorParam(vector); Verify.NotLessThan(top, 1); - // Resolve options. - var internalOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._model.GetVectorPropertyOrSingle(internalOptions); + if (options.IncludeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } #pragma warning disable CS0618 // Type or member is obsolete // Build filter object. - var filter = internalOptions switch + var filter = options switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), { OldFilter: VectorSearchFilter legacyFilter } => QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(legacyFilter, this._model), @@ -498,7 +586,7 @@ public async IAsyncEnumerable> VectorizedSearchAsync // Specify whether to include vectors in the search results. var vectorsSelector = new WithVectorsSelector(); - vectorsSelector.Enable = internalOptions.IncludeVectors; + vectorsSelector.Enable = options.IncludeVectors; var query = new Query { @@ -507,14 +595,14 @@ public async IAsyncEnumerable> VectorizedSearchAsync // Execute Search. var points = await this.RunOperationAsync( - "Query", + operationName, () => this._qdrantClient.QueryAsync( this.Name, query: query, usingVector: this._options.HasNamedVectors ? 
vectorProperty.StorageName : null, filter: filter, limit: (ulong)top, - offset: (ulong)internalOptions.Skip, + offset: (ulong)options.Skip, vectorsSelector: vectorsSelector, cancellationToken: cancellationToken)).ConfigureAwait(false); @@ -522,7 +610,7 @@ public async IAsyncEnumerable> VectorizedSearchAsync var mappedResults = points.Select(point => QdrantVectorStoreCollectionSearchMapping.MapScoredPointToVectorSearchResult( point, this._mapper, - internalOptions.IncludeVectors, + options.IncludeVectors, QdrantConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this._collectionName, @@ -534,6 +622,14 @@ public async IAsyncEnumerable> VectorizedSearchAsync } } + /// + [Obsolete("Use either SearchEmbeddingAsync to search directly on embeddings, or SearchAsync to handle embedding generation internally as part of the call.")] + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + where TVector : notnull + => this.SearchEmbeddingAsync(vector, top, options, cancellationToken); + + #endregion Search + /// public async IAsyncEnumerable GetAsync(Expression> filter, int top, GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -597,14 +693,14 @@ public async IAsyncEnumerable> HybridSearchAsync(new() { VectorProperty = internalOptions.VectorProperty }); - var textDataProperty = this._model.GetFullTextDataPropertyOrSingle(internalOptions.AdditionalProperty); + options ??= s_defaultKeywordVectorizedHybridSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = options.VectorProperty }); + var textDataProperty = this._model.GetFullTextDataPropertyOrSingle(options.AdditionalProperty); // Build filter object. #pragma warning disable CS0618 // Type or member is obsolete // Build filter object. - var filter = internalOptions switch + var filter = options switch { { OldFilter: not null, Filter: not null } => throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"), { OldFilter: VectorSearchFilter legacyFilter } => QdrantVectorStoreCollectionSearchMapping.BuildFromLegacyFilter(legacyFilter, this._model), @@ -615,7 +711,7 @@ public async IAsyncEnumerable> HybridSearchAsync> HybridSearchAsync() { vectorQuery, keywordQuery }, query: fusionQuery, limit: (ulong)top, - offset: (ulong)internalOptions.Skip, + offset: (ulong)options.Skip, vectorsSelector: vectorsSelector, cancellationToken: cancellationToken)).ConfigureAwait(false); @@ -667,7 +763,7 @@ public async IAsyncEnumerable> HybridSearchAsync QdrantVectorStoreCollectionSearchMapping.MapScoredPointToVectorSearchResult( point, this._mapper, - internalOptions.IncludeVectors, + options.IncludeVectors, QdrantConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this._collectionName, diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs index c5c1df41e64c..78d633435a6f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollectionOptions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. 
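
[Editorial note] After this change each connector exposes three search entry points: SearchAsync<TInput>, which runs the vector property's configured embedding generator over the input before searching; SearchEmbeddingAsync<TVector>, which searches directly on a caller-supplied embedding; and VectorizedSearchAsync, kept only as an [Obsolete] forwarder to SearchEmbeddingAsync. A consumer-side sketch is below; it assumes the abstractions expose the new members (as the IVectorSearch registrations in this patch suggest) and uses a hypothetical Doc record type with attributes omitted.

    using System;
    using System.Threading.Tasks;
    using Microsoft.Extensions.VectorData;

    // Hypothetical record type; vector store attributes / record definition omitted for brevity.
    public sealed record Doc(ulong Id, string Text);

    public static class SearchSketch
    {
        public static async Task RunAsync(IVectorStoreRecordCollection<ulong, Doc> collection)
        {
            // Text in: the embedding is produced internally by the generator configured on the vector property.
            await foreach (var result in collection.SearchAsync("harbour view hotel", top: 5))
            {
                Console.WriteLine($"{result.Record.Id}: {result.Score}");
            }

            // Embedding in: search directly on a caller-supplied vector; no generator involved.
            ReadOnlyMemory<float> embedding = new float[1536]; // produced elsewhere
            await foreach (var result in collection.SearchEmbeddingAsync(embedding, top: 5))
            {
                Console.WriteLine($"{result.Record.Id}: {result.Score}");
            }
        }
    }
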
using System; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Qdrant.Client.Grpc; @@ -35,4 +36,9 @@ public sealed class QdrantVectorStoreRecordCollectionOptions /// See , and . /// public VectorStoreRecordDefinition? VectorStoreRecordDefinition { get; init; } = null; + + /// + /// Gets or sets the default embedding generator for vector properties in this collection. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs index 0fe15b3ea781..368e4510b094 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs @@ -4,6 +4,7 @@ using System.Diagnostics; using System.Linq; using Google.Protobuf.Collections; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; using Qdrant.Client.Grpc; @@ -17,7 +18,7 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; internal sealed class QdrantVectorStoreRecordMapper(VectorStoreRecordModel model, bool hasNamedVectors) { /// - public PointStruct MapFromDataToStorageModel(TRecord dataModel) + public PointStruct MapFromDataToStorageModel(TRecord dataModel, int recordIndex, GeneratedEmbeddings>?[]? generatedEmbeddings) { var keyProperty = model.KeyProperty; @@ -54,14 +55,18 @@ public PointStruct MapFromDataToStorageModel(TRecord dataModel) if (hasNamedVectors) { var namedVectors = new NamedVectors(); - foreach (var property in model.VectorProperties) + + for (var i = 0; i < model.VectorProperties.Count; i++) { - var propertyValue = property.GetValueAsObject(dataModel!); - if (propertyValue is not null) - { - var castPropertyValue = (ReadOnlyMemory)propertyValue; - namedVectors.Vectors.Add(property.StorageName, castPropertyValue.ToArray()); - } + var property = model.VectorProperties[i]; + + namedVectors.Vectors.Add( + property.StorageName, + GetVector( + property, + generatedEmbeddings?[i] is GeneratedEmbeddings> e + ? e[recordIndex] + : property.GetValueAsObject(dataModel!))); } pointStruct.Vectors.Vectors_ = namedVectors; @@ -69,18 +74,25 @@ public PointStruct MapFromDataToStorageModel(TRecord dataModel) else { // We already verified in the constructor via FindProperties that there is exactly one vector property when not using named vectors. - var property = model.VectorProperty; - if (property.GetValueAsObject(dataModel!) is ReadOnlyMemory floatROM) - { - pointStruct.Vectors.Vector = floatROM.ToArray(); - } - else - { - throw new VectorStoreRecordMappingException($"Vector property '{property.ModelName}' on provided record of type '{typeof(TRecord).Name}' may not be null when not using named vectors."); - } + Debug.Assert( + generatedEmbeddings is null || generatedEmbeddings.Length == 1 && generatedEmbeddings[0] is not null, + "There should be exactly one generated embedding when not using named vectors (single vector property)."); + pointStruct.Vectors.Vector = GetVector( + model.VectorProperty, + generatedEmbeddings is null + ? model.VectorProperty.GetValueAsObject(dataModel!) + : generatedEmbeddings[0]![recordIndex].Vector); } return pointStruct; + + Vector GetVector(VectorStoreRecordPropertyModel property, object? 
embedding) + => embedding switch + { + ReadOnlyMemory floatVector => floatVector.ToArray(), + null => throw new VectorStoreRecordMappingException($"Vector property '{property.ModelName}' on provided record of type '{typeof(TRecord).Name}' may not be null when not using named vectors."), + var unknownEmbedding => throw new VectorStoreRecordMappingException($"Vector property '{property.ModelName}' on provided record of type '{typeof(TRecord).Name}' has unsupported embedding type '{unknownEmbedding.GetType().Name}'.") + }; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/IRedisJsonMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/IRedisJsonMapper.cs index d597fadd652d..0585c0055fb1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/IRedisJsonMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/IRedisJsonMapper.cs @@ -1,6 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; using System.Text.Json.Nodes; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.Redis; @@ -10,15 +12,10 @@ internal interface IRedisJsonMapper /// /// Maps from the consumer record data model to the storage model. /// - /// The consumer record data model record to map. - /// The mapped result. - (string Key, JsonNode Node) MapFromDataToStorageModel(TRecord dataModel); + (string Key, JsonNode Node) MapFromDataToStorageModel(TRecord dataModel, int recordIndex, IReadOnlyList?[]? generatedEmbeddings); /// /// Maps from the storage model to the consumer record data model. /// - /// The storage data model record to map. - /// Options to control the mapping behavior. - /// The mapped result. TRecord MapFromStorageToDataModel((string Key, JsonNode Node) storageModel, StorageToDataModelMapperOptions options); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index 53000a1dcbb1..6a69967d42f3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -8,8 +8,10 @@ using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.Properties; using NRedisStack.RedisStackCommands; using NRedisStack.Search; using NRedisStack.Search.Literals.Enums; @@ -31,6 +33,15 @@ public sealed class RedisHashSetVectorStoreRecordCollection : IVe /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; + /// A set of types that vectors on the provided model may have. + private static readonly HashSet s_supportedVectorTypes = + [ + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?), + typeof(ReadOnlyMemory?) + ]; + internal static readonly VectorStoreRecordModelBuildingOptions ModelBuildingOptions = new() { RequiresAtLeastOneVector = false, @@ -56,15 +67,6 @@ public sealed class RedisHashSetVectorStoreRecordCollection : IVe SupportedVectorPropertyTypes = s_supportedVectorTypes }; - /// A set of types that vectors on the provided model may have. 
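
[Editorial note] The Postgres and Qdrant mappers above receive the generated embeddings as an array indexed first by vector-property position and then by record position (generatedEmbeddings?[propertyIndex]?[recordIndex]); a null slot means no generator ran for that property and the record supplies its own vector. A reduced sketch of that lookup, with simplified parameters standing in for the real model-driven mapping:

    using System;
    using System.Collections.Generic;
    using Microsoft.Extensions.AI;

    static class BatchMappingSketch
    {
        // Outer index: which vector property; inner index: which record in the batch.
        public static ReadOnlyMemory<float> ResolveVector(
            int propertyIndex,
            int recordIndex,
            object? recordVectorValue,
            IReadOnlyList<Embedding<float>>?[]? generatedEmbeddings)
        {
            if (generatedEmbeddings?[propertyIndex] is { } embeddings)
            {
                return embeddings[recordIndex].Vector;
            }

            // No generator for this property: the record must carry the embedding itself.
            return recordVectorValue is ReadOnlyMemory<float> floats
                ? floats
                : throw new InvalidOperationException("The vector property may not be null.");
        }
    }
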
- private static readonly HashSet s_supportedVectorTypes = - [ - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?), - typeof(ReadOnlyMemory?) - ]; - /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -111,7 +113,8 @@ public RedisHashSetVectorStoreRecordCollection(IDatabase database, string name, this._database = database; this._collectionName = name; this._options = options ?? new RedisHashSetVectorStoreRecordCollectionOptions(); - this._model = new VectorStoreRecordModelBuilder(ModelBuildingOptions).Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); + this._model = new VectorStoreRecordModelBuilder(ModelBuildingOptions) + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator); // Lookup storage property names. this._dataStoragePropertyNameRedisValues = this._model.DataProperties.Select(p => RedisValue.Unbox(p.StorageName)).ToArray(); @@ -209,7 +212,13 @@ await this.RunOperationAsync("FT.DROPINDEX", // Create Options var maybePrefixedKey = this.PrefixKeyIfNeeded(stringKey); + var includeVectors = options?.IncludeVectors ?? false; + if (includeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + var operationName = includeVectors ? "HGETALL" : "HMGET"; // Get the Redis value. @@ -291,6 +300,26 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationTo /// public async Task UpsertAsync(TRecord record, CancellationToken cancellationToken = default) + { + (_, var generatedEmbeddings) = await RedisVectorStoreRecordFieldMapping.ProcessEmbeddingsAsync(this._model, [record], cancellationToken).ConfigureAwait(false); + + return await this.UpsertCoreAsync(record, 0, generatedEmbeddings, cancellationToken).ConfigureAwait(false); + } + + /// + public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) + { + Verify.NotNull(records); + + (records, var generatedEmbeddings) = await RedisVectorStoreRecordFieldMapping.ProcessEmbeddingsAsync(this._model, records, cancellationToken).ConfigureAwait(false); + + // Upsert records in parallel. + var tasks = records.Select((r, i) => this.UpsertCoreAsync(r, i, generatedEmbeddings, cancellationToken)); + var results = await Task.WhenAll(tasks).ConfigureAwait(false); + return results.Where(r => r is not null).ToList(); + } + + private async Task UpsertCoreAsync(TRecord record, int recordIndex, IReadOnlyList?[]? generatedEmbeddings, CancellationToken cancellationToken = default) { Verify.NotNull(record); @@ -300,7 +329,7 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati this._collectionMetadata.VectorStoreName, this._collectionName, "HSET", - () => this._mapper.MapFromDataToStorageModel(record)); + () => this._mapper.MapFromDataToStorageModel(record, recordIndex, generatedEmbeddings)); // Upsert. var maybePrefixedKey = this.PrefixKeyIfNeeded(redisHashSetRecord.Key); @@ -315,33 +344,94 @@ await this.RunOperationAsync( return (TKey)(object)redisHashSetRecord.Key; } + #region Search + /// - public async Task> UpsertAsync(IEnumerable records, CancellationToken cancellationToken = default) + public async IAsyncEnumerable> SearchAsync( + TInput value, + int top, + VectorSearchOptions? 
options = default, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TInput : notnull { - Verify.NotNull(records); + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); - // Upsert records in parallel. - var tasks = records.Select(x => this.UpsertAsync(x, cancellationToken)); - var results = await Task.WhenAll(tasks).ConfigureAwait(false); - return results.Where(r => r is not null).ToList(); + switch (vectorProperty.EmbeddingGenerator) + { + case IEmbeddingGenerator> generator: + { + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + } + + case IEmbeddingGenerator> generator: + { + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + } + + case null: + throw new InvalidOperationException(VectorDataStrings.NoEmbeddingGeneratorWasConfiguredForSearch); + + default: + throw new InvalidOperationException( + s_supportedVectorTypes.Contains(typeof(TInput)) + ? string.Format(VectorDataStrings.EmbeddingTypePassedToSearchAsync) + : string.Format(VectorDataStrings.IncompatibleEmbeddingGeneratorWasConfiguredForInputType, typeof(TInput).Name, vectorProperty.EmbeddingGenerator.GetType().Name)); + } } /// - public async IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public IAsyncEnumerable> SearchEmbeddingAsync( + TVector vector, + int top, + VectorSearchOptions? options = null, + CancellationToken cancellationToken = default) + where TVector : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + return this.SearchCoreAsync(vector, top, vectorProperty, operationName: "SearchEmbedding", options, cancellationToken); + } + + private async IAsyncEnumerable> SearchCoreAsync( + TVector vector, + int top, + VectorStoreRecordVectorPropertyModel vectorProperty, + string operationName, + VectorSearchOptions options, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TVector : notnull { Verify.NotNull(vector); Verify.NotLessThan(top, 1); - var internalOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._model.GetVectorPropertyOrSingle(internalOptions); + if (options.IncludeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } // Build query & search. - var selectFields = internalOptions.IncludeVectors ? null : this._dataStoragePropertyNamesWithScore; + var selectFields = options.IncludeVectors ? 
null : this._dataStoragePropertyNamesWithScore; byte[] vectorBytes = RedisVectorStoreCollectionSearchMapping.ValidateVectorAndConvertToBytes(vector, "HashSet"); var query = RedisVectorStoreCollectionSearchMapping.BuildQuery( vectorBytes, top, - internalOptions, + options, this._model, vectorProperty, selectFields); @@ -367,11 +457,11 @@ public async IAsyncEnumerable> VectorizedSearchAsync "FT.SEARCH", () => { - return this._mapper.MapFromStorageToDataModel((this.RemoveKeyPrefixIfNeeded(result.Id), retrievedHashEntries), new() { IncludeVectors = internalOptions.IncludeVectors }); + return this._mapper.MapFromStorageToDataModel((this.RemoveKeyPrefixIfNeeded(result.Id), retrievedHashEntries), new() { IncludeVectors = options.IncludeVectors }); }); // Process the score of the result item. - var vectorProperty = this._model.GetVectorPropertyOrSingle(internalOptions); + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); var distanceFunction = RedisVectorStoreCollectionSearchMapping.ResolveDistanceFunction(vectorProperty); var score = RedisVectorStoreCollectionSearchMapping.GetOutputScoreFromRedisScore(result["vector_score"].HasValue ? (float)result["vector_score"] : null, distanceFunction); @@ -384,6 +474,14 @@ public async IAsyncEnumerable> VectorizedSearchAsync } } + /// + [Obsolete("Use either SearchEmbeddingAsync to search directly on embeddings, or SearchAsync to handle embedding generation internally as part of the call.")] + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + where TVector : notnull + => this.SearchEmbeddingAsync(vector, top, options, cancellationToken); + + #endregion Search + /// public async IAsyncEnumerable GetAsync(Expression> filter, int top, GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -391,7 +489,14 @@ public async IAsyncEnumerable GetAsync(Expression> Verify.NotNull(filter); Verify.NotLessThan(top, 1); - Query query = RedisVectorStoreCollectionSearchMapping.BuildQuery(filter, top, options ??= new(), this._model); + options ??= new(); + + if (options.IncludeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + + Query query = RedisVectorStoreCollectionSearchMapping.BuildQuery(filter, top, options, this._model); var results = await this.RunOperationAsync( "FT.SEARCH", diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs index f88e959d2ea7..c465e203cc1c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using StackExchange.Redis; @@ -36,4 +37,9 @@ public sealed class RedisHashSetVectorStoreRecordCollectionOptions /// See , and . /// public VectorStoreRecordDefinition? VectorStoreRecordDefinition { get; init; } = null; + + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? 
EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs index 4413b296dc6a..5a01d33d53fa 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Linq; using System.Runtime.InteropServices; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; using StackExchange.Redis; @@ -17,7 +18,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; internal sealed class RedisHashSetVectorStoreRecordMapper(VectorStoreRecordModel model) { /// - public (string Key, HashEntry[] HashEntries) MapFromDataToStorageModel(TConsumerDataModel dataModel) + public (string Key, HashEntry[] HashEntries) MapFromDataToStorageModel(TConsumerDataModel dataModel, int recordIndex, IReadOnlyList?[]? generatedEmbeddings) { var keyValue = model.KeyProperty.GetValueAsObject(dataModel!) as string ?? throw new VectorStoreRecordMappingException($"Missing key property {model.KeyProperty.ModelName} on provided record of type '{typeof(TConsumerDataModel).Name}'."); @@ -29,9 +30,12 @@ internal sealed class RedisHashSetVectorStoreRecordMapper(Ve hashEntries.Add(new HashEntry(property.StorageName, RedisValue.Unbox(value))); } - foreach (var property in model.VectorProperties) + for (var i = 0; i < model.VectorProperties.Count; i++) { - var value = property.GetValueAsObject(dataModel!); + var property = model.VectorProperties[i]; + + var value = generatedEmbeddings?[i]?[recordIndex] ?? property.GetValueAsObject(dataModel!); + if (value is not null) { // Convert the vector to a byte array and store it in the hash entry. @@ -45,6 +49,14 @@ internal sealed class RedisHashSetVectorStoreRecordMapper(Ve case ReadOnlyMemory rod: hashEntries.Add(new HashEntry(property.StorageName, RedisVectorStoreRecordFieldMapping.ConvertVectorToBytes(rod))); continue; + + case Embedding embedding: + hashEntries.Add(new HashEntry(property.StorageName, RedisVectorStoreRecordFieldMapping.ConvertVectorToBytes(embedding.Vector))); + continue; + case Embedding embedding: + hashEntries.Add(new HashEntry(property.StorageName, RedisVectorStoreRecordFieldMapping.ConvertVectorToBytes(embedding.Vector))); + continue; + default: throw new VectorStoreRecordMappingException($"Unsupported vector type '{value.GetType()}'. Only float and double vectors are supported."); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonDynamicDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonDynamicDataModelMapper.cs index 58cff019fa28..0c1cd9e61ae7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonDynamicDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonDynamicDataModelMapper.cs @@ -1,8 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. 
using System.Collections.Generic; +using System.Diagnostics; using System.Text.Json; using System.Text.Json.Nodes; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; @@ -14,28 +16,49 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; internal class RedisJsonDynamicDataModelMapper(VectorStoreRecordModel model, JsonSerializerOptions jsonSerializerOptions) : IRedisJsonMapper> { /// - public (string Key, JsonNode Node) MapFromDataToStorageModel(Dictionary dataModel) + public (string Key, JsonNode Node) MapFromDataToStorageModel(Dictionary dataModel, int recordIndex, IReadOnlyList?[]? generatedEmbeddings) { var jsonObject = new JsonObject(); - foreach (var property in model.Properties) + // Key handled below, outside of the JsonNode + + foreach (var dataProperty in model.DataProperties) { - // Key handled below, outside of the JsonNode - if (property is VectorStoreRecordKeyPropertyModel) + if (dataModel.TryGetValue(dataProperty.ModelName, out var sourceValue)) { - continue; + jsonObject.Add(dataProperty.StorageName, sourceValue is null + ? null + : JsonSerializer.SerializeToNode(sourceValue, dataProperty.Type, jsonSerializerOptions)); } + } + + for (var i = 0; i < model.VectorProperties.Count; i++) + { + var property = model.VectorProperties[i]; - // Only map properties across that actually exist in the input. - if (!dataModel.TryGetValue(property.ModelName, out var sourceValue)) + if (generatedEmbeddings?[i] is IReadOnlyList propertyEmbedding) { - continue; - } + Debug.Assert(property.EmbeddingGenerator is not null); - // Replicate null if the property exists but is null. - jsonObject.Add(property.StorageName, sourceValue is null - ? null - : JsonSerializer.SerializeToNode(sourceValue, property.Type, jsonSerializerOptions)); + jsonObject.Add( + property.StorageName, + propertyEmbedding[recordIndex] switch + { + Embedding e => JsonSerializer.SerializeToNode(e.Vector, jsonSerializerOptions), + Embedding e => JsonSerializer.SerializeToNode(e.Vector, jsonSerializerOptions), + _ => throw new UnreachableException() + }); + } + else + { + // No generated embedding, read the vector directly from the data model + if (dataModel.TryGetValue(property.ModelName, out var sourceValue)) + { + jsonObject.Add(property.StorageName, sourceValue is null + ? null + : JsonSerializer.SerializeToNode(sourceValue, property.Type, jsonSerializerOptions)); + } + } } return ((string)dataModel[model.KeyProperty.ModelName]!, jsonObject); @@ -57,23 +80,36 @@ internal class RedisJsonDynamicDataModelMapper(VectorStoreRecordModel model, Jso _ => throw new VectorStoreRecordMappingException($"Invalid data format for document with key '{storageModel.Key}'"), }; - foreach (var property in model.Properties) + // The key was handled above + + foreach (var dataProperty in model.DataProperties) { - // Key handled above - if (property is VectorStoreRecordKeyPropertyModel) + // Replicate null if the property exists but is null. + if (jsonObject.TryGetPropertyValue(dataProperty.StorageName, out var sourceValue)) { - continue; + dataModel.Add(dataProperty.ModelName, sourceValue is null + ? null + : JsonSerializer.Deserialize(sourceValue, dataProperty.Type, jsonSerializerOptions)); } + } - // Replicate null if the property exists but is null. 
- if (!jsonObject.TryGetPropertyValue(property.StorageName, out var sourceValue)) + foreach (var vectorProperty in model.VectorProperties) + { + // For vector properties which have embedding generation configured, we need to remove the embeddings before deserializing + // (we can't go back from an embedding to e.g. string). + // For other cases (no embedding generation), we leave the properties even if IncludeVectors is false. + if (vectorProperty.EmbeddingGenerator is not null) { continue; } - dataModel.Add(property.ModelName, sourceValue is null - ? null - : JsonSerializer.Deserialize(sourceValue, property.Type, jsonSerializerOptions)); + // Replicate null if the property exists but is null. + if (jsonObject.TryGetPropertyValue(vectorProperty.StorageName, out var sourceValue)) + { + dataModel.Add(vectorProperty.ModelName, sourceValue is null + ? null + : JsonSerializer.Deserialize(sourceValue, vectorProperty.Type, jsonSerializerOptions)); + } } return dataModel; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index 0c79dd5d65e9..d67372eba098 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -10,8 +10,10 @@ using System.Text.Json.Nodes; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.Properties; using NRedisStack.Json.DataTypes; using NRedisStack.RedisStackCommands; using NRedisStack.Search; @@ -34,6 +36,14 @@ public sealed class RedisJsonVectorStoreRecordCollection : IVecto /// Metadata about vector store record collection. private readonly VectorStoreRecordCollectionMetadata _collectionMetadata; + internal static readonly HashSet s_supportedVectorTypes = + [ + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?), + typeof(ReadOnlyMemory?) + ]; + internal static readonly VectorStoreRecordModelBuildingOptions ModelBuildingOptions = new() { RequiresAtLeastOneVector = false, @@ -48,14 +58,6 @@ public sealed class RedisJsonVectorStoreRecordCollection : IVecto UsesExternalSerializer = true }; - internal static readonly HashSet s_supportedVectorTypes = - [ - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?), - typeof(ReadOnlyMemory?) - ]; - /// The default options for vector search. private static readonly VectorSearchOptions s_defaultVectorSearchOptions = new(); @@ -104,7 +106,7 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string name, Red this._options = options ?? new RedisJsonVectorStoreRecordCollectionOptions(); this._jsonSerializerOptions = this._options.JsonSerializerOptions ?? JsonSerializerOptions.Default; this._model = new VectorStoreRecordJsonModelBuilder(ModelBuildingOptions) - .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._jsonSerializerOptions); + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator, this._jsonSerializerOptions); // Lookup storage property names. 
this._dataStoragePropertyNames = this._model.DataProperties.Select(p => p.StorageName).ToArray(); @@ -205,6 +207,11 @@ await this.RunOperationAsync("FT.DROPINDEX", var maybePrefixedKey = this.PrefixKeyIfNeeded(stringKey); var includeVectors = options?.IncludeVectors ?? false; + if (includeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + // Get the Redis value. var redisResult = await this.RunOperationAsync( "GET", @@ -260,6 +267,10 @@ public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecor var maybePrefixedKeys = keysList.Select(key => this.PrefixKeyIfNeeded(key)); var redisKeys = maybePrefixedKeys.Select(x => new RedisKey(x)).ToArray(); var includeVectors = options?.IncludeVectors ?? false; + if (includeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } // Get the list of Redis results. var redisResults = await this.RunOperationAsync( @@ -333,6 +344,8 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati Verify.NotNull(record); // Map. + (_, var generatedEmbeddings) = await RedisVectorStoreRecordFieldMapping.ProcessEmbeddingsAsync(this._model, [record], cancellationToken).ConfigureAwait(false); + var redisJsonRecord = VectorStoreErrorHandler.RunModelConversion( RedisConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, @@ -340,7 +353,7 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati "SET", () => { - var mapResult = this._mapper.MapFromDataToStorageModel(record); + var mapResult = this._mapper.MapFromDataToStorageModel(record, recordIndex: 0, generatedEmbeddings); var serializedRecord = JsonSerializer.Serialize(mapResult.Node, this._jsonSerializerOptions); return new { Key = mapResult.Key, SerializedRecord = serializedRecord }; }); @@ -365,7 +378,12 @@ public async Task> UpsertAsync(IEnumerable records, Verify.NotNull(records); // Map. + (records, var generatedEmbeddings) = await RedisVectorStoreRecordFieldMapping.ProcessEmbeddingsAsync(this._model, records, cancellationToken).ConfigureAwait(false); + var redisRecords = new List<(string maybePrefixedKey, string originalKey, string serializedRecord)>(); + + var recordIndex = 0; + foreach (var record in records) { var redisJsonRecord = VectorStoreErrorHandler.RunModelConversion( @@ -375,7 +393,7 @@ public async Task> UpsertAsync(IEnumerable records, "MSET", () => { - var mapResult = this._mapper.MapFromDataToStorageModel(record); + var mapResult = this._mapper.MapFromDataToStorageModel(record, recordIndex++, generatedEmbeddings); var serializedRecord = JsonSerializer.Serialize(mapResult.Node, this._jsonSerializerOptions); return new { Key = mapResult.Key, SerializedRecord = serializedRecord }; }); @@ -395,21 +413,93 @@ await this.RunOperationAsync( return redisRecords.Select(x => (TKey)(object)x.originalKey).ToList(); } + #region Search + /// - public async IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable> SearchAsync( + TInput value, + int top, + VectorSearchOptions? 
options = default, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TInput : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + switch (vectorProperty.EmbeddingGenerator) + { + case IEmbeddingGenerator> generator: + { + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + } + + case IEmbeddingGenerator> generator: + { + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + } + + case null: + throw new InvalidOperationException(VectorDataStrings.NoEmbeddingGeneratorWasConfiguredForSearch); + + default: + throw new InvalidOperationException( + s_supportedVectorTypes.Contains(typeof(TInput)) + ? string.Format(VectorDataStrings.EmbeddingTypePassedToSearchAsync) + : string.Format(VectorDataStrings.IncompatibleEmbeddingGeneratorWasConfiguredForInputType, typeof(TInput).Name, vectorProperty.EmbeddingGenerator.GetType().Name)); + } + } + + /// + public IAsyncEnumerable> SearchEmbeddingAsync( + TVector vector, + int top, + VectorSearchOptions? options = null, + CancellationToken cancellationToken = default) + where TVector : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + return this.SearchCoreAsync(vector, top, vectorProperty, operationName: "SearchEmbedding", options, cancellationToken); + } + + private async IAsyncEnumerable> SearchCoreAsync( + TVector vector, + int top, + VectorStoreRecordVectorPropertyModel vectorProperty, + string operationName, + VectorSearchOptions options, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TVector : notnull { Verify.NotNull(vector); Verify.NotLessThan(top, 1); - var internalOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._model.GetVectorPropertyOrSingle(internalOptions); + if (options.IncludeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } // Build query & search. byte[] vectorBytes = RedisVectorStoreCollectionSearchMapping.ValidateVectorAndConvertToBytes(vector, "JSON"); var query = RedisVectorStoreCollectionSearchMapping.BuildQuery( vectorBytes, top, - internalOptions, + options, this._model, vectorProperty, null); @@ -433,11 +523,11 @@ public async IAsyncEnumerable> VectorizedSearchAsync var node = JsonSerializer.Deserialize(redisResultString, this._jsonSerializerOptions)!; return this._mapper.MapFromStorageToDataModel( (this.RemoveKeyPrefixIfNeeded(result.Id), node), - new() { IncludeVectors = internalOptions.IncludeVectors }); + new() { IncludeVectors = options.IncludeVectors }); }); // Process the score of the result item. 
- var vectorProperty = this._model.GetVectorPropertyOrSingle(internalOptions); + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); var distanceFunction = RedisVectorStoreCollectionSearchMapping.ResolveDistanceFunction(vectorProperty); var score = RedisVectorStoreCollectionSearchMapping.GetOutputScoreFromRedisScore(result["vector_score"].HasValue ? (float)result["vector_score"] : null, distanceFunction); @@ -450,6 +540,14 @@ public async IAsyncEnumerable> VectorizedSearchAsync } } + /// + [Obsolete("Use either SearchEmbeddingAsync to search directly on embeddings, or SearchAsync to handle embedding generation internally as part of the call.")] + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + where TVector : notnull + => this.SearchEmbeddingAsync(vector, top, options, cancellationToken); + + #endregion Search + /// public async IAsyncEnumerable GetAsync(Expression> filter, int top, GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -457,6 +555,11 @@ public async IAsyncEnumerable GetAsync(Expression> Verify.NotNull(filter); Verify.NotLessThan(top, 1); + if (options?.IncludeVectors == true && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + Query query = RedisVectorStoreCollectionSearchMapping.BuildQuery(filter, top, options ??= new(), this._model); var results = await this.RunOperationAsync( diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs index eea8cc025988..f9c1c4e08fb4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollectionOptions.cs @@ -3,6 +3,7 @@ using System; using System.Text.Json; using System.Text.Json.Nodes; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.Redis; @@ -45,4 +46,9 @@ public sealed class RedisJsonVectorStoreRecordCollectionOptions /// Gets or sets the JSON serializer options to use when converting between the data model and the Redis record. /// public JsonSerializerOptions? JsonSerializerOptions { get; init; } = null; + + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs index d0bde9db3fc1..a2a29acdb9c9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordMapper.cs @@ -1,8 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. 
+using System.Collections.Generic; +using System.Diagnostics; using System.Linq; using System.Text.Json; using System.Text.Json.Nodes; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; @@ -21,22 +24,43 @@ internal sealed class RedisJsonVectorStoreRecordMapper( private readonly string _keyPropertyStorageName = model.KeyProperty.StorageName; /// - public (string Key, JsonNode Node) MapFromDataToStorageModel(TConsumerDataModel dataModel) + public (string Key, JsonNode Node) MapFromDataToStorageModel(TConsumerDataModel dataModel, int recordIndex, IReadOnlyList?[]? generatedEmbeddings) { // Convert the provided record into a JsonNode object and try to get the key field for it. // Since we already checked that the key field is a string in the constructor, and that it exists on the model, // the only edge case we have to be concerned about is if the key field is null. - var jsonNode = JsonSerializer.SerializeToNode(dataModel, jsonSerializerOptions); - if (jsonNode!.AsObject().TryGetPropertyValue(this._keyPropertyStorageName, out var keyField) && keyField is JsonValue jsonValue) + var jsonNode = JsonSerializer.SerializeToNode(dataModel, jsonSerializerOptions)!.AsObject(); + + if (!(jsonNode.TryGetPropertyValue(this._keyPropertyStorageName, out var keyField) && keyField is JsonValue jsonValue)) { - // Remove the key field from the JSON object since we don't want to store it in the redis payload. - var keyValue = jsonValue.ToString(); - jsonNode.AsObject().Remove(this._keyPropertyStorageName); + throw new VectorStoreRecordMappingException($"Missing key field '{this._keyPropertyStorageName}' on provided record of type {typeof(TConsumerDataModel).FullName}."); + } + + // Remove the key field from the JSON object since we don't want to store it in the redis payload. + var keyValue = jsonValue.ToString(); + jsonNode.Remove(this._keyPropertyStorageName); - return (keyValue, jsonNode); + // Go over the vector properties; those which have an embedding generator configured on them will have embedding generators, overwrite + // the value in the JSON object with that. + if (generatedEmbeddings is not null) + { + for (var i = 0; i < model.VectorProperties.Count; i++) + { + if (generatedEmbeddings[i] is IReadOnlyList propertyEmbeddings) + { + var property = model.VectorProperties[i]; + Debug.Assert(property.EmbeddingGenerator is not null); + jsonNode[property.StorageName] = propertyEmbeddings[recordIndex] switch + { + Embedding e => JsonSerializer.SerializeToNode(e.Vector, jsonSerializerOptions), + Embedding e => JsonSerializer.SerializeToNode(e.Vector, jsonSerializerOptions), + _ => throw new UnreachableException() + }; + } + } } - throw new VectorStoreRecordMappingException($"Missing key field '{this._keyPropertyStorageName}' on provided record of type {typeof(TConsumerDataModel).FullName}."); + return (keyValue, jsonNode); } /// @@ -66,6 +90,20 @@ public TConsumerDataModel MapFromStorageToDataModel((string Key, JsonNode Node) // Since the key is not stored in the redis value, add it back in before deserializing into the data model. jsonObject.Add(this._keyPropertyStorageName, storageModel.Key); + // For vector properties which have embedding generation configured, we need to remove the embeddings before deserializing + // (we can't go back from an embedding to e.g. string). + // For other cases (no embedding generation), we leave the properties even if IncludeVectors is false. 
+ if (!options.IncludeVectors) + { + foreach (var vectorProperty in model.VectorProperties) + { + if (vectorProperty.EmbeddingGenerator is not null) + { + jsonObject.Remove(vectorProperty.StorageName); + } + } + } + return JsonSerializer.Deserialize(jsonObject, jsonSerializerOptions)!; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisServiceCollectionExtensions.cs index 8c60f9e2a4fc..ed00293afd27 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisServiceCollectionExtensions.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using Microsoft.Extensions.AI; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Redis; @@ -28,11 +29,12 @@ public static IServiceCollection AddRedisVectorStore(this IServiceCollection ser (sp, obj) => { var database = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new RedisVectorStore( - database, - selectedOptions); + return new RedisVectorStore(database, options); }); return services; @@ -55,11 +57,12 @@ public static IServiceCollection AddRedisVectorStore(this IServiceCollection ser (sp, obj) => { var database = ConnectionMultiplexer.Connect(redisConnectionConfiguration).GetDatabase(); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new RedisVectorStore( - database, - selectedOptions); + return new RedisVectorStore(database, options); }); return services; @@ -87,9 +90,12 @@ public static IServiceCollection AddRedisHashSetVectorStoreRecordCollection { var database = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new RedisHashSetVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new RedisHashSetVectorStoreRecordCollection(database, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -121,9 +127,12 @@ public static IServiceCollection AddRedisHashSetVectorStoreRecordCollection { var database = ConnectionMultiplexer.Connect(redisConnectionConfiguration).GetDatabase(); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new RedisHashSetVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new RedisHashSetVectorStoreRecordCollection(database, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -153,9 +162,12 @@ public static IServiceCollection AddRedisJsonVectorStoreRecordCollection { var database = sp.GetRequiredService(); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? 
new() + { + EmbeddingGenerator = sp.GetService() + }; - return new RedisJsonVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new RedisJsonVectorStoreRecordCollection(database, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -187,9 +199,12 @@ public static IServiceCollection AddRedisJsonVectorStoreRecordCollection { var database = ConnectionMultiplexer.Connect(redisConnectionConfiguration).GetDatabase(); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new RedisJsonVectorStoreRecordCollection(database, collectionName, selectedOptions); + return new RedisJsonVectorStoreRecordCollection(database, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -198,14 +213,14 @@ public static IServiceCollection AddRedisJsonVectorStoreRecordCollection - /// Also register the with the given as a . + /// Also register the with the given as a . /// /// The type of the data model that the collection should contain. /// The service collection to register on. /// The service id that the registrations should use. private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) where TRecord : notnull { - services.AddKeyedTransient>( + services.AddKeyedTransient>( serviceId, (sp, obj) => { diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs index 3df29e9758b0..156acb9bed66 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs @@ -64,12 +64,20 @@ public IVectorStoreRecordCollection GetCollection( if (this._options.StorageType == RedisStorageType.HashSet) { - var recordCollection = new RedisHashSetVectorStoreRecordCollection(this._database, name, new RedisHashSetVectorStoreRecordCollectionOptions() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; + var recordCollection = new RedisHashSetVectorStoreRecordCollection(this._database, name, new RedisHashSetVectorStoreRecordCollectionOptions() + { + VectorStoreRecordDefinition = vectorStoreRecordDefinition, + EmbeddingGenerator = this._options.EmbeddingGenerator + }) as IVectorStoreRecordCollection; return recordCollection!; } else { - var recordCollection = new RedisJsonVectorStoreRecordCollection(this._database, name, new RedisJsonVectorStoreRecordCollectionOptions() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; + var recordCollection = new RedisJsonVectorStoreRecordCollection(this._database, name, new RedisJsonVectorStoreRecordCollectionOptions() + { + VectorStoreRecordDefinition = vectorStoreRecordDefinition, + EmbeddingGenerator = this._options.EmbeddingGenerator + }) as IVectorStoreRecordCollection; return recordCollection!; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs index a6c3545bcea2..6ee4e163eadf 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs @@ -3,6 +3,7 @@ using System; using System.Collections; using System.Collections.Generic; +using System.Diagnostics; using 
System.Globalization; using System.Linq; using Microsoft.Extensions.VectorData; @@ -166,12 +167,13 @@ public static string GetSDKDistanceAlgorithm(VectorStoreRecordVectorPropertyMode /// The SDK required vector type. /// Thrown if the property data type is not supported by the connector. public static string GetSDKVectorType(VectorStoreRecordVectorPropertyModel vectorProperty) - => vectorProperty.Type switch + => vectorProperty.EmbeddingType switch { Type t when t == typeof(ReadOnlyMemory) => "FLOAT32", Type t when t == typeof(ReadOnlyMemory?) => "FLOAT32", Type t when t == typeof(ReadOnlyMemory) => "FLOAT64", Type t when t == typeof(ReadOnlyMemory?) => "FLOAT64", + null => throw new UnreachableException("null embedding type"), _ => throw new InvalidOperationException($"Vector data type '{vectorProperty.Type.Name}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorProperty.ModelName}' is not supported by the Redis VectorStore.") }; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs index 660bfec012c3..c31580ec1730 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using Microsoft.Extensions.AI; namespace Microsoft.SemanticKernel.Connectors.Redis; @@ -19,4 +20,9 @@ public sealed class RedisVectorStoreOptions /// Indicates the way in which data should be stored in redis. Default is . /// public RedisStorageType? StorageType { get; init; } = RedisStorageType.Json; + + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreRecordFieldMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreRecordFieldMapping.cs index fd9d183330a4..40b9c9d0c120 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreRecordFieldMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreRecordFieldMapping.cs @@ -1,7 +1,13 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Collections.Generic; +using System.Linq; using System.Runtime.InteropServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Redis; @@ -29,4 +35,61 @@ public static byte[] ConvertVectorToBytes(ReadOnlyMemory vector) { return MemoryMarshal.AsBytes(vector.Span).ToArray(); } + + internal static async ValueTask<(IEnumerable records, IReadOnlyList?[]?)> ProcessEmbeddingsAsync( + VectorStoreRecordModel model, + IEnumerable records, + CancellationToken cancellationToken) + where TRecord : notnull + { + IReadOnlyList? recordsList = null; + + // If an embedding generator is defined, invoke it once per property for all records. + IReadOnlyList?[]? 
generatedEmbeddings = null; + + var vectorPropertyCount = model.VectorProperties.Count; + for (var i = 0; i < vectorPropertyCount; i++) + { + var vectorProperty = model.VectorProperties[i]; + + if (vectorProperty.EmbeddingGenerator is null) + { + continue; + } + + // We have a property with embedding generation; materialize the records' enumerable if needed, to + // prevent multiple enumeration. + if (recordsList is null) + { + recordsList = records is IReadOnlyList r ? r : records.ToList(); + + if (recordsList.Count == 0) + { + return (records, null); + } + + records = recordsList; + } + + // TODO: Ideally we'd group together vector properties using the same generator (and with the same input and output properties), + // and generate embeddings for them in a single batch. That's some more complexity though. + if (vectorProperty.TryGenerateEmbeddings, ReadOnlyMemory>(records, cancellationToken, out var floatTask)) + { + generatedEmbeddings ??= new IReadOnlyList?[vectorPropertyCount]; + generatedEmbeddings[i] = await floatTask.ConfigureAwait(false); + } + else if (vectorProperty.TryGenerateEmbeddings, ReadOnlyMemory>(records, cancellationToken, out var doubleTask)) + { + generatedEmbeddings ??= new IReadOnlyList?[vectorPropertyCount]; + generatedEmbeddings[i] = await doubleTask.ConfigureAwait(false); + } + else + { + throw new InvalidOperationException( + $"The embedding generator configured on property '{vectorProperty.ModelName}' cannot produce an embedding of type '{typeof(Embedding).Name}' for the given input type."); + } + } + + return (records, generatedEmbeddings); + } } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs index d964681e18ab..ed2cbbcf5df4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/RecordMapper.cs @@ -2,6 +2,8 @@ using System; using System.Collections.Generic; +using System.Diagnostics; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; @@ -9,7 +11,7 @@ namespace Microsoft.SemanticKernel.Connectors.SqlServer; internal sealed class RecordMapper(VectorStoreRecordModel model) { - public IDictionary MapFromDataToStorageModel(TRecord dataModel) + public IDictionary MapFromDataToStorageModel(TRecord dataModel, int recordIndex, IReadOnlyList?[]? generatedEmbeddings) { Dictionary map = new(StringComparer.Ordinal); @@ -20,10 +22,18 @@ internal sealed class RecordMapper(VectorStoreRecordModel model) map[property.StorageName] = property.GetValueAsObject(dataModel!); } - foreach (var property in model.VectorProperties) + for (var i = 0; i < model.VectorProperties.Count; i++) { - // We restrict the vector properties to ReadOnlyMemory so the cast here is safe. - map[property.StorageName] = (ReadOnlyMemory)property.GetValueAsObject(dataModel!)!; + var property = model.VectorProperties[i]; + + // We restrict the vector properties to ReadOnlyMemory in model validation + map[property.StorageName] = generatedEmbeddings?[i] is IReadOnlyList e + ? 
e[recordIndex] switch + { + Embedding fe => fe.Vector, + _ => throw new UnreachableException() + } + : (ReadOnlyMemory)property.GetValueAsObject(dataModel!)!; } return map; diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs index f74de4a8fcb5..072bfd58c689 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerConstants.cs @@ -15,52 +15,51 @@ internal static class SqlServerConstants internal const int MaxIndexNameLength = 128; + internal static readonly HashSet SupportedVectorTypes = + [ + typeof(ReadOnlyMemory), // VECTOR + typeof(ReadOnlyMemory?) + ]; + public static readonly VectorStoreRecordModelBuildingOptions ModelBuildingOptions = new() { RequiresAtLeastOneVector = false, SupportsMultipleKeys = false, SupportsMultipleVectors = true, - SupportedKeyPropertyTypes = SqlServerConstants.SupportedKeyTypes, - SupportedDataPropertyTypes = SqlServerConstants.SupportedDataTypes, - SupportedEnumerableDataPropertyElementTypes = [], - SupportedVectorPropertyTypes = SqlServerConstants.SupportedVectorTypes - }; - - internal static readonly HashSet SupportedKeyTypes = - [ - typeof(int), // INT - typeof(long), // BIGINT - typeof(string), // VARCHAR - typeof(Guid), // UNIQUEIDENTIFIER - typeof(DateTime), // DATETIME2 - typeof(byte[]) // VARBINARY - ]; + SupportedKeyPropertyTypes = + [ + typeof(int), // INT + typeof(long), // BIGINT + typeof(string), // VARCHAR + typeof(Guid), // UNIQUEIDENTIFIER + typeof(DateTime), // DATETIME2 + typeof(byte[]) // VARBINARY + ], - internal static readonly HashSet SupportedDataTypes = - [ - typeof(int), // INT - typeof(short), // SMALLINT - typeof(byte), // TINYINT - typeof(long), // BIGINT. - typeof(Guid), // UNIQUEIDENTIFIER. - typeof(string), // NVARCHAR - typeof(byte[]), // VARBINARY - typeof(bool), // BIT - typeof(DateTime), // DATETIME2 + SupportedDataPropertyTypes = + [ + typeof(int), // INT + typeof(short), // SMALLINT + typeof(byte), // TINYINT + typeof(long), // BIGINT. + typeof(Guid), // UNIQUEIDENTIFIER. + typeof(string), // NVARCHAR + typeof(byte[]), // VARBINARY + typeof(bool), // BIT + typeof(DateTime), // DATETIME2 #if NET - // We don't support mapping TimeSpan to TIME on purpose - // See https://github.com/microsoft/semantic-kernel/pull/10623#discussion_r1980350721 - typeof(TimeOnly), // TIME + // We don't support mapping TimeSpan to TIME on purpose + // See https://github.com/microsoft/semantic-kernel/pull/10623#discussion_r1980350721 + typeof(TimeOnly), // TIME #endif - typeof(decimal), // DECIMAL - typeof(double), // FLOAT - typeof(float), // REAL - ]; + typeof(decimal), // DECIMAL + typeof(double), // FLOAT + typeof(float), // REAL + ], - internal static readonly HashSet SupportedVectorTypes = - [ - typeof(ReadOnlyMemory), // VECTOR - typeof(ReadOnlyMemory?) 
- ]; + SupportedEnumerableDataPropertyElementTypes = [], + + SupportedVectorPropertyTypes = SupportedVectorTypes + }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs index d1e76def38ec..943bd3fd21dc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStore.cs @@ -37,7 +37,7 @@ public SqlServerVectorStore(string connectionString, SqlServerVectorStoreOptions // We need to create a copy, so any changes made to the option bag after // the ctor call do not affect this instance. this._options = options is not null - ? new() { Schema = options.Schema } + ? new() { Schema = options.Schema, EmbeddingGenerator = options.EmbeddingGenerator } : SqlServerVectorStoreOptions.Defaults; var connectionStringBuilder = new SqlConnectionStringBuilder(connectionString); @@ -62,7 +62,8 @@ public IVectorStoreRecordCollection GetCollection( new() { Schema = this._options.Schema, - RecordDefinition = vectorStoreRecordDefinition + RecordDefinition = vectorStoreRecordDefinition, + EmbeddingGenerator = this._options.EmbeddingGenerator }); } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreOptions.cs index a90b474a3d5f..9fbbd69bc8b6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreOptions.cs @@ -1,5 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. +using Microsoft.Extensions.AI; + namespace Microsoft.SemanticKernel.Connectors.SqlServer; /// @@ -13,4 +15,9 @@ public sealed class SqlServerVectorStoreOptions /// Gets or sets the database schema. /// public string? Schema { get; init; } = null; + + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? 
EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs index 6d20ef10d89c..fb2617621475 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollection.cs @@ -8,8 +8,10 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Data.SqlClient; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.Properties; namespace Microsoft.SemanticKernel.Connectors.SqlServer; @@ -49,7 +51,7 @@ public SqlServerVectorStoreRecordCollection( Verify.NotNull(name); this._model = new VectorStoreRecordModelBuilder(SqlServerConstants.ModelBuildingOptions) - .Build(typeof(TRecord), options?.RecordDefinition); + .Build(typeof(TRecord), options?.RecordDefinition, options?.EmbeddingGenerator); this._connectionString = connectionString; this.Name = name; @@ -233,6 +235,11 @@ public async Task DeleteAsync(IEnumerable keys, CancellationToken cancella bool includeVectors = options?.IncludeVectors is true; + if (includeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + using SqlConnection connection = new(this._connectionString); using SqlCommand command = SqlServerCommandBuilder.SelectSingle( connection, @@ -269,6 +276,11 @@ public async IAsyncEnumerable GetAsync(IEnumerable keys, GetRecor bool includeVectors = options?.IncludeVectors is true; + if (includeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + using SqlConnection connection = new(this._connectionString); using SqlCommand command = connection.CreateCommand(); int taken = 0; @@ -322,13 +334,39 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati { Verify.NotNull(record); + IReadOnlyList?[]? generatedEmbeddings = null; + + var vectorPropertyCount = this._model.VectorProperties.Count; + for (var i = 0; i < vectorPropertyCount; i++) + { + var vectorProperty = this._model.VectorProperties[i]; + + if (vectorProperty.EmbeddingGenerator is null) + { + continue; + } + + // TODO: Ideally we'd group together vector properties using the same generator (and with the same input and output properties), + // and generate embeddings for them in a single batch. That's some more complexity though. 
+ if (vectorProperty.TryGenerateEmbedding, ReadOnlyMemory>(record, cancellationToken, out var floatTask)) + { + generatedEmbeddings ??= new IReadOnlyList?[vectorPropertyCount]; + generatedEmbeddings[i] = [await floatTask.ConfigureAwait(false)]; + } + else + { + throw new InvalidOperationException( + $"The embedding generator configured on property '{vectorProperty.ModelName}' cannot produce an embedding of type '{typeof(Embedding).Name}' for the given input type."); + } + } + using SqlConnection connection = new(this._connectionString); using SqlCommand command = SqlServerCommandBuilder.MergeIntoSingle( connection, this._options.Schema, this.Name, this._model, - this._mapper.MapFromDataToStorageModel(record)); + this._mapper.MapFromDataToStorageModel(record, recordIndex: 0, generatedEmbeddings)); return await ExceptionWrapper.WrapAsync(connection, command, async static (cmd, ct) => @@ -348,6 +386,49 @@ public async Task> UpsertAsync(IEnumerable records, { Verify.NotNull(records); + IReadOnlyList? recordsList = null; + + // If an embedding generator is defined, invoke it once per property for all records. + IReadOnlyList?[]? generatedEmbeddings = null; + + var vectorPropertyCount = this._model.VectorProperties.Count; + for (var i = 0; i < vectorPropertyCount; i++) + { + var vectorProperty = this._model.VectorProperties[i]; + + if (vectorProperty.EmbeddingGenerator is null) + { + continue; + } + + // We have a property with embedding generation; materialize the records' enumerable if needed, to + // prevent multiple enumeration. + if (recordsList is null) + { + recordsList = records is IReadOnlyList r ? r : records.ToList(); + + if (recordsList.Count == 0) + { + return []; + } + + records = recordsList; + } + + // TODO: Ideally we'd group together vector properties using the same generator (and with the same input and output properties), + // and generate embeddings for them in a single batch. That's some more complexity though. + if (vectorProperty.TryGenerateEmbeddings, ReadOnlyMemory>(records, cancellationToken, out var floatTask)) + { + generatedEmbeddings ??= new IReadOnlyList?[vectorPropertyCount]; + generatedEmbeddings[i] = (IReadOnlyList>)await floatTask.ConfigureAwait(false); + } + else + { + throw new InvalidOperationException( + $"The embedding generator configured on property '{vectorProperty.ModelName}' cannot produce an embedding of type '{typeof(Embedding).Name}' for the given input type."); + } + } + using SqlConnection connection = new(this._connectionString); await connection.OpenAsync(cancellationToken).ConfigureAwait(false); @@ -373,7 +454,7 @@ public async Task> UpsertAsync(IEnumerable records, this._model, records.Skip(taken) .Take(SqlServerConstants.MaxParameterCount / parametersPerRecord) - .Select(this._mapper.MapFromDataToStorageModel))) + .Select((r, i) => this._mapper.MapFromDataToStorageModel(r, taken + i, generatedEmbeddings)))) { break; // records is empty } @@ -418,8 +499,64 @@ public async Task> UpsertAsync(IEnumerable records, return records.Select(r => (TKey)keyProperty.GetValueAsObject(r)!).ToList(); } - /// - public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + #region Search + + /// + public async IAsyncEnumerable> SearchAsync( + TInput value, + int top, + VectorSearchOptions? 
options = default, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TInput : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + switch (vectorProperty.EmbeddingGenerator) + { + case IEmbeddingGenerator> generator: + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + + case null: + throw new InvalidOperationException(VectorDataStrings.NoEmbeddingGeneratorWasConfiguredForSearch); + + default: + throw new InvalidOperationException( + SqlServerConstants.SupportedVectorTypes.Contains(typeof(TInput)) + ? string.Format(VectorDataStrings.EmbeddingTypePassedToSearchAsync) + : string.Format(VectorDataStrings.IncompatibleEmbeddingGeneratorWasConfiguredForInputType, typeof(TInput).Name, vectorProperty.EmbeddingGenerator.GetType().Name)); + } + } + + /// + public IAsyncEnumerable> SearchEmbeddingAsync( + TVector vector, + int top, + VectorSearchOptions? options = null, + CancellationToken cancellationToken = default) + where TVector : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + return this.SearchCoreAsync(vector, top, vectorProperty, operationName: "SearchEmbedding", options, cancellationToken); + } + + private IAsyncEnumerable> SearchCoreAsync( + TVector vector, + int top, + VectorStoreRecordVectorPropertyModel vectorProperty, + string operationName, + VectorSearchOptions options, + CancellationToken cancellationToken = default) + where TVector : notnull { Verify.NotNull(vector); Verify.NotLessThan(top, 1); @@ -431,14 +568,16 @@ public IAsyncEnumerable> VectorizedSearchAsync l.FullName))}"); } #pragma warning disable CS0618 // Type or member is obsolete - else if (options is not null && options.OldFilter is not null) + else if (options.OldFilter is not null) #pragma warning restore CS0618 // Type or member is obsolete { throw new NotSupportedException("The obsolete Filter is not supported by the SQL Server connector, use NewFilter instead."); } - var searchOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._model.GetVectorPropertyOrSingle(searchOptions); + if (options.IncludeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } #pragma warning disable CA2000 // Dispose objects before losing scope // Connection and command are going to be disposed by the ReadVectorSearchResultsAsync, @@ -451,13 +590,21 @@ public IAsyncEnumerable> VectorizedSearchAsync + [Obsolete("Use either SearchEmbeddingAsync to search directly on embeddings, or SearchAsync to handle embedding generation internally as part of the call.")] + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + where TVector : notnull + => this.SearchEmbeddingAsync(vector, top, options, cancellationToken); + + #endregion Search + /// public object? GetService(Type serviceType, object? 
serviceKey = null) { diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs index 26a41f86d9de..baec6bd86e7e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerVectorStoreRecordCollectionOptions.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.SqlServer; @@ -34,4 +35,9 @@ public sealed class SqlServerVectorStoreRecordCollectionOptions /// See , and . /// public VectorStoreRecordDefinition? RecordDefinition { get; init; } + + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs index 4fdb0d420c0e..9f94432d28e3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteConstants.cs @@ -16,47 +16,43 @@ internal static class SqliteConstants /// public const string VectorSearchExtensionName = "vec0"; + /// A of types that vector properties on the provided model may have. + public static readonly HashSet SupportedVectorTypes = + [ + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?) + ]; + public static readonly VectorStoreRecordModelBuildingOptions ModelBuildingOptions = new() { RequiresAtLeastOneVector = false, SupportsMultipleKeys = false, SupportsMultipleVectors = true, - SupportedKeyPropertyTypes = SqliteConstants.SupportedKeyTypes, - SupportedDataPropertyTypes = SqliteConstants.SupportedDataTypes, + SupportedKeyPropertyTypes = + [ + typeof(ulong), + typeof(string) + ], + + SupportedDataPropertyTypes = + [ + typeof(int), + typeof(long), + typeof(ulong), + typeof(short), + typeof(ushort), + typeof(string), + typeof(bool), + typeof(float), + typeof(double), + typeof(decimal), + typeof(byte[]) + ], + SupportedEnumerableDataPropertyElementTypes = [], - SupportedVectorPropertyTypes = SqliteConstants.SupportedVectorTypes, + SupportedVectorPropertyTypes = SupportedVectorTypes, EscapeIdentifier = SqliteVectorStoreCollectionCommandBuilder.EscapeIdentifier }; - - /// A of types that a key on the provided model may have. - public static readonly HashSet SupportedKeyTypes = - [ - typeof(ulong), - typeof(string) - ]; - - /// A of types that data properties on the provided model may have. - public static readonly HashSet SupportedDataTypes = - [ - typeof(int), - typeof(long), - typeof(ulong), - typeof(short), - typeof(ushort), - typeof(string), - typeof(bool), - typeof(float), - typeof(double), - typeof(decimal), - typeof(byte[]) - ]; - - /// A of types that vector properties on the provided model may have. - public static readonly HashSet SupportedVectorTypes = - [ - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?) 
- ]; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteServiceCollectionExtensions.cs index 6ec7f34a0e29..b3db0e1bc4b6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteServiceCollectionExtensions.cs @@ -2,6 +2,7 @@ using System; using Microsoft.Data.Sqlite; +using Microsoft.Extensions.AI; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Sqlite; @@ -45,10 +46,10 @@ public static IServiceCollection AddSqliteVectorStore( string? serviceId = default) => services.AddKeyedSingleton( serviceId, - (sp, _) => new SqliteVectorStore(connectionString, options ?? sp.GetService())); + (sp, _) => new SqliteVectorStore(connectionString, options ?? sp.GetService() ?? new() { EmbeddingGenerator = sp.GetService() })); /// - /// Register a SQLite and with the specified service ID + /// Register a SQLite and with the specified service ID /// and where the SQLite is retrieved from the dependency injection container. /// In this case vector search extension loading should be handled manually. /// @@ -69,7 +70,7 @@ public static IServiceCollection AddSqliteVectorStoreRecordCollection throw new InvalidOperationException("Use AddSqliteVectorStore with connectionString instead."); /// - /// Register a SQLite and with the specified service ID. + /// Register a SQLite and with the specified service ID. /// instance will be initialized, connection will be opened and vector search extension with be loaded. /// /// The type of the key. @@ -95,7 +96,10 @@ public static IServiceCollection AddSqliteVectorStoreRecordCollection( connectionString, collectionName, - options ?? sp.GetService>()) + options ?? sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }) as IVectorStoreRecordCollection)!); AddVectorizedSearch(services, serviceId); @@ -104,7 +108,7 @@ public static IServiceCollection AddSqliteVectorStoreRecordCollection - /// Also register the with the given as a . + /// Also register the with the given as a . /// /// The type of the key. /// The type of the data model that the collection should contain. @@ -112,7 +116,7 @@ public static IServiceCollection AddSqliteVectorStoreRecordCollectionThe service id that the registrations should use. private static void AddVectorizedSearch(IServiceCollection services, string? 
serviceId) where TRecord : notnull where TKey : notnull - => services.AddKeyedSingleton>( + => services.AddKeyedSingleton>( serviceId, (sp, _) => sp.GetRequiredKeyedService>(serviceId)); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs index b06d243572d8..c820d7baf68c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStore.cs @@ -85,7 +85,8 @@ public IVectorStoreRecordCollection GetCollection( { VectorStoreRecordDefinition = vectorStoreRecordDefinition, VectorSearchExtensionName = this._options.VectorSearchExtensionName, - VectorVirtualTableName = this._options.VectorVirtualTableName + VectorVirtualTableName = this._options.VectorVirtualTableName, + EmbeddingGenerator = this._options.EmbeddingGenerator }) as IVectorStoreRecordCollection; return recordCollection!; diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreOptions.cs index 722e756e5faa..63b96715c47d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreOptions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using Microsoft.Extensions.AI; namespace Microsoft.SemanticKernel.Connectors.Sqlite; @@ -29,4 +30,9 @@ public sealed class SqliteVectorStoreOptions /// If not provided, collection name with prefix "vec_" will be used as virtual table name. /// public string? VectorVirtualTableName { get; set; } = null; + + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs index 69d30e3ca18e..b7f0ec49dae7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollection.cs @@ -9,8 +9,10 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Data.Sqlite; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.Properties; namespace Microsoft.SemanticKernel.Connectors.Sqlite; @@ -92,7 +94,7 @@ public SqliteVectorStoreRecordCollection( this._vectorTableName = GetVectorTableName(name, this._options).EscapeIdentifier(); this._model = new VectorStoreRecordModelBuilder(SqliteConstants.ModelBuildingOptions) - .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition); + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator); this._vectorPropertiesExist = this._model.VectorProperties.Count > 0; @@ -156,8 +158,64 @@ public async Task DeleteCollectionAsync(CancellationToken cancellationToken = de } } + #region Search + /// - public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public async IAsyncEnumerable> SearchAsync( + TInput value, + int top, + VectorSearchOptions? 
options = default, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TInput : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + switch (vectorProperty.EmbeddingGenerator) + { + case IEmbeddingGenerator> generator: + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + + case null: + throw new InvalidOperationException(VectorDataStrings.NoEmbeddingGeneratorWasConfiguredForSearch); + + default: + throw new InvalidOperationException( + SqliteConstants.SupportedVectorTypes.Contains(typeof(TInput)) + ? string.Format(VectorDataStrings.EmbeddingTypePassedToSearchAsync) + : string.Format(VectorDataStrings.IncompatibleEmbeddingGeneratorWasConfiguredForInputType, typeof(TInput).Name, vectorProperty.EmbeddingGenerator.GetType().Name)); + } + } + + /// + public IAsyncEnumerable> SearchEmbeddingAsync( + TVector vector, + int top, + VectorSearchOptions? options = null, + CancellationToken cancellationToken = default) + where TVector : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + return this.SearchCoreAsync(vector, top, vectorProperty, operationName: "SearchEmbedding", options, cancellationToken); + } + + private IAsyncEnumerable> SearchCoreAsync( + TVector vector, + int top, + VectorStoreRecordVectorPropertyModel vectorProperty, + string operationName, + VectorSearchOptions options, + CancellationToken cancellationToken = default) + where TVector : notnull { const string LimitPropertyName = "k"; @@ -172,14 +230,16 @@ public IAsyncEnumerable> VectorizedSearchAsync l.FullName))}"); } - var searchOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._model.GetVectorPropertyOrSingle(searchOptions); + if (options.IncludeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } var mappedArray = SqliteVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); // Simulating skip/offset logic locally, since OFFSET can work only with LIMIT in combination // and LIMIT is not supported in vector search extension, instead of LIMIT - "k" parameter is used. - var limit = top + searchOptions.Skip; + var limit = top + options.Skip; var conditions = new List() { @@ -191,24 +251,24 @@ public IAsyncEnumerable> VectorizedSearchAsync? 
extraParameters = null; - if (searchOptions.OldFilter is not null) + if (options.OldFilter is not null) { - if (searchOptions.Filter is not null) + if (options.Filter is not null) { throw new ArgumentException("Either Filter or OldFilter can be specified, but not both"); } // Old filter, we translate it to a list of SqliteWhereCondition, and merge these into the conditions we already have - var filterConditions = this.GetFilterConditions(searchOptions.OldFilter, this._dataTableName); + var filterConditions = this.GetFilterConditions(options.OldFilter, this._dataTableName); if (filterConditions is { Count: > 0 }) { conditions.AddRange(filterConditions); } } - else if (searchOptions.Filter is not null) + else if (options.Filter is not null) { - SqliteFilterTranslator translator = new(this._model, searchOptions.Filter); + SqliteFilterTranslator translator = new(this._model, options.Filter); translator.Translate(appendWhere: false); extraWhereFilter = translator.Clause.ToString(); extraParameters = translator.Parameters; @@ -219,10 +279,18 @@ public IAsyncEnumerable> VectorizedSearchAsync + [Obsolete("Use either SearchEmbeddingAsync to search directly on embeddings, or SearchAsync to handle embedding generation internally as part of the call.")] + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + where TVector : notnull + => this.SearchEmbeddingAsync(vector, top, options, cancellationToken); + + #endregion Search + /// public async IAsyncEnumerable GetAsync(Expression> filter, int top, GetFilteredRecordOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { @@ -231,6 +299,11 @@ public async IAsyncEnumerable GetAsync(Expression> options ??= new(); + if (options.IncludeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + SqliteFilterTranslator translator = new(this._model, filter); translator.Translate(appendWhere: false); @@ -329,6 +402,32 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati const string OperationName = "Upsert"; + IReadOnlyList?[]? generatedEmbeddings = null; + + var vectorPropertyCount = this._model.VectorProperties.Count; + for (var i = 0; i < vectorPropertyCount; i++) + { + var vectorProperty = this._model.VectorProperties[i]; + + if (vectorProperty.EmbeddingGenerator is null) + { + continue; + } + + // TODO: Ideally we'd group together vector properties using the same generator (and with the same input and output properties), + // and generate embeddings for them in a single batch. That's some more complexity though. 
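            // Editorial note (illustrative, not part of the patch): generatedEmbeddings is allocated lazily and is
            // parallel to this._model.VectorProperties, so generatedEmbeddings[i] holds the embeddings produced for
            // the i-th vector property, and stays null for properties without a configured generator, whose values
            // are then taken from the record as before. In this single-record path each populated slot is a
            // one-element list, which is why the mapper is invoked below with recordIndex: 0.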
+ if (vectorProperty.TryGenerateEmbedding, ReadOnlyMemory>(record, cancellationToken, out var floatTask)) + { + generatedEmbeddings ??= new IReadOnlyList?[vectorPropertyCount]; + generatedEmbeddings[i] = [await floatTask.ConfigureAwait(false)]; + } + else + { + throw new InvalidOperationException( + $"The embedding generator configured on property '{vectorProperty.ModelName}' cannot produce an embedding of type '{typeof(Embedding).Name}' for the given input type."); + } + } + using var connection = await this.GetConnectionAsync(cancellationToken).ConfigureAwait(false); var storageModel = VectorStoreErrorHandler.RunModelConversion( @@ -336,7 +435,7 @@ public async Task UpsertAsync(TRecord record, CancellationToken cancellati this._collectionMetadata.VectorStoreName, this.Name, OperationName, - () => this._mapper.MapFromDataToStorageModel(record)); + () => this._mapper.MapFromDataToStorageModel(record, recordIndex: 0, generatedEmbeddings)); var key = storageModel[this._keyStorageName]; @@ -357,12 +456,55 @@ public async Task> UpsertAsync(IEnumerable records, const string OperationName = "UpsertBatch"; - var storageModels = records.Select(record => VectorStoreErrorHandler.RunModelConversion( + IReadOnlyList? recordsList = null; + + // If an embedding generator is defined, invoke it once per property for all records. + IReadOnlyList?[]? generatedEmbeddings = null; + + var vectorPropertyCount = this._model.VectorProperties.Count; + for (var i = 0; i < vectorPropertyCount; i++) + { + var vectorProperty = this._model.VectorProperties[i]; + + if (vectorProperty.EmbeddingGenerator is null) + { + continue; + } + + // We have a property with embedding generation; materialize the records' enumerable if needed, to + // prevent multiple enumeration. + if (recordsList is null) + { + recordsList = records is IReadOnlyList r ? r : records.ToList(); + + if (recordsList.Count == 0) + { + return []; + } + + records = recordsList; + } + + // TODO: Ideally we'd group together vector properties using the same generator (and with the same input and output properties), + // and generate embeddings for them in a single batch. That's some more complexity though. 
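            // Editorial note (illustrative, not part of the patch): in this batch path the generator is invoked once
            // per vector property over the whole materialized records list, so generatedEmbeddings is effectively
            // indexed as [vectorPropertyIndex][recordIndex]. The Select further down passes each record's position
            // as the record index so the mapper can pick the embedding that belongs to that record.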
+ if (vectorProperty.TryGenerateEmbeddings, ReadOnlyMemory>(records, cancellationToken, out var floatTask)) + { + generatedEmbeddings ??= new IReadOnlyList?[vectorPropertyCount]; + generatedEmbeddings[i] = (IReadOnlyList>)await floatTask.ConfigureAwait(false); + } + else + { + throw new InvalidOperationException( + $"The embedding generator configured on property '{vectorProperty.ModelName}' cannot produce an embedding of type '{typeof(Embedding).Name}' for the given input type."); + } + } + + var storageModels = VectorStoreErrorHandler.RunModelConversion( SqliteConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this.Name, OperationName, - () => this._mapper.MapFromDataToStorageModel(record))).ToList(); + () => records.Select((r, i) => this._mapper.MapFromDataToStorageModel(r, i, generatedEmbeddings)).ToList()); if (storageModels.Count == 0) { @@ -532,6 +674,11 @@ private async IAsyncEnumerable InternalGetBatchAsync( if (includeVectors) { + if (this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } + command = SqliteVectorStoreCollectionCommandBuilder.BuildSelectLeftJoinCommand( connection, this._vectorTableName, diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs index 4735e119f292..c9dc17db4919 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordCollectionOptions.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.Sqlite; @@ -39,4 +40,9 @@ public sealed class SqliteVectorStoreRecordCollectionOptions /// If not provided, collection name with prefix will be used as virtual table name. /// public string? VectorVirtualTableName { get; set; } = null; + + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs index a84276ee8c95..91fa5528b5de 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteVectorStoreRecordMapper.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; @@ -13,7 +14,7 @@ namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// The consumer data model to map to or from. internal sealed class SqliteVectorStoreRecordMapper(VectorStoreRecordModel model) { - public Dictionary MapFromDataToStorageModel(TRecord dataModel) + public Dictionary MapFromDataToStorageModel(TRecord dataModel, int recordIndex, IReadOnlyList?[]? 
generatedEmbeddings) { var properties = new Dictionary { @@ -25,18 +26,19 @@ internal sealed class SqliteVectorStoreRecordMapper(VectorStoreRecordMo properties.Add(property.StorageName, property.GetValueAsObject(dataModel!)); } - foreach (var property in model.VectorProperties) + for (var i = 0; i < model.VectorProperties.Count; i++) { - object? result = null; - var propertyValue = property.GetValueAsObject(dataModel!); + var property = model.VectorProperties[i]; + var vector = generatedEmbeddings?[i] is IReadOnlyList e ? ((Embedding)e[recordIndex]).Vector : property.GetValueAsObject(dataModel!); - if (propertyValue is not null) - { - var vector = (ReadOnlyMemory)propertyValue; - result = SqliteVectorStoreRecordPropertyMapping.MapVectorForStorageModel(vector); - } - - properties.Add(property.StorageName, result); + properties.Add( + property.StorageName, + vector switch + { + ReadOnlyMemory floats => SqliteVectorStoreRecordPropertyMapping.MapVectorForStorageModel(floats), + null => null, + _ => throw new InvalidOperationException($"Retrieved value for vector property '{property.StorageName}' which is not a ReadOnlyMemory ('{vector?.GetType().Name}').") + }); } return properties; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateMapper.cs index f70ee026dcd8..b0083d3073a5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateMapper.cs @@ -1,6 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; using System.Text.Json.Nodes; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.Weaviate; @@ -10,15 +12,10 @@ internal interface IWeaviateMapper /// /// Maps from the consumer record data model to the storage model. /// - /// The consumer record data model record to map. - /// The mapped result. - JsonObject MapFromDataToStorageModel(TRecord dataModel); + JsonObject MapFromDataToStorageModel(TRecord dataModel, int recordIndex, IReadOnlyList?[]? generatedEmbeddings); /// /// Maps from the storage model to the consumer record data model. /// - /// The storage data model record to map. - /// Options to control the mapping behavior. - /// The mapped result. TRecord MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateDynamicDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateDynamicDataModelMapper.cs index e80d34becf67..5fdce5df16f8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateDynamicDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateDynamicDataModelMapper.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Text.Json; using System.Text.Json.Nodes; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; @@ -52,7 +53,7 @@ public WeaviateDynamicDataModelMapper( WeaviateConstants.ReservedSingleVectorPropertyName; } - public JsonObject MapFromDataToStorageModel(Dictionary dataModel) + public JsonObject MapFromDataToStorageModel(Dictionary dataModel, int recordIndex, IReadOnlyList?[]? 
generatedEmbeddings) { Verify.NotNull(dataModel); @@ -77,24 +78,35 @@ public JsonObject MapFromDataToStorageModel(Dictionary dataMode if (this._hasNamedVectors) { vectorNode = new JsonObject(); - foreach (var property in this._model.VectorProperties) + + for (var i = 0; i < this._model.VectorProperties.Count; i++) { - if (dataModel.TryGetValue(property.ModelName, out var vectorValue)) + var property = this._model.VectorProperties[i]; + + var vectorValue = generatedEmbeddings?[i] switch { - vectorNode[property.StorageName] = vectorValue is null - ? null - : JsonSerializer.SerializeToNode(vectorValue, property.Type, this._jsonSerializerOptions); - } + IReadOnlyList> e => e[recordIndex].Vector, + IReadOnlyList> e => e[recordIndex].Vector, + null => dataModel.TryGetValue(property.ModelName, out var v) ? v : null, + _ => throw new NotSupportedException($"Unsupported embedding type '{generatedEmbeddings?[i]?.GetType().Name}' for property '{property.ModelName}'.") + }; + + vectorNode[property.StorageName] = vectorValue is null + ? null + : JsonSerializer.SerializeToNode(vectorValue, property.EmbeddingType, this._jsonSerializerOptions); } } else { - if (dataModel.TryGetValue(this._model.VectorProperty.ModelName, out var vectorValue)) - { - vectorNode = vectorValue is null - ? null - : JsonSerializer.SerializeToNode(vectorValue, this._model.VectorProperty.Type, this._jsonSerializerOptions); - } + var vectorValue = generatedEmbeddings?[0] is IReadOnlyList> e + ? e[recordIndex].Vector + : dataModel.TryGetValue(this._model.VectorProperty.ModelName, out var v) + ? v + : null; + + vectorNode = vectorValue is null + ? null + : JsonSerializer.SerializeToNode(vectorValue, this._model.VectorProperty.EmbeddingType, this._jsonSerializerOptions); } return new JsonObject diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateKernelBuilderExtensions.cs index 01ada6ecd6fd..23f8b0881ee5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateKernelBuilderExtensions.cs @@ -36,7 +36,7 @@ public static IKernelBuilder AddWeaviateVectorStore( } /// - /// Register a Weaviate and with the specified service ID. + /// Register a Weaviate and with the specified service ID. /// /// The type of the record. /// The builder to register the on. diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateServiceCollectionExtensions.cs index b0c60fba5088..f277b48ec34d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateServiceCollectionExtensions.cs @@ -2,6 +2,7 @@ using System; using System.Net.Http; +using Microsoft.Extensions.AI; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Weaviate; @@ -37,7 +38,10 @@ public static IServiceCollection AddWeaviateVectorStore( (sp, obj) => { var selectedHttpClient = HttpClientProvider.GetHttpClient(httpClient, sp); - var selectedOptions = options ?? sp.GetService(); + options ??= sp.GetService() ?? 
new() + { + EmbeddingGenerator = sp.GetService() + }; return new WeaviateVectorStore(selectedHttpClient, options); }); @@ -45,7 +49,7 @@ public static IServiceCollection AddWeaviateVectorStore( } /// - /// Register a Weaviate and with the specified service ID. + /// Register a Weaviate and with the specified service ID. /// /// The type of the record. /// The to register the on. @@ -71,9 +75,12 @@ public static IServiceCollection AddWeaviateVectorStoreRecordCollection (sp, obj) => { var selectedHttpClient = HttpClientProvider.GetHttpClient(httpClient, sp); - var selectedOptions = options ?? sp.GetService>(); + options ??= sp.GetService>() ?? new() + { + EmbeddingGenerator = sp.GetService() + }; - return new WeaviateVectorStoreRecordCollection(selectedHttpClient, collectionName, selectedOptions); + return new WeaviateVectorStoreRecordCollection(selectedHttpClient, collectionName, options); }); AddVectorizedSearch(services, serviceId); @@ -82,14 +89,14 @@ public static IServiceCollection AddWeaviateVectorStoreRecordCollection } /// - /// Also register the with the given as a . + /// Also register the with the given as a . /// /// The type of the data model that the collection should contain. /// The service collection to register on. /// The service id that the registrations should use. private static void AddVectorizedSearch(IServiceCollection services, string? serviceId) where TRecord : notnull { - services.AddKeyedTransient>( + services.AddKeyedTransient>( serviceId, (sp, obj) => { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs index 9daedf433330..98e8ca2a84f1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs @@ -77,7 +77,8 @@ public IVectorStoreRecordCollection GetCollection( VectorStoreRecordDefinition = vectorStoreRecordDefinition, Endpoint = this._options.Endpoint, ApiKey = this._options.ApiKey, - HasNamedVectors = this._options.HasNamedVectors + HasNamedVectors = this._options.HasNamedVectors, + EmbeddingGenerator = this._options.EmbeddingGenerator }) as IVectorStoreRecordCollection; return recordCollection; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs index 9d955f8d48a6..c4b048c6fe4a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using Microsoft.Extensions.AI; namespace Microsoft.SemanticKernel.Connectors.Weaviate; @@ -34,4 +35,9 @@ public sealed class WeaviateVectorStoreOptions /// . /// public bool HasNamedVectors { get; set; } = true; + + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? 
EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs index 997189a8731d..91d76424dc0c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -13,8 +13,10 @@ using System.Text.Json.Nodes; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.Properties; namespace Microsoft.SemanticKernel.Connectors.Weaviate; @@ -104,7 +106,7 @@ public WeaviateVectorStoreRecordCollection( this._options = options ?? new(); this._apiKey = this._options.ApiKey; this._model = new WeaviateModelBuilder(this._options.HasNamedVectors) - .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, s_jsonSerializerOptions); + .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator, s_jsonSerializerOptions); // Assign mapper. this._mapper = typeof(TRecord) == typeof(Dictionary) @@ -234,17 +236,16 @@ public Task DeleteAsync(IEnumerable keys, CancellationToken cancellationTo { const string OperationName = "GetCollectionObject"; - return this.RunOperationAsync(OperationName, async () => + var guid = key as Guid? ?? throw new InvalidCastException("Only Guid keys are supported"); + var includeVectors = options?.IncludeVectors is true; + if (includeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) { - var guid = key switch - { - Guid g => g, - object o => (Guid)o, - _ => throw new UnreachableException("Guid key should have been validated during model building") - }; + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } - var includeVectors = options?.IncludeVectors is true; - var request = new WeaviateGetCollectionObjectRequest(this.Name, guid, includeVectors).Build(); + return this.RunOperationAsync(OperationName, async () => + { + using var request = new WeaviateGetCollectionObjectRequest(this.Name, guid, includeVectors).Build(); var jsonObject = await this.ExecuteRequestWithNotFoundHandlingAsync(request, cancellationToken).ConfigureAwait(false); @@ -298,12 +299,60 @@ public async Task> UpsertAsync(IEnumerable records, Verify.NotNull(records); - var jsonObjects = records.Select(record => VectorStoreErrorHandler.RunModelConversion( + IReadOnlyList? recordsList = null; + + // If an embedding generator is defined, invoke it once per property for all records. + IReadOnlyList?[]? generatedEmbeddings = null; + + var vectorPropertyCount = this._model.VectorProperties.Count; + for (var i = 0; i < vectorPropertyCount; i++) + { + var vectorProperty = this._model.VectorProperties[i]; + + if (vectorProperty.EmbeddingGenerator is null) + { + continue; + } + + // We have a property with embedding generation; materialize the records' enumerable if needed, to + // prevent multiple enumeration. + if (recordsList is null) + { + recordsList = records is IReadOnlyList r ? 
r : records.ToList(); + + if (recordsList.Count == 0) + { + return []; + } + + records = recordsList; + } + + // TODO: Ideally we'd group together vector properties using the same generator (and with the same input and output properties), + // and generate embeddings for them in a single batch. That's some more complexity though. + if (vectorProperty.TryGenerateEmbeddings, ReadOnlyMemory>(records, cancellationToken, out var floatTask)) + { + generatedEmbeddings ??= new IReadOnlyList?[vectorPropertyCount]; + generatedEmbeddings[i] = (IReadOnlyList>)await floatTask.ConfigureAwait(false); + } + else if (vectorProperty.TryGenerateEmbeddings, ReadOnlyMemory>(records, cancellationToken, out var doubleTask)) + { + generatedEmbeddings ??= new IReadOnlyList?[vectorPropertyCount]; + generatedEmbeddings[i] = await doubleTask.ConfigureAwait(false); + } + else + { + throw new InvalidOperationException( + $"The embedding generator configured on property '{vectorProperty.ModelName}' cannot produce an embedding of type '{typeof(Embedding).Name}' for the given input type."); + } + } + + var jsonObjects = records.Select((record, i) => VectorStoreErrorHandler.RunModelConversion( WeaviateConstants.VectorStoreSystemName, this._collectionMetadata.VectorStoreName, this.Name, OperationName, - () => this._mapper.MapFromDataToStorageModel(record))).ToList(); + () => this._mapper.MapFromDataToStorageModel(record, i, generatedEmbeddings))).ToList(); if (jsonObjects.Count == 0) { @@ -333,20 +382,88 @@ public async Task> UpsertAsync(IEnumerable records, return keys; } + #region Search + /// - public IAsyncEnumerable> VectorizedSearchAsync( + public async IAsyncEnumerable> SearchAsync( + TInput value, + int top, + VectorSearchOptions? options = default, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + where TInput : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + switch (vectorProperty.EmbeddingGenerator) + { + case IEmbeddingGenerator> generator: + { + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + } + + case IEmbeddingGenerator> generator: + { + var embedding = await generator.GenerateEmbeddingAsync(value, new() { Dimensions = vectorProperty.Dimensions }, cancellationToken).ConfigureAwait(false); + + await foreach (var record in this.SearchCoreAsync(embedding.Vector, top, vectorProperty, operationName: "Search", options, cancellationToken).ConfigureAwait(false)) + { + yield return record; + } + + yield break; + } + + case null: + throw new InvalidOperationException(VectorDataStrings.NoEmbeddingGeneratorWasConfiguredForSearch); + + default: + throw new InvalidOperationException( + WeaviateModelBuilder.s_supportedVectorTypes.Contains(typeof(TInput)) + ? string.Format(VectorDataStrings.EmbeddingTypePassedToSearchAsync) + : string.Format(VectorDataStrings.IncompatibleEmbeddingGeneratorWasConfiguredForInputType, typeof(TInput).Name, vectorProperty.EmbeddingGenerator.GetType().Name)); + } + } + + /// + public IAsyncEnumerable> SearchEmbeddingAsync( TVector vector, int top, VectorSearchOptions? 
options = null, CancellationToken cancellationToken = default) + where TVector : notnull + { + options ??= s_defaultVectorSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(options); + + return this.SearchCoreAsync(vector, top, vectorProperty, operationName: "SearchEmbedding", options, cancellationToken); + } + + private IAsyncEnumerable> SearchCoreAsync( + TVector vector, + int top, + VectorStoreRecordVectorPropertyModel vectorProperty, + string operationName, + VectorSearchOptions options, + CancellationToken cancellationToken = default) + where TVector : notnull { const string OperationName = "VectorSearch"; VerifyVectorParam(vector); Verify.NotLessThan(top, 1); - var searchOptions = options ?? s_defaultVectorSearchOptions; - var vectorProperty = this._model.GetVectorPropertyOrSingle(searchOptions); + if (options.IncludeVectors && this._model.VectorProperties.Any(p => p.EmbeddingGenerator is not null)) + { + throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); + } var query = WeaviateVectorStoreRecordCollectionQueryBuilder.BuildSearchQuery( vector, @@ -354,13 +471,21 @@ public IAsyncEnumerable> VectorizedSearchAsync + [Obsolete("Use either SearchEmbeddingAsync to search directly on embeddings, or SearchAsync to handle embedding generation internally as part of the call.")] + public IAsyncEnumerable> VectorizedSearchAsync(TVector vector, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + where TVector : notnull + => this.SearchEmbeddingAsync(vector, top, options, cancellationToken); + + #endregion Search + /// public IAsyncEnumerable GetAsync(Expression> filter, int top, GetFilteredRecordOptions? options = null, CancellationToken cancellationToken = default) @@ -390,9 +515,9 @@ public IAsyncEnumerable> HybridSearchAsync( VerifyVectorParam(vector); Verify.NotLessThan(top, 1); - var searchOptions = options ?? 
s_defaultKeywordVectorizedHybridSearchOptions; - var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = searchOptions.VectorProperty }); - var textDataProperty = this._model.GetFullTextDataPropertyOrSingle(searchOptions.AdditionalProperty); + options ??= s_defaultKeywordVectorizedHybridSearchOptions; + var vectorProperty = this._model.GetVectorPropertyOrSingle(new() { VectorProperty = options.VectorProperty }); + var textDataProperty = this._model.GetFullTextDataPropertyOrSingle(options.AdditionalProperty); var query = WeaviateVectorStoreRecordCollectionQueryBuilder.BuildHybridSearchQuery( vector, @@ -403,10 +528,10 @@ public IAsyncEnumerable> HybridSearchAsync( vectorProperty, textDataProperty, s_jsonSerializerOptions, - searchOptions, + options, this._options.HasNamedVectors); - return this.ExecuteQueryAsync(query, searchOptions.IncludeVectors, WeaviateConstants.HybridScorePropertyName, OperationName, cancellationToken); + return this.ExecuteQueryAsync(query, options.IncludeVectors, WeaviateConstants.HybridScorePropertyName, OperationName, cancellationToken); } /// @@ -488,9 +613,9 @@ private async Task ExecuteRequestAsync( private async Task<(TResponse?, string)> ExecuteRequestWithResponseContentAsync(HttpRequestMessage request, CancellationToken cancellationToken) { var response = await this.ExecuteRequestAsync(request, cancellationToken: cancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); var responseContent = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - var responseModel = JsonSerializer.Deserialize(responseContent, s_jsonSerializerOptions); return (responseModel, responseContent); diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs index 60becffbaad1..572203816faf 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs @@ -2,6 +2,7 @@ using System; using System.Text.Json.Nodes; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; namespace Microsoft.SemanticKernel.Connectors.Weaviate; @@ -46,4 +47,9 @@ public sealed class WeaviateVectorStoreRecordCollectionOptions /// . /// public bool HasNamedVectors { get; set; } = true; + + /// + /// Gets or sets the default embedding generator to use when generating vectors embeddings with this vector store. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs index 1ecf80ad651d..c19eb908a6ff 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs @@ -1,16 +1,17 @@ // Copyright (c) Microsoft. All rights reserved. 
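// Editorial note (illustrative, not part of this file): with the EmbeddingGenerator option introduced in the
// options hunk just above, a consumer can let the Weaviate connector build vectors itself at upsert and search
// time instead of supplying them on the record. A rough sketch with hypothetical names (Hotel, generator),
// assuming an IEmbeddingGenerator<string, Embedding<float>> is available:
//
//     services.AddWeaviateVectorStoreRecordCollection<Hotel>(
//         "Hotels",
//         options: new() { HasNamedVectors = true, EmbeddingGenerator = generator });
//
//     // Upserts then generate one embedding per vector property, and SearchAsync can take raw text:
//     //     await foreach (var match in collection.SearchAsync("pet friendly hotel near the beach", top: 5)) { ... }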
+using System.Collections.Generic; using System.Diagnostics; +using System.Linq; using System.Text.Json; using System.Text.Json.Nodes; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.SemanticKernel.Connectors.Weaviate; -#pragma warning disable CS0618 // IVectorStoreRecordMapper is obsolete internal sealed class WeaviateVectorStoreRecordMapper : IWeaviateMapper -#pragma warning restore CS0618 { private readonly string _collectionName; private readonly bool _hasNamedVectors; @@ -35,7 +36,7 @@ public WeaviateVectorStoreRecordMapper( WeaviateConstants.ReservedSingleVectorPropertyName; } - public JsonObject MapFromDataToStorageModel(TRecord dataModel) + public JsonObject MapFromDataToStorageModel(TRecord dataModel, int recordIndex, IReadOnlyList?[]? generatedEmbeddings) { Verify.NotNull(dataModel); @@ -67,24 +68,51 @@ public JsonObject MapFromDataToStorageModel(TRecord dataModel) // Populate vector properties. if (this._hasNamedVectors) { - foreach (var property in this._model.VectorProperties) + for (var i = 0; i < this._model.VectorProperties.Count; i++) { - var node = jsonNodeDataModel[property.StorageName]; + var property = this._model.VectorProperties[i]; - if (node is not null) + if (generatedEmbeddings?[i] is IReadOnlyList e) + { + weaviateObjectModel[this._vectorPropertyName]![property.StorageName] = e[recordIndex] switch + { + Embedding fe => JsonValue.Create(fe.Vector.ToArray()), + Embedding de => JsonValue.Create(de.Vector.ToArray()), + _ => throw new UnreachableException() + }; + } + else { - weaviateObjectModel[this._vectorPropertyName]![property.StorageName] = node.DeepClone(); + var node = jsonNodeDataModel[property.StorageName]; + + if (node is not null) + { + weaviateObjectModel[this._vectorPropertyName]![property.StorageName] = node.DeepClone(); + } } } } else { - var vectorProperty = this._model.VectorProperty; - var node = jsonNodeDataModel[vectorProperty.StorageName]; + var property = this._model.VectorProperty; - if (node is not null) + if (generatedEmbeddings?.Single() is IReadOnlyList e) { - weaviateObjectModel[this._vectorPropertyName] = node.DeepClone(); + weaviateObjectModel[this._vectorPropertyName] = e[recordIndex] switch + { + Embedding fe => JsonValue.Create(fe.Vector.ToArray()), + Embedding de => JsonValue.Create(de.Vector.ToArray()), + _ => throw new UnreachableException() + }; + } + else + { + var node = jsonNodeDataModel[property.StorageName]; + + if (node is not null) + { + weaviateObjectModel[this._vectorPropertyName] = node.DeepClone(); + } } } diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBDynamicDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBDynamicDataModelMapperTests.cs index 82ebe2cec560..6534c3dfef0d 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBDynamicDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBDynamicDataModelMapperTests.cs @@ -78,7 +78,7 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes() }; // Act - var storageModel = sut.MapFromDataToStorageModel(dataModel); + var storageModel = sut.MapFromDataToStorageModel(dataModel, generatedEmbeddings: null); // Assert Assert.Equal("key", storageModel["_id"]); @@ -127,7 +127,7 @@ public void MapFromDataToStorageModelMapsNullValues() var sut = new MongoDBDynamicDataModelMapper(model); // Act - var storageModel = sut.MapFromDataToStorageModel(dataModel); + var storageModel = 
sut.MapFromDataToStorageModel(dataModel, generatedEmbeddings: null); // Assert Assert.Equal(BsonNull.Value, storageModel["StringDataProp"]); @@ -251,7 +251,7 @@ public void MapFromDataToStorageModelSkipsMissingProperties() var sut = new MongoDBDynamicDataModelMapper(model); // Act - var storageModel = sut.MapFromDataToStorageModel(dataModel); + var storageModel = sut.MapFromDataToStorageModel(dataModel, generatedEmbeddings: null); // Assert Assert.Equal("key", (string?)storageModel["_id"]); @@ -287,5 +287,5 @@ public void MapFromStorageToDataModelSkipsMissingProperties() } private static VectorStoreRecordModel BuildModel(IReadOnlyList properties) - => new MongoDBModelBuilder().Build(typeof(Dictionary), new() { Properties = properties }); + => new MongoDBModelBuilder().Build(typeof(Dictionary), new() { Properties = properties }, defaultEmbeddingGenerator: null); } diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBServiceCollectionExtensionsTests.cs index 2a68b57ec293..1098ebfe1ee3 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBServiceCollectionExtensionsTests.cs @@ -84,7 +84,7 @@ private void AssertVectorStoreRecordCollectionCreated() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = serviceProvider.GetRequiredService>(); + var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs index 2cd0e0ff815c..3ba21bb058a0 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs @@ -27,7 +27,8 @@ public sealed class MongoDBVectorStoreCollectionSearchMappingTests new VectorStoreRecordKeyProperty("Property1", typeof(string)) { StoragePropertyName = "property_1" }, new VectorStoreRecordDataProperty("Property2", typeof(string)) { StoragePropertyName = "property_2" }, ] - }); + }, + defaultEmbeddingGenerator: null); [Fact] public void BuildFilterThrowsExceptionWithUnsupportedFilterClause() diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs index 5daa6af97e23..89774107b140 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordCollectionTests.cs @@ -490,8 +490,8 @@ await this.TestUpsertWithModelAsync( } [Theory] - [MemberData(nameof(VectorizedSearchVectorTypeData))] - public async Task VectorizedSearchThrowsExceptionWithInvalidVectorTypeAsync(object vector, bool exceptionExpected) + [MemberData(nameof(SearchEmbeddingVectorTypeData))] + public async Task SearchEmbeddingThrowsExceptionWithInvalidVectorTypeAsync(object vector, bool exceptionExpected) { // Arrange this.MockCollectionForSearch(); @@ -503,18 +503,18 @@ public async Task VectorizedSearchThrowsExceptionWithInvalidVectorTypeAsync(obje // Act & Assert if 
(exceptionExpected) { - await Assert.ThrowsAsync(async () => await sut.VectorizedSearchAsync(vector, top: 3).ToListAsync()); + await Assert.ThrowsAsync(async () => await sut.SearchEmbeddingAsync(vector, top: 3).ToListAsync()); } else { - Assert.NotNull(await sut.VectorizedSearchAsync(vector, top: 3).FirstOrDefaultAsync()); + Assert.NotNull(await sut.SearchEmbeddingAsync(vector, top: 3).FirstOrDefaultAsync()); } } [Theory] [InlineData("TestEmbedding1", "TestEmbedding1", 3, 3)] [InlineData("TestEmbedding2", "test_embedding_2", 4, 4)] - public async Task VectorizedSearchUsesValidQueryAsync( + public async Task SearchEmbeddingUsesValidQueryAsync( string? vectorPropertyName, string expectedVectorPropertyName, int actualTop, @@ -562,7 +562,7 @@ public async Task VectorizedSearchUsesValidQueryAsync( }; // Act - var actual = await sut.VectorizedSearchAsync(vector, top: actualTop, new() + var actual = await sut.SearchEmbeddingAsync(vector, top: actualTop, new() { VectorProperty = vectorSelector, }).FirstOrDefaultAsync(); @@ -578,7 +578,7 @@ public async Task VectorizedSearchUsesValidQueryAsync( } [Fact] - public async Task VectorizedSearchThrowsExceptionWithNonExistentVectorPropertyNameAsync() + public async Task SearchEmbeddingThrowsExceptionWithNonExistentVectorPropertyNameAsync() { // Arrange this.MockCollectionForSearch(); @@ -590,11 +590,11 @@ public async Task VectorizedSearchThrowsExceptionWithNonExistentVectorPropertyNa var options = new MEVD.VectorSearchOptions { VectorProperty = r => "non-existent-property" }; // Act & Assert - await Assert.ThrowsAsync(async () => await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3, options).FirstOrDefaultAsync()); + await Assert.ThrowsAsync(async () => await sut.SearchEmbeddingAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3, options).FirstOrDefaultAsync()); } [Fact] - public async Task VectorizedSearchReturnsRecordWithScoreAsync() + public async Task SearchEmbeddingReturnsRecordWithScoreAsync() { // Arrange this.MockCollectionForSearch(); @@ -604,7 +604,7 @@ public async Task VectorizedSearchReturnsRecordWithScoreAsync() "collection"); // Act - var result = await sut.VectorizedSearchAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3).FirstOrDefaultAsync(); + var result = await sut.SearchEmbeddingAsync(new ReadOnlyMemory([1f, 2f, 3f]), top: 3).FirstOrDefaultAsync(); // Assert Assert.NotNull(result); @@ -625,7 +625,7 @@ public async Task VectorizedSearchReturnsRecordWithScoreAsync() { [], 1 } }; - public static TheoryData VectorizedSearchVectorTypeData => new() + public static TheoryData SearchEmbeddingVectorTypeData => new() { { new ReadOnlyMemory([1f, 2f, 3f]), false }, { new ReadOnlyMemory([1f, 2f, 3f]), false }, diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordMapperTests.cs index d7a1b3607ad1..ea8ab15172f0 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreRecordMapperTests.cs @@ -32,7 +32,7 @@ public MongoDBVectorStoreRecordMapperTests() ] }; - this._sut = new(new MongoDBModelBuilder().Build(typeof(MongoDBHotelModel), definition)); + this._sut = new(new MongoDBModelBuilder().Build(typeof(MongoDBHotelModel), definition, defaultEmbeddingGenerator: null)); } [Fact] @@ -48,7 +48,7 @@ public void MapFromDataToStorageModelReturnsValidObject() }; // Act - var document = 
this._sut.MapFromDataToStorageModel(hotel); + var document = this._sut.MapFromDataToStorageModel(hotel, generatedEmbeddings: null); // Assert Assert.NotNull(document); diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeKernelBuilderExtensionsTests.cs index be8db092665b..7bc63973812c 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeKernelBuilderExtensionsTests.cs @@ -86,7 +86,7 @@ private void AssertVectorStoreRecordCollectionCreated() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = kernel.Services.GetRequiredService>(); + var vectorizedSearch = kernel.Services.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeServiceCollectionExtensionsTests.cs index b8ef24099c0c..191fd89c52b9 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/PineconeServiceCollectionExtensionsTests.cs @@ -85,7 +85,7 @@ private void AssertVectorStoreRecordCollectionCreated() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = serviceProvider.GetRequiredService>(); + var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresServiceCollectionExtensionsTests.cs index f667d86eee30..8bd047dab572 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresServiceCollectionExtensionsTests.cs @@ -51,7 +51,7 @@ public void AddVectorStoreRecordCollectionRegistersClass() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = serviceProvider.GetRequiredService>(); + var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs index c99110dfbd19..8cc59d2aec83 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreCollectionSqlBuilderTests.cs @@ -50,7 +50,7 @@ public void TestBuildCreateTableCommand(bool ifNotExists) ] }; - var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(Dictionary), recordDefinition); + var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(Dictionary), recordDefinition, defaultEmbeddingGenerator: null); var cmdInfo = PostgresSqlBuilder.BuildCreateTableCommand("public", "testcollection", model, ifNotExists: ifNotExists); @@ -279,7 +279,7 @@ public void TestBuildGetCommand() ] }; - var model = new 
VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(Dictionary), recordDefinition); + var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(Dictionary), recordDefinition, defaultEmbeddingGenerator: null); var key = 123; @@ -324,7 +324,7 @@ public void TestBuildGetBatchCommand() var keys = new List { 123, 124 }; - var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(Dictionary), recordDefinition); + var model = new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(Dictionary), recordDefinition, defaultEmbeddingGenerator: null); // Act var cmdInfo = PostgresSqlBuilder.BuildGetBatchCommand("public", "testcollection", model, keys, includeVectors: true); diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs index 9a9198b7669c..c3791b5bff85 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/PostgresVectorStoreRecordMapperTests.cs @@ -26,7 +26,7 @@ public void MapFromDataToStorageModelWithStringKeyReturnsValidStorageModel() var mapper = new PostgresVectorStoreRecordMapper>(model); // Act - var result = mapper.MapFromDataToStorageModel(dataModel); + var result = mapper.MapFromDataToStorageModel(dataModel, recordIndex: 0, generatedEmbeddings: null); // Assert Assert.Equal("key", result["Key"]); @@ -51,7 +51,7 @@ public void MapFromDataToStorageModelWithNumericKeyReturnsValidStorageModel() var mapper = new PostgresVectorStoreRecordMapper>(propertyReader); // Act - var result = mapper.MapFromDataToStorageModel(dataModel); + var result = mapper.MapFromDataToStorageModel(dataModel, recordIndex: 0, generatedEmbeddings: null); // Assert Assert.Equal(1L, result["Key"]); @@ -181,7 +181,7 @@ private static TestRecord GetRecord(TKey key) } private static VectorStoreRecordModel GetModel(VectorStoreRecordDefinition definition) - => new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(TRecord), definition); + => new VectorStoreRecordModelBuilder(PostgresConstants.ModelBuildingOptions).Build(typeof(TRecord), definition, defaultEmbeddingGenerator: null); #pragma warning disable CA1812 private sealed class TestRecord diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantKernelBuilderExtensionsTests.cs index d0f253b08bd8..2bd0b26ccfd7 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantKernelBuilderExtensionsTests.cs @@ -106,7 +106,7 @@ private void AssertVectorStoreRecordCollectionCreated() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = kernel.Services.GetRequiredService>(); + var vectorizedSearch = kernel.Services.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantServiceCollectionExtensionsTests.cs index 8452a2d2c65d..5e07567030f9 100644 --- 
a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantServiceCollectionExtensionsTests.cs @@ -106,7 +106,7 @@ private void AssertVectorStoreRecordCollectionCreated() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = serviceProvider.GetRequiredService>(); + var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs index abae6d8a4896..c2bdb2f76e24 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreCollectionSearchMappingTests.cs @@ -29,7 +29,8 @@ public class QdrantVectorStoreCollectionSearchMappingTests new VectorStoreRecordDataProperty("FieldName", typeof(string)) { StoragePropertyName = "storage_FieldName" }, new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) { StoragePropertyName = "storage_vector" }, ] - }); + }, + defaultEmbeddingGenerator: null); [Theory] [InlineData("string")] @@ -128,7 +129,8 @@ public void MapScoredPointToVectorSearchResultMapsResults() new VectorStoreRecordDataProperty("DataField", typeof(string)) { StoragePropertyName = "storage_DataField" }, new VectorStoreRecordVectorProperty("Embedding", typeof(ReadOnlyMemory), 10), ] - }); + }, + defaultEmbeddingGenerator: null); var mapper = new QdrantVectorStoreRecordMapper(model, hasNamedVectors: false); diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs index af1f43b6a321..c172129f83b7 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs @@ -25,11 +25,11 @@ public void MapsSinglePropsFromDataToStorageModelWithUlong(bool hasNamedVectors) // Arrange. var definition = CreateSinglePropsVectorStoreRecordDefinition(typeof(ulong)); var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors)) - .Build(typeof(SinglePropsModel), definition); + .Build(typeof(SinglePropsModel), definition, defaultEmbeddingGenerator: null); var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors); // Act. - var actual = sut.MapFromDataToStorageModel(CreateSinglePropsModel(5ul)); + var actual = sut.MapFromDataToStorageModel(CreateSinglePropsModel(5ul), recordIndex: 0, generatedEmbeddings: null); // Assert. Assert.NotNull(actual); @@ -55,11 +55,11 @@ public void MapsSinglePropsFromDataToStorageModelWithGuid(bool hasNamedVectors) // Arrange. var definition = CreateSinglePropsVectorStoreRecordDefinition(typeof(Guid)); var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors)) - .Build(typeof(SinglePropsModel), definition); + .Build(typeof(SinglePropsModel), definition, defaultEmbeddingGenerator: null); var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors); // Act. 
- var actual = sut.MapFromDataToStorageModel(CreateSinglePropsModel(Guid.Parse("11111111-1111-1111-1111-111111111111"))); + var actual = sut.MapFromDataToStorageModel(CreateSinglePropsModel(Guid.Parse("11111111-1111-1111-1111-111111111111")), recordIndex: 0, generatedEmbeddings: null); // Assert. Assert.NotNull(actual); @@ -78,7 +78,7 @@ public void MapsSinglePropsFromStorageToDataModelWithUlong(bool hasNamedVectors, // Arrange. var definition = CreateSinglePropsVectorStoreRecordDefinition(typeof(ulong)); var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors)) - .Build(typeof(SinglePropsModel), definition); + .Build(typeof(SinglePropsModel), definition, defaultEmbeddingGenerator: null); var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors); // Act. @@ -110,7 +110,7 @@ public void MapsSinglePropsFromStorageToDataModelWithGuid(bool hasNamedVectors, // Arrange. var definition = CreateSinglePropsVectorStoreRecordDefinition(typeof(Guid)); var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors)) - .Build(typeof(SinglePropsModel), definition); + .Build(typeof(SinglePropsModel), definition, defaultEmbeddingGenerator: null); var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors); // Act. @@ -138,12 +138,12 @@ public void MapsMultiPropsFromDataToStorageModelWithUlong() // Arrange. var definition = CreateMultiPropsVectorStoreRecordDefinition(typeof(ulong)); var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors: true)) - .Build(typeof(MultiPropsModel), definition); + .Build(typeof(MultiPropsModel), definition, defaultEmbeddingGenerator: null); var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors: true); // Act. - var actual = sut.MapFromDataToStorageModel(CreateMultiPropsModel(5ul)); + var actual = sut.MapFromDataToStorageModel(CreateMultiPropsModel(5ul), recordIndex: 0, generatedEmbeddings: null); // Assert. Assert.NotNull(actual); @@ -167,11 +167,11 @@ public void MapsMultiPropsFromDataToStorageModelWithGuid() // Arrange. var definition = CreateMultiPropsVectorStoreRecordDefinition(typeof(Guid)); var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors: true)) - .Build(typeof(MultiPropsModel), definition); + .Build(typeof(MultiPropsModel), definition, defaultEmbeddingGenerator: null); var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors: true); // Act. - var actual = sut.MapFromDataToStorageModel(CreateMultiPropsModel(Guid.Parse("11111111-1111-1111-1111-111111111111"))); + var actual = sut.MapFromDataToStorageModel(CreateMultiPropsModel(Guid.Parse("11111111-1111-1111-1111-111111111111")), recordIndex: 0, generatedEmbeddings: null); // Assert. Assert.NotNull(actual); @@ -197,7 +197,7 @@ public void MapsMultiPropsFromStorageToDataModelWithUlong(bool includeVectors) // Arrange. var definition = CreateMultiPropsVectorStoreRecordDefinition(typeof(ulong)); var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors: true)) - .Build(typeof(MultiPropsModel), definition); + .Build(typeof(MultiPropsModel), definition, defaultEmbeddingGenerator: null); var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors: true); // Act. @@ -236,7 +236,7 @@ public void MapsMultiPropsFromStorageToDataModelWithGuid(bool includeVectors) // Arrange. 
var definition = CreateMultiPropsVectorStoreRecordDefinition(typeof(Guid)); var model = new VectorStoreRecordModelBuilder(QdrantVectorStoreRecordFieldMapping.GetModelBuildOptions(hasNamedVectors: true)) - .Build(typeof(MultiPropsModel), definition); + .Build(typeof(MultiPropsModel), definition, defaultEmbeddingGenerator: null); var sut = new QdrantVectorStoreRecordMapper>(model, hasNamedVectors: true); // Act. diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetDynamicDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetDynamicDataModelMapperTests.cs index 9392906c876c..c34eb89794c2 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetDynamicDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetDynamicDataModelMapperTests.cs @@ -49,7 +49,7 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes() }; // Act. - var storageModel = sut.MapFromDataToStorageModel(dataModel); + var storageModel = sut.MapFromDataToStorageModel(dataModel, recordIndex: 0, generatedEmbeddings: null); // Assert Assert.Equal("key", storageModel.Key); @@ -82,7 +82,7 @@ public void MapFromDataToStorageModelMapsNullValues() var sut = new RedisHashSetVectorStoreRecordMapper>(model); // Act - var storageModel = sut.MapFromDataToStorageModel(dataModel); + var storageModel = sut.MapFromDataToStorageModel(dataModel, recordIndex: 0, generatedEmbeddings: null); // Assert Assert.Equal("key", storageModel.Key); @@ -178,7 +178,7 @@ public void MapFromDataToStorageModelSkipsMissingProperties() var dataModel = new Dictionary { ["Key"] = "key" }; // Act. - var storageModel = sut.MapFromDataToStorageModel(dataModel); + var storageModel = sut.MapFromDataToStorageModel(dataModel, recordIndex: 0, generatedEmbeddings: null); // Assert Assert.Equal("key", storageModel.Key); @@ -208,5 +208,5 @@ public void MapFromStorageToDataModelSkipsMissingProperties() private static VectorStoreRecordModel BuildModel(VectorStoreRecordDefinition definition) => new VectorStoreRecordModelBuilder(RedisHashSetVectorStoreRecordCollection>.ModelBuildingOptions) - .Build(typeof(Dictionary), definition); + .Build(typeof(Dictionary), definition, defaultEmbeddingGenerator: null); } diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs index 042b44b27bcf..62a44c584cfa 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs @@ -16,7 +16,7 @@ public sealed class RedisHashSetVectorStoreRecordMapperTests { private static readonly VectorStoreRecordModel s_model = new VectorStoreRecordModelBuilder(RedisHashSetVectorStoreRecordCollection.ModelBuildingOptions) - .Build(typeof(AllTypesModel), RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition); + .Build(typeof(AllTypesModel), RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition, defaultEmbeddingGenerator: null); [Fact] public void MapsAllFieldsFromDataToStorageModel() @@ -25,7 +25,7 @@ public void MapsAllFieldsFromDataToStorageModel() var sut = new RedisHashSetVectorStoreRecordMapper(s_model); // Act. 
- var actual = sut.MapFromDataToStorageModel(CreateModel("test key")); + var actual = sut.MapFromDataToStorageModel(CreateModel("test key"), recordIndex: 0, generatedEmbeddings: null); // Assert. Assert.NotNull(actual.HashEntries); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonDynamicDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonDynamicDataModelMapperTests.cs index 9c0c65373138..936f3d1c8865 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonDynamicDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonDynamicDataModelMapperTests.cs @@ -33,7 +33,8 @@ private static readonly VectorStoreRecordModel s_model new VectorStoreRecordDataProperty("ComplexObjectData", typeof(ComplexObject)), new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory), 10), ] - }); + }, + defaultEmbeddingGenerator: null); [Fact] public void MapFromDataToStorageModelMapsAllSupportedTypes() @@ -51,7 +52,7 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes() }; // Act. - var storageModel = sut.MapFromDataToStorageModel(dataModel); + var storageModel = sut.MapFromDataToStorageModel(dataModel, recordIndex: 0, generatedEmbeddings: null); // Assert Assert.Equal("key", storageModel.Key); @@ -78,7 +79,7 @@ public void MapFromDataToStorageModelMapsNullValues() }; // Act. - var storageModel = sut.MapFromDataToStorageModel(dataModel); + var storageModel = sut.MapFromDataToStorageModel(dataModel, recordIndex: 0, generatedEmbeddings: null); // Assert Assert.Equal("key", storageModel.Key); @@ -149,7 +150,7 @@ public void MapFromDataToStorageModelSkipsMissingProperties() var dataModel = new Dictionary { ["Key"] = "key" }; // Act. - var storageModel = sut.MapFromDataToStorageModel(dataModel); + var storageModel = sut.MapFromDataToStorageModel(dataModel, recordIndex: 0, generatedEmbeddings: null); // Assert Assert.Equal("key", storageModel.Key); diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs index 37692a7eced5..3bc180a0b788 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonVectorStoreRecordMapperTests.cs @@ -21,11 +21,11 @@ public void MapsAllFieldsFromDataToStorageModel() { // Arrange. var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) - .Build(typeof(MultiPropsModel), null, JsonSerializerOptions.Default); + .Build(typeof(MultiPropsModel), vectorStoreRecordDefinition: null, defaultEmbeddingGenerator: null, JsonSerializerOptions.Default); var sut = new RedisJsonVectorStoreRecordMapper(model, JsonSerializerOptions.Default); // Act. - var actual = sut.MapFromDataToStorageModel(CreateModel("test key")); + var actual = sut.MapFromDataToStorageModel(CreateModel("test key"), recordIndex: 0, generatedEmbeddings: null); // Assert. Assert.NotNull(actual.Node); @@ -43,11 +43,11 @@ public void MapsAllFieldsFromDataToStorageModelWithCustomSerializerOptions() // Arrange. 
var jsonSerializerOptions = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) - .Build(typeof(MultiPropsModel), null, jsonSerializerOptions); + .Build(typeof(MultiPropsModel), vectorStoreRecordDefinition: null, defaultEmbeddingGenerator: null, jsonSerializerOptions); var sut = new RedisJsonVectorStoreRecordMapper(model, jsonSerializerOptions); // Act. - var actual = sut.MapFromDataToStorageModel(CreateModel("test key")); + var actual = sut.MapFromDataToStorageModel(CreateModel("test key"), recordIndex: 0, generatedEmbeddings: null); // Assert. Assert.NotNull(actual.Node); @@ -64,7 +64,7 @@ public void MapsAllFieldsFromStorageToDataModel() { // Arrange. var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) - .Build(typeof(MultiPropsModel), null, JsonSerializerOptions.Default); + .Build(typeof(MultiPropsModel), vectorStoreRecordDefinition: null, defaultEmbeddingGenerator: null, JsonSerializerOptions.Default); var sut = new RedisJsonVectorStoreRecordMapper(model, JsonSerializerOptions.Default); // Act. @@ -90,7 +90,7 @@ public void MapsAllFieldsFromStorageToDataModelWithCustomSerializerOptions() // Arrange. var jsonSerializerOptions = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; var model = new VectorStoreRecordJsonModelBuilder(RedisJsonVectorStoreRecordCollection.ModelBuildingOptions) - .Build(typeof(MultiPropsModel), null, jsonSerializerOptions); + .Build(typeof(MultiPropsModel), vectorStoreRecordDefinition: null, defaultEmbeddingGenerator: null, jsonSerializerOptions); var sut = new RedisJsonVectorStoreRecordMapper(model, jsonSerializerOptions); // Act. 
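Across the mapper test updates above, MapFromDataToStorageModel gains two parameters. A minimal, hypothetical usage sketch (mapper and record are placeholders; the parameter semantics are inferred from their names and from the embedding-generation changes later in this patch):

    // recordIndex appears to identify the record's position within a batch, and
    // generatedEmbeddings carries embeddings produced before mapping; the updated
    // tests pass 0 and null because no embedding generation is involved there.
    var storageModel = mapper.MapFromDataToStorageModel(record, recordIndex: 0, generatedEmbeddings: null);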
diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs index 47013e1bb361..c519ec4bf4bf 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs @@ -35,13 +35,14 @@ public void MapToSchemaCreatesSchema(bool useDollarPrefix) new VectorStoreRecordDataPropertyModel("NonFilterableString", typeof(string)), - new VectorStoreRecordVectorPropertyModel("VectorDefaultIndexingOptions", typeof(ReadOnlyMemory)) { Dimensions = 10 }, + new VectorStoreRecordVectorPropertyModel("VectorDefaultIndexingOptions", typeof(ReadOnlyMemory)) { Dimensions = 10, EmbeddingType = typeof(ReadOnlyMemory) }, new VectorStoreRecordVectorPropertyModel("VectorSpecificIndexingOptions", typeof(ReadOnlyMemory)) { Dimensions = 20, IndexKind = IndexKind.Flat, DistanceFunction = DistanceFunction.EuclideanSquaredDistance, - StorageName = "vector_specific_indexing_options" + StorageName = "vector_specific_indexing_options", + EmbeddingType = typeof(ReadOnlyMemory) } ]; diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs index 7895d4b09b65..f75a00a86354 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionSearchMappingTests.cs @@ -245,5 +245,6 @@ private static VectorStoreRecordModel BuildModel(List => new VectorStoreRecordModelBuilder(RedisHashSetVectorStoreRecordCollection.ModelBuildingOptions) .Build( typeof(Dictionary), - new() { Properties = properties }); + new() { Properties = properties }, + defaultEmbeddingGenerator: null); } diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteServiceCollectionExtensionsTests.cs index a3594df56cea..236fd84616f2 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteServiceCollectionExtensionsTests.cs @@ -42,7 +42,7 @@ public void AddVectorStoreRecordCollectionWithStringKeyRegistersClass() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = serviceProvider.GetRequiredService>(); + var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } @@ -60,7 +60,7 @@ public void AddVectorStoreRecordCollectionWithNumericKeyRegistersClass() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = serviceProvider.GetRequiredService>(); + var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs index 8227a85aa778..22948ed099bc 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs +++ 
b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreCollectionCommandBuilderTests.cs @@ -334,5 +334,6 @@ private static VectorStoreRecordModel BuildModel(List => new VectorStoreRecordModelBuilder(SqliteConstants.ModelBuildingOptions) .Build( typeof(Dictionary), - new() { Properties = properties }); + new() { Properties = properties }, + defaultEmbeddingGenerator: null); } diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs index 3f1f3a225001..4b023f7ba9d9 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/SqliteVectorStoreRecordMapperTests.cs @@ -25,7 +25,7 @@ public void MapFromDataToStorageModelWithStringKeyReturnsValidStorageModel() var mapper = new SqliteVectorStoreRecordMapper>(model); // Act - var result = mapper.MapFromDataToStorageModel(dataModel); + var result = mapper.MapFromDataToStorageModel(dataModel, recordIndex: 0, generatedEmbeddings: null); // Assert Assert.Equal("key", result["Key"]); @@ -49,7 +49,7 @@ public void MapFromDataToStorageModelWithNumericKeyReturnsValidStorageModel() var mapper = new SqliteVectorStoreRecordMapper>(model); // Act - var result = mapper.MapFromDataToStorageModel(dataModel); + var result = mapper.MapFromDataToStorageModel(dataModel, recordIndex: 0, generatedEmbeddings: null); // Assert Assert.Equal((ulong)1, result["Key"]); @@ -172,7 +172,7 @@ private static TestRecord GetDataModel(TKey key) } private static VectorStoreRecordModel BuildModel(Type type, VectorStoreRecordDefinition definition) - => new VectorStoreRecordModelBuilder(SqliteConstants.ModelBuildingOptions).Build(type, definition); + => new VectorStoreRecordModelBuilder(SqliteConstants.ModelBuildingOptions).Build(type, definition, defaultEmbeddingGenerator: null); #pragma warning disable CA1812 private sealed class TestRecord diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateDynamicDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateDynamicDataModelMapperTests.cs index e15b354cc4ee..772973fa47ac 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateDynamicDataModelMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateDynamicDataModelMapperTests.cs @@ -71,6 +71,7 @@ public sealed class WeaviateDynamicDataModelMapperTests new VectorStoreRecordVectorProperty("NullableDoubleVector", typeof(ReadOnlyMemory?), 10) ] }, + defaultEmbeddingGenerator: null, s_jsonSerializerOptions); private static readonly float[] s_floatVector = [1.0f, 2.0f, 3.0f]; @@ -121,7 +122,7 @@ public void MapFromDataToStorageModelMapsAllSupportedTypes() ; // Act - var storageModel = sut.MapFromDataToStorageModel(dataModel); + var storageModel = sut.MapFromDataToStorageModel(dataModel, recordIndex: 0, generatedEmbeddings: null); // Assert Assert.Equal(key, (Guid?)storageModel["id"]); @@ -187,7 +188,7 @@ public void MapFromDataToStorageModelMapsNullValues() var sut = new WeaviateDynamicDataModelMapper("Collection", HasNamedVectors, s_model, s_jsonSerializerOptions); // Act - var storageModel = sut.MapFromDataToStorageModel(dataModel); + var storageModel = sut.MapFromDataToStorageModel(dataModel, recordIndex: 0, generatedEmbeddings: null); // Assert Assert.Null(storageModel["StringDataProp"]); @@ -348,7 +349,7 @@ public void 
MapFromDataToStorageModelSkipsMissingProperties() ] }; - var model = new WeaviateModelBuilder(HasNamedVectors).Build(typeof(Dictionary), recordDefinition, s_jsonSerializerOptions); + var model = new WeaviateModelBuilder(HasNamedVectors).Build(typeof(Dictionary), recordDefinition, defaultEmbeddingGenerator: null, s_jsonSerializerOptions); var key = new Guid("55555555-5555-5555-5555-555555555555"); @@ -356,7 +357,7 @@ public void MapFromDataToStorageModelSkipsMissingProperties() var sut = new WeaviateDynamicDataModelMapper("Collection", HasNamedVectors, model, s_jsonSerializerOptions); // Act - var storageModel = sut.MapFromDataToStorageModel(record); + var storageModel = sut.MapFromDataToStorageModel(record, recordIndex: 0, generatedEmbeddings: null); // Assert Assert.Equal(key, (Guid?)storageModel["id"]); @@ -379,7 +380,7 @@ public void MapFromStorageToDataModelSkipsMissingProperties() ] }; - var model = new WeaviateModelBuilder(HasNamedVectors).Build(typeof(Dictionary), recordDefinition, s_jsonSerializerOptions); + var model = new WeaviateModelBuilder(HasNamedVectors).Build(typeof(Dictionary), recordDefinition, defaultEmbeddingGenerator: null, s_jsonSerializerOptions); var key = new Guid("55555555-5555-5555-5555-555555555555"); @@ -414,7 +415,7 @@ public void MapFromDataToStorageModelMapsNamedVectorsCorrectly(bool hasNamedVect ] }; - var model = new WeaviateModelBuilder(hasNamedVectors).Build(typeof(Dictionary), recordDefinition, s_jsonSerializerOptions); + var model = new WeaviateModelBuilder(hasNamedVectors).Build(typeof(Dictionary), recordDefinition, defaultEmbeddingGenerator: null, s_jsonSerializerOptions); var key = new Guid("55555555-5555-5555-5555-555555555555"); @@ -422,7 +423,7 @@ public void MapFromDataToStorageModelMapsNamedVectorsCorrectly(bool hasNamedVect var sut = new WeaviateDynamicDataModelMapper("Collection", hasNamedVectors, model, s_jsonSerializerOptions); // Act - var storageModel = sut.MapFromDataToStorageModel(record); + var storageModel = sut.MapFromDataToStorageModel(record, recordIndex: 0, generatedEmbeddings: null); // Assert var vectorProperty = hasNamedVectors ? 
storageModel["vectors"]!["floatVector"] : storageModel["vector"]; @@ -446,7 +447,7 @@ public void MapFromStorageToDataModelMapsNamedVectorsCorrectly(bool hasNamedVect ] }; - var model = new WeaviateModelBuilder(hasNamedVectors).Build(typeof(Dictionary), recordDefinition, s_jsonSerializerOptions); + var model = new WeaviateModelBuilder(hasNamedVectors).Build(typeof(Dictionary), recordDefinition, defaultEmbeddingGenerator: null, s_jsonSerializerOptions); var key = new Guid("55555555-5555-5555-5555-555555555555"); diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateKernelBuilderExtensionsTests.cs index 468cc3462a05..5b2b6eefc582 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateKernelBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateKernelBuilderExtensionsTests.cs @@ -52,7 +52,7 @@ public void AddWeaviateVectorStoreRecordCollectionRegistersClass() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = kernel.Services.GetRequiredService>(); + var vectorizedSearch = kernel.Services.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateServiceCollectionExtensionsTests.cs index 8c6afcd3ea4e..b63071e9eafb 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateServiceCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateServiceCollectionExtensionsTests.cs @@ -57,7 +57,7 @@ private void AssertVectorStoreRecordCollectionCreated() Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = serviceProvider.GetRequiredService>(); + var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs index 20f0d560c8ef..6a02f3db8c91 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs @@ -30,7 +30,8 @@ public void ItThrowsExceptionWithInvalidIndexKind() new VectorStoreRecordKeyProperty("Key", typeof(Guid)), new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) { IndexKind = "non-existent-index-kind" } ] - }); + }, + defaultEmbeddingGenerator: null); // Act & Assert Assert.Throws(() => WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", HasNamedVectors, model)); @@ -53,7 +54,8 @@ public void ItReturnsCorrectSchemaWithValidIndexKind(string indexKind, string ex new VectorStoreRecordKeyProperty("Key", typeof(Guid)), new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) { IndexKind = indexKind } ] - }); + }, + defaultEmbeddingGenerator: null); // Act var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", HasNamedVectors, model); @@ -77,7 +79,8 @@ public void ItThrowsExceptionWithUnsupportedDistanceFunction() new VectorStoreRecordKeyProperty("Key", 
typeof(Guid)), new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) { DistanceFunction = "unsupported-distance-function" } ] - }); + }, + defaultEmbeddingGenerator: null); // Act & Assert Assert.Throws(() => WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", HasNamedVectors, model)); @@ -102,7 +105,8 @@ public void ItReturnsCorrectSchemaWithValidDistanceFunction(string distanceFunct new VectorStoreRecordKeyProperty("Key", typeof(Guid)), new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) { DistanceFunction = distanceFunction } ] - }); + }, + defaultEmbeddingGenerator: null); // Act var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", HasNamedVectors, model); @@ -175,6 +179,7 @@ public void ItMapsPropertyCorrectly(Type propertyType, string expectedPropertyTy new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory), 10) ] }, + defaultEmbeddingGenerator: null, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }); // Act @@ -209,7 +214,8 @@ public void ItReturnsCorrectSchemaWithValidVectorConfiguration(bool hasNamedVect IndexKind = IndexKind.Hnsw } ] - }); + }, + defaultEmbeddingGenerator: null); // Act var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema(collectionName: "CollectionName", hasNamedVectors, model); diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs index 8bbdf51404e3..3bb70feda9e1 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionQueryBuilderTests.cs @@ -45,7 +45,8 @@ public sealed class WeaviateVectorStoreRecordCollectionQueryBuilderTests new VectorStoreRecordDataProperty("Tags", typeof(string[])) { StoragePropertyName = "tags" }, new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory), 10) { StoragePropertyName = "descriptionEmbeddding" }, ] - }); + }, + defaultEmbeddingGenerator: null); private readonly ReadOnlyMemory _vector = new([31f, 32f, 33f, 34f]); diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs index 4dc94227b174..99ad9d7e4660 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs @@ -376,10 +376,10 @@ public async Task ItUsesHttpClientParametersAsync(bool initializeOptions, string [Theory] [InlineData(true)] [InlineData(false)] - public async Task VectorizedSearchReturnsValidRecordAsync(bool includeVectors) + public async Task SearchEmbeddingReturnsValidRecordAsync(bool includeVectors) { // Arrange - const string CollectionName = "VectorizedSearchCollection"; + const string CollectionName = "SearchEmbeddingCollection"; var id = new Guid("55555555-5555-5555-5555-555555555555"); var vector = new ReadOnlyMemory([30f, 31f, 32f, 33f]); @@ -423,7 +423,7 @@ public async Task VectorizedSearchReturnsValidRecordAsync(bool includeVectors) var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, 
CollectionName); // Act - var results = await sut.VectorizedSearchAsync(vector, top: 3, new() + var results = await sut.SearchEmbeddingAsync(vector, top: 3, new() { IncludeVectors = includeVectors }).ToListAsync(); @@ -457,25 +457,25 @@ public async Task VectorizedSearchReturnsValidRecordAsync(bool includeVectors) } [Fact] - public async Task VectorizedSearchWithUnsupportedVectorTypeThrowsExceptionAsync() + public async Task SearchEmbeddingWithUnsupportedVectorTypeThrowsExceptionAsync() { // Arrange var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); // Act & Assert await Assert.ThrowsAsync(async () => - await sut.VectorizedSearchAsync(new List([1, 2, 3]), top: 3).ToListAsync()); + await sut.SearchEmbeddingAsync(new List([1, 2, 3]), top: 3).ToListAsync()); } [Fact] - public async Task VectorizedSearchWithNonExistentVectorPropertyNameThrowsExceptionAsync() + public async Task SearchEmbeddingWithNonExistentVectorPropertyNameThrowsExceptionAsync() { // Arrange var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); // Act & Assert await Assert.ThrowsAsync(async () => - await sut.VectorizedSearchAsync( + await sut.SearchEmbeddingAsync( new ReadOnlyMemory([1f, 2f, 3f]), top: 3, new() { VectorProperty = r => "non-existent-property" }) diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs index 8524c03ae574..f00d62b54255 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs @@ -45,7 +45,7 @@ public void MapFromDataToStorageModelReturnsValidObject(bool hasNamedVectors) var sut = GetMapper(hasNamedVectors); // Act - var document = sut.MapFromDataToStorageModel(hotel); + var document = sut.MapFromDataToStorageModel(hotel, recordIndex: 0, generatedEmbeddings: null); // Assert Assert.NotNull(document); @@ -118,6 +118,7 @@ public void MapFromStorageToDataModelReturnsValidObject(bool hasNamedVectors) new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory), 10) ] }, + defaultEmbeddingGenerator: null, s_jsonSerializerOptions), s_jsonSerializerOptions); diff --git a/dotnet/src/Connectors/Directory.Build.props b/dotnet/src/Connectors/Directory.Build.props index dd75c63ffd61..29561a81e526 100644 --- a/dotnet/src/Connectors/Directory.Build.props +++ b/dotnet/src/Connectors/Directory.Build.props @@ -4,6 +4,7 @@ $(NoWarn);MEVD9000,MEVD9001 + $(NoWarn);CA1863 \ No newline at end of file diff --git a/dotnet/src/Connectors/VectorData.Abstractions/.editorconfig b/dotnet/src/Connectors/VectorData.Abstractions/.editorconfig new file mode 100644 index 000000000000..acb2cb62caf4 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/.editorconfig @@ -0,0 +1,3 @@ +# Suppress missing documentation warnings for generated code (strings) +[*.Designer.cs] +dotnet_diagnostic.CS1591.severity = none diff --git a/dotnet/src/Connectors/VectorData.Abstractions/AssemblyInfo.cs b/dotnet/src/Connectors/VectorData.Abstractions/AssemblyInfo.cs deleted file mode 100644 index cbb67c1c8afd..000000000000 --- a/dotnet/src/Connectors/VectorData.Abstractions/AssemblyInfo.cs +++ /dev/null @@ -1 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
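The abstractions changes that follow add a collection-level IEmbeddingGenerator parameter to the model builders, which is why every Build call site above now passes defaultEmbeddingGenerator: null. A minimal sketch of the new call shape, with hypothetical placeholders (MyRecord, options and generator are not part of this patch):

    // Without embedding generation, callers simply pass null, as in the updated tests above.
    var model = new VectorStoreRecordModelBuilder(options)
        .Build(typeof(MyRecord), vectorStoreRecordDefinition: null, defaultEmbeddingGenerator: null);

    // With a generator supplied, a vector property declared with a non-embedding type
    // (for example a string property) is set up for embedding generation rather than
    // being treated as a stored embedding, per the builder changes below.
    var generatedModel = new VectorStoreRecordModelBuilder(options)
        .Build(typeof(MyRecord), vectorStoreRecordDefinition: null, defaultEmbeddingGenerator: generator);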
diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordJsonModelBuilder.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordJsonModelBuilder.cs index 5f283aad8e76..e5d5f3881d99 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordJsonModelBuilder.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordJsonModelBuilder.cs @@ -5,6 +5,7 @@ using System.Reflection; using System.Text.Json; using System.Text.Json.Serialization; +using Microsoft.Extensions.AI; namespace Microsoft.Extensions.VectorData.ConnectorSupport; @@ -32,14 +33,18 @@ public VectorStoreRecordJsonModelBuilder(VectorStoreRecordModelBuildingOptions o /// /// Builds and returns an from the given and . /// - public virtual VectorStoreRecordModel Build(Type type, VectorStoreRecordDefinition? vectorStoreRecordDefinition, JsonSerializerOptions? jsonSerializerOptions) + public virtual VectorStoreRecordModel Build( + Type type, + VectorStoreRecordDefinition? vectorStoreRecordDefinition, + IEmbeddingGenerator? defaultEmbeddingGenerator, + JsonSerializerOptions? jsonSerializerOptions) { if (jsonSerializerOptions is not null) { this._jsonSerializerOptions = jsonSerializerOptions; } - return this.Build(type, vectorStoreRecordDefinition); + return this.Build(type, vectorStoreRecordDefinition, defaultEmbeddingGenerator); } /// diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs index d2b53b7f46a2..9df86e0663d6 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModel.cs @@ -98,25 +98,22 @@ public TRecord CreateRecord() /// /// The search options. /// Thrown if the provided property name is not a valid vector property name. - public VectorStoreRecordVectorPropertyModel GetVectorPropertyOrSingle(VectorSearchOptions? searchOptions) + public VectorStoreRecordVectorPropertyModel GetVectorPropertyOrSingle(VectorSearchOptions searchOptions) { - if (searchOptions is not null) - { #pragma warning disable CS0618 // Type or member is obsolete - string? vectorPropertyName = searchOptions.VectorPropertyName; + string? vectorPropertyName = searchOptions.VectorPropertyName; #pragma warning restore CS0618 // Type or member is obsolete - // If vector property name is provided, try to find it in schema or throw an exception. - if (!string.IsNullOrWhiteSpace(vectorPropertyName)) - { - // Check vector properties by data model property name. - return this.VectorProperties.FirstOrDefault(p => p.ModelName == vectorPropertyName) - ?? throw new InvalidOperationException($"The {this._recordType.FullName} type does not have a vector property named '{vectorPropertyName}'."); - } - else if (searchOptions.VectorProperty is Expression> expression) - { - return this.GetMatchingProperty(expression, data: false); - } + // If vector property name is provided, try to find it in schema or throw an exception. + if (!string.IsNullOrWhiteSpace(vectorPropertyName)) + { + // Check vector properties by data model property name. + return this.VectorProperties.FirstOrDefault(p => p.ModelName == vectorPropertyName) + ?? 
throw new InvalidOperationException($"The {this._recordType.FullName} type does not have a vector property named '{vectorPropertyName}'."); + } + else if (searchOptions.VectorProperty is Expression> expression) + { + return this.GetMatchingProperty(expression, data: false); } // If vector property name is not provided, check if there is a single vector property, or throw if there are no vectors or more than one. diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs index 6eae33cc6ddc..bcb0bc2d38fb 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs @@ -7,6 +7,8 @@ using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Reflection; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.VectorData.Properties; namespace Microsoft.Extensions.VectorData.ConnectorSupport; @@ -48,6 +50,11 @@ public class VectorStoreRecordModelBuilder /// protected Dictionary PropertyMap { get; } = new(); + /// + /// The default embedding generator to use for vector properties, when none is specified at the property or collection level. + /// + protected IEmbeddingGenerator? DefaultEmbeddingGenerator { get; private set; } + /// /// Constructs a new . /// @@ -66,8 +73,10 @@ public VectorStoreRecordModelBuilder(VectorStoreRecordModelBuildingOptions optio /// [RequiresDynamicCode("Currently not compatible with NativeAOT code")] [RequiresUnreferencedCode("Currently not compatible with trimming")] // TODO - public virtual VectorStoreRecordModel Build(Type type, VectorStoreRecordDefinition? vectorStoreRecordDefinition) + public virtual VectorStoreRecordModel Build(Type type, VectorStoreRecordDefinition? vectorStoreRecordDefinition, IEmbeddingGenerator? defaultEmbeddingGenerator) { + this.DefaultEmbeddingGenerator = defaultEmbeddingGenerator; + var dynamicMapping = type == typeof(Dictionary); if (!dynamicMapping) @@ -144,12 +153,34 @@ protected virtual void ProcessTypeProperties(Type type, VectorStoreRecordDefinit throw new InvalidOperationException($"Only one of {nameof(VectorStoreRecordKeyAttribute)}, {nameof(VectorStoreRecordDataAttribute)} and {nameof(VectorStoreRecordVectorAttribute)} can be applied to a property."); } - var vectorProperty = new VectorStoreRecordVectorPropertyModel(clrProperty.Name, clrProperty.PropertyType) + // If a record definition exists for the property, we must instantiate it via that definition, as the user may be using + // a generic VectorStoreRecordVectorProperty for a custom input type. + var vectorProperty = vectorStoreRecordDefinition?.Properties.FirstOrDefault(p => p.DataModelPropertyName == clrProperty.Name) is VectorStoreRecordVectorProperty definitionVectorProperty + ? definitionVectorProperty.CreatePropertyModel() + : new VectorStoreRecordVectorPropertyModel(clrProperty.Name, clrProperty.PropertyType); + + vectorProperty.Dimensions = vectorAttribute.Dimensions; + vectorProperty.IndexKind = vectorAttribute.IndexKind; + vectorProperty.DistanceFunction = vectorAttribute.DistanceFunction; + + // If a default embedding generator is defined and the property type isn't an Embedding, we set up that embedding generator on the property. + // At this point we don't know the embedding type (it might get specified in the record definition, that's processed later). 
So we infer + // + // This also means that the property type is the input type (e.g. string, DataContent) rather than the embedding type. + // Since we need the property type to be the embedding type, we infer that from the generator. This allows users + // to just stick an IEmbeddingGenerator in DI, define a string property as their vector property, and as long as the embedding generator + // is compatible (supports string and ROM, assuming that's what the connector requires), everything just works. + // Note that inferring the embedding type from the IEmbeddingGenerator isn't trivial, involving both connector logic (around which embedding + // types are supported/preferred), as well as the vector property type (which knows about supported input types). + + if (this.DefaultEmbeddingGenerator is null || this.Options.SupportedVectorPropertyTypes.Contains(clrProperty.PropertyType)) { - Dimensions = vectorAttribute.Dimensions, - IndexKind = vectorAttribute.IndexKind, - DistanceFunction = vectorAttribute.DistanceFunction - }; + vectorProperty.EmbeddingType = clrProperty.PropertyType; + } + else + { + this.SetupEmbeddingGeneration(vectorProperty, this.DefaultEmbeddingGenerator, embeddingType: null); + } this.VectorProperties.Add(vectorProperty); storageName = vectorAttribute.StoragePropertyName; @@ -198,7 +229,7 @@ protected virtual void ProcessRecordDefinition( property = dataProperty; break; case VectorStoreRecordVectorProperty definitionVectorProperty: - var vectorProperty = new VectorStoreRecordVectorPropertyModel(definitionVectorProperty.DataModelPropertyName, definitionVectorProperty.PropertyType); + var vectorProperty = definitionVectorProperty.CreatePropertyModel(); this.VectorProperties.Add(vectorProperty); this.PropertyMap.Add(definitionVectorProperty.DataModelPropertyName, vectorProperty); property = vectorProperty; @@ -215,11 +246,12 @@ protected virtual void ProcessRecordDefinition( if (property.PropertyInfo.PropertyType != property.Type) { - throw new InvalidOperationException($"Property '{property.ModelName}' has a different CLR type in the record definition and on the CLR type."); + throw new InvalidOperationException($"Property '{property.ModelName}' has a different CLR type in the record definition ('{property.Type.Name}') and on the .NET property ('{property.PropertyInfo.PropertyType}')."); } } } + property.Type = definitionProperty.PropertyType; this.SetPropertyStorageName(property, definitionProperty.StoragePropertyName); switch (definitionProperty) @@ -264,6 +296,46 @@ protected virtual void ProcessRecordDefinition( vectorProperty.DistanceFunction = definitionVectorProperty.DistanceFunction; } + if (definitionVectorProperty.EmbeddingType is not null) + { + vectorProperty.EmbeddingType = definitionVectorProperty.EmbeddingType; + } + + // Check if embedding generation is configured, either on the property directly or via a default + IEmbeddingGenerator? embeddingGenerator = null; + + // Check if an embedding generator is defined specifically on the property. + if (definitionVectorProperty.EmbeddingGenerator is not null) + { + // If we have a property CLR type (POCO, not dynamic mapping) and it's an embedding type, throw as that's incompatible. 
+ if (this.Options.SupportedVectorPropertyTypes.Contains(property.Type)) + { + throw new InvalidOperationException( + string.Format( + VectorDataStrings.EmbeddingPropertyTypeIncompatibleWithEmbeddingGenerator, + property.ModelName, + property.Type.Name)); + } + + embeddingGenerator = definitionVectorProperty.EmbeddingGenerator; + } + // If a default embedding generator is defined (at the collection or store level), configure that on the property, but only if the property type is not an embedding type. + // If the property type is an embedding type, just ignore the default embedding generator. + else if ((vectorStoreRecordDefinition.EmbeddingGenerator ?? this.DefaultEmbeddingGenerator) is IEmbeddingGenerator defaultEmbeddingGenerator + && !this.Options.SupportedVectorPropertyTypes.Contains(property.Type)) + { + embeddingGenerator = vectorStoreRecordDefinition.EmbeddingGenerator ?? this.DefaultEmbeddingGenerator; + } + + if (embeddingGenerator is null) + { + // No embedding generation - the embedding type and the property (model) type are the same. + vectorProperty.EmbeddingType = property.Type; + } + else + { + this.SetupEmbeddingGeneration(vectorProperty, embeddingGenerator, vectorProperty.EmbeddingType); + } break; default: @@ -300,6 +372,26 @@ private void SetPropertyStorageName(VectorStoreRecordPropertyModel property, str : storageName; } + /// + /// Attempts to setup embedding generation on the given vector property, with the given embedding generator and user-configured embedding type. + /// Can be overridden by connectors to provide support for other embedding types. + /// + protected virtual void SetupEmbeddingGeneration( + VectorStoreRecordVectorPropertyModel vectorProperty, + IEmbeddingGenerator embeddingGenerator, + Type? embeddingType) + { + if (!vectorProperty.TrySetupEmbeddingGeneration, ReadOnlyMemory>(embeddingGenerator, embeddingType)) + { + throw new InvalidOperationException( + string.Format( + VectorDataStrings.IncompatibleEmbeddingGenerator, + embeddingGenerator.GetType().Name, + string.Join(", ", vectorProperty.GetSupportedInputTypes().Select(t => t.Name)), + "ReadOnlyMemory")); + } + } + /// /// Extension hook for connectors to be able to customize the model. /// @@ -359,6 +451,8 @@ protected virtual void ValidateProperty(VectorStoreRecordPropertyModel propertyM { var type = propertyModel.Type; + Debug.Assert(propertyModel.Type is not null); + if (type.IsGenericType && Nullable.GetUnderlyingType(type) is Type underlyingType) { type = underlyingType; @@ -381,9 +475,14 @@ protected virtual void ValidateProperty(VectorStoreRecordPropertyModel propertyM break; case VectorStoreRecordVectorPropertyModel vectorProperty: - if (this.Options.SupportedVectorPropertyTypes is not null) + Debug.Assert(vectorProperty.EmbeddingGenerator is null ^ vectorProperty.Type != vectorProperty.EmbeddingType); + + if (!this.Options.SupportedVectorPropertyTypes.Contains(vectorProperty.EmbeddingType)) { - ValidatePropertyType(propertyModel.ModelName, type, "Vector", this.Options.SupportedVectorPropertyTypes); + throw new InvalidOperationException( + vectorProperty.EmbeddingGenerator is null + ? 
string.Format(VectorDataStrings.NonEmbeddingVectorPropertyWithoutEmbeddingGenerator, vectorProperty.ModelName, vectorProperty.EmbeddingType.Name) + : string.Format(VectorDataStrings.EmbeddingGeneratorWithInvalidEmbeddingType, vectorProperty.ModelName, vectorProperty.EmbeddingType.Name)); } if (vectorProperty.Dimensions <= 0) diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuildingOptions.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuildingOptions.cs index 1e5fd8b230ca..958d24241537 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuildingOptions.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuildingOptions.cs @@ -46,7 +46,7 @@ public sealed class VectorStoreRecordModelBuildingOptions /// /// The set of types that are supported as vector properties. /// - public required HashSet? SupportedVectorPropertyTypes { get; init; } + public required HashSet SupportedVectorPropertyTypes { get; init; } /// /// Indicates that an external serializer will be used (e.g. System.Text.Json). diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs index 68160987b3ec..09a557664753 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel.cs @@ -1,7 +1,13 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Collections.Generic; +using System.Diagnostics; using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.AI; namespace Microsoft.Extensions.VectorData.ConnectorSupport; @@ -54,6 +60,147 @@ public int Dimensions /// public string? DistanceFunction { get; set; } + /// + /// If is set, contains the type representing the embedding stored in the database. + /// Otherwise, this property is identical to . + /// + public Type EmbeddingType { get; set; } = null!; + + /// + /// The embedding generator to use for this property. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; set; } + + /// + /// Checks whether the configured on this property supports the given embedding type. + /// The implementation on this non-generic checks for + /// and as input types for . + /// + public virtual bool TrySetupEmbeddingGeneration(IEmbeddingGenerator embeddingGenerator, Type? embeddingType) + where TEmbedding : Embedding + { + // On the TInput side, this out-of-the-box/simple implementation supports string and DataContent only + // (users who want arbitrary TInput types need to use the generic subclass of this type). + // The TEmbedding side is provided by the connector via the generic type parameter to this method, as the connector controls/knows which embedding types are supported. + // Note that if the user has manually specified an embedding type (e.g. to choose Embedding rather than the default Embedding), that's provided via the embeddingType argument; + // we use that as a filter below. 
+ switch (embeddingGenerator) + { + case IEmbeddingGenerator when this.Type == typeof(string) && (embeddingType is null || embeddingType == typeof(TUnwrappedEmbedding)): + case IEmbeddingGenerator when this.Type == typeof(DataContent) && (embeddingType is null || embeddingType == typeof(TUnwrappedEmbedding)): + this.EmbeddingGenerator = embeddingGenerator; + this.EmbeddingType = embeddingType ?? typeof(TUnwrappedEmbedding); + + return true; + + case null: + throw new UnreachableException("This method should only be called when an embedding generator is configured."); + default: + return false; + } + } + + /// + /// Attempts to generate an embedding of type from the vector property represented by this instance on the given , using + /// the configured . + /// + /// + /// + /// If supports the given , returns and sets to a + /// representing the embedding generation operation. If does not support the given , returns . + /// + /// + /// The implementation on this non-generic checks for + /// and as input types for . + /// + /// + public virtual bool TryGenerateEmbedding(TRecord record, CancellationToken cancellationToken, [NotNullWhen(true)] out Task? task) + where TRecord : notnull + where TEmbedding : Embedding + { + switch (this.EmbeddingGenerator) + { + case IEmbeddingGenerator generator when this.EmbeddingType == typeof(TUnwrappedEmbedding): + { + task = generator.GenerateEmbeddingAsync( + this.GetValueAsObject(record) is var value && value is string s + ? s + : throw new InvalidOperationException($"Property '{this.ModelName}' was configured with an embedding generator accepting a string, but {value?.GetType().Name ?? "null"} was provided."), + new() { Dimensions = this.Dimensions }, + cancellationToken); + return true; + } + + case IEmbeddingGenerator generator when this.EmbeddingType == typeof(TUnwrappedEmbedding): + { + task = generator.GenerateEmbeddingAsync( + this.GetValueAsObject(record) is var value && value is DataContent c + ? c + : throw new InvalidOperationException($"Property '{this.ModelName}' was configured with an embedding generator accepting a {nameof(DataContent)}, but {value?.GetType().Name ?? "null"} was provided."), + new() { Dimensions = this.Dimensions }, + cancellationToken); + return true; + } + + case null: + throw new UnreachableException("This method should only be called when an embedding generator is configured."); + + default: + task = null; + return false; + } + } + + /// + /// Attempts to generate embeddings of type from the vector property represented by this instance on the given , using + /// the configured . + /// + /// + /// + /// If supports the given , returns and sets to a + /// representing the embedding generation operation. If does not support the given , returns . + /// + /// + /// The implementation on this non-generic checks for + /// and as input types for . + /// + /// + public virtual bool TryGenerateEmbeddings(IEnumerable records, CancellationToken cancellationToken, [NotNullWhen(true)] out Task>? task) + where TRecord : notnull + where TEmbedding : Embedding + { + switch (this.EmbeddingGenerator) + { + case IEmbeddingGenerator generator when this.EmbeddingType == typeof(TUnwrappedEmbedding): + task = generator.GenerateAsync( + records.Select(r => this.GetValueAsObject(r) is var value && value is string s + ? s + : throw new InvalidOperationException($"Property '{this.ModelName}' was configured with an embedding generator accepting a string, but {value?.GetType().Name ?? 
"null"} was provided.")), + new() { Dimensions = this.Dimensions }, cancellationToken); + return true; + + case IEmbeddingGenerator generator when this.EmbeddingType == typeof(TUnwrappedEmbedding): + task = generator.GenerateAsync( + records.Select(r => this.GetValueAsObject(r) is var value && value is DataContent c + ? c + : throw new InvalidOperationException($"Property '{this.ModelName}' was configured with an embedding generator accepting a {nameof(DataContent)}, but {value?.GetType().Name ?? "null"} was provided.")), + new() { Dimensions = this.Dimensions }, cancellationToken); + return true; + + case null: + throw new UnreachableException("This method should only be called when an embedding generator is configured."); + + default: + task = null; + return false; + } + } + + /// + /// Returns the types of input that this property model supports. + /// + public virtual Type[] GetSupportedInputTypes() => [typeof(string), typeof(DataContent)]; + /// public override string ToString() => $"{this.ModelName} (Vector, {this.Type.Name})"; diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel{TInput}.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel{TInput}.cs new file mode 100644 index 000000000000..fdcb56c43e14 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordVectorPropertyModel{TInput}.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.AI; + +namespace Microsoft.Extensions.VectorData.ConnectorSupport; + +/// +[Experimental("MEVD9001")] +public sealed class VectorStoreRecordVectorPropertyModel(string modelName) : VectorStoreRecordVectorPropertyModel(modelName, typeof(TInput)) +{ + /// + public override bool TrySetupEmbeddingGeneration(IEmbeddingGenerator embeddingGenerator, Type? embeddingType) + { + switch (embeddingGenerator) + { + case IEmbeddingGenerator when this.Type == typeof(TInput) && (embeddingType is null || embeddingType == typeof(TUnwrappedEmbedding)): + this.EmbeddingGenerator = embeddingGenerator; + this.EmbeddingType = embeddingType ?? typeof(TUnwrappedEmbedding); + + return true; + + case null: + throw new UnreachableException("This method should only be called when an embedding generator is configured."); + default: + return false; + } + } + + /// + public override bool TryGenerateEmbedding(TRecord record, CancellationToken cancellationToken, [NotNullWhen(true)] out Task? task) + { + switch (this.EmbeddingGenerator) + { + case IEmbeddingGenerator generator when this.EmbeddingType == typeof(TUnwrappedEmbedding): + task = generator.GenerateEmbeddingAsync( + this.GetValueAsObject(record) is var value && value is TInput s + ? s + : throw new InvalidOperationException($"Property '{this.ModelName}' was configured with an embedding generator accepting a {nameof(TInput)}, but {value?.GetType().Name ?? 
"null"} was provided."), + new() { Dimensions = this.Dimensions }, + cancellationToken); + return true; + + case null: + throw new UnreachableException("This method should only be called when an embedding generator is configured."); + + default: + task = null; + return false; + } + } + + /// + public override bool TryGenerateEmbeddings(IEnumerable records, CancellationToken cancellationToken, [NotNullWhen(true)] out Task>? task) + { + switch (this.EmbeddingGenerator) + { + case IEmbeddingGenerator generator when this.EmbeddingType == typeof(TUnwrappedEmbedding): + task = generator.GenerateAsync( + records.Select(r => this.GetValueAsObject(r) is var value && value is TInput s + ? s + : throw new InvalidOperationException($"Property '{this.ModelName}' was configured with an embedding generator accepting a string, but {value?.GetType().Name ?? "null"} was provided.")), + new() { Dimensions = this.Dimensions }, cancellationToken); + return true; + + case null: + throw new UnreachableException("This method should only be called when an embedding generator is configured."); + + default: + task = null; + return false; + } + } + + /// + public override Type[] GetSupportedInputTypes() => [typeof(TInput)]; +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/Properties/AssemblyInfo.cs b/dotnet/src/Connectors/VectorData.Abstractions/Properties/AssemblyInfo.cs new file mode 100644 index 000000000000..09647faa37af --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/Properties/AssemblyInfo.cs @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft. All rights reserved. + +[assembly: System.Resources.NeutralResourcesLanguage("en-US")] diff --git a/dotnet/src/Connectors/VectorData.Abstractions/Properties/VectorDataStrings.Designer.cs b/dotnet/src/Connectors/VectorData.Abstractions/Properties/VectorDataStrings.Designer.cs new file mode 100644 index 000000000000..74eb4f806842 --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/Properties/VectorDataStrings.Designer.cs @@ -0,0 +1,121 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by a tool. +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. 
+// +//------------------------------------------------------------------------------ + +namespace Microsoft.Extensions.VectorData.Properties +{ + using System; + + + [System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")] + [System.Diagnostics.DebuggerNonUserCodeAttribute()] + [System.Runtime.CompilerServices.CompilerGeneratedAttribute()] + public class VectorDataStrings + { + + private static System.Resources.ResourceManager resourceMan; + + private static System.Globalization.CultureInfo resourceCulture; + + [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")] + internal VectorDataStrings() + { + } + + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Advanced)] + public static System.Resources.ResourceManager ResourceManager + { + get + { + if (object.Equals(null, resourceMan)) + { + System.Resources.ResourceManager temp = new System.Resources.ResourceManager("Microsoft.Extensions.VectorData.Properties.VectorDataStrings", typeof(VectorDataStrings).Assembly); + resourceMan = temp; + } + return resourceMan; + } + } + + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Advanced)] + public static System.Globalization.CultureInfo Culture + { + get + { + return resourceCulture; + } + set + { + resourceCulture = value; + } + } + + public static string IncompatibleEmbeddingGenerator + { + get + { + return ResourceManager.GetString("IncompatibleEmbeddingGenerator", resourceCulture); + } + } + + public static string IncompatibleEmbeddingGeneratorWasConfiguredForInputType + { + get + { + return ResourceManager.GetString("IncompatibleEmbeddingGeneratorWasConfiguredForInputType", resourceCulture); + } + } + + public static string NoEmbeddingGeneratorWasConfiguredForSearch + { + get + { + return ResourceManager.GetString("NoEmbeddingGeneratorWasConfiguredForSearch", resourceCulture); + } + } + + public static string NonEmbeddingVectorPropertyWithoutEmbeddingGenerator + { + get + { + return ResourceManager.GetString("NonEmbeddingVectorPropertyWithoutEmbeddingGenerator", resourceCulture); + } + } + + public static string EmbeddingTypePassedToSearchAsync + { + get + { + return ResourceManager.GetString("EmbeddingTypePassedToSearchAsync", resourceCulture); + } + } + + public static string EmbeddingPropertyTypeIncompatibleWithEmbeddingGenerator + { + get + { + return ResourceManager.GetString("EmbeddingPropertyTypeIncompatibleWithEmbeddingGenerator", resourceCulture); + } + } + + public static string IncludeVectorsNotSupportedWithEmbeddingGeneration + { + get + { + return ResourceManager.GetString("IncludeVectorsNotSupportedWithEmbeddingGeneration", resourceCulture); + } + } + + public static string EmbeddingGeneratorWithInvalidEmbeddingType + { + get + { + return ResourceManager.GetString("EmbeddingGeneratorWithInvalidEmbeddingType", resourceCulture); + } + } + } +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/Properties/VectorDataStrings.resx b/dotnet/src/Connectors/VectorData.Abstractions/Properties/VectorDataStrings.resx new file mode 100644 index 000000000000..531eb0159b9e --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/Properties/VectorDataStrings.resx @@ -0,0 +1,45 @@ + + + + + + + + + + text/microsoft-resx + + + 1.3 + + + System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + 
System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + Embedding generator '{0}' is incompatible with the required input and output types. The property input type must be '{1}', and the output type must be '{2}'. + + + An input of type '{0}' was provided, but an incompatible embedding generator of type '{1}' was configured. + + + 'SearchAsync' requires an embedding generator to be configured. To pass an embedding directly, use 'SearchEmbeddingAsync', otherwise configure an embedding generator with your vector store connector. + + + Property '{0}' has non-Embedding type '{1}', but no embedding generator is configured. + + + 'SearchAsync' performs embedding generation, and does not accept Embedding types directly. To search for an Embedding directly, use 'SearchEmbeddingAsync'. + + + Property '{0}' has embedding type '{1}', but an embedding generator is configured on the property. Remove the embedding generator or change the property's .NET type to a non-embedding input type to the generator (e.g. string). + + + When an embedding generator is configured, `Include Vectors` cannot be enabled. + + + An embedding generator was configured on property '{0}', but output embedding type '{1}' isn't supported by the connector. + + diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDefinition.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDefinition.cs index d33d0fd4a145..7b43508d814c 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDefinition.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordDefinition.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System.Collections.Generic; +using Microsoft.Extensions.AI; namespace Microsoft.Extensions.VectorData; @@ -19,4 +20,9 @@ public sealed class VectorStoreRecordDefinition /// Gets or sets the list of properties that are stored in the record. /// public IReadOnlyList Properties { get; init; } = s_emptyFields; + + /// + /// Gets or sets the default embedding generator for vector properties in this collection. + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs index bc8a18034966..d953eb489ee1 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty.cs @@ -1,6 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.VectorData.ConnectorSupport; namespace Microsoft.Extensions.VectorData; @@ -10,7 +12,7 @@ namespace Microsoft.Extensions.VectorData; /// /// The characteristics defined here influence how the property is treated by the vector store. 
/// -public sealed class VectorStoreRecordVectorProperty : VectorStoreRecordProperty +public class VectorStoreRecordVectorProperty : VectorStoreRecordProperty { private int _dimensions; @@ -47,8 +49,19 @@ public VectorStoreRecordVectorProperty(VectorStoreRecordVectorProperty source) this.Dimensions = source.Dimensions; this.IndexKind = source.IndexKind; this.DistanceFunction = source.DistanceFunction; + this.EmbeddingGenerator = source.EmbeddingGenerator; + this.EmbeddingType = source.EmbeddingType; } + /// + /// Gets or sets the default embedding generator to use for this property. + /// + /// + /// If not set, embedding generation will be performed in the database, if supported by your connector. + /// If not supported, only pre-generated embeddings can be used, e.g. via . + /// + public IEmbeddingGenerator? EmbeddingGenerator { get; init; } + /// /// Gets or sets the number of dimensions that the vector has. /// @@ -88,4 +101,19 @@ public int Dimensions /// /// public string? DistanceFunction { get; init; } + + /// + /// Gets or sets the desired embedding type (e.g. Embedding<Half>, for cases where the default (typically Embedding<float>) isn't suitable. + /// + public Type? EmbeddingType { get; init; } + + internal virtual VectorStoreRecordVectorPropertyModel CreatePropertyModel() + => new(this.DataModelPropertyName, this.PropertyType) + { + Dimensions = this.Dimensions, + IndexKind = this.IndexKind, + DistanceFunction = this.DistanceFunction, + EmbeddingGenerator = this.EmbeddingGenerator, + EmbeddingType = this.EmbeddingType! + }; } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty{TInput}.cs b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty{TInput}.cs new file mode 100644 index 000000000000..ab20a7cee39b --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/RecordDefinition/VectorStoreRecordVectorProperty{TInput}.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.AI; +using Microsoft.Extensions.VectorData.ConnectorSupport; + +namespace Microsoft.Extensions.VectorData; + +/// +/// Defines a vector property on a vector store record. +/// +/// +/// +/// The characteristics defined here influence how the property is treated by the vector store. +/// +/// +/// This generic version of only needs to be used when an is +/// configured on the property, and a custom .NET type is used as input (any type other than or ). 
+/// +/// +public class VectorStoreRecordVectorProperty : VectorStoreRecordVectorProperty +{ + /// + public VectorStoreRecordVectorProperty(string propertyName, int dimensions) + : base(propertyName, typeof(TInput), dimensions) + { + } + + /// + public VectorStoreRecordVectorProperty(VectorStoreRecordVectorProperty source) + : base(source) + { + } + + internal override VectorStoreRecordVectorPropertyModel CreatePropertyModel() + => new VectorStoreRecordVectorPropertyModel(this.DataModelPropertyName) + { + Dimensions = this.Dimensions, + IndexKind = this.IndexKind, + DistanceFunction = this.DistanceFunction, + EmbeddingGenerator = this.EmbeddingGenerator + }; +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj b/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj index d6014cedc379..5fd1d4d4c24f 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorData.Abstractions.csproj @@ -31,6 +31,10 @@ Microsoft.Extensions.VectorData.IVectorStoreRecordCollection<TKey, TRecord> https://dot.net/ + + + + @@ -58,4 +62,20 @@ Microsoft.Extensions.VectorData.IVectorStoreRecordCollection<TKey, TRecord> + + + + PublicResXFileCodeGenerator + VectorDataStrings.Designer.cs + + + + + + True + True + VectorDataStrings.resx + $(NoWarn);1591 + + diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorSearch.cs new file mode 100644 index 000000000000..a1385d9bab8c --- /dev/null +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorSearch.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using Microsoft.Extensions.AI; + +namespace Microsoft.Extensions.VectorData; + +/// +/// An interface for performing vector searches on a vector store. +/// +/// The record data model to use for retrieving data from the store. +public interface IVectorSearch +{ + /// + /// Searches the vector store for records that are similar to given value. + /// + /// + /// When using this method, is converted to an embedding internally; depending on your database, you may need to configure an embedding generator. + /// + /// The type of the input value on which to perform the similarity search. + /// The value on which to perform the similarity search. + /// The maximum number of results to return. + /// The options that control the behavior of the search. + /// The to monitor for cancellation requests. The default is . + /// The records found by the vector search, including their result scores. + IAsyncEnumerable> SearchAsync( + TInput value, + int top, + VectorSearchOptions? options = default, + CancellationToken cancellationToken = default) + where TInput : notnull; + + /// + /// Searches the vector store for records that are similar to given embedding. + /// + /// + /// This is a low-level method that requires embedding generation to be handled manually. + /// Consider configuring an and using to have embeddings generated automatically. + /// + /// The type of the vector. + /// The vector to search the store with. + /// The maximum number of results to return. + /// The options that control the behavior of the search. + /// The to monitor for cancellation requests. The default is . 
+ /// The records found by the vector search, including their result scores. + // TODO: We may also want to consider allowing the user to pass Embedding, rather than just ReadOnlyMemory (#11701). + // TODO: However, if they have an Embedding, they likely got it from an IEmbeddingGenerator, at which point why not wire that up into MEVD and use SearchAsync? + // TODO: So this raw embedding API is likely more for users who already have a ReadOnlyMemory at hand and we don't want to force them to wrap it with Embedding. + IAsyncEnumerable> SearchEmbeddingAsync( + TVector vector, + int top, + VectorSearchOptions? options = default, + CancellationToken cancellationToken = default) + where TVector : notnull; + + /// Asks the for an object of the specified type . + /// The type of object being requested. + /// An optional key that can be used to help identify the target service. + /// The found object, otherwise . + /// is . + /// + /// The purpose of this method is to allow for the retrieval of strongly-typed services that might be provided by the , + /// including itself or any services it might be wrapping. For example, to access the for the instance, + /// may be used to request it. + /// + [Experimental("MEVD9000")] + object? GetService(Type serviceType, object? serviceKey = null); +} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs index 29b49f86ee21..02ff6e3b3afc 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizableTextSearch.cs @@ -11,6 +11,7 @@ namespace Microsoft.Extensions.VectorData; /// Contains a method for doing a vector search using text that will be vectorized downstream. /// /// The record data model to use for retrieving data from the store. +[Obsolete("Use IVectorStoreRecordCollection.SearchAsync instead")] public interface IVectorizableTextSearch { /// diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs index 7958679b43d3..1fef2039e4d2 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/IVectorizedSearch.cs @@ -2,7 +2,6 @@ using System; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Threading; namespace Microsoft.Extensions.VectorData; @@ -11,6 +10,7 @@ namespace Microsoft.Extensions.VectorData; /// Contains a method for doing a vector search using a vector. /// /// The record data model to use for retrieving data from the store. +[Obsolete("This interface is obsolete, use either SearchEmbeddingAsync to search directly on embeddings, or SearchAsync to handle embedding generation internally as part of the call")] public interface IVectorizedSearch { /// @@ -22,22 +22,11 @@ public interface IVectorizedSearch /// The options that control the behavior of the search. /// The to monitor for cancellation requests. The default is . /// The records found by the vector search, including their result scores. + [Obsolete("Use either SearchEmbeddingAsync to search directly on embeddings, or SearchAsync to handle embedding generation internally as part of the call.")] IAsyncEnumerable> VectorizedSearchAsync( TVector vector, int top, VectorSearchOptions? 
options = default, - CancellationToken cancellationToken = default); - - /// Asks the for an object of the specified type . - /// The type of object being requested. - /// An optional key that can be used to help identify the target service. - /// The found object, otherwise . - /// is . - /// - /// The purpose of this method is to allow for the retrieval of strongly-typed services that might be provided by the , - /// including itself or any services it might be wrapping. For example, to access the for the instance, - /// may be used to request it. - /// - [Experimental("MEVD9000")] - object? GetService(Type serviceType, object? serviceKey = null); + CancellationToken cancellationToken = default) + where TVector : notnull; } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorizedSearchExtensions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchExtensions.cs similarity index 70% rename from dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorizedSearchExtensions.cs rename to dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchExtensions.cs index 07b40aa39b21..13f8e9960d0d 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorizedSearchExtensions.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorSearchExtensions.cs @@ -7,27 +7,27 @@ namespace Microsoft.Extensions.VectorData; /// Provides a collection of static methods for extending instances. [Experimental("MEVD9000")] -public static class VectorizedSearchExtensions +public static class VectorSearchExtensions { /// - /// Asks the for an object of the specified type + /// Asks the for an object of the specified type /// and throw an exception if one isn't available. /// /// The record data model to use for retrieving data from the store. - /// The vectorized search. + /// The vector search. /// The type of object being requested. /// An optional key that can be used to help identify the target service. /// The found object. - /// is . + /// is . /// is . /// No service of the requested type for the specified key is available. - public static object GetRequiredService(this IVectorizedSearch vectorizedSearch, Type serviceType, object? serviceKey = null) + public static object GetRequiredService(this IVectorSearch vectorSearch, Type serviceType, object? serviceKey = null) { - if (vectorizedSearch is null) { throw new ArgumentNullException(nameof(vectorizedSearch)); } + if (vectorSearch is null) { throw new ArgumentNullException(nameof(vectorSearch)); } if (serviceType is null) { throw new ArgumentNullException(nameof(serviceType)); } return - vectorizedSearch.GetService(serviceType, serviceKey) ?? + vectorSearch.GetService(serviceType, serviceKey) ?? throw Throw.CreateMissingServiceException(serviceType, serviceKey); } } diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorizableTextSearchExtensions.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorizableTextSearchExtensions.cs deleted file mode 100644 index 442155bdcccc..000000000000 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorSearch/VectorizableTextSearchExtensions.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics.CodeAnalysis; - -namespace Microsoft.Extensions.VectorData; - -/// Provides a collection of static methods for extending instances. 
-[Experimental("MEVD9000")] -public static class VectorizableTextSearchExtensions -{ - /// - /// Asks the for an object of the specified type - /// and throw an exception if one isn't available. - /// - /// The record data model to use for retrieving data from the store. - /// The vectorizable text search. - /// The type of object being requested. - /// An optional key that can be used to help identify the target service. - /// The found object. - /// is . - /// is . - /// No service of the requested type for the specified key is available. - public static object GetRequiredService(this IVectorizableTextSearch vectorizableTextSearch, Type serviceType, object? serviceKey = null) - { - if (vectorizableTextSearch is null) { throw new ArgumentNullException(nameof(vectorizableTextSearch)); } - if (serviceType is null) { throw new ArgumentNullException(nameof(serviceType)); } - - return - vectorizableTextSearch.GetService(serviceType, serviceKey) ?? - throw Throw.CreateMissingServiceException(serviceType, serviceKey); - } -} diff --git a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs index 84acae5bb6d2..91a62b496c20 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/VectorStorage/IVectorStoreRecordCollection.cs @@ -17,7 +17,9 @@ namespace Microsoft.Extensions.VectorData; /// Unless otherwise documented, implementations of this interface can be expected to be thread-safe, and can be used concurrently from multiple threads. /// #pragma warning disable CA1711 // Identifiers should not have incorrect suffix (Collection) -public interface IVectorStoreRecordCollection : IVectorizedSearch +#pragma warning disable CS0618 // IVectorizedSearch is obsolete +public interface IVectorStoreRecordCollection : IVectorSearch, IVectorizedSearch +#pragma warning restore CS0618 // IVectorizedSearch is obsolete #pragma warning restore CA1711 where TKey : notnull where TRecord : notnull diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj b/dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj new file mode 100644 index 000000000000..4dae2d3e5f1f --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorData.UnitTests.csproj @@ -0,0 +1,45 @@ + + + + VectorData.UnitTests + VectorData.UnitTests + net8.0 + true + enable + disable + false + $(NoWarn);VSTHRD111,CA2007,CS8618 + $(NoWarn);MEVD9001 + + + $(NoWarn);CA1515 + $(NoWarn);CA1707 + $(NoWarn);CA1716 + $(NoWarn);CA1720 + $(NoWarn);CA1721 + $(NoWarn);CA1861 + $(NoWarn);CA1863 + $(NoWarn);CA2007;VSTHRD111 + $(NoWarn);CS1591 + $(NoWarn);IDE1006 + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + diff --git a/dotnet/src/Connectors/VectorData.UnitTests/VectorStoreRecordModelBuilderTests.cs b/dotnet/src/Connectors/VectorData.UnitTests/VectorStoreRecordModelBuilderTests.cs new file mode 100644 index 000000000000..38c42cbd57ea --- /dev/null +++ b/dotnet/src/Connectors/VectorData.UnitTests/VectorStoreRecordModelBuilderTests.cs @@ -0,0 +1,335 @@ +// Copyright (c) Microsoft. All rights reserved. 
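+// Illustrative usage sketch (assumptions, not asserted by this patch): with the abstractions introduced above, an
+// application would typically configure an IEmbeddingGenerator on the record definition (or on an individual
+// VectorStoreRecordVectorProperty) and then search with raw input, letting the collection generate the embedding:
+//
+//     var definition = new VectorStoreRecordDefinition
+//     {
+//         EmbeddingGenerator = generator, // e.g. an IEmbeddingGenerator<string, Embedding<float>>
+//         Properties = [ /* key, data and vector properties */ ]
+//     };
+//     await foreach (var result in collection.SearchAsync("sample query", top: 3))
+//     {
+//         Console.WriteLine($"{result.Score}: {result.Record}");
+//     }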
+ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.ConnectorSupport; +using Microsoft.Extensions.VectorData.Properties; +using Xunit; + +namespace VectorData.UnitTests; + +#pragma warning disable CA2000 // Dispose objects before losing scope + +public class VectorStoreRecordModelBuilderTests +{ + [Fact] + public void Default_embedding_generator_without_record_definition() + { + using var embeddingGenerator = new FakeEmbeddingGenerator>(); + var model = new CustomModelBuilder().Build(typeof(RecordWithStringVectorProperty), vectorStoreRecordDefinition: null, embeddingGenerator); + + // The embedding's .NET type (Embedding) is inferred from the embedding generator. + Assert.Same(embeddingGenerator, model.VectorProperty.EmbeddingGenerator); + Assert.Same(typeof(string), model.VectorProperty.Type); + Assert.Same(typeof(ReadOnlyMemory), model.VectorProperty.EmbeddingType); + } + + [Fact] + public void Default_embedding_generator_with_clr_type_and_record_definition() + { + using var embeddingGenerator = new FakeEmbeddingGenerator>(); + + var recordDefinition = new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty(nameof(RecordWithEmbeddingVectorProperty.Id), typeof(int)), + new VectorStoreRecordDataProperty(nameof(RecordWithEmbeddingVectorProperty.Name), typeof(string)), + new VectorStoreRecordVectorProperty(nameof(RecordWithEmbeddingVectorProperty.Embedding), typeof(string), dimensions: 3) + { + // The following configures the property to be ReadOnlyMemory (non-default embedding type for this connector) + EmbeddingType = typeof(ReadOnlyMemory) + } + ] + }; + + var model = new CustomModelBuilder().Build(typeof(RecordWithStringVectorProperty), recordDefinition, embeddingGenerator); + + // The embedding's .NET type (Embedding) is inferred from the embedding generator. + Assert.Same(embeddingGenerator, model.VectorProperty.EmbeddingGenerator); + Assert.Same(typeof(string), model.VectorProperty.Type); + Assert.Same(typeof(ReadOnlyMemory), model.VectorProperty.EmbeddingType); + } + + [Fact] + public void Default_embedding_generator_with_dynamic() + { + using var embeddingGenerator = new FakeEmbeddingGenerator>(); + + var recordDefinition = new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty(nameof(RecordWithEmbeddingVectorProperty.Id), typeof(int)), + new VectorStoreRecordDataProperty(nameof(RecordWithEmbeddingVectorProperty.Name), typeof(string)), + new VectorStoreRecordVectorProperty(nameof(RecordWithEmbeddingVectorProperty.Embedding), typeof(string), dimensions: 3) + ] + }; + + var model = new CustomModelBuilder().Build(typeof(Dictionary), recordDefinition, embeddingGenerator); + + // The embedding's .NET type (Embedding) is inferred from the embedding generator. 
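+        // (Clarifying note, assuming the CustomModelBuilder defined below: the generator produces Embedding<float>,
+        // which TrySetupEmbeddingGeneration unwraps to ReadOnlyMemory<float>; the property's .NET type stays string,
+        // while EmbeddingType records the type stored in the database.)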
+ Assert.Same(embeddingGenerator, model.VectorProperty.EmbeddingGenerator); + Assert.Same(typeof(string), model.VectorProperty.Type); + Assert.Same(typeof(ReadOnlyMemory), model.VectorProperty.EmbeddingType); + } + + [Fact] + public void Default_embedding_generator_with_dynamic_and_non_default_EmbeddingType() + { + using var embeddingGenerator = new FakeEmbeddingGenerator>(); + + var recordDefinition = new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty(nameof(RecordWithEmbeddingVectorProperty.Id), typeof(int)), + new VectorStoreRecordDataProperty(nameof(RecordWithEmbeddingVectorProperty.Name), typeof(string)), + new VectorStoreRecordVectorProperty(nameof(RecordWithEmbeddingVectorProperty.Embedding), typeof(string), dimensions: 3) + { + EmbeddingType = typeof(ReadOnlyMemory) + } + ] + }; + + var model = new CustomModelBuilder().Build(typeof(Dictionary), recordDefinition, embeddingGenerator); + + Assert.Same(embeddingGenerator, model.VectorProperty.EmbeddingGenerator); + Assert.Same(typeof(string), model.VectorProperty.Type); + Assert.Same(typeof(ReadOnlyMemory), model.VectorProperty.EmbeddingType); + } + + [Fact] + public void Property_embedding_generator_takes_precedence_over_default_generator() + { + using var propertyEmbeddingGenerator = new FakeEmbeddingGenerator>(); + using var defaultEmbeddingGenerator = new FakeEmbeddingGenerator>(); + + var recordDefinition = new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty(nameof(RecordWithEmbeddingVectorProperty.Id), typeof(int)), + new VectorStoreRecordDataProperty(nameof(RecordWithEmbeddingVectorProperty.Name), typeof(string)), + new VectorStoreRecordVectorProperty(nameof(RecordWithEmbeddingVectorProperty.Embedding), typeof(string), dimensions: 3) + { + EmbeddingGenerator = propertyEmbeddingGenerator + } + ] + }; + + var model = new CustomModelBuilder().Build(typeof(Dictionary), recordDefinition, defaultEmbeddingGenerator); + + Assert.Same(propertyEmbeddingGenerator, model.VectorProperty.EmbeddingGenerator); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void Embedding_property_type_with_default_embedding_generator_ignores_generator(bool dynamic) + { + using var embeddingGenerator = new FakeEmbeddingGenerator>(); + + var model = dynamic + ? new CustomModelBuilder().Build( + typeof(Dictionary), + new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty(nameof(RecordWithEmbeddingVectorProperty.Id), typeof(int)), + new VectorStoreRecordDataProperty(nameof(RecordWithEmbeddingVectorProperty.Name), typeof(string)), + new VectorStoreRecordVectorProperty(nameof(RecordWithEmbeddingVectorProperty.Embedding), typeof(ReadOnlyMemory), dimensions: 3) + ] + }, + embeddingGenerator) + : new CustomModelBuilder().Build(typeof(RecordWithEmbeddingVectorProperty), vectorStoreRecordDefinition: null, embeddingGenerator); + + Assert.Null(model.VectorProperty.EmbeddingGenerator); + Assert.Same(typeof(ReadOnlyMemory), model.VectorProperty.Type); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void Custom_input_type(bool dynamic) + { + using var embeddingGenerator = new FakeEmbeddingGenerator>(); + + // TODO: Allow custom input type without a record definition (i.e. 
generic attribute) + var recordDefinition = new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty(nameof(RecordWithEmbeddingVectorProperty.Id), typeof(int)), + new VectorStoreRecordDataProperty(nameof(RecordWithEmbeddingVectorProperty.Name), typeof(string)), + new VectorStoreRecordVectorProperty(nameof(RecordWithEmbeddingVectorProperty.Embedding), dimensions: 3) + ] + }; + + var model = dynamic + ? new CustomModelBuilder().Build(typeof(Dictionary), recordDefinition, embeddingGenerator) + : new CustomModelBuilder().Build(typeof(RecordWithCustomerVectorProperty), recordDefinition, embeddingGenerator); + + var vectorProperty = model.VectorProperty; + + Assert.Same(embeddingGenerator, vectorProperty.EmbeddingGenerator); + Assert.Same(typeof(Customer), vectorProperty.Type); + Assert.Same(typeof(ReadOnlyMemory), vectorProperty.EmbeddingType); + } + + [Fact] + public void Incompatible_embedding_on_embedding_generator_throws() + { + // Embedding is not a supported embedding type by the connector + using var embeddingGenerator = new FakeEmbeddingGenerator>(); + + var exception = Assert.Throws(() => + new CustomModelBuilder().Build(typeof(RecordWithStringVectorProperty), vectorStoreRecordDefinition: null, embeddingGenerator)); + + Assert.Equal($"Embedding generator '{typeof(FakeEmbeddingGenerator<,>).Name}' is incompatible with the required input and output types. The property input type must be 'String, DataContent', and the output type must be 'ReadOnlyMemory, ReadOnlyMemory'.", exception.Message); + } + + [Fact] + public void Incompatible_input_on_embedding_generator_throws() + { + // int is not a supported input type for the embedding generator + using var embeddingGenerator = new FakeEmbeddingGenerator>(); + + var exception = Assert.Throws(() => + new CustomModelBuilder().Build(typeof(RecordWithStringVectorProperty), vectorStoreRecordDefinition: null, embeddingGenerator)); + + Assert.Equal($"Embedding generator '{typeof(FakeEmbeddingGenerator<,>).Name}' is incompatible with the required input and output types. 
The property input type must be 'String, DataContent', and the output type must be 'ReadOnlyMemory, ReadOnlyMemory'.", exception.Message); + } + + [Fact] + public void Non_embedding_vector_property_without_embedding_generator_throws() + { + var exception = Assert.Throws(() => + new CustomModelBuilder().Build(typeof(RecordWithStringVectorProperty), vectorStoreRecordDefinition: null, defaultEmbeddingGenerator: null)); + + Assert.Equal($"Property '{nameof(RecordWithStringVectorProperty.Embedding)}' has non-Embedding type 'String', but no embedding generator is configured.", exception.Message); + } + + [Fact] + public void Embedding_property_type_with_property_embedding_generator_throws() + { + using var embeddingGenerator = new FakeEmbeddingGenerator>(); + + var recordDefinition = new VectorStoreRecordDefinition + { + Properties = + [ + new VectorStoreRecordKeyProperty(nameof(RecordWithEmbeddingVectorProperty.Id), typeof(int)), + new VectorStoreRecordDataProperty(nameof(RecordWithEmbeddingVectorProperty.Name), typeof(string)), + new VectorStoreRecordVectorProperty(nameof(RecordWithEmbeddingVectorProperty.Embedding), typeof(ReadOnlyMemory), dimensions: 3) + { + EmbeddingGenerator = embeddingGenerator + } + ] + }; + + var exception = Assert.Throws(() => + new CustomModelBuilder().Build(typeof(RecordWithEmbeddingVectorProperty), recordDefinition, embeddingGenerator)); + + Assert.Equal( + $"Property '{nameof(RecordWithEmbeddingVectorProperty.Embedding)}' has embedding type 'ReadOnlyMemory`1', but an embedding generator is configured on the property. Remove the embedding generator or change the property's .NET type to a non-embedding input type to the generator (e.g. string).", + exception.Message); + } + + public class RecordWithStringVectorProperty + { + [VectorStoreRecordKey] + public int Id { get; set; } + + [VectorStoreRecordData] + public string Name { get; set; } + + [VectorStoreRecordVector(Dimensions: 3)] + public string Embedding { get; set; } + } + + public class RecordWithEmbeddingVectorProperty + { + [VectorStoreRecordKey] + public int Id { get; set; } + + [VectorStoreRecordData] + public string Name { get; set; } + + [VectorStoreRecordVector(Dimensions: 3)] + public ReadOnlyMemory Embedding { get; set; } + } + + public class RecordWithCustomerVectorProperty + { + [VectorStoreRecordKey] + public int Id { get; set; } + + [VectorStoreRecordData] + public string Name { get; set; } + + [VectorStoreRecordVector(Dimensions: 3)] + public Customer Embedding { get; set; } + } + + public class Customer + { + public string FirstName { get; set; } + public string LastName { get; set; } + } + + private sealed class CustomModelBuilder(VectorStoreRecordModelBuildingOptions? options = null) + : VectorStoreRecordModelBuilder(options ?? s_defaultOptions) + { + private static readonly VectorStoreRecordModelBuildingOptions s_defaultOptions = new() + { + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + RequiresAtLeastOneVector = false, + + SupportedKeyPropertyTypes = [typeof(string), typeof(int)], + SupportedDataPropertyTypes = [typeof(string), typeof(int)], + SupportedEnumerableDataPropertyElementTypes = [typeof(string), typeof(int)], + SupportedVectorPropertyTypes = [typeof(ReadOnlyMemory), typeof(ReadOnlyMemory)] + }; + + protected override void SetupEmbeddingGeneration( + VectorStoreRecordVectorPropertyModel vectorProperty, + IEmbeddingGenerator embeddingGenerator, + Type? 
embeddingType) + { + if (!vectorProperty.TrySetupEmbeddingGeneration, ReadOnlyMemory>(embeddingGenerator, embeddingType) + && !vectorProperty.TrySetupEmbeddingGeneration, ReadOnlyMemory>(embeddingGenerator, embeddingType)) + { + throw new InvalidOperationException( + string.Format( + VectorDataStrings.IncompatibleEmbeddingGenerator, + embeddingGenerator.GetType().Name, + string.Join(", ", vectorProperty.GetSupportedInputTypes().Select(t => t.Name)), + "ReadOnlyMemory, ReadOnlyMemory")); + } + } + } + + private sealed class FakeEmbeddingGenerator : IEmbeddingGenerator + where TEmbedding : Embedding + { + public Task> GenerateAsync( + IEnumerable values, + EmbeddingGenerationOptions? options = null, + CancellationToken cancellationToken = default) + => throw new UnreachableException(); + + public object? GetService(Type serviceType, object? serviceKey = null) + => throw new UnreachableException(); + + public void Dispose() { } + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchTextSearchTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchTextSearchTests.cs index 115ae9aabff5..2a4932f98e6f 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchTextSearchTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchTextSearchTests.cs @@ -85,7 +85,11 @@ public override Task CreateTextSearchAsync() var stringMapper = new HotelTextSearchStringMapper(); var resultMapper = new HotelTextSearchResultMapper(); + // TODO: Once OpenAITextEmbeddingGenerationService implements MEAI's IEmbeddingGenerator (#10811), configure it with the AzureAISearchVectorStore above instead of passing it here. +#pragma warning disable CS0618 // VectorStoreTextSearch with ITextEmbeddingGenerationService is obsolete var result = new VectorStoreTextSearch(vectorSearch, this.EmbeddingGenerator!, stringMapper, resultMapper); +#pragma warning restore CS0618 + return Task.FromResult(result); } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs index d033c68bde54..74590628bdba 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/BaseVectorStoreRecordCollectionTests.cs @@ -97,7 +97,7 @@ public async Task VectorSearchShouldReturnExpectedScoresAsync(string distanceFun await Task.Delay(this.DelayAfterUploadInMilliseconds); // Act - var results = await sut.VectorizedSearchAsync(baseVector, top: 3).ToListAsync(); + var results = await sut.SearchEmbeddingAsync(baseVector, top: 3).ToListAsync(); // Assert Assert.Equal(3, results.Count); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/InMemory/InMemoryVectorStoreTextSearchTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/InMemory/InMemoryVectorStoreTextSearchTests.cs index 27d585041e24..2bfc57eb56fc 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/InMemory/InMemoryVectorStoreTextSearchTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/InMemory/InMemoryVectorStoreTextSearchTests.cs @@ -50,7 +50,10 @@ static DataModel CreateRecord(int index, string text, ReadOnlyMemory embe var stringMapper = new DataModelTextSearchStringMapper(); var resultMapper = new DataModelTextSearchResultMapper(); + // TODO: Once OpenAITextEmbeddingGenerationService implements MEAI's IEmbeddingGenerator (#10811), configure 
it with the InMemoryVectorStore above instead of passing it here. +#pragma warning disable CS0618 // VectorStoreTextSearch with ITextEmbeddingGenerationService is obsolete return new VectorStoreTextSearch(vectorSearch, this.EmbeddingGenerator!, stringMapper, resultMapper); +#pragma warning restore CS0618 } /// diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantTextSearchTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantTextSearchTests.cs index 716432ba63f1..8f5d4a9a4b39 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantTextSearchTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantTextSearchTests.cs @@ -34,7 +34,11 @@ public override Task CreateTextSearchAsync() var stringMapper = new HotelInfoTextSearchStringMapper(); var resultMapper = new HotelInfoTextSearchResultMapper(); + // TODO: Once OpenAITextEmbeddingGenerationService implements MEAI's IEmbeddingGenerator (#10811), configure it with the AzureAISearchVectorStore above instead of passing it here. +#pragma warning disable CS0618 // VectorStoreTextSearch with ITextEmbeddingGenerationService is obsolete var result = new VectorStoreTextSearch(vectorSearch, this.EmbeddingGenerator!, stringMapper, resultMapper); +#pragma warning restore CS0618 + return Task.FromResult(result); } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs index f64676f3fe10..4726c2c029dc 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteServiceCollectionExtensionsTests.cs @@ -44,7 +44,7 @@ public void AddVectorStoreRecordCollectionWithStringKeyAndSqliteConnectionRegist Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = serviceProvider.GetRequiredService>(); + var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } @@ -62,7 +62,7 @@ public void AddVectorStoreRecordCollectionWithNumericKeyAndSqliteConnectionRegis Assert.NotNull(collection); Assert.IsType>(collection); - var vectorizedSearch = serviceProvider.GetRequiredService>(); + var vectorizedSearch = serviceProvider.GetRequiredService>(); Assert.NotNull(vectorizedSearch); Assert.IsType>(vectorizedSearch); } diff --git a/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs b/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs index 98d84a301c6e..3a1afcc4303e 100644 --- a/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs +++ b/dotnet/src/IntegrationTests/Data/BaseVectorStoreTextSearchTests.cs @@ -4,7 +4,6 @@ using System.Collections.Generic; using System.Collections.ObjectModel; using System.IO; -using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.Configuration; @@ -98,33 +97,6 @@ public Task>> GenerateEmbeddingsAsync(IList } } - /// - /// Decorator for a that generates embeddings for text search queries. - /// - protected sealed class VectorizedSearchWrapper(IVectorizedSearch vectorizedSearch, ITextEmbeddingGenerationService textEmbeddingGeneration) : IVectorizableTextSearch - { - /// - public async IAsyncEnumerable> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var vectorizedQuery = await textEmbeddingGeneration!.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); - - await foreach (var result in vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, top, options, cancellationToken)) - { - yield return result; - } - } - - /// - public object? GetService(Type serviceType, object? serviceKey = null) - { - ArgumentNullException.ThrowIfNull(serviceType); - - return - serviceKey is null && serviceType.IsInstanceOfType(this) ? this : - vectorizedSearch.GetService(serviceType, serviceKey); - } - } - /// /// Sample model class that represents a record entry. /// diff --git a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/IMongoDBMapper.cs b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/IMongoDBMapper.cs index 8f92ed9aaa7d..c59c97a69ce9 100644 --- a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/IMongoDBMapper.cs +++ b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/IMongoDBMapper.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using MongoDB.Bson; @@ -8,15 +9,10 @@ internal interface IMongoDBMapper /// /// Maps from the consumer record data model to the storage model. /// - /// The consumer record data model record to map. - /// The mapped result. - BsonDocument MapFromDataToStorageModel(TRecord dataModel); + BsonDocument MapFromDataToStorageModel(TRecord dataModel, Embedding?[]? generatedEmbeddings); /// /// Maps from the storage model to the consumer record data model. /// - /// The storage data model record to map. - /// Options to control the mapping behavior. - /// The mapped result. TRecord MapFromStorageToDataModel(BsonDocument storageModel, StorageToDataModelMapperOptions options); } diff --git a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBDynamicDataModelMapper.cs b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBDynamicDataModelMapper.cs index 979c1ce9fbda..e1507273810f 100644 --- a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBDynamicDataModelMapper.cs +++ b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBDynamicDataModelMapper.cs @@ -6,6 +6,7 @@ using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Linq; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; using MongoDB.Bson; @@ -21,38 +22,54 @@ internal sealed class MongoDBDynamicDataModelMapper(VectorStoreRecordModel model #pragma warning restore CS0618 { /// - public BsonDocument MapFromDataToStorageModel(Dictionary dataModel) + public BsonDocument MapFromDataToStorageModel(Dictionary dataModel, Embedding?[]? generatedEmbeddings) { Verify.NotNull(dataModel); var document = new BsonDocument(); - // Loop through all known properties and map each from the data model to the storage model. - foreach (var property in model.Properties) - { - switch (property) + document[MongoDBConstants.MongoReservedKeyPropertyName] = !dataModel.TryGetValue(model.KeyProperty.ModelName, out var keyValue) + ? throw new KeyNotFoundException($"Missing value for key property '{model.KeyProperty.ModelName}") + : keyValue switch { - case VectorStoreRecordKeyPropertyModel keyProperty: - document[MongoDBConstants.MongoReservedKeyPropertyName] = (string)(dataModel[keyProperty.ModelName] - ?? 
throw new InvalidOperationException($"Key property '{keyProperty.ModelName}' is null.")); - continue; + string s => s, + null => throw new InvalidOperationException($"Key property '{model.KeyProperty.ModelName}' is null."), + _ => throw new InvalidCastException($"Key property '{model.KeyProperty.ModelName}' must be a string.") + }; - case VectorStoreRecordDataPropertyModel dataProperty: - if (dataModel.TryGetValue(dataProperty.ModelName, out var dataValue)) - { - document[dataProperty.StorageName] = BsonValue.Create(dataValue); - } - continue; + document[MongoDBConstants.MongoReservedKeyPropertyName] = (string)(dataModel[model.KeyProperty.ModelName] + ?? throw new InvalidOperationException($"Key property '{model.KeyProperty.ModelName}' is null.")); - case VectorStoreRecordVectorPropertyModel vectorProperty: - if (dataModel.TryGetValue(vectorProperty.ModelName, out var vectorValue)) - { - document[vectorProperty.StorageName] = BsonArray.Create(GetVectorArray(vectorValue)); - } - continue; + foreach (var property in model.DataProperties) + { + if (dataModel.TryGetValue(property.ModelName, out var dataValue)) + { + document[property.StorageName] = BsonValue.Create(dataValue); + } + } - default: - throw new UnreachableException(); + for (var i = 0; i < model.VectorProperties.Count; i++) + { + var property = model.VectorProperties[i]; + + if (generatedEmbeddings?[i] is null) + { + // No generated embedding, read the vector directly from the data model + if (dataModel.TryGetValue(property.ModelName, out var vectorValue)) + { + document[property.StorageName] = BsonArray.Create(GetVectorArray(vectorValue)); + } + } + else + { + Debug.Assert(property.EmbeddingGenerator is not null); + var embedding = generatedEmbeddings[i]; + document[property.StorageName] = embedding switch + { + Embedding e => BsonArray.Create(e.Vector.ToArray()), + Embedding e => BsonArray.Create(e.Vector.ToArray()), + _ => throw new UnreachableException() + }; } } @@ -85,7 +102,7 @@ public BsonDocument MapFromDataToStorageModel(Dictionary dataMo continue; case VectorStoreRecordVectorPropertyModel vectorProperty: - if (storageModel.TryGetValue(vectorProperty.StorageName, out var vectorValue)) + if (options.IncludeVectors && storageModel.TryGetValue(vectorProperty.StorageName, out var vectorValue)) { result.Add(vectorProperty.ModelName, GetVectorPropertyValue(property.ModelName, property.Type, vectorValue)); } diff --git a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs index 0dbf0235e1e7..2d2b6a237229 100644 --- a/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs +++ b/dotnet/src/InternalUtilities/connectors/Memory/MongoDB/MongoDBVectorStoreRecordMapper.cs @@ -1,8 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Reflection; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.Extensions.VectorData.ConnectorSupport; using MongoDB.Bson; @@ -17,6 +19,8 @@ namespace Microsoft.SemanticKernel.Connectors.MongoDB; internal sealed class MongoDBVectorStoreRecordMapper : IMongoDBMapper #pragma warning restore CS0618 { + private readonly VectorStoreRecordModel _model; + /// A key property info of the data model. private readonly PropertyInfo? 
_keyClrProperty; @@ -29,6 +33,8 @@ internal sealed class MongoDBVectorStoreRecordMapper : IMongoDBMapperThe model. public MongoDBVectorStoreRecordMapper(VectorStoreRecordModel model) { + this._model = model; + var keyProperty = model.KeyProperty; this._keyPropertyModelName = keyProperty.ModelName; this._keyClrProperty = keyProperty.PropertyInfo; @@ -44,7 +50,7 @@ public MongoDBVectorStoreRecordMapper(VectorStoreRecordModel model) type => type == typeof(TRecord)); } - public BsonDocument MapFromDataToStorageModel(TRecord dataModel) + public BsonDocument MapFromDataToStorageModel(TRecord dataModel, Embedding?[]? generatedEmbeddings) { var document = dataModel.ToBsonDocument(); @@ -58,6 +64,27 @@ public BsonDocument MapFromDataToStorageModel(TRecord dataModel) document[MongoDBConstants.MongoReservedKeyPropertyName] = value; } + // Go over the vector properties; those which have an embedding generator configured on them will have embedding generators, overwrite + // the value in the JSON object with that. + if (generatedEmbeddings is not null) + { + for (var i = 0; i < this._model.VectorProperties.Count; i++) + { + if (generatedEmbeddings[i] is not null) + { + var property = this._model.VectorProperties[i]; + Debug.Assert(property.EmbeddingGenerator is not null); + var embedding = generatedEmbeddings[i]; + document[property.StorageName] = embedding switch + { + Embedding e => BsonArray.Create(e.Vector.ToArray()), + Embedding e => BsonArray.Create(e.Vector.ToArray()), + _ => throw new UnreachableException() + }; + } + } + } + return document; } @@ -74,6 +101,20 @@ public TRecord MapFromStorageToDataModel(BsonDocument storageModel, StorageToDat storageModel[this._keyPropertyModelName] = value; } + // For vector properties which have embedding generation configured, we need to remove the embeddings before deserializing + // (we can't go back from an embedding to e.g. string). + // For other cases (no embedding generation), we leave the properties even if IncludeVectors is false. + if (!options.IncludeVectors) + { + foreach (var vectorProperty in this._model.VectorProperties) + { + if (vectorProperty.EmbeddingGenerator is not null) + { + storageModel.Remove(vectorProperty.StorageName); + } + } + } + return BsonSerializer.Deserialize(storageModel); } } diff --git a/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs b/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs index d8e33919c2e3..7c6c8f8af33c 100644 --- a/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs +++ b/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/MockVectorizableTextSearch.cs @@ -4,7 +4,7 @@ namespace SemanticKernel.AotTests.UnitTests.Search; -internal sealed class MockVectorizableTextSearch : IVectorizableTextSearch +internal sealed class MockVectorizableTextSearch : IVectorSearch { private readonly IAsyncEnumerable> _searchResults; @@ -13,7 +13,22 @@ public MockVectorizableTextSearch(IEnumerable> searc this._searchResults = searchResults.ToAsyncEnumerable(); } - public IAsyncEnumerable> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable> SearchAsync( + TInput value, + int top, + VectorSearchOptions? 
options = default, + CancellationToken cancellationToken = default) + where TInput : notnull + { + return this._searchResults; + } + + public IAsyncEnumerable> SearchEmbeddingAsync( + TVector vector, + int top, + VectorSearchOptions? options = default, + CancellationToken cancellationToken = default) + where TVector : notnull { return this._searchResults; } diff --git a/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/VectorStoreTextSearchTests.cs b/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/VectorStoreTextSearchTests.cs index eee8ae4db55e..e06d4b3bf741 100644 --- a/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/VectorStoreTextSearchTests.cs +++ b/dotnet/src/SemanticKernel.AotTests/UnitTests/Search/VectorStoreTextSearchTests.cs @@ -46,7 +46,7 @@ public static async Task AddVectorStoreTextSearch() }; var vectorizableTextSearch = new MockVectorizableTextSearch(testData); var serviceCollection = new ServiceCollection(); - serviceCollection.AddSingleton>(vectorizableTextSearch); + serviceCollection.AddSingleton>(vectorizableTextSearch); // Act serviceCollection.AddVectorStoreTextSearch(); diff --git a/dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml b/dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml new file mode 100644 index 000000000000..97293e39d3cc --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/CompatibilitySuppressions.xml @@ -0,0 +1,60 @@ + + + + + CP0002 + M:Microsoft.SemanticKernel.Data.VectorStoreTextSearch`1.#ctor(Microsoft.Extensions.VectorData.IVectorizableTextSearch{`0},Microsoft.SemanticKernel.Data.ITextSearchStringMapper,Microsoft.SemanticKernel.Data.ITextSearchResultMapper,Microsoft.SemanticKernel.Data.VectorStoreTextSearchOptions) + lib/net8.0/Microsoft.SemanticKernel.Core.dll + lib/net8.0/Microsoft.SemanticKernel.Core.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Data.VectorStoreTextSearch`1.#ctor(Microsoft.Extensions.VectorData.IVectorizableTextSearch{`0},Microsoft.SemanticKernel.Data.MapFromResultToString,Microsoft.SemanticKernel.Data.MapFromResultToTextSearchResult,Microsoft.SemanticKernel.Data.VectorStoreTextSearchOptions) + lib/net8.0/Microsoft.SemanticKernel.Core.dll + lib/net8.0/Microsoft.SemanticKernel.Core.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Data.VectorStoreTextSearch`1.#ctor(Microsoft.Extensions.VectorData.IVectorizedSearch{`0},Microsoft.SemanticKernel.Embeddings.ITextEmbeddingGenerationService,Microsoft.SemanticKernel.Data.ITextSearchStringMapper,Microsoft.SemanticKernel.Data.ITextSearchResultMapper,Microsoft.SemanticKernel.Data.VectorStoreTextSearchOptions) + lib/net8.0/Microsoft.SemanticKernel.Core.dll + lib/net8.0/Microsoft.SemanticKernel.Core.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Data.VectorStoreTextSearch`1.#ctor(Microsoft.Extensions.VectorData.IVectorizedSearch{`0},Microsoft.SemanticKernel.Embeddings.ITextEmbeddingGenerationService,Microsoft.SemanticKernel.Data.MapFromResultToString,Microsoft.SemanticKernel.Data.MapFromResultToTextSearchResult,Microsoft.SemanticKernel.Data.VectorStoreTextSearchOptions) + lib/net8.0/Microsoft.SemanticKernel.Core.dll + lib/net8.0/Microsoft.SemanticKernel.Core.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Data.VectorStoreTextSearch`1.#ctor(Microsoft.Extensions.VectorData.IVectorizableTextSearch{`0},Microsoft.SemanticKernel.Data.ITextSearchStringMapper,Microsoft.SemanticKernel.Data.ITextSearchResultMapper,Microsoft.SemanticKernel.Data.VectorStoreTextSearchOptions) + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + 
lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Data.VectorStoreTextSearch`1.#ctor(Microsoft.Extensions.VectorData.IVectorizableTextSearch{`0},Microsoft.SemanticKernel.Data.MapFromResultToString,Microsoft.SemanticKernel.Data.MapFromResultToTextSearchResult,Microsoft.SemanticKernel.Data.VectorStoreTextSearchOptions) + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Data.VectorStoreTextSearch`1.#ctor(Microsoft.Extensions.VectorData.IVectorizedSearch{`0},Microsoft.SemanticKernel.Embeddings.ITextEmbeddingGenerationService,Microsoft.SemanticKernel.Data.ITextSearchStringMapper,Microsoft.SemanticKernel.Data.ITextSearchResultMapper,Microsoft.SemanticKernel.Data.VectorStoreTextSearchOptions) + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Data.VectorStoreTextSearch`1.#ctor(Microsoft.Extensions.VectorData.IVectorizedSearch{`0},Microsoft.SemanticKernel.Embeddings.ITextEmbeddingGenerationService,Microsoft.SemanticKernel.Data.MapFromResultToString,Microsoft.SemanticKernel.Data.MapFromResultToTextSearchResult,Microsoft.SemanticKernel.Data.VectorStoreTextSearchOptions) + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Core.dll + true + + \ No newline at end of file diff --git a/dotnet/src/SemanticKernel.Core/Data/TextSearch/TextSearchServiceCollectionExtensions.cs b/dotnet/src/SemanticKernel.Core/Data/TextSearch/TextSearchServiceCollectionExtensions.cs index c36c50bafa10..4f163b69c323 100644 --- a/dotnet/src/SemanticKernel.Core/Data/TextSearch/TextSearchServiceCollectionExtensions.cs +++ b/dotnet/src/SemanticKernel.Core/Data/TextSearch/TextSearchServiceCollectionExtensions.cs @@ -39,29 +39,11 @@ public static class TextSearchServiceCollectionExtensions resultMapper ??= sp.GetService(); options ??= sp.GetService(); - var vectorizableTextSearch = sp.GetService>(); - if (vectorizableTextSearch is not null) - { - return new VectorStoreTextSearch( - vectorizableTextSearch, - stringMapper, - resultMapper, - options); - } - - var vectorizedSearch = sp.GetService>(); - var generationService = sp.GetService(); - if (vectorizedSearch is not null && generationService is not null) - { - return new VectorStoreTextSearch( - vectorizedSearch, - generationService, - stringMapper, - resultMapper, - options); - } + var vectorSearch = sp.GetService>(); - throw new InvalidOperationException("No IVectorizableTextSearch or IVectorizedSearch and ITextEmbeddingGenerationService registered."); + return vectorSearch is null + ? throw new InvalidOperationException("No IVectorSearch registered.") + : new VectorStoreTextSearch(vectorSearch, stringMapper, resultMapper, options); }); return services; @@ -71,14 +53,14 @@ public static class TextSearchServiceCollectionExtensions /// Register a instance with the specified service ID. /// /// The to register the on. - /// Service id of the to use. + /// Service id of the to use. /// instance that can map a TRecord to a /// instance that can map a TRecord to a /// Options used to construct an instance of /// An optional service id to use as the service key. 
public static IServiceCollection AddVectorStoreTextSearch<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)] TRecord>( this IServiceCollection services, - string vectorizableTextSearchServiceId, + string vectorSearchServiceId, ITextSearchStringMapper? stringMapper = null, ITextSearchResultMapper? resultMapper = null, VectorStoreTextSearchOptions? options = null, @@ -95,17 +77,17 @@ public static class TextSearchServiceCollectionExtensions resultMapper ??= sp.GetService(); options ??= sp.GetService(); - var vectorizableTextSearch = sp.GetKeyedService>(vectorizableTextSearchServiceId); - if (vectorizableTextSearch is not null) + var vectorSearch = sp.GetKeyedService>(vectorSearchServiceId); + if (vectorSearch is not null) { return new VectorStoreTextSearch( - vectorizableTextSearch, + vectorSearch, stringMapper, resultMapper, options); } - throw new InvalidOperationException($"No IVectorizableTextSearch for service id {vectorizableTextSearchServiceId} registered."); + throw new InvalidOperationException($"No IVectorSearch for service id {vectorSearchServiceId} registered."); }); return services; @@ -115,15 +97,16 @@ public static class TextSearchServiceCollectionExtensions /// Register a instance with the specified service ID. /// /// The to register the on. - /// Service id of the to use. + /// Service id of the to use. /// Service id of the to use. /// instance that can map a TRecord to a /// instance that can map a TRecord to a /// Options used to construct an instance of /// An optional service id to use as the service key. + [Obsolete("Use the overload which doesn't accept a textEmbeddingGenerationServiceId, and configure an IEmbeddingGenerator instead with the collection represented by vectorSearchServiceId.")] public static IServiceCollection AddVectorStoreTextSearch<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)] TRecord>( this IServiceCollection services, - string vectorizedSearchServiceId, + string vectorSearchServiceId, string textEmbeddingGenerationServiceId, ITextSearchStringMapper? stringMapper = null, ITextSearchResultMapper? resultMapper = null, @@ -141,10 +124,10 @@ public static class TextSearchServiceCollectionExtensions resultMapper ??= sp.GetService(); options ??= sp.GetService(); - var vectorizedSearch = sp.GetKeyedService>(vectorizedSearchServiceId); + var vectorizedSearch = sp.GetKeyedService>(vectorSearchServiceId); if (vectorizedSearch is null) { - throw new InvalidOperationException($"No IVectorizedSearch for service id {vectorizedSearchServiceId} registered."); + throw new InvalidOperationException($"No IVectorizedSearch for service id {vectorSearchServiceId} registered."); } var generationService = sp.GetKeyedService(textEmbeddingGenerationServiceId); diff --git a/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs b/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs index dea90327d1ba..6d06c880462a 100644 --- a/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs +++ b/dotnet/src/SemanticKernel.Core/Data/TextSearch/VectorStoreTextSearch.cs @@ -20,22 +20,23 @@ public sealed class VectorStoreTextSearch<[DynamicallyAccessedMembers(Dynamicall { /// /// Create an instance of the with the - /// provided for performing searches and + /// provided for performing searches and /// for generating vectors from the text search query. /// - /// instance used to perform the search. + /// instance used to perform the search. 
/// instance used to create a vector from the text query. /// instance that can map a TRecord to a /// instance that can map a TRecord to a /// Options used to construct an instance of + [Obsolete("Use the constructor without an ITextEmbeddingGenerationService and pass a vectorSearch configured to perform embedding generation with IEmbeddingGenerator")] public VectorStoreTextSearch( - IVectorizedSearch vectorizedSearch, + IVectorSearch vectorSearch, ITextEmbeddingGenerationService textEmbeddingGeneration, MapFromResultToString stringMapper, MapFromResultToTextSearchResult resultMapper, VectorStoreTextSearchOptions? options = null) : this( - vectorizedSearch, + vectorSearch, textEmbeddingGeneration, stringMapper is null ? null : new TextSearchStringMapper(stringMapper), resultMapper is null ? null : new TextSearchResultMapper(resultMapper), @@ -45,25 +46,26 @@ public VectorStoreTextSearch( /// /// Create an instance of the with the - /// provided for performing searches and + /// provided for performing searches and /// for generating vectors from the text search query. /// - /// instance used to perform the search. + /// instance used to perform the search. /// instance used to create a vector from the text query. /// instance that can map a TRecord to a /// instance that can map a TRecord to a /// Options used to construct an instance of + [Obsolete("Use the constructor without an ITextEmbeddingGenerationService and pass a vectorSearch configured to perform embedding generation with IEmbeddingGenerator")] public VectorStoreTextSearch( - IVectorizedSearch vectorizedSearch, + IVectorSearch vectorSearch, ITextEmbeddingGenerationService textEmbeddingGeneration, ITextSearchStringMapper? stringMapper = null, ITextSearchResultMapper? resultMapper = null, VectorStoreTextSearchOptions? options = null) { - Verify.NotNull(vectorizedSearch); + Verify.NotNull(vectorSearch); Verify.NotNull(textEmbeddingGeneration); - this._vectorizedSearch = vectorizedSearch; + this._vectorSearch = vectorSearch; this._textEmbeddingGeneration = textEmbeddingGeneration; this._propertyReader = new Lazy(() => new TextSearchResultPropertyReader(typeof(TRecord))); this._stringMapper = stringMapper ?? this.CreateTextSearchStringMapper(); @@ -75,17 +77,17 @@ public VectorStoreTextSearch( /// provided for performing searches and /// for generating vectors from the text search query. /// - /// instance used to perform the text search. + /// instance used to perform the text search. /// instance that can map a TRecord to a /// instance that can map a TRecord to a /// Options used to construct an instance of public VectorStoreTextSearch( - IVectorizableTextSearch vectorizableTextSearch, + IVectorSearch vectorSearch, MapFromResultToString stringMapper, MapFromResultToTextSearchResult resultMapper, VectorStoreTextSearchOptions? options = null) : this( - vectorizableTextSearch, + vectorSearch, new TextSearchStringMapper(stringMapper), new TextSearchResultMapper(resultMapper), options) @@ -97,19 +99,19 @@ public VectorStoreTextSearch( /// provided for performing searches and /// for generating vectors from the text search query. /// - /// instance used to perform the text search. + /// instance used to perform the text search. /// instance that can map a TRecord to a /// instance that can map a TRecord to a /// Options used to construct an instance of public VectorStoreTextSearch( - IVectorizableTextSearch vectorizableTextSearch, + IVectorSearch vectorSearch, ITextSearchStringMapper? stringMapper = null, ITextSearchResultMapper? 
resultMapper = null, VectorStoreTextSearchOptions? options = null) { - Verify.NotNull(vectorizableTextSearch); + Verify.NotNull(vectorSearch); - this._vectorizableTextSearch = vectorizableTextSearch; + this._vectorSearch = vectorSearch; this._propertyReader = new Lazy(() => new TextSearchResultPropertyReader(typeof(TRecord))); this._stringMapper = stringMapper ?? this.CreateTextSearchStringMapper(); this._resultMapper = resultMapper ?? this.CreateTextSearchResultMapper(); @@ -140,9 +142,8 @@ public Task> GetSearchResultsAsync(string query, Tex } #region private - private readonly IVectorizedSearch? _vectorizedSearch; private readonly ITextEmbeddingGenerationService? _textEmbeddingGeneration; - private readonly IVectorizableTextSearch? _vectorizableTextSearch; + private readonly IVectorSearch? _vectorSearch; private readonly ITextSearchStringMapper _stringMapper; private readonly ITextSearchResultMapper _resultMapper; private readonly Lazy _propertyReader; @@ -205,11 +206,11 @@ private async IAsyncEnumerable> ExecuteVectorSearchA Skip = searchOptions.Skip, }; - if (this._vectorizedSearch is not null) + if (this._textEmbeddingGeneration is not null) { var vectorizedQuery = await this._textEmbeddingGeneration!.GenerateEmbeddingAsync(query, cancellationToken: cancellationToken).ConfigureAwait(false); - await foreach (var result in this._vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, searchOptions.Top, vectorSearchOptions, cancellationToken).ConfigureAwait(false)) + await foreach (var result in this._vectorSearch!.SearchEmbeddingAsync(vectorizedQuery, searchOptions.Top, vectorSearchOptions, cancellationToken).ConfigureAwait(false)) { yield return result; } @@ -217,7 +218,7 @@ private async IAsyncEnumerable> ExecuteVectorSearchA yield break; } - await foreach (var result in this._vectorizableTextSearch!.VectorizableTextSearchAsync(query, searchOptions.Top, vectorSearchOptions, cancellationToken).ConfigureAwait(false)) + await foreach (var result in this._vectorSearch!.SearchAsync(query, searchOptions.Top, vectorSearchOptions, cancellationToken).ConfigureAwait(false)) { yield return result; } diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/TextSearchServiceCollectionExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/TextSearchServiceCollectionExtensionsTests.cs index c890bd0f29f6..04e51b6bbb45 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Data/TextSearchServiceCollectionExtensionsTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Data/TextSearchServiceCollectionExtensionsTests.cs @@ -10,45 +10,23 @@ using Xunit; namespace SemanticKernel.UnitTests.Data; + public class TextSearchServiceCollectionExtensionsTests : VectorStoreTextSearchTestBase { [Fact] - public void AddVectorStoreTextSearchWithIVectorizableTextSearch() + public void AddVectorStoreTextSearch() { // Arrange - var services = new ServiceCollection(); - var vectorStore = new InMemoryVectorStore(); - var vectorSearch = vectorStore.GetCollection("records"); - var stringMapper = new DataModelTextSearchStringMapper(); - var resultMapper = new DataModelTextSearchResultMapper(); - var vectorizableTextSearch = new VectorizedSearchWrapper(vectorSearch, new MockTextEmbeddingGenerationService()); - - // Act - services.AddSingleton>(vectorizableTextSearch); - services.AddSingleton(stringMapper); - services.AddSingleton(resultMapper); - services.AddVectorStoreTextSearch(); - - // Assert - var serviceProvider = services.BuildServiceProvider(); - var result = serviceProvider.GetRequiredService>(); - Assert.NotNull(result); 
- } + using var embeddingGenerator = new MockTextEmbeddingGenerator(); - [Fact] - public void AddVectorStoreTextSearchWithIVectorizedSearch() - { - // Arrange var services = new ServiceCollection(); - var vectorStore = new InMemoryVectorStore(); - var vectorSearch = vectorStore.GetCollection("records"); + var vectorStore = new InMemoryVectorStore(new() { EmbeddingGenerator = embeddingGenerator }); + var collection = vectorStore.GetCollection("records"); var stringMapper = new DataModelTextSearchStringMapper(); var resultMapper = new DataModelTextSearchResultMapper(); - var textGeneration = new MockTextEmbeddingGenerationService(); // Act - services.AddSingleton>(vectorSearch); - services.AddSingleton(textGeneration); + services.AddSingleton>(collection); services.AddSingleton(stringMapper); services.AddSingleton(resultMapper); services.AddVectorStoreTextSearch(); @@ -60,36 +38,17 @@ public void AddVectorStoreTextSearchWithIVectorizedSearch() } [Fact] - public void AddVectorStoreTextSearchWithIVectorizableTextSearchAndNoMappers() + public void AddVectorStoreTextSearchWithNoMappers() { // Arrange - var services = new ServiceCollection(); - var vectorStore = new InMemoryVectorStore(); - var vectorSearch = vectorStore.GetCollection("records"); - var vectorizableTextSearch = new VectorizedSearchWrapper(vectorSearch, new MockTextEmbeddingGenerationService()); + using var embeddingGenerator = new MockTextEmbeddingGenerator(); - // Act - services.AddSingleton>(vectorizableTextSearch); - services.AddVectorStoreTextSearch(); - - // Assert - var serviceProvider = services.BuildServiceProvider(); - var result = serviceProvider.GetRequiredService>(); - Assert.NotNull(result); - } - - [Fact] - public void AddVectorStoreTextSearchWithIVectorizedSearchAndNoMappers() - { - // Arrange var services = new ServiceCollection(); - var vectorStore = new InMemoryVectorStore(); - var vectorSearch = vectorStore.GetCollection("records"); - var textGeneration = new MockTextEmbeddingGenerationService(); + var vectorStore = new InMemoryVectorStore(new() { EmbeddingGenerator = embeddingGenerator }); + var collection = vectorStore.GetCollection("records"); // Act - services.AddSingleton>(vectorSearch); - services.AddSingleton(textGeneration); + services.AddSingleton>(collection); services.AddVectorStoreTextSearch(); // Assert @@ -99,17 +58,18 @@ public void AddVectorStoreTextSearchWithIVectorizedSearchAndNoMappers() } [Fact] - public void AddVectorStoreTextSearchWithKeyedIVectorizableTextSearch() + public void AddVectorStoreTextSearchWithKeyedIVectorSearch() { // Arrange + using var embeddingGenerator = new MockTextEmbeddingGenerator(); + var services = new ServiceCollection(); - var vectorStore = new InMemoryVectorStore(); - var vectorSearch = vectorStore.GetCollection("records"); - var vectorizableTextSearch1 = new VectorizedSearchWrapper(vectorSearch, new MockTextEmbeddingGenerationService()); + var vectorStore = new InMemoryVectorStore(new() { EmbeddingGenerator = embeddingGenerator }); + var collection = vectorStore.GetCollection("records"); // Act - services.AddKeyedSingleton>("vts1", vectorizableTextSearch1); - services.AddVectorStoreTextSearch("vts1"); + services.AddKeyedSingleton>("vs1", collection); + services.AddVectorStoreTextSearch("vs1"); // Assert var serviceProvider = services.BuildServiceProvider(); @@ -118,62 +78,64 @@ public void AddVectorStoreTextSearchWithKeyedIVectorizableTextSearch() } [Fact] - public void AddVectorStoreTextSearchFailsMissingKeyedVectorizableTextSearch() + public void 
AddVectorStoreTextSearchFailsMissingKeyedIVectorSearch() { // Arrange + using var embeddingGenerator = new MockTextEmbeddingGenerator(); + var services = new ServiceCollection(); - var vectorStore = new InMemoryVectorStore(); - var vectorSearch = vectorStore.GetCollection("records"); - var vectorizableTextSearch1 = new VectorizedSearchWrapper(vectorSearch, new MockTextEmbeddingGenerationService()); + var vectorStore = new InMemoryVectorStore(new() { EmbeddingGenerator = embeddingGenerator }); + var collection = vectorStore.GetCollection("records"); // Act - services.AddKeyedSingleton>("vts1", vectorizableTextSearch1); - services.AddVectorStoreTextSearch("vts2"); + services.AddKeyedSingleton>("vs1", collection); + services.AddVectorStoreTextSearch("vs2"); // Assert var serviceProvider = services.BuildServiceProvider(); Assert.Throws(() => serviceProvider.GetRequiredService>()); } +#pragma warning disable CS0618 // Type or member is obsolete [Fact] - public void AddVectorStoreTextSearchWithKeyedIVectorizedSearch() + public void AddVectorStoreTextSearchWithKeyedVectorSearchAndEmbeddingGenerationService() { // Arrange var services = new ServiceCollection(); var vectorStore = new InMemoryVectorStore(); - var vectorSearch = vectorStore.GetCollection("records"); - var textGeneration = new MockTextEmbeddingGenerationService(); + var collection = vectorStore.GetCollection("records"); + using var generator = new MockTextEmbeddingGenerator(); // Act - services.AddKeyedSingleton>("vs1", vectorSearch); - services.AddKeyedSingleton("tegs1", textGeneration); + services.AddKeyedSingleton>("vs1", collection); + services.AddKeyedSingleton("tegs1", generator); - services.AddVectorStoreTextSearch("vs1", "tegs1"); + services.AddVectorStoreTextSearch("vs1", "tegs1"); // Assert var serviceProvider = services.BuildServiceProvider(); - var result = serviceProvider.GetRequiredService>(); + var result = serviceProvider.GetRequiredService>(); Assert.NotNull(result); } [Fact] - public void AddVectorStoreTextSearchFailsMissingKeyedVectorizedSearch() + public void AddVectorStoreTextSearchFailsMissingKeyedVectorSearch() { // Arrange var services = new ServiceCollection(); var vectorStore = new InMemoryVectorStore(); - var vectorSearch = vectorStore.GetCollection("records"); - var textGeneration = new MockTextEmbeddingGenerationService(); + var collection = vectorStore.GetCollection("records"); + using var textGeneration = new MockTextEmbeddingGenerator(); // Act - services.AddKeyedSingleton>("vs1", vectorSearch); + services.AddKeyedSingleton>("vs1", collection); services.AddKeyedSingleton("tegs1", textGeneration); - services.AddVectorStoreTextSearch("vs2", "tegs1"); + services.AddVectorStoreTextSearch("vs2", "tegs1"); // Assert var serviceProvider = services.BuildServiceProvider(); - Assert.Throws(() => serviceProvider.GetRequiredService>()); + Assert.Throws(() => serviceProvider.GetRequiredService>()); } [Fact] @@ -182,17 +144,18 @@ public void AddVectorStoreTextSearchFailsMissingKeyedTextEmbeddingGenerationServ // Arrange var services = new ServiceCollection(); var vectorStore = new InMemoryVectorStore(); - var vectorSearch = vectorStore.GetCollection("records"); - var textGeneration = new MockTextEmbeddingGenerationService(); + var vectorSearch = vectorStore.GetCollection("records"); + using var textGeneration = new MockTextEmbeddingGenerator(); // Act - services.AddKeyedSingleton>("vs1", vectorSearch); + services.AddKeyedSingleton>("vs1", vectorSearch); services.AddKeyedSingleton("tegs1", textGeneration); - 
services.AddVectorStoreTextSearch("vs1", "tegs2"); + services.AddVectorStoreTextSearch("vs1", "tegs2"); // Assert var serviceProvider = services.BuildServiceProvider(); - Assert.Throws(() => serviceProvider.GetRequiredService>()); + Assert.Throws(() => serviceProvider.GetRequiredService>()); } +#pragma warning restore CS0618 // Type or member is obsolete } diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs index ccb606ce9465..c6feb2e0d047 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTestBase.cs @@ -3,9 +3,9 @@ using System; using System.Collections.Generic; using System.Collections.ObjectModel; -using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.InMemory; @@ -23,31 +23,31 @@ public class VectorStoreTextSearchTestBase /// /// Create a from a . /// - public static async Task> CreateVectorStoreTextSearchFromVectorizedSearchAsync() + [Obsolete("VectorStoreTextSearch with ITextEmbeddingGenerationService is obsolete")] + public static async Task> CreateVectorStoreTextSearchWithEmbeddingGenerationServiceAsync() { var vectorStore = new InMemoryVectorStore(); - var vectorSearch = vectorStore.GetCollection("records"); + var vectorSearch = vectorStore.GetCollection("records"); var stringMapper = new DataModelTextSearchStringMapper(); var resultMapper = new DataModelTextSearchResultMapper(); - var embeddingService = new MockTextEmbeddingGenerationService(); + using var embeddingService = new MockTextEmbeddingGenerator(); await AddRecordsAsync(vectorSearch, embeddingService); - var sut = new VectorStoreTextSearch(vectorSearch, embeddingService, stringMapper, resultMapper); + var sut = new VectorStoreTextSearch(vectorSearch, embeddingService, stringMapper, resultMapper); return sut; } /// /// Create a from a . /// - public static async Task> CreateVectorStoreTextSearchFromVectorizableTextSearchAsync() + public static async Task> CreateVectorStoreTextSearchAsync() { - var vectorStore = new InMemoryVectorStore(); + using var embeddingGenerator = new MockTextEmbeddingGenerator(); + var vectorStore = new InMemoryVectorStore(new() { EmbeddingGenerator = embeddingGenerator }); var vectorSearch = vectorStore.GetCollection("records"); var stringMapper = new DataModelTextSearchStringMapper(); var resultMapper = new DataModelTextSearchResultMapper(); - var embeddingService = new MockTextEmbeddingGenerationService(); - await AddRecordsAsync(vectorSearch, embeddingService); - var vectorizableTextSearch = new VectorizedSearchWrapper(vectorSearch, new MockTextEmbeddingGenerationService()); - var sut = new VectorStoreTextSearch(vectorizableTextSearch, stringMapper, resultMapper); + await AddRecordsAsync(vectorSearch); + var sut = new VectorStoreTextSearch(vectorSearch, stringMapper, resultMapper); return sut; } @@ -56,13 +56,34 @@ public static async Task> CreateVectorStoreText /// public static async Task AddRecordsAsync( IVectorStoreRecordCollection recordCollection, - ITextEmbeddingGenerationService embeddingService, int? 
count = 10) { await recordCollection.CreateCollectionIfNotExistsAsync(); for (var i = 0; i < count; i++) { DataModel dataModel = new() + { + Key = Guid.NewGuid(), + Text = $"Record {i}", + Tag = i % 2 == 0 ? "Even" : "Odd", + Embedding = $"Record {i}" + }; + await recordCollection.UpsertAsync(dataModel); + } + } + + /// + /// Add sample records to the vector store record collection. + /// + public static async Task AddRecordsAsync( + IVectorStoreRecordCollection recordCollection, + ITextEmbeddingGenerationService embeddingService, + int? count = 10) + { + await recordCollection.CreateCollectionIfNotExistsAsync(); + for (var i = 0; i < count; i++) + { + DataModelWithRawEmbedding dataModel = new() { Key = Guid.NewGuid(), Text = $"Record {i}", @@ -80,13 +101,12 @@ public sealed class DataModelTextSearchStringMapper : ITextSearchStringMapper { /// public string MapFromResultToString(object result) - { - if (result is DataModel dataModel) + => result switch { - return dataModel.Text; - } - throw new ArgumentException("Invalid result type."); - } + DataModel dataModel => dataModel.Text, + DataModelWithRawEmbedding dataModelWithRawEmbedding => dataModelWithRawEmbedding.Text, + _ => throw new ArgumentException("Invalid result type.") + }; } /// @@ -96,20 +116,26 @@ public sealed class DataModelTextSearchResultMapper : ITextSearchResultMapper { /// public TextSearchResult MapFromResultToTextSearchResult(object result) - { - if (result is DataModel dataModel) + => result switch { - return new TextSearchResult(value: dataModel.Text) { Name = dataModel.Key.ToString() }; - } - throw new ArgumentException("Invalid result type."); - } + DataModel dataModel => new TextSearchResult(value: dataModel.Text) { Name = dataModel.Key.ToString() }, + DataModelWithRawEmbedding dataModelWithRawEmbedding => new TextSearchResult(value: dataModelWithRawEmbedding.Text) { Name = dataModelWithRawEmbedding.Key.ToString() }, + _ => throw new ArgumentException("Invalid result type.") + }; } /// /// Mock implementation of . /// - public sealed class MockTextEmbeddingGenerationService : ITextEmbeddingGenerationService + public sealed class MockTextEmbeddingGenerator : IEmbeddingGenerator>, ITextEmbeddingGenerationService { + public Task>> GenerateAsync(IEnumerable values, EmbeddingGenerationOptions? options = null, CancellationToken cancellationToken = default) + => Task.FromResult(new GeneratedEmbeddings>([new(new float[] { 0, 1, 2, 3 })])); + + public void Dispose() { } + + public object? GetService(Type serviceType, object? serviceKey = null) => null; + /// public IReadOnlyDictionary Attributes { get; } = ReadOnlyDictionary.Empty; @@ -122,29 +148,27 @@ public Task>> GenerateEmbeddingsAsync(IList } /// - /// Decorator for a that generates embeddings for text search queries. + /// Sample model class that represents a record entry. /// - public sealed class VectorizedSearchWrapper(IVectorizedSearch vectorizedSearch, ITextEmbeddingGenerationService textEmbeddingGeneration) : IVectorizableTextSearch + /// + /// Note that each property is decorated with an attribute that specifies how the property should be treated by the vector store. + /// This allows us to create a collection in the vector store and upsert and retrieve instances of this class without any further configuration. 
+ /// +#pragma warning disable CA1812 // Avoid uninstantiated internal classes + public sealed class DataModel +#pragma warning restore CA1812 // Avoid uninstantiated internal classes { - /// - public async IAsyncEnumerable> VectorizableTextSearchAsync(string searchText, int top, VectorSearchOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var vectorizedQuery = await textEmbeddingGeneration.GenerateEmbeddingAsync(searchText, cancellationToken: cancellationToken).ConfigureAwait(false); - await foreach (var result in vectorizedSearch.VectorizedSearchAsync(vectorizedQuery, top, options, cancellationToken)) - { - yield return result; - } - } + [VectorStoreRecordKey] + public Guid Key { get; init; } - /// - public object? GetService(Type serviceType, object? serviceKey = null) - { - ArgumentNullException.ThrowIfNull(serviceType); + [VectorStoreRecordData] + public required string Text { get; init; } - return - serviceKey is null && serviceType.IsInstanceOfType(this) ? this : - vectorizedSearch.GetService(serviceType, serviceKey); - } + [VectorStoreRecordData(IsIndexed = true)] + public required string Tag { get; init; } + + [VectorStoreRecordVector(1536)] + public string? Embedding { get; init; } } /// @@ -155,7 +179,7 @@ public async IAsyncEnumerable> VectorizableTextSearc /// This allows us to create a collection in the vector store and upsert and retrieve instances of this class without any further configuration. /// #pragma warning disable CA1812 // Avoid uninstantiated internal classes - public sealed class DataModel + public sealed class DataModelWithRawEmbedding #pragma warning restore CA1812 // Avoid uninstantiated internal classes { [VectorStoreRecordKey] diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTests.cs index dcb2d310eda7..9737c52b2de0 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreTextSearchTests.cs @@ -1,4 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. + using System; using System.Linq; using System.Threading.Tasks; @@ -9,44 +10,46 @@ namespace SemanticKernel.UnitTests.Data; public class VectorStoreTextSearchTests : VectorStoreTextSearchTestBase { +#pragma warning disable CS0618 // VectorStoreTextSearch with ITextEmbeddingGenerationService is obsolete [Fact] - public void CanCreateVectorStoreTextSearchWithIVectorizedSearch() + public void CanCreateVectorStoreTextSearchWithEmbeddingGenerationService() { // Arrange. var vectorStore = new InMemoryVectorStore(); - var vectorSearch = vectorStore.GetCollection("records"); + var vectorSearch = vectorStore.GetCollection("records"); var stringMapper = new DataModelTextSearchStringMapper(); var resultMapper = new DataModelTextSearchResultMapper(); + using var embeddingGenerationService = new MockTextEmbeddingGenerator(); // Act. - var sut = new VectorStoreTextSearch(vectorSearch, new MockTextEmbeddingGenerationService(), stringMapper, resultMapper); + var sut = new VectorStoreTextSearch(vectorSearch, embeddingGenerationService, stringMapper, resultMapper); // Assert. Assert.NotNull(sut); } +#pragma warning restore CS0618 [Fact] - public void CanCreateVectorStoreTextSearchWithIVectorizableTextSearch() + public void CanCreateVectorStoreTextSearchWithIVectorSearch() { // Arrange. 
- var vectorStore = new InMemoryVectorStore(); + var vectorStore = new InMemoryVectorStore(new() { EmbeddingGenerator = new MockTextEmbeddingGenerator() }); var vectorSearch = vectorStore.GetCollection("records"); - var vectorizableTextSearch = new VectorizedSearchWrapper(vectorSearch, new MockTextEmbeddingGenerationService()); var stringMapper = new DataModelTextSearchStringMapper(); var resultMapper = new DataModelTextSearchResultMapper(); // Act. - var sut = new VectorStoreTextSearch(vectorizableTextSearch, stringMapper, resultMapper); + var sut = new VectorStoreTextSearch(vectorSearch, stringMapper, resultMapper); // Assert. Assert.NotNull(sut); } [Fact] - public async Task CanSearchWithVectorizedSearchAsync() + public async Task CanSearchAsync() { // Arrange. - var sut = await CreateVectorStoreTextSearchFromVectorizedSearchAsync(); + var sut = await CreateVectorStoreTextSearchAsync(); // Act. KernelSearchResults searchResults = await sut.SearchAsync("What is the Semantic Kernel?", new() { Top = 2, Skip = 0 }); @@ -56,10 +59,10 @@ public async Task CanSearchWithVectorizedSearchAsync() } [Fact] - public async Task CanGetTextSearchResultsWithVectorizedSearchAsync() + public async Task CanGetTextSearchResultsAsync() { // Arrange. - var sut = await CreateVectorStoreTextSearchFromVectorizedSearchAsync(); + var sut = await CreateVectorStoreTextSearchAsync(); // Act. KernelSearchResults searchResults = await sut.GetTextSearchResultsAsync("What is the Semantic Kernel?", new() { Top = 2, Skip = 0 }); @@ -69,10 +72,10 @@ public async Task CanGetTextSearchResultsWithVectorizedSearchAsync() } [Fact] - public async Task CanGetSearchResultsWithVectorizedSearchAsync() + public async Task CanGetSearchResultAsync() { // Arrange. - var sut = await CreateVectorStoreTextSearchFromVectorizedSearchAsync(); + var sut = await CreateVectorStoreTextSearchAsync(); // Act. KernelSearchResults searchResults = await sut.GetSearchResultsAsync("What is the Semantic Kernel?", new() { Top = 2, Skip = 0 }); @@ -81,11 +84,12 @@ public async Task CanGetSearchResultsWithVectorizedSearchAsync() Assert.Equal(2, results.Count); } +#pragma warning disable CS0618 // VectorStoreTextSearch with ITextEmbeddingGenerationService is obsolete [Fact] - public async Task CanSearchWithVectorizableTextSearchAsync() + public async Task CanSearchWithEmbeddingGenerationServiceAsync() { // Arrange. - var sut = await CreateVectorStoreTextSearchFromVectorizableTextSearchAsync(); + var sut = await CreateVectorStoreTextSearchWithEmbeddingGenerationServiceAsync(); // Act. KernelSearchResults searchResults = await sut.SearchAsync("What is the Semantic Kernel?", new() { Top = 2, Skip = 0 }); @@ -95,10 +99,10 @@ public async Task CanSearchWithVectorizableTextSearchAsync() } [Fact] - public async Task CanGetTextSearchResultsWithVectorizableTextSearchAsync() + public async Task CanGetTextSearchResultsWithEmbeddingGenerationServiceAsync() { // Arrange. - var sut = await CreateVectorStoreTextSearchFromVectorizableTextSearchAsync(); + var sut = await CreateVectorStoreTextSearchWithEmbeddingGenerationServiceAsync(); // Act. KernelSearchResults searchResults = await sut.GetTextSearchResultsAsync("What is the Semantic Kernel?", new() { Top = 2, Skip = 0 }); @@ -108,10 +112,10 @@ public async Task CanGetTextSearchResultsWithVectorizableTextSearchAsync() } [Fact] - public async Task CanGetSearchResultsWithVectorizableTextSearchAsync() + public async Task CanGetSearchResultsWithEmbeddingGenerationServiceAsync() { // Arrange. 
- var sut = await CreateVectorStoreTextSearchFromVectorizableTextSearchAsync(); + var sut = await CreateVectorStoreTextSearchWithEmbeddingGenerationServiceAsync(); // Act. KernelSearchResults searchResults = await sut.GetSearchResultsAsync("What is the Semantic Kernel?", new() { Top = 2, Skip = 0 }); @@ -119,12 +123,13 @@ public async Task CanGetSearchResultsWithVectorizableTextSearchAsync() Assert.Equal(2, results.Count); } +#pragma warning restore CS0618 // VectorStoreTextSearch with ITextEmbeddingGenerationService is obsolete [Fact] public async Task CanFilterGetSearchResultsWithVectorizedSearchAsync() { // Arrange. - var sut = await CreateVectorStoreTextSearchFromVectorizedSearchAsync(); + var sut = await CreateVectorStoreTextSearchAsync(); TextSearchFilter evenFilter = new(); evenFilter.Equality("Tag", "Even"); TextSearchFilter oddFilter = new(); diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoDataConformanceTests.cs index 1751a4fd02fd..69861413facc 100644 --- a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoDataConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoDataConformanceTests.cs @@ -17,7 +17,7 @@ public class AzureAISearchNoDataConformanceTests(AzureAISearchNoDataConformanceT public new class Fixture : NoDataConformanceTests.Fixture { - protected override string CollectionName => "nodata-" + _testIndexPostfix; + public override string CollectionName => "nodata-" + _testIndexPostfix; public override TestStore TestStore => AzureAISearchTestStore.Instance; } diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoVectorConformanceTests.cs index c14d6d622665..a48f648d079f 100644 --- a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoVectorConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/CRUD/AzureAISearchNoVectorConformanceTests.cs @@ -17,7 +17,7 @@ public class AzureAISearchNoVectorConformanceTests(AzureAISearchNoVectorConforma public new class Fixture : NoVectorConformanceTests.Fixture { - protected override string CollectionName => "novector-" + _testIndexPostfix; + public override string CollectionName => "novector-" + _testIndexPostfix; public override TestStore TestStore => AzureAISearchTestStore.Instance; } diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicFilterTests.cs index 6a7e8a1df408..eb6de0cf988c 100644 --- a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicFilterTests.cs @@ -19,6 +19,6 @@ public override Task Contains_over_inline_int_array() public override TestStore TestStore => AzureAISearchTestStore.Instance; // Azure AI search only supports lowercase letters, digits or dashes. 
- protected override string CollectionName => "filter-tests"; + public override string CollectionName => "filter-tests"; } } diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicQueryTests.cs index 2a38ca59dacc..38dd343a8aa6 100644 --- a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/Filter/AzureAISearchBasicQueryTests.cs @@ -19,6 +19,6 @@ public override Task Contains_over_inline_int_array() public override TestStore TestStore => AzureAISearchTestStore.Instance; // Azure AI search only supports lowercase letters, digits or dashes. - protected override string CollectionName => "query-tests"; + public override string CollectionName => "query-tests"; } } diff --git a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/HybridSearch/AzureAISearchKeywordVectorizedHybridSearchTests.cs b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/HybridSearch/AzureAISearchKeywordVectorizedHybridSearchTests.cs index 3860489b9471..edde0f48405a 100644 --- a/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/HybridSearch/AzureAISearchKeywordVectorizedHybridSearchTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/AzureAISearchIntegrationTests/HybridSearch/AzureAISearchKeywordVectorizedHybridSearchTests.cs @@ -28,7 +28,7 @@ public class AzureAISearchKeywordVectorizedHybridSearchTests( public override TestStore TestStore => AzureAISearchTestStore.Instance; // Azure AI search only supports lowercase letters, digits or dashes. - protected override string CollectionName => "vecstring-hybrid-search-" + _testIndexPostfix; + public override string CollectionName => "vecstring-hybrid-search-" + _testIndexPostfix; } public new class MultiTextFixture : KeywordVectorizedHybridSearchComplianceTests.MultiTextFixture @@ -36,6 +36,6 @@ public class AzureAISearchKeywordVectorizedHybridSearchTests( public override TestStore TestStore => AzureAISearchTestStore.Instance; // Azure AI search only supports lowercase letters, digits or dashes. - protected override string CollectionName => "multitext-hybrid-search-" + _testIndexPostfix; + public override string CollectionName => "multitext-hybrid-search-" + _testIndexPostfix; } } diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CosmosMongoEmbeddingGenerationTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CosmosMongoEmbeddingGenerationTests.cs new file mode 100644 index 000000000000..45e6c28d68b4 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosMongoDBIntegrationTests/CosmosMongoEmbeddingGenerationTests.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using CosmosMongoDBIntegrationTests.Support; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel; +using VectorDataSpecificationTests; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace CosmosMongoDBIntegrationTests; + +public class CosmosMongoEmbeddingGenerationTests(CosmosMongoEmbeddingGenerationTests.Fixture fixture) + : EmbeddingGenerationTests(fixture), IClassFixture +{ + public new class Fixture : EmbeddingGenerationTests.Fixture + { + public override TestStore TestStore => CosmosMongoDBTestStore.Instance; + + public override IVectorStore CreateVectorStore(IEmbeddingGenerator? embeddingGenerator) + => CosmosMongoDBTestStore.Instance.GetVectorStore(new() { EmbeddingGenerator = embeddingGenerator }); + + public override Func[] DependencyInjectionStoreRegistrationDelegates => + [ + services => services + .AddSingleton(CosmosMongoDBTestStore.Instance.Database) + .AddAzureCosmosDBMongoDBVectorStore() + ]; + + public override Func[] DependencyInjectionCollectionRegistrationDelegates => + [ + services => services + .AddSingleton(CosmosMongoDBTestStore.Instance.Database) + .AddAzureCosmosDBMongoDBVectorStoreRecordCollection(this.CollectionName) + ]; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoDataConformanceTests.cs index 459b2f6344c7..9c2f4659a6bb 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoDataConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoDataConformanceTests.cs @@ -12,6 +12,6 @@ public class CosmosNoSQLNoDataConformanceTests(CosmosNoSQLNoDataConformanceTests { public new class Fixture : NoDataConformanceTests.Fixture { - public override TestStore TestStore => CosmosNoSqlTestStore.Instance; + public override TestStore TestStore => CosmosNoSQLTestStore.Instance; } } diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoVectorConformanceTests.cs index 29f2b80ac866..b6581771ae05 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoVectorConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CRUD/CosmosNoSQLNoVectorConformanceTests.cs @@ -12,6 +12,6 @@ public class CosmosNoSQLNoVectorConformanceTests(CosmosNoSQLNoVectorConformanceT { public new class Fixture : NoVectorConformanceTests.Fixture { - public override TestStore TestStore => CosmosNoSqlTestStore.Instance; + public override TestStore TestStore => CosmosNoSQLTestStore.Instance; } } diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CosmosNoSQLEmbeddingGenerationTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CosmosNoSQLEmbeddingGenerationTests.cs new file mode 100644 index 000000000000..3d8be02c4552 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/CosmosNoSQLEmbeddingGenerationTests.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using CosmosNoSQLIntegrationTests.Support; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel; +using VectorDataSpecificationTests; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace CosmosNoSQLIntegrationTests; + +public class CosmosNoSQLEmbeddingGenerationTests(CosmosNoSQLEmbeddingGenerationTests.Fixture fixture) + : EmbeddingGenerationTests(fixture), IClassFixture +{ + public new class Fixture : EmbeddingGenerationTests.Fixture + { + public override TestStore TestStore => CosmosNoSQLTestStore.Instance; + + public override IVectorStore CreateVectorStore(IEmbeddingGenerator? embeddingGenerator) + => CosmosNoSQLTestStore.Instance.GetVectorStore(new() { EmbeddingGenerator = embeddingGenerator }); + + public override Func[] DependencyInjectionStoreRegistrationDelegates => + [ + services => services + .AddSingleton(CosmosNoSQLTestStore.Instance.Database) + .AddAzureCosmosDBNoSQLVectorStore() + ]; + + public override Func[] DependencyInjectionCollectionRegistrationDelegates => + [ + services => services + .AddSingleton(CosmosNoSQLTestStore.Instance.Database) + .AddAzureCosmosDBNoSQLVectorStoreRecordCollection(this.CollectionName) + ]; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicFilterTests.cs index 4058ea8674a7..266145d86485 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicFilterTests.cs @@ -12,6 +12,6 @@ public class CosmosNoSQLBasicFilterTests(CosmosNoSQLBasicFilterTests.Fixture fix { public new class Fixture : BasicFilterTests.Fixture { - public override TestStore TestStore => CosmosNoSqlTestStore.Instance; + public override TestStore TestStore => CosmosNoSQLTestStore.Instance; } } diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicQueryTests.cs index 65f1bb2101bd..bd1bd9d9fd9a 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Filter/CosmosNoSQLBasicQueryTests.cs @@ -12,6 +12,6 @@ public class CosmosNoSQLBasicQueryTests(CosmosNoSQLBasicQueryTests.Fixture fixtu { public new class Fixture : BasicQueryTests.QueryFixture { - public override TestStore TestStore => CosmosNoSqlTestStore.Instance; + public override TestStore TestStore => CosmosNoSQLTestStore.Instance; } } diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/HybridSearch/CosmosNoSQLKeywordVectorizedHybridSearchTests.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/HybridSearch/CosmosNoSQLKeywordVectorizedHybridSearchTests.cs index 24935b4ffc2d..081df134257f 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/HybridSearch/CosmosNoSQLKeywordVectorizedHybridSearchTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/HybridSearch/CosmosNoSQLKeywordVectorizedHybridSearchTests.cs @@ -16,11 +16,11 @@ public class CosmosNoSQLKeywordVectorizedHybridSearchTests( { public new class VectorAndStringFixture 
: KeywordVectorizedHybridSearchComplianceTests.VectorAndStringFixture { - public override TestStore TestStore => CosmosNoSqlTestStore.Instance; + public override TestStore TestStore => CosmosNoSQLTestStore.Instance; } public new class MultiTextFixture : KeywordVectorizedHybridSearchComplianceTests.MultiTextFixture { - public override TestStore TestStore => CosmosNoSqlTestStore.Instance; + public override TestStore TestStore => CosmosNoSQLTestStore.Instance; } } diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLFixture.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLFixture.cs index 9ddaad05be85..f1823af21bb8 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLFixture.cs @@ -6,5 +6,5 @@ namespace CosmosNoSQLIntegrationTests.Support; public class CosmosNoSQLFixture : VectorStoreFixture { - public override TestStore TestStore => CosmosNoSqlTestStore.Instance; + public override TestStore TestStore => CosmosNoSQLTestStore.Instance; } diff --git a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestStore.cs b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestStore.cs index c45bac264fbd..fe72f99d8695 100644 --- a/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/CosmosNoSQLIntegrationTests/Support/CosmosNoSQLTestStore.cs @@ -13,9 +13,9 @@ namespace CosmosNoSQLIntegrationTests.Support; #pragma warning disable CA1001 // Type owns disposable fields (_connection) but is not disposable -internal sealed class CosmosNoSqlTestStore : TestStore +internal sealed class CosmosNoSQLTestStore : TestStore { - public static CosmosNoSqlTestStore Instance { get; } = new(); + public static CosmosNoSQLTestStore Instance { get; } = new(); private CosmosClient? _client; private Database? 
_database;
@@ -35,7 +35,7 @@ public override IVectorStore DefaultVectorStore
     public AzureCosmosDBNoSQLVectorStore GetVectorStore(AzureCosmosDBNoSQLVectorStoreOptions options)
         => new(this.Database, options);
 
-    private CosmosNoSqlTestStore()
+    private CosmosNoSQLTestStore()
     {
     }
 
diff --git a/dotnet/src/VectorDataIntegrationTests/Directory.Build.props b/dotnet/src/VectorDataIntegrationTests/Directory.Build.props
index f5d133b5fd9f..eacdeec35e93 100644
--- a/dotnet/src/VectorDataIntegrationTests/Directory.Build.props
+++ b/dotnet/src/VectorDataIntegrationTests/Directory.Build.props
@@ -6,7 +6,9 @@
     $(NoWarn);CA1707
     $(NoWarn);CA1716
     $(NoWarn);CA1720
+    $(NoWarn);CA1721
     $(NoWarn);CA1861
+    $(NoWarn);CA1863
     $(NoWarn);CA2007;VSTHRD111
     $(NoWarn);CS1591
     $(NoWarn);IDE1006
diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicQueryTests.cs
index 8481cc14183d..432fbdff3fbd 100644
--- a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicQueryTests.cs
+++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Filter/InMemoryBasicQueryTests.cs
@@ -5,7 +5,7 @@
 using VectorDataSpecificationTests.Support;
 using Xunit;
 
-namespace PostgresIntegrationTests.Filter;
+namespace InMemoryIntegrationTests.Filter;
 
 public class InMemoryBasicQueryTests(InMemoryBasicQueryTests.Fixture fixture)
     : BasicQueryTests(fixture), IClassFixture
diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/InMemoryEmbeddingGenerationTests.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/InMemoryEmbeddingGenerationTests.cs
new file mode 100644
index 000000000000..6d1cedeee544
--- /dev/null
+++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/InMemoryEmbeddingGenerationTests.cs
@@ -0,0 +1,53 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using InMemoryIntegrationTests.Support;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.VectorData;
+using VectorDataSpecificationTests;
+using VectorDataSpecificationTests.Support;
+using Xunit;
+
+namespace InMemoryIntegrationTests;
+
+public class InMemoryEmbeddingGenerationTests(InMemoryEmbeddingGenerationTests.Fixture fixture)
+    : EmbeddingGenerationTests(fixture), IClassFixture
+{
+    // InMemory doesn't allow accessing the same collection via different .NET types (it's unique in this).
+    // The following dynamic tests attempt to access the fixture collection - which is created with Record - via
+    // Dictionary.
+    public override Task SearchAsync_with_property_generator_dynamic() => Task.CompletedTask;
+    public override Task UpsertAsync_dynamic() => Task.CompletedTask;
+    public override Task UpsertAsync_batch_dynamic() => Task.CompletedTask;
+
+    // The same applies to the custom type test:
+    public override Task SearchAsync_with_custom_input_type() => Task.CompletedTask;
+
+    // The test relies on creating a new InMemoryVectorStore configured with a store-default generator, but with InMemory that store
+    // doesn't share the seeded data with the fixture store (since each InMemoryVectorStore has its own private data).
+    // Test coverage is already largely sufficient via the property and collection tests.
+ public override Task SearchAsync_with_store_generator() => Task.CompletedTask; + + public new class Fixture : EmbeddingGenerationTests.Fixture + { + public override TestStore TestStore => InMemoryTestStore.Instance; + + // Note that with InMemory specifically, we can't create a vector store with an embedding generator, since it wouldn't share the seeded data with the fixture store. + public override IVectorStore CreateVectorStore(IEmbeddingGenerator? embeddingGenerator) + => InMemoryTestStore.Instance.DefaultVectorStore; + + public override Func[] DependencyInjectionStoreRegistrationDelegates => + [ + // The InMemory DI methods register a new vector store instance, which doesn't share the collection seeded by the + // fixture and the test fails. + // services => services.AddInMemoryVectorStore() + ]; + + public override Func[] DependencyInjectionCollectionRegistrationDelegates => + [ + // The InMemory DI methods register a new vector store instance, which doesn't share the collection seeded by the + // fixture and the test fails. + // services => services.AddInMemoryVectorStoreRecordCollection(this.CollectionName) + ]; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemoryTestStore.cs b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemoryTestStore.cs index 246d5166c831..81f44e3339df 100644 --- a/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemoryTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/InMemoryIntegrationTests/Support/InMemoryTestStore.cs @@ -10,9 +10,12 @@ internal sealed class InMemoryTestStore : TestStore { public static InMemoryTestStore Instance { get; } = new(); - private InMemoryVectorStore _vectorStore = new(); + private InMemoryVectorStore _defaultVectorStore = new(); - public override IVectorStore DefaultVectorStore => this._vectorStore; + public override IVectorStore DefaultVectorStore => this._defaultVectorStore; + + public InMemoryVectorStore GetVectorStore(InMemoryVectorStoreOptions options) + => new(new() { EmbeddingGenerator = options.EmbeddingGenerator }); private InMemoryTestStore() { @@ -20,7 +23,7 @@ private InMemoryTestStore() protected override Task StartAsync() { - this._vectorStore = new InMemoryVectorStore(); + this._defaultVectorStore = new InMemoryVectorStore(); return Task.CompletedTask; } diff --git a/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/MongoDBEmbeddingGenerationTests.cs b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/MongoDBEmbeddingGenerationTests.cs new file mode 100644 index 000000000000..aa91f172d3b8 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/MongoDBIntegrationTests/MongoDBEmbeddingGenerationTests.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel; +using MongoDBIntegrationTests.Support; +using VectorDataSpecificationTests; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace MongoDBIntegrationTests; + +public class MongoDBEmbeddingGenerationTests(MongoDBEmbeddingGenerationTests.Fixture fixture) + : EmbeddingGenerationTests(fixture), IClassFixture +{ + public new class Fixture : EmbeddingGenerationTests.Fixture + { + public override TestStore TestStore => MongoDBTestStore.Instance; + + public override IVectorStore CreateVectorStore(IEmbeddingGenerator? 
embeddingGenerator) + => MongoDBTestStore.Instance.GetVectorStore(new() { EmbeddingGenerator = embeddingGenerator }); + + public override Func[] DependencyInjectionStoreRegistrationDelegates => + [ + services => services + .AddSingleton(MongoDBTestStore.Instance.Database) + .AddMongoDBVectorStore() + ]; + + public override Func[] DependencyInjectionCollectionRegistrationDelegates => + [ + services => services + .AddSingleton(MongoDBTestStore.Instance.Database) + .AddMongoDBVectorStoreRecordCollection(this.CollectionName) + ]; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicFilterTests.cs index 3aa0fa1006a4..b6a38a0ff09e 100644 --- a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicFilterTests.cs @@ -69,6 +69,6 @@ public override Task Legacy_AnyTagEqualTo_List() public override TestStore TestStore => PineconeTestStore.Instance; // https://docs.pinecone.io/troubleshooting/restrictions-on-index-names - protected override string CollectionName => "filter-tests"; + public override string CollectionName => "filter-tests"; } } diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicQueryTests.cs index d8dea0526b75..2ebf1e17b451 100644 --- a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Filter/PineconeBasicQueryTests.cs @@ -52,6 +52,6 @@ public override Task Contains_over_field_string_List() public override TestStore TestStore => PineconeTestStore.Instance; // https://docs.pinecone.io/troubleshooting/restrictions-on-index-names - protected override string CollectionName => "query-tests"; + public override string CollectionName => "query-tests"; } } diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/PineconeEmbeddingGenerationTests.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/PineconeEmbeddingGenerationTests.cs new file mode 100644 index 000000000000..642260212a2d --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/PineconeEmbeddingGenerationTests.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.Properties; +using Microsoft.SemanticKernel; +using PineconeIntegrationTests.Support; +using VectorDataSpecificationTests; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace PineconeIntegrationTests; + +public class PineconeEmbeddingGenerationTests(PineconeEmbeddingGenerationTests.Fixture fixture) + : EmbeddingGenerationTests(fixture), IClassFixture +{ + // Overriding since Pinecone requires collection names to only contain ASCII lowercase letters, digits and dashes. + public override async Task SearchAsync_without_generator_throws() + { + // The database doesn't support embedding generation, and no client-side generator has been configured at any level, + // so SearchAsync should throw. 
+ var collection = fixture.GetCollection(fixture.TestStore.DefaultVectorStore, fixture.CollectionName + "-without-generator"); + + var exception = await Assert.ThrowsAsync(() => collection.SearchAsync("foo", top: 1).ToListAsync().AsTask()); + + Assert.Equal(VectorDataStrings.NoEmbeddingGeneratorWasConfiguredForSearch, exception.Message); + } + + public new class Fixture : EmbeddingGenerationTests.Fixture + { + public override TestStore TestStore => PineconeTestStore.Instance; + + // https://docs.pinecone.io/troubleshooting/restrictions-on-index-names + public override string CollectionName => "embedding-generation-tests"; + + public override IVectorStore CreateVectorStore(IEmbeddingGenerator? embeddingGenerator) + => PineconeTestStore.Instance.GetVectorStore(new() { EmbeddingGenerator = embeddingGenerator }); + + public override Func[] DependencyInjectionStoreRegistrationDelegates => + [ + services => services + .AddSingleton(PineconeTestStore.Instance.Client) + .AddPineconeVectorStore() + ]; + + public override Func[] DependencyInjectionCollectionRegistrationDelegates => + [ + services => services + .AddSingleton(PineconeTestStore.Instance.Client) + .AddPineconeVectorStoreRecordCollection(this.CollectionName) + ]; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeTestStore.cs b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeTestStore.cs index 40d3e221e777..f97a1d0488f2 100644 --- a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/Support/PineconeTestStore.cs @@ -35,6 +35,9 @@ internal sealed class PineconeTestStore : TestStore public override IVectorStore DefaultVectorStore => this._defaultVectorStore ?? throw new InvalidOperationException("Not initialized"); + public PineconeVectorStore GetVectorStore(PineconeVectorStoreOptions options) + => new(this.Client, options); + // Pinecone does not support distance functions other than PGA which is always enabled. public override string DefaultIndexKind => ""; diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/PostgresEmbeddingGenerationTests.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/PostgresEmbeddingGenerationTests.cs new file mode 100644 index 000000000000..a28e06e425b1 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/PostgresEmbeddingGenerationTests.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel; +using PostgresIntegrationTests.Support; +using VectorDataSpecificationTests; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace PostgresIntegrationTests; + +public class PostgresEmbeddingGenerationTests(PostgresEmbeddingGenerationTests.Fixture fixture) + : EmbeddingGenerationTests(fixture), IClassFixture +{ + public new class Fixture : EmbeddingGenerationTests.Fixture + { + public override TestStore TestStore => PostgresTestStore.Instance; + + public override IVectorStore CreateVectorStore(IEmbeddingGenerator? 
embeddingGenerator) + => PostgresTestStore.Instance.GetVectorStore(new() { EmbeddingGenerator = embeddingGenerator }); + + public override Func[] DependencyInjectionStoreRegistrationDelegates => + [ + services => services + .AddSingleton(PostgresTestStore.Instance.DataSource) + .AddPostgresVectorStore() + ]; + + public override Func[] DependencyInjectionCollectionRegistrationDelegates => + [ + services => services + .AddSingleton(PostgresTestStore.Instance.DataSource) + .AddPostgresVectorStoreRecordCollection(this.CollectionName) + ]; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/PostgresIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/PostgresIntegrationTests.csproj index 0a039793dc49..d3eb1b72853c 100644 --- a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/PostgresIntegrationTests.csproj +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/PostgresIntegrationTests.csproj @@ -17,6 +17,9 @@ + + + diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/HybridSearch/QdrantKeywordVectorizedHybridSearchTests_NamedVectors.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/HybridSearch/QdrantKeywordVectorizedHybridSearchTests_NamedVectors.cs index 86a878167626..dc48a3916d82 100644 --- a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/HybridSearch/QdrantKeywordVectorizedHybridSearchTests_NamedVectors.cs +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/HybridSearch/QdrantKeywordVectorizedHybridSearchTests_NamedVectors.cs @@ -17,16 +17,10 @@ public class QdrantKeywordVectorizedHybridSearchTests_NamedVectors( public new class VectorAndStringFixture : KeywordVectorizedHybridSearchComplianceTests.VectorAndStringFixture { public override TestStore TestStore => QdrantTestStore.NamedVectorsInstance; - - // Qdrant doesn't support the default Flat index kind - protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.Hnsw; } public new class MultiTextFixture : KeywordVectorizedHybridSearchComplianceTests.MultiTextFixture { public override TestStore TestStore => QdrantTestStore.NamedVectorsInstance; - - // Qdrant doesn't support the default Flat index kind - protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.Hnsw; } } diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/HybridSearch/QdrantKeywordVectorizedHybridSearchTests_UnnamedVectors.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/HybridSearch/QdrantKeywordVectorizedHybridSearchTests_UnnamedVectors.cs index e9492cd7ef21..4d3ff6f4b320 100644 --- a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/HybridSearch/QdrantKeywordVectorizedHybridSearchTests_UnnamedVectors.cs +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/HybridSearch/QdrantKeywordVectorizedHybridSearchTests_UnnamedVectors.cs @@ -17,16 +17,10 @@ public class QdrantKeywordVectorizedHybridSearchTests_UnnamedVectors( public new class VectorAndStringFixture : KeywordVectorizedHybridSearchComplianceTests.VectorAndStringFixture { public override TestStore TestStore => QdrantTestStore.UnnamedVectorInstance; - - // Qdrant doesn't support the default Flat index kind - protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.Hnsw; } public new class MultiTextFixture : KeywordVectorizedHybridSearchComplianceTests.MultiTextFixture { public override TestStore TestStore => 
QdrantTestStore.UnnamedVectorInstance; - - // Qdrant doesn't support the default Flat index kind - protected override string IndexKind => Microsoft.Extensions.VectorData.IndexKind.Hnsw; } } diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/QdrantEmbeddingGenerationTests.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/QdrantEmbeddingGenerationTests.cs new file mode 100644 index 000000000000..841bb1f06f2d --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/QdrantEmbeddingGenerationTests.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel; +using QdrantIntegrationTests.Support; +using VectorDataSpecificationTests; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace QdrantIntegrationTests; + +public class QdrantEmbeddingGenerationTests(QdrantEmbeddingGenerationTests.Fixture fixture) + : EmbeddingGenerationTests(fixture), IClassFixture +{ + public new class Fixture : EmbeddingGenerationTests.Fixture + { + public override TestStore TestStore => QdrantTestStore.UnnamedVectorInstance; + + public override IVectorStore CreateVectorStore(IEmbeddingGenerator? embeddingGenerator) + => QdrantTestStore.UnnamedVectorInstance.GetVectorStore(new() { EmbeddingGenerator = embeddingGenerator }); + + public override Func[] DependencyInjectionStoreRegistrationDelegates => + [ + services => services + .AddSingleton(QdrantTestStore.UnnamedVectorInstance.Client) + .AddQdrantVectorStore() + ]; + + public override Func[] DependencyInjectionCollectionRegistrationDelegates => + [ + services => services + .AddSingleton(QdrantTestStore.UnnamedVectorInstance.Client) + .AddQdrantVectorStoreRecordCollection(this.CollectionName) + ]; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantTestStore.cs b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantTestStore.cs index c0d4b67768a6..0d48a54df7ed 100644 --- a/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/QdrantIntegrationTests/Support/QdrantTestStore.cs @@ -15,6 +15,9 @@ internal sealed class QdrantTestStore : TestStore public static QdrantTestStore NamedVectorsInstance { get; } = new(hasNamedVectors: true); public static QdrantTestStore UnnamedVectorInstance { get; } = new(hasNamedVectors: false); + // Qdrant doesn't support the default Flat index kind + public override string DefaultIndexKind => IndexKind.Hnsw; + private readonly QdrantContainer _container = new QdrantBuilder().Build(); private readonly bool _hasNamedVectors; private QdrantClient? 
_client; @@ -29,8 +32,6 @@ public QdrantVectorStore GetVectorStore(QdrantVectorStoreOptions options) private QdrantTestStore(bool hasNamedVectors) => this._hasNamedVectors = hasNamedVectors; - public override string DefaultIndexKind => IndexKind.Hnsw; - /// /// Qdrant normalizes vectors on upsert, so we cannot compare /// what we upserted and what we retrieve, we can only check diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs index 70137ab607cf..437048200f62 100644 --- a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicFilterTests.cs @@ -72,10 +72,10 @@ public class RedisJsonCollectionBasicFilterTests(RedisJsonCollectionBasicFilterT { public override TestStore TestStore => RedisTestStore.JsonInstance; - protected override string CollectionName => "JsonCollectionFilterTests"; + public override string CollectionName => "JsonCollectionFilterTests"; // Override to remove the bool property, which isn't (currently) supported on Redis/JSON - protected override VectorStoreRecordDefinition GetRecordDefinition() + public override VectorStoreRecordDefinition GetRecordDefinition() => new() { Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(bool)).ToList() @@ -124,10 +124,10 @@ public override Task Legacy_AnyTagEqualTo_List() { public override TestStore TestStore => RedisTestStore.HashSetInstance; - protected override string CollectionName => "HashSetCollectionFilterTests"; + public override string CollectionName => "HashSetCollectionFilterTests"; // Override to remove the bool property, which isn't (currently) supported on Redis - protected override VectorStoreRecordDefinition GetRecordDefinition() + public override VectorStoreRecordDefinition GetRecordDefinition() => new() { Properties = base.GetRecordDefinition().Properties.Where(p => diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicQueryTests.cs index f9ab03c7cae9..a75cb113a3d4 100644 --- a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/Filter/RedisBasicQueryTests.cs @@ -72,10 +72,10 @@ public class RedisJsonCollectionBasicQueryTests(RedisJsonCollectionBasicQueryTes { public override TestStore TestStore => RedisTestStore.JsonInstance; - protected override string CollectionName => "JsonCollectionQueryTests"; + public override string CollectionName => "JsonCollectionQueryTests"; // Override to remove the bool property, which isn't (currently) supported on Redis/JSON - protected override VectorStoreRecordDefinition GetRecordDefinition() + public override VectorStoreRecordDefinition GetRecordDefinition() => new() { Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(bool)).ToList() @@ -116,10 +116,10 @@ public override Task Contains_over_field_string_List() { public override TestStore TestStore => RedisTestStore.HashSetInstance; - protected override string CollectionName => "HashSetCollectionQueryTests"; + public override string CollectionName => "HashSetCollectionQueryTests"; // Override to remove the bool property, which isn't (currently) supported on Redis - protected 
override VectorStoreRecordDefinition GetRecordDefinition() + public override VectorStoreRecordDefinition GetRecordDefinition() => new() { Properties = base.GetRecordDefinition().Properties.Where(p => diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/RedisHashSetEmbeddingGenerationTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/RedisHashSetEmbeddingGenerationTests.cs new file mode 100644 index 000000000000..924e26ab4866 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/RedisHashSetEmbeddingGenerationTests.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Redis; +using RedisIntegrationTests.Support; +using VectorDataSpecificationTests; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace RedisIntegrationTests; + +public class RedisHashSetEmbeddingGenerationTests(RedisHashSetEmbeddingGenerationTests.Fixture fixture) + : EmbeddingGenerationTests(fixture), IClassFixture +{ + public new class Fixture : EmbeddingGenerationTests.Fixture + { + public override TestStore TestStore => RedisTestStore.HashSetInstance; + + public override IVectorStore CreateVectorStore(IEmbeddingGenerator? embeddingGenerator) + => RedisTestStore.HashSetInstance.GetVectorStore(new() { StorageType = RedisStorageType.HashSet, EmbeddingGenerator = embeddingGenerator }); + + public override Func[] DependencyInjectionStoreRegistrationDelegates => + [ + // TODO: This doesn't work because if a RedisVectorStoreOptions is provided (and it needs to be for HashSet), the embedding generator + // isn't looked up in DI. The options are also immutable so we can't inject an embedding generator into them. + // services => services + // .AddSingleton(RedisTestStore.HashSetInstance.Database) + // .AddRedisVectorStore(new RedisVectorStoreOptions() { StorageType = RedisStorageType.HashSet}) + ]; + + public override Func[] DependencyInjectionCollectionRegistrationDelegates => + [ + services => services + .AddSingleton(RedisTestStore.HashSetInstance.Database) + .AddRedisHashSetVectorStoreRecordCollection(this.CollectionName) + ]; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/RedisJsonEmbeddingGenerationTests.cs b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/RedisJsonEmbeddingGenerationTests.cs new file mode 100644 index 000000000000..af52948ad43b --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/RedisIntegrationTests/RedisJsonEmbeddingGenerationTests.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel; +using RedisIntegrationTests.Support; +using VectorDataSpecificationTests; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace RedisIntegrationTests; + +public class RedisJsonEmbeddingGenerationTests(RedisJsonEmbeddingGenerationTests.Fixture fixture) + : EmbeddingGenerationTests(fixture), IClassFixture +{ + public new class Fixture : EmbeddingGenerationTests.Fixture + { + public override TestStore TestStore => RedisTestStore.JsonInstance; + + public override IVectorStore CreateVectorStore(IEmbeddingGenerator? 
embeddingGenerator) + => RedisTestStore.JsonInstance.GetVectorStore(new() { EmbeddingGenerator = embeddingGenerator }); + + public override Func[] DependencyInjectionStoreRegistrationDelegates => + [ + services => services + .AddSingleton(RedisTestStore.JsonInstance.Database) + .AddRedisVectorStore() + ]; + + public override Func[] DependencyInjectionCollectionRegistrationDelegates => + [ + services => services + .AddSingleton(RedisTestStore.JsonInstance.Database) + .AddRedisJsonVectorStoreRecordCollection(this.CollectionName) + ]; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicFilterTests.cs index dd15438bfcf3..209b95a73d45 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicFilterTests.cs @@ -64,10 +64,10 @@ public override Task Contains_over_field_string_List() public override TestStore TestStore => SqlServerTestStore.Instance; - protected override string CollectionName => s_uniqueName; + public override string CollectionName => s_uniqueName; // Override to remove the string collection properties, which aren't (currently) supported on SqlServer - protected override VectorStoreRecordDefinition GetRecordDefinition() + public override VectorStoreRecordDefinition GetRecordDefinition() => new() { Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(string[]) && p.PropertyType != typeof(List)).ToList() diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicQueryTests.cs index 1e10af0cb5b9..519e8f0e40f6 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Filter/SqlServerBasicQueryTests.cs @@ -48,10 +48,10 @@ public override Task Contains_over_field_string_List() public override TestStore TestStore => SqlServerTestStore.Instance; - protected override string CollectionName => s_uniqueName; + public override string CollectionName => s_uniqueName; // Override to remove the string collection properties, which aren't (currently) supported on SqlServer - protected override VectorStoreRecordDefinition GetRecordDefinition() + public override VectorStoreRecordDefinition GetRecordDefinition() => new() { Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(string[]) && p.PropertyType != typeof(List)).ToList() diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs index 47324b0672e1..dda2cf5252fe 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerCommandBuilderTests.cs @@ -348,5 +348,6 @@ private static VectorStoreRecordModel BuildModel(List => new VectorStoreRecordModelBuilder(SqlServerConstants.ModelBuildingOptions) .Build( typeof(Dictionary), - new() { Properties = properties }); + new() { Properties = properties }, + defaultEmbeddingGenerator: null); } diff --git 
a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerEmbeddingGenerationTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerEmbeddingGenerationTests.cs new file mode 100644 index 000000000000..ea72466716dd --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerEmbeddingGenerationTests.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using SqlServerIntegrationTests.Support; +using VectorDataSpecificationTests; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace SqlServerIntegrationTests; + +public class SqlServerEmbeddingGenerationTests(SqlServerEmbeddingGenerationTests.Fixture fixture) + : EmbeddingGenerationTests(fixture), IClassFixture +{ + public new class Fixture : EmbeddingGenerationTests.Fixture + { + public override TestStore TestStore => SqlServerTestStore.Instance; + + public override IVectorStore CreateVectorStore(IEmbeddingGenerator? embeddingGenerator) + => SqlServerTestStore.Instance.GetVectorStore(new() { EmbeddingGenerator = embeddingGenerator }); + + public override Func[] DependencyInjectionStoreRegistrationDelegates => + [ + // TODO: Implement DI registration for SqlServer (https://github.com/microsoft/semantic-kernel/issues/10948) + ]; + + public override Func[] DependencyInjectionCollectionRegistrationDelegates => + [ + // TODO: Implement DI registration for SqlServer (https://github.com/microsoft/semantic-kernel/issues/10948) + ]; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs index 7928588bb13e..563473def484 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerVectorStoreTests.cs @@ -81,14 +81,14 @@ public async Task RecordCRUD() received = await collection.GetAsync(updated.Id, new() { IncludeVectors = true }); AssertEquality(updated, received); - VectorSearchResult vectorSearchResult = await (collection.VectorizedSearchAsync(inserted.Floats, top: 3, new() + VectorSearchResult vectorSearchResult = await (collection.SearchEmbeddingAsync(inserted.Floats, top: 3, new() { VectorProperty = r => r.Floats, IncludeVectors = true })).SingleAsync(); AssertEquality(updated, vectorSearchResult.Record); - vectorSearchResult = await (collection.VectorizedSearchAsync(inserted.Floats, top: 3, new() + vectorSearchResult = await (collection.SearchEmbeddingAsync(inserted.Floats, top: 3, new() { VectorProperty = r => r.Floats, IncludeVectors = false diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestStore.cs b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestStore.cs index 421bf7621d7f..b99303b6ef6c 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/Support/SqlServerTestStore.cs @@ -8,23 +8,31 @@ namespace SqlServerIntegrationTests.Support; public sealed class SqlServerTestStore : TestStore { + private string? _connectionString; + public string ConnectionString => this._connectionString ?? 
throw new InvalidOperationException("Not initialized"); + public static readonly SqlServerTestStore Instance = new(); public override IVectorStore DefaultVectorStore - => this._connectedStore ?? throw new InvalidOperationException("Not initialized"); + => this._defaultVectorStore ?? throw new InvalidOperationException("Not initialized"); + + public SqlServerVectorStore GetVectorStore(SqlServerVectorStoreOptions options) + => new(this.ConnectionString, options); public override string DefaultDistanceFunction => DistanceFunction.CosineDistance; - private SqlServerVectorStore? _connectedStore; + private SqlServerVectorStore? _defaultVectorStore; protected override Task StartAsync() { - if (string.IsNullOrWhiteSpace(SqlServerTestEnvironment.ConnectionString)) + this._connectionString = SqlServerTestEnvironment.ConnectionString; + + if (string.IsNullOrWhiteSpace(this._connectionString)) { throw new InvalidOperationException("Connection string is not configured, set the SqlServer:ConnectionString environment variable"); } - this._connectedStore = new(SqlServerTestEnvironment.ConnectionString); + this._defaultVectorStore = new(this._connectionString); return Task.CompletedTask; } diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs index faf9369f8d78..996b69847455 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicFilterTests.cs @@ -57,10 +57,8 @@ public override Task Legacy_AnyTagEqualTo_List() { public override TestStore TestStore => SqliteTestStore.Instance; - protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; - // Override to remove the string array property, which isn't (currently) supported on SQLite - protected override VectorStoreRecordDefinition GetRecordDefinition() + public override VectorStoreRecordDefinition GetRecordDefinition() => new() { Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(string[]) && p.PropertyType != typeof(List)).ToList() diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicQueryTests.cs index e11b81f0ef8f..ff73aa802c95 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Filter/SqliteBasicQueryTests.cs @@ -48,10 +48,8 @@ public override Task Contains_over_field_string_List() { public override TestStore TestStore => SqliteTestStore.Instance; - protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; - // Override to remove the string array property, which isn't (currently) supported on SQLite - protected override VectorStoreRecordDefinition GetRecordDefinition() + public override VectorStoreRecordDefinition GetRecordDefinition() => new() { Properties = base.GetRecordDefinition().Properties.Where(p => p.PropertyType != typeof(string[]) && p.PropertyType != typeof(List)).ToList() diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/SqliteEmbeddingGenerationTests.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/SqliteEmbeddingGenerationTests.cs 
new file mode 100644 index 000000000000..7a7e2a2ba067 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/SqliteEmbeddingGenerationTests.cs @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel; +using SqliteIntegrationTests.Support; +using VectorDataSpecificationTests; +using VectorDataSpecificationTests.Support; +using Xunit; + +namespace SqliteIntegrationTests; + +public class SqliteEmbeddingGenerationTests(SqliteEmbeddingGenerationTests.Fixture fixture) + : EmbeddingGenerationTests(fixture), IClassFixture +{ + public new class Fixture : EmbeddingGenerationTests.Fixture + { + public override TestStore TestStore => SqliteTestStore.Instance; + + public override IVectorStore CreateVectorStore(IEmbeddingGenerator? embeddingGenerator) + => SqliteTestStore.Instance.GetVectorStore(new() { EmbeddingGenerator = embeddingGenerator }); + + public override Func[] DependencyInjectionStoreRegistrationDelegates => + [ + services => services.AddSqliteVectorStore(SqliteTestStore.Instance.ConnectionString) + ]; + + public override Func[] DependencyInjectionCollectionRegistrationDelegates => + [ + services => services.AddSqliteVectorStoreRecordCollection(this.CollectionName, SqliteTestStore.Instance.ConnectionString) + ]; + } +} diff --git a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs index 3ea3b05d69d7..bed7ed7c8de0 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/SqliteIntegrationTests/Support/SqliteTestStore.cs @@ -10,6 +10,9 @@ internal sealed class SqliteTestStore : TestStore { private string? _databasePath; + private string? _connectionString; + public string ConnectionString => this._connectionString ?? throw new InvalidOperationException("Not initialized"); + public static SqliteTestStore Instance { get; } = new(); private SqliteVectorStore? 
_defaultVectorStore; @@ -18,6 +21,9 @@ public override IVectorStore DefaultVectorStore public override string DefaultDistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; + public SqliteVectorStore GetVectorStore(SqliteVectorStoreOptions options) + => new(this.ConnectionString, options); + private SqliteTestStore() { } @@ -25,7 +31,8 @@ private SqliteTestStore() protected override Task StartAsync() { this._databasePath = Path.GetTempFileName(); - this._defaultVectorStore = new SqliteVectorStore($"Data Source={this._databasePath}"); + this._connectionString = $"Data Source={this._databasePath}"; + this._defaultVectorStore = new SqliteVectorStore(this._connectionString); return Task.CompletedTask; } diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoDataConformanceTests.cs index 7dc6da6b29fd..2eca14714b0e 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoDataConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoDataConformanceTests.cs @@ -150,7 +150,7 @@ protected override List BuildTestData() => } ]; - protected override VectorStoreRecordDefinition GetRecordDefinition() + public override VectorStoreRecordDefinition GetRecordDefinition() => new() { Properties = diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs index 3e879f46837f..4d923d71304e 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/CRUD/NoVectorConformanceTests.cs @@ -143,7 +143,7 @@ protected override List BuildTestData() => } ]; - protected override VectorStoreRecordDefinition GetRecordDefinition() + public override VectorStoreRecordDefinition GetRecordDefinition() => new() { Properties = diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/EmbeddingGenerationTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/EmbeddingGenerationTests.cs new file mode 100644 index 000000000000..18824613be0d --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/EmbeddingGenerationTests.cs @@ -0,0 +1,516 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.Extensions.VectorData.Properties; +using VectorDataSpecificationTests.Support; +using VectorDataSpecificationTests.Xunit; +using Xunit; + +namespace VectorDataSpecificationTests; + +#pragma warning disable CA1819 // Properties should not return arrays +#pragma warning disable CA2000 // Don't actually need to dispose FakeEmbeddingGenerator +#pragma warning disable CS8605 // Unboxing a possibly null value. + +public abstract class EmbeddingGenerationTests(EmbeddingGenerationTests.Fixture fixture) + where TKey : notnull +{ + #region Search + + [ConditionalFact] + public virtual async Task SearchAsync_with_property_generator() + { + // Property level: embedding generators are defined at all levels. The property generator should take precedence. 
+ var collection = this.GetCollection(storeGenerator: true, collectionGenerator: true, propertyGenerator: true); + + var result = await collection.SearchAsync("[1, 1, 0]", top: 1).SingleAsync(); + + Assert.Equal("Property ([1, 1, 3])", result.Record.Text); + } + + [ConditionalFact] + public virtual async Task SearchAsync_with_property_generator_dynamic() + { + // Property level: embedding generators are defined at all levels. The property generator should take precedence. + var collection = this.GetCollection>(storeGenerator: true, collectionGenerator: true, propertyGenerator: true); + + var result = await collection.SearchAsync("[1, 1, 0]", top: 1).SingleAsync(); + + Assert.Equal("Property ([1, 1, 3])", result.Record[nameof(Record.Text)]); + } + + [ConditionalFact] + public virtual async Task SearchAsync_with_collection_generator() + { + // Collection level: embedding generators are defined at the collection and store level - the collection generator should take precedence. + var collection = this.GetCollection(storeGenerator: true, collectionGenerator: true, propertyGenerator: false); + + var result = await collection.SearchAsync("[1, 1, 0]", top: 1).SingleAsync(); + + Assert.Equal("Collection ([1, 1, 2])", result.Record.Text); + } + + [ConditionalFact] + public virtual async Task SearchAsync_with_store_generator() + { + // Store level: an embedding generator is defined at the store level only. + var collection = this.GetCollection(storeGenerator: true, collectionGenerator: false, propertyGenerator: false); + + var result = await collection.SearchAsync("[1, 1, 0]", top: 1).SingleAsync(); + + Assert.Equal("Store ([1, 1, 1])", result.Record.Text); + } + + [ConditionalFact] + public virtual async Task SearchAsync_with_store_dependency_injection() + { + foreach (var registrationDelegate in fixture.DependencyInjectionStoreRegistrationDelegates) + { + IServiceCollection serviceCollection = new ServiceCollection(); + + serviceCollection.AddSingleton(new FakeEmbeddingGenerator(replaceLast: 1)); + registrationDelegate(serviceCollection); + + await using var serviceProvider = serviceCollection.BuildServiceProvider(); + + var vectorStore = serviceProvider.GetRequiredService(); + var collection = vectorStore.GetCollection(fixture.CollectionName, fixture.GetRecordDefinition()); + + var result = await collection.SearchAsync("[1, 1, 0]", top: 1).SingleAsync(); + + Assert.Equal("Store ([1, 1, 1])", result.Record.Text); + } + } + + [ConditionalFact] + public virtual async Task SearchAsync_with_collection_dependency_injection() + { + foreach (var registrationDelegate in fixture.DependencyInjectionCollectionRegistrationDelegates) + { + IServiceCollection serviceCollection = new ServiceCollection(); + + serviceCollection.AddSingleton(new FakeEmbeddingGenerator(replaceLast: 1)); + registrationDelegate(serviceCollection); + + await using var serviceProvider = serviceCollection.BuildServiceProvider(); + + var collection = serviceProvider.GetRequiredService>(); + + var result = await collection.SearchAsync("[1, 1, 0]", top: 1).SingleAsync(); + + Assert.Equal("Store ([1, 1, 1])", result.Record.Text); + } + } + + [ConditionalFact] + public virtual async Task SearchAsync_with_custom_input_type() + { + var recordDefinition = new VectorStoreRecordDefinition() + { + Properties = fixture.GetRecordDefinition().Properties + .Select(p => p is VectorStoreRecordVectorProperty vectorProperty + ? 
new VectorStoreRecordVectorProperty(nameof(Record.Embedding), dimensions: 3) + { + DistanceFunction = fixture.DefaultDistanceFunction, + IndexKind = fixture.DefaultIndexKind + } + : p) + .ToList() + }; + + var collection = fixture.GetCollection( + fixture.CreateVectorStore(new FakeCustomerEmbeddingGenerator([1, 1, 1])), + fixture.CollectionName, + recordDefinition); + + var result = await collection.SearchAsync(new Customer(), top: 1).SingleAsync(); + + Assert.Equal("Store ([1, 1, 1])", result.Record.Text); + } + + [ConditionalFact] + public virtual async Task SearchAsync_without_generator_throws() + { + // The database doesn't support embedding generation, and no client-side generator has been configured at any level, + // so SearchAsync should throw. + var collection = fixture.GetCollection(fixture.TestStore.DefaultVectorStore, fixture.CollectionName + "WithoutGenerator"); + + var exception = await Assert.ThrowsAsync(() => collection.SearchAsync("foo", top: 1).ToListAsync().AsTask()); + + Assert.Equal(VectorDataStrings.NoEmbeddingGeneratorWasConfiguredForSearch, exception.Message); + } + + public class RawRecord + { + [VectorStoreRecordKey] + public TKey Key { get; set; } = default!; + [VectorStoreRecordVector(Dimensions: 3)] + public ReadOnlyMemory Embedding { get; set; } + } + + [ConditionalFact] + public virtual async Task SearchAsync_with_embedding_argument_throws() + { + var collection = this.GetCollection(storeGenerator: true, collectionGenerator: true, propertyGenerator: true); + + var exception = await Assert.ThrowsAsync(() => collection.SearchAsync(new ReadOnlyMemory([1, 2, 3]), top: 1).ToListAsync().AsTask()); + + Assert.Equal(VectorDataStrings.EmbeddingTypePassedToSearchAsync, exception.Message); + } + + [ConditionalFact] + public virtual async Task SearchAsync_with_incompatible_generator_throws() + { + var collection = this.GetCollection(storeGenerator: true, collectionGenerator: true, propertyGenerator: true); + + // We have a generator configured for string, not int. + var exception = await Assert.ThrowsAsync(() => collection.SearchAsync(8, top: 1).ToListAsync().AsTask()); + + Assert.Equal($"An input of type 'Int32' was provided, but an incompatible embedding generator of type '{nameof(FakeEmbeddingGenerator)}' was configured.", exception.Message); + } + + #endregion Search + + #region Upsert + + [ConditionalFact] + public virtual async Task UpsertAsync() + { + var counter = fixture.GenerateNextCounter(); + + var record = new Record + { + Key = fixture.GenerateNextKey(), + Embedding = "[100, 1, 0]", + Counter = counter, + Text = nameof(UpsertAsync) + }; + + // Property level: embedding generators are defined at all levels. The property generator should take precedence. 
+ var collection = this.GetCollection(storeGenerator: true, collectionGenerator: true, propertyGenerator: true); + + await collection.UpsertAsync(record).ConfigureAwait(false); + + await fixture.TestStore.WaitForDataAsync(collection, 1, filter: r => r.Counter == counter); + + var result = await collection.SearchEmbeddingAsync(new ReadOnlyMemory([100, 1, 3]), top: 1).SingleAsync(); + Assert.Equal(counter, result.Record.Counter); + } + + [ConditionalFact] + public virtual async Task UpsertAsync_dynamic() + { + var counter = fixture.GenerateNextCounter(); + + var record = new Dictionary + { + [nameof(Record.Key)] = fixture.GenerateNextKey(), + [nameof(Record.Embedding)] = "[200, 1, 0]", + [nameof(Record.Counter)] = counter, + [nameof(Record.Text)] = nameof(UpsertAsync_dynamic) + }; + + // Property level: embedding generators are defined at all levels. The property generator should take precedence. + var collection = this.GetCollection>(storeGenerator: true, collectionGenerator: true, propertyGenerator: true); + + await collection.UpsertAsync(record).ConfigureAwait(false); + + await fixture.TestStore.WaitForDataAsync(collection, 1, filter: r => (int)r[nameof(Record.Counter)] == counter); + + var result = await collection.SearchEmbeddingAsync(new ReadOnlyMemory([200, 1, 3]), top: 1).SingleAsync(); + Assert.Equal(counter, result.Record[nameof(Record.Counter)]); + } + + [ConditionalFact] + public virtual async Task UpsertAsync_batch() + { + var (counter1, counter2) = (fixture.GenerateNextCounter(), fixture.GenerateNextCounter()); + + Record[] records = + [ + new() + { + Key = fixture.GenerateNextKey(), + Embedding = "[300, 1, 0]", + Counter = counter1, + Text = nameof(UpsertAsync_batch) + "1" + }, + new() + { + Key = fixture.GenerateNextKey(), + Embedding = "[400, 1, 0]", + Counter = counter2, + Text = nameof(UpsertAsync_batch) + "2" + } + ]; + + var collection = this.GetCollection(storeGenerator: true, collectionGenerator: true, propertyGenerator: true); + + await collection.UpsertAsync(records).ConfigureAwait(false); + + await fixture.TestStore.WaitForDataAsync(collection, 2, filter: r => (int)r.Counter == counter1 || (int)r.Counter == counter2); + + var result = await collection.SearchEmbeddingAsync(new ReadOnlyMemory([300, 1, 3]), top: 1).SingleAsync(); + Assert.Equal(counter1, result.Record.Counter); + + result = await collection.SearchEmbeddingAsync(new ReadOnlyMemory([400, 1, 3]), top: 1).SingleAsync(); + Assert.Equal(counter2, result.Record.Counter); + } + + [ConditionalFact] + public virtual async Task UpsertAsync_batch_dynamic() + { + var (counter1, counter2) = (fixture.GenerateNextCounter(), fixture.GenerateNextCounter()); + + Dictionary[] records = + [ + new() + { + [nameof(Record.Key)] = fixture.GenerateNextKey(), + [nameof(Record.Embedding)] = "[500, 1, 0]", + [nameof(Record.Counter)] = counter1, + [nameof(Record.Text)] = nameof(UpsertAsync_batch_dynamic) + "1" + }, + new() + { + [nameof(Record.Key)] = fixture.GenerateNextKey(), + [nameof(Record.Embedding)] = "[600, 1, 0]", + [nameof(Record.Counter)] = counter2, + [nameof(Record.Text)] = nameof(UpsertAsync_batch_dynamic) + "2" + } + ]; + + var collection = this.GetCollection>(storeGenerator: true, collectionGenerator: true, propertyGenerator: true); + + await collection.UpsertAsync(records).ConfigureAwait(false); + + await fixture.TestStore.WaitForDataAsync(collection, 2, filter: r => (int)r[nameof(Record.Counter)] == counter1 || (int)r[nameof(Record.Counter)] == counter2); + + var result = await collection.SearchEmbeddingAsync(new 
ReadOnlyMemory([500, 1, 3]), top: 1).SingleAsync(); + Assert.Equal(counter1, result.Record[nameof(Record.Counter)]); + + result = await collection.SearchEmbeddingAsync(new ReadOnlyMemory([600, 1, 3]), top: 1).SingleAsync(); + Assert.Equal(counter2, result.Record[nameof(Record.Counter)]); + } + + #endregion Upsert + + #region IncludeVectors + + [ConditionalFact] + public virtual async Task SearchAsync_with_IncludeVectors_throws() + { + var collection = this.GetCollection(storeGenerator: true, collectionGenerator: true, propertyGenerator: true); + + var exception = await Assert.ThrowsAsync(() => collection.SearchAsync("[1, 0, 0]", top: 1, new() { IncludeVectors = true }).ToListAsync().AsTask()); + + Assert.Equal("When an embedding generator is configured, `Include Vectors` cannot be enabled.", exception.Message); + } + + [ConditionalFact] + public virtual async Task GetAsync_with_IncludeVectors_throws() + { + var collection = this.GetCollection(storeGenerator: true, collectionGenerator: true, propertyGenerator: true); + + var exception = await Assert.ThrowsAsync(() => collection.GetAsync(fixture.TestData[0].Key, new() { IncludeVectors = true })); + + Assert.Equal("When an embedding generator is configured, `Include Vectors` cannot be enabled.", exception.Message); + } + + [ConditionalFact] + public virtual async Task GetAsync_enumerable_with_IncludeVectors_throws() + { + var collection = this.GetCollection(storeGenerator: true, collectionGenerator: true, propertyGenerator: true); + + var exception = await Assert.ThrowsAsync(() => + collection.GetAsync( + [fixture.TestData[0].Key, fixture.TestData[1].Key], + new() { IncludeVectors = true }) + .ToListAsync().AsTask()); + + Assert.Equal("When an embedding generator is configured, `Include Vectors` cannot be enabled.", exception.Message); + } + + #endregion IncludeVectors + + #region Support + + public class Record + { + public TKey Key { get; set; } = default!; + public string? Embedding { get; set; } + + public int Counter { get; set; } + public string? Text { get; set; } + } + + public class RecordWithAttributes + { + [VectorStoreRecordKey] + public TKey Key { get; set; } = default!; + + [VectorStoreRecordVector(Dimensions: 3)] + public string? Embedding { get; set; } + + [VectorStoreRecordData(IsIndexed = true)] + public int Counter { get; set; } + + [VectorStoreRecordData] + public string? Text { get; set; } + } + + public class RecordWithCustomerVectorProperty + { + public TKey Key { get; set; } = default!; + public Customer? Embedding { get; set; } + + public int Counter { get; set; } + public string? Text { get; set; } + } + + public class Customer + { + public string? FirstName { get; set; } + public string? LastName { get; set; } + } + + private IVectorStoreRecordCollection GetCollection( + bool storeGenerator = false, + bool collectionGenerator = false, + bool propertyGenerator = false) + where TRecord : notnull + { + var properties = fixture.GetRecordDefinition().Properties; + + properties = properties + .Select(p => p is VectorStoreRecordVectorProperty vectorProperty && propertyGenerator + ? new VectorStoreRecordVectorProperty(vectorProperty) { EmbeddingGenerator = new FakeEmbeddingGenerator(replaceLast: 3) } + : p) + .ToList(); + + var recordDefinition = new VectorStoreRecordDefinition + { + EmbeddingGenerator = collectionGenerator ? new FakeEmbeddingGenerator(replaceLast: 2) : null, + Properties = properties + }; + + return fixture.GetCollection( + fixture.CreateVectorStore(storeGenerator ? 
new FakeEmbeddingGenerator(replaceLast: 1) : null), + fixture.CollectionName, + recordDefinition); + } + + public abstract class Fixture : VectorStoreCollectionFixture + { + private int _counter; + + public override string CollectionName => "EmbeddingGenerationTests"; + + public override VectorStoreRecordDefinition GetRecordDefinition() + => new() + { + Properties = + [ + new VectorStoreRecordKeyProperty(nameof(Record.Key), typeof(TKey)), + new VectorStoreRecordVectorProperty(nameof(Record.Embedding), typeof(string), dimensions: 3) + { + DistanceFunction = this.DefaultDistanceFunction, + IndexKind = this.DefaultIndexKind + }, + + new VectorStoreRecordDataProperty(nameof(Record.Counter), typeof(int)) { IsIndexed = true }, + new VectorStoreRecordDataProperty(nameof(Record.Text), typeof(string)) + ], + EmbeddingGenerator = new FakeEmbeddingGenerator() + }; + + protected override List BuildTestData() => + [ + new() + { + Key = this.GenerateNextKey(), + Embedding = "[1, 1, 1]", + Counter = this.GenerateNextCounter(), + Text = "Store ([1, 1, 1])" + }, + new() + { + Key = this.GenerateNextKey(), + Embedding = "[1, 1, 2]", + Counter = this.GenerateNextCounter(), + Text = "Collection ([1, 1, 2])" + }, + new() + { + Key = this.GenerateNextKey(), + Embedding = "[1, 1, 3]", + Counter = this.GenerateNextCounter(), + Text = "Property ([1, 1, 3])" + } + ]; + + public virtual IVectorStoreRecordCollection GetCollection( + IVectorStore vectorStore, + string collectionName, + VectorStoreRecordDefinition? recordDefinition = null) + where TRecord : notnull + => vectorStore.GetCollection(collectionName, recordDefinition); + + public abstract IVectorStore CreateVectorStore(IEmbeddingGenerator? embeddingGenerator = null); + + public abstract Func[] DependencyInjectionStoreRegistrationDelegates { get; } + public abstract Func[] DependencyInjectionCollectionRegistrationDelegates { get; } + + public virtual int GenerateNextCounter() + => Interlocked.Increment(ref this._counter); + } + + private sealed class FakeEmbeddingGenerator(int? replaceLast = null) : IEmbeddingGenerator> + { + public Task>> GenerateAsync( + IEnumerable values, + EmbeddingGenerationOptions? options = null, + CancellationToken cancellationToken = default) + { + var results = new GeneratedEmbeddings>(); + + foreach (var value in values) + { + var vector = value.TrimStart('[').TrimEnd(']').Split(',').Select(s => float.Parse(s.Trim())).ToArray(); + + if (replaceLast is not null) + { + vector[vector.Length - 1] = replaceLast.Value; + } + + results.Add(new Embedding(vector)); + } + + return Task.FromResult(results); + } + + public object? GetService(Type serviceType, object? serviceKey = null) + => null; + + public void Dispose() + { + } + } + + private sealed class FakeCustomerEmbeddingGenerator(float[] embedding) : IEmbeddingGenerator> + { + public Task>> GenerateAsync(IEnumerable values, EmbeddingGenerationOptions? options = null, CancellationToken cancellationToken = default) + => Task.FromResult(new GeneratedEmbeddings> { new(embedding) }); + + public object? GetService(Type serviceType, object? 
serviceKey = null) + => null; + + public void Dispose() + { + } + } + + #endregion Support +} diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs index 925fecbb607d..f1d06ee74aaf 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicFilterTests.cs @@ -308,7 +308,7 @@ public virtual Task Legacy_AnyTagEqualTo_List() protected virtual async Task> GetRecords( Expression> filter, int top, ReadOnlyMemory vector) - => await fixture.Collection.VectorizedSearchAsync( + => await fixture.Collection.SearchEmbeddingAsync( vector, top: top, new() { Filter = filter }) @@ -316,7 +316,7 @@ protected virtual async Task> GetRecords( protected virtual async Task>> GetDynamicRecords( Expression, bool>> dynamicFilter, int top, ReadOnlyMemory vector) - => await fixture.DynamicCollection.VectorizedSearchAsync( + => await fixture.DynamicCollection.SearchEmbeddingAsync( vector, top: top, new() { Filter = dynamicFilter }) @@ -423,7 +423,7 @@ public class FilterRecord public abstract class Fixture : VectorStoreCollectionFixture { - protected override string CollectionName => "FilterTests"; + public override string CollectionName => "FilterTests"; protected virtual ReadOnlyMemory GetVector(int count) // All records have the same vector - this fixture is about testing criteria filtering only @@ -444,7 +444,7 @@ public override async Task InitializeAsync() } } - protected override VectorStoreRecordDefinition GetRecordDefinition() + public override VectorStoreRecordDefinition GetRecordDefinition() => new() { Properties = diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs index b1f942dc076a..1a8c6649cd87 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Filter/BasicQueryTests.cs @@ -29,7 +29,7 @@ public abstract class QueryFixture : BasicFilterTests.Fixture { private static readonly Random s_random = new(); - protected override string CollectionName => "QueryTests"; + public override string CollectionName => "QueryTests"; /// /// Use random vectors to make sure that the values don't matter for GetAsync. 
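For context on the EmbeddingGenerationTests added above: the FakeEmbeddingGenerator parses a fake "embedding" string such as "[1, 1, 0]" into a float vector and overwrites the last component with a marker (replaceLast: 1 for the store-level generator, 2 for collection-level, 3 for property-level), so the seeded records "Store ([1, 1, 1])", "Collection ([1, 1, 2])" and "Property ([1, 1, 3])" reveal which generator actually ran. The following is a minimal standalone C# sketch of just that parse-and-mark step; it is not part of the patch, and ToMarkedVector is a hypothetical helper name introduced only for illustration.

using System;
using System.Linq;

// Hypothetical helper (not in the patch): parse "[1, 1, 0]" into floats and stamp the last
// component with a marker value, mirroring FakeEmbeddingGenerator's replaceLast behavior.
static float[] ToMarkedVector(string input, float marker)
{
    var vector = input.TrimStart('[').TrimEnd(']')
        .Split(',')
        .Select(s => float.Parse(s.Trim()))
        .ToArray();

    vector[vector.Length - 1] = marker; // e.g. 1 = store-level, 2 = collection-level, 3 = property-level
    return vector;
}

// "[1, 1, 0]" handled by a property-level generator becomes [1, 1, 3],
// which matches the seeded record "Property ([1, 1, 3])".
Console.WriteLine(string.Join(", ", ToMarkedVector("[1, 1, 0]", 3)));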
diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs index df30a4c0abb9..b89a65ad6bfa 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/HybridSearch/KeywordVectorizedHybridSearchComplianceTests.cs @@ -168,9 +168,9 @@ public sealed class MultiTextStringRecord public abstract class VectorAndStringFixture : VectorStoreCollectionFixture> { - protected override string CollectionName => "KeywordHybridSearch" + this.GetUniqueCollectionName(); + public override string CollectionName => "KeywordHybridSearch" + this.GetUniqueCollectionName(); - protected override VectorStoreRecordDefinition GetRecordDefinition() + public override VectorStoreRecordDefinition GetRecordDefinition() => new() { Properties = new List() @@ -221,9 +221,9 @@ protected override Task WaitForDataAsync() public abstract class MultiTextFixture : VectorStoreCollectionFixture> { - protected override string CollectionName => "KeywordHybridSearch" + this.GetUniqueCollectionName(); + public override string CollectionName => "KeywordHybridSearch" + this.GetUniqueCollectionName(); - protected override VectorStoreRecordDefinition GetRecordDefinition() + public override VectorStoreRecordDefinition GetRecordDefinition() => new() { Properties = new List() diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/DynamicDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/DynamicDataModelFixture.cs index 7e99e4580649..089b253b79f9 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/DynamicDataModelFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/DynamicDataModelFixture.cs @@ -12,7 +12,7 @@ public abstract class DynamicDataModelFixture : VectorStoreCollectionFixtu public const string EmbeddingPropertyName = "embedding"; public const int DimensionCount = 3; - protected override VectorStoreRecordDefinition GetRecordDefinition() + public override VectorStoreRecordDefinition GetRecordDefinition() => new() { Properties = diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs index 21ace375aae9..a3091cdc995e 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/SimpleModelFixture.cs @@ -40,7 +40,7 @@ protected override List> BuildTestData() => } ]; - protected override VectorStoreRecordDefinition GetRecordDefinition() + public override VectorStoreRecordDefinition GetRecordDefinition() => new() { Properties = diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs index 6d3d2f935e54..b8fd45a2ae34 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/TestStore.cs @@ -90,20 +90,19 @@ public virtual async Task 
WaitForDataAsync( for (var i = 0; i < 20; i++) { - var results = collection.VectorizedSearchAsync( + var results = collection.SearchEmbeddingAsync( new ReadOnlyMemory(vector), - top: recordCount, - new() - { - // In some databases (Azure AI Search), the data shows up but the filtering index isn't yet updated, - // so filtered searches show empty results. Add a filter to the seed data check below. - Filter = filter - }); + top: 1000, // TODO: this should be recordCount, but see #11655 + new() { Filter = filter }); var count = await results.CountAsync(); if (count == recordCount) { return; } + if (count > recordCount) + { + throw new InvalidOperationException($"Expected at most {recordCount} records, but found {count}."); + } await Task.Delay(TimeSpan.FromMilliseconds(100)); } diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs index 9c3c71fabea3..c76f75d46d47 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/Support/VectorStoreCollectionFixture.cs @@ -16,10 +16,10 @@ public abstract class VectorStoreCollectionFixture : VectorStoreF { private List? _testData; - protected abstract VectorStoreRecordDefinition GetRecordDefinition(); + public abstract VectorStoreRecordDefinition GetRecordDefinition(); protected abstract List BuildTestData(); - protected virtual string CollectionName => Guid.NewGuid().ToString(); + public virtual string CollectionName => Guid.NewGuid().ToString(); protected virtual string DistanceFunction => this.TestStore.DefaultDistanceFunction; protected virtual string IndexKind => this.TestStore.DefaultIndexKind; diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorDataIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorDataIntegrationTests.csproj index 5b14dc1e41c1..382f8eb16e78 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorDataIntegrationTests.csproj +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorDataIntegrationTests.csproj @@ -12,6 +12,8 @@ + + diff --git a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs index 4d4b430a1698..0e2021c60f1f 100644 --- a/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/VectorDataIntegrationTests/VectorSearch/VectorSearchDistanceFunctionComplianceTests.cs @@ -103,11 +103,11 @@ protected async Task SimpleSearch(string distanceFunction, double expectedExactM { await collection.UpsertAsync(insertedRecords); - var searchResult = collection.VectorizedSearchAsync(baseVector, top: 3); + var searchResult = collection.SearchEmbeddingAsync(baseVector, top: 3); var results = await searchResult.ToListAsync(); VerifySearchResults(expectedRecords, expectedScores, results, includeVectors: false); - searchResult = collection.VectorizedSearchAsync(baseVector, top: 3, new() { IncludeVectors = true }); + searchResult = collection.SearchEmbeddingAsync(baseVector, top: 3, new() { IncludeVectors = true 
}); results = await searchResult.ToListAsync(); VerifySearchResults(expectedRecords, expectedScores, results, includeVectors: true); @@ -115,7 +115,7 @@ protected async Task SimpleSearch(string distanceFunction, double expectedExactM { for (int top = Math.Max(1, skip); top <= insertedRecords.Count; top++) { - searchResult = collection.VectorizedSearchAsync(baseVector, + searchResult = collection.SearchEmbeddingAsync(baseVector, top: top, new() { diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoDataConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoDataConformanceTests.cs index fcadf45f574a..e8a6deceb1e8 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoDataConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoDataConformanceTests.cs @@ -17,7 +17,7 @@ public class WeaviateNoDataConformanceTests_NamedVectors(WeaviateNoDataConforman /// /// Weaviate collections must start with an uppercase letter. /// - protected override string CollectionName => "NoDataNamedCollection"; + public override string CollectionName => "NoDataNamedCollection"; } } @@ -31,6 +31,6 @@ public class WeaviateNoDataConformanceTests_UnnamedVector(WeaviateNoDataConforma /// /// Weaviate collections must start with an uppercase letter. /// - protected override string CollectionName => "NoDataUnnamedCollection"; + public override string CollectionName => "NoDataUnnamedCollection"; } } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs index c8a04bf1abb3..b2fb42f176df 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/CRUD/WeaviateNoVectorConformanceTests.cs @@ -17,6 +17,6 @@ public class WeaviateNoVectorConformanceTests_NamedVectors(WeaviateNoVectorConfo /// /// Weaviate collections must start with an uppercase letter. 
/// - protected override string CollectionName => "NoVectorNamedCollection"; + public override string CollectionName => "NoVectorNamedCollection"; } } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs index f8f76dd27943..b4dbf228ee37 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicFilterTests.cs @@ -66,7 +66,5 @@ public override Task Equal_with_string_is_not_Contains() public new class Fixture : BasicFilterTests.Fixture { public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; - - protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; } } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs index c03a8fd8076b..fefa13f83515 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Filter/WeaviateBasicQueryTests.cs @@ -66,7 +66,5 @@ public override Task Equal_with_string_is_not_Contains() public new class Fixture : BasicQueryTests.QueryFixture { public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; - - protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; } } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/HybridSearch/WeaviateKeywordVectorizedHybridSearchTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/HybridSearch/WeaviateKeywordVectorizedHybridSearchTests.cs index 1b386273b6d6..b5a262c4c47c 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/HybridSearch/WeaviateKeywordVectorizedHybridSearchTests.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/HybridSearch/WeaviateKeywordVectorizedHybridSearchTests.cs @@ -18,18 +18,14 @@ public class WeaviateKeywordVectorizedHybridSearchTests_NamedVectors( { public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; - protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; - - protected override string CollectionName => "VectorAndStringHybridSearch"; + public override string CollectionName => "VectorAndStringHybridSearch"; } public new class MultiTextFixture : KeywordVectorizedHybridSearchComplianceTests.MultiTextFixture { public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; - protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; - - protected override string CollectionName => "MultiTextHybridSearch"; + public override string CollectionName => "MultiTextHybridSearch"; } } @@ -44,17 +40,13 @@ public class WeaviateKeywordVectorizedHybridSearchTests_UnnamedVector( { public override TestStore TestStore => WeaviateTestStore.UnnamedVectorInstance; - protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; - - protected override string CollectionName => "VectorAndStringHybridSearch"; + public override string 
CollectionName => "VectorAndStringHybridSearch"; } public new class MultiTextFixture : KeywordVectorizedHybridSearchComplianceTests.MultiTextFixture { public override TestStore TestStore => WeaviateTestStore.UnnamedVectorInstance; - protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; - - protected override string CollectionName => "MultiTextHybridSearch"; + public override string CollectionName => "MultiTextHybridSearch"; } } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateDynamicDataModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateDynamicDataModelFixture.cs index 038de8fa5fd2..874b771b5a8f 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateDynamicDataModelFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateDynamicDataModelFixture.cs @@ -10,7 +10,7 @@ public class WeaviateDynamicDataModelFixture : DynamicDataModelFixture // Weaviate requires the name to start with a capital letter and not contain any chars other than a-Z and 0-9. // Source: https://weaviate.io/developers/weaviate/starter-guides/managing-collections#collection--property-names - protected override string CollectionName => this.GetUniqueCollectionName(); + public override string CollectionName => this.GetUniqueCollectionName(); public override string GetUniqueCollectionName() => $"A{Guid.NewGuid():N}"; } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateSimpleModelFixture.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateSimpleModelFixture.cs index a10525cbb906..829172f8503f 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateSimpleModelFixture.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateSimpleModelFixture.cs @@ -8,11 +8,9 @@ public class WeaviateSimpleModelFixture : SimpleModelFixture { public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; - protected override string DistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; - // Weaviate requires the name to start with a capital letter and not contain any chars other than a-Z and 0-9. 
// Source: https://weaviate.io/developers/weaviate/starter-guides/managing-collections#collection--property-names - protected override string CollectionName => this.GetUniqueCollectionName(); + public override string CollectionName => this.GetUniqueCollectionName(); public override string GetUniqueCollectionName() => $"A{Guid.NewGuid():N}"; } diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs index 76aa72077a3a..8ddea21255d4 100644 --- a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/Support/WeaviateTestStore.cs @@ -15,6 +15,8 @@ public sealed class WeaviateTestStore : TestStore public static WeaviateTestStore NamedVectorsInstance { get; } = new(hasNamedVectors: true); public static WeaviateTestStore UnnamedVectorInstance { get; } = new(hasNamedVectors: false); + public override string DefaultDistanceFunction => Microsoft.Extensions.VectorData.DistanceFunction.CosineDistance; + private readonly WeaviateContainer _container = new WeaviateBuilder().Build(); private readonly bool _hasNamedVectors; public HttpClient? _httpClient { get; private set; } @@ -24,6 +26,9 @@ public sealed class WeaviateTestStore : TestStore public override IVectorStore DefaultVectorStore => this._defaultVectorStore ?? throw new InvalidOperationException("Not initialized"); + public WeaviateVectorStore GetVectorStore(WeaviateVectorStoreOptions options) + => new(this.Client, options); + private WeaviateTestStore(bool hasNamedVectors) => this._hasNamedVectors = hasNamedVectors; protected override async Task StartAsync() diff --git a/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/WeaviateEmbeddingGenerationTests.cs b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/WeaviateEmbeddingGenerationTests.cs new file mode 100644 index 000000000000..44f8e96c5f95 --- /dev/null +++ b/dotnet/src/VectorDataIntegrationTests/WeaviateIntegrationTests/WeaviateEmbeddingGenerationTests.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.VectorData; +using Microsoft.SemanticKernel; +using VectorDataSpecificationTests; +using VectorDataSpecificationTests.Support; +using WeaviateIntegrationTests.Support; +using Xunit; + +namespace WeaviateIntegrationTests; + +public class WeaviateEmbeddingGenerationTests(WeaviateEmbeddingGenerationTests.Fixture fixture) + : EmbeddingGenerationTests(fixture), IClassFixture +{ + public new class Fixture : EmbeddingGenerationTests.Fixture + { + public override TestStore TestStore => WeaviateTestStore.NamedVectorsInstance; + + public override IVectorStore CreateVectorStore(IEmbeddingGenerator? 
embeddingGenerator)
+        => WeaviateTestStore.NamedVectorsInstance.GetVectorStore(new() { EmbeddingGenerator = embeddingGenerator });
+
+    public override Func[] DependencyInjectionStoreRegistrationDelegates =>
+    [
+        services => services
+            .AddSingleton(WeaviateTestStore.NamedVectorsInstance.Client)
+            .AddWeaviateVectorStore()
+    ];
+
+    public override Func[] DependencyInjectionCollectionRegistrationDelegates =>
+    [
+        services => services
+            .AddSingleton(WeaviateTestStore.NamedVectorsInstance.Client)
+            .AddWeaviateVectorStoreRecordCollection(this.CollectionName)
+    ];
+    }
+}

From e9ab1cabf24d536938b7fd1371c068998c4295cf Mon Sep 17 00:00:00 2001
From: westey <164392973+westey-m@users.noreply.github.com>
Date: Mon, 28 Apr 2025 15:55:19 +0100
Subject: [PATCH 56/63] .Net: Cleanup of SKEXP0020 flag, removing MemoryStore samples and stop publishing DuckDB and Kusto connectors. (#11743)

### Motivation and Context

- We are suppressing the SKEXP0020 experimental flag everywhere, but it is mostly not being used anymore, so removing unnecessary suppressions.
- Also removing / replacing any MemoryStore samples, to stop advertising them, since it will be obsoleted in future.
- Stopping publishing the DuckDB and Kusto connectors, since we won't be building VectorStore implementations for them.

#10918
#8540

### Description

### Contribution Checklist

- [ ] The code builds clean without any errors or warnings
- [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations
- [ ] All unit tests pass, and I have added new tests where possible
- [ ] I didn't break anyone :smile:
---
 dotnet/samples/Concepts/Concepts.csproj       |   2 +-
 .../Memory/MemoryStore_CustomReadOnly.cs      | 239 -------------
 .../Memory/SemanticTextMemory_Building.cs     | 171 ---------
 ...tMemoryPlugin_GeminiEmbeddingGeneration.cs | 279 ---------------
 .../TextMemoryPlugin_MultipleMemoryStore.cs   | 337 ------------------
 ...ugin_RecallJsonSerializationWithOptions.cs |  80 -----
 dotnet/samples/Concepts/RAG/WithPlugins.cs    |  60 +++-
 dotnet/samples/Concepts/README.md             |   5 -
 .../Demos/OnnxSimpleRAG/OnnxSimpleRAG.csproj  |   2 +-
 ...ramework.Aspire.ProcessOrchestrator.csproj |   2 +-
 .../ProcessWithCloudEvents.Grpc.csproj        |   2 +-
 .../ProcessWithCloudEvents.Processes.csproj   |   2 +-
 .../ProcessWithDapr/ProcessWithDapr.csproj    |   2 +-
 .../VectorStoreRAG/VectorStoreRAG.csproj      |   2 +-
 .../GettingStarted/GettingStarted.csproj      |   2 +-
 .../GettingStartedWithAgents.csproj           |   2 +-
 .../GettingStartedWithProcesses.csproj        |   2 +-
 .../GettingStartedWithTextSearch.csproj       |   2 +-
 .../GettingStartedWithVectorStores.csproj     |   2 +-
 .../LearnResources/LearnResources.csproj      |   2 +-
 .../Connectors.AzureAISearch.UnitTests.csproj |   2 +-
 ...tors.AzureCosmosDBMongoDB.UnitTests.csproj |   2 +-
 ...ectors.AzureCosmosDBNoSQL.UnitTests.csproj |   2 +-
 .../Connectors.Google.UnitTests.csproj        |   2 +-
 .../Connectors.InMemory.UnitTests.csproj      |   2 +-
 .../Connectors.Memory.DuckDB.csproj           |   1 +
 .../Connectors.Memory.Kusto.csproj            |   1 +
 .../IPostgresDbClient.cs                      |   2 -
 .../PostgresVectorStoreRecordMapper.cs        |   2 -
 .../Connectors.Memory.SqlServer/README.md     |   2 +-
 .../Connectors.MongoDB.UnitTests.csproj       |   2 +-
 .../Connectors.Pinecone.UnitTests.csproj      |   2 +-
 .../Connectors.Postgres.UnitTests.csproj      |   2 +-
 .../Connectors.Qdrant.UnitTests.csproj        |   2 +-
 .../Connectors.Redis.UnitTests.csproj         |   2 +-
.../Connectors.Sqlite.UnitTests.csproj | 2 +- .../Connectors.UnitTests.csproj | 2 +- .../Memory/Chroma/ChromaMemoryStoreTests.cs | 2 + .../Memory/DuckDB/DuckDBMemoryStoreTests.cs | 2 + .../Memory/Kusto/KustoMemoryStoreTests.cs | 2 + .../Connectors.Weaviate.UnitTests.csproj | 2 +- .../Process.IntegrationTestHost.Dapr.csproj | 2 +- .../Process.IntegrationTestRunner.Dapr.csproj | 2 +- .../Memory/Chroma/ChromaMemoryStoreTests.cs | 2 + .../Memory/Milvus/MilvusMemoryStoreTests.cs | 2 + .../IntegrationTests/IntegrationTests.csproj | 2 +- .../SemanticKernel.UnitTests.csproj | 2 +- .../PineconeIntegrationTests.csproj | 2 +- .../Support/PostgresTestStore.cs | 2 - .../SqlServerIntegrationTests.csproj | 2 +- 50 files changed, 93 insertions(+), 1162 deletions(-) delete mode 100644 dotnet/samples/Concepts/Memory/MemoryStore_CustomReadOnly.cs delete mode 100644 dotnet/samples/Concepts/Memory/SemanticTextMemory_Building.cs delete mode 100644 dotnet/samples/Concepts/Memory/TextMemoryPlugin_GeminiEmbeddingGeneration.cs delete mode 100644 dotnet/samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs delete mode 100644 dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs diff --git a/dotnet/samples/Concepts/Concepts.csproj b/dotnet/samples/Concepts/Concepts.csproj index 808841ad2d37..618bff3e06af 100644 --- a/dotnet/samples/Concepts/Concepts.csproj +++ b/dotnet/samples/Concepts/Concepts.csproj @@ -8,7 +8,7 @@ false true - $(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001,CA1724,MEVD9000 + $(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001,CA1724,MEVD9000 Library 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 diff --git a/dotnet/samples/Concepts/Memory/MemoryStore_CustomReadOnly.cs b/dotnet/samples/Concepts/Memory/MemoryStore_CustomReadOnly.cs deleted file mode 100644 index e8994db01afd..000000000000 --- a/dotnet/samples/Concepts/Memory/MemoryStore_CustomReadOnly.cs +++ /dev/null @@ -1,239 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Numerics.Tensors; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; -using System.Text.Json; -using Microsoft.SemanticKernel.Memory; - -namespace Memory; - -/// -/// This sample provides a custom implementation of that is read only. -/// In this sample, the data is stored in a JSON string and deserialized into an -/// . For this specific sample, the implementation -/// of has a single collection, and thus does not need to be named. -/// It also assumes that the JSON formatted data can be deserialized into objects. 
-/// -public class MemoryStore_CustomReadOnly(ITestOutputHelper output) : BaseTest(output) -{ - [Fact] - public async Task RunAsync() - { - var store = new ReadOnlyMemoryStore(s_jsonVectorEntries); - - var embedding = new ReadOnlyMemory([22, 4, 6]); - - Console.WriteLine("Reading data from custom read-only memory store"); - var memoryRecord = await store.GetAsync("collection", "key3"); - if (memoryRecord is not null) - { - Console.WriteLine($"ID = {memoryRecord.Metadata.Id}, Embedding = {string.Join(", ", MemoryMarshal.ToEnumerable(memoryRecord.Embedding))}"); - } - - Console.WriteLine($"Getting most similar vector to {string.Join(", ", MemoryMarshal.ToEnumerable(embedding))}"); - var result = await store.GetNearestMatchAsync("collection", embedding, 0.0); - if (result.HasValue) - { - Console.WriteLine($"ID = {string.Join(", ", MemoryMarshal.ToEnumerable(result.Value.Item1.Embedding))}, Embedding = {result.Value.Item2}"); - } - } - - private sealed class ReadOnlyMemoryStore : IMemoryStore - { - private readonly MemoryRecord[]? _memoryRecords = null; - private readonly int _vectorSize = 3; - - public ReadOnlyMemoryStore(string valueString) - { - s_jsonVectorEntries = s_jsonVectorEntries.Replace("\n", string.Empty, StringComparison.Ordinal); - s_jsonVectorEntries = s_jsonVectorEntries.Replace(" ", string.Empty, StringComparison.Ordinal); - this._memoryRecords = JsonSerializer.Deserialize(valueString); - - if (this._memoryRecords is null) - { - throw new Exception("Unable to deserialize memory records"); - } - } - - public Task CreateCollectionAsync(string collectionName, CancellationToken cancellationToken = default) - { - throw new System.NotImplementedException(); - } - - public Task DeleteCollectionAsync(string collectionName, CancellationToken cancellationToken = default) - { - throw new System.NotImplementedException(); - } - - public Task DoesCollectionExistAsync(string collectionName, CancellationToken cancellationToken = default) - { - throw new System.NotImplementedException(); - } - - public Task GetAsync(string collectionName, string key, bool withEmbedding = false, CancellationToken cancellationToken = default) - { - // Note: with this simple implementation, the MemoryRecord will always contain the embedding. - return Task.FromResult(this._memoryRecords?.FirstOrDefault(x => x.Key == key)); - } - - public async IAsyncEnumerable GetBatchAsync(string collectionName, IEnumerable keys, bool withEmbeddings = false, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - // Note: with this simple implementation, the MemoryRecord will always contain the embedding. - if (this._memoryRecords is not null) - { - foreach (var memoryRecord in this._memoryRecords) - { - if (keys.Contains(memoryRecord.Key)) - { - yield return memoryRecord; - } - } - } - } - - public IAsyncEnumerable GetCollectionsAsync(CancellationToken cancellationToken = default) - { - throw new System.NotImplementedException(); - } - - public async Task<(MemoryRecord, double)?> GetNearestMatchAsync(string collectionName, ReadOnlyMemory embedding, double minRelevanceScore = 0, - bool withEmbedding = false, CancellationToken cancellationToken = default) - { - // Note: with this simple implementation, the MemoryRecord will always contain the embedding. 
- await foreach (var item in this.GetNearestMatchesAsync( - collectionName: collectionName, - embedding: embedding, - limit: 1, - minRelevanceScore: minRelevanceScore, - withEmbeddings: withEmbedding, - cancellationToken: cancellationToken).ConfigureAwait(false)) - { - return item; - } - - return default; - } - - public async IAsyncEnumerable<(MemoryRecord, double)> GetNearestMatchesAsync(string collectionName, ReadOnlyMemory embedding, int limit, - double minRelevanceScore = 0, bool withEmbeddings = false, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - // Note: with this simple implementation, the MemoryRecord will always contain the embedding. - if (this._memoryRecords is null || this._memoryRecords.Length == 0) - { - yield break; - } - - if (embedding.Length != this._vectorSize) - { - throw new Exception($"Embedding vector size {embedding.Length} does not match expected size of {this._vectorSize}"); - } - - List<(MemoryRecord Record, double Score)> embeddings = []; - - foreach (var item in this._memoryRecords) - { - double similarity = TensorPrimitives.CosineSimilarity(embedding.Span, item.Embedding.Span); - if (similarity >= minRelevanceScore) - { - embeddings.Add(new(item, similarity)); - } - } - - foreach (var item in embeddings.OrderByDescending(l => l.Score).Take(limit)) - { - yield return (item.Record, item.Score); - } - } - - public Task RemoveAsync(string collectionName, string key, CancellationToken cancellationToken = default) - { - throw new System.NotImplementedException(); - } - - public Task RemoveBatchAsync(string collectionName, IEnumerable keys, CancellationToken cancellationToken = default) - { - throw new System.NotImplementedException(); - } - - public Task UpsertAsync(string collectionName, MemoryRecord record, CancellationToken cancellationToken = default) - { - throw new System.NotImplementedException(); - } - - public IAsyncEnumerable UpsertBatchAsync(string collectionName, IEnumerable records, CancellationToken cancellationToken = default) - { - throw new System.NotImplementedException(); - } - } - - private static string s_jsonVectorEntries = """ - [ - { - "embedding": [0, 0, 0], - "metadata": { - "is_reference": false, - "external_source_name": "externalSourceName", - "id": "Id1", - "description": "description", - "text": "text", - "additional_metadata" : "value:" - }, - "key": "key1", - "timestamp": null - }, - { - "embedding": [0, 0, 10], - "metadata": { - "is_reference": false, - "external_source_name": "externalSourceName", - "id": "Id2", - "description": "description", - "text": "text", - "additional_metadata" : "value:" - }, - "key": "key2", - "timestamp": null - }, - { - "embedding": [1, 2, 3], - "metadata": { - "is_reference": false, - "external_source_name": "externalSourceName", - "id": "Id3", - "description": "description", - "text": "text", - "additional_metadata" : "value:" - }, - "key": "key3", - "timestamp": null - }, - { - "embedding": [-1, -2, -3], - "metadata": { - "is_reference": false, - "external_source_name": "externalSourceName", - "id": "Id4", - "description": "description", - "text": "text", - "additional_metadata" : "value:" - }, - "key": "key4", - "timestamp": null - }, - { - "embedding": [12, 8, 4], - "metadata": { - "is_reference": false, - "external_source_name": "externalSourceName", - "id": "Id5", - "description": "description", - "text": "text", - "additional_metadata" : "value:" - }, - "key": "key5", - "timestamp": null - } - ] - """; -} diff --git 
a/dotnet/samples/Concepts/Memory/SemanticTextMemory_Building.cs b/dotnet/samples/Concepts/Memory/SemanticTextMemory_Building.cs deleted file mode 100644 index f4c1ea45407b..000000000000 --- a/dotnet/samples/Concepts/Memory/SemanticTextMemory_Building.cs +++ /dev/null @@ -1,171 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel.Connectors.AzureAISearch; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Memory; - -namespace Memory; - -/* The files contains two examples about SK Semantic Memory. - * - * 1. Memory using Azure AI Search. - * 2. Memory using a custom embedding generator and vector engine. - * - * Semantic Memory allows to store your data like traditional DBs, - * adding the ability to query it using natural language. - */ -[Obsolete("The IMemoryStore abstraction is being obsoleted")] -public class SemanticTextMemory_Building(ITestOutputHelper output) : BaseTest(output) -{ - private const string MemoryCollectionName = "SKGitHub"; - - [Fact] - public async Task RunAsync() - { - Console.WriteLine("=============================================================="); - Console.WriteLine("======== Semantic Memory using Azure AI Search ========"); - Console.WriteLine("=============================================================="); - - /* This example leverages Azure AI Search to provide SK with Semantic Memory. - * - * Azure AI Search automatically indexes your data semantically, so you don't - * need to worry about embedding generation. - */ - - var memoryWithACS = new MemoryBuilder() - .WithOpenAITextEmbeddingGeneration("text-embedding-ada-002", TestConfiguration.OpenAI.ApiKey) - .WithMemoryStore(new AzureAISearchMemoryStore(TestConfiguration.AzureAISearch.Endpoint, TestConfiguration.AzureAISearch.ApiKey)) - .Build(); - - await RunExampleAsync(memoryWithACS); - - Console.WriteLine("===================================================="); - Console.WriteLine("======== Semantic Memory (volatile, in RAM) ========"); - Console.WriteLine("===================================================="); - - /* You can build your own semantic memory combining an Embedding Generator - * with a Memory storage that supports search by similarity (ie semantic search). - * - * In this example we use a volatile memory, a local simulation of a vector DB. - * - * You can replace VolatileMemoryStore with Qdrant (see QdrantMemoryStore connector) - * or implement your connectors for Pinecone, Vespa, Postgres + pgvector, SQLite VSS, etc. - */ - - var memoryWithCustomDb = new MemoryBuilder() - .WithOpenAITextEmbeddingGeneration("text-embedding-ada-002", TestConfiguration.OpenAI.ApiKey) - .WithMemoryStore(new VolatileMemoryStore()) - .Build(); - - // Uncomment the following line to use GoogleAI embeddings - // var memoryWithCustomDb = new MemoryBuilder() - // .WithGoogleAITextEmbeddingGeneration(TestConfiguration.GoogleAI.EmbeddingModelId, TestConfiguration.GoogleAI.ApiKey) - // .WithMemoryStore(new VolatileMemoryStore()) - // .Build(); - - await RunExampleAsync(memoryWithCustomDb); - } - - private async Task RunExampleAsync(ISemanticTextMemory memory) - { - await StoreMemoryAsync(memory); - - await SearchMemoryAsync(memory, "How do I get started?"); - - /* - Output: - - Query: How do I get started? 
- - Result 1: - URL: : https://github.com/microsoft/semantic-kernel/blob/main/README.md - Title : README: Installation, getting started, and how to contribute - - Result 2: - URL: : https://github.com/microsoft/semantic-kernel/blob/main/samples/dotnet-jupyter-notebooks/00-getting-started.ipynb - Title : Jupyter notebook describing how to get started with the Semantic Kernel - - */ - - await SearchMemoryAsync(memory, "Can I build a chat with SK?"); - - /* - Output: - - Query: Can I build a chat with SK? - - Result 1: - URL: : https://github.com/microsoft/semantic-kernel/tree/main/prompt_template_samples/ChatPlugin/ChatGPT - Title : Sample demonstrating how to create a chat plugin interfacing with ChatGPT - - Result 2: - URL: : https://github.com/microsoft/semantic-kernel/blob/main/samples/apps/chat-summary-webapp-react/README.md - Title : README: README associated with a sample chat summary react-based webapp - - */ - } - - private async Task SearchMemoryAsync(ISemanticTextMemory memory, string query) - { - Console.WriteLine("\nQuery: " + query + "\n"); - - var memoryResults = memory.SearchAsync(MemoryCollectionName, query, limit: 2, minRelevanceScore: 0.5); - - int i = 0; - await foreach (MemoryQueryResult memoryResult in memoryResults) - { - Console.WriteLine($"Result {++i}:"); - Console.WriteLine(" URL: : " + memoryResult.Metadata.Id); - Console.WriteLine(" Title : " + memoryResult.Metadata.Description); - Console.WriteLine(" Relevance: " + memoryResult.Relevance); - Console.WriteLine(); - } - - Console.WriteLine("----------------------"); - } - - private async Task StoreMemoryAsync(ISemanticTextMemory memory) - { - /* Store some data in the semantic memory. - * - * When using Azure AI Search the data is automatically indexed on write. - * - * When using the combination of VolatileStore and Embedding generation, SK takes - * care of creating and storing the index - */ - - Console.WriteLine("\nAdding some GitHub file URLs and their descriptions to the semantic memory."); - var githubFiles = SampleData(); - var i = 0; - foreach (var entry in githubFiles) - { - await memory.SaveReferenceAsync( - collection: MemoryCollectionName, - externalSourceName: "GitHub", - externalId: entry.Key, - description: entry.Value, - text: entry.Value); - - Console.Write($" #{++i} saved."); - } - - Console.WriteLine("\n----------------------"); - } - - private static Dictionary SampleData() - { - return new Dictionary - { - ["https://github.com/microsoft/semantic-kernel/blob/main/README.md"] - = "README: Installation, getting started, and how to contribute", - ["https://github.com/microsoft/semantic-kernel/blob/main/dotnet/notebooks/02-running-prompts-from-file.ipynb"] - = "Jupyter notebook describing how to pass prompts from a file to a semantic plugin or function", - ["https://github.com/microsoft/semantic-kernel/blob/main/dotnet/notebooks/00-getting-started.ipynb"] - = "Jupyter notebook describing how to get started with the Semantic Kernel", - ["https://github.com/microsoft/semantic-kernel/tree/main/prompt_template_samples/ChatPlugin/ChatGPT"] - = "Sample demonstrating how to create a chat plugin interfacing with ChatGPT", - ["https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Plugins/Plugins.Memory/VolatileMemoryStore.cs"] - = "C# class that defines a volatile embedding store", - }; - } -} diff --git a/dotnet/samples/Concepts/Memory/TextMemoryPlugin_GeminiEmbeddingGeneration.cs b/dotnet/samples/Concepts/Memory/TextMemoryPlugin_GeminiEmbeddingGeneration.cs deleted file mode 100644 index 
0313370782e0..000000000000 --- a/dotnet/samples/Concepts/Memory/TextMemoryPlugin_GeminiEmbeddingGeneration.cs +++ /dev/null @@ -1,279 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.Google; -using Microsoft.SemanticKernel.Embeddings; -using Microsoft.SemanticKernel.Memory; - -namespace Memory; - -/// -/// Represents an example class for Gemini Embedding Generation with volatile memory store. -/// -public sealed class TextMemoryPlugin_GeminiEmbeddingGeneration(ITestOutputHelper output) : BaseTest(output) -{ - private const string MemoryCollectionName = "aboutMe"; - - [Fact] - public async Task GoogleAIAsync() - { - Console.WriteLine("============= Google AI - Gemini Embedding Generation ============="); - - Assert.NotNull(TestConfiguration.GoogleAI.ApiKey); - Assert.NotNull(TestConfiguration.GoogleAI.EmbeddingModelId); - - Kernel kernel = Kernel.CreateBuilder() - .AddGoogleAIGeminiChatCompletion( - modelId: TestConfiguration.GoogleAI.EmbeddingModelId, - apiKey: TestConfiguration.GoogleAI.ApiKey) - .AddGoogleAIEmbeddingGeneration( - modelId: TestConfiguration.GoogleAI.EmbeddingModelId, - apiKey: TestConfiguration.GoogleAI.ApiKey) - .Build(); - - await this.RunSimpleSampleAsync(kernel); - await this.RunTextMemoryPluginSampleAsync(kernel); - } - - [Fact] - public async Task VertexAIAsync() - { - Console.WriteLine("============= Vertex AI - Gemini Embedding Generation ============="); - - Assert.NotNull(TestConfiguration.VertexAI.BearerKey); - Assert.NotNull(TestConfiguration.VertexAI.Location); - Assert.NotNull(TestConfiguration.VertexAI.ProjectId); - Assert.NotNull(TestConfiguration.VertexAI.Gemini.ModelId); - Assert.NotNull(TestConfiguration.VertexAI.EmbeddingModelId); - - Kernel kernel = Kernel.CreateBuilder() - .AddVertexAIGeminiChatCompletion( - modelId: TestConfiguration.VertexAI.Gemini.ModelId, - bearerKey: TestConfiguration.VertexAI.BearerKey, - location: TestConfiguration.VertexAI.Location, - projectId: TestConfiguration.VertexAI.ProjectId) - .AddVertexAIEmbeddingGeneration( - modelId: TestConfiguration.VertexAI.EmbeddingModelId, - bearerKey: TestConfiguration.VertexAI.BearerKey, - location: TestConfiguration.VertexAI.Location, - projectId: TestConfiguration.VertexAI.ProjectId) - .Build(); - - // To generate bearer key, you need installed google sdk or use google web console with command: - // - // gcloud auth print-access-token - // - // Above code pass bearer key as string, it is not recommended way in production code, - // especially if IChatCompletionService and IEmbeddingGenerationService will be long lived, tokens generated by google sdk lives for 1 hour. - // You should use bearer key provider, which will be used to generate token on demand: - // - // Example: - // - // Kernel kernel = Kernel.CreateBuilder() - // .AddVertexAIGeminiChatCompletion( - // modelId: TestConfiguration.VertexAI.Gemini.ModelId, - // bearerKeyProvider: () => - // { - // // This is just example, in production we recommend using Google SDK to generate your BearerKey token. - // // This delegate will be called on every request, - // // when providing the token consider using caching strategy and refresh token logic when it is expired or close to expiration. 
- // return GetBearerKey(); - // }, - // location: TestConfiguration.VertexAI.Location, - // projectId: TestConfiguration.VertexAI.ProjectId) - // .AddVertexAIEmbeddingGeneration( - // modelId: embeddingModelId, - // bearerKeyProvider: () => - // { - // // This is just example, in production we recommend using Google SDK to generate your BearerKey token. - // // This delegate will be called on every request, - // // when providing the token consider using caching strategy and refresh token logic when it is expired or close to expiration. - // return GetBearerKey(); - // }, - // location: geminiLocation, - // projectId: geminiProject); - - await this.RunSimpleSampleAsync(kernel); - await this.RunTextMemoryPluginSampleAsync(kernel); - } - - private async Task RunSimpleSampleAsync(Kernel kernel) - { - Console.WriteLine("== Simple Sample: Generating Embeddings =="); - - // Obtain an embedding generator. - var embeddingGenerator = kernel.GetRequiredService(); - - var generatedEmbeddings = await embeddingGenerator.GenerateEmbeddingAsync("My name is Andrea"); - Console.WriteLine($"Generated Embeddings count: {generatedEmbeddings.Length}, " + - $"First five: {string.Join(", ", generatedEmbeddings[..5])}..."); - Console.WriteLine(); - } - - private async Task RunTextMemoryPluginSampleAsync(Kernel kernel) - { - Console.WriteLine("== Complex Sample: TextMemoryPlugin =="); - - var memoryStore = new VolatileMemoryStore(); - - // Obtain an embedding generator to use for semantic memory. - var embeddingGenerator = kernel.GetRequiredService(); - - // The combination of the text embedding generator and the memory store makes up the 'SemanticTextMemory' object used to - // store and retrieve memories. - Microsoft.SemanticKernel.Memory.SemanticTextMemory textMemory = new(memoryStore, embeddingGenerator); - - ///////////////////////////////////////////////////////////////////////////////////////////////////// - // PART 1: Store and retrieve memories using the ISemanticTextMemory (textMemory) object. - // - // This is a simple way to store memories from a code perspective, without using the Kernel. - ///////////////////////////////////////////////////////////////////////////////////////////////////// - Console.WriteLine("== PART 1: Saving Memories through the ISemanticTextMemory object =="); - - Console.WriteLine("Saving memory with key 'info1': \"My name is Andrea\""); - await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info1", text: "My name is Andrea"); - - Console.WriteLine("Saving memory with key 'info2': \"I work as a tourist operator\""); - await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info2", text: "I work as a tourist operator"); - - Console.WriteLine("Saving memory with key 'info3': \"I've been living in Seattle since 2005\""); - await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info3", text: "I've been living in Seattle since 2005"); - - Console.WriteLine("Saving memory with key 'info4': \"I visited France and Italy five times since 2015\""); - await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info4", text: "I visited France and Italy five times since 2015"); - - Console.WriteLine(); - - ///////////////////////////////////////////////////////////////////////////////////////////////////// - // PART 2: Create TextMemoryPlugin, store memories through the Kernel. 
- // - // This enables prompt functions and the AI (via Planners) to access memories - ///////////////////////////////////////////////////////////////////////////////////////////////////// - - Console.WriteLine("== PART 2: Saving Memories through the Kernel with TextMemoryPlugin and the 'Save' function =="); - - // Import the TextMemoryPlugin into the Kernel for other functions - var memoryPlugin = kernel.ImportPluginFromObject(new Microsoft.SemanticKernel.Plugins.Memory.TextMemoryPlugin(textMemory)); - - // Save a memory with the Kernel - Console.WriteLine("Saving memory with key 'info5': \"My family is from New York\""); - await kernel.InvokeAsync(memoryPlugin["Save"], new() - { - [Microsoft.SemanticKernel.Plugins.Memory.TextMemoryPlugin.InputParam] = "My family is from New York", - [Microsoft.SemanticKernel.Plugins.Memory.TextMemoryPlugin.CollectionParam] = MemoryCollectionName, - [Microsoft.SemanticKernel.Plugins.Memory.TextMemoryPlugin.KeyParam] = "info5", - }); - - Console.WriteLine(); - - ///////////////////////////////////////////////////////////////////////////////////////////////////// - // PART 3: Recall similar ideas with semantic search - // - // Uses AI Embeddings for fuzzy lookup of memories based on intent, rather than a specific key. - ///////////////////////////////////////////////////////////////////////////////////////////////////// - - Console.WriteLine("== PART 3: Recall (similarity search) with AI Embeddings =="); - - Console.WriteLine("== PART 3a: Recall (similarity search) with ISemanticTextMemory =="); - Console.WriteLine("Ask: live in Seattle?"); - - await foreach (var answer in textMemory.SearchAsync( - collection: MemoryCollectionName, - query: "live in Seattle?", - limit: 2, - minRelevanceScore: 0.79, - withEmbeddings: true)) - { - Console.WriteLine($"Answer: {answer.Metadata.Text}"); - } - - /* Possible output: - Answer: I've been living in Seattle since 2005 - */ - - Console.WriteLine("== PART 3b: Recall (similarity search) with Kernel and TextMemoryPlugin 'Recall' function =="); - Console.WriteLine("Ask: my family is from?"); - - var result = await kernel.InvokeAsync(memoryPlugin["Recall"], new() - { - [Microsoft.SemanticKernel.Plugins.Memory.TextMemoryPlugin.InputParam] = "Ask: my family is from?", - [Microsoft.SemanticKernel.Plugins.Memory.TextMemoryPlugin.CollectionParam] = MemoryCollectionName, - [Microsoft.SemanticKernel.Plugins.Memory.TextMemoryPlugin.LimitParam] = "2", - [Microsoft.SemanticKernel.Plugins.Memory.TextMemoryPlugin.RelevanceParam] = "0.79", - }); - - Console.WriteLine($"Answer: {result.GetValue()}"); - Console.WriteLine(); - - /* Possible output: - Answer: ["My family is from New York"] - */ - - ///////////////////////////////////////////////////////////////////////////////////////////////////// - // PART 4: TextMemoryPlugin Recall in a Prompt Function - // - // Looks up related memories when rendering a prompt template, then sends the rendered prompt to - // the text generation model to answer a natural language query. 
- ///////////////////////////////////////////////////////////////////////////////////////////////////// - - Console.WriteLine("== PART 4: Using TextMemoryPlugin 'Recall' function in a Prompt Function =="); - - // Build a prompt function that uses memory to find facts - const string RecallFunctionDefinition = @" -Consider only the facts below when answering questions: - -BEGIN FACTS -About me: {{recall 'live in Seattle?'}} -About me: {{recall 'my family is from?'}} -END FACTS - -Question: {{$input}} - -Answer: -"; - - result = await kernel.InvokePromptAsync(RecallFunctionDefinition, new(new GeminiPromptExecutionSettings { MaxTokens = 1000 }) - { - [Microsoft.SemanticKernel.Plugins.Memory.TextMemoryPlugin.InputParam] = "Where are my family from?", - [Microsoft.SemanticKernel.Plugins.Memory.TextMemoryPlugin.CollectionParam] = MemoryCollectionName, - [Microsoft.SemanticKernel.Plugins.Memory.TextMemoryPlugin.LimitParam] = "2", - [Microsoft.SemanticKernel.Plugins.Memory.TextMemoryPlugin.RelevanceParam] = "0.79", - }); - - Console.WriteLine("Ask: Where are my family from?"); - Console.WriteLine($"Answer: {result.GetValue()}"); - - /* Possible output: - Answer: New York - */ - - Console.WriteLine(); - - ///////////////////////////////////////////////////////////////////////////////////////////////////// - // PART 5: Cleanup, deleting database collection - // - ///////////////////////////////////////////////////////////////////////////////////////////////////// - - Console.WriteLine("== PART 5: Cleanup, deleting database collection =="); - - Console.WriteLine("Printing Collections in DB..."); - var collections = memoryStore.GetCollectionsAsync(); - await foreach (var collection in collections) - { - Console.WriteLine(collection); - } - - Console.WriteLine(); - - Console.WriteLine($"Removing Collection {MemoryCollectionName}"); - await memoryStore.DeleteCollectionAsync(MemoryCollectionName); - Console.WriteLine(); - - Console.WriteLine($"Printing Collections in DB (after removing {MemoryCollectionName})..."); - collections = memoryStore.GetCollectionsAsync(); - await foreach (var collection in collections) - { - Console.WriteLine(collection); - } - } -} diff --git a/dotnet/samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs b/dotnet/samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs deleted file mode 100644 index c7b9d2cb40ee..000000000000 --- a/dotnet/samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs +++ /dev/null @@ -1,337 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AzureAISearch; -using Microsoft.SemanticKernel.Connectors.Chroma; -using Microsoft.SemanticKernel.Connectors.DuckDB; -using Microsoft.SemanticKernel.Connectors.Kusto; -using Microsoft.SemanticKernel.Connectors.MongoDB; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Connectors.Pinecone; -using Microsoft.SemanticKernel.Connectors.Postgres; -using Microsoft.SemanticKernel.Connectors.Qdrant; -using Microsoft.SemanticKernel.Connectors.Redis; -using Microsoft.SemanticKernel.Connectors.Sqlite; -using Microsoft.SemanticKernel.Connectors.Weaviate; -using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Plugins.Memory; -using Npgsql; -using StackExchange.Redis; - -namespace Memory; - -[Obsolete("The IMemoryStore abstraction is being obsoleted")] -public class TextMemoryPlugin_MultipleMemoryStore(ITestOutputHelper output) : BaseTest(output) -{ - private const string MemoryCollectionName = "aboutMe"; - - [Theory] - [InlineData("Volatile")] - [InlineData("AzureAISearch")] - public async Task RunAsync(string provider) - { - // Volatile Memory Store - an in-memory store that is not persisted - IMemoryStore store = provider switch - { - "AzureAISearch" => CreateSampleAzureAISearchMemoryStore(), - _ => new VolatileMemoryStore(), - }; - - /////////////////////////////////////////////////////////////////////////////////////////////////// - // INSTRUCTIONS: uncomment one of the following lines to select a different memory store to use. // - /////////////////////////////////////////////////////////////////////////////////////////////////// - - // Sqlite Memory Store - a file-based store that persists data in a Sqlite database - // store = await CreateSampleSqliteMemoryStoreAsync(); - - // DuckDB Memory Store - a file-based store that persists data in a DuckDB database - // store = await CreateSampleDuckDbMemoryStoreAsync(); - - // MongoDB Memory Store - a store that persists data in a MongoDB database - // store = CreateSampleMongoDBMemoryStore(); - - // Azure AI Search Memory Store - a store that persists data in a hosted Azure AI Search database - // store = CreateSampleAzureAISearchMemoryStore(); - - // Qdrant Memory Store - a store that persists data in a local or remote Qdrant database - // store = CreateSampleQdrantMemoryStore(); - - // Chroma Memory Store - // store = CreateSampleChromaMemoryStore(); - - // Pinecone Memory Store - a store that persists data in a hosted Pinecone database - // store = CreateSamplePineconeMemoryStore(); - - // Weaviate Memory Store - // store = CreateSampleWeaviateMemoryStore(); - - // Redis Memory Store - // store = await CreateSampleRedisMemoryStoreAsync(); - - // Postgres Memory Store - // store = CreateSamplePostgresMemoryStore(); - - // Kusto Memory Store - // store = CreateSampleKustoMemoryStore(); - - await RunWithStoreAsync(store); - } - - private async Task CreateSampleSqliteMemoryStoreAsync() - { - IMemoryStore store = await SqliteMemoryStore.ConnectAsync("memories.sqlite"); - return store; - } - - private async Task CreateSampleDuckDbMemoryStoreAsync() - { - IMemoryStore store = await DuckDBMemoryStore.ConnectAsync("memories.duckdb", 1536); - return store; - } - - private IMemoryStore CreateSampleMongoDBMemoryStore() - { - IMemoryStore store = new MongoDBMemoryStore(TestConfiguration.MongoDB.ConnectionString, "memoryPluginExample"); - return store; - } - - private IMemoryStore CreateSampleAzureAISearchMemoryStore() - { - IMemoryStore 
store = new AzureAISearchMemoryStore(TestConfiguration.AzureAISearch.Endpoint, TestConfiguration.AzureAISearch.ApiKey); - return store; - } - - private IMemoryStore CreateSampleChromaMemoryStore() - { - IMemoryStore store = new ChromaMemoryStore(TestConfiguration.Chroma.Endpoint, this.LoggerFactory); - return store; - } - - private IMemoryStore CreateSampleQdrantMemoryStore() - { - IMemoryStore store = new QdrantMemoryStore(TestConfiguration.Qdrant.Endpoint, 1536, this.LoggerFactory); - return store; - } - - private IMemoryStore CreateSamplePineconeMemoryStore() - { - IMemoryStore store = new PineconeMemoryStore(TestConfiguration.Pinecone.Environment, TestConfiguration.Pinecone.ApiKey, this.LoggerFactory); - return store; - } - - private IMemoryStore CreateSampleWeaviateMemoryStore() - { - IMemoryStore store = new WeaviateMemoryStore(TestConfiguration.Weaviate.Endpoint, TestConfiguration.Weaviate.ApiKey); - return store; - } - - private async Task CreateSampleRedisMemoryStoreAsync() - { - string configuration = TestConfiguration.Redis.Configuration; - ConnectionMultiplexer connectionMultiplexer = await ConnectionMultiplexer.ConnectAsync(configuration); - IDatabase database = connectionMultiplexer.GetDatabase(); - IMemoryStore store = new RedisMemoryStore(database, vectorSize: 1536); - return store; - } - - private static IMemoryStore CreateSamplePostgresMemoryStore() - { - NpgsqlDataSourceBuilder dataSourceBuilder = new(TestConfiguration.Postgres.ConnectionString); - dataSourceBuilder.UseVector(); - NpgsqlDataSource dataSource = dataSourceBuilder.Build(); - IMemoryStore store = new PostgresMemoryStore(dataSource, vectorSize: 1536, schema: "public"); - return store; - } - - private static IMemoryStore CreateSampleKustoMemoryStore() - { - var connectionString = new Kusto.Data.KustoConnectionStringBuilder(TestConfiguration.Kusto.ConnectionString).WithAadUserPromptAuthentication(); - IMemoryStore store = new KustoMemoryStore(connectionString, "MyDatabase"); - return store; - } - - private async Task RunWithStoreAsync(IMemoryStore memoryStore) - { - var kernel = Kernel.CreateBuilder() - .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) - .AddOpenAITextEmbeddingGeneration(TestConfiguration.OpenAI.EmbeddingModelId, TestConfiguration.OpenAI.ApiKey) - .Build(); - - // Create an embedding generator to use for semantic memory. - var embeddingGenerator = new OpenAITextEmbeddingGenerationService(TestConfiguration.OpenAI.EmbeddingModelId, TestConfiguration.OpenAI.ApiKey); - - // The combination of the text embedding generator and the memory store makes up the 'SemanticTextMemory' object used to - // store and retrieve memories. - SemanticTextMemory textMemory = new(memoryStore, embeddingGenerator); - - ///////////////////////////////////////////////////////////////////////////////////////////////////// - // PART 1: Store and retrieve memories using the ISemanticTextMemory (textMemory) object. - // - // This is a simple way to store memories from a code perspective, without using the Kernel. 
- ///////////////////////////////////////////////////////////////////////////////////////////////////// - Console.WriteLine("== PART 1a: Saving Memories through the ISemanticTextMemory object =="); - - Console.WriteLine("Saving memory with key 'info1': \"My name is Andrea\""); - await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info1", text: "My name is Andrea"); - - Console.WriteLine("Saving memory with key 'info2': \"I work as a tourist operator\""); - await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info2", text: "I work as a tourist operator"); - - Console.WriteLine("Saving memory with key 'info3': \"I've been living in Seattle since 2005\""); - await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info3", text: "I've been living in Seattle since 2005"); - - Console.WriteLine("Saving memory with key 'info4': \"I visited France and Italy five times since 2015\""); - await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info4", text: "I visited France and Italy five times since 2015"); - - // Retrieve a memory - Console.WriteLine("== PART 1b: Retrieving Memories through the ISemanticTextMemory object =="); - MemoryQueryResult? lookup = await textMemory.GetAsync(MemoryCollectionName, "info1"); - Console.WriteLine("Memory with key 'info1':" + lookup?.Metadata.Text ?? "ERROR: memory not found"); - Console.WriteLine(); - - ///////////////////////////////////////////////////////////////////////////////////////////////////// - // PART 2: Create TextMemoryPlugin, store and retrieve memories through the Kernel. - // - // This enables prompt functions and the AI (via Planners) to access memories - ///////////////////////////////////////////////////////////////////////////////////////////////////// - - Console.WriteLine("== PART 2a: Saving Memories through the Kernel with TextMemoryPlugin and the 'Save' function =="); - - // Import the TextMemoryPlugin into the Kernel for other functions - var memoryPlugin = kernel.ImportPluginFromObject(new TextMemoryPlugin(textMemory)); - - // Save a memory with the Kernel - Console.WriteLine("Saving memory with key 'info5': \"My family is from New York\""); - await kernel.InvokeAsync(memoryPlugin["Save"], new() - { - [TextMemoryPlugin.InputParam] = "My family is from New York", - [TextMemoryPlugin.CollectionParam] = MemoryCollectionName, - [TextMemoryPlugin.KeyParam] = "info5", - }); - - // Retrieve a specific memory with the Kernel - Console.WriteLine("== PART 2b: Retrieving Memories through the Kernel with TextMemoryPlugin and the 'Retrieve' function =="); - var result = await kernel.InvokeAsync(memoryPlugin["Retrieve"], new KernelArguments() - { - [TextMemoryPlugin.CollectionParam] = MemoryCollectionName, - [TextMemoryPlugin.KeyParam] = "info5" - }); - - Console.WriteLine("Memory with key 'info5':" + result.GetValue() ?? "ERROR: memory not found"); - Console.WriteLine(); - - ///////////////////////////////////////////////////////////////////////////////////////////////////// - // PART 3: Recall similar ideas with semantic search - // - // Uses AI Embeddings for fuzzy lookup of memories based on intent, rather than a specific key. 
- ///////////////////////////////////////////////////////////////////////////////////////////////////// - - Console.WriteLine("== PART 3: Recall (similarity search) with AI Embeddings =="); - - Console.WriteLine("== PART 3a: Recall (similarity search) with ISemanticTextMemory =="); - Console.WriteLine("Ask: where did I grow up?"); - - await foreach (var answer in textMemory.SearchAsync( - collection: MemoryCollectionName, - query: "where did I grow up?", - limit: 2, - minRelevanceScore: 0.79, - withEmbeddings: true)) - { - Console.WriteLine($"Answer: {answer.Metadata.Text}"); - } - - Console.WriteLine("== PART 3b: Recall (similarity search) with Kernel and TextMemoryPlugin 'Recall' function =="); - Console.WriteLine("Ask: where do I live?"); - - result = await kernel.InvokeAsync(memoryPlugin["Recall"], new() - { - [TextMemoryPlugin.InputParam] = "Ask: where do I live?", - [TextMemoryPlugin.CollectionParam] = MemoryCollectionName, - [TextMemoryPlugin.LimitParam] = "2", - [TextMemoryPlugin.RelevanceParam] = "0.79", - }); - - Console.WriteLine($"Answer: {result.GetValue()}"); - Console.WriteLine(); - - /* - Output: - - Ask: where did I grow up? - Answer: - ["My family is from New York","I\u0027ve been living in Seattle since 2005"] - - Ask: where do I live? - Answer: - ["I\u0027ve been living in Seattle since 2005","My family is from New York"] - */ - - ///////////////////////////////////////////////////////////////////////////////////////////////////// - // PART 4: TextMemoryPlugin Recall in a Prompt Function - // - // Looks up related memories when rendering a prompt template, then sends the rendered prompt to - // the text generation model to answer a natural language query. - ///////////////////////////////////////////////////////////////////////////////////////////////////// - - Console.WriteLine("== PART 4: Using TextMemoryPlugin 'Recall' function in a Prompt Function =="); - - // Build a prompt function that uses memory to find facts - const string RecallFunctionDefinition = @" -Consider only the facts below when answering questions: - -BEGIN FACTS -About me: {{recall 'where did I grow up?'}} -About me: {{recall 'where do I live now?'}} -END FACTS - -Question: {{$input}} - -Answer: -"; - - var aboutMeOracle = kernel.CreateFunctionFromPrompt(RecallFunctionDefinition, new OpenAIPromptExecutionSettings() { MaxTokens = 100 }); - - result = await kernel.InvokeAsync(aboutMeOracle, new() - { - [TextMemoryPlugin.InputParam] = "Do I live in the same town where I grew up?", - [TextMemoryPlugin.CollectionParam] = MemoryCollectionName, - [TextMemoryPlugin.LimitParam] = "2", - [TextMemoryPlugin.RelevanceParam] = "0.79", - }); - - Console.WriteLine("Ask: Do I live in the same town where I grew up?"); - Console.WriteLine($"Answer: {result.GetValue()}"); - - /* - Approximate Output: - Answer: No, I do not live in the same town where I grew up since my family is from New York and I have been living in Seattle since 2005. 
- */ - - ///////////////////////////////////////////////////////////////////////////////////////////////////// - // PART 5: Cleanup, deleting database collection - // - ///////////////////////////////////////////////////////////////////////////////////////////////////// - - Console.WriteLine("== PART 5: Cleanup, deleting database collection =="); - - Console.WriteLine("Printing Collections in DB..."); - var collections = memoryStore.GetCollectionsAsync(); - await foreach (var collection in collections) - { - Console.WriteLine(collection); - } - Console.WriteLine(); - - Console.WriteLine($"Removing Collection {MemoryCollectionName}"); - await memoryStore.DeleteCollectionAsync(MemoryCollectionName); - Console.WriteLine(); - - Console.WriteLine($"Printing Collections in DB (after removing {MemoryCollectionName})..."); - collections = memoryStore.GetCollectionsAsync(); - await foreach (var collection in collections) - { - Console.WriteLine(collection); - } - } -} diff --git a/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs b/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs deleted file mode 100644 index 883195b68df9..000000000000 --- a/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Text.Encodings.Web; -using System.Text.Json; -using System.Text.Unicode; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AzureOpenAI; -using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Plugins.Memory; - -namespace Memory; - -/// -/// This example shows how to use custom when serializing multiple results during recall using . -/// -/// -/// When multiple results are returned during recall, has to turn these results into a string to pass back to the kernel. -/// The uses to turn the results into a string. -/// In some cases though, the default serialization options may not work, e.g. if the memories contain non-latin text, -/// will escape these characters by default. In this case, you can provide custom to the to control how the memories are serialized. -/// -public class TextMemoryPlugin_RecallJsonSerializationWithOptions(ITestOutputHelper output) : BaseTest(output) -{ - [Fact] - public async Task RunAsync() - { - // Create a Kernel. - var kernelWithoutOptions = Kernel.CreateBuilder() - .Build(); - - // Create an embedding generator to use for semantic memory. - var embeddingGenerator = new AzureOpenAITextEmbeddingGenerationService(TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, TestConfiguration.AzureOpenAIEmbeddings.Endpoint, TestConfiguration.AzureOpenAIEmbeddings.ApiKey); - - // Using an in memory store for this example. - var memoryStore = new VolatileMemoryStore(); - - // The combination of the text embedding generator and the memory store makes up the 'SemanticTextMemory' object used to - // store and retrieve memories. - SemanticTextMemory textMemory = new(memoryStore, embeddingGenerator); - await textMemory.SaveInformationAsync("samples", "First example of some text in Thai and Bengali: วรรณยุกต์ চলিতভাষা", "test-record-1"); - await textMemory.SaveInformationAsync("samples", "Second example of some text in Thai and Bengali: วรรณยุกต์ চলিতভাষা", "test-record-2"); - - // Import the TextMemoryPlugin into the Kernel without any custom JsonSerializerOptions. 
- var memoryPluginWithoutOptions = kernelWithoutOptions.ImportPluginFromObject(new TextMemoryPlugin(textMemory)); - - // Retrieve the memories using the TextMemoryPlugin. - var resultWithoutOptions = await kernelWithoutOptions.InvokeAsync(memoryPluginWithoutOptions["Recall"], new() - { - [TextMemoryPlugin.InputParam] = "Text examples", - [TextMemoryPlugin.CollectionParam] = "samples", - [TextMemoryPlugin.LimitParam] = "2", - [TextMemoryPlugin.RelevanceParam] = "0.79", - }); - - // The recall operation returned the following text, where the Thai and Bengali text was escaped: - // ["Second example of some text in Thai and Bengali: \u0E27\u0E23\u0E23\u0E13\u0E22\u0E38\u0E01\u0E15\u0E4C \u099A\u09B2\u09BF\u09A4\u09AD\u09BE\u09B7\u09BE","First example of some text in Thai and Bengali: \u0E27\u0E23\u0E23\u0E13\u0E22\u0E38\u0E01\u0E15\u0E4C \u099A\u09B2\u09BF\u09A4\u09AD\u09BE\u09B7\u09BE"] - Console.WriteLine(resultWithoutOptions.GetValue()); - - // Create a Kernel. - var kernelWithOptions = Kernel.CreateBuilder() - .Build(); - - // Import the TextMemoryPlugin into the Kernel with custom JsonSerializerOptions that allow Thai and Bengali script to be serialized unescaped. - var options = new JsonSerializerOptions { Encoder = JavaScriptEncoder.Create(UnicodeRanges.BasicLatin, UnicodeRanges.Thai, UnicodeRanges.Bengali) }; - var memoryPluginWithOptions = kernelWithOptions.ImportPluginFromObject(new TextMemoryPlugin(textMemory, jsonSerializerOptions: options)); - - // Retrieve the memories using the TextMemoryPlugin. - var result = await kernelWithOptions.InvokeAsync(memoryPluginWithOptions["Recall"], new() - { - [TextMemoryPlugin.InputParam] = "Text examples", - [TextMemoryPlugin.CollectionParam] = "samples", - [TextMemoryPlugin.LimitParam] = "2", - [TextMemoryPlugin.RelevanceParam] = "0.79", - }); - - // The recall operation returned the following text, where the Thai and Bengali text was not escaped: - // ["Second example of some text in Thai and Bengali: วรรณยุกต์ চলিতভাষা","First example of some text in Thai and Bengali: วรรณยุกต์ চলিতভাষা"] - Console.WriteLine(result.GetValue()); - } -} diff --git a/dotnet/samples/Concepts/RAG/WithPlugins.cs b/dotnet/samples/Concepts/RAG/WithPlugins.cs index 267a6c3618a9..e8a55ab8d660 100644 --- a/dotnet/samples/Concepts/RAG/WithPlugins.cs +++ b/dotnet/samples/Concepts/RAG/WithPlugins.cs @@ -2,10 +2,12 @@ using System.Net.Http.Headers; using System.Text.Json; +using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.Chroma; +using Microsoft.SemanticKernel.Connectors.InMemory; using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Memory; +using Microsoft.SemanticKernel.Data; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; using Resources; namespace RAG; @@ -27,23 +29,46 @@ public async Task RAGWithCustomPluginAsync() } /// - /// Shows how to use RAG pattern with . + /// Shows how to use RAG pattern with . 
/// - [Fact(Skip = "Requires Chroma server up and running")] - public async Task RAGWithTextMemoryPluginAsync() + [Fact] + public async Task RAGWithInMemoryVectorStoreAndPluginAsync() { - var memory = new MemoryBuilder() - .WithMemoryStore(new ChromaMemoryStore("http://localhost:8000")) - .WithOpenAITextEmbeddingGeneration(TestConfiguration.OpenAI.EmbeddingModelId, TestConfiguration.OpenAI.ApiKey) - .Build(); + var vectorStore = new InMemoryVectorStore(); + var textEmbeddingGenerator = new OpenAITextEmbeddingGenerationService( + TestConfiguration.OpenAI.EmbeddingModelId, + TestConfiguration.OpenAI.ApiKey); var kernel = Kernel.CreateBuilder() .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) .Build(); - kernel.ImportPluginFromObject(new Microsoft.SemanticKernel.Plugins.Memory.TextMemoryPlugin(memory)); - - var result = await kernel.InvokePromptAsync("{{recall 'budget by year' collection='finances'}} What is my budget for 2024?"); + // Create the collection and add data + var collection = vectorStore.GetCollection("finances"); + await collection.CreateCollectionAsync(); + string[] budgetInfo = + { + "The budget for 2020 is EUR 100 000", + "The budget for 2021 is EUR 120 000", + "The budget for 2022 is EUR 150 000", + "The budget for 2023 is EUR 200 000", + "The budget for 2024 is EUR 364 000" + }; + var vectors = await textEmbeddingGenerator.GenerateEmbeddingsAsync(budgetInfo); + var records = budgetInfo.Zip(vectors).Select((input, index) => new FinanceInfo { Key = index.ToString(), Text = input.First, Embedding = input.Second }).ToList(); + await collection.UpsertAsync(records); + + // Add the collection to the kernel as a plugin. + var textSearch = new VectorStoreTextSearch(collection, textEmbeddingGenerator); + kernel.Plugins.Add(textSearch.CreateWithSearch("FinanceSearch", "Can search for budget information")); + + // Invoke the kernel, using the plugin from within the prompt. + KernelArguments arguments = new() { { "query", "What is my budget for 2024?" 
} }; + var result = await kernel.InvokePromptAsync( + "{{FinanceSearch-Search query}} {{query}}", + arguments, + templateFormat: HandlebarsPromptTemplateFactory.HandlebarsTemplateFormat, + promptTemplateFactory: new HandlebarsPromptTemplateFactory()); Console.WriteLine(result); } @@ -91,5 +116,16 @@ public async Task SearchAsync(string query) } } + private sealed class FinanceInfo + { + [VectorStoreRecordKey] + public string Key { get; set; } = string.Empty; + [TextSearchResultValue] + [VectorStoreRecordData] + public string Text { get; set; } = string.Empty; + [VectorStoreRecordVector(1536)] + public ReadOnlyMemory Embedding { get; set; } + } + #endregion } diff --git a/dotnet/samples/Concepts/README.md b/dotnet/samples/Concepts/README.md index 4b5bb994c293..20c44cf6aede 100644 --- a/dotnet/samples/Concepts/README.md +++ b/dotnet/samples/Concepts/README.md @@ -130,13 +130,8 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom - [Ollama_EmbeddingGeneration](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/Ollama_EmbeddingGeneration.cs) - [Onnx_EmbeddingGeneration](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/Onnx_EmbeddingGeneration.cs) - [HuggingFace_EmbeddingGeneration](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/HuggingFace_EmbeddingGeneration.cs) -- [MemoryStore_CustomReadOnly](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/MemoryStore_CustomReadOnly.cs) -- [SemanticTextMemory_Building](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/SemanticTextMemory_Building.cs) - [TextChunkerUsage](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/TextChunkerUsage.cs) - [TextChunkingAndEmbedding](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/TextChunkingAndEmbedding.cs) -- [TextMemoryPlugin_GeminiEmbeddingGeneration](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/TextMemoryPlugin_GeminiEmbeddingGeneration.cs) -- [TextMemoryPlugin_MultipleMemoryStore](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs) -- [TextMemoryPlugin_RecallJsonSerializationWithOptions](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs) - [VectorStore_DataIngestion_Simple: A simple example of how to do data ingestion into a vector store when getting started.](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_Simple.cs) - [VectorStore_DataIngestion_MultiStore: An example of data ingestion that uses the same code to ingest into multiple vector stores types.](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_MultiStore.cs) - [VectorStore_DataIngestion_CustomMapper: An example that shows how to use a custom mapper for when your data model and storage model doesn't match.](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs) diff --git a/dotnet/samples/Demos/OnnxSimpleRAG/OnnxSimpleRAG.csproj b/dotnet/samples/Demos/OnnxSimpleRAG/OnnxSimpleRAG.csproj index bbb5f38ba81d..24a28cf88ab8 100644 --- 
a/dotnet/samples/Demos/OnnxSimpleRAG/OnnxSimpleRAG.csproj +++ b/dotnet/samples/Demos/OnnxSimpleRAG/OnnxSimpleRAG.csproj @@ -3,7 +3,7 @@ Exe net8.0 - $(NoWarn);CA2007;CS0612;VSTHRD111;SKEXP0070;SKEXP0050;SKEXP0001;SKEXP0020 + $(NoWarn);CA2007;CS0612;VSTHRD111;SKEXP0070;SKEXP0050;SKEXP0001 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 diff --git a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.csproj b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.csproj index 846843bdca9e..7d1d3995191d 100644 --- a/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.csproj +++ b/dotnet/samples/Demos/ProcessFrameworkWithAspire/ProcessFramework.Aspire/ProcessFramework.Aspire.ProcessOrchestrator/ProcessFramework.Aspire.ProcessOrchestrator.csproj @@ -6,7 +6,7 @@ enable enable - $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0101,SKEXP0110,OPENAI001 + $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0101,SKEXP0110,OPENAI001 diff --git a/dotnet/samples/Demos/ProcessWithCloudEvents/ProcessWithCloudEvents.Grpc/ProcessWithCloudEvents.Grpc.csproj b/dotnet/samples/Demos/ProcessWithCloudEvents/ProcessWithCloudEvents.Grpc/ProcessWithCloudEvents.Grpc.csproj index 5724e503f68e..b2d5022ffa34 100644 --- a/dotnet/samples/Demos/ProcessWithCloudEvents/ProcessWithCloudEvents.Grpc/ProcessWithCloudEvents.Grpc.csproj +++ b/dotnet/samples/Demos/ProcessWithCloudEvents/ProcessWithCloudEvents.Grpc/ProcessWithCloudEvents.Grpc.csproj @@ -5,7 +5,7 @@ enable enable - $(NoWarn);CA2007,CS1591,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0110 + $(NoWarn);CA2007,CS1591,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0110 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 diff --git a/dotnet/samples/Demos/ProcessWithCloudEvents/ProcessWithCloudEvents.Processes/ProcessWithCloudEvents.Processes.csproj b/dotnet/samples/Demos/ProcessWithCloudEvents/ProcessWithCloudEvents.Processes/ProcessWithCloudEvents.Processes.csproj index eb4cbc961b66..1fafc3012f07 100644 --- a/dotnet/samples/Demos/ProcessWithCloudEvents/ProcessWithCloudEvents.Processes/ProcessWithCloudEvents.Processes.csproj +++ b/dotnet/samples/Demos/ProcessWithCloudEvents/ProcessWithCloudEvents.Processes/ProcessWithCloudEvents.Processes.csproj @@ -5,7 +5,7 @@ enable enable - $(NoWarn);CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0110 + $(NoWarn);CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0110 diff --git a/dotnet/samples/Demos/ProcessWithDapr/ProcessWithDapr.csproj b/dotnet/samples/Demos/ProcessWithDapr/ProcessWithDapr.csproj index 69628bbacda5..d1bd90408672 100644 --- a/dotnet/samples/Demos/ProcessWithDapr/ProcessWithDapr.csproj +++ b/dotnet/samples/Demos/ProcessWithDapr/ProcessWithDapr.csproj @@ -5,7 +5,7 @@ enable enable - 
$(NoWarn);CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0110 + $(NoWarn);CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0110 diff --git a/dotnet/samples/Demos/VectorStoreRAG/VectorStoreRAG.csproj b/dotnet/samples/Demos/VectorStoreRAG/VectorStoreRAG.csproj index 7b1557a8005c..a5d2dbd59e7a 100644 --- a/dotnet/samples/Demos/VectorStoreRAG/VectorStoreRAG.csproj +++ b/dotnet/samples/Demos/VectorStoreRAG/VectorStoreRAG.csproj @@ -5,7 +5,7 @@ net8.0 enable enable - $(NoWarn);SKEXP0001;SKEXP0010;SKEXP0020 + $(NoWarn);SKEXP0001;SKEXP0010 c4203b00-7179-47c1-8701-ee352e381412 diff --git a/dotnet/samples/GettingStarted/GettingStarted.csproj b/dotnet/samples/GettingStarted/GettingStarted.csproj index 6341c4dbae5a..c5c77c4238a2 100644 --- a/dotnet/samples/GettingStarted/GettingStarted.csproj +++ b/dotnet/samples/GettingStarted/GettingStarted.csproj @@ -7,7 +7,7 @@ true false - $(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101 + $(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101 Library 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 diff --git a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj index 1698d6be44b5..90818906f219 100644 --- a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj +++ b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj @@ -9,7 +9,7 @@ true - $(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001 + $(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001 Library 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 diff --git a/dotnet/samples/GettingStartedWithProcesses/GettingStartedWithProcesses.csproj b/dotnet/samples/GettingStartedWithProcesses/GettingStartedWithProcesses.csproj index e77aab6ba7f3..1da2089382b7 100644 --- a/dotnet/samples/GettingStartedWithProcesses/GettingStartedWithProcesses.csproj +++ b/dotnet/samples/GettingStartedWithProcesses/GettingStartedWithProcesses.csproj @@ -10,7 +10,7 @@ - $(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0101,SKEXP0110,OPENAI001 + $(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0101,SKEXP0110,OPENAI001 Library 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 diff --git a/dotnet/samples/GettingStartedWithTextSearch/GettingStartedWithTextSearch.csproj b/dotnet/samples/GettingStartedWithTextSearch/GettingStartedWithTextSearch.csproj index 41fc7813300f..03a522206317 100644 --- a/dotnet/samples/GettingStartedWithTextSearch/GettingStartedWithTextSearch.csproj +++ b/dotnet/samples/GettingStartedWithTextSearch/GettingStartedWithTextSearch.csproj @@ -7,7 +7,7 @@ true false - 
$(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101 + $(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101 Library 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 diff --git a/dotnet/samples/GettingStartedWithVectorStores/GettingStartedWithVectorStores.csproj b/dotnet/samples/GettingStartedWithVectorStores/GettingStartedWithVectorStores.csproj index 5160921a3bbd..dec156215f6d 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/GettingStartedWithVectorStores.csproj +++ b/dotnet/samples/GettingStartedWithVectorStores/GettingStartedWithVectorStores.csproj @@ -7,7 +7,7 @@ true false - $(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101 + $(NoWarn);CS8618,IDE0009,IDE1006,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101 Library 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 diff --git a/dotnet/samples/LearnResources/LearnResources.csproj b/dotnet/samples/LearnResources/LearnResources.csproj index f347bb620e21..398e4883a6a1 100644 --- a/dotnet/samples/LearnResources/LearnResources.csproj +++ b/dotnet/samples/LearnResources/LearnResources.csproj @@ -7,7 +7,7 @@ enable false - $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA2007,VSTHRD111,CS1591,RCS1110,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0101 + $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA2007,VSTHRD111,CS1591,RCS1110,CA5394,SKEXP0001,SKEXP0010,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0101 Library 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/Connectors.AzureAISearch.UnitTests.csproj b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/Connectors.AzureAISearch.UnitTests.csproj index b4d5908dbed9..27d2f0811843 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/Connectors.AzureAISearch.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/Connectors.AzureAISearch.UnitTests.csproj @@ -8,7 +8,7 @@ enable disable false - $(NoWarn);SKEXP0001,SKEXP0020 + $(NoWarn);SKEXP0001 $(NoWarn);MEVD9001 diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/Connectors.AzureCosmosDBMongoDB.UnitTests.csproj b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/Connectors.AzureCosmosDBMongoDB.UnitTests.csproj index a31e4b802b52..21b4d379162f 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/Connectors.AzureCosmosDBMongoDB.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/Connectors.AzureCosmosDBMongoDB.UnitTests.csproj @@ -8,7 +8,7 @@ enable disable false - $(NoWarn);SKEXP0001,SKEXP0020 + $(NoWarn);SKEXP0001 diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/Connectors.AzureCosmosDBNoSQL.UnitTests.csproj b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/Connectors.AzureCosmosDBNoSQL.UnitTests.csproj index 3ee93b149127..032bd3bd9eed 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/Connectors.AzureCosmosDBNoSQL.UnitTests.csproj +++ 
b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/Connectors.AzureCosmosDBNoSQL.UnitTests.csproj @@ -8,7 +8,7 @@ enable disable false - $(NoWarn);SKEXP0001,SKEXP0020 + $(NoWarn);SKEXP0001 $(NoWarn);MEVD9001 diff --git a/dotnet/src/Connectors/Connectors.Google.UnitTests/Connectors.Google.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Google.UnitTests/Connectors.Google.UnitTests.csproj index adff4d81e1b0..4468b0001333 100644 --- a/dotnet/src/Connectors/Connectors.Google.UnitTests/Connectors.Google.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.Google.UnitTests/Connectors.Google.UnitTests.csproj @@ -8,7 +8,7 @@ enable disable false - $(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0050,SKEXP0070 + $(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0050,SKEXP0070 diff --git a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/Connectors.InMemory.UnitTests.csproj b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/Connectors.InMemory.UnitTests.csproj index a125a758c729..1b00bcec55de 100644 --- a/dotnet/src/Connectors/Connectors.InMemory.UnitTests/Connectors.InMemory.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.InMemory.UnitTests/Connectors.InMemory.UnitTests.csproj @@ -8,7 +8,7 @@ enable disable false - $(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0020 + $(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001 diff --git a/dotnet/src/Connectors/Connectors.Memory.DuckDB/Connectors.Memory.DuckDB.csproj b/dotnet/src/Connectors/Connectors.Memory.DuckDB/Connectors.Memory.DuckDB.csproj index ce8c96eb0be3..bc40eced6ff8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.DuckDB/Connectors.Memory.DuckDB.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.DuckDB/Connectors.Memory.DuckDB.csproj @@ -6,6 +6,7 @@ $(AssemblyName) net8.0;netstandard2.0;net462 alpha + false diff --git a/dotnet/src/Connectors/Connectors.Memory.Kusto/Connectors.Memory.Kusto.csproj b/dotnet/src/Connectors/Connectors.Memory.Kusto/Connectors.Memory.Kusto.csproj index 3e4edcde8b69..3d9ba43628ab 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Kusto/Connectors.Memory.Kusto.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Kusto/Connectors.Memory.Kusto.csproj @@ -7,6 +7,7 @@ alpha $(NoWarn);NU5104 + false diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresDbClient.cs index bf9301aad476..11c5a504dcbc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresDbClient.cs @@ -8,8 +8,6 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; -#pragma warning disable SKEXP0020 - /// /// Interface for client managing postgres database operations for . /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs index 47ec60218022..276a34643142 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs @@ -9,8 +9,6 @@ namespace Microsoft.SemanticKernel.Connectors.Postgres; -#pragma warning disable SKEXP0020 - /// /// A mapper class that handles the conversion between data models and storage models for Postgres vector store. 
/// diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/README.md b/dotnet/src/Connectors/Connectors.Memory.SqlServer/README.md index bb78b9c9e4fa..b753e40879da 100644 --- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/README.md +++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/README.md @@ -36,7 +36,7 @@ using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.Connectors.SqlServer; using Microsoft.SemanticKernel.Memory; -#pragma warning disable SKEXP0001, SKEXP0010, SKEXP0020 +#pragma warning disable SKEXP0001, SKEXP0010 // Replace with your Azure OpenAI endpoint const string AzureOpenAIEndpoint = "https://.openai.azure.com/"; diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/Connectors.MongoDB.UnitTests.csproj b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/Connectors.MongoDB.UnitTests.csproj index 30f8c0d5307e..88918bc7e1df 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/Connectors.MongoDB.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/Connectors.MongoDB.UnitTests.csproj @@ -8,7 +8,7 @@ enable disable false - $(NoWarn);SKEXP0001,SKEXP0020,VSTHRD111,CA2007,CS1591 + $(NoWarn);SKEXP0001,VSTHRD111,CA2007,CS1591 $(NoWarn);MEVD9001 diff --git a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/Connectors.Pinecone.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/Connectors.Pinecone.UnitTests.csproj index 56a1152f4a46..4e89355c5856 100644 --- a/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/Connectors.Pinecone.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.Pinecone.UnitTests/Connectors.Pinecone.UnitTests.csproj @@ -8,7 +8,7 @@ enable disable false - $(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0050 + $(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0050 diff --git a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/Connectors.Postgres.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/Connectors.Postgres.UnitTests.csproj index 040d35b2de69..0f93fb8022e6 100644 --- a/dotnet/src/Connectors/Connectors.Postgres.UnitTests/Connectors.Postgres.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.Postgres.UnitTests/Connectors.Postgres.UnitTests.csproj @@ -8,7 +8,7 @@ enable disable false - $(NoWarn);SKEXP0001,SKEXP0020,VSTHRD111,CA2007,CS1591 + $(NoWarn);SKEXP0001,VSTHRD111,CA2007,CS1591 $(NoWarn);MEVD9001 diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/Connectors.Qdrant.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/Connectors.Qdrant.UnitTests.csproj index 3f2ce94b7986..322eb096b3e2 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/Connectors.Qdrant.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/Connectors.Qdrant.UnitTests.csproj @@ -8,7 +8,7 @@ enable disable false - $(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0050 + $(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0050 $(NoWarn);MEVD9001 diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/Connectors.Redis.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Redis.UnitTests/Connectors.Redis.UnitTests.csproj index a9d5ff1e1bd9..1593ec444e1b 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/Connectors.Redis.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/Connectors.Redis.UnitTests.csproj @@ -8,7 +8,7 @@ enable 
disable false - $(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0050 + $(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0050 $(NoWarn);MEVD9001 diff --git a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Connectors.Sqlite.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Connectors.Sqlite.UnitTests.csproj index 128ccff8175f..6bc1914ba63b 100644 --- a/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Connectors.Sqlite.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.Sqlite.UnitTests/Connectors.Sqlite.UnitTests.csproj @@ -8,7 +8,7 @@ enable disable false - $(NoWarn);SKEXP0001,SKEXP0020,VSTHRD111,CA2007,CS1591 + $(NoWarn);SKEXP0001,VSTHRD111,CA2007,CS1591 $(NoWarn);MEVD9001 diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj b/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj index 8d27acc67d94..2573821640b1 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj @@ -8,7 +8,7 @@ enable disable false - $(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0050,SKEXP0120 + $(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0050,SKEXP0120 diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Chroma/ChromaMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Chroma/ChromaMemoryStoreTests.cs index fbbf445ef7e7..bf2da33d900c 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Chroma/ChromaMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Chroma/ChromaMemoryStoreTests.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Net.Http; using System.Text.Json; @@ -18,6 +19,7 @@ namespace SemanticKernel.Connectors.UnitTests.Chroma; /// /// Unit tests for class. /// +[Experimental("SKEXP0020")] public sealed class ChromaMemoryStoreTests : IDisposable { private const string CollectionId = "fake-collection-id"; diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/DuckDB/DuckDBMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/DuckDB/DuckDBMemoryStoreTests.cs index e9e09599a1b4..b6c706734d30 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/DuckDB/DuckDBMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/DuckDB/DuckDBMemoryStoreTests.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Collections.Immutable; +using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Runtime.CompilerServices; using System.Threading.Tasks; @@ -16,6 +17,7 @@ namespace SemanticKernel.Connectors.UnitTests.DuckDB; /// /// Unit tests of . 
/// +[Experimental("SKEXP0020")] [Collection("Sequential")] public class DuckDBMemoryStoreTests { diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Kusto/KustoMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Kusto/KustoMemoryStoreTests.cs index 7cdec0210775..e586b58fe0cc 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Kusto/KustoMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Kusto/KustoMemoryStoreTests.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Data; +using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Threading; using System.Threading.Tasks; @@ -19,6 +20,7 @@ namespace SemanticKernel.Connectors.UnitTests.Kusto; /// /// Unit tests for class. /// +[Experimental("SKEXP0020")] public class KustoMemoryStoreTests { private const string CollectionName = "fake_collection"; diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/Connectors.Weaviate.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/Connectors.Weaviate.UnitTests.csproj index 55e4e0850587..8312d6ba5b60 100644 --- a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/Connectors.Weaviate.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/Connectors.Weaviate.UnitTests.csproj @@ -8,7 +8,7 @@ enable disable false - $(NoWarn);SKEXP0001,SKEXP0020,VSTHRD111,CA2007 + $(NoWarn);SKEXP0001,VSTHRD111,CA2007 $(NoWarn);MEVD9001 diff --git a/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Process.IntegrationTestHost.Dapr.csproj b/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Process.IntegrationTestHost.Dapr.csproj index 91277c4692ad..9ce38a616a68 100644 --- a/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Process.IntegrationTestHost.Dapr.csproj +++ b/dotnet/src/Experimental/Process.IntegrationTestHost.Dapr/Process.IntegrationTestHost.Dapr.csproj @@ -7,7 +7,7 @@ enable enable false - $(NoWarn);CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0110 + $(NoWarn);CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0110 b7762d10-e29b-4bb1-8b74-b6d69a667dd4 true diff --git a/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/Process.IntegrationTestRunner.Dapr.csproj b/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/Process.IntegrationTestRunner.Dapr.csproj index 2d35183b3648..1f3c09de232e 100644 --- a/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/Process.IntegrationTestRunner.Dapr.csproj +++ b/dotnet/src/Experimental/Process.IntegrationTestRunner.Dapr/Process.IntegrationTestRunner.Dapr.csproj @@ -7,7 +7,7 @@ enable enable false - $(NoWarn);CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0110 + $(NoWarn);CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0110 b7762d10-e29b-4bb1-8b74-b6d69a667dd4 true diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Chroma/ChromaMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Chroma/ChromaMemoryStoreTests.cs index d337641ad071..770400778817 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Chroma/ChromaMemoryStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Chroma/ChromaMemoryStoreTests.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Linq; 
using System.Net.Http; using System.Threading.Tasks; @@ -16,6 +17,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.Chroma; /// Integration tests for class. /// Tests work with local Chroma server. To setup the server, see dotnet/src/Connectors/Connectors.Memory.Chroma/README.md. /// +[Experimental("SKEXP0020")] public sealed class ChromaMemoryStoreTests : IDisposable { // If null, all tests will be enabled diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Milvus/MilvusMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Milvus/MilvusMemoryStoreTests.cs index 5fba220a3ad4..4ee21728816a 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Milvus/MilvusMemoryStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Milvus/MilvusMemoryStoreTests.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Threading.Tasks; using Microsoft.SemanticKernel.Connectors.Milvus; @@ -11,6 +12,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.Milvus; +[Experimental("SKEXP0020")] public class MilvusMemoryStoreTests(MilvusFixture milvusFixture) : IClassFixture, IAsyncLifetime { private const string CollectionName = "test"; diff --git a/dotnet/src/IntegrationTests/IntegrationTests.csproj b/dotnet/src/IntegrationTests/IntegrationTests.csproj index 2b9e61026859..660a7a40ed7c 100644 --- a/dotnet/src/IntegrationTests/IntegrationTests.csproj +++ b/dotnet/src/IntegrationTests/IntegrationTests.csproj @@ -5,7 +5,7 @@ net8.0 true false - $(NoWarn);CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0110,OPENAI001,MEVD9000 + $(NoWarn);CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0110,OPENAI001,MEVD9000 b7762d10-e29b-4bb1-8b74-b6d69a667dd4 diff --git a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj index 3311fb3b3553..de5aa0586a38 100644 --- a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj +++ b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj @@ -6,7 +6,7 @@ net8.0 true false - $(NoWarn);CA2007,CA1861,IDE1006,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0050,SKEXP0110,SKEXP0120,MEVD9000 + $(NoWarn);CA2007,CA1861,IDE1006,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0050,SKEXP0110,SKEXP0120,MEVD9000 diff --git a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/PineconeIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/PineconeIntegrationTests.csproj index bc92e1816858..3d1612762740 100644 --- a/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/PineconeIntegrationTests.csproj +++ b/dotnet/src/VectorDataIntegrationTests/PineconeIntegrationTests/PineconeIntegrationTests.csproj @@ -9,7 +9,7 @@ false true - $(NoWarn);CA2007,SKEXP0001,SKEXP0020,VSTHRD111;CS1685 + $(NoWarn);CA2007,SKEXP0001,VSTHRD111;CS1685 b7762d10-e29b-4bb1-8b74-b6d69a667dd4 diff --git a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresTestStore.cs b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresTestStore.cs index 1d4c540c216a..7289d91e6bb1 100644 --- a/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresTestStore.cs +++ b/dotnet/src/VectorDataIntegrationTests/PostgresIntegrationTests/Support/PostgresTestStore.cs @@ -8,8 +8,6 @@ namespace 
PostgresIntegrationTests.Support; -#pragma warning disable SKEXP0020 - internal sealed class PostgresTestStore : TestStore { public static PostgresTestStore Instance { get; } = new(); diff --git a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj index d15fc2fc0058..e81066f2d839 100644 --- a/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj +++ b/dotnet/src/VectorDataIntegrationTests/SqlServerIntegrationTests/SqlServerIntegrationTests.csproj @@ -8,7 +8,7 @@ false true - $(NoWarn);CA2007,SKEXP0001,SKEXP0020,VSTHRD111;CS1685 + $(NoWarn);CA2007,SKEXP0001,VSTHRD111;CS1685 b7762d10-e29b-4bb1-8b74-b6d69a667dd4 $(NoWarn);MEVD9000,MEVD9001 From 2ec7d24222afa74d9673e7773902b27f103b857a Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Mon, 28 Apr 2025 16:50:05 +0100 Subject: [PATCH 57/63] .Net: Remove custom mapper samples (#11742) ### Motivation and Context #11480 ### Description - remove any samples doing any kind of mapping. ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- .../MappingVectorStoreRecordCollection.cs | 142 ------------- dotnet/samples/Concepts/RAG/WithPlugins.cs | 22 +- .../Step4_NonStringKey_VectorStore.cs | 200 ------------------ ...Model.cs => Step4_Use_DynamicDataModel.cs} | 2 +- 4 files changed, 13 insertions(+), 353 deletions(-) delete mode 100644 dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/MappingVectorStoreRecordCollection.cs delete mode 100644 dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs rename dotnet/samples/GettingStartedWithVectorStores/{Step5_Use_DynamicDataModel.cs => Step4_Use_DynamicDataModel.cs} (98%) diff --git a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/MappingVectorStoreRecordCollection.cs b/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/MappingVectorStoreRecordCollection.cs deleted file mode 100644 index 1951f3a6dbee..000000000000 --- a/dotnet/samples/Concepts/Memory/VectorStoreLangchainInterop/MappingVectorStoreRecordCollection.cs +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -// TODO: Commented out as part of implementing LINQ-based filtering, since MappingVectorStoreRecordCollection is no longer easy/feasible. -// TODO: The user provides an expression tree accepting a TPublicRecord, but we require an expression tree accepting a TInternalRecord. -// TODO: This is something that the user must provide, and is quite advanced. - -#if DISABLED - -using System.Runtime.CompilerServices; -using Microsoft.Extensions.VectorData; - -namespace Memory.VectorStoreLangchainInterop; - -/// -/// Decorator class that allows conversion of keys and records between public and internal representations. -/// -/// -/// This class is useful if a vector store implementation exposes keys or records in a way that is not -/// suitable for the user of the vector store. E.g. 
let's say that the vector store supports Guid keys -/// but you want to work with string keys that contain Guids. This class allows you to map between the -/// public string Guids and the internal Guids. -/// -/// The type of the key that the user of this class will use. -/// The type of the key that the internal collection exposes. -/// The type of the record that the user of this class will use. -/// The type of the record that the internal collection exposes. -internal sealed class MappingVectorStoreRecordCollection : IVectorStoreRecordCollection - where TPublicKey : notnull - where TInternalKey : notnull -{ - private readonly IVectorStoreRecordCollection _collection; - private readonly Func _publicToInternalKeyMapper; - private readonly Func _internalToPublicKeyMapper; - private readonly Func _publicToInternalRecordMapper; - private readonly Func _internalToPublicRecordMapper; - - public MappingVectorStoreRecordCollection( - IVectorStoreRecordCollection collection, - Func publicToInternalKeyMapper, - Func internalToPublicKeyMapper, - Func publicToInternalRecordMapper, - Func internalToPublicRecordMapper) - { - this._collection = collection; - this._publicToInternalKeyMapper = publicToInternalKeyMapper; - this._internalToPublicKeyMapper = internalToPublicKeyMapper; - this._publicToInternalRecordMapper = publicToInternalRecordMapper; - this._internalToPublicRecordMapper = internalToPublicRecordMapper; - } - - /// - public string CollectionName => this._collection.CollectionName; - - /// - public Task CollectionExistsAsync(CancellationToken cancellationToken = default) - { - return this._collection.CollectionExistsAsync(cancellationToken); - } - - /// - public Task CreateCollectionAsync(CancellationToken cancellationToken = default) - { - return this._collection.CreateCollectionAsync(cancellationToken); - } - - /// - public Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) - { - return this._collection.CreateCollectionIfNotExistsAsync(cancellationToken); - } - - /// - public Task DeleteAsync(TPublicKey key, CancellationToken cancellationToken = default) - { - return this._collection.DeleteAsync(this._publicToInternalKeyMapper(key), cancellationToken); - } - - /// - public Task DeleteBatchAsync(IEnumerable keys, CancellationToken cancellationToken = default) - { - return this._collection.DeleteBatchAsync(keys.Select(this._publicToInternalKeyMapper), cancellationToken); - } - - /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) - { - return this._collection.DeleteCollectionAsync(cancellationToken); - } - - /// - public async Task GetAsync(TPublicKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) - { - var internalRecord = await this._collection.GetAsync(this._publicToInternalKeyMapper(key), options, cancellationToken).ConfigureAwait(false); - if (internalRecord == null) - { - return default; - } - - return this._internalToPublicRecordMapper(internalRecord); - } - - /// - public IAsyncEnumerable GetBatchAsync(IEnumerable keys, GetRecordOptions? 
options = null, CancellationToken cancellationToken = default) - { - var internalRecords = this._collection.GetBatchAsync(keys.Select(this._publicToInternalKeyMapper), options, cancellationToken); - return internalRecords.Select(this._internalToPublicRecordMapper); - } - - /// - public async Task UpsertAsync(TPublicRecord record, CancellationToken cancellationToken = default) - { - var internalRecord = this._publicToInternalRecordMapper(record); - var internalKey = await this._collection.UpsertAsync(internalRecord, cancellationToken).ConfigureAwait(false); - return this._internalToPublicKeyMapper(internalKey); - } - - /// - public async IAsyncEnumerable UpsertBatchAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var internalRecords = records.Select(this._publicToInternalRecordMapper); - var internalKeys = this._collection.UpsertBatchAsync(internalRecords, cancellationToken); - await foreach (var internalKey in internalKeys.ConfigureAwait(false)) - { - yield return this._internalToPublicKeyMapper(internalKey); - } - } - - /// - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) - { - var searchResults = await this._collection.VectorizedSearchAsync(vector, options, cancellationToken).ConfigureAwait(false); - var publicResultRecords = searchResults.Results.Select(result => new VectorSearchResult(this._internalToPublicRecordMapper(result.Record), result.Score)); - - return new VectorSearchResults(publicResultRecords) - { - TotalCount = searchResults.TotalCount, - Metadata = searchResults.Metadata, - }; - } -} - -#endif diff --git a/dotnet/samples/Concepts/RAG/WithPlugins.cs b/dotnet/samples/Concepts/RAG/WithPlugins.cs index e8a55ab8d660..24419dd7fdb7 100644 --- a/dotnet/samples/Concepts/RAG/WithPlugins.cs +++ b/dotnet/samples/Concepts/RAG/WithPlugins.cs @@ -2,12 +2,13 @@ using System.Net.Http.Headers; using System.Text.Json; +using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.InMemory; -using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.Data; using Microsoft.SemanticKernel.PromptTemplates.Handlebars; +using OpenAI; using Resources; namespace RAG; @@ -34,16 +35,16 @@ public async Task RAGWithCustomPluginAsync() [Fact] public async Task RAGWithInMemoryVectorStoreAndPluginAsync() { - var vectorStore = new InMemoryVectorStore(); - var textEmbeddingGenerator = new OpenAITextEmbeddingGenerationService( - TestConfiguration.OpenAI.EmbeddingModelId, - TestConfiguration.OpenAI.ApiKey); + var textEmbeddingGenerator = new OpenAIClient(TestConfiguration.OpenAI.ApiKey) + .GetEmbeddingClient(TestConfiguration.OpenAI.EmbeddingModelId) + .AsIEmbeddingGenerator(); var kernel = Kernel.CreateBuilder() .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) .Build(); // Create the collection and add data + var vectorStore = new InMemoryVectorStore(new() { EmbeddingGenerator = textEmbeddingGenerator }); var collection = vectorStore.GetCollection("finances"); await collection.CreateCollectionAsync(); string[] budgetInfo = @@ -54,12 +55,11 @@ public async Task RAGWithInMemoryVectorStoreAndPluginAsync() "The budget for 2023 is EUR 200 000", "The budget for 2024 is EUR 364 000" }; - var vectors = await textEmbeddingGenerator.GenerateEmbeddingsAsync(budgetInfo); - var records = budgetInfo.Zip(vectors).Select((input, 
index) => new FinanceInfo { Key = index.ToString(), Text = input.First, Embedding = input.Second }).ToList(); + var records = budgetInfo.Select((input, index) => new FinanceInfo { Key = index.ToString(), Text = input }); await collection.UpsertAsync(records); // Add the collection to the kernel as a plugin. - var textSearch = new VectorStoreTextSearch(collection, textEmbeddingGenerator); + var textSearch = new VectorStoreTextSearch(collection); kernel.Plugins.Add(textSearch.CreateWithSearch("FinanceSearch", "Can search for budget information")); // Invoke the kernel, using the plugin from within the prompt. @@ -120,12 +120,14 @@ private sealed class FinanceInfo { [VectorStoreRecordKey] public string Key { get; set; } = string.Empty; + [TextSearchResultValue] [VectorStoreRecordData] public string Text { get; set; } = string.Empty; + [VectorStoreRecordVector(1536)] - public ReadOnlyMemory Embedding { get; set; } + public string Embedding => this.Text; } - #endregion + #endregion Custom Plugin } diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs b/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs deleted file mode 100644 index 2798cd4d9e0c..000000000000 --- a/dotnet/samples/GettingStartedWithVectorStores/Step4_NonStringKey_VectorStore.cs +++ /dev/null @@ -1,200 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -#if DISABLED_FOR_NOW // TODO: See note in MappingVectorStoreRecordCollection - -using System.Runtime.CompilerServices; -using Microsoft.Extensions.VectorData; -using Microsoft.SemanticKernel.Connectors.Qdrant; -using Qdrant.Client; - -namespace GettingStartedWithVectorStores; - - -/// -/// Example that shows that you can switch between different vector stores with the same code, in this case -/// with a vector store that doesn't use string keys. -/// This sample demonstrates one possible approach, however it is also possible to use generics -/// in the common code to achieve code reuse. -/// -public class Step4_NonStringKey_VectorStore(ITestOutputHelper output, VectorStoresFixture fixture) : BaseTest(output), IClassFixture -{ - /// - /// Here we are going to use the same code that we used in and - /// but now with an . - /// Qdrant uses Guid or ulong as the key type, but the common code works with a string key. The string keys of the records created - /// in contain numbers though, so it's possible for us to convert them to ulong. - /// In this example, we'll demonstrate how to do that. - /// - /// This example requires a Qdrant server up and running. To run a Qdrant server in a Docker container, use the following command: - /// docker run -d --name qdrant -p 6333:6333 -p 6334:6334 qdrant/qdrant:latest - /// - [Fact] - public async Task UseAQdrantVectorStoreAsync() - { - // Construct a Qdrant vector store collection. - var collection = new QdrantVectorStoreRecordCollection(new QdrantClient("localhost"), "skglossary"); - - // Wrap the collection using a decorator that allows us to expose a version that uses string keys, but internally - // we convert to and from ulong. 
- var stringKeyCollection = new MappingVectorStoreRecordCollection( - collection, - p => ulong.Parse(p), - i => i.ToString(), - p => new UlongGlossary { Key = ulong.Parse(p.Key), Category = p.Category, Term = p.Term, Definition = p.Definition, DefinitionEmbedding = p.DefinitionEmbedding }, - i => new Glossary { Key = i.Key.ToString("D"), Category = i.Category, Term = i.Term, Definition = i.Definition, DefinitionEmbedding = i.DefinitionEmbedding }); - - // Ingest data into the collection using the same code as we used in Step1 with the InMemory Vector Store. - await Step1_Ingest_Data.IngestDataIntoVectorStoreAsync(stringKeyCollection, fixture.TextEmbeddingGenerationService); - - // Search the vector store using the same code as we used in Step2 with the InMemory Vector Store. - var searchResultItem = await Step2_Vector_Search.SearchVectorStoreAsync( - stringKeyCollection, - "What is an Application Programming Interface?", - fixture.TextEmbeddingGenerationService); - - // Write the search result with its score to the console. - Console.WriteLine(searchResultItem.Record.Definition); - Console.WriteLine(searchResultItem.Score); - } - - /// - /// Data model that uses a ulong as the key type instead of a string. - /// - private sealed class UlongGlossary - { - [VectorStoreRecordKey] - public ulong Key { get; set; } - - [VectorStoreRecordData(IsIndexed = true)] - public string Category { get; set; } - - [VectorStoreRecordData] - public string Term { get; set; } - - [VectorStoreRecordData] - public string Definition { get; set; } - - [VectorStoreRecordVector(Dimensions: 1536)] - public ReadOnlyMemory DefinitionEmbedding { get; set; } - } - - /// - /// Simple decorator class that allows conversion of keys and records from one type to another. - /// - private sealed class MappingVectorStoreRecordCollection : IVectorStoreRecordCollection - where TPublicKey : notnull - where TInternalKey : notnull - { - private readonly IVectorStoreRecordCollection _collection; - private readonly Func _publicToInternalKeyMapper; - private readonly Func _internalToPublicKeyMapper; - private readonly Func _publicToInternalRecordMapper; - private readonly Func _internalToPublicRecordMapper; - - public MappingVectorStoreRecordCollection( - IVectorStoreRecordCollection collection, - Func publicToInternalKeyMapper, - Func internalToPublicKeyMapper, - Func publicToInternalRecordMapper, - Func internalToPublicRecordMapper) - { - this._collection = collection; - this._publicToInternalKeyMapper = publicToInternalKeyMapper; - this._internalToPublicKeyMapper = internalToPublicKeyMapper; - this._publicToInternalRecordMapper = publicToInternalRecordMapper; - this._internalToPublicRecordMapper = internalToPublicRecordMapper; - } - - /// - public string CollectionName => this._collection.CollectionName; - - /// - public Task CollectionExistsAsync(CancellationToken cancellationToken = default) - { - return this._collection.CollectionExistsAsync(cancellationToken); - } - - /// - public Task CreateCollectionAsync(CancellationToken cancellationToken = default) - { - return this._collection.CreateCollectionAsync(cancellationToken); - } - - /// - public Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) - { - return this._collection.CreateCollectionIfNotExistsAsync(cancellationToken); - } - - /// - public Task DeleteAsync(TPublicKey key, CancellationToken cancellationToken = default) - { - return this._collection.DeleteAsync(this._publicToInternalKeyMapper(key), cancellationToken); - } - - /// - public 
Task DeleteAsync(IEnumerable keys, CancellationToken cancellationToken = default) - { - return this._collection.DeleteAsync(keys.Select(this._publicToInternalKeyMapper), cancellationToken); - } - - /// - public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) - { - return this._collection.DeleteCollectionAsync(cancellationToken); - } - - /// - public async Task GetAsync(TPublicKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) - { - var internalRecord = await this._collection.GetAsync(this._publicToInternalKeyMapper(key), options, cancellationToken).ConfigureAwait(false); - if (internalRecord == null) - { - return default; - } - - return this._internalToPublicRecordMapper(internalRecord); - } - - /// - public IAsyncEnumerable GetAsync(IEnumerable keys, GetRecordOptions? options = null, CancellationToken cancellationToken = default) - { - var internalRecords = this._collection.GetAsync(keys.Select(this._publicToInternalKeyMapper), options, cancellationToken); - return internalRecords.Select(this._internalToPublicRecordMapper); - } - - /// - public async Task UpsertAsync(TPublicRecord record, CancellationToken cancellationToken = default) - { - var internalRecord = this._publicToInternalRecordMapper(record); - var internalKey = await this._collection.UpsertAsync(internalRecord, cancellationToken).ConfigureAwait(false); - return this._internalToPublicKeyMapper(internalKey); - } - - /// - public async IAsyncEnumerable UpsertAsync(IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var internalRecords = records.Select(this._publicToInternalRecordMapper); - var internalKeys = this._collection.UpsertAsync(internalRecords, cancellationToken); - await foreach (var internalKey in internalKeys.ConfigureAwait(false)) - { - yield return this._internalToPublicKeyMapper(internalKey); - } - } - - /// - public async Task> VectorizedSearchAsync(TVector vector, VectorSearchOptions? options = null, CancellationToken cancellationToken = default) - { - var searchResults = await this._collection.VectorizedSearchAsync(vector, options, cancellationToken).ConfigureAwait(false); - var publicResultRecords = searchResults.Results.Select(result => new VectorSearchResult(this._internalToPublicRecordMapper(result.Record), result.Score)); - - return new VectorSearchResults(publicResultRecords) - { - TotalCount = searchResults.TotalCount, - Metadata = searchResults.Metadata, - }; - } - } -} - -#endif diff --git a/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs b/dotnet/samples/GettingStartedWithVectorStores/Step4_Use_DynamicDataModel.cs similarity index 98% rename from dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs rename to dotnet/samples/GettingStartedWithVectorStores/Step4_Use_DynamicDataModel.cs index cdea73134d21..63ed0ef1d34f 100644 --- a/dotnet/samples/GettingStartedWithVectorStores/Step5_Use_DynamicDataModel.cs +++ b/dotnet/samples/GettingStartedWithVectorStores/Step4_Use_DynamicDataModel.cs @@ -11,7 +11,7 @@ namespace GettingStartedWithVectorStores; /// Example that shows that you can use the dynamic data modeling to interact with a vector database. /// This makes it possible to use the vector store abstractions without having to create your own strongly-typed data model. 
/// -public class Step5_Use_DynamicDataModel(ITestOutputHelper output, VectorStoresFixture fixture) : BaseTest(output), IClassFixture +public class Step4_Use_DynamicDataModel(ITestOutputHelper output, VectorStoresFixture fixture) : BaseTest(output), IClassFixture { /// /// Example showing how to query a vector store that uses dynamic data modeling. From c8b129df5d185116f836effd73d648d82f59ab62 Mon Sep 17 00:00:00 2001 From: Shay Rojansky Date: Mon, 28 Apr 2025 21:37:39 +0200 Subject: [PATCH 58/63] .Net: Tiny fixes to make MEVD integration tests pass (#11776) --- .../Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs | 4 ++-- .../Weaviate/WeaviateVectorStoreRecordCollectionTests.cs | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs index bd70dd934b4b..d5324fc4ecf0 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Sqlite/SqliteVectorStoreRecordCollectionTests.cs @@ -439,7 +439,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() [Fact(Skip = SkipReason)] public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperWithNumericKeyAsync() { - const ulong HotelId = 5; + const long HotelId = 5; var options = new SqliteVectorStoreRecordCollectionOptions> { @@ -468,7 +468,7 @@ public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperWithNumericKeyAsync var localGetResult = await sut.GetAsync(HotelId, new GetRecordOptions { IncludeVectors = true }); // Assert - Assert.Equal(HotelId, upsertResult); + Assert.Equal(HotelId, (long)upsertResult); Assert.NotNull(localGetResult); Assert.Equal("Dynamic Mapper Hotel", localGetResult["HotelName"]); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs index 747e1ede5fc9..fe39b2c3ce62 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs @@ -469,8 +469,7 @@ private VectorStoreRecordDefinition GetTestHotelRecordDefinition() new VectorStoreRecordDataProperty("Tags", typeof(List)), new VectorStoreRecordDataProperty("Description", typeof(string)), new VectorStoreRecordDataProperty("Timestamp", typeof(DateTimeOffset)), - new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), 4) {IndexKind = IndexKind.Hnsw, DistanceFunction = DistanceFunction.CosineDistance } - + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), 4) { IndexKind = IndexKind.Hnsw, DistanceFunction = DistanceFunction.CosineDistance } ] }; } From a8814647d8ce3c50d0046a1f022eb63198185e0d Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Mon, 28 Apr 2025 22:25:41 +0100 Subject: [PATCH 59/63] .Net: Fix Integration tests and a few small bugs. (#11778) ### Motivation and Context Fix AzureAISearch tests and two small bugs. Improve MongoDB tests with a small fix too. 
Fix CosmosNoSQL integration tests Fixes for postgres and mongodb Fixes for redis ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- ...VectorStoreCollectionSearchMappingTests.cs | 2 +- .../AzureAISearchConstants.cs | 30 ++++++++++++++ .../AzureAISearchDynamicDataModelMapper.cs | 2 +- .../AzureAISearchDynamicModelBuilder.cs | 36 +++++++++++++++++ .../AzureAISearchModelBuilder.cs | 36 ++--------------- ...zureAISearchVectorStoreRecordCollection.cs | 5 ++- .../PostgresVectorStoreRecordMapper.cs | 2 +- .../RedisJsonVectorStoreRecordCollection.cs | 9 +++-- ...VectorStoreCollectionSearchMappingTests.cs | 2 +- .../VectorStoreRecordModelBuilder.cs | 8 ++-- .../AzureAISearchVectorStoreFixture.cs | 20 ++++++---- ...ISearchVectorStoreRecordCollectionTests.cs | 39 +++++++++---------- .../AzureCosmosDBMongoDBVectorStoreFixture.cs | 3 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 18 ++++++--- ...DBNoSQLVectorStoreRecordCollectionTests.cs | 32 ++++++++------- .../AzureCosmosDBNoSQLVectorStoreTests.cs | 3 +- ...MongoDBVectorStoreRecordCollectionTests.cs | 2 +- .../Memory/Redis/RedisVectorStoreFixture.cs | 16 ++++---- 18 files changed, 161 insertions(+), 104 deletions(-) create mode 100644 dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchDynamicModelBuilder.cs diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs index 5e039902ec32..4f16055088b1 100644 --- a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreCollectionSearchMappingTests.cs @@ -93,7 +93,7 @@ public void BuildFilterThrowsExceptionWithMultipleFilterClausesOfSameType() public void BuilderFilterByDefaultReturnsValidFilter() { // Arrange - var expectedFilter = new BsonDocument() { ["Property1"] = new BsonDocument() { ["$eq"] = "TestValue1" } }; + var expectedFilter = new BsonDocument() { ["property_1"] = new BsonDocument() { ["$eq"] = "TestValue1" } }; var vectorSearchFilter = new VectorSearchFilter().EqualTo("Property1", "TestValue1"); // Act diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs index fccfa847a0a3..8737519c2eba 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchConstants.cs @@ -1,8 +1,38 @@ // Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections.Generic; + namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; internal static class AzureAISearchConstants { internal const string VectorStoreSystemName = "azure.aisearch"; + + /// A set of types that a key on the provided model may have. 
+ internal static readonly HashSet SupportedKeyTypes = [typeof(string)]; + + /// A set of types that data properties on the provided model may have. + internal static readonly HashSet SupportedDataTypes = + [ + typeof(string), + typeof(int), + typeof(long), + typeof(double), + typeof(float), + typeof(bool), + typeof(DateTimeOffset) + ]; + + /// A set of types that vectors on the provided model may have. + /// + /// Azure AI Search is adding support for more types than just float32, but these are not available for use via the + /// SDK yet. We will update this list as the SDK is updated. + /// + /// + internal static readonly HashSet SupportedVectorTypes = + [ + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?) + ]; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchDynamicDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchDynamicDataModelMapper.cs index dec24ef30c0c..b5b4e9ed08e8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchDynamicDataModelMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchDynamicDataModelMapper.cs @@ -40,7 +40,7 @@ public JsonObject MapFromDataToStorageModel(Dictionary dataMode if (dataModel.TryGetValue(property.ModelName, out var dataValue)) { var serializedJsonNode = JsonSerializer.SerializeToNode(dataValue); - storageJsonObject.Add(property.ModelName, serializedJsonNode); + storageJsonObject.Add(property.StorageName, serializedJsonNode); } continue; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchDynamicModelBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchDynamicModelBuilder.cs new file mode 100644 index 000000000000..059d073ca3c3 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchDynamicModelBuilder.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Linq; +using Microsoft.Extensions.VectorData.ConnectorSupport; + +namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; + +internal class AzureAISearchDynamicModelBuilder() : VectorStoreRecordModelBuilder(s_modelBuildingOptions) +{ + internal static readonly VectorStoreRecordModelBuildingOptions s_modelBuildingOptions = new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + + SupportedKeyPropertyTypes = AzureAISearchConstants.SupportedKeyTypes, + SupportedDataPropertyTypes = AzureAISearchConstants.SupportedDataTypes, + SupportedEnumerableDataPropertyElementTypes = AzureAISearchConstants.SupportedDataTypes, + SupportedVectorPropertyTypes = AzureAISearchConstants.SupportedVectorTypes, + + UsesExternalSerializer = true + }; + + protected override void Validate(Type type) + { + base.Validate(type); + + if (this.VectorProperties.FirstOrDefault(p => p.EmbeddingGenerator is not null) is VectorStoreRecordPropertyModel property) + { + throw new NotSupportedException( + $"The Azure AI Search connector does not currently support a custom embedding generator (configured for property '{property.ModelName}' on type '{type.Name}'). 
" + + "However, you can configure embedding generation in Azure AI Search itself, without requiring a .NET IEmbeddingGenerator."); + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchModelBuilder.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchModelBuilder.cs index 668115b92c29..e3ff3b5c2983 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchModelBuilder.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchModelBuilder.cs @@ -1,7 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Collections.Generic; using System.Linq; using Microsoft.Extensions.VectorData.ConnectorSupport; @@ -9,43 +8,16 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; internal class AzureAISearchModelBuilder() : VectorStoreRecordJsonModelBuilder(s_modelBuildingOptions) { - /// A set of types that a key on the provided model may have. - private static readonly HashSet s_supportedKeyTypes = [typeof(string)]; - - /// A set of types that data properties on the provided model may have. - private static readonly HashSet s_supportedDataTypes = - [ - typeof(string), - typeof(int), - typeof(long), - typeof(double), - typeof(float), - typeof(bool), - typeof(DateTimeOffset) - ]; - - /// A set of types that vectors on the provided model may have. - /// - /// Azure AI Search is adding support for more types than just float32, but these are not available for use via the - /// SDK yet. We will update this list as the SDK is updated. - /// - /// - private static readonly HashSet s_supportedVectorTypes = - [ - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?) - ]; - internal static readonly VectorStoreRecordModelBuildingOptions s_modelBuildingOptions = new() { RequiresAtLeastOneVector = false, SupportsMultipleKeys = false, SupportsMultipleVectors = true, - SupportedKeyPropertyTypes = s_supportedKeyTypes, - SupportedDataPropertyTypes = s_supportedDataTypes, - SupportedEnumerableDataPropertyElementTypes = s_supportedDataTypes, - SupportedVectorPropertyTypes = s_supportedVectorTypes, + SupportedKeyPropertyTypes = AzureAISearchConstants.SupportedKeyTypes, + SupportedDataPropertyTypes = AzureAISearchConstants.SupportedDataTypes, + SupportedEnumerableDataPropertyElementTypes = AzureAISearchConstants.SupportedDataTypes, + SupportedVectorPropertyTypes = AzureAISearchConstants.SupportedVectorTypes, UsesExternalSerializer = true }; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index 21650c41b192..c3b5eefb210e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -87,8 +87,9 @@ public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexCli this._options = options ?? new AzureAISearchVectorStoreRecordCollectionOptions(); this._searchClient = this._searchIndexClient.GetSearchClient(name); - this._model = new AzureAISearchModelBuilder() - .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator, this._options.JsonSerializerOptions); + this._model = typeof(TRecord) == typeof(Dictionary) ? 
+ new AzureAISearchDynamicModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator) : + new AzureAISearchModelBuilder().Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator, this._options.JsonSerializerOptions); // Resolve mapper. // If they didn't provide a custom mapper, and the record type is the generic data model, use the built in mapper for that. diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs index 276a34643142..7dc507badd09 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreRecordMapper.cs @@ -43,7 +43,7 @@ internal sealed class PostgresVectorStoreRecordMapper(VectorStoreRecord Embedding fe => fe.Vector, _ => throw new UnreachableException() } - : (ReadOnlyMemory)property.GetValueAsObject(dataModel!)!)); + : (ReadOnlyMemory?)property.GetValueAsObject(dataModel!)!)); } return properties; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index d67372eba098..af6d2bdb7c18 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -100,19 +100,22 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string name, Red throw new NotSupportedException("Only string keys are supported (and object for dynamic mapping)."); } + var isDynamic = typeof(TRecord) == typeof(Dictionary); + // Assign. this._database = database; this._collectionName = name; this._options = options ?? new RedisJsonVectorStoreRecordCollectionOptions(); this._jsonSerializerOptions = this._options.JsonSerializerOptions ?? JsonSerializerOptions.Default; - this._model = new VectorStoreRecordJsonModelBuilder(ModelBuildingOptions) - .Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator, this._jsonSerializerOptions); + this._model = isDynamic ? + new VectorStoreRecordModelBuilder(ModelBuildingOptions).Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator) : + new VectorStoreRecordJsonModelBuilder(ModelBuildingOptions).Build(typeof(TRecord), this._options.VectorStoreRecordDefinition, this._options.EmbeddingGenerator, this._jsonSerializerOptions); // Lookup storage property names. this._dataStoragePropertyNames = this._model.DataProperties.Select(p => p.StorageName).ToArray(); // Assign Mapper. - this._mapper = typeof(TRecord) == typeof(Dictionary) + this._mapper = isDynamic ? 
(IRedisJsonMapper)new RedisJsonDynamicDataModelMapper(this._model, this._jsonSerializerOptions) : new RedisJsonVectorStoreRecordMapper(this._model, this._jsonSerializerOptions); diff --git a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs index 3ba21bb058a0..c8c5fcf9f39b 100644 --- a/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.MongoDB.UnitTests/MongoDBVectorStoreCollectionSearchMappingTests.cs @@ -66,7 +66,7 @@ public void BuildFilterThrowsExceptionWithMultipleFilterClausesOfSameType() public void BuilderFilterByDefaultReturnsValidFilter() { // Arrange - var expectedFilter = new BsonDocument() { ["Property1"] = new BsonDocument() { ["$eq"] = "TestValue1" } }; + var expectedFilter = new BsonDocument() { ["property_1"] = new BsonDocument() { ["$eq"] = "TestValue1" } }; var vectorSearchFilter = new VectorSearchFilter().EqualTo("Property1", "TestValue1"); // Act diff --git a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs index bcb0bc2d38fb..dfd68ffb6466 100644 --- a/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs +++ b/dotnet/src/Connectors/VectorData.Abstractions/ConnectorSupport/VectorStoreRecordModelBuilder.cs @@ -193,7 +193,7 @@ protected virtual void ProcessTypeProperties(Type type, VectorStoreRecordDefinit continue; } - this.SetPropertyStorageName(property, storageName); + this.SetPropertyStorageName(property, storageName, type); property.PropertyInfo = clrProperty; this.PropertyMap.Add(clrProperty.Name, property); @@ -252,7 +252,7 @@ protected virtual void ProcessRecordDefinition( } property.Type = definitionProperty.PropertyType; - this.SetPropertyStorageName(property, definitionProperty.StoragePropertyName); + this.SetPropertyStorageName(property, definitionProperty.StoragePropertyName, type); switch (definitionProperty) { @@ -344,7 +344,7 @@ protected virtual void ProcessRecordDefinition( } } - private void SetPropertyStorageName(VectorStoreRecordPropertyModel property, string? storageName) + private void SetPropertyStorageName(VectorStoreRecordPropertyModel property, string? storageName, Type? type) { if (property is VectorStoreRecordKeyPropertyModel && this.Options.ReservedKeyStorageName is not null) { @@ -362,7 +362,7 @@ private void SetPropertyStorageName(VectorStoreRecordPropertyModel property, str // our model needs to be in sync with the serializer's behavior (for e.g. storage names in filters). // So we ignore the config here as well. 
// TODO: Consider throwing here instead of ignoring - if (this.Options.UsesExternalSerializer) + if (this.Options.UsesExternalSerializer && type != null) { return; } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs index 440ea94bb3e6..3ff4493ee79b 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Runtime.CompilerServices; using System.Text.RegularExpressions; using System.Threading.Tasks; using Azure; @@ -114,6 +115,11 @@ public AzureAISearchVectorStoreFixture() /// public ITextEmbeddingGenerationService EmbeddingGenerator { get; private set; } + /// + /// Gets the embedding used for all test documents that the collection is seeded with. + /// + public ReadOnlyMemory Embedding { get; private set; } + /// /// Create / Recreate index and upload documents before test run. /// @@ -122,7 +128,7 @@ public async Task InitializeAsync() { await AzureAISearchVectorStoreFixture.DeleteIndexIfExistsAsync(this._testIndexName, this.SearchIndexClient); await AzureAISearchVectorStoreFixture.CreateIndexAsync(this._testIndexName, this.SearchIndexClient); - await AzureAISearchVectorStoreFixture.UploadDocumentsAsync(this.SearchIndexClient.GetSearchClient(this._testIndexName), this.EmbeddingGenerator); + await this.UploadDocumentsAsync(this.SearchIndexClient.GetSearchClient(this._testIndexName), this.EmbeddingGenerator); } /// @@ -193,9 +199,9 @@ public static async Task CreateIndexAsync(string indexName, SearchIndexClient ad /// /// The client to use for uploading the documents. /// An instance of to generate embeddings. 
- public static async Task UploadDocumentsAsync(SearchClient searchClient, ITextEmbeddingGenerationService embeddingGenerator) + public async Task UploadDocumentsAsync(SearchClient searchClient, ITextEmbeddingGenerationService embeddingGenerator) { - var embedding = await embeddingGenerator.GenerateEmbeddingAsync("This is a great hotel"); + this.Embedding = await embeddingGenerator.GenerateEmbeddingAsync("This is a great hotel"); IndexDocumentsBatch batch = IndexDocumentsBatch.Create( IndexDocumentsAction.Upload( @@ -204,7 +210,7 @@ public static async Task UploadDocumentsAsync(SearchClient searchClient, ITextEm HotelId = "BaseSet-1", HotelName = "Hotel 1", Description = "This is a great hotel", - DescriptionEmbedding = embedding, + DescriptionEmbedding = this.Embedding, Tags = new[] { "pool", "air conditioning", "concierge" }, ParkingIncluded = false, LastRenovationDate = new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), @@ -216,7 +222,7 @@ public static async Task UploadDocumentsAsync(SearchClient searchClient, ITextEm HotelId = "BaseSet-2", HotelName = "Hotel 2", Description = "This is a great hotel", - DescriptionEmbedding = embedding, + DescriptionEmbedding = this.Embedding, Tags = new[] { "pool", "free wifi", "concierge" }, ParkingIncluded = false, LastRenovationDate = new DateTimeOffset(1979, 2, 18, 0, 0, 0, TimeSpan.Zero), @@ -228,7 +234,7 @@ public static async Task UploadDocumentsAsync(SearchClient searchClient, ITextEm HotelId = "BaseSet-3", HotelName = "Hotel 3", Description = "This is a great hotel", - DescriptionEmbedding = embedding, + DescriptionEmbedding = this.Embedding, Tags = new[] { "air conditioning", "bar", "continental breakfast" }, ParkingIncluded = true, LastRenovationDate = new DateTimeOffset(2015, 9, 20, 0, 0, 0, TimeSpan.Zero), @@ -240,7 +246,7 @@ public static async Task UploadDocumentsAsync(SearchClient searchClient, ITextEm HotelId = "BaseSet-4", HotelName = "Hotel 4", Description = "This is a great hotel", - DescriptionEmbedding = embedding, + DescriptionEmbedding = this.Embedding, Tags = new[] { "concierge", "view", "24-hour front desk service" }, ParkingIncluded = true, LastRenovationDate = new DateTimeOffset(1960, 2, 06, 0, 0, 0, TimeSpan.Zero), diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs index dc4c7659fe2e..eeb7b46e7ab2 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -8,7 +8,6 @@ using Azure.Search.Documents.Indexes; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.AzureAISearch; -using Microsoft.SemanticKernel.Embeddings; using Xunit; using Xunit.Abstractions; @@ -48,7 +47,7 @@ public async Task CollectionExistsReturnsCollectionStateAsync(bool expectedExist public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDefinition) { // Arrange - var hotel = await this.CreateTestHotelAsync("Upsert-1"); + var hotel = this.CreateTestHotel("Upsert-1"); var testCollectionName = $"{fixture.TestIndexName}-createtest"; var options = new AzureAISearchVectorStoreRecordCollectionOptions { @@ -61,8 +60,8 @@ public async Task ItCanCreateACollectionUpsertGetAndSearchAsync(bool useRecordDe // Act await sut.CreateCollectionAsync(); var 
upsertResult = await sut.UpsertAsync(hotel); - var getResult = await sut.GetAsync("Upsert-1"); - var embedding = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"); + var getResult = await sut.GetAsync("Upsert-1", new() { IncludeVectors = true }); + var embedding = fixture.Embedding; var searchResults = await sut.VectorizedSearchAsync( embedding, top: 3, @@ -135,9 +134,9 @@ public async Task ItCanUpsertDocumentToVectorStoreAsync(bool useRecordDefinition var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName, options); // Act - var hotel = await this.CreateTestHotelAsync("Upsert-1"); + var hotel = this.CreateTestHotel("Upsert-1"); var upsertResult = await sut.UpsertAsync(hotel); - var getResult = await sut.GetAsync("Upsert-1"); + var getResult = await sut.GetAsync("Upsert-1", new() { IncludeVectors = true }); // Assert Assert.NotNull(upsertResult); @@ -167,9 +166,9 @@ public async Task ItCanUpsertManyDocumentsToVectorStoreAsync() // Act var results = await sut.UpsertAsync( [ - await this.CreateTestHotelAsync("UpsertMany-1"), - await this.CreateTestHotelAsync("UpsertMany-2"), - await this.CreateTestHotelAsync("UpsertMany-3"), + this.CreateTestHotel("UpsertMany-1"), + this.CreateTestHotel("UpsertMany-2"), + this.CreateTestHotel("UpsertMany-3"), ]); // Assert @@ -212,7 +211,7 @@ public async Task ItCanGetDocumentFromVectorStoreAsync(bool includeVectors, bool Assert.Equal(includeVectors, getResult.DescriptionEmbedding != null); if (includeVectors) { - var embedding = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("This is a great hotel"); + var embedding = fixture.Embedding; Assert.Equal(embedding, getResult.DescriptionEmbedding!.Value.ToArray()); } else @@ -261,7 +260,7 @@ public async Task ItCanRemoveDocumentFromVectorStoreAsync(bool useRecordDefiniti VectorStoreRecordDefinition = useRecordDefinition ? fixture.VectorStoreRecordDefinition : null }; var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); - await sut.UpsertAsync(await this.CreateTestHotelAsync("Remove-1")); + await sut.UpsertAsync(this.CreateTestHotel("Remove-1")); // Act await sut.DeleteAsync("Remove-1"); @@ -277,9 +276,9 @@ public async Task ItCanRemoveManyDocumentsFromVectorStoreAsync() { // Arrange var sut = new AzureAISearchVectorStoreRecordCollection(fixture.SearchIndexClient, fixture.TestIndexName); - await sut.UpsertAsync(await this.CreateTestHotelAsync("RemoveMany-1")); - await sut.UpsertAsync(await this.CreateTestHotelAsync("RemoveMany-2")); - await sut.UpsertAsync(await this.CreateTestHotelAsync("RemoveMany-3")); + await sut.UpsertAsync(this.CreateTestHotel("RemoveMany-1")); + await sut.UpsertAsync(this.CreateTestHotel("RemoveMany-2")); + await sut.UpsertAsync(this.CreateTestHotel("RemoveMany-3")); // Act // Also include a non-existing key to test that the operation does not fail for these. @@ -334,7 +333,7 @@ public async Task ItCanSearchWithVectorAndFiltersAsync(string option, bool inclu // Act. var filter = option == "equality" ? 
new VectorSearchFilter().EqualTo("HotelName", "Hotel 3") : new VectorSearchFilter().AnyTagEqualTo("Tags", "bar"); var searchResults = await sut.VectorizedSearchAsync( - await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"), + fixture.Embedding, top: 3, new() { @@ -356,7 +355,7 @@ await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("A great hotel"), if (includeVectors) { Assert.NotNull(searchResult.Record.DescriptionEmbedding); - var embedding = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("This is a great hotel"); + var embedding = fixture.Embedding; Assert.Equal(embedding, searchResult.Record.DescriptionEmbedding!.Value.ToArray()); } else @@ -398,8 +397,8 @@ public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() // Act var baseSetGetResult = await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true }); - var baseSetEmbedding = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("This is a great hotel"); - var dynamicMapperEmbedding = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("This is a dynamic mapper hotel"); + var baseSetEmbedding = fixture.Embedding; + var dynamicMapperEmbedding = fixture.Embedding; var upsertResult = await sut.UpsertAsync(new Dictionary { ["HotelId"] = "DynamicMapper-1", @@ -438,12 +437,12 @@ public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() Assert.Equal(dynamicMapperEmbedding, (ReadOnlyMemory)localGetResult["DescriptionEmbedding"]!); } - private async Task CreateTestHotelAsync(string hotelId) => new() + private AzureAISearchHotel CreateTestHotel(string hotelId) => new() { HotelId = hotelId, HotelName = $"MyHotel {hotelId}", Description = "My Hotel is great.", - DescriptionEmbedding = await fixture.EmbeddingGenerator.GenerateEmbeddingAsync("My hotel is great"), + DescriptionEmbedding = fixture.Embedding, Tags = ["pool", "air conditioning", "concierge"], ParkingIncluded = true, LastRenovationDate = new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreFixture.cs index 9cac223fa88e..10d52bce99e8 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreFixture.cs @@ -32,10 +32,11 @@ public AzureCosmosDBMongoDBVectorStoreFixture() .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) .AddJsonFile( path: "testsettings.development.json", - optional: false, + optional: true, reloadOnChange: true ) .AddEnvironmentVariables() + .AddUserSecrets() .Build(); var connectionString = GetConnectionString(configuration); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs index 15d7349fb076..b52a7fc56b9f 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -38,13 +38,21 @@ public async Task CollectionExistsReturnsCollectionStateAsync(string collectionN 
public async Task ItCanCreateCollectionAsync() { // Arrange - var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, "sk-test-create-collection"); - // Act - await sut.CreateCollectionAsync(); + try + { + // Act + await sut.CreateCollectionAsync(); - // Assert - Assert.True(await sut.CollectionExistsAsync()); + // Assert + Assert.True(await sut.CollectionExistsAsync()); + } + finally + { + // Clean up + await sut.DeleteCollectionAsync(); + } } [Theory(Skip = SkipReason)] diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs index 4569c75514b3..462818320dd8 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -7,6 +7,7 @@ using Microsoft.Azure.Cosmos; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; +using SemanticKernel.IntegrationTests.Connectors.Memory.Xunit; using Xunit; using DistanceFunction = Microsoft.Extensions.VectorData.DistanceFunction; using IndexKind = Microsoft.Extensions.VectorData.IndexKind; @@ -20,11 +21,10 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureCosmosDBNoSQL; /// Integration tests for class. /// [Collection("AzureCosmosDBNoSQLVectorStoreCollection")] +[AzureCosmosDBNoSQLConnectionStringSetCondition] public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollectionTests(AzureCosmosDBNoSQLVectorStoreFixture fixture) { - private const string? 
SkipReason = "Azure CosmosDB NoSQL cluster is required"; - - [Fact(Skip = SkipReason)] + [VectorStoreFact] public async Task ItCanCreateCollectionAsync() { // Arrange @@ -37,7 +37,7 @@ public async Task ItCanCreateCollectionAsync() Assert.True(await sut.CollectionExistsAsync()); } - [Theory(Skip = SkipReason)] + [VectorStoreTheory] [InlineData("sk-test-hotels", true)] [InlineData("nonexistentcollection", false)] public async Task CollectionExistsReturnsCollectionStateAsync(string collectionName, bool expectedExists) @@ -57,7 +57,7 @@ public async Task CollectionExistsReturnsCollectionStateAsync(string collectionN Assert.Equal(expectedExists, actual); } - [Theory(Skip = SkipReason)] + [VectorStoreTheory] [InlineData(true, true)] [InlineData(true, false)] [InlineData(false, true)] @@ -112,7 +112,7 @@ public async Task ItCanCreateCollectionUpsertAndGetAsync(bool includeVectors, bo } } - [Fact(Skip = SkipReason)] + [VectorStoreFact] public async Task ItCanDeleteCollectionAsync() { // Arrange @@ -130,7 +130,7 @@ public async Task ItCanDeleteCollectionAsync() Assert.False(await sut.CollectionExistsAsync()); } - [Theory(Skip = SkipReason)] + [VectorStoreTheory] [InlineData("consistent-mode-collection", IndexingMode.Consistent)] [InlineData("lazy-mode-collection", IndexingMode.Lazy)] [InlineData("none-mode-collection", IndexingMode.None)] @@ -162,7 +162,7 @@ public async Task ItCanGetAndDeleteRecordAsync(string collectionName, IndexingMo Assert.Null(getResult); } - [Fact(Skip = SkipReason)] + [VectorStoreFact] public async Task ItCanGetAndDeleteRecordWithPartitionKeyAsync() { // Arrange @@ -197,7 +197,7 @@ public async Task ItCanGetAndDeleteRecordWithPartitionKeyAsync() Assert.Null(getResult); } - [Fact(Skip = SkipReason)] + [VectorStoreFact] public async Task ItCanGetAndDeleteBatchAsync() { // Arrange @@ -231,7 +231,7 @@ public async Task ItCanGetAndDeleteBatchAsync() Assert.Empty(getResults); } - [Fact(Skip = SkipReason)] + [VectorStoreFact] public async Task ItCanUpsertRecordAsync() { // Arrange @@ -261,7 +261,7 @@ public async Task ItCanUpsertRecordAsync() Assert.Equal(10, getResult.HotelRating); } - [Fact(Skip = SkipReason)] + [VectorStoreFact] public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() { // Arrange @@ -291,7 +291,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() Assert.Equal(1, searchResults.First(l => l.Record.HotelId == "key1").Score); } - [Fact(Skip = SkipReason)] + [VectorStoreFact] public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() { // Arrange @@ -322,7 +322,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() Assert.DoesNotContain("key2", ids); } - [Theory(Skip = SkipReason)] + [VectorStoreTheory] [MemberData(nameof(VectorizedSearchWithFilterData))] public async Task VectorizedSearchReturnsValidResultsWithFilterAsync(VectorSearchFilter filter, List expectedIds) { @@ -350,7 +350,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync(VectorSearc Assert.Equal(expectedIds, actualIds); } - [Fact(Skip = SkipReason)] + [VectorStoreFact] public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() { // Arrange @@ -383,7 +383,9 @@ public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() // Assert Assert.NotNull(upsertResult); - Assert.Equal(HotelId, upsertResult); + var upsertCompositeKey = (AzureCosmosDBNoSQLCompositeKey)upsertResult; + Assert.Equal(HotelId, upsertCompositeKey.PartitionKey); + Assert.Equal(HotelId, upsertCompositeKey.RecordKey); 
Assert.NotNull(localGetResult); Assert.Equal("Dynamic Mapper Hotel", localGetResult["HotelName"]); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreTests.cs index 78c87350c23d..da92d728b31b 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreTests.cs @@ -1,7 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; -using SemanticKernel.IntegrationTests.Connectors.Memory.Xunit; using Xunit; namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureCosmosDBNoSQL; @@ -10,7 +9,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureCosmosDBNoSQL; /// Integration tests for . /// [Collection("AzureCosmosDBNoSQLVectorStoreCollection")] -[DisableVectorStoreTests(Skip = "Azure CosmosDB NoSQL cluster is required")] +[AzureCosmosDBNoSQLConnectionStringSetCondition] public sealed class AzureCosmosDBNoSQLVectorStoreTests(AzureCosmosDBNoSQLVectorStoreFixture fixture) : BaseVectorStoreTests(new AzureCosmosDBNoSQLVectorStore(fixture.Database!)) { diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs index 654ae7cedb75..4c47f71d2d7f 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs @@ -449,7 +449,7 @@ public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() ["DescriptionEmbedding"] = new ReadOnlyMemory([30f, 31f, 32f, 33f]) }); - var localGetResult = await sut.GetAsync("Dynamic-1", new GetRecordOptions { IncludeVectors = true }); + var localGetResult = await sut.GetAsync("DynamicMapper-1", new GetRecordOptions { IncludeVectors = true }); // Assert Assert.NotNull(upsertResult); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs index f34627086e8c..febee4bed7da 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisVectorStoreFixture.cs @@ -132,14 +132,14 @@ public async Task InitializeAsync() DescriptionEmbedding = embedding, Tags = new[] { "pool", "air conditioning", "concierge" }, FTSTags = new[] { "pool", "air conditioning", "concierge" }, - ParkingIncluded = true, + parking_is_included = true, LastRenovationDate = new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), Rating = 3.6, Address = address }); - await this.Database.JSON().SetAsync("jsonhotels:BaseSet-2", "$", new { HotelName = "My Hotel 2", HotelCode = 2, Description = "This is a great hotel.", DescriptionEmbedding = embedding, ParkingIncluded = false }); - await this.Database.JSON().SetAsync("jsonhotels:BaseSet-3", "$", new { HotelName = "My Hotel 3", HotelCode = 3, Description = "This is a great hotel.", DescriptionEmbedding = embedding, ParkingIncluded = false }); - await this.Database.JSON().SetAsync("jsonhotels:BaseSet-4-Invalid", "$", new { HotelId = "AnotherId", HotelName = "My Invalid 
Hotel", HotelCode = 4, Description = "This is an invalid hotel.", DescriptionEmbedding = embedding, ParkingIncluded = false }); + await this.Database.JSON().SetAsync("jsonhotels:BaseSet-2", "$", new { HotelName = "My Hotel 2", HotelCode = 2, Description = "This is a great hotel.", DescriptionEmbedding = embedding, parking_is_included = false }); + await this.Database.JSON().SetAsync("jsonhotels:BaseSet-3", "$", new { HotelName = "My Hotel 3", HotelCode = 3, Description = "This is a great hotel.", DescriptionEmbedding = embedding, parking_is_included = false }); + await this.Database.JSON().SetAsync("jsonhotels:BaseSet-4-Invalid", "$", new { HotelId = "AnotherId", HotelName = "My Invalid Hotel", HotelCode = 4, Description = "This is an invalid hotel.", DescriptionEmbedding = embedding, parking_is_included = false }); // Add hashset test data. await this.Database.HashSetAsync("hashhotels:HBaseSet-1", new HashEntry[] @@ -148,7 +148,7 @@ public async Task InitializeAsync() new("HotelCode", 1), new("Description", "This is a great hotel."), new("DescriptionEmbedding", MemoryMarshal.AsBytes(new ReadOnlySpan(embedding)).ToArray()), - new("ParkingIncluded", true), + new("parking_is_included", true), new("Rating", 3.6) }); await this.Database.HashSetAsync("hashhotels:HBaseSet-2", new HashEntry[] @@ -157,7 +157,7 @@ public async Task InitializeAsync() new("HotelCode", 2), new("Description", "This is a great hotel."), new("DescriptionEmbedding", MemoryMarshal.AsBytes(new ReadOnlySpan(embedding)).ToArray()), - new("ParkingIncluded", false), + new("parking_is_included", false), }); await this.Database.HashSetAsync("hashhotels:HBaseSet-3", new HashEntry[] { @@ -165,7 +165,7 @@ public async Task InitializeAsync() new("HotelCode", 3), new("Description", "This is a great hotel."), new("DescriptionEmbedding", MemoryMarshal.AsBytes(new ReadOnlySpan(embedding)).ToArray()), - new("ParkingIncluded", false), + new("parking_is_included", false), }); await this.Database.HashSetAsync("hashhotels:HBaseSet-4-Invalid", new HashEntry[] { @@ -174,7 +174,7 @@ public async Task InitializeAsync() new("HotelCode", 4), new("Description", "This is an invalid hotel."), new("DescriptionEmbedding", MemoryMarshal.AsBytes(new ReadOnlySpan(embedding)).ToArray()), - new("ParkingIncluded", false), + new("parking_is_included", false), }); } From 87dd8b1e9e8978d8e96f8c3598cd688f122df228 Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Tue, 29 Apr 2025 10:12:37 +0100 Subject: [PATCH 60/63] .Net: Small InMemory bug fix. (#11799) ### Motivation and Context We changed the storage type of the InMemory store to contain a wrapper with both the original record and generated vectors, and this code was returning the wrapper instead of the original record on get. 
### Description ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- .../InMemoryVectorStoreRecordCollection.cs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs index 2d1d6685860e..d4b4646b1671 100644 --- a/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.InMemory/InMemoryVectorStoreRecordCollection.cs @@ -480,7 +480,10 @@ public IAsyncEnumerable GetAsync(Expression> filter throw new NotSupportedException(VectorDataStrings.IncludeVectorsNotSupportedWithEmbeddingGeneration); } - var records = this.GetCollectionDictionary().Values.Cast() + var records = this.GetCollectionDictionary() + .Values + .Cast>() + .Select(x => x.Record) .AsQueryable() .Where(filter); From fe58faa04e71e28bb874aaa3711ef6b9ae7414af Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Tue, 29 Apr 2025 11:28:39 +0100 Subject: [PATCH 61/63] .Net: Update vector store readme's (#11801) ### Motivation and Context We have some broken links in the readme's and they are still referring to memory store which is being replaced. ### Description - Updated readme files to not advertise memory store anymore. - Updated readme files to point to learn site docs. ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- .../Connectors.Memory.Chroma/README.md | 2 - .../Connectors.Memory.Kusto/README.md | 9 ++- .../Connectors.Memory.Milvus/README.md | 1 - .../Connectors.Memory.MongoDB/README.md | 40 +------------ .../Connectors.Memory.Postgres/README.md | 56 +------------------ .../Connectors.Memory.Redis/README.md | 17 +----- 6 files changed, 10 insertions(+), 115 deletions(-) diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/README.md b/dotnet/src/Connectors/Connectors.Memory.Chroma/README.md index 42fc0f468a6b..47b9037200c6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/README.md +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/README.md @@ -21,8 +21,6 @@ docker-compose up -d --build 3. Use Semantic Kernel with Chroma, using server local endpoint `http://localhost:8000`: - > See [Example 14](../../../samples/Concepts/Memory/SemanticTextMemory_Building.cs) and [Example 15](../../../samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs) for more memory usage examples with the kernel. 
- ```csharp const string endpoint = "http://localhost:8000"; diff --git a/dotnet/src/Connectors/Connectors.Memory.Kusto/README.md b/dotnet/src/Connectors/Connectors.Memory.Kusto/README.md index f7c276c7e9c3..53a014bcd93f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Kusto/README.md +++ b/dotnet/src/Connectors/Connectors.Memory.Kusto/README.md @@ -1,13 +1,12 @@ # Microsoft.SemanticKernel.Connectors.Kusto -This connector uses [Azure Data Explorer (Kusto)](https://learn.microsoft.com/en-us/azure/data-explorer/) to implement Semantic Memory. +This connector uses [Azure Data Explorer (Kusto)](https://learn.microsoft.com/azure/data-explorer/) to implement Semantic Memory. ## Quick Start -1. Create a cluster and database in Azure Data Explorer (Kusto) - see https://learn.microsoft.com/en-us/azure/data-explorer/create-cluster-and-database?tabs=free +1. Create a cluster and database in Azure Data Explorer (Kusto) - see https://learn.microsoft.com/azure/data-explorer/create-cluster-and-database?tabs=free 2. To use Kusto as a semantic memory store, use the following code: - > See [Example 14](../../../samples/Concepts/Memory/SemanticTextMemory_Building.cs) and [Example 15](../../../samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs) for more memory usage examples with the kernel. ```csharp using Kusto.Data; @@ -37,9 +36,9 @@ The function is called `series_cosine_similarity_fl` and is located in the `Func Kusto is an append-only store. This means that when a fact is updated, the old fact is not deleted. This isn't a problem for the semantic memory connector, as it always utilizes the most recent fact. -This is made possible by using the [arg_max](https://learn.microsoft.com/en-us/azure/data-explorer/kusto/query/arg-max-aggfunction) aggregation function in conjunction with the [ingestion_time](https://learn.microsoft.com/en-us/azure/data-explorer/kusto/query/ingestiontimefunction) function. +This is made possible by using the [arg_max](https://learn.microsoft.com/azure/data-explorer/kusto/query/arg-max-aggfunction) aggregation function in conjunction with the [ingestion_time](https://learn.microsoft.com/azure/data-explorer/kusto/query/ingestiontimefunction) function. However, users manually querying the underlying table should be aware of this behavior. ### Authentication -Please note that the authentication used in the example above is not recommended for production use. You can find more details here: https://learn.microsoft.com/en-us/azure/data-explorer/kusto/api/connection-strings/kusto +Please note that the authentication used in the example above is not recommended for production use. You can find more details here: https://learn.microsoft.com/azure/data-explorer/kusto/api/connection-strings/kusto diff --git a/dotnet/src/Connectors/Connectors.Memory.Milvus/README.md b/dotnet/src/Connectors/Connectors.Memory.Milvus/README.md index b4d8e71d5a2c..cbdb1f99f35c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Milvus/README.md +++ b/dotnet/src/Connectors/Connectors.Memory.Milvus/README.md @@ -19,7 +19,6 @@ docker-compose up -d ``` 3. Use Semantic Kernel with Milvus, connecting to `localhost` with the default (gRPC) port of 1536: - > See [Example 14](../../../samples/Concepts/Memory/SemanticTextMemory_Building.cs) and [Example 15](../../../samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs) for more memory usage examples with the kernel. 
```csharp using MilvusMemoryStore memoryStore = new("localhost"); diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/README.md b/dotnet/src/Connectors/Connectors.Memory.MongoDB/README.md index 4a6ddcda3483..923dd3d9252c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.MongoDB/README.md +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/README.md @@ -6,44 +6,8 @@ This connector uses [MongoDB Atlas Vector Search](https://www.mongodb.com/produc 1. Create [Atlas cluster](https://www.mongodb.com/docs/atlas/getting-started/) -2. Create a [collection](https://www.mongodb.com/docs/atlas/atlas-ui/collections/) +2. Create a Mongo DB Vector Store using instructions on the [Microsoft Learn site](https://learn.microsoft.com/semantic-kernel/concepts/vector-store-connectors/out-of-the-box-connectors/mongodb-connector). -3. Create [Vector Search Index](https://www.mongodb.com/docs/atlas/atlas-vector-search/vector-search-overview/) for the collection. The index has to be defined on a field called `embedding`. For example: - -``` -{ - "type": "vectorSearch", - "fields": [ - { - "numDimensions": , - "path": "embedding", - "similarity": "euclidean | cosine | dotProduct", - "type": "vector" - } - ] -} -``` - -4. Create the MongoDB memory store - > See [Example 14](../../../samples/Concepts/Memory/SemanticTextMemory_Building.cs) and [Example 15](../../../samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs) for more memory usage examples with the kernel. - -```csharp -var connectionString = "MONGODB ATLAS CONNECTION STRING" -MongoDBMemoryStore memoryStore = new(connectionString, "MyDatabase"); - -var embeddingGenerator = new OpenAITextEmbeddingGenerationService("text-embedding-ada-002", apiKey); - -SemanticTextMemory textMemory = new(memoryStore, embeddingGenerator); - -var memoryPlugin = kernel.ImportPluginFromObject(new TextMemoryPlugin(textMemory)); -``` +3. Use the [getting started instructions](https://learn.microsoft.com/semantic-kernel/concepts/vector-store-connectors/?pivots=programming-language-csharp#getting-started-with-vector-store-connectors) on the Microsoft Leearn site to learn more about using the vector store. > Guide to find the connection string: https://www.mongodb.com/docs/manual/reference/connection-string/ - -## Important Notes - -### Vector search indexes - -In this version, vector search index management is outside of `MongoDBMemoryStore` scope. -Creation and maintenance of the indexes have to be done by the user. Please note that deleting a collection -(`memoryStore.DeleteCollectionAsync`) will delete the index as well. diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/README.md b/dotnet/src/Connectors/Connectors.Memory.Postgres/README.md index e9ed71109fbb..4d98642cb02b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/README.md +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/README.md @@ -37,58 +37,6 @@ sk_demo=# CREATE EXTENSION vector; See [this sample](../../../samples/Concepts/Memory/VectorStore_VectorSearch_MultiStore_Postgres.cs) for an example of using the vector store. -### Using PostgresMemoryStore +For more information on using Postgres as a vector store, see the [PostgresVectorStore](https://learn.microsoft.com/semantic-kernel/concepts/vector-store-connectors/out-of-the-box-connectors/postgres-connector) documentation. 
-> See [Example 14](../../../samples/Concepts/Memory/SemanticTextMemory_Building.cs) and [Example 15](../../../samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs) for more memory usage examples with the kernel. - -```csharp -NpgsqlDataSourceBuilder dataSourceBuilder = new NpgsqlDataSourceBuilder("Host=localhost;Port=5432;Database=sk_demo;User Id=postgres;Password=mysecretpassword"); -dataSourceBuilder.UseVector(); -NpgsqlDataSource dataSource = dataSourceBuilder.Build(); - -var memoryWithPostgres = new MemoryBuilder() - .WithPostgresMemoryStore(dataSource, vectorSize: 1536/*, schema: "public" */) - .WithLoggerFactory(loggerFactory) - .WithOpenAITextEmbeddingGeneration("text-embedding-ada-002", apiKey) - .Build(); - -var memoryPlugin = kernel.ImportPluginFromObject(new TextMemoryPlugin(memoryWithPostgres)); -``` - -### Create Index - -> By default, pgvector performs exact nearest neighbor search, which provides perfect recall. - -> You can add an index to use approximate nearest neighbor search, which trades some recall for performance. Unlike typical indexes, you will see different results for queries after adding an approximate index. - -> Three keys to achieving good recall are: -> -> - Create the index after the table has some data -> - Choose an appropriate number of lists - a good place to start is rows / 1000 for up to 1M rows and sqrt(rows) for over 1M rows -> - When querying, specify an appropriate number of probes (higher is better for recall, lower is better for speed) - a good place to start is sqrt(lists) - -Please read [the documentation](https://github.com/pgvector/pgvector#indexing) for more information. - -Based on the data rows of your collection table, consider the following statement to create an index. - -```sql -DO $$ -DECLARE - collection TEXT; - c_count INTEGER; -BEGIN - SELECT 'REPLACE YOUR COLLECTION TABLE NAME' INTO collection; - - -- Get count of records in collection - EXECUTE format('SELECT count(*) FROM public.%I;', collection) INTO c_count; - - -- Create Index (https://github.com/pgvector/pgvector#indexing) - IF c_count > 10000000 THEN - EXECUTE format('CREATE INDEX %I ON public.%I USING ivfflat (embedding vector_cosine_ops) WITH (lists = %s);', - collection || '_ix', collection, ROUND(sqrt(c_count))); - ELSIF c_count > 10000 THEN - EXECUTE format('CREATE INDEX %I ON public.%I USING ivfflat (embedding vector_cosine_ops) WITH (lists = %s);', - collection || '_ix', collection, c_count / 1000); - END IF; -END $$; -``` +Use the [getting started instructions](https://learn.microsoft.com/semantic-kernel/concepts/vector-store-connectors/?pivots=programming-language-csharp#getting-started-with-vector-store-connectors) on the Microsoft Leearn site to learn more about using the vector store. diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/README.md b/dotnet/src/Connectors/Connectors.Memory.Redis/README.md index 8acfd839a810..c0feab4eb169 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/README.md +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/README.md @@ -22,19 +22,6 @@ Ways to get RediSearch: docker run -d --name redis-stack-server -p 6379:6379 redis/redis-stack-server:latest ``` -2. To use Redis as a semantic memory store: - > See [Example 14](../../../samples/Concepts/Memory/SemanticTextMemory_Building.cs) and [Example 15](../../../samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs) for more memory usage examples with the kernel. +2. 
Create a Redis Vector Store using instructions on the [Microsoft Learn site](https://learn.microsoft.com/semantic-kernel/concepts/vector-store-connectors/out-of-the-box-connectors/redis-connector). -```csharp -// ConnectionMultiplexer should be a singleton instance in your application, please consider to dispose of it when your application shuts down. -// See https://stackexchange.github.io/StackExchange.Redis/Basics#basic-usage -ConnectionMultiplexer connectionMultiplexer = await ConnectionMultiplexer.ConnectAsync("localhost:6379"); -IDatabase database = connectionMultiplexer.GetDatabase(); -RedisMemoryStore memoryStore = new RedisMemoryStore(database, vectorSize: 1536); - -var embeddingGenerator = new OpenAITextEmbeddingGenerationService("text-embedding-ada-002", apiKey); - -SemanticTextMemory textMemory = new(memoryStore, embeddingGenerator); - -var memoryPlugin = kernel.ImportPluginFromObject(new TextMemoryPlugin(textMemory)); -``` +3. Use the [getting started instructions](https://learn.microsoft.com/semantic-kernel/concepts/vector-store-connectors/?pivots=programming-language-csharp#getting-started-with-vector-store-connectors) on the Microsoft Leearn site to learn more about using the vector store. From 454f36838ebb88e27290d2d962ac3090210b850b Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Tue, 29 Apr 2025 11:39:14 +0100 Subject: [PATCH 62/63] .Net: Try to reduce mongodb test flakiness (#11802) ### Motivation and Context The docker container for mongo db is often not accessible when running the tests ### Description - Switching to the mongo db test container to try and reduce the flakiness of the tests - Adding retries ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- .../MongoDB/MongoDBVectorStoreFixture.cs | 84 ++++--------------- ...MongoDBVectorStoreRecordCollectionTests.cs | 34 ++++---- .../IntegrationTests/IntegrationTests.csproj | 1 + 3 files changed, 35 insertions(+), 84 deletions(-) diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreFixture.cs index 5b6606d34652..edb37e83509c 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreFixture.cs @@ -3,42 +3,41 @@ using System; using System.Collections.Generic; using System.Threading.Tasks; -using Docker.DotNet; -using Docker.DotNet.Models; using Microsoft.Extensions.VectorData; using MongoDB.Driver; +using Testcontainers.MongoDb; using Xunit; namespace SemanticKernel.IntegrationTests.Connectors.MongoDB; +#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider adding the 'required' modifier or declaring as nullable. 
+ public class MongoDBVectorStoreFixture : IAsyncLifetime { + private readonly MongoDbContainer _container = new MongoDbBuilder() + .WithImage("mongodb/mongodb-atlas-local:7.0.6") + .Build(); + private readonly List _testCollections = ["sk-test-hotels", "sk-test-contacts", "sk-test-addresses"]; /// Main test collection for tests. public string TestCollection => this._testCollections[0]; /// that can be used to manage the collections in MongoDB. - public IMongoDatabase MongoDatabase { get; } + public IMongoDatabase MongoDatabase { get; private set; } /// Gets the manually created vector store record definition for MongoDB test model. public VectorStoreRecordDefinition HotelVectorStoreRecordDefinition { get; private set; } - /// The id of the MongoDB container that we are testing with. - private string? _containerId = null; - - /// The Docker client we are using to create a MongoDB container with. - private readonly DockerClient _client; - - /// - /// Initializes a new instance of the class. - /// - public MongoDBVectorStoreFixture() + public async Task InitializeAsync() { - using var dockerClientConfiguration = new DockerClientConfiguration(); - this._client = dockerClientConfiguration.CreateClient(); + await this._container.StartAsync(); - var mongoClient = new MongoClient("mongodb://localhost:27017/?directConnection=true"); + var mongoClient = new MongoClient(new MongoClientSettings + { + Server = new MongoServerAddress(this._container.Hostname, this._container.GetMappedPublicPort(MongoDbBuilder.MongoDbPort)), + DirectConnection = true, + }); this.MongoDatabase = mongoClient.GetDatabase("test"); @@ -57,11 +56,6 @@ public MongoDBVectorStoreFixture() new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?), 4) { IndexKind = IndexKind.IvfFlat, DistanceFunction = DistanceFunction.CosineSimilarity } ] }; - } - - public async Task InitializeAsync() - { - this._containerId = await SetupMongoDBContainerAsync(this._client); foreach (var collection in this._testCollections) { @@ -81,52 +75,6 @@ public async Task DisposeAsync() } } - if (this._containerId != null) - { - await this._client.Containers.StopContainerAsync(this._containerId, new ContainerStopParameters()); - await this._client.Containers.RemoveContainerAsync(this._containerId, new ContainerRemoveParameters()); - } + await this._container.StopAsync(); } - - #region private - - private static async Task SetupMongoDBContainerAsync(DockerClient client) - { - const string Image = "mongodb/mongodb-atlas-local"; - const string Tag = "latest"; - - await client.Images.CreateImageAsync( - new ImagesCreateParameters - { - FromImage = Image, - Tag = Tag, - }, - null, - new Progress()); - - var container = await client.Containers.CreateContainerAsync(new CreateContainerParameters() - { - Image = $"{Image}:{Tag}", - HostConfig = new HostConfig() - { - PortBindings = new Dictionary> - { - { "27017", new List { new() { HostPort = "27017" } } }, - }, - PublishAllPorts = true - }, - ExposedPorts = new Dictionary - { - { "27017", default }, - }, - }); - - await client.Containers.StartContainerAsync( - container.ID, - new ContainerStartParameters()); - - return container.ID; - } - - #endregion } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs index 4c47f71d2d7f..a5ce7239ac78 100644 --- 
a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBVectorStoreRecordCollectionTests.cs @@ -8,6 +8,8 @@ using Microsoft.SemanticKernel.Connectors.MongoDB; using MongoDB.Bson; using MongoDB.Bson.Serialization.Attributes; +using MongoDB.Driver; +using xRetry; using Xunit; namespace SemanticKernel.IntegrationTests.Connectors.MongoDB; @@ -20,7 +22,7 @@ public class MongoDBVectorStoreRecordCollectionTests(MongoDBVectorStoreFixture f // If null, all tests will be enabled private const string? SkipReason = null; - [Theory(Skip = SkipReason)] + [RetryTheory(typeof(MongoCommandException), Skip = SkipReason)] [InlineData("sk-test-hotels", true)] [InlineData("nonexistentcollection", false)] public async Task CollectionExistsReturnsCollectionStateAsync(string collectionName, bool expectedExists) @@ -35,7 +37,7 @@ public async Task CollectionExistsReturnsCollectionStateAsync(string collectionN Assert.Equal(expectedExists, actual); } - [Fact(Skip = SkipReason)] + [RetryFact(typeof(MongoCommandException), Skip = SkipReason)] public async Task ItCanCreateCollectionAsync() { // Arrange @@ -50,7 +52,7 @@ public async Task ItCanCreateCollectionAsync() await sut.DeleteCollectionAsync(); } - [Theory(Skip = SkipReason)] + [RetryTheory(typeof(MongoCommandException), Skip = SkipReason)] [InlineData(true, true)] [InlineData(true, false)] [InlineData(false, true)] @@ -105,7 +107,7 @@ public async Task ItCanCreateCollectionUpsertAndGetAsync(bool includeVectors, bo } } - [Fact(Skip = SkipReason)] + [RetryFact(typeof(MongoCommandException), Skip = SkipReason)] public async Task ItCanDeleteCollectionAsync() { // Arrange @@ -123,7 +125,7 @@ public async Task ItCanDeleteCollectionAsync() Assert.False(await sut.CollectionExistsAsync()); } - [Fact(Skip = SkipReason)] + [RetryFact(typeof(MongoCommandException), Skip = SkipReason)] public async Task ItCanGetAndDeleteRecordAsync() { // Arrange @@ -147,7 +149,7 @@ public async Task ItCanGetAndDeleteRecordAsync() Assert.Null(getResult); } - [Fact(Skip = SkipReason)] + [RetryFact(typeof(MongoCommandException), Skip = SkipReason)] public async Task ItCanGetAndDeleteBatchAsync() { // Arrange @@ -179,7 +181,7 @@ public async Task ItCanGetAndDeleteBatchAsync() Assert.Empty(getResults); } - [Fact(Skip = SkipReason)] + [RetryFact(typeof(MongoCommandException), Skip = SkipReason)] public async Task ItCanUpsertRecordAsync() { // Arrange @@ -207,7 +209,7 @@ public async Task ItCanUpsertRecordAsync() Assert.Equal(10, getResult.HotelRating); } - [Fact(Skip = SkipReason)] + [RetryFact(typeof(MongoCommandException), Skip = SkipReason)] public async Task UpsertWithModelWorksCorrectlyAsync() { // Arrange @@ -239,7 +241,7 @@ public async Task UpsertWithModelWorksCorrectlyAsync() Assert.Equal("Test Name", getResult.HotelName); } - [Fact(Skip = SkipReason)] + [RetryFact(typeof(MongoCommandException), Skip = SkipReason)] public async Task UpsertWithVectorStoreModelWorksCorrectlyAsync() { // Arrange @@ -259,7 +261,7 @@ public async Task UpsertWithVectorStoreModelWorksCorrectlyAsync() Assert.Equal("Test Name", getResult.HotelName); } - [Fact(Skip = SkipReason)] + [RetryFact(typeof(MongoCommandException), Skip = SkipReason)] public async Task UpsertWithBsonModelWorksCorrectlyAsync() { // Arrange @@ -291,7 +293,7 @@ public async Task UpsertWithBsonModelWorksCorrectlyAsync() Assert.Equal("Test Name", getResult.HotelName); } - [Fact(Skip = SkipReason)] + 
[RetryFact(typeof(MongoCommandException), Skip = SkipReason)] public async Task UpsertWithBsonVectorStoreModelWorksCorrectlyAsync() { // Arrange @@ -311,7 +313,7 @@ public async Task UpsertWithBsonVectorStoreModelWorksCorrectlyAsync() Assert.Equal("Test Name", getResult.HotelName); } - [Fact(Skip = SkipReason)] + [RetryFact(typeof(MongoCommandException), Skip = SkipReason)] public async Task UpsertWithBsonVectorStoreWithNameModelWorksCorrectlyAsync() { // Arrange @@ -331,7 +333,7 @@ public async Task UpsertWithBsonVectorStoreWithNameModelWorksCorrectlyAsync() Assert.Equal("Test Name", getResult.HotelName); } - [Fact(Skip = SkipReason)] + [RetryFact(typeof(MongoCommandException), Skip = SkipReason)] public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() { // Arrange @@ -361,7 +363,7 @@ public async Task VectorizedSearchReturnsValidResultsByDefaultAsync() Assert.Equal(1, searchResults.First(l => l.Record.HotelId == "key1").Score); } - [Fact(Skip = SkipReason)] + [RetryFact(typeof(MongoCommandException), Skip = SkipReason)] public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() { // Arrange @@ -392,7 +394,7 @@ public async Task VectorizedSearchReturnsValidResultsWithOffsetAsync() Assert.DoesNotContain("key2", ids); } - [Fact(Skip = SkipReason)] + [RetryFact(typeof(MongoCommandException), Skip = SkipReason)] public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() { // Arrange @@ -423,7 +425,7 @@ public async Task VectorizedSearchReturnsValidResultsWithFilterAsync() Assert.DoesNotContain("key4", ids); } - [Fact(Skip = SkipReason)] + [RetryFact(typeof(MongoCommandException), Skip = SkipReason)] public async Task ItCanUpsertAndRetrieveUsingTheDynamicMapperAsync() { // Arrange diff --git a/dotnet/src/IntegrationTests/IntegrationTests.csproj b/dotnet/src/IntegrationTests/IntegrationTests.csproj index 660a7a40ed7c..b53f678c5d4b 100644 --- a/dotnet/src/IntegrationTests/IntegrationTests.csproj +++ b/dotnet/src/IntegrationTests/IntegrationTests.csproj @@ -65,6 +65,7 @@ all + From 81f8151f955a35055843246e7a009841356e6e1c Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Tue, 29 Apr 2025 13:39:53 +0100 Subject: [PATCH 63/63] Delete 00NN-hybrid-search.md --- docs/decisions/00NN-hybrid-search.md | 395 --------------------------- 1 file changed, 395 deletions(-) delete mode 100644 docs/decisions/00NN-hybrid-search.md diff --git a/docs/decisions/00NN-hybrid-search.md b/docs/decisions/00NN-hybrid-search.md deleted file mode 100644 index 486530972c1a..000000000000 --- a/docs/decisions/00NN-hybrid-search.md +++ /dev/null @@ -1,395 +0,0 @@ ---- -# These are optional elements. Feel free to remove any of them. -status: {proposed | rejected | accepted | deprecated | � | superseded by [ADR-0001](0001-madr-architecture-decisions.md)} -contact: westey-m -date: 2024-11-27 -deciders: {list everyone involved in the decision} -consulted: {list everyone whose opinions are sought (typically subject-matter experts); and with whom there is a two-way communication} -informed: {list everyone who is kept up-to-date on progress; and with whom there is a one-way communication} ---- - -# Support Hybrid Search in VectorStore abstractions - -## Context and Problem Statement - -In addition to simple vector search, many databases also support Hybrid search. -Hybrid search typically results in higher quality search results, and therefore the ability to do Hybrid search via VectorStore abstractions -is an important feature to add. 
- -The way in which Hybrid search is supported varies by database. The two most common ways of supporting hybrid search is: - -1. Using dense vector search and keyword/fulltext search in parallel, and then combining the results. -1. Using dense vector search and sparse vector search in parallel, and then combining the results. - -Sparse vectors are different from dense vectors in that they typically have many more dimensions, but with many of the dimensions being zero. -Sparse vectors, when used with text search, have a dimension for each word/token in a vocabulary, with the value indicating the importance of the word -in the source text. -The more common the word in a specific chunk of text, and the less common the word is in the corpus, the higher the value in the sparse vector. - -There are various mechanisms for generating sparse vectors, such as - -- [TF-IDF](https://en.wikipedia.org/wiki/Tf%E2%80%93idf) -- [SPLADE](https://www.pinecone.io/learn/splade/) -- [BGE-m3 sparse embedding model](https://huggingface.co/BAAI/bge-m3). -- [pinecone-sparse-english-v0](https://docs.pinecone.io/models/pinecone-sparse-english-v0) - -While these are supported well in Python, they are not well supported in .net today. -Adding support for generating sparse vectors is out of scope of this ADR. - -More background information: - -- [Background article from Qdrant about using sparse vectors for Hybrid Search](https://qdrant.tech/articles/sparse-vectors) -- [TF-IDF explainer for beginners](https://medium.com/@coldstart_coder/understanding-and-implementing-tf-idf-in-python-a325d1301484) - -ML.Net contains an implementation of TF-IDF that could be used to generate sparse vectors in .net. See [here](https://github.com/dotnet/machinelearning/blob/886e2ff125c0060f5a251056c7eb2a7d28738984/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ProduceWordBags.cs#L55-L105) for an example. 
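To make the sparse-vector idea concrete, the sketch below hand-rolls a TF-IDF encoding in C#. It is illustrative only - it is not the ML.Net `ProduceWordBags` sample linked above, and the `SparseVector` record (parallel index/value arrays) simply mirrors the sparse data model listed for Pinecone and Qdrant in the comparison table below.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

// Illustrative shape only: parallel indices/values arrays, matching the sparse data model
// listed for Pinecone and Qdrant in the comparison table.
public sealed record SparseVector(int[] Indices, float[] Values);

public static class TfIdfSketch
{
    /// <summary>Encodes one pre-tokenized document as a TF-IDF weighted sparse vector over a small corpus.</summary>
    public static SparseVector Encode(string[][] corpus, string[] document)
    {
        // Vocabulary: one sparse dimension per distinct token seen anywhere in the corpus.
        string[] vocabulary = corpus.SelectMany(tokens => tokens).Distinct().OrderBy(t => t, StringComparer.Ordinal).ToArray();
        Dictionary<string, int> indexOf = vocabulary.Select((token, index) => (token, index)).ToDictionary(p => p.token, p => p.index);

        // Document frequency: in how many corpus documents each token occurs.
        Dictionary<string, int> documentFrequency = vocabulary.ToDictionary(
            token => token,
            token => corpus.Count(tokens => tokens.Contains(token)));

        // Term frequency within the document being encoded.
        var termFrequency = document
            .Where(token => indexOf.ContainsKey(token))
            .GroupBy(token => token)
            .ToDictionary(g => g.Key, g => (float)g.Count() / document.Length);

        // TF-IDF: common in this document and rare in the corpus => high weight; zero weights are dropped.
        var entries = termFrequency
            .Select(kvp => (Index: indexOf[kvp.Key], Value: kvp.Value * (float)Math.Log((double)corpus.Length / documentFrequency[kvp.Key])))
            .Where(e => e.Value > 0)
            .OrderBy(e => e.Index)
            .ToArray();

        return new SparseVector(entries.Select(e => e.Index).ToArray(), entries.Select(e => e.Value).ToArray());
    }
}
```

In practice the same shape could be produced by ML.Net's `ProduceWordBags` transform with TF-IDF weighting, or by a learned sparse model such as SPLADE or BGE-m3, as referenced above.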
- -### Hybrid search support in different databases - -|Feature|Azure AI Search|Weaviate|Redis|Chroma|Pinecone|PostgreSql|Qdrant|Milvus|Elasticsearch|CosmosDB NoSql|MongoDB| -|-|-|-|-|-|-|-|-|-|-|-|-| -|Hybrid search supported|Y|Y|N (No parallel execution with fusion)|N|Y|Y|Y|Y|Y|Y|Y| -|Hybrid search definition|Vector + FullText|[Vector + Keyword (BM25F)](https://weaviate.io/developers/weaviate/search/hybrid)|||[Vector + Sparse Vector for keywords](https://docs.pinecone.io/guides/get-started/key-features#hybrid-search)|[Vector + Keyword](https://jkatz05.com/post/postgres/hybrid-search-postgres-pgvector/)|[Vector + SparseVector / Keyword](https://qdrant.tech/documentation/concepts/hybrid-queries/)|[Vector + SparseVector](https://milvus.io/docs/multi-vector-search.md)|Vector + FullText|[Vector + Fulltext (BM25)](https://learn.microsoft.com/en-us/azure/cosmos-db/gen-ai/hybrid-search)|[Vector + FullText](https://www.mongodb.com/docs/atlas/atlas-search/tutorial/hybrid-search)| -|Fusion method configurable|N|Y|||?|Y|Y|Y|Y, but only one option|Y, but only one option|N| -|Fusion methods|[RRF](https://learn.microsoft.com/en-us/azure/search/hybrid-search-ranking)|Ranked/RelativeScore|||?|[Build your own](https://jkatz05.com/post/postgres/hybrid-search-postgres-pgvector/)|RRF / DBSF|[RRF / Weighted](https://milvus.io/docs/multi-vector-search.md)|[RRF](https://www.elastic.co/search-labs/tutorials/search-tutorial/vector-search/hybrid-search)|[RRF](https://learn.microsoft.com/en-us/azure/cosmos-db/nosql/query/rrf)|[RRF](https://www.mongodb.com/docs/atlas/atlas-search/tutorial/hybrid-search)| -|Hybrid Search Input Params|Vector + string|[Vector + string](https://weaviate.io/developers/weaviate/api/graphql/search-operators#hybrid)|||Vector + SparseVector|Vector + String|[Vector + SparseVector](https://qdrant.tech/documentation/concepts/hybrid-queries/)|[Vector + SparseVector](https://milvus.io/docs/multi-vector-search.md)|Vector + string|Vector + string array|Vector + string| -|Sparse Distance Function|n/a|n/a|||[dotproduct only for both dense and sparse, 1 setting for both](https://docs.pinecone.io/guides/data/understanding-hybrid-search#sparse-dense-workflow)|n/a|dotproduct|Inner Product|n/a|n/a|n/a| -|Sparse Indexing options|n/a|n/a|||no separate config to dense|n/a|ondisk / inmemory + IDF|[SPARSE_INVERTED_INDEX / SPARSE_WAND](https://milvus.io/docs/index.md?tab=sparse)|n/a|n/a|n/a| -|Sparse data model|n/a|n/a|||[indices & values arrays](https://docs.pinecone.io/guides/data/upsert-sparse-dense-vectors)|n/a|indices & values arrays|[sparse matrix / List of dict / list of tuples](https://milvus.io/docs/sparse_vector.md#Use-sparse-vectors-in-Milvus)|n/a|n/a|n/a| -|Keyword matching behavior|[Space Separated with SearchMode=any does OR, searchmode=all does AND](https://learn.microsoft.com/en-us/azure/search/search-lucene-query-architecture)|[Tokenization with split by space, affects ranking](https://weaviate.io/developers/weaviate/search/bm25)|||n/a|[Tokenization](https://www.postgresql.org/docs/current/textsearch-controls.html)|[
No FTS Index: Exact Substring match<br>FTS Index present: All words must be present
](https://qdrant.tech/documentation/concepts/filtering/#full-text-match)|n/a|[And/Or capabilities](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-match-bool-prefix-query.html)|-|[Allows multiple multi-word phrases with OR](https://www.mongodb.com/docs/atlas/atlas-search/phrase/) and [a single multi-word prhase where the words can be OR'd or AND'd](https://www.mongodb.com/docs/atlas/atlas-search/text/)| - -Glossary: - -- RRF = Reciprical Rank Fusion -- DBSF = Distribution-Based Score Fusion -- IDF = Inverse Document Frequency - -### Language required for Cosmos DB NoSQL full text search configuration - -Cosmos DB NoSQL requires a language to be specified for full text search and it requires full text search indexing for hybrid search to be enabled. -We therefore need to support a way of specifying the language when creating the index. - -Cosmos DB NoSQL is the only database from our sample that has a required setting of this type. - -|Feature|Azure AI Search|Weaviate|Redis|Chroma|Pinecone|PostgreSql|Qdrant|Milvus|Elasticsearch|CosmosDB NoSql|MongoDB| -|-|-|-|-|-|-|-|-|-|-|-|-| -|Requires FullTextSearch indexing for hybrid search|Y|Y|n/a|n/a|n/a|Y|N [optional](https://qdrant.tech/documentation/concepts/filtering/#full-text-match)|n/a|Y|Y|[Y](https://www.mongodb.com/docs/atlas/atlas-search/tutorial/hybrid-search/?msockid=04b550d92f2f619c271a45a42e066050#create-the-atlas-vector-search-and-fts-indexes)| -|Required FullTextSearch index options|None required, [many optional](https://learn.microsoft.com/en-us/rest/api/searchservice/indexes/create?view=rest-searchservice-2024-07-01&tabs=HTTP)|None required, [none optional](https://weaviate.io/developers/weaviate/concepts/indexing#collections-without-indexes)||||[language required](https://jkatz05.com/post/postgres/hybrid-search-postgres-pgvector/)|none required, [some optional](https://qdrant.tech/documentation/concepts/indexing/#full-text-index)||None required, [many optional](https://elastic.github.io/elasticsearch-net/8.16.3/api/Elastic.Clients.Elasticsearch.Mapping.TextProperty.html)|Language Required|None required, [many optional](https://www.mongodb.com/docs/atlas/atlas-search/field-types/string-type/#configure-fts-field-type-field-properties)| - -### Keyword Search interface options - -Each DB has different keyword search capabilities. Some only support a very basic interface when it comes to listing keywords for hybrid search. The following table is to list the compatibility of each DB with a specific keyword public interface we may want to support. - -|Feature|Azure AI Search|Weaviate|PostgreSql|Qdrant|Elasticsearch|CosmosDB NoSql|MongoDB| -|-|-|-|-|-|-|-|-| -|
string[] keyword<br>One word per element<br>Any matching word boosts ranking.
|Y|Y (have to join with spaces)|[Y (have to join with spaces)](https://www.postgresql.org/docs/current/textsearch-controls.html)|Y (via filter with multiple OR'd matches)|Y|Y|[Y (have to join with spaces)](https://www.mongodb.com/docs/drivers/node/current/fundamentals/crud/read-operations/text/)| -|
string[] keyword<br>One or more words per element<br>All words in a single element have to be present to boost the ranking.
|Y|N|Y|Y (via filter with multiple OR'd matches and FTS Index)|-|N|N| -|
string[] keyword<br>One or more words per element<br>Multiple words in a single element is a phrase that must match exactly to boost the ranking.
|Y|N|Y|Only via filter with multiple OR'd matches and NO Index|-|N|Y| -|
string keyword<br>Space separated words<br>Any matching word boosts ranking.
|Y|Y|Y|N (would need to split words)|-|N (would need to split words)|Y| - -### Naming Options - -|Interface Name|Method Name|Parameters|Options Class Name|Keyword Property Selector|Dense Vector Property Selector| -|-|-|-|-|-|-| -|KeywordVectorizedHybridSearch|KeywordVectorizedHybridSearch|string[] + Dense Vector|KeywordVectorizedHybridSearchOptions|FullTextPropertyName|VectorPropertyName| -|SparseVectorizedHybridSearch|SparseVectorizedHybridSearch|Sparse Vector + Dense Vector|SparseVectorizedHybridSearchOptions|SparseVectorPropertyName|VectorPropertyName| -|KeywordVectorizableTextHybridSearch|KeywordVectorizableTextHybridSearch|string[] + string|KeywordVectorizableTextHybridSearchOptions|FullTextPropertyName|VectorPropertyName| -|SparseVectorizableTextHybridSearch|SparseVectorizableTextHybridSearch|string[] + string|SparseVectorizableTextHybridSearchOptions|SparseVectorPropertyName|VectorPropertyName| - -|Interface Name|Method Name|Parameters|Options Class Name|Keyword Property Selector|Dense Vector Property Selector| -|-|-|-|-|-|-| -|KeywordVectorizedHybridSearch|HybridSearch|string[] + Dense Vector|KeywordVectorizedHybridSearchOptions|FullTextPropertyName|VectorPropertyName| -|SparseVectorizedHybridSearch|HybridSearch|Sparse Vector + Dense Vector|SparseVectorizedHybridSearchOptions|SparseVectorPropertyName|VectorPropertyName| -|KeywordVectorizableTextHybridSearch|HybridSearch|string[] + string|KeywordVectorizableTextHybridSearchOptions|FullTextPropertyName|VectorPropertyName| -|SparseVectorizableTextHybridSearch|HybridSearch|string[] + string|SparseVectorizableTextHybridSearchOptions|SparseVectorPropertyName|VectorPropertyName| - -|Interface Name|Method Name|Parameters|Options Class Name|Keyword Property Selector|Dense Vector Property Selector| -|-|-|-|-|-|-| -|HybridSearchWithKeywords|HybridSearch|string[] + Dense Vector|HybridSearchOptions|FullTextPropertyName|VectorPropertyName| -|HybridSearchWithSparseVector|HybridSearchWithSparseVector|Sparse Vector + Dense Vector|HybridSearchWithSparseVectorOptions|SparseVectorPropertyName|VectorPropertyName| -|HybridSearchWithKeywordsAndVectorizableText|HybridSearch|string[] + string|HybridSearchOptions|FullTextPropertyName|VectorPropertyName| -|HybridSearchWithVectorizableKeywordsAndText|HybridSearchWithSparseVector|string[] + string|HybridSearchWithSparseVectorOptions|SparseVectorPropertyName|VectorPropertyName| - -|Area|Type of search|Method Name| -|-|-|-| -|**Non-vector Search**||| -|Non-vector Search||Search| -|**Vector Search**||| -|Vector Search|With Vector|VectorSearch| -|Vector Search|With Vectorizable Text (string)|VectorSearchWithText| -|Vector Search|With Vectorizable Image (string/byte[]/other)|VectorSearchWithImage| -|**Hybrid Search**||| -|Hybrid Search|With DenseVector and string[] keywords|HybridSearch| -|Hybrid Search|With vectorizable string and string[] keywords|HybridSearch| -|Hybrid Search|With DenseVector and SparseVector|HybridSearchWithSparseVector| -|Hybrid Search|With vectorizable string and sparse vectorisable string[] keywords|HybridSearchWithSparseVector| - -### Keyword based hybrid search - -```csharp -interface IKeywordVectorizedHybridSearch -{ - Task> KeywordVectorizedHybridSearch( - TVector vector, - ICollection keywords, - KeywordVectorizedHybridSearchOptions options, - CancellationToken cancellationToken); -} - -class KeywordVectorizedHybridSearchOptions -{ - // The name of the property to target the vector search against. - public string? 
VectorPropertyName { get; init; } - - // The name of the property to target the text search against. - public string? FullTextPropertyName { get; init; } - - public VectorSearchFilter? Filter { get; init; } - public int Top { get; init; } = 3; - public int Skip { get; init; } = 0; - public bool IncludeVectors { get; init; } = false; - public bool IncludeTotalCount { get; init; } = false; -} -``` - -### Sparse Vector based hybrid search - -```csharp -interface ISparseVectorizedHybridSearch -{ - Task> SparseVectorizedHybridSearch( - TVector vector, - TSparseVector sparsevector, - SparseVectorizedHybridSearchOptions options, - CancellationToken cancellationToken); -} - -class SparseVectorizedHybridSearchOptions -{ - // The name of the property to target the dense vector search against. - public string? VectorPropertyName { get; init; } - // The name of the property to target the sparse vector search against. - public string? SparseVectorPropertyName { get; init; } - - public VectorSearchFilter? Filter { get; init; } - public int Top { get; init; } = 3; - public int Skip { get; init; } = 0; - public bool IncludeVectors { get; init; } = false; - public bool IncludeTotalCount { get; init; } = false; -} -``` - -### Keyword Vectorizable text based hybrid search - -```csharp -interface IKeywordVectorizableHybridSearch -{ - Task> KeywordVectorizableHybridSearch( - string searchText, - ICollection keywords, - KeywordVectorizableHybridSearchOptions options = default, - CancellationToken cancellationToken = default); -} - -class KeywordVectorizableHybridSearchOptions -{ - // The name of the property to target the dense vector search against. - public string? VectorPropertyName { get; init; } - // The name of the property to target the text search against. - public string? FullTextPropertyName { get; init; } - - public VectorSearchFilter? Filter { get; init; } - public int Top { get; init; } = 3; - public int Skip { get; init; } = 0; - public bool IncludeVectors { get; init; } = false; - public bool IncludeTotalCount { get; init; } = false; -} -``` - -### Sparse Vector based Vectorizable text hybrid search - -```csharp -interface ISparseVectorizableTextHybridSearch -{ - Task> SparseVectorizableTextHybridSearch( - string searchText, - ICollection keywords, - SparseVectorizableTextHybridSearchOptions options = default, - CancellationToken cancellationToken = default); -} - -class SparseVectorizableTextHybridSearchOptions -{ - // The name of the property to target the dense vector search against. - public string? VectorPropertyName { get; init; } - // The name of the property to target the sparse vector search against. - public string? SparseVectorPropertyName { get; init; } - - public VectorSearchFilter? Filter { get; init; } - public int Top { get; init; } = 3; - public int Skip { get; init; } = 0; - public bool IncludeVectors { get; init; } = false; - public bool IncludeTotalCount { get; init; } = false; -} -``` - -## Decision Drivers - -- Support for generating sparse vectors is required to make sparse vector based hybrid search viable. -- Multiple vectors per record scenarios need to be supported. -- No database in our evaluation set have been identified as supporting converting text to sparse vectors in the database on upsert and storing those sparse vectors in a retrievable field. Of course some of these DBs may use sparse vectors internally to implement keyword search, without exposing them to the caller. - -## Scoping Considered Options - -### 1. 
Keyword Hybrid Search Only - -Only implement KeywordVectorizedHybridSearch & KeywordVectorizableTextHybridSearch for now, until -we can add support for generating sparse vectors. - -### 2. Keyword and SparseVectorized Hybrid Search - -Implement KeywordVectorizedHybridSearch & KeywordVectorizableTextHybridSearch but only -KeywordVectorizableTextHybridSearch, since no database in our evaluation set supports generating sparse vectors in the database. -This will require us to produce code that can generate sparse vectors from text. - -### 3. All abovementioned Hybrid Search - -Create all four interfaces and implement an implementation of SparseVectorizableTextHybridSearch that -generates the sparse vector in the client code. -This will require us to produce code that can generate sparse vectors from text. - -### 4. Generalized Hybrid Search - -Some databases support a more generalized version of hybrid search, where you can take two (or sometimes more) searches of any type and combine the results of these using your chosen fusion method. -You can implement Vector + Keyword search using this more generalized search. -For databases that support only Vector + Keyword hybrid search though, it is not possible to implement the generalized hybrid search on top of those databases. - -## PropertyName Naming Considered Options - -### 1. Explicit Dense naming - -DenseVectorPropertyName -SparseVectorPropertyName - -DenseVectorPropertyName -FullTextPropertyName - -- Pros: This is more explicit, considering that there are also sparse vectors involved. -- Cons: It is inconsistent with the naming in the non-hybrid vector search. - -### 2. Implicit Dense naming - -VectorPropertyName -SparseVectorPropertyName - -VectorPropertyName -FullTextPropertyName - -- Pros: This is consistent with the naming in the non-hybrid vector search. -- Cons: It is internally inconsistent, i.e. we have sparse vector, but for dense it's just vector. - -## Keyword splitting Considered Options - -### 1. Accept Split keywords in interface - -Accept an ICollection of string where each value is a separate keyword. -A version that takes a single keyword and calls the `ICollection` version can also be provided as an extension method. - -```csharp - Task> KeywordVectorizedHybridSearch( - TVector vector, - ICollection keywords, - KeywordVectorizedHybridSearchOptions options, - CancellationToken cancellationToken); -``` - -- Pros: Easier to use in the connector if the underlying DB requires split keywords -- Pros: Only solution broadly supported, see comparison table above. - -### 2. Accept single string in interface - -Accept a single string containing all the keywords. - -```csharp - Task> KeywordVectorizedHybridSearch( - TVector vector, - string keywords, - KeywordVectorizedHybridSearchOptions options, - CancellationToken cancellationToken); -``` - -- Pros: Easier for a user to use, since they don't need to do any keyword splitting. -- Cons: We don't have the capabilities to properly sanitise the string, e.g. splitting words appropriately for the language, and potentially removing filler words. - -### 3. Accept either in interface - -Accept either option and either combine or split the keywords in the connector as needed by the underlying db. 
- -```csharp - Task> KeywordVectorizedHybridSearch( - TVector vector, - ICollection keywords, - KeywordVectorizedHybridSearchOptions options, - CancellationToken cancellationToken); - Task> KeywordVectorizedHybridSearch( - TVector vector, - string keywords, - KeywordVectorizedHybridSearchOptions options, - CancellationToken cancellationToken); -``` - -- Pros: Easier for a user to use, since they can pick whichever suits them better -- Cons: We have to still convert to/from the internal presentation by either combining keywords or splitting them. -- Cons: We don't have the capabilities to properly sanitise the single string, e.g. splitting words appropriately for the language, and potentially removing filler words. - -### 4. Accept either in interface but throw for not supported - -Accept either option but throw for the one not supported by the underlying DB. - -- Pros: Easier for us to implement. -- Cons: Harder for users to use. - -### 5. Separate interfaces for each - -Create a separate interface for the Enumerable and single string options, and only implement the one that is supported by the underlying system for each db. - -- Pros: Easier for us to implement. -- Cons: Harder for users to use. - -## Full text search index mandatory configuration Considered Options - -Cosmos DB NoSQL requires a language to be specified when creating a full text search index. -Other DBs have optional values that can be set. - -### 1. Pass option in via collection options - -This option does the minimum by just adding a language option to the collection's options class. -This language would then be used for all full text search indexes created by the collection. - -- Pros: Simplest to implement -- Cons: Doesn't allow multiple languages to be used for different fields in one record -- Cons: Doesn't add support for all full text search options for all dbs - -### 2. Add extensions for RecordDefinition and data model Attributes - -Add a property bag to the VectorStoreRecordProperty allowing database specific metadata to be provided. -Add an abstract base attribute that can be inherited from that allows extra metadata to be added to the data model, -where each database has their own attributes to specify their settings, with a method to convert the contents to -the property bag required by VectorStoreRecordProperty. - -- Pros: Allows multiple languages to be used for different fields in one record -- Pros: Allows other DBs to add their own settings via their own attributes -- Cons: More work to implement - -## Decision Outcome - -### Scoping - -Chosen option "1. Keyword Hybrid Search Only", since enterprise support for generating sparse vectors is poor and without an end to end story, the value is low. - -### PropertyName Naming - -Chosen option "2. Implicit Dense naming", since it is consistent with the existing vector search options naming. - -### Keyword splitting - -Chosen option "1. Accept Split keywords in interface", since it is the only one with broad support amongst databases.
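As noted above, the chosen split-keyword shape still leaves room for a convenience overload that accepts a single keyword and defers to the `ICollection<string>` version. The sketch below is illustrative only - the interface, options and result types are stand-ins for the hybrid search abstractions sketched earlier in this ADR, not a final API.

```csharp
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

// Stand-ins for the abstractions sketched earlier in this ADR; names and shapes are illustrative only.
public sealed class VectorSearchResults<TRecord> { }
public sealed class KeywordVectorizedHybridSearchOptions { }

public interface IKeywordVectorizedHybridSearch<TRecord>
{
    Task<VectorSearchResults<TRecord>> KeywordVectorizedHybridSearch<TVector>(
        TVector vector,
        ICollection<string> keywords,
        KeywordVectorizedHybridSearchOptions? options = null,
        CancellationToken cancellationToken = default);
}

public static class KeywordVectorizedHybridSearchExtensions
{
    // Convenience overload: wraps a single keyword into the ICollection<string> shape chosen above,
    // so callers with one keyword do not have to build a collection themselves.
    public static Task<VectorSearchResults<TRecord>> KeywordVectorizedHybridSearch<TRecord, TVector>(
        this IKeywordVectorizedHybridSearch<TRecord> searchable,
        TVector vector,
        string keyword,
        KeywordVectorizedHybridSearchOptions? options = null,
        CancellationToken cancellationToken = default)
        => searchable.KeywordVectorizedHybridSearch(vector, new[] { keyword }, options, cancellationToken);
}
```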